Import Cobalt 20.master.0.239287

Includes the following patches:
  https://cobalt-review.googlesource.com/c/cobalt/+/5590
    by n1214.hwang@samsung.com
  https://cobalt-review.googlesource.com/c/cobalt/+/5530
    by errong.leng@samsung.com
diff --git a/src/base/base.gyp b/src/base/base.gyp
index 5eccf13..e442340 100644
--- a/src/base/base.gyp
+++ b/src/base/base.gyp
@@ -692,24 +692,6 @@
       'sources': [
       ],
     },
-
-
-    # Include this target for a main() function that simply instantiates
-    # and runs a base::TestSuite.
-    {
-      'target_name': 'run_all_unittests',
-      'type': 'static_library',
-      'dependencies': [
-        'test_support_base',
-      ],
-      'sources': [
-        'test/run_all_unittests.cc',
-      ],
-      'dependencies': [
-        '<(DEPTH)/testing/gmock.gyp:gmock',
-        '<(DEPTH)/testing/gtest.gyp:gtest',
-      ],
-    },
     {
       'target_name': 'base_unittests',
       'type': '<(gtest_target_type)',
@@ -948,7 +930,6 @@
         'base',
         'base_i18n',
         'base_static',
-        'run_all_unittests',
         'test_support_base',
         'third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
       ],
@@ -969,6 +950,7 @@
         # Destructor is explicitly deleted.
         4624,
       ],
+      'includes': ['<(DEPTH)/base/test/test.gypi'],
     },
     {
       'target_name': 'test_support_base',
diff --git a/src/base/files/file_enumerator_starboard.cc b/src/base/files/file_enumerator_starboard.cc
index 1c2b3f0..70e9590 100644
--- a/src/base/files/file_enumerator_starboard.cc
+++ b/src/base/files/file_enumerator_starboard.cc
@@ -107,10 +107,23 @@
   };
 
   std::vector<FileEnumerator::FileInfo> ret;
-  SbDirectoryEntry entry;
   // We test whether SbDirectoryGetNext returns the parent directory entry
   // ("..") because the definition of SbDirectoryGetNext does not guarantee
   // that it will.
   bool found_dot_dot = false;
+
+#if SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+  std::vector<char> entry(SB_FILE_MAX_NAME);
+
+  while (SbDirectoryGetNext(dir, entry.data(), entry.size())) {
+    const char dot_dot_str[] = "..";
+    if (!SbStringCompare(entry.data(), dot_dot_str, sizeof(dot_dot_str))) {
+      found_dot_dot = true;
+    }
+    ret.push_back(GenerateEntry(entry.data()));
+  }
+#else   // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+  SbDirectoryEntry entry;
+
   while (SbDirectoryGetNext(dir, &entry)) {
     const char dot_dot_str[] = "..";
     if (!SbStringCompare(entry.name, dot_dot_str, sizeof(dot_dot_str))) {
@@ -118,6 +131,8 @@
     }
     ret.push_back(GenerateEntry(entry.name));
   }
+#endif  // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+
   if (!found_dot_dot) {
     ret.push_back(GenerateEntry(".."));
   }
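
The hunk above switches directory enumeration to the newer Starboard
interface, where the caller supplies a name buffer instead of an
SbDirectoryEntry struct. Below is a minimal sketch of the same version-guarded
pattern in isolation, using only calls that appear in this patch; the header
choices and the already-open SbDirectory parameter are assumptions, not part
of the change.

  #include <string>
  #include <vector>

  #include "starboard/configuration.h"
  #include "starboard/directory.h"
  #include "starboard/file.h"
  #include "starboard/string.h"

  // Collects entry names from an already-open directory, skipping "..".
  std::vector<std::string> ListEntryNames(SbDirectory dir) {
    std::vector<std::string> names;
  #if SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
    // Newer API: the caller owns the name buffer.
    std::vector<char> entry(SB_FILE_MAX_NAME);
    while (SbDirectoryGetNext(dir, entry.data(), entry.size())) {
      if (SbStringCompare(entry.data(), "..", 3) != 0) {
        names.push_back(entry.data());
      }
    }
  #else   // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
    // Older API: the entry name lives inside SbDirectoryEntry.
    SbDirectoryEntry entry;
    while (SbDirectoryGetNext(dir, &entry)) {
      if (SbStringCompare(entry.name, "..", 3) != 0) {
        names.push_back(entry.name);
      }
    }
  #endif  // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
    return names;
  }
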
diff --git a/src/base/task/task_scheduler/scheduler_worker_pool_impl.cc b/src/base/task/task_scheduler/scheduler_worker_pool_impl.cc
index 02454ed..da61163 100644
--- a/src/base/task/task_scheduler/scheduler_worker_pool_impl.cc
+++ b/src/base/task/task_scheduler/scheduler_worker_pool_impl.cc
@@ -50,7 +50,7 @@
     "TaskScheduler.NumTasksBetweenWaits.";
 constexpr char kNumThreadsHistogramPrefix[] = "TaskScheduler.NumWorkers.";
 #ifdef STARBOARD
-constexpr size_t kMaxNumberOfWorkers = SB_MAX_THREADS;
+const size_t kMaxNumberOfWorkers = SB_MAX_THREADS;
 #else
 constexpr size_t kMaxNumberOfWorkers = 256;
 #endif
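
The constexpr-to-const relaxation above (mirrored in the unit test diff that
follows) is presumably needed because SB_MAX_THREADS is not guaranteed to
expand to a compile-time constant on every Starboard platform. An illustrative
snippet under that assumption; the EXAMPLE_MAX_THREADS macro below is invented
for the example and is not a real Starboard definition.

  #include <stddef.h>

  #include "starboard/system.h"

  // Hypothetical platform-style thread limit that is NOT a constant
  // expression because it calls a runtime query.
  #define EXAMPLE_MAX_THREADS (SbSystemGetNumberOfProcessors() * 16)

  // Compiles: the value is computed once at static initialization time.
  const size_t kMaxNumberOfWorkers = EXAMPLE_MAX_THREADS;

  // Would not compile: a constexpr initializer must be a constant expression.
  // constexpr size_t kMaxNumberOfWorkers = EXAMPLE_MAX_THREADS;
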
diff --git a/src/base/task/task_scheduler/scheduler_worker_pool_impl_unittest.cc b/src/base/task/task_scheduler/scheduler_worker_pool_impl_unittest.cc
index 0fad634..816415f 100644
--- a/src/base/task/task_scheduler/scheduler_worker_pool_impl_unittest.cc
+++ b/src/base/task/task_scheduler/scheduler_worker_pool_impl_unittest.cc
@@ -1415,7 +1415,7 @@
 // leaves the pool in a valid state with regards to max tasks.
 TEST_F(TaskSchedulerWorkerPoolBlockingTest, MaximumWorkersTest) {
 #ifdef STARBOARD
-  constexpr size_t kMaxNumberOfWorkers = SB_MAX_THREADS;
+  const size_t kMaxNumberOfWorkers = SB_MAX_THREADS;
 #else
   constexpr size_t kMaxNumberOfWorkers = 256;
 #endif
@@ -1665,7 +1665,7 @@
 // test for https://crbug.com/810464.
 TEST_F(TaskSchedulerWorkerPoolImplStartInBodyTest, RacyCleanup) {
 #ifdef STARBOARD
-  constexpr size_t kLocalMaxTasks = SB_MAX_THREADS;
+  const size_t kLocalMaxTasks = SB_MAX_THREADS;
 #else
 #if defined(OS_FUCHSIA)
   // Fuchsia + QEMU doesn't deal well with *many* threads being
diff --git a/src/base/test/test.gypi b/src/base/test/test.gypi
new file mode 100644
index 0000000..c99d14d
--- /dev/null
+++ b/src/base/test/test.gypi
@@ -0,0 +1,16 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Include this file for a main() function that simply instantiates and runs a
+# base::TestSuite.
+{
+  'dependencies': [
+    '<(DEPTH)/base/base.gyp:test_support_base',
+    '<(DEPTH)/testing/gmock.gyp:gmock',
+    '<(DEPTH)/testing/gtest.gyp:gtest',
+  ],
+  'sources': [
+    '<(DEPTH)/base/test/run_all_unittests.cc',
+  ],
+}
diff --git a/src/base/time/time_unittest.cc b/src/base/time/time_unittest.cc
index 87d4cc5..a8a4a58 100644
--- a/src/base/time/time_unittest.cc
+++ b/src/base/time/time_unittest.cc
@@ -1000,14 +1000,22 @@
 // static
 ThreadTicks ThreadTicksOverride::now_ticks_;
 
-#if SB_HAS(TIME_THREAD_NOW)
+#if SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION || \
+    SB_HAS(TIME_THREAD_NOW)
 // IOS doesn't support ThreadTicks::Now().
-#if defined(OS_IOS) || SB_HAS(TIME_THREAD_NOW)
+#if defined(OS_IOS)
 #define MAYBE_NowOverride DISABLED_NowOverride
 #else
 #define MAYBE_NowOverride NowOverride
 #endif
 TEST(ThreadTicks, MAYBE_NowOverride) {
+#if SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+  if (!SbTimeIsTimeThreadNowSupported()) {
+    SB_LOG(INFO) << "Time thread now not supported. Test skipped.";
+    return;
+  }
+#endif  // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+
   ThreadTicksOverride::now_ticks_ = ThreadTicks::Min();
 
   // Override is not active. All Now() methods should return a sensible value.
@@ -1043,7 +1051,8 @@
   EXPECT_LE(initial_thread_ticks, subtle::ThreadTicksNowIgnoringOverride());
   EXPECT_GT(ThreadTicks::Max(), subtle::ThreadTicksNowIgnoringOverride());
 }
-#endif  // SB_HAS(TIME_THREAD_NOW)
+#endif  // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION ||
+        // SB_HAS(TIME_THREAD_NOW)
 
 TEST(ThreadTicks, ThreadNow) {
   if (ThreadTicks::IsSupported()) {
diff --git a/src/cobalt/CHANGELOG.md b/src/cobalt/CHANGELOG.md
index 06fe9c7..cce7b0f 100644
--- a/src/cobalt/CHANGELOG.md
+++ b/src/cobalt/CHANGELOG.md
@@ -6,16 +6,28 @@
 
  - **DevTools and WebDriver listen to ANY interface, except on Linux.**
 
-  DevTools and WebDriver servers listen to connections on any network interface
-  by default, except on Linux where they listen only to loopback (localhost) by
-  default. A new "--dev_servers_listen_ip" command line parameter can be used to
-  specify a different interface for both of them to listen to.
+   DevTools and WebDriver servers listen to connections on any network interface
+   by default, except on Linux where they listen only to loopback (localhost) by
+   default. A new "--dev_servers_listen_ip" command line parameter can be used
+   to specify a different interface for both of them to listen to.
 
  - **DevTools shows asynchronous stack traces.**
 
-  When stopped at a breakpoint within the handler function for an asynchronous
-  operation, the call stack in DevTools now shows both the current function as
-  well as the function where the asynchronous operation was initiated.
+   When stopped at a breakpoint within the handler function for an asynchronous
+   operation, the call stack in DevTools now shows both the current function as
+   well as the function where the asynchronous operation was initiated.
+
+ - **Optimized network buffer management and notification handling.**
+
+   Reduced unnecessary buffer copying during network downloading, which lowers
+   CPU usage on both the NetworkModule thread and the MainWebModule thread.
+   Peak memory usage during downloading is also reduced.  Redundant download
+   progress notifications from the NetworkModule thread to the MainWebModule
+   thread are reduced as well.
+   With these optimizations, CPU utilization of both threads drops by more
+   than 10% on some less powerful platforms during high bitrate content
+   playback.  The lower CPU utilization of the MainWebModule thread allows it
+   to process other tasks (like JavaScript execution) more responsively.
 
 ## Version 20
 
diff --git a/src/cobalt/audio/audio_test.gyp b/src/cobalt/audio/audio_test.gyp
index c354a85..c4e3979 100644
--- a/src/cobalt/audio/audio_test.gyp
+++ b/src/cobalt/audio/audio_test.gyp
@@ -26,7 +26,6 @@
       'dependencies': [
         '<(DEPTH)/cobalt/dom/dom.gyp:dom',
         '<(DEPTH)/cobalt/media/media.gyp:media',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
 
@@ -34,6 +33,7 @@
         #       ScriptValueFactory has non-virtual method CreatePromise().
         '<(DEPTH)/cobalt/script/engine.gyp:engine',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
 
     {
diff --git a/src/cobalt/base/base.gyp b/src/cobalt/base/base.gyp
index a34c59f..f849ff4 100644
--- a/src/cobalt/base/base.gyp
+++ b/src/cobalt/base/base.gyp
@@ -111,10 +111,10 @@
       ],
       'dependencies': [
         'base',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
     {
       'target_name': 'base_test_deploy',
diff --git a/src/cobalt/bindings/testing/testing.gyp b/src/cobalt/bindings/testing/testing.gyp
index cfa6c3d..2898f07 100644
--- a/src/cobalt/bindings/testing/testing.gyp
+++ b/src/cobalt/bindings/testing/testing.gyp
@@ -187,12 +187,12 @@
       'dependencies': [
         '<(DEPTH)/cobalt/base/base.gyp:base',
         '<(DEPTH)/cobalt/script/engine.gyp:engine',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
         'bindings',
         'bindings_test_implementation',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
 
     {
diff --git a/src/cobalt/black_box_tests/README.md b/src/cobalt/black_box_tests/README.md
index d830906..6a696ea 100644
--- a/src/cobalt/black_box_tests/README.md
+++ b/src/cobalt/black_box_tests/README.md
@@ -84,6 +84,6 @@
   1. Add a python test script in tests/.
   2. Add target web page(s) and associated resources(if any) to testdata/.
   3. Add the test name(name of the python test script) to black_box_tests.py
-     to automate new test. Add the name to either the list of tests requiring
+     to automate the new test. Add the name to the list of tests requiring
      app launcher support for system signals(e.g. suspend/resume), or the list
-     of tests that don't.
+     of tests requiring deep link support, or the list of tests that don't.
diff --git a/src/cobalt/black_box_tests/black_box_tests.py b/src/cobalt/black_box_tests/black_box_tests.py
index c209f91..bfc8ce5 100644
--- a/src/cobalt/black_box_tests/black_box_tests.py
+++ b/src/cobalt/black_box_tests/black_box_tests.py
@@ -12,7 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
 from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
@@ -54,6 +53,11 @@
     'web_debugger',
     'web_platform_tests',
 ]
+# These tests can only be run on platforms whose app launcher can send deep
+# links.
+_TESTS_NEEDING_DEEP_LINK = [
+    'fire_deep_link_before_load',
+]
 # Location of test files.
 _TEST_DIR_PATH = 'cobalt.black_box_tests.tests.'
 # Platform dependent device parameters.
@@ -108,10 +112,13 @@
       output_file=None,
       out_directory=out_directory)
 
+  test_targets = _TESTS_NO_SIGNAL
+
   if launcher.SupportsSuspendResume():
-    test_targets = _TESTS_NEEDING_SYSTEM_SIGNAL + _TESTS_NO_SIGNAL
-  else:
-    test_targets = _TESTS_NO_SIGNAL
+    test_targets += _TESTS_NEEDING_SYSTEM_SIGNAL
+
+  if launcher.SupportsDeepLink():
+    test_targets += _TESTS_NEEDING_DEEP_LINK
 
   test_suite = unittest.TestSuite()
   for test in test_targets:
@@ -123,8 +130,12 @@
 class BlackBoxTests(object):
   """Helper class to run all black box tests and return results."""
 
-  def __init__(self, server_binding_address, proxy_address=None,
-               proxy_port=None, test_name=None, wpt_http_port=None):
+  def __init__(self,
+               server_binding_address,
+               proxy_address=None,
+               proxy_port=None,
+               test_name=None,
+               wpt_http_port=None):
     logging.basicConfig(level=logging.DEBUG)
 
     # Setup global variables used by test cases
@@ -159,22 +170,21 @@
     # Test domains used in web platform tests to be resolved to the server
     # binding address.
     hosts = [
-        'web-platform.test',
-        'www.web-platform.test',
-        'www1.web-platform.test',
-        'www2.web-platform.test',
-        'xn--n8j6ds53lwwkrqhv28a.web-platform.test',
+        'web-platform.test', 'www.web-platform.test', 'www1.web-platform.test',
+        'www2.web-platform.test', 'xn--n8j6ds53lwwkrqhv28a.web-platform.test',
         'xn--lve-6lad.web-platform.test'
     ]
-    self.host_resolve_map = dict([(host, server_binding_address) for host in hosts])
+    self.host_resolve_map = dict([
+        (host, server_binding_address) for host in hosts
+    ])
 
   def Run(self):
     if self.proxy_port == '-1':
       return 1
     logging.info('Using proxy port: %s', self.proxy_port)
 
-    with ProxyServer(port=self.proxy_port,
-                     host_resolve_map=self.host_resolve_map):
+    with ProxyServer(
+        port=self.proxy_port, host_resolve_map=self.host_resolve_map):
       if self.test_name:
         suite = unittest.TestLoader().loadTestsFromModule(
             importlib.import_module(_TEST_DIR_PATH + self.test_name))
@@ -199,7 +209,8 @@
       socks.append((address, socket.socket(socket.AF_INET, socket.SOCK_STREAM)))
     try:
       for _ in range(_PORT_SELECTION_RETRY_LIMIT):
-        port = random.randint(_PORT_SELECTION_RANGE[0], _PORT_SELECTION_RANGE[1])
+        port = random.randint(_PORT_SELECTION_RANGE[0],
+                              _PORT_SELECTION_RANGE[1])
         unused = True
         for sock in socks:
           result = sock[1].connect_ex((sock[0], port))
@@ -208,9 +219,8 @@
             break
         if unused:
           return port
-      logging.error(
-          'Can not find unused port on addresses within %s attempts.' %
-          _PORT_SELECTION_RETRY_LIMIT)
+      logging.error('Cannot find unused port on addresses within %s attempts.',
+                    _PORT_SELECTION_RETRY_LIMIT)
       return -1
     finally:
       for sock in socks:
@@ -219,28 +229,33 @@
 
 def main():
   parser = argparse.ArgumentParser()
-  parser.add_argument('--server_binding_address',
-                      default='127.0.0.1',
-                      help='Binding address used to create the test server.')
-  parser.add_argument('--proxy_address',
-                      default=None,
-                      help=('Address to the proxy server that all black box'
-                            'tests are run through. If not specified, the'
-                            'server binding address is used.'))
-  parser.add_argument('--proxy_port',
-                      default=None,
-                      help=('Port used to create the proxy server that all'
-                            'black box tests are run through. If not'
-                            'specified, a random free port is used.'))
-  parser.add_argument('--test_name',
-                      default=None,
-                      help=('Name of test to be run. If not specified, all '
-                            'tests are run.'))
-  parser.add_argument('--wpt_http_port',
-                      default=None,
-                       help=('Port used to create the web platform test http'
-                             'server. If not specified, a random free port is'
-                             'used.'))
+  parser.add_argument(
+      '--server_binding_address',
+      default='127.0.0.1',
+      help='Binding address used to create the test server.')
+  parser.add_argument(
+      '--proxy_address',
+      default=None,
+      help=('Address to the proxy server that all black box '
+            'tests are run through. If not specified, the '
+            'server binding address is used.'))
+  parser.add_argument(
+      '--proxy_port',
+      default=None,
+      help=('Port used to create the proxy server that all '
+            'black box tests are run through. If not '
+            'specified, a random free port is used.'))
+  parser.add_argument(
+      '--test_name',
+      default=None,
+      help=('Name of test to be run. If not specified, all '
+            'tests are run.'))
+  parser.add_argument(
+      '--wpt_http_port',
+      default=None,
+      help=('Port used to create the web platform test http '
+            'server. If not specified, a random free port is '
+            'used.'))
   args, _ = parser.parse_known_args()
 
   test_object = BlackBoxTests(args.server_binding_address, args.proxy_address,
diff --git a/src/cobalt/black_box_tests/proxy_server.py b/src/cobalt/black_box_tests/proxy_server.py
index ee29a39..df6c03b 100644
--- a/src/cobalt/black_box_tests/proxy_server.py
+++ b/src/cobalt/black_box_tests/proxy_server.py
@@ -32,7 +32,7 @@
 
 class ProxyServer(object):
 
-  def __init__(self, hostname='0.0.0.0', port='8000', host_resolve_map=None):
+  def __init__(self, hostname='127.0.0.1', port='8000', host_resolve_map=None):
     self.command = [
         'python',
         os.path.join(SRC_DIR, 'third_party', 'proxy_py', 'proxy.py'),
diff --git a/src/cobalt/black_box_tests/testdata/fire_deep_link_before_load.html b/src/cobalt/black_box_tests/testdata/fire_deep_link_before_load.html
new file mode 100644
index 0000000..6be8649
--- /dev/null
+++ b/src/cobalt/black_box_tests/testdata/fire_deep_link_before_load.html
@@ -0,0 +1,7 @@
+<HTML>
+  <HEAD></HEAD>
+  <BODY>
+    <script src='black_box_js_test_utils.js'></script>
+    <script src='fire_deep_link_before_load.js'></script>
+  </BODY>
+</HTML>
diff --git a/src/cobalt/black_box_tests/testdata/fire_deep_link_before_load.js b/src/cobalt/black_box_tests/testdata/fire_deep_link_before_load.js
new file mode 100644
index 0000000..cc2128c
--- /dev/null
+++ b/src/cobalt/black_box_tests/testdata/fire_deep_link_before_load.js
@@ -0,0 +1,35 @@
+// Copyright 2019 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Fail if the deep link is not received within 15 seconds.
+var kTimeout = 15 * 1000;
+var failTimer = setTimeout(fail, kTimeout);
+
+function fail() {
+    console.log("Failing due to timeout!");
+    assertTrue(false);
+}
+
+// The test sends "link 1", "link 2", "link 3" before load, so only "link 3"
+// should be handled.
+function listener(link) {
+    console.log("Received link: " + link.toString());
+    assertEqual("link 3", link);
+    console.log("Ending test");
+    onEndTest();
+    clearTimeout(failTimer);
+}
+
+h5vcc.runtime.onDeepLink.addListener(listener);
+console.log("Listener added");
\ No newline at end of file
diff --git a/src/cobalt/black_box_tests/tests/fire_deep_link_before_load.py b/src/cobalt/black_box_tests/tests/fire_deep_link_before_load.py
new file mode 100644
index 0000000..1a3daba
--- /dev/null
+++ b/src/cobalt/black_box_tests/tests/fire_deep_link_before_load.py
@@ -0,0 +1,119 @@
+# Copyright 2019 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests sending deep links before load."""
+
+# This test script works by splitting the work over 3 threads, so that they
+# can each make progress even if they come across blocking operations.
+# The three threads are:
+#   1. Main thread, runs BlackBoxTestCase and sends the deep links.
+#   2. HTTP server, responsible for holding back the response to a fetch of a
+#      JavaScript file until the deep links have been sent.
+#   3. Webdriver thread, instructs Cobalt to navigate to a URL.
+#
+# Steps in ~ chronological order:
+#   1. Create a TCP socket and listen on all interfaces.
+#   2. Start Cobalt, and point it to the socket created in Step 1.
+#   3. Send 3 deep links.
+#   4. Load & run the JavaScript resource.
+#   5. Check to see if JSTestsSucceeded().
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import _env  # pylint: disable=unused-import,g-bad-import-order
+
+import os
+import SimpleHTTPServer
+import threading
+import traceback
+import urlparse
+
+from cobalt.black_box_tests import black_box_tests
+from cobalt.black_box_tests.threaded_web_server import MakeRequestHandlerClass
+from cobalt.black_box_tests.threaded_web_server import ThreadedWebServer
+
+_FIRE_DEEP_LINK_BEFORE_LOAD_HTML = 'fire_deep_link_before_load.html'
+_FIRE_DEEP_LINK_BEFORE_LOAD_JS = 'fire_deep_link_before_load.js'
+_MAX_ALLOTTED_TIME_SECONDS = 60
+
+_links_fired = threading.Event()
+
+# The base path of the requested assets is the parent directory.
+_SERVER_ROOT_PATH = os.path.join(os.path.dirname(__file__), os.pardir)
+
+
+class JavascriptRequestDetector(MakeRequestHandlerClass(_SERVER_ROOT_PATH)):
+  """Proxies everything to SimpleHTTPRequestHandler, except some paths."""
+
+  def do_GET(self):  # pylint: disable=invalid-name
+    """Handles HTTP GET requests for resources."""
+
+    parsed_path = urlparse.urlparse(self.path)
+    if parsed_path.path == '/testdata/' + _FIRE_DEEP_LINK_BEFORE_LOAD_JS:
+      # It is important not to send any response back, so we block.
+      print('Waiting on links to be fired.')
+      _links_fired.wait()
+      print('Links have been fired. Getting JS.')
+
+    return SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self)
+
+
+class FireDeepLinkBeforeLoad(black_box_tests.BlackBoxTestCase):
+  """Tests firing deep links before web module is loaded."""
+
+  def _LoadPage(self, webdriver, url):
+    """Instructs webdriver to navigate to url."""
+    try:
+      # Note: The following is a blocking request, and returns only when the
+      # page has fully loaded.  In this test, the page will not fully load,
+      # so this does not return until Cobalt exits.
+      webdriver.get(url)
+    except:  # pylint: disable=bare-except
+      traceback.print_exc()
+
+  def test_simple(self):
+
+    # Step 2. Start Cobalt, and point it to the socket created in Step 1.
+    try:
+      with ThreadedWebServer(JavascriptRequestDetector,
+                             self.GetBindingAddress()) as server:
+        with self.CreateCobaltRunner(url='about:blank') as runner:
+          target_url = server.GetURL(file_name='../testdata/' +
+                                     _FIRE_DEEP_LINK_BEFORE_LOAD_HTML)
+          cobalt_launcher_thread = threading.Thread(
+              target=FireDeepLinkBeforeLoad._LoadPage,
+              args=(self, runner.webdriver, target_url))
+          cobalt_launcher_thread.start()
+
+          # Step 3. Send 3 deep links
+          for i in range(1, 4):
+            link = 'link ' + str(i)
+            print('Sending link : ' + link)
+            self.assertTrue(runner.SendDeepLink(link) == 0)
+          print('Links fired.')
+          # Step 4. Load & run the javascript resource.
+          _links_fired.set()
+
+          # Step 5. Check to see if JSTestsSucceeded().
+          # Note that this call will check the DOM multiple times for a period
+          # of time (current default is 30 seconds).
+          self.assertTrue(runner.JSTestsSucceeded())
+    except:  # pylint: disable=bare-except
+      traceback.print_exc()
+      # Consider an exception being thrown as a test failure.
+      self.assertTrue(False)
+    finally:
+      print('Cleaning up.')
+      _links_fired.set()
diff --git a/src/cobalt/browser/application.cc b/src/cobalt/browser/application.cc
index 9b76975..2684cb9 100644
--- a/src/cobalt/browser/application.cc
+++ b/src/cobalt/browser/application.cc
@@ -678,12 +678,18 @@
   options.web_module_options.csp_enforcement_mode = dom::kCspEnforcementEnable;
 
   options.requested_viewport_size = requested_viewport_size;
+  options.web_module_loaded_callback =
+      base::Bind(&Application::DispatchEarlyDeepLink, base::Unretained(this));
   account_manager_.reset(new account::AccountManager());
   browser_module_.reset(
       new BrowserModule(initial_url,
                         (should_preload ? base::kApplicationStatePreloading
                                         : base::kApplicationStateStarted),
                         &event_dispatcher_, account_manager_.get(), options));
+#if SB_IS(EVERGREEN)
+  updater_module_.reset(new updater::UpdaterModule(
+      message_loop_, browser_module_->GetNetworkModule()));
+#endif
   UpdateUserAgent();
 
   app_status_ = (should_preload ? kPreloadingAppStatus : kRunningAppStatus);
@@ -785,6 +791,11 @@
         base::TimeDelta::FromSeconds(duration_in_seconds));
   }
 #endif  // ENABLE_DEBUG_COMMAND_LINE_SWITCHES
+
+#if SB_IS(EVERGREEN)
+  // Run the first update check after the application is started.
+  updater_module_->Update();
+#endif
 }
 
 Application::~Application() {
@@ -861,6 +872,7 @@
 
 void Application::HandleStarboardEvent(const SbEvent* starboard_event) {
   DCHECK(starboard_event);
+  DCHECK_EQ(base::MessageLoop::current(), message_loop_);
 
   // Forward input events to |SystemWindow|.
   if (starboard_event->type == kSbEventTypeInput) {
@@ -916,7 +928,13 @@
         // SB_HAS(ON_SCREEN_KEYBOARD)
     case kSbEventTypeLink: {
       const char* link = static_cast<const char*>(starboard_event->data);
-      DispatchEventInternal(new base::DeepLinkEvent(link));
+      if (browser_module_->IsWebModuleLoaded()) {
+        DLOG(INFO) << "Dispatching deep link " << link;
+        DispatchEventInternal(new base::DeepLinkEvent(link));
+      } else {
+        DLOG(INFO) << "Storing deep link " << link;
+        early_deep_link_ = link;
+      }
       break;
     }
     case kSbEventTypeAccessiblitySettingsChanged:
@@ -1201,5 +1219,14 @@
 }
 #endif  // defined(ENABLE_DEBUGGER) && defined(STARBOARD_ALLOWS_MEMORY_TRACKING)
 
+void Application::DispatchEarlyDeepLink() {
+  if (early_deep_link_.empty()) {
+    return;
+  }
+  DLOG(INFO) << "Dispatching early deep link " << early_deep_link_;
+  DispatchEventInternal(new base::DeepLinkEvent(early_deep_link_.c_str()));
+  early_deep_link_ = "";
+}
+
 }  // namespace browser
 }  // namespace cobalt
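
The early deep link support added above is spread across three hunks: the
web_module_loaded_callback bound in the Application constructor, the buffering
in HandleStarboardEvent, and DispatchEarlyDeepLink. A condensed sketch of the
same pattern in isolation (simplified types, not the actual Application class;
Dispatch stands in for DispatchEventInternal):

  #include <string>

  // Buffers a deep link that arrives before the web module has loaded, and
  // flushes it from the web-module-loaded callback.  Only the latest link is
  // kept, matching Application::early_deep_link_.
  class EarlyDeepLinkBuffer {
   public:
    // Called when a kSbEventTypeLink event arrives.
    void OnDeepLink(const char* link, bool web_module_loaded) {
      if (web_module_loaded) {
        Dispatch(link);
      } else {
        early_deep_link_ = link;
      }
    }

    // Bound as BrowserModule::Options::web_module_loaded_callback.
    void OnWebModuleLoaded() {
      if (early_deep_link_.empty()) return;
      Dispatch(early_deep_link_.c_str());
      early_deep_link_.clear();
    }

   private:
    void Dispatch(const char* link) {
      // Stand-in for DispatchEventInternal(new base::DeepLinkEvent(link)).
      (void)link;
    }

    std::string early_deep_link_;
  };
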
diff --git a/src/cobalt/browser/application.h b/src/cobalt/browser/application.h
index db55875..731f42f 100644
--- a/src/cobalt/browser/application.h
+++ b/src/cobalt/browser/application.h
@@ -28,6 +28,9 @@
 #include "cobalt/browser/browser_module.h"
 #include "cobalt/browser/memory_tracker/tool.h"
 #include "cobalt/system_window/system_window.h"
+#if SB_IS(EVERGREEN)
+#include "cobalt/updater/updater_module.h"
+#endif
 #include "starboard/event.h"
 
 #if defined(ENABLE_WEBDRIVER)
@@ -101,6 +104,11 @@
   // Main components of the Cobalt browser application.
   std::unique_ptr<BrowserModule> browser_module_;
 
+#if SB_IS(EVERGREEN)
+  // Cobalt Updater.
+  std::unique_ptr<updater::UpdaterModule> updater_module_;
+#endif
+
   // Event callbacks.
   base::EventCallback network_event_callback_;
   base::EventCallback deep_link_event_callback_;
@@ -212,6 +220,13 @@
   void OnMemoryTrackerCommand(const std::string& message);
 #endif  // defined(ENABLE_DEBUGGER) && defined(STARBOARD_ALLOWS_MEMORY_TRACKING)
 
+  // The latest link received before the Web Module is loaded is stored here.
+  std::string early_deep_link_;
+
+  // Dispatches an event for the early deep link, if any. This should be
+  // called once the Web Module is loaded.
+  void DispatchEarlyDeepLink();
+
   DISALLOW_COPY_AND_ASSIGN(Application);
 };
 
diff --git a/src/cobalt/browser/browser.gyp b/src/cobalt/browser/browser.gyp
index 5ac69e0..666c6e9 100644
--- a/src/cobalt/browser/browser.gyp
+++ b/src/cobalt/browser/browser.gyp
@@ -15,6 +15,7 @@
 {
   'variables': {
     'sb_pedantic_warnings': 1,
+    'has_updater%' : '<!(python ../../build/file_exists.py <(DEPTH)/cobalt/updater/updater.gyp)',
   },
   'targets': [
     {
@@ -213,6 +214,11 @@
             'COBALT_MESH_CACHE_SIZE_IN_BYTES=<(mesh_cache_size_in_bytes)',
           ],
         }],
+        ['sb_evergreen == 1 and has_updater == "True"', {
+          'dependencies': [
+            '<(DEPTH)/cobalt/updater/updater.gyp:updater',
+          ],
+        }],
       ],
     },
 
@@ -241,11 +247,11 @@
         '<(DEPTH)/cobalt/speech/speech.gyp:speech',
         '<(DEPTH)/cobalt/storage/storage.gyp:storage',
         '<(DEPTH)/cobalt/storage/storage.gyp:storage_upgrade_copy_test_data',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
         'browser',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
 
     {
diff --git a/src/cobalt/browser/browser_bindings_gen.gyp b/src/cobalt/browser/browser_bindings_gen.gyp
index 29cbc9d..beb8e78 100644
--- a/src/cobalt/browser/browser_bindings_gen.gyp
+++ b/src/cobalt/browser/browser_bindings_gen.gyp
@@ -232,6 +232,7 @@
         '../audio/audio_node_channel_count_mode.idl',
         '../audio/audio_node_channel_interpretation.idl',
         '../debug/console/console_command.idl',
+        '../debug/console/debug_console_mode.idl',
         '../dom/blob_property_bag.idl',
         '../dom/captions/caption_character_edge_style.idl',
         '../dom/captions/caption_color.idl',
diff --git a/src/cobalt/browser/browser_module.cc b/src/cobalt/browser/browser_module.cc
index 6052b23..9b4b07b 100644
--- a/src/cobalt/browser/browser_module.cc
+++ b/src/cobalt/browser/browser_module.cc
@@ -310,7 +310,8 @@
       main_web_module_generation_(0),
       next_timeline_id_(1),
       current_splash_screen_timeline_id_(-1),
-      current_main_web_module_timeline_id_(-1) {
+      current_main_web_module_timeline_id_(-1),
+      web_module_loaded_callback_(options_.web_module_loaded_callback) {
   TRACE_EVENT0("cobalt::browser", "BrowserModule::BrowserModule()");
 
   // Apply platform memory setting adjustments and defaults.
@@ -345,7 +346,18 @@
 #if defined(ENABLE_DEBUGGER)
   debug_console_layer_ = render_tree_combiner_.CreateLayer(kDebugConsoleZIndex);
 #endif
-  if (command_line->HasSwitch(browser::switches::kQrCodeOverlay)) {
+
+  int qr_code_overlay_slots = 4;
+  if (command_line->HasSwitch(switches::kQrCodeOverlay)) {
+    auto slots_in_string =
+        command_line->GetSwitchValueASCII(switches::kQrCodeOverlay);
+    if (!slots_in_string.empty()) {
+      auto result = base::StringToInt(slots_in_string, &qr_code_overlay_slots);
+      DCHECK(result) << "Failed to convert value of --"
+                     << switches::kQrCodeOverlay << ": "
+                     << slots_in_string << " to int.";
+      DCHECK_GT(qr_code_overlay_slots, 0);
+    }
     qr_overlay_info_layer_ =
         render_tree_combiner_.CreateLayer(kOverlayInfoZIndex);
   } else {
@@ -430,7 +442,7 @@
   if (qr_overlay_info_layer_) {
     math::Size width_height = GetViewportSize().width_height();
     qr_code_overlay_.reset(new overlay_info::QrCodeOverlay(
-        width_height, GetResourceProvider(),
+        width_height, qr_code_overlay_slots, GetResourceProvider(),
         base::Bind(&BrowserModule::QueueOnQrCodeOverlayRenderTreeProduced,
                    base::Unretained(this))));
   }
@@ -680,7 +692,12 @@
   on_error_retry_count_ = 0;
 
   on_load_event_time_ = base::TimeTicks::Now().ToInternalValue();
+
   web_module_loaded_.Signal();
+
+  if (!web_module_loaded_callback_.is_null()) {
+    web_module_loaded_callback_.Run();
+  }
 }
 
 bool BrowserModule::WaitForLoad(const base::TimeDelta& timeout) {
diff --git a/src/cobalt/browser/browser_module.h b/src/cobalt/browser/browser_module.h
index b44418a..f4f9aa5 100644
--- a/src/cobalt/browser/browser_module.h
+++ b/src/cobalt/browser/browser_module.h
@@ -106,6 +106,7 @@
     base::Optional<cssom::ViewportSize> requested_viewport_size;
     bool enable_splash_screen_on_reloads;
     bool enable_on_screen_keyboard = true;
+    base::Closure web_module_loaded_callback;
   };
 
   // Type for a collection of URL handler callbacks that can potentially handle
@@ -119,6 +120,7 @@
                 const Options& options);
   ~BrowserModule();
 
+  network::NetworkModule* GetNetworkModule() { return &network_module_; }
   std::string GetUserAgent() { return network_module_.GetUserAgent(); }
 
   // Recreates web module with the given URL. In the case where Cobalt is
@@ -210,6 +212,8 @@
       const base::AccessibilityCaptionSettingsChangedEvent* event);
 #endif  // SB_API_VERSION >= SB_CAPTIONS_REQUIRED_VERSION || SB_HAS(CAPTIONS)
 
+  bool IsWebModuleLoaded() { return web_module_loaded_.IsSignaled(); }
+
  private:
 #if SB_HAS(CORE_DUMP_HANDLER_SUPPORT)
   static void CoreDumpHandler(void* browser_module_as_void);
@@ -666,6 +670,9 @@
   // by automem.  We want this so that we can check that it never changes, since
   // we do not have the ability to modify it after startup.
   base::Optional<int64_t> javascript_gc_threshold_in_bytes_;
+
+  // Callback to run when the Web Module is loaded.
+  base::Closure web_module_loaded_callback_;
 };
 
 }  // namespace browser
diff --git a/src/cobalt/browser/cobalt.gyp b/src/cobalt/browser/cobalt.gyp
index 8320f9b..471e9ec 100644
--- a/src/cobalt/browser/cobalt.gyp
+++ b/src/cobalt/browser/cobalt.gyp
@@ -15,7 +15,6 @@
 {
   'variables': {
     'sb_pedantic_warnings': 1,
-    'has_updater%' : '<!(python ../../build/file_exists.py <(DEPTH)/cobalt/updater/updater.gyp)',
   },
   'targets': [
     {
@@ -34,11 +33,6 @@
             '<(DEPTH)/cobalt/browser/splash_screen/splash_screen.gyp:copy_splash_screen',
           ],
         }],
-        ['sb_evergreen == 1 and has_updater == "True"', {
-          'dependencies': [
-            '<(DEPTH)/cobalt/updater/updater.gyp:updater',
-          ],
-        }],
       ],
     },
     {
@@ -91,7 +85,7 @@
         },
       ]
     }],
-    ['final_executable_type == "shared_library"', {
+    ['final_executable_type == "shared_library" and sb_evergreen != 1', {
       'targets': [
         {
           'target_name': 'cobalt_bin',
diff --git a/src/cobalt/browser/debug_console.cc b/src/cobalt/browser/debug_console.cc
index 56223d9..36b5caa 100644
--- a/src/cobalt/browser/debug_console.cc
+++ b/src/cobalt/browser/debug_console.cc
@@ -33,9 +33,11 @@
 const char kInitialDebugConsoleUrl[] =
     "file:///cobalt/debug/console/debug_console.html";
 
-const char kDebugConsoleOffString[] = "off";
-const char kDebugConsoleOnString[] = "on";
-const char kDebugConsoleHudString[] = "hud";
+const char kDebugConsoleModeOffString[] = "off";
+const char kDebugConsoleModeHudString[] = "hud";
+const char kDebugConsoleModeDebugString[] = "debug";
+const char kDebugConsoleModeDebugStringAlias[] = "on";  // Legacy name of mode.
+const char kDebugConsoleModeMediaString[] = "media";
 
 // Convert from a debug console visibility setting string to an integer
 // value specified by a constant defined in debug::console::DebugHub.
@@ -44,12 +46,16 @@
   // Static casting is necessary in order to get around what appears to be a
   // compiler error on Linux when implicitly constructing a base::Optional<int>
   // from a static const int.
-  if (mode_string == kDebugConsoleOffString) {
-    return static_cast<int>(debug::console::DebugHub::kDebugConsoleOff);
-  } else if (mode_string == kDebugConsoleHudString) {
-    return static_cast<int>(debug::console::DebugHub::kDebugConsoleHud);
-  } else if (mode_string == kDebugConsoleOnString) {
-    return static_cast<int>(debug::console::DebugHub::kDebugConsoleOn);
+  if (mode_string == kDebugConsoleModeOffString) {
+    return static_cast<int>(debug::console::kDebugConsoleModeOff);
+  } else if (mode_string == kDebugConsoleModeHudString) {
+    return static_cast<int>(debug::console::kDebugConsoleModeHud);
+  } else if (mode_string == kDebugConsoleModeDebugString) {
+    return static_cast<int>(debug::console::kDebugConsoleModeDebug);
+  } else if (mode_string == kDebugConsoleModeDebugStringAlias) {
+    return static_cast<int>(debug::console::kDebugConsoleModeDebug);
+  } else if (mode_string == kDebugConsoleModeMediaString) {
+    return static_cast<int>(debug::console::kDebugConsoleModeMedia);
   } else {
     DLOG(WARNING) << "Debug console mode \"" << mode_string
                   << "\" not recognized.";
@@ -82,7 +88,7 @@
   }
 
   // By default the debug console is off.
-  return debug::console::DebugHub::kDebugConsoleOff;
+  return debug::console::kDebugConsoleModeOff;
 }
 
 // A function to create a DebugHub object, to be injected into WebModule.
@@ -147,8 +153,8 @@
 
 bool DebugConsole::ShouldInjectInputEvents() {
   switch (GetMode()) {
-    case debug::console::DebugHub::kDebugConsoleOff:
-    case debug::console::DebugHub::kDebugConsoleHud:
+    case debug::console::kDebugConsoleModeOff:
+    case debug::console::kDebugConsoleModeHud:
       return false;
     default:
       return true;
@@ -197,12 +203,12 @@
 
 void DebugConsole::CycleMode() {
   base::AutoLock lock(mode_mutex_);
-  mode_ = (mode_ + 1) % debug::console::DebugHub::kDebugConsoleNumModes;
+  mode_ = (mode_ + 1) % debug::console::kDebugConsoleModeCount;
 }
 
-int DebugConsole::GetMode() {
+debug::console::DebugConsoleMode DebugConsole::GetMode() {
   base::AutoLock lock(mode_mutex_);
-  return mode_;
+  return static_cast<debug::console::DebugConsoleMode>(mode_);
 }
 
 }  // namespace browser
diff --git a/src/cobalt/browser/debug_console.h b/src/cobalt/browser/debug_console.h
index bb1fe25..f368fc5 100644
--- a/src/cobalt/browser/debug_console.h
+++ b/src/cobalt/browser/debug_console.h
@@ -27,6 +27,7 @@
 #include "cobalt/base/token.h"
 #include "cobalt/browser/lifecycle_observer.h"
 #include "cobalt/browser/web_module.h"
+#include "cobalt/debug/console/debug_console_mode.h"
 #include "cobalt/debug/console/debug_hub.h"
 #include "cobalt/dom/input_event_init.h"
 #include "cobalt/dom/keyboard_event_init.h"
@@ -86,7 +87,7 @@
 
   // Returns true iff the console is in a mode that is visible.
   bool IsVisible() {
-    return (GetMode() != debug::console::DebugHub::kDebugConsoleOff);
+    return (GetMode() != debug::console::kDebugConsoleModeOff);
   }
 
   void SetSize(const cssom::ViewportSize& window_dimensions,
@@ -114,7 +115,7 @@
   }
 
   // Returns the currently set debug console visibility mode.
-  int GetMode();
+  debug::console::DebugConsoleMode GetMode();
 
   // Returns true iff the debug console is in a state where it should route
   // input events to its web module.
diff --git a/src/cobalt/browser/switches.cc b/src/cobalt/browser/switches.cc
index edb849b..127556b 100644
--- a/src/cobalt/browser/switches.cc
+++ b/src/cobalt/browser/switches.cc
@@ -315,7 +315,10 @@
 const char kQrCodeOverlay[] = "qr_code_overlay";
 const char kQrCodeOverlayHelp[] =
     "Display QrCode based overlay information. These information can be used"
-    " for performance tuning or playback quality check.";
+    " for performance tuning or playback quality checks.  By default the QR"
+    " code is displayed in 4 different locations on the screen alternately,"
+    " and the number of locations can be overridden by specifying it as the"
+    " value of the command line parameter, e.g. '--qr_code_overlay=6'.";
 
 const char kReduceCpuMemoryBy[] = "reduce_cpu_memory_by";
 const char kReduceCpuMemoryByHelp[] =
diff --git a/src/cobalt/build/all.gyp b/src/cobalt/build/all.gyp
index e0d3b20..e484055 100644
--- a/src/cobalt/build/all.gyp
+++ b/src/cobalt/build/all.gyp
@@ -99,7 +99,7 @@
             '<(DEPTH)/starboard/elf_loader/elf_loader.gyp:elf_loader_test_deploy',
           ],
         }],
-        ['has_loader_app == "True"', {
+        ['has_loader_app == "True" and sb_evergreen != 1', {
           'dependencies': [
             '<(DEPTH)/starboard/loader_app/loader_app.gyp:*',
           ],
@@ -114,6 +114,7 @@
         ['sb_evergreen==1', {
           'dependencies': [
             '<(DEPTH)/third_party/musl/musl.gyp:musl_unittests',
+            '<(DEPTH)/starboard/loader_app/installation_manager.gyp:*',
           ],
         }],
       ],
diff --git a/src/cobalt/build/build.id b/src/cobalt/build/build.id
index a86f8a5..e411972 100644
--- a/src/cobalt/build/build.id
+++ b/src/cobalt/build/build.id
@@ -1 +1 @@
-234144
\ No newline at end of file
+239287
\ No newline at end of file
diff --git a/src/cobalt/csp/csp.gyp b/src/cobalt/csp/csp.gyp
index eaeaf4b..564888d 100644
--- a/src/cobalt/csp/csp.gyp
+++ b/src/cobalt/csp/csp.gyp
@@ -57,12 +57,12 @@
       ],
       'dependencies': [
         '<(DEPTH)/cobalt/base/base.gyp:base',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
         'csp',
         'csp_copy_test_data',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
 
     {
diff --git a/src/cobalt/css_parser/css_parser.gyp b/src/cobalt/css_parser/css_parser.gyp
index 3c1f6c6..d498ca5 100644
--- a/src/cobalt/css_parser/css_parser.gyp
+++ b/src/cobalt/css_parser/css_parser.gyp
@@ -132,12 +132,12 @@
       ],
       'dependencies': [
         '<(DEPTH)/cobalt/base/base.gyp:base',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
         'css_grammar',
         'css_parser',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
 
     {
diff --git a/src/cobalt/cssom/css_computed_style_data.h b/src/cobalt/cssom/css_computed_style_data.h
index f2c0b9f..557742d 100644
--- a/src/cobalt/cssom/css_computed_style_data.h
+++ b/src/cobalt/cssom/css_computed_style_data.h
@@ -23,6 +23,7 @@
 #include "base/containers/small_map.h"
 #include "base/memory/ref_counted.h"
 #include "cobalt/base/unused.h"
+#include "cobalt/cssom/keyword_value.h"
 #include "cobalt/cssom/property_definitions.h"
 #include "cobalt/cssom/property_value.h"
 
@@ -514,6 +515,18 @@
     return is_inline_before_blockification_;
   }
 
+  bool IsContainingBlockForPositionAbsoluteElements() const {
+    return IsPositioned() || IsTransformed();
+  }
+
+  bool IsPositioned() const {
+    return position() != cssom::KeywordValue::GetStatic();
+  }
+
+  bool IsTransformed() const {
+    return transform() != cssom::KeywordValue::GetNone();
+  }
+
  protected:
   void SetPropertyValue(const PropertyKey key,
                         const scoped_refptr<PropertyValue>& value);
diff --git a/src/cobalt/cssom/cssom_test.gyp b/src/cobalt/cssom/cssom_test.gyp
index d08f3d8..92fb5d6 100644
--- a/src/cobalt/cssom/cssom_test.gyp
+++ b/src/cobalt/cssom/cssom_test.gyp
@@ -58,10 +58,10 @@
         '<(DEPTH)/cobalt/base/base.gyp:base',
         '<(DEPTH)/cobalt/css_parser/css_parser.gyp:css_parser',
         '<(DEPTH)/cobalt/cssom/cssom.gyp:cssom',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
 
     {
diff --git a/src/cobalt/debug/backend/content/dom_agent.js b/src/cobalt/debug/backend/content/dom_agent.js
index 2dcf369..f702565 100644
--- a/src/cobalt/debug/backend/content/dom_agent.js
+++ b/src/cobalt/debug/backend/content/dom_agent.js
@@ -82,13 +82,6 @@
   return JSON.stringify(result);
 }
 
-// Returns the bounding box of a node. This pseudo-command in the DOM domain is
-// a helper for the C++ |DOMAgent::HighlightNode|.
-commands._getBoundingClientRect = function(params) {
-  var node = commands._findNode(params);
-  return JSON.stringify(node.getBoundingClientRect());
-}
-
 // Creates and returns a Node object that represents the specified node.
 // Adds the node's children up to the specified depth. A negative depth will
 // cause all descendants to be added.
diff --git a/src/cobalt/debug/backend/content/overlay_agent.js b/src/cobalt/debug/backend/content/overlay_agent.js
new file mode 100644
index 0000000..ef0f82c
--- /dev/null
+++ b/src/cobalt/debug/backend/content/overlay_agent.js
@@ -0,0 +1,179 @@
+// Copyright 2019 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+(function(debugBackend) {
+
+// Attach methods to handle commands in the 'Overlay' devtools domain.
+// https://chromedevtools.github.io/devtools-protocol/tot/Overlay
+let commands = debugBackend.Overlay = {};
+
+// Returns non-overlapping rectangles to highlight for the box model of a node.
+// This pseudo-command in the Overlay domain is a helper for the C++
+// |OverlayAgent::HighlightNode|.
+commands._highlightNodeRects = function(params) {
+  let node = debugBackend.DOM._findNode(params);
+  let config = params.highlightConfig || {};
+  let highlights = [];
+  if (node && node.getBoundingClientRect) {
+    let styles = window.getComputedStyle(node);
+    let content = node.getBoundingClientRect();
+    let color;
+    let box;
+
+    let transformed = isNodeTransformed(node);
+
+    if (!transformed) {
+      // Margin
+      color = config.marginColor;
+      box = styleBox(styles, 'margin');
+      if (color) {
+        boxRects(content, box).forEach(
+            rect => highlights.push(highlightParams(rect, color)));
+      }
+
+      // Border
+      color = config.borderColor;
+      box = styleBox(styles, 'border');
+      if (color) {
+        boxRects(content, box).forEach(
+            rect => highlights.push(highlightParams(rect, color)));
+      }
+      content = insetRect(content, box);
+
+      // Padding
+      color = config.paddingColor;
+      box = styleBox(styles, 'padding');
+      if (color) {
+        boxRects(content, box).forEach(
+            rect => highlights.push(highlightParams(rect, color)));
+      }
+      content = insetRect(content, box);
+    }
+
+    // Content
+    color = config.contentColor;
+    if (color) {
+      let highlight = highlightParams(content, color);
+      if (transformed) {
+        highlight.outlineColor = {r: 255, g: 0, b: 255, a: 1.0};
+      }
+      highlights.push(highlight);
+    }
+  }
+  return JSON.stringify({highlightRects: highlights});
+}
+
+// Returns the inset width of the 4 sides of a box in the computed style, with
+// margin being negative to place it outside the rect.
+function styleBox(styles, boxName) {
+  let suffix = boxName == 'border' ? '-width' : '';
+  let sign = boxName == 'margin' ? -1 : 1;
+  let box = {};
+  ['top', 'right', 'bottom', 'left'].forEach(side => {
+    let width = styles.getPropertyValue(`${boxName}-${side}${suffix}`);
+    box[side] = sign * parseFloat(width) || 0;
+  });
+  return box;
+}
+
+function isNodeTransformed(node) {
+  while (node) {
+    if (window.getComputedStyle(node).transform !== 'none') return true;
+    node = node.offsetParent;
+  }
+  return false;
+}
+
+// Returns an array of non-overlapping rectangles for the box around the inside
+// of |rect|, but the rectangle for any box side with negative width will be
+// on the outside of |rect|.
+function boxRects(rect, box) {
+  // Start out assuming box widths are all positive.
+  let outerT = rect.y;
+  let outerB = rect.y + rect.height;
+  let outerL = rect.x;
+  let outerR = rect.x + rect.width;
+  let innerT = outerT + box.top;
+  let innerB = outerB - box.bottom;
+  let innerL = outerL + box.left;
+  let innerR = outerR - box.right;
+
+  // Swap any inner/outer "inverted" by a negative box side.
+  if (outerT > innerT) [outerT, innerT] = [innerT, outerT];
+  if (outerB < innerB) [outerB, innerB] = [innerB, outerB];
+  if (outerL > innerL) [outerL, innerL] = [innerL, outerL];
+  if (outerR < innerR) [outerR, innerR] = [innerR, outerR];
+
+  // +--------------+
+  // |              |
+  // +--+--------+--+
+  // |  |        |  |
+  // |  |        |  |
+  // +--+--------+--+
+  // |              |
+  // +--------------+
+  return [
+    // top
+    { x: outerL,
+      y: outerT,
+      width: outerR - outerL,
+      height: innerT - outerT },
+    // bottom
+    { x: outerL,
+      y: innerB,
+      width: outerR - outerL,
+      height: outerB - innerB },
+    // left
+    { x: outerL,
+      y: innerT,
+      width: innerL - outerL,
+      height: innerB - innerT },
+    // right
+    { x: innerR,
+      y: innerT,
+      width: outerR - innerR,
+      height: innerB - innerT },
+  ];
+}
+
+// Returns the rectangle with the box around the insides removed.
+function insetRect(rect, box) {
+  return {
+    x: rect.x + box.left,
+    y: rect.y + box.top,
+    width: rect.width - box.left - box.right,
+    height: rect.height - box.top - box.bottom };
+}
+
+// Returns parameters matching the DevTools protocol "Overlay.highlightRect"
+// parameters, as expected by the native overlay agent.
+function highlightParams(rect, color) {
+  // Copy each property rather than whole objects to ensure this can be
+  // converted with JSON.stringify().
+  return {
+    x: rect.x,
+    y: rect.y,
+    width: rect.width,
+    height: rect.height,
+    color: {
+      r: color.r,
+      g: color.g,
+      b: color.b,
+      a: color.a,
+    },
+  };
+}
+
+// TODO: Pass debugBackend from C++ instead of getting it from the window.
+})(window.debugBackend);
diff --git a/src/cobalt/debug/backend/css_agent.cc b/src/cobalt/debug/backend/css_agent.cc
index 38914e5..1de669c 100644
--- a/src/cobalt/debug/backend/css_agent.cc
+++ b/src/cobalt/debug/backend/css_agent.cc
@@ -14,6 +14,7 @@
 
 #include "cobalt/debug/backend/css_agent.h"
 
+#include "cobalt/dom/document.h"
 #include "cobalt/dom/html_element.h"
 
 namespace cobalt {
@@ -65,7 +66,8 @@
   CSSStyleRuleSequence css_rules;
   auto html_element = element->AsHTMLElement();
   if (html_element) {
-    html_element->UpdateMatchingRules();
+    html_element->node_document()->UpdateComputedStyleOnElementAndAncestor(
+        html_element.get());
     for (const auto& matching_rule : *html_element->matching_rules()) {
       css_rules.push_back(matching_rule.first);
     }
diff --git a/src/cobalt/debug/backend/debug_module.cc b/src/cobalt/debug/backend/debug_module.cc
index 0a854a2..5434789 100644
--- a/src/cobalt/debug/backend/debug_module.cc
+++ b/src/cobalt/debug/backend/debug_module.cc
@@ -29,6 +29,7 @@
 constexpr char kLogAgent[] = "LogAgent";
 constexpr char kDomAgent[] = "DomAgent";
 constexpr char kCssAgent[] = "CssAgent";
+constexpr char kOverlayAgent[] = "OverlayAgent";
 constexpr char kPageAgent[] = "PageAgent";
 constexpr char kTracingAgent[] = "TracingAgent";
 
@@ -152,7 +153,7 @@
   std::unique_ptr<RenderLayer> page_render_layer(new RenderLayer(base::Bind(
       &RenderOverlay::SetOverlay, base::Unretained(data.render_overlay))));
 
-  std::unique_ptr<RenderLayer> dom_render_layer(new RenderLayer(
+  std::unique_ptr<RenderLayer> overlay_render_layer(new RenderLayer(
       base::Bind(&RenderLayer::SetBackLayer, page_render_layer->AsWeakPtr())));
 
   // Create the agents that implement the various devtools protocol domains by
@@ -167,9 +168,10 @@
   }
   console_agent_.reset(new ConsoleAgent(debug_dispatcher_.get(), data.console));
   log_agent_.reset(new LogAgent(debug_dispatcher_.get()));
-  dom_agent_.reset(
-      new DOMAgent(debug_dispatcher_.get(), std::move(dom_render_layer)));
+  dom_agent_.reset(new DOMAgent(debug_dispatcher_.get()));
   css_agent_ = WrapRefCounted(new CSSAgent(debug_dispatcher_.get()));
+  overlay_agent_.reset(new OverlayAgent(debug_dispatcher_.get(),
+                                        std::move(overlay_render_layer)));
   page_agent_.reset(new PageAgent(debug_dispatcher_.get(), data.window,
                                   std::move(page_render_layer),
                                   data.resource_provider));
@@ -201,6 +203,7 @@
   log_agent_->Thaw(RemoveAgentState(kLogAgent, agents_state));
   dom_agent_->Thaw(RemoveAgentState(kDomAgent, agents_state));
   css_agent_->Thaw(RemoveAgentState(kCssAgent, agents_state));
+  overlay_agent_->Thaw(RemoveAgentState(kOverlayAgent, agents_state));
   page_agent_->Thaw(RemoveAgentState(kPageAgent, agents_state));
   tracing_agent_->Thaw(RemoveAgentState(kTracingAgent, agents_state));
 
@@ -228,6 +231,7 @@
   StoreAgentState(agents_state, kLogAgent, log_agent_->Freeze());
   StoreAgentState(agents_state, kDomAgent, dom_agent_->Freeze());
   StoreAgentState(agents_state, kCssAgent, css_agent_->Freeze());
+  StoreAgentState(agents_state, kOverlayAgent, overlay_agent_->Freeze());
   StoreAgentState(agents_state, kPageAgent, page_agent_->Freeze());
   StoreAgentState(agents_state, kTracingAgent, tracing_agent_->Freeze());
 
diff --git a/src/cobalt/debug/backend/debug_module.h b/src/cobalt/debug/backend/debug_module.h
index 07bb642..11f34bd 100644
--- a/src/cobalt/debug/backend/debug_module.h
+++ b/src/cobalt/debug/backend/debug_module.h
@@ -29,6 +29,7 @@
 #include "cobalt/debug/backend/debugger_state.h"
 #include "cobalt/debug/backend/dom_agent.h"
 #include "cobalt/debug/backend/log_agent.h"
+#include "cobalt/debug/backend/overlay_agent.h"
 #include "cobalt/debug/backend/page_agent.h"
 #include "cobalt/debug/backend/render_overlay.h"
 #include "cobalt/debug/backend/runtime_agent.h"
@@ -146,6 +147,7 @@
   std::unique_ptr<LogAgent> log_agent_;
   std::unique_ptr<DOMAgent> dom_agent_;
   scoped_refptr<CSSAgent> css_agent_;
+  std::unique_ptr<OverlayAgent> overlay_agent_;
   std::unique_ptr<PageAgent> page_agent_;
   std::unique_ptr<RuntimeAgent> runtime_agent_;
   std::unique_ptr<ScriptDebuggerAgent> script_debugger_agent_;
diff --git a/src/cobalt/debug/backend/dom_agent.cc b/src/cobalt/debug/backend/dom_agent.cc
index 268e26b..f3d5322 100644
--- a/src/cobalt/debug/backend/dom_agent.cc
+++ b/src/cobalt/debug/backend/dom_agent.cc
@@ -14,16 +14,6 @@
 
 #include "cobalt/debug/backend/dom_agent.h"
 
-#include <memory>
-#include <string>
-
-#include "base/bind.h"
-#include "cobalt/math/matrix3_f.h"
-#include "cobalt/math/transform_2d.h"
-#include "cobalt/render_tree/brush.h"
-#include "cobalt/render_tree/color_rgba.h"
-#include "cobalt/render_tree/rect_node.h"
-
 namespace cobalt {
 namespace debug {
 namespace backend {
@@ -37,17 +27,13 @@
 constexpr char kScriptFile[] = "dom_agent.js";
 }  // namespace
 
-DOMAgent::DOMAgent(DebugDispatcher* dispatcher,
-                   std::unique_ptr<RenderLayer> render_layer)
+DOMAgent::DOMAgent(DebugDispatcher* dispatcher)
     : dispatcher_(dispatcher),
-      render_layer_(std::move(render_layer)),
       ALLOW_THIS_IN_INITIALIZER_LIST(commands_(this, kInspectorDomain)) {
   DCHECK(dispatcher_);
 
   commands_["disable"] = &DOMAgent::Disable;
   commands_["enable"] = &DOMAgent::Enable;
-  commands_["highlightNode"] = &DOMAgent::HighlightNode;
-  commands_["hideHighlight"] = &DOMAgent::HideHighlight;
 }
 
 void DOMAgent::Thaw(JSONObject agent_state) {
@@ -72,77 +58,6 @@
 
 void DOMAgent::Disable(const Command& command) { command.SendResponse(); }
 
-// Unlike most other DOM command handlers, this one is not fully implemented
-// in JavaScript. Instead, the JS object is used to look up the node from the
-// parameters and return its bounding client rect, then the highlight itself
-// is rendered by calling the C++ function |RenderHighlight| to set the render
-// overlay.
-void DOMAgent::HighlightNode(const Command& command) {
-  // Get the bounding rectangle of the specified node.
-  JSONObject json_dom_rect = dispatcher_->RunScriptCommand(
-      "dom._getBoundingClientRect", command.GetParams());
-  double x = 0.0;
-  double y = 0.0;
-  double width = 0.0;
-  double height = 0.0;
-  json_dom_rect->GetDouble("result.x", &x);
-  json_dom_rect->GetDouble("result.y", &y);
-  json_dom_rect->GetDouble("result.width", &width);
-  json_dom_rect->GetDouble("result.height", &height);
-
-  scoped_refptr<dom::DOMRect> dom_rect(
-      new dom::DOMRect(static_cast<float>(x), static_cast<float>(y),
-                       static_cast<float>(width), static_cast<float>(height)));
-
-  // |highlight_config_value| still owned by |params|.
-  JSONObject params = JSONParse(command.GetParams());
-  base::DictionaryValue* highlight_config_value = NULL;
-  bool got_highlight_config =
-      params->GetDictionary("highlightConfig", &highlight_config_value);
-  DCHECK(got_highlight_config);
-  DCHECK(highlight_config_value);
-
-  RenderHighlight(dom_rect, highlight_config_value);
-
-  command.SendResponse();
-}
-
-void DOMAgent::HideHighlight(const Command& command) {
-  render_layer_->SetFrontLayer(scoped_refptr<render_tree::Node>());
-  command.SendResponse();
-}
-
-void DOMAgent::RenderHighlight(
-    const scoped_refptr<dom::DOMRect>& bounding_rect,
-    const base::DictionaryValue* highlight_config_value) {
-  // TODO: Should also render borders, etc.
-
-  // Content color is optional in the parameters, so use a fallback.
-  int r = 112;
-  int g = 168;
-  int b = 219;
-  double a = 0.66;
-  const base::DictionaryValue* content_color = NULL;
-  bool got_content_color =
-      highlight_config_value->GetDictionary("contentColor", &content_color);
-  if (got_content_color && content_color) {
-    content_color->GetInteger("r", &r);
-    content_color->GetInteger("g", &g);
-    content_color->GetInteger("b", &b);
-    content_color->GetDouble("a", &a);
-  }
-  render_tree::ColorRGBA color(r / 255.0f, g / 255.0f, b / 255.0f,
-                               static_cast<float>(a));
-
-  std::unique_ptr<render_tree::Brush> background_brush(
-      new render_tree::SolidColorBrush(color));
-  scoped_refptr<render_tree::Node> rect = new render_tree::RectNode(
-      math::RectF(bounding_rect->x(), bounding_rect->y(),
-                  bounding_rect->width(), bounding_rect->height()),
-      std::move(background_brush));
-  render_layer_->SetFrontLayer(rect);
-}
-
 }  // namespace backend
 }  // namespace debug
 }  // namespace cobalt
diff --git a/src/cobalt/debug/backend/dom_agent.h b/src/cobalt/debug/backend/dom_agent.h
index b1d5339..6e59ffb 100644
--- a/src/cobalt/debug/backend/dom_agent.h
+++ b/src/cobalt/debug/backend/dom_agent.h
@@ -14,16 +14,10 @@
 #ifndef COBALT_DEBUG_BACKEND_DOM_AGENT_H_
 #define COBALT_DEBUG_BACKEND_DOM_AGENT_H_
 
-#include <memory>
-#include <string>
-
-#include "base/memory/weak_ptr.h"
 #include "cobalt/debug/backend/command_map.h"
 #include "cobalt/debug/backend/debug_dispatcher.h"
-#include "cobalt/debug/backend/render_layer.h"
 #include "cobalt/debug/command.h"
 #include "cobalt/debug/json_object.h"
-#include "cobalt/dom/dom_rect.h"
 
 namespace cobalt {
 namespace debug {
@@ -31,8 +25,7 @@
 
 class DOMAgent {
  public:
-  DOMAgent(DebugDispatcher* dispatcher,
-           std::unique_ptr<RenderLayer> render_layer);
+  explicit DOMAgent(DebugDispatcher* dispatcher);
 
   void Thaw(JSONObject agent_state);
   JSONObject Freeze();
@@ -41,22 +34,8 @@
   void Enable(const Command& command);
   void Disable(const Command& command);
 
-  // Highlights a specified node according to highlight parameters.
-  void HighlightNode(const Command& command);
-
-  // Hides the node highlighting.
-  void HideHighlight(const Command& command);
-
-  // Renders a highlight to the overlay.
-  void RenderHighlight(const scoped_refptr<dom::DOMRect>& bounding_rect,
-                       const base::DictionaryValue* highlight_config_value);
-
-  // Helper object to connect to the debug dispatcher, etc.
   DebugDispatcher* dispatcher_;
 
-  // Render layer owned by this object.
-  std::unique_ptr<RenderLayer> render_layer_;
-
   // Map of member functions implementing commands.
   CommandMap<DOMAgent> commands_;
 
diff --git a/src/cobalt/debug/backend/overlay_agent.cc b/src/cobalt/debug/backend/overlay_agent.cc
new file mode 100644
index 0000000..449675c
--- /dev/null
+++ b/src/cobalt/debug/backend/overlay_agent.cc
@@ -0,0 +1,162 @@
+// Copyright 2019 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "cobalt/debug/backend/overlay_agent.h"
+
+#include "cobalt/math/clamp.h"
+#include "cobalt/math/rect_f.h"
+#include "cobalt/render_tree/brush.h"
+#include "cobalt/render_tree/color_rgba.h"
+#include "cobalt/render_tree/composition_node.h"
+#include "cobalt/render_tree/rect_node.h"
+
+namespace cobalt {
+namespace debug {
+namespace backend {
+
+using base::Value;
+using math::Clamp;
+using render_tree::ColorRGBA;
+
+namespace {
+// Definitions from the set specified here:
+// https://chromedevtools.github.io/devtools-protocol/tot/Overlay
+constexpr char kInspectorDomain[] = "Overlay";
+
+// File to load JavaScript Overlay DevTools domain implementation from.
+constexpr char kScriptFile[] = "overlay_agent.js";
+
+// Returns the float value of a param, or 0.0 if undefined or non-numeric.
+float GetFloatParam(const Value* params, base::StringPiece key) {
+  if (!params || !params->is_dict()) return 0.0f;
+  const Value* v = params->FindKey(key);
+  if (!v || !(v->is_double() || v->is_int())) return 0.0f;
+  return static_cast<float>(v->GetDouble());
+}
+
+// Returns an RGBA color defined by a param, or transparent if undefined.
+ColorRGBA RenderColor(const Value* params) {
+  float r = GetFloatParam(params, "r") / 255.0f;
+  float g = GetFloatParam(params, "g") / 255.0f;
+  float b = GetFloatParam(params, "b") / 255.0f;
+  float a = GetFloatParam(params, "a");
+  return ColorRGBA(Clamp(r, 0.0f, 1.0f), Clamp(g, 0.0f, 1.0f),
+                   Clamp(b, 0.0f, 1.0f), Clamp(a, 0.0f, 1.0f));
+}
+
+// Returns a rectangle to render according to the params for the DevTools
+// "Overlay.highlightRect" command.
+// https://chromedevtools.github.io/devtools-protocol/tot/Overlay#method-highlightRect
+scoped_refptr<render_tree::RectNode> RenderHighlightRect(const Value* params) {
+  float x = GetFloatParam(params, "x");
+  float y = GetFloatParam(params, "y");
+  float width = GetFloatParam(params, "width");
+  float height = GetFloatParam(params, "height");
+  ColorRGBA color(RenderColor(params->FindKey("color")));
+  const Value* outline_param = params->FindKey("outlineColor");
+  ColorRGBA outline_color(RenderColor(outline_param));
+  float outline_width = outline_param ? 1.0f : 0.0f;
+  return base::MakeRefCounted<render_tree::RectNode>(
+      math::RectF(x, y, width, height),
+      std::make_unique<render_tree::SolidColorBrush>(color),
+      std::make_unique<render_tree::Border>(render_tree::BorderSide(
+          outline_width, render_tree::kBorderStyleSolid, outline_color)));
+}
+
+}  // namespace
+
+OverlayAgent::OverlayAgent(DebugDispatcher* dispatcher,
+                           std::unique_ptr<RenderLayer> render_layer)
+    : dispatcher_(dispatcher),
+      render_layer_(std::move(render_layer)),
+      ALLOW_THIS_IN_INITIALIZER_LIST(commands_(this, kInspectorDomain)) {
+  DCHECK(dispatcher_);
+  DCHECK(render_layer_);
+
+  commands_["disable"] = &OverlayAgent::Disable;
+  commands_["enable"] = &OverlayAgent::Enable;
+  commands_["highlightNode"] = &OverlayAgent::HighlightNode;
+  commands_["highlightRect"] = &OverlayAgent::HighlightRect;
+  commands_["hideHighlight"] = &OverlayAgent::HideHighlight;
+}
+
+void OverlayAgent::Thaw(JSONObject agent_state) {
+  dispatcher_->AddDomain(kInspectorDomain, commands_.Bind());
+  script_loaded_ = dispatcher_->RunScriptFile(kScriptFile);
+  DLOG_IF(ERROR, !script_loaded_) << "Failed to load " << kScriptFile;
+}
+
+JSONObject OverlayAgent::Freeze() {
+  dispatcher_->RemoveDomain(kInspectorDomain);
+  return JSONObject();
+}
+
+void OverlayAgent::Enable(const Command& command) {
+  if (script_loaded_) {
+    enabled_ = true;
+    command.SendResponse();
+  } else {
+    command.SendErrorResponse(Command::kInternalError,
+                              "Cannot create Overlay inspector.");
+  }
+}
+
+void OverlayAgent::Disable(const Command& command) {
+  enabled_ = false;
+  command.SendResponse();
+}
+
+void OverlayAgent::HighlightNode(const Command& command) {
+  if (!enabled_) {
+    command.SendErrorResponse(Command::kInvalidRequest,
+                              "Overlay inspector not enabled.");
+    return;
+  }
+  // Use the injected JavaScript helper to get the rectangles to highlight for
+  // the specified node.
+  JSONObject rects_response = dispatcher_->RunScriptCommand(
+      "Overlay._highlightNodeRects", command.GetParams());
+  const Value* highlight_rects =
+      rects_response->FindPath({"result", "highlightRects"});
+  if (!highlight_rects) {
+    command.SendErrorResponse(Command::kInvalidParams,
+                              "Can't get node highlights.");
+    return;
+  }
+
+  // Render all the highlight rects as children of a CompositionNode.
+  render_tree::CompositionNode::Builder builder;
+  for (const Value& rect_params : highlight_rects->GetList()) {
+    builder.AddChild(RenderHighlightRect(&rect_params));
+  }
+  render_layer_->SetFrontLayer(
+      base::MakeRefCounted<render_tree::CompositionNode>(builder));
+
+  command.SendResponse();
+}
+
+void OverlayAgent::HighlightRect(const Command& command) {
+  JSONObject params = JSONParse(command.GetParams());
+  render_layer_->SetFrontLayer(RenderHighlightRect(params.get()));
+  command.SendResponse();
+}
+
+void OverlayAgent::HideHighlight(const Command& command) {
+  render_layer_->SetFrontLayer(scoped_refptr<render_tree::Node>());
+  command.SendResponse();
+}
+
+}  // namespace backend
+}  // namespace debug
+}  // namespace cobalt
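
For illustration only (not part of the patch): the parameter shape that
RenderHighlightRect() above reads for the DevTools "Overlay.highlightRect"
command, with color/outlineColor in the protocol's RGBA form (alpha in the
0..1 range). The sendCommand() routing is the DebuggerClient method shown
later in this import, and the coordinate and color values are invented.

  var highlightRectParams = {
    x: 10, y: 20, width: 300, height: 150,
    color:        {r: 112, g: 168, b: 219, a: 0.66},
    outlineColor: {r: 255, g: 0,   b: 0,   a: 1.0}
  };
  // Assuming a debuggerClient that is already attached:
  debuggerClient.sendCommand('Overlay.highlightRect', highlightRectParams,
                             function(response) {});
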
diff --git a/src/cobalt/debug/backend/overlay_agent.h b/src/cobalt/debug/backend/overlay_agent.h
new file mode 100644
index 0000000..4b7425b
--- /dev/null
+++ b/src/cobalt/debug/backend/overlay_agent.h
@@ -0,0 +1,61 @@
+// Copyright 2019 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+#ifndef COBALT_DEBUG_BACKEND_OVERLAY_AGENT_H_
+#define COBALT_DEBUG_BACKEND_OVERLAY_AGENT_H_
+
+#include "cobalt/debug/backend/command_map.h"
+#include "cobalt/debug/backend/debug_dispatcher.h"
+#include "cobalt/debug/backend/render_layer.h"
+#include "cobalt/debug/command.h"
+#include "cobalt/debug/json_object.h"
+
+namespace cobalt {
+namespace debug {
+namespace backend {
+
+class OverlayAgent {
+ public:
+  OverlayAgent(DebugDispatcher* dispatcher,
+               std::unique_ptr<RenderLayer> render_layer);
+
+  void Thaw(JSONObject agent_state);
+  JSONObject Freeze();
+
+ private:
+  void Enable(const Command& command);
+  void Disable(const Command& command);
+
+  void HighlightNode(const Command& command);
+  void HighlightRect(const Command& command);
+  void HideHighlight(const Command& command);
+
+  DebugDispatcher* dispatcher_;
+
+  // Render layer owned by this object.
+  std::unique_ptr<RenderLayer> render_layer_;
+
+  // Map of member functions implementing commands.
+  CommandMap<OverlayAgent> commands_;
+
+  // Whether we successfully loaded the agent's JavaScript implementation.
+  bool script_loaded_ = false;
+
+  bool enabled_ = false;
+};
+
+}  // namespace backend
+}  // namespace debug
+}  // namespace cobalt
+
+#endif  // COBALT_DEBUG_BACKEND_OVERLAY_AGENT_H_
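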
diff --git a/src/cobalt/debug/console/content/console_manager.js b/src/cobalt/debug/console/content/console_manager.js
new file mode 100644
index 0000000..532e00e
--- /dev/null
+++ b/src/cobalt/debug/console/content/console_manager.js
@@ -0,0 +1,196 @@
+// Copyright 2019 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+var consoleManager = null;
+
+function start() {
+  window.consoleManager = new ConsoleManager();
+}
+
+window.addEventListener('load', start);
+
+function ConsoleManager() {
+  // Handles communication with the debugger.
+  this.debuggerClient = new DebuggerClient();
+  // Number of animation frame samples since the last update.
+  this.animationFrameSamples = 0;
+  // A list of all the possible interactive consoles. Each entry holds the
+  // console's mode, its <body> CSS class, and the console object itself.
+  this.consoleRegistry = null;
+
+  this.initializeConsoles();
+
+  document.addEventListener('keydown', this.handleKeydown.bind(this));
+  document.addEventListener('keyup', this.handleKeyup.bind(this));
+  document.addEventListener('keypress', this.handleKeypress.bind(this));
+  document.addEventListener('wheel', this.handleWheel.bind(this));
+  document.addEventListener('input', this.handleInput.bind(this));
+  if (typeof window.onScreenKeyboard != 'undefined'
+      && window.onScreenKeyboard) {
+    window.onScreenKeyboard.oninput = this.handleInput.bind(this);
+  }
+  window.requestAnimationFrame(this.animate.bind(this));
+}
+
+ConsoleManager.prototype.initializeConsoles = function() {
+  this.consoleRegistry = [
+    {
+      console: new DebugConsole(this.debuggerClient),
+      mode: 'debug',
+      bodyClass: 'debugConsole hud',
+    },
+    {
+      console: new MediaConsole(this.debuggerClient),
+      mode: 'media',
+      bodyClass: 'mediaConsole',
+    },
+  ];
+
+  this.hudConsole = this.consoleRegistry[0].console;
+
+  this.consoleRegistry.forEach((entry) => {
+    let ensureConsolesAreValid = function(method) {
+      if (typeof entry.console[method] != "function") {
+        console.warn(`Console "${entry.mode}" ${method}() is not implemented. \
+            Providing default empty implementation.`);
+        // Provide a default not-implemented warning message.
+        let consoleName = entry.mode;
+        let notImplementedMessage = function() {
+          console.log(
+              `Console "${consoleName}" ${method}() is not implemented.`);
+        };
+        entry.console[method] = notImplementedMessage;
+      }
+    };
+    ensureConsolesAreValid("update");
+    ensureConsolesAreValid("setVisible");
+    ensureConsolesAreValid("onKeydown");
+    ensureConsolesAreValid("onKeyup");
+    ensureConsolesAreValid("onKeypress");
+    ensureConsolesAreValid("onInput");
+    ensureConsolesAreValid("onWheel");
+  });
+}
+
+ConsoleManager.prototype.update = function() {
+  let mode = window.debugHub.getDebugConsoleMode();
+
+  if (mode !== 'off') {
+    this.debuggerClient.attach();
+  }
+
+  let activeConsole = this.getActiveConsole();
+  let bodyClass = '';
+  if (mode == 'hud') {
+    bodyClass = 'hud';
+    // The HUD is owned by the debug console, but since it has its own mode
+    // dedicated to it, it needs to be specifically updated when it is visible.
+    // TODO: Factor out hudConsole into its own console.
+    this.hudConsole.updateHud();
+  } else if (mode != 'off' && mode != 'hud') {
+    bodyClass = activeConsole.bodyClass;
+  }
+  document.body.className = bodyClass;
+
+  this.consoleRegistry.forEach((entry) => {
+    entry.console.setVisible(entry == activeConsole);
+  });
+
+  if (mode !== 'off') {
+    if (activeConsole) { activeConsole.console.update(); }
+  }
+}
+
+// Animation callback: updates state and animated nodes.
+ConsoleManager.prototype.animate = function(time) {
+  const subsample = 8;
+  this.animationFrameSamples = (this.animationFrameSamples + 1) % subsample;
+  if (this.animationFrameSamples == 0) {
+    this.update();
+  }
+  window.requestAnimationFrame(this.animate.bind(this));
+}
+
+ConsoleManager.prototype.getActiveConsole = function() {
+  let mode = window.debugHub.getDebugConsoleMode();
+  return this.consoleRegistry.find( entry => entry.mode === mode );
+}
+
+ConsoleManager.prototype.handleKeydown = function(event) {
+  // Map of 'Unidentified' additional Cobalt keyCodes to equivalent keys.
+  const unidentifiedCobaltKeyMap = {
+    // kSbKeyGamepad1
+    0x8000: 'Enter',
+    // kSbKeyGamepad2
+    0x8001: 'Esc',
+    // kSbKeyGamepad3
+    0x8002: 'Home',
+    // kSbKeyGamepad5
+    0x8008: 'Enter',
+    // kSbKeyGamepad6
+    0x8009: 'Enter',
+    // kSbKeyGamepadDPadUp
+    0x800C: 'ArrowUp',
+    // kSbKeyGamepadDPadDown
+    0x800D: 'ArrowDown',
+    // kSbKeyGamepadDPadLeft
+    0x800E: 'ArrowLeft',
+    // kSbKeyGamepadDPadRight
+    0x800F: 'ArrowRight',
+    // kSbKeyGamepadLeftStickUp
+    0x8011: 'ArrowUp',
+    // kSbKeyGamepadLeftStickDown
+    0x8012: 'ArrowDown',
+    // kSbKeyGamepadLeftStickLeft
+    0x8013: 'ArrowLeft',
+    // kSbKeyGamepadLeftStickRight
+    0x8014: 'ArrowRight',
+    // kSbKeyGamepadRightStickUp
+    0x8015: 'ArrowUp',
+    // kSbKeyGamepadRightStickDown
+    0x8016: 'ArrowDown',
+    // kSbKeyGamepadRightStickLeft
+    0x8017: 'ArrowLeft',
+    // kSbKeyGamepadRightStickRight
+    0x8018: 'ArrowRight'
+  };
+
+  let key = event.key;
+  if (key == 'Unidentified') {
+    key = unidentifiedCobaltKeyMap[event.keyCode] || 'Unidentified';
+  }
+
+  let active = this.getActiveConsole();
+  if (active) { active.console.onKeydown(event); }
+}
+
+ConsoleManager.prototype.handleKeyup = function(event) {
+  let active = this.getActiveConsole();
+  if (active) { active.console.onKeyup(event); }
+}
+
+ConsoleManager.prototype.handleKeypress = function(event) {
+  let active = this.getActiveConsole();
+  if (active) { active.console.onKeypress(event); }
+}
+
+ConsoleManager.prototype.handleInput = function(event) {
+  let active = this.getActiveConsole();
+  if (active) { active.console.onInput(event); }
+}
+
+ConsoleManager.prototype.handleWheel = function(event) {
+  let active = this.getActiveConsole();
+  if (active) { active.console.onWheel(event); }
+}
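
A rough sketch (not in the patch) of the contract ConsoleManager expects from
each registered console: a constructor taking the shared DebuggerClient, plus
the handler methods that initializeConsoles() probes for (any that are missing
get stub implementations). "NetworkConsole" and the 'network' mode are
hypothetical names used only for this example.

  function NetworkConsole(debuggerClient) {
    this.debuggerClient = debuggerClient;
  }
  NetworkConsole.prototype.update = function() {};
  NetworkConsole.prototype.setVisible = function(visible) {};
  NetworkConsole.prototype.onKeydown = function(event) {};
  NetworkConsole.prototype.onKeyup = function(event) {};
  NetworkConsole.prototype.onKeypress = function(event) {};
  NetworkConsole.prototype.onInput = function(event) {};
  NetworkConsole.prototype.onWheel = function(event) {};

  // Registered by appending an entry to this.consoleRegistry in
  // initializeConsoles():
  //   { console: new NetworkConsole(this.debuggerClient),
  //     mode: 'network', bodyClass: 'networkConsole' }
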
diff --git a/src/cobalt/debug/console/content/debug_commands.js b/src/cobalt/debug/console/content/debug_commands.js
index e91754d..8c4456b 100644
--- a/src/cobalt/debug/console/content/debug_commands.js
+++ b/src/cobalt/debug/console/content/debug_commands.js
@@ -13,114 +13,117 @@
 // limitations under the License.
 
 function initDebugCommands() {
-  debug = new Object();
-  d = debug;
+  let debugCommands = {};
 
-  debug.cvalList = function() {
+  debugCommands.cvalList = function() {
     var result = consoleValues.listAll();
-    printToMessageLog(messageLog.INTERACTIVE, result);
+    printToMessageLog(MessageLog.INTERACTIVE, result);
   }
-  debug.cvalList.shortHelp = 'List all registered console values.';
-  debug.cvalList.longHelp =
+  debugCommands.cvalList.shortHelp = 'List all registered console values.';
+  debugCommands.cvalList.longHelp =
       'List all registered console values that can be displayed.\n' +
       'You can change what subset is displayed in the HUD using ' +
       'the cvalAdd and cvalRemove debug methods.';
 
-  debug.cvalAdd = function(substringToMatch) {
+  debugCommands.cvalAdd = function(substringToMatch) {
     var result = consoleValues.addActive(substringToMatch);
-    printToMessageLog(messageLog.INTERACTIVE, result);
+    printToMessageLog(MessageLog.INTERACTIVE, result);
     // After each change, save the active set with the default key.
     this.cvalSave();
   }
-  debug.cvalAdd.shortHelp = 'Adds one or more consoles value to the HUD.';
-  debug.cvalAdd.longHelp =
+  debugCommands.cvalAdd.shortHelp =
+      'Adds one or more console values to the HUD.';
+  debugCommands.cvalAdd.longHelp =
       'Adds any of the registered console values (displayed with cvalList) ' +
       'to the HUD whose name matches one of the specified space-separated ' +
       'prefixes.';
 
-  debug.cvalRemove = function(substringToMatch) {
+  debugCommands.cvalRemove = function(substringToMatch) {
     var result = consoleValues.removeActive(substringToMatch);
-    printToMessageLog(messageLog.INTERACTIVE, result);
+    printToMessageLog(MessageLog.INTERACTIVE, result);
     // After each change, save the active set with the default key.
     this.cvalSave();
   }
-  debug.cvalRemove.shortHelp =
+  debugCommands.cvalRemove.shortHelp =
       'Removes one or more console values from the HUD.';
-  debug.cvalRemove.longHelp =
+  debugCommands.cvalRemove.longHelp =
       'Removes any of the console values displayed in the HUD ' +
       'whose name matches one of the specified space-separated prefixes.';
 
-  debug.cvalSave = function(key) {
+  debugCommands.cvalSave = function(key) {
     var result = consoleValues.saveActiveSet(key);
-    printToMessageLog(messageLog.INTERACTIVE, result);
+    printToMessageLog(MessageLog.INTERACTIVE, result);
   }
-  debug.cvalSave.shortHelp =
+  debugCommands.cvalSave.shortHelp =
       'Saves the current set of console values displayed in the HUD.';
-  debug.cvalSave.longHelp =
+  debugCommands.cvalSave.longHelp =
       'Saves the set of console values currently displayed in the HUD ' +
       'to web local storage using the specified key. Saved display sets can ' +
       'be reloaded later using the cvalLoad debug method and the same key.\n' +
       'If no key is specified, uses a default value.';
 
-  debug.cvalLoad = function(key) {
+  debugCommands.cvalLoad = function(key) {
     var result = consoleValues.loadActiveSet(key);
-    printToMessageLog(messageLog.INTERACTIVE, result);
+    printToMessageLog(MessageLog.INTERACTIVE, result);
   }
-  debug.cvalLoad.shortHelp =
+  debugCommands.cvalLoad.shortHelp =
       'Loads a previously stored set of console values displayed in the HUD.';
-  debug.cvalLoad.longHelp =
+  debugCommands.cvalLoad.longHelp =
       'Loads the set of console values currently displayed in the HUD ' +
       'from a set previously saved in web local storage using the cvalSave ' +
       'debug method and the same key.\n' +
       'If no key is specified, uses a default value.';
 
-  debug.history = history;
-  debug.history.shortHelp = 'Display command history.';
-  debug.history.longHelp =
+  debugCommands.history = history;
+  debugCommands.history.shortHelp = 'Display command history.';
+  debugCommands.history.longHelp =
       'Display a list of all previously executed commands with an '+
       'index. You can re-execute any of the commands from the ' +
       'history by typing "!" followed by the index of that command.'
 
-  debug.help = help;
-  debug.help.shortHelp = 'Display this message, or detail for a specific command.';
-  debug.help.longHelp =
+  debugCommands.help = help.bind(this, debugCommands);
+  debugCommands.help.shortHelp =
+      'Display this message, or detail for a specific command.';
+  debugCommands.help.longHelp =
       'With no arguments, displays a summary of all commands. If the name of ' +
       'a command is specified, displays additional details about that command.';
 
-  debug.dir = dir;
-  debug.dir.shortHelp =
+  debugCommands.dir = dir;
+  debugCommands.dir.shortHelp =
       'Lists the properties of an object in the main web module.';
-  debug.dir.longHelp =
+  debugCommands.dir.longHelp =
       'Lists the properties of the specified object in the main web module. ' +
       'Remember to enclose the name of the object in quotes.';
 
-  debug.debugger = function() {
+  debugCommands.debugger = function() {
     return debuggerClient;
   }
-  debug.debugger.shortHelp =
+  debugCommands.debugger.shortHelp =
       'Get the debugger client';
-  debug.debugger.longHelp =
+  debugCommands.debugger.longHelp =
       'Get the debugger client. The debugger client can be used to issue ' +
       'JavaScript debugging commands to the main web module.';
 
-  addConsoleCommands();
+  addConsoleCommands(debugCommands);
+
+  return debugCommands;
 }
 
-function help(command) {
+function help(debugCommands, command) {
   var helpString = '';
   if (command) {
     // Detailed help on a specific command.
-    if (debug[command]) {
-      helpString = debug[command].longHelp;
+    if (debugCommands[command]) {
+      helpString = debugCommands[command].longHelp;
     } else {
       helpString = 'Command "' + command + '" not found.';
     }
   } else {
     // Summary help for all commands.
     helpString = 'Cobalt Debug Console commands:\n\n';
-    for (cmd in debug) {
+    for (cmd in debugCommands) {
       helpString += 'debug.' + cmd + '() - ';
-      helpString += debug[cmd].shortHelp + '\n';
+      helpString += debugCommands[cmd].shortHelp + '\n';
     }
     helpString +=
         '\nYou are entering JavaScript, so remember to use parentheses, ' +
@@ -129,13 +132,13 @@
         'All other text will be executed as JavaScript in the main web ' +
         'module.\n';
   }
-  printToMessageLog(messageLog.INTERACTIVE, helpString);
+  printToMessageLog(MessageLog.INTERACTIVE, helpString);
 }
 
 function history() {
   var history = commandInput.getHistory();
   for (var i = 0; i < history.length; i += 1) {
-    printToMessageLog(messageLog.INTERACTIVE, i + ' ' + history[i]);
+    printToMessageLog(MessageLog.INTERACTIVE, i + ' ' + history[i]);
   }
 }
 
@@ -148,18 +151,18 @@
   executeMain(js);
 }
 
-function addConsoleCommands() {
+function addConsoleCommands(debugCommands) {
   var consoleCommands = window.debugHub.consoleCommands;
   for (var i = 0; i < consoleCommands.length; i++) {
     var c = consoleCommands[i];
-    addOneConsoleCommand(c.command, c.shortHelp, c.longHelp);
+    addOneConsoleCommand(debugCommands, c.command, c.shortHelp, c.longHelp);
   }
 }
 
-function addOneConsoleCommand(command, shortHelp, longHelp) {
-  debug[command] = function(message) {
+function addOneConsoleCommand(debugCommands, command, shortHelp, longHelp) {
+  debugCommands[command] = function(message) {
     window.debugHub.sendConsoleCommand(command, message);
   }
-  debug[command].shortHelp = shortHelp;
-  debug[command].longHelp = longHelp;
+  debugCommands[command].shortHelp = shortHelp;
+  debugCommands[command].longHelp = longHelp;
 }
diff --git a/src/cobalt/debug/console/content/debug_console.css b/src/cobalt/debug/console/content/debug_console.css
index d0ccc4d..f087f24 100644
--- a/src/cobalt/debug/console/content/debug_console.css
+++ b/src/cobalt/debug/console/content/debug_console.css
@@ -12,6 +12,9 @@
   color: #FFFFFF;
   display: none;
 }
+body.hud #hudFrame {
+  display: block;
+}
 
 #hud {
   position: absolute;
@@ -27,7 +30,7 @@
   padding: 0.625em;
 }
 
-#consoleFrame {
+#debugConsoleFrame {
   position: absolute;
   top: 0;
   left: 0;
@@ -38,6 +41,9 @@
   overflow: hidden;
   display: none;
 }
+body.debugConsole #debugConsoleFrame {
+  display: block;
+}
 
 #messageContainerFrame {
   position: absolute;
@@ -122,3 +128,33 @@
   border-style: solid;
   padding: 0.625em;
 }
+
+#mediaConsoleFrame {
+  position: absolute;
+  top: 75%;
+  left: 0;
+  bottom: 0;
+  right: 0;
+  width: 100%;
+  background-color: rgba(128, 128, 128, 0.6);
+  color: #FFFFFF;
+  display: none;
+}
+body.mediaConsole #mediaConsoleFrame {
+  display: block;
+}
+
+#mediaConsole {
+  position: absolute;
+  top: 0.625em;
+  left: 0.625em;
+  bottom: 0.625em;
+  right: 0.625em;
+  background-color: rgba(0, 0, 0, 0.6);
+  color: #FFFFFF;
+  border: 0.0625em;
+  border-color: #606060;
+  border-style: solid;
+  padding: 0.625em;
+  white-space: pre;
+}
diff --git a/src/cobalt/debug/console/content/debug_console.html b/src/cobalt/debug/console/content/debug_console.html
index a60c6e4..baed2fd 100644
--- a/src/cobalt/debug/console/content/debug_console.html
+++ b/src/cobalt/debug/console/content/debug_console.html
@@ -5,23 +5,28 @@
   <link rel="stylesheet" type="text/css" href="debug_console.css">
 </head>
 
+<script type="text/javascript" src="console_manager.js"></script>
 <script type="text/javascript" src="debug_console.js"></script>
 <script type="text/javascript" src="console_values.js"></script>
 <script type="text/javascript" src="message_log.js"></script>
 <script type="text/javascript" src="command_input.js"></script>
 <script type="text/javascript" src="debug_commands.js"></script>
 <script type="text/javascript" src="debugger_client.js"></script>
+<script type="text/javascript" src="media_console.js"></script>
 
 <body>
   <div id="hudFrame">
     <div id="hud"></div>
   </div>
-  <div id="consoleFrame">
+  <div id="debugConsoleFrame">
     <div id="messageContainerFrame">
-      <div id = "messageContainer"></div>
+      <div id="messageContainer"></div>
     </div>
     <div id="in">> _</div>
   </div>
+  <div id="mediaConsoleFrame">
+    <div id="mediaConsole"></div>
+  </div>
 </body>
 
 </html>
diff --git a/src/cobalt/debug/console/content/debug_console.js b/src/cobalt/debug/console/content/debug_console.js
index e1028d5..088b744 100644
--- a/src/cobalt/debug/console/content/debug_console.js
+++ b/src/cobalt/debug/console/content/debug_console.js
@@ -12,171 +12,78 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-
-// Text the user has typed since last hitting Enter
-var inputText = '';
-// The DOM node used as a container for message nodes.
-var messageLog = null;
-// Stores and manipulates the set of console values.
-var consoleValues = null;
-// Handles command input, editing, history traversal, etc.
-var commandInput = null;
-// Object to store methods to be executed in the debug console.
-var debug = null;
-// Shorthand reference for the debug object.
-var d = null;
-// Handles communication with the debugger.
-var debuggerClient = null;
-// Number of animation frame samples since the last update.
-var animationFrameSamples = 0;
-
-// Map of 'Unidentified' additional Cobalt keyCodes to equivalent keys.
-var unidentifiedCobaltKeyMap = {
-  // kSbKeyGamepad1
-  0x8000: 'Enter',
-  // kSbKeyGamepad2
-  0x8001: 'Esc',
-  // kSbKeyGamepad3
-  0x8002: 'Home',
-  // kSbKeyGamepad5
-  0x8008: 'Enter',
-  // kSbKeyGamepad6
-  0x8009: 'Enter',
-  // kSbKeyGamepadDPadUp
-  0x800C: 'ArrowUp',
-  // kSbKeyGamepadDPadDown
-  0x800D: 'ArrowDown',
-  // kSbKeyGamepadDPadLeft
-  0x800E: 'ArrowLeft',
-  // kSbKeyGamepadDPadRight
-  0x800F: 'ArrowRight',
-  // kSbKeyGamepadLeftStickUp
-  0x8011: 'ArrowUp',
-  // kSbKeyGamepadLeftStickDown
-  0x8012: 'ArrowDown',
-  // kSbKeyGamepadLeftStickLeft
-  0x8013: 'ArrowLeft',
-  // kSbKeyGamepadLeftStickRight
-  0x8014: 'ArrowRight',
-  // kSbKeyGamepadRightStickUp
-  0x8015: 'ArrowUp',
-  // kSbKeyGamepadRightStickDown
-  0x8016: 'ArrowDown',
-  // kSbKeyGamepadRightStickLeft
-  0x8017: 'ArrowLeft',
-  // kSbKeyGamepadRightStickRight
-  0x8018: 'ArrowRight'
-};
-
-function createMessageLog() {
-  var messageContainer = document.getElementById('messageContainer');
-  messageLog = new MessageLog(messageContainer);
-}
-
-function createCommandInput() {
-  var inputElem = document.getElementById('in');
-  this.commandInput = new CommandInput(inputElem);
-}
-
-function createConsoleValues() {
-  // Create the console values and attempt to load the active set using the
-  // default key. If this fails, it will leave the active set equal to the
-  // all registered CVals.
-  consoleValues = new ConsoleValues();
-  var loadResult = consoleValues.loadActiveSet();
-  printToMessageLog(messageLog.INTERACTIVE, loadResult);
-}
-
-function createDebuggerClient() {
-  debuggerClient = new DebuggerClient();
-}
-
-function showBlockElem(elem, doShow) {
-  if (elem) {
-    var display = doShow ? 'block' : 'none';
-    if (elem.style.display != display) {
-      elem.style.display = display;
-    }
+// Global for other modules to use.
+function printToMessageLog(severity, message) {
+  if (window.debugConsoleInstance) {
+    window.debugConsoleInstance.printToMessageLog(severity, message);
   }
 }
 
-function showConsole(doShow) {
-  showBlockElem(document.getElementById('consoleFrame'), doShow);
-  messageLog.setVisible(doShow);
+function DebugConsole(debuggerClient) {
+  window.debugConsoleInstance = this;
+
+  this.debuggerClient = debuggerClient;
+  // Text the user has typed since last hitting Enter
+  this.inputText = '';
+
+  this.commandInput = new CommandInput(document.getElementById('in'));
+  this.messageLog = new MessageLog(document.getElementById('messageContainer'));
+
+  this.consoleValues = new ConsoleValues();
+  let loadResult = this.consoleValues.loadActiveSet();
+  this.printToMessageLog(MessageLog.INTERACTIVE, loadResult);
+
+  this.debugCommands = initDebugCommands();
 }
 
-function isConsoleVisible() {
-  var mode = window.debugHub.getDebugConsoleMode();
-  return mode >= window.debugHub.DEBUG_CONSOLE_ON;
+DebugConsole.prototype.printToMessageLog = function(severity, message) {
+  this.messageLog.addMessage(severity, message);
 }
 
-function printToMessageLog(severity, message) {
-  messageLog.addMessage(severity, message);
-}
-
-function showHud(doShow) {
-  showBlockElem(document.getElementById('hudFrame'), doShow);
-}
-
-function printToHud(message) {
-  var elem = document.getElementById('hud');
+DebugConsole.prototype.printToHud = function(message) {
+  let elem = document.getElementById('hud');
   elem.textContent = message;
 }
 
-function updateHud(time) {
-  var mode = window.debugHub.getDebugConsoleMode();
-  if (mode >= window.debugHub.DEBUG_CONSOLE_HUD) {
-    consoleValues.update();
-    var cvalString = consoleValues.toString();
-    printToHud(cvalString);
-  }
+DebugConsole.prototype.updateHud = function() {
+  let mode = window.debugHub.getDebugConsoleMode();
+  this.consoleValues.update();
+  let cvalString = this.consoleValues.toString();
+  this.printToHud(cvalString);
 }
 
-function updateMode() {
-  var mode = window.debugHub.getDebugConsoleMode();
-  showConsole(mode >= window.debugHub.DEBUG_CONSOLE_ON);
-  showHud(mode >= window.debugHub.DEBUG_CONSOLE_HUD);
+DebugConsole.prototype.setVisible = function(visible) {
+  this.messageLog.setVisible(visible);
 }
 
-// Animation callback: updates state and animated nodes.
-function animate(time) {
-  var subsample = 8;
-  animationFrameSamples = (animationFrameSamples + 1) % subsample;
-  if (animationFrameSamples == 0) {
-    updateMode();
-    updateHud(time);
-    if (isConsoleVisible()) {
-      commandInput.animateBlink();
-      // This will do nothing if debugger is already attached.
-      debuggerClient.attach();
-    }
-  }
-  window.requestAnimationFrame(animate);
+DebugConsole.prototype.update = function() {
+  this.commandInput.animateBlink();
+  this.updateHud();
 }
 
 // Executes a command from the history buffer.
 // Index should be an integer (positive is an absolute index, negative is a
 // number of commands back from the current) or '!' to execute the last command.
-function executeCommandFromHistory(idx) {
+DebugConsole.prototype.executeCommandFromHistory = function(idx) {
   if (idx == '!') {
     idx = -1;
   }
   idx = parseInt(idx);
-  commandInput.setCurrentCommandFromHistory(idx);
-  executeCurrentCommand();
+  this.commandInput.setCurrentCommandFromHistory(idx);
+  this.executeCurrentCommand();
 }
 
 // Special commands that are executed immediately, not as JavaScript,
 // e.g. !N to execute the Nth command in the history buffer.
 // Returns true if the command is processed here, false otherwise.
-function executeImmediate(command) {
+DebugConsole.prototype.executeImmediate = function(command) {
   if (command[0] == '!') {
-    executeCommandFromHistory(command.substring(1));
+    this.executeCommandFromHistory(command.substring(1));
     return true;
   } else if (command.trim() == 'help') {
     // Treat 'help' as a special case for users not expecting JS execution.
     help();
-    commandInput.clearCurrentCommand();
+    this.commandInput.clearCurrentCommand();
     return true;
   }
   return false;
@@ -185,9 +92,11 @@
 // JavaScript commands executed in this (debug console) web module.
 // The only commands we execute here are methods of the debug object
 // (or its shorthand equivalent).
-function executeDebug(command) {
+DebugConsole.prototype.executeDebug = function(command) {
   if (command.trim().indexOf('debug.') == 0 ||
       command.trim().indexOf('d.') == 0) {
+    let debug = this.debugCommands;
+    let d = this.debugCommands;
     eval(command);
     return true;
   }
@@ -197,8 +106,9 @@
 // Execute a command as JavaScript in the main web module.
 // Use the debugger evaluate command, which gives us Command Line API access
 // and rich results with object preview.
-function executeMain(command) {
-  debuggerClient.evaluate(command);
+DebugConsole.prototype.executeMain = function(command) {
+  let callback = this.printToLogCallback.bind(this);
+  this.debuggerClient.evaluate(command, callback);
 }
 
 // Executes a command entered by the user.
@@ -208,118 +118,112 @@
 // 3. If no matching command is found, pass to the Cobalt DebugHub.
 //    DebugHub will execute any commands recognized on the C++ side,
 //    or pass to the main web module to be executed as JavaScript.
-function executeCommand(command) {
-  if (executeImmediate(command)) {
-    printToMessageLog(messageLog.INTERACTIVE, '');
+DebugConsole.prototype.executeCommand = function(command) {
+  if (this.executeImmediate(command)) {
+    this.printToMessageLog(MessageLog.INTERACTIVE, '');
     return;
   }
-  commandInput.storeAndClearCurrentCommand();
-  if (executeDebug(command)) {
-    printToMessageLog(messageLog.INTERACTIVE, '');
+  this.commandInput.storeAndClearCurrentCommand();
+  if (this.executeDebug(command)) {
+    this.printToMessageLog(MessageLog.INTERACTIVE, '');
     return;
   }
-  executeMain(command);
+  this.executeMain(command);
 }
 
 // Executes the current command in the CommandInput object.
 // Typically called when the user hits Enter.
-function executeCurrentCommand() {
-  var command = commandInput.getCurrentCommand();
-  printToMessageLog(messageLog.INTERACTIVE, '> ' + command);
-  executeCommand(command);
+DebugConsole.prototype.executeCurrentCommand = function() {
+  let command = this.commandInput.getCurrentCommand();
+  this.printToMessageLog(MessageLog.INTERACTIVE, '> ' + command);
+  this.executeCommand(command);
 }
 
-function onWheel(event) {
+DebugConsole.prototype.onWheel = function(event) {
   if (event.deltaY > 0) {
-    messageLog.scrollDown(event.deltaY);
+    this.messageLog.scrollDown(event.deltaY);
   } else if (event.deltaY < 0) {
-    messageLog.scrollUp(-event.deltaY);
+    this.messageLog.scrollUp(-event.deltaY);
   }
 }
 
-function onKeydown(event) {
-  var key = event.key;
-  if (key == 'Unidentified') {
-    key = unidentifiedCobaltKeyMap[event.keyCode] || 'Unidentified';
-  }
+DebugConsole.prototype.onKeydown = function(event) {
+  let key = event.key;
 
   if (key == 'ArrowLeft') {
-    commandInput.moveCursor(-1);
+    this.commandInput.moveCursor(-1);
   } else if (key == 'ArrowRight') {
-    commandInput.moveCursor(1);
+    this.commandInput.moveCursor(1);
   } else if (key == 'ArrowUp') {
-    commandInput.back();
+    this.commandInput.back();
   } else  if (key == 'ArrowDown') {
-    commandInput.forward();
+    this.commandInput.forward();
   } else if (key == 'Backspace') {
-    commandInput.deleteCharBehindCursor();
+    this.commandInput.deleteCharBehindCursor();
   } else if (key == 'Enter') {
-    executeCurrentCommand();
+    this.executeCurrentCommand();
   } else if (key == 'PageUp') {
-    messageLog.pageUp();
+    this.messageLog.pageUp();
   } else if (key == 'PageDown') {
-    messageLog.pageDown();
+    this.messageLog.pageDown();
   } else if (key == 'Delete') {
-    commandInput.deleteCharAtCursor();
+    this.commandInput.deleteCharAtCursor();
   } else if (key == 'Home') {
     if (event.ctrlKey) {
-      messageLog.toHead();
+      this.messageLog.toHead();
     } else {
-      commandInput.moveCursor(-1000);
+      this.commandInput.moveCursor(-1000);
     }
   } else if (key == 'End') {
     if (event.ctrlKey) {
-      messageLog.toTail();
+      this.messageLog.toTail();
     } else {
-      commandInput.moveCursor(1000);
+      this.commandInput.moveCursor(1000);
     }
   }
 }
 
-function onKeyup(event) {}
+DebugConsole.prototype.onKeyup = function(event) {}
 
-function onKeypress(event) {
-  var mode = window.debugHub.getDebugConsoleMode();
-  if (mode >= window.debugHub.DEBUG_CONSOLE_ON) {
-    event.preventDefault();
-    event.stopPropagation();
-    var c = event.charCode;
-    // If we have a printable character, insert it; otherwise ignore.
-    if (c >= 0x20 && c <= 0x7e) {
-      commandInput.insertStringBehindCursor(String.fromCharCode(c));
-    }
+DebugConsole.prototype.onKeypress = function(event) {
+  event.preventDefault();
+  event.stopPropagation();
+  let c = event.charCode;
+  // If we have a printable character, insert it; otherwise ignore.
+  if (c >= 0x20 && c <= 0x7e) {
+    this.commandInput.insertStringBehindCursor(String.fromCharCode(c));
   }
 }
 
-function onInput(event) {
+DebugConsole.prototype.onInput = function(event) {
   console.log('In DebugConsole onInput, event.data ' + event.data);
-  var mode = window.debugHub.getDebugConsoleMode();
-  if (mode >= window.debugHub.DEBUG_CONSOLE_ON && event.data) {
+  if (event.data) {
     event.preventDefault();
     event.stopPropagation();
-    commandInput.insertStringBehindCursor(event.data);
+    this.commandInput.insertStringBehindCursor(event.data);
   }
 }
 
-function start() {
-  createCommandInput();
-  createMessageLog();
-  createDebuggerClient();
-  showHud(false);
-  showConsole(false);
-  createConsoleValues();
-  initDebugCommands();
-  document.addEventListener('wheel', onWheel);
-  document.addEventListener('keypress', onKeypress);
-  document.addEventListener('keydown', onKeydown);
-  document.addEventListener('keyup', onKeyup);
-  if (typeof window.onScreenKeyboard != 'undefined'
-      && window.onScreenKeyboard) {
-    window.onScreenKeyboard.oninput = onInput;
+DebugConsole.prototype.printToLogCallback = function(result) {
+  if (result.wasThrown) {
+    this.printToMessageLog(MessageLog.ERROR,
+                      'Uncaught ' + result.result.description);
+  } else if (result.result.preview) {
+    this.printToMessageLog(MessageLog.INFO, result.result.preview.description);
+    if (result.result.preview.properties) {
+      for (let i = 0; i < result.result.preview.properties.length; ++i) {
+        let property = result.result.preview.properties[i];
+        this.printToMessageLog(MessageLog.INFO,
+                          '  ' + property.name + ': ' + property.value);
+      }
+    }
+    if (result.result.preview.overflow) {
+      this.printToMessageLog(MessageLog.INFO, '  ...');
+    }
+  } else if (result.result.description) {
+    this.printToMessageLog(MessageLog.INFO, result.result.description);
+  } else if (result.result.value) {
+    this.printToMessageLog(MessageLog.INFO, result.result.value.toString());
   }
-  curr = window.performance.now();
-  window.requestAnimationFrame(animate);
+  this.printToMessageLog(MessageLog.INFO, '');
 }
-
-window.addEventListener('load', start);
-
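
For reference, an invented example of the Runtime.evaluate response shape that
printToLogCallback() formats; only the fields the callback actually reads
(wasThrown, result.description, result.preview, result.value) are shown, and
the property names and values are placeholders.

  var sampleResult = {
    wasThrown: false,
    result: {
      description: 'Object',
      preview: {
        description: 'Object',
        properties: [{name: 'width', value: '1920'},
                     {name: 'height', value: '1080'}],
        overflow: false
      }
    }
  };
  // printToLogCallback(sampleResult) prints the preview description followed
  // by one indented "name: value" line per property.
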
diff --git a/src/cobalt/debug/console/content/debugger_client.js b/src/cobalt/debug/console/content/debugger_client.js
index cc2b168..2ceb3e0 100644
--- a/src/cobalt/debug/console/content/debugger_client.js
+++ b/src/cobalt/debug/console/content/debugger_client.js
@@ -28,7 +28,7 @@
 DebuggerClient.prototype.attach = function() {
   if (this.attachState == this.DEBUGGER_DETACHED) {
     this.attachState = this.DEBUGGER_ATTACHING;
-    printToMessageLog(messageLog.INTERACTIVE,
+    printToMessageLog(MessageLog.INTERACTIVE,
                       'Attempting to attach to debugger...');
     this.scripts = [];
     debugHub.onEvent.addListener(this.onEventCallback);
@@ -37,7 +37,7 @@
     this.sendCommand('Log.enable');
     this.sendCommand('Runtime.enable');
   } else if (this.attachState == this.DEBUGGER_ATTACHING) {
-    printToMessageLog(messageLog.INTERACTIVE,
+    printToMessageLog(MessageLog.INTERACTIVE,
                       'Still attempting to attach to debugger...');
   }
 }
@@ -49,7 +49,7 @@
   for (var i in this.scripts) {
     var index = this.pad(i, 3);
     var scriptUrl = this.scripts[i].url;
-    printToMessageLog(messageLog.INTERACTIVE, index + ': ' + scriptUrl);
+    printToMessageLog(MessageLog.INTERACTIVE, index + ': ' + scriptUrl);
   }
 }
 
@@ -74,12 +74,11 @@
   var lines = scriptSource.split('\n');
   for (var i = 0; i < lines.length; i++) {
     var index = this.pad(i + 1, 4);
-    printToMessageLog(messageLog.INFO, index + ': ' + lines[i]);
+    printToMessageLog(MessageLog.INFO, index + ': ' + lines[i]);
   }
 }
 
-DebuggerClient.prototype.evaluate = function(expression) {
-  var callback = this.evaluateCallback.bind(this);
+DebuggerClient.prototype.evaluate = function(expression, callback) {
   var method = 'Runtime.evaluate';
   var params = {};
   params.contextId = this.executionContext;
@@ -91,30 +90,6 @@
   this.sendCommand(method, params, callback);
 }
 
-DebuggerClient.prototype.evaluateCallback = function(result) {
-  if (result.wasThrown) {
-    printToMessageLog(messageLog.ERROR,
-                      'Uncaught ' + result.result.description);
-  } else if (result.result.preview) {
-    printToMessageLog(messageLog.INFO, result.result.preview.description);
-    if (result.result.preview.properties) {
-      for (var i = 0; i < result.result.preview.properties.length; ++i) {
-        var property = result.result.preview.properties[i];
-        printToMessageLog(messageLog.INFO,
-                          '  ' + property.name + ': ' + property.value);
-      }
-    }
-    if (result.result.preview.overflow) {
-      printToMessageLog(messageLog.INFO, '  ...');
-    }
-  } else if (result.result.description) {
-    printToMessageLog(messageLog.INFO, result.result.description);
-  } else if (result.result.value) {
-    printToMessageLog(messageLog.INFO, result.result.value.toString());
-  }
-  printToMessageLog(messageLog.INFO, '');
-}
-
 // All debugger commands are routed through this method. Converts the command
 // parameters into a JSON string to pass to the debug dispatcher.
 DebuggerClient.prototype.sendCommand = function(method, commandParams,
@@ -133,7 +108,7 @@
 
   if (response && response.error) {
     printToMessageLog(
-        messageLog.ERROR,
+        MessageLog.ERROR,
         '[ERROR(' + response.error.code + '):' + method + '] ' +
             response.error.message);
   } else if (callback) {
@@ -149,10 +124,10 @@
 
 DebuggerClient.prototype.onAttach = function() {
   if (debugHub.lastError) {
-    printToMessageLog(messageLog.WARNING, 'Could not attach to debugger.');
+    printToMessageLog(MessageLog.WARNING, 'Could not attach to debugger.');
     this.attachState = this.DEBUGGER_DETACHED;
   } else {
-    printToMessageLog(messageLog.INTERACTIVE, 'Debugger attached.');
+    printToMessageLog(MessageLog.INTERACTIVE, 'Debugger attached.');
     this.attachState = this.DEBUGGER_ATTACHED;
   }
 }
@@ -178,13 +153,13 @@
 }
 
 DebuggerClient.prototype.onDetached = function() {
-  printToMessageLog(messageLog.INTERACTIVE, 'Debugger detached.');
+  printToMessageLog(MessageLog.INTERACTIVE, 'Debugger detached.');
   this.attachState = this.DEBUGGER_DETACHED;
 }
 
 DebuggerClient.prototype.onExecutionContextCreated = function(params) {
   this.executionContext = params.context.id;
-  printToMessageLog(messageLog.INFO,
+  printToMessageLog(MessageLog.INFO,
                     'Execution context created: ' + this.executionContext);
 }
 
@@ -205,9 +180,9 @@
 DebuggerClient.prototype.onConsoleApiCalled = function(params) {
   var severity = params.type;
   if (severity === "assert") {
-    severity = messageLog.ERROR;
+    severity = MessageLog.ERROR;
   } else if (severity === "log") {
-    severity = messageLog.INFO;
+    severity = MessageLog.INFO;
   }
 
   var message = '';
@@ -222,7 +197,7 @@
     }
   }
 
-  printToMessageLog(messageLog.CONSOLE + severity, message);
+  printToMessageLog(MessageLog.CONSOLE + severity, message);
 }
 
 DebuggerClient.prototype.onScriptParsed = function(params) {
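
A minimal sketch of the new evaluate(expression, callback) contract, assuming
the client has already attached and an execution context exists. The helper
name logTitle and the 'document.title' expression are illustrative only.

  function logTitle(debuggerClient) {
    debuggerClient.evaluate('document.title', function(response) {
      // |response| follows the Runtime.evaluate result shape handled above;
      // response.result.value carries the primitive value when there is one.
      console.log(response.result && response.result.value);
    });
  }
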
diff --git a/src/cobalt/debug/console/content/media_console.js b/src/cobalt/debug/console/content/media_console.js
new file mode 100644
index 0000000..66b33d1
--- /dev/null
+++ b/src/cobalt/debug/console/content/media_console.js
@@ -0,0 +1,221 @@
+// Copyright 2019 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+const kNewline = '\r\n';
+
+const kMediaConsoleFrame = 'mediaConsoleFrame';
+
+const videoCodecFamilies = ['av01', 'avc1', 'avc3', 'hev1', 'hvc1', 'vp09',
+      'vp8', 'vp9'];
+const audioCodecFamilies = ['ac-3', 'ec-3', 'mp4a', 'opus', 'vorbis'];
+
+const kAltModifier = 'A-';
+const kCtrlModifier = 'C-';
+
+function MediaConsole(debuggerClient) {
+  let mediaConsoleNode = document.getElementById('mediaConsole');
+
+  this.debuggerClient = debuggerClient;
+  this.getDisabledMediaCodecs = window.debugHub.cVal.getValue.bind(
+      window.debugHub.cVal, 'Media.DisabledMediaCodecs');
+  this.setDisabledMediaCodecs = window.debugHub.sendConsoleCommand.bind(
+      window.debugHub, 'disable_media_codecs');
+  this.lastUpdateTime = window.performance.now();
+  this.updatePeriod = 1000;  // Minimum time between updates, in ms.
+
+  // Registry of all the hotkeys and their function handlers.
+  // NOTE: This is not the full list, since more will be added once the codec
+  // specific controls are registered.
+  this.hotkeyRegistry = new Map([
+      ['p', {handler: this.onPlayPause.bind(this), help: '(p)ause/(p)lay'}],
+      [']', {handler: this.onIncreasePlaybackRate.bind(this),
+        help: '(])Increase Playback Rate'}],
+      ['[', {handler: this.onDecreasePlaybackRate.bind(this),
+        help: '([)Decrease Playback Rate'}],
+  ]);
+
+  this.hotkeyHelpNode = document.createTextNode('');
+  mediaConsoleNode.appendChild(this.hotkeyHelpNode);
+
+  // Dynamically added div will be changed as we get state information.
+  let playerStateElem = document.createElement('div');
+  this.playerStateText = document.createTextNode('');
+  playerStateElem.appendChild(this.playerStateText);
+  mediaConsoleNode.appendChild(playerStateElem);
+
+  this.printToMediaConsoleCallback = this.printToMediaConsole.bind(this);
+  this.codecSupportMap = new Map();
+  this.isInitialized = false;
+}
+
+MediaConsole.prototype.setVisible = function(visible) {
+  if (visible) {
+    this.initialize();
+  }
+}
+
+MediaConsole.prototype.initialize = function() {
+  if (this.isInitialized) { return; }
+
+  this.debuggerClient.attach();
+  this.initializeMediaConsoleContext();
+  this.initializeSupportedCodecs();
+
+  // Since |initializeSupportedCodecs| is resolved asynchronously, we need to
+  // wait until |codecSupportMap| is fully populated before finishing the rest
+  // of the initialization.
+  if (this.codecSupportMap.size > 0) {
+    this.registerCodecHotkeys();
+    this.generateHotkeyHelpText();
+    this.isInitialized = true;
+  }
+}
+
+MediaConsole.prototype.initializeMediaConsoleContext = function() {
+  let js = debugHub.readDebugContentText('console/media_console_context.js');
+  this.debuggerClient.evaluate(js);
+}
+
+MediaConsole.prototype.initializeSupportedCodecs = function() {
+  if (this.codecSupportMap.size > 0) {
+    return;
+  }
+
+  function handleSupportQueryResponse(response) {
+    let results = JSON.parse(response.result.value);
+    results.forEach(record => {
+      let codecFamily = record[0];
+      let isSupported = record[1] || !!this.codecSupportMap.get(codecFamily);
+      this.codecSupportMap.set(codecFamily, isSupported);
+    });
+  };
+
+  this.debuggerClient.evaluate(`_mediaConsoleContext.getSupportedCodecs()`,
+      handleSupportQueryResponse.bind(this));
+}
+
+MediaConsole.prototype.registerCodecHotkeys = function() {
+  // Codec control hotkeys are of the form: Modifier + Number.
+  // Due to the large number of codecs, we split all the video codecs into Ctrl
+  // and all the audio codecs into Alt.
+  // "C-" prefix indicates a key with the ctrlKey modifier.
+  // "A-" prefix indicates a key with the altKey modifier.
+  // Additionally, we only register those hotkeys that we filter as supported.
+  function registerCodecHotkey(modifier, codec, index) {
+    if (!this.codecSupportMap.get(codec)) {
+      return;
+    }
+    let modAndNum = `${modifier}${index + 1}`;
+    let helpStr = `( ${modAndNum} ) ${codec}`;
+    this.hotkeyRegistry.set(modAndNum, {
+      handler: this.onToggleCodec.bind(this, codec),
+      help: helpStr
+    });
+  };
+  videoCodecFamilies.forEach(registerCodecHotkey.bind(this, kCtrlModifier));
+  audioCodecFamilies.forEach(registerCodecHotkey.bind(this, kAltModifier));
+}
+
+MediaConsole.prototype.generateHotkeyHelpText = function() {
+  // Generate the help text that will be displayed at the top of the console
+  // output.
+  let hotkeysHelp = `Hotkeys: ${kNewline}`;
+  hotkeysHelp += `Ctrl+Num toggles video codecs ${kNewline}`;
+  hotkeysHelp += `Alt+Num toggles audio codecs ${kNewline}`;
+  const generateHelpText = function(hotkeyInfo, hotkey, map) {
+    hotkeysHelp += hotkeyInfo.help + ', ';
+  };
+  this.hotkeyRegistry.forEach(generateHelpText);
+  hotkeysHelp = hotkeysHelp.substring(0, hotkeysHelp.length-2);
+  hotkeysHelp += kNewline;
+  hotkeysHelp += kNewline;
+  hotkeysHelp += kNewline;
+
+  this.hotkeyHelpNode.nodeValue = hotkeysHelp;
+}
+
+MediaConsole.prototype.parseStateFromResponse = function(response) {
+  // TODO: Handle the situation where the response contains exceptionDetails.
+  // https://chromedevtools.github.io/devtools-protocol/1-3/Runtime#method-evaluate
+  return JSON.parse(response.result.value);
+}
+
+MediaConsole.prototype.printToMediaConsole = function(response) {
+  const state = this.parseStateFromResponse(response);
+  let videoStatus = 'No primary video.';
+  if(state.hasPrimaryVideo) { videoStatus = ''; }
+  this.playerStateText.textContent =
+      `Primary Video: ${videoStatus} ${kNewline} \
+      Paused: ${state.paused} ${kNewline} \
+      Current Time: ${state.currentTime} ${kNewline} \
+      Duration: ${state.duration} ${kNewline} \
+      Playback Rate: ${state.playbackRate} \
+      (default: ${state.defaultPlaybackRate}) ${kNewline} \
+      Disabled Media Codecs: ${state.disabledMediaCodecs}`;
+}
+
+MediaConsole.prototype.update = function() {
+  const t = window.performance.now();
+  if (t > this.lastUpdateTime + this.updatePeriod) {
+    this.lastUpdateTime = t;
+    this.debuggerClient.evaluate('_mediaConsoleContext.getPlayerState()',
+        this.printToMediaConsoleCallback);
+  }
+}
+
+MediaConsole.prototype.onWheel = function(event) {}
+
+MediaConsole.prototype.onKeyup = function(event) {}
+
+MediaConsole.prototype.onKeypress = function(event) {}
+
+MediaConsole.prototype.onInput = function(event) {}
+
+MediaConsole.prototype.onKeydown = function(event) {
+  let matchedHotkeyEntry = null;
+  this.hotkeyRegistry.forEach(function(hotkeyInfo, hotkey, map) {
+    let eventKey = event.key;
+    if (event.altKey) { eventKey = kAltModifier + eventKey; }
+    if (event.ctrlKey) { eventKey = kCtrlModifier + eventKey; }
+    if (eventKey == hotkey) {
+      matchedHotkeyEntry = hotkeyInfo;
+    }
+  });
+  if (matchedHotkeyEntry) {
+    matchedHotkeyEntry.handler();
+  }
+}
+
+MediaConsole.prototype.onPlayPause = function() {
+  this.debuggerClient.evaluate('_mediaConsoleContext.togglePlayPause()',
+      this.printToMediaConsoleCallback);
+}
+
+MediaConsole.prototype.onIncreasePlaybackRate = function() {
+  this.debuggerClient.evaluate('_mediaConsoleContext.increasePlaybackRate()',
+      this.printToMediaConsoleCallback);
+}
+
+MediaConsole.prototype.onDecreasePlaybackRate = function() {
+  this.debuggerClient.evaluate('_mediaConsoleContext.decreasePlaybackRate()',
+      this.printToMediaConsoleCallback);
+}
+
+MediaConsole.prototype.onToggleCodec = function(codecToToggle) {
+  let codecs = this.getDisabledMediaCodecs().split(';');
+  codecs = codecs.filter(s => s.length > 0);
+  let toggled = codecs.filter(c => c != codecToToggle);
+  if (codecs.length == toggled.length) { toggled.push(codecToToggle); }
+  this.setDisabledMediaCodecs(toggled.join(';'));
+}
diff --git a/src/cobalt/debug/console/content/media_console_context.js b/src/cobalt/debug/console/content/media_console_context.js
new file mode 100644
index 0000000..bd0df0f
--- /dev/null
+++ b/src/cobalt/debug/console/content/media_console_context.js
@@ -0,0 +1,162 @@
+// Copyright 2019 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+(function() {
+
+let ctx = window._mediaConsoleContext = {};
+
+// NOTE: Place all "private" members and methods of |_mediaConsoleContext|
+// below. Private functions are not attached to |_mediaConsoleContext|
+// directly. Rather, they are referenced within the public functions, which
+// prevents them from being garbage collected.
+
+const kPlaybackRates = [0.25, 0.5, 0.75, 1.0, 1.25, 1.5, 1.75, 2.0];
+
+function getPrimaryVideo() {
+  const elem = document.querySelectorAll('video');
+  if (elem && elem.length > 0) {
+    let primary = null;
+    for (let i = 0; i < elem.length; i++) {
+      const rect = elem[i].getBoundingClientRect();
+      if (rect.width == window.innerWidth &&
+          rect.height == window.innerHeight) {
+        if (primary != null) {
+          console.warn('Two video elements found with the same ' +
+              'dimensions as the main window.');
+        }
+        primary = elem[i];
+      }
+    }
+    return primary;
+  }
+  return null;
+};
+
+function extractState(video, disabledMediaCodecs) {
+  let state = {};
+  state.disabledMediaCodecs = disabledMediaCodecs;
+  state.hasPrimaryVideo = false;
+  if (video) {
+    state.hasPrimaryVideo = true;
+    state.paused = video.paused;
+    state.currentTime = video.currentTime;
+    state.duration = video.duration;
+    state.defaultPlaybackRate = video.defaultPlaybackRate;
+    state.playbackRate = video.playbackRate;
+  }
+  return JSON.stringify(state);
+};
+
+function getDisabledMediaCodecs() {
+  return window.h5vcc.cVal.getValue("Media.DisabledMediaCodecs");
+};
+
+// NOTE: Place all public members and methods of |_mediaConsoleContext|
+// below. They form closures with the above "private" members and methods
+// and hence can use them directly, without referencing |this|.
+
+ctx.getPlayerState = function() {
+  const video = getPrimaryVideo();
+  const disabledMediaCodecs = getDisabledMediaCodecs();
+  return extractState(video, disabledMediaCodecs);
+};
+
+ctx.togglePlayPause = function() {
+  let video = getPrimaryVideo();
+  if (video) {
+    if (video.paused) {
+      video.play();
+    } else {
+      video.pause();
+    }
+  }
+  return extractState(video, getDisabledMediaCodecs());
+};
+
+ctx.increasePlaybackRate = function() {
+  let video = getPrimaryVideo();
+  if (video) {
+    let i = kPlaybackRates.indexOf(video.playbackRate);
+    i = Math.min(i + 1, kPlaybackRates.length - 1);
+    video.playbackRate = kPlaybackRates[i];
+  }
+  return extractState(video, getDisabledMediaCodecs());
+};
+
+ctx.decreasePlaybackRate = function() {
+  let video = getPrimaryVideo();
+  if (video) {
+    let i = kPlaybackRates.indexOf(video.playbackRate);
+    i = Math.max(i - 1, 0);
+    video.playbackRate = kPlaybackRates[i];
+  }
+  return extractState(video, getDisabledMediaCodecs());
+};
+
+ctx.getSupportedCodecs = function() {
+  // By querying all the possible mime and codec pairs, we can determine
+  // which codecs are valid to control and toggle.  We use arbitrarily chosen
+  // codec subformats to determine if the entire family is supported.
+  const kVideoCodecs = [
+      'av01.0.04M.10.0.110.09.16.09.0',
+      'avc1.640028',
+      'hvc1.1.2.L93.B0',
+      'vp09.02.10.10.01.09.16.09.01',
+      'vp8',
+      'vp9',
+  ];
+  const kVideoMIMEs = [
+      'video/mpeg',
+      'video/mp4',
+      'video/ogg',
+      'video/webm',
+  ];
+  const kAudioCodecs = [
+      'ac-3',
+      'ec-3',
+      'mp4a.40.2',
+      'opus',
+      'vorbis',
+  ];
+  const kAudioMIMEs = [
+      'audio/flac',
+      'audio/mpeg',
+      'audio/mp3',
+      'audio/mp4',
+      'audio/ogg',
+      'audio/wav',
+      'audio/webm',
+      'audio/x-m4a',
+  ];
+
+  let results = [];
+  kVideoMIMEs.forEach(mime => {
+    kVideoCodecs.forEach(codec => {
+      let family = codec.split('.')[0];
+      let mimeCodec = mime + '; codecs="' + codec + '"';
+      results.push([family, MediaSource.isTypeSupported(mimeCodec)]);
+    })
+  });
+  kAudioMIMEs.forEach(mime => {
+    kAudioCodecs.forEach(codec => {
+      let family = codec.split('.')[0];
+      let mimeCodec = mime + '; codecs="' + codec + '"';
+      results.push([family, MediaSource.isTypeSupported(mimeCodec)]);
+    })
+  });
+  return JSON.stringify(results);
+};
+
+})();
diff --git a/src/cobalt/debug/console/content/message_log.js b/src/cobalt/debug/console/content/message_log.js
index 03433bf..c22b510 100644
--- a/src/cobalt/debug/console/content/message_log.js
+++ b/src/cobalt/debug/console/content/message_log.js
@@ -77,16 +77,6 @@
 
 // Constructor for the message log object itself.
 function MessageLog(messageContainer) {
-  // Log levels defined by the 'level' property of LogEntry
-  // https://chromedevtools.github.io/devtools-protocol/1-3/Log#type-LogEntry
-  this.VERBOSE = "verbose";
-  this.INFO = "info";
-  this.WARNING = "warning";
-  this.ERROR = "error";
-  // Custom level used internally by the console.
-  this.INTERACTIVE = "interactive";
-  // Prefix on severity for messages from the JS console.
-  this.CONSOLE = '*';
   // Number of items to display on a single page.
   this.PAGE_SIZE = 50;
   // Number of items to scroll when the user pages up or down.
@@ -103,6 +93,17 @@
   this.visible = false;
 }
 
+// Log levels defined by the 'level' property of LogEntry
+// https://chromedevtools.github.io/devtools-protocol/1-3/Log#type-LogEntry
+MessageLog.VERBOSE = "verbose";
+MessageLog.INFO = "info";
+MessageLog.WARNING = "warning";
+MessageLog.ERROR = "error";
+// Custom level used internally by the console.
+MessageLog.INTERACTIVE = "interactive";
+// Prefix on severity for messages from the JS console.
+MessageLog.CONSOLE = '*';
+
 MessageLog.prototype.setVisible = function(visible) {
   var wasVisible = this.visible;
   this.visible = visible;
@@ -131,8 +132,8 @@
   elem.className = 'message';
   var text = document.createTextNode(message);
 
-  if (severity.startsWith(this.CONSOLE)) {
-    severity = severity.substr(this.CONSOLE.length);
+  if (severity.startsWith(MessageLog.CONSOLE)) {
+    severity = severity.substr(MessageLog.CONSOLE.length);
     elem.classList.add('console');
   }
 
diff --git a/src/cobalt/debug/console/debug_console_mode.idl b/src/cobalt/debug/console/debug_console_mode.idl
new file mode 100644
index 0000000..04163e2
--- /dev/null
+++ b/src/cobalt/debug/console/debug_console_mode.idl
@@ -0,0 +1,25 @@
+// Copyright 2019 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Visibility modes of the debug console overlay, as reported by
+// DebugHub.getDebugConsoleMode().
+
+enum DebugConsoleMode {
+  "off",
+  "hud",
+  "debug",
+  "media",
+  "count"
+};
diff --git a/src/cobalt/debug/console/debug_hub.cc b/src/cobalt/debug/console/debug_hub.cc
index e8760d5..2d9ecdc 100644
--- a/src/cobalt/debug/console/debug_hub.cc
+++ b/src/cobalt/debug/console/debug_hub.cc
@@ -17,9 +17,13 @@
 #include <memory>
 #include <set>
 
+#include "base/files/file_path.h"
+#include "base/files/file_util.h"
 #include "base/json/json_writer.h"
+#include "base/path_service.h"
 #include "base/values.h"
 #include "cobalt/base/c_val.h"
+#include "cobalt/base/cobalt_paths.h"
 #include "cobalt/debug/console/command_manager.h"
 #include "cobalt/debug/json_object.h"
 
@@ -27,6 +31,10 @@
 namespace debug {
 namespace console {
 
+namespace {
+constexpr char kContentDir[] = "cobalt/debug";
+}  // namespace
+
 DebugHub::DebugHub(
     const GetHudModeCallback& get_hud_mode_callback,
     const CreateDebugClientCallback& create_debug_client_callback)
@@ -37,7 +45,7 @@
 
 DebugHub::~DebugHub() {}
 
-int DebugHub::GetDebugConsoleMode() const {
+debug::console::DebugConsoleMode DebugHub::GetDebugConsoleMode() const {
   return get_hud_mode_callback_.Run();
 }
 
@@ -62,6 +70,21 @@
   callback_reference.value().Run();
 }
 
+std::string DebugHub::ReadDebugContentText(const std::string& filename) {
+  base::FilePath file_path;
+  base::PathService::Get(paths::DIR_COBALT_WEB_ROOT, &file_path);
+  file_path = file_path.AppendASCII(kContentDir);
+  file_path = file_path.AppendASCII(filename);
+
+  std::string text;
+  if (!base::ReadFileToString(file_path, &text)) {
+    DLOG(WARNING) << "Cannot read file: " << file_path.value();
+  }
+  return text;
+}
+
 void DebugHub::SendCommand(const std::string& method,
                            const std::string& json_params,
                            const ResponseCallbackArg& callback) {
diff --git a/src/cobalt/debug/console/debug_hub.h b/src/cobalt/debug/console/debug_hub.h
index cb02444..319e862 100644
--- a/src/cobalt/debug/console/debug_hub.h
+++ b/src/cobalt/debug/console/debug_hub.h
@@ -22,6 +22,7 @@
 #include "base/message_loop/message_loop.h"
 #include "base/optional.h"
 #include "cobalt/debug/console/console_command.h"
+#include "cobalt/debug/console/debug_console_mode.h"
 #include "cobalt/debug/console/debugger_event_target.h"
 #include "cobalt/debug/debug_client.h"
 #include "cobalt/dom/c_val_view.h"
@@ -43,7 +44,7 @@
  public:
   // Function signature to call when we need to query for the Hud visibility
   // mode.
-  typedef base::Callback<int()> GetHudModeCallback;
+  typedef base::Callback<debug::console::DebugConsoleMode()> GetHudModeCallback;
 
   // JavaScript callback to be run when debugger attaches/detaches.
   typedef script::CallbackFunction<void()> AttachCallback;
@@ -54,12 +55,6 @@
       ResponseCallback;
   typedef script::ScriptValue<ResponseCallback> ResponseCallbackArg;
 
-  // Debug console visibility modes.
-  static const int kDebugConsoleOff = 0;
-  static const int kDebugConsoleHud = 1;
-  static const int kDebugConsoleOn = 2;
-  static const int kDebugConsoleNumModes = kDebugConsoleOn + 1;
-
   // Thread-safe ref-counted struct used to pass asynchronously executed
   // response callbacks around. Stores the message loop the callback must be
   // executed on as well as the callback itself.
@@ -80,11 +75,15 @@
 
   const scoped_refptr<dom::CValView>& c_val() const { return c_val_; }
 
-  int GetDebugConsoleMode() const;
+  debug::console::DebugConsoleMode GetDebugConsoleMode() const;
 
   void Attach(const AttachCallbackArg& callback);
   void Detach(const AttachCallbackArg& callback);
 
+  // Reads a text file from the content/web/cobalt/debug/ directory and
+  // returns its contents.
+  std::string ReadDebugContentText(const std::string& filename);
+
   // Sends a devtools protocol command to be executed in the context of the main
   // WebModule that is being debugged.
   void SendCommand(const std::string& method, const std::string& json_params,
diff --git a/src/cobalt/debug/console/debug_hub.idl b/src/cobalt/debug/console/debug_hub.idl
index 21af7d6..a9afb03 100644
--- a/src/cobalt/debug/console/debug_hub.idl
+++ b/src/cobalt/debug/console/debug_hub.idl
@@ -19,17 +19,14 @@
 [
   Conditional=ENABLE_DEBUGGER
 ] interface DebugHub {
-  const long DEBUG_CONSOLE_OFF = 0;
-  const long DEBUG_CONSOLE_HUD = 1;
-  const long DEBUG_CONSOLE_ON = 2;
-
   readonly attribute CValView cVal;
 
-  long getDebugConsoleMode();
+  DebugConsoleMode getDebugConsoleMode();
 
   void attach(AttachCallback cb);
   void detach(AttachCallback cb);
 
+  DOMString readDebugContentText(DOMString filename);
   void sendCommand(DOMString method, DOMString jsonParams, ResponseCallback cb);
 
   readonly attribute DOMString? lastError;
diff --git a/src/cobalt/debug/debug.gyp b/src/cobalt/debug/debug.gyp
index 059acf9..35da044 100644
--- a/src/cobalt/debug/debug.gyp
+++ b/src/cobalt/debug/debug.gyp
@@ -40,6 +40,8 @@
         'backend/dom_agent.h',
         'backend/log_agent.cc',
         'backend/log_agent.h',
+        'backend/overlay_agent.cc',
+        'backend/overlay_agent.h',
         'backend/page_agent.cc',
         'backend/page_agent.h',
         'backend/render_layer.cc',
diff --git a/src/cobalt/doc/web_debugging.md b/src/cobalt/doc/web_debugging.md
index b43a59b..c4711db 100644
--- a/src/cobalt/doc/web_debugging.md
+++ b/src/cobalt/doc/web_debugging.md
@@ -158,9 +158,6 @@
 
 ## Tips
 
-*   Don't open Cobalt's console overlay before attaching the remote DevTools as
-    that can cause the latter not to work.
-
 *   You can make Cobalt reload the current page by pressing F5 in the Cobalt
     window, or ctrl-R in the remote DevTools. This may be useful for debugging
     startup code in the web app. It may also help in case some source file is
diff --git a/src/cobalt/dom/dom_test.gyp b/src/cobalt/dom/dom_test.gyp
index 7dd6d14..29a0a2f 100644
--- a/src/cobalt/dom/dom_test.gyp
+++ b/src/cobalt/dom/dom_test.gyp
@@ -77,10 +77,10 @@
         '<(DEPTH)/cobalt/loader/loader.gyp:loader',
         '<(DEPTH)/cobalt/renderer/rasterizer/skia/skia/skia.gyp:skia',
         '<(DEPTH)/cobalt/speech/speech.gyp:speech',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
 
     {
diff --git a/src/cobalt/dom/eme/media_key_session.cc b/src/cobalt/dom/eme/media_key_session.cc
index 3570c14..5a9c775 100644
--- a/src/cobalt/dom/eme/media_key_session.cc
+++ b/src/cobalt/dom/eme/media_key_session.cc
@@ -215,12 +215,10 @@
   // 5.2. Use CDM to close the key session associated with session.
   drm_system_session_->Close();
 
-  // Let |MediaKeys| know that the session should be removed from the list
-  // of open sessions.
-  closed_callback_.Run(this);
-
+#if !SB_HAS(DRM_SESSION_CLOSED)
   // 5.3.1. Run the Session Closed algorithm on the session.
   OnSessionClosed();
+#endif  // !SB_HAS(DRM_SESSION_CLOSED)
 
   // 5.3.2. Resolve promise.
   promise->Resolve();
@@ -416,6 +414,10 @@
   //    - TODO: Implement expiration.
   // 7. Resolve promise.
   closed_promise_reference_.value().Resolve();
+
+  // Let |MediaKeys| know that the session should be removed from the list
+  // of open sessions.
+  closed_callback_.Run(this);
 }
 
 }  // namespace eme
diff --git a/src/cobalt/dom/html_element.cc b/src/cobalt/dom/html_element.cc
index 1d3a522..7b7483c 100644
--- a/src/cobalt/dom/html_element.cc
+++ b/src/cobalt/dom/html_element.cc
@@ -535,7 +535,7 @@
   //     has no associated scrolling box, or the element has no overflow,
   //     terminate these steps.
   if (!layout_boxes_ ||
-      scroll_width() <= layout_boxes_->GetPaddingEdgeWidth() ) {
+      scroll_width() <= layout_boxes_->GetPaddingEdgeWidth()) {
     // Make sure the UI navigation container is set to the expected 0.
     x = 0.0f;
   }
@@ -597,7 +597,7 @@
 }
 
 // Algorithm for offsetParent:
-//   https://www.w3.org/TR/2013/WD-cssom-view-20131217/#dom-htmlelement-offsetparent
+//   https://drafts.csswg.org/date/2019-10-11/cssom-view/#extensions-to-the-htmlelement-interface
 Element* HTMLElement::offset_parent() {
   DCHECK(node_document());
   node_document()->DoSynchronousLayout();
@@ -617,7 +617,9 @@
   // 2. Return the nearest ancestor element of the element for which at least
   //    one of the following is true and terminate this algorithm if such an
   //    ancestor is found:
-  //    . The computed value of the 'position' property is not 'static'.
+  //    . The element is a containing block of absolutely-positioned descendants
+  //      (regardless of whether there are any absolutely-positioned
+  //       descendants).
   //    . It is the HTML body element.
   for (Node* ancestor_node = parent_node(); ancestor_node;
        ancestor_node = ancestor_node->parent_node()) {
@@ -631,8 +633,8 @@
     }
     DCHECK(ancestor_html_element->computed_style());
     if (ancestor_html_element->AsHTMLBodyElement() ||
-        ancestor_html_element->computed_style()->position() !=
-            cssom::KeywordValue::GetStatic()) {
+        ancestor_html_element->computed_style()
+            ->IsContainingBlockForPositionAbsoluteElements()) {
       return ancestor_element;
     }
   }
@@ -1144,24 +1146,23 @@
   directionality_ = base::nullopt;
 }
 
-void HTMLElement::OnUiNavBlur() {
-  Blur();
-}
+void HTMLElement::OnUiNavBlur() { Blur(); }
 
 void HTMLElement::OnUiNavFocus() {
   // Ensure the focusing steps do not trigger the UI navigation item to
   // force focus again.
-  scoped_refptr<ui_navigation::NavItem> temp_item = ui_nav_item_;
-  ui_nav_item_ = nullptr;
-  Focus();
-  ui_nav_item_ = temp_item;
+  if (!ui_nav_focusing_) {
+    ui_nav_focusing_ = true;
+    Focus();
+    ui_nav_focusing_ = false;
+  }
 }
 
 void HTMLElement::OnUiNavScroll() {
   Document* document = node_document();
   scoped_refptr<Window> window(document ? document->window() : nullptr);
-  DispatchEvent(new UIEvent(base::Tokens::scroll(),
-                Event::kBubbles, Event::kNotCancelable, window));
+  DispatchEvent(new UIEvent(base::Tokens::scroll(), Event::kBubbles,
+                            Event::kNotCancelable, window));
 }
 
 HTMLElement::HTMLElement(Document* document, base::Token local_name)
@@ -1342,7 +1343,7 @@
   ClearRuleMatchingState();
 
   // Set the focus item for the UI navigation system.
-  if (ui_nav_item_ && !ui_nav_item_->IsContainer()) {
+  if (ui_nav_item_ && !ui_nav_item_->IsContainer() && !ui_nav_focusing_) {
     // Only navigation items attached to the root container are interactable.
     // If the item is not registered with a container, then force a layout to
     // connect items to their containers and eventually to the root container.
diff --git a/src/cobalt/dom/html_element.h b/src/cobalt/dom/html_element.h
index 5166bf8..d1f548d 100644
--- a/src/cobalt/dom/html_element.h
+++ b/src/cobalt/dom/html_element.h
@@ -493,6 +493,12 @@
   // boxes without requiring a new layout.
   scoped_refptr<ui_navigation::NavItem> ui_nav_item_;
 
+  // This temporary flag is used to avoid a cycle on focus changes. When the
+  // HTML element receives focus, it must inform the UI navigation item. When
+  // the UI navigation item receives focus (either by calling SetFocus or by an
+  // update from the UI engine), it will tell the HTML element it was focused.
+  bool ui_nav_focusing_ = false;
+
   // HTMLElement is a friend of Animatable so that animatable can insert and
   // remove animations into HTMLElement's set of animations.
   friend class DOMAnimatable;
diff --git a/src/cobalt/dom/source_buffer.cc b/src/cobalt/dom/source_buffer.cc
index 08ea6e4..b99fde2 100644
--- a/src/cobalt/dom/source_buffer.cc
+++ b/src/cobalt/dom/source_buffer.cc
@@ -45,9 +45,6 @@
 #include "cobalt/dom/source_buffer.h"
 
 #include <algorithm>
-#include <limits>
-#include <memory>
-#include <vector>
 
 #include "base/compiler_specific.h"
 #include "base/logging.h"
@@ -94,21 +91,11 @@
       id_(id),
       chunk_demuxer_(chunk_demuxer),
       media_source_(media_source),
-      track_defaults_(new TrackDefaultList(NULL)),
       event_queue_(event_queue),
-      mode_(kSourceBufferAppendModeSegments),
-      updating_(false),
-      timestamp_offset_(0),
       audio_tracks_(
           new AudioTrackList(settings, media_source->GetMediaElement())),
       video_tracks_(
-          new VideoTrackList(settings, media_source->GetMediaElement())),
-      append_window_start_(0),
-      append_window_end_(std::numeric_limits<double>::infinity()),
-      first_initialization_segment_received_(false),
-      pending_append_data_offset_(0),
-      pending_remove_start_(-1),
-      pending_remove_end_(-1) {
+          new VideoTrackList(settings, media_source->GetMediaElement())) {
   DCHECK(!id_.empty());
   DCHECK(media_source_);
   DCHECK(chunk_demuxer);
@@ -340,6 +327,9 @@
   chunk_demuxer_ = NULL;
   media_source_ = NULL;
   event_queue_ = NULL;
+
+  pending_append_data_.reset();
+  pending_append_data_capacity_ = 0;
 }
 
 double SourceBuffer::GetHighestPresentationTimestamp() const {
@@ -419,8 +409,15 @@
 
   DCHECK(data || size == 0);
   if (data) {
-    pending_append_data_.insert(pending_append_data_.end(), data, data + size);
+    DCHECK_EQ(pending_append_data_offset_, 0u);
+    if (pending_append_data_capacity_ < size) {
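+      // Release the old buffer before allocating the new one so that both
+      // allocations are never held at the same time.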
+      pending_append_data_.reset();
+      pending_append_data_.reset(new uint8_t[size]);
+      pending_append_data_capacity_ = size;
+    }
+    SbMemoryCopy(pending_append_data_.get(), data, size);
   }
+  pending_append_data_size_ = size;
   pending_append_data_offset_ = 0;
 
   updating_ = true;
@@ -436,15 +433,14 @@
 
   DCHECK(updating_);
 
-  DCHECK_GE(pending_append_data_.size(), pending_append_data_offset_);
-  size_t append_size =
-      pending_append_data_.size() - pending_append_data_offset_;
+  DCHECK_GE(pending_append_data_size_, pending_append_data_offset_);
+  size_t append_size = pending_append_data_size_ - pending_append_data_offset_;
   append_size = std::min(append_size, kMaxAppendSize);
 
-  uint8 dummy[1];
+  uint8_t dummy;
   const uint8* data_to_append =
-      append_size > 0 ? &pending_append_data_[0] + pending_append_data_offset_
-                      : dummy;
+      append_size > 0 ? pending_append_data_.get() + pending_append_data_offset_
+                      : &dummy;
 
   base::TimeDelta timestamp_offset = DoubleToTimeDelta(timestamp_offset_);
   bool success = chunk_demuxer_->AppendData(
@@ -456,20 +452,20 @@
   }
 
   if (!success) {
-    pending_append_data_.clear();
+    pending_append_data_size_ = 0;
     pending_append_data_offset_ = 0;
     AppendError();
   } else {
     pending_append_data_offset_ += append_size;
 
-    if (pending_append_data_offset_ < pending_append_data_.size()) {
+    if (pending_append_data_offset_ < pending_append_data_size_) {
       append_timer_.Start(FROM_HERE, base::TimeDelta(), this,
                           &SourceBuffer::OnAppendTimer);
       return;
     }
 
     updating_ = false;
-    pending_append_data_.clear();
+    pending_append_data_size_ = 0;
     pending_append_data_offset_ = 0;
 
     ScheduleEvent(base::Tokens::update());
@@ -524,7 +520,7 @@
   DCHECK_EQ(pending_remove_start_, -1);
 
   append_timer_.Stop();
-  pending_append_data_.clear();
+  pending_append_data_size_ = 0;
   pending_append_data_offset_ = 0;
 
   updating_ = false;
diff --git a/src/cobalt/dom/source_buffer.h b/src/cobalt/dom/source_buffer.h
index efdd34b..2aa4a3f 100644
--- a/src/cobalt/dom/source_buffer.h
+++ b/src/cobalt/dom/source_buffer.h
@@ -45,6 +45,7 @@
 #ifndef COBALT_DOM_SOURCE_BUFFER_H_
 #define COBALT_DOM_SOURCE_BUFFER_H_
 
+#include <limits>
 #include <memory>
 #include <string>
 #include <vector>
@@ -170,25 +171,27 @@
   const std::string id_;
   media::ChunkDemuxer* chunk_demuxer_;
   MediaSource* media_source_;
-  scoped_refptr<TrackDefaultList> track_defaults_;
+  scoped_refptr<TrackDefaultList> track_defaults_ = new TrackDefaultList(NULL);
   EventQueue* event_queue_;
 
-  SourceBufferAppendMode mode_;
-  bool updating_;
-  double timestamp_offset_;
+  SourceBufferAppendMode mode_ = kSourceBufferAppendModeSegments;
+  bool updating_ = false;
+  double timestamp_offset_ = 0;
   scoped_refptr<AudioTrackList> audio_tracks_;
   scoped_refptr<VideoTrackList> video_tracks_;
-  double append_window_start_;
-  double append_window_end_;
+  double append_window_start_ = 0;
+  double append_window_end_ = std::numeric_limits<double>::infinity();
 
   base::OneShotTimer append_timer_;
-  bool first_initialization_segment_received_;
-  std::vector<uint8_t> pending_append_data_;
-  size_t pending_append_data_offset_;
+  bool first_initialization_segment_received_ = false;
+  std::unique_ptr<uint8_t[]> pending_append_data_;
+  size_t pending_append_data_capacity_ = 0;
+  size_t pending_append_data_size_ = 0;
+  size_t pending_append_data_offset_ = 0;
 
   base::OneShotTimer remove_timer_;
-  double pending_remove_start_;
-  double pending_remove_end_;
+  double pending_remove_start_ = -1;
+  double pending_remove_end_ = -1;
 };
 
 }  // namespace dom
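
The SourceBuffer change above replaces the std::vector<uint8_t> append buffer
with a grow-only allocation tracked by separate capacity, size and offset
fields, so repeated appends reuse the same memory. The standalone sketch below
(not part of the patch; names are illustrative and plain std::memcpy stands in
for SbMemoryCopy) shows that reuse pattern in isolation:

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <memory>

class StagingBuffer {
 public:
  // Copies |size| bytes into the buffer, reallocating only when the current
  // capacity is too small, mirroring SourceBuffer::AppendBufferInternal().
  void Assign(const uint8_t* data, size_t size) {
    if (capacity_ < size) {
      // Release the old buffer before allocating the new one so both
      // allocations are never held at the same time.
      buffer_.reset();
      buffer_.reset(new uint8_t[size]);
      capacity_ = size;
    }
    if (size > 0) {
      std::memcpy(buffer_.get(), data, size);
    }
    size_ = size;
    offset_ = 0;
  }

  // Hands out at most |max_chunk| unread bytes and advances the read offset,
  // the way SourceBuffer::OnAppendTimer() drains the pending data.
  const uint8_t* NextChunk(size_t max_chunk, size_t* out_size) {
    *out_size = std::min(max_chunk, size_ - offset_);
    const uint8_t* chunk = buffer_.get() + offset_;
    offset_ += *out_size;
    return chunk;
  }

  bool Done() const { return offset_ >= size_; }

 private:
  std::unique_ptr<uint8_t[]> buffer_;
  size_t capacity_ = 0;  // Bytes allocated; never shrinks between appends.
  size_t size_ = 0;      // Bytes of the current append.
  size_t offset_ = 0;    // Bytes already handed out.
};

int main() {
  const uint8_t kData[10] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
  StagingBuffer staging;
  staging.Assign(kData, sizeof(kData));
  while (!staging.Done()) {
    size_t chunk_size = 0;
    staging.NextChunk(4, &chunk_size);  // Drain in chunks of up to 4 bytes.
    std::printf("appended %zu bytes\n", chunk_size);
  }
  return 0;
}
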
diff --git a/src/cobalt/dom_parser/dom_parser_test.gyp b/src/cobalt/dom_parser/dom_parser_test.gyp
index 6730d25..aedb612 100644
--- a/src/cobalt/dom_parser/dom_parser_test.gyp
+++ b/src/cobalt/dom_parser/dom_parser_test.gyp
@@ -28,10 +28,10 @@
         '<(DEPTH)/cobalt/dom/dom.gyp:dom',
         '<(DEPTH)/cobalt/dom/testing/dom_testing.gyp:dom_testing',
         '<(DEPTH)/cobalt/dom_parser/dom_parser.gyp:dom_parser',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
 
     {
diff --git a/src/cobalt/dom_parser/html_decoder.cc b/src/cobalt/dom_parser/html_decoder.cc
index 0adbef0..b662528 100644
--- a/src/cobalt/dom_parser/html_decoder.cc
+++ b/src/cobalt/dom_parser/html_decoder.cc
@@ -64,7 +64,10 @@
       require_csp_ == csp::kCSPOptional) {
     return loader::kLoadResponseContinue;
   } else {
-    DLOG(ERROR) << "Failure receiving Content Security Policy headers";
+    LOG(ERROR) << "Failure receiving Content Security Policy headers "
+                  "for URL: " << url_fetcher->GetURL() << ".";
+    LOG(ERROR) << "The server *must* send CSP headers or Cobalt will not "
+                  "load the page.";
     return loader::kLoadResponseAbort;
   }
 }
diff --git a/src/cobalt/extension/extension.gyp b/src/cobalt/extension/extension.gyp
index 340fbc9..bf63c0d 100644
--- a/src/cobalt/extension/extension.gyp
+++ b/src/cobalt/extension/extension.gyp
@@ -26,11 +26,11 @@
       'dependencies': [
         '<@(cobalt_platform_dependencies)',
         '<(DEPTH)/cobalt/base/base.gyp:base',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/starboard/starboard.gyp:starboard',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
     {
       'target_name': 'extension_test_deploy',
diff --git a/src/cobalt/extension/extension_test.cc b/src/cobalt/extension/extension_test.cc
index 044bdde..2615ba2 100644
--- a/src/cobalt/extension/extension_test.cc
+++ b/src/cobalt/extension/extension_test.cc
@@ -15,6 +15,7 @@
 #include <cmath>
 
 #include "cobalt/extension/graphics.h"
+#include "cobalt/extension/installation_manager.h"
 #include "cobalt/extension/platform_service.h"
 #include "starboard/system.h"
 #include "testing/gtest/include/gtest/gtest.h"
@@ -34,7 +35,8 @@
   }
 
   EXPECT_STREQ(extension_api->name, kExtensionName);
-  EXPECT_EQ(extension_api->version, 1) << "Invalid version";
+  EXPECT_TRUE(extension_api->version == 1 ||
+              extension_api->version == 2) << "Invalid version";
   EXPECT_TRUE(extension_api->Has != NULL);
   EXPECT_TRUE(extension_api->Open != NULL);
   EXPECT_TRUE(extension_api->Close != NULL);
@@ -57,19 +59,52 @@
   }
 
   EXPECT_STREQ(extension_api->name, kExtensionName);
-  EXPECT_EQ(extension_api->version, 1) << "Invalid version";
+  EXPECT_TRUE(extension_api->version == 1 ||
+              extension_api->version == 2) << "Invalid version";
   EXPECT_TRUE(extension_api->GetMaximumFrameIntervalInMilliseconds != NULL);
+  if (extension_api->version >= 2) {
+    EXPECT_TRUE(extension_api->GetMinimumFrameIntervalInMilliseconds != NULL);
+  }
 
   float maximum_frame_interval =
       extension_api->GetMaximumFrameIntervalInMilliseconds();
   EXPECT_FALSE(std::isnan(maximum_frame_interval));
 
+  if (extension_api->version >= 2) {
+    float minimum_frame_interval =
+        extension_api->GetMinimumFrameIntervalInMilliseconds();
+    EXPECT_GT(minimum_frame_interval, 0);
+  }
   const ExtensionApi* second_extension_api = static_cast<const ExtensionApi*>(
       SbSystemGetExtension(kExtensionName));
   EXPECT_EQ(second_extension_api, extension_api)
       << "Extension struct should be a singleton";
 }
 
+TEST(ExtensionTest, InstallationManager) {
+  typedef CobaltExtensionInstallationManagerApi ExtensionApi;
+  const char* kExtensionName = kCobaltExtensionInstallationManagerName;
+
+  const ExtensionApi* extension_api =
+      static_cast<const ExtensionApi*>(SbSystemGetExtension(kExtensionName));
+  if (!extension_api) {
+    return;
+  }
+
+  EXPECT_STREQ(extension_api->name, kExtensionName);
+  EXPECT_TRUE(extension_api->version == 1 ||
+              extension_api->version == 2) << "Invalid version";
+  EXPECT_TRUE(extension_api->GetCurrentInstallationIndex != NULL);
+  EXPECT_TRUE(extension_api->MarkInstallationSuccessful != NULL);
+  EXPECT_TRUE(extension_api->RequestRollForwardToInstallation != NULL);
+  EXPECT_TRUE(extension_api->GetInstallationPath != NULL);
+  EXPECT_TRUE(extension_api->SelectNewInstallationIndex != NULL);
+
+  const ExtensionApi* second_extension_api =
+      static_cast<const ExtensionApi*>(SbSystemGetExtension(kExtensionName));
+  EXPECT_EQ(second_extension_api, extension_api)
+      << "Extension struct should be a singleton";
+}
 }  // namespace extension
 }  // namespace cobalt
 #endif  // SB_API_VERSION >= 11
diff --git a/src/cobalt/extension/graphics.h b/src/cobalt/extension/graphics.h
index b68ac9f..b0e2bb8 100644
--- a/src/cobalt/extension/graphics.h
+++ b/src/cobalt/extension/graphics.h
@@ -48,6 +48,15 @@
   // precedence over this. For example, if the minimum frame time is 8ms and
   // the maximum frame interval is 0ms, then the renderer will target 125 fps.
   float (*GetMaximumFrameIntervalInMilliseconds)();
+
+  // The fields below this point were added in version 2 or later.
+
+  // Allow throttling of the frame rate. This is the minimum interval between
+  // frames, expressed in milliseconds, and can be a floating point number.
+  // Keep in mind that swapping frames may take some additional processing
+  // time, so it may be better to specify a slightly lower delay. For example,
+  // '33' instead of '33.33' for a 30 Hz refresh.
+  float (*GetMinimumFrameIntervalInMilliseconds)();
 } CobaltExtensionGraphicsApi;
 
 #ifdef __cplusplus
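
GetMinimumFrameIntervalInMilliseconds only exists from version 2 of the
graphics extension onward, so callers have to gate on the version field just as
extension_test.cc does above. A sketch of such a caller (not part of the patch;
it assumes the kCobaltExtensionGraphicsName constant declared earlier in
graphics.h):

#include "cobalt/extension/graphics.h"
#include "starboard/system.h"

float GetTargetMinimumFrameIntervalInMilliseconds() {
  auto* graphics_api = static_cast<const CobaltExtensionGraphicsApi*>(
      SbSystemGetExtension(kCobaltExtensionGraphicsName));
  // Fall back to "no throttling" when the extension is missing or predates
  // version 2, which introduced GetMinimumFrameIntervalInMilliseconds.
  if (!graphics_api || graphics_api->version < 2) {
    return 0.0f;
  }
  return graphics_api->GetMinimumFrameIntervalInMilliseconds();
}
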
diff --git a/src/cobalt/extension/installation_manager.h b/src/cobalt/extension/installation_manager.h
new file mode 100644
index 0000000..4a99cda
--- /dev/null
+++ b/src/cobalt/extension/installation_manager.h
@@ -0,0 +1,55 @@
+// Copyright 2019 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef COBALT_EXTENSION_INSTALLATION_MANAGER_H_
+#define COBALT_EXTENSION_INSTALLATION_MANAGER_H_
+
+#include <stdint.h>
+
+#include "starboard/configuration.h"
+
+#define IM_EXT_ERROR -1
+#define IM_EXT_SUCCESS 0
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#define kCobaltExtensionInstallationManagerName \
+  "dev.cobalt.extension.InstallationManager"
+
+typedef struct CobaltExtensionInstallationManagerApi {
+  // Name should be the string kCobaltExtensionInstallationManagerName.
+  // This helps to validate that the extension API is correct.
+  const char* name;
+
+  // This specifies the version of the API that is implemented.
+  uint32_t version;
+
+  // Installation Manager API wrapper.
+  // For more details, check:
+  //  starboard/loader_app/installation_manager.h
+  int (*GetCurrentInstallationIndex)();
+  int (*MarkInstallationSuccessful)(int installation_index);
+  int (*RequestRollForwardToInstallation)(int installation_index);
+  int (*GetInstallationPath)(int installation_index, char* path,
+                             int path_length);
+  int (*SelectNewInstallationIndex)();
+} CobaltExtensionInstallationManagerApi;
+
+#ifdef __cplusplus
+}  // extern "C"
+#endif
+
+#endif  // COBALT_EXTENSION_INSTALLATION_MANAGER_H_
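
For illustration only, a hypothetical caller of the new extension that uses
nothing beyond the entry points and constants declared in
installation_manager.h; the helper name is made up:

#include "cobalt/extension/installation_manager.h"
#include "starboard/system.h"

// Marks the currently running installation as good, returning false when the
// platform does not provide the extension or the call fails.
bool MarkCurrentInstallationSuccessful() {
  auto* im_api = static_cast<const CobaltExtensionInstallationManagerApi*>(
      SbSystemGetExtension(kCobaltExtensionInstallationManagerName));
  if (!im_api) {
    return false;
  }
  int index = im_api->GetCurrentInstallationIndex();
  if (index == IM_EXT_ERROR) {
    return false;
  }
  return im_api->MarkInstallationSuccessful(index) != IM_EXT_ERROR;
}
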
diff --git a/src/cobalt/input/input_device_manager_desktop.cc b/src/cobalt/input/input_device_manager_desktop.cc
index 85ccbb1..221b04f 100644
--- a/src/cobalt/input/input_device_manager_desktop.cc
+++ b/src/cobalt/input/input_device_manager_desktop.cc
@@ -241,8 +241,7 @@
   keypress_generator_filter_.HandleKeyboardEvent(type, keyboard_event);
 
   int32_t key_code_in_int32 = static_cast<int32_t>(key_code);
-  overlay_info::OverlayInfoRegistry::Register(
-      "input_manager:keydown", &key_code_in_int32, sizeof(key_code_in_int32));
+  overlay_info::OverlayInfoRegistry::Register("keydown", key_code_in_int32);
 }
 
 void InputDeviceManagerDesktop::HandlePointerEvent(
diff --git a/src/cobalt/layout/box.cc b/src/cobalt/layout/box.cc
index 4eff827..ac4ebed 100644
--- a/src/cobalt/layout/box.cc
+++ b/src/cobalt/layout/box.cc
@@ -132,11 +132,11 @@
 Box::~Box() { layout_stat_tracker_->OnBoxDestroyed(); }
 
 bool Box::IsPositioned() const {
-  return computed_style()->position() != cssom::KeywordValue::GetStatic();
+  return computed_style()->IsPositioned();
 }
 
 bool Box::IsTransformed() const {
-  return computed_style()->transform() != cssom::KeywordValue::GetNone();
+  return computed_style()->IsTransformed();
 }
 
 bool Box::IsAbsolutelyPositioned() const {
diff --git a/src/cobalt/layout/container_box.cc b/src/cobalt/layout/container_box.cc
index 3c9ef9e..acdddff 100644
--- a/src/cobalt/layout/container_box.cc
+++ b/src/cobalt/layout/container_box.cc
@@ -170,7 +170,8 @@
 }
 
 bool ContainerBox::IsContainingBlockForPositionAbsoluteElements() const {
-  return parent() == NULL || IsPositioned() || IsTransformed();
+  return parent() == NULL ||
+         computed_style()->IsContainingBlockForPositionAbsoluteElements();
 }
 
 bool ContainerBox::IsContainingBlockForPositionFixedElements() const {
diff --git a/src/cobalt/layout/layout.gyp b/src/cobalt/layout/layout.gyp
index bef1686..e608390 100644
--- a/src/cobalt/layout/layout.gyp
+++ b/src/cobalt/layout/layout.gyp
@@ -138,11 +138,11 @@
       'dependencies': [
         '<(DEPTH)/cobalt/css_parser/css_parser.gyp:css_parser',
         '<(DEPTH)/cobalt/layout/layout.gyp:layout_testing',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
         'layout',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
 
     {
diff --git a/src/cobalt/layout/line_box.cc b/src/cobalt/layout/line_box.cc
index a73a401..75b83c1 100644
--- a/src/cobalt/layout/line_box.cc
+++ b/src/cobalt/layout/line_box.cc
@@ -871,9 +871,8 @@
   // The start edge offset at which the ellipsis was eventually placed. This
   // will be set by TryPlaceEllipsisOrProcessPlacedEllipsis() within one of the
   // child boxes.
-  // NOTE: While this is is guaranteed to be set later, initializing it here
-  // keeps compilers from complaining about it being an uninitialized variable
-  // below.
+  // NOTE: While this is guaranteed to be set later, initializing it here keeps
+  // compilers from complaining about it being an uninitialized variable below.
   LayoutUnit placed_start_edge_offset;
 
   // Walk each box within the line in base direction order attempting to place
diff --git a/src/cobalt/layout_tests/layout_tests.gyp b/src/cobalt/layout_tests/layout_tests.gyp
index 35683b2..05b34a6 100644
--- a/src/cobalt/layout_tests/layout_tests.gyp
+++ b/src/cobalt/layout_tests/layout_tests.gyp
@@ -65,12 +65,12 @@
         '<(DEPTH)/cobalt/base/base.gyp:base',
         '<(DEPTH)/cobalt/browser/browser.gyp:browser',
         '<(DEPTH)/cobalt/renderer/renderer.gyp:render_tree_pixel_tester',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/url/url.gyp:url',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
         'layout_test_utils',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
 
     {
@@ -94,12 +94,12 @@
       'dependencies': [
         '<(DEPTH)/cobalt/base/base.gyp:base',
         '<(DEPTH)/cobalt/browser/browser.gyp:browser',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/url/url.gyp:url',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
         'layout_test_utils',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
 
     {
diff --git a/src/cobalt/layout_tests/testdata/csp/img-src.html b/src/cobalt/layout_tests/testdata/csp/img-src.html
index ad628af..20a59ea 100644
--- a/src/cobalt/layout_tests/testdata/csp/img-src.html
+++ b/src/cobalt/layout_tests/testdata/csp/img-src.html
@@ -24,6 +24,7 @@
     var div_ids = ["insecure", "secure"];
 
     var urls = ["http://" + image_base, "https://" + image_base];
+    var num_errors = 0;
     for (var i = 0; i < 2; i++) {
       var divname = div_ids[i];
       var url = urls[i];
@@ -36,8 +37,12 @@
         }
       }
       images[i].onerror = function() {
-        // NOTE: This won't be called due to an outstanding bug.
+        ++num_errors;
         console.log('Error loading: ' + this.src);
+        if (num_errors >= 2) {
+          console.log('More errors than expected!');
+          window.testRunner.notifyDone();
+        }
       }
       images[i].src = urls[i];
     }
diff --git a/src/cobalt/loader/loader.gyp b/src/cobalt/loader/loader.gyp
index 87cdade..d05d443 100644
--- a/src/cobalt/loader/loader.gyp
+++ b/src/cobalt/loader/loader.gyp
@@ -159,7 +159,6 @@
       'dependencies': [
         '<(DEPTH)/cobalt/base/base.gyp:base',
         '<(DEPTH)/cobalt/math/math.gyp:math',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
         '<(DEPTH)/third_party/ots/ots.gyp:ots',
@@ -167,6 +166,7 @@
         'loader_copy_test_data',
         '<@(cobalt_platform_dependencies)',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
 
     {
diff --git a/src/cobalt/loader/net_fetcher.cc b/src/cobalt/loader/net_fetcher.cc
index 3302418..bf5a097 100644
--- a/src/cobalt/loader/net_fetcher.cc
+++ b/src/cobalt/loader/net_fetcher.cc
@@ -172,11 +172,13 @@
     auto* download_data_writer =
         base::polymorphic_downcast<URLFetcherStringWriter*>(
             source->GetResponseWriter());
-    std::unique_ptr<std::string> data = download_data_writer->data();
-    if (!data->empty()) {
+    std::string data;
+    download_data_writer->GetAndResetData(&data);
+    if (!data.empty()) {
       DLOG(INFO) << "in OnURLFetchComplete data still has bytes: "
-                 << data->size();
-      handler()->OnReceivedPassed(this, std::move(data));
+                 << data.size();
+      handler()->OnReceivedPassed(
+          this, std::unique_ptr<std::string>(new std::string(std::move(data))));
     }
     handler()->OnDone(this);
   } else {
@@ -209,15 +211,17 @@
     auto* download_data_writer =
         base::polymorphic_downcast<URLFetcherStringWriter*>(
             source->GetResponseWriter());
-    std::unique_ptr<std::string> data = download_data_writer->data();
-    if (data->empty()) {
+    std::string data;
+    download_data_writer->GetAndResetData(&data);
+    if (data.empty()) {
       return;
     }
 #if defined(HANDLE_CORE_DUMP)
     net_fetcher_log.Get().IncrementFetchedBytes(
-        static_cast<int>(data->length()));
+        static_cast<int>(data.length()));
 #endif
-    handler()->OnReceivedPassed(this, std::move(data));
+    handler()->OnReceivedPassed(
+        this, std::unique_ptr<std::string>(new std::string(std::move(data))));
   }
 }
 
diff --git a/src/cobalt/loader/url_fetcher_string_writer.cc b/src/cobalt/loader/url_fetcher_string_writer.cc
index b583799..54ae81a 100644
--- a/src/cobalt/loader/url_fetcher_string_writer.cc
+++ b/src/cobalt/loader/url_fetcher_string_writer.cc
@@ -13,6 +13,8 @@
 // limitations under the License.
 
 #include "cobalt/loader/url_fetcher_string_writer.h"
+
+#include "base/logging.h"
 #include "net/base/net_errors.h"
 
 namespace cobalt {
@@ -24,6 +26,10 @@
 //   DCHECK(consumer_task_runner);
 // }
 
+namespace {
+const int64_t kPreAllocateThreshold = 64 * 1024;
+}  // namespace
+
 URLFetcherStringWriter::URLFetcherStringWriter() = default;
 
 URLFetcherStringWriter::~URLFetcherStringWriter() = default;
@@ -33,22 +39,56 @@
   return net::OK;
 }
 
-std::unique_ptr<std::string> URLFetcherStringWriter::data() {
+void URLFetcherStringWriter::OnResponseStarted(int64_t content_length) {
   base::AutoLock auto_lock(lock_);
-  if (!data_) {
-    return std::make_unique<std::string>();
+
+  if (content_length >= 0) {
+    content_length_ = content_length;
   }
-  return std::move(data_);
+}
+
+bool URLFetcherStringWriter::HasData() const {
+  base::AutoLock auto_lock(lock_);
+  return !data_.empty();
+}
+
+void URLFetcherStringWriter::GetAndResetData(std::string* data) {
+  DCHECK(data);
+
+  std::string empty;
+  data->swap(empty);
+
+  base::AutoLock auto_lock(lock_);
+  data_.swap(*data);
 }
 
 int URLFetcherStringWriter::Write(net::IOBuffer* buffer, int num_bytes,
                                   net::CompletionOnceCallback /*callback*/) {
   base::AutoLock auto_lock(lock_);
-  if (!data_) {
-    data_ = std::make_unique<std::string>();
+
+  if (content_offset_ == 0 && num_bytes <= content_length_) {
+    // Pre-allocate the whole buffer for small downloads, in the hope that all
+    // data can be downloaded before GetAndResetData() is called.
+    if (content_length_ <= kPreAllocateThreshold) {
+      data_.reserve(content_length_);
+    } else {
+      data_.reserve(kPreAllocateThreshold);
+    }
   }
 
-  data_->append(buffer->data(), num_bytes);
+  if (content_length_ > 0 && content_length_ > content_offset_ &&
+      data_.size() + num_bytes > data_.capacity()) {
+    // There is not enough memory allocated, and std::string is going to double
+    // the allocation.  So a body of "1M + 1" bytes may end up in a 2M
+    // allocation.  Try to reserve the proper size to avoid this.
+    auto content_remaining = content_length_ - content_offset_;
+    if (data_.size() + content_remaining < data_.capacity() * 2) {
+      data_.reserve(data_.size() + content_remaining);
+    }
+  }
+
+  data_.append(buffer->data(), num_bytes);
+  content_offset_ += num_bytes;
   // consumer_task_runner_->PostTask(FROM_HERE,
   // base::Bind((on_write_callback_.Run), std::move(data)));
   return num_bytes;
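
The reservation logic above exists to sidestep std::string's doubling growth
when the Content-Length is known. A small standalone demonstration of the
effect being avoided (not part of the patch; exact capacities are
implementation-defined):

#include <iostream>
#include <string>

int main() {
  const std::string chunk(16 * 1024, 'x');  // One 16 KB network read.

  std::string grown;     // No reserve(): relies on exponential growth.
  std::string reserved;  // Mirrors the reserve() added in Write().
  reserved.reserve(1024 * 1024 + 1);  // Known Content-Length of "1M + 1".

  for (int i = 0; i < 64; ++i) {  // 64 * 16 KB = 1 MB of body data.
    grown.append(chunk);
    reserved.append(chunk);
  }
  grown.push_back('x');  // The final "+ 1" byte.
  reserved.push_back('x');

  // With typical doubling growth |grown| ends up near 2 MB for a 1 MB + 1
  // byte body, while |reserved| stays close to the content length.
  std::cout << "no reserve:   capacity " << grown.capacity() << '\n'
            << "with reserve: capacity " << reserved.capacity() << '\n';
  return 0;
}
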
diff --git a/src/cobalt/loader/url_fetcher_string_writer.h b/src/cobalt/loader/url_fetcher_string_writer.h
index 3bab4a6..72d87e8 100644
--- a/src/cobalt/loader/url_fetcher_string_writer.h
+++ b/src/cobalt/loader/url_fetcher_string_writer.h
@@ -35,19 +35,22 @@
   URLFetcherStringWriter();
   ~URLFetcherStringWriter() override;
 
-  std::unique_ptr<std::string> data();
+  bool HasData() const;
+  void GetAndResetData(std::string* data);
 
   // URLFetcherResponseWriter overrides:
   int Initialize(net::CompletionOnceCallback callback) override;
+  void OnResponseStarted(int64_t content_length) override;
   int Write(net::IOBuffer* buffer, int num_bytes,
             net::CompletionOnceCallback callback) override;
   int Finish(int net_error, net::CompletionOnceCallback callback) override;
 
  private:
-  // This class can be accessed by both network thread and MainWebModule
-  // thread.
-  base::Lock lock_;
-  std::unique_ptr<std::string> data_;
+  // This class can be accessed by both the network thread and the
+  // MainWebModule thread.
+  mutable base::Lock lock_;
+  int64_t content_length_ = -1;
+  int64_t content_offset_ = 0;
+  std::string data_;
   // OnWriteCallback on_write_callback_;
   // base::TaskRunner* consumer_task_runner_;
 
diff --git a/src/cobalt/math/math.gyp b/src/cobalt/math/math.gyp
index 5fc0326..344ec57 100644
--- a/src/cobalt/math/math.gyp
+++ b/src/cobalt/math/math.gyp
@@ -90,10 +90,10 @@
       'dependencies': [
         '<(DEPTH)/base/base.gyp:base',
         '<(DEPTH)/cobalt/base/base.gyp:base',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gtest.gyp:gtest',
         'math',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
     {
       'target_name': 'math_test_deploy',
diff --git a/src/cobalt/media/base/drm_system.cc b/src/cobalt/media/base/drm_system.cc
index 8ad4118..b94c858 100644
--- a/src/cobalt/media/base/drm_system.cc
+++ b/src/cobalt/media/base/drm_system.cc
@@ -28,8 +28,7 @@
 DECLARE_INSTANCE_COUNTER(DrmSystem);
 
 DrmSystem::Session::Session(
-    DrmSystem* drm_system
-    ,
+    DrmSystem* drm_system,
     SessionUpdateKeyStatusesCallback update_key_statuses_callback
 #if SB_HAS(DRM_SESSION_CLOSED)
     ,
@@ -41,7 +40,8 @@
 #if SB_HAS(DRM_SESSION_CLOSED)
       session_closed_callback_(session_closed_callback),
 #endif  // SB_HAS(DRM_SESSION_CLOSED)
-      closed_(false) {
+      closed_(false),
+      weak_factory_(this) {
   DCHECK(!update_key_statuses_callback_.is_null());
 #if SB_HAS(DRM_SESSION_CLOSED)
   DCHECK(!session_closed_callback_.is_null());
@@ -69,7 +69,7 @@
   update_request_generated_callback_ =
       session_update_request_generated_callback;
   drm_system_->GenerateSessionUpdateRequest(
-      this, type, init_data, init_data_length,
+      weak_factory_.GetWeakPtr(), type, init_data, init_data_length,
       session_update_request_generated_callback,
       session_update_request_did_not_generate_callback);
 }
@@ -169,8 +169,8 @@
 #endif  // SB_API_VERSION >= 10
 
 void DrmSystem::GenerateSessionUpdateRequest(
-    Session* session, const std::string& type, const uint8_t* init_data,
-    int init_data_length,
+    const base::WeakPtr<Session>& session, const std::string& type,
+    const uint8_t* init_data, int init_data_length,
     const SessionUpdateRequestGeneratedCallback&
         session_update_request_generated_callback,
     const SessionUpdateRequestDidNotGenerateCallback&
@@ -221,6 +221,8 @@
     SessionTicketAndOptionalId ticket_and_optional_id, SbDrmStatus status,
     SbDrmSessionRequestType type, const std::string& error_message,
     std::unique_ptr<uint8[]> message, int message_size) {
+  DCHECK(message_loop_->BelongsToCurrentThread());
+
   int ticket = ticket_and_optional_id.ticket;
   const base::Optional<std::string>& session_id = ticket_and_optional_id.id;
   if (SbDrmTicketIsValid(ticket)) {
@@ -237,21 +239,25 @@
     const SessionUpdateRequest& session_update_request =
         session_update_request_iterator->second;
 
-    // Interpret the result.
-    if (session_id) {
-      // Successful request generation.
+    // The DrmSystem::Session may already have been released, so check the
+    // weak pointer before using it.
+    if (session_update_request.session &&
+        !session_update_request.session->is_closed()) {
+      // Interpret the result.
+      if (session_id) {
+        // Successful request generation.
 
-      // Enable session lookup by id which is used by spontaneous callbacks.
-      session_update_request.session->set_id(*session_id);
-      id_to_session_map_.insert(
-          std::make_pair(*session_id, session_update_request.session));
+        // Enable session lookup by id which is used by spontaneous callbacks.
+        session_update_request.session->set_id(*session_id);
+        id_to_session_map_.insert(
+            std::make_pair(*session_id, session_update_request.session));
 
-      session_update_request.generated_callback.Run(type, std::move(message),
-                                                    message_size);
-    } else {
-      // Failure during request generation.
-      session_update_request.did_not_generate_callback.Run(status,
-                                                           error_message);
+        session_update_request.generated_callback.Run(type, std::move(message),
+                                                      message_size);
+      } else {
+        // Failure during request generation.
+        session_update_request.did_not_generate_callback.Run(status,
+                                                             error_message);
+      }
     }
 
     // Sweep the context of |GenerateSessionUpdateRequest| once license updated.
@@ -274,15 +280,19 @@
       LOG(ERROR) << "Unknown session id: " << *session_id << ".";
       return;
     }
-    Session* session = session_iterator->second;
 
-    session->update_request_generated_callback().Run(type, std::move(message),
-                                                     message_size);
+    // The DrmSystem::Session may already have been released, so check the
+    // weak pointer before using it.
+    if (session_iterator->second) {
+      session_iterator->second->update_request_generated_callback().Run(
+          type, std::move(message), message_size);
+    }
   }
 }
 
 void DrmSystem::OnSessionUpdated(int ticket, SbDrmStatus status,
                                  const std::string& error_message) {
+  DCHECK(message_loop_->BelongsToCurrentThread());
+
   // Restore the context of |UpdateSession|.
   TicketToSessionUpdateMap::iterator session_update_iterator =
       ticket_to_session_update_map_.find(ticket);
@@ -306,6 +316,8 @@
 void DrmSystem::OnSessionKeyStatusChanged(
     const std::string& session_id, const std::vector<std::string>& key_ids,
     const std::vector<SbDrmKeyStatus>& key_statuses) {
+  DCHECK(message_loop_->BelongsToCurrentThread());
+
   // Find the session by ID.
   IdToSessionMap::iterator session_iterator =
       id_to_session_map_.find(session_id);
@@ -313,13 +325,18 @@
     LOG(ERROR) << "Unknown session id: " << session_id << ".";
     return;
   }
-  Session* session = session_iterator->second;
 
-  session->update_key_statuses_callback().Run(key_ids, key_statuses);
+  // The DrmSystem::Session may already have been released, so check the
+  // weak pointer before using it.
+  if (session_iterator->second) {
+    session_iterator->second->update_key_statuses_callback().Run(key_ids,
+                                                                 key_statuses);
+  }
 }
 
 #if SB_HAS(DRM_SESSION_CLOSED)
 void DrmSystem::OnSessionClosed(const std::string& session_id) {
+  DCHECK(message_loop_->BelongsToCurrentThread());
+
   // Find the session by ID.
   IdToSessionMap::iterator session_iterator =
       id_to_session_map_.find(session_id);
@@ -327,9 +344,11 @@
     LOG(ERROR) << "Unknown session id: " << session_id << ".";
     return;
   }
-  Session* session = session_iterator->second;
 
-  session->session_closed_callback().Run();
+  // The DrmSystem::Session may already have been released, so check the
+  // weak pointer before using it.
+  if (session_iterator->second) {
+    session_iterator->second->session_closed_callback().Run();
+  }
   id_to_session_map_.erase(session_iterator);
 }
 #endif  // SB_HAS(DRM_SESSION_CLOSED)
@@ -337,6 +356,8 @@
 #if SB_API_VERSION >= 10
 void DrmSystem::OnServerCertificateUpdated(int ticket, SbDrmStatus status,
                                            const std::string& error_message) {
+  DCHECK(message_loop_->BelongsToCurrentThread());
+
   auto iter = ticket_to_server_certificate_updated_map_.find(ticket);
   if (iter == ticket_to_server_certificate_updated_map_.end()) {
     LOG(ERROR) << "Unknown ticket: " << ticket << ".";
diff --git a/src/cobalt/media/base/drm_system.h b/src/cobalt/media/base/drm_system.h
index d4290f8..a80bf34 100644
--- a/src/cobalt/media/base/drm_system.h
+++ b/src/cobalt/media/base/drm_system.h
@@ -132,6 +132,8 @@
     // Supports spontaneous invocations of |SbDrmSessionUpdateRequestFunc|.
     SessionUpdateRequestGeneratedCallback update_request_generated_callback_;
 
+    base::WeakPtrFactory<Session> weak_factory_;
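+    // NOTE: Keep |weak_factory_| as the last data member so its weak pointers
+    // are invalidated before the rest of the session is destroyed.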
+
     friend class DrmSystem;
 
     DISALLOW_COPY_AND_ASSIGN(Session);
@@ -160,14 +162,14 @@
  private:
   // Stores context of |GenerateSessionUpdateRequest|.
   struct SessionUpdateRequest {
-    Session* session;
+    base::WeakPtr<Session> session;
     SessionUpdateRequestGeneratedCallback generated_callback;
     SessionUpdateRequestDidNotGenerateCallback did_not_generate_callback;
   };
   typedef base::hash_map<int, SessionUpdateRequest>
       TicketToSessionUpdateRequestMap;
 
-  typedef base::hash_map<std::string, Session*> IdToSessionMap;
+  typedef base::hash_map<std::string, base::WeakPtr<Session>> IdToSessionMap;
 
   typedef base::hash_map<int, ServerCertificateUpdatedCallback>
       TicketToServerCertificateUpdatedMap;
@@ -188,8 +190,8 @@
 
   // Private API for |Session|.
   void GenerateSessionUpdateRequest(
-      Session* session, const std::string& type, const uint8_t* init_data,
-      int init_data_length,
+      const base::WeakPtr<Session>& session, const std::string& type,
+      const uint8_t* init_data, int init_data_length,
       const SessionUpdateRequestGeneratedCallback&
           session_update_request_generated_callback,
       const SessionUpdateRequestDidNotGenerateCallback&
diff --git a/src/cobalt/media/base/starboard_player.cc b/src/cobalt/media/base/starboard_player.cc
index c41e9ac..d0c7e92 100644
--- a/src/cobalt/media/base/starboard_player.cc
+++ b/src/cobalt/media/base/starboard_player.cc
@@ -268,6 +268,7 @@
   DCHECK(task_runner_->BelongsToCurrentThread());
 
   decoder_buffer_cache_.ClearAll();
+  seek_pending_ = false;
 
   if (state_ == kSuspended) {
     preroll_timestamp_ = time;
@@ -290,7 +291,6 @@
   SbPlayerSeek2(player_, time.InMicroseconds(), ticket_);
 #endif  // SB_API_VERSION < 10
 
-  seek_pending_ = false;
   SbPlayerSetPlaybackRate(player_, playback_rate_);
 }
 
@@ -434,13 +434,14 @@
 
   SbPlayerSetPlaybackRate(player_, 0.0);
 
+  set_bounds_helper_->SetPlayer(NULL);
+
   base::AutoLock auto_lock(lock_);
   GetInfo_Locked(&cached_video_frames_decoded_, &cached_video_frames_dropped_,
                  &preroll_timestamp_);
 
   state_ = kSuspended;
 
-  set_bounds_helper_->SetPlayer(NULL);
   video_frame_provider_->SetOutputMode(VideoFrameProvider::kOutputModeInvalid);
   video_frame_provider_->ResetGetCurrentSbDecodeTargetFunction();
 
diff --git a/src/cobalt/media/fetcher_buffered_data_source.cc b/src/cobalt/media/fetcher_buffered_data_source.cc
index cbf6f41..254f386 100644
--- a/src/cobalt/media/fetcher_buffered_data_source.cc
+++ b/src/cobalt/media/fetcher_buffered_data_source.cc
@@ -211,12 +211,13 @@
   auto* download_data_writer =
       base::polymorphic_downcast<loader::URLFetcherStringWriter*>(
           source->GetResponseWriter());
-  std::unique_ptr<std::string> download_data = download_data_writer->data();
-  size_t size = download_data->size();
+  std::string downloaded_data;
+  download_data_writer->GetAndResetData(&downloaded_data);
+  size_t size = downloaded_data.size();
   if (size == 0) {
     return;
   }
-  const uint8* data = reinterpret_cast<const uint8*>(download_data->data());
+  const uint8* data = reinterpret_cast<const uint8*>(downloaded_data.data());
   base::AutoLock auto_lock(lock_);
 
   if (fetcher_.get() != source || error_occured_) {
diff --git a/src/cobalt/media/fetcher_buffered_data_source.h b/src/cobalt/media/fetcher_buffered_data_source.h
index 6eca602..66fef53 100644
--- a/src/cobalt/media/fetcher_buffered_data_source.h
+++ b/src/cobalt/media/fetcher_buffered_data_source.h
@@ -28,6 +28,7 @@
 #include "cobalt/csp/content_security_policy.h"
 #include "cobalt/loader/fetcher.h"
 #include "cobalt/loader/origin.h"
+#include "cobalt/loader/url_fetcher_string_writer.h"
 #include "cobalt/media/player/buffered_data_source.h"
 #include "cobalt/network/network_module.h"
 #include "net/url_request/url_fetcher.h"
diff --git a/src/cobalt/media/media.gyp b/src/cobalt/media/media.gyp
index 7cd8793..38b157f 100644
--- a/src/cobalt/media/media.gyp
+++ b/src/cobalt/media/media.gyp
@@ -232,7 +232,6 @@
       'dependencies': [
         'media',
         '<(DEPTH)/cobalt/base/base.gyp:base',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
       ],
@@ -241,6 +240,7 @@
         'filters/shell_mp4_map_unittest.cc',
         'filters/shell_rbsp_stream_unittest.cc',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
     {
       'target_name': 'media_test_deploy',
diff --git a/src/cobalt/media/sandbox/fuzzer_app.cc b/src/cobalt/media/sandbox/fuzzer_app.cc
index 9a9d0c2..18f58ae 100644
--- a/src/cobalt/media/sandbox/fuzzer_app.cc
+++ b/src/cobalt/media/sandbox/fuzzer_app.cc
@@ -113,11 +113,21 @@
     return;
   }
 
+#if SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+  std::vector<char> entry(SB_FILE_MAX_NAME);
+
+  while (SbDirectoryGetNext(directory, entry.data(), entry.size())) {
+    std::string file_name = path_name + SB_FILE_SEP_STRING + entry.data();
+    AddFile(file_name, min_ratio, max_ratio, initial_seed);
+  }
+#else   // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
   SbDirectoryEntry entry;
+
   while (SbDirectoryGetNext(directory, &entry)) {
     std::string file_name = path_name + SB_FILE_SEP_STRING + entry.name;
     AddFile(file_name, min_ratio, max_ratio, initial_seed);
   }
+#endif  // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
 
   SbDirectoryClose(directory);
 }
diff --git a/src/cobalt/media_capture/media_capture_test.gyp b/src/cobalt/media_capture/media_capture_test.gyp
index a28069c..2d330ab 100644
--- a/src/cobalt/media_capture/media_capture_test.gyp
+++ b/src/cobalt/media_capture/media_capture_test.gyp
@@ -35,10 +35,10 @@
         # For Fake Microphone.
         '<(DEPTH)/cobalt/speech/speech.gyp:speech',
 
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
 
     {
diff --git a/src/cobalt/media_session/media_session_test.gyp b/src/cobalt/media_session/media_session_test.gyp
index aa135c1..9c2c8d7 100644
--- a/src/cobalt/media_session/media_session_test.gyp
+++ b/src/cobalt/media_session/media_session_test.gyp
@@ -24,10 +24,10 @@
         '<(DEPTH)/cobalt/media_session/media_session.gyp:media_session',
         '<(DEPTH)/cobalt/base/base.gyp:base',
         '<(DEPTH)/cobalt/browser/browser.gyp:browser',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
     {
       'target_name': 'media_session_test_deploy',
diff --git a/src/cobalt/media_stream/media_stream_test.gyp b/src/cobalt/media_stream/media_stream_test.gyp
index ac57801..a7cc5b1 100644
--- a/src/cobalt/media_stream/media_stream_test.gyp
+++ b/src/cobalt/media_stream/media_stream_test.gyp
@@ -32,10 +32,10 @@
       'dependencies': [
         '<(DEPTH)/cobalt/dom/dom.gyp:dom',
         '<(DEPTH)/cobalt/media_stream/media_stream.gyp:media_stream',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
 
     {
diff --git a/src/cobalt/network/local_network.cc b/src/cobalt/network/local_network.cc
index 862eb19..07e5765 100644
--- a/src/cobalt/network/local_network.cc
+++ b/src/cobalt/network/local_network.cc
@@ -83,7 +83,7 @@
       return true;
     }
     if ((ip.address[0] == 172) &&
-        ((ip.address[1] >= 16) || (ip.address[1] <= 31))) {
+        ((ip.address[1] >= 16) && (ip.address[1] <= 31))) {
       // IP is in range 172.16.0.0 - 172.31.255.255 (172.16/12 prefix).
       return true;
     }
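
The local_network.cc hunk above fixes the 172.16/12 private-range check: with `||` the condition held for every second octet, so all 172.x.x.x addresses were treated as local. A quick check with example octets (illustrative comments only, not part of the patch):

// second octet 15:  (15 >= 16) || (15 <= 31) -> true   (wrongly local)
//                   (15 >= 16) && (15 <= 31) -> false  (correct)
// second octet 20:  (20 >= 16) && (20 <= 31) -> true   (correctly local)
// second octet 99:  (99 >= 16) || (99 <= 31) -> true   (wrongly local)
//                   (99 >= 16) && (99 <= 31) -> false  (correct)
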
diff --git a/src/cobalt/network/network.gyp b/src/cobalt/network/network.gyp
index 08e7036..c06c9e9 100644
--- a/src/cobalt/network/network.gyp
+++ b/src/cobalt/network/network.gyp
@@ -116,11 +116,11 @@
       ],
       'dependencies': [
         '<(DEPTH)/cobalt/base/base.gyp:base',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
         'network',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
     {
       'target_name': 'network_test_deploy',
diff --git a/src/cobalt/overlay_info/overlay_info.gyp b/src/cobalt/overlay_info/overlay_info.gyp
index 78c4b0f..9aab7f5 100644
--- a/src/cobalt/overlay_info/overlay_info.gyp
+++ b/src/cobalt/overlay_info/overlay_info.gyp
@@ -42,9 +42,9 @@
       ],
       'dependencies': [
         '<(DEPTH)/cobalt/overlay_info/overlay_info.gyp:overlay_info',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gtest.gyp:gtest',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
 
     {
diff --git a/src/cobalt/overlay_info/overlay_info_registry.cc b/src/cobalt/overlay_info/overlay_info_registry.cc
index 6b01448..1341374 100644
--- a/src/cobalt/overlay_info/overlay_info_registry.cc
+++ b/src/cobalt/overlay_info/overlay_info_registry.cc
@@ -32,8 +32,7 @@
   void Disable();
 
   void Register(const char* category, const char* str);
-  void Register(const char* category, const void* data, size_t data_size);
-  void RetrieveAndClear(std::vector<uint8_t>* infos);
+  void RetrieveAndClear(std::string* infos);
 
  private:
   // Reserve enough data for |infos_| to avoid extra allocations.
@@ -48,7 +47,7 @@
 
   bool enabled_ = true;
   starboard::Mutex mutex_;
-  std::vector<uint8_t> infos_;
+  std::string infos_;
 };
 
 // static
@@ -64,40 +63,25 @@
   infos_.clear();
 }
 
-void OverlayInfoRegistryImpl::Register(const char* category, const char* str) {
-  auto length = SbStringGetLength(str);
-  Register(category, reinterpret_cast<const uint8_t*>(str), length);
-}
-
-void OverlayInfoRegistryImpl::Register(const char* category, const void* data,
-                                       size_t data_size) {
+void OverlayInfoRegistryImpl::Register(const char* category, const char* data) {
   DCHECK(SbStringFindCharacter(
              category, static_cast<char>(OverlayInfoRegistry::kDelimiter)) ==
          NULL)
       << "Category " << category
       << " cannot contain the delimiter:" << OverlayInfoRegistry::kDelimiter;
-  auto category_size = SbStringGetLength(category);
-  auto total_size = category_size + 1 + data_size;
-
-  DCHECK_GT(category_size, 0u);
-  // Use |kMaxSizeOfData + 0| to avoid link error caused by DCHECK_LE.
-  DCHECK_LE(total_size, OverlayInfoRegistry::kMaxSizeOfData + 0);
-
-  starboard::ScopedLock scoped_lock(mutex_);
-  // Use |kMaxNumberOfPendingOverlayInfo + 0| to avoid link error caused by
-  // DCHECK_LE.
-  DCHECK_LE(infos_.size() + total_size,
-            OverlayInfoRegistry::kMaxNumberOfPendingOverlayInfo + 0);
-  if (enabled_) {
-    infos_.push_back(static_cast<uint8_t>(total_size));
-    infos_.insert(infos_.end(), category, category + category_size);
-    infos_.push_back(kDelimiter);
-    infos_.insert(infos_.end(), static_cast<const uint8_t*>(data),
-                  static_cast<const uint8_t*>(data) + data_size);
+  DCHECK(SbStringFindCharacter(
+             data, static_cast<char>(OverlayInfoRegistry::kDelimiter)) == NULL)
+      << "Data " << data
+      << " cannot contain the delimiter:" << OverlayInfoRegistry::kDelimiter;
+  if (!infos_.empty()) {
+    infos_ += kDelimiter;
   }
+  infos_ += category;
+  infos_ += kDelimiter;
+  infos_ += data;
 }
 
-void OverlayInfoRegistryImpl::RetrieveAndClear(std::vector<uint8_t>* infos) {
+void OverlayInfoRegistryImpl::RetrieveAndClear(std::string* infos) {
   DCHECK(infos);
 
   starboard::ScopedLock scoped_lock(mutex_);
@@ -114,16 +98,30 @@
   OverlayInfoRegistryImpl::GetInstance()->Disable();
 }
 
-void OverlayInfoRegistry::Register(const char* category, const char* str) {
-  OverlayInfoRegistryImpl::GetInstance()->Register(category, str);
+void OverlayInfoRegistry::Register(const char* category, const char* data) {
+  OverlayInfoRegistryImpl::GetInstance()->Register(category, data);
 }
 
 void OverlayInfoRegistry::Register(const char* category, const void* data,
                                    size_t data_size) {
-  OverlayInfoRegistryImpl::GetInstance()->Register(category, data, data_size);
+  const char kHex[] = "0123456789abcdef";
+
+  const uint8_t* data_as_bytes = static_cast<const uint8_t*>(data);
+  std::string data_in_hex;
+
+  data_in_hex.reserve(data_size * 2);
+
+  while (data_size > 0) {
+    data_in_hex += kHex[*data_as_bytes / 16];
+    data_in_hex += kHex[*data_as_bytes % 16];
+    ++data_as_bytes;
+    --data_size;
+  }
+  OverlayInfoRegistryImpl::GetInstance()->Register(category,
+                                                   data_in_hex.c_str());
 }
 
-void OverlayInfoRegistry::RetrieveAndClear(std::vector<uint8_t>* infos) {
+void OverlayInfoRegistry::RetrieveAndClear(std::string* infos) {
   OverlayInfoRegistryImpl::GetInstance()->RetrieveAndClear(infos);
 }
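
The binary Register(category, data, data_size) overload above now hex-encodes the payload and forwards it to the string overload, so the stored string never contains raw bytes or the ',' delimiter. For illustration only, registering a three-byte payload (assumes the overlay_info_registry.h header below is included; not part of the patch):

// Illustrative only, not part of the patch.
const uint8_t payload[] = {0x00, 0x1A, 0xFF};
OverlayInfoRegistry::Register("bin", payload, sizeof(payload));

std::string infos;
OverlayInfoRegistry::RetrieveAndClear(&infos);
// |infos| is now "bin,001aff": two lowercase hex digits per input byte.
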
 
diff --git a/src/cobalt/overlay_info/overlay_info_registry.h b/src/cobalt/overlay_info/overlay_info_registry.h
index ac45190..7de2a28 100644
--- a/src/cobalt/overlay_info/overlay_info_registry.h
+++ b/src/cobalt/overlay_info/overlay_info_registry.h
@@ -15,34 +15,27 @@
 #ifndef COBALT_OVERLAY_INFO_OVERLAY_INFO_REGISTRY_H_
 #define COBALT_OVERLAY_INFO_OVERLAY_INFO_REGISTRY_H_
 
-#include <vector>
+#include <sstream>
+#include <string>
 
 #include "starboard/types.h"
 
 namespace cobalt {
 namespace overlay_info {
 
-// This class allows register of arbitrary overlay information in the form of
-// string or binary data from anywhere inside Cobalt.  It also allows a consumer
-// to retrieve and clear all registered info, such info will be displayed as
-// overlay.
+// This class allows registration of arbitrary overlay information from
+// anywhere inside Cobalt.  It also allows a consumer to retrieve and clear
+// all registered info, which will be displayed as an overlay.  The data is
+// stored internally as a std::string.
 // The class is thread safe and all its methods can be accessed from any thread.
 // On average it expects to have less than 10 such info registered per frame.
 //
-// The binary data or string are stored in the following format:
-// [<one byte size> <size bytes data>]*
-// Each data entry contains a string category and some binary data, separated by
-// the delimiter '<'.
-// For example, the overlay infos ("media", "pts"), ("renderer", "fps\x60"), and
-// ("dom", "keydown") will be stored as
-//   '\x09', 'm', 'e', 'd', 'i', 'a', '<', 'p', 't', 's',
-//   '\x0d', 'r', 'e', 'n', 'd', 'e', 'r', 'e', 'r', '<', 'f', 'p', 's', '\x60',
-//   '\x0b', 'd', 'o', 'm', '<', 'k', 'e', 'y', 'd', 'o', 'w', 'n',
-// and the size of the vector will be 36.  Note that C strings won't be NULL
-// terminated and their sizes are calculated by the size of the data.
+// The info is stored in the following format:
+//   name0,value0,name1,value1,...
+// Binary data is converted into a hex string before being stored.
 class OverlayInfoRegistry {
  public:
-  static const uint8_t kDelimiter = '<';  // ':' doesn't work with some scanners
+  static const char kDelimiter = ',';  // ':' doesn't work with some scanners
   // The size of category and data combined should be less than or equal to
   // kMaxSizeOfData - 1.  The extra room of one byte is used by the delimiter.
   static const size_t kMaxSizeOfData = 255;
@@ -52,14 +45,20 @@
 
   static void Disable();
 
-  // |category| cannot contain ':'.  The sum of size of |category| and |string|
-  // cannot exceed 254.  It leaves room for the delimiter.
-  static void Register(const char* category, const char* str);
-  // |category| cannot contain ':'.  The sum of size of |category| and |data|
-  // cannot exceed 254.  It leaves room for the delimiter.
+  // Neither |category| nor |data| may contain the delimiter.
+  static void Register(const char* category, const char* data);
+  // Neither |category| nor |data| may contain the delimiter.
   static void Register(const char* category, const void* data,
                        size_t data_size);
-  static void RetrieveAndClear(std::vector<uint8_t>* infos);
+  // Neither |category| nor |data| may contain the delimiter.
+  template <typename T>
+  static void Register(const char* category, T data) {
+    std::stringstream ss;
+    ss << data;
+    Register(category, ss.str().c_str());
+  }
+
+  static void RetrieveAndClear(std::string* infos);
 };
 
 }  // namespace overlay_info
diff --git a/src/cobalt/overlay_info/overlay_info_registry_test.cc b/src/cobalt/overlay_info/overlay_info_registry_test.cc
index ce031ee..0b5d97e 100644
--- a/src/cobalt/overlay_info/overlay_info_registry_test.cc
+++ b/src/cobalt/overlay_info/overlay_info_registry_test.cc
@@ -20,6 +20,7 @@
 #include <vector>
 
 #include "base/logging.h"
+#include "base/strings/string_split.h"
 #include "starboard/memory.h"
 #include "starboard/types.h"
 #include "testing/gtest/include/gtest/gtest.h"
@@ -29,56 +30,36 @@
 
 namespace {
 
-typedef std::pair<std::string, std::vector<uint8_t>> ValuePair;
+typedef std::pair<std::string, std::string> ValuePair;
 
 // See comment of OverlayInfoRegistry for format of |info|.
-std::vector<ValuePair> ParseOverlayInfo(std::vector<uint8_t> info) {
-  std::vector<ValuePair> parsed_infos;
+bool ParseOverlayInfo(std::string infos, std::vector<ValuePair>* values) {
+  CHECK(values);
 
-  while (!info.empty()) {
-    // Parse the size
-    size_t size = info[0];
-    info.erase(info.begin());
+  const char delimiter[] = {OverlayInfoRegistry::kDelimiter, 0};
 
-    CHECK_LE(size, info.size());
-
-    // Parse the category name
-    const auto kDelimiter = OverlayInfoRegistry::kDelimiter;
-    auto iter = std::find(info.begin(), info.end(), kDelimiter);
-    CHECK(iter != info.end());
-
-    const auto category_length = iter - info.begin();
-    CHECK_LE(static_cast<size_t>(category_length), size);
-
-    std::string category(
-        reinterpret_cast<char*>(info.data()),
-        reinterpret_cast<char*>(info.data()) + category_length);
-
-    // Parse the data
-    std::vector<uint8_t> data(info.begin() + category_length + 1,
-                              info.begin() + size);
-    info.erase(info.begin(), info.begin() + size);
-
-    parsed_infos.push_back(std::make_pair(category, data));
+  auto tokens = base::SplitString(infos, delimiter, base::KEEP_WHITESPACE,
+                                  base::SPLIT_WANT_ALL);
+  if (tokens.size() % 2 != 0) {
+    return false;
   }
 
-  CHECK(info.empty());
-  return parsed_infos;
-}
+  while (!tokens.empty()) {
+    values->push_back(std::make_pair(tokens[0], tokens[1]));
+    tokens.erase(tokens.begin(), tokens.begin() + 2);
+  }
 
-bool IsSame(const std::vector<uint8_t>& data, const std::string& str) {
-  return data.size() == str.size() &&
-         SbMemoryCompare(data.data(), str.c_str(), data.size()) == 0;
+  return true;
 }
 
 }  // namespace
 
 TEST(OverlayInfoRegistryTest, RetrieveOnEmptyData) {
-  std::vector<uint8_t> infos('a');
+  std::string infos("a");
   OverlayInfoRegistry::RetrieveAndClear(&infos);
   EXPECT_TRUE(infos.empty());
 
-  std::vector<uint8_t> infos1('a');
+  std::string infos1("b");
   OverlayInfoRegistry::RetrieveAndClear(&infos1);
   EXPECT_TRUE(infos1.empty());
 }
@@ -92,33 +73,13 @@
 
     OverlayInfoRegistry::Register(kCategory, value.c_str());
 
-    std::vector<uint8_t> infos('a');
+    std::string infos("a");
     OverlayInfoRegistry::RetrieveAndClear(&infos);
-    auto parsed_infos = ParseOverlayInfo(infos);
+    std::vector<ValuePair> parsed_infos;
+    ASSERT_TRUE(ParseOverlayInfo(infos, &parsed_infos));
     EXPECT_EQ(parsed_infos.size(), 1);
     EXPECT_EQ(parsed_infos[0].first, kCategory);
-    EXPECT_TRUE(IsSame(parsed_infos[0].second, value));
-
-    OverlayInfoRegistry::RetrieveAndClear(&infos);
-    EXPECT_TRUE(infos.empty());
-  }
-}
-
-TEST(OverlayInfoRegistryTest, RegisterSingleBinaryStringAndRetrieve) {
-  const char kCategory[] = "category";
-  const size_t kMaxDataSize = 20;
-
-  for (size_t i = 0; i < kMaxDataSize; ++i) {
-    std::vector<uint8_t> value(i, static_cast<uint8_t>(i % 2));
-
-    OverlayInfoRegistry::Register(kCategory, value.data(), value.size());
-
-    std::vector<uint8_t> infos('a');
-    OverlayInfoRegistry::RetrieveAndClear(&infos);
-    auto parsed_infos = ParseOverlayInfo(infos);
-    EXPECT_EQ(parsed_infos.size(), 1);
-    EXPECT_EQ(parsed_infos[0].first, kCategory);
-    EXPECT_TRUE(parsed_infos[0].second == value);
+    EXPECT_EQ(parsed_infos[0].second, value);
 
     OverlayInfoRegistry::RetrieveAndClear(&infos);
     EXPECT_TRUE(infos.empty());
@@ -137,9 +98,10 @@
     OverlayInfoRegistry::Register(kCategory, values.back().c_str());
   }
 
-  std::vector<uint8_t> infos('a');
+  std::string infos("a");
   OverlayInfoRegistry::RetrieveAndClear(&infos);
-  auto parsed_infos = ParseOverlayInfo(infos);
+  std::vector<ValuePair> parsed_infos;
+  ASSERT_TRUE(ParseOverlayInfo(infos, &parsed_infos));
   OverlayInfoRegistry::RetrieveAndClear(&infos);
   EXPECT_TRUE(infos.empty());
 
@@ -147,35 +109,19 @@
 
   for (size_t i = 0; i < kMaxStringLength; ++i) {
     EXPECT_EQ(parsed_infos[i].first, kCategory);
-    EXPECT_TRUE(IsSame(parsed_infos[i].second, values[i]));
+    EXPECT_EQ(parsed_infos[i].second, values[i]);
   }
 }
 
-TEST(OverlayInfoRegistryTest, RegisterMultipleBinaryDataAndRetrieve) {
-  const char kCategory[] = "c";
-  const size_t kMaxDataSize = 20;
+TEST(OverlayInfoRegistryTest, RegisterMultipleTypes) {
+  OverlayInfoRegistry::Register("string", "string_value");
+  OverlayInfoRegistry::Register("int", -12345);
+  OverlayInfoRegistry::Register("uint64", 123456789012u);
 
-  std::vector<std::vector<uint8_t>> values;
-
-  for (size_t i = 0; i < kMaxDataSize; ++i) {
-    values.push_back(std::vector<uint8_t>(i, static_cast<uint8_t>(i % 2)));
-
-    OverlayInfoRegistry::Register(kCategory, values.back().data(),
-                                  values.back().size());
-  }
-
-  std::vector<uint8_t> infos('a');
+  std::string infos("a");
   OverlayInfoRegistry::RetrieveAndClear(&infos);
-  auto parsed_infos = ParseOverlayInfo(infos);
-  OverlayInfoRegistry::RetrieveAndClear(&infos);
-  EXPECT_TRUE(infos.empty());
 
-  ASSERT_EQ(parsed_infos.size(), kMaxDataSize);
-
-  for (size_t i = 0; i < kMaxDataSize; ++i) {
-    EXPECT_EQ(parsed_infos[i].first, kCategory);
-    EXPECT_TRUE(parsed_infos[i].second == values[i]);
-  }
+  EXPECT_EQ(infos, "string,string_value,int,-12345,uint64,123456789012");
 }
 
 }  // namespace overlay_info
diff --git a/src/cobalt/overlay_info/qr_code_overlay.cc b/src/cobalt/overlay_info/qr_code_overlay.cc
index 6fee031..f4691d3 100644
--- a/src/cobalt/overlay_info/qr_code_overlay.cc
+++ b/src/cobalt/overlay_info/qr_code_overlay.cc
@@ -15,13 +15,14 @@
 #include "cobalt/overlay_info/qr_code_overlay.h"
 
 #include <algorithm>
-#include <vector>
+#include <string>
 
 #include "base/compiler_specific.h"
 #include "base/logging.h"
 #include "base/trace_event/trace_event.h"
 #include "cobalt/overlay_info/overlay_info_registry.h"
 #include "cobalt/render_tree/animations/animate_node.h"
+#include "starboard/memory.h"
 #include "third_party/QR-Code-generator/cpp/QrCode.hpp"
 
 namespace cobalt {
@@ -32,16 +33,20 @@
 using qrcodegen::QrCode;
 using render_tree::Image;
 using render_tree::ImageNode;
+using render_tree::ResourceProvider;
 
-const int kModuleDimensionInPixels = 4;
+const int kMinimumQrCodeVersion = 3;
 const int kPixelSizeInBytes = 4;
-const uint32_t kBlack = 0x00000000;
+#if SB_IS_BIG_ENDIAN
+const uint32_t kBlack = 0x000000FF;
+#else   // SB_IS_BIG_ENDIAN
+const uint32_t kBlack = 0xFF000000;
+#endif  // SB_IS_BIG_ENDIAN
 const uint32_t kWhite = 0xFFFFFFFF;
 const uint32_t kBorderColor = kWhite;
-const int kCodeBorderInPixels = 16;
-const int kScreenMarginInPixels = 128;
+const int kScreenMarginInPixels = 64;
 
-int64_t s_frame_count_ = 0;
+uint32_t s_frame_index_ = 0;
 
 void DrawRect(int width, int height, int pitch_in_bytes, uint32_t color,
               uint8_t* target_buffer) {
@@ -55,43 +60,45 @@
   }
 }
 
-void DrawQrCode(const QrCode& qr_code, int pitch_in_bytes,
-                uint8_t* target_buffer) {
+void DrawQrCode(const QrCode& qr_code, int module_dimension_in_pixels,
+                int pitch_in_bytes, uint8_t* target_buffer) {
   uint8_t* row_data = target_buffer;
   for (int row = 0; row < qr_code.getSize(); ++row) {
     uint8_t* column_data = row_data;
     for (int column = 0; column < qr_code.getSize(); ++column) {
-      DrawRect(kModuleDimensionInPixels, kModuleDimensionInPixels,
+      DrawRect(module_dimension_in_pixels, module_dimension_in_pixels,
                pitch_in_bytes, qr_code.getModule(row, column) ? kBlack : kWhite,
                column_data);
-      column_data += kPixelSizeInBytes * kModuleDimensionInPixels;
+      column_data += kPixelSizeInBytes * module_dimension_in_pixels;
     }
 
-    row_data += pitch_in_bytes * kModuleDimensionInPixels;
+    row_data += pitch_in_bytes * module_dimension_in_pixels;
   }
 }
 
-scoped_refptr<Image> CreateImageForQrCodes(
-    const std::vector<QrCode>& qr_codes, const math::Size& screen_size,
-    render_tree::ResourceProvider* resource_provider) {
-  TRACE_EVENT0("cobalt::overlay_info", "CreateImageForQrCodes()");
+scoped_refptr<Image> CreateImageForQrCode(const QrCode& qr_code,
+                                          const math::Size& screen_size,
+                                          int slots,
+                                          ResourceProvider* resource_provider) {
+  TRACE_EVENT0("cobalt::overlay_info", "CreateImageForQrCode()");
 
-  int max_code_size = 0;
+  const int module_dimension_in_pixels = screen_size.height() > 1080 ? 16 : 8;
+  const int code_border_in_pixels = module_dimension_in_pixels * 2;
 
-  for (auto& qr_code : qr_codes) {
-    max_code_size = std::max(max_code_size, qr_code.getSize());
-  }
+  int qr_code_size_in_blocks = qr_code.getSize();
 
-  int column =
-      (screen_size.width() - kScreenMarginInPixels * 2 - kCodeBorderInPixels) /
-      (max_code_size * kModuleDimensionInPixels + kCodeBorderInPixels);
-  column = std::min(column, static_cast<int>(qr_codes.size()));
-  int row = (static_cast<int>(qr_codes.size()) + column - 1) / column;
+  int column = (screen_size.width() - kScreenMarginInPixels * 2 -
+                code_border_in_pixels) /
+               (qr_code_size_in_blocks * module_dimension_in_pixels +
+                code_border_in_pixels);
+  column = std::min(column, slots);
+  int row = (slots + column - 1) / column;
 
-  int image_width = column * max_code_size * kModuleDimensionInPixels +
-                    kCodeBorderInPixels * (column + 1);
-  int image_height = row * max_code_size * kModuleDimensionInPixels +
-                     kCodeBorderInPixels * (row + 1);
+  int image_width =
+      column * qr_code_size_in_blocks * module_dimension_in_pixels +
+      code_border_in_pixels * (column + 1);
+  int image_height = row * qr_code_size_in_blocks * module_dimension_in_pixels +
+                     code_border_in_pixels * (row + 1);
 
   auto image_data = resource_provider->AllocateImageData(
       math::Size(image_width, image_height), render_tree::kPixelFormatRGBA8,
@@ -99,59 +106,63 @@
   DCHECK(image_data);
   auto image_desc = image_data->GetDescriptor();
 
-  size_t qr_code_index = 0;
+  uint32_t slot_index = 0;
   auto row_data = image_data->GetMemory();
   for (int i = 0; i < row; ++i) {
-    // Draw the top border of all qr codes in the row.
-    DrawRect(image_width, kCodeBorderInPixels, image_desc.pitch_in_bytes,
+    // Draw the top border of all qr code blocks in the row.
+    DrawRect(image_width, code_border_in_pixels, image_desc.pitch_in_bytes,
              kBorderColor, row_data);
-    row_data += kCodeBorderInPixels * image_desc.pitch_in_bytes;
+    row_data += code_border_in_pixels * image_desc.pitch_in_bytes;
     auto column_data = row_data;
 
     for (int j = 0; j < column; ++j) {
       // Draw the left border.
-      DrawRect(kCodeBorderInPixels, max_code_size * kModuleDimensionInPixels,
+      DrawRect(code_border_in_pixels,
+               qr_code_size_in_blocks * module_dimension_in_pixels,
                image_desc.pitch_in_bytes, kBorderColor, column_data);
-      column_data += kCodeBorderInPixels * kPixelSizeInBytes;
-      if (qr_code_index < qr_codes.size()) {
+      column_data += code_border_in_pixels * kPixelSizeInBytes;
+      if (slot_index == s_frame_index_ % slots) {
         // Draw qr code.
-        DrawQrCode(qr_codes[qr_code_index], image_desc.pitch_in_bytes,
-                   column_data);
-        ++qr_code_index;
+        DrawQrCode(qr_code, module_dimension_in_pixels,
+                   image_desc.pitch_in_bytes, column_data);
+      } else {
+        DrawRect(qr_code_size_in_blocks * module_dimension_in_pixels,
+                 qr_code_size_in_blocks * module_dimension_in_pixels,
+                 image_desc.pitch_in_bytes, kBlack, column_data);
       }
-      column_data +=
-          max_code_size * kModuleDimensionInPixels * kPixelSizeInBytes;
+      ++slot_index;
+      column_data += qr_code_size_in_blocks * module_dimension_in_pixels *
+                     kPixelSizeInBytes;
     }
 
     // Draw the right border of the row.
-    DrawRect(kCodeBorderInPixels, max_code_size * kModuleDimensionInPixels,
+    DrawRect(code_border_in_pixels,
+             qr_code_size_in_blocks * module_dimension_in_pixels,
              image_desc.pitch_in_bytes, kBorderColor, column_data);
 
-    row_data +=
-        max_code_size * kModuleDimensionInPixels * image_desc.pitch_in_bytes;
+    row_data += qr_code_size_in_blocks * module_dimension_in_pixels *
+                image_desc.pitch_in_bytes;
   }
 
   // Draw the bottom border of all qr code.
-  DrawRect(image_width, kCodeBorderInPixels, image_desc.pitch_in_bytes,
+  DrawRect(image_width, code_border_in_pixels, image_desc.pitch_in_bytes,
            kBorderColor, row_data);
 
   return resource_provider->CreateImage(std::move(image_data));
 }
 
-void AnimateCB(math::Size screen_size,
-               render_tree::ResourceProvider* resource_provider,
-               render_tree::ImageNode::Builder* image_node,
-               base::TimeDelta time) {
+void AnimateCB(math::Size screen_size, int slots,
+               ResourceProvider* resource_provider,
+               ImageNode::Builder* image_node, base::TimeDelta time) {
   SB_UNREFERENCED_PARAMETER(time);
   DCHECK(image_node);
 
   TRACE_EVENT0("cobalt::overlay_info", "AnimateCB()");
 
-  OverlayInfoRegistry::Register("overlay_info:frame_count", &s_frame_count_,
-                                sizeof(s_frame_count_));
-  ++s_frame_count_;
+  OverlayInfoRegistry::Register("frame", s_frame_index_);
+  ++s_frame_index_;
 
-  std::vector<uint8_t> infos;
+  std::string infos;
   OverlayInfoRegistry::RetrieveAndClear(&infos);
 
   if (infos.empty()) {
@@ -159,26 +170,25 @@
     return;
   }
 
-  // Use a vector in case we decide to switch back to multiple qr codes.
-  std::vector<QrCode> qr_codes;
-  qr_codes.emplace_back(QrCode::encodeBinary(infos, QrCode::Ecc::LOW));
-
+  auto qrcode = QrCode::encodeText(infos.c_str(), QrCode::Ecc::LOW,
+                                   kMinimumQrCodeVersion);
   image_node->source =
-      CreateImageForQrCodes(qr_codes, screen_size, resource_provider);
+      CreateImageForQrCode(qrcode, screen_size, slots, resource_provider);
   auto image_size = image_node->source->GetSize();
-  // TODO: Move the QR code between draws to avoid tearing.
-  image_node->destination_rect =
-      math::RectF(kScreenMarginInPixels, kScreenMarginInPixels,
-                  image_size.width(), image_size.height());
+  image_node->destination_rect = math::RectF(
+      screen_size.width() - image_size.width() - kScreenMarginInPixels,
+      screen_size.height() - image_size.height() - kScreenMarginInPixels,
+      image_size.width(), image_size.height());
 }
 
 }  // namespace
 
 QrCodeOverlay::QrCodeOverlay(
-    const math::Size& screen_size,
-    render_tree::ResourceProvider* resource_provider,
+    const math::Size& screen_size, int slots,
+    ResourceProvider* resource_provider,
     const RenderTreeProducedCB& render_tree_produced_cb)
     : render_tree_produced_cb_(render_tree_produced_cb),
+      slots_(slots),
       screen_size_(screen_size),
       resource_provider_(resource_provider) {
   DCHECK_GT(screen_size.width(), 0);
@@ -196,8 +206,7 @@
   UpdateRenderTree();
 }
 
-void QrCodeOverlay::SetResourceProvider(
-    render_tree::ResourceProvider* resource_provider) {
+void QrCodeOverlay::SetResourceProvider(ResourceProvider* resource_provider) {
   resource_provider_ = resource_provider;
   UpdateRenderTree();
 }
@@ -212,8 +221,8 @@
   scoped_refptr<ImageNode> image_node = new ImageNode(nullptr);
   render_tree::animations::AnimateNode::Builder animate_node_builder;
 
-  animate_node_builder.Add(
-      image_node, base::Bind(AnimateCB, screen_size_, resource_provider_));
+  animate_node_builder.Add(image_node, base::Bind(AnimateCB, screen_size_,
+                                                  slots_, resource_provider_));
 
   render_tree_produced_cb_.Run(new render_tree::animations::AnimateNode(
       animate_node_builder, image_node));
diff --git a/src/cobalt/overlay_info/qr_code_overlay.h b/src/cobalt/overlay_info/qr_code_overlay.h
index 5d5aa15..02d630b 100644
--- a/src/cobalt/overlay_info/qr_code_overlay.h
+++ b/src/cobalt/overlay_info/qr_code_overlay.h
@@ -31,7 +31,7 @@
   typedef base::Callback<void(const scoped_refptr<render_tree::Node>&)>
       RenderTreeProducedCB;
 
-  QrCodeOverlay(const math::Size& screen_size,
+  QrCodeOverlay(const math::Size& screen_size, int slots,
                 render_tree::ResourceProvider* resource_provider,
                 const RenderTreeProducedCB& render_tree_produced_cb);
 
@@ -41,7 +41,11 @@
  private:
   void UpdateRenderTree();
 
-  RenderTreeProducedCB render_tree_produced_cb_;
+  const RenderTreeProducedCB render_tree_produced_cb_;
+  // Qr codes are displayed in rotating positions so they won't be blurred
+  // during capture.  The number of rotating positions is given by |slots_|.
+  const int slots_;
+
   math::Size screen_size_;
   render_tree::ResourceProvider* resource_provider_;
 };
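
As the new |slots_| comment above says, the QR code rotates through a fixed number of on-screen positions: AnimateCB in qr_code_overlay.cc draws the code only in the slot matching s_frame_index_ % slots and fills the remaining slots with black. An illustrative timeline (not part of the patch):

// With slots == 4:
//   frame 0 -> code drawn in slot 0, slots 1-3 filled black
//   frame 1 -> code drawn in slot 1, slots 0, 2, 3 filled black
//   frame 4 -> code drawn in slot 0 again
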
diff --git a/src/cobalt/page_visibility/page_visibility.gyp b/src/cobalt/page_visibility/page_visibility.gyp
index bef5f08..4d85443 100644
--- a/src/cobalt/page_visibility/page_visibility.gyp
+++ b/src/cobalt/page_visibility/page_visibility.gyp
@@ -47,11 +47,11 @@
         'page_visibility_state_test.cc',
       ],
       'dependencies': [
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
         'page_visibility',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
     {
       'target_name': 'page_visibility_test_deploy',
diff --git a/src/cobalt/render_tree/render_tree.gyp b/src/cobalt/render_tree/render_tree.gyp
index c7c0ec9..8f74058 100644
--- a/src/cobalt/render_tree/render_tree.gyp
+++ b/src/cobalt/render_tree/render_tree.gyp
@@ -104,12 +104,12 @@
         'node_visitor_test.cc',
       ],
       'dependencies': [
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
         'animations',
         'render_tree',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
 
     # Deploys the render tree library test on a console.
diff --git a/src/cobalt/renderer/backend/backend.gyp b/src/cobalt/renderer/backend/backend.gyp
index 3ff2de4..a3b5e85 100644
--- a/src/cobalt/renderer/backend/backend.gyp
+++ b/src/cobalt/renderer/backend/backend.gyp
@@ -23,7 +23,6 @@
         'render_target.cc',
         'render_target.h',
       ],
-
       'dependencies': [
         '<(DEPTH)/cobalt/base/base.gyp:base',
         '<(DEPTH)/cobalt/math/math.gyp:math',
diff --git a/src/cobalt/renderer/backend/graphics_context.cc b/src/cobalt/renderer/backend/graphics_context.cc
index 12f0e3c..4008b32 100644
--- a/src/cobalt/renderer/backend/graphics_context.cc
+++ b/src/cobalt/renderer/backend/graphics_context.cc
@@ -52,6 +52,16 @@
   return -1.0f;
 }
 
+float GraphicsContext::GetMinimumFrameIntervalInMilliseconds() {
+  if (graphics_extension_ && graphics_extension_->version >= 2) {
+    return graphics_extension_->GetMinimumFrameIntervalInMilliseconds();
+  }
+
+  // Return a negative value if the GraphicsExtension is not implemented
+  // or the GraphicsExtension version is below 2.
+  return -1.0f;
+}
+
 }  // namespace backend
 }  // namespace renderer
 }  // namespace cobalt
diff --git a/src/cobalt/renderer/backend/graphics_context.h b/src/cobalt/renderer/backend/graphics_context.h
index 8621f5e..ead28c3 100644
--- a/src/cobalt/renderer/backend/graphics_context.h
+++ b/src/cobalt/renderer/backend/graphics_context.h
@@ -85,6 +85,15 @@
   // only be presented when something changes.
   virtual float GetMaximumFrameIntervalInMilliseconds();
 
+  // Allow throttling of the frame rate. This is expressed in terms of
+  // milliseconds and can be a floating point number. Keep in mind that
+  // swapping frames may take some additional processing time, so it may be
+  // better to specify a lower delay. For example, '33' instead of '33.33'
+  // for 30 Hz refresh. If implemented, this takes precedence over the gyp
+  // variable 'cobalt_minimum_frame_time_in_milliseconds'.
+  // Note: Return a negative number if no value is specified by the platform.
+  virtual float GetMinimumFrameIntervalInMilliseconds();
+
  private:
   GraphicsSystem* system_;
 
diff --git a/src/cobalt/renderer/pipeline.cc b/src/cobalt/renderer/pipeline.cc
index ef54726..e2dad23 100644
--- a/src/cobalt/renderer/pipeline.cc
+++ b/src/cobalt/renderer/pipeline.cc
@@ -298,12 +298,20 @@
     // swaps. It is possible that a submission is not rendered (this can
     // happen if the render tree has not changed between submissions), so no
     // frame swap occurs, and the minimum frame time is the only throttle.
+    float minimum_frame_interval_milliseconds =
+        graphics_context_
+            ? graphics_context_->GetMinimumFrameIntervalInMilliseconds()
+            : -1.0f;
     COMPILE_ASSERT(COBALT_MINIMUM_FRAME_TIME_IN_MILLISECONDS > 0,
                    frame_time_must_be_positive);
+    if (minimum_frame_interval_milliseconds < 0.0f) {
+      minimum_frame_interval_milliseconds =
+          COBALT_MINIMUM_FRAME_TIME_IN_MILLISECONDS;
+    }
+    DCHECK(minimum_frame_interval_milliseconds > 0.0f);
     rasterize_timer_.emplace(
         FROM_HERE,
-        base::TimeDelta::FromMillisecondsD(
-            COBALT_MINIMUM_FRAME_TIME_IN_MILLISECONDS),
+        base::TimeDelta::FromMillisecondsD(minimum_frame_interval_milliseconds),
         base::BindRepeating(&Pipeline::RasterizeCurrentTree,
                             base::Unretained(this)));
     rasterize_timer_->Reset();
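
The pipeline.cc hunk above gives the platform-provided interval precedence over the build-time constant, falling back to COBALT_MINIMUM_FRAME_TIME_IN_MILLISECONDS when the platform returns a negative value. A minimal sketch of the selection logic, with a hypothetical helper name and example values (not part of the patch):

// Hypothetical helper mirroring the selection above; not part of the patch.
float EffectiveFrameIntervalMs(float platform_interval_ms,
                               float build_time_minimum_ms) {
  // A negative platform value means "not specified"; use the gyp constant.
  return platform_interval_ms < 0.0f ? build_time_minimum_ms
                                     : platform_interval_ms;
}
// EffectiveFrameIntervalMs(33.0f, 16.0f) == 33.0f  (platform throttles to ~30 Hz)
// EffectiveFrameIntervalMs(-1.0f, 16.0f) == 16.0f  (falls back to the gyp value)
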
diff --git a/src/cobalt/renderer/rasterizer/blitter/rasterizer.gyp b/src/cobalt/renderer/rasterizer/blitter/rasterizer.gyp
index f989a6d..af03b7b 100644
--- a/src/cobalt/renderer/rasterizer/blitter/rasterizer.gyp
+++ b/src/cobalt/renderer/rasterizer/blitter/rasterizer.gyp
@@ -103,10 +103,10 @@
 
       'dependencies': [
         'hardware_rasterizer',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest'
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
 
     {
diff --git a/src/cobalt/renderer/rasterizer/egl/textured_mesh_renderer.cc b/src/cobalt/renderer/rasterizer/egl/textured_mesh_renderer.cc
index b3e56b2..0fd2ab2 100644
--- a/src/cobalt/renderer/rasterizer/egl/textured_mesh_renderer.cc
+++ b/src/cobalt/renderer/rasterizer/egl/textured_mesh_renderer.cc
@@ -85,10 +85,17 @@
     1.164f, 2.112f, 0.0f,   -1.12875f, 0.0f,   0.0f,    0.0f,    1.0f};
 
 // Used for 10bit unnormalized YUV images.
+// Y is between 64 and 940 inclusive. U and V are between 64 and 960 inclusive.
+// The factor is 1023/(940-64) = 1.1678 for Y and 1023/(960-64) = 1.1417 for
+// U and V.  64 is the scale factor for 10 bit.
+// The offsets (0.0625, 0.5, 0.5) are subtracted from the normalized YUV, so
+// -1.1678 * 0.0625  - 0 * 0.5         - 1.6835 * 0.5    = -0.9147
+// -1.1678 * 0.0625  -(-0.1878 * 0.5)  - (-0.6522 * 0.5) = 0.347
+// -1.1678 * 0.0625  - (2.1479f * 0.5) - 0 * 0.5         = -1.1469
 const float k10BitBT2020ColorMatrix[16] = {
-    64 * 1.1678f, 0.0f,          64 * 1.6835f,  -0.96925f,
-    64 * 1.1678f, 64 * -0.1878f, 64 * -0.6522f, 0.30025f,
-    64 * 1.1678f, 64 * 2.1479f,  0.0f,          -1.12875f,
+    64 * 1.1678f, 0.0f,          64 * 1.6835f,  -0.9147f,
+    64 * 1.1678f, 64 * -0.1878f, 64 * -0.6522f, 0.347f,
+    64 * 1.1678f, 64 * 2.1479f,  0.0f,          -1.1469f,
     0.0f,         0.0f,          0.0f,          1.0f};
 
 const float* GetColorMatrixForImageType(
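
The corrected fourth-column offsets above equal the negated dot product of each normalized matrix row with the subtraction vector (0.0625, 0.5, 0.5). An illustrative spot check with hypothetical constant names (not part of the patch):

// Illustrative only, not part of the patch.
constexpr float kYOffset = -(1.1678f * 0.0625f + 0.0f * 0.5f + 1.6835f * 0.5f);
constexpr float kUOffset = -(1.1678f * 0.0625f - 0.1878f * 0.5f - 0.6522f * 0.5f);
constexpr float kVOffset = -(1.1678f * 0.0625f + 2.1479f * 0.5f + 0.0f * 0.5f);
// kYOffset ~= -0.9147f, kUOffset ~= 0.3470f, kVOffset ~= -1.1469f.
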
diff --git a/src/cobalt/renderer/renderer.gyp b/src/cobalt/renderer/renderer.gyp
index 89d6804..b9b59e9 100644
--- a/src/cobalt/renderer/renderer.gyp
+++ b/src/cobalt/renderer/renderer.gyp
@@ -101,7 +101,6 @@
       ],
       'dependencies': [
         '<(DEPTH)/cobalt/base/base.gyp:base',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
         'render_tree_pixel_tester',
@@ -112,6 +111,7 @@
           'defines' : ['ENABLE_MAP_TO_MESH'],
         }],
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
     {
       'target_name': 'renderer_copy_test_data',
diff --git a/src/cobalt/renderer/test/png_utils/png_utils.gyp b/src/cobalt/renderer/test/png_utils/png_utils.gyp
index dbea190..f56ddc6 100644
--- a/src/cobalt/renderer/test/png_utils/png_utils.gyp
+++ b/src/cobalt/renderer/test/png_utils/png_utils.gyp
@@ -44,12 +44,12 @@
       ],
       'dependencies': [
         '<(DEPTH)/cobalt/base/base.gyp:base',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
         'png_utils',
         'png_utils_copy_test_data',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
     {
       'target_name': 'png_utils_benchmark',
diff --git a/src/cobalt/samples/simple_example/simple_example.gyp b/src/cobalt/samples/simple_example/simple_example.gyp
index 5548fc2..7a6226c 100644
--- a/src/cobalt/samples/simple_example/simple_example.gyp
+++ b/src/cobalt/samples/simple_example/simple_example.gyp
@@ -91,11 +91,11 @@
       ],
       'dependencies': [
         '<(DEPTH)/cobalt/base/base.gyp:base',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gtest.gyp:gtest',
         'simple_example_lib',
         'simple_example_copy_test_data',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
 
     # This target is optional and is only needed if tests are using
diff --git a/src/cobalt/script/array_buffer.h b/src/cobalt/script/array_buffer.h
index 850fc52..eb5acce 100644
--- a/src/cobalt/script/array_buffer.h
+++ b/src/cobalt/script/array_buffer.h
@@ -15,6 +15,7 @@
 #ifndef COBALT_SCRIPT_ARRAY_BUFFER_H_
 #define COBALT_SCRIPT_ARRAY_BUFFER_H_
 
+#include <algorithm>
 #include <memory>
 
 #include "base/logging.h"
@@ -81,6 +82,7 @@
 //   DCHECK_EQ(data.data(), nullptr);
 class PreallocatedArrayBufferData {
  public:
+  PreallocatedArrayBufferData() = default;
   explicit PreallocatedArrayBufferData(size_t byte_length);
   ~PreallocatedArrayBufferData();
 
@@ -89,8 +91,17 @@
       default;
 
   void* data() { return data_; }
+  const void* data() const { return data_; }
   size_t byte_length() const { return byte_length_; }
 
+  void Swap(PreallocatedArrayBufferData* that) {
+    DCHECK(that);
+
+    std::swap(data_, that->data_);
+    std::swap(byte_length_, that->byte_length_);
+  }
+  void Resize(size_t new_byte_length);
+
  private:
   PreallocatedArrayBufferData(const PreallocatedArrayBufferData&) = delete;
   void operator=(const PreallocatedArrayBufferData&) = delete;
@@ -106,8 +117,8 @@
     byte_length_ = 0u;
   }
 
-  void* data_;
-  size_t byte_length_;
+  void* data_ = nullptr;
+  size_t byte_length_ = 0u;
 
   friend ArrayBuffer;
 };
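
array_buffer.h now gives PreallocatedArrayBufferData a default constructor plus Swap() and Resize(), with the engine-specific Resize() implementations added in the mozjs and v8c files below. Illustrative usage only (not part of the patch):

// Illustrative usage only, not part of the patch.
PreallocatedArrayBufferData data;      // default-constructed: nullptr, 0 bytes
data.Resize(1024);                     // engine-specific (re)allocation
SbMemorySet(data.data(), 0, data.byte_length());

PreallocatedArrayBufferData other;
other.Swap(&data);                     // |other| now owns the 1024 bytes,
                                       // |data| is empty again
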
diff --git a/src/cobalt/script/mozjs-45/mozjs_array_buffer.cc b/src/cobalt/script/mozjs-45/mozjs_array_buffer.cc
index 1fa7a43..714a582 100644
--- a/src/cobalt/script/mozjs-45/mozjs_array_buffer.cc
+++ b/src/cobalt/script/mozjs-45/mozjs_array_buffer.cc
@@ -12,11 +12,13 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+#include <algorithm>
 #include <memory>
 
 #include "cobalt/script/mozjs-45/mozjs_array_buffer.h"
 
 #include "cobalt/base/polymorphic_downcast.h"
+#include "starboard/memory.h"
 
 namespace cobalt {
 namespace script {
@@ -36,6 +38,22 @@
   }
 }
 
+void PreallocatedArrayBufferData::Resize(size_t new_byte_length) {
+  if (byte_length_ == new_byte_length) {
+    return;
+  }
+  auto new_data = js_malloc(new_byte_length);
+  DCHECK(new_data);
+  if (data_) {
+    if (new_data) {
+      SbMemoryCopy(new_data, data_, std::min(byte_length_, new_byte_length));
+    }
+    js_free(data_);
+  }
+  data_ = new_data;
+  byte_length_ = new_byte_length;
+}
+
 // static
 Handle<ArrayBuffer> ArrayBuffer::New(GlobalEnvironment* global_environment,
                                      size_t byte_length) {
diff --git a/src/cobalt/script/v8c/v8c_array_buffer.cc b/src/cobalt/script/v8c/v8c_array_buffer.cc
index 13087af..7fe8b3a 100644
--- a/src/cobalt/script/v8c/v8c_array_buffer.cc
+++ b/src/cobalt/script/v8c/v8c_array_buffer.cc
@@ -17,6 +17,7 @@
 #include "cobalt/script/v8c/v8c_array_buffer.h"
 
 #include "cobalt/base/polymorphic_downcast.h"
+#include "starboard/memory.h"
 
 namespace cobalt {
 namespace script {
@@ -34,6 +35,11 @@
   }
 }
 
+void PreallocatedArrayBufferData::Resize(size_t new_byte_length) {
+  data_ = SbMemoryReallocate(data_, new_byte_length);
+  byte_length_ = new_byte_length;
+}
+
 // static
 Handle<ArrayBuffer> ArrayBuffer::New(GlobalEnvironment* global_environment,
                                      size_t byte_length) {
diff --git a/src/cobalt/script/v8c/v8c_callback_function.h b/src/cobalt/script/v8c/v8c_callback_function.h
index b958457..c069726 100644
--- a/src/cobalt/script/v8c/v8c_callback_function.h
+++ b/src/cobalt/script/v8c/v8c_callback_function.h
@@ -22,6 +22,8 @@
 #ifndef COBALT_SCRIPT_V8C_V8C_CALLBACK_FUNCTION_H_
 #define COBALT_SCRIPT_V8C_V8C_CALLBACK_FUNCTION_H_
 
+#include <string>
+
 #include "base/logging.h"
 #include "cobalt/script/callback_function.h"
 #include "cobalt/script/v8c/conversion_helpers.h"
@@ -90,6 +92,7 @@
 
     EntryScope entry_scope(isolate_);
     v8::Local<v8::Context> context = isolate_->GetCurrentContext();
+    v8::TryCatch try_catch(isolate_);
 
     // https://www.w3.org/TR/WebIDL/#es-invoking-callback-functions
     // Callback 'this' is set to null, unless overridden by other specifications
@@ -110,7 +113,16 @@
         function_as_object->CallAsFunction(context, this_value, argc, argv);
     v8::Local<v8::Value> return_value;
     if (!maybe_return_value.ToLocal(&return_value)) {
-      NOTIMPLEMENTED();
+      std::string description;
+      v8::Local<v8::Value> stack;
+      if (try_catch.StackTrace(context).ToLocal(&stack)) {
+        description = *v8::String::Utf8Value(isolate_, stack);
+      } else {
+        description = *v8::String::Utf8Value(isolate_, try_catch.Exception());
+      }
+      if (description.empty()) description = "Unknown exception";
+      // TODO: Send the description to the console instead of logging it.
+      LOG(ERROR) << description;
       callback_result.exception = true;
     } else {
       callback_result = ConvertCallbackReturnValue<R>(isolate_, return_value);
@@ -133,8 +145,7 @@
   V8cCallbackFunction(v8::Isolate* isolate, v8::Local<v8::Value> handle)
       : ScopedPersistent(isolate, handle), isolate_(isolate) {}
 
-  CallbackResult<R> Run(      typename
-      CallbackParamTraits<A1>::ForwardType a1)
+  CallbackResult<R> Run(typename CallbackParamTraits<A1>::ForwardType a1)
       const override {
     CallbackResult<R> callback_result;
     DCHECK(!this->IsEmpty());
@@ -146,6 +157,7 @@
 
     EntryScope entry_scope(isolate_);
     v8::Local<v8::Context> context = isolate_->GetCurrentContext();
+    v8::TryCatch try_catch(isolate_);
 
     // https://www.w3.org/TR/WebIDL/#es-invoking-callback-functions
     // Callback 'this' is set to null, unless overridden by other specifications
@@ -168,7 +180,16 @@
         function_as_object->CallAsFunction(context, this_value, argc, argv);
     v8::Local<v8::Value> return_value;
     if (!maybe_return_value.ToLocal(&return_value)) {
-      NOTIMPLEMENTED();
+      std::string description;
+      v8::Local<v8::Value> stack;
+      if (try_catch.StackTrace(context).ToLocal(&stack)) {
+        description = *v8::String::Utf8Value(isolate_, stack);
+      } else {
+        description = *v8::String::Utf8Value(isolate_, try_catch.Exception());
+      }
+      if (description.empty()) description = "Unknown exception";
+      // TODO: Send the description to the console instead of logging it.
+      LOG(ERROR) << description;
       callback_result.exception = true;
     } else {
       callback_result = ConvertCallbackReturnValue<R>(isolate_, return_value);
@@ -191,8 +212,7 @@
   V8cCallbackFunction(v8::Isolate* isolate, v8::Local<v8::Value> handle)
       : ScopedPersistent(isolate, handle), isolate_(isolate) {}
 
-  CallbackResult<R> Run(      typename
-      CallbackParamTraits<A1>::ForwardType a1,
+  CallbackResult<R> Run(typename CallbackParamTraits<A1>::ForwardType a1,
       typename CallbackParamTraits<A2>::ForwardType a2)
       const override {
     CallbackResult<R> callback_result;
@@ -205,6 +225,7 @@
 
     EntryScope entry_scope(isolate_);
     v8::Local<v8::Context> context = isolate_->GetCurrentContext();
+    v8::TryCatch try_catch(isolate_);
 
     // https://www.w3.org/TR/WebIDL/#es-invoking-callback-functions
     // Callback 'this' is set to null, unless overridden by other specifications
@@ -228,7 +249,16 @@
         function_as_object->CallAsFunction(context, this_value, argc, argv);
     v8::Local<v8::Value> return_value;
     if (!maybe_return_value.ToLocal(&return_value)) {
-      NOTIMPLEMENTED();
+      std::string description;
+      v8::Local<v8::Value> stack;
+      if (try_catch.StackTrace(context).ToLocal(&stack)) {
+        description = *v8::String::Utf8Value(isolate_, stack);
+      } else {
+        description = *v8::String::Utf8Value(isolate_, try_catch.Exception());
+      }
+      if (description.empty()) description = "Unknown exception";
+      // TODO: Send the description to the console instead of logging it.
+      LOG(ERROR) << description;
       callback_result.exception = true;
     } else {
       callback_result = ConvertCallbackReturnValue<R>(isolate_, return_value);
@@ -251,8 +281,7 @@
   V8cCallbackFunction(v8::Isolate* isolate, v8::Local<v8::Value> handle)
       : ScopedPersistent(isolate, handle), isolate_(isolate) {}
 
-  CallbackResult<R> Run(      typename
-      CallbackParamTraits<A1>::ForwardType a1,
+  CallbackResult<R> Run(typename CallbackParamTraits<A1>::ForwardType a1,
       typename CallbackParamTraits<A2>::ForwardType a2,
       typename CallbackParamTraits<A3>::ForwardType a3)
       const override {
@@ -266,6 +295,7 @@
 
     EntryScope entry_scope(isolate_);
     v8::Local<v8::Context> context = isolate_->GetCurrentContext();
+    v8::TryCatch try_catch(isolate_);
 
     // https://www.w3.org/TR/WebIDL/#es-invoking-callback-functions
     // Callback 'this' is set to null, unless overridden by other specifications
@@ -290,7 +320,16 @@
         function_as_object->CallAsFunction(context, this_value, argc, argv);
     v8::Local<v8::Value> return_value;
     if (!maybe_return_value.ToLocal(&return_value)) {
-      NOTIMPLEMENTED();
+      std::string description;
+      v8::Local<v8::Value> stack;
+      if (try_catch.StackTrace(context).ToLocal(&stack)) {
+        description = *v8::String::Utf8Value(isolate_, stack);
+      } else {
+        description = *v8::String::Utf8Value(isolate_, try_catch.Exception());
+      }
+      if (description.empty()) description = "Unknown exception";
+      // TODO: Send the description to the console instead of logging it.
+      LOG(ERROR) << description;
       callback_result.exception = true;
     } else {
       callback_result = ConvertCallbackReturnValue<R>(isolate_, return_value);
@@ -313,8 +352,7 @@
   V8cCallbackFunction(v8::Isolate* isolate, v8::Local<v8::Value> handle)
       : ScopedPersistent(isolate, handle), isolate_(isolate) {}
 
-  CallbackResult<R> Run(      typename
-      CallbackParamTraits<A1>::ForwardType a1,
+  CallbackResult<R> Run(typename CallbackParamTraits<A1>::ForwardType a1,
       typename CallbackParamTraits<A2>::ForwardType a2,
       typename CallbackParamTraits<A3>::ForwardType a3,
       typename CallbackParamTraits<A4>::ForwardType a4)
@@ -329,6 +367,7 @@
 
     EntryScope entry_scope(isolate_);
     v8::Local<v8::Context> context = isolate_->GetCurrentContext();
+    v8::TryCatch try_catch(isolate_);
 
     // https://www.w3.org/TR/WebIDL/#es-invoking-callback-functions
     // Callback 'this' is set to null, unless overridden by other specifications
@@ -354,7 +393,16 @@
         function_as_object->CallAsFunction(context, this_value, argc, argv);
     v8::Local<v8::Value> return_value;
     if (!maybe_return_value.ToLocal(&return_value)) {
-      NOTIMPLEMENTED();
+      std::string description;
+      v8::Local<v8::Value> stack;
+      if (try_catch.StackTrace(context).ToLocal(&stack)) {
+        description = *v8::String::Utf8Value(isolate_, stack);
+      } else {
+        description = *v8::String::Utf8Value(isolate_, try_catch.Exception());
+      }
+      if (description.empty()) description = "Unknown exception";
+      // TODO: Send the description to the console instead of logging it.
+      LOG(ERROR) << description;
       callback_result.exception = true;
     } else {
       callback_result = ConvertCallbackReturnValue<R>(isolate_, return_value);
@@ -378,8 +426,7 @@
   V8cCallbackFunction(v8::Isolate* isolate, v8::Local<v8::Value> handle)
       : ScopedPersistent(isolate, handle), isolate_(isolate) {}
 
-  CallbackResult<R> Run(      typename
-      CallbackParamTraits<A1>::ForwardType a1,
+  CallbackResult<R> Run(typename CallbackParamTraits<A1>::ForwardType a1,
       typename CallbackParamTraits<A2>::ForwardType a2,
       typename CallbackParamTraits<A3>::ForwardType a3,
       typename CallbackParamTraits<A4>::ForwardType a4,
@@ -395,6 +442,7 @@
 
     EntryScope entry_scope(isolate_);
     v8::Local<v8::Context> context = isolate_->GetCurrentContext();
+    v8::TryCatch try_catch(isolate_);
 
     // https://www.w3.org/TR/WebIDL/#es-invoking-callback-functions
     // Callback 'this' is set to null, unless overridden by other specifications
@@ -421,7 +469,16 @@
         function_as_object->CallAsFunction(context, this_value, argc, argv);
     v8::Local<v8::Value> return_value;
     if (!maybe_return_value.ToLocal(&return_value)) {
-      NOTIMPLEMENTED();
+      std::string description;
+      v8::Local<v8::Value> stack;
+      if (try_catch.StackTrace(context).ToLocal(&stack)) {
+        description = *v8::String::Utf8Value(isolate_, stack);
+      } else {
+        description = *v8::String::Utf8Value(isolate_, try_catch.Exception());
+      }
+      if (description.empty()) description = "Unknown exception";
+      // TODO: Send the description to the console instead of logging it.
+      LOG(ERROR) << description;
       callback_result.exception = true;
     } else {
       callback_result = ConvertCallbackReturnValue<R>(isolate_, return_value);
@@ -445,8 +502,7 @@
   V8cCallbackFunction(v8::Isolate* isolate, v8::Local<v8::Value> handle)
       : ScopedPersistent(isolate, handle), isolate_(isolate) {}
 
-  CallbackResult<R> Run(      typename
-      CallbackParamTraits<A1>::ForwardType a1,
+  CallbackResult<R> Run(typename CallbackParamTraits<A1>::ForwardType a1,
       typename CallbackParamTraits<A2>::ForwardType a2,
       typename CallbackParamTraits<A3>::ForwardType a3,
       typename CallbackParamTraits<A4>::ForwardType a4,
@@ -463,6 +519,7 @@
 
     EntryScope entry_scope(isolate_);
     v8::Local<v8::Context> context = isolate_->GetCurrentContext();
+    v8::TryCatch try_catch(isolate_);
 
     // https://www.w3.org/TR/WebIDL/#es-invoking-callback-functions
     // Callback 'this' is set to null, unless overridden by other specifications
@@ -490,7 +547,16 @@
         function_as_object->CallAsFunction(context, this_value, argc, argv);
     v8::Local<v8::Value> return_value;
     if (!maybe_return_value.ToLocal(&return_value)) {
-      NOTIMPLEMENTED();
+      std::string description;
+      v8::Local<v8::Value> stack;
+      if (try_catch.StackTrace(context).ToLocal(&stack)) {
+        description = *v8::String::Utf8Value(isolate_, stack);
+      } else {
+        description = *v8::String::Utf8Value(isolate_, try_catch.Exception());
+      }
+      if (description.empty()) description = "Unknown exception";
+      // TODO: Send the description to the console instead of logging it.
+      LOG(ERROR) << description;
       callback_result.exception = true;
     } else {
       callback_result = ConvertCallbackReturnValue<R>(isolate_, return_value);
@@ -514,8 +580,7 @@
   V8cCallbackFunction(v8::Isolate* isolate, v8::Local<v8::Value> handle)
       : ScopedPersistent(isolate, handle), isolate_(isolate) {}
 
-  CallbackResult<R> Run(      typename
-      CallbackParamTraits<A1>::ForwardType a1,
+  CallbackResult<R> Run(typename CallbackParamTraits<A1>::ForwardType a1,
       typename CallbackParamTraits<A2>::ForwardType a2,
       typename CallbackParamTraits<A3>::ForwardType a3,
       typename CallbackParamTraits<A4>::ForwardType a4,
@@ -533,6 +598,7 @@
 
     EntryScope entry_scope(isolate_);
     v8::Local<v8::Context> context = isolate_->GetCurrentContext();
+    v8::TryCatch try_catch(isolate_);
 
     // https://www.w3.org/TR/WebIDL/#es-invoking-callback-functions
     // Callback 'this' is set to null, unless overridden by other specifications
@@ -561,7 +627,16 @@
         function_as_object->CallAsFunction(context, this_value, argc, argv);
     v8::Local<v8::Value> return_value;
     if (!maybe_return_value.ToLocal(&return_value)) {
-      NOTIMPLEMENTED();
+      std::string description;
+      v8::Local<v8::Value> stack;
+      if (try_catch.StackTrace(context).ToLocal(&stack)) {
+        description = *v8::String::Utf8Value(isolate_, stack);
+      } else {
+        description = *v8::String::Utf8Value(isolate_, try_catch.Exception());
+      }
+      if (description.empty()) description = "Unknown exception";
+      // TODO: Send the description to the console instead of logging it.
+      LOG(ERROR) << description;
       callback_result.exception = true;
     } else {
       callback_result = ConvertCallbackReturnValue<R>(isolate_, return_value);
diff --git a/src/cobalt/script/v8c/v8c_callback_function.h.pump b/src/cobalt/script/v8c/v8c_callback_function.h.pump
index 42c9ffb..8bf8cd2 100644
--- a/src/cobalt/script/v8c/v8c_callback_function.h.pump
+++ b/src/cobalt/script/v8c/v8c_callback_function.h.pump
@@ -27,6 +27,8 @@
 #ifndef COBALT_SCRIPT_V8C_V8C_CALLBACK_FUNCTION_H_
 #define COBALT_SCRIPT_V8C_V8C_CALLBACK_FUNCTION_H_
 
+#include <string>
+
 #include "base/logging.h"
 #include "cobalt/script/callback_function.h"
 #include "cobalt/script/v8c/conversion_helpers.h"
@@ -95,8 +97,8 @@
   V8cCallbackFunction(v8::Isolate* isolate, v8::Local<v8::Value> handle)
       : ScopedPersistent(isolate, handle), isolate_(isolate) {}
 
-  CallbackResult<R> Run($for ARG , [[
-      typename CallbackParamTraits<A$(ARG)>::ForwardType a$(ARG)]])
+  CallbackResult<R> Run($for ARG ,
+      [[typename CallbackParamTraits<A$(ARG)>::ForwardType a$(ARG)]])
       const override {
     CallbackResult<R> callback_result;
     DCHECK(!this->IsEmpty());
@@ -108,6 +110,7 @@
 
     EntryScope entry_scope(isolate_);
     v8::Local<v8::Context> context = isolate_->GetCurrentContext();
+    v8::TryCatch try_catch(isolate_);
 
     // https://www.w3.org/TR/WebIDL/#es-invoking-callback-functions
     // Callback 'this' is set to null, unless overridden by other specifications
@@ -134,7 +137,16 @@
     v8::MaybeLocal<v8::Value> maybe_return_value = function_as_object->CallAsFunction(context, this_value, argc, argv);
     v8::Local<v8::Value> return_value;
     if (!maybe_return_value.ToLocal(&return_value)) {
-      NOTIMPLEMENTED();
+      std::string description;
+      v8::Local<v8::Value> stack;
+      if (try_catch.StackTrace(context).ToLocal(&stack)) {
+        description = *v8::String::Utf8Value(isolate_, stack);
+      } else {
+        description = *v8::String::Utf8Value(isolate_, try_catch.Exception());
+      }
+      if (description.empty()) description = "Unknown exception";
+      // TODO: Send the description to the console instead of logging it.
+      LOG(ERROR) << description;
       callback_result.exception = true;
     } else {
       callback_result = ConvertCallbackReturnValue<R>(isolate_, return_value);
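
Note on the recurring hunks above: each generated specialization now installs a v8::TryCatch before invoking the callback, and when CallAsFunction() returns an empty MaybeLocal the caught JavaScript stack trace (or, failing that, the exception value) is converted to UTF-8 and logged. A minimal standalone sketch of that reporting step, not part of the patch; the helper name and include path are illustrative:

    // Sketch: turn a caught JavaScript exception into a loggable string.
    // Assumes an entered isolate and current context; only public V8 APIs.
    #include <string>
    #include "v8/include/v8.h"

    std::string DescribeException(v8::Isolate* isolate,
                                  v8::Local<v8::Context> context,
                                  v8::TryCatch* try_catch) {
      v8::Local<v8::Value> stack;
      if (try_catch->StackTrace(context).ToLocal(&stack)) {
        // Prefer the stack trace; it already contains the exception message.
        return *v8::String::Utf8Value(isolate, stack);
      }
      std::string description =
          *v8::String::Utf8Value(isolate, try_catch->Exception());
      return description.empty() ? "Unknown exception" : description;
    }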
diff --git a/src/cobalt/speech/google_speech_service.cc b/src/cobalt/speech/google_speech_service.cc
index 231bf06..30d7463 100644
--- a/src/cobalt/speech/google_speech_service.cc
+++ b/src/cobalt/speech/google_speech_service.cc
@@ -229,18 +229,22 @@
                             is_last_chunk));
 }
 
+// TODO: Refactor OnURLFetchDownloadProgress() into a private function that is
+//       called by OnURLFetchDownloadProgress() and OnURLFetchComplete(), to
+//       explicitly remove the unreferenced parameters.
 void GoogleSpeechService::OnURLFetchDownloadProgress(
     const net::URLFetcher* source, int64_t /*current*/, int64_t /*total*/,
     int64_t /*current_network_bytes*/) {
   DCHECK_EQ(thread_.message_loop(), base::MessageLoop::current());
-  std::unique_ptr<std::string> data = download_data_writer_->data();
+  std::string data;
+  download_data_writer_->GetAndResetData(&data);
 
   const net::URLRequestStatus& status = source->GetStatus();
   const int response_code = source->GetResponseCode();
 
   if (source == downstream_fetcher_.get()) {
     if (status.is_success() && IsResponseCodeSuccess(response_code)) {
-      chunked_byte_buffer_.Append(*data);
+      chunked_byte_buffer_.Append(data);
       while (chunked_byte_buffer_.HasChunks()) {
         std::unique_ptr<std::vector<uint8_t> > chunk =
             chunked_byte_buffer_.PopChunk();
@@ -272,12 +276,11 @@
 
 void GoogleSpeechService::OnURLFetchComplete(const net::URLFetcher* source) {
   DCHECK_EQ(thread_.message_loop(), base::MessageLoop::current());
-  std::unique_ptr<std::string> remaining_data = download_data_writer_->data();
-  int64_t length = remaining_data->length();
-  if (remaining_data && length > 0) {
-    OnURLFetchDownloadProgress(source, length, length, length);
+  if (download_data_writer_->HasData()) {
+    // Explicitly pass '-1' for all sizes, as they are not used by
+    // OnURLFetchDownloadProgress().
+    OnURLFetchDownloadProgress(source, -1, -1, -1);
   }
-  // no-op.
 }
 
 // static
diff --git a/src/cobalt/speech/google_speech_service.h b/src/cobalt/speech/google_speech_service.h
index a69ff9d..3768636 100644
--- a/src/cobalt/speech/google_speech_service.h
+++ b/src/cobalt/speech/google_speech_service.h
@@ -68,7 +68,7 @@
 
   // net::URLFetcherDelegate interface
   void OnURLFetchDownloadProgress(const net::URLFetcher* source,
-                                  int64_t current, int64_t total,
+                                  int64_t /*current*/, int64_t /*total*/,
                                   int64_t /*current_network_bytes*/) override;
   void OnURLFetchComplete(const net::URLFetcher* source) override;
   void OnURLFetchUploadProgress(const net::URLFetcher* /*source*/,
diff --git a/src/cobalt/storage/storage.gyp b/src/cobalt/storage/storage.gyp
index 9b77578..96ea7bc 100644
--- a/src/cobalt/storage/storage.gyp
+++ b/src/cobalt/storage/storage.gyp
@@ -55,12 +55,12 @@
       ],
       'dependencies': [
         '<(DEPTH)/cobalt/base/base.gyp:base',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
         'storage',
         'storage_upgrade_copy_test_data',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
     {
       'target_name': 'storage_test_deploy',
diff --git a/src/cobalt/storage/store/store.gyp b/src/cobalt/storage/store/store.gyp
index d80a876..1cf9fc4 100644
--- a/src/cobalt/storage/store/store.gyp
+++ b/src/cobalt/storage/store/store.gyp
@@ -39,11 +39,11 @@
       ],
       'dependencies': [
         '<(DEPTH)/cobalt/base/base.gyp:base',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
         'memory_store',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
     {
       'target_name': 'memory_store_test_deploy',
diff --git a/src/cobalt/storage/store_upgrade/upgrade.gyp b/src/cobalt/storage/store_upgrade/upgrade.gyp
index 1066f1a..23a196e 100644
--- a/src/cobalt/storage/store_upgrade/upgrade.gyp
+++ b/src/cobalt/storage/store_upgrade/upgrade.gyp
@@ -48,11 +48,11 @@
         'storage_upgrade',
         'storage_upgrade_copy_test_files',
         '<(DEPTH)/cobalt/base/base.gyp:base',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
         '<(DEPTH)/third_party/protobuf/protobuf.gyp:protobuf_lite',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
     {
       'target_name': 'storage_upgrade_test_deploy',
diff --git a/src/cobalt/test/run_all_unittests.cc b/src/cobalt/test/run_all_unittests.cc
index 90025ec..0d69f0c 100644
--- a/src/cobalt/test/run_all_unittests.cc
+++ b/src/cobalt/test/run_all_unittests.cc
@@ -22,7 +22,6 @@
 #include "testing/gtest/include/gtest/gtest.h"
 
 namespace {
-
 int InitAndRunAllTests(int argc, char** argv) {
   base::CommandLine::Init(argc, argv);
   base::AtExitManager exit_manager;
diff --git a/src/cobalt/test/test.gyp b/src/cobalt/test/test.gyp
deleted file mode 100644
index 30af2ea..0000000
--- a/src/cobalt/test/test.gyp
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright 2017 The Cobalt Authors. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-{
-  'targets': [
-    {
-      'target_name': 'run_all_unittests',
-      'type': 'static_library',
-      'dependencies': [
-        '<(DEPTH)/base/base.gyp:test_support_base',
-        '<(DEPTH)/testing/gtest.gyp:gtest',
-      ],
-      'sources': [
-        'run_all_unittests.cc',
-      ],
-    },
-  ]
-}
diff --git a/src/cobalt/test/test.gypi b/src/cobalt/test/test.gypi
new file mode 100644
index 0000000..9df4e38
--- /dev/null
+++ b/src/cobalt/test/test.gypi
@@ -0,0 +1,23 @@
+# Copyright 2017 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+{
+  'dependencies': [
+    '<(DEPTH)/base/base.gyp:test_support_base',
+    '<(DEPTH)/testing/gtest.gyp:gtest',
+  ],
+  'sources': [
+    '<(DEPTH)/cobalt/test/run_all_unittests.cc',
+  ],
+}
diff --git a/src/cobalt/tools/automated_testing/cobalt_runner.py b/src/cobalt/tools/automated_testing/cobalt_runner.py
index 3f392c9..56a1b24 100644
--- a/src/cobalt/tools/automated_testing/cobalt_runner.py
+++ b/src/cobalt/tools/automated_testing/cobalt_runner.py
@@ -11,6 +11,7 @@
 import thread
 import threading
 import time
+import traceback
 
 import _env  # pylint: disable=unused-import
 from cobalt.tools.automated_testing import c_val_names
@@ -141,6 +142,10 @@
     """Sends a system signal to put Cobalt into suspend state."""
     self.launcher.SendSuspend()
 
+  def SendDeepLink(self, link):
+    """Sends a deep link to Cobalt."""
+    return self.launcher.SendDeepLink(link)
+
   def GetURL(self):
     return self.url
 
@@ -254,10 +259,18 @@
 
   def _KillLauncher(self):
     """Kills the launcher and its attached Cobalt instance."""
+    wait_for_runner_thread = True
     if self.CanExecuteJavaScript():
-      self.ExecuteJavaScript('window.close();')
+      try:
+        self.ExecuteJavaScript('window.close();')
+      except Exception:
+        wait_for_runner_thread = False
+        sys.stderr.write(
+            '***An exception was raised while trying to close the app:')
+        traceback.print_exc(file=sys.stderr)
 
-    self.runner_thread.join(COBALT_EXIT_TIMEOUT_SECONDS)
+    if wait_for_runner_thread:
+      self.runner_thread.join(COBALT_EXIT_TIMEOUT_SECONDS)
     if self.runner_thread.isAlive():
       sys.stderr.write(
           '***Runner thread still alive after sending graceful shutdown command, try again by killing app***\n'
@@ -503,7 +516,7 @@
   device_params.config = args.config
   device_params.device_id = args.device_id
   device_params.out_directory = args.out_directory
-  if args.target_params == None:
+  if args.target_params is None:
     device_params.target_params = []
   else:
     device_params.target_params = [args.target_params]
diff --git a/src/cobalt/web_animations/web_animations.gyp b/src/cobalt/web_animations/web_animations.gyp
index ff2eaf8..c350740 100644
--- a/src/cobalt/web_animations/web_animations.gyp
+++ b/src/cobalt/web_animations/web_animations.gyp
@@ -57,10 +57,10 @@
         'web_animations',
         '<(DEPTH)/cobalt/css_parser/css_parser.gyp:css_parser',
         '<(DEPTH)/cobalt/cssom/cssom.gyp:cssom',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
 
     {
diff --git a/src/cobalt/webdriver/screencast/screencast_module.cc b/src/cobalt/webdriver/screencast/screencast_module.cc
index ddbdbbd..9df5269 100644
--- a/src/cobalt/webdriver/screencast/screencast_module.cc
+++ b/src/cobalt/webdriver/screencast/screencast_module.cc
@@ -31,6 +31,8 @@
 
 namespace {
 const char kJpegContentType[] = "image/jpeg";
+// The screencast frame rate is 30 frames per second.
+const int kScreencastFramesPerSecond = 30;
 }
 
 ScreencastModule::ScreencastModule(
@@ -88,7 +90,7 @@
       base::Bind(&ScreencastModule::TakeScreenshot, base::Unretained(this));
   screenshot_timer_->Start(FROM_HERE,
                            base::TimeDelta::FromMilliseconds(
-                               COBALT_MINIMUM_FRAME_TIME_IN_MILLISECONDS),
+                               1000.0f / kScreencastFramesPerSecond),
                            screenshot_event);
 }
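
For reference, the new timer interval works out to 1000.0f / kScreencastFramesPerSecond = 1000 / 30 ≈ 33.3 ms between screenshots, which removes the screencast's dependency on the cobalt_minimum_frame_time_in_milliseconds gyp variable whose define is dropped from webdriver.gyp below.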
 
diff --git a/src/cobalt/webdriver/webdriver.gyp b/src/cobalt/webdriver/webdriver.gyp
index f344d75..491531d 100644
--- a/src/cobalt/webdriver/webdriver.gyp
+++ b/src/cobalt/webdriver/webdriver.gyp
@@ -94,7 +94,6 @@
           'dependencies': [ 'copy_webdriver_data', ],
           'defines': [
             'ENABLE_WEBDRIVER',
-            'COBALT_MINIMUM_FRAME_TIME_IN_MILLISECONDS=<(cobalt_minimum_frame_time_in_milliseconds)',
           ],
           'all_dependent_settings': {
             'defines': [ 'ENABLE_WEBDRIVER', ],
diff --git a/src/cobalt/webdriver/webdriver_test.gyp b/src/cobalt/webdriver/webdriver_test.gyp
index 9342fdc..33c8ab8 100644
--- a/src/cobalt/webdriver/webdriver_test.gyp
+++ b/src/cobalt/webdriver/webdriver_test.gyp
@@ -15,11 +15,11 @@
       ],
       'dependencies': [
         '<(DEPTH)/cobalt/browser/browser.gyp:browser',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
         'webdriver_copy_test_data',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
 
     {
diff --git a/src/cobalt/websocket/cobalt_web_socket_event_handler.cc b/src/cobalt/websocket/cobalt_web_socket_event_handler.cc
index 1d3db2e..399c19c 100644
--- a/src/cobalt/websocket/cobalt_web_socket_event_handler.cc
+++ b/src/cobalt/websocket/cobalt_web_socket_event_handler.cc
@@ -132,5 +132,9 @@
   creator_->OnWriteDone(bytes_written);
 }
 
+void CobaltWebSocketEventHandler::OnFlowControl(int64_t quota) {
+  creator_->OnFlowControl(quota);
+}
+
 }  // namespace websocket
 }  // namespace cobalt
\ No newline at end of file
diff --git a/src/cobalt/websocket/cobalt_web_socket_event_handler.h b/src/cobalt/websocket/cobalt_web_socket_event_handler.h
index 6cb92fb..b974895 100644
--- a/src/cobalt/websocket/cobalt_web_socket_event_handler.h
+++ b/src/cobalt/websocket/cobalt_web_socket_event_handler.h
@@ -57,7 +57,7 @@
   // Called to provide more send quota for this channel to the renderer
   // process. Currently the quota units are always bytes of message body
   // data. In future it might depend on the type of multiplexing in use.
-  virtual void OnFlowControl(int64_t /*quota*/) override {}
+  virtual void OnFlowControl(int64_t quota) override;
 
   // Called when the remote server has Started the WebSocket Closing
   // Handshake. The client should not attempt to send any more messages after
diff --git a/src/cobalt/websocket/mock_websocket_channel.cc b/src/cobalt/websocket/mock_websocket_channel.cc
new file mode 100644
index 0000000..8fc17cf
--- /dev/null
+++ b/src/cobalt/websocket/mock_websocket_channel.cc
@@ -0,0 +1,23 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "cobalt/websocket/mock_websocket_channel.h"
+#include "cobalt/websocket/cobalt_web_socket_event_handler.h"
+
+// Generated constructors and destructors for GMock objects are very large. By
+// putting them in a separate file we can speed up compile times.
+
+namespace cobalt {
+namespace websocket {
+
+MockWebSocketChannel::MockWebSocketChannel(
+    WebSocketImpl* impl, network::NetworkModule* network_module)
+    : net::WebSocketChannel(std::unique_ptr<net::WebSocketEventInterface>(
+                                new CobaltWebSocketEventHandler(impl)),
+                            network_module->url_request_context()) {}
+
+MockWebSocketChannel::~MockWebSocketChannel() = default;
+
+}  // namespace websocket
+}  // namespace cobalt
\ No newline at end of file
diff --git a/src/cobalt/websocket/mock_websocket_channel.h b/src/cobalt/websocket/mock_websocket_channel.h
new file mode 100644
index 0000000..85bb534
--- /dev/null
+++ b/src/cobalt/websocket/mock_websocket_channel.h
@@ -0,0 +1,58 @@
+// Copyright 2019 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef COBALT_WEBSOCKET_MOCK_WEBSOCKET_CHANNEL_H_
+#define COBALT_WEBSOCKET_MOCK_WEBSOCKET_CHANNEL_H_
+
+#include <memory>
+
+#include "base/memory/scoped_refptr.h"
+#include "base/synchronization/lock.h"
+#include "cobalt/network/network_module.h"
+#include "cobalt/websocket/web_socket_impl.h"
+#include "net/websockets/websocket_channel.h"
+#include "testing/gmock/include/gmock/gmock.h"
+
+namespace cobalt {
+namespace websocket {
+
+class SourceLocation;
+
+class MockWebSocketChannel : public net::WebSocketChannel {
+ public:
+  MockWebSocketChannel(WebSocketImpl* impl,
+                       network::NetworkModule* network_module);
+  ~MockWebSocketChannel();
+
+  MOCK_METHOD4(MockSendFrame,
+               net::WebSocketChannel::ChannelState(
+                   bool fin, net::WebSocketFrameHeader::OpCode op_code,
+                   scoped_refptr<net::IOBuffer> buffer, size_t buffer_size));
+  net::WebSocketChannel::ChannelState SendFrame(
+      bool fin, net::WebSocketFrameHeader::OpCode op_code,
+      scoped_refptr<net::IOBuffer> buffer, size_t buffer_size) override {
+    base::AutoLock scoped_lock(lock_);
+    return MockSendFrame(fin, op_code, buffer, buffer_size);
+  }
+
+  base::Lock& lock() { return lock_; }
+
+ private:
+  base::Lock lock_;
+};
+
+}  // namespace websocket
+}  // namespace cobalt
+
+#endif  // COBALT_WEBSOCKET_MOCK_WEBSOCKET_CHANNEL_H_
\ No newline at end of file
diff --git a/src/cobalt/websocket/web_socket.h b/src/cobalt/websocket/web_socket.h
index 39ef2ec..1334aeb 100644
--- a/src/cobalt/websocket/web_socket.h
+++ b/src/cobalt/websocket/web_socket.h
@@ -242,6 +242,7 @@
   FRIEND_TEST_ALL_PREFIXES(WebSocketTest, FailInvalidSubProtocols);
   FRIEND_TEST_ALL_PREFIXES(WebSocketTest, SubProtocols);
   FRIEND_TEST_ALL_PREFIXES(WebSocketTest, DuplicatedSubProtocols);
+  friend class WebSocketImplTest;
 
   DISALLOW_COPY_AND_ASSIGN(WebSocket);
 };
diff --git a/src/cobalt/websocket/web_socket_impl.cc b/src/cobalt/websocket/web_socket_impl.cc
index 844d8f7..19e2062 100644
--- a/src/cobalt/websocket/web_socket_impl.cc
+++ b/src/cobalt/websocket/web_socket_impl.cc
@@ -27,9 +27,6 @@
 #include "cobalt/base/polymorphic_downcast.h"
 #include "cobalt/websocket/web_socket.h"
 #include "net/http/http_util.h"
-#include "net/websockets/websocket_errors.h"
-#include "net/websockets/websocket_frame.h"
-#include "net/websockets/websocket_handshake_stream_create_helper.h"
 #include "starboard/memory.h"
 
 namespace cobalt {
@@ -72,6 +69,7 @@
   // priority thread might be required.  Investigation is needed.
   delegate_task_runner_ =
       network_module_->url_request_context_getter()->GetNetworkTaskRunner();
+  DCHECK(delegate_task_runner_);
   base::WaitableEvent channel_created_event(
       base::WaitableEvent::ResetPolicy::MANUAL,
       base::WaitableEvent::InitialState::NOT_SIGNALED);
@@ -163,6 +161,12 @@
                             selected_subprotocol));
 }
 
+void WebSocketImpl::OnFlowControl(int64_t quota) {
+  DCHECK(current_quota_ >= 0);
+  current_quota_ += quota;
+  ProcessSendQueue();
+}
+
 void WebSocketImpl::OnWebSocketConnected(
     const std::string &selected_subprotocol) {
   DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
@@ -260,13 +264,40 @@
     DLOG(WARNING) << "Attempt to send over a closed channel.";
     return;
   }
+  SendQueueMessage new_message = {io_buffer, length, op_code};
+  send_queue_.push(std::move(new_message));
+  ProcessSendQueue();
+}
 
-  // this behavior is not just an optimization, but required in case
-  // we are closing the connection
-  auto channel_state =
-      websocket_channel_->SendFrame(true /*fin*/, op_code, io_buffer, length);
-  if (channel_state == net::WebSocketChannel::CHANNEL_DELETED) {
-    websocket_channel_.reset();
+void WebSocketImpl::ProcessSendQueue() {
+  DCHECK(delegate_task_runner_->BelongsToCurrentThread());
+  while (current_quota_ > 0 && !send_queue_.empty()) {
+    SendQueueMessage message = send_queue_.front();
+    size_t current_message_length = message.length - sent_size_of_top_message_;
+    bool final = false;
+    if (current_quota_ < static_cast<int64_t>(current_message_length)) {
+      // The quota is not enough to send the whole top message.
+      scoped_refptr<net::IOBuffer> new_io_buffer(
+          new net::IOBuffer(static_cast<size_t>(current_quota_)));
+      SbMemoryCopy(new_io_buffer->data(),
+                   message.io_buffer->data() + sent_size_of_top_message_,
+                   current_quota_);
+      sent_size_of_top_message_ += current_quota_;
+      message.io_buffer = new_io_buffer;
+      current_message_length = current_quota_;
+      current_quota_ = 0;
+    } else {
+      // Send all of the remaining data of the top message.
+      final = true;
+      send_queue_.pop();
+      sent_size_of_top_message_ = 0;
+      current_quota_ -= current_message_length;
+    }
+    auto channel_state = websocket_channel_->SendFrame(
+        final, message.op_code, message.io_buffer, current_message_length);
+    if (channel_state == net::WebSocketChannel::CHANNEL_DELETED) {
+      websocket_channel_.reset();
+    }
   }
 }
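
The hunk above replaces the direct SendFrame() call with a quota-driven send queue: SendHelper() now only enqueues the message, OnFlowControl() adds quota, and ProcessSendQueue() splits the front message so that only the last fragment is sent with fin set. A self-contained sketch of the same splitting loop, with std::string standing in for net::IOBuffer (QuotaChunker and Message are illustrative names, not from the patch):

    // Sketch of the quota-driven fragmentation performed by ProcessSendQueue().
    #include <cstdint>
    #include <queue>
    #include <string>
    #include <utility>

    struct Message {
      std::string payload;
    };

    class QuotaChunker {
     public:
      void Enqueue(Message m) { queue_.push(std::move(m)); }  // ~SendHelper()
      void AddQuota(int64_t quota) { quota_ += quota; }       // ~OnFlowControl()

      // Calls send_frame(fragment, final) for every frame that fits in the
      // currently available quota; mirrors ProcessSendQueue() above.
      template <typename SendFrameFn>
      void Process(SendFrameFn send_frame) {
        while (quota_ > 0 && !queue_.empty()) {
          Message& top = queue_.front();
          const size_t remaining = top.payload.size() - sent_of_top_;
          const bool final_fragment = quota_ >= static_cast<int64_t>(remaining);
          const size_t to_send =
              final_fragment ? remaining : static_cast<size_t>(quota_);
          send_frame(top.payload.substr(sent_of_top_, to_send), final_fragment);
          if (final_fragment) {
            queue_.pop();
            sent_of_top_ = 0;
          } else {
            sent_of_top_ += to_send;
          }
          quota_ -= to_send;
        }
      }

     private:
      int64_t quota_ = 0;
      std::queue<Message> queue_;
      size_t sent_of_top_ = 0;
    };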
 
diff --git a/src/cobalt/websocket/web_socket_impl.h b/src/cobalt/websocket/web_socket_impl.h
index 5e5977d..6c65620 100644
--- a/src/cobalt/websocket/web_socket_impl.h
+++ b/src/cobalt/websocket/web_socket_impl.h
@@ -16,6 +16,7 @@
 #define COBALT_WEBSOCKET_WEB_SOCKET_IMPL_H_
 
 #include <memory>
+#include <queue>
 #include <string>
 #include <vector>
 
@@ -32,7 +33,10 @@
 #include "cobalt/websocket/web_socket_message_container.h"
 #include "net/url_request/url_request_context_getter.h"
 #include "net/websockets/websocket_channel.h"
+#include "net/websockets/websocket_errors.h"
+#include "net/websockets/websocket_frame.h"
 #include "net/websockets/websocket_frame_parser.h"
+#include "net/websockets/websocket_handshake_stream_create_helper.h"
 #include "url/gurl.h"
 
 namespace cobalt {
@@ -85,6 +89,8 @@
 
   void OnHandshakeComplete(const std::string& selected_subprotocol);
 
+  void OnFlowControl(int64_t quota);
+
   struct CloseInfo {
     CloseInfo(const net::WebSocketError code, const std::string& reason)
         : code(code), reason(reason) {}
@@ -108,6 +114,7 @@
   bool SendHelper(const net::WebSocketFrameHeader::OpCode op_code,
                   const char* data, std::size_t length,
                   std::string* error_message);
+  void ProcessSendQueue();
 
   void OnWebSocketConnected(const std::string& selected_subprotocol);
   void OnWebSocketDisconnected(bool was_clean, uint16 code,
@@ -125,11 +132,23 @@
   std::string origin_;
   GURL connect_url_;
 
+  // Data buffering and flow control.
+  // Should only be modified on the delegate (network) thread.
+  int64_t current_quota_ = 0;
+  struct SendQueueMessage {
+    scoped_refptr<net::IOBuffer> io_buffer;
+    size_t length;
+    net::WebSocketFrameHeader::OpCode op_code;
+  };
+  std::queue<SendQueueMessage> send_queue_;
+  size_t sent_size_of_top_message_ = 0;
+
   scoped_refptr<base::SingleThreadTaskRunner> delegate_task_runner_;
   scoped_refptr<base::SingleThreadTaskRunner> owner_task_runner_;
 
   ~WebSocketImpl();
   friend class base::RefCountedThreadSafe<WebSocketImpl>;
+  friend class WebSocketImplTest;
 
   DISALLOW_COPY_AND_ASSIGN(WebSocketImpl);
 };
diff --git a/src/cobalt/websocket/web_socket_impl_test.cc b/src/cobalt/websocket/web_socket_impl_test.cc
new file mode 100644
index 0000000..1c2d0ac
--- /dev/null
+++ b/src/cobalt/websocket/web_socket_impl_test.cc
@@ -0,0 +1,230 @@
+// Copyright 2017 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "cobalt/websocket/web_socket_impl.h"
+#include "cobalt/websocket/web_socket.h"
+
+#include <memory>
+#include <vector>
+
+#include "base/memory/ref_counted.h"
+#include "base/test/scoped_task_environment.h"
+#include "cobalt/base/polymorphic_downcast.h"
+#include "cobalt/dom/dom_exception.h"
+#include "cobalt/dom/dom_settings.h"
+#include "cobalt/dom/window.h"
+#include "cobalt/network/network_module.h"
+#include "cobalt/script/script_exception.h"
+#include "cobalt/script/testing/mock_exception_state.h"
+#include "cobalt/websocket/mock_websocket_channel.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using ::testing::_;
+using ::testing::SaveArg;
+using ::testing::StrictMock;
+using ::testing::DefaultValue;
+using ::testing::Return;
+using cobalt::script::testing::MockExceptionState;
+
+namespace cobalt {
+namespace websocket {
+namespace {
+// These limits are copied from net::WebSocketChannel implementation.
+const int kDefaultSendQuotaHighWaterMark = 1 << 17;
+const int k800KB = 800;
+const int kTooMuch = kDefaultSendQuotaHighWaterMark + 1;
+const int kWayTooMuch = kDefaultSendQuotaHighWaterMark * 2 + 1;
+const int k512KB = 512;
+
+class FakeSettings : public dom::DOMSettings {
+ public:
+  FakeSettings()
+      : dom::DOMSettings(0, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
+                         &null_debugger_hooks_, NULL),
+        base_("https://127.0.0.1:1234") {
+    network_module_.reset(new network::NetworkModule());
+    this->set_network_module(network_module_.get());
+  }
+  const GURL& base_url() const override { return base_; }
+
+  // Public members, so that they're easier to access in tests.
+  base::NullDebuggerHooks null_debugger_hooks_;
+  GURL base_;
+  std::unique_ptr<network::NetworkModule> network_module_;
+};
+}  // namespace
+
+class WebSocketImplTest : public ::testing::Test {
+ public:
+  dom::DOMSettings* settings() const { return settings_.get(); }
+  void AddQuota(int quota) {
+    network_task_runner_->PostBlockingTask(
+        FROM_HERE,
+        base::Bind(&WebSocketImpl::OnFlowControl, websocket_impl_, quota));
+  }
+
+ protected:
+  WebSocketImplTest() : settings_(new FakeSettings()) {
+    std::vector<std::string> sub_protocols;
+    sub_protocols.push_back("chat");
+    // Use a local URL so WebSocket will not complain about the URL format.
+    ws_ = new WebSocket(settings(), "wss://127.0.0.1:1234", sub_protocols,
+                        &exception_state_, false);
+
+    websocket_impl_ = ws_->impl_;
+    network_task_runner_ = settings_->network_module()
+                               ->url_request_context_getter()
+                               ->GetNetworkTaskRunner();
+    // The holder is only created to be base::Passed() on the next line; it
+    // will be empty afterwards, so do not use it later.
+    network_task_runner_->PostBlockingTask(
+        FROM_HERE,
+        base::Bind(
+            [](scoped_refptr<WebSocketImpl> websocket_impl,
+               MockWebSocketChannel** mock_channel_slot,
+               dom::DOMSettings* settings) {
+              *mock_channel_slot = new MockWebSocketChannel(
+                  websocket_impl.get(), settings->network_module());
+              websocket_impl->websocket_channel_ =
+                  std::unique_ptr<net::WebSocketChannel>(*mock_channel_slot);
+            },
+            websocket_impl_, &mock_channel_, settings()));
+  }
+  ~WebSocketImplTest() {
+    network_task_runner_->PostBlockingTask(
+        FROM_HERE,
+        base::Bind(&WebSocketImpl::OnClose, websocket_impl_, true /*was_clean*/,
+                   net::kWebSocketNormalClosure /*error_code*/,
+                   "" /*close_reason*/));
+  }
+
+  base::test::ScopedTaskEnvironment env_;
+
+  std::unique_ptr<FakeSettings> settings_;
+  scoped_refptr<base::SingleThreadTaskRunner> network_task_runner_;
+  scoped_refptr<WebSocket> ws_;
+  scoped_refptr<WebSocketImpl> websocket_impl_;
+  MockWebSocketChannel* mock_channel_;
+  StrictMock<MockExceptionState> exception_state_;
+};
+
+TEST_F(WebSocketImplTest, NormalSizeRequest) {
+  // Normally the high water mark quota is granted on successful connection.
+  AddQuota(kDefaultSendQuotaHighWaterMark);
+
+  {
+    base::AutoLock scoped_lock(mock_channel_->lock());
+    // mock_channel_ is created and used on network thread.
+    EXPECT_CALL(
+        *mock_channel_,
+        MockSendFrame(true, net::WebSocketFrameHeader::kOpCodeText, _, k800KB))
+        .Times(1)
+        .WillOnce(Return(net::WebSocketChannel::CHANNEL_ALIVE));
+  }
+
+  char data[k800KB];
+  int32 buffered_amount = 0;
+  std::string error;
+  websocket_impl_->SendText(data, k800KB, &buffered_amount, &error);
+}
+
+TEST_F(WebSocketImplTest, LargeRequest) {
+  AddQuota(kDefaultSendQuotaHighWaterMark);
+
+  // mock_channel_ is created and used on network thread.
+  {
+    base::AutoLock scoped_lock(mock_channel_->lock());
+    EXPECT_CALL(*mock_channel_,
+                MockSendFrame(true, net::WebSocketFrameHeader::kOpCodeText, _,
+                              kDefaultSendQuotaHighWaterMark))
+        .Times(1)
+        .WillOnce(Return(net::WebSocketChannel::CHANNEL_ALIVE));
+  }
+
+  char data[kDefaultSendQuotaHighWaterMark];
+  int32 buffered_amount = 0;
+  std::string error;
+  websocket_impl_->SendText(data, kDefaultSendQuotaHighWaterMark,
+                            &buffered_amount, &error);
+}
+
+TEST_F(WebSocketImplTest, OverLimitRequest) {
+  AddQuota(kDefaultSendQuotaHighWaterMark);
+
+  // mock_channel_ is created and used on network thread.
+  {
+    base::AutoLock scoped_lock(mock_channel_->lock());
+    EXPECT_CALL(*mock_channel_,
+                MockSendFrame(false, net::WebSocketFrameHeader::kOpCodeText, _,
+                              kDefaultSendQuotaHighWaterMark))
+        .Times(2)
+        .WillRepeatedly(Return(net::WebSocketChannel::CHANNEL_ALIVE));
+
+    EXPECT_CALL(
+        *mock_channel_,
+        MockSendFrame(true, net::WebSocketFrameHeader::kOpCodeText, _, 1))
+        .Times(1)
+        .WillOnce(Return(net::WebSocketChannel::CHANNEL_ALIVE));
+  }
+
+  char data[kWayTooMuch];
+  int32 buffered_amount = 0;
+  std::string error;
+  websocket_impl_->SendText(data, kWayTooMuch, &buffered_amount, &error);
+
+  AddQuota(kDefaultSendQuotaHighWaterMark);
+  AddQuota(kDefaultSendQuotaHighWaterMark);
+}
+
+
+TEST_F(WebSocketImplTest, ReuseSocketForLargeRequest) {
+  AddQuota(kDefaultSendQuotaHighWaterMark);
+
+  // mock_channel_ is created and used on network thread.
+  {
+    base::AutoLock scoped_lock(mock_channel_->lock());
+    EXPECT_CALL(*mock_channel_,
+                MockSendFrame(false, net::WebSocketFrameHeader::kOpCodeBinary,
+                              _, kDefaultSendQuotaHighWaterMark))
+        .Times(1)
+        .WillOnce(Return(net::WebSocketChannel::CHANNEL_ALIVE));
+    EXPECT_CALL(
+        *mock_channel_,
+        MockSendFrame(true, net::WebSocketFrameHeader::kOpCodeBinary, _, 1))
+        .Times(1)
+        .WillOnce(Return(net::WebSocketChannel::CHANNEL_ALIVE));
+    EXPECT_CALL(*mock_channel_,
+                MockSendFrame(false, net::WebSocketFrameHeader::kOpCodeText, _,
+                              k512KB - 1))
+        .Times(1)
+        .WillOnce(Return(net::WebSocketChannel::CHANNEL_ALIVE));
+    EXPECT_CALL(*mock_channel_,
+                MockSendFrame(true, net::WebSocketFrameHeader::kOpCodeText, _,
+                              kTooMuch - (k512KB - 1)))
+        .Times(1)
+        .WillOnce(Return(net::WebSocketChannel::CHANNEL_ALIVE));
+  }
+
+  char data[kTooMuch];
+  int32 buffered_amount = 0;
+  std::string error;
+  websocket_impl_->SendBinary(data, kTooMuch, &buffered_amount, &error);
+  websocket_impl_->SendText(data, kTooMuch, &buffered_amount, &error);
+
+  AddQuota(k512KB);
+  AddQuota(kDefaultSendQuotaHighWaterMark);
+}
+
+}  // namespace websocket
+}  // namespace cobalt
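
A note on the frame sizes the last two tests expect, which follow from the quota arithmetic in ProcessSendQueue(): in OverLimitRequest, kWayTooMuch is 2 * kDefaultSendQuotaHighWaterMark + 1 bytes, so the initial grant yields one non-final full-quota frame, the second grant another, and the third grant the final 1-byte frame. In ReuseSocketForLargeRequest, the binary message (kTooMuch = kDefaultSendQuotaHighWaterMark + 1) consumes the initial grant as a non-final frame; the AddQuota(k512KB) grant of 512 bytes then finishes the binary message with its final 1 byte and starts the text message with a non-final frame of the remaining 511 (k512KB - 1) bytes, and the last grant sends the rest of the text message, kTooMuch - (k512KB - 1) bytes, as the final frame.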
diff --git a/src/cobalt/websocket/web_socket_test.cc b/src/cobalt/websocket/web_socket_test.cc
index 575b46c..f85d4bb 100644
--- a/src/cobalt/websocket/web_socket_test.cc
+++ b/src/cobalt/websocket/web_socket_test.cc
@@ -36,6 +36,7 @@
 namespace cobalt {
 namespace websocket {
 
+namespace {
 class FakeSettings : public dom::testing::StubEnvironmentSettings {
  public:
   FakeSettings() : base_("https://example.com") {
@@ -48,6 +49,7 @@
   GURL base_;
   std::unique_ptr<network::NetworkModule> network_module_;
 };
+}  // namespace
 
 class WebSocketTest : public ::testing::Test {
  public:
diff --git a/src/cobalt/websocket/websocket.gyp b/src/cobalt/websocket/websocket.gyp
index 88e2bad..c3bfb3a 100644
--- a/src/cobalt/websocket/websocket.gyp
+++ b/src/cobalt/websocket/websocket.gyp
@@ -47,11 +47,12 @@
       'type': '<(gtest_target_type)',
       'sources': [
         'web_socket_test.cc',
+        'mock_websocket_channel.cc',
+        'web_socket_impl_test.cc',
       ],
       'dependencies': [
         'websocket',
         '<(DEPTH)/cobalt/dom/dom.gyp:dom',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/url/url.gyp:url',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
@@ -68,6 +69,7 @@
           ],
         }],
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
 
     {
diff --git a/src/cobalt/xhr/url_fetcher_buffer_writer.cc b/src/cobalt/xhr/url_fetcher_buffer_writer.cc
new file mode 100644
index 0000000..f8c4378
--- /dev/null
+++ b/src/cobalt/xhr/url_fetcher_buffer_writer.cc
@@ -0,0 +1,303 @@
+// Copyright 2019 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "cobalt/xhr/url_fetcher_buffer_writer.h"
+
+#include "base/logging.h"
+#include "net/base/net_errors.h"
+#include "starboard/memory.h"
+
+namespace cobalt {
+namespace xhr {
+
+namespace {
+
+// Allocate 64KB if the total size is unknown, to avoid allocating small
+// buffers too many times.
+const int64_t kDefaultPreAllocateSizeInBytes = 64 * 1024;
+
+void ReleaseMemory(std::string* str) {
+  DCHECK(str);
+  std::string empty;
+  str->swap(empty);
+}
+
+void ReleaseMemory(script::PreallocatedArrayBufferData* data) {
+  DCHECK(data);
+  script::PreallocatedArrayBufferData empty;
+  data->Swap(&empty);
+}
+
+}  // namespace
+
+URLFetcherResponseWriter::Buffer::Buffer(Type type) : type_(type) {}
+
+void URLFetcherResponseWriter::Buffer::DisablePreallocate() {
+  base::AutoLock auto_lock(lock_);
+
+  DCHECK_EQ(GetSize_Locked(), 0u);
+  allow_preallocate_ = false;
+}
+
+void URLFetcherResponseWriter::Buffer::Clear() {
+  base::AutoLock auto_lock(lock_);
+
+  ReleaseMemory(&data_as_string_);
+  ReleaseMemory(&copy_of_data_as_string_);
+  ReleaseMemory(&data_as_array_buffer_);
+
+  download_progress_ = 0;
+  data_as_array_buffer_size_ = 0;
+}
+
+int64_t URLFetcherResponseWriter::Buffer::GetAndResetDownloadProgress() {
+  base::AutoLock auto_lock(lock_);
+  download_progress_ = GetSize_Locked();
+  return static_cast<int64_t>(download_progress_);
+}
+
+bool URLFetcherResponseWriter::Buffer::HasProgressSinceLastGetAndReset() const {
+  base::AutoLock auto_lock(lock_);
+  return GetSize_Locked() > download_progress_;
+}
+
+const std::string&
+URLFetcherResponseWriter::Buffer::GetReferenceOfStringAndSeal() {
+  base::AutoLock auto_lock(lock_);
+
+  UpdateType_Locked(kString);
+  allow_write_ = false;
+
+  return data_as_string_;
+}
+
+const std::string&
+URLFetcherResponseWriter::Buffer::GetTemporaryReferenceOfString() {
+  base::AutoLock auto_lock(lock_);
+
+  // This function could be further optimized by always returning a reference
+  // to |data_as_string_| and only making a copy when |data_as_string_| is
+  // extended.  This is not done because GetTemporaryReferenceOfString() is
+  // currently not triggered: it is only called when the JS app retrieves
+  // responseText while the request is still in progress.
+
+  if (type_ == kString) {
+    copy_of_data_as_string_ = data_as_string_;
+  } else {
+    DCHECK_EQ(type_, kArrayBuffer);
+    const char* begin = static_cast<const char*>(data_as_array_buffer_.data());
+    copy_of_data_as_string_.assign(begin, begin + data_as_array_buffer_size_);
+  }
+
+  return copy_of_data_as_string_;
+}
+
+void URLFetcherResponseWriter::Buffer::GetAndReset(std::string* str) {
+  DCHECK(str);
+
+  ReleaseMemory(str);
+
+  base::AutoLock auto_lock(lock_);
+
+  UpdateType_Locked(kString);
+
+  if (capacity_known_ && data_as_string_.size() != data_as_string_.capacity()) {
+    DLOG(WARNING) << "String size " << data_as_string_.size()
+                  << " is different than its preset capacity "
+                  << data_as_string_.capacity();
+  }
+
+  data_as_string_.swap(*str);
+}
+
+void URLFetcherResponseWriter::Buffer::GetAndReset(
+    PreallocatedArrayBufferData* data) {
+  DCHECK(data);
+
+  ReleaseMemory(data);
+
+  base::AutoLock auto_lock(lock_);
+
+  UpdateType_Locked(kArrayBuffer);
+
+  if (data_as_array_buffer_.byte_length() != data_as_array_buffer_size_) {
+    DCHECK_LT(data_as_array_buffer_size_, data_as_array_buffer_.byte_length());
+    DLOG_IF(WARNING, capacity_known_)
+        << "ArrayBuffer size " << data_as_array_buffer_size_
+        << " is different than its preset capacity "
+        << data_as_array_buffer_.byte_length();
+    data_as_array_buffer_.Resize(data_as_array_buffer_size_);
+  }
+  data_as_array_buffer_.Swap(data);
+}
+
+void URLFetcherResponseWriter::Buffer::MaybePreallocate(int64_t capacity) {
+  base::AutoLock auto_lock(lock_);
+
+  if (!allow_preallocate_) {
+    return;
+  }
+
+  if (capacity < 0) {
+    capacity = kDefaultPreAllocateSizeInBytes;
+  } else {
+    capacity_known_ = true;
+  }
+
+  if (capacity == 0) {
+    return;
+  }
+
+  switch (type_) {
+    case kString:
+      DCHECK_EQ(data_as_string_.size(), 0u);
+      data_as_string_.reserve(capacity);
+      return;
+    case kArrayBuffer:
+      DCHECK_EQ(data_as_array_buffer_size_, 0u);
+      data_as_array_buffer_.Resize(capacity);
+      return;
+  }
+  NOTREACHED();
+}
+
+void URLFetcherResponseWriter::Buffer::Write(const void* buffer,
+                                             int num_bytes) {
+  DCHECK_GE(num_bytes, 0);
+
+  if (num_bytes <= 0) {
+    return;
+  }
+
+  base::AutoLock auto_lock(lock_);
+
+  DCHECK(allow_write_);
+
+  if (!allow_write_) {
+    return;
+  }
+
+  if (type_ == kString) {
+    if (capacity_known_ &&
+        num_bytes + data_as_string_.size() >= data_as_string_.capacity()) {
+      SB_LOG(WARNING) << "Data written is larger than the preset capacity "
+                      << data_as_string_.capacity();
+    }
+    data_as_string_.append(static_cast<const char*>(buffer), num_bytes);
+    return;
+  }
+
+  DCHECK_EQ(type_, kArrayBuffer);
+  if (data_as_array_buffer_size_ + num_bytes >
+      data_as_array_buffer_.byte_length()) {
+    if (capacity_known_) {
+      SB_LOG(WARNING) << "Data written is larger than the preset capacity "
+                      << data_as_array_buffer_.byte_length();
+    }
+    data_as_array_buffer_.Resize(data_as_array_buffer_size_ + num_bytes);
+  }
+
+  auto destination = static_cast<uint8_t*>(data_as_array_buffer_.data()) +
+                     data_as_array_buffer_size_;
+  SbMemoryCopy(destination, buffer, num_bytes);
+  data_as_array_buffer_size_ += num_bytes;
+}
+
+size_t URLFetcherResponseWriter::Buffer::GetSize_Locked() const {
+  lock_.AssertAcquired();
+
+  switch (type_) {
+    case kString:
+      return data_as_string_.size();
+    case kArrayBuffer:
+      return data_as_array_buffer_size_;
+  }
+  NOTREACHED();
+  return 0;
+}
+
+void URLFetcherResponseWriter::Buffer::UpdateType_Locked(Type type) {
+  lock_.AssertAcquired();
+
+  if (type_ == type) {
+    return;
+  }
+
+  DCHECK(allow_write_);
+
+  DLOG_IF(WARNING, GetSize_Locked() > 0)
+      << "Change response type from " << type_ << " to " << type
+      << " after response is started, which is less efficient.";
+
+  if (type_ == kString) {
+    DCHECK_EQ(type, kArrayBuffer);
+    DCHECK_EQ(data_as_array_buffer_size_, 0u);
+    DCHECK_EQ(data_as_array_buffer_.byte_length(), 0u);
+  } else {
+    DCHECK_EQ(type_, kArrayBuffer);
+    DCHECK_EQ(type, kString);
+    DCHECK_EQ(data_as_string_.size(), 0u);
+  }
+
+  type_ = type;
+
+  if (type == kArrayBuffer) {
+    data_as_array_buffer_.Resize(data_as_string_.capacity());
+    data_as_array_buffer_size_ = data_as_string_.size();
+    SbMemoryCopy(data_as_array_buffer_.data(), data_as_string_.data(),
+                 data_as_array_buffer_size_);
+
+    ReleaseMemory(&data_as_string_);
+    ReleaseMemory(&copy_of_data_as_string_);
+    return;
+  }
+
+  data_as_string_.reserve(data_as_array_buffer_.byte_length());
+  data_as_string_.append(static_cast<const char*>(data_as_array_buffer_.data()),
+                         data_as_array_buffer_size_);
+
+  ReleaseMemory(&data_as_array_buffer_);
+  data_as_array_buffer_size_ = 0;
+}
+
+URLFetcherResponseWriter::URLFetcherResponseWriter(
+    const scoped_refptr<Buffer>& buffer)
+    : buffer_(buffer) {
+  DCHECK(buffer_);
+}
+
+URLFetcherResponseWriter::~URLFetcherResponseWriter() = default;
+
+int URLFetcherResponseWriter::Initialize(
+    net::CompletionOnceCallback /*callback*/) {
+  return net::OK;
+}
+
+void URLFetcherResponseWriter::OnResponseStarted(int64_t content_length) {
+  buffer_->MaybePreallocate(content_length);
+}
+
+int URLFetcherResponseWriter::Write(net::IOBuffer* buffer, int num_bytes,
+                                    net::CompletionOnceCallback /*callback*/) {
+  buffer_->Write(buffer->data(), num_bytes);
+  return num_bytes;
+}
+
+int URLFetcherResponseWriter::Finish(int /*net_error*/,
+                                     net::CompletionOnceCallback /*callback*/) {
+  return net::OK;
+}
+
+}  // namespace xhr
+}  // namespace cobalt
diff --git a/src/cobalt/xhr/url_fetcher_buffer_writer.h b/src/cobalt/xhr/url_fetcher_buffer_writer.h
new file mode 100644
index 0000000..c82ce32
--- /dev/null
+++ b/src/cobalt/xhr/url_fetcher_buffer_writer.h
@@ -0,0 +1,112 @@
+// Copyright 2019 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef COBALT_XHR_URL_FETCHER_BUFFER_WRITER_H_
+#define COBALT_XHR_URL_FETCHER_BUFFER_WRITER_H_
+
+#include <memory>
+#include <string>
+
+#include "base/callback.h"
+#include "base/memory/ref_counted.h"
+#include "base/synchronization/lock.h"
+#include "base/task_runner.h"
+#include "cobalt/script/array_buffer.h"
+#include "net/base/io_buffer.h"
+#include "net/url_request/url_fetcher_response_writer.h"
+
+namespace cobalt {
+namespace xhr {
+
+class URLFetcherResponseWriter : public net::URLFetcherResponseWriter {
+ public:
+  class Buffer : public base::RefCountedThreadSafe<Buffer> {
+   public:
+    typedef script::PreallocatedArrayBufferData PreallocatedArrayBufferData;
+
+    enum Type {
+      kString,
+      kArrayBuffer,
+    };
+
+    explicit Buffer(Type type);
+
+    void DisablePreallocate();
+    void Clear();
+
+    int64_t GetAndResetDownloadProgress();
+    bool HasProgressSinceLastGetAndReset() const;
+
+    // When the following function is called, Write() can no longer be called to
+    // append more data.  It is the responsibility of the user of this class to
+    // ensure such behavior.
+    const std::string& GetReferenceOfStringAndSeal();
+    // Returns a reference to a std::string containing a copy of the data
+    // downloaded so far.  The reference is guaranteed to be valid until another
+    // public member function is called on this object.
+    const std::string& GetTemporaryReferenceOfString();
+
+    void GetAndReset(std::string* str);
+    void GetAndReset(PreallocatedArrayBufferData* data);
+
+    void MaybePreallocate(int64_t capacity);
+    void Write(const void* buffer, int num_bytes);
+
+   private:
+    size_t GetSize_Locked() const;
+
+    // It is possible (but extremely rare) that the JS app changes the response
+    // type after some data has been written on the network thread; in that
+    // case we allow the buffer type to change dynamically.
+    void UpdateType_Locked(Type type);
+
+    Type type_;
+    bool allow_preallocate_ = true;
+    bool capacity_known_ = false;
+
+    // This class can be accessed by both network and MainWebModule threads.
+    mutable base::Lock lock_;
+
+    bool allow_write_ = true;
+    size_t download_progress_ = 0;
+
+    // Data is stored in one of the following buffers, depending on the value
+    // of |type_|.
+    std::string data_as_string_;
+    // For use in GetTemporaryReferenceOfString() so it can return a reference.
+    std::string copy_of_data_as_string_;
+    PreallocatedArrayBufferData data_as_array_buffer_;
+    size_t data_as_array_buffer_size_ = 0;
+  };
+
+  explicit URLFetcherResponseWriter(const scoped_refptr<Buffer>& buffer);
+  ~URLFetcherResponseWriter() override;
+
+  // URLFetcherResponseWriter overrides:
+  int Initialize(net::CompletionOnceCallback callback) override;
+  void OnResponseStarted(int64_t content_length) override;
+  int Write(net::IOBuffer* buffer, int num_bytes,
+            net::CompletionOnceCallback callback) override;
+  int Finish(int net_error, net::CompletionOnceCallback callback) override;
+
+ private:
+  scoped_refptr<Buffer> buffer_;
+
+  DISALLOW_COPY_AND_ASSIGN(URLFetcherResponseWriter);
+};
+
+}  // namespace xhr
+}  // namespace cobalt
+
+#endif  // COBALT_XHR_URL_FETCHER_BUFFER_WRITER_H_
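
A minimal usage sketch for the Buffer class above, not part of the patch: it is single-threaded for brevity, whereas in Cobalt Write() runs on the network thread and the GetAndReset()/GetTemporaryReferenceOfString() accessors run on the MainWebModule thread.

    // Sketch: the expected life cycle of a URLFetcherResponseWriter::Buffer.
    #include <string>

    #include "base/logging.h"
    #include "cobalt/xhr/url_fetcher_buffer_writer.h"

    namespace cobalt {
    namespace xhr {

    void SketchBufferUsage() {
      scoped_refptr<URLFetcherResponseWriter::Buffer> buffer(
          new URLFetcherResponseWriter::Buffer(
              URLFetcherResponseWriter::Buffer::kString));

      buffer->MaybePreallocate(-1);  // Size unknown: reserves the 64KB default.
      buffer->Write("hello ", 6);    // Normally called from the response writer.
      buffer->Write("world", 5);

      std::string text;
      buffer->GetAndReset(&text);    // Moves the accumulated bytes out.
      DCHECK_EQ(text, "hello world");

      // Switching to an ArrayBuffer after more data has arrived is allowed; the
      // buffer converts lazily and logs a warning about the late type change.
      buffer->Write("more", 4);
      script::PreallocatedArrayBufferData as_array_buffer;
      buffer->GetAndReset(&as_array_buffer);
    }

    }  // namespace xhr
    }  // namespace cobalt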
diff --git a/src/cobalt/xhr/xhr.gyp b/src/cobalt/xhr/xhr.gyp
index eff72e7..86c029d 100644
--- a/src/cobalt/xhr/xhr.gyp
+++ b/src/cobalt/xhr/xhr.gyp
@@ -21,6 +21,8 @@
       'target_name': 'xhr',
       'type': 'static_library',
       'sources': [
+        'url_fetcher_buffer_writer.cc',
+        'url_fetcher_buffer_writer.h',
         'xhr_response_data.cc',
         'xhr_response_data.h',
         'xml_http_request.cc',
@@ -62,7 +64,6 @@
       'dependencies': [
         '<(DEPTH)/cobalt/base/base.gyp:base',
         '<(DEPTH)/cobalt/dom/dom.gyp:dom',
-        '<(DEPTH)/cobalt/test/test.gyp:run_all_unittests',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
         'xhr',
@@ -71,6 +72,7 @@
         #       ScriptValueFactory has non-virtual method CreatePromise().
         '<(DEPTH)/cobalt/script/engine.gyp:engine',
       ],
+      'includes': [ '<(DEPTH)/cobalt/test/test.gypi' ],
     },
     {
       'target_name': 'xhr_test_deploy',
diff --git a/src/cobalt/xhr/xml_http_request.cc b/src/cobalt/xhr/xml_http_request.cc
index de95cc4..0ae707f 100644
--- a/src/cobalt/xhr/xml_http_request.cc
+++ b/src/cobalt/xhr/xml_http_request.cc
@@ -51,10 +51,6 @@
 // How many milliseconds must elapse between each progress event notification.
 const int kProgressPeriodMs = 50;
 
-// Allocate 64KB on receiving the first chunk to avoid allocating small buffer
-// too many times.
-const size_t kInitialReceivingBufferSize = 64 * 1024;
-
 const char* kResponseTypes[] = {
     "",             // kDefault
     "text",         // kText
@@ -168,6 +164,8 @@
 
 XMLHttpRequest::XMLHttpRequest(script::EnvironmentSettings* settings)
     : XMLHttpRequestEventTarget(settings),
+      response_body_(new URLFetcherResponseWriter::Buffer(
+          URLFetcherResponseWriter::Buffer::kString)),
       settings_(base::polymorphic_downcast<dom::DOMSettings*>(settings)),
       state_(kUnsent),
       response_type_(kDefault),
@@ -203,7 +201,7 @@
   }
   ChangeState(kUnsent);
 
-  response_body_.Clear();
+  response_body_->Clear();
   response_array_buffer_reference_.reset();
 }
 
@@ -485,7 +483,14 @@
     return base::EmptyString();
   }
 
-  return response_body_.string();
+  // Note that the conversion from |response_body_| to std::string when |state_|
+  // isn't kDone isn't efficient for large responses.  Fortunately this feature
+  // is rarely used.
+  if (state_ == kLoading) {
+    LOG(WARNING) << "Retrieving responseText while loading can be inefficient.";
+    return response_body_->GetTemporaryReferenceOfString();
+  }
+  return response_body_->GetReferenceOfStringAndSeal();
 }
 
 // https://www.w3.org/TR/2014/WD-XMLHttpRequest-20140130/#the-responsexml-attribute
@@ -659,19 +664,6 @@
     fetch_mode_callback_->value().Run(is_cross_origin_);
   }
 
-  // Reserve space for the content in the case of a regular XHR request.
-  DCHECK_EQ(response_body_.size(), 0u);
-  if (!fetch_callback_) {
-    const int64 content_length = http_response_headers_->GetContentLength();
-
-    // If we know the eventual content length, allocate the total response body.
-    // Otherwise just reserve a reasonably large initial chunk.
-    size_t bytes_to_reserve = content_length > 0
-                                  ? static_cast<size_t>(content_length)
-                                  : kInitialReceivingBufferSize;
-    response_body_.Reserve(bytes_to_reserve);
-  }
-
   // Further filter response headers as XHR's mode is cors
   if (is_cross_origin_) {
     size_t iter = 0;
@@ -707,7 +699,7 @@
 
   ChangeState(kHeadersReceived);
 
-  UpdateProgress();
+  UpdateProgress(0);
 }
 
 void XMLHttpRequest::OnURLFetchDownloadProgress(
@@ -717,27 +709,19 @@
   DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
   DCHECK_NE(state_, kDone);
 
-  auto* download_data_writer =
-      base::polymorphic_downcast<loader::URLFetcherStringWriter*>(
-          source->GetResponseWriter());
-  std::unique_ptr<std::string> download_data = download_data_writer->data();
-  if (!download_data.get() || download_data->empty()) {
+  if (!response_body_->HasProgressSinceLastGetAndReset()) {
     return;
   }
-  // Preserve the response body only for regular XHR requests. Fetch requests
-  // process the response in pieces, so do not need to keep the whole response.
-  if (!fetch_callback_) {
-    response_body_.Append(reinterpret_cast<const uint8*>(download_data->data()),
-                          download_data->size());
-  }
 
   // Signal to JavaScript that new data is now available.
   ChangeState(kLoading);
 
   if (fetch_callback_) {
+    std::string downloaded_data;
+    response_body_->GetAndReset(&downloaded_data);
     script::Handle<script::Uint8Array> data =
         script::Uint8Array::New(settings_->global_environment(),
-                                download_data->data(), download_data->size());
+                                downloaded_data.data(), downloaded_data.size());
     fetch_callback_->value().Run(data);
   }
 
@@ -746,7 +730,9 @@
   const base::TimeDelta elapsed(now - last_progress_time_);
   if (elapsed > base::TimeDelta::FromMilliseconds(kProgressPeriodMs)) {
     last_progress_time_ = now;
-    UpdateProgress();
+    // TODO: Investigate whether we have to fire a progress event with 0 loaded
+    // bytes when used as the Fetch API.
+    UpdateProgress(response_body_->GetAndResetDownloadProgress());
   }
 }
 
@@ -787,7 +773,7 @@
       FireProgressEvent(upload_, base::Tokens::loadend());
     }
     ChangeState(kDone);
-    UpdateProgress();
+    UpdateProgress(response_body_->GetAndResetDownloadProgress());
     // Undo the ref we added in Send()
     DecrementActiveRequests();
   } else {
@@ -1038,13 +1024,14 @@
     // The request is done so it is safe to only keep the ArrayBuffer and clear
     // |response_body_|.  As |response_body_| will not be used unless the
     // request is re-opened.
-    auto array_buffer =
-        script::ArrayBuffer::New(settings_->global_environment(),
-                                 response_body_.data(), response_body_.size());
+    std::unique_ptr<script::PreallocatedArrayBufferData> downloaded_data(
+        new script::PreallocatedArrayBufferData());
+    response_body_->GetAndReset(downloaded_data.get());
+    auto array_buffer = script::ArrayBuffer::New(
+        settings_->global_environment(), std::move(downloaded_data));
     response_array_buffer_reference_.reset(
         new script::ScriptValue<script::ArrayBuffer>::Reference(this,
                                                                 array_buffer));
-    response_body_.Clear();
     return array_buffer;
   } else {
     return script::Handle<script::ArrayBuffer>(
@@ -1052,10 +1039,9 @@
   }
 }
 
-void XMLHttpRequest::UpdateProgress() {
+void XMLHttpRequest::UpdateProgress(int64_t received_length) {
   DCHECK(http_response_headers_);
   const int64 content_length = http_response_headers_->GetContentLength();
-  const int64 received_length = static_cast<int64>(response_body_.size());
   const bool length_computable =
       content_length > 0 && received_length <= content_length;
   const uint64 total =
@@ -1111,15 +1097,24 @@
 void XMLHttpRequest::StartRequest(const std::string& request_body) {
   TRACK_MEMORY_SCOPE("XHR");
 
-  response_body_.Clear();
   response_array_buffer_reference_.reset();
 
   network::NetworkModule* network_module =
       settings_->fetcher_factory()->network_module();
   url_fetcher_ = net::URLFetcher::Create(request_url_, method_, this);
   url_fetcher_->SetRequestContext(network_module->url_request_context_getter());
+  if (fetch_callback_) {
+    response_body_ = new URLFetcherResponseWriter::Buffer(
+        URLFetcherResponseWriter::Buffer::kString);
+    response_body_->DisablePreallocate();
+  } else {
+    response_body_ = new URLFetcherResponseWriter::Buffer(
+        response_type_ == kArrayBuffer
+            ? URLFetcherResponseWriter::Buffer::kArrayBuffer
+            : URLFetcherResponseWriter::Buffer::kString);
+  }
   std::unique_ptr<net::URLFetcherResponseWriter> download_data_writer(
-      new loader::URLFetcherStringWriter());
+      new URLFetcherResponseWriter(response_body_));
   url_fetcher_->SaveResponseWithWriter(std::move(download_data_writer));
   // Don't retry, let the caller deal with it.
   url_fetcher_->SetAutomaticallyRetryOn5xx(false);
@@ -1197,9 +1192,11 @@
       (xhr.response_type_ == XMLHttpRequest::kDefault ||
        xhr.response_type_ == XMLHttpRequest::kText)) {
     size_t kMaxSize = 4096;
-    response_text = base::StringPiece(
-        reinterpret_cast<const char*>(xhr.response_body_.data()),
-        std::min(kMaxSize, xhr.response_body_.size()));
+    const auto& response_body =
+        xhr.response_body_->GetTemporaryReferenceOfString();
+    response_text =
+        base::StringPiece(reinterpret_cast<const char*>(response_body.data()),
+                          std::min(kMaxSize, response_body.size()));
   }
 
   std::string xhr_out = base::StringPrintf(
@@ -1231,6 +1228,8 @@
 
 // https://www.w3.org/TR/2014/WD-XMLHttpRequest-20140130/#document-response-entity-body
 scoped_refptr<dom::Document> XMLHttpRequest::GetDocumentResponseEntityBody() {
+  DCHECK_EQ(state_, kDone);
+
   // Step 1..5
   const std::string final_mime_type =
       mime_type_override_.empty() ? response_mime_type_ : mime_type_override_;
@@ -1250,8 +1249,8 @@
       base::Bind(&XMLHttpRequest::XMLDecoderLoadCompleteCallback,
                  base::Unretained(this)));
   has_xml_decoder_error_ = false;
-  xml_decoder.DecodeChunk(response_body_.string().c_str(),
-                          response_body_.string().size());
+  xml_decoder.DecodeChunk(response_body_->GetReferenceOfStringAndSeal().c_str(),
+                          response_body_->GetReferenceOfStringAndSeal().size());
   xml_decoder.Finish();
   if (has_xml_decoder_error_) {
     return NULL;
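
Note: the XHR changes above route response data through a shared buffer that is drained with GetAndReset() instead of being copied out of a growing XhrResponseData. The actual URLFetcherResponseWriter::Buffer lives in cobalt/xhr/url_fetcher_buffer_writer.h and is not part of this hunk; the snippet below is only a minimal, self-contained sketch of the get-and-reset idea, using a hypothetical GetAndResetBuffer class, to show how a producer can keep appending while a consumer takes ownership of everything received so far without an extra copy.

    #include <cstddef>
    #include <cstdio>
    #include <mutex>
    #include <string>

    // Hypothetical illustration only; not the Cobalt class used above.
    class GetAndResetBuffer {
     public:
      void Append(const char* data, size_t size) {
        std::lock_guard<std::mutex> lock(mutex_);
        data_.append(data, size);
      }

      // Moves the accumulated bytes out and leaves the buffer empty, so the
      // network side can continue appending new data afterwards.
      void GetAndReset(std::string* out) {
        std::lock_guard<std::mutex> lock(mutex_);
        out->clear();
        data_.swap(*out);
      }

     private:
      std::mutex mutex_;
      std::string data_;
    };

    int main() {
      GetAndResetBuffer buffer;
      buffer.Append("hello ", 6);
      buffer.Append("world", 5);
      std::string chunk;
      buffer.GetAndReset(&chunk);  // chunk == "hello world", buffer now empty.
      std::printf("%s (%zu bytes)\n", chunk.c_str(), chunk.size());
      return 0;
    }
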
diff --git a/src/cobalt/xhr/xml_http_request.h b/src/cobalt/xhr/xml_http_request.h
index 6ff0f76..4dd5375 100644
--- a/src/cobalt/xhr/xml_http_request.h
+++ b/src/cobalt/xhr/xml_http_request.h
@@ -34,7 +34,7 @@
 #include "cobalt/script/global_environment.h"
 #include "cobalt/script/typed_arrays.h"
 #include "cobalt/script/union_type.h"
-#include "cobalt/xhr/xhr_response_data.h"
+#include "cobalt/xhr/url_fetcher_buffer_writer.h"
 #include "cobalt/xhr/xml_http_request_event_target.h"
 #include "cobalt/xhr/xml_http_request_upload.h"
 #include "net/http/http_request_headers.h"
@@ -216,7 +216,7 @@
   // Return array buffer response body as an ArrayBuffer.
   script::Handle<script::ArrayBuffer> response_array_buffer();
 
-  void UpdateProgress();
+  void UpdateProgress(int64_t received_length);
 
   void StartRequest(const std::string& request_body);
 
@@ -253,7 +253,7 @@
 
   std::unique_ptr<net::URLFetcher> url_fetcher_;
   scoped_refptr<net::HttpResponseHeaders> http_response_headers_;
-  XhrResponseData response_body_;
+  scoped_refptr<URLFetcherResponseWriter::Buffer> response_body_;
   std::unique_ptr<script::ScriptValue<script::ArrayBuffer>::Reference>
       response_array_buffer_reference_;
   scoped_refptr<XMLHttpRequestUpload> upload_;
diff --git a/src/content/browser/speech/speech.gyp b/src/content/browser/speech/speech.gyp
index 397b372..e562177 100644
--- a/src/content/browser/speech/speech.gyp
+++ b/src/content/browser/speech/speech.gyp
@@ -39,12 +39,12 @@
       ],
       'dependencies': [
         'speech',
-        '<(DEPTH)/base/base.gyp:run_all_unittests',
         '<(DEPTH)/base/base.gyp:test_support_base',
         '<(DEPTH)/cobalt/media/media.gyp:media',
         '<(DEPTH)/testing/gtest.gyp:gtest',
         '<(DEPTH)/starboard/starboard.gyp:starboard',
       ],
+      'includes': ['<(DEPTH)/base/test/test.gypi'],
     },
 
     {
diff --git a/src/crypto/crypto.gyp b/src/crypto/crypto.gyp
index 354d689..48610c6 100644
--- a/src/crypto/crypto.gyp
+++ b/src/crypto/crypto.gyp
@@ -88,10 +88,10 @@
         'crypto',
         '<(DEPTH)/base/base.gyp:base',
         '<(DEPTH)/base/base.gyp:test_support_base',
-        '<(DEPTH)/base/base.gyp:run_all_unittests',
         '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
       ],
+      'includes': ['<(DEPTH)/base/test/test.gypi'],
     },
   ],
   'conditions': [
diff --git a/src/nb/analytics/memory_tracker_impl.cc b/src/nb/analytics/memory_tracker_impl.cc
index 58eab36..6b08eea 100644
--- a/src/nb/analytics/memory_tracker_impl.cc
+++ b/src/nb/analytics/memory_tracker_impl.cc
@@ -231,7 +231,10 @@
                                  const void* memory,
                                  size_t size) {
   // We might do something more interesting with MapMemory calls later.
+  MemoryTrackerImpl* t = static_cast<MemoryTrackerImpl*>(context);
+  t->PushAllocationGroupByName("Mapped Memory");
   OnMalloc(context, memory, size);
+  t->PopAllocationGroup();
 }
 
 void MemoryTrackerImpl::OnUnMapMem(void* context,
diff --git a/src/nb/fixed_no_free_allocator.cc b/src/nb/fixed_no_free_allocator.cc
index 9c60617..f4f8e1c 100644
--- a/src/nb/fixed_no_free_allocator.cc
+++ b/src/nb/fixed_no_free_allocator.cc
@@ -16,8 +16,6 @@
 
 #include "nb/fixed_no_free_allocator.h"
 
-#include <algorithm>
-
 #include "nb/pointer_arithmetic.h"
 #include "starboard/common/log.h"
 
@@ -54,8 +52,6 @@
 void* FixedNoFreeAllocator::Allocate(std::size_t* size,
                                      std::size_t alignment,
                                      bool align_pointer) {
-  *size = std::max<std::size_t>(*size, 1);
-
   // Find the next aligned memory available.
   uint8_t* aligned_next_memory =
       AsPointer(AlignUp(AsInteger(next_memory_), alignment));
diff --git a/src/nb/reuse_allocator_base.cc b/src/nb/reuse_allocator_base.cc
index 03e9903..95189ac 100644
--- a/src/nb/reuse_allocator_base.cc
+++ b/src/nb/reuse_allocator_base.cc
@@ -358,18 +358,45 @@
   // allocate the difference between |size| and the size of the rightmost block
   // in the hope that they are contiguous and can be connected into a block that is
   // large enough to fulfill |size|.
-  size_t size_difference = size - free_blocks_.rbegin()->size();
-  if (max_capacity_ && capacity_ + size_difference > max_capacity_) {
+  size_t free_address = AsInteger(free_blocks_.rbegin()->address());
+  size_t free_size = free_blocks_.rbegin()->size();
+  size_t aligned_address = AlignUp(free_address, alignment);
+  // In order to calculate |size_to_allocate|, we need to account for two
+  // possible scenarios: when |aligned_address| is within the free block region,
+  // or when it is after the free block region.
+  //
+  // Scenario 1:
+  //
+  // |free_address|      |free_address + free_size|
+  //   |                 |
+  //   | <- free_size -> | <- size_to_allocate -> |
+  //   --------------------------------------------
+  //               |<-          size           -> |
+  //               |
+  // |aligned_address|
+  //
+  // Scenario 2:
+  //
+  // |free_address|
+  //   |
+  //   | <- free_size -> | <- size_to_allocate -> |
+  //   --------------------------------------------
+  //                     |           | <- size -> |
+  //                     |           |
+  // |free_address + free_size|  |aligned_address|
+  size_t size_to_allocate = aligned_address + size - free_address - free_size;
+  if (max_capacity_ && capacity_ + size_to_allocate > max_capacity_) {
     return free_blocks_.end();
   }
-  ptr = fallback_allocator_->AllocateForAlignment(&size_difference, alignment);
+  SB_DCHECK(size_to_allocate > 0);
+  ptr = fallback_allocator_->AllocateForAlignment(&size_to_allocate, 1);
   if (ptr == NULL) {
     return free_blocks_.end();
   }
 
   fallback_allocations_.push_back(ptr);
-  capacity_ += size_difference;
-  AddFreeBlock(MemoryBlock(ptr, size_difference));
+  capacity_ += size_to_allocate;
+  AddFreeBlock(MemoryBlock(ptr, size_to_allocate));
   FreeBlockSet::iterator iter = free_blocks_.end();
   --iter;
   return iter->CanFullfill(size, alignment) ? iter : free_blocks_.end();
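
A quick numeric check of the |size_to_allocate| formula documented above, using made-up addresses and sizes for the two scenarios (all values are illustrative only, not taken from a real allocation). In both cases the fallback allocation is assumed to land directly after the existing free block, so the combined block ends exactly at aligned_address + size.

    #include <cstddef>
    #include <cstdio>

    // Mirrors AlignUp() for power-of-two alignments.
    static size_t AlignUp(size_t value, size_t alignment) {
      return (value + alignment - 1) & ~(alignment - 1);
    }

    int main() {
      // Scenario 1: |aligned_address| falls inside the free block.
      size_t free_address = 1000, free_size = 100, size = 200, alignment = 16;
      size_t aligned_address = AlignUp(free_address, alignment);  // 1008
      std::printf("scenario 1: size_to_allocate = %zu\n",
                  aligned_address + size - free_address - free_size);  // 108

      // Scenario 2: |aligned_address| falls past the end of the free block.
      free_address = 1000;
      free_size = 8;
      size = 100;
      alignment = 64;
      aligned_address = AlignUp(free_address, alignment);  // 1024
      std::printf("scenario 2: size_to_allocate = %zu\n",
                  aligned_address + size - free_address - free_size);  // 116
      return 0;
    }
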
diff --git a/src/net/base/ip_endpoint.cc b/src/net/base/ip_endpoint.cc
index 1ed221f..076d49c 100644
--- a/src/net/base/ip_endpoint.cc
+++ b/src/net/base/ip_endpoint.cc
@@ -54,7 +54,7 @@
 
     default:
       NOTREACHED();
-      break;
+      return false;
   }
 
   return true;
diff --git a/src/net/cert/internal/trust_store_in_memory_starboard.cc b/src/net/cert/internal/trust_store_in_memory_starboard.cc
index 3d53b20..e3f0856 100644
--- a/src/net/cert/internal/trust_store_in_memory_starboard.cc
+++ b/src/net/cert/internal/trust_store_in_memory_starboard.cc
@@ -75,8 +75,20 @@
 #endif
     return std::unordered_set<std::string>();
   }
-  SbDirectoryEntry dir_entry;
+
   std::unordered_set<std::string> trusted_certs_on_disk;
+#if SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+  std::vector<char> dir_entry(SB_FILE_MAX_NAME);
+
+  while (SbDirectoryGetNext(sb_certs_directory, dir_entry.data(),
+                            dir_entry.size())) {
+    if (SbStringGetLength(dir_entry.data()) != kCertFileNameLength) {
+      continue;
+    }
+    trusted_certs_on_disk.emplace(dir_entry.data());
+  }
+#else   // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+  SbDirectoryEntry dir_entry;
 
   while (SbDirectoryGetNext(sb_certs_directory, &dir_entry)) {
     if (SbStringGetLength(dir_entry.name) != kCertFileNameLength) {
@@ -84,6 +96,8 @@
     }
     trusted_certs_on_disk.emplace(dir_entry.name);
   }
+#endif  // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+
   SbDirectoryClose(sb_certs_directory);
   return std::move(trusted_certs_on_disk);
 }
diff --git a/src/net/dial/dial_udp_server.cc b/src/net/dial/dial_udp_server.cc
index affd934..bca1005 100644
--- a/src/net/dial/dial_udp_server.cc
+++ b/src/net/dial/dial_udp_server.cc
@@ -140,20 +140,23 @@
   // If M-Search request was valid, send response. Else, keep quiet.
   if (ParseSearchRequest(std::string(read_buf_->data()))) {
     auto response = std::make_unique<std::string>();
-    *response = ConstructSearchResponse();
+    *response = std::move(ConstructSearchResponse());
     // Using the fake IOBuffer to avoid another copy.
     scoped_refptr<WrappedIOBuffer> fake_buffer =
         new WrappedIOBuffer(response->data());
     // Some compilers may evaluate |response->size()| only after |response| has
     // been passed (moved), so capture the size before handing it off.
     auto response_size = response->size();
-    auto result = socket_->SendTo(
+    int result = socket_->SendTo(
         fake_buffer.get(), response_size, client_address_,
-        base::Bind([](scoped_refptr<WrappedIOBuffer>,
-                      std::unique_ptr<std::string>, int /*rv*/) {},
+        base::Bind(&DialUdpServer::WriteComplete, base::Unretained(this),
                    fake_buffer, base::Passed(&response)));
-    if (result < 0) {
-      DLOG(WARNING) << "Socket SentTo error code: " << result;
+    if (result == ERR_IO_PENDING) {
+      // WriteComplete is responsible for posting the task that accepts the next
+      // connection.
+      return;
+    } else if (result < 0) {
+      LOG(ERROR) << "UDPSocket SendTo error: " << result;
     }
   }
 
@@ -166,6 +169,17 @@
                             base::Unretained(this)));
 }
 
+void DialUdpServer::WriteComplete(scoped_refptr<WrappedIOBuffer>,
+                                  std::unique_ptr<std::string>,
+                                  int rv) {
+  if (rv < 0) {
+    LOG(ERROR) << "UDPSocket completion callback error: " << rv;
+  }
+  thread_.task_runner()->PostTask(
+      FROM_HERE, base::Bind(&DialUdpServer::AcceptAndProcessConnection,
+                            base::Unretained(this)));
+}
+
 // Parse a request to make sure it is a M-Search.
 bool DialUdpServer::ParseSearchRequest(const std::string& request) {
   HttpServerRequestInfo info;
@@ -215,7 +229,7 @@
 
 // Since we are constructing a response from user-generated string,
 // ensure all user-generated strings pass through StringPrintf.
-const std::string DialUdpServer::ConstructSearchResponse() const {
+std::string DialUdpServer::ConstructSearchResponse() const {
   DCHECK(!location_url_.empty());
 
   std::string ret("HTTP/1.1 200 OK\r\n");
@@ -237,7 +251,7 @@
                                 DialSystemConfig::GetInstance()->model_uuid(),
                                 kDialStRequest));
   ret.append("\r\n");
-  return ret;
+  return std::move(ret);
 }
 
 }  // namespace net
diff --git a/src/net/dial/dial_udp_server.h b/src/net/dial/dial_udp_server.h
index c3115c9..7af175a 100644
--- a/src/net/dial/dial_udp_server.h
+++ b/src/net/dial/dial_udp_server.h
@@ -30,6 +30,10 @@
 
   virtual void DidClose(UDPSocket* sock);
 
+  void WriteComplete(scoped_refptr<WrappedIOBuffer>,
+                     std::unique_ptr<std::string>,
+                     int rv);
+
  private:
   FRIEND_TEST_ALL_PREFIXES(DialUdpServerTest, ParseSearchRequest);
 
@@ -42,7 +46,7 @@
   void AcceptAndProcessConnection();
 
   // Construct the appropriate search response.
-  const std::string ConstructSearchResponse() const;
+  std::string ConstructSearchResponse() const;
 
   // Parse a request to make sure it is a M-Search.
   static bool ParseSearchRequest(const std::string& request);
diff --git a/src/net/disk_cache/simple/simple_index_file_starboard.cc b/src/net/disk_cache/simple/simple_index_file_starboard.cc
index 3f9634b..29d913b 100644
--- a/src/net/disk_cache/simple/simple_index_file_starboard.cc
+++ b/src/net/disk_cache/simple/simple_index_file_starboard.cc
@@ -33,7 +33,19 @@
     PLOG(ERROR) << "opendir " << cache_path.value() << ", erron: " << error;
     return false;
   }
+#if SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+  std::vector<char> entry(SB_FILE_MAX_NAME);
+
+  while (true) {
+    if (!SbDirectoryGetNext(dir, entry.data(), entry.size())) {
+      PLOG(ERROR) << "readdir " << cache_path.value();
+      return false;
+    }
+
+    const std::string file_name(entry.data());
+#else   // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
   SbDirectoryEntry entry;
+
   while (true) {
     if (!SbDirectoryGetNext(dir, &entry)) {
       PLOG(ERROR) << "readdir " << cache_path.value();
@@ -41,6 +53,7 @@
     }
 
     const std::string file_name(entry.name);
+#endif  // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
     if (file_name == "." || file_name == "..")
       continue;
     const base::FilePath file_path =
diff --git a/src/net/socket/transport_client_socket_pool.cc b/src/net/socket/transport_client_socket_pool.cc
index f5689ab..263e17c2 100644
--- a/src/net/socket/transport_client_socket_pool.cc
+++ b/src/net/socket/transport_client_socket_pool.cc
@@ -258,6 +258,15 @@
 int TransportConnectJob::DoResolveHostComplete(int result) {
   TRACE_EVENT0(kNetTracingCategory,
                "TransportConnectJob::DoResolveHostComplete");
+#ifdef STARBOARD
+    // Prefer connecting to an IPv4 address when one is available. Some hosts
+    // expose IPv6 addresses that accept connections, but reads on them can
+    // still fail if the network is not properly configured. The existing
+    // fallback only tries other IPs in |addresses_| when the connect itself
+    // fails, so it does not help when the IPv6 connect succeeds but the
+    // subsequent read fails.
+    MakeAddressListStartWithIPv4(&addresses_);
+#endif
   connect_timing_.dns_end = base::TimeTicks::Now();
   // Overwrite connection start time, since for connections that do not go
   // through proxies, |connect_start| should not include dns lookup time.
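
MakeAddressListStartWithIPv4() itself is declared elsewhere and is not shown in this patch. Assuming it simply moves the IPv4 entries ahead of the IPv6 ones while keeping the resolver's relative order, a minimal self-contained sketch of that kind of reordering, with a hypothetical Endpoint type standing in for the real address entries, could look like this:

    #include <algorithm>
    #include <cstdio>
    #include <vector>

    // Hypothetical stand-in for the real endpoint type; illustration only.
    struct Endpoint {
      const char* literal;
      bool is_ipv4;
    };

    // A stable partition keeps the resolver's ordering within each family, so
    // the existing per-address fallback still walks entries in order.
    void MakeAddressListStartWithIPv4(std::vector<Endpoint>* addresses) {
      std::stable_partition(addresses->begin(), addresses->end(),
                            [](const Endpoint& e) { return e.is_ipv4; });
    }

    int main() {
      std::vector<Endpoint> addresses = {
          {"2001:db8::1", false}, {"192.0.2.10", true}, {"192.0.2.11", true}};
      MakeAddressListStartWithIPv4(&addresses);
      for (const Endpoint& e : addresses) {
        std::printf("%s\n", e.literal);  // 192.0.2.10, 192.0.2.11, 2001:db8::1
      }
      return 0;
    }
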
diff --git a/src/net/socket/transport_client_socket_pool_unittest.cc b/src/net/socket/transport_client_socket_pool_unittest.cc
index 9d64c43..e8c7543 100644
--- a/src/net/socket/transport_client_socket_pool_unittest.cc
+++ b/src/net/socket/transport_client_socket_pool_unittest.cc
@@ -856,6 +856,10 @@
   handle.Reset();
 }
 
+// Disable this test since the TransportConnectJob::DoResolveHostComplete
+// customization causes the IPv4 address to be tried first, thus breaking
+// the assumptions of this test.
+#ifndef STARBOARD
 // Test the case of the IPv6 address stalling, and falling back to the IPv4
 // socket which finishes first.
 TEST_F(TransportClientSocketPoolTest, IPv6FallbackSocketIPv4FinishesFirst) {
@@ -903,7 +907,12 @@
 
   EXPECT_EQ(2, client_socket_factory_.allocation_count());
 }
+#endif
 
+// Disable this test since the TransportConnectJob::DoResolveHostComplete
+// customization causes the IPv4 address to be tried first, thus breaking
+// the assumptions of this test.
+#ifndef STARBOARD
 // Test the case of the IPv6 address being slow, thus falling back to trying to
 // connect to the IPv4 address, but having the connect to the IPv6 address
 // finish first.
@@ -955,6 +964,7 @@
 
   EXPECT_EQ(2, client_socket_factory_.allocation_count());
 }
+#endif
 
 TEST_F(TransportClientSocketPoolTest, IPv6NoIPv4AddressesToFallbackTo) {
   // Create a pool without backup jobs.
@@ -1072,6 +1082,10 @@
   EXPECT_TRUE(socket_data.IsUsingTCPFastOpen());
 }
 
+// Disable this test since the TransportConnectJob::DoResolveHostComplete
+// customization causes the IPv4 address to be tried first, thus breaking
+// the assumptions of this test.
+#ifndef STARBOARD
 // Test that if TCP FastOpen is enabled, it does not do anything when there
 // is a IPv6 address with fallback to an IPv4 address. This test tests the case
 // when the IPv6 connect fails and the IPv4 one succeeds.
@@ -1141,6 +1155,7 @@
   // Verify that TCP FastOpen was not turned on for the socket.
   EXPECT_FALSE(socket_data.IsUsingTCPFastOpen());
 }
+#endif
 
 // Test that SocketTag passed into TransportClientSocketPool is applied to
 // returned sockets.
diff --git a/src/net/socket/udp_socket_starboard.cc b/src/net/socket/udp_socket_starboard.cc
index 707fb74..369cf68 100644
--- a/src/net/socket/udp_socket_starboard.cc
+++ b/src/net/socket/udp_socket_starboard.cc
@@ -441,7 +441,7 @@
 
   if (result != ERR_IO_PENDING) {
     IPEndPoint log_address;
-    if (log_address.FromSbSocketAddress(&sb_address)) {
+    if (result < 0 || !log_address.FromSbSocketAddress(&sb_address)) {
       LogRead(result, buf->data(), NULL);
     } else {
       LogRead(result, buf->data(), &log_address);
diff --git a/src/net/url_request/url_fetcher_core.cc b/src/net/url_request/url_fetcher_core.cc
index 6d7b7f4..48c99c0 100644
--- a/src/net/url_request/url_fetcher_core.cc
+++ b/src/net/url_request/url_fetcher_core.cc
@@ -27,15 +27,45 @@
 #include "net/url_request/url_request_context.h"
 #include "net/url_request/url_request_context_getter.h"
 #include "net/url_request/url_request_throttler_manager.h"
+#include "starboard/time.h"
 #include "starboard/types.h"
 #include "url/origin.h"
 
 namespace {
 
+#if defined(STARBOARD)
+const SbTime kInformDownloadProgressInterval = 50 * kSbTimeMillisecond;
+#else   // defined(STARBOARD)
 const int kBufferSize = 4096;
+#endif  // defined(STARBOARD)
+
 const int kUploadProgressTimerInterval = 100;
+
 bool g_ignore_certificate_requests = false;
 
+#if defined(STARBOARD)
+int GetIOBufferSizeByContentSize(int content_size) {
+  // If |content_size| is unknown, use 64k as buffer size.
+  if (content_size < 0) {
+    return 64 * 1024;
+  }
+  // If the content is really small, use 4k anyway.
+  if (content_size <= 4 * 1024) {
+    return 4 * 1024;
+  }
+  // If the content is medium sized, use the size as buffer size.
+  if (content_size < 64 * 1024) {
+    return content_size;
+  }
+  // If the content is fairly large, use a much larger buffer size.
+  if (content_size >= 512 * 1024) {
+    return 256 * 1024;
+  }
+  // Otherwise use 64k as buffer size.
+  return 64 * 1024;
+}
+#endif  // defined(STARBOARD)
+
 }  // namespace
 
 namespace net {
@@ -80,6 +110,9 @@
       load_flags_(LOAD_NORMAL),
       allow_credentials_(base::nullopt),
       response_code_(URLFetcher::RESPONSE_CODE_INVALID),
+#if defined(STARBOARD)
+      io_buffer_size_(GetIOBufferSizeByContentSize(-1)),
+#endif  // defined(STARBOARD)
       url_request_data_key_(NULL),
       was_fetched_via_proxy_(false),
       was_cached_(false),
@@ -442,15 +475,22 @@
   }
 
   DCHECK(!buffer_);
-  if (request_type_ != URLFetcher::HEAD)
-    buffer_ = base::MakeRefCounted<IOBuffer>(kBufferSize);
 #if defined(STARBOARD)
+  if (request_type_ != URLFetcher::HEAD) {
+    response_writer_->OnResponseStarted(total_response_bytes_);
+    io_buffer_size_ = GetIOBufferSizeByContentSize(total_response_bytes_);
+    buffer_ = base::MakeRefCounted<IOBuffer>(io_buffer_size_);
+  }
+
   // We update this earlier than OnReadCompleted(), so that the delegate
   // can know about it if they call GetURL() in any callback.
   if (!stopped_on_redirect_) {
     url_ = request_->url();
   }
   InformDelegateResponseStarted();
+#else   // defined(STARBOARD)
+  if (request_type_ != URLFetcher::HEAD)
+    buffer_ = base::MakeRefCounted<IOBuffer>(kBufferSize);
 #endif  // defined(STARBOARD)
   ReadResponse();
 }
@@ -480,6 +520,35 @@
   if (throttler_manager)
     url_throttler_entry_ = throttler_manager->RegisterRequestUrl(url_);
 
+#if defined(STARBOARD)
+  // Prime |download_progress_informed_at| with the current time so that the
+  // delegate is informed at most once per |kInformDownloadProgressInterval|
+  // inside the loop, with any remaining progress reported after the loop.
+  SbTime download_progress_informed_at = SbTimeGetMonotonicNow();
+  bool did_read_after_inform_download_progress = false;
+
+  while (bytes_read > 0) {
+    current_response_bytes_ += bytes_read;
+    did_read_after_inform_download_progress = true;
+    auto now = SbTimeGetMonotonicNow();
+    if (now - download_progress_informed_at > kInformDownloadProgressInterval) {
+      InformDelegateDownloadProgress();
+      download_progress_informed_at = now;
+      did_read_after_inform_download_progress = false;
+    }
+
+    const int result = WriteBuffer(
+        base::MakeRefCounted<DrainableIOBuffer>(buffer_, bytes_read));
+    if (result < 0) {
+      // Write failed or waiting for write completion.
+      return;
+    }
+    bytes_read = request_->Read(buffer_.get(), io_buffer_size_);
+  }
+
+  if (did_read_after_inform_download_progress) {
+    InformDelegateDownloadProgress();
+  }
+#else   // defined(STARBOARD)
   while (bytes_read > 0) {
     current_response_bytes_ += bytes_read;
     InformDelegateDownloadProgress();
@@ -492,6 +561,7 @@
     }
     bytes_read = request_->Read(buffer_.get(), kBufferSize);
   }
+#endif  // defined(STARBOARD)
 
   // See comments re: HEAD requests in ReadResponse().
   if (bytes_read != ERR_IO_PENDING || request_type_ == URLFetcher::HEAD) {
@@ -933,8 +1003,13 @@
   // completed immediately, without trying to read any data back (all we care
   // about is the response code and headers, which we already have).
   int bytes_read = 0;
+#if defined(STARBOARD)
+  if (request_type_ != URLFetcher::HEAD)
+    bytes_read = request_->Read(buffer_.get(), io_buffer_size_);
+#else   // defined(STARBOARD)
   if (request_type_ != URLFetcher::HEAD)
     bytes_read = request_->Read(buffer_.get(), kBufferSize);
+#endif  // defined(STARBOARD)
 
   OnReadCompleted(request_.get(), bytes_read);
 }
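
The buffer-size heuristic added above maps the (possibly unknown) Content-Length onto a read-buffer size. For reference, here is a standalone restatement of those thresholds together with a few sample inputs; the function body simply mirrors GetIOBufferSizeByContentSize() from the hunk above.

    #include <cstdio>

    int GetIOBufferSizeByContentSize(int content_size) {
      if (content_size < 0) return 64 * 1024;          // Unknown length.
      if (content_size <= 4 * 1024) return 4 * 1024;   // Really small content.
      if (content_size < 64 * 1024) return content_size;
      if (content_size >= 512 * 1024) return 256 * 1024;
      return 64 * 1024;                                // 64k..512k range.
    }

    int main() {
      const int samples[] = {-1, 1024, 20 * 1024, 100 * 1024, 1024 * 1024};
      for (int content_size : samples) {
        std::printf("content %8d -> buffer %6d\n", content_size,
                    GetIOBufferSizeByContentSize(content_size));
      }
      return 0;
    }
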
diff --git a/src/net/url_request/url_fetcher_core.h b/src/net/url_request/url_fetcher_core.h
index 05ebd67..3a18e18 100644
--- a/src/net/url_request/url_fetcher_core.h
+++ b/src/net/url_request/url_fetcher_core.h
@@ -258,6 +258,10 @@
   // Whether credentials are sent along with the request.
   base::Optional<bool> allow_credentials_;
   int response_code_;                // HTTP status code for the request
+
+#if defined(STARBOARD)
+  int io_buffer_size_;
+#endif  // defined(STARBOARD)
   scoped_refptr<IOBuffer> buffer_;
                                      // Read buffer
   scoped_refptr<URLRequestContextGetter> request_context_getter_;
diff --git a/src/net/url_request/url_fetcher_response_writer.h b/src/net/url_request/url_fetcher_response_writer.h
index 1076a3e..bd4ca14 100644
--- a/src/net/url_request/url_fetcher_response_writer.h
+++ b/src/net/url_request/url_fetcher_response_writer.h
@@ -37,6 +37,13 @@
   // Initialize() success results in discarding already written data.
   virtual int Initialize(CompletionOnceCallback callback) = 0;
 
+#if defined(STARBOARD)
+  // The user of this class *may* call this function before any calls to Write()
+  // to prime the instance with response size, so it has a chance to do some
+  // preparation work, like pre-allocate the buffer.
+  virtual void OnResponseStarted(int64_t content_length) = 0;
+#endif  // defined(STARBOARD)
+
   // Writes |num_bytes| bytes in |buffer|, and returns the number of bytes
   // written or an error code. If ERR_IO_PENDING is returned, |callback| will be
   // run later with the result.
@@ -70,6 +77,9 @@
 
   // URLFetcherResponseWriter overrides:
   int Initialize(CompletionOnceCallback callback) override;
+#if defined(STARBOARD)
+  void OnResponseStarted(int64_t /*content_length*/) override {}
+#endif  // defined(STARBOARD)
   int Write(IOBuffer* buffer,
             int num_bytes,
             CompletionOnceCallback callback) override;
@@ -96,6 +106,9 @@
 
   // URLFetcherResponseWriter overrides:
   int Initialize(CompletionOnceCallback callback) override;
+#if defined(STARBOARD)
+  void OnResponseStarted(int64_t /*content_length*/) override {}
+#endif  // defined(STARBOARD)
   int Write(IOBuffer* buffer,
             int num_bytes,
             CompletionOnceCallback callback) override;
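
OnResponseStarted() gives a writer the chance to pre-allocate once the expected response size is known, before the first Write(). The real net::URLFetcherResponseWriter subclasses are not reproduced here; the sketch below uses a hypothetical, self-contained PreallocatingWriter purely to illustrate the intended call order (OnResponseStarted once, then Write for each chunk).

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>
    #include <string>

    // Hypothetical illustration of the call order; not the net:: interface.
    class PreallocatingWriter {
     public:
      // Called at most once, before any Write(), when the headers arrive.
      void OnResponseStarted(int64_t content_length) {
        if (content_length > 0) {
          data_.reserve(static_cast<size_t>(content_length));
        }
      }

      int Write(const char* buffer, int num_bytes) {
        data_.append(buffer, static_cast<size_t>(num_bytes));
        return num_bytes;  // Number of bytes consumed.
      }

      const std::string& data() const { return data_; }

     private:
      std::string data_;
    };

    int main() {
      PreallocatingWriter writer;
      writer.OnResponseStarted(11);  // e.g. Content-Length: 11
      writer.Write("hello ", 6);
      writer.Write("world", 5);
      std::printf("%zu bytes buffered\n", writer.data().size());
      return 0;
    }
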
diff --git a/src/net/websockets/websocket_channel.h b/src/net/websockets/websocket_channel.h
index 2eae388..d914a08 100644
--- a/src/net/websockets/websocket_channel.h
+++ b/src/net/websockets/websocket_channel.h
@@ -90,10 +90,15 @@
   // character boundaries. Calling SendFrame may result in synchronous calls to
   // |event_interface_| which may result in this object being deleted. In that
   // case, the return value will be CHANNEL_DELETED.
+#if defined(STARBOARD)
+  // Make it virtual to enable mocking for unit tests.
+  virtual ChannelState SendFrame(bool fin,
+#else
   ChannelState SendFrame(bool fin,
-                         WebSocketFrameHeader::OpCode op_code,
-                         scoped_refptr<IOBuffer> buffer,
-                         size_t buffer_size);
+#endif
+                                 WebSocketFrameHeader::OpCode op_code,
+                                 scoped_refptr<IOBuffer> buffer,
+                                 size_t buffer_size);
 
   // Sends |quota| units of flow control to the remote side. If the underlying
   // transport has a concept of |quota|, then it permits the remote server to
diff --git a/src/starboard/android/apk/app/src/main/java/dev/cobalt/media/AudioOutputManager.java b/src/starboard/android/apk/app/src/main/java/dev/cobalt/media/AudioOutputManager.java
index 2105e5b..4a091f8 100644
--- a/src/starboard/android/apk/app/src/main/java/dev/cobalt/media/AudioOutputManager.java
+++ b/src/starboard/android/apk/app/src/main/java/dev/cobalt/media/AudioOutputManager.java
@@ -19,7 +19,9 @@
 import android.annotation.TargetApi;
 import android.content.Context;
 import android.media.AudioDeviceInfo;
+import android.media.AudioFormat;
 import android.media.AudioManager;
+import android.media.AudioTrack;
 import android.os.Build;
 import dev.cobalt.util.Log;
 import dev.cobalt.util.UsedByNative;
@@ -105,4 +107,25 @@
     }
     return maxChannels;
   }
+
+  /** Returns the minimum buffer size of AudioTrack. */
+  @SuppressWarnings("unused")
+  @UsedByNative
+  int getMinBufferSize(int sampleType, int sampleRate, int channelCount) {
+    int channelConfig;
+    switch (channelCount) {
+      case 1:
+        channelConfig = AudioFormat.CHANNEL_OUT_MONO;
+        break;
+      case 2:
+        channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
+        break;
+      case 6:
+        channelConfig = AudioFormat.CHANNEL_OUT_5POINT1;
+        break;
+      default:
+        throw new RuntimeException("Unsupported channel count: " + channelCount);
+    }
+    return AudioTrack.getMinBufferSize(sampleRate, channelConfig, sampleType);
+  }
 }
diff --git a/src/starboard/android/shared/audio_sink_get_min_buffer_size_in_frames.cc b/src/starboard/android/shared/audio_sink_get_min_buffer_size_in_frames.cc
index 2bd7c3f..c6af790 100644
--- a/src/starboard/android/shared/audio_sink_get_min_buffer_size_in_frames.cc
+++ b/src/starboard/android/shared/audio_sink_get_min_buffer_size_in_frames.cc
@@ -20,10 +20,6 @@
 int SbAudioSinkGetMinBufferSizeInFrames(int channels,
                                         SbMediaAudioSampleType sample_type,
                                         int sampling_frequency_hz) {
-  // Currently, we only use |min_required_frames_| for web audio, which
-  // only supports 48k mono or stereo sound.
-  SB_DCHECK(sampling_frequency_hz == 48000);
-
   if (channels <= 0 || channels > SbAudioSinkGetMaxChannels()) {
     SB_LOG(ERROR) << "Not support channels count " << channels;
     return -1;
@@ -33,7 +29,7 @@
     SB_LOG(ERROR) << "Not support sample type " << sample_type;
     return -1;
   }
-  if (sampling_frequency_hz <= 0) {
+  if (sampling_frequency_hz <= 0 || sampling_frequency_hz >= 50000) {
     SB_LOG(ERROR) << "Not support sample frequency " << sampling_frequency_hz;
     return -1;
   }
diff --git a/src/starboard/android/shared/audio_sink_min_required_frames_tester.cc b/src/starboard/android/shared/audio_sink_min_required_frames_tester.cc
index e9a87a3..6276641 100644
--- a/src/starboard/android/shared/audio_sink_min_required_frames_tester.cc
+++ b/src/starboard/android/shared/audio_sink_min_required_frames_tester.cc
@@ -39,22 +39,18 @@
 }
 }  // namespace
 
-MinRequiredFramesTester::MinRequiredFramesTester(int audio_sink_buffer_size,
-                                                 int max_required_frames,
-                                                 int default_required_frames,
+MinRequiredFramesTester::MinRequiredFramesTester(int max_required_frames,
                                                  int required_frames_increment,
                                                  int min_stable_played_frames)
-    : audio_sink_buffer_size_(audio_sink_buffer_size),
-      max_required_frames_(max_required_frames),
-      default_required_frames_(default_required_frames),
+    : max_required_frames_(max_required_frames),
       required_frames_increment_(required_frames_increment),
       min_stable_played_frames_(min_stable_played_frames),
       condition_variable_(mutex_),
-      destroyed_(false) {}
+      destroying_(false) {}
 
 MinRequiredFramesTester::~MinRequiredFramesTester() {
   SB_DCHECK(thread_checker_.CalledOnValidThread());
-  destroyed_.store(true);
+  destroying_.store(true);
   if (SbThreadIsValid(tester_thread_)) {
     {
       ScopedLock scoped_lock(mutex_);
@@ -65,19 +61,24 @@
   }
 }
 
-void MinRequiredFramesTester::StartTest(
+void MinRequiredFramesTester::AddTest(
     int number_of_channels,
     SbMediaAudioSampleType sample_type,
     int sample_rate,
-    OnMinRequiredFramesReceivedCallback received_cb) {
+    const OnMinRequiredFramesReceivedCallback& received_cb,
+    int default_required_frames) {
   SB_DCHECK(thread_checker_.CalledOnValidThread());
-  // MinRequiredFramesTester only supports to do test once now.
+  // MinRequiredFramesTester doesn't support adding tests after it has started.
   SB_DCHECK(!SbThreadIsValid(tester_thread_));
 
-  number_of_channels_ = number_of_channels;
-  sample_type_ = sample_type;
-  sample_rate_ = sample_rate;
-  received_cb_ = received_cb;
+  test_tasks_.emplace_back(number_of_channels, sample_type, sample_rate,
+                           received_cb, default_required_frames);
+}
+
+void MinRequiredFramesTester::Start() {
+  SB_DCHECK(thread_checker_.CalledOnValidThread());
+  // MinRequiredFramesTester can only be started once.
+  SB_DCHECK(!SbThreadIsValid(tester_thread_));
 
   tester_thread_ =
       SbThreadCreate(0, kSbThreadPriorityLowest, kSbThreadNoAffinity, true,
@@ -97,45 +98,51 @@
 
 void MinRequiredFramesTester::TesterThreadFunc() {
   bool wait_timeout = false;
-  // Currently, we only support test once. But we can put following codes in
-  // a for loop easily to support test multiple times.
-  std::vector<uint8_t> silence_buffer(
-      max_required_frames_ * number_of_channels_ * GetSampleSize(sample_type_),
-      0);
-  void* frame_buffers[1];
-  frame_buffers[0] = silence_buffer.data();
-  // Set default values.
-  min_required_frames_ = default_required_frames_;
-  total_consumed_frames_ = 0;
-  last_underrun_count_ = -1;
-  last_total_consumed_frames_ = 0;
-  {
-    ScopedLock scoped_lock(mutex_);
-    // Need to check |destroyed_| before start, as MinRequiredFramesTester may
+  for (const TestTask& task : test_tasks_) {
+    // Need to check |destroying_| before starting, as MinRequiredFramesTester may
     // be destroyed immediately after tester thread started.
-    if (!destroyed_.load()) {
-      audio_sink_ = new AudioTrackAudioSink(
-          NULL, number_of_channels_, sample_rate_, sample_type_, frame_buffers,
-          max_required_frames_,
-          audio_sink_buffer_size_ * number_of_channels_ *
-              GetSampleSize(sample_type_),
-          &MinRequiredFramesTester::UpdateSourceStatusFunc,
-          &MinRequiredFramesTester::ConsumeFramesFunc, this);
-      wait_timeout = !condition_variable_.WaitTimed(kSbTimeSecond * 5);
-      if (wait_timeout) {
-        SB_LOG(ERROR) << "Audio sink min required frames tester timeout.";
-        SB_NOTREACHED();
-      }
+    if (destroying_.load()) {
+      break;
     }
-  }
-  delete audio_sink_;
-  audio_sink_ = nullptr;
-  // Call |received_cb_| after audio sink thread is ended.
-  // |number_of_channels_|, |sample_type_|, |sample_rate_| and
-  // |min_required_frames_| are shared between two threads.
-  if (!destroyed_.load() && !wait_timeout) {
-    received_cb_(number_of_channels_, sample_type_, sample_rate_,
-                 min_required_frames_);
+    std::vector<uint8_t> silence_buffer(max_required_frames_ *
+                                            task.number_of_channels *
+                                            GetSampleSize(task.sample_type),
+                                        0);
+    void* frame_buffers[1];
+    frame_buffers[0] = silence_buffer.data();
+
+    // Set default values.
+    min_required_frames_ = task.default_required_frames;
+    total_consumed_frames_ = 0;
+    last_underrun_count_ = -1;
+    last_total_consumed_frames_ = 0;
+
+    audio_sink_ = new AudioTrackAudioSink(
+        NULL, task.number_of_channels, task.sample_rate, task.sample_type,
+        frame_buffers, max_required_frames_,
+        min_required_frames_ * task.number_of_channels *
+            GetSampleSize(task.sample_type),
+        &MinRequiredFramesTester::UpdateSourceStatusFunc,
+        &MinRequiredFramesTester::ConsumeFramesFunc, this);
+    {
+      ScopedLock scoped_lock(mutex_);
+      wait_timeout = !condition_variable_.WaitTimed(kSbTimeSecond * 5);
+    }
+
+    if (wait_timeout) {
+      SB_LOG(ERROR) << "Audio sink min required frames tester timeout.";
+      SB_NOTREACHED();
+    }
+
+    delete audio_sink_;
+    audio_sink_ = nullptr;
+
+    // Call |task.received_cb| after the audio sink thread has ended.
+    // |min_required_frames_| is shared between two threads.
+    if (!destroying_.load() && !wait_timeout) {
+      task.received_cb(task.number_of_channels, task.sample_type,
+                       task.sample_rate, min_required_frames_);
+    }
   }
 }
 
diff --git a/src/starboard/android/shared/audio_sink_min_required_frames_tester.h b/src/starboard/android/shared/audio_sink_min_required_frames_tester.h
index 9e53134..290011d 100644
--- a/src/starboard/android/shared/audio_sink_min_required_frames_tester.h
+++ b/src/starboard/android/shared/audio_sink_min_required_frames_tester.h
@@ -17,6 +17,7 @@
 
 #include <atomic>
 #include <functional>
+#include <vector>
 
 #include "starboard/common/condition_variable.h"
 #include "starboard/common/mutex.h"
@@ -40,19 +41,39 @@
                              int min_required_frames)>
       OnMinRequiredFramesReceivedCallback;
 
-  MinRequiredFramesTester(int audio_sink_buffer_size,
-                          int max_required_frames,
-                          int default_required_frames,
+  MinRequiredFramesTester(int max_required_frames,
                           int required_frames_increment,
                           int min_stable_played_frames);
   ~MinRequiredFramesTester();
 
-  void StartTest(int number_of_channels,
-                 SbMediaAudioSampleType sample_type,
-                 int sample_rate,
-                 OnMinRequiredFramesReceivedCallback received_cb);
+  void AddTest(int number_of_channels,
+               SbMediaAudioSampleType sample_type,
+               int sample_rate,
+               const OnMinRequiredFramesReceivedCallback& received_cb,
+               int default_required_frames);
+
+  void Start();
 
  private:
+  struct TestTask {
+    TestTask(int number_of_channels,
+             SbMediaAudioSampleType sample_type,
+             int sample_rate,
+             OnMinRequiredFramesReceivedCallback received_cb,
+             int default_required_frames)
+        : number_of_channels(number_of_channels),
+          sample_type(sample_type),
+          sample_rate(sample_rate),
+          received_cb(received_cb),
+          default_required_frames(default_required_frames) {}
+
+    const int number_of_channels;
+    const SbMediaAudioSampleType sample_type;
+    const int sample_rate;
+    const OnMinRequiredFramesReceivedCallback received_cb;
+    const int default_required_frames;
+  };
+
   static void* TesterThreadEntryPoint(void* context);
   void TesterThreadFunc();
 
@@ -73,24 +94,16 @@
   MinRequiredFramesTester(const MinRequiredFramesTester&) = delete;
   MinRequiredFramesTester& operator=(const MinRequiredFramesTester&) = delete;
 
-  const int audio_sink_buffer_size_;
   const int max_required_frames_;
-  const int default_required_frames_;
   const int required_frames_increment_;
   const int min_stable_played_frames_;
 
   ::starboard::shared::starboard::ThreadChecker thread_checker_;
 
-  // Shared variables between tester thread and audio sink thread.
+  std::vector<TestTask> test_tasks_;
   AudioTrackAudioSink* audio_sink_ = nullptr;
-  int number_of_channels_;
-  SbMediaAudioSampleType sample_type_;
-  int sample_rate_;
   int min_required_frames_;
 
-  // Used only by tester thread.
-  OnMinRequiredFramesReceivedCallback received_cb_;
-
   // Used only by audio sink thread.
   int total_consumed_frames_;
   int last_underrun_count_;
@@ -99,7 +112,7 @@
   Mutex mutex_;
   ConditionVariable condition_variable_;
   SbThread tester_thread_ = kSbThreadInvalid;
-  std::atomic_bool destroyed_;
+  std::atomic_bool destroying_;
 };
 
 }  // namespace shared
diff --git a/src/starboard/android/shared/audio_track_audio_sink_type.cc b/src/starboard/android/shared/audio_track_audio_sink_type.cc
index 994d904..363d4f9 100644
--- a/src/starboard/android/shared/audio_track_audio_sink_type.cc
+++ b/src/starboard/android/shared/audio_track_audio_sink_type.cc
@@ -35,12 +35,13 @@
 const jint kNoOffset = 0;
 const size_t kSilenceFramesPerAppend = 1024;
 
-const int kAudioSinkBufferSize = 4 * 1024;
 const int kMaxRequiredFrames = 16 * 1024;
-const int kDefaultRequiredFrames = 8 * 1024;
 const int kRequiredFramesIncrement = 2 * 1024;
 const int kMinStablePlayedFrames = 12 * 1024;
 
+const int kSampleFrequency22k = 22050;
+const int kSampleFrequency48k = 48000;
+
 // Helper function to compute the size of the two valid starboard audio sample
 // types.
 size_t GetSampleSize(SbMediaAudioSampleType sample_type) {
@@ -80,7 +81,7 @@
     SbMediaAudioSampleType sample_type,
     SbAudioSinkFrameBuffers frame_buffers,
     int frames_per_channel,
-    int preferred_buffer_size,
+    int preferred_buffer_size_in_bytes,
     SbAudioSinkUpdateSourceStatusFunc update_source_status_func,
     SbAudioSinkConsumeFramesFunc consume_frame_func,
     void* context)
@@ -113,7 +114,7 @@
       j_audio_output_manager.Get(), "createAudioTrackBridge",
       "(IIII)Ldev/cobalt/media/AudioTrackBridge;",
       GetAudioFormatSampleType(sample_type_), sampling_frequency_hz_, channels_,
-      preferred_buffer_size);
+      preferred_buffer_size_in_bytes);
   if (!j_audio_track_bridge) {
     return;
   }
@@ -358,16 +359,25 @@
     int sampling_frequency_hz) {
   SB_DCHECK(audio_track_audio_sink_type_);
 
-  return audio_track_audio_sink_type_->min_required_frames_.load();
+  JniEnvExt* env = JniEnvExt::Get();
+  ScopedLocalJavaRef<jobject> j_audio_output_manager(
+      env->CallStarboardObjectMethodOrAbort(
+          "getAudioOutputManager", "()Ldev/cobalt/media/AudioOutputManager;"));
+  int audio_track_min_buffer_size = static_cast<int>(env->CallIntMethodOrAbort(
+      j_audio_output_manager.Get(), "getMinBufferSize", "(III)I",
+      GetAudioFormatSampleType(sample_type), sampling_frequency_hz, channels));
+  int audio_track_min_buffer_size_in_frames =
+      audio_track_min_buffer_size / channels / GetSampleSize(sample_type);
+  return std::max(
+      audio_track_min_buffer_size_in_frames,
+      audio_track_audio_sink_type_->GetMinBufferSizeInFramesInternal(
+          channels, sample_type, sampling_frequency_hz));
 }
 
 AudioTrackAudioSinkType::AudioTrackAudioSinkType()
-    : min_required_frames_tester_(kAudioSinkBufferSize,
-                                  kMaxRequiredFrames,
-                                  kDefaultRequiredFrames,
+    : min_required_frames_tester_(kMaxRequiredFrames,
                                   kRequiredFramesIncrement,
-                                  kMinStablePlayedFrames),
-      min_required_frames_(kMaxRequiredFrames) {}
+                                  kMinStablePlayedFrames) {}
 
 SbAudioSink AudioTrackAudioSinkType::Create(
     int channels,
@@ -384,13 +394,13 @@
   // large buffer may cause AudioTrack to fail to start. As Cobalt now writes
   // no more than 1s of audio data and no more than 0.5s ahead to the starboard
   // player, limit the buffer size to store at most 0.5s of audio data.
-  int preferred_buffer_size =
+  int preferred_buffer_size_in_bytes =
       std::min(frames_per_channel, sampling_frequency_hz / 2) * channels *
       GetSampleSize(audio_sample_type);
   AudioTrackAudioSink* audio_sink = new AudioTrackAudioSink(
       this, channels, sampling_frequency_hz, audio_sample_type, frame_buffers,
-      frames_per_channel, preferred_buffer_size, update_source_status_func,
-      consume_frames_func, context);
+      frames_per_channel, preferred_buffer_size_in_bytes,
+      update_source_status_func, consume_frames_func, context);
   if (!audio_sink->IsAudioTrackValid()) {
     SB_DLOG(ERROR)
         << "AudioTrackAudioSinkType::Create failed to create audio track";
@@ -407,7 +417,8 @@
         SB_LOG(INFO) << "Received min required frames " << min_required_frames
                      << " for " << number_of_channels << " channels, "
                      << sample_rate << "hz.";
-        min_required_frames_.store(min_required_frames);
+        ScopedLock lock(min_required_frames_map_mutex_);
+        min_required_frames_map_[sample_rate] = min_required_frames;
       };
 
   SbMediaAudioSampleType sample_type = kSbMediaAudioSampleTypeFloat32;
@@ -415,12 +426,33 @@
     sample_type = kSbMediaAudioSampleTypeInt16Deprecated;
     SB_DCHECK(SbAudioSinkIsAudioSampleTypeSupported(sample_type));
   }
+  min_required_frames_tester_.AddTest(2, sample_type, kSampleFrequency48k,
+                                      onMinRequiredFramesForWebAudioReceived,
+                                      8 * 1024);
+  min_required_frames_tester_.AddTest(2, sample_type, kSampleFrequency22k,
+                                      onMinRequiredFramesForWebAudioReceived,
+                                      4 * 1024);
+  min_required_frames_tester_.Start();
+}
 
-  // Currently, cobalt only use |min_required_frames_| for web audio, which
-  // only supports 48k mono or stereo sound. It should be fine now to only
-  // test 48k stereo sound.
-  min_required_frames_tester_.StartTest(2, sample_type, 48000,
-                                        onMinRequiredFramesForWebAudioReceived);
+int AudioTrackAudioSinkType::GetMinBufferSizeInFramesInternal(
+    int channels,
+    SbMediaAudioSampleType sample_type,
+    int sampling_frequency_hz) {
+  if (sampling_frequency_hz <= kSampleFrequency22k) {
+    ScopedLock lock(min_required_frames_map_mutex_);
+    if (min_required_frames_map_.find(kSampleFrequency22k) !=
+        min_required_frames_map_.end()) {
+      return min_required_frames_map_[kSampleFrequency22k];
+    }
+  } else if (sampling_frequency_hz <= kSampleFrequency48k) {
+    ScopedLock lock(min_required_frames_map_mutex_);
+    if (min_required_frames_map_.find(kSampleFrequency48k) !=
+        min_required_frames_map_.end()) {
+      return min_required_frames_map_[kSampleFrequency48k];
+    }
+  }
+  return kMaxRequiredFrames;
 }
 
 }  // namespace shared
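
GetMinBufferSizeInFrames() above converts the byte count reported by AudioTrack.getMinBufferSize() into frames by dividing by the channel count and the sample size, then takes the larger of that value and the tester-derived minimum via std::max(). A small arithmetic sketch with made-up numbers (the byte value below is illustrative, not a real device measurement):

    #include <cstdio>

    int main() {
      // Hypothetical value as might be returned by AudioTrack.getMinBufferSize().
      const int min_buffer_size_in_bytes = 15376;
      const int channels = 2;
      const int bytes_per_sample = 4;  // 32-bit float samples.

      const int min_buffer_size_in_frames =
          min_buffer_size_in_bytes / channels / bytes_per_sample;
      std::printf("%d bytes -> %d frames\n", min_buffer_size_in_bytes,
                  min_buffer_size_in_frames);  // 15376 bytes -> 1922 frames
      return 0;
    }
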
diff --git a/src/starboard/android/shared/audio_track_audio_sink_type.h b/src/starboard/android/shared/audio_track_audio_sink_type.h
index 821a146..966fa27 100644
--- a/src/starboard/android/shared/audio_track_audio_sink_type.h
+++ b/src/starboard/android/shared/audio_track_audio_sink_type.h
@@ -17,6 +17,7 @@
 
 #include <atomic>
 #include <functional>
+#include <map>
 
 #include "starboard/android/shared/audio_sink_min_required_frames_tester.h"
 #include "starboard/android/shared/jni_env_ext.h"
@@ -68,8 +69,14 @@
   void TestMinRequiredFrames();
 
  private:
-  std::atomic_int min_required_frames_;
+  int GetMinBufferSizeInFramesInternal(int channels,
+                                       SbMediaAudioSampleType sample_type,
+                                       int sampling_frequency_hz);
+
   MinRequiredFramesTester min_required_frames_tester_;
+  Mutex min_required_frames_map_mutex_;
+  // The minimum frames required to avoid underruns, keyed by sample frequency.
+  std::map<int, int> min_required_frames_map_;
 };
 
 class AudioTrackAudioSink : public SbAudioSinkPrivate {
diff --git a/src/starboard/android/shared/configuration_constants.cc b/src/starboard/android/shared/configuration_constants.cc
new file mode 100644
index 0000000..5b3c886
--- /dev/null
+++ b/src/starboard/android/shared/configuration_constants.cc
@@ -0,0 +1,23 @@
+// Copyright 2019 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file defines all configuration constants for a platform.
+
+#include "starboard/configuration_constants.h"
+
+// Determines the alignment that allocations should have on this platform.
+const size_t kSbMallocAlignment = 16;
+
+// The maximum length of a name for a thread, including the NULL-terminator.
+const int32_t kSbMaxThreadNameLength = 16;
diff --git a/src/starboard/android/shared/configuration_public.h b/src/starboard/android/shared/configuration_public.h
index 7abc208..c5110f8 100644
--- a/src/starboard/android/shared/configuration_public.h
+++ b/src/starboard/android/shared/configuration_public.h
@@ -291,9 +291,6 @@
 // specify that.
 #define SB_NETWORK_IO_BUFFER_ALIGNMENT 16
 
-// Determines the alignment that allocations should have on this platform.
-#define SB_MALLOC_ALIGNMENT ((size_t)16U)
-
 // Determines the threshold of allocation size that should be done with mmap
 // (if available), rather than allocated within the core heap.
 #define SB_DEFAULT_MMAP_THRESHOLD ((size_t)(256 * 1024U))
@@ -330,9 +327,6 @@
 // to not include here to decrease symbol pollution.
 #define SB_MAX_THREAD_LOCAL_KEYS 128
 
-// The maximum length of the name for a thread, including the NULL-terminator.
-#define SB_MAX_THREAD_NAME_LENGTH 16
-
 // --- Tuneable Parameters ---------------------------------------------------
 
 // Specifies the network receive buffer size in bytes, set via
diff --git a/src/starboard/android/shared/directory_get_next.cc b/src/starboard/android/shared/directory_get_next.cc
index f1e5b02..525fdf4 100644
--- a/src/starboard/android/shared/directory_get_next.cc
+++ b/src/starboard/android/shared/directory_get_next.cc
@@ -15,21 +15,24 @@
 #include "starboard/directory.h"
 
 #include <android/asset_manager.h>
+#include <string.h>
 
 #include "starboard/android/shared/directory_internal.h"
 #include "starboard/shared/iso/impl/directory_get_next.h"
 
-bool SbDirectoryGetNext(SbDirectory directory, SbDirectoryEntry* out_entry) {
-  if (directory && directory->asset_dir && out_entry) {
+bool SbDirectoryGetNext(SbDirectory directory,
+                        char* out_entry,
+                        size_t out_entry_size) {
+  if (directory && directory->asset_dir && out_entry &&
+      out_entry_size >= SB_FILE_MAX_NAME) {
     const char* file_name = AAssetDir_getNextFileName(directory->asset_dir);
     if (file_name == NULL) {
       return false;
     }
-    size_t size = SB_ARRAY_SIZE_INT(out_entry->name);
-    SbStringCopy(out_entry->name, file_name, size);
+    SbStringCopy(out_entry, file_name, out_entry_size);
     return true;
   }
 
-  return ::starboard::shared::iso::impl::SbDirectoryGetNext(directory,
-                                                            out_entry);
+  return ::starboard::shared::iso::impl::SbDirectoryGetNext(
+      directory, out_entry, out_entry_size);
 }
diff --git a/src/starboard/android/shared/gyp_configuration.gypi b/src/starboard/android/shared/gyp_configuration.gypi
index c06cc60..d5f23e0 100644
--- a/src/starboard/android/shared/gyp_configuration.gypi
+++ b/src/starboard/android/shared/gyp_configuration.gypi
@@ -20,7 +20,8 @@
     'target_os': 'android',
     'final_executable_type': 'shared_library',
     'gtest_target_type': 'shared_library',
-    'sb_widevine_platform' : 'android',
+    'sb_widevine_platform': 'android',
+    'sb_enable_benchmark': 1,
 
     'gl_type': 'system_gles2',
     'enable_remote_debugging': 0,
diff --git a/src/starboard/android/shared/media_decoder.cc b/src/starboard/android/shared/media_decoder.cc
index f1a3dfb..24ed8a4 100644
--- a/src/starboard/android/shared/media_decoder.cc
+++ b/src/starboard/android/shared/media_decoder.cc
@@ -343,7 +343,7 @@
     std::vector<int>* input_buffer_indices) {
   SB_DCHECK(media_codec_bridge_);
 
-  // During secure playback, and only secure playback, is is possible that our
+  // During secure playback, and only secure playback, it is possible that our
   // attempt to enqueue an input buffer will be rejected by MediaCodec because
   // we do not have a key yet.  In this case, we hold on to the input buffer
   // that we have already set up, and repeatedly attempt to enqueue it until
diff --git a/src/starboard/android/shared/player_components_impl.cc b/src/starboard/android/shared/player_components_impl.cc
index c2f52aa..117fd97 100644
--- a/src/starboard/android/shared/player_components_impl.cc
+++ b/src/starboard/android/shared/player_components_impl.cc
@@ -58,8 +58,11 @@
           return audio_decoder_impl.PassAs<AudioDecoder>();
         }
       } else if (audio_sample_info.codec == kSbMediaAudioCodecOpus) {
-        return scoped_ptr<AudioDecoder>(
+        scoped_ptr<OpusAudioDecoder> audio_decoder_impl(
             new OpusAudioDecoder(audio_sample_info));
+        if (audio_decoder_impl->is_valid()) {
+          return audio_decoder_impl.PassAs<AudioDecoder>();
+        }
       } else {
         SB_NOTREACHED();
       }
diff --git a/src/starboard/android/shared/starboard_platform.gypi b/src/starboard/android/shared/starboard_platform.gypi
index ad89435..d8fb10e 100644
--- a/src/starboard/android/shared/starboard_platform.gypi
+++ b/src/starboard/android/shared/starboard_platform.gypi
@@ -80,6 +80,7 @@
         'bionic/private/bionic_macros.h',
         'bionic/private/ErrnoRestorer.h',
         'configuration_public.h',
+        'configuration_constants.cc',
         'decode_target_create.cc',
         'decode_target_create.h',
         'decode_target_get_info.cc',
diff --git a/src/starboard/android/shared/video_decoder.cc b/src/starboard/android/shared/video_decoder.cc
index 227b54c..b3d542b 100644
--- a/src/starboard/android/shared/video_decoder.cc
+++ b/src/starboard/android/shared/video_decoder.cc
@@ -524,21 +524,16 @@
     if (has_new_texture) {
       updateTexImage(decode_target_->data->surface_texture);
 
+      decode_target_->data->info.planes[0].width = frame_width_;
+      decode_target_->data->info.planes[0].height = frame_height_;
+      decode_target_->data->info.width = frame_width_;
+      decode_target_->data->info.height = frame_height_;
+
       float matrix4x4[16];
       getTransformMatrix(decode_target_->data->surface_texture, matrix4x4);
       SetDecodeTargetContentRegionFromMatrix(
-          &decode_target_->data->info.planes[0].content_region, 1, 1,
-          matrix4x4);
-
-      // Mark the decode target's width and height as 1, so that the
-      // |content_region|'s coordinates will be interpreted as normalized
-      // coordinates.  This is nice because on Android we're never explicitly
-      // told the texture width/height, and we are only provided the content
-      // region via normalized coordinates.
-      decode_target_->data->info.planes[0].width = 1;
-      decode_target_->data->info.planes[0].height = 1;
-      decode_target_->data->info.width = 1;
-      decode_target_->data->info.height = 1;
+          &decode_target_->data->info.planes[0].content_region, frame_width_,
+          frame_height_, matrix4x4);
 
       if (!first_texture_received_) {
         first_texture_received_ = true;
diff --git a/src/starboard/benchmark/benchmark.gyp b/src/starboard/benchmark/benchmark.gyp
new file mode 100644
index 0000000..3e626a2
--- /dev/null
+++ b/src/starboard/benchmark/benchmark.gyp
@@ -0,0 +1,47 @@
+# Copyright 2019 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+{
+  'targets': [
+    {
+      'target_name': 'benchmark',
+      'type': '<(final_executable_type)',
+      'defines': [
+        # This allows the benchmarks to include internal-only header files.
+        'STARBOARD_IMPLEMENTATION',
+      ],
+      'sources': [
+        '<(DEPTH)/starboard/benchmark/memory_benchmark.cc',
+        '<(DEPTH)/starboard/benchmark/thread_benchmark.cc',
+        '<(DEPTH)/starboard/common/benchmark_main.cc',
+      ],
+      'dependencies': [
+        '<@(cobalt_platform_dependencies)',
+        '<(DEPTH)/starboard/starboard.gyp:starboard',
+        '<(DEPTH)/third_party/google_benchmark/google_benchmark.gyp:google_benchmark',
+      ],
+    },
+    {
+      'target_name': 'benchmark_deploy',
+      'type': 'none',
+      'dependencies': [
+        'benchmark',
+      ],
+      'variables': {
+        'executable_name': 'benchmark',
+      },
+      'includes': [ '<(DEPTH)/starboard/build/deploy.gypi' ],
+    },
+  ],
+}
diff --git a/src/starboard/benchmark/memory_benchmark.cc b/src/starboard/benchmark/memory_benchmark.cc
new file mode 100644
index 0000000..24e3cf1
--- /dev/null
+++ b/src/starboard/benchmark/memory_benchmark.cc
@@ -0,0 +1,62 @@
+// Copyright 2019 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "starboard/memory.h"
+
+#include "third_party/google_benchmark/include/benchmark/benchmark.h"
+
+namespace starboard {
+namespace benchmark {
+namespace {
+
+void BM_MemoryCopy(::benchmark::State& state) {
+  void* memory1 = SbMemoryAllocate(state.range(0));
+  void* memory2 = SbMemoryAllocate(state.range(0));
+
+  for (auto _ : state) {
+    SbMemoryCopy(memory1, memory2, state.range(0));
+    ::benchmark::ClobberMemory();
+  }
+  state.SetBytesProcessed(int64_t(state.iterations()) *
+                          int64_t(state.range(0)));
+
+  SbMemoryDeallocate(memory1);
+  SbMemoryDeallocate(memory2);
+}
+
+void BM_MemoryMove(::benchmark::State& state) {
+  void* memory1 = SbMemoryAllocate(state.range(0));
+  void* memory2 = SbMemoryAllocate(state.range(0));
+
+  for (auto _ : state) {
+    SbMemoryMove(memory1, memory2, state.range(0));
+    ::benchmark::ClobberMemory();
+  }
+  state.SetBytesProcessed(int64_t(state.iterations()) *
+                          int64_t(state.range(0)));
+
+  SbMemoryDeallocate(memory1);
+  SbMemoryDeallocate(memory2);
+}
+
+BENCHMARK(BM_MemoryCopy)->RangeMultiplier(4)->Range(16, 1024 * 1024);
+BENCHMARK(BM_MemoryCopy)
+    ->Arg(1024 * 1024)
+    ->DenseThreadRange(1, 4)
+    ->UseRealTime();
+BENCHMARK(BM_MemoryMove)->Arg(1024 * 1024);
+
+}  // namespace
+}  // namespace benchmark
+}  // namespace starboard
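
The two benchmarks above follow the usual google_benchmark pattern: allocate buffers outside the timed loop, perform the measured call inside `for (auto _ : state)`, and report throughput via `SetBytesProcessed`. As an illustration only (not part of this patch, and assuming the same `starboard/memory.h` and google_benchmark headers included above), a hypothetical benchmark for `SbMemorySet` would follow the same shape:

// Illustrative sketch only -- not part of this change. Uses the same includes
// as memory_benchmark.cc above.
void BM_MemorySet(::benchmark::State& state) {
  void* memory = SbMemoryAllocate(state.range(0));

  for (auto _ : state) {
    // Fill the buffer with zeros; ClobberMemory keeps the write observable.
    SbMemorySet(memory, 0, state.range(0));
    ::benchmark::ClobberMemory();
  }
  state.SetBytesProcessed(int64_t(state.iterations()) *
                          int64_t(state.range(0)));

  SbMemoryDeallocate(memory);
}

BENCHMARK(BM_MemorySet)->RangeMultiplier(4)->Range(16, 1024 * 1024);
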
diff --git a/src/starboard/benchmark/thread_benchmark.cc b/src/starboard/benchmark/thread_benchmark.cc
new file mode 100644
index 0000000..d112ef2
--- /dev/null
+++ b/src/starboard/benchmark/thread_benchmark.cc
@@ -0,0 +1,36 @@
+// Copyright 2019 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "third_party/google_benchmark/include/benchmark/benchmark.h"
+
+namespace starboard {
+namespace benchmark {
+namespace {
+
+void BM_IntegerAccumulation(::benchmark::State& state) {
+  const int kIterations = 1024 * 1024;
+  for (auto _ : state) {
+    int x = 0;
+    for (int i = 0; i < kIterations; ++i) {
+      ::benchmark::DoNotOptimize(x += i);
+    }
+  }
+  state.SetItemsProcessed(kIterations);
+}
+
+BENCHMARK(BM_IntegerAccumulation)->DenseThreadRange(1, 8)->UseRealTime();
+
+}  // namespace
+}  // namespace benchmark
+}  // namespace starboard
diff --git a/src/starboard/build/base_configuration.gypi b/src/starboard/build/base_configuration.gypi
index 38b074f..c0dfb8b 100644
--- a/src/starboard/build/base_configuration.gypi
+++ b/src/starboard/build/base_configuration.gypi
@@ -121,6 +121,9 @@
     # Used to indicate that the player is filter based.
     'sb_filter_based_player%': 1,
 
+    # Used to enable benchmarks.
+    'sb_enable_benchmark%': 0,
+
     # This variable dictates whether a given gyp target should be compiled with
     # optimization flags for size vs. speed.
     'optimize_target_for_speed%': 0,
diff --git a/src/starboard/common/benchmark_main.cc b/src/starboard/common/benchmark_main.cc
new file mode 100644
index 0000000..d100a56
--- /dev/null
+++ b/src/starboard/common/benchmark_main.cc
@@ -0,0 +1,33 @@
+// Copyright 2019 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "starboard/client_porting/wrap_main/wrap_main.h"
+#include "starboard/event.h"
+#include "starboard/system.h"
+#include "third_party/google_benchmark/include/benchmark/benchmark.h"
+
+namespace {
+int RunAllBenchmarks(int argc, char** argv) {
+  ::benchmark::Initialize(&argc, argv);
+  ::benchmark::RunSpecifiedBenchmarks();
+  return 0;
+}
+}  // namespace
+
+// When we are building Evergreen we need to export SbEventHandle so that the
+// ELF loader can find and invoke it.
+#if SB_IS(EVERGREEN)
+SB_EXPORT
+#endif  // SB_IS(EVERGREEN)
+STARBOARD_WRAP_SIMPLE_MAIN(RunAllBenchmarks);
diff --git a/src/starboard/configuration.h b/src/starboard/configuration.h
index d650414..607a24f 100644
--- a/src/starboard/configuration.h
+++ b/src/starboard/configuration.h
@@ -180,6 +180,36 @@
 //  values are consumed, take a look at //starboard/sabi.
 #define SB_SABI_FILE_VERSION SB_EXPERIMENTAL_API_VERSION
 
+// Updates the API guarantees of SbMutexAcquireTry.
+// SbMutexAcquireTry now has undefined behavior when it is invoked on a mutex
+// that has already been locked by the calling thread. In addition, since
+// SbMutexAcquireTry was used in SbMutexDestroy, SbMutexDestroy now has
+// undefined behavior when invoked on a locked mutex.
+#define SB_MUTEX_ACQUIRE_TRY_API_CHANGE_VERSION SB_EXPERIMENTAL_API_VERSION
+
+// Migrate the Starboard configuration variables from macros to extern consts.
+//
+// The migration allows Cobalt to make platform level decisions at runtime
+// instead of compile time which lets us create a more comprehensive Cobalt
+// binary.
+//
+// This means Cobalt must remove all references to these macros that do not
+// translate well to constants, e.g. compile-time references or array
+// initializations. As a result, we needed to change `SbDirectoryGetNext` in
+// "starboard/directory.h": because we do not want to use variable-length
+// arrays, the caller now passes in a C string and its length to achieve the
+// same result as passing in an `SbDirectoryEntry` did before.
+//
+// A platform will define the extern constants declared in
+// "starboard/configuration_constants.h". The definitions are done in
+// "starboard/<PLATFORM_PATH>/configuration_constants.cc".
+//
+// The exact mapping between macros and extern variables can be found in
+// "starboard/shared/starboard/configuration_constants_compatibility_defines.h"
+// though the naming scheme is very nearly the same: the old SB_FOO macro will
+// always become the constant kSbFoo.
+#define SB_FEATURE_RUNTIME_CONFIGS_VERSION SB_EXPERIMENTAL_API_VERSION
+
 // --- Release Candidate Feature Defines -------------------------------------
 
 // --- Common Detected Features ----------------------------------------------
@@ -265,6 +295,14 @@
 // and all configurations.
 #include STARBOARD_CONFIGURATION_INCLUDE
 
+#if SB_API_VERSION < SB_FEATURE_RUNTIME_CONFIGS_VERSION
+// After SB_FEATURE_RUNTIME_CONFIGS_VERSION, we start to use runtime constants
+// instead of macros for certain platform-dependent configurations. This file
+// substitutes configuration macros for the corresponding runtime constants so
+// we don't reference these constants when they aren't defined.
+#include "starboard/shared/starboard/configuration_constants_compatibility_defines.h"
+#endif  // SB_API_VERSION < SB_FEATURE_RUNTIME_CONFIGS_VERSION
+
 // --- Overridable Helper Macros ---------------------------------------------
 
 // The following macros can be overridden in STARBOARD_CONFIGURATION_INCLUDE
@@ -586,10 +624,30 @@
 #error "Your platform must define SB_MAX_THREAD_LOCAL_KEYS."
 #endif
 
+#if SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+
+#if defined(SB_MALLOC_ALIGNMENT)
+#error \
+    "SB_MALLOC_ALIGNMENT should not be defined in Starboard " \
+"versions 12 and later. Instead, define kSbMallocAlignment in " \
+"starboard/<PLATFORM_PATH>/configuration_constants.cc."
+#endif
+
+#if defined(SB_MAX_THREAD_NAME_LENGTH)
+#error \
+    "SB_MAX_THREAD_NAME_LENGTH should not be defined in Starboard " \
+"versions 12 and later. Instead, define kSbMaxThreadNameLength in " \
+"starboard/<PLATFORM_PATH>/configuration_constants.cc."
+#endif
+
+#else  // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+
 #if !defined(SB_MAX_THREAD_NAME_LENGTH)
 #error "Your platform must define SB_MAX_THREAD_NAME_LENGTH."
 #endif
 
+#endif  // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+
 #if (SB_API_VERSION < 12 && !defined(SB_HAS_MICROPHONE))
 #error \
     "Your platform must define SB_HAS_MICROPHONE in API versions 11 or earlier."
diff --git a/src/starboard/configuration_constants.h b/src/starboard/configuration_constants.h
new file mode 100644
index 0000000..1d772da
--- /dev/null
+++ b/src/starboard/configuration_constants.h
@@ -0,0 +1,36 @@
+// Copyright 2019 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Module Overview: Starboard Configuration Variables module
+//
+// Declares all configuration variables we will need to use at runtime.
+// These variables describe the current platform in detail to allow Cobalt to
+// make runtime decisions based on per-platform configurations.
+
+#ifndef STARBOARD_CONFIGURATION_CONSTANTS_H_
+#define STARBOARD_CONFIGURATION_CONSTANTS_H_
+
+#include "starboard/types.h"
+
+#if SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+
+// Determines the alignment that allocations should have on this platform.
+extern const size_t kSbMallocAlignment;
+
+// The maximum length of the name for a thread, including the NULL-terminator.
+extern const int32_t kSbMaxThreadNameLength;
+
+#endif  // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+
+#endif  // STARBOARD_CONFIGURATION_CONSTANTS_H_
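
Because these values are now extern consts rather than macros, they can no longer size a C array at compile time, so call sites allocate dynamically. A minimal sketch of the before/after pattern (illustrative only; it assumes the declarations above and the existing SbThreadGetName API):

#include <vector>

#include "starboard/configuration_constants.h"
#include "starboard/thread.h"

// Illustrative helper: reads the current thread's name into a buffer.
void ExampleReadThreadName() {
#if SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
  // Runtime constant: cannot size a stack array, so use a vector instead.
  std::vector<char> name(kSbMaxThreadNameLength);
  SbThreadGetName(name.data(), static_cast<int>(name.size()));
#else   // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
  // Legacy macro: a fixed-size array still works.
  char name[SB_MAX_THREAD_NAME_LENGTH];
  SbThreadGetName(name, static_cast<int>(sizeof(name)));
#endif  // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
}
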
diff --git a/src/starboard/directory.h b/src/starboard/directory.h
index c9032b2..97ef448 100644
--- a/src/starboard/directory.h
+++ b/src/starboard/directory.h
@@ -34,11 +34,13 @@
 // A handle to an open directory stream.
 typedef struct SbDirectoryPrivate* SbDirectory;
 
+#if SB_API_VERSION < SB_FEATURE_RUNTIME_CONFIGS_VERSION
 // Represents a directory entry.
 typedef struct SbDirectoryEntry {
   // The name of this directory entry.
   char name[SB_FILE_MAX_NAME];
 } SbDirectoryEntry;
+#endif  // SB_API_VERSION < SB_FEATURE_RUNTIME_CONFIGS_VERSION
 
 // Well-defined value for an invalid directory stream handle.
 #define kSbDirectoryInvalid ((SbDirectory)NULL)
@@ -64,6 +66,24 @@
 // |directory|: The directory stream handle to close.
 SB_EXPORT bool SbDirectoryClose(SbDirectory directory);
 
+#if SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+// Populates |out_entry| with the next entry in the specified directory stream,
+// and moves the stream forward by one entry.
+//
+// This function returns |true| if there was a next directory, and |false|
+// at the end of the directory stream or if |out_entry_size| is smaller than
+// SB_FILE_MAX_NAME.
+//
+// |directory|: The directory stream from which to retrieve the next directory.
+// |out_entry|: The null terminated string to be populated with the next
+//              directory entry. The space allocated for this string should be
+//              equal to |out_entry_size|.
+// |out_entry_size|: The size of the space allocated for |out_entry|. This
+//                   should be at least equal to SB_FILE_MAX_NAME.
+SB_EXPORT bool SbDirectoryGetNext(SbDirectory directory,
+                                  char* out_entry,
+                                  size_t out_entry_size);
+#else   // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
 // Populates |out_entry| with the next entry in the specified directory stream,
 // and moves the stream forward by one entry.
 //
@@ -74,6 +94,7 @@
 // |out_entry|: The variable to be populated with the next directory entry.
 SB_EXPORT bool SbDirectoryGetNext(SbDirectory directory,
                                   SbDirectoryEntry* out_entry);
+#endif  // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
 
 // Indicates whether SbDirectoryOpen is allowed for the given |path|.
 //
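
Call sites that enumerate a directory now branch on the API version: new code passes a caller-owned buffer of at least SB_FILE_MAX_NAME bytes, older code passes an SbDirectoryEntry. A minimal sketch of that pattern (illustrative only; it mirrors the nplb tests and file_enumerator changes elsewhere in this patch):

#include <string>
#include <vector>

#include "starboard/directory.h"

// Illustrative helper: returns the names of the entries in |path|.
std::vector<std::string> ListDirectory(const char* path) {
  std::vector<std::string> names;
  SbDirectory directory = SbDirectoryOpen(path, NULL);
  if (!SbDirectoryIsValid(directory)) {
    return names;
  }
#if SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
  std::vector<char> entry(SB_FILE_MAX_NAME);
  while (SbDirectoryGetNext(directory, entry.data(), entry.size())) {
    names.push_back(entry.data());
  }
#else   // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
  SbDirectoryEntry entry;
  while (SbDirectoryGetNext(directory, &entry)) {
    names.push_back(entry.name);
  }
#endif  // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
  SbDirectoryClose(directory);
  return names;
}
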
diff --git a/src/starboard/linux/shared/configuration_constants.cc b/src/starboard/linux/shared/configuration_constants.cc
new file mode 100644
index 0000000..973063a
--- /dev/null
+++ b/src/starboard/linux/shared/configuration_constants.cc
@@ -0,0 +1,27 @@
+// Copyright 2019 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file defines all configuration constants for a platform.
+
+#include "starboard/configuration_constants.h"
+
+#if SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+
+// Determines the alignment that allocations should have on this platform.
+const size_t kSbMallocAlignment = 16;
+
+// The maximum length of a name for a thread, including the NULL-terminator.
+const int32_t kSbMaxThreadNameLength = 16;
+
+#endif  // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
diff --git a/src/starboard/linux/shared/configuration_public.h b/src/starboard/linux/shared/configuration_public.h
index b3aeff7..485753c 100644
--- a/src/starboard/linux/shared/configuration_public.h
+++ b/src/starboard/linux/shared/configuration_public.h
@@ -287,8 +287,10 @@
 // specify that.
 #define SB_NETWORK_IO_BUFFER_ALIGNMENT 16
 
+#if SB_API_VERSION < SB_FEATURE_RUNTIME_CONFIGS_VERSION
 // Determines the alignment that allocations should have on this platform.
 #define SB_MALLOC_ALIGNMENT ((size_t)16U)
+#endif  // SB_API_VERSION < SB_FEATURE_RUNTIME_CONFIGS_VERSION
 
 // Determines the threshhold of allocation size that should be done with mmap
 // (if available), rather than allocated within the core heap.
@@ -319,8 +321,10 @@
 // The maximum number of thread local storage keys supported by this platform.
 #define SB_MAX_THREAD_LOCAL_KEYS 512
 
+#if SB_API_VERSION < SB_FEATURE_RUNTIME_CONFIGS_VERSION
 // The maximum length of the name for a thread, including the NULL-terminator.
 #define SB_MAX_THREAD_NAME_LENGTH 16
+#endif  // SB_API_VERSION < SB_FEATURE_RUNTIME_CONFIGS_VERSION
 
 // --- Tuneable Parameters ---------------------------------------------------
 
diff --git a/src/starboard/linux/shared/gyp_configuration.gypi b/src/starboard/linux/shared/gyp_configuration.gypi
index b224935..b565543 100644
--- a/src/starboard/linux/shared/gyp_configuration.gypi
+++ b/src/starboard/linux/shared/gyp_configuration.gypi
@@ -25,6 +25,8 @@
     'target_os': 'linux',
     'yasm_exists': 1,
     'sb_widevine_platform' : 'linux',
+    'sb_disable_opus_sse': 1,
+    'sb_enable_benchmark': 1,
 
     'platform_libraries': [
       '-lasound',
@@ -45,14 +47,14 @@
       'use_dlmalloc_allocator%': 0,
     },
     'conditions': [
-        ['sb_evergreen != 1', {
-          # TODO: allow starboard_platform to use system libc/libc++ in the
-          # future. For now, if this flags is enabled, a warning emerge saying
-          # it's unused anyway.
-          'linker_flags': [
-            '-static-libstdc++',
-          ],
-        }],
+      ['sb_evergreen != 1', {
+        # TODO: allow starboard_platform to use system libc/libc++ in the
+        # future. For now, if this flag is enabled, a warning emerges saying
+        # it's unused anyway.
+        'linker_flags': [
+          '-static-libstdc++',
+        ],
+      }],
     ],
   },
 
diff --git a/src/starboard/linux/shared/launcher.py b/src/starboard/linux/shared/launcher.py
index 662e68d..e5ba8f1 100644
--- a/src/starboard/linux/shared/launcher.py
+++ b/src/starboard/linux/shared/launcher.py
@@ -25,6 +25,7 @@
 
 import _env  # pylint: disable=unused-import
 from starboard.tools import abstract_launcher
+from starboard.tools import send_link
 
 STATUS_CHANGE_TIMEOUT = 15
 
@@ -124,6 +125,15 @@
     else:
       sys.stderr.write("Cannot send suspend to executable; it is closed.\n")
 
+  def SupportsDeepLink(self):
+    return True
+
+  def SendDeepLink(self, link):
+    # The connect call in SendLink occasionally fails. Retry a few times if this happens.
+    connection_attempts = 3
+    return send_link.SendLink(
+        os.path.basename(self.executable), link, connection_attempts)
+
   def WaitForProcessStatus(self, target_status, timeout):
     """Wait for Cobalt to turn to target status within specified timeout limit.
 
diff --git a/src/starboard/linux/shared/player_components_impl.cc b/src/starboard/linux/shared/player_components_impl.cc
index 53652ef..20baaaf 100644
--- a/src/starboard/linux/shared/player_components_impl.cc
+++ b/src/starboard/linux/shared/player_components_impl.cc
@@ -25,6 +25,7 @@
 #include "starboard/shared/libaom/aom_video_decoder.h"
 #include "starboard/shared/libde265/de265_video_decoder.h"
 #include "starboard/shared/libvpx/vpx_video_decoder.h"
+#include "starboard/shared/opus/opus_audio_decoder.h"
 #include "starboard/shared/starboard/player/filter/adaptive_audio_decoder_internal.h"
 #include "starboard/shared/starboard/player/filter/audio_decoder_internal.h"
 #include "starboard/shared/starboard/player/filter/audio_renderer_sink.h"
@@ -52,15 +53,25 @@
     SB_DCHECK(audio_decoder);
     SB_DCHECK(audio_renderer_sink);
 
+    typedef ::starboard::shared::ffmpeg::AudioDecoder FfmpegAudioDecoder;
+    typedef ::starboard::shared::opus::OpusAudioDecoder OpusAudioDecoder;
+
 #if SB_API_VERSION >= 11
     auto decoder_creator = [](const SbMediaAudioSampleInfo& audio_sample_info,
                               SbDrmSystem drm_system) {
-      typedef ::starboard::shared::ffmpeg::AudioDecoder AudioDecoderImpl;
-
-      scoped_ptr<AudioDecoderImpl> audio_decoder_impl(
-          AudioDecoderImpl::Create(audio_sample_info.codec, audio_sample_info));
-      if (audio_decoder_impl && audio_decoder_impl->is_valid()) {
-        return audio_decoder_impl.PassAs<AudioDecoder>();
+      if (audio_sample_info.codec == kSbMediaAudioCodecOpus) {
+        scoped_ptr<OpusAudioDecoder> audio_decoder_impl(
+            new OpusAudioDecoder(audio_sample_info));
+        if (audio_decoder_impl->is_valid()) {
+          return audio_decoder_impl.PassAs<AudioDecoder>();
+        }
+      } else {
+        scoped_ptr<FfmpegAudioDecoder> audio_decoder_impl(
+            FfmpegAudioDecoder::Create(audio_sample_info.codec,
+                                       audio_sample_info));
+        if (audio_decoder_impl && audio_decoder_impl->is_valid()) {
+          return audio_decoder_impl.PassAs<AudioDecoder>();
+        }
       }
       return scoped_ptr<AudioDecoder>();
     };
@@ -69,14 +80,23 @@
         new AdaptiveAudioDecoder(audio_parameters.audio_sample_info,
                                  audio_parameters.drm_system, decoder_creator));
 #else   // SB_API_VERSION >= 11
-    typedef ::starboard::shared::ffmpeg::AudioDecoder AudioDecoderImpl;
-
-    scoped_ptr<AudioDecoderImpl> audio_decoder_impl(AudioDecoderImpl::Create(
-        audio_parameters.audio_codec, audio_parameters.audio_sample_info));
-    if (audio_decoder_impl && audio_decoder_impl->is_valid()) {
-      audio_decoder->reset(audio_decoder_impl.release());
+    if (audio_parameters.audio_codec == kSbMediaAudioCodecOpus) {
+      scoped_ptr<OpusAudioDecoder> audio_decoder_impl(
+          new OpusAudioDecoder(audio_parameters.audio_sample_info));
+      if (audio_decoder_impl && audio_decoder_impl->is_valid()) {
+        audio_decoder->reset(audio_decoder_impl.release());
+      } else {
+        audio_decoder->reset();
+      }
     } else {
-      audio_decoder->reset();
+      scoped_ptr<FfmpegAudioDecoder> audio_decoder_impl(
+          FfmpegAudioDecoder::Create(audio_parameters.audio_codec,
+                                     audio_parameters.audio_sample_info));
+      if (audio_decoder_impl && audio_decoder_impl->is_valid()) {
+        audio_decoder->reset(audio_decoder_impl.release());
+      } else {
+        audio_decoder->reset();
+      }
     }
 #endif  // SB_API_VERSION >= 11
     audio_renderer_sink->reset(new AudioRendererSinkImpl);
diff --git a/src/starboard/linux/shared/starboard_platform.gypi b/src/starboard/linux/shared/starboard_platform.gypi
index d335946..b9286b3 100644
--- a/src/starboard/linux/shared/starboard_platform.gypi
+++ b/src/starboard/linux/shared/starboard_platform.gypi
@@ -31,6 +31,7 @@
       '<(DEPTH)/starboard/linux/shared/command_line_defaults.cc',
       '<(DEPTH)/starboard/linux/shared/command_line_defaults.h',
       '<(DEPTH)/starboard/linux/shared/configuration_public.h',
+      '<(DEPTH)/starboard/linux/shared/configuration_constants.cc',
       '<(DEPTH)/starboard/linux/shared/decode_target_get_info.cc',
       '<(DEPTH)/starboard/linux/shared/decode_target_internal.cc',
       '<(DEPTH)/starboard/linux/shared/decode_target_internal.h',
@@ -125,6 +126,8 @@
       '<(DEPTH)/starboard/shared/nouser/user_get_property.cc',
       '<(DEPTH)/starboard/shared/nouser/user_get_signed_in.cc',
       '<(DEPTH)/starboard/shared/nouser/user_internal.cc',
+      '<(DEPTH)/starboard/shared/opus/opus_audio_decoder.cc',
+      '<(DEPTH)/starboard/shared/opus/opus_audio_decoder.h',
       '<(DEPTH)/starboard/shared/posix/directory_create.cc',
       '<(DEPTH)/starboard/shared/posix/file_atomic_replace.cc',
       '<(DEPTH)/starboard/shared/posix/file_can_open.cc',
@@ -382,6 +385,7 @@
       '<(DEPTH)/third_party/de265_includes/de265_includes.gyp:de265',
       '<(DEPTH)/third_party/dlmalloc/dlmalloc.gyp:dlmalloc',
       '<(DEPTH)/third_party/libevent/libevent.gyp:libevent',
+      '<(DEPTH)/third_party/opus/opus.gyp:opus',
       '<(DEPTH)/third_party/pulseaudio_includes/pulseaudio_includes.gyp:pulseaudio',
     ],
     'conditions': [
diff --git a/src/starboard/mutex.h b/src/starboard/mutex.h
index 0f6226e..afd8927 100644
--- a/src/starboard/mutex.h
+++ b/src/starboard/mutex.h
@@ -54,10 +54,17 @@
 // |out_mutex|: The handle to the newly created mutex.
 SB_EXPORT bool SbMutexCreate(SbMutex* out_mutex);
 
+#if SB_API_VERSION >= SB_MUTEX_ACQUIRE_TRY_API_CHANGE_VERSION
+// Destroys a mutex. The return value indicates whether the destruction was
+// successful. Destroying a locked mutex results in undefined behavior.
+//
+// |mutex|: The mutex to be invalidated.
+#else   // SB_API_VERSION >= SB_MUTEX_ACQUIRE_TRY_API_CHANGE_VERSION
 // Destroys a mutex. The return value indicates whether the destruction was
 // successful.
 //
 // |mutex|: The mutex to be invalidated.
+#endif  // SB_API_VERSION >= SB_MUTEX_ACQUIRE_TRY_API_CHANGE_VERSION
 SB_EXPORT bool SbMutexDestroy(SbMutex* mutex);
 
 // Acquires |mutex|, blocking indefinitely. The return value identifies
@@ -67,11 +74,19 @@
 // |mutex|: The mutex to be acquired.
 SB_EXPORT SbMutexResult SbMutexAcquire(SbMutex* mutex);
 
+#if SB_API_VERSION >= SB_MUTEX_ACQUIRE_TRY_API_CHANGE_VERSION
+// Acquires |mutex|, without blocking. The return value identifies
+// the acquisition result. SbMutexes are not reentrant, so a recursive
+// acquisition has undefined behavior.
+//
+// |mutex|: The mutex to be acquired.
+#else   // SB_API_VERSION >= SB_MUTEX_ACQUIRE_TRY_API_CHANGE_VERSION
 // Acquires |mutex|, without blocking. The return value identifies
 // the acquisition result. SbMutexes are not reentrant, so a recursive
 // acquisition always fails.
 //
 // |mutex|: The mutex to be acquired.
+#endif  // SB_API_VERSION >= SB_MUTEX_ACQUIRE_TRY_API_CHANGE_VERSION
 SB_EXPORT SbMutexResult SbMutexAcquireTry(SbMutex* mutex);
 
 // Releases |mutex| held by the current thread. The return value indicates
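
Under the updated contract, the only portable way to observe kSbMutexBusy is from a different thread, since a recursive SbMutexAcquireTry on the owning thread is now undefined. A minimal sketch of the cross-thread pattern (illustrative only; the nplb mutex_acquire_try test in this patch does the same thing):

#include "starboard/mutex.h"
#include "starboard/thread.h"

namespace {

// Tries to acquire the mutex passed in |context| from a second thread, and
// releases it again if the acquisition succeeded.
void* TryAcquireFromOtherThread(void* context) {
  SbMutex* mutex = static_cast<SbMutex*>(context);
  if (SbMutexAcquireTry(mutex) == kSbMutexAcquired) {
    SbMutexRelease(mutex);
  }
  return NULL;
}

}  // namespace

void ExampleContendedTryAcquire() {
  SbMutex mutex;
  SbMutexCreate(&mutex);
  SbMutexAcquire(&mutex);

  // Do NOT call SbMutexAcquireTry(&mutex) here: the calling thread already
  // holds |mutex|, which is undefined behavior from this API version on.
  SbThread thread =
      SbThreadCreate(0, kSbThreadNoPriority, kSbThreadNoAffinity, true,
                     "try_acquire", &TryAcquireFromOtherThread, &mutex);
  SbThreadJoin(thread, NULL);

  SbMutexRelease(&mutex);
  SbMutexDestroy(&mutex);
}
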
diff --git a/src/starboard/nplb/audio_sink_helpers.cc b/src/starboard/nplb/audio_sink_helpers.cc
index 082ce62..b0a0d9c 100644
--- a/src/starboard/nplb/audio_sink_helpers.cc
+++ b/src/starboard/nplb/audio_sink_helpers.cc
@@ -14,6 +14,8 @@
 
 #include "starboard/nplb/audio_sink_helpers.h"
 
+#include <algorithm>
+
 #include "starboard/common/log.h"
 
 namespace starboard {
@@ -107,13 +109,12 @@
 
 void AudioSinkTestEnvironment::AppendFrame(int frames_to_append) {
   ScopedLock lock(mutex_);
-  frames_appended_ += frames_to_append;
+  AppendFrame_Locked(frames_to_append);
 }
 
-int AudioSinkTestEnvironment::GetFrameBufferFreeSpaceAmount() const {
+int AudioSinkTestEnvironment::GetFrameBufferFreeSpaceInFrames() const {
   ScopedLock lock(mutex_);
-  int frames_in_buffer = frames_appended_ - frames_consumed_;
-  return frame_buffers_.frames_per_channel() - frames_in_buffer;
+  return GetFrameBufferFreeSpaceInFrames_Locked();
 }
 
 bool AudioSinkTestEnvironment::WaitUntilUpdateStatusCalled() {
@@ -147,20 +148,48 @@
 }
 
 bool AudioSinkTestEnvironment::WaitUntilAllFramesAreConsumed() {
+  const int kMaximumFramesPerAppend = 1024;
+
   ScopedLock lock(mutex_);
   is_eos_reached_ = true;
+  int frames_appended_before_eos = frames_appended_;
   SbTimeMonotonic start = SbTimeGetMonotonicNow();
-  while (frames_appended_ != frames_consumed_) {
+  int silence_frames_appended = 0;
+
+  while (frames_consumed_ < frames_appended_before_eos) {
     SbTime time_elapsed = SbTimeGetMonotonicNow() - start;
     if (time_elapsed >= kTimeToTry) {
       return false;
     }
     SbTime time_to_wait = kTimeToTry - time_elapsed;
+
+    // Append silence, as some audio sink implementations cannot play out the
+    // very last frames that were filled.
+    int silence_frames_to_append =
+        std::min({GetFrameBufferFreeSpaceInFrames_Locked(),
+                  frame_buffers_.frames_per_channel() - silence_frames_appended,
+                  kMaximumFramesPerAppend});
+    AppendFrame_Locked(silence_frames_to_append);
+    silence_frames_appended += silence_frames_to_append;
+
     condition_variable_.WaitTimed(time_to_wait);
   }
   return true;
 }
 
+void AudioSinkTestEnvironment::AppendFrame_Locked(int frames_to_append) {
+  mutex_.DCheckAcquired();
+
+  frames_appended_ += frames_to_append;
+}
+
+int AudioSinkTestEnvironment::GetFrameBufferFreeSpaceInFrames_Locked() const {
+  mutex_.DCheckAcquired();
+
+  int frames_in_buffer = frames_appended_ - frames_consumed_;
+  return frame_buffers_.frames_per_channel() - frames_in_buffer;
+}
+
 void AudioSinkTestEnvironment::OnUpdateSourceStatus(int* frames_in_buffer,
                                                     int* offset_in_frames,
                                                     bool* is_playing,
diff --git a/src/starboard/nplb/audio_sink_helpers.h b/src/starboard/nplb/audio_sink_helpers.h
index 215ce94..72921e0 100644
--- a/src/starboard/nplb/audio_sink_helpers.h
+++ b/src/starboard/nplb/audio_sink_helpers.h
@@ -81,7 +81,7 @@
   }
   void SetIsPlaying(bool is_playing);
   void AppendFrame(int frames_to_append);
-  int GetFrameBufferFreeSpaceAmount() const;
+  int GetFrameBufferFreeSpaceInFrames() const;
 
   // The following functions return true when the expected condition are met.
   // Return false on timeout.
@@ -90,6 +90,8 @@
   bool WaitUntilAllFramesAreConsumed();
 
  private:
+  void AppendFrame_Locked(int frames_to_append);
+  int GetFrameBufferFreeSpaceInFrames_Locked() const;
   void OnUpdateSourceStatus(int* frames_in_buffer,
                             int* offset_in_frames,
                             bool* is_playing,
diff --git a/src/starboard/nplb/audio_sink_test.cc b/src/starboard/nplb/audio_sink_test.cc
index caa7870..e6c059b 100644
--- a/src/starboard/nplb/audio_sink_test.cc
+++ b/src/starboard/nplb/audio_sink_test.cc
@@ -66,8 +66,8 @@
   environment.AppendFrame(frames_to_append);
 
   EXPECT_TRUE(environment.WaitUntilSomeFramesAreConsumed());
-  ASSERT_GT(environment.GetFrameBufferFreeSpaceAmount(), 0);
-  environment.AppendFrame(environment.GetFrameBufferFreeSpaceAmount());
+  ASSERT_GT(environment.GetFrameBufferFreeSpaceInFrames(), 0);
+  environment.AppendFrame(environment.GetFrameBufferFreeSpaceInFrames());
   EXPECT_TRUE(environment.WaitUntilAllFramesAreConsumed());
 }
 
@@ -82,10 +82,10 @@
   int frames_to_append = frame_buffers.frames_per_channel();
   environment.AppendFrame(frames_to_append);
 
-  int free_space = environment.GetFrameBufferFreeSpaceAmount();
+  int free_space = environment.GetFrameBufferFreeSpaceInFrames();
   EXPECT_TRUE(environment.WaitUntilUpdateStatusCalled());
   EXPECT_TRUE(environment.WaitUntilUpdateStatusCalled());
-  EXPECT_EQ(free_space, environment.GetFrameBufferFreeSpaceAmount());
+  EXPECT_EQ(free_space, environment.GetFrameBufferFreeSpaceInFrames());
   environment.SetIsPlaying(true);
   EXPECT_TRUE(environment.WaitUntilSomeFramesAreConsumed());
 }
@@ -101,8 +101,8 @@
 
   EXPECT_TRUE(environment.WaitUntilSomeFramesAreConsumed());
   SbThreadSleep(250 * kSbTimeMillisecond);
-  ASSERT_GT(environment.GetFrameBufferFreeSpaceAmount(), 0);
-  environment.AppendFrame(environment.GetFrameBufferFreeSpaceAmount());
+  ASSERT_GT(environment.GetFrameBufferFreeSpaceInFrames(), 0);
+  environment.AppendFrame(environment.GetFrameBufferFreeSpaceInFrames());
   EXPECT_TRUE(environment.WaitUntilAllFramesAreConsumed());
 }
 
@@ -117,7 +117,7 @@
   int frames_to_append = sample_rate / 4;
 
   while (frames_to_append > 0) {
-    int free_space = environment.GetFrameBufferFreeSpaceAmount();
+    int free_space = environment.GetFrameBufferFreeSpaceInFrames();
     environment.AppendFrame(std::min(free_space, frames_to_append));
     frames_to_append -= std::min(free_space, frames_to_append);
     ASSERT_TRUE(environment.WaitUntilSomeFramesAreConsumed());
diff --git a/src/starboard/nplb/condition_variable_wait_test.cc b/src/starboard/nplb/condition_variable_wait_test.cc
index 3e42109..6586d6c 100644
--- a/src/starboard/nplb/condition_variable_wait_test.cc
+++ b/src/starboard/nplb/condition_variable_wait_test.cc
@@ -52,7 +52,7 @@
   const int kMany = SB_MAX_THREADS > 64 ? 64 : SB_MAX_THREADS;
   WaiterContext context;
 
-  SbThread threads[kMany];
+  std::vector<SbThread> threads(kMany);
   for (int i = 0; i < kMany; ++i) {
     threads[i] = SbThreadCreate(0, kSbThreadNoPriority, kSbThreadNoAffinity,
                                 true, NULL, WaiterEntryPoint, &context);
diff --git a/src/starboard/nplb/directory_get_next_test.cc b/src/starboard/nplb/directory_get_next_test.cc
index 24c05ba..80a61ea 100644
--- a/src/starboard/nplb/directory_get_next_test.cc
+++ b/src/starboard/nplb/directory_get_next_test.cc
@@ -50,17 +50,26 @@
   StringSet names_to_find(names);
   int count = 0;
   while (true) {
+#if SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+    std::vector<char> entry(SB_FILE_MAX_NAME, 0);
+    if (!SbDirectoryGetNext(directory, entry.data(), entry.size())) {
+      break;
+    }
+    const char* entry_name = entry.data();
+#else   // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
     SbDirectoryEntry entry = {0};
     if (!SbDirectoryGetNext(directory, &entry)) {
       break;
     }
+    const char* entry_name = entry.name;
+#endif  // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
 
     // SbDirectoryEntry just contains the last component of the absolute path to
     // the file, but ScopedRandomFile::filename() returns the full path.
     std::string filename;
     filename += directory_name;
     filename += SB_FILE_SEP_CHAR;
-    filename += entry.name;
+    filename += entry_name;
 
     StringSet::iterator iterator = names_to_find.find(filename);
     if (iterator != names_to_find.end()) {
@@ -68,7 +77,7 @@
     } else {
       // If it isn't in |names_to_find|, make sure it's some external entry and
       // not one of ours. Otherwise, an entry must have shown up twice.
-      EXPECT_TRUE(names.find(entry.name) == names.end());
+      EXPECT_TRUE(names.find(entry_name) == names.end());
     }
   }
 
@@ -79,8 +88,14 @@
 }
 
 TEST(SbDirectoryGetNextTest, FailureInvalidSbDirectory) {
+#if SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+  std::vector<char> entry(SB_FILE_MAX_NAME, 0);
+  EXPECT_FALSE(SbDirectoryGetNext(kSbDirectoryInvalid, entry.data(),
+                                  entry.size()));
+#else  // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
   SbDirectoryEntry entry = {0};
   EXPECT_FALSE(SbDirectoryGetNext(kSbDirectoryInvalid, &entry));
+#endif  // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
 }
 
 TEST(SbDirectoryGetNextTest, FailureNullEntry) {
@@ -95,12 +110,20 @@
   SbDirectory directory = SbDirectoryOpen(path.c_str(), &error);
   EXPECT_TRUE(SbDirectoryIsValid(directory));
   EXPECT_EQ(kSbFileOk, error);
+#if SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+  EXPECT_FALSE(SbDirectoryGetNext(directory, NULL, SB_FILE_MAX_NAME));
+#else   // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
   EXPECT_FALSE(SbDirectoryGetNext(directory, NULL));
+#endif  // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
   EXPECT_TRUE(SbDirectoryClose(directory));
 }
 
 TEST(SbDirectoryGetNextTest, FailureInvalidAndNull) {
+#if SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+  EXPECT_FALSE(SbDirectoryGetNext(kSbDirectoryInvalid, NULL, SB_FILE_MAX_NAME));
+#else   // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
   EXPECT_FALSE(SbDirectoryGetNext(kSbDirectoryInvalid, NULL));
+#endif  // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
 }
 
 TEST(SbDirectoryGetNextTest, FailureOnEmptyDirectory) {
@@ -112,11 +135,42 @@
   ASSERT_TRUE(SbDirectoryIsValid(directory));
   ASSERT_EQ(kSbFileOk, error);
 
+#if SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+  std::vector<char> entry(SB_FILE_MAX_NAME, 0);
+  EXPECT_FALSE(SbDirectoryGetNext(directory, entry.data(), entry.size()));
+#else   // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
   SbDirectoryEntry entry = {0};
   EXPECT_FALSE(SbDirectoryGetNext(directory, &entry));
+#endif  // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
   ASSERT_TRUE(SbDirectoryClose(directory));
 }
 
+#if SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+TEST(SbDirectoryGetNextTest, FailureOnInsufficientSize) {
+  ScopedRandomFile file;
+  std::string directory_name = file.filename();
+  directory_name.resize(directory_name.find_last_of(SB_FILE_SEP_CHAR));
+  EXPECT_TRUE(SbFileExists(directory_name.c_str()))
+      << "Directory_name is " << directory_name;
+
+  SbFileError error = kSbFileErrorMax;
+  SbDirectory directory = SbDirectoryOpen(directory_name.c_str(), &error);
+  EXPECT_TRUE(SbDirectoryIsValid(directory));
+  EXPECT_EQ(kSbFileOk, error);
+
+  std::vector<char> entry(SB_FILE_MAX_NAME);
+  for (int i = 0; i < SB_FILE_MAX_NAME; i++)
+    entry[i] = i;
+  std::vector<char> entry_copy = entry;
+  EXPECT_EQ(SbDirectoryGetNext(directory, entry.data(), 0), false);
+  EXPECT_EQ(entry.size(), SB_FILE_MAX_NAME);
+  for (int i = 0; i < SB_FILE_MAX_NAME; i++)
+    EXPECT_EQ(entry[i], entry_copy[i]);
+
+  EXPECT_TRUE(SbDirectoryClose(directory));
+}
+#endif  // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+
 }  // namespace
 }  // namespace nplb
 }  // namespace starboard
diff --git a/src/starboard/nplb/directory_open_test.cc b/src/starboard/nplb/directory_open_test.cc
index 5b43367..f9d3155 100644
--- a/src/starboard/nplb/directory_open_test.cc
+++ b/src/starboard/nplb/directory_open_test.cc
@@ -55,16 +55,16 @@
   EXPECT_FILE_EXISTS(path);
 
   const int kMany = SB_FILE_MAX_OPEN;
-  SbDirectory directories[kMany] = {0};
+  std::vector<SbDirectory> directories(kMany, 0);
 
-  for (int i = 0; i < SB_ARRAY_SIZE_INT(directories); ++i) {
+  for (int i = 0; i < directories.size(); ++i) {
     SbFileError error = kSbFileErrorMax;
     directories[i] = SbDirectoryOpen(path.c_str(), &error);
     EXPECT_TRUE(SbDirectoryIsValid(directories[i]));
     EXPECT_EQ(kSbFileOk, error);
   }
 
-  for (int i = 0; i < SB_ARRAY_SIZE_INT(directories); ++i) {
+  for (int i = 0; i < directories.size(); ++i) {
     EXPECT_TRUE(SbDirectoryClose(directories[i]));
   }
 }
diff --git a/src/starboard/nplb/flat_map_test.cc b/src/starboard/nplb/flat_map_test.cc
index dde8625..4564290 100644
--- a/src/starboard/nplb/flat_map_test.cc
+++ b/src/starboard/nplb/flat_map_test.cc
@@ -90,11 +90,14 @@
 }
 
 SbTimeMonotonic GetThreadTimeMonotonicNow() {
-#if SB_HAS(TIME_THREAD_NOW)
-  return SbTimeGetMonotonicThreadNow();
-#else
-  return SbTimeGetMonotonicNow();
+#if SB_API_VERSION >= SB_TIME_THREAD_NOW_REQUIRED_VERSION || \
+    SB_HAS(TIME_THREAD_NOW)
+#if SB_API_VERSION >= SB_TIME_THREAD_NOW_REQUIRED_VERSION
+  if (SbTimeIsTimeThreadNowSupported())
 #endif
+    return SbTimeGetMonotonicThreadNow();
+#endif
+  return SbTimeGetMonotonicNow();
 }
 
 // Generic stringification of the input map type. This allows good error
diff --git a/src/starboard/nplb/mutex_acquire_try_test.cc b/src/starboard/nplb/mutex_acquire_try_test.cc
index d424f1c..806f5f8 100644
--- a/src/starboard/nplb/mutex_acquire_try_test.cc
+++ b/src/starboard/nplb/mutex_acquire_try_test.cc
@@ -16,10 +16,30 @@
 #include "starboard/configuration.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
+#if SB_API_VERSION >= SB_MUTEX_ACQUIRE_TRY_API_CHANGE_VERSION
+#include "starboard/nplb/thread_helpers.h"
+#include "starboard/thread.h"
+#endif  // SB_API_VERSION >= SB_MUTEX_ACQUIRE_TRY_API_CHANGE_VERSION
+
 namespace starboard {
 namespace nplb {
 namespace {
 
+#if SB_API_VERSION >= SB_MUTEX_ACQUIRE_TRY_API_CHANGE_VERSION
+struct TestContext {
+  explicit TestContext(SbMutex* mutex) : was_locked_(false), mutex_(mutex) {}
+  bool was_locked_;
+  SbMutex* mutex_;
+};
+
+void* EntryPoint(void* parameter) {
+  TestContext* context = static_cast<TestContext*>(parameter);
+  context->was_locked_ =
+      (SbMutexAcquireTry(context->mutex_) == kSbMutexAcquired);
+  return NULL;
+}
+#endif  // SB_API_VERSION >= SB_MUTEX_ACQUIRE_TRY_API_CHANGE_VERSION
+
 TEST(SbMutexAcquireTryTest, SunnyDayUncontended) {
   SbMutex mutex;
   EXPECT_TRUE(SbMutexCreate(&mutex));
@@ -46,9 +66,20 @@
   EXPECT_EQ(result, kSbMutexAcquired);
   EXPECT_TRUE(SbMutexIsSuccess(result));
 
+#if SB_API_VERSION >= SB_MUTEX_ACQUIRE_TRY_API_CHANGE_VERSION
+  TestContext context(&mutex);
+  SbThread thread =
+      SbThreadCreate(0, kSbThreadNoPriority, kSbThreadNoAffinity, true,
+                     nplb::kThreadName, &EntryPoint, &context);
+
+  EXPECT_TRUE(SbThreadIsValid(thread));
+  EXPECT_TRUE(SbThreadJoin(thread, NULL));
+  EXPECT_FALSE(context.was_locked_);
+#else   // SB_API_VERSION >= SB_MUTEX_ACQUIRE_TRY_API_CHANGE_VERSION
   result = SbMutexAcquireTry(&mutex);
   EXPECT_EQ(result, kSbMutexBusy);
   EXPECT_FALSE(SbMutexIsSuccess(result));
+#endif  // SB_API_VERSION >= SB_MUTEX_ACQUIRE_TRY_API_CHANGE_VERSION
 
   EXPECT_TRUE(SbMutexRelease(&mutex));
   EXPECT_TRUE(SbMutexDestroy(&mutex));
diff --git a/src/starboard/nplb/mutex_destroy_test.cc b/src/starboard/nplb/mutex_destroy_test.cc
index 92395b6..02e072d 100644
--- a/src/starboard/nplb/mutex_destroy_test.cc
+++ b/src/starboard/nplb/mutex_destroy_test.cc
@@ -27,6 +27,10 @@
   EXPECT_TRUE(SbMutexDestroy(&mutex));
 }
 
+#if SB_API_VERSION >= SB_MUTEX_ACQUIRE_TRY_API_CHANGE_VERSION
+// Destroying a mutex that has already been destroyed is undefined behavior
+// and cannot be tested.
+#else   // SB_API_VERSION >= SB_MUTEX_ACQUIRE_TRY_API_CHANGE_VERSION
 TEST(SbMutexDestroyTest, RainyDayDestroyHeld) {
   SbMutex mutex;
   EXPECT_TRUE(SbMutexCreate(&mutex));
@@ -39,6 +43,7 @@
   EXPECT_TRUE(SbMutexRelease(&mutex));
   EXPECT_TRUE(SbMutexDestroy(&mutex));
 }
+#endif  // SB_API_VERSION >= SB_MUTEX_ACQUIRE_TRY_API_CHANGE_VERSION
 
 TEST(SbMutexDestroyTest, RainyDayNull) {
   EXPECT_FALSE(SbMutexDestroy(NULL));
diff --git a/src/starboard/nplb/nplb.gyp b/src/starboard/nplb/nplb.gyp
index cd280b6..6739ebe 100644
--- a/src/starboard/nplb/nplb.gyp
+++ b/src/starboard/nplb/nplb.gyp
@@ -165,6 +165,7 @@
         'memory_map_test.cc',
         'memory_move_test.cc',
         'memory_reallocate_test.cc',
+        'memory_reporter_test.cc',
         'memory_set_test.cc',
         'microphone_close_test.cc',
         'microphone_create_test.cc',
@@ -314,8 +315,6 @@
       'conditions': [
         ['sb_evergreen != 1', {
           'sources': [
-            # Segfaults for Cobalt Evergreen.
-            'memory_reporter_test.cc',
             # Segfaults or causes unresolved symbols for Cobalt Evergreen.
             'media_set_audio_write_duration_test.cc',
           ],
diff --git a/src/starboard/nplb/once_test.cc b/src/starboard/nplb/once_test.cc
index 0885860..b2743b8 100644
--- a/src/starboard/nplb/once_test.cc
+++ b/src/starboard/nplb/once_test.cc
@@ -98,7 +98,7 @@
 // routine got called exactly one time.
 TEST(SbOnceTest, SunnyDayMultipleThreadsInit) {
   const int kMany = SB_MAX_THREADS;
-  SbThread threads[kMany];
+  std::vector<SbThread> threads(kMany);
 
   const int kIterationCount = 10;
   for (int i = 0; i < kIterationCount; ++i) {
diff --git a/src/starboard/nplb/sabi/sabi.gypi b/src/starboard/nplb/sabi/sabi.gypi
index 3d76231..19c69ed 100644
--- a/src/starboard/nplb/sabi/sabi.gypi
+++ b/src/starboard/nplb/sabi/sabi.gypi
@@ -17,6 +17,7 @@
     'sabi_sources': [
       '<(DEPTH)/starboard/nplb/sabi/alignment_test.cc',
       '<(DEPTH)/starboard/nplb/sabi/endianness_test.cc',
+      '<(DEPTH)/starboard/nplb/sabi/signedness_and_size_of_enum_test.cc',
       '<(DEPTH)/starboard/nplb/sabi/signedness_of_char_test.cc',
       '<(DEPTH)/starboard/nplb/sabi/size_test.cc',
       '<(DEPTH)/starboard/nplb/sabi/struct_alignment_test.cc',
diff --git a/src/starboard/nplb/sabi/signedness_and_size_of_enum_test.cc b/src/starboard/nplb/sabi/signedness_and_size_of_enum_test.cc
new file mode 100644
index 0000000..bf66cd8
--- /dev/null
+++ b/src/starboard/nplb/sabi/signedness_and_size_of_enum_test.cc
@@ -0,0 +1,37 @@
+// Copyright 2019 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "starboard/configuration.h"
+
+#if SB_API_VERSION >= SB_SABI_FILE_VERSION
+
+namespace starboard {
+namespace sabi {
+namespace {
+
+typedef enum GenericEnumType {
+  kOnlyTag,
+} GenericEnumType;
+
+SB_COMPILE_ASSERT((static_cast<GenericEnumType>(-1) < 0) == SB_HAS_SIGNED_ENUM,
+                  SB_HAS_SIGNED_ENUM_is_inconsistent_with_sign_of_enum);
+
+SB_COMPILE_ASSERT(sizeof(GenericEnumType) == SB_SIZE_OF_ENUM,
+                  SB_SIZE_OF_ENUM_is_inconsistent_with_sizeof_enum);
+
+}  // namespace
+}  // namespace sabi
+}  // namespace starboard
+
+#endif  // SB_API_VERSION >= SB_SABI_FILE_VERSION
diff --git a/src/starboard/nplb/socket_waiter_add_test.cc b/src/starboard/nplb/socket_waiter_add_test.cc
index b56395c..a942c11 100644
--- a/src/starboard/nplb/socket_waiter_add_test.cc
+++ b/src/starboard/nplb/socket_waiter_add_test.cc
@@ -67,7 +67,7 @@
   EXPECT_TRUE(SbSocketWaiterIsValid(waiter));
 
   const int kMany = SB_FILE_MAX_OPEN;
-  SbSocket sockets[kMany] = {0};
+  std::vector<SbSocket> sockets(kMany, 0);
   for (int i = 0; i < kMany; ++i) {
     sockets[i] = SbSocketCreate(GetAddressType(), kSbSocketProtocolTcp);
     ASSERT_TRUE(SbSocketIsValid(sockets[i]));
diff --git a/src/starboard/nplb/speech_recognizer_cancel_test.cc b/src/starboard/nplb/speech_recognizer_cancel_test.cc
index 0743dc9..630e45a 100644
--- a/src/starboard/nplb/speech_recognizer_cancel_test.cc
+++ b/src/starboard/nplb/speech_recognizer_cancel_test.cc
@@ -20,9 +20,12 @@
 namespace starboard {
 namespace nplb {
 
-#if SB_HAS(SPEECH_RECOGNIZER) && SB_API_VERSION >= 5
+#if SB_API_VERSION >= SB_SPEECH_RECOGNIZER_REQUIRED_VERSION || \
+    SB_HAS(SPEECH_RECOGNIZER) && SB_API_VERSION >= 5
 
 TEST_F(SpeechRecognizerTest, CancelTestSunnyDay) {
+  if (SkipLocale())
+    return;
   SbSpeechRecognizer recognizer = SbSpeechRecognizerCreate(handler());
   EXPECT_TRUE(SbSpeechRecognizerIsValid(recognizer));
   SbSpeechConfiguration configuration = {true, true, 1};
@@ -38,6 +41,8 @@
 }
 
 TEST_F(SpeechRecognizerTest, CancelIsCalledMultipleTimes) {
+  if (SkipLocale())
+    return;
   SbSpeechRecognizer recognizer = SbSpeechRecognizerCreate(handler());
   EXPECT_TRUE(SbSpeechRecognizerIsValid(recognizer));
   SbSpeechConfiguration configuration = {true, false, 1};
@@ -55,6 +60,8 @@
 }
 
 TEST_F(SpeechRecognizerTest, CancelTestStartIsNotCalled) {
+  if (SkipLocale())
+    return;
   SbSpeechRecognizer recognizer = SbSpeechRecognizerCreate(handler());
   EXPECT_TRUE(SbSpeechRecognizerIsValid(recognizer));
   SbSpeechRecognizerCancel(recognizer);
@@ -62,10 +69,13 @@
 }
 
 TEST_F(SpeechRecognizerTest, CancelWithInvalidSpeechRecognizer) {
+  if (SkipLocale())
+    return;
   SbSpeechRecognizerCancel(kSbSpeechRecognizerInvalid);
 }
 
-#endif  // SB_HAS(SPEECH_RECOGNIZER) && SB_API_VERSION >= 5
+#endif  // SB_API_VERSION >= SB_SPEECH_RECOGNIZER_REQUIRED_VERSION ||
+        // SB_HAS(SPEECH_RECOGNIZER) && SB_API_VERSION >= 5
 
 }  // namespace nplb
 }  // namespace starboard
diff --git a/src/starboard/nplb/speech_recognizer_create_test.cc b/src/starboard/nplb/speech_recognizer_create_test.cc
index adfbc20..52da0c7 100644
--- a/src/starboard/nplb/speech_recognizer_create_test.cc
+++ b/src/starboard/nplb/speech_recognizer_create_test.cc
@@ -19,15 +19,19 @@
 namespace starboard {
 namespace nplb {
 
-#if SB_HAS(SPEECH_RECOGNIZER) && SB_API_VERSION >= 5
+#if SB_API_VERSION >= SB_SPEECH_RECOGNIZER_REQUIRED_VERSION || \
+    SB_HAS(SPEECH_RECOGNIZER) && SB_API_VERSION >= 5
 
 TEST_F(SpeechRecognizerTest, CreateTestSunnyDay) {
+  if (SkipLocale())
+    return;
   SbSpeechRecognizer recognizer = SbSpeechRecognizerCreate(handler());
   EXPECT_TRUE(SbSpeechRecognizerIsValid(recognizer));
   SbSpeechRecognizerDestroy(recognizer);
 }
 
-#endif  // SB_HAS(SPEECH_RECOGNIZER) && SB_API_VERSION >= 5
+#endif  // SB_API_VERSION >= SB_SPEECH_RECOGNIZER_REQUIRED_VERSION ||
+        // SB_HAS(SPEECH_RECOGNIZER) && SB_API_VERSION >= 5
 
 }  // namespace nplb
 }  // namespace starboard
diff --git a/src/starboard/nplb/speech_recognizer_destroy_test.cc b/src/starboard/nplb/speech_recognizer_destroy_test.cc
index 857fc96..f2f5fe1 100644
--- a/src/starboard/nplb/speech_recognizer_destroy_test.cc
+++ b/src/starboard/nplb/speech_recognizer_destroy_test.cc
@@ -19,13 +19,18 @@
 namespace starboard {
 namespace nplb {
 
-#if SB_HAS(SPEECH_RECOGNIZER) && SB_API_VERSION >= 5
+#if SB_API_VERSION >= SB_SPEECH_RECOGNIZER_REQUIRED_VERSION || \
+    SB_HAS(SPEECH_RECOGNIZER) && SB_API_VERSION >= 5
 
 TEST_F(SpeechRecognizerTest, DestroyInvalidSpeechRecognizer) {
+  if (SkipLocale())
+    return;
   SbSpeechRecognizerDestroy(kSbSpeechRecognizerInvalid);
 }
 
 TEST_F(SpeechRecognizerTest, DestroyRecognizerWithoutStopping) {
+  if (SkipLocale())
+    return;
   SbSpeechRecognizer recognizer = SbSpeechRecognizerCreate(handler());
   EXPECT_TRUE(SbSpeechRecognizerIsValid(recognizer));
   SbSpeechConfiguration configuration = {true, true, 1};
@@ -33,7 +38,8 @@
   SbSpeechRecognizerDestroy(recognizer);
 }
 
-#endif  // SB_HAS(SPEECH_RECOGNIZER) && SB_API_VERSION >= 5
+#endif  // SB_API_VERSION >= SB_SPEECH_RECOGNIZER_REQUIRED_VERSION ||
+        // SB_HAS(SPEECH_RECOGNIZER) && SB_API_VERSION >= 5
 
 }  // namespace nplb
 }  // namespace starboard
diff --git a/src/starboard/nplb/speech_recognizer_helper.h b/src/starboard/nplb/speech_recognizer_helper.h
index cba631c..5ab23b0 100644
--- a/src/starboard/nplb/speech_recognizer_helper.h
+++ b/src/starboard/nplb/speech_recognizer_helper.h
@@ -25,7 +25,8 @@
 namespace starboard {
 namespace nplb {
 
-#if SB_HAS(SPEECH_RECOGNIZER) && SB_API_VERSION >= 5
+#if SB_API_VERSION >= SB_SPEECH_RECOGNIZER_REQUIRED_VERSION || \
+    SB_HAS(SPEECH_RECOGNIZER) && SB_API_VERSION >= 5
 
 class SpeechRecognizerTest : public ::testing::Test {
  public:
@@ -48,6 +49,26 @@
   }
 
  protected:
+  bool isTestFixtureSupported;
+  virtual void SetUp() {
+    // We include all API tests at compile time after Starboard version 12, so
+    // we must do a runtime check to determine whether or not that API (and
+    // thus the test fixture) is supported.
+#if SB_API_VERSION >= SB_SPEECH_RECOGNIZER_REQUIRED_VERSION
+    isTestFixtureSupported = SbSpeechRecognizerIsSupported();
+#else
+    isTestFixtureSupported = true;
+#endif
+  }
+
+  // TODO: Use GTEST_SKIP in |SetUp| when we have a newer version of gtest.
+  bool SkipLocale() {
+    if (!isTestFixtureSupported) {
+      SB_LOG(INFO) << "Speech recognizer not supported. Test skipped.";
+    }
+    return !isTestFixtureSupported;
+  }
+
   // Per test teardown.
   virtual void TearDown() {
     // Wait for the speech recognizer server to tear down in order to start
@@ -62,7 +83,8 @@
   SbSpeechRecognizerHandler handler_;
 };
 
-#endif  // SB_HAS(SPEECH_RECOGNIZER) && SB_API_VERSION >= 5
+#endif  // SB_API_VERSION >= SB_SPEECH_RECOGNIZER_REQUIRED_VERSION ||
+        // SB_HAS(SPEECH_RECOGNIZER) && SB_API_VERSION >= 5
 
 }  // namespace nplb
 }  // namespace starboard
diff --git a/src/starboard/nplb/speech_recognizer_start_test.cc b/src/starboard/nplb/speech_recognizer_start_test.cc
index 15922f6..85acebc 100644
--- a/src/starboard/nplb/speech_recognizer_start_test.cc
+++ b/src/starboard/nplb/speech_recognizer_start_test.cc
@@ -19,9 +19,12 @@
 namespace starboard {
 namespace nplb {
 
-#if SB_HAS(SPEECH_RECOGNIZER) && SB_API_VERSION >= 5
+#if SB_API_VERSION >= SB_SPEECH_RECOGNIZER_REQUIRED_VERSION || \
+    SB_HAS(SPEECH_RECOGNIZER) && SB_API_VERSION >= 5
 
 TEST_F(SpeechRecognizerTest, StartTestSunnyDay) {
+  if (SkipLocale())
+    return;
   SbSpeechRecognizer recognizer = SbSpeechRecognizerCreate(handler());
   EXPECT_TRUE(SbSpeechRecognizerIsValid(recognizer));
   SbSpeechConfiguration configuration = {false, false, 1};
@@ -36,6 +39,8 @@
 }
 
 TEST_F(SpeechRecognizerTest, StartRecognizerWithContinuousRecognition) {
+  if (SkipLocale())
+    return;
   SbSpeechRecognizer recognizer = SbSpeechRecognizerCreate(handler());
   EXPECT_TRUE(SbSpeechRecognizerIsValid(recognizer));
   SbSpeechConfiguration configuration = {true, false, 1};
@@ -50,6 +55,8 @@
 }
 
 TEST_F(SpeechRecognizerTest, StartRecognizerWithInterimResults) {
+  if (SkipLocale())
+    return;
   SbSpeechRecognizer recognizer = SbSpeechRecognizerCreate(handler());
   EXPECT_TRUE(SbSpeechRecognizerIsValid(recognizer));
   SbSpeechConfiguration configuration = {false, true, 1};
@@ -64,6 +71,8 @@
 }
 
 TEST_F(SpeechRecognizerTest, StartRecognizerWith10MaxAlternatives) {
+  if (SkipLocale())
+    return;
   SbSpeechRecognizer recognizer = SbSpeechRecognizerCreate(handler());
   EXPECT_TRUE(SbSpeechRecognizerIsValid(recognizer));
   SbSpeechConfiguration configuration = {true, true, 10};
@@ -78,6 +87,8 @@
 }
 
 TEST_F(SpeechRecognizerTest, StartIsCalledMultipleTimes) {
+  if (SkipLocale())
+    return;
   SbSpeechRecognizer recognizer = SbSpeechRecognizerCreate(handler());
   EXPECT_TRUE(SbSpeechRecognizerIsValid(recognizer));
   SbSpeechConfiguration configuration = {true, true, 1};
@@ -95,13 +106,16 @@
 }
 
 TEST_F(SpeechRecognizerTest, StartWithInvalidSpeechRecognizer) {
+  if (SkipLocale())
+    return;
   SbSpeechConfiguration configuration = {true, true, 1};
   bool success =
       SbSpeechRecognizerStart(kSbSpeechRecognizerInvalid, &configuration);
   EXPECT_FALSE(success);
 }
 
-#endif  // SB_HAS(SPEECH_RECOGNIZER) && SB_API_VERSION >= 5
+#endif  // SB_API_VERSION >= SB_SPEECH_RECOGNIZER_REQUIRED_VERSION ||
+        // SB_HAS(SPEECH_RECOGNIZER) && SB_API_VERSION >= 5
 
 }  // namespace nplb
 }  // namespace starboard
diff --git a/src/starboard/nplb/speech_recognizer_stop_test.cc b/src/starboard/nplb/speech_recognizer_stop_test.cc
index bbad125..d8808be 100644
--- a/src/starboard/nplb/speech_recognizer_stop_test.cc
+++ b/src/starboard/nplb/speech_recognizer_stop_test.cc
@@ -19,9 +19,12 @@
 namespace starboard {
 namespace nplb {
 
-#if SB_HAS(SPEECH_RECOGNIZER) && SB_API_VERSION >= 5
+#if SB_API_VERSION >= SB_SPEECH_RECOGNIZER_REQUIRED_VERSION || \
+    SB_HAS(SPEECH_RECOGNIZER) && SB_API_VERSION >= 5
 
 TEST_F(SpeechRecognizerTest, StopIsCalledMultipleTimes) {
+  if (SkipLocale())
+    return;
   SbSpeechRecognizer recognizer = SbSpeechRecognizerCreate(handler());
   EXPECT_TRUE(SbSpeechRecognizerIsValid(recognizer));
   SbSpeechConfiguration configuration = {true, true, 1};
@@ -37,7 +40,8 @@
   SbSpeechRecognizerDestroy(recognizer);
 }
 
-#endif  // SB_HAS(SPEECH_RECOGNIZER) && SB_API_VERSION >= 5
+#endif  // SB_API_VERSION >= SB_SPEECH_RECOGNIZER_REQUIRED_VERSION ||
+        // SB_HAS(SPEECH_RECOGNIZER) && SB_API_VERSION >= 5
 
 }  // namespace nplb
 }  // namespace starboard
diff --git a/src/starboard/nplb/speech_synthesis_basic_test.cc b/src/starboard/nplb/speech_synthesis_basic_test.cc
index e6b09e3..65a2f1d 100644
--- a/src/starboard/nplb/speech_synthesis_basic_test.cc
+++ b/src/starboard/nplb/speech_synthesis_basic_test.cc
@@ -19,14 +19,22 @@
 namespace nplb {
 namespace {
 
-#if SB_HAS(SPEECH_SYNTHESIS)
+#if SB_API_VERSION >= SB_SPEECH_SYNTHESIS_REQUIRED_VERSION || \
+    SB_HAS(SPEECH_SYNTHESIS)
 
 TEST(SbSpeechSynthesisBasicTest, Basic) {
+#if SB_API_VERSION >= SB_SPEECH_SYNTHESIS_REQUIRED_VERSION
+  if (!SbSpeechSynthesisIsSupported()) {
+    SB_LOG(INFO) << "Speech synthesis not supported. Test skipped.";
+    return;
+  }
+#endif
   SbSpeechSynthesisSpeak("Hello");
   SbSpeechSynthesisCancel();
 }
 
-#endif  // SB_HAS(SPEECH_SYNTHESIS)
+#endif  // SB_API_VERSION >= SB_SPEECH_SYNTHESIS_REQUIRED_VERSION ||
+        // SB_HAS(SPEECH_SYNTHESIS)
 
 }  // namespace
 }  // namespace nplb
diff --git a/src/starboard/nplb/thread_create_test.cc b/src/starboard/nplb/thread_create_test.cc
index 716e001..74d4bef 100644
--- a/src/starboard/nplb/thread_create_test.cc
+++ b/src/starboard/nplb/thread_create_test.cc
@@ -142,7 +142,7 @@
 
 TEST(SbThreadCreateTest, Summertime) {
   const int kMany = SB_MAX_THREADS;
-  SbThread threads[kMany];
+  std::vector<SbThread> threads(kMany);
   for (int i = 0; i < kMany; ++i) {
     threads[i] = SbThreadCreate(0, kSbThreadNoPriority, kSbThreadNoAffinity,
                                 true, nplb::kThreadName, nplb::AddOneEntryPoint,
diff --git a/src/starboard/nplb/thread_local_value_test.cc b/src/starboard/nplb/thread_local_value_test.cc
index dcede6c..4c94f0f 100644
--- a/src/starboard/nplb/thread_local_value_test.cc
+++ b/src/starboard/nplb/thread_local_value_test.cc
@@ -177,7 +177,7 @@
 
 TEST(SbThreadLocalValueTest, SunnyDayMany) {
   const int kMany = (2 * SB_MAX_THREAD_LOCAL_KEYS) / 3;
-  SbThreadLocalKey keys[kMany];
+  std::vector<SbThreadLocalKey> keys(kMany);
 
   for (int i = 0; i < kMany; ++i) {
     keys[i] = SbThreadCreateLocalKey(NULL);
diff --git a/src/starboard/raspi/shared/configuration_constants.cc b/src/starboard/raspi/shared/configuration_constants.cc
new file mode 100644
index 0000000..5b3c886
--- /dev/null
+++ b/src/starboard/raspi/shared/configuration_constants.cc
@@ -0,0 +1,23 @@
+// Copyright 2019 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file defines all configuration constants for a platform.
+
+#include "starboard/configuration_constants.h"
+
+// Determines the alignment that allocations should have on this platform.
+const size_t kSbMallocAlignment = 16;
+
+// The maximum length of a name for a thread, including the NULL-terminator.
+const int32_t kSbMaxThreadNameLength = 16;
diff --git a/src/starboard/raspi/shared/configuration_public.h b/src/starboard/raspi/shared/configuration_public.h
index bbeb085..13cd368 100644
--- a/src/starboard/raspi/shared/configuration_public.h
+++ b/src/starboard/raspi/shared/configuration_public.h
@@ -288,9 +288,6 @@
 // specify that.
 #define SB_NETWORK_IO_BUFFER_ALIGNMENT 16
 
-// Determines the alignment that allocations should have on this platform.
-#define SB_MALLOC_ALIGNMENT ((size_t)16U)
-
 // Determines the threshhold of allocation size that should be done with mmap
 // (if available), rather than allocated within the core heap.
 #define SB_DEFAULT_MMAP_THRESHOLD ((size_t)(256 * 1024U))
@@ -329,9 +326,6 @@
 // The maximum number of thread local storage keys supported by this platform.
 #define SB_MAX_THREAD_LOCAL_KEYS 512
 
-// The maximum length of the name for a thread, including the NULL-terminator.
-#define SB_MAX_THREAD_NAME_LENGTH 16
-
 // --- Timing API ------------------------------------------------------------
 
 // Whether this platform has an API to retrieve how long the current thread
diff --git a/src/starboard/raspi/shared/starboard_platform.gypi b/src/starboard/raspi/shared/starboard_platform.gypi
index 17498ec..d43a9aa 100644
--- a/src/starboard/raspi/shared/starboard_platform.gypi
+++ b/src/starboard/raspi/shared/starboard_platform.gypi
@@ -43,6 +43,7 @@
         '<@(filter_based_player_sources)',
         '<(DEPTH)/starboard/linux/shared/atomic_public.h',
         '<(DEPTH)/starboard/linux/shared/configuration_public.h',
+        '<(DEPTH)/starboard/linux/shared/configuration_constants.cc',
         '<(DEPTH)/starboard/linux/shared/system_get_connection_type.cc',
         '<(DEPTH)/starboard/linux/shared/system_get_device_type.cc',
         '<(DEPTH)/starboard/linux/shared/system_get_path.cc',
diff --git a/src/starboard/shared/iso/directory_get_next.cc b/src/starboard/shared/iso/directory_get_next.cc
index e478a39..101ef08 100644
--- a/src/starboard/shared/iso/directory_get_next.cc
+++ b/src/starboard/shared/iso/directory_get_next.cc
@@ -16,7 +16,16 @@
 
 #include "starboard/shared/iso/impl/directory_get_next.h"
 
+#if SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+bool SbDirectoryGetNext(SbDirectory directory,
+                        char* out_entry,
+                        size_t out_entry_size) {
+  return ::starboard::shared::iso::impl::SbDirectoryGetNext(
+      directory, out_entry, out_entry_size);
+}
+#else   // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
 bool SbDirectoryGetNext(SbDirectory directory, SbDirectoryEntry* out_entry) {
   return ::starboard::shared::iso::impl::SbDirectoryGetNext(directory,
                                                             out_entry);
 }
+#endif  // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
diff --git a/src/starboard/shared/iso/impl/directory_get_next.h b/src/starboard/shared/iso/impl/directory_get_next.h
index 1bb4a1d..74d3c39 100644
--- a/src/starboard/shared/iso/impl/directory_get_next.h
+++ b/src/starboard/shared/iso/impl/directory_get_next.h
@@ -31,7 +31,16 @@
 namespace iso {
 namespace impl {
 
-bool SbDirectoryGetNext(SbDirectory directory, SbDirectoryEntry* out_entry) {
+bool SbDirectoryGetNext(SbDirectory directory,
+#if SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+                        char* out_entry,
+                        size_t out_entry_size) {
+  if (out_entry_size < SB_FILE_MAX_NAME) {
+    return false;
+  }
+#else   // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+                        SbDirectoryEntry* out_entry) {
+#endif  // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
   if (!directory || !directory->directory || !out_entry) {
     return false;
   }
@@ -54,8 +63,13 @@
     }
   } while (true);
 
+#if SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+  SbStringCopy(out_entry, dirent->d_name, out_entry_size);
+#else   // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
   SbStringCopy(out_entry->name, dirent->d_name,
                SB_ARRAY_SIZE_INT(out_entry->name));
+#endif  // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+
   return true;
 }
 
diff --git a/src/starboard/shared/linux/dev_input/dev_input.cc b/src/starboard/shared/linux/dev_input/dev_input.cc
index 4e76bb4..2acca58 100644
--- a/src/starboard/shared/linux/dev_input/dev_input.cc
+++ b/src/starboard/shared/linux/dev_input/dev_input.cc
@@ -769,7 +769,19 @@
   }
 
   while (true) {
+#if SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+    std::vector<char> entry(SB_FILE_MAX_NAME);
+
+    if (!SbDirectoryGetNext(directory, entry.data(), SB_FILE_MAX_NAME)) {
+      break;
+    }
+
+    std::string path = kDevicePath;
+    path += "/";
+    path += entry.data();
+#else   // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
     SbDirectoryEntry entry;
+
     if (!SbDirectoryGetNext(directory, &entry)) {
       break;
     }
@@ -777,6 +789,7 @@
     std::string path = kDevicePath;
     path += "/";
     path += entry.name;
+#endif  // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
 
     if (SbDirectoryCanOpen(path.c_str())) {
       // This is a subdirectory. Skip.
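
The directory-enumeration call sites in this patch all follow the same pattern once SB_FEATURE_RUNTIME_CONFIGS_VERSION is reached: allocate a name buffer of SB_FILE_MAX_NAME bytes and pass it to the buffer-plus-size overload of SbDirectoryGetNext. A minimal caller-side sketch of that pattern, using only Starboard calls that appear in this patch (the directory path is a placeholder, and the usual Starboard headers are assumed to be available):

#include <vector>

#include "starboard/common/log.h"
#include "starboard/directory.h"
#include "starboard/file.h"

// Lists the entries of |path| with the newer SbDirectoryGetNext() signature
// introduced for SB_FEATURE_RUNTIME_CONFIGS_VERSION.
static void ListDirectory(const char* path) {
  SbFileError error = kSbFileOk;
  SbDirectory dir = SbDirectoryOpen(path, &error);
  if (!SbDirectoryIsValid(dir)) {
    SB_LOG(ERROR) << "Cannot open " << path << ", error " << error;
    return;
  }
  // The entry is now a plain character buffer sized by the platform constant,
  // instead of an SbDirectoryEntry struct.
  std::vector<char> entry(SB_FILE_MAX_NAME);
  while (SbDirectoryGetNext(dir, entry.data(), entry.size())) {
    SB_LOG(INFO) << "entry: " << entry.data();
  }
  SbDirectoryClose(dir);
}
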
diff --git a/src/starboard/shared/linux/thread_set_name.cc b/src/starboard/shared/linux/thread_set_name.cc
index f6066ec..be97f15 100644
--- a/src/starboard/shared/linux/thread_set_name.cc
+++ b/src/starboard/shared/linux/thread_set_name.cc
@@ -22,13 +22,15 @@
 #include "starboard/common/log.h"
 #include "starboard/common/string.h"
 
+#include "starboard/configuration_constants.h"
+
 void SbThreadSetName(const char* name) {
   // We don't want to rename the main thread.
   if (SbThreadGetId() == getpid()) {
     return;
   }
 
-  char buffer[SB_MAX_THREAD_NAME_LENGTH] = {};
+  char buffer[kSbMaxThreadNameLength];
 
   if (SbStringGetLength(name) >= SB_ARRAY_SIZE_INT(buffer)) {
     SbStringCopy(buffer, name, SB_ARRAY_SIZE_INT(buffer));
diff --git a/src/starboard/shared/opus/opus_audio_decoder.cc b/src/starboard/shared/opus/opus_audio_decoder.cc
index 04dd0a6..5ca3d94 100644
--- a/src/starboard/shared/opus/opus_audio_decoder.cc
+++ b/src/starboard/shared/opus/opus_audio_decoder.cc
@@ -27,7 +27,6 @@
 namespace opus {
 
 namespace {
-const int kMaxOpusFramesPerAU = 9600;
 
 typedef struct {
   int nb_streams;
@@ -52,15 +51,6 @@
 OpusAudioDecoder::OpusAudioDecoder(
     const SbMediaAudioSampleInfo& audio_sample_info)
     : audio_sample_info_(audio_sample_info) {
-#if SB_HAS_QUIRK(SUPPORT_INT16_AUDIO_SAMPLES)
-  working_buffer_.resize(kMaxOpusFramesPerAU *
-                         audio_sample_info_.number_of_channels *
-                         sizeof(opus_int16));
-#else   // SB_HAS_QUIRK(SUPPORT_INT16_AUDIO_SAMPLES)
-  working_buffer_.resize(kMaxOpusFramesPerAU *
-                         audio_sample_info_.number_of_channels * sizeof(float));
-#endif  // SB_HAS_QUIRK(SUPPORT_INT16_AUDIO_SAMPLES)
-
   int error;
   int channels = audio_sample_info_.number_of_channels;
   if (channels > 8 || channels < 1) {
@@ -106,28 +96,44 @@
   SB_DCHECK(input_buffer);
   SB_DCHECK(output_cb_);
 
-  Schedule(consumed_cb);
-
   if (stream_ended_) {
     SB_LOG(ERROR) << "Decode() is called after WriteEndOfStream() is called.";
     return;
   }
 
+  scoped_refptr<DecodedAudio> decoded_audio = new DecodedAudio(
+      audio_sample_info_.number_of_channels, GetSampleType(),
+      kSbMediaAudioFrameStorageTypeInterleaved, input_buffer->timestamp(),
+      audio_sample_info_.number_of_channels * frames_per_au_ *
+          starboard::media::GetBytesPerSample(GetSampleType()));
+
 #if SB_HAS_QUIRK(SUPPORT_INT16_AUDIO_SAMPLES)
   const char kDecodeFunctionName[] = "opus_multistream_decode";
   int decoded_frames = opus_multistream_decode(
       decoder_, static_cast<const unsigned char*>(input_buffer->data()),
       input_buffer->size(),
-      reinterpret_cast<opus_int16*>(working_buffer_.data()),
-      kMaxOpusFramesPerAU, 0);
+      reinterpret_cast<opus_int16*>(decoded_audio->buffer()), frames_per_au_,
+      0);
 #else   // SB_HAS_QUIRK(SUPPORT_INT16_AUDIO_SAMPLES)
   const char kDecodeFunctionName[] = "opus_multistream_decode_float";
   int decoded_frames = opus_multistream_decode_float(
       decoder_, static_cast<const unsigned char*>(input_buffer->data()),
-      input_buffer->size(), reinterpret_cast<float*>(working_buffer_.data()),
-      kMaxOpusFramesPerAU, 0);
+      input_buffer->size(), reinterpret_cast<float*>(decoded_audio->buffer()),
+      frames_per_au_, 0);
 #endif  // SB_HAS_QUIRK(SUPPORT_INT16_AUDIO_SAMPLES)
+  if (decoded_frames == OPUS_BUFFER_TOO_SMALL &&
+      frames_per_au_ < kMaxOpusFramesPerAU) {
+    frames_per_au_ = kMaxOpusFramesPerAU;
+    // Send to decode again with the new |frames_per_au_|.
+    Decode(input_buffer, consumed_cb);
+    return;
+  }
   if (decoded_frames <= 0) {
+    // When the following check fails, it indicates that the output buffer is
+    // still too small even with |frames_per_au_| at |kMaxOpusFramesPerAU|,
+    // which should never happen for Opus.
+    SB_DCHECK(decoded_frames != OPUS_BUFFER_TOO_SMALL);
+
     // TODO: Consider fill it with silence.
     SB_LOG(ERROR) << kDecodeFunctionName
                   << "() failed with error code: " << decoded_frames;
@@ -141,14 +147,13 @@
     return;
   }
 
-  scoped_refptr<DecodedAudio> decoded_audio = new DecodedAudio(
-      audio_sample_info_.number_of_channels, GetSampleType(),
-      kSbMediaAudioFrameStorageTypeInterleaved, input_buffer->timestamp(),
-      audio_sample_info_.number_of_channels * decoded_frames *
-          starboard::media::GetBytesPerSample(GetSampleType()));
-  SbMemoryCopy(decoded_audio->buffer(), working_buffer_.data(),
-               decoded_audio->size());
+  frames_per_au_ = decoded_frames;
+  decoded_audio->ShrinkTo(audio_sample_info_.number_of_channels *
+                          frames_per_au_ *
+                          starboard::media::GetBytesPerSample(GetSampleType()));
+
   decoded_audios_.push(decoded_audio);
+  Schedule(consumed_cb);
   Schedule(output_cb_);
 }
 
diff --git a/src/starboard/shared/opus/opus_audio_decoder.h b/src/starboard/shared/opus/opus_audio_decoder.h
index 689c33d..2c0f2d8 100644
--- a/src/starboard/shared/opus/opus_audio_decoder.h
+++ b/src/starboard/shared/opus/opus_audio_decoder.h
@@ -48,6 +48,8 @@
   void Reset() override;
 
  private:
+  static const int kMaxOpusFramesPerAU = 9600;
+
   SbMediaAudioSampleType GetSampleType() const;
 
   OutputCB output_cb_;
@@ -57,7 +59,7 @@
   bool stream_ended_ = false;
   std::queue<scoped_refptr<DecodedAudio> > decoded_audios_;
   SbMediaAudioSampleInfo audio_sample_info_;
-  std::vector<uint8_t> working_buffer_;
+  int frames_per_au_ = kMaxOpusFramesPerAU;
 };
 
 }  // namespace opus
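
The reworked Decode() above removes the persistent working buffer: it allocates the DecodedAudio output for |frames_per_au_| frames (initially kMaxOpusFramesPerAU, thereafter the size of the previous access unit), retries once at the maximum when libopus reports OPUS_BUFFER_TOO_SMALL, and finally shrinks the output to the frames actually decoded. A minimal sketch of that control flow with the libopus call stubbed out and a single channel for brevity; all names below are illustrative, not the Cobalt classes:

#include <algorithm>
#include <iostream>
#include <vector>

// Stand-in for opus_multistream_decode_float(): returns the number of frames
// written to |out|, or kBufferTooSmall if |capacity_frames| is insufficient.
constexpr int kBufferTooSmall = -2;  // stand-in for OPUS_BUFFER_TOO_SMALL
int FakeDecode(int packet_frames, float* out, int capacity_frames) {
  if (packet_frames > capacity_frames) return kBufferTooSmall;
  std::fill(out, out + packet_frames, 0.0f);
  return packet_frames;
}

int main() {
  constexpr int kMaxFramesPerAU = 9600;  // retry size, as in the decoder above
  int frames_per_au = 960;         // remembered size of the previous access unit
  const int packet_frames = 2880;  // this packet happens to be larger

  std::vector<float> buffer(frames_per_au);
  int decoded = FakeDecode(packet_frames, buffer.data(), frames_per_au);
  if (decoded == kBufferTooSmall && frames_per_au < kMaxFramesPerAU) {
    // Grow once to the maximum and decode the same packet again.
    frames_per_au = kMaxFramesPerAU;
    buffer.resize(frames_per_au);
    decoded = FakeDecode(packet_frames, buffer.data(), frames_per_au);
  }
  // Remember the real size for the next packet and shrink the output to the
  // frames actually produced (the decoder above calls DecodedAudio::ShrinkTo).
  frames_per_au = decoded;
  buffer.resize(decoded);
  std::cout << "decoded " << decoded << " frames\n";
}
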
diff --git a/src/starboard/shared/pthread/mutex_destroy.cc b/src/starboard/shared/pthread/mutex_destroy.cc
index 799c83d..aa85d9c 100644
--- a/src/starboard/shared/pthread/mutex_destroy.cc
+++ b/src/starboard/shared/pthread/mutex_destroy.cc
@@ -17,6 +17,7 @@
 #include <pthread.h>
 
 #include "starboard/common/log.h"
+#include "starboard/configuration.h"
 #include "starboard/shared/pthread/is_success.h"
 
 bool SbMutexDestroy(SbMutex* mutex) {
@@ -24,6 +25,11 @@
     return false;
   }
 
+#if SB_API_VERSION >= SB_MUTEX_ACQUIRE_TRY_API_CHANGE_VERSION
+  // Both recursively acquiring a mutex that is already locked by the calling
+  // thread and destroying a locked mutex result in undefined behavior.
+  return IsSuccess(pthread_mutex_destroy(mutex));
+#else   // SB_API_VERSION >= SB_MUTEX_ACQUIRE_TRY_API_CHANGE_VERSION
   // Destroying a locked mutex is undefined, so fail if the mutex is
   // already locked,
   if (!IsSuccess(pthread_mutex_trylock(mutex))) {
@@ -33,4 +39,5 @@
 
   return IsSuccess(pthread_mutex_unlock(mutex)) &&
          IsSuccess(pthread_mutex_destroy(mutex));
+#endif  // SB_API_VERSION >= SB_MUTEX_ACQUIRE_TRY_API_CHANGE_VERSION
 }
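
Under the newer contract above, SbMutexDestroy() no longer probes for a locked mutex; callers must guarantee the mutex is unlocked and no longer in use before destroying it. A small usage sketch of the expected ordering (illustrative only):

#include "starboard/mutex.h"

void CreateUseDestroy() {
  SbMutex mutex;
  SbMutexCreate(&mutex);
  SbMutexAcquire(&mutex);
  // ... critical section ...
  SbMutexRelease(&mutex);
  // Destroy only after the mutex has been released and no other thread can
  // still acquire it.
  SbMutexDestroy(&mutex);
}
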
diff --git a/src/starboard/shared/pulse/pulse_audio_sink_type.cc b/src/starboard/shared/pulse/pulse_audio_sink_type.cc
index 3bfa883..ff87de2 100644
--- a/src/starboard/shared/pulse/pulse_audio_sink_type.cc
+++ b/src/starboard/shared/pulse/pulse_audio_sink_type.cc
@@ -30,6 +30,19 @@
 #include "starboard/thread.h"
 #include "starboard/time.h"
 
+#if defined(ADDRESS_SANITIZER)
+// By default, Leak Sanitizer and Address Sanitizer are expected to exist
+// together. However, this is not true for all platforms.
+// HAS_LEAK_SANITIZER=0 explicitly removes the Leak Sanitizer from the code.
+#ifndef HAS_LEAK_SANITIZER
+#define HAS_LEAK_SANITIZER 1
+#endif  // HAS_LEAK_SANITIZER
+#endif  // defined(ADDRESS_SANITIZER)
+
+#if HAS_LEAK_SANITIZER
+#include <sanitizer/lsan_interface.h>
+#endif  // HAS_LEAK_SANITIZER
+
 namespace starboard {
 namespace shared {
 namespace pulse {
@@ -424,7 +437,13 @@
     return false;
   }
   // Create pulse context.
+#if HAS_LEAK_SANITIZER
+  __lsan_disable();
+#endif
   context_ = pa_context_new(pa_mainloop_get_api(mainloop_), "cobalt_audio");
+#if HAS_LEAK_SANITIZER
+  __lsan_enable();
+#endif
   if (!context_) {
     SB_LOG(WARNING) << "Pulse audio error: cannot create context.";
     return false;
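
The paired __lsan_disable()/__lsan_enable() calls above suppress a known one-time allocation inside pa_context_new() when building with Leak Sanitizer. Wrapping the pair in a small RAII guard keeps the calls balanced even on early returns; a sketch under the same HAS_LEAK_SANITIZER convention defined above (the class name is illustrative):

#if HAS_LEAK_SANITIZER
#include <sanitizer/lsan_interface.h>
#endif  // HAS_LEAK_SANITIZER

// Disables leak detection for allocations made while an instance is alive.
// Compiles to a no-op when HAS_LEAK_SANITIZER is 0 or undefined.
class ScopedLsanDisabler {
 public:
  ScopedLsanDisabler() {
#if HAS_LEAK_SANITIZER
    __lsan_disable();
#endif
  }
  ~ScopedLsanDisabler() {
#if HAS_LEAK_SANITIZER
    __lsan_enable();
#endif
  }
};

// Usage at a call site like the one in the diff would then be:
//   ScopedLsanDisabler disable_lsan;
//   context_ = pa_context_new(pa_mainloop_get_api(mainloop_), "cobalt_audio");
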
diff --git a/src/starboard/shared/starboard/configuration_constants_compatibility_defines.h b/src/starboard/shared/starboard/configuration_constants_compatibility_defines.h
new file mode 100644
index 0000000..3485e21
--- /dev/null
+++ b/src/starboard/shared/starboard/configuration_constants_compatibility_defines.h
@@ -0,0 +1,36 @@
+// Copyright 2019 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file defines macros to provide convenience for backwards compatibility
+// after the change that migrated certain configuration macros to extern
+// variables.
+
+#ifndef STARBOARD_SHARED_STARBOARD_CONFIGURATION_CONSTANTS_COMPATIBILITY_DEFINES_H_
+#define STARBOARD_SHARED_STARBOARD_CONFIGURATION_CONSTANTS_COMPATIBILITY_DEFINES_H_
+
+#if SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+
+#error \
+    "This file is only relevant for Starboard versions before 12. Please do " \
+"not include this file otherwise."
+
+#else  // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+
+#define kSbMallocAlignment SB_MALLOC_ALIGNMENT
+
+#define kSbMaxThreadNameLength SB_MAX_THREAD_NAME_LENGTH
+
+#endif  // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+
+#endif  // STARBOARD_SHARED_STARBOARD_CONFIGURATION_CONSTANTS_COMPATIBILITY_DEFINES_H_
diff --git a/src/starboard/shared/starboard/player/job_queue.cc b/src/starboard/shared/starboard/player/job_queue.cc
index de6f160..5f87bc3 100644
--- a/src/starboard/shared/starboard/player/job_queue.cc
+++ b/src/starboard/shared/starboard/player/job_queue.cc
@@ -153,8 +153,10 @@
   JobToken job_token(current_job_token_);
   JobRecord job_record = {job_token, job, owner};
 #if ENABLE_JOB_QUEUE_PROFILING
-  job_record.stack_size =
-      SbSystemGetStack(job_record.stack, kProfileStackDepth);
+  if (kProfileStackDepth > 0) {
+    job_record.stack_size =
+        SbSystemGetStack(job_record.stack, kProfileStackDepth);
+  }
 #endif  // ENABLE_JOB_QUEUE_PROFILING
 
   SbTimeMonotonic time_to_run_job = SbTimeGetMonotonicNow() + delay;
@@ -202,6 +204,9 @@
     if (time_to_job_record_map_.empty() && wait_for_next_job) {
       // |kSbTimeMax| makes more sense here, but |kSbTimeDay| is much safer.
       condition_.WaitTimed(kSbTimeDay);
+#if ENABLE_JOB_QUEUE_PROFILING
+      ++wait_times_;
+#endif  // ENABLE_JOB_QUEUE_PROFILING
     }
     if (time_to_job_record_map_.empty()) {
       return false;
@@ -212,6 +217,9 @@
     if (delay > 0) {
       if (wait_for_next_job) {
         condition_.WaitTimed(delay);
+#if ENABLE_JOB_QUEUE_PROFILING
+        ++wait_times_;
+#endif  // ENABLE_JOB_QUEUE_PROFILING
         if (time_to_job_record_map_.empty()) {
           return false;
         }
@@ -239,6 +247,8 @@
   job_record.job();
 
 #if ENABLE_JOB_QUEUE_PROFILING
+  ++jobs_processed_;
+
   auto now = SbTimeGetMonotonicNow();
   auto elapsed = now - start;
   if (elapsed > max_job_interval_) {
@@ -246,7 +256,10 @@
     max_job_interval_ = elapsed;
   }
   if (now - last_reset_time_ > kProfileResetInterval) {
-    SB_LOG(INFO) << "================ Max job takes " << max_job_interval_;
+    SB_LOG(INFO) << "================ " << jobs_processed_
+                 << " jobs processed, and waited " << wait_times_
+                 << " times since last reset on " << this
+                 << ", max job took " << max_job_interval_;
     for (int i = 0; i < job_record.stack_size; ++i) {
       char function_name[1024];
       if (SbSystemSymbolize(job_record.stack[i], function_name,
@@ -258,6 +271,8 @@
     }
     last_reset_time_ = now;
     max_job_interval_ = 0;
+    jobs_processed_ = 0;
+    wait_times_ = 0;
   }
 #endif  // ENABLE_JOB_QUEUE_PROFILING
   return true;
diff --git a/src/starboard/shared/starboard/player/job_queue.h b/src/starboard/shared/starboard/player/job_queue.h
index 231c1d4..b236fcd 100644
--- a/src/starboard/shared/starboard/player/job_queue.h
+++ b/src/starboard/shared/starboard/player/job_queue.h
@@ -163,6 +163,8 @@
   SbTimeMonotonic last_reset_time_ = SbTimeGetMonotonicNow();
   JobRecord job_record_with_max_interval_;
   SbTimeMonotonic max_job_interval_ = 0;
+  int jobs_processed_ = 0;
+  int wait_times_ = 0;
 #endif  // ENABLE_JOB_QUEUE_PROFILING
 };
 
diff --git a/src/starboard/shared/starboard/thread_local_storage_internal.cc b/src/starboard/shared/starboard/thread_local_storage_internal.cc
index 0e28828..f98994b 100644
--- a/src/starboard/shared/starboard/thread_local_storage_internal.cc
+++ b/src/starboard/shared/starboard/thread_local_storage_internal.cc
@@ -229,7 +229,12 @@
   KeyRecord* record = &data_->key_table_[key->index];
 
   record->destructor = destructor;
+#if SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+  SbMemorySet(record->values.data(), 0,
+              record->values.size() * sizeof(record->values[0]));
+#else   // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
   SbMemorySet(record->values, 0, sizeof(record->values));
+#endif  // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
   record->valid = true;
 
   return key;
diff --git a/src/starboard/shared/starboard/thread_local_storage_internal.h b/src/starboard/shared/starboard/thread_local_storage_internal.h
index 9438fa9..312b98b 100644
--- a/src/starboard/shared/starboard/thread_local_storage_internal.h
+++ b/src/starboard/shared/starboard/thread_local_storage_internal.h
@@ -15,6 +15,8 @@
 #ifndef STARBOARD_SHARED_STARBOARD_THREAD_LOCAL_STORAGE_INTERNAL_H_
 #define STARBOARD_SHARED_STARBOARD_THREAD_LOCAL_STORAGE_INTERNAL_H_
 
+#include <vector>
+
 #include "starboard/common/mutex.h"
 #include "starboard/common/scoped_ptr.h"
 #include "starboard/shared/internal_only.h"
@@ -55,7 +57,11 @@
   struct KeyRecord {
     bool valid;
     SbThreadLocalDestructor destructor;
+#if SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
+    std::vector<void*> values = std::vector<void*>(kMaxThreads);
+#else   // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
     void* values[kMaxThreads];
+#endif  // SB_API_VERSION >= SB_FEATURE_RUNTIME_CONFIGS_VERSION
   };
 
   // Sets up the specified key.
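
C++ does not allow a parenthesized initializer on an in-class default member initializer, so the |values| vector above is copy-initialized from an explicitly constructed temporary, which also keeps the "construct with a count" intent unambiguous. A tiny standalone illustration of that form (kMaxThreads here is a placeholder for the real constant):

#include <iostream>
#include <vector>

constexpr int kMaxThreads = 64;  // placeholder for the platform constant

struct KeyRecord {
  // In-class member initializers must use '=' or braces; copy-initializing
  // from a temporary built with the (count) constructor yields kMaxThreads
  // null pointers.
  std::vector<void*> values = std::vector<void*>(kMaxThreads);
};

int main() {
  KeyRecord record;
  std::cout << record.values.size() << " slots\n";  // prints "64 slots"
}
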
diff --git a/src/starboard/shared/stub/directory_get_next.cc b/src/starboard/shared/stub/directory_get_next.cc
index 5173d4e..8987598 100644
--- a/src/starboard/shared/stub/directory_get_next.cc
+++ b/src/starboard/shared/stub/directory_get_next.cc
@@ -15,6 +15,7 @@
 #include "starboard/directory.h"
 
 bool SbDirectoryGetNext(SbDirectory /*directory*/,
-                        SbDirectoryEntry* /*out_entry*/) {
+                        char* /*out_entry*/,
+                        size_t /* out_entry_size */) {
   return false;
 }
diff --git a/src/starboard/starboard_all.gyp b/src/starboard/starboard_all.gyp
index 76bb448..bfc58b3 100644
--- a/src/starboard/starboard_all.gyp
+++ b/src/starboard/starboard_all.gyp
@@ -89,6 +89,11 @@
             '<(DEPTH)/starboard/shared/starboard/player/filter/tools/tools.gyp:*',
           ],
         }],
+        ['sb_enable_benchmark==1', {
+          'dependencies': [
+            '<(DEPTH)/starboard/benchmark/benchmark.gyp:*',
+          ],
+        }],
       ],
     },
   ],
diff --git a/src/starboard/starboard_headers_only.gyp b/src/starboard/starboard_headers_only.gyp
index 6546f76..db954f5 100644
--- a/src/starboard/starboard_headers_only.gyp
+++ b/src/starboard/starboard_headers_only.gyp
@@ -33,6 +33,7 @@
         'character.h',
         'condition_variable.h',
         'configuration.h',
+        'configuration_constants.h',
         'cpu_features.h',
         'decode_target.h',
         'directory.h',
diff --git a/src/starboard/stub/configuration_constants.cc b/src/starboard/stub/configuration_constants.cc
new file mode 100644
index 0000000..5b3c886
--- /dev/null
+++ b/src/starboard/stub/configuration_constants.cc
@@ -0,0 +1,23 @@
+// Copyright 2019 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file defines all configuration constants for a platform.
+
+#include "starboard/configuration_constants.h"
+
+// Determines the alignment that allocations should have on this platform.
+const size_t kSbMallocAlignment = 16;
+
+// The maximum length of a name for a thread, including the NULL-terminator.
+const int32_t kSbMaxThreadNameLength = 16;
diff --git a/src/starboard/stub/configuration_public.h b/src/starboard/stub/configuration_public.h
index 265be2a..95eaf1f 100644
--- a/src/starboard/stub/configuration_public.h
+++ b/src/starboard/stub/configuration_public.h
@@ -367,9 +367,6 @@
 // specify that.
 #define SB_NETWORK_IO_BUFFER_ALIGNMENT 16
 
-// Determines the alignment that allocations should have on this platform.
-#define SB_MALLOC_ALIGNMENT ((size_t)16U)
-
 // Determines the threshhold of allocation size that should be done with mmap
 // (if available), rather than allocated within the core heap.
 #define SB_DEFAULT_MMAP_THRESHOLD ((size_t)(256 * 1024U))
@@ -398,9 +395,6 @@
 // The maximum number of thread local storage keys supported by this platform.
 #define SB_MAX_THREAD_LOCAL_KEYS 512
 
-// The maximum length of the name for a thread, including the NULL-terminator.
-#define SB_MAX_THREAD_NAME_LENGTH 16
-
 // --- Timing API ------------------------------------------------------------
 
 // Whether this platform has an API to retrieve how long the current thread
diff --git a/src/starboard/stub/starboard_platform.gyp b/src/starboard/stub/starboard_platform.gyp
index 5971d03..5baf0ca 100644
--- a/src/starboard/stub/starboard_platform.gyp
+++ b/src/starboard/stub/starboard_platform.gyp
@@ -24,6 +24,7 @@
         'application_stub.cc',
         'application_stub.h',
         'atomic_public.h',
+        'configuration_constants.cc',
         'main.cc',
         'thread_types_public.h',
         # Include private stubs, if present.
diff --git a/src/starboard/tools/abstract_launcher.py b/src/starboard/tools/abstract_launcher.py
index 90dc288..a9b5063 100644
--- a/src/starboard/tools/abstract_launcher.py
+++ b/src/starboard/tools/abstract_launcher.py
@@ -172,6 +172,23 @@
 
     raise RuntimeError("Suspend not supported for this platform.")
 
+  def SupportsDeepLink(self):
+    return False
+
+  def SendDeepLink(self, link):
+    """Sends deep link to the launcher's executable.
+
+    Args:
+      link:  Link to send to the executable.
+
+    Raises:
+      RuntimeError: Deep link not supported on platform.
+    """
+
+    raise RuntimeError(
+        "Deep link not supported for this platform (link {} sent).".format(
+            link))
+
   def GetStartupTimeout(self):
     """Gets the number of seconds to wait before assuming a launcher timeout."""
 
diff --git a/src/starboard/tools/package.py b/src/starboard/tools/package.py
index 8e3245c..716596b 100644
--- a/src/starboard/tools/package.py
+++ b/src/starboard/tools/package.py
@@ -33,7 +33,8 @@
     path: Path to the platform
     root_module: An already-loaded module
     module_name: Name of a python module to load. If None, load the platform
-        directory as a python module.
+      directory as a python module.
+
   Returns:
     A module loaded with importlib.import_module
   Throws:
@@ -95,8 +96,9 @@
     except Exception as e:  # pylint: disable=broad-except
       # Catch all exceptions to avoid an error in one platform's Packager
       # halting the script for other platforms' packagers.
-      logging.warning('Exception iterating supported platform for platform '
-                      '%s: %s.', platform_info.name, e)
+      logging.warning(
+          'Exception iterating supported platform for platform '
+          '%s: %s.', platform_info.name, e)
 
   return packager_modules
 
@@ -121,10 +123,9 @@
 
     Args:
       targets: A list of targets to install the package to, or None on platforms
-          that support installing to a default target.
-
-    This method can be overridden to implement platform-specific steps to
-    install the package for that platform.
+        that support installing to a default target.  This method can be
+        overridden to implement platform-specific steps to install the package
+        for that platform.
     """
     del targets
 
@@ -148,6 +149,7 @@
     constructor.
     Args:
       options: A namespace object returned from ArgumentParser.parse_args
+
     Returns:
       A dict of kwargs to be passed to the Package constructor.
     """
@@ -168,12 +170,12 @@
   def GetPlatformInfo(self, platform_name):
     return self.platform_infos.get(platform_name, None)
 
-  def GetApplicationPackageInfo(self, platform_name, applciation_name):
+  def GetApplicationPackageInfo(self, platform_name, application_name):
     """Get application-specific packaging information."""
     platform_info = self.GetPlatformInfo(platform_name)
     try:
       return _ImportModule(platform_info.path, starboard,
-                           '%s.package' % applciation_name)
+                           '%s.package' % application_name)
     except ImportError as e:
       # No package parameters specified for this platform.
       logging.debug('Failed to import cobalt.package: %s', e)
@@ -187,6 +189,7 @@
       source_dir: The directory containing the application to be packaged.
       output_dir: The directory into which the package files should be placed.
       **kwargs: Platform-specific arguments.
+
     Returns:
       A PackageBase instance.
     """
diff --git a/src/starboard/tools/port_symlink.py b/src/starboard/tools/port_symlink.py
index afd1202..bd7a7a1 100644
--- a/src/starboard/tools/port_symlink.py
+++ b/src/starboard/tools/port_symlink.py
@@ -136,6 +136,11 @@
   formatter_class = argparse.RawDescriptionHelpFormatter
   parser = MyParser(epilog=help_msg, formatter_class=formatter_class)
   parser.add_argument(
+      '-a',
+      '--use_absolute_symlinks',
+      action='store_true',
+      help='Generated symlinks are stored as absolute paths.')
+  parser.add_argument(
       '-f',
       '--force',
       action='store_true',
@@ -155,6 +160,9 @@
   args = parser.parse_args()
 
   folder_path, link_path = args.link
+  if args.use_absolute_symlinks:
+    folder_path = os.path.abspath(folder_path)
+    link_path = os.path.abspath(link_path)
   if '.' in folder_path:
     d1 = os.path.abspath(folder_path)
   else:
diff --git a/src/starboard/tools/send_link.py b/src/starboard/tools/send_link.py
index 74d47f4..10a5a5d 100755
--- a/src/starboard/tools/send_link.py
+++ b/src/starboard/tools/send_link.py
@@ -34,6 +34,7 @@
 import sys
 import tempfile
 import textwrap
+import time
 
 
 def _Uncase(text):
@@ -76,7 +77,20 @@
   return None
 
 
-def _SendLink(executable, link):
+def _ConnectWithRetry(s, port, num_attempts):
+  for attempt in range(num_attempts):
+    if attempt > 0:
+      time.sleep(1)
+    try:
+      s.connect(('localhost', port))
+      return True
+    except (RuntimeError, IOError):
+      logging.error('Could not connect to port %d, attempt %d / %d', port,
+                    attempt, num_attempts)
+  return False
+
+
+def SendLink(executable, link, connection_attempts=1):
   """Sends a link to the process starting with the given executable name."""
 
   pids = _GetPids(executable)
@@ -109,7 +123,9 @@
 
   try:
     with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
-      s.connect(('localhost', port))
+      if not _ConnectWithRetry(s, port, connection_attempts):
+        logging.exception('Could not connect to port: %d', port)
+        return 1
       terminated_link = link + '\x00'
       bytes_sent = 0
       while bytes_sent < len(terminated_link):
@@ -138,7 +154,7 @@
   parser.add_argument(
       'link', type=str, help='The link content to send to the executable.')
   arguments = parser.parse_args()
-  return _SendLink(arguments.executable, arguments.link)
+  return SendLink(arguments.executable, arguments.link)
 
 
 if __name__ == '__main__':
diff --git a/src/starboard/tools/symbolize/_env.py b/src/starboard/tools/symbolize/_env.py
new file mode 100644
index 0000000..021908e
--- /dev/null
+++ b/src/starboard/tools/symbolize/_env.py
@@ -0,0 +1,26 @@
+#
+# Copyright 2017 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Ask the parent directory to load the project environment."""
+
+from imp import load_source
+from os import path
+import sys
+
+_ENV = path.abspath(path.join(path.dirname(__file__), path.pardir, '_env.py'))
+if not path.exists(_ENV):
+  print '%s: Can\'t find repo root.\nMissing parent: %s' % (__file__, _ENV)
+  sys.exit(1)
+load_source('', _ENV)
diff --git a/src/starboard/tools/symbolize/symbolize.py b/src/starboard/tools/symbolize/symbolize.py
new file mode 100644
index 0000000..cd135a6
--- /dev/null
+++ b/src/starboard/tools/symbolize/symbolize.py
@@ -0,0 +1,152 @@
+#!/usr/bin/env python
+
+# Copyright 2019 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Lightweight utility to simplify resolving stack traces and crashes.
+
+This tool supports three different formats for crashes and stack traces, but can
+easily be expanded for additional cases. Examples of current formats are as
+follows:
+
+  Address Sanitizer
+    #1 0x7fdc59bbaa6b  (<unknown module>)
+
+  Cobalt
+    <unknown> [0x7efcdf1fd52b]
+
+  Raw
+    0x7efcdf1fd52b
+
+The results of the symbolizer will only be included if it was able to find the
+name of the symbol, and it does not appear to be malformed. The only exception
+is when the line was matched with the |_RAW| regular expression in which case it
+will always output the results of the symbolizer.
+"""
+
+import _env  # pylint: disable=unused-import
+
+import argparse
+import os
+import re
+import subprocess
+import sys
+
+from starboard.build import clang
+from starboard.tools import build
+
+_SYMBOLIZER = os.path.join(
+    build.GetToolchainsDir(), 'x86_64-linux-gnu-clang-chromium-{}'.format(
+        clang.GetClangSpecification().revision), 'bin', 'llvm-symbolizer')
+
+_RE_ASAN = re.compile(
+    r'\s*(#[0-9]{1,3})\s*(0x[a-z0-9]*)\s*\(<unknown\smodule>\)')
+_RE_COBALT = re.compile(r'\s*<unknown> \[(0x[a-z0-9]*)\]\s*')
+_RE_RAW = re.compile(r'^(0x[a-z0-9]*)$')
+
+
+def _Symbolize(filename, library, base_address):
+  """Attempts to resolve memory addresses within the file specified.
+
+  This function iterates through the specified file line by line. When a line
+  matches one of the regular expressions above, llvm-symbolizer is invoked with
+  the library specified and the offset of the symbol within it. The
+  results are verified and the output formatted to match whichever crash-style
+  is being used.
+
+  Args:
+    filename:     The path to the file containing the stack trace.
+    library:      The path to the library that is believed to have the symbol.
+    base_address: The base address of the library when it was loaded and
+      crashed, typically found in the logs.
+  """
+  if not os.path.exists(filename):
+    raise ValueError('File not found: {}.'.format(filename))
+  if not os.path.exists(library):
+    raise ValueError('Library not found: {}.'.format(library))
+  with open(filename) as f:
+    for line in f:
+      # Address Sanitizer
+      match = _RE_ASAN.match(line)
+      if match:
+        offset = int(match.group(2), 0) - int(base_address, 0)
+        results = _RunSymbolizer(library, str(offset))
+        if results and '?' not in results[0] and '?' not in results[1]:
+          sys.stdout.write('    {} {} in {} {}\n'.format(
+              match.group(1), hex(offset), results[0], results[1]))
+          continue
+      # Cobalt
+      match = _RE_COBALT.match(line)
+      if match:
+        offset = int(match.group(1), 0) - int(base_address, 0)
+        results = _RunSymbolizer(library, str(offset))
+        if results and '?' not in results[0]:
+          sys.stdout.write('        {} [{}]\n'.format(hex(offset), results[0]))
+          continue
+      # Raw
+      match = _RE_RAW.match(line)
+      if match:
+        offset = int(match.group(1), 0) - int(base_address, 0)
+        results = _RunSymbolizer(library, str(offset))
+        if results:
+          sys.stdout.write('{} {} in {}\n'.format(
+              hex(offset), results[0], results[1]))
+          continue
+      sys.stdout.write(line)
+
+
+def _RunSymbolizer(library, offset):
+  """Uses an external symbolizer tool to resolve symbol names.
+
+  Args:
+    library: The path to the library that is believed to have the symbol.
+    offset:  The offset into the library of the symbol we are looking for.
+  """
+  if int(offset) >= 0:
+    command = subprocess.Popen([_SYMBOLIZER, '-e', library, offset, '-f'],
+                               stdout=subprocess.PIPE)
+    results = command.communicate()
+    if command.returncode == 0:
+      return results[0].split(os.linesep)
+  return None
+
+
+def main():
+  arg_parser = argparse.ArgumentParser()
+  arg_parser.add_argument(
+      '-f',
+      '--filename',
+      required=True,
+      help='The path to the file that contains the stack traces, crashes, or raw addresses.'
+  )
+  arg_parser.add_argument(
+      '-l',
+      '--library',
+      required=True,
+      help='The path to the library that is believed to contain the addresses.')
+  arg_parser.add_argument(
+      'base_address',
+      type=str,
+      nargs=1,
+      help='The base address of the library.')
+  args, _ = arg_parser.parse_known_args()
+
+  if not os.path.exists(_SYMBOLIZER):
+    raise ValueError(
+        'Please update {} with a valid llvm-symbolizer path.'.format(__file__))
+
+  return _Symbolize(args.filename, args.library, args.base_address[0])
+
+
+if __name__ == '__main__':
+  sys.exit(main())
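
As the docstrings above describe, each matched address is turned into a library-relative offset (runtime address minus the library's load address) before being handed to llvm-symbolizer with the -e and -f flags. A small C++ sketch of that arithmetic; the addresses and library name below are made up for illustration:

#include <cinttypes>
#include <cstdint>
#include <cstdio>
#include <string>

// Computes the offset that symbolize.py feeds to llvm-symbolizer: the runtime
// address from the crash log minus the library's base (load) address.
int main() {
  const uint64_t base_address = 0x7efcdf000000;   // hypothetical load address
  const uint64_t crash_address = 0x7efcdf1fd52b;  // e.g. "<unknown> [0x...]"
  const uint64_t offset = crash_address - base_address;

  // symbolize.py then runs: llvm-symbolizer -e <library> <offset> -f
  std::string command = "llvm-symbolizer -e libcobalt.so " +
                        std::to_string(offset) + " -f";
  std::printf("offset: 0x%" PRIx64 "\n%s\n", offset, command.c_str());
}
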
diff --git a/src/starboard/tools/testing/test_runner.py b/src/starboard/tools/testing/test_runner.py
index ba8f2b6..ac03457 100755
--- a/src/starboard/tools/testing/test_runner.py
+++ b/src/starboard/tools/testing/test_runner.py
@@ -416,8 +416,7 @@
         coverage_directory=self.coverage_directory,
         env_variables=env,
         loader_platform=self.loader_platform,
-        loader_config=self.loader_config,
-        loader_out_directory=self.loader_out_directory)
+        loader_config=self.loader_config)
 
     test_reader = TestLineReader(read_pipe)
     test_launcher = TestLauncher(launcher)
diff --git a/src/third_party/QR-Code-generator/cpp/QrCode.cpp b/src/third_party/QR-Code-generator/cpp/QrCode.cpp
index 0f78920..0dbc086 100644
--- a/src/third_party/QR-Code-generator/cpp/QrCode.cpp
+++ b/src/third_party/QR-Code-generator/cpp/QrCode.cpp
@@ -54,9 +54,9 @@
 }
 
 
-QrCode QrCode::encodeText(const char *text, Ecc ecl) {
+QrCode QrCode::encodeText(const char *text, Ecc ecl, int minVersion) {
 	vector<QrSegment> segs = QrSegment::makeSegments(text);
-	return encodeSegments(segs, ecl);
+	return encodeSegments(segs, ecl, minVersion);
 }
 
 
diff --git a/src/third_party/QR-Code-generator/cpp/QrCode.hpp b/src/third_party/QR-Code-generator/cpp/QrCode.hpp
index 14a3f61..31dd1d7 100644
--- a/src/third_party/QR-Code-generator/cpp/QrCode.hpp
+++ b/src/third_party/QR-Code-generator/cpp/QrCode.hpp
@@ -64,7 +64,7 @@
 	 * QR Code version is automatically chosen for the output. The ECC level of the result may be higher than
 	 * the ecl argument if it can be done without increasing the version.
 	 */
-	public: static QrCode encodeText(const char *text, Ecc ecl);
+	public: static QrCode encodeText(const char *text, Ecc ecl, int minVersion);
 	
 	
 	/* 
diff --git a/src/third_party/angle/include/angle_hdr.h b/src/third_party/angle/include/angle_hdr.h
new file mode 100644
index 0000000..8a9dd6c
--- /dev/null
+++ b/src/third_party/angle/include/angle_hdr.h
@@ -0,0 +1,29 @@
+// Copyright 2019 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ANGLE_HDR_H_
+#define ANGLE_HDR_H_
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+void SetHdrAngleModeEnabled(bool flag);
+bool IsHdrAngleModeEnabled();
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // ANGLE_HDR_H_
diff --git a/src/third_party/angle/src/common/angle_hdr.cpp b/src/third_party/angle/src/common/angle_hdr.cpp
new file mode 100644
index 0000000..f975cb5
--- /dev/null
+++ b/src/third_party/angle/src/common/angle_hdr.cpp
@@ -0,0 +1,42 @@
+// Copyright 2019 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#if defined(STARBOARD)
+#include "angle_hdr.h"
+
+#include "starboard/atomic.h"
+#include "starboard/common/log.h"
+
+namespace angle
+{
+
+starboard::atomic_int32_t hdr_angle_mode_enable(0);
+
+}
+
+void SetHdrAngleModeEnabled(bool flag)
+{
+    if (!flag && angle::hdr_angle_mode_enable.load() == 0)
+    {
+        return;
+    }
+    angle::hdr_angle_mode_enable.fetch_add(flag ? 1 : -1);
+    SB_DCHECK(angle::hdr_angle_mode_enable.load() >= 0);
+}
+
+bool IsHdrAngleModeEnabled()
+{
+    return angle::hdr_angle_mode_enable.load() > 0;
+}
+#endif  // STARBOARD
diff --git a/src/third_party/angle/src/libANGLE/renderer/d3d/DynamicHLSL.cpp b/src/third_party/angle/src/libANGLE/renderer/d3d/DynamicHLSL.cpp
index b4366e7..b669e8c 100644
--- a/src/third_party/angle/src/libANGLE/renderer/d3d/DynamicHLSL.cpp
+++ b/src/third_party/angle/src/libANGLE/renderer/d3d/DynamicHLSL.cpp
@@ -26,6 +26,63 @@
 namespace
 {
 
+#if defined(STARBOARD)
+// The following numbers are taken from Recommendation ITU-R BT.2100-2 (07/2018),
+// Table 4 - PQ system reference non-linear transfer functions:
+// c1 = 0.8359375;
+// c2 = 18.8515625;
+// c3 = 18.6875;
+// m1 = 0.159301758125;
+// m2 = 78.84375;
+const std::string BT709_TO_BT2020_SHADER =
+    "struct PS_OUTPUT\n"
+    "{\n"
+    "    float4 gl_Color0 : SV_TARGET0;\n"
+    "};\n"
+    "#define kRefWhiteLevelSRGB 290.0f\n"
+    "#define kRefWhiteLevelPQ 10000.0f\n"
+    "static const float3x3 BT709_TO_BT2020 = { // ref: ARIB STD-B62 and BT.2087\n"
+    "  0.6274,    0.3293,    0.0433,\n"
+    "  0.0691,    0.9195,    0.0114,\n"
+    "  0.0164,    0.0880,    0.8956\n"
+    "};\n"
+    "float3 SRGB_EOTF(float3 E)\n"
+    "{\n"
+    "  float3 dark = E/12.92;\n"
+    "  float3 light = pow(abs((E+0.055)/(1+0.055)), 2.4);\n"
+    "  bool3 cri = E <= 0.04045;\n"
+    "  float3 cri_float = (float3)cri;\n"
+    "  float3 r = lerp(light, dark, cri_float);\n"
+    "  r = r * kRefWhiteLevelSRGB;\n"
+    "  return r;\n"
+    "}\n"
+    "//input: normalized L in units of RefWhite (1.0=100nits), output: normalized E\n"
+    "float3 PQ_OETF(float3 L)\n"
+    "{\n"
+    "  const float c1 = 0.8359375;\n"
+    "  const float c2 = 18.8515625;\n"
+    "  const float c3 = 18.6875;\n"
+    "  const float m1 = 0.159301758125;\n"
+    "  const float m2 = 78.84375;\n"
+    "  L = L / kRefWhiteLevelPQ;\n"
+    "  float3 Lm1 = pow(abs(L), m1);\n"
+    "  float3 X = (c1 + c2 * Lm1) / (1 + c3 * Lm1);\n"
+    "  float3 res = pow(abs(X), m2);\n"
+    "  return res;\n"
+    "}\n"
+    "PS_OUTPUT generateOutput()\n"
+    "{\n"
+    "    PS_OUTPUT output;\n"
+    "   \n"
+    "    float3 input_colors = gl_Color[0].rgb;\n"
+    "    float3 lin_osd_graphics = SRGB_EOTF(input_colors);\n"
+    "    lin_osd_graphics =  mul(BT709_TO_BT2020, lin_osd_graphics);\n"
+    "    output.gl_Color0.rgb = PQ_OETF(lin_osd_graphics);\n"
+    "    output.gl_Color0.a = gl_Color[0].a;\n"
+    "    return output;\n"
+    "}\n";
+#endif  // STARBOARD
+
 std::string HLSLComponentTypeString(GLenum componentType)
 {
     switch (componentType)
@@ -322,6 +379,21 @@
     return pixelHLSL;
 }
 
+#if defined(STARBOARD)
+std::string DynamicHLSL::generatePixelShaderForHdrOutputSignature(
+    const std::string &sourceShader,
+    const std::vector<PixelShaderOutputVariable> &outputVariables,
+    bool usesFragDepth,
+    const std::vector<GLenum> &outputLayout) const
+{
+    std::string pixelHLSL(sourceShader);
+    size_t outputInsertionPos = pixelHLSL.find(PIXEL_OUTPUT_STUB_STRING);
+    pixelHLSL.replace(outputInsertionPos, strlen(PIXEL_OUTPUT_STUB_STRING), BT709_TO_BT2020_SHADER);
+
+    return pixelHLSL;
+}
+#endif  // STARBOARD
+
 void DynamicHLSL::generateVaryingLinkHLSL(const VaryingPacking &varyingPacking,
                                           const BuiltinInfo &builtins,
                                           bool programUsesPointSize,
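
The HLSL string above performs the 8-bit sRGB to 10-bit PQ/BT.2020 conversion in three steps: linearize with the sRGB EOTF, rotate primaries with the BT.709 to BT.2020 matrix, then re-encode with the PQ OETF using the BT.2100 constants c1..m2. A C++ transcription of the same math for a single RGB triple, useful for spot-checking the shader; the reference-white levels and coefficients are copied from the string above, everything else is illustrative:

#include <array>
#include <cmath>
#include <cstdio>

using Vec3 = std::array<double, 3>;

constexpr double kRefWhiteLevelSRGB = 290.0;
constexpr double kRefWhiteLevelPQ = 10000.0;

// ref: ARIB STD-B62 and BT.2087 (same coefficients as the shader string).
constexpr double kBT709ToBT2020[3][3] = {
    {0.6274, 0.3293, 0.0433},
    {0.0691, 0.9195, 0.0114},
    {0.0164, 0.0880, 0.8956},
};

// sRGB EOTF scaled so that 1.0 maps to kRefWhiteLevelSRGB nits.
Vec3 SrgbEotf(const Vec3& e) {
  Vec3 out;
  for (int i = 0; i < 3; ++i) {
    double v = e[i] <= 0.04045 ? e[i] / 12.92
                               : std::pow((e[i] + 0.055) / 1.055, 2.4);
    out[i] = v * kRefWhiteLevelSRGB;
  }
  return out;
}

Vec3 Mul(const double m[3][3], const Vec3& v) {
  Vec3 out{};
  for (int r = 0; r < 3; ++r)
    for (int c = 0; c < 3; ++c) out[r] += m[r][c] * v[c];
  return out;
}

// PQ OETF, BT.2100-2 Table 4. Input is absolute luminance in nits.
Vec3 PqOetf(const Vec3& l) {
  constexpr double c1 = 0.8359375, c2 = 18.8515625, c3 = 18.6875;
  constexpr double m1 = 0.159301758125, m2 = 78.84375;
  Vec3 out;
  for (int i = 0; i < 3; ++i) {
    double ln = l[i] / kRefWhiteLevelPQ;
    double lm1 = std::pow(ln, m1);
    out[i] = std::pow((c1 + c2 * lm1) / (1 + c3 * lm1), m2);
  }
  return out;
}

int main() {
  Vec3 srgb = {1.0, 1.0, 1.0};  // full-white UI pixel
  Vec3 pq = PqOetf(Mul(kBT709ToBT2020, SrgbEotf(srgb)));
  std::printf("PQ-encoded white: %.4f %.4f %.4f\n", pq[0], pq[1], pq[2]);
}
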
diff --git a/src/third_party/angle/src/libANGLE/renderer/d3d/DynamicHLSL.h b/src/third_party/angle/src/libANGLE/renderer/d3d/DynamicHLSL.h
index 0972a62..c0bfc05 100644
--- a/src/third_party/angle/src/libANGLE/renderer/d3d/DynamicHLSL.h
+++ b/src/third_party/angle/src/libANGLE/renderer/d3d/DynamicHLSL.h
@@ -112,6 +112,13 @@
         const std::string &sourceShader,
         const gl::InputLayout &inputLayout,
         const std::vector<sh::Attribute> &shaderAttributes) const;
+#if defined(STARBOARD)
+    std::string generatePixelShaderForHdrOutputSignature(
+        const std::string &sourceShader,
+        const std::vector<PixelShaderOutputVariable> &outputVariables,
+        bool usesFragDepth,
+        const std::vector<GLenum> &outputLayout) const;
+#endif  // STARBOARD
     std::string generatePixelShaderForOutputSignature(
         const std::string &sourceShader,
         const std::vector<PixelShaderOutputVariable> &outputVariables,
diff --git a/src/third_party/angle/src/libANGLE/renderer/d3d/ProgramD3D.cpp b/src/third_party/angle/src/libANGLE/renderer/d3d/ProgramD3D.cpp
index db531ee..789bced 100644
--- a/src/third_party/angle/src/libANGLE/renderer/d3d/ProgramD3D.cpp
+++ b/src/third_party/angle/src/libANGLE/renderer/d3d/ProgramD3D.cpp
@@ -1148,6 +1148,76 @@
 {
 }
 
+#if defined(STARBOARD)
+gl::Error ProgramD3D::getPixelExecutableForHdrFramebuffer(const gl::Framebuffer *fbo,
+                                                          ShaderExecutableD3D **outExecutable)
+{
+    mPixelShaderOutputFormatCache.clear();
+
+    const FramebufferD3D *fboD3D = GetImplAs<FramebufferD3D>(fbo);
+    const gl::AttachmentList &colorbuffers = fboD3D->getColorAttachmentsForRender();
+
+    for (size_t colorAttachment = 0; colorAttachment < colorbuffers.size(); ++colorAttachment)
+    {
+        const gl::FramebufferAttachment *colorbuffer = colorbuffers[colorAttachment];
+
+        if (colorbuffer)
+        {
+            mPixelShaderOutputFormatCache.push_back(colorbuffer->getBinding() == GL_BACK
+                                                        ? GL_COLOR_ATTACHMENT0
+                                                        : colorbuffer->getBinding());
+        }
+        else
+        {
+            mPixelShaderOutputFormatCache.push_back(GL_NONE);
+        }
+    }
+
+    return getPixelExecutableForHdrOutputLayout(mPixelShaderOutputFormatCache, outExecutable,
+                                                nullptr);
+}
+
+gl::Error ProgramD3D::getPixelExecutableForHdrOutputLayout(
+    const std::vector<GLenum> &outputSignature,
+    ShaderExecutableD3D **outExecutable,
+    gl::InfoLog *infoLog)
+{
+    if (mPixelHdrExecutable)
+    {
+        *outExecutable = mPixelHdrExecutable->shaderExecutable();
+        return gl::NoError();
+    }
+
+    std::string finalPixelHLSL = mDynamicHLSL->generatePixelShaderForHdrOutputSignature(
+        mPixelHLSL, mPixelShaderKey, mUsesFragDepth, outputSignature);
+
+    // Generate new pixel executable
+    ShaderExecutableD3D *pixelExecutable = nullptr;
+
+    gl::InfoLog tempInfoLog;
+    gl::InfoLog *currentInfoLog = infoLog ? infoLog : &tempInfoLog;
+
+    ANGLE_TRY(mRenderer->compileToExecutable(
+        *currentInfoLog, finalPixelHLSL, SHADER_PIXEL, mStreamOutVaryings,
+        (mState.getTransformFeedbackBufferMode() == GL_SEPARATE_ATTRIBS), mPixelWorkarounds,
+        &pixelExecutable));
+
+    if (pixelExecutable)
+    {
+        mPixelHdrExecutable =
+            std::unique_ptr<PixelExecutable>(new PixelExecutable(outputSignature, pixelExecutable));
+    }
+    else if (!infoLog)
+    {
+        ERR() << "Error compiling BT709 to BT2020 pixel executable:" << std::endl
+              << tempInfoLog.str() << std::endl;
+    }
+
+    *outExecutable = pixelExecutable;
+    return gl::NoError();
+}
+#endif  // STARBOARD
+
 gl::Error ProgramD3D::getPixelExecutableForFramebuffer(const gl::Framebuffer *fbo,
                                                        ShaderExecutableD3D **outExecutable)
 {
@@ -2328,6 +2398,9 @@
 {
     mVertexExecutables.clear();
     mPixelExecutables.clear();
+#if defined(STARBOARD)
+    mPixelHdrExecutable.reset();
+#endif  // STARBOARD
 
     for (auto &geometryExecutable : mGeometryExecutables)
     {
diff --git a/src/third_party/angle/src/libANGLE/renderer/d3d/ProgramD3D.h b/src/third_party/angle/src/libANGLE/renderer/d3d/ProgramD3D.h
index 19b7dbf..d6015fb 100644
--- a/src/third_party/angle/src/libANGLE/renderer/d3d/ProgramD3D.h
+++ b/src/third_party/angle/src/libANGLE/renderer/d3d/ProgramD3D.h
@@ -170,6 +170,13 @@
     gl::Error getPixelExecutableForOutputLayout(const std::vector<GLenum> &outputLayout,
                                                 ShaderExecutableD3D **outExectuable,
                                                 gl::InfoLog *infoLog);
+#if defined(STARBOARD)
+    gl::Error getPixelExecutableForHdrFramebuffer(const gl::Framebuffer *fbo,
+                                                  ShaderExecutableD3D **outExectuable);
+    gl::Error getPixelExecutableForHdrOutputLayout(const std::vector<GLenum> &outputLayout,
+                                                   ShaderExecutableD3D **outExectuable,
+                                                   gl::InfoLog *infoLog);
+#endif  // STARBOARD
     gl::Error getVertexExecutableForInputLayout(const gl::InputLayout &inputLayout,
                                                 ShaderExecutableD3D **outExectuable,
                                                 gl::InfoLog *infoLog);
@@ -396,7 +403,9 @@
     std::vector<std::unique_ptr<PixelExecutable>> mPixelExecutables;
     std::vector<std::unique_ptr<ShaderExecutableD3D>> mGeometryExecutables;
     std::unique_ptr<ShaderExecutableD3D> mComputeExecutable;
-
+#if defined(STARBOARD)
+    std::unique_ptr<PixelExecutable> mPixelHdrExecutable;
+#endif  // STARBOARD
     std::string mVertexHLSL;
     angle::CompilerWorkaroundsD3D mVertexWorkarounds;
 
diff --git a/src/third_party/angle/src/libANGLE/renderer/d3d/d3d11/Renderer11.cpp b/src/third_party/angle/src/libANGLE/renderer/d3d/d3d11/Renderer11.cpp
index d9b0b27..1a8601d 100644
--- a/src/third_party/angle/src/libANGLE/renderer/d3d/d3d11/Renderer11.cpp
+++ b/src/third_party/angle/src/libANGLE/renderer/d3d/d3d11/Renderer11.cpp
@@ -13,6 +13,9 @@
 #include <versionhelpers.h>
 #include <sstream>
 
+#if defined(STARBOARD)
+#include "angle_hdr.h"
+#endif  // STARBOARD
 #include "common/tls.h"
 #include "common/utilities.h"
 #include "libANGLE/Buffer.h"
@@ -378,6 +381,35 @@
 
 const uint32_t ScratchMemoryBufferLifetime = 1000;
 
+#if defined(STARBOARD)
+angle::Format::ID GetTextureFormatId(const gl::ContextState &data)
+{
+    const auto &glState    = data.getState();
+    ProgramD3D *programD3D = GetImplAs<ProgramD3D>(glState.getProgram());
+
+    gl::SamplerType type      = gl::SAMPLER_PIXEL;
+    unsigned int samplerRange = programD3D->getUsedSamplerRange(type);
+    for (unsigned int i = 0; i < samplerRange; i++)
+    {
+        GLint textureUnit = programD3D->getSamplerMapping(type, i, data.getCaps());
+        if (textureUnit != -1)
+        {
+            gl::Texture *texture = data.getState().getSamplerTexture(
+                textureUnit, programD3D->getSamplerTextureType(type, i));
+            ASSERT(texture);
+            rx::TextureD3D *textureD3D = GetImplAs<TextureD3D>(texture);
+            TextureStorage *texStorage = nullptr;
+            textureD3D->getNativeTexture(&texStorage);
+            if (texStorage)
+            {
+                return GetAs<TextureStorage11_2D>(texStorage)->getFormat().id;
+            }
+        }
+    }
+    return angle::Format::ID::NONE;
+}
+#endif  // STARBOARD
+
 }  // anonymous namespace
 
 Renderer11::Renderer11(egl::Display *display)
@@ -2455,7 +2487,30 @@
 
     const gl::Framebuffer *drawFramebuffer = glState.getDrawFramebuffer();
     ShaderExecutableD3D *pixelExe          = nullptr;
+#if defined(STARBOARD)
+    // While 10-bit HDR video is playing, we run a pixel shader that converts the color space of
+    // all UI elements from 8-bit BT709 to 10-bit BT2020 for every draw call that does not involve
+    // the HDR video texture (see Recommendation ITU-R BT.2100-2 (07/2018) for the BT709 to BT2020
+    // transform). This conversion is applied only once, when we draw to the display, i.e. when
+    // drawFramebuffer->id() is 0.
+    if (IsHdrAngleModeEnabled() && drawFramebuffer->id() == 0)
+    {
+        if (GetTextureFormatId(data) == angle::Format::ID::R10G10B10A2_UNORM ||
+            GetTextureFormatId(data) == angle::Format::ID::R16_UNORM)
+        {
+            ANGLE_TRY(programD3D->getPixelExecutableForFramebuffer(drawFramebuffer, &pixelExe));
+        }
+        else
+        {
+            ANGLE_TRY(programD3D->getPixelExecutableForHdrFramebuffer(drawFramebuffer, &pixelExe));
+        }
+    }
+    else
+    {
+        ANGLE_TRY(programD3D->getPixelExecutableForFramebuffer(drawFramebuffer, &pixelExe));
+    }
+#else
     ANGLE_TRY(programD3D->getPixelExecutableForFramebuffer(drawFramebuffer, &pixelExe));
+#endif  // STARBOARD
 
     ShaderExecutableD3D *geometryExe = nullptr;
     ANGLE_TRY(
diff --git a/src/third_party/angle/src/libANGLE/renderer/d3d/d3d11/SwapChain11.cpp b/src/third_party/angle/src/libANGLE/renderer/d3d/d3d11/SwapChain11.cpp
index a8f4627..4d693f0 100644
--- a/src/third_party/angle/src/libANGLE/renderer/d3d/d3d11/SwapChain11.cpp
+++ b/src/third_party/angle/src/libANGLE/renderer/d3d/d3d11/SwapChain11.cpp
@@ -10,6 +10,9 @@
 
 #include <EGL/eglext.h>
 
+#if defined(STARBOARD)
+#include "angle_hdr.h"
+#endif  // STARBOARD
 #include "libANGLE/features.h"
 #include "libANGLE/renderer/d3d/d3d11/formatutils11.h"
 #include "libANGLE/renderer/d3d/d3d11/NativeWindow11.h"
@@ -23,6 +26,12 @@
 #include "libANGLE/renderer/d3d/d3d11/shaders/compiled/passthroughrgba2d11ps.h"
 #include "libANGLE/renderer/d3d/d3d11/shaders/compiled/passthroughrgba2dms11ps.h"
 
+#if defined(STARBOARD)
+#include <initguid.h>
+#include <dxgi1_4.h>
+#include <dxgi1_6.h>
+#endif  // STARBOARD
+
 #ifdef ANGLE_ENABLE_KEYEDMUTEX
 #define ANGLE_RESOURCE_SHARE_TYPE D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX
 #else
@@ -753,6 +762,37 @@
         return result;
     }
 
+#if defined(STARBOARD)
+    if (IsHdrAngleModeEnabled())
+    {
+        if (mCurrentColorSpace != DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020)
+        {
+            IDXGISwapChain3 *swapChain3 = static_cast<IDXGISwapChain3 *>(mSwapChain);
+            result = swapChain3->SetColorSpace1(DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020);
+            if (FAILED(result))
+            {
+                ERR() << "Color space DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020 setup failed.";
+                return EGL_BAD_CONFIG;
+            }
+            mCurrentColorSpace = DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020;
+        }
+    }
+    else
+    {
+        if (mCurrentColorSpace != DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709)
+        {
+            IDXGISwapChain3 *swapChain3 = static_cast<IDXGISwapChain3 *>(mSwapChain);
+            result = swapChain3->SetColorSpace1(DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709);
+            if (FAILED(result))
+            {
+                ERR() << "Color space DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709 setup failed.";
+                return EGL_BAD_CONFIG;
+            }
+            mCurrentColorSpace = DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709;
+        }
+    }
+#endif  // STARBOARD
+
     mRenderer->onSwap();
 
     return EGL_SUCCESS;
diff --git a/src/third_party/angle/src/libANGLE/renderer/d3d/d3d11/SwapChain11.h b/src/third_party/angle/src/libANGLE/renderer/d3d/d3d11/SwapChain11.h
index 85bd35d..71b9ad4 100644
--- a/src/third_party/angle/src/libANGLE/renderer/d3d/d3d11/SwapChain11.h
+++ b/src/third_party/angle/src/libANGLE/renderer/d3d/d3d11/SwapChain11.h
@@ -114,6 +114,9 @@
 
     EGLint mEGLSamples;
     LONGLONG mQPCFrequency;
+#if defined(STARBOARD)
+    DXGI_COLOR_SPACE_TYPE mCurrentColorSpace = DXGI_COLOR_SPACE_CUSTOM;
+#endif // STARBOARD
 };
 
 }  // namespace rx
diff --git a/src/third_party/angle/src/libANGLE/renderer/d3d/d3d11/TextureStorage11.cpp b/src/third_party/angle/src/libANGLE/renderer/d3d/d3d11/TextureStorage11.cpp
index 5edc65d..449e811 100644
--- a/src/third_party/angle/src/libANGLE/renderer/d3d/d3d11/TextureStorage11.cpp
+++ b/src/third_party/angle/src/libANGLE/renderer/d3d/d3d11/TextureStorage11.cpp
@@ -264,6 +264,15 @@
     return gl::NoError();
 }
 
+#if defined(STARBOARD)
+const angle::Format &TextureStorage11_2D::getFormat()
+{
+    D3D11_TEXTURE2D_DESC desc = {0};
+    mTexture->GetDesc(&desc);
+    return d3d11_angle::GetFormat(desc.Format);
+}
+#endif  // STARBOARD
+
 gl::Error TextureStorage11::getCachedOrCreateSRV(const SRVKey &key,
                                                  ID3D11ShaderResourceView **outSRV)
 {
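
The Starboard-only `getFormat()` added above exposes the backing texture's format. A hedged usage sketch (the helper name and the HDR-related use case are illustrative assumptions, not part of the patch):

```c++
#if defined(STARBOARD)
// Sketch only: inspect the storage format, e.g. to decide whether an
// HDR-capable pixel shader variant should be selected for this texture.
// Assumes angle::Format exposes a componentType member, as in this ANGLE tree.
bool IsFloatBackedStorage(rx::TextureStorage11_2D *storage)
{
    const angle::Format &format = storage->getFormat();
    return format.componentType == GL_FLOAT;
}
#endif  // STARBOARD
```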
diff --git a/src/third_party/angle/src/libANGLE/renderer/d3d/d3d11/TextureStorage11.h b/src/third_party/angle/src/libANGLE/renderer/d3d/d3d11/TextureStorage11.h
index 366c7ea3..9712194 100644
--- a/src/third_party/angle/src/libANGLE/renderer/d3d/d3d11/TextureStorage11.h
+++ b/src/third_party/angle/src/libANGLE/renderer/d3d/d3d11/TextureStorage11.h
@@ -173,6 +173,9 @@
     gl::Error releaseAssociatedImage(const gl::ImageIndex &index, Image11 *incomingImage) override;
 
     gl::Error useLevelZeroWorkaroundTexture(bool useLevelZeroTexture) override;
+#if defined(STARBOARD)
+    const angle::Format &getFormat();
+#endif // STARBOARD
 
   protected:
     gl::Error getSwizzleTexture(ID3D11Resource **outTexture) override;
diff --git a/src/third_party/angle/src/libGLESv2.gypi b/src/third_party/angle/src/libGLESv2.gypi
index aa05e4c..b691558 100644
--- a/src/third_party/angle/src/libGLESv2.gypi
+++ b/src/third_party/angle/src/libGLESv2.gypi
@@ -14,6 +14,7 @@
             '<(DEPTH)/third_party/angle/src/common/MemoryBuffer.cpp',
             '<(DEPTH)/third_party/angle/src/common/MemoryBuffer.h',
             '<(DEPTH)/third_party/angle/src/common/Optional.h',
+            '<(DEPTH)/third_party/angle/src/common/angle_hdr.cpp',
             '<(DEPTH)/third_party/angle/src/common/angleutils.cpp',
             '<(DEPTH)/third_party/angle/src/common/angleutils.h',
             '<(DEPTH)/third_party/angle/src/common/bitset_utils.h',
@@ -92,6 +93,7 @@
         'libangle_includes':
         [
             '<(DEPTH)/third_party/angle/include/angle_gl.h',
+            '<(DEPTH)/third_party/angle/include/angle_hdr.h',
             '<(DEPTH)/third_party/angle/include/export.h',
             '<(DEPTH)/third_party/angle/include/EGL/egl.h',
             '<(DEPTH)/third_party/angle/include/EGL/eglext.h',
diff --git a/src/third_party/dlmalloc/dlmalloc_config.h b/src/third_party/dlmalloc/dlmalloc_config.h
index 8b8543f..9335f50 100644
--- a/src/third_party/dlmalloc/dlmalloc_config.h
+++ b/src/third_party/dlmalloc/dlmalloc_config.h
@@ -19,6 +19,7 @@
 #if defined(STARBOARD)
 #include <sys/types.h>  // for ssize_t, maybe should add to starboard/types.h
 #include "starboard/configuration.h"
+#include "starboard/configuration_constants.h"
 #include "starboard/mutex.h"
 // Define STARBOARD_IMPLEMENTATION to allow inclusion of an internal Starboard
 // header. This is "okay" because dlmalloc is essentially an implementation
@@ -132,7 +133,7 @@
 #define DEFAULT_MMAP_THRESHOLD SB_DEFAULT_MMAP_THRESHOLD
 #endif
 
-#define MALLOC_ALIGNMENT SB_MALLOC_ALIGNMENT
+#define MALLOC_ALIGNMENT kSbMallocAlignment
 #define FORCEINLINE SB_C_FORCE_INLINE
 #define NOINLINE SB_C_NOINLINE
 #define LACKS_UNISTD_H 1
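
Because `kSbMallocAlignment` is a runtime constant from `starboard/configuration_constants.h` rather than a preprocessor macro like the old `SB_MALLOC_ALIGNMENT`, any alignment validation has to happen at run time. A minimal sketch (illustrative, not part of the patch):

```c++
#include <assert.h>

#include "starboard/configuration_constants.h"

// Sketch only: dlmalloc's chunk arithmetic assumes the malloc alignment is a
// nonzero power of two; with a runtime constant this becomes a runtime check.
static void CheckMallocAlignment(void) {
  assert(kSbMallocAlignment != 0 &&
         (kSbMallocAlignment & (kSbMallocAlignment - 1)) == 0);
}
```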
diff --git a/src/third_party/google_benchmark/.clang-format b/src/third_party/google_benchmark/.clang-format
new file mode 100644
index 0000000..e7d00fe
--- /dev/null
+++ b/src/third_party/google_benchmark/.clang-format
@@ -0,0 +1,5 @@
+---
+Language:        Cpp
+BasedOnStyle:  Google
+PointerAlignment: Left
+...
diff --git a/src/third_party/google_benchmark/.travis-libcxx-setup.sh b/src/third_party/google_benchmark/.travis-libcxx-setup.sh
new file mode 100644
index 0000000..a591743
--- /dev/null
+++ b/src/third_party/google_benchmark/.travis-libcxx-setup.sh
@@ -0,0 +1,28 @@
+#!/usr/bin/env bash
+
+# Install a newer CMake version
+curl -sSL https://cmake.org/files/v3.6/cmake-3.6.1-Linux-x86_64.sh -o install-cmake.sh
+chmod +x install-cmake.sh
+sudo ./install-cmake.sh --prefix=/usr/local --skip-license
+
+# Check out LLVM sources
+git clone --depth=1 https://github.com/llvm-mirror/llvm.git llvm-source
+git clone --depth=1 https://github.com/llvm-mirror/libcxx.git llvm-source/projects/libcxx
+git clone --depth=1 https://github.com/llvm-mirror/libcxxabi.git llvm-source/projects/libcxxabi
+
+# Set up libc++ options
+if [ -z "$BUILD_32_BITS" ]; then
+  export BUILD_32_BITS=OFF && echo disabling 32 bit build
+fi
+
+# Build and install libc++ (Use unstable ABI for better sanitizer coverage)
+mkdir llvm-build && cd llvm-build
+cmake -DCMAKE_C_COMPILER=${C_COMPILER} -DCMAKE_CXX_COMPILER=${COMPILER} \
+      -DCMAKE_BUILD_TYPE=RelWithDebInfo -DCMAKE_INSTALL_PREFIX=/usr \
+      -DLIBCXX_ABI_UNSTABLE=ON \
+      -DLLVM_USE_SANITIZER=${LIBCXX_SANITIZER} \
+      -DLLVM_BUILD_32_BITS=${BUILD_32_BITS} \
+      ../llvm-source
+make cxx -j2
+sudo make install-cxxabi install-cxx
+cd ../
diff --git a/src/third_party/google_benchmark/.travis.yml b/src/third_party/google_benchmark/.travis.yml
new file mode 100644
index 0000000..6b6cfc7
--- /dev/null
+++ b/src/third_party/google_benchmark/.travis.yml
@@ -0,0 +1,235 @@
+sudo: required
+dist: trusty
+language: cpp
+
+env:
+  global:
+    - /usr/local/bin:$PATH
+
+matrix:
+  include:
+    - compiler: gcc
+      addons:
+        apt:
+          packages:
+            - lcov
+      env: COMPILER=g++ C_COMPILER=gcc BUILD_TYPE=Coverage
+    - compiler: gcc
+      env: COMPILER=g++ C_COMPILER=gcc BUILD_TYPE=Debug
+    - compiler: gcc
+      env: COMPILER=g++ C_COMPILER=gcc BUILD_TYPE=Release
+    - compiler: gcc
+      addons:
+        apt:
+          packages:
+            - g++-multilib
+            - libc6:i386
+      env:
+        - COMPILER=g++
+        - C_COMPILER=gcc
+        - BUILD_TYPE=Debug
+        - BUILD_32_BITS=ON
+        - EXTRA_FLAGS="-m32"
+    - compiler: gcc
+      addons:
+        apt:
+          packages:
+            - g++-multilib
+            - libc6:i386
+      env:
+        - COMPILER=g++
+        - C_COMPILER=gcc
+        - BUILD_TYPE=Release
+        - BUILD_32_BITS=ON
+        - EXTRA_FLAGS="-m32"
+    - compiler: gcc
+      env:
+        - INSTALL_GCC6_FROM_PPA=1
+        - COMPILER=g++-6 C_COMPILER=gcc-6  BUILD_TYPE=Debug
+        - ENABLE_SANITIZER=1
+        - EXTRA_FLAGS="-fno-omit-frame-pointer -g -O2 -fsanitize=undefined,address -fuse-ld=gold"
+    - compiler: clang
+      env: COMPILER=clang++ C_COMPILER=clang BUILD_TYPE=Debug
+    - compiler: clang
+      env: COMPILER=clang++ C_COMPILER=clang BUILD_TYPE=Release
+    # Clang w/ libc++
+    - compiler: clang
+      dist: xenial
+      addons:
+        apt:
+          packages:
+            clang-3.8
+      env:
+        - INSTALL_GCC6_FROM_PPA=1
+        - COMPILER=clang++-3.8 C_COMPILER=clang-3.8 BUILD_TYPE=Debug
+        - LIBCXX_BUILD=1
+        - EXTRA_CXX_FLAGS="-stdlib=libc++"
+    - compiler: clang
+      dist: xenial
+      addons:
+        apt:
+          packages:
+            clang-3.8
+      env:
+        - INSTALL_GCC6_FROM_PPA=1
+        - COMPILER=clang++-3.8 C_COMPILER=clang-3.8 BUILD_TYPE=Release
+        - LIBCXX_BUILD=1
+        - EXTRA_CXX_FLAGS="-stdlib=libc++"
+    # Clang w/ 32bit libc++
+    - compiler: clang
+      dist: xenial
+      addons:
+        apt:
+          packages:
+            - clang-3.8
+            - g++-multilib
+            - libc6:i386
+      env:
+        - INSTALL_GCC6_FROM_PPA=1
+        - COMPILER=clang++-3.8 C_COMPILER=clang-3.8 BUILD_TYPE=Debug
+        - LIBCXX_BUILD=1
+        - BUILD_32_BITS=ON
+        - EXTRA_FLAGS="-m32"
+        - EXTRA_CXX_FLAGS="-stdlib=libc++"
+    # Clang w/ 32bit libc++
+    - compiler: clang
+      dist: xenial
+      addons:
+        apt:
+          packages:
+            - clang-3.8
+            - g++-multilib
+            - libc6:i386
+      env:
+        - INSTALL_GCC6_FROM_PPA=1
+        - COMPILER=clang++-3.8 C_COMPILER=clang-3.8 BUILD_TYPE=Release
+        - LIBCXX_BUILD=1
+        - BUILD_32_BITS=ON
+        - EXTRA_FLAGS="-m32"
+        - EXTRA_CXX_FLAGS="-stdlib=libc++"
+    # Clang w/ libc++, ASAN, UBSAN
+    - compiler: clang
+      dist: xenial
+      addons:
+        apt:
+          packages:
+            clang-3.8
+      env:
+        - INSTALL_GCC6_FROM_PPA=1
+        - COMPILER=clang++-3.8 C_COMPILER=clang-3.8 BUILD_TYPE=Debug
+        - LIBCXX_BUILD=1 LIBCXX_SANITIZER="Undefined;Address"
+        - ENABLE_SANITIZER=1
+        - EXTRA_FLAGS="-g -O2 -fno-omit-frame-pointer -fsanitize=undefined,address -fno-sanitize-recover=all"
+        - EXTRA_CXX_FLAGS="-stdlib=libc++"
+        - UBSAN_OPTIONS=print_stacktrace=1
+    # Clang w/ libc++ and MSAN
+    - compiler: clang
+      dist: xenial
+      addons:
+        apt:
+          packages:
+            clang-3.8
+      env:
+        - INSTALL_GCC6_FROM_PPA=1
+        - COMPILER=clang++-3.8 C_COMPILER=clang-3.8 BUILD_TYPE=Debug
+        - LIBCXX_BUILD=1 LIBCXX_SANITIZER=MemoryWithOrigins
+        - ENABLE_SANITIZER=1
+        - EXTRA_FLAGS="-g -O2 -fno-omit-frame-pointer -fsanitize=memory -fsanitize-memory-track-origins"
+        - EXTRA_CXX_FLAGS="-stdlib=libc++"
+    # Clang w/ libc++ and MSAN
+    - compiler: clang
+      dist: xenial
+      addons:
+        apt:
+          packages:
+            clang-3.8
+      env:
+        - INSTALL_GCC6_FROM_PPA=1
+        - COMPILER=clang++-3.8 C_COMPILER=clang-3.8 BUILD_TYPE=RelWithDebInfo
+        - LIBCXX_BUILD=1 LIBCXX_SANITIZER=Thread
+        - ENABLE_SANITIZER=1
+        - EXTRA_FLAGS="-g -O2 -fno-omit-frame-pointer -fsanitize=thread -fno-sanitize-recover=all"
+        - EXTRA_CXX_FLAGS="-stdlib=libc++"
+    - os: osx
+      osx_image: xcode8.3
+      compiler: clang
+      env:
+        - COMPILER=clang++ BUILD_TYPE=Debug
+    - os: osx
+      osx_image: xcode8.3
+      compiler: clang
+      env:
+        - COMPILER=clang++ BUILD_TYPE=Release
+    - os: osx
+      osx_image: xcode8.3
+      compiler: clang
+      env:
+        - COMPILER=clang++
+        - BUILD_TYPE=Release
+        - BUILD_32_BITS=ON
+        - EXTRA_FLAGS="-m32"
+    - os: osx
+      osx_image: xcode8.3
+      compiler: gcc
+      env:
+        - COMPILER=g++-7 C_COMPILER=gcc-7  BUILD_TYPE=Debug
+
+before_script:
+  - if [ -n "${LIBCXX_BUILD}" ]; then
+      source .travis-libcxx-setup.sh;
+    fi
+  - if [ -n "${ENABLE_SANITIZER}" ]; then
+      export EXTRA_OPTIONS="-DBENCHMARK_ENABLE_ASSEMBLY_TESTS=OFF";
+    else
+      export EXTRA_OPTIONS="";
+    fi
+  - mkdir -p build && cd build
+
+before_install:
+  - if [ -z "$BUILD_32_BITS" ]; then
+      export BUILD_32_BITS=OFF && echo disabling 32 bit build;
+    fi
+  - if [ -n "${INSTALL_GCC6_FROM_PPA}" ]; then
+      sudo add-apt-repository -y "ppa:ubuntu-toolchain-r/test";
+      sudo apt-get update --option Acquire::Retries=100 --option Acquire::http::Timeout="60";
+    fi
+
+install:
+  - if [ -n "${INSTALL_GCC6_FROM_PPA}" ]; then
+      travis_wait sudo -E apt-get -yq --no-install-suggests --no-install-recommends install g++-6;
+    fi
+  - if [ "${TRAVIS_OS_NAME}" == "linux" -a "${BUILD_32_BITS}" == "OFF" ]; then
+      travis_wait sudo -E apt-get -y --no-install-suggests --no-install-recommends install llvm-3.9-tools;
+      sudo cp /usr/lib/llvm-3.9/bin/FileCheck /usr/local/bin/;
+    fi
+  - if [ "${BUILD_TYPE}" == "Coverage" -a "${TRAVIS_OS_NAME}" == "linux" ]; then
+      PATH=~/.local/bin:${PATH};
+      pip install --user --upgrade pip;
+      travis_wait pip install --user cpp-coveralls;
+    fi
+  - if [ "${C_COMPILER}" == "gcc-7" -a "${TRAVIS_OS_NAME}" == "osx" ]; then
+      rm -f /usr/local/include/c++;
+      brew update;
+      travis_wait brew install gcc@7;
+    fi
+  - if [ "${TRAVIS_OS_NAME}" == "linux" ]; then
+      sudo apt-get update -qq;
+      sudo apt-get install -qq unzip cmake3;
+      wget https://github.com/bazelbuild/bazel/releases/download/0.10.1/bazel-0.10.1-installer-linux-x86_64.sh --output-document bazel-installer.sh;
+      travis_wait sudo bash bazel-installer.sh;
+    fi
+  - if [ "${TRAVIS_OS_NAME}" == "osx" ]; then
+      curl -L -o bazel-installer.sh https://github.com/bazelbuild/bazel/releases/download/0.10.1/bazel-0.10.1-installer-darwin-x86_64.sh;
+      travis_wait sudo bash bazel-installer.sh;
+    fi
+
+script:
+  - cmake -DCMAKE_C_COMPILER=${C_COMPILER} -DCMAKE_CXX_COMPILER=${COMPILER} -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DCMAKE_C_FLAGS="${EXTRA_FLAGS}" -DCMAKE_CXX_FLAGS="${EXTRA_FLAGS} ${EXTRA_CXX_FLAGS}" -DBENCHMARK_DOWNLOAD_DEPENDENCIES=ON -DBENCHMARK_BUILD_32_BITS=${BUILD_32_BITS} ${EXTRA_OPTIONS} ..
+  - make
+  - ctest -C ${BUILD_TYPE} --output-on-failure
+  - bazel test -c dbg --define google_benchmark.have_regex=posix --announce_rc --verbose_failures --test_output=errors --keep_going //test/...
+
+after_success:
+  - if [ "${BUILD_TYPE}" == "Coverage" -a "${TRAVIS_OS_NAME}" == "linux" ]; then
+      coveralls --include src --include include --gcov-options '\-lp' --root .. --build-root .;
+    fi
diff --git a/src/third_party/google_benchmark/.ycm_extra_conf.py b/src/third_party/google_benchmark/.ycm_extra_conf.py
new file mode 100644
index 0000000..5649ddc
--- /dev/null
+++ b/src/third_party/google_benchmark/.ycm_extra_conf.py
@@ -0,0 +1,115 @@
+import os
+import ycm_core
+
+# These are the compilation flags that will be used in case there's no
+# compilation database set (by default, one is not set).
+# CHANGE THIS LIST OF FLAGS. YES, THIS IS THE DROID YOU HAVE BEEN LOOKING FOR.
+flags = [
+'-Wall',
+'-Werror',
+'-pedantic-errors',
+'-std=c++0x',
+'-fno-strict-aliasing',
+'-O3',
+'-DNDEBUG',
+# ...and the same thing goes for the magic -x option which specifies the
+# language that the files to be compiled are written in. This is mostly
+# relevant for c++ headers.
+# For a C project, you would set this to 'c' instead of 'c++'.
+'-x', 'c++',
+'-I', 'include',
+'-isystem', '/usr/include',
+'-isystem', '/usr/local/include',
+]
+
+
+# Set this to the absolute path to the folder (NOT the file!) containing the
+# compile_commands.json file to use that instead of 'flags'. See here for
+# more details: http://clang.llvm.org/docs/JSONCompilationDatabase.html
+#
+# Most projects will NOT need to set this to anything; you can just change the
+# 'flags' list of compilation flags. Notice that YCM itself uses that approach.
+compilation_database_folder = ''
+
+if os.path.exists( compilation_database_folder ):
+  database = ycm_core.CompilationDatabase( compilation_database_folder )
+else:
+  database = None
+
+SOURCE_EXTENSIONS = [ '.cc' ]
+
+def DirectoryOfThisScript():
+  return os.path.dirname( os.path.abspath( __file__ ) )
+
+
+def MakeRelativePathsInFlagsAbsolute( flags, working_directory ):
+  if not working_directory:
+    return list( flags )
+  new_flags = []
+  make_next_absolute = False
+  path_flags = [ '-isystem', '-I', '-iquote', '--sysroot=' ]
+  for flag in flags:
+    new_flag = flag
+
+    if make_next_absolute:
+      make_next_absolute = False
+      if not flag.startswith( '/' ):
+        new_flag = os.path.join( working_directory, flag )
+
+    for path_flag in path_flags:
+      if flag == path_flag:
+        make_next_absolute = True
+        break
+
+      if flag.startswith( path_flag ):
+        path = flag[ len( path_flag ): ]
+        new_flag = path_flag + os.path.join( working_directory, path )
+        break
+
+    if new_flag:
+      new_flags.append( new_flag )
+  return new_flags
+
+
+def IsHeaderFile( filename ):
+  extension = os.path.splitext( filename )[ 1 ]
+  return extension in [ '.h', '.hxx', '.hpp', '.hh' ]
+
+
+def GetCompilationInfoForFile( filename ):
+  # The compile_commands.json file generated by CMake does not have entries
+  # for header files. So we do our best by asking the db for flags for a
+  # corresponding source file, if any. If one exists, the flags for that file
+  # should be good enough.
+  if IsHeaderFile( filename ):
+    basename = os.path.splitext( filename )[ 0 ]
+    for extension in SOURCE_EXTENSIONS:
+      replacement_file = basename + extension
+      if os.path.exists( replacement_file ):
+        compilation_info = database.GetCompilationInfoForFile(
+          replacement_file )
+        if compilation_info.compiler_flags_:
+          return compilation_info
+    return None
+  return database.GetCompilationInfoForFile( filename )
+
+
+def FlagsForFile( filename, **kwargs ):
+  if database:
+    # Bear in mind that compilation_info.compiler_flags_ does NOT return a
+    # python list, but a "list-like" StringVec object
+    compilation_info = GetCompilationInfoForFile( filename )
+    if not compilation_info:
+      return None
+
+    final_flags = MakeRelativePathsInFlagsAbsolute(
+      compilation_info.compiler_flags_,
+      compilation_info.compiler_working_dir_ )
+  else:
+    relative_to = DirectoryOfThisScript()
+    final_flags = MakeRelativePathsInFlagsAbsolute( flags, relative_to )
+
+  return {
+    'flags': final_flags,
+    'do_cache': True
+  }
diff --git a/src/third_party/google_benchmark/AUTHORS b/src/third_party/google_benchmark/AUTHORS
new file mode 100644
index 0000000..35c4c8c
--- /dev/null
+++ b/src/third_party/google_benchmark/AUTHORS
@@ -0,0 +1,54 @@
+# This is the official list of benchmark authors for copyright purposes.
+# This file is distinct from the CONTRIBUTORS files.
+# See the latter for an explanation.
+#
+# Names should be added to this file as:
+#	Name or Organization <email address>
+# The email address is not required for organizations.
+#
+# Please keep the list sorted.
+
+Albert Pretorius <pretoalb@gmail.com>
+Alex Steele <steeleal123@gmail.com>
+Andriy Berestovskyy <berestovskyy@gmail.com>
+Arne Beer <arne@twobeer.de>
+Carto
+Christopher Seymour <chris.j.seymour@hotmail.com>
+Colin Braley <braley.colin@gmail.com>
+Daniel Harvey <danielharvey458@gmail.com>
+David Coeurjolly <david.coeurjolly@liris.cnrs.fr>
+Deniz Evrenci <denizevrenci@gmail.com>
+Dirac Research 
+Dominik Czarnota <dominik.b.czarnota@gmail.com>
+Eric Backus <eric_backus@alum.mit.edu>
+Eric Fiselier <eric@efcs.ca>
+Eugene Zhuk <eugene.zhuk@gmail.com>
+Evgeny Safronov <division494@gmail.com>
+Federico Ficarelli <federico.ficarelli@gmail.com>
+Felix Homann <linuxaudio@showlabor.de>
+Google Inc.
+International Business Machines Corporation
+Ismael Jimenez Martinez <ismael.jimenez.martinez@gmail.com>
+Jern-Kuan Leong <jernkuan@gmail.com>
+JianXiong Zhou <zhoujianxiong2@gmail.com>
+Joao Paulo Magalhaes <joaoppmagalhaes@gmail.com>
+Jussi Knuuttila <jussi.knuuttila@gmail.com>
+Kaito Udagawa <umireon@gmail.com>
+Kishan Kumar <kumar.kishan@outlook.com>
+Lei Xu <eddyxu@gmail.com>
+Matt Clarkson <mattyclarkson@gmail.com>
+Maxim Vafin <maxvafin@gmail.com>
+MongoDB Inc.
+Nick Hutchinson <nshutchinson@gmail.com>
+Oleksandr Sochka <sasha.sochka@gmail.com>
+Ori Livneh <ori.livneh@gmail.com>
+Paul Redmond <paul.redmond@gmail.com>
+Radoslav Yovchev <radoslav.tm@gmail.com>
+Roman Lebedev <lebedev.ri@gmail.com>
+Sayan Bhattacharjee <aero.sayan@gmail.com>
+Shuo Chen <chenshuo@chenshuo.com>
+Steinar H. Gunderson <sgunderson@bigfoot.com>
+Stripe, Inc.
+Yixuan Qiu <yixuanq@gmail.com>
+Yusuke Suzuki <utatane.tea@gmail.com>
+Zbigniew Skowron <zbychs@gmail.com>
diff --git a/src/third_party/google_benchmark/BUILD.bazel b/src/third_party/google_benchmark/BUILD.bazel
new file mode 100644
index 0000000..d97a019
--- /dev/null
+++ b/src/third_party/google_benchmark/BUILD.bazel
@@ -0,0 +1,44 @@
+licenses(["notice"])
+
+config_setting(
+    name = "windows",
+    values = {
+        "cpu": "x64_windows",
+    },
+    visibility = [":__subpackages__"],
+)
+
+load("@rules_cc//cc:defs.bzl", "cc_library")
+
+cc_library(
+    name = "benchmark",
+    srcs = glob(
+        [
+            "src/*.cc",
+            "src/*.h",
+        ],
+        exclude = ["src/benchmark_main.cc"],
+    ),
+    hdrs = ["include/benchmark/benchmark.h"],
+    linkopts = select({
+        ":windows": ["-DEFAULTLIB:shlwapi.lib"],
+        "//conditions:default": ["-pthread"],
+    }),
+    strip_include_prefix = "include",
+    visibility = ["//visibility:public"],
+)
+
+cc_library(
+    name = "benchmark_main",
+    srcs = ["src/benchmark_main.cc"],
+    hdrs = ["include/benchmark/benchmark.h"],
+    strip_include_prefix = "include",
+    visibility = ["//visibility:public"],
+    deps = [":benchmark"],
+)
+
+cc_library(
+    name = "benchmark_internal_headers",
+    hdrs = glob(["src/*.h"]),
+    visibility = ["//test:__pkg__"],
+)
diff --git a/src/third_party/google_benchmark/CMakeLists.txt b/src/third_party/google_benchmark/CMakeLists.txt
new file mode 100644
index 0000000..8cfe125
--- /dev/null
+++ b/src/third_party/google_benchmark/CMakeLists.txt
@@ -0,0 +1,277 @@
+cmake_minimum_required (VERSION 3.5.1)
+
+foreach(p
+    CMP0048 # OK to clear PROJECT_VERSION on project()
+    CMP0054 # CMake 3.1
+    CMP0056 # export EXE_LINKER_FLAGS to try_run
+    CMP0057 # Support no if() IN_LIST operator
+    CMP0063 # Honor visibility properties for all targets
+    )
+  if(POLICY ${p})
+    cmake_policy(SET ${p} NEW)
+  endif()
+endforeach()
+
+project (benchmark CXX)
+
+option(BENCHMARK_ENABLE_TESTING "Enable testing of the benchmark library." ON)
+option(BENCHMARK_ENABLE_EXCEPTIONS "Enable the use of exceptions in the benchmark library." ON)
+option(BENCHMARK_ENABLE_LTO "Enable link time optimisation of the benchmark library." OFF)
+option(BENCHMARK_USE_LIBCXX "Build and test using libc++ as the standard library." OFF)
+if(NOT MSVC)
+  option(BENCHMARK_BUILD_32_BITS "Build a 32 bit version of the library." OFF)
+else()
+  set(BENCHMARK_BUILD_32_BITS OFF CACHE BOOL "Build a 32 bit version of the library (unsupported when using MSVC)" FORCE)
+endif()
+option(BENCHMARK_ENABLE_INSTALL "Enable installation of benchmark. (Projects embedding benchmark may want to turn this OFF.)" ON)
+
+# Allow unmet dependencies to be met using CMake's ExternalProject mechanics, which
+# may require downloading the source code.
+option(BENCHMARK_DOWNLOAD_DEPENDENCIES "Allow the downloading and in-tree building of unmet dependencies" OFF)
+
+# This option can be used to disable building and running unit tests which depend on gtest
+# in cases where it is not possible to build or find a valid version of gtest.
+option(BENCHMARK_ENABLE_GTEST_TESTS "Enable building the unit tests which depend on gtest" ON)
+
+set(CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS ON)
+set(ENABLE_ASSEMBLY_TESTS_DEFAULT OFF)
+function(should_enable_assembly_tests)
+  if(CMAKE_BUILD_TYPE)
+    string(TOLOWER ${CMAKE_BUILD_TYPE} CMAKE_BUILD_TYPE_LOWER)
+    if (${CMAKE_BUILD_TYPE_LOWER} MATCHES "coverage")
+      # FIXME: The --coverage flag needs to be removed when building assembly
+      # tests for this to work.
+      return()
+    endif()
+  endif()
+  if (MSVC)
+    return()
+  elseif(NOT CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64")
+    return()
+  elseif(NOT CMAKE_SIZEOF_VOID_P EQUAL 8)
+    # FIXME: Make these work on 32 bit builds
+    return()
+  elseif(BENCHMARK_BUILD_32_BITS)
+     # FIXME: Make these work on 32 bit builds
+    return()
+  endif()
+  find_program(LLVM_FILECHECK_EXE FileCheck)
+  if (LLVM_FILECHECK_EXE)
+    set(LLVM_FILECHECK_EXE "${LLVM_FILECHECK_EXE}" CACHE PATH "llvm filecheck" FORCE)
+    message(STATUS "LLVM FileCheck Found: ${LLVM_FILECHECK_EXE}")
+  else()
+    message(STATUS "Failed to find LLVM FileCheck")
+    return()
+  endif()
+  set(ENABLE_ASSEMBLY_TESTS_DEFAULT ON PARENT_SCOPE)
+endfunction()
+should_enable_assembly_tests()
+
+# This option disables the building and running of the assembly verification tests
+option(BENCHMARK_ENABLE_ASSEMBLY_TESTS "Enable building and running the assembly tests"
+    ${ENABLE_ASSEMBLY_TESTS_DEFAULT})
+
+# Make sure we can import our CMake functions
+list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake/Modules")
+list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake")
+
+
+# Read the git tags to determine the project version
+include(GetGitVersion)
+get_git_version(GIT_VERSION)
+
+# Tell the user what versions we are using
+string(REGEX MATCH "[0-9]+\\.[0-9]+\\.[0-9]+" VERSION ${GIT_VERSION})
+message(STATUS "Version: ${VERSION}")
+
+# The version of the libraries
+set(GENERIC_LIB_VERSION ${VERSION})
+string(SUBSTRING ${VERSION} 0 1 GENERIC_LIB_SOVERSION)
+
+# Import our CMake modules
+include(CheckCXXCompilerFlag)
+include(AddCXXCompilerFlag)
+include(CXXFeatureCheck)
+
+if (BENCHMARK_BUILD_32_BITS)
+  add_required_cxx_compiler_flag(-m32)
+endif()
+
+if (MSVC)
+  # Turn compiler warnings up to 11
+  string(REGEX REPLACE "[-/]W[1-4]" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
+  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /W4")
+  add_definitions(-D_CRT_SECURE_NO_WARNINGS)
+
+  if (NOT BENCHMARK_ENABLE_EXCEPTIONS)
+    add_cxx_compiler_flag(-EHs-)
+    add_cxx_compiler_flag(-EHa-)
+    add_definitions(-D_HAS_EXCEPTIONS=0)
+  endif()
+  # Link time optimisation
+  if (BENCHMARK_ENABLE_LTO)
+    set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} /GL")
+    set(CMAKE_STATIC_LINKER_FLAGS_RELEASE "${CMAKE_STATIC_LINKER_FLAGS_RELEASE} /LTCG")
+    set(CMAKE_SHARED_LINKER_FLAGS_RELEASE "${CMAKE_SHARED_LINKER_FLAGS_RELEASE} /LTCG")
+    set(CMAKE_EXE_LINKER_FLAGS_RELEASE "${CMAKE_EXE_LINKER_FLAGS_RELEASE} /LTCG")
+
+    set(CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} /GL")
+    string(REGEX REPLACE "[-/]INCREMENTAL" "/INCREMENTAL:NO" CMAKE_STATIC_LINKER_FLAGS_RELWITHDEBINFO "${CMAKE_STATIC_LINKER_FLAGS_RELWITHDEBINFO}")
+    set(CMAKE_STATIC_LINKER_FLAGS_RELWITHDEBINFO "${CMAKE_STATIC_LINKER_FLAGS_RELWITHDEBINFO} /LTCG")
+    string(REGEX REPLACE "[-/]INCREMENTAL" "/INCREMENTAL:NO" CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO "${CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO}")
+    set(CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO "${CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO} /LTCG")
+    string(REGEX REPLACE "[-/]INCREMENTAL" "/INCREMENTAL:NO" CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO "${CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO}")
+    set(CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO "${CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO} /LTCG")
+
+    set(CMAKE_CXX_FLAGS_MINSIZEREL "${CMAKE_CXX_FLAGS_MINSIZEREL} /GL")
+    set(CMAKE_STATIC_LINKER_FLAGS_MINSIZEREL "${CMAKE_STATIC_LINKER_FLAGS_MINSIZEREL} /LTCG")
+    set(CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL "${CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL} /LTCG")
+    set(CMAKE_EXE_LINKER_FLAGS_MINSIZEREL "${CMAKE_EXE_LINKER_FLAGS_MINSIZEREL} /LTCG")
+  endif()
+else()
+  # Try and enable C++11. Don't use C++14 because it doesn't work in some
+  # configurations.
+  add_cxx_compiler_flag(-std=c++11)
+  if (NOT HAVE_CXX_FLAG_STD_CXX11)
+    add_cxx_compiler_flag(-std=c++0x)
+  endif()
+
+  # Turn compiler warnings up to 11
+  add_cxx_compiler_flag(-Wall)
+  add_cxx_compiler_flag(-Wextra)
+  add_cxx_compiler_flag(-Wshadow)
+  add_cxx_compiler_flag(-Werror RELEASE)
+  add_cxx_compiler_flag(-Werror RELWITHDEBINFO)
+  add_cxx_compiler_flag(-Werror MINSIZEREL)
+  add_cxx_compiler_flag(-pedantic)
+  add_cxx_compiler_flag(-pedantic-errors)
+  add_cxx_compiler_flag(-Wshorten-64-to-32)
+  add_cxx_compiler_flag(-fstrict-aliasing)
+  # Disable warnings regarding deprecated parts of the library while building
+  # and testing those parts of the library.
+  add_cxx_compiler_flag(-Wno-deprecated-declarations)
+  if (CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
+    # Intel silently ignores '-Wno-deprecated-declarations',
+    # warning no. 1786 must be explicitly disabled.
+    # See #631 for rationale.
+    add_cxx_compiler_flag(-wd1786)
+  endif()
+  # Disable deprecation warnings for release builds (when -Werror is enabled).
+  add_cxx_compiler_flag(-Wno-deprecated RELEASE)
+  add_cxx_compiler_flag(-Wno-deprecated RELWITHDEBINFO)
+  add_cxx_compiler_flag(-Wno-deprecated MINSIZEREL)
+  if (NOT BENCHMARK_ENABLE_EXCEPTIONS)
+    add_cxx_compiler_flag(-fno-exceptions)
+  endif()
+
+  if (HAVE_CXX_FLAG_FSTRICT_ALIASING)
+    if (NOT CMAKE_CXX_COMPILER_ID STREQUAL "Intel") #ICC17u2: Many false positives for Wstrict-aliasing
+      add_cxx_compiler_flag(-Wstrict-aliasing)
+    endif()
+  endif()
+  # ICC17u2: overloaded virtual function "benchmark::Fixture::SetUp" is only partially overridden
+  # (because of deprecated overload)
+  add_cxx_compiler_flag(-wd654)
+  add_cxx_compiler_flag(-Wthread-safety)
+  if (HAVE_CXX_FLAG_WTHREAD_SAFETY)
+    cxx_feature_check(THREAD_SAFETY_ATTRIBUTES)
+  endif()
+
+  # On most UNIX like platforms g++ and clang++ define _GNU_SOURCE as a
+  # predefined macro, which turns on all of the wonderful libc extensions.
+  # However, g++ doesn't do this in Cygwin, so we have to define it ourselves
+  # since we depend on GNU/POSIX/BSD extensions.
+  if (CYGWIN)
+    add_definitions(-D_GNU_SOURCE=1)
+  endif()
+
+  if (QNXNTO)
+    add_definitions(-D_QNX_SOURCE)
+  endif()
+
+  # Link time optimisation
+  if (BENCHMARK_ENABLE_LTO)
+    add_cxx_compiler_flag(-flto)
+    if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
+      find_program(GCC_AR gcc-ar)
+      if (GCC_AR)
+        set(CMAKE_AR ${GCC_AR})
+      endif()
+      find_program(GCC_RANLIB gcc-ranlib)
+      if (GCC_RANLIB)
+        set(CMAKE_RANLIB ${GCC_RANLIB})
+      endif()
+    elseif("${CMAKE_CXX_COMPILER_ID}" MATCHES "Clang")
+      include(llvm-toolchain)
+    endif()
+  endif()
+
+  # Coverage build type
+  set(BENCHMARK_CXX_FLAGS_COVERAGE "${CMAKE_CXX_FLAGS_DEBUG}"
+    CACHE STRING "Flags used by the C++ compiler during coverage builds."
+    FORCE)
+  set(BENCHMARK_EXE_LINKER_FLAGS_COVERAGE "${CMAKE_EXE_LINKER_FLAGS_DEBUG}"
+    CACHE STRING "Flags used for linking binaries during coverage builds."
+    FORCE)
+  set(BENCHMARK_SHARED_LINKER_FLAGS_COVERAGE "${CMAKE_SHARED_LINKER_FLAGS_DEBUG}"
+    CACHE STRING "Flags used by the shared libraries linker during coverage builds."
+    FORCE)
+  mark_as_advanced(
+    BENCHMARK_CXX_FLAGS_COVERAGE
+    BENCHMARK_EXE_LINKER_FLAGS_COVERAGE
+    BENCHMARK_SHARED_LINKER_FLAGS_COVERAGE)
+  set(CMAKE_BUILD_TYPE "${CMAKE_BUILD_TYPE}" CACHE STRING
+    "Choose the type of build, options are: None Debug Release RelWithDebInfo MinSizeRel Coverage.")
+  add_cxx_compiler_flag(--coverage COVERAGE)
+endif()
+
+if (BENCHMARK_USE_LIBCXX)
+  if ("${CMAKE_CXX_COMPILER_ID}" MATCHES "Clang")
+    add_cxx_compiler_flag(-stdlib=libc++)
+  elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU" OR
+          "${CMAKE_CXX_COMPILER_ID}" STREQUAL "Intel")
+    add_cxx_compiler_flag(-nostdinc++)
+    message(WARNING "libc++ header path must be manually specified using CMAKE_CXX_FLAGS")
+    # Adding -nodefaultlibs directly to CMAKE_<TYPE>_LINKER_FLAGS will break
+    # configuration checks such as 'find_package(Threads)'
+    list(APPEND BENCHMARK_CXX_LINKER_FLAGS -nodefaultlibs)
+    # -lc++ cannot be added directly to CMAKE_<TYPE>_LINKER_FLAGS because
+    # linker flags appear before all linker inputs and -lc++ must appear after.
+    list(APPEND BENCHMARK_CXX_LIBRARIES c++)
+  else()
+    message(FATAL_ERROR "-DBENCHMARK_USE_LIBCXX:BOOL=ON is not supported for this compiler")
+  endif()
+endif(BENCHMARK_USE_LIBCXX)
+
+# C++ feature checks
+# Determine the correct regular expression engine to use
+cxx_feature_check(STD_REGEX)
+cxx_feature_check(GNU_POSIX_REGEX)
+cxx_feature_check(POSIX_REGEX)
+if(NOT HAVE_STD_REGEX AND NOT HAVE_GNU_POSIX_REGEX AND NOT HAVE_POSIX_REGEX)
+  message(FATAL_ERROR "Failed to determine the source files for the regular expression backend")
+endif()
+if (NOT BENCHMARK_ENABLE_EXCEPTIONS AND HAVE_STD_REGEX
+        AND NOT HAVE_GNU_POSIX_REGEX AND NOT HAVE_POSIX_REGEX)
+  message(WARNING "Using std::regex with exceptions disabled is not fully supported")
+endif()
+cxx_feature_check(STEADY_CLOCK)
+# Ensure we have pthreads
+set(THREADS_PREFER_PTHREAD_FLAG ON)
+find_package(Threads REQUIRED)
+
+# Set up directories
+include_directories(${PROJECT_SOURCE_DIR}/include)
+
+# Build the targets
+add_subdirectory(src)
+
+if (BENCHMARK_ENABLE_TESTING)
+  enable_testing()
+  if (BENCHMARK_ENABLE_GTEST_TESTS AND
+      NOT (TARGET gtest AND TARGET gtest_main AND
+           TARGET gmock AND TARGET gmock_main))
+    include(GoogleTest)
+  endif()
+  add_subdirectory(test)
+endif()
diff --git a/src/third_party/google_benchmark/CONTRIBUTING.md b/src/third_party/google_benchmark/CONTRIBUTING.md
new file mode 100644
index 0000000..43de4c9
--- /dev/null
+++ b/src/third_party/google_benchmark/CONTRIBUTING.md
@@ -0,0 +1,58 @@
+# How to contribute #
+
+We'd love to accept your patches and contributions to this project.  There are
+just a few small guidelines you need to follow.
+
+
+## Contributor License Agreement ##
+
+Contributions to any Google project must be accompanied by a Contributor
+License Agreement.  This is not a copyright **assignment**; it simply gives
+Google permission to use and redistribute your contributions as part of the
+project.
+
+  * If you are an individual writing original source code and you're sure you
+    own the intellectual property, then you'll need to sign an [individual
+    CLA][].
+
+  * If you work for a company that wants to allow you to contribute your work,
+    then you'll need to sign a [corporate CLA][].
+
+You generally only need to submit a CLA once, so if you've already submitted
+one (even if it was for a different project), you probably don't need to do it
+again.
+
+[individual CLA]: https://developers.google.com/open-source/cla/individual
+[corporate CLA]: https://developers.google.com/open-source/cla/corporate
+
+Once your CLA is submitted (or if you already submitted one for
+another Google project), make a commit adding yourself to the
+[AUTHORS][] and [CONTRIBUTORS][] files. This commit can be part
+of your first [pull request][].
+
+[AUTHORS]: AUTHORS
+[CONTRIBUTORS]: CONTRIBUTORS
+
+
+## Submitting a patch ##
+
+  1. It's generally best to start by opening a new issue describing the bug or
+     feature you're intending to fix.  Even if you think it's relatively minor,
+     it's helpful to know what people are working on.  Mention in the initial
+     issue that you are planning to work on that bug or feature so that it can
+     be assigned to you.
+
+  1. Follow the normal process of [forking][] the project, and set up a new
+     branch to work in.  It's important that each group of changes be done in
+     separate branches in order to ensure that a pull request only includes the
+     commits related to that bug or feature.
+
+  1. Do your best to have [well-formed commit messages][] for each change.
+     This provides consistency throughout the project and ensures that commit
+     messages can be formatted properly by various git tools.
+
+  1. Finally, push the commits to your fork and submit a [pull request][].
+
+[forking]: https://help.github.com/articles/fork-a-repo
+[well-formed commit messages]: http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html
+[pull request]: https://help.github.com/articles/creating-a-pull-request
diff --git a/src/third_party/google_benchmark/CONTRIBUTORS b/src/third_party/google_benchmark/CONTRIBUTORS
new file mode 100644
index 0000000..6b64a00
--- /dev/null
+++ b/src/third_party/google_benchmark/CONTRIBUTORS
@@ -0,0 +1,76 @@
+# People who have agreed to one of the CLAs and can contribute patches.
+# The AUTHORS file lists the copyright holders; this file
+# lists people.  For example, Google employees are listed here
+# but not in AUTHORS, because Google holds the copyright.
+#
+# Names should be added to this file only after verifying that
+# the individual or the individual's organization has agreed to
+# the appropriate Contributor License Agreement, found here:
+#
+# https://developers.google.com/open-source/cla/individual
+# https://developers.google.com/open-source/cla/corporate
+#
+# The agreement for individuals can be filled out on the web.
+#
+# When adding J Random Contributor's name to this file,
+# either J's name or J's organization's name should be
+# added to the AUTHORS file, depending on whether the
+# individual or corporate CLA was used.
+#
+# Names should be added to this file as:
+#     Name <email address>
+#
+# Please keep the list sorted.
+
+Albert Pretorius <pretoalb@gmail.com>
+Alex Steele <steelal123@gmail.com>
+Andriy Berestovskyy <berestovskyy@gmail.com>
+Arne Beer <arne@twobeer.de>
+Billy Robert O'Neal III <billy.oneal@gmail.com> <bion@microsoft.com>
+Chris Kennelly <ckennelly@google.com> <ckennelly@ckennelly.com>
+Christopher Seymour <chris.j.seymour@hotmail.com>
+Colin Braley <braley.colin@gmail.com>
+Cyrille Faucheux <cyrille.faucheux@gmail.com>
+Daniel Harvey <danielharvey458@gmail.com>
+David Coeurjolly <david.coeurjolly@liris.cnrs.fr>
+Deniz Evrenci <denizevrenci@gmail.com>
+Dominic Hamon <dma@stripysock.com> <dominic@google.com>
+Dominik Czarnota <dominik.b.czarnota@gmail.com>
+Eric Backus <eric_backus@alum.mit.edu>
+Eric Fiselier <eric@efcs.ca>
+Eugene Zhuk <eugene.zhuk@gmail.com>
+Evgeny Safronov <division494@gmail.com>
+Federico Ficarelli <federico.ficarelli@gmail.com>
+Felix Homann <linuxaudio@showlabor.de>
+Geoffrey Martin-Noble <gcmn@google.com> <gmngeoffrey@gmail.com>
+Hannes Hauswedell <h2@fsfe.org>
+Ismael Jimenez Martinez <ismael.jimenez.martinez@gmail.com>
+Jern-Kuan Leong <jernkuan@gmail.com>
+JianXiong Zhou <zhoujianxiong2@gmail.com>
+Joao Paulo Magalhaes <joaoppmagalhaes@gmail.com>
+John Millikin <jmillikin@stripe.com>
+Jussi Knuuttila <jussi.knuuttila@gmail.com>
+Kai Wolf <kai.wolf@gmail.com>
+Kaito Udagawa <umireon@gmail.com>
+Kishan Kumar <kumar.kishan@outlook.com>
+Lei Xu <eddyxu@gmail.com>
+Matt Clarkson <mattyclarkson@gmail.com>
+Maxim Vafin <maxvafin@gmail.com>
+Nick Hutchinson <nshutchinson@gmail.com>
+Oleksandr Sochka <sasha.sochka@gmail.com>
+Ori Livneh <ori.livneh@gmail.com>
+Pascal Leroy <phl@google.com>
+Paul Redmond <paul.redmond@gmail.com>
+Pierre Phaneuf <pphaneuf@google.com>
+Radoslav Yovchev <radoslav.tm@gmail.com>
+Raul Marin <rmrodriguez@cartodb.com>
+Ray Glover <ray.glover@uk.ibm.com>
+Robert Guo <robert.guo@mongodb.com>
+Roman Lebedev <lebedev.ri@gmail.com>
+Sayan Bhattacharjee <aero.sayan@gmail.com>
+Shuo Chen <chenshuo@chenshuo.com>
+Tobias Ulvgård <tobias.ulvgard@dirac.se>
+Tom Madams <tom.ej.madams@gmail.com> <tmadams@google.com>
+Yixuan Qiu <yixuanq@gmail.com>
+Yusuke Suzuki <utatane.tea@gmail.com>
+Zbigniew Skowron <zbychs@gmail.com>
diff --git a/src/third_party/google_benchmark/LICENSE b/src/third_party/google_benchmark/LICENSE
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/src/third_party/google_benchmark/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/src/third_party/google_benchmark/README.md b/src/third_party/google_benchmark/README.md
new file mode 100644
index 0000000..d972ab0
--- /dev/null
+++ b/src/third_party/google_benchmark/README.md
@@ -0,0 +1,1270 @@
+# Benchmark
+
+[![Build Status](https://travis-ci.org/google/benchmark.svg?branch=master)](https://travis-ci.org/google/benchmark)
+[![Build status](https://ci.appveyor.com/api/projects/status/u0qsyp7t1tk7cpxs/branch/master?svg=true)](https://ci.appveyor.com/project/google/benchmark/branch/master)
+[![Coverage Status](https://coveralls.io/repos/google/benchmark/badge.svg)](https://coveralls.io/r/google/benchmark)
+[![slackin](https://slackin-iqtfqnpzxd.now.sh/badge.svg)](https://slackin-iqtfqnpzxd.now.sh/)
+
+A library to benchmark code snippets, similar to unit tests. Example:
+
+```c++
+#include <benchmark/benchmark.h>
+
+static void BM_SomeFunction(benchmark::State& state) {
+  // Perform setup here
+  for (auto _ : state) {
+    // This code gets timed
+    SomeFunction();
+  }
+}
+// Register the function as a benchmark
+BENCHMARK(BM_SomeFunction);
+// Run the benchmark
+BENCHMARK_MAIN();
+```
+
+To get started, see [Requirements](#requirements) and
+[Installation](#installation). See [Usage](#usage) for a full example and the
+[User Guide](#user-guide) for a more comprehensive feature overview.
+
+It may also help to read the [Google Test documentation](https://github.com/google/googletest/blob/master/googletest/docs/primer.md)
+as some of the structural aspects of the APIs are similar.
+
+### Resources
+
+[Discussion group](https://groups.google.com/d/forum/benchmark-discuss)
+
+IRC channel: [freenode](https://freenode.net) #googlebenchmark
+
+[Additional Tooling Documentation](docs/tools.md)
+
+[Assembly Testing Documentation](docs/AssemblyTests.md)
+
+## Requirements
+
+The library can be used with C++03. However, it requires C++11 to build,
+including compiler and standard library support.
+
+The following minimum versions are required to build the library:
+
+* GCC 4.8
+* Clang 3.4
+* Visual Studio 14 2015
+* Intel 2015 Update 1
+
+See [Platform-Specific Build Instructions](#platform-specific-build-instructions).
+
+## Installation
+
+This describes the installation process using cmake. As prerequisites, you'll
+need git and cmake installed.
+
+_See [dependencies.md](dependencies.md) for more details regarding supported
+versions of build tools._
+
+```bash
+# Check out the library.
+$ git clone https://github.com/google/benchmark.git
+# Benchmark requires Google Test as a dependency. Add the source tree as a subdirectory.
+$ git clone https://github.com/google/googletest.git benchmark/googletest
+# Go to the library root directory
+$ cd benchmark
+# Make a build directory to place the build output.
+$ mkdir build && cd build
+# Generate a Makefile with cmake.
+# Use cmake -G <generator> to generate a different file type.
+$ cmake ../
+# Build the library.
+# Use make -j<number_of_parallel_jobs> to speed up the build process, e.g. make -j8 .
+$ make
+```
+This builds the `benchmark` and `benchmark_main` libraries and tests.
+On a Unix system, the build directory should now look something like this:
+
+```
+/benchmark
+  /build
+    /src
+      /libbenchmark.a
+      /libbenchmark_main.a
+    /test
+      ...
+```
+
+Next, you can run the tests to check the build.
+
+```bash
+$ make test
+```
+
+If you want to install the library globally, also run:
+
+```
+sudo make install
+```
+
+Note that Google Benchmark requires Google Test to build and run the tests. This
+dependency can be provided in two ways:
+
+* Check out the Google Test sources into `benchmark/googletest` as above.
+* Otherwise, if `-DBENCHMARK_DOWNLOAD_DEPENDENCIES=ON` is specified during
+  configuration, the library will automatically download and build any required
+  dependencies.
+
+If you do not wish to build and run the tests, add `-DBENCHMARK_ENABLE_GTEST_TESTS=OFF`
+to `CMAKE_ARGS`.
+
+### Debug vs Release
+
+By default, benchmark builds as a debug library. You will see a warning in the
+output when this is the case. To build it as a release library instead, use:
+
+```
+cmake -DCMAKE_BUILD_TYPE=Release
+```
+
+To enable link-time optimisation, use
+
+```
+cmake -DCMAKE_BUILD_TYPE=Release -DBENCHMARK_ENABLE_LTO=true
+```
+
+If you are using gcc, you might need to set `GCC_AR` and `GCC_RANLIB` cmake
+cache variables if autodetection fails.
+
+If you are using clang, you may need to set `LLVMAR_EXECUTABLE`,
+`LLVMNM_EXECUTABLE` and `LLVMRANLIB_EXECUTABLE` cmake cache variables.
+
+
+### Stable and Experimental Library Versions
+
+The main branch contains the latest stable version of the benchmarking library,
+whose API can be considered largely stable, with source-breaking changes
+made only upon the release of a new major version.
+
+Newer, experimental, features are implemented and tested on the
+[`v2` branch](https://github.com/google/benchmark/tree/v2). Users who wish
+to use, test, and provide feedback on the new features are encouraged to try
+this branch. However, this branch provides no stability guarantees and reserves
+the right to change and break the API at any time.
+
+## Usage
+
+### Basic usage
+
+Define a function that executes the code to measure, register it as a benchmark
+function using the `BENCHMARK` macro, and ensure an appropriate `main` function
+is available:
+
+```c++
+#include <benchmark/benchmark.h>
+
+static void BM_StringCreation(benchmark::State& state) {
+  for (auto _ : state)
+    std::string empty_string;
+}
+// Register the function as a benchmark
+BENCHMARK(BM_StringCreation);
+
+// Define another benchmark
+static void BM_StringCopy(benchmark::State& state) {
+  std::string x = "hello";
+  for (auto _ : state)
+    std::string copy(x);
+}
+BENCHMARK(BM_StringCopy);
+
+BENCHMARK_MAIN();
+```
+
+To run the benchmark, compile and link against the `benchmark` library
+(libbenchmark.a/.so). If you followed the build steps above, this
+library will be under the build directory you created.
+
+```bash
+# Example on linux after running the build steps above. Assumes the
+# `benchmark` and `build` directories are under the current directory.
+$ g++ mybenchmark.cc -std=c++11 -isystem benchmark/include \
+  -Lbenchmark/build/src -lbenchmark -lpthread -o mybenchmark
+```
+
+Alternatively, link against the `benchmark_main` library and remove
+`BENCHMARK_MAIN();` above to get the same behavior.
+
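+If you prefer an explicit entry point instead of the `BENCHMARK_MAIN();` macro,
+a minimal sketch (roughly what the macro expands to) looks like:
+
+```c++
+#include <benchmark/benchmark.h>
+
+int main(int argc, char** argv) {
+  benchmark::Initialize(&argc, argv);
+  if (benchmark::ReportUnrecognizedArguments(argc, argv)) return 1;
+  benchmark::RunSpecifiedBenchmarks();
+  return 0;
+}
+```
+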
+The compiled executable will run all benchmarks by default. Pass the `--help`
+flag for option information or see the guide below.
+
+## Platform Specific Build Instructions
+
+### Building with GCC
+
+When the library is built using GCC it is necessary to link with the pthread
+library due to how GCC implements `std::thread`. Failing to link to pthread will
+lead to runtime exceptions (unless you're using libc++), not linker errors. See
+[issue #67](https://github.com/google/benchmark/issues/67) for more details. You
+can link to pthread by adding `-pthread` to your linker command. Note, you can
+also use `-lpthread`, but there are potential issues with ordering of command
+line parameters if you use that.
+
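+As a sketch, the link command from the earlier example with `-pthread` in place
+of `-lpthread` (same file and directory names assumed):
+
+```bash
+# Prefer -pthread over -lpthread when building with GCC.
+$ g++ mybenchmark.cc -std=c++11 -isystem benchmark/include \
+  -Lbenchmark/build/src -lbenchmark -pthread -o mybenchmark
+```
+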
+### Building with Visual Studio 2015 or 2017
+
+The `shlwapi` library (`-lshlwapi`) is required to support a call to `CPUInfo` which reads the registry. Either add `shlwapi.lib` under `[ Configuration Properties > Linker > Input ]`, or use the following:
+
+```
+// Alternatively, can add libraries using linker options.
+#ifdef _WIN32
+#pragma comment ( lib, "Shlwapi.lib" )
+#ifdef _DEBUG
+#pragma comment ( lib, "benchmarkd.lib" )
+#else
+#pragma comment ( lib, "benchmark.lib" )
+#endif
+#endif
+```
+
+You can also use the graphical version of CMake:
+* Open `CMake GUI`.
+* Under `Where to build the binaries`, enter the source path plus `build`.
+* Under `CMAKE_INSTALL_PREFIX`, enter the source path plus `install`.
+* Click `Configure`, `Generate`, `Open Project`.
+* If the build fails, try deleting the entire build directory and starting again, or untick options to build less.
+
+### Building with Intel 2015 Update 1 or Intel System Studio Update 4
+
+See instructions for building with Visual Studio. Once built, right click on the solution and change the build to Intel.
+
+### Building on Solaris
+
+If you're running benchmarks on Solaris, you'll want the kstat library linked in
+too (`-lkstat`).
+
+## User Guide
+
+### Command Line
+
+[Output Formats](#output-formats)
+
+[Output Files](#output-files)
+
+[Running Benchmarks](#running-benchmarks)
+
+[Running a Subset of Benchmarks](#running-a-subset-of-benchmarks)
+
+[Result Comparison](#result-comparison)
+
+### Library
+
+[Runtime and Reporting Considerations](#runtime-and-reporting-considerations)
+
+[Passing Arguments](#passing-arguments)
+
+[Calculating Asymptotic Complexity](#asymptotic-complexity)
+
+[Templated Benchmarks](#templated-benchmarks)
+
+[Fixtures](#fixtures)
+
+[Custom Counters](#custom-counters)
+
+[Multithreaded Benchmarks](#multithreaded-benchmarks)
+
+[CPU Timers](#cpu-timers)
+
+[Manual Timing](#manual-timing)
+
+[Setting the Time Unit](#setting-the-time-unit)
+
+[Preventing Optimization](#preventing-optimization)
+
+[Reporting Statistics](#reporting-statistics)
+
+[Custom Statistics](#custom-statistics)
+
+[Using RegisterBenchmark](#using-register-benchmark)
+
+[Exiting with an Error](#exiting-with-an-error)
+
+[A Faster KeepRunning Loop](#a-faster-keep-running-loop)
+
+[Disabling CPU Frequency Scaling](#disabling-cpu-frequency-scaling)
+
+
+<a name="output-formats" />
+
+### Output Formats
+
+The library supports multiple output formats. Use the
+`--benchmark_format=<console|json|csv>` flag (or set the
+`BENCHMARK_FORMAT=<console|json|csv>` environment variable) to set
+the format type. `console` is the default format.
+
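+For example, assuming the `mybenchmark` binary built earlier:
+
+```bash
+# Emit JSON instead of the default console output.
+$ ./mybenchmark --benchmark_format=json
+```
+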
+The Console format is intended to be a human readable format. By default
+the format generates color output. Context is output on stderr and the
+tabular data on stdout. Example tabular output looks like:
+
+```
+Benchmark                               Time(ns)    CPU(ns) Iterations
+----------------------------------------------------------------------
+BM_SetInsert/1024/1                        28928      29349      23853  133.097kB/s   33.2742k items/s
+BM_SetInsert/1024/8                        32065      32913      21375  949.487kB/s   237.372k items/s
+BM_SetInsert/1024/10                       33157      33648      21431  1.13369MB/s   290.225k items/s
+```
+
+The JSON format outputs human readable json split into two top level attributes.
+The `context` attribute contains information about the run in general, including
+information about the CPU and the date.
+The `benchmarks` attribute contains a list of every benchmark run. Example json
+output looks like:
+
+```json
+{
+  "context": {
+    "date": "2015/03/17-18:40:25",
+    "num_cpus": 40,
+    "mhz_per_cpu": 2801,
+    "cpu_scaling_enabled": false,
+    "build_type": "debug"
+  },
+  "benchmarks": [
+    {
+      "name": "BM_SetInsert/1024/1",
+      "iterations": 94877,
+      "real_time": 29275,
+      "cpu_time": 29836,
+      "bytes_per_second": 134066,
+      "items_per_second": 33516
+    },
+    {
+      "name": "BM_SetInsert/1024/8",
+      "iterations": 21609,
+      "real_time": 32317,
+      "cpu_time": 32429,
+      "bytes_per_second": 986770,
+      "items_per_second": 246693
+    },
+    {
+      "name": "BM_SetInsert/1024/10",
+      "iterations": 21393,
+      "real_time": 32724,
+      "cpu_time": 33355,
+      "bytes_per_second": 1199226,
+      "items_per_second": 299807
+    }
+  ]
+}
+```
+
+The CSV format outputs comma-separated values. The `context` is output on stderr
+and the CSV itself on stdout. Example CSV output looks like:
+
+```
+name,iterations,real_time,cpu_time,bytes_per_second,items_per_second,label
+"BM_SetInsert/1024/1",65465,17890.7,8407.45,475768,118942,
+"BM_SetInsert/1024/8",116606,18810.1,9766.64,3.27646e+06,819115,
+"BM_SetInsert/1024/10",106365,17238.4,8421.53,4.74973e+06,1.18743e+06,
+```
+
+<a name="output-files" />
+
+### Output Files
+
+Write benchmark results to a file with the `--benchmark_out=<filename>` option
+(or set `BENCHMARK_OUT`). Specify the output format with
+`--benchmark_out_format={json|console|csv}` (or set
+`BENCHMARK_OUT_FORMAT={json|console|csv}`). Note that specifying
+`--benchmark_out` does not suppress the console output.
+
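+For example (again assuming the `mybenchmark` binary from the build steps above):
+
+```bash
+# Write JSON results to a file while still printing the console report.
+$ ./mybenchmark --benchmark_out=results.json --benchmark_out_format=json
+```
+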
+<a name="running-benchmarks" />
+
+### Running Benchmarks
+
+Benchmarks are executed by running the produced binaries. Benchmark binaries,
+by default, accept options that may be specified either through their command
+line interface or by setting environment variables before execution. For every
+`--option_flag=<value>` CLI switch, a corresponding environment variable
+`OPTION_FLAG=<value>` exists and is used as the default if set (CLI switches
+always take precedence). A complete list of CLI options is available by running
+the benchmarks with the `--help` switch.
+
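+For instance, the following two invocations are equivalent, assuming the
+`mybenchmark` binary and the `BM_StringCreation` benchmark from the earlier
+example (the CLI switch wins if both are present):
+
+```bash
+$ ./mybenchmark --benchmark_filter=BM_StringCreation
+$ BENCHMARK_FILTER=BM_StringCreation ./mybenchmark
+```
+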
+<a name="running-a-subset-of-benchmarks" />
+
+### Running a Subset of Benchmarks
+
+The `--benchmark_filter=<regex>` option (or `BENCHMARK_FILTER=<regex>`
+environment variable) can be used to only run the benchmarks that match
+the specified `<regex>`. For example:
+
+```bash
+$ ./run_benchmarks.x --benchmark_filter=BM_memcpy/32
+Run on (1 X 2300 MHz CPU )
+2016-06-25 19:34:24
+Benchmark              Time           CPU Iterations
+----------------------------------------------------
+BM_memcpy/32          11 ns         11 ns   79545455
+BM_memcpy/32k       2181 ns       2185 ns     324074
+BM_memcpy/32          12 ns         12 ns   54687500
+BM_memcpy/32k       1834 ns       1837 ns     357143
+```
+
+<a name="result-comparison" />
+
+### Result Comparison
+
+It is possible to compare benchmarking results.
+See the [Additional Tooling Documentation](docs/tools.md).
+
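+As a rough sketch only (the file names here are hypothetical and the
+authoritative usage is in docs/tools.md), the repository's `tools/compare.py`
+script can compare two result files produced with `--benchmark_out`:
+
+```bash
+# Hypothetical baseline/contender files; see docs/tools.md for details.
+$ tools/compare.py benchmarks baseline.json contender.json
+```
+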
+<a name="runtime-and-reporting-considerations" />
+
+### Runtime and Reporting Considerations
+
+When the benchmark binary is executed, each benchmark function is run serially.
+The number of iterations to run is determined dynamically by running the
+benchmark a few times, measuring the time taken, and ensuring that the
+ultimate result will be statistically stable. As such, faster benchmark
+functions will be run for more iterations than slower benchmark functions, and
+the number of iterations is thus reported.
+
+In all cases, the number of iterations for which the benchmark is run is
+governed by the amount of time the benchmark takes. Concretely, the number of
+iterations is at least one and at most 1e9; iteration stops once the CPU time
+exceeds the minimum time, or the wallclock time reaches 5x the minimum time.
+The minimum time is set per benchmark by calling `MinTime` on the registered
+benchmark object.
+
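+As an aside not covered above, the minimum time can also be overridden globally
+on the command line; a sketch, assuming the `--benchmark_min_time` flag with a
+value in seconds:
+
+```bash
+# Assumed flag: raise the minimum measurement time to 2 seconds for all benchmarks.
+$ ./mybenchmark --benchmark_min_time=2
+```
+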
+Average timings are then reported over the iterations run. If multiple
+repetitions are requested using the `--benchmark_repetitions` command-line
+option, or at registration time, the benchmark function will be run several
+times and statistical results across these repetitions will also be reported.
+
+As well as the per-benchmark entries, a preamble in the report will include
+information about the machine on which the benchmarks are run.
+
+<a name="passing-arguments" />
+
+### Passing Arguments
+
+Sometimes a family of benchmarks can be implemented with just one routine that
+takes an extra argument to specify which one of the family of benchmarks to
+run. For example, the following code defines a family of benchmarks for
+measuring the speed of `memcpy()` calls of different lengths:
+
+```c++
+static void BM_memcpy(benchmark::State& state) {
+  char* src = new char[state.range(0)];
+  char* dst = new char[state.range(0)];
+  memset(src, 'x', state.range(0));
+  for (auto _ : state)
+    memcpy(dst, src, state.range(0));
+  state.SetBytesProcessed(int64_t(state.iterations()) *
+                          int64_t(state.range(0)));
+  delete[] src;
+  delete[] dst;
+}
+BENCHMARK(BM_memcpy)->Arg(8)->Arg(64)->Arg(512)->Arg(1<<10)->Arg(8<<10);
+```
+
+The preceding code is quite repetitive, and can be replaced with the following
+short-hand. The following invocation will pick a few appropriate arguments in
+the specified range and will generate a benchmark for each such argument.
+
+```c++
+BENCHMARK(BM_memcpy)->Range(8, 8<<10);
+```
+
+By default the arguments in the range are generated in multiples of eight and
+the command above selects [ 8, 64, 512, 4k, 8k ]. In the following code the
+range multiplier is changed to multiples of two.
+
+```c++
+BENCHMARK(BM_memcpy)->RangeMultiplier(2)->Range(8, 8<<10);
+```
+
+Now arguments generated are [ 8, 16, 32, 64, 128, 256, 512, 1024, 2k, 4k, 8k ].
+
+The preceding code shows a method of defining a sparse range.  The following
+example shows a method of defining a dense range. It is then used to benchmark
+the performance of `std::vector` initialization for uniformly increasing sizes.
+
+```c++
+static void BM_DenseRange(benchmark::State& state) {
+  for(auto _ : state) {
+    std::vector<int> v(state.range(0), state.range(0));
+    benchmark::DoNotOptimize(v.data());
+    benchmark::ClobberMemory();
+  }
+}
+BENCHMARK(BM_DenseRange)->DenseRange(0, 1024, 128);
+```
+
+Now arguments generated are [ 0, 128, 256, 384, 512, 640, 768, 896, 1024 ].
+
+You might have a benchmark that depends on two or more inputs. For example, the
+following code defines a family of benchmarks for measuring the speed of set
+insertion.
+
+```c++
+static void BM_SetInsert(benchmark::State& state) {
+  std::set<int> data;
+  for (auto _ : state) {
+    state.PauseTiming();
+    data = ConstructRandomSet(state.range(0));
+    state.ResumeTiming();
+    for (int j = 0; j < state.range(1); ++j)
+      data.insert(RandomNumber());
+  }
+}
+BENCHMARK(BM_SetInsert)
+    ->Args({1<<10, 128})
+    ->Args({2<<10, 128})
+    ->Args({4<<10, 128})
+    ->Args({8<<10, 128})
+    ->Args({1<<10, 512})
+    ->Args({2<<10, 512})
+    ->Args({4<<10, 512})
+    ->Args({8<<10, 512});
+```
+
+The preceding code is quite repetitive, and can be replaced with the following
+short-hand. The following macro will pick a few appropriate arguments in the
+product of the two specified ranges and will generate a benchmark for each such
+pair.
+
+```c++
+BENCHMARK(BM_SetInsert)->Ranges({{1<<10, 8<<10}, {128, 512}});
+```
+
+For more complex patterns of inputs, passing a custom function to `Apply` allows
+programmatic specification of an arbitrary set of arguments on which to run the
+benchmark. The following example enumerates a dense range on one parameter,
+and a sparse range on the second.
+
+```c++
+static void CustomArguments(benchmark::internal::Benchmark* b) {
+  for (int i = 0; i <= 10; ++i)
+    for (int j = 32; j <= 1024*1024; j *= 8)
+      b->Args({i, j});
+}
+BENCHMARK(BM_SetInsert)->Apply(CustomArguments);
+```
+
+#### Passing Arbitrary Arguments to a Benchmark
+
+In C++11 it is possible to define a benchmark that takes an arbitrary number
+of extra arguments. The `BENCHMARK_CAPTURE(func, test_case_name, ...args)`
+macro creates a benchmark that invokes `func`  with the `benchmark::State` as
+the first argument followed by the specified `args...`.
+The `test_case_name` is appended to the name of the benchmark and
+should describe the values passed.
+
+```c++
+template <class ...ExtraArgs>
+void BM_takes_args(benchmark::State& state, ExtraArgs&&... extra_args) {
+  [...]
+}
+// Registers a benchmark named "BM_takes_args/int_string_test" that passes
+// the specified values to `extra_args`.
+BENCHMARK_CAPTURE(BM_takes_args, int_string_test, 42, std::string("abc"));
+```
+
+Note that elements of `...args` may refer to global variables. Users should
+avoid modifying global state inside of a benchmark.
+
+<a name="asymptotic-complexity" />
+
+### Calculating Asymptotic Complexity (Big O)
+
+Asymptotic complexity might be calculated for a family of benchmarks. The
+following code will calculate the coefficient for the high-order term in the
+running time and the normalized root-mean square error of string comparison.
+
+```c++
+static void BM_StringCompare(benchmark::State& state) {
+  std::string s1(state.range(0), '-');
+  std::string s2(state.range(0), '-');
+  for (auto _ : state) {
+    benchmark::DoNotOptimize(s1.compare(s2));
+  }
+  state.SetComplexityN(state.range(0));
+}
+BENCHMARK(BM_StringCompare)
+    ->RangeMultiplier(2)->Range(1<<10, 1<<18)->Complexity(benchmark::oN);
+```
+
+As shown in the following invocation, asymptotic complexity might also be
+calculated automatically.
+
+```c++
+BENCHMARK(BM_StringCompare)
+    ->RangeMultiplier(2)->Range(1<<10, 1<<18)->Complexity();
+```
+
+The following code will specify asymptotic complexity with a lambda function
+that can be used to customize the high-order term calculation.
+
+```c++
+BENCHMARK(BM_StringCompare)->RangeMultiplier(2)
+    ->Range(1<<10, 1<<18)->Complexity([](int64_t n)->double{return n; });
+```
+
+<a name="templated-benchmarks" />
+
+### Templated Benchmarks
+
+This example produces and consumes messages of size `sizeof(v)` `state.range(0)`
+times. It also outputs throughput in the absence of multiprogramming.
+
+```c++
+template <class Q> void BM_Sequential(benchmark::State& state) {
+  Q q;
+  typename Q::value_type v;
+  for (auto _ : state) {
+    for (int i = state.range(0); i--; )
+      q.push(v);
+    for (int e = state.range(0); e--; )
+      q.Wait(&v);
+  }
+  // actually messages, not bytes:
+  state.SetBytesProcessed(
+      static_cast<int64_t>(state.iterations())*state.range(0));
+}
+BENCHMARK_TEMPLATE(BM_Sequential, WaitQueue<int>)->Range(1<<0, 1<<10);
+```
+
+Three macros are provided for adding benchmark templates.
+
+```c++
+#ifdef BENCHMARK_HAS_CXX11
+#define BENCHMARK_TEMPLATE(func, ...) // Takes any number of parameters.
+#else // C++ < C++11
+#define BENCHMARK_TEMPLATE(func, arg1)
+#endif
+#define BENCHMARK_TEMPLATE1(func, arg1)
+#define BENCHMARK_TEMPLATE2(func, arg1, arg2)
+```
+
+<a name="fixtures" />
+
+### Fixtures
+
+Fixture tests are created by first defining a type that derives from
+`::benchmark::Fixture` and then creating/registering the tests using the
+following macros:
+
+* `BENCHMARK_F(ClassName, Method)`
+* `BENCHMARK_DEFINE_F(ClassName, Method)`
+* `BENCHMARK_REGISTER_F(ClassName, Method)`
+
+For example:
+
+```c++
+class MyFixture : public benchmark::Fixture {
+public:
+  void SetUp(const ::benchmark::State& state) {
+  }
+
+  void TearDown(const ::benchmark::State& state) {
+  }
+};
+
+BENCHMARK_F(MyFixture, FooTest)(benchmark::State& st) {
+   for (auto _ : st) {
+     ...
+  }
+}
+
+BENCHMARK_DEFINE_F(MyFixture, BarTest)(benchmark::State& st) {
+   for (auto _ : st) {
+     ...
+  }
+}
+/* BarTest is NOT registered */
+BENCHMARK_REGISTER_F(MyFixture, BarTest)->Threads(2);
+/* BarTest is now registered */
+```
+
+#### Templated Fixtures
+
+You can also create templated fixtures by using the following macros:
+
+* `BENCHMARK_TEMPLATE_F(ClassName, Method, ...)`
+* `BENCHMARK_TEMPLATE_DEFINE_F(ClassName, Method, ...)`
+
+For example:
+
+```c++
+template<typename T>
+class MyFixture : public benchmark::Fixture {};
+
+BENCHMARK_TEMPLATE_F(MyFixture, IntTest, int)(benchmark::State& st) {
+   for (auto _ : st) {
+     ...
+  }
+}
+
+BENCHMARK_TEMPLATE_DEFINE_F(MyFixture, DoubleTest, double)(benchmark::State& st) {
+   for (auto _ : st) {
+     ...
+  }
+}
+
+BENCHMARK_REGISTER_F(MyFixture, DoubleTest)->Threads(2);
+```
+
+<a name="custom-counters" />
+
+### Custom Counters
+
+You can add your own counters with user-defined names. The example below
+will add columns "Foo", "Bar" and "Baz" in its output:
+
+```c++
+static void UserCountersExample1(benchmark::State& state) {
+  double numFoos = 0, numBars = 0, numBazs = 0;
+  for (auto _ : state) {
+    // ... count Foo,Bar,Baz events
+  }
+  state.counters["Foo"] = numFoos;
+  state.counters["Bar"] = numBars;
+  state.counters["Baz"] = numBazs;
+}
+```
+
+The `state.counters` object is a `std::map` with `std::string` keys
+and `Counter` values. The latter is a `double`-like class, via an implicit
+conversion to `double&`. Thus you can use all of the standard arithmetic
+assignment operators (`=,+=,-=,*=,/=`) to change the value of each counter.
+
+In multithreaded benchmarks, each counter is set on the calling thread only.
+When the benchmark finishes, the counters from each thread will be summed;
+the resulting sum is the value which will be shown for the benchmark.
+
+The `Counter` constructor accepts three parameters: the value as a `double`;
+a bit flag which allows you to show counters as rates, and/or as per-thread
+iteration, and/or as per-thread averages, and/or iteration invariants,
+and/or finally inverting the result; and a flag specifying the 'unit' - i.e.
+is 1k a 1000 (default, `benchmark::Counter::OneK::kIs1000`), or 1024
+(`benchmark::Counter::OneK::kIs1024`)?
+
+```c++
+  // sets a simple counter
+  state.counters["Foo"] = numFoos;
+
+  // Set the counter as a rate. It will be presented divided
+  // by the duration of the benchmark.
+  // Meaning: per one second, how many 'foo's are processed?
+  state.counters["FooRate"] = Counter(numFoos, benchmark::Counter::kIsRate);
+
+  // Set the counter as a rate. It will be presented divided
+  // by the duration of the benchmark, and the result inverted.
+  // Meaning: how many seconds it takes to process one 'foo'?
+  state.counters["FooInvRate"] = Counter(numFoos, benchmark::Counter::kIsRate | benchmark::Counter::kInvert);
+
+  // Set the counter as a thread-average quantity. It will
+  // be presented divided by the number of threads.
+  state.counters["FooAvg"] = Counter(numFoos, benchmark::Counter::kAvgThreads);
+
+  // There's also a combined flag:
+  state.counters["FooAvgRate"] = Counter(numFoos,benchmark::Counter::kAvgThreadsRate);
+
+  // This says that we process with the rate of state.range(0) bytes every iteration:
+  state.counters["BytesProcessed"] = Counter(state.range(0), benchmark::Counter::kIsIterationInvariantRate, benchmark::Counter::OneK::kIs1024);
+```
+
+When you're compiling in C++11 mode or later you can use `insert()` with
+`std::initializer_list`:
+
+```c++
+  // With C++11, this can be done:
+  state.counters.insert({{"Foo", numFoos}, {"Bar", numBars}, {"Baz", numBazs}});
+  // ... instead of:
+  state.counters["Foo"] = numFoos;
+  state.counters["Bar"] = numBars;
+  state.counters["Baz"] = numBazs;
+```
+
+#### Counter Reporting
+
+When using the console reporter, by default, user counters are printed at
+the end after the table, the same way as ``bytes_processed`` and
+``items_processed``. This is best for cases in which there are few counters,
+or where there are only a couple of lines per benchmark. Here's an example of
+the default output:
+
+```
+------------------------------------------------------------------------------
+Benchmark                        Time           CPU Iterations UserCounters...
+------------------------------------------------------------------------------
+BM_UserCounter/threads:8      2248 ns      10277 ns      68808 Bar=16 Bat=40 Baz=24 Foo=8
+BM_UserCounter/threads:1      9797 ns       9788 ns      71523 Bar=2 Bat=5 Baz=3 Foo=1024m
+BM_UserCounter/threads:2      4924 ns       9842 ns      71036 Bar=4 Bat=10 Baz=6 Foo=2
+BM_UserCounter/threads:4      2589 ns      10284 ns      68012 Bar=8 Bat=20 Baz=12 Foo=4
+BM_UserCounter/threads:8      2212 ns      10287 ns      68040 Bar=16 Bat=40 Baz=24 Foo=8
+BM_UserCounter/threads:16     1782 ns      10278 ns      68144 Bar=32 Bat=80 Baz=48 Foo=16
+BM_UserCounter/threads:32     1291 ns      10296 ns      68256 Bar=64 Bat=160 Baz=96 Foo=32
+BM_UserCounter/threads:4      2615 ns      10307 ns      68040 Bar=8 Bat=20 Baz=12 Foo=4
+BM_Factorial                    26 ns         26 ns   26608979 40320
+BM_Factorial/real_time          26 ns         26 ns   26587936 40320
+BM_CalculatePiRange/1           16 ns         16 ns   45704255 0
+BM_CalculatePiRange/8           73 ns         73 ns    9520927 3.28374
+BM_CalculatePiRange/64         609 ns        609 ns    1140647 3.15746
+BM_CalculatePiRange/512       4900 ns       4901 ns     142696 3.14355
+```
+
+If this doesn't suit you, you can print each counter as a table column by
+passing the flag `--benchmark_counters_tabular=true` to the benchmark
+application. This is best for cases in which there are a lot of counters, or
+a lot of lines per individual benchmark. Note that this will trigger a
+reprinting of the table header any time the counter set changes between
+individual benchmarks. Here's an example of corresponding output when
+`--benchmark_counters_tabular=true` is passed:
+
+```
+---------------------------------------------------------------------------------------
+Benchmark                        Time           CPU Iterations    Bar   Bat   Baz   Foo
+---------------------------------------------------------------------------------------
+BM_UserCounter/threads:8      2198 ns       9953 ns      70688     16    40    24     8
+BM_UserCounter/threads:1      9504 ns       9504 ns      73787      2     5     3     1
+BM_UserCounter/threads:2      4775 ns       9550 ns      72606      4    10     6     2
+BM_UserCounter/threads:4      2508 ns       9951 ns      70332      8    20    12     4
+BM_UserCounter/threads:8      2055 ns       9933 ns      70344     16    40    24     8
+BM_UserCounter/threads:16     1610 ns       9946 ns      70720     32    80    48    16
+BM_UserCounter/threads:32     1192 ns       9948 ns      70496     64   160    96    32
+BM_UserCounter/threads:4      2506 ns       9949 ns      70332      8    20    12     4
+--------------------------------------------------------------
+Benchmark                        Time           CPU Iterations
+--------------------------------------------------------------
+BM_Factorial                    26 ns         26 ns   26392245 40320
+BM_Factorial/real_time          26 ns         26 ns   26494107 40320
+BM_CalculatePiRange/1           15 ns         15 ns   45571597 0
+BM_CalculatePiRange/8           74 ns         74 ns    9450212 3.28374
+BM_CalculatePiRange/64         595 ns        595 ns    1173901 3.15746
+BM_CalculatePiRange/512       4752 ns       4752 ns     147380 3.14355
+BM_CalculatePiRange/4k       37970 ns      37972 ns      18453 3.14184
+BM_CalculatePiRange/32k     303733 ns     303744 ns       2305 3.14162
+BM_CalculatePiRange/256k   2434095 ns    2434186 ns        288 3.1416
+BM_CalculatePiRange/1024k  9721140 ns    9721413 ns         71 3.14159
+BM_CalculatePi/threads:8      2255 ns       9943 ns      70936
+```
+
+Note above the additional header printed when the benchmark changes from
+``BM_UserCounter`` to ``BM_Factorial``. This is because ``BM_Factorial`` does
+not have the same counter set as ``BM_UserCounter``.
+
+<a name="multithreaded-benchmarks"/>
+
+### Multithreaded Benchmarks
+
+In a multithreaded test (benchmark invoked by multiple threads simultaneously),
+it is guaranteed that none of the threads will start until all have reached
+the start of the benchmark loop, and all will have finished before any thread
+exits the benchmark loop. (This behavior is also provided by the `KeepRunning()`
+API.) As such, any global setup or teardown can be wrapped in a check against
+the thread index:
+
+```c++
+static void BM_MultiThreaded(benchmark::State& state) {
+  if (state.thread_index == 0) {
+    // Setup code here.
+  }
+  for (auto _ : state) {
+    // Run the test as normal.
+  }
+  if (state.thread_index == 0) {
+    // Teardown code here.
+  }
+}
+BENCHMARK(BM_MultiThreaded)->Threads(2);
+```
+
+If the benchmarked code itself uses threads and you want to compare it to
+single-threaded code, you may want to use real-time ("wallclock") measurements
+for latency comparisons:
+
+```c++
+BENCHMARK(BM_test)->Range(8, 8<<10)->UseRealTime();
+```
+
+Without `UseRealTime`, CPU time is used by default.
+
+<a name="cpu-timers" />
+
+### CPU Timers
+
+By default, the CPU timer only measures the time spent by the main thread.
+If the benchmark itself uses threads internally, this measurement may not
+be what you are looking for. Instead, there is a way to measure the total
+CPU usage of the process, by all the threads.
+
+```c++
+void callee(int i);
+
+static void MyMain(int size) {
+#pragma omp parallel for
+  for(int i = 0; i < size; i++)
+    callee(i);
+}
+
+static void BM_OpenMP(benchmark::State& state) {
+  for (auto _ : state)
+    MyMain(state.range(0));
+}
+
+// Measure the time spent by the main thread, use it to decide for how long to
+// run the benchmark loop. Depending on the internal implementation, this may
+// measure anywhere from near-zero (the overhead spent before/after work
+// handoff to worker thread[s]) to the whole single-thread time.
+BENCHMARK(BM_OpenMP)->Range(8, 8<<10);
+
+// Measure the user-visible time, the wall clock (literally, the time that
+// has passed on the clock on the wall), use it to decide for how long to
+// run the benchmark loop. This will always be meaningful, and will match the
+// time spent by the main thread in the single-threaded case; in general it
+// decreases with the number of internal threads doing the work.
+BENCHMARK(BM_OpenMP)->Range(8, 8<<10)->UseRealTime();
+
+// Measure the total CPU consumption, use it to decide for how long to
+// run the benchmark loop. This will always measure to no less than the
+// time spent by the main thread in single-threaded case.
+BENCHMARK(BM_OpenMP)->Range(8, 8<<10)->MeasureProcessCPUTime();
+
+// A mixture of the last two. Measure the total CPU consumption, but use the
+// wall clock to decide for how long to run the benchmark loop.
+BENCHMARK(BM_OpenMP)->Range(8, 8<<10)->MeasureProcessCPUTime()->UseRealTime();
+```
+
+#### Controlling Timers
+
+Normally, the entire duration of the work loop (`for (auto _ : state) {}`)
+is measured. But sometimes, it is necessary to do some work inside of
+that loop, on every iteration, without counting that time towards the
+benchmark time. That is possible, although it is not recommended, since it
+has high overhead.
+
+```c++
+static void BM_SetInsert_With_Timer_Control(benchmark::State& state) {
+  std::set<int> data;
+  for (auto _ : state) {
+    state.PauseTiming(); // Stop timers. They will not count until they are resumed.
+    data = ConstructRandomSet(state.range(0)); // Do something that should not be measured
+    state.ResumeTiming(); // And resume timers. They are now counting again.
+    // The rest will be measured.
+    for (int j = 0; j < state.range(1); ++j)
+      data.insert(RandomNumber());
+  }
+}
+BENCHMARK(BM_SetInsert_With_Timer_Control)->Ranges({{1<<10, 8<<10}, {128, 512}});
+```
+
+<a name="manual-timing" />
+
+### Manual Timing
+
+For benchmarking something for which neither CPU time nor real-time are
+correct or accurate enough, completely manual timing is supported using
+the `UseManualTime` function.
+
+When `UseManualTime` is used, the benchmarked code must call
+`SetIterationTime` once per iteration of the benchmark loop to
+report the manually measured time.
+
+An example use case for this is benchmarking GPU execution (e.g. OpenCL
+or CUDA kernels, OpenGL or Vulkan or Direct3D draw calls), which cannot
+be accurately measured using CPU time or real-time. Instead, they can be
+measured accurately using a dedicated API, and these measurement results
+can be reported back with `SetIterationTime`.
+
+```c++
+static void BM_ManualTiming(benchmark::State& state) {
+  int microseconds = state.range(0);
+  std::chrono::duration<double, std::micro> sleep_duration {
+    static_cast<double>(microseconds)
+  };
+
+  for (auto _ : state) {
+    auto start = std::chrono::high_resolution_clock::now();
+    // Simulate some useful workload with a sleep
+    std::this_thread::sleep_for(sleep_duration);
+    auto end = std::chrono::high_resolution_clock::now();
+
+    auto elapsed_seconds =
+      std::chrono::duration_cast<std::chrono::duration<double>>(
+        end - start);
+
+    state.SetIterationTime(elapsed_seconds.count());
+  }
+}
+BENCHMARK(BM_ManualTiming)->Range(1, 1<<17)->UseManualTime();
+```
+
+<a name="setting-the-time-unit" />
+
+### Setting the Time Unit
+
+If a benchmark runs for a few milliseconds it may be hard to visually compare
+the measured times, since the output data is given in nanoseconds by default.
+To set the time unit manually, specify it on the registered benchmark:
+
+```c++
+BENCHMARK(BM_test)->Unit(benchmark::kMillisecond);
+```
+
+<a name="preventing-optimization" />
+
+### Preventing Optimization
+
+To prevent a value or expression from being optimized away by the compiler
+the `benchmark::DoNotOptimize(...)` and `benchmark::ClobberMemory()`
+functions can be used.
+
+```c++
+static void BM_test(benchmark::State& state) {
+  for (auto _ : state) {
+      int x = 0;
+      for (int i=0; i < 64; ++i) {
+        benchmark::DoNotOptimize(x += i);
+      }
+  }
+}
+```
+
+`DoNotOptimize(<expr>)` forces the *result* of `<expr>` to be stored in either
+memory or a register. For GNU based compilers it acts as a read/write barrier
+for global memory. More specifically it forces the compiler to flush pending
+writes to memory and reload any other values as necessary.
+
+Note that `DoNotOptimize(<expr>)` does not prevent optimizations on `<expr>`
+in any way. `<expr>` may even be removed entirely when the result is already
+known. For example:
+
+```c++
+  /* Example 1: `<expr>` is removed entirely. */
+  int foo(int x) { return x + 42; }
+  while (...) DoNotOptimize(foo(0)); // Optimized to DoNotOptimize(42);
+
+  /*  Example 2: Result of '<expr>' is only reused */
+  int bar(int) __attribute__((const));
+  while (...) DoNotOptimize(bar(0)); // Optimized to:
+  // int __result__ = bar(0);
+  // while (...) DoNotOptimize(__result__);
+```
+
+The second tool for preventing optimizations is `ClobberMemory()`. In essence
+`ClobberMemory()` forces the compiler to perform all pending writes to global
+memory. Memory managed by block scope objects must be "escaped" using
+`DoNotOptimize(...)` before it can be clobbered. In the below example
+`ClobberMemory()` prevents the call to `v.push_back(42)` from being optimized
+away.
+
+```c++
+static void BM_vector_push_back(benchmark::State& state) {
+  for (auto _ : state) {
+    std::vector<int> v;
+    v.reserve(1);
+    benchmark::DoNotOptimize(v.data()); // Allow v.data() to be clobbered.
+    v.push_back(42);
+    benchmark::ClobberMemory(); // Force 42 to be written to memory.
+  }
+}
+```
+
+Note that `ClobberMemory()` is only available for GNU or MSVC based compilers.
+
+<a name="reporting-statistics" />
+
+### Statistics: Reporting the Mean, Median and Standard Deviation of Repeated Benchmarks
+
+By default each benchmark is run once and that single result is reported.
+However, benchmarks are often noisy and a single result may not be
+representative of the overall behavior. For this reason it's possible to
+repeatedly rerun the benchmark.
+
+The number of runs of each benchmark is specified globally by the
+`--benchmark_repetitions` flag or on a per benchmark basis by calling
+`Repetitions` on the registered benchmark object. When a benchmark is run more
+than once the mean, median and standard deviation of the runs will be reported.
+
+Additionally the `--benchmark_report_aggregates_only={true|false}` and
+`--benchmark_display_aggregates_only={true|false}` flags, or the
+`ReportAggregatesOnly(bool)` and `DisplayAggregatesOnly(bool)` functions, can be
+used to change how repeated tests are reported. By default the result of each
+repeated run is reported. When the `report aggregates only` option is `true`,
+only the aggregates (i.e. mean, median and standard deviation, and possibly
+complexity measurements if they were requested) of the runs are reported, to
+both reporters - standard output (console) and the file.
+However, when only the `display aggregates only` option is `true`,
+only the aggregates are displayed in the standard output, while the file
+output still contains everything.
+Calling `ReportAggregatesOnly(bool)` / `DisplayAggregatesOnly(bool)` on a
+registered benchmark object overrides the value of the appropriate flag for that
+benchmark.
+
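+For example, a sketch combining these options (binary name assumed as before):
+
+```bash
+# Run each benchmark 10 times and report only the aggregate statistics.
+$ ./mybenchmark --benchmark_repetitions=10 --benchmark_report_aggregates_only=true
+```
+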
+<a name="custom-statistics" />
+
+### Custom Statistics
+
+While having mean, median and standard deviation is nice, this may not be
+enough for everyone. For example you may want to know what the largest
+observation is, e.g. because you have some real-time constraints. This is easy.
+The following code will specify a custom statistic to be calculated, defined
+by a lambda function.
+
+```c++
+void BM_spin_empty(benchmark::State& state) {
+  for (auto _ : state) {
+    for (int x = 0; x < state.range(0); ++x) {
+      benchmark::DoNotOptimize(x);
+    }
+  }
+}
+
+BENCHMARK(BM_spin_empty)
+  ->ComputeStatistics("max", [](const std::vector<double>& v) -> double {
+    return *(std::max_element(std::begin(v), std::end(v)));
+  })
+  ->Arg(512);
+```
+
+<a name="using-register-benchmark" />
+
+### Using RegisterBenchmark(name, fn, args...)
+
+The `RegisterBenchmark(name, func, args...)` function provides an alternative
+way to create and register benchmarks.
+`RegisterBenchmark(name, func, args...)` creates, registers, and returns a
+pointer to a new benchmark with the specified `name` that invokes
+`func(st, args...)` where `st` is a `benchmark::State` object.
+
+Unlike the `BENCHMARK` registration macros, which can only be used at global
+scope, `RegisterBenchmark` can be called anywhere. This allows for
+benchmark tests to be registered programmatically.
+
+Additionally `RegisterBenchmark` allows any callable object to be registered
+as a benchmark, including capturing lambdas and function objects.
+
+For example:
+```c++
+auto BM_test = [](benchmark::State& st, auto Inputs) { /* ... */ };
+
+int main(int argc, char** argv) {
+  for (auto& test_input : { /* ... */ })
+      benchmark::RegisterBenchmark(test_input.name(), BM_test, test_input);
+  benchmark::Initialize(&argc, argv);
+  benchmark::RunSpecifiedBenchmarks();
+}
+```
+
+<a name="exiting-with-an-error" />
+
+### Exiting with an Error
+
+When errors caused by external influences, such as file I/O and network
+communication, occur within a benchmark the
+`State::SkipWithError(const char* msg)` function can be used to skip that run
+of the benchmark and report the error. Note that only future iterations of the
+`KeepRunning()` loop are skipped. For the range-based for version of the
+benchmark loop, users must explicitly exit the loop, otherwise all iterations
+will be performed. Users may explicitly return to exit the benchmark immediately.
+
+The `SkipWithError(...)` function may be used at any point within the benchmark,
+including before and after the benchmark loop.
+
+For example:
+
+```c++
+static void BM_test(benchmark::State& state) {
+  auto resource = GetResource();
+  if (!resource.good()) {
+      state.SkipWithError("Resource is not good!");
+      // KeepRunning() loop will not be entered.
+  }
+  while (state.KeepRunning()) {
+      auto data = resource.read_data();
+      if (!resource.good()) {
+        state.SkipWithError("Failed to read data!");
+        break; // Needed to skip the rest of the iteration.
+     }
+     do_stuff(data);
+  }
+}
+
+static void BM_test_ranged_for(benchmark::State& state) {
+  state.SkipWithError("test will not be entered");
+  for (auto _ : state) {
+    state.SkipWithError("Failed!");
+    break; // REQUIRED to prevent all further iterations.
+  }
+}
+```
+<a name="a-faster-keep-running-loop" />
+
+### A Faster KeepRunning Loop
+
+In C++11 mode, a range-based for loop should be used in preference to
+the `KeepRunning` loop for running the benchmarks. For example:
+
+```c++
+static void BM_Fast(benchmark::State &state) {
+  for (auto _ : state) {
+    FastOperation();
+  }
+}
+BENCHMARK(BM_Fast);
+```
+
+The reason the range-based for loop is faster than using `KeepRunning` is that
+`KeepRunning` requires a memory load and store of the iteration count on
+every iteration, whereas the range-based variant is able to keep the iteration
+count in a register.
+
+For example, an empty inner loop using the range-based for method looks like:
+
+```asm
+# Loop Init
+  mov rbx, qword ptr [r14 + 104]
+  call benchmark::State::StartKeepRunning()
+  test rbx, rbx
+  je .LoopEnd
+.LoopHeader: # =>This Inner Loop Header: Depth=1
+  add rbx, -1
+  jne .LoopHeader
+.LoopEnd:
+```
+
+Compared to an empty `KeepRunning` loop, which looks like:
+
+```asm
+.LoopHeader: # in Loop: Header=BB0_3 Depth=1
+  cmp byte ptr [rbx], 1
+  jne .LoopInit
+.LoopBody: # =>This Inner Loop Header: Depth=1
+  mov rax, qword ptr [rbx + 8]
+  lea rcx, [rax + 1]
+  mov qword ptr [rbx + 8], rcx
+  cmp rax, qword ptr [rbx + 104]
+  jb .LoopHeader
+  jmp .LoopEnd
+.LoopInit:
+  mov rdi, rbx
+  call benchmark::State::StartKeepRunning()
+  jmp .LoopBody
+.LoopEnd:
+```
+
+Unless C++03 compatibility is required, the range-based for variant of writing
+the benchmark loop should be preferred.
+
+<a name="disabling-cpu-frequency-scaling" />
+
+### Disabling CPU Frequency Scaling
+
+If you see this error:
+
+```
+***WARNING*** CPU scaling is enabled, the benchmark real time measurements may be noisy and will incur extra overhead.
+```
+
+you might want to disable the CPU frequency scaling while running the benchmark:
+
+```bash
+sudo cpupower frequency-set --governor performance
+./mybench
+sudo cpupower frequency-set --governor powersave
+```
diff --git a/src/third_party/google_benchmark/WORKSPACE b/src/third_party/google_benchmark/WORKSPACE
new file mode 100644
index 0000000..8df248a
--- /dev/null
+++ b/src/third_party/google_benchmark/WORKSPACE
@@ -0,0 +1,15 @@
+workspace(name = "com_github_google_benchmark")
+
+load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+
+http_archive(
+    name = "rules_cc",
+    strip_prefix = "rules_cc-a508235df92e71d537fcbae0c7c952ea6957a912",
+    urls = ["https://github.com/bazelbuild/rules_cc/archive/a508235df92e71d537fcbae0c7c952ea6957a912.zip"],
+)
+
+http_archive(
+    name = "com_google_googletest",
+    strip_prefix = "googletest-3f0cf6b62ad1eb50d8736538363d3580dd640c3e",
+    urls = ["https://github.com/google/googletest/archive/3f0cf6b62ad1eb50d8736538363d3580dd640c3e.zip"],
+)
diff --git a/src/third_party/google_benchmark/_config.yml b/src/third_party/google_benchmark/_config.yml
new file mode 100644
index 0000000..1885487
--- /dev/null
+++ b/src/third_party/google_benchmark/_config.yml
@@ -0,0 +1 @@
+theme: jekyll-theme-midnight
\ No newline at end of file
diff --git a/src/third_party/google_benchmark/appveyor.yml b/src/third_party/google_benchmark/appveyor.yml
new file mode 100644
index 0000000..81da955
--- /dev/null
+++ b/src/third_party/google_benchmark/appveyor.yml
@@ -0,0 +1,50 @@
+version: '{build}'
+
+image: Visual Studio 2017
+
+configuration:
+  - Debug
+  - Release
+
+environment:
+  matrix:
+    - compiler: msvc-15-seh
+      generator: "Visual Studio 15 2017"
+
+    - compiler: msvc-15-seh
+      generator: "Visual Studio 15 2017 Win64"
+
+    - compiler: msvc-14-seh
+      generator: "Visual Studio 14 2015"
+
+    - compiler: msvc-14-seh
+      generator: "Visual Studio 14 2015 Win64"
+
+    - compiler: gcc-5.3.0-posix
+      generator: "MinGW Makefiles"
+      cxx_path: 'C:\mingw-w64\i686-5.3.0-posix-dwarf-rt_v4-rev0\mingw32\bin'
+      APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
+
+matrix:
+  fast_finish: true
+
+install:
+  # git bash conflicts with MinGW makefiles
+  - if "%generator%"=="MinGW Makefiles" (set "PATH=%PATH:C:\Program Files\Git\usr\bin;=%")
+  - if not "%cxx_path%"=="" (set "PATH=%PATH%;%cxx_path%")
+
+build_script:
+  - md _build -Force
+  - cd _build
+  - echo %configuration%
+  - cmake -G "%generator%" "-DCMAKE_BUILD_TYPE=%configuration%" -DBENCHMARK_DOWNLOAD_DEPENDENCIES=ON ..
+  - cmake --build . --config %configuration%
+
+test_script:
+  - ctest --build-config %configuration% --timeout 300 --output-on-failure
+
+artifacts:
+  - path: '_build/CMakeFiles/*.log'
+    name: logs
+  - path: '_build/Testing/**/*.xml'
+    name: test_results
diff --git a/src/third_party/google_benchmark/cmake/AddCXXCompilerFlag.cmake b/src/third_party/google_benchmark/cmake/AddCXXCompilerFlag.cmake
new file mode 100644
index 0000000..d0d2099
--- /dev/null
+++ b/src/third_party/google_benchmark/cmake/AddCXXCompilerFlag.cmake
@@ -0,0 +1,74 @@
+# - Adds a compiler flag if it is supported by the compiler
+#
+# This function checks that the supplied compiler flag is supported and then
+# adds it to the corresponding compiler flags
+#
+#  add_cxx_compiler_flag(<FLAG> [<VARIANT>])
+#
+# - Example
+#
+# include(AddCXXCompilerFlag)
+# add_cxx_compiler_flag(-Wall)
+# add_cxx_compiler_flag(-no-strict-aliasing RELEASE)
+# Requires CMake 2.6+
+
+if(__add_cxx_compiler_flag)
+  return()
+endif()
+set(__add_cxx_compiler_flag INCLUDED)
+
+include(CheckCXXCompilerFlag)
+
+function(mangle_compiler_flag FLAG OUTPUT)
+  string(TOUPPER "HAVE_CXX_FLAG_${FLAG}" SANITIZED_FLAG)
+  string(REPLACE "+" "X" SANITIZED_FLAG ${SANITIZED_FLAG})
+  string(REGEX REPLACE "[^A-Za-z_0-9]" "_" SANITIZED_FLAG ${SANITIZED_FLAG})
+  string(REGEX REPLACE "_+" "_" SANITIZED_FLAG ${SANITIZED_FLAG})
+  set(${OUTPUT} "${SANITIZED_FLAG}" PARENT_SCOPE)
+endfunction(mangle_compiler_flag)
+
+function(add_cxx_compiler_flag FLAG)
+  mangle_compiler_flag("${FLAG}" MANGLED_FLAG)
+  set(OLD_CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS}")
+  set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} ${FLAG}")
+  check_cxx_compiler_flag("${FLAG}" ${MANGLED_FLAG})
+  set(CMAKE_REQUIRED_FLAGS "${OLD_CMAKE_REQUIRED_FLAGS}")
+  if(${MANGLED_FLAG})
+    set(VARIANT ${ARGV1})
+    if(ARGV1)
+      string(TOUPPER "_${VARIANT}" VARIANT)
+    endif()
+    set(CMAKE_CXX_FLAGS${VARIANT} "${CMAKE_CXX_FLAGS${VARIANT}} ${BENCHMARK_CXX_FLAGS${VARIANT}} ${FLAG}" PARENT_SCOPE)
+  endif()
+endfunction()
+
+function(add_required_cxx_compiler_flag FLAG)
+  mangle_compiler_flag("${FLAG}" MANGLED_FLAG)
+  set(OLD_CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS}")
+  set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} ${FLAG}")
+  check_cxx_compiler_flag("${FLAG}" ${MANGLED_FLAG})
+  set(CMAKE_REQUIRED_FLAGS "${OLD_CMAKE_REQUIRED_FLAGS}")
+  if(${MANGLED_FLAG})
+    set(VARIANT ${ARGV1})
+    if(ARGV1)
+      string(TOUPPER "_${VARIANT}" VARIANT)
+    endif()
+    set(CMAKE_CXX_FLAGS${VARIANT} "${CMAKE_CXX_FLAGS${VARIANT}} ${FLAG}" PARENT_SCOPE)
+    set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${FLAG}" PARENT_SCOPE)
+    set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${FLAG}" PARENT_SCOPE)
+    set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${FLAG}" PARENT_SCOPE)
+    set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} ${FLAG}" PARENT_SCOPE)
+  else()
+    message(FATAL_ERROR "Required flag '${FLAG}' is not supported by the compiler")
+  endif()
+endfunction()
+
+function(check_cxx_warning_flag FLAG)
+  mangle_compiler_flag("${FLAG}" MANGLED_FLAG)
+  set(OLD_CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS}")
+  # Add -Werror to ensure the compiler generates an error if the warning flag
+  # doesn't exist.
+  set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} -Werror ${FLAG}")
+  check_cxx_compiler_flag("${FLAG}" ${MANGLED_FLAG})
+  set(CMAKE_REQUIRED_FLAGS "${OLD_CMAKE_REQUIRED_FLAGS}")
+endfunction()
diff --git a/src/third_party/google_benchmark/cmake/CXXFeatureCheck.cmake b/src/third_party/google_benchmark/cmake/CXXFeatureCheck.cmake
new file mode 100644
index 0000000..059d510
--- /dev/null
+++ b/src/third_party/google_benchmark/cmake/CXXFeatureCheck.cmake
@@ -0,0 +1,64 @@
+# - Compile and run code to check for C++ features
+#
+# This functions compiles a source file under the `cmake` folder
+# and adds the corresponding `HAVE_[FILENAME]` flag to the CMake
+# environment
+#
+#  cxx_feature_check(<FLAG> [<VARIANT>])
+#
+# - Example
+#
+# include(CXXFeatureCheck)
+# cxx_feature_check(STD_REGEX)
+# Requires CMake 2.8.12+
+
+if(__cxx_feature_check)
+  return()
+endif()
+set(__cxx_feature_check INCLUDED)
+
+function(cxx_feature_check FILE)
+  string(TOLOWER ${FILE} FILE)
+  string(TOUPPER ${FILE} VAR)
+  string(TOUPPER "HAVE_${VAR}" FEATURE)
+  if (DEFINED HAVE_${VAR})
+    set(HAVE_${VAR} 1 PARENT_SCOPE)
+    add_definitions(-DHAVE_${VAR})
+    return()
+  endif()
+
+  if (NOT DEFINED COMPILE_${FEATURE})
+    message(STATUS "Performing Test ${FEATURE}")
+    if(CMAKE_CROSSCOMPILING)
+      try_compile(COMPILE_${FEATURE}
+              ${CMAKE_BINARY_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/cmake/${FILE}.cpp
+              CMAKE_FLAGS ${BENCHMARK_CXX_LINKER_FLAGS}
+              LINK_LIBRARIES ${BENCHMARK_CXX_LIBRARIES})
+      if(COMPILE_${FEATURE})
+        message(WARNING
+              "If you see build failures due to cross compilation, try setting HAVE_${VAR} to 0")
+        set(RUN_${FEATURE} 0 CACHE INTERNAL "")
+      else()
+        set(RUN_${FEATURE} 1 CACHE INTERNAL "")
+      endif()
+    else()
+      message(STATUS "Performing Test ${FEATURE}")
+      try_run(RUN_${FEATURE} COMPILE_${FEATURE}
+              ${CMAKE_BINARY_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/cmake/${FILE}.cpp
+              CMAKE_FLAGS ${BENCHMARK_CXX_LINKER_FLAGS}
+              LINK_LIBRARIES ${BENCHMARK_CXX_LIBRARIES})
+    endif()
+  endif()
+
+  if(RUN_${FEATURE} EQUAL 0)
+    message(STATUS "Performing Test ${FEATURE} -- success")
+    set(HAVE_${VAR} 1 PARENT_SCOPE)
+    add_definitions(-DHAVE_${VAR})
+  else()
+    if(NOT COMPILE_${FEATURE})
+      message(STATUS "Performing Test ${FEATURE} -- failed to compile")
+    else()
+      message(STATUS "Performing Test ${FEATURE} -- compiled but failed to run")
+    endif()
+  endif()
+endfunction()
diff --git a/src/third_party/google_benchmark/cmake/Config.cmake.in b/src/third_party/google_benchmark/cmake/Config.cmake.in
new file mode 100644
index 0000000..6e9256e
--- /dev/null
+++ b/src/third_party/google_benchmark/cmake/Config.cmake.in
@@ -0,0 +1 @@
+include("${CMAKE_CURRENT_LIST_DIR}/@targets_export_name@.cmake")
diff --git a/src/third_party/google_benchmark/cmake/GetGitVersion.cmake b/src/third_party/google_benchmark/cmake/GetGitVersion.cmake
new file mode 100644
index 0000000..4f10f22
--- /dev/null
+++ b/src/third_party/google_benchmark/cmake/GetGitVersion.cmake
@@ -0,0 +1,54 @@
+# - Returns a version string from Git tags
+#
+# This function inspects the annotated git tags for the project and returns a string
+# into a CMake variable
+#
+#  get_git_version(<var>)
+#
+# - Example
+#
+# include(GetGitVersion)
+# get_git_version(GIT_VERSION)
+#
+# Requires CMake 2.8.11+
+find_package(Git)
+
+if(__get_git_version)
+  return()
+endif()
+set(__get_git_version INCLUDED)
+
+function(get_git_version var)
+  if(GIT_EXECUTABLE)
+      execute_process(COMMAND ${GIT_EXECUTABLE} describe --match "v[0-9]*.[0-9]*.[0-9]*" --abbrev=8
+          WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}
+          RESULT_VARIABLE status
+          OUTPUT_VARIABLE GIT_VERSION
+          ERROR_QUIET)
+      if(${status})
+          set(GIT_VERSION "v0.0.0")
+      else()
+          string(STRIP ${GIT_VERSION} GIT_VERSION)
+          string(REGEX REPLACE "-[0-9]+-g" "-" GIT_VERSION ${GIT_VERSION})
+      endif()
+
+      # Work out if the repository is dirty
+      execute_process(COMMAND ${GIT_EXECUTABLE} update-index -q --refresh
+          WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}
+          OUTPUT_QUIET
+          ERROR_QUIET)
+      execute_process(COMMAND ${GIT_EXECUTABLE} diff-index --name-only HEAD --
+          WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}
+          OUTPUT_VARIABLE GIT_DIFF_INDEX
+          ERROR_QUIET)
+      string(COMPARE NOTEQUAL "${GIT_DIFF_INDEX}" "" GIT_DIRTY)
+      if (${GIT_DIRTY})
+          set(GIT_VERSION "${GIT_VERSION}-dirty")
+      endif()
+  else()
+      set(GIT_VERSION "v0.0.0")
+  endif()
+
+  message(STATUS "git Version: ${GIT_VERSION}")
+  set(${var} ${GIT_VERSION} PARENT_SCOPE)
+endfunction()
diff --git a/src/third_party/google_benchmark/cmake/GoogleTest.cmake b/src/third_party/google_benchmark/cmake/GoogleTest.cmake
new file mode 100644
index 0000000..dd611fc
--- /dev/null
+++ b/src/third_party/google_benchmark/cmake/GoogleTest.cmake
@@ -0,0 +1,41 @@
+# Download and unpack googletest at configure time
+set(GOOGLETEST_PREFIX "${benchmark_BINARY_DIR}/third_party/googletest")
+configure_file(${benchmark_SOURCE_DIR}/cmake/GoogleTest.cmake.in ${GOOGLETEST_PREFIX}/CMakeLists.txt @ONLY)
+
+set(GOOGLETEST_PATH "${CMAKE_CURRENT_SOURCE_DIR}/googletest" CACHE PATH "") # Mind the quotes
+execute_process(COMMAND ${CMAKE_COMMAND} -G "${CMAKE_GENERATOR}"
+  -DALLOW_DOWNLOADING_GOOGLETEST=${BENCHMARK_DOWNLOAD_DEPENDENCIES} -DGOOGLETEST_PATH:PATH=${GOOGLETEST_PATH} .
+  RESULT_VARIABLE result
+  WORKING_DIRECTORY ${GOOGLETEST_PREFIX}
+)
+
+if(result)
+  message(FATAL_ERROR "CMake step for googletest failed: ${result}")
+endif()
+
+execute_process(
+  COMMAND ${CMAKE_COMMAND} --build .
+  RESULT_VARIABLE result
+  WORKING_DIRECTORY ${GOOGLETEST_PREFIX}
+)
+
+if(result)
+  message(FATAL_ERROR "Build step for googletest failed: ${result}")
+endif()
+
+# Prevent overriding the parent project's compiler/linker
+# settings on Windows
+set(gtest_force_shared_crt ON CACHE BOOL "" FORCE)
+
+include(${GOOGLETEST_PREFIX}/googletest-paths.cmake)
+
+# Add googletest directly to our build. This defines
+# the gtest and gtest_main targets.
+add_subdirectory(${GOOGLETEST_SOURCE_DIR}
+                 ${GOOGLETEST_BINARY_DIR}
+                 EXCLUDE_FROM_ALL)
+
+set_target_properties(gtest PROPERTIES INTERFACE_SYSTEM_INCLUDE_DIRECTORIES $<TARGET_PROPERTY:gtest,INTERFACE_INCLUDE_DIRECTORIES>)
+set_target_properties(gtest_main PROPERTIES INTERFACE_SYSTEM_INCLUDE_DIRECTORIES $<TARGET_PROPERTY:gtest_main,INTERFACE_INCLUDE_DIRECTORIES>)
+set_target_properties(gmock PROPERTIES INTERFACE_SYSTEM_INCLUDE_DIRECTORIES $<TARGET_PROPERTY:gmock,INTERFACE_INCLUDE_DIRECTORIES>)
+set_target_properties(gmock_main PROPERTIES INTERFACE_SYSTEM_INCLUDE_DIRECTORIES $<TARGET_PROPERTY:gmock_main,INTERFACE_INCLUDE_DIRECTORIES>)
diff --git a/src/third_party/google_benchmark/cmake/GoogleTest.cmake.in b/src/third_party/google_benchmark/cmake/GoogleTest.cmake.in
new file mode 100644
index 0000000..28818ee
--- /dev/null
+++ b/src/third_party/google_benchmark/cmake/GoogleTest.cmake.in
@@ -0,0 +1,58 @@
+cmake_minimum_required(VERSION 2.8.12)
+
+project(googletest-download NONE)
+
+# Enable ExternalProject CMake module
+include(ExternalProject)
+
+option(ALLOW_DOWNLOADING_GOOGLETEST "If googletest src tree is not found in location specified by GOOGLETEST_PATH, do fetch the archive from internet" OFF)
+set(GOOGLETEST_PATH "/usr/src/googletest" CACHE PATH
+                    "Path to the googletest root tree. Should contain googletest and googlemock subdirs. And CMakeLists.txt in root, and in both of these subdirs")
+
+# Download and install GoogleTest
+
+message(STATUS "Looking for Google Test sources")
+message(STATUS "Looking for Google Test sources in ${GOOGLETEST_PATH}")
+if(EXISTS "${GOOGLETEST_PATH}"            AND IS_DIRECTORY "${GOOGLETEST_PATH}"            AND EXISTS "${GOOGLETEST_PATH}/CMakeLists.txt" AND
+   EXISTS "${GOOGLETEST_PATH}/googletest" AND IS_DIRECTORY "${GOOGLETEST_PATH}/googletest" AND EXISTS "${GOOGLETEST_PATH}/googletest/CMakeLists.txt" AND
+   EXISTS "${GOOGLETEST_PATH}/googlemock" AND IS_DIRECTORY "${GOOGLETEST_PATH}/googlemock" AND EXISTS "${GOOGLETEST_PATH}/googlemock/CMakeLists.txt")
+  message(STATUS "Found Google Test in ${GOOGLETEST_PATH}")
+
+  ExternalProject_Add(
+    googletest
+    PREFIX            "${CMAKE_BINARY_DIR}"
+    DOWNLOAD_DIR      "${CMAKE_BINARY_DIR}/download"
+    SOURCE_DIR        "${GOOGLETEST_PATH}" # use existing src dir.
+    BINARY_DIR        "${CMAKE_BINARY_DIR}/build"
+    CONFIGURE_COMMAND ""
+    BUILD_COMMAND     ""
+    INSTALL_COMMAND   ""
+    TEST_COMMAND      ""
+  )
+else()
+  if(NOT ALLOW_DOWNLOADING_GOOGLETEST)
+    message(SEND_ERROR "Did not find Google Test sources! Either pass correct path in GOOGLETEST_PATH, or enable ALLOW_DOWNLOADING_GOOGLETEST, or disable BENCHMARK_ENABLE_GTEST_TESTS / BENCHMARK_ENABLE_TESTING.")
+  else()
+    message(WARNING "Did not find Google Test sources! Fetching from web...")
+    ExternalProject_Add(
+      googletest
+      GIT_REPOSITORY    https://github.com/google/googletest.git
+      GIT_TAG           master
+      PREFIX            "${CMAKE_BINARY_DIR}"
+      STAMP_DIR         "${CMAKE_BINARY_DIR}/stamp"
+      DOWNLOAD_DIR      "${CMAKE_BINARY_DIR}/download"
+      SOURCE_DIR        "${CMAKE_BINARY_DIR}/src"
+      BINARY_DIR        "${CMAKE_BINARY_DIR}/build"
+      CONFIGURE_COMMAND ""
+      BUILD_COMMAND     ""
+      INSTALL_COMMAND   ""
+      TEST_COMMAND      ""
+    )
+  endif()
+endif()
+
+ExternalProject_Get_Property(googletest SOURCE_DIR BINARY_DIR)
+file(WRITE googletest-paths.cmake
+"set(GOOGLETEST_SOURCE_DIR \"${SOURCE_DIR}\")
+set(GOOGLETEST_BINARY_DIR \"${BINARY_DIR}\")
+")
diff --git a/src/third_party/google_benchmark/cmake/Modules/FindLLVMAr.cmake b/src/third_party/google_benchmark/cmake/Modules/FindLLVMAr.cmake
new file mode 100644
index 0000000..2346981
--- /dev/null
+++ b/src/third_party/google_benchmark/cmake/Modules/FindLLVMAr.cmake
@@ -0,0 +1,16 @@
+include(FeatureSummary)
+
+find_program(LLVMAR_EXECUTABLE
+  NAMES llvm-ar
+  DOC "The llvm-ar executable"
+  )
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(LLVMAr
+  DEFAULT_MSG
+  LLVMAR_EXECUTABLE)
+
+SET_PACKAGE_PROPERTIES(LLVMAr PROPERTIES
+  URL https://llvm.org/docs/CommandGuide/llvm-ar.html
+  DESCRIPTION "create, modify, and extract from archives"
+)
diff --git a/src/third_party/google_benchmark/cmake/Modules/FindLLVMNm.cmake b/src/third_party/google_benchmark/cmake/Modules/FindLLVMNm.cmake
new file mode 100644
index 0000000..e56430a
--- /dev/null
+++ b/src/third_party/google_benchmark/cmake/Modules/FindLLVMNm.cmake
@@ -0,0 +1,16 @@
+include(FeatureSummary)
+
+find_program(LLVMNM_EXECUTABLE
+  NAMES llvm-nm
+  DOC "The llvm-nm executable"
+  )
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(LLVMNm
+  DEFAULT_MSG
+  LLVMNM_EXECUTABLE)
+
+SET_PACKAGE_PROPERTIES(LLVMNm PROPERTIES
+  URL https://llvm.org/docs/CommandGuide/llvm-nm.html
+  DESCRIPTION "list LLVM bitcode and object file’s symbol table"
+)
diff --git a/src/third_party/google_benchmark/cmake/Modules/FindLLVMRanLib.cmake b/src/third_party/google_benchmark/cmake/Modules/FindLLVMRanLib.cmake
new file mode 100644
index 0000000..7b53e1a
--- /dev/null
+++ b/src/third_party/google_benchmark/cmake/Modules/FindLLVMRanLib.cmake
@@ -0,0 +1,15 @@
+include(FeatureSummary)
+
+find_program(LLVMRANLIB_EXECUTABLE
+  NAMES llvm-ranlib
+  DOC "The llvm-ranlib executable"
+  )
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(LLVMRanLib
+  DEFAULT_MSG
+  LLVMRANLIB_EXECUTABLE)
+
+SET_PACKAGE_PROPERTIES(LLVMRanLib PROPERTIES
+  DESCRIPTION "generate index for LLVM archive"
+)
diff --git a/src/third_party/google_benchmark/cmake/benchmark.pc.in b/src/third_party/google_benchmark/cmake/benchmark.pc.in
new file mode 100644
index 0000000..43ca8f9
--- /dev/null
+++ b/src/third_party/google_benchmark/cmake/benchmark.pc.in
@@ -0,0 +1,12 @@
+prefix=@CMAKE_INSTALL_PREFIX@
+exec_prefix=${prefix}
+libdir=${prefix}/lib
+includedir=${prefix}/include
+
+Name: @PROJECT_NAME@
+Description: Google microbenchmark framework
+Version: @VERSION@
+
+Libs: -L${libdir} -lbenchmark
+Libs.private: -lpthread
+Cflags: -I${includedir}
diff --git a/src/third_party/google_benchmark/cmake/gnu_posix_regex.cpp b/src/third_party/google_benchmark/cmake/gnu_posix_regex.cpp
new file mode 100644
index 0000000..b5b91cd
--- /dev/null
+++ b/src/third_party/google_benchmark/cmake/gnu_posix_regex.cpp
@@ -0,0 +1,12 @@
+#include <gnuregex.h>
+#include <string>
+int main() {
+  std::string str = "test0159";
+  regex_t re;
+  int ec = regcomp(&re, "^[a-z]+[0-9]+$", REG_EXTENDED | REG_NOSUB);
+  if (ec != 0) {
+    return ec;
+  }
+  return regexec(&re, str.c_str(), 0, nullptr, 0) ? -1 : 0;
+}
+
diff --git a/src/third_party/google_benchmark/cmake/llvm-toolchain.cmake b/src/third_party/google_benchmark/cmake/llvm-toolchain.cmake
new file mode 100644
index 0000000..fc119e5
--- /dev/null
+++ b/src/third_party/google_benchmark/cmake/llvm-toolchain.cmake
@@ -0,0 +1,8 @@
+find_package(LLVMAr REQUIRED)
+set(CMAKE_AR "${LLVMAR_EXECUTABLE}" CACHE FILEPATH "" FORCE)
+
+find_package(LLVMNm REQUIRED)
+set(CMAKE_NM "${LLVMNM_EXECUTABLE}" CACHE FILEPATH "" FORCE)
+
+find_package(LLVMRanLib REQUIRED)
+set(CMAKE_RANLIB "${LLVMRANLIB_EXECUTABLE}" CACHE FILEPATH "" FORCE)
diff --git a/src/third_party/google_benchmark/cmake/posix_regex.cpp b/src/third_party/google_benchmark/cmake/posix_regex.cpp
new file mode 100644
index 0000000..466dc62
--- /dev/null
+++ b/src/third_party/google_benchmark/cmake/posix_regex.cpp
@@ -0,0 +1,14 @@
+#include <regex.h>
+#include <string>
+int main() {
+  std::string str = "test0159";
+  regex_t re;
+  int ec = regcomp(&re, "^[a-z]+[0-9]+$", REG_EXTENDED | REG_NOSUB);
+  if (ec != 0) {
+    return ec;
+  }
+  int ret = regexec(&re, str.c_str(), 0, nullptr, 0) ? -1 : 0;
+  regfree(&re);
+  return ret;
+}
+
diff --git a/src/third_party/google_benchmark/cmake/split_list.cmake b/src/third_party/google_benchmark/cmake/split_list.cmake
new file mode 100644
index 0000000..67aed3f
--- /dev/null
+++ b/src/third_party/google_benchmark/cmake/split_list.cmake
@@ -0,0 +1,3 @@
+macro(split_list listname)
+  string(REPLACE ";" " " ${listname} "${${listname}}")
+endmacro()
diff --git a/src/third_party/google_benchmark/cmake/std_regex.cpp b/src/third_party/google_benchmark/cmake/std_regex.cpp
new file mode 100644
index 0000000..696f2a2
--- /dev/null
+++ b/src/third_party/google_benchmark/cmake/std_regex.cpp
@@ -0,0 +1,10 @@
+#include <regex>
+#include <string>
+int main() {
+  const std::string str = "test0159";
+  std::regex re;
+  re = std::regex("^[a-z]+[0-9]+$",
+       std::regex_constants::extended | std::regex_constants::nosubs);
+  return std::regex_search(str, re) ? 0 : -1;
+}
+
diff --git a/src/third_party/google_benchmark/cmake/steady_clock.cpp b/src/third_party/google_benchmark/cmake/steady_clock.cpp
new file mode 100644
index 0000000..66d50d1
--- /dev/null
+++ b/src/third_party/google_benchmark/cmake/steady_clock.cpp
@@ -0,0 +1,7 @@
+#include <chrono>
+
+int main() {
+    typedef std::chrono::steady_clock Clock;
+    Clock::time_point tp = Clock::now();
+    ((void)tp);
+}
diff --git a/src/third_party/google_benchmark/cmake/thread_safety_attributes.cpp b/src/third_party/google_benchmark/cmake/thread_safety_attributes.cpp
new file mode 100644
index 0000000..46161ba
--- /dev/null
+++ b/src/third_party/google_benchmark/cmake/thread_safety_attributes.cpp
@@ -0,0 +1,4 @@
+#define HAVE_THREAD_SAFETY_ATTRIBUTES
+#include "../src/mutex.h"
+
+int main() {}
diff --git a/src/third_party/google_benchmark/conan/CMakeLists.txt b/src/third_party/google_benchmark/conan/CMakeLists.txt
new file mode 100644
index 0000000..15b92ca
--- /dev/null
+++ b/src/third_party/google_benchmark/conan/CMakeLists.txt
@@ -0,0 +1,7 @@
+cmake_minimum_required(VERSION 2.8.11)
+project(cmake_wrapper)
+
+include(conanbuildinfo.cmake)
+conan_basic_setup()
+
+include(${CMAKE_SOURCE_DIR}/CMakeListsOriginal.txt)
diff --git a/src/third_party/google_benchmark/conan/test_package/CMakeLists.txt b/src/third_party/google_benchmark/conan/test_package/CMakeLists.txt
new file mode 100644
index 0000000..089a6c7
--- /dev/null
+++ b/src/third_party/google_benchmark/conan/test_package/CMakeLists.txt
@@ -0,0 +1,10 @@
+cmake_minimum_required(VERSION 2.8.11)
+project(test_package)
+
+set(CMAKE_VERBOSE_MAKEFILE TRUE)
+
+include(${CMAKE_BINARY_DIR}/conanbuildinfo.cmake)
+conan_basic_setup()
+
+add_executable(${PROJECT_NAME} test_package.cpp)
+target_link_libraries(${PROJECT_NAME} ${CONAN_LIBS})
diff --git a/src/third_party/google_benchmark/conan/test_package/conanfile.py b/src/third_party/google_benchmark/conan/test_package/conanfile.py
new file mode 100644
index 0000000..d63f408
--- /dev/null
+++ b/src/third_party/google_benchmark/conan/test_package/conanfile.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from conans import ConanFile, CMake
+import os
+
+
+class TestPackageConan(ConanFile):
+    settings = "os", "compiler", "build_type", "arch"
+    generators = "cmake"
+
+    def build(self):
+        cmake = CMake(self)
+        cmake.configure()
+        cmake.build()
+
+    def test(self):
+        bin_path = os.path.join("bin", "test_package")
+        self.run(bin_path, run_environment=True)
diff --git a/src/third_party/google_benchmark/conan/test_package/test_package.cpp b/src/third_party/google_benchmark/conan/test_package/test_package.cpp
new file mode 100644
index 0000000..4fa7ec0
--- /dev/null
+++ b/src/third_party/google_benchmark/conan/test_package/test_package.cpp
@@ -0,0 +1,18 @@
+#include "benchmark/benchmark.h"
+
+void BM_StringCreation(benchmark::State& state) {
+    while (state.KeepRunning())
+        std::string empty_string;
+}
+
+BENCHMARK(BM_StringCreation);
+
+void BM_StringCopy(benchmark::State& state) {
+    std::string x = "hello";
+    while (state.KeepRunning())
+        std::string copy(x);
+}
+
+BENCHMARK(BM_StringCopy);
+
+BENCHMARK_MAIN();
diff --git a/src/third_party/google_benchmark/conanfile.py b/src/third_party/google_benchmark/conanfile.py
new file mode 100644
index 0000000..e31fc52
--- /dev/null
+++ b/src/third_party/google_benchmark/conanfile.py
@@ -0,0 +1,79 @@
+from conans import ConanFile, CMake, tools
+from conans.errors import ConanInvalidConfiguration
+import shutil
+import os
+
+
+class GoogleBenchmarkConan(ConanFile):
+    name = "benchmark"
+    description = "A microbenchmark support library."
+    topics = ("conan", "benchmark", "google", "microbenchmark")
+    url = "https://github.com/google/benchmark"
+    homepage = "https://github.com/google/benchmark"
+    author = "Google Inc."
+    license = "Apache-2.0"
+    exports_sources = ["*"]
+    generators = "cmake"
+
+    settings = "arch", "build_type", "compiler", "os"
+    options = {
+        "shared": [True, False],
+        "fPIC": [True, False],
+        "enable_lto": [True, False],
+        "enable_exceptions": [True, False]
+    }
+    default_options = {"shared": False, "fPIC": True, "enable_lto": False, "enable_exceptions": True}
+
+    _build_subfolder = "."
+
+    def source(self):
+        # Wrap the original CMake file to call conan_basic_setup
+        shutil.move("CMakeLists.txt", "CMakeListsOriginal.txt")
+        shutil.move(os.path.join("conan", "CMakeLists.txt"), "CMakeLists.txt")
+
+    def config_options(self):
+        if self.settings.os == "Windows":
+            if self.settings.compiler == "Visual Studio" and float(self.settings.compiler.version.value) <= 12:
+                raise ConanInvalidConfiguration("{} {} does not support Visual Studio <= 12".format(self.name, self.version))
+            del self.options.fPIC
+
+    def configure(self):
+        if self.settings.os == "Windows" and self.options.shared:
+            raise ConanInvalidConfiguration("Windows shared builds are not supported right now, see issue #639")
+
+    def _configure_cmake(self):
+        cmake = CMake(self)
+
+        cmake.definitions["BENCHMARK_ENABLE_TESTING"] = "OFF"
+        cmake.definitions["BENCHMARK_ENABLE_GTEST_TESTS"] = "OFF"
+        cmake.definitions["BENCHMARK_ENABLE_LTO"] = "ON" if self.options.enable_lto else "OFF"
+        cmake.definitions["BENCHMARK_ENABLE_EXCEPTIONS"] = "ON" if self.options.enable_exceptions else "OFF"
+
+        # See https://github.com/google/benchmark/pull/638 for Windows 32 build explanation
+        if self.settings.os != "Windows":
+            cmake.definitions["BENCHMARK_BUILD_32_BITS"] = "ON" if "64" not in str(self.settings.arch) else "OFF"
+            cmake.definitions["BENCHMARK_USE_LIBCXX"] = "ON" if (str(self.settings.compiler.libcxx) == "libc++") else "OFF"
+        else:
+            cmake.definitions["BENCHMARK_USE_LIBCXX"] = "OFF"
+
+        cmake.configure(build_folder=self._build_subfolder)
+        return cmake
+
+    def build(self):
+        cmake = self._configure_cmake()
+        cmake.build()
+
+    def package(self):
+        cmake = self._configure_cmake()
+        cmake.install()
+
+        self.copy(pattern="LICENSE", dst="licenses")
+
+    def package_info(self):
+        self.cpp_info.libs = tools.collect_libs(self)
+        if self.settings.os == "Linux":
+            self.cpp_info.libs.extend(["pthread", "rt"])
+        elif self.settings.os == "Windows":
+            self.cpp_info.libs.append("shlwapi")
+        elif self.settings.os == "SunOS":
+            self.cpp_info.libs.append("kstat")
diff --git a/src/third_party/google_benchmark/dependencies.md b/src/third_party/google_benchmark/dependencies.md
new file mode 100644
index 0000000..6289b4e
--- /dev/null
+++ b/src/third_party/google_benchmark/dependencies.md
@@ -0,0 +1,18 @@
+# Build tool dependency policy
+
+To ensure the broadest compatibility when building the benchmark library, but
+still allow forward progress, we require any build tooling to be available for:
+
+* Debian stable AND
+* The last two Ubuntu LTS releases
+
+Currently, this means using build tool versions that are available for Ubuntu
+16.04 (Xenial), Ubuntu 18.04 (Bionic), and Debian stretch.
+
+_Note, [travis](.travis.yml) runs under Ubuntu 14.04 (Trusty) for linux builds._
+
+## cmake
+The currently supported version is cmake 3.5.1 as of 2018-06-06.
+
+_Note, this version is also available for Ubuntu 14.04, the previous Ubuntu LTS
+release, as `cmake3`._
diff --git a/src/third_party/google_benchmark/docs/AssemblyTests.md b/src/third_party/google_benchmark/docs/AssemblyTests.md
new file mode 100644
index 0000000..1fbdc26
--- /dev/null
+++ b/src/third_party/google_benchmark/docs/AssemblyTests.md
@@ -0,0 +1,147 @@
+# Assembly Tests
+
+The Benchmark library provides a number of functions whose primary
+purpose is to affect assembly generation, including `DoNotOptimize`
+and `ClobberMemory`. In addition there are other functions,
+such as `KeepRunning`, for which generating good assembly is paramount.
+
+For these functions it's important to have tests that verify the
+correctness and quality of the implementation. This requires testing
+the code generated by the compiler.
+
+This document describes how the Benchmark library tests compiler output,
+as well as how to properly write new tests.
+
+
+## Anatomy of a Test
+
+Writing a test has two steps:
+
+* Write the code you want to generate assembly for.
+* Add `// CHECK` lines to match against the verified assembly.
+
+Example:
+```c++
+
+// CHECK-LABEL: test_add:
+extern "C" int test_add() {
+    extern int ExternInt;
+    return ExternInt + 1;
+
+    // CHECK: movl ExternInt(%rip), %eax
+    // CHECK: addl %eax
+    // CHECK: ret
+}
+
+```
+
+#### LLVM Filecheck
+
+[LLVM's Filecheck](https://llvm.org/docs/CommandGuide/FileCheck.html)
+is used to test the generated assembly against the `// CHECK` lines
+specified in the tests source file. Please see the documentation
+linked above for information on how to write `CHECK` directives.
+
+#### Tips and Tricks:
+
+* Tests should match the minimal amount of output required to establish
+correctness. `CHECK` directives don't have to match on the exact next line
+after the previous match, so tests should omit checks for unimportant
+bits of assembly. ([`CHECK-NEXT`](https://llvm.org/docs/CommandGuide/FileCheck.html#the-check-next-directive)
+can be used to ensure a match occurs exactly after the previous match).
+
+* The tests are compiled with `-O3 -g0`. So we're only testing the
+optimized output.
+
+* The assembly output is further cleaned up using `tools/strip_asm.py`.
+This removes comments, assembler directives, and unused labels before
+the test is run.
+
+* The generated and stripped assembly file for a test is output under
+`<build-directory>/test/<test-name>.s`
+
+* Filecheck supports using [`CHECK` prefixes](https://llvm.org/docs/CommandGuide/FileCheck.html#cmdoption-check-prefixes)
+to specify lines that should only match in certain situations.
+The Benchmark tests use `CHECK-CLANG` and `CHECK-GNU` for lines that
+are only expected to match Clang or GCC's output respectively. Normal
+`CHECK` lines match against all compilers. (Note: `CHECK-NOT` and
+`CHECK-LABEL` are NOT prefixes. They are versions of non-prefixed
+`CHECK` lines)
+
+* Use `extern "C"` to disable name mangling for specific functions. This
+makes them easier to name in the `CHECK` lines.
+
+
+## Problems Writing Portable Tests
+
+Tests which check the code generated by a compiler are
+inherently non-portable. Different compilers and even different compiler
+versions may generate entirely different code. The Benchmark tests
+must tolerate this.
+
+LLVM Filecheck provides a number of mechanisms to help write
+"more portable" tests; including [matching using regular expressions](https://llvm.org/docs/CommandGuide/FileCheck.html#filecheck-pattern-matching-syntax),
+allowing the creation of [named variables](https://llvm.org/docs/CommandGuide/FileCheck.html#filecheck-variables)
+for later matching, and [checking non-sequential matches](https://llvm.org/docs/CommandGuide/FileCheck.html#the-check-dag-directive).
+
+#### Capturing Variables
+
+For example, say GCC stores a variable in a register but Clang stores
+it in memory. To write a test that tolerates both cases we "capture"
+the destination of the store, and then use the captured expression
+to write the remainder of the test.
+
+```c++
+// CHECK-LABEL: test_div_no_op_into_shr:
+extern "C" void test_div_no_op_into_shr(int value) {
+    int divisor = 2;
+    benchmark::DoNotOptimize(divisor); // hide the value from the optimizer
+    return value / divisor;
+
+    // CHECK: movl $2, [[DEST:.*]]
+    // CHECK: idivl [[DEST]]
+    // CHECK: ret
+}
+```
+
+#### Using Regular Expressions to Match Differing Output
+
+Often tests require testing assembly lines which may subtly differ
+between compilers or compiler versions. A common example of this
+is matching stack frame addresses. In this case regular expressions
+can be used to match the differing bits of output. For example:
+
+```c++
+int ExternInt;
+struct Point { int x, y, z; };
+
+// CHECK-LABEL: test_store_point:
+extern "C" void test_store_point() {
+    Point p{ExternInt, ExternInt, ExternInt};
+    benchmark::DoNotOptimize(p);
+
+    // CHECK: movl ExternInt(%rip), %eax
+    // CHECK: movl %eax, -{{[0-9]+}}(%rsp)
+    // CHECK: movl %eax, -{{[0-9]+}}(%rsp)
+    // CHECK: movl %eax, -{{[0-9]+}}(%rsp)
+    // CHECK: ret
+}
+```
+
+## Current Requirements and Limitations
+
+The tests require Filecheck to be installed somewhere on the `PATH` of the
+build machine; otherwise the tests will be disabled.
+
+Additionally, as mentioned in the previous section, codegen tests are
+inherently non-portable. Currently the tests are limited to:
+
+* x86_64 targets.
+* Compiled with GCC or Clang
+
+Further work could be done, at least on a limited basis, to extend the
+tests to other architectures and compilers (using `CHECK` prefixes).
+
+Furthermore, the tests fail for builds which specify additional flags
+that modify code generation, including `--coverage` or `-fsanitize=`.
+
diff --git a/src/third_party/google_benchmark/docs/_config.yml b/src/third_party/google_benchmark/docs/_config.yml
new file mode 100644
index 0000000..1885487
--- /dev/null
+++ b/src/third_party/google_benchmark/docs/_config.yml
@@ -0,0 +1 @@
+theme: jekyll-theme-midnight
\ No newline at end of file
diff --git a/src/third_party/google_benchmark/docs/tools.md b/src/third_party/google_benchmark/docs/tools.md
new file mode 100644
index 0000000..4a3b2e9
--- /dev/null
+++ b/src/third_party/google_benchmark/docs/tools.md
@@ -0,0 +1,199 @@
+# Benchmark Tools
+
+## compare.py
+
+The `compare.py` script can be used to compare the results of benchmarks.
+
+**NOTE**: the utility relies on the scipy package which can be installed using [these instructions](https://www.scipy.org/install.html).
+
+### Displaying aggregates only
+
+The switch `-a` / `--display_aggregates_only` controls whether the normal
+iterations or only the aggregates are displayed. When passed, it is forwarded
+to the benchmark binaries being run, and is also honored by the tool itself;
+only the aggregates are displayed, not the normal runs. It affects the display
+only; the separate runs are still used to calculate the U test.
+
+### Modes of operation
+
+There are three modes of operation:
+
+1. Just compare two benchmarks
+The program is invoked like:
+
+``` bash
+$ compare.py benchmarks <benchmark_baseline> <benchmark_contender> [benchmark options]...
+```
+Where `<benchmark_baseline>` and `<benchmark_contender>` each specify either a benchmark executable file or a JSON output file. The type of the input file is detected automatically. If a benchmark executable is specified, the benchmark is run to obtain the results; otherwise the results are simply loaded from the output file.
+
+`[benchmark options]` will be passed to the benchmark invocations. They can be anything the binary accepts, whether normal `--benchmark_*` parameters or custom parameters your binary takes.
+
+Example output:
+```
+$ ./compare.py benchmarks ./a.out ./a.out
+RUNNING: ./a.out --benchmark_out=/tmp/tmprBT5nW
+Run on (8 X 4000 MHz CPU s)
+2017-11-07 21:16:44
+------------------------------------------------------
+Benchmark               Time           CPU Iterations
+------------------------------------------------------
+BM_memcpy/8            36 ns         36 ns   19101577   211.669MB/s
+BM_memcpy/64           76 ns         76 ns    9412571   800.199MB/s
+BM_memcpy/512          84 ns         84 ns    8249070   5.64771GB/s
+BM_memcpy/1024        116 ns        116 ns    6181763   8.19505GB/s
+BM_memcpy/8192        643 ns        643 ns    1062855   11.8636GB/s
+BM_copy/8             222 ns        222 ns    3137987   34.3772MB/s
+BM_copy/64           1608 ns       1608 ns     432758   37.9501MB/s
+BM_copy/512         12589 ns      12589 ns      54806   38.7867MB/s
+BM_copy/1024        25169 ns      25169 ns      27713   38.8003MB/s
+BM_copy/8192       201165 ns     201112 ns       3486   38.8466MB/s
+RUNNING: ./a.out --benchmark_out=/tmp/tmpt1wwG_
+Run on (8 X 4000 MHz CPU s)
+2017-11-07 21:16:53
+------------------------------------------------------
+Benchmark               Time           CPU Iterations
+------------------------------------------------------
+BM_memcpy/8            36 ns         36 ns   19397903   211.255MB/s
+BM_memcpy/64           73 ns         73 ns    9691174   839.635MB/s
+BM_memcpy/512          85 ns         85 ns    8312329   5.60101GB/s
+BM_memcpy/1024        118 ns        118 ns    6438774   8.11608GB/s
+BM_memcpy/8192        656 ns        656 ns    1068644   11.6277GB/s
+BM_copy/8             223 ns        223 ns    3146977   34.2338MB/s
+BM_copy/64           1611 ns       1611 ns     435340   37.8751MB/s
+BM_copy/512         12622 ns      12622 ns      54818   38.6844MB/s
+BM_copy/1024        25257 ns      25239 ns      27779   38.6927MB/s
+BM_copy/8192       205013 ns     205010 ns       3479    38.108MB/s
+Comparing ./a.out to ./a.out
+Benchmark                 Time             CPU      Time Old      Time New       CPU Old       CPU New
+------------------------------------------------------------------------------------------------------
+BM_memcpy/8            +0.0020         +0.0020            36            36            36            36
+BM_memcpy/64           -0.0468         -0.0470            76            73            76            73
+BM_memcpy/512          +0.0081         +0.0083            84            85            84            85
+BM_memcpy/1024         +0.0098         +0.0097           116           118           116           118
+BM_memcpy/8192         +0.0200         +0.0203           643           656           643           656
+BM_copy/8              +0.0046         +0.0042           222           223           222           223
+BM_copy/64             +0.0020         +0.0020          1608          1611          1608          1611
+BM_copy/512            +0.0027         +0.0026         12589         12622         12589         12622
+BM_copy/1024           +0.0035         +0.0028         25169         25257         25169         25239
+BM_copy/8192           +0.0191         +0.0194        201165        205013        201112        205010
+```
+
+For every benchmark from the first run, the tool looks for the benchmark with exactly the same name in the second run and then compares the results. If the names differ, the benchmark is omitted from the diff.
+Note that the values in the `Time` and `CPU` columns are calculated as `(new - old) / |old|`.
+
+2. Compare two different filters of one benchmark
+The program is invoked like:
+
+``` bash
+$ compare.py filters <benchmark> <filter_baseline> <filter_contender> [benchmark options]...
+```
+Where `<benchmark>` specifies either a benchmark executable file or a JSON output file. The type of the input file is detected automatically. If a benchmark executable is specified, the benchmark is run to obtain the results; otherwise the results are simply loaded from the output file.
+
+Where `<filter_baseline>` and `<filter_contender>` are the same regex filters that you would pass to the `[--benchmark_filter=<regex>]` parameter of the benchmark binary.
+
+`[benchmark options]` will be passed to the benchmark invocations. They can be anything the binary accepts, whether normal `--benchmark_*` parameters or custom parameters your binary takes.
+
+Example output:
+```
+$ ./compare.py filters ./a.out BM_memcpy BM_copy
+RUNNING: ./a.out --benchmark_filter=BM_memcpy --benchmark_out=/tmp/tmpBWKk0k
+Run on (8 X 4000 MHz CPU s)
+2017-11-07 21:37:28
+------------------------------------------------------
+Benchmark               Time           CPU Iterations
+------------------------------------------------------
+BM_memcpy/8            36 ns         36 ns   17891491   211.215MB/s
+BM_memcpy/64           74 ns         74 ns    9400999   825.646MB/s
+BM_memcpy/512          87 ns         87 ns    8027453   5.46126GB/s
+BM_memcpy/1024        111 ns        111 ns    6116853    8.5648GB/s
+BM_memcpy/8192        657 ns        656 ns    1064679   11.6247GB/s
+RUNNING: ./a.out --benchmark_filter=BM_copy --benchmark_out=/tmp/tmpAvWcOM
+Run on (8 X 4000 MHz CPU s)
+2017-11-07 21:37:33
+----------------------------------------------------
+Benchmark             Time           CPU Iterations
+----------------------------------------------------
+BM_copy/8           227 ns        227 ns    3038700   33.6264MB/s
+BM_copy/64         1640 ns       1640 ns     426893   37.2154MB/s
+BM_copy/512       12804 ns      12801 ns      55417   38.1444MB/s
+BM_copy/1024      25409 ns      25407 ns      27516   38.4365MB/s
+BM_copy/8192     202986 ns     202990 ns       3454   38.4871MB/s
+Comparing BM_memcpy to BM_copy (from ./a.out)
+Benchmark                               Time             CPU      Time Old      Time New       CPU Old       CPU New
+--------------------------------------------------------------------------------------------------------------------
+[BM_memcpy vs. BM_copy]/8            +5.2829         +5.2812            36           227            36           227
+[BM_memcpy vs. BM_copy]/64          +21.1719        +21.1856            74          1640            74          1640
+[BM_memcpy vs. BM_copy]/512        +145.6487       +145.6097            87         12804            87         12801
+[BM_memcpy vs. BM_copy]/1024       +227.1860       +227.1776           111         25409           111         25407
+[BM_memcpy vs. BM_copy]/8192       +308.1664       +308.2898           657        202986           656        202990
+```
+
+As you can see, the filter is applied to the benchmarks both when running them and before doing the diff. To make the diff work, the matched parts of the benchmark names are replaced with a common string, so you can compare two different benchmark families within one benchmark binary.
+Note that the values in the `Time` and `CPU` columns are calculated as `(new - old) / |old|`.
+
+3. Compare filter one from benchmark one to filter two from benchmark two:
+The program is invoked like:
+
+``` bash
+$ compare.py filters <benchmark_baseline> <filter_baseline> <benchmark_contender> <filter_contender> [benchmark options]...
+```
+
+Where `<benchmark_baseline>` and `<benchmark_contender>` each specify either a benchmark executable file or a JSON output file. The type of the input file is detected automatically. If a benchmark executable is specified, the benchmark is run to obtain the results; otherwise the results are simply loaded from the output file.
+
+Where `<filter_baseline>` and `<filter_contender>` are the same regex filters that you would pass to the `[--benchmark_filter=<regex>]` parameter of the benchmark binary.
+
+`[benchmark options]` will be passed to the benchmark invocations. They can be anything the binary accepts, whether normal `--benchmark_*` parameters or custom parameters your binary takes.
+
+Example output:
+```
+$ ./compare.py benchmarksfiltered ./a.out BM_memcpy ./a.out BM_copy
+RUNNING: ./a.out --benchmark_filter=BM_memcpy --benchmark_out=/tmp/tmp_FvbYg
+Run on (8 X 4000 MHz CPU s)
+2017-11-07 21:38:27
+------------------------------------------------------
+Benchmark               Time           CPU Iterations
+------------------------------------------------------
+BM_memcpy/8            37 ns         37 ns   18953482   204.118MB/s
+BM_memcpy/64           74 ns         74 ns    9206578   828.245MB/s
+BM_memcpy/512          91 ns         91 ns    8086195   5.25476GB/s
+BM_memcpy/1024        120 ns        120 ns    5804513   7.95662GB/s
+BM_memcpy/8192        664 ns        664 ns    1028363   11.4948GB/s
+RUNNING: ./a.out --benchmark_filter=BM_copy --benchmark_out=/tmp/tmpDfL5iE
+Run on (8 X 4000 MHz CPU s)
+2017-11-07 21:38:32
+----------------------------------------------------
+Benchmark             Time           CPU Iterations
+----------------------------------------------------
+BM_copy/8           230 ns        230 ns    2985909   33.1161MB/s
+BM_copy/64         1654 ns       1653 ns     419408   36.9137MB/s
+BM_copy/512       13122 ns      13120 ns      53403   37.2156MB/s
+BM_copy/1024      26679 ns      26666 ns      26575   36.6218MB/s
+BM_copy/8192     215068 ns     215053 ns       3221   36.3283MB/s
+Comparing BM_memcpy (from ./a.out) to BM_copy (from ./a.out)
+Benchmark                               Time             CPU      Time Old      Time New       CPU Old       CPU New
+--------------------------------------------------------------------------------------------------------------------
+[BM_memcpy vs. BM_copy]/8            +5.1649         +5.1637            37           230            37           230
+[BM_memcpy vs. BM_copy]/64          +21.4352        +21.4374            74          1654            74          1653
+[BM_memcpy vs. BM_copy]/512        +143.6022       +143.5865            91         13122            91         13120
+[BM_memcpy vs. BM_copy]/1024       +221.5903       +221.4790           120         26679           120         26666
+[BM_memcpy vs. BM_copy]/8192       +322.9059       +323.0096           664        215068           664        215053
+```
+This is a mix of the previous two modes: two (potentially different) benchmark binaries are run, and a different filter is applied to each one.
+Note that the values in the `Time` and `CPU` columns are calculated as `(new - old) / |old|`.
+
+### U test
+
+If the benchmarks were run with a sufficient repetition count, the tool can
+perform a [U Test](https://en.wikipedia.org/wiki/Mann%E2%80%93Whitney_U_test)
+of the null hypothesis that it is equally likely that a randomly selected value
+from one sample will be less than or greater than a randomly selected value
+from a second sample.
+
+If the calculated p-value is lower than the significance level alpha, the
+result is said to be statistically significant and the null hypothesis is
+rejected; in other words, the two benchmarks aren't identical.
+
+**WARNING**: this requires a **LARGE** (no less than 9) number of repetitions
+to be meaningful!
diff --git a/src/third_party/google_benchmark/google_benchmark.gyp b/src/third_party/google_benchmark/google_benchmark.gyp
new file mode 100644
index 0000000..0464259
--- /dev/null
+++ b/src/third_party/google_benchmark/google_benchmark.gyp
@@ -0,0 +1,45 @@
+# Copyright 2019 The Cobalt Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+{
+  'targets': [
+    {
+      'target_name': 'google_benchmark',
+      'type': 'static_library',
+      'sources': [
+        'src/benchmark.cc',
+        'src/benchmark_api_internal.cc',
+        'src/benchmark_name.cc',
+        'src/benchmark_register.cc',
+        'src/benchmark_runner.cc',
+        'src/colorprint_starboard.cc',
+        'src/commandlineflags.cc',
+        'src/complexity.cc',
+        'src/console_reporter.cc',
+        'src/counter.cc',
+        'src/csv_reporter.cc',
+        'src/json_reporter.cc',
+        'src/reporter.cc',
+        'src/sleep.cc',
+        'src/statistics.cc',
+        'src/string_util.cc',
+        'src/sysinfo.cc',
+        'src/timers.cc',
+      ],
+      'include_dirs': [
+        'include',
+      ],
+    },
+  ],
+}
diff --git a/src/third_party/google_benchmark/include/benchmark/benchmark.h b/src/third_party/google_benchmark/include/benchmark/benchmark.h
new file mode 100644
index 0000000..144e212
--- /dev/null
+++ b/src/third_party/google_benchmark/include/benchmark/benchmark.h
@@ -0,0 +1,1586 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Support for registering benchmarks for functions.
+
+/* Example usage:
+// Define a function that executes the code to be measured a
+// specified number of times:
+static void BM_StringCreation(benchmark::State& state) {
+  for (auto _ : state)
+    std::string empty_string;
+}
+
+// Register the function as a benchmark
+BENCHMARK(BM_StringCreation);
+
+// Define another benchmark
+static void BM_StringCopy(benchmark::State& state) {
+  std::string x = "hello";
+  for (auto _ : state)
+    std::string copy(x);
+}
+BENCHMARK(BM_StringCopy);
+
+// Augment the main() program to invoke benchmarks if specified
+// via the --benchmark_filter command line flag.  E.g.,
+//       my_unittest --benchmark_filter=all
+//       my_unittest --benchmark_filter=BM_StringCreation
+//       my_unittest --benchmark_filter=String
+//       my_unittest --benchmark_filter='Copy|Creation'
+int main(int argc, char** argv) {
+  benchmark::Initialize(&argc, argv);
+  benchmark::RunSpecifiedBenchmarks();
+  return 0;
+}
+
+// Sometimes a family of microbenchmarks can be implemented with
+// just one routine that takes an extra argument to specify which
+// one of the family of benchmarks to run.  For example, the following
+// code defines a family of microbenchmarks for measuring the speed
+// of memcpy() calls of different lengths:
+
+static void BM_memcpy(benchmark::State& state) {
+  char* src = new char[state.range(0)]; char* dst = new char[state.range(0)];
+  memset(src, 'x', state.range(0));
+  for (auto _ : state)
+    memcpy(dst, src, state.range(0));
+  state.SetBytesProcessed(state.iterations() * state.range(0));
+  delete[] src; delete[] dst;
+}
+BENCHMARK(BM_memcpy)->Arg(8)->Arg(64)->Arg(512)->Arg(1<<10)->Arg(8<<10);
+
+// The preceding code is quite repetitive, and can be replaced with the
+// following short-hand.  The following invocation will pick a few
+// appropriate arguments in the specified range and will generate a
+// microbenchmark for each such argument.
+BENCHMARK(BM_memcpy)->Range(8, 8<<10);
+
+// You might have a microbenchmark that depends on two inputs.  For
+// example, the following code defines a family of microbenchmarks for
+// measuring the speed of set insertion.
+static void BM_SetInsert(benchmark::State& state) {
+  set<int> data;
+  for (auto _ : state) {
+    state.PauseTiming();
+    data = ConstructRandomSet(state.range(0));
+    state.ResumeTiming();
+    for (int j = 0; j < state.range(1); ++j)
+      data.insert(RandomNumber());
+  }
+}
+BENCHMARK(BM_SetInsert)
+   ->Args({1<<10, 128})
+   ->Args({2<<10, 128})
+   ->Args({4<<10, 128})
+   ->Args({8<<10, 128})
+   ->Args({1<<10, 512})
+   ->Args({2<<10, 512})
+   ->Args({4<<10, 512})
+   ->Args({8<<10, 512});
+
+// The preceding code is quite repetitive, and can be replaced with
+// the following short-hand.  The following macro will pick a few
+// appropriate arguments in the product of the two specified ranges
+// and will generate a microbenchmark for each such pair.
+BENCHMARK(BM_SetInsert)->Ranges({{1<<10, 8<<10}, {128, 512}});
+
+// For more complex patterns of inputs, passing a custom function
+// to Apply allows programmatic specification of an
+// arbitrary set of arguments to run the microbenchmark on.
+// The following example enumerates a dense range on
+// one parameter, and a sparse range on the second.
+static void CustomArguments(benchmark::internal::Benchmark* b) {
+  for (int i = 0; i <= 10; ++i)
+    for (int j = 32; j <= 1024*1024; j *= 8)
+      b->Args({i, j});
+}
+BENCHMARK(BM_SetInsert)->Apply(CustomArguments);
+
+// Templated microbenchmarks work the same way:
+// Produce then consume 'size' messages 'iters' times
+// Measures throughput in the absence of multiprogramming.
+template <class Q> void BM_Sequential(benchmark::State& state) {
+  Q q;
+  typename Q::value_type v;
+  for (auto _ : state) {
+    for (int i = state.range(0); i--; )
+      q.push(v);
+    for (int e = state.range(0); e--; )
+      q.Wait(&v);
+  }
+  // actually messages, not bytes:
+  state.SetBytesProcessed(state.iterations() * state.range(0));
+}
+BENCHMARK_TEMPLATE(BM_Sequential, WaitQueue<int>)->Range(1<<0, 1<<10);
+
+Use `Benchmark::MinTime(double t)` to set the minimum time used to run the
+benchmark. This option overrides the `benchmark_min_time` flag.
+
+void BM_test(benchmark::State& state) {
+ ... body ...
+}
+BENCHMARK(BM_test)->MinTime(2.0); // Run for at least 2 seconds.
+
+In a multithreaded test, it is guaranteed that none of the threads will start
+until all have reached the loop start, and all will have finished before any
+thread exits the loop body. As such, any global setup or teardown you want to
+do can be wrapped in a check against the thread index:
+
+static void BM_MultiThreaded(benchmark::State& state) {
+  if (state.thread_index == 0) {
+    // Setup code here.
+  }
+  for (auto _ : state) {
+    // Run the test as normal.
+  }
+  if (state.thread_index == 0) {
+    // Teardown code here.
+  }
+}
+BENCHMARK(BM_MultiThreaded)->Threads(4);
+
+
+If a benchmark runs for a few milliseconds, it may be hard to visually compare
+the measured times, since the output data is given in nanoseconds by default.
+To make the output easier to read, you can set the time unit manually:
+
+BENCHMARK(BM_test)->Unit(benchmark::kMillisecond);
+*/
+
+#ifndef BENCHMARK_BENCHMARK_H_
+#define BENCHMARK_BENCHMARK_H_
+
+// The _MSVC_LANG check should detect Visual Studio 2015 Update 3 and newer.
+#if __cplusplus >= 201103L || (defined(_MSVC_LANG) && _MSVC_LANG >= 201103L)
+#define BENCHMARK_HAS_CXX11
+#endif
+
+#include <stdint.h>
+
+#include <algorithm>
+#include <cassert>
+#include <cstddef>
+#include <iosfwd>
+#include <map>
+#include <set>
+#include <string>
+#include <vector>
+
+#if defined(BENCHMARK_HAS_CXX11)
+#include <initializer_list>
+#include <type_traits>
+#include <utility>
+#endif
+
+#if defined(_MSC_VER)
+#include <intrin.h>  // for _ReadWriteBarrier
+#endif
+
+#ifndef BENCHMARK_HAS_CXX11
+#define BENCHMARK_DISALLOW_COPY_AND_ASSIGN(TypeName) \
+  TypeName(const TypeName&);                         \
+  TypeName& operator=(const TypeName&)
+#else
+#define BENCHMARK_DISALLOW_COPY_AND_ASSIGN(TypeName) \
+  TypeName(const TypeName&) = delete;                \
+  TypeName& operator=(const TypeName&) = delete
+#endif
+
+#if defined(__GNUC__)
+#define BENCHMARK_UNUSED __attribute__((unused))
+#define BENCHMARK_ALWAYS_INLINE __attribute__((always_inline))
+#define BENCHMARK_NOEXCEPT noexcept
+#define BENCHMARK_NOEXCEPT_OP(x) noexcept(x)
+#elif defined(_MSC_VER) && !defined(__clang__)
+#define BENCHMARK_UNUSED
+#define BENCHMARK_ALWAYS_INLINE __forceinline
+#if _MSC_VER >= 1900
+#define BENCHMARK_NOEXCEPT noexcept
+#define BENCHMARK_NOEXCEPT_OP(x) noexcept(x)
+#else
+#define BENCHMARK_NOEXCEPT
+#define BENCHMARK_NOEXCEPT_OP(x)
+#endif
+#define __func__ __FUNCTION__
+#else
+#define BENCHMARK_UNUSED
+#define BENCHMARK_ALWAYS_INLINE
+#define BENCHMARK_NOEXCEPT
+#define BENCHMARK_NOEXCEPT_OP(x)
+#endif
+
+#define BENCHMARK_INTERNAL_TOSTRING2(x) #x
+#define BENCHMARK_INTERNAL_TOSTRING(x) BENCHMARK_INTERNAL_TOSTRING2(x)
+
+#if defined(__GNUC__) || defined(__clang__)
+#define BENCHMARK_BUILTIN_EXPECT(x, y) __builtin_expect(x, y)
+#define BENCHMARK_DEPRECATED_MSG(msg) __attribute__((deprecated(msg)))
+#else
+#define BENCHMARK_BUILTIN_EXPECT(x, y) x
+#define BENCHMARK_DEPRECATED_MSG(msg)
+#define BENCHMARK_WARNING_MSG(msg)                           \
+  __pragma(message(__FILE__ "(" BENCHMARK_INTERNAL_TOSTRING( \
+      __LINE__) ") : warning note: " msg))
+#endif
+
+#if defined(__GNUC__) && !defined(__clang__)
+#define BENCHMARK_GCC_VERSION (__GNUC__ * 100 + __GNUC_MINOR__)
+#endif
+
+#ifndef __has_builtin
+#define __has_builtin(x) 0
+#endif
+
+#if defined(__GNUC__) || __has_builtin(__builtin_unreachable)
+#define BENCHMARK_UNREACHABLE() __builtin_unreachable()
+#elif defined(_MSC_VER)
+#define BENCHMARK_UNREACHABLE() __assume(false)
+#else
+#define BENCHMARK_UNREACHABLE() ((void)0)
+#endif
+
+namespace benchmark {
+class BenchmarkReporter;
+class MemoryManager;
+
+void Initialize(int* argc, char** argv);
+
+// Report to stdout all arguments in 'argv' as unrecognized except the first.
+// Returns true if there is at least one unrecognized argument (i.e. 'argc' > 1).
+bool ReportUnrecognizedArguments(int argc, char** argv);
+
+// Generate a list of benchmarks matching the specified --benchmark_filter flag
+// and if --benchmark_list_tests is specified return after printing the name
+// of each matching benchmark. Otherwise run each matching benchmark and
+// report the results.
+//
+// The second and third overloads use the specified 'display_reporter' and
+// 'file_reporter' respectively. 'file_reporter' will write to the file
+// specified by '--benchmark_output'. If '--benchmark_output' is not given,
+// the 'file_reporter' is ignored.
+//
+// RETURNS: The number of matching benchmarks.
+size_t RunSpecifiedBenchmarks();
+size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter);
+size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter,
+                              BenchmarkReporter* file_reporter);
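+
+// A minimal illustrative sketch of a main() that uses the reporter overload
+// (assuming ConsoleReporter, which is declared later in this header, as the
+// display reporter):
+//
+//   int main(int argc, char** argv) {
+//     benchmark::Initialize(&argc, argv);
+//     if (benchmark::ReportUnrecognizedArguments(argc, argv)) return 1;
+//     benchmark::ConsoleReporter display_reporter;
+//     benchmark::RunSpecifiedBenchmarks(&display_reporter);
+//     return 0;
+//   }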
+
+// Register a MemoryManager instance that will be used to collect and report
+// allocation measurements for benchmark runs.
+void RegisterMemoryManager(MemoryManager* memory_manager);
+
+namespace internal {
+class Benchmark;
+class BenchmarkImp;
+class BenchmarkFamilies;
+
+void UseCharPointer(char const volatile*);
+
+// Take ownership of the pointer and register the benchmark. Return the
+// registered benchmark.
+Benchmark* RegisterBenchmarkInternal(Benchmark*);
+
+// Ensure that the standard streams are properly initialized in every TU.
+int InitializeStreams();
+BENCHMARK_UNUSED static int stream_init_anchor = InitializeStreams();
+
+}  // namespace internal
+
+#if (!defined(__GNUC__) && !defined(__clang__)) || defined(__pnacl__) || \
+    defined(__EMSCRIPTEN__)
+#define BENCHMARK_HAS_NO_INLINE_ASSEMBLY
+#endif
+
+// The DoNotOptimize(...) function can be used to prevent a value or
+// expression from being optimized away by the compiler. This function is
+// intended to add little to no overhead.
+// See: https://youtu.be/nXaxk27zwlk?t=2441
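+//
+// A minimal illustrative sketch (benchmark name is hypothetical) of typical
+// usage inside the benchmark loop:
+//
+//   static void BM_Increment(benchmark::State& state) {
+//     int x = 0;
+//     for (auto _ : state) {
+//       benchmark::DoNotOptimize(x += 1);  // keep the addition observable
+//       benchmark::ClobberMemory();        // flush pending writes to memory
+//     }
+//   }
+//   BENCHMARK(BM_Increment);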
+#ifndef BENCHMARK_HAS_NO_INLINE_ASSEMBLY
+template <class Tp>
+inline BENCHMARK_ALWAYS_INLINE void DoNotOptimize(Tp const& value) {
+  asm volatile("" : : "r,m"(value) : "memory");
+}
+
+template <class Tp>
+inline BENCHMARK_ALWAYS_INLINE void DoNotOptimize(Tp& value) {
+#if defined(__clang__)
+  asm volatile("" : "+r,m"(value) : : "memory");
+#else
+  asm volatile("" : "+m,r"(value) : : "memory");
+#endif
+}
+
+// Force the compiler to flush pending writes to global memory. Acts as an
+// effective read/write barrier
+inline BENCHMARK_ALWAYS_INLINE void ClobberMemory() {
+  asm volatile("" : : : "memory");
+}
+#elif defined(_MSC_VER)
+template <class Tp>
+inline BENCHMARK_ALWAYS_INLINE void DoNotOptimize(Tp const& value) {
+  internal::UseCharPointer(&reinterpret_cast<char const volatile&>(value));
+  _ReadWriteBarrier();
+}
+
+inline BENCHMARK_ALWAYS_INLINE void ClobberMemory() { _ReadWriteBarrier(); }
+#else
+template <class Tp>
+inline BENCHMARK_ALWAYS_INLINE void DoNotOptimize(Tp const& value) {
+  internal::UseCharPointer(&reinterpret_cast<char const volatile&>(value));
+}
+// FIXME Add ClobberMemory() for non-gnu and non-msvc compilers
+#endif
+
+// This class is used for user-defined counters.
+class Counter {
+ public:
+  enum Flags {
+    kDefaults = 0,
+    // Mark the counter as a rate. It will be presented divided
+    // by the duration of the benchmark.
+    kIsRate = 1U << 0U,
+    // Mark the counter as a thread-average quantity. It will be
+    // presented divided by the number of threads.
+    kAvgThreads = 1U << 1U,
+    // Mark the counter as a thread-average rate. See above.
+    kAvgThreadsRate = kIsRate | kAvgThreads,
+    // Mark the counter as a constant value, valid/same for *every* iteration.
+    // When reporting, it will be *multiplied* by the iteration count.
+    kIsIterationInvariant = 1U << 2U,
+    // Mark the counter as a constant rate.
+    // When reporting, it will be *multiplied* by the iteration count
+    // and then divided by the duration of the benchmark.
+    kIsIterationInvariantRate = kIsRate | kIsIterationInvariant,
+    // Mark the counter as an iteration-average quantity.
+    // It will be presented divided by the number of iterations.
+    kAvgIterations = 1U << 3U,
+    // Mark the counter as an iteration-average rate. See above.
+    kAvgIterationsRate = kIsRate | kAvgIterations,
+
+    // In the end, invert the result. This is always done last!
+    kInvert = 1U << 31U
+  };
+
+  enum OneK {
+    // 1'000 items per 1k
+    kIs1000 = 1000,
+    // 1'024 items per 1k
+    kIs1024 = 1024
+  };
+
+  double value;
+  Flags flags;
+  OneK oneK;
+
+  BENCHMARK_ALWAYS_INLINE
+  Counter(double v = 0., Flags f = kDefaults, OneK k = kIs1000)
+      : value(v), flags(f), oneK(k) {}
+
+  BENCHMARK_ALWAYS_INLINE operator double const&() const { return value; }
+  BENCHMARK_ALWAYS_INLINE operator double&() { return value; }
+};
+
+// A helper for user code to create unforeseen combinations of Flags, without
+// having to do this cast manually each time, or providing this operator.
+Counter::Flags inline operator|(const Counter::Flags& LHS,
+                                const Counter::Flags& RHS) {
+  return static_cast<Counter::Flags>(static_cast<int>(LHS) |
+                                     static_cast<int>(RHS));
+}
+
+// This is the container for the user-defined counters.
+typedef std::map<std::string, Counter> UserCounters;
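+
+// A minimal illustrative sketch (benchmark and counter names are hypothetical)
+// of populating a user-defined counter that is reported as a rate:
+//
+//   static void BM_Parse(benchmark::State& state) {
+//     for (auto _ : state) {
+//       // ... parse one document per iteration ...
+//     }
+//     // Reported as documents per second because of kIsRate.
+//     state.counters["docs"] =
+//         Counter(static_cast<double>(state.iterations()), Counter::kIsRate);
+//   }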
+
+// TimeUnit is passed to a benchmark in order to specify the order of magnitude
+// for the measured time.
+enum TimeUnit { kNanosecond, kMicrosecond, kMillisecond };
+
+// BigO is passed to a benchmark in order to specify the asymptotic
+// computational complexity for the benchmark. In case oAuto is selected,
+// complexity will be calculated automatically to the best fit.
+enum BigO { oNone, o1, oN, oNSquared, oNCubed, oLogN, oNLogN, oAuto, oLambda };
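+
+// A minimal illustrative sketch (benchmark name is hypothetical; it assumes
+// the Complexity() and RangeMultiplier() setters declared later in this
+// header) of requesting a complexity report; SetComplexityN() supplies N:
+//
+//   static void BM_Sort(benchmark::State& state) {
+//     std::vector<int> v(state.range(0));
+//     for (auto _ : state) {
+//       std::generate(v.begin(), v.end(), std::rand);
+//       std::sort(v.begin(), v.end());
+//     }
+//     state.SetComplexityN(state.range(0));
+//   }
+//   BENCHMARK(BM_Sort)->RangeMultiplier(2)->Range(1 << 10, 1 << 18)
+//       ->Complexity(benchmark::oNLogN);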
+
+typedef uint64_t IterationCount;
+
+// BigOFunc is passed to a benchmark in order to specify the asymptotic
+// computational complexity for the benchmark.
+typedef double(BigOFunc)(IterationCount);
+
+// StatisticsFunc is passed to a benchmark in order to compute some descriptive
+// statistics over all the measurements of some type
+typedef double(StatisticsFunc)(const std::vector<double>&);
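+
+// A minimal illustrative sketch (names are hypothetical; it assumes the
+// Repetitions() and ComputeStatistics() setters declared later in this header)
+// of adding a custom "max" statistic across repetitions:
+//
+//   double MyMax(const std::vector<double>& v) {
+//     return *std::max_element(v.begin(), v.end());
+//   }
+//   BENCHMARK(BM_Something)->Repetitions(10)->ComputeStatistics("max", MyMax);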
+
+namespace internal {
+struct Statistics {
+  std::string name_;
+  StatisticsFunc* compute_;
+
+  Statistics(const std::string& name, StatisticsFunc* compute)
+      : name_(name), compute_(compute) {}
+};
+
+struct BenchmarkInstance;
+class ThreadTimer;
+class ThreadManager;
+
+enum AggregationReportMode
+#if defined(BENCHMARK_HAS_CXX11)
+    : unsigned
+#else
+#endif
+{
+  // The mode has not been manually specified
+  ARM_Unspecified = 0,
+  // The mode is user-specified.
+  // This may or may not be set when the following bit-flags are set.
+  ARM_Default = 1U << 0U,
+  // File reporter should only output aggregates.
+  ARM_FileReportAggregatesOnly = 1U << 1U,
+  // Display reporter should only output aggregates
+  ARM_DisplayReportAggregatesOnly = 1U << 2U,
+  // Both reporters should only display aggregates.
+  ARM_ReportAggregatesOnly =
+      ARM_FileReportAggregatesOnly | ARM_DisplayReportAggregatesOnly
+};
+
+}  // namespace internal
+
+// State is passed to a running Benchmark and contains state for the
+// benchmark to use.
+class State {
+ public:
+  struct StateIterator;
+  friend struct StateIterator;
+
+  // Returns iterators used to run each iteration of a benchmark using a
+  // C++11 range-based for loop. These functions should not be called directly.
+  //
+  // REQUIRES: The benchmark has not started running yet. Neither begin nor end
+  // have been called previously.
+  //
+  // NOTE: KeepRunning may not be used after calling either of these functions.
+  BENCHMARK_ALWAYS_INLINE StateIterator begin();
+  BENCHMARK_ALWAYS_INLINE StateIterator end();
+
+  // Returns true if the benchmark should continue through another iteration.
+  // NOTE: A benchmark may not return from the test until KeepRunning() has
+  // returned false.
+  bool KeepRunning();
+
+  // Returns true iff the benchmark should run n more iterations.
+  // REQUIRES: 'n' > 0.
+  // NOTE: A benchmark must not return from the test until KeepRunningBatch()
+  // has returned false.
+  // NOTE: KeepRunningBatch() may overshoot by up to 'n' iterations.
+  //
+  // Intended usage:
+  //   while (state.KeepRunningBatch(1000)) {
+  //     // process 1000 elements
+  //   }
+  bool KeepRunningBatch(IterationCount n);
+
+  // REQUIRES: timer is running and 'SkipWithError(...)' has not been called
+  //           by the current thread.
+  // Stop the benchmark timer.  If not called, the timer will be
+  // automatically stopped after the last iteration of the benchmark loop.
+  //
+  // For threaded benchmarks the PauseTiming() function only pauses the timing
+  // for the current thread.
+  //
+  // NOTE: The "real time" measurement is per-thread. If different threads
+  // report different measurements the largest one is reported.
+  //
+  // NOTE: PauseTiming()/ResumeTiming() are relatively
+  // heavyweight, and so their use should generally be avoided
+  // within each benchmark iteration, if possible.
+  void PauseTiming();
+
+  // REQUIRES: timer is not running and 'SkipWithError(...)' has not been called
+  //           by the current thread.
+  // Start the benchmark timer.  The timer is NOT running on entrance to the
+  // benchmark function. It begins running after control flow enters the
+  // benchmark loop.
+  //
+  // NOTE: PauseTiming()/ResumeTiming() are relatively
+  // heavyweight, and so their use should generally be avoided
+  // within each benchmark iteration, if possible.
+  void ResumeTiming();
+
+  // REQUIRES: 'SkipWithError(...)' has not been called previously by the
+  //            current thread.
+  // Report the benchmark as resulting in an error with the specified 'msg'.
+  // After this call the user may explicitly 'return' from the benchmark.
+  //
+  // If the ranged-for style of benchmark loop is used, the user must explicitly
+  // break from the loop, otherwise all future iterations will be run.
+  // If the 'KeepRunning()' loop is used the current thread will automatically
+  // exit the loop at the end of the current iteration.
+  //
+  // For threaded benchmarks only the current thread stops executing and future
+  // calls to `KeepRunning()` will block until all threads have completed
+  // the `KeepRunning()` loop. If multiple threads report an error only the
+  // first error message is used.
+  //
+  // NOTE: Calling 'SkipWithError(...)' does not cause the benchmark to exit
+  // the current scope immediately. If the function is called from within
+  // the 'KeepRunning()' loop the current iteration will finish. It is the users
+  // responsibility to exit the scope as needed.
+  void SkipWithError(const char* msg);
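+
+  // A minimal illustrative sketch (names are hypothetical) of reporting an
+  // error and leaving the benchmark early:
+  //
+  //   static void BM_WithPrecondition(benchmark::State& state) {
+  //     if (!PreconditionHolds()) {  // hypothetical check
+  //       state.SkipWithError("precondition failed");
+  //       return;  // the benchmark loop is never entered
+  //     }
+  //     for (auto _ : state) {
+  //       // ... benchmark body ...
+  //     }
+  //   }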
+
+  // REQUIRES: called exactly once per iteration of the benchmarking loop.
+  // Set the manually measured time for this benchmark iteration, which
+  // is used instead of automatically measured time if UseManualTime() was
+  // specified.
+  //
+  // For threaded benchmarks the final value will be set to the largest of
+  // the reported values.
+  void SetIterationTime(double seconds);
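+
+  // A minimal illustrative sketch (benchmark name is hypothetical; it assumes
+  // the UseManualTime() setter declared later in this header) of manual
+  // timing:
+  //
+  //   static void BM_ManuallyTimed(benchmark::State& state) {
+  //     for (auto _ : state) {
+  //       auto start = std::chrono::high_resolution_clock::now();
+  //       // ... timed work ...
+  //       auto end = std::chrono::high_resolution_clock::now();
+  //       state.SetIterationTime(
+  //           std::chrono::duration<double>(end - start).count());
+  //     }
+  //   }
+  //   BENCHMARK(BM_ManuallyTimed)->UseManualTime();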
+
+  // Set the number of bytes processed by the current benchmark
+  // execution.  This routine is typically called once at the end of a
+  // throughput oriented benchmark.
+  //
+  // REQUIRES: a benchmark has exited its benchmarking loop.
+  BENCHMARK_ALWAYS_INLINE
+  void SetBytesProcessed(int64_t bytes) {
+    counters["bytes_per_second"] =
+        Counter(static_cast<double>(bytes), Counter::kIsRate, Counter::kIs1024);
+  }
+
+  BENCHMARK_ALWAYS_INLINE
+  int64_t bytes_processed() const {
+    if (counters.find("bytes_per_second") != counters.end())
+      return static_cast<int64_t>(counters.at("bytes_per_second"));
+    return 0;
+  }
+
+  // If this routine is called with complexity_n > 0 and a complexity report
+  // is requested for the family benchmark, then the current benchmark will be
+  // part of the computation and complexity_n will represent the length of N.
+  BENCHMARK_ALWAYS_INLINE
+  void SetComplexityN(int64_t complexity_n) { complexity_n_ = complexity_n; }
+
+  BENCHMARK_ALWAYS_INLINE
+  int64_t complexity_length_n() const { return complexity_n_; }
+
+  // If this routine is called with items > 0, then an items/s
+  // label is printed on the benchmark report line for the currently
+  // executing benchmark. It is typically called at the end of a processing
+  // benchmark where a processing items/second output is desired.
+  //
+  // REQUIRES: a benchmark has exited its benchmarking loop.
+  BENCHMARK_ALWAYS_INLINE
+  void SetItemsProcessed(int64_t items) {
+    counters["items_per_second"] =
+        Counter(static_cast<double>(items), benchmark::Counter::kIsRate);
+  }
+
+  BENCHMARK_ALWAYS_INLINE
+  int64_t items_processed() const {
+    if (counters.find("items_per_second") != counters.end())
+      return static_cast<int64_t>(counters.at("items_per_second"));
+    return 0;
+  }
+
+  // If this routine is called, the specified label is printed at the
+  // end of the benchmark report line for the currently executing
+  // benchmark.  Example:
+  //  static void BM_Compress(benchmark::State& state) {
+  //    ...
+  //    double compression = input_size / output_size;
+  //    state.SetLabel(StrFormat("compress:%.1f%%", 100.0*compression));
+  //  }
+  // Produces output that looks like:
+  //  BM_Compress   50         50   14115038  compress:27.3%
+  //
+  // REQUIRES: a benchmark has exited its benchmarking loop.
+  void SetLabel(const char* label);
+
+  void BENCHMARK_ALWAYS_INLINE SetLabel(const std::string& str) {
+    this->SetLabel(str.c_str());
+  }
+
+  // Range arguments for this run. CHECKs if the argument has been set.
+  BENCHMARK_ALWAYS_INLINE
+  int64_t range(std::size_t pos = 0) const {
+    assert(range_.size() > pos);
+    return range_[pos];
+  }
+
+  BENCHMARK_DEPRECATED_MSG("use 'range(0)' instead")
+  int64_t range_x() const { return range(0); }
+
+  BENCHMARK_DEPRECATED_MSG("use 'range(1)' instead")
+  int64_t range_y() const { return range(1); }
+
+  BENCHMARK_ALWAYS_INLINE
+  IterationCount iterations() const {
+    if (BENCHMARK_BUILTIN_EXPECT(!started_, false)) {
+      return 0;
+    }
+    return max_iterations - total_iterations_ + batch_leftover_;
+  }
+
+ private:
+  // items we expect on the first cache line (ie 64 bytes of the struct)
+  // When total_iterations_ is 0, KeepRunning() and friends will return false.
+  // May be larger than max_iterations.
+  IterationCount total_iterations_;
+
+  // When using KeepRunningBatch(), batch_leftover_ holds the number of
+  // iterations beyond max_iters that were run. Used to track
+  // completed_iterations_ accurately.
+  IterationCount batch_leftover_;
+
+ public:
+  const IterationCount max_iterations;
+
+ private:
+  bool started_;
+  bool finished_;
+  bool error_occurred_;
+
+ private:  // items we don't need on the first cache line
+  std::vector<int64_t> range_;
+
+  int64_t complexity_n_;
+
+ public:
+  // Container for user-defined counters.
+  UserCounters counters;
+  // Index of the executing thread. Values from [0, threads).
+  const int thread_index;
+  // Number of threads concurrently executing the benchmark.
+  const int threads;
+
+ private:
+  State(IterationCount max_iters, const std::vector<int64_t>& ranges,
+        int thread_i, int n_threads, internal::ThreadTimer* timer,
+        internal::ThreadManager* manager);
+
+  void StartKeepRunning();
+  // Implementation of KeepRunning() and KeepRunningBatch().
+  // is_batch must be true unless n is 1.
+  bool KeepRunningInternal(IterationCount n, bool is_batch);
+  void FinishKeepRunning();
+  internal::ThreadTimer* timer_;
+  internal::ThreadManager* manager_;
+
+  friend struct internal::BenchmarkInstance;
+};
+
+inline BENCHMARK_ALWAYS_INLINE bool State::KeepRunning() {
+  return KeepRunningInternal(1, /*is_batch=*/false);
+}
+
+inline BENCHMARK_ALWAYS_INLINE bool State::KeepRunningBatch(IterationCount n) {
+  return KeepRunningInternal(n, /*is_batch=*/true);
+}
+
+inline BENCHMARK_ALWAYS_INLINE bool State::KeepRunningInternal(IterationCount n,
+                                                               bool is_batch) {
+  // total_iterations_ is set to 0 by the constructor, and always set to a
+  // nonzero value by StartKeepRunning().
+  assert(n > 0);
+  // n must be 1 unless is_batch is true.
+  assert(is_batch || n == 1);
+  if (BENCHMARK_BUILTIN_EXPECT(total_iterations_ >= n, true)) {
+    total_iterations_ -= n;
+    return true;
+  }
+  if (!started_) {
+    StartKeepRunning();
+    if (!error_occurred_ && total_iterations_ >= n) {
+      total_iterations_ -= n;
+      return true;
+    }
+  }
+  // For non-batch runs, total_iterations_ must be 0 by now.
+  if (is_batch && total_iterations_ != 0) {
+    batch_leftover_ = n - total_iterations_;
+    total_iterations_ = 0;
+    return true;
+  }
+  FinishKeepRunning();
+  return false;
+}
+
+struct State::StateIterator {
+  struct BENCHMARK_UNUSED Value {};
+  typedef std::forward_iterator_tag iterator_category;
+  typedef Value value_type;
+  typedef Value reference;
+  typedef Value pointer;
+  typedef std::ptrdiff_t difference_type;
+
+ private:
+  friend class State;
+  BENCHMARK_ALWAYS_INLINE
+  StateIterator() : cached_(0), parent_() {}
+
+  BENCHMARK_ALWAYS_INLINE
+  explicit StateIterator(State* st)
+      : cached_(st->error_occurred_ ? 0 : st->max_iterations), parent_(st) {}
+
+ public:
+  BENCHMARK_ALWAYS_INLINE
+  Value operator*() const { return Value(); }
+
+  BENCHMARK_ALWAYS_INLINE
+  StateIterator& operator++() {
+    assert(cached_ > 0);
+    --cached_;
+    return *this;
+  }
+
+  BENCHMARK_ALWAYS_INLINE
+  bool operator!=(StateIterator const&) const {
+    if (BENCHMARK_BUILTIN_EXPECT(cached_ != 0, true)) return true;
+    parent_->FinishKeepRunning();
+    return false;
+  }
+
+ private:
+  IterationCount cached_;
+  State* const parent_;
+};
+
+inline BENCHMARK_ALWAYS_INLINE State::StateIterator State::begin() {
+  return StateIterator(this);
+}
+inline BENCHMARK_ALWAYS_INLINE State::StateIterator State::end() {
+  StartKeepRunning();
+  return StateIterator();
+}
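+
+// Illustrative sketch of the measurement-loop forms driven by the machinery
+// above (the function names are placeholders; a given State object drives
+// exactly one such loop):
+//   static void BM_RangeFor(benchmark::State& state) {
+//     for (auto _ : state) { /* code under test */ }
+//   }
+//   static void BM_Explicit(benchmark::State& state) {
+//     while (state.KeepRunning()) { /* code under test */ }
+//   }
+//   static void BM_Batched(benchmark::State& state) {
+//     while (state.KeepRunningBatch(64)) { /* 64 iterations' worth of work */ }
+//   }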
+
+namespace internal {
+
+typedef void(Function)(State&);
+
+// ------------------------------------------------------
+// Benchmark registration object.  The BENCHMARK() macro expands
+// into an internal::Benchmark* object.  Various methods can
+// be called on this object to change the properties of the benchmark.
+// Each method returns "this" so that multiple method calls can be
+// chained into one expression.
+class Benchmark {
+ public:
+  virtual ~Benchmark();
+
+  // Note: the following methods all return "this" so that multiple
+  // method calls can be chained together in one expression.
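+  // For example (BM_memcpy is a placeholder benchmark function):
+  //   BENCHMARK(BM_memcpy)->Arg(8)->Arg(1024)->Unit(benchmark::kMicrosecond);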
+
+  // Run this benchmark once with "x" as the extra argument passed
+  // to the function.
+  // REQUIRES: The function passed to the constructor must accept an arg1.
+  Benchmark* Arg(int64_t x);
+
+  // Run this benchmark with the given time unit for the generated output report
+  Benchmark* Unit(TimeUnit unit);
+
+  // Run this benchmark once for a number of values picked from the
+  // range [start..limit].  (start and limit are always picked.)
+  // REQUIRES: The function passed to the constructor must accept an arg1.
+  Benchmark* Range(int64_t start, int64_t limit);
+
+  // Run this benchmark once for all values in the range [start..limit] with
+  // specific step
+  // REQUIRES: The function passed to the constructor must accept an arg1.
+  Benchmark* DenseRange(int64_t start, int64_t limit, int step = 1);
+
+  // Run this benchmark once with "args" as the extra arguments passed
+  // to the function.
+  // REQUIRES: The function passed to the constructor must accept arg1, arg2 ...
+  Benchmark* Args(const std::vector<int64_t>& args);
+
+  // Equivalent to Args({x, y})
+  // NOTE: This is a legacy C++03 interface provided for compatibility only.
+  //   New code should use 'Args'.
+  Benchmark* ArgPair(int64_t x, int64_t y) {
+    std::vector<int64_t> args;
+    args.push_back(x);
+    args.push_back(y);
+    return Args(args);
+  }
+
+  // Run this benchmark once for a number of values picked from the
+  // ranges [start..limit].  (starts and limits are always picked.)
+  // REQUIRES: The function passed to the constructor must accept arg1, arg2 ...
+  Benchmark* Ranges(const std::vector<std::pair<int64_t, int64_t> >& ranges);
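+  // Illustrative sketch (BM_SetInsert is a placeholder): register runs over a
+  // two-dimensional argument grid and read the arguments back with range():
+  //   BENCHMARK(BM_SetInsert)->Ranges({{1 << 10, 8 << 10}, {128, 512}});
+  //   // ...inside the benchmark body: state.range(0) and state.range(1).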
+
+  // Equivalent to ArgNames({name})
+  Benchmark* ArgName(const std::string& name);
+
+  // Set the argument names to display in the benchmark name. If not called,
+  // only argument values will be shown.
+  Benchmark* ArgNames(const std::vector<std::string>& names);
+
+  // Equivalent to Ranges({{lo1, hi1}, {lo2, hi2}}).
+  // NOTE: This is a legacy C++03 interface provided for compatibility only.
+  //   New code should use 'Ranges'.
+  Benchmark* RangePair(int64_t lo1, int64_t hi1, int64_t lo2, int64_t hi2) {
+    std::vector<std::pair<int64_t, int64_t> > ranges;
+    ranges.push_back(std::make_pair(lo1, hi1));
+    ranges.push_back(std::make_pair(lo2, hi2));
+    return Ranges(ranges);
+  }
+
+  // Pass this benchmark object to *func, which can customize
+  // the benchmark by calling various methods like Arg, Args,
+  // Threads, etc.
+  Benchmark* Apply(void (*func)(Benchmark* benchmark));
+
+  // Set the range multiplier for non-dense range. If not called, the range
+  // multiplier kRangeMultiplier will be used.
+  Benchmark* RangeMultiplier(int multiplier);
+
+  // Set the minimum amount of time to use when running this benchmark. This
+  // option overrides the `benchmark_min_time` flag.
+  // REQUIRES: `t > 0` and `Iterations` has not been called on this benchmark.
+  Benchmark* MinTime(double t);
+
+  // Specify the number of iterations that should be run by this benchmark.
+  // REQUIRES: 'n > 0' and `MinTime` has not been called on this benchmark.
+  //
+  // NOTE: This function should only be used when *exact* iteration control is
+  //   needed and never to control or limit how long a benchmark runs, where
+  // `--benchmark_min_time=N` or `MinTime(...)` should be used instead.
+  Benchmark* Iterations(IterationCount n);
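+  // Illustrative only (BM_Checksum is a placeholder):
+  //   BENCHMARK(BM_Checksum)->Iterations(1000);  // run exactly 1000 iterations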
+
+  // Specify the number of times to repeat this benchmark. This option overrides
+  // the `benchmark_repetitions` flag.
+  // REQUIRES: `n > 0`
+  Benchmark* Repetitions(int n);
+
+  // Specify if each repetition of the benchmark should be reported separately
+  // or if only the final statistics should be reported. If the benchmark
+  // is not repeated then the single result is always reported.
+  // Applies to *ALL* reporters (display and file).
+  Benchmark* ReportAggregatesOnly(bool value = true);
+
+  // Same as ReportAggregatesOnly(), but applies to display reporter only.
+  Benchmark* DisplayAggregatesOnly(bool value = true);
+
+  // By default, the CPU time is measured only for the main thread, which may
+  // be unrepresentative if the benchmark uses threads internally. If called,
+  // the total CPU time spent by all the threads will be measured instead.
+  Benchmark* MeasureProcessCPUTime();
+
+  // If a particular benchmark should use the Wall clock instead of the CPU time
+  // (be it either the CPU time of the main thread only (default), or the
+  // total CPU usage of the benchmark), call this method. If called, the elapsed
+  // (wall) time will be used to control how many iterations are run, and in the
+  // printing of items/second or MB/seconds values.
+  // If not called, the CPU time used by the benchmark will be used.
+  Benchmark* UseRealTime();
+
+  // If a benchmark must measure time manually (e.g. if GPU execution time is
+  // being measured), call this method. If called, each benchmark iteration
+  // should call SetIterationTime(seconds) to report the measured time, which
+  // will be used to control how many iterations are run, and in the printing
+  // of items/second or MB/second values.
+  Benchmark* UseManualTime();
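+  // A hedged sketch of manual timing; BM_ExternalWork and LaunchAndWait are
+  // placeholders, and <chrono> is assumed to be available:
+  //   static void BM_ExternalWork(benchmark::State& state) {
+  //     for (auto _ : state) {
+  //       auto start = std::chrono::high_resolution_clock::now();
+  //       LaunchAndWait();  // externally timed work, e.g. a GPU kernel
+  //       auto end = std::chrono::high_resolution_clock::now();
+  //       state.SetIterationTime(
+  //           std::chrono::duration<double>(end - start).count());
+  //     }
+  //   }
+  //   BENCHMARK(BM_ExternalWork)->UseManualTime();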
+
+  // Set the asymptotic computational complexity for the benchmark. If called
+  // the asymptotic computational complexity will be shown on the output.
+  Benchmark* Complexity(BigO complexity = benchmark::oAuto);
+
+  // Set the asymptotic computational complexity for the benchmark. If called
+  // the asymptotic computational complexity will be shown on the output.
+  Benchmark* Complexity(BigOFunc* complexity);
+
+  // Add this statistic to be computed over all the values of the benchmark run
+  Benchmark* ComputeStatistics(std::string name, StatisticsFunc* statistics);
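+  // Illustrative sketch: attach a "max" statistic to repeated runs. BM_Spin is
+  // a placeholder and <algorithm> is assumed to be available:
+  //   BENCHMARK(BM_Spin)->Repetitions(10)->ComputeStatistics(
+  //       "max", [](const std::vector<double>& v) -> double {
+  //         return *std::max_element(v.begin(), v.end());
+  //       });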
+
+  // Support for running multiple copies of the same benchmark concurrently
+  // in multiple threads.  This may be useful when measuring the scaling
+  // of some piece of code.
+
+  // Run one instance of this benchmark concurrently in t threads.
+  Benchmark* Threads(int t);
+
+  // Pick a set of values T from [min_threads,max_threads].
+  // min_threads and max_threads are always included in T.  Run this
+  // benchmark once for each value in T.  The benchmark run for a
+  // particular value t consists of t threads running the benchmark
+  // function concurrently.  For example, consider:
+  //    BENCHMARK(Foo)->ThreadRange(1,16);
+  // This will run the following benchmarks:
+  //    Foo in 1 thread
+  //    Foo in 2 threads
+  //    Foo in 4 threads
+  //    Foo in 8 threads
+  //    Foo in 16 threads
+  Benchmark* ThreadRange(int min_threads, int max_threads);
+
+  // For each value n in the range, run this benchmark once using n threads.
+  // min_threads and max_threads are always included in the range.
+  // stride specifies the increment. E.g. DenseThreadRange(1, 8, 3) starts
+  // a benchmark with 1, 4, 7 and 8 threads.
+  Benchmark* DenseThreadRange(int min_threads, int max_threads, int stride = 1);
+
+  // Equivalent to ThreadRange(NumCPUs(), NumCPUs())
+  Benchmark* ThreadPerCpu();
+
+  virtual void Run(State& state) = 0;
+
+ protected:
+  explicit Benchmark(const char* name);
+  Benchmark(Benchmark const&);
+  void SetName(const char* name);
+
+  int ArgsCnt() const;
+
+ private:
+  friend class BenchmarkFamilies;
+
+  std::string name_;
+  AggregationReportMode aggregation_report_mode_;
+  std::vector<std::string> arg_names_;       // Args for all benchmark runs
+  std::vector<std::vector<int64_t> > args_;  // Args for all benchmark runs
+  TimeUnit time_unit_;
+  int range_multiplier_;
+  double min_time_;
+  IterationCount iterations_;
+  int repetitions_;
+  bool measure_process_cpu_time_;
+  bool use_real_time_;
+  bool use_manual_time_;
+  BigO complexity_;
+  BigOFunc* complexity_lambda_;
+  std::vector<Statistics> statistics_;
+  std::vector<int> thread_counts_;
+
+  Benchmark& operator=(Benchmark const&);
+};
+
+}  // namespace internal
+
+// Create and register a benchmark with the specified 'name' that invokes
+// the specified functor 'fn'.
+//
+// RETURNS: A pointer to the registered benchmark.
+internal::Benchmark* RegisterBenchmark(const char* name,
+                                       internal::Function* fn);
+
+#if defined(BENCHMARK_HAS_CXX11)
+template <class Lambda>
+internal::Benchmark* RegisterBenchmark(const char* name, Lambda&& fn);
+#endif
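+
+// Illustrative sketch of runtime registration, assuming C++11 and the usual
+// <string>/<vector> headers; the benchmark names and lambda body are
+// placeholders:
+//   int main(int argc, char** argv) {
+//     for (int len : {8, 64, 512}) {
+//       std::string name = "BM_copy/" + std::to_string(len);
+//       benchmark::RegisterBenchmark(name.c_str(),
+//                                    [len](benchmark::State& st) {
+//         std::vector<char> src(len), dst(len);
+//         for (auto _ : st) dst.assign(src.begin(), src.end());
+//       });
+//     }
+//     benchmark::Initialize(&argc, argv);
+//     benchmark::RunSpecifiedBenchmarks();
+//   }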
+
+// Remove all registered benchmarks. All pointers to previously registered
+// benchmarks are invalidated.
+void ClearRegisteredBenchmarks();
+
+namespace internal {
+// The class used to hold all Benchmarks created from static functions
+// (i.e. those created using the BENCHMARK(...) macros).
+class FunctionBenchmark : public Benchmark {
+ public:
+  FunctionBenchmark(const char* name, Function* func)
+      : Benchmark(name), func_(func) {}
+
+  virtual void Run(State& st);
+
+ private:
+  Function* func_;
+};
+
+#ifdef BENCHMARK_HAS_CXX11
+template <class Lambda>
+class LambdaBenchmark : public Benchmark {
+ public:
+  virtual void Run(State& st) { lambda_(st); }
+
+ private:
+  template <class OLambda>
+  LambdaBenchmark(const char* name, OLambda&& lam)
+      : Benchmark(name), lambda_(std::forward<OLambda>(lam)) {}
+
+  LambdaBenchmark(LambdaBenchmark const&) = delete;
+
+ private:
+  template <class Lam>
+  friend Benchmark* ::benchmark::RegisterBenchmark(const char*, Lam&&);
+
+  Lambda lambda_;
+};
+#endif
+
+}  // namespace internal
+
+inline internal::Benchmark* RegisterBenchmark(const char* name,
+                                              internal::Function* fn) {
+  return internal::RegisterBenchmarkInternal(
+      ::new internal::FunctionBenchmark(name, fn));
+}
+
+#ifdef BENCHMARK_HAS_CXX11
+template <class Lambda>
+internal::Benchmark* RegisterBenchmark(const char* name, Lambda&& fn) {
+  using BenchType =
+      internal::LambdaBenchmark<typename std::decay<Lambda>::type>;
+  return internal::RegisterBenchmarkInternal(
+      ::new BenchType(name, std::forward<Lambda>(fn)));
+}
+#endif
+
+#if defined(BENCHMARK_HAS_CXX11) && \
+    (!defined(BENCHMARK_GCC_VERSION) || BENCHMARK_GCC_VERSION >= 409)
+template <class Lambda, class... Args>
+internal::Benchmark* RegisterBenchmark(const char* name, Lambda&& fn,
+                                       Args&&... args) {
+  return benchmark::RegisterBenchmark(
+      name, [=](benchmark::State& st) { fn(st, args...); });
+}
+#else
+#define BENCHMARK_HAS_NO_VARIADIC_REGISTER_BENCHMARK
+#endif
+
+// The base class for all fixture tests.
+class Fixture : public internal::Benchmark {
+ public:
+  Fixture() : internal::Benchmark("") {}
+
+  virtual void Run(State& st) {
+    this->SetUp(st);
+    this->BenchmarkCase(st);
+    this->TearDown(st);
+  }
+
+  // These will be deprecated ...
+  virtual void SetUp(const State&) {}
+  virtual void TearDown(const State&) {}
+  // ... In favor of these.
+  virtual void SetUp(State& st) { SetUp(const_cast<const State&>(st)); }
+  virtual void TearDown(State& st) { TearDown(const_cast<const State&>(st)); }
+
+ protected:
+  virtual void BenchmarkCase(State&) = 0;
+};
+
+}  // namespace benchmark
+
+// ------------------------------------------------------
+// Macro to register benchmarks
+
+// Check that __COUNTER__ is defined and that __COUNTER__ increases by 1
+// every time it is expanded. X + 1 == X + 0 is used in case X is defined to be
+// empty. If X is empty the expression becomes (+1 == +0).
+#if defined(__COUNTER__) && (__COUNTER__ + 1 == __COUNTER__ + 0)
+#define BENCHMARK_PRIVATE_UNIQUE_ID __COUNTER__
+#else
+#define BENCHMARK_PRIVATE_UNIQUE_ID __LINE__
+#endif
+
+// Helpers for generating unique variable names
+#define BENCHMARK_PRIVATE_NAME(n) \
+  BENCHMARK_PRIVATE_CONCAT(_benchmark_, BENCHMARK_PRIVATE_UNIQUE_ID, n)
+#define BENCHMARK_PRIVATE_CONCAT(a, b, c) BENCHMARK_PRIVATE_CONCAT2(a, b, c)
+#define BENCHMARK_PRIVATE_CONCAT2(a, b, c) a##b##c
+
+#define BENCHMARK_PRIVATE_DECLARE(n)                                 \
+  static ::benchmark::internal::Benchmark* BENCHMARK_PRIVATE_NAME(n) \
+      BENCHMARK_UNUSED
+
+#define BENCHMARK(n)                                     \
+  BENCHMARK_PRIVATE_DECLARE(n) =                         \
+      (::benchmark::internal::RegisterBenchmarkInternal( \
+          new ::benchmark::internal::FunctionBenchmark(#n, n)))
+
+// Old-style macros
+#define BENCHMARK_WITH_ARG(n, a) BENCHMARK(n)->Arg((a))
+#define BENCHMARK_WITH_ARG2(n, a1, a2) BENCHMARK(n)->Args({(a1), (a2)})
+#define BENCHMARK_WITH_UNIT(n, t) BENCHMARK(n)->Unit((t))
+#define BENCHMARK_RANGE(n, lo, hi) BENCHMARK(n)->Range((lo), (hi))
+#define BENCHMARK_RANGE2(n, l1, h1, l2, h2) \
+  BENCHMARK(n)->RangePair({{(l1), (h1)}, {(l2), (h2)}})
+
+#ifdef BENCHMARK_HAS_CXX11
+
+// Register a benchmark which invokes the function specified by `func`
+// with the additional arguments specified by `...`.
+//
+// For example:
+//
+// template <class ...ExtraArgs>
+// void BM_takes_args(benchmark::State& state, ExtraArgs&&... extra_args) {
+//   [...]
+// }
+// /* Registers a benchmark named "BM_takes_args/int_string_test" */
+// BENCHMARK_CAPTURE(BM_takes_args, int_string_test, 42, std::string("abc"));
+#define BENCHMARK_CAPTURE(func, test_case_name, ...)     \
+  BENCHMARK_PRIVATE_DECLARE(func) =                      \
+      (::benchmark::internal::RegisterBenchmarkInternal( \
+          new ::benchmark::internal::FunctionBenchmark(  \
+              #func "/" #test_case_name,                 \
+              [](::benchmark::State& st) { func(st, __VA_ARGS__); })))
+
+#endif  // BENCHMARK_HAS_CXX11
+
+// This will register a benchmark for a templatized function.  For example:
+//
+// template<int arg>
+// void BM_Foo(int iters);
+//
+// BENCHMARK_TEMPLATE(BM_Foo, 1);
+//
+// will register BM_Foo<1> as a benchmark.
+#define BENCHMARK_TEMPLATE1(n, a)                        \
+  BENCHMARK_PRIVATE_DECLARE(n) =                         \
+      (::benchmark::internal::RegisterBenchmarkInternal( \
+          new ::benchmark::internal::FunctionBenchmark(#n "<" #a ">", n<a>)))
+
+#define BENCHMARK_TEMPLATE2(n, a, b)                                         \
+  BENCHMARK_PRIVATE_DECLARE(n) =                                             \
+      (::benchmark::internal::RegisterBenchmarkInternal(                     \
+          new ::benchmark::internal::FunctionBenchmark(#n "<" #a "," #b ">", \
+                                                       n<a, b>)))
+
+#ifdef BENCHMARK_HAS_CXX11
+#define BENCHMARK_TEMPLATE(n, ...)                       \
+  BENCHMARK_PRIVATE_DECLARE(n) =                         \
+      (::benchmark::internal::RegisterBenchmarkInternal( \
+          new ::benchmark::internal::FunctionBenchmark(  \
+              #n "<" #__VA_ARGS__ ">", n<__VA_ARGS__>)))
+#else
+#define BENCHMARK_TEMPLATE(n, a) BENCHMARK_TEMPLATE1(n, a)
+#endif
+
+#define BENCHMARK_PRIVATE_DECLARE_F(BaseClass, Method)        \
+  class BaseClass##_##Method##_Benchmark : public BaseClass { \
+   public:                                                    \
+    BaseClass##_##Method##_Benchmark() : BaseClass() {        \
+      this->SetName(#BaseClass "/" #Method);                  \
+    }                                                         \
+                                                              \
+   protected:                                                 \
+    virtual void BenchmarkCase(::benchmark::State&);          \
+  };
+
+#define BENCHMARK_TEMPLATE1_PRIVATE_DECLARE_F(BaseClass, Method, a) \
+  class BaseClass##_##Method##_Benchmark : public BaseClass<a> {    \
+   public:                                                          \
+    BaseClass##_##Method##_Benchmark() : BaseClass<a>() {           \
+      this->SetName(#BaseClass "<" #a ">/" #Method);                \
+    }                                                               \
+                                                                    \
+   protected:                                                       \
+    virtual void BenchmarkCase(::benchmark::State&);                \
+  };
+
+#define BENCHMARK_TEMPLATE2_PRIVATE_DECLARE_F(BaseClass, Method, a, b) \
+  class BaseClass##_##Method##_Benchmark : public BaseClass<a, b> {    \
+   public:                                                             \
+    BaseClass##_##Method##_Benchmark() : BaseClass<a, b>() {           \
+      this->SetName(#BaseClass "<" #a "," #b ">/" #Method);            \
+    }                                                                  \
+                                                                       \
+   protected:                                                          \
+    virtual void BenchmarkCase(::benchmark::State&);                   \
+  };
+
+#ifdef BENCHMARK_HAS_CXX11
+#define BENCHMARK_TEMPLATE_PRIVATE_DECLARE_F(BaseClass, Method, ...)       \
+  class BaseClass##_##Method##_Benchmark : public BaseClass<__VA_ARGS__> { \
+   public:                                                                 \
+    BaseClass##_##Method##_Benchmark() : BaseClass<__VA_ARGS__>() {        \
+      this->SetName(#BaseClass "<" #__VA_ARGS__ ">/" #Method);             \
+    }                                                                      \
+                                                                           \
+   protected:                                                              \
+    virtual void BenchmarkCase(::benchmark::State&);                       \
+  };
+#else
+#define BENCHMARK_TEMPLATE_PRIVATE_DECLARE_F(n, a) \
+  BENCHMARK_TEMPLATE1_PRIVATE_DECLARE_F(n, a)
+#endif
+
+#define BENCHMARK_DEFINE_F(BaseClass, Method)    \
+  BENCHMARK_PRIVATE_DECLARE_F(BaseClass, Method) \
+  void BaseClass##_##Method##_Benchmark::BenchmarkCase
+
+#define BENCHMARK_TEMPLATE1_DEFINE_F(BaseClass, Method, a)    \
+  BENCHMARK_TEMPLATE1_PRIVATE_DECLARE_F(BaseClass, Method, a) \
+  void BaseClass##_##Method##_Benchmark::BenchmarkCase
+
+#define BENCHMARK_TEMPLATE2_DEFINE_F(BaseClass, Method, a, b)    \
+  BENCHMARK_TEMPLATE2_PRIVATE_DECLARE_F(BaseClass, Method, a, b) \
+  void BaseClass##_##Method##_Benchmark::BenchmarkCase
+
+#ifdef BENCHMARK_HAS_CXX11
+#define BENCHMARK_TEMPLATE_DEFINE_F(BaseClass, Method, ...)            \
+  BENCHMARK_TEMPLATE_PRIVATE_DECLARE_F(BaseClass, Method, __VA_ARGS__) \
+  void BaseClass##_##Method##_Benchmark::BenchmarkCase
+#else
+#define BENCHMARK_TEMPLATE_DEFINE_F(BaseClass, Method, a) \
+  BENCHMARK_TEMPLATE1_DEFINE_F(BaseClass, Method, a)
+#endif
+
+#define BENCHMARK_REGISTER_F(BaseClass, Method) \
+  BENCHMARK_PRIVATE_REGISTER_F(BaseClass##_##Method##_Benchmark)
+
+#define BENCHMARK_PRIVATE_REGISTER_F(TestName) \
+  BENCHMARK_PRIVATE_DECLARE(TestName) =        \
+      (::benchmark::internal::RegisterBenchmarkInternal(new TestName()))
+
+// This macro will define and register a benchmark within a fixture class.
+#define BENCHMARK_F(BaseClass, Method)           \
+  BENCHMARK_PRIVATE_DECLARE_F(BaseClass, Method) \
+  BENCHMARK_REGISTER_F(BaseClass, Method);       \
+  void BaseClass##_##Method##_Benchmark::BenchmarkCase
+
+#define BENCHMARK_TEMPLATE1_F(BaseClass, Method, a)           \
+  BENCHMARK_TEMPLATE1_PRIVATE_DECLARE_F(BaseClass, Method, a) \
+  BENCHMARK_REGISTER_F(BaseClass, Method);                    \
+  void BaseClass##_##Method##_Benchmark::BenchmarkCase
+
+#define BENCHMARK_TEMPLATE2_F(BaseClass, Method, a, b)           \
+  BENCHMARK_TEMPLATE2_PRIVATE_DECLARE_F(BaseClass, Method, a, b) \
+  BENCHMARK_REGISTER_F(BaseClass, Method);                       \
+  void BaseClass##_##Method##_Benchmark::BenchmarkCase
+
+#ifdef BENCHMARK_HAS_CXX11
+#define BENCHMARK_TEMPLATE_F(BaseClass, Method, ...)                   \
+  BENCHMARK_TEMPLATE_PRIVATE_DECLARE_F(BaseClass, Method, __VA_ARGS__) \
+  BENCHMARK_REGISTER_F(BaseClass, Method);                             \
+  void BaseClass##_##Method##_Benchmark::BenchmarkCase
+#else
+#define BENCHMARK_TEMPLATE_F(BaseClass, Method, a) \
+  BENCHMARK_TEMPLATE1_F(BaseClass, Method, a)
+#endif
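+
+// An illustrative fixture sketch (MyFixture and Read are placeholder names):
+//   class MyFixture : public benchmark::Fixture {
+//    public:
+//     void SetUp(benchmark::State&) override { value_ = 42; }
+//     int value_;
+//   };
+//   BENCHMARK_F(MyFixture, Read)(benchmark::State& state) {
+//     for (auto _ : state) benchmark::DoNotOptimize(value_);
+//   }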
+
+// Helper macro to create a main routine in a test that runs the benchmarks
+#define BENCHMARK_MAIN()                                                \
+  int main(int argc, char** argv) {                                     \
+    ::benchmark::Initialize(&argc, argv);                               \
+    if (::benchmark::ReportUnrecognizedArguments(argc, argv)) return 1; \
+    ::benchmark::RunSpecifiedBenchmarks();                              \
+  }                                                                     \
+  int main(int, char**)
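+
+// A minimal translation unit using this macro (BM_NoOp is a placeholder):
+//   #include "benchmark/benchmark.h"
+//   static void BM_NoOp(benchmark::State& state) {
+//     for (auto _ : state) {
+//     }
+//   }
+//   BENCHMARK(BM_NoOp);
+//   BENCHMARK_MAIN();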
+
+// ------------------------------------------------------
+// Benchmark Reporters
+
+namespace benchmark {
+
+struct CPUInfo {
+  struct CacheInfo {
+    std::string type;
+    int level;
+    int size;
+    int num_sharing;
+  };
+
+  int num_cpus;
+  double cycles_per_second;
+  std::vector<CacheInfo> caches;
+  bool scaling_enabled;
+  std::vector<double> load_avg;
+
+  static const CPUInfo& Get();
+
+ private:
+  CPUInfo();
+  BENCHMARK_DISALLOW_COPY_AND_ASSIGN(CPUInfo);
+};
+
+// Struct for system information.
+struct SystemInfo {
+  std::string name;
+  static const SystemInfo& Get();
+
+ private:
+  SystemInfo();
+  BENCHMARK_DISALLOW_COPY_AND_ASSIGN(SystemInfo);
+};
+
+// BenchmarkName contains the components of the Benchmark's name
+// which allows individual fields to be modified or cleared before
+// building the final name using 'str()'.
+struct BenchmarkName {
+  std::string function_name;
+  std::string args;
+  std::string min_time;
+  std::string iterations;
+  std::string repetitions;
+  std::string time_type;
+  std::string threads;
+
+  // Return the full name of the benchmark with each non-empty
+  // field separated by a '/'
+  std::string str() const;
+};
+
+// Interface for custom benchmark result printers.
+// By default, benchmark reports are printed to stdout. However an application
+// can control the destination of the reports by calling
+// RunSpecifiedBenchmarks and passing it a custom reporter object.
+// The reporter object must implement the following interface.
+class BenchmarkReporter {
+ public:
+  struct Context {
+    CPUInfo const& cpu_info;
+    SystemInfo const& sys_info;
+    // The number of chars in the longest benchmark name.
+    size_t name_field_width;
+    static const char* executable_name;
+    Context();
+  };
+
+  struct Run {
+    static const int64_t no_repetition_index = -1;
+    enum RunType { RT_Iteration, RT_Aggregate };
+
+    Run()
+        : run_type(RT_Iteration),
+          error_occurred(false),
+          iterations(1),
+          threads(1),
+          time_unit(kNanosecond),
+          real_accumulated_time(0),
+          cpu_accumulated_time(0),
+          max_heapbytes_used(0),
+          complexity(oNone),
+          complexity_lambda(),
+          complexity_n(0),
+          report_big_o(false),
+          report_rms(false),
+          counters(),
+          has_memory_result(false),
+          allocs_per_iter(0.0),
+          max_bytes_used(0) {}
+
+    std::string benchmark_name() const;
+    BenchmarkName run_name;
+    RunType run_type;
+    std::string aggregate_name;
+    std::string report_label;  // Empty if not set by benchmark.
+    bool error_occurred;
+    std::string error_message;
+
+    IterationCount iterations;
+    int64_t threads;
+    int64_t repetition_index;
+    int64_t repetitions;
+    TimeUnit time_unit;
+    double real_accumulated_time;
+    double cpu_accumulated_time;
+
+    // Return a value representing the real time per iteration in the unit
+    // specified by 'time_unit'.
+    // NOTE: If 'iterations' is zero the returned value represents the
+    // accumulated time.
+    double GetAdjustedRealTime() const;
+
+    // Return a value representing the cpu time per iteration in the unit
+    // specified by 'time_unit'.
+    // NOTE: If 'iterations' is zero the returned value represents the
+    // accumulated time.
+    double GetAdjustedCPUTime() const;
+
+    // This is set to 0.0 if memory tracing is not enabled.
+    double max_heapbytes_used;
+
+    // Keep track of arguments to compute asymptotic complexity
+    BigO complexity;
+    BigOFunc* complexity_lambda;
+    int64_t complexity_n;
+
+    // what statistics to compute from the measurements
+    const std::vector<internal::Statistics>* statistics;
+
+    // Inform print function whether the current run is a complexity report
+    bool report_big_o;
+    bool report_rms;
+
+    UserCounters counters;
+
+    // Memory metrics.
+    bool has_memory_result;
+    double allocs_per_iter;
+    int64_t max_bytes_used;
+  };
+
+  // Construct a BenchmarkReporter with the output stream set to 'std::cout'
+  // and the error stream set to 'std::cerr'
+  BenchmarkReporter();
+
+  // Called once for every suite of benchmarks run.
+  // The parameter "context" contains information that the
+  // reporter may wish to use when generating its report, for example the
+  // platform under which the benchmarks are running. The benchmark run is
+  // never started if this function returns false, allowing the reporter
+  // to skip runs based on the context information.
+  virtual bool ReportContext(const Context& context) = 0;
+
+  // Called once for each group of benchmark runs, gives information about
+  // cpu-time and heap memory usage during the benchmark run. If the group
+  // of runs contained more than two entries then 'report' contains additional
+  // elements representing the mean and standard deviation of those runs.
+  // Additionally if this group of runs was the last in a family of benchmarks
+  // 'reports' contains additional entries representing the asymptotic
+  // complexity and RMS of that benchmark family.
+  virtual void ReportRuns(const std::vector<Run>& report) = 0;
+
+  // Called once and only once after every group of benchmarks is run and
+  // reported.
+  virtual void Finalize() {}
+
+  // REQUIRES: The object referenced by 'out' is valid for the lifetime
+  // of the reporter.
+  void SetOutputStream(std::ostream* out) {
+    assert(out);
+    output_stream_ = out;
+  }
+
+  // REQUIRES: The object referenced by 'err' is valid for the lifetime
+  // of the reporter.
+  void SetErrorStream(std::ostream* err) {
+    assert(err);
+    error_stream_ = err;
+  }
+
+  std::ostream& GetOutputStream() const { return *output_stream_; }
+
+  std::ostream& GetErrorStream() const { return *error_stream_; }
+
+  virtual ~BenchmarkReporter();
+
+  // Write a human readable string to 'out' representing the specified
+  // 'context'.
+  // REQUIRES: 'out' is non-null.
+  static void PrintBasicContext(std::ostream* out, Context const& context);
+
+ private:
+  std::ostream* output_stream_;
+  std::ostream* error_stream_;
+};
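+
+// A hedged sketch of a custom reporter built on the interface above
+// (NullReporter is a placeholder name):
+//   class NullReporter : public benchmark::BenchmarkReporter {
+//    public:
+//     bool ReportContext(const Context&) override { return true; }
+//     void ReportRuns(const std::vector<Run>& runs) override {
+//       GetOutputStream() << runs.size() << " run(s) reported\n";
+//     }
+//   };
+// Such a reporter could then be passed to RunSpecifiedBenchmarks(), declared
+// earlier in this header.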
+
+// Simple reporter that outputs benchmark data to the console. This is the
+// default reporter used by RunSpecifiedBenchmarks().
+class ConsoleReporter : public BenchmarkReporter {
+ public:
+  enum OutputOptions {
+    OO_None = 0,
+    OO_Color = 1,
+    OO_Tabular = 2,
+    OO_ColorTabular = OO_Color | OO_Tabular,
+    OO_Defaults = OO_ColorTabular
+  };
+  explicit ConsoleReporter(OutputOptions opts_ = OO_Defaults)
+      : output_options_(opts_),
+        name_field_width_(0),
+        prev_counters_(),
+        printed_header_(false) {}
+
+  virtual bool ReportContext(const Context& context);
+  virtual void ReportRuns(const std::vector<Run>& reports);
+
+ protected:
+  virtual void PrintRunData(const Run& report);
+  virtual void PrintHeader(const Run& report);
+
+  OutputOptions output_options_;
+  size_t name_field_width_;
+  UserCounters prev_counters_;
+  bool printed_header_;
+};
+
+class JSONReporter : public BenchmarkReporter {
+ public:
+  JSONReporter() : first_report_(true) {}
+  virtual bool ReportContext(const Context& context);
+  virtual void ReportRuns(const std::vector<Run>& reports);
+  virtual void Finalize();
+
+ private:
+  void PrintRunData(const Run& report);
+
+  bool first_report_;
+};
+
+class BENCHMARK_DEPRECATED_MSG(
+    "The CSV Reporter will be removed in a future release") CSVReporter
+    : public BenchmarkReporter {
+ public:
+  CSVReporter() : printed_header_(false) {}
+  virtual bool ReportContext(const Context& context);
+  virtual void ReportRuns(const std::vector<Run>& reports);
+
+ private:
+  void PrintRunData(const Run& report);
+
+  bool printed_header_;
+  std::set<std::string> user_counter_names_;
+};
+
+// If a MemoryManager is registered, it can be used to collect and report
+// allocation metrics for a run of the benchmark.
+class MemoryManager {
+ public:
+  struct Result {
+    Result() : num_allocs(0), max_bytes_used(0) {}
+
+    // The number of allocations made in total between Start and Stop.
+    int64_t num_allocs;
+
+    // The peak memory use between Start and Stop.
+    int64_t max_bytes_used;
+  };
+
+  virtual ~MemoryManager() {}
+
+  // Implement this to start recording allocation information.
+  virtual void Start() = 0;
+
+  // Implement this to stop recording and fill out the given Result structure.
+  virtual void Stop(Result* result) = 0;
+};
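+
+// A hedged sketch of a MemoryManager implementation (CountingManager and its
+// counters are placeholders; hooking it up is done through the registration
+// function declared elsewhere in this header):
+//   class CountingManager : public benchmark::MemoryManager {
+//    public:
+//     void Start() override { allocs_ = 0; peak_bytes_ = 0; }
+//     void Stop(Result* result) override {
+//       result->num_allocs = allocs_;
+//       result->max_bytes_used = peak_bytes_;
+//     }
+//     int64_t allocs_ = 0;
+//     int64_t peak_bytes_ = 0;
+//   };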
+
+inline const char* GetTimeUnitString(TimeUnit unit) {
+  switch (unit) {
+    case kMillisecond:
+      return "ms";
+    case kMicrosecond:
+      return "us";
+    case kNanosecond:
+      return "ns";
+  }
+  BENCHMARK_UNREACHABLE();
+}
+
+inline double GetTimeUnitMultiplier(TimeUnit unit) {
+  switch (unit) {
+    case kMillisecond:
+      return 1e3;
+    case kMicrosecond:
+      return 1e6;
+    case kNanosecond:
+      return 1e9;
+  }
+  BENCHMARK_UNREACHABLE();
+}
+
+}  // namespace benchmark
+
+#endif  // BENCHMARK_BENCHMARK_H_
diff --git a/src/third_party/google_benchmark/mingw.py b/src/third_party/google_benchmark/mingw.py
new file mode 100644
index 0000000..65cf4b8
--- /dev/null
+++ b/src/third_party/google_benchmark/mingw.py
@@ -0,0 +1,320 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+import argparse
+import errno
+import logging
+import os
+import platform
+import re
+import sys
+import subprocess
+import tempfile
+
+try:
+    import winreg
+except ImportError:
+    import _winreg as winreg
+try:
+    import urllib.request as request
+except ImportError:
+    import urllib as request
+try:
+    import urllib.parse as parse
+except ImportError:
+    import urlparse as parse
+
+class EmptyLogger(object):
+    '''
+    Provides an implementation that performs no logging
+    '''
+    def debug(self, *k, **kw):
+        pass
+    def info(self, *k, **kw):
+        pass
+    def warn(self, *k, **kw):
+        pass
+    def error(self, *k, **kw):
+        pass
+    def critical(self, *k, **kw):
+        pass
+    def setLevel(self, *k, **kw):
+        pass
+
+urls = (
+    'http://downloads.sourceforge.net/project/mingw-w64/Toolchains%20'
+        'targetting%20Win32/Personal%20Builds/mingw-builds/installer/'
+        'repository.txt',
+    'http://downloads.sourceforge.net/project/mingwbuilds/host-windows/'
+        'repository.txt'
+)
+'''
+A list of mingw-build repositories
+'''
+
+def repository(urls = urls, log = EmptyLogger()):
+    '''
+    Downloads the mingw-builds repository files and parses them
+    '''
+    log.info('getting mingw-builds repository')
+    versions = {}
+    re_sourceforge = re.compile(r'http://sourceforge.net/projects/([^/]+)/files')
+    re_sub = r'http://downloads.sourceforge.net/project/\1'
+    for url in urls:
+        log.debug(' - requesting: %s', url)
+        socket = request.urlopen(url)
+        repo = socket.read()
+        if not isinstance(repo, str):
+            repo = repo.decode()
+        socket.close()
+        for entry in repo.split('\n')[:-1]:
+            value = entry.split('|')
+            version = tuple([int(n) for n in value[0].strip().split('.')])
+            version = versions.setdefault(version, {})
+            arch = value[1].strip()
+            if arch == 'x32':
+                arch = 'i686'
+            elif arch == 'x64':
+                arch = 'x86_64'
+            arch = version.setdefault(arch, {})
+            threading = arch.setdefault(value[2].strip(), {})
+            exceptions = threading.setdefault(value[3].strip(), {})
+            revision = exceptions.setdefault(int(value[4].strip()[3:]),
+                re_sourceforge.sub(re_sub, value[5].strip()))
+    return versions
+
+def find_in_path(file, path=None):
+    '''
+    Attempts to find an executable in the path
+    '''
+    if platform.system() == 'Windows':
+        file += '.exe'
+    if path is None:
+        path = os.environ.get('PATH', '')
+    if type(path) is type(''):
+        path = path.split(os.pathsep)
+    return list(filter(os.path.exists,
+        map(lambda dir, file=file: os.path.join(dir, file), path)))
+
+def find_7zip(log = EmptyLogger()):
+    '''
+    Attempts to find 7zip for unpacking the mingw-build archives
+    '''
+    log.info('finding 7zip')
+    path = find_in_path('7z')
+    if not path:
+        key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, r'SOFTWARE\7-Zip')
+        path, _ = winreg.QueryValueEx(key, 'Path')
+        path = [os.path.join(path, '7z.exe')]
+    log.debug('found \'%s\'', path[0])
+    return path[0]
+
+def unpack(archive, location, log = EmptyLogger()):
+    '''
+    Unpacks a mingw-builds archive
+    '''
+    sevenzip = find_7zip(log)
+    log.info('unpacking %s', os.path.basename(archive))
+    cmd = [sevenzip, 'x', archive, '-o' + location, '-y']
+    log.debug(' - %r', cmd)
+    with open(os.devnull, 'w') as devnull:
+        subprocess.check_call(cmd, stdout = devnull)
+
+def download(url, location, log = EmptyLogger()):
+    '''
+    Downloads and unpacks a mingw-builds archive
+    '''
+    log.info('downloading MinGW')
+    log.debug(' - url: %s', url)
+    log.debug(' - location: %s', location)
+
+    re_content = re.compile(r'attachment;[ \t]*filename=(")?([^"]*)(")?[\r\n]*')
+
+    stream = request.urlopen(url)
+    try:
+        content = stream.getheader('Content-Disposition') or ''
+    except AttributeError:
+        content = stream.headers.getheader('Content-Disposition') or ''
+    matches = re_content.match(content)
+    if matches:
+        filename = matches.group(2)
+    else:
+        parsed = parse.urlparse(stream.geturl())
+        filename = os.path.basename(parsed.path)
+
+    try:
+        os.makedirs(location)
+    except OSError as e:
+        if e.errno == errno.EEXIST and os.path.isdir(location):
+            pass
+        else:
+            raise
+
+    archive = os.path.join(location, filename)
+    with open(archive, 'wb') as out:
+        while True:
+            buf = stream.read(1024)
+            if not buf:
+                break
+            out.write(buf)
+    unpack(archive, location, log = log)
+    os.remove(archive)
+
+    possible = os.path.join(location, 'mingw64')
+    if not os.path.exists(possible):
+        possible = os.path.join(location, 'mingw32')
+        if not os.path.exists(possible):
+            raise ValueError('Failed to find unpacked MinGW: ' + possible)
+    return possible
+
+def root(location = None, arch = None, version = None, threading = None,
+        exceptions = None, revision = None, log = EmptyLogger()):
+    '''
+    Returns the root folder of a specific version of the mingw-builds variant
+    of gcc. Will download the compiler if needed
+    '''
+
+    # Get the repository if we don't have all the information
+    if not (arch and version and threading and exceptions and revision):
+        versions = repository(log = log)
+
+    # Determine some defaults
+    version = version or max(versions.keys())
+    if not arch:
+        arch = platform.machine().lower()
+        if arch == 'x86':
+            arch = 'i686'
+        elif arch == 'amd64':
+            arch = 'x86_64'
+    if not threading:
+        keys = versions[version][arch].keys()
+        if 'posix' in keys:
+            threading = 'posix'
+        elif 'win32' in keys:
+            threading = 'win32'
+        else:
+            threading = list(keys)[0]
+    if not exceptions:
+        keys = versions[version][arch][threading].keys()
+        if 'seh' in keys:
+            exceptions = 'seh'
+        elif 'sjlj' in keys:
+            exceptions = 'sjlj'
+        else:
+            exceptions = list(keys)[0]
+    if revision is None:
+        revision = max(versions[version][arch][threading][exceptions].keys())
+    if not location:
+        location = os.path.join(tempfile.gettempdir(), 'mingw-builds')
+
+    # Get the download url
+    url = versions[version][arch][threading][exceptions][revision]
+
+    # Tell the user whatzzup
+    log.info('finding MinGW %s', '.'.join(str(v) for v in version))
+    log.debug(' - arch: %s', arch)
+    log.debug(' - threading: %s', threading)
+    log.debug(' - exceptions: %s', exceptions)
+    log.debug(' - revision: %s', revision)
+    log.debug(' - url: %s', url)
+
+    # Store each specific revision differently
+    slug = '{version}-{arch}-{threading}-{exceptions}-rev{revision}'
+    slug = slug.format(
+        version = '.'.join(str(v) for v in version),
+        arch = arch,
+        threading = threading,
+        exceptions = exceptions,
+        revision = revision
+    )
+    if arch == 'x86_64':
+        root_dir = os.path.join(location, slug, 'mingw64')
+    elif arch == 'i686':
+        root_dir = os.path.join(location, slug, 'mingw32')
+    else:
+        raise ValueError('Unknown MinGW arch: ' + arch)
+
+    # Download if needed
+    if not os.path.exists(root_dir):
+        downloaded = download(url, os.path.join(location, slug), log = log)
+        if downloaded != root_dir:
+            raise ValueError('The location of mingw did not match\n%s\n%s'
+                % (downloaded, root_dir))
+
+    return root_dir
+
+def str2ver(string):
+    '''
+    Converts a version string into a tuple
+    '''
+    try:
+        version = tuple(int(v) for v in string.split('.'))
+        if len(version) != 3:
+            raise ValueError()
+    except ValueError:
+        raise argparse.ArgumentTypeError(
+            'please provide a three digit version string')
+    return version
+
+def main():
+    '''
+    Invoked when the script is run directly by the python interpreter
+    '''
+    parser = argparse.ArgumentParser(
+        description = 'Downloads a specific version of MinGW',
+        formatter_class = argparse.ArgumentDefaultsHelpFormatter
+    )
+    parser.add_argument('--location',
+        help = 'the location to download the compiler to',
+        default = os.path.join(tempfile.gettempdir(), 'mingw-builds'))
+    parser.add_argument('--arch', required = True, choices = ['i686', 'x86_64'],
+        help = 'the target MinGW architecture string')
+    parser.add_argument('--version', type = str2ver,
+        help = 'the version of GCC to download')
+    parser.add_argument('--threading', choices = ['posix', 'win32'],
+        help = 'the threading type of the compiler')
+    parser.add_argument('--exceptions', choices = ['sjlj', 'seh', 'dwarf'],
+        help = 'the method to throw exceptions')
+    parser.add_argument('--revision', type=int,
+        help = 'the revision of the MinGW release')
+    group = parser.add_mutually_exclusive_group()
+    group.add_argument('-v', '--verbose', action='store_true',
+        help='increase the script output verbosity')
+    group.add_argument('-q', '--quiet', action='store_true',
+        help='only print errors and warnings')
+    args = parser.parse_args()
+
+    # Create the logger
+    logger = logging.getLogger('mingw')
+    handler = logging.StreamHandler()
+    formatter = logging.Formatter('%(message)s')
+    handler.setFormatter(formatter)
+    logger.addHandler(handler)
+    logger.setLevel(logging.INFO)
+    if args.quiet:
+        logger.setLevel(logging.WARN)
+    if args.verbose:
+        logger.setLevel(logging.DEBUG)
+
+    # Get MinGW
+    root_dir = root(location = args.location, arch = args.arch,
+        version = args.version, threading = args.threading,
+        exceptions = args.exceptions, revision = args.revision,
+        log = logger)
+
+    sys.stdout.write('%s\n' % os.path.join(root_dir, 'bin'))
+
+if __name__ == '__main__':
+    try:
+        main()
+    except IOError as e:
+        sys.stderr.write('IO error: %s\n' % e)
+        sys.exit(1)
+    except OSError as e:
+        sys.stderr.write('OS error: %s\n' % e)
+        sys.exit(1)
+    except KeyboardInterrupt as e:
+        sys.stderr.write('Killed\n')
+        sys.exit(1)
diff --git a/src/third_party/google_benchmark/releasing.md b/src/third_party/google_benchmark/releasing.md
new file mode 100644
index 0000000..f0cd701
--- /dev/null
+++ b/src/third_party/google_benchmark/releasing.md
@@ -0,0 +1,16 @@
+# How to release
+
+* Make sure you're on master and synced to HEAD
+* Ensure the project builds and tests run (sanity check only, obviously)
+    * `parallel -j0 exec ::: test/*_test` can help ensure everything at least
+      passes
+* Prepare release notes
+    * `git log $(git describe --abbrev=0 --tags)..HEAD` gives you the list of
+      commits between the last annotated tag and HEAD
+    * Pick the most interesting.
+* Create a release through GitHub's interface
+    * Note this will create a lightweight tag.
+    * Update this to an annotated tag:
+      * `git pull --tags`
+      * `git tag -a -f <tag> <tag>`
+      * `git push --force origin`
diff --git a/src/third_party/google_benchmark/src/CMakeLists.txt b/src/third_party/google_benchmark/src/CMakeLists.txt
new file mode 100644
index 0000000..eab1428
--- /dev/null
+++ b/src/third_party/google_benchmark/src/CMakeLists.txt
@@ -0,0 +1,112 @@
+# Allow the source files to find headers in src/
+include(GNUInstallDirs)
+include_directories(${PROJECT_SOURCE_DIR}/src)
+
+if (DEFINED BENCHMARK_CXX_LINKER_FLAGS)
+  list(APPEND CMAKE_SHARED_LINKER_FLAGS ${BENCHMARK_CXX_LINKER_FLAGS})
+  list(APPEND CMAKE_MODULE_LINKER_FLAGS ${BENCHMARK_CXX_LINKER_FLAGS})
+endif()
+
+file(GLOB
+  SOURCE_FILES
+    *.cc
+    ${PROJECT_SOURCE_DIR}/include/benchmark/*.h
+    ${CMAKE_CURRENT_SOURCE_DIR}/*.h)
+file(GLOB BENCHMARK_MAIN "benchmark_main.cc")
+foreach(item ${BENCHMARK_MAIN})
+  list(REMOVE_ITEM SOURCE_FILES "${item}")
+endforeach()
+
+add_library(benchmark ${SOURCE_FILES})
+set_target_properties(benchmark PROPERTIES
+  OUTPUT_NAME "benchmark"
+  VERSION ${GENERIC_LIB_VERSION}
+  SOVERSION ${GENERIC_LIB_SOVERSION}
+)
+target_include_directories(benchmark PUBLIC
+    $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/../include>
+    )
+
+# Link threads.
+target_link_libraries(benchmark  ${BENCHMARK_CXX_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
+find_library(LIBRT rt)
+if(LIBRT)
+  target_link_libraries(benchmark ${LIBRT})
+endif()
+
+if(CMAKE_BUILD_TYPE)
+  string(TOUPPER ${CMAKE_BUILD_TYPE} CMAKE_BUILD_TYPE_UPPER)
+endif()
+if(NOT CMAKE_THREAD_LIBS_INIT AND "${CMAKE_CXX_FLAGS} ${CMAKE_CXX_FLAGS_${CMAKE_BUILD_TYPE_UPPER}}" MATCHES ".*-fsanitize=[^ ]*address.*")
+  message(WARNING "CMake's FindThreads.cmake did not fail, but CMAKE_THREAD_LIBS_INIT ended up being empty. This was fixed in https://github.com/Kitware/CMake/commit/d53317130e84898c5328c237186dbd995aaf1c12 Let's guess that -pthread is sufficient.")
+  target_link_libraries(benchmark -pthread)
+endif()
+
+# We need extra libraries on Windows
+if(${CMAKE_SYSTEM_NAME} MATCHES "Windows")
+  target_link_libraries(benchmark shlwapi)
+endif()
+
+# We need extra libraries on Solaris
+if(${CMAKE_SYSTEM_NAME} MATCHES "SunOS")
+  target_link_libraries(benchmark kstat)
+endif()
+
+# Benchmark main library
+add_library(benchmark_main "benchmark_main.cc")
+set_target_properties(benchmark_main PROPERTIES
+  OUTPUT_NAME "benchmark_main"
+  VERSION ${GENERIC_LIB_VERSION}
+  SOVERSION ${GENERIC_LIB_SOVERSION}
+)
+target_include_directories(benchmark_main PUBLIC
+    $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/../include>
+    )
+target_link_libraries(benchmark_main benchmark)
+
+
+set(generated_dir "${CMAKE_CURRENT_BINARY_DIR}/generated")
+
+set(version_config "${generated_dir}/${PROJECT_NAME}ConfigVersion.cmake")
+set(project_config "${generated_dir}/${PROJECT_NAME}Config.cmake")
+set(pkg_config "${generated_dir}/${PROJECT_NAME}.pc")
+set(targets_export_name "${PROJECT_NAME}Targets")
+
+set(namespace "${PROJECT_NAME}::")
+
+include(CMakePackageConfigHelpers)
+write_basic_package_version_file(
+  "${version_config}" VERSION ${GENERIC_LIB_VERSION} COMPATIBILITY SameMajorVersion
+)
+
+configure_file("${PROJECT_SOURCE_DIR}/cmake/Config.cmake.in" "${project_config}" @ONLY)
+configure_file("${PROJECT_SOURCE_DIR}/cmake/benchmark.pc.in" "${pkg_config}" @ONLY)
+
+if (BENCHMARK_ENABLE_INSTALL)
+  # Install target (will install the library to specified CMAKE_INSTALL_PREFIX variable)
+  install(
+    TARGETS benchmark benchmark_main
+    EXPORT ${targets_export_name}
+    ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}
+    LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
+    RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
+    INCLUDES DESTINATION ${CMAKE_INSTALL_INCLUDEDIR})
+
+  install(
+    DIRECTORY "${PROJECT_SOURCE_DIR}/include/benchmark"
+    DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
+    FILES_MATCHING PATTERN "*.*h")
+
+  install(
+      FILES "${project_config}" "${version_config}"
+      DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}")
+
+  install(
+      FILES "${pkg_config}"
+      DESTINATION "${CMAKE_INSTALL_LIBDIR}/pkgconfig")
+
+  install(
+      EXPORT "${targets_export_name}"
+      NAMESPACE "${namespace}"
+      DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}")
+endif()
diff --git a/src/third_party/google_benchmark/src/arraysize.h b/src/third_party/google_benchmark/src/arraysize.h
new file mode 100644
index 0000000..51a50f2
--- /dev/null
+++ b/src/third_party/google_benchmark/src/arraysize.h
@@ -0,0 +1,33 @@
+#ifndef BENCHMARK_ARRAYSIZE_H_
+#define BENCHMARK_ARRAYSIZE_H_
+
+#include "internal_macros.h"
+
+namespace benchmark {
+namespace internal {
+// The arraysize(arr) macro returns the # of elements in an array arr.
+// The expression is a compile-time constant, and therefore can be
+// used in defining new arrays, for example.  If you use arraysize on
+// a pointer by mistake, you will get a compile-time error.
+//
+
+// This template function declaration is used in defining arraysize.
+// Note that the function doesn't need an implementation, as we only
+// use its type.
+template <typename T, size_t N>
+char (&ArraySizeHelper(T (&array)[N]))[N];
+
+// That gcc wants both of these prototypes seems mysterious. VC, for
+// its part, can't decide which to use (another mystery). Matching of
+// template overloads: the final frontier.
+#ifndef COMPILER_MSVC
+template <typename T, size_t N>
+char (&ArraySizeHelper(const T (&array)[N]))[N];
+#endif
+
+#define arraysize(array) (sizeof(::benchmark::internal::ArraySizeHelper(array)))
+
+}  // end namespace internal
+}  // end namespace benchmark
+
+#endif  // BENCHMARK_ARRAYSIZE_H_
diff --git a/src/third_party/google_benchmark/src/benchmark.cc b/src/third_party/google_benchmark/src/benchmark.cc
new file mode 100644
index 0000000..b751b9c
--- /dev/null
+++ b/src/third_party/google_benchmark/src/benchmark.cc
@@ -0,0 +1,499 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "benchmark/benchmark.h"
+#include "benchmark_api_internal.h"
+#include "benchmark_runner.h"
+#include "internal_macros.h"
+
+#ifndef BENCHMARK_OS_WINDOWS
+#ifndef BENCHMARK_OS_FUCHSIA
+#include <sys/resource.h>
+#endif
+#include <sys/time.h>
+#include <unistd.h>
+#endif
+
+#include <algorithm>
+#include <atomic>
+#include <condition_variable>
+#include <cstdio>
+#include <cstdlib>
+#include <fstream>
+#include <iostream>
+#include <memory>
+#include <string>
+#include <thread>
+#include <utility>
+
+#include "check.h"
+#include "colorprint.h"
+#include "commandlineflags.h"
+#include "complexity.h"
+#include "counter.h"
+#include "internal_macros.h"
+#include "log.h"
+#include "mutex.h"
+#include "re.h"
+#include "statistics.h"
+#include "string_util.h"
+#include "thread_manager.h"
+#include "thread_timer.h"
+
+// Print a list of benchmarks. This option overrides all other options.
+DEFINE_bool(benchmark_list_tests, false);
+
+// A regular expression that specifies the set of benchmarks to execute.  If
+// this flag is empty, or if this flag is the string \"all\", all benchmarks
+// linked into the binary are run.
+DEFINE_string(benchmark_filter, ".");
+
+// Minimum number of seconds we should run benchmark before results are
+// considered significant.  For cpu-time based tests, this is the lower bound
+// on the total cpu time used by all threads that make up the test.  For
+// real-time based tests, this is the lower bound on the elapsed time of the
+// benchmark execution, regardless of number of threads.
+DEFINE_double(benchmark_min_time, 0.5);
+
+// The number of runs of each benchmark. If greater than 1, the mean and
+// standard deviation of the runs will be reported.
+DEFINE_int32(benchmark_repetitions, 1);
+
+// Report the result of each benchmark repetition. When 'true' is specified
+// only the mean, standard deviation, and other statistics are reported for
+// repeated benchmarks. Affects all reporters.
+DEFINE_bool(benchmark_report_aggregates_only, false);
+
+// Display the result of each benchmark repetition. When 'true' is specified
+// only the mean, standard deviation, and other statistics are displayed for
+// repeated benchmarks. Unlike benchmark_report_aggregates_only, this only
+// affects the display reporter, but *NOT* the file reporter, which will still
+// contain all the output.
+DEFINE_bool(benchmark_display_aggregates_only, false);
+
+// The format to use for console output.
+// Valid values are 'console', 'json', or 'csv'.
+DEFINE_string(benchmark_format, "console");
+
+// The format to use for file output.
+// Valid values are 'console', 'json', or 'csv'.
+DEFINE_string(benchmark_out_format, "json");
+
+// The file to write additional output to.
+DEFINE_string(benchmark_out, "");
+
+// Whether to use colors in the output.  Valid values:
+// 'true'/'yes'/1, 'false'/'no'/0, and 'auto'. 'auto' means to use colors if
+// the output is being sent to a terminal and the TERM environment variable is
+// set to a terminal type that supports colors.
+DEFINE_string(benchmark_color, "auto");
+
+// Whether to use tabular format when printing user counters to the console.
+// Valid values: 'true'/'yes'/1, 'false'/'no'/0.  Defaults to false.
+DEFINE_bool(benchmark_counters_tabular, false);
+
+// The level of verbose logging to output
+DEFINE_int32(v, 0);
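+
+// Illustrative invocation of a benchmark binary built with these flags (the
+// binary and benchmark names are placeholders):
+//   ./my_benchmarks --benchmark_filter=BM_Copy.* --benchmark_repetitions=5 \
+//       --benchmark_report_aggregates_only=true --benchmark_format=console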
+
+namespace benchmark {
+
+namespace internal {
+
+// FIXME: wouldn't LTO mess this up?
+void UseCharPointer(char const volatile*) {}
+
+}  // namespace internal
+
+State::State(IterationCount max_iters, const std::vector<int64_t>& ranges,
+             int thread_i, int n_threads, internal::ThreadTimer* timer,
+             internal::ThreadManager* manager)
+    : total_iterations_(0),
+      batch_leftover_(0),
+      max_iterations(max_iters),
+      started_(false),
+      finished_(false),
+      error_occurred_(false),
+      range_(ranges),
+      complexity_n_(0),
+      counters(),
+      thread_index(thread_i),
+      threads(n_threads),
+      timer_(timer),
+      manager_(manager) {
+  CHECK(max_iterations != 0) << "At least one iteration must be run";
+  CHECK_LT(thread_index, threads) << "thread_index must be less than threads";
+
+  // Note: The use of offsetof below is technically undefined until C++17
+  // because State is not a standard layout type. However, all compilers
+  // currently provide well-defined behavior as an extension (which is
+  // demonstrated since constexpr evaluation must diagnose all undefined
+  // behavior). However, GCC and Clang also warn about this use of offsetof,
+  // which must be suppressed.
+#if defined(__INTEL_COMPILER)
+#pragma warning push
+#pragma warning(disable : 1875)
+#elif defined(__GNUC__)
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Winvalid-offsetof"
+#endif
+  // Offset tests to ensure commonly accessed data is on the first cache line.
+  const int cache_line_size = 64;
+  static_assert(offsetof(State, error_occurred_) <=
+                    (cache_line_size - sizeof(error_occurred_)),
+                "");
+#if defined(__INTEL_COMPILER)
+#pragma warning pop
+#elif defined(__GNUC__)
+#pragma GCC diagnostic pop
+#endif
+}
+
+void State::PauseTiming() {
+  // Add in time accumulated so far
+  CHECK(started_ && !finished_ && !error_occurred_);
+  timer_->StopTimer();
+}
+
+void State::ResumeTiming() {
+  CHECK(started_ && !finished_ && !error_occurred_);
+  timer_->StartTimer();
+}
+
+void State::SkipWithError(const char* msg) {
+  CHECK(msg);
+  error_occurred_ = true;
+  {
+    MutexLock l(manager_->GetBenchmarkMutex());
+    if (manager_->results.has_error_ == false) {
+      manager_->results.error_message_ = msg;
+      manager_->results.has_error_ = true;
+    }
+  }
+  total_iterations_ = 0;
+  if (timer_->running()) timer_->StopTimer();
+}
+
+void State::SetIterationTime(double seconds) {
+  timer_->SetIterationTime(seconds);
+}
+
+void State::SetLabel(const char* label) {
+  MutexLock l(manager_->GetBenchmarkMutex());
+  manager_->results.report_label_ = label;
+}
+
+void State::StartKeepRunning() {
+  CHECK(!started_ && !finished_);
+  started_ = true;
+  total_iterations_ = error_occurred_ ? 0 : max_iterations;
+  manager_->StartStopBarrier();
+  if (!error_occurred_) ResumeTiming();
+}
+
+void State::FinishKeepRunning() {
+  CHECK(started_ && (!finished_ || error_occurred_));
+  if (!error_occurred_) {
+    PauseTiming();
+  }
+  // Total iterations has now wrapped around past 0. Fix this.
+  total_iterations_ = 0;
+  finished_ = true;
+  manager_->StartStopBarrier();
+}
+
+namespace internal {
+namespace {
+
+void RunBenchmarks(const std::vector<BenchmarkInstance>& benchmarks,
+                   BenchmarkReporter* display_reporter,
+                   BenchmarkReporter* file_reporter) {
+  // Note the file_reporter can be null.
+  CHECK(display_reporter != nullptr);
+
+  // Determine the width of the name field using a minimum width of 10.
+  bool might_have_aggregates = FLAGS_benchmark_repetitions > 1;
+  size_t name_field_width = 10;
+  size_t stat_field_width = 0;
+  for (const BenchmarkInstance& benchmark : benchmarks) {
+    name_field_width =
+        std::max<size_t>(name_field_width, benchmark.name.str().size());
+    might_have_aggregates |= benchmark.repetitions > 1;
+
+    for (const auto& Stat : *benchmark.statistics)
+      stat_field_width = std::max<size_t>(stat_field_width, Stat.name_.size());
+  }
+  if (might_have_aggregates) name_field_width += 1 + stat_field_width;
+
+  // Print header here
+  BenchmarkReporter::Context context;
+  context.name_field_width = name_field_width;
+
+  // Keep track of running times of all instances of current benchmark
+  std::vector<BenchmarkReporter::Run> complexity_reports;
+
+  // We flush streams after invoking reporter methods that write to them. This
+  // ensures users get timely updates even when streams are not line-buffered.
+  auto flushStreams = [](BenchmarkReporter* reporter) {
+    if (!reporter) return;
+    std::flush(reporter->GetOutputStream());
+    std::flush(reporter->GetErrorStream());
+  };
+
+  if (display_reporter->ReportContext(context) &&
+      (!file_reporter || file_reporter->ReportContext(context))) {
+    flushStreams(display_reporter);
+    flushStreams(file_reporter);
+
+    for (const auto& benchmark : benchmarks) {
+      RunResults run_results = RunBenchmark(benchmark, &complexity_reports);
+
+      auto report = [&run_results](BenchmarkReporter* reporter,
+                                   bool report_aggregates_only) {
+        assert(reporter);
+        // If there are no aggregates, always output the non-aggregates.
+        report_aggregates_only &= !run_results.aggregates_only.empty();
+        if (!report_aggregates_only)
+          reporter->ReportRuns(run_results.non_aggregates);
+        if (!run_results.aggregates_only.empty())
+          reporter->ReportRuns(run_results.aggregates_only);
+      };
+
+      report(display_reporter, run_results.display_report_aggregates_only);
+      if (file_reporter)
+        report(file_reporter, run_results.file_report_aggregates_only);
+
+      flushStreams(display_reporter);
+      flushStreams(file_reporter);
+    }
+  }
+  display_reporter->Finalize();
+  if (file_reporter) file_reporter->Finalize();
+  flushStreams(display_reporter);
+  flushStreams(file_reporter);
+}
+
+// Disable deprecated warnings temporarily because we need to reference
+// CSVReporter but don't want to trigger -Werror=-Wdeprecated
+#ifdef __GNUC__
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wdeprecated"
+#endif
+
+std::unique_ptr<BenchmarkReporter> CreateReporter(
+    std::string const& name, ConsoleReporter::OutputOptions output_opts) {
+  typedef std::unique_ptr<BenchmarkReporter> PtrType;
+  if (name == "console") {
+    return PtrType(new ConsoleReporter(output_opts));
+  } else if (name == "json") {
+    return PtrType(new JSONReporter);
+  } else if (name == "csv") {
+    return PtrType(new CSVReporter);
+  } else {
+    std::cerr << "Unexpected format: '" << name << "'\n";
+    std::exit(1);
+  }
+}
+
+#ifdef __GNUC__
+#pragma GCC diagnostic pop
+#endif
+
+}  // end namespace
+
+bool IsZero(double n) {
+  return std::abs(n) < std::numeric_limits<double>::epsilon();
+}
+
+ConsoleReporter::OutputOptions GetOutputOptions(bool force_no_color) {
+  int output_opts = ConsoleReporter::OO_Defaults;
+  auto is_benchmark_color = [force_no_color]() -> bool {
+    if (force_no_color) {
+      return false;
+    }
+    if (FLAGS_benchmark_color == "auto") {
+      return IsColorTerminal();
+    }
+    return IsTruthyFlagValue(FLAGS_benchmark_color);
+  };
+  if (is_benchmark_color()) {
+    output_opts |= ConsoleReporter::OO_Color;
+  } else {
+    output_opts &= ~ConsoleReporter::OO_Color;
+  }
+  if (FLAGS_benchmark_counters_tabular) {
+    output_opts |= ConsoleReporter::OO_Tabular;
+  } else {
+    output_opts &= ~ConsoleReporter::OO_Tabular;
+  }
+  return static_cast<ConsoleReporter::OutputOptions>(output_opts);
+}
+
+}  // end namespace internal
+
+size_t RunSpecifiedBenchmarks() {
+  return RunSpecifiedBenchmarks(nullptr, nullptr);
+}
+
+size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter) {
+  return RunSpecifiedBenchmarks(display_reporter, nullptr);
+}
+
+size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter,
+                              BenchmarkReporter* file_reporter) {
+  std::string spec = FLAGS_benchmark_filter;
+  if (spec.empty() || spec == "all")
+    spec = ".";  // Regexp that matches all benchmarks
+
+  // Setup the reporters
+  std::ofstream output_file;
+  std::unique_ptr<BenchmarkReporter> default_display_reporter;
+  std::unique_ptr<BenchmarkReporter> default_file_reporter;
+  if (!display_reporter) {
+    default_display_reporter = internal::CreateReporter(
+        FLAGS_benchmark_format, internal::GetOutputOptions());
+    display_reporter = default_display_reporter.get();
+  }
+  auto& Out = display_reporter->GetOutputStream();
+  auto& Err = display_reporter->GetErrorStream();
+
+  std::string const& fname = FLAGS_benchmark_out;
+  if (fname.empty() && file_reporter) {
+    Err << "A custom file reporter was provided but "
+           "--benchmark_out=<file> was not specified."
+        << std::endl;
+    std::exit(1);
+  }
+  if (!fname.empty()) {
+    output_file.open(fname);
+    if (!output_file.is_open()) {
+      Err << "invalid file name: '" << fname << std::endl;
+      std::exit(1);
+    }
+    if (!file_reporter) {
+      default_file_reporter = internal::CreateReporter(
+          FLAGS_benchmark_out_format, ConsoleReporter::OO_None);
+      file_reporter = default_file_reporter.get();
+    }
+    file_reporter->SetOutputStream(&output_file);
+    file_reporter->SetErrorStream(&output_file);
+  }
+
+  std::vector<internal::BenchmarkInstance> benchmarks;
+  if (!FindBenchmarksInternal(spec, &benchmarks, &Err)) return 0;
+
+  if (benchmarks.empty()) {
+    Err << "Failed to match any benchmarks against regex: " << spec << "\n";
+    return 0;
+  }
+
+  if (FLAGS_benchmark_list_tests) {
+    for (auto const& benchmark : benchmarks)
+      Out << benchmark.name.str() << "\n";
+  } else {
+    internal::RunBenchmarks(benchmarks, display_reporter, file_reporter);
+  }
+
+  return benchmarks.size();
+}
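+
+// Illustrative sketch of the usual entry point that ties these overloads
+// together, roughly what BENCHMARK_MAIN() expands to:
+//
+//   int main(int argc, char** argv) {
+//     benchmark::Initialize(&argc, argv);
+//     if (benchmark::ReportUnrecognizedArguments(argc, argv)) return 1;
+//     benchmark::RunSpecifiedBenchmarks();
+//     return 0;
+//   }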
+
+void RegisterMemoryManager(MemoryManager* manager) {
+  internal::memory_manager = manager;
+}
+
+namespace internal {
+
+void PrintUsageAndExit() {
+  fprintf(stdout,
+          "benchmark"
+          " [--benchmark_list_tests={true|false}]\n"
+          "          [--benchmark_filter=<regex>]\n"
+          "          [--benchmark_min_time=<min_time>]\n"
+          "          [--benchmark_repetitions=<num_repetitions>]\n"
+          "          [--benchmark_report_aggregates_only={true|false}]\n"
+          "          [--benchmark_display_aggregates_only={true|false}]\n"
+          "          [--benchmark_format=<console|json|csv>]\n"
+          "          [--benchmark_out=<filename>]\n"
+          "          [--benchmark_out_format=<json|console|csv>]\n"
+          "          [--benchmark_color={auto|true|false}]\n"
+          "          [--benchmark_counters_tabular={true|false}]\n"
+          "          [--v=<verbosity>]\n");
+  exit(0);
+}
+
+void ParseCommandLineFlags(int* argc, char** argv) {
+  using namespace benchmark;
+  BenchmarkReporter::Context::executable_name =
+      (argc && *argc > 0) ? argv[0] : "unknown";
+  for (int i = 1; argc && i < *argc; ++i) {
+    if (ParseBoolFlag(argv[i], "benchmark_list_tests",
+                      &FLAGS_benchmark_list_tests) ||
+        ParseStringFlag(argv[i], "benchmark_filter", &FLAGS_benchmark_filter) ||
+        ParseDoubleFlag(argv[i], "benchmark_min_time",
+                        &FLAGS_benchmark_min_time) ||
+        ParseInt32Flag(argv[i], "benchmark_repetitions",
+                       &FLAGS_benchmark_repetitions) ||
+        ParseBoolFlag(argv[i], "benchmark_report_aggregates_only",
+                      &FLAGS_benchmark_report_aggregates_only) ||
+        ParseBoolFlag(argv[i], "benchmark_display_aggregates_only",
+                      &FLAGS_benchmark_display_aggregates_only) ||
+        ParseStringFlag(argv[i], "benchmark_format", &FLAGS_benchmark_format) ||
+        ParseStringFlag(argv[i], "benchmark_out", &FLAGS_benchmark_out) ||
+        ParseStringFlag(argv[i], "benchmark_out_format",
+                        &FLAGS_benchmark_out_format) ||
+        ParseStringFlag(argv[i], "benchmark_color", &FLAGS_benchmark_color) ||
+        // "color_print" is the deprecated name for "benchmark_color".
+        // TODO: Remove this.
+        ParseStringFlag(argv[i], "color_print", &FLAGS_benchmark_color) ||
+        ParseBoolFlag(argv[i], "benchmark_counters_tabular",
+                      &FLAGS_benchmark_counters_tabular) ||
+        ParseInt32Flag(argv[i], "v", &FLAGS_v)) {
+      for (int j = i; j != *argc - 1; ++j) argv[j] = argv[j + 1];
+
+      --(*argc);
+      --i;
+    } else if (IsFlag(argv[i], "help")) {
+      PrintUsageAndExit();
+    }
+  }
+  for (auto const* flag :
+       {&FLAGS_benchmark_format, &FLAGS_benchmark_out_format})
+    if (*flag != "console" && *flag != "json" && *flag != "csv") {
+      PrintUsageAndExit();
+    }
+  if (FLAGS_benchmark_color.empty()) {
+    PrintUsageAndExit();
+  }
+}
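+
+// Illustrative behaviour, assuming the standard flag syntax: given
+//   argv = {"prog", "--benchmark_filter=BM_.*", "--v=2"}
+// both flags are recognized, shifted out of argv, and *argc drops to 1, so a
+// later ReportUnrecognizedArguments() call finds nothing left to complain
+// about.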
+
+int InitializeStreams() {
+  static std::ios_base::Init init;
+  return 0;
+}
+
+}  // end namespace internal
+
+void Initialize(int* argc, char** argv) {
+  internal::ParseCommandLineFlags(argc, argv);
+  internal::LogLevel() = FLAGS_v;
+}
+
+bool ReportUnrecognizedArguments(int argc, char** argv) {
+  for (int i = 1; i < argc; ++i) {
+    fprintf(stderr, "%s: error: unrecognized command-line flag: %s\n", argv[0],
+            argv[i]);
+  }
+  return argc > 1;
+}
+
+}  // end namespace benchmark
diff --git a/src/third_party/google_benchmark/src/benchmark_api_internal.cc b/src/third_party/google_benchmark/src/benchmark_api_internal.cc
new file mode 100644
index 0000000..d468a25
--- /dev/null
+++ b/src/third_party/google_benchmark/src/benchmark_api_internal.cc
@@ -0,0 +1,15 @@
+#include "benchmark_api_internal.h"
+
+namespace benchmark {
+namespace internal {
+
+State BenchmarkInstance::Run(IterationCount iters, int thread_id,
+                             internal::ThreadTimer* timer,
+                             internal::ThreadManager* manager) const {
+  State st(iters, arg, thread_id, threads, timer, manager);
+  benchmark->Run(st);
+  return st;
+}
+
+}  // namespace internal
+}  // namespace benchmark
diff --git a/src/third_party/google_benchmark/src/benchmark_api_internal.h b/src/third_party/google_benchmark/src/benchmark_api_internal.h
new file mode 100644
index 0000000..264eff9
--- /dev/null
+++ b/src/third_party/google_benchmark/src/benchmark_api_internal.h
@@ -0,0 +1,53 @@
+#ifndef BENCHMARK_API_INTERNAL_H
+#define BENCHMARK_API_INTERNAL_H
+
+#include "benchmark/benchmark.h"
+#include "commandlineflags.h"
+
+#include <cmath>
+#include <iosfwd>
+#include <limits>
+#include <memory>
+#include <string>
+#include <vector>
+
+namespace benchmark {
+namespace internal {
+
+// Information kept per benchmark we may want to run
+struct BenchmarkInstance {
+  BenchmarkName name;
+  Benchmark* benchmark;
+  AggregationReportMode aggregation_report_mode;
+  std::vector<int64_t> arg;
+  TimeUnit time_unit;
+  int range_multiplier;
+  bool measure_process_cpu_time;
+  bool use_real_time;
+  bool use_manual_time;
+  BigO complexity;
+  BigOFunc* complexity_lambda;
+  UserCounters counters;
+  const std::vector<Statistics>* statistics;
+  bool last_benchmark_instance;
+  int repetitions;
+  double min_time;
+  IterationCount iterations;
+  int threads;  // Number of concurrent threads to use
+
+  State Run(IterationCount iters, int thread_id, internal::ThreadTimer* timer,
+            internal::ThreadManager* manager) const;
+};
+
+bool FindBenchmarksInternal(const std::string& re,
+                            std::vector<BenchmarkInstance>* benchmarks,
+                            std::ostream* Err);
+
+bool IsZero(double n);
+
+ConsoleReporter::OutputOptions GetOutputOptions(bool force_no_color = false);
+
+}  // end namespace internal
+}  // end namespace benchmark
+
+#endif  // BENCHMARK_API_INTERNAL_H
diff --git a/src/third_party/google_benchmark/src/benchmark_main.cc b/src/third_party/google_benchmark/src/benchmark_main.cc
new file mode 100644
index 0000000..b3b2478
--- /dev/null
+++ b/src/third_party/google_benchmark/src/benchmark_main.cc
@@ -0,0 +1,17 @@
+// Copyright 2018 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "benchmark/benchmark.h"
+
+BENCHMARK_MAIN();
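+
+// Illustrative usage: a translation unit linked against this main() only
+// needs to define and register benchmarks, e.g. this hypothetical
+// string-copy benchmark:
+//
+//   static void BM_StringCopy(benchmark::State& state) {
+//     std::string src = "hello";
+//     for (auto _ : state) {
+//       std::string copy(src);
+//     }
+//   }
+//   BENCHMARK(BM_StringCopy);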
diff --git a/src/third_party/google_benchmark/src/benchmark_name.cc b/src/third_party/google_benchmark/src/benchmark_name.cc
new file mode 100644
index 0000000..2a17ebc
--- /dev/null
+++ b/src/third_party/google_benchmark/src/benchmark_name.cc
@@ -0,0 +1,58 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <benchmark/benchmark.h>
+
+namespace benchmark {
+
+namespace {
+
+// Compute the total size of a pack of std::strings
+size_t size_impl() { return 0; }
+
+template <typename Head, typename... Tail>
+size_t size_impl(const Head& head, const Tail&... tail) {
+  return head.size() + size_impl(tail...);
+}
+
+// Join a pack of std::strings using a delimiter
+// TODO: use absl::StrJoin
+void join_impl(std::string&, char) {}
+
+template <typename Head, typename... Tail>
+void join_impl(std::string& s, const char delimiter, const Head& head,
+               const Tail&... tail) {
+  if (!s.empty() && !head.empty()) {
+    s += delimiter;
+  }
+
+  s += head;
+
+  join_impl(s, delimiter, tail...);
+}
+
+template <typename... Ts>
+std::string join(char delimiter, const Ts&... ts) {
+  std::string s;
+  s.reserve(sizeof...(Ts) + size_impl(ts...));
+  join_impl(s, delimiter, ts...);
+  return s;
+}
+}  // namespace
+
+std::string BenchmarkName::str() const {
+  return join('/', function_name, args, min_time, iterations, repetitions,
+              time_type, threads);
+}
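+
+// Illustrative result: for a hypothetical family "BM_Copy" run with arg 64,
+// min_time 0.5 and 2 threads, the non-empty components are joined with '/'
+// to give "BM_Copy/64/min_time:0.500/threads:2".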
+}  // namespace benchmark
diff --git a/src/third_party/google_benchmark/src/benchmark_register.cc b/src/third_party/google_benchmark/src/benchmark_register.cc
new file mode 100644
index 0000000..cca39b2
--- /dev/null
+++ b/src/third_party/google_benchmark/src/benchmark_register.cc
@@ -0,0 +1,506 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "benchmark_register.h"
+
+#ifndef BENCHMARK_OS_WINDOWS
+#ifndef BENCHMARK_OS_FUCHSIA
+#include <sys/resource.h>
+#endif
+#include <sys/time.h>
+#include <unistd.h>
+#endif
+
+#include <algorithm>
+#include <atomic>
+#include <condition_variable>
+#include <cstdio>
+#include <cstdlib>
+#include <cstring>
+#include <fstream>
+#include <iostream>
+#include <memory>
+#include <sstream>
+#include <thread>
+
+#ifndef __STDC_FORMAT_MACROS
+#define __STDC_FORMAT_MACROS
+#endif
+#include <inttypes.h>
+
+#include "benchmark/benchmark.h"
+#include "benchmark_api_internal.h"
+#include "check.h"
+#include "commandlineflags.h"
+#include "complexity.h"
+#include "internal_macros.h"
+#include "log.h"
+#include "mutex.h"
+#include "re.h"
+#include "statistics.h"
+#include "string_util.h"
+#include "timers.h"
+
+namespace benchmark {
+
+namespace {
+// For non-dense Range, intermediate values are powers of kRangeMultiplier.
+static const int kRangeMultiplier = 8;
+// The size of a benchmark family is the number of inputs the benchmark is
+// repeated on. If this is "large", warn the user during configuration.
+static const size_t kMaxFamilySize = 100;
+}  // end namespace
+
+namespace internal {
+
+//=============================================================================//
+//                         BenchmarkFamilies
+//=============================================================================//
+
+// Class for managing registered benchmarks.  Note that each registered
+// benchmark identifies a family of related benchmarks to run.
+class BenchmarkFamilies {
+ public:
+  static BenchmarkFamilies* GetInstance();
+
+  // Registers a benchmark family and returns the index assigned to it.
+  size_t AddBenchmark(std::unique_ptr<Benchmark> family);
+
+  // Clear all registered benchmark families.
+  void ClearBenchmarks();
+
+  // Extract the list of benchmark instances that match the specified
+  // regular expression.
+  bool FindBenchmarks(std::string re,
+                      std::vector<BenchmarkInstance>* benchmarks,
+                      std::ostream* Err);
+
+ private:
+  BenchmarkFamilies() {}
+
+  std::vector<std::unique_ptr<Benchmark>> families_;
+  Mutex mutex_;
+};
+
+BenchmarkFamilies* BenchmarkFamilies::GetInstance() {
+  static BenchmarkFamilies instance;
+  return &instance;
+}
+
+size_t BenchmarkFamilies::AddBenchmark(std::unique_ptr<Benchmark> family) {
+  MutexLock l(mutex_);
+  size_t index = families_.size();
+  families_.push_back(std::move(family));
+  return index;
+}
+
+void BenchmarkFamilies::ClearBenchmarks() {
+  MutexLock l(mutex_);
+  families_.clear();
+  families_.shrink_to_fit();
+}
+
+bool BenchmarkFamilies::FindBenchmarks(
+    std::string spec, std::vector<BenchmarkInstance>* benchmarks,
+    std::ostream* ErrStream) {
+  CHECK(ErrStream);
+  auto& Err = *ErrStream;
+  // Make regular expression out of command-line flag
+  std::string error_msg;
+  Regex re;
+  bool isNegativeFilter = false;
+  if (spec[0] == '-') {
+    spec.replace(0, 1, "");
+    isNegativeFilter = true;
+  }
+  if (!re.Init(spec, &error_msg)) {
+    Err << "Could not compile benchmark re: " << error_msg << std::endl;
+    return false;
+  }
+
+  // Special list of thread counts to use when none are specified
+  const std::vector<int> one_thread = {1};
+
+  MutexLock l(mutex_);
+  for (std::unique_ptr<Benchmark>& family : families_) {
+    // Family was deleted or benchmark doesn't match
+    if (!family) continue;
+
+    if (family->ArgsCnt() == -1) {
+      family->Args({});
+    }
+    const std::vector<int>* thread_counts =
+        (family->thread_counts_.empty()
+             ? &one_thread
+             : &static_cast<const std::vector<int>&>(family->thread_counts_));
+    const size_t family_size = family->args_.size() * thread_counts->size();
+    // The benchmark will be run on at least 'family_size' different inputs.
+    // If 'family_size' is very large, warn the user.
+    if (family_size > kMaxFamilySize) {
+      Err << "The number of inputs is very large. " << family->name_
+          << " will be repeated at least " << family_size << " times.\n";
+    }
+    // Reserve in the special case of the regex ".", since we then know the
+    // final family size.
+    if (spec == ".") benchmarks->reserve(family_size);
+
+    for (auto const& args : family->args_) {
+      for (int num_threads : *thread_counts) {
+        BenchmarkInstance instance;
+        instance.name.function_name = family->name_;
+        instance.benchmark = family.get();
+        instance.aggregation_report_mode = family->aggregation_report_mode_;
+        instance.arg = args;
+        instance.time_unit = family->time_unit_;
+        instance.range_multiplier = family->range_multiplier_;
+        instance.min_time = family->min_time_;
+        instance.iterations = family->iterations_;
+        instance.repetitions = family->repetitions_;
+        instance.measure_process_cpu_time = family->measure_process_cpu_time_;
+        instance.use_real_time = family->use_real_time_;
+        instance.use_manual_time = family->use_manual_time_;
+        instance.complexity = family->complexity_;
+        instance.complexity_lambda = family->complexity_lambda_;
+        instance.statistics = &family->statistics_;
+        instance.threads = num_threads;
+
+        // Add arguments to instance name
+        size_t arg_i = 0;
+        for (auto const& arg : args) {
+          if (!instance.name.args.empty()) {
+            instance.name.args += '/';
+          }
+
+          if (arg_i < family->arg_names_.size()) {
+            const auto& arg_name = family->arg_names_[arg_i];
+            if (!arg_name.empty()) {
+              instance.name.args += StrFormat("%s:", arg_name.c_str());
+            }
+          }
+
+          instance.name.args += StrFormat("%" PRId64, arg);
+          ++arg_i;
+        }
+
+        if (!IsZero(family->min_time_))
+          instance.name.min_time =
+              StrFormat("min_time:%0.3f", family->min_time_);
+        if (family->iterations_ != 0) {
+          instance.name.iterations =
+              StrFormat("iterations:%lu",
+                        static_cast<unsigned long>(family->iterations_));
+        }
+        if (family->repetitions_ != 0)
+          instance.name.repetitions =
+              StrFormat("repeats:%d", family->repetitions_);
+
+        if (family->measure_process_cpu_time_) {
+          instance.name.time_type = "process_time";
+        }
+
+        if (family->use_manual_time_) {
+          if (!instance.name.time_type.empty()) {
+            instance.name.time_type += '/';
+          }
+          instance.name.time_type += "manual_time";
+        } else if (family->use_real_time_) {
+          if (!instance.name.time_type.empty()) {
+            instance.name.time_type += '/';
+          }
+          instance.name.time_type += "real_time";
+        }
+
+        // Add the number of threads used to the name
+        if (!family->thread_counts_.empty()) {
+          instance.name.threads = StrFormat("threads:%d", instance.threads);
+        }
+
+        const auto full_name = instance.name.str();
+        if ((re.Match(full_name) && !isNegativeFilter) ||
+            (!re.Match(full_name) && isNegativeFilter)) {
+          instance.last_benchmark_instance = (&args == &family->args_.back());
+          benchmarks->push_back(std::move(instance));
+        }
+      }
+    }
+  }
+  return true;
+}
+
+Benchmark* RegisterBenchmarkInternal(Benchmark* bench) {
+  std::unique_ptr<Benchmark> bench_ptr(bench);
+  BenchmarkFamilies* families = BenchmarkFamilies::GetInstance();
+  families->AddBenchmark(std::move(bench_ptr));
+  return bench;
+}
+
+// FIXME: This function is a hack so that benchmark.cc can access
+// `BenchmarkFamilies`
+bool FindBenchmarksInternal(const std::string& re,
+                            std::vector<BenchmarkInstance>* benchmarks,
+                            std::ostream* Err) {
+  return BenchmarkFamilies::GetInstance()->FindBenchmarks(re, benchmarks, Err);
+}
+
+//=============================================================================//
+//                               Benchmark
+//=============================================================================//
+
+Benchmark::Benchmark(const char* name)
+    : name_(name),
+      aggregation_report_mode_(ARM_Unspecified),
+      time_unit_(kNanosecond),
+      range_multiplier_(kRangeMultiplier),
+      min_time_(0),
+      iterations_(0),
+      repetitions_(0),
+      measure_process_cpu_time_(false),
+      use_real_time_(false),
+      use_manual_time_(false),
+      complexity_(oNone),
+      complexity_lambda_(nullptr) {
+  ComputeStatistics("mean", StatisticsMean);
+  ComputeStatistics("median", StatisticsMedian);
+  ComputeStatistics("stddev", StatisticsStdDev);
+}
+
+Benchmark::~Benchmark() {}
+
+Benchmark* Benchmark::Arg(int64_t x) {
+  CHECK(ArgsCnt() == -1 || ArgsCnt() == 1);
+  args_.push_back({x});
+  return this;
+}
+
+Benchmark* Benchmark::Unit(TimeUnit unit) {
+  time_unit_ = unit;
+  return this;
+}
+
+Benchmark* Benchmark::Range(int64_t start, int64_t limit) {
+  CHECK(ArgsCnt() == -1 || ArgsCnt() == 1);
+  std::vector<int64_t> arglist;
+  AddRange(&arglist, start, limit, range_multiplier_);
+
+  for (int64_t i : arglist) {
+    args_.push_back({i});
+  }
+  return this;
+}
+
+Benchmark* Benchmark::Ranges(
+    const std::vector<std::pair<int64_t, int64_t>>& ranges) {
+  CHECK(ArgsCnt() == -1 || ArgsCnt() == static_cast<int>(ranges.size()));
+  std::vector<std::vector<int64_t>> arglists(ranges.size());
+  std::size_t total = 1;
+  for (std::size_t i = 0; i < ranges.size(); i++) {
+    AddRange(&arglists[i], ranges[i].first, ranges[i].second,
+             range_multiplier_);
+    total *= arglists[i].size();
+  }
+
+  std::vector<std::size_t> ctr(arglists.size(), 0);
+
+  for (std::size_t i = 0; i < total; i++) {
+    std::vector<int64_t> tmp;
+    tmp.reserve(arglists.size());
+
+    for (std::size_t j = 0; j < arglists.size(); j++) {
+      tmp.push_back(arglists[j].at(ctr[j]));
+    }
+
+    args_.push_back(std::move(tmp));
+
+    for (std::size_t j = 0; j < arglists.size(); j++) {
+      if (ctr[j] + 1 < arglists[j].size()) {
+        ++ctr[j];
+        break;
+      }
+      ctr[j] = 0;
+    }
+  }
+  return this;
+}
+
+Benchmark* Benchmark::ArgName(const std::string& name) {
+  CHECK(ArgsCnt() == -1 || ArgsCnt() == 1);
+  arg_names_ = {name};
+  return this;
+}
+
+Benchmark* Benchmark::ArgNames(const std::vector<std::string>& names) {
+  CHECK(ArgsCnt() == -1 || ArgsCnt() == static_cast<int>(names.size()));
+  arg_names_ = names;
+  return this;
+}
+
+Benchmark* Benchmark::DenseRange(int64_t start, int64_t limit, int step) {
+  CHECK(ArgsCnt() == -1 || ArgsCnt() == 1);
+  CHECK_LE(start, limit);
+  for (int64_t arg = start; arg <= limit; arg += step) {
+    args_.push_back({arg});
+  }
+  return this;
+}
+
+Benchmark* Benchmark::Args(const std::vector<int64_t>& args) {
+  CHECK(ArgsCnt() == -1 || ArgsCnt() == static_cast<int>(args.size()));
+  args_.push_back(args);
+  return this;
+}
+
+Benchmark* Benchmark::Apply(void (*custom_arguments)(Benchmark* benchmark)) {
+  custom_arguments(this);
+  return this;
+}
+
+Benchmark* Benchmark::RangeMultiplier(int multiplier) {
+  CHECK(multiplier > 1);
+  range_multiplier_ = multiplier;
+  return this;
+}
+
+Benchmark* Benchmark::MinTime(double t) {
+  CHECK(t > 0.0);
+  CHECK(iterations_ == 0);
+  min_time_ = t;
+  return this;
+}
+
+Benchmark* Benchmark::Iterations(IterationCount n) {
+  CHECK(n > 0);
+  CHECK(IsZero(min_time_));
+  iterations_ = n;
+  return this;
+}
+
+Benchmark* Benchmark::Repetitions(int n) {
+  CHECK(n > 0);
+  repetitions_ = n;
+  return this;
+}
+
+Benchmark* Benchmark::ReportAggregatesOnly(bool value) {
+  aggregation_report_mode_ = value ? ARM_ReportAggregatesOnly : ARM_Default;
+  return this;
+}
+
+Benchmark* Benchmark::DisplayAggregatesOnly(bool value) {
+  // If we were called, the report mode is no longer 'unspecified', in any case.
+  aggregation_report_mode_ = static_cast<AggregationReportMode>(
+      aggregation_report_mode_ | ARM_Default);
+
+  if (value) {
+    aggregation_report_mode_ = static_cast<AggregationReportMode>(
+        aggregation_report_mode_ | ARM_DisplayReportAggregatesOnly);
+  } else {
+    aggregation_report_mode_ = static_cast<AggregationReportMode>(
+        aggregation_report_mode_ & ~ARM_DisplayReportAggregatesOnly);
+  }
+
+  return this;
+}
+
+Benchmark* Benchmark::MeasureProcessCPUTime() {
+  // Can be used together with UseRealTime() / UseManualTime().
+  measure_process_cpu_time_ = true;
+  return this;
+}
+
+Benchmark* Benchmark::UseRealTime() {
+  CHECK(!use_manual_time_)
+      << "Cannot set UseRealTime and UseManualTime simultaneously.";
+  use_real_time_ = true;
+  return this;
+}
+
+Benchmark* Benchmark::UseManualTime() {
+  CHECK(!use_real_time_)
+      << "Cannot set UseRealTime and UseManualTime simultaneously.";
+  use_manual_time_ = true;
+  return this;
+}
+
+Benchmark* Benchmark::Complexity(BigO complexity) {
+  complexity_ = complexity;
+  return this;
+}
+
+Benchmark* Benchmark::Complexity(BigOFunc* complexity) {
+  complexity_lambda_ = complexity;
+  complexity_ = oLambda;
+  return this;
+}
+
+Benchmark* Benchmark::ComputeStatistics(std::string name,
+                                        StatisticsFunc* statistics) {
+  statistics_.emplace_back(name, statistics);
+  return this;
+}
+
+Benchmark* Benchmark::Threads(int t) {
+  CHECK_GT(t, 0);
+  thread_counts_.push_back(t);
+  return this;
+}
+
+Benchmark* Benchmark::ThreadRange(int min_threads, int max_threads) {
+  CHECK_GT(min_threads, 0);
+  CHECK_GE(max_threads, min_threads);
+
+  AddRange(&thread_counts_, min_threads, max_threads, 2);
+  return this;
+}
+
+Benchmark* Benchmark::DenseThreadRange(int min_threads, int max_threads,
+                                       int stride) {
+  CHECK_GT(min_threads, 0);
+  CHECK_GE(max_threads, min_threads);
+  CHECK_GE(stride, 1);
+
+  for (auto i = min_threads; i < max_threads; i += stride) {
+    thread_counts_.push_back(i);
+  }
+  thread_counts_.push_back(max_threads);
+  return this;
+}
+
+Benchmark* Benchmark::ThreadPerCpu() {
+  thread_counts_.push_back(CPUInfo::Get().num_cpus);
+  return this;
+}
+
+void Benchmark::SetName(const char* name) { name_ = name; }
+
+int Benchmark::ArgsCnt() const {
+  if (args_.empty()) {
+    if (arg_names_.empty()) return -1;
+    return static_cast<int>(arg_names_.size());
+  }
+  return static_cast<int>(args_.front().size());
+}
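+
+// Illustrative use of the builder interface above (BM_Memcpy is a
+// hypothetical benchmark name):
+//
+//   BENCHMARK(BM_Memcpy)
+//       ->RangeMultiplier(2)
+//       ->Range(8, 8 << 10)
+//       ->Threads(2)
+//       ->UseRealTime();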
+
+//=============================================================================//
+//                            FunctionBenchmark
+//=============================================================================//
+
+void FunctionBenchmark::Run(State& st) { func_(st); }
+
+}  // end namespace internal
+
+void ClearRegisteredBenchmarks() {
+  internal::BenchmarkFamilies::GetInstance()->ClearBenchmarks();
+}
+
+}  // end namespace benchmark
diff --git a/src/third_party/google_benchmark/src/benchmark_register.h b/src/third_party/google_benchmark/src/benchmark_register.h
new file mode 100644
index 0000000..61377d7
--- /dev/null
+++ b/src/third_party/google_benchmark/src/benchmark_register.h
@@ -0,0 +1,107 @@
+#ifndef BENCHMARK_REGISTER_H
+#define BENCHMARK_REGISTER_H
+
+#include <vector>
+
+#include "check.h"
+
+namespace benchmark {
+namespace internal {
+
+// Append the powers of 'mult' in the closed interval [lo, hi].
+// Returns iterator to the start of the inserted range.
+template <typename T>
+typename std::vector<T>::iterator
+AddPowers(std::vector<T>* dst, T lo, T hi, int mult) {
+  CHECK_GE(lo, 0);
+  CHECK_GE(hi, lo);
+  CHECK_GE(mult, 2);
+
+  const size_t start_offset = dst->size();
+
+  static const T kmax = std::numeric_limits<T>::max();
+
+  // Space out the values in multiples of "mult"
+  for (T i = 1; i <= hi; i *= mult) {
+    if (i >= lo) {
+      dst->push_back(i);
+    }
+    // Break the loop here since multiplying by
+    // 'mult' would move outside of the range of T
+    if (i > kmax / mult) break;
+  }
+
+  return dst->begin() + start_offset;
+}
+
+template <typename T>
+void AddNegatedPowers(std::vector<T>* dst, T lo, T hi, int mult) {
+  // We negate lo and hi so we require that they cannot be equal to 'min'.
+  CHECK_GT(lo, std::numeric_limits<T>::min());
+  CHECK_GT(hi, std::numeric_limits<T>::min());
+  CHECK_GE(hi, lo);
+  CHECK_LE(hi, 0);
+
+  // Add positive powers, then negate and reverse.
+  // Casts necessary since small integers get promoted
+  // to 'int' when negating.
+  const auto lo_complement = static_cast<T>(-lo);
+  const auto hi_complement = static_cast<T>(-hi);
+
+  const auto it = AddPowers(dst, hi_complement, lo_complement, mult);
+
+  std::for_each(it, dst->end(), [](T& t) { t *= -1; });
+  std::reverse(it, dst->end());
+}
+
+template <typename T>
+void AddRange(std::vector<T>* dst, T lo, T hi, int mult) {
+  static_assert(std::is_integral<T>::value && std::is_signed<T>::value,
+                "Args type must be a signed integer");
+
+  CHECK_GE(hi, lo);
+  CHECK_GE(mult, 2);
+
+  // Add "lo"
+  dst->push_back(lo);
+
+  // Handle lo == hi as a special case, so we then know
+  // lo < hi and so it is safe to add 1 to lo and subtract 1
+  // from hi without falling outside of the range of T.
+  if (lo == hi) return;
+
+  // Ensure that lo_inner <= hi_inner below.
+  if (lo + 1 == hi) {
+    dst->push_back(hi);
+    return;
+  }
+
+  // Add all powers of 'mult' in the range [lo+1, hi-1] (inclusive).
+  const auto lo_inner = static_cast<T>(lo + 1);
+  const auto hi_inner = static_cast<T>(hi - 1);
+
+  // Insert negative values
+  if (lo_inner < 0) {
+    AddNegatedPowers(dst, lo_inner, std::min(hi_inner, T{-1}), mult);
+  }
+
+  // Treat 0 as a special case (see discussion on #762).
+  if (lo <= 0 && hi >= 0) {
+    dst->push_back(0);
+  }
+
+  // Insert positive values
+  if (hi_inner > 0) {
+    AddPowers(dst, std::max(lo_inner, T{1}), hi_inner, mult);
+  }
+
+  // Add "hi" (if different from last value).
+  if (hi != dst->back()) {
+    dst->push_back(hi);
+  }
+}
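+
+// Illustrative expansions of AddRange (values appended to an empty vector):
+//   AddRange(&v, 1, 100, 8)  ->  {1, 8, 64, 100}
+//   AddRange(&v, -8, 8, 8)   ->  {-8, -1, 0, 1, 8}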
+
+}  // namespace internal
+}  // namespace benchmark
+
+#endif  // BENCHMARK_REGISTER_H
diff --git a/src/third_party/google_benchmark/src/benchmark_runner.cc b/src/third_party/google_benchmark/src/benchmark_runner.cc
new file mode 100644
index 0000000..337fac1
--- /dev/null
+++ b/src/third_party/google_benchmark/src/benchmark_runner.cc
@@ -0,0 +1,361 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "benchmark_runner.h"
+#include "benchmark/benchmark.h"
+#include "benchmark_api_internal.h"
+#include "internal_macros.h"
+
+#ifndef BENCHMARK_OS_WINDOWS
+#ifndef BENCHMARK_OS_FUCHSIA
+#include <sys/resource.h>
+#endif
+#include <sys/time.h>
+#include <unistd.h>
+#endif
+
+#include <algorithm>
+#include <atomic>
+#include <condition_variable>
+#include <cstdio>
+#include <cstdlib>
+#include <fstream>
+#include <iostream>
+#include <memory>
+#include <string>
+#include <thread>
+#include <utility>
+
+#include "check.h"
+#include "colorprint.h"
+#include "commandlineflags.h"
+#include "complexity.h"
+#include "counter.h"
+#include "internal_macros.h"
+#include "log.h"
+#include "mutex.h"
+#include "re.h"
+#include "statistics.h"
+#include "string_util.h"
+#include "thread_manager.h"
+#include "thread_timer.h"
+
+namespace benchmark {
+
+namespace internal {
+
+MemoryManager* memory_manager = nullptr;
+
+namespace {
+
+static constexpr IterationCount kMaxIterations = 1000000000;
+
+BenchmarkReporter::Run CreateRunReport(
+    const benchmark::internal::BenchmarkInstance& b,
+    const internal::ThreadManager::Result& results,
+    IterationCount memory_iterations,
+    const MemoryManager::Result& memory_result, double seconds,
+    int64_t repetition_index) {
+  // Create report about this benchmark run.
+  BenchmarkReporter::Run report;
+
+  report.run_name = b.name;
+  report.error_occurred = results.has_error_;
+  report.error_message = results.error_message_;
+  report.report_label = results.report_label_;
+  // This is the total iterations across all threads.
+  report.iterations = results.iterations;
+  report.time_unit = b.time_unit;
+  report.threads = b.threads;
+  report.repetition_index = repetition_index;
+  report.repetitions = b.repetitions;
+
+  if (!report.error_occurred) {
+    if (b.use_manual_time) {
+      report.real_accumulated_time = results.manual_time_used;
+    } else {
+      report.real_accumulated_time = results.real_time_used;
+    }
+    report.cpu_accumulated_time = results.cpu_time_used;
+    report.complexity_n = results.complexity_n;
+    report.complexity = b.complexity;
+    report.complexity_lambda = b.complexity_lambda;
+    report.statistics = b.statistics;
+    report.counters = results.counters;
+
+    if (memory_iterations > 0) {
+      report.has_memory_result = true;
+      report.allocs_per_iter =
+          memory_iterations ? static_cast<double>(memory_result.num_allocs) /
+                                  memory_iterations
+                            : 0;
+      report.max_bytes_used = memory_result.max_bytes_used;
+    }
+
+    internal::Finish(&report.counters, results.iterations, seconds, b.threads);
+  }
+  return report;
+}
+
+// Execute one thread of benchmark b for the specified number of iterations.
+// Adds the stats collected for the thread into *total.
+void RunInThread(const BenchmarkInstance* b, IterationCount iters,
+                 int thread_id, ThreadManager* manager) {
+  internal::ThreadTimer timer(
+      b->measure_process_cpu_time
+          ? internal::ThreadTimer::CreateProcessCpuTime()
+          : internal::ThreadTimer::Create());
+  State st = b->Run(iters, thread_id, &timer, manager);
+  CHECK(st.iterations() >= st.max_iterations)
+      << "Benchmark returned before State::KeepRunning() returned false!";
+  {
+    MutexLock l(manager->GetBenchmarkMutex());
+    internal::ThreadManager::Result& results = manager->results;
+    results.iterations += st.iterations();
+    results.cpu_time_used += timer.cpu_time_used();
+    results.real_time_used += timer.real_time_used();
+    results.manual_time_used += timer.manual_time_used();
+    results.complexity_n += st.complexity_length_n();
+    internal::Increment(&results.counters, st.counters);
+  }
+  manager->NotifyThreadComplete();
+}
+
+class BenchmarkRunner {
+ public:
+  BenchmarkRunner(const benchmark::internal::BenchmarkInstance& b_,
+                  std::vector<BenchmarkReporter::Run>* complexity_reports_)
+      : b(b_),
+        complexity_reports(*complexity_reports_),
+        min_time(!IsZero(b.min_time) ? b.min_time : FLAGS_benchmark_min_time),
+        repeats(b.repetitions != 0 ? b.repetitions
+                                   : FLAGS_benchmark_repetitions),
+        has_explicit_iteration_count(b.iterations != 0),
+        pool(b.threads - 1),
+        iters(has_explicit_iteration_count ? b.iterations : 1) {
+    run_results.display_report_aggregates_only =
+        (FLAGS_benchmark_report_aggregates_only ||
+         FLAGS_benchmark_display_aggregates_only);
+    run_results.file_report_aggregates_only =
+        FLAGS_benchmark_report_aggregates_only;
+    if (b.aggregation_report_mode != internal::ARM_Unspecified) {
+      run_results.display_report_aggregates_only =
+          (b.aggregation_report_mode &
+           internal::ARM_DisplayReportAggregatesOnly);
+      run_results.file_report_aggregates_only =
+          (b.aggregation_report_mode & internal::ARM_FileReportAggregatesOnly);
+    }
+
+    for (int repetition_num = 0; repetition_num < repeats; repetition_num++) {
+      DoOneRepetition(repetition_num);
+    }
+
+    // Calculate additional statistics
+    run_results.aggregates_only = ComputeStats(run_results.non_aggregates);
+
+    // Maybe calculate complexity report
+    if ((b.complexity != oNone) && b.last_benchmark_instance) {
+      auto additional_run_stats = ComputeBigO(complexity_reports);
+      run_results.aggregates_only.insert(run_results.aggregates_only.end(),
+                                         additional_run_stats.begin(),
+                                         additional_run_stats.end());
+      complexity_reports.clear();
+    }
+  }
+
+  RunResults&& get_results() { return std::move(run_results); }
+
+ private:
+  RunResults run_results;
+
+  const benchmark::internal::BenchmarkInstance& b;
+  std::vector<BenchmarkReporter::Run>& complexity_reports;
+
+  const double min_time;
+  const int repeats;
+  const bool has_explicit_iteration_count;
+
+  std::vector<std::thread> pool;
+
+  IterationCount iters;  // preserved between repetitions!
+  // So only the first repetition has to find/calculate it,
+  // the other repetitions will just use that precomputed iteration count.
+
+  struct IterationResults {
+    internal::ThreadManager::Result results;
+    IterationCount iters;
+    double seconds;
+  };
+  IterationResults DoNIterations() {
+    VLOG(2) << "Running " << b.name.str() << " for " << iters << "\n";
+
+    std::unique_ptr<internal::ThreadManager> manager;
+    manager.reset(new internal::ThreadManager(b.threads));
+
+    // Run all but one thread in separate threads
+    for (std::size_t ti = 0; ti < pool.size(); ++ti) {
+      pool[ti] = std::thread(&RunInThread, &b, iters, static_cast<int>(ti + 1),
+                             manager.get());
+    }
+    // And run one thread here directly.
+    // (If we were asked to run just one thread, we don't create new threads.)
+    // Yes, we need to do this here *after* we start the separate threads.
+    RunInThread(&b, iters, 0, manager.get());
+
+    // The main thread has finished. Now let's wait for the other threads.
+    manager->WaitForAllThreads();
+    for (std::thread& thread : pool) thread.join();
+
+    IterationResults i;
+    // Acquire the measurements/counters from the manager, UNDER THE LOCK!
+    {
+      MutexLock l(manager->GetBenchmarkMutex());
+      i.results = manager->results;
+    }
+
+    // And get rid of the manager.
+    manager.reset();
+
+    // Adjust real/manual time stats since they were reported per thread.
+    i.results.real_time_used /= b.threads;
+    i.results.manual_time_used /= b.threads;
+    // If we were measuring whole-process CPU usage, adjust the CPU time too.
+    if (b.measure_process_cpu_time) i.results.cpu_time_used /= b.threads;
+
+    VLOG(2) << "Ran in " << i.results.cpu_time_used << "/"
+            << i.results.real_time_used << "\n";
+
+    // So for how long were we running?
+    i.iters = iters;
+    // Base decisions off of real time if requested by this benchmark.
+    i.seconds = i.results.cpu_time_used;
+    if (b.use_manual_time) {
+      i.seconds = i.results.manual_time_used;
+    } else if (b.use_real_time) {
+      i.seconds = i.results.real_time_used;
+    }
+
+    return i;
+  }
+
+  IterationCount PredictNumItersNeeded(const IterationResults& i) const {
+    // See how much the iteration count should be increased by.
+    // Note: Avoid division by zero with max(seconds, 1ns).
+    double multiplier = min_time * 1.4 / std::max(i.seconds, 1e-9);
+    // If our last run was at least 10% of FLAGS_benchmark_min_time then we
+    // use the multiplier directly.
+    // Otherwise we use at most 10 times expansion.
+    // NOTE: When the last run was at least 10% of the min time the max
+    // expansion should be 14x.
+    bool is_significant = (i.seconds / min_time) > 0.1;
+    multiplier = is_significant ? multiplier : std::min(10.0, multiplier);
+    if (multiplier <= 1.0) multiplier = 2.0;
+
+    // So what seems to be the sufficiently-large iteration count? Round up.
+    const IterationCount max_next_iters =
+        std::lround(std::max(multiplier * i.iters, i.iters + 1.0));
+    // But we do have *some* sanity limits, though.
+    const IterationCount next_iters = std::min(max_next_iters, kMaxIterations);
+
+    VLOG(3) << "Next iters: " << next_iters << ", " << multiplier << "\n";
+    return next_iters;  // round up before conversion to integer.
+  }
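+
+  // Illustrative numbers: with min_time = 0.5s, a previous run of 1000
+  // iterations that took 0.01s gives multiplier = 0.5 * 1.4 / 0.01 = 70;
+  // since 0.01 / 0.5 = 0.02 is not above 0.1 the run is not significant, the
+  // multiplier is clamped to 10, and the next attempt uses
+  // lround(max(10 * 1000, 1001)) = 10000 iterations.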
+
+  bool ShouldReportIterationResults(const IterationResults& i) const {
+    // Determine if this run should be reported:
+    // either it has run for a sufficient amount of time,
+    // or an error was reported.
+    return i.results.has_error_ ||
+           i.iters >= kMaxIterations ||  // Too many iterations already.
+           i.seconds >= min_time ||      // The elapsed time is large enough.
+           // CPU time is specified but the elapsed real time greatly exceeds
+           // the minimum time.
+           // Note that user-provided timers are exempt from this sanity check.
+           ((i.results.real_time_used >= 5 * min_time) && !b.use_manual_time);
+  }
+
+  void DoOneRepetition(int64_t repetition_index) {
+    const bool is_the_first_repetition = repetition_index == 0;
+    IterationResults i;
+
+    // We *may* be gradually increasing the length (iteration count)
+    // of the benchmark until we decide the results are significant.
+    // And once we do, we report those last results and exit.
+    // Please do note that if there are repetitions, the iteration count is
+    // *only* calculated for the *first* repetition; the other repetitions
+    // simply reuse that precomputed iteration count.
+    for (;;) {
+      i = DoNIterations();
+
+      // Do we consider the results to be significant?
+      // If we are doing repetitions, and the first repetition was already done,
+      // it has calculated the correct iteration time, so we have run that very
+      // iteration count just now. No need to calculate anything. Just report.
+      // Else, the normal rules apply.
+      const bool results_are_significant = !is_the_first_repetition ||
+                                           has_explicit_iteration_count ||
+                                           ShouldReportIterationResults(i);
+
+      if (results_are_significant) break;  // Good, let's report them!
+
+      // Nope, bad iteration. Let's re-estimate the hopefully-sufficient
+      // iteration count, and run the benchmark again...
+
+      iters = PredictNumItersNeeded(i);
+      assert(iters > i.iters &&
+             "if we did more iterations than we want to do the next time, "
+             "then we should have accepted the current iteration run.");
+    }
+
+    // Oh, one last thing, we need to also produce the 'memory measurements'..
+    MemoryManager::Result memory_result;
+    IterationCount memory_iterations = 0;
+    if (memory_manager != nullptr) {
+      // Only run a few iterations to reduce the impact of one-time
+      // allocations in benchmarks that are not properly managed.
+      memory_iterations = std::min<IterationCount>(16, iters);
+      memory_manager->Start();
+      std::unique_ptr<internal::ThreadManager> manager;
+      manager.reset(new internal::ThreadManager(1));
+      RunInThread(&b, memory_iterations, 0, manager.get());
+      manager->WaitForAllThreads();
+      manager.reset();
+
+      memory_manager->Stop(&memory_result);
+    }
+
+    // Ok, now actually report.
+    BenchmarkReporter::Run report =
+        CreateRunReport(b, i.results, memory_iterations, memory_result,
+                        i.seconds, repetition_index);
+
+    if (!report.error_occurred && b.complexity != oNone)
+      complexity_reports.push_back(report);
+
+    run_results.non_aggregates.push_back(report);
+  }
+};
+
+}  // end namespace
+
+RunResults RunBenchmark(
+    const benchmark::internal::BenchmarkInstance& b,
+    std::vector<BenchmarkReporter::Run>* complexity_reports) {
+  internal::BenchmarkRunner r(b, complexity_reports);
+  return r.get_results();
+}
+
+}  // end namespace internal
+
+}  // end namespace benchmark
diff --git a/src/third_party/google_benchmark/src/benchmark_runner.h b/src/third_party/google_benchmark/src/benchmark_runner.h
new file mode 100644
index 0000000..96e8282
--- /dev/null
+++ b/src/third_party/google_benchmark/src/benchmark_runner.h
@@ -0,0 +1,51 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef BENCHMARK_RUNNER_H_
+#define BENCHMARK_RUNNER_H_
+
+#include "benchmark_api_internal.h"
+#include "internal_macros.h"
+
+DECLARE_double(benchmark_min_time);
+
+DECLARE_int32(benchmark_repetitions);
+
+DECLARE_bool(benchmark_report_aggregates_only);
+
+DECLARE_bool(benchmark_display_aggregates_only);
+
+namespace benchmark {
+
+namespace internal {
+
+extern MemoryManager* memory_manager;
+
+struct RunResults {
+  std::vector<BenchmarkReporter::Run> non_aggregates;
+  std::vector<BenchmarkReporter::Run> aggregates_only;
+
+  bool display_report_aggregates_only = false;
+  bool file_report_aggregates_only = false;
+};
+
+RunResults RunBenchmark(
+    const benchmark::internal::BenchmarkInstance& b,
+    std::vector<BenchmarkReporter::Run>* complexity_reports);
+
+}  // namespace internal
+
+}  // end namespace benchmark
+
+#endif  // BENCHMARK_RUNNER_H_
diff --git a/src/third_party/google_benchmark/src/check.h b/src/third_party/google_benchmark/src/check.h
new file mode 100644
index 0000000..f5f8253
--- /dev/null
+++ b/src/third_party/google_benchmark/src/check.h
@@ -0,0 +1,82 @@
+#ifndef CHECK_H_
+#define CHECK_H_
+
+#include <cmath>
+#include <cstdlib>
+#include <ostream>
+
+#include "internal_macros.h"
+#include "log.h"
+
+namespace benchmark {
+namespace internal {
+
+typedef void(AbortHandlerT)();
+
+inline AbortHandlerT*& GetAbortHandler() {
+  static AbortHandlerT* handler = &std::abort;
+  return handler;
+}
+
+BENCHMARK_NORETURN inline void CallAbortHandler() {
+  GetAbortHandler()();
+  std::abort();  // fallback to enforce noreturn
+}
+
+// CheckHandler is the class constructed by failing CHECK macros. CheckHandler
+// will log information about the failures and abort when it is destructed.
+class CheckHandler {
+ public:
+  CheckHandler(const char* check, const char* file, const char* func, int line)
+      : log_(GetErrorLogInstance()) {
+    log_ << file << ":" << line << ": " << func << ": Check `" << check
+         << "' failed. ";
+  }
+
+  LogType& GetLog() { return log_; }
+
+  BENCHMARK_NORETURN ~CheckHandler() BENCHMARK_NOEXCEPT_OP(false) {
+    log_ << std::endl;
+    CallAbortHandler();
+  }
+
+  CheckHandler& operator=(const CheckHandler&) = delete;
+  CheckHandler(const CheckHandler&) = delete;
+  CheckHandler() = delete;
+
+ private:
+  LogType& log_;
+};
+
+}  // end namespace internal
+}  // end namespace benchmark
+
+// The CHECK macro returns a std::ostream object that can have extra information
+// written to it.
+#ifndef NDEBUG
+#define CHECK(b)                                                             \
+  (b ? ::benchmark::internal::GetNullLogInstance()                           \
+     : ::benchmark::internal::CheckHandler(#b, __FILE__, __func__, __LINE__) \
+           .GetLog())
+#else
+#define CHECK(b) ::benchmark::internal::GetNullLogInstance()
+#endif
+
+// clang-format off
+// preserve whitespacing between operators for alignment
+#define CHECK_EQ(a, b) CHECK((a) == (b))
+#define CHECK_NE(a, b) CHECK((a) != (b))
+#define CHECK_GE(a, b) CHECK((a) >= (b))
+#define CHECK_LE(a, b) CHECK((a) <= (b))
+#define CHECK_GT(a, b) CHECK((a) > (b))
+#define CHECK_LT(a, b) CHECK((a) < (b))
+
+#define CHECK_FLOAT_EQ(a, b, eps) CHECK(std::fabs((a) - (b)) <  (eps))
+#define CHECK_FLOAT_NE(a, b, eps) CHECK(std::fabs((a) - (b)) >= (eps))
+#define CHECK_FLOAT_GE(a, b, eps) CHECK((a) - (b) > -(eps))
+#define CHECK_FLOAT_LE(a, b, eps) CHECK((b) - (a) > -(eps))
+#define CHECK_FLOAT_GT(a, b, eps) CHECK((a) - (b) >  (eps))
+#define CHECK_FLOAT_LT(a, b, eps) CHECK((b) - (a) >  (eps))
+// clang-format on
+
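+// Illustrative usage:
+//   CHECK_GE(size, 0) << "size must be non-negative";
+// When NDEBUG is defined the condition is not evaluated and the streamed
+// message goes to the null log.
+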
+#endif  // CHECK_H_
diff --git a/src/third_party/google_benchmark/src/colorprint.cc b/src/third_party/google_benchmark/src/colorprint.cc
new file mode 100644
index 0000000..fff6a98
--- /dev/null
+++ b/src/third_party/google_benchmark/src/colorprint.cc
@@ -0,0 +1,188 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "colorprint.h"
+
+#include <cstdarg>
+#include <cstdio>
+#include <cstdlib>
+#include <cstring>
+#include <memory>
+#include <string>
+
+#include "check.h"
+#include "internal_macros.h"
+
+#ifdef BENCHMARK_OS_WINDOWS
+#include <windows.h>
+#include <io.h>
+#else
+#include <unistd.h>
+#endif  // BENCHMARK_OS_WINDOWS
+
+namespace benchmark {
+namespace {
+#ifdef BENCHMARK_OS_WINDOWS
+typedef WORD PlatformColorCode;
+#else
+typedef const char* PlatformColorCode;
+#endif
+
+PlatformColorCode GetPlatformColorCode(LogColor color) {
+#ifdef BENCHMARK_OS_WINDOWS
+  switch (color) {
+    case COLOR_RED:
+      return FOREGROUND_RED;
+    case COLOR_GREEN:
+      return FOREGROUND_GREEN;
+    case COLOR_YELLOW:
+      return FOREGROUND_RED | FOREGROUND_GREEN;
+    case COLOR_BLUE:
+      return FOREGROUND_BLUE;
+    case COLOR_MAGENTA:
+      return FOREGROUND_BLUE | FOREGROUND_RED;
+    case COLOR_CYAN:
+      return FOREGROUND_BLUE | FOREGROUND_GREEN;
+    case COLOR_WHITE:  // fall through to default
+    default:
+      return 0;
+  }
+#else
+  switch (color) {
+    case COLOR_RED:
+      return "1";
+    case COLOR_GREEN:
+      return "2";
+    case COLOR_YELLOW:
+      return "3";
+    case COLOR_BLUE:
+      return "4";
+    case COLOR_MAGENTA:
+      return "5";
+    case COLOR_CYAN:
+      return "6";
+    case COLOR_WHITE:
+      return "7";
+    default:
+      return nullptr;
+  };
+#endif
+}
+
+}  // end namespace
+
+std::string FormatString(const char* msg, va_list args) {
+  // We might need a second shot at this, so pre-emptively make a copy.
+  va_list args_cp;
+  va_copy(args_cp, args);
+
+  std::size_t size = 256;
+  char local_buff[256];
+  auto ret = vsnprintf(local_buff, size, msg, args_cp);
+
+  va_end(args_cp);
+
+  // Currently there is no error handling for failure, so this is a hack.
+  CHECK(ret >= 0);
+
+  if (ret == 0)  // handle empty expansion
+    return {};
+  else if (static_cast<size_t>(ret) < size)
+    return local_buff;
+  else {
+    // we did not provide a long enough buffer on our first attempt.
+    size = (size_t)ret + 1;  // + 1 for the null byte
+    std::unique_ptr<char[]> buff(new char[size]);
+    ret = vsnprintf(buff.get(), size, msg, args);
+    CHECK(ret > 0 && ((size_t)ret) < size);
+    return buff.get();
+  }
+}
+
+std::string FormatString(const char* msg, ...) {
+  va_list args;
+  va_start(args, msg);
+  auto tmp = FormatString(msg, args);
+  va_end(args);
+  return tmp;
+}
+
+void ColorPrintf(std::ostream& out, LogColor color, const char* fmt, ...) {
+  va_list args;
+  va_start(args, fmt);
+  ColorPrintf(out, color, fmt, args);
+  va_end(args);
+}
+
+void ColorPrintf(std::ostream& out, LogColor color, const char* fmt,
+                 va_list args) {
+#ifdef BENCHMARK_OS_WINDOWS
+  ((void)out);  // suppress unused warning
+
+  const HANDLE stdout_handle = GetStdHandle(STD_OUTPUT_HANDLE);
+
+  // Gets the current text color.
+  CONSOLE_SCREEN_BUFFER_INFO buffer_info;
+  GetConsoleScreenBufferInfo(stdout_handle, &buffer_info);
+  const WORD old_color_attrs = buffer_info.wAttributes;
+
+  // We need to flush the stream buffers into the console before each
+  // SetConsoleTextAttribute call lest it affect the text that is already
+  // printed but has not yet reached the console.
+  fflush(stdout);
+  SetConsoleTextAttribute(stdout_handle,
+                          GetPlatformColorCode(color) | FOREGROUND_INTENSITY);
+  vprintf(fmt, args);
+
+  fflush(stdout);
+  // Restores the text color.
+  SetConsoleTextAttribute(stdout_handle, old_color_attrs);
+#else
+  const char* color_code = GetPlatformColorCode(color);
+  if (color_code) out << FormatString("\033[0;3%sm", color_code);
+  out << FormatString(fmt, args) << "\033[m";
+#endif
+}
+
+bool IsColorTerminal() {
+#if BENCHMARK_OS_WINDOWS
+  // On Windows the TERM variable is usually not set, but the
+  // console there does support colors.
+  return 0 != _isatty(_fileno(stdout));
+#else
+  // On non-Windows platforms, we rely on the TERM variable. This list of
+  // supported TERM values is copied from Google Test:
+  // <https://github.com/google/googletest/blob/master/googletest/src/gtest.cc#L2925>.
+  const char* const SUPPORTED_TERM_VALUES[] = {
+      "xterm",         "xterm-color",     "xterm-256color",
+      "screen",        "screen-256color", "tmux",
+      "tmux-256color", "rxvt-unicode",    "rxvt-unicode-256color",
+      "linux",         "cygwin",
+  };
+
+  const char* const term = getenv("TERM");
+
+  bool term_supports_color = false;
+  for (const char* candidate : SUPPORTED_TERM_VALUES) {
+    if (term && 0 == strcmp(term, candidate)) {
+      term_supports_color = true;
+      break;
+    }
+  }
+
+  return 0 != isatty(fileno(stdout)) && term_supports_color;
+#endif  // BENCHMARK_OS_WINDOWS
+}
+
+}  // end namespace benchmark
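
For reference, FormatString() above uses the standard two-pass vsnprintf() sizing idiom: try a fixed stack buffer first, and fall back to a heap allocation of exactly the reported size when the first pass does not fit. Below is a minimal standalone sketch of the same idiom; SprintfToString is an illustrative name and not part of the imported sources.

#include <cstdarg>
#include <cstddef>
#include <cstdio>
#include <memory>
#include <string>

// Illustrative standalone version of the two-pass sizing idiom used by
// benchmark::FormatString(); not part of the imported sources.
static std::string SprintfToString(const char* fmt, ...) {
  va_list args;
  va_start(args, fmt);
  va_list args_copy;
  va_copy(args_copy, args);  // the first vsnprintf pass consumes its va_list

  char local_buff[256];
  // First pass: vsnprintf reports the length it would have written, even if
  // the buffer was too small.
  const int needed =
      std::vsnprintf(local_buff, sizeof(local_buff), fmt, args_copy);
  va_end(args_copy);

  std::string result;
  if (needed < 0) {
    result.clear();  // formatting error: return an empty string
  } else if (static_cast<std::size_t>(needed) < sizeof(local_buff)) {
    result = local_buff;  // fit on the first pass
  } else {
    // Second pass: allocate exactly needed + 1 bytes for the terminator.
    std::unique_ptr<char[]> buff(new char[needed + 1]);
    std::vsnprintf(buff.get(), static_cast<std::size_t>(needed) + 1, fmt, args);
    result = buff.get();
  }
  va_end(args);
  return result;
}
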
diff --git a/src/third_party/google_benchmark/src/colorprint.h b/src/third_party/google_benchmark/src/colorprint.h
new file mode 100644
index 0000000..9f6fab9
--- /dev/null
+++ b/src/third_party/google_benchmark/src/colorprint.h
@@ -0,0 +1,33 @@
+#ifndef BENCHMARK_COLORPRINT_H_
+#define BENCHMARK_COLORPRINT_H_
+
+#include <cstdarg>
+#include <iostream>
+#include <string>
+
+namespace benchmark {
+enum LogColor {
+  COLOR_DEFAULT,
+  COLOR_RED,
+  COLOR_GREEN,
+  COLOR_YELLOW,
+  COLOR_BLUE,
+  COLOR_MAGENTA,
+  COLOR_CYAN,
+  COLOR_WHITE
+};
+
+std::string FormatString(const char* msg, va_list args);
+std::string FormatString(const char* msg, ...);
+
+void ColorPrintf(std::ostream& out, LogColor color, const char* fmt,
+                 va_list args);
+void ColorPrintf(std::ostream& out, LogColor color, const char* fmt, ...);
+
+// Returns true if stdout appears to be a terminal that supports colored
+// output, false otherwise.
+bool IsColorTerminal();
+
+}  // end namespace benchmark
+
+#endif  // BENCHMARK_COLORPRINT_H_
diff --git a/src/third_party/google_benchmark/src/colorprint_starboard.cc b/src/third_party/google_benchmark/src/colorprint_starboard.cc
new file mode 100644
index 0000000..d68d5a7
--- /dev/null
+++ b/src/third_party/google_benchmark/src/colorprint_starboard.cc
@@ -0,0 +1,62 @@
+// Copyright 2019 The Cobalt Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "colorprint.h"
+
+#include <vector>
+
+#include "starboard/string.h"
+
+namespace benchmark {
+
+std::string FormatString(const char* msg, va_list args) {
+  va_list args_copy;
+  va_copy(args_copy, args);
+
+  int expected_size = ::SbStringFormat(NULL, 0, msg, args_copy);
+
+  va_end(args_copy);
+
+  if (expected_size <= 0) {
+    return std::string();
+  }
+
+  std::vector<char> buffer(expected_size + 1);
+  ::SbStringFormat(buffer.data(), buffer.size(), msg, args);
+  return std::string(buffer.data(), expected_size);
+}
+
+std::string FormatString(const char* msg, ...) {
+  va_list args;
+  va_start(args, msg);
+  auto tmp = FormatString(msg, args);
+  va_end(args);
+  return tmp;
+}
+
+void ColorPrintf(std::ostream& out, LogColor color, const char* fmt, ...) {
+  va_list args;
+  va_start(args, fmt);
+  ColorPrintf(out, color, fmt, args);
+  va_end(args);
+}
+
+void ColorPrintf(std::ostream& out, LogColor color, const char* fmt,
+                 va_list args) {
+  out << FormatString(fmt, args);
+}
+
+bool IsColorTerminal() { return false; }
+
+}  // end namespace benchmark
diff --git a/src/third_party/google_benchmark/src/commandlineflags.cc b/src/third_party/google_benchmark/src/commandlineflags.cc
new file mode 100644
index 0000000..4e60f0b
--- /dev/null
+++ b/src/third_party/google_benchmark/src/commandlineflags.cc
@@ -0,0 +1,228 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "commandlineflags.h"
+
+#include <algorithm>
+#include <cctype>
+#include <cstdlib>
+#include <cstring>
+#include <iostream>
+#include <limits>
+
+namespace benchmark {
+namespace {
+
+// Parses 'str' for a 32-bit signed integer.  If successful, writes
+// the result to *value and returns true; otherwise leaves *value
+// unchanged and returns false.
+bool ParseInt32(const std::string& src_text, const char* str, int32_t* value) {
+  // Parses the environment variable as a decimal integer.
+  char* end = nullptr;
+  const long long_value = strtol(str, &end, 10);  // NOLINT
+
+  // Has strtol() consumed all characters in the string?
+  if (*end != '\0') {
+    // No - an invalid character was encountered.
+    std::cerr << src_text << " is expected to be a 32-bit integer, "
+              << "but actually has value \"" << str << "\".\n";
+    return false;
+  }
+
+  // Is the parsed value in the range of an Int32?
+  const int32_t result = static_cast<int32_t>(long_value);
+  if (long_value == std::numeric_limits<long>::max() ||
+      long_value == std::numeric_limits<long>::min() ||
+      // The parsed value overflows as a long.  (strtol() returns
+      // LONG_MAX or LONG_MIN when the input overflows.)
+      result != long_value
+      // The parsed value overflows as an Int32.
+  ) {
+    std::cerr << src_text << " is expected to be a 32-bit integer, "
+              << "but actually has value \"" << str << "\", "
+              << "which overflows.\n";
+    return false;
+  }
+
+  *value = result;
+  return true;
+}
+
+// Parses 'str' for a double.  If successful, writes the result to *value and
+// returns true; otherwise leaves *value unchanged and returns false.
+bool ParseDouble(const std::string& src_text, const char* str, double* value) {
+  // Parses the string as a double.
+  char* end = nullptr;
+  const double double_value = strtod(str, &end);  // NOLINT
+
+  // Has strtod() consumed all characters in the string?
+  if (*end != '\0') {
+    // No - an invalid character was encountered.
+    std::cerr << src_text << " is expected to be a double, "
+              << "but actually has value \"" << str << "\".\n";
+    return false;
+  }
+
+  *value = double_value;
+  return true;
+}
+
+// Returns the name of the environment variable corresponding to the
+// given flag.  For example, FlagToEnvVar("foo") will return
+// "BENCHMARK_FOO" in the open-source version.
+static std::string FlagToEnvVar(const char* flag) {
+  const std::string flag_str(flag);
+
+  std::string env_var;
+  for (size_t i = 0; i != flag_str.length(); ++i)
+    env_var += static_cast<char>(::toupper(flag_str.c_str()[i]));
+
+  return "BENCHMARK_" + env_var;
+}
+
+}  // namespace
+
+bool BoolFromEnv(const char* flag, bool default_val) {
+  const std::string env_var = FlagToEnvVar(flag);
+  const char* const value_str = getenv(env_var.c_str());
+  return value_str == nullptr ? default_val : IsTruthyFlagValue(value_str);
+}
+
+int32_t Int32FromEnv(const char* flag, int32_t default_val) {
+  const std::string env_var = FlagToEnvVar(flag);
+  const char* const value_str = getenv(env_var.c_str());
+  int32_t value = default_val;
+  if (value_str == nullptr ||
+      !ParseInt32(std::string("Environment variable ") + env_var, value_str,
+                  &value)) {
+    return default_val;
+  }
+  return value;
+}
+
+double DoubleFromEnv(const char* flag, double default_val) {
+  const std::string env_var = FlagToEnvVar(flag);
+  const char* const value_str = getenv(env_var.c_str());
+  double value = default_val;
+  if (value_str == nullptr ||
+      !ParseDouble(std::string("Environment variable ") + env_var, value_str,
+                   &value)) {
+    return default_val;
+  }
+  return value;
+}
+
+const char* StringFromEnv(const char* flag, const char* default_val) {
+  const std::string env_var = FlagToEnvVar(flag);
+  const char* const value = getenv(env_var.c_str());
+  return value == nullptr ? default_val : value;
+}
+
+// Parses a string as a command line flag.  The string should have
+// the format "--flag=value".  When def_optional is true, the "=value"
+// part can be omitted.
+//
+// Returns the value of the flag, or nullptr if the parsing failed.
+const char* ParseFlagValue(const char* str, const char* flag,
+                           bool def_optional) {
+  // str and flag must not be nullptr.
+  if (str == nullptr || flag == nullptr) return nullptr;
+
+  // The flag must start with "--".
+  const std::string flag_str = std::string("--") + std::string(flag);
+  const size_t flag_len = flag_str.length();
+  if (strncmp(str, flag_str.c_str(), flag_len) != 0) return nullptr;
+
+  // Skips the flag name.
+  const char* flag_end = str + flag_len;
+
+  // When def_optional is true, it's OK to not have a "=value" part.
+  if (def_optional && (flag_end[0] == '\0')) return flag_end;
+
+  // If def_optional is true and there are more characters after the
+  // flag name, or if def_optional is false, there must be a '=' after
+  // the flag name.
+  if (flag_end[0] != '=') return nullptr;
+
+  // Returns the string after "=".
+  return flag_end + 1;
+}
+
+bool ParseBoolFlag(const char* str, const char* flag, bool* value) {
+  // Gets the value of the flag as a string.
+  const char* const value_str = ParseFlagValue(str, flag, true);
+
+  // Aborts if the parsing failed.
+  if (value_str == nullptr) return false;
+
+  // Converts the string value to a bool.
+  *value = IsTruthyFlagValue(value_str);
+  return true;
+}
+
+bool ParseInt32Flag(const char* str, const char* flag, int32_t* value) {
+  // Gets the value of the flag as a string.
+  const char* const value_str = ParseFlagValue(str, flag, false);
+
+  // Aborts if the parsing failed.
+  if (value_str == nullptr) return false;
+
+  // Sets *value to the value of the flag.
+  return ParseInt32(std::string("The value of flag --") + flag, value_str,
+                    value);
+}
+
+bool ParseDoubleFlag(const char* str, const char* flag, double* value) {
+  // Gets the value of the flag as a string.
+  const char* const value_str = ParseFlagValue(str, flag, false);
+
+  // Aborts if the parsing failed.
+  if (value_str == nullptr) return false;
+
+  // Sets *value to the value of the flag.
+  return ParseDouble(std::string("The value of flag --") + flag, value_str,
+                     value);
+}
+
+bool ParseStringFlag(const char* str, const char* flag, std::string* value) {
+  // Gets the value of the flag as a string.
+  const char* const value_str = ParseFlagValue(str, flag, false);
+
+  // Aborts if the parsing failed.
+  if (value_str == nullptr) return false;
+
+  *value = value_str;
+  return true;
+}
+
+bool IsFlag(const char* str, const char* flag) {
+  return (ParseFlagValue(str, flag, true) != nullptr);
+}
+
+bool IsTruthyFlagValue(const std::string& value) {
+  if (value.size() == 1) {
+    char v = value[0];
+    return isalnum(v) &&
+           !(v == '0' || v == 'f' || v == 'F' || v == 'n' || v == 'N');
+  } else if (!value.empty()) {
+    std::string value_lower(value);
+    std::transform(value_lower.begin(), value_lower.end(),
+                   value_lower.begin(), ::tolower);
+    return !(value_lower == "false" || value_lower == "no" ||
+             value_lower == "off");
+  } else
+    return true;
+}
+
+}  // end namespace benchmark
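
These helpers follow the googletest flag conventions: a flag named foo falls back to the BENCHMARK_FOO environment variable, and IsTruthyFlagValue() treats "0", "f"/"false", "n"/"no", "off" and single non-alphanumeric characters as false, everything else (including the empty string) as true. A hypothetical driver wired up with these helpers might look like the sketch below; the flag names are invented for illustration.

#include <cstdint>
#include <iostream>

#include "commandlineflags.h"

// Hypothetical flags; defaults fall back to the BENCHMARK_ENABLE_FOO and
// BENCHMARK_FOO_REPETITIONS environment variables via the DEFINE_* macros.
DEFINE_bool(enable_foo, false);
DEFINE_int32(foo_repetitions, 3);

int main(int argc, char** argv) {
  for (int i = 1; i < argc; ++i) {
    // Each parser returns true only if argv[i] is "--<flag>" or
    // "--<flag>=<value>" and the value converts cleanly.
    if (benchmark::ParseBoolFlag(argv[i], "enable_foo", &FLAG(enable_foo)))
      continue;
    if (benchmark::ParseInt32Flag(argv[i], "foo_repetitions",
                                  &FLAG(foo_repetitions)))
      continue;
    std::cerr << "unrecognized argument: " << argv[i] << "\n";
  }
  std::cout << "enable_foo=" << FLAG(enable_foo)
            << " foo_repetitions=" << FLAG(foo_repetitions) << "\n";
  return 0;
}
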
diff --git a/src/third_party/google_benchmark/src/commandlineflags.h b/src/third_party/google_benchmark/src/commandlineflags.h
new file mode 100644
index 0000000..3a1f6a8
--- /dev/null
+++ b/src/third_party/google_benchmark/src/commandlineflags.h
@@ -0,0 +1,103 @@
+#ifndef BENCHMARK_COMMANDLINEFLAGS_H_
+#define BENCHMARK_COMMANDLINEFLAGS_H_
+
+#include <cstdint>
+#include <string>
+
+// Macro for referencing flags.
+#define FLAG(name) FLAGS_##name
+
+// Macros for declaring flags.
+#define DECLARE_bool(name) extern bool FLAG(name)
+#define DECLARE_int32(name) extern int32_t FLAG(name)
+#define DECLARE_double(name) extern double FLAG(name)
+#define DECLARE_string(name) extern std::string FLAG(name)
+
+// Macros for defining flags.
+#define DEFINE_bool(name, default_val)            \
+  bool FLAG(name) =                               \
+    benchmark::BoolFromEnv(#name, default_val)
+#define DEFINE_int32(name, default_val)           \
+  int32_t FLAG(name) =                            \
+    benchmark::Int32FromEnv(#name, default_val)
+#define DEFINE_double(name, default_val)          \
+  double FLAG(name) =                             \
+    benchmark::DoubleFromEnv(#name, default_val)
+#define DEFINE_string(name, default_val)          \
+  std::string FLAG(name) =                        \
+    benchmark::StringFromEnv(#name, default_val)
+
+namespace benchmark {
+
+// Parses a bool from the environment variable
+// corresponding to the given flag.
+//
+// If the variable exists, returns IsTruthyFlagValue() value;  if not,
+// returns the given default value.
+bool BoolFromEnv(const char* flag, bool default_val);
+
+// Parses an Int32 from the environment variable
+// corresponding to the given flag.
+//
+// If the variable exists, returns ParseInt32() value;  if not, returns
+// the given default value.
+int32_t Int32FromEnv(const char* flag, int32_t default_val);
+
+// Parses a Double from the environment variable
+// corresponding to the given flag.
+//
+// If the variable exists, returns ParseDouble();  if not, returns
+// the given default value.
+double DoubleFromEnv(const char* flag, double default_val);
+
+// Parses a string from the environment variable
+// corresponding to the given flag.
+//
+// If the variable exists, returns its value;  if not, returns
+// the given default value.
+const char* StringFromEnv(const char* flag, const char* default_val);
+
+// Parses a string for a bool flag, in the form of either
+// "--flag=value" or "--flag".
+//
+// In the former case, the value is taken as true if it passes IsTruthyFlagValue().
+//
+// In the latter case, the value is taken as true.
+//
+// On success, stores the value of the flag in *value, and returns
+// true.  On failure, returns false without changing *value.
+bool ParseBoolFlag(const char* str, const char* flag, bool* value);
+
+// Parses a string for an Int32 flag, in the form of
+// "--flag=value".
+//
+// On success, stores the value of the flag in *value, and returns
+// true.  On failure, returns false without changing *value.
+bool ParseInt32Flag(const char* str, const char* flag, int32_t* value);
+
+// Parses a string for a Double flag, in the form of
+// "--flag=value".
+//
+// On success, stores the value of the flag in *value, and returns
+// true.  On failure, returns false without changing *value.
+bool ParseDoubleFlag(const char* str, const char* flag, double* value);
+
+// Parses a string for a string flag, in the form of
+// "--flag=value".
+//
+// On success, stores the value of the flag in *value, and returns
+// true.  On failure, returns false without changing *value.
+bool ParseStringFlag(const char* str, const char* flag, std::string* value);
+
+// Returns true if the string matches the flag.
+bool IsFlag(const char* str, const char* flag);
+
+// Returns true unless value starts with one of: '0', 'f', 'F', 'n' or 'N', or
+// some non-alphanumeric character. Also returns false if the value matches
+// one of 'no', 'false', 'off' (case-insensitive). As a special case, also
+// returns true if value is the empty string.
+bool IsTruthyFlagValue(const std::string& value);
+
+}  // end namespace benchmark
+
+#endif  // BENCHMARK_COMMANDLINEFLAGS_H_
diff --git a/src/third_party/google_benchmark/src/complexity.cc b/src/third_party/google_benchmark/src/complexity.cc
new file mode 100644
index 0000000..aeed67f
--- /dev/null
+++ b/src/third_party/google_benchmark/src/complexity.cc
@@ -0,0 +1,238 @@
+// Copyright 2016 Ismael Jimenez Martinez. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Source project : https://github.com/ismaelJimenez/cpp.leastsq
+// Adapted to be used with google benchmark
+
+#include "benchmark/benchmark.h"
+
+#include <algorithm>
+#include <cmath>
+#include "check.h"
+#include "complexity.h"
+
+namespace benchmark {
+
+// Internal function to calculate the different scalability forms
+BigOFunc* FittingCurve(BigO complexity) {
+  static const double kLog2E = 1.44269504088896340736;
+  switch (complexity) {
+    case oN:
+      return [](IterationCount n) -> double { return static_cast<double>(n); };
+    case oNSquared:
+      return [](IterationCount n) -> double { return std::pow(n, 2); };
+    case oNCubed:
+      return [](IterationCount n) -> double { return std::pow(n, 3); };
+    case oLogN:
+      /* Note: can't use log2 because Android's GNU STL lacks it */
+      return
+          [](IterationCount n) { return kLog2E * log(static_cast<double>(n)); };
+    case oNLogN:
+      /* Note: can't use log2 because Android's GNU STL lacks it */
+      return [](IterationCount n) {
+        return kLog2E * n * log(static_cast<double>(n));
+      };
+    case o1:
+    default:
+      return [](IterationCount) { return 1.0; };
+  }
+}
+
+// Function to return a string for the calculated complexity
+std::string GetBigOString(BigO complexity) {
+  switch (complexity) {
+    case oN:
+      return "N";
+    case oNSquared:
+      return "N^2";
+    case oNCubed:
+      return "N^3";
+    case oLogN:
+      return "lgN";
+    case oNLogN:
+      return "NlgN";
+    case o1:
+      return "(1)";
+    default:
+      return "f(N)";
+  }
+}
+
+// Find the coefficient for the high-order term in the running time, by
+// minimizing the sum of squares of relative error, for the fitting curve
+// given by the lambda expression.
+//   - n             : Vector containing the size of the benchmark tests.
+//   - time          : Vector containing the times for the benchmark tests.
+//   - fitting_curve : lambda expression (e.g. [](int64_t n) {return n; };).
+
+// For a deeper explanation on the algorithm logic, please refer to
+// https://en.wikipedia.org/wiki/Least_squares#Least_squares,_regression_analysis_and_statistics
+
+LeastSq MinimalLeastSq(const std::vector<int64_t>& n,
+                       const std::vector<double>& time,
+                       BigOFunc* fitting_curve) {
+  double sigma_gn = 0.0;
+  double sigma_gn_squared = 0.0;
+  double sigma_time = 0.0;
+  double sigma_time_gn = 0.0;
+
+  // Calculate least square fitting parameter
+  for (size_t i = 0; i < n.size(); ++i) {
+    double gn_i = fitting_curve(n[i]);
+    sigma_gn += gn_i;
+    sigma_gn_squared += gn_i * gn_i;
+    sigma_time += time[i];
+    sigma_time_gn += time[i] * gn_i;
+  }
+
+  LeastSq result;
+  result.complexity = oLambda;
+
+  // Calculate complexity.
+  result.coef = sigma_time_gn / sigma_gn_squared;
+
+  // Calculate RMS
+  double rms = 0.0;
+  for (size_t i = 0; i < n.size(); ++i) {
+    double fit = result.coef * fitting_curve(n[i]);
+    rms += pow((time[i] - fit), 2);
+  }
+
+  // Normalized RMS by the mean of the observed values
+  double mean = sigma_time / n.size();
+  result.rms = sqrt(rms / n.size()) / mean;
+
+  return result;
+}
+
+// Find the coefficient for the high-order term in the running time, by
+// minimizing the sum of squares of relative error.
+//   - n          : Vector containing the size of the benchmark tests.
+//   - time       : Vector containing the times for the benchmark tests.
+//   - complexity : If different than oAuto, the fitting curve will stick to
+//                  this one. If it is oAuto, the best fitting curve is
+//                  calculated and used.
+LeastSq MinimalLeastSq(const std::vector<int64_t>& n,
+                       const std::vector<double>& time, const BigO complexity) {
+  CHECK_EQ(n.size(), time.size());
+  CHECK_GE(n.size(), 2);  // Do not compute a fitting curve if fewer than two
+                          // benchmark runs are given
+  CHECK_NE(complexity, oNone);
+
+  LeastSq best_fit;
+
+  if (complexity == oAuto) {
+    std::vector<BigO> fit_curves = {oLogN, oN, oNLogN, oNSquared, oNCubed};
+
+    // Take o1 as default best fitting curve
+    best_fit = MinimalLeastSq(n, time, FittingCurve(o1));
+    best_fit.complexity = o1;
+
+    // Compute all possible fitting curves and stick to the best one
+    for (const auto& fit : fit_curves) {
+      LeastSq current_fit = MinimalLeastSq(n, time, FittingCurve(fit));
+      if (current_fit.rms < best_fit.rms) {
+        best_fit = current_fit;
+        best_fit.complexity = fit;
+      }
+    }
+  } else {
+    best_fit = MinimalLeastSq(n, time, FittingCurve(complexity));
+    best_fit.complexity = complexity;
+  }
+
+  return best_fit;
+}
+
+std::vector<BenchmarkReporter::Run> ComputeBigO(
+    const std::vector<BenchmarkReporter::Run>& reports) {
+  typedef BenchmarkReporter::Run Run;
+  std::vector<Run> results;
+
+  if (reports.size() < 2) return results;
+
+  // Accumulators.
+  std::vector<int64_t> n;
+  std::vector<double> real_time;
+  std::vector<double> cpu_time;
+
+  // Populate the accumulators.
+  for (const Run& run : reports) {
+    CHECK_GT(run.complexity_n, 0) << "Did you forget to call SetComplexityN?";
+    n.push_back(run.complexity_n);
+    real_time.push_back(run.real_accumulated_time / run.iterations);
+    cpu_time.push_back(run.cpu_accumulated_time / run.iterations);
+  }
+
+  LeastSq result_cpu;
+  LeastSq result_real;
+
+  if (reports[0].complexity == oLambda) {
+    result_cpu = MinimalLeastSq(n, cpu_time, reports[0].complexity_lambda);
+    result_real = MinimalLeastSq(n, real_time, reports[0].complexity_lambda);
+  } else {
+    result_cpu = MinimalLeastSq(n, cpu_time, reports[0].complexity);
+    result_real = MinimalLeastSq(n, real_time, result_cpu.complexity);
+  }
+
+  // Drop the 'args' when reporting complexity.
+  auto run_name = reports[0].run_name;
+  run_name.args.clear();
+
+  // Get the data from the accumulator to BenchmarkReporter::Run's.
+  Run big_o;
+  big_o.run_name = run_name;
+  big_o.run_type = BenchmarkReporter::Run::RT_Aggregate;
+  big_o.repetitions = reports[0].repetitions;
+  big_o.repetition_index = Run::no_repetition_index;
+  big_o.threads = reports[0].threads;
+  big_o.aggregate_name = "BigO";
+  big_o.report_label = reports[0].report_label;
+  big_o.iterations = 0;
+  big_o.real_accumulated_time = result_real.coef;
+  big_o.cpu_accumulated_time = result_cpu.coef;
+  big_o.report_big_o = true;
+  big_o.complexity = result_cpu.complexity;
+
+  // All the time results are reported after being multiplied by the
+  // time unit multiplier. But since RMS is a relative quantity it
+  // should not be multiplied at all. So, here, we _divide_ it by the
+  // multiplier so that when it is multiplied later the result is the
+  // correct one.
+  double multiplier = GetTimeUnitMultiplier(reports[0].time_unit);
+
+  // Only add label to mean/stddev if it is same for all runs
+  Run rms;
+  rms.run_name = run_name;
+  rms.run_type = BenchmarkReporter::Run::RT_Aggregate;
+  rms.aggregate_name = "RMS";
+  rms.report_label = big_o.report_label;
+  rms.iterations = 0;
+  rms.repetition_index = Run::no_repetition_index;
+  rms.repetitions = reports[0].repetitions;
+  rms.threads = reports[0].threads;
+  rms.real_accumulated_time = result_real.rms / multiplier;
+  rms.cpu_accumulated_time = result_cpu.rms / multiplier;
+  rms.report_rms = true;
+  rms.complexity = result_cpu.complexity;
+  // don't forget to keep the time unit, or we won't be able to
+  // recover the correct value.
+  rms.time_unit = reports[0].time_unit;
+
+  results.push_back(big_o);
+  results.push_back(rms);
+  return results;
+}
+
+}  // end namespace benchmark
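
In MinimalLeastSq() above the fitted coefficient is the closed-form least-squares solution coef = sum(time_i * g(n_i)) / sum(g(n_i)^2) for the chosen curve g, and rms is the root-mean-square error normalized by the mean observed time. From the user's side this machinery is driven through the public benchmark API; the following is only a sketch of a benchmark that exercises it, assuming the usual registration macros from benchmark/benchmark.h.

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <cstdlib>
#include <vector>

#include "benchmark/benchmark.h"

// Hypothetical benchmark; the important calls are SetComplexityN() and
// Complexity(), which feed ComputeBigO()/MinimalLeastSq() above.
static void BM_SortVector(benchmark::State& state) {
  const int64_t n = state.range(0);
  std::vector<int> v(static_cast<std::size_t>(n));
  for (auto _ : state) {
    state.PauseTiming();
    std::generate(v.begin(), v.end(), std::rand);
    state.ResumeTiming();
    std::sort(v.begin(), v.end());
  }
  // Record the problem size so the least-squares fit has its n values.
  state.SetComplexityN(n);
}
// oAuto asks MinimalLeastSq() to try oLogN, oN, oNLogN, oNSquared and oNCubed
// and keep the curve with the smallest normalized RMS; the reporter then emits
// the "BigO" and "RMS" aggregate rows built in ComputeBigO().
BENCHMARK(BM_SortVector)
    ->RangeMultiplier(2)
    ->Range(1 << 10, 1 << 16)
    ->Complexity(benchmark::oAuto);
BENCHMARK_MAIN();
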
diff --git a/src/third_party/google_benchmark/src/complexity.h b/src/third_party/google_benchmark/src/complexity.h
new file mode 100644
index 0000000..df29b48
--- /dev/null
+++ b/src/third_party/google_benchmark/src/complexity.h
@@ -0,0 +1,55 @@
+// Copyright 2016 Ismael Jimenez Martinez. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Source project : https://github.com/ismaelJimenez/cpp.leastsq
+// Adapted to be used with google benchmark
+
+#ifndef COMPLEXITY_H_
+#define COMPLEXITY_H_
+
+#include <string>
+#include <vector>
+
+#include "benchmark/benchmark.h"
+
+namespace benchmark {
+
+// Return a vector containing the bigO and RMS information for the specified
+// list of reports. If 'reports.size() < 2' an empty vector is returned.
+std::vector<BenchmarkReporter::Run> ComputeBigO(
+    const std::vector<BenchmarkReporter::Run>& reports);
+
+// This data structure will contain the result returned by MinimalLeastSq
+//   - coef        : Estimated coefficient for the high-order term as
+//                   interpolated from data.
+//   - rms         : Normalized Root Mean Squared Error.
+//   - complexity  : Scalability form (e.g. oN, oNLogN). In case a scalability
+//                   form has been provided to MinimalLeastSq this will return
+//                   the same value. In case BigO::oAuto has been selected, this
+//                   parameter will return the best fitting curve detected.
+
+struct LeastSq {
+  LeastSq() : coef(0.0), rms(0.0), complexity(oNone) {}
+
+  double coef;
+  double rms;
+  BigO complexity;
+};
+
+// Function to return a string for the calculated complexity
+std::string GetBigOString(BigO complexity);
+
+}  // end namespace benchmark
+
+#endif  // COMPLEXITY_H_
diff --git a/src/third_party/google_benchmark/src/console_reporter.cc b/src/third_party/google_benchmark/src/console_reporter.cc
new file mode 100644
index 0000000..6fd7645
--- /dev/null
+++ b/src/third_party/google_benchmark/src/console_reporter.cc
@@ -0,0 +1,177 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <algorithm>
+#include <cstdint>
+#include <cstdio>
+#include <cstring>
+#include <iostream>
+#include <string>
+#include <tuple>
+#include <vector>
+
+#include "benchmark/benchmark.h"
+#include "check.h"
+#include "colorprint.h"
+#include "commandlineflags.h"
+#include "complexity.h"
+#include "counter.h"
+#include "internal_macros.h"
+#include "string_util.h"
+#include "timers.h"
+
+namespace benchmark {
+
+bool ConsoleReporter::ReportContext(const Context& context) {
+  name_field_width_ = context.name_field_width;
+  printed_header_ = false;
+  prev_counters_.clear();
+
+  PrintBasicContext(&GetErrorStream(), context);
+
+#ifdef BENCHMARK_OS_WINDOWS
+  if ((output_options_ & OO_Color) && &std::cout != &GetOutputStream()) {
+    GetErrorStream()
+        << "Color printing is only supported for stdout on windows."
+           " Disabling color printing\n";
+    output_options_ = static_cast< OutputOptions >(output_options_ & ~OO_Color);
+  }
+#endif
+
+  return true;
+}
+
+void ConsoleReporter::PrintHeader(const Run& run) {
+  std::string str = FormatString("%-*s %13s %15s %12s", static_cast<int>(name_field_width_),
+                                 "Benchmark", "Time", "CPU", "Iterations");
+  if(!run.counters.empty()) {
+    if(output_options_ & OO_Tabular) {
+      for(auto const& c : run.counters) {
+        str += FormatString(" %10s", c.first.c_str());
+      }
+    } else {
+      str += " UserCounters...";
+    }
+  }
+  std::string line = std::string(str.length(), '-');
+  GetOutputStream() << line << "\n" << str << "\n" << line << "\n";
+}
+
+void ConsoleReporter::ReportRuns(const std::vector<Run>& reports) {
+  for (const auto& run : reports) {
+    // print the header:
+    // --- if none was printed yet
+    bool print_header = !printed_header_;
+    // --- or if the format is tabular and this run
+    //     has different fields from the prev header
+    print_header |= (output_options_ & OO_Tabular) &&
+                    (!internal::SameNames(run.counters, prev_counters_));
+    if (print_header) {
+      printed_header_ = true;
+      prev_counters_ = run.counters;
+      PrintHeader(run);
+    }
+    // As an alternative to printing the headers like this, we could sort
+    // the benchmarks by header and then print. But this would require
+    // waiting for the full results before printing, or printing twice.
+    PrintRunData(run);
+  }
+}
+
+static void IgnoreColorPrint(std::ostream& out, LogColor, const char* fmt,
+                             ...) {
+  va_list args;
+  va_start(args, fmt);
+  out << FormatString(fmt, args);
+  va_end(args);
+}
+
+
+static std::string FormatTime(double time) {
+  // Align decimal places...
+  if (time < 1.0) {
+    return FormatString("%10.3f", time);
+  }
+  if (time < 10.0) {
+    return FormatString("%10.2f", time);
+  }
+  if (time < 100.0) {
+    return FormatString("%10.1f", time);
+  }
+  return FormatString("%10.0f", time);
+}
+
+void ConsoleReporter::PrintRunData(const Run& result) {
+  typedef void(PrinterFn)(std::ostream&, LogColor, const char*, ...);
+  auto& Out = GetOutputStream();
+  PrinterFn* printer = (output_options_ & OO_Color) ?
+                         (PrinterFn*)ColorPrintf : IgnoreColorPrint;
+  auto name_color =
+      (result.report_big_o || result.report_rms) ? COLOR_BLUE : COLOR_GREEN;
+  printer(Out, name_color, "%-*s ", name_field_width_,
+          result.benchmark_name().c_str());
+
+  if (result.error_occurred) {
+    printer(Out, COLOR_RED, "ERROR OCCURRED: \'%s\'",
+            result.error_message.c_str());
+    printer(Out, COLOR_DEFAULT, "\n");
+    return;
+  }
+
+  const double real_time = result.GetAdjustedRealTime();
+  const double cpu_time = result.GetAdjustedCPUTime();
+  const std::string real_time_str = FormatTime(real_time);
+  const std::string cpu_time_str = FormatTime(cpu_time);
+
+
+  if (result.report_big_o) {
+    std::string big_o = GetBigOString(result.complexity);
+    printer(Out, COLOR_YELLOW, "%10.2f %-4s %10.2f %-4s ", real_time, big_o.c_str(),
+            cpu_time, big_o.c_str());
+  } else if (result.report_rms) {
+    printer(Out, COLOR_YELLOW, "%10.0f %-4s %10.0f %-4s ", real_time * 100, "%",
+            cpu_time * 100, "%");
+  } else {
+    const char* timeLabel = GetTimeUnitString(result.time_unit);
+    printer(Out, COLOR_YELLOW, "%s %-4s %s %-4s ", real_time_str.c_str(), timeLabel,
+            cpu_time_str.c_str(), timeLabel);
+  }
+
+  if (!result.report_big_o && !result.report_rms) {
+    printer(Out, COLOR_CYAN, "%10lld", result.iterations);
+  }
+
+  for (auto& c : result.counters) {
+    const std::size_t cNameLen = std::max(std::string::size_type(10),
+                                          c.first.length());
+    auto const& s = HumanReadableNumber(c.second.value, c.second.oneK);
+    const char* unit = "";
+    if (c.second.flags & Counter::kIsRate)
+      unit = (c.second.flags & Counter::kInvert) ? "s" : "/s";
+    if (output_options_ & OO_Tabular) {
+      printer(Out, COLOR_DEFAULT, " %*s%s", cNameLen - strlen(unit), s.c_str(),
+              unit);
+    } else {
+      printer(Out, COLOR_DEFAULT, " %s=%s%s", c.first.c_str(), s.c_str(), unit);
+    }
+  }
+
+  if (!result.report_label.empty()) {
+    printer(Out, COLOR_DEFAULT, " %s", result.report_label.c_str());
+  }
+
+  printer(Out, COLOR_DEFAULT, "\n");
+}
+
+}  // end namespace benchmark
diff --git a/src/third_party/google_benchmark/src/counter.cc b/src/third_party/google_benchmark/src/counter.cc
new file mode 100644
index 0000000..cf5b78e
--- /dev/null
+++ b/src/third_party/google_benchmark/src/counter.cc
@@ -0,0 +1,80 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "counter.h"
+
+namespace benchmark {
+namespace internal {
+
+double Finish(Counter const& c, IterationCount iterations, double cpu_time,
+              double num_threads) {
+  double v = c.value;
+  if (c.flags & Counter::kIsRate) {
+    v /= cpu_time;
+  }
+  if (c.flags & Counter::kAvgThreads) {
+    v /= num_threads;
+  }
+  if (c.flags & Counter::kIsIterationInvariant) {
+    v *= iterations;
+  }
+  if (c.flags & Counter::kAvgIterations) {
+    v /= iterations;
+  }
+
+  if (c.flags & Counter::kInvert) {  // Invert is *always* last.
+    v = 1.0 / v;
+  }
+  return v;
+}
+
+void Finish(UserCounters* l, IterationCount iterations, double cpu_time,
+            double num_threads) {
+  for (auto& c : *l) {
+    c.second.value = Finish(c.second, iterations, cpu_time, num_threads);
+  }
+}
+
+void Increment(UserCounters* l, UserCounters const& r) {
+  // add counters present in both or just in *l
+  for (auto& c : *l) {
+    auto it = r.find(c.first);
+    if (it != r.end()) {
+      c.second.value = c.second + it->second;
+    }
+  }
+  // add counters present in r, but not in *l
+  for (auto const& tc : r) {
+    auto it = l->find(tc.first);
+    if (it == l->end()) {
+      (*l)[tc.first] = tc.second;
+    }
+  }
+}
+
+bool SameNames(UserCounters const& l, UserCounters const& r) {
+  if (&l == &r) return true;
+  if (l.size() != r.size()) {
+    return false;
+  }
+  for (auto const& c : l) {
+    if (r.find(c.first) == r.end()) {
+      return false;
+    }
+  }
+  return true;
+}
+
+}  // end namespace internal
+}  // end namespace benchmark
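
Finish() above post-processes each user counter according to its flags: kIsRate divides by CPU time, kAvgThreads by the thread count, kIsIterationInvariant multiplies by the iteration count, kAvgIterations divides by it, and kInvert is applied last. In a benchmark these flags are normally attached when the counter is created; a small sketch using the public Counter API (benchmark name and values are illustrative):

#include <cstdint>

#include "benchmark/benchmark.h"

// Sketch of user counters; the flag combinations below are what
// internal::Finish() interprets once a run ends.
static void BM_Process(benchmark::State& state) {
  int64_t bytes = 0;
  for (auto _ : state) {
    bytes += 4096;  // pretend each iteration handles a 4 KiB block
  }
  // kIsRate: Finish() divides the value by CPU time -> reported as bytes/s.
  state.counters["Bandwidth"] = benchmark::Counter(
      static_cast<double>(bytes), benchmark::Counter::kIsRate);
  // kAvgIterations: Finish() divides the value by the iteration count.
  state.counters["BytesPerIter"] = benchmark::Counter(
      static_cast<double>(bytes), benchmark::Counter::kAvgIterations);
}
BENCHMARK(BM_Process);
BENCHMARK_MAIN();
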
diff --git a/src/third_party/google_benchmark/src/counter.h b/src/third_party/google_benchmark/src/counter.h
new file mode 100644
index 0000000..1f5a58e
--- /dev/null
+++ b/src/third_party/google_benchmark/src/counter.h
@@ -0,0 +1,32 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef BENCHMARK_COUNTER_H_
+#define BENCHMARK_COUNTER_H_
+
+#include "benchmark/benchmark.h"
+
+namespace benchmark {
+
+// these counter-related functions are hidden to reduce API surface.
+namespace internal {
+void Finish(UserCounters* l, IterationCount iterations, double time,
+            double num_threads);
+void Increment(UserCounters* l, UserCounters const& r);
+bool SameNames(UserCounters const& l, UserCounters const& r);
+}  // end namespace internal
+
+}  // end namespace benchmark
+
+#endif  // BENCHMARK_COUNTER_H_
diff --git a/src/third_party/google_benchmark/src/csv_reporter.cc b/src/third_party/google_benchmark/src/csv_reporter.cc
new file mode 100644
index 0000000..af2c18f
--- /dev/null
+++ b/src/third_party/google_benchmark/src/csv_reporter.cc
@@ -0,0 +1,154 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "benchmark/benchmark.h"
+#include "complexity.h"
+
+#include <algorithm>
+#include <cstdint>
+#include <iostream>
+#include <string>
+#include <tuple>
+#include <vector>
+
+#include "check.h"
+#include "string_util.h"
+#include "timers.h"
+
+// File format reference: http://edoceo.com/utilitas/csv-file-format.
+
+namespace benchmark {
+
+namespace {
+std::vector<std::string> elements = {
+    "name",           "iterations",       "real_time",        "cpu_time",
+    "time_unit",      "bytes_per_second", "items_per_second", "label",
+    "error_occurred", "error_message"};
+}  // namespace
+
+std::string CsvEscape(const std::string & s) {
+  std::string tmp;
+  tmp.reserve(s.size() + 2);
+  for (char c : s) {
+    switch (c) {
+    case '"' : tmp += "\"\""; break;
+    default  : tmp += c; break;
+    }
+  }
+  return '"' + tmp + '"';
+}
+
+bool CSVReporter::ReportContext(const Context& context) {
+  PrintBasicContext(&GetErrorStream(), context);
+  return true;
+}
+
+void CSVReporter::ReportRuns(const std::vector<Run>& reports) {
+  std::ostream& Out = GetOutputStream();
+
+  if (!printed_header_) {
+    // save the names of all the user counters
+    for (const auto& run : reports) {
+      for (const auto& cnt : run.counters) {
+        if (cnt.first == "bytes_per_second" || cnt.first == "items_per_second")
+          continue;
+        user_counter_names_.insert(cnt.first);
+      }
+    }
+
+    // print the header
+    for (auto B = elements.begin(); B != elements.end();) {
+      Out << *B++;
+      if (B != elements.end()) Out << ",";
+    }
+    for (auto B = user_counter_names_.begin();
+         B != user_counter_names_.end();) {
+      Out << ",\"" << *B++ << "\"";
+    }
+    Out << "\n";
+
+    printed_header_ = true;
+  } else {
+    // check that all the current counters are saved in the name set
+    for (const auto& run : reports) {
+      for (const auto& cnt : run.counters) {
+        if (cnt.first == "bytes_per_second" || cnt.first == "items_per_second")
+          continue;
+        CHECK(user_counter_names_.find(cnt.first) != user_counter_names_.end())
+            << "All counters must be present in each run. "
+            << "Counter named \"" << cnt.first
+            << "\" was not in a run after being added to the header";
+      }
+    }
+  }
+
+  // print results for each run
+  for (const auto& run : reports) {
+    PrintRunData(run);
+  }
+}
+
+void CSVReporter::PrintRunData(const Run& run) {
+  std::ostream& Out = GetOutputStream();
+  Out << CsvEscape(run.benchmark_name()) << ",";
+  if (run.error_occurred) {
+    Out << std::string(elements.size() - 3, ',');
+    Out << "true,";
+    Out << CsvEscape(run.error_message) << "\n";
+    return;
+  }
+
+  // Do not print iteration on bigO and RMS report
+  if (!run.report_big_o && !run.report_rms) {
+    Out << run.iterations;
+  }
+  Out << ",";
+
+  Out << run.GetAdjustedRealTime() << ",";
+  Out << run.GetAdjustedCPUTime() << ",";
+
+  // Do not print timeLabel on bigO and RMS report
+  if (run.report_big_o) {
+    Out << GetBigOString(run.complexity);
+  } else if (!run.report_rms) {
+    Out << GetTimeUnitString(run.time_unit);
+  }
+  Out << ",";
+
+  if (run.counters.find("bytes_per_second") != run.counters.end()) {
+    Out << run.counters.at("bytes_per_second");
+  }
+  Out << ",";
+  if (run.counters.find("items_per_second") != run.counters.end()) {
+    Out << run.counters.at("items_per_second");
+  }
+  Out << ",";
+  if (!run.report_label.empty()) {
+    Out << CsvEscape(run.report_label);
+  }
+  Out << ",,";  // for error_occurred and error_message
+
+  // Print user counters
+  for (const auto& ucn : user_counter_names_) {
+    auto it = run.counters.find(ucn);
+    if (it == run.counters.end()) {
+      Out << ",";
+    } else {
+      Out << "," << it->second;
+    }
+  }
+  Out << '\n';
+}
+
+}  // end namespace benchmark
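
CsvEscape() applies the usual RFC 4180-style quoting: every field is wrapped in double quotes and embedded quotes are doubled. A tiny standalone restatement with a couple of sanity checks (CsvEscapeSketch is an illustrative name, not part of the imported sources):

#include <cassert>
#include <string>

// Same escaping rule as CsvEscape() above: wrap the field in double quotes
// and double any embedded quote character.
static std::string CsvEscapeSketch(const std::string& s) {
  std::string tmp;
  tmp.reserve(s.size() + 2);
  for (char c : s) {
    if (c == '"') {
      tmp += "\"\"";
    } else {
      tmp += c;
    }
  }
  return '"' + tmp + '"';
}

int main() {
  assert(CsvEscapeSketch("plain") == "\"plain\"");
  assert(CsvEscapeSketch("say \"hi\"") == "\"say \"\"hi\"\"\"");
  return 0;
}
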
diff --git a/src/third_party/google_benchmark/src/cycleclock.h b/src/third_party/google_benchmark/src/cycleclock.h
new file mode 100644
index 0000000..d5d62c4
--- /dev/null
+++ b/src/third_party/google_benchmark/src/cycleclock.h
@@ -0,0 +1,192 @@
+// ----------------------------------------------------------------------
+// CycleClock
+//    A CycleClock tells you the current time in Cycles.  The "time"
+//    is actually time since power-on.  This is like time() but doesn't
+//    involve a system call and is much more precise.
+//
+// NOTE: Not all cpu/platform/kernel combinations guarantee that this
+// clock increments at a constant rate or is synchronized across all logical
+// cpus in a system.
+//
+// If you need the above guarantees, please consider using a different
+// API. There are efforts to provide an interface which provides a millisecond
+// granularity and is implemented as a memory read. A memory read is generally
+// cheaper than the CycleClock for many architectures.
+//
+// Also, in some out of order CPU implementations, the CycleClock is not
+// serializing. So if you're trying to count at cycles granularity, your
+// data might be inaccurate due to out of order instruction execution.
+// ----------------------------------------------------------------------
+
+#ifndef BENCHMARK_CYCLECLOCK_H_
+#define BENCHMARK_CYCLECLOCK_H_
+
+#include <cstdint>
+
+#include "benchmark/benchmark.h"
+#include "internal_macros.h"
+
+#if defined(BENCHMARK_OS_MACOSX)
+#include <mach/mach_time.h>
+#endif
+// For MSVC, we want to use '_asm rdtsc' when possible (since it works
+// with even ancient MSVC compilers), and when not possible the
+// __rdtsc intrinsic, declared in <intrin.h>.  Unfortunately, in some
+// environments, <windows.h> and <intrin.h> have conflicting
+// declarations of some other intrinsics, breaking compilation.
+// Therefore, we simply declare __rdtsc ourselves. See also
+// http://connect.microsoft.com/VisualStudio/feedback/details/262047
+#if defined(COMPILER_MSVC) && !defined(_M_IX86)
+extern "C" uint64_t __rdtsc();
+#pragma intrinsic(__rdtsc)
+#endif
+
+#if !defined(BENCHMARK_OS_WINDOWS) || defined(BENCHMARK_OS_MINGW)
+#include <sys/time.h>
+#include <time.h>
+#endif
+
+#ifdef BENCHMARK_OS_EMSCRIPTEN
+#include <emscripten.h>
+#endif
+
+namespace benchmark {
+// NOTE: only i386 and x86_64 have been well tested.
+// PPC, sparc, alpha, and ia64 are based on
+//    http://peter.kuscsik.com/wordpress/?p=14
+// with modifications by m3b.  See also
+//    https://setisvn.ssl.berkeley.edu/svn/lib/fftw-3.0.1/kernel/cycle.h
+namespace cycleclock {
+// This should return the number of cycles since power-on.  Thread-safe.
+inline BENCHMARK_ALWAYS_INLINE int64_t Now() {
+#if defined(BENCHMARK_OS_MACOSX)
+  // this goes at the top because we need ALL Macs, regardless of
+  // architecture, to return the number of "mach time units" that
+  // have passed since startup.  See sysinfo.cc where
+  // InitializeSystemInfo() sets the supposed cpu clock frequency of
+  // macs to the number of mach time units per second, not actual
+  // CPU clock frequency (which can change in the face of CPU
+  // frequency scaling).  Also note that when the Mac sleeps, this
+  // counter pauses; it does not continue counting, nor does it
+  // reset to zero.
+  return mach_absolute_time();
+#elif defined(BENCHMARK_OS_EMSCRIPTEN)
+  // this goes above x86-specific code because old versions of Emscripten
+  // define __x86_64__, although they have nothing to do with it.
+  return static_cast<int64_t>(emscripten_get_now() * 1e+6);
+#elif defined(__i386__)
+  int64_t ret;
+  __asm__ volatile("rdtsc" : "=A"(ret));
+  return ret;
+#elif defined(__x86_64__) || defined(__amd64__)
+  uint64_t low, high;
+  __asm__ volatile("rdtsc" : "=a"(low), "=d"(high));
+  return (high << 32) | low;
+#elif defined(__powerpc__) || defined(__ppc__)
+  // This returns a time-base, which is not always precisely a cycle-count.
+  int64_t tbl, tbu0, tbu1;
+  asm("mftbu %0" : "=r"(tbu0));
+  asm("mftb  %0" : "=r"(tbl));
+  asm("mftbu %0" : "=r"(tbu1));
+  tbl &= -static_cast<int64_t>(tbu0 == tbu1);
+  // high 32 bits in tbu1; low 32 bits in tbl  (tbu0 is garbage)
+  return (tbu1 << 32) | tbl;
+#elif defined(__sparc__)
+  int64_t tick;
+  asm(".byte 0x83, 0x41, 0x00, 0x00");
+  asm("mov   %%g1, %0" : "=r"(tick));
+  return tick;
+#elif defined(__ia64__)
+  int64_t itc;
+  asm("mov %0 = ar.itc" : "=r"(itc));
+  return itc;
+#elif defined(COMPILER_MSVC) && defined(_M_IX86)
+  // Older MSVC compilers (like 7.x) don't seem to support the
+  // __rdtsc intrinsic properly, so I prefer to use _asm instead
+  // when I know it will work.  Otherwise, I'll use __rdtsc and hope
+  // the code is being compiled with a non-ancient compiler.
+  _asm rdtsc
+#elif defined(COMPILER_MSVC)
+  return __rdtsc();
+#elif defined(BENCHMARK_OS_NACL)
+  // Native Client validator on x86/x86-64 allows RDTSC instructions,
+  // and this case is handled above. Native Client validator on ARM
+  // rejects MRC instructions (used in the ARM-specific sequence below),
+  // so we handle it here. Portable Native Client compiles to
+  // architecture-agnostic bytecode, which doesn't provide any
+  // cycle counter access mnemonics.
+
+  // Native Client does not provide any API to access cycle counter.
+  // Use clock_gettime(CLOCK_MONOTONIC, ...) instead of gettimeofday
+  // because it provides nanosecond resolution (which is noticeable at
+  // least for PNaCl modules running on x86 Mac & Linux).
+  // Initialize to always return 0 if clock_gettime fails.
+  struct timespec ts = {0, 0};
+  clock_gettime(CLOCK_MONOTONIC, &ts);
+  return static_cast<int64_t>(ts.tv_sec) * 1000000000 + ts.tv_nsec;
+#elif defined(__aarch64__)
+  // System timer of ARMv8 runs at a different frequency than the CPU's.
+  // The frequency is fixed, typically in the range 1-50MHz.  It can be
+  // read at CNTFRQ special register.  We assume the OS has set up
+  // the virtual timer properly.
+  int64_t virtual_timer_value;
+  asm volatile("mrs %0, cntvct_el0" : "=r"(virtual_timer_value));
+  return virtual_timer_value;
+#elif defined(__ARM_ARCH)
+  // V6 is the earliest arch that has a standard cyclecount
+  // Native Client validator doesn't allow MRC instructions.
+#if (__ARM_ARCH >= 6)
+  uint32_t pmccntr;
+  uint32_t pmuseren;
+  uint32_t pmcntenset;
+  // Read the user mode perf monitor counter access permissions.
+  asm volatile("mrc p15, 0, %0, c9, c14, 0" : "=r"(pmuseren));
+  if (pmuseren & 1) {  // Allows reading perfmon counters for user mode code.
+    asm volatile("mrc p15, 0, %0, c9, c12, 1" : "=r"(pmcntenset));
+    if (pmcntenset & 0x80000000ul) {  // Is it counting?
+      asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r"(pmccntr));
+      // The counter is set up to count every 64th cycle
+      return static_cast<int64_t>(pmccntr) * 64;  // Should optimize to << 6
+    }
+  }
+#endif
+  struct timeval tv;
+  gettimeofday(&tv, nullptr);
+  return static_cast<int64_t>(tv.tv_sec) * 1000000 + tv.tv_usec;
+#elif defined(__mips__)
+  // mips apparently only allows rdtsc for superusers, so we fall
+  // back to gettimeofday.  It's possible clock_gettime would be better.
+  struct timeval tv;
+  gettimeofday(&tv, nullptr);
+  return static_cast<int64_t>(tv.tv_sec) * 1000000 + tv.tv_usec;
+#elif defined(__s390__)  // Covers both s390 and s390x.
+  // Return the CPU clock.
+  uint64_t tsc;
+  asm("stck %0" : "=Q"(tsc) : : "cc");
+  return tsc;
+#elif defined(__riscv) // RISC-V
+  // Use RDCYCLE (and RDCYCLEH on riscv32)
+#if __riscv_xlen == 32
+  uint64_t cycles_lo, cycles_hi0, cycles_hi1;
+  asm("rdcycleh %0" : "=r"(cycles_hi0));
+  asm("rdcycle %0" : "=r"(cycles_lo));
+  asm("rdcycleh %0" : "=r"(cycles_hi1));
+  // This matches the PowerPC overflow detection, above
+  cycles_lo &= -static_cast<int64_t>(cycles_hi0 == cycles_hi1);
+  return (cycles_hi1 << 32) | cycles_lo;
+#else
+  uint64_t cycles;
+  asm("rdcycle %0" : "=r"(cycles));
+  return cycles;
+#endif
+#else
+// The soft failover to a generic implementation is automatic only for ARM.
+// For other platforms the developer is expected to make an attempt to create
+// a fast implementation and use generic version if nothing better is available.
+#error You need to define CycleTimer for your OS and CPU
+#endif
+}
+}  // end namespace cycleclock
+}  // end namespace benchmark
+
+#endif  // BENCHMARK_CYCLECLOCK_H_
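
cycleclock::Now() is internal to the library, but a short sketch of how a caller would bracket a region with it, subject to the caveats above about non-constant rates and non-serializing reads, might look like this (the workload is arbitrary):

#include <cstdint>
#include <cstdio>

#include "cycleclock.h"

int main() {
  // Two raw readings bracket the region of interest; the difference is in
  // cycles, timebase ticks or mach time units depending on the platform.
  const int64_t start = benchmark::cycleclock::Now();
  volatile double sink = 0.0;
  for (int i = 0; i < 1000000; ++i) sink += i * 0.5;
  const int64_t stop = benchmark::cycleclock::Now();
  std::printf("elapsed ticks: %lld\n", static_cast<long long>(stop - start));
  return 0;
}
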
diff --git a/src/third_party/google_benchmark/src/internal_macros.h b/src/third_party/google_benchmark/src/internal_macros.h
new file mode 100644
index 0000000..6adf00d
--- /dev/null
+++ b/src/third_party/google_benchmark/src/internal_macros.h
@@ -0,0 +1,94 @@
+#ifndef BENCHMARK_INTERNAL_MACROS_H_
+#define BENCHMARK_INTERNAL_MACROS_H_
+
+#include "benchmark/benchmark.h"
+
+/* Needed to detect STL */
+#include <cstdlib>
+
+// clang-format off
+
+#ifndef __has_feature
+#define __has_feature(x) 0
+#endif
+
+#if defined(__clang__)
+  #if !defined(COMPILER_CLANG)
+    #define COMPILER_CLANG
+  #endif
+#elif defined(_MSC_VER)
+  #if !defined(COMPILER_MSVC)
+    #define COMPILER_MSVC
+  #endif
+#elif defined(__GNUC__)
+  #if !defined(COMPILER_GCC)
+    #define COMPILER_GCC
+  #endif
+#endif
+
+#if __has_feature(cxx_attributes)
+  #define BENCHMARK_NORETURN [[noreturn]]
+#elif defined(__GNUC__)
+  #define BENCHMARK_NORETURN __attribute__((noreturn))
+#elif defined(COMPILER_MSVC)
+  #define BENCHMARK_NORETURN __declspec(noreturn)
+#else
+  #define BENCHMARK_NORETURN
+#endif
+
+#if defined(__CYGWIN__)
+  #define BENCHMARK_OS_CYGWIN 1
+#elif defined(_WIN32)
+  #define BENCHMARK_OS_WINDOWS 1
+  #if defined(__MINGW32__)
+    #define BENCHMARK_OS_MINGW 1
+  #endif
+#elif defined(__APPLE__)
+  #define BENCHMARK_OS_APPLE 1
+  #include "TargetConditionals.h"
+  #if defined(TARGET_OS_MAC)
+    #define BENCHMARK_OS_MACOSX 1
+    #if defined(TARGET_OS_IPHONE)
+      #define BENCHMARK_OS_IOS 1
+    #endif
+  #endif
+#elif defined(__FreeBSD__)
+  #define BENCHMARK_OS_FREEBSD 1
+#elif defined(__NetBSD__)
+  #define BENCHMARK_OS_NETBSD 1
+#elif defined(__OpenBSD__)
+  #define BENCHMARK_OS_OPENBSD 1
+#elif defined(__linux__)
+  #define BENCHMARK_OS_LINUX 1
+#elif defined(__native_client__)
+  #define BENCHMARK_OS_NACL 1
+#elif defined(__EMSCRIPTEN__)
+  #define BENCHMARK_OS_EMSCRIPTEN 1
+#elif defined(__rtems__)
+  #define BENCHMARK_OS_RTEMS 1
+#elif defined(__Fuchsia__)
+#define BENCHMARK_OS_FUCHSIA 1
+#elif defined (__SVR4) && defined (__sun)
+#define BENCHMARK_OS_SOLARIS 1
+#elif defined(__QNX__)
+#define BENCHMARK_OS_QNX 1
+#endif
+
+#if defined(__ANDROID__) && defined(__GLIBCXX__)
+#define BENCHMARK_STL_ANDROID_GNUSTL 1
+#endif
+
+#if !__has_feature(cxx_exceptions) && !defined(__cpp_exceptions) \
+     && !defined(__EXCEPTIONS)
+  #define BENCHMARK_HAS_NO_EXCEPTIONS
+#endif
+
+#if defined(COMPILER_CLANG) || defined(COMPILER_GCC)
+  #define BENCHMARK_MAYBE_UNUSED __attribute__((unused))
+#else
+  #define BENCHMARK_MAYBE_UNUSED
+#endif
+
+// clang-format on
+
+#endif  // BENCHMARK_INTERNAL_MACROS_H_
diff --git a/src/third_party/google_benchmark/src/json_reporter.cc b/src/third_party/google_benchmark/src/json_reporter.cc
new file mode 100644
index 0000000..e5f3c35
--- /dev/null
+++ b/src/third_party/google_benchmark/src/json_reporter.cc
@@ -0,0 +1,253 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "benchmark/benchmark.h"
+#include "complexity.h"
+
+#include <algorithm>
+#include <cmath>
+#include <cstdint>
+#include <iomanip>  // for setprecision
+#include <iostream>
+#include <limits>
+#include <string>
+#include <tuple>
+#include <vector>
+
+#include "string_util.h"
+#include "timers.h"
+
+namespace benchmark {
+
+namespace {
+
+std::string StrEscape(const std::string & s) {
+  std::string tmp;
+  tmp.reserve(s.size());
+  for (char c : s) {
+    switch (c) {
+    case '\b': tmp += "\\b"; break;
+    case '\f': tmp += "\\f"; break;
+    case '\n': tmp += "\\n"; break;
+    case '\r': tmp += "\\r"; break;
+    case '\t': tmp += "\\t"; break;
+    case '\\': tmp += "\\\\"; break;
+    case '"' : tmp += "\\\""; break;
+    default  : tmp += c; break;
+    }
+  }
+  return tmp;
+}
+
+std::string FormatKV(std::string const& key, std::string const& value) {
+  return StrFormat("\"%s\": \"%s\"", StrEscape(key).c_str(), StrEscape(value).c_str());
+}
+
+std::string FormatKV(std::string const& key, const char* value) {
+  return StrFormat("\"%s\": \"%s\"", StrEscape(key).c_str(), StrEscape(value).c_str());
+}
+
+std::string FormatKV(std::string const& key, bool value) {
+  return StrFormat("\"%s\": %s", StrEscape(key).c_str(), value ? "true" : "false");
+}
+
+std::string FormatKV(std::string const& key, int64_t value) {
+  std::stringstream ss;
+  ss << '"' << StrEscape(key) << "\": " << value;
+  return ss.str();
+}
+
+std::string FormatKV(std::string const& key, IterationCount value) {
+  std::stringstream ss;
+  ss << '"' << StrEscape(key) << "\": " << value;
+  return ss.str();
+}
+
+std::string FormatKV(std::string const& key, double value) {
+  std::stringstream ss;
+  ss << '"' << StrEscape(key) << "\": ";
+
+  if (std::isnan(value))
+    ss << (value < 0 ? "-" : "") << "NaN";
+  else if (std::isinf(value))
+    ss << (value < 0 ? "-" : "") << "Infinity";
+  else {
+    const auto max_digits10 =
+        std::numeric_limits<decltype(value)>::max_digits10;
+    const auto max_fractional_digits10 = max_digits10 - 1;
+    ss << std::scientific << std::setprecision(max_fractional_digits10)
+       << value;
+  }
+  return ss.str();
+}
+
+int64_t RoundDouble(double v) { return std::lround(v); }
+
+}  // end namespace
+
+bool JSONReporter::ReportContext(const Context& context) {
+  std::ostream& out = GetOutputStream();
+
+  out << "{\n";
+  std::string inner_indent(2, ' ');
+
+  // Open context block and print context information.
+  out << inner_indent << "\"context\": {\n";
+  std::string indent(4, ' ');
+
+  std::string walltime_value = LocalDateTimeString();
+  out << indent << FormatKV("date", walltime_value) << ",\n";
+
+  out << indent << FormatKV("host_name", context.sys_info.name) << ",\n";
+
+  if (Context::executable_name) {
+    out << indent << FormatKV("executable", Context::executable_name) << ",\n";
+  }
+
+  CPUInfo const& info = context.cpu_info;
+  out << indent << FormatKV("num_cpus", static_cast<int64_t>(info.num_cpus))
+      << ",\n";
+  out << indent
+      << FormatKV("mhz_per_cpu",
+                  RoundDouble(info.cycles_per_second / 1000000.0))
+      << ",\n";
+  out << indent << FormatKV("cpu_scaling_enabled", info.scaling_enabled)
+      << ",\n";
+
+  out << indent << "\"caches\": [\n";
+  indent = std::string(6, ' ');
+  std::string cache_indent(8, ' ');
+  for (size_t i = 0; i < info.caches.size(); ++i) {
+    auto& CI = info.caches[i];
+    out << indent << "{\n";
+    out << cache_indent << FormatKV("type", CI.type) << ",\n";
+    out << cache_indent << FormatKV("level", static_cast<int64_t>(CI.level))
+        << ",\n";
+    out << cache_indent
+        << FormatKV("size", static_cast<int64_t>(CI.size)) << ",\n";
+    out << cache_indent
+        << FormatKV("num_sharing", static_cast<int64_t>(CI.num_sharing))
+        << "\n";
+    out << indent << "}";
+    if (i != info.caches.size() - 1) out << ",";
+    out << "\n";
+  }
+  indent = std::string(4, ' ');
+  out << indent << "],\n";
+  out << indent << "\"load_avg\": [";
+  for (auto it = info.load_avg.begin(); it != info.load_avg.end();) {
+    out << *it++;
+    if (it != info.load_avg.end()) out << ",";
+  }
+  out << "],\n";
+
+#if defined(NDEBUG)
+  const char build_type[] = "release";
+#else
+  const char build_type[] = "debug";
+#endif
+  out << indent << FormatKV("library_build_type", build_type) << "\n";
+  // Close context block and open the list of benchmarks.
+  out << inner_indent << "},\n";
+  out << inner_indent << "\"benchmarks\": [\n";
+  return true;
+}
+
+void JSONReporter::ReportRuns(std::vector<Run> const& reports) {
+  if (reports.empty()) {
+    return;
+  }
+  std::string indent(4, ' ');
+  std::ostream& out = GetOutputStream();
+  if (!first_report_) {
+    out << ",\n";
+  }
+  first_report_ = false;
+
+  for (auto it = reports.begin(); it != reports.end(); ++it) {
+    out << indent << "{\n";
+    PrintRunData(*it);
+    out << indent << '}';
+    auto it_cp = it;
+    if (++it_cp != reports.end()) {
+      out << ",\n";
+    }
+  }
+}
+
+void JSONReporter::Finalize() {
+  // Close the list of benchmarks and the top level object.
+  GetOutputStream() << "\n  ]\n}\n";
+}
+
+void JSONReporter::PrintRunData(Run const& run) {
+  std::string indent(6, ' ');
+  std::ostream& out = GetOutputStream();
+  out << indent << FormatKV("name", run.benchmark_name()) << ",\n";
+  out << indent << FormatKV("run_name", run.run_name.str()) << ",\n";
+  out << indent << FormatKV("run_type", [&run]() -> const char* {
+    switch (run.run_type) {
+      case BenchmarkReporter::Run::RT_Iteration:
+        return "iteration";
+      case BenchmarkReporter::Run::RT_Aggregate:
+        return "aggregate";
+    }
+    BENCHMARK_UNREACHABLE();
+  }()) << ",\n";
+  out << indent << FormatKV("repetitions", run.repetitions) << ",\n";
+  if (run.run_type != BenchmarkReporter::Run::RT_Aggregate) {
+    out << indent << FormatKV("repetition_index", run.repetition_index)
+        << ",\n";
+  }
+  out << indent << FormatKV("threads", run.threads) << ",\n";
+  if (run.run_type == BenchmarkReporter::Run::RT_Aggregate) {
+    out << indent << FormatKV("aggregate_name", run.aggregate_name) << ",\n";
+  }
+  if (run.error_occurred) {
+    out << indent << FormatKV("error_occurred", run.error_occurred) << ",\n";
+    out << indent << FormatKV("error_message", run.error_message) << ",\n";
+  }
+  if (!run.report_big_o && !run.report_rms) {
+    out << indent << FormatKV("iterations", run.iterations) << ",\n";
+    out << indent << FormatKV("real_time", run.GetAdjustedRealTime()) << ",\n";
+    out << indent << FormatKV("cpu_time", run.GetAdjustedCPUTime());
+    out << ",\n"
+        << indent << FormatKV("time_unit", GetTimeUnitString(run.time_unit));
+  } else if (run.report_big_o) {
+    out << indent << FormatKV("cpu_coefficient", run.GetAdjustedCPUTime())
+        << ",\n";
+    out << indent << FormatKV("real_coefficient", run.GetAdjustedRealTime())
+        << ",\n";
+    out << indent << FormatKV("big_o", GetBigOString(run.complexity)) << ",\n";
+    out << indent << FormatKV("time_unit", GetTimeUnitString(run.time_unit));
+  } else if (run.report_rms) {
+    out << indent << FormatKV("rms", run.GetAdjustedCPUTime());
+  }
+
+  for (auto& c : run.counters) {
+    out << ",\n" << indent << FormatKV(c.first, c.second);
+  }
+
+  if (run.has_memory_result) {
+    out << ",\n" << indent << FormatKV("allocs_per_iter", run.allocs_per_iter);
+    out << ",\n" << indent << FormatKV("max_bytes_used", run.max_bytes_used);
+  }
+
+  if (!run.report_label.empty()) {
+    out << ",\n" << indent << FormatKV("label", run.report_label);
+  }
+  out << '\n';
+}
+
+}  // end namespace benchmark
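
For reference, a standalone sketch (not part of the patch) of the two formatting rules json_reporter.cc relies on above: backslash-escaping of string values and printing doubles with max_digits10 - 1 fractional digits in scientific notation, with NaN/Infinity spelled out as the reporter does even though strict JSON has no such literals. EscapeJson and NumberToJson are illustrative names.

#include <cmath>
#include <iomanip>
#include <iostream>
#include <limits>
#include <sstream>
#include <string>

// Escape the characters that would break a double-quoted JSON string.
std::string EscapeJson(const std::string& s) {
  std::string out;
  out.reserve(s.size());
  for (char c : s) {
    switch (c) {
      case '\n': out += "\\n"; break;
      case '\t': out += "\\t"; break;
      case '\\': out += "\\\\"; break;
      case '"':  out += "\\\""; break;
      default:   out += c; break;
    }
  }
  return out;
}

// Format a double so that it round-trips; NaN/Infinity match the reporter's
// (non-strict-JSON) spelling.
std::string NumberToJson(double v) {
  if (std::isnan(v)) return "NaN";
  if (std::isinf(v)) return v < 0 ? "-Infinity" : "Infinity";
  std::ostringstream ss;
  ss << std::scientific
     << std::setprecision(std::numeric_limits<double>::max_digits10 - 1) << v;
  return ss.str();
}

int main() {
  std::cout << "{\"" << EscapeJson("name \"quoted\"") << "\": "
            << NumberToJson(1234.5) << "}\n";
}
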
diff --git a/src/third_party/google_benchmark/src/log.h b/src/third_party/google_benchmark/src/log.h
new file mode 100644
index 0000000..47d0c35
--- /dev/null
+++ b/src/third_party/google_benchmark/src/log.h
@@ -0,0 +1,74 @@
+#ifndef BENCHMARK_LOG_H_
+#define BENCHMARK_LOG_H_
+
+#include <iostream>
+#include <ostream>
+
+#include "benchmark/benchmark.h"
+
+namespace benchmark {
+namespace internal {
+
+typedef std::basic_ostream<char>&(EndLType)(std::basic_ostream<char>&);
+
+class LogType {
+  friend LogType& GetNullLogInstance();
+  friend LogType& GetErrorLogInstance();
+
+  // FIXME: Add locking to output.
+  template <class Tp>
+  friend LogType& operator<<(LogType&, Tp const&);
+  friend LogType& operator<<(LogType&, EndLType*);
+
+ private:
+  LogType(std::ostream* out) : out_(out) {}
+  std::ostream* out_;
+  BENCHMARK_DISALLOW_COPY_AND_ASSIGN(LogType);
+};
+
+template <class Tp>
+LogType& operator<<(LogType& log, Tp const& value) {
+  if (log.out_) {
+    *log.out_ << value;
+  }
+  return log;
+}
+
+inline LogType& operator<<(LogType& log, EndLType* m) {
+  if (log.out_) {
+    *log.out_ << m;
+  }
+  return log;
+}
+
+inline int& LogLevel() {
+  static int log_level = 0;
+  return log_level;
+}
+
+inline LogType& GetNullLogInstance() {
+  static LogType log(nullptr);
+  return log;
+}
+
+inline LogType& GetErrorLogInstance() {
+  static LogType log(&std::clog);
+  return log;
+}
+
+inline LogType& GetLogInstanceForLevel(int level) {
+  if (level <= LogLevel()) {
+    return GetErrorLogInstance();
+  }
+  return GetNullLogInstance();
+}
+
+}  // end namespace internal
+}  // end namespace benchmark
+
+// clang-format off
+#define VLOG(x)                                                               \
+  (::benchmark::internal::GetLogInstanceForLevel(x) << "-- LOG(" << x << "):" \
+                                                                         " ")
+// clang-format on
+#endif
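
For reference, a simplified standalone sketch (not part of the patch) of the leveled-logging idea behind VLOG above: messages at or below the active level go to a real stream, everything else is swallowed. log.h does this by wrapping a possibly-null ostream pointer in LogType; this sketch uses a bad-state null stream instead, and LogForLevel is an illustrative name.

#include <iostream>

// Return a usable stream for messages that pass the level check, and a stream
// with no buffer (all output discarded) otherwise.
std::ostream& LogForLevel(int level, int current_level) {
  static std::ostream null_stream(nullptr);  // null streambuf: writes are dropped
  return level <= current_level ? std::clog : null_stream;
}

int main() {
  int verbosity = 1;
  LogForLevel(0, verbosity) << "shown at verbosity >= 0\n";
  LogForLevel(2, verbosity) << "silently discarded\n";
}
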
diff --git a/src/third_party/google_benchmark/src/mutex.h b/src/third_party/google_benchmark/src/mutex.h
new file mode 100644
index 0000000..5f461d0
--- /dev/null
+++ b/src/third_party/google_benchmark/src/mutex.h
@@ -0,0 +1,155 @@
+#ifndef BENCHMARK_MUTEX_H_
+#define BENCHMARK_MUTEX_H_
+
+#include <condition_variable>
+#include <mutex>
+
+#include "check.h"
+
+// Enable thread safety attributes only with clang.
+// The attributes can be safely erased when compiling with other compilers.
+#if defined(HAVE_THREAD_SAFETY_ATTRIBUTES)
+#define THREAD_ANNOTATION_ATTRIBUTE__(x) __attribute__((x))
+#else
+#define THREAD_ANNOTATION_ATTRIBUTE__(x)  // no-op
+#endif
+
+#define CAPABILITY(x) THREAD_ANNOTATION_ATTRIBUTE__(capability(x))
+
+#define SCOPED_CAPABILITY THREAD_ANNOTATION_ATTRIBUTE__(scoped_lockable)
+
+#define GUARDED_BY(x) THREAD_ANNOTATION_ATTRIBUTE__(guarded_by(x))
+
+#define PT_GUARDED_BY(x) THREAD_ANNOTATION_ATTRIBUTE__(pt_guarded_by(x))
+
+#define ACQUIRED_BEFORE(...) \
+  THREAD_ANNOTATION_ATTRIBUTE__(acquired_before(__VA_ARGS__))
+
+#define ACQUIRED_AFTER(...) \
+  THREAD_ANNOTATION_ATTRIBUTE__(acquired_after(__VA_ARGS__))
+
+#define REQUIRES(...) \
+  THREAD_ANNOTATION_ATTRIBUTE__(requires_capability(__VA_ARGS__))
+
+#define REQUIRES_SHARED(...) \
+  THREAD_ANNOTATION_ATTRIBUTE__(requires_shared_capability(__VA_ARGS__))
+
+#define ACQUIRE(...) \
+  THREAD_ANNOTATION_ATTRIBUTE__(acquire_capability(__VA_ARGS__))
+
+#define ACQUIRE_SHARED(...) \
+  THREAD_ANNOTATION_ATTRIBUTE__(acquire_shared_capability(__VA_ARGS__))
+
+#define RELEASE(...) \
+  THREAD_ANNOTATION_ATTRIBUTE__(release_capability(__VA_ARGS__))
+
+#define RELEASE_SHARED(...) \
+  THREAD_ANNOTATION_ATTRIBUTE__(release_shared_capability(__VA_ARGS__))
+
+#define TRY_ACQUIRE(...) \
+  THREAD_ANNOTATION_ATTRIBUTE__(try_acquire_capability(__VA_ARGS__))
+
+#define TRY_ACQUIRE_SHARED(...) \
+  THREAD_ANNOTATION_ATTRIBUTE__(try_acquire_shared_capability(__VA_ARGS__))
+
+#define EXCLUDES(...) THREAD_ANNOTATION_ATTRIBUTE__(locks_excluded(__VA_ARGS__))
+
+#define ASSERT_CAPABILITY(x) THREAD_ANNOTATION_ATTRIBUTE__(assert_capability(x))
+
+#define ASSERT_SHARED_CAPABILITY(x) \
+  THREAD_ANNOTATION_ATTRIBUTE__(assert_shared_capability(x))
+
+#define RETURN_CAPABILITY(x) THREAD_ANNOTATION_ATTRIBUTE__(lock_returned(x))
+
+#define NO_THREAD_SAFETY_ANALYSIS \
+  THREAD_ANNOTATION_ATTRIBUTE__(no_thread_safety_analysis)
+
+namespace benchmark {
+
+typedef std::condition_variable Condition;
+
+// NOTE: Wrappers for std::mutex and std::unique_lock are provided so that
+// we can annotate them with thread safety attributes and use the
+// -Wthread-safety warning with clang. The standard library types cannot be
+// used directly because they do not provide the required annotations.
+class CAPABILITY("mutex") Mutex {
+ public:
+  Mutex() {}
+
+  void lock() ACQUIRE() { mut_.lock(); }
+  void unlock() RELEASE() { mut_.unlock(); }
+  std::mutex& native_handle() { return mut_; }
+
+ private:
+  std::mutex mut_;
+};
+
+class SCOPED_CAPABILITY MutexLock {
+  typedef std::unique_lock<std::mutex> MutexLockImp;
+
+ public:
+  MutexLock(Mutex& m) ACQUIRE(m) : ml_(m.native_handle()) {}
+  ~MutexLock() RELEASE() {}
+  MutexLockImp& native_handle() { return ml_; }
+
+ private:
+  MutexLockImp ml_;
+};
+
+class Barrier {
+ public:
+  Barrier(int num_threads) : running_threads_(num_threads) {}
+
+  // Called by each thread
+  bool wait() EXCLUDES(lock_) {
+    bool last_thread = false;
+    {
+      MutexLock ml(lock_);
+      last_thread = createBarrier(ml);
+    }
+    if (last_thread) phase_condition_.notify_all();
+    return last_thread;
+  }
+
+  void removeThread() EXCLUDES(lock_) {
+    MutexLock ml(lock_);
+    --running_threads_;
+    if (entered_ != 0) phase_condition_.notify_all();
+  }
+
+ private:
+  Mutex lock_;
+  Condition phase_condition_;
+  int running_threads_;
+
+  // State for barrier management
+  int phase_number_ = 0;
+  int entered_ = 0;  // Number of threads that have entered this barrier
+
+  // Enter the barrier and wait until all other threads have also
+  // entered the barrier.  Returns true iff this is the last thread to
+  // enter the barrier.
+  bool createBarrier(MutexLock& ml) REQUIRES(lock_) {
+    CHECK_LT(entered_, running_threads_);
+    entered_++;
+    if (entered_ < running_threads_) {
+      // Wait for all threads to enter
+      int phase_number_cp = phase_number_;
+      auto cb = [this, phase_number_cp]() {
+        return this->phase_number_ > phase_number_cp ||
+               entered_ == running_threads_;  // A thread has aborted in error
+      };
+      phase_condition_.wait(ml.native_handle(), cb);
+      if (phase_number_ > phase_number_cp) return false;
+      // else (running_threads_ == entered_) and we are the last thread.
+    }
+    // Last thread has reached the barrier
+    phase_number_++;
+    entered_ = 0;
+    return true;
+  }
+};
+
+}  // end namespace benchmark
+
+#endif  // BENCHMARK_MUTEX_H_
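
For reference, a usage sketch (not part of the patch) showing how the Barrier above is intended to be driven from worker threads; it assumes src/mutex.h and its dependencies are on the include path, and the thread count and printf are illustrative.

#include <cstdio>
#include <thread>
#include <vector>

#include "mutex.h"  // benchmark::Barrier

int main() {
  constexpr int kThreads = 4;
  benchmark::Barrier barrier(kThreads);
  std::vector<std::thread> workers;
  for (int i = 0; i < kThreads; ++i) {
    workers.emplace_back([&barrier, i] {
      // ... per-thread setup would go here ...
      if (barrier.wait()) {
        // Exactly one thread (the last to arrive) sees `true` per phase.
        std::printf("thread %d released the barrier\n", i);
      }
      barrier.removeThread();  // so later phases do not wait on this thread
    });
  }
  for (auto& t : workers) t.join();
}
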
diff --git a/src/third_party/google_benchmark/src/re.h b/src/third_party/google_benchmark/src/re.h
new file mode 100644
index 0000000..fbe2503
--- /dev/null
+++ b/src/third_party/google_benchmark/src/re.h
@@ -0,0 +1,158 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef BENCHMARK_RE_H_
+#define BENCHMARK_RE_H_
+
+#include "internal_macros.h"
+
+// clang-format off
+
+#if !defined(HAVE_STD_REGEX) && \
+    !defined(HAVE_GNU_POSIX_REGEX) && \
+    !defined(HAVE_POSIX_REGEX)
+  // No explicit regex selection; detect based on builtin hints.
+  #if defined(BENCHMARK_OS_LINUX) || defined(BENCHMARK_OS_APPLE)
+    #define HAVE_POSIX_REGEX 1
+  #elif __cplusplus >= 199711L
+    #define HAVE_STD_REGEX 1
+  #endif
+#endif
+
+// Prefer C regex libraries when compiling w/o exceptions so that we can
+// correctly report errors.
+#if defined(BENCHMARK_HAS_NO_EXCEPTIONS) && \
+    defined(HAVE_STD_REGEX) && \
+    (defined(HAVE_GNU_POSIX_REGEX) || defined(HAVE_POSIX_REGEX))
+  #undef HAVE_STD_REGEX
+#endif
+
+#if defined(HAVE_STD_REGEX)
+  #include <regex>
+#elif defined(HAVE_GNU_POSIX_REGEX)
+  #include <gnuregex.h>
+#elif defined(HAVE_POSIX_REGEX)
+  #include <regex.h>
+#else
+#error No regular expression backend was found!
+#endif
+
+// clang-format on
+
+#include <string>
+
+#include "check.h"
+
+namespace benchmark {
+
+// A wrapper around the POSIX regular expression API that provides automatic
+// cleanup
+class Regex {
+ public:
+  Regex() : init_(false) {}
+
+  ~Regex();
+
+  // Compile a regular expression matcher from spec.  Returns true on success.
+  //
+  // On failure (and if error is not nullptr), error is populated with a
+  // human-readable error message.
+  bool Init(const std::string& spec, std::string* error);
+
+  // Returns whether str matches the compiled regular expression.
+  bool Match(const std::string& str);
+
+ private:
+  bool init_;
+// Underlying regular expression object
+#if defined(HAVE_STD_REGEX)
+  std::regex re_;
+#elif defined(HAVE_POSIX_REGEX) || defined(HAVE_GNU_POSIX_REGEX)
+  regex_t re_;
+#else
+#error No regular expression backend implementation available
+#endif
+};
+
+#if defined(HAVE_STD_REGEX)
+
+inline bool Regex::Init(const std::string& spec, std::string* error) {
+#ifdef BENCHMARK_HAS_NO_EXCEPTIONS
+  ((void)error);  // suppress unused warning
+#else
+  try {
+#endif
+  re_ = std::regex(spec, std::regex_constants::extended);
+  init_ = true;
+#ifndef BENCHMARK_HAS_NO_EXCEPTIONS
+}
+catch (const std::regex_error& e) {
+  if (error) {
+    *error = e.what();
+  }
+}
+#endif
+return init_;
+}
+
+inline Regex::~Regex() {}
+
+inline bool Regex::Match(const std::string& str) {
+  if (!init_) {
+    return false;
+  }
+  return std::regex_search(str, re_);
+}
+
+#else
+inline bool Regex::Init(const std::string& spec, std::string* error) {
+  int ec = regcomp(&re_, spec.c_str(), REG_EXTENDED | REG_NOSUB);
+  if (ec != 0) {
+    if (error) {
+      size_t needed = regerror(ec, &re_, nullptr, 0);
+      char* errbuf = new char[needed];
+      regerror(ec, &re_, errbuf, needed);
+
+      // regerror returns the number of bytes necessary to null terminate
+      // the string, so we drop the trailing null byte when assigning to error.
+      CHECK_NE(needed, 0);
+      error->assign(errbuf, needed - 1);
+
+      delete[] errbuf;
+    }
+
+    return false;
+  }
+
+  init_ = true;
+  return true;
+}
+
+inline Regex::~Regex() {
+  if (init_) {
+    regfree(&re_);
+  }
+}
+
+inline bool Regex::Match(const std::string& str) {
+  if (!init_) {
+    return false;
+  }
+  return regexec(&re_, str.c_str(), 0, nullptr, 0) == 0;
+}
+#endif
+
+}  // end namespace benchmark
+
+#endif  // BENCHMARK_RE_H_
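
For reference, a usage sketch (not part of the patch) of the Regex wrapper's two-step Init/Match contract above; it assumes src/re.h and one of the detected regex backends are available, and the pattern and inputs are illustrative.

#include <iostream>
#include <string>

#include "re.h"  // benchmark::Regex

int main() {
  benchmark::Regex re;
  std::string err;
  // Init compiles an extended regex and reports failures via the error string.
  if (!re.Init("^BM_[A-Za-z0-9_]+", &err)) {
    std::cerr << "bad pattern: " << err << "\n";
    return 1;
  }
  std::cout << re.Match("BM_StringCopy/512") << "\n";  // 1
  std::cout << re.Match("not_a_benchmark") << "\n";    // 0
}
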
diff --git a/src/third_party/google_benchmark/src/reporter.cc b/src/third_party/google_benchmark/src/reporter.cc
new file mode 100644
index 0000000..cca6a11
--- /dev/null
+++ b/src/third_party/google_benchmark/src/reporter.cc
@@ -0,0 +1,140 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "benchmark/benchmark.h"
+#include "timers.h"
+
+#include <cstdlib>
+
+#include <iostream>
+#include <tuple>
+#include <vector>
+
+#include "check.h"
+#include "starboard/log.h"
+#include "string_util.h"
+
+namespace benchmark {
+namespace {
+
+class sblog_ostreambuf : public std::streambuf {
+ public:
+  explicit sblog_ostreambuf(SbLogPriority priority) : priority_(priority) {}
+
+  std::streamsize xsputn(const char_type *s, std::streamsize n) override {
+    buffer_.insert(buffer_.end(), s, s + n);
+    if (!buffer_.empty() && buffer_.back() == '\n') {
+      SbLog(priority_, buffer_.c_str());
+      buffer_.clear();
+    }
+    return n;
+  }
+
+ private:
+  SbLogPriority priority_;
+  std::string buffer_;
+};
+
+std::ostream *GetOutputStream() {
+  static sblog_ostreambuf streambuf(kSbLogPriorityInfo);
+  static std::ostream os(&streambuf);
+  return &os;
+}
+
+std::ostream *GetErrorStream() {
+  static sblog_ostreambuf streambuf(kSbLogPriorityError);
+  static std::ostream os(&streambuf);
+  return &os;
+}
+
+}  // namespace
+
+BenchmarkReporter::BenchmarkReporter()
+    : output_stream_(benchmark::GetOutputStream()),
+      error_stream_(benchmark::GetErrorStream()) {}
+
+BenchmarkReporter::~BenchmarkReporter() {}
+
+void BenchmarkReporter::PrintBasicContext(std::ostream *out,
+                                          Context const &context) {
+  CHECK(out) << "cannot be null";
+  auto &Out = *out;
+
+  Out << LocalDateTimeString() << "\n";
+
+  if (context.executable_name)
+    Out << "Running " << context.executable_name << "\n";
+
+  const CPUInfo &info = context.cpu_info;
+  Out << "Run on (" << info.num_cpus << " X "
+      << (info.cycles_per_second / 1000000.0) << " MHz CPU "
+      << ((info.num_cpus > 1) ? "s" : "") << ")\n";
+  if (info.caches.size() != 0) {
+    Out << "CPU Caches:\n";
+    for (auto &CInfo : info.caches) {
+      Out << "  L" << CInfo.level << " " << CInfo.type << " "
+          << (CInfo.size / 1024) << " KiB";
+      if (CInfo.num_sharing != 0)
+        Out << " (x" << (info.num_cpus / CInfo.num_sharing) << ")";
+      Out << "\n";
+    }
+  }
+  if (!info.load_avg.empty()) {
+    Out << "Load Average: ";
+    for (auto It = info.load_avg.begin(); It != info.load_avg.end();) {
+      Out << StrFormat("%.2f", *It++);
+      if (It != info.load_avg.end()) Out << ", ";
+    }
+    Out << "\n";
+  }
+
+  if (info.scaling_enabled) {
+    Out << "***WARNING*** CPU scaling is enabled, the benchmark "
+           "real time measurements may be noisy and will incur extra "
+           "overhead.\n";
+  }
+
+#ifndef NDEBUG
+  Out << "***WARNING*** Library was built as DEBUG. Timings may be "
+         "affected.\n";
+#endif
+}
+
+// No initializer because it's already initialized to NULL.
+const char *BenchmarkReporter::Context::executable_name;
+
+BenchmarkReporter::Context::Context()
+    : cpu_info(CPUInfo::Get()), sys_info(SystemInfo::Get()) {}
+
+std::string BenchmarkReporter::Run::benchmark_name() const {
+  std::string name = run_name.str();
+  if (run_type == RT_Aggregate) {
+    name += "_" + aggregate_name;
+  }
+  return name;
+}
+
+double BenchmarkReporter::Run::GetAdjustedRealTime() const {
+  double new_time = real_accumulated_time * GetTimeUnitMultiplier(time_unit);
+  if (iterations != 0) new_time /= static_cast<double>(iterations);
+  return new_time;
+}
+
+double BenchmarkReporter::Run::GetAdjustedCPUTime() const {
+  double new_time = cpu_accumulated_time * GetTimeUnitMultiplier(time_unit);
+  if (iterations != 0) new_time /= static_cast<double>(iterations);
+  return new_time;
+}
+
+}  // end namespace benchmark
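
For reference, a standalone sketch (not part of the patch) of the custom-streambuf pattern the Cobalt-specific sblog_ostreambuf above uses to route std::ostream output to a platform logging call; here the "platform logger" is plain fputs to stderr rather than SbLog, and all names are illustrative.

#include <cstdio>
#include <iostream>
#include <streambuf>
#include <string>

// Buffer characters until a full line is available, then hand the line to the
// underlying logger.
class LineBufferedLogBuf : public std::streambuf {
 public:
  std::streamsize xsputn(const char_type* s, std::streamsize n) override {
    buffer_.append(s, static_cast<size_t>(n));
    FlushCompleteLines();
    return n;
  }

  int_type overflow(int_type ch) override {
    if (ch != traits_type::eof()) {
      buffer_.push_back(static_cast<char>(ch));
      FlushCompleteLines();
    }
    return traits_type::not_eof(ch);
  }

 private:
  void FlushCompleteLines() {
    size_t pos;
    while ((pos = buffer_.find('\n')) != std::string::npos) {
      std::fputs(buffer_.substr(0, pos + 1).c_str(), stderr);  // "platform logger"
      buffer_.erase(0, pos + 1);
    }
  }
  std::string buffer_;
};

int main() {
  LineBufferedLogBuf buf;
  std::ostream log(&buf);
  log << "benchmark output goes " << "through the platform logger\n";
}
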
diff --git a/src/third_party/google_benchmark/src/sleep.cc b/src/third_party/google_benchmark/src/sleep.cc
new file mode 100644
index 0000000..1512ac9
--- /dev/null
+++ b/src/third_party/google_benchmark/src/sleep.cc
@@ -0,0 +1,51 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "sleep.h"
+
+#include <cerrno>
+#include <cstdlib>
+#include <ctime>
+
+#include "internal_macros.h"
+
+#ifdef BENCHMARK_OS_WINDOWS
+#include <windows.h>
+#endif
+
+namespace benchmark {
+#ifdef BENCHMARK_OS_WINDOWS
+// Windows' Sleep takes a milliseconds argument.
+void SleepForMilliseconds(int milliseconds) { Sleep(milliseconds); }
+void SleepForSeconds(double seconds) {
+  SleepForMilliseconds(static_cast<int>(kNumMillisPerSecond * seconds));
+}
+#else   // BENCHMARK_OS_WINDOWS
+void SleepForMicroseconds(int microseconds) {
+  struct timespec sleep_time;
+  sleep_time.tv_sec = microseconds / kNumMicrosPerSecond;
+  sleep_time.tv_nsec = (microseconds % kNumMicrosPerSecond) * kNumNanosPerMicro;
+  while (nanosleep(&sleep_time, &sleep_time) != 0 && errno == EINTR)
+    ;  // Ignore signals and wait for the full interval to elapse.
+}
+
+void SleepForMilliseconds(int milliseconds) {
+  SleepForMicroseconds(milliseconds * kNumMicrosPerMilli);
+}
+
+void SleepForSeconds(double seconds) {
+  SleepForMicroseconds(static_cast<int>(seconds * kNumMicrosPerSecond));
+}
+#endif  // BENCHMARK_OS_WINDOWS
+}  // end namespace benchmark
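
For reference, a standalone sketch (not part of the patch) of the EINTR-retry idiom used by the POSIX branch of sleep.cc above; nanosleep writes the unslept remainder back through its second argument, so looping on the same timespec resumes the interval after a signal. Names are illustrative and the sketch assumes a POSIX system.

#include <cerrno>
#include <ctime>

void SleepForMicrosSketch(int microseconds) {
  struct timespec remaining;
  remaining.tv_sec = microseconds / 1000000;
  remaining.tv_nsec = (microseconds % 1000000) * 1000L;
  while (nanosleep(&remaining, &remaining) != 0 && errno == EINTR) {
    // Interrupted by a signal: keep sleeping for the time that is left.
  }
}

int main() { SleepForMicrosSketch(1500); }
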
diff --git a/src/third_party/google_benchmark/src/sleep.h b/src/third_party/google_benchmark/src/sleep.h
new file mode 100644
index 0000000..f98551a
--- /dev/null
+++ b/src/third_party/google_benchmark/src/sleep.h
@@ -0,0 +1,15 @@
+#ifndef BENCHMARK_SLEEP_H_
+#define BENCHMARK_SLEEP_H_
+
+namespace benchmark {
+const int kNumMillisPerSecond = 1000;
+const int kNumMicrosPerMilli = 1000;
+const int kNumMicrosPerSecond = kNumMillisPerSecond * 1000;
+const int kNumNanosPerMicro = 1000;
+const int kNumNanosPerSecond = kNumNanosPerMicro * kNumMicrosPerSecond;
+
+void SleepForMilliseconds(int milliseconds);
+void SleepForSeconds(double seconds);
+}  // end namespace benchmark
+
+#endif  // BENCHMARK_SLEEP_H_
diff --git a/src/third_party/google_benchmark/src/statistics.cc b/src/third_party/google_benchmark/src/statistics.cc
new file mode 100644
index 0000000..bd5a3d6
--- /dev/null
+++ b/src/third_party/google_benchmark/src/statistics.cc
@@ -0,0 +1,193 @@
+// Copyright 2016 Ismael Jimenez Martinez. All rights reserved.
+// Copyright 2017 Roman Lebedev. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "benchmark/benchmark.h"
+
+#include <algorithm>
+#include <cmath>
+#include <numeric>
+#include <string>
+#include <vector>
+#include "check.h"
+#include "statistics.h"
+
+namespace benchmark {
+
+auto StatisticsSum = [](const std::vector<double>& v) {
+  return std::accumulate(v.begin(), v.end(), 0.0);
+};
+
+double StatisticsMean(const std::vector<double>& v) {
+  if (v.empty()) return 0.0;
+  return StatisticsSum(v) * (1.0 / v.size());
+}
+
+double StatisticsMedian(const std::vector<double>& v) {
+  if (v.size() < 3) return StatisticsMean(v);
+  std::vector<double> copy(v);
+
+  auto center = copy.begin() + v.size() / 2;
+  std::nth_element(copy.begin(), center, copy.end());
+
+  // Did we have an odd number of samples?
+  // If yes, then center is the median.
+  // If not, then we are looking for the average between center and the value
+  // before it.
+  if (v.size() % 2 == 1) return *center;
+  auto center2 = copy.begin() + v.size() / 2 - 1;
+  std::nth_element(copy.begin(), center2, copy.end());
+  return (*center + *center2) / 2.0;
+}
+
+// Return the sum of the squares of this sample set
+auto SumSquares = [](const std::vector<double>& v) {
+  return std::inner_product(v.begin(), v.end(), v.begin(), 0.0);
+};
+
+auto Sqr = [](const double dat) { return dat * dat; };
+auto Sqrt = [](const double dat) {
+  // Avoid NaN due to imprecision in the calculations
+  if (dat < 0.0) return 0.0;
+  return std::sqrt(dat);
+};
+
+double StatisticsStdDev(const std::vector<double>& v) {
+  const auto mean = StatisticsMean(v);
+  if (v.empty()) return mean;
+
+  // Sample standard deviation is undefined for n = 1
+  if (v.size() == 1) return 0.0;
+
+  const double avg_squares = SumSquares(v) * (1.0 / v.size());
+  return Sqrt(v.size() / (v.size() - 1.0) * (avg_squares - Sqr(mean)));
+}
+
+std::vector<BenchmarkReporter::Run> ComputeStats(
+    const std::vector<BenchmarkReporter::Run>& reports) {
+  typedef BenchmarkReporter::Run Run;
+  std::vector<Run> results;
+
+  auto error_count =
+      std::count_if(reports.begin(), reports.end(),
+                    [](Run const& run) { return run.error_occurred; });
+
+  if (reports.size() - error_count < 2) {
+    // We don't report aggregated data if there was a single run.
+    return results;
+  }
+
+  // Accumulators.
+  std::vector<double> real_accumulated_time_stat;
+  std::vector<double> cpu_accumulated_time_stat;
+
+  real_accumulated_time_stat.reserve(reports.size());
+  cpu_accumulated_time_stat.reserve(reports.size());
+
+  // All repetitions should be run with the same number of iterations so we
+  // can take this information from the first benchmark.
+  const IterationCount run_iterations = reports.front().iterations;
+  // create stats for user counters
+  struct CounterStat {
+    Counter c;
+    std::vector<double> s;
+  };
+  std::map<std::string, CounterStat> counter_stats;
+  for (Run const& r : reports) {
+    for (auto const& cnt : r.counters) {
+      auto it = counter_stats.find(cnt.first);
+      if (it == counter_stats.end()) {
+        counter_stats.insert({cnt.first, {cnt.second, std::vector<double>{}}});
+        it = counter_stats.find(cnt.first);
+        it->second.s.reserve(reports.size());
+      } else {
+        CHECK_EQ(counter_stats[cnt.first].c.flags, cnt.second.flags);
+      }
+    }
+  }
+
+  // Populate the accumulators.
+  for (Run const& run : reports) {
+    CHECK_EQ(reports[0].benchmark_name(), run.benchmark_name());
+    CHECK_EQ(run_iterations, run.iterations);
+    if (run.error_occurred) continue;
+    real_accumulated_time_stat.emplace_back(run.real_accumulated_time);
+    cpu_accumulated_time_stat.emplace_back(run.cpu_accumulated_time);
+    // user counters
+    for (auto const& cnt : run.counters) {
+      auto it = counter_stats.find(cnt.first);
+      CHECK_NE(it, counter_stats.end());
+      it->second.s.emplace_back(cnt.second);
+    }
+  }
+
+  // Only add label if it is same for all runs
+  std::string report_label = reports[0].report_label;
+  for (std::size_t i = 1; i < reports.size(); i++) {
+    if (reports[i].report_label != report_label) {
+      report_label = "";
+      break;
+    }
+  }
+
+  const double iteration_rescale_factor =
+      double(reports.size()) / double(run_iterations);
+
+  for (const auto& Stat : *reports[0].statistics) {
+    // Get the data from the accumulator to BenchmarkReporter::Run's.
+    Run data;
+    data.run_name = reports[0].run_name;
+    data.run_type = BenchmarkReporter::Run::RT_Aggregate;
+    data.threads = reports[0].threads;
+    data.repetitions = reports[0].repetitions;
+    data.repetition_index = Run::no_repetition_index;
+    data.aggregate_name = Stat.name_;
+    data.report_label = report_label;
+
+    // It is incorrect to say that an aggregate is computed over
+    // run's iterations, because those iterations already got averaged.
+    // Similarly, if there are N repetitions with 1 iterations each,
+    // an aggregate will be computed over N measurements, not 1.
+    // Thus it is best to simply use the count of separate reports.
+    data.iterations = reports.size();
+
+    data.real_accumulated_time = Stat.compute_(real_accumulated_time_stat);
+    data.cpu_accumulated_time = Stat.compute_(cpu_accumulated_time_stat);
+
+    // We will divide these times by data.iterations when reporting, but
+    // data.iterations is not necessarily the scale of these measurements,
+    // because in each repetition, these times are summed over all iterations.
+    // And if we want to say that the stats are over N repetitions and not
+    // M iterations, we need to multiply these by (N/M).
+    data.real_accumulated_time *= iteration_rescale_factor;
+    data.cpu_accumulated_time *= iteration_rescale_factor;
+
+    data.time_unit = reports[0].time_unit;
+
+    // user counters
+    for (auto const& kv : counter_stats) {
+      // Do *NOT* rescale the custom counters. They are already properly scaled.
+      const auto uc_stat = Stat.compute_(kv.second.s);
+      auto c = Counter(uc_stat, counter_stats[kv.first].c.flags,
+                       counter_stats[kv.first].c.oneK);
+      data.counters[kv.first] = c;
+    }
+
+    results.push_back(data);
+  }
+
+  return results;
+}
+
+}  // end namespace benchmark
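
For reference, a worked standalone sketch (not part of the patch) applying the same mean, median (via nth_element) and sample standard deviation formulas as the helpers above to a small hard-coded set of repetition times; the values are illustrative.

#include <algorithm>
#include <cmath>
#include <cstdio>
#include <numeric>
#include <vector>

int main() {
  std::vector<double> v = {10.0, 12.0, 11.0, 13.0};  // e.g. per-repetition times

  const double mean = std::accumulate(v.begin(), v.end(), 0.0) / v.size();  // 11.5

  // Median without a full sort: partially order around the middle element(s).
  std::vector<double> c(v);
  auto mid = c.begin() + c.size() / 2;
  std::nth_element(c.begin(), mid, c.end());
  auto mid2 = c.begin() + c.size() / 2 - 1;
  std::nth_element(c.begin(), mid2, c.end());
  const double median = (*mid + *mid2) / 2.0;  // even count: (11 + 12) / 2 = 11.5

  // Sample standard deviation: n/(n-1) * (E[x^2] - E[x]^2), then sqrt.
  const double avg_sq =
      std::inner_product(v.begin(), v.end(), v.begin(), 0.0) / v.size();
  const double stddev =
      std::sqrt(v.size() / (v.size() - 1.0) * (avg_sq - mean * mean));  // ~1.29

  std::printf("mean=%g median=%g stddev=%g\n", mean, median, stddev);
}
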
diff --git a/src/third_party/google_benchmark/src/statistics.h b/src/third_party/google_benchmark/src/statistics.h
new file mode 100644
index 0000000..7eccc85
--- /dev/null
+++ b/src/third_party/google_benchmark/src/statistics.h
@@ -0,0 +1,37 @@
+// Copyright 2016 Ismael Jimenez Martinez. All rights reserved.
+// Copyright 2017 Roman Lebedev. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef STATISTICS_H_
+#define STATISTICS_H_
+
+#include <vector>
+
+#include "benchmark/benchmark.h"
+
+namespace benchmark {
+
+// Return a vector containing the mean, median and standard deviation
+// information (and any user-specified info) for the specified list of reports.
+// If 'reports' contains fewer than two non-errored runs, an empty vector is
+// returned.
+std::vector<BenchmarkReporter::Run> ComputeStats(
+    const std::vector<BenchmarkReporter::Run>& reports);
+
+double StatisticsMean(const std::vector<double>& v);
+double StatisticsMedian(const std::vector<double>& v);
+double StatisticsStdDev(const std::vector<double>& v);
+
+}  // end namespace benchmark
+
+#endif  // STATISTICS_H_
diff --git a/src/third_party/google_benchmark/src/string_util.cc b/src/third_party/google_benchmark/src/string_util.cc
new file mode 100644
index 0000000..39b01a1
--- /dev/null
+++ b/src/third_party/google_benchmark/src/string_util.cc
@@ -0,0 +1,252 @@
+#include "string_util.h"
+
+#include <array>
+#include <cmath>
+#include <cstdarg>
+#include <cstdio>
+#include <memory>
+#include <sstream>
+
+#include "arraysize.h"
+
+namespace benchmark {
+namespace {
+
+// kilo, Mega, Giga, Tera, Peta, Exa, Zetta, Yotta.
+const char kBigSIUnits[] = "kMGTPEZY";
+// Kibi, Mebi, Gibi, Tebi, Pebi, Exbi, Zebi, Yobi.
+const char kBigIECUnits[] = "KMGTPEZY";
+// milli, micro, nano, pico, femto, atto, zepto, yocto.
+const char kSmallSIUnits[] = "munpfazy";
+
+// We require that all three arrays have the same size.
+static_assert(arraysize(kBigSIUnits) == arraysize(kBigIECUnits),
+              "SI and IEC unit arrays must be the same size");
+static_assert(arraysize(kSmallSIUnits) == arraysize(kBigSIUnits),
+              "Small SI and Big SI unit arrays must be the same size");
+
+static const int64_t kUnitsSize = arraysize(kBigSIUnits);
+
+void ToExponentAndMantissa(double val, double thresh, int precision,
+                           double one_k, std::string* mantissa,
+                           int64_t* exponent) {
+  std::stringstream mantissa_stream;
+
+  if (val < 0) {
+    mantissa_stream << "-";
+    val = -val;
+  }
+
+  // Adjust threshold so that it never excludes things which can't be rendered
+  // in 'precision' digits.
+  const double adjusted_threshold =
+      std::max(thresh, 1.0 / std::pow(10.0, precision));
+  const double big_threshold = adjusted_threshold * one_k;
+  const double small_threshold = adjusted_threshold;
+  // Values in ]simple_threshold,small_threshold[ will be printed as-is
+  const double simple_threshold = 0.01;
+
+  if (val > big_threshold) {
+    // Positive powers
+    double scaled = val;
+    for (size_t i = 0; i < arraysize(kBigSIUnits); ++i) {
+      scaled /= one_k;
+      if (scaled <= big_threshold) {
+        mantissa_stream << scaled;
+        *exponent = i + 1;
+        *mantissa = mantissa_stream.str();
+        return;
+      }
+    }
+    mantissa_stream << val;
+    *exponent = 0;
+  } else if (val < small_threshold) {
+    // Negative powers
+    if (val < simple_threshold) {
+      double scaled = val;
+      for (size_t i = 0; i < arraysize(kSmallSIUnits); ++i) {
+        scaled *= one_k;
+        if (scaled >= small_threshold) {
+          mantissa_stream << scaled;
+          *exponent = -static_cast<int64_t>(i + 1);
+          *mantissa = mantissa_stream.str();
+          return;
+        }
+      }
+    }
+    mantissa_stream << val;
+    *exponent = 0;
+  } else {
+    mantissa_stream << val;
+    *exponent = 0;
+  }
+  *mantissa = mantissa_stream.str();
+}
+
+std::string ExponentToPrefix(int64_t exponent, bool iec) {
+  if (exponent == 0) return "";
+
+  const int64_t index = (exponent > 0 ? exponent - 1 : -exponent - 1);
+  if (index >= kUnitsSize) return "";
+
+  const char* array =
+      (exponent > 0 ? (iec ? kBigIECUnits : kBigSIUnits) : kSmallSIUnits);
+  if (iec)
+    return array[index] + std::string("i");
+  else
+    return std::string(1, array[index]);
+}
+
+std::string ToBinaryStringFullySpecified(double value, double threshold,
+                                         int precision, double one_k = 1024.0) {
+  std::string mantissa;
+  int64_t exponent;
+  ToExponentAndMantissa(value, threshold, precision, one_k, &mantissa,
+                        &exponent);
+  return mantissa + ExponentToPrefix(exponent, false);
+}
+
+}  // end namespace
+
+void AppendHumanReadable(int n, std::string* str) {
+  std::stringstream ss;
+  // Round down to the nearest SI prefix.
+  ss << ToBinaryStringFullySpecified(n, 1.0, 0);
+  *str += ss.str();
+}
+
+std::string HumanReadableNumber(double n, double one_k) {
+  // 1.1 means that figures up to 1.1k should be shown with the next unit down;
+  // this softens edge effects.
+  // 1 means that we should show one decimal place of precision.
+  return ToBinaryStringFullySpecified(n, 1.1, 1, one_k);
+}
+
+std::string StrFormatImp(const char* msg, va_list args) {
+  // We might need a second shot at this, so pre-emptively make a copy.
+  va_list args_cp;
+  va_copy(args_cp, args);
+
+  // TODO(ericwf): use std::array for the first attempt to avoid one memory
+  // allocation; guess what the size might be.
+  std::array<char, 256> local_buff;
+  std::size_t size = local_buff.size();
+  // 2015-10-08: vsnprintf is used instead of std::vsnprintf due to a limitation
+  // in the android-ndk.
+  auto ret = vsnprintf(local_buff.data(), size, msg, args_cp);
+
+  va_end(args_cp);
+
+  // handle empty expansion
+  if (ret == 0) return std::string{};
+  if (static_cast<std::size_t>(ret) < size)
+    return std::string(local_buff.data());
+
+  // we did not provide a long enough buffer on our first attempt.
+  // add 1 to size to account for null-byte in size cast to prevent overflow
+  size = static_cast<std::size_t>(ret) + 1;
+  auto buff_ptr = std::unique_ptr<char[]>(new char[size]);
+  // 2015-10-08: vsnprintf is used instead of std::vsnprintf due to a limitation
+  // in the android-ndk.
+  ret = vsnprintf(buff_ptr.get(), size, msg, args);
+  return std::string(buff_ptr.get());
+}
+
+std::string StrFormat(const char* format, ...) {
+  va_list args;
+  va_start(args, format);
+  std::string tmp = StrFormatImp(format, args);
+  va_end(args);
+  return tmp;
+}
+
+#ifdef BENCHMARK_STL_ANDROID_GNUSTL
+/*
+ * GNU STL in Android NDK lacks support for some C++11 functions, including
+ * stoul, stoi, stod. We reimplement them here using C functions strtoul,
+ * strtol, strtod. Note that reimplemented functions are in benchmark::
+ * namespace, not std:: namespace.
+ */
+unsigned long stoul(const std::string& str, size_t* pos, int base) {
+  /* Record previous errno */
+  const int oldErrno = errno;
+  errno = 0;
+
+  const char* strStart = str.c_str();
+  char* strEnd = const_cast<char*>(strStart);
+  const unsigned long result = strtoul(strStart, &strEnd, base);
+
+  const int strtoulErrno = errno;
+  /* Restore previous errno */
+  errno = oldErrno;
+
+  /* Check for errors and return */
+  if (strtoulErrno == ERANGE) {
+    throw std::out_of_range(
+      "stoul failed: " + str + " is outside of range of unsigned long");
+  } else if (strEnd == strStart || strtoulErrno != 0) {
+    throw std::invalid_argument(
+      "stoul failed: " + str + " is not an integer");
+  }
+  if (pos != nullptr) {
+    *pos = static_cast<size_t>(strEnd - strStart);
+  }
+  return result;
+}
+
+int stoi(const std::string& str, size_t* pos, int base) {
+  /* Record previous errno */
+  const int oldErrno = errno;
+  errno = 0;
+
+  const char* strStart = str.c_str();
+  char* strEnd = const_cast<char*>(strStart);
+  const long result = strtol(strStart, &strEnd, base);
+
+  const int strtolErrno = errno;
+  /* Restore previous errno */
+  errno = oldErrno;
+
+  /* Check for errors and return */
+  if (strtolErrno == ERANGE || long(int(result)) != result) {
+    throw std::out_of_range(
+      "stoul failed: " + str + " is outside of range of int");
+  } else if (strEnd == strStart || strtolErrno != 0) {
+    throw std::invalid_argument(
+      "stoul failed: " + str + " is not an integer");
+  }
+  if (pos != nullptr) {
+    *pos = static_cast<size_t>(strEnd - strStart);
+  }
+  return int(result);
+}
+
+double stod(const std::string& str, size_t* pos) {
+  /* Record previous errno */
+  const int oldErrno = errno;
+  errno = 0;
+
+  const char* strStart = str.c_str();
+  char* strEnd = const_cast<char*>(strStart);
+  const double result = strtod(strStart, &strEnd);
+
+  /* Restore previous errno */
+  const int strtodErrno = errno;
+  errno = oldErrno;
+
+  /* Check for errors and return */
+  if (strtodErrno == ERANGE) {
+    throw std::out_of_range(
+      "stoul failed: " + str + " is outside of range of int");
+  } else if (strEnd == strStart || strtodErrno != 0) {
+    throw std::invalid_argument(
+      "stoul failed: " + str + " is not an integer");
+  }
+  if (pos != nullptr) {
+    *pos = static_cast<size_t>(strEnd - strStart);
+  }
+  return result;
+}
+#endif
+
+}  // end namespace benchmark
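
For reference, a standalone sketch (not part of the patch) of the two-pass vsnprintf pattern StrFormatImp uses above: format into a fixed stack buffer first, and only heap-allocate when vsnprintf reports that the output did not fit. FormatSketch and the 64-byte buffer size are illustrative.

#include <array>
#include <cstdarg>
#include <cstdio>
#include <memory>
#include <string>

std::string FormatSketch(const char* fmt, ...) {
  va_list args;
  va_start(args, fmt);
  va_list args_copy;
  va_copy(args_copy, args);  // keep a copy in case a second pass is needed

  std::array<char, 64> local;
  int needed = std::vsnprintf(local.data(), local.size(), fmt, args);
  va_end(args);

  std::string out;
  if (needed >= 0 && static_cast<size_t>(needed) < local.size()) {
    out.assign(local.data());
  } else if (needed >= 0) {
    // First pass truncated: vsnprintf returned the full length, so allocate
    // needed + 1 bytes (for the terminator) and format again.
    std::unique_ptr<char[]> big(new char[needed + 1]);
    std::vsnprintf(big.get(), static_cast<size_t>(needed) + 1, fmt, args_copy);
    out.assign(big.get());
  }
  va_end(args_copy);
  return out;
}

int main() {
  std::printf("%s\n",
              FormatSketch("%s took %.2f ms", "BM_Example", 3.14159).c_str());
}
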
diff --git a/src/third_party/google_benchmark/src/string_util.h b/src/third_party/google_benchmark/src/string_util.h
new file mode 100644
index 0000000..09d7b4b
--- /dev/null
+++ b/src/third_party/google_benchmark/src/string_util.h
@@ -0,0 +1,59 @@
+#ifndef BENCHMARK_STRING_UTIL_H_
+#define BENCHMARK_STRING_UTIL_H_
+
+#include <sstream>
+#include <string>
+#include <utility>
+#include "internal_macros.h"
+
+namespace benchmark {
+
+void AppendHumanReadable(int n, std::string* str);
+
+std::string HumanReadableNumber(double n, double one_k = 1024.0);
+
+#if defined(__MINGW32__)
+__attribute__((format(__MINGW_PRINTF_FORMAT, 1, 2)))
+#elif defined(__GNUC__)
+__attribute__((format(printf, 1, 2)))
+#endif
+std::string
+StrFormat(const char* format, ...);
+
+inline std::ostream& StrCatImp(std::ostream& out) BENCHMARK_NOEXCEPT {
+  return out;
+}
+
+template <class First, class... Rest>
+inline std::ostream& StrCatImp(std::ostream& out, First&& f, Rest&&... rest) {
+  out << std::forward<First>(f);
+  return StrCatImp(out, std::forward<Rest>(rest)...);
+}
+
+template <class... Args>
+inline std::string StrCat(Args&&... args) {
+  std::ostringstream ss;
+  StrCatImp(ss, std::forward<Args>(args)...);
+  return ss.str();
+}
+
+#ifdef BENCHMARK_STL_ANDROID_GNUSTL
+/*
+ * GNU STL in Android NDK lacks support for some C++11 functions, including
+ * stoul, stoi, stod. We reimplement them here using C functions strtoul,
+ * strtol, strtod. Note that reimplemented functions are in benchmark::
+ * namespace, not std:: namespace.
+ */
+unsigned long stoul(const std::string& str, size_t* pos = nullptr,
+                           int base = 10);
+int stoi(const std::string& str, size_t* pos = nullptr, int base = 10);
+double stod(const std::string& str, size_t* pos = nullptr);
+#else
+using std::stoul;
+using std::stoi;
+using std::stod;
+#endif
+
+}  // end namespace benchmark
+
+#endif  // BENCHMARK_STRING_UTIL_H_
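
For reference, a standalone sketch (not part of the patch) of the recursive variadic-template concatenation behind StrCat above: every argument is streamed into one ostringstream, so anything with an operator<< can be concatenated without explicit conversions. Cat and CatImp are illustrative names.

#include <iostream>
#include <sstream>
#include <string>
#include <utility>

// Base case: nothing left to append.
inline std::ostream& CatImp(std::ostream& out) { return out; }

// Peel off one argument per call and stream it.
template <class First, class... Rest>
std::ostream& CatImp(std::ostream& out, First&& f, Rest&&... rest) {
  out << std::forward<First>(f);
  return CatImp(out, std::forward<Rest>(rest)...);
}

template <class... Args>
std::string Cat(Args&&... args) {
  std::ostringstream ss;
  CatImp(ss, std::forward<Args>(args)...);
  return ss.str();
}

int main() {
  // Mixed types concatenate directly, e.g. building sysfs paths as above:
  std::cout << Cat("/sys/devices/system/cpu/cpu", 0, "/cpufreq/scaling_governor")
            << "\n";
}
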
diff --git a/src/third_party/google_benchmark/src/sysinfo.cc b/src/third_party/google_benchmark/src/sysinfo.cc
new file mode 100644
index 0000000..5b7c4af
--- /dev/null
+++ b/src/third_party/google_benchmark/src/sysinfo.cc
@@ -0,0 +1,708 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "internal_macros.h"
+
+#ifdef BENCHMARK_OS_WINDOWS
+#include <shlwapi.h>
+#undef StrCat  // Don't let StrCat in string_util.h be renamed to lstrcatA
+#include <versionhelpers.h>
+#include <windows.h>
+#include <codecvt>
+#else
+#include <fcntl.h>
+#ifndef BENCHMARK_OS_FUCHSIA
+#include <sys/resource.h>
+#endif
+#include <sys/time.h>
+#include <sys/types.h>  // this header must be included before 'sys/sysctl.h' to avoid compilation error on FreeBSD
+#include <unistd.h>
+#if defined BENCHMARK_OS_FREEBSD || defined BENCHMARK_OS_MACOSX || \
+    defined BENCHMARK_OS_NETBSD || defined BENCHMARK_OS_OPENBSD
+#define BENCHMARK_HAS_SYSCTL
+#include <sys/sysctl.h>
+#endif
+#endif
+#if defined(BENCHMARK_OS_SOLARIS)
+#include <kstat.h>
+#endif
+#if defined(BENCHMARK_OS_QNX)
+#include <sys/syspage.h>
+#endif
+
+#include <algorithm>
+#include <array>
+#include <bitset>
+#include <cerrno>
+#include <climits>
+#include <cstdint>
+#include <cstdio>
+#include <cstdlib>
+#include <cstring>
+#include <fstream>
+#include <iostream>
+#include <iterator>
+#include <limits>
+#include <memory>
+#include <sstream>
+#include <locale>
+
+#include "check.h"
+#include "cycleclock.h"
+#include "internal_macros.h"
+#include "log.h"
+#include "sleep.h"
+#include "string_util.h"
+
+namespace benchmark {
+namespace {
+
+void PrintImp(std::ostream& out) { out << std::endl; }
+
+template <class First, class... Rest>
+void PrintImp(std::ostream& out, First&& f, Rest&&... rest) {
+  out << std::forward<First>(f);
+  PrintImp(out, std::forward<Rest>(rest)...);
+}
+
+template <class... Args>
+BENCHMARK_NORETURN void PrintErrorAndDie(Args&&... args) {
+  PrintImp(std::cerr, std::forward<Args>(args)...);
+  std::exit(EXIT_FAILURE);
+}
+
+#ifdef BENCHMARK_HAS_SYSCTL
+
+/// ValueUnion - A type used to correctly alias the byte-for-byte output of
+/// `sysctl` with the result type it's to be interpreted as.
+struct ValueUnion {
+  union DataT {
+    uint32_t uint32_value;
+    uint64_t uint64_value;
+    // For correct aliasing of union members from bytes.
+    char bytes[8];
+  };
+  using DataPtr = std::unique_ptr<DataT, decltype(&std::free)>;
+
+  // The size of the data union member + its trailing array size.
+  size_t Size;
+  DataPtr Buff;
+
+ public:
+  ValueUnion() : Size(0), Buff(nullptr, &std::free) {}
+
+  explicit ValueUnion(size_t BuffSize)
+      : Size(sizeof(DataT) + BuffSize),
+        Buff(::new (std::malloc(Size)) DataT(), &std::free) {}
+
+  ValueUnion(ValueUnion&& other) = default;
+
+  explicit operator bool() const { return bool(Buff); }
+
+  char* data() const { return Buff->bytes; }
+
+  std::string GetAsString() const { return std::string(data()); }
+
+  int64_t GetAsInteger() const {
+    if (Size == sizeof(Buff->uint32_value))
+      return static_cast<int32_t>(Buff->uint32_value);
+    else if (Size == sizeof(Buff->uint64_value))
+      return static_cast<int64_t>(Buff->uint64_value);
+    BENCHMARK_UNREACHABLE();
+  }
+
+  uint64_t GetAsUnsigned() const {
+    if (Size == sizeof(Buff->uint32_value))
+      return Buff->uint32_value;
+    else if (Size == sizeof(Buff->uint64_value))
+      return Buff->uint64_value;
+    BENCHMARK_UNREACHABLE();
+  }
+
+  template <class T, int N>
+  std::array<T, N> GetAsArray() {
+    const int ArrSize = sizeof(T) * N;
+    CHECK_LE(ArrSize, Size);
+    std::array<T, N> Arr;
+    std::memcpy(Arr.data(), data(), ArrSize);
+    return Arr;
+  }
+};
+
+ValueUnion GetSysctlImp(std::string const& Name) {
+#if defined BENCHMARK_OS_OPENBSD
+  int mib[2];
+
+  mib[0] = CTL_HW;
+  if ((Name == "hw.ncpu") || (Name == "hw.cpuspeed")){
+    ValueUnion buff(sizeof(int));
+
+    if (Name == "hw.ncpu") {
+      mib[1] = HW_NCPU;
+    } else {
+      mib[1] = HW_CPUSPEED;
+    }
+
+    if (sysctl(mib, 2, buff.data(), &buff.Size, nullptr, 0) == -1) {
+      return ValueUnion();
+    }
+    return buff;
+  }
+  return ValueUnion();
+#else
+  size_t CurBuffSize = 0;
+  if (sysctlbyname(Name.c_str(), nullptr, &CurBuffSize, nullptr, 0) == -1)
+    return ValueUnion();
+
+  ValueUnion buff(CurBuffSize);
+  if (sysctlbyname(Name.c_str(), buff.data(), &buff.Size, nullptr, 0) == 0)
+    return buff;
+  return ValueUnion();
+#endif
+}
+
+BENCHMARK_MAYBE_UNUSED
+bool GetSysctl(std::string const& Name, std::string* Out) {
+  Out->clear();
+  auto Buff = GetSysctlImp(Name);
+  if (!Buff) return false;
+  Out->assign(Buff.data());
+  return true;
+}
+
+template <class Tp,
+          class = typename std::enable_if<std::is_integral<Tp>::value>::type>
+bool GetSysctl(std::string const& Name, Tp* Out) {
+  *Out = 0;
+  auto Buff = GetSysctlImp(Name);
+  if (!Buff) return false;
+  *Out = static_cast<Tp>(Buff.GetAsUnsigned());
+  return true;
+}
+
+template <class Tp, size_t N>
+bool GetSysctl(std::string const& Name, std::array<Tp, N>* Out) {
+  auto Buff = GetSysctlImp(Name);
+  if (!Buff) return false;
+  *Out = Buff.GetAsArray<Tp, N>();
+  return true;
+}
+#endif
+
+template <class ArgT>
+bool ReadFromFile(std::string const& fname, ArgT* arg) {
+  *arg = ArgT();
+  std::ifstream f(fname.c_str());
+  if (!f.is_open()) return false;
+  f >> *arg;
+  return f.good();
+}
+
+bool CpuScalingEnabled(int num_cpus) {
+  // We don't have a valid CPU count, so don't even bother.
+  if (num_cpus <= 0) return false;
+#ifdef BENCHMARK_OS_QNX
+  return false;
+#endif
+#ifndef BENCHMARK_OS_WINDOWS
+  // On Linux, the CPUfreq subsystem exposes CPU information as files on the
+  // local file system. If reading the exported files fails, then we may not be
+  // running on Linux, so we silently ignore all the read errors.
+  std::string res;
+  for (int cpu = 0; cpu < num_cpus; ++cpu) {
+    std::string governor_file =
+        StrCat("/sys/devices/system/cpu/cpu", cpu, "/cpufreq/scaling_governor");
+    if (ReadFromFile(governor_file, &res) && res != "performance") return true;
+  }
+#endif
+  return false;
+}
+
+int CountSetBitsInCPUMap(std::string Val) {
+  auto CountBits = [](std::string Part) {
+    using CPUMask = std::bitset<sizeof(std::uintptr_t) * CHAR_BIT>;
+    Part = "0x" + Part;
+    CPUMask Mask(benchmark::stoul(Part, nullptr, 16));
+    return static_cast<int>(Mask.count());
+  };
+  size_t Pos;
+  int total = 0;
+  while ((Pos = Val.find(',')) != std::string::npos) {
+    total += CountBits(Val.substr(0, Pos));
+    Val = Val.substr(Pos + 1);
+  }
+  if (!Val.empty()) {
+    total += CountBits(Val);
+  }
+  return total;
+}
+
+BENCHMARK_MAYBE_UNUSED
+std::vector<CPUInfo::CacheInfo> GetCacheSizesFromKVFS() {
+  std::vector<CPUInfo::CacheInfo> res;
+  std::string dir = "/sys/devices/system/cpu/cpu0/cache/";
+  int Idx = 0;
+  while (true) {
+    CPUInfo::CacheInfo info;
+    std::string FPath = StrCat(dir, "index", Idx++, "/");
+    std::ifstream f(StrCat(FPath, "size").c_str());
+    if (!f.is_open()) break;
+    std::string suffix;
+    f >> info.size;
+    if (f.fail())
+      PrintErrorAndDie("Failed while reading file '", FPath, "size'");
+    if (f.good()) {
+      f >> suffix;
+      if (f.bad())
+        PrintErrorAndDie(
+            "Invalid cache size format: failed to read size suffix");
+      else if (f && suffix != "K")
+        PrintErrorAndDie("Invalid cache size format: Expected bytes ", suffix);
+      else if (suffix == "K")
+        info.size *= 1024;
+    }
+    if (!ReadFromFile(StrCat(FPath, "type"), &info.type))
+      PrintErrorAndDie("Failed to read from file ", FPath, "type");
+    if (!ReadFromFile(StrCat(FPath, "level"), &info.level))
+      PrintErrorAndDie("Failed to read from file ", FPath, "level");
+    std::string map_str;
+    if (!ReadFromFile(StrCat(FPath, "shared_cpu_map"), &map_str))
+      PrintErrorAndDie("Failed to read from file ", FPath, "shared_cpu_map");
+    info.num_sharing = CountSetBitsInCPUMap(map_str);
+    res.push_back(info);
+  }
+
+  return res;
+}
+
+#ifdef BENCHMARK_OS_MACOSX
+std::vector<CPUInfo::CacheInfo> GetCacheSizesMacOSX() {
+  std::vector<CPUInfo::CacheInfo> res;
+  std::array<uint64_t, 4> CacheCounts{{0, 0, 0, 0}};
+  GetSysctl("hw.cacheconfig", &CacheCounts);
+
+  struct {
+    std::string name;
+    std::string type;
+    int level;
+    uint64_t num_sharing;
+  } Cases[] = {{"hw.l1dcachesize", "Data", 1, CacheCounts[1]},
+               {"hw.l1icachesize", "Instruction", 1, CacheCounts[1]},
+               {"hw.l2cachesize", "Unified", 2, CacheCounts[2]},
+               {"hw.l3cachesize", "Unified", 3, CacheCounts[3]}};
+  for (auto& C : Cases) {
+    int val;
+    if (!GetSysctl(C.name, &val)) continue;
+    CPUInfo::CacheInfo info;
+    info.type = C.type;
+    info.level = C.level;
+    info.size = val;
+    info.num_sharing = static_cast<int>(C.num_sharing);
+    res.push_back(std::move(info));
+  }
+  return res;
+}
+#elif defined(BENCHMARK_OS_WINDOWS)
+std::vector<CPUInfo::CacheInfo> GetCacheSizesWindows() {
+  std::vector<CPUInfo::CacheInfo> res;
+  DWORD buffer_size = 0;
+  using PInfo = SYSTEM_LOGICAL_PROCESSOR_INFORMATION;
+  using CInfo = CACHE_DESCRIPTOR;
+
+  using UPtr = std::unique_ptr<PInfo, decltype(&std::free)>;
+  GetLogicalProcessorInformation(nullptr, &buffer_size);
+  UPtr buff((PInfo*)malloc(buffer_size), &std::free);
+  if (!GetLogicalProcessorInformation(buff.get(), &buffer_size))
+    PrintErrorAndDie("Failed during call to GetLogicalProcessorInformation: ",
+                     GetLastError());
+
+  PInfo* it = buff.get();
+  PInfo* end = buff.get() + (buffer_size / sizeof(PInfo));
+
+  for (; it != end; ++it) {
+    if (it->Relationship != RelationCache) continue;
+    using BitSet = std::bitset<sizeof(ULONG_PTR) * CHAR_BIT>;
+    BitSet B(it->ProcessorMask);
+    // To prevent duplicates, only consider caches where CPU 0 is specified
+    if (!B.test(0)) continue;
+    CInfo* Cache = &it->Cache;
+    CPUInfo::CacheInfo C;
+    C.num_sharing = static_cast<int>(B.count());
+    C.level = Cache->Level;
+    C.size = Cache->Size;
+    switch (Cache->Type) {
+      case CacheUnified:
+        C.type = "Unified";
+        break;
+      case CacheInstruction:
+        C.type = "Instruction";
+        break;
+      case CacheData:
+        C.type = "Data";
+        break;
+      case CacheTrace:
+        C.type = "Trace";
+        break;
+      default:
+        C.type = "Unknown";
+        break;
+    }
+    res.push_back(C);
+  }
+  return res;
+}
+#elif BENCHMARK_OS_QNX
+std::vector<CPUInfo::CacheInfo> GetCacheSizesQNX() {
+  std::vector<CPUInfo::CacheInfo> res;
+  struct cacheattr_entry *cache = SYSPAGE_ENTRY(cacheattr);
+  uint32_t const elsize = SYSPAGE_ELEMENT_SIZE(cacheattr);
+  int num = SYSPAGE_ENTRY_SIZE(cacheattr) / elsize ;
+  for(int i = 0; i < num; ++i ) {
+    CPUInfo::CacheInfo info;
+    switch (cache->flags){
+      case CACHE_FLAG_INSTR :
+        info.type = "Instruction";
+        info.level = 1;
+        break;
+      case CACHE_FLAG_DATA :
+        info.type = "Data";
+        info.level = 1;
+        break;
+      case CACHE_FLAG_UNIFIED :
+        info.type = "Unified";
+        info.level = 2;
+      case CACHE_FLAG_SHARED :
+        info.type = "Shared";
+        info.level = 3;
+      default :
+        continue;
+        break;
+    }
+    info.size = cache->line_size * cache->num_lines;
+    info.num_sharing = 0;
+    res.push_back(std::move(info));
+    cache = SYSPAGE_ARRAY_ADJ_OFFSET(cacheattr, cache, elsize);
+  }
+  return res;
+}
+#endif
+
+std::vector<CPUInfo::CacheInfo> GetCacheSizes() {
+#ifdef BENCHMARK_OS_MACOSX
+  return GetCacheSizesMacOSX();
+#elif defined(BENCHMARK_OS_WINDOWS)
+  return GetCacheSizesWindows();
+#elif defined(BENCHMARK_OS_QNX)
+  return GetCacheSizesQNX();
+#else
+  return GetCacheSizesFromKVFS();
+#endif
+}
+
+std::string GetSystemName() {
+#if defined(BENCHMARK_OS_WINDOWS)
+  std::string str;
+  const unsigned COUNT = MAX_COMPUTERNAME_LENGTH+1;
+  TCHAR  hostname[COUNT] = {'\0'};
+  DWORD DWCOUNT = COUNT;
+  if (!GetComputerName(hostname, &DWCOUNT))
+    return std::string("");
+#ifndef UNICODE
+  str = std::string(hostname, DWCOUNT);
+#else
+  // Using wstring_convert, which is deprecated in C++17.
+  using convert_type = std::codecvt_utf8<wchar_t>;
+  std::wstring_convert<convert_type, wchar_t> converter;
+  std::wstring wStr(hostname, DWCOUNT);
+  str = converter.to_bytes(wStr);
+#endif
+  return str;
+#else // defined(BENCHMARK_OS_WINDOWS)
+#ifndef HOST_NAME_MAX
+#ifdef BENCHMARK_HAS_SYSCTL // BSD/Mac doesn't have HOST_NAME_MAX defined
+#define HOST_NAME_MAX 64
+#elif defined(BENCHMARK_OS_NACL)
+#define HOST_NAME_MAX 64
+#elif defined(BENCHMARK_OS_QNX)
+#define HOST_NAME_MAX 154
+#elif defined(BENCHMARK_OS_RTEMS)
+#define HOST_NAME_MAX 256
+#else
+#warning "HOST_NAME_MAX not defined. using 64"
+#define HOST_NAME_MAX 64
+#endif
+#endif // def HOST_NAME_MAX
+  char hostname[HOST_NAME_MAX];
+  int retVal = gethostname(hostname, HOST_NAME_MAX);
+  if (retVal != 0) return std::string("");
+  return std::string(hostname);
+#endif // Catch-all POSIX block.
+}
+
+int GetNumCPUs() {
+#ifdef BENCHMARK_HAS_SYSCTL
+  int NumCPU = -1;
+  if (GetSysctl("hw.ncpu", &NumCPU)) return NumCPU;
+  fprintf(stderr, "Err: %s\n", strerror(errno));
+  std::exit(EXIT_FAILURE);
+#elif defined(BENCHMARK_OS_WINDOWS)
+  SYSTEM_INFO sysinfo;
+  // Use memset as opposed to = {} to avoid GCC missing initializer false
+  // positives.
+  std::memset(&sysinfo, 0, sizeof(SYSTEM_INFO));
+  GetSystemInfo(&sysinfo);
+  return sysinfo.dwNumberOfProcessors;  // number of logical
+                                        // processors in the current
+                                        // group
+#elif defined(BENCHMARK_OS_SOLARIS)
+  // Returns -1 in case of a failure.
+  int NumCPU = sysconf(_SC_NPROCESSORS_ONLN);
+  if (NumCPU < 0) {
+    fprintf(stderr,
+            "sysconf(_SC_NPROCESSORS_ONLN) failed with error: %s\n",
+            strerror(errno));
+  }
+  return NumCPU;
+#elif defined(BENCHMARK_OS_QNX)
+  return static_cast<int>(_syspage_ptr->num_cpu);
+#else
+  int NumCPUs = 0;
+  int MaxID = -1;
+  std::ifstream f("/proc/cpuinfo");
+  if (!f.is_open()) {
+    std::cerr << "failed to open /proc/cpuinfo\n";
+    return -1;
+  }
+  const std::string Key = "processor";
+  std::string ln;
+  while (std::getline(f, ln)) {
+    if (ln.empty()) continue;
+    size_t SplitIdx = ln.find(':');
+    std::string value;
+#if defined(__s390__)
+    // s390 has another format in /proc/cpuinfo
+    // it needs to be parsed differently
+    if (SplitIdx != std::string::npos)
+      value = ln.substr(Key.size() + 1, SplitIdx - Key.size() - 1);
+#else
+    if (SplitIdx != std::string::npos) value = ln.substr(SplitIdx + 1);
+#endif
+    if (ln.size() >= Key.size() && ln.compare(0, Key.size(), Key) == 0) {
+      NumCPUs++;
+      if (!value.empty()) {
+        int CurID = benchmark::stoi(value);
+        MaxID = std::max(CurID, MaxID);
+      }
+    }
+  }
+  if (f.bad()) {
+    std::cerr << "Failure reading /proc/cpuinfo\n";
+    return -1;
+  }
+  if (!f.eof()) {
+    std::cerr << "Failed to read to end of /proc/cpuinfo\n";
+    return -1;
+  }
+  f.close();
+
+  if ((MaxID + 1) != NumCPUs) {
+    fprintf(stderr,
+            "CPU ID assignments in /proc/cpuinfo seem messed up."
+            " This is usually caused by a bad BIOS.\n");
+  }
+  return NumCPUs;
+#endif
+  BENCHMARK_UNREACHABLE();
+}
+
+double GetCPUCyclesPerSecond() {
+#if defined BENCHMARK_OS_LINUX || defined BENCHMARK_OS_CYGWIN
+  long freq;
+
+  // If the kernel is exporting the tsc frequency use that. There are issues
+  // where cpuinfo_max_freq cannot be relied on because the BIOS may be
+  // exporting an invalid p-state (on x86) or p-states may be used to put the
+  // processor in a new mode (turbo mode). Essentially, those frequencies
+  // cannot always be relied upon. The same reasons apply to /proc/cpuinfo as
+  // well.
+  if (ReadFromFile("/sys/devices/system/cpu/cpu0/tsc_freq_khz", &freq)
+      // If CPU scaling is in effect, we want to use the *maximum* frequency,
+      // not whatever CPU speed some random processor happens to be using now.
+      || ReadFromFile("/sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_max_freq",
+                      &freq)) {
+    // The value is in kHz (as the file name suggests).  For example, on a
+    // 2GHz warpstation, the file contains the value "2000000".
+    return freq * 1000.0;
+  }
+
+  const double error_value = -1;
+  double bogo_clock = error_value;
+
+  std::ifstream f("/proc/cpuinfo");
+  if (!f.is_open()) {
+    std::cerr << "failed to open /proc/cpuinfo\n";
+    return error_value;
+  }
+
+  auto startsWithKey = [](std::string const& Value, std::string const& Key) {
+    if (Key.size() > Value.size()) return false;
+    auto Cmp = [&](char X, char Y) {
+      return std::tolower(X) == std::tolower(Y);
+    };
+    return std::equal(Key.begin(), Key.end(), Value.begin(), Cmp);
+  };
+
+  std::string ln;
+  while (std::getline(f, ln)) {
+    if (ln.empty()) continue;
+    size_t SplitIdx = ln.find(':');
+    std::string value;
+    if (SplitIdx != std::string::npos) value = ln.substr(SplitIdx + 1);
+    // When parsing the "cpu MHz" and "bogomips" (fallback) entries, we only
+    // accept positive values. Some environments (virtual machines) report zero,
+    // which would cause infinite looping in WallTime_Init.
+    if (startsWithKey(ln, "cpu MHz")) {
+      if (!value.empty()) {
+        double cycles_per_second = benchmark::stod(value) * 1000000.0;
+        if (cycles_per_second > 0) return cycles_per_second;
+      }
+    } else if (startsWithKey(ln, "bogomips")) {
+      if (!value.empty()) {
+        bogo_clock = benchmark::stod(value) * 1000000.0;
+        if (bogo_clock < 0.0) bogo_clock = error_value;
+      }
+    }
+  }
+  if (f.bad()) {
+    std::cerr << "Failure reading /proc/cpuinfo\n";
+    return error_value;
+  }
+  if (!f.eof()) {
+    std::cerr << "Failed to read to end of /proc/cpuinfo\n";
+    return error_value;
+  }
+  f.close();
+  // If we found the bogomips clock, but nothing better, we'll use it (but
+  // we're not happy about it); otherwise, fallback to the rough estimation
+  // below.
+  if (bogo_clock >= 0.0) return bogo_clock;
+
+#elif defined BENCHMARK_HAS_SYSCTL
+  constexpr auto* FreqStr =
+#if defined(BENCHMARK_OS_FREEBSD) || defined(BENCHMARK_OS_NETBSD)
+      "machdep.tsc_freq";
+#elif defined BENCHMARK_OS_OPENBSD
+      "hw.cpuspeed";
+#else
+      "hw.cpufrequency";
+#endif
+  unsigned long long hz = 0;
+#if defined BENCHMARK_OS_OPENBSD
+  if (GetSysctl(FreqStr, &hz)) return hz * 1000000;
+#else
+  if (GetSysctl(FreqStr, &hz)) return hz;
+#endif
+  fprintf(stderr, "Unable to determine clock rate from sysctl: %s: %s\n",
+          FreqStr, strerror(errno));
+
+#elif defined BENCHMARK_OS_WINDOWS
+  // In NT, read MHz from the registry. If we fail to do so or we're in win9x
+  // then make a crude estimate.
+  DWORD data, data_size = sizeof(data);
+  if (IsWindowsXPOrGreater() &&
+      SUCCEEDED(
+          SHGetValueA(HKEY_LOCAL_MACHINE,
+                      "HARDWARE\\DESCRIPTION\\System\\CentralProcessor\\0",
+                      "~MHz", nullptr, &data, &data_size)))
+    return static_cast<double>((int64_t)data *
+                               (int64_t)(1000 * 1000));  // was mhz
+#elif defined (BENCHMARK_OS_SOLARIS)
+  kstat_ctl_t *kc = kstat_open();
+  if (!kc) {
+    std::cerr << "failed to open /dev/kstat\n";
+    return -1;
+  }
+  kstat_t *ksp = kstat_lookup(kc, (char*)"cpu_info", -1, (char*)"cpu_info0");
+  if (!ksp) {
+    std::cerr << "failed to lookup in /dev/kstat\n";
+    return -1;
+  }
+  if (kstat_read(kc, ksp, NULL) < 0) {
+    std::cerr << "failed to read from /dev/kstat\n";
+    return -1;
+  }
+  kstat_named_t *knp =
+      (kstat_named_t*)kstat_data_lookup(ksp, (char*)"current_clock_Hz");
+  if (!knp) {
+    std::cerr << "failed to lookup data in /dev/kstat\n";
+    return -1;
+  }
+  if (knp->data_type != KSTAT_DATA_UINT64) {
+    std::cerr << "current_clock_Hz is of unexpected data type: "
+              << knp->data_type << "\n";
+    return -1;
+  }
+  double clock_hz = knp->value.ui64;
+  kstat_close(kc);
+  return clock_hz;
+#elif defined (BENCHMARK_OS_QNX)
+  return static_cast<double>((int64_t)(SYSPAGE_ENTRY(cpuinfo)->speed) *
+                             (int64_t)(1000 * 1000));
+#endif
+  // If we've fallen through, attempt to roughly estimate the CPU clock rate.
+  const int estimate_time_ms = 1000;
+  const auto start_ticks = cycleclock::Now();
+  SleepForMilliseconds(estimate_time_ms);
+  return static_cast<double>(cycleclock::Now() - start_ticks);
+}
+
+std::vector<double> GetLoadAvg() {
+#if (defined BENCHMARK_OS_FREEBSD || defined(BENCHMARK_OS_LINUX) || \
+    defined BENCHMARK_OS_MACOSX || defined BENCHMARK_OS_NETBSD ||  \
+    defined BENCHMARK_OS_OPENBSD) && !defined(__ANDROID__)
+  constexpr int kMaxSamples = 3;
+  std::vector<double> res(kMaxSamples, 0.0);
+  const int nelem = getloadavg(res.data(), kMaxSamples);
+  if (nelem < 1) {
+    res.clear();
+  } else {
+    res.resize(nelem);
+  }
+  return res;
+#else
+  return {};
+#endif
+}
+
+}  // end namespace
+
+const CPUInfo& CPUInfo::Get() {
+  static const CPUInfo* info = new CPUInfo();
+  return *info;
+}
+
+CPUInfo::CPUInfo()
+    : num_cpus(GetNumCPUs()),
+      cycles_per_second(GetCPUCyclesPerSecond()),
+      caches(GetCacheSizes()),
+      scaling_enabled(CpuScalingEnabled(num_cpus)),
+      load_avg(GetLoadAvg()) {}
+
+
+const SystemInfo& SystemInfo::Get() {
+  static const SystemInfo* info = new SystemInfo();
+  return *info;
+}
+
+SystemInfo::SystemInfo() : name(GetSystemName()) {}
+}  // end namespace benchmark
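
For reference, a minimal usage sketch of the CPUInfo and SystemInfo singletons defined above. It is illustrative only and assumes both structs are exposed through benchmark/benchmark.h with the field types used in this file; PrintCpuSummary is a hypothetical helper, not part of the library.

    #include <cstdio>
    #include "benchmark/benchmark.h"

    void PrintCpuSummary() {
      const benchmark::CPUInfo& cpu = benchmark::CPUInfo::Get();
      std::printf("host=%s cpus=%d freq=%.0f Hz scaling=%s\n",
                  benchmark::SystemInfo::Get().name.c_str(), cpu.num_cpus,
                  cpu.cycles_per_second, cpu.scaling_enabled ? "on" : "off");
      for (const auto& c : cpu.caches) {
        // Each entry reports level, type, size, and how many CPUs share it.
        std::printf("  L%d %s cache: %d bytes, shared by %d CPUs\n",
                    c.level, c.type.c_str(), c.size, c.num_sharing);
      }
    }
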
diff --git a/src/third_party/google_benchmark/src/thread_manager.h b/src/third_party/google_benchmark/src/thread_manager.h
new file mode 100644
index 0000000..28e2dd5
--- /dev/null
+++ b/src/third_party/google_benchmark/src/thread_manager.h
@@ -0,0 +1,64 @@
+#ifndef BENCHMARK_THREAD_MANAGER_H
+#define BENCHMARK_THREAD_MANAGER_H
+
+#include <atomic>
+
+#include "benchmark/benchmark.h"
+#include "mutex.h"
+
+namespace benchmark {
+namespace internal {
+
+class ThreadManager {
+ public:
+  explicit ThreadManager(int num_threads)
+      : alive_threads_(num_threads), start_stop_barrier_(num_threads) {}
+
+  Mutex& GetBenchmarkMutex() const RETURN_CAPABILITY(benchmark_mutex_) {
+    return benchmark_mutex_;
+  }
+
+  bool StartStopBarrier() EXCLUDES(end_cond_mutex_) {
+    return start_stop_barrier_.wait();
+  }
+
+  void NotifyThreadComplete() EXCLUDES(end_cond_mutex_) {
+    start_stop_barrier_.removeThread();
+    if (--alive_threads_ == 0) {
+      MutexLock lock(end_cond_mutex_);
+      end_condition_.notify_all();
+    }
+  }
+
+  void WaitForAllThreads() EXCLUDES(end_cond_mutex_) {
+    MutexLock lock(end_cond_mutex_);
+    end_condition_.wait(lock.native_handle(),
+                        [this]() { return alive_threads_ == 0; });
+  }
+
+ public:
+  struct Result {
+    IterationCount iterations = 0;
+    double real_time_used = 0;
+    double cpu_time_used = 0;
+    double manual_time_used = 0;
+    int64_t complexity_n = 0;
+    std::string report_label_;
+    std::string error_message_;
+    bool has_error_ = false;
+    UserCounters counters;
+  };
+  GUARDED_BY(GetBenchmarkMutex()) Result results;
+
+ private:
+  mutable Mutex benchmark_mutex_;
+  std::atomic<int> alive_threads_;
+  Barrier start_stop_barrier_;
+  Mutex end_cond_mutex_;
+  Condition end_condition_;
+};
+
+}  // namespace internal
+}  // namespace benchmark
+
+#endif  // BENCHMARK_THREAD_MANAGER_H
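
As context, a hedged sketch of the driver pattern ThreadManager is built for, simplified from how the benchmark runner coordinates worker threads; RunWorkers and the workload comment are placeholders.

    #include <thread>
    #include <vector>
    #include "thread_manager.h"

    void RunWorkers(int num_threads) {
      benchmark::internal::ThreadManager manager(num_threads);
      std::vector<std::thread> pool;
      for (int i = 0; i < num_threads; ++i) {
        pool.emplace_back([&manager] {
          manager.StartStopBarrier();    // wait until every worker is ready
          // ... the timed workload would run here ...
          manager.StartStopBarrier();    // wait until every worker has stopped
          manager.NotifyThreadComplete();
        });
      }
      manager.WaitForAllThreads();       // blocks until all workers notified
      for (auto& t : pool) t.join();
    }
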
diff --git a/src/third_party/google_benchmark/src/thread_timer.h b/src/third_party/google_benchmark/src/thread_timer.h
new file mode 100644
index 0000000..1703ca0
--- /dev/null
+++ b/src/third_party/google_benchmark/src/thread_timer.h
@@ -0,0 +1,86 @@
+#ifndef BENCHMARK_THREAD_TIMER_H
+#define BENCHMARK_THREAD_TIMER_H
+
+#include "check.h"
+#include "timers.h"
+
+namespace benchmark {
+namespace internal {
+
+class ThreadTimer {
+  explicit ThreadTimer(bool measure_process_cpu_time_)
+      : measure_process_cpu_time(measure_process_cpu_time_) {}
+
+ public:
+  static ThreadTimer Create() {
+    return ThreadTimer(/*measure_process_cpu_time_=*/false);
+  }
+  static ThreadTimer CreateProcessCpuTime() {
+    return ThreadTimer(/*measure_process_cpu_time_=*/true);
+  }
+
+  // Called by each thread
+  void StartTimer() {
+    running_ = true;
+    start_real_time_ = ChronoClockNow();
+    start_cpu_time_ = ReadCpuTimerOfChoice();
+  }
+
+  // Called by each thread
+  void StopTimer() {
+    CHECK(running_);
+    running_ = false;
+    real_time_used_ += ChronoClockNow() - start_real_time_;
+    // Floating point error can result in the subtraction producing a negative
+    // time. Guard against that.
+    cpu_time_used_ +=
+        std::max<double>(ReadCpuTimerOfChoice() - start_cpu_time_, 0);
+  }
+
+  // Called by each thread
+  void SetIterationTime(double seconds) { manual_time_used_ += seconds; }
+
+  bool running() const { return running_; }
+
+  // REQUIRES: timer is not running
+  double real_time_used() const {
+    CHECK(!running_);
+    return real_time_used_;
+  }
+
+  // REQUIRES: timer is not running
+  double cpu_time_used() const {
+    CHECK(!running_);
+    return cpu_time_used_;
+  }
+
+  // REQUIRES: timer is not running
+  double manual_time_used() const {
+    CHECK(!running_);
+    return manual_time_used_;
+  }
+
+ private:
+  double ReadCpuTimerOfChoice() const {
+    if (measure_process_cpu_time) return ProcessCPUUsage();
+    return ThreadCPUUsage();
+  }
+
+  // Should the thread time or the process time be measured?
+  const bool measure_process_cpu_time;
+
+  bool running_ = false;        // Is the timer running
+  double start_real_time_ = 0;  // If running_
+  double start_cpu_time_ = 0;   // If running_
+
+  // Accumulated time so far (does not contain current slice if running_)
+  double real_time_used_ = 0;
+  double cpu_time_used_ = 0;
+  // Manually set iteration time. User sets this with SetIterationTime(seconds).
+  double manual_time_used_ = 0;
+};
+
+}  // namespace internal
+}  // namespace benchmark
+
+#endif  // BENCHMARK_THREAD_TIMER_H
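
For reference, a minimal sketch of the intended start/stop call sequence for ThreadTimer; the timed region is a placeholder and TimeOneSlice is a hypothetical function for illustration.

    #include "thread_timer.h"

    void TimeOneSlice() {
      benchmark::internal::ThreadTimer timer =
          benchmark::internal::ThreadTimer::Create();  // per-thread CPU time
      timer.StartTimer();
      // ... the code under measurement would run here ...
      timer.StopTimer();
      // Accessors require the timer to be stopped.
      double wall_seconds = timer.real_time_used();
      double cpu_seconds = timer.cpu_time_used();
      (void)wall_seconds;
      (void)cpu_seconds;
    }
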
diff --git a/src/third_party/google_benchmark/src/timers.cc b/src/third_party/google_benchmark/src/timers.cc
new file mode 100644
index 0000000..7613ff9
--- /dev/null
+++ b/src/third_party/google_benchmark/src/timers.cc
@@ -0,0 +1,217 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "timers.h"
+#include "internal_macros.h"
+
+#ifdef BENCHMARK_OS_WINDOWS
+#include <shlwapi.h>
+#undef StrCat  // Don't let StrCat in string_util.h be renamed to lstrcatA
+#include <versionhelpers.h>
+#include <windows.h>
+#else
+#include <fcntl.h>
+#ifndef BENCHMARK_OS_FUCHSIA
+#include <sys/resource.h>
+#endif
+#include <sys/time.h>
+#include <sys/types.h>  // this header must be included before 'sys/sysctl.h' to avoid compilation error on FreeBSD
+#include <unistd.h>
+#if defined BENCHMARK_OS_FREEBSD || defined BENCHMARK_OS_MACOSX
+#include <sys/sysctl.h>
+#endif
+#if defined(BENCHMARK_OS_MACOSX)
+#include <mach/mach_init.h>
+#include <mach/mach_port.h>
+#include <mach/thread_act.h>
+#endif
+#endif
+
+#ifdef BENCHMARK_OS_EMSCRIPTEN
+#include <emscripten.h>
+#endif
+
+#include <cerrno>
+#include <cstdint>
+#include <cstdio>
+#include <cstdlib>
+#include <cstring>
+#include <ctime>
+#include <iostream>
+#include <limits>
+#include <mutex>
+
+#include "check.h"
+#include "log.h"
+#include "sleep.h"
+#include "string_util.h"
+
+namespace benchmark {
+
+// Suppress unused warnings on helper functions.
+#if defined(__GNUC__)
+#pragma GCC diagnostic ignored "-Wunused-function"
+#endif
+
+namespace {
+#if defined(BENCHMARK_OS_WINDOWS)
+double MakeTime(FILETIME const& kernel_time, FILETIME const& user_time) {
+  ULARGE_INTEGER kernel;
+  ULARGE_INTEGER user;
+  kernel.HighPart = kernel_time.dwHighDateTime;
+  kernel.LowPart = kernel_time.dwLowDateTime;
+  user.HighPart = user_time.dwHighDateTime;
+  user.LowPart = user_time.dwLowDateTime;
+  return (static_cast<double>(kernel.QuadPart) +
+          static_cast<double>(user.QuadPart)) *
+         1e-7;
+}
+#elif !defined(BENCHMARK_OS_FUCHSIA)
+double MakeTime(struct rusage const& ru) {
+  return (static_cast<double>(ru.ru_utime.tv_sec) +
+          static_cast<double>(ru.ru_utime.tv_usec) * 1e-6 +
+          static_cast<double>(ru.ru_stime.tv_sec) +
+          static_cast<double>(ru.ru_stime.tv_usec) * 1e-6);
+}
+#endif
+#if defined(BENCHMARK_OS_MACOSX)
+double MakeTime(thread_basic_info_data_t const& info) {
+  return (static_cast<double>(info.user_time.seconds) +
+          static_cast<double>(info.user_time.microseconds) * 1e-6 +
+          static_cast<double>(info.system_time.seconds) +
+          static_cast<double>(info.system_time.microseconds) * 1e-6);
+}
+#endif
+#if defined(CLOCK_PROCESS_CPUTIME_ID) || defined(CLOCK_THREAD_CPUTIME_ID)
+double MakeTime(struct timespec const& ts) {
+  return ts.tv_sec + (static_cast<double>(ts.tv_nsec) * 1e-9);
+}
+#endif
+
+BENCHMARK_NORETURN static void DiagnoseAndExit(const char* msg) {
+  std::cerr << "ERROR: " << msg << std::endl;
+  std::exit(EXIT_FAILURE);
+}
+
+}  // end namespace
+
+double ProcessCPUUsage() {
+#if defined(BENCHMARK_OS_WINDOWS)
+  HANDLE proc = GetCurrentProcess();
+  FILETIME creation_time;
+  FILETIME exit_time;
+  FILETIME kernel_time;
+  FILETIME user_time;
+  if (GetProcessTimes(proc, &creation_time, &exit_time, &kernel_time,
+                      &user_time))
+    return MakeTime(kernel_time, user_time);
+  DiagnoseAndExit("GetProccessTimes() failed");
+#elif defined(BENCHMARK_OS_EMSCRIPTEN)
+  // clock_gettime(CLOCK_PROCESS_CPUTIME_ID, ...) returns 0 on Emscripten.
+  // Use Emscripten-specific API. Reported CPU time would be exactly the
+  // same as total time, but this is ok because there aren't long-latency
+  // synchronous system calls in Emscripten.
+  return emscripten_get_now() * 1e-3;
+#elif defined(CLOCK_PROCESS_CPUTIME_ID) && !defined(BENCHMARK_OS_MACOSX)
+  // FIXME We want to use clock_gettime, but it's not available in MacOS 10.11. See
+  // https://github.com/google/benchmark/pull/292
+  struct timespec spec;
+  if (clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &spec) == 0)
+    return MakeTime(spec);
+  DiagnoseAndExit("clock_gettime(CLOCK_PROCESS_CPUTIME_ID, ...) failed");
+#else
+  struct rusage ru;
+  if (getrusage(RUSAGE_SELF, &ru) == 0) return MakeTime(ru);
+  DiagnoseAndExit("getrusage(RUSAGE_SELF, ...) failed");
+#endif
+}
+
+double ThreadCPUUsage() {
+#if defined(BENCHMARK_OS_WINDOWS)
+  HANDLE this_thread = GetCurrentThread();
+  FILETIME creation_time;
+  FILETIME exit_time;
+  FILETIME kernel_time;
+  FILETIME user_time;
+  GetThreadTimes(this_thread, &creation_time, &exit_time, &kernel_time,
+                 &user_time);
+  return MakeTime(kernel_time, user_time);
+#elif defined(BENCHMARK_OS_MACOSX)
+  // FIXME We want to use clock_gettime, but it's not available in MacOS 10.11. See
+  // https://github.com/google/benchmark/pull/292
+  mach_msg_type_number_t count = THREAD_BASIC_INFO_COUNT;
+  thread_basic_info_data_t info;
+  mach_port_t thread = pthread_mach_thread_np(pthread_self());
+  if (thread_info(thread, THREAD_BASIC_INFO, (thread_info_t)&info, &count) ==
+      KERN_SUCCESS) {
+    return MakeTime(info);
+  }
+  DiagnoseAndExit("ThreadCPUUsage() failed when evaluating thread_info");
+#elif defined(BENCHMARK_OS_EMSCRIPTEN)
+  // Emscripten doesn't support traditional threads
+  return ProcessCPUUsage();
+#elif defined(BENCHMARK_OS_RTEMS)
+  // RTEMS doesn't support CLOCK_THREAD_CPUTIME_ID. See
+  // https://github.com/RTEMS/rtems/blob/master/cpukit/posix/src/clockgettime.c
+  return ProcessCPUUsage();
+#elif defined(BENCHMARK_OS_SOLARIS)
+  struct rusage ru;
+  if (getrusage(RUSAGE_LWP, &ru) == 0) return MakeTime(ru);
+  DiagnoseAndExit("getrusage(RUSAGE_LWP, ...) failed");
+#elif defined(CLOCK_THREAD_CPUTIME_ID)
+  struct timespec ts;
+  if (clock_gettime(CLOCK_THREAD_CPUTIME_ID, &ts) == 0) return MakeTime(ts);
+  DiagnoseAndExit("clock_gettime(CLOCK_THREAD_CPUTIME_ID, ...) failed");
+#else
+#error Per-thread timing is not available on your system.
+#endif
+}
+
+namespace {
+
+std::string DateTimeString(bool local) {
+  typedef std::chrono::system_clock Clock;
+  std::time_t now = Clock::to_time_t(Clock::now());
+  const std::size_t kStorageSize = 128;
+  char storage[kStorageSize];
+  std::size_t written;
+
+  if (local) {
+#if defined(BENCHMARK_OS_WINDOWS)
+    written =
+        std::strftime(storage, sizeof(storage), "%x %X", ::localtime(&now));
+#else
+    std::tm timeinfo;
+    ::localtime_r(&now, &timeinfo);
+    written = std::strftime(storage, sizeof(storage), "%F %T", &timeinfo);
+#endif
+  } else {
+#if defined(BENCHMARK_OS_WINDOWS)
+    written = std::strftime(storage, sizeof(storage), "%x %X", ::gmtime(&now));
+#else
+    std::tm timeinfo;
+    ::gmtime_r(&now, &timeinfo);
+    written = std::strftime(storage, sizeof(storage), "%F %T", &timeinfo);
+#endif
+  }
+  CHECK(written < kStorageSize);
+  ((void)written);  // prevent unused variable in optimized mode.
+  return std::string(storage);
+}
+
+}  // end namespace
+
+std::string LocalDateTimeString() { return DateTimeString(true); }
+
+}  // end namespace benchmark
diff --git a/src/third_party/google_benchmark/src/timers.h b/src/third_party/google_benchmark/src/timers.h
new file mode 100644
index 0000000..65606cc
--- /dev/null
+++ b/src/third_party/google_benchmark/src/timers.h
@@ -0,0 +1,48 @@
+#ifndef BENCHMARK_TIMERS_H
+#define BENCHMARK_TIMERS_H
+
+#include <chrono>
+#include <string>
+
+namespace benchmark {
+
+// Return the CPU usage of the current process
+double ProcessCPUUsage();
+
+// Return the CPU usage of the children of the current process
+double ChildrenCPUUsage();
+
+// Return the CPU usage of the current thread
+double ThreadCPUUsage();
+
+#if defined(HAVE_STEADY_CLOCK)
+template <bool HighResIsSteady = std::chrono::high_resolution_clock::is_steady>
+struct ChooseSteadyClock {
+  typedef std::chrono::high_resolution_clock type;
+};
+
+template <>
+struct ChooseSteadyClock<false> {
+  typedef std::chrono::steady_clock type;
+};
+#endif
+
+struct ChooseClockType {
+#if defined(HAVE_STEADY_CLOCK)
+  typedef ChooseSteadyClock<>::type type;
+#else
+  typedef std::chrono::high_resolution_clock type;
+#endif
+};
+
+inline double ChronoClockNow() {
+  typedef ChooseClockType::type ClockType;
+  using FpSeconds = std::chrono::duration<double, std::chrono::seconds::period>;
+  return FpSeconds(ClockType::now().time_since_epoch()).count();
+}
+
+std::string LocalDateTimeString();
+
+}  // end namespace benchmark
+
+#endif  // BENCHMARK_TIMERS_H
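
For reference, a small sketch combining the wall-clock and CPU-time helpers declared above; the work being measured is a placeholder and MeasureDoWork is a hypothetical function for illustration.

    #include <cstdio>
    #include "timers.h"

    void MeasureDoWork() {
      const double wall_start = benchmark::ChronoClockNow();
      const double cpu_start = benchmark::ThreadCPUUsage();
      // ... the code being measured would run here ...
      std::printf("wall=%.6fs cpu=%.6fs (started %s)\n",
                  benchmark::ChronoClockNow() - wall_start,
                  benchmark::ThreadCPUUsage() - cpu_start,
                  benchmark::LocalDateTimeString().c_str());
    }
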
diff --git a/src/third_party/google_benchmark/test/AssemblyTests.cmake b/src/third_party/google_benchmark/test/AssemblyTests.cmake
new file mode 100644
index 0000000..3d07858
--- /dev/null
+++ b/src/third_party/google_benchmark/test/AssemblyTests.cmake
@@ -0,0 +1,46 @@
+
+include(split_list)
+
+set(ASM_TEST_FLAGS "")
+check_cxx_compiler_flag(-O3 BENCHMARK_HAS_O3_FLAG)
+if (BENCHMARK_HAS_O3_FLAG)
+  list(APPEND ASM_TEST_FLAGS -O3)
+endif()
+
+check_cxx_compiler_flag(-g0 BENCHMARK_HAS_G0_FLAG)
+if (BENCHMARK_HAS_G0_FLAG)
+  list(APPEND ASM_TEST_FLAGS -g0)
+endif()
+
+check_cxx_compiler_flag(-fno-stack-protector BENCHMARK_HAS_FNO_STACK_PROTECTOR_FLAG)
+if (BENCHMARK_HAS_FNO_STACK_PROTECTOR_FLAG)
+  list(APPEND ASM_TEST_FLAGS -fno-stack-protector)
+endif()
+
+split_list(ASM_TEST_FLAGS)
+string(TOUPPER "${CMAKE_CXX_COMPILER_ID}" ASM_TEST_COMPILER)
+
+macro(add_filecheck_test name)
+  cmake_parse_arguments(ARG "" "" "CHECK_PREFIXES" ${ARGV})
+  add_library(${name} OBJECT ${name}.cc)
+  set_target_properties(${name} PROPERTIES COMPILE_FLAGS "-S ${ASM_TEST_FLAGS}")
+  set(ASM_OUTPUT_FILE "${CMAKE_CURRENT_BINARY_DIR}/${name}.s")
+  add_custom_target(copy_${name} ALL
+      COMMAND ${PROJECT_SOURCE_DIR}/tools/strip_asm.py
+        $<TARGET_OBJECTS:${name}>
+        ${ASM_OUTPUT_FILE}
+      BYPRODUCTS ${ASM_OUTPUT_FILE})
+  add_dependencies(copy_${name} ${name})
+  if (NOT ARG_CHECK_PREFIXES)
+    set(ARG_CHECK_PREFIXES "CHECK")
+  endif()
+  foreach(prefix ${ARG_CHECK_PREFIXES})
+    add_test(NAME run_${name}_${prefix}
+        COMMAND
+          ${LLVM_FILECHECK_EXE} ${name}.cc
+          --input-file=${ASM_OUTPUT_FILE}
+          --check-prefixes=CHECK,CHECK-${ASM_TEST_COMPILER}
+        WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR})
+  endforeach()
+endmacro()
+
diff --git a/src/third_party/google_benchmark/test/BUILD b/src/third_party/google_benchmark/test/BUILD
new file mode 100644
index 0000000..9bb8cb0
--- /dev/null
+++ b/src/third_party/google_benchmark/test/BUILD
@@ -0,0 +1,73 @@
+TEST_COPTS = [
+    "-pedantic",
+    "-pedantic-errors",
+    "-std=c++11",
+    "-Wall",
+    "-Wextra",
+    "-Wshadow",
+    #    "-Wshorten-64-to-32",
+    "-Wfloat-equal",
+    "-fstrict-aliasing",
+]
+
+PER_SRC_COPTS = ({
+    "cxx03_test.cc": ["-std=c++03"],
+    # Some of the issues with DoNotOptimize only occur when optimization is enabled
+    "donotoptimize_test.cc": ["-O3"],
+})
+
+TEST_ARGS = ["--benchmark_min_time=0.01"]
+
+PER_SRC_TEST_ARGS = ({
+    "user_counters_tabular_test.cc": ["--benchmark_counters_tabular=true"],
+})
+
+load("@rules_cc//cc:defs.bzl", "cc_library", "cc_test")
+
+cc_library(
+    name = "output_test_helper",
+    testonly = 1,
+    srcs = ["output_test_helper.cc"],
+    hdrs = ["output_test.h"],
+    copts = TEST_COPTS,
+    deps = [
+        "//:benchmark",
+        "//:benchmark_internal_headers",
+    ],
+)
+
+[
+    cc_test(
+        name = test_src[:-len(".cc")],
+        size = "small",
+        srcs = [test_src],
+        args = TEST_ARGS + PER_SRC_TEST_ARGS.get(test_src, []),
+        copts = TEST_COPTS + PER_SRC_COPTS.get(test_src, []),
+        deps = [
+            ":output_test_helper",
+            "//:benchmark",
+            "//:benchmark_internal_headers",
+            "@com_google_googletest//:gtest",
+        ] + (
+            ["@com_google_googletest//:gtest_main"] if (test_src[-len("gtest.cc"):] == "gtest.cc") else []
+        ),
+        # FIXME: Add support for assembly tests to bazel.
+        # See Issue #556
+        # https://github.com/google/benchmark/issues/556
+    )
+    for test_src in glob(
+        ["*test.cc"],
+        exclude = [
+            "*_assembly_test.cc",
+            "link_main_test.cc",
+        ],
+    )
+]
+
+cc_test(
+    name = "link_main_test",
+    size = "small",
+    srcs = ["link_main_test.cc"],
+    copts = TEST_COPTS,
+    deps = ["//:benchmark_main"],
+)
diff --git a/src/third_party/google_benchmark/test/CMakeLists.txt b/src/third_party/google_benchmark/test/CMakeLists.txt
new file mode 100644
index 0000000..ddcb1a1
--- /dev/null
+++ b/src/third_party/google_benchmark/test/CMakeLists.txt
@@ -0,0 +1,259 @@
+# Enable the tests
+
+find_package(Threads REQUIRED)
+include(CheckCXXCompilerFlag)
+
+# NOTE: Some tests use `<cassert>` to perform the test. Therefore we must
+# strip -DNDEBUG from the default CMake flags in non-Debug configurations.
+string(TOUPPER "${CMAKE_BUILD_TYPE}" uppercase_CMAKE_BUILD_TYPE)
+if( NOT uppercase_CMAKE_BUILD_TYPE STREQUAL "DEBUG" )
+  add_definitions( -UNDEBUG )
+  add_definitions(-DTEST_BENCHMARK_LIBRARY_HAS_NO_ASSERTIONS)
+  # Also remove /D NDEBUG to avoid MSVC warnings about conflicting defines.
+  foreach (flags_var_to_scrub
+      CMAKE_CXX_FLAGS_RELEASE
+      CMAKE_CXX_FLAGS_RELWITHDEBINFO
+      CMAKE_CXX_FLAGS_MINSIZEREL
+      CMAKE_C_FLAGS_RELEASE
+      CMAKE_C_FLAGS_RELWITHDEBINFO
+      CMAKE_C_FLAGS_MINSIZEREL)
+    string (REGEX REPLACE "(^| )[/-]D *NDEBUG($| )" " "
+      "${flags_var_to_scrub}" "${${flags_var_to_scrub}}")
+  endforeach()
+endif()
+
+check_cxx_compiler_flag(-O3 BENCHMARK_HAS_O3_FLAG)
+set(BENCHMARK_O3_FLAG "")
+if (BENCHMARK_HAS_O3_FLAG)
+  set(BENCHMARK_O3_FLAG "-O3")
+endif()
+
+# NOTE: These flags must be added after find_package(Threads REQUIRED) otherwise
+# they will break the configuration check.
+if (DEFINED BENCHMARK_CXX_LINKER_FLAGS)
+  list(APPEND CMAKE_EXE_LINKER_FLAGS ${BENCHMARK_CXX_LINKER_FLAGS})
+endif()
+
+add_library(output_test_helper STATIC output_test_helper.cc output_test.h)
+
+macro(compile_benchmark_test name)
+  add_executable(${name} "${name}.cc")
+  target_link_libraries(${name} benchmark ${CMAKE_THREAD_LIBS_INIT})
+endmacro(compile_benchmark_test)
+
+macro(compile_benchmark_test_with_main name)
+  add_executable(${name} "${name}.cc")
+  target_link_libraries(${name} benchmark_main)
+endmacro(compile_benchmark_test_with_main)
+
+macro(compile_output_test name)
+  add_executable(${name} "${name}.cc" output_test.h)
+  target_link_libraries(${name} output_test_helper benchmark
+          ${BENCHMARK_CXX_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
+endmacro(compile_output_test)
+
+# Demonstration executable
+compile_benchmark_test(benchmark_test)
+add_test(NAME benchmark COMMAND benchmark_test --benchmark_min_time=0.01)
+
+compile_benchmark_test(filter_test)
+macro(add_filter_test name filter expect)
+  add_test(NAME ${name} COMMAND filter_test --benchmark_min_time=0.01 --benchmark_filter=${filter} ${expect})
+  add_test(NAME ${name}_list_only COMMAND filter_test --benchmark_list_tests --benchmark_filter=${filter} ${expect})
+endmacro(add_filter_test)
+
+add_filter_test(filter_simple "Foo" 3)
+add_filter_test(filter_simple_negative "-Foo" 2)
+add_filter_test(filter_suffix "BM_.*" 4)
+add_filter_test(filter_suffix_negative "-BM_.*" 1)
+add_filter_test(filter_regex_all ".*" 5)
+add_filter_test(filter_regex_all_negative "-.*" 0)
+add_filter_test(filter_regex_blank "" 5)
+add_filter_test(filter_regex_blank_negative "-" 0)
+add_filter_test(filter_regex_none "monkey" 0)
+add_filter_test(filter_regex_none_negative "-monkey" 5)
+add_filter_test(filter_regex_wildcard ".*Foo.*" 3)
+add_filter_test(filter_regex_wildcard_negative "-.*Foo.*" 2)
+add_filter_test(filter_regex_begin "^BM_.*" 4)
+add_filter_test(filter_regex_begin_negative "-^BM_.*" 1)
+add_filter_test(filter_regex_begin2 "^N" 1)
+add_filter_test(filter_regex_begin2_negative "-^N" 4)
+add_filter_test(filter_regex_end ".*Ba$" 1)
+add_filter_test(filter_regex_end_negative "-.*Ba$" 4)
+
+compile_benchmark_test(options_test)
+add_test(NAME options_benchmarks COMMAND options_test --benchmark_min_time=0.01)
+
+compile_benchmark_test(basic_test)
+add_test(NAME basic_benchmark COMMAND basic_test --benchmark_min_time=0.01)
+
+compile_benchmark_test(diagnostics_test)
+add_test(NAME diagnostics_test COMMAND diagnostics_test --benchmark_min_time=0.01)
+
+compile_benchmark_test(skip_with_error_test)
+add_test(NAME skip_with_error_test COMMAND skip_with_error_test --benchmark_min_time=0.01)
+
+compile_benchmark_test(donotoptimize_test)
+# Some of the issues with DoNotOptimize only occur when optimization is enabled
+check_cxx_compiler_flag(-O3 BENCHMARK_HAS_O3_FLAG)
+if (BENCHMARK_HAS_O3_FLAG)
+  set_target_properties(donotoptimize_test PROPERTIES COMPILE_FLAGS "-O3")
+endif()
+add_test(NAME donotoptimize_test COMMAND donotoptimize_test --benchmark_min_time=0.01)
+
+compile_benchmark_test(fixture_test)
+add_test(NAME fixture_test COMMAND fixture_test --benchmark_min_time=0.01)
+
+compile_benchmark_test(register_benchmark_test)
+add_test(NAME register_benchmark_test COMMAND register_benchmark_test --benchmark_min_time=0.01)
+
+compile_benchmark_test(map_test)
+add_test(NAME map_test COMMAND map_test --benchmark_min_time=0.01)
+
+compile_benchmark_test(multiple_ranges_test)
+add_test(NAME multiple_ranges_test COMMAND multiple_ranges_test --benchmark_min_time=0.01)
+
+compile_benchmark_test_with_main(link_main_test)
+add_test(NAME link_main_test COMMAND link_main_test --benchmark_min_time=0.01)
+
+compile_output_test(reporter_output_test)
+add_test(NAME reporter_output_test COMMAND reporter_output_test --benchmark_min_time=0.01)
+
+compile_output_test(templated_fixture_test)
+add_test(NAME templated_fixture_test COMMAND templated_fixture_test --benchmark_min_time=0.01)
+
+compile_output_test(user_counters_test)
+add_test(NAME user_counters_test COMMAND user_counters_test --benchmark_min_time=0.01)
+
+compile_output_test(internal_threading_test)
+add_test(NAME internal_threading_test COMMAND internal_threading_test --benchmark_min_time=0.01)
+
+compile_output_test(report_aggregates_only_test)
+add_test(NAME report_aggregates_only_test COMMAND report_aggregates_only_test --benchmark_min_time=0.01)
+
+compile_output_test(display_aggregates_only_test)
+add_test(NAME display_aggregates_only_test COMMAND display_aggregates_only_test --benchmark_min_time=0.01)
+
+compile_output_test(user_counters_tabular_test)
+add_test(NAME user_counters_tabular_test COMMAND user_counters_tabular_test --benchmark_counters_tabular=true --benchmark_min_time=0.01)
+
+compile_output_test(user_counters_thousands_test)
+add_test(NAME user_counters_thousands_test COMMAND user_counters_thousands_test --benchmark_min_time=0.01)
+
+compile_output_test(memory_manager_test)
+add_test(NAME memory_manager_test COMMAND memory_manager_test --benchmark_min_time=0.01)
+
+check_cxx_compiler_flag(-std=c++03 BENCHMARK_HAS_CXX03_FLAG)
+if (BENCHMARK_HAS_CXX03_FLAG)
+  compile_benchmark_test(cxx03_test)
+  set_target_properties(cxx03_test
+      PROPERTIES
+      COMPILE_FLAGS "-std=c++03")
+  # libstdc++ provides different definitions within <map> between dialects. When
+  # LTO is enabled and -Werror is specified GCC diagnoses this ODR violation
+  # causing the test to fail to compile. To prevent this we explicitly disable
+  # the warning.
+  check_cxx_compiler_flag(-Wno-odr BENCHMARK_HAS_WNO_ODR)
+  if (BENCHMARK_ENABLE_LTO AND BENCHMARK_HAS_WNO_ODR)
+    set_target_properties(cxx03_test
+        PROPERTIES
+        LINK_FLAGS "-Wno-odr")
+  endif()
+  add_test(NAME cxx03 COMMAND cxx03_test --benchmark_min_time=0.01)
+endif()
+
+# Attempt to work around flaky test failures when running on Appveyor servers.
+if (DEFINED ENV{APPVEYOR})
+  set(COMPLEXITY_MIN_TIME "0.5")
+else()
+  set(COMPLEXITY_MIN_TIME "0.01")
+endif()
+compile_output_test(complexity_test)
+add_test(NAME complexity_benchmark COMMAND complexity_test --benchmark_min_time=${COMPLEXITY_MIN_TIME})
+
+###############################################################################
+# GoogleTest Unit Tests
+###############################################################################
+
+if (BENCHMARK_ENABLE_GTEST_TESTS)
+  macro(compile_gtest name)
+    add_executable(${name} "${name}.cc")
+    target_link_libraries(${name} benchmark
+        gmock_main ${CMAKE_THREAD_LIBS_INIT})
+  endmacro(compile_gtest)
+
+  macro(add_gtest name)
+    compile_gtest(${name})
+    add_test(NAME ${name} COMMAND ${name})
+  endmacro()
+
+  add_gtest(benchmark_gtest)
+  add_gtest(benchmark_name_gtest)
+  add_gtest(commandlineflags_gtest)
+  add_gtest(statistics_gtest)
+  add_gtest(string_util_gtest)
+endif(BENCHMARK_ENABLE_GTEST_TESTS)
+
+###############################################################################
+# Assembly Unit Tests
+###############################################################################
+
+if (BENCHMARK_ENABLE_ASSEMBLY_TESTS)
+  if (NOT LLVM_FILECHECK_EXE)
+    message(FATAL_ERROR "LLVM FileCheck is required when including this file")
+  endif()
+  include(AssemblyTests.cmake)
+  add_filecheck_test(donotoptimize_assembly_test)
+  add_filecheck_test(state_assembly_test)
+  add_filecheck_test(clobber_memory_assembly_test)
+endif()
+
+
+
+###############################################################################
+# Code Coverage Configuration
+###############################################################################
+
+# Add the coverage command(s)
+if(CMAKE_BUILD_TYPE)
+  string(TOLOWER ${CMAKE_BUILD_TYPE} CMAKE_BUILD_TYPE_LOWER)
+endif()
+if (${CMAKE_BUILD_TYPE_LOWER} MATCHES "coverage")
+  find_program(GCOV gcov)
+  find_program(LCOV lcov)
+  find_program(GENHTML genhtml)
+  find_program(CTEST ctest)
+  if (GCOV AND LCOV AND GENHTML AND CTEST AND HAVE_CXX_FLAG_COVERAGE)
+    add_custom_command(
+      OUTPUT ${CMAKE_BINARY_DIR}/lcov/index.html
+      COMMAND ${LCOV} -q -z -d .
+      COMMAND ${LCOV} -q --no-external -c -b "${CMAKE_SOURCE_DIR}" -d . -o before.lcov -i
+      COMMAND ${CTEST} --force-new-ctest-process
+      COMMAND ${LCOV} -q --no-external -c -b "${CMAKE_SOURCE_DIR}" -d . -o after.lcov
+      COMMAND ${LCOV} -q -a before.lcov -a after.lcov --output-file final.lcov
+      COMMAND ${LCOV} -q -r final.lcov "'${CMAKE_SOURCE_DIR}/test/*'" -o final.lcov
+      COMMAND ${GENHTML} final.lcov -o lcov --demangle-cpp --sort -p "${CMAKE_BINARY_DIR}" -t benchmark
+      DEPENDS filter_test benchmark_test options_test basic_test fixture_test cxx03_test complexity_test
+      WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
+      COMMENT "Running LCOV"
+    )
+    add_custom_target(coverage
+      DEPENDS ${CMAKE_BINARY_DIR}/lcov/index.html
+      COMMENT "LCOV report at lcov/index.html"
+    )
+    message(STATUS "Coverage command added")
+  else()
+    if (HAVE_CXX_FLAG_COVERAGE)
+      set(CXX_FLAG_COVERAGE_MESSAGE supported)
+    else()
+      set(CXX_FLAG_COVERAGE_MESSAGE unavailable)
+    endif()
+    message(WARNING
+      "Coverage not available:\n"
+      "  gcov: ${GCOV}\n"
+      "  lcov: ${LCOV}\n"
+      "  genhtml: ${GENHTML}\n"
+      "  ctest: ${CTEST}\n"
+      "  --coverage flag: ${CXX_FLAG_COVERAGE_MESSAGE}")
+  endif()
+endif()
diff --git a/src/third_party/google_benchmark/test/basic_test.cc b/src/third_party/google_benchmark/test/basic_test.cc
new file mode 100644
index 0000000..5f3dd1a
--- /dev/null
+++ b/src/third_party/google_benchmark/test/basic_test.cc
@@ -0,0 +1,136 @@
+
+#include "benchmark/benchmark.h"
+
+#define BASIC_BENCHMARK_TEST(x) BENCHMARK(x)->Arg(8)->Arg(512)->Arg(8192)
+
+void BM_empty(benchmark::State& state) {
+  for (auto _ : state) {
+    benchmark::DoNotOptimize(state.iterations());
+  }
+}
+BENCHMARK(BM_empty);
+BENCHMARK(BM_empty)->ThreadPerCpu();
+
+void BM_spin_empty(benchmark::State& state) {
+  for (auto _ : state) {
+    for (int x = 0; x < state.range(0); ++x) {
+      benchmark::DoNotOptimize(x);
+    }
+  }
+}
+BASIC_BENCHMARK_TEST(BM_spin_empty);
+BASIC_BENCHMARK_TEST(BM_spin_empty)->ThreadPerCpu();
+
+void BM_spin_pause_before(benchmark::State& state) {
+  for (int i = 0; i < state.range(0); ++i) {
+    benchmark::DoNotOptimize(i);
+  }
+  for (auto _ : state) {
+    for (int i = 0; i < state.range(0); ++i) {
+      benchmark::DoNotOptimize(i);
+    }
+  }
+}
+BASIC_BENCHMARK_TEST(BM_spin_pause_before);
+BASIC_BENCHMARK_TEST(BM_spin_pause_before)->ThreadPerCpu();
+
+void BM_spin_pause_during(benchmark::State& state) {
+  for (auto _ : state) {
+    state.PauseTiming();
+    for (int i = 0; i < state.range(0); ++i) {
+      benchmark::DoNotOptimize(i);
+    }
+    state.ResumeTiming();
+    for (int i = 0; i < state.range(0); ++i) {
+      benchmark::DoNotOptimize(i);
+    }
+  }
+}
+BASIC_BENCHMARK_TEST(BM_spin_pause_during);
+BASIC_BENCHMARK_TEST(BM_spin_pause_during)->ThreadPerCpu();
+
+void BM_pause_during(benchmark::State& state) {
+  for (auto _ : state) {
+    state.PauseTiming();
+    state.ResumeTiming();
+  }
+}
+BENCHMARK(BM_pause_during);
+BENCHMARK(BM_pause_during)->ThreadPerCpu();
+BENCHMARK(BM_pause_during)->UseRealTime();
+BENCHMARK(BM_pause_during)->UseRealTime()->ThreadPerCpu();
+
+void BM_spin_pause_after(benchmark::State& state) {
+  for (auto _ : state) {
+    for (int i = 0; i < state.range(0); ++i) {
+      benchmark::DoNotOptimize(i);
+    }
+  }
+  for (int i = 0; i < state.range(0); ++i) {
+    benchmark::DoNotOptimize(i);
+  }
+}
+BASIC_BENCHMARK_TEST(BM_spin_pause_after);
+BASIC_BENCHMARK_TEST(BM_spin_pause_after)->ThreadPerCpu();
+
+void BM_spin_pause_before_and_after(benchmark::State& state) {
+  for (int i = 0; i < state.range(0); ++i) {
+    benchmark::DoNotOptimize(i);
+  }
+  for (auto _ : state) {
+    for (int i = 0; i < state.range(0); ++i) {
+      benchmark::DoNotOptimize(i);
+    }
+  }
+  for (int i = 0; i < state.range(0); ++i) {
+    benchmark::DoNotOptimize(i);
+  }
+}
+BASIC_BENCHMARK_TEST(BM_spin_pause_before_and_after);
+BASIC_BENCHMARK_TEST(BM_spin_pause_before_and_after)->ThreadPerCpu();
+
+void BM_empty_stop_start(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+}
+BENCHMARK(BM_empty_stop_start);
+BENCHMARK(BM_empty_stop_start)->ThreadPerCpu();
+
+
+void BM_KeepRunning(benchmark::State& state) {
+  benchmark::IterationCount iter_count = 0;
+  assert(iter_count == state.iterations());
+  while (state.KeepRunning()) {
+    ++iter_count;
+  }
+  assert(iter_count == state.iterations());
+}
+BENCHMARK(BM_KeepRunning);
+
+void BM_KeepRunningBatch(benchmark::State& state) {
+  // Choose a prime batch size to avoid evenly dividing max_iterations.
+  const benchmark::IterationCount batch_size = 101;
+  benchmark::IterationCount iter_count = 0;
+  while (state.KeepRunningBatch(batch_size)) {
+    iter_count += batch_size;
+  }
+  assert(state.iterations() == iter_count);
+}
+BENCHMARK(BM_KeepRunningBatch);
+
+void BM_RangedFor(benchmark::State& state) {
+  benchmark::IterationCount iter_count = 0;
+  for (auto _ : state) {
+    ++iter_count;
+  }
+  assert(iter_count == state.max_iterations);
+}
+BENCHMARK(BM_RangedFor);
+
+// Ensure that StateIterator provides all the necessary typedefs required to
+// instantiate std::iterator_traits.
+static_assert(std::is_same<
+  typename std::iterator_traits<benchmark::State::StateIterator>::value_type,
+  typename benchmark::State::StateIterator::value_type>::value, "");
+
+BENCHMARK_MAIN();
diff --git a/src/third_party/google_benchmark/test/benchmark_gtest.cc b/src/third_party/google_benchmark/test/benchmark_gtest.cc
new file mode 100644
index 0000000..9557b20
--- /dev/null
+++ b/src/third_party/google_benchmark/test/benchmark_gtest.cc
@@ -0,0 +1,128 @@
+#include <vector>
+
+#include "../src/benchmark_register.h"
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+
+namespace benchmark {
+namespace internal {
+namespace {
+
+TEST(AddRangeTest, Simple) {
+  std::vector<int> dst;
+  AddRange(&dst, 1, 2, 2);
+  EXPECT_THAT(dst, testing::ElementsAre(1, 2));
+}
+
+TEST(AddRangeTest, Simple64) {
+  std::vector<int64_t> dst;
+  AddRange(&dst, static_cast<int64_t>(1), static_cast<int64_t>(2), 2);
+  EXPECT_THAT(dst, testing::ElementsAre(1, 2));
+}
+
+TEST(AddRangeTest, Advanced) {
+  std::vector<int> dst;
+  AddRange(&dst, 5, 15, 2);
+  EXPECT_THAT(dst, testing::ElementsAre(5, 8, 15));
+}
+
+TEST(AddRangeTest, Advanced64) {
+  std::vector<int64_t> dst;
+  AddRange(&dst, static_cast<int64_t>(5), static_cast<int64_t>(15), 2);
+  EXPECT_THAT(dst, testing::ElementsAre(5, 8, 15));
+}
+
+TEST(AddRangeTest, FullRange8) {
+  std::vector<int8_t> dst;
+  AddRange(&dst, int8_t{1}, std::numeric_limits<int8_t>::max(), 8);
+  EXPECT_THAT(dst, testing::ElementsAre(1, 8, 64, 127));
+}
+
+TEST(AddRangeTest, FullRange64) {
+  std::vector<int64_t> dst;
+  AddRange(&dst, int64_t{1}, std::numeric_limits<int64_t>::max(), 1024);
+  EXPECT_THAT(
+      dst, testing::ElementsAre(1LL, 1024LL, 1048576LL, 1073741824LL,
+                                1099511627776LL, 1125899906842624LL,
+                                1152921504606846976LL, 9223372036854775807LL));
+}
+
+TEST(AddRangeTest, NegativeRanges) {
+  std::vector<int> dst;
+  AddRange(&dst, -8, 0, 2);
+  EXPECT_THAT(dst, testing::ElementsAre(-8, -4, -2, -1, 0));
+}
+
+TEST(AddRangeTest, StrictlyNegative) {
+  std::vector<int> dst;
+  AddRange(&dst, -8, -1, 2);
+  EXPECT_THAT(dst, testing::ElementsAre(-8, -4, -2, -1));
+}
+
+TEST(AddRangeTest, SymmetricNegativeRanges) {
+  std::vector<int> dst;
+  AddRange(&dst, -8, 8, 2);
+  EXPECT_THAT(dst, testing::ElementsAre(-8, -4, -2, -1, 0, 1, 2, 4, 8));
+}
+
+TEST(AddRangeTest, SymmetricNegativeRangesOddMult) {
+  std::vector<int> dst;
+  AddRange(&dst, -30, 32, 5);
+  EXPECT_THAT(dst, testing::ElementsAre(-30, -25, -5, -1, 0, 1, 5, 25, 32));
+}
+
+TEST(AddRangeTest, NegativeRangesAsymmetric) {
+  std::vector<int> dst;
+  AddRange(&dst, -3, 5, 2);
+  EXPECT_THAT(dst, testing::ElementsAre(-3, -2, -1, 0, 1, 2, 4, 5));
+}
+
+TEST(AddRangeTest, NegativeRangesLargeStep) {
+  // Always include -1, 0, 1 when crossing zero.
+  std::vector<int> dst;
+  AddRange(&dst, -8, 8, 10);
+  EXPECT_THAT(dst, testing::ElementsAre(-8, -1, 0, 1, 8));
+}
+
+TEST(AddRangeTest, ZeroOnlyRange) {
+  std::vector<int> dst;
+  AddRange(&dst, 0, 0, 2);
+  EXPECT_THAT(dst, testing::ElementsAre(0));
+}
+
+TEST(AddRangeTest, NegativeRange64) {
+  std::vector<int64_t> dst;
+  AddRange<int64_t>(&dst, -4, 4, 2);
+  EXPECT_THAT(dst, testing::ElementsAre(-4, -2, -1, 0, 1, 2, 4));
+}
+
+TEST(AddRangeTest, NegativeRangePreservesExistingOrder) {
+  // If elements already exist in the range, ensure we don't change
+  // their ordering by adding negative values.
+  std::vector<int64_t> dst = {1, 2, 3};
+  AddRange<int64_t>(&dst, -2, 2, 2);
+  EXPECT_THAT(dst, testing::ElementsAre(1, 2, 3, -2, -1, 0, 1, 2));
+}
+
+TEST(AddRangeTest, FullNegativeRange64) {
+  std::vector<int64_t> dst;
+  const auto min = std::numeric_limits<int64_t>::min();
+  const auto max = std::numeric_limits<int64_t>::max();
+  AddRange(&dst, min, max, 1024);
+  EXPECT_THAT(
+      dst, testing::ElementsAreArray(std::vector<int64_t>{
+               min, -1152921504606846976LL, -1125899906842624LL,
+               -1099511627776LL, -1073741824LL, -1048576LL, -1024LL, -1LL, 0LL,
+               1LL, 1024LL, 1048576LL, 1073741824LL, 1099511627776LL,
+               1125899906842624LL, 1152921504606846976LL, max}));
+}
+
+TEST(AddRangeTest, Simple8) {
+  std::vector<int8_t> dst;
+  AddRange<int8_t>(&dst, 1, 8, 2);
+  EXPECT_THAT(dst, testing::ElementsAre(1, 2, 4, 8));
+}
+
+}  // namespace
+}  // namespace internal
+}  // namespace benchmark
diff --git a/src/third_party/google_benchmark/test/benchmark_name_gtest.cc b/src/third_party/google_benchmark/test/benchmark_name_gtest.cc
new file mode 100644
index 0000000..afb401c
--- /dev/null
+++ b/src/third_party/google_benchmark/test/benchmark_name_gtest.cc
@@ -0,0 +1,74 @@
+#include "benchmark/benchmark.h"
+#include "gtest/gtest.h"
+
+namespace {
+
+using namespace benchmark;
+using namespace benchmark::internal;
+
+TEST(BenchmarkNameTest, Empty) {
+  const auto name = BenchmarkName();
+  EXPECT_EQ(name.str(), std::string());
+}
+
+TEST(BenchmarkNameTest, FunctionName) {
+  auto name = BenchmarkName();
+  name.function_name = "function_name";
+  EXPECT_EQ(name.str(), "function_name");
+}
+
+TEST(BenchmarkNameTest, FunctionNameAndArgs) {
+  auto name = BenchmarkName();
+  name.function_name = "function_name";
+  name.args = "some_args:3/4/5";
+  EXPECT_EQ(name.str(), "function_name/some_args:3/4/5");
+}
+
+TEST(BenchmarkNameTest, MinTime) {
+  auto name = BenchmarkName();
+  name.function_name = "function_name";
+  name.args = "some_args:3/4";
+  name.min_time = "min_time:3.4s";
+  EXPECT_EQ(name.str(), "function_name/some_args:3/4/min_time:3.4s");
+}
+
+TEST(BenchmarkNameTest, Iterations) {
+  auto name = BenchmarkName();
+  name.function_name = "function_name";
+  name.min_time = "min_time:3.4s";
+  name.iterations = "iterations:42";
+  EXPECT_EQ(name.str(), "function_name/min_time:3.4s/iterations:42");
+}
+
+TEST(BenchmarkNameTest, Repetitions) {
+  auto name = BenchmarkName();
+  name.function_name = "function_name";
+  name.min_time = "min_time:3.4s";
+  name.repetitions = "repetitions:24";
+  EXPECT_EQ(name.str(), "function_name/min_time:3.4s/repetitions:24");
+}
+
+TEST(BenchmarkNameTest, TimeType) {
+  auto name = BenchmarkName();
+  name.function_name = "function_name";
+  name.min_time = "min_time:3.4s";
+  name.time_type = "hammer_time";
+  EXPECT_EQ(name.str(), "function_name/min_time:3.4s/hammer_time");
+}
+
+TEST(BenchmarkNameTest, Threads) {
+  auto name = BenchmarkName();
+  name.function_name = "function_name";
+  name.min_time = "min_time:3.4s";
+  name.threads = "threads:256";
+  EXPECT_EQ(name.str(), "function_name/min_time:3.4s/threads:256");
+}
+
+TEST(BenchmarkNameTest, TestEmptyFunctionName) {
+  auto name = BenchmarkName();
+  name.args = "first:3/second:4";
+  name.threads = "threads:22";
+  EXPECT_EQ(name.str(), "first:3/second:4/threads:22");
+}
+
+}  // end namespace
diff --git a/src/third_party/google_benchmark/test/benchmark_test.cc b/src/third_party/google_benchmark/test/benchmark_test.cc
new file mode 100644
index 0000000..3cd4f55
--- /dev/null
+++ b/src/third_party/google_benchmark/test/benchmark_test.cc
@@ -0,0 +1,245 @@
+#include "benchmark/benchmark.h"
+
+#include <assert.h>
+#include <math.h>
+#include <stdint.h>
+
+#include <chrono>
+#include <cstdlib>
+#include <iostream>
+#include <limits>
+#include <list>
+#include <map>
+#include <mutex>
+#include <set>
+#include <sstream>
+#include <string>
+#include <thread>
+#include <utility>
+#include <vector>
+
+#if defined(__GNUC__)
+#define BENCHMARK_NOINLINE __attribute__((noinline))
+#else
+#define BENCHMARK_NOINLINE
+#endif
+
+namespace {
+
+int BENCHMARK_NOINLINE Factorial(uint32_t n) {
+  return (n == 1) ? 1 : n * Factorial(n - 1);
+}
+
+double CalculatePi(int depth) {
+  double pi = 0.0;
+  for (int i = 0; i < depth; ++i) {
+    double numerator = static_cast<double>(((i % 2) * 2) - 1);
+    double denominator = static_cast<double>((2 * i) - 1);
+    pi += numerator / denominator;
+  }
+  return (pi - 1.0) * 4;
+}
+
+std::set<int64_t> ConstructRandomSet(int64_t size) {
+  std::set<int64_t> s;
+  for (int i = 0; i < size; ++i) s.insert(s.end(), i);
+  return s;
+}
+
+std::mutex test_vector_mu;
+std::vector<int>* test_vector = nullptr;
+
+}  // end namespace
+
+static void BM_Factorial(benchmark::State& state) {
+  int fac_42 = 0;
+  for (auto _ : state) fac_42 = Factorial(8);
+  // Prevent compiler optimizations
+  std::stringstream ss;
+  ss << fac_42;
+  state.SetLabel(ss.str());
+}
+BENCHMARK(BM_Factorial);
+BENCHMARK(BM_Factorial)->UseRealTime();
+
+static void BM_CalculatePiRange(benchmark::State& state) {
+  double pi = 0.0;
+  for (auto _ : state) pi = CalculatePi(static_cast<int>(state.range(0)));
+  std::stringstream ss;
+  ss << pi;
+  state.SetLabel(ss.str());
+}
+BENCHMARK_RANGE(BM_CalculatePiRange, 1, 1024 * 1024);
+
+static void BM_CalculatePi(benchmark::State& state) {
+  static const int depth = 1024;
+  for (auto _ : state) {
+    benchmark::DoNotOptimize(CalculatePi(static_cast<int>(depth)));
+  }
+}
+BENCHMARK(BM_CalculatePi)->Threads(8);
+BENCHMARK(BM_CalculatePi)->ThreadRange(1, 32);
+BENCHMARK(BM_CalculatePi)->ThreadPerCpu();
+
+static void BM_SetInsert(benchmark::State& state) {
+  std::set<int64_t> data;
+  for (auto _ : state) {
+    state.PauseTiming();
+    data = ConstructRandomSet(state.range(0));
+    state.ResumeTiming();
+    for (int j = 0; j < state.range(1); ++j) data.insert(rand());
+  }
+  state.SetItemsProcessed(state.iterations() * state.range(1));
+  state.SetBytesProcessed(state.iterations() * state.range(1) * sizeof(int));
+}
+
+// Test many inserts at once to reduce the total iterations needed. Otherwise, the slower,
+// non-timed part of each iteration will make the benchmark take forever.
+BENCHMARK(BM_SetInsert)->Ranges({{1 << 10, 8 << 10}, {128, 512}});
+
+template <typename Container,
+          typename ValueType = typename Container::value_type>
+static void BM_Sequential(benchmark::State& state) {
+  ValueType v = 42;
+  for (auto _ : state) {
+    Container c;
+    for (int64_t i = state.range(0); --i;) c.push_back(v);
+  }
+  const int64_t items_processed = state.iterations() * state.range(0);
+  state.SetItemsProcessed(items_processed);
+  state.SetBytesProcessed(items_processed * sizeof(v));
+}
+BENCHMARK_TEMPLATE2(BM_Sequential, std::vector<int>, int)
+    ->Range(1 << 0, 1 << 10);
+BENCHMARK_TEMPLATE(BM_Sequential, std::list<int>)->Range(1 << 0, 1 << 10);
+// Test the variadic version of BENCHMARK_TEMPLATE in C++11 and beyond.
+#ifdef BENCHMARK_HAS_CXX11
+BENCHMARK_TEMPLATE(BM_Sequential, std::vector<int>, int)->Arg(512);
+#endif
+
+static void BM_StringCompare(benchmark::State& state) {
+  size_t len = static_cast<size_t>(state.range(0));
+  std::string s1(len, '-');
+  std::string s2(len, '-');
+  for (auto _ : state) benchmark::DoNotOptimize(s1.compare(s2));
+}
+BENCHMARK(BM_StringCompare)->Range(1, 1 << 20);
+
+static void BM_SetupTeardown(benchmark::State& state) {
+  if (state.thread_index == 0) {
+    // No need to lock test_vector_mu here as this is running single-threaded.
+    test_vector = new std::vector<int>();
+  }
+  int i = 0;
+  for (auto _ : state) {
+    std::lock_guard<std::mutex> l(test_vector_mu);
+    if (i % 2 == 0)
+      test_vector->push_back(i);
+    else
+      test_vector->pop_back();
+    ++i;
+  }
+  if (state.thread_index == 0) {
+    delete test_vector;
+  }
+}
+BENCHMARK(BM_SetupTeardown)->ThreadPerCpu();
+
+static void BM_LongTest(benchmark::State& state) {
+  double tracker = 0.0;
+  for (auto _ : state) {
+    for (int i = 0; i < state.range(0); ++i)
+      benchmark::DoNotOptimize(tracker += i);
+  }
+}
+BENCHMARK(BM_LongTest)->Range(1 << 16, 1 << 28);
+
+static void BM_ParallelMemset(benchmark::State& state) {
+  int64_t size = state.range(0) / static_cast<int64_t>(sizeof(int));
+  int thread_size = static_cast<int>(size) / state.threads;
+  int from = thread_size * state.thread_index;
+  int to = from + thread_size;
+
+  if (state.thread_index == 0) {
+    test_vector = new std::vector<int>(static_cast<size_t>(size));
+  }
+
+  for (auto _ : state) {
+    for (int i = from; i < to; i++) {
+      // No need to lock test_vector_mu as ranges
+      // do not overlap between threads.
+      benchmark::DoNotOptimize(test_vector->at(i) = 1);
+    }
+  }
+
+  if (state.thread_index == 0) {
+    delete test_vector;
+  }
+}
+BENCHMARK(BM_ParallelMemset)->Arg(10 << 20)->ThreadRange(1, 4);
+
+static void BM_ManualTiming(benchmark::State& state) {
+  int64_t slept_for = 0;
+  int64_t microseconds = state.range(0);
+  std::chrono::duration<double, std::micro> sleep_duration{
+      static_cast<double>(microseconds)};
+
+  for (auto _ : state) {
+    auto start = std::chrono::high_resolution_clock::now();
+    // Simulate some useful workload with a sleep
+    std::this_thread::sleep_for(
+        std::chrono::duration_cast<std::chrono::nanoseconds>(sleep_duration));
+    auto end = std::chrono::high_resolution_clock::now();
+
+    auto elapsed =
+        std::chrono::duration_cast<std::chrono::duration<double>>(end - start);
+
+    state.SetIterationTime(elapsed.count());
+    slept_for += microseconds;
+  }
+  state.SetItemsProcessed(slept_for);
+}
+BENCHMARK(BM_ManualTiming)->Range(1, 1 << 14)->UseRealTime();
+BENCHMARK(BM_ManualTiming)->Range(1, 1 << 14)->UseManualTime();
+
+#ifdef BENCHMARK_HAS_CXX11
+
+template <class... Args>
+void BM_with_args(benchmark::State& state, Args&&...) {
+  for (auto _ : state) {
+  }
+}
+BENCHMARK_CAPTURE(BM_with_args, int_test, 42, 43, 44);
+BENCHMARK_CAPTURE(BM_with_args, string_and_pair_test, std::string("abc"),
+                  std::pair<int, double>(42, 3.8));
+
+void BM_non_template_args(benchmark::State& state, int, double) {
+  while(state.KeepRunning()) {}
+}
+BENCHMARK_CAPTURE(BM_non_template_args, basic_test, 0, 0);
+
+#endif  // BENCHMARK_HAS_CXX11
+
+static void BM_DenseThreadRanges(benchmark::State& st) {
+  switch (st.range(0)) {
+    case 1:
+      assert(st.threads == 1 || st.threads == 2 || st.threads == 3);
+      break;
+    case 2:
+      assert(st.threads == 1 || st.threads == 3 || st.threads == 4);
+      break;
+    case 3:
+      assert(st.threads == 5 || st.threads == 8 || st.threads == 11 ||
+             st.threads == 14);
+      break;
+    default:
+      assert(false && "Invalid test case number");
+  }
+  while (st.KeepRunning()) {
+  }
+}
+BENCHMARK(BM_DenseThreadRanges)->Arg(1)->DenseThreadRange(1, 3);
+BENCHMARK(BM_DenseThreadRanges)->Arg(2)->DenseThreadRange(1, 4, 2);
+BENCHMARK(BM_DenseThreadRanges)->Arg(3)->DenseThreadRange(5, 14, 3);
+
+BENCHMARK_MAIN();
diff --git a/src/third_party/google_benchmark/test/clobber_memory_assembly_test.cc b/src/third_party/google_benchmark/test/clobber_memory_assembly_test.cc
new file mode 100644
index 0000000..f41911a
--- /dev/null
+++ b/src/third_party/google_benchmark/test/clobber_memory_assembly_test.cc
@@ -0,0 +1,64 @@
+#include <benchmark/benchmark.h>
+
+#ifdef __clang__
+#pragma clang diagnostic ignored "-Wreturn-type"
+#endif
+
+extern "C" {
+
+extern int ExternInt;
+extern int ExternInt2;
+extern int ExternInt3;
+
+}
+
+// CHECK-LABEL: test_basic:
+extern "C" void test_basic() {
+  int x;
+  benchmark::DoNotOptimize(&x);
+  x = 101;
+  benchmark::ClobberMemory();
+  // CHECK: leaq [[DEST:[^,]+]], %rax
+  // CHECK: movl $101, [[DEST]]
+  // CHECK: ret
+}
+
+// CHECK-LABEL: test_redundant_store:
+extern "C" void test_redundant_store() {
+  ExternInt = 3;
+  benchmark::ClobberMemory();
+  ExternInt = 51;
+  // CHECK-DAG: ExternInt
+  // CHECK-DAG: movl $3
+  // CHECK: movl $51
+}
+
+// CHECK-LABEL: test_redundant_read:
+extern "C" void test_redundant_read() {
+  int x;
+  benchmark::DoNotOptimize(&x);
+  x = ExternInt;
+  benchmark::ClobberMemory();
+  x = ExternInt2;
+  // CHECK: leaq [[DEST:[^,]+]], %rax
+  // CHECK: ExternInt(%rip)
+  // CHECK: movl %eax, [[DEST]]
+  // CHECK-NOT: ExternInt2
+  // CHECK: ret
+}
+
+// CHECK-LABEL: test_redundant_read2:
+extern "C" void test_redundant_read2() {
+  int x;
+  benchmark::DoNotOptimize(&x);
+  x = ExternInt;
+  benchmark::ClobberMemory();
+  x = ExternInt2;
+  benchmark::ClobberMemory();
+  // CHECK: leaq [[DEST:[^,]+]], %rax
+  // CHECK: ExternInt(%rip)
+  // CHECK: movl %eax, [[DEST]]
+  // CHECK: ExternInt2(%rip)
+  // CHECK: movl %eax, [[DEST]]
+  // CHECK: ret
+}
diff --git a/src/third_party/google_benchmark/test/commandlineflags_gtest.cc b/src/third_party/google_benchmark/test/commandlineflags_gtest.cc
new file mode 100644
index 0000000..36bdb44
--- /dev/null
+++ b/src/third_party/google_benchmark/test/commandlineflags_gtest.cc
@@ -0,0 +1,201 @@
+#include <cstdlib>
+
+#include "../src/commandlineflags.h"
+#include "../src/internal_macros.h"
+#include "gtest/gtest.h"
+
+namespace benchmark {
+namespace {
+
+#if defined(BENCHMARK_OS_WINDOWS)
+int setenv(const char* name, const char* value, int overwrite) {
+  if (!overwrite) {
+    // NOTE: getenv_s is far superior but not available under mingw.
+    char* env_value = getenv(name);
+    if (env_value == nullptr) {
+      return -1;
+    }
+  }
+  return _putenv_s(name, value);
+}
+
+int unsetenv(const char* name) {
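+  // _putenv_s with an empty value removes the variable from the Windows
+  // environment, which matches POSIX unsetenv closely enough for these tests.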
+  return _putenv_s(name, "");
+}
+
+#endif  // BENCHMARK_OS_WINDOWS
+
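+// NOTE: Flags named "foo" are read from the environment variable
+// "BENCHMARK_FOO" (the name is upper-cased and prefixed), which is why
+// "in_env" maps to "BENCHMARK_IN_ENV" in the tests below.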
+TEST(BoolFromEnv, Default) {
+  ASSERT_EQ(unsetenv("BENCHMARK_NOT_IN_ENV"), 0);
+  EXPECT_EQ(BoolFromEnv("not_in_env", true), true);
+}
+
+TEST(BoolFromEnv, False) {
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "0", 1), 0);
+  EXPECT_EQ(BoolFromEnv("in_env", true), false);
+  unsetenv("BENCHMARK_IN_ENV");
+
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "N", 1), 0);
+  EXPECT_EQ(BoolFromEnv("in_env", true), false);
+  unsetenv("BENCHMARK_IN_ENV");
+
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "n", 1), 0);
+  EXPECT_EQ(BoolFromEnv("in_env", true), false);
+  unsetenv("BENCHMARK_IN_ENV");
+
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "NO", 1), 0);
+  EXPECT_EQ(BoolFromEnv("in_env", true), false);
+  unsetenv("BENCHMARK_IN_ENV");
+
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "No", 1), 0);
+  EXPECT_EQ(BoolFromEnv("in_env", true), false);
+  unsetenv("BENCHMARK_IN_ENV");
+
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "no", 1), 0);
+  EXPECT_EQ(BoolFromEnv("in_env", true), false);
+  unsetenv("BENCHMARK_IN_ENV");
+
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "F", 1), 0);
+  EXPECT_EQ(BoolFromEnv("in_env", true), false);
+  unsetenv("BENCHMARK_IN_ENV");
+
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "f", 1), 0);
+  EXPECT_EQ(BoolFromEnv("in_env", true), false);
+  unsetenv("BENCHMARK_IN_ENV");
+
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "FALSE", 1), 0);
+  EXPECT_EQ(BoolFromEnv("in_env", true), false);
+  unsetenv("BENCHMARK_IN_ENV");
+
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "False", 1), 0);
+  EXPECT_EQ(BoolFromEnv("in_env", true), false);
+  unsetenv("BENCHMARK_IN_ENV");
+
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "false", 1), 0);
+  EXPECT_EQ(BoolFromEnv("in_env", true), false);
+  unsetenv("BENCHMARK_IN_ENV");
+
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "OFF", 1), 0);
+  EXPECT_EQ(BoolFromEnv("in_env", true), false);
+  unsetenv("BENCHMARK_IN_ENV");
+
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "Off", 1), 0);
+  EXPECT_EQ(BoolFromEnv("in_env", true), false);
+  unsetenv("BENCHMARK_IN_ENV");
+
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "off", 1), 0);
+  EXPECT_EQ(BoolFromEnv("in_env", true), false);
+  unsetenv("BENCHMARK_IN_ENV");
+}
+
+TEST(BoolFromEnv, True) {
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "1", 1), 0);
+  EXPECT_EQ(BoolFromEnv("in_env", false), true);
+  unsetenv("BENCHMARK_IN_ENV");
+
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "Y", 1), 0);
+  EXPECT_EQ(BoolFromEnv("in_env", false), true);
+  unsetenv("BENCHMARK_IN_ENV");
+
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "y", 1), 0);
+  EXPECT_EQ(BoolFromEnv("in_env", false), true);
+  unsetenv("BENCHMARK_IN_ENV");
+
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "YES", 1), 0);
+  EXPECT_EQ(BoolFromEnv("in_env", false), true);
+  unsetenv("BENCHMARK_IN_ENV");
+
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "Yes", 1), 0);
+  EXPECT_EQ(BoolFromEnv("in_env", false), true);
+  unsetenv("BENCHMARK_IN_ENV");
+
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "yes", 1), 0);
+  EXPECT_EQ(BoolFromEnv("in_env", false), true);
+  unsetenv("BENCHMARK_IN_ENV");
+
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "T", 1), 0);
+  EXPECT_EQ(BoolFromEnv("in_env", false), true);
+  unsetenv("BENCHMARK_IN_ENV");
+
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "t", 1), 0);
+  EXPECT_EQ(BoolFromEnv("in_env", false), true);
+  unsetenv("BENCHMARK_IN_ENV");
+
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "TRUE", 1), 0);
+  EXPECT_EQ(BoolFromEnv("in_env", false), true);
+  unsetenv("BENCHMARK_IN_ENV");
+
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "True", 1), 0);
+  EXPECT_EQ(BoolFromEnv("in_env", false), true);
+  unsetenv("BENCHMARK_IN_ENV");
+
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "true", 1), 0);
+  EXPECT_EQ(BoolFromEnv("in_env", false), true);
+  unsetenv("BENCHMARK_IN_ENV");
+
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "ON", 1), 0);
+  EXPECT_EQ(BoolFromEnv("in_env", false), true);
+  unsetenv("BENCHMARK_IN_ENV");
+
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "On", 1), 0);
+  EXPECT_EQ(BoolFromEnv("in_env", false), true);
+  unsetenv("BENCHMARK_IN_ENV");
+
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "on", 1), 0);
+  EXPECT_EQ(BoolFromEnv("in_env", false), true);
+  unsetenv("BENCHMARK_IN_ENV");
+
+#ifndef BENCHMARK_OS_WINDOWS
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "", 1), 0);
+  EXPECT_EQ(BoolFromEnv("in_env", false), true);
+  unsetenv("BENCHMARK_IN_ENV");
+#endif
+}
+
+TEST(Int32FromEnv, NotInEnv) {
+  ASSERT_EQ(unsetenv("BENCHMARK_NOT_IN_ENV"), 0);
+  EXPECT_EQ(Int32FromEnv("not_in_env", 42), 42);
+}
+
+TEST(Int32FromEnv, InvalidInteger) {
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "foo", 1), 0);
+  EXPECT_EQ(Int32FromEnv("in_env", 42), 42);
+  unsetenv("BENCHMARK_IN_ENV");
+}
+
+TEST(Int32FromEnv, ValidInteger) {
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "42", 1), 0);
+  EXPECT_EQ(Int32FromEnv("in_env", 64), 42);
+  unsetenv("BENCHMARK_IN_ENV");
+}
+
+TEST(DoubleFromEnv, NotInEnv) {
+  ASSERT_EQ(unsetenv("BENCHMARK_NOT_IN_ENV"), 0);
+  EXPECT_EQ(DoubleFromEnv("not_in_env", 0.51), 0.51);
+}
+
+TEST(DoubleFromEnv, InvalidReal) {
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "foo", 1), 0);
+  EXPECT_EQ(DoubleFromEnv("in_env", 0.51), 0.51);
+  unsetenv("BENCHMARK_IN_ENV");
+}
+
+TEST(DoubleFromEnv, ValidReal) {
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "0.51", 1), 0);
+  EXPECT_EQ(DoubleFromEnv("in_env", 0.71), 0.51);
+  unsetenv("BENCHMARK_IN_ENV");
+}
+
+TEST(StringFromEnv, Default) {
+  ASSERT_EQ(unsetenv("BENCHMARK_NOT_IN_ENV"), 0);
+  EXPECT_STREQ(StringFromEnv("not_in_env", "foo"), "foo");
+}
+
+TEST(StringFromEnv, Valid) {
+  ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "foo", 1), 0);
+  EXPECT_STREQ(StringFromEnv("in_env", "bar"), "foo");
+  unsetenv("BENCHMARK_IN_ENV");
+}
+
+}  // namespace
+}  // namespace benchmark
diff --git a/src/third_party/google_benchmark/test/complexity_test.cc b/src/third_party/google_benchmark/test/complexity_test.cc
new file mode 100644
index 0000000..5681fdc
--- /dev/null
+++ b/src/third_party/google_benchmark/test/complexity_test.cc
@@ -0,0 +1,213 @@
+#undef NDEBUG
+#include <algorithm>
+#include <cassert>
+#include <cmath>
+#include <cstdlib>
+#include <vector>
+#include "benchmark/benchmark.h"
+#include "output_test.h"
+
+namespace {
+
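+// Initializing a namespace-scope dummy int runs AddComplexityTest during
+// static initialization, so the expected-output patterns are registered
+// before main() calls RunOutputTests().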
+#define ADD_COMPLEXITY_CASES(...) \
+  int CONCAT(dummy, __LINE__) = AddComplexityTest(__VA_ARGS__)
+
+int AddComplexityTest(std::string test_name, std::string big_o_test_name,
+                      std::string rms_test_name, std::string big_o) {
+  SetSubstitutions({{"%name", test_name},
+                    {"%bigo_name", big_o_test_name},
+                    {"%rms_name", rms_test_name},
+                    {"%bigo_str", "[ ]* %float " + big_o},
+                    {"%bigo", big_o},
+                    {"%rms", "[ ]*[0-9]+ %"}});
+  AddCases(
+      TC_ConsoleOut,
+      {{"^%bigo_name %bigo_str %bigo_str[ ]*$"},
+       {"^%bigo_name", MR_Not},  // Assert we we didn't only matched a name.
+       {"^%rms_name %rms %rms[ ]*$", MR_Next}});
+  AddCases(TC_JSONOut, {{"\"name\": \"%bigo_name\",$"},
+                        {"\"run_name\": \"%name\",$", MR_Next},
+                        {"\"run_type\": \"aggregate\",$", MR_Next},
+                        {"\"repetitions\": %int,$", MR_Next},
+                        {"\"threads\": 1,$", MR_Next},
+                        {"\"aggregate_name\": \"BigO\",$", MR_Next},
+                        {"\"cpu_coefficient\": %float,$", MR_Next},
+                        {"\"real_coefficient\": %float,$", MR_Next},
+                        {"\"big_o\": \"%bigo\",$", MR_Next},
+                        {"\"time_unit\": \"ns\"$", MR_Next},
+                        {"}", MR_Next},
+                        {"\"name\": \"%rms_name\",$"},
+                        {"\"run_name\": \"%name\",$", MR_Next},
+                        {"\"run_type\": \"aggregate\",$", MR_Next},
+                        {"\"repetitions\": %int,$", MR_Next},
+                        {"\"threads\": 1,$", MR_Next},
+                        {"\"aggregate_name\": \"RMS\",$", MR_Next},
+                        {"\"rms\": %float$", MR_Next},
+                        {"}", MR_Next}});
+  AddCases(TC_CSVOut, {{"^\"%bigo_name\",,%float,%float,%bigo,,,,,$"},
+                       {"^\"%bigo_name\"", MR_Not},
+                       {"^\"%rms_name\",,%float,%float,,,,,,$", MR_Next}});
+  return 0;
+}
+
+}  // end namespace
+
+// ========================================================================= //
+// --------------------------- Testing BigO O(1) --------------------------- //
+// ========================================================================= //
+
+void BM_Complexity_O1(benchmark::State& state) {
+  for (auto _ : state) {
+    for (int i = 0; i < 1024; ++i) {
+      benchmark::DoNotOptimize(&i);
+    }
+  }
+  state.SetComplexityN(state.range(0));
+}
+BENCHMARK(BM_Complexity_O1)->Range(1, 1 << 18)->Complexity(benchmark::o1);
+BENCHMARK(BM_Complexity_O1)->Range(1, 1 << 18)->Complexity();
+BENCHMARK(BM_Complexity_O1)
+    ->Range(1, 1 << 18)
+    ->Complexity([](benchmark::IterationCount) { return 1.0; });
+
+const char *one_test_name = "BM_Complexity_O1";
+const char *big_o_1_test_name = "BM_Complexity_O1_BigO";
+const char *rms_o_1_test_name = "BM_Complexity_O1_RMS";
+const char *enum_big_o_1 = "\\([0-9]+\\)";
+// FIXME: Tolerate both '(1)' and 'lgN' as output when the complexity is auto
+// deduced.
+// See https://github.com/google/benchmark/issues/272
+const char *auto_big_o_1 = "(\\([0-9]+\\))|(lgN)";
+const char *lambda_big_o_1 = "f\\(N\\)";
+
+// Add enum tests
+ADD_COMPLEXITY_CASES(one_test_name, big_o_1_test_name, rms_o_1_test_name,
+                     enum_big_o_1);
+
+// Add auto enum tests
+ADD_COMPLEXITY_CASES(one_test_name, big_o_1_test_name, rms_o_1_test_name,
+                     auto_big_o_1);
+
+// Add lambda tests
+ADD_COMPLEXITY_CASES(one_test_name, big_o_1_test_name, rms_o_1_test_name,
+                     lambda_big_o_1);
+
+// ========================================================================= //
+// --------------------------- Testing BigO O(N) --------------------------- //
+// ========================================================================= //
+
+std::vector<int> ConstructRandomVector(int64_t size) {
+  std::vector<int> v;
+  v.reserve(static_cast<int>(size));
+  for (int i = 0; i < size; ++i) {
+    v.push_back(static_cast<int>(std::rand() % size));
+  }
+  return v;
+}
+
+void BM_Complexity_O_N(benchmark::State& state) {
+  auto v = ConstructRandomVector(state.range(0));
+  // Test worst case scenario (item not in vector)
+  const int64_t item_not_in_vector = state.range(0) * 2;
+  for (auto _ : state) {
+    benchmark::DoNotOptimize(std::find(v.begin(), v.end(), item_not_in_vector));
+  }
+  state.SetComplexityN(state.range(0));
+}
+BENCHMARK(BM_Complexity_O_N)
+    ->RangeMultiplier(2)
+    ->Range(1 << 10, 1 << 16)
+    ->Complexity(benchmark::oN);
+BENCHMARK(BM_Complexity_O_N)
+    ->RangeMultiplier(2)
+    ->Range(1 << 10, 1 << 16)
+    ->Complexity([](benchmark::IterationCount n) -> double {
+      return static_cast<double>(n);
+    });
+BENCHMARK(BM_Complexity_O_N)
+    ->RangeMultiplier(2)
+    ->Range(1 << 10, 1 << 16)
+    ->Complexity();
+
+const char *n_test_name = "BM_Complexity_O_N";
+const char *big_o_n_test_name = "BM_Complexity_O_N_BigO";
+const char *rms_o_n_test_name = "BM_Complexity_O_N_RMS";
+const char *enum_auto_big_o_n = "N";
+const char *lambda_big_o_n = "f\\(N\\)";
+
+// Add enum tests
+ADD_COMPLEXITY_CASES(n_test_name, big_o_n_test_name, rms_o_n_test_name,
+                     enum_auto_big_o_n);
+
+// Add lambda tests
+ADD_COMPLEXITY_CASES(n_test_name, big_o_n_test_name, rms_o_n_test_name,
+                     lambda_big_o_n);
+
+// ========================================================================= //
+// ------------------------- Testing BigO O(N*lgN) ------------------------- //
+// ========================================================================= //
+
+static void BM_Complexity_O_N_log_N(benchmark::State& state) {
+  auto v = ConstructRandomVector(state.range(0));
+  for (auto _ : state) {
+    std::sort(v.begin(), v.end());
+  }
+  state.SetComplexityN(state.range(0));
+}
+static const double kLog2E = 1.44269504088896340736;
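+// kLog2E converts the natural logarithm to log base 2, so the lambda below
+// mirrors the N*lgN fitting curve used for benchmark::oNLogN.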
+BENCHMARK(BM_Complexity_O_N_log_N)
+    ->RangeMultiplier(2)
+    ->Range(1 << 10, 1 << 16)
+    ->Complexity(benchmark::oNLogN);
+BENCHMARK(BM_Complexity_O_N_log_N)
+    ->RangeMultiplier(2)
+    ->Range(1 << 10, 1 << 16)
+    ->Complexity([](benchmark::IterationCount n) {
+      return kLog2E * n * log(static_cast<double>(n));
+    });
+BENCHMARK(BM_Complexity_O_N_log_N)
+    ->RangeMultiplier(2)
+    ->Range(1 << 10, 1 << 16)
+    ->Complexity();
+
+const char *n_lg_n_test_name = "BM_Complexity_O_N_log_N";
+const char *big_o_n_lg_n_test_name = "BM_Complexity_O_N_log_N_BigO";
+const char *rms_o_n_lg_n_test_name = "BM_Complexity_O_N_log_N_RMS";
+const char *enum_auto_big_o_n_lg_n = "NlgN";
+const char *lambda_big_o_n_lg_n = "f\\(N\\)";
+
+// Add enum tests
+ADD_COMPLEXITY_CASES(n_lg_n_test_name, big_o_n_lg_n_test_name,
+                     rms_o_n_lg_n_test_name, enum_auto_big_o_n_lg_n);
+
+// Add lambda tests
+ADD_COMPLEXITY_CASES(n_lg_n_test_name, big_o_n_lg_n_test_name,
+                     rms_o_n_lg_n_test_name, lambda_big_o_n_lg_n);
+
+// ========================================================================= //
+// -------- Testing formatting of Complexity with captured args ------------ //
+// ========================================================================= //
+
+void BM_ComplexityCaptureArgs(benchmark::State& state, int n) {
+  for (auto _ : state) {
+    // This test requires a non-zero CPU time to avoid divide-by-zero
+    benchmark::DoNotOptimize(state.iterations());
+  }
+  state.SetComplexityN(n);
+}
+
+BENCHMARK_CAPTURE(BM_ComplexityCaptureArgs, capture_test, 100)
+    ->Complexity(benchmark::oN)
+    ->Ranges({{1, 2}, {3, 4}});
+
+const std::string complexity_capture_name =
+    "BM_ComplexityCaptureArgs/capture_test";
+
+ADD_COMPLEXITY_CASES(complexity_capture_name, complexity_capture_name + "_BigO",
+                     complexity_capture_name + "_RMS", "N");
+
+// ========================================================================= //
+// --------------------------- TEST CASES END ------------------------------ //
+// ========================================================================= //
+
+int main(int argc, char *argv[]) { RunOutputTests(argc, argv); }
diff --git a/src/third_party/google_benchmark/test/cxx03_test.cc b/src/third_party/google_benchmark/test/cxx03_test.cc
new file mode 100644
index 0000000..c4c9a52
--- /dev/null
+++ b/src/third_party/google_benchmark/test/cxx03_test.cc
@@ -0,0 +1,63 @@
+#undef NDEBUG
+#include <cassert>
+#include <cstddef>
+
+#include "benchmark/benchmark.h"
+
+#if __cplusplus >= 201103L
+#error C++11 or greater detected. Should be C++03.
+#endif
+
+#ifdef BENCHMARK_HAS_CXX11
+#error C++11 or greater detected by the library. BENCHMARK_HAS_CXX11 is defined.
+#endif
+
+void BM_empty(benchmark::State& state) {
+  while (state.KeepRunning()) {
+    volatile benchmark::IterationCount x = state.iterations();
+    ((void)x);
+  }
+}
+BENCHMARK(BM_empty);
+
+// The new C++11 interface for args/ranges requires initializer list support.
+// Therefore we provide the old interface to support C++03.
+void BM_old_arg_range_interface(benchmark::State& state) {
+  assert((state.range(0) == 1 && state.range(1) == 2) ||
+         (state.range(0) == 5 && state.range(1) == 6));
+  while (state.KeepRunning()) {
+  }
+}
+BENCHMARK(BM_old_arg_range_interface)->ArgPair(1, 2)->RangePair(5, 5, 6, 6);
+
+template <class T, class U>
+void BM_template2(benchmark::State& state) {
+  BM_empty(state);
+}
+BENCHMARK_TEMPLATE2(BM_template2, int, long);
+
+template <class T>
+void BM_template1(benchmark::State& state) {
+  BM_empty(state);
+}
+BENCHMARK_TEMPLATE(BM_template1, long);
+BENCHMARK_TEMPLATE1(BM_template1, int);
+
+template <class T>
+struct BM_Fixture : public ::benchmark::Fixture {
+};
+
+BENCHMARK_TEMPLATE_F(BM_Fixture, BM_template1, long)(benchmark::State& state) {
+  BM_empty(state);
+}
+BENCHMARK_TEMPLATE1_F(BM_Fixture, BM_template2, int)(benchmark::State& state) {
+  BM_empty(state);
+}
+
+void BM_counters(benchmark::State& state) {
+    BM_empty(state);
+    state.counters["Foo"] = 2;
+}
+BENCHMARK(BM_counters);
+
+BENCHMARK_MAIN();
diff --git a/src/third_party/google_benchmark/test/diagnostics_test.cc b/src/third_party/google_benchmark/test/diagnostics_test.cc
new file mode 100644
index 0000000..dd64a33
--- /dev/null
+++ b/src/third_party/google_benchmark/test/diagnostics_test.cc
@@ -0,0 +1,80 @@
+// Testing:
+//   State::PauseTiming()
+//   State::ResumeTiming()
+// Test that CHECKs within these functions diagnose when they are called
+// outside of the KeepRunning() loop.
+//
+// NOTE: Users should NOT include or use src/check.h. This is only done in
+// order to test library internals.
+
+#include <cstdlib>
+#include <stdexcept>
+
+#include "../src/check.h"
+#include "benchmark/benchmark.h"
+
+#if defined(__GNUC__) && !defined(__EXCEPTIONS)
+#define TEST_HAS_NO_EXCEPTIONS
+#endif
+
+void TestHandler() {
+#ifndef TEST_HAS_NO_EXCEPTIONS
+  throw std::logic_error("");
+#else
+  std::abort();
+#endif
+}
+
+void try_invalid_pause_resume(benchmark::State& state) {
+#if !defined(TEST_BENCHMARK_LIBRARY_HAS_NO_ASSERTIONS) && !defined(TEST_HAS_NO_EXCEPTIONS)
+  try {
+    state.PauseTiming();
+    std::abort();
+  } catch (std::logic_error const&) {
+  }
+  try {
+    state.ResumeTiming();
+    std::abort();
+  } catch (std::logic_error const&) {
+  }
+#else
+  (void)state;  // avoid unused warning
+#endif
+}
+
+void BM_diagnostic_test(benchmark::State& state) {
+  static bool called_once = false;
+
+  if (called_once == false) try_invalid_pause_resume(state);
+
+  for (auto _ : state) {
+    benchmark::DoNotOptimize(state.iterations());
+  }
+
+  if (called_once == false) try_invalid_pause_resume(state);
+
+  called_once = true;
+}
+BENCHMARK(BM_diagnostic_test);
+
+
+void BM_diagnostic_test_keep_running(benchmark::State& state) {
+  static bool called_once = false;
+
+  if (called_once == false) try_invalid_pause_resume(state);
+
+  while (state.KeepRunning()) {
+    benchmark::DoNotOptimize(state.iterations());
+  }
+
+  if (called_once == false) try_invalid_pause_resume(state);
+
+  called_once = true;
+}
+BENCHMARK(BM_diagnostic_test_keep_running);
+
+int main(int argc, char* argv[]) {
+  benchmark::internal::GetAbortHandler() = &TestHandler;
+  benchmark::Initialize(&argc, argv);
+  benchmark::RunSpecifiedBenchmarks();
+}
diff --git a/src/third_party/google_benchmark/test/display_aggregates_only_test.cc b/src/third_party/google_benchmark/test/display_aggregates_only_test.cc
new file mode 100644
index 0000000..3c36d3f
--- /dev/null
+++ b/src/third_party/google_benchmark/test/display_aggregates_only_test.cc
@@ -0,0 +1,43 @@
+
+#undef NDEBUG
+#include <cstdio>
+#include <string>
+
+#include "benchmark/benchmark.h"
+#include "output_test.h"
+
+// Ok this test is super ugly. We want to check what happens with the file
+// reporter in the presence of DisplayAggregatesOnly().
+// We do not care about console output; the normal tests already check that.
+
+void BM_SummaryRepeat(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+}
+BENCHMARK(BM_SummaryRepeat)->Repetitions(3)->DisplayAggregatesOnly();
+
+int main(int argc, char* argv[]) {
+  const std::string output = GetFileReporterOutput(argc, argv);
+
+  if (SubstrCnt(output, "\"name\": \"BM_SummaryRepeat/repeats:3") != 6 ||
+      SubstrCnt(output, "\"name\": \"BM_SummaryRepeat/repeats:3\"") != 3 ||
+      SubstrCnt(output, "\"name\": \"BM_SummaryRepeat/repeats:3_mean\"") != 1 ||
+      SubstrCnt(output, "\"name\": \"BM_SummaryRepeat/repeats:3_median\"") !=
+          1 ||
+      SubstrCnt(output, "\"name\": \"BM_SummaryRepeat/repeats:3_stddev\"") !=
+          1) {
+    std::cout << "Precondition mismatch. Expected to only find 6 "
+                 "occurrences of \"BM_SummaryRepeat/repeats:3\" substring:\n"
+                 "\"name\": \"BM_SummaryRepeat/repeats:3\", "
+                 "\"name\": \"BM_SummaryRepeat/repeats:3\", "
+                 "\"name\": \"BM_SummaryRepeat/repeats:3\", "
+                 "\"name\": \"BM_SummaryRepeat/repeats:3_mean\", "
+                 "\"name\": \"BM_SummaryRepeat/repeats:3_median\", "
+                 "\"name\": \"BM_SummaryRepeat/repeats:3_stddev\"\nThe entire "
+                 "output:\n";
+    std::cout << output;
+    return 1;
+  }
+
+  return 0;
+}
diff --git a/src/third_party/google_benchmark/test/donotoptimize_assembly_test.cc b/src/third_party/google_benchmark/test/donotoptimize_assembly_test.cc
new file mode 100644
index 0000000..d4b0bab
--- /dev/null
+++ b/src/third_party/google_benchmark/test/donotoptimize_assembly_test.cc
@@ -0,0 +1,163 @@
+#include <benchmark/benchmark.h>
+
+#ifdef __clang__
+#pragma clang diagnostic ignored "-Wreturn-type"
+#endif
+
+extern "C" {
+
+extern int ExternInt;
+extern int ExternInt2;
+extern int ExternInt3;
+
+inline int Add42(int x) { return x + 42; }
+
+struct NotTriviallyCopyable {
+  NotTriviallyCopyable();
+  explicit NotTriviallyCopyable(int x) : value(x) {}
+  NotTriviallyCopyable(NotTriviallyCopyable const&);
+  int value;
+};
+
+struct Large {
+  int value;
+  int data[2];
+};
+
+}
+// CHECK-LABEL: test_with_rvalue:
+extern "C" void test_with_rvalue() {
+  benchmark::DoNotOptimize(Add42(0));
+  // CHECK: movl $42, %eax
+  // CHECK: ret
+}
+
+// CHECK-LABEL: test_with_large_rvalue:
+extern "C" void test_with_large_rvalue() {
+  benchmark::DoNotOptimize(Large{ExternInt, {ExternInt, ExternInt}});
+  // CHECK: ExternInt(%rip)
+  // CHECK: movl %eax, -{{[0-9]+}}(%[[REG:[a-z]+]]
+  // CHECK: movl %eax, -{{[0-9]+}}(%[[REG]])
+  // CHECK: movl %eax, -{{[0-9]+}}(%[[REG]])
+  // CHECK: ret
+}
+
+// CHECK-LABEL: test_with_non_trivial_rvalue:
+extern "C" void test_with_non_trivial_rvalue() {
+  benchmark::DoNotOptimize(NotTriviallyCopyable(ExternInt));
+  // CHECK: mov{{l|q}} ExternInt(%rip)
+  // CHECK: ret
+}
+
+// CHECK-LABEL: test_with_lvalue:
+extern "C" void test_with_lvalue() {
+  int x = 101;
+  benchmark::DoNotOptimize(x);
+  // CHECK-GNU: movl $101, %eax
+  // CHECK-CLANG: movl $101, -{{[0-9]+}}(%[[REG:[a-z]+]])
+  // CHECK: ret
+}
+
+// CHECK-LABEL: test_with_large_lvalue:
+extern "C" void test_with_large_lvalue() {
+  Large L{ExternInt, {ExternInt, ExternInt}};
+  benchmark::DoNotOptimize(L);
+  // CHECK: ExternInt(%rip)
+  // CHECK: movl %eax, -{{[0-9]+}}(%[[REG:[a-z]+]])
+  // CHECK: movl %eax, -{{[0-9]+}}(%[[REG]])
+  // CHECK: movl %eax, -{{[0-9]+}}(%[[REG]])
+  // CHECK: ret
+}
+
+// CHECK-LABEL: test_with_non_trivial_lvalue:
+extern "C" void test_with_non_trivial_lvalue() {
+  NotTriviallyCopyable NTC(ExternInt);
+  benchmark::DoNotOptimize(NTC);
+  // CHECK: ExternInt(%rip)
+  // CHECK: movl %eax, -{{[0-9]+}}(%[[REG:[a-z]+]])
+  // CHECK: ret
+}
+
+// CHECK-LABEL: test_with_const_lvalue:
+extern "C" void test_with_const_lvalue() {
+  const int x = 123;
+  benchmark::DoNotOptimize(x);
+  // CHECK: movl $123, %eax
+  // CHECK: ret
+}
+
+// CHECK-LABEL: test_with_large_const_lvalue:
+extern "C" void test_with_large_const_lvalue() {
+  const Large L{ExternInt, {ExternInt, ExternInt}};
+  benchmark::DoNotOptimize(L);
+  // CHECK: ExternInt(%rip)
+  // CHECK: movl %eax, -{{[0-9]+}}(%[[REG:[a-z]+]])
+  // CHECK: movl %eax, -{{[0-9]+}}(%[[REG]])
+  // CHECK: movl %eax, -{{[0-9]+}}(%[[REG]])
+  // CHECK: ret
+}
+
+// CHECK-LABEL: test_with_non_trivial_const_lvalue:
+extern "C" void test_with_non_trivial_const_lvalue() {
+  const NotTriviallyCopyable Obj(ExternInt);
+  benchmark::DoNotOptimize(Obj);
+  // CHECK: mov{{q|l}} ExternInt(%rip)
+  // CHECK: ret
+}
+
+// CHECK-LABEL: test_div_by_two:
+extern "C" int test_div_by_two(int input) {
+  int divisor = 2;
+  benchmark::DoNotOptimize(divisor);
+  return input / divisor;
+  // CHECK: movl $2, [[DEST:.*]]
+  // CHECK: idivl [[DEST]]
+  // CHECK: ret
+}
+
+// CHECK-LABEL: test_inc_integer:
+extern "C" int test_inc_integer() {
+  int x = 0;
+  for (int i=0; i < 5; ++i)
+    benchmark::DoNotOptimize(++x);
+  // CHECK: movl $1, [[DEST:.*]]
+  // CHECK: {{(addl \$1,|incl)}} [[DEST]]
+  // CHECK: {{(addl \$1,|incl)}} [[DEST]]
+  // CHECK: {{(addl \$1,|incl)}} [[DEST]]
+  // CHECK: {{(addl \$1,|incl)}} [[DEST]]
+  // CHECK-CLANG: movl [[DEST]], %eax
+  // CHECK: ret
+  return x;
+}
+
+// CHECK-LABEL: test_pointer_rvalue
+extern "C" void test_pointer_rvalue() {
+  // CHECK: movl $42, [[DEST:.*]]
+  // CHECK: leaq [[DEST]], %rax
+  // CHECK-CLANG: movq %rax, -{{[0-9]+}}(%[[REG:[a-z]+]])
+  // CHECK: ret
+  int x = 42;
+  benchmark::DoNotOptimize(&x);
+}
+
+// CHECK-LABEL: test_pointer_const_lvalue:
+extern "C" void test_pointer_const_lvalue() {
+  // CHECK: movl $42, [[DEST:.*]]
+  // CHECK: leaq [[DEST]], %rax
+  // CHECK-CLANG: movq %rax, -{{[0-9]+}}(%[[REG:[a-z]+]])
+  // CHECK: ret
+  int x = 42;
+  int * const xp = &x;
+  benchmark::DoNotOptimize(xp);
+}
+
+// CHECK-LABEL: test_pointer_lvalue:
+extern "C" void test_pointer_lvalue() {
+  // CHECK: movl $42, [[DEST:.*]]
+  // CHECK: leaq [[DEST]], %rax
+  // CHECK-CLANG: movq %rax, -{{[0-9]+}}(%[[REG:[a-z+]+]])
+  // CHECK: ret
+  int x = 42;
+  int *xp = &x;
+  benchmark::DoNotOptimize(xp);
+}
diff --git a/src/third_party/google_benchmark/test/donotoptimize_test.cc b/src/third_party/google_benchmark/test/donotoptimize_test.cc
new file mode 100644
index 0000000..2ce92d1
--- /dev/null
+++ b/src/third_party/google_benchmark/test/donotoptimize_test.cc
@@ -0,0 +1,52 @@
+#include "benchmark/benchmark.h"
+
+#include <cstdint>
+
+namespace {
+#if defined(__GNUC__)
+std::uint64_t double_up(const std::uint64_t x) __attribute__((const));
+#endif
+std::uint64_t double_up(const std::uint64_t x) { return x * 2; }
+}
+
+// Using DoNotOptimize on types like BitRef seems to cause a lot of problems
+// with the inline assembly on both GCC and Clang.
+struct BitRef {
+  int index;
+  unsigned char &byte;
+
+public:
+  static BitRef Make() {
+    static unsigned char arr[2] = {};
+    BitRef b(1, arr[0]);
+    return b;
+  }
+private:
+  BitRef(int i, unsigned char& b) : index(i), byte(b) {}
+};
+
+int main(int, char*[]) {
+  // this test verifies compilation of DoNotOptimize() for some types
+
+  char buffer8[8] = "";
+  benchmark::DoNotOptimize(buffer8);
+
+  char buffer20[20] = "";
+  benchmark::DoNotOptimize(buffer20);
+
+  char buffer1024[1024] = "";
+  benchmark::DoNotOptimize(buffer1024);
+  benchmark::DoNotOptimize(&buffer1024[0]);
+
+  int x = 123;
+  benchmark::DoNotOptimize(x);
+  benchmark::DoNotOptimize(&x);
+  benchmark::DoNotOptimize(x += 42);
+
+  benchmark::DoNotOptimize(double_up(x));
+
+  // These tests are to ensure that DoNotOptimize() also compiles for
+  // awkward reference-holding types such as BitRef.
+  benchmark::DoNotOptimize(BitRef::Make());
+  BitRef lval = BitRef::Make();
+  benchmark::DoNotOptimize(lval);
+}
diff --git a/src/third_party/google_benchmark/test/filter_test.cc b/src/third_party/google_benchmark/test/filter_test.cc
new file mode 100644
index 0000000..0e27065
--- /dev/null
+++ b/src/third_party/google_benchmark/test/filter_test.cc
@@ -0,0 +1,104 @@
+#include "benchmark/benchmark.h"
+
+#include <cassert>
+#include <cmath>
+#include <cstdint>
+#include <cstdlib>
+
+#include <iostream>
+#include <limits>
+#include <sstream>
+#include <string>
+
+namespace {
+
+class TestReporter : public benchmark::ConsoleReporter {
+ public:
+  virtual bool ReportContext(const Context& context) {
+    return ConsoleReporter::ReportContext(context);
+  };
+
+  virtual void ReportRuns(const std::vector<Run>& report) {
+    ++count_;
+    ConsoleReporter::ReportRuns(report);
+  };
+
+  TestReporter() : count_(0) {}
+
+  virtual ~TestReporter() {}
+
+  size_t GetCount() const { return count_; }
+
+ private:
+  mutable size_t count_;
+};
+
+}  // end namespace
+
+static void NoPrefix(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+}
+BENCHMARK(NoPrefix);
+
+static void BM_Foo(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+}
+BENCHMARK(BM_Foo);
+
+static void BM_Bar(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+}
+BENCHMARK(BM_Bar);
+
+static void BM_FooBar(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+}
+BENCHMARK(BM_FooBar);
+
+static void BM_FooBa(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+}
+BENCHMARK(BM_FooBa);
+
+int main(int argc, char **argv) {
+  bool list_only = false;
+  for (int i = 0; i < argc; ++i)
+    list_only |= std::string(argv[i]).find("--benchmark_list_tests") !=
+                 std::string::npos;
+
+  benchmark::Initialize(&argc, argv);
+
+  TestReporter test_reporter;
+  const size_t returned_count =
+      benchmark::RunSpecifiedBenchmarks(&test_reporter);
+
+  if (argc == 2) {
+    // Make sure we ran all of the tests
+    std::stringstream ss(argv[1]);
+    size_t expected_return;
+    ss >> expected_return;
+
+    if (returned_count != expected_return) {
+      std::cerr << "ERROR: Expected " << expected_return
+                << " tests to match the filter but returned_count = "
+                << returned_count << std::endl;
+      return -1;
+    }
+
+    const size_t expected_reports = list_only ? 0 : expected_return;
+    const size_t reports_count = test_reporter.GetCount();
+    if (reports_count != expected_reports) {
+      std::cerr << "ERROR: Expected " << expected_reports
+                << " tests to be run but reported_count = " << reports_count
+                << std::endl;
+      return -1;
+    }
+  }
+
+  return 0;
+}
diff --git a/src/third_party/google_benchmark/test/fixture_test.cc b/src/third_party/google_benchmark/test/fixture_test.cc
new file mode 100644
index 0000000..1462b10
--- /dev/null
+++ b/src/third_party/google_benchmark/test/fixture_test.cc
@@ -0,0 +1,49 @@
+
+#include "benchmark/benchmark.h"
+
+#include <cassert>
+#include <memory>
+
+class MyFixture : public ::benchmark::Fixture {
+ public:
+  void SetUp(const ::benchmark::State& state) {
+    if (state.thread_index == 0) {
+      assert(data.get() == nullptr);
+      data.reset(new int(42));
+    }
+  }
+
+  void TearDown(const ::benchmark::State& state) {
+    if (state.thread_index == 0) {
+      assert(data.get() != nullptr);
+      data.reset();
+    }
+  }
+
+  ~MyFixture() { assert(data == nullptr); }
+
+  std::unique_ptr<int> data;
+};
+
+BENCHMARK_F(MyFixture, Foo)(benchmark::State &st) {
+  assert(data.get() != nullptr);
+  assert(*data == 42);
+  for (auto _ : st) {
+  }
+}
+
+BENCHMARK_DEFINE_F(MyFixture, Bar)(benchmark::State& st) {
+  if (st.thread_index == 0) {
+    assert(data.get() != nullptr);
+    assert(*data == 42);
+  }
+  for (auto _ : st) {
+    assert(data.get() != nullptr);
+    assert(*data == 42);
+  }
+  st.SetItemsProcessed(st.range(0));
+}
+BENCHMARK_REGISTER_F(MyFixture, Bar)->Arg(42);
+BENCHMARK_REGISTER_F(MyFixture, Bar)->Arg(42)->ThreadPerCpu();
+
+BENCHMARK_MAIN();
diff --git a/src/third_party/google_benchmark/test/internal_threading_test.cc b/src/third_party/google_benchmark/test/internal_threading_test.cc
new file mode 100644
index 0000000..039d7c1
--- /dev/null
+++ b/src/third_party/google_benchmark/test/internal_threading_test.cc
@@ -0,0 +1,184 @@
+
+#undef NDEBUG
+
+#include <chrono>
+#include <thread>
+#include "../src/timers.h"
+#include "benchmark/benchmark.h"
+#include "output_test.h"
+
+static const std::chrono::duration<double, std::milli> time_frame(50);
+static const double time_frame_in_sec(
+    std::chrono::duration_cast<std::chrono::duration<double, std::ratio<1, 1>>>(
+        time_frame)
+        .count());
+
+void MyBusySpinwait() {
+  const auto start = benchmark::ChronoClockNow();
+
+  while (true) {
+    const auto now = benchmark::ChronoClockNow();
+    const auto elapsed = now - start;
+
+    if (std::chrono::duration<double, std::chrono::seconds::period>(elapsed) >=
+        time_frame)
+      return;
+  }
+}
+
+// ========================================================================= //
+// --------------------------- TEST CASES BEGIN ---------------------------- //
+// ========================================================================= //
+
+// ========================================================================= //
+// BM_MainThread
+
+void BM_MainThread(benchmark::State& state) {
+  for (auto _ : state) {
+    MyBusySpinwait();
+    state.SetIterationTime(time_frame_in_sec);
+  }
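+  // kIsRate divides the counter's value by the elapsed time, so "invtime"
+  // reports an inverse-time rate (1/seconds).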
+  state.counters["invtime"] =
+      benchmark::Counter{1, benchmark::Counter::kIsRate};
+}
+
+BENCHMARK(BM_MainThread)->Iterations(1)->Threads(1);
+BENCHMARK(BM_MainThread)->Iterations(1)->Threads(1)->UseRealTime();
+BENCHMARK(BM_MainThread)->Iterations(1)->Threads(1)->UseManualTime();
+BENCHMARK(BM_MainThread)->Iterations(1)->Threads(1)->MeasureProcessCPUTime();
+BENCHMARK(BM_MainThread)
+    ->Iterations(1)
+    ->Threads(1)
+    ->MeasureProcessCPUTime()
+    ->UseRealTime();
+BENCHMARK(BM_MainThread)
+    ->Iterations(1)
+    ->Threads(1)
+    ->MeasureProcessCPUTime()
+    ->UseManualTime();
+
+BENCHMARK(BM_MainThread)->Iterations(1)->Threads(2);
+BENCHMARK(BM_MainThread)->Iterations(1)->Threads(2)->UseRealTime();
+BENCHMARK(BM_MainThread)->Iterations(1)->Threads(2)->UseManualTime();
+BENCHMARK(BM_MainThread)->Iterations(1)->Threads(2)->MeasureProcessCPUTime();
+BENCHMARK(BM_MainThread)
+    ->Iterations(1)
+    ->Threads(2)
+    ->MeasureProcessCPUTime()
+    ->UseRealTime();
+BENCHMARK(BM_MainThread)
+    ->Iterations(1)
+    ->Threads(2)
+    ->MeasureProcessCPUTime()
+    ->UseManualTime();
+
+// ========================================================================= //
+// BM_WorkerThread
+
+void BM_WorkerThread(benchmark::State& state) {
+  for (auto _ : state) {
+    std::thread Worker(&MyBusySpinwait);
+    Worker.join();
+    state.SetIterationTime(time_frame_in_sec);
+  }
+  state.counters["invtime"] =
+      benchmark::Counter{1, benchmark::Counter::kIsRate};
+}
+
+BENCHMARK(BM_WorkerThread)->Iterations(1)->Threads(1);
+BENCHMARK(BM_WorkerThread)->Iterations(1)->Threads(1)->UseRealTime();
+BENCHMARK(BM_WorkerThread)->Iterations(1)->Threads(1)->UseManualTime();
+BENCHMARK(BM_WorkerThread)->Iterations(1)->Threads(1)->MeasureProcessCPUTime();
+BENCHMARK(BM_WorkerThread)
+    ->Iterations(1)
+    ->Threads(1)
+    ->MeasureProcessCPUTime()
+    ->UseRealTime();
+BENCHMARK(BM_WorkerThread)
+    ->Iterations(1)
+    ->Threads(1)
+    ->MeasureProcessCPUTime()
+    ->UseManualTime();
+
+BENCHMARK(BM_WorkerThread)->Iterations(1)->Threads(2);
+BENCHMARK(BM_WorkerThread)->Iterations(1)->Threads(2)->UseRealTime();
+BENCHMARK(BM_WorkerThread)->Iterations(1)->Threads(2)->UseManualTime();
+BENCHMARK(BM_WorkerThread)->Iterations(1)->Threads(2)->MeasureProcessCPUTime();
+BENCHMARK(BM_WorkerThread)
+    ->Iterations(1)
+    ->Threads(2)
+    ->MeasureProcessCPUTime()
+    ->UseRealTime();
+BENCHMARK(BM_WorkerThread)
+    ->Iterations(1)
+    ->Threads(2)
+    ->MeasureProcessCPUTime()
+    ->UseManualTime();
+
+// ========================================================================= //
+// BM_MainThreadAndWorkerThread
+
+void BM_MainThreadAndWorkerThread(benchmark::State& state) {
+  for (auto _ : state) {
+    std::thread Worker(&MyBusySpinwait);
+    MyBusySpinwait();
+    Worker.join();
+    state.SetIterationTime(time_frame_in_sec);
+  }
+  state.counters["invtime"] =
+      benchmark::Counter{1, benchmark::Counter::kIsRate};
+}
+
+BENCHMARK(BM_MainThreadAndWorkerThread)->Iterations(1)->Threads(1);
+BENCHMARK(BM_MainThreadAndWorkerThread)
+    ->Iterations(1)
+    ->Threads(1)
+    ->UseRealTime();
+BENCHMARK(BM_MainThreadAndWorkerThread)
+    ->Iterations(1)
+    ->Threads(1)
+    ->UseManualTime();
+BENCHMARK(BM_MainThreadAndWorkerThread)
+    ->Iterations(1)
+    ->Threads(1)
+    ->MeasureProcessCPUTime();
+BENCHMARK(BM_MainThreadAndWorkerThread)
+    ->Iterations(1)
+    ->Threads(1)
+    ->MeasureProcessCPUTime()
+    ->UseRealTime();
+BENCHMARK(BM_MainThreadAndWorkerThread)
+    ->Iterations(1)
+    ->Threads(1)
+    ->MeasureProcessCPUTime()
+    ->UseManualTime();
+
+BENCHMARK(BM_MainThreadAndWorkerThread)->Iterations(1)->Threads(2);
+BENCHMARK(BM_MainThreadAndWorkerThread)
+    ->Iterations(1)
+    ->Threads(2)
+    ->UseRealTime();
+BENCHMARK(BM_MainThreadAndWorkerThread)
+    ->Iterations(1)
+    ->Threads(2)
+    ->UseManualTime();
+BENCHMARK(BM_MainThreadAndWorkerThread)
+    ->Iterations(1)
+    ->Threads(2)
+    ->MeasureProcessCPUTime();
+BENCHMARK(BM_MainThreadAndWorkerThread)
+    ->Iterations(1)
+    ->Threads(2)
+    ->MeasureProcessCPUTime()
+    ->UseRealTime();
+BENCHMARK(BM_MainThreadAndWorkerThread)
+    ->Iterations(1)
+    ->Threads(2)
+    ->MeasureProcessCPUTime()
+    ->UseManualTime();
+
+// ========================================================================= //
+// ---------------------------- TEST CASES END ----------------------------- //
+// ========================================================================= //
+
+int main(int argc, char* argv[]) { RunOutputTests(argc, argv); }
diff --git a/src/third_party/google_benchmark/test/link_main_test.cc b/src/third_party/google_benchmark/test/link_main_test.cc
new file mode 100644
index 0000000..241ad5c
--- /dev/null
+++ b/src/third_party/google_benchmark/test/link_main_test.cc
@@ -0,0 +1,8 @@
+#include "benchmark/benchmark.h"
+
+void BM_empty(benchmark::State& state) {
+  for (auto _ : state) {
+    benchmark::DoNotOptimize(state.iterations());
+  }
+}
+BENCHMARK(BM_empty);
diff --git a/src/third_party/google_benchmark/test/map_test.cc b/src/third_party/google_benchmark/test/map_test.cc
new file mode 100644
index 0000000..dbf7982
--- /dev/null
+++ b/src/third_party/google_benchmark/test/map_test.cc
@@ -0,0 +1,57 @@
+#include "benchmark/benchmark.h"
+
+#include <cstdlib>
+#include <map>
+
+namespace {
+
+std::map<int, int> ConstructRandomMap(int size) {
+  std::map<int, int> m;
+  for (int i = 0; i < size; ++i) {
+    m.insert(std::make_pair(std::rand() % size, std::rand() % size));
+  }
+  return m;
+}
+
+}  // namespace
+
+// Basic version.
+static void BM_MapLookup(benchmark::State& state) {
+  const int size = static_cast<int>(state.range(0));
+  std::map<int, int> m;
+  for (auto _ : state) {
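+    // PauseTiming()/ResumeTiming() exclude the map construction from the
+    // measurement, at the cost of extra timer overhead on every iteration.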
+    state.PauseTiming();
+    m = ConstructRandomMap(size);
+    state.ResumeTiming();
+    for (int i = 0; i < size; ++i) {
+      benchmark::DoNotOptimize(m.find(std::rand() % size));
+    }
+  }
+  state.SetItemsProcessed(state.iterations() * size);
+}
+BENCHMARK(BM_MapLookup)->Range(1 << 3, 1 << 12);
+
+// Using fixtures.
+class MapFixture : public ::benchmark::Fixture {
+ public:
+  void SetUp(const ::benchmark::State& st) {
+    m = ConstructRandomMap(static_cast<int>(st.range(0)));
+  }
+
+  void TearDown(const ::benchmark::State&) { m.clear(); }
+
+  std::map<int, int> m;
+};
+
+BENCHMARK_DEFINE_F(MapFixture, Lookup)(benchmark::State& state) {
+  const int size = static_cast<int>(state.range(0));
+  for (auto _ : state) {
+    for (int i = 0; i < size; ++i) {
+      benchmark::DoNotOptimize(m.find(std::rand() % size));
+    }
+  }
+  state.SetItemsProcessed(state.iterations() * size);
+}
+BENCHMARK_REGISTER_F(MapFixture, Lookup)->Range(1 << 3, 1 << 12);
+
+BENCHMARK_MAIN();
diff --git a/src/third_party/google_benchmark/test/memory_manager_test.cc b/src/third_party/google_benchmark/test/memory_manager_test.cc
new file mode 100644
index 0000000..90bed16
--- /dev/null
+++ b/src/third_party/google_benchmark/test/memory_manager_test.cc
@@ -0,0 +1,44 @@
+#include <memory>
+
+#include "../src/check.h"
+#include "benchmark/benchmark.h"
+#include "output_test.h"
+
+class TestMemoryManager : public benchmark::MemoryManager {
+  void Start() {}
+  void Stop(Result* result) {
+    result->num_allocs = 42;
+    result->max_bytes_used = 42000;
+  }
+};
+
+void BM_empty(benchmark::State& state) {
+  for (auto _ : state) {
+    benchmark::DoNotOptimize(state.iterations());
+  }
+}
+BENCHMARK(BM_empty);
+
+ADD_CASES(TC_ConsoleOut, {{"^BM_empty %console_report$"}});
+ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_empty\",$"},
+                       {"\"run_name\": \"BM_empty\",$", MR_Next},
+                       {"\"run_type\": \"iteration\",$", MR_Next},
+                       {"\"repetitions\": 0,$", MR_Next},
+                       {"\"repetition_index\": 0,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next},
+                       {"\"iterations\": %int,$", MR_Next},
+                       {"\"real_time\": %float,$", MR_Next},
+                       {"\"cpu_time\": %float,$", MR_Next},
+                       {"\"time_unit\": \"ns\",$", MR_Next},
+                       {"\"allocs_per_iter\": %float,$", MR_Next},
+                       {"\"max_bytes_used\": 42000$", MR_Next},
+                       {"}", MR_Next}});
+ADD_CASES(TC_CSVOut, {{"^\"BM_empty\",%csv_report$"}});
+
+int main(int argc, char* argv[]) {
+  std::unique_ptr<benchmark::MemoryManager> mm(new TestMemoryManager());
+
+  benchmark::RegisterMemoryManager(mm.get());
+  RunOutputTests(argc, argv);
+  benchmark::RegisterMemoryManager(nullptr);
+}
diff --git a/src/third_party/google_benchmark/test/multiple_ranges_test.cc b/src/third_party/google_benchmark/test/multiple_ranges_test.cc
new file mode 100644
index 0000000..b25f40e
--- /dev/null
+++ b/src/third_party/google_benchmark/test/multiple_ranges_test.cc
@@ -0,0 +1,96 @@
+#include "benchmark/benchmark.h"
+
+#include <cassert>
+#include <iostream>
+#include <set>
+#include <vector>
+
+class MultipleRangesFixture : public ::benchmark::Fixture {
+ public:
+  MultipleRangesFixture()
+      : expectedValues({{1, 3, 5},
+                        {1, 3, 8},
+                        {1, 3, 15},
+                        {2, 3, 5},
+                        {2, 3, 8},
+                        {2, 3, 15},
+                        {1, 4, 5},
+                        {1, 4, 8},
+                        {1, 4, 15},
+                        {2, 4, 5},
+                        {2, 4, 8},
+                        {2, 4, 15},
+                        {1, 7, 5},
+                        {1, 7, 8},
+                        {1, 7, 15},
+                        {2, 7, 5},
+                        {2, 7, 8},
+                        {2, 7, 15},
+                        {7, 6, 3}}) {}
+
+  void SetUp(const ::benchmark::State& state) {
+    std::vector<int64_t> ranges = {state.range(0), state.range(1),
+                                   state.range(2)};
+
+    assert(expectedValues.find(ranges) != expectedValues.end());
+
+    actualValues.insert(ranges);
+  }
+
+  // NOTE: This is not TearDown as we want to check after _all_ runs are
+  // complete.
+  virtual ~MultipleRangesFixture() {
+    if (actualValues != expectedValues) {
+      std::cout << "EXPECTED\n";
+      for (auto v : expectedValues) {
+        std::cout << "{";
+        for (int64_t iv : v) {
+          std::cout << iv << ", ";
+        }
+        std::cout << "}\n";
+      }
+      std::cout << "ACTUAL\n";
+      for (auto v : actualValues) {
+        std::cout << "{";
+        for (int64_t iv : v) {
+          std::cout << iv << ", ";
+        }
+        std::cout << "}\n";
+      }
+    }
+  }
+
+  std::set<std::vector<int64_t>> expectedValues;
+  std::set<std::vector<int64_t>> actualValues;
+};
+
+BENCHMARK_DEFINE_F(MultipleRangesFixture, Empty)(benchmark::State& state) {
+  for (auto _ : state) {
+    int64_t product = state.range(0) * state.range(1) * state.range(2);
+    for (int64_t x = 0; x < product; x++) {
+      benchmark::DoNotOptimize(x);
+    }
+  }
+}
+
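+// Ranges() expands each {lo, hi} pair using the range multiplier and then
+// takes the Cartesian product of the per-argument values; together with the
+// extra Args({7, 6, 3}) this yields exactly the tuples listed in
+// expectedValues above.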
+BENCHMARK_REGISTER_F(MultipleRangesFixture, Empty)
+    ->RangeMultiplier(2)
+    ->Ranges({{1, 2}, {3, 7}, {5, 15}})
+    ->Args({7, 6, 3});
+
+void BM_CheckDefaultArgument(benchmark::State& state) {
+  // Test that the 'range()' without an argument is the same as 'range(0)'.
+  assert(state.range() == state.range(0));
+  assert(state.range() != state.range(1));
+  for (auto _ : state) {
+  }
+}
+BENCHMARK(BM_CheckDefaultArgument)->Ranges({{1, 5}, {6, 10}});
+
+static void BM_MultipleRanges(benchmark::State& st) {
+  for (auto _ : st) {
+  }
+}
+BENCHMARK(BM_MultipleRanges)->Ranges({{5, 5}, {6, 6}});
+
+BENCHMARK_MAIN();
diff --git a/src/third_party/google_benchmark/test/options_test.cc b/src/third_party/google_benchmark/test/options_test.cc
new file mode 100644
index 0000000..7bfc235
--- /dev/null
+++ b/src/third_party/google_benchmark/test/options_test.cc
@@ -0,0 +1,75 @@
+#include "benchmark/benchmark.h"
+#include <chrono>
+#include <thread>
+
+#if defined(NDEBUG)
+#undef NDEBUG
+#endif
+#include <cassert>
+
+void BM_basic(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+}
+
+void BM_basic_slow(benchmark::State& state) {
+  std::chrono::milliseconds sleep_duration(state.range(0));
+  for (auto _ : state) {
+    std::this_thread::sleep_for(
+        std::chrono::duration_cast<std::chrono::nanoseconds>(sleep_duration));
+  }
+}
+
+BENCHMARK(BM_basic);
+BENCHMARK(BM_basic)->Arg(42);
+BENCHMARK(BM_basic_slow)->Arg(10)->Unit(benchmark::kNanosecond);
+BENCHMARK(BM_basic_slow)->Arg(100)->Unit(benchmark::kMicrosecond);
+BENCHMARK(BM_basic_slow)->Arg(1000)->Unit(benchmark::kMillisecond);
+BENCHMARK(BM_basic)->Range(1, 8);
+BENCHMARK(BM_basic)->RangeMultiplier(2)->Range(1, 8);
+BENCHMARK(BM_basic)->DenseRange(10, 15);
+BENCHMARK(BM_basic)->Args({42, 42});
+BENCHMARK(BM_basic)->Ranges({{64, 512}, {64, 512}});
+BENCHMARK(BM_basic)->MinTime(0.7);
+BENCHMARK(BM_basic)->UseRealTime();
+BENCHMARK(BM_basic)->ThreadRange(2, 4);
+BENCHMARK(BM_basic)->ThreadPerCpu();
+BENCHMARK(BM_basic)->Repetitions(3);
+BENCHMARK(BM_basic)
+    ->RangeMultiplier(std::numeric_limits<int>::max())
+    ->Range(std::numeric_limits<int64_t>::min(),
+            std::numeric_limits<int64_t>::max());
+
+// Negative ranges
+BENCHMARK(BM_basic)->Range(-64, -1);
+BENCHMARK(BM_basic)->RangeMultiplier(4)->Range(-8, 8);
+BENCHMARK(BM_basic)->DenseRange(-2, 2, 1);
+BENCHMARK(BM_basic)->Ranges({{-64, 1}, {-8, -1}});
+
+void CustomArgs(benchmark::internal::Benchmark* b) {
+  for (int i = 0; i < 10; ++i) {
+    b->Arg(i);
+  }
+}
+
+BENCHMARK(BM_basic)->Apply(CustomArgs);
+
+void BM_explicit_iteration_count(benchmark::State& state) {
+  // Test that benchmarks specified with an explicit iteration count are
+  // only run once.
+  static bool invoked_before = false;
+  assert(!invoked_before);
+  invoked_before = true;
+
+  // Test that the requested iteration count is respected.
+  assert(state.max_iterations == 42);
+  size_t actual_iterations = 0;
+  for (auto _ : state)
+    ++actual_iterations;
+  assert(state.iterations() == state.max_iterations);
+  assert(state.iterations() == 42);
+
+}
+BENCHMARK(BM_explicit_iteration_count)->Iterations(42);
+
+BENCHMARK_MAIN();
diff --git a/src/third_party/google_benchmark/test/output_test.h b/src/third_party/google_benchmark/test/output_test.h
new file mode 100644
index 0000000..9385761
--- /dev/null
+++ b/src/third_party/google_benchmark/test/output_test.h
@@ -0,0 +1,213 @@
+#ifndef TEST_OUTPUT_TEST_H
+#define TEST_OUTPUT_TEST_H
+
+#undef NDEBUG
+#include <functional>
+#include <initializer_list>
+#include <memory>
+#include <sstream>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "../src/re.h"
+#include "benchmark/benchmark.h"
+
+#define CONCAT2(x, y) x##y
+#define CONCAT(x, y) CONCAT2(x, y)
+
+#define ADD_CASES(...) int CONCAT(dummy, __LINE__) = ::AddCases(__VA_ARGS__)
+
+#define SET_SUBSTITUTIONS(...) \
+  int CONCAT(dummy, __LINE__) = ::SetSubstitutions(__VA_ARGS__)
+
+enum MatchRules {
+  MR_Default,  // Skip non-matching lines until a match is found.
+  MR_Next,     // Match must occur on the next line.
+  MR_Not  // No line between the current position and the next match matches
+          // the regex
+};
+
+struct TestCase {
+  TestCase(std::string re, int rule = MR_Default);
+
+  std::string regex_str;
+  int match_rule;
+  std::string substituted_regex;
+  std::shared_ptr<benchmark::Regex> regex;
+};
+
+enum TestCaseID {
+  TC_ConsoleOut,
+  TC_ConsoleErr,
+  TC_JSONOut,
+  TC_JSONErr,
+  TC_CSVOut,
+  TC_CSVErr,
+
+  TC_NumID  // PRIVATE
+};
+
+// Add a list of test cases to be run against the output specified by
+// 'ID'
+int AddCases(TestCaseID ID, std::initializer_list<TestCase> il);
+
+// Add or set a list of substitutions to be performed on constructed regexes.
+// See 'output_test_helper.cc' for a list of default substitutions.
+int SetSubstitutions(
+    std::initializer_list<std::pair<std::string, std::string>> il);
+
+// Run all output tests.
+void RunOutputTests(int argc, char* argv[]);
+
+// Count the number of 'pat' substrings in the 'haystack' string.
+int SubstrCnt(const std::string& haystack, const std::string& pat);
+
+// Run registered benchmarks with the file reporter enabled, and return the
+// content written by the file reporter.
+std::string GetFileReporterOutput(int argc, char* argv[]);
+
+// ========================================================================= //
+// ------------------------- Results checking ------------------------------ //
+// ========================================================================= //
+
+// Call this macro to register a benchmark for checking its results. This
+// should be all that's needed. It subscribes a function to check the (CSV)
+// results of a benchmark. This is done only after verifying that the output
+// strings are really as expected.
+// bm_name_pattern: a name or a regex pattern which will be matched against
+//                  all the benchmark names. Matching benchmarks
+//                  will be the subject of a call to checker_function
+// checker_function: should be of type ResultsCheckFn (see below)
+#define CHECK_BENCHMARK_RESULTS(bm_name_pattern, checker_function) \
+  size_t CONCAT(dummy, __LINE__) = AddChecker(bm_name_pattern, checker_function)
+
+struct Results;
+typedef std::function<void(Results const&)> ResultsCheckFn;
+
+size_t AddChecker(const char* bm_name_pattern, ResultsCheckFn fn);
+
+// Class holding the results of a benchmark.
+// It is passed in calls to checker functions.
+struct Results {
+  // the benchmark name
+  std::string name;
+  // the benchmark fields
+  std::map<std::string, std::string> values;
+
+  Results(const std::string& n) : name(n) {}
+
+  int NumThreads() const;
+
+  double NumIterations() const;
+
+  typedef enum { kCpuTime, kRealTime } BenchmarkTime;
+
+  // get cpu_time or real_time in seconds
+  double GetTime(BenchmarkTime which) const;
+
+  // get the real_time duration of the benchmark in seconds.
+  // it is better to use fuzzy float checks for this, as the float
+  // ASCII formatting is lossy.
+  double DurationRealTime() const {
+    return NumIterations() * GetTime(kRealTime);
+  }
+  // get the cpu_time duration of the benchmark in seconds
+  double DurationCPUTime() const {
+    return NumIterations() * GetTime(kCpuTime);
+  }
+
+  // get the string for a result by name, or nullptr if the name
+  // is not found
+  const std::string* Get(const char* entry_name) const {
+    auto it = values.find(entry_name);
+    if (it == values.end()) return nullptr;
+    return &it->second;
+  }
+
+  // get a result by name, parsed as a specific type.
+  // NOTE: for counters, use GetCounterAs instead.
+  template <class T>
+  T GetAs(const char* entry_name) const;
+
+  // counters are written as doubles, so they have to be read first
+  // as a double, and only then converted to the requested type.
+  template <class T>
+  T GetCounterAs(const char* entry_name) const {
+    double dval = GetAs<double>(entry_name);
+    T tval = static_cast<T>(dval);
+    return tval;
+  }
+};
+
+template <class T>
+T Results::GetAs(const char* entry_name) const {
+  auto* sv = Get(entry_name);
+  CHECK(sv != nullptr && !sv->empty());
+  std::stringstream ss;
+  ss << *sv;
+  T out;
+  ss >> out;
+  CHECK(!ss.fail());
+  return out;
+}
+
+//----------------------------------
+// Macros to help in result checking. Do not use them with arguments causing
+// side-effects.
+
+// clang-format off
+
+#define _CHECK_RESULT_VALUE(entry, getfn, var_type, var_name, relationship, value) \
+    CONCAT(CHECK_, relationship)                                        \
+    (entry.getfn< var_type >(var_name), (value)) << "\n"                \
+    << __FILE__ << ":" << __LINE__ << ": " << (entry).name << ":\n"     \
+    << __FILE__ << ":" << __LINE__ << ": "                              \
+    << "expected (" << #var_type << ")" << (var_name)                   \
+    << "=" << (entry).getfn< var_type >(var_name)                       \
+    << " to be " #relationship " to " << (value) << "\n"
+
+// check with tolerance. eps_factor is the tolerance window, which is
+// interpreted relative to value (eg, 0.1 means 10% of value).
+#define _CHECK_FLOAT_RESULT_VALUE(entry, getfn, var_type, var_name, relationship, value, eps_factor) \
+    CONCAT(CHECK_FLOAT_, relationship)                                  \
+    (entry.getfn< var_type >(var_name), (value), (eps_factor) * (value)) << "\n" \
+    << __FILE__ << ":" << __LINE__ << ": " << (entry).name << ":\n"     \
+    << __FILE__ << ":" << __LINE__ << ": "                              \
+    << "expected (" << #var_type << ")" << (var_name)                   \
+    << "=" << (entry).getfn< var_type >(var_name)                       \
+    << " to be " #relationship " to " << (value) << "\n"                \
+    << __FILE__ << ":" << __LINE__ << ": "                              \
+    << "with tolerance of " << (eps_factor) * (value)                   \
+    << " (" << (eps_factor)*100. << "%), "                              \
+    << "but delta was " << ((entry).getfn< var_type >(var_name) - (value)) \
+    << " (" << (((entry).getfn< var_type >(var_name) - (value))         \
+               /                                                        \
+               ((value) > 1.e-5 || value < -1.e-5 ? value : 1.e-5)*100.) \
+    << "%)"
+
+#define CHECK_RESULT_VALUE(entry, var_type, var_name, relationship, value) \
+    _CHECK_RESULT_VALUE(entry, GetAs, var_type, var_name, relationship, value)
+
+#define CHECK_COUNTER_VALUE(entry, var_type, var_name, relationship, value) \
+    _CHECK_RESULT_VALUE(entry, GetCounterAs, var_type, var_name, relationship, value)
+
+#define CHECK_FLOAT_RESULT_VALUE(entry, var_name, relationship, value, eps_factor) \
+    _CHECK_FLOAT_RESULT_VALUE(entry, GetAs, double, var_name, relationship, value, eps_factor)
+
+#define CHECK_FLOAT_COUNTER_VALUE(entry, var_name, relationship, value, eps_factor) \
+    _CHECK_FLOAT_RESULT_VALUE(entry, GetCounterAs, double, var_name, relationship, value, eps_factor)
+
+// clang-format on
+
+// ========================================================================= //
+// --------------------------- Misc Utilities ------------------------------ //
+// ========================================================================= //
+
+namespace {
+
+const char* const dec_re = "[0-9]*[.]?[0-9]+([eE][-+][0-9]+)?";
+
+}  //  end namespace
+
+#endif  // TEST_OUTPUT_TEST_H
diff --git a/src/third_party/google_benchmark/test/output_test_helper.cc b/src/third_party/google_benchmark/test/output_test_helper.cc
new file mode 100644
index 0000000..bdb34c8
--- /dev/null
+++ b/src/third_party/google_benchmark/test/output_test_helper.cc
@@ -0,0 +1,515 @@
+#include <cstdio>
+#include <cstring>
+#include <fstream>
+#include <iostream>
+#include <map>
+#include <memory>
+#include <random>
+#include <sstream>
+#include <streambuf>
+
+#include "../src/benchmark_api_internal.h"
+#include "../src/check.h"  // NOTE: check.h is for internal use only!
+#include "../src/re.h"     // NOTE: re.h is for internal use only
+#include "output_test.h"
+
+// ========================================================================= //
+// ------------------------------ Internals -------------------------------- //
+// ========================================================================= //
+namespace internal {
+namespace {
+
+using TestCaseList = std::vector<TestCase>;
+
+// Use a vector because the order in which elements are added matters during
+// iteration. std::map/unordered_map don't guarantee that.
+// For example:
+//  SetSubstitutions({{"%HelloWorld", "Hello"}, {"%Hello", "Hi"}});
+//     Substitute("%HelloWorld") // Always expands to Hello.
+using SubMap = std::vector<std::pair<std::string, std::string>>;
+
+TestCaseList& GetTestCaseList(TestCaseID ID) {
+  // Uses function-local statics to ensure initialization occurs
+  // before first use.
+  static TestCaseList lists[TC_NumID];
+  return lists[ID];
+}
+
+SubMap& GetSubstitutions() {
+  // Don't use 'dec_re' from header because it may not yet be initialized.
+  // clang-format off
+  static std::string safe_dec_re = "[0-9]*[.]?[0-9]+([eE][-+][0-9]+)?";
+  static std::string time_re = "([0-9]+[.])?[0-9]+";
+  static SubMap map = {
+      {"%float", "[0-9]*[.]?[0-9]+([eE][-+][0-9]+)?"},
+      // human-readable float
+      {"%hrfloat", "[0-9]*[.]?[0-9]+([eE][-+][0-9]+)?[kMGTPEZYmunpfazy]?"},
+      {"%int", "[ ]*[0-9]+"},
+      {" %s ", "[ ]+"},
+      {"%time", "[ ]*" + time_re + "[ ]+ns"},
+      {"%console_report", "[ ]*" + time_re + "[ ]+ns [ ]*" + time_re + "[ ]+ns [ ]*[0-9]+"},
+      {"%console_time_only_report", "[ ]*" + time_re + "[ ]+ns [ ]*" + time_re + "[ ]+ns"},
+      {"%console_us_report", "[ ]*" + time_re + "[ ]+us [ ]*" + time_re + "[ ]+us [ ]*[0-9]+"},
+      {"%console_us_time_only_report", "[ ]*" + time_re + "[ ]+us [ ]*" + time_re + "[ ]+us"},
+      {"%csv_header",
+       "name,iterations,real_time,cpu_time,time_unit,bytes_per_second,"
+       "items_per_second,label,error_occurred,error_message"},
+      {"%csv_report", "[0-9]+," + safe_dec_re + "," + safe_dec_re + ",ns,,,,,"},
+      {"%csv_us_report", "[0-9]+," + safe_dec_re + "," + safe_dec_re + ",us,,,,,"},
+      {"%csv_bytes_report",
+       "[0-9]+," + safe_dec_re + "," + safe_dec_re + ",ns," + safe_dec_re + ",,,,"},
+      {"%csv_items_report",
+       "[0-9]+," + safe_dec_re + "," + safe_dec_re + ",ns,," + safe_dec_re + ",,,"},
+      {"%csv_bytes_items_report",
+       "[0-9]+," + safe_dec_re + "," + safe_dec_re + ",ns," + safe_dec_re +
+       "," + safe_dec_re + ",,,"},
+      {"%csv_label_report_begin", "[0-9]+," + safe_dec_re + "," + safe_dec_re + ",ns,,,"},
+      {"%csv_label_report_end", ",,"}};
+  // clang-format on
+  return map;
+}
+
+std::string PerformSubstitutions(std::string source) {
+  SubMap const& subs = GetSubstitutions();
+  using SizeT = std::string::size_type;
+  for (auto const& KV : subs) {
+    SizeT pos;
+    SizeT next_start = 0;
+    while ((pos = source.find(KV.first, next_start)) != std::string::npos) {
+      next_start = pos + KV.second.size();
+      source.replace(pos, KV.first.size(), KV.second);
+    }
+  }
+  return source;
+}
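+
+// For illustration: given the table above, a test pattern such as
+// "^BM_basic %console_report$" is rewritten by PerformSubstitutions() into a
+// plain regex before it is handed to benchmark::Regex; e.g. "%int" becomes
+// "[ ]*[0-9]+" and "%time" becomes "[ ]*([0-9]+[.])?[0-9]+[ ]+ns".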
+
+void CheckCase(std::stringstream& remaining_output, TestCase const& TC,
+               TestCaseList const& not_checks) {
+  std::string first_line;
+  bool on_first = true;
+  std::string line;
+  while (remaining_output.eof() == false) {
+    CHECK(remaining_output.good());
+    std::getline(remaining_output, line);
+    if (on_first) {
+      first_line = line;
+      on_first = false;
+    }
+    for (const auto& NC : not_checks) {
+      CHECK(!NC.regex->Match(line))
+          << "Unexpected match for line \"" << line << "\" for MR_Not regex \""
+          << NC.regex_str << "\""
+          << "\n    actual regex string \"" << TC.substituted_regex << "\""
+          << "\n    started matching near: " << first_line;
+    }
+    if (TC.regex->Match(line)) return;
+    CHECK(TC.match_rule != MR_Next)
+        << "Expected line \"" << line << "\" to match regex \"" << TC.regex_str
+        << "\""
+        << "\n    actual regex string \"" << TC.substituted_regex << "\""
+        << "\n    started matching near: " << first_line;
+  }
+  CHECK(remaining_output.eof() == false)
+      << "End of output reached before match for regex \"" << TC.regex_str
+      << "\" was found"
+      << "\n    actual regex string \"" << TC.substituted_regex << "\""
+      << "\n    started matching near: " << first_line;
+}
+
+void CheckCases(TestCaseList const& checks, std::stringstream& output) {
+  std::vector<TestCase> not_checks;
+  for (size_t i = 0; i < checks.size(); ++i) {
+    const auto& TC = checks[i];
+    if (TC.match_rule == MR_Not) {
+      not_checks.push_back(TC);
+      continue;
+    }
+    CheckCase(output, TC, not_checks);
+    not_checks.clear();
+  }
+}
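+
+// Matching semantics, illustrated with hypothetical patterns: for the check
+// list { {"^A$"}, {"^B$", MR_Not}, {"^C$", MR_Next} }, CheckCases() first
+// scans the output for a line matching ^A$, then requires the very next line
+// to match ^C$ (MR_Next); the accumulated MR_Not pattern ^B$ must not match
+// any line examined along the way.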
+
+class TestReporter : public benchmark::BenchmarkReporter {
+ public:
+  TestReporter(std::vector<benchmark::BenchmarkReporter*> reps)
+      : reporters_(reps) {}
+
+  virtual bool ReportContext(const Context& context) {
+    bool last_ret = false;
+    bool first = true;
+    for (auto rep : reporters_) {
+      bool new_ret = rep->ReportContext(context);
+      CHECK(first || new_ret == last_ret)
+          << "Reports return different values for ReportContext";
+      first = false;
+      last_ret = new_ret;
+    }
+    (void)first;
+    return last_ret;
+  }
+
+  void ReportRuns(const std::vector<Run>& report) {
+    for (auto rep : reporters_) rep->ReportRuns(report);
+  }
+  void Finalize() {
+    for (auto rep : reporters_) rep->Finalize();
+  }
+
+ private:
+  std::vector<benchmark::BenchmarkReporter*> reporters_;
+};
+}  // namespace
+
+}  // end namespace internal
+
+// ========================================================================= //
+// -------------------------- Results checking ----------------------------- //
+// ========================================================================= //
+
+namespace internal {
+
+// Utility class to manage subscribers for checking benchmark results.
+// It works by parsing the CSV output to read the results.
+class ResultsChecker {
+ public:
+  struct PatternAndFn : public TestCase {  // reusing TestCase for its regexes
+    PatternAndFn(const std::string& rx, ResultsCheckFn fn_)
+        : TestCase(rx), fn(fn_) {}
+    ResultsCheckFn fn;
+  };
+
+  std::vector<PatternAndFn> check_patterns;
+  std::vector<Results> results;
+  std::vector<std::string> field_names;
+
+  void Add(const std::string& entry_pattern, ResultsCheckFn fn);
+
+  void CheckResults(std::stringstream& output);
+
+ private:
+  void SetHeader_(const std::string& csv_header);
+  void SetValues_(const std::string& entry_csv_line);
+
+  std::vector<std::string> SplitCsv_(const std::string& line);
+};
+
+// store the static ResultsChecker in a function to prevent initialization
+// order problems
+ResultsChecker& GetResultsChecker() {
+  static ResultsChecker rc;
+  return rc;
+}
+
+// add a results checker for a benchmark
+void ResultsChecker::Add(const std::string& entry_pattern, ResultsCheckFn fn) {
+  check_patterns.emplace_back(entry_pattern, fn);
+}
+
+// check the results of all subscribed benchmarks
+void ResultsChecker::CheckResults(std::stringstream& output) {
+  // first reset the stream to the start
+  {
+    auto start = std::stringstream::pos_type(0);
+    // clear before calling tellg()
+    output.clear();
+    // seek to zero only when needed
+    if (output.tellg() > start) output.seekg(start);
+    // and just in case
+    output.clear();
+  }
+  // now go over every line and publish it to the ResultsChecker
+  std::string line;
+  bool on_first = true;
+  while (output.eof() == false) {
+    CHECK(output.good());
+    std::getline(output, line);
+    if (on_first) {
+      SetHeader_(line);  // the first line is the CSV header
+      on_first = false;
+      continue;
+    }
+    SetValues_(line);
+  }
+  // finally we can call the subscribed check functions
+  for (const auto& p : check_patterns) {
+    VLOG(2) << "--------------------------------\n";
+    VLOG(2) << "checking for benchmarks matching " << p.regex_str << "...\n";
+    for (const auto& r : results) {
+      if (!p.regex->Match(r.name)) {
+        VLOG(2) << p.regex_str << " is not matched by " << r.name << "\n";
+        continue;
+      } else {
+        VLOG(2) << p.regex_str << " is matched by " << r.name << "\n";
+      }
+      VLOG(1) << "Checking results of " << r.name << ": ... \n";
+      p.fn(r);
+      VLOG(1) << "Checking results of " << r.name << ": OK.\n";
+    }
+  }
+}
+
+// record the field names from the CSV header
+void ResultsChecker::SetHeader_(const std::string& csv_header) {
+  field_names = SplitCsv_(csv_header);
+}
+
+// set the values for a benchmark
+void ResultsChecker::SetValues_(const std::string& entry_csv_line) {
+  if (entry_csv_line.empty()) return;  // some lines are empty
+  CHECK(!field_names.empty());
+  auto vals = SplitCsv_(entry_csv_line);
+  CHECK_EQ(vals.size(), field_names.size());
+  results.emplace_back(vals[0]);  // vals[0] is the benchmark name
+  auto& entry = results.back();
+  for (size_t i = 1, e = vals.size(); i < e; ++i) {
+    entry.values[field_names[i]] = vals[i];
+  }
+}
+
+// a quick'n'dirty csv splitter (eliminating quotes)
+std::vector<std::string> ResultsChecker::SplitCsv_(const std::string& line) {
+  std::vector<std::string> out;
+  if (line.empty()) return out;
+  if (!field_names.empty()) out.reserve(field_names.size());
+  size_t prev = 0, pos = line.find_first_of(','), curr = pos;
+  while (pos != line.npos) {
+    CHECK(curr > 0);
+    if (line[prev] == '"') ++prev;
+    if (line[curr - 1] == '"') --curr;
+    out.push_back(line.substr(prev, curr - prev));
+    prev = pos + 1;
+    pos = line.find_first_of(',', pos + 1);
+    curr = pos;
+  }
+  curr = line.size();
+  if (line[prev] == '"') ++prev;
+  if (line[curr - 1] == '"') --curr;
+  out.push_back(line.substr(prev, curr - prev));
+  return out;
+}
+
+}  // end namespace internal
+
+size_t AddChecker(const char* bm_name, ResultsCheckFn fn) {
+  auto& rc = internal::GetResultsChecker();
+  rc.Add(bm_name, fn);
+  return rc.results.size();
+}
+
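+// Parses the thread count out of the benchmark name, e.g. a hypothetical
+// "BM_Example/threads:4" yields 4; names without "/threads:" default to 1.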
+int Results::NumThreads() const {
+  auto pos = name.find("/threads:");
+  if (pos == name.npos) return 1;
+  auto end = name.find('/', pos + 9);
+  std::stringstream ss;
+  ss << name.substr(pos + 9, end - (pos + 9));
+  int num = 1;
+  ss >> num;
+  CHECK(!ss.fail());
+  return num;
+}
+
+double Results::NumIterations() const {
+  return GetAs<double>("iterations");
+}
+
+double Results::GetTime(BenchmarkTime which) const {
+  CHECK(which == kCpuTime || which == kRealTime);
+  const char* which_str = which == kCpuTime ? "cpu_time" : "real_time";
+  double val = GetAs<double>(which_str);
+  auto unit = Get("time_unit");
+  CHECK(unit);
+  if (*unit == "ns") {
+    return val * 1.e-9;
+  } else if (*unit == "us") {
+    return val * 1.e-6;
+  } else if (*unit == "ms") {
+    return val * 1.e-3;
+  } else if (*unit == "s") {
+    return val;
+  } else {
+    CHECK(1 == 0) << "unknown time unit: " << *unit;
+    return 0;
+  }
+}
+
+// ========================================================================= //
+// -------------------------- Public API Definitions------------------------ //
+// ========================================================================= //
+
+TestCase::TestCase(std::string re, int rule)
+    : regex_str(std::move(re)),
+      match_rule(rule),
+      substituted_regex(internal::PerformSubstitutions(regex_str)),
+      regex(std::make_shared<benchmark::Regex>()) {
+  std::string err_str;
+  regex->Init(substituted_regex, &err_str);
+  CHECK(err_str.empty()) << "Could not construct regex \"" << substituted_regex
+                         << "\""
+                         << "\n    originally \"" << regex_str << "\""
+                         << "\n    got error: " << err_str;
+}
+
+int AddCases(TestCaseID ID, std::initializer_list<TestCase> il) {
+  auto& L = internal::GetTestCaseList(ID);
+  L.insert(L.end(), il);
+  return 0;
+}
+
+int SetSubstitutions(
+    std::initializer_list<std::pair<std::string, std::string>> il) {
+  auto& subs = internal::GetSubstitutions();
+  for (auto KV : il) {
+    bool exists = false;
+    KV.second = internal::PerformSubstitutions(KV.second);
+    for (auto& EKV : subs) {
+      if (EKV.first == KV.first) {
+        EKV.second = std::move(KV.second);
+        exists = true;
+        break;
+      }
+    }
+    if (!exists) subs.push_back(std::move(KV));
+  }
+  return 0;
+}
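+
+// Note: because each value is itself run through PerformSubstitutions() above,
+// a new substitution may be defined in terms of existing tokens; for example,
+// reporter_output_test.cc defines "%bigOStr" in terms of "%float" via
+// SET_SUBSTITUTIONS.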
+
+// Disable deprecated warnings temporarily because we need to reference
+// CSVReporter but don't want to trigger -Werror=-Wdeprecated
+#ifdef __GNUC__
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wdeprecated"
+#endif
+void RunOutputTests(int argc, char* argv[]) {
+  using internal::GetTestCaseList;
+  benchmark::Initialize(&argc, argv);
+  auto options = benchmark::internal::GetOutputOptions(/*force_no_color*/ true);
+  benchmark::ConsoleReporter CR(options);
+  benchmark::JSONReporter JR;
+  benchmark::CSVReporter CSVR;
+  struct ReporterTest {
+    const char* name;
+    std::vector<TestCase>& output_cases;
+    std::vector<TestCase>& error_cases;
+    benchmark::BenchmarkReporter& reporter;
+    std::stringstream out_stream;
+    std::stringstream err_stream;
+
+    ReporterTest(const char* n, std::vector<TestCase>& out_tc,
+                 std::vector<TestCase>& err_tc,
+                 benchmark::BenchmarkReporter& br)
+        : name(n), output_cases(out_tc), error_cases(err_tc), reporter(br) {
+      reporter.SetOutputStream(&out_stream);
+      reporter.SetErrorStream(&err_stream);
+    }
+  } TestCases[] = {
+      {"ConsoleReporter", GetTestCaseList(TC_ConsoleOut),
+       GetTestCaseList(TC_ConsoleErr), CR},
+      {"JSONReporter", GetTestCaseList(TC_JSONOut), GetTestCaseList(TC_JSONErr),
+       JR},
+      {"CSVReporter", GetTestCaseList(TC_CSVOut), GetTestCaseList(TC_CSVErr),
+       CSVR},
+  };
+
+  // Create the test reporter and run the benchmarks.
+  std::cout << "Running benchmarks...\n";
+  internal::TestReporter test_rep({&CR, &JR, &CSVR});
+  benchmark::RunSpecifiedBenchmarks(&test_rep);
+
+  for (auto& rep_test : TestCases) {
+    std::string msg = std::string("\nTesting ") + rep_test.name + " Output\n";
+    std::string banner(msg.size() - 1, '-');
+    std::cout << banner << msg << banner << "\n";
+
+    std::cerr << rep_test.err_stream.str();
+    std::cout << rep_test.out_stream.str();
+
+    internal::CheckCases(rep_test.error_cases, rep_test.err_stream);
+    internal::CheckCases(rep_test.output_cases, rep_test.out_stream);
+
+    std::cout << "\n";
+  }
+
+  // now that we know the output is as expected, we can dispatch
+  // the result checks to the subscribed checkers.
+  auto& csv = TestCases[2];
+  // would use == but gcc spits a warning
+  CHECK(std::strcmp(csv.name, "CSVReporter") == 0);
+  internal::GetResultsChecker().CheckResults(csv.out_stream);
+}
+
+#ifdef __GNUC__
+#pragma GCC diagnostic pop
+#endif
+
+int SubstrCnt(const std::string& haystack, const std::string& pat) {
+  if (pat.length() == 0) return 0;
+  int count = 0;
+  for (size_t offset = haystack.find(pat); offset != std::string::npos;
+       offset = haystack.find(pat, offset + pat.length()))
+    ++count;
+  return count;
+}
+
+static char ToHex(int ch) {
+  return ch < 10 ? static_cast<char>('0' + ch)
+                 : static_cast<char>('a' + (ch - 10));
+}
+
+static char RandomHexChar() {
+  static std::mt19937 rd{std::random_device{}()};
+  static std::uniform_int_distribution<int> mrand{0, 15};
+  return ToHex(mrand(rd));
+}
+
+static std::string GetRandomFileName() {
+  std::string model = "test.%%%%%%";
+  for (auto & ch :  model) {
+    if (ch == '%')
+      ch = RandomHexChar();
+  }
+  return model;
+}
+
+static bool FileExists(std::string const& name) {
+  std::ifstream in(name.c_str());
+  return in.good();
+}
+
+static std::string GetTempFileName() {
+  // This function attempts to avoid race conditions where two tests
+  // create the same file at the same time. However, it still introduces races
+  // similar to tmpnam.
+  int retries = 3;
+  while (--retries) {
+    std::string name = GetRandomFileName();
+    if (!FileExists(name))
+      return name;
+  }
+  std::cerr << "Failed to create unique temporary file name" << std::endl;
+  std::abort();
+}
+
+std::string GetFileReporterOutput(int argc, char* argv[]) {
+  std::vector<char*> new_argv(argv, argv + argc);
+  assert(static_cast<decltype(new_argv)::size_type>(argc) == new_argv.size());
+
+  std::string tmp_file_name = GetTempFileName();
+  std::cout << "Will be using this as the tmp file: " << tmp_file_name << '\n';
+
+  std::string tmp = "--benchmark_out=";
+  tmp += tmp_file_name;
+  new_argv.emplace_back(const_cast<char*>(tmp.c_str()));
+
+  argc = int(new_argv.size());
+
+  benchmark::Initialize(&argc, new_argv.data());
+  benchmark::RunSpecifiedBenchmarks();
+
+  // Read the output back from the file, and delete the file.
+  std::ifstream tmp_stream(tmp_file_name);
+  std::string output = std::string((std::istreambuf_iterator<char>(tmp_stream)),
+                                   std::istreambuf_iterator<char>());
+  std::remove(tmp_file_name.c_str());
+
+  return output;
+}
diff --git a/src/third_party/google_benchmark/test/register_benchmark_test.cc b/src/third_party/google_benchmark/test/register_benchmark_test.cc
new file mode 100644
index 0000000..3ac5b21
--- /dev/null
+++ b/src/third_party/google_benchmark/test/register_benchmark_test.cc
@@ -0,0 +1,184 @@
+
+#undef NDEBUG
+#include <cassert>
+#include <vector>
+
+#include "../src/check.h"  // NOTE: check.h is for internal use only!
+#include "benchmark/benchmark.h"
+
+namespace {
+
+class TestReporter : public benchmark::ConsoleReporter {
+ public:
+  virtual void ReportRuns(const std::vector<Run>& report) {
+    all_runs_.insert(all_runs_.end(), begin(report), end(report));
+    ConsoleReporter::ReportRuns(report);
+  }
+
+  std::vector<Run> all_runs_;
+};
+
+struct TestCase {
+  std::string name;
+  const char* label;
+  // Note: not explicit as we rely on it being converted through ADD_CASES.
+  TestCase(const char* xname) : TestCase(xname, nullptr) {}
+  TestCase(const char* xname, const char* xlabel)
+      : name(xname), label(xlabel) {}
+
+  typedef benchmark::BenchmarkReporter::Run Run;
+
+  void CheckRun(Run const& run) const {
+    // clang-format off
+    CHECK(name == run.benchmark_name()) << "expected " << name << " got "
+                                      << run.benchmark_name();
+    if (label) {
+      CHECK(run.report_label == label) << "expected " << label << " got "
+                                       << run.report_label;
+    } else {
+      CHECK(run.report_label == "");
+    }
+    // clang-format on
+  }
+};
+
+std::vector<TestCase> ExpectedResults;
+
+int AddCases(std::initializer_list<TestCase> const& v) {
+  for (auto N : v) {
+    ExpectedResults.push_back(N);
+  }
+  return 0;
+}
+
+#define CONCAT(x, y) CONCAT2(x, y)
+#define CONCAT2(x, y) x##y
+#define ADD_CASES(...) int CONCAT(dummy, __LINE__) = AddCases({__VA_ARGS__})
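+
+// For illustration: ADD_CASES({"BM_function"}) expands to roughly
+//   int dummy<LINE> = AddCases({{"BM_function"}});
+// so ExpectedResults is populated during static initialization.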
+
+}  // end namespace
+
+typedef benchmark::internal::Benchmark* ReturnVal;
+
+//----------------------------------------------------------------------------//
+// Test RegisterBenchmark with no additional arguments
+//----------------------------------------------------------------------------//
+void BM_function(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+}
+BENCHMARK(BM_function);
+ReturnVal dummy = benchmark::RegisterBenchmark(
+    "BM_function_manual_registration", BM_function);
+ADD_CASES({"BM_function"}, {"BM_function_manual_registration"});
+
+//----------------------------------------------------------------------------//
+// Test RegisterBenchmark with additional arguments
+// Note: GCC <= 4.8 do not support this form of RegisterBenchmark because they
+//       reject the variadic pack expansion of lambda captures.
+//----------------------------------------------------------------------------//
+#ifndef BENCHMARK_HAS_NO_VARIADIC_REGISTER_BENCHMARK
+
+void BM_extra_args(benchmark::State& st, const char* label) {
+  for (auto _ : st) {
+  }
+  st.SetLabel(label);
+}
+int RegisterFromFunction() {
+  std::pair<const char*, const char*> cases[] = {
+      {"test1", "One"}, {"test2", "Two"}, {"test3", "Three"}};
+  for (auto const& c : cases)
+    benchmark::RegisterBenchmark(c.first, &BM_extra_args, c.second);
+  return 0;
+}
+int dummy2 = RegisterFromFunction();
+ADD_CASES({"test1", "One"}, {"test2", "Two"}, {"test3", "Three"});
+
+#endif  // BENCHMARK_HAS_NO_VARIADIC_REGISTER_BENCHMARK
+
+//----------------------------------------------------------------------------//
+// Test RegisterBenchmark with different callable types
+//----------------------------------------------------------------------------//
+
+struct CustomFixture {
+  void operator()(benchmark::State& st) {
+    for (auto _ : st) {
+    }
+  }
+};
+
+void TestRegistrationAtRuntime() {
+#ifdef BENCHMARK_HAS_CXX11
+  {
+    CustomFixture fx;
+    benchmark::RegisterBenchmark("custom_fixture", fx);
+    AddCases({"custom_fixture"});
+  }
+#endif
+#ifndef BENCHMARK_HAS_NO_VARIADIC_REGISTER_BENCHMARK
+  {
+    const char* x = "42";
+    auto capturing_lam = [=](benchmark::State& st) {
+      for (auto _ : st) {
+      }
+      st.SetLabel(x);
+    };
+    benchmark::RegisterBenchmark("lambda_benchmark", capturing_lam);
+    AddCases({{"lambda_benchmark", x}});
+  }
+#endif
+}
+
+// Test that all benchmarks, registered either during static init or at
+// runtime, are run and their results are passed to the reporter.
+void RunTestOne() {
+  TestRegistrationAtRuntime();
+
+  TestReporter test_reporter;
+  benchmark::RunSpecifiedBenchmarks(&test_reporter);
+
+  typedef benchmark::BenchmarkReporter::Run Run;
+  auto EB = ExpectedResults.begin();
+
+  for (Run const& run : test_reporter.all_runs_) {
+    assert(EB != ExpectedResults.end());
+    EB->CheckRun(run);
+    ++EB;
+  }
+  assert(EB == ExpectedResults.end());
+}
+
+// Test that ClearRegisteredBenchmarks() clears all previously registered
+// benchmarks.
+// Also test that new benchmarks can be registered and run afterwards.
+void RunTestTwo() {
+  assert(ExpectedResults.size() != 0 &&
+         "must have at least one registered benchmark");
+  ExpectedResults.clear();
+  benchmark::ClearRegisteredBenchmarks();
+
+  TestReporter test_reporter;
+  size_t num_ran = benchmark::RunSpecifiedBenchmarks(&test_reporter);
+  assert(num_ran == 0);
+  assert(test_reporter.all_runs_.begin() == test_reporter.all_runs_.end());
+
+  TestRegistrationAtRuntime();
+  num_ran = benchmark::RunSpecifiedBenchmarks(&test_reporter);
+  assert(num_ran == ExpectedResults.size());
+
+  typedef benchmark::BenchmarkReporter::Run Run;
+  auto EB = ExpectedResults.begin();
+
+  for (Run const& run : test_reporter.all_runs_) {
+    assert(EB != ExpectedResults.end());
+    EB->CheckRun(run);
+    ++EB;
+  }
+  assert(EB == ExpectedResults.end());
+}
+
+int main(int argc, char* argv[]) {
+  benchmark::Initialize(&argc, argv);
+
+  RunTestOne();
+  RunTestTwo();
+}
diff --git a/src/third_party/google_benchmark/test/report_aggregates_only_test.cc b/src/third_party/google_benchmark/test/report_aggregates_only_test.cc
new file mode 100644
index 0000000..9646b9b
--- /dev/null
+++ b/src/third_party/google_benchmark/test/report_aggregates_only_test.cc
@@ -0,0 +1,39 @@
+
+#undef NDEBUG
+#include <cstdio>
+#include <string>
+
+#include "benchmark/benchmark.h"
+#include "output_test.h"
+
+// Admittedly, this test is crude. We want to check what happens with the file
+// reporter in the presence of ReportAggregatesOnly().
+// We do not care about console output; the normal tests already check that.
+
+void BM_SummaryRepeat(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+}
+BENCHMARK(BM_SummaryRepeat)->Repetitions(3)->ReportAggregatesOnly();
+
+int main(int argc, char* argv[]) {
+  const std::string output = GetFileReporterOutput(argc, argv);
+
+  if (SubstrCnt(output, "\"name\": \"BM_SummaryRepeat/repeats:3") != 3 ||
+      SubstrCnt(output, "\"name\": \"BM_SummaryRepeat/repeats:3_mean\"") != 1 ||
+      SubstrCnt(output, "\"name\": \"BM_SummaryRepeat/repeats:3_median\"") !=
+          1 ||
+      SubstrCnt(output, "\"name\": \"BM_SummaryRepeat/repeats:3_stddev\"") !=
+          1) {
+    std::cout << "Precondition mismatch. Expected to only find three "
+                 "occurrences of \"BM_SummaryRepeat/repeats:3\" substring:\n"
+                 "\"name\": \"BM_SummaryRepeat/repeats:3_mean\", "
+                 "\"name\": \"BM_SummaryRepeat/repeats:3_median\", "
+                 "\"name\": \"BM_SummaryRepeat/repeats:3_stddev\"\nThe entire "
+                 "output:\n";
+    std::cout << output;
+    return 1;
+  }
+
+  return 0;
+}
diff --git a/src/third_party/google_benchmark/test/reporter_output_test.cc b/src/third_party/google_benchmark/test/reporter_output_test.cc
new file mode 100644
index 0000000..1a96b5f
--- /dev/null
+++ b/src/third_party/google_benchmark/test/reporter_output_test.cc
@@ -0,0 +1,748 @@
+
+#undef NDEBUG
+#include <utility>
+
+#include "benchmark/benchmark.h"
+#include "output_test.h"
+
+// ========================================================================= //
+// ---------------------- Testing Prologue Output -------------------------- //
+// ========================================================================= //
+
+ADD_CASES(TC_ConsoleOut, {{"^[-]+$", MR_Next},
+                          {"^Benchmark %s Time %s CPU %s Iterations$", MR_Next},
+                          {"^[-]+$", MR_Next}});
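+
+// The context checks below are registered at static-initialization time:
+// AddContextCases() returns an int solely so that its result can be assigned
+// to the file-scope "dummy_register" variable further down, forcing the
+// registration to run before main().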
+static int AddContextCases() {
+  AddCases(TC_ConsoleErr,
+           {
+               {"%int[-/]%int[-/]%int %int:%int:%int$", MR_Default},
+               {"Running .*/reporter_output_test(\\.exe)?$", MR_Next},
+               {"Run on \\(%int X %float MHz CPU s?\\)", MR_Next},
+           });
+  AddCases(TC_JSONOut,
+           {{"^\\{", MR_Default},
+            {"\"context\":", MR_Next},
+            {"\"date\": \"", MR_Next},
+            {"\"host_name\":", MR_Next},
+            {"\"executable\": \".*(/|\\\\)reporter_output_test(\\.exe)?\",",
+             MR_Next},
+            {"\"num_cpus\": %int,$", MR_Next},
+            {"\"mhz_per_cpu\": %float,$", MR_Next},
+            {"\"cpu_scaling_enabled\": ", MR_Next},
+            {"\"caches\": \\[$", MR_Next}});
+  auto const& Info = benchmark::CPUInfo::Get();
+  auto const& Caches = Info.caches;
+  if (!Caches.empty()) {
+    AddCases(TC_ConsoleErr, {{"CPU Caches:$", MR_Next}});
+  }
+  for (size_t I = 0; I < Caches.size(); ++I) {
+    std::string num_caches_str =
+        Caches[I].num_sharing != 0 ? " \\(x%int\\)$" : "$";
+    AddCases(TC_ConsoleErr,
+             {{"L%int (Data|Instruction|Unified) %int KiB" + num_caches_str,
+               MR_Next}});
+    AddCases(TC_JSONOut, {{"\\{$", MR_Next},
+                          {"\"type\": \"", MR_Next},
+                          {"\"level\": %int,$", MR_Next},
+                          {"\"size\": %int,$", MR_Next},
+                          {"\"num_sharing\": %int$", MR_Next},
+                          {"}[,]{0,1}$", MR_Next}});
+  }
+  AddCases(TC_JSONOut, {{"],$"}});
+  auto const& LoadAvg = Info.load_avg;
+  if (!LoadAvg.empty()) {
+    AddCases(TC_ConsoleErr,
+             {{"Load Average: (%float, ){0,2}%float$", MR_Next}});
+  }
+  AddCases(TC_JSONOut, {{"\"load_avg\": \\[(%float,?){0,3}],$", MR_Next}});
+  return 0;
+}
+int dummy_register = AddContextCases();
+ADD_CASES(TC_CSVOut, {{"%csv_header"}});
+
+// ========================================================================= //
+// ------------------------ Testing Basic Output --------------------------- //
+// ========================================================================= //
+
+void BM_basic(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+}
+BENCHMARK(BM_basic);
+
+ADD_CASES(TC_ConsoleOut, {{"^BM_basic %console_report$"}});
+ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_basic\",$"},
+                       {"\"run_name\": \"BM_basic\",$", MR_Next},
+                       {"\"run_type\": \"iteration\",$", MR_Next},
+                       {"\"repetitions\": 0,$", MR_Next},
+                       {"\"repetition_index\": 0,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next},
+                       {"\"iterations\": %int,$", MR_Next},
+                       {"\"real_time\": %float,$", MR_Next},
+                       {"\"cpu_time\": %float,$", MR_Next},
+                       {"\"time_unit\": \"ns\"$", MR_Next},
+                       {"}", MR_Next}});
+ADD_CASES(TC_CSVOut, {{"^\"BM_basic\",%csv_report$"}});
+
+// ========================================================================= //
+// ------------------------ Testing Bytes per Second Output ---------------- //
+// ========================================================================= //
+
+void BM_bytes_per_second(benchmark::State& state) {
+  for (auto _ : state) {
+    // This test requires a non-zero CPU time to avoid divide-by-zero
+    benchmark::DoNotOptimize(state.iterations());
+  }
+  state.SetBytesProcessed(1);
+}
+BENCHMARK(BM_bytes_per_second);
+
+ADD_CASES(TC_ConsoleOut, {{"^BM_bytes_per_second %console_report "
+                           "bytes_per_second=%float[kM]{0,1}/s$"}});
+ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_bytes_per_second\",$"},
+                       {"\"run_name\": \"BM_bytes_per_second\",$", MR_Next},
+                       {"\"run_type\": \"iteration\",$", MR_Next},
+                       {"\"repetitions\": 0,$", MR_Next},
+                       {"\"repetition_index\": 0,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next},
+                       {"\"iterations\": %int,$", MR_Next},
+                       {"\"real_time\": %float,$", MR_Next},
+                       {"\"cpu_time\": %float,$", MR_Next},
+                       {"\"time_unit\": \"ns\",$", MR_Next},
+                       {"\"bytes_per_second\": %float$", MR_Next},
+                       {"}", MR_Next}});
+ADD_CASES(TC_CSVOut, {{"^\"BM_bytes_per_second\",%csv_bytes_report$"}});
+
+// ========================================================================= //
+// ------------------------ Testing Items per Second Output ---------------- //
+// ========================================================================= //
+
+void BM_items_per_second(benchmark::State& state) {
+  for (auto _ : state) {
+    // This test requires a non-zero CPU time to avoid divide-by-zero
+    benchmark::DoNotOptimize(state.iterations());
+  }
+  state.SetItemsProcessed(1);
+}
+BENCHMARK(BM_items_per_second);
+
+ADD_CASES(TC_ConsoleOut, {{"^BM_items_per_second %console_report "
+                           "items_per_second=%float[kM]{0,1}/s$"}});
+ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_items_per_second\",$"},
+                       {"\"run_name\": \"BM_items_per_second\",$", MR_Next},
+                       {"\"run_type\": \"iteration\",$", MR_Next},
+                       {"\"repetitions\": 0,$", MR_Next},
+                       {"\"repetition_index\": 0,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next},
+                       {"\"iterations\": %int,$", MR_Next},
+                       {"\"real_time\": %float,$", MR_Next},
+                       {"\"cpu_time\": %float,$", MR_Next},
+                       {"\"time_unit\": \"ns\",$", MR_Next},
+                       {"\"items_per_second\": %float$", MR_Next},
+                       {"}", MR_Next}});
+ADD_CASES(TC_CSVOut, {{"^\"BM_items_per_second\",%csv_items_report$"}});
+
+// ========================================================================= //
+// ------------------------ Testing Label Output --------------------------- //
+// ========================================================================= //
+
+void BM_label(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+  state.SetLabel("some label");
+}
+BENCHMARK(BM_label);
+
+ADD_CASES(TC_ConsoleOut, {{"^BM_label %console_report some label$"}});
+ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_label\",$"},
+                       {"\"run_name\": \"BM_label\",$", MR_Next},
+                       {"\"run_type\": \"iteration\",$", MR_Next},
+                       {"\"repetitions\": 0,$", MR_Next},
+                       {"\"repetition_index\": 0,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next},
+                       {"\"iterations\": %int,$", MR_Next},
+                       {"\"real_time\": %float,$", MR_Next},
+                       {"\"cpu_time\": %float,$", MR_Next},
+                       {"\"time_unit\": \"ns\",$", MR_Next},
+                       {"\"label\": \"some label\"$", MR_Next},
+                       {"}", MR_Next}});
+ADD_CASES(TC_CSVOut, {{"^\"BM_label\",%csv_label_report_begin\"some "
+                       "label\"%csv_label_report_end$"}});
+
+// ========================================================================= //
+// ------------------------ Testing Error Output --------------------------- //
+// ========================================================================= //
+
+void BM_error(benchmark::State& state) {
+  state.SkipWithError("message");
+  for (auto _ : state) {
+  }
+}
+BENCHMARK(BM_error);
+ADD_CASES(TC_ConsoleOut, {{"^BM_error[ ]+ERROR OCCURRED: 'message'$"}});
+ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_error\",$"},
+                       {"\"run_name\": \"BM_error\",$", MR_Next},
+                       {"\"run_type\": \"iteration\",$", MR_Next},
+                       {"\"repetitions\": 0,$", MR_Next},
+                       {"\"repetition_index\": 0,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next},
+                       {"\"error_occurred\": true,$", MR_Next},
+                       {"\"error_message\": \"message\",$", MR_Next}});
+
+ADD_CASES(TC_CSVOut, {{"^\"BM_error\",,,,,,,,true,\"message\"$"}});
+
+// ========================================================================= //
+// ---------------------- Testing No Arg Name Output ----------------------- //
+// ========================================================================= //
+
+void BM_no_arg_name(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+}
+BENCHMARK(BM_no_arg_name)->Arg(3);
+ADD_CASES(TC_ConsoleOut, {{"^BM_no_arg_name/3 %console_report$"}});
+ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_no_arg_name/3\",$"},
+                       {"\"run_name\": \"BM_no_arg_name/3\",$", MR_Next},
+                       {"\"run_type\": \"iteration\",$", MR_Next},
+                       {"\"repetitions\": 0,$", MR_Next},
+                       {"\"repetition_index\": 0,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next}});
+ADD_CASES(TC_CSVOut, {{"^\"BM_no_arg_name/3\",%csv_report$"}});
+
+// ========================================================================= //
+// ------------------------ Testing Arg Name Output ----------------------- //
+// ========================================================================= //
+
+void BM_arg_name(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+}
+BENCHMARK(BM_arg_name)->ArgName("first")->Arg(3);
+ADD_CASES(TC_ConsoleOut, {{"^BM_arg_name/first:3 %console_report$"}});
+ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_arg_name/first:3\",$"},
+                       {"\"run_name\": \"BM_arg_name/first:3\",$", MR_Next},
+                       {"\"run_type\": \"iteration\",$", MR_Next},
+                       {"\"repetitions\": 0,$", MR_Next},
+                       {"\"repetition_index\": 0,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next}});
+ADD_CASES(TC_CSVOut, {{"^\"BM_arg_name/first:3\",%csv_report$"}});
+
+// ========================================================================= //
+// ------------------------ Testing Arg Names Output ----------------------- //
+// ========================================================================= //
+
+void BM_arg_names(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+}
+BENCHMARK(BM_arg_names)->Args({2, 5, 4})->ArgNames({"first", "", "third"});
+ADD_CASES(TC_ConsoleOut,
+          {{"^BM_arg_names/first:2/5/third:4 %console_report$"}});
+ADD_CASES(TC_JSONOut,
+          {{"\"name\": \"BM_arg_names/first:2/5/third:4\",$"},
+           {"\"run_name\": \"BM_arg_names/first:2/5/third:4\",$", MR_Next},
+           {"\"run_type\": \"iteration\",$", MR_Next},
+           {"\"repetitions\": 0,$", MR_Next},
+           {"\"repetition_index\": 0,$", MR_Next},
+           {"\"threads\": 1,$", MR_Next}});
+ADD_CASES(TC_CSVOut, {{"^\"BM_arg_names/first:2/5/third:4\",%csv_report$"}});
+
+// ========================================================================= //
+// ------------------------ Testing Big Args Output ------------------------ //
+// ========================================================================= //
+
+void BM_BigArgs(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+}
+BENCHMARK(BM_BigArgs)->RangeMultiplier(2)->Range(1U << 30U, 1U << 31U);
+ADD_CASES(TC_ConsoleOut, {{"^BM_BigArgs/1073741824 %console_report$"},
+                          {"^BM_BigArgs/2147483648 %console_report$"}});
+
+// ========================================================================= //
+// ----------------------- Testing Complexity Output ----------------------- //
+// ========================================================================= //
+
+void BM_Complexity_O1(benchmark::State& state) {
+  for (auto _ : state) {
+    // This test requires a non-zero CPU time to avoid divide-by-zero
+    benchmark::DoNotOptimize(state.iterations());
+  }
+  state.SetComplexityN(state.range(0));
+}
+BENCHMARK(BM_Complexity_O1)->Range(1, 1 << 18)->Complexity(benchmark::o1);
+SET_SUBSTITUTIONS({{"%bigOStr", "[ ]* %float \\([0-9]+\\)"},
+                   {"%RMS", "[ ]*[0-9]+ %"}});
+ADD_CASES(TC_ConsoleOut, {{"^BM_Complexity_O1_BigO %bigOStr %bigOStr[ ]*$"},
+                          {"^BM_Complexity_O1_RMS %RMS %RMS[ ]*$"}});
+
+// ========================================================================= //
+// ----------------------- Testing Aggregate Output ------------------------ //
+// ========================================================================= //
+
+// Test that non-aggregate data is printed by default
+void BM_Repeat(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+}
+// at least two repetitions are needed to produce any aggregate output
+BENCHMARK(BM_Repeat)->Repetitions(2);
+ADD_CASES(TC_ConsoleOut,
+          {{"^BM_Repeat/repeats:2 %console_report$"},
+           {"^BM_Repeat/repeats:2 %console_report$"},
+           {"^BM_Repeat/repeats:2_mean %console_time_only_report [ ]*2$"},
+           {"^BM_Repeat/repeats:2_median %console_time_only_report [ ]*2$"},
+           {"^BM_Repeat/repeats:2_stddev %console_time_only_report [ ]*2$"}});
+ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_Repeat/repeats:2\",$"},
+                       {"\"run_name\": \"BM_Repeat/repeats:2\"", MR_Next},
+                       {"\"run_type\": \"iteration\",$", MR_Next},
+                       {"\"repetitions\": 2,$", MR_Next},
+                       {"\"repetition_index\": 0,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next},
+                       {"\"name\": \"BM_Repeat/repeats:2\",$"},
+                       {"\"run_name\": \"BM_Repeat/repeats:2\",$", MR_Next},
+                       {"\"run_type\": \"iteration\",$", MR_Next},
+                       {"\"repetitions\": 2,$", MR_Next},
+                       {"\"repetition_index\": 1,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next},
+                       {"\"name\": \"BM_Repeat/repeats:2_mean\",$"},
+                       {"\"run_name\": \"BM_Repeat/repeats:2\",$", MR_Next},
+                       {"\"run_type\": \"aggregate\",$", MR_Next},
+                       {"\"repetitions\": 2,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next},
+                       {"\"aggregate_name\": \"mean\",$", MR_Next},
+                       {"\"iterations\": 2,$", MR_Next},
+                       {"\"name\": \"BM_Repeat/repeats:2_median\",$"},
+                       {"\"run_name\": \"BM_Repeat/repeats:2\",$", MR_Next},
+                       {"\"run_type\": \"aggregate\",$", MR_Next},
+                       {"\"repetitions\": 2,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next},
+                       {"\"aggregate_name\": \"median\",$", MR_Next},
+                       {"\"iterations\": 2,$", MR_Next},
+                       {"\"name\": \"BM_Repeat/repeats:2_stddev\",$"},
+                       {"\"run_name\": \"BM_Repeat/repeats:2\",$", MR_Next},
+                       {"\"run_type\": \"aggregate\",$", MR_Next},
+                       {"\"repetitions\": 2,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next},
+                       {"\"aggregate_name\": \"stddev\",$", MR_Next},
+                       {"\"iterations\": 2,$", MR_Next}});
+ADD_CASES(TC_CSVOut, {{"^\"BM_Repeat/repeats:2\",%csv_report$"},
+                      {"^\"BM_Repeat/repeats:2\",%csv_report$"},
+                      {"^\"BM_Repeat/repeats:2_mean\",%csv_report$"},
+                      {"^\"BM_Repeat/repeats:2_median\",%csv_report$"},
+                      {"^\"BM_Repeat/repeats:2_stddev\",%csv_report$"}});
+// but for two repetitions, mean and median are the same, so let's repeat...
+BENCHMARK(BM_Repeat)->Repetitions(3);
+ADD_CASES(TC_ConsoleOut,
+          {{"^BM_Repeat/repeats:3 %console_report$"},
+           {"^BM_Repeat/repeats:3 %console_report$"},
+           {"^BM_Repeat/repeats:3 %console_report$"},
+           {"^BM_Repeat/repeats:3_mean %console_time_only_report [ ]*3$"},
+           {"^BM_Repeat/repeats:3_median %console_time_only_report [ ]*3$"},
+           {"^BM_Repeat/repeats:3_stddev %console_time_only_report [ ]*3$"}});
+ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_Repeat/repeats:3\",$"},
+                       {"\"run_name\": \"BM_Repeat/repeats:3\",$", MR_Next},
+                       {"\"run_type\": \"iteration\",$", MR_Next},
+                       {"\"repetitions\": 3,$", MR_Next},
+                       {"\"repetition_index\": 0,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next},
+                       {"\"name\": \"BM_Repeat/repeats:3\",$"},
+                       {"\"run_name\": \"BM_Repeat/repeats:3\",$", MR_Next},
+                       {"\"run_type\": \"iteration\",$", MR_Next},
+                       {"\"repetitions\": 3,$", MR_Next},
+                       {"\"repetition_index\": 1,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next},
+                       {"\"name\": \"BM_Repeat/repeats:3\",$"},
+                       {"\"run_name\": \"BM_Repeat/repeats:3\",$", MR_Next},
+                       {"\"run_type\": \"iteration\",$", MR_Next},
+                       {"\"repetitions\": 3,$", MR_Next},
+                       {"\"repetition_index\": 2,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next},
+                       {"\"name\": \"BM_Repeat/repeats:3_mean\",$"},
+                       {"\"run_name\": \"BM_Repeat/repeats:3\",$", MR_Next},
+                       {"\"run_type\": \"aggregate\",$", MR_Next},
+                       {"\"repetitions\": 3,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next},
+                       {"\"aggregate_name\": \"mean\",$", MR_Next},
+                       {"\"iterations\": 3,$", MR_Next},
+                       {"\"name\": \"BM_Repeat/repeats:3_median\",$"},
+                       {"\"run_name\": \"BM_Repeat/repeats:3\",$", MR_Next},
+                       {"\"run_type\": \"aggregate\",$", MR_Next},
+                       {"\"repetitions\": 3,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next},
+                       {"\"aggregate_name\": \"median\",$", MR_Next},
+                       {"\"iterations\": 3,$", MR_Next},
+                       {"\"name\": \"BM_Repeat/repeats:3_stddev\",$"},
+                       {"\"run_name\": \"BM_Repeat/repeats:3\",$", MR_Next},
+                       {"\"run_type\": \"aggregate\",$", MR_Next},
+                       {"\"repetitions\": 3,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next},
+                       {"\"aggregate_name\": \"stddev\",$", MR_Next},
+                       {"\"iterations\": 3,$", MR_Next}});
+ADD_CASES(TC_CSVOut, {{"^\"BM_Repeat/repeats:3\",%csv_report$"},
+                      {"^\"BM_Repeat/repeats:3\",%csv_report$"},
+                      {"^\"BM_Repeat/repeats:3\",%csv_report$"},
+                      {"^\"BM_Repeat/repeats:3_mean\",%csv_report$"},
+                      {"^\"BM_Repeat/repeats:3_median\",%csv_report$"},
+                      {"^\"BM_Repeat/repeats:3_stddev\",%csv_report$"}});
+// median differs between even/odd number of repetitions, so just to be sure
+BENCHMARK(BM_Repeat)->Repetitions(4);
+ADD_CASES(TC_ConsoleOut,
+          {{"^BM_Repeat/repeats:4 %console_report$"},
+           {"^BM_Repeat/repeats:4 %console_report$"},
+           {"^BM_Repeat/repeats:4 %console_report$"},
+           {"^BM_Repeat/repeats:4 %console_report$"},
+           {"^BM_Repeat/repeats:4_mean %console_time_only_report [ ]*4$"},
+           {"^BM_Repeat/repeats:4_median %console_time_only_report [ ]*4$"},
+           {"^BM_Repeat/repeats:4_stddev %console_time_only_report [ ]*4$"}});
+ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_Repeat/repeats:4\",$"},
+                       {"\"run_name\": \"BM_Repeat/repeats:4\",$", MR_Next},
+                       {"\"run_type\": \"iteration\",$", MR_Next},
+                       {"\"repetitions\": 4,$", MR_Next},
+                       {"\"repetition_index\": 0,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next},
+                       {"\"name\": \"BM_Repeat/repeats:4\",$"},
+                       {"\"run_name\": \"BM_Repeat/repeats:4\",$", MR_Next},
+                       {"\"run_type\": \"iteration\",$", MR_Next},
+                       {"\"repetitions\": 4,$", MR_Next},
+                       {"\"repetition_index\": 1,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next},
+                       {"\"name\": \"BM_Repeat/repeats:4\",$"},
+                       {"\"run_name\": \"BM_Repeat/repeats:4\",$", MR_Next},
+                       {"\"run_type\": \"iteration\",$", MR_Next},
+                       {"\"repetitions\": 4,$", MR_Next},
+                       {"\"repetition_index\": 2,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next},
+                       {"\"name\": \"BM_Repeat/repeats:4\",$"},
+                       {"\"run_name\": \"BM_Repeat/repeats:4\",$", MR_Next},
+                       {"\"run_type\": \"iteration\",$", MR_Next},
+                       {"\"repetitions\": 4,$", MR_Next},
+                       {"\"repetition_index\": 3,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next},
+                       {"\"name\": \"BM_Repeat/repeats:4_mean\",$"},
+                       {"\"run_name\": \"BM_Repeat/repeats:4\",$", MR_Next},
+                       {"\"run_type\": \"aggregate\",$", MR_Next},
+                       {"\"repetitions\": 4,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next},
+                       {"\"aggregate_name\": \"mean\",$", MR_Next},
+                       {"\"iterations\": 4,$", MR_Next},
+                       {"\"name\": \"BM_Repeat/repeats:4_median\",$"},
+                       {"\"run_name\": \"BM_Repeat/repeats:4\",$", MR_Next},
+                       {"\"run_type\": \"aggregate\",$", MR_Next},
+                       {"\"repetitions\": 4,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next},
+                       {"\"aggregate_name\": \"median\",$", MR_Next},
+                       {"\"iterations\": 4,$", MR_Next},
+                       {"\"name\": \"BM_Repeat/repeats:4_stddev\",$"},
+                       {"\"run_name\": \"BM_Repeat/repeats:4\",$", MR_Next},
+                       {"\"run_type\": \"aggregate\",$", MR_Next},
+                       {"\"repetitions\": 4,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next},
+                       {"\"aggregate_name\": \"stddev\",$", MR_Next},
+                       {"\"iterations\": 4,$", MR_Next}});
+ADD_CASES(TC_CSVOut, {{"^\"BM_Repeat/repeats:4\",%csv_report$"},
+                      {"^\"BM_Repeat/repeats:4\",%csv_report$"},
+                      {"^\"BM_Repeat/repeats:4\",%csv_report$"},
+                      {"^\"BM_Repeat/repeats:4\",%csv_report$"},
+                      {"^\"BM_Repeat/repeats:4_mean\",%csv_report$"},
+                      {"^\"BM_Repeat/repeats:4_median\",%csv_report$"},
+                      {"^\"BM_Repeat/repeats:4_stddev\",%csv_report$"}});
+
+// Test that a non-repeated test still prints non-aggregate results even when
+// only-aggregate reports have been requested
+void BM_RepeatOnce(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+}
+BENCHMARK(BM_RepeatOnce)->Repetitions(1)->ReportAggregatesOnly();
+ADD_CASES(TC_ConsoleOut, {{"^BM_RepeatOnce/repeats:1 %console_report$"}});
+ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_RepeatOnce/repeats:1\",$"},
+                       {"\"run_name\": \"BM_RepeatOnce/repeats:1\",$", MR_Next},
+                       {"\"run_type\": \"iteration\",$", MR_Next},
+                       {"\"repetitions\": 1,$", MR_Next},
+                       {"\"repetition_index\": 0,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next}});
+ADD_CASES(TC_CSVOut, {{"^\"BM_RepeatOnce/repeats:1\",%csv_report$"}});
+
+// Test that non-aggregate data is not reported
+void BM_SummaryRepeat(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+}
+BENCHMARK(BM_SummaryRepeat)->Repetitions(3)->ReportAggregatesOnly();
+ADD_CASES(
+    TC_ConsoleOut,
+    {{".*BM_SummaryRepeat/repeats:3 ", MR_Not},
+     {"^BM_SummaryRepeat/repeats:3_mean %console_time_only_report [ ]*3$"},
+     {"^BM_SummaryRepeat/repeats:3_median %console_time_only_report [ ]*3$"},
+     {"^BM_SummaryRepeat/repeats:3_stddev %console_time_only_report [ ]*3$"}});
+ADD_CASES(TC_JSONOut,
+          {{".*BM_SummaryRepeat/repeats:3 ", MR_Not},
+           {"\"name\": \"BM_SummaryRepeat/repeats:3_mean\",$"},
+           {"\"run_name\": \"BM_SummaryRepeat/repeats:3\",$", MR_Next},
+           {"\"run_type\": \"aggregate\",$", MR_Next},
+           {"\"repetitions\": 3,$", MR_Next},
+           {"\"threads\": 1,$", MR_Next},
+           {"\"aggregate_name\": \"mean\",$", MR_Next},
+           {"\"iterations\": 3,$", MR_Next},
+           {"\"name\": \"BM_SummaryRepeat/repeats:3_median\",$"},
+           {"\"run_name\": \"BM_SummaryRepeat/repeats:3\",$", MR_Next},
+           {"\"run_type\": \"aggregate\",$", MR_Next},
+           {"\"repetitions\": 3,$", MR_Next},
+           {"\"threads\": 1,$", MR_Next},
+           {"\"aggregate_name\": \"median\",$", MR_Next},
+           {"\"iterations\": 3,$", MR_Next},
+           {"\"name\": \"BM_SummaryRepeat/repeats:3_stddev\",$"},
+           {"\"run_name\": \"BM_SummaryRepeat/repeats:3\",$", MR_Next},
+           {"\"run_type\": \"aggregate\",$", MR_Next},
+           {"\"repetitions\": 3,$", MR_Next},
+           {"\"threads\": 1,$", MR_Next},
+           {"\"aggregate_name\": \"stddev\",$", MR_Next},
+           {"\"iterations\": 3,$", MR_Next}});
+ADD_CASES(TC_CSVOut, {{".*BM_SummaryRepeat/repeats:3 ", MR_Not},
+                      {"^\"BM_SummaryRepeat/repeats:3_mean\",%csv_report$"},
+                      {"^\"BM_SummaryRepeat/repeats:3_median\",%csv_report$"},
+                      {"^\"BM_SummaryRepeat/repeats:3_stddev\",%csv_report$"}});
+
+// Test that non-aggregate data is not displayed.
+// NOTE: this test is somewhat weak. We only test the display output;
+//       we do not check that the file output still contains everything...
+void BM_SummaryDisplay(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+}
+BENCHMARK(BM_SummaryDisplay)->Repetitions(2)->DisplayAggregatesOnly();
+ADD_CASES(
+    TC_ConsoleOut,
+    {{".*BM_SummaryDisplay/repeats:2 ", MR_Not},
+     {"^BM_SummaryDisplay/repeats:2_mean %console_time_only_report [ ]*2$"},
+     {"^BM_SummaryDisplay/repeats:2_median %console_time_only_report [ ]*2$"},
+     {"^BM_SummaryDisplay/repeats:2_stddev %console_time_only_report [ ]*2$"}});
+ADD_CASES(TC_JSONOut,
+          {{".*BM_SummaryDisplay/repeats:2 ", MR_Not},
+           {"\"name\": \"BM_SummaryDisplay/repeats:2_mean\",$"},
+           {"\"run_name\": \"BM_SummaryDisplay/repeats:2\",$", MR_Next},
+           {"\"run_type\": \"aggregate\",$", MR_Next},
+           {"\"repetitions\": 2,$", MR_Next},
+           {"\"threads\": 1,$", MR_Next},
+           {"\"aggregate_name\": \"mean\",$", MR_Next},
+           {"\"iterations\": 2,$", MR_Next},
+           {"\"name\": \"BM_SummaryDisplay/repeats:2_median\",$"},
+           {"\"run_name\": \"BM_SummaryDisplay/repeats:2\",$", MR_Next},
+           {"\"run_type\": \"aggregate\",$", MR_Next},
+           {"\"repetitions\": 2,$", MR_Next},
+           {"\"threads\": 1,$", MR_Next},
+           {"\"aggregate_name\": \"median\",$", MR_Next},
+           {"\"iterations\": 2,$", MR_Next},
+           {"\"name\": \"BM_SummaryDisplay/repeats:2_stddev\",$"},
+           {"\"run_name\": \"BM_SummaryDisplay/repeats:2\",$", MR_Next},
+           {"\"run_type\": \"aggregate\",$", MR_Next},
+           {"\"repetitions\": 2,$", MR_Next},
+           {"\"threads\": 1,$", MR_Next},
+           {"\"aggregate_name\": \"stddev\",$", MR_Next},
+           {"\"iterations\": 2,$", MR_Next}});
+ADD_CASES(TC_CSVOut,
+          {{".*BM_SummaryDisplay/repeats:2 ", MR_Not},
+           {"^\"BM_SummaryDisplay/repeats:2_mean\",%csv_report$"},
+           {"^\"BM_SummaryDisplay/repeats:2_median\",%csv_report$"},
+           {"^\"BM_SummaryDisplay/repeats:2_stddev\",%csv_report$"}});
+
+// Test repeats with custom time unit.
+void BM_RepeatTimeUnit(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+}
+BENCHMARK(BM_RepeatTimeUnit)
+    ->Repetitions(3)
+    ->ReportAggregatesOnly()
+    ->Unit(benchmark::kMicrosecond);
+ADD_CASES(
+    TC_ConsoleOut,
+    {{".*BM_RepeatTimeUnit/repeats:3 ", MR_Not},
+     {"^BM_RepeatTimeUnit/repeats:3_mean %console_us_time_only_report [ ]*3$"},
+     {"^BM_RepeatTimeUnit/repeats:3_median %console_us_time_only_report [ "
+      "]*3$"},
+     {"^BM_RepeatTimeUnit/repeats:3_stddev %console_us_time_only_report [ "
+      "]*3$"}});
+ADD_CASES(TC_JSONOut,
+          {{".*BM_RepeatTimeUnit/repeats:3 ", MR_Not},
+           {"\"name\": \"BM_RepeatTimeUnit/repeats:3_mean\",$"},
+           {"\"run_name\": \"BM_RepeatTimeUnit/repeats:3\",$", MR_Next},
+           {"\"run_type\": \"aggregate\",$", MR_Next},
+           {"\"repetitions\": 3,$", MR_Next},
+           {"\"threads\": 1,$", MR_Next},
+           {"\"aggregate_name\": \"mean\",$", MR_Next},
+           {"\"iterations\": 3,$", MR_Next},
+           {"\"time_unit\": \"us\",?$"},
+           {"\"name\": \"BM_RepeatTimeUnit/repeats:3_median\",$"},
+           {"\"run_name\": \"BM_RepeatTimeUnit/repeats:3\",$", MR_Next},
+           {"\"run_type\": \"aggregate\",$", MR_Next},
+           {"\"repetitions\": 3,$", MR_Next},
+           {"\"threads\": 1,$", MR_Next},
+           {"\"aggregate_name\": \"median\",$", MR_Next},
+           {"\"iterations\": 3,$", MR_Next},
+           {"\"time_unit\": \"us\",?$"},
+           {"\"name\": \"BM_RepeatTimeUnit/repeats:3_stddev\",$"},
+           {"\"run_name\": \"BM_RepeatTimeUnit/repeats:3\",$", MR_Next},
+           {"\"run_type\": \"aggregate\",$", MR_Next},
+           {"\"repetitions\": 3,$", MR_Next},
+           {"\"threads\": 1,$", MR_Next},
+           {"\"aggregate_name\": \"stddev\",$", MR_Next},
+           {"\"iterations\": 3,$", MR_Next},
+           {"\"time_unit\": \"us\",?$"}});
+ADD_CASES(TC_CSVOut,
+          {{".*BM_RepeatTimeUnit/repeats:3 ", MR_Not},
+           {"^\"BM_RepeatTimeUnit/repeats:3_mean\",%csv_us_report$"},
+           {"^\"BM_RepeatTimeUnit/repeats:3_median\",%csv_us_report$"},
+           {"^\"BM_RepeatTimeUnit/repeats:3_stddev\",%csv_us_report$"}});
+
+// ========================================================================= //
+// -------------------- Testing user-provided statistics ------------------- //
+// ========================================================================= //
+
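+// A trivial user-provided statistic: it simply reports the value from the
+// last repetition instead of aggregating across all of them.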
+const auto UserStatistics = [](const std::vector<double>& v) {
+  return v.back();
+};
+void BM_UserStats(benchmark::State& state) {
+  for (auto _ : state) {
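+    // 150 / 10e8 == 1.5e-7 s, i.e. a manual time of 150 ns per iteration.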
+    state.SetIterationTime(150 / 10e8);
+  }
+}
+// clang-format off
+BENCHMARK(BM_UserStats)
+  ->Repetitions(3)
+  ->Iterations(5)
+  ->UseManualTime()
+  ->ComputeStatistics("", UserStatistics);
+// clang-format on
+
+// Check that the user-provided statistic is calculated and reported after the
+// default ones. The empty string as its name is intentional: it would sort
+// before anything else.
+ADD_CASES(TC_ConsoleOut, {{"^BM_UserStats/iterations:5/repeats:3/manual_time [ "
+                           "]* 150 ns %time [ ]*5$"},
+                          {"^BM_UserStats/iterations:5/repeats:3/manual_time [ "
+                           "]* 150 ns %time [ ]*5$"},
+                          {"^BM_UserStats/iterations:5/repeats:3/manual_time [ "
+                           "]* 150 ns %time [ ]*5$"},
+                          {"^BM_UserStats/iterations:5/repeats:3/"
+                           "manual_time_mean [ ]* 150 ns %time [ ]*3$"},
+                          {"^BM_UserStats/iterations:5/repeats:3/"
+                           "manual_time_median [ ]* 150 ns %time [ ]*3$"},
+                          {"^BM_UserStats/iterations:5/repeats:3/"
+                           "manual_time_stddev [ ]* 0.000 ns %time [ ]*3$"},
+                          {"^BM_UserStats/iterations:5/repeats:3/manual_time_ "
+                           "[ ]* 150 ns %time [ ]*3$"}});
+ADD_CASES(
+    TC_JSONOut,
+    {{"\"name\": \"BM_UserStats/iterations:5/repeats:3/manual_time\",$"},
+     {"\"run_name\": \"BM_UserStats/iterations:5/repeats:3/manual_time\",$",
+      MR_Next},
+     {"\"run_type\": \"iteration\",$", MR_Next},
+     {"\"repetitions\": 3,$", MR_Next},
+     {"\"repetition_index\": 0,$", MR_Next},
+     {"\"threads\": 1,$", MR_Next},
+     {"\"iterations\": 5,$", MR_Next},
+     {"\"real_time\": 1\\.5(0)*e\\+(0)*2,$", MR_Next},
+     {"\"name\": \"BM_UserStats/iterations:5/repeats:3/manual_time\",$"},
+     {"\"run_name\": \"BM_UserStats/iterations:5/repeats:3/manual_time\",$",
+      MR_Next},
+     {"\"run_type\": \"iteration\",$", MR_Next},
+     {"\"repetitions\": 3,$", MR_Next},
+     {"\"repetition_index\": 1,$", MR_Next},
+     {"\"threads\": 1,$", MR_Next},
+     {"\"iterations\": 5,$", MR_Next},
+     {"\"real_time\": 1\\.5(0)*e\\+(0)*2,$", MR_Next},
+     {"\"name\": \"BM_UserStats/iterations:5/repeats:3/manual_time\",$"},
+     {"\"run_name\": \"BM_UserStats/iterations:5/repeats:3/manual_time\",$",
+      MR_Next},
+     {"\"run_type\": \"iteration\",$", MR_Next},
+     {"\"repetitions\": 3,$", MR_Next},
+     {"\"repetition_index\": 2,$", MR_Next},
+     {"\"threads\": 1,$", MR_Next},
+     {"\"iterations\": 5,$", MR_Next},
+     {"\"real_time\": 1\\.5(0)*e\\+(0)*2,$", MR_Next},
+     {"\"name\": \"BM_UserStats/iterations:5/repeats:3/manual_time_mean\",$"},
+     {"\"run_name\": \"BM_UserStats/iterations:5/repeats:3/manual_time\",$",
+      MR_Next},
+     {"\"run_type\": \"aggregate\",$", MR_Next},
+     {"\"repetitions\": 3,$", MR_Next},
+     {"\"threads\": 1,$", MR_Next},
+     {"\"aggregate_name\": \"mean\",$", MR_Next},
+     {"\"iterations\": 3,$", MR_Next},
+     {"\"real_time\": 1\\.5(0)*e\\+(0)*2,$", MR_Next},
+     {"\"name\": \"BM_UserStats/iterations:5/repeats:3/manual_time_median\",$"},
+     {"\"run_name\": \"BM_UserStats/iterations:5/repeats:3/manual_time\",$",
+      MR_Next},
+     {"\"run_type\": \"aggregate\",$", MR_Next},
+     {"\"repetitions\": 3,$", MR_Next},
+     {"\"threads\": 1,$", MR_Next},
+     {"\"aggregate_name\": \"median\",$", MR_Next},
+     {"\"iterations\": 3,$", MR_Next},
+     {"\"real_time\": 1\\.5(0)*e\\+(0)*2,$", MR_Next},
+     {"\"name\": \"BM_UserStats/iterations:5/repeats:3/manual_time_stddev\",$"},
+     {"\"run_name\": \"BM_UserStats/iterations:5/repeats:3/manual_time\",$",
+      MR_Next},
+     {"\"run_type\": \"aggregate\",$", MR_Next},
+     {"\"repetitions\": 3,$", MR_Next},
+     {"\"threads\": 1,$", MR_Next},
+     {"\"aggregate_name\": \"stddev\",$", MR_Next},
+     {"\"iterations\": 3,$", MR_Next},
+     {"\"real_time\": %float,$", MR_Next},
+     {"\"name\": \"BM_UserStats/iterations:5/repeats:3/manual_time_\",$"},
+     {"\"run_name\": \"BM_UserStats/iterations:5/repeats:3/manual_time\",$",
+      MR_Next},
+     {"\"run_type\": \"aggregate\",$", MR_Next},
+     {"\"repetitions\": 3,$", MR_Next},
+     {"\"threads\": 1,$", MR_Next},
+     {"\"aggregate_name\": \"\",$", MR_Next},
+     {"\"iterations\": 3,$", MR_Next},
+     {"\"real_time\": 1\\.5(0)*e\\+(0)*2,$", MR_Next}});
+ADD_CASES(
+    TC_CSVOut,
+    {{"^\"BM_UserStats/iterations:5/repeats:3/manual_time\",%csv_report$"},
+     {"^\"BM_UserStats/iterations:5/repeats:3/manual_time\",%csv_report$"},
+     {"^\"BM_UserStats/iterations:5/repeats:3/manual_time\",%csv_report$"},
+     {"^\"BM_UserStats/iterations:5/repeats:3/manual_time_mean\",%csv_report$"},
+     {"^\"BM_UserStats/iterations:5/repeats:3/"
+      "manual_time_median\",%csv_report$"},
+     {"^\"BM_UserStats/iterations:5/repeats:3/"
+      "manual_time_stddev\",%csv_report$"},
+     {"^\"BM_UserStats/iterations:5/repeats:3/manual_time_\",%csv_report$"}});
+
+// ========================================================================= //
+// ------------------------- Testing StrEscape JSON ------------------------ //
+// ========================================================================= //
+#if 0 // enable when csv testing code correctly handles multi-line fields
+void BM_JSON_Format(benchmark::State& state) {
+  state.SkipWithError("val\b\f\n\r\t\\\"with\"es,capes");
+  for (auto _ : state) {
+  }
+}
+BENCHMARK(BM_JSON_Format);
+ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_JSON_Format\",$"},
+                       {"\"run_name\": \"BM_JSON_Format\",$", MR_Next},
+                       {"\"run_type\": \"iteration\",$", MR_Next},
+                       {"\"repetitions\": 0,$", MR_Next},
+                       {"\"repetition_index\": 0,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next},
+                       {"\"error_occurred\": true,$", MR_Next},
+                       {R"("error_message": "val\\b\\f\\n\\r\\t\\\\\\"with\\"es,capes",$)", MR_Next}});
+#endif
+// ========================================================================= //
+// -------------------------- Testing CsvEscape ---------------------------- //
+// ========================================================================= //
+
+void BM_CSV_Format(benchmark::State& state) {
+  state.SkipWithError("\"freedom\"");
+  for (auto _ : state) {
+  }
+}
+BENCHMARK(BM_CSV_Format);
+ADD_CASES(TC_CSVOut, {{"^\"BM_CSV_Format\",,,,,,,,true,\"\"\"freedom\"\"\"$"}});
+
+// ========================================================================= //
+// --------------------------- TEST CASES END ------------------------------ //
+// ========================================================================= //
+
+int main(int argc, char* argv[]) { RunOutputTests(argc, argv); }
diff --git a/src/third_party/google_benchmark/test/skip_with_error_test.cc b/src/third_party/google_benchmark/test/skip_with_error_test.cc
new file mode 100644
index 0000000..0657977
--- /dev/null
+++ b/src/third_party/google_benchmark/test/skip_with_error_test.cc
@@ -0,0 +1,189 @@
+
+#undef NDEBUG
+#include <cassert>
+#include <vector>
+
+#include "../src/check.h"  // NOTE: check.h is for internal use only!
+#include "benchmark/benchmark.h"
+
+namespace {
+
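+// Console reporter that also records every Run it receives so that main()
+// can verify the runs against the ExpectedResults table below.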
+class TestReporter : public benchmark::ConsoleReporter {
+ public:
+  virtual bool ReportContext(const Context& context) {
+    return ConsoleReporter::ReportContext(context);
+  }
+
+  virtual void ReportRuns(const std::vector<Run>& report) {
+    all_runs_.insert(all_runs_.end(), begin(report), end(report));
+    ConsoleReporter::ReportRuns(report);
+  }
+
+  TestReporter() {}
+  virtual ~TestReporter() {}
+
+  mutable std::vector<Run> all_runs_;
+};
+
+struct TestCase {
+  std::string name;
+  bool error_occurred;
+  std::string error_message;
+
+  typedef benchmark::BenchmarkReporter::Run Run;
+
+  void CheckRun(Run const& run) const {
+    CHECK(name == run.benchmark_name())
+        << "expected " << name << " got " << run.benchmark_name();
+    CHECK(error_occurred == run.error_occurred);
+    CHECK(error_message == run.error_message);
+    if (error_occurred) {
+      // CHECK(run.iterations == 0);
+    } else {
+      CHECK(run.iterations != 0);
+    }
+  }
+};
+
+std::vector<TestCase> ExpectedResults;
+
+int AddCases(const char* base_name, std::initializer_list<TestCase> const& v) {
+  for (auto TC : v) {
+    TC.name = base_name + TC.name;
+    ExpectedResults.push_back(std::move(TC));
+  }
+  return 0;
+}
+
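+// ADD_CASES expands to a uniquely named dummy int (via __LINE__) so that each
+// invocation runs AddCases() during static initialization.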
+#define CONCAT(x, y) CONCAT2(x, y)
+#define CONCAT2(x, y) x##y
+#define ADD_CASES(...) int CONCAT(dummy, __LINE__) = AddCases(__VA_ARGS__)
+
+}  // end namespace
+
+void BM_error_before_running(benchmark::State& state) {
+  state.SkipWithError("error message");
+  while (state.KeepRunning()) {
+    assert(false);
+  }
+}
+BENCHMARK(BM_error_before_running);
+ADD_CASES("BM_error_before_running", {{"", true, "error message"}});
+
+void BM_error_before_running_batch(benchmark::State& state) {
+  state.SkipWithError("error message");
+  while (state.KeepRunningBatch(17)) {
+    assert(false);
+  }
+}
+BENCHMARK(BM_error_before_running_batch);
+ADD_CASES("BM_error_before_running_batch", {{"", true, "error message"}});
+
+void BM_error_before_running_range_for(benchmark::State& state) {
+  state.SkipWithError("error message");
+  for (auto _ : state) {
+    assert(false);
+  }
+}
+BENCHMARK(BM_error_before_running_range_for);
+ADD_CASES("BM_error_before_running_range_for", {{"", true, "error message"}});
+
+void BM_error_during_running(benchmark::State& state) {
+  bool first_iter = true;
+  while (state.KeepRunning()) {
+    if (state.range(0) == 1 && state.thread_index <= (state.threads / 2)) {
+      assert(first_iter);
+      first_iter = false;
+      state.SkipWithError("error message");
+    } else {
+      state.PauseTiming();
+      state.ResumeTiming();
+    }
+  }
+}
+BENCHMARK(BM_error_during_running)->Arg(1)->Arg(2)->ThreadRange(1, 8);
+ADD_CASES("BM_error_during_running", {{"/1/threads:1", true, "error message"},
+                                      {"/1/threads:2", true, "error message"},
+                                      {"/1/threads:4", true, "error message"},
+                                      {"/1/threads:8", true, "error message"},
+                                      {"/2/threads:1", false, ""},
+                                      {"/2/threads:2", false, ""},
+                                      {"/2/threads:4", false, ""},
+                                      {"/2/threads:8", false, ""}});
+
+void BM_error_during_running_ranged_for(benchmark::State& state) {
+  assert(state.max_iterations > 3 && "test requires at least a few iterations");
+  bool first_iter = true;
+  // NOTE: Users should not write the for loop explicitly.
+  for (auto It = state.begin(), End = state.end(); It != End; ++It) {
+    if (state.range(0) == 1) {
+      assert(first_iter);
+      first_iter = false;
+      state.SkipWithError("error message");
+      // Test the unfortunate but documented behavior that the ranged-for loop
+      // doesn't automatically terminate when SkipWithError is set.
+      assert(++It != End);
+      break;  // Required behavior
+    }
+  }
+}
+BENCHMARK(BM_error_during_running_ranged_for)->Arg(1)->Arg(2)->Iterations(5);
+ADD_CASES("BM_error_during_running_ranged_for",
+          {{"/1/iterations:5", true, "error message"},
+           {"/2/iterations:5", false, ""}});
+
+void BM_error_after_running(benchmark::State& state) {
+  for (auto _ : state) {
+    benchmark::DoNotOptimize(state.iterations());
+  }
+  if (state.thread_index <= (state.threads / 2))
+    state.SkipWithError("error message");
+}
+BENCHMARK(BM_error_after_running)->ThreadRange(1, 8);
+ADD_CASES("BM_error_after_running", {{"/threads:1", true, "error message"},
+                                     {"/threads:2", true, "error message"},
+                                     {"/threads:4", true, "error message"},
+                                     {"/threads:8", true, "error message"}});
+
+void BM_error_while_paused(benchmark::State& state) {
+  bool first_iter = true;
+  while (state.KeepRunning()) {
+    if (state.range(0) == 1 && state.thread_index <= (state.threads / 2)) {
+      assert(first_iter);
+      first_iter = false;
+      state.PauseTiming();
+      state.SkipWithError("error message");
+    } else {
+      state.PauseTiming();
+      state.ResumeTiming();
+    }
+  }
+}
+BENCHMARK(BM_error_while_paused)->Arg(1)->Arg(2)->ThreadRange(1, 8);
+ADD_CASES("BM_error_while_paused", {{"/1/threads:1", true, "error message"},
+                                    {"/1/threads:2", true, "error message"},
+                                    {"/1/threads:4", true, "error message"},
+                                    {"/1/threads:8", true, "error message"},
+                                    {"/2/threads:1", false, ""},
+                                    {"/2/threads:2", false, ""},
+                                    {"/2/threads:4", false, ""},
+                                    {"/2/threads:8", false, ""}});
+
+int main(int argc, char* argv[]) {
+  benchmark::Initialize(&argc, argv);
+
+  TestReporter test_reporter;
+  benchmark::RunSpecifiedBenchmarks(&test_reporter);
+
+  typedef benchmark::BenchmarkReporter::Run Run;
+  auto EB = ExpectedResults.begin();
+
+  for (Run const& run : test_reporter.all_runs_) {
+    assert(EB != ExpectedResults.end());
+    EB->CheckRun(run);
+    ++EB;
+  }
+  assert(EB == ExpectedResults.end());
+
+  return 0;
+}
diff --git a/src/third_party/google_benchmark/test/state_assembly_test.cc b/src/third_party/google_benchmark/test/state_assembly_test.cc
new file mode 100644
index 0000000..7ddbb3b
--- /dev/null
+++ b/src/third_party/google_benchmark/test/state_assembly_test.cc
@@ -0,0 +1,68 @@
+#include <benchmark/benchmark.h>
+
+#ifdef __clang__
+#pragma clang diagnostic ignored "-Wreturn-type"
+#endif
+
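+// This translation unit is compiled to assembly and verified against the
+// FileCheck-style CHECK lines below; it is not meant to be executed directly.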
+// clang-format off
+extern "C" {
+  extern int ExternInt;
+  benchmark::State& GetState();
+  void Fn();
+}
+// clang-format on
+
+using benchmark::State;
+
+// CHECK-LABEL: test_for_auto_loop:
+extern "C" int test_for_auto_loop() {
+  State& S = GetState();
+  int x = 42;
+  // CHECK: 	[[CALL:call(q)*]]	_ZN9benchmark5State16StartKeepRunningEv
+  // CHECK-NEXT: testq %rbx, %rbx
+  // CHECK-NEXT: je [[LOOP_END:.*]]
+
+  for (auto _ : S) {
+    // CHECK: .L[[LOOP_HEAD:[a-zA-Z0-9_]+]]:
+    // CHECK-GNU-NEXT: subq $1, %rbx
+    // CHECK-CLANG-NEXT: {{(addq \$1, %rax|incq %rax|addq \$-1, %rbx)}}
+    // CHECK-NEXT: jne .L[[LOOP_HEAD]]
+    benchmark::DoNotOptimize(x);
+  }
+  // CHECK: [[LOOP_END]]:
+  // CHECK: [[CALL]]	_ZN9benchmark5State17FinishKeepRunningEv
+
+  // CHECK: movl $101, %eax
+  // CHECK: ret
+  return 101;
+}
+
+// CHECK-LABEL: test_while_loop:
+extern "C" int test_while_loop() {
+  State& S = GetState();
+  int x = 42;
+
+  // CHECK: j{{(e|mp)}} .L[[LOOP_HEADER:[a-zA-Z0-9_]+]]
+  // CHECK-NEXT: .L[[LOOP_BODY:[a-zA-Z0-9_]+]]:
+  while (S.KeepRunning()) {
+    // CHECK-GNU-NEXT: subq $1, %[[IREG:[a-z]+]]
+    // CHECK-CLANG-NEXT: {{(addq \$-1,|decq)}} %[[IREG:[a-z]+]]
+    // CHECK: movq %[[IREG]], [[DEST:.*]]
+    benchmark::DoNotOptimize(x);
+  }
+  // CHECK-DAG: movq [[DEST]], %[[IREG]]
+  // CHECK-DAG: testq %[[IREG]], %[[IREG]]
+  // CHECK-DAG: jne .L[[LOOP_BODY]]
+  // CHECK-DAG: .L[[LOOP_HEADER]]:
+
+  // CHECK: cmpb $0
+  // CHECK-NEXT: jne .L[[LOOP_END:[a-zA-Z0-9_]+]]
+  // CHECK: [[CALL:call(q)*]] _ZN9benchmark5State16StartKeepRunningEv
+
+  // CHECK: .L[[LOOP_END]]:
+  // CHECK: [[CALL]] _ZN9benchmark5State17FinishKeepRunningEv
+
+  // CHECK: movl $101, %eax
+  // CHECK: ret
+  return 101;
+}
diff --git a/src/third_party/google_benchmark/test/statistics_gtest.cc b/src/third_party/google_benchmark/test/statistics_gtest.cc
new file mode 100644
index 0000000..99e3149
--- /dev/null
+++ b/src/third_party/google_benchmark/test/statistics_gtest.cc
@@ -0,0 +1,28 @@
+//===---------------------------------------------------------------------===//
+// statistics_test - Unit tests for src/statistics.cc
+//===---------------------------------------------------------------------===//
+
+#include "../src/statistics.h"
+#include "gtest/gtest.h"
+
+namespace {
+TEST(StatisticsTest, Mean) {
+  EXPECT_DOUBLE_EQ(benchmark::StatisticsMean({42, 42, 42, 42}), 42.0);
+  EXPECT_DOUBLE_EQ(benchmark::StatisticsMean({1, 2, 3, 4}), 2.5);
+  EXPECT_DOUBLE_EQ(benchmark::StatisticsMean({1, 2, 5, 10, 10, 14}), 7.0);
+}
+
+TEST(StatisticsTest, Median) {
+  EXPECT_DOUBLE_EQ(benchmark::StatisticsMedian({42, 42, 42, 42}), 42.0);
+  EXPECT_DOUBLE_EQ(benchmark::StatisticsMedian({1, 2, 3, 4}), 2.5);
+  EXPECT_DOUBLE_EQ(benchmark::StatisticsMedian({1, 2, 5, 10, 10}), 5.0);
+}
+
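+// StatisticsStdDev is expected to compute the sample standard deviation
+// (n - 1 denominator), so {1, 2, 3} yields exactly 1.0.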
+TEST(StatisticsTest, StdDev) {
+  EXPECT_DOUBLE_EQ(benchmark::StatisticsStdDev({101, 101, 101, 101}), 0.0);
+  EXPECT_DOUBLE_EQ(benchmark::StatisticsStdDev({1, 2, 3}), 1.0);
+  EXPECT_FLOAT_EQ(benchmark::StatisticsStdDev({1.5, 2.4, 3.3, 4.2, 5.1}),
+                  1.42302495);
+}
+
+}  // end namespace
diff --git a/src/third_party/google_benchmark/test/string_util_gtest.cc b/src/third_party/google_benchmark/test/string_util_gtest.cc
new file mode 100644
index 0000000..01bf155
--- /dev/null
+++ b/src/third_party/google_benchmark/test/string_util_gtest.cc
@@ -0,0 +1,153 @@
+//===---------------------------------------------------------------------===//
+// string_util_test - Unit tests for src/string_util.cc
+//===---------------------------------------------------------------------===//
+
+#include "../src/string_util.h"
+#include "../src/internal_macros.h"
+#include "gtest/gtest.h"
+
+namespace {
+TEST(StringUtilTest, stoul) {
+  {
+    size_t pos = 0;
+    EXPECT_EQ(0ul, benchmark::stoul("0", &pos));
+    EXPECT_EQ(1ul, pos);
+  }
+  {
+    size_t pos = 0;
+    EXPECT_EQ(7ul, benchmark::stoul("7", &pos));
+    EXPECT_EQ(1ul, pos);
+  }
+  {
+    size_t pos = 0;
+    EXPECT_EQ(135ul, benchmark::stoul("135", &pos));
+    EXPECT_EQ(3ul, pos);
+  }
+#if ULONG_MAX == 0xFFFFFFFFul
+  {
+    size_t pos = 0;
+    EXPECT_EQ(0xFFFFFFFFul, benchmark::stoul("4294967295", &pos));
+    EXPECT_EQ(10ul, pos);
+  }
+#elif ULONG_MAX == 0xFFFFFFFFFFFFFFFFul
+  {
+    size_t pos = 0;
+    EXPECT_EQ(0xFFFFFFFFFFFFFFFFul, benchmark::stoul("18446744073709551615", &pos));
+    EXPECT_EQ(20ul, pos);
+  }
+#endif
+  {
+    size_t pos = 0;
+    EXPECT_EQ(10ul, benchmark::stoul("1010", &pos, 2));
+    EXPECT_EQ(4ul, pos);
+  }
+  {
+    size_t pos = 0;
+    EXPECT_EQ(520ul, benchmark::stoul("1010", &pos, 8));
+    EXPECT_EQ(4ul, pos);
+  }
+  {
+    size_t pos = 0;
+    EXPECT_EQ(1010ul, benchmark::stoul("1010", &pos, 10));
+    EXPECT_EQ(4ul, pos);
+  }
+  {
+    size_t pos = 0;
+    EXPECT_EQ(4112ul, benchmark::stoul("1010", &pos, 16));
+    EXPECT_EQ(4ul, pos);
+  }
+  {
+    size_t pos = 0;
+    EXPECT_EQ(0xBEEFul, benchmark::stoul("BEEF", &pos, 16));
+    EXPECT_EQ(4ul, pos);
+  }
+#ifndef BENCHMARK_HAS_NO_EXCEPTIONS
+  {
+    ASSERT_THROW(benchmark::stoul("this is a test"), std::invalid_argument);
+  }
+#endif
+}
+
+TEST(StringUtilTest, stoi) {
+  {
+    size_t pos = 0;
+    EXPECT_EQ(0, benchmark::stoi("0", &pos));
+    EXPECT_EQ(1ul, pos);
+  }
+  {
+    size_t pos = 0;
+    EXPECT_EQ(-17, benchmark::stoi("-17", &pos));
+    EXPECT_EQ(3ul, pos);
+  }
+  {
+    size_t pos = 0;
+    EXPECT_EQ(1357, benchmark::stoi("1357", &pos));
+    EXPECT_EQ(4ul, pos);
+  }
+  {
+    size_t pos = 0;
+    EXPECT_EQ(10, benchmark::stoi("1010", &pos, 2));
+    EXPECT_EQ(4ul, pos);
+  }
+  {
+    size_t pos = 0;
+    EXPECT_EQ(520, benchmark::stoi("1010", &pos, 8));
+    EXPECT_EQ(4ul, pos);
+  }
+  {
+    size_t pos = 0;
+    EXPECT_EQ(1010, benchmark::stoi("1010", &pos, 10));
+    EXPECT_EQ(4ul, pos);
+  }
+  {
+    size_t pos = 0;
+    EXPECT_EQ(4112, benchmark::stoi("1010", &pos, 16));
+    EXPECT_EQ(4ul, pos);
+  }
+  {
+    size_t pos = 0;
+    EXPECT_EQ(0xBEEF, benchmark::stoi("BEEF", &pos, 16));
+    EXPECT_EQ(4ul, pos);
+  }
+#ifndef BENCHMARK_HAS_NO_EXCEPTIONS
+  {
+    ASSERT_THROW(benchmark::stoi("this is a test"), std::invalid_argument);
+  }
+#endif
+}
+
+TEST(StringUtilTest, stod) {
+  {
+    size_t pos = 0;
+    EXPECT_EQ(0.0, benchmark::stod("0", &pos));
+    EXPECT_EQ(1ul, pos);
+  }
+  {
+    size_t pos = 0;
+    EXPECT_EQ(-84.0, benchmark::stod("-84", &pos));
+    EXPECT_EQ(3ul, pos);
+  }
+  {
+    size_t pos = 0;
+    EXPECT_EQ(1234.0, benchmark::stod("1234", &pos));
+    EXPECT_EQ(4ul, pos);
+  }
+  {
+    size_t pos = 0;
+    EXPECT_EQ(1.5, benchmark::stod("1.5", &pos));
+    EXPECT_EQ(3ul, pos);
+  }
+  {
+    size_t pos = 0;
+    /* Note: exactly representable as double */
+    EXPECT_EQ(-1.25e+9, benchmark::stod("-1.25e+9", &pos));
+    EXPECT_EQ(8ul, pos);
+  }
+#ifndef BENCHMARK_HAS_NO_EXCEPTIONS
+  {
+    ASSERT_THROW(benchmark::stod("this is a test"), std::invalid_argument);
+  }
+#endif
+}
+
+}  // end namespace
diff --git a/src/third_party/google_benchmark/test/templated_fixture_test.cc b/src/third_party/google_benchmark/test/templated_fixture_test.cc
new file mode 100644
index 0000000..fe9865c
--- /dev/null
+++ b/src/third_party/google_benchmark/test/templated_fixture_test.cc
@@ -0,0 +1,28 @@
+
+#include "benchmark/benchmark.h"
+
+#include <cassert>
+#include <memory>
+
+template <typename T>
+class MyFixture : public ::benchmark::Fixture {
+ public:
+  MyFixture() : data(0) {}
+
+  T data;
+};
+
+BENCHMARK_TEMPLATE_F(MyFixture, Foo, int)(benchmark::State& st) {
+  for (auto _ : st) {
+    data += 1;
+  }
+}
+
+BENCHMARK_TEMPLATE_DEFINE_F(MyFixture, Bar, double)(benchmark::State& st) {
+  for (auto _ : st) {
+    data += 1.0;
+  }
+}
+BENCHMARK_REGISTER_F(MyFixture, Bar);
+
+BENCHMARK_MAIN();
diff --git a/src/third_party/google_benchmark/test/user_counters_tabular_test.cc b/src/third_party/google_benchmark/test/user_counters_tabular_test.cc
new file mode 100644
index 0000000..18373c0
--- /dev/null
+++ b/src/third_party/google_benchmark/test/user_counters_tabular_test.cc
@@ -0,0 +1,285 @@
+
+#undef NDEBUG
+
+#include "benchmark/benchmark.h"
+#include "output_test.h"
+
+// @todo: <jpmag> this checks the full output at once; the rule for
+// CounterSet1 was failing because it was not matching "^[-]+$".
+// @todo: <jpmag> check that the counters are vertically aligned.
+ADD_CASES(
+    TC_ConsoleOut,
+    {
+        // keeping these lines long improves readability, so:
+        // clang-format off
+    {"^[-]+$", MR_Next},
+    {"^Benchmark %s Time %s CPU %s Iterations %s Bar %s Bat %s Baz %s Foo %s Frob %s Lob$", MR_Next},
+    {"^[-]+$", MR_Next},
+    {"^BM_Counters_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
+    {"^BM_Counters_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
+    {"^BM_Counters_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
+    {"^BM_Counters_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
+    {"^BM_Counters_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
+    {"^BM_CounterRates_Tabular/threads:%int %console_report [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s$", MR_Next},
+    {"^BM_CounterRates_Tabular/threads:%int %console_report [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s$", MR_Next},
+    {"^BM_CounterRates_Tabular/threads:%int %console_report [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s$", MR_Next},
+    {"^BM_CounterRates_Tabular/threads:%int %console_report [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s$", MR_Next},
+    {"^BM_CounterRates_Tabular/threads:%int %console_report [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s [ ]*%hrfloat/s$", MR_Next},
+    {"^[-]+$", MR_Next},
+    {"^Benchmark %s Time %s CPU %s Iterations %s Bar %s Baz %s Foo$", MR_Next},
+    {"^[-]+$", MR_Next},
+    {"^BM_CounterSet0_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
+    {"^BM_CounterSet0_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
+    {"^BM_CounterSet0_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
+    {"^BM_CounterSet0_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
+    {"^BM_CounterSet0_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
+    {"^BM_CounterSet1_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
+    {"^BM_CounterSet1_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
+    {"^BM_CounterSet1_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
+    {"^BM_CounterSet1_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
+    {"^BM_CounterSet1_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
+    {"^[-]+$", MR_Next},
+    {"^Benchmark %s Time %s CPU %s Iterations %s Bat %s Baz %s Foo$", MR_Next},
+    {"^[-]+$", MR_Next},
+    {"^BM_CounterSet2_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
+    {"^BM_CounterSet2_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
+    {"^BM_CounterSet2_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
+    {"^BM_CounterSet2_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$", MR_Next},
+    {"^BM_CounterSet2_Tabular/threads:%int %console_report [ ]*%hrfloat [ ]*%hrfloat [ ]*%hrfloat$"},
+        // clang-format on
+    });
+ADD_CASES(TC_CSVOut, {{"%csv_header,"
+                       "\"Bar\",\"Bat\",\"Baz\",\"Foo\",\"Frob\",\"Lob\""}});
+
+// ========================================================================= //
+// ------------------------- Tabular Counters Output ----------------------- //
+// ========================================================================= //
+
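+// kAvgThreads counters are averaged over the participating threads, so the
+// reported values stay at 1/2/4/8/16/32 regardless of the thread count.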
+void BM_Counters_Tabular(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+  namespace bm = benchmark;
+  state.counters.insert({
+      {"Foo", {1, bm::Counter::kAvgThreads}},
+      {"Bar", {2, bm::Counter::kAvgThreads}},
+      {"Baz", {4, bm::Counter::kAvgThreads}},
+      {"Bat", {8, bm::Counter::kAvgThreads}},
+      {"Frob", {16, bm::Counter::kAvgThreads}},
+      {"Lob", {32, bm::Counter::kAvgThreads}},
+  });
+}
+BENCHMARK(BM_Counters_Tabular)->ThreadRange(1, 16);
+ADD_CASES(TC_JSONOut,
+          {{"\"name\": \"BM_Counters_Tabular/threads:%int\",$"},
+           {"\"run_name\": \"BM_Counters_Tabular/threads:%int\",$", MR_Next},
+           {"\"run_type\": \"iteration\",$", MR_Next},
+           {"\"repetitions\": 0,$", MR_Next},
+           {"\"repetition_index\": 0,$", MR_Next},
+           {"\"threads\": 1,$", MR_Next},
+           {"\"iterations\": %int,$", MR_Next},
+           {"\"real_time\": %float,$", MR_Next},
+           {"\"cpu_time\": %float,$", MR_Next},
+           {"\"time_unit\": \"ns\",$", MR_Next},
+           {"\"Bar\": %float,$", MR_Next},
+           {"\"Bat\": %float,$", MR_Next},
+           {"\"Baz\": %float,$", MR_Next},
+           {"\"Foo\": %float,$", MR_Next},
+           {"\"Frob\": %float,$", MR_Next},
+           {"\"Lob\": %float$", MR_Next},
+           {"}", MR_Next}});
+ADD_CASES(TC_CSVOut, {{"^\"BM_Counters_Tabular/threads:%int\",%csv_report,"
+                       "%float,%float,%float,%float,%float,%float$"}});
+// VS2013 does not allow this function to be passed as a lambda argument
+// to CHECK_BENCHMARK_RESULTS()
+void CheckTabular(Results const& e) {
+  CHECK_COUNTER_VALUE(e, int, "Foo", EQ, 1);
+  CHECK_COUNTER_VALUE(e, int, "Bar", EQ, 2);
+  CHECK_COUNTER_VALUE(e, int, "Baz", EQ, 4);
+  CHECK_COUNTER_VALUE(e, int, "Bat", EQ, 8);
+  CHECK_COUNTER_VALUE(e, int, "Frob", EQ, 16);
+  CHECK_COUNTER_VALUE(e, int, "Lob", EQ, 32);
+}
+CHECK_BENCHMARK_RESULTS("BM_Counters_Tabular/threads:%int", &CheckTabular);
+
+// ========================================================================= //
+// -------------------- Tabular+Rate Counters Output ----------------------- //
+// ========================================================================= //
+
+void BM_CounterRates_Tabular(benchmark::State& state) {
+  for (auto _ : state) {
+    // This test requires a non-zero CPU time to avoid divide-by-zero
+    benchmark::DoNotOptimize(state.iterations());
+  }
+  namespace bm = benchmark;
+  state.counters.insert({
+      {"Foo", {1, bm::Counter::kAvgThreadsRate}},
+      {"Bar", {2, bm::Counter::kAvgThreadsRate}},
+      {"Baz", {4, bm::Counter::kAvgThreadsRate}},
+      {"Bat", {8, bm::Counter::kAvgThreadsRate}},
+      {"Frob", {16, bm::Counter::kAvgThreadsRate}},
+      {"Lob", {32, bm::Counter::kAvgThreadsRate}},
+  });
+}
+BENCHMARK(BM_CounterRates_Tabular)->ThreadRange(1, 16);
+ADD_CASES(TC_JSONOut,
+          {{"\"name\": \"BM_CounterRates_Tabular/threads:%int\",$"},
+           {"\"run_name\": \"BM_CounterRates_Tabular/threads:%int\",$",
+            MR_Next},
+           {"\"run_type\": \"iteration\",$", MR_Next},
+           {"\"repetitions\": 0,$", MR_Next},
+           {"\"repetition_index\": 0,$", MR_Next},
+           {"\"threads\": 1,$", MR_Next},
+           {"\"iterations\": %int,$", MR_Next},
+           {"\"real_time\": %float,$", MR_Next},
+           {"\"cpu_time\": %float,$", MR_Next},
+           {"\"time_unit\": \"ns\",$", MR_Next},
+           {"\"Bar\": %float,$", MR_Next},
+           {"\"Bat\": %float,$", MR_Next},
+           {"\"Baz\": %float,$", MR_Next},
+           {"\"Foo\": %float,$", MR_Next},
+           {"\"Frob\": %float,$", MR_Next},
+           {"\"Lob\": %float$", MR_Next},
+           {"}", MR_Next}});
+ADD_CASES(TC_CSVOut, {{"^\"BM_CounterRates_Tabular/threads:%int\",%csv_report,"
+                       "%float,%float,%float,%float,%float,%float$"}});
+// VS2013 does not allow this function to be passed as a lambda argument
+// to CHECK_BENCHMARK_RESULTS()
+void CheckTabularRate(Results const& e) {
+  double t = e.DurationCPUTime();
+  CHECK_FLOAT_COUNTER_VALUE(e, "Foo", EQ, 1. / t, 0.001);
+  CHECK_FLOAT_COUNTER_VALUE(e, "Bar", EQ, 2. / t, 0.001);
+  CHECK_FLOAT_COUNTER_VALUE(e, "Baz", EQ, 4. / t, 0.001);
+  CHECK_FLOAT_COUNTER_VALUE(e, "Bat", EQ, 8. / t, 0.001);
+  CHECK_FLOAT_COUNTER_VALUE(e, "Frob", EQ, 16. / t, 0.001);
+  CHECK_FLOAT_COUNTER_VALUE(e, "Lob", EQ, 32. / t, 0.001);
+}
+CHECK_BENCHMARK_RESULTS("BM_CounterRates_Tabular/threads:%int",
+                        &CheckTabularRate);
+
+// ========================================================================= //
+// ------------------------- Tabular Counters Output ----------------------- //
+// ========================================================================= //
+
+// set only some of the counters
+void BM_CounterSet0_Tabular(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+  namespace bm = benchmark;
+  state.counters.insert({
+      {"Foo", {10, bm::Counter::kAvgThreads}},
+      {"Bar", {20, bm::Counter::kAvgThreads}},
+      {"Baz", {40, bm::Counter::kAvgThreads}},
+  });
+}
+BENCHMARK(BM_CounterSet0_Tabular)->ThreadRange(1, 16);
+ADD_CASES(TC_JSONOut,
+          {{"\"name\": \"BM_CounterSet0_Tabular/threads:%int\",$"},
+           {"\"run_name\": \"BM_CounterSet0_Tabular/threads:%int\",$", MR_Next},
+           {"\"run_type\": \"iteration\",$", MR_Next},
+           {"\"repetitions\": 0,$", MR_Next},
+           {"\"repetition_index\": 0,$", MR_Next},
+           {"\"threads\": 1,$", MR_Next},
+           {"\"iterations\": %int,$", MR_Next},
+           {"\"real_time\": %float,$", MR_Next},
+           {"\"cpu_time\": %float,$", MR_Next},
+           {"\"time_unit\": \"ns\",$", MR_Next},
+           {"\"Bar\": %float,$", MR_Next},
+           {"\"Baz\": %float,$", MR_Next},
+           {"\"Foo\": %float$", MR_Next},
+           {"}", MR_Next}});
+ADD_CASES(TC_CSVOut, {{"^\"BM_CounterSet0_Tabular/threads:%int\",%csv_report,"
+                       "%float,,%float,%float,,"}});
+// VS2013 does not allow this function to be passed as a lambda argument
+// to CHECK_BENCHMARK_RESULTS()
+void CheckSet0(Results const& e) {
+  CHECK_COUNTER_VALUE(e, int, "Foo", EQ, 10);
+  CHECK_COUNTER_VALUE(e, int, "Bar", EQ, 20);
+  CHECK_COUNTER_VALUE(e, int, "Baz", EQ, 40);
+}
+CHECK_BENCHMARK_RESULTS("BM_CounterSet0_Tabular", &CheckSet0);
+
+// The same subset of counters again, but with different values.
+void BM_CounterSet1_Tabular(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+  namespace bm = benchmark;
+  state.counters.insert({
+      {"Foo", {15, bm::Counter::kAvgThreads}},
+      {"Bar", {25, bm::Counter::kAvgThreads}},
+      {"Baz", {45, bm::Counter::kAvgThreads}},
+  });
+}
+BENCHMARK(BM_CounterSet1_Tabular)->ThreadRange(1, 16);
+ADD_CASES(TC_JSONOut,
+          {{"\"name\": \"BM_CounterSet1_Tabular/threads:%int\",$"},
+           {"\"run_name\": \"BM_CounterSet1_Tabular/threads:%int\",$", MR_Next},
+           {"\"run_type\": \"iteration\",$", MR_Next},
+           {"\"repetitions\": 0,$", MR_Next},
+           {"\"repetition_index\": 0,$", MR_Next},
+           {"\"threads\": 1,$", MR_Next},
+           {"\"iterations\": %int,$", MR_Next},
+           {"\"real_time\": %float,$", MR_Next},
+           {"\"cpu_time\": %float,$", MR_Next},
+           {"\"time_unit\": \"ns\",$", MR_Next},
+           {"\"Bar\": %float,$", MR_Next},
+           {"\"Baz\": %float,$", MR_Next},
+           {"\"Foo\": %float$", MR_Next},
+           {"}", MR_Next}});
+ADD_CASES(TC_CSVOut, {{"^\"BM_CounterSet1_Tabular/threads:%int\",%csv_report,"
+                       "%float,,%float,%float,,"}});
+// VS2013 does not allow this function to be passed as a lambda argument
+// to CHECK_BENCHMARK_RESULTS()
+void CheckSet1(Results const& e) {
+  CHECK_COUNTER_VALUE(e, int, "Foo", EQ, 15);
+  CHECK_COUNTER_VALUE(e, int, "Bar", EQ, 25);
+  CHECK_COUNTER_VALUE(e, int, "Baz", EQ, 45);
+}
+CHECK_BENCHMARK_RESULTS("BM_CounterSet1_Tabular/threads:%int", &CheckSet1);
+
+// ========================================================================= //
+// ------------------------- Tabular Counters Output ----------------------- //
+// ========================================================================= //
+
+// set only some of the counters, different set now.
+void BM_CounterSet2_Tabular(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+  namespace bm = benchmark;
+  state.counters.insert({
+      {"Foo", {10, bm::Counter::kAvgThreads}},
+      {"Bat", {30, bm::Counter::kAvgThreads}},
+      {"Baz", {40, bm::Counter::kAvgThreads}},
+  });
+}
+BENCHMARK(BM_CounterSet2_Tabular)->ThreadRange(1, 16);
+ADD_CASES(TC_JSONOut,
+          {{"\"name\": \"BM_CounterSet2_Tabular/threads:%int\",$"},
+           {"\"run_name\": \"BM_CounterSet2_Tabular/threads:%int\",$", MR_Next},
+           {"\"run_type\": \"iteration\",$", MR_Next},
+           {"\"repetitions\": 0,$", MR_Next},
+           {"\"repetition_index\": 0,$", MR_Next},
+           {"\"threads\": 1,$", MR_Next},
+           {"\"iterations\": %int,$", MR_Next},
+           {"\"real_time\": %float,$", MR_Next},
+           {"\"cpu_time\": %float,$", MR_Next},
+           {"\"time_unit\": \"ns\",$", MR_Next},
+           {"\"Bat\": %float,$", MR_Next},
+           {"\"Baz\": %float,$", MR_Next},
+           {"\"Foo\": %float$", MR_Next},
+           {"}", MR_Next}});
+ADD_CASES(TC_CSVOut, {{"^\"BM_CounterSet2_Tabular/threads:%int\",%csv_report,"
+                       ",%float,%float,%float,,"}});
+// VS2013 does not allow this function to be passed as a lambda argument
+// to CHECK_BENCHMARK_RESULTS()
+void CheckSet2(Results const& e) {
+  CHECK_COUNTER_VALUE(e, int, "Foo", EQ, 10);
+  CHECK_COUNTER_VALUE(e, int, "Bat", EQ, 30);
+  CHECK_COUNTER_VALUE(e, int, "Baz", EQ, 40);
+}
+CHECK_BENCHMARK_RESULTS("BM_CounterSet2_Tabular", &CheckSet2);
+
+// ========================================================================= //
+// --------------------------- TEST CASES END ------------------------------ //
+// ========================================================================= //
+
+int main(int argc, char* argv[]) { RunOutputTests(argc, argv); }
diff --git a/src/third_party/google_benchmark/test/user_counters_test.cc b/src/third_party/google_benchmark/test/user_counters_test.cc
new file mode 100644
index 0000000..5699f4f
--- /dev/null
+++ b/src/third_party/google_benchmark/test/user_counters_test.cc
@@ -0,0 +1,531 @@
+
+#undef NDEBUG
+
+#include "benchmark/benchmark.h"
+#include "output_test.h"
+
+// ========================================================================= //
+// ---------------------- Testing Prologue Output -------------------------- //
+// ========================================================================= //
+
+// clang-format off
+
+ADD_CASES(TC_ConsoleOut,
+          {{"^[-]+$", MR_Next},
+           {"^Benchmark %s Time %s CPU %s Iterations UserCounters...$", MR_Next},
+           {"^[-]+$", MR_Next}});
+ADD_CASES(TC_CSVOut, {{"%csv_header,\"bar\",\"foo\""}});
+
+// clang-format on
+
+// ========================================================================= //
+// ------------------------- Simple Counters Output ------------------------ //
+// ========================================================================= //
+
+void BM_Counters_Simple(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+  state.counters["foo"] = 1;
+  state.counters["bar"] = 2 * (double)state.iterations();
+}
+BENCHMARK(BM_Counters_Simple);
+ADD_CASES(TC_ConsoleOut,
+          {{"^BM_Counters_Simple %console_report bar=%hrfloat foo=%hrfloat$"}});
+ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_Counters_Simple\",$"},
+                       {"\"run_name\": \"BM_Counters_Simple\",$", MR_Next},
+                       {"\"run_type\": \"iteration\",$", MR_Next},
+                       {"\"repetitions\": 0,$", MR_Next},
+                       {"\"repetition_index\": 0,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next},
+                       {"\"iterations\": %int,$", MR_Next},
+                       {"\"real_time\": %float,$", MR_Next},
+                       {"\"cpu_time\": %float,$", MR_Next},
+                       {"\"time_unit\": \"ns\",$", MR_Next},
+                       {"\"bar\": %float,$", MR_Next},
+                       {"\"foo\": %float$", MR_Next},
+                       {"}", MR_Next}});
+ADD_CASES(TC_CSVOut, {{"^\"BM_Counters_Simple\",%csv_report,%float,%float$"}});
+// VS2013 does not allow this function to be passed as a lambda argument
+// to CHECK_BENCHMARK_RESULTS()
+void CheckSimple(Results const& e) {
+  double its = e.NumIterations();
+  CHECK_COUNTER_VALUE(e, int, "foo", EQ, 1);
+  // check that the value of bar is within 0.1% of the expected value
+  CHECK_FLOAT_COUNTER_VALUE(e, "bar", EQ, 2. * its, 0.001);
+}
+CHECK_BENCHMARK_RESULTS("BM_Counters_Simple", &CheckSimple);
+
+// ========================================================================= //
+// --------------------- Counters+Items+Bytes/s Output --------------------- //
+// ========================================================================= //
+
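+// Tracks how many times BM_Counters_WithBytesAndItemsPSec has run so that the
+// "bar" counter can be cross-checked in CheckBytesAndItemsPSec below.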
+namespace {
+int num_calls1 = 0;
+}
+void BM_Counters_WithBytesAndItemsPSec(benchmark::State& state) {
+  for (auto _ : state) {
+    // This test requires a non-zero CPU time to avoid divide-by-zero
+    benchmark::DoNotOptimize(state.iterations());
+  }
+  state.counters["foo"] = 1;
+  state.counters["bar"] = ++num_calls1;
+  state.SetBytesProcessed(364);
+  state.SetItemsProcessed(150);
+}
+BENCHMARK(BM_Counters_WithBytesAndItemsPSec);
+ADD_CASES(TC_ConsoleOut, {{"^BM_Counters_WithBytesAndItemsPSec %console_report "
+                           "bar=%hrfloat bytes_per_second=%hrfloat/s "
+                           "foo=%hrfloat items_per_second=%hrfloat/s$"}});
+ADD_CASES(TC_JSONOut,
+          {{"\"name\": \"BM_Counters_WithBytesAndItemsPSec\",$"},
+           {"\"run_name\": \"BM_Counters_WithBytesAndItemsPSec\",$", MR_Next},
+           {"\"run_type\": \"iteration\",$", MR_Next},
+           {"\"repetitions\": 0,$", MR_Next},
+           {"\"repetition_index\": 0,$", MR_Next},
+           {"\"threads\": 1,$", MR_Next},
+           {"\"iterations\": %int,$", MR_Next},
+           {"\"real_time\": %float,$", MR_Next},
+           {"\"cpu_time\": %float,$", MR_Next},
+           {"\"time_unit\": \"ns\",$", MR_Next},
+           {"\"bar\": %float,$", MR_Next},
+           {"\"bytes_per_second\": %float,$", MR_Next},
+           {"\"foo\": %float,$", MR_Next},
+           {"\"items_per_second\": %float$", MR_Next},
+           {"}", MR_Next}});
+ADD_CASES(TC_CSVOut, {{"^\"BM_Counters_WithBytesAndItemsPSec\","
+                       "%csv_bytes_items_report,%float,%float$"}});
+// VS2013 does not allow this function to be passed as a lambda argument
+// to CHECK_BENCHMARK_RESULTS()
+void CheckBytesAndItemsPSec(Results const& e) {
+  double t = e.DurationCPUTime();  // this (and not real time) is the time used
+  CHECK_COUNTER_VALUE(e, int, "foo", EQ, 1);
+  CHECK_COUNTER_VALUE(e, int, "bar", EQ, num_calls1);
+  // check that the values are within 0.1% of the expected values
+  CHECK_FLOAT_RESULT_VALUE(e, "bytes_per_second", EQ, 364. / t, 0.001);
+  CHECK_FLOAT_RESULT_VALUE(e, "items_per_second", EQ, 150. / t, 0.001);
+}
+CHECK_BENCHMARK_RESULTS("BM_Counters_WithBytesAndItemsPSec",
+                        &CheckBytesAndItemsPSec);
+
+// ========================================================================= //
+// ------------------------- Rate Counters Output -------------------------- //
+// ========================================================================= //
+
+void BM_Counters_Rate(benchmark::State& state) {
+  for (auto _ : state) {
+    // This test requires a non-zero CPU time to avoid divide-by-zero
+    benchmark::DoNotOptimize(state.iterations());
+  }
+  namespace bm = benchmark;
+  state.counters["foo"] = bm::Counter{1, bm::Counter::kIsRate};
+  state.counters["bar"] = bm::Counter{2, bm::Counter::kIsRate};
+}
+BENCHMARK(BM_Counters_Rate);
+ADD_CASES(
+    TC_ConsoleOut,
+    {{"^BM_Counters_Rate %console_report bar=%hrfloat/s foo=%hrfloat/s$"}});
+ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_Counters_Rate\",$"},
+                       {"\"run_name\": \"BM_Counters_Rate\",$", MR_Next},
+                       {"\"run_type\": \"iteration\",$", MR_Next},
+                       {"\"repetitions\": 0,$", MR_Next},
+                       {"\"repetition_index\": 0,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next},
+                       {"\"iterations\": %int,$", MR_Next},
+                       {"\"real_time\": %float,$", MR_Next},
+                       {"\"cpu_time\": %float,$", MR_Next},
+                       {"\"time_unit\": \"ns\",$", MR_Next},
+                       {"\"bar\": %float,$", MR_Next},
+                       {"\"foo\": %float$", MR_Next},
+                       {"}", MR_Next}});
+ADD_CASES(TC_CSVOut, {{"^\"BM_Counters_Rate\",%csv_report,%float,%float$"}});
+// VS2013 does not allow this function to be passed as a lambda argument
+// to CHECK_BENCHMARK_RESULTS()
+void CheckRate(Results const& e) {
+  double t = e.DurationCPUTime();  // this (and not real time) is the time used
+  // check that the values are within 0.1% of the expected values
+  CHECK_FLOAT_COUNTER_VALUE(e, "foo", EQ, 1. / t, 0.001);
+  CHECK_FLOAT_COUNTER_VALUE(e, "bar", EQ, 2. / t, 0.001);
+}
+CHECK_BENCHMARK_RESULTS("BM_Counters_Rate", &CheckRate);
+
+// ========================================================================= //
+// ----------------------- Inverted Counters Output ------------------------ //
+// ========================================================================= //
+
+void BM_Invert(benchmark::State& state) {
+  for (auto _ : state) {
+    // This test requires a non-zero CPU time to avoid divide-by-zero
+    benchmark::DoNotOptimize(state.iterations());
+  }
+  namespace bm = benchmark;
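+  // kInvert reports 1/value: foo (0.0001) is displayed as 10k and
+  // bar (10000) as 100u in the human-readable console output.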
+  state.counters["foo"] = bm::Counter{0.0001, bm::Counter::kInvert};
+  state.counters["bar"] = bm::Counter{10000, bm::Counter::kInvert};
+}
+BENCHMARK(BM_Invert);
+ADD_CASES(TC_ConsoleOut,
+          {{"^BM_Invert %console_report bar=%hrfloatu foo=%hrfloatk$"}});
+ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_Invert\",$"},
+                       {"\"run_name\": \"BM_Invert\",$", MR_Next},
+                       {"\"run_type\": \"iteration\",$", MR_Next},
+                       {"\"repetitions\": 0,$", MR_Next},
+                       {"\"repetition_index\": 0,$", MR_Next},
+                       {"\"threads\": 1,$", MR_Next},
+                       {"\"iterations\": %int,$", MR_Next},
+                       {"\"real_time\": %float,$", MR_Next},
+                       {"\"cpu_time\": %float,$", MR_Next},
+                       {"\"time_unit\": \"ns\",$", MR_Next},
+                       {"\"bar\": %float,$", MR_Next},
+                       {"\"foo\": %float$", MR_Next},
+                       {"}", MR_Next}});
+ADD_CASES(TC_CSVOut, {{"^\"BM_Invert\",%csv_report,%float,%float$"}});
+// VS2013 does not allow this function to be passed as a lambda argument
+// to CHECK_BENCHMARK_RESULTS()
+void CheckInvert(Results const& e) {
+  CHECK_FLOAT_COUNTER_VALUE(e, "foo", EQ, 10000, 0.0001);
+  CHECK_FLOAT_COUNTER_VALUE(e, "bar", EQ, 0.0001, 0.0001);
+}
+CHECK_BENCHMARK_RESULTS("BM_Invert", &CheckInvert);
+
+// ========================================================================= //
+// ---------------------- InvertedRate Counters Output --------------------- //
+// ========================================================================= //
+
+void BM_Counters_InvertedRate(benchmark::State& state) {
+  for (auto _ : state) {
+    // This test requires a non-zero CPU time to avoid divide-by-zero
+    benchmark::DoNotOptimize(state.iterations());
+  }
+  namespace bm = benchmark;
+  state.counters["foo"] =
+      bm::Counter{1, bm::Counter::kIsRate | bm::Counter::kInvert};
+  state.counters["bar"] =
+      bm::Counter{8192, bm::Counter::kIsRate | bm::Counter::kInvert};
+}
+BENCHMARK(BM_Counters_InvertedRate);
+ADD_CASES(TC_ConsoleOut, {{"^BM_Counters_InvertedRate %console_report "
+                           "bar=%hrfloats foo=%hrfloats$"}});
+ADD_CASES(TC_JSONOut,
+          {{"\"name\": \"BM_Counters_InvertedRate\",$"},
+           {"\"run_name\": \"BM_Counters_InvertedRate\",$", MR_Next},
+           {"\"run_type\": \"iteration\",$", MR_Next},
+           {"\"repetitions\": 0,$", MR_Next},
+           {"\"repetition_index\": 0,$", MR_Next},
+           {"\"threads\": 1,$", MR_Next},
+           {"\"iterations\": %int,$", MR_Next},
+           {"\"real_time\": %float,$", MR_Next},
+           {"\"cpu_time\": %float,$", MR_Next},
+           {"\"time_unit\": \"ns\",$", MR_Next},
+           {"\"bar\": %float,$", MR_Next},
+           {"\"foo\": %float$", MR_Next},
+           {"}", MR_Next}});
+ADD_CASES(TC_CSVOut,
+          {{"^\"BM_Counters_InvertedRate\",%csv_report,%float,%float$"}});
+// VS2013 does not allow this function to be passed as a lambda argument
+// to CHECK_BENCHMARK_RESULTS()
+void CheckInvertedRate(Results const& e) {
+  double t = e.DurationCPUTime();  // this (and not real time) is the time used
+  // check that the values are within 0.1% of the expected values
+  CHECK_FLOAT_COUNTER_VALUE(e, "foo", EQ, t, 0.001);
+  CHECK_FLOAT_COUNTER_VALUE(e, "bar", EQ, t / 8192.0, 0.001);
+}
+CHECK_BENCHMARK_RESULTS("BM_Counters_InvertedRate", &CheckInvertedRate);
+
+// ========================================================================= //
+// ------------------------- Thread Counters Output ------------------------ //
+// ========================================================================= //
+
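+// With no counter flags set, per-thread counter values are summed, so "foo"
+// reports the thread count and "bar" twice the thread count.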
+void BM_Counters_Threads(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+  state.counters["foo"] = 1;
+  state.counters["bar"] = 2;
+}
+BENCHMARK(BM_Counters_Threads)->ThreadRange(1, 8);
+ADD_CASES(TC_ConsoleOut, {{"^BM_Counters_Threads/threads:%int %console_report "
+                           "bar=%hrfloat foo=%hrfloat$"}});
+ADD_CASES(TC_JSONOut,
+          {{"\"name\": \"BM_Counters_Threads/threads:%int\",$"},
+           {"\"run_name\": \"BM_Counters_Threads/threads:%int\",$", MR_Next},
+           {"\"run_type\": \"iteration\",$", MR_Next},
+           {"\"repetitions\": 0,$", MR_Next},
+           {"\"repetition_index\": 0,$", MR_Next},
+           {"\"threads\": 1,$", MR_Next},
+           {"\"iterations\": %int,$", MR_Next},
+           {"\"real_time\": %float,$", MR_Next},
+           {"\"cpu_time\": %float,$", MR_Next},
+           {"\"time_unit\": \"ns\",$", MR_Next},
+           {"\"bar\": %float,$", MR_Next},
+           {"\"foo\": %float$", MR_Next},
+           {"}", MR_Next}});
+ADD_CASES(
+    TC_CSVOut,
+    {{"^\"BM_Counters_Threads/threads:%int\",%csv_report,%float,%float$"}});
+// VS2013 does not allow this function to be passed as a lambda argument
+// to CHECK_BENCHMARK_RESULTS()
+void CheckThreads(Results const& e) {
+  CHECK_COUNTER_VALUE(e, int, "foo", EQ, e.NumThreads());
+  CHECK_COUNTER_VALUE(e, int, "bar", EQ, 2 * e.NumThreads());
+}
+CHECK_BENCHMARK_RESULTS("BM_Counters_Threads/threads:%int", &CheckThreads);
+
+// ========================================================================= //
+// ---------------------- ThreadAvg Counters Output ------------------------ //
+// ========================================================================= //
+
+void BM_Counters_AvgThreads(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+  namespace bm = benchmark;
+  state.counters["foo"] = bm::Counter{1, bm::Counter::kAvgThreads};
+  state.counters["bar"] = bm::Counter{2, bm::Counter::kAvgThreads};
+}
+BENCHMARK(BM_Counters_AvgThreads)->ThreadRange(1, 8);
+ADD_CASES(TC_ConsoleOut, {{"^BM_Counters_AvgThreads/threads:%int "
+                           "%console_report bar=%hrfloat foo=%hrfloat$"}});
+ADD_CASES(TC_JSONOut,
+          {{"\"name\": \"BM_Counters_AvgThreads/threads:%int\",$"},
+           {"\"run_name\": \"BM_Counters_AvgThreads/threads:%int\",$", MR_Next},
+           {"\"run_type\": \"iteration\",$", MR_Next},
+           {"\"repetitions\": 0,$", MR_Next},
+           {"\"repetition_index\": 0,$", MR_Next},
+           {"\"threads\": 1,$", MR_Next},
+           {"\"iterations\": %int,$", MR_Next},
+           {"\"real_time\": %float,$", MR_Next},
+           {"\"cpu_time\": %float,$", MR_Next},
+           {"\"time_unit\": \"ns\",$", MR_Next},
+           {"\"bar\": %float,$", MR_Next},
+           {"\"foo\": %float$", MR_Next},
+           {"}", MR_Next}});
+ADD_CASES(
+    TC_CSVOut,
+    {{"^\"BM_Counters_AvgThreads/threads:%int\",%csv_report,%float,%float$"}});
+// VS2013 does not allow this function to be passed as a lambda argument
+// to CHECK_BENCHMARK_RESULTS()
+void CheckAvgThreads(Results const& e) {
+  CHECK_COUNTER_VALUE(e, int, "foo", EQ, 1);
+  CHECK_COUNTER_VALUE(e, int, "bar", EQ, 2);
+}
+CHECK_BENCHMARK_RESULTS("BM_Counters_AvgThreads/threads:%int",
+                        &CheckAvgThreads);
+
+// ========================================================================= //
+// ---------------------- ThreadAvg Counters Output ------------------------ //
+// ========================================================================= //
+
+void BM_Counters_AvgThreadsRate(benchmark::State& state) {
+  for (auto _ : state) {
+    // This test requires a non-zero CPU time to avoid divide-by-zero
+    benchmark::DoNotOptimize(state.iterations());
+  }
+  namespace bm = benchmark;
+  state.counters["foo"] = bm::Counter{1, bm::Counter::kAvgThreadsRate};
+  state.counters["bar"] = bm::Counter{2, bm::Counter::kAvgThreadsRate};
+}
+BENCHMARK(BM_Counters_AvgThreadsRate)->ThreadRange(1, 8);
+ADD_CASES(TC_ConsoleOut, {{"^BM_Counters_AvgThreadsRate/threads:%int "
+                           "%console_report bar=%hrfloat/s foo=%hrfloat/s$"}});
+ADD_CASES(TC_JSONOut,
+          {{"\"name\": \"BM_Counters_AvgThreadsRate/threads:%int\",$"},
+           {"\"run_name\": \"BM_Counters_AvgThreadsRate/threads:%int\",$",
+            MR_Next},
+           {"\"run_type\": \"iteration\",$", MR_Next},
+           {"\"repetitions\": 0,$", MR_Next},
+           {"\"repetition_index\": 0,$", MR_Next},
+           {"\"threads\": 1,$", MR_Next},
+           {"\"iterations\": %int,$", MR_Next},
+           {"\"real_time\": %float,$", MR_Next},
+           {"\"cpu_time\": %float,$", MR_Next},
+           {"\"time_unit\": \"ns\",$", MR_Next},
+           {"\"bar\": %float,$", MR_Next},
+           {"\"foo\": %float$", MR_Next},
+           {"}", MR_Next}});
+ADD_CASES(TC_CSVOut, {{"^\"BM_Counters_AvgThreadsRate/"
+                       "threads:%int\",%csv_report,%float,%float$"}});
+// VS2013 does not allow this function to be passed as a lambda argument
+// to CHECK_BENCHMARK_RESULTS()
+void CheckAvgThreadsRate(Results const& e) {
+  CHECK_FLOAT_COUNTER_VALUE(e, "foo", EQ, 1. / e.DurationCPUTime(), 0.001);
+  CHECK_FLOAT_COUNTER_VALUE(e, "bar", EQ, 2. / e.DurationCPUTime(), 0.001);
+}
+CHECK_BENCHMARK_RESULTS("BM_Counters_AvgThreadsRate/threads:%int",
+                        &CheckAvgThreadsRate);
+
+// ========================================================================= //
+// ------------------- IterationInvariant Counters Output ------------------ //
+// ========================================================================= //
+
+void BM_Counters_IterationInvariant(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+  namespace bm = benchmark;
+  state.counters["foo"] = bm::Counter{1, bm::Counter::kIsIterationInvariant};
+  state.counters["bar"] = bm::Counter{2, bm::Counter::kIsIterationInvariant};
+}
+BENCHMARK(BM_Counters_IterationInvariant);
+ADD_CASES(TC_ConsoleOut, {{"^BM_Counters_IterationInvariant %console_report "
+                           "bar=%hrfloat foo=%hrfloat$"}});
+ADD_CASES(TC_JSONOut,
+          {{"\"name\": \"BM_Counters_IterationInvariant\",$"},
+           {"\"run_name\": \"BM_Counters_IterationInvariant\",$", MR_Next},
+           {"\"run_type\": \"iteration\",$", MR_Next},
+           {"\"repetitions\": 0,$", MR_Next},
+           {"\"repetition_index\": 0,$", MR_Next},
+           {"\"threads\": 1,$", MR_Next},
+           {"\"iterations\": %int,$", MR_Next},
+           {"\"real_time\": %float,$", MR_Next},
+           {"\"cpu_time\": %float,$", MR_Next},
+           {"\"time_unit\": \"ns\",$", MR_Next},
+           {"\"bar\": %float,$", MR_Next},
+           {"\"foo\": %float$", MR_Next},
+           {"}", MR_Next}});
+ADD_CASES(TC_CSVOut,
+          {{"^\"BM_Counters_IterationInvariant\",%csv_report,%float,%float$"}});
+// VS2013 does not allow this function to be passed as a lambda argument
+// to CHECK_BENCHMARK_RESULTS()
+void CheckIterationInvariant(Results const& e) {
+  double its = e.NumIterations();
+  // check that the values are within 0.1% of the expected value
+  CHECK_FLOAT_COUNTER_VALUE(e, "foo", EQ, its, 0.001);
+  CHECK_FLOAT_COUNTER_VALUE(e, "bar", EQ, 2. * its, 0.001);
+}
+CHECK_BENCHMARK_RESULTS("BM_Counters_IterationInvariant",
+                        &CheckIterationInvariant);
+
+// ========================================================================= //
+// ----------------- IterationInvariantRate Counters Output ---------------- //
+// ========================================================================= //
+
+void BM_Counters_kIsIterationInvariantRate(benchmark::State& state) {
+  for (auto _ : state) {
+    // This test requires a non-zero CPU time to avoid divide-by-zero
+    benchmark::DoNotOptimize(state.iterations());
+  }
+  namespace bm = benchmark;
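+  // kIsIterationInvariantRate == kIsIterationInvariant | kIsRate: the value
+  // is multiplied by the iteration count and divided by the elapsed CPU time.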
+  state.counters["foo"] =
+      bm::Counter{1, bm::Counter::kIsIterationInvariantRate};
+  state.counters["bar"] =
+      bm::Counter{2, bm::Counter::kIsRate | bm::Counter::kIsIterationInvariant};
+}
+BENCHMARK(BM_Counters_kIsIterationInvariantRate);
+ADD_CASES(TC_ConsoleOut, {{"^BM_Counters_kIsIterationInvariantRate "
+                           "%console_report bar=%hrfloat/s foo=%hrfloat/s$"}});
+ADD_CASES(TC_JSONOut,
+          {{"\"name\": \"BM_Counters_kIsIterationInvariantRate\",$"},
+           {"\"run_name\": \"BM_Counters_kIsIterationInvariantRate\",$",
+            MR_Next},
+           {"\"run_type\": \"iteration\",$", MR_Next},
+           {"\"repetitions\": 0,$", MR_Next},
+           {"\"repetition_index\": 0,$", MR_Next},
+           {"\"threads\": 1,$", MR_Next},
+           {"\"iterations\": %int,$", MR_Next},
+           {"\"real_time\": %float,$", MR_Next},
+           {"\"cpu_time\": %float,$", MR_Next},
+           {"\"time_unit\": \"ns\",$", MR_Next},
+           {"\"bar\": %float,$", MR_Next},
+           {"\"foo\": %float$", MR_Next},
+           {"}", MR_Next}});
+ADD_CASES(TC_CSVOut, {{"^\"BM_Counters_kIsIterationInvariantRate\",%csv_report,"
+                       "%float,%float$"}});
+// VS2013 does not allow this function to be passed as a lambda argument
+// to CHECK_BENCHMARK_RESULTS()
+void CheckIsIterationInvariantRate(Results const& e) {
+  double its = e.NumIterations();
+  double t = e.DurationCPUTime();  // this (and not real time) is the time used
+  // check that the values are within 0.1% of the expected values
+  CHECK_FLOAT_COUNTER_VALUE(e, "foo", EQ, its * 1. / t, 0.001);
+  CHECK_FLOAT_COUNTER_VALUE(e, "bar", EQ, its * 2. / t, 0.001);
+}
+CHECK_BENCHMARK_RESULTS("BM_Counters_kIsIterationInvariantRate",
+                        &CheckIsIterationInvariantRate);
+
+// ========================================================================= //
+// ------------------- AvgIterations Counters Output ------------------ //
+// ========================================================================= //
+
+void BM_Counters_AvgIterations(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+  namespace bm = benchmark;
+  state.counters["foo"] = bm::Counter{1, bm::Counter::kAvgIterations};
+  state.counters["bar"] = bm::Counter{2, bm::Counter::kAvgIterations};
+}
+BENCHMARK(BM_Counters_AvgIterations);
+ADD_CASES(TC_ConsoleOut, {{"^BM_Counters_AvgIterations %console_report "
+                           "bar=%hrfloat foo=%hrfloat$"}});
+ADD_CASES(TC_JSONOut,
+          {{"\"name\": \"BM_Counters_AvgIterations\",$"},
+           {"\"run_name\": \"BM_Counters_AvgIterations\",$", MR_Next},
+           {"\"run_type\": \"iteration\",$", MR_Next},
+           {"\"repetitions\": 0,$", MR_Next},
+           {"\"repetition_index\": 0,$", MR_Next},
+           {"\"threads\": 1,$", MR_Next},
+           {"\"iterations\": %int,$", MR_Next},
+           {"\"real_time\": %float,$", MR_Next},
+           {"\"cpu_time\": %float,$", MR_Next},
+           {"\"time_unit\": \"ns\",$", MR_Next},
+           {"\"bar\": %float,$", MR_Next},
+           {"\"foo\": %float$", MR_Next},
+           {"}", MR_Next}});
+ADD_CASES(TC_CSVOut,
+          {{"^\"BM_Counters_AvgIterations\",%csv_report,%float,%float$"}});
+// VS2013 does not allow this function to be passed as a lambda argument
+// to CHECK_BENCHMARK_RESULTS()
+void CheckAvgIterations(Results const& e) {
+  double its = e.NumIterations();
+  // check that the values are within 0.1% of the expected value
+  CHECK_FLOAT_COUNTER_VALUE(e, "foo", EQ, 1. / its, 0.001);
+  CHECK_FLOAT_COUNTER_VALUE(e, "bar", EQ, 2. / its, 0.001);
+}
+CHECK_BENCHMARK_RESULTS("BM_Counters_AvgIterations", &CheckAvgIterations);
+
+// ========================================================================= //
+// ----------------- AvgIterationsRate Counters Output ---------------- //
+// ========================================================================= //
+
+void BM_Counters_kAvgIterationsRate(benchmark::State& state) {
+  for (auto _ : state) {
+    // This test requires a non-zero CPU time to avoid divide-by-zero
+    benchmark::DoNotOptimize(state.iterations());
+  }
+  namespace bm = benchmark;
+  state.counters["foo"] = bm::Counter{1, bm::Counter::kAvgIterationsRate};
+  state.counters["bar"] =
+      bm::Counter{2, bm::Counter::kIsRate | bm::Counter::kAvgIterations};
+}
+BENCHMARK(BM_Counters_kAvgIterationsRate);
+ADD_CASES(TC_ConsoleOut, {{"^BM_Counters_kAvgIterationsRate "
+                           "%console_report bar=%hrfloat/s foo=%hrfloat/s$"}});
+ADD_CASES(TC_JSONOut,
+          {{"\"name\": \"BM_Counters_kAvgIterationsRate\",$"},
+           {"\"run_name\": \"BM_Counters_kAvgIterationsRate\",$", MR_Next},
+           {"\"run_type\": \"iteration\",$", MR_Next},
+           {"\"repetitions\": 0,$", MR_Next},
+           {"\"repetition_index\": 0,$", MR_Next},
+           {"\"threads\": 1,$", MR_Next},
+           {"\"iterations\": %int,$", MR_Next},
+           {"\"real_time\": %float,$", MR_Next},
+           {"\"cpu_time\": %float,$", MR_Next},
+           {"\"time_unit\": \"ns\",$", MR_Next},
+           {"\"bar\": %float,$", MR_Next},
+           {"\"foo\": %float$", MR_Next},
+           {"}", MR_Next}});
+ADD_CASES(TC_CSVOut, {{"^\"BM_Counters_kAvgIterationsRate\",%csv_report,"
+                       "%float,%float$"}});
+// VS2013 does not allow this function to be passed as a lambda argument
+// to CHECK_BENCHMARK_RESULTS()
+void CheckAvgIterationsRate(Results const& e) {
+  double its = e.NumIterations();
+  double t = e.DurationCPUTime();  // this (and not real time) is the time used
+  // check that the values are within 0.1% of the expected values
+  CHECK_FLOAT_COUNTER_VALUE(e, "foo", EQ, 1. / its / t, 0.001);
+  CHECK_FLOAT_COUNTER_VALUE(e, "bar", EQ, 2. / its / t, 0.001);
+}
+CHECK_BENCHMARK_RESULTS("BM_Counters_kAvgIterationsRate",
+                        &CheckAvgIterationsRate);
+
+// ========================================================================= //
+// --------------------------- TEST CASES END ------------------------------ //
+// ========================================================================= //
+
+int main(int argc, char* argv[]) { RunOutputTests(argc, argv); }
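
Note: the CHECK_* expectations above reduce each counter flag to simple
arithmetic: kIsIterationInvariant multiplies the raw counter value by the
iteration count, kAvgIterations divides it by the iteration count, and kIsRate
divides it by the measured CPU time. The Python sketch below (not part of the
upstream tests; the names and sample inputs are made up for illustration)
restates that arithmetic.

# Illustrative sketch only -- mirrors the expected values checked above.
def expected_counter(value, iterations, cpu_time_s,
                     is_rate=False, iteration_invariant=False,
                     avg_iterations=False):
    result = float(value)
    if iteration_invariant:   # kIsIterationInvariant: scale by iterations
        result *= iterations
    if avg_iterations:        # kAvgIterations: divide by iterations
        result /= iterations
    if is_rate:               # kIsRate: divide by the measured CPU time
        result /= cpu_time_s
    return result

# E.g. for BM_Counters_kIsIterationInvariantRate the "foo" counter is 1, so the
# reported value should be close to iterations * 1 / cpu_time, as checked above
# (the iteration count and CPU time here are arbitrary sample numbers).
print(expected_counter(1, iterations=1000, cpu_time_s=2e-6,
                       is_rate=True, iteration_invariant=True))
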
diff --git a/src/third_party/google_benchmark/test/user_counters_thousands_test.cc b/src/third_party/google_benchmark/test/user_counters_thousands_test.cc
new file mode 100644
index 0000000..21d8285
--- /dev/null
+++ b/src/third_party/google_benchmark/test/user_counters_thousands_test.cc
@@ -0,0 +1,173 @@
+
+#undef NDEBUG
+
+#include "benchmark/benchmark.h"
+#include "output_test.h"
+
+// ========================================================================= //
+// ------------------------ Thousands Customisation ------------------------ //
+// ========================================================================= //
+
+void BM_Counters_Thousands(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+  namespace bm = benchmark;
+  state.counters.insert({
+      {"t0_1000000DefaultBase",
+       bm::Counter(1000 * 1000, bm::Counter::kDefaults)},
+      {"t1_1000000Base1000", bm::Counter(1000 * 1000, bm::Counter::kDefaults,
+                                         benchmark::Counter::OneK::kIs1000)},
+      {"t2_1000000Base1024", bm::Counter(1000 * 1000, bm::Counter::kDefaults,
+                                         benchmark::Counter::OneK::kIs1024)},
+      {"t3_1048576Base1000", bm::Counter(1024 * 1024, bm::Counter::kDefaults,
+                                         benchmark::Counter::OneK::kIs1000)},
+      {"t4_1048576Base1024", bm::Counter(1024 * 1024, bm::Counter::kDefaults,
+                                         benchmark::Counter::OneK::kIs1024)},
+  });
+}
+BENCHMARK(BM_Counters_Thousands)->Repetitions(2);
+ADD_CASES(
+    TC_ConsoleOut,
+    {
+        {"^BM_Counters_Thousands/repeats:2 %console_report "
+         "t0_1000000DefaultBase=1000k "
+         "t1_1000000Base1000=1000k t2_1000000Base1024=976.56[23]k "
+         "t3_1048576Base1000=1048.58k t4_1048576Base1024=1024k$"},
+        {"^BM_Counters_Thousands/repeats:2 %console_report "
+         "t0_1000000DefaultBase=1000k "
+         "t1_1000000Base1000=1000k t2_1000000Base1024=976.56[23]k "
+         "t3_1048576Base1000=1048.58k t4_1048576Base1024=1024k$"},
+        {"^BM_Counters_Thousands/repeats:2_mean %console_report "
+         "t0_1000000DefaultBase=1000k t1_1000000Base1000=1000k "
+         "t2_1000000Base1024=976.56[23]k t3_1048576Base1000=1048.58k "
+         "t4_1048576Base1024=1024k$"},
+        {"^BM_Counters_Thousands/repeats:2_median %console_report "
+         "t0_1000000DefaultBase=1000k t1_1000000Base1000=1000k "
+         "t2_1000000Base1024=976.56[23]k t3_1048576Base1000=1048.58k "
+         "t4_1048576Base1024=1024k$"},
+        {"^BM_Counters_Thousands/repeats:2_stddev %console_time_only_report [ "
+         "]*2 t0_1000000DefaultBase=0 t1_1000000Base1000=0 "
+         "t2_1000000Base1024=0 t3_1048576Base1000=0 t4_1048576Base1024=0$"},
+    });
+ADD_CASES(TC_JSONOut,
+          {{"\"name\": \"BM_Counters_Thousands/repeats:2\",$"},
+           {"\"run_name\": \"BM_Counters_Thousands/repeats:2\",$", MR_Next},
+           {"\"run_type\": \"iteration\",$", MR_Next},
+           {"\"repetitions\": 2,$", MR_Next},
+           {"\"repetition_index\": 0,$", MR_Next},
+           {"\"threads\": 1,$", MR_Next},
+           {"\"iterations\": %int,$", MR_Next},
+           {"\"real_time\": %float,$", MR_Next},
+           {"\"cpu_time\": %float,$", MR_Next},
+           {"\"time_unit\": \"ns\",$", MR_Next},
+           {"\"t0_1000000DefaultBase\": 1\\.(0)*e\\+(0)*6,$", MR_Next},
+           {"\"t1_1000000Base1000\": 1\\.(0)*e\\+(0)*6,$", MR_Next},
+           {"\"t2_1000000Base1024\": 1\\.(0)*e\\+(0)*6,$", MR_Next},
+           {"\"t3_1048576Base1000\": 1\\.048576(0)*e\\+(0)*6,$", MR_Next},
+           {"\"t4_1048576Base1024\": 1\\.048576(0)*e\\+(0)*6$", MR_Next},
+           {"}", MR_Next}});
+ADD_CASES(TC_JSONOut,
+          {{"\"name\": \"BM_Counters_Thousands/repeats:2\",$"},
+           {"\"run_name\": \"BM_Counters_Thousands/repeats:2\",$", MR_Next},
+           {"\"run_type\": \"iteration\",$", MR_Next},
+           {"\"repetitions\": 2,$", MR_Next},
+           {"\"repetition_index\": 1,$", MR_Next},
+           {"\"threads\": 1,$", MR_Next},
+           {"\"iterations\": %int,$", MR_Next},
+           {"\"real_time\": %float,$", MR_Next},
+           {"\"cpu_time\": %float,$", MR_Next},
+           {"\"time_unit\": \"ns\",$", MR_Next},
+           {"\"t0_1000000DefaultBase\": 1\\.(0)*e\\+(0)*6,$", MR_Next},
+           {"\"t1_1000000Base1000\": 1\\.(0)*e\\+(0)*6,$", MR_Next},
+           {"\"t2_1000000Base1024\": 1\\.(0)*e\\+(0)*6,$", MR_Next},
+           {"\"t3_1048576Base1000\": 1\\.048576(0)*e\\+(0)*6,$", MR_Next},
+           {"\"t4_1048576Base1024\": 1\\.048576(0)*e\\+(0)*6$", MR_Next},
+           {"}", MR_Next}});
+ADD_CASES(TC_JSONOut,
+          {{"\"name\": \"BM_Counters_Thousands/repeats:2_mean\",$"},
+           {"\"run_name\": \"BM_Counters_Thousands/repeats:2\",$", MR_Next},
+           {"\"run_type\": \"aggregate\",$", MR_Next},
+           {"\"repetitions\": 2,$", MR_Next},
+           {"\"threads\": 1,$", MR_Next},
+           {"\"aggregate_name\": \"mean\",$", MR_Next},
+           {"\"iterations\": 2,$", MR_Next},
+           {"\"real_time\": %float,$", MR_Next},
+           {"\"cpu_time\": %float,$", MR_Next},
+           {"\"time_unit\": \"ns\",$", MR_Next},
+           {"\"t0_1000000DefaultBase\": 1\\.(0)*e\\+(0)*6,$", MR_Next},
+           {"\"t1_1000000Base1000\": 1\\.(0)*e\\+(0)*6,$", MR_Next},
+           {"\"t2_1000000Base1024\": 1\\.(0)*e\\+(0)*6,$", MR_Next},
+           {"\"t3_1048576Base1000\": 1\\.048576(0)*e\\+(0)*6,$", MR_Next},
+           {"\"t4_1048576Base1024\": 1\\.048576(0)*e\\+(0)*6$", MR_Next},
+           {"}", MR_Next}});
+ADD_CASES(TC_JSONOut,
+          {{"\"name\": \"BM_Counters_Thousands/repeats:2_median\",$"},
+           {"\"run_name\": \"BM_Counters_Thousands/repeats:2\",$", MR_Next},
+           {"\"run_type\": \"aggregate\",$", MR_Next},
+           {"\"repetitions\": 2,$", MR_Next},
+           {"\"threads\": 1,$", MR_Next},
+           {"\"aggregate_name\": \"median\",$", MR_Next},
+           {"\"iterations\": 2,$", MR_Next},
+           {"\"real_time\": %float,$", MR_Next},
+           {"\"cpu_time\": %float,$", MR_Next},
+           {"\"time_unit\": \"ns\",$", MR_Next},
+           {"\"t0_1000000DefaultBase\": 1\\.(0)*e\\+(0)*6,$", MR_Next},
+           {"\"t1_1000000Base1000\": 1\\.(0)*e\\+(0)*6,$", MR_Next},
+           {"\"t2_1000000Base1024\": 1\\.(0)*e\\+(0)*6,$", MR_Next},
+           {"\"t3_1048576Base1000\": 1\\.048576(0)*e\\+(0)*6,$", MR_Next},
+           {"\"t4_1048576Base1024\": 1\\.048576(0)*e\\+(0)*6$", MR_Next},
+           {"}", MR_Next}});
+ADD_CASES(TC_JSONOut,
+          {{"\"name\": \"BM_Counters_Thousands/repeats:2_stddev\",$"},
+           {"\"run_name\": \"BM_Counters_Thousands/repeats:2\",$", MR_Next},
+           {"\"run_type\": \"aggregate\",$", MR_Next},
+           {"\"repetitions\": 2,$", MR_Next},
+           {"\"threads\": 1,$", MR_Next},
+           {"\"aggregate_name\": \"stddev\",$", MR_Next},
+           {"\"iterations\": 2,$", MR_Next},
+           {"\"real_time\": %float,$", MR_Next},
+           {"\"cpu_time\": %float,$", MR_Next},
+           {"\"time_unit\": \"ns\",$", MR_Next},
+           {"\"t0_1000000DefaultBase\": 0\\.(0)*e\\+(0)*,$", MR_Next},
+           {"\"t1_1000000Base1000\": 0\\.(0)*e\\+(0)*,$", MR_Next},
+           {"\"t2_1000000Base1024\": 0\\.(0)*e\\+(0)*,$", MR_Next},
+           {"\"t3_1048576Base1000\": 0\\.(0)*e\\+(0)*,$", MR_Next},
+           {"\"t4_1048576Base1024\": 0\\.(0)*e\\+(0)*$", MR_Next},
+           {"}", MR_Next}});
+
+ADD_CASES(
+    TC_CSVOut,
+    {{"^\"BM_Counters_Thousands/"
+      "repeats:2\",%csv_report,1e\\+(0)*6,1e\\+(0)*6,1e\\+(0)*6,1\\.04858e\\+("
+      "0)*6,1\\.04858e\\+(0)*6$"},
+     {"^\"BM_Counters_Thousands/"
+      "repeats:2\",%csv_report,1e\\+(0)*6,1e\\+(0)*6,1e\\+(0)*6,1\\.04858e\\+("
+      "0)*6,1\\.04858e\\+(0)*6$"},
+     {"^\"BM_Counters_Thousands/"
+      "repeats:2_mean\",%csv_report,1e\\+(0)*6,1e\\+(0)*6,1e\\+(0)*6,1\\."
+      "04858e\\+(0)*6,1\\.04858e\\+(0)*6$"},
+     {"^\"BM_Counters_Thousands/"
+      "repeats:2_median\",%csv_report,1e\\+(0)*6,1e\\+(0)*6,1e\\+(0)*6,1\\."
+      "04858e\\+(0)*6,1\\.04858e\\+(0)*6$"},
+     {"^\"BM_Counters_Thousands/repeats:2_stddev\",%csv_report,0,0,0,0,0$"}});
+// VS2013 does not allow this function to be passed as a lambda argument
+// to CHECK_BENCHMARK_RESULTS()
+void CheckThousands(Results const& e) {
+  if (e.name != "BM_Counters_Thousands/repeats:2")
+    return;  // Do not check the aggregates!
+
+  // check that the values are within 0.01% of the expected values
+  CHECK_FLOAT_COUNTER_VALUE(e, "t0_1000000DefaultBase", EQ, 1000 * 1000,
+                            0.0001);
+  CHECK_FLOAT_COUNTER_VALUE(e, "t1_1000000Base1000", EQ, 1000 * 1000, 0.0001);
+  CHECK_FLOAT_COUNTER_VALUE(e, "t2_1000000Base1024", EQ, 1000 * 1000, 0.0001);
+  CHECK_FLOAT_COUNTER_VALUE(e, "t3_1048576Base1000", EQ, 1024 * 1024, 0.0001);
+  CHECK_FLOAT_COUNTER_VALUE(e, "t4_1048576Base1024", EQ, 1024 * 1024, 0.0001);
+}
+CHECK_BENCHMARK_RESULTS("BM_Counters_Thousands", &CheckThousands);
+
+// ========================================================================= //
+// --------------------------- TEST CASES END ------------------------------ //
+// ========================================================================= //
+
+int main(int argc, char* argv[]) { RunOutputTests(argc, argv); }
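
Note: the console expectations for BM_Counters_Thousands above follow from
dividing the raw counter value by the selected "one kilo" base (1000 for
OneK::kIs1000, 1024 for OneK::kIs1024) before the "k" suffix is appended. A
minimal illustrative Python sketch (not upstream code):

def to_kilo(value, one_k):
    # one_k is 1000 for Counter::OneK::kIs1000 and 1024 for kIs1024.
    return value / float(one_k)

print(to_kilo(1000 * 1000, 1000))   # 1000.0   -> "1000k"
print(to_kilo(1000 * 1000, 1024))   # 976.5625 -> "976.562k" / "976.563k"
print(to_kilo(1024 * 1024, 1000))   # 1048.576 -> "1048.58k"
print(to_kilo(1024 * 1024, 1024))   # 1024.0   -> "1024k"
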
diff --git a/src/third_party/google_benchmark/tools/compare.py b/src/third_party/google_benchmark/tools/compare.py
new file mode 100755
index 0000000..539ace6
--- /dev/null
+++ b/src/third_party/google_benchmark/tools/compare.py
@@ -0,0 +1,408 @@
+#!/usr/bin/env python
+
+"""
+compare.py - versatile benchmark output compare tool
+"""
+
+import unittest
+import argparse
+from argparse import ArgumentParser
+import os
+import sys
+import gbench
+from gbench import util, report
+from gbench.util import *
+
+
+def check_inputs(in1, in2, flags):
+    """
+    Perform checks on the user-provided inputs and diagnose any abnormalities.
+    """
+    in1_kind, in1_err = classify_input_file(in1)
+    in2_kind, in2_err = classify_input_file(in2)
+    output_file = find_benchmark_flag('--benchmark_out=', flags)
+    output_type = find_benchmark_flag('--benchmark_out_format=', flags)
+    if in1_kind == IT_Executable and in2_kind == IT_Executable and output_file:
+        print(("WARNING: '--benchmark_out=%s' will be passed to both "
+               "benchmarks causing it to be overwritten") % output_file)
+    if in1_kind == IT_JSON and in2_kind == IT_JSON and len(flags) > 0:
+        print("WARNING: passing optional flags has no effect since both "
+              "inputs are JSON")
+    if output_type is not None and output_type != 'json':
+        print(("ERROR: passing '--benchmark_out_format=%s' to 'compare.py`"
+               " is not supported.") % output_type)
+        sys.exit(1)
+
+
+def create_parser():
+    parser = ArgumentParser(
+        description='versatile benchmark output compare tool')
+
+    parser.add_argument(
+        '-a',
+        '--display_aggregates_only',
+        dest='display_aggregates_only',
+        action="store_true",
+        help="If there are repetitions, by default, we display everything - the"
+             " actual runs, and the aggregates computed. Sometimes, it is "
+             "desirable to only view the aggregates. E.g. when there are a lot "
+             "of repetitions. Do note that only the display is affected. "
+             "Internally, all the actual runs are still used, e.g. for U test.")
+
+    utest = parser.add_argument_group()
+    utest.add_argument(
+        '--no-utest',
+        dest='utest',
+        default=True,
+        action="store_false",
+        help="The tool can do a two-tailed Mann-Whitney U test with the null hypothesis that it is equally likely that a randomly selected value from one sample will be less than or greater than a randomly selected value from a second sample.\nWARNING: requires **LARGE** (no less than {}) number of repetitions to be meaningful!\nThe test is being done by default, if at least {} repetitions were done.\nThis option can disable the U Test.".format(report.UTEST_OPTIMAL_REPETITIONS, report.UTEST_MIN_REPETITIONS))
+    alpha_default = 0.05
+    utest.add_argument(
+        "--alpha",
+        dest='utest_alpha',
+        default=alpha_default,
+        type=float,
+        help=("significance level alpha. if the calculated p-value is below this value, then the result is said to be statistically significant and the null hypothesis is rejected.\n(default: %0.4f)") %
+        alpha_default)
+
+    subparsers = parser.add_subparsers(
+        help='This tool has multiple modes of operation:',
+        dest='mode')
+
+    parser_a = subparsers.add_parser(
+        'benchmarks',
+        help='The simplest use case: compare all the output of these two benchmarks')
+    baseline = parser_a.add_argument_group(
+        'baseline', 'The benchmark baseline')
+    baseline.add_argument(
+        'test_baseline',
+        metavar='test_baseline',
+        type=argparse.FileType('r'),
+        nargs=1,
+        help='A benchmark executable or JSON output file')
+    contender = parser_a.add_argument_group(
+        'contender', 'The benchmark that will be compared against the baseline')
+    contender.add_argument(
+        'test_contender',
+        metavar='test_contender',
+        type=argparse.FileType('r'),
+        nargs=1,
+        help='A benchmark executable or JSON output file')
+    parser_a.add_argument(
+        'benchmark_options',
+        metavar='benchmark_options',
+        nargs=argparse.REMAINDER,
+        help='Arguments to pass when running benchmark executables')
+
+    parser_b = subparsers.add_parser(
+        'filters', help='Compare one filter of a benchmark against another filter of the same benchmark')
+    baseline = parser_b.add_argument_group(
+        'baseline', 'The benchmark baseline')
+    baseline.add_argument(
+        'test',
+        metavar='test',
+        type=argparse.FileType('r'),
+        nargs=1,
+        help='A benchmark executable or JSON output file')
+    baseline.add_argument(
+        'filter_baseline',
+        metavar='filter_baseline',
+        type=str,
+        nargs=1,
+        help='The first filter, which will be used as the baseline')
+    contender = parser_b.add_argument_group(
+        'contender', 'The benchmark that will be compared against the baseline')
+    contender.add_argument(
+        'filter_contender',
+        metavar='filter_contender',
+        type=str,
+        nargs=1,
+        help='The second filter, which will be compared against the baseline')
+    parser_b.add_argument(
+        'benchmark_options',
+        metavar='benchmark_options',
+        nargs=argparse.REMAINDER,
+        help='Arguments to pass when running benchmark executables')
+
+    parser_c = subparsers.add_parser(
+        'benchmarksfiltered',
+        help='Compare one filter of the first benchmark against another filter of the second benchmark')
+    baseline = parser_c.add_argument_group(
+        'baseline', 'The benchmark baseline')
+    baseline.add_argument(
+        'test_baseline',
+        metavar='test_baseline',
+        type=argparse.FileType('r'),
+        nargs=1,
+        help='A benchmark executable or JSON output file')
+    baseline.add_argument(
+        'filter_baseline',
+        metavar='filter_baseline',
+        type=str,
+        nargs=1,
+        help='The first filter, which will be used as the baseline')
+    contender = parser_c.add_argument_group(
+        'contender', 'The benchmark that will be compared against the baseline')
+    contender.add_argument(
+        'test_contender',
+        metavar='test_contender',
+        type=argparse.FileType('r'),
+        nargs=1,
+        help='The second benchmark executable or JSON output file, which will be compared against the baseline')
+    contender.add_argument(
+        'filter_contender',
+        metavar='filter_contender',
+        type=str,
+        nargs=1,
+        help='The second filter, which will be compared against the baseline')
+    parser_c.add_argument(
+        'benchmark_options',
+        metavar='benchmark_options',
+        nargs=argparse.REMAINDER,
+        help='Arguments to pass when running benchmark executables')
+
+    return parser
+
+
+def main():
+    # Parse the command line flags
+    parser = create_parser()
+    args, unknown_args = parser.parse_known_args()
+    if args.mode is None:
+        parser.print_help()
+        exit(1)
+    assert not unknown_args
+    benchmark_options = args.benchmark_options
+
+    if args.mode == 'benchmarks':
+        test_baseline = args.test_baseline[0].name
+        test_contender = args.test_contender[0].name
+        filter_baseline = ''
+        filter_contender = ''
+
+        # NOTE: if test_baseline == test_contender, you are analyzing the stdev
+
+        description = 'Comparing %s to %s' % (test_baseline, test_contender)
+    elif args.mode == 'filters':
+        test_baseline = args.test[0].name
+        test_contender = args.test[0].name
+        filter_baseline = args.filter_baseline[0]
+        filter_contender = args.filter_contender[0]
+
+        # NOTE: if filter_baseline == filter_contender, you are analyzing the
+        # stdev
+
+        description = 'Comparing %s to %s (from %s)' % (
+            filter_baseline, filter_contender, args.test[0].name)
+    elif args.mode == 'benchmarksfiltered':
+        test_baseline = args.test_baseline[0].name
+        test_contender = args.test_contender[0].name
+        filter_baseline = args.filter_baseline[0]
+        filter_contender = args.filter_contender[0]
+
+        # NOTE: if test_baseline == test_contender and
+        # filter_baseline == filter_contender, you are analyzing the stdev
+
+        description = 'Comparing %s (from %s) to %s (from %s)' % (
+            filter_baseline, test_baseline, filter_contender, test_contender)
+    else:
+        # should never happen
+        print("Unrecognized mode of operation: '%s'" % args.mode)
+        parser.print_help()
+        exit(1)
+
+    check_inputs(test_baseline, test_contender, benchmark_options)
+
+    if args.display_aggregates_only:
+        benchmark_options += ['--benchmark_display_aggregates_only=true']
+
+    options_baseline = []
+    options_contender = []
+
+    if filter_baseline and filter_contender:
+        options_baseline = ['--benchmark_filter=%s' % filter_baseline]
+        options_contender = ['--benchmark_filter=%s' % filter_contender]
+
+    # Run the benchmarks and report the results
+    json1 = json1_orig = gbench.util.run_or_load_benchmark(
+        test_baseline, benchmark_options + options_baseline)
+    json2 = json2_orig = gbench.util.run_or_load_benchmark(
+        test_contender, benchmark_options + options_contender)
+
+    # Now, filter the benchmarks so that the difference report can work
+    if filter_baseline and filter_contender:
+        replacement = '[%s vs. %s]' % (filter_baseline, filter_contender)
+        json1 = gbench.report.filter_benchmark(
+            json1_orig, filter_baseline, replacement)
+        json2 = gbench.report.filter_benchmark(
+            json2_orig, filter_contender, replacement)
+
+    # Diff and output
+    output_lines = gbench.report.generate_difference_report(
+        json1, json2, args.display_aggregates_only,
+        args.utest, args.utest_alpha)
+    print(description)
+    for ln in output_lines:
+        print(ln)
+
+
+class TestParser(unittest.TestCase):
+    def setUp(self):
+        self.parser = create_parser()
+        testInputs = os.path.join(
+            os.path.dirname(
+                os.path.realpath(__file__)),
+            'gbench',
+            'Inputs')
+        self.testInput0 = os.path.join(testInputs, 'test1_run1.json')
+        self.testInput1 = os.path.join(testInputs, 'test1_run2.json')
+
+    def test_benchmarks_basic(self):
+        parsed = self.parser.parse_args(
+            ['benchmarks', self.testInput0, self.testInput1])
+        self.assertFalse(parsed.display_aggregates_only)
+        self.assertTrue(parsed.utest)
+        self.assertEqual(parsed.mode, 'benchmarks')
+        self.assertEqual(parsed.test_baseline[0].name, self.testInput0)
+        self.assertEqual(parsed.test_contender[0].name, self.testInput1)
+        self.assertFalse(parsed.benchmark_options)
+
+    def test_benchmarks_basic_without_utest(self):
+        parsed = self.parser.parse_args(
+            ['--no-utest', 'benchmarks', self.testInput0, self.testInput1])
+        self.assertFalse(parsed.display_aggregates_only)
+        self.assertFalse(parsed.utest)
+        self.assertEqual(parsed.utest_alpha, 0.05)
+        self.assertEqual(parsed.mode, 'benchmarks')
+        self.assertEqual(parsed.test_baseline[0].name, self.testInput0)
+        self.assertEqual(parsed.test_contender[0].name, self.testInput1)
+        self.assertFalse(parsed.benchmark_options)
+
+    def test_benchmarks_basic_display_aggregates_only(self):
+        parsed = self.parser.parse_args(
+            ['-a', 'benchmarks', self.testInput0, self.testInput1])
+        self.assertTrue(parsed.display_aggregates_only)
+        self.assertTrue(parsed.utest)
+        self.assertEqual(parsed.mode, 'benchmarks')
+        self.assertEqual(parsed.test_baseline[0].name, self.testInput0)
+        self.assertEqual(parsed.test_contender[0].name, self.testInput1)
+        self.assertFalse(parsed.benchmark_options)
+
+    def test_benchmarks_basic_with_utest_alpha(self):
+        parsed = self.parser.parse_args(
+            ['--alpha=0.314', 'benchmarks', self.testInput0, self.testInput1])
+        self.assertFalse(parsed.display_aggregates_only)
+        self.assertTrue(parsed.utest)
+        self.assertEqual(parsed.utest_alpha, 0.314)
+        self.assertEqual(parsed.mode, 'benchmarks')
+        self.assertEqual(parsed.test_baseline[0].name, self.testInput0)
+        self.assertEqual(parsed.test_contender[0].name, self.testInput1)
+        self.assertFalse(parsed.benchmark_options)
+
+    def test_benchmarks_basic_without_utest_with_utest_alpha(self):
+        parsed = self.parser.parse_args(
+            ['--no-utest', '--alpha=0.314', 'benchmarks', self.testInput0, self.testInput1])
+        self.assertFalse(parsed.display_aggregates_only)
+        self.assertFalse(parsed.utest)
+        self.assertEqual(parsed.utest_alpha, 0.314)
+        self.assertEqual(parsed.mode, 'benchmarks')
+        self.assertEqual(parsed.test_baseline[0].name, self.testInput0)
+        self.assertEqual(parsed.test_contender[0].name, self.testInput1)
+        self.assertFalse(parsed.benchmark_options)
+
+    def test_benchmarks_with_remainder(self):
+        parsed = self.parser.parse_args(
+            ['benchmarks', self.testInput0, self.testInput1, 'd'])
+        self.assertFalse(parsed.display_aggregates_only)
+        self.assertTrue(parsed.utest)
+        self.assertEqual(parsed.mode, 'benchmarks')
+        self.assertEqual(parsed.test_baseline[0].name, self.testInput0)
+        self.assertEqual(parsed.test_contender[0].name, self.testInput1)
+        self.assertEqual(parsed.benchmark_options, ['d'])
+
+    def test_benchmarks_with_remainder_after_doubleminus(self):
+        parsed = self.parser.parse_args(
+            ['benchmarks', self.testInput0, self.testInput1, '--', 'e'])
+        self.assertFalse(parsed.display_aggregates_only)
+        self.assertTrue(parsed.utest)
+        self.assertEqual(parsed.mode, 'benchmarks')
+        self.assertEqual(parsed.test_baseline[0].name, self.testInput0)
+        self.assertEqual(parsed.test_contender[0].name, self.testInput1)
+        self.assertEqual(parsed.benchmark_options, ['e'])
+
+    def test_filters_basic(self):
+        parsed = self.parser.parse_args(
+            ['filters', self.testInput0, 'c', 'd'])
+        self.assertFalse(parsed.display_aggregates_only)
+        self.assertTrue(parsed.utest)
+        self.assertEqual(parsed.mode, 'filters')
+        self.assertEqual(parsed.test[0].name, self.testInput0)
+        self.assertEqual(parsed.filter_baseline[0], 'c')
+        self.assertEqual(parsed.filter_contender[0], 'd')
+        self.assertFalse(parsed.benchmark_options)
+
+    def test_filters_with_remainder(self):
+        parsed = self.parser.parse_args(
+            ['filters', self.testInput0, 'c', 'd', 'e'])
+        self.assertFalse(parsed.display_aggregates_only)
+        self.assertTrue(parsed.utest)
+        self.assertEqual(parsed.mode, 'filters')
+        self.assertEqual(parsed.test[0].name, self.testInput0)
+        self.assertEqual(parsed.filter_baseline[0], 'c')
+        self.assertEqual(parsed.filter_contender[0], 'd')
+        self.assertEqual(parsed.benchmark_options, ['e'])
+
+    def test_filters_with_remainder_after_doubleminus(self):
+        parsed = self.parser.parse_args(
+            ['filters', self.testInput0, 'c', 'd', '--', 'f'])
+        self.assertFalse(parsed.display_aggregates_only)
+        self.assertTrue(parsed.utest)
+        self.assertEqual(parsed.mode, 'filters')
+        self.assertEqual(parsed.test[0].name, self.testInput0)
+        self.assertEqual(parsed.filter_baseline[0], 'c')
+        self.assertEqual(parsed.filter_contender[0], 'd')
+        self.assertEqual(parsed.benchmark_options, ['f'])
+
+    def test_benchmarksfiltered_basic(self):
+        parsed = self.parser.parse_args(
+            ['benchmarksfiltered', self.testInput0, 'c', self.testInput1, 'e'])
+        self.assertFalse(parsed.display_aggregates_only)
+        self.assertTrue(parsed.utest)
+        self.assertEqual(parsed.mode, 'benchmarksfiltered')
+        self.assertEqual(parsed.test_baseline[0].name, self.testInput0)
+        self.assertEqual(parsed.filter_baseline[0], 'c')
+        self.assertEqual(parsed.test_contender[0].name, self.testInput1)
+        self.assertEqual(parsed.filter_contender[0], 'e')
+        self.assertFalse(parsed.benchmark_options)
+
+    def test_benchmarksfiltered_with_remainder(self):
+        parsed = self.parser.parse_args(
+            ['benchmarksfiltered', self.testInput0, 'c', self.testInput1, 'e', 'f'])
+        self.assertFalse(parsed.display_aggregates_only)
+        self.assertTrue(parsed.utest)
+        self.assertEqual(parsed.mode, 'benchmarksfiltered')
+        self.assertEqual(parsed.test_baseline[0].name, self.testInput0)
+        self.assertEqual(parsed.filter_baseline[0], 'c')
+        self.assertEqual(parsed.test_contender[0].name, self.testInput1)
+        self.assertEqual(parsed.filter_contender[0], 'e')
+        self.assertEqual(parsed.benchmark_options[0], 'f')
+
+    def test_benchmarksfiltered_with_remainder_after_doubleminus(self):
+        parsed = self.parser.parse_args(
+            ['benchmarksfiltered', self.testInput0, 'c', self.testInput1, 'e', '--', 'g'])
+        self.assertFalse(parsed.display_aggregates_only)
+        self.assertTrue(parsed.utest)
+        self.assertEqual(parsed.mode, 'benchmarksfiltered')
+        self.assertEqual(parsed.test_baseline[0].name, self.testInput0)
+        self.assertEqual(parsed.filter_baseline[0], 'c')
+        self.assertEqual(parsed.test_contender[0].name, self.testInput1)
+        self.assertEqual(parsed.filter_contender[0], 'e')
+        self.assertEqual(parsed.benchmark_options[0], 'g')
+
+
+if __name__ == '__main__':
+    # unittest.main()
+    main()
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
+# kate: tab-width: 4; replace-tabs on; indent-width 4; tab-indents: off;
+# kate: indent-mode python; remove-trailing-spaces modified;
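
Note: the pipeline that main() wires up from the command line can also be
driven directly from Python with the helpers compare.py already imports. A
minimal sketch, assuming it is run from the tools/ directory (so the gbench
package is importable) and using placeholder benchmark paths:

import gbench
from gbench import util, report

# Each input may be a benchmark executable or a JSON output file, exactly as
# run_or_load_benchmark() is used in main() above.
json_baseline = gbench.util.run_or_load_benchmark('./baseline_benchmark', [])
json_contender = gbench.util.run_or_load_benchmark('./contender_benchmark', [])

for line in gbench.report.generate_difference_report(
        json_baseline, json_contender, utest=True, utest_alpha=0.05):
    print(line)
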
diff --git a/src/third_party/google_benchmark/tools/gbench/Inputs/test1_run1.json b/src/third_party/google_benchmark/tools/gbench/Inputs/test1_run1.json
new file mode 100644
index 0000000..601e327
--- /dev/null
+++ b/src/third_party/google_benchmark/tools/gbench/Inputs/test1_run1.json
@@ -0,0 +1,119 @@
+{
+  "context": {
+    "date": "2016-08-02 17:44:46",
+    "num_cpus": 4,
+    "mhz_per_cpu": 4228,
+    "cpu_scaling_enabled": false,
+    "library_build_type": "release"
+  },
+  "benchmarks": [
+    {
+      "name": "BM_SameTimes",
+      "iterations": 1000,
+      "real_time": 10,
+      "cpu_time": 10,
+      "time_unit": "ns"
+    },
+    {
+      "name": "BM_2xFaster",
+      "iterations": 1000,
+      "real_time": 50,
+      "cpu_time": 50,
+      "time_unit": "ns"
+    },
+    {
+      "name": "BM_2xSlower",
+      "iterations": 1000,
+      "real_time": 50,
+      "cpu_time": 50,
+      "time_unit": "ns"
+    },
+    {
+      "name": "BM_1PercentFaster",
+      "iterations": 1000,
+      "real_time": 100,
+      "cpu_time": 100,
+      "time_unit": "ns"
+    },
+    {
+      "name": "BM_1PercentSlower",
+      "iterations": 1000,
+      "real_time": 100,
+      "cpu_time": 100,
+      "time_unit": "ns"
+    },
+    {
+      "name": "BM_10PercentFaster",
+      "iterations": 1000,
+      "real_time": 100,
+      "cpu_time": 100,
+      "time_unit": "ns"
+    },
+    {
+      "name": "BM_10PercentSlower",
+      "iterations": 1000,
+      "real_time": 100,
+      "cpu_time": 100,
+      "time_unit": "ns"
+    },
+    {
+      "name": "BM_100xSlower",
+      "iterations": 1000,
+      "real_time": 100,
+      "cpu_time": 100,
+      "time_unit": "ns"
+    },
+    {
+      "name": "BM_100xFaster",
+      "iterations": 1000,
+      "real_time": 10000,
+      "cpu_time": 10000,
+      "time_unit": "ns"
+    },
+    {
+      "name": "BM_10PercentCPUToTime",
+      "iterations": 1000,
+      "real_time": 100,
+      "cpu_time": 100,
+      "time_unit": "ns"
+    },
+    {
+      "name": "BM_ThirdFaster",
+      "iterations": 1000,
+      "real_time": 100,
+      "cpu_time": 100,
+      "time_unit": "ns"
+    },
+    {
+      "name": "MyComplexityTest_BigO",
+      "run_name": "MyComplexityTest",
+      "run_type": "aggregate",
+      "aggregate_name": "BigO",
+      "cpu_coefficient": 4.2749856294592886e+00,
+      "real_coefficient": 6.4789275289789780e+00,
+      "big_o": "N",
+      "time_unit": "ns"
+    },
+    {
+      "name": "MyComplexityTest_RMS",
+      "run_name": "MyComplexityTest",
+      "run_type": "aggregate",
+      "aggregate_name": "RMS",
+      "rms": 4.5097802512472874e-03
+    },
+    {
+      "name": "BM_NotBadTimeUnit",
+      "iterations": 1000,
+      "real_time": 0.4,
+      "cpu_time": 0.5,
+      "time_unit": "s"
+    },
+    {
+      "name": "BM_DifferentTimeUnit",
+      "iterations": 1,
+      "real_time": 1,
+      "cpu_time": 1,
+      "time_unit": "s"
+    }
+  ]
+}
diff --git a/src/third_party/google_benchmark/tools/gbench/Inputs/test1_run2.json b/src/third_party/google_benchmark/tools/gbench/Inputs/test1_run2.json
new file mode 100644
index 0000000..3cbcf39
--- /dev/null
+++ b/src/third_party/google_benchmark/tools/gbench/Inputs/test1_run2.json
@@ -0,0 +1,119 @@
+{
+  "context": {
+    "date": "2016-08-02 17:44:46",
+    "num_cpus": 4,
+    "mhz_per_cpu": 4228,
+    "cpu_scaling_enabled": false,
+    "library_build_type": "release"
+  },
+  "benchmarks": [
+    {
+      "name": "BM_SameTimes",
+      "iterations": 1000,
+      "real_time": 10,
+      "cpu_time": 10,
+      "time_unit": "ns"
+    },
+    {
+      "name": "BM_2xFaster",
+      "iterations": 1000,
+      "real_time": 25,
+      "cpu_time": 25,
+      "time_unit": "ns"
+    },
+    {
+      "name": "BM_2xSlower",
+      "iterations": 20833333,
+      "real_time": 100,
+      "cpu_time": 100,
+      "time_unit": "ns"
+    },
+    {
+      "name": "BM_1PercentFaster",
+      "iterations": 1000,
+      "real_time": 98.9999999,
+      "cpu_time": 98.9999999,
+      "time_unit": "ns"
+    },
+    {
+      "name": "BM_1PercentSlower",
+      "iterations": 1000,
+      "real_time": 100.9999999,
+      "cpu_time": 100.9999999,
+      "time_unit": "ns"
+    },
+    {
+      "name": "BM_10PercentFaster",
+      "iterations": 1000,
+      "real_time": 90,
+      "cpu_time": 90,
+      "time_unit": "ns"
+    },
+    {
+      "name": "BM_10PercentSlower",
+      "iterations": 1000,
+      "real_time": 110,
+      "cpu_time": 110,
+      "time_unit": "ns"
+    },
+    {
+      "name": "BM_100xSlower",
+      "iterations": 1000,
+      "real_time": 1.0000e+04,
+      "cpu_time": 1.0000e+04,
+      "time_unit": "ns"
+    },
+    {
+      "name": "BM_100xFaster",
+      "iterations": 1000,
+      "real_time": 100,
+      "cpu_time": 100,
+      "time_unit": "ns"
+    },
+    {
+      "name": "BM_10PercentCPUToTime",
+      "iterations": 1000,
+      "real_time": 110,
+      "cpu_time": 90,
+      "time_unit": "ns"
+    },
+    {
+      "name": "BM_ThirdFaster",
+      "iterations": 1000,
+      "real_time": 66.665,
+      "cpu_time": 66.664,
+      "time_unit": "ns"
+    },
+    {
+      "name": "MyComplexityTest_BigO",
+      "run_name": "MyComplexityTest",
+      "run_type": "aggregate",
+      "aggregate_name": "BigO",
+      "cpu_coefficient": 5.6215779594361486e+00,
+      "real_coefficient": 5.6288314793554610e+00,
+      "big_o": "N",
+      "time_unit": "ns"
+    },
+    {
+      "name": "MyComplexityTest_RMS",
+      "run_name": "MyComplexityTest",
+      "run_type": "aggregate",
+      "aggregate_name": "RMS",
+      "rms": 3.3128901852342174e-03
+    },
+    {
+      "name": "BM_NotBadTimeUnit",
+      "iterations": 1000,
+      "real_time": 0.04,
+      "cpu_time": 0.6,
+      "time_unit": "s"
+    },
+    {
+      "name": "BM_DifferentTimeUnit",
+      "iterations": 1,
+      "real_time": 1,
+      "cpu_time": 1,
+      "time_unit": "ns"
+    }
+  ]
+}
diff --git a/src/third_party/google_benchmark/tools/gbench/Inputs/test2_run.json b/src/third_party/google_benchmark/tools/gbench/Inputs/test2_run.json
new file mode 100644
index 0000000..15bc698
--- /dev/null
+++ b/src/third_party/google_benchmark/tools/gbench/Inputs/test2_run.json
@@ -0,0 +1,81 @@
+{
+  "context": {
+    "date": "2016-08-02 17:44:46",
+    "num_cpus": 4,
+    "mhz_per_cpu": 4228,
+    "cpu_scaling_enabled": false,
+    "library_build_type": "release"
+  },
+  "benchmarks": [
+    {
+      "name": "BM_Hi",
+      "iterations": 1234,
+      "real_time": 42,
+      "cpu_time": 24,
+      "time_unit": "ms"
+    },
+    {
+      "name": "BM_Zero",
+      "iterations": 1000,
+      "real_time": 10,
+      "cpu_time": 10,
+      "time_unit": "ns"
+    },
+    {
+      "name": "BM_Zero/4",
+      "iterations": 4000,
+      "real_time": 40,
+      "cpu_time": 40,
+      "time_unit": "ns"
+    },
+    {
+      "name": "Prefix/BM_Zero",
+      "iterations": 2000,
+      "real_time": 20,
+      "cpu_time": 20,
+      "time_unit": "ns"
+    },
+    {
+      "name": "Prefix/BM_Zero/3",
+      "iterations": 3000,
+      "real_time": 30,
+      "cpu_time": 30,
+      "time_unit": "ns"
+    },
+    {
+      "name": "BM_One",
+      "iterations": 5000,
+      "real_time": 5,
+      "cpu_time": 5,
+      "time_unit": "ns"
+    },
+    {
+      "name": "BM_One/4",
+      "iterations": 2000,
+      "real_time": 20,
+      "cpu_time": 20,
+      "time_unit": "ns"
+    },
+    {
+      "name": "Prefix/BM_One",
+      "iterations": 1000,
+      "real_time": 10,
+      "cpu_time": 10,
+      "time_unit": "ns"
+    },
+    {
+      "name": "Prefix/BM_One/3",
+      "iterations": 1500,
+      "real_time": 15,
+      "cpu_time": 15,
+      "time_unit": "ns"
+    },
+    {
+      "name": "BM_Bye",
+      "iterations": 5321,
+      "real_time": 11,
+      "cpu_time": 63,
+      "time_unit": "ns"
+    }
+  ]
+}
diff --git a/src/third_party/google_benchmark/tools/gbench/Inputs/test3_run0.json b/src/third_party/google_benchmark/tools/gbench/Inputs/test3_run0.json
new file mode 100644
index 0000000..49f8b06
--- /dev/null
+++ b/src/third_party/google_benchmark/tools/gbench/Inputs/test3_run0.json
@@ -0,0 +1,65 @@
+{
+  "context": {
+    "date": "2016-08-02 17:44:46",
+    "num_cpus": 4,
+    "mhz_per_cpu": 4228,
+    "cpu_scaling_enabled": false,
+    "library_build_type": "release"
+  },
+  "benchmarks": [
+    {
+      "name": "BM_One",
+      "run_type": "aggregate",
+      "iterations": 1000,
+      "real_time": 10,
+      "cpu_time": 100,
+      "time_unit": "ns"
+    },
+    {
+      "name": "BM_Two",
+      "iterations": 1000,
+      "real_time": 9,
+      "cpu_time": 90,
+      "time_unit": "ns"
+    },
+    {
+      "name": "BM_Two",
+      "iterations": 1000,
+      "real_time": 8,
+      "cpu_time": 86,
+      "time_unit": "ns"
+    },
+    {
+      "name": "short",
+      "run_type": "aggregate",
+      "iterations": 1000,
+      "real_time": 8,
+      "cpu_time": 80,
+      "time_unit": "ns"
+    },
+    {
+      "name": "short",
+      "run_type": "aggregate",
+      "iterations": 1000,
+      "real_time": 8,
+      "cpu_time": 77,
+      "time_unit": "ns"
+    },
+    {
+      "name": "medium",
+      "run_type": "iteration",
+      "iterations": 1000,
+      "real_time": 8,
+      "cpu_time": 80,
+      "time_unit": "ns"
+    },
+    {
+      "name": "medium",
+      "run_type": "iteration",
+      "iterations": 1000,
+      "real_time": 9,
+      "cpu_time": 82,
+      "time_unit": "ns"
+    }
+  ]
+}
diff --git a/src/third_party/google_benchmark/tools/gbench/Inputs/test3_run1.json b/src/third_party/google_benchmark/tools/gbench/Inputs/test3_run1.json
new file mode 100644
index 0000000..acc5ba1
--- /dev/null
+++ b/src/third_party/google_benchmark/tools/gbench/Inputs/test3_run1.json
@@ -0,0 +1,65 @@
+{
+  "context": {
+    "date": "2016-08-02 17:44:46",
+    "num_cpus": 4,
+    "mhz_per_cpu": 4228,
+    "cpu_scaling_enabled": false,
+    "library_build_type": "release"
+  },
+  "benchmarks": [
+    {
+      "name": "BM_One",
+      "iterations": 1000,
+      "real_time": 9,
+      "cpu_time": 110,
+      "time_unit": "ns"
+    },
+    {
+      "name": "BM_Two",
+      "run_type": "aggregate",
+      "iterations": 1000,
+      "real_time": 10,
+      "cpu_time": 89,
+      "time_unit": "ns"
+    },
+    {
+      "name": "BM_Two",
+      "iterations": 1000,
+      "real_time": 7,
+      "cpu_time": 72,
+      "time_unit": "ns"
+    },
+    {
+      "name": "short",
+      "run_type": "aggregate",
+      "iterations": 1000,
+      "real_time": 7,
+      "cpu_time": 75,
+      "time_unit": "ns"
+    },
+    {
+      "name": "short",
+      "run_type": "aggregate",
+      "iterations": 762,
+      "real_time": 4.54,
+      "cpu_time": 66.6,
+      "time_unit": "ns"
+    },
+    {
+      "name": "short",
+      "run_type": "iteration",
+      "iterations": 1000,
+      "real_time": 800,
+      "cpu_time": 1,
+      "time_unit": "ns"
+    },
+    {
+      "name": "medium",
+      "run_type": "iteration",
+      "iterations": 1200,
+      "real_time": 5,
+      "cpu_time": 53,
+      "time_unit": "ns"
+    }
+  ]
+}
diff --git a/src/third_party/google_benchmark/tools/gbench/__init__.py b/src/third_party/google_benchmark/tools/gbench/__init__.py
new file mode 100644
index 0000000..fce1a1a
--- /dev/null
+++ b/src/third_party/google_benchmark/tools/gbench/__init__.py
@@ -0,0 +1,8 @@
+"""Google Benchmark tooling"""
+
+__author__ = 'Eric Fiselier'
+__email__ = 'eric@efcs.ca'
+__versioninfo__ = (0, 5, 0)
+__version__ = '.'.join(str(v) for v in __versioninfo__) + 'dev'
+
+__all__ = []
diff --git a/src/third_party/google_benchmark/tools/gbench/report.py b/src/third_party/google_benchmark/tools/gbench/report.py
new file mode 100644
index 0000000..5bd3a8d
--- /dev/null
+++ b/src/third_party/google_benchmark/tools/gbench/report.py
@@ -0,0 +1,541 @@
+"""report.py - Utilities for reporting statistics about benchmark results
+"""
+import unittest
+import os
+import re
+import copy
+
+from scipy.stats import mannwhitneyu
+
+
+class BenchmarkColor(object):
+    def __init__(self, name, code):
+        self.name = name
+        self.code = code
+
+    def __repr__(self):
+        return '%s%r' % (self.__class__.__name__,
+                         (self.name, self.code))
+
+    def __format__(self, format):
+        return self.code
+
+
+# Benchmark Colors Enumeration
+BC_NONE = BenchmarkColor('NONE', '')
+BC_MAGENTA = BenchmarkColor('MAGENTA', '\033[95m')
+BC_CYAN = BenchmarkColor('CYAN', '\033[96m')
+BC_OKBLUE = BenchmarkColor('OKBLUE', '\033[94m')
+BC_OKGREEN = BenchmarkColor('OKGREEN', '\033[32m')
+BC_HEADER = BenchmarkColor('HEADER', '\033[92m')
+BC_WARNING = BenchmarkColor('WARNING', '\033[93m')
+BC_WHITE = BenchmarkColor('WHITE', '\033[97m')
+BC_FAIL = BenchmarkColor('FAIL', '\033[91m')
+BC_ENDC = BenchmarkColor('ENDC', '\033[0m')
+BC_BOLD = BenchmarkColor('BOLD', '\033[1m')
+BC_UNDERLINE = BenchmarkColor('UNDERLINE', '\033[4m')
+
+UTEST_MIN_REPETITIONS = 2
+UTEST_OPTIMAL_REPETITIONS = 9  # Lowest reasonable number; more is better.
+UTEST_COL_NAME = "_pvalue"
+
+
+def color_format(use_color, fmt_str, *args, **kwargs):
+    """
+    Return the result of 'fmt_str.format(*args, **kwargs)' after transforming
+    'args' and 'kwargs' according to the value of 'use_color'. If 'use_color'
+    is False then all color codes in 'args' and 'kwargs' are replaced with
+    the empty string.
+    """
+    assert use_color is True or use_color is False
+    if not use_color:
+        args = [arg if not isinstance(arg, BenchmarkColor) else BC_NONE
+                for arg in args]
+        kwargs = {key: arg if not isinstance(arg, BenchmarkColor) else BC_NONE
+                  for key, arg in kwargs.items()}
+    return fmt_str.format(*args, **kwargs)
+
+
+def find_longest_name(benchmark_list):
+    """
+    Return the length of the longest benchmark name in a given list of
+    benchmark JSON objects
+    """
+    longest_name = 1
+    for bc in benchmark_list:
+        if len(bc['name']) > longest_name:
+            longest_name = len(bc['name'])
+    return longest_name
+
+
+def calculate_change(old_val, new_val):
+    """
+    Return a float representing the decimal change between old_val and new_val.
+    """
+    if old_val == 0 and new_val == 0:
+        return 0.0
+    if old_val == 0:
+        return float(new_val - old_val) / (float(old_val + new_val) / 2)
+    return float(new_val - old_val) / abs(old_val)
+
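+# Worked examples (illustrative only, not part of the upstream module):
+#   calculate_change(100, 110) ==  0.10  # 10% increase
+#   calculate_change(100,  90) == -0.10  # 10% decrease
+#   calculate_change(0, 5)     ==  2.00  # an old value of 0 falls back to
+#                                        # the mean of old and new as divisor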
+
+def filter_benchmark(json_orig, family, replacement=""):
+    """
+    Apply a filter to the JSON, keeping only the 'family' of benchmarks.
+    """
+    regex = re.compile(family)
+    filtered = {}
+    filtered['benchmarks'] = []
+    for be in json_orig['benchmarks']:
+        if not regex.search(be['name']):
+            continue
+        filteredbench = copy.deepcopy(be)  # Do NOT modify the old name!
+        filteredbench['name'] = regex.sub(replacement, filteredbench['name'])
+        filtered['benchmarks'].append(filteredbench)
+    return filtered
+
+
+def get_unique_benchmark_names(json):
+    """
+    Return all the unique benchmark 'names', *keeping* their original order.
+    """
+    seen = set()
+    uniqued = [x['name'] for x in json['benchmarks']
+               if x['name'] not in seen and
+               (seen.add(x['name']) or True)]
+    return uniqued
+
+
+def intersect(list1, list2):
+    """
+    Given two lists, get a new list consisting of the elements only contained
+    in *both of the input lists*, while preserving the ordering.
+    """
+    return [x for x in list1 if x in list2]
+
+
+def is_potentially_comparable_benchmark(x):
+    return ('time_unit' in x and 'real_time' in x and 'cpu_time' in x)
+
+
+def partition_benchmarks(json1, json2):
+    """
+    While preserving the ordering, find benchmarks with the same names in
+    both of the inputs, and group them.
+    (i.e. partition/filter into groups with common name)
+    """
+    json1_unique_names = get_unique_benchmark_names(json1)
+    json2_unique_names = get_unique_benchmark_names(json2)
+    names = intersect(json1_unique_names, json2_unique_names)
+    partitions = []
+    for name in names:
+        time_unit = None
+        # Pick the time unit from the first entry of the lhs benchmark.
+        # We should be careful not to crash with unexpected input.
+        for x in json1['benchmarks']:
+            if (x['name'] == name and is_potentially_comparable_benchmark(x)):
+                time_unit = x['time_unit']
+                break
+        if time_unit is None:
+            continue
+        # Filter by name and time unit.
+        # All the repetitions are assumed to be comparable.
+        lhs = [x for x in json1['benchmarks'] if x['name'] == name and
+               x['time_unit'] == time_unit]
+        rhs = [x for x in json2['benchmarks'] if x['name'] == name and
+               x['time_unit'] == time_unit]
+        partitions.append([lhs, rhs])
+    return partitions
+
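+# Example (illustrative only): if json1 contains runs named "BM_A" and "BM_B"
+# while json2 contains runs named "BM_B" and "BM_C", only "BM_B" is common, so
+# a single partition [[BM_B runs from json1], [BM_B runs from json2]] is
+# returned; repeated runs of the same benchmark on each side stay grouped.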
+
+def extract_field(partition, field_name):
+    # The count of elements may be different. We want *all* of them.
+    lhs = [x[field_name] for x in partition[0]]
+    rhs = [x[field_name] for x in partition[1]]
+    return [lhs, rhs]
+
+def calc_utest(timings_cpu, timings_time):
+    min_rep_cnt = min(len(timings_time[0]),
+                      len(timings_time[1]),
+                      len(timings_cpu[0]),
+                      len(timings_cpu[1]))
+
+    # Does *everything* have at least UTEST_MIN_REPETITIONS repetitions?
+    if min_rep_cnt < UTEST_MIN_REPETITIONS:
+        return False, None, None
+
+    time_pvalue = mannwhitneyu(
+        timings_time[0], timings_time[1], alternative='two-sided').pvalue
+    cpu_pvalue = mannwhitneyu(
+        timings_cpu[0], timings_cpu[1], alternative='two-sided').pvalue
+
+    return (min_rep_cnt >= UTEST_OPTIMAL_REPETITIONS), cpu_pvalue, time_pvalue
+
+def print_utest(partition, utest_alpha, first_col_width, use_color=True):
+    def get_utest_color(pval):
+        return BC_FAIL if pval >= utest_alpha else BC_OKGREEN
+
+    timings_time = extract_field(partition, 'real_time')
+    timings_cpu = extract_field(partition, 'cpu_time')
+    have_optimal_repetitions, cpu_pvalue, time_pvalue = calc_utest(timings_cpu, timings_time)
+
+    # Check if we failed miserably with minimum required repetitions for utest
+    if not have_optimal_repetitions and cpu_pvalue is None and time_pvalue is None:
+        return []
+
+    dsc = "U Test, Repetitions: {} vs {}".format(
+        len(timings_cpu[0]), len(timings_cpu[1]))
+    dsc_color = BC_OKGREEN
+
+    # We still got some results to show but issue a warning about it.
+    if not have_optimal_repetitions:
+        dsc_color = BC_WARNING
+        dsc += ". WARNING: Results unreliable! {}+ repetitions recommended.".format(
+            UTEST_OPTIMAL_REPETITIONS)
+
+    special_str = "{}{:<{}s}{endc}{}{:16.4f}{endc}{}{:16.4f}{endc}{}      {}"
+
+    last_name = partition[0][0]['name']
+    return [color_format(use_color,
+                         special_str,
+                         BC_HEADER,
+                         "{}{}".format(last_name, UTEST_COL_NAME),
+                         first_col_width,
+                         get_utest_color(time_pvalue), time_pvalue,
+                         get_utest_color(cpu_pvalue), cpu_pvalue,
+                         dsc_color, dsc,
+                         endc=BC_ENDC)]
+
+
+def generate_difference_report(
+        json1,
+        json2,
+        display_aggregates_only=False,
+        utest=False,
+        utest_alpha=0.05,
+        use_color=True):
+    """
+    Calculate and report the difference between each test of two benchmarks
+    runs specified as 'json1' and 'json2'.
+    """
+    assert utest is True or utest is False
+    first_col_width = find_longest_name(json1['benchmarks'])
+
+    def find_test(name):
+        for b in json2['benchmarks']:
+            if b['name'] == name:
+                return b
+        return None
+
+    first_col_width = max(
+        first_col_width,
+        len('Benchmark'))
+    first_col_width += len(UTEST_COL_NAME)
+    first_line = "{:<{}s}Time             CPU      Time Old      Time New       CPU Old       CPU New".format(
+        'Benchmark', 12 + first_col_width)
+    output_strs = [first_line, '-' * len(first_line)]
+
+    partitions = partition_benchmarks(json1, json2)
+    for partition in partitions:
+        # Careful, we may have different repetition counts.
+        for i in range(min(len(partition[0]), len(partition[1]))):
+            bn = partition[0][i]
+            other_bench = partition[1][i]
+
+            # *If* we were asked to only display aggregates,
+            # and if it is non-aggregate, then skip it.
+            if display_aggregates_only and 'run_type' in bn and 'run_type' in other_bench:
+                assert bn['run_type'] == other_bench['run_type']
+                if bn['run_type'] != 'aggregate':
+                    continue
+
+            fmt_str = "{}{:<{}s}{endc}{}{:+16.4f}{endc}{}{:+16.4f}{endc}{:14.0f}{:14.0f}{endc}{:14.0f}{:14.0f}"
+
+            def get_color(res):
+                if res > 0.05:
+                    return BC_FAIL
+                elif res > -0.07:
+                    return BC_WHITE
+                else:
+                    return BC_CYAN
+
+            tres = calculate_change(bn['real_time'], other_bench['real_time'])
+            cpures = calculate_change(bn['cpu_time'], other_bench['cpu_time'])
+            output_strs += [color_format(use_color,
+                                         fmt_str,
+                                         BC_HEADER,
+                                         bn['name'],
+                                         first_col_width,
+                                         get_color(tres),
+                                         tres,
+                                         get_color(cpures),
+                                         cpures,
+                                         bn['real_time'],
+                                         other_bench['real_time'],
+                                         bn['cpu_time'],
+                                         other_bench['cpu_time'],
+                                         endc=BC_ENDC)]
+
+        # After processing the whole partition, if requested, do the U test.
+        if utest:
+            output_strs += print_utest(partition,
+                                       utest_alpha=utest_alpha,
+                                       first_col_width=first_col_width,
+                                       use_color=use_color)
+
+    return output_strs
+
+
+###############################################################################
+# Unit tests
+
+
+class TestGetUniqueBenchmarkNames(unittest.TestCase):
+    def load_results(self):
+        import json
+        testInputs = os.path.join(
+            os.path.dirname(
+                os.path.realpath(__file__)),
+            'Inputs')
+        testOutput = os.path.join(testInputs, 'test3_run0.json')
+        with open(testOutput, 'r') as f:
+            json = json.load(f)
+        return json
+
+    def test_basic(self):
+        expect_lines = [
+            'BM_One',
+            'BM_Two',
+            'short',  # These two are not sorted
+            'medium',  # These two are not sorted
+        ]
+        json = self.load_results()
+        output_lines = get_unique_benchmark_names(json)
+        print("\n")
+        print("\n".join(output_lines))
+        self.assertEqual(len(output_lines), len(expect_lines))
+        for i in range(0, len(output_lines)):
+            self.assertEqual(expect_lines[i], output_lines[i])
+
+
+class TestReportDifference(unittest.TestCase):
+    def load_results(self):
+        import json
+        testInputs = os.path.join(
+            os.path.dirname(
+                os.path.realpath(__file__)),
+            'Inputs')
+        testOutput1 = os.path.join(testInputs, 'test1_run1.json')
+        testOutput2 = os.path.join(testInputs, 'test1_run2.json')
+        with open(testOutput1, 'r') as f:
+            json1 = json.load(f)
+        with open(testOutput2, 'r') as f:
+            json2 = json.load(f)
+        return json1, json2
+
+    def test_basic(self):
+        expect_lines = [
+            ['BM_SameTimes', '+0.0000', '+0.0000', '10', '10', '10', '10'],
+            ['BM_2xFaster', '-0.5000', '-0.5000', '50', '25', '50', '25'],
+            ['BM_2xSlower', '+1.0000', '+1.0000', '50', '100', '50', '100'],
+            ['BM_1PercentFaster', '-0.0100', '-0.0100', '100', '99', '100', '99'],
+            ['BM_1PercentSlower', '+0.0100', '+0.0100', '100', '101', '100', '101'],
+            ['BM_10PercentFaster', '-0.1000', '-0.1000', '100', '90', '100', '90'],
+            ['BM_10PercentSlower', '+0.1000', '+0.1000', '100', '110', '100', '110'],
+            ['BM_100xSlower', '+99.0000', '+99.0000',
+                '100', '10000', '100', '10000'],
+            ['BM_100xFaster', '-0.9900', '-0.9900',
+                '10000', '100', '10000', '100'],
+            ['BM_10PercentCPUToTime', '+0.1000',
+                '-0.1000', '100', '110', '100', '90'],
+            ['BM_ThirdFaster', '-0.3333', '-0.3334', '100', '67', '100', '67'],
+            ['BM_NotBadTimeUnit', '-0.9000', '+0.2000', '0', '0', '0', '1'],
+        ]
+        json1, json2 = self.load_results()
+        output_lines_with_header = generate_difference_report(
+            json1, json2, use_color=False)
+        output_lines = output_lines_with_header[2:]
+        print("\n")
+        print("\n".join(output_lines_with_header))
+        self.assertEqual(len(output_lines), len(expect_lines))
+        for i in range(0, len(output_lines)):
+            parts = [x for x in output_lines[i].split(' ') if x]
+            self.assertEqual(len(parts), 7)
+            self.assertEqual(expect_lines[i], parts)
+
+
+class TestReportDifferenceBetweenFamilies(unittest.TestCase):
+    def load_result(self):
+        import json
+        testInputs = os.path.join(
+            os.path.dirname(
+                os.path.realpath(__file__)),
+            'Inputs')
+        testOutput = os.path.join(testInputs, 'test2_run.json')
+        with open(testOutput, 'r') as f:
+            json = json.load(f)
+        return json
+
+    def test_basic(self):
+        expect_lines = [
+            ['.', '-0.5000', '-0.5000', '10', '5', '10', '5'],
+            ['./4', '-0.5000', '-0.5000', '40', '20', '40', '20'],
+            ['Prefix/.', '-0.5000', '-0.5000', '20', '10', '20', '10'],
+            ['Prefix/./3', '-0.5000', '-0.5000', '30', '15', '30', '15'],
+        ]
+        json = self.load_result()
+        json1 = filter_benchmark(json, "BM_Z.ro", ".")
+        json2 = filter_benchmark(json, "BM_O.e", ".")
+        output_lines_with_header = generate_difference_report(
+            json1, json2, use_color=False)
+        output_lines = output_lines_with_header[2:]
+        print("\n")
+        print("\n".join(output_lines_with_header))
+        self.assertEqual(len(output_lines), len(expect_lines))
+        for i in range(0, len(output_lines)):
+            parts = [x for x in output_lines[i].split(' ') if x]
+            self.assertEqual(len(parts), 7)
+            self.assertEqual(expect_lines[i], parts)
+
+
+class TestReportDifferenceWithUTest(unittest.TestCase):
+    def load_results(self):
+        import json
+        testInputs = os.path.join(
+            os.path.dirname(
+                os.path.realpath(__file__)),
+            'Inputs')
+        testOutput1 = os.path.join(testInputs, 'test3_run0.json')
+        testOutput2 = os.path.join(testInputs, 'test3_run1.json')
+        with open(testOutput1, 'r') as f:
+            json1 = json.load(f)
+        with open(testOutput2, 'r') as f:
+            json2 = json.load(f)
+        return json1, json2
+
+    def test_utest(self):
+        expect_lines = [
+            ['BM_One', '-0.1000', '+0.1000', '10', '9', '100', '110'],
+            ['BM_Two', '+0.1111', '-0.0111', '9', '10', '90', '89'],
+            ['BM_Two', '-0.1250', '-0.1628', '8', '7', '86', '72'],
+            ['BM_Two_pvalue',
+             '0.6985',
+             '0.6985',
+             'U',
+             'Test,',
+             'Repetitions:',
+             '2',
+             'vs',
+             '2.',
+             'WARNING:',
+             'Results',
+             'unreliable!',
+             '9+',
+             'repetitions',
+             'recommended.'],
+            ['short', '-0.1250', '-0.0625', '8', '7', '80', '75'],
+            ['short', '-0.4325', '-0.1351', '8', '5', '77', '67'],
+            ['short_pvalue',
+             '0.7671',
+             '0.1489',
+             'U',
+             'Test,',
+             'Repetitions:',
+             '2',
+             'vs',
+             '3.',
+             'WARNING:',
+             'Results',
+             'unreliable!',
+             '9+',
+             'repetitions',
+             'recommended.'],
+            ['medium', '-0.3750', '-0.3375', '8', '5', '80', '53'],
+        ]
+        json1, json2 = self.load_results()
+        output_lines_with_header = generate_difference_report(
+            json1, json2, utest=True, utest_alpha=0.05, use_color=False)
+        output_lines = output_lines_with_header[2:]
+        print("\n")
+        print("\n".join(output_lines_with_header))
+        self.assertEqual(len(output_lines), len(expect_lines))
+        for i in range(0, len(output_lines)):
+            parts = [x for x in output_lines[i].split(' ') if x]
+            self.assertEqual(expect_lines[i], parts)
+
+
+class TestReportDifferenceWithUTestWhileDisplayingAggregatesOnly(
+        unittest.TestCase):
+    def load_results(self):
+        import json
+        testInputs = os.path.join(
+            os.path.dirname(
+                os.path.realpath(__file__)),
+            'Inputs')
+        testOutput1 = os.path.join(testInputs, 'test3_run0.json')
+        testOutput2 = os.path.join(testInputs, 'test3_run1.json')
+        with open(testOutput1, 'r') as f:
+            json1 = json.load(f)
+        with open(testOutput2, 'r') as f:
+            json2 = json.load(f)
+        return json1, json2
+
+    def test_utest(self):
+        expect_lines = [
+            ['BM_One', '-0.1000', '+0.1000', '10', '9', '100', '110'],
+            ['BM_Two', '+0.1111', '-0.0111', '9', '10', '90', '89'],
+            ['BM_Two', '-0.1250', '-0.1628', '8', '7', '86', '72'],
+            ['BM_Two_pvalue',
+             '0.6985',
+             '0.6985',
+             'U',
+             'Test,',
+             'Repetitions:',
+             '2',
+             'vs',
+             '2.',
+             'WARNING:',
+             'Results',
+             'unreliable!',
+             '9+',
+             'repetitions',
+             'recommended.'],
+            ['short', '-0.1250', '-0.0625', '8', '7', '80', '75'],
+            ['short', '-0.4325', '-0.1351', '8', '5', '77', '67'],
+            ['short_pvalue',
+             '0.7671',
+             '0.1489',
+             'U',
+             'Test,',
+             'Repetitions:',
+             '2',
+             'vs',
+             '3.',
+             'WARNING:',
+             'Results',
+             'unreliable!',
+             '9+',
+             'repetitions',
+             'recommended.'],
+        ]
+        json1, json2 = self.load_results()
+        output_lines_with_header = generate_difference_report(
+            json1, json2, display_aggregates_only=True,
+            utest=True, utest_alpha=0.05, use_color=False)
+        output_lines = output_lines_with_header[2:]
+        print("\n")
+        print("\n".join(output_lines_with_header))
+        self.assertEqual(len(output_lines), len(expect_lines))
+        for i in range(0, len(output_lines)):
+            parts = [x for x in output_lines[i].split(' ') if x]
+            self.assertEqual(expect_lines[i], parts)
+
+
+if __name__ == '__main__':
+    unittest.main()
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
+# kate: tab-width: 4; replace-tabs on; indent-width 4; tab-indents: off;
+# kate: indent-mode python; remove-trailing-spaces modified;
diff --git a/src/third_party/google_benchmark/tools/gbench/util.py b/src/third_party/google_benchmark/tools/gbench/util.py
new file mode 100644
index 0000000..1f8e8e2
--- /dev/null
+++ b/src/third_party/google_benchmark/tools/gbench/util.py
@@ -0,0 +1,164 @@
+"""util.py - General utilities for running, loading, and processing benchmarks
+"""
+import json
+import os
+import tempfile
+import subprocess
+import sys
+
+# Input file type enumeration
+IT_Invalid = 0
+IT_JSON = 1
+IT_Executable = 2
+
+_num_magic_bytes = 2 if sys.platform.startswith('win') else 4
+
+
+def is_executable_file(filename):
+    """
+    Return 'True' if 'filename' names a valid file that is likely
+    an executable. A file is considered an executable if it starts with the
+    magic bytes of an EXE, Mach-O, or ELF file.
+    """
+    if not os.path.isfile(filename):
+        return False
+    with open(filename, mode='rb') as f:
+        magic_bytes = f.read(_num_magic_bytes)
+    if sys.platform == 'darwin':
+        return magic_bytes in [
+            b'\xfe\xed\xfa\xce',  # MH_MAGIC
+            b'\xce\xfa\xed\xfe',  # MH_CIGAM
+            b'\xfe\xed\xfa\xcf',  # MH_MAGIC_64
+            b'\xcf\xfa\xed\xfe',  # MH_CIGAM_64
+            b'\xca\xfe\xba\xbe',  # FAT_MAGIC
+            b'\xbe\xba\xfe\xca'   # FAT_CIGAM
+        ]
+    elif sys.platform.startswith('win'):
+        return magic_bytes == b'MZ'
+    else:
+        return magic_bytes == b'\x7FELF'
+
+
+def is_json_file(filename):
+    """
+    Return 'True' if 'filename' names a valid JSON output file,
+    and 'False' otherwise.
+    """
+    try:
+        with open(filename, 'r') as f:
+            json.load(f)
+        return True
+    except BaseException:
+        pass
+    return False
+
+
+def classify_input_file(filename):
+    """
+    Return a tuple (type, msg) where 'type' specifies the classified type
+    of 'filename'. If 'type' is 'IT_Invalid' then 'msg' is a human-readable
+    string representing the error.
+    """
+    ftype = IT_Invalid
+    err_msg = None
+    if not os.path.exists(filename):
+        err_msg = "'%s' does not exist" % filename
+    elif not os.path.isfile(filename):
+        err_msg = "'%s' does not name a file" % filename
+    elif is_executable_file(filename):
+        ftype = IT_Executable
+    elif is_json_file(filename):
+        ftype = IT_JSON
+    else:
+        err_msg = "'%s' does not name a valid benchmark executable or JSON file" % filename
+    return ftype, err_msg
+
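+# Illustrative example (the file name is made up; a readable, valid JSON file
+# classifies as IT_JSON with no error message):
+#
+#     >>> classify_input_file('results.json')
+#     (1, None)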
+
+def check_input_file(filename):
+    """
+    Classify the file named by 'filename' and return the classification.
+    If the file is classified as 'IT_Invalid' print an error message and exit
+    the program.
+    """
+    ftype, msg = classify_input_file(filename)
+    if ftype == IT_Invalid:
+        print("Invalid input file: %s" % msg)
+        sys.exit(1)
+    return ftype
+
+
+def find_benchmark_flag(prefix, benchmark_flags):
+    """
+    Search the specified list of flags for a flag matching `<prefix><arg>` and
+    if it is found return the arg it specifies. If specified more than once the
+    last value is returned. If the flag is not found None is returned.
+    """
+    assert prefix.startswith('--') and prefix.endswith('=')
+    result = None
+    for f in benchmark_flags:
+        if f.startswith(prefix):
+            result = f[len(prefix):]
+    return result
+
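+# Illustrative example: when the flag is passed more than once, the last
+# value wins (the flag values below are made up):
+#
+#     >>> find_benchmark_flag('--benchmark_out=',
+#     ...                     ['--benchmark_out=a.json', '--benchmark_out=b.json'])
+#     'b.json'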
+
+def remove_benchmark_flags(prefix, benchmark_flags):
+    """
+    Return a new list containing the specified benchmark_flags except those
+    with the specified prefix.
+    """
+    assert prefix.startswith('--') and prefix.endswith('=')
+    return [f for f in benchmark_flags if not f.startswith(prefix)]
+
+
+def load_benchmark_results(fname):
+    """
+    Read benchmark output from a file and return the JSON object.
+    REQUIRES: 'fname' names a file containing JSON benchmark output.
+    """
+    with open(fname, 'r') as f:
+        return json.load(f)
+
+
+def run_benchmark(exe_name, benchmark_flags):
+    """
+    Run a benchmark specified by 'exe_name' with the specified
+    'benchmark_flags'. The benchmark is run directly as a subprocess to preserve
+    real-time console output.
+    RETURNS: A JSON object representing the benchmark output
+    """
+    output_name = find_benchmark_flag('--benchmark_out=',
+                                      benchmark_flags)
+    is_temp_output = False
+    if output_name is None:
+        is_temp_output = True
+        thandle, output_name = tempfile.mkstemp()
+        os.close(thandle)
+        benchmark_flags = list(benchmark_flags) + \
+            ['--benchmark_out=%s' % output_name]
+
+    cmd = [exe_name] + benchmark_flags
+    print("RUNNING: %s" % ' '.join(cmd))
+    exitCode = subprocess.call(cmd)
+    if exitCode != 0:
+        print('TEST FAILED...')
+        sys.exit(exitCode)
+    json_res = load_benchmark_results(output_name)
+    if is_temp_output:
+        os.unlink(output_name)
+    return json_res
+
+
+def run_or_load_benchmark(filename, benchmark_flags):
+    """
+    Get the results for a specified benchmark. If 'filename' specifies
+    an executable benchmark then the results are generated by running the
+    benchmark. Otherwise 'filename' must name a valid JSON output file,
+    which is loaded and returned.
+    """
+    ftype = check_input_file(filename)
+    if ftype == IT_JSON:
+        return load_benchmark_results(filename)
+    elif ftype == IT_Executable:
+        return run_benchmark(filename, benchmark_flags)
+    else:
+        assert False  # This branch is unreachable
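+
+
+# A minimal usage sketch, assuming either a benchmark executable or a JSON
+# results file at the (made-up) path below; the returned object carries the
+# same 'benchmarks' list that report.py consumes:
+#
+#     flags = ['--benchmark_repetitions=2']
+#     results = run_or_load_benchmark('./mybench', flags)
+#     for bm in results['benchmarks']:
+#         print(bm['name'], bm['real_time'], bm['cpu_time'])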
diff --git a/src/third_party/google_benchmark/tools/strip_asm.py b/src/third_party/google_benchmark/tools/strip_asm.py
new file mode 100755
index 0000000..9030550
--- /dev/null
+++ b/src/third_party/google_benchmark/tools/strip_asm.py
@@ -0,0 +1,151 @@
+#!/usr/bin/env python
+
+"""
+strip_asm.py - Clean up ASM output for the specified file
+"""
+
+from argparse import ArgumentParser
+import sys
+import os
+import re
+
+def find_used_labels(asm):
+    found = set()
+    label_re = re.compile("\s*j[a-z]+\s+\.L([a-zA-Z0-9][a-zA-Z0-9_]*)")
+    for l in asm.splitlines():
+        m = label_re.match(l)
+        if m:
+            found.add('.L%s' % m.group(1))
+    return found
+
+
+def normalize_labels(asm):
+    decls = set()
+    label_decl = re.compile("^[.]{0,1}L([a-zA-Z0-9][a-zA-Z0-9_]*)(?=:)")
+    for l in asm.splitlines():
+        m = label_decl.match(l)
+        if m:
+            decls.add(m.group(0))
+    if len(decls) == 0:
+        return asm
+    needs_dot = next(iter(decls))[0] != '.'
+    if not needs_dot:
+        return asm
+    for ld in decls:
+        asm = re.sub("(^|\s+)" + ld + "(?=:|\s)", '\\1.' + ld, asm)
+    return asm
+
+
+def transform_labels(asm):
+    asm = normalize_labels(asm)
+    used_decls = find_used_labels(asm)
+    new_asm = ''
+    label_decl = re.compile("^\.L([a-zA-Z0-9][a-zA-Z0-9_]*)(?=:)")
+    for l in asm.splitlines():
+        m = label_decl.match(l)
+        if not m or m.group(0) in used_decls:
+            new_asm += l
+            new_asm += '\n'
+    return new_asm
+
+
+def is_identifier(tk):
+    if len(tk) == 0:
+        return False
+    first = tk[0]
+    if not first.isalpha() and first != '_':
+        return False
+    for i in range(1, len(tk)):
+        c = tk[i]
+        if not c.isalnum() and c != '_':
+            return False
+    return True
+
+def process_identifiers(l):
+    """
+    process_identifiers - process all identifiers and modify them to have
+    consistent names across all platforms, specifically across ELF and Mach-O.
+    For example, Mach-O inserts an additional underscore at the beginning of
+    names. This function removes it.
+    """
+    parts = re.split(r'([a-zA-Z0-9_]+)', l)
+    new_line = ''
+    for tk in parts:
+        if is_identifier(tk):
+            if tk.startswith('__Z'):
+                tk = tk[1:]
+            elif tk.startswith('_') and len(tk) > 1 and \
+                    tk[1].isalpha() and tk[1] != 'Z':
+                tk = tk[1:]
+        new_line += tk
+    return new_line
+
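+# Illustrative examples of the renaming above (the instruction and symbols
+# are made up):
+#
+#     >>> process_identifiers('callq   __Z4testv')
+#     'callq   _Z4testv'
+#     >>> process_identifiers('callq   _main')
+#     'callq   main'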
+
+def process_asm(asm):
+    """
+    Strip the ASM of unwanted directives and lines
+    """
+    new_contents = ''
+    asm = transform_labels(asm)
+
+    # TODO: Add more things we want to remove
+    discard_regexes = [
+        re.compile("\s+\..*$"), # directive
+        re.compile("\s*#(NO_APP|APP)$"), #inline ASM
+        re.compile("\s*#.*$"), # comment line
+        re.compile("\s*\.globa?l\s*([.a-zA-Z_][a-zA-Z0-9$_.]*)"), #global directive
+        re.compile("\s*\.(string|asciz|ascii|[1248]?byte|short|word|long|quad|value|zero)"),
+    ]
+    keep_regexes = [
+
+    ]
+    fn_label_def = re.compile("^[a-zA-Z_][a-zA-Z0-9_.]*:")
+    for l in asm.splitlines():
+        # Remove Mach-O attribute
+        l = l.replace('@GOTPCREL', '')
+        add_line = True
+        for reg in discard_regexes:
+            if reg.match(l) is not None:
+                add_line = False
+                break
+        for reg in keep_regexes:
+            if reg.match(l) is not None:
+                add_line = True
+                break
+        if add_line:
+            if fn_label_def.match(l) and len(new_contents) != 0:
+                new_contents += '\n'
+            l = process_identifiers(l)
+            new_contents += l
+            new_contents += '\n'
+    return new_contents
+
+def main():
+    parser = ArgumentParser(
+        description='generate a stripped assembly file')
+    parser.add_argument(
+        'input', metavar='input', type=str, nargs=1,
+        help='An input assembly file')
+    parser.add_argument(
+        'out', metavar='output', type=str, nargs=1,
+        help='The output file')
+    args, unknown_args = parser.parse_known_args()
+    input = args.input[0]
+    output = args.out[0]
+    if not os.path.isfile(input):
+        print(("ERROR: input file '%s' does not exist") % input)
+        sys.exit(1)
+    contents = None
+    with open(input, 'r') as f:
+        contents = f.read()
+    new_contents = process_asm(contents)
+    with open(output, 'w') as f:
+        f.write(new_contents)
+
+
+if __name__ == '__main__':
+    main()
+
+# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
+# kate: tab-width: 4; replace-tabs on; indent-width 4; tab-indents: off;
+# kate: indent-mode python; remove-trailing-spaces modified;
diff --git a/src/third_party/icu/source/common/putil.cpp b/src/third_party/icu/source/common/putil.cpp
index 1a4beb5..b07e55b 100644
--- a/src/third_party/icu/source/common/putil.cpp
+++ b/src/third_party/icu/source/common/putil.cpp
@@ -1207,14 +1207,12 @@
         }
         uprv_strcpy(newDataDir, directory);
 
-#if (U_FILE_SEP_CHAR != U_FILE_ALT_SEP_CHAR)
-        {
-            char *p;
-            while(p = uprv_strchr(newDataDir, U_FILE_ALT_SEP_CHAR)) {
-                *p = U_FILE_SEP_CHAR;
-            }
+        if (U_FILE_SEP_CHAR != U_FILE_ALT_SEP_CHAR) {
+          char* p;
+          while ((p = uprv_strchr(newDataDir, U_FILE_ALT_SEP_CHAR))) {
+            *p = U_FILE_SEP_CHAR;
+          }
         }
-#endif
     }
 
     if (gDataDirectory && *gDataDirectory) {
@@ -1235,11 +1233,11 @@
     return TRUE;
   }
 
-#if (U_FILE_SEP_CHAR != U_FILE_ALT_SEP_CHAR)
-  if(*path == U_FILE_ALT_SEP_CHAR) {
-    return TRUE;
+  if (U_FILE_SEP_CHAR != U_FILE_ALT_SEP_CHAR) {
+    if (*path == U_FILE_ALT_SEP_CHAR) {
+      return TRUE;
+    }
   }
-#endif
 
 #if U_PLATFORM_USES_ONLY_WIN32_API
   if( (((path[0] >= 'A') && (path[0] <= 'Z')) ||
@@ -1338,12 +1336,12 @@
     }
     gTimeZoneFilesDirectory->clear();
     gTimeZoneFilesDirectory->append(path, status);
-#if (U_FILE_SEP_CHAR != U_FILE_ALT_SEP_CHAR)
-    char *p = gTimeZoneFilesDirectory->data();
-    while (p = uprv_strchr(p, U_FILE_ALT_SEP_CHAR)) {
+    if (U_FILE_SEP_CHAR != U_FILE_ALT_SEP_CHAR) {
+      char* p = gTimeZoneFilesDirectory->data();
+      while ((p = uprv_strchr(p, U_FILE_ALT_SEP_CHAR))) {
         *p = U_FILE_SEP_CHAR;
+      }
     }
-#endif
 }
 
 #define TO_STRING(x) TO_STRING_2(x) 
diff --git a/src/third_party/icu/source/common/udata.cpp b/src/third_party/icu/source/common/udata.cpp
index 339292a..006e3b2 100644
--- a/src/third_party/icu/source/common/udata.cpp
+++ b/src/third_party/icu/source/common/udata.cpp
@@ -1152,23 +1152,25 @@
       isICUData = TRUE;
     }
 
-#if (U_FILE_SEP_CHAR != U_FILE_ALT_SEP_CHAR)  /* Windows:  try "foo\bar" and "foo/bar" */
+    /* Windows:  try "foo\bar" and "foo/bar" */
     /* remap from alternate path char to the main one */
-    CharString altSepPath;
-    if(path) {
-        if(uprv_strchr(path,U_FILE_ALT_SEP_CHAR) != NULL) {
-            altSepPath.append(path, *pErrorCode);
-            char *p;
-            while((p=uprv_strchr(altSepPath.data(), U_FILE_ALT_SEP_CHAR))) {
-                *p = U_FILE_SEP_CHAR;
-            }
-#if defined (UDATA_DEBUG)
-            fprintf(stderr, "Changed path from [%s] to [%s]\n", path, altSepPath.s);
+    if (U_FILE_SEP_CHAR != U_FILE_ALT_SEP_CHAR) {
+      CharString altSepPath;
+      if (path) {
+        if (uprv_strchr(path, U_FILE_ALT_SEP_CHAR) != NULL) {
+          altSepPath.append(path, *pErrorCode);
+          char* p;
+          while ((p = uprv_strchr(altSepPath.data(), U_FILE_ALT_SEP_CHAR))) {
+            *p = U_FILE_SEP_CHAR;
+          }
+#if defined(UDATA_DEBUG)
+          fprintf(stderr, "Changed path from [%s] to [%s]\n", path,
+                  altSepPath.s);
 #endif
-            path = altSepPath.data();
+          path = altSepPath.data();
         }
+      }
     }
-#endif
 
     CharString tocEntryName; /* entry name in tree format. ex:  'icudt28b/coll/ar.res' */
     CharString tocEntryPath; /* entry name in path format. ex:  'icudt28b\\coll\\ar.res' */
diff --git a/src/third_party/icu/source/test/cintltst/creststn.c b/src/third_party/icu/source/test/cintltst/creststn.c
index a17270a..26d0bad 100644
--- a/src/third_party/icu/source/test/cintltst/creststn.c
+++ b/src/third_party/icu/source/test/cintltst/creststn.c
@@ -1000,8 +1000,7 @@
     utestdatapath = (UChar*) malloc((len+10)*sizeof(UChar));
 
     u_charsToUChars(testdatapath, utestdatapath, (int32_t)strlen(testdatapath)+1);
-#if (U_FILE_SEP_CHAR != U_FILE_ALT_SEP_CHAR) && U_FILE_SEP_CHAR == '\\'
-    {
+    if ((U_FILE_SEP_CHAR != U_FILE_ALT_SEP_CHAR) && U_FILE_SEP_CHAR == '\\') {
         /* Convert all backslashes to forward slashes so that we can make sure that ures_openU
            can handle invariant characters. */
         UChar *backslash;
@@ -1009,7 +1008,6 @@
             *backslash = 0x002F;
         }
     }
-#endif
 
     u_memset(largeBuffer, 0x0030, sizeof(largeBuffer)/sizeof(largeBuffer[0]));
     largeBuffer[sizeof(largeBuffer)/sizeof(largeBuffer[0])-1] = 0;
diff --git a/src/third_party/icu/source/tools/genrb/derb.cpp b/src/third_party/icu/source/tools/genrb/derb.cpp
index 1f6a2de..9a364f9 100644
--- a/src/third_party/icu/source/tools/genrb/derb.cpp
+++ b/src/third_party/icu/source/tools/genrb/derb.cpp
@@ -85,11 +85,11 @@
 
     /* Get the name of tool. */
     pname = uprv_strrchr(*argv, U_FILE_SEP_CHAR);
-#if U_FILE_SEP_CHAR != U_FILE_ALT_SEP_CHAR
-    if (!pname) {
+    if (U_FILE_SEP_CHAR != U_FILE_ALT_SEP_CHAR) {
+      if (!pname) {
         pname = uprv_strrchr(*argv, U_FILE_ALT_SEP_CHAR);
+      }
     }
-#endif
     if (!pname) {
         pname = *argv;
     } else {
@@ -214,11 +214,11 @@
                 thename = arg;
             } else {
                 const char *q = uprv_strrchr(arg, U_FILE_SEP_CHAR);
-#if U_FILE_SEP_CHAR != U_FILE_ALT_SEP_CHAR
-                if (q == NULL) {
+                if (U_FILE_SEP_CHAR != U_FILE_ALT_SEP_CHAR) {
+                  if (q == NULL) {
                     q = uprv_strrchr(arg, U_FILE_ALT_SEP_CHAR);
+                  }
                 }
-#endif
                 infile.append(inputDir, status);
                 if(q != NULL) {
                     infile.appendPathPart(icu::StringPiece(arg, (int32_t)(q - arg)), status);
diff --git a/src/third_party/icu/source/tools/pkgdata/pkgtypes.c b/src/third_party/icu/source/tools/pkgdata/pkgtypes.c
index 2769e45..6d34c4c 100644
--- a/src/third_party/icu/source/tools/pkgdata/pkgtypes.c
+++ b/src/third_party/icu/source/tools/pkgdata/pkgtypes.c
@@ -210,8 +210,7 @@
     char aBuf[1024];
     char *rPtr;
     rPtr = uprv_strrchr(strAlias, U_FILE_SEP_CHAR);
-#if (U_FILE_SEP_CHAR != U_FILE_ALT_SEP_CHAR)
-    {
+    if (U_FILE_SEP_CHAR != U_FILE_ALT_SEP_CHAR) {
         char *aPtr = uprv_strrchr(strAlias, U_FILE_ALT_SEP_CHAR);
         if(!rPtr || /* regular char wasn't found or.. */
             (aPtr && (aPtr > rPtr)))
@@ -219,7 +218,6 @@
             rPtr = aPtr; /* may copy NULL which is OK */
         }
     }
-#endif
     if(!rPtr) {
         return l; /* no dir path */
     }
diff --git a/src/third_party/icu/source/tools/toolutil/package.cpp b/src/third_party/icu/source/tools/toolutil/package.cpp
index 7369068..5e32bd5 100644
--- a/src/third_party/icu/source/tools/toolutil/package.cpp
+++ b/src/third_party/icu/source/tools/toolutil/package.cpp
@@ -209,9 +209,6 @@
 /*
  * Turn tree separators and alternate file separators into normal file separators.
  */
-#if U_TREE_ENTRY_SEP_CHAR==U_FILE_SEP_CHAR && U_FILE_ALT_SEP_CHAR==U_FILE_SEP_CHAR
-#define treeToPath(s)
-#else
 static void
 treeToPath(char *s) {
     char *t;
@@ -222,14 +219,10 @@
         }
     }
 }
-#endif
 
 /*
  * Turn file separators into tree separators.
  */
-#if U_TREE_ENTRY_SEP_CHAR==U_FILE_SEP_CHAR && U_FILE_ALT_SEP_CHAR==U_FILE_SEP_CHAR
-#define pathToTree(s)
-#else
 static void
 pathToTree(char *s) {
     char *t;
@@ -240,7 +233,6 @@
         }
     }
 }
-#endif
 
 /*
  * Prepend the path (if any) to the name and run the name through treeToName().
@@ -273,7 +265,10 @@
         exit(U_BUFFER_OVERFLOW_ERROR);
     }
     strcpy(s, name);
-    treeToPath(s);
+    if (U_TREE_ENTRY_SEP_CHAR != U_FILE_SEP_CHAR ||
+        U_FILE_ALT_SEP_CHAR != U_FILE_SEP_CHAR) {
+      treeToPath(s);
+    }
 }
 
 static void
@@ -1043,7 +1038,10 @@
         // copy the item's name
         items[idx].name=allocString(TRUE, strlen(name));
         strcpy(items[idx].name, name);
-        pathToTree(items[idx].name);
+        if (U_TREE_ENTRY_SEP_CHAR != U_FILE_SEP_CHAR ||
+            U_FILE_ALT_SEP_CHAR != U_FILE_SEP_CHAR) {
+          pathToTree(items[idx].name);
+        }
     } else {
         // same-name item found, replace it
         if(items[idx].isDataOwned) {
diff --git a/src/third_party/icu/source/tools/toolutil/pkg_gencmn.c b/src/third_party/icu/source/tools/toolutil/pkg_gencmn.c
index 25f3608..839d96b 100644
--- a/src/third_party/icu/source/tools/toolutil/pkg_gencmn.c
+++ b/src/third_party/icu/source/tools/toolutil/pkg_gencmn.c
@@ -192,14 +192,12 @@
         }
 
         /* add the file */
-#if (U_FILE_SEP_CHAR != U_FILE_ALT_SEP_CHAR)
-        {
+        if (U_FILE_SEP_CHAR != U_FILE_ALT_SEP_CHAR) {
           char *t;
           while((t = uprv_strchr(line,U_FILE_ALT_SEP_CHAR))) {
             *t = U_FILE_SEP_CHAR;
           }
         }
-#endif
         addFile(getLongPathname(line), name, source, sourceTOC, verbose);
     }
 
@@ -525,24 +523,23 @@
                            /*  when conditional code below is not compiled.      */
     uprv_strcat(fullPath, path);
 
-#if (U_FILE_ALT_SEP_CHAR != U_TREE_ENTRY_SEP_CHAR)
-#if (U_FILE_ALT_SEP_CHAR != U_FILE_SEP_CHAR)
-    /* replace tree separator (such as '/') with file sep char (such as ':' or '\\') */
-    for(;fullPath[n];n++) {
-        if(fullPath[n] == U_FILE_ALT_SEP_CHAR) {
-            fullPath[n] = U_FILE_SEP_CHAR;
+    if (U_FILE_ALT_SEP_CHAR != U_TREE_ENTRY_SEP_CHAR
+        && U_FILE_ALT_SEP_CHAR != U_FILE_SEP_CHAR) {
+        /* replace tree separator (such as '/') with file sep char (such as ':' or '\\') */
+        for(;fullPath[n];n++) {
+            if(fullPath[n] == U_FILE_ALT_SEP_CHAR) {
+                fullPath[n] = U_FILE_SEP_CHAR;
+            }
         }
     }
-#endif
-#endif
-#if (U_FILE_SEP_CHAR != U_TREE_ENTRY_SEP_CHAR)
-    /* replace tree separator (such as '/') with file sep char (such as ':' or '\\') */
-    for(;fullPath[n];n++) {
-        if(fullPath[n] == U_TREE_ENTRY_SEP_CHAR) {
-            fullPath[n] = U_FILE_SEP_CHAR;
+    if (U_FILE_SEP_CHAR != U_TREE_ENTRY_SEP_CHAR) {
+        /* replace tree separator (such as '/') with file sep char (such as ':' or '\\') */
+        for(;fullPath[n];n++) {
+            if(fullPath[n] == U_TREE_ENTRY_SEP_CHAR) {
+                fullPath[n] = U_FILE_SEP_CHAR;
+            }
         }
     }
-#endif
     return fullPath;
 }
 
@@ -555,17 +552,20 @@
 static void
 fixDirToTreePath(char *s)
 {
-#if (U_FILE_SEP_CHAR != U_TREE_ENTRY_SEP_CHAR) || ((U_FILE_ALT_SEP_CHAR != U_FILE_SEP_CHAR) && (U_FILE_ALT_SEP_CHAR != U_TREE_ENTRY_SEP_CHAR))
-    char *t;
-#endif
-#if (U_FILE_SEP_CHAR != U_TREE_ENTRY_SEP_CHAR)
-    for(t=s;t=uprv_strchr(t,U_FILE_SEP_CHAR);) {
-        *t = U_TREE_ENTRY_SEP_CHAR;
+    if ((U_FILE_SEP_CHAR != U_TREE_ENTRY_SEP_CHAR)
+        || ((U_FILE_ALT_SEP_CHAR != U_FILE_SEP_CHAR)
+        && (U_FILE_ALT_SEP_CHAR != U_TREE_ENTRY_SEP_CHAR))) {
+        char *t;
+        if (U_FILE_SEP_CHAR != U_TREE_ENTRY_SEP_CHAR) {
+            for(t=s;t=uprv_strchr(t,U_FILE_SEP_CHAR);) {
+                *t = U_TREE_ENTRY_SEP_CHAR;
+            }
+        }
+        if (U_FILE_ALT_SEP_CHAR != U_FILE_SEP_CHAR
+            && U_FILE_ALT_SEP_CHAR != U_TREE_ENTRY_SEP_CHAR) {
+            for(t=s;t=uprv_strchr(t,U_FILE_ALT_SEP_CHAR);) {
+                *t = U_TREE_ENTRY_SEP_CHAR;
+            }
+        }
     }
-#endif
-#if (U_FILE_ALT_SEP_CHAR != U_FILE_SEP_CHAR) && (U_FILE_ALT_SEP_CHAR != U_TREE_ENTRY_SEP_CHAR)
-    for(t=s;t=uprv_strchr(t,U_FILE_ALT_SEP_CHAR);) {
-        *t = U_TREE_ENTRY_SEP_CHAR;
-    }
-#endif
 }
diff --git a/src/third_party/icu/source/tools/toolutil/toolutil.cpp b/src/third_party/icu/source/tools/toolutil/toolutil.cpp
index 48bb777..8ac1b06 100644
--- a/src/third_party/icu/source/tools/toolutil/toolutil.cpp
+++ b/src/third_party/icu/source/tools/toolutil/toolutil.cpp
@@ -128,12 +128,12 @@
   int32_t resultLen = 0;
 
   const char *basename=uprv_strrchr(path, U_FILE_SEP_CHAR);
-#if U_FILE_ALT_SEP_CHAR!=U_FILE_SEP_CHAR
-  const char *basenameAlt=uprv_strrchr(path, U_FILE_ALT_SEP_CHAR);
-  if(basenameAlt && (!basename || basename<basenameAlt)) {
-    basename = basenameAlt;
+  if (U_FILE_ALT_SEP_CHAR != U_FILE_SEP_CHAR) {
+    const char* basenameAlt = uprv_strrchr(path, U_FILE_ALT_SEP_CHAR);
+    if (basenameAlt && (!basename || basename < basenameAlt)) {
+      basename = basenameAlt;
+    }
   }
-#endif
   if(!basename) {
     /* no basename - return ''. */
     resultPtr = "";
@@ -160,17 +160,17 @@
 findBasename(const char *filename) {
     const char *basename=uprv_strrchr(filename, U_FILE_SEP_CHAR);
 
-#if U_FILE_ALT_SEP_CHAR!=U_FILE_SEP_CHAR
+    if (U_FILE_ALT_SEP_CHAR != U_FILE_SEP_CHAR) {
 #if !(U_PLATFORM == U_PF_CYGWIN && U_PLATFORM_USES_ONLY_WIN32_API)
-    if(basename==NULL)
+      if (basename == NULL)
 #endif
-    {
+      {
         /* Use lenient matching on Windows, which can accept either \ or /
-           This is useful for environments like Win32+CygWin which have both.
+        This is useful for environments like Win32+CygWin which have both.
         */
-        basename=uprv_strrchr(filename, U_FILE_ALT_SEP_CHAR);
+        basename = uprv_strrchr(filename, U_FILE_ALT_SEP_CHAR);
+      }
     }
-#endif
 
     if(basename!=NULL) {
         return basename+1;
diff --git a/src/third_party/vulkan-headers/src/.cmake-format.py b/src/third_party/vulkan-headers/src/.cmake-format.py
new file mode 100644
index 0000000..07d2f99
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/.cmake-format.py
@@ -0,0 +1,34 @@
+# Configuration for cmake-format (v0.4.1, circa Jul 2018)
+# https://github.com/cheshirekow/cmake_format
+
+# How wide to allow formatted cmake files
+line_width = 132
+
+# How many spaces to tab for indent
+tab_size = 4
+
+# If arglists are longer than this, break them always
+max_subargs_per_line = 3
+
+# If true, separate flow control names from their parentheses with a space
+separate_ctrl_name_with_space = False
+
+# If true, separate function names from parentheses with a space
+separate_fn_name_with_space = False
+
+# If a statement is wrapped to more than one line, then dangle the closing
+# parenthesis on its own line
+dangle_parens = False
+
+# What character to use for bulleted lists
+bullet_char = u'*'
+
+# What character to use as punctuation after numerals in an enumerated list
+enum_char = u'.'
+
+# What style line endings to use in the output.
+line_ending = u'unix'
+
+# Format command names consistently as 'lower' or 'upper' case
+command_case = u'lower'
+
diff --git a/src/third_party/vulkan-headers/src/BUILD.gn b/src/third_party/vulkan-headers/src/BUILD.gn
new file mode 100644
index 0000000..79d186f
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/BUILD.gn
@@ -0,0 +1,52 @@
+# Copyright (C) 2018-2019 The ANGLE Project Authors.
+# Copyright (C) 2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+config("vulkan_headers_config") {
+  include_dirs = [ "include" ]
+
+  if (is_win) {
+    defines = [ "VK_USE_PLATFORM_WIN32_KHR" ]
+  }
+  if (is_linux) {
+    defines = [ "VK_USE_PLATFORM_XCB_KHR" ]
+  }
+  if (is_android) {
+    defines = [ "VK_USE_PLATFORM_ANDROID_KHR" ]
+  }
+  if (is_fuchsia) {
+    defines = [ "VK_USE_PLATFORM_FUCHSIA" ]
+  }
+  if (is_mac) {
+    defines = [ "VK_USE_PLATFORM_METAL_EXT" ]
+  }
+  if (defined(is_ggp) && is_ggp) {
+    defines = [ "VK_USE_PLATFORM_GGP" ]
+  }
+}
+
+# Vulkan headers only, no compiled sources.
+source_set("vulkan_headers") {
+  sources = [
+    "include/vulkan/vk_icd.h",
+    "include/vulkan/vk_layer.h",
+    "include/vulkan/vk_platform.h",
+    "include/vulkan/vk_sdk_platform.h",
+    "include/vulkan/vulkan.h",
+    "include/vulkan/vulkan.hpp",
+    "include/vulkan/vulkan_core.h",
+  ]
+  public_configs = [ ":vulkan_headers_config" ]
+}
+
diff --git a/src/third_party/vulkan-headers/src/BUILD.md b/src/third_party/vulkan-headers/src/BUILD.md
new file mode 100644
index 0000000..1188134
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/BUILD.md
@@ -0,0 +1,274 @@
+# Build Instructions
+
+Instructions for building this repository on Windows, Linux, and MacOS.
+
+## Index
+
+1. [Contributing](#contributing-to-the-repository)
+1. [Repository Content](#repository-content)
+1. [Repository Set-up](#repository-set-up)
+1. [Windows Build](#building-on-windows)
+1. [Linux Build](#building-on-linux)
+1. [MacOS Build](#building-on-macos)
+
+## Contributing to the Repository
+
+The contents of this repository are sourced primarily from the Khronos Vulkan
+API specification [repository](https://github.com/KhronosGroup/Vulkan-Docs).
+Please visit that repository for information on contributing.
+
+## Repository Content
+
+This repository contains the Vulkan header files and the Vulkan API definition
+(registry) with its related files. This repository does not create libraries
+or executables.
+
+However, this repository contains CMake build configuration files to "install"
+the files from this repository to a specific install directory. For example,
+you can install the files to a system directory such as `/usr/local` on Linux.
+
+If you are building other Vulkan-related repositories such as
+[Vulkan-Loader](https://github.com/KhronosGroup/Vulkan-Loader),
+you need to build the install target of this repository and provide the
+resulting install directory to those repositories.
+
+### Installed Files
+
+The `install` target installs the following files under the directory
+indicated by *install_dir*:
+
+- *install_dir*`/include/vulkan` : The header files found in the
+  `include/vulkan` directory of this repository
+- *install_dir*`/share/vulkan/registry` : The registry files found in the
+  `registry` directory of this repository
+
+The `uninstall` target can be used to remove the above files from the install
+directory.
+
+## Repository Set-Up
+
+### Download the Repository
+
+To create your local git repository:
+
+    git clone https://github.com/KhronosGroup/Vulkan-Headers.git
+
+### Repository Dependencies
+
+This repository does not depend on any other repositories.
+
+### Build and Install Directories
+
+A common convention is to place the build directory in the top directory of
+the repository with a name of `build` and place the install directory as a
+child of the build directory with the name `install`. The remainder of these
+instructions follow this convention, although you can use any name for these
+directories and place them in any location.
+
+## Building On Windows
+
+### Windows Development Environment Requirements
+
+- Windows
+  - Any Personal Computer version supported by Microsoft
+- Microsoft [Visual Studio](https://www.visualstudio.com/)
+  - Versions
+    - [2015](https://www.visualstudio.com/vs/older-downloads/)
+    - [2017](https://www.visualstudio.com/vs/older-downloads/)
+    - [2019](https://www.visualstudio.com/vs/downloads/)
+  - The Community Edition of each of the above versions is sufficient, as
+    well as any more capable edition.
+- [CMake 3.10.2](https://cmake.org/files/v3.10/cmake-3.10.2-win64-x64.zip) is recommended.
+  - Use the installer option to add CMake to the system PATH
+- Git Client Support
+  - [Git for Windows](http://git-scm.com/download/win) is a popular solution
+    for Windows
+  - Some IDEs (e.g., [Visual Studio](https://www.visualstudio.com/),
+    [GitHub Desktop](https://desktop.github.com/)) have integrated
+    Git client support
+
+### Windows Build - Microsoft Visual Studio
+
+The general approach is to run CMake to generate the Visual Studio project
+files. Then either run CMake with the `--build` option to build from the
+command line or use the Visual Studio IDE to open the generated solution and
+work with the solution interactively.
+
+#### Windows Quick Start
+
+From a "Developer Command Prompt for VS 201x" console:
+
+    cd Vulkan-Headers
+    mkdir build
+    cd build
+    cmake ..
+    cmake --build . --target install
+
+See below for the details.
+
+#### Use `CMake` to Create the Visual Studio Project Files
+
+From within a "Developer Command Prompt for VS 201x" console, change your
+current directory to the top of the cloned repository directory, create a
+build directory and generate the Visual Studio project files:
+
+    cd Vulkan-Headers
+    mkdir build
+    cd build
+    cmake ..
+
+> Note: The `..` parameter tells `cmake` the location of the top of the
+> repository. If you place your build directory someplace else, you'll need to
+> specify the location of the repository top differently.
+
+The CMake configuration files set the default install directory location to
+`$CMAKE_BINARY_DIR\install`, which is a child of your build directory. In this
+example, the install directory becomes the `Vulkan-Headers\build\install`
+directory.
+
+The project installs the header files to
+
+    Vulkan-Headers\build\install\include\vulkan
+
+and installs the registry files to
+
+    Vulkan-Headers\build\install\share\vulkan\registry
+
+You can change the install directory with the `CMAKE_INSTALL_PREFIX` CMake
+variable.
+
+For example:
+
+    cd Vulkan-Headers
+    mkdir build
+    cd build
+    cmake -DCMAKE_INSTALL_PREFIX=/c/Users/dev/install ..  # MINGW64 shell
+
+As it starts generating the project files, `cmake` responds with something
+like:
+
+    -- Building for: Visual Studio 14 2015
+
+which is a 32-bit generator.
+
+Since this repository does not compile anything, there is no need to specify a
+specific generator such as "Visual Studio 14 2015 Win64", so the default
+generator should suffice.
+
+The above steps create a Windows solution file named `Vulkan-Headers.sln` in
+the build directory.
+
+At this point, you can build the solution from the command line or open the
+generated solution with Visual Studio.
+
+#### Build the Solution From the Command Line
+
+While still in the build directory:
+
+    cmake --build . --target install
+
+to build the install target.
+
+Build the `uninstall` target to remove the files from the install directory.
+
+    cmake --build . --target uninstall
+
+#### Build the Solution With Visual Studio
+
+Launch Visual Studio and open the "Vulkan-Headers.sln" solution file in the
+build directory. Build the `INSTALL` target from the Visual Studio solution
+explorer.
+
+Build the `uninstall` target to remove the files from the install directory.
+
+> Note: Since there are only the `INSTALL` and `uninstall` projects in the
+> solution, building the solution from the command line may be more efficient
+> than starting Visual Studio for these simple operations.
+
+## Building On Linux
+
+### Linux Development Environment Requirements
+
+There are no specific Linux distribution or compiler version requirements for
+building this repository. The required tools are
+
+- [CMake 3.10.2](https://cmake.org/files/v3.10/cmake-3.10.2-Linux-x86_64.tar.gz) is recommended.
+- git
+
+### Linux Build
+
+The general approach is to run CMake to generate make files. Then either run
+CMake with the `--build` option or `make` to build from the command line.
+
+#### Linux Quick Start
+
+    cd Vulkan-Headers
+    mkdir build
+    cd build
+    cmake -DCMAKE_INSTALL_PREFIX=install ..
+    make install
+
+See below for the details.
+
+#### Use CMake to Create the Make Files
+
+Change your current directory to the top of the cloned repository directory,
+create a build directory and generate the make files:
+
+    cd Vulkan-Headers
+    mkdir build
+    cd build
+    cmake -DCMAKE_INSTALL_PREFIX=install ..
+
+> Note: The `..` parameter tells `cmake` the location of the top of the
+> repository. If you place your `build` directory someplace else, you'll need
+> to specify the location of the repository top differently.
+
+Set the `CMAKE_INSTALL_PREFIX` variable to the directory to serve as the
+destination directory for the `install` target.
+
+The above `cmake` command sets the install directory to
+`$CMAKE_BINARY_DIR/install`, which is a child of your `build` directory. In
+this example, the install directory becomes the `Vulkan-Headers/build/install`
+directory.
+
+The make file install target installs the header files to
+
+    Vulkan-Headers/build/install/include/vulkan
+
+and installs the registry files to
+
+    Vulkan-Headers/build/install/share/vulkan/registry
+
+> Note: For Linux, the default value for `CMAKE_INSTALL_PREFIX` is
+> `/usr/local`, which would be used if you do not specify
+> `CMAKE_INSTALL_PREFIX`. In this case, you may need to use `sudo` to install
+> to system directories later when you run `make install`.
+
+Note that after generating the make files, running `make`:
+
+    make
+
+does nothing, since there are no libraries or executables to build.
+
+To install the header files:
+
+    make install
+
+or
+
+    cmake --build . --target install
+
+To uninstall the files from the install directories, you can execute:
+
+    make uninstall
+
+or
+
+    cmake --build . --target uninstall
+
+## Building on MacOS
+
+The instructions for building this repository on MacOS are similar to those for Linux.
+
+[CMake 3.10.2](https://cmake.org/files/v3.10/cmake-3.10.2-Darwin-x86_64.tar.gz) is recommended.
diff --git a/src/third_party/vulkan-headers/src/CMakeLists.txt b/src/third_party/vulkan-headers/src/CMakeLists.txt
new file mode 100644
index 0000000..fc96c5e
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/CMakeLists.txt
@@ -0,0 +1,58 @@
+# ~~~
+# Copyright (c) 2018 Valve Corporation
+# Copyright (c) 2018 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ~~~
+
+# CMake project initialization ---------------------------------------------------------------------------------------------------
+# This section contains pre-project() initialization, and ends with the project() command.
+
+cmake_minimum_required(VERSION 3.10.2)
+
+# NONE = this project has no language toolchain requirement.
+project(Vulkan-Headers NONE)
+
+# User-interface declarations ----------------------------------------------------------------------------------------------------
+# This section contains variables that affect development GUIs (e.g. CMake GUI and IDEs), such as option(), folders, and variables
+# with the CACHE property.
+
+include(GNUInstallDirs)
+
+if(WIN32 AND CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT)
+    # Windows: if install locations not set by user, set install prefix to "<build_dir>\install".
+    set(CMAKE_INSTALL_PREFIX "${CMAKE_BINARY_DIR}/install" CACHE PATH "default install path" FORCE)
+endif()
+
+# --------------------------------------------------------------------------------------------------------------------------------
+
+# define exported targets for nested project builds to consume
+add_library(Vulkan-Headers INTERFACE)
+target_include_directories(Vulkan-Headers INTERFACE "${CMAKE_CURRENT_SOURCE_DIR}/include")
+add_library(Vulkan::Headers ALIAS Vulkan-Headers)
+
+add_library(Vulkan-Registry INTERFACE)
+target_include_directories(Vulkan-Registry INTERFACE "${CMAKE_CURRENT_SOURCE_DIR}/registry")
+add_library(Vulkan::Registry ALIAS Vulkan-Registry)
+
+install(DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/include/vulkan" DESTINATION ${CMAKE_INSTALL_INCLUDEDIR})
+install(DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/registry" DESTINATION ${CMAKE_INSTALL_DATADIR}/vulkan)
+
+# uninstall target
+if(NOT TARGET uninstall)
+    configure_file("${CMAKE_CURRENT_SOURCE_DIR}/cmake/cmake_uninstall.cmake.in"
+                   "${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake"
+                   IMMEDIATE
+                   @ONLY)
+    add_custom_target(uninstall COMMAND ${CMAKE_COMMAND} -P ${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake)
+endif()
diff --git a/src/third_party/vulkan-headers/src/CODE_OF_CONDUCT.md b/src/third_party/vulkan-headers/src/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000..a11610b
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/CODE_OF_CONDUCT.md
@@ -0,0 +1 @@
+A reminder that this issue tracker is managed by the Khronos Group. Interactions here should follow the Khronos Code of Conduct (https://www.khronos.org/developers/code-of-conduct), which prohibits aggressive or derogatory language. Please keep the discussion friendly and civil.
diff --git a/src/third_party/vulkan-headers/src/LICENSE.txt b/src/third_party/vulkan-headers/src/LICENSE.txt
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/LICENSE.txt
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/src/third_party/vulkan-headers/src/README.md b/src/third_party/vulkan-headers/src/README.md
new file mode 100644
index 0000000..4687289
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/README.md
@@ -0,0 +1,35 @@
+# Vulkan-Headers
+
+Vulkan header files and API registry
+
+## Repository Content
+
+The contents of this repository are largely obtained from other repositories and are
+collected, coordinated, and curated here.
+
+Do not propose pull requests to this repository which modify any files under
+include/vulkan/ or registry/. All such files are generated from the
+Vulkan-Docs repository and, in the case of include/vulkan/vulkan.hpp, the
+Vulkan-Hpp repository. Any changes must be made in those repositories.
+
+The projects for these repositories are:
+
+- [KhronosGroup/Vulkan-Docs](https://github.com/KhronosGroup/Vulkan-Docs)
+  - Core Vulkan headers and Registry
+- [KhronosGroup/Vulkan-Hpp](https://github.com/KhronosGroup/Vulkan-Hpp)
+  - C++ Bindings for Vulkan
+
+Please visit the appropriate project in the above list for obtaining additional information,
+asking questions, or opening issues.
+
+## Version Tagging Scheme
+
+Updates to the `Vulkan-Headers` repository which correspond to a new Vulkan
+specification release are tagged using the following format:
+`v<`_`version`_`>` (e.g., `v1.1.96`).
+
+**Note**: Marked version releases have undergone thorough testing but do not
+imply the same quality level as SDK tags. SDK tags follow the
+`sdk-<`_`version`_`>.<`_`patch`_`>` format (e.g., `sdk-1.1.92.0`).
+
+This scheme was adopted following the 1.1.96 Vulkan specification release.
diff --git a/src/third_party/vulkan-headers/src/cmake/Copyright_cmake.txt b/src/third_party/vulkan-headers/src/cmake/Copyright_cmake.txt
new file mode 100644
index 0000000..743c634
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/cmake/Copyright_cmake.txt
@@ -0,0 +1,126 @@
+CMake - Cross Platform Makefile Generator
+Copyright 2000-2018 Kitware, Inc. and Contributors
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+* Redistributions of source code must retain the above copyright
+  notice, this list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright
+  notice, this list of conditions and the following disclaimer in the
+  documentation and/or other materials provided with the distribution.
+
+* Neither the name of Kitware, Inc. nor the names of Contributors
+  may be used to endorse or promote products derived from this
+  software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+------------------------------------------------------------------------------
+
+The following individuals and institutions are among the Contributors:
+
+* Aaron C. Meadows <cmake@shadowguarddev.com>
+* Adriaan de Groot <groot@kde.org>
+* Aleksey Avdeev <solo@altlinux.ru>
+* Alexander Neundorf <neundorf@kde.org>
+* Alexander Smorkalov <alexander.smorkalov@itseez.com>
+* Alexey Sokolov <sokolov@google.com>
+* Alex Turbov <i.zaufi@gmail.com>
+* Andreas Pakulat <apaku@gmx.de>
+* Andreas Schneider <asn@cryptomilk.org>
+* André Rigland Brodtkorb <Andre.Brodtkorb@ifi.uio.no>
+* Axel Huebl, Helmholtz-Zentrum Dresden - Rossendorf
+* Benjamin Eikel
+* Bjoern Ricks <bjoern.ricks@gmail.com>
+* Brad Hards <bradh@kde.org>
+* Christopher Harvey
+* Christoph Grüninger <foss@grueninger.de>
+* Clement Creusot <creusot@cs.york.ac.uk>
+* Daniel Blezek <blezek@gmail.com>
+* Daniel Pfeifer <daniel@pfeifer-mail.de>
+* Enrico Scholz <enrico.scholz@informatik.tu-chemnitz.de>
+* Eran Ifrah <eran.ifrah@gmail.com>
+* Esben Mose Hansen, Ange Optimization ApS
+* Geoffrey Viola <geoffrey.viola@asirobots.com>
+* Google Inc
+* Gregor Jasny
+* Helio Chissini de Castro <helio@kde.org>
+* Ilya Lavrenov <ilya.lavrenov@itseez.com>
+* Insight Software Consortium <insightsoftwareconsortium.org>
+* Jan Woetzel
+* Kelly Thompson <kgt@lanl.gov>
+* Konstantin Podsvirov <konstantin@podsvirov.pro>
+* Mario Bensi <mbensi@ipsquad.net>
+* Mathieu Malaterre <mathieu.malaterre@gmail.com>
+* Matthaeus G. Chajdas
+* Matthias Kretz <kretz@kde.org>
+* Matthias Maennich <matthias@maennich.net>
+* Michael Stürmer
+* Miguel A. Figueroa-Villanueva
+* Mike Jackson
+* Mike McQuaid <mike@mikemcquaid.com>
+* Nicolas Bock <nicolasbock@gmail.com>
+* Nicolas Despres <nicolas.despres@gmail.com>
+* Nikita Krupen'ko <krnekit@gmail.com>
+* NVIDIA Corporation <www.nvidia.com>
+* OpenGamma Ltd. <opengamma.com>
+* Patrick Stotko <stotko@cs.uni-bonn.de>
+* Per Øyvind Karlsen <peroyvind@mandriva.org>
+* Peter Collingbourne <peter@pcc.me.uk>
+* Petr Gotthard <gotthard@honeywell.com>
+* Philip Lowman <philip@yhbt.com>
+* Philippe Proulx <pproulx@efficios.com>
+* Raffi Enficiaud, Max Planck Society
+* Raumfeld <raumfeld.com>
+* Roger Leigh <rleigh@codelibre.net>
+* Rolf Eike Beer <eike@sf-mail.de>
+* Roman Donchenko <roman.donchenko@itseez.com>
+* Roman Kharitonov <roman.kharitonov@itseez.com>
+* Ruslan Baratov
+* Sebastian Holtermann <sebholt@xwmw.org>
+* Stephen Kelly <steveire@gmail.com>
+* Sylvain Joubert <joubert.sy@gmail.com>
+* Thomas Sondergaard <ts@medical-insight.com>
+* Tobias Hunger <tobias.hunger@qt.io>
+* Todd Gamblin <tgamblin@llnl.gov>
+* Tristan Carel
+* University of Dundee
+* Vadim Zhukov
+* Will Dicharry <wdicharry@stellarscience.com>
+
+See version control history for details of individual contributions.
+
+The above copyright and license notice applies to distributions of
+CMake in source and binary form.  Third-party software packages supplied
+with CMake under compatible licenses provide their own copyright notices
+documented in corresponding subdirectories or source files.
+
+------------------------------------------------------------------------------
+
+CMake was initially developed by Kitware with the following sponsorship:
+
+ * National Library of Medicine at the National Institutes of Health
+   as part of the Insight Segmentation and Registration Toolkit (ITK).
+
+ * US National Labs (Los Alamos, Livermore, Sandia) ASC Parallel
+   Visualization Initiative.
+
+ * National Alliance for Medical Image Computing (NAMIC) is funded by the
+   National Institutes of Health through the NIH Roadmap for Medical Research,
+   Grant U54 EB005149.
+
+ * Kitware, Inc.
diff --git a/src/third_party/vulkan-headers/src/cmake/cmake_uninstall.cmake.in b/src/third_party/vulkan-headers/src/cmake/cmake_uninstall.cmake.in
new file mode 100644
index 0000000..2037e36
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/cmake/cmake_uninstall.cmake.in
@@ -0,0 +1,21 @@
+if(NOT EXISTS "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt")
+  message(FATAL_ERROR "Cannot find install manifest: @CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt")
+endif(NOT EXISTS "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt")
+
+file(READ "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt" files)
+string(REGEX REPLACE "\n" ";" files "${files}")
+foreach(file ${files})
+  message(STATUS "Uninstalling $ENV{DESTDIR}${file}")
+  if(IS_SYMLINK "$ENV{DESTDIR}${file}" OR EXISTS "$ENV{DESTDIR}${file}")
+    exec_program(
+      "@CMAKE_COMMAND@" ARGS "-E remove \"$ENV{DESTDIR}${file}\""
+      OUTPUT_VARIABLE rm_out
+      RETURN_VALUE rm_retval
+      )
+    if(NOT "${rm_retval}" STREQUAL 0)
+      message(FATAL_ERROR "Problem when removing $ENV{DESTDIR}${file}")
+    endif(NOT "${rm_retval}" STREQUAL 0)
+  else(IS_SYMLINK "$ENV{DESTDIR}${file}" OR EXISTS "$ENV{DESTDIR}${file}")
+    message(STATUS "File $ENV{DESTDIR}${file} does not exist.")
+  endif(IS_SYMLINK "$ENV{DESTDIR}${file}" OR EXISTS "$ENV{DESTDIR}${file}")
+endforeach(file)
diff --git a/src/third_party/vulkan-headers/src/include/vulkan/vk_icd.h b/src/third_party/vulkan-headers/src/include/vulkan/vk_icd.h
new file mode 100644
index 0000000..5dff59a
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/include/vulkan/vk_icd.h
@@ -0,0 +1,183 @@
+//
+// File: vk_icd.h
+//
+/*
+ * Copyright (c) 2015-2016 The Khronos Group Inc.
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#ifndef VKICD_H
+#define VKICD_H
+
+#include "vulkan.h"
+#include <stdbool.h>
+
+// Loader-ICD version negotiation API.  Versions add the following features:
+//   Version 0 - Initial.  Doesn't support vk_icdGetInstanceProcAddr
+//               or vk_icdNegotiateLoaderICDInterfaceVersion.
+//   Version 1 - Add support for vk_icdGetInstanceProcAddr.
+//   Version 2 - Add Loader/ICD Interface version negotiation
+//               via vk_icdNegotiateLoaderICDInterfaceVersion.
+//   Version 3 - Add ICD creation/destruction of KHR_surface objects.
+//   Version 4 - Add unknown physical device extension querying via
+//               vk_icdGetPhysicalDeviceProcAddr.
+//   Version 5 - Tells ICDs that the loader is now paying attention to the
+//               application version of Vulkan passed into the ApplicationInfo
+//               structure during vkCreateInstance.  This will tell the ICD
+//               that if the loader is older, it should automatically fail a
+//               call for any API version > 1.0.  Otherwise, the loader will
+//               manually determine if it can support the expected version.
+#define CURRENT_LOADER_ICD_INTERFACE_VERSION 5
+#define MIN_SUPPORTED_LOADER_ICD_INTERFACE_VERSION 0
+#define MIN_PHYS_DEV_EXTENSION_ICD_INTERFACE_VERSION 4
+typedef VkResult(VKAPI_PTR *PFN_vkNegotiateLoaderICDInterfaceVersion)(uint32_t *pVersion);
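+
+// Illustrative sketch (not from the upstream file; names prefixed "example_"
+// are hypothetical): an ICD that supports interface version 2 or newer
+// exports vk_icdNegotiateLoaderICDInterfaceVersion with the signature above.
+// A common pattern is to clamp the version offered by the loader to the
+// highest version the driver implements:
+//
+//   #define EXAMPLE_ICD_INTERFACE_VERSION 5  /* hypothetical driver constant */
+//
+//   VKAPI_ATTR VkResult VKAPI_CALL
+//   vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t *pVersion) {
+//       if (*pVersion > EXAMPLE_ICD_INTERFACE_VERSION)
+//           *pVersion = EXAMPLE_ICD_INTERFACE_VERSION;
+//       return VK_SUCCESS;
+//   }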
+
+// This is defined in vk_layer.h which will be found by the loader, but if an ICD is building against this
+// file directly, it won't be found.
+#ifndef PFN_GetPhysicalDeviceProcAddr
+typedef PFN_vkVoidFunction(VKAPI_PTR *PFN_GetPhysicalDeviceProcAddr)(VkInstance instance, const char *pName);
+#endif
+
+/*
+ * The ICD must reserve space for a pointer for the loader's dispatch
+ * table, at the start of <each object>.
+ * The ICD must initialize this variable using the set_loader_magic_value() helper below.
+ */
+
+#define ICD_LOADER_MAGIC 0x01CDC0DE
+
+typedef union {
+    uintptr_t loaderMagic;
+    void *loaderData;
+} VK_LOADER_DATA;
+
+static inline void set_loader_magic_value(void *pNewObject) {
+    VK_LOADER_DATA *loader_info = (VK_LOADER_DATA *)pNewObject;
+    loader_info->loaderMagic = ICD_LOADER_MAGIC;
+}
+
+static inline bool valid_loader_magic_value(void *pNewObject) {
+    const VK_LOADER_DATA *loader_info = (VK_LOADER_DATA *)pNewObject;
+    return (loader_info->loaderMagic & 0xffffffff) == ICD_LOADER_MAGIC;
+}
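+
+// Illustrative sketch (not from the upstream file): as the comment above
+// states, each dispatchable object created by the ICD must reserve the
+// loader's dispatch pointer as its first member and stamp it at creation
+// time. The struct name below is hypothetical:
+//
+//   struct example_icd_device {
+//       VK_LOADER_DATA loader_data;  /* must be the first member */
+//       /* ... driver-private state ... */
+//   };
+//
+//   struct example_icd_device *dev = /* allocate */;
+//   set_loader_magic_value(dev);     /* writes ICD_LOADER_MAGIC */
+//   /* valid_loader_magic_value(dev) now returns true */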
+
+/*
+ * Windows and Linux ICDs will treat VkSurfaceKHR as a pointer to a struct that
+ * contains the platform-specific connection and surface information.
+ */
+typedef enum {
+    VK_ICD_WSI_PLATFORM_MIR,
+    VK_ICD_WSI_PLATFORM_WAYLAND,
+    VK_ICD_WSI_PLATFORM_WIN32,
+    VK_ICD_WSI_PLATFORM_XCB,
+    VK_ICD_WSI_PLATFORM_XLIB,
+    VK_ICD_WSI_PLATFORM_ANDROID,
+    VK_ICD_WSI_PLATFORM_MACOS,
+    VK_ICD_WSI_PLATFORM_IOS,
+    VK_ICD_WSI_PLATFORM_DISPLAY,
+    VK_ICD_WSI_PLATFORM_HEADLESS,
+    VK_ICD_WSI_PLATFORM_METAL,
+} VkIcdWsiPlatform;
+
+typedef struct {
+    VkIcdWsiPlatform platform;
+} VkIcdSurfaceBase;
+
+#ifdef VK_USE_PLATFORM_MIR_KHR
+typedef struct {
+    VkIcdSurfaceBase base;
+    MirConnection *connection;
+    MirSurface *mirSurface;
+} VkIcdSurfaceMir;
+#endif  // VK_USE_PLATFORM_MIR_KHR
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+typedef struct {
+    VkIcdSurfaceBase base;
+    struct wl_display *display;
+    struct wl_surface *surface;
+} VkIcdSurfaceWayland;
+#endif  // VK_USE_PLATFORM_WAYLAND_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+typedef struct {
+    VkIcdSurfaceBase base;
+    HINSTANCE hinstance;
+    HWND hwnd;
+} VkIcdSurfaceWin32;
+#endif  // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+typedef struct {
+    VkIcdSurfaceBase base;
+    xcb_connection_t *connection;
+    xcb_window_t window;
+} VkIcdSurfaceXcb;
+#endif  // VK_USE_PLATFORM_XCB_KHR
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+typedef struct {
+    VkIcdSurfaceBase base;
+    Display *dpy;
+    Window window;
+} VkIcdSurfaceXlib;
+#endif  // VK_USE_PLATFORM_XLIB_KHR
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+typedef struct {
+    VkIcdSurfaceBase base;
+    struct ANativeWindow *window;
+} VkIcdSurfaceAndroid;
+#endif  // VK_USE_PLATFORM_ANDROID_KHR
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+typedef struct {
+    VkIcdSurfaceBase base;
+    const void *pView;
+} VkIcdSurfaceMacOS;
+#endif  // VK_USE_PLATFORM_MACOS_MVK
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+typedef struct {
+    VkIcdSurfaceBase base;
+    const void *pView;
+} VkIcdSurfaceIOS;
+#endif  // VK_USE_PLATFORM_IOS_MVK
+
+typedef struct {
+    VkIcdSurfaceBase base;
+    VkDisplayModeKHR displayMode;
+    uint32_t planeIndex;
+    uint32_t planeStackIndex;
+    VkSurfaceTransformFlagBitsKHR transform;
+    float globalAlpha;
+    VkDisplayPlaneAlphaFlagBitsKHR alphaMode;
+    VkExtent2D imageExtent;
+} VkIcdSurfaceDisplay;
+
+typedef struct {
+    VkIcdSurfaceBase base;
+} VkIcdSurfaceHeadless;
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+typedef struct {
+    VkIcdSurfaceBase base;
+    const CAMetalLayer *pLayer;
+} VkIcdSurfaceMetal;
+#endif // VK_USE_PLATFORM_METAL_EXT
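+
+// Illustrative sketch (not from the upstream file): because every surface
+// struct above begins with VkIcdSurfaceBase, code handed one of these objects
+// can read the WSI platform from the common header before touching any
+// platform-specific fields:
+//
+//   static inline VkIcdWsiPlatform example_icd_surface_platform(const void *icd_surface) {
+//       return ((const VkIcdSurfaceBase *)icd_surface)->platform;
+//   }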
+
+#endif  // VKICD_H
diff --git a/src/third_party/vulkan-headers/src/include/vulkan/vk_layer.h b/src/third_party/vulkan-headers/src/include/vulkan/vk_layer.h
new file mode 100644
index 0000000..fa76520
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/include/vulkan/vk_layer.h
@@ -0,0 +1,202 @@
+//
+// File: vk_layer.h
+//
+/*
+ * Copyright (c) 2015-2017 The Khronos Group Inc.
+ * Copyright (c) 2015-2017 Valve Corporation
+ * Copyright (c) 2015-2017 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+/* Need to define dispatch table
+ * Core struct can then have ptr to dispatch table at the top
+ * Along with object ptrs for current and next OBJ
+ */
+#pragma once
+
+#include "vulkan.h"
+#if defined(__GNUC__) && __GNUC__ >= 4
+#define VK_LAYER_EXPORT __attribute__((visibility("default")))
+#elif defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590)
+#define VK_LAYER_EXPORT __attribute__((visibility("default")))
+#else
+#define VK_LAYER_EXPORT
+#endif
+
+#define MAX_NUM_UNKNOWN_EXTS 250
+
+ // Loader-Layer version negotiation API.  Versions add the following features:
+ //   Versions 0/1 - Initial.  Doesn't support vk_layerGetPhysicalDeviceProcAddr
+ //                  or vk_icdNegotiateLoaderLayerInterfaceVersion.
+ //   Version 2    - Add support for vk_layerGetPhysicalDeviceProcAddr and
+ //                  vk_icdNegotiateLoaderLayerInterfaceVersion.
+#define CURRENT_LOADER_LAYER_INTERFACE_VERSION 2
+#define MIN_SUPPORTED_LOADER_LAYER_INTERFACE_VERSION 1
+
+#define VK_CURRENT_CHAIN_VERSION 1
+
+// Typedef for use in the interfaces below
+typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_GetPhysicalDeviceProcAddr)(VkInstance instance, const char* pName);
+
+// Version negotiation values
+typedef enum VkNegotiateLayerStructType {
+    LAYER_NEGOTIATE_UNINTIALIZED = 0,
+    LAYER_NEGOTIATE_INTERFACE_STRUCT = 1,
+} VkNegotiateLayerStructType;
+
+// Version negotiation structures
+typedef struct VkNegotiateLayerInterface {
+    VkNegotiateLayerStructType sType;
+    void *pNext;
+    uint32_t loaderLayerInterfaceVersion;
+    PFN_vkGetInstanceProcAddr pfnGetInstanceProcAddr;
+    PFN_vkGetDeviceProcAddr pfnGetDeviceProcAddr;
+    PFN_GetPhysicalDeviceProcAddr pfnGetPhysicalDeviceProcAddr;
+} VkNegotiateLayerInterface;
+
+// Version negotiation functions
+typedef VkResult (VKAPI_PTR *PFN_vkNegotiateLoaderLayerInterfaceVersion)(VkNegotiateLayerInterface *pVersionStruct);
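+
+// Illustrative sketch (not from the upstream file; the example_layer_*
+// functions are hypothetical): a layer exporting
+// vkNegotiateLoaderLayerInterfaceVersion (declared further below) fills in
+// the structure passed by the loader, typically like this:
+//
+//   VKAPI_ATTR VkResult VKAPI_CALL
+//   vkNegotiateLoaderLayerInterfaceVersion(VkNegotiateLayerInterface *pVersionStruct) {
+//       /* optionally verify pVersionStruct->sType == LAYER_NEGOTIATE_INTERFACE_STRUCT */
+//       if (pVersionStruct->loaderLayerInterfaceVersion > CURRENT_LOADER_LAYER_INTERFACE_VERSION)
+//           pVersionStruct->loaderLayerInterfaceVersion = CURRENT_LOADER_LAYER_INTERFACE_VERSION;
+//       pVersionStruct->pfnGetInstanceProcAddr       = example_layer_GetInstanceProcAddr;
+//       pVersionStruct->pfnGetDeviceProcAddr         = example_layer_GetDeviceProcAddr;
+//       pVersionStruct->pfnGetPhysicalDeviceProcAddr = example_layer_GetPhysicalDeviceProcAddr;
+//       return VK_SUCCESS;
+//   }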
+
+// Function prototype for unknown physical device extension command
+typedef VkResult(VKAPI_PTR *PFN_PhysDevExt)(VkPhysicalDevice phys_device);
+
+// ------------------------------------------------------------------------------------------------
+// CreateInstance and CreateDevice support structures
+
+/* Sub type of structure for instance and device loader ext of CreateInfo.
+ * When sType == VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO
+ * or sType == VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO
+ * then VkLayerFunction indicates struct type pointed to by pNext
+ */
+typedef enum VkLayerFunction_ {
+    VK_LAYER_LINK_INFO = 0,
+    VK_LOADER_DATA_CALLBACK = 1,
+    VK_LOADER_LAYER_CREATE_DEVICE_CALLBACK = 2
+} VkLayerFunction;
+
+typedef struct VkLayerInstanceLink_ {
+    struct VkLayerInstanceLink_ *pNext;
+    PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr;
+    PFN_GetPhysicalDeviceProcAddr pfnNextGetPhysicalDeviceProcAddr;
+} VkLayerInstanceLink;
+
+/*
+ * When creating the device chain the loader needs to pass
+ * down information about its device structure needed at
+ * the end of the chain. Passing the data via the
+ * VkLayerDeviceInfo avoids issues with finding the
+ * exact instance being used.
+ */
+typedef struct VkLayerDeviceInfo_ {
+    void *device_info;
+    PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr;
+} VkLayerDeviceInfo;
+
+typedef VkResult (VKAPI_PTR *PFN_vkSetInstanceLoaderData)(VkInstance instance,
+        void *object);
+typedef VkResult (VKAPI_PTR *PFN_vkSetDeviceLoaderData)(VkDevice device,
+        void *object);
+typedef VkResult (VKAPI_PTR *PFN_vkLayerCreateDevice)(VkInstance instance, VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo,
+						      const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, PFN_vkGetInstanceProcAddr layerGIPA, PFN_vkGetDeviceProcAddr *nextGDPA);
+typedef void (VKAPI_PTR *PFN_vkLayerDestroyDevice)(VkDevice physicalDevice, const VkAllocationCallbacks *pAllocator, PFN_vkDestroyDevice destroyFunction);
+typedef struct {
+    VkStructureType sType; // VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO
+    const void *pNext;
+    VkLayerFunction function;
+    union {
+        VkLayerInstanceLink *pLayerInfo;
+        PFN_vkSetInstanceLoaderData pfnSetInstanceLoaderData;
+        struct {
+	  PFN_vkLayerCreateDevice pfnLayerCreateDevice;
+	  PFN_vkLayerDestroyDevice pfnLayerDestroyDevice;
+	} layerDevice;
+    } u;
+} VkLayerInstanceCreateInfo;
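+
+// Illustrative sketch (not from the upstream file): inside its vkCreateInstance
+// hook a layer locates the link info by walking the pNext chain of the
+// application's VkInstanceCreateInfo (pCreateInfo below) for an entry with
+// sType == VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO and
+// function == VK_LAYER_LINK_INFO, reads the next layer's GetInstanceProcAddr,
+// and advances the chain before calling down:
+//
+//   VkLayerInstanceCreateInfo *chain_info = (VkLayerInstanceCreateInfo *)pCreateInfo->pNext;
+//   while (chain_info &&
+//          !(chain_info->sType == VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO &&
+//            chain_info->function == VK_LAYER_LINK_INFO)) {
+//       chain_info = (VkLayerInstanceCreateInfo *)chain_info->pNext;
+//   }
+//   PFN_vkGetInstanceProcAddr next_gipa = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
+//   chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;  /* advance for the next layer */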
+
+typedef struct VkLayerDeviceLink_ {
+    struct VkLayerDeviceLink_ *pNext;
+    PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr;
+    PFN_vkGetDeviceProcAddr pfnNextGetDeviceProcAddr;
+} VkLayerDeviceLink;
+
+typedef struct {
+    VkStructureType sType; // VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO
+    const void *pNext;
+    VkLayerFunction function;
+    union {
+        VkLayerDeviceLink *pLayerInfo;
+        PFN_vkSetDeviceLoaderData pfnSetDeviceLoaderData;
+    } u;
+} VkLayerDeviceCreateInfo;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+VKAPI_ATTR VkResult VKAPI_CALL vkNegotiateLoaderLayerInterfaceVersion(VkNegotiateLayerInterface *pVersionStruct);
+
+typedef enum VkChainType {
+    VK_CHAIN_TYPE_UNKNOWN = 0,
+    VK_CHAIN_TYPE_ENUMERATE_INSTANCE_EXTENSION_PROPERTIES = 1,
+    VK_CHAIN_TYPE_ENUMERATE_INSTANCE_LAYER_PROPERTIES = 2,
+    VK_CHAIN_TYPE_ENUMERATE_INSTANCE_VERSION = 3,
+} VkChainType;
+
+typedef struct VkChainHeader {
+    VkChainType type;
+    uint32_t version;
+    uint32_t size;
+} VkChainHeader;
+
+typedef struct VkEnumerateInstanceExtensionPropertiesChain {
+    VkChainHeader header;
+    VkResult(VKAPI_PTR *pfnNextLayer)(const struct VkEnumerateInstanceExtensionPropertiesChain *, const char *, uint32_t *,
+                                      VkExtensionProperties *);
+    const struct VkEnumerateInstanceExtensionPropertiesChain *pNextLink;
+
+#if defined(__cplusplus)
+    inline VkResult CallDown(const char *pLayerName, uint32_t *pPropertyCount, VkExtensionProperties *pProperties) const {
+        return pfnNextLayer(pNextLink, pLayerName, pPropertyCount, pProperties);
+    }
+#endif
+} VkEnumerateInstanceExtensionPropertiesChain;
+
+typedef struct VkEnumerateInstanceLayerPropertiesChain {
+    VkChainHeader header;
+    VkResult(VKAPI_PTR *pfnNextLayer)(const struct VkEnumerateInstanceLayerPropertiesChain *, uint32_t *, VkLayerProperties *);
+    const struct VkEnumerateInstanceLayerPropertiesChain *pNextLink;
+
+#if defined(__cplusplus)
+    inline VkResult CallDown(uint32_t *pPropertyCount, VkLayerProperties *pProperties) const {
+        return pfnNextLayer(pNextLink, pPropertyCount, pProperties);
+    }
+#endif
+} VkEnumerateInstanceLayerPropertiesChain;
+
+typedef struct VkEnumerateInstanceVersionChain {
+    VkChainHeader header;
+    VkResult(VKAPI_PTR *pfnNextLayer)(const struct VkEnumerateInstanceVersionChain *, uint32_t *);
+    const struct VkEnumerateInstanceVersionChain *pNextLink;
+
+#if defined(__cplusplus)
+    inline VkResult CallDown(uint32_t *pApiVersion) const {
+        return pfnNextLayer(pNextLink, pApiVersion);
+    }
+#endif
+} VkEnumerateInstanceVersionChain;
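+
+// Illustrative sketch (not from the upstream file; the function name is
+// hypothetical): a layer intercepting one of the three pre-instance functions
+// receives the matching chain struct and forwards the call down the chain:
+//
+//   VkResult example_EnumerateInstanceLayerProperties(
+//       const VkEnumerateInstanceLayerPropertiesChain *chain,
+//       uint32_t *pPropertyCount, VkLayerProperties *pProperties) {
+//       /* inspect or adjust arguments here, then continue down the chain */
+//       return chain->CallDown(pPropertyCount, pProperties);
+//   }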
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/src/third_party/vulkan-headers/src/include/vulkan/vk_platform.h b/src/third_party/vulkan-headers/src/include/vulkan/vk_platform.h
new file mode 100644
index 0000000..7289299
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/include/vulkan/vk_platform.h
@@ -0,0 +1,92 @@
+//
+// File: vk_platform.h
+//
+/*
+** Copyright (c) 2014-2017 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+
+#ifndef VK_PLATFORM_H_
+#define VK_PLATFORM_H_
+
+#ifdef __cplusplus
+extern "C"
+{
+#endif // __cplusplus
+
+/*
+***************************************************************************************************
+*   Platform-specific directives and type declarations
+***************************************************************************************************
+*/
+
+/* Platform-specific calling convention macros.
+ *
+ * Platforms should define these so that Vulkan clients call Vulkan commands
+ * with the same calling conventions that the Vulkan implementation expects.
+ *
+ * VKAPI_ATTR - Placed before the return type in function declarations.
+ *              Useful for C++11 and GCC/Clang-style function attribute syntax.
+ * VKAPI_CALL - Placed after the return type in function declarations.
+ *              Useful for MSVC-style calling convention syntax.
+ * VKAPI_PTR  - Placed between the '(' and '*' in function pointer types.
+ *
+ * Function declaration:  VKAPI_ATTR void VKAPI_CALL vkCommand(void);
+ * Function pointer type: typedef void (VKAPI_PTR *PFN_vkCommand)(void);
+ */
+#if defined(_WIN32)
+    // On Windows, Vulkan commands use the stdcall convention
+    #define VKAPI_ATTR
+    #define VKAPI_CALL __stdcall
+    #define VKAPI_PTR  VKAPI_CALL
+#elif defined(__ANDROID__) && defined(__ARM_ARCH) && __ARM_ARCH < 7
+    #error "Vulkan isn't supported for the 'armeabi' NDK ABI"
+#elif defined(__ANDROID__) && defined(__ARM_ARCH) && __ARM_ARCH >= 7 && defined(__ARM_32BIT_STATE)
+    // On Android 32-bit ARM targets, Vulkan functions use the "hardfloat"
+    // calling convention, i.e. float parameters are passed in registers. This
+    // is true even if the rest of the application passes floats on the stack,
+    // as it does by default when compiling for the armeabi-v7a NDK ABI.
+    #define VKAPI_ATTR __attribute__((pcs("aapcs-vfp")))
+    #define VKAPI_CALL
+    #define VKAPI_PTR  VKAPI_ATTR
+#else
+    // On other platforms, use the default calling convention
+    #define VKAPI_ATTR
+    #define VKAPI_CALL
+    #define VKAPI_PTR
+#endif
+
+#include <stddef.h>
+
+#if !defined(VK_NO_STDINT_H)
+    #if defined(_MSC_VER) && (_MSC_VER < 1600)
+        typedef signed   __int8  int8_t;
+        typedef unsigned __int8  uint8_t;
+        typedef signed   __int16 int16_t;
+        typedef unsigned __int16 uint16_t;
+        typedef signed   __int32 int32_t;
+        typedef unsigned __int32 uint32_t;
+        typedef signed   __int64 int64_t;
+        typedef unsigned __int64 uint64_t;
+    #else
+        #include <stdint.h>
+    #endif
+#endif // !defined(VK_NO_STDINT_H)
+
+#ifdef __cplusplus
+} // extern "C"
+#endif // __cplusplus
+
+#endif
diff --git a/src/third_party/vulkan-headers/src/include/vulkan/vk_sdk_platform.h b/src/third_party/vulkan-headers/src/include/vulkan/vk_sdk_platform.h
new file mode 100644
index 0000000..96d8676
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/include/vulkan/vk_sdk_platform.h
@@ -0,0 +1,69 @@
+//
+// File: vk_sdk_platform.h
+//
+/*
+ * Copyright (c) 2015-2016 The Khronos Group Inc.
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VK_SDK_PLATFORM_H
+#define VK_SDK_PLATFORM_H
+
+#if defined(_WIN32)
+#define NOMINMAX
+#ifndef __cplusplus
+#undef inline
+#define inline __inline
+#endif // __cplusplus
+
+#if (defined(_MSC_VER) && _MSC_VER < 1900 /*vs2015*/)
+// C99:
+// Microsoft didn't implement C99 in Visual Studio; but started adding it with
+// VS2013.  However, VS2013 still didn't have snprintf().  The following is a
+// work-around (Note: The _CRT_SECURE_NO_WARNINGS macro must be set in the
+// "CMakeLists.txt" file).
+// NOTE: This is fixed in Visual Studio 2015.
+#define snprintf _snprintf
+#endif
+
+#define strdup _strdup
+
+#endif // _WIN32
+
+// Check for noexcept support using clang, with fallback to Windows or GCC version numbers
+#ifndef NOEXCEPT
+#if defined(__clang__)
+#if __has_feature(cxx_noexcept)
+#define HAS_NOEXCEPT
+#endif
+#else
+#if defined(__GXX_EXPERIMENTAL_CXX0X__) && __GNUC__ * 10 + __GNUC_MINOR__ >= 46
+#define HAS_NOEXCEPT
+#else
+#if defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023026 && defined(_HAS_EXCEPTIONS) && _HAS_EXCEPTIONS
+#define HAS_NOEXCEPT
+#endif
+#endif
+#endif
+
+#ifdef HAS_NOEXCEPT
+#define NOEXCEPT noexcept
+#else
+#define NOEXCEPT
+#endif
+#endif
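+
+// Illustrative sketch (not from the upstream file): with the detection above,
+// declarations can use NOEXCEPT portably, e.g.
+//
+//   void example_helper(void) NOEXCEPT;  /* expands to noexcept where supported */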
+
+#endif  // VK_SDK_PLATFORM_H
diff --git a/src/third_party/vulkan-headers/src/include/vulkan/vulkan.h b/src/third_party/vulkan-headers/src/include/vulkan/vulkan.h
new file mode 100644
index 0000000..5f853f9
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/include/vulkan/vulkan.h
@@ -0,0 +1,86 @@
+#ifndef VULKAN_H_
+#define VULKAN_H_ 1
+
+/*
+** Copyright (c) 2015-2019 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#include "vk_platform.h"
+#include "vulkan_core.h"
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+#include "vulkan_android.h"
+#endif
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+#include <zircon/types.h>
+#include "vulkan_fuchsia.h"
+#endif
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+#include "vulkan_ios.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+#include "vulkan_macos.h"
+#endif
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+#include "vulkan_metal.h"
+#endif
+
+#ifdef VK_USE_PLATFORM_VI_NN
+#include "vulkan_vi.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+#include <wayland-client.h>
+#include "vulkan_wayland.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#include <windows.h>
+#include "vulkan_win32.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+#include <xcb/xcb.h>
+#include "vulkan_xcb.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+#include <X11/Xlib.h>
+#include "vulkan_xlib.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+#include <X11/Xlib.h>
+#include <X11/extensions/Xrandr.h>
+#include "vulkan_xlib_xrandr.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_GGP
+#include <ggp_c/vulkan_types.h>
+#include "vulkan_ggp.h"
+#endif
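+
+// Illustrative note (not from the upstream file): each platform WSI header
+// above is only pulled in when the corresponding VK_USE_PLATFORM_* macro is
+// defined before including this header, e.g.
+//
+//   #define VK_USE_PLATFORM_XCB_KHR
+//   #include <vulkan/vulkan.h>  /* now also provides the vulkan_xcb.h declarations */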
+
+#endif // VULKAN_H_
diff --git a/src/third_party/vulkan-headers/src/include/vulkan/vulkan.hpp b/src/third_party/vulkan-headers/src/include/vulkan/vulkan.hpp
new file mode 100644
index 0000000..38dbbe5
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/include/vulkan/vulkan.hpp
@@ -0,0 +1,69257 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+// 
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// 
+//     http://www.apache.org/licenses/LICENSE-2.0
+// 
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// 
+// ---- Exceptions to the Apache 2.0 License: ----
+// 
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+// 
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//     
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_HPP
+#define VULKAN_HPP
+
+#include <algorithm>
+#include <array>
+#include <cstddef>
+#include <cstdint>
+#include <cstring>
+#include <initializer_list>
+#include <string>
+#include <system_error>
+#include <tuple>
+#include <type_traits>
+#include <vulkan/vulkan.h>
+
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+# include <memory>
+# include <vector>
+#endif
+
+#if !defined(VULKAN_HPP_ASSERT)
+# include <cassert>
+# define VULKAN_HPP_ASSERT   assert
+#endif
+
+#if !defined(VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL)
+# define VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL 1
+#endif
+
+#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL == 1
+#  if defined(__linux__) || defined(__APPLE__)
+#   include <dlfcn.h>
+#  endif
+
+#  if defined(_WIN32)
+#   include <windows.h>
+#  endif
+#endif
+
+static_assert( VK_HEADER_VERSION ==  130 , "Wrong VK_HEADER_VERSION!" );
+
+// 32-bit vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default.
+// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION
+#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
+# if !defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+#  define VULKAN_HPP_TYPESAFE_CONVERSION
+# endif
+#endif
+
+// <tuple> includes <sys/sysmacros.h> through some other header
+// this results in major(x) being resolved to gnu_dev_major(x)
+// which is an expression in a constructor initializer list.
+#if defined(major)
+  #undef major
+#endif
+#if defined(minor)
+  #undef minor
+#endif
+
+// Windows defines MemoryBarrier which is deprecated and collides
+// with the VULKAN_HPP_NAMESPACE::MemoryBarrier struct.
+#if defined(MemoryBarrier)
+  #undef MemoryBarrier
+#endif
+
+#if !defined(VULKAN_HPP_HAS_UNRESTRICTED_UNIONS)
+# if defined(__clang__)
+#  if __has_feature(cxx_unrestricted_unions)
+#   define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+#  endif
+# elif defined(__GNUC__)
+#  define GCC_VERSION (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__)
+#  if 40600 <= GCC_VERSION
+#   define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+#  endif
+# elif defined(_MSC_VER)
+#  if 1900 <= _MSC_VER
+#   define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+#  endif
+# endif
+#endif
+
+#if !defined(VULKAN_HPP_INLINE)
+# if defined(__clang__)
+#  if __has_attribute(always_inline)
+#   define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__
+#  else
+#   define VULKAN_HPP_INLINE inline
+#  endif
+# elif defined(__GNUC__)
+#  define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__
+# elif defined(_MSC_VER)
+#  define VULKAN_HPP_INLINE inline
+# else
+#  define VULKAN_HPP_INLINE inline
+# endif
+#endif
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+# define VULKAN_HPP_TYPESAFE_EXPLICIT
+#else
+# define VULKAN_HPP_TYPESAFE_EXPLICIT explicit
+#endif
+
+#if defined(__cpp_constexpr)
+# define VULKAN_HPP_CONSTEXPR constexpr
+# if __cpp_constexpr >= 201304
+#  define VULKAN_HPP_CONSTEXPR_14  constexpr
+# else
+#  define VULKAN_HPP_CONSTEXPR_14
+# endif
+# define VULKAN_HPP_CONST_OR_CONSTEXPR  constexpr
+#else
+# define VULKAN_HPP_CONSTEXPR
+# define VULKAN_HPP_CONSTEXPR_14
+# define VULKAN_HPP_CONST_OR_CONSTEXPR  const
+#endif
+
+#if !defined(VULKAN_HPP_NOEXCEPT)
+# if defined(_MSC_VER) && (_MSC_VER <= 1800)
+#  define VULKAN_HPP_NOEXCEPT
+# else
+#  define VULKAN_HPP_NOEXCEPT noexcept
+#  define VULKAN_HPP_HAS_NOEXCEPT 1
+# endif
+#endif
+
+#if !defined(VULKAN_HPP_NAMESPACE)
+#define VULKAN_HPP_NAMESPACE vk
+#endif
+
+#define VULKAN_HPP_STRINGIFY2(text) #text
+#define VULKAN_HPP_STRINGIFY(text) VULKAN_HPP_STRINGIFY2(text)
+#define VULKAN_HPP_NAMESPACE_STRING VULKAN_HPP_STRINGIFY(VULKAN_HPP_NAMESPACE)
+
+namespace VULKAN_HPP_NAMESPACE
+{
+#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
+  template <typename T>
+  class ArrayProxy
+  {
+  public:
+    VULKAN_HPP_CONSTEXPR ArrayProxy(std::nullptr_t) VULKAN_HPP_NOEXCEPT
+      : m_count(0)
+      , m_ptr(nullptr)
+    {}
+
+    ArrayProxy(typename std::remove_reference<T>::type & ptr) VULKAN_HPP_NOEXCEPT
+      : m_count(1)
+      , m_ptr(&ptr)
+    {}
+
+    ArrayProxy(uint32_t count, T * ptr) VULKAN_HPP_NOEXCEPT
+      : m_count(count)
+      , m_ptr(ptr)
+    {}
+
+    template <size_t N>
+    ArrayProxy(std::array<typename std::remove_const<T>::type, N> & data) VULKAN_HPP_NOEXCEPT
+      : m_count(N)
+      , m_ptr(data.data())
+    {}
+
+    template <size_t N>
+    ArrayProxy(std::array<typename std::remove_const<T>::type, N> const& data) VULKAN_HPP_NOEXCEPT
+      : m_count(N)
+      , m_ptr(data.data())
+    {}
+
+    template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
+    ArrayProxy(std::vector<typename std::remove_const<T>::type, Allocator> & data) VULKAN_HPP_NOEXCEPT
+      : m_count(static_cast<uint32_t>(data.size()))
+      , m_ptr(data.data())
+    {}
+
+    template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
+    ArrayProxy(std::vector<typename std::remove_const<T>::type, Allocator> const& data) VULKAN_HPP_NOEXCEPT
+      : m_count(static_cast<uint32_t>(data.size()))
+      , m_ptr(data.data())
+    {}
+
+    ArrayProxy(std::initializer_list<typename std::remove_reference<T>::type> const& data) VULKAN_HPP_NOEXCEPT
+      : m_count(static_cast<uint32_t>(data.end() - data.begin()))
+      , m_ptr(data.begin())
+    {}
+
+    const T * begin() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ptr;
+    }
+
+    const T * end() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ptr + m_count;
+    }
+
+    const T & front() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT(m_count && m_ptr);
+      return *m_ptr;
+    }
+
+    const T & back() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT(m_count && m_ptr);
+      return *(m_ptr + m_count - 1);
+    }
+
+    bool empty() const VULKAN_HPP_NOEXCEPT
+    {
+      return (m_count == 0);
+    }
+
+    uint32_t size() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_count;
+    }
+
+    T * data() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ptr;
+    }
+
+  private:
+    uint32_t  m_count;
+    T *       m_ptr;
+  };
+#endif
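+
+  // Illustrative sketch (not from the generated header; takesInts is
+  // hypothetical): ArrayProxy lets one enhanced-mode parameter accept a single
+  // element, a std::array, a std::vector or an initializer list.  Given
+  //   void takesInts(vk::ArrayProxy<const int> values);
+  // all of the following calls are valid:
+  //
+  //   takesInts(42);                 // single element
+  //   std::vector<int> v{1, 2, 3};
+  //   takesInts(v);                  // vector
+  //   takesInts({4, 5, 6});          // initializer list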
+
+  template <typename FlagBitsType> struct FlagTraits
+  {
+    enum { allFlags = 0 };
+  };
+
+  template <typename BitType, typename MaskType = VkFlags>
+  class Flags
+  {
+  public:
+    VULKAN_HPP_CONSTEXPR Flags() VULKAN_HPP_NOEXCEPT
+      : m_mask(0)
+    {}
+
+    VULKAN_HPP_CONSTEXPR Flags(BitType bit) VULKAN_HPP_NOEXCEPT
+      : m_mask(static_cast<MaskType>(bit))
+    {}
+
+    VULKAN_HPP_CONSTEXPR Flags(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT
+      : m_mask(rhs.m_mask)
+    {}
+
+    VULKAN_HPP_CONSTEXPR explicit Flags(MaskType flags) VULKAN_HPP_NOEXCEPT
+      : m_mask(flags)
+    {}
+
+    Flags<BitType> & operator=(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT
+    {
+      m_mask = rhs.m_mask;
+      return *this;
+    }
+
+    Flags<BitType> & operator|=(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT
+    {
+      m_mask |= rhs.m_mask;
+      return *this;
+    }
+
+    Flags<BitType> & operator&=(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT
+    {
+      m_mask &= rhs.m_mask;
+      return *this;
+    }
+
+    Flags<BitType> & operator^=(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT
+    {
+      m_mask ^= rhs.m_mask;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR Flags<BitType> operator|(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
+    {
+      return Flags<BitType>(m_mask | rhs.m_mask);
+    }
+
+    VULKAN_HPP_CONSTEXPR Flags<BitType> operator&(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
+    {
+      return Flags<BitType>(m_mask & rhs.m_mask);
+    }
+
+    VULKAN_HPP_CONSTEXPR Flags<BitType> operator^(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
+    {
+      return Flags<BitType>(m_mask ^ rhs.m_mask);
+    }
+
+    VULKAN_HPP_CONSTEXPR bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return !m_mask;
+    }
+
+    VULKAN_HPP_CONSTEXPR Flags<BitType> operator~() const VULKAN_HPP_NOEXCEPT
+    {
+      return Flags<BitType>(m_mask ^ FlagTraits<BitType>::allFlags);
+    }
+
+    VULKAN_HPP_CONSTEXPR bool operator==(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_mask == rhs.m_mask;
+    }
+
+    VULKAN_HPP_CONSTEXPR bool operator!=(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_mask != rhs.m_mask;
+    }
+
+    explicit VULKAN_HPP_CONSTEXPR operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return !!m_mask;
+    }
+
+    explicit VULKAN_HPP_CONSTEXPR operator MaskType() const VULKAN_HPP_NOEXCEPT
+    {
+        return m_mask;
+    }
+
+  private:
+    MaskType  m_mask;
+  };
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR Flags<BitType> operator|(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
+  {
+    return flags | bit;
+  }
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR Flags<BitType> operator&(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
+  {
+    return flags & bit;
+  }
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR Flags<BitType> operator^(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
+  {
+    return flags ^ bit;
+  }
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR bool operator==(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
+  {
+    return flags == bit;
+  }
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR bool operator!=(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
+  {
+    return flags != bit;
+  }
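+
+  // Illustrative sketch (not from the generated header): Flags<BitType> wraps
+  // the Vk*Flags bitmasks in a type-safe way, so bits of the same enum combine
+  // and test naturally while unrelated enums cannot be mixed.  Using the
+  // image-usage flag bits defined later in this header:
+  //
+  //   vk::ImageUsageFlags usage = vk::ImageUsageFlagBits::eTransferSrc
+  //                             | vk::ImageUsageFlagBits::eSampled;
+  //   if (usage & vk::ImageUsageFlagBits::eSampled) { /* bit is set */ }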
+
+  template <typename RefType>
+  class Optional
+  {
+  public:
+    Optional(RefType & reference) VULKAN_HPP_NOEXCEPT { m_ptr = &reference; }
+    Optional(RefType * ptr) VULKAN_HPP_NOEXCEPT { m_ptr = ptr; }
+    Optional(std::nullptr_t) VULKAN_HPP_NOEXCEPT { m_ptr = nullptr; }
+
+    operator RefType*() const VULKAN_HPP_NOEXCEPT { return m_ptr; }
+    RefType const* operator->() const VULKAN_HPP_NOEXCEPT { return m_ptr; }
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT { return !!m_ptr; }
+
+  private:
+    RefType *m_ptr;
+  };
+
+  template <typename X, typename Y> struct isStructureChainValid { enum { value = false }; };
+
+  template <typename P, typename T>
+  struct TypeList
+  {
+    using list = P;
+    using last = T;
+  };
+
+  template <typename List, typename X>
+  struct extendCheck
+  {
+    static const bool valid = isStructureChainValid<typename List::last, X>::value || extendCheck<typename List::list,X>::valid;
+  };
+
+  template <typename T, typename X>
+  struct extendCheck<TypeList<void,T>,X>
+  {
+    static const bool valid = isStructureChainValid<T, X>::value;
+  };
+
+  template <typename X>
+  struct extendCheck<void,X>
+  {
+    static const bool valid = true;
+  };
+
+  template<typename Type, class...>
+  struct isPartOfStructureChain
+  {
+    static const bool valid = false;
+  };
+
+  template<typename Type, typename Head, typename... Tail>
+  struct isPartOfStructureChain<Type, Head, Tail...>
+  {
+    static const bool valid = std::is_same<Type, Head>::value || isPartOfStructureChain<Type, Tail...>::valid;
+  };
+
+  template <class Element>
+  class StructureChainElement
+  {
+  public:
+    explicit operator Element&() VULKAN_HPP_NOEXCEPT { return value; }
+    explicit operator const Element&() const VULKAN_HPP_NOEXCEPT { return value; }
+  private:
+    Element value;
+  };
+
+  template<typename ...StructureElements>
+  class StructureChain : private StructureChainElement<StructureElements>...
+  {
+  public:
+    StructureChain() VULKAN_HPP_NOEXCEPT
+    {
+      link<void, StructureElements...>();
+    }
+
+    StructureChain(StructureChain const &rhs) VULKAN_HPP_NOEXCEPT
+    {
+      linkAndCopy<void, StructureElements...>(rhs);
+    }
+
+    StructureChain(StructureElements const &... elems) VULKAN_HPP_NOEXCEPT
+    {
+      linkAndCopyElements<void, StructureElements...>(elems...);
+    }
+
+    StructureChain& operator=(StructureChain const &rhs) VULKAN_HPP_NOEXCEPT
+    {
+      linkAndCopy<void, StructureElements...>(rhs);
+      return *this;
+    }
+
+    template<typename ClassType> ClassType& get() VULKAN_HPP_NOEXCEPT { return static_cast<ClassType&>(*this);}
+
+    template<typename ClassTypeA, typename ClassTypeB, typename ...ClassTypes>
+    std::tuple<ClassTypeA, ClassTypeB, ClassTypes...> get()
+    {
+        return std::tuple_cat(
+            std::make_tuple(get<ClassTypeA>(),get<ClassTypeB>()),
+            std::make_tuple(get<ClassTypes>()...)
+        );
+    }
+
+    template<typename ClassType>
+    void unlink() VULKAN_HPP_NOEXCEPT
+    {
+      static_assert(isPartOfStructureChain<ClassType, StructureElements...>::valid, "Can't unlink Structure that's not part of this StructureChain!");
+      static_assert(!std::is_same<ClassType, typename std::tuple_element<0, std::tuple<StructureElements...>>::type>::value, "It's not allowed to unlink the first element!");
+      VkBaseOutStructure * ptr = reinterpret_cast<VkBaseOutStructure*>(&get<ClassType>());
+      assert(ptr != nullptr);
+      VkBaseOutStructure ** ppNext = &(reinterpret_cast<VkBaseOutStructure*>(this)->pNext);
+      assert(*ppNext != nullptr);
+      while (*ppNext != ptr)
+      {
+        ppNext = &(*ppNext)->pNext;
+        assert(*ppNext != nullptr);   // fires, if the ClassType member has already been unlinked !
+      }
+      assert(*ppNext == ptr);
+      *ppNext = (*ppNext)->pNext;
+    }
+
+    template <typename ClassType>
+    void relink() VULKAN_HPP_NOEXCEPT
+    {
+      static_assert(isPartOfStructureChain<ClassType, StructureElements...>::valid, "Can't relink Structure that's not part of this StructureChain!");
+      static_assert(!std::is_same<ClassType, typename std::tuple_element<0, std::tuple<StructureElements...>>::type>::value, "It's not allowed to have the first element unlinked!");
+      VkBaseOutStructure * ptr = reinterpret_cast<VkBaseOutStructure*>(&get<ClassType>());
+      assert(ptr != nullptr);
+      VkBaseOutStructure ** ppNext = &(reinterpret_cast<VkBaseOutStructure*>(this)->pNext);
+      assert(*ppNext != nullptr);
+#if !defined(NDEBUG)
+      while (*ppNext)
+      {
+        assert(*ppNext != ptr);   // fires, if the ClassType member has not been unlinked before
+        ppNext = &(*ppNext)->pNext;
+      }
+      ppNext = &(reinterpret_cast<VkBaseOutStructure*>(this)->pNext);
+#endif
+      ptr->pNext = *ppNext;
+      *ppNext = ptr;
+    }
+
+  private:
+    template<typename List, typename X>
+    void link() VULKAN_HPP_NOEXCEPT
+    {
+      static_assert(extendCheck<List, X>::valid, "The structure chain is not valid!");
+    }
+
+    template<typename List, typename X, typename Y, typename ...Z>
+    void link() VULKAN_HPP_NOEXCEPT
+    {
+      static_assert(extendCheck<List,X>::valid, "The structure chain is not valid!");
+      X& x = static_cast<X&>(*this);
+      Y& y = static_cast<Y&>(*this);
+      x.pNext = &y;
+      link<TypeList<List, X>, Y, Z...>();
+    }
+
+    template<typename List, typename X>
+    void linkAndCopy(StructureChain const &rhs) VULKAN_HPP_NOEXCEPT
+    {
+      static_assert(extendCheck<List, X>::valid, "The structure chain is not valid!");
+      static_cast<X&>(*this) = static_cast<X const &>(rhs);
+    }
+
+    template<typename List, typename X, typename Y, typename ...Z>
+    void linkAndCopy(StructureChain const &rhs) VULKAN_HPP_NOEXCEPT
+    {
+      static_assert(extendCheck<List, X>::valid, "The structure chain is not valid!");
+      X& x = static_cast<X&>(*this);
+      Y& y = static_cast<Y&>(*this);
+      x = static_cast<X const &>(rhs);
+      x.pNext = &y;
+      linkAndCopy<TypeList<List, X>, Y, Z...>(rhs);
+    }
+
+    template<typename List, typename X>
+    void linkAndCopyElements(X const &xelem) VULKAN_HPP_NOEXCEPT
+    {
+      static_assert(extendCheck<List, X>::valid, "The structure chain is not valid!");
+      static_cast<X&>(*this) = xelem;
+    }
+
+    template<typename List, typename X, typename Y, typename ...Z>
+    void linkAndCopyElements(X const &xelem, Y const &yelem, Z const &... zelem) VULKAN_HPP_NOEXCEPT
+    {
+      static_assert(extendCheck<List, X>::valid, "The structure chain is not valid!");
+      X& x = static_cast<X&>(*this);
+      Y& y = static_cast<Y&>(*this);
+      x = xelem;
+      x.pNext = &y;
+      linkAndCopyElements<TypeList<List, X>, Y, Z...>(yelem, zelem...);
+    }
+  };
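+
+  // Illustrative sketch (not from the generated header): StructureChain links a
+  // set of Vulkan structs through their pNext members at construction time and
+  // statically rejects combinations the registry does not allow.  With the
+  // structure types defined later in this header, for example:
+  //
+  //   vk::StructureChain<vk::PhysicalDeviceProperties2,
+  //                      vk::PhysicalDeviceIDProperties> chain;
+  //   auto & props2  = chain.get<vk::PhysicalDeviceProperties2>();   // head; pNext pre-linked
+  //   auto & idProps = chain.get<vk::PhysicalDeviceIDProperties>();  // extension struct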
+
+#if !defined(VULKAN_HPP_NO_SMART_HANDLE)
+  template <typename Type, typename Dispatch> class UniqueHandleTraits;
+
+  template <typename Type, typename Dispatch>
+  class UniqueHandle : public UniqueHandleTraits<Type,Dispatch>::deleter
+  {
+  private:
+    using Deleter = typename UniqueHandleTraits<Type,Dispatch>::deleter;
+
+  public:
+    using element_type = Type;
+
+    UniqueHandle()
+      : Deleter()
+      , m_value()
+    {}
+
+    explicit UniqueHandle( Type const& value, Deleter const& deleter = Deleter() ) VULKAN_HPP_NOEXCEPT
+      : Deleter( deleter)
+      , m_value( value )
+    {}
+
+    UniqueHandle( UniqueHandle const& ) = delete;
+
+    UniqueHandle( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT
+      : Deleter( std::move( static_cast<Deleter&>( other ) ) )
+      , m_value( other.release() )
+    {}
+
+    ~UniqueHandle() VULKAN_HPP_NOEXCEPT
+    {
+      if ( m_value ) this->destroy( m_value );
+    }
+
+    UniqueHandle & operator=( UniqueHandle const& ) = delete;
+
+    UniqueHandle & operator=( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT
+    {
+      reset( other.release() );
+      *static_cast<Deleter*>(this) = std::move( static_cast<Deleter&>(other) );
+      return *this;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_value.operator bool();
+    }
+
+    Type const* operator->() const VULKAN_HPP_NOEXCEPT
+    {
+      return &m_value;
+    }
+
+    Type * operator->() VULKAN_HPP_NOEXCEPT
+    {
+      return &m_value;
+    }
+
+    Type const& operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_value;
+    }
+
+    Type & operator*() VULKAN_HPP_NOEXCEPT
+    {
+      return m_value;
+    }
+
+    const Type & get() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_value;
+    }
+
+    Type & get() VULKAN_HPP_NOEXCEPT
+    {
+      return m_value;
+    }
+
+    void reset( Type const& value = Type() ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( m_value != value )
+      {
+        if ( m_value ) this->destroy( m_value );
+        m_value = value;
+      }
+    }
+
+    Type release() VULKAN_HPP_NOEXCEPT
+    {
+      Type value = m_value;
+      m_value = nullptr;
+      return value;
+    }
+
+    void swap( UniqueHandle<Type,Dispatch> & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      std::swap(m_value, rhs.m_value);
+      std::swap(static_cast<Deleter&>(*this), static_cast<Deleter&>(rhs));
+    }
+
+  private:
+    Type    m_value;
+  };
+
+  template <typename UniqueType>
+  VULKAN_HPP_INLINE std::vector<typename UniqueType::element_type> uniqueToRaw(std::vector<UniqueType> const& handles)
+  {
+    std::vector<typename UniqueType::element_type> newBuffer(handles.size());
+    std::transform(handles.begin(), handles.end(), newBuffer.begin(), [](UniqueType const& handle) { return handle.get(); });
+    return newBuffer;
+  }
+
+  template <typename Type, typename Dispatch>
+  VULKAN_HPP_INLINE void swap( UniqueHandle<Type,Dispatch> & lhs, UniqueHandle<Type,Dispatch> & rhs ) VULKAN_HPP_NOEXCEPT
+  {
+    lhs.swap( rhs );
+  }
+#endif
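+
+  // Illustrative sketch (not from the generated header): UniqueHandle couples a
+  // Vulkan handle with its deleter state for RAII-style ownership.  The
+  // *Unique creation helpers defined later in this header return them, e.g.
+  // (enhanced mode, exceptions enabled):
+  //
+  //   vk::UniqueBuffer buffer = device.createBufferUnique(bufferCreateInfo);
+  //   // *buffer / buffer.get() expose the vk::Buffer; the underlying buffer is
+  //   // destroyed automatically when 'buffer' goes out of scope.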
+
+#if !defined(VK_NO_PROTOTYPES)
+  class DispatchLoaderStatic
+  {
+  public:
+    VkResult vkCreateInstance( const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateInstance( pCreateInfo, pAllocator, pInstance );
+    }
+
+    VkResult vkEnumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties );
+    }
+
+    VkResult vkEnumerateInstanceLayerProperties( uint32_t* pPropertyCount, VkLayerProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties );
+    }
+
+    VkResult vkEnumerateInstanceVersion( uint32_t* pApiVersion ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumerateInstanceVersion( pApiVersion );
+    }
+
+    VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo );
+    }
+
+    void vkCmdBeginConditionalRenderingEXT( VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginConditionalRenderingEXT( commandBuffer, pConditionalRenderingBegin );
+    }
+
+    void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginDebugUtilsLabelEXT( commandBuffer, pLabelInfo );
+    }
+
+    void vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags );
+    }
+
+    void vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginQueryIndexedEXT( commandBuffer, queryPool, query, flags, index );
+    }
+
+    void vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginRenderPass( commandBuffer, pRenderPassBegin, contents );
+    }
+
+    void vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfoKHR* pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginRenderPass2KHR( commandBuffer, pRenderPassBegin, pSubpassBeginInfo );
+    }
+
+    void vkCmdBeginTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets );
+    }
+
+    void vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindDescriptorSets( commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets );
+    }
+
+    void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType );
+    }
+
+    void vkCmdBindPipeline( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline );
+    }
+
+    void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindShadingRateImageNV( commandBuffer, imageView, imageLayout );
+    }
+
+    void vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindTransformFeedbackBuffersEXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes );
+    }
+
+    void vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets );
+    }
+
+    void vkCmdBlitImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBlitImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter );
+    }
+
+    void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBuildAccelerationStructureNV( commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset );
+    }
+
+    void vkCmdClearAttachments( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdClearAttachments( commandBuffer, attachmentCount, pAttachments, rectCount, pRects );
+    }
+
+    void vkCmdClearColorImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdClearColorImage( commandBuffer, image, imageLayout, pColor, rangeCount, pRanges );
+    }
+
+    void vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges );
+    }
+
+    void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyAccelerationStructureNV( commandBuffer, dst, src, mode );
+    }
+
+    void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions );
+    }
+
+    void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions );
+    }
+
+    void vkCmdCopyImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
+    }
+
+    void vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions );
+    }
+
+    void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags );
+    }
+
+    void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDebugMarkerBeginEXT( commandBuffer, pMarkerInfo );
+    }
+
+    void vkCmdDebugMarkerEndEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDebugMarkerEndEXT( commandBuffer );
+    }
+
+    void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDebugMarkerInsertEXT( commandBuffer, pMarkerInfo );
+    }
+
+    void vkCmdDispatch( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ );
+    }
+
+    void vkCmdDispatchBase( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDispatchBase( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+    }
+
+    void vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDispatchBaseKHR( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+    }
+
+    void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset );
+    }
+
+    void vkCmdDraw( VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDraw( commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
+    }
+
+    void vkCmdDrawIndexed( VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndexed( commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
+    }
+
+    void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride );
+    }
+
+    void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndexedIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }
+
+    void vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndexedIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }
+
+    void vkCmdDrawIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride );
+    }
+
+    void vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndirectByteCountEXT( commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride );
+    }
+
+    void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }
+
+    void vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }
+
+    void vkCmdDrawMeshTasksIndirectCountNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawMeshTasksIndirectCountNV( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }
+
+    void vkCmdDrawMeshTasksIndirectNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawMeshTasksIndirectNV( commandBuffer, buffer, offset, drawCount, stride );
+    }
+
+    void vkCmdDrawMeshTasksNV( VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawMeshTasksNV( commandBuffer, taskCount, firstTask );
+    }
+
+    void vkCmdEndConditionalRenderingEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndConditionalRenderingEXT( commandBuffer );
+    }
+
+    void vkCmdEndDebugUtilsLabelEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndDebugUtilsLabelEXT( commandBuffer );
+    }
+
+    void vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndQuery( commandBuffer, queryPool, query );
+    }
+
+    void vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndQueryIndexedEXT( commandBuffer, queryPool, query, index );
+    }
+
+    void vkCmdEndRenderPass( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndRenderPass( commandBuffer );
+    }
+
+    void vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR* pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndRenderPass2KHR( commandBuffer, pSubpassEndInfo );
+    }
+
+    void vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets );
+    }
+
+    void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers );
+    }
+
+    void vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data );
+    }
+
+    void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdInsertDebugUtilsLabelEXT( commandBuffer, pLabelInfo );
+    }
+
+    void vkCmdNextSubpass( VkCommandBuffer commandBuffer, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdNextSubpass( commandBuffer, contents );
+    }
+
+    void vkCmdNextSubpass2KHR( VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR* pSubpassBeginInfo, const VkSubpassEndInfoKHR* pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdNextSubpass2KHR( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo );
+    }
+
+    void vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPipelineBarrier( commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers );
+    }
+
+    void vkCmdProcessCommandsNVX( VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdProcessCommandsNVX( commandBuffer, pProcessCommandsInfo );
+    }
+
+    void vkCmdPushConstants( VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPushConstants( commandBuffer, layout, stageFlags, offset, size, pValues );
+    }
+
+    void vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPushDescriptorSetKHR( commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites );
+    }
+
+    void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData );
+    }
+
+    void vkCmdReserveSpaceForCommandsNVX( VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdReserveSpaceForCommandsNVX( commandBuffer, pReserveSpaceInfo );
+    }
+
+    void vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdResetEvent( commandBuffer, event, stageMask );
+    }
+
+    void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount );
+    }
+
+    void vkCmdResolveImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdResolveImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
+    }
+
+    void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetBlendConstants( commandBuffer, blendConstants );
+    }
+
+    void vkCmdSetCheckpointNV( VkCommandBuffer commandBuffer, const void* pCheckpointMarker ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetCheckpointNV( commandBuffer, pCheckpointMarker );
+    }
+
+    void vkCmdSetCoarseSampleOrderNV( VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetCoarseSampleOrderNV( commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders );
+    }
+
+    void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
+    }
+
+    void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds );
+    }
+
+    void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDeviceMask( commandBuffer, deviceMask );
+    }
+
+    void vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDeviceMaskKHR( commandBuffer, deviceMask );
+    }
+
+    void vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDiscardRectangleEXT( commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles );
+    }
+
+    void vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetEvent( commandBuffer, event, stageMask );
+    }
+
+    void vkCmdSetExclusiveScissorNV( VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetExclusiveScissorNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors );
+    }
+
+    void vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetLineStippleEXT( commandBuffer, lineStippleFactor, lineStipplePattern );
+    }
+
+    void vkCmdSetLineWidth( VkCommandBuffer commandBuffer, float lineWidth ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetLineWidth( commandBuffer, lineWidth );
+    }
+
+    VkResult vkCmdSetPerformanceMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetPerformanceMarkerINTEL( commandBuffer, pMarkerInfo );
+    }
+
+    VkResult vkCmdSetPerformanceOverrideINTEL( VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetPerformanceOverrideINTEL( commandBuffer, pOverrideInfo );
+    }
+
+    VkResult vkCmdSetPerformanceStreamMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetPerformanceStreamMarkerINTEL( commandBuffer, pMarkerInfo );
+    }
+
+    void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo );
+    }
+
+    void vkCmdSetScissor( VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors );
+    }
+
+    void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask );
+    }
+
+    void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference );
+    }
+
+    void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask );
+    }
+
+    void vkCmdSetViewport( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetViewport( commandBuffer, firstViewport, viewportCount, pViewports );
+    }
+
+    void vkCmdSetViewportShadingRatePaletteNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetViewportShadingRatePaletteNV( commandBuffer, firstViewport, viewportCount, pShadingRatePalettes );
+    }
+
+    void vkCmdSetViewportWScalingNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings );
+    }
+
+    void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdTraceRaysNV( commandBuffer, raygenShaderBindingTableBuffer, raygenShaderBindingOffset, missShaderBindingTableBuffer, missShaderBindingOffset, missShaderBindingStride, hitShaderBindingTableBuffer, hitShaderBindingOffset, hitShaderBindingStride, callableShaderBindingTableBuffer, callableShaderBindingOffset, callableShaderBindingStride, width, height, depth );
+    }
+
+    void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData );
+    }
+
+    void vkCmdWaitEvents( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWaitEvents( commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers );
+    }
+
+    void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWriteAccelerationStructuresPropertiesNV( commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery );
+    }
+
+    void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker );
+    }
+
+    void vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query );
+    }
+
+    VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEndCommandBuffer( commandBuffer );
+    }
+
+    VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetCommandBuffer( commandBuffer, flags );
+    }
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VkResult vkAcquireFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquireFullScreenExclusiveModeEXT( device, swapchain );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex );
+    }
+
+    VkResult vkAcquireNextImageKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex );
+    }
+
+    VkResult vkAcquirePerformanceConfigurationINTEL( VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration );
+    }
+
+    VkResult vkAcquireProfilingLockKHR( VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquireProfilingLockKHR( device, pInfo );
+    }
+
+    VkResult vkAllocateCommandBuffers( VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers );
+    }
+
+    VkResult vkAllocateDescriptorSets( VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets );
+    }
+
+    VkResult vkAllocateMemory( VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory );
+    }
+
+    VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos );
+    }
+
+    VkResult vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindBufferMemory( device, buffer, memory, memoryOffset );
+    }
+
+    VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos );
+    }
+
+    VkResult vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos );
+    }
+
+    VkResult vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindImageMemory( device, image, memory, memoryOffset );
+    }
+
+    VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos );
+    }
+
+    VkResult vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos );
+    }
+
+    VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCompileDeferredNV( device, pipeline, shader );
+    }
+
+    VkResult vkCreateAccelerationStructureNV( VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure );
+    }
+
+    VkResult vkCreateBuffer( VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer );
+    }
+
+    VkResult vkCreateBufferView( VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateBufferView( device, pCreateInfo, pAllocator, pView );
+    }
+
+    VkResult vkCreateCommandPool( VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool );
+    }
+
+    VkResult vkCreateComputePipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
+    }
+
+    VkResult vkCreateDescriptorPool( VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool );
+    }
+
+    VkResult vkCreateDescriptorSetLayout( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout );
+    }
+
+    VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate );
+    }
+
+    VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate );
+    }
+
+    VkResult vkCreateEvent( VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent );
+    }
+
+    VkResult vkCreateFence( VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence );
+    }
+
+    VkResult vkCreateFramebuffer( VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer );
+    }
+
+    VkResult vkCreateGraphicsPipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateGraphicsPipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
+    }
+
+    VkResult vkCreateImage( VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage );
+    }
+
+    VkResult vkCreateImageView( VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView );
+    }
+
+    VkResult vkCreateIndirectCommandsLayoutNVX( VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateIndirectCommandsLayoutNVX( device, pCreateInfo, pAllocator, pIndirectCommandsLayout );
+    }
+
+    VkResult vkCreateObjectTableNVX( VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateObjectTableNVX( device, pCreateInfo, pAllocator, pObjectTable );
+    }
+
+    VkResult vkCreatePipelineCache( VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache );
+    }
+
+    VkResult vkCreatePipelineLayout( VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout );
+    }
+
+    VkResult vkCreateQueryPool( VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool );
+    }
+
+    VkResult vkCreateRayTracingPipelinesNV( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateRayTracingPipelinesNV( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
+    }
+
+    VkResult vkCreateRenderPass( VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass );
+    }
+
+    VkResult vkCreateRenderPass2KHR( VkDevice device, const VkRenderPassCreateInfo2KHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateRenderPass2KHR( device, pCreateInfo, pAllocator, pRenderPass );
+    }
+
+    VkResult vkCreateSampler( VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler );
+    }
+
+    VkResult vkCreateSamplerYcbcrConversion( VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion );
+    }
+
+    VkResult vkCreateSamplerYcbcrConversionKHR( VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSamplerYcbcrConversionKHR( device, pCreateInfo, pAllocator, pYcbcrConversion );
+    }
+
+    VkResult vkCreateSemaphore( VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSemaphore( device, pCreateInfo, pAllocator, pSemaphore );
+    }
+
+    VkResult vkCreateShaderModule( VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateShaderModule( device, pCreateInfo, pAllocator, pShaderModule );
+    }
+
+    VkResult vkCreateSharedSwapchainsKHR( VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains );
+    }
+
+    VkResult vkCreateSwapchainKHR( VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain );
+    }
+
+    VkResult vkCreateValidationCacheEXT( VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache );
+    }
+
+    VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo );
+    }
+
+    VkResult vkDebugMarkerSetObjectTagEXT( VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo );
+    }
+
+    void vkDestroyAccelerationStructureNV( VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyAccelerationStructureNV( device, accelerationStructure, pAllocator );
+    }
+
+    void vkDestroyBuffer( VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyBuffer( device, buffer, pAllocator );
+    }
+
+    void vkDestroyBufferView( VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyBufferView( device, bufferView, pAllocator );
+    }
+
+    void vkDestroyCommandPool( VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyCommandPool( device, commandPool, pAllocator );
+    }
+
+    void vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator );
+    }
+
+    void vkDestroyDescriptorSetLayout( VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDescriptorSetLayout( device, descriptorSetLayout, pAllocator );
+    }
+
+    void vkDestroyDescriptorUpdateTemplate( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDescriptorUpdateTemplate( device, descriptorUpdateTemplate, pAllocator );
+    }
+
+    void vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDescriptorUpdateTemplateKHR( device, descriptorUpdateTemplate, pAllocator );
+    }
+
+    void vkDestroyDevice( VkDevice device, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDevice( device, pAllocator );
+    }
+
+    void vkDestroyEvent( VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyEvent( device, event, pAllocator );
+    }
+
+    void vkDestroyFence( VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyFence( device, fence, pAllocator );
+    }
+
+    void vkDestroyFramebuffer( VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyFramebuffer( device, framebuffer, pAllocator );
+    }
+
+    void vkDestroyImage( VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyImage( device, image, pAllocator );
+    }
+
+    void vkDestroyImageView( VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyImageView( device, imageView, pAllocator );
+    }
+
+    void vkDestroyIndirectCommandsLayoutNVX( VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyIndirectCommandsLayoutNVX( device, indirectCommandsLayout, pAllocator );
+    }
+
+    void vkDestroyObjectTableNVX( VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyObjectTableNVX( device, objectTable, pAllocator );
+    }
+
+    void vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyPipeline( device, pipeline, pAllocator );
+    }
+
+    void vkDestroyPipelineCache( VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator );
+    }
+
+    void vkDestroyPipelineLayout( VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator );
+    }
+
+    void vkDestroyQueryPool( VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyQueryPool( device, queryPool, pAllocator );
+    }
+
+    void vkDestroyRenderPass( VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyRenderPass( device, renderPass, pAllocator );
+    }
+
+    void vkDestroySampler( VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySampler( device, sampler, pAllocator );
+    }
+
+    void vkDestroySamplerYcbcrConversion( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySamplerYcbcrConversion( device, ycbcrConversion, pAllocator );
+    }
+
+    void vkDestroySamplerYcbcrConversionKHR( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySamplerYcbcrConversionKHR( device, ycbcrConversion, pAllocator );
+    }
+
+    void vkDestroySemaphore( VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySemaphore( device, semaphore, pAllocator );
+    }
+
+    void vkDestroyShaderModule( VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyShaderModule( device, shaderModule, pAllocator );
+    }
+
+    void vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySwapchainKHR( device, swapchain, pAllocator );
+    }
+
+    void vkDestroyValidationCacheEXT( VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyValidationCacheEXT( device, validationCache, pAllocator );
+    }
+
+    VkResult vkDeviceWaitIdle( VkDevice device ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDeviceWaitIdle( device );
+    }
+
+    VkResult vkDisplayPowerControlEXT( VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo );
+    }
+
+    VkResult vkFlushMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges );
+    }
+
+    void vkFreeCommandBuffers( VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkFreeCommandBuffers( device, commandPool, commandBufferCount, pCommandBuffers );
+    }
+
+    VkResult vkFreeDescriptorSets( VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkFreeDescriptorSets( device, descriptorPool, descriptorSetCount, pDescriptorSets );
+    }
+
+    void vkFreeMemory( VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkFreeMemory( device, memory, pAllocator );
+    }
+
+    VkResult vkGetAccelerationStructureHandleNV( VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetAccelerationStructureHandleNV( device, accelerationStructure, dataSize, pData );
+    }
+
+    void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements );
+    }
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetAndroidHardwareBufferPropertiesANDROID( device, buffer, pProperties );
+    }
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+    VkDeviceAddress vkGetBufferDeviceAddressEXT( VkDevice device, const VkBufferDeviceAddressInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferDeviceAddressEXT( device, pInfo );
+    }
+
+    VkDeviceAddress vkGetBufferDeviceAddressKHR( VkDevice device, const VkBufferDeviceAddressInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferDeviceAddressKHR( device, pInfo );
+    }
+
+    void vkGetBufferMemoryRequirements( VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferMemoryRequirements( device, buffer, pMemoryRequirements );
+    }
+
+    void vkGetBufferMemoryRequirements2( VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements );
+    }
+
+    void vkGetBufferMemoryRequirements2KHR( VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements );
+    }
+
+    uint64_t vkGetBufferOpaqueCaptureAddressKHR( VkDevice device, const VkBufferDeviceAddressInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferOpaqueCaptureAddressKHR( device, pInfo );
+    }
+
+    VkResult vkGetCalibratedTimestampsEXT( VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation );
+    }
+
+    void vkGetDescriptorSetLayoutSupport( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport );
+    }
+
+    void vkGetDescriptorSetLayoutSupportKHR( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport );
+    }
+
+    void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures );
+    }
+
+    void vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceGroupPeerMemoryFeaturesKHR( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures );
+    }
+
+    VkResult vkGetDeviceGroupPresentCapabilitiesKHR( VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities );
+    }
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VkResult vkGetDeviceGroupSurfacePresentModes2EXT( VkDevice device, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkDeviceGroupPresentModeFlagsKHR* pModes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceGroupSurfacePresentModes2EXT( device, pSurfaceInfo, pModes );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    VkResult vkGetDeviceGroupSurfacePresentModesKHR( VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes );
+    }
+
+    void vkGetDeviceMemoryCommitment( VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceMemoryCommitment( device, memory, pCommittedMemoryInBytes );
+    }
+
+    uint64_t vkGetDeviceMemoryOpaqueCaptureAddressKHR( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceMemoryOpaqueCaptureAddressKHR( device, pInfo );
+    }
+
+    PFN_vkVoidFunction vkGetDeviceProcAddr( VkDevice device, const char* pName ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceProcAddr( device, pName );
+    }
+
+    void vkGetDeviceQueue( VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceQueue( device, queueFamilyIndex, queueIndex, pQueue );
+    }
+
+    void vkGetDeviceQueue2( VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue );
+    }
+
+    VkResult vkGetEventStatus( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetEventStatus( device, event );
+    }
+
+    VkResult vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetFenceFdKHR( device, pGetFdInfo, pFd );
+    }
+
+    VkResult vkGetFenceStatus( VkDevice device, VkFence fence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetFenceStatus( device, fence );
+    }
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VkResult vkGetFenceWin32HandleKHR( VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetFenceWin32HandleKHR( device, pGetWin32HandleInfo, pHandle );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    VkResult vkGetImageDrmFormatModifierPropertiesEXT( VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageDrmFormatModifierPropertiesEXT( device, image, pProperties );
+    }
+
+    void vkGetImageMemoryRequirements( VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageMemoryRequirements( device, image, pMemoryRequirements );
+    }
+
+    void vkGetImageMemoryRequirements2( VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements );
+    }
+
+    void vkGetImageMemoryRequirements2KHR( VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageMemoryRequirements2KHR( device, pInfo, pMemoryRequirements );
+    }
+
+    void vkGetImageSparseMemoryRequirements( VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageSparseMemoryRequirements( device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
+    }
+
+    void vkGetImageSparseMemoryRequirements2( VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageSparseMemoryRequirements2( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
+    }
+
+    void vkGetImageSparseMemoryRequirements2KHR( VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageSparseMemoryRequirements2KHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
+    }
+
+    void vkGetImageSubresourceLayout( VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageSubresourceLayout( device, image, pSubresource, pLayout );
+    }
+
+    uint32_t vkGetImageViewHandleNVX( VkDevice device, const VkImageViewHandleInfoNVX* pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageViewHandleNVX( device, pInfo );
+    }
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryAndroidHardwareBufferANDROID( device, pInfo, pBuffer );
+    }
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+    VkResult vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryFdKHR( device, pGetFdInfo, pFd );
+    }
+
+    VkResult vkGetMemoryFdPropertiesKHR( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryFdPropertiesKHR( device, handleType, fd, pMemoryFdProperties );
+    }
+
+    VkResult vkGetMemoryHostPointerPropertiesEXT( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryHostPointerPropertiesEXT( device, handleType, pHostPointer, pMemoryHostPointerProperties );
+    }
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VkResult vkGetMemoryWin32HandleKHR( VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryWin32HandleKHR( device, pGetWin32HandleInfo, pHandle );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VkResult vkGetMemoryWin32HandleNV( VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryWin32HandleNV( device, memory, handleType, pHandle );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VkResult vkGetMemoryWin32HandlePropertiesKHR( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryWin32HandlePropertiesKHR( device, handleType, handle, pMemoryWin32HandleProperties );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    VkResult vkGetPastPresentationTimingGOOGLE( VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPastPresentationTimingGOOGLE( device, swapchain, pPresentationTimingCount, pPresentationTimings );
+    }
+
+    VkResult vkGetPerformanceParameterINTEL( VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPerformanceParameterINTEL( device, parameter, pValue );
+    }
+
+    VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData );
+    }
+
+    VkResult vkGetPipelineExecutableInternalRepresentationsKHR( VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPipelineExecutableInternalRepresentationsKHR( device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations );
+    }
+
+    VkResult vkGetPipelineExecutablePropertiesKHR( VkDevice device, const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPipelineExecutablePropertiesKHR( device, pPipelineInfo, pExecutableCount, pProperties );
+    }
+
+    VkResult vkGetPipelineExecutableStatisticsKHR( VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPipelineExecutableStatisticsKHR( device, pExecutableInfo, pStatisticCount, pStatistics );
+    }
+
+    VkResult vkGetQueryPoolResults( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags );
+    }
+
+    VkResult vkGetRayTracingShaderGroupHandlesNV( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData );
+    }
+
+    VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties );
+    }
+
+    void vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity );
+    }
+
+    VkResult vkGetSemaphoreCounterValueKHR( VkDevice device, VkSemaphore semaphore, uint64_t* pValue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSemaphoreCounterValueKHR( device, semaphore, pValue );
+    }
+
+    VkResult vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd );
+    }
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VkResult vkGetSemaphoreWin32HandleKHR( VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    VkResult vkGetShaderInfoAMD( VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo );
+    }
+
+    VkResult vkGetSwapchainCounterEXT( VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue );
+    }
+
+    VkResult vkGetSwapchainImagesKHR( VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages );
+    }
+
+    VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSwapchainStatusKHR( device, swapchain );
+    }
+
+    VkResult vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData );
+    }
+
+    VkResult vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkImportFenceFdKHR( device, pImportFenceFdInfo );
+    }
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VkResult vkImportFenceWin32HandleKHR( VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    VkResult vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo );
+    }
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VkResult vkImportSemaphoreWin32HandleKHR( VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    VkResult vkInitializePerformanceApiINTEL( VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkInitializePerformanceApiINTEL( device, pInitializeInfo );
+    }
+
+    VkResult vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges );
+    }
+
+    VkResult vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkMapMemory( device, memory, offset, size, flags, ppData );
+    }
+
+    VkResult vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches );
+    }
+
+    VkResult vkMergeValidationCachesEXT( VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches );
+    }
+
+    VkResult vkRegisterDeviceEventEXT( VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence );
+    }
+
+    VkResult vkRegisterDisplayEventEXT( VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence );
+    }
+
+    VkResult vkRegisterObjectsNVX( VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkRegisterObjectsNVX( device, objectTable, objectCount, ppObjectTableEntries, pObjectIndices );
+    }
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VkResult vkReleaseFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkReleaseFullScreenExclusiveModeEXT( device, swapchain );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    VkResult vkReleasePerformanceConfigurationINTEL( VkDevice device, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkReleasePerformanceConfigurationINTEL( device, configuration );
+    }
+
+    void vkReleaseProfilingLockKHR( VkDevice device ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkReleaseProfilingLockKHR( device );
+    }
+
+    VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetCommandPool( device, commandPool, flags );
+    }
+
+    VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetDescriptorPool( device, descriptorPool, flags );
+    }
+
+    VkResult vkResetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetEvent( device, event );
+    }
+
+    VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetFences( device, fenceCount, pFences );
+    }
+
+    void vkResetQueryPoolEXT( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetQueryPoolEXT( device, queryPool, firstQuery, queryCount );
+    }
+
+    VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo );
+    }
+
+    VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo );
+    }
+
+    VkResult vkSetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetEvent( device, event );
+    }
+
+    void vkSetHdrMetadataEXT( VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata );
+    }
+
+    void vkSetLocalDimmingAMD( VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable );
+    }
+
+    VkResult vkSignalSemaphoreKHR( VkDevice device, const VkSemaphoreSignalInfoKHR* pSignalInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSignalSemaphoreKHR( device, pSignalInfo );
+    }
+
+    void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkTrimCommandPool( device, commandPool, flags );
+    }
+
+    void vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkTrimCommandPoolKHR( device, commandPool, flags );
+    }
+
+    void vkUninitializePerformanceApiINTEL( VkDevice device ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUninitializePerformanceApiINTEL( device );
+    }
+
+    void vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUnmapMemory( device, memory );
+    }
+
+    VkResult vkUnregisterObjectsNVX( VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUnregisterObjectsNVX( device, objectTable, objectCount, pObjectEntryTypes, pObjectIndices );
+    }
+
+    void vkUpdateDescriptorSetWithTemplate( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData );
+    }
+
+    void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData );
+    }
+
+    void vkUpdateDescriptorSets( VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUpdateDescriptorSets( device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies );
+    }
+
+    VkResult vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout );
+    }
+
+    VkResult vkWaitSemaphoresKHR( VkDevice device, const VkSemaphoreWaitInfoKHR* pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkWaitSemaphoresKHR( device, pWaitInfo, timeout );
+    }
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+    VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback );
+    }
+
+    VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger );
+    }
+
+    VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
+
+    VkResult vkCreateHeadlessSurfaceEXT( VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateHeadlessSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface );
+    }
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+    VkResult vkCreateIOSSurfaceMVK( VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+    VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+    VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+    VkResult vkCreateMetalSurfaceEXT( VkInstance instance, const VkMetalSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateMetalSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+#ifdef VK_USE_PLATFORM_GGP
+    VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance, const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateStreamDescriptorSurfaceGGP( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_GGP*/
+
+#ifdef VK_USE_PLATFORM_VI_NN
+    VkResult vkCreateViSurfaceNN( VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VkResult vkCreateWin32SurfaceKHR( VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    VkResult vkCreateXcbSurfaceKHR( VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    VkResult vkCreateXlibSurfaceKHR( VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+    void vkDebugReportMessageEXT( VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage );
+    }
+
+    void vkDestroyDebugReportCallbackEXT( VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator );
+    }
+
+    void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator );
+    }
+
+    void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyInstance( instance, pAllocator );
+    }
+
+    void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySurfaceKHR( instance, surface, pAllocator );
+    }
+
+    VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties );
+    }
+
+    VkResult vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumeratePhysicalDeviceGroupsKHR( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties );
+    }
+
+    VkResult vkEnumeratePhysicalDevices( VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices );
+    }
+
+    PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char* pName ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetInstanceProcAddr( instance, pName );
+    }
+
+    void vkSubmitDebugUtilsMessageEXT( VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData );
+    }
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display );
+    }
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+    VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice );
+    }
+
+    VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode );
+    }
+
+    VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties );
+    }
+
+    VkResult vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties );
+    }
+
+    VkResult vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters, VkPerformanceCounterDescriptionKHR* pCounterDescriptions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions );
+    }
+
+    VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties );
+    }
+
+    VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties );
+    }
+
+    VkResult vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities );
+    }
+
+    VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities );
+    }
+
+    VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays );
+    }
+
+    VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physicalDevice, pTimeDomainCount, pTimeDomains );
+    }
+
+    VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesNV* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, pPropertyCount, pProperties );
+    }
+
+    VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties );
+    }
+
+    VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties );
+    }
+
+    VkResult vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceDisplayProperties2KHR( physicalDevice, pPropertyCount, pProperties );
+    }
+
+    VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties );
+    }
+
+    void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalBufferProperties( physicalDevice, pExternalBufferInfo, pExternalBufferProperties );
+    }
+
+    void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( physicalDevice, pExternalBufferInfo, pExternalBufferProperties );
+    }
+
+    void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalFenceProperties( physicalDevice, pExternalFenceInfo, pExternalFenceProperties );
+    }
+
+    void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( physicalDevice, pExternalFenceInfo, pExternalFenceProperties );
+    }
+
+    VkResult vkGetPhysicalDeviceExternalImageFormatPropertiesNV( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV( physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties );
+    }
+
+    void vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties );
+    }
+
+    void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties );
+    }
+
+    void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures );
+    }
+
+    void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures );
+    }
+
+    void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures );
+    }
+
+    void vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties );
+    }
+
+    void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties );
+    }
+
+    void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties );
+    }
+
+    void vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( VkPhysicalDevice physicalDevice, VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, VkDeviceGeneratedCommandsLimitsNVX* pLimits ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( physicalDevice, pFeatures, pLimits );
+    }
+
+    VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceImageFormatProperties( physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties );
+    }
+
+    VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties );
+    }
+
+    VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties );
+    }
+
+    void vkGetPhysicalDeviceMemoryProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties );
+    }
+
+    void vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties );
+    }
+
+    void vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties );
+    }
+
+    void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties );
+    }
+
+    VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects );
+    }
+
+    void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties );
+    }
+
+    void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties );
+    }
+
+    void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties );
+    }
+
+    void vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( physicalDevice, pPerformanceQueryCreateInfo, pNumPasses );
+    }
+
+    void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceQueueFamilyProperties( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
+    }
+
+    void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
+    }
+
+    void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
+    }
+
+    void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSparseImageFormatProperties( physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties );
+    }
+
+    void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSparseImageFormatProperties2( physicalDevice, pFormatInfo, pPropertyCount, pProperties );
+    }
+
+    void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( physicalDevice, pFormatInfo, pPropertyCount, pProperties );
+    }
+
+    VkResult vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( VkPhysicalDevice physicalDevice, uint32_t* pCombinationCount, VkFramebufferMixedSamplesCombinationNV* pCombinations ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( physicalDevice, pCombinationCount, pCombinations );
+    }
+
+    VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities );
+    }
+
+    VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities );
+    }
+
+    VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities );
+    }
+
+    VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfaceFormats2KHR( physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats );
+    }
+
+    VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats );
+    }
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VkResult vkGetPhysicalDeviceSurfacePresentModes2EXT( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfacePresentModes2EXT( physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes );
+    }
+
+    VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported );
+    }
+
+    VkResult vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolPropertiesEXT* pToolProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceToolPropertiesEXT( physicalDevice, pToolCount, pToolProperties );
+    }
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display );
+    }
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id );
+    }
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID );
+    }
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    VkResult vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay );
+    }
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+    VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkReleaseDisplayEXT( physicalDevice, display );
+    }
+
+    void vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData );
+    }
+
+    void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueBeginDebugUtilsLabelEXT( queue, pLabelInfo );
+    }
+
+    VkResult vkQueueBindSparse( VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence );
+    }
+
+    void vkQueueEndDebugUtilsLabelEXT( VkQueue queue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueEndDebugUtilsLabelEXT( queue );
+    }
+
+    void vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueInsertDebugUtilsLabelEXT( queue, pLabelInfo );
+    }
+
+    VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR* pPresentInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueuePresentKHR( queue, pPresentInfo );
+    }
+
+    VkResult vkQueueSetPerformanceConfigurationINTEL( VkQueue queue, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueSetPerformanceConfigurationINTEL( queue, configuration );
+    }
+
+    VkResult vkQueueSubmit( VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueSubmit( queue, submitCount, pSubmits, fence );
+    }
+
+    VkResult vkQueueWaitIdle( VkQueue queue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueWaitIdle( queue );
+    }
+  };
+#endif
+
+  class DispatchLoaderDynamic;
+#if !defined(VULKAN_HPP_DISPATCH_LOADER_DYNAMIC)
+# if defined(VK_NO_PROTOTYPES)
+#  define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 1
+# else
+#  define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 0
+# endif
+#endif
+
+#if !defined(VULKAN_HPP_DEFAULT_DISPATCHER)
+# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1
+#  define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::defaultDispatchLoaderDynamic
+#  define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE namespace VULKAN_HPP_NAMESPACE { DispatchLoaderDynamic defaultDispatchLoaderDynamic; }
+  extern DispatchLoaderDynamic defaultDispatchLoaderDynamic;
+# else
+#  define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic()
+#  define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE
+# endif
+#endif
+
+#if !defined(VULKAN_HPP_DEFAULT_DISPATCHER_TYPE)
+# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1
+#  define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic
+# else
+#  define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic
+# endif
+#endif
+
+  struct AllocationCallbacks;
+
+  template <typename OwnerType, typename Dispatch>
+  class ObjectDestroy
+  {
+    public:
+      ObjectDestroy()
+        : m_owner()
+        , m_allocationCallbacks( nullptr )
+        , m_dispatch( nullptr )
+      {}
+
+      ObjectDestroy( OwnerType owner, Optional<const AllocationCallbacks> allocationCallbacks, Dispatch const &dispatch ) VULKAN_HPP_NOEXCEPT
+        : m_owner( owner )
+        , m_allocationCallbacks( allocationCallbacks )
+        , m_dispatch( &dispatch )
+      {}
+
+      OwnerType getOwner() const VULKAN_HPP_NOEXCEPT { return m_owner; }
+      Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT { return m_allocationCallbacks; }
+
+    protected:
+      template <typename T>
+      void destroy(T t) VULKAN_HPP_NOEXCEPT
+      {
+        assert( m_owner && m_dispatch );
+        m_owner.destroy( t, m_allocationCallbacks, *m_dispatch );
+      }
+
+    private:
+      OwnerType m_owner;
+      Optional<const AllocationCallbacks> m_allocationCallbacks;
+      Dispatch const* m_dispatch;
+  };
+
+  class NoParent;
+
+  template <typename Dispatch>
+  class ObjectDestroy<NoParent,Dispatch>
+  {
+    public:
+      ObjectDestroy()
+        : m_allocationCallbacks( nullptr )
+        , m_dispatch( nullptr )
+      {}
+
+      ObjectDestroy( Optional<const AllocationCallbacks> allocationCallbacks, Dispatch const &dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
+        : m_allocationCallbacks( allocationCallbacks )
+        , m_dispatch( &dispatch )
+      {}
+
+      Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT { return m_allocationCallbacks; }
+
+    protected:
+      template <typename T>
+      void destroy(T t) VULKAN_HPP_NOEXCEPT
+      {
+        assert( m_dispatch );
+        t.destroy( m_allocationCallbacks, *m_dispatch );
+      }
+
+    private:
+      Optional<const AllocationCallbacks> m_allocationCallbacks;
+      Dispatch const* m_dispatch;
+  };
+
+  template <typename OwnerType, typename Dispatch>
+  class ObjectFree
+  {
+    public:
+      ObjectFree()
+        : m_owner()
+        , m_allocationCallbacks( nullptr )
+        , m_dispatch( nullptr )
+      {}
+
+      ObjectFree( OwnerType owner, Optional<const AllocationCallbacks> allocationCallbacks, Dispatch const &dispatch ) VULKAN_HPP_NOEXCEPT
+        : m_owner( owner )
+        , m_allocationCallbacks( allocationCallbacks )
+        , m_dispatch( &dispatch )
+      {}
+
+      OwnerType getOwner() const VULKAN_HPP_NOEXCEPT { return m_owner; }
+      Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT { return m_allocationCallbacks; }
+
+    protected:
+      template <typename T>
+      void destroy(T t) VULKAN_HPP_NOEXCEPT
+      {
+        assert( m_owner && m_dispatch );
+        m_owner.free( t, m_allocationCallbacks, *m_dispatch );
+      }
+
+    private:
+      OwnerType m_owner;
+      Optional<const AllocationCallbacks> m_allocationCallbacks;
+      Dispatch const* m_dispatch;
+  };
+
+  template <typename OwnerType, typename PoolType, typename Dispatch>
+  class PoolFree
+  {
+    public:
+      PoolFree( OwnerType owner = OwnerType(), PoolType pool = PoolType(), Dispatch const &dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
+        : m_owner( owner )
+        , m_pool( pool )
+        , m_dispatch( &dispatch )
+      {}
+
+      OwnerType getOwner() const VULKAN_HPP_NOEXCEPT { return m_owner; }
+      PoolType getPool() const VULKAN_HPP_NOEXCEPT { return m_pool; }
+
+    protected:
+      template <typename T>
+      void destroy(T t) VULKAN_HPP_NOEXCEPT
+      {
+        m_owner.free( m_pool, t, *m_dispatch );
+      }
+
+    private:
+      OwnerType m_owner;
+      PoolType m_pool;
+      Dispatch const* m_dispatch;
+  };
+
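+  // PoolFree handles pool-allocated objects: destroy() returns the object to its
+  // pool rather than destroying it outright, e.g. a CommandBuffer back to its
+  // CommandPool or a DescriptorSet back to its DescriptorPool. Illustrative
+  // sketch (assuming the usual allocateCommandBuffersUnique helper further down
+  // in this header):
+  //
+  //   auto buffers = device.allocateCommandBuffersUnique( allocateInfo );
+  //   // each ~UniqueCommandBuffer() ends up in
+  //   // PoolFree<Device, CommandPool, Dispatch>::destroy(), i.e.
+  //   // device.free( commandPool, commandBuffer, dispatch );
+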
+  template <typename T, size_t N, size_t I>
+  class ConstExpressionArrayCopy
+  {
+    public:
+      VULKAN_HPP_CONSTEXPR_14 static void copy(T dst[N], std::array<T,N> const& src) VULKAN_HPP_NOEXCEPT
+      {
+        dst[I-1] = src[I-1];
+        ConstExpressionArrayCopy<T, N, I - 1>::copy(dst, src);
+      }
+  };
+
+  template <typename T, size_t N>
+  class ConstExpressionArrayCopy<T, N, 0>
+  {
+    public:
+      VULKAN_HPP_CONSTEXPR_14 static void copy(T /*dst*/[N], std::array<T,N> const& /*src*/) VULKAN_HPP_NOEXCEPT {}
+  };
+
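+  // ConstExpressionArrayCopy unrolls an element-wise copy from a std::array into
+  // a plain C array in a constexpr-friendly way: each instantiation copies
+  // element I-1 and recurses on I-1 until the I == 0 specialization ends the
+  // recursion. Illustrative sketch:
+  //
+  //   std::array<uint32_t, 3> src = { { 1, 2, 3 } };
+  //   uint32_t dst[3];
+  //   ConstExpressionArrayCopy<uint32_t, 3, 3>::copy( dst, src );  // dst is now {1, 2, 3}
+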
+  using Bool32 = uint32_t;
+  using DeviceAddress = uint64_t;
+  using DeviceSize = uint64_t;
+  using SampleMask = uint32_t;
+
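+  // These aliases mirror the C typedefs VkBool32, VkDeviceAddress, VkDeviceSize
+  // and VkSampleMask, so values convert freely between the C and C++ bindings.
+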
+  enum class AccelerationStructureMemoryRequirementsTypeNV
+  {
+    eObject = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV,
+    eBuildScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV,
+    eUpdateScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMemoryRequirementsTypeNV value )
+  {
+    switch ( value )
+    {
+      case AccelerationStructureMemoryRequirementsTypeNV::eObject : return "Object";
+      case AccelerationStructureMemoryRequirementsTypeNV::eBuildScratch : return "BuildScratch";
+      case AccelerationStructureMemoryRequirementsTypeNV::eUpdateScratch : return "UpdateScratch";
+      default: return "invalid";
+    }
+  }
+
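+  // The enums below all follow the same pattern: a scoped enum whose enumerators
+  // wrap the corresponding VK_* constants, plus a to_string() overload that maps
+  // a value to its short name, which is convenient for logging. For example
+  // (with the default vk namespace):
+  //
+  //   vk::to_string( vk::AccelerationStructureMemoryRequirementsTypeNV::eObject )
+  //   // yields "Object"; unknown values yield "invalid".
+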
+  enum class AccelerationStructureTypeNV
+  {
+    eTopLevel = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV,
+    eBottomLevel = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( AccelerationStructureTypeNV value )
+  {
+    switch ( value )
+    {
+      case AccelerationStructureTypeNV::eTopLevel : return "TopLevel";
+      case AccelerationStructureTypeNV::eBottomLevel : return "BottomLevel";
+      default: return "invalid";
+    }
+  }
+
+  enum class AttachmentLoadOp
+  {
+    eLoad = VK_ATTACHMENT_LOAD_OP_LOAD,
+    eClear = VK_ATTACHMENT_LOAD_OP_CLEAR,
+    eDontCare = VK_ATTACHMENT_LOAD_OP_DONT_CARE
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( AttachmentLoadOp value )
+  {
+    switch ( value )
+    {
+      case AttachmentLoadOp::eLoad : return "Load";
+      case AttachmentLoadOp::eClear : return "Clear";
+      case AttachmentLoadOp::eDontCare : return "DontCare";
+      default: return "invalid";
+    }
+  }
+
+  enum class AttachmentStoreOp
+  {
+    eStore = VK_ATTACHMENT_STORE_OP_STORE,
+    eDontCare = VK_ATTACHMENT_STORE_OP_DONT_CARE
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( AttachmentStoreOp value )
+  {
+    switch ( value )
+    {
+      case AttachmentStoreOp::eStore : return "Store";
+      case AttachmentStoreOp::eDontCare : return "DontCare";
+      default: return "invalid";
+    }
+  }
+
+  enum class BlendFactor
+  {
+    eZero = VK_BLEND_FACTOR_ZERO,
+    eOne = VK_BLEND_FACTOR_ONE,
+    eSrcColor = VK_BLEND_FACTOR_SRC_COLOR,
+    eOneMinusSrcColor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR,
+    eDstColor = VK_BLEND_FACTOR_DST_COLOR,
+    eOneMinusDstColor = VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR,
+    eSrcAlpha = VK_BLEND_FACTOR_SRC_ALPHA,
+    eOneMinusSrcAlpha = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
+    eDstAlpha = VK_BLEND_FACTOR_DST_ALPHA,
+    eOneMinusDstAlpha = VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA,
+    eConstantColor = VK_BLEND_FACTOR_CONSTANT_COLOR,
+    eOneMinusConstantColor = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR,
+    eConstantAlpha = VK_BLEND_FACTOR_CONSTANT_ALPHA,
+    eOneMinusConstantAlpha = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA,
+    eSrcAlphaSaturate = VK_BLEND_FACTOR_SRC_ALPHA_SATURATE,
+    eSrc1Color = VK_BLEND_FACTOR_SRC1_COLOR,
+    eOneMinusSrc1Color = VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR,
+    eSrc1Alpha = VK_BLEND_FACTOR_SRC1_ALPHA,
+    eOneMinusSrc1Alpha = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( BlendFactor value )
+  {
+    switch ( value )
+    {
+      case BlendFactor::eZero : return "Zero";
+      case BlendFactor::eOne : return "One";
+      case BlendFactor::eSrcColor : return "SrcColor";
+      case BlendFactor::eOneMinusSrcColor : return "OneMinusSrcColor";
+      case BlendFactor::eDstColor : return "DstColor";
+      case BlendFactor::eOneMinusDstColor : return "OneMinusDstColor";
+      case BlendFactor::eSrcAlpha : return "SrcAlpha";
+      case BlendFactor::eOneMinusSrcAlpha : return "OneMinusSrcAlpha";
+      case BlendFactor::eDstAlpha : return "DstAlpha";
+      case BlendFactor::eOneMinusDstAlpha : return "OneMinusDstAlpha";
+      case BlendFactor::eConstantColor : return "ConstantColor";
+      case BlendFactor::eOneMinusConstantColor : return "OneMinusConstantColor";
+      case BlendFactor::eConstantAlpha : return "ConstantAlpha";
+      case BlendFactor::eOneMinusConstantAlpha : return "OneMinusConstantAlpha";
+      case BlendFactor::eSrcAlphaSaturate : return "SrcAlphaSaturate";
+      case BlendFactor::eSrc1Color : return "Src1Color";
+      case BlendFactor::eOneMinusSrc1Color : return "OneMinusSrc1Color";
+      case BlendFactor::eSrc1Alpha : return "Src1Alpha";
+      case BlendFactor::eOneMinusSrc1Alpha : return "OneMinusSrc1Alpha";
+      default: return "invalid";
+    }
+  }
+
+  enum class BlendOp
+  {
+    eAdd = VK_BLEND_OP_ADD,
+    eSubtract = VK_BLEND_OP_SUBTRACT,
+    eReverseSubtract = VK_BLEND_OP_REVERSE_SUBTRACT,
+    eMin = VK_BLEND_OP_MIN,
+    eMax = VK_BLEND_OP_MAX,
+    eZeroEXT = VK_BLEND_OP_ZERO_EXT,
+    eSrcEXT = VK_BLEND_OP_SRC_EXT,
+    eDstEXT = VK_BLEND_OP_DST_EXT,
+    eSrcOverEXT = VK_BLEND_OP_SRC_OVER_EXT,
+    eDstOverEXT = VK_BLEND_OP_DST_OVER_EXT,
+    eSrcInEXT = VK_BLEND_OP_SRC_IN_EXT,
+    eDstInEXT = VK_BLEND_OP_DST_IN_EXT,
+    eSrcOutEXT = VK_BLEND_OP_SRC_OUT_EXT,
+    eDstOutEXT = VK_BLEND_OP_DST_OUT_EXT,
+    eSrcAtopEXT = VK_BLEND_OP_SRC_ATOP_EXT,
+    eDstAtopEXT = VK_BLEND_OP_DST_ATOP_EXT,
+    eXorEXT = VK_BLEND_OP_XOR_EXT,
+    eMultiplyEXT = VK_BLEND_OP_MULTIPLY_EXT,
+    eScreenEXT = VK_BLEND_OP_SCREEN_EXT,
+    eOverlayEXT = VK_BLEND_OP_OVERLAY_EXT,
+    eDarkenEXT = VK_BLEND_OP_DARKEN_EXT,
+    eLightenEXT = VK_BLEND_OP_LIGHTEN_EXT,
+    eColordodgeEXT = VK_BLEND_OP_COLORDODGE_EXT,
+    eColorburnEXT = VK_BLEND_OP_COLORBURN_EXT,
+    eHardlightEXT = VK_BLEND_OP_HARDLIGHT_EXT,
+    eSoftlightEXT = VK_BLEND_OP_SOFTLIGHT_EXT,
+    eDifferenceEXT = VK_BLEND_OP_DIFFERENCE_EXT,
+    eExclusionEXT = VK_BLEND_OP_EXCLUSION_EXT,
+    eInvertEXT = VK_BLEND_OP_INVERT_EXT,
+    eInvertRgbEXT = VK_BLEND_OP_INVERT_RGB_EXT,
+    eLineardodgeEXT = VK_BLEND_OP_LINEARDODGE_EXT,
+    eLinearburnEXT = VK_BLEND_OP_LINEARBURN_EXT,
+    eVividlightEXT = VK_BLEND_OP_VIVIDLIGHT_EXT,
+    eLinearlightEXT = VK_BLEND_OP_LINEARLIGHT_EXT,
+    ePinlightEXT = VK_BLEND_OP_PINLIGHT_EXT,
+    eHardmixEXT = VK_BLEND_OP_HARDMIX_EXT,
+    eHslHueEXT = VK_BLEND_OP_HSL_HUE_EXT,
+    eHslSaturationEXT = VK_BLEND_OP_HSL_SATURATION_EXT,
+    eHslColorEXT = VK_BLEND_OP_HSL_COLOR_EXT,
+    eHslLuminosityEXT = VK_BLEND_OP_HSL_LUMINOSITY_EXT,
+    ePlusEXT = VK_BLEND_OP_PLUS_EXT,
+    ePlusClampedEXT = VK_BLEND_OP_PLUS_CLAMPED_EXT,
+    ePlusClampedAlphaEXT = VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT,
+    ePlusDarkerEXT = VK_BLEND_OP_PLUS_DARKER_EXT,
+    eMinusEXT = VK_BLEND_OP_MINUS_EXT,
+    eMinusClampedEXT = VK_BLEND_OP_MINUS_CLAMPED_EXT,
+    eContrastEXT = VK_BLEND_OP_CONTRAST_EXT,
+    eInvertOvgEXT = VK_BLEND_OP_INVERT_OVG_EXT,
+    eRedEXT = VK_BLEND_OP_RED_EXT,
+    eGreenEXT = VK_BLEND_OP_GREEN_EXT,
+    eBlueEXT = VK_BLEND_OP_BLUE_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( BlendOp value )
+  {
+    switch ( value )
+    {
+      case BlendOp::eAdd : return "Add";
+      case BlendOp::eSubtract : return "Subtract";
+      case BlendOp::eReverseSubtract : return "ReverseSubtract";
+      case BlendOp::eMin : return "Min";
+      case BlendOp::eMax : return "Max";
+      case BlendOp::eZeroEXT : return "ZeroEXT";
+      case BlendOp::eSrcEXT : return "SrcEXT";
+      case BlendOp::eDstEXT : return "DstEXT";
+      case BlendOp::eSrcOverEXT : return "SrcOverEXT";
+      case BlendOp::eDstOverEXT : return "DstOverEXT";
+      case BlendOp::eSrcInEXT : return "SrcInEXT";
+      case BlendOp::eDstInEXT : return "DstInEXT";
+      case BlendOp::eSrcOutEXT : return "SrcOutEXT";
+      case BlendOp::eDstOutEXT : return "DstOutEXT";
+      case BlendOp::eSrcAtopEXT : return "SrcAtopEXT";
+      case BlendOp::eDstAtopEXT : return "DstAtopEXT";
+      case BlendOp::eXorEXT : return "XorEXT";
+      case BlendOp::eMultiplyEXT : return "MultiplyEXT";
+      case BlendOp::eScreenEXT : return "ScreenEXT";
+      case BlendOp::eOverlayEXT : return "OverlayEXT";
+      case BlendOp::eDarkenEXT : return "DarkenEXT";
+      case BlendOp::eLightenEXT : return "LightenEXT";
+      case BlendOp::eColordodgeEXT : return "ColordodgeEXT";
+      case BlendOp::eColorburnEXT : return "ColorburnEXT";
+      case BlendOp::eHardlightEXT : return "HardlightEXT";
+      case BlendOp::eSoftlightEXT : return "SoftlightEXT";
+      case BlendOp::eDifferenceEXT : return "DifferenceEXT";
+      case BlendOp::eExclusionEXT : return "ExclusionEXT";
+      case BlendOp::eInvertEXT : return "InvertEXT";
+      case BlendOp::eInvertRgbEXT : return "InvertRgbEXT";
+      case BlendOp::eLineardodgeEXT : return "LineardodgeEXT";
+      case BlendOp::eLinearburnEXT : return "LinearburnEXT";
+      case BlendOp::eVividlightEXT : return "VividlightEXT";
+      case BlendOp::eLinearlightEXT : return "LinearlightEXT";
+      case BlendOp::ePinlightEXT : return "PinlightEXT";
+      case BlendOp::eHardmixEXT : return "HardmixEXT";
+      case BlendOp::eHslHueEXT : return "HslHueEXT";
+      case BlendOp::eHslSaturationEXT : return "HslSaturationEXT";
+      case BlendOp::eHslColorEXT : return "HslColorEXT";
+      case BlendOp::eHslLuminosityEXT : return "HslLuminosityEXT";
+      case BlendOp::ePlusEXT : return "PlusEXT";
+      case BlendOp::ePlusClampedEXT : return "PlusClampedEXT";
+      case BlendOp::ePlusClampedAlphaEXT : return "PlusClampedAlphaEXT";
+      case BlendOp::ePlusDarkerEXT : return "PlusDarkerEXT";
+      case BlendOp::eMinusEXT : return "MinusEXT";
+      case BlendOp::eMinusClampedEXT : return "MinusClampedEXT";
+      case BlendOp::eContrastEXT : return "ContrastEXT";
+      case BlendOp::eInvertOvgEXT : return "InvertOvgEXT";
+      case BlendOp::eRedEXT : return "RedEXT";
+      case BlendOp::eGreenEXT : return "GreenEXT";
+      case BlendOp::eBlueEXT : return "BlueEXT";
+      default: return "invalid";
+    }
+  }
+
+  enum class BlendOverlapEXT
+  {
+    eUncorrelated = VK_BLEND_OVERLAP_UNCORRELATED_EXT,
+    eDisjoint = VK_BLEND_OVERLAP_DISJOINT_EXT,
+    eConjoint = VK_BLEND_OVERLAP_CONJOINT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( BlendOverlapEXT value )
+  {
+    switch ( value )
+    {
+      case BlendOverlapEXT::eUncorrelated : return "Uncorrelated";
+      case BlendOverlapEXT::eDisjoint : return "Disjoint";
+      case BlendOverlapEXT::eConjoint : return "Conjoint";
+      default: return "invalid";
+    }
+  }
+
+  enum class BorderColor
+  {
+    eFloatTransparentBlack = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
+    eIntTransparentBlack = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK,
+    eFloatOpaqueBlack = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK,
+    eIntOpaqueBlack = VK_BORDER_COLOR_INT_OPAQUE_BLACK,
+    eFloatOpaqueWhite = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
+    eIntOpaqueWhite = VK_BORDER_COLOR_INT_OPAQUE_WHITE
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( BorderColor value )
+  {
+    switch ( value )
+    {
+      case BorderColor::eFloatTransparentBlack : return "FloatTransparentBlack";
+      case BorderColor::eIntTransparentBlack : return "IntTransparentBlack";
+      case BorderColor::eFloatOpaqueBlack : return "FloatOpaqueBlack";
+      case BorderColor::eIntOpaqueBlack : return "IntOpaqueBlack";
+      case BorderColor::eFloatOpaqueWhite : return "FloatOpaqueWhite";
+      case BorderColor::eIntOpaqueWhite : return "IntOpaqueWhite";
+      default: return "invalid";
+    }
+  }
+
+  enum class ChromaLocation
+  {
+    eCositedEven = VK_CHROMA_LOCATION_COSITED_EVEN,
+    eMidpoint = VK_CHROMA_LOCATION_MIDPOINT,
+    eCositedEvenKHR = VK_CHROMA_LOCATION_COSITED_EVEN_KHR,
+    eMidpointKHR = VK_CHROMA_LOCATION_MIDPOINT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ChromaLocation value )
+  {
+    switch ( value )
+    {
+      case ChromaLocation::eCositedEven : return "CositedEven";
+      case ChromaLocation::eMidpoint : return "Midpoint";
+      default: return "invalid";
+    }
+  }
+
+  enum class CoarseSampleOrderTypeNV
+  {
+    eDefault = VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV,
+    eCustom = VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV,
+    ePixelMajor = VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV,
+    eSampleMajor = VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( CoarseSampleOrderTypeNV value )
+  {
+    switch ( value )
+    {
+      case CoarseSampleOrderTypeNV::eDefault : return "Default";
+      case CoarseSampleOrderTypeNV::eCustom : return "Custom";
+      case CoarseSampleOrderTypeNV::ePixelMajor : return "PixelMajor";
+      case CoarseSampleOrderTypeNV::eSampleMajor : return "SampleMajor";
+      default: return "invalid";
+    }
+  }
+
+  enum class ColorSpaceKHR
+  {
+    eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,
+    eDisplayP3NonlinearEXT = VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT,
+    eExtendedSrgbLinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT,
+    eDisplayP3LinearEXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT,
+    eDciP3NonlinearEXT = VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT,
+    eBt709LinearEXT = VK_COLOR_SPACE_BT709_LINEAR_EXT,
+    eBt709NonlinearEXT = VK_COLOR_SPACE_BT709_NONLINEAR_EXT,
+    eBt2020LinearEXT = VK_COLOR_SPACE_BT2020_LINEAR_EXT,
+    eHdr10St2084EXT = VK_COLOR_SPACE_HDR10_ST2084_EXT,
+    eDolbyvisionEXT = VK_COLOR_SPACE_DOLBYVISION_EXT,
+    eHdr10HlgEXT = VK_COLOR_SPACE_HDR10_HLG_EXT,
+    eAdobergbLinearEXT = VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT,
+    eAdobergbNonlinearEXT = VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT,
+    ePassThroughEXT = VK_COLOR_SPACE_PASS_THROUGH_EXT,
+    eExtendedSrgbNonlinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT,
+    eDisplayNativeAMD = VK_COLOR_SPACE_DISPLAY_NATIVE_AMD,
+    eVkColorspaceSrgbNonlinear = VK_COLORSPACE_SRGB_NONLINEAR_KHR,
+    eDciP3LinearEXT = VK_COLOR_SPACE_DCI_P3_LINEAR_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ColorSpaceKHR value )
+  {
+    switch ( value )
+    {
+      case ColorSpaceKHR::eSrgbNonlinear : return "SrgbNonlinear";
+      case ColorSpaceKHR::eDisplayP3NonlinearEXT : return "DisplayP3NonlinearEXT";
+      case ColorSpaceKHR::eExtendedSrgbLinearEXT : return "ExtendedSrgbLinearEXT";
+      case ColorSpaceKHR::eDisplayP3LinearEXT : return "DisplayP3LinearEXT";
+      case ColorSpaceKHR::eDciP3NonlinearEXT : return "DciP3NonlinearEXT";
+      case ColorSpaceKHR::eBt709LinearEXT : return "Bt709LinearEXT";
+      case ColorSpaceKHR::eBt709NonlinearEXT : return "Bt709NonlinearEXT";
+      case ColorSpaceKHR::eBt2020LinearEXT : return "Bt2020LinearEXT";
+      case ColorSpaceKHR::eHdr10St2084EXT : return "Hdr10St2084EXT";
+      case ColorSpaceKHR::eDolbyvisionEXT : return "DolbyvisionEXT";
+      case ColorSpaceKHR::eHdr10HlgEXT : return "Hdr10HlgEXT";
+      case ColorSpaceKHR::eAdobergbLinearEXT : return "AdobergbLinearEXT";
+      case ColorSpaceKHR::eAdobergbNonlinearEXT : return "AdobergbNonlinearEXT";
+      case ColorSpaceKHR::ePassThroughEXT : return "PassThroughEXT";
+      case ColorSpaceKHR::eExtendedSrgbNonlinearEXT : return "ExtendedSrgbNonlinearEXT";
+      case ColorSpaceKHR::eDisplayNativeAMD : return "DisplayNativeAMD";
+      default: return "invalid";
+    }
+  }
+
+  enum class CommandBufferLevel
+  {
+    ePrimary = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
+    eSecondary = VK_COMMAND_BUFFER_LEVEL_SECONDARY
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( CommandBufferLevel value )
+  {
+    switch ( value )
+    {
+      case CommandBufferLevel::ePrimary : return "Primary";
+      case CommandBufferLevel::eSecondary : return "Secondary";
+      default: return "invalid";
+    }
+  }
+
+  enum class CompareOp
+  {
+    eNever = VK_COMPARE_OP_NEVER,
+    eLess = VK_COMPARE_OP_LESS,
+    eEqual = VK_COMPARE_OP_EQUAL,
+    eLessOrEqual = VK_COMPARE_OP_LESS_OR_EQUAL,
+    eGreater = VK_COMPARE_OP_GREATER,
+    eNotEqual = VK_COMPARE_OP_NOT_EQUAL,
+    eGreaterOrEqual = VK_COMPARE_OP_GREATER_OR_EQUAL,
+    eAlways = VK_COMPARE_OP_ALWAYS
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( CompareOp value )
+  {
+    switch ( value )
+    {
+      case CompareOp::eNever : return "Never";
+      case CompareOp::eLess : return "Less";
+      case CompareOp::eEqual : return "Equal";
+      case CompareOp::eLessOrEqual : return "LessOrEqual";
+      case CompareOp::eGreater : return "Greater";
+      case CompareOp::eNotEqual : return "NotEqual";
+      case CompareOp::eGreaterOrEqual : return "GreaterOrEqual";
+      case CompareOp::eAlways : return "Always";
+      default: return "invalid";
+    }
+  }
+
+  enum class ComponentSwizzle
+  {
+    eIdentity = VK_COMPONENT_SWIZZLE_IDENTITY,
+    eZero = VK_COMPONENT_SWIZZLE_ZERO,
+    eOne = VK_COMPONENT_SWIZZLE_ONE,
+    eR = VK_COMPONENT_SWIZZLE_R,
+    eG = VK_COMPONENT_SWIZZLE_G,
+    eB = VK_COMPONENT_SWIZZLE_B,
+    eA = VK_COMPONENT_SWIZZLE_A
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ComponentSwizzle value )
+  {
+    switch ( value )
+    {
+      case ComponentSwizzle::eIdentity : return "Identity";
+      case ComponentSwizzle::eZero : return "Zero";
+      case ComponentSwizzle::eOne : return "One";
+      case ComponentSwizzle::eR : return "R";
+      case ComponentSwizzle::eG : return "G";
+      case ComponentSwizzle::eB : return "B";
+      case ComponentSwizzle::eA : return "A";
+      default: return "invalid";
+    }
+  }
+
+  enum class ComponentTypeNV
+  {
+    eFloat16 = VK_COMPONENT_TYPE_FLOAT16_NV,
+    eFloat32 = VK_COMPONENT_TYPE_FLOAT32_NV,
+    eFloat64 = VK_COMPONENT_TYPE_FLOAT64_NV,
+    eSint8 = VK_COMPONENT_TYPE_SINT8_NV,
+    eSint16 = VK_COMPONENT_TYPE_SINT16_NV,
+    eSint32 = VK_COMPONENT_TYPE_SINT32_NV,
+    eSint64 = VK_COMPONENT_TYPE_SINT64_NV,
+    eUint8 = VK_COMPONENT_TYPE_UINT8_NV,
+    eUint16 = VK_COMPONENT_TYPE_UINT16_NV,
+    eUint32 = VK_COMPONENT_TYPE_UINT32_NV,
+    eUint64 = VK_COMPONENT_TYPE_UINT64_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ComponentTypeNV value )
+  {
+    switch ( value )
+    {
+      case ComponentTypeNV::eFloat16 : return "Float16";
+      case ComponentTypeNV::eFloat32 : return "Float32";
+      case ComponentTypeNV::eFloat64 : return "Float64";
+      case ComponentTypeNV::eSint8 : return "Sint8";
+      case ComponentTypeNV::eSint16 : return "Sint16";
+      case ComponentTypeNV::eSint32 : return "Sint32";
+      case ComponentTypeNV::eSint64 : return "Sint64";
+      case ComponentTypeNV::eUint8 : return "Uint8";
+      case ComponentTypeNV::eUint16 : return "Uint16";
+      case ComponentTypeNV::eUint32 : return "Uint32";
+      case ComponentTypeNV::eUint64 : return "Uint64";
+      default: return "invalid";
+    }
+  }
+
+  enum class ConservativeRasterizationModeEXT
+  {
+    eDisabled = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT,
+    eOverestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT,
+    eUnderestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ConservativeRasterizationModeEXT value )
+  {
+    switch ( value )
+    {
+      case ConservativeRasterizationModeEXT::eDisabled : return "Disabled";
+      case ConservativeRasterizationModeEXT::eOverestimate : return "Overestimate";
+      case ConservativeRasterizationModeEXT::eUnderestimate : return "Underestimate";
+      default: return "invalid";
+    }
+  }
+
+  enum class CopyAccelerationStructureModeNV
+  {
+    eClone = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV,
+    eCompact = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( CopyAccelerationStructureModeNV value )
+  {
+    switch ( value )
+    {
+      case CopyAccelerationStructureModeNV::eClone : return "Clone";
+      case CopyAccelerationStructureModeNV::eCompact : return "Compact";
+      default: return "invalid";
+    }
+  }
+
+  enum class CoverageModulationModeNV
+  {
+    eNone = VK_COVERAGE_MODULATION_MODE_NONE_NV,
+    eRgb = VK_COVERAGE_MODULATION_MODE_RGB_NV,
+    eAlpha = VK_COVERAGE_MODULATION_MODE_ALPHA_NV,
+    eRgba = VK_COVERAGE_MODULATION_MODE_RGBA_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( CoverageModulationModeNV value )
+  {
+    switch ( value )
+    {
+      case CoverageModulationModeNV::eNone : return "None";
+      case CoverageModulationModeNV::eRgb : return "Rgb";
+      case CoverageModulationModeNV::eAlpha : return "Alpha";
+      case CoverageModulationModeNV::eRgba : return "Rgba";
+      default: return "invalid";
+    }
+  }
+
+  enum class CoverageReductionModeNV
+  {
+    eMerge = VK_COVERAGE_REDUCTION_MODE_MERGE_NV,
+    eTruncate = VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( CoverageReductionModeNV value )
+  {
+    switch ( value )
+    {
+      case CoverageReductionModeNV::eMerge : return "Merge";
+      case CoverageReductionModeNV::eTruncate : return "Truncate";
+      default: return "invalid";
+    }
+  }
+
+  enum class DebugReportObjectTypeEXT
+  {
+    eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+    eInstance = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+    ePhysicalDevice = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+    eDevice = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+    eQueue = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
+    eSemaphore = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+    eCommandBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+    eFence = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
+    eDeviceMemory = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+    eBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+    eImage = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+    eEvent = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT,
+    eQueryPool = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT,
+    eBufferView = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT,
+    eImageView = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
+    eShaderModule = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+    ePipelineCache = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT,
+    ePipelineLayout = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
+    eRenderPass = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+    ePipeline = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+    eDescriptorSetLayout = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
+    eSampler = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT,
+    eDescriptorPool = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
+    eDescriptorSet = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+    eFramebuffer = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT,
+    eCommandPool = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT,
+    eSurfaceKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT,
+    eSwapchainKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+    eDebugReportCallbackEXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT,
+    eDisplayKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT,
+    eDisplayModeKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT,
+    eObjectTableNVX = VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT,
+    eIndirectCommandsLayoutNVX = VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT,
+    eValidationCacheEXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT,
+    eSamplerYcbcrConversion = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT,
+    eDescriptorUpdateTemplate = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT,
+    eAccelerationStructureNV = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
+    eDebugReport = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT,
+    eValidationCache = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT,
+    eDescriptorUpdateTemplateKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT,
+    eSamplerYcbcrConversionKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DebugReportObjectTypeEXT value )
+  {
+    switch ( value )
+    {
+      case DebugReportObjectTypeEXT::eUnknown : return "Unknown";
+      case DebugReportObjectTypeEXT::eInstance : return "Instance";
+      case DebugReportObjectTypeEXT::ePhysicalDevice : return "PhysicalDevice";
+      case DebugReportObjectTypeEXT::eDevice : return "Device";
+      case DebugReportObjectTypeEXT::eQueue : return "Queue";
+      case DebugReportObjectTypeEXT::eSemaphore : return "Semaphore";
+      case DebugReportObjectTypeEXT::eCommandBuffer : return "CommandBuffer";
+      case DebugReportObjectTypeEXT::eFence : return "Fence";
+      case DebugReportObjectTypeEXT::eDeviceMemory : return "DeviceMemory";
+      case DebugReportObjectTypeEXT::eBuffer : return "Buffer";
+      case DebugReportObjectTypeEXT::eImage : return "Image";
+      case DebugReportObjectTypeEXT::eEvent : return "Event";
+      case DebugReportObjectTypeEXT::eQueryPool : return "QueryPool";
+      case DebugReportObjectTypeEXT::eBufferView : return "BufferView";
+      case DebugReportObjectTypeEXT::eImageView : return "ImageView";
+      case DebugReportObjectTypeEXT::eShaderModule : return "ShaderModule";
+      case DebugReportObjectTypeEXT::ePipelineCache : return "PipelineCache";
+      case DebugReportObjectTypeEXT::ePipelineLayout : return "PipelineLayout";
+      case DebugReportObjectTypeEXT::eRenderPass : return "RenderPass";
+      case DebugReportObjectTypeEXT::ePipeline : return "Pipeline";
+      case DebugReportObjectTypeEXT::eDescriptorSetLayout : return "DescriptorSetLayout";
+      case DebugReportObjectTypeEXT::eSampler : return "Sampler";
+      case DebugReportObjectTypeEXT::eDescriptorPool : return "DescriptorPool";
+      case DebugReportObjectTypeEXT::eDescriptorSet : return "DescriptorSet";
+      case DebugReportObjectTypeEXT::eFramebuffer : return "Framebuffer";
+      case DebugReportObjectTypeEXT::eCommandPool : return "CommandPool";
+      case DebugReportObjectTypeEXT::eSurfaceKHR : return "SurfaceKHR";
+      case DebugReportObjectTypeEXT::eSwapchainKHR : return "SwapchainKHR";
+      case DebugReportObjectTypeEXT::eDebugReportCallbackEXT : return "DebugReportCallbackEXT";
+      case DebugReportObjectTypeEXT::eDisplayKHR : return "DisplayKHR";
+      case DebugReportObjectTypeEXT::eDisplayModeKHR : return "DisplayModeKHR";
+      case DebugReportObjectTypeEXT::eObjectTableNVX : return "ObjectTableNVX";
+      case DebugReportObjectTypeEXT::eIndirectCommandsLayoutNVX : return "IndirectCommandsLayoutNVX";
+      case DebugReportObjectTypeEXT::eValidationCacheEXT : return "ValidationCacheEXT";
+      case DebugReportObjectTypeEXT::eSamplerYcbcrConversion : return "SamplerYcbcrConversion";
+      case DebugReportObjectTypeEXT::eDescriptorUpdateTemplate : return "DescriptorUpdateTemplate";
+      case DebugReportObjectTypeEXT::eAccelerationStructureNV : return "AccelerationStructureNV";
+      default: return "invalid";
+    }
+  }
+
+  enum class DescriptorType
+  {
+    eSampler = VK_DESCRIPTOR_TYPE_SAMPLER,
+    eCombinedImageSampler = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
+    eSampledImage = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
+    eStorageImage = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
+    eUniformTexelBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
+    eStorageTexelBuffer = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
+    eUniformBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
+    eStorageBuffer = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
+    eUniformBufferDynamic = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
+    eStorageBufferDynamic = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC,
+    eInputAttachment = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT,
+    eInlineUniformBlockEXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT,
+    eAccelerationStructureNV = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorType value )
+  {
+    switch ( value )
+    {
+      case DescriptorType::eSampler : return "Sampler";
+      case DescriptorType::eCombinedImageSampler : return "CombinedImageSampler";
+      case DescriptorType::eSampledImage : return "SampledImage";
+      case DescriptorType::eStorageImage : return "StorageImage";
+      case DescriptorType::eUniformTexelBuffer : return "UniformTexelBuffer";
+      case DescriptorType::eStorageTexelBuffer : return "StorageTexelBuffer";
+      case DescriptorType::eUniformBuffer : return "UniformBuffer";
+      case DescriptorType::eStorageBuffer : return "StorageBuffer";
+      case DescriptorType::eUniformBufferDynamic : return "UniformBufferDynamic";
+      case DescriptorType::eStorageBufferDynamic : return "StorageBufferDynamic";
+      case DescriptorType::eInputAttachment : return "InputAttachment";
+      case DescriptorType::eInlineUniformBlockEXT : return "InlineUniformBlockEXT";
+      case DescriptorType::eAccelerationStructureNV : return "AccelerationStructureNV";
+      default: return "invalid";
+    }
+  }
+
+  enum class DescriptorUpdateTemplateType
+  {
+    eDescriptorSet = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET,
+    ePushDescriptorsKHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR,
+    eDescriptorSetKHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateType value )
+  {
+    switch ( value )
+    {
+      case DescriptorUpdateTemplateType::eDescriptorSet : return "DescriptorSet";
+      case DescriptorUpdateTemplateType::ePushDescriptorsKHR : return "PushDescriptorsKHR";
+      default: return "invalid";
+    }
+  }
+
+  enum class DeviceEventTypeEXT
+  {
+    eDisplayHotplug = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceEventTypeEXT value )
+  {
+    switch ( value )
+    {
+      case DeviceEventTypeEXT::eDisplayHotplug : return "DisplayHotplug";
+      default: return "invalid";
+    }
+  }
+
+  enum class DiscardRectangleModeEXT
+  {
+    eInclusive = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT,
+    eExclusive = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DiscardRectangleModeEXT value )
+  {
+    switch ( value )
+    {
+      case DiscardRectangleModeEXT::eInclusive : return "Inclusive";
+      case DiscardRectangleModeEXT::eExclusive : return "Exclusive";
+      default: return "invalid";
+    }
+  }
+
+  enum class DisplayEventTypeEXT
+  {
+    eFirstPixelOut = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DisplayEventTypeEXT value )
+  {
+    switch ( value )
+    {
+      case DisplayEventTypeEXT::eFirstPixelOut : return "FirstPixelOut";
+      default: return "invalid";
+    }
+  }
+
+  enum class DisplayPowerStateEXT
+  {
+    eOff = VK_DISPLAY_POWER_STATE_OFF_EXT,
+    eSuspend = VK_DISPLAY_POWER_STATE_SUSPEND_EXT,
+    eOn = VK_DISPLAY_POWER_STATE_ON_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DisplayPowerStateEXT value )
+  {
+    switch ( value )
+    {
+      case DisplayPowerStateEXT::eOff : return "Off";
+      case DisplayPowerStateEXT::eSuspend : return "Suspend";
+      case DisplayPowerStateEXT::eOn : return "On";
+      default: return "invalid";
+    }
+  }
+
+  enum class DriverIdKHR
+  {
+    eAmdProprietary = VK_DRIVER_ID_AMD_PROPRIETARY_KHR,
+    eAmdOpenSource = VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR,
+    eMesaRadv = VK_DRIVER_ID_MESA_RADV_KHR,
+    eNvidiaProprietary = VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR,
+    eIntelProprietaryWindows = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR,
+    eIntelOpenSourceMESA = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR,
+    eImaginationProprietary = VK_DRIVER_ID_IMAGINATION_PROPRIETARY_KHR,
+    eQualcommProprietary = VK_DRIVER_ID_QUALCOMM_PROPRIETARY_KHR,
+    eArmProprietary = VK_DRIVER_ID_ARM_PROPRIETARY_KHR,
+    eGoogleSwiftshader = VK_DRIVER_ID_GOOGLE_SWIFTSHADER_KHR,
+    eGgpProprietary = VK_DRIVER_ID_GGP_PROPRIETARY_KHR,
+    eBroadcomProprietary = VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DriverIdKHR value )
+  {
+    switch ( value )
+    {
+      case DriverIdKHR::eAmdProprietary : return "AmdProprietary";
+      case DriverIdKHR::eAmdOpenSource : return "AmdOpenSource";
+      case DriverIdKHR::eMesaRadv : return "MesaRadv";
+      case DriverIdKHR::eNvidiaProprietary : return "NvidiaProprietary";
+      case DriverIdKHR::eIntelProprietaryWindows : return "IntelProprietaryWindows";
+      case DriverIdKHR::eIntelOpenSourceMESA : return "IntelOpenSourceMESA";
+      case DriverIdKHR::eImaginationProprietary : return "ImaginationProprietary";
+      case DriverIdKHR::eQualcommProprietary : return "QualcommProprietary";
+      case DriverIdKHR::eArmProprietary : return "ArmProprietary";
+      case DriverIdKHR::eGoogleSwiftshader : return "GoogleSwiftshader";
+      case DriverIdKHR::eGgpProprietary : return "GgpProprietary";
+      case DriverIdKHR::eBroadcomProprietary : return "BroadcomProprietary";
+      default: return "invalid";
+    }
+  }
+
+  enum class DynamicState
+  {
+    eViewport = VK_DYNAMIC_STATE_VIEWPORT,
+    eScissor = VK_DYNAMIC_STATE_SCISSOR,
+    eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH,
+    eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS,
+    eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS,
+    eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS,
+    eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
+    eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK,
+    eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE,
+    eViewportWScalingNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV,
+    eDiscardRectangleEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT,
+    eSampleLocationsEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT,
+    eViewportShadingRatePaletteNV = VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV,
+    eViewportCoarseSampleOrderNV = VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV,
+    eExclusiveScissorNV = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV,
+    eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DynamicState value )
+  {
+    switch ( value )
+    {
+      case DynamicState::eViewport : return "Viewport";
+      case DynamicState::eScissor : return "Scissor";
+      case DynamicState::eLineWidth : return "LineWidth";
+      case DynamicState::eDepthBias : return "DepthBias";
+      case DynamicState::eBlendConstants : return "BlendConstants";
+      case DynamicState::eDepthBounds : return "DepthBounds";
+      case DynamicState::eStencilCompareMask : return "StencilCompareMask";
+      case DynamicState::eStencilWriteMask : return "StencilWriteMask";
+      case DynamicState::eStencilReference : return "StencilReference";
+      case DynamicState::eViewportWScalingNV : return "ViewportWScalingNV";
+      case DynamicState::eDiscardRectangleEXT : return "DiscardRectangleEXT";
+      case DynamicState::eSampleLocationsEXT : return "SampleLocationsEXT";
+      case DynamicState::eViewportShadingRatePaletteNV : return "ViewportShadingRatePaletteNV";
+      case DynamicState::eViewportCoarseSampleOrderNV : return "ViewportCoarseSampleOrderNV";
+      case DynamicState::eExclusiveScissorNV : return "ExclusiveScissorNV";
+      case DynamicState::eLineStippleEXT : return "LineStippleEXT";
+      default: return "invalid";
+    }
+  }
+
+  enum class Filter
+  {
+    eNearest = VK_FILTER_NEAREST,
+    eLinear = VK_FILTER_LINEAR,
+    eCubicIMG = VK_FILTER_CUBIC_IMG,
+    eCubicEXT = VK_FILTER_CUBIC_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( Filter value )
+  {
+    switch ( value )
+    {
+      case Filter::eNearest : return "Nearest";
+      case Filter::eLinear : return "Linear";
+      case Filter::eCubicIMG : return "CubicIMG";
+      default: return "invalid";
+    }
+  }
+
+  enum class Format
+  {
+    eUndefined = VK_FORMAT_UNDEFINED,
+    eR4G4UnormPack8 = VK_FORMAT_R4G4_UNORM_PACK8,
+    eR4G4B4A4UnormPack16 = VK_FORMAT_R4G4B4A4_UNORM_PACK16,
+    eB4G4R4A4UnormPack16 = VK_FORMAT_B4G4R4A4_UNORM_PACK16,
+    eR5G6B5UnormPack16 = VK_FORMAT_R5G6B5_UNORM_PACK16,
+    eB5G6R5UnormPack16 = VK_FORMAT_B5G6R5_UNORM_PACK16,
+    eR5G5B5A1UnormPack16 = VK_FORMAT_R5G5B5A1_UNORM_PACK16,
+    eB5G5R5A1UnormPack16 = VK_FORMAT_B5G5R5A1_UNORM_PACK16,
+    eA1R5G5B5UnormPack16 = VK_FORMAT_A1R5G5B5_UNORM_PACK16,
+    eR8Unorm = VK_FORMAT_R8_UNORM,
+    eR8Snorm = VK_FORMAT_R8_SNORM,
+    eR8Uscaled = VK_FORMAT_R8_USCALED,
+    eR8Sscaled = VK_FORMAT_R8_SSCALED,
+    eR8Uint = VK_FORMAT_R8_UINT,
+    eR8Sint = VK_FORMAT_R8_SINT,
+    eR8Srgb = VK_FORMAT_R8_SRGB,
+    eR8G8Unorm = VK_FORMAT_R8G8_UNORM,
+    eR8G8Snorm = VK_FORMAT_R8G8_SNORM,
+    eR8G8Uscaled = VK_FORMAT_R8G8_USCALED,
+    eR8G8Sscaled = VK_FORMAT_R8G8_SSCALED,
+    eR8G8Uint = VK_FORMAT_R8G8_UINT,
+    eR8G8Sint = VK_FORMAT_R8G8_SINT,
+    eR8G8Srgb = VK_FORMAT_R8G8_SRGB,
+    eR8G8B8Unorm = VK_FORMAT_R8G8B8_UNORM,
+    eR8G8B8Snorm = VK_FORMAT_R8G8B8_SNORM,
+    eR8G8B8Uscaled = VK_FORMAT_R8G8B8_USCALED,
+    eR8G8B8Sscaled = VK_FORMAT_R8G8B8_SSCALED,
+    eR8G8B8Uint = VK_FORMAT_R8G8B8_UINT,
+    eR8G8B8Sint = VK_FORMAT_R8G8B8_SINT,
+    eR8G8B8Srgb = VK_FORMAT_R8G8B8_SRGB,
+    eB8G8R8Unorm = VK_FORMAT_B8G8R8_UNORM,
+    eB8G8R8Snorm = VK_FORMAT_B8G8R8_SNORM,
+    eB8G8R8Uscaled = VK_FORMAT_B8G8R8_USCALED,
+    eB8G8R8Sscaled = VK_FORMAT_B8G8R8_SSCALED,
+    eB8G8R8Uint = VK_FORMAT_B8G8R8_UINT,
+    eB8G8R8Sint = VK_FORMAT_B8G8R8_SINT,
+    eB8G8R8Srgb = VK_FORMAT_B8G8R8_SRGB,
+    eR8G8B8A8Unorm = VK_FORMAT_R8G8B8A8_UNORM,
+    eR8G8B8A8Snorm = VK_FORMAT_R8G8B8A8_SNORM,
+    eR8G8B8A8Uscaled = VK_FORMAT_R8G8B8A8_USCALED,
+    eR8G8B8A8Sscaled = VK_FORMAT_R8G8B8A8_SSCALED,
+    eR8G8B8A8Uint = VK_FORMAT_R8G8B8A8_UINT,
+    eR8G8B8A8Sint = VK_FORMAT_R8G8B8A8_SINT,
+    eR8G8B8A8Srgb = VK_FORMAT_R8G8B8A8_SRGB,
+    eB8G8R8A8Unorm = VK_FORMAT_B8G8R8A8_UNORM,
+    eB8G8R8A8Snorm = VK_FORMAT_B8G8R8A8_SNORM,
+    eB8G8R8A8Uscaled = VK_FORMAT_B8G8R8A8_USCALED,
+    eB8G8R8A8Sscaled = VK_FORMAT_B8G8R8A8_SSCALED,
+    eB8G8R8A8Uint = VK_FORMAT_B8G8R8A8_UINT,
+    eB8G8R8A8Sint = VK_FORMAT_B8G8R8A8_SINT,
+    eB8G8R8A8Srgb = VK_FORMAT_B8G8R8A8_SRGB,
+    eA8B8G8R8UnormPack32 = VK_FORMAT_A8B8G8R8_UNORM_PACK32,
+    eA8B8G8R8SnormPack32 = VK_FORMAT_A8B8G8R8_SNORM_PACK32,
+    eA8B8G8R8UscaledPack32 = VK_FORMAT_A8B8G8R8_USCALED_PACK32,
+    eA8B8G8R8SscaledPack32 = VK_FORMAT_A8B8G8R8_SSCALED_PACK32,
+    eA8B8G8R8UintPack32 = VK_FORMAT_A8B8G8R8_UINT_PACK32,
+    eA8B8G8R8SintPack32 = VK_FORMAT_A8B8G8R8_SINT_PACK32,
+    eA8B8G8R8SrgbPack32 = VK_FORMAT_A8B8G8R8_SRGB_PACK32,
+    eA2R10G10B10UnormPack32 = VK_FORMAT_A2R10G10B10_UNORM_PACK32,
+    eA2R10G10B10SnormPack32 = VK_FORMAT_A2R10G10B10_SNORM_PACK32,
+    eA2R10G10B10UscaledPack32 = VK_FORMAT_A2R10G10B10_USCALED_PACK32,
+    eA2R10G10B10SscaledPack32 = VK_FORMAT_A2R10G10B10_SSCALED_PACK32,
+    eA2R10G10B10UintPack32 = VK_FORMAT_A2R10G10B10_UINT_PACK32,
+    eA2R10G10B10SintPack32 = VK_FORMAT_A2R10G10B10_SINT_PACK32,
+    eA2B10G10R10UnormPack32 = VK_FORMAT_A2B10G10R10_UNORM_PACK32,
+    eA2B10G10R10SnormPack32 = VK_FORMAT_A2B10G10R10_SNORM_PACK32,
+    eA2B10G10R10UscaledPack32 = VK_FORMAT_A2B10G10R10_USCALED_PACK32,
+    eA2B10G10R10SscaledPack32 = VK_FORMAT_A2B10G10R10_SSCALED_PACK32,
+    eA2B10G10R10UintPack32 = VK_FORMAT_A2B10G10R10_UINT_PACK32,
+    eA2B10G10R10SintPack32 = VK_FORMAT_A2B10G10R10_SINT_PACK32,
+    eR16Unorm = VK_FORMAT_R16_UNORM,
+    eR16Snorm = VK_FORMAT_R16_SNORM,
+    eR16Uscaled = VK_FORMAT_R16_USCALED,
+    eR16Sscaled = VK_FORMAT_R16_SSCALED,
+    eR16Uint = VK_FORMAT_R16_UINT,
+    eR16Sint = VK_FORMAT_R16_SINT,
+    eR16Sfloat = VK_FORMAT_R16_SFLOAT,
+    eR16G16Unorm = VK_FORMAT_R16G16_UNORM,
+    eR16G16Snorm = VK_FORMAT_R16G16_SNORM,
+    eR16G16Uscaled = VK_FORMAT_R16G16_USCALED,
+    eR16G16Sscaled = VK_FORMAT_R16G16_SSCALED,
+    eR16G16Uint = VK_FORMAT_R16G16_UINT,
+    eR16G16Sint = VK_FORMAT_R16G16_SINT,
+    eR16G16Sfloat = VK_FORMAT_R16G16_SFLOAT,
+    eR16G16B16Unorm = VK_FORMAT_R16G16B16_UNORM,
+    eR16G16B16Snorm = VK_FORMAT_R16G16B16_SNORM,
+    eR16G16B16Uscaled = VK_FORMAT_R16G16B16_USCALED,
+    eR16G16B16Sscaled = VK_FORMAT_R16G16B16_SSCALED,
+    eR16G16B16Uint = VK_FORMAT_R16G16B16_UINT,
+    eR16G16B16Sint = VK_FORMAT_R16G16B16_SINT,
+    eR16G16B16Sfloat = VK_FORMAT_R16G16B16_SFLOAT,
+    eR16G16B16A16Unorm = VK_FORMAT_R16G16B16A16_UNORM,
+    eR16G16B16A16Snorm = VK_FORMAT_R16G16B16A16_SNORM,
+    eR16G16B16A16Uscaled = VK_FORMAT_R16G16B16A16_USCALED,
+    eR16G16B16A16Sscaled = VK_FORMAT_R16G16B16A16_SSCALED,
+    eR16G16B16A16Uint = VK_FORMAT_R16G16B16A16_UINT,
+    eR16G16B16A16Sint = VK_FORMAT_R16G16B16A16_SINT,
+    eR16G16B16A16Sfloat = VK_FORMAT_R16G16B16A16_SFLOAT,
+    eR32Uint = VK_FORMAT_R32_UINT,
+    eR32Sint = VK_FORMAT_R32_SINT,
+    eR32Sfloat = VK_FORMAT_R32_SFLOAT,
+    eR32G32Uint = VK_FORMAT_R32G32_UINT,
+    eR32G32Sint = VK_FORMAT_R32G32_SINT,
+    eR32G32Sfloat = VK_FORMAT_R32G32_SFLOAT,
+    eR32G32B32Uint = VK_FORMAT_R32G32B32_UINT,
+    eR32G32B32Sint = VK_FORMAT_R32G32B32_SINT,
+    eR32G32B32Sfloat = VK_FORMAT_R32G32B32_SFLOAT,
+    eR32G32B32A32Uint = VK_FORMAT_R32G32B32A32_UINT,
+    eR32G32B32A32Sint = VK_FORMAT_R32G32B32A32_SINT,
+    eR32G32B32A32Sfloat = VK_FORMAT_R32G32B32A32_SFLOAT,
+    eR64Uint = VK_FORMAT_R64_UINT,
+    eR64Sint = VK_FORMAT_R64_SINT,
+    eR64Sfloat = VK_FORMAT_R64_SFLOAT,
+    eR64G64Uint = VK_FORMAT_R64G64_UINT,
+    eR64G64Sint = VK_FORMAT_R64G64_SINT,
+    eR64G64Sfloat = VK_FORMAT_R64G64_SFLOAT,
+    eR64G64B64Uint = VK_FORMAT_R64G64B64_UINT,
+    eR64G64B64Sint = VK_FORMAT_R64G64B64_SINT,
+    eR64G64B64Sfloat = VK_FORMAT_R64G64B64_SFLOAT,
+    eR64G64B64A64Uint = VK_FORMAT_R64G64B64A64_UINT,
+    eR64G64B64A64Sint = VK_FORMAT_R64G64B64A64_SINT,
+    eR64G64B64A64Sfloat = VK_FORMAT_R64G64B64A64_SFLOAT,
+    eB10G11R11UfloatPack32 = VK_FORMAT_B10G11R11_UFLOAT_PACK32,
+    eE5B9G9R9UfloatPack32 = VK_FORMAT_E5B9G9R9_UFLOAT_PACK32,
+    eD16Unorm = VK_FORMAT_D16_UNORM,
+    eX8D24UnormPack32 = VK_FORMAT_X8_D24_UNORM_PACK32,
+    eD32Sfloat = VK_FORMAT_D32_SFLOAT,
+    eS8Uint = VK_FORMAT_S8_UINT,
+    eD16UnormS8Uint = VK_FORMAT_D16_UNORM_S8_UINT,
+    eD24UnormS8Uint = VK_FORMAT_D24_UNORM_S8_UINT,
+    eD32SfloatS8Uint = VK_FORMAT_D32_SFLOAT_S8_UINT,
+    eBc1RgbUnormBlock = VK_FORMAT_BC1_RGB_UNORM_BLOCK,
+    eBc1RgbSrgbBlock = VK_FORMAT_BC1_RGB_SRGB_BLOCK,
+    eBc1RgbaUnormBlock = VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
+    eBc1RgbaSrgbBlock = VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
+    eBc2UnormBlock = VK_FORMAT_BC2_UNORM_BLOCK,
+    eBc2SrgbBlock = VK_FORMAT_BC2_SRGB_BLOCK,
+    eBc3UnormBlock = VK_FORMAT_BC3_UNORM_BLOCK,
+    eBc3SrgbBlock = VK_FORMAT_BC3_SRGB_BLOCK,
+    eBc4UnormBlock = VK_FORMAT_BC4_UNORM_BLOCK,
+    eBc4SnormBlock = VK_FORMAT_BC4_SNORM_BLOCK,
+    eBc5UnormBlock = VK_FORMAT_BC5_UNORM_BLOCK,
+    eBc5SnormBlock = VK_FORMAT_BC5_SNORM_BLOCK,
+    eBc6HUfloatBlock = VK_FORMAT_BC6H_UFLOAT_BLOCK,
+    eBc6HSfloatBlock = VK_FORMAT_BC6H_SFLOAT_BLOCK,
+    eBc7UnormBlock = VK_FORMAT_BC7_UNORM_BLOCK,
+    eBc7SrgbBlock = VK_FORMAT_BC7_SRGB_BLOCK,
+    eEtc2R8G8B8UnormBlock = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
+    eEtc2R8G8B8SrgbBlock = VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
+    eEtc2R8G8B8A1UnormBlock = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
+    eEtc2R8G8B8A1SrgbBlock = VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
+    eEtc2R8G8B8A8UnormBlock = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
+    eEtc2R8G8B8A8SrgbBlock = VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
+    eEacR11UnormBlock = VK_FORMAT_EAC_R11_UNORM_BLOCK,
+    eEacR11SnormBlock = VK_FORMAT_EAC_R11_SNORM_BLOCK,
+    eEacR11G11UnormBlock = VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
+    eEacR11G11SnormBlock = VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
+    eAstc4x4UnormBlock = VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
+    eAstc4x4SrgbBlock = VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
+    eAstc5x4UnormBlock = VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
+    eAstc5x4SrgbBlock = VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
+    eAstc5x5UnormBlock = VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
+    eAstc5x5SrgbBlock = VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
+    eAstc6x5UnormBlock = VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
+    eAstc6x5SrgbBlock = VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
+    eAstc6x6UnormBlock = VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
+    eAstc6x6SrgbBlock = VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
+    eAstc8x5UnormBlock = VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
+    eAstc8x5SrgbBlock = VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
+    eAstc8x6UnormBlock = VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
+    eAstc8x6SrgbBlock = VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
+    eAstc8x8UnormBlock = VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
+    eAstc8x8SrgbBlock = VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
+    eAstc10x5UnormBlock = VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
+    eAstc10x5SrgbBlock = VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
+    eAstc10x6UnormBlock = VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
+    eAstc10x6SrgbBlock = VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
+    eAstc10x8UnormBlock = VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
+    eAstc10x8SrgbBlock = VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
+    eAstc10x10UnormBlock = VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
+    eAstc10x10SrgbBlock = VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
+    eAstc12x10UnormBlock = VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
+    eAstc12x10SrgbBlock = VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
+    eAstc12x12UnormBlock = VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
+    eAstc12x12SrgbBlock = VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
+    eG8B8G8R8422Unorm = VK_FORMAT_G8B8G8R8_422_UNORM,
+    eB8G8R8G8422Unorm = VK_FORMAT_B8G8R8G8_422_UNORM,
+    eG8B8R83Plane420Unorm = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM,
+    eG8B8R82Plane420Unorm = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
+    eG8B8R83Plane422Unorm = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM,
+    eG8B8R82Plane422Unorm = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM,
+    eG8B8R83Plane444Unorm = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM,
+    eR10X6UnormPack16 = VK_FORMAT_R10X6_UNORM_PACK16,
+    eR10X6G10X6Unorm2Pack16 = VK_FORMAT_R10X6G10X6_UNORM_2PACK16,
+    eR10X6G10X6B10X6A10X6Unorm4Pack16 = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16,
+    eG10X6B10X6G10X6R10X6422Unorm4Pack16 = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16,
+    eB10X6G10X6R10X6G10X6422Unorm4Pack16 = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16,
+    eG10X6B10X6R10X63Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16,
+    eG10X6B10X6R10X62Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16,
+    eG10X6B10X6R10X63Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16,
+    eG10X6B10X6R10X62Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16,
+    eG10X6B10X6R10X63Plane444Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16,
+    eR12X4UnormPack16 = VK_FORMAT_R12X4_UNORM_PACK16,
+    eR12X4G12X4Unorm2Pack16 = VK_FORMAT_R12X4G12X4_UNORM_2PACK16,
+    eR12X4G12X4B12X4A12X4Unorm4Pack16 = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16,
+    eG12X4B12X4G12X4R12X4422Unorm4Pack16 = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16,
+    eB12X4G12X4R12X4G12X4422Unorm4Pack16 = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16,
+    eG12X4B12X4R12X43Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16,
+    eG12X4B12X4R12X42Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16,
+    eG12X4B12X4R12X43Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16,
+    eG12X4B12X4R12X42Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16,
+    eG12X4B12X4R12X43Plane444Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16,
+    eG16B16G16R16422Unorm = VK_FORMAT_G16B16G16R16_422_UNORM,
+    eB16G16R16G16422Unorm = VK_FORMAT_B16G16R16G16_422_UNORM,
+    eG16B16R163Plane420Unorm = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM,
+    eG16B16R162Plane420Unorm = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM,
+    eG16B16R163Plane422Unorm = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM,
+    eG16B16R162Plane422Unorm = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM,
+    eG16B16R163Plane444Unorm = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM,
+    ePvrtc12BppUnormBlockIMG = VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG,
+    ePvrtc14BppUnormBlockIMG = VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG,
+    ePvrtc22BppUnormBlockIMG = VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG,
+    ePvrtc24BppUnormBlockIMG = VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG,
+    ePvrtc12BppSrgbBlockIMG = VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG,
+    ePvrtc14BppSrgbBlockIMG = VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG,
+    ePvrtc22BppSrgbBlockIMG = VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG,
+    ePvrtc24BppSrgbBlockIMG = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG,
+    eAstc4x4SfloatBlockEXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT,
+    eAstc5x4SfloatBlockEXT = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT,
+    eAstc5x5SfloatBlockEXT = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT,
+    eAstc6x5SfloatBlockEXT = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT,
+    eAstc6x6SfloatBlockEXT = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT,
+    eAstc8x5SfloatBlockEXT = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT,
+    eAstc8x6SfloatBlockEXT = VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT,
+    eAstc8x8SfloatBlockEXT = VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT,
+    eAstc10x5SfloatBlockEXT = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT,
+    eAstc10x6SfloatBlockEXT = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT,
+    eAstc10x8SfloatBlockEXT = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT,
+    eAstc10x10SfloatBlockEXT = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT,
+    eAstc12x10SfloatBlockEXT = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT,
+    eAstc12x12SfloatBlockEXT = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT,
+    eG8B8G8R8422UnormKHR = VK_FORMAT_G8B8G8R8_422_UNORM_KHR,
+    eB8G8R8G8422UnormKHR = VK_FORMAT_B8G8R8G8_422_UNORM_KHR,
+    eG8B8R83Plane420UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR,
+    eG8B8R82Plane420UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR,
+    eG8B8R83Plane422UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR,
+    eG8B8R82Plane422UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR,
+    eG8B8R83Plane444UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR,
+    eR10X6UnormPack16KHR = VK_FORMAT_R10X6_UNORM_PACK16_KHR,
+    eR10X6G10X6Unorm2Pack16KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR,
+    eR10X6G10X6B10X6A10X6Unorm4Pack16KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR,
+    eG10X6B10X6G10X6R10X6422Unorm4Pack16KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR,
+    eB10X6G10X6R10X6G10X6422Unorm4Pack16KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR,
+    eG10X6B10X6R10X63Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR,
+    eG10X6B10X6R10X62Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR,
+    eG10X6B10X6R10X63Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR,
+    eG10X6B10X6R10X62Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR,
+    eG10X6B10X6R10X63Plane444Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR,
+    eR12X4UnormPack16KHR = VK_FORMAT_R12X4_UNORM_PACK16_KHR,
+    eR12X4G12X4Unorm2Pack16KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR,
+    eR12X4G12X4B12X4A12X4Unorm4Pack16KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR,
+    eG12X4B12X4G12X4R12X4422Unorm4Pack16KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR,
+    eB12X4G12X4R12X4G12X4422Unorm4Pack16KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR,
+    eG12X4B12X4R12X43Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR,
+    eG12X4B12X4R12X42Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR,
+    eG12X4B12X4R12X43Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR,
+    eG12X4B12X4R12X42Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR,
+    eG12X4B12X4R12X43Plane444Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR,
+    eG16B16G16R16422UnormKHR = VK_FORMAT_G16B16G16R16_422_UNORM_KHR,
+    eB16G16R16G16422UnormKHR = VK_FORMAT_B16G16R16G16_422_UNORM_KHR,
+    eG16B16R163Plane420UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR,
+    eG16B16R162Plane420UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR,
+    eG16B16R163Plane422UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR,
+    eG16B16R162Plane422UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR,
+    eG16B16R163Plane444UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( Format value )
+  {
+    switch ( value )
+    {
+      case Format::eUndefined : return "Undefined";
+      case Format::eR4G4UnormPack8 : return "R4G4UnormPack8";
+      case Format::eR4G4B4A4UnormPack16 : return "R4G4B4A4UnormPack16";
+      case Format::eB4G4R4A4UnormPack16 : return "B4G4R4A4UnormPack16";
+      case Format::eR5G6B5UnormPack16 : return "R5G6B5UnormPack16";
+      case Format::eB5G6R5UnormPack16 : return "B5G6R5UnormPack16";
+      case Format::eR5G5B5A1UnormPack16 : return "R5G5B5A1UnormPack16";
+      case Format::eB5G5R5A1UnormPack16 : return "B5G5R5A1UnormPack16";
+      case Format::eA1R5G5B5UnormPack16 : return "A1R5G5B5UnormPack16";
+      case Format::eR8Unorm : return "R8Unorm";
+      case Format::eR8Snorm : return "R8Snorm";
+      case Format::eR8Uscaled : return "R8Uscaled";
+      case Format::eR8Sscaled : return "R8Sscaled";
+      case Format::eR8Uint : return "R8Uint";
+      case Format::eR8Sint : return "R8Sint";
+      case Format::eR8Srgb : return "R8Srgb";
+      case Format::eR8G8Unorm : return "R8G8Unorm";
+      case Format::eR8G8Snorm : return "R8G8Snorm";
+      case Format::eR8G8Uscaled : return "R8G8Uscaled";
+      case Format::eR8G8Sscaled : return "R8G8Sscaled";
+      case Format::eR8G8Uint : return "R8G8Uint";
+      case Format::eR8G8Sint : return "R8G8Sint";
+      case Format::eR8G8Srgb : return "R8G8Srgb";
+      case Format::eR8G8B8Unorm : return "R8G8B8Unorm";
+      case Format::eR8G8B8Snorm : return "R8G8B8Snorm";
+      case Format::eR8G8B8Uscaled : return "R8G8B8Uscaled";
+      case Format::eR8G8B8Sscaled : return "R8G8B8Sscaled";
+      case Format::eR8G8B8Uint : return "R8G8B8Uint";
+      case Format::eR8G8B8Sint : return "R8G8B8Sint";
+      case Format::eR8G8B8Srgb : return "R8G8B8Srgb";
+      case Format::eB8G8R8Unorm : return "B8G8R8Unorm";
+      case Format::eB8G8R8Snorm : return "B8G8R8Snorm";
+      case Format::eB8G8R8Uscaled : return "B8G8R8Uscaled";
+      case Format::eB8G8R8Sscaled : return "B8G8R8Sscaled";
+      case Format::eB8G8R8Uint : return "B8G8R8Uint";
+      case Format::eB8G8R8Sint : return "B8G8R8Sint";
+      case Format::eB8G8R8Srgb : return "B8G8R8Srgb";
+      case Format::eR8G8B8A8Unorm : return "R8G8B8A8Unorm";
+      case Format::eR8G8B8A8Snorm : return "R8G8B8A8Snorm";
+      case Format::eR8G8B8A8Uscaled : return "R8G8B8A8Uscaled";
+      case Format::eR8G8B8A8Sscaled : return "R8G8B8A8Sscaled";
+      case Format::eR8G8B8A8Uint : return "R8G8B8A8Uint";
+      case Format::eR8G8B8A8Sint : return "R8G8B8A8Sint";
+      case Format::eR8G8B8A8Srgb : return "R8G8B8A8Srgb";
+      case Format::eB8G8R8A8Unorm : return "B8G8R8A8Unorm";
+      case Format::eB8G8R8A8Snorm : return "B8G8R8A8Snorm";
+      case Format::eB8G8R8A8Uscaled : return "B8G8R8A8Uscaled";
+      case Format::eB8G8R8A8Sscaled : return "B8G8R8A8Sscaled";
+      case Format::eB8G8R8A8Uint : return "B8G8R8A8Uint";
+      case Format::eB8G8R8A8Sint : return "B8G8R8A8Sint";
+      case Format::eB8G8R8A8Srgb : return "B8G8R8A8Srgb";
+      case Format::eA8B8G8R8UnormPack32 : return "A8B8G8R8UnormPack32";
+      case Format::eA8B8G8R8SnormPack32 : return "A8B8G8R8SnormPack32";
+      case Format::eA8B8G8R8UscaledPack32 : return "A8B8G8R8UscaledPack32";
+      case Format::eA8B8G8R8SscaledPack32 : return "A8B8G8R8SscaledPack32";
+      case Format::eA8B8G8R8UintPack32 : return "A8B8G8R8UintPack32";
+      case Format::eA8B8G8R8SintPack32 : return "A8B8G8R8SintPack32";
+      case Format::eA8B8G8R8SrgbPack32 : return "A8B8G8R8SrgbPack32";
+      case Format::eA2R10G10B10UnormPack32 : return "A2R10G10B10UnormPack32";
+      case Format::eA2R10G10B10SnormPack32 : return "A2R10G10B10SnormPack32";
+      case Format::eA2R10G10B10UscaledPack32 : return "A2R10G10B10UscaledPack32";
+      case Format::eA2R10G10B10SscaledPack32 : return "A2R10G10B10SscaledPack32";
+      case Format::eA2R10G10B10UintPack32 : return "A2R10G10B10UintPack32";
+      case Format::eA2R10G10B10SintPack32 : return "A2R10G10B10SintPack32";
+      case Format::eA2B10G10R10UnormPack32 : return "A2B10G10R10UnormPack32";
+      case Format::eA2B10G10R10SnormPack32 : return "A2B10G10R10SnormPack32";
+      case Format::eA2B10G10R10UscaledPack32 : return "A2B10G10R10UscaledPack32";
+      case Format::eA2B10G10R10SscaledPack32 : return "A2B10G10R10SscaledPack32";
+      case Format::eA2B10G10R10UintPack32 : return "A2B10G10R10UintPack32";
+      case Format::eA2B10G10R10SintPack32 : return "A2B10G10R10SintPack32";
+      case Format::eR16Unorm : return "R16Unorm";
+      case Format::eR16Snorm : return "R16Snorm";
+      case Format::eR16Uscaled : return "R16Uscaled";
+      case Format::eR16Sscaled : return "R16Sscaled";
+      case Format::eR16Uint : return "R16Uint";
+      case Format::eR16Sint : return "R16Sint";
+      case Format::eR16Sfloat : return "R16Sfloat";
+      case Format::eR16G16Unorm : return "R16G16Unorm";
+      case Format::eR16G16Snorm : return "R16G16Snorm";
+      case Format::eR16G16Uscaled : return "R16G16Uscaled";
+      case Format::eR16G16Sscaled : return "R16G16Sscaled";
+      case Format::eR16G16Uint : return "R16G16Uint";
+      case Format::eR16G16Sint : return "R16G16Sint";
+      case Format::eR16G16Sfloat : return "R16G16Sfloat";
+      case Format::eR16G16B16Unorm : return "R16G16B16Unorm";
+      case Format::eR16G16B16Snorm : return "R16G16B16Snorm";
+      case Format::eR16G16B16Uscaled : return "R16G16B16Uscaled";
+      case Format::eR16G16B16Sscaled : return "R16G16B16Sscaled";
+      case Format::eR16G16B16Uint : return "R16G16B16Uint";
+      case Format::eR16G16B16Sint : return "R16G16B16Sint";
+      case Format::eR16G16B16Sfloat : return "R16G16B16Sfloat";
+      case Format::eR16G16B16A16Unorm : return "R16G16B16A16Unorm";
+      case Format::eR16G16B16A16Snorm : return "R16G16B16A16Snorm";
+      case Format::eR16G16B16A16Uscaled : return "R16G16B16A16Uscaled";
+      case Format::eR16G16B16A16Sscaled : return "R16G16B16A16Sscaled";
+      case Format::eR16G16B16A16Uint : return "R16G16B16A16Uint";
+      case Format::eR16G16B16A16Sint : return "R16G16B16A16Sint";
+      case Format::eR16G16B16A16Sfloat : return "R16G16B16A16Sfloat";
+      case Format::eR32Uint : return "R32Uint";
+      case Format::eR32Sint : return "R32Sint";
+      case Format::eR32Sfloat : return "R32Sfloat";
+      case Format::eR32G32Uint : return "R32G32Uint";
+      case Format::eR32G32Sint : return "R32G32Sint";
+      case Format::eR32G32Sfloat : return "R32G32Sfloat";
+      case Format::eR32G32B32Uint : return "R32G32B32Uint";
+      case Format::eR32G32B32Sint : return "R32G32B32Sint";
+      case Format::eR32G32B32Sfloat : return "R32G32B32Sfloat";
+      case Format::eR32G32B32A32Uint : return "R32G32B32A32Uint";
+      case Format::eR32G32B32A32Sint : return "R32G32B32A32Sint";
+      case Format::eR32G32B32A32Sfloat : return "R32G32B32A32Sfloat";
+      case Format::eR64Uint : return "R64Uint";
+      case Format::eR64Sint : return "R64Sint";
+      case Format::eR64Sfloat : return "R64Sfloat";
+      case Format::eR64G64Uint : return "R64G64Uint";
+      case Format::eR64G64Sint : return "R64G64Sint";
+      case Format::eR64G64Sfloat : return "R64G64Sfloat";
+      case Format::eR64G64B64Uint : return "R64G64B64Uint";
+      case Format::eR64G64B64Sint : return "R64G64B64Sint";
+      case Format::eR64G64B64Sfloat : return "R64G64B64Sfloat";
+      case Format::eR64G64B64A64Uint : return "R64G64B64A64Uint";
+      case Format::eR64G64B64A64Sint : return "R64G64B64A64Sint";
+      case Format::eR64G64B64A64Sfloat : return "R64G64B64A64Sfloat";
+      case Format::eB10G11R11UfloatPack32 : return "B10G11R11UfloatPack32";
+      case Format::eE5B9G9R9UfloatPack32 : return "E5B9G9R9UfloatPack32";
+      case Format::eD16Unorm : return "D16Unorm";
+      case Format::eX8D24UnormPack32 : return "X8D24UnormPack32";
+      case Format::eD32Sfloat : return "D32Sfloat";
+      case Format::eS8Uint : return "S8Uint";
+      case Format::eD16UnormS8Uint : return "D16UnormS8Uint";
+      case Format::eD24UnormS8Uint : return "D24UnormS8Uint";
+      case Format::eD32SfloatS8Uint : return "D32SfloatS8Uint";
+      case Format::eBc1RgbUnormBlock : return "Bc1RgbUnormBlock";
+      case Format::eBc1RgbSrgbBlock : return "Bc1RgbSrgbBlock";
+      case Format::eBc1RgbaUnormBlock : return "Bc1RgbaUnormBlock";
+      case Format::eBc1RgbaSrgbBlock : return "Bc1RgbaSrgbBlock";
+      case Format::eBc2UnormBlock : return "Bc2UnormBlock";
+      case Format::eBc2SrgbBlock : return "Bc2SrgbBlock";
+      case Format::eBc3UnormBlock : return "Bc3UnormBlock";
+      case Format::eBc3SrgbBlock : return "Bc3SrgbBlock";
+      case Format::eBc4UnormBlock : return "Bc4UnormBlock";
+      case Format::eBc4SnormBlock : return "Bc4SnormBlock";
+      case Format::eBc5UnormBlock : return "Bc5UnormBlock";
+      case Format::eBc5SnormBlock : return "Bc5SnormBlock";
+      case Format::eBc6HUfloatBlock : return "Bc6HUfloatBlock";
+      case Format::eBc6HSfloatBlock : return "Bc6HSfloatBlock";
+      case Format::eBc7UnormBlock : return "Bc7UnormBlock";
+      case Format::eBc7SrgbBlock : return "Bc7SrgbBlock";
+      case Format::eEtc2R8G8B8UnormBlock : return "Etc2R8G8B8UnormBlock";
+      case Format::eEtc2R8G8B8SrgbBlock : return "Etc2R8G8B8SrgbBlock";
+      case Format::eEtc2R8G8B8A1UnormBlock : return "Etc2R8G8B8A1UnormBlock";
+      case Format::eEtc2R8G8B8A1SrgbBlock : return "Etc2R8G8B8A1SrgbBlock";
+      case Format::eEtc2R8G8B8A8UnormBlock : return "Etc2R8G8B8A8UnormBlock";
+      case Format::eEtc2R8G8B8A8SrgbBlock : return "Etc2R8G8B8A8SrgbBlock";
+      case Format::eEacR11UnormBlock : return "EacR11UnormBlock";
+      case Format::eEacR11SnormBlock : return "EacR11SnormBlock";
+      case Format::eEacR11G11UnormBlock : return "EacR11G11UnormBlock";
+      case Format::eEacR11G11SnormBlock : return "EacR11G11SnormBlock";
+      case Format::eAstc4x4UnormBlock : return "Astc4x4UnormBlock";
+      case Format::eAstc4x4SrgbBlock : return "Astc4x4SrgbBlock";
+      case Format::eAstc5x4UnormBlock : return "Astc5x4UnormBlock";
+      case Format::eAstc5x4SrgbBlock : return "Astc5x4SrgbBlock";
+      case Format::eAstc5x5UnormBlock : return "Astc5x5UnormBlock";
+      case Format::eAstc5x5SrgbBlock : return "Astc5x5SrgbBlock";
+      case Format::eAstc6x5UnormBlock : return "Astc6x5UnormBlock";
+      case Format::eAstc6x5SrgbBlock : return "Astc6x5SrgbBlock";
+      case Format::eAstc6x6UnormBlock : return "Astc6x6UnormBlock";
+      case Format::eAstc6x6SrgbBlock : return "Astc6x6SrgbBlock";
+      case Format::eAstc8x5UnormBlock : return "Astc8x5UnormBlock";
+      case Format::eAstc8x5SrgbBlock : return "Astc8x5SrgbBlock";
+      case Format::eAstc8x6UnormBlock : return "Astc8x6UnormBlock";
+      case Format::eAstc8x6SrgbBlock : return "Astc8x6SrgbBlock";
+      case Format::eAstc8x8UnormBlock : return "Astc8x8UnormBlock";
+      case Format::eAstc8x8SrgbBlock : return "Astc8x8SrgbBlock";
+      case Format::eAstc10x5UnormBlock : return "Astc10x5UnormBlock";
+      case Format::eAstc10x5SrgbBlock : return "Astc10x5SrgbBlock";
+      case Format::eAstc10x6UnormBlock : return "Astc10x6UnormBlock";
+      case Format::eAstc10x6SrgbBlock : return "Astc10x6SrgbBlock";
+      case Format::eAstc10x8UnormBlock : return "Astc10x8UnormBlock";
+      case Format::eAstc10x8SrgbBlock : return "Astc10x8SrgbBlock";
+      case Format::eAstc10x10UnormBlock : return "Astc10x10UnormBlock";
+      case Format::eAstc10x10SrgbBlock : return "Astc10x10SrgbBlock";
+      case Format::eAstc12x10UnormBlock : return "Astc12x10UnormBlock";
+      case Format::eAstc12x10SrgbBlock : return "Astc12x10SrgbBlock";
+      case Format::eAstc12x12UnormBlock : return "Astc12x12UnormBlock";
+      case Format::eAstc12x12SrgbBlock : return "Astc12x12SrgbBlock";
+      case Format::eG8B8G8R8422Unorm : return "G8B8G8R8422Unorm";
+      case Format::eB8G8R8G8422Unorm : return "B8G8R8G8422Unorm";
+      case Format::eG8B8R83Plane420Unorm : return "G8B8R83Plane420Unorm";
+      case Format::eG8B8R82Plane420Unorm : return "G8B8R82Plane420Unorm";
+      case Format::eG8B8R83Plane422Unorm : return "G8B8R83Plane422Unorm";
+      case Format::eG8B8R82Plane422Unorm : return "G8B8R82Plane422Unorm";
+      case Format::eG8B8R83Plane444Unorm : return "G8B8R83Plane444Unorm";
+      case Format::eR10X6UnormPack16 : return "R10X6UnormPack16";
+      case Format::eR10X6G10X6Unorm2Pack16 : return "R10X6G10X6Unorm2Pack16";
+      case Format::eR10X6G10X6B10X6A10X6Unorm4Pack16 : return "R10X6G10X6B10X6A10X6Unorm4Pack16";
+      case Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16 : return "G10X6B10X6G10X6R10X6422Unorm4Pack16";
+      case Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16 : return "B10X6G10X6R10X6G10X6422Unorm4Pack16";
+      case Format::eG10X6B10X6R10X63Plane420Unorm3Pack16 : return "G10X6B10X6R10X63Plane420Unorm3Pack16";
+      case Format::eG10X6B10X6R10X62Plane420Unorm3Pack16 : return "G10X6B10X6R10X62Plane420Unorm3Pack16";
+      case Format::eG10X6B10X6R10X63Plane422Unorm3Pack16 : return "G10X6B10X6R10X63Plane422Unorm3Pack16";
+      case Format::eG10X6B10X6R10X62Plane422Unorm3Pack16 : return "G10X6B10X6R10X62Plane422Unorm3Pack16";
+      case Format::eG10X6B10X6R10X63Plane444Unorm3Pack16 : return "G10X6B10X6R10X63Plane444Unorm3Pack16";
+      case Format::eR12X4UnormPack16 : return "R12X4UnormPack16";
+      case Format::eR12X4G12X4Unorm2Pack16 : return "R12X4G12X4Unorm2Pack16";
+      case Format::eR12X4G12X4B12X4A12X4Unorm4Pack16 : return "R12X4G12X4B12X4A12X4Unorm4Pack16";
+      case Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16 : return "G12X4B12X4G12X4R12X4422Unorm4Pack16";
+      case Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16 : return "B12X4G12X4R12X4G12X4422Unorm4Pack16";
+      case Format::eG12X4B12X4R12X43Plane420Unorm3Pack16 : return "G12X4B12X4R12X43Plane420Unorm3Pack16";
+      case Format::eG12X4B12X4R12X42Plane420Unorm3Pack16 : return "G12X4B12X4R12X42Plane420Unorm3Pack16";
+      case Format::eG12X4B12X4R12X43Plane422Unorm3Pack16 : return "G12X4B12X4R12X43Plane422Unorm3Pack16";
+      case Format::eG12X4B12X4R12X42Plane422Unorm3Pack16 : return "G12X4B12X4R12X42Plane422Unorm3Pack16";
+      case Format::eG12X4B12X4R12X43Plane444Unorm3Pack16 : return "G12X4B12X4R12X43Plane444Unorm3Pack16";
+      case Format::eG16B16G16R16422Unorm : return "G16B16G16R16422Unorm";
+      case Format::eB16G16R16G16422Unorm : return "B16G16R16G16422Unorm";
+      case Format::eG16B16R163Plane420Unorm : return "G16B16R163Plane420Unorm";
+      case Format::eG16B16R162Plane420Unorm : return "G16B16R162Plane420Unorm";
+      case Format::eG16B16R163Plane422Unorm : return "G16B16R163Plane422Unorm";
+      case Format::eG16B16R162Plane422Unorm : return "G16B16R162Plane422Unorm";
+      case Format::eG16B16R163Plane444Unorm : return "G16B16R163Plane444Unorm";
+      case Format::ePvrtc12BppUnormBlockIMG : return "Pvrtc12BppUnormBlockIMG";
+      case Format::ePvrtc14BppUnormBlockIMG : return "Pvrtc14BppUnormBlockIMG";
+      case Format::ePvrtc22BppUnormBlockIMG : return "Pvrtc22BppUnormBlockIMG";
+      case Format::ePvrtc24BppUnormBlockIMG : return "Pvrtc24BppUnormBlockIMG";
+      case Format::ePvrtc12BppSrgbBlockIMG : return "Pvrtc12BppSrgbBlockIMG";
+      case Format::ePvrtc14BppSrgbBlockIMG : return "Pvrtc14BppSrgbBlockIMG";
+      case Format::ePvrtc22BppSrgbBlockIMG : return "Pvrtc22BppSrgbBlockIMG";
+      case Format::ePvrtc24BppSrgbBlockIMG : return "Pvrtc24BppSrgbBlockIMG";
+      case Format::eAstc4x4SfloatBlockEXT : return "Astc4x4SfloatBlockEXT";
+      case Format::eAstc5x4SfloatBlockEXT : return "Astc5x4SfloatBlockEXT";
+      case Format::eAstc5x5SfloatBlockEXT : return "Astc5x5SfloatBlockEXT";
+      case Format::eAstc6x5SfloatBlockEXT : return "Astc6x5SfloatBlockEXT";
+      case Format::eAstc6x6SfloatBlockEXT : return "Astc6x6SfloatBlockEXT";
+      case Format::eAstc8x5SfloatBlockEXT : return "Astc8x5SfloatBlockEXT";
+      case Format::eAstc8x6SfloatBlockEXT : return "Astc8x6SfloatBlockEXT";
+      case Format::eAstc8x8SfloatBlockEXT : return "Astc8x8SfloatBlockEXT";
+      case Format::eAstc10x5SfloatBlockEXT : return "Astc10x5SfloatBlockEXT";
+      case Format::eAstc10x6SfloatBlockEXT : return "Astc10x6SfloatBlockEXT";
+      case Format::eAstc10x8SfloatBlockEXT : return "Astc10x8SfloatBlockEXT";
+      case Format::eAstc10x10SfloatBlockEXT : return "Astc10x10SfloatBlockEXT";
+      case Format::eAstc12x10SfloatBlockEXT : return "Astc12x10SfloatBlockEXT";
+      case Format::eAstc12x12SfloatBlockEXT : return "Astc12x12SfloatBlockEXT";
+      default: return "invalid";
+    }
+  }
+
+  enum class FrontFace
+  {
+    eCounterClockwise = VK_FRONT_FACE_COUNTER_CLOCKWISE,
+    eClockwise = VK_FRONT_FACE_CLOCKWISE
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( FrontFace value )
+  {
+    switch ( value )
+    {
+      case FrontFace::eCounterClockwise : return "CounterClockwise";
+      case FrontFace::eClockwise : return "Clockwise";
+      default: return "invalid";
+    }
+  }
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  enum class FullScreenExclusiveEXT
+  {
+    eDefault = VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT,
+    eAllowed = VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT,
+    eDisallowed = VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT,
+    eApplicationControlled = VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( FullScreenExclusiveEXT value )
+  {
+    switch ( value )
+    {
+      case FullScreenExclusiveEXT::eDefault : return "Default";
+      case FullScreenExclusiveEXT::eAllowed : return "Allowed";
+      case FullScreenExclusiveEXT::eDisallowed : return "Disallowed";
+      case FullScreenExclusiveEXT::eApplicationControlled : return "ApplicationControlled";
+      default: return "invalid";
+    }
+  }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  enum class GeometryTypeNV
+  {
+    eTriangles = VK_GEOMETRY_TYPE_TRIANGLES_NV,
+    eAabbs = VK_GEOMETRY_TYPE_AABBS_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( GeometryTypeNV value )
+  {
+    switch ( value )
+    {
+      case GeometryTypeNV::eTriangles : return "Triangles";
+      case GeometryTypeNV::eAabbs : return "Aabbs";
+      default: return "invalid";
+    }
+  }
+
+  enum class ImageLayout
+  {
+    eUndefined = VK_IMAGE_LAYOUT_UNDEFINED,
+    eGeneral = VK_IMAGE_LAYOUT_GENERAL,
+    eColorAttachmentOptimal = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+    eDepthStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
+    eDepthStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
+    eShaderReadOnlyOptimal = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
+    eTransferSrcOptimal = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+    eTransferDstOptimal = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+    ePreinitialized = VK_IMAGE_LAYOUT_PREINITIALIZED,
+    eDepthReadOnlyStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL,
+    eDepthAttachmentStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL,
+    ePresentSrcKHR = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
+    eSharedPresentKHR = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR,
+    eShadingRateOptimalNV = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV,
+    eFragmentDensityMapOptimalEXT = VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT,
+    eDepthAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR,
+    eDepthReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR,
+    eStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR,
+    eStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR,
+    eDepthReadOnlyStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR,
+    eDepthAttachmentStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ImageLayout value )
+  {
+    switch ( value )
+    {
+      case ImageLayout::eUndefined : return "Undefined";
+      case ImageLayout::eGeneral : return "General";
+      case ImageLayout::eColorAttachmentOptimal : return "ColorAttachmentOptimal";
+      case ImageLayout::eDepthStencilAttachmentOptimal : return "DepthStencilAttachmentOptimal";
+      case ImageLayout::eDepthStencilReadOnlyOptimal : return "DepthStencilReadOnlyOptimal";
+      case ImageLayout::eShaderReadOnlyOptimal : return "ShaderReadOnlyOptimal";
+      case ImageLayout::eTransferSrcOptimal : return "TransferSrcOptimal";
+      case ImageLayout::eTransferDstOptimal : return "TransferDstOptimal";
+      case ImageLayout::ePreinitialized : return "Preinitialized";
+      case ImageLayout::eDepthReadOnlyStencilAttachmentOptimal : return "DepthReadOnlyStencilAttachmentOptimal";
+      case ImageLayout::eDepthAttachmentStencilReadOnlyOptimal : return "DepthAttachmentStencilReadOnlyOptimal";
+      case ImageLayout::ePresentSrcKHR : return "PresentSrcKHR";
+      case ImageLayout::eSharedPresentKHR : return "SharedPresentKHR";
+      case ImageLayout::eShadingRateOptimalNV : return "ShadingRateOptimalNV";
+      case ImageLayout::eFragmentDensityMapOptimalEXT : return "FragmentDensityMapOptimalEXT";
+      case ImageLayout::eDepthAttachmentOptimalKHR : return "DepthAttachmentOptimalKHR";
+      case ImageLayout::eDepthReadOnlyOptimalKHR : return "DepthReadOnlyOptimalKHR";
+      case ImageLayout::eStencilAttachmentOptimalKHR : return "StencilAttachmentOptimalKHR";
+      case ImageLayout::eStencilReadOnlyOptimalKHR : return "StencilReadOnlyOptimalKHR";
+      default: return "invalid";
+    }
+  }
+
+  enum class ImageTiling
+  {
+    eOptimal = VK_IMAGE_TILING_OPTIMAL,
+    eLinear = VK_IMAGE_TILING_LINEAR,
+    eDrmFormatModifierEXT = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ImageTiling value )
+  {
+    switch ( value )
+    {
+      case ImageTiling::eOptimal : return "Optimal";
+      case ImageTiling::eLinear : return "Linear";
+      case ImageTiling::eDrmFormatModifierEXT : return "DrmFormatModifierEXT";
+      default: return "invalid";
+    }
+  }
+
+  enum class ImageType
+  {
+    e1D = VK_IMAGE_TYPE_1D,
+    e2D = VK_IMAGE_TYPE_2D,
+    e3D = VK_IMAGE_TYPE_3D
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ImageType value )
+  {
+    switch ( value )
+    {
+      case ImageType::e1D : return "1D";
+      case ImageType::e2D : return "2D";
+      case ImageType::e3D : return "3D";
+      default: return "invalid";
+    }
+  }
+
+  enum class ImageViewType
+  {
+    e1D = VK_IMAGE_VIEW_TYPE_1D,
+    e2D = VK_IMAGE_VIEW_TYPE_2D,
+    e3D = VK_IMAGE_VIEW_TYPE_3D,
+    eCube = VK_IMAGE_VIEW_TYPE_CUBE,
+    e1DArray = VK_IMAGE_VIEW_TYPE_1D_ARRAY,
+    e2DArray = VK_IMAGE_VIEW_TYPE_2D_ARRAY,
+    eCubeArray = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ImageViewType value )
+  {
+    switch ( value )
+    {
+      case ImageViewType::e1D : return "1D";
+      case ImageViewType::e2D : return "2D";
+      case ImageViewType::e3D : return "3D";
+      case ImageViewType::eCube : return "Cube";
+      case ImageViewType::e1DArray : return "1DArray";
+      case ImageViewType::e2DArray : return "2DArray";
+      case ImageViewType::eCubeArray : return "CubeArray";
+      default: return "invalid";
+    }
+  }
+
+  enum class IndexType
+  {
+    eUint16 = VK_INDEX_TYPE_UINT16,
+    eUint32 = VK_INDEX_TYPE_UINT32,
+    eNoneNV = VK_INDEX_TYPE_NONE_NV,
+    eUint8EXT = VK_INDEX_TYPE_UINT8_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( IndexType value )
+  {
+    switch ( value )
+    {
+      case IndexType::eUint16 : return "Uint16";
+      case IndexType::eUint32 : return "Uint32";
+      case IndexType::eNoneNV : return "NoneNV";
+      case IndexType::eUint8EXT : return "Uint8EXT";
+      default: return "invalid";
+    }
+  }
+
+  enum class IndirectCommandsTokenTypeNVX
+  {
+    ePipeline = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX,
+    eDescriptorSet = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX,
+    eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX,
+    eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX,
+    ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX,
+    eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX,
+    eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX,
+    eDispatch = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( IndirectCommandsTokenTypeNVX value )
+  {
+    switch ( value )
+    {
+      case IndirectCommandsTokenTypeNVX::ePipeline : return "Pipeline";
+      case IndirectCommandsTokenTypeNVX::eDescriptorSet : return "DescriptorSet";
+      case IndirectCommandsTokenTypeNVX::eIndexBuffer : return "IndexBuffer";
+      case IndirectCommandsTokenTypeNVX::eVertexBuffer : return "VertexBuffer";
+      case IndirectCommandsTokenTypeNVX::ePushConstant : return "PushConstant";
+      case IndirectCommandsTokenTypeNVX::eDrawIndexed : return "DrawIndexed";
+      case IndirectCommandsTokenTypeNVX::eDraw : return "Draw";
+      case IndirectCommandsTokenTypeNVX::eDispatch : return "Dispatch";
+      default: return "invalid";
+    }
+  }
+
+  enum class InternalAllocationType
+  {
+    eExecutable = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( InternalAllocationType value )
+  {
+    switch ( value )
+    {
+      case InternalAllocationType::eExecutable : return "Executable";
+      default: return "invalid";
+    }
+  }
+
+  enum class LineRasterizationModeEXT
+  {
+    eDefault = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT,
+    eRectangular = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT,
+    eBresenham = VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT,
+    eRectangularSmooth = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( LineRasterizationModeEXT value )
+  {
+    switch ( value )
+    {
+      case LineRasterizationModeEXT::eDefault : return "Default";
+      case LineRasterizationModeEXT::eRectangular : return "Rectangular";
+      case LineRasterizationModeEXT::eBresenham : return "Bresenham";
+      case LineRasterizationModeEXT::eRectangularSmooth : return "RectangularSmooth";
+      default: return "invalid";
+    }
+  }
+
+  enum class LogicOp
+  {
+    eClear = VK_LOGIC_OP_CLEAR,
+    eAnd = VK_LOGIC_OP_AND,
+    eAndReverse = VK_LOGIC_OP_AND_REVERSE,
+    eCopy = VK_LOGIC_OP_COPY,
+    eAndInverted = VK_LOGIC_OP_AND_INVERTED,
+    eNoOp = VK_LOGIC_OP_NO_OP,
+    eXor = VK_LOGIC_OP_XOR,
+    eOr = VK_LOGIC_OP_OR,
+    eNor = VK_LOGIC_OP_NOR,
+    eEquivalent = VK_LOGIC_OP_EQUIVALENT,
+    eInvert = VK_LOGIC_OP_INVERT,
+    eOrReverse = VK_LOGIC_OP_OR_REVERSE,
+    eCopyInverted = VK_LOGIC_OP_COPY_INVERTED,
+    eOrInverted = VK_LOGIC_OP_OR_INVERTED,
+    eNand = VK_LOGIC_OP_NAND,
+    eSet = VK_LOGIC_OP_SET
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( LogicOp value )
+  {
+    switch ( value )
+    {
+      case LogicOp::eClear : return "Clear";
+      case LogicOp::eAnd : return "And";
+      case LogicOp::eAndReverse : return "AndReverse";
+      case LogicOp::eCopy : return "Copy";
+      case LogicOp::eAndInverted : return "AndInverted";
+      case LogicOp::eNoOp : return "NoOp";
+      case LogicOp::eXor : return "Xor";
+      case LogicOp::eOr : return "Or";
+      case LogicOp::eNor : return "Nor";
+      case LogicOp::eEquivalent : return "Equivalent";
+      case LogicOp::eInvert : return "Invert";
+      case LogicOp::eOrReverse : return "OrReverse";
+      case LogicOp::eCopyInverted : return "CopyInverted";
+      case LogicOp::eOrInverted : return "OrInverted";
+      case LogicOp::eNand : return "Nand";
+      case LogicOp::eSet : return "Set";
+      default: return "invalid";
+    }
+  }
+
+  enum class MemoryOverallocationBehaviorAMD
+  {
+    eDefault = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD,
+    eAllowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD,
+    eDisallowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryOverallocationBehaviorAMD value )
+  {
+    switch ( value )
+    {
+      case MemoryOverallocationBehaviorAMD::eDefault : return "Default";
+      case MemoryOverallocationBehaviorAMD::eAllowed : return "Allowed";
+      case MemoryOverallocationBehaviorAMD::eDisallowed : return "Disallowed";
+      default: return "invalid";
+    }
+  }
+
+  enum class ObjectEntryTypeNVX
+  {
+    eDescriptorSet = VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX,
+    ePipeline = VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX,
+    eIndexBuffer = VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX,
+    eVertexBuffer = VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX,
+    ePushConstant = VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ObjectEntryTypeNVX value )
+  {
+    switch ( value )
+    {
+      case ObjectEntryTypeNVX::eDescriptorSet : return "DescriptorSet";
+      case ObjectEntryTypeNVX::ePipeline : return "Pipeline";
+      case ObjectEntryTypeNVX::eIndexBuffer : return "IndexBuffer";
+      case ObjectEntryTypeNVX::eVertexBuffer : return "VertexBuffer";
+      case ObjectEntryTypeNVX::ePushConstant : return "PushConstant";
+      default: return "invalid";
+    }
+  }
+
+  enum class ObjectType
+  {
+    eUnknown = VK_OBJECT_TYPE_UNKNOWN,
+    eInstance = VK_OBJECT_TYPE_INSTANCE,
+    ePhysicalDevice = VK_OBJECT_TYPE_PHYSICAL_DEVICE,
+    eDevice = VK_OBJECT_TYPE_DEVICE,
+    eQueue = VK_OBJECT_TYPE_QUEUE,
+    eSemaphore = VK_OBJECT_TYPE_SEMAPHORE,
+    eCommandBuffer = VK_OBJECT_TYPE_COMMAND_BUFFER,
+    eFence = VK_OBJECT_TYPE_FENCE,
+    eDeviceMemory = VK_OBJECT_TYPE_DEVICE_MEMORY,
+    eBuffer = VK_OBJECT_TYPE_BUFFER,
+    eImage = VK_OBJECT_TYPE_IMAGE,
+    eEvent = VK_OBJECT_TYPE_EVENT,
+    eQueryPool = VK_OBJECT_TYPE_QUERY_POOL,
+    eBufferView = VK_OBJECT_TYPE_BUFFER_VIEW,
+    eImageView = VK_OBJECT_TYPE_IMAGE_VIEW,
+    eShaderModule = VK_OBJECT_TYPE_SHADER_MODULE,
+    ePipelineCache = VK_OBJECT_TYPE_PIPELINE_CACHE,
+    ePipelineLayout = VK_OBJECT_TYPE_PIPELINE_LAYOUT,
+    eRenderPass = VK_OBJECT_TYPE_RENDER_PASS,
+    ePipeline = VK_OBJECT_TYPE_PIPELINE,
+    eDescriptorSetLayout = VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT,
+    eSampler = VK_OBJECT_TYPE_SAMPLER,
+    eDescriptorPool = VK_OBJECT_TYPE_DESCRIPTOR_POOL,
+    eDescriptorSet = VK_OBJECT_TYPE_DESCRIPTOR_SET,
+    eFramebuffer = VK_OBJECT_TYPE_FRAMEBUFFER,
+    eCommandPool = VK_OBJECT_TYPE_COMMAND_POOL,
+    eSamplerYcbcrConversion = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,
+    eDescriptorUpdateTemplate = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE,
+    eSurfaceKHR = VK_OBJECT_TYPE_SURFACE_KHR,
+    eSwapchainKHR = VK_OBJECT_TYPE_SWAPCHAIN_KHR,
+    eDisplayKHR = VK_OBJECT_TYPE_DISPLAY_KHR,
+    eDisplayModeKHR = VK_OBJECT_TYPE_DISPLAY_MODE_KHR,
+    eDebugReportCallbackEXT = VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT,
+    eObjectTableNVX = VK_OBJECT_TYPE_OBJECT_TABLE_NVX,
+    eIndirectCommandsLayoutNVX = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX,
+    eDebugUtilsMessengerEXT = VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT,
+    eValidationCacheEXT = VK_OBJECT_TYPE_VALIDATION_CACHE_EXT,
+    eAccelerationStructureNV = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV,
+    ePerformanceConfigurationINTEL = VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL,
+    eDescriptorUpdateTemplateKHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR,
+    eSamplerYcbcrConversionKHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ObjectType value )
+  {
+    switch ( value )
+    {
+      case ObjectType::eUnknown : return "Unknown";
+      case ObjectType::eInstance : return "Instance";
+      case ObjectType::ePhysicalDevice : return "PhysicalDevice";
+      case ObjectType::eDevice : return "Device";
+      case ObjectType::eQueue : return "Queue";
+      case ObjectType::eSemaphore : return "Semaphore";
+      case ObjectType::eCommandBuffer : return "CommandBuffer";
+      case ObjectType::eFence : return "Fence";
+      case ObjectType::eDeviceMemory : return "DeviceMemory";
+      case ObjectType::eBuffer : return "Buffer";
+      case ObjectType::eImage : return "Image";
+      case ObjectType::eEvent : return "Event";
+      case ObjectType::eQueryPool : return "QueryPool";
+      case ObjectType::eBufferView : return "BufferView";
+      case ObjectType::eImageView : return "ImageView";
+      case ObjectType::eShaderModule : return "ShaderModule";
+      case ObjectType::ePipelineCache : return "PipelineCache";
+      case ObjectType::ePipelineLayout : return "PipelineLayout";
+      case ObjectType::eRenderPass : return "RenderPass";
+      case ObjectType::ePipeline : return "Pipeline";
+      case ObjectType::eDescriptorSetLayout : return "DescriptorSetLayout";
+      case ObjectType::eSampler : return "Sampler";
+      case ObjectType::eDescriptorPool : return "DescriptorPool";
+      case ObjectType::eDescriptorSet : return "DescriptorSet";
+      case ObjectType::eFramebuffer : return "Framebuffer";
+      case ObjectType::eCommandPool : return "CommandPool";
+      case ObjectType::eSamplerYcbcrConversion : return "SamplerYcbcrConversion";
+      case ObjectType::eDescriptorUpdateTemplate : return "DescriptorUpdateTemplate";
+      case ObjectType::eSurfaceKHR : return "SurfaceKHR";
+      case ObjectType::eSwapchainKHR : return "SwapchainKHR";
+      case ObjectType::eDisplayKHR : return "DisplayKHR";
+      case ObjectType::eDisplayModeKHR : return "DisplayModeKHR";
+      case ObjectType::eDebugReportCallbackEXT : return "DebugReportCallbackEXT";
+      case ObjectType::eObjectTableNVX : return "ObjectTableNVX";
+      case ObjectType::eIndirectCommandsLayoutNVX : return "IndirectCommandsLayoutNVX";
+      case ObjectType::eDebugUtilsMessengerEXT : return "DebugUtilsMessengerEXT";
+      case ObjectType::eValidationCacheEXT : return "ValidationCacheEXT";
+      case ObjectType::eAccelerationStructureNV : return "AccelerationStructureNV";
+      case ObjectType::ePerformanceConfigurationINTEL : return "PerformanceConfigurationINTEL";
+      default: return "invalid";
+    }
+  }
+
+  enum class PerformanceConfigurationTypeINTEL
+  {
+    eCommandQueueMetricsDiscoveryActivated = VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceConfigurationTypeINTEL value )
+  {
+    switch ( value )
+    {
+      case PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated : return "CommandQueueMetricsDiscoveryActivated";
+      default: return "invalid";
+    }
+  }
+
+  enum class PerformanceCounterScopeKHR
+  {
+    eVkQueryScopeCommandBuffer = VK_QUERY_SCOPE_COMMAND_BUFFER_KHR,
+    eVkQueryScopeRenderPass = VK_QUERY_SCOPE_RENDER_PASS_KHR,
+    eVkQueryScopeCommand = VK_QUERY_SCOPE_COMMAND_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceCounterScopeKHR value )
+  {
+    switch ( value )
+    {
+      case PerformanceCounterScopeKHR::eVkQueryScopeCommandBuffer : return "VkQueryScopeCommandBuffer";
+      case PerformanceCounterScopeKHR::eVkQueryScopeRenderPass : return "VkQueryScopeRenderPass";
+      case PerformanceCounterScopeKHR::eVkQueryScopeCommand : return "VkQueryScopeCommand";
+      default: return "invalid";
+    }
+  }
+
+  enum class PerformanceCounterStorageKHR
+  {
+    eInt32 = VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR,
+    eInt64 = VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR,
+    eUint32 = VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR,
+    eUint64 = VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR,
+    eFloat32 = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR,
+    eFloat64 = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceCounterStorageKHR value )
+  {
+    switch ( value )
+    {
+      case PerformanceCounterStorageKHR::eInt32 : return "Int32";
+      case PerformanceCounterStorageKHR::eInt64 : return "Int64";
+      case PerformanceCounterStorageKHR::eUint32 : return "Uint32";
+      case PerformanceCounterStorageKHR::eUint64 : return "Uint64";
+      case PerformanceCounterStorageKHR::eFloat32 : return "Float32";
+      case PerformanceCounterStorageKHR::eFloat64 : return "Float64";
+      default: return "invalid";
+    }
+  }
+
+  enum class PerformanceCounterUnitKHR
+  {
+    eGeneric = VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR,
+    ePercentage = VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR,
+    eNanoseconds = VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR,
+    eBytes = VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR,
+    eBytesPerSecond = VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR,
+    eKelvin = VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR,
+    eWatts = VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR,
+    eVolts = VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR,
+    eAmps = VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR,
+    eHertz = VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR,
+    eCycles = VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceCounterUnitKHR value )
+  {
+    switch ( value )
+    {
+      case PerformanceCounterUnitKHR::eGeneric : return "Generic";
+      case PerformanceCounterUnitKHR::ePercentage : return "Percentage";
+      case PerformanceCounterUnitKHR::eNanoseconds : return "Nanoseconds";
+      case PerformanceCounterUnitKHR::eBytes : return "Bytes";
+      case PerformanceCounterUnitKHR::eBytesPerSecond : return "BytesPerSecond";
+      case PerformanceCounterUnitKHR::eKelvin : return "Kelvin";
+      case PerformanceCounterUnitKHR::eWatts : return "Watts";
+      case PerformanceCounterUnitKHR::eVolts : return "Volts";
+      case PerformanceCounterUnitKHR::eAmps : return "Amps";
+      case PerformanceCounterUnitKHR::eHertz : return "Hertz";
+      case PerformanceCounterUnitKHR::eCycles : return "Cycles";
+      default: return "invalid";
+    }
+  }
+
+  enum class PerformanceOverrideTypeINTEL
+  {
+    eNullHardware = VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL,
+    eFlushGpuCaches = VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceOverrideTypeINTEL value )
+  {
+    switch ( value )
+    {
+      case PerformanceOverrideTypeINTEL::eNullHardware : return "NullHardware";
+      case PerformanceOverrideTypeINTEL::eFlushGpuCaches : return "FlushGpuCaches";
+      default: return "invalid";
+    }
+  }
+
+  enum class PerformanceParameterTypeINTEL
+  {
+    eHwCountersSupported = VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL,
+    eStreamMarkerValidBits = VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceParameterTypeINTEL value )
+  {
+    switch ( value )
+    {
+      case PerformanceParameterTypeINTEL::eHwCountersSupported : return "HwCountersSupported";
+      case PerformanceParameterTypeINTEL::eStreamMarkerValidBits : return "StreamMarkerValidBits";
+      default: return "invalid";
+    }
+  }
+
+  enum class PerformanceValueTypeINTEL
+  {
+    eUint32 = VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL,
+    eUint64 = VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL,
+    eFloat = VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL,
+    eBool = VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL,
+    eString = VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceValueTypeINTEL value )
+  {
+    switch ( value )
+    {
+      case PerformanceValueTypeINTEL::eUint32 : return "Uint32";
+      case PerformanceValueTypeINTEL::eUint64 : return "Uint64";
+      case PerformanceValueTypeINTEL::eFloat : return "Float";
+      case PerformanceValueTypeINTEL::eBool : return "Bool";
+      case PerformanceValueTypeINTEL::eString : return "String";
+      default: return "invalid";
+    }
+  }
+
+  enum class PhysicalDeviceType
+  {
+    eOther = VK_PHYSICAL_DEVICE_TYPE_OTHER,
+    eIntegratedGpu = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU,
+    eDiscreteGpu = VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU,
+    eVirtualGpu = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU,
+    eCpu = VK_PHYSICAL_DEVICE_TYPE_CPU
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PhysicalDeviceType value )
+  {
+    switch ( value )
+    {
+      case PhysicalDeviceType::eOther : return "Other";
+      case PhysicalDeviceType::eIntegratedGpu : return "IntegratedGpu";
+      case PhysicalDeviceType::eDiscreteGpu : return "DiscreteGpu";
+      case PhysicalDeviceType::eVirtualGpu : return "VirtualGpu";
+      case PhysicalDeviceType::eCpu : return "Cpu";
+      default: return "invalid";
+    }
+  }
+
+  enum class PipelineBindPoint
+  {
+    eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS,
+    eCompute = VK_PIPELINE_BIND_POINT_COMPUTE,
+    eRayTracingNV = VK_PIPELINE_BIND_POINT_RAY_TRACING_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineBindPoint value )
+  {
+    switch ( value )
+    {
+      case PipelineBindPoint::eGraphics : return "Graphics";
+      case PipelineBindPoint::eCompute : return "Compute";
+      case PipelineBindPoint::eRayTracingNV : return "RayTracingNV";
+      default: return "invalid";
+    }
+  }
+
+  enum class PipelineCacheHeaderVersion
+  {
+    eOne = VK_PIPELINE_CACHE_HEADER_VERSION_ONE
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCacheHeaderVersion value )
+  {
+    switch ( value )
+    {
+      case PipelineCacheHeaderVersion::eOne : return "One";
+      default: return "invalid";
+    }
+  }
+
+  enum class PipelineExecutableStatisticFormatKHR
+  {
+    eBool32 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR,
+    eInt64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR,
+    eUint64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR,
+    eFloat64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineExecutableStatisticFormatKHR value )
+  {
+    switch ( value )
+    {
+      case PipelineExecutableStatisticFormatKHR::eBool32 : return "Bool32";
+      case PipelineExecutableStatisticFormatKHR::eInt64 : return "Int64";
+      case PipelineExecutableStatisticFormatKHR::eUint64 : return "Uint64";
+      case PipelineExecutableStatisticFormatKHR::eFloat64 : return "Float64";
+      default: return "invalid";
+    }
+  }
+
+  enum class PointClippingBehavior
+  {
+    eAllClipPlanes = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES,
+    eUserClipPlanesOnly = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY,
+    eAllClipPlanesKHR = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES_KHR,
+    eUserClipPlanesOnlyKHR = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PointClippingBehavior value )
+  {
+    switch ( value )
+    {
+      case PointClippingBehavior::eAllClipPlanes : return "AllClipPlanes";
+      case PointClippingBehavior::eUserClipPlanesOnly : return "UserClipPlanesOnly";
+      default: return "invalid";
+    }
+  }
+
+  enum class PolygonMode
+  {
+    eFill = VK_POLYGON_MODE_FILL,
+    eLine = VK_POLYGON_MODE_LINE,
+    ePoint = VK_POLYGON_MODE_POINT,
+    eFillRectangleNV = VK_POLYGON_MODE_FILL_RECTANGLE_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PolygonMode value )
+  {
+    switch ( value )
+    {
+      case PolygonMode::eFill : return "Fill";
+      case PolygonMode::eLine : return "Line";
+      case PolygonMode::ePoint : return "Point";
+      case PolygonMode::eFillRectangleNV : return "FillRectangleNV";
+      default: return "invalid";
+    }
+  }
+
+  enum class PresentModeKHR
+  {
+    eImmediate = VK_PRESENT_MODE_IMMEDIATE_KHR,
+    eMailbox = VK_PRESENT_MODE_MAILBOX_KHR,
+    eFifo = VK_PRESENT_MODE_FIFO_KHR,
+    eFifoRelaxed = VK_PRESENT_MODE_FIFO_RELAXED_KHR,
+    eSharedDemandRefresh = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR,
+    eSharedContinuousRefresh = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PresentModeKHR value )
+  {
+    switch ( value )
+    {
+      case PresentModeKHR::eImmediate : return "Immediate";
+      case PresentModeKHR::eMailbox : return "Mailbox";
+      case PresentModeKHR::eFifo : return "Fifo";
+      case PresentModeKHR::eFifoRelaxed : return "FifoRelaxed";
+      case PresentModeKHR::eSharedDemandRefresh : return "SharedDemandRefresh";
+      case PresentModeKHR::eSharedContinuousRefresh : return "SharedContinuousRefresh";
+      default: return "invalid";
+    }
+  }
+
+  enum class PrimitiveTopology
+  {
+    ePointList = VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
+    eLineList = VK_PRIMITIVE_TOPOLOGY_LINE_LIST,
+    eLineStrip = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP,
+    eTriangleList = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
+    eTriangleStrip = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
+    eTriangleFan = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN,
+    eLineListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY,
+    eLineStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY,
+    eTriangleListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY,
+    eTriangleStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY,
+    ePatchList = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PrimitiveTopology value )
+  {
+    switch ( value )
+    {
+      case PrimitiveTopology::ePointList : return "PointList";
+      case PrimitiveTopology::eLineList : return "LineList";
+      case PrimitiveTopology::eLineStrip : return "LineStrip";
+      case PrimitiveTopology::eTriangleList : return "TriangleList";
+      case PrimitiveTopology::eTriangleStrip : return "TriangleStrip";
+      case PrimitiveTopology::eTriangleFan : return "TriangleFan";
+      case PrimitiveTopology::eLineListWithAdjacency : return "LineListWithAdjacency";
+      case PrimitiveTopology::eLineStripWithAdjacency : return "LineStripWithAdjacency";
+      case PrimitiveTopology::eTriangleListWithAdjacency : return "TriangleListWithAdjacency";
+      case PrimitiveTopology::eTriangleStripWithAdjacency : return "TriangleStripWithAdjacency";
+      case PrimitiveTopology::ePatchList : return "PatchList";
+      default: return "invalid";
+    }
+  }
+
+  enum class QueryPoolSamplingModeINTEL
+  {
+    eManual = VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( QueryPoolSamplingModeINTEL value )
+  {
+    switch ( value )
+    {
+      case QueryPoolSamplingModeINTEL::eManual : return "Manual";
+      default: return "invalid";
+    }
+  }
+
+  enum class QueryType
+  {
+    eOcclusion = VK_QUERY_TYPE_OCCLUSION,
+    ePipelineStatistics = VK_QUERY_TYPE_PIPELINE_STATISTICS,
+    eTimestamp = VK_QUERY_TYPE_TIMESTAMP,
+    eTransformFeedbackStreamEXT = VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT,
+    ePerformanceQueryKHR = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR,
+    eAccelerationStructureCompactedSizeNV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV,
+    ePerformanceQueryINTEL = VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( QueryType value )
+  {
+    switch ( value )
+    {
+      case QueryType::eOcclusion : return "Occlusion";
+      case QueryType::ePipelineStatistics : return "PipelineStatistics";
+      case QueryType::eTimestamp : return "Timestamp";
+      case QueryType::eTransformFeedbackStreamEXT : return "TransformFeedbackStreamEXT";
+      case QueryType::ePerformanceQueryKHR : return "PerformanceQueryKHR";
+      case QueryType::eAccelerationStructureCompactedSizeNV : return "AccelerationStructureCompactedSizeNV";
+      case QueryType::ePerformanceQueryINTEL : return "PerformanceQueryINTEL";
+      default: return "invalid";
+    }
+  }
+
+  enum class QueueGlobalPriorityEXT
+  {
+    eLow = VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT,
+    eMedium = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT,
+    eHigh = VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT,
+    eRealtime = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( QueueGlobalPriorityEXT value )
+  {
+    switch ( value )
+    {
+      case QueueGlobalPriorityEXT::eLow : return "Low";
+      case QueueGlobalPriorityEXT::eMedium : return "Medium";
+      case QueueGlobalPriorityEXT::eHigh : return "High";
+      case QueueGlobalPriorityEXT::eRealtime : return "Realtime";
+      default: return "invalid";
+    }
+  }
+
+  enum class RasterizationOrderAMD
+  {
+    eStrict = VK_RASTERIZATION_ORDER_STRICT_AMD,
+    eRelaxed = VK_RASTERIZATION_ORDER_RELAXED_AMD
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( RasterizationOrderAMD value )
+  {
+    switch ( value )
+    {
+      case RasterizationOrderAMD::eStrict : return "Strict";
+      case RasterizationOrderAMD::eRelaxed : return "Relaxed";
+      default: return "invalid";
+    }
+  }
+
+  enum class RayTracingShaderGroupTypeNV
+  {
+    eGeneral = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV,
+    eTrianglesHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV,
+    eProceduralHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( RayTracingShaderGroupTypeNV value )
+  {
+    switch ( value )
+    {
+      case RayTracingShaderGroupTypeNV::eGeneral : return "General";
+      case RayTracingShaderGroupTypeNV::eTrianglesHitGroup : return "TrianglesHitGroup";
+      case RayTracingShaderGroupTypeNV::eProceduralHitGroup : return "ProceduralHitGroup";
+      default: return "invalid";
+    }
+  }
+
+  enum class Result
+  {
+    eSuccess = VK_SUCCESS,
+    eNotReady = VK_NOT_READY,
+    eTimeout = VK_TIMEOUT,
+    eEventSet = VK_EVENT_SET,
+    eEventReset = VK_EVENT_RESET,
+    eIncomplete = VK_INCOMPLETE,
+    eErrorOutOfHostMemory = VK_ERROR_OUT_OF_HOST_MEMORY,
+    eErrorOutOfDeviceMemory = VK_ERROR_OUT_OF_DEVICE_MEMORY,
+    eErrorInitializationFailed = VK_ERROR_INITIALIZATION_FAILED,
+    eErrorDeviceLost = VK_ERROR_DEVICE_LOST,
+    eErrorMemoryMapFailed = VK_ERROR_MEMORY_MAP_FAILED,
+    eErrorLayerNotPresent = VK_ERROR_LAYER_NOT_PRESENT,
+    eErrorExtensionNotPresent = VK_ERROR_EXTENSION_NOT_PRESENT,
+    eErrorFeatureNotPresent = VK_ERROR_FEATURE_NOT_PRESENT,
+    eErrorIncompatibleDriver = VK_ERROR_INCOMPATIBLE_DRIVER,
+    eErrorTooManyObjects = VK_ERROR_TOO_MANY_OBJECTS,
+    eErrorFormatNotSupported = VK_ERROR_FORMAT_NOT_SUPPORTED,
+    eErrorFragmentedPool = VK_ERROR_FRAGMENTED_POOL,
+    eErrorOutOfPoolMemory = VK_ERROR_OUT_OF_POOL_MEMORY,
+    eErrorInvalidExternalHandle = VK_ERROR_INVALID_EXTERNAL_HANDLE,
+    eErrorSurfaceLostKHR = VK_ERROR_SURFACE_LOST_KHR,
+    eErrorNativeWindowInUseKHR = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR,
+    eSuboptimalKHR = VK_SUBOPTIMAL_KHR,
+    eErrorOutOfDateKHR = VK_ERROR_OUT_OF_DATE_KHR,
+    eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR,
+    eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT,
+    eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV,
+    eErrorInvalidDrmFormatModifierPlaneLayoutEXT = VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT,
+    eErrorFragmentationEXT = VK_ERROR_FRAGMENTATION_EXT,
+    eErrorNotPermittedEXT = VK_ERROR_NOT_PERMITTED_EXT,
+    eErrorFullScreenExclusiveModeLostEXT = VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT,
+    eErrorInvalidOpaqueCaptureAddressKHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR,
+    eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY_KHR,
+    eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR,
+    eErrorInvalidDeviceAddressEXT = VK_ERROR_INVALID_DEVICE_ADDRESS_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( Result value )
+  {
+    switch ( value )
+    {
+      case Result::eSuccess : return "Success";
+      case Result::eNotReady : return "NotReady";
+      case Result::eTimeout : return "Timeout";
+      case Result::eEventSet : return "EventSet";
+      case Result::eEventReset : return "EventReset";
+      case Result::eIncomplete : return "Incomplete";
+      case Result::eErrorOutOfHostMemory : return "ErrorOutOfHostMemory";
+      case Result::eErrorOutOfDeviceMemory : return "ErrorOutOfDeviceMemory";
+      case Result::eErrorInitializationFailed : return "ErrorInitializationFailed";
+      case Result::eErrorDeviceLost : return "ErrorDeviceLost";
+      case Result::eErrorMemoryMapFailed : return "ErrorMemoryMapFailed";
+      case Result::eErrorLayerNotPresent : return "ErrorLayerNotPresent";
+      case Result::eErrorExtensionNotPresent : return "ErrorExtensionNotPresent";
+      case Result::eErrorFeatureNotPresent : return "ErrorFeatureNotPresent";
+      case Result::eErrorIncompatibleDriver : return "ErrorIncompatibleDriver";
+      case Result::eErrorTooManyObjects : return "ErrorTooManyObjects";
+      case Result::eErrorFormatNotSupported : return "ErrorFormatNotSupported";
+      case Result::eErrorFragmentedPool : return "ErrorFragmentedPool";
+      case Result::eErrorOutOfPoolMemory : return "ErrorOutOfPoolMemory";
+      case Result::eErrorInvalidExternalHandle : return "ErrorInvalidExternalHandle";
+      case Result::eErrorSurfaceLostKHR : return "ErrorSurfaceLostKHR";
+      case Result::eErrorNativeWindowInUseKHR : return "ErrorNativeWindowInUseKHR";
+      case Result::eSuboptimalKHR : return "SuboptimalKHR";
+      case Result::eErrorOutOfDateKHR : return "ErrorOutOfDateKHR";
+      case Result::eErrorIncompatibleDisplayKHR : return "ErrorIncompatibleDisplayKHR";
+      case Result::eErrorValidationFailedEXT : return "ErrorValidationFailedEXT";
+      case Result::eErrorInvalidShaderNV : return "ErrorInvalidShaderNV";
+      case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT : return "ErrorInvalidDrmFormatModifierPlaneLayoutEXT";
+      case Result::eErrorFragmentationEXT : return "ErrorFragmentationEXT";
+      case Result::eErrorNotPermittedEXT : return "ErrorNotPermittedEXT";
+      case Result::eErrorFullScreenExclusiveModeLostEXT : return "ErrorFullScreenExclusiveModeLostEXT";
+      case Result::eErrorInvalidOpaqueCaptureAddressKHR : return "ErrorInvalidOpaqueCaptureAddressKHR";
+      default: return "invalid";
+    }
+  }
+
+  enum class SamplerAddressMode
+  {
+    eRepeat = VK_SAMPLER_ADDRESS_MODE_REPEAT,
+    eMirroredRepeat = VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
+    eClampToEdge = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
+    eClampToBorder = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
+    eMirrorClampToEdge = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,
+    eMirrorClampToEdgeKHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SamplerAddressMode value )
+  {
+    switch ( value )
+    {
+      case SamplerAddressMode::eRepeat : return "Repeat";
+      case SamplerAddressMode::eMirroredRepeat : return "MirroredRepeat";
+      case SamplerAddressMode::eClampToEdge : return "ClampToEdge";
+      case SamplerAddressMode::eClampToBorder : return "ClampToBorder";
+      case SamplerAddressMode::eMirrorClampToEdge : return "MirrorClampToEdge";
+      default: return "invalid";
+    }
+  }
+
+  enum class SamplerMipmapMode
+  {
+    eNearest = VK_SAMPLER_MIPMAP_MODE_NEAREST,
+    eLinear = VK_SAMPLER_MIPMAP_MODE_LINEAR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SamplerMipmapMode value )
+  {
+    switch ( value )
+    {
+      case SamplerMipmapMode::eNearest : return "Nearest";
+      case SamplerMipmapMode::eLinear : return "Linear";
+      default: return "invalid";
+    }
+  }
+
+  enum class SamplerReductionModeEXT
+  {
+    eWeightedAverage = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT,
+    eMin = VK_SAMPLER_REDUCTION_MODE_MIN_EXT,
+    eMax = VK_SAMPLER_REDUCTION_MODE_MAX_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SamplerReductionModeEXT value )
+  {
+    switch ( value )
+    {
+      case SamplerReductionModeEXT::eWeightedAverage : return "WeightedAverage";
+      case SamplerReductionModeEXT::eMin : return "Min";
+      case SamplerReductionModeEXT::eMax : return "Max";
+      default: return "invalid";
+    }
+  }
+
+  enum class SamplerYcbcrModelConversion
+  {
+    eRgbIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY,
+    eYcbcrIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY,
+    eYcbcr709 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709,
+    eYcbcr601 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601,
+    eYcbcr2020 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020,
+    eRgbIdentityKHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY_KHR,
+    eYcbcrIdentityKHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY_KHR,
+    eYcbcr709KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709_KHR,
+    eYcbcr601KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601_KHR,
+    eYcbcr2020KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrModelConversion value )
+  {
+    switch ( value )
+    {
+      case SamplerYcbcrModelConversion::eRgbIdentity : return "RgbIdentity";
+      case SamplerYcbcrModelConversion::eYcbcrIdentity : return "YcbcrIdentity";
+      case SamplerYcbcrModelConversion::eYcbcr709 : return "Ycbcr709";
+      case SamplerYcbcrModelConversion::eYcbcr601 : return "Ycbcr601";
+      case SamplerYcbcrModelConversion::eYcbcr2020 : return "Ycbcr2020";
+      default: return "invalid";
+    }
+  }
+
+  enum class SamplerYcbcrRange
+  {
+    eItuFull = VK_SAMPLER_YCBCR_RANGE_ITU_FULL,
+    eItuNarrow = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW,
+    eItuFullKHR = VK_SAMPLER_YCBCR_RANGE_ITU_FULL_KHR,
+    eItuNarrowKHR = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrRange value )
+  {
+    switch ( value )
+    {
+      case SamplerYcbcrRange::eItuFull : return "ItuFull";
+      case SamplerYcbcrRange::eItuNarrow : return "ItuNarrow";
+      default: return "invalid";
+    }
+  }
+
+  enum class ScopeNV
+  {
+    eDevice = VK_SCOPE_DEVICE_NV,
+    eWorkgroup = VK_SCOPE_WORKGROUP_NV,
+    eSubgroup = VK_SCOPE_SUBGROUP_NV,
+    eQueueFamily = VK_SCOPE_QUEUE_FAMILY_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ScopeNV value )
+  {
+    switch ( value )
+    {
+      case ScopeNV::eDevice : return "Device";
+      case ScopeNV::eWorkgroup : return "Workgroup";
+      case ScopeNV::eSubgroup : return "Subgroup";
+      case ScopeNV::eQueueFamily : return "QueueFamily";
+      default: return "invalid";
+    }
+  }
+
+  enum class SemaphoreTypeKHR
+  {
+    eBinary = VK_SEMAPHORE_TYPE_BINARY_KHR,
+    eTimeline = VK_SEMAPHORE_TYPE_TIMELINE_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreTypeKHR value )
+  {
+    switch ( value )
+    {
+      case SemaphoreTypeKHR::eBinary : return "Binary";
+      case SemaphoreTypeKHR::eTimeline : return "Timeline";
+      default: return "invalid";
+    }
+  }
+
+  enum class ShaderFloatControlsIndependenceKHR
+  {
+    e32BitOnly = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR,
+    eAll = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR,
+    eNone = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderFloatControlsIndependenceKHR value )
+  {
+    switch ( value )
+    {
+      case ShaderFloatControlsIndependenceKHR::e32BitOnly : return "32BitOnly";
+      case ShaderFloatControlsIndependenceKHR::eAll : return "All";
+      case ShaderFloatControlsIndependenceKHR::eNone : return "None";
+      default: return "invalid";
+    }
+  }
+
+  enum class ShaderInfoTypeAMD
+  {
+    eStatistics = VK_SHADER_INFO_TYPE_STATISTICS_AMD,
+    eBinary = VK_SHADER_INFO_TYPE_BINARY_AMD,
+    eDisassembly = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderInfoTypeAMD value )
+  {
+    switch ( value )
+    {
+      case ShaderInfoTypeAMD::eStatistics : return "Statistics";
+      case ShaderInfoTypeAMD::eBinary : return "Binary";
+      case ShaderInfoTypeAMD::eDisassembly : return "Disassembly";
+      default: return "invalid";
+    }
+  }
+
+  enum class ShadingRatePaletteEntryNV
+  {
+    eNoInvocations = VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV,
+    e16InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV,
+    e8InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV,
+    e4InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV,
+    e2InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV,
+    e1InvocationPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV,
+    e1InvocationPer2X1Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV,
+    e1InvocationPer1X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV,
+    e1InvocationPer2X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV,
+    e1InvocationPer4X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV,
+    e1InvocationPer2X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV,
+    e1InvocationPer4X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ShadingRatePaletteEntryNV value )
+  {
+    switch ( value )
+    {
+      case ShadingRatePaletteEntryNV::eNoInvocations : return "NoInvocations";
+      case ShadingRatePaletteEntryNV::e16InvocationsPerPixel : return "16InvocationsPerPixel";
+      case ShadingRatePaletteEntryNV::e8InvocationsPerPixel : return "8InvocationsPerPixel";
+      case ShadingRatePaletteEntryNV::e4InvocationsPerPixel : return "4InvocationsPerPixel";
+      case ShadingRatePaletteEntryNV::e2InvocationsPerPixel : return "2InvocationsPerPixel";
+      case ShadingRatePaletteEntryNV::e1InvocationPerPixel : return "1InvocationPerPixel";
+      case ShadingRatePaletteEntryNV::e1InvocationPer2X1Pixels : return "1InvocationPer2X1Pixels";
+      case ShadingRatePaletteEntryNV::e1InvocationPer1X2Pixels : return "1InvocationPer1X2Pixels";
+      case ShadingRatePaletteEntryNV::e1InvocationPer2X2Pixels : return "1InvocationPer2X2Pixels";
+      case ShadingRatePaletteEntryNV::e1InvocationPer4X2Pixels : return "1InvocationPer4X2Pixels";
+      case ShadingRatePaletteEntryNV::e1InvocationPer2X4Pixels : return "1InvocationPer2X4Pixels";
+      case ShadingRatePaletteEntryNV::e1InvocationPer4X4Pixels : return "1InvocationPer4X4Pixels";
+      default: return "invalid";
+    }
+  }
+
+  enum class SharingMode
+  {
+    eExclusive = VK_SHARING_MODE_EXCLUSIVE,
+    eConcurrent = VK_SHARING_MODE_CONCURRENT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SharingMode value )
+  {
+    switch ( value )
+    {
+      case SharingMode::eExclusive : return "Exclusive";
+      case SharingMode::eConcurrent : return "Concurrent";
+      default: return "invalid";
+    }
+  }
+
+  enum class StencilOp
+  {
+    eKeep = VK_STENCIL_OP_KEEP,
+    eZero = VK_STENCIL_OP_ZERO,
+    eReplace = VK_STENCIL_OP_REPLACE,
+    eIncrementAndClamp = VK_STENCIL_OP_INCREMENT_AND_CLAMP,
+    eDecrementAndClamp = VK_STENCIL_OP_DECREMENT_AND_CLAMP,
+    eInvert = VK_STENCIL_OP_INVERT,
+    eIncrementAndWrap = VK_STENCIL_OP_INCREMENT_AND_WRAP,
+    eDecrementAndWrap = VK_STENCIL_OP_DECREMENT_AND_WRAP
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( StencilOp value )
+  {
+    switch ( value )
+    {
+      case StencilOp::eKeep : return "Keep";
+      case StencilOp::eZero : return "Zero";
+      case StencilOp::eReplace : return "Replace";
+      case StencilOp::eIncrementAndClamp : return "IncrementAndClamp";
+      case StencilOp::eDecrementAndClamp : return "DecrementAndClamp";
+      case StencilOp::eInvert : return "Invert";
+      case StencilOp::eIncrementAndWrap : return "IncrementAndWrap";
+      case StencilOp::eDecrementAndWrap : return "DecrementAndWrap";
+      default: return "invalid";
+    }
+  }
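+
+  // NOTE: illustrative sketch, not part of the upstream vulkan.hpp header. It shows how the
+  // scoped enums above pair with their to_string overloads for readable diagnostics; the
+  // helper name describePipelineStencil is hypothetical.
+  inline std::string describePipelineStencil( SharingMode mode, StencilOp failOp )
+  {
+    // to_string returns "invalid" for values outside the defined enumerant range.
+    return "sharing=" + to_string( mode ) + ", stencilFailOp=" + to_string( failOp );
+  }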
+
+  enum class StructureType
+  {
+    eApplicationInfo = VK_STRUCTURE_TYPE_APPLICATION_INFO,
+    eInstanceCreateInfo = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
+    eDeviceQueueCreateInfo = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
+    eDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
+    eSubmitInfo = VK_STRUCTURE_TYPE_SUBMIT_INFO,
+    eMemoryAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
+    eMappedMemoryRange = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
+    eBindSparseInfo = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO,
+    eFenceCreateInfo = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+    eSemaphoreCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
+    eEventCreateInfo = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
+    eQueryPoolCreateInfo = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO,
+    eBufferCreateInfo = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
+    eBufferViewCreateInfo = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
+    eImageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+    eImageViewCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+    eShaderModuleCreateInfo = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
+    ePipelineCacheCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO,
+    ePipelineShaderStageCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
+    ePipelineVertexInputStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
+    ePipelineInputAssemblyStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
+    ePipelineTessellationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,
+    ePipelineViewportStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
+    ePipelineRasterizationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
+    ePipelineMultisampleStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
+    ePipelineDepthStencilStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
+    ePipelineColorBlendStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
+    ePipelineDynamicStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
+    eGraphicsPipelineCreateInfo = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
+    eComputePipelineCreateInfo = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
+    ePipelineLayoutCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
+    eSamplerCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
+    eDescriptorSetLayoutCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
+    eDescriptorPoolCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
+    eDescriptorSetAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
+    eWriteDescriptorSet = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
+    eCopyDescriptorSet = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET,
+    eFramebufferCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
+    eRenderPassCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
+    eCommandPoolCreateInfo = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
+    eCommandBufferAllocateInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
+    eCommandBufferInheritanceInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
+    eCommandBufferBeginInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+    eRenderPassBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
+    eBufferMemoryBarrier = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
+    eImageMemoryBarrier = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+    eMemoryBarrier = VK_STRUCTURE_TYPE_MEMORY_BARRIER,
+    eLoaderInstanceCreateInfo = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO,
+    eLoaderDeviceCreateInfo = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO,
+    ePhysicalDeviceSubgroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES,
+    eBindBufferMemoryInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO,
+    eBindImageMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
+    ePhysicalDevice16BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES,
+    eMemoryDedicatedRequirements = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
+    eMemoryDedicatedAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
+    eMemoryAllocateFlagsInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO,
+    eDeviceGroupRenderPassBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO,
+    eDeviceGroupCommandBufferBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO,
+    eDeviceGroupSubmitInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO,
+    eDeviceGroupBindSparseInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO,
+    eBindBufferMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO,
+    eBindImageMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO,
+    ePhysicalDeviceGroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES,
+    eDeviceGroupDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO,
+    eBufferMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2,
+    eImageMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
+    eImageSparseMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2,
+    eMemoryRequirements2 = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
+    eSparseImageMemoryRequirements2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2,
+    ePhysicalDeviceFeatures2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
+    ePhysicalDeviceProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
+    eFormatProperties2 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
+    eImageFormatProperties2 = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
+    ePhysicalDeviceImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
+    eQueueFamilyProperties2 = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2,
+    ePhysicalDeviceMemoryProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2,
+    eSparseImageFormatProperties2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2,
+    ePhysicalDeviceSparseImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2,
+    ePhysicalDevicePointClippingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES,
+    eRenderPassInputAttachmentAspectCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO,
+    eImageViewUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO,
+    ePipelineTessellationDomainOriginStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO,
+    eRenderPassMultiviewCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO,
+    ePhysicalDeviceMultiviewFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES,
+    ePhysicalDeviceMultiviewProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES,
+    ePhysicalDeviceVariablePointersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES,
+    eProtectedSubmitInfo = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO,
+    ePhysicalDeviceProtectedMemoryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES,
+    ePhysicalDeviceProtectedMemoryProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES,
+    eDeviceQueueInfo2 = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2,
+    eSamplerYcbcrConversionCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO,
+    eSamplerYcbcrConversionInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO,
+    eBindImagePlaneMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO,
+    eImagePlaneMemoryRequirementsInfo = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO,
+    ePhysicalDeviceSamplerYcbcrConversionFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES,
+    eSamplerYcbcrConversionImageFormatProperties = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES,
+    eDescriptorUpdateTemplateCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO,
+    ePhysicalDeviceExternalImageFormatInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
+    eExternalImageFormatProperties = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES,
+    ePhysicalDeviceExternalBufferInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO,
+    eExternalBufferProperties = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES,
+    ePhysicalDeviceIdProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES,
+    eExternalMemoryBufferCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO,
+    eExternalMemoryImageCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
+    eExportMemoryAllocateInfo = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO,
+    ePhysicalDeviceExternalFenceInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO,
+    eExternalFenceProperties = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES,
+    eExportFenceCreateInfo = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO,
+    eExportSemaphoreCreateInfo = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO,
+    ePhysicalDeviceExternalSemaphoreInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO,
+    eExternalSemaphoreProperties = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES,
+    ePhysicalDeviceMaintenance3Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES,
+    eDescriptorSetLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT,
+    ePhysicalDeviceShaderDrawParametersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES,
+    eSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
+    ePresentInfoKHR = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
+    eDeviceGroupPresentCapabilitiesKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR,
+    eImageSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR,
+    eBindImageMemorySwapchainInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR,
+    eAcquireNextImageInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR,
+    eDeviceGroupPresentInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR,
+    eDeviceGroupSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR,
+    eDisplayModeCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR,
+    eDisplaySurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR,
+    eDisplayPresentInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR,
+    eXlibSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR,
+    eXcbSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR,
+    eWaylandSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR,
+    eAndroidSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR,
+    eWin32SurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR,
+    eDebugReportCallbackCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT,
+    ePipelineRasterizationStateRasterizationOrderAMD = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD,
+    eDebugMarkerObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT,
+    eDebugMarkerObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT,
+    eDebugMarkerMarkerInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT,
+    eDedicatedAllocationImageCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV,
+    eDedicatedAllocationBufferCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV,
+    eDedicatedAllocationMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV,
+    ePhysicalDeviceTransformFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT,
+    ePhysicalDeviceTransformFeedbackPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT,
+    ePipelineRasterizationStateStreamCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT,
+    eImageViewHandleInfoNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX,
+    eTextureLodGatherFormatPropertiesAMD = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD,
+    eStreamDescriptorSurfaceCreateInfoGGP = VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP,
+    ePhysicalDeviceCornerSampledImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV,
+    eExternalMemoryImageCreateInfoNV = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV,
+    eExportMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV,
+    eImportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV,
+    eExportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV,
+    eWin32KeyedMutexAcquireReleaseInfoNV = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV,
+    eValidationFlagsEXT = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT,
+    eViSurfaceCreateInfoNN = VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN,
+    ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT,
+    eImageViewAstcDecodeModeEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT,
+    ePhysicalDeviceAstcDecodeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT,
+    eImportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
+    eExportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
+    eMemoryWin32HandlePropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR,
+    eMemoryGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR,
+    eImportMemoryFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
+    eMemoryFdPropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR,
+    eMemoryGetFdInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
+    eWin32KeyedMutexAcquireReleaseInfoKHR = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR,
+    eImportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR,
+    eExportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR,
+    eD3D12FenceSubmitInfoKHR = VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR,
+    eSemaphoreGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR,
+    eImportSemaphoreFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR,
+    eSemaphoreGetFdInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR,
+    ePhysicalDevicePushDescriptorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR,
+    eCommandBufferInheritanceConditionalRenderingInfoEXT = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT,
+    ePhysicalDeviceConditionalRenderingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT,
+    eConditionalRenderingBeginInfoEXT = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT,
+    ePhysicalDeviceShaderFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR,
+    ePresentRegionsKHR = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR,
+    eObjectTableCreateInfoNVX = VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX,
+    eIndirectCommandsLayoutCreateInfoNVX = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX,
+    eCmdProcessCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX,
+    eCmdReserveSpaceForCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX,
+    eDeviceGeneratedCommandsLimitsNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX,
+    eDeviceGeneratedCommandsFeaturesNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX,
+    ePipelineViewportWScalingStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV,
+    eSurfaceCapabilities2EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT,
+    eDisplayPowerInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT,
+    eDeviceEventInfoEXT = VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT,
+    eDisplayEventInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT,
+    eSwapchainCounterCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT,
+    ePresentTimesInfoGOOGLE = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE,
+    ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX,
+    ePipelineViewportSwizzleStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV,
+    ePhysicalDeviceDiscardRectanglePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT,
+    ePipelineDiscardRectangleStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT,
+    ePhysicalDeviceConservativeRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT,
+    ePipelineRasterizationConservativeStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT,
+    ePhysicalDeviceDepthClipEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT,
+    ePipelineRasterizationDepthClipStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT,
+    eHdrMetadataEXT = VK_STRUCTURE_TYPE_HDR_METADATA_EXT,
+    ePhysicalDeviceImagelessFramebufferFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR,
+    eFramebufferAttachmentsCreateInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR,
+    eFramebufferAttachmentImageInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR,
+    eRenderPassAttachmentBeginInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR,
+    eAttachmentDescription2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR,
+    eAttachmentReference2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR,
+    eSubpassDescription2KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR,
+    eSubpassDependency2KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR,
+    eRenderPassCreateInfo2KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR,
+    eSubpassBeginInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR,
+    eSubpassEndInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR,
+    eSharedPresentSurfaceCapabilitiesKHR = VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR,
+    eImportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR,
+    eExportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR,
+    eFenceGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR,
+    eImportFenceFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR,
+    eFenceGetFdInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR,
+    ePhysicalDevicePerformanceQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR,
+    ePhysicalDevicePerformanceQueryPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR,
+    eQueryPoolPerformanceCreateInfoKHR = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR,
+    ePerformanceQuerySubmitInfoKHR = VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR,
+    eAcquireProfilingLockInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR,
+    ePerformanceCounterKHR = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR,
+    ePerformanceCounterDescriptionKHR = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR,
+    ePhysicalDeviceSurfaceInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR,
+    eSurfaceCapabilities2KHR = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR,
+    eSurfaceFormat2KHR = VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR,
+    eDisplayProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR,
+    eDisplayPlaneProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR,
+    eDisplayModeProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR,
+    eDisplayPlaneInfo2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR,
+    eDisplayPlaneCapabilities2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR,
+    eIosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK,
+    eMacosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK,
+    eDebugUtilsObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT,
+    eDebugUtilsObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT,
+    eDebugUtilsLabelEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT,
+    eDebugUtilsMessengerCallbackDataEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT,
+    eDebugUtilsMessengerCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT,
+    eAndroidHardwareBufferUsageANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID,
+    eAndroidHardwareBufferPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID,
+    eAndroidHardwareBufferFormatPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID,
+    eImportAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID,
+    eMemoryGetAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID,
+    eExternalFormatANDROID = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID,
+    ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT,
+    eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT,
+    ePhysicalDeviceInlineUniformBlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT,
+    ePhysicalDeviceInlineUniformBlockPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT,
+    eWriteDescriptorSetInlineUniformBlockEXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT,
+    eDescriptorPoolInlineUniformBlockCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT,
+    eSampleLocationsInfoEXT = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT,
+    eRenderPassSampleLocationsBeginInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT,
+    ePipelineSampleLocationsStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT,
+    ePhysicalDeviceSampleLocationsPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT,
+    eMultisamplePropertiesEXT = VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT,
+    eImageFormatListCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR,
+    ePhysicalDeviceBlendOperationAdvancedFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT,
+    ePhysicalDeviceBlendOperationAdvancedPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT,
+    ePipelineColorBlendAdvancedStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT,
+    ePipelineCoverageToColorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV,
+    ePipelineCoverageModulationStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV,
+    ePhysicalDeviceShaderSmBuiltinsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV,
+    ePhysicalDeviceShaderSmBuiltinsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV,
+    eDrmFormatModifierPropertiesListEXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
+    eDrmFormatModifierPropertiesEXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
+    ePhysicalDeviceImageDrmFormatModifierInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT,
+    eImageDrmFormatModifierListCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT,
+    eImageDrmFormatModifierExplicitCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT,
+    eImageDrmFormatModifierPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
+    eValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT,
+    eShaderModuleValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT,
+    eDescriptorSetLayoutBindingFlagsCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT,
+    ePhysicalDeviceDescriptorIndexingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT,
+    ePhysicalDeviceDescriptorIndexingPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT,
+    eDescriptorSetVariableDescriptorCountAllocateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT,
+    eDescriptorSetVariableDescriptorCountLayoutSupportEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT,
+    ePipelineViewportShadingRateImageStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV,
+    ePhysicalDeviceShadingRateImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV,
+    ePhysicalDeviceShadingRateImagePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV,
+    ePipelineViewportCoarseSampleOrderStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV,
+    eRayTracingPipelineCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV,
+    eAccelerationStructureCreateInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV,
+    eGeometryNV = VK_STRUCTURE_TYPE_GEOMETRY_NV,
+    eGeometryTrianglesNV = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV,
+    eGeometryAabbNV = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV,
+    eBindAccelerationStructureMemoryInfoNV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV,
+    eWriteDescriptorSetAccelerationStructureNV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV,
+    eAccelerationStructureMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV,
+    ePhysicalDeviceRayTracingPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV,
+    eRayTracingShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV,
+    eAccelerationStructureInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV,
+    ePhysicalDeviceRepresentativeFragmentTestFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV,
+    ePipelineRepresentativeFragmentTestStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV,
+    ePhysicalDeviceImageViewImageFormatInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT,
+    eFilterCubicImageViewImageFormatPropertiesEXT = VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT,
+    eDeviceQueueGlobalPriorityCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT,
+    ePhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR,
+    ePhysicalDevice8BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR,
+    eImportMemoryHostPointerInfoEXT = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
+    eMemoryHostPointerPropertiesEXT = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT,
+    ePhysicalDeviceExternalMemoryHostPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT,
+    ePhysicalDeviceShaderAtomicInt64FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR,
+    ePhysicalDeviceShaderClockFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR,
+    ePipelineCompilerControlCreateInfoAMD = VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD,
+    eCalibratedTimestampInfoEXT = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT,
+    ePhysicalDeviceShaderCorePropertiesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD,
+    eDeviceMemoryOverallocationCreateInfoAMD = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD,
+    ePhysicalDeviceVertexAttributeDivisorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT,
+    ePipelineVertexInputDivisorStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT,
+    ePhysicalDeviceVertexAttributeDivisorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT,
+    ePresentFrameTokenGGP = VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP,
+    ePipelineCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT,
+    ePhysicalDeviceDriverPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR,
+    ePhysicalDeviceFloatControlsPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR,
+    ePhysicalDeviceDepthStencilResolvePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR,
+    eSubpassDescriptionDepthStencilResolveKHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR,
+    ePhysicalDeviceComputeShaderDerivativesFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV,
+    ePhysicalDeviceMeshShaderFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV,
+    ePhysicalDeviceMeshShaderPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV,
+    ePhysicalDeviceFragmentShaderBarycentricFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV,
+    ePhysicalDeviceShaderImageFootprintFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV,
+    ePipelineViewportExclusiveScissorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV,
+    ePhysicalDeviceExclusiveScissorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV,
+    eCheckpointDataNV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV,
+    eQueueFamilyCheckpointPropertiesNV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV,
+    ePhysicalDeviceTimelineSemaphoreFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR,
+    ePhysicalDeviceTimelineSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR,
+    eSemaphoreTypeCreateInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR,
+    eTimelineSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR,
+    eSemaphoreWaitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR,
+    eSemaphoreSignalInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR,
+    ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL,
+    eQueryPoolCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL,
+    eInitializePerformanceApiInfoINTEL = VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL,
+    ePerformanceMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL,
+    ePerformanceStreamMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL,
+    ePerformanceOverrideInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL,
+    ePerformanceConfigurationAcquireInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL,
+    ePhysicalDeviceVulkanMemoryModelFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR,
+    ePhysicalDevicePciBusInfoPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT,
+    eDisplayNativeHdrSurfaceCapabilitiesAMD = VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD,
+    eSwapchainDisplayNativeHdrCreateInfoAMD = VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD,
+    eImagepipeSurfaceCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA,
+    eMetalSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT,
+    ePhysicalDeviceFragmentDensityMapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT,
+    ePhysicalDeviceFragmentDensityMapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT,
+    eRenderPassFragmentDensityMapCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT,
+    ePhysicalDeviceScalarBlockLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT,
+    ePhysicalDeviceSubgroupSizeControlPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT,
+    ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT,
+    ePhysicalDeviceSubgroupSizeControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT,
+    ePhysicalDeviceShaderCoreProperties2AMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD,
+    ePhysicalDeviceCoherentMemoryFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD,
+    ePhysicalDeviceMemoryBudgetPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT,
+    ePhysicalDeviceMemoryPriorityFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT,
+    eMemoryPriorityAllocateInfoEXT = VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT,
+    eSurfaceProtectedCapabilitiesKHR = VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR,
+    ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV,
+    ePhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR,
+    eAttachmentReferenceStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR,
+    eAttachmentDescriptionStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR,
+    ePhysicalDeviceBufferDeviceAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT,
+    eBufferDeviceAddressCreateInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT,
+    ePhysicalDeviceToolPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT,
+    eImageStencilUsageCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT,
+    eValidationFeaturesEXT = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT,
+    ePhysicalDeviceCooperativeMatrixFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV,
+    eCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV,
+    ePhysicalDeviceCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV,
+    ePhysicalDeviceCoverageReductionModeFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV,
+    ePipelineCoverageReductionStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV,
+    eFramebufferMixedSamplesCombinationNV = VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV,
+    ePhysicalDeviceFragmentShaderInterlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT,
+    ePhysicalDeviceYcbcrImageArraysFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT,
+    ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR,
+    eSurfaceFullScreenExclusiveInfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT,
+    eSurfaceCapabilitiesFullScreenExclusiveEXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT,
+    eSurfaceFullScreenExclusiveWin32InfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT,
+    eHeadlessSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT,
+    ePhysicalDeviceBufferDeviceAddressFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR,
+    eBufferDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR,
+    eBufferOpaqueCaptureAddressCreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR,
+    eMemoryOpaqueCaptureAddressAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR,
+    eDeviceMemoryOpaqueCaptureAddressInfoKHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR,
+    ePhysicalDeviceLineRasterizationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT,
+    ePipelineRasterizationLineStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT,
+    ePhysicalDeviceLineRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT,
+    ePhysicalDeviceHostQueryResetFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT,
+    ePhysicalDeviceIndexTypeUint8FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT,
+    ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR,
+    ePipelineInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR,
+    ePipelineExecutablePropertiesKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR,
+    ePipelineExecutableInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR,
+    ePipelineExecutableStatisticKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR,
+    ePipelineExecutableInternalRepresentationKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR,
+    ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT,
+    ePhysicalDeviceTexelBufferAlignmentFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT,
+    ePhysicalDeviceTexelBufferAlignmentPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT,
+    ePhysicalDeviceVariablePointerFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES,
+    ePhysicalDeviceShaderDrawParameterFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES,
+    eDebugReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT,
+    eRenderPassMultiviewCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR,
+    ePhysicalDeviceMultiviewFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR,
+    ePhysicalDeviceMultiviewPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR,
+    ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR,
+    ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR,
+    eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR,
+    eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR,
+    ePhysicalDeviceImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR,
+    eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR,
+    ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR,
+    eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR,
+    ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR,
+    eMemoryAllocateFlagsInfoKHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR,
+    eDeviceGroupRenderPassBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR,
+    eDeviceGroupCommandBufferBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR,
+    eDeviceGroupSubmitInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR,
+    eDeviceGroupBindSparseInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR,
+    eBindBufferMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR,
+    eBindImageMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR,
+    ePhysicalDeviceGroupPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR,
+    eDeviceGroupDeviceCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR,
+    ePhysicalDeviceExternalImageFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR,
+    eExternalImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR,
+    ePhysicalDeviceExternalBufferInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR,
+    eExternalBufferPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR,
+    ePhysicalDeviceIdPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR,
+    eExternalMemoryBufferCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR,
+    eExternalMemoryImageCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR,
+    eExportMemoryAllocateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR,
+    ePhysicalDeviceExternalSemaphoreInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR,
+    eExternalSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR,
+    eExportSemaphoreCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR,
+    ePhysicalDeviceFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR,
+    ePhysicalDevice16BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR,
+    eDescriptorUpdateTemplateCreateInfoKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR,
+    ePhysicalDeviceExternalFenceInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR,
+    eExternalFencePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR,
+    eExportFenceCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR,
+    ePhysicalDevicePointClippingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR,
+    eRenderPassInputAttachmentAspectCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR,
+    eImageViewUsageCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR,
+    ePipelineTessellationDomainOriginStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR,
+    ePhysicalDeviceVariablePointerFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR,
+    ePhysicalDeviceVariablePointersFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR,
+    eMemoryDedicatedRequirementsKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR,
+    eMemoryDedicatedAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR,
+    eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR,
+    eImageMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR,
+    eImageSparseMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR,
+    eMemoryRequirements2KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR,
+    eSparseImageMemoryRequirements2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR,
+    eSamplerYcbcrConversionCreateInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR,
+    eSamplerYcbcrConversionInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR,
+    eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR,
+    eImagePlaneMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR,
+    ePhysicalDeviceSamplerYcbcrConversionFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR,
+    eSamplerYcbcrConversionImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR,
+    eBindBufferMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR,
+    eBindImageMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR,
+    ePhysicalDeviceMaintenance3PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR,
+    eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR,
+    ePhysicalDeviceBufferAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT,
+    eBufferDeviceAddressInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT
+  };
+
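+  // NOTE: illustrative sketch, not part of the upstream vulkan.hpp header. Each StructureType
+  // enumerant carries the value of the matching VkStructureType, so it can be cast straight
+  // into the sType field of the corresponding C struct; tagApplicationInfo is hypothetical.
+  inline void tagApplicationInfo( VkApplicationInfo & info )
+  {
+    // static_cast recovers the underlying C enum value, VK_STRUCTURE_TYPE_APPLICATION_INFO here.
+    info.sType = static_cast<VkStructureType>( StructureType::eApplicationInfo );
+  }
+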
+  VULKAN_HPP_INLINE std::string to_string( StructureType value )
+  {
+    switch ( value )
+    {
+      case StructureType::eApplicationInfo : return "ApplicationInfo";
+      case StructureType::eInstanceCreateInfo : return "InstanceCreateInfo";
+      case StructureType::eDeviceQueueCreateInfo : return "DeviceQueueCreateInfo";
+      case StructureType::eDeviceCreateInfo : return "DeviceCreateInfo";
+      case StructureType::eSubmitInfo : return "SubmitInfo";
+      case StructureType::eMemoryAllocateInfo : return "MemoryAllocateInfo";
+      case StructureType::eMappedMemoryRange : return "MappedMemoryRange";
+      case StructureType::eBindSparseInfo : return "BindSparseInfo";
+      case StructureType::eFenceCreateInfo : return "FenceCreateInfo";
+      case StructureType::eSemaphoreCreateInfo : return "SemaphoreCreateInfo";
+      case StructureType::eEventCreateInfo : return "EventCreateInfo";
+      case StructureType::eQueryPoolCreateInfo : return "QueryPoolCreateInfo";
+      case StructureType::eBufferCreateInfo : return "BufferCreateInfo";
+      case StructureType::eBufferViewCreateInfo : return "BufferViewCreateInfo";
+      case StructureType::eImageCreateInfo : return "ImageCreateInfo";
+      case StructureType::eImageViewCreateInfo : return "ImageViewCreateInfo";
+      case StructureType::eShaderModuleCreateInfo : return "ShaderModuleCreateInfo";
+      case StructureType::ePipelineCacheCreateInfo : return "PipelineCacheCreateInfo";
+      case StructureType::ePipelineShaderStageCreateInfo : return "PipelineShaderStageCreateInfo";
+      case StructureType::ePipelineVertexInputStateCreateInfo : return "PipelineVertexInputStateCreateInfo";
+      case StructureType::ePipelineInputAssemblyStateCreateInfo : return "PipelineInputAssemblyStateCreateInfo";
+      case StructureType::ePipelineTessellationStateCreateInfo : return "PipelineTessellationStateCreateInfo";
+      case StructureType::ePipelineViewportStateCreateInfo : return "PipelineViewportStateCreateInfo";
+      case StructureType::ePipelineRasterizationStateCreateInfo : return "PipelineRasterizationStateCreateInfo";
+      case StructureType::ePipelineMultisampleStateCreateInfo : return "PipelineMultisampleStateCreateInfo";
+      case StructureType::ePipelineDepthStencilStateCreateInfo : return "PipelineDepthStencilStateCreateInfo";
+      case StructureType::ePipelineColorBlendStateCreateInfo : return "PipelineColorBlendStateCreateInfo";
+      case StructureType::ePipelineDynamicStateCreateInfo : return "PipelineDynamicStateCreateInfo";
+      case StructureType::eGraphicsPipelineCreateInfo : return "GraphicsPipelineCreateInfo";
+      case StructureType::eComputePipelineCreateInfo : return "ComputePipelineCreateInfo";
+      case StructureType::ePipelineLayoutCreateInfo : return "PipelineLayoutCreateInfo";
+      case StructureType::eSamplerCreateInfo : return "SamplerCreateInfo";
+      case StructureType::eDescriptorSetLayoutCreateInfo : return "DescriptorSetLayoutCreateInfo";
+      case StructureType::eDescriptorPoolCreateInfo : return "DescriptorPoolCreateInfo";
+      case StructureType::eDescriptorSetAllocateInfo : return "DescriptorSetAllocateInfo";
+      case StructureType::eWriteDescriptorSet : return "WriteDescriptorSet";
+      case StructureType::eCopyDescriptorSet : return "CopyDescriptorSet";
+      case StructureType::eFramebufferCreateInfo : return "FramebufferCreateInfo";
+      case StructureType::eRenderPassCreateInfo : return "RenderPassCreateInfo";
+      case StructureType::eCommandPoolCreateInfo : return "CommandPoolCreateInfo";
+      case StructureType::eCommandBufferAllocateInfo : return "CommandBufferAllocateInfo";
+      case StructureType::eCommandBufferInheritanceInfo : return "CommandBufferInheritanceInfo";
+      case StructureType::eCommandBufferBeginInfo : return "CommandBufferBeginInfo";
+      case StructureType::eRenderPassBeginInfo : return "RenderPassBeginInfo";
+      case StructureType::eBufferMemoryBarrier : return "BufferMemoryBarrier";
+      case StructureType::eImageMemoryBarrier : return "ImageMemoryBarrier";
+      case StructureType::eMemoryBarrier : return "MemoryBarrier";
+      case StructureType::eLoaderInstanceCreateInfo : return "LoaderInstanceCreateInfo";
+      case StructureType::eLoaderDeviceCreateInfo : return "LoaderDeviceCreateInfo";
+      case StructureType::ePhysicalDeviceSubgroupProperties : return "PhysicalDeviceSubgroupProperties";
+      case StructureType::eBindBufferMemoryInfo : return "BindBufferMemoryInfo";
+      case StructureType::eBindImageMemoryInfo : return "BindImageMemoryInfo";
+      case StructureType::ePhysicalDevice16BitStorageFeatures : return "PhysicalDevice16BitStorageFeatures";
+      case StructureType::eMemoryDedicatedRequirements : return "MemoryDedicatedRequirements";
+      case StructureType::eMemoryDedicatedAllocateInfo : return "MemoryDedicatedAllocateInfo";
+      case StructureType::eMemoryAllocateFlagsInfo : return "MemoryAllocateFlagsInfo";
+      case StructureType::eDeviceGroupRenderPassBeginInfo : return "DeviceGroupRenderPassBeginInfo";
+      case StructureType::eDeviceGroupCommandBufferBeginInfo : return "DeviceGroupCommandBufferBeginInfo";
+      case StructureType::eDeviceGroupSubmitInfo : return "DeviceGroupSubmitInfo";
+      case StructureType::eDeviceGroupBindSparseInfo : return "DeviceGroupBindSparseInfo";
+      case StructureType::eBindBufferMemoryDeviceGroupInfo : return "BindBufferMemoryDeviceGroupInfo";
+      case StructureType::eBindImageMemoryDeviceGroupInfo : return "BindImageMemoryDeviceGroupInfo";
+      case StructureType::ePhysicalDeviceGroupProperties : return "PhysicalDeviceGroupProperties";
+      case StructureType::eDeviceGroupDeviceCreateInfo : return "DeviceGroupDeviceCreateInfo";
+      case StructureType::eBufferMemoryRequirementsInfo2 : return "BufferMemoryRequirementsInfo2";
+      case StructureType::eImageMemoryRequirementsInfo2 : return "ImageMemoryRequirementsInfo2";
+      case StructureType::eImageSparseMemoryRequirementsInfo2 : return "ImageSparseMemoryRequirementsInfo2";
+      case StructureType::eMemoryRequirements2 : return "MemoryRequirements2";
+      case StructureType::eSparseImageMemoryRequirements2 : return "SparseImageMemoryRequirements2";
+      case StructureType::ePhysicalDeviceFeatures2 : return "PhysicalDeviceFeatures2";
+      case StructureType::ePhysicalDeviceProperties2 : return "PhysicalDeviceProperties2";
+      case StructureType::eFormatProperties2 : return "FormatProperties2";
+      case StructureType::eImageFormatProperties2 : return "ImageFormatProperties2";
+      case StructureType::ePhysicalDeviceImageFormatInfo2 : return "PhysicalDeviceImageFormatInfo2";
+      case StructureType::eQueueFamilyProperties2 : return "QueueFamilyProperties2";
+      case StructureType::ePhysicalDeviceMemoryProperties2 : return "PhysicalDeviceMemoryProperties2";
+      case StructureType::eSparseImageFormatProperties2 : return "SparseImageFormatProperties2";
+      case StructureType::ePhysicalDeviceSparseImageFormatInfo2 : return "PhysicalDeviceSparseImageFormatInfo2";
+      case StructureType::ePhysicalDevicePointClippingProperties : return "PhysicalDevicePointClippingProperties";
+      case StructureType::eRenderPassInputAttachmentAspectCreateInfo : return "RenderPassInputAttachmentAspectCreateInfo";
+      case StructureType::eImageViewUsageCreateInfo : return "ImageViewUsageCreateInfo";
+      case StructureType::ePipelineTessellationDomainOriginStateCreateInfo : return "PipelineTessellationDomainOriginStateCreateInfo";
+      case StructureType::eRenderPassMultiviewCreateInfo : return "RenderPassMultiviewCreateInfo";
+      case StructureType::ePhysicalDeviceMultiviewFeatures : return "PhysicalDeviceMultiviewFeatures";
+      case StructureType::ePhysicalDeviceMultiviewProperties : return "PhysicalDeviceMultiviewProperties";
+      case StructureType::ePhysicalDeviceVariablePointersFeatures : return "PhysicalDeviceVariablePointersFeatures";
+      case StructureType::eProtectedSubmitInfo : return "ProtectedSubmitInfo";
+      case StructureType::ePhysicalDeviceProtectedMemoryFeatures : return "PhysicalDeviceProtectedMemoryFeatures";
+      case StructureType::ePhysicalDeviceProtectedMemoryProperties : return "PhysicalDeviceProtectedMemoryProperties";
+      case StructureType::eDeviceQueueInfo2 : return "DeviceQueueInfo2";
+      case StructureType::eSamplerYcbcrConversionCreateInfo : return "SamplerYcbcrConversionCreateInfo";
+      case StructureType::eSamplerYcbcrConversionInfo : return "SamplerYcbcrConversionInfo";
+      case StructureType::eBindImagePlaneMemoryInfo : return "BindImagePlaneMemoryInfo";
+      case StructureType::eImagePlaneMemoryRequirementsInfo : return "ImagePlaneMemoryRequirementsInfo";
+      case StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures : return "PhysicalDeviceSamplerYcbcrConversionFeatures";
+      case StructureType::eSamplerYcbcrConversionImageFormatProperties : return "SamplerYcbcrConversionImageFormatProperties";
+      case StructureType::eDescriptorUpdateTemplateCreateInfo : return "DescriptorUpdateTemplateCreateInfo";
+      case StructureType::ePhysicalDeviceExternalImageFormatInfo : return "PhysicalDeviceExternalImageFormatInfo";
+      case StructureType::eExternalImageFormatProperties : return "ExternalImageFormatProperties";
+      case StructureType::ePhysicalDeviceExternalBufferInfo : return "PhysicalDeviceExternalBufferInfo";
+      case StructureType::eExternalBufferProperties : return "ExternalBufferProperties";
+      case StructureType::ePhysicalDeviceIdProperties : return "PhysicalDeviceIdProperties";
+      case StructureType::eExternalMemoryBufferCreateInfo : return "ExternalMemoryBufferCreateInfo";
+      case StructureType::eExternalMemoryImageCreateInfo : return "ExternalMemoryImageCreateInfo";
+      case StructureType::eExportMemoryAllocateInfo : return "ExportMemoryAllocateInfo";
+      case StructureType::ePhysicalDeviceExternalFenceInfo : return "PhysicalDeviceExternalFenceInfo";
+      case StructureType::eExternalFenceProperties : return "ExternalFenceProperties";
+      case StructureType::eExportFenceCreateInfo : return "ExportFenceCreateInfo";
+      case StructureType::eExportSemaphoreCreateInfo : return "ExportSemaphoreCreateInfo";
+      case StructureType::ePhysicalDeviceExternalSemaphoreInfo : return "PhysicalDeviceExternalSemaphoreInfo";
+      case StructureType::eExternalSemaphoreProperties : return "ExternalSemaphoreProperties";
+      case StructureType::ePhysicalDeviceMaintenance3Properties : return "PhysicalDeviceMaintenance3Properties";
+      case StructureType::eDescriptorSetLayoutSupport : return "DescriptorSetLayoutSupport";
+      case StructureType::ePhysicalDeviceShaderDrawParametersFeatures : return "PhysicalDeviceShaderDrawParametersFeatures";
+      case StructureType::eSwapchainCreateInfoKHR : return "SwapchainCreateInfoKHR";
+      case StructureType::ePresentInfoKHR : return "PresentInfoKHR";
+      case StructureType::eDeviceGroupPresentCapabilitiesKHR : return "DeviceGroupPresentCapabilitiesKHR";
+      case StructureType::eImageSwapchainCreateInfoKHR : return "ImageSwapchainCreateInfoKHR";
+      case StructureType::eBindImageMemorySwapchainInfoKHR : return "BindImageMemorySwapchainInfoKHR";
+      case StructureType::eAcquireNextImageInfoKHR : return "AcquireNextImageInfoKHR";
+      case StructureType::eDeviceGroupPresentInfoKHR : return "DeviceGroupPresentInfoKHR";
+      case StructureType::eDeviceGroupSwapchainCreateInfoKHR : return "DeviceGroupSwapchainCreateInfoKHR";
+      case StructureType::eDisplayModeCreateInfoKHR : return "DisplayModeCreateInfoKHR";
+      case StructureType::eDisplaySurfaceCreateInfoKHR : return "DisplaySurfaceCreateInfoKHR";
+      case StructureType::eDisplayPresentInfoKHR : return "DisplayPresentInfoKHR";
+      case StructureType::eXlibSurfaceCreateInfoKHR : return "XlibSurfaceCreateInfoKHR";
+      case StructureType::eXcbSurfaceCreateInfoKHR : return "XcbSurfaceCreateInfoKHR";
+      case StructureType::eWaylandSurfaceCreateInfoKHR : return "WaylandSurfaceCreateInfoKHR";
+      case StructureType::eAndroidSurfaceCreateInfoKHR : return "AndroidSurfaceCreateInfoKHR";
+      case StructureType::eWin32SurfaceCreateInfoKHR : return "Win32SurfaceCreateInfoKHR";
+      case StructureType::eDebugReportCallbackCreateInfoEXT : return "DebugReportCallbackCreateInfoEXT";
+      case StructureType::ePipelineRasterizationStateRasterizationOrderAMD : return "PipelineRasterizationStateRasterizationOrderAMD";
+      case StructureType::eDebugMarkerObjectNameInfoEXT : return "DebugMarkerObjectNameInfoEXT";
+      case StructureType::eDebugMarkerObjectTagInfoEXT : return "DebugMarkerObjectTagInfoEXT";
+      case StructureType::eDebugMarkerMarkerInfoEXT : return "DebugMarkerMarkerInfoEXT";
+      case StructureType::eDedicatedAllocationImageCreateInfoNV : return "DedicatedAllocationImageCreateInfoNV";
+      case StructureType::eDedicatedAllocationBufferCreateInfoNV : return "DedicatedAllocationBufferCreateInfoNV";
+      case StructureType::eDedicatedAllocationMemoryAllocateInfoNV : return "DedicatedAllocationMemoryAllocateInfoNV";
+      case StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT : return "PhysicalDeviceTransformFeedbackFeaturesEXT";
+      case StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT : return "PhysicalDeviceTransformFeedbackPropertiesEXT";
+      case StructureType::ePipelineRasterizationStateStreamCreateInfoEXT : return "PipelineRasterizationStateStreamCreateInfoEXT";
+      case StructureType::eImageViewHandleInfoNVX : return "ImageViewHandleInfoNVX";
+      case StructureType::eTextureLodGatherFormatPropertiesAMD : return "TextureLodGatherFormatPropertiesAMD";
+      case StructureType::eStreamDescriptorSurfaceCreateInfoGGP : return "StreamDescriptorSurfaceCreateInfoGGP";
+      case StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV : return "PhysicalDeviceCornerSampledImageFeaturesNV";
+      case StructureType::eExternalMemoryImageCreateInfoNV : return "ExternalMemoryImageCreateInfoNV";
+      case StructureType::eExportMemoryAllocateInfoNV : return "ExportMemoryAllocateInfoNV";
+      case StructureType::eImportMemoryWin32HandleInfoNV : return "ImportMemoryWin32HandleInfoNV";
+      case StructureType::eExportMemoryWin32HandleInfoNV : return "ExportMemoryWin32HandleInfoNV";
+      case StructureType::eWin32KeyedMutexAcquireReleaseInfoNV : return "Win32KeyedMutexAcquireReleaseInfoNV";
+      case StructureType::eValidationFlagsEXT : return "ValidationFlagsEXT";
+      case StructureType::eViSurfaceCreateInfoNN : return "ViSurfaceCreateInfoNN";
+      case StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT : return "PhysicalDeviceTextureCompressionAstcHdrFeaturesEXT";
+      case StructureType::eImageViewAstcDecodeModeEXT : return "ImageViewAstcDecodeModeEXT";
+      case StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT : return "PhysicalDeviceAstcDecodeFeaturesEXT";
+      case StructureType::eImportMemoryWin32HandleInfoKHR : return "ImportMemoryWin32HandleInfoKHR";
+      case StructureType::eExportMemoryWin32HandleInfoKHR : return "ExportMemoryWin32HandleInfoKHR";
+      case StructureType::eMemoryWin32HandlePropertiesKHR : return "MemoryWin32HandlePropertiesKHR";
+      case StructureType::eMemoryGetWin32HandleInfoKHR : return "MemoryGetWin32HandleInfoKHR";
+      case StructureType::eImportMemoryFdInfoKHR : return "ImportMemoryFdInfoKHR";
+      case StructureType::eMemoryFdPropertiesKHR : return "MemoryFdPropertiesKHR";
+      case StructureType::eMemoryGetFdInfoKHR : return "MemoryGetFdInfoKHR";
+      case StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR : return "Win32KeyedMutexAcquireReleaseInfoKHR";
+      case StructureType::eImportSemaphoreWin32HandleInfoKHR : return "ImportSemaphoreWin32HandleInfoKHR";
+      case StructureType::eExportSemaphoreWin32HandleInfoKHR : return "ExportSemaphoreWin32HandleInfoKHR";
+      case StructureType::eD3D12FenceSubmitInfoKHR : return "D3D12FenceSubmitInfoKHR";
+      case StructureType::eSemaphoreGetWin32HandleInfoKHR : return "SemaphoreGetWin32HandleInfoKHR";
+      case StructureType::eImportSemaphoreFdInfoKHR : return "ImportSemaphoreFdInfoKHR";
+      case StructureType::eSemaphoreGetFdInfoKHR : return "SemaphoreGetFdInfoKHR";
+      case StructureType::ePhysicalDevicePushDescriptorPropertiesKHR : return "PhysicalDevicePushDescriptorPropertiesKHR";
+      case StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT : return "CommandBufferInheritanceConditionalRenderingInfoEXT";
+      case StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT : return "PhysicalDeviceConditionalRenderingFeaturesEXT";
+      case StructureType::eConditionalRenderingBeginInfoEXT : return "ConditionalRenderingBeginInfoEXT";
+      case StructureType::ePhysicalDeviceShaderFloat16Int8FeaturesKHR : return "PhysicalDeviceShaderFloat16Int8FeaturesKHR";
+      case StructureType::ePresentRegionsKHR : return "PresentRegionsKHR";
+      case StructureType::eObjectTableCreateInfoNVX : return "ObjectTableCreateInfoNVX";
+      case StructureType::eIndirectCommandsLayoutCreateInfoNVX : return "IndirectCommandsLayoutCreateInfoNVX";
+      case StructureType::eCmdProcessCommandsInfoNVX : return "CmdProcessCommandsInfoNVX";
+      case StructureType::eCmdReserveSpaceForCommandsInfoNVX : return "CmdReserveSpaceForCommandsInfoNVX";
+      case StructureType::eDeviceGeneratedCommandsLimitsNVX : return "DeviceGeneratedCommandsLimitsNVX";
+      case StructureType::eDeviceGeneratedCommandsFeaturesNVX : return "DeviceGeneratedCommandsFeaturesNVX";
+      case StructureType::ePipelineViewportWScalingStateCreateInfoNV : return "PipelineViewportWScalingStateCreateInfoNV";
+      case StructureType::eSurfaceCapabilities2EXT : return "SurfaceCapabilities2EXT";
+      case StructureType::eDisplayPowerInfoEXT : return "DisplayPowerInfoEXT";
+      case StructureType::eDeviceEventInfoEXT : return "DeviceEventInfoEXT";
+      case StructureType::eDisplayEventInfoEXT : return "DisplayEventInfoEXT";
+      case StructureType::eSwapchainCounterCreateInfoEXT : return "SwapchainCounterCreateInfoEXT";
+      case StructureType::ePresentTimesInfoGOOGLE : return "PresentTimesInfoGOOGLE";
+      case StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX : return "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX";
+      case StructureType::ePipelineViewportSwizzleStateCreateInfoNV : return "PipelineViewportSwizzleStateCreateInfoNV";
+      case StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT : return "PhysicalDeviceDiscardRectanglePropertiesEXT";
+      case StructureType::ePipelineDiscardRectangleStateCreateInfoEXT : return "PipelineDiscardRectangleStateCreateInfoEXT";
+      case StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT : return "PhysicalDeviceConservativeRasterizationPropertiesEXT";
+      case StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT : return "PipelineRasterizationConservativeStateCreateInfoEXT";
+      case StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT : return "PhysicalDeviceDepthClipEnableFeaturesEXT";
+      case StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT : return "PipelineRasterizationDepthClipStateCreateInfoEXT";
+      case StructureType::eHdrMetadataEXT : return "HdrMetadataEXT";
+      case StructureType::ePhysicalDeviceImagelessFramebufferFeaturesKHR : return "PhysicalDeviceImagelessFramebufferFeaturesKHR";
+      case StructureType::eFramebufferAttachmentsCreateInfoKHR : return "FramebufferAttachmentsCreateInfoKHR";
+      case StructureType::eFramebufferAttachmentImageInfoKHR : return "FramebufferAttachmentImageInfoKHR";
+      case StructureType::eRenderPassAttachmentBeginInfoKHR : return "RenderPassAttachmentBeginInfoKHR";
+      case StructureType::eAttachmentDescription2KHR : return "AttachmentDescription2KHR";
+      case StructureType::eAttachmentReference2KHR : return "AttachmentReference2KHR";
+      case StructureType::eSubpassDescription2KHR : return "SubpassDescription2KHR";
+      case StructureType::eSubpassDependency2KHR : return "SubpassDependency2KHR";
+      case StructureType::eRenderPassCreateInfo2KHR : return "RenderPassCreateInfo2KHR";
+      case StructureType::eSubpassBeginInfoKHR : return "SubpassBeginInfoKHR";
+      case StructureType::eSubpassEndInfoKHR : return "SubpassEndInfoKHR";
+      case StructureType::eSharedPresentSurfaceCapabilitiesKHR : return "SharedPresentSurfaceCapabilitiesKHR";
+      case StructureType::eImportFenceWin32HandleInfoKHR : return "ImportFenceWin32HandleInfoKHR";
+      case StructureType::eExportFenceWin32HandleInfoKHR : return "ExportFenceWin32HandleInfoKHR";
+      case StructureType::eFenceGetWin32HandleInfoKHR : return "FenceGetWin32HandleInfoKHR";
+      case StructureType::eImportFenceFdInfoKHR : return "ImportFenceFdInfoKHR";
+      case StructureType::eFenceGetFdInfoKHR : return "FenceGetFdInfoKHR";
+      case StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR : return "PhysicalDevicePerformanceQueryFeaturesKHR";
+      case StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR : return "PhysicalDevicePerformanceQueryPropertiesKHR";
+      case StructureType::eQueryPoolPerformanceCreateInfoKHR : return "QueryPoolPerformanceCreateInfoKHR";
+      case StructureType::ePerformanceQuerySubmitInfoKHR : return "PerformanceQuerySubmitInfoKHR";
+      case StructureType::eAcquireProfilingLockInfoKHR : return "AcquireProfilingLockInfoKHR";
+      case StructureType::ePerformanceCounterKHR : return "PerformanceCounterKHR";
+      case StructureType::ePerformanceCounterDescriptionKHR : return "PerformanceCounterDescriptionKHR";
+      case StructureType::ePhysicalDeviceSurfaceInfo2KHR : return "PhysicalDeviceSurfaceInfo2KHR";
+      case StructureType::eSurfaceCapabilities2KHR : return "SurfaceCapabilities2KHR";
+      case StructureType::eSurfaceFormat2KHR : return "SurfaceFormat2KHR";
+      case StructureType::eDisplayProperties2KHR : return "DisplayProperties2KHR";
+      case StructureType::eDisplayPlaneProperties2KHR : return "DisplayPlaneProperties2KHR";
+      case StructureType::eDisplayModeProperties2KHR : return "DisplayModeProperties2KHR";
+      case StructureType::eDisplayPlaneInfo2KHR : return "DisplayPlaneInfo2KHR";
+      case StructureType::eDisplayPlaneCapabilities2KHR : return "DisplayPlaneCapabilities2KHR";
+      case StructureType::eIosSurfaceCreateInfoMVK : return "IosSurfaceCreateInfoMVK";
+      case StructureType::eMacosSurfaceCreateInfoMVK : return "MacosSurfaceCreateInfoMVK";
+      case StructureType::eDebugUtilsObjectNameInfoEXT : return "DebugUtilsObjectNameInfoEXT";
+      case StructureType::eDebugUtilsObjectTagInfoEXT : return "DebugUtilsObjectTagInfoEXT";
+      case StructureType::eDebugUtilsLabelEXT : return "DebugUtilsLabelEXT";
+      case StructureType::eDebugUtilsMessengerCallbackDataEXT : return "DebugUtilsMessengerCallbackDataEXT";
+      case StructureType::eDebugUtilsMessengerCreateInfoEXT : return "DebugUtilsMessengerCreateInfoEXT";
+      case StructureType::eAndroidHardwareBufferUsageANDROID : return "AndroidHardwareBufferUsageANDROID";
+      case StructureType::eAndroidHardwareBufferPropertiesANDROID : return "AndroidHardwareBufferPropertiesANDROID";
+      case StructureType::eAndroidHardwareBufferFormatPropertiesANDROID : return "AndroidHardwareBufferFormatPropertiesANDROID";
+      case StructureType::eImportAndroidHardwareBufferInfoANDROID : return "ImportAndroidHardwareBufferInfoANDROID";
+      case StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID : return "MemoryGetAndroidHardwareBufferInfoANDROID";
+      case StructureType::eExternalFormatANDROID : return "ExternalFormatANDROID";
+      case StructureType::ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT : return "PhysicalDeviceSamplerFilterMinmaxPropertiesEXT";
+      case StructureType::eSamplerReductionModeCreateInfoEXT : return "SamplerReductionModeCreateInfoEXT";
+      case StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT : return "PhysicalDeviceInlineUniformBlockFeaturesEXT";
+      case StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT : return "PhysicalDeviceInlineUniformBlockPropertiesEXT";
+      case StructureType::eWriteDescriptorSetInlineUniformBlockEXT : return "WriteDescriptorSetInlineUniformBlockEXT";
+      case StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT : return "DescriptorPoolInlineUniformBlockCreateInfoEXT";
+      case StructureType::eSampleLocationsInfoEXT : return "SampleLocationsInfoEXT";
+      case StructureType::eRenderPassSampleLocationsBeginInfoEXT : return "RenderPassSampleLocationsBeginInfoEXT";
+      case StructureType::ePipelineSampleLocationsStateCreateInfoEXT : return "PipelineSampleLocationsStateCreateInfoEXT";
+      case StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT : return "PhysicalDeviceSampleLocationsPropertiesEXT";
+      case StructureType::eMultisamplePropertiesEXT : return "MultisamplePropertiesEXT";
+      case StructureType::eImageFormatListCreateInfoKHR : return "ImageFormatListCreateInfoKHR";
+      case StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT : return "PhysicalDeviceBlendOperationAdvancedFeaturesEXT";
+      case StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT : return "PhysicalDeviceBlendOperationAdvancedPropertiesEXT";
+      case StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT : return "PipelineColorBlendAdvancedStateCreateInfoEXT";
+      case StructureType::ePipelineCoverageToColorStateCreateInfoNV : return "PipelineCoverageToColorStateCreateInfoNV";
+      case StructureType::ePipelineCoverageModulationStateCreateInfoNV : return "PipelineCoverageModulationStateCreateInfoNV";
+      case StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV : return "PhysicalDeviceShaderSmBuiltinsFeaturesNV";
+      case StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV : return "PhysicalDeviceShaderSmBuiltinsPropertiesNV";
+      case StructureType::eDrmFormatModifierPropertiesListEXT : return "DrmFormatModifierPropertiesListEXT";
+      case StructureType::eDrmFormatModifierPropertiesEXT : return "DrmFormatModifierPropertiesEXT";
+      case StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT : return "PhysicalDeviceImageDrmFormatModifierInfoEXT";
+      case StructureType::eImageDrmFormatModifierListCreateInfoEXT : return "ImageDrmFormatModifierListCreateInfoEXT";
+      case StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT : return "ImageDrmFormatModifierExplicitCreateInfoEXT";
+      case StructureType::eImageDrmFormatModifierPropertiesEXT : return "ImageDrmFormatModifierPropertiesEXT";
+      case StructureType::eValidationCacheCreateInfoEXT : return "ValidationCacheCreateInfoEXT";
+      case StructureType::eShaderModuleValidationCacheCreateInfoEXT : return "ShaderModuleValidationCacheCreateInfoEXT";
+      case StructureType::eDescriptorSetLayoutBindingFlagsCreateInfoEXT : return "DescriptorSetLayoutBindingFlagsCreateInfoEXT";
+      case StructureType::ePhysicalDeviceDescriptorIndexingFeaturesEXT : return "PhysicalDeviceDescriptorIndexingFeaturesEXT";
+      case StructureType::ePhysicalDeviceDescriptorIndexingPropertiesEXT : return "PhysicalDeviceDescriptorIndexingPropertiesEXT";
+      case StructureType::eDescriptorSetVariableDescriptorCountAllocateInfoEXT : return "DescriptorSetVariableDescriptorCountAllocateInfoEXT";
+      case StructureType::eDescriptorSetVariableDescriptorCountLayoutSupportEXT : return "DescriptorSetVariableDescriptorCountLayoutSupportEXT";
+      case StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV : return "PipelineViewportShadingRateImageStateCreateInfoNV";
+      case StructureType::ePhysicalDeviceShadingRateImageFeaturesNV : return "PhysicalDeviceShadingRateImageFeaturesNV";
+      case StructureType::ePhysicalDeviceShadingRateImagePropertiesNV : return "PhysicalDeviceShadingRateImagePropertiesNV";
+      case StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV : return "PipelineViewportCoarseSampleOrderStateCreateInfoNV";
+      case StructureType::eRayTracingPipelineCreateInfoNV : return "RayTracingPipelineCreateInfoNV";
+      case StructureType::eAccelerationStructureCreateInfoNV : return "AccelerationStructureCreateInfoNV";
+      case StructureType::eGeometryNV : return "GeometryNV";
+      case StructureType::eGeometryTrianglesNV : return "GeometryTrianglesNV";
+      case StructureType::eGeometryAabbNV : return "GeometryAabbNV";
+      case StructureType::eBindAccelerationStructureMemoryInfoNV : return "BindAccelerationStructureMemoryInfoNV";
+      case StructureType::eWriteDescriptorSetAccelerationStructureNV : return "WriteDescriptorSetAccelerationStructureNV";
+      case StructureType::eAccelerationStructureMemoryRequirementsInfoNV : return "AccelerationStructureMemoryRequirementsInfoNV";
+      case StructureType::ePhysicalDeviceRayTracingPropertiesNV : return "PhysicalDeviceRayTracingPropertiesNV";
+      case StructureType::eRayTracingShaderGroupCreateInfoNV : return "RayTracingShaderGroupCreateInfoNV";
+      case StructureType::eAccelerationStructureInfoNV : return "AccelerationStructureInfoNV";
+      case StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV : return "PhysicalDeviceRepresentativeFragmentTestFeaturesNV";
+      case StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV : return "PipelineRepresentativeFragmentTestStateCreateInfoNV";
+      case StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT : return "PhysicalDeviceImageViewImageFormatInfoEXT";
+      case StructureType::eFilterCubicImageViewImageFormatPropertiesEXT : return "FilterCubicImageViewImageFormatPropertiesEXT";
+      case StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT : return "DeviceQueueGlobalPriorityCreateInfoEXT";
+      case StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR : return "PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR";
+      case StructureType::ePhysicalDevice8BitStorageFeaturesKHR : return "PhysicalDevice8BitStorageFeaturesKHR";
+      case StructureType::eImportMemoryHostPointerInfoEXT : return "ImportMemoryHostPointerInfoEXT";
+      case StructureType::eMemoryHostPointerPropertiesEXT : return "MemoryHostPointerPropertiesEXT";
+      case StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT : return "PhysicalDeviceExternalMemoryHostPropertiesEXT";
+      case StructureType::ePhysicalDeviceShaderAtomicInt64FeaturesKHR : return "PhysicalDeviceShaderAtomicInt64FeaturesKHR";
+      case StructureType::ePhysicalDeviceShaderClockFeaturesKHR : return "PhysicalDeviceShaderClockFeaturesKHR";
+      case StructureType::ePipelineCompilerControlCreateInfoAMD : return "PipelineCompilerControlCreateInfoAMD";
+      case StructureType::eCalibratedTimestampInfoEXT : return "CalibratedTimestampInfoEXT";
+      case StructureType::ePhysicalDeviceShaderCorePropertiesAMD : return "PhysicalDeviceShaderCorePropertiesAMD";
+      case StructureType::eDeviceMemoryOverallocationCreateInfoAMD : return "DeviceMemoryOverallocationCreateInfoAMD";
+      case StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT : return "PhysicalDeviceVertexAttributeDivisorPropertiesEXT";
+      case StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT : return "PipelineVertexInputDivisorStateCreateInfoEXT";
+      case StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT : return "PhysicalDeviceVertexAttributeDivisorFeaturesEXT";
+      case StructureType::ePresentFrameTokenGGP : return "PresentFrameTokenGGP";
+      case StructureType::ePipelineCreationFeedbackCreateInfoEXT : return "PipelineCreationFeedbackCreateInfoEXT";
+      case StructureType::ePhysicalDeviceDriverPropertiesKHR : return "PhysicalDeviceDriverPropertiesKHR";
+      case StructureType::ePhysicalDeviceFloatControlsPropertiesKHR : return "PhysicalDeviceFloatControlsPropertiesKHR";
+      case StructureType::ePhysicalDeviceDepthStencilResolvePropertiesKHR : return "PhysicalDeviceDepthStencilResolvePropertiesKHR";
+      case StructureType::eSubpassDescriptionDepthStencilResolveKHR : return "SubpassDescriptionDepthStencilResolveKHR";
+      case StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV : return "PhysicalDeviceComputeShaderDerivativesFeaturesNV";
+      case StructureType::ePhysicalDeviceMeshShaderFeaturesNV : return "PhysicalDeviceMeshShaderFeaturesNV";
+      case StructureType::ePhysicalDeviceMeshShaderPropertiesNV : return "PhysicalDeviceMeshShaderPropertiesNV";
+      case StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV : return "PhysicalDeviceFragmentShaderBarycentricFeaturesNV";
+      case StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV : return "PhysicalDeviceShaderImageFootprintFeaturesNV";
+      case StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV : return "PipelineViewportExclusiveScissorStateCreateInfoNV";
+      case StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV : return "PhysicalDeviceExclusiveScissorFeaturesNV";
+      case StructureType::eCheckpointDataNV : return "CheckpointDataNV";
+      case StructureType::eQueueFamilyCheckpointPropertiesNV : return "QueueFamilyCheckpointPropertiesNV";
+      case StructureType::ePhysicalDeviceTimelineSemaphoreFeaturesKHR : return "PhysicalDeviceTimelineSemaphoreFeaturesKHR";
+      case StructureType::ePhysicalDeviceTimelineSemaphorePropertiesKHR : return "PhysicalDeviceTimelineSemaphorePropertiesKHR";
+      case StructureType::eSemaphoreTypeCreateInfoKHR : return "SemaphoreTypeCreateInfoKHR";
+      case StructureType::eTimelineSemaphoreSubmitInfoKHR : return "TimelineSemaphoreSubmitInfoKHR";
+      case StructureType::eSemaphoreWaitInfoKHR : return "SemaphoreWaitInfoKHR";
+      case StructureType::eSemaphoreSignalInfoKHR : return "SemaphoreSignalInfoKHR";
+      case StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL : return "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL";
+      case StructureType::eQueryPoolCreateInfoINTEL : return "QueryPoolCreateInfoINTEL";
+      case StructureType::eInitializePerformanceApiInfoINTEL : return "InitializePerformanceApiInfoINTEL";
+      case StructureType::ePerformanceMarkerInfoINTEL : return "PerformanceMarkerInfoINTEL";
+      case StructureType::ePerformanceStreamMarkerInfoINTEL : return "PerformanceStreamMarkerInfoINTEL";
+      case StructureType::ePerformanceOverrideInfoINTEL : return "PerformanceOverrideInfoINTEL";
+      case StructureType::ePerformanceConfigurationAcquireInfoINTEL : return "PerformanceConfigurationAcquireInfoINTEL";
+      case StructureType::ePhysicalDeviceVulkanMemoryModelFeaturesKHR : return "PhysicalDeviceVulkanMemoryModelFeaturesKHR";
+      case StructureType::ePhysicalDevicePciBusInfoPropertiesEXT : return "PhysicalDevicePciBusInfoPropertiesEXT";
+      case StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD : return "DisplayNativeHdrSurfaceCapabilitiesAMD";
+      case StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD : return "SwapchainDisplayNativeHdrCreateInfoAMD";
+      case StructureType::eImagepipeSurfaceCreateInfoFUCHSIA : return "ImagepipeSurfaceCreateInfoFUCHSIA";
+      case StructureType::eMetalSurfaceCreateInfoEXT : return "MetalSurfaceCreateInfoEXT";
+      case StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT : return "PhysicalDeviceFragmentDensityMapFeaturesEXT";
+      case StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT : return "PhysicalDeviceFragmentDensityMapPropertiesEXT";
+      case StructureType::eRenderPassFragmentDensityMapCreateInfoEXT : return "RenderPassFragmentDensityMapCreateInfoEXT";
+      case StructureType::ePhysicalDeviceScalarBlockLayoutFeaturesEXT : return "PhysicalDeviceScalarBlockLayoutFeaturesEXT";
+      case StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT : return "PhysicalDeviceSubgroupSizeControlPropertiesEXT";
+      case StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT : return "PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT";
+      case StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT : return "PhysicalDeviceSubgroupSizeControlFeaturesEXT";
+      case StructureType::ePhysicalDeviceShaderCoreProperties2AMD : return "PhysicalDeviceShaderCoreProperties2AMD";
+      case StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD : return "PhysicalDeviceCoherentMemoryFeaturesAMD";
+      case StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT : return "PhysicalDeviceMemoryBudgetPropertiesEXT";
+      case StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT : return "PhysicalDeviceMemoryPriorityFeaturesEXT";
+      case StructureType::eMemoryPriorityAllocateInfoEXT : return "MemoryPriorityAllocateInfoEXT";
+      case StructureType::eSurfaceProtectedCapabilitiesKHR : return "SurfaceProtectedCapabilitiesKHR";
+      case StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV : return "PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV";
+      case StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR : return "PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR";
+      case StructureType::eAttachmentReferenceStencilLayoutKHR : return "AttachmentReferenceStencilLayoutKHR";
+      case StructureType::eAttachmentDescriptionStencilLayoutKHR : return "AttachmentDescriptionStencilLayoutKHR";
+      case StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT : return "PhysicalDeviceBufferDeviceAddressFeaturesEXT";
+      case StructureType::eBufferDeviceAddressCreateInfoEXT : return "BufferDeviceAddressCreateInfoEXT";
+      case StructureType::ePhysicalDeviceToolPropertiesEXT : return "PhysicalDeviceToolPropertiesEXT";
+      case StructureType::eImageStencilUsageCreateInfoEXT : return "ImageStencilUsageCreateInfoEXT";
+      case StructureType::eValidationFeaturesEXT : return "ValidationFeaturesEXT";
+      case StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV : return "PhysicalDeviceCooperativeMatrixFeaturesNV";
+      case StructureType::eCooperativeMatrixPropertiesNV : return "CooperativeMatrixPropertiesNV";
+      case StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV : return "PhysicalDeviceCooperativeMatrixPropertiesNV";
+      case StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV : return "PhysicalDeviceCoverageReductionModeFeaturesNV";
+      case StructureType::ePipelineCoverageReductionStateCreateInfoNV : return "PipelineCoverageReductionStateCreateInfoNV";
+      case StructureType::eFramebufferMixedSamplesCombinationNV : return "FramebufferMixedSamplesCombinationNV";
+      case StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT : return "PhysicalDeviceFragmentShaderInterlockFeaturesEXT";
+      case StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT : return "PhysicalDeviceYcbcrImageArraysFeaturesEXT";
+      case StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR : return "PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR";
+      case StructureType::eSurfaceFullScreenExclusiveInfoEXT : return "SurfaceFullScreenExclusiveInfoEXT";
+      case StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT : return "SurfaceCapabilitiesFullScreenExclusiveEXT";
+      case StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT : return "SurfaceFullScreenExclusiveWin32InfoEXT";
+      case StructureType::eHeadlessSurfaceCreateInfoEXT : return "HeadlessSurfaceCreateInfoEXT";
+      case StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesKHR : return "PhysicalDeviceBufferDeviceAddressFeaturesKHR";
+      case StructureType::eBufferDeviceAddressInfoKHR : return "BufferDeviceAddressInfoKHR";
+      case StructureType::eBufferOpaqueCaptureAddressCreateInfoKHR : return "BufferOpaqueCaptureAddressCreateInfoKHR";
+      case StructureType::eMemoryOpaqueCaptureAddressAllocateInfoKHR : return "MemoryOpaqueCaptureAddressAllocateInfoKHR";
+      case StructureType::eDeviceMemoryOpaqueCaptureAddressInfoKHR : return "DeviceMemoryOpaqueCaptureAddressInfoKHR";
+      case StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT : return "PhysicalDeviceLineRasterizationFeaturesEXT";
+      case StructureType::ePipelineRasterizationLineStateCreateInfoEXT : return "PipelineRasterizationLineStateCreateInfoEXT";
+      case StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT : return "PhysicalDeviceLineRasterizationPropertiesEXT";
+      case StructureType::ePhysicalDeviceHostQueryResetFeaturesEXT : return "PhysicalDeviceHostQueryResetFeaturesEXT";
+      case StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT : return "PhysicalDeviceIndexTypeUint8FeaturesEXT";
+      case StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR : return "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR";
+      case StructureType::ePipelineInfoKHR : return "PipelineInfoKHR";
+      case StructureType::ePipelineExecutablePropertiesKHR : return "PipelineExecutablePropertiesKHR";
+      case StructureType::ePipelineExecutableInfoKHR : return "PipelineExecutableInfoKHR";
+      case StructureType::ePipelineExecutableStatisticKHR : return "PipelineExecutableStatisticKHR";
+      case StructureType::ePipelineExecutableInternalRepresentationKHR : return "PipelineExecutableInternalRepresentationKHR";
+      case StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT : return "PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT";
+      case StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT : return "PhysicalDeviceTexelBufferAlignmentFeaturesEXT";
+      case StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT : return "PhysicalDeviceTexelBufferAlignmentPropertiesEXT";
+      default: return "invalid";
+    }
+  }
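+
+  // Illustrative sketch (the helper name below is hypothetical): the
+  // to_string( StructureType ) overload above maps an sType enumerator to its
+  // unprefixed name and falls back to "invalid" for unrecognized values.
+  VULKAN_HPP_INLINE std::string exampleStructureTypeName()
+  {
+    return to_string( StructureType::eRenderPassBeginInfo );  // "RenderPassBeginInfo"
+  }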
+
+  enum class SubpassContents
+  {
+    eInline = VK_SUBPASS_CONTENTS_INLINE,
+    eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SubpassContents value )
+  {
+    switch ( value )
+    {
+      case SubpassContents::eInline : return "Inline";
+      case SubpassContents::eSecondaryCommandBuffers : return "SecondaryCommandBuffers";
+      default: return "invalid";
+    }
+  }
+
+  enum class SystemAllocationScope
+  {
+    eCommand = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND,
+    eObject = VK_SYSTEM_ALLOCATION_SCOPE_OBJECT,
+    eCache = VK_SYSTEM_ALLOCATION_SCOPE_CACHE,
+    eDevice = VK_SYSTEM_ALLOCATION_SCOPE_DEVICE,
+    eInstance = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SystemAllocationScope value )
+  {
+    switch ( value )
+    {
+      case SystemAllocationScope::eCommand : return "Command";
+      case SystemAllocationScope::eObject : return "Object";
+      case SystemAllocationScope::eCache : return "Cache";
+      case SystemAllocationScope::eDevice : return "Device";
+      case SystemAllocationScope::eInstance : return "Instance";
+      default: return "invalid";
+    }
+  }
+
+  enum class TessellationDomainOrigin
+  {
+    eUpperLeft = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT,
+    eLowerLeft = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT,
+    eUpperLeftKHR = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT_KHR,
+    eLowerLeftKHR = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( TessellationDomainOrigin value )
+  {
+    switch ( value )
+    {
+      case TessellationDomainOrigin::eUpperLeft : return "UpperLeft";
+      case TessellationDomainOrigin::eLowerLeft : return "LowerLeft";
+      default: return "invalid";
+    }
+  }
+
+  enum class TimeDomainEXT
+  {
+    eDevice = VK_TIME_DOMAIN_DEVICE_EXT,
+    eClockMonotonic = VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT,
+    eClockMonotonicRaw = VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT,
+    eQueryPerformanceCounter = VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( TimeDomainEXT value )
+  {
+    switch ( value )
+    {
+      case TimeDomainEXT::eDevice : return "Device";
+      case TimeDomainEXT::eClockMonotonic : return "ClockMonotonic";
+      case TimeDomainEXT::eClockMonotonicRaw : return "ClockMonotonicRaw";
+      case TimeDomainEXT::eQueryPerformanceCounter : return "QueryPerformanceCounter";
+      default: return "invalid";
+    }
+  }
+
+  enum class ValidationCacheHeaderVersionEXT
+  {
+    eOne = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ValidationCacheHeaderVersionEXT value )
+  {
+    switch ( value )
+    {
+      case ValidationCacheHeaderVersionEXT::eOne : return "One";
+      default: return "invalid";
+    }
+  }
+
+  enum class ValidationCheckEXT
+  {
+    eAll = VK_VALIDATION_CHECK_ALL_EXT,
+    eShaders = VK_VALIDATION_CHECK_SHADERS_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ValidationCheckEXT value )
+  {
+    switch ( value )
+    {
+      case ValidationCheckEXT::eAll : return "All";
+      case ValidationCheckEXT::eShaders : return "Shaders";
+      default: return "invalid";
+    }
+  }
+
+  enum class ValidationFeatureDisableEXT
+  {
+    eAll = VK_VALIDATION_FEATURE_DISABLE_ALL_EXT,
+    eShaders = VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT,
+    eThreadSafety = VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT,
+    eApiParameters = VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT,
+    eObjectLifetimes = VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT,
+    eCoreChecks = VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT,
+    eUniqueHandles = VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ValidationFeatureDisableEXT value )
+  {
+    switch ( value )
+    {
+      case ValidationFeatureDisableEXT::eAll : return "All";
+      case ValidationFeatureDisableEXT::eShaders : return "Shaders";
+      case ValidationFeatureDisableEXT::eThreadSafety : return "ThreadSafety";
+      case ValidationFeatureDisableEXT::eApiParameters : return "ApiParameters";
+      case ValidationFeatureDisableEXT::eObjectLifetimes : return "ObjectLifetimes";
+      case ValidationFeatureDisableEXT::eCoreChecks : return "CoreChecks";
+      case ValidationFeatureDisableEXT::eUniqueHandles : return "UniqueHandles";
+      default: return "invalid";
+    }
+  }
+
+  enum class ValidationFeatureEnableEXT
+  {
+    eGpuAssisted = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT,
+    eGpuAssistedReserveBindingSlot = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT,
+    eBestPractices = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ValidationFeatureEnableEXT value )
+  {
+    switch ( value )
+    {
+      case ValidationFeatureEnableEXT::eGpuAssisted : return "GpuAssisted";
+      case ValidationFeatureEnableEXT::eGpuAssistedReserveBindingSlot : return "GpuAssistedReserveBindingSlot";
+      case ValidationFeatureEnableEXT::eBestPractices : return "BestPractices";
+      default: return "invalid";
+    }
+  }
+
+  enum class VendorId
+  {
+    eVIV = VK_VENDOR_ID_VIV,
+    eVSI = VK_VENDOR_ID_VSI,
+    eKazan = VK_VENDOR_ID_KAZAN
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( VendorId value )
+  {
+    switch ( value )
+    {
+      case VendorId::eVIV : return "VIV";
+      case VendorId::eVSI : return "VSI";
+      case VendorId::eKazan : return "Kazan";
+      default: return "invalid";
+    }
+  }
+
+  enum class VertexInputRate
+  {
+    eVertex = VK_VERTEX_INPUT_RATE_VERTEX,
+    eInstance = VK_VERTEX_INPUT_RATE_INSTANCE
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( VertexInputRate value )
+  {
+    switch ( value )
+    {
+      case VertexInputRate::eVertex : return "Vertex";
+      case VertexInputRate::eInstance : return "Instance";
+      default: return "invalid";
+    }
+  }
+
+  enum class ViewportCoordinateSwizzleNV
+  {
+    ePositiveX = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV,
+    eNegativeX = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV,
+    ePositiveY = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV,
+    eNegativeY = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV,
+    ePositiveZ = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV,
+    eNegativeZ = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV,
+    ePositiveW = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV,
+    eNegativeW = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ViewportCoordinateSwizzleNV value )
+  {
+    switch ( value )
+    {
+      case ViewportCoordinateSwizzleNV::ePositiveX : return "PositiveX";
+      case ViewportCoordinateSwizzleNV::eNegativeX : return "NegativeX";
+      case ViewportCoordinateSwizzleNV::ePositiveY : return "PositiveY";
+      case ViewportCoordinateSwizzleNV::eNegativeY : return "NegativeY";
+      case ViewportCoordinateSwizzleNV::ePositiveZ : return "PositiveZ";
+      case ViewportCoordinateSwizzleNV::eNegativeZ : return "NegativeZ";
+      case ViewportCoordinateSwizzleNV::ePositiveW : return "PositiveW";
+      case ViewportCoordinateSwizzleNV::eNegativeW : return "NegativeW";
+      default: return "invalid";
+    }
+  }
+
+  template<ObjectType value>
+  struct cpp_type
+  {
+  };
+
+  enum class AccessFlagBits
+  {
+    eIndirectCommandRead = VK_ACCESS_INDIRECT_COMMAND_READ_BIT,
+    eIndexRead = VK_ACCESS_INDEX_READ_BIT,
+    eVertexAttributeRead = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
+    eUniformRead = VK_ACCESS_UNIFORM_READ_BIT,
+    eInputAttachmentRead = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
+    eShaderRead = VK_ACCESS_SHADER_READ_BIT,
+    eShaderWrite = VK_ACCESS_SHADER_WRITE_BIT,
+    eColorAttachmentRead = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT,
+    eColorAttachmentWrite = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+    eDepthStencilAttachmentRead = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
+    eDepthStencilAttachmentWrite = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
+    eTransferRead = VK_ACCESS_TRANSFER_READ_BIT,
+    eTransferWrite = VK_ACCESS_TRANSFER_WRITE_BIT,
+    eHostRead = VK_ACCESS_HOST_READ_BIT,
+    eHostWrite = VK_ACCESS_HOST_WRITE_BIT,
+    eMemoryRead = VK_ACCESS_MEMORY_READ_BIT,
+    eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT,
+    eTransformFeedbackWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT,
+    eTransformFeedbackCounterReadEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT,
+    eTransformFeedbackCounterWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT,
+    eConditionalRenderingReadEXT = VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT,
+    eCommandProcessReadNVX = VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX,
+    eCommandProcessWriteNVX = VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX,
+    eColorAttachmentReadNoncoherentEXT = VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT,
+    eShadingRateImageReadNV = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV,
+    eAccelerationStructureReadNV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV,
+    eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV,
+    eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( AccessFlagBits value )
+  {
+    switch ( value )
+    {
+      case AccessFlagBits::eIndirectCommandRead : return "IndirectCommandRead";
+      case AccessFlagBits::eIndexRead : return "IndexRead";
+      case AccessFlagBits::eVertexAttributeRead : return "VertexAttributeRead";
+      case AccessFlagBits::eUniformRead : return "UniformRead";
+      case AccessFlagBits::eInputAttachmentRead : return "InputAttachmentRead";
+      case AccessFlagBits::eShaderRead : return "ShaderRead";
+      case AccessFlagBits::eShaderWrite : return "ShaderWrite";
+      case AccessFlagBits::eColorAttachmentRead : return "ColorAttachmentRead";
+      case AccessFlagBits::eColorAttachmentWrite : return "ColorAttachmentWrite";
+      case AccessFlagBits::eDepthStencilAttachmentRead : return "DepthStencilAttachmentRead";
+      case AccessFlagBits::eDepthStencilAttachmentWrite : return "DepthStencilAttachmentWrite";
+      case AccessFlagBits::eTransferRead : return "TransferRead";
+      case AccessFlagBits::eTransferWrite : return "TransferWrite";
+      case AccessFlagBits::eHostRead : return "HostRead";
+      case AccessFlagBits::eHostWrite : return "HostWrite";
+      case AccessFlagBits::eMemoryRead : return "MemoryRead";
+      case AccessFlagBits::eMemoryWrite : return "MemoryWrite";
+      case AccessFlagBits::eTransformFeedbackWriteEXT : return "TransformFeedbackWriteEXT";
+      case AccessFlagBits::eTransformFeedbackCounterReadEXT : return "TransformFeedbackCounterReadEXT";
+      case AccessFlagBits::eTransformFeedbackCounterWriteEXT : return "TransformFeedbackCounterWriteEXT";
+      case AccessFlagBits::eConditionalRenderingReadEXT : return "ConditionalRenderingReadEXT";
+      case AccessFlagBits::eCommandProcessReadNVX : return "CommandProcessReadNVX";
+      case AccessFlagBits::eCommandProcessWriteNVX : return "CommandProcessWriteNVX";
+      case AccessFlagBits::eColorAttachmentReadNoncoherentEXT : return "ColorAttachmentReadNoncoherentEXT";
+      case AccessFlagBits::eShadingRateImageReadNV : return "ShadingRateImageReadNV";
+      case AccessFlagBits::eAccelerationStructureReadNV : return "AccelerationStructureReadNV";
+      case AccessFlagBits::eAccelerationStructureWriteNV : return "AccelerationStructureWriteNV";
+      case AccessFlagBits::eFragmentDensityMapReadEXT : return "FragmentDensityMapReadEXT";
+      default: return "invalid";
+    }
+  }
+
+  using AccessFlags = Flags<AccessFlagBits, VkAccessFlags>;
+
+  template <> struct FlagTraits<AccessFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(AccessFlagBits::eIndirectCommandRead) | VkFlags(AccessFlagBits::eIndexRead) | VkFlags(AccessFlagBits::eVertexAttributeRead) | VkFlags(AccessFlagBits::eUniformRead) | VkFlags(AccessFlagBits::eInputAttachmentRead) | VkFlags(AccessFlagBits::eShaderRead) | VkFlags(AccessFlagBits::eShaderWrite) | VkFlags(AccessFlagBits::eColorAttachmentRead) | VkFlags(AccessFlagBits::eColorAttachmentWrite) | VkFlags(AccessFlagBits::eDepthStencilAttachmentRead) | VkFlags(AccessFlagBits::eDepthStencilAttachmentWrite) | VkFlags(AccessFlagBits::eTransferRead) | VkFlags(AccessFlagBits::eTransferWrite) | VkFlags(AccessFlagBits::eHostRead) | VkFlags(AccessFlagBits::eHostWrite) | VkFlags(AccessFlagBits::eMemoryRead) | VkFlags(AccessFlagBits::eMemoryWrite) | VkFlags(AccessFlagBits::eTransformFeedbackWriteEXT) | VkFlags(AccessFlagBits::eTransformFeedbackCounterReadEXT) | VkFlags(AccessFlagBits::eTransformFeedbackCounterWriteEXT) | VkFlags(AccessFlagBits::eConditionalRenderingReadEXT) | VkFlags(AccessFlagBits::eCommandProcessReadNVX) | VkFlags(AccessFlagBits::eCommandProcessWriteNVX) | VkFlags(AccessFlagBits::eColorAttachmentReadNoncoherentEXT) | VkFlags(AccessFlagBits::eShadingRateImageReadNV) | VkFlags(AccessFlagBits::eAccelerationStructureReadNV) | VkFlags(AccessFlagBits::eAccelerationStructureWriteNV) | VkFlags(AccessFlagBits::eFragmentDensityMapReadEXT)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator|( AccessFlagBits bit0, AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return AccessFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator&( AccessFlagBits bit0, AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return AccessFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator^( AccessFlagBits bit0, AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return AccessFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator~( AccessFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( AccessFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( AccessFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & AccessFlagBits::eIndirectCommandRead ) result += "IndirectCommandRead | ";
+    if ( value & AccessFlagBits::eIndexRead ) result += "IndexRead | ";
+    if ( value & AccessFlagBits::eVertexAttributeRead ) result += "VertexAttributeRead | ";
+    if ( value & AccessFlagBits::eUniformRead ) result += "UniformRead | ";
+    if ( value & AccessFlagBits::eInputAttachmentRead ) result += "InputAttachmentRead | ";
+    if ( value & AccessFlagBits::eShaderRead ) result += "ShaderRead | ";
+    if ( value & AccessFlagBits::eShaderWrite ) result += "ShaderWrite | ";
+    if ( value & AccessFlagBits::eColorAttachmentRead ) result += "ColorAttachmentRead | ";
+    if ( value & AccessFlagBits::eColorAttachmentWrite ) result += "ColorAttachmentWrite | ";
+    if ( value & AccessFlagBits::eDepthStencilAttachmentRead ) result += "DepthStencilAttachmentRead | ";
+    if ( value & AccessFlagBits::eDepthStencilAttachmentWrite ) result += "DepthStencilAttachmentWrite | ";
+    if ( value & AccessFlagBits::eTransferRead ) result += "TransferRead | ";
+    if ( value & AccessFlagBits::eTransferWrite ) result += "TransferWrite | ";
+    if ( value & AccessFlagBits::eHostRead ) result += "HostRead | ";
+    if ( value & AccessFlagBits::eHostWrite ) result += "HostWrite | ";
+    if ( value & AccessFlagBits::eMemoryRead ) result += "MemoryRead | ";
+    if ( value & AccessFlagBits::eMemoryWrite ) result += "MemoryWrite | ";
+    if ( value & AccessFlagBits::eTransformFeedbackWriteEXT ) result += "TransformFeedbackWriteEXT | ";
+    if ( value & AccessFlagBits::eTransformFeedbackCounterReadEXT ) result += "TransformFeedbackCounterReadEXT | ";
+    if ( value & AccessFlagBits::eTransformFeedbackCounterWriteEXT ) result += "TransformFeedbackCounterWriteEXT | ";
+    if ( value & AccessFlagBits::eConditionalRenderingReadEXT ) result += "ConditionalRenderingReadEXT | ";
+    if ( value & AccessFlagBits::eCommandProcessReadNVX ) result += "CommandProcessReadNVX | ";
+    if ( value & AccessFlagBits::eCommandProcessWriteNVX ) result += "CommandProcessWriteNVX | ";
+    if ( value & AccessFlagBits::eColorAttachmentReadNoncoherentEXT ) result += "ColorAttachmentReadNoncoherentEXT | ";
+    if ( value & AccessFlagBits::eShadingRateImageReadNV ) result += "ShadingRateImageReadNV | ";
+    if ( value & AccessFlagBits::eAccelerationStructureReadNV ) result += "AccelerationStructureReadNV | ";
+    if ( value & AccessFlagBits::eAccelerationStructureWriteNV ) result += "AccelerationStructureWriteNV | ";
+    if ( value & AccessFlagBits::eFragmentDensityMapReadEXT ) result += "FragmentDensityMapReadEXT | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
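+
+  // Illustrative sketch (the helper name below is hypothetical): AccessFlagBits
+  // values combine through the bitwise operators above into an AccessFlags
+  // mask, and to_string renders the set bits as a brace-enclosed,
+  // '|'-separated list.
+  VULKAN_HPP_INLINE std::string exampleAccessFlagsToString()
+  {
+    AccessFlags mask = AccessFlagBits::eShaderRead | AccessFlagBits::eShaderWrite;
+    return to_string( mask );  // "{ ShaderRead | ShaderWrite }"
+  }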
+
+  enum class AcquireProfilingLockFlagBitsKHR
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagBitsKHR )
+  {
+    return "(void)";
+  }
+
+  using AcquireProfilingLockFlagsKHR = Flags<AcquireProfilingLockFlagBitsKHR, VkAcquireProfilingLockFlagsKHR>;
+
+  VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagsKHR  )
+  {
+    return "{}";
+  }
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  enum class AndroidSurfaceCreateFlagBitsKHR
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagBitsKHR )
+  {
+    return "(void)";
+  }
+
+  using AndroidSurfaceCreateFlagsKHR = Flags<AndroidSurfaceCreateFlagBitsKHR, VkAndroidSurfaceCreateFlagsKHR>;
+
+  VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagsKHR  )
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  enum class AttachmentDescriptionFlagBits
+  {
+    eMayAlias = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlagBits value )
+  {
+    switch ( value )
+    {
+      case AttachmentDescriptionFlagBits::eMayAlias : return "MayAlias";
+      default: return "invalid";
+    }
+  }
+
+  using AttachmentDescriptionFlags = Flags<AttachmentDescriptionFlagBits, VkAttachmentDescriptionFlags>;
+
+  template <> struct FlagTraits<AttachmentDescriptionFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(AttachmentDescriptionFlagBits::eMayAlias)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator|( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return AttachmentDescriptionFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator&( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return AttachmentDescriptionFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator^( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return AttachmentDescriptionFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator~( AttachmentDescriptionFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( AttachmentDescriptionFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & AttachmentDescriptionFlagBits::eMayAlias ) result += "MayAlias | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class BufferCreateFlagBits
+  {
+    eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT,
+    eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT,
+    eSparseAliased = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT,
+    eProtected = VK_BUFFER_CREATE_PROTECTED_BIT,
+    eDeviceAddressCaptureReplayKHR = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR,
+    eDeviceAddressCaptureReplayEXT = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( BufferCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case BufferCreateFlagBits::eSparseBinding : return "SparseBinding";
+      case BufferCreateFlagBits::eSparseResidency : return "SparseResidency";
+      case BufferCreateFlagBits::eSparseAliased : return "SparseAliased";
+      case BufferCreateFlagBits::eProtected : return "Protected";
+      case BufferCreateFlagBits::eDeviceAddressCaptureReplayKHR : return "DeviceAddressCaptureReplayKHR";
+      default: return "invalid";
+    }
+  }
+
+  using BufferCreateFlags = Flags<BufferCreateFlagBits, VkBufferCreateFlags>;
+
+  template <> struct FlagTraits<BufferCreateFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(BufferCreateFlagBits::eSparseBinding) | VkFlags(BufferCreateFlagBits::eSparseResidency) | VkFlags(BufferCreateFlagBits::eSparseAliased) | VkFlags(BufferCreateFlagBits::eProtected) | VkFlags(BufferCreateFlagBits::eDeviceAddressCaptureReplayKHR)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator|( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return BufferCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator&( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return BufferCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator^( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return BufferCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator~( BufferCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( BufferCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( BufferCreateFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & BufferCreateFlagBits::eSparseBinding ) result += "SparseBinding | ";
+    if ( value & BufferCreateFlagBits::eSparseResidency ) result += "SparseResidency | ";
+    if ( value & BufferCreateFlagBits::eSparseAliased ) result += "SparseAliased | ";
+    if ( value & BufferCreateFlagBits::eProtected ) result += "Protected | ";
+    if ( value & BufferCreateFlagBits::eDeviceAddressCaptureReplayKHR ) result += "DeviceAddressCaptureReplayKHR | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class BufferUsageFlagBits
+  {
+    eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
+    eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT,
+    eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT,
+    eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT,
+    eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
+    eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
+    eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
+    eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
+    eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT,
+    eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT,
+    eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT,
+    eConditionalRenderingEXT = VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT,
+    eRayTracingNV = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV,
+    eShaderDeviceAddressKHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR,
+    eShaderDeviceAddressEXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( BufferUsageFlagBits value )
+  {
+    switch ( value )
+    {
+      case BufferUsageFlagBits::eTransferSrc : return "TransferSrc";
+      case BufferUsageFlagBits::eTransferDst : return "TransferDst";
+      case BufferUsageFlagBits::eUniformTexelBuffer : return "UniformTexelBuffer";
+      case BufferUsageFlagBits::eStorageTexelBuffer : return "StorageTexelBuffer";
+      case BufferUsageFlagBits::eUniformBuffer : return "UniformBuffer";
+      case BufferUsageFlagBits::eStorageBuffer : return "StorageBuffer";
+      case BufferUsageFlagBits::eIndexBuffer : return "IndexBuffer";
+      case BufferUsageFlagBits::eVertexBuffer : return "VertexBuffer";
+      case BufferUsageFlagBits::eIndirectBuffer : return "IndirectBuffer";
+      case BufferUsageFlagBits::eTransformFeedbackBufferEXT : return "TransformFeedbackBufferEXT";
+      case BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT : return "TransformFeedbackCounterBufferEXT";
+      case BufferUsageFlagBits::eConditionalRenderingEXT : return "ConditionalRenderingEXT";
+      case BufferUsageFlagBits::eRayTracingNV : return "RayTracingNV";
+      case BufferUsageFlagBits::eShaderDeviceAddressKHR : return "ShaderDeviceAddressKHR";
+      default: return "invalid";
+    }
+  }
+
+  using BufferUsageFlags = Flags<BufferUsageFlagBits, VkBufferUsageFlags>;
+
+  template <> struct FlagTraits<BufferUsageFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(BufferUsageFlagBits::eTransferSrc) | VkFlags(BufferUsageFlagBits::eTransferDst) | VkFlags(BufferUsageFlagBits::eUniformTexelBuffer) | VkFlags(BufferUsageFlagBits::eStorageTexelBuffer) | VkFlags(BufferUsageFlagBits::eUniformBuffer) | VkFlags(BufferUsageFlagBits::eStorageBuffer) | VkFlags(BufferUsageFlagBits::eIndexBuffer) | VkFlags(BufferUsageFlagBits::eVertexBuffer) | VkFlags(BufferUsageFlagBits::eIndirectBuffer) | VkFlags(BufferUsageFlagBits::eTransformFeedbackBufferEXT) | VkFlags(BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT) | VkFlags(BufferUsageFlagBits::eConditionalRenderingEXT) | VkFlags(BufferUsageFlagBits::eRayTracingNV) | VkFlags(BufferUsageFlagBits::eShaderDeviceAddressKHR)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator|( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return BufferUsageFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator&( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return BufferUsageFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator^( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return BufferUsageFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator~( BufferUsageFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( BufferUsageFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( BufferUsageFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & BufferUsageFlagBits::eTransferSrc ) result += "TransferSrc | ";
+    if ( value & BufferUsageFlagBits::eTransferDst ) result += "TransferDst | ";
+    if ( value & BufferUsageFlagBits::eUniformTexelBuffer ) result += "UniformTexelBuffer | ";
+    if ( value & BufferUsageFlagBits::eStorageTexelBuffer ) result += "StorageTexelBuffer | ";
+    if ( value & BufferUsageFlagBits::eUniformBuffer ) result += "UniformBuffer | ";
+    if ( value & BufferUsageFlagBits::eStorageBuffer ) result += "StorageBuffer | ";
+    if ( value & BufferUsageFlagBits::eIndexBuffer ) result += "IndexBuffer | ";
+    if ( value & BufferUsageFlagBits::eVertexBuffer ) result += "VertexBuffer | ";
+    if ( value & BufferUsageFlagBits::eIndirectBuffer ) result += "IndirectBuffer | ";
+    if ( value & BufferUsageFlagBits::eTransformFeedbackBufferEXT ) result += "TransformFeedbackBufferEXT | ";
+    if ( value & BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT ) result += "TransformFeedbackCounterBufferEXT | ";
+    if ( value & BufferUsageFlagBits::eConditionalRenderingEXT ) result += "ConditionalRenderingEXT | ";
+    if ( value & BufferUsageFlagBits::eRayTracingNV ) result += "RayTracingNV | ";
+    if ( value & BufferUsageFlagBits::eShaderDeviceAddressKHR ) result += "ShaderDeviceAddressKHR | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
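+  // Usage sketch (illustrative comment only, not part of the generated API
+  // surface): FlagBits values combine into the matching Flags type through the
+  // operators defined above and can be pretty-printed with to_string, e.g.
+  //   BufferUsageFlags usage = BufferUsageFlagBits::eTransferSrc | BufferUsageFlagBits::eVertexBuffer;
+  //   to_string( usage );  // "{ TransferSrc | VertexBuffer }"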
+
+  enum class BufferViewCreateFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using BufferViewCreateFlags = Flags<BufferViewCreateFlagBits, VkBufferViewCreateFlags>;
+
+  VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlags  )
+  {
+    return "{}";
+  }
+
+  enum class BuildAccelerationStructureFlagBitsNV
+  {
+    eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV,
+    eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV,
+    ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV,
+    ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV,
+    eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagBitsNV value )
+  {
+    switch ( value )
+    {
+      case BuildAccelerationStructureFlagBitsNV::eAllowUpdate : return "AllowUpdate";
+      case BuildAccelerationStructureFlagBitsNV::eAllowCompaction : return "AllowCompaction";
+      case BuildAccelerationStructureFlagBitsNV::ePreferFastTrace : return "PreferFastTrace";
+      case BuildAccelerationStructureFlagBitsNV::ePreferFastBuild : return "PreferFastBuild";
+      case BuildAccelerationStructureFlagBitsNV::eLowMemory : return "LowMemory";
+      default: return "invalid";
+    }
+  }
+
+  using BuildAccelerationStructureFlagsNV = Flags<BuildAccelerationStructureFlagBitsNV, VkBuildAccelerationStructureFlagsNV>;
+
+  template <> struct FlagTraits<BuildAccelerationStructureFlagBitsNV>
+  {
+    enum
+    {
+      allFlags = VkFlags(BuildAccelerationStructureFlagBitsNV::eAllowUpdate) | VkFlags(BuildAccelerationStructureFlagBitsNV::eAllowCompaction) | VkFlags(BuildAccelerationStructureFlagBitsNV::ePreferFastTrace) | VkFlags(BuildAccelerationStructureFlagBitsNV::ePreferFastBuild) | VkFlags(BuildAccelerationStructureFlagBitsNV::eLowMemory)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsNV operator|( BuildAccelerationStructureFlagBitsNV bit0, BuildAccelerationStructureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return BuildAccelerationStructureFlagsNV( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsNV operator&( BuildAccelerationStructureFlagBitsNV bit0, BuildAccelerationStructureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return BuildAccelerationStructureFlagsNV( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsNV operator^( BuildAccelerationStructureFlagBitsNV bit0, BuildAccelerationStructureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return BuildAccelerationStructureFlagsNV( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsNV operator~( BuildAccelerationStructureFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( BuildAccelerationStructureFlagsNV( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagsNV value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & BuildAccelerationStructureFlagBitsNV::eAllowUpdate ) result += "AllowUpdate | ";
+    if ( value & BuildAccelerationStructureFlagBitsNV::eAllowCompaction ) result += "AllowCompaction | ";
+    if ( value & BuildAccelerationStructureFlagBitsNV::ePreferFastTrace ) result += "PreferFastTrace | ";
+    if ( value & BuildAccelerationStructureFlagBitsNV::ePreferFastBuild ) result += "PreferFastBuild | ";
+    if ( value & BuildAccelerationStructureFlagBitsNV::eLowMemory ) result += "LowMemory | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class ColorComponentFlagBits
+  {
+    eR = VK_COLOR_COMPONENT_R_BIT,
+    eG = VK_COLOR_COMPONENT_G_BIT,
+    eB = VK_COLOR_COMPONENT_B_BIT,
+    eA = VK_COLOR_COMPONENT_A_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ColorComponentFlagBits value )
+  {
+    switch ( value )
+    {
+      case ColorComponentFlagBits::eR : return "R";
+      case ColorComponentFlagBits::eG : return "G";
+      case ColorComponentFlagBits::eB : return "B";
+      case ColorComponentFlagBits::eA : return "A";
+      default: return "invalid";
+    }
+  }
+
+  using ColorComponentFlags = Flags<ColorComponentFlagBits, VkColorComponentFlags>;
+
+  template <> struct FlagTraits<ColorComponentFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(ColorComponentFlagBits::eR) | VkFlags(ColorComponentFlagBits::eG) | VkFlags(ColorComponentFlagBits::eB) | VkFlags(ColorComponentFlagBits::eA)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator|( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ColorComponentFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator&( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ColorComponentFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator^( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ColorComponentFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator~( ColorComponentFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ColorComponentFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ColorComponentFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ColorComponentFlagBits::eR ) result += "R | ";
+    if ( value & ColorComponentFlagBits::eG ) result += "G | ";
+    if ( value & ColorComponentFlagBits::eB ) result += "B | ";
+    if ( value & ColorComponentFlagBits::eA ) result += "A | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class CommandBufferResetFlagBits
+  {
+    eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlagBits value )
+  {
+    switch ( value )
+    {
+      case CommandBufferResetFlagBits::eReleaseResources : return "ReleaseResources";
+      default: return "invalid";
+    }
+  }
+
+  using CommandBufferResetFlags = Flags<CommandBufferResetFlagBits, VkCommandBufferResetFlags>;
+
+  template <> struct FlagTraits<CommandBufferResetFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(CommandBufferResetFlagBits::eReleaseResources)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator|( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandBufferResetFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator&( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandBufferResetFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator^( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandBufferResetFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator~( CommandBufferResetFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( CommandBufferResetFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & CommandBufferResetFlagBits::eReleaseResources ) result += "ReleaseResources | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class CommandBufferUsageFlagBits
+  {
+    eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
+    eRenderPassContinue = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT,
+    eSimultaneousUse = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlagBits value )
+  {
+    switch ( value )
+    {
+      case CommandBufferUsageFlagBits::eOneTimeSubmit : return "OneTimeSubmit";
+      case CommandBufferUsageFlagBits::eRenderPassContinue : return "RenderPassContinue";
+      case CommandBufferUsageFlagBits::eSimultaneousUse : return "SimultaneousUse";
+      default: return "invalid";
+    }
+  }
+
+  using CommandBufferUsageFlags = Flags<CommandBufferUsageFlagBits, VkCommandBufferUsageFlags>;
+
+  template <> struct FlagTraits<CommandBufferUsageFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(CommandBufferUsageFlagBits::eOneTimeSubmit) | VkFlags(CommandBufferUsageFlagBits::eRenderPassContinue) | VkFlags(CommandBufferUsageFlagBits::eSimultaneousUse)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator|( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandBufferUsageFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator&( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandBufferUsageFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator^( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandBufferUsageFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator~( CommandBufferUsageFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( CommandBufferUsageFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & CommandBufferUsageFlagBits::eOneTimeSubmit ) result += "OneTimeSubmit | ";
+    if ( value & CommandBufferUsageFlagBits::eRenderPassContinue ) result += "RenderPassContinue | ";
+    if ( value & CommandBufferUsageFlagBits::eSimultaneousUse ) result += "SimultaneousUse | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class CommandPoolCreateFlagBits
+  {
+    eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,
+    eResetCommandBuffer = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
+    eProtected = VK_COMMAND_POOL_CREATE_PROTECTED_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case CommandPoolCreateFlagBits::eTransient : return "Transient";
+      case CommandPoolCreateFlagBits::eResetCommandBuffer : return "ResetCommandBuffer";
+      case CommandPoolCreateFlagBits::eProtected : return "Protected";
+      default: return "invalid";
+    }
+  }
+
+  using CommandPoolCreateFlags = Flags<CommandPoolCreateFlagBits, VkCommandPoolCreateFlags>;
+
+  template <> struct FlagTraits<CommandPoolCreateFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(CommandPoolCreateFlagBits::eTransient) | VkFlags(CommandPoolCreateFlagBits::eResetCommandBuffer) | VkFlags(CommandPoolCreateFlagBits::eProtected)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator|( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandPoolCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator&( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandPoolCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator^( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandPoolCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator~( CommandPoolCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( CommandPoolCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & CommandPoolCreateFlagBits::eTransient ) result += "Transient | ";
+    if ( value & CommandPoolCreateFlagBits::eResetCommandBuffer ) result += "ResetCommandBuffer | ";
+    if ( value & CommandPoolCreateFlagBits::eProtected ) result += "Protected | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class CommandPoolResetFlagBits
+  {
+    eReleaseResources = VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlagBits value )
+  {
+    switch ( value )
+    {
+      case CommandPoolResetFlagBits::eReleaseResources : return "ReleaseResources";
+      default: return "invalid";
+    }
+  }
+
+  using CommandPoolResetFlags = Flags<CommandPoolResetFlagBits, VkCommandPoolResetFlags>;
+
+  template <> struct FlagTraits<CommandPoolResetFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(CommandPoolResetFlagBits::eReleaseResources)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator|( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandPoolResetFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator&( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandPoolResetFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator^( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandPoolResetFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator~( CommandPoolResetFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( CommandPoolResetFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & CommandPoolResetFlagBits::eReleaseResources ) result += "ReleaseResources | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class CommandPoolTrimFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlagBits )
+  {
+    return "(void)";
+  }
+
+  using CommandPoolTrimFlags = Flags<CommandPoolTrimFlagBits, VkCommandPoolTrimFlags>;
+
+  using CommandPoolTrimFlagsKHR = CommandPoolTrimFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlags  )
+  {
+    return "{}";
+  }
+
+  enum class CompositeAlphaFlagBitsKHR
+  {
+    eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
+    ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
+    ePostMultiplied = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
+    eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case CompositeAlphaFlagBitsKHR::eOpaque : return "Opaque";
+      case CompositeAlphaFlagBitsKHR::ePreMultiplied : return "PreMultiplied";
+      case CompositeAlphaFlagBitsKHR::ePostMultiplied : return "PostMultiplied";
+      case CompositeAlphaFlagBitsKHR::eInherit : return "Inherit";
+      default: return "invalid";
+    }
+  }
+
+  using CompositeAlphaFlagsKHR = Flags<CompositeAlphaFlagBitsKHR, VkCompositeAlphaFlagsKHR>;
+
+  template <> struct FlagTraits<CompositeAlphaFlagBitsKHR>
+  {
+    enum
+    {
+      allFlags = VkFlags(CompositeAlphaFlagBitsKHR::eOpaque) | VkFlags(CompositeAlphaFlagBitsKHR::ePreMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::ePostMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::eInherit)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator|( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CompositeAlphaFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator&( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CompositeAlphaFlagsKHR( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator^( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CompositeAlphaFlagsKHR( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator~( CompositeAlphaFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( CompositeAlphaFlagsKHR( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagsKHR value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & CompositeAlphaFlagBitsKHR::eOpaque ) result += "Opaque | ";
+    if ( value & CompositeAlphaFlagBitsKHR::ePreMultiplied ) result += "PreMultiplied | ";
+    if ( value & CompositeAlphaFlagBitsKHR::ePostMultiplied ) result += "PostMultiplied | ";
+    if ( value & CompositeAlphaFlagBitsKHR::eInherit ) result += "Inherit | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class ConditionalRenderingFlagBitsEXT
+  {
+    eInverted = VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case ConditionalRenderingFlagBitsEXT::eInverted : return "Inverted";
+      default: return "invalid";
+    }
+  }
+
+  using ConditionalRenderingFlagsEXT = Flags<ConditionalRenderingFlagBitsEXT, VkConditionalRenderingFlagsEXT>;
+
+  template <> struct FlagTraits<ConditionalRenderingFlagBitsEXT>
+  {
+    enum
+    {
+      allFlags = VkFlags(ConditionalRenderingFlagBitsEXT::eInverted)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator|( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ConditionalRenderingFlagsEXT( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator&( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ConditionalRenderingFlagsEXT( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator^( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ConditionalRenderingFlagsEXT( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator~( ConditionalRenderingFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ConditionalRenderingFlagsEXT( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagsEXT value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ConditionalRenderingFlagBitsEXT::eInverted ) result += "Inverted | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class CullModeFlagBits
+  {
+    eNone = VK_CULL_MODE_NONE,
+    eFront = VK_CULL_MODE_FRONT_BIT,
+    eBack = VK_CULL_MODE_BACK_BIT,
+    eFrontAndBack = VK_CULL_MODE_FRONT_AND_BACK
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( CullModeFlagBits value )
+  {
+    switch ( value )
+    {
+      case CullModeFlagBits::eNone : return "None";
+      case CullModeFlagBits::eFront : return "Front";
+      case CullModeFlagBits::eBack : return "Back";
+      case CullModeFlagBits::eFrontAndBack : return "FrontAndBack";
+      default: return "invalid";
+    }
+  }
+
+  using CullModeFlags = Flags<CullModeFlagBits, VkCullModeFlags>;
+
+  template <> struct FlagTraits<CullModeFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(CullModeFlagBits::eNone) | VkFlags(CullModeFlagBits::eFront) | VkFlags(CullModeFlagBits::eBack) | VkFlags(CullModeFlagBits::eFrontAndBack)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator|( CullModeFlagBits bit0, CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CullModeFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator&( CullModeFlagBits bit0, CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CullModeFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator^( CullModeFlagBits bit0, CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CullModeFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator~( CullModeFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( CullModeFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( CullModeFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & CullModeFlagBits::eFront ) result += "Front | ";
+    if ( value & CullModeFlagBits::eBack ) result += "Back | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class DebugReportFlagBitsEXT
+  {
+    eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT,
+    eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT,
+    ePerformanceWarning = VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+    eError = VK_DEBUG_REPORT_ERROR_BIT_EXT,
+    eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DebugReportFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case DebugReportFlagBitsEXT::eInformation : return "Information";
+      case DebugReportFlagBitsEXT::eWarning : return "Warning";
+      case DebugReportFlagBitsEXT::ePerformanceWarning : return "PerformanceWarning";
+      case DebugReportFlagBitsEXT::eError : return "Error";
+      case DebugReportFlagBitsEXT::eDebug : return "Debug";
+      default: return "invalid";
+    }
+  }
+
+  using DebugReportFlagsEXT = Flags<DebugReportFlagBitsEXT, VkDebugReportFlagsEXT>;
+
+  template <> struct FlagTraits<DebugReportFlagBitsEXT>
+  {
+    enum
+    {
+      allFlags = VkFlags(DebugReportFlagBitsEXT::eInformation) | VkFlags(DebugReportFlagBitsEXT::eWarning) | VkFlags(DebugReportFlagBitsEXT::ePerformanceWarning) | VkFlags(DebugReportFlagBitsEXT::eError) | VkFlags(DebugReportFlagBitsEXT::eDebug)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator|( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DebugReportFlagsEXT( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator&( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DebugReportFlagsEXT( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator^( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DebugReportFlagsEXT( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator~( DebugReportFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DebugReportFlagsEXT( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DebugReportFlagsEXT value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DebugReportFlagBitsEXT::eInformation ) result += "Information | ";
+    if ( value & DebugReportFlagBitsEXT::eWarning ) result += "Warning | ";
+    if ( value & DebugReportFlagBitsEXT::ePerformanceWarning ) result += "PerformanceWarning | ";
+    if ( value & DebugReportFlagBitsEXT::eError ) result += "Error | ";
+    if ( value & DebugReportFlagBitsEXT::eDebug ) result += "Debug | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class DebugUtilsMessageSeverityFlagBitsEXT
+  {
+    eVerbose = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT,
+    eInfo = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT,
+    eWarning = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT,
+    eError = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case DebugUtilsMessageSeverityFlagBitsEXT::eVerbose : return "Verbose";
+      case DebugUtilsMessageSeverityFlagBitsEXT::eInfo : return "Info";
+      case DebugUtilsMessageSeverityFlagBitsEXT::eWarning : return "Warning";
+      case DebugUtilsMessageSeverityFlagBitsEXT::eError : return "Error";
+      default: return "invalid";
+    }
+  }
+
+  using DebugUtilsMessageSeverityFlagsEXT = Flags<DebugUtilsMessageSeverityFlagBitsEXT, VkDebugUtilsMessageSeverityFlagsEXT>;
+
+  template <> struct FlagTraits<DebugUtilsMessageSeverityFlagBitsEXT>
+  {
+    enum
+    {
+      allFlags = VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eVerbose) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eInfo) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eWarning) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eError)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator|( DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DebugUtilsMessageSeverityFlagsEXT( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator&( DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DebugUtilsMessageSeverityFlagsEXT( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator^( DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DebugUtilsMessageSeverityFlagsEXT( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator~( DebugUtilsMessageSeverityFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DebugUtilsMessageSeverityFlagsEXT( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagsEXT value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eVerbose ) result += "Verbose | ";
+    if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eInfo ) result += "Info | ";
+    if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eWarning ) result += "Warning | ";
+    if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eError ) result += "Error | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class DebugUtilsMessageTypeFlagBitsEXT
+  {
+    eGeneral = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT,
+    eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT,
+    ePerformance = VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case DebugUtilsMessageTypeFlagBitsEXT::eGeneral : return "General";
+      case DebugUtilsMessageTypeFlagBitsEXT::eValidation : return "Validation";
+      case DebugUtilsMessageTypeFlagBitsEXT::ePerformance : return "Performance";
+      default: return "invalid";
+    }
+  }
+
+  using DebugUtilsMessageTypeFlagsEXT = Flags<DebugUtilsMessageTypeFlagBitsEXT, VkDebugUtilsMessageTypeFlagsEXT>;
+
+  template <> struct FlagTraits<DebugUtilsMessageTypeFlagBitsEXT>
+  {
+    enum
+    {
+      allFlags = VkFlags(DebugUtilsMessageTypeFlagBitsEXT::eGeneral) | VkFlags(DebugUtilsMessageTypeFlagBitsEXT::eValidation) | VkFlags(DebugUtilsMessageTypeFlagBitsEXT::ePerformance)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT operator|( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DebugUtilsMessageTypeFlagsEXT( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT operator&( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DebugUtilsMessageTypeFlagsEXT( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT operator^( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DebugUtilsMessageTypeFlagsEXT( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT operator~( DebugUtilsMessageTypeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DebugUtilsMessageTypeFlagsEXT( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagsEXT value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DebugUtilsMessageTypeFlagBitsEXT::eGeneral ) result += "General | ";
+    if ( value & DebugUtilsMessageTypeFlagBitsEXT::eValidation ) result += "Validation | ";
+    if ( value & DebugUtilsMessageTypeFlagBitsEXT::ePerformance ) result += "Performance | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class DebugUtilsMessengerCallbackDataFlagBitsEXT
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  using DebugUtilsMessengerCallbackDataFlagsEXT = Flags<DebugUtilsMessengerCallbackDataFlagBitsEXT, VkDebugUtilsMessengerCallbackDataFlagsEXT>;
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagsEXT  )
+  {
+    return "{}";
+  }
+
+  enum class DebugUtilsMessengerCreateFlagBitsEXT
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  using DebugUtilsMessengerCreateFlagsEXT = Flags<DebugUtilsMessengerCreateFlagBitsEXT, VkDebugUtilsMessengerCreateFlagsEXT>;
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagsEXT  )
+  {
+    return "{}";
+  }
+
+  enum class DependencyFlagBits
+  {
+    eByRegion = VK_DEPENDENCY_BY_REGION_BIT,
+    eDeviceGroup = VK_DEPENDENCY_DEVICE_GROUP_BIT,
+    eViewLocal = VK_DEPENDENCY_VIEW_LOCAL_BIT,
+    eViewLocalKHR = VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR,
+    eDeviceGroupKHR = VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DependencyFlagBits value )
+  {
+    switch ( value )
+    {
+      case DependencyFlagBits::eByRegion : return "ByRegion";
+      case DependencyFlagBits::eDeviceGroup : return "DeviceGroup";
+      case DependencyFlagBits::eViewLocal : return "ViewLocal";
+      default: return "invalid";
+    }
+  }
+
+  using DependencyFlags = Flags<DependencyFlagBits, VkDependencyFlags>;
+
+  template <> struct FlagTraits<DependencyFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(DependencyFlagBits::eByRegion) | VkFlags(DependencyFlagBits::eDeviceGroup) | VkFlags(DependencyFlagBits::eViewLocal)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator|( DependencyFlagBits bit0, DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DependencyFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator&( DependencyFlagBits bit0, DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DependencyFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator^( DependencyFlagBits bit0, DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DependencyFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator~( DependencyFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DependencyFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DependencyFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DependencyFlagBits::eByRegion ) result += "ByRegion | ";
+    if ( value & DependencyFlagBits::eDeviceGroup ) result += "DeviceGroup | ";
+    if ( value & DependencyFlagBits::eViewLocal ) result += "ViewLocal | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class DescriptorBindingFlagBitsEXT
+  {
+    eUpdateAfterBind = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT,
+    eUpdateUnusedWhilePending = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT,
+    ePartiallyBound = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT,
+    eVariableDescriptorCount = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case DescriptorBindingFlagBitsEXT::eUpdateAfterBind : return "UpdateAfterBind";
+      case DescriptorBindingFlagBitsEXT::eUpdateUnusedWhilePending : return "UpdateUnusedWhilePending";
+      case DescriptorBindingFlagBitsEXT::ePartiallyBound : return "PartiallyBound";
+      case DescriptorBindingFlagBitsEXT::eVariableDescriptorCount : return "VariableDescriptorCount";
+      default: return "invalid";
+    }
+  }
+
+  using DescriptorBindingFlagsEXT = Flags<DescriptorBindingFlagBitsEXT, VkDescriptorBindingFlagsEXT>;
+
+  template <> struct FlagTraits<DescriptorBindingFlagBitsEXT>
+  {
+    enum
+    {
+      allFlags = VkFlags(DescriptorBindingFlagBitsEXT::eUpdateAfterBind) | VkFlags(DescriptorBindingFlagBitsEXT::eUpdateUnusedWhilePending) | VkFlags(DescriptorBindingFlagBitsEXT::ePartiallyBound) | VkFlags(DescriptorBindingFlagBitsEXT::eVariableDescriptorCount)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlagsEXT operator|( DescriptorBindingFlagBitsEXT bit0, DescriptorBindingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DescriptorBindingFlagsEXT( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlagsEXT operator&( DescriptorBindingFlagBitsEXT bit0, DescriptorBindingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DescriptorBindingFlagsEXT( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlagsEXT operator^( DescriptorBindingFlagBitsEXT bit0, DescriptorBindingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DescriptorBindingFlagsEXT( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlagsEXT operator~( DescriptorBindingFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DescriptorBindingFlagsEXT( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlagsEXT value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DescriptorBindingFlagBitsEXT::eUpdateAfterBind ) result += "UpdateAfterBind | ";
+    if ( value & DescriptorBindingFlagBitsEXT::eUpdateUnusedWhilePending ) result += "UpdateUnusedWhilePending | ";
+    if ( value & DescriptorBindingFlagBitsEXT::ePartiallyBound ) result += "PartiallyBound | ";
+    if ( value & DescriptorBindingFlagBitsEXT::eVariableDescriptorCount ) result += "VariableDescriptorCount | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class DescriptorPoolCreateFlagBits
+  {
+    eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
+    eUpdateAfterBindEXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case DescriptorPoolCreateFlagBits::eFreeDescriptorSet : return "FreeDescriptorSet";
+      case DescriptorPoolCreateFlagBits::eUpdateAfterBindEXT : return "UpdateAfterBindEXT";
+      default: return "invalid";
+    }
+  }
+
+  using DescriptorPoolCreateFlags = Flags<DescriptorPoolCreateFlagBits, VkDescriptorPoolCreateFlags>;
+
+  template <> struct FlagTraits<DescriptorPoolCreateFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(DescriptorPoolCreateFlagBits::eFreeDescriptorSet) | VkFlags(DescriptorPoolCreateFlagBits::eUpdateAfterBindEXT)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator|( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DescriptorPoolCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator&( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DescriptorPoolCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator^( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DescriptorPoolCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator~( DescriptorPoolCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DescriptorPoolCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DescriptorPoolCreateFlagBits::eFreeDescriptorSet ) result += "FreeDescriptorSet | ";
+    if ( value & DescriptorPoolCreateFlagBits::eUpdateAfterBindEXT ) result += "UpdateAfterBindEXT | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class DescriptorPoolResetFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlagBits )
+  {
+    return "(void)";
+  }
+
+  using DescriptorPoolResetFlags = Flags<DescriptorPoolResetFlagBits, VkDescriptorPoolResetFlags>;
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlags  )
+  {
+    return "{}";
+  }
+
+  enum class DescriptorSetLayoutCreateFlagBits
+  {
+    ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR,
+    eUpdateAfterBindPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR : return "PushDescriptorKHR";
+      case DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPoolEXT : return "UpdateAfterBindPoolEXT";
+      default: return "invalid";
+    }
+  }
+
+  using DescriptorSetLayoutCreateFlags = Flags<DescriptorSetLayoutCreateFlagBits, VkDescriptorSetLayoutCreateFlags>;
+
+  template <> struct FlagTraits<DescriptorSetLayoutCreateFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR) | VkFlags(DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPoolEXT)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags operator|( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DescriptorSetLayoutCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags operator&( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DescriptorSetLayoutCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags operator^( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DescriptorSetLayoutCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags operator~( DescriptorSetLayoutCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DescriptorSetLayoutCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR ) result += "PushDescriptorKHR | ";
+    if ( value & DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPoolEXT ) result += "UpdateAfterBindPoolEXT | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class DescriptorUpdateTemplateCreateFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using DescriptorUpdateTemplateCreateFlags = Flags<DescriptorUpdateTemplateCreateFlagBits, VkDescriptorUpdateTemplateCreateFlags>;
+
+  using DescriptorUpdateTemplateCreateFlagsKHR = DescriptorUpdateTemplateCreateFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlags  )
+  {
+    return "{}";
+  }
+
+  enum class DeviceCreateFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using DeviceCreateFlags = Flags<DeviceCreateFlagBits, VkDeviceCreateFlags>;
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlags  )
+  {
+    return "{}";
+  }
+
+  enum class DeviceGroupPresentModeFlagBitsKHR
+  {
+    eLocal = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR,
+    eRemote = VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR,
+    eSum = VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR,
+    eLocalMultiDevice = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case DeviceGroupPresentModeFlagBitsKHR::eLocal : return "Local";
+      case DeviceGroupPresentModeFlagBitsKHR::eRemote : return "Remote";
+      case DeviceGroupPresentModeFlagBitsKHR::eSum : return "Sum";
+      case DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice : return "LocalMultiDevice";
+      default: return "invalid";
+    }
+  }
+
+  using DeviceGroupPresentModeFlagsKHR = Flags<DeviceGroupPresentModeFlagBitsKHR, VkDeviceGroupPresentModeFlagsKHR>;
+
+  template <> struct FlagTraits<DeviceGroupPresentModeFlagBitsKHR>
+  {
+    enum
+    {
+      allFlags = VkFlags(DeviceGroupPresentModeFlagBitsKHR::eLocal) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eRemote) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eSum) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR operator|( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DeviceGroupPresentModeFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR operator&( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DeviceGroupPresentModeFlagsKHR( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR operator^( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DeviceGroupPresentModeFlagsKHR( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR operator~( DeviceGroupPresentModeFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DeviceGroupPresentModeFlagsKHR( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagsKHR value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocal ) result += "Local | ";
+    if ( value & DeviceGroupPresentModeFlagBitsKHR::eRemote ) result += "Remote | ";
+    if ( value & DeviceGroupPresentModeFlagBitsKHR::eSum ) result += "Sum | ";
+    if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice ) result += "LocalMultiDevice | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class DeviceQueueCreateFlagBits
+  {
+    eProtected = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case DeviceQueueCreateFlagBits::eProtected : return "Protected";
+      default: return "invalid";
+    }
+  }
+
+  using DeviceQueueCreateFlags = Flags<DeviceQueueCreateFlagBits, VkDeviceQueueCreateFlags>;
+
+  template <> struct FlagTraits<DeviceQueueCreateFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(DeviceQueueCreateFlagBits::eProtected)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator|( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DeviceQueueCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator&( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DeviceQueueCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator^( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DeviceQueueCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator~( DeviceQueueCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DeviceQueueCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DeviceQueueCreateFlagBits::eProtected ) result += "Protected | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class DisplayModeCreateFlagBitsKHR
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagBitsKHR )
+  {
+    return "(void)";
+  }
+
+  using DisplayModeCreateFlagsKHR = Flags<DisplayModeCreateFlagBitsKHR, VkDisplayModeCreateFlagsKHR>;
+
+  VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagsKHR  )
+  {
+    return "{}";
+  }
+
+  enum class DisplayPlaneAlphaFlagBitsKHR
+  {
+    eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR,
+    eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR,
+    ePerPixel = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR,
+    ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case DisplayPlaneAlphaFlagBitsKHR::eOpaque : return "Opaque";
+      case DisplayPlaneAlphaFlagBitsKHR::eGlobal : return "Global";
+      case DisplayPlaneAlphaFlagBitsKHR::ePerPixel : return "PerPixel";
+      case DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied : return "PerPixelPremultiplied";
+      default: return "invalid";
+    }
+  }
+
+  using DisplayPlaneAlphaFlagsKHR = Flags<DisplayPlaneAlphaFlagBitsKHR, VkDisplayPlaneAlphaFlagsKHR>;
+
+  template <> struct FlagTraits<DisplayPlaneAlphaFlagBitsKHR>
+  {
+    enum
+    {
+      allFlags = VkFlags(DisplayPlaneAlphaFlagBitsKHR::eOpaque) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::eGlobal) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixel) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator|( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DisplayPlaneAlphaFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator&( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DisplayPlaneAlphaFlagsKHR( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator^( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DisplayPlaneAlphaFlagsKHR( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator~( DisplayPlaneAlphaFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DisplayPlaneAlphaFlagsKHR( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagsKHR value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DisplayPlaneAlphaFlagBitsKHR::eOpaque ) result += "Opaque | ";
+    if ( value & DisplayPlaneAlphaFlagBitsKHR::eGlobal ) result += "Global | ";
+    if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixel ) result += "PerPixel | ";
+    if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied ) result += "PerPixelPremultiplied | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class DisplaySurfaceCreateFlagBitsKHR
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagBitsKHR )
+  {
+    return "(void)";
+  }
+
+  using DisplaySurfaceCreateFlagsKHR = Flags<DisplaySurfaceCreateFlagBitsKHR, VkDisplaySurfaceCreateFlagsKHR>;
+
+  VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagsKHR  )
+  {
+    return "{}";
+  }
+
+  enum class EventCreateFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( EventCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using EventCreateFlags = Flags<EventCreateFlagBits, VkEventCreateFlags>;
+
+  VULKAN_HPP_INLINE std::string to_string( EventCreateFlags  )
+  {
+    return "{}";
+  }
+
+  enum class ExternalFenceFeatureFlagBits
+  {
+    eExportable = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT,
+    eImportable = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT,
+    eExportableKHR = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR,
+    eImportableKHR = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlagBits value )
+  {
+    switch ( value )
+    {
+      case ExternalFenceFeatureFlagBits::eExportable : return "Exportable";
+      case ExternalFenceFeatureFlagBits::eImportable : return "Importable";
+      default: return "invalid";
+    }
+  }
+
+  using ExternalFenceFeatureFlags = Flags<ExternalFenceFeatureFlagBits, VkExternalFenceFeatureFlags>;
+
+  template <> struct FlagTraits<ExternalFenceFeatureFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(ExternalFenceFeatureFlagBits::eExportable) | VkFlags(ExternalFenceFeatureFlagBits::eImportable)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator|( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalFenceFeatureFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator&( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalFenceFeatureFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator^( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalFenceFeatureFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator~( ExternalFenceFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ExternalFenceFeatureFlags( bits ) );
+  }
+
+  using ExternalFenceFeatureFlagsKHR = ExternalFenceFeatureFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ExternalFenceFeatureFlagBits::eExportable ) result += "Exportable | ";
+    if ( value & ExternalFenceFeatureFlagBits::eImportable ) result += "Importable | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class ExternalFenceHandleTypeFlagBits
+  {
+    eOpaqueFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT,
+    eOpaqueWin32 = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+    eOpaqueWin32Kmt = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+    eSyncFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT,
+    eOpaqueFdKHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR,
+    eOpaqueWin32KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR,
+    eOpaqueWin32KmtKHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR,
+    eSyncFdKHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlagBits value )
+  {
+    switch ( value )
+    {
+      case ExternalFenceHandleTypeFlagBits::eOpaqueFd : return "OpaqueFd";
+      case ExternalFenceHandleTypeFlagBits::eOpaqueWin32 : return "OpaqueWin32";
+      case ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt : return "OpaqueWin32Kmt";
+      case ExternalFenceHandleTypeFlagBits::eSyncFd : return "SyncFd";
+      default: return "invalid";
+    }
+  }
+
+  using ExternalFenceHandleTypeFlags = Flags<ExternalFenceHandleTypeFlagBits, VkExternalFenceHandleTypeFlags>;
+
+  template <> struct FlagTraits<ExternalFenceHandleTypeFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalFenceHandleTypeFlagBits::eSyncFd)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator|( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalFenceHandleTypeFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator&( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalFenceHandleTypeFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator^( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalFenceHandleTypeFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator~( ExternalFenceHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ExternalFenceHandleTypeFlags( bits ) );
+  }
+
+  using ExternalFenceHandleTypeFlagsKHR = ExternalFenceHandleTypeFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueFd ) result += "OpaqueFd | ";
+    if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32 ) result += "OpaqueWin32 | ";
+    if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | ";
+    if ( value & ExternalFenceHandleTypeFlagBits::eSyncFd ) result += "SyncFd | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class ExternalMemoryFeatureFlagBits
+  {
+    eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT,
+    eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT,
+    eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT,
+    eDedicatedOnlyKHR = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_KHR,
+    eExportableKHR = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR,
+    eImportableKHR = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBits value )
+  {
+    switch ( value )
+    {
+      case ExternalMemoryFeatureFlagBits::eDedicatedOnly : return "DedicatedOnly";
+      case ExternalMemoryFeatureFlagBits::eExportable : return "Exportable";
+      case ExternalMemoryFeatureFlagBits::eImportable : return "Importable";
+      default: return "invalid";
+    }
+  }
+
+  using ExternalMemoryFeatureFlags = Flags<ExternalMemoryFeatureFlagBits, VkExternalMemoryFeatureFlags>;
+
+  template <> struct FlagTraits<ExternalMemoryFeatureFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(ExternalMemoryFeatureFlagBits::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBits::eExportable) | VkFlags(ExternalMemoryFeatureFlagBits::eImportable)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator|( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryFeatureFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator&( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryFeatureFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator^( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryFeatureFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator~( ExternalMemoryFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ExternalMemoryFeatureFlags( bits ) );
+  }
+
+  using ExternalMemoryFeatureFlagsKHR = ExternalMemoryFeatureFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ExternalMemoryFeatureFlagBits::eDedicatedOnly ) result += "DedicatedOnly | ";
+    if ( value & ExternalMemoryFeatureFlagBits::eExportable ) result += "Exportable | ";
+    if ( value & ExternalMemoryFeatureFlagBits::eImportable ) result += "Importable | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class ExternalMemoryFeatureFlagBitsNV
+  {
+    eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV,
+    eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV,
+    eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBitsNV value )
+  {
+    switch ( value )
+    {
+      case ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly : return "DedicatedOnly";
+      case ExternalMemoryFeatureFlagBitsNV::eExportable : return "Exportable";
+      case ExternalMemoryFeatureFlagBitsNV::eImportable : return "Importable";
+      default: return "invalid";
+    }
+  }
+
+  using ExternalMemoryFeatureFlagsNV = Flags<ExternalMemoryFeatureFlagBitsNV, VkExternalMemoryFeatureFlagsNV>;
+
+  template <> struct FlagTraits<ExternalMemoryFeatureFlagBitsNV>
+  {
+    enum
+    {
+      allFlags = VkFlags(ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eExportable) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eImportable)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator|( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryFeatureFlagsNV( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator&( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryFeatureFlagsNV( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator^( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryFeatureFlagsNV( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator~( ExternalMemoryFeatureFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ExternalMemoryFeatureFlagsNV( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagsNV value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly ) result += "DedicatedOnly | ";
+    if ( value & ExternalMemoryFeatureFlagBitsNV::eExportable ) result += "Exportable | ";
+    if ( value & ExternalMemoryFeatureFlagBitsNV::eImportable ) result += "Importable | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class ExternalMemoryHandleTypeFlagBits
+  {
+    eOpaqueFd = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
+    eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+    eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+    eD3D11Texture = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT,
+    eD3D11TextureKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT,
+    eD3D12Heap = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT,
+    eD3D12Resource = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT,
+    eDmaBufEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
+    eAndroidHardwareBufferANDROID = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,
+    eHostAllocationEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
+    eHostMappedForeignMemoryEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT,
+    eOpaqueFdKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR,
+    eOpaqueWin32KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR,
+    eOpaqueWin32KmtKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR,
+    eD3D11TextureKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT_KHR,
+    eD3D11TextureKmtKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT_KHR,
+    eD3D12HeapKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT_KHR,
+    eD3D12ResourceKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBits value )
+  {
+    switch ( value )
+    {
+      case ExternalMemoryHandleTypeFlagBits::eOpaqueFd : return "OpaqueFd";
+      case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 : return "OpaqueWin32";
+      case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt : return "OpaqueWin32Kmt";
+      case ExternalMemoryHandleTypeFlagBits::eD3D11Texture : return "D3D11Texture";
+      case ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt : return "D3D11TextureKmt";
+      case ExternalMemoryHandleTypeFlagBits::eD3D12Heap : return "D3D12Heap";
+      case ExternalMemoryHandleTypeFlagBits::eD3D12Resource : return "D3D12Resource";
+      case ExternalMemoryHandleTypeFlagBits::eDmaBufEXT : return "DmaBufEXT";
+      case ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID : return "AndroidHardwareBufferANDROID";
+      case ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT : return "HostAllocationEXT";
+      case ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT : return "HostMappedForeignMemoryEXT";
+      default: return "invalid";
+    }
+  }
+
+  using ExternalMemoryHandleTypeFlags = Flags<ExternalMemoryHandleTypeFlagBits, VkExternalMemoryHandleTypeFlags>;
+
+  template <> struct FlagTraits<ExternalMemoryHandleTypeFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D11Texture) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D12Heap) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D12Resource) | VkFlags(ExternalMemoryHandleTypeFlagBits::eDmaBufEXT) | VkFlags(ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID) | VkFlags(ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT) | VkFlags(ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags operator|( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryHandleTypeFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags operator&( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryHandleTypeFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags operator^( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryHandleTypeFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags operator~( ExternalMemoryHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ExternalMemoryHandleTypeFlags( bits ) );
+  }
+
+  using ExternalMemoryHandleTypeFlagsKHR = ExternalMemoryHandleTypeFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) result += "OpaqueFd | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 ) result += "OpaqueWin32 | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11Texture ) result += "D3D11Texture | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt ) result += "D3D11TextureKmt | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Heap ) result += "D3D12Heap | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Resource ) result += "D3D12Resource | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eDmaBufEXT ) result += "DmaBufEXT | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID ) result += "AndroidHardwareBufferANDROID | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT ) result += "HostAllocationEXT | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT ) result += "HostMappedForeignMemoryEXT | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class ExternalMemoryHandleTypeFlagBitsNV
+  {
+    eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV,
+    eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV,
+    eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV,
+    eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBitsNV value )
+  {
+    switch ( value )
+    {
+      case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 : return "OpaqueWin32";
+      case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt : return "OpaqueWin32Kmt";
+      case ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image : return "D3D11Image";
+      case ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt : return "D3D11ImageKmt";
+      default: return "invalid";
+    }
+  }
+
+  using ExternalMemoryHandleTypeFlagsNV = Flags<ExternalMemoryHandleTypeFlagBitsNV, VkExternalMemoryHandleTypeFlagsNV>;
+
+  template <> struct FlagTraits<ExternalMemoryHandleTypeFlagBitsNV>
+  {
+    enum
+    {
+      allFlags = VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV operator|( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryHandleTypeFlagsNV( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV operator&( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryHandleTypeFlagsNV( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV operator^( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryHandleTypeFlagsNV( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV operator~( ExternalMemoryHandleTypeFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ExternalMemoryHandleTypeFlagsNV( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagsNV value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 ) result += "OpaqueWin32 | ";
+    if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | ";
+    if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image ) result += "D3D11Image | ";
+    if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt ) result += "D3D11ImageKmt | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class ExternalSemaphoreFeatureFlagBits
+  {
+    eExportable = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT,
+    eImportable = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT,
+    eExportableKHR = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR,
+    eImportableKHR = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlagBits value )
+  {
+    switch ( value )
+    {
+      case ExternalSemaphoreFeatureFlagBits::eExportable : return "Exportable";
+      case ExternalSemaphoreFeatureFlagBits::eImportable : return "Importable";
+      default: return "invalid";
+    }
+  }
+
+  using ExternalSemaphoreFeatureFlags = Flags<ExternalSemaphoreFeatureFlagBits, VkExternalSemaphoreFeatureFlags>;
+
+  template <> struct FlagTraits<ExternalSemaphoreFeatureFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(ExternalSemaphoreFeatureFlagBits::eExportable) | VkFlags(ExternalSemaphoreFeatureFlagBits::eImportable)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags operator|( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalSemaphoreFeatureFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags operator&( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalSemaphoreFeatureFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags operator^( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalSemaphoreFeatureFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags operator~( ExternalSemaphoreFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ExternalSemaphoreFeatureFlags( bits ) );
+  }
+
+  using ExternalSemaphoreFeatureFlagsKHR = ExternalSemaphoreFeatureFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ExternalSemaphoreFeatureFlagBits::eExportable ) result += "Exportable | ";
+    if ( value & ExternalSemaphoreFeatureFlagBits::eImportable ) result += "Importable | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class ExternalSemaphoreHandleTypeFlagBits
+  {
+    eOpaqueFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
+    eOpaqueWin32 = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+    eOpaqueWin32Kmt = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+    eD3D12Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT,
+    eSyncFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
+    eOpaqueFdKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR,
+    eOpaqueWin32KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR,
+    eOpaqueWin32KmtKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR,
+    eD3D12FenceKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT_KHR,
+    eSyncFdKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlagBits value )
+  {
+    switch ( value )
+    {
+      case ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd : return "OpaqueFd";
+      case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 : return "OpaqueWin32";
+      case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt : return "OpaqueWin32Kmt";
+      case ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence : return "D3D12Fence";
+      case ExternalSemaphoreHandleTypeFlagBits::eSyncFd : return "SyncFd";
+      default: return "invalid";
+    }
+  }
+
+  using ExternalSemaphoreHandleTypeFlags = Flags<ExternalSemaphoreHandleTypeFlagBits, VkExternalSemaphoreHandleTypeFlags>;
+
+  template <> struct FlagTraits<ExternalSemaphoreHandleTypeFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eSyncFd)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags operator|( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalSemaphoreHandleTypeFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags operator&( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalSemaphoreHandleTypeFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags operator^( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalSemaphoreHandleTypeFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags operator~( ExternalSemaphoreHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ExternalSemaphoreHandleTypeFlags( bits ) );
+  }
+
+  using ExternalSemaphoreHandleTypeFlagsKHR = ExternalSemaphoreHandleTypeFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) result += "OpaqueFd | ";
+    if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 ) result += "OpaqueWin32 | ";
+    if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | ";
+    if ( value & ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence ) result += "D3D12Fence | ";
+    if ( value & ExternalSemaphoreHandleTypeFlagBits::eSyncFd ) result += "SyncFd | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class FenceCreateFlagBits
+  {
+    eSignaled = VK_FENCE_CREATE_SIGNALED_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( FenceCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case FenceCreateFlagBits::eSignaled : return "Signaled";
+      default: return "invalid";
+    }
+  }
+
+  using FenceCreateFlags = Flags<FenceCreateFlagBits, VkFenceCreateFlags>;
+
+  template <> struct FlagTraits<FenceCreateFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(FenceCreateFlagBits::eSignaled)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator|( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FenceCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator&( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FenceCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator^( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FenceCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator~( FenceCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( FenceCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( FenceCreateFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & FenceCreateFlagBits::eSignaled ) result += "Signaled | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class FenceImportFlagBits
+  {
+    eTemporary = VK_FENCE_IMPORT_TEMPORARY_BIT,
+    eTemporaryKHR = VK_FENCE_IMPORT_TEMPORARY_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( FenceImportFlagBits value )
+  {
+    switch ( value )
+    {
+      case FenceImportFlagBits::eTemporary : return "Temporary";
+      default: return "invalid";
+    }
+  }
+
+  using FenceImportFlags = Flags<FenceImportFlagBits, VkFenceImportFlags>;
+
+  template <> struct FlagTraits<FenceImportFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(FenceImportFlagBits::eTemporary)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator|( FenceImportFlagBits bit0, FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FenceImportFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator&( FenceImportFlagBits bit0, FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FenceImportFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator^( FenceImportFlagBits bit0, FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FenceImportFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator~( FenceImportFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( FenceImportFlags( bits ) );
+  }
+
+  using FenceImportFlagsKHR = FenceImportFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( FenceImportFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & FenceImportFlagBits::eTemporary ) result += "Temporary | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class FormatFeatureFlagBits
+  {
+    eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT,
+    eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT,
+    eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT,
+    eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT,
+    eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT,
+    eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT,
+    eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT,
+    eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT,
+    eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT,
+    eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT,
+    eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT,
+    eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT,
+    eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT,
+    eTransferSrc = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT,
+    eTransferDst = VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
+    eMidpointChromaSamples = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT,
+    eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,
+    eSampledImageYcbcrConversionSeparateReconstructionFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,
+    eSampledImageYcbcrConversionChromaReconstructionExplicit = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT,
+    eSampledImageYcbcrConversionChromaReconstructionExplicitForceable = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT,
+    eDisjoint = VK_FORMAT_FEATURE_DISJOINT_BIT,
+    eCositedChromaSamples = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT,
+    eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG,
+    eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT,
+    eFragmentDensityMapEXT = VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT,
+    eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR,
+    eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR,
+    eMidpointChromaSamplesKHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR,
+    eSampledImageYcbcrConversionLinearFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR,
+    eSampledImageYcbcrConversionSeparateReconstructionFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR,
+    eSampledImageYcbcrConversionChromaReconstructionExplicitKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR,
+    eSampledImageYcbcrConversionChromaReconstructionExplicitForceableKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR,
+    eDisjointKHR = VK_FORMAT_FEATURE_DISJOINT_BIT_KHR,
+    eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR,
+    eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlagBits value )
+  {
+    switch ( value )
+    {
+      case FormatFeatureFlagBits::eSampledImage : return "SampledImage";
+      case FormatFeatureFlagBits::eStorageImage : return "StorageImage";
+      case FormatFeatureFlagBits::eStorageImageAtomic : return "StorageImageAtomic";
+      case FormatFeatureFlagBits::eUniformTexelBuffer : return "UniformTexelBuffer";
+      case FormatFeatureFlagBits::eStorageTexelBuffer : return "StorageTexelBuffer";
+      case FormatFeatureFlagBits::eStorageTexelBufferAtomic : return "StorageTexelBufferAtomic";
+      case FormatFeatureFlagBits::eVertexBuffer : return "VertexBuffer";
+      case FormatFeatureFlagBits::eColorAttachment : return "ColorAttachment";
+      case FormatFeatureFlagBits::eColorAttachmentBlend : return "ColorAttachmentBlend";
+      case FormatFeatureFlagBits::eDepthStencilAttachment : return "DepthStencilAttachment";
+      case FormatFeatureFlagBits::eBlitSrc : return "BlitSrc";
+      case FormatFeatureFlagBits::eBlitDst : return "BlitDst";
+      case FormatFeatureFlagBits::eSampledImageFilterLinear : return "SampledImageFilterLinear";
+      case FormatFeatureFlagBits::eTransferSrc : return "TransferSrc";
+      case FormatFeatureFlagBits::eTransferDst : return "TransferDst";
+      case FormatFeatureFlagBits::eMidpointChromaSamples : return "MidpointChromaSamples";
+      case FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter : return "SampledImageYcbcrConversionLinearFilter";
+      case FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter : return "SampledImageYcbcrConversionSeparateReconstructionFilter";
+      case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit : return "SampledImageYcbcrConversionChromaReconstructionExplicit";
+      case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable : return "SampledImageYcbcrConversionChromaReconstructionExplicitForceable";
+      case FormatFeatureFlagBits::eDisjoint : return "Disjoint";
+      case FormatFeatureFlagBits::eCositedChromaSamples : return "CositedChromaSamples";
+      case FormatFeatureFlagBits::eSampledImageFilterCubicIMG : return "SampledImageFilterCubicIMG";
+      case FormatFeatureFlagBits::eSampledImageFilterMinmaxEXT : return "SampledImageFilterMinmaxEXT";
+      case FormatFeatureFlagBits::eFragmentDensityMapEXT : return "FragmentDensityMapEXT";
+      default: return "invalid";
+    }
+  }
+
+  using FormatFeatureFlags = Flags<FormatFeatureFlagBits, VkFormatFeatureFlags>;
+
+  template <> struct FlagTraits<FormatFeatureFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(FormatFeatureFlagBits::eSampledImage) | VkFlags(FormatFeatureFlagBits::eStorageImage) | VkFlags(FormatFeatureFlagBits::eStorageImageAtomic) | VkFlags(FormatFeatureFlagBits::eUniformTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBufferAtomic) | VkFlags(FormatFeatureFlagBits::eVertexBuffer) | VkFlags(FormatFeatureFlagBits::eColorAttachment) | VkFlags(FormatFeatureFlagBits::eColorAttachmentBlend) | VkFlags(FormatFeatureFlagBits::eDepthStencilAttachment) | VkFlags(FormatFeatureFlagBits::eBlitSrc) | VkFlags(FormatFeatureFlagBits::eBlitDst) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterLinear) | VkFlags(FormatFeatureFlagBits::eTransferSrc) | VkFlags(FormatFeatureFlagBits::eTransferDst) | VkFlags(FormatFeatureFlagBits::eMidpointChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable) | VkFlags(FormatFeatureFlagBits::eDisjoint) | VkFlags(FormatFeatureFlagBits::eCositedChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterCubicIMG) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterMinmaxEXT) | VkFlags(FormatFeatureFlagBits::eFragmentDensityMapEXT)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator|( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FormatFeatureFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator&( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FormatFeatureFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator^( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FormatFeatureFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator~( FormatFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( FormatFeatureFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & FormatFeatureFlagBits::eSampledImage ) result += "SampledImage | ";
+    if ( value & FormatFeatureFlagBits::eStorageImage ) result += "StorageImage | ";
+    if ( value & FormatFeatureFlagBits::eStorageImageAtomic ) result += "StorageImageAtomic | ";
+    if ( value & FormatFeatureFlagBits::eUniformTexelBuffer ) result += "UniformTexelBuffer | ";
+    if ( value & FormatFeatureFlagBits::eStorageTexelBuffer ) result += "StorageTexelBuffer | ";
+    if ( value & FormatFeatureFlagBits::eStorageTexelBufferAtomic ) result += "StorageTexelBufferAtomic | ";
+    if ( value & FormatFeatureFlagBits::eVertexBuffer ) result += "VertexBuffer | ";
+    if ( value & FormatFeatureFlagBits::eColorAttachment ) result += "ColorAttachment | ";
+    if ( value & FormatFeatureFlagBits::eColorAttachmentBlend ) result += "ColorAttachmentBlend | ";
+    if ( value & FormatFeatureFlagBits::eDepthStencilAttachment ) result += "DepthStencilAttachment | ";
+    if ( value & FormatFeatureFlagBits::eBlitSrc ) result += "BlitSrc | ";
+    if ( value & FormatFeatureFlagBits::eBlitDst ) result += "BlitDst | ";
+    if ( value & FormatFeatureFlagBits::eSampledImageFilterLinear ) result += "SampledImageFilterLinear | ";
+    if ( value & FormatFeatureFlagBits::eTransferSrc ) result += "TransferSrc | ";
+    if ( value & FormatFeatureFlagBits::eTransferDst ) result += "TransferDst | ";
+    if ( value & FormatFeatureFlagBits::eMidpointChromaSamples ) result += "MidpointChromaSamples | ";
+    if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter ) result += "SampledImageYcbcrConversionLinearFilter | ";
+    if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter ) result += "SampledImageYcbcrConversionSeparateReconstructionFilter | ";
+    if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit ) result += "SampledImageYcbcrConversionChromaReconstructionExplicit | ";
+    if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) result += "SampledImageYcbcrConversionChromaReconstructionExplicitForceable | ";
+    if ( value & FormatFeatureFlagBits::eDisjoint ) result += "Disjoint | ";
+    if ( value & FormatFeatureFlagBits::eCositedChromaSamples ) result += "CositedChromaSamples | ";
+    if ( value & FormatFeatureFlagBits::eSampledImageFilterCubicIMG ) result += "SampledImageFilterCubicIMG | ";
+    if ( value & FormatFeatureFlagBits::eSampledImageFilterMinmaxEXT ) result += "SampledImageFilterMinmaxEXT | ";
+    if ( value & FormatFeatureFlagBits::eFragmentDensityMapEXT ) result += "FragmentDensityMapEXT | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class FramebufferCreateFlagBits
+  {
+    eImagelessKHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case FramebufferCreateFlagBits::eImagelessKHR : return "ImagelessKHR";
+      default: return "invalid";
+    }
+  }
+
+  using FramebufferCreateFlags = Flags<FramebufferCreateFlagBits, VkFramebufferCreateFlags>;
+
+  template <> struct FlagTraits<FramebufferCreateFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(FramebufferCreateFlagBits::eImagelessKHR)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator|( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FramebufferCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator&( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FramebufferCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator^( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FramebufferCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator~( FramebufferCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( FramebufferCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & FramebufferCreateFlagBits::eImagelessKHR ) result += "ImagelessKHR | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class GeometryFlagBitsNV
+  {
+    eOpaque = VK_GEOMETRY_OPAQUE_BIT_NV,
+    eNoDuplicateAnyHitInvocation = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( GeometryFlagBitsNV value )
+  {
+    switch ( value )
+    {
+      case GeometryFlagBitsNV::eOpaque : return "Opaque";
+      case GeometryFlagBitsNV::eNoDuplicateAnyHitInvocation : return "NoDuplicateAnyHitInvocation";
+      default: return "invalid";
+    }
+  }
+
+  using GeometryFlagsNV = Flags<GeometryFlagBitsNV, VkGeometryFlagsNV>;
+
+  template <> struct FlagTraits<GeometryFlagBitsNV>
+  {
+    enum
+    {
+      allFlags = VkFlags(GeometryFlagBitsNV::eOpaque) | VkFlags(GeometryFlagBitsNV::eNoDuplicateAnyHitInvocation)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsNV operator|( GeometryFlagBitsNV bit0, GeometryFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return GeometryFlagsNV( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsNV operator&( GeometryFlagBitsNV bit0, GeometryFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return GeometryFlagsNV( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsNV operator^( GeometryFlagBitsNV bit0, GeometryFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return GeometryFlagsNV( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsNV operator~( GeometryFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( GeometryFlagsNV( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( GeometryFlagsNV value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & GeometryFlagBitsNV::eOpaque ) result += "Opaque | ";
+    if ( value & GeometryFlagBitsNV::eNoDuplicateAnyHitInvocation ) result += "NoDuplicateAnyHitInvocation | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class GeometryInstanceFlagBitsNV
+  {
+    eTriangleCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV,
+    eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV,
+    eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV,
+    eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagBitsNV value )
+  {
+    switch ( value )
+    {
+      case GeometryInstanceFlagBitsNV::eTriangleCullDisable : return "TriangleCullDisable";
+      case GeometryInstanceFlagBitsNV::eTriangleFrontCounterclockwise : return "TriangleFrontCounterclockwise";
+      case GeometryInstanceFlagBitsNV::eForceOpaque : return "ForceOpaque";
+      case GeometryInstanceFlagBitsNV::eForceNoOpaque : return "ForceNoOpaque";
+      default: return "invalid";
+    }
+  }
+
+  using GeometryInstanceFlagsNV = Flags<GeometryInstanceFlagBitsNV, VkGeometryInstanceFlagsNV>;
+
+  template <> struct FlagTraits<GeometryInstanceFlagBitsNV>
+  {
+    enum
+    {
+      allFlags = VkFlags(GeometryInstanceFlagBitsNV::eTriangleCullDisable) | VkFlags(GeometryInstanceFlagBitsNV::eTriangleFrontCounterclockwise) | VkFlags(GeometryInstanceFlagBitsNV::eForceOpaque) | VkFlags(GeometryInstanceFlagBitsNV::eForceNoOpaque)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsNV operator|( GeometryInstanceFlagBitsNV bit0, GeometryInstanceFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return GeometryInstanceFlagsNV( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsNV operator&( GeometryInstanceFlagBitsNV bit0, GeometryInstanceFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return GeometryInstanceFlagsNV( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsNV operator^( GeometryInstanceFlagBitsNV bit0, GeometryInstanceFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return GeometryInstanceFlagsNV( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsNV operator~( GeometryInstanceFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( GeometryInstanceFlagsNV( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagsNV value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & GeometryInstanceFlagBitsNV::eTriangleCullDisable ) result += "TriangleCullDisable | ";
+    if ( value & GeometryInstanceFlagBitsNV::eTriangleFrontCounterclockwise ) result += "TriangleFrontCounterclockwise | ";
+    if ( value & GeometryInstanceFlagBitsNV::eForceOpaque ) result += "ForceOpaque | ";
+    if ( value & GeometryInstanceFlagBitsNV::eForceNoOpaque ) result += "ForceNoOpaque | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class HeadlessSurfaceCreateFlagBitsEXT
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  using HeadlessSurfaceCreateFlagsEXT = Flags<HeadlessSurfaceCreateFlagBitsEXT, VkHeadlessSurfaceCreateFlagsEXT>;
+
+  VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagsEXT  )
+  {
+    return "{}";
+  }
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+  enum class IOSSurfaceCreateFlagBitsMVK
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagBitsMVK )
+  {
+    return "(void)";
+  }
+
+  using IOSSurfaceCreateFlagsMVK = Flags<IOSSurfaceCreateFlagBitsMVK, VkIOSSurfaceCreateFlagsMVK>;
+
+  VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagsMVK  )
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+  enum class ImageAspectFlagBits
+  {
+    eColor = VK_IMAGE_ASPECT_COLOR_BIT,
+    eDepth = VK_IMAGE_ASPECT_DEPTH_BIT,
+    eStencil = VK_IMAGE_ASPECT_STENCIL_BIT,
+    eMetadata = VK_IMAGE_ASPECT_METADATA_BIT,
+    ePlane0 = VK_IMAGE_ASPECT_PLANE_0_BIT,
+    ePlane1 = VK_IMAGE_ASPECT_PLANE_1_BIT,
+    ePlane2 = VK_IMAGE_ASPECT_PLANE_2_BIT,
+    eMemoryPlane0EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT,
+    eMemoryPlane1EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT,
+    eMemoryPlane2EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT,
+    eMemoryPlane3EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT,
+    ePlane0KHR = VK_IMAGE_ASPECT_PLANE_0_BIT_KHR,
+    ePlane1KHR = VK_IMAGE_ASPECT_PLANE_1_BIT_KHR,
+    ePlane2KHR = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ImageAspectFlagBits value )
+  {
+    switch ( value )
+    {
+      case ImageAspectFlagBits::eColor : return "Color";
+      case ImageAspectFlagBits::eDepth : return "Depth";
+      case ImageAspectFlagBits::eStencil : return "Stencil";
+      case ImageAspectFlagBits::eMetadata : return "Metadata";
+      case ImageAspectFlagBits::ePlane0 : return "Plane0";
+      case ImageAspectFlagBits::ePlane1 : return "Plane1";
+      case ImageAspectFlagBits::ePlane2 : return "Plane2";
+      case ImageAspectFlagBits::eMemoryPlane0EXT : return "MemoryPlane0EXT";
+      case ImageAspectFlagBits::eMemoryPlane1EXT : return "MemoryPlane1EXT";
+      case ImageAspectFlagBits::eMemoryPlane2EXT : return "MemoryPlane2EXT";
+      case ImageAspectFlagBits::eMemoryPlane3EXT : return "MemoryPlane3EXT";
+      default: return "invalid";
+    }
+  }
+
+  using ImageAspectFlags = Flags<ImageAspectFlagBits, VkImageAspectFlags>;
+
+  template <> struct FlagTraits<ImageAspectFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(ImageAspectFlagBits::eColor) | VkFlags(ImageAspectFlagBits::eDepth) | VkFlags(ImageAspectFlagBits::eStencil) | VkFlags(ImageAspectFlagBits::eMetadata) | VkFlags(ImageAspectFlagBits::ePlane0) | VkFlags(ImageAspectFlagBits::ePlane1) | VkFlags(ImageAspectFlagBits::ePlane2) | VkFlags(ImageAspectFlagBits::eMemoryPlane0EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane1EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane2EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane3EXT)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator|( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageAspectFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator&( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageAspectFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator^( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageAspectFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator~( ImageAspectFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ImageAspectFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ImageAspectFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ImageAspectFlagBits::eColor ) result += "Color | ";
+    if ( value & ImageAspectFlagBits::eDepth ) result += "Depth | ";
+    if ( value & ImageAspectFlagBits::eStencil ) result += "Stencil | ";
+    if ( value & ImageAspectFlagBits::eMetadata ) result += "Metadata | ";
+    if ( value & ImageAspectFlagBits::ePlane0 ) result += "Plane0 | ";
+    if ( value & ImageAspectFlagBits::ePlane1 ) result += "Plane1 | ";
+    if ( value & ImageAspectFlagBits::ePlane2 ) result += "Plane2 | ";
+    if ( value & ImageAspectFlagBits::eMemoryPlane0EXT ) result += "MemoryPlane0EXT | ";
+    if ( value & ImageAspectFlagBits::eMemoryPlane1EXT ) result += "MemoryPlane1EXT | ";
+    if ( value & ImageAspectFlagBits::eMemoryPlane2EXT ) result += "MemoryPlane2EXT | ";
+    if ( value & ImageAspectFlagBits::eMemoryPlane3EXT ) result += "MemoryPlane3EXT | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class ImageCreateFlagBits
+  {
+    eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT,
+    eSparseResidency = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT,
+    eSparseAliased = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT,
+    eMutableFormat = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
+    eCubeCompatible = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT,
+    eAlias = VK_IMAGE_CREATE_ALIAS_BIT,
+    eSplitInstanceBindRegions = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT,
+    e2DArrayCompatible = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT,
+    eBlockTexelViewCompatible = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT,
+    eExtendedUsage = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT,
+    eProtected = VK_IMAGE_CREATE_PROTECTED_BIT,
+    eDisjoint = VK_IMAGE_CREATE_DISJOINT_BIT,
+    eCornerSampledNV = VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV,
+    eSampleLocationsCompatibleDepthEXT = VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT,
+    eSubsampledEXT = VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT,
+    eSplitInstanceBindRegionsKHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR,
+    e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR,
+    eBlockTexelViewCompatibleKHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR,
+    eExtendedUsageKHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR,
+    eDisjointKHR = VK_IMAGE_CREATE_DISJOINT_BIT_KHR,
+    eAliasKHR = VK_IMAGE_CREATE_ALIAS_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ImageCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case ImageCreateFlagBits::eSparseBinding : return "SparseBinding";
+      case ImageCreateFlagBits::eSparseResidency : return "SparseResidency";
+      case ImageCreateFlagBits::eSparseAliased : return "SparseAliased";
+      case ImageCreateFlagBits::eMutableFormat : return "MutableFormat";
+      case ImageCreateFlagBits::eCubeCompatible : return "CubeCompatible";
+      case ImageCreateFlagBits::eAlias : return "Alias";
+      case ImageCreateFlagBits::eSplitInstanceBindRegions : return "SplitInstanceBindRegions";
+      case ImageCreateFlagBits::e2DArrayCompatible : return "2DArrayCompatible";
+      case ImageCreateFlagBits::eBlockTexelViewCompatible : return "BlockTexelViewCompatible";
+      case ImageCreateFlagBits::eExtendedUsage : return "ExtendedUsage";
+      case ImageCreateFlagBits::eProtected : return "Protected";
+      case ImageCreateFlagBits::eDisjoint : return "Disjoint";
+      case ImageCreateFlagBits::eCornerSampledNV : return "CornerSampledNV";
+      case ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT : return "SampleLocationsCompatibleDepthEXT";
+      case ImageCreateFlagBits::eSubsampledEXT : return "SubsampledEXT";
+      default: return "invalid";
+    }
+  }
+
+  using ImageCreateFlags = Flags<ImageCreateFlagBits, VkImageCreateFlags>;
+
+  template <> struct FlagTraits<ImageCreateFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(ImageCreateFlagBits::eSparseBinding) | VkFlags(ImageCreateFlagBits::eSparseResidency) | VkFlags(ImageCreateFlagBits::eSparseAliased) | VkFlags(ImageCreateFlagBits::eMutableFormat) | VkFlags(ImageCreateFlagBits::eCubeCompatible) | VkFlags(ImageCreateFlagBits::eAlias) | VkFlags(ImageCreateFlagBits::eSplitInstanceBindRegions) | VkFlags(ImageCreateFlagBits::e2DArrayCompatible) | VkFlags(ImageCreateFlagBits::eBlockTexelViewCompatible) | VkFlags(ImageCreateFlagBits::eExtendedUsage) | VkFlags(ImageCreateFlagBits::eProtected) | VkFlags(ImageCreateFlagBits::eDisjoint) | VkFlags(ImageCreateFlagBits::eCornerSampledNV) | VkFlags(ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT) | VkFlags(ImageCreateFlagBits::eSubsampledEXT)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator|( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator&( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator^( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator~( ImageCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ImageCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ImageCreateFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ImageCreateFlagBits::eSparseBinding ) result += "SparseBinding | ";
+    if ( value & ImageCreateFlagBits::eSparseResidency ) result += "SparseResidency | ";
+    if ( value & ImageCreateFlagBits::eSparseAliased ) result += "SparseAliased | ";
+    if ( value & ImageCreateFlagBits::eMutableFormat ) result += "MutableFormat | ";
+    if ( value & ImageCreateFlagBits::eCubeCompatible ) result += "CubeCompatible | ";
+    if ( value & ImageCreateFlagBits::eAlias ) result += "Alias | ";
+    if ( value & ImageCreateFlagBits::eSplitInstanceBindRegions ) result += "SplitInstanceBindRegions | ";
+    if ( value & ImageCreateFlagBits::e2DArrayCompatible ) result += "2DArrayCompatible | ";
+    if ( value & ImageCreateFlagBits::eBlockTexelViewCompatible ) result += "BlockTexelViewCompatible | ";
+    if ( value & ImageCreateFlagBits::eExtendedUsage ) result += "ExtendedUsage | ";
+    if ( value & ImageCreateFlagBits::eProtected ) result += "Protected | ";
+    if ( value & ImageCreateFlagBits::eDisjoint ) result += "Disjoint | ";
+    if ( value & ImageCreateFlagBits::eCornerSampledNV ) result += "CornerSampledNV | ";
+    if ( value & ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT ) result += "SampleLocationsCompatibleDepthEXT | ";
+    if ( value & ImageCreateFlagBits::eSubsampledEXT ) result += "SubsampledEXT | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+  enum class ImagePipeSurfaceCreateFlagBitsFUCHSIA
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagBitsFUCHSIA )
+  {
+    return "(void)";
+  }
+
+  using ImagePipeSurfaceCreateFlagsFUCHSIA = Flags<ImagePipeSurfaceCreateFlagBitsFUCHSIA, VkImagePipeSurfaceCreateFlagsFUCHSIA>;
+
+  VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagsFUCHSIA  )
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  enum class ImageUsageFlagBits
+  {
+    eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+    eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+    eSampled = VK_IMAGE_USAGE_SAMPLED_BIT,
+    eStorage = VK_IMAGE_USAGE_STORAGE_BIT,
+    eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+    eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
+    eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT,
+    eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT,
+    eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV,
+    eFragmentDensityMapEXT = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ImageUsageFlagBits value )
+  {
+    switch ( value )
+    {
+      case ImageUsageFlagBits::eTransferSrc : return "TransferSrc";
+      case ImageUsageFlagBits::eTransferDst : return "TransferDst";
+      case ImageUsageFlagBits::eSampled : return "Sampled";
+      case ImageUsageFlagBits::eStorage : return "Storage";
+      case ImageUsageFlagBits::eColorAttachment : return "ColorAttachment";
+      case ImageUsageFlagBits::eDepthStencilAttachment : return "DepthStencilAttachment";
+      case ImageUsageFlagBits::eTransientAttachment : return "TransientAttachment";
+      case ImageUsageFlagBits::eInputAttachment : return "InputAttachment";
+      case ImageUsageFlagBits::eShadingRateImageNV : return "ShadingRateImageNV";
+      case ImageUsageFlagBits::eFragmentDensityMapEXT : return "FragmentDensityMapEXT";
+      default: return "invalid";
+    }
+  }
+
+  using ImageUsageFlags = Flags<ImageUsageFlagBits, VkImageUsageFlags>;
+
+  template <> struct FlagTraits<ImageUsageFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(ImageUsageFlagBits::eTransferSrc) | VkFlags(ImageUsageFlagBits::eTransferDst) | VkFlags(ImageUsageFlagBits::eSampled) | VkFlags(ImageUsageFlagBits::eStorage) | VkFlags(ImageUsageFlagBits::eColorAttachment) | VkFlags(ImageUsageFlagBits::eDepthStencilAttachment) | VkFlags(ImageUsageFlagBits::eTransientAttachment) | VkFlags(ImageUsageFlagBits::eInputAttachment) | VkFlags(ImageUsageFlagBits::eShadingRateImageNV) | VkFlags(ImageUsageFlagBits::eFragmentDensityMapEXT)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator|( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageUsageFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator&( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageUsageFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator^( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageUsageFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator~( ImageUsageFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ImageUsageFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ImageUsageFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ImageUsageFlagBits::eTransferSrc ) result += "TransferSrc | ";
+    if ( value & ImageUsageFlagBits::eTransferDst ) result += "TransferDst | ";
+    if ( value & ImageUsageFlagBits::eSampled ) result += "Sampled | ";
+    if ( value & ImageUsageFlagBits::eStorage ) result += "Storage | ";
+    if ( value & ImageUsageFlagBits::eColorAttachment ) result += "ColorAttachment | ";
+    if ( value & ImageUsageFlagBits::eDepthStencilAttachment ) result += "DepthStencilAttachment | ";
+    if ( value & ImageUsageFlagBits::eTransientAttachment ) result += "TransientAttachment | ";
+    if ( value & ImageUsageFlagBits::eInputAttachment ) result += "InputAttachment | ";
+    if ( value & ImageUsageFlagBits::eShadingRateImageNV ) result += "ShadingRateImageNV | ";
+    if ( value & ImageUsageFlagBits::eFragmentDensityMapEXT ) result += "FragmentDensityMapEXT | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
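+
+  // Usage sketch (illustrative only): the operators defined above let individual
+  // ImageUsageFlagBits be combined into a type-safe ImageUsageFlags mask and printed
+  // via to_string(), e.g.
+  //   ImageUsageFlags usage = ImageUsageFlagBits::eSampled | ImageUsageFlagBits::eColorAttachment;
+  //   to_string( usage );  // "{ Sampled | ColorAttachment }"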
+
+  enum class ImageViewCreateFlagBits
+  {
+    eFragmentDensityMapDynamicEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT : return "FragmentDensityMapDynamicEXT";
+      default: return "invalid";
+    }
+  }
+
+  using ImageViewCreateFlags = Flags<ImageViewCreateFlagBits, VkImageViewCreateFlags>;
+
+  template <> struct FlagTraits<ImageViewCreateFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator|( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageViewCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator&( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageViewCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator^( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageViewCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator~( ImageViewCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ImageViewCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT ) result += "FragmentDensityMapDynamicEXT | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class IndirectCommandsLayoutUsageFlagBitsNVX
+  {
+    eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX,
+    eSparseSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX,
+    eEmptyExecutions = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX,
+    eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagBitsNVX value )
+  {
+    switch ( value )
+    {
+      case IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences : return "UnorderedSequences";
+      case IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences : return "SparseSequences";
+      case IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions : return "EmptyExecutions";
+      case IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences : return "IndexedSequences";
+      default: return "invalid";
+    }
+  }
+
+  using IndirectCommandsLayoutUsageFlagsNVX = Flags<IndirectCommandsLayoutUsageFlagBitsNVX, VkIndirectCommandsLayoutUsageFlagsNVX>;
+
+  template <> struct FlagTraits<IndirectCommandsLayoutUsageFlagBitsNVX>
+  {
+    enum
+    {
+      allFlags = VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNVX operator|( IndirectCommandsLayoutUsageFlagBitsNVX bit0, IndirectCommandsLayoutUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return IndirectCommandsLayoutUsageFlagsNVX( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNVX operator&( IndirectCommandsLayoutUsageFlagBitsNVX bit0, IndirectCommandsLayoutUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return IndirectCommandsLayoutUsageFlagsNVX( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNVX operator^( IndirectCommandsLayoutUsageFlagBitsNVX bit0, IndirectCommandsLayoutUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return IndirectCommandsLayoutUsageFlagsNVX( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNVX operator~( IndirectCommandsLayoutUsageFlagBitsNVX bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( IndirectCommandsLayoutUsageFlagsNVX( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagsNVX value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences ) result += "UnorderedSequences | ";
+    if ( value & IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences ) result += "SparseSequences | ";
+    if ( value & IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions ) result += "EmptyExecutions | ";
+    if ( value & IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences ) result += "IndexedSequences | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class InstanceCreateFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using InstanceCreateFlags = Flags<InstanceCreateFlagBits, VkInstanceCreateFlags>;
+
+  VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlags  )
+  {
+    return "{}";
+  }
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+  enum class MacOSSurfaceCreateFlagBitsMVK
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagBitsMVK )
+  {
+    return "(void)";
+  }
+
+  using MacOSSurfaceCreateFlagsMVK = Flags<MacOSSurfaceCreateFlagBitsMVK, VkMacOSSurfaceCreateFlagsMVK>;
+
+  VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagsMVK  )
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  enum class MemoryAllocateFlagBits
+  {
+    eDeviceMask = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT,
+    eDeviceAddressKHR = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR,
+    eDeviceAddressCaptureReplayKHR = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR,
+    eDeviceMaskKHR = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlagBits value )
+  {
+    switch ( value )
+    {
+      case MemoryAllocateFlagBits::eDeviceMask : return "DeviceMask";
+      case MemoryAllocateFlagBits::eDeviceAddressKHR : return "DeviceAddressKHR";
+      case MemoryAllocateFlagBits::eDeviceAddressCaptureReplayKHR : return "DeviceAddressCaptureReplayKHR";
+      default: return "invalid";
+    }
+  }
+
+  using MemoryAllocateFlags = Flags<MemoryAllocateFlagBits, VkMemoryAllocateFlags>;
+
+  template <> struct FlagTraits<MemoryAllocateFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(MemoryAllocateFlagBits::eDeviceMask) | VkFlags(MemoryAllocateFlagBits::eDeviceAddressKHR) | VkFlags(MemoryAllocateFlagBits::eDeviceAddressCaptureReplayKHR)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator|( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return MemoryAllocateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator&( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return MemoryAllocateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator^( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return MemoryAllocateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator~( MemoryAllocateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( MemoryAllocateFlags( bits ) );
+  }
+
+  using MemoryAllocateFlagsKHR = MemoryAllocateFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & MemoryAllocateFlagBits::eDeviceMask ) result += "DeviceMask | ";
+    if ( value & MemoryAllocateFlagBits::eDeviceAddressKHR ) result += "DeviceAddressKHR | ";
+    if ( value & MemoryAllocateFlagBits::eDeviceAddressCaptureReplayKHR ) result += "DeviceAddressCaptureReplayKHR | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class MemoryHeapFlagBits
+  {
+    eDeviceLocal = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT,
+    eMultiInstance = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT,
+    eMultiInstanceKHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlagBits value )
+  {
+    switch ( value )
+    {
+      case MemoryHeapFlagBits::eDeviceLocal : return "DeviceLocal";
+      case MemoryHeapFlagBits::eMultiInstance : return "MultiInstance";
+      default: return "invalid";
+    }
+  }
+
+  using MemoryHeapFlags = Flags<MemoryHeapFlagBits, VkMemoryHeapFlags>;
+
+  template <> struct FlagTraits<MemoryHeapFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(MemoryHeapFlagBits::eDeviceLocal) | VkFlags(MemoryHeapFlagBits::eMultiInstance)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator|( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return MemoryHeapFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator&( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return MemoryHeapFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator^( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return MemoryHeapFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator~( MemoryHeapFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( MemoryHeapFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & MemoryHeapFlagBits::eDeviceLocal ) result += "DeviceLocal | ";
+    if ( value & MemoryHeapFlagBits::eMultiInstance ) result += "MultiInstance | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class MemoryMapFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryMapFlagBits )
+  {
+    return "(void)";
+  }
+
+  using MemoryMapFlags = Flags<MemoryMapFlagBits, VkMemoryMapFlags>;
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryMapFlags  )
+  {
+    return "{}";
+  }
+
+  enum class MemoryPropertyFlagBits
+  {
+    eDeviceLocal = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
+    eHostVisible = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
+    eHostCoherent = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
+    eHostCached = VK_MEMORY_PROPERTY_HOST_CACHED_BIT,
+    eLazilyAllocated = VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT,
+    eProtected = VK_MEMORY_PROPERTY_PROTECTED_BIT,
+    eDeviceCoherentAMD = VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD,
+    eDeviceUncachedAMD = VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlagBits value )
+  {
+    switch ( value )
+    {
+      case MemoryPropertyFlagBits::eDeviceLocal : return "DeviceLocal";
+      case MemoryPropertyFlagBits::eHostVisible : return "HostVisible";
+      case MemoryPropertyFlagBits::eHostCoherent : return "HostCoherent";
+      case MemoryPropertyFlagBits::eHostCached : return "HostCached";
+      case MemoryPropertyFlagBits::eLazilyAllocated : return "LazilyAllocated";
+      case MemoryPropertyFlagBits::eProtected : return "Protected";
+      case MemoryPropertyFlagBits::eDeviceCoherentAMD : return "DeviceCoherentAMD";
+      case MemoryPropertyFlagBits::eDeviceUncachedAMD : return "DeviceUncachedAMD";
+      default: return "invalid";
+    }
+  }
+
+  using MemoryPropertyFlags = Flags<MemoryPropertyFlagBits, VkMemoryPropertyFlags>;
+
+  template <> struct FlagTraits<MemoryPropertyFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(MemoryPropertyFlagBits::eDeviceLocal) | VkFlags(MemoryPropertyFlagBits::eHostVisible) | VkFlags(MemoryPropertyFlagBits::eHostCoherent) | VkFlags(MemoryPropertyFlagBits::eHostCached) | VkFlags(MemoryPropertyFlagBits::eLazilyAllocated) | VkFlags(MemoryPropertyFlagBits::eProtected) | VkFlags(MemoryPropertyFlagBits::eDeviceCoherentAMD) | VkFlags(MemoryPropertyFlagBits::eDeviceUncachedAMD)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator|( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return MemoryPropertyFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator&( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return MemoryPropertyFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator^( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return MemoryPropertyFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator~( MemoryPropertyFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( MemoryPropertyFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & MemoryPropertyFlagBits::eDeviceLocal ) result += "DeviceLocal | ";
+    if ( value & MemoryPropertyFlagBits::eHostVisible ) result += "HostVisible | ";
+    if ( value & MemoryPropertyFlagBits::eHostCoherent ) result += "HostCoherent | ";
+    if ( value & MemoryPropertyFlagBits::eHostCached ) result += "HostCached | ";
+    if ( value & MemoryPropertyFlagBits::eLazilyAllocated ) result += "LazilyAllocated | ";
+    if ( value & MemoryPropertyFlagBits::eProtected ) result += "Protected | ";
+    if ( value & MemoryPropertyFlagBits::eDeviceCoherentAMD ) result += "DeviceCoherentAMD | ";
+    if ( value & MemoryPropertyFlagBits::eDeviceUncachedAMD ) result += "DeviceUncachedAMD | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
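+
+  // Usage sketch (illustrative only): a FlagTraits-backed mask also supports bitwise
+  // tests through the operators above, e.g. checking for host-visible, host-coherent memory:
+  //   MemoryPropertyFlags props = MemoryPropertyFlagBits::eHostVisible | MemoryPropertyFlagBits::eHostCoherent;
+  //   if ( props & MemoryPropertyFlagBits::eHostCoherent ) { /* host writes need no explicit flush */ }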
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+  enum class MetalSurfaceCreateFlagBitsEXT
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  using MetalSurfaceCreateFlagsEXT = Flags<MetalSurfaceCreateFlagBitsEXT, VkMetalSurfaceCreateFlagsEXT>;
+
+  VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagsEXT  )
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  enum class ObjectEntryUsageFlagBitsNVX
+  {
+    eGraphics = VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX,
+    eCompute = VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ObjectEntryUsageFlagBitsNVX value )
+  {
+    switch ( value )
+    {
+      case ObjectEntryUsageFlagBitsNVX::eGraphics : return "Graphics";
+      case ObjectEntryUsageFlagBitsNVX::eCompute : return "Compute";
+      default: return "invalid";
+    }
+  }
+
+  using ObjectEntryUsageFlagsNVX = Flags<ObjectEntryUsageFlagBitsNVX, VkObjectEntryUsageFlagsNVX>;
+
+  template <> struct FlagTraits<ObjectEntryUsageFlagBitsNVX>
+  {
+    enum
+    {
+      allFlags = VkFlags(ObjectEntryUsageFlagBitsNVX::eGraphics) | VkFlags(ObjectEntryUsageFlagBitsNVX::eCompute)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ObjectEntryUsageFlagsNVX operator|( ObjectEntryUsageFlagBitsNVX bit0, ObjectEntryUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ObjectEntryUsageFlagsNVX( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ObjectEntryUsageFlagsNVX operator&( ObjectEntryUsageFlagBitsNVX bit0, ObjectEntryUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ObjectEntryUsageFlagsNVX( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ObjectEntryUsageFlagsNVX operator^( ObjectEntryUsageFlagBitsNVX bit0, ObjectEntryUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ObjectEntryUsageFlagsNVX( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ObjectEntryUsageFlagsNVX operator~( ObjectEntryUsageFlagBitsNVX bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ObjectEntryUsageFlagsNVX( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ObjectEntryUsageFlagsNVX value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ObjectEntryUsageFlagBitsNVX::eGraphics ) result += "Graphics | ";
+    if ( value & ObjectEntryUsageFlagBitsNVX::eCompute ) result += "Compute | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class PeerMemoryFeatureFlagBits
+  {
+    eCopySrc = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT,
+    eCopyDst = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT,
+    eGenericSrc = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT,
+    eGenericDst = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT,
+    eCopySrcKHR = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT_KHR,
+    eCopyDstKHR = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT_KHR,
+    eGenericSrcKHR = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT_KHR,
+    eGenericDstKHR = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlagBits value )
+  {
+    switch ( value )
+    {
+      case PeerMemoryFeatureFlagBits::eCopySrc : return "CopySrc";
+      case PeerMemoryFeatureFlagBits::eCopyDst : return "CopyDst";
+      case PeerMemoryFeatureFlagBits::eGenericSrc : return "GenericSrc";
+      case PeerMemoryFeatureFlagBits::eGenericDst : return "GenericDst";
+      default: return "invalid";
+    }
+  }
+
+  using PeerMemoryFeatureFlags = Flags<PeerMemoryFeatureFlagBits, VkPeerMemoryFeatureFlags>;
+
+  template <> struct FlagTraits<PeerMemoryFeatureFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(PeerMemoryFeatureFlagBits::eCopySrc) | VkFlags(PeerMemoryFeatureFlagBits::eCopyDst) | VkFlags(PeerMemoryFeatureFlagBits::eGenericSrc) | VkFlags(PeerMemoryFeatureFlagBits::eGenericDst)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator|( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PeerMemoryFeatureFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator&( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PeerMemoryFeatureFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator^( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PeerMemoryFeatureFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator~( PeerMemoryFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( PeerMemoryFeatureFlags( bits ) );
+  }
+
+  using PeerMemoryFeatureFlagsKHR = PeerMemoryFeatureFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & PeerMemoryFeatureFlagBits::eCopySrc ) result += "CopySrc | ";
+    if ( value & PeerMemoryFeatureFlagBits::eCopyDst ) result += "CopyDst | ";
+    if ( value & PeerMemoryFeatureFlagBits::eGenericSrc ) result += "GenericSrc | ";
+    if ( value & PeerMemoryFeatureFlagBits::eGenericDst ) result += "GenericDst | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class PerformanceCounterDescriptionFlagBitsKHR
+  {
+    ePerformanceImpacting = VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR,
+    eConcurrentlyImpacted = VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting : return "PerformanceImpacting";
+      case PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted : return "ConcurrentlyImpacted";
+      default: return "invalid";
+    }
+  }
+
+  using PerformanceCounterDescriptionFlagsKHR = Flags<PerformanceCounterDescriptionFlagBitsKHR, VkPerformanceCounterDescriptionFlagsKHR>;
+
+  template <> struct FlagTraits<PerformanceCounterDescriptionFlagBitsKHR>
+  {
+    enum
+    {
+      allFlags = VkFlags(PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting) | VkFlags(PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator|( PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PerformanceCounterDescriptionFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator&( PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PerformanceCounterDescriptionFlagsKHR( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator^( PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PerformanceCounterDescriptionFlagsKHR( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator~( PerformanceCounterDescriptionFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( PerformanceCounterDescriptionFlagsKHR( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagsKHR value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting ) result += "PerformanceImpacting | ";
+    if ( value & PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted ) result += "ConcurrentlyImpacted | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class PipelineCacheCreateFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using PipelineCacheCreateFlags = Flags<PipelineCacheCreateFlagBits, VkPipelineCacheCreateFlags>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlags  )
+  {
+    return "{}";
+  }
+
+  enum class PipelineColorBlendStateCreateFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using PipelineColorBlendStateCreateFlags = Flags<PipelineColorBlendStateCreateFlagBits, VkPipelineColorBlendStateCreateFlags>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlags  )
+  {
+    return "{}";
+  }
+
+  enum class PipelineCompilerControlFlagBitsAMD
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagBitsAMD )
+  {
+    return "(void)";
+  }
+
+  using PipelineCompilerControlFlagsAMD = Flags<PipelineCompilerControlFlagBitsAMD, VkPipelineCompilerControlFlagsAMD>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagsAMD  )
+  {
+    return "{}";
+  }
+
+  enum class PipelineCoverageModulationStateCreateFlagBitsNV
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagBitsNV )
+  {
+    return "(void)";
+  }
+
+  using PipelineCoverageModulationStateCreateFlagsNV = Flags<PipelineCoverageModulationStateCreateFlagBitsNV, VkPipelineCoverageModulationStateCreateFlagsNV>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagsNV  )
+  {
+    return "{}";
+  }
+
+  enum class PipelineCoverageReductionStateCreateFlagBitsNV
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagBitsNV )
+  {
+    return "(void)";
+  }
+
+  using PipelineCoverageReductionStateCreateFlagsNV = Flags<PipelineCoverageReductionStateCreateFlagBitsNV, VkPipelineCoverageReductionStateCreateFlagsNV>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagsNV  )
+  {
+    return "{}";
+  }
+
+  enum class PipelineCoverageToColorStateCreateFlagBitsNV
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagBitsNV )
+  {
+    return "(void)";
+  }
+
+  using PipelineCoverageToColorStateCreateFlagsNV = Flags<PipelineCoverageToColorStateCreateFlagBitsNV, VkPipelineCoverageToColorStateCreateFlagsNV>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagsNV  )
+  {
+    return "{}";
+  }
+
+  enum class PipelineCreateFlagBits
+  {
+    eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT,
+    eAllowDerivatives = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT,
+    eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT,
+    eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT,
+    eDispatchBase = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT,
+    eDeferCompileNV = VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV,
+    eCaptureStatisticsKHR = VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR,
+    eCaptureInternalRepresentationsKHR = VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR,
+    eViewIndexFromDeviceIndexKHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR,
+    eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case PipelineCreateFlagBits::eDisableOptimization : return "DisableOptimization";
+      case PipelineCreateFlagBits::eAllowDerivatives : return "AllowDerivatives";
+      case PipelineCreateFlagBits::eDerivative : return "Derivative";
+      case PipelineCreateFlagBits::eViewIndexFromDeviceIndex : return "ViewIndexFromDeviceIndex";
+      case PipelineCreateFlagBits::eDispatchBase : return "DispatchBase";
+      case PipelineCreateFlagBits::eDeferCompileNV : return "DeferCompileNV";
+      case PipelineCreateFlagBits::eCaptureStatisticsKHR : return "CaptureStatisticsKHR";
+      case PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR : return "CaptureInternalRepresentationsKHR";
+      default: return "invalid";
+    }
+  }
+
+  using PipelineCreateFlags = Flags<PipelineCreateFlagBits, VkPipelineCreateFlags>;
+
+  template <> struct FlagTraits<PipelineCreateFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(PipelineCreateFlagBits::eDisableOptimization) | VkFlags(PipelineCreateFlagBits::eAllowDerivatives) | VkFlags(PipelineCreateFlagBits::eDerivative) | VkFlags(PipelineCreateFlagBits::eViewIndexFromDeviceIndex) | VkFlags(PipelineCreateFlagBits::eDispatchBase) | VkFlags(PipelineCreateFlagBits::eDeferCompileNV) | VkFlags(PipelineCreateFlagBits::eCaptureStatisticsKHR) | VkFlags(PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator|( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator&( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator^( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator~( PipelineCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( PipelineCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & PipelineCreateFlagBits::eDisableOptimization ) result += "DisableOptimization | ";
+    if ( value & PipelineCreateFlagBits::eAllowDerivatives ) result += "AllowDerivatives | ";
+    if ( value & PipelineCreateFlagBits::eDerivative ) result += "Derivative | ";
+    if ( value & PipelineCreateFlagBits::eViewIndexFromDeviceIndex ) result += "ViewIndexFromDeviceIndex | ";
+    if ( value & PipelineCreateFlagBits::eDispatchBase ) result += "DispatchBase | ";
+    if ( value & PipelineCreateFlagBits::eDeferCompileNV ) result += "DeferCompileNV | ";
+    if ( value & PipelineCreateFlagBits::eCaptureStatisticsKHR ) result += "CaptureStatisticsKHR | ";
+    if ( value & PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR ) result += "CaptureInternalRepresentationsKHR | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class PipelineCreationFeedbackFlagBitsEXT
+  {
+    eValid = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT,
+    eApplicationPipelineCacheHit = VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT,
+    eBasePipelineAcceleration = VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case PipelineCreationFeedbackFlagBitsEXT::eValid : return "Valid";
+      case PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit : return "ApplicationPipelineCacheHit";
+      case PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration : return "BasePipelineAcceleration";
+      default: return "invalid";
+    }
+  }
+
+  using PipelineCreationFeedbackFlagsEXT = Flags<PipelineCreationFeedbackFlagBitsEXT, VkPipelineCreationFeedbackFlagsEXT>;
+
+  template <> struct FlagTraits<PipelineCreationFeedbackFlagBitsEXT>
+  {
+    enum
+    {
+      allFlags = VkFlags(PipelineCreationFeedbackFlagBitsEXT::eValid) | VkFlags(PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit) | VkFlags(PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT operator|( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineCreationFeedbackFlagsEXT( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT operator&( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineCreationFeedbackFlagsEXT( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT operator^( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineCreationFeedbackFlagsEXT( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT operator~( PipelineCreationFeedbackFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( PipelineCreationFeedbackFlagsEXT( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagsEXT value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & PipelineCreationFeedbackFlagBitsEXT::eValid ) result += "Valid | ";
+    if ( value & PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit ) result += "ApplicationPipelineCacheHit | ";
+    if ( value & PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration ) result += "BasePipelineAcceleration | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class PipelineDepthStencilStateCreateFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using PipelineDepthStencilStateCreateFlags = Flags<PipelineDepthStencilStateCreateFlagBits, VkPipelineDepthStencilStateCreateFlags>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlags  )
+  {
+    return "{}";
+  }
+
+  enum class PipelineDiscardRectangleStateCreateFlagBitsEXT
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  using PipelineDiscardRectangleStateCreateFlagsEXT = Flags<PipelineDiscardRectangleStateCreateFlagBitsEXT, VkPipelineDiscardRectangleStateCreateFlagsEXT>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagsEXT  )
+  {
+    return "{}";
+  }
+
+  enum class PipelineDynamicStateCreateFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using PipelineDynamicStateCreateFlags = Flags<PipelineDynamicStateCreateFlagBits, VkPipelineDynamicStateCreateFlags>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlags  )
+  {
+    return "{}";
+  }
+
+  enum class PipelineInputAssemblyStateCreateFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using PipelineInputAssemblyStateCreateFlags = Flags<PipelineInputAssemblyStateCreateFlagBits, VkPipelineInputAssemblyStateCreateFlags>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlags  )
+  {
+    return "{}";
+  }
+
+  enum class PipelineLayoutCreateFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using PipelineLayoutCreateFlags = Flags<PipelineLayoutCreateFlagBits, VkPipelineLayoutCreateFlags>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlags  )
+  {
+    return "{}";
+  }
+
+  enum class PipelineMultisampleStateCreateFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using PipelineMultisampleStateCreateFlags = Flags<PipelineMultisampleStateCreateFlagBits, VkPipelineMultisampleStateCreateFlags>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlags  )
+  {
+    return "{}";
+  }
+
+  enum class PipelineRasterizationConservativeStateCreateFlagBitsEXT
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  using PipelineRasterizationConservativeStateCreateFlagsEXT = Flags<PipelineRasterizationConservativeStateCreateFlagBitsEXT, VkPipelineRasterizationConservativeStateCreateFlagsEXT>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagsEXT  )
+  {
+    return "{}";
+  }
+
+  enum class PipelineRasterizationDepthClipStateCreateFlagBitsEXT
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  using PipelineRasterizationDepthClipStateCreateFlagsEXT = Flags<PipelineRasterizationDepthClipStateCreateFlagBitsEXT, VkPipelineRasterizationDepthClipStateCreateFlagsEXT>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagsEXT  )
+  {
+    return "{}";
+  }
+
+  enum class PipelineRasterizationStateCreateFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using PipelineRasterizationStateCreateFlags = Flags<PipelineRasterizationStateCreateFlagBits, VkPipelineRasterizationStateCreateFlags>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlags  )
+  {
+    return "{}";
+  }
+
+  enum class PipelineRasterizationStateStreamCreateFlagBitsEXT
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  using PipelineRasterizationStateStreamCreateFlagsEXT = Flags<PipelineRasterizationStateStreamCreateFlagBitsEXT, VkPipelineRasterizationStateStreamCreateFlagsEXT>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagsEXT  )
+  {
+    return "{}";
+  }
+
+  enum class PipelineShaderStageCreateFlagBits
+  {
+    eAllowVaryingSubgroupSizeEXT = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT,
+    eRequireFullSubgroupsEXT = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT : return "AllowVaryingSubgroupSizeEXT";
+      case PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT : return "RequireFullSubgroupsEXT";
+      default: return "invalid";
+    }
+  }
+
+  using PipelineShaderStageCreateFlags = Flags<PipelineShaderStageCreateFlagBits, VkPipelineShaderStageCreateFlags>;
+
+  template <> struct FlagTraits<PipelineShaderStageCreateFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT) | VkFlags(PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags operator|( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineShaderStageCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags operator&( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineShaderStageCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags operator^( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineShaderStageCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags operator~( PipelineShaderStageCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( PipelineShaderStageCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT ) result += "AllowVaryingSubgroupSizeEXT | ";
+    if ( value & PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT ) result += "RequireFullSubgroupsEXT | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class PipelineStageFlagBits
+  {
+    eTopOfPipe = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
+    eDrawIndirect = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
+    eVertexInput = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
+    eVertexShader = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
+    eTessellationControlShader = VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT,
+    eTessellationEvaluationShader = VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT,
+    eGeometryShader = VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT,
+    eFragmentShader = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+    eEarlyFragmentTests = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
+    eLateFragmentTests = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
+    eColorAttachmentOutput = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+    eComputeShader = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
+    eTransfer = VK_PIPELINE_STAGE_TRANSFER_BIT,
+    eBottomOfPipe = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
+    eHost = VK_PIPELINE_STAGE_HOST_BIT,
+    eAllGraphics = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
+    eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+    eTransformFeedbackEXT = VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT,
+    eConditionalRenderingEXT = VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT,
+    eCommandProcessNVX = VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX,
+    eShadingRateImageNV = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV,
+    eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV,
+    eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
+    eTaskShaderNV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV,
+    eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV,
+    eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits value )
+  {
+    switch ( value )
+    {
+      case PipelineStageFlagBits::eTopOfPipe : return "TopOfPipe";
+      case PipelineStageFlagBits::eDrawIndirect : return "DrawIndirect";
+      case PipelineStageFlagBits::eVertexInput : return "VertexInput";
+      case PipelineStageFlagBits::eVertexShader : return "VertexShader";
+      case PipelineStageFlagBits::eTessellationControlShader : return "TessellationControlShader";
+      case PipelineStageFlagBits::eTessellationEvaluationShader : return "TessellationEvaluationShader";
+      case PipelineStageFlagBits::eGeometryShader : return "GeometryShader";
+      case PipelineStageFlagBits::eFragmentShader : return "FragmentShader";
+      case PipelineStageFlagBits::eEarlyFragmentTests : return "EarlyFragmentTests";
+      case PipelineStageFlagBits::eLateFragmentTests : return "LateFragmentTests";
+      case PipelineStageFlagBits::eColorAttachmentOutput : return "ColorAttachmentOutput";
+      case PipelineStageFlagBits::eComputeShader : return "ComputeShader";
+      case PipelineStageFlagBits::eTransfer : return "Transfer";
+      case PipelineStageFlagBits::eBottomOfPipe : return "BottomOfPipe";
+      case PipelineStageFlagBits::eHost : return "Host";
+      case PipelineStageFlagBits::eAllGraphics : return "AllGraphics";
+      case PipelineStageFlagBits::eAllCommands : return "AllCommands";
+      case PipelineStageFlagBits::eTransformFeedbackEXT : return "TransformFeedbackEXT";
+      case PipelineStageFlagBits::eConditionalRenderingEXT : return "ConditionalRenderingEXT";
+      case PipelineStageFlagBits::eCommandProcessNVX : return "CommandProcessNVX";
+      case PipelineStageFlagBits::eShadingRateImageNV : return "ShadingRateImageNV";
+      case PipelineStageFlagBits::eRayTracingShaderNV : return "RayTracingShaderNV";
+      case PipelineStageFlagBits::eAccelerationStructureBuildNV : return "AccelerationStructureBuildNV";
+      case PipelineStageFlagBits::eTaskShaderNV : return "TaskShaderNV";
+      case PipelineStageFlagBits::eMeshShaderNV : return "MeshShaderNV";
+      case PipelineStageFlagBits::eFragmentDensityProcessEXT : return "FragmentDensityProcessEXT";
+      default: return "invalid";
+    }
+  }
+
+  using PipelineStageFlags = Flags<PipelineStageFlagBits, VkPipelineStageFlags>;
+
+  template <> struct FlagTraits<PipelineStageFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(PipelineStageFlagBits::eTopOfPipe) | VkFlags(PipelineStageFlagBits::eDrawIndirect) | VkFlags(PipelineStageFlagBits::eVertexInput) | VkFlags(PipelineStageFlagBits::eVertexShader) | VkFlags(PipelineStageFlagBits::eTessellationControlShader) | VkFlags(PipelineStageFlagBits::eTessellationEvaluationShader) | VkFlags(PipelineStageFlagBits::eGeometryShader) | VkFlags(PipelineStageFlagBits::eFragmentShader) | VkFlags(PipelineStageFlagBits::eEarlyFragmentTests) | VkFlags(PipelineStageFlagBits::eLateFragmentTests) | VkFlags(PipelineStageFlagBits::eColorAttachmentOutput) | VkFlags(PipelineStageFlagBits::eComputeShader) | VkFlags(PipelineStageFlagBits::eTransfer) | VkFlags(PipelineStageFlagBits::eBottomOfPipe) | VkFlags(PipelineStageFlagBits::eHost) | VkFlags(PipelineStageFlagBits::eAllGraphics) | VkFlags(PipelineStageFlagBits::eAllCommands) | VkFlags(PipelineStageFlagBits::eTransformFeedbackEXT) | VkFlags(PipelineStageFlagBits::eConditionalRenderingEXT) | VkFlags(PipelineStageFlagBits::eCommandProcessNVX) | VkFlags(PipelineStageFlagBits::eShadingRateImageNV) | VkFlags(PipelineStageFlagBits::eRayTracingShaderNV) | VkFlags(PipelineStageFlagBits::eAccelerationStructureBuildNV) | VkFlags(PipelineStageFlagBits::eTaskShaderNV) | VkFlags(PipelineStageFlagBits::eMeshShaderNV) | VkFlags(PipelineStageFlagBits::eFragmentDensityProcessEXT)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator|( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineStageFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator&( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineStageFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator^( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return PipelineStageFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator~( PipelineStageFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( PipelineStageFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & PipelineStageFlagBits::eTopOfPipe ) result += "TopOfPipe | ";
+    if ( value & PipelineStageFlagBits::eDrawIndirect ) result += "DrawIndirect | ";
+    if ( value & PipelineStageFlagBits::eVertexInput ) result += "VertexInput | ";
+    if ( value & PipelineStageFlagBits::eVertexShader ) result += "VertexShader | ";
+    if ( value & PipelineStageFlagBits::eTessellationControlShader ) result += "TessellationControlShader | ";
+    if ( value & PipelineStageFlagBits::eTessellationEvaluationShader ) result += "TessellationEvaluationShader | ";
+    if ( value & PipelineStageFlagBits::eGeometryShader ) result += "GeometryShader | ";
+    if ( value & PipelineStageFlagBits::eFragmentShader ) result += "FragmentShader | ";
+    if ( value & PipelineStageFlagBits::eEarlyFragmentTests ) result += "EarlyFragmentTests | ";
+    if ( value & PipelineStageFlagBits::eLateFragmentTests ) result += "LateFragmentTests | ";
+    if ( value & PipelineStageFlagBits::eColorAttachmentOutput ) result += "ColorAttachmentOutput | ";
+    if ( value & PipelineStageFlagBits::eComputeShader ) result += "ComputeShader | ";
+    if ( value & PipelineStageFlagBits::eTransfer ) result += "Transfer | ";
+    if ( value & PipelineStageFlagBits::eBottomOfPipe ) result += "BottomOfPipe | ";
+    if ( value & PipelineStageFlagBits::eHost ) result += "Host | ";
+    if ( value & PipelineStageFlagBits::eAllGraphics ) result += "AllGraphics | ";
+    if ( value & PipelineStageFlagBits::eAllCommands ) result += "AllCommands | ";
+    if ( value & PipelineStageFlagBits::eTransformFeedbackEXT ) result += "TransformFeedbackEXT | ";
+    if ( value & PipelineStageFlagBits::eConditionalRenderingEXT ) result += "ConditionalRenderingEXT | ";
+    if ( value & PipelineStageFlagBits::eCommandProcessNVX ) result += "CommandProcessNVX | ";
+    if ( value & PipelineStageFlagBits::eShadingRateImageNV ) result += "ShadingRateImageNV | ";
+    if ( value & PipelineStageFlagBits::eRayTracingShaderNV ) result += "RayTracingShaderNV | ";
+    if ( value & PipelineStageFlagBits::eAccelerationStructureBuildNV ) result += "AccelerationStructureBuildNV | ";
+    if ( value & PipelineStageFlagBits::eTaskShaderNV ) result += "TaskShaderNV | ";
+    if ( value & PipelineStageFlagBits::eMeshShaderNV ) result += "MeshShaderNV | ";
+    if ( value & PipelineStageFlagBits::eFragmentDensityProcessEXT ) result += "FragmentDensityProcessEXT | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
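+
+  // Usage sketch (illustrative only): pipeline-stage masks for barriers or submits are
+  // built the same way, e.g. waiting on both the vertex and fragment shader stages:
+  //   PipelineStageFlags waitStages = PipelineStageFlagBits::eVertexShader | PipelineStageFlagBits::eFragmentShader;
+  //   to_string( waitStages );  // "{ VertexShader | FragmentShader }"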
+
+  enum class PipelineTessellationStateCreateFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using PipelineTessellationStateCreateFlags = Flags<PipelineTessellationStateCreateFlagBits, VkPipelineTessellationStateCreateFlags>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlags  )
+  {
+    return "{}";
+  }
+
+  enum class PipelineVertexInputStateCreateFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using PipelineVertexInputStateCreateFlags = Flags<PipelineVertexInputStateCreateFlagBits, VkPipelineVertexInputStateCreateFlags>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlags  )
+  {
+    return "{}";
+  }
+
+  enum class PipelineViewportStateCreateFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using PipelineViewportStateCreateFlags = Flags<PipelineViewportStateCreateFlagBits, VkPipelineViewportStateCreateFlags>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlags  )
+  {
+    return "{}";
+  }
+
+  enum class PipelineViewportSwizzleStateCreateFlagBitsNV
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagBitsNV )
+  {
+    return "(void)";
+  }
+
+  using PipelineViewportSwizzleStateCreateFlagsNV = Flags<PipelineViewportSwizzleStateCreateFlagBitsNV, VkPipelineViewportSwizzleStateCreateFlagsNV>;
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagsNV  )
+  {
+    return "{}";
+  }
+
+  enum class QueryControlFlagBits
+  {
+    ePrecise = VK_QUERY_CONTROL_PRECISE_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( QueryControlFlagBits value )
+  {
+    switch ( value )
+    {
+      case QueryControlFlagBits::ePrecise : return "Precise";
+      default: return "invalid";
+    }
+  }
+
+  using QueryControlFlags = Flags<QueryControlFlagBits, VkQueryControlFlags>;
+
+  template <> struct FlagTraits<QueryControlFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(QueryControlFlagBits::ePrecise)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator|( QueryControlFlagBits bit0, QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueryControlFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator&( QueryControlFlagBits bit0, QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueryControlFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator^( QueryControlFlagBits bit0, QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueryControlFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator~( QueryControlFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( QueryControlFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( QueryControlFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & QueryControlFlagBits::ePrecise ) result += "Precise | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class QueryPipelineStatisticFlagBits
+  {
+    eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT,
+    eInputAssemblyPrimitives = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT,
+    eVertexShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT,
+    eGeometryShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT,
+    eGeometryShaderPrimitives = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT,
+    eClippingInvocations = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT,
+    eClippingPrimitives = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT,
+    eFragmentShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT,
+    eTessellationControlShaderPatches = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT,
+    eTessellationEvaluationShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT,
+    eComputeShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlagBits value )
+  {
+    switch ( value )
+    {
+      case QueryPipelineStatisticFlagBits::eInputAssemblyVertices : return "InputAssemblyVertices";
+      case QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives : return "InputAssemblyPrimitives";
+      case QueryPipelineStatisticFlagBits::eVertexShaderInvocations : return "VertexShaderInvocations";
+      case QueryPipelineStatisticFlagBits::eGeometryShaderInvocations : return "GeometryShaderInvocations";
+      case QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives : return "GeometryShaderPrimitives";
+      case QueryPipelineStatisticFlagBits::eClippingInvocations : return "ClippingInvocations";
+      case QueryPipelineStatisticFlagBits::eClippingPrimitives : return "ClippingPrimitives";
+      case QueryPipelineStatisticFlagBits::eFragmentShaderInvocations : return "FragmentShaderInvocations";
+      case QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches : return "TessellationControlShaderPatches";
+      case QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations : return "TessellationEvaluationShaderInvocations";
+      case QueryPipelineStatisticFlagBits::eComputeShaderInvocations : return "ComputeShaderInvocations";
+      default: return "invalid";
+    }
+  }
+
+  using QueryPipelineStatisticFlags = Flags<QueryPipelineStatisticFlagBits, VkQueryPipelineStatisticFlags>;
+
+  template <> struct FlagTraits<QueryPipelineStatisticFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyVertices) | VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eVertexShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eClippingInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eClippingPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eFragmentShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eComputeShaderInvocations)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator|( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueryPipelineStatisticFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator&( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueryPipelineStatisticFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator^( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueryPipelineStatisticFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator~( QueryPipelineStatisticFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( QueryPipelineStatisticFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyVertices ) result += "InputAssemblyVertices | ";
+    if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives ) result += "InputAssemblyPrimitives | ";
+    if ( value & QueryPipelineStatisticFlagBits::eVertexShaderInvocations ) result += "VertexShaderInvocations | ";
+    if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderInvocations ) result += "GeometryShaderInvocations | ";
+    if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives ) result += "GeometryShaderPrimitives | ";
+    if ( value & QueryPipelineStatisticFlagBits::eClippingInvocations ) result += "ClippingInvocations | ";
+    if ( value & QueryPipelineStatisticFlagBits::eClippingPrimitives ) result += "ClippingPrimitives | ";
+    if ( value & QueryPipelineStatisticFlagBits::eFragmentShaderInvocations ) result += "FragmentShaderInvocations | ";
+    if ( value & QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches ) result += "TessellationControlShaderPatches | ";
+    if ( value & QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations ) result += "TessellationEvaluationShaderInvocations | ";
+    if ( value & QueryPipelineStatisticFlagBits::eComputeShaderInvocations ) result += "ComputeShaderInvocations | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class QueryPoolCreateFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using QueryPoolCreateFlags = Flags<QueryPoolCreateFlagBits, VkQueryPoolCreateFlags>;
+
+  VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlags  )
+  {
+    return "{}";
+  }
+
+  enum class QueryResultFlagBits
+  {
+    e64 = VK_QUERY_RESULT_64_BIT,
+    eWait = VK_QUERY_RESULT_WAIT_BIT,
+    eWithAvailability = VK_QUERY_RESULT_WITH_AVAILABILITY_BIT,
+    ePartial = VK_QUERY_RESULT_PARTIAL_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( QueryResultFlagBits value )
+  {
+    switch ( value )
+    {
+      case QueryResultFlagBits::e64 : return "64";
+      case QueryResultFlagBits::eWait : return "Wait";
+      case QueryResultFlagBits::eWithAvailability : return "WithAvailability";
+      case QueryResultFlagBits::ePartial : return "Partial";
+      default: return "invalid";
+    }
+  }
+
+  using QueryResultFlags = Flags<QueryResultFlagBits, VkQueryResultFlags>;
+
+  template <> struct FlagTraits<QueryResultFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(QueryResultFlagBits::e64) | VkFlags(QueryResultFlagBits::eWait) | VkFlags(QueryResultFlagBits::eWithAvailability) | VkFlags(QueryResultFlagBits::ePartial)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator|( QueryResultFlagBits bit0, QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueryResultFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator&( QueryResultFlagBits bit0, QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueryResultFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator^( QueryResultFlagBits bit0, QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueryResultFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator~( QueryResultFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( QueryResultFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( QueryResultFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & QueryResultFlagBits::e64 ) result += "64 | ";
+    if ( value & QueryResultFlagBits::eWait ) result += "Wait | ";
+    if ( value & QueryResultFlagBits::eWithAvailability ) result += "WithAvailability | ";
+    if ( value & QueryResultFlagBits::ePartial ) result += "Partial | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class QueueFlagBits
+  {
+    eGraphics = VK_QUEUE_GRAPHICS_BIT,
+    eCompute = VK_QUEUE_COMPUTE_BIT,
+    eTransfer = VK_QUEUE_TRANSFER_BIT,
+    eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT,
+    eProtected = VK_QUEUE_PROTECTED_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( QueueFlagBits value )
+  {
+    switch ( value )
+    {
+      case QueueFlagBits::eGraphics : return "Graphics";
+      case QueueFlagBits::eCompute : return "Compute";
+      case QueueFlagBits::eTransfer : return "Transfer";
+      case QueueFlagBits::eSparseBinding : return "SparseBinding";
+      case QueueFlagBits::eProtected : return "Protected";
+      default: return "invalid";
+    }
+  }
+
+  using QueueFlags = Flags<QueueFlagBits, VkQueueFlags>;
+
+  template <> struct FlagTraits<QueueFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(QueueFlagBits::eGraphics) | VkFlags(QueueFlagBits::eCompute) | VkFlags(QueueFlagBits::eTransfer) | VkFlags(QueueFlagBits::eSparseBinding) | VkFlags(QueueFlagBits::eProtected)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator|( QueueFlagBits bit0, QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueueFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator&( QueueFlagBits bit0, QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueueFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator^( QueueFlagBits bit0, QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return QueueFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator~( QueueFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( QueueFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( QueueFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & QueueFlagBits::eGraphics ) result += "Graphics | ";
+    if ( value & QueueFlagBits::eCompute ) result += "Compute | ";
+    if ( value & QueueFlagBits::eTransfer ) result += "Transfer | ";
+    if ( value & QueueFlagBits::eSparseBinding ) result += "SparseBinding | ";
+    if ( value & QueueFlagBits::eProtected ) result += "Protected | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
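+
+  // Illustrative usage sketch (assumes only what is declared above plus the
+  // default VULKAN_HPP_NAMESPACE of vk): each FlagBits enum pairs with a
+  // Flags alias, and the operators defined here build and query a mask in
+  // the same way the to_string overloads do, e.g.
+  //
+  //   vk::QueueFlags wanted = vk::QueueFlagBits::eGraphics | vk::QueueFlagBits::eCompute;
+  //   if ( wanted & vk::QueueFlagBits::eCompute )
+  //   {
+  //     std::string description = vk::to_string( wanted );  // "{ Graphics | Compute }"
+  //   }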
+
+  enum class RenderPassCreateFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using RenderPassCreateFlags = Flags<RenderPassCreateFlagBits, VkRenderPassCreateFlags>;
+
+  VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlags  )
+  {
+    return "{}";
+  }
+
+  enum class ResolveModeFlagBitsKHR
+  {
+    eNone = VK_RESOLVE_MODE_NONE_KHR,
+    eSampleZero = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR,
+    eAverage = VK_RESOLVE_MODE_AVERAGE_BIT_KHR,
+    eMin = VK_RESOLVE_MODE_MIN_BIT_KHR,
+    eMax = VK_RESOLVE_MODE_MAX_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ResolveModeFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case ResolveModeFlagBitsKHR::eNone : return "None";
+      case ResolveModeFlagBitsKHR::eSampleZero : return "SampleZero";
+      case ResolveModeFlagBitsKHR::eAverage : return "Average";
+      case ResolveModeFlagBitsKHR::eMin : return "Min";
+      case ResolveModeFlagBitsKHR::eMax : return "Max";
+      default: return "invalid";
+    }
+  }
+
+  using ResolveModeFlagsKHR = Flags<ResolveModeFlagBitsKHR, VkResolveModeFlagsKHR>;
+
+  template <> struct FlagTraits<ResolveModeFlagBitsKHR>
+  {
+    enum
+    {
+      allFlags = VkFlags(ResolveModeFlagBitsKHR::eNone) | VkFlags(ResolveModeFlagBitsKHR::eSampleZero) | VkFlags(ResolveModeFlagBitsKHR::eAverage) | VkFlags(ResolveModeFlagBitsKHR::eMin) | VkFlags(ResolveModeFlagBitsKHR::eMax)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlagsKHR operator|( ResolveModeFlagBitsKHR bit0, ResolveModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ResolveModeFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlagsKHR operator&( ResolveModeFlagBitsKHR bit0, ResolveModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ResolveModeFlagsKHR( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlagsKHR operator^( ResolveModeFlagBitsKHR bit0, ResolveModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ResolveModeFlagsKHR( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlagsKHR operator~( ResolveModeFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ResolveModeFlagsKHR( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ResolveModeFlagsKHR value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ResolveModeFlagBitsKHR::eSampleZero ) result += "SampleZero | ";
+    if ( value & ResolveModeFlagBitsKHR::eAverage ) result += "Average | ";
+    if ( value & ResolveModeFlagBitsKHR::eMin ) result += "Min | ";
+    if ( value & ResolveModeFlagBitsKHR::eMax ) result += "Max | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class SampleCountFlagBits
+  {
+    e1 = VK_SAMPLE_COUNT_1_BIT,
+    e2 = VK_SAMPLE_COUNT_2_BIT,
+    e4 = VK_SAMPLE_COUNT_4_BIT,
+    e8 = VK_SAMPLE_COUNT_8_BIT,
+    e16 = VK_SAMPLE_COUNT_16_BIT,
+    e32 = VK_SAMPLE_COUNT_32_BIT,
+    e64 = VK_SAMPLE_COUNT_64_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SampleCountFlagBits value )
+  {
+    switch ( value )
+    {
+      case SampleCountFlagBits::e1 : return "1";
+      case SampleCountFlagBits::e2 : return "2";
+      case SampleCountFlagBits::e4 : return "4";
+      case SampleCountFlagBits::e8 : return "8";
+      case SampleCountFlagBits::e16 : return "16";
+      case SampleCountFlagBits::e32 : return "32";
+      case SampleCountFlagBits::e64 : return "64";
+      default: return "invalid";
+    }
+  }
+
+  using SampleCountFlags = Flags<SampleCountFlagBits, VkSampleCountFlags>;
+
+  template <> struct FlagTraits<SampleCountFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(SampleCountFlagBits::e1) | VkFlags(SampleCountFlagBits::e2) | VkFlags(SampleCountFlagBits::e4) | VkFlags(SampleCountFlagBits::e8) | VkFlags(SampleCountFlagBits::e16) | VkFlags(SampleCountFlagBits::e32) | VkFlags(SampleCountFlagBits::e64)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator|( SampleCountFlagBits bit0, SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SampleCountFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator&( SampleCountFlagBits bit0, SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SampleCountFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator^( SampleCountFlagBits bit0, SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SampleCountFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator~( SampleCountFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( SampleCountFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SampleCountFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & SampleCountFlagBits::e1 ) result += "1 | ";
+    if ( value & SampleCountFlagBits::e2 ) result += "2 | ";
+    if ( value & SampleCountFlagBits::e4 ) result += "4 | ";
+    if ( value & SampleCountFlagBits::e8 ) result += "8 | ";
+    if ( value & SampleCountFlagBits::e16 ) result += "16 | ";
+    if ( value & SampleCountFlagBits::e32 ) result += "32 | ";
+    if ( value & SampleCountFlagBits::e64 ) result += "64 | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class SamplerCreateFlagBits
+  {
+    eSubsampledEXT = VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT,
+    eSubsampledCoarseReconstructionEXT = VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case SamplerCreateFlagBits::eSubsampledEXT : return "SubsampledEXT";
+      case SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT : return "SubsampledCoarseReconstructionEXT";
+      default: return "invalid";
+    }
+  }
+
+  using SamplerCreateFlags = Flags<SamplerCreateFlagBits, VkSamplerCreateFlags>;
+
+  template <> struct FlagTraits<SamplerCreateFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(SamplerCreateFlagBits::eSubsampledEXT) | VkFlags(SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator|( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SamplerCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator&( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SamplerCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator^( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SamplerCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator~( SamplerCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( SamplerCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & SamplerCreateFlagBits::eSubsampledEXT ) result += "SubsampledEXT | ";
+    if ( value & SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT ) result += "SubsampledCoarseReconstructionEXT | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class SemaphoreCreateFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using SemaphoreCreateFlags = Flags<SemaphoreCreateFlagBits, VkSemaphoreCreateFlags>;
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlags  )
+  {
+    return "{}";
+  }
+
+  enum class SemaphoreImportFlagBits
+  {
+    eTemporary = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT,
+    eTemporaryKHR = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlagBits value )
+  {
+    switch ( value )
+    {
+      case SemaphoreImportFlagBits::eTemporary : return "Temporary";
+      default: return "invalid";
+    }
+  }
+
+  using SemaphoreImportFlags = Flags<SemaphoreImportFlagBits, VkSemaphoreImportFlags>;
+
+  template <> struct FlagTraits<SemaphoreImportFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(SemaphoreImportFlagBits::eTemporary)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator|( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SemaphoreImportFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator&( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SemaphoreImportFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator^( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SemaphoreImportFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator~( SemaphoreImportFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( SemaphoreImportFlags( bits ) );
+  }
+
+  using SemaphoreImportFlagsKHR = SemaphoreImportFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & SemaphoreImportFlagBits::eTemporary ) result += "Temporary | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class SemaphoreWaitFlagBitsKHR
+  {
+    eAny = VK_SEMAPHORE_WAIT_ANY_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case SemaphoreWaitFlagBitsKHR::eAny : return "Any";
+      default: return "invalid";
+    }
+  }
+
+  using SemaphoreWaitFlagsKHR = Flags<SemaphoreWaitFlagBitsKHR, VkSemaphoreWaitFlagsKHR>;
+
+  template <> struct FlagTraits<SemaphoreWaitFlagBitsKHR>
+  {
+    enum
+    {
+      allFlags = VkFlags(SemaphoreWaitFlagBitsKHR::eAny)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlagsKHR operator|( SemaphoreWaitFlagBitsKHR bit0, SemaphoreWaitFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SemaphoreWaitFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlagsKHR operator&( SemaphoreWaitFlagBitsKHR bit0, SemaphoreWaitFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SemaphoreWaitFlagsKHR( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlagsKHR operator^( SemaphoreWaitFlagBitsKHR bit0, SemaphoreWaitFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SemaphoreWaitFlagsKHR( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlagsKHR operator~( SemaphoreWaitFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( SemaphoreWaitFlagsKHR( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlagsKHR value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & SemaphoreWaitFlagBitsKHR::eAny ) result += "Any | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class ShaderCorePropertiesFlagBitsAMD
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagBitsAMD )
+  {
+    return "(void)";
+  }
+
+  using ShaderCorePropertiesFlagsAMD = Flags<ShaderCorePropertiesFlagBitsAMD, VkShaderCorePropertiesFlagsAMD>;
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagsAMD  )
+  {
+    return "{}";
+  }
+
+  enum class ShaderModuleCreateFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using ShaderModuleCreateFlags = Flags<ShaderModuleCreateFlagBits, VkShaderModuleCreateFlags>;
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlags  )
+  {
+    return "{}";
+  }
+
+  enum class ShaderStageFlagBits
+  {
+    eVertex = VK_SHADER_STAGE_VERTEX_BIT,
+    eTessellationControl = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
+    eTessellationEvaluation = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
+    eGeometry = VK_SHADER_STAGE_GEOMETRY_BIT,
+    eFragment = VK_SHADER_STAGE_FRAGMENT_BIT,
+    eCompute = VK_SHADER_STAGE_COMPUTE_BIT,
+    eAllGraphics = VK_SHADER_STAGE_ALL_GRAPHICS,
+    eAll = VK_SHADER_STAGE_ALL,
+    eRaygenNV = VK_SHADER_STAGE_RAYGEN_BIT_NV,
+    eAnyHitNV = VK_SHADER_STAGE_ANY_HIT_BIT_NV,
+    eClosestHitNV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV,
+    eMissNV = VK_SHADER_STAGE_MISS_BIT_NV,
+    eIntersectionNV = VK_SHADER_STAGE_INTERSECTION_BIT_NV,
+    eCallableNV = VK_SHADER_STAGE_CALLABLE_BIT_NV,
+    eTaskNV = VK_SHADER_STAGE_TASK_BIT_NV,
+    eMeshNV = VK_SHADER_STAGE_MESH_BIT_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderStageFlagBits value )
+  {
+    switch ( value )
+    {
+      case ShaderStageFlagBits::eVertex : return "Vertex";
+      case ShaderStageFlagBits::eTessellationControl : return "TessellationControl";
+      case ShaderStageFlagBits::eTessellationEvaluation : return "TessellationEvaluation";
+      case ShaderStageFlagBits::eGeometry : return "Geometry";
+      case ShaderStageFlagBits::eFragment : return "Fragment";
+      case ShaderStageFlagBits::eCompute : return "Compute";
+      case ShaderStageFlagBits::eAllGraphics : return "AllGraphics";
+      case ShaderStageFlagBits::eAll : return "All";
+      case ShaderStageFlagBits::eRaygenNV : return "RaygenNV";
+      case ShaderStageFlagBits::eAnyHitNV : return "AnyHitNV";
+      case ShaderStageFlagBits::eClosestHitNV : return "ClosestHitNV";
+      case ShaderStageFlagBits::eMissNV : return "MissNV";
+      case ShaderStageFlagBits::eIntersectionNV : return "IntersectionNV";
+      case ShaderStageFlagBits::eCallableNV : return "CallableNV";
+      case ShaderStageFlagBits::eTaskNV : return "TaskNV";
+      case ShaderStageFlagBits::eMeshNV : return "MeshNV";
+      default: return "invalid";
+    }
+  }
+
+  using ShaderStageFlags = Flags<ShaderStageFlagBits, VkShaderStageFlags>;
+
+  template <> struct FlagTraits<ShaderStageFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(ShaderStageFlagBits::eVertex) | VkFlags(ShaderStageFlagBits::eTessellationControl) | VkFlags(ShaderStageFlagBits::eTessellationEvaluation) | VkFlags(ShaderStageFlagBits::eGeometry) | VkFlags(ShaderStageFlagBits::eFragment) | VkFlags(ShaderStageFlagBits::eCompute) | VkFlags(ShaderStageFlagBits::eAllGraphics) | VkFlags(ShaderStageFlagBits::eAll) | VkFlags(ShaderStageFlagBits::eRaygenNV) | VkFlags(ShaderStageFlagBits::eAnyHitNV) | VkFlags(ShaderStageFlagBits::eClosestHitNV) | VkFlags(ShaderStageFlagBits::eMissNV) | VkFlags(ShaderStageFlagBits::eIntersectionNV) | VkFlags(ShaderStageFlagBits::eCallableNV) | VkFlags(ShaderStageFlagBits::eTaskNV) | VkFlags(ShaderStageFlagBits::eMeshNV)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator|( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ShaderStageFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator&( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ShaderStageFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator^( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ShaderStageFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator~( ShaderStageFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ShaderStageFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderStageFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ShaderStageFlagBits::eVertex ) result += "Vertex | ";
+    if ( value & ShaderStageFlagBits::eTessellationControl ) result += "TessellationControl | ";
+    if ( value & ShaderStageFlagBits::eTessellationEvaluation ) result += "TessellationEvaluation | ";
+    if ( value & ShaderStageFlagBits::eGeometry ) result += "Geometry | ";
+    if ( value & ShaderStageFlagBits::eFragment ) result += "Fragment | ";
+    if ( value & ShaderStageFlagBits::eCompute ) result += "Compute | ";
+    if ( value & ShaderStageFlagBits::eRaygenNV ) result += "RaygenNV | ";
+    if ( value & ShaderStageFlagBits::eAnyHitNV ) result += "AnyHitNV | ";
+    if ( value & ShaderStageFlagBits::eClosestHitNV ) result += "ClosestHitNV | ";
+    if ( value & ShaderStageFlagBits::eMissNV ) result += "MissNV | ";
+    if ( value & ShaderStageFlagBits::eIntersectionNV ) result += "IntersectionNV | ";
+    if ( value & ShaderStageFlagBits::eCallableNV ) result += "CallableNV | ";
+    if ( value & ShaderStageFlagBits::eTaskNV ) result += "TaskNV | ";
+    if ( value & ShaderStageFlagBits::eMeshNV ) result += "MeshNV | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class SparseImageFormatFlagBits
+  {
+    eSingleMiptail = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT,
+    eAlignedMipSize = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT,
+    eNonstandardBlockSize = VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlagBits value )
+  {
+    switch ( value )
+    {
+      case SparseImageFormatFlagBits::eSingleMiptail : return "SingleMiptail";
+      case SparseImageFormatFlagBits::eAlignedMipSize : return "AlignedMipSize";
+      case SparseImageFormatFlagBits::eNonstandardBlockSize : return "NonstandardBlockSize";
+      default: return "invalid";
+    }
+  }
+
+  using SparseImageFormatFlags = Flags<SparseImageFormatFlagBits, VkSparseImageFormatFlags>;
+
+  template <> struct FlagTraits<SparseImageFormatFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(SparseImageFormatFlagBits::eSingleMiptail) | VkFlags(SparseImageFormatFlagBits::eAlignedMipSize) | VkFlags(SparseImageFormatFlagBits::eNonstandardBlockSize)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator|( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SparseImageFormatFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator&( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SparseImageFormatFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator^( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SparseImageFormatFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator~( SparseImageFormatFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( SparseImageFormatFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & SparseImageFormatFlagBits::eSingleMiptail ) result += "SingleMiptail | ";
+    if ( value & SparseImageFormatFlagBits::eAlignedMipSize ) result += "AlignedMipSize | ";
+    if ( value & SparseImageFormatFlagBits::eNonstandardBlockSize ) result += "NonstandardBlockSize | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class SparseMemoryBindFlagBits
+  {
+    eMetadata = VK_SPARSE_MEMORY_BIND_METADATA_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlagBits value )
+  {
+    switch ( value )
+    {
+      case SparseMemoryBindFlagBits::eMetadata : return "Metadata";
+      default: return "invalid";
+    }
+  }
+
+  using SparseMemoryBindFlags = Flags<SparseMemoryBindFlagBits, VkSparseMemoryBindFlags>;
+
+  template <> struct FlagTraits<SparseMemoryBindFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(SparseMemoryBindFlagBits::eMetadata)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator|( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SparseMemoryBindFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator&( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SparseMemoryBindFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator^( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SparseMemoryBindFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator~( SparseMemoryBindFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( SparseMemoryBindFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & SparseMemoryBindFlagBits::eMetadata ) result += "Metadata | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class StencilFaceFlagBits
+  {
+    eFront = VK_STENCIL_FACE_FRONT_BIT,
+    eBack = VK_STENCIL_FACE_BACK_BIT,
+    eFrontAndBack = VK_STENCIL_FACE_FRONT_AND_BACK,
+    eVkStencilFrontAndBack = VK_STENCIL_FRONT_AND_BACK
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( StencilFaceFlagBits value )
+  {
+    switch ( value )
+    {
+      case StencilFaceFlagBits::eFront : return "Front";
+      case StencilFaceFlagBits::eBack : return "Back";
+      case StencilFaceFlagBits::eFrontAndBack : return "FrontAndBack";
+      default: return "invalid";
+    }
+  }
+
+  using StencilFaceFlags = Flags<StencilFaceFlagBits, VkStencilFaceFlags>;
+
+  template <> struct FlagTraits<StencilFaceFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(StencilFaceFlagBits::eFront) | VkFlags(StencilFaceFlagBits::eBack) | VkFlags(StencilFaceFlagBits::eFrontAndBack)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator|( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return StencilFaceFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator&( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return StencilFaceFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator^( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return StencilFaceFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator~( StencilFaceFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( StencilFaceFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( StencilFaceFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & StencilFaceFlagBits::eFront ) result += "Front | ";
+    if ( value & StencilFaceFlagBits::eBack ) result += "Back | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+#ifdef VK_USE_PLATFORM_GGP
+  enum class StreamDescriptorSurfaceCreateFlagBitsGGP
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagBitsGGP )
+  {
+    return "(void)";
+  }
+
+  using StreamDescriptorSurfaceCreateFlagsGGP = Flags<StreamDescriptorSurfaceCreateFlagBitsGGP, VkStreamDescriptorSurfaceCreateFlagsGGP>;
+
+  VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagsGGP  )
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  enum class SubgroupFeatureFlagBits
+  {
+    eBasic = VK_SUBGROUP_FEATURE_BASIC_BIT,
+    eVote = VK_SUBGROUP_FEATURE_VOTE_BIT,
+    eArithmetic = VK_SUBGROUP_FEATURE_ARITHMETIC_BIT,
+    eBallot = VK_SUBGROUP_FEATURE_BALLOT_BIT,
+    eShuffle = VK_SUBGROUP_FEATURE_SHUFFLE_BIT,
+    eShuffleRelative = VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT,
+    eClustered = VK_SUBGROUP_FEATURE_CLUSTERED_BIT,
+    eQuad = VK_SUBGROUP_FEATURE_QUAD_BIT,
+    ePartitionedNV = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlagBits value )
+  {
+    switch ( value )
+    {
+      case SubgroupFeatureFlagBits::eBasic : return "Basic";
+      case SubgroupFeatureFlagBits::eVote : return "Vote";
+      case SubgroupFeatureFlagBits::eArithmetic : return "Arithmetic";
+      case SubgroupFeatureFlagBits::eBallot : return "Ballot";
+      case SubgroupFeatureFlagBits::eShuffle : return "Shuffle";
+      case SubgroupFeatureFlagBits::eShuffleRelative : return "ShuffleRelative";
+      case SubgroupFeatureFlagBits::eClustered : return "Clustered";
+      case SubgroupFeatureFlagBits::eQuad : return "Quad";
+      case SubgroupFeatureFlagBits::ePartitionedNV : return "PartitionedNV";
+      default: return "invalid";
+    }
+  }
+
+  using SubgroupFeatureFlags = Flags<SubgroupFeatureFlagBits, VkSubgroupFeatureFlags>;
+
+  template <> struct FlagTraits<SubgroupFeatureFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(SubgroupFeatureFlagBits::eBasic) | VkFlags(SubgroupFeatureFlagBits::eVote) | VkFlags(SubgroupFeatureFlagBits::eArithmetic) | VkFlags(SubgroupFeatureFlagBits::eBallot) | VkFlags(SubgroupFeatureFlagBits::eShuffle) | VkFlags(SubgroupFeatureFlagBits::eShuffleRelative) | VkFlags(SubgroupFeatureFlagBits::eClustered) | VkFlags(SubgroupFeatureFlagBits::eQuad) | VkFlags(SubgroupFeatureFlagBits::ePartitionedNV)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator|( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SubgroupFeatureFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator&( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SubgroupFeatureFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator^( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SubgroupFeatureFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator~( SubgroupFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( SubgroupFeatureFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & SubgroupFeatureFlagBits::eBasic ) result += "Basic | ";
+    if ( value & SubgroupFeatureFlagBits::eVote ) result += "Vote | ";
+    if ( value & SubgroupFeatureFlagBits::eArithmetic ) result += "Arithmetic | ";
+    if ( value & SubgroupFeatureFlagBits::eBallot ) result += "Ballot | ";
+    if ( value & SubgroupFeatureFlagBits::eShuffle ) result += "Shuffle | ";
+    if ( value & SubgroupFeatureFlagBits::eShuffleRelative ) result += "ShuffleRelative | ";
+    if ( value & SubgroupFeatureFlagBits::eClustered ) result += "Clustered | ";
+    if ( value & SubgroupFeatureFlagBits::eQuad ) result += "Quad | ";
+    if ( value & SubgroupFeatureFlagBits::ePartitionedNV ) result += "PartitionedNV | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class SubpassDescriptionFlagBits
+  {
+    ePerViewAttributesNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX,
+    ePerViewPositionXOnlyNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlagBits value )
+  {
+    switch ( value )
+    {
+      case SubpassDescriptionFlagBits::ePerViewAttributesNVX : return "PerViewAttributesNVX";
+      case SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX : return "PerViewPositionXOnlyNVX";
+      default: return "invalid";
+    }
+  }
+
+  using SubpassDescriptionFlags = Flags<SubpassDescriptionFlagBits, VkSubpassDescriptionFlags>;
+
+  template <> struct FlagTraits<SubpassDescriptionFlagBits>
+  {
+    enum
+    {
+      allFlags = VkFlags(SubpassDescriptionFlagBits::ePerViewAttributesNVX) | VkFlags(SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator|( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SubpassDescriptionFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator&( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SubpassDescriptionFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator^( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SubpassDescriptionFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator~( SubpassDescriptionFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( SubpassDescriptionFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlags value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & SubpassDescriptionFlagBits::ePerViewAttributesNVX ) result += "PerViewAttributesNVX | ";
+    if ( value & SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX ) result += "PerViewPositionXOnlyNVX | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class SurfaceCounterFlagBitsEXT
+  {
+    eVblank = VK_SURFACE_COUNTER_VBLANK_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case SurfaceCounterFlagBitsEXT::eVblank : return "Vblank";
+      default: return "invalid";
+    }
+  }
+
+  using SurfaceCounterFlagsEXT = Flags<SurfaceCounterFlagBitsEXT, VkSurfaceCounterFlagsEXT>;
+
+  template <> struct FlagTraits<SurfaceCounterFlagBitsEXT>
+  {
+    enum
+    {
+      allFlags = VkFlags(SurfaceCounterFlagBitsEXT::eVblank)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator|( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SurfaceCounterFlagsEXT( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator&( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SurfaceCounterFlagsEXT( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator^( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SurfaceCounterFlagsEXT( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator~( SurfaceCounterFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( SurfaceCounterFlagsEXT( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagsEXT value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & SurfaceCounterFlagBitsEXT::eVblank ) result += "Vblank | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class SurfaceTransformFlagBitsKHR
+  {
+    eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
+    eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR,
+    eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR,
+    eRotate270 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR,
+    eHorizontalMirror = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR,
+    eHorizontalMirrorRotate90 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR,
+    eHorizontalMirrorRotate180 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR,
+    eHorizontalMirrorRotate270 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR,
+    eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case SurfaceTransformFlagBitsKHR::eIdentity : return "Identity";
+      case SurfaceTransformFlagBitsKHR::eRotate90 : return "Rotate90";
+      case SurfaceTransformFlagBitsKHR::eRotate180 : return "Rotate180";
+      case SurfaceTransformFlagBitsKHR::eRotate270 : return "Rotate270";
+      case SurfaceTransformFlagBitsKHR::eHorizontalMirror : return "HorizontalMirror";
+      case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 : return "HorizontalMirrorRotate90";
+      case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 : return "HorizontalMirrorRotate180";
+      case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 : return "HorizontalMirrorRotate270";
+      case SurfaceTransformFlagBitsKHR::eInherit : return "Inherit";
+      default: return "invalid";
+    }
+  }
+
+  using SurfaceTransformFlagsKHR = Flags<SurfaceTransformFlagBitsKHR, VkSurfaceTransformFlagsKHR>;
+
+  template <> struct FlagTraits<SurfaceTransformFlagBitsKHR>
+  {
+    enum
+    {
+      allFlags = VkFlags(SurfaceTransformFlagBitsKHR::eIdentity) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirror) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eInherit)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator|( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SurfaceTransformFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator&( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SurfaceTransformFlagsKHR( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator^( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SurfaceTransformFlagsKHR( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator~( SurfaceTransformFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( SurfaceTransformFlagsKHR( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagsKHR value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & SurfaceTransformFlagBitsKHR::eIdentity ) result += "Identity | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eRotate90 ) result += "Rotate90 | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eRotate180 ) result += "Rotate180 | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eRotate270 ) result += "Rotate270 | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirror ) result += "HorizontalMirror | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 ) result += "HorizontalMirrorRotate90 | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 ) result += "HorizontalMirrorRotate180 | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 ) result += "HorizontalMirrorRotate270 | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eInherit ) result += "Inherit | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
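+
+  // Illustrative sketch of how these transform flags are typically consumed
+  // (assumes the SurfaceCapabilitiesKHR and SwapchainCreateInfoKHR structs
+  // defined elsewhere in this header; "capabilities" and "swapchainCreateInfo"
+  // are placeholder variables):
+  //
+  //   if ( capabilities.supportedTransforms & vk::SurfaceTransformFlagBitsKHR::eIdentity )
+  //   {
+  //     swapchainCreateInfo.preTransform = vk::SurfaceTransformFlagBitsKHR::eIdentity;
+  //   }
+  //   else
+  //   {
+  //     swapchainCreateInfo.preTransform = capabilities.currentTransform;
+  //   }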
+
+  enum class SwapchainCreateFlagBitsKHR
+  {
+    eSplitInstanceBindRegions = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR,
+    eProtected = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR,
+    eMutableFormat = VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions : return "SplitInstanceBindRegions";
+      case SwapchainCreateFlagBitsKHR::eProtected : return "Protected";
+      case SwapchainCreateFlagBitsKHR::eMutableFormat : return "MutableFormat";
+      default: return "invalid";
+    }
+  }
+
+  using SwapchainCreateFlagsKHR = Flags<SwapchainCreateFlagBitsKHR, VkSwapchainCreateFlagsKHR>;
+
+  template <> struct FlagTraits<SwapchainCreateFlagBitsKHR>
+  {
+    enum
+    {
+      allFlags = VkFlags(SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions) | VkFlags(SwapchainCreateFlagBitsKHR::eProtected) | VkFlags(SwapchainCreateFlagBitsKHR::eMutableFormat)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator|( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SwapchainCreateFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator&( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SwapchainCreateFlagsKHR( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator^( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SwapchainCreateFlagsKHR( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator~( SwapchainCreateFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( SwapchainCreateFlagsKHR( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagsKHR value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions ) result += "SplitInstanceBindRegions | ";
+    if ( value & SwapchainCreateFlagBitsKHR::eProtected ) result += "Protected | ";
+    if ( value & SwapchainCreateFlagBitsKHR::eMutableFormat ) result += "MutableFormat | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class ToolPurposeFlagBitsEXT
+  {
+    eValidation = VK_TOOL_PURPOSE_VALIDATION_BIT_EXT,
+    eProfiling = VK_TOOL_PURPOSE_PROFILING_BIT_EXT,
+    eTracing = VK_TOOL_PURPOSE_TRACING_BIT_EXT,
+    eAdditionalFeatures = VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT_EXT,
+    eModifyingFeatures = VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT_EXT,
+    eDebugReporting = VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT,
+    eDebugMarkers = VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case ToolPurposeFlagBitsEXT::eValidation : return "Validation";
+      case ToolPurposeFlagBitsEXT::eProfiling : return "Profiling";
+      case ToolPurposeFlagBitsEXT::eTracing : return "Tracing";
+      case ToolPurposeFlagBitsEXT::eAdditionalFeatures : return "AdditionalFeatures";
+      case ToolPurposeFlagBitsEXT::eModifyingFeatures : return "ModifyingFeatures";
+      case ToolPurposeFlagBitsEXT::eDebugReporting : return "DebugReporting";
+      case ToolPurposeFlagBitsEXT::eDebugMarkers : return "DebugMarkers";
+      default: return "invalid";
+    }
+  }
+
+  using ToolPurposeFlagsEXT = Flags<ToolPurposeFlagBitsEXT, VkToolPurposeFlagsEXT>;
+
+  template <> struct FlagTraits<ToolPurposeFlagBitsEXT>
+  {
+    enum
+    {
+      allFlags = VkFlags(ToolPurposeFlagBitsEXT::eValidation) | VkFlags(ToolPurposeFlagBitsEXT::eProfiling) | VkFlags(ToolPurposeFlagBitsEXT::eTracing) | VkFlags(ToolPurposeFlagBitsEXT::eAdditionalFeatures) | VkFlags(ToolPurposeFlagBitsEXT::eModifyingFeatures) | VkFlags(ToolPurposeFlagBitsEXT::eDebugReporting) | VkFlags(ToolPurposeFlagBitsEXT::eDebugMarkers)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator|( ToolPurposeFlagBitsEXT bit0, ToolPurposeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ToolPurposeFlagsEXT( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator&( ToolPurposeFlagBitsEXT bit0, ToolPurposeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ToolPurposeFlagsEXT( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator^( ToolPurposeFlagBitsEXT bit0, ToolPurposeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ToolPurposeFlagsEXT( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator~( ToolPurposeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ToolPurposeFlagsEXT( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlagsEXT value  )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ToolPurposeFlagBitsEXT::eValidation ) result += "Validation | ";
+    if ( value & ToolPurposeFlagBitsEXT::eProfiling ) result += "Profiling | ";
+    if ( value & ToolPurposeFlagBitsEXT::eTracing ) result += "Tracing | ";
+    if ( value & ToolPurposeFlagBitsEXT::eAdditionalFeatures ) result += "AdditionalFeatures | ";
+    if ( value & ToolPurposeFlagBitsEXT::eModifyingFeatures ) result += "ModifyingFeatures | ";
+    if ( value & ToolPurposeFlagBitsEXT::eDebugReporting ) result += "DebugReporting | ";
+    if ( value & ToolPurposeFlagBitsEXT::eDebugMarkers ) result += "DebugMarkers | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class ValidationCacheCreateFlagBitsEXT
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  using ValidationCacheCreateFlagsEXT = Flags<ValidationCacheCreateFlagBitsEXT, VkValidationCacheCreateFlagsEXT>;
+
+  VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagsEXT  )
+  {
+    return "{}";
+  }
+
+#ifdef VK_USE_PLATFORM_VI_NN
+  enum class ViSurfaceCreateFlagBitsNN
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagBitsNN )
+  {
+    return "(void)";
+  }
+
+  using ViSurfaceCreateFlagsNN = Flags<ViSurfaceCreateFlagBitsNN, VkViSurfaceCreateFlagsNN>;
+
+  VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagsNN  )
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+  enum class WaylandSurfaceCreateFlagBitsKHR
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagBitsKHR )
+  {
+    return "(void)";
+  }
+
+  using WaylandSurfaceCreateFlagsKHR = Flags<WaylandSurfaceCreateFlagBitsKHR, VkWaylandSurfaceCreateFlagsKHR>;
+
+  VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagsKHR  )
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  enum class Win32SurfaceCreateFlagBitsKHR
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagBitsKHR )
+  {
+    return "(void)";
+  }
+
+  using Win32SurfaceCreateFlagsKHR = Flags<Win32SurfaceCreateFlagBitsKHR, VkWin32SurfaceCreateFlagsKHR>;
+
+  VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagsKHR  )
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+  enum class XcbSurfaceCreateFlagBitsKHR
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagBitsKHR )
+  {
+    return "(void)";
+  }
+
+  using XcbSurfaceCreateFlagsKHR = Flags<XcbSurfaceCreateFlagBitsKHR, VkXcbSurfaceCreateFlagsKHR>;
+
+  VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagsKHR  )
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+  enum class XlibSurfaceCreateFlagBitsKHR
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagBitsKHR )
+  {
+    return "(void)";
+  }
+
+  using XlibSurfaceCreateFlagsKHR = Flags<XlibSurfaceCreateFlagBitsKHR, VkXlibSurfaceCreateFlagsKHR>;
+
+  VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagsKHR  )
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+} // namespace VULKAN_HPP_NAMESPACE
+
+namespace std
+{
+  template <>
+  struct is_error_code_enum<VULKAN_HPP_NAMESPACE::Result> : public true_type
+  {};
+}
+
+namespace VULKAN_HPP_NAMESPACE
+{
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  class ErrorCategoryImpl : public std::error_category
+  {
+    public:
+    virtual const char* name() const VULKAN_HPP_NOEXCEPT override { return VULKAN_HPP_NAMESPACE_STRING"::Result"; }
+    virtual std::string message(int ev) const override { return to_string(static_cast<Result>(ev)); }
+  };
+
+  class Error
+  {
+    public:
+    Error() VULKAN_HPP_NOEXCEPT = default;
+    Error(const Error&) VULKAN_HPP_NOEXCEPT = default;
+    virtual ~Error() VULKAN_HPP_NOEXCEPT = default;
+
+    virtual const char* what() const VULKAN_HPP_NOEXCEPT = 0;
+  };
+
+  class LogicError : public Error, public std::logic_error
+  {
+    public:
+    explicit LogicError( const std::string& what )
+      : Error(), std::logic_error(what) {}
+    explicit LogicError( char const * what )
+      : Error(), std::logic_error(what) {}
+
+    virtual const char* what() const VULKAN_HPP_NOEXCEPT { return std::logic_error::what(); }
+  };
+
+  class SystemError : public Error, public std::system_error
+  {
+    public:
+    SystemError( std::error_code ec )
+      : Error(), std::system_error(ec) {}
+    SystemError( std::error_code ec, std::string const& what )
+      : Error(), std::system_error(ec, what) {}
+    SystemError( std::error_code ec, char const * what )
+      : Error(), std::system_error(ec, what) {}
+    SystemError( int ev, std::error_category const& ecat )
+      : Error(), std::system_error(ev, ecat) {}
+    SystemError( int ev, std::error_category const& ecat, std::string const& what)
+      : Error(), std::system_error(ev, ecat, what) {}
+    SystemError( int ev, std::error_category const& ecat, char const * what)
+      : Error(), std::system_error(ev, ecat, what) {}
+
+    virtual const char* what() const VULKAN_HPP_NOEXCEPT { return std::system_error::what(); }
+  };
+
+  VULKAN_HPP_INLINE const std::error_category& errorCategory() VULKAN_HPP_NOEXCEPT
+  {
+    static ErrorCategoryImpl instance;
+    return instance;
+  }
+
+  VULKAN_HPP_INLINE std::error_code make_error_code(Result e) VULKAN_HPP_NOEXCEPT
+  {
+    return std::error_code(static_cast<int>(e), errorCategory());
+  }
+
+  VULKAN_HPP_INLINE std::error_condition make_error_condition(Result e) VULKAN_HPP_NOEXCEPT
+  {
+    return std::error_condition(static_cast<int>(e), errorCategory());
+  }
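+
+  // Illustrative sketch: the std::is_error_code_enum specialization above,
+  // together with make_error_code and ErrorCategoryImpl, lets a Result be
+  // stored directly in a std::error_code (assumes the default
+  // VULKAN_HPP_NAMESPACE of vk and VULKAN_HPP_NO_EXCEPTIONS left undefined):
+  //
+  //   std::error_code ec = vk::Result::eErrorOutOfHostMemory;  // routed through make_error_code
+  //   std::string category = ec.category().name();             // "vk::Result"
+  //   std::string message  = ec.message();                     // to_string of the wrapped Result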
+
+  class OutOfHostMemoryError : public SystemError
+  {
+  public:
+    OutOfHostMemoryError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {}
+    OutOfHostMemoryError( char const * message )
+      : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {}
+  };
+
+  class OutOfDeviceMemoryError : public SystemError
+  {
+  public:
+    OutOfDeviceMemoryError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {}
+    OutOfDeviceMemoryError( char const * message )
+      : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {}
+  };
+
+  class InitializationFailedError : public SystemError
+  {
+  public:
+    InitializationFailedError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {}
+    InitializationFailedError( char const * message )
+      : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {}
+  };
+
+  class DeviceLostError : public SystemError
+  {
+  public:
+    DeviceLostError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {}
+    DeviceLostError( char const * message )
+      : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {}
+  };
+
+  class MemoryMapFailedError : public SystemError
+  {
+  public:
+    MemoryMapFailedError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {}
+    MemoryMapFailedError( char const * message )
+      : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {}
+  };
+
+  class LayerNotPresentError : public SystemError
+  {
+  public:
+    LayerNotPresentError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {}
+    LayerNotPresentError( char const * message )
+      : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {}
+  };
+
+  class ExtensionNotPresentError : public SystemError
+  {
+  public:
+    ExtensionNotPresentError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {}
+    ExtensionNotPresentError( char const * message )
+      : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {}
+  };
+
+  class FeatureNotPresentError : public SystemError
+  {
+  public:
+    FeatureNotPresentError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {}
+    FeatureNotPresentError( char const * message )
+      : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {}
+  };
+
+  class IncompatibleDriverError : public SystemError
+  {
+  public:
+    IncompatibleDriverError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {}
+    IncompatibleDriverError( char const * message )
+      : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {}
+  };
+
+  class TooManyObjectsError : public SystemError
+  {
+  public:
+    TooManyObjectsError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {}
+    TooManyObjectsError( char const * message )
+      : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {}
+  };
+
+  class FormatNotSupportedError : public SystemError
+  {
+  public:
+    FormatNotSupportedError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {}
+    FormatNotSupportedError( char const * message )
+      : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {}
+  };
+
+  class FragmentedPoolError : public SystemError
+  {
+  public:
+    FragmentedPoolError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {}
+    FragmentedPoolError( char const * message )
+      : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {}
+  };
+
+  class OutOfPoolMemoryError : public SystemError
+  {
+  public:
+    OutOfPoolMemoryError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {}
+    OutOfPoolMemoryError( char const * message )
+      : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {}
+  };
+
+  class InvalidExternalHandleError : public SystemError
+  {
+  public:
+    InvalidExternalHandleError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {}
+    InvalidExternalHandleError( char const * message )
+      : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {}
+  };
+
+  class SurfaceLostKHRError : public SystemError
+  {
+  public:
+    SurfaceLostKHRError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {}
+    SurfaceLostKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {}
+  };
+
+  class NativeWindowInUseKHRError : public SystemError
+  {
+  public:
+    NativeWindowInUseKHRError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {}
+    NativeWindowInUseKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {}
+  };
+
+  class OutOfDateKHRError : public SystemError
+  {
+  public:
+    OutOfDateKHRError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {}
+    OutOfDateKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {}
+  };
+
+  class IncompatibleDisplayKHRError : public SystemError
+  {
+  public:
+    IncompatibleDisplayKHRError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {}
+    IncompatibleDisplayKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {}
+  };
+
+  class ValidationFailedEXTError : public SystemError
+  {
+  public:
+    ValidationFailedEXTError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {}
+    ValidationFailedEXTError( char const * message )
+      : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {}
+  };
+
+  class InvalidShaderNVError : public SystemError
+  {
+  public:
+    InvalidShaderNVError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {}
+    InvalidShaderNVError( char const * message )
+      : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {}
+  };
+
+  class InvalidDrmFormatModifierPlaneLayoutEXTError : public SystemError
+  {
+  public:
+    InvalidDrmFormatModifierPlaneLayoutEXTError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) {}
+    InvalidDrmFormatModifierPlaneLayoutEXTError( char const * message )
+      : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) {}
+  };
+
+  class FragmentationEXTError : public SystemError
+  {
+  public:
+    FragmentationEXTError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorFragmentationEXT ), message ) {}
+    FragmentationEXTError( char const * message )
+      : SystemError( make_error_code( Result::eErrorFragmentationEXT ), message ) {}
+  };
+
+  class NotPermittedEXTError : public SystemError
+  {
+  public:
+    NotPermittedEXTError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorNotPermittedEXT ), message ) {}
+    NotPermittedEXTError( char const * message )
+      : SystemError( make_error_code( Result::eErrorNotPermittedEXT ), message ) {}
+  };
+
+  class FullScreenExclusiveModeLostEXTError : public SystemError
+  {
+  public:
+    FullScreenExclusiveModeLostEXTError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) {}
+    FullScreenExclusiveModeLostEXTError( char const * message )
+      : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) {}
+  };
+
+  class InvalidOpaqueCaptureAddressKHRError : public SystemError
+  {
+  public:
+    InvalidOpaqueCaptureAddressKHRError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddressKHR ), message ) {}
+    InvalidOpaqueCaptureAddressKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddressKHR ), message ) {}
+  };
+
+  [[noreturn]] static void throwResultException( Result result, char const * message )
+  {
+    switch ( result )
+    {
+      case Result::eErrorOutOfHostMemory: throw OutOfHostMemoryError( message );
+      case Result::eErrorOutOfDeviceMemory: throw OutOfDeviceMemoryError( message );
+      case Result::eErrorInitializationFailed: throw InitializationFailedError( message );
+      case Result::eErrorDeviceLost: throw DeviceLostError( message );
+      case Result::eErrorMemoryMapFailed: throw MemoryMapFailedError( message );
+      case Result::eErrorLayerNotPresent: throw LayerNotPresentError( message );
+      case Result::eErrorExtensionNotPresent: throw ExtensionNotPresentError( message );
+      case Result::eErrorFeatureNotPresent: throw FeatureNotPresentError( message );
+      case Result::eErrorIncompatibleDriver: throw IncompatibleDriverError( message );
+      case Result::eErrorTooManyObjects: throw TooManyObjectsError( message );
+      case Result::eErrorFormatNotSupported: throw FormatNotSupportedError( message );
+      case Result::eErrorFragmentedPool: throw FragmentedPoolError( message );
+      case Result::eErrorOutOfPoolMemory: throw OutOfPoolMemoryError( message );
+      case Result::eErrorInvalidExternalHandle: throw InvalidExternalHandleError( message );
+      case Result::eErrorSurfaceLostKHR: throw SurfaceLostKHRError( message );
+      case Result::eErrorNativeWindowInUseKHR: throw NativeWindowInUseKHRError( message );
+      case Result::eErrorOutOfDateKHR: throw OutOfDateKHRError( message );
+      case Result::eErrorIncompatibleDisplayKHR: throw IncompatibleDisplayKHRError( message );
+      case Result::eErrorValidationFailedEXT: throw ValidationFailedEXTError( message );
+      case Result::eErrorInvalidShaderNV: throw InvalidShaderNVError( message );
+      case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: throw InvalidDrmFormatModifierPlaneLayoutEXTError( message );
+      case Result::eErrorFragmentationEXT: throw FragmentationEXTError( message );
+      case Result::eErrorNotPermittedEXT: throw NotPermittedEXTError( message );
+      case Result::eErrorFullScreenExclusiveModeLostEXT: throw FullScreenExclusiveModeLostEXTError( message );
+      case Result::eErrorInvalidOpaqueCaptureAddressKHR: throw InvalidOpaqueCaptureAddressKHRError( message );
+      default: throw SystemError( make_error_code( result ) );
+    }
+  }
+#endif
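+
+  // Illustrative sketch only, assuming the default "vk" namespace (the call name below
+  // is a hypothetical placeholder, not part of the generated header): every eError*
+  // code maps to a dedicated SystemError subclass above, so callers built with
+  // exceptions enabled can catch either the specific error or the common base:
+  //
+  //   try {
+  //     someThrowingVulkanHppCall();            // hypothetical call failing with eErrorDeviceLost
+  //   } catch ( vk::DeviceLostError const & ) {
+  //     // react to VK_ERROR_DEVICE_LOST specifically
+  //   } catch ( vk::SystemError const & err ) {
+  //     // err.code() carries the Result, classified by errorCategory() above
+  //   }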
+
+  template <typename T> void ignore(T const&) VULKAN_HPP_NOEXCEPT {}
+
+  template <typename T>
+  struct ResultValue
+  {
+#ifdef VULKAN_HPP_HAS_NOEXCEPT
+    ResultValue( Result r, T & v ) VULKAN_HPP_NOEXCEPT(VULKAN_HPP_NOEXCEPT(T(v)))
+#else
+    ResultValue( Result r, T & v )
+#endif
+      : result( r )
+      , value( v )
+    {}
+
+#ifdef VULKAN_HPP_HAS_NOEXCEPT
+    ResultValue( Result r, T && v ) VULKAN_HPP_NOEXCEPT(VULKAN_HPP_NOEXCEPT(T(std::move(v))))
+#else
+    ResultValue( Result r, T && v )
+#endif
+      : result( r )
+      , value( std::move( v ) )
+    {}
+
+    Result  result;
+    T       value;
+
+    operator std::tuple<Result&, T&>() VULKAN_HPP_NOEXCEPT { return std::tuple<Result&, T&>(result, value); }
+  };
+
+  template <typename T>
+  struct ResultValueType
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    typedef ResultValue<T>  type;
+#else
+    typedef T               type;
+#endif
+  };
+
+  template <>
+  struct ResultValueType<void>
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    typedef Result type;
+#else
+    typedef void   type;
+#endif
+  };
+
+  VULKAN_HPP_INLINE ResultValueType<void>::type createResultValue( Result result, char const * message )
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore(message);
+    VULKAN_HPP_ASSERT( result == Result::eSuccess );
+    return result;
+#else
+    if ( result != Result::eSuccess )
+    {
+      throwResultException( result, message );
+    }
+#endif
+  }
+
+  template <typename T>
+  VULKAN_HPP_INLINE typename ResultValueType<T>::type createResultValue( Result result, T & data, char const * message )
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore(message);
+    VULKAN_HPP_ASSERT( result == Result::eSuccess );
+    return ResultValue<T>( result, std::move( data ) );
+#else
+    if ( result != Result::eSuccess )
+    {
+      throwResultException( result, message );
+    }
+    return std::move( data );
+#endif
+  }
+
+  VULKAN_HPP_INLINE Result createResultValue( Result result, char const * message, std::initializer_list<Result> successCodes )
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore(message);
+    VULKAN_HPP_ASSERT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
+#else
+    if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
+    {
+      throwResultException( result, message );
+    }
+#endif
+    return result;
+  }
+
+  template <typename T>
+  VULKAN_HPP_INLINE ResultValue<T> createResultValue( Result result, T & data, char const * message, std::initializer_list<Result> successCodes )
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore(message);
+    VULKAN_HPP_ASSERT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
+#else
+    if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
+    {
+      throwResultException( result, message );
+    }
+#endif
+    return ResultValue<T>( result, data );
+  }
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename T, typename D>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<T,D>>::type createResultValue( Result result, T & data, char const * message, typename UniqueHandleTraits<T,D>::deleter const& deleter )
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore(message);
+    VULKAN_HPP_ASSERT( result == Result::eSuccess );
+    return ResultValue<UniqueHandle<T,D>>( result, UniqueHandle<T,D>(data, deleter) );
+#else
+    if ( result != Result::eSuccess )
+    {
+      throwResultException( result, message );
+    }
+    return UniqueHandle<T,D>(data, deleter);
+#endif
+  }
+#endif
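+
+  // Minimal sketch of how the generated wrappers are expected to use these helpers
+  // (vkSomeCall and someCall are hypothetical placeholders): the raw VkResult and the
+  // output value funnel through createResultValue, which throws via
+  // throwResultException in the default build, or returns a ResultValue<T> when
+  // VULKAN_HPP_NO_EXCEPTIONS is defined so the caller can inspect .result and .value:
+  //
+  //   T value;
+  //   Result result = static_cast<Result>( vkSomeCall( ..., &value ) );
+  //   return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::someCall" );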
+
+  struct AccelerationStructureCreateInfoNV;
+  struct AccelerationStructureInfoNV;
+  struct AccelerationStructureMemoryRequirementsInfoNV;
+  struct AcquireNextImageInfoKHR;
+  struct AcquireProfilingLockInfoKHR;
+  struct AllocationCallbacks;
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  struct AndroidHardwareBufferFormatPropertiesANDROID;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  struct AndroidHardwareBufferPropertiesANDROID;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  struct AndroidHardwareBufferUsageANDROID;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  struct AndroidSurfaceCreateInfoKHR;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+  struct ApplicationInfo;
+  struct AttachmentDescription;
+  struct AttachmentDescription2KHR;
+  struct AttachmentDescriptionStencilLayoutKHR;
+  struct AttachmentReference;
+  struct AttachmentReference2KHR;
+  struct AttachmentReferenceStencilLayoutKHR;
+  struct AttachmentSampleLocationsEXT;
+  struct BaseInStructure;
+  struct BaseOutStructure;
+  struct BindAccelerationStructureMemoryInfoNV;
+  struct BindBufferMemoryDeviceGroupInfo;
+  using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo;
+  struct BindBufferMemoryInfo;
+  using BindBufferMemoryInfoKHR = BindBufferMemoryInfo;
+  struct BindImageMemoryDeviceGroupInfo;
+  using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo;
+  struct BindImageMemoryInfo;
+  using BindImageMemoryInfoKHR = BindImageMemoryInfo;
+  struct BindImageMemorySwapchainInfoKHR;
+  struct BindImagePlaneMemoryInfo;
+  using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo;
+  struct BindSparseInfo;
+  struct BufferCopy;
+  struct BufferCreateInfo;
+  struct BufferDeviceAddressCreateInfoEXT;
+  struct BufferDeviceAddressInfoKHR;
+  using BufferDeviceAddressInfoEXT = BufferDeviceAddressInfoKHR;
+  struct BufferImageCopy;
+  struct BufferMemoryBarrier;
+  struct BufferMemoryRequirementsInfo2;
+  using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2;
+  struct BufferOpaqueCaptureAddressCreateInfoKHR;
+  struct BufferViewCreateInfo;
+  struct CalibratedTimestampInfoEXT;
+  struct CheckpointDataNV;
+  struct ClearAttachment;
+  union ClearColorValue;
+  struct ClearDepthStencilValue;
+  struct ClearRect;
+  union ClearValue;
+  struct CmdProcessCommandsInfoNVX;
+  struct CmdReserveSpaceForCommandsInfoNVX;
+  struct CoarseSampleLocationNV;
+  struct CoarseSampleOrderCustomNV;
+  struct CommandBufferAllocateInfo;
+  struct CommandBufferBeginInfo;
+  struct CommandBufferInheritanceConditionalRenderingInfoEXT;
+  struct CommandBufferInheritanceInfo;
+  struct CommandPoolCreateInfo;
+  struct ComponentMapping;
+  struct ComputePipelineCreateInfo;
+  struct ConditionalRenderingBeginInfoEXT;
+  struct ConformanceVersionKHR;
+  struct CooperativeMatrixPropertiesNV;
+  struct CopyDescriptorSet;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct D3D12FenceSubmitInfoKHR;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  struct DebugMarkerMarkerInfoEXT;
+  struct DebugMarkerObjectNameInfoEXT;
+  struct DebugMarkerObjectTagInfoEXT;
+  struct DebugReportCallbackCreateInfoEXT;
+  struct DebugUtilsLabelEXT;
+  struct DebugUtilsMessengerCallbackDataEXT;
+  struct DebugUtilsMessengerCreateInfoEXT;
+  struct DebugUtilsObjectNameInfoEXT;
+  struct DebugUtilsObjectTagInfoEXT;
+  struct DedicatedAllocationBufferCreateInfoNV;
+  struct DedicatedAllocationImageCreateInfoNV;
+  struct DedicatedAllocationMemoryAllocateInfoNV;
+  struct DescriptorBufferInfo;
+  struct DescriptorImageInfo;
+  struct DescriptorPoolCreateInfo;
+  struct DescriptorPoolInlineUniformBlockCreateInfoEXT;
+  struct DescriptorPoolSize;
+  struct DescriptorSetAllocateInfo;
+  struct DescriptorSetLayoutBinding;
+  struct DescriptorSetLayoutBindingFlagsCreateInfoEXT;
+  struct DescriptorSetLayoutCreateInfo;
+  struct DescriptorSetLayoutSupport;
+  using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport;
+  struct DescriptorSetVariableDescriptorCountAllocateInfoEXT;
+  struct DescriptorSetVariableDescriptorCountLayoutSupportEXT;
+  struct DescriptorUpdateTemplateCreateInfo;
+  using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo;
+  struct DescriptorUpdateTemplateEntry;
+  using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry;
+  struct DeviceCreateInfo;
+  struct DeviceEventInfoEXT;
+  struct DeviceGeneratedCommandsFeaturesNVX;
+  struct DeviceGeneratedCommandsLimitsNVX;
+  struct DeviceGroupBindSparseInfo;
+  using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo;
+  struct DeviceGroupCommandBufferBeginInfo;
+  using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo;
+  struct DeviceGroupDeviceCreateInfo;
+  using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo;
+  struct DeviceGroupPresentCapabilitiesKHR;
+  struct DeviceGroupPresentInfoKHR;
+  struct DeviceGroupRenderPassBeginInfo;
+  using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo;
+  struct DeviceGroupSubmitInfo;
+  using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo;
+  struct DeviceGroupSwapchainCreateInfoKHR;
+  struct DeviceMemoryOpaqueCaptureAddressInfoKHR;
+  struct DeviceMemoryOverallocationCreateInfoAMD;
+  struct DeviceQueueCreateInfo;
+  struct DeviceQueueGlobalPriorityCreateInfoEXT;
+  struct DeviceQueueInfo2;
+  struct DispatchIndirectCommand;
+  struct DisplayEventInfoEXT;
+  struct DisplayModeCreateInfoKHR;
+  struct DisplayModeParametersKHR;
+  struct DisplayModeProperties2KHR;
+  struct DisplayModePropertiesKHR;
+  struct DisplayNativeHdrSurfaceCapabilitiesAMD;
+  struct DisplayPlaneCapabilities2KHR;
+  struct DisplayPlaneCapabilitiesKHR;
+  struct DisplayPlaneInfo2KHR;
+  struct DisplayPlaneProperties2KHR;
+  struct DisplayPlanePropertiesKHR;
+  struct DisplayPowerInfoEXT;
+  struct DisplayPresentInfoKHR;
+  struct DisplayProperties2KHR;
+  struct DisplayPropertiesKHR;
+  struct DisplaySurfaceCreateInfoKHR;
+  struct DrawIndexedIndirectCommand;
+  struct DrawIndirectCommand;
+  struct DrawMeshTasksIndirectCommandNV;
+  struct DrmFormatModifierPropertiesEXT;
+  struct DrmFormatModifierPropertiesListEXT;
+  struct EventCreateInfo;
+  struct ExportFenceCreateInfo;
+  using ExportFenceCreateInfoKHR = ExportFenceCreateInfo;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct ExportFenceWin32HandleInfoKHR;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  struct ExportMemoryAllocateInfo;
+  using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo;
+  struct ExportMemoryAllocateInfoNV;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct ExportMemoryWin32HandleInfoKHR;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct ExportMemoryWin32HandleInfoNV;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  struct ExportSemaphoreCreateInfo;
+  using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct ExportSemaphoreWin32HandleInfoKHR;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  struct ExtensionProperties;
+  struct Extent2D;
+  struct Extent3D;
+  struct ExternalBufferProperties;
+  using ExternalBufferPropertiesKHR = ExternalBufferProperties;
+  struct ExternalFenceProperties;
+  using ExternalFencePropertiesKHR = ExternalFenceProperties;
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  struct ExternalFormatANDROID;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+  struct ExternalImageFormatProperties;
+  using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties;
+  struct ExternalImageFormatPropertiesNV;
+  struct ExternalMemoryBufferCreateInfo;
+  using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo;
+  struct ExternalMemoryImageCreateInfo;
+  using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo;
+  struct ExternalMemoryImageCreateInfoNV;
+  struct ExternalMemoryProperties;
+  using ExternalMemoryPropertiesKHR = ExternalMemoryProperties;
+  struct ExternalSemaphoreProperties;
+  using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties;
+  struct FenceCreateInfo;
+  struct FenceGetFdInfoKHR;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct FenceGetWin32HandleInfoKHR;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  struct FilterCubicImageViewImageFormatPropertiesEXT;
+  struct FormatProperties;
+  struct FormatProperties2;
+  using FormatProperties2KHR = FormatProperties2;
+  struct FramebufferAttachmentImageInfoKHR;
+  struct FramebufferAttachmentsCreateInfoKHR;
+  struct FramebufferCreateInfo;
+  struct FramebufferMixedSamplesCombinationNV;
+  struct GeometryAABBNV;
+  struct GeometryDataNV;
+  struct GeometryNV;
+  struct GeometryTrianglesNV;
+  struct GraphicsPipelineCreateInfo;
+  struct HdrMetadataEXT;
+  struct HeadlessSurfaceCreateInfoEXT;
+#ifdef VK_USE_PLATFORM_IOS_MVK
+  struct IOSSurfaceCreateInfoMVK;
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+  struct ImageBlit;
+  struct ImageCopy;
+  struct ImageCreateInfo;
+  struct ImageDrmFormatModifierExplicitCreateInfoEXT;
+  struct ImageDrmFormatModifierListCreateInfoEXT;
+  struct ImageDrmFormatModifierPropertiesEXT;
+  struct ImageFormatListCreateInfoKHR;
+  struct ImageFormatProperties;
+  struct ImageFormatProperties2;
+  using ImageFormatProperties2KHR = ImageFormatProperties2;
+  struct ImageMemoryBarrier;
+  struct ImageMemoryRequirementsInfo2;
+  using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2;
+#ifdef VK_USE_PLATFORM_FUCHSIA
+  struct ImagePipeSurfaceCreateInfoFUCHSIA;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+  struct ImagePlaneMemoryRequirementsInfo;
+  using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo;
+  struct ImageResolve;
+  struct ImageSparseMemoryRequirementsInfo2;
+  using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2;
+  struct ImageStencilUsageCreateInfoEXT;
+  struct ImageSubresource;
+  struct ImageSubresourceLayers;
+  struct ImageSubresourceRange;
+  struct ImageSwapchainCreateInfoKHR;
+  struct ImageViewASTCDecodeModeEXT;
+  struct ImageViewCreateInfo;
+  struct ImageViewHandleInfoNVX;
+  struct ImageViewUsageCreateInfo;
+  using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo;
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  struct ImportAndroidHardwareBufferInfoANDROID;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+  struct ImportFenceFdInfoKHR;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct ImportFenceWin32HandleInfoKHR;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  struct ImportMemoryFdInfoKHR;
+  struct ImportMemoryHostPointerInfoEXT;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct ImportMemoryWin32HandleInfoKHR;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct ImportMemoryWin32HandleInfoNV;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  struct ImportSemaphoreFdInfoKHR;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct ImportSemaphoreWin32HandleInfoKHR;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  struct IndirectCommandsLayoutCreateInfoNVX;
+  struct IndirectCommandsLayoutTokenNVX;
+  struct IndirectCommandsTokenNVX;
+  struct InitializePerformanceApiInfoINTEL;
+  struct InputAttachmentAspectReference;
+  using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference;
+  struct InstanceCreateInfo;
+  struct LayerProperties;
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+  struct MacOSSurfaceCreateInfoMVK;
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+  struct MappedMemoryRange;
+  struct MemoryAllocateFlagsInfo;
+  using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo;
+  struct MemoryAllocateInfo;
+  struct MemoryBarrier;
+  struct MemoryDedicatedAllocateInfo;
+  using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo;
+  struct MemoryDedicatedRequirements;
+  using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements;
+  struct MemoryFdPropertiesKHR;
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  struct MemoryGetAndroidHardwareBufferInfoANDROID;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+  struct MemoryGetFdInfoKHR;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct MemoryGetWin32HandleInfoKHR;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  struct MemoryHeap;
+  struct MemoryHostPointerPropertiesEXT;
+  struct MemoryOpaqueCaptureAddressAllocateInfoKHR;
+  struct MemoryPriorityAllocateInfoEXT;
+  struct MemoryRequirements;
+  struct MemoryRequirements2;
+  using MemoryRequirements2KHR = MemoryRequirements2;
+  struct MemoryType;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct MemoryWin32HandlePropertiesKHR;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_METAL_EXT
+  struct MetalSurfaceCreateInfoEXT;
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+  struct MultisamplePropertiesEXT;
+  struct ObjectTableCreateInfoNVX;
+  struct ObjectTableDescriptorSetEntryNVX;
+  struct ObjectTableEntryNVX;
+  struct ObjectTableIndexBufferEntryNVX;
+  struct ObjectTablePipelineEntryNVX;
+  struct ObjectTablePushConstantEntryNVX;
+  struct ObjectTableVertexBufferEntryNVX;
+  struct Offset2D;
+  struct Offset3D;
+  struct PastPresentationTimingGOOGLE;
+  struct PerformanceConfigurationAcquireInfoINTEL;
+  struct PerformanceCounterDescriptionKHR;
+  struct PerformanceCounterKHR;
+  union PerformanceCounterResultKHR;
+  struct PerformanceMarkerInfoINTEL;
+  struct PerformanceOverrideInfoINTEL;
+  struct PerformanceQuerySubmitInfoKHR;
+  struct PerformanceStreamMarkerInfoINTEL;
+  union PerformanceValueDataINTEL;
+  struct PerformanceValueINTEL;
+  struct PhysicalDevice16BitStorageFeatures;
+  using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures;
+  struct PhysicalDevice8BitStorageFeaturesKHR;
+  struct PhysicalDeviceASTCDecodeFeaturesEXT;
+  struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+  struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+  struct PhysicalDeviceBufferDeviceAddressFeaturesEXT;
+  using PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT;
+  struct PhysicalDeviceBufferDeviceAddressFeaturesKHR;
+  struct PhysicalDeviceCoherentMemoryFeaturesAMD;
+  struct PhysicalDeviceComputeShaderDerivativesFeaturesNV;
+  struct PhysicalDeviceConditionalRenderingFeaturesEXT;
+  struct PhysicalDeviceConservativeRasterizationPropertiesEXT;
+  struct PhysicalDeviceCooperativeMatrixFeaturesNV;
+  struct PhysicalDeviceCooperativeMatrixPropertiesNV;
+  struct PhysicalDeviceCornerSampledImageFeaturesNV;
+  struct PhysicalDeviceCoverageReductionModeFeaturesNV;
+  struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
+  struct PhysicalDeviceDepthClipEnableFeaturesEXT;
+  struct PhysicalDeviceDepthStencilResolvePropertiesKHR;
+  struct PhysicalDeviceDescriptorIndexingFeaturesEXT;
+  struct PhysicalDeviceDescriptorIndexingPropertiesEXT;
+  struct PhysicalDeviceDiscardRectanglePropertiesEXT;
+  struct PhysicalDeviceDriverPropertiesKHR;
+  struct PhysicalDeviceExclusiveScissorFeaturesNV;
+  struct PhysicalDeviceExternalBufferInfo;
+  using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo;
+  struct PhysicalDeviceExternalFenceInfo;
+  using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo;
+  struct PhysicalDeviceExternalImageFormatInfo;
+  using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo;
+  struct PhysicalDeviceExternalMemoryHostPropertiesEXT;
+  struct PhysicalDeviceExternalSemaphoreInfo;
+  using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo;
+  struct PhysicalDeviceFeatures;
+  struct PhysicalDeviceFeatures2;
+  using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2;
+  struct PhysicalDeviceFloatControlsPropertiesKHR;
+  struct PhysicalDeviceFragmentDensityMapFeaturesEXT;
+  struct PhysicalDeviceFragmentDensityMapPropertiesEXT;
+  struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV;
+  struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT;
+  struct PhysicalDeviceGroupProperties;
+  using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties;
+  struct PhysicalDeviceHostQueryResetFeaturesEXT;
+  struct PhysicalDeviceIDProperties;
+  using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties;
+  struct PhysicalDeviceImageDrmFormatModifierInfoEXT;
+  struct PhysicalDeviceImageFormatInfo2;
+  using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2;
+  struct PhysicalDeviceImageViewImageFormatInfoEXT;
+  struct PhysicalDeviceImagelessFramebufferFeaturesKHR;
+  struct PhysicalDeviceIndexTypeUint8FeaturesEXT;
+  struct PhysicalDeviceInlineUniformBlockFeaturesEXT;
+  struct PhysicalDeviceInlineUniformBlockPropertiesEXT;
+  struct PhysicalDeviceLimits;
+  struct PhysicalDeviceLineRasterizationFeaturesEXT;
+  struct PhysicalDeviceLineRasterizationPropertiesEXT;
+  struct PhysicalDeviceMaintenance3Properties;
+  using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties;
+  struct PhysicalDeviceMemoryBudgetPropertiesEXT;
+  struct PhysicalDeviceMemoryPriorityFeaturesEXT;
+  struct PhysicalDeviceMemoryProperties;
+  struct PhysicalDeviceMemoryProperties2;
+  using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2;
+  struct PhysicalDeviceMeshShaderFeaturesNV;
+  struct PhysicalDeviceMeshShaderPropertiesNV;
+  struct PhysicalDeviceMultiviewFeatures;
+  using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures;
+  struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
+  struct PhysicalDeviceMultiviewProperties;
+  using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties;
+  struct PhysicalDevicePCIBusInfoPropertiesEXT;
+  struct PhysicalDevicePerformanceQueryFeaturesKHR;
+  struct PhysicalDevicePerformanceQueryPropertiesKHR;
+  struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
+  struct PhysicalDevicePointClippingProperties;
+  using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties;
+  struct PhysicalDeviceProperties;
+  struct PhysicalDeviceProperties2;
+  using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2;
+  struct PhysicalDeviceProtectedMemoryFeatures;
+  struct PhysicalDeviceProtectedMemoryProperties;
+  struct PhysicalDevicePushDescriptorPropertiesKHR;
+  struct PhysicalDeviceRayTracingPropertiesNV;
+  struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV;
+  struct PhysicalDeviceSampleLocationsPropertiesEXT;
+  struct PhysicalDeviceSamplerFilterMinmaxPropertiesEXT;
+  struct PhysicalDeviceSamplerYcbcrConversionFeatures;
+  using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures;
+  struct PhysicalDeviceScalarBlockLayoutFeaturesEXT;
+  struct PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR;
+  struct PhysicalDeviceShaderAtomicInt64FeaturesKHR;
+  struct PhysicalDeviceShaderClockFeaturesKHR;
+  struct PhysicalDeviceShaderCoreProperties2AMD;
+  struct PhysicalDeviceShaderCorePropertiesAMD;
+  struct PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT;
+  struct PhysicalDeviceShaderDrawParametersFeatures;
+  using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures;
+  struct PhysicalDeviceShaderFloat16Int8FeaturesKHR;
+  using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8FeaturesKHR;
+  struct PhysicalDeviceShaderImageFootprintFeaturesNV;
+  struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
+  struct PhysicalDeviceShaderSMBuiltinsFeaturesNV;
+  struct PhysicalDeviceShaderSMBuiltinsPropertiesNV;
+  struct PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR;
+  struct PhysicalDeviceShadingRateImageFeaturesNV;
+  struct PhysicalDeviceShadingRateImagePropertiesNV;
+  struct PhysicalDeviceSparseImageFormatInfo2;
+  using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2;
+  struct PhysicalDeviceSparseProperties;
+  struct PhysicalDeviceSubgroupProperties;
+  struct PhysicalDeviceSubgroupSizeControlFeaturesEXT;
+  struct PhysicalDeviceSubgroupSizeControlPropertiesEXT;
+  struct PhysicalDeviceSurfaceInfo2KHR;
+  struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT;
+  struct PhysicalDeviceTexelBufferAlignmentPropertiesEXT;
+  struct PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT;
+  struct PhysicalDeviceTimelineSemaphoreFeaturesKHR;
+  struct PhysicalDeviceTimelineSemaphorePropertiesKHR;
+  struct PhysicalDeviceToolPropertiesEXT;
+  struct PhysicalDeviceTransformFeedbackFeaturesEXT;
+  struct PhysicalDeviceTransformFeedbackPropertiesEXT;
+  struct PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR;
+  struct PhysicalDeviceVariablePointersFeatures;
+  using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures;
+  using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures;
+  using PhysicalDeviceVariablePointerFeatures = PhysicalDeviceVariablePointersFeatures;
+  struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT;
+  struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT;
+  struct PhysicalDeviceVulkanMemoryModelFeaturesKHR;
+  struct PhysicalDeviceYcbcrImageArraysFeaturesEXT;
+  struct PipelineCacheCreateInfo;
+  struct PipelineColorBlendAdvancedStateCreateInfoEXT;
+  struct PipelineColorBlendAttachmentState;
+  struct PipelineColorBlendStateCreateInfo;
+  struct PipelineCompilerControlCreateInfoAMD;
+  struct PipelineCoverageModulationStateCreateInfoNV;
+  struct PipelineCoverageReductionStateCreateInfoNV;
+  struct PipelineCoverageToColorStateCreateInfoNV;
+  struct PipelineCreationFeedbackCreateInfoEXT;
+  struct PipelineCreationFeedbackEXT;
+  struct PipelineDepthStencilStateCreateInfo;
+  struct PipelineDiscardRectangleStateCreateInfoEXT;
+  struct PipelineDynamicStateCreateInfo;
+  struct PipelineExecutableInfoKHR;
+  struct PipelineExecutableInternalRepresentationKHR;
+  struct PipelineExecutablePropertiesKHR;
+  struct PipelineExecutableStatisticKHR;
+  union PipelineExecutableStatisticValueKHR;
+  struct PipelineInfoKHR;
+  struct PipelineInputAssemblyStateCreateInfo;
+  struct PipelineLayoutCreateInfo;
+  struct PipelineMultisampleStateCreateInfo;
+  struct PipelineRasterizationConservativeStateCreateInfoEXT;
+  struct PipelineRasterizationDepthClipStateCreateInfoEXT;
+  struct PipelineRasterizationLineStateCreateInfoEXT;
+  struct PipelineRasterizationStateCreateInfo;
+  struct PipelineRasterizationStateRasterizationOrderAMD;
+  struct PipelineRasterizationStateStreamCreateInfoEXT;
+  struct PipelineRepresentativeFragmentTestStateCreateInfoNV;
+  struct PipelineSampleLocationsStateCreateInfoEXT;
+  struct PipelineShaderStageCreateInfo;
+  struct PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT;
+  struct PipelineTessellationDomainOriginStateCreateInfo;
+  using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo;
+  struct PipelineTessellationStateCreateInfo;
+  struct PipelineVertexInputDivisorStateCreateInfoEXT;
+  struct PipelineVertexInputStateCreateInfo;
+  struct PipelineViewportCoarseSampleOrderStateCreateInfoNV;
+  struct PipelineViewportExclusiveScissorStateCreateInfoNV;
+  struct PipelineViewportShadingRateImageStateCreateInfoNV;
+  struct PipelineViewportStateCreateInfo;
+  struct PipelineViewportSwizzleStateCreateInfoNV;
+  struct PipelineViewportWScalingStateCreateInfoNV;
+#ifdef VK_USE_PLATFORM_GGP
+  struct PresentFrameTokenGGP;
+#endif /*VK_USE_PLATFORM_GGP*/
+  struct PresentInfoKHR;
+  struct PresentRegionKHR;
+  struct PresentRegionsKHR;
+  struct PresentTimeGOOGLE;
+  struct PresentTimesInfoGOOGLE;
+  struct ProtectedSubmitInfo;
+  struct PushConstantRange;
+  struct QueryPoolCreateInfo;
+  struct QueryPoolCreateInfoINTEL;
+  struct QueryPoolPerformanceCreateInfoKHR;
+  struct QueueFamilyCheckpointPropertiesNV;
+  struct QueueFamilyProperties;
+  struct QueueFamilyProperties2;
+  using QueueFamilyProperties2KHR = QueueFamilyProperties2;
+  struct RayTracingPipelineCreateInfoNV;
+  struct RayTracingShaderGroupCreateInfoNV;
+  struct Rect2D;
+  struct RectLayerKHR;
+  struct RefreshCycleDurationGOOGLE;
+  struct RenderPassAttachmentBeginInfoKHR;
+  struct RenderPassBeginInfo;
+  struct RenderPassCreateInfo;
+  struct RenderPassCreateInfo2KHR;
+  struct RenderPassFragmentDensityMapCreateInfoEXT;
+  struct RenderPassInputAttachmentAspectCreateInfo;
+  using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo;
+  struct RenderPassMultiviewCreateInfo;
+  using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo;
+  struct RenderPassSampleLocationsBeginInfoEXT;
+  struct SampleLocationEXT;
+  struct SampleLocationsInfoEXT;
+  struct SamplerCreateInfo;
+  struct SamplerReductionModeCreateInfoEXT;
+  struct SamplerYcbcrConversionCreateInfo;
+  using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo;
+  struct SamplerYcbcrConversionImageFormatProperties;
+  using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties;
+  struct SamplerYcbcrConversionInfo;
+  using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo;
+  struct SemaphoreCreateInfo;
+  struct SemaphoreGetFdInfoKHR;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct SemaphoreGetWin32HandleInfoKHR;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  struct SemaphoreSignalInfoKHR;
+  struct SemaphoreTypeCreateInfoKHR;
+  struct SemaphoreWaitInfoKHR;
+  struct ShaderModuleCreateInfo;
+  struct ShaderModuleValidationCacheCreateInfoEXT;
+  struct ShaderResourceUsageAMD;
+  struct ShaderStatisticsInfoAMD;
+  struct ShadingRatePaletteNV;
+  struct SharedPresentSurfaceCapabilitiesKHR;
+  struct SparseBufferMemoryBindInfo;
+  struct SparseImageFormatProperties;
+  struct SparseImageFormatProperties2;
+  using SparseImageFormatProperties2KHR = SparseImageFormatProperties2;
+  struct SparseImageMemoryBind;
+  struct SparseImageMemoryBindInfo;
+  struct SparseImageMemoryRequirements;
+  struct SparseImageMemoryRequirements2;
+  using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2;
+  struct SparseImageOpaqueMemoryBindInfo;
+  struct SparseMemoryBind;
+  struct SpecializationInfo;
+  struct SpecializationMapEntry;
+  struct StencilOpState;
+#ifdef VK_USE_PLATFORM_GGP
+  struct StreamDescriptorSurfaceCreateInfoGGP;
+#endif /*VK_USE_PLATFORM_GGP*/
+  struct SubmitInfo;
+  struct SubpassBeginInfoKHR;
+  struct SubpassDependency;
+  struct SubpassDependency2KHR;
+  struct SubpassDescription;
+  struct SubpassDescription2KHR;
+  struct SubpassDescriptionDepthStencilResolveKHR;
+  struct SubpassEndInfoKHR;
+  struct SubpassSampleLocationsEXT;
+  struct SubresourceLayout;
+  struct SurfaceCapabilities2EXT;
+  struct SurfaceCapabilities2KHR;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct SurfaceCapabilitiesFullScreenExclusiveEXT;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  struct SurfaceCapabilitiesKHR;
+  struct SurfaceFormat2KHR;
+  struct SurfaceFormatKHR;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct SurfaceFullScreenExclusiveInfoEXT;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct SurfaceFullScreenExclusiveWin32InfoEXT;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  struct SurfaceProtectedCapabilitiesKHR;
+  struct SwapchainCounterCreateInfoEXT;
+  struct SwapchainCreateInfoKHR;
+  struct SwapchainDisplayNativeHdrCreateInfoAMD;
+  struct TextureLODGatherFormatPropertiesAMD;
+  struct TimelineSemaphoreSubmitInfoKHR;
+  struct ValidationCacheCreateInfoEXT;
+  struct ValidationFeaturesEXT;
+  struct ValidationFlagsEXT;
+  struct VertexInputAttributeDescription;
+  struct VertexInputBindingDescription;
+  struct VertexInputBindingDivisorDescriptionEXT;
+#ifdef VK_USE_PLATFORM_VI_NN
+  struct ViSurfaceCreateInfoNN;
+#endif /*VK_USE_PLATFORM_VI_NN*/
+  struct Viewport;
+  struct ViewportSwizzleNV;
+  struct ViewportWScalingNV;
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+  struct WaylandSurfaceCreateInfoKHR;
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct Win32KeyedMutexAcquireReleaseInfoKHR;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct Win32KeyedMutexAcquireReleaseInfoNV;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct Win32SurfaceCreateInfoKHR;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  struct WriteDescriptorSet;
+  struct WriteDescriptorSetAccelerationStructureNV;
+  struct WriteDescriptorSetInlineUniformBlockEXT;
+  struct XYColorEXT;
+#ifdef VK_USE_PLATFORM_XCB_KHR
+  struct XcbSurfaceCreateInfoKHR;
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+  struct XlibSurfaceCreateInfoKHR;
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+  class SurfaceKHR
+  {
+  public:
+    using CType = VkSurfaceKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSurfaceKHR;
+
+  public:
+    VULKAN_HPP_CONSTEXPR SurfaceKHR() VULKAN_HPP_NOEXCEPT
+      : m_surfaceKHR(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_surfaceKHR(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT SurfaceKHR( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT
+      : m_surfaceKHR( surfaceKHR )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    SurfaceKHR & operator=(VkSurfaceKHR surfaceKHR) VULKAN_HPP_NOEXCEPT
+    {
+      m_surfaceKHR = surfaceKHR;
+      return *this;
+    }
+#endif
+
+    SurfaceKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_surfaceKHR = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR == rhs.m_surfaceKHR;
+    }
+
+    bool operator!=(SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR != rhs.m_surfaceKHR;
+    }
+
+    bool operator<(SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR < rhs.m_surfaceKHR;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSurfaceKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkSurfaceKHR m_surfaceKHR;
+  };
+  static_assert( sizeof( SurfaceKHR ) == sizeof( VkSurfaceKHR ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eSurfaceKHR>
+  {
+    using type = SurfaceKHR;
+  };
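+
+  // Usage sketch, with rawVkSurface as a hypothetical VkSurfaceKHR obtained elsewhere:
+  // handle wrappers such as SurfaceKHR are non-owning, pointer-sized shims (the
+  // static_assert above checks layout compatibility), and the comparison operators
+  // let them serve as ordered keys:
+  //
+  //   std::set<vk::SurfaceKHR> liveSurfaces;     // ordered via operator<
+  //   vk::SurfaceKHR s( rawVkSurface );
+  //   if ( s ) liveSurfaces.insert( s );         // explicit operator bool tests against VK_NULL_HANDLE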
+
+  class DebugReportCallbackEXT
+  {
+  public:
+    using CType = VkDebugReportCallbackEXT;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDebugReportCallbackEXT;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT() VULKAN_HPP_NOEXCEPT
+      : m_debugReportCallbackEXT(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_debugReportCallbackEXT(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DebugReportCallbackEXT( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT
+      : m_debugReportCallbackEXT( debugReportCallbackEXT )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DebugReportCallbackEXT & operator=(VkDebugReportCallbackEXT debugReportCallbackEXT) VULKAN_HPP_NOEXCEPT
+    {
+      m_debugReportCallbackEXT = debugReportCallbackEXT;
+      return *this;
+    }
+#endif
+
+    DebugReportCallbackEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_debugReportCallbackEXT = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT == rhs.m_debugReportCallbackEXT;
+    }
+
+    bool operator!=(DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT != rhs.m_debugReportCallbackEXT;
+    }
+
+    bool operator<(DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT < rhs.m_debugReportCallbackEXT;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugReportCallbackEXT() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDebugReportCallbackEXT m_debugReportCallbackEXT;
+  };
+  static_assert( sizeof( DebugReportCallbackEXT ) == sizeof( VkDebugReportCallbackEXT ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eDebugReportCallbackEXT>
+  {
+    using type = DebugReportCallbackEXT;
+  };
+
+  class DebugUtilsMessengerEXT
+  {
+  public:
+    using CType = VkDebugUtilsMessengerEXT;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDebugUtilsMessengerEXT;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT() VULKAN_HPP_NOEXCEPT
+      : m_debugUtilsMessengerEXT(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_debugUtilsMessengerEXT(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DebugUtilsMessengerEXT( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT
+      : m_debugUtilsMessengerEXT( debugUtilsMessengerEXT )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DebugUtilsMessengerEXT & operator=(VkDebugUtilsMessengerEXT debugUtilsMessengerEXT) VULKAN_HPP_NOEXCEPT
+    {
+      m_debugUtilsMessengerEXT = debugUtilsMessengerEXT;
+      return *this;
+    }
+#endif
+
+    DebugUtilsMessengerEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_debugUtilsMessengerEXT = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT == rhs.m_debugUtilsMessengerEXT;
+    }
+
+    bool operator!=(DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT != rhs.m_debugUtilsMessengerEXT;
+    }
+
+    bool operator<(DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT < rhs.m_debugUtilsMessengerEXT;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugUtilsMessengerEXT() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDebugUtilsMessengerEXT m_debugUtilsMessengerEXT;
+  };
+  static_assert( sizeof( DebugUtilsMessengerEXT ) == sizeof( VkDebugUtilsMessengerEXT ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eDebugUtilsMessengerEXT>
+  {
+    using type = DebugUtilsMessengerEXT;
+  };
+
+  class DisplayKHR
+  {
+  public:
+    using CType = VkDisplayKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDisplayKHR;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DisplayKHR() VULKAN_HPP_NOEXCEPT
+      : m_displayKHR(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_displayKHR(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT
+      : m_displayKHR( displayKHR )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DisplayKHR & operator=(VkDisplayKHR displayKHR) VULKAN_HPP_NOEXCEPT
+    {
+      m_displayKHR = displayKHR;
+      return *this;
+    }
+#endif
+
+    DisplayKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_displayKHR = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR == rhs.m_displayKHR;
+    }
+
+    bool operator!=(DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR != rhs.m_displayKHR;
+    }
+
+    bool operator<(DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR < rhs.m_displayKHR;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDisplayKHR m_displayKHR;
+  };
+  static_assert( sizeof( DisplayKHR ) == sizeof( VkDisplayKHR ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eDisplayKHR>
+  {
+    using type = DisplayKHR;
+  };
+
+  class SwapchainKHR
+  {
+  public:
+    using CType = VkSwapchainKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSwapchainKHR;
+
+  public:
+    VULKAN_HPP_CONSTEXPR SwapchainKHR() VULKAN_HPP_NOEXCEPT
+      : m_swapchainKHR(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR SwapchainKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_swapchainKHR(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT SwapchainKHR( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT
+      : m_swapchainKHR( swapchainKHR )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    SwapchainKHR & operator=(VkSwapchainKHR swapchainKHR) VULKAN_HPP_NOEXCEPT
+    {
+      m_swapchainKHR = swapchainKHR;
+      return *this;
+    }
+#endif
+
+    SwapchainKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_swapchainKHR = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_swapchainKHR == rhs.m_swapchainKHR;
+    }
+
+    bool operator!=(SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_swapchainKHR != rhs.m_swapchainKHR;
+    }
+
+    bool operator<(SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_swapchainKHR < rhs.m_swapchainKHR;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSwapchainKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_swapchainKHR;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_swapchainKHR != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_swapchainKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkSwapchainKHR m_swapchainKHR;
+  };
+  static_assert( sizeof( SwapchainKHR ) == sizeof( VkSwapchainKHR ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eSwapchainKHR>
+  {
+    using type = SwapchainKHR;
+  };
+
+  class Semaphore
+  {
+  public:
+    using CType = VkSemaphore;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSemaphore;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Semaphore() VULKAN_HPP_NOEXCEPT
+      : m_semaphore(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR Semaphore( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_semaphore(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Semaphore( VkSemaphore semaphore ) VULKAN_HPP_NOEXCEPT
+      : m_semaphore( semaphore )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Semaphore & operator=(VkSemaphore semaphore) VULKAN_HPP_NOEXCEPT
+    {
+      m_semaphore = semaphore;
+      return *this;
+    }
+#endif
+
+    Semaphore & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_semaphore = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_semaphore == rhs.m_semaphore;
+    }
+
+    bool operator!=(Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_semaphore != rhs.m_semaphore;
+    }
+
+    bool operator<(Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_semaphore < rhs.m_semaphore;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSemaphore() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_semaphore;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_semaphore != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_semaphore == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkSemaphore m_semaphore;
+  };
+  static_assert( sizeof( Semaphore ) == sizeof( VkSemaphore ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eSemaphore>
+  {
+    using type = Semaphore;
+  };
+
+  class Fence
+  {
+  public:
+    using CType = VkFence;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eFence;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Fence() VULKAN_HPP_NOEXCEPT
+      : m_fence(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR Fence( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_fence(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Fence( VkFence fence ) VULKAN_HPP_NOEXCEPT
+      : m_fence( fence )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Fence & operator=(VkFence fence) VULKAN_HPP_NOEXCEPT
+    {
+      m_fence = fence;
+      return *this;
+    }
+#endif
+
+    Fence & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_fence = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_fence == rhs.m_fence;
+    }
+
+    bool operator!=(Fence const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_fence != rhs.m_fence;
+    }
+
+    bool operator<(Fence const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_fence < rhs.m_fence;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFence() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_fence;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_fence != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_fence == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkFence m_fence;
+  };
+  static_assert( sizeof( Fence ) == sizeof( VkFence ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eFence>
+  {
+    using type = Fence;
+  };
+
+  class PerformanceConfigurationINTEL
+  {
+  public:
+    using CType = VkPerformanceConfigurationINTEL;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePerformanceConfigurationINTEL;
+
+  public:
+    VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL() VULKAN_HPP_NOEXCEPT
+      : m_performanceConfigurationINTEL(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_performanceConfigurationINTEL(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT PerformanceConfigurationINTEL( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT
+      : m_performanceConfigurationINTEL( performanceConfigurationINTEL )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    PerformanceConfigurationINTEL & operator=(VkPerformanceConfigurationINTEL performanceConfigurationINTEL) VULKAN_HPP_NOEXCEPT
+    {
+      m_performanceConfigurationINTEL = performanceConfigurationINTEL;
+      return *this;
+    }
+#endif
+
+    PerformanceConfigurationINTEL & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_performanceConfigurationINTEL = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_performanceConfigurationINTEL == rhs.m_performanceConfigurationINTEL;
+    }
+
+    bool operator!=(PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_performanceConfigurationINTEL != rhs.m_performanceConfigurationINTEL;
+    }
+
+    bool operator<(PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_performanceConfigurationINTEL < rhs.m_performanceConfigurationINTEL;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPerformanceConfigurationINTEL() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_performanceConfigurationINTEL;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_performanceConfigurationINTEL != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_performanceConfigurationINTEL == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkPerformanceConfigurationINTEL m_performanceConfigurationINTEL;
+  };
+  static_assert( sizeof( PerformanceConfigurationINTEL ) == sizeof( VkPerformanceConfigurationINTEL ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::ePerformanceConfigurationINTEL>
+  {
+    using type = PerformanceConfigurationINTEL;
+  };
+
+  class QueryPool
+  {
+  public:
+    using CType = VkQueryPool;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eQueryPool;
+
+  public:
+    VULKAN_HPP_CONSTEXPR QueryPool() VULKAN_HPP_NOEXCEPT
+      : m_queryPool(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR QueryPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_queryPool(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT
+      : m_queryPool( queryPool )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    QueryPool & operator=(VkQueryPool queryPool) VULKAN_HPP_NOEXCEPT
+    {
+      m_queryPool = queryPool;
+      return *this;
+    }
+#endif
+
+    QueryPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_queryPool = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queryPool == rhs.m_queryPool;
+    }
+
+    bool operator!=(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queryPool != rhs.m_queryPool;
+    }
+
+    bool operator<(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queryPool < rhs.m_queryPool;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueryPool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queryPool;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queryPool != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queryPool == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkQueryPool m_queryPool;
+  };
+  static_assert( sizeof( QueryPool ) == sizeof( VkQueryPool ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eQueryPool>
+  {
+    using type = QueryPool;
+  };
+
+  class Buffer
+  {
+  public:
+    using CType = VkBuffer;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eBuffer;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Buffer() VULKAN_HPP_NOEXCEPT
+      : m_buffer(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR Buffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_buffer(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Buffer( VkBuffer buffer ) VULKAN_HPP_NOEXCEPT
+      : m_buffer( buffer )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Buffer & operator=(VkBuffer buffer) VULKAN_HPP_NOEXCEPT
+    {
+      m_buffer = buffer;
+      return *this;
+    }
+#endif
+
+    Buffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_buffer = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_buffer == rhs.m_buffer;
+    }
+
+    bool operator!=(Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_buffer != rhs.m_buffer;
+    }
+
+    bool operator<(Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_buffer < rhs.m_buffer;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBuffer() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_buffer;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_buffer != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_buffer == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkBuffer m_buffer;
+  };
+  static_assert( sizeof( Buffer ) == sizeof( VkBuffer ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eBuffer>
+  {
+    using type = Buffer;
+  };
+
+  class PipelineLayout
+  {
+  public:
+    using CType = VkPipelineLayout;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePipelineLayout;
+
+  public:
+    VULKAN_HPP_CONSTEXPR PipelineLayout() VULKAN_HPP_NOEXCEPT
+      : m_pipelineLayout(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_pipelineLayout(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT PipelineLayout( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT
+      : m_pipelineLayout( pipelineLayout )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    PipelineLayout & operator=(VkPipelineLayout pipelineLayout) VULKAN_HPP_NOEXCEPT
+    {
+      m_pipelineLayout = pipelineLayout;
+      return *this;
+    }
+#endif
+
+    PipelineLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_pipelineLayout = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineLayout == rhs.m_pipelineLayout;
+    }
+
+    bool operator!=(PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineLayout != rhs.m_pipelineLayout;
+    }
+
+    bool operator<(PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineLayout < rhs.m_pipelineLayout;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineLayout() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineLayout;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineLayout != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineLayout == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkPipelineLayout m_pipelineLayout;
+  };
+  static_assert( sizeof( PipelineLayout ) == sizeof( VkPipelineLayout ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::ePipelineLayout>
+  {
+    using type = PipelineLayout;
+  };
+
+  class DescriptorSet
+  {
+  public:
+    using CType = VkDescriptorSet;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDescriptorSet;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DescriptorSet() VULKAN_HPP_NOEXCEPT
+      : m_descriptorSet(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorSet(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorSet( descriptorSet )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DescriptorSet & operator=(VkDescriptorSet descriptorSet) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorSet = descriptorSet;
+      return *this;
+    }
+#endif
+
+    DescriptorSet & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorSet = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSet == rhs.m_descriptorSet;
+    }
+
+    bool operator!=(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSet != rhs.m_descriptorSet;
+    }
+
+    bool operator<(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSet < rhs.m_descriptorSet;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSet() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSet;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSet != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSet == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDescriptorSet m_descriptorSet;
+  };
+  static_assert( sizeof( DescriptorSet ) == sizeof( VkDescriptorSet ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eDescriptorSet>
+  {
+    using type = DescriptorSet;
+  };
+
+  class Pipeline
+  {
+  public:
+    using CType = VkPipeline;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePipeline;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Pipeline() VULKAN_HPP_NOEXCEPT
+      : m_pipeline(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR Pipeline( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_pipeline(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT
+      : m_pipeline( pipeline )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Pipeline & operator=(VkPipeline pipeline) VULKAN_HPP_NOEXCEPT
+    {
+      m_pipeline = pipeline;
+      return *this;
+    }
+#endif
+
+    Pipeline & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_pipeline = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipeline == rhs.m_pipeline;
+    }
+
+    bool operator!=(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipeline != rhs.m_pipeline;
+    }
+
+    bool operator<(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipeline < rhs.m_pipeline;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipeline() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipeline;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipeline != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipeline == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkPipeline m_pipeline;
+  };
+  static_assert( sizeof( Pipeline ) == sizeof( VkPipeline ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::ePipeline>
+  {
+    using type = Pipeline;
+  };
+
+  class ImageView
+  {
+  public:
+    using CType = VkImageView;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eImageView;
+
+  public:
+    VULKAN_HPP_CONSTEXPR ImageView() VULKAN_HPP_NOEXCEPT
+      : m_imageView(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_imageView(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView ) VULKAN_HPP_NOEXCEPT
+      : m_imageView( imageView )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    ImageView & operator=(VkImageView imageView) VULKAN_HPP_NOEXCEPT
+    {
+      m_imageView = imageView;
+      return *this;
+    }
+#endif
+
+    ImageView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_imageView = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_imageView == rhs.m_imageView;
+    }
+
+    bool operator!=(ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_imageView != rhs.m_imageView;
+    }
+
+    bool operator<(ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_imageView < rhs.m_imageView;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImageView() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_imageView;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_imageView != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_imageView == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkImageView m_imageView;
+  };
+  static_assert( sizeof( ImageView ) == sizeof( VkImageView ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eImageView>
+  {
+    using type = ImageView;
+  };
+
+  class Image
+  {
+  public:
+    using CType = VkImage;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eImage;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Image() VULKAN_HPP_NOEXCEPT
+      : m_image(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR Image( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_image(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Image( VkImage image ) VULKAN_HPP_NOEXCEPT
+      : m_image( image )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Image & operator=(VkImage image) VULKAN_HPP_NOEXCEPT
+    {
+      m_image = image;
+      return *this;
+    }
+#endif
+
+    Image & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_image = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( Image const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_image == rhs.m_image;
+    }
+
+    bool operator!=(Image const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_image != rhs.m_image;
+    }
+
+    bool operator<(Image const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_image < rhs.m_image;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImage() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_image;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_image != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_image == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkImage m_image;
+  };
+  static_assert( sizeof( Image ) == sizeof( VkImage ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eImage>
+  {
+    using type = Image;
+  };
+
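+  // Handle introduced by the VK_NV_ray_tracing extension.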
+  class AccelerationStructureNV
+  {
+  public:
+    using CType = VkAccelerationStructureNV;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eAccelerationStructureNV;
+
+  public:
+    VULKAN_HPP_CONSTEXPR AccelerationStructureNV() VULKAN_HPP_NOEXCEPT
+      : m_accelerationStructureNV(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR AccelerationStructureNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_accelerationStructureNV(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT AccelerationStructureNV( VkAccelerationStructureNV accelerationStructureNV ) VULKAN_HPP_NOEXCEPT
+      : m_accelerationStructureNV( accelerationStructureNV )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    AccelerationStructureNV & operator=(VkAccelerationStructureNV accelerationStructureNV) VULKAN_HPP_NOEXCEPT
+    {
+      m_accelerationStructureNV = accelerationStructureNV;
+      return *this;
+    }
+#endif
+
+    AccelerationStructureNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_accelerationStructureNV = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureNV == rhs.m_accelerationStructureNV;
+    }
+
+    bool operator!=(AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureNV != rhs.m_accelerationStructureNV;
+    }
+
+    bool operator<(AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureNV < rhs.m_accelerationStructureNV;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureNV() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureNV;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureNV != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_accelerationStructureNV == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkAccelerationStructureNV m_accelerationStructureNV;
+  };
+  static_assert( sizeof( AccelerationStructureNV ) == sizeof( VkAccelerationStructureNV ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eAccelerationStructureNV>
+  {
+    using type = AccelerationStructureNV;
+  };
+
+  class DescriptorUpdateTemplate
+  {
+  public:
+    using CType = VkDescriptorUpdateTemplate;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDescriptorUpdateTemplate;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate() VULKAN_HPP_NOEXCEPT
+      : m_descriptorUpdateTemplate(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorUpdateTemplate(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorUpdateTemplate( descriptorUpdateTemplate )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DescriptorUpdateTemplate & operator=(VkDescriptorUpdateTemplate descriptorUpdateTemplate) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorUpdateTemplate = descriptorUpdateTemplate;
+      return *this;
+    }
+#endif
+
+    DescriptorUpdateTemplate & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorUpdateTemplate = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate == rhs.m_descriptorUpdateTemplate;
+    }
+
+    bool operator!=(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate != rhs.m_descriptorUpdateTemplate;
+    }
+
+    bool operator<(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate < rhs.m_descriptorUpdateTemplate;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorUpdateTemplate() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDescriptorUpdateTemplate m_descriptorUpdateTemplate;
+  };
+  static_assert( sizeof( DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eDescriptorUpdateTemplate>
+  {
+    using type = DescriptorUpdateTemplate;
+  };
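+  // Alias kept for code written against VK_KHR_descriptor_update_template,
+  // which was promoted to core in Vulkan 1.1.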
+  using DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate;
+
+  class Event
+  {
+  public:
+    using CType = VkEvent;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eEvent;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Event() VULKAN_HPP_NOEXCEPT
+      : m_event(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR Event( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_event(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event ) VULKAN_HPP_NOEXCEPT
+      : m_event( event )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Event & operator=(VkEvent event) VULKAN_HPP_NOEXCEPT
+    {
+      m_event = event;
+      return *this;
+    }
+#endif
+
+    Event & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_event = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( Event const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_event == rhs.m_event;
+    }
+
+    bool operator!=(Event const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_event != rhs.m_event;
+    }
+
+    bool operator<(Event const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_event < rhs.m_event;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkEvent() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_event;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_event != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_event == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkEvent m_event;
+  };
+  static_assert( sizeof( Event ) == sizeof( VkEvent ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eEvent>
+  {
+    using type = Event;
+  };
+
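+  // CommandBuffer additionally declares member functions that mirror the
+  // vkCmd* entry points. Each is templated on a Dispatch type so calls can be
+  // routed through either the static or a dynamic dispatcher, and the
+  // ArrayProxy / reference overloads are only declared when
+  // VULKAN_HPP_DISABLE_ENHANCED_MODE is not defined.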
+  class CommandBuffer
+  {
+  public:
+    using CType = VkCommandBuffer;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eCommandBuffer;
+
+  public:
+    VULKAN_HPP_CONSTEXPR CommandBuffer() VULKAN_HPP_NOEXCEPT
+      : m_commandBuffer(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_commandBuffer(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT CommandBuffer( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT
+      : m_commandBuffer( commandBuffer )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    CommandBuffer & operator=(VkCommandBuffer commandBuffer) VULKAN_HPP_NOEXCEPT
+    {
+      m_commandBuffer = commandBuffer;
+      return *this;
+    }
+#endif
+
+    CommandBuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_commandBuffer = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandBuffer == rhs.m_commandBuffer;
+    }
+
+    bool operator!=(CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandBuffer != rhs.m_commandBuffer;
+    }
+
+    bool operator<(CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandBuffer < rhs.m_commandBuffer;
+    }
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfoKHR* pSubpassBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfoKHR & subpassBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> counterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets, ArrayProxy<const uint32_t> dynamicOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> offsets, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> sizes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> offsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit* pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment* pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect* pRects, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void clearAttachments( ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> attachments, ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> rects, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue* pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> ranges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> ranges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeNV mode, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugMarkerEndEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endConditionalRenderingEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endDebugUtilsLabelEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endRenderPass(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfoKHR* pSubpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endRenderPass2KHR( const SubpassEndInfoKHR & subpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> counterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfoKHR* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfoKHR* pSubpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void nextSubpass2KHR( const SubpassBeginInfoKHR & subpassBeginInfo, const SubpassEndInfoKHR & subpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void processCommandsNVX( const VULKAN_HPP_NAMESPACE::CmdProcessCommandsInfoNVX* pProcessCommandsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> values, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> descriptorWrites, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void reserveSpaceForCommandsNVX( const VULKAN_HPP_NAMESPACE::CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setBlendConstants( const float blendConstants[4], Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> customSampleOrders, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDeviceMask( uint32_t deviceMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> discardRectangles, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> exclusiveScissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setLineWidth( float lineWidth, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
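+
+    // Unlike most vkCmd* wrappers, the INTEL performance-query commands above can fail,
+    // so their pointer overloads return vk::Result and the enhanced-mode overloads
+    // return ResultValueType<void>::type, which throws on failure (or is vk::Result
+    // when VULKAN_HPP_NO_EXCEPTIONS is defined).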
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setScissor( uint32_t firstScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> scissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewport( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> viewports, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
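+
+    // Sketch of the two setViewport forms (assumes a recording command buffer `cb`);
+    // the ArrayProxy overload infers the viewport count from its argument:
+    //   vk::Viewport vp( 0.0f, 0.0f, 640.0f, 480.0f, 0.0f, 1.0f );
+    //   cb.setViewport( 0, 1, &vp );    // pointer/count overload
+    //   cb.setViewport( 0, { vp } );    // ArrayProxy overload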
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> shadingRatePalettes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportWScalingNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> viewportWScalings, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy<const T> data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
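+
+    // Sketch of the templated updateBuffer overload, which infers the byte size from
+    // the proxy (assumes `cb` and a destination buffer `dst`):
+    //   std::array<uint32_t, 4> words = { { 0u, 1u, 2u, 3u } };
+    //   cb.updateBuffer<uint32_t>( dst, 0, words );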
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeAccelerationStructuresPropertiesNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result end(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type end(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
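+
+    // In the default enhanced mode, end() and reset() return ResultValueType<void>::type,
+    // i.e. they throw a vk::SystemError-derived exception on failure (or return
+    // vk::Result when VULKAN_HPP_NO_EXCEPTIONS is defined); with
+    // VULKAN_HPP_DISABLE_ENHANCED_MODE they return the raw vk::Result instead.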
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandBuffer;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandBuffer != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandBuffer == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkCommandBuffer m_commandBuffer;
+  };
+  static_assert( sizeof( CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eCommandBuffer>
+  {
+    using type = CommandBuffer;
+  };
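+
+  // The cpp_type specializations map an ObjectType enumerant back to its handle wrapper
+  // at compile time, for example:
+  //   static_assert( std::is_same<cpp_type<ObjectType::eCommandBuffer>::type,
+  //                               CommandBuffer>::value, "wrong mapping" );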
+
+  class DeviceMemory
+  {
+  public:
+    using CType = VkDeviceMemory;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDeviceMemory;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DeviceMemory() VULKAN_HPP_NOEXCEPT
+      : m_deviceMemory(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceMemory( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_deviceMemory(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DeviceMemory( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT
+      : m_deviceMemory( deviceMemory )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DeviceMemory & operator=(VkDeviceMemory deviceMemory) VULKAN_HPP_NOEXCEPT
+    {
+      m_deviceMemory = deviceMemory;
+      return *this;
+    }
+#endif
+
+    DeviceMemory & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_deviceMemory = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory == rhs.m_deviceMemory;
+    }
+
+    bool operator!=(DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory != rhs.m_deviceMemory;
+    }
+
+    bool operator<(DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory < rhs.m_deviceMemory;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeviceMemory() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDeviceMemory m_deviceMemory;
+  };
+  static_assert( sizeof( DeviceMemory ) == sizeof( VkDeviceMemory ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eDeviceMemory>
+  {
+    using type = DeviceMemory;
+  };
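+
+  // Each handle wrapper below follows the same pattern: it stores exactly the underlying
+  // Vk* handle (see the static_asserts), default- and nullptr-constructs to
+  // VK_NULL_HANDLE, and converts back via static_cast. Illustrative sketch, assuming a
+  // raw VkDeviceMemory `raw`:
+  //   DeviceMemory mem( raw );
+  //   VkDeviceMemory back = static_cast<VkDeviceMemory>( mem );
+  //   if ( mem ) { /* handle is non-null */ }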
+
+  class BufferView
+  {
+  public:
+    using CType = VkBufferView;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eBufferView;
+
+  public:
+    VULKAN_HPP_CONSTEXPR BufferView() VULKAN_HPP_NOEXCEPT
+      : m_bufferView(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_bufferView(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT
+      : m_bufferView( bufferView )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    BufferView & operator=(VkBufferView bufferView) VULKAN_HPP_NOEXCEPT
+    {
+      m_bufferView = bufferView;
+      return *this;
+    }
+#endif
+
+    BufferView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_bufferView = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferView == rhs.m_bufferView;
+    }
+
+    bool operator!=(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferView != rhs.m_bufferView;
+    }
+
+    bool operator<(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferView < rhs.m_bufferView;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferView() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferView;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferView != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferView == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkBufferView m_bufferView;
+  };
+  static_assert( sizeof( BufferView ) == sizeof( VkBufferView ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eBufferView>
+  {
+    using type = BufferView;
+  };
+
+  class CommandPool
+  {
+  public:
+    using CType = VkCommandPool;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eCommandPool;
+
+  public:
+    VULKAN_HPP_CONSTEXPR CommandPool() VULKAN_HPP_NOEXCEPT
+      : m_commandPool(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR CommandPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_commandPool(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT CommandPool( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT
+      : m_commandPool( commandPool )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    CommandPool & operator=(VkCommandPool commandPool) VULKAN_HPP_NOEXCEPT
+    {
+      m_commandPool = commandPool;
+      return *this;
+    }
+#endif
+
+    CommandPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_commandPool = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandPool == rhs.m_commandPool;
+    }
+
+    bool operator!=(CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandPool != rhs.m_commandPool;
+    }
+
+    bool operator<(CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandPool < rhs.m_commandPool;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandPool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandPool;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandPool != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandPool == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkCommandPool m_commandPool;
+  };
+  static_assert( sizeof( CommandPool ) == sizeof( VkCommandPool ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eCommandPool>
+  {
+    using type = CommandPool;
+  };
+
+  class PipelineCache
+  {
+  public:
+    using CType = VkPipelineCache;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePipelineCache;
+
+  public:
+    VULKAN_HPP_CONSTEXPR PipelineCache() VULKAN_HPP_NOEXCEPT
+      : m_pipelineCache(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineCache( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_pipelineCache(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT PipelineCache( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT
+      : m_pipelineCache( pipelineCache )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    PipelineCache & operator=(VkPipelineCache pipelineCache) VULKAN_HPP_NOEXCEPT
+    {
+      m_pipelineCache = pipelineCache;
+      return *this;
+    }
+#endif
+
+    PipelineCache & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_pipelineCache = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineCache == rhs.m_pipelineCache;
+    }
+
+    bool operator!=(PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineCache != rhs.m_pipelineCache;
+    }
+
+    bool operator<(PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineCache < rhs.m_pipelineCache;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineCache() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineCache;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineCache != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineCache == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkPipelineCache m_pipelineCache;
+  };
+  static_assert( sizeof( PipelineCache ) == sizeof( VkPipelineCache ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::ePipelineCache>
+  {
+    using type = PipelineCache;
+  };
+
+  class DescriptorPool
+  {
+  public:
+    using CType = VkDescriptorPool;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDescriptorPool;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DescriptorPool() VULKAN_HPP_NOEXCEPT
+      : m_descriptorPool(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorPool(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorPool( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorPool( descriptorPool )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DescriptorPool & operator=(VkDescriptorPool descriptorPool) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorPool = descriptorPool;
+      return *this;
+    }
+#endif
+
+    DescriptorPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorPool = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorPool == rhs.m_descriptorPool;
+    }
+
+    bool operator!=(DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorPool != rhs.m_descriptorPool;
+    }
+
+    bool operator<(DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorPool < rhs.m_descriptorPool;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorPool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorPool;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorPool != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorPool == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDescriptorPool m_descriptorPool;
+  };
+  static_assert( sizeof( DescriptorPool ) == sizeof( VkDescriptorPool ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eDescriptorPool>
+  {
+    using type = DescriptorPool;
+  };
+
+  class DescriptorSetLayout
+  {
+  public:
+    using CType = VkDescriptorSetLayout;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDescriptorSetLayout;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayout() VULKAN_HPP_NOEXCEPT
+      : m_descriptorSetLayout(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorSetLayout(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSetLayout( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorSetLayout( descriptorSetLayout )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DescriptorSetLayout & operator=(VkDescriptorSetLayout descriptorSetLayout) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorSetLayout = descriptorSetLayout;
+      return *this;
+    }
+#endif
+
+    DescriptorSetLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorSetLayout = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSetLayout == rhs.m_descriptorSetLayout;
+    }
+
+    bool operator!=(DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSetLayout != rhs.m_descriptorSetLayout;
+    }
+
+    bool operator<(DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSetLayout < rhs.m_descriptorSetLayout;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSetLayout() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSetLayout;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSetLayout != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSetLayout == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDescriptorSetLayout m_descriptorSetLayout;
+  };
+  static_assert( sizeof( DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eDescriptorSetLayout>
+  {
+    using type = DescriptorSetLayout;
+  };
+
+  class Framebuffer
+  {
+  public:
+    using CType = VkFramebuffer;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eFramebuffer;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Framebuffer() VULKAN_HPP_NOEXCEPT
+      : m_framebuffer(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR Framebuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_framebuffer(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Framebuffer( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT
+      : m_framebuffer( framebuffer )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Framebuffer & operator=(VkFramebuffer framebuffer) VULKAN_HPP_NOEXCEPT
+    {
+      m_framebuffer = framebuffer;
+      return *this;
+    }
+#endif
+
+    Framebuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_framebuffer = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_framebuffer == rhs.m_framebuffer;
+    }
+
+    bool operator!=(Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_framebuffer != rhs.m_framebuffer;
+    }
+
+    bool operator<(Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_framebuffer < rhs.m_framebuffer;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFramebuffer() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_framebuffer;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_framebuffer != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_framebuffer == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkFramebuffer m_framebuffer;
+  };
+  static_assert( sizeof( Framebuffer ) == sizeof( VkFramebuffer ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eFramebuffer>
+  {
+    using type = Framebuffer;
+  };
+
+  class IndirectCommandsLayoutNVX
+  {
+  public:
+    using CType = VkIndirectCommandsLayoutNVX;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eIndirectCommandsLayoutNVX;
+
+  public:
+    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNVX() VULKAN_HPP_NOEXCEPT
+      : m_indirectCommandsLayoutNVX(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_indirectCommandsLayoutNVX(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutNVX( VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX ) VULKAN_HPP_NOEXCEPT
+      : m_indirectCommandsLayoutNVX( indirectCommandsLayoutNVX )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    IndirectCommandsLayoutNVX & operator=(VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX) VULKAN_HPP_NOEXCEPT
+    {
+      m_indirectCommandsLayoutNVX = indirectCommandsLayoutNVX;
+      return *this;
+    }
+#endif
+
+    IndirectCommandsLayoutNVX & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_indirectCommandsLayoutNVX = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( IndirectCommandsLayoutNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNVX == rhs.m_indirectCommandsLayoutNVX;
+    }
+
+    bool operator!=(IndirectCommandsLayoutNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNVX != rhs.m_indirectCommandsLayoutNVX;
+    }
+
+    bool operator<(IndirectCommandsLayoutNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNVX < rhs.m_indirectCommandsLayoutNVX;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNVX() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNVX;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNVX != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNVX == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkIndirectCommandsLayoutNVX m_indirectCommandsLayoutNVX;
+  };
+  static_assert( sizeof( IndirectCommandsLayoutNVX ) == sizeof( VkIndirectCommandsLayoutNVX ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eIndirectCommandsLayoutNVX>
+  {
+    using type = IndirectCommandsLayoutNVX;
+  };
+
+  class ObjectTableNVX
+  {
+  public:
+    using CType = VkObjectTableNVX;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eObjectTableNVX;
+
+  public:
+    VULKAN_HPP_CONSTEXPR ObjectTableNVX() VULKAN_HPP_NOEXCEPT
+      : m_objectTableNVX(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR ObjectTableNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_objectTableNVX(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT ObjectTableNVX( VkObjectTableNVX objectTableNVX ) VULKAN_HPP_NOEXCEPT
+      : m_objectTableNVX( objectTableNVX )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    ObjectTableNVX & operator=(VkObjectTableNVX objectTableNVX) VULKAN_HPP_NOEXCEPT
+    {
+      m_objectTableNVX = objectTableNVX;
+      return *this;
+    }
+#endif
+
+    ObjectTableNVX & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_objectTableNVX = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( ObjectTableNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_objectTableNVX == rhs.m_objectTableNVX;
+    }
+
+    bool operator!=(ObjectTableNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_objectTableNVX != rhs.m_objectTableNVX;
+    }
+
+    bool operator<(ObjectTableNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_objectTableNVX < rhs.m_objectTableNVX;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkObjectTableNVX() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_objectTableNVX;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_objectTableNVX != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_objectTableNVX == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkObjectTableNVX m_objectTableNVX;
+  };
+  static_assert( sizeof( ObjectTableNVX ) == sizeof( VkObjectTableNVX ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eObjectTableNVX>
+  {
+    using type = ObjectTableNVX;
+  };
+
+  class RenderPass
+  {
+  public:
+    using CType = VkRenderPass;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eRenderPass;
+
+  public:
+    VULKAN_HPP_CONSTEXPR RenderPass() VULKAN_HPP_NOEXCEPT
+      : m_renderPass(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPass( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_renderPass(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT RenderPass( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT
+      : m_renderPass( renderPass )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    RenderPass & operator=(VkRenderPass renderPass) VULKAN_HPP_NOEXCEPT
+    {
+      m_renderPass = renderPass;
+      return *this;
+    }
+#endif
+
+    RenderPass & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_renderPass = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass == rhs.m_renderPass;
+    }
+
+    bool operator!=(RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass != rhs.m_renderPass;
+    }
+
+    bool operator<(RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass < rhs.m_renderPass;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkRenderPass() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkRenderPass m_renderPass;
+  };
+  static_assert( sizeof( RenderPass ) == sizeof( VkRenderPass ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eRenderPass>
+  {
+    using type = RenderPass;
+  };
+
+  class Sampler
+  {
+  public:
+    using CType = VkSampler;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSampler;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Sampler() VULKAN_HPP_NOEXCEPT
+      : m_sampler(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR Sampler( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_sampler(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler ) VULKAN_HPP_NOEXCEPT
+      : m_sampler( sampler )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Sampler & operator=(VkSampler sampler) VULKAN_HPP_NOEXCEPT
+    {
+      m_sampler = sampler;
+      return *this;
+    }
+#endif
+
+    Sampler & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_sampler = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler == rhs.m_sampler;
+    }
+
+    bool operator!=(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler != rhs.m_sampler;
+    }
+
+    bool operator<(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler < rhs.m_sampler;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSampler() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkSampler m_sampler;
+  };
+  static_assert( sizeof( Sampler ) == sizeof( VkSampler ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eSampler>
+  {
+    using type = Sampler;
+  };
+
+  class SamplerYcbcrConversion
+  {
+  public:
+    using CType = VkSamplerYcbcrConversion;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSamplerYcbcrConversion;
+
+  public:
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion() VULKAN_HPP_NOEXCEPT
+      : m_samplerYcbcrConversion(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_samplerYcbcrConversion(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT SamplerYcbcrConversion( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT
+      : m_samplerYcbcrConversion( samplerYcbcrConversion )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    SamplerYcbcrConversion & operator=(VkSamplerYcbcrConversion samplerYcbcrConversion) VULKAN_HPP_NOEXCEPT
+    {
+      m_samplerYcbcrConversion = samplerYcbcrConversion;
+      return *this;
+    }
+#endif
+
+    SamplerYcbcrConversion & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_samplerYcbcrConversion = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion == rhs.m_samplerYcbcrConversion;
+    }
+
+    bool operator!=(SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion != rhs.m_samplerYcbcrConversion;
+    }
+
+    bool operator<(SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion < rhs.m_samplerYcbcrConversion;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSamplerYcbcrConversion() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkSamplerYcbcrConversion m_samplerYcbcrConversion;
+  };
+  static_assert( sizeof( SamplerYcbcrConversion ) == sizeof( VkSamplerYcbcrConversion ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eSamplerYcbcrConversion>
+  {
+    using type = SamplerYcbcrConversion;
+  };
+  using SamplerYcbcrConversionKHR = SamplerYcbcrConversion;
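+
+  // SamplerYcbcrConversion was promoted to core in Vulkan 1.1; the KHR alias above keeps
+  // code written against VK_KHR_sampler_ycbcr_conversion compiling unchanged.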
+
+  class ShaderModule
+  {
+  public:
+    using CType = VkShaderModule;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eShaderModule;
+
+  public:
+    VULKAN_HPP_CONSTEXPR ShaderModule() VULKAN_HPP_NOEXCEPT
+      : m_shaderModule(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_shaderModule(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT
+      : m_shaderModule( shaderModule )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    ShaderModule & operator=(VkShaderModule shaderModule) VULKAN_HPP_NOEXCEPT
+    {
+      m_shaderModule = shaderModule;
+      return *this;
+    }
+#endif
+
+    ShaderModule & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_shaderModule = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_shaderModule == rhs.m_shaderModule;
+    }
+
+    bool operator!=(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_shaderModule != rhs.m_shaderModule;
+    }
+
+    bool operator<(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_shaderModule < rhs.m_shaderModule;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderModule() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_shaderModule;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_shaderModule != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_shaderModule == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkShaderModule m_shaderModule;
+  };
+  static_assert( sizeof( ShaderModule ) == sizeof( VkShaderModule ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eShaderModule>
+  {
+    using type = ShaderModule;
+  };
+
+  class ValidationCacheEXT
+  {
+  public:
+    using CType = VkValidationCacheEXT;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eValidationCacheEXT;
+
+  public:
+    VULKAN_HPP_CONSTEXPR ValidationCacheEXT() VULKAN_HPP_NOEXCEPT
+      : m_validationCacheEXT(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR ValidationCacheEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_validationCacheEXT(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT ValidationCacheEXT( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT
+      : m_validationCacheEXT( validationCacheEXT )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    ValidationCacheEXT & operator=(VkValidationCacheEXT validationCacheEXT) VULKAN_HPP_NOEXCEPT
+    {
+      m_validationCacheEXT = validationCacheEXT;
+      return *this;
+    }
+#endif
+
+    ValidationCacheEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_validationCacheEXT = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT == rhs.m_validationCacheEXT;
+    }
+
+    bool operator!=(ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT != rhs.m_validationCacheEXT;
+    }
+
+    bool operator<(ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT < rhs.m_validationCacheEXT;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkValidationCacheEXT() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkValidationCacheEXT m_validationCacheEXT;
+  };
+  static_assert( sizeof( ValidationCacheEXT ) == sizeof( VkValidationCacheEXT ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eValidationCacheEXT>
+  {
+    using type = ValidationCacheEXT;
+  };
+
+  class Queue
+  {
+  public:
+    using CType = VkQueue;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eQueue;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Queue() VULKAN_HPP_NOEXCEPT
+      : m_queue(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR Queue( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_queue(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Queue( VkQueue queue ) VULKAN_HPP_NOEXCEPT
+      : m_queue( queue )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Queue & operator=(VkQueue queue) VULKAN_HPP_NOEXCEPT
+    {
+      m_queue = queue;
+      return *this;
+    }
+#endif
+
+    Queue & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_queue = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue == rhs.m_queue;
+    }
+
+    bool operator!=(Queue const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue != rhs.m_queue;
+    }
+
+    bool operator<(Queue const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue < rhs.m_queue;
+    }
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getCheckpointDataNV( uint32_t* pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointDataNV* pCheckpointData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<CheckpointDataNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<CheckpointDataNV,Allocator> getCheckpointDataNV(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<CheckpointDataNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<CheckpointDataNV,Allocator> getCheckpointDataNV(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
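+
+    // The enhanced-mode overloads wrap the usual two-call enumeration: query the count,
+    // size a std::vector, then fetch the data. Sketch, assuming a queue `q` created on a
+    // device with VK_NV_device_diagnostic_checkpoints enabled:
+    //   std::vector<vk::CheckpointDataNV> checkpoints = q.getCheckpointDataNV();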
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result bindSparse( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindSparseInfo* pBindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type bindSparse( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endDebugUtilsLabelEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR* pPresentInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result presentKHR( const PresentInfoKHR & presentInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
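+
+    // presentKHR keeps its vk::Result return type even in enhanced mode because
+    // eSuboptimalKHR is an additional success code the caller may want to inspect.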
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result submit( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo* pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueue() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkQueue m_queue;
+  };
+  static_assert( sizeof( Queue ) == sizeof( VkQueue ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eQueue>
+  {
+    using type = Queue;
+  };
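+
+  // Illustrative sketch of typical Queue usage through the enhanced overloads (assumes a
+  // recorded command buffer `cb` and a queue `q`):
+  //   vk::SubmitInfo submitInfo( 0, nullptr, nullptr, 1, &cb );
+  //   q.submit( { submitInfo }, vk::Fence() );   // throws on failure in enhanced mode
+  //   q.waitIdle();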
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  class Device;
+  template <typename Dispatch> class UniqueHandleTraits<AccelerationStructureNV, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueAccelerationStructureNV = UniqueHandle<AccelerationStructureNV, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<Buffer, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueBuffer = UniqueHandle<Buffer, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<BufferView, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueBufferView = UniqueHandle<BufferView, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<CommandBuffer, Dispatch> { public: using deleter = PoolFree<Device, CommandPool, Dispatch>; };
+  using UniqueCommandBuffer = UniqueHandle<CommandBuffer, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<CommandPool, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueCommandPool = UniqueHandle<CommandPool, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<DescriptorPool, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueDescriptorPool = UniqueHandle<DescriptorPool, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<DescriptorSet, Dispatch> { public: using deleter = PoolFree<Device, DescriptorPool, Dispatch>; };
+  using UniqueDescriptorSet = UniqueHandle<DescriptorSet, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<DescriptorSetLayout, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueDescriptorSetLayout = UniqueHandle<DescriptorSetLayout, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<DescriptorUpdateTemplate, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueDescriptorUpdateTemplate = UniqueHandle<DescriptorUpdateTemplate, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<DeviceMemory, Dispatch> { public: using deleter = ObjectFree<Device, Dispatch>; };
+  using UniqueDeviceMemory = UniqueHandle<DeviceMemory, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<Event, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueEvent = UniqueHandle<Event, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<Fence, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueFence = UniqueHandle<Fence, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<Framebuffer, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueFramebuffer = UniqueHandle<Framebuffer, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<Image, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueImage = UniqueHandle<Image, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<ImageView, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueImageView = UniqueHandle<ImageView, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<IndirectCommandsLayoutNVX, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueIndirectCommandsLayoutNVX = UniqueHandle<IndirectCommandsLayoutNVX, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<ObjectTableNVX, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueObjectTableNVX = UniqueHandle<ObjectTableNVX, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<Pipeline, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniquePipeline = UniqueHandle<Pipeline, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<PipelineCache, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniquePipelineCache = UniqueHandle<PipelineCache, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<PipelineLayout, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniquePipelineLayout = UniqueHandle<PipelineLayout, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<QueryPool, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueQueryPool = UniqueHandle<QueryPool, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<RenderPass, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueRenderPass = UniqueHandle<RenderPass, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<Sampler, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueSampler = UniqueHandle<Sampler, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<SamplerYcbcrConversion, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueSamplerYcbcrConversion = UniqueHandle<SamplerYcbcrConversion, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<Semaphore, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueSemaphore = UniqueHandle<Semaphore, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<ShaderModule, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueShaderModule = UniqueHandle<ShaderModule, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<SwapchainKHR, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueSwapchainKHR = UniqueHandle<SwapchainKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<ValidationCacheEXT, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueValidationCacheEXT = UniqueHandle<ValidationCacheEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
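+
+  // Illustrative note: each Unique* alias above pairs a handle type with the
+  // deleter named in its UniqueHandleTraits specialization (ObjectDestroy or
+  // ObjectFree), so the wrapped handle is destroyed automatically when the
+  // UniqueHandle goes out of scope. These aliases are what the *Unique factory
+  // functions on Device (declared below) return. Sketch, assuming the default
+  // configuration (enhanced mode, smart handles, and exceptions enabled):
+  //   vk::UniqueFence fence = device.createFenceUnique( vk::FenceCreateInfo{} );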
+
+  class Device
+  {
+  public:
+    using CType = VkDevice;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDevice;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Device() VULKAN_HPP_NOEXCEPT
+      : m_device(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR Device( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_device(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Device( VkDevice device ) VULKAN_HPP_NOEXCEPT
+      : m_device( device )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Device & operator=(VkDevice device) VULKAN_HPP_NOEXCEPT
+    {
+      m_device = device;
+      return *this;
+    }
+#endif
+
+    Device & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_device = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( Device const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device == rhs.m_device;
+    }
+
+    bool operator!=(Device const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device != rhs.m_device;
+    }
+
+    bool operator<(Device const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device < rhs.m_device;
+    }
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValue<uint32_t> acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, uint32_t* pImageIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValue<uint32_t> acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
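+
+    // Illustrative sketch of the enhanced-mode overload above (default
+    // configuration; imageAvailable is an assumed, previously created semaphore):
+    //   vk::ResultValue<uint32_t> acquired =
+    //       device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailable, nullptr );
+    //   uint32_t imageIndex = acquired.value;  // acquired.result may be eSuccess or eSuboptimalKHR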
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL* pConfiguration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<CommandBuffer>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<CommandBuffer,Allocator>>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<CommandBuffer>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<CommandBuffer,Allocator>>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Allocator = std::allocator<UniqueCommandBuffer>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<UniqueHandle<CommandBuffer,Dispatch>,Allocator>>::type allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<UniqueCommandBuffer>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<UniqueHandle<CommandBuffer,Dispatch>,Allocator>>::type allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
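+
+    // Illustrative sketch of the smart-handle overload above (assumes an
+    // existing vk::CommandPool commandPool):
+    //   std::vector<vk::UniqueCommandBuffer> commandBuffers = device.allocateCommandBuffersUnique(
+    //       vk::CommandBufferAllocateInfo( commandPool, vk::CommandBufferLevel::ePrimary, 1 ) );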
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<DescriptorSet>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DescriptorSet,Allocator>>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<DescriptorSet>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DescriptorSet,Allocator>>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Allocator = std::allocator<UniqueDescriptorSet>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<UniqueHandle<DescriptorSet,Dispatch>,Allocator>>::type allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<UniqueDescriptorSet>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<UniqueHandle<DescriptorSet,Dispatch>,Allocator>>::type allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo* pAllocateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeviceMemory* pMemory, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<DeviceMemory,Dispatch>>::type allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
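+
+    // Illustrative sketch of the enhanced-mode overload above (assumes a
+    // filled-in vk::MemoryAllocateInfo allocateInfo):
+    //   vk::DeviceMemory memory = device.allocateMemory( allocateInfo );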
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type bindAccelerationStructureMemoryNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
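+
+    // Illustrative sketch (enhanced mode): bind a buffer to its backing memory at offset 0:
+    //   device.bindBufferMemory( buffer, memory, 0 );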
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result bindBufferMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type bindBufferMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result bindBufferMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type bindBufferMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result bindImageMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type bindImageMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result bindImageMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type bindImageMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructure, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<AccelerationStructureNV,Dispatch>>::type createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Buffer* pBuffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type createBuffer( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<Buffer,Dispatch>>::type createBufferUnique( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::BufferView* pView, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type createBufferView( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<BufferView,Dispatch>>::type createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::CommandPool* pCommandPool, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type createCommandPool( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<CommandPool,Dispatch>>::type createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<Pipeline,Allocator>>::type createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<Pipeline,Allocator>>::type createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<Pipeline>::type createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
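+
+    // Illustrative sketch of the single-pipeline convenience overload above
+    // (assumes a filled-in vk::ComputePipelineCreateInfo createInfo; a
+    // default-constructed vk::PipelineCache may be passed to skip caching):
+    //   vk::Pipeline pipeline = device.createComputePipeline( vk::PipelineCache(), createInfo );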
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool* pDescriptorPool, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<DescriptorPool,Dispatch>>::type createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<DescriptorSetLayout,Dispatch>>::type createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<DescriptorUpdateTemplate,Dispatch>>::type createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<DescriptorUpdateTemplate,Dispatch>>::type createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Event* pEvent, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type createEvent( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<Event,Dispatch>>::type createEventUnique( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type createFence( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<Fence,Dispatch>>::type createFenceUnique( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer* pFramebuffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type createFramebuffer( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<Framebuffer,Dispatch>>::type createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<Pipeline,Allocator>>::type createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<Pipeline,Allocator>>::type createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<Pipeline>::type createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Image* pImage, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type createImage( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<Image,Dispatch>>::type createImageUnique( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ImageView* pView, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type createImageView( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<ImageView,Dispatch>>::type createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createIndirectCommandsLayoutNVX( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX* pIndirectCommandsLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX>::type createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<IndirectCommandsLayoutNVX,Dispatch>>::type createIndirectCommandsLayoutNVXUnique( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createObjectTableNVX( const VULKAN_HPP_NAMESPACE::ObjectTableCreateInfoNVX* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ObjectTableNVX* pObjectTable, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::ObjectTableNVX>::type createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<ObjectTableNVX,Dispatch>>::type createObjectTableNVXUnique( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineCache* pPipelineCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<PipelineCache,Dispatch>>::type createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout* pPipelineLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<PipelineLayout,Dispatch>>::type createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::QueryPool* pQueryPool, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type createQueryPool( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<QueryPool,Dispatch>>::type createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<Pipeline,Allocator>>::type createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<Pipeline,Allocator>>::type createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<Pipeline>::type createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2KHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass2KHR( const RenderPassCreateInfo2KHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type createRenderPass2KHRUnique( const RenderPassCreateInfo2KHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Sampler* pSampler, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type createSampler( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<Sampler,Dispatch>>::type createSamplerUnique( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SamplerYcbcrConversion,Dispatch>>::type createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SamplerYcbcrConversion,Dispatch>>::type createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Semaphore* pSemaphore, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type createSemaphore( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<Semaphore,Dispatch>>::type createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ShaderModule* pShaderModule, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<ShaderModule,Dispatch>>::type createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createSharedSwapchainsKHR( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<SwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<SwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<SwapchainKHR>::type createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Allocator = std::allocator<UniqueSwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<UniqueSwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SwapchainKHR,Dispatch>>::type createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SwapchainKHR,Dispatch>>::type createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pValidationCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<ValidationCacheEXT,Dispatch>>::type createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Event event, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyImage( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyImage( VULKAN_HPP_NAMESPACE::Image image, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Image image, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyIndirectCommandsLayoutNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyIndirectCommandsLayoutNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyObjectTableNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyObjectTableNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result flushMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type flushMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> memoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, ArrayProxy<T> data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<StructureChain<X, Y, Z...>>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    DeviceAddress getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfoKHR* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    DeviceAddress getBufferAddressEXT( const BufferDeviceAddressInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    DeviceAddress getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfoKHR* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    DeviceAddress getBufferAddressKHR( const BufferDeviceAddressInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::MemoryRequirements getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfoKHR* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint64_t getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getCalibratedTimestampsEXT( uint32_t timestampCount, const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<uint64_t>::type getCalibratedTimestampsEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> timestampInfos, ArrayProxy<uint64_t> timestamps, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type getGroupPresentCapabilitiesKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize* pCommittedMemoryInBytes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::DeviceSize getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint64_t getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint64_t getMemoryOpaqueCaptureAddressKHR( const DeviceMemoryOpaqueCaptureAddressInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::Queue getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2* pQueueInfo, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::Queue getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<int>::type getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<HANDLE>::type getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::MemoryRequirements getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements* pSparseMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<SparseImageMemoryRequirements>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<SparseImageMemoryRequirements,Allocator> getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<SparseImageMemoryRequirements>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<SparseImageMemoryRequirements,Allocator> getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<SparseImageMemoryRequirements2,Allocator> getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<SparseImageMemoryRequirements2,Allocator> getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<SparseImageMemoryRequirements2,Allocator> getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<SparseImageMemoryRequirements2,Allocator> getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource* pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout* pLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::SubresourceLayout getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const ImageSubresource & subresource, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint32_t getImageViewHandleNVX( const ImageViewHandleInfoNVX & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<struct AHardwareBuffer*>::type getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<int>::type getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<HANDLE>::type getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<HANDLE>::type getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<PastPresentationTimingGOOGLE>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<PastPresentationTimingGOOGLE>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, VULKAN_HPP_NAMESPACE::PerformanceValueINTEL* pValue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<uint8_t,Allocator>>::type getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<uint8_t,Allocator>>::type getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR* pInternalRepresentations, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<PipelineExecutableInternalRepresentationKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,Allocator>>::type getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<PipelineExecutableInternalRepresentationKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,Allocator>>::type getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<PipelineExecutablePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR,Allocator>>::type getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<PipelineExecutablePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR,Allocator>>::type getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR* pStatistics, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<PipelineExecutableStatisticKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PipelineExecutableStatisticKHR,Allocator>>::type getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<PipelineExecutableStatisticKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PipelineExecutableStatisticKHR,Allocator>>::type getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy<T> data, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, VULKAN_HPP_NAMESPACE::Extent2D* pGranularity, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::Extent2D getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<uint64_t>::type getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<int>::type getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<HANDLE>::type getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<uint8_t,Allocator>>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<uint8_t,Allocator>>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<uint64_t>::type getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VULKAN_HPP_NAMESPACE::Image* pSwapchainImages, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<Image>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<Image,Allocator>>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<Image>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<Image,Allocator>>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<uint8_t,Allocator>>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<uint8_t,Allocator>>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type invalidateMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> memoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void** ppData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void*>::type mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags = MemoryMapFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::PipelineCache* pSrcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> srcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pSrcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> srcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT* pDeviceEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<Fence,Dispatch>>::type registerEventEXTUnique( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT* pDisplayEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<Fence,Dispatch>>::type registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result registerObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, uint32_t objectCount, const VULKAN_HPP_NAMESPACE::ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type registerObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, ArrayProxy<const VULKAN_HPP_NAMESPACE::ObjectTableEntryNVX* const> pObjectTableEntries, ArrayProxy<const uint32_t> objectIndices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void releaseProfilingLockKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags = DescriptorPoolResetFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags = DescriptorPoolResetFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result resetFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type resetFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> fences, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setHdrMetadataEXT( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT* pMetadata, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setHdrMetadataEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains, ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> metadata, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfoKHR* pSignalInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type signalSemaphoreKHR( const SemaphoreSignalInfoKHR & signalInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags = CommandPoolTrimFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags = CommandPoolTrimFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void uninitializePerformanceApiINTEL(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result unregisterObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, uint32_t objectCount, const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type unregisterObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, ArrayProxy<const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX> objectEntryTypes, ArrayProxy<const uint32_t> objectIndices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateDescriptorSets( uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VULKAN_HPP_NAMESPACE::CopyDescriptorSet* pDescriptorCopies, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateDescriptorSets( ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> descriptorWrites, ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> descriptorCopies, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result waitForFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result waitForFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfoKHR* pWaitInfo, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result waitSemaphoresKHR( const SemaphoreWaitInfoKHR & waitInfo, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDevice() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDevice m_device;
+  };
+  static_assert( sizeof( Device ) == sizeof( VkDevice ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eDevice>
+  {
+    using type = Device;
+  };
+
+  class DisplayModeKHR
+  {
+  public:
+    using CType = VkDisplayModeKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDisplayModeKHR;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DisplayModeKHR() VULKAN_HPP_NOEXCEPT
+      : m_displayModeKHR(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayModeKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_displayModeKHR(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DisplayModeKHR( VkDisplayModeKHR displayModeKHR ) VULKAN_HPP_NOEXCEPT
+      : m_displayModeKHR( displayModeKHR )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DisplayModeKHR & operator=(VkDisplayModeKHR displayModeKHR) VULKAN_HPP_NOEXCEPT
+    {
+      m_displayModeKHR = displayModeKHR;
+      return *this;
+    }
+#endif
+
+    DisplayModeKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_displayModeKHR = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR == rhs.m_displayModeKHR;
+    }
+
+    bool operator!=(DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR != rhs.m_displayModeKHR;
+    }
+
+    bool operator<(DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR < rhs.m_displayModeKHR;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayModeKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDisplayModeKHR m_displayModeKHR;
+  };
+  static_assert( sizeof( DisplayModeKHR ) == sizeof( VkDisplayModeKHR ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eDisplayModeKHR>
+  {
+    using type = DisplayModeKHR;
+  };
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch> class UniqueHandleTraits<Device, Dispatch> { public: using deleter = ObjectDestroy<NoParent, Dispatch>; };
+  using UniqueDevice = UniqueHandle<Device, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
+  class PhysicalDevice
+  {
+  public:
+    using CType = VkPhysicalDevice;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePhysicalDevice;
+
+  public:
+    VULKAN_HPP_CONSTEXPR PhysicalDevice() VULKAN_HPP_NOEXCEPT
+      : m_physicalDevice(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevice( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_physicalDevice(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT PhysicalDevice( VkPhysicalDevice physicalDevice ) VULKAN_HPP_NOEXCEPT
+      : m_physicalDevice( physicalDevice )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    PhysicalDevice & operator=(VkPhysicalDevice physicalDevice) VULKAN_HPP_NOEXCEPT
+    {
+      m_physicalDevice = physicalDevice;
+      return *this;
+    }
+#endif
+
+    PhysicalDevice & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_physicalDevice = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice == rhs.m_physicalDevice;
+    }
+
+    bool operator!=(PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice != rhs.m_physicalDevice;
+    }
+
+    bool operator<(PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice < rhs.m_physicalDevice;
+    }
+
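+    // The member functions below follow the usual vulkan.hpp pattern: each command has a
+    // C-style overload (raw pointers, returns vk::Result) plus, unless
+    // VULKAN_HPP_DISABLE_ENHANCED_MODE is defined, enhanced overloads that take references
+    // and return their output through ResultValueType (when exceptions are enabled, failures
+    // are reported by throwing). Array-returning enhanced overloads perform the two-call
+    // enumeration internally and accept an optional vector allocator; the Dispatch parameter
+    // selects the function-pointer table and defaults to VULKAN_HPP_DEFAULT_DISPATCHER.
+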
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result acquireXlibDisplayEXT( Display* dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<Display>::type acquireXlibDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Device* pDevice, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type createDevice( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<Device,Dispatch>>::type createDeviceUnique( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DisplayModeKHR* pMode, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateDeviceExtensionProperties( Optional<const std::string> layerName = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result enumerateDeviceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateDeviceLayerProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateDeviceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, uint32_t* pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR* pCounters, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR* pCounterDescriptions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<PerformanceCounterDescriptionKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR,Allocator>>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> counters, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<PerformanceCounterDescriptionKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR,Allocator>>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> counters, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<DisplayModeProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<DisplayModeProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<DisplayModePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<DisplayModePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR* pDisplayPlaneInfo, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR* pCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplays, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<DisplayKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<DisplayKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainEXT* pTimeDomains, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<TimeDomainEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<TimeDomainEXT,Allocator>>::type getCalibrateableTimeDomainsEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<TimeDomainEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<TimeDomainEXT,Allocator>>::type getCalibrateableTimeDomainsEXT(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getCooperativeMatrixPropertiesNV( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<CooperativeMatrixPropertiesNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV,Allocator>>::type getCooperativeMatrixPropertiesNV(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<CooperativeMatrixPropertiesNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV,Allocator>>::type getCooperativeMatrixPropertiesNV(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getDisplayPlaneProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<DisplayPlaneProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DisplayPlaneProperties2KHR,Allocator>>::type getDisplayPlaneProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<DisplayPlaneProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DisplayPlaneProperties2KHR,Allocator>>::type getDisplayPlaneProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<DisplayPlanePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type getDisplayPlanePropertiesKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<DisplayPlanePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type getDisplayPlanePropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getDisplayProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<DisplayProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DisplayProperties2KHR,Allocator>>::type getDisplayProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<DisplayProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DisplayProperties2KHR,Allocator>>::type getDisplayProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getDisplayPropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<DisplayPropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type getDisplayPropertiesKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<DisplayPropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type getDisplayPropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures getFeatures(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getFeatures2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getFeatures2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties* pFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::FormatProperties getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getGeneratedCommandsPropertiesNVX( VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsFeaturesNVX* pFeatures, VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX* pLimits, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties* pImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<StructureChain<X, Y, Z...>>::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<StructureChain<X, Y, Z...>>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties* pMemoryProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties getMemoryProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getMemoryProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getMemoryProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT* pMultisampleProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pRectCount, VULKAN_HPP_NAMESPACE::Rect2D* pRects, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<Rect2D>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<Rect2D,Allocator>>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<Rect2D>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<Rect2D,Allocator>>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties getProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint32_t getQueueFamilyPerformanceQueryPassesKHR( const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties* pQueueFamilyProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<QueueFamilyProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<QueueFamilyProperties,Allocator> getQueueFamilyProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<QueueFamilyProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<QueueFamilyProperties,Allocator> getQueueFamilyProperties(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getQueueFamilyProperties2( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const;
+    template<typename StructureChain, typename Allocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<StructureChain,Allocator> getQueueFamilyProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename StructureChain, typename Allocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<StructureChain,Allocator> getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
+    template<typename StructureChain, typename Allocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<StructureChain,Allocator> getQueueFamilyProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename StructureChain, typename Allocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<StructureChain,Allocator> getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<SparseImageFormatProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<SparseImageFormatProperties,Allocator> getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<SparseImageFormatProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<SparseImageFormatProperties,Allocator> getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<SparseImageFormatProperties2,Allocator> getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<SparseImageFormatProperties2,Allocator> getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<SparseImageFormatProperties2,Allocator> getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<SparseImageFormatProperties2,Allocator> getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t* pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV* pCombinations, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<FramebufferMixedSamplesCombinationNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<FramebufferMixedSamplesCombinationNV,Allocator>>::type getSupportedFramebufferMixedSamplesCombinationsNV(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<FramebufferMixedSamplesCombinationNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<FramebufferMixedSamplesCombinationNV,Allocator>>::type getSupportedFramebufferMixedSamplesCombinationsNV(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<StructureChain<X, Y, Z...>>::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<SurfaceFormat2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<SurfaceFormat2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<SurfaceFormatKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<SurfaceFormatKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::Bool32* pSupported, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getToolPropertiesEXT( uint32_t* pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT* pToolProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<PhysicalDeviceToolPropertiesEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PhysicalDeviceToolPropertiesEXT,Allocator>>::type getToolPropertiesEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<PhysicalDeviceToolPropertiesEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PhysicalDeviceToolPropertiesEXT,Allocator>>::type getToolPropertiesEXT(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplay, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<void>::type releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
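+
+    // Most entry points above are declared twice: a plain-Result overload that works with raw
+    // pointers, and an "enhanced" overload (available unless VULKAN_HPP_DISABLE_ENHANCED_MODE is
+    // defined) that returns ResultValueType<T>::type and throws a vk::SystemError-derived
+    // exception on failure when exceptions are enabled.  A minimal sketch of the enhanced form,
+    // assuming the default `vk` namespace and a display handle obtained elsewhere:
+    //
+    //   vk::PhysicalDevice gpu     = /* from Instance::enumeratePhysicalDevices() */;
+    //   vk::DisplayKHR     display = /* hypothetical handle */;
+    //   gpu.releaseDisplayEXT( display );   // returns void on success, throws on error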
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPhysicalDevice() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkPhysicalDevice m_physicalDevice;
+  };
+  static_assert( sizeof( PhysicalDevice ) == sizeof( VkPhysicalDevice ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::ePhysicalDevice>
+  {
+    using type = PhysicalDevice;
+  };
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  class Instance;
+  template <typename Dispatch> class UniqueHandleTraits<DebugReportCallbackEXT, Dispatch> { public: using deleter = ObjectDestroy<Instance, Dispatch>; };
+  using UniqueDebugReportCallbackEXT = UniqueHandle<DebugReportCallbackEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<DebugUtilsMessengerEXT, Dispatch> { public: using deleter = ObjectDestroy<Instance, Dispatch>; };
+  using UniqueDebugUtilsMessengerEXT = UniqueHandle<DebugUtilsMessengerEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<SurfaceKHR, Dispatch> { public: using deleter = ObjectDestroy<Instance, Dispatch>; };
+  using UniqueSurfaceKHR = UniqueHandle<SurfaceKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
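+
+  // Each UniqueHandleTraits specialization above selects ObjectDestroy<Instance, Dispatch> as the
+  // deleter, so a unique handle destroys its wrapped object through the Instance that created it.
+  // A minimal sketch, assuming the default `vk` namespace, VK_USE_PLATFORM_WIN32_KHR, and a
+  // hypothetical surfaceCreateInfo filled in elsewhere:
+  //
+  //   {
+  //     vk::UniqueSurfaceKHR surface = instance.createWin32SurfaceKHRUnique( surfaceCreateInfo );
+  //     // ... use surface.get() wherever a vk::SurfaceKHR is needed ...
+  //   }   // vkDestroySurfaceKHR runs automatically when `surface` goes out of scope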
+
+  class Instance
+  {
+  public:
+    using CType = VkInstance;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eInstance;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Instance() VULKAN_HPP_NOEXCEPT
+      : m_instance(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR Instance( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_instance(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Instance( VkInstance instance ) VULKAN_HPP_NOEXCEPT
+      : m_instance( instance )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Instance & operator=(VkInstance instance) VULKAN_HPP_NOEXCEPT
+    {
+      m_instance = instance;
+      return *this;
+    }
+#endif
+
+    Instance & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_instance = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance == rhs.m_instance;
+    }
+
+    bool operator!=(Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance != rhs.m_instance;
+    }
+
+    bool operator<(Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance < rhs.m_instance;
+    }
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT* pCallback, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<DebugReportCallbackEXT,Dispatch>>::type createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT* pMessenger, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<DebugUtilsMessengerEXT,Dispatch>>::type createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+#ifdef VK_USE_PLATFORM_GGP
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_GGP*/
+
+#ifdef VK_USE_PLATFORM_VI_NN
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroups(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroups(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroupsKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroupsKHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<PhysicalDevice>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type enumeratePhysicalDevices(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<PhysicalDevice>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type enumeratePhysicalDevices(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
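+
+    // In its pointer form enumeratePhysicalDevices uses the usual two-call pattern (query the
+    // count, then fill the array); the enhanced overloads return the filled std::vector directly.
+    // A minimal sketch of the enhanced form, assuming the default `vk` namespace:
+    //
+    //   std::vector<vk::PhysicalDevice> gpus = instance.enumeratePhysicalDevices();
+    //   for ( vk::PhysicalDevice const & gpu : gpus ) { /* inspect properties and pick one */ }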
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkInstance() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkInstance m_instance;
+  };
+  static_assert( sizeof( Instance ) == sizeof( VkInstance ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eInstance>
+  {
+    using type = Instance;
+  };
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch> class UniqueHandleTraits<Instance, Dispatch> { public: using deleter = ObjectDestroy<NoParent, Dispatch>; };
+  using UniqueInstance = UniqueHandle<Instance, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
+  template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Instance* pInstance, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  typename ResultValueType<UniqueHandle<Instance,Dispatch>>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
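+
+  // createInstance / createInstanceUnique are free functions rather than members, since no
+  // dispatchable parent object exists yet.  A minimal sketch of the unique-handle form, assuming
+  // the default `vk` namespace and a hypothetical application name:
+  //
+  //   vk::ApplicationInfo    appInfo( "demo", 1, "no-engine", 1, VK_API_VERSION_1_1 );
+  //   vk::InstanceCreateInfo createInfo( {}, &appInfo );
+  //   vk::UniqueInstance     instance = vk::createInstanceUnique( createInfo );
+  //   // the underlying VkInstance is destroyed automatically when `instance` leaves scope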
+
+  template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
+  template<typename Allocator = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Allocator const& vectorAllocator, Dispatch const &d );
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator = std::allocator<LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
+  template<typename Allocator = std::allocator<LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d );
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  ResultValueType<uint32_t>::type enumerateInstanceVersion(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
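+
+  // The instance-level queries above also come in enhanced forms that return a value or a vector
+  // instead of filling caller-provided storage.  A minimal sketch, assuming the default `vk`
+  // namespace:
+  //
+  //   uint32_t apiVersion = vk::enumerateInstanceVersion();
+  //   std::vector<vk::ExtensionProperties> extensions = vk::enumerateInstanceExtensionProperties();
+  //   std::vector<vk::LayerProperties>     layers     = vk::enumerateInstanceLayerProperties();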
+
+  struct GeometryTrianglesNV
+  {
+    VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( VULKAN_HPP_NAMESPACE::Buffer vertexData_ = VULKAN_HPP_NAMESPACE::Buffer(),
+                                              VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ = 0,
+                                              uint32_t vertexCount_ = 0,
+                                              VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = 0,
+                                              VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                              VULKAN_HPP_NAMESPACE::Buffer indexData_ = VULKAN_HPP_NAMESPACE::Buffer(),
+                                              VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ = 0,
+                                              uint32_t indexCount_ = 0,
+                                              VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16,
+                                              VULKAN_HPP_NAMESPACE::Buffer transformData_ = VULKAN_HPP_NAMESPACE::Buffer(),
+                                              VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : vertexData( vertexData_ )
+      , vertexOffset( vertexOffset_ )
+      , vertexCount( vertexCount_ )
+      , vertexStride( vertexStride_ )
+      , vertexFormat( vertexFormat_ )
+      , indexData( indexData_ )
+      , indexOffset( indexOffset_ )
+      , indexCount( indexCount_ )
+      , indexType( indexType_ )
+      , transformData( transformData_ )
+      , transformOffset( transformOffset_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::GeometryTrianglesNV & operator=( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV ) - offsetof( GeometryTrianglesNV, pNext ) );
+      return *this;
+    }
+
+    GeometryTrianglesNV( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    GeometryTrianglesNV& operator=( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const *>(&rhs);
+      return *this;
+    }
+
+    GeometryTrianglesNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setVertexData( VULKAN_HPP_NAMESPACE::Buffer vertexData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexData = vertexData_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setVertexOffset( VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexOffset = vertexOffset_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexCount = vertexCount_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexStride = vertexStride_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexFormat = vertexFormat_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setIndexData( VULKAN_HPP_NAMESPACE::Buffer indexData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexData = indexData_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexOffset = indexOffset_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexCount = indexCount_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexType = indexType_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setTransformData( VULKAN_HPP_NAMESPACE::Buffer transformData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      transformData = transformData_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setTransformOffset( VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      transformOffset = transformOffset_;
+      return *this;
+    }
+
+    operator VkGeometryTrianglesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkGeometryTrianglesNV*>( this );
+    }
+
+    operator VkGeometryTrianglesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkGeometryTrianglesNV*>( this );
+    }
+
+    bool operator==( GeometryTrianglesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( vertexData == rhs.vertexData )
+          && ( vertexOffset == rhs.vertexOffset )
+          && ( vertexCount == rhs.vertexCount )
+          && ( vertexStride == rhs.vertexStride )
+          && ( vertexFormat == rhs.vertexFormat )
+          && ( indexData == rhs.indexData )
+          && ( indexOffset == rhs.indexOffset )
+          && ( indexCount == rhs.indexCount )
+          && ( indexType == rhs.indexType )
+          && ( transformData == rhs.transformData )
+          && ( transformOffset == rhs.transformOffset );
+    }
+
+    bool operator!=( GeometryTrianglesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryTrianglesNV;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Buffer vertexData;
+    VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset;
+    uint32_t vertexCount;
+    VULKAN_HPP_NAMESPACE::DeviceSize vertexStride;
+    VULKAN_HPP_NAMESPACE::Format vertexFormat;
+    VULKAN_HPP_NAMESPACE::Buffer indexData;
+    VULKAN_HPP_NAMESPACE::DeviceSize indexOffset;
+    uint32_t indexCount;
+    VULKAN_HPP_NAMESPACE::IndexType indexType;
+    VULKAN_HPP_NAMESPACE::Buffer transformData;
+    VULKAN_HPP_NAMESPACE::DeviceSize transformOffset;
+  };
+  static_assert( sizeof( GeometryTrianglesNV ) == sizeof( VkGeometryTrianglesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<GeometryTrianglesNV>::value, "struct wrapper is not a standard layout!" );
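+
+  // Like the other struct wrappers, GeometryTrianglesNV initialises sType itself and offers
+  // chainable set*() members.  A minimal sketch, assuming the default `vk` namespace and
+  // hypothetical vertex/index buffers created elsewhere:
+  //
+  //   vk::GeometryTrianglesNV triangles = vk::GeometryTrianglesNV()
+  //                                           .setVertexData( vertexBuffer )
+  //                                           .setVertexCount( vertexCount )
+  //                                           .setVertexStride( sizeof( Vertex ) )
+  //                                           .setVertexFormat( vk::Format::eR32G32B32Sfloat )
+  //                                           .setIndexData( indexBuffer )
+  //                                           .setIndexCount( indexCount )
+  //                                           .setIndexType( vk::IndexType::eUint32 );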
+
+  struct GeometryAABBNV
+  {
+    VULKAN_HPP_CONSTEXPR GeometryAABBNV( VULKAN_HPP_NAMESPACE::Buffer aabbData_ = VULKAN_HPP_NAMESPACE::Buffer(),
+                                         uint32_t numAABBs_ = 0,
+                                         uint32_t stride_ = 0,
+                                         VULKAN_HPP_NAMESPACE::DeviceSize offset_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : aabbData( aabbData_ )
+      , numAABBs( numAABBs_ )
+      , stride( stride_ )
+      , offset( offset_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::GeometryAABBNV & operator=( VULKAN_HPP_NAMESPACE::GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::GeometryAABBNV ) - offsetof( GeometryAABBNV, pNext ) );
+      return *this;
+    }
+
+    GeometryAABBNV( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    GeometryAABBNV& operator=( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryAABBNV const *>(&rhs);
+      return *this;
+    }
+
+    GeometryAABBNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    GeometryAABBNV & setAabbData( VULKAN_HPP_NAMESPACE::Buffer aabbData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aabbData = aabbData_;
+      return *this;
+    }
+
+    GeometryAABBNV & setNumAABBs( uint32_t numAABBs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      numAABBs = numAABBs_;
+      return *this;
+    }
+
+    GeometryAABBNV & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stride = stride_;
+      return *this;
+    }
+
+    GeometryAABBNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    operator VkGeometryAABBNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkGeometryAABBNV*>( this );
+    }
+
+    operator VkGeometryAABBNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkGeometryAABBNV*>( this );
+    }
+
+    bool operator==( GeometryAABBNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( aabbData == rhs.aabbData )
+          && ( numAABBs == rhs.numAABBs )
+          && ( stride == rhs.stride )
+          && ( offset == rhs.offset );
+    }
+
+    bool operator!=( GeometryAABBNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryAabbNV;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Buffer aabbData;
+    uint32_t numAABBs;
+    uint32_t stride;
+    VULKAN_HPP_NAMESPACE::DeviceSize offset;
+  };
+  static_assert( sizeof( GeometryAABBNV ) == sizeof( VkGeometryAABBNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<GeometryAABBNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct GeometryDataNV
+  {
+    VULKAN_HPP_CONSTEXPR GeometryDataNV( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles_ = VULKAN_HPP_NAMESPACE::GeometryTrianglesNV(),
+                                         VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs_ = VULKAN_HPP_NAMESPACE::GeometryAABBNV() ) VULKAN_HPP_NOEXCEPT
+      : triangles( triangles_ )
+      , aabbs( aabbs_ )
+    {}
+
+    GeometryDataNV( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    GeometryDataNV& operator=( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryDataNV const *>(&rhs);
+      return *this;
+    }
+
+    GeometryDataNV & setTriangles( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles_ ) VULKAN_HPP_NOEXCEPT
+    {
+      triangles = triangles_;
+      return *this;
+    }
+
+    GeometryDataNV & setAabbs( VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aabbs = aabbs_;
+      return *this;
+    }
+
+    operator VkGeometryDataNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkGeometryDataNV*>( this );
+    }
+
+    operator VkGeometryDataNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkGeometryDataNV*>( this );
+    }
+
+    bool operator==( GeometryDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( triangles == rhs.triangles )
+          && ( aabbs == rhs.aabbs );
+    }
+
+    bool operator!=( GeometryDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles;
+    VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs;
+  };
+  static_assert( sizeof( GeometryDataNV ) == sizeof( VkGeometryDataNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<GeometryDataNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct GeometryNV
+  {
+    VULKAN_HPP_CONSTEXPR GeometryNV( VULKAN_HPP_NAMESPACE::GeometryTypeNV geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeNV::eTriangles,
+                                     VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ = VULKAN_HPP_NAMESPACE::GeometryDataNV(),
+                                     VULKAN_HPP_NAMESPACE::GeometryFlagsNV flags_ = VULKAN_HPP_NAMESPACE::GeometryFlagsNV() ) VULKAN_HPP_NOEXCEPT
+      : geometryType( geometryType_ )
+      , geometry( geometry_ )
+      , flags( flags_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::GeometryNV & operator=( VULKAN_HPP_NAMESPACE::GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::GeometryNV ) - offsetof( GeometryNV, pNext ) );
+      return *this;
+    }
+
+    GeometryNV( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    GeometryNV& operator=( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryNV const *>(&rhs);
+      return *this;
+    }
+
+    GeometryNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    GeometryNV & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeNV geometryType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometryType = geometryType_;
+      return *this;
+    }
+
+    GeometryNV & setGeometry( VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometry = geometry_;
+      return *this;
+    }
+
+    GeometryNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    operator VkGeometryNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkGeometryNV*>( this );
+    }
+
+    operator VkGeometryNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkGeometryNV*>( this );
+    }
+
+    bool operator==( GeometryNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( geometryType == rhs.geometryType )
+          && ( geometry == rhs.geometry )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( GeometryNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryNV;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::GeometryTypeNV geometryType;
+    VULKAN_HPP_NAMESPACE::GeometryDataNV geometry;
+    VULKAN_HPP_NAMESPACE::GeometryFlagsNV flags;
+  };
+  static_assert( sizeof( GeometryNV ) == sizeof( VkGeometryNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<GeometryNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct AccelerationStructureInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV::eTopLevel,
+                                                      VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ = VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV(),
+                                                      uint32_t instanceCount_ = 0,
+                                                      uint32_t geometryCount_ = 0,
+                                                      const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : type( type_ )
+      , flags( flags_ )
+      , instanceCount( instanceCount_ )
+      , geometryCount( geometryCount_ )
+      , pGeometries( pGeometries_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & operator=( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV ) - offsetof( AccelerationStructureInfoNV, pNext ) );
+      return *this;
+    }
+
+    AccelerationStructureInfoNV( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    AccelerationStructureInfoNV& operator=( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    AccelerationStructureInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AccelerationStructureInfoNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    AccelerationStructureInfoNV & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    AccelerationStructureInfoNV & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      instanceCount = instanceCount_;
+      return *this;
+    }
+
+    AccelerationStructureInfoNV & setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometryCount = geometryCount_;
+      return *this;
+    }
+
+    AccelerationStructureInfoNV & setPGeometries( const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pGeometries = pGeometries_;
+      return *this;
+    }
+
+    operator VkAccelerationStructureInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureInfoNV*>( this );
+    }
+
+    operator VkAccelerationStructureInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureInfoNV*>( this );
+    }
+
+    bool operator==( AccelerationStructureInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( type == rhs.type )
+          && ( flags == rhs.flags )
+          && ( instanceCount == rhs.instanceCount )
+          && ( geometryCount == rhs.geometryCount )
+          && ( pGeometries == rhs.pGeometries );
+    }
+
+    bool operator!=( AccelerationStructureInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureInfoNV;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type;
+    VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags;
+    uint32_t instanceCount;
+    uint32_t geometryCount;
+    const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries;
+  };
+  static_assert( sizeof( AccelerationStructureInfoNV ) == sizeof( VkAccelerationStructureInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AccelerationStructureInfoNV>::value, "struct wrapper is not a standard layout!" );
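+
+  // AccelerationStructureInfoNV describes either a bottom-level structure (geometryCount and
+  // pGeometries filled in) or a top-level structure (instanceCount filled in, no geometries).
+  // A minimal bottom-level sketch, assuming the default `vk` namespace and a `geometry` built
+  // from the GeometryNV wrapper above:
+  //
+  //   vk::AccelerationStructureInfoNV blasInfo = vk::AccelerationStructureInfoNV()
+  //       .setType( vk::AccelerationStructureTypeNV::eBottomLevel )
+  //       .setGeometryCount( 1 )
+  //       .setPGeometries( &geometry );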
+
+  struct AccelerationStructureCreateInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = 0,
+                                                            VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_ = VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV() ) VULKAN_HPP_NOEXCEPT
+      : compactedSize( compactedSize_ )
+      , info( info_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV ) - offsetof( AccelerationStructureCreateInfoNV, pNext ) );
+      return *this;
+    }
+
+    AccelerationStructureCreateInfoNV( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    AccelerationStructureCreateInfoNV& operator=( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    AccelerationStructureCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AccelerationStructureCreateInfoNV & setCompactedSize( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compactedSize = compactedSize_;
+      return *this;
+    }
+
+    AccelerationStructureCreateInfoNV & setInfo( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_ ) VULKAN_HPP_NOEXCEPT
+    {
+      info = info_;
+      return *this;
+    }
+
+    operator VkAccelerationStructureCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureCreateInfoNV*>( this );
+    }
+
+    operator VkAccelerationStructureCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureCreateInfoNV*>( this );
+    }
+
+    bool operator==( AccelerationStructureCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( compactedSize == rhs.compactedSize )
+          && ( info == rhs.info );
+    }
+
+    bool operator!=( AccelerationStructureCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoNV;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DeviceSize compactedSize;
+    VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info;
+  };
+  static_assert( sizeof( AccelerationStructureCreateInfoNV ) == sizeof( VkAccelerationStructureCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AccelerationStructureCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
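+
+  // Illustrative usage sketch (assumes the default "vk" namespace alias): a
+  // bottom-level acceleration structure create info composed from the nested
+  // AccelerationStructureInfoNV above; "geometry" stands in for an
+  // application-provided vk::GeometryNV and is purely hypothetical.
+  //
+  //   vk::AccelerationStructureInfoNV asInfo( vk::AccelerationStructureTypeNV::eBottomLevel,
+  //                                           vk::BuildAccelerationStructureFlagsNV(),
+  //                                           0 /* instanceCount */,
+  //                                           1 /* geometryCount */,
+  //                                           &geometry );
+  //   vk::AccelerationStructureCreateInfoNV createInfo( 0 /* compactedSize */, asInfo );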
+
+  struct AccelerationStructureMemoryRequirementsInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject,
+                                                                        VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = VULKAN_HPP_NAMESPACE::AccelerationStructureNV() ) VULKAN_HPP_NOEXCEPT
+      : type( type_ )
+      , accelerationStructure( accelerationStructure_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & operator=( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV ) - offsetof( AccelerationStructureMemoryRequirementsInfoNV, pNext ) );
+      return *this;
+    }
+
+    AccelerationStructureMemoryRequirementsInfoNV( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    AccelerationStructureMemoryRequirementsInfoNV& operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    AccelerationStructureMemoryRequirementsInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AccelerationStructureMemoryRequirementsInfoNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    AccelerationStructureMemoryRequirementsInfoNV & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructure = accelerationStructure_;
+      return *this;
+    }
+
+    operator VkAccelerationStructureMemoryRequirementsInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV*>( this );
+    }
+
+    operator VkAccelerationStructureMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureMemoryRequirementsInfoNV*>( this );
+    }
+
+    bool operator==( AccelerationStructureMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( type == rhs.type )
+          && ( accelerationStructure == rhs.accelerationStructure );
+    }
+
+    bool operator!=( AccelerationStructureMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type;
+    VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
+  };
+  static_assert( sizeof( AccelerationStructureMemoryRequirementsInfoNV ) == sizeof( VkAccelerationStructureMemoryRequirementsInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AccelerationStructureMemoryRequirementsInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct AcquireNextImageInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = VULKAN_HPP_NAMESPACE::SwapchainKHR(),
+                                                  uint64_t timeout_ = 0,
+                                                  VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = VULKAN_HPP_NAMESPACE::Semaphore(),
+                                                  VULKAN_HPP_NAMESPACE::Fence fence_ = VULKAN_HPP_NAMESPACE::Fence(),
+                                                  uint32_t deviceMask_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : swapchain( swapchain_ )
+      , timeout( timeout_ )
+      , semaphore( semaphore_ )
+      , fence( fence_ )
+      , deviceMask( deviceMask_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & operator=( VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR ) - offsetof( AcquireNextImageInfoKHR, pNext ) );
+      return *this;
+    }
+
+    AcquireNextImageInfoKHR( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    AcquireNextImageInfoKHR& operator=( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    AcquireNextImageInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AcquireNextImageInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchain = swapchain_;
+      return *this;
+    }
+
+    AcquireNextImageInfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      timeout = timeout_;
+      return *this;
+    }
+
+    AcquireNextImageInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphore = semaphore_;
+      return *this;
+    }
+
+    AcquireNextImageInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fence = fence_;
+      return *this;
+    }
+
+    AcquireNextImageInfoKHR & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceMask = deviceMask_;
+      return *this;
+    }
+
+    operator VkAcquireNextImageInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAcquireNextImageInfoKHR*>( this );
+    }
+
+    operator VkAcquireNextImageInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAcquireNextImageInfoKHR*>( this );
+    }
+
+    bool operator==( AcquireNextImageInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchain == rhs.swapchain )
+          && ( timeout == rhs.timeout )
+          && ( semaphore == rhs.semaphore )
+          && ( fence == rhs.fence )
+          && ( deviceMask == rhs.deviceMask );
+    }
+
+    bool operator!=( AcquireNextImageInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireNextImageInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
+    uint64_t timeout;
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore;
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    uint32_t deviceMask;
+  };
+  static_assert( sizeof( AcquireNextImageInfoKHR ) == sizeof( VkAcquireNextImageInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AcquireNextImageInfoKHR>::value, "struct wrapper is not a standard layout!" );
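+
+  // Illustrative usage sketch (assumes the default "vk" namespace alias):
+  // acquiring the next swapchain image on a single-GPU device group. The
+  // "swapchain" and "imageAvailableSemaphore" handles are hypothetical,
+  // application-created objects.
+  //
+  //   vk::AcquireNextImageInfoKHR acquireInfo = vk::AcquireNextImageInfoKHR()
+  //       .setSwapchain( swapchain )
+  //       .setTimeout( UINT64_MAX )
+  //       .setSemaphore( imageAvailableSemaphore )
+  //       .setDeviceMask( 1 );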
+
+  struct AcquireProfilingLockInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ = VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR(),
+                                                      uint64_t timeout_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , timeout( timeout_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & operator=( VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR ) - offsetof( AcquireProfilingLockInfoKHR, pNext ) );
+      return *this;
+    }
+
+    AcquireProfilingLockInfoKHR( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    AcquireProfilingLockInfoKHR& operator=( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    AcquireProfilingLockInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AcquireProfilingLockInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    AcquireProfilingLockInfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      timeout = timeout_;
+      return *this;
+    }
+
+    operator VkAcquireProfilingLockInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAcquireProfilingLockInfoKHR*>( this );
+    }
+
+    operator VkAcquireProfilingLockInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAcquireProfilingLockInfoKHR*>( this );
+    }
+
+    bool operator==( AcquireProfilingLockInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( timeout == rhs.timeout );
+    }
+
+    bool operator!=( AcquireProfilingLockInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireProfilingLockInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags;
+    uint64_t timeout;
+  };
+  static_assert( sizeof( AcquireProfilingLockInfoKHR ) == sizeof( VkAcquireProfilingLockInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AcquireProfilingLockInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct AllocationCallbacks
+  {
+    VULKAN_HPP_CONSTEXPR AllocationCallbacks( void* pUserData_ = nullptr,
+                                              PFN_vkAllocationFunction pfnAllocation_ = nullptr,
+                                              PFN_vkReallocationFunction pfnReallocation_ = nullptr,
+                                              PFN_vkFreeFunction pfnFree_ = nullptr,
+                                              PFN_vkInternalAllocationNotification pfnInternalAllocation_ = nullptr,
+                                              PFN_vkInternalFreeNotification pfnInternalFree_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pUserData( pUserData_ )
+      , pfnAllocation( pfnAllocation_ )
+      , pfnReallocation( pfnReallocation_ )
+      , pfnFree( pfnFree_ )
+      , pfnInternalAllocation( pfnInternalAllocation_ )
+      , pfnInternalFree( pfnInternalFree_ )
+    {}
+
+    AllocationCallbacks( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    AllocationCallbacks& operator=( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AllocationCallbacks const *>(&rhs);
+      return *this;
+    }
+
+    AllocationCallbacks & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUserData = pUserData_;
+      return *this;
+    }
+
+    AllocationCallbacks & setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnAllocation = pfnAllocation_;
+      return *this;
+    }
+
+    AllocationCallbacks & setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnReallocation = pfnReallocation_;
+      return *this;
+    }
+
+    AllocationCallbacks & setPfnFree( PFN_vkFreeFunction pfnFree_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnFree = pfnFree_;
+      return *this;
+    }
+
+    AllocationCallbacks & setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnInternalAllocation = pfnInternalAllocation_;
+      return *this;
+    }
+
+    AllocationCallbacks & setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnInternalFree = pfnInternalFree_;
+      return *this;
+    }
+
+    operator VkAllocationCallbacks const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAllocationCallbacks*>( this );
+    }
+
+    operator VkAllocationCallbacks &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAllocationCallbacks*>( this );
+    }
+
+    bool operator==( AllocationCallbacks const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( pUserData == rhs.pUserData )
+          && ( pfnAllocation == rhs.pfnAllocation )
+          && ( pfnReallocation == rhs.pfnReallocation )
+          && ( pfnFree == rhs.pfnFree )
+          && ( pfnInternalAllocation == rhs.pfnInternalAllocation )
+          && ( pfnInternalFree == rhs.pfnInternalFree );
+    }
+
+    bool operator!=( AllocationCallbacks const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    void* pUserData;
+    PFN_vkAllocationFunction pfnAllocation;
+    PFN_vkReallocationFunction pfnReallocation;
+    PFN_vkFreeFunction pfnFree;
+    PFN_vkInternalAllocationNotification pfnInternalAllocation;
+    PFN_vkInternalFreeNotification pfnInternalFree;
+  };
+  static_assert( sizeof( AllocationCallbacks ) == sizeof( VkAllocationCallbacks ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AllocationCallbacks>::value, "struct wrapper is not a standard layout!" );
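+
+  // Illustrative usage sketch (assumes the default "vk" namespace alias):
+  // routing host allocations through user hooks. "myAlloc", "myRealloc",
+  // "myFree" and "myAllocatorState" are hypothetical names; the functions
+  // must match the PFN_vkAllocationFunction, PFN_vkReallocationFunction and
+  // PFN_vkFreeFunction signatures.
+  //
+  //   vk::AllocationCallbacks callbacks = vk::AllocationCallbacks()
+  //       .setPUserData( &myAllocatorState )
+  //       .setPfnAllocation( &myAlloc )
+  //       .setPfnReallocation( &myRealloc )
+  //       .setPfnFree( &myFree );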
+
+  struct ComponentMapping
+  {
+    VULKAN_HPP_CONSTEXPR ComponentMapping( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity,
+                                           VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity,
+                                           VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity,
+                                           VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity ) VULKAN_HPP_NOEXCEPT
+      : r( r_ )
+      , g( g_ )
+      , b( b_ )
+      , a( a_ )
+    {}
+
+    ComponentMapping( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ComponentMapping& operator=( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComponentMapping const *>(&rhs);
+      return *this;
+    }
+
+    ComponentMapping & setR( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ ) VULKAN_HPP_NOEXCEPT
+    {
+      r = r_;
+      return *this;
+    }
+
+    ComponentMapping & setG( VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ ) VULKAN_HPP_NOEXCEPT
+    {
+      g = g_;
+      return *this;
+    }
+
+    ComponentMapping & setB( VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ ) VULKAN_HPP_NOEXCEPT
+    {
+      b = b_;
+      return *this;
+    }
+
+    ComponentMapping & setA( VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ ) VULKAN_HPP_NOEXCEPT
+    {
+      a = a_;
+      return *this;
+    }
+
+    operator VkComponentMapping const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkComponentMapping*>( this );
+    }
+
+    operator VkComponentMapping &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkComponentMapping*>( this );
+    }
+
+    bool operator==( ComponentMapping const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( r == rhs.r )
+          && ( g == rhs.g )
+          && ( b == rhs.b )
+          && ( a == rhs.a );
+    }
+
+    bool operator!=( ComponentMapping const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::ComponentSwizzle r;
+    VULKAN_HPP_NAMESPACE::ComponentSwizzle g;
+    VULKAN_HPP_NAMESPACE::ComponentSwizzle b;
+    VULKAN_HPP_NAMESPACE::ComponentSwizzle a;
+  };
+  static_assert( sizeof( ComponentMapping ) == sizeof( VkComponentMapping ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ComponentMapping>::value, "struct wrapper is not a standard layout!" );
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+  struct AndroidHardwareBufferFormatPropertiesANDROID
+  {
+    AndroidHardwareBufferFormatPropertiesANDROID( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                                  uint64_t externalFormat_ = 0,
+                                                  VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = VULKAN_HPP_NAMESPACE::FormatFeatureFlags(),
+                                                  VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = VULKAN_HPP_NAMESPACE::ComponentMapping(),
+                                                  VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity,
+                                                  VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull,
+                                                  VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
+                                                  VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven ) VULKAN_HPP_NOEXCEPT
+      : format( format_ )
+      , externalFormat( externalFormat_ )
+      , formatFeatures( formatFeatures_ )
+      , samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ )
+      , suggestedYcbcrModel( suggestedYcbcrModel_ )
+      , suggestedYcbcrRange( suggestedYcbcrRange_ )
+      , suggestedXChromaOffset( suggestedXChromaOffset_ )
+      , suggestedYChromaOffset( suggestedYChromaOffset_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID & operator=( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID ) - offsetof( AndroidHardwareBufferFormatPropertiesANDROID, pNext ) );
+      return *this;
+    }
+
+    AndroidHardwareBufferFormatPropertiesANDROID( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    AndroidHardwareBufferFormatPropertiesANDROID& operator=( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID const *>(&rhs);
+      return *this;
+    }
+
+    operator VkAndroidHardwareBufferFormatPropertiesANDROID const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAndroidHardwareBufferFormatPropertiesANDROID*>( this );
+    }
+
+    operator VkAndroidHardwareBufferFormatPropertiesANDROID &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAndroidHardwareBufferFormatPropertiesANDROID*>( this );
+    }
+
+    bool operator==( AndroidHardwareBufferFormatPropertiesANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( format == rhs.format )
+          && ( externalFormat == rhs.externalFormat )
+          && ( formatFeatures == rhs.formatFeatures )
+          && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents )
+          && ( suggestedYcbcrModel == rhs.suggestedYcbcrModel )
+          && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange )
+          && ( suggestedXChromaOffset == rhs.suggestedXChromaOffset )
+          && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
+    }
+
+    bool operator!=( AndroidHardwareBufferFormatPropertiesANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Format format;
+    uint64_t externalFormat;
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures;
+    VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents;
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel;
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange;
+    VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset;
+    VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset;
+  };
+  static_assert( sizeof( AndroidHardwareBufferFormatPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferFormatPropertiesANDROID ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AndroidHardwareBufferFormatPropertiesANDROID>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+  struct AndroidHardwareBufferPropertiesANDROID
+  {
+    AndroidHardwareBufferPropertiesANDROID( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = 0,
+                                            uint32_t memoryTypeBits_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : allocationSize( allocationSize_ )
+      , memoryTypeBits( memoryTypeBits_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & operator=( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID ) - offsetof( AndroidHardwareBufferPropertiesANDROID, pNext ) );
+      return *this;
+    }
+
+    AndroidHardwareBufferPropertiesANDROID( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    AndroidHardwareBufferPropertiesANDROID& operator=( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID const *>(&rhs);
+      return *this;
+    }
+
+    operator VkAndroidHardwareBufferPropertiesANDROID const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAndroidHardwareBufferPropertiesANDROID*>( this );
+    }
+
+    operator VkAndroidHardwareBufferPropertiesANDROID &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( this );
+    }
+
+    bool operator==( AndroidHardwareBufferPropertiesANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( allocationSize == rhs.allocationSize )
+          && ( memoryTypeBits == rhs.memoryTypeBits );
+    }
+
+    bool operator!=( AndroidHardwareBufferPropertiesANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferPropertiesANDROID;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DeviceSize allocationSize;
+    uint32_t memoryTypeBits;
+  };
+  static_assert( sizeof( AndroidHardwareBufferPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferPropertiesANDROID ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AndroidHardwareBufferPropertiesANDROID>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+  struct AndroidHardwareBufferUsageANDROID
+  {
+    AndroidHardwareBufferUsageANDROID( uint64_t androidHardwareBufferUsage_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : androidHardwareBufferUsage( androidHardwareBufferUsage_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID & operator=( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID ) - offsetof( AndroidHardwareBufferUsageANDROID, pNext ) );
+      return *this;
+    }
+
+    AndroidHardwareBufferUsageANDROID( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    AndroidHardwareBufferUsageANDROID& operator=( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID const *>(&rhs);
+      return *this;
+    }
+
+    operator VkAndroidHardwareBufferUsageANDROID const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAndroidHardwareBufferUsageANDROID*>( this );
+    }
+
+    operator VkAndroidHardwareBufferUsageANDROID &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAndroidHardwareBufferUsageANDROID*>( this );
+    }
+
+    bool operator==( AndroidHardwareBufferUsageANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( androidHardwareBufferUsage == rhs.androidHardwareBufferUsage );
+    }
+
+    bool operator!=( AndroidHardwareBufferUsageANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferUsageANDROID;
+    void* pNext = nullptr;
+    uint64_t androidHardwareBufferUsage;
+  };
+  static_assert( sizeof( AndroidHardwareBufferUsageANDROID ) == sizeof( VkAndroidHardwareBufferUsageANDROID ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AndroidHardwareBufferUsageANDROID>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+  struct AndroidSurfaceCreateInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ = VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR(),
+                                                      struct ANativeWindow* window_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , window( window_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR ) - offsetof( AndroidSurfaceCreateInfoKHR, pNext ) );
+      return *this;
+    }
+
+    AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    AndroidSurfaceCreateInfoKHR& operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    AndroidSurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AndroidSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    AndroidSurfaceCreateInfoKHR & setWindow( struct ANativeWindow* window_ ) VULKAN_HPP_NOEXCEPT
+    {
+      window = window_;
+      return *this;
+    }
+
+    operator VkAndroidSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( this );
+    }
+
+    operator VkAndroidSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAndroidSurfaceCreateInfoKHR*>( this );
+    }
+
+    bool operator==( AndroidSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( window == rhs.window );
+    }
+
+    bool operator!=( AndroidSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidSurfaceCreateInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags;
+    struct ANativeWindow* window;
+  };
+  static_assert( sizeof( AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AndroidSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  struct ApplicationInfo
+  {
+    VULKAN_HPP_CONSTEXPR ApplicationInfo( const char* pApplicationName_ = nullptr,
+                                          uint32_t applicationVersion_ = 0,
+                                          const char* pEngineName_ = nullptr,
+                                          uint32_t engineVersion_ = 0,
+                                          uint32_t apiVersion_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : pApplicationName( pApplicationName_ )
+      , applicationVersion( applicationVersion_ )
+      , pEngineName( pEngineName_ )
+      , engineVersion( engineVersion_ )
+      , apiVersion( apiVersion_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ApplicationInfo & operator=( VULKAN_HPP_NAMESPACE::ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ApplicationInfo ) - offsetof( ApplicationInfo, pNext ) );
+      return *this;
+    }
+
+    ApplicationInfo( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ApplicationInfo& operator=( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ApplicationInfo const *>(&rhs);
+      return *this;
+    }
+
+    ApplicationInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ApplicationInfo & setPApplicationName( const char* pApplicationName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pApplicationName = pApplicationName_;
+      return *this;
+    }
+
+    ApplicationInfo & setApplicationVersion( uint32_t applicationVersion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      applicationVersion = applicationVersion_;
+      return *this;
+    }
+
+    ApplicationInfo & setPEngineName( const char* pEngineName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pEngineName = pEngineName_;
+      return *this;
+    }
+
+    ApplicationInfo & setEngineVersion( uint32_t engineVersion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      engineVersion = engineVersion_;
+      return *this;
+    }
+
+    ApplicationInfo & setApiVersion( uint32_t apiVersion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      apiVersion = apiVersion_;
+      return *this;
+    }
+
+    operator VkApplicationInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkApplicationInfo*>( this );
+    }
+
+    operator VkApplicationInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkApplicationInfo*>( this );
+    }
+
+    bool operator==( ApplicationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pApplicationName == rhs.pApplicationName )
+          && ( applicationVersion == rhs.applicationVersion )
+          && ( pEngineName == rhs.pEngineName )
+          && ( engineVersion == rhs.engineVersion )
+          && ( apiVersion == rhs.apiVersion );
+    }
+
+    bool operator!=( ApplicationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eApplicationInfo;
+    const void* pNext = nullptr;
+    const char* pApplicationName;
+    uint32_t applicationVersion;
+    const char* pEngineName;
+    uint32_t engineVersion;
+    uint32_t apiVersion;
+  };
+  static_assert( sizeof( ApplicationInfo ) == sizeof( VkApplicationInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ApplicationInfo>::value, "struct wrapper is not a standard layout!" );
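+
+  // Illustrative usage sketch (assumes the default "vk" namespace alias): the
+  // fluent setters allow an ApplicationInfo to be filled in one expression;
+  // the names and version numbers below are placeholder values.
+  //
+  //   vk::ApplicationInfo appInfo = vk::ApplicationInfo()
+  //       .setPApplicationName( "MyApplication" )
+  //       .setApplicationVersion( VK_MAKE_VERSION( 1, 0, 0 ) )
+  //       .setPEngineName( "MyEngine" )
+  //       .setEngineVersion( VK_MAKE_VERSION( 1, 0, 0 ) )
+  //       .setApiVersion( VK_API_VERSION_1_1 );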
+
+  struct AttachmentDescription
+  {
+    VULKAN_HPP_CONSTEXPR AttachmentDescription( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags(),
+                                                VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                                VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+                                                VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
+                                                VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
+                                                VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
+                                                VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
+                                                VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+                                                VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , format( format_ )
+      , samples( samples_ )
+      , loadOp( loadOp_ )
+      , storeOp( storeOp_ )
+      , stencilLoadOp( stencilLoadOp_ )
+      , stencilStoreOp( stencilStoreOp_ )
+      , initialLayout( initialLayout_ )
+      , finalLayout( finalLayout_ )
+    {}
+
+    AttachmentDescription( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    AttachmentDescription& operator=( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescription const *>(&rhs);
+      return *this;
+    }
+
+    AttachmentDescription & setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    AttachmentDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    AttachmentDescription & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samples = samples_;
+      return *this;
+    }
+
+    AttachmentDescription & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      loadOp = loadOp_;
+      return *this;
+    }
+
+    AttachmentDescription & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storeOp = storeOp_;
+      return *this;
+    }
+
+    AttachmentDescription & setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilLoadOp = stencilLoadOp_;
+      return *this;
+    }
+
+    AttachmentDescription & setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilStoreOp = stencilStoreOp_;
+      return *this;
+    }
+
+    AttachmentDescription & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialLayout = initialLayout_;
+      return *this;
+    }
+
+    AttachmentDescription & setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      finalLayout = finalLayout_;
+      return *this;
+    }
+
+    operator VkAttachmentDescription const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAttachmentDescription*>( this );
+    }
+
+    operator VkAttachmentDescription &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAttachmentDescription*>( this );
+    }
+
+    bool operator==( AttachmentDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( flags == rhs.flags )
+          && ( format == rhs.format )
+          && ( samples == rhs.samples )
+          && ( loadOp == rhs.loadOp )
+          && ( storeOp == rhs.storeOp )
+          && ( stencilLoadOp == rhs.stencilLoadOp )
+          && ( stencilStoreOp == rhs.stencilStoreOp )
+          && ( initialLayout == rhs.initialLayout )
+          && ( finalLayout == rhs.finalLayout );
+    }
+
+    bool operator!=( AttachmentDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags;
+    VULKAN_HPP_NAMESPACE::Format format;
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples;
+    VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp;
+    VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp;
+    VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp;
+    VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp;
+    VULKAN_HPP_NAMESPACE::ImageLayout initialLayout;
+    VULKAN_HPP_NAMESPACE::ImageLayout finalLayout;
+  };
+  static_assert( sizeof( AttachmentDescription ) == sizeof( VkAttachmentDescription ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AttachmentDescription>::value, "struct wrapper is not a standard layout!" );
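+
+  // Illustrative usage sketch (assumes the default "vk" namespace alias): a
+  // typical color attachment, cleared on load and transitioned to the present
+  // layout; the B8G8R8A8 format is a placeholder for whatever the swapchain
+  // actually reports.
+  //
+  //   vk::AttachmentDescription colorAttachment = vk::AttachmentDescription()
+  //       .setFormat( vk::Format::eB8G8R8A8Unorm )
+  //       .setSamples( vk::SampleCountFlagBits::e1 )
+  //       .setLoadOp( vk::AttachmentLoadOp::eClear )
+  //       .setStoreOp( vk::AttachmentStoreOp::eStore )
+  //       .setStencilLoadOp( vk::AttachmentLoadOp::eDontCare )
+  //       .setStencilStoreOp( vk::AttachmentStoreOp::eDontCare )
+  //       .setInitialLayout( vk::ImageLayout::eUndefined )
+  //       .setFinalLayout( vk::ImageLayout::ePresentSrcKHR );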
+
+  struct AttachmentDescription2KHR
+  {
+    VULKAN_HPP_CONSTEXPR AttachmentDescription2KHR( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags(),
+                                                    VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                                    VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+                                                    VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
+                                                    VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
+                                                    VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
+                                                    VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
+                                                    VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+                                                    VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , format( format_ )
+      , samples( samples_ )
+      , loadOp( loadOp_ )
+      , storeOp( storeOp_ )
+      , stencilLoadOp( stencilLoadOp_ )
+      , stencilStoreOp( stencilStoreOp_ )
+      , initialLayout( initialLayout_ )
+      , finalLayout( finalLayout_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::AttachmentDescription2KHR & operator=( VULKAN_HPP_NAMESPACE::AttachmentDescription2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescription2KHR ) - offsetof( AttachmentDescription2KHR, pNext ) );
+      return *this;
+    }
+
+    AttachmentDescription2KHR( VkAttachmentDescription2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    AttachmentDescription2KHR& operator=( VkAttachmentDescription2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescription2KHR const *>(&rhs);
+      return *this;
+    }
+
+    AttachmentDescription2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AttachmentDescription2KHR & setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    AttachmentDescription2KHR & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    AttachmentDescription2KHR & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samples = samples_;
+      return *this;
+    }
+
+    AttachmentDescription2KHR & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      loadOp = loadOp_;
+      return *this;
+    }
+
+    AttachmentDescription2KHR & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storeOp = storeOp_;
+      return *this;
+    }
+
+    AttachmentDescription2KHR & setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilLoadOp = stencilLoadOp_;
+      return *this;
+    }
+
+    AttachmentDescription2KHR & setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilStoreOp = stencilStoreOp_;
+      return *this;
+    }
+
+    AttachmentDescription2KHR & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialLayout = initialLayout_;
+      return *this;
+    }
+
+    AttachmentDescription2KHR & setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      finalLayout = finalLayout_;
+      return *this;
+    }
+
+    operator VkAttachmentDescription2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAttachmentDescription2KHR*>( this );
+    }
+
+    operator VkAttachmentDescription2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAttachmentDescription2KHR*>( this );
+    }
+
+    bool operator==( AttachmentDescription2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( format == rhs.format )
+          && ( samples == rhs.samples )
+          && ( loadOp == rhs.loadOp )
+          && ( storeOp == rhs.storeOp )
+          && ( stencilLoadOp == rhs.stencilLoadOp )
+          && ( stencilStoreOp == rhs.stencilStoreOp )
+          && ( initialLayout == rhs.initialLayout )
+          && ( finalLayout == rhs.finalLayout );
+    }
+
+    bool operator!=( AttachmentDescription2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescription2KHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags;
+    VULKAN_HPP_NAMESPACE::Format format;
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples;
+    VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp;
+    VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp;
+    VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp;
+    VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp;
+    VULKAN_HPP_NAMESPACE::ImageLayout initialLayout;
+    VULKAN_HPP_NAMESPACE::ImageLayout finalLayout;
+  };
+  static_assert( sizeof( AttachmentDescription2KHR ) == sizeof( VkAttachmentDescription2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AttachmentDescription2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct AttachmentDescriptionStencilLayoutKHR
+  {
+    VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayoutKHR( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+                                                                VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
+      : stencilInitialLayout( stencilInitialLayout_ )
+      , stencilFinalLayout( stencilFinalLayout_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayoutKHR & operator=( VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayoutKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayoutKHR ) - offsetof( AttachmentDescriptionStencilLayoutKHR, pNext ) );
+      return *this;
+    }
+
+    AttachmentDescriptionStencilLayoutKHR( VkAttachmentDescriptionStencilLayoutKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    AttachmentDescriptionStencilLayoutKHR& operator=( VkAttachmentDescriptionStencilLayoutKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayoutKHR const *>(&rhs);
+      return *this;
+    }
+
+    AttachmentDescriptionStencilLayoutKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AttachmentDescriptionStencilLayoutKHR & setStencilInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilInitialLayout = stencilInitialLayout_;
+      return *this;
+    }
+
+    AttachmentDescriptionStencilLayoutKHR & setStencilFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilFinalLayout = stencilFinalLayout_;
+      return *this;
+    }
+
+    operator VkAttachmentDescriptionStencilLayoutKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAttachmentDescriptionStencilLayoutKHR*>( this );
+    }
+
+    operator VkAttachmentDescriptionStencilLayoutKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAttachmentDescriptionStencilLayoutKHR*>( this );
+    }
+
+    bool operator==( AttachmentDescriptionStencilLayoutKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( stencilInitialLayout == rhs.stencilInitialLayout )
+          && ( stencilFinalLayout == rhs.stencilFinalLayout );
+    }
+
+    bool operator!=( AttachmentDescriptionStencilLayoutKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescriptionStencilLayoutKHR;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout;
+    VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout;
+  };
+  static_assert( sizeof( AttachmentDescriptionStencilLayoutKHR ) == sizeof( VkAttachmentDescriptionStencilLayoutKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AttachmentDescriptionStencilLayoutKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct AttachmentReference
+  {
+    VULKAN_HPP_CONSTEXPR AttachmentReference( uint32_t attachment_ = 0,
+                                              VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
+      : attachment( attachment_ )
+      , layout( layout_ )
+    {}
+
+    AttachmentReference( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    AttachmentReference& operator=( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReference const *>(&rhs);
+      return *this;
+    }
+
+    AttachmentReference & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachment = attachment_;
+      return *this;
+    }
+
+    AttachmentReference & setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layout = layout_;
+      return *this;
+    }
+
+    operator VkAttachmentReference const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAttachmentReference*>( this );
+    }
+
+    operator VkAttachmentReference &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAttachmentReference*>( this );
+    }
+
+    bool operator==( AttachmentReference const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( attachment == rhs.attachment )
+          && ( layout == rhs.layout );
+    }
+
+    bool operator!=( AttachmentReference const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint32_t attachment;
+    VULKAN_HPP_NAMESPACE::ImageLayout layout;
+  };
+  static_assert( sizeof( AttachmentReference ) == sizeof( VkAttachmentReference ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AttachmentReference>::value, "struct wrapper is not a standard layout!" );
+
+  struct AttachmentReference2KHR
+  {
+    VULKAN_HPP_CONSTEXPR AttachmentReference2KHR( uint32_t attachment_ = 0,
+                                                  VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+                                                  VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = VULKAN_HPP_NAMESPACE::ImageAspectFlags() ) VULKAN_HPP_NOEXCEPT
+      : attachment( attachment_ )
+      , layout( layout_ )
+      , aspectMask( aspectMask_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::AttachmentReference2KHR & operator=( VULKAN_HPP_NAMESPACE::AttachmentReference2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AttachmentReference2KHR ) - offsetof( AttachmentReference2KHR, pNext ) );
+      return *this;
+    }
+
+    AttachmentReference2KHR( VkAttachmentReference2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    AttachmentReference2KHR& operator=( VkAttachmentReference2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReference2KHR const *>(&rhs);
+      return *this;
+    }
+
+    AttachmentReference2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AttachmentReference2KHR & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachment = attachment_;
+      return *this;
+    }
+
+    AttachmentReference2KHR & setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layout = layout_;
+      return *this;
+    }
+
+    AttachmentReference2KHR & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+    operator VkAttachmentReference2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAttachmentReference2KHR*>( this );
+    }
+
+    operator VkAttachmentReference2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAttachmentReference2KHR*>( this );
+    }
+
+    bool operator==( AttachmentReference2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( attachment == rhs.attachment )
+          && ( layout == rhs.layout )
+          && ( aspectMask == rhs.aspectMask );
+    }
+
+    bool operator!=( AttachmentReference2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReference2KHR;
+    const void* pNext = nullptr;
+    uint32_t attachment;
+    VULKAN_HPP_NAMESPACE::ImageLayout layout;
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask;
+  };
+  static_assert( sizeof( AttachmentReference2KHR ) == sizeof( VkAttachmentReference2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AttachmentReference2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct AttachmentReferenceStencilLayoutKHR
+  {
+    VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayoutKHR( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
+      : stencilLayout( stencilLayout_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayoutKHR & operator=( VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayoutKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayoutKHR ) - offsetof( AttachmentReferenceStencilLayoutKHR, pNext ) );
+      return *this;
+    }
+
+    AttachmentReferenceStencilLayoutKHR( VkAttachmentReferenceStencilLayoutKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    AttachmentReferenceStencilLayoutKHR& operator=( VkAttachmentReferenceStencilLayoutKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayoutKHR const *>(&rhs);
+      return *this;
+    }
+
+    AttachmentReferenceStencilLayoutKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AttachmentReferenceStencilLayoutKHR & setStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilLayout = stencilLayout_;
+      return *this;
+    }
+
+    operator VkAttachmentReferenceStencilLayoutKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAttachmentReferenceStencilLayoutKHR*>( this );
+    }
+
+    operator VkAttachmentReferenceStencilLayoutKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAttachmentReferenceStencilLayoutKHR*>( this );
+    }
+
+    bool operator==( AttachmentReferenceStencilLayoutKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( stencilLayout == rhs.stencilLayout );
+    }
+
+    bool operator!=( AttachmentReferenceStencilLayoutKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReferenceStencilLayoutKHR;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout;
+  };
+  static_assert( sizeof( AttachmentReferenceStencilLayoutKHR ) == sizeof( VkAttachmentReferenceStencilLayoutKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AttachmentReferenceStencilLayoutKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct Extent2D
+  {
+    VULKAN_HPP_CONSTEXPR Extent2D( uint32_t width_ = 0,
+                                   uint32_t height_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : width( width_ )
+      , height( height_ )
+    {}
+
+    Extent2D( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    Extent2D& operator=( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent2D const *>(&rhs);
+      return *this;
+    }
+
+    Extent2D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    Extent2D & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+
+    operator VkExtent2D const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExtent2D*>( this );
+    }
+
+    operator VkExtent2D &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExtent2D*>( this );
+    }
+
+    bool operator==( Extent2D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( width == rhs.width )
+          && ( height == rhs.height );
+    }
+
+    bool operator!=( Extent2D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint32_t width;
+    uint32_t height;
+  };
+  static_assert( sizeof( Extent2D ) == sizeof( VkExtent2D ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Extent2D>::value, "struct wrapper is not a standard layout!" );
+
+  struct SampleLocationEXT
+  {
+    VULKAN_HPP_CONSTEXPR SampleLocationEXT( float x_ = 0,
+                                            float y_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : x( x_ )
+      , y( y_ )
+    {}
+
+    SampleLocationEXT( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SampleLocationEXT& operator=( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SampleLocationEXT const *>(&rhs);
+      return *this;
+    }
+
+    SampleLocationEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    SampleLocationEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+    operator VkSampleLocationEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSampleLocationEXT*>( this );
+    }
+
+    operator VkSampleLocationEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSampleLocationEXT*>( this );
+    }
+
+    bool operator==( SampleLocationEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( x == rhs.x )
+          && ( y == rhs.y );
+    }
+
+    bool operator!=( SampleLocationEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    float x;
+    float y;
+  };
+  static_assert( sizeof( SampleLocationEXT ) == sizeof( VkSampleLocationEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SampleLocationEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct SampleLocationsInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+                                                 VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ = VULKAN_HPP_NAMESPACE::Extent2D(),
+                                                 uint32_t sampleLocationsCount_ = 0,
+                                                 const VULKAN_HPP_NAMESPACE::SampleLocationEXT* pSampleLocations_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : sampleLocationsPerPixel( sampleLocationsPerPixel_ )
+      , sampleLocationGridSize( sampleLocationGridSize_ )
+      , sampleLocationsCount( sampleLocationsCount_ )
+      , pSampleLocations( pSampleLocations_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & operator=( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT ) - offsetof( SampleLocationsInfoEXT, pNext ) );
+      return *this;
+    }
+
+    SampleLocationsInfoEXT( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SampleLocationsInfoEXT& operator=( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    SampleLocationsInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SampleLocationsInfoEXT & setSampleLocationsPerPixel( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationsPerPixel = sampleLocationsPerPixel_;
+      return *this;
+    }
+
+    SampleLocationsInfoEXT & setSampleLocationGridSize( VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationGridSize = sampleLocationGridSize_;
+      return *this;
+    }
+
+    SampleLocationsInfoEXT & setSampleLocationsCount( uint32_t sampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationsCount = sampleLocationsCount_;
+      return *this;
+    }
+
+    SampleLocationsInfoEXT & setPSampleLocations( const VULKAN_HPP_NAMESPACE::SampleLocationEXT* pSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSampleLocations = pSampleLocations_;
+      return *this;
+    }
+
+    operator VkSampleLocationsInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSampleLocationsInfoEXT*>( this );
+    }
+
+    operator VkSampleLocationsInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSampleLocationsInfoEXT*>( this );
+    }
+
+    bool operator==( SampleLocationsInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( sampleLocationsPerPixel == rhs.sampleLocationsPerPixel )
+          && ( sampleLocationGridSize == rhs.sampleLocationGridSize )
+          && ( sampleLocationsCount == rhs.sampleLocationsCount )
+          && ( pSampleLocations == rhs.pSampleLocations );
+    }
+
+    bool operator!=( SampleLocationsInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSampleLocationsInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel;
+    VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize;
+    uint32_t sampleLocationsCount;
+    const VULKAN_HPP_NAMESPACE::SampleLocationEXT* pSampleLocations;
+  };
+  static_assert( sizeof( SampleLocationsInfoEXT ) == sizeof( VkSampleLocationsInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SampleLocationsInfoEXT>::value, "struct wrapper is not a standard layout!" );
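+
+  // Illustrative usage sketch (not prescriptive; names such as `locations` are
+  // placeholders): the wrappers above are meant to be filled through their
+  // chained setters and then handed to C code via the implicit conversion to
+  // the corresponding Vk struct, e.g.
+  //
+  //   std::array<VULKAN_HPP_NAMESPACE::SampleLocationEXT, 4> locations = {
+  //     VULKAN_HPP_NAMESPACE::SampleLocationEXT( 0.25f, 0.25f ),
+  //     VULKAN_HPP_NAMESPACE::SampleLocationEXT( 0.75f, 0.25f ),
+  //     VULKAN_HPP_NAMESPACE::SampleLocationEXT( 0.25f, 0.75f ),
+  //     VULKAN_HPP_NAMESPACE::SampleLocationEXT( 0.75f, 0.75f ) };
+  //   VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT info;
+  //   info.setSampleLocationsPerPixel( VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e4 )
+  //       .setSampleLocationGridSize( VULKAN_HPP_NAMESPACE::Extent2D( 1, 1 ) )
+  //       .setSampleLocationsCount( static_cast<uint32_t>( locations.size() ) )
+  //       .setPSampleLocations( locations.data() );
+  //   const VkSampleLocationsInfoEXT & native = info;  // implicit conversion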
+
+  struct AttachmentSampleLocationsEXT
+  {
+    VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( uint32_t attachmentIndex_ = 0,
+                                                       VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT() ) VULKAN_HPP_NOEXCEPT
+      : attachmentIndex( attachmentIndex_ )
+      , sampleLocationsInfo( sampleLocationsInfo_ )
+    {}
+
+    AttachmentSampleLocationsEXT( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    AttachmentSampleLocationsEXT& operator=( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT const *>(&rhs);
+      return *this;
+    }
+
+    AttachmentSampleLocationsEXT & setAttachmentIndex( uint32_t attachmentIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentIndex = attachmentIndex_;
+      return *this;
+    }
+
+    AttachmentSampleLocationsEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationsInfo = sampleLocationsInfo_;
+      return *this;
+    }
+
+    operator VkAttachmentSampleLocationsEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAttachmentSampleLocationsEXT*>( this );
+    }
+
+    operator VkAttachmentSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAttachmentSampleLocationsEXT*>( this );
+    }
+
+    bool operator==( AttachmentSampleLocationsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( attachmentIndex == rhs.attachmentIndex )
+          && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
+    }
+
+    bool operator!=( AttachmentSampleLocationsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint32_t attachmentIndex;
+    VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo;
+  };
+  static_assert( sizeof( AttachmentSampleLocationsEXT ) == sizeof( VkAttachmentSampleLocationsEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AttachmentSampleLocationsEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct BaseInStructure
+  {
+    BaseInStructure() VULKAN_HPP_NOEXCEPT
+    {}
+
+    BaseInStructure( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    BaseInStructure& operator=( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BaseInStructure const *>(&rhs);
+      return *this;
+    }
+
+    BaseInStructure & setPNext( const struct VULKAN_HPP_NAMESPACE::BaseInStructure* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    operator VkBaseInStructure const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBaseInStructure*>( this );
+    }
+
+    operator VkBaseInStructure &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBaseInStructure*>( this );
+    }
+
+    bool operator==( BaseInStructure const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext );
+    }
+
+    bool operator!=( BaseInStructure const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType;
+    const struct VULKAN_HPP_NAMESPACE::BaseInStructure* pNext = nullptr;
+  };
+  static_assert( sizeof( BaseInStructure ) == sizeof( VkBaseInStructure ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BaseInStructure>::value, "struct wrapper is not a standard layout!" );
+
+  struct BaseOutStructure
+  {
+    BaseOutStructure() VULKAN_HPP_NOEXCEPT
+    {}
+
+    BaseOutStructure( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    BaseOutStructure& operator=( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BaseOutStructure const *>(&rhs);
+      return *this;
+    }
+
+    BaseOutStructure & setPNext( struct VULKAN_HPP_NAMESPACE::BaseOutStructure* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    operator VkBaseOutStructure const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBaseOutStructure*>( this );
+    }
+
+    operator VkBaseOutStructure &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBaseOutStructure*>( this );
+    }
+
+    bool operator==( BaseOutStructure const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext );
+    }
+
+    bool operator!=( BaseOutStructure const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType;
+    struct VULKAN_HPP_NAMESPACE::BaseOutStructure* pNext = nullptr;
+  };
+  static_assert( sizeof( BaseOutStructure ) == sizeof( VkBaseOutStructure ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BaseOutStructure>::value, "struct wrapper is not a standard layout!" );
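+
+  // Illustrative sketch: BaseInStructure / BaseOutStructure mirror the C base
+  // types used to walk a generic pNext chain; any sType-bearing wrapper can be
+  // viewed as one for inspection. This is the usual Vulkan chain-walking
+  // pattern, shown here with a struct defined above purely as an example:
+  //
+  //   VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT info;
+  //   auto node = reinterpret_cast<const VULKAN_HPP_NAMESPACE::BaseInStructure*>( &info );
+  //   while ( node )
+  //   {
+  //     // node->sType identifies the structure; node->pNext advances the chain
+  //     node = node->pNext;
+  //   }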
+
+  struct BindAccelerationStructureMemoryInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = VULKAN_HPP_NAMESPACE::AccelerationStructureNV(),
+                                                                VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = VULKAN_HPP_NAMESPACE::DeviceMemory(),
+                                                                VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = 0,
+                                                                uint32_t deviceIndexCount_ = 0,
+                                                                const uint32_t* pDeviceIndices_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : accelerationStructure( accelerationStructure_ )
+      , memory( memory_ )
+      , memoryOffset( memoryOffset_ )
+      , deviceIndexCount( deviceIndexCount_ )
+      , pDeviceIndices( pDeviceIndices_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV & operator=( VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV ) - offsetof( BindAccelerationStructureMemoryInfoNV, pNext ) );
+      return *this;
+    }
+
+    BindAccelerationStructureMemoryInfoNV( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    BindAccelerationStructureMemoryInfoNV& operator=( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    BindAccelerationStructureMemoryInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BindAccelerationStructureMemoryInfoNV & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructure = accelerationStructure_;
+      return *this;
+    }
+
+    BindAccelerationStructureMemoryInfoNV & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    BindAccelerationStructureMemoryInfoNV & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryOffset = memoryOffset_;
+      return *this;
+    }
+
+    BindAccelerationStructureMemoryInfoNV & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceIndexCount = deviceIndexCount_;
+      return *this;
+    }
+
+    BindAccelerationStructureMemoryInfoNV & setPDeviceIndices( const uint32_t* pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDeviceIndices = pDeviceIndices_;
+      return *this;
+    }
+
+    operator VkBindAccelerationStructureMemoryInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV*>( this );
+    }
+
+    operator VkBindAccelerationStructureMemoryInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindAccelerationStructureMemoryInfoNV*>( this );
+    }
+
+    bool operator==( BindAccelerationStructureMemoryInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( accelerationStructure == rhs.accelerationStructure )
+          && ( memory == rhs.memory )
+          && ( memoryOffset == rhs.memoryOffset )
+          && ( deviceIndexCount == rhs.deviceIndexCount )
+          && ( pDeviceIndices == rhs.pDeviceIndices );
+    }
+
+    bool operator!=( BindAccelerationStructureMemoryInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindAccelerationStructureMemoryInfoNV;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory;
+    VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset;
+    uint32_t deviceIndexCount;
+    const uint32_t* pDeviceIndices;
+  };
+  static_assert( sizeof( BindAccelerationStructureMemoryInfoNV ) == sizeof( VkBindAccelerationStructureMemoryInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BindAccelerationStructureMemoryInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct BindBufferMemoryDeviceGroupInfo
+  {
+    VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = 0,
+                                                          const uint32_t* pDeviceIndices_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : deviceIndexCount( deviceIndexCount_ )
+      , pDeviceIndices( pDeviceIndices_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo & operator=( VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo ) - offsetof( BindBufferMemoryDeviceGroupInfo, pNext ) );
+      return *this;
+    }
+
+    BindBufferMemoryDeviceGroupInfo( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    BindBufferMemoryDeviceGroupInfo& operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo const *>(&rhs);
+      return *this;
+    }
+
+    BindBufferMemoryDeviceGroupInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BindBufferMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceIndexCount = deviceIndexCount_;
+      return *this;
+    }
+
+    BindBufferMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t* pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDeviceIndices = pDeviceIndices_;
+      return *this;
+    }
+
+    operator VkBindBufferMemoryDeviceGroupInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindBufferMemoryDeviceGroupInfo*>( this );
+    }
+
+    operator VkBindBufferMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindBufferMemoryDeviceGroupInfo*>( this );
+    }
+
+    bool operator==( BindBufferMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceIndexCount == rhs.deviceIndexCount )
+          && ( pDeviceIndices == rhs.pDeviceIndices );
+    }
+
+    bool operator!=( BindBufferMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryDeviceGroupInfo;
+    const void* pNext = nullptr;
+    uint32_t deviceIndexCount;
+    const uint32_t* pDeviceIndices;
+  };
+  static_assert( sizeof( BindBufferMemoryDeviceGroupInfo ) == sizeof( VkBindBufferMemoryDeviceGroupInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BindBufferMemoryDeviceGroupInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct BindBufferMemoryInfo
+  {
+    VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = VULKAN_HPP_NAMESPACE::Buffer(),
+                                               VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = VULKAN_HPP_NAMESPACE::DeviceMemory(),
+                                               VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : buffer( buffer_ )
+      , memory( memory_ )
+      , memoryOffset( memoryOffset_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo & operator=( VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo ) - offsetof( BindBufferMemoryInfo, pNext ) );
+      return *this;
+    }
+
+    BindBufferMemoryInfo( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    BindBufferMemoryInfo& operator=( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo const *>(&rhs);
+      return *this;
+    }
+
+    BindBufferMemoryInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BindBufferMemoryInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    BindBufferMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    BindBufferMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryOffset = memoryOffset_;
+      return *this;
+    }
+
+    operator VkBindBufferMemoryInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindBufferMemoryInfo*>( this );
+    }
+
+    operator VkBindBufferMemoryInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindBufferMemoryInfo*>( this );
+    }
+
+    bool operator==( BindBufferMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( buffer == rhs.buffer )
+          && ( memory == rhs.memory )
+          && ( memoryOffset == rhs.memoryOffset );
+    }
+
+    bool operator!=( BindBufferMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Buffer buffer;
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory;
+    VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset;
+  };
+  static_assert( sizeof( BindBufferMemoryInfo ) == sizeof( VkBindBufferMemoryInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BindBufferMemoryInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct Offset2D
+  {
+    VULKAN_HPP_CONSTEXPR Offset2D( int32_t x_ = 0,
+                                   int32_t y_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : x( x_ )
+      , y( y_ )
+    {}
+
+    Offset2D( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    Offset2D& operator=( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset2D const *>(&rhs);
+      return *this;
+    }
+
+    Offset2D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    Offset2D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+    operator VkOffset2D const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkOffset2D*>( this );
+    }
+
+    operator VkOffset2D &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkOffset2D*>( this );
+    }
+
+    bool operator==( Offset2D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( x == rhs.x )
+          && ( y == rhs.y );
+    }
+
+    bool operator!=( Offset2D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    int32_t x;
+    int32_t y;
+  };
+  static_assert( sizeof( Offset2D ) == sizeof( VkOffset2D ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Offset2D>::value, "struct wrapper is not a standard layout!" );
+
+  struct Rect2D
+  {
+    VULKAN_HPP_CONSTEXPR Rect2D( VULKAN_HPP_NAMESPACE::Offset2D offset_ = VULKAN_HPP_NAMESPACE::Offset2D(),
+                                 VULKAN_HPP_NAMESPACE::Extent2D extent_ = VULKAN_HPP_NAMESPACE::Extent2D() ) VULKAN_HPP_NOEXCEPT
+      : offset( offset_ )
+      , extent( extent_ )
+    {}
+
+    Rect2D( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    Rect2D& operator=( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Rect2D const *>(&rhs);
+      return *this;
+    }
+
+    Rect2D & setOffset( VULKAN_HPP_NAMESPACE::Offset2D offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    Rect2D & setExtent( VULKAN_HPP_NAMESPACE::Extent2D extent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extent = extent_;
+      return *this;
+    }
+
+    operator VkRect2D const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRect2D*>( this );
+    }
+
+    operator VkRect2D &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRect2D*>( this );
+    }
+
+    bool operator==( Rect2D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( offset == rhs.offset )
+          && ( extent == rhs.extent );
+    }
+
+    bool operator!=( Rect2D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::Offset2D offset;
+    VULKAN_HPP_NAMESPACE::Extent2D extent;
+  };
+  static_assert( sizeof( Rect2D ) == sizeof( VkRect2D ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Rect2D>::value, "struct wrapper is not a standard layout!" );
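+
+  // Illustrative sketch: Rect2D composes the Offset2D and Extent2D wrappers
+  // above, so a scissor or render-area rectangle can be built via the
+  // constructor or the chained setters and passed to C code through the
+  // implicit VkRect2D conversion. The values below are placeholders:
+  //
+  //   VULKAN_HPP_NAMESPACE::Rect2D renderArea(
+  //     VULKAN_HPP_NAMESPACE::Offset2D( 0, 0 ),
+  //     VULKAN_HPP_NAMESPACE::Extent2D( 1920, 1080 ) );
+  //   VkRect2D nativeRect = renderArea;  // same layout, see static_asserts above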
+
+  struct BindImageMemoryDeviceGroupInfo
+  {
+    VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = 0,
+                                                         const uint32_t* pDeviceIndices_ = nullptr,
+                                                         uint32_t splitInstanceBindRegionCount_ = 0,
+                                                         const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : deviceIndexCount( deviceIndexCount_ )
+      , pDeviceIndices( pDeviceIndices_ )
+      , splitInstanceBindRegionCount( splitInstanceBindRegionCount_ )
+      , pSplitInstanceBindRegions( pSplitInstanceBindRegions_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo & operator=( VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo ) - offsetof( BindImageMemoryDeviceGroupInfo, pNext ) );
+      return *this;
+    }
+
+    BindImageMemoryDeviceGroupInfo( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    BindImageMemoryDeviceGroupInfo& operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo const *>(&rhs);
+      return *this;
+    }
+
+    BindImageMemoryDeviceGroupInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BindImageMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceIndexCount = deviceIndexCount_;
+      return *this;
+    }
+
+    BindImageMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t* pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDeviceIndices = pDeviceIndices_;
+      return *this;
+    }
+
+    BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      splitInstanceBindRegionCount = splitInstanceBindRegionCount_;
+      return *this;
+    }
+
+    BindImageMemoryDeviceGroupInfo & setPSplitInstanceBindRegions( const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSplitInstanceBindRegions = pSplitInstanceBindRegions_;
+      return *this;
+    }
+
+    operator VkBindImageMemoryDeviceGroupInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindImageMemoryDeviceGroupInfo*>( this );
+    }
+
+    operator VkBindImageMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindImageMemoryDeviceGroupInfo*>( this );
+    }
+
+    bool operator==( BindImageMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceIndexCount == rhs.deviceIndexCount )
+          && ( pDeviceIndices == rhs.pDeviceIndices )
+          && ( splitInstanceBindRegionCount == rhs.splitInstanceBindRegionCount )
+          && ( pSplitInstanceBindRegions == rhs.pSplitInstanceBindRegions );
+    }
+
+    bool operator!=( BindImageMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryDeviceGroupInfo;
+    const void* pNext = nullptr;
+    uint32_t deviceIndexCount;
+    const uint32_t* pDeviceIndices;
+    uint32_t splitInstanceBindRegionCount;
+    const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions;
+  };
+  static_assert( sizeof( BindImageMemoryDeviceGroupInfo ) == sizeof( VkBindImageMemoryDeviceGroupInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BindImageMemoryDeviceGroupInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct BindImageMemoryInfo
+  {
+    VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( VULKAN_HPP_NAMESPACE::Image image_ = VULKAN_HPP_NAMESPACE::Image(),
+                                              VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = VULKAN_HPP_NAMESPACE::DeviceMemory(),
+                                              VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : image( image_ )
+      , memory( memory_ )
+      , memoryOffset( memoryOffset_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::BindImageMemoryInfo & operator=( VULKAN_HPP_NAMESPACE::BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindImageMemoryInfo ) - offsetof( BindImageMemoryInfo, pNext ) );
+      return *this;
+    }
+
+    BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    BindImageMemoryInfo& operator=( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo const *>(&rhs);
+      return *this;
+    }
+
+    BindImageMemoryInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BindImageMemoryInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    BindImageMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    BindImageMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryOffset = memoryOffset_;
+      return *this;
+    }
+
+    operator VkBindImageMemoryInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindImageMemoryInfo*>( this );
+    }
+
+    operator VkBindImageMemoryInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindImageMemoryInfo*>( this );
+    }
+
+    bool operator==( BindImageMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( image == rhs.image )
+          && ( memory == rhs.memory )
+          && ( memoryOffset == rhs.memoryOffset );
+    }
+
+    bool operator!=( BindImageMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Image image;
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory;
+    VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset;
+  };
+  static_assert( sizeof( BindImageMemoryInfo ) == sizeof( VkBindImageMemoryInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BindImageMemoryInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct BindImageMemorySwapchainInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = VULKAN_HPP_NAMESPACE::SwapchainKHR(),
+                                                          uint32_t imageIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : swapchain( swapchain_ )
+      , imageIndex( imageIndex_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR & operator=( VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR ) - offsetof( BindImageMemorySwapchainInfoKHR, pNext ) );
+      return *this;
+    }
+
+    BindImageMemorySwapchainInfoKHR( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    BindImageMemorySwapchainInfoKHR& operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    BindImageMemorySwapchainInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BindImageMemorySwapchainInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchain = swapchain_;
+      return *this;
+    }
+
+    BindImageMemorySwapchainInfoKHR & setImageIndex( uint32_t imageIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageIndex = imageIndex_;
+      return *this;
+    }
+
+    operator VkBindImageMemorySwapchainInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindImageMemorySwapchainInfoKHR*>( this );
+    }
+
+    operator VkBindImageMemorySwapchainInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindImageMemorySwapchainInfoKHR*>( this );
+    }
+
+    bool operator==( BindImageMemorySwapchainInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchain == rhs.swapchain )
+          && ( imageIndex == rhs.imageIndex );
+    }
+
+    bool operator!=( BindImageMemorySwapchainInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemorySwapchainInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
+    uint32_t imageIndex;
+  };
+  static_assert( sizeof( BindImageMemorySwapchainInfoKHR ) == sizeof( VkBindImageMemorySwapchainInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BindImageMemorySwapchainInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct BindImagePlaneMemoryInfo
+  {
+    VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT
+      : planeAspect( planeAspect_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo & operator=( VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo ) - offsetof( BindImagePlaneMemoryInfo, pNext ) );
+      return *this;
+    }
+
+    BindImagePlaneMemoryInfo( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    BindImagePlaneMemoryInfo& operator=( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo const *>(&rhs);
+      return *this;
+    }
+
+    BindImagePlaneMemoryInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BindImagePlaneMemoryInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
+    {
+      planeAspect = planeAspect_;
+      return *this;
+    }
+
+    operator VkBindImagePlaneMemoryInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindImagePlaneMemoryInfo*>( this );
+    }
+
+    operator VkBindImagePlaneMemoryInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindImagePlaneMemoryInfo*>( this );
+    }
+
+    bool operator==( BindImagePlaneMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( planeAspect == rhs.planeAspect );
+    }
+
+    bool operator!=( BindImagePlaneMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImagePlaneMemoryInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect;
+  };
+  static_assert( sizeof( BindImagePlaneMemoryInfo ) == sizeof( VkBindImagePlaneMemoryInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BindImagePlaneMemoryInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct SparseMemoryBind
+  {
+    VULKAN_HPP_CONSTEXPR SparseMemoryBind( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ = 0,
+                                           VULKAN_HPP_NAMESPACE::DeviceSize size_ = 0,
+                                           VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = VULKAN_HPP_NAMESPACE::DeviceMemory(),
+                                           VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = 0,
+                                           VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags() ) VULKAN_HPP_NOEXCEPT
+      : resourceOffset( resourceOffset_ )
+      , size( size_ )
+      , memory( memory_ )
+      , memoryOffset( memoryOffset_ )
+      , flags( flags_ )
+    {}
+
+    SparseMemoryBind( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SparseMemoryBind& operator=( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseMemoryBind const *>(&rhs);
+      return *this;
+    }
+
+    SparseMemoryBind & setResourceOffset( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      resourceOffset = resourceOffset_;
+      return *this;
+    }
+
+    SparseMemoryBind & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    SparseMemoryBind & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    SparseMemoryBind & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryOffset = memoryOffset_;
+      return *this;
+    }
+
+    SparseMemoryBind & setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    operator VkSparseMemoryBind const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSparseMemoryBind*>( this );
+    }
+
+    operator VkSparseMemoryBind &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSparseMemoryBind*>( this );
+    }
+
+    bool operator==( SparseMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( resourceOffset == rhs.resourceOffset )
+          && ( size == rhs.size )
+          && ( memory == rhs.memory )
+          && ( memoryOffset == rhs.memoryOffset )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( SparseMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset;
+    VULKAN_HPP_NAMESPACE::DeviceSize size;
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory;
+    VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset;
+    VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags;
+  };
+  static_assert( sizeof( SparseMemoryBind ) == sizeof( VkSparseMemoryBind ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SparseMemoryBind>::value, "struct wrapper is not a standard layout!" );
+
+  struct SparseBufferMemoryBindInfo
+  {
+    VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = VULKAN_HPP_NAMESPACE::Buffer(),
+                                                     uint32_t bindCount_ = 0,
+                                                     const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : buffer( buffer_ )
+      , bindCount( bindCount_ )
+      , pBinds( pBinds_ )
+    {}
+
+    SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SparseBufferMemoryBindInfo& operator=( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo const *>(&rhs);
+      return *this;
+    }
+
+    SparseBufferMemoryBindInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    SparseBufferMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bindCount = bindCount_;
+      return *this;
+    }
+
+    SparseBufferMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pBinds = pBinds_;
+      return *this;
+    }
+
+    operator VkSparseBufferMemoryBindInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSparseBufferMemoryBindInfo*>( this );
+    }
+
+    operator VkSparseBufferMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSparseBufferMemoryBindInfo*>( this );
+    }
+
+    bool operator==( SparseBufferMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( buffer == rhs.buffer )
+          && ( bindCount == rhs.bindCount )
+          && ( pBinds == rhs.pBinds );
+    }
+
+    bool operator!=( SparseBufferMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::Buffer buffer;
+    uint32_t bindCount;
+    const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds;
+  };
+  static_assert( sizeof( SparseBufferMemoryBindInfo ) == sizeof( VkSparseBufferMemoryBindInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SparseBufferMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct SparseImageOpaqueMemoryBindInfo
+  {
+    VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = VULKAN_HPP_NAMESPACE::Image(),
+                                                          uint32_t bindCount_ = 0,
+                                                          const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : image( image_ )
+      , bindCount( bindCount_ )
+      , pBinds( pBinds_ )
+    {}
+
+    SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SparseImageOpaqueMemoryBindInfo& operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo const *>(&rhs);
+      return *this;
+    }
+
+    SparseImageOpaqueMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    SparseImageOpaqueMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bindCount = bindCount_;
+      return *this;
+    }
+
+    SparseImageOpaqueMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pBinds = pBinds_;
+      return *this;
+    }
+
+    operator VkSparseImageOpaqueMemoryBindInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSparseImageOpaqueMemoryBindInfo*>( this );
+    }
+
+    operator VkSparseImageOpaqueMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSparseImageOpaqueMemoryBindInfo*>( this );
+    }
+
+    bool operator==( SparseImageOpaqueMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( image == rhs.image )
+          && ( bindCount == rhs.bindCount )
+          && ( pBinds == rhs.pBinds );
+    }
+
+    bool operator!=( SparseImageOpaqueMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::Image image;
+    uint32_t bindCount;
+    const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds;
+  };
+  static_assert( sizeof( SparseImageOpaqueMemoryBindInfo ) == sizeof( VkSparseImageOpaqueMemoryBindInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SparseImageOpaqueMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageSubresource
+  {
+    VULKAN_HPP_CONSTEXPR ImageSubresource( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = VULKAN_HPP_NAMESPACE::ImageAspectFlags(),
+                                           uint32_t mipLevel_ = 0,
+                                           uint32_t arrayLayer_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : aspectMask( aspectMask_ )
+      , mipLevel( mipLevel_ )
+      , arrayLayer( arrayLayer_ )
+    {}
+
+    ImageSubresource( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageSubresource& operator=( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresource const *>(&rhs);
+      return *this;
+    }
+
+    ImageSubresource & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+    ImageSubresource & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mipLevel = mipLevel_;
+      return *this;
+    }
+
+    ImageSubresource & setArrayLayer( uint32_t arrayLayer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      arrayLayer = arrayLayer_;
+      return *this;
+    }
+
+    operator VkImageSubresource const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageSubresource*>( this );
+    }
+
+    operator VkImageSubresource &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageSubresource*>( this );
+    }
+
+    bool operator==( ImageSubresource const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( aspectMask == rhs.aspectMask )
+          && ( mipLevel == rhs.mipLevel )
+          && ( arrayLayer == rhs.arrayLayer );
+    }
+
+    bool operator!=( ImageSubresource const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask;
+    uint32_t mipLevel;
+    uint32_t arrayLayer;
+  };
+  static_assert( sizeof( ImageSubresource ) == sizeof( VkImageSubresource ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageSubresource>::value, "struct wrapper is not a standard layout!" );
+
+  struct Offset3D
+  {
+    VULKAN_HPP_CONSTEXPR Offset3D( int32_t x_ = 0,
+                                   int32_t y_ = 0,
+                                   int32_t z_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : x( x_ )
+      , y( y_ )
+      , z( z_ )
+    {}
+
+    explicit Offset3D( Offset2D const& offset2D,
+                       int32_t z_ = 0 )
+      : x( offset2D.x )
+      , y( offset2D.y )
+      , z( z_ )
+    {}
+
+    Offset3D( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    Offset3D& operator=( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset3D const *>(&rhs);
+      return *this;
+    }
+
+    Offset3D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    Offset3D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+    Offset3D & setZ( int32_t z_ ) VULKAN_HPP_NOEXCEPT
+    {
+      z = z_;
+      return *this;
+    }
+
+    operator VkOffset3D const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkOffset3D*>( this );
+    }
+
+    operator VkOffset3D &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkOffset3D*>( this );
+    }
+
+    bool operator==( Offset3D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( x == rhs.x )
+          && ( y == rhs.y )
+          && ( z == rhs.z );
+    }
+
+    bool operator!=( Offset3D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    int32_t x;
+    int32_t y;
+    int32_t z;
+  };
+  static_assert( sizeof( Offset3D ) == sizeof( VkOffset3D ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Offset3D>::value, "struct wrapper is not a standard layout!" );
+
+  struct Extent3D
+  {
+    VULKAN_HPP_CONSTEXPR Extent3D( uint32_t width_ = 0,
+                                   uint32_t height_ = 0,
+                                   uint32_t depth_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : width( width_ )
+      , height( height_ )
+      , depth( depth_ )
+    {}
+
+    explicit Extent3D( Extent2D const& extent2D,
+                       uint32_t depth_ = 0 )
+      : width( extent2D.width )
+      , height( extent2D.height )
+      , depth( depth_ )
+    {}
+
+    Extent3D( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    Extent3D& operator=( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent3D const *>(&rhs);
+      return *this;
+    }
+
+    Extent3D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    Extent3D & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+
+    Extent3D & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depth = depth_;
+      return *this;
+    }
+
+    operator VkExtent3D const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExtent3D*>( this );
+    }
+
+    operator VkExtent3D &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExtent3D*>( this );
+    }
+
+    bool operator==( Extent3D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( width == rhs.width )
+          && ( height == rhs.height )
+          && ( depth == rhs.depth );
+    }
+
+    bool operator!=( Extent3D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint32_t width;
+    uint32_t height;
+    uint32_t depth;
+  };
+  static_assert( sizeof( Extent3D ) == sizeof( VkExtent3D ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Extent3D>::value, "struct wrapper is not a standard layout!" );
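+
+  // Illustrative sketch: besides the member-wise constructors, Offset3D and
+  // Extent3D provide explicit promoting constructors from their 2D
+  // counterparts, convenient when extending a 2D region to a single slice.
+  // The values below are placeholders:
+  //
+  //   VULKAN_HPP_NAMESPACE::Extent2D base( 256, 256 );
+  //   VULKAN_HPP_NAMESPACE::Extent3D volume( base, 1 );                            // 256 x 256 x 1
+  //   VULKAN_HPP_NAMESPACE::Offset3D origin( VULKAN_HPP_NAMESPACE::Offset2D( 0, 0 ), 0 );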
+
+  struct SparseImageMemoryBind
+  {
+    VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ = VULKAN_HPP_NAMESPACE::ImageSubresource(),
+                                                VULKAN_HPP_NAMESPACE::Offset3D offset_ = VULKAN_HPP_NAMESPACE::Offset3D(),
+                                                VULKAN_HPP_NAMESPACE::Extent3D extent_ = VULKAN_HPP_NAMESPACE::Extent3D(),
+                                                VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = VULKAN_HPP_NAMESPACE::DeviceMemory(),
+                                                VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = 0,
+                                                VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags() ) VULKAN_HPP_NOEXCEPT
+      : subresource( subresource_ )
+      , offset( offset_ )
+      , extent( extent_ )
+      , memory( memory_ )
+      , memoryOffset( memoryOffset_ )
+      , flags( flags_ )
+    {}
+
+    SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SparseImageMemoryBind& operator=( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryBind const *>(&rhs);
+      return *this;
+    }
+
+    SparseImageMemoryBind & setSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subresource = subresource_;
+      return *this;
+    }
+
+    SparseImageMemoryBind & setOffset( VULKAN_HPP_NAMESPACE::Offset3D offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    SparseImageMemoryBind & setExtent( VULKAN_HPP_NAMESPACE::Extent3D extent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extent = extent_;
+      return *this;
+    }
+
+    SparseImageMemoryBind & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    SparseImageMemoryBind & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryOffset = memoryOffset_;
+      return *this;
+    }
+
+    SparseImageMemoryBind & setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    operator VkSparseImageMemoryBind const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSparseImageMemoryBind*>( this );
+    }
+
+    operator VkSparseImageMemoryBind &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSparseImageMemoryBind*>( this );
+    }
+
+    bool operator==( SparseImageMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( subresource == rhs.subresource )
+          && ( offset == rhs.offset )
+          && ( extent == rhs.extent )
+          && ( memory == rhs.memory )
+          && ( memoryOffset == rhs.memoryOffset )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( SparseImageMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageSubresource subresource;
+    VULKAN_HPP_NAMESPACE::Offset3D offset;
+    VULKAN_HPP_NAMESPACE::Extent3D extent;
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory;
+    VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset;
+    VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags;
+  };
+  static_assert( sizeof( SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SparseImageMemoryBind>::value, "struct wrapper is not a standard layout!" );
+
+  struct SparseImageMemoryBindInfo
+  {
+    VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = VULKAN_HPP_NAMESPACE::Image(),
+                                                    uint32_t bindCount_ = 0,
+                                                    const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : image( image_ )
+      , bindCount( bindCount_ )
+      , pBinds( pBinds_ )
+    {}
+
+    SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SparseImageMemoryBindInfo& operator=( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo const *>(&rhs);
+      return *this;
+    }
+
+    SparseImageMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    SparseImageMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bindCount = bindCount_;
+      return *this;
+    }
+
+    SparseImageMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pBinds = pBinds_;
+      return *this;
+    }
+
+    operator VkSparseImageMemoryBindInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSparseImageMemoryBindInfo*>( this );
+    }
+
+    operator VkSparseImageMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSparseImageMemoryBindInfo*>( this );
+    }
+
+    bool operator==( SparseImageMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( image == rhs.image )
+          && ( bindCount == rhs.bindCount )
+          && ( pBinds == rhs.pBinds );
+    }
+
+    bool operator!=( SparseImageMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::Image image;
+    uint32_t bindCount;
+    const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds;
+  };
+  static_assert( sizeof( SparseImageMemoryBindInfo ) == sizeof( VkSparseImageMemoryBindInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SparseImageMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct BindSparseInfo
+  {
+    VULKAN_HPP_CONSTEXPR BindSparseInfo( uint32_t waitSemaphoreCount_ = 0,
+                                         const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = nullptr,
+                                         uint32_t bufferBindCount_ = 0,
+                                         const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo* pBufferBinds_ = nullptr,
+                                         uint32_t imageOpaqueBindCount_ = 0,
+                                         const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ = nullptr,
+                                         uint32_t imageBindCount_ = 0,
+                                         const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds_ = nullptr,
+                                         uint32_t signalSemaphoreCount_ = 0,
+                                         const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : waitSemaphoreCount( waitSemaphoreCount_ )
+      , pWaitSemaphores( pWaitSemaphores_ )
+      , bufferBindCount( bufferBindCount_ )
+      , pBufferBinds( pBufferBinds_ )
+      , imageOpaqueBindCount( imageOpaqueBindCount_ )
+      , pImageOpaqueBinds( pImageOpaqueBinds_ )
+      , imageBindCount( imageBindCount_ )
+      , pImageBinds( pImageBinds_ )
+      , signalSemaphoreCount( signalSemaphoreCount_ )
+      , pSignalSemaphores( pSignalSemaphores_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::BindSparseInfo & operator=( VULKAN_HPP_NAMESPACE::BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindSparseInfo ) - offsetof( BindSparseInfo, pNext ) );
+      return *this;
+    }
+
+    BindSparseInfo( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    BindSparseInfo& operator=( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindSparseInfo const *>(&rhs);
+      return *this;
+    }
+
+    BindSparseInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BindSparseInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreCount = waitSemaphoreCount_;
+      return *this;
+    }
+
+    BindSparseInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pWaitSemaphores = pWaitSemaphores_;
+      return *this;
+    }
+
+    BindSparseInfo & setBufferBindCount( uint32_t bufferBindCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferBindCount = bufferBindCount_;
+      return *this;
+    }
+
+    BindSparseInfo & setPBufferBinds( const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo* pBufferBinds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pBufferBinds = pBufferBinds_;
+      return *this;
+    }
+
+    BindSparseInfo & setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageOpaqueBindCount = imageOpaqueBindCount_;
+      return *this;
+    }
+
+    BindSparseInfo & setPImageOpaqueBinds( const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pImageOpaqueBinds = pImageOpaqueBinds_;
+      return *this;
+    }
+
+    BindSparseInfo & setImageBindCount( uint32_t imageBindCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageBindCount = imageBindCount_;
+      return *this;
+    }
+
+    BindSparseInfo & setPImageBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pImageBinds = pImageBinds_;
+      return *this;
+    }
+
+    BindSparseInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreCount = signalSemaphoreCount_;
+      return *this;
+    }
+
+    BindSparseInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSignalSemaphores = pSignalSemaphores_;
+      return *this;
+    }
+
+    operator VkBindSparseInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindSparseInfo*>( this );
+    }
+
+    operator VkBindSparseInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindSparseInfo*>( this );
+    }
+
+    bool operator==( BindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+          && ( pWaitSemaphores == rhs.pWaitSemaphores )
+          && ( bufferBindCount == rhs.bufferBindCount )
+          && ( pBufferBinds == rhs.pBufferBinds )
+          && ( imageOpaqueBindCount == rhs.imageOpaqueBindCount )
+          && ( pImageOpaqueBinds == rhs.pImageOpaqueBinds )
+          && ( imageBindCount == rhs.imageBindCount )
+          && ( pImageBinds == rhs.pImageBinds )
+          && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
+          && ( pSignalSemaphores == rhs.pSignalSemaphores );
+    }
+
+    bool operator!=( BindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindSparseInfo;
+    const void* pNext = nullptr;
+    uint32_t waitSemaphoreCount;
+    const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores;
+    uint32_t bufferBindCount;
+    const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo* pBufferBinds;
+    uint32_t imageOpaqueBindCount;
+    const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds;
+    uint32_t imageBindCount;
+    const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds;
+    uint32_t signalSemaphoreCount;
+    const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores;
+  };
+  static_assert( sizeof( BindSparseInfo ) == sizeof( VkBindSparseInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BindSparseInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct BufferCopy
+  {
+    VULKAN_HPP_CONSTEXPR BufferCopy( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = 0,
+                                     VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = 0,
+                                     VULKAN_HPP_NAMESPACE::DeviceSize size_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : srcOffset( srcOffset_ )
+      , dstOffset( dstOffset_ )
+      , size( size_ )
+    {}
+
+    BufferCopy( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    BufferCopy& operator=( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCopy const *>(&rhs);
+      return *this;
+    }
+
+    BufferCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcOffset = srcOffset_;
+      return *this;
+    }
+
+    BufferCopy & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstOffset = dstOffset_;
+      return *this;
+    }
+
+    BufferCopy & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    operator VkBufferCopy const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferCopy*>( this );
+    }
+
+    operator VkBufferCopy &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferCopy*>( this );
+    }
+
+    bool operator==( BufferCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( srcOffset == rhs.srcOffset )
+          && ( dstOffset == rhs.dstOffset )
+          && ( size == rhs.size );
+    }
+
+    bool operator!=( BufferCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceSize srcOffset;
+    VULKAN_HPP_NAMESPACE::DeviceSize dstOffset;
+    VULKAN_HPP_NAMESPACE::DeviceSize size;
+  };
+  static_assert( sizeof( BufferCopy ) == sizeof( VkBufferCopy ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BufferCopy>::value, "struct wrapper is not a standard layout!" );
+
+  struct BufferCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = VULKAN_HPP_NAMESPACE::BufferCreateFlags(),
+                                           VULKAN_HPP_NAMESPACE::DeviceSize size_ = 0,
+                                           VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = VULKAN_HPP_NAMESPACE::BufferUsageFlags(),
+                                           VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
+                                           uint32_t queueFamilyIndexCount_ = 0,
+                                           const uint32_t* pQueueFamilyIndices_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , size( size_ )
+      , usage( usage_ )
+      , sharingMode( sharingMode_ )
+      , queueFamilyIndexCount( queueFamilyIndexCount_ )
+      , pQueueFamilyIndices( pQueueFamilyIndices_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::BufferCreateInfo & operator=( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BufferCreateInfo ) - offsetof( BufferCreateInfo, pNext ) );
+      return *this;
+    }
+
+    BufferCreateInfo( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    BufferCreateInfo& operator=( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    BufferCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    BufferCreateInfo & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    BufferCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    BufferCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sharingMode = sharingMode_;
+      return *this;
+    }
+
+    BufferCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndexCount = queueFamilyIndexCount_;
+      return *this;
+    }
+
+    BufferCreateInfo & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueueFamilyIndices = pQueueFamilyIndices_;
+      return *this;
+    }
+
+    operator VkBufferCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferCreateInfo*>( this );
+    }
+
+    operator VkBufferCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferCreateInfo*>( this );
+    }
+
+    bool operator==( BufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( size == rhs.size )
+          && ( usage == rhs.usage )
+          && ( sharingMode == rhs.sharingMode )
+          && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
+          && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
+    }
+
+    bool operator!=( BufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::BufferCreateFlags flags;
+    VULKAN_HPP_NAMESPACE::DeviceSize size;
+    VULKAN_HPP_NAMESPACE::BufferUsageFlags usage;
+    VULKAN_HPP_NAMESPACE::SharingMode sharingMode;
+    uint32_t queueFamilyIndexCount;
+    const uint32_t* pQueueFamilyIndices;
+  };
+  static_assert( sizeof( BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct BufferDeviceAddressCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : deviceAddress( deviceAddress_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT ) - offsetof( BufferDeviceAddressCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    BufferDeviceAddressCreateInfoEXT( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    BufferDeviceAddressCreateInfoEXT& operator=( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    BufferDeviceAddressCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BufferDeviceAddressCreateInfoEXT & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceAddress = deviceAddress_;
+      return *this;
+    }
+
+    operator VkBufferDeviceAddressCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferDeviceAddressCreateInfoEXT*>( this );
+    }
+
+    operator VkBufferDeviceAddressCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferDeviceAddressCreateInfoEXT*>( this );
+    }
+
+    bool operator==( BufferDeviceAddressCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceAddress == rhs.deviceAddress );
+    }
+
+    bool operator!=( BufferDeviceAddressCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressCreateInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress;
+  };
+  static_assert( sizeof( BufferDeviceAddressCreateInfoEXT ) == sizeof( VkBufferDeviceAddressCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BufferDeviceAddressCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct BufferDeviceAddressInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfoKHR( VULKAN_HPP_NAMESPACE::Buffer buffer_ = VULKAN_HPP_NAMESPACE::Buffer() ) VULKAN_HPP_NOEXCEPT
+      : buffer( buffer_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfoKHR & operator=( VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfoKHR ) - offsetof( BufferDeviceAddressInfoKHR, pNext ) );
+      return *this;
+    }
+
+    BufferDeviceAddressInfoKHR( VkBufferDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    BufferDeviceAddressInfoKHR& operator=( VkBufferDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    BufferDeviceAddressInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BufferDeviceAddressInfoKHR & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    operator VkBufferDeviceAddressInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferDeviceAddressInfoKHR*>( this );
+    }
+
+    operator VkBufferDeviceAddressInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferDeviceAddressInfoKHR*>( this );
+    }
+
+    bool operator==( BufferDeviceAddressInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( buffer == rhs.buffer );
+    }
+
+    bool operator!=( BufferDeviceAddressInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Buffer buffer;
+  };
+  static_assert( sizeof( BufferDeviceAddressInfoKHR ) == sizeof( VkBufferDeviceAddressInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BufferDeviceAddressInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageSubresourceLayers
+  {
+    VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = VULKAN_HPP_NAMESPACE::ImageAspectFlags(),
+                                                 uint32_t mipLevel_ = 0,
+                                                 uint32_t baseArrayLayer_ = 0,
+                                                 uint32_t layerCount_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : aspectMask( aspectMask_ )
+      , mipLevel( mipLevel_ )
+      , baseArrayLayer( baseArrayLayer_ )
+      , layerCount( layerCount_ )
+    {}
+
+    ImageSubresourceLayers( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageSubresourceLayers& operator=( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const *>(&rhs);
+      return *this;
+    }
+
+    ImageSubresourceLayers & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+    ImageSubresourceLayers & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mipLevel = mipLevel_;
+      return *this;
+    }
+
+    ImageSubresourceLayers & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      baseArrayLayer = baseArrayLayer_;
+      return *this;
+    }
+
+    ImageSubresourceLayers & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layerCount = layerCount_;
+      return *this;
+    }
+
+    operator VkImageSubresourceLayers const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageSubresourceLayers*>( this );
+    }
+
+    operator VkImageSubresourceLayers &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageSubresourceLayers*>( this );
+    }
+
+    bool operator==( ImageSubresourceLayers const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( aspectMask == rhs.aspectMask )
+          && ( mipLevel == rhs.mipLevel )
+          && ( baseArrayLayer == rhs.baseArrayLayer )
+          && ( layerCount == rhs.layerCount );
+    }
+
+    bool operator!=( ImageSubresourceLayers const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask;
+    uint32_t mipLevel;
+    uint32_t baseArrayLayer;
+    uint32_t layerCount;
+  };
+  static_assert( sizeof( ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageSubresourceLayers>::value, "struct wrapper is not a standard layout!" );
+
+  struct BufferImageCopy
+  {
+    VULKAN_HPP_CONSTEXPR BufferImageCopy( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = 0,
+                                          uint32_t bufferRowLength_ = 0,
+                                          uint32_t bufferImageHeight_ = 0,
+                                          VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = VULKAN_HPP_NAMESPACE::ImageSubresourceLayers(),
+                                          VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = VULKAN_HPP_NAMESPACE::Offset3D(),
+                                          VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = VULKAN_HPP_NAMESPACE::Extent3D() ) VULKAN_HPP_NOEXCEPT
+      : bufferOffset( bufferOffset_ )
+      , bufferRowLength( bufferRowLength_ )
+      , bufferImageHeight( bufferImageHeight_ )
+      , imageSubresource( imageSubresource_ )
+      , imageOffset( imageOffset_ )
+      , imageExtent( imageExtent_ )
+    {}
+
+    BufferImageCopy( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    BufferImageCopy& operator=( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferImageCopy const *>(&rhs);
+      return *this;
+    }
+
+    BufferImageCopy & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferOffset = bufferOffset_;
+      return *this;
+    }
+
+    BufferImageCopy & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferRowLength = bufferRowLength_;
+      return *this;
+    }
+
+    BufferImageCopy & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferImageHeight = bufferImageHeight_;
+      return *this;
+    }
+
+    BufferImageCopy & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageSubresource = imageSubresource_;
+      return *this;
+    }
+
+    BufferImageCopy & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageOffset = imageOffset_;
+      return *this;
+    }
+
+    BufferImageCopy & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageExtent = imageExtent_;
+      return *this;
+    }
+
+    operator VkBufferImageCopy const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferImageCopy*>( this );
+    }
+
+    operator VkBufferImageCopy &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferImageCopy*>( this );
+    }
+
+    bool operator==( BufferImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( bufferOffset == rhs.bufferOffset )
+          && ( bufferRowLength == rhs.bufferRowLength )
+          && ( bufferImageHeight == rhs.bufferImageHeight )
+          && ( imageSubresource == rhs.imageSubresource )
+          && ( imageOffset == rhs.imageOffset )
+          && ( imageExtent == rhs.imageExtent );
+    }
+
+    bool operator!=( BufferImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset;
+    uint32_t bufferRowLength;
+    uint32_t bufferImageHeight;
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource;
+    VULKAN_HPP_NAMESPACE::Offset3D imageOffset;
+    VULKAN_HPP_NAMESPACE::Extent3D imageExtent;
+  };
+  static_assert( sizeof( BufferImageCopy ) == sizeof( VkBufferImageCopy ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BufferImageCopy>::value, "struct wrapper is not a standard layout!" );
+
+  struct BufferMemoryBarrier
+  {
+    VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = VULKAN_HPP_NAMESPACE::AccessFlags(),
+                                              VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = VULKAN_HPP_NAMESPACE::AccessFlags(),
+                                              uint32_t srcQueueFamilyIndex_ = 0,
+                                              uint32_t dstQueueFamilyIndex_ = 0,
+                                              VULKAN_HPP_NAMESPACE::Buffer buffer_ = VULKAN_HPP_NAMESPACE::Buffer(),
+                                              VULKAN_HPP_NAMESPACE::DeviceSize offset_ = 0,
+                                              VULKAN_HPP_NAMESPACE::DeviceSize size_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : srcAccessMask( srcAccessMask_ )
+      , dstAccessMask( dstAccessMask_ )
+      , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
+      , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
+      , buffer( buffer_ )
+      , offset( offset_ )
+      , size( size_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::BufferMemoryBarrier & operator=( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier ) - offsetof( BufferMemoryBarrier, pNext ) );
+      return *this;
+    }
+
+    BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    BufferMemoryBarrier& operator=( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier const *>(&rhs);
+      return *this;
+    }
+
+    BufferMemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BufferMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    BufferMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+
+    BufferMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcQueueFamilyIndex = srcQueueFamilyIndex_;
+      return *this;
+    }
+
+    BufferMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstQueueFamilyIndex = dstQueueFamilyIndex_;
+      return *this;
+    }
+
+    BufferMemoryBarrier & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    BufferMemoryBarrier & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    BufferMemoryBarrier & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    operator VkBufferMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferMemoryBarrier*>( this );
+    }
+
+    operator VkBufferMemoryBarrier &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferMemoryBarrier*>( this );
+    }
+
+    bool operator==( BufferMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstAccessMask == rhs.dstAccessMask )
+          && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
+          && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
+          && ( buffer == rhs.buffer )
+          && ( offset == rhs.offset )
+          && ( size == rhs.size );
+    }
+
+    bool operator!=( BufferMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask;
+    VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask;
+    uint32_t srcQueueFamilyIndex;
+    uint32_t dstQueueFamilyIndex;
+    VULKAN_HPP_NAMESPACE::Buffer buffer;
+    VULKAN_HPP_NAMESPACE::DeviceSize offset;
+    VULKAN_HPP_NAMESPACE::DeviceSize size;
+  };
+  static_assert( sizeof( BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BufferMemoryBarrier>::value, "struct wrapper is not a standard layout!" );
+
+  struct BufferMemoryRequirementsInfo2
+  {
+    VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Buffer buffer_ = VULKAN_HPP_NAMESPACE::Buffer() ) VULKAN_HPP_NOEXCEPT
+      : buffer( buffer_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & operator=( VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 ) - offsetof( BufferMemoryRequirementsInfo2, pNext ) );
+      return *this;
+    }
+
+    BufferMemoryRequirementsInfo2( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    BufferMemoryRequirementsInfo2& operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 const *>(&rhs);
+      return *this;
+    }
+
+    BufferMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BufferMemoryRequirementsInfo2 & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    operator VkBufferMemoryRequirementsInfo2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( this );
+    }
+
+    operator VkBufferMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferMemoryRequirementsInfo2*>( this );
+    }
+
+    bool operator==( BufferMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( buffer == rhs.buffer );
+    }
+
+    bool operator!=( BufferMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryRequirementsInfo2;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Buffer buffer;
+  };
+  static_assert( sizeof( BufferMemoryRequirementsInfo2 ) == sizeof( VkBufferMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BufferMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
+
+  struct BufferOpaqueCaptureAddressCreateInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfoKHR( uint64_t opaqueCaptureAddress_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : opaqueCaptureAddress( opaqueCaptureAddress_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfoKHR ) - offsetof( BufferOpaqueCaptureAddressCreateInfoKHR, pNext ) );
+      return *this;
+    }
+
+    BufferOpaqueCaptureAddressCreateInfoKHR( VkBufferOpaqueCaptureAddressCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    BufferOpaqueCaptureAddressCreateInfoKHR& operator=( VkBufferOpaqueCaptureAddressCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    BufferOpaqueCaptureAddressCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BufferOpaqueCaptureAddressCreateInfoKHR & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      opaqueCaptureAddress = opaqueCaptureAddress_;
+      return *this;
+    }
+
+    operator VkBufferOpaqueCaptureAddressCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferOpaqueCaptureAddressCreateInfoKHR*>( this );
+    }
+
+    operator VkBufferOpaqueCaptureAddressCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferOpaqueCaptureAddressCreateInfoKHR*>( this );
+    }
+
+    bool operator==( BufferOpaqueCaptureAddressCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress );
+    }
+
+    bool operator!=( BufferOpaqueCaptureAddressCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferOpaqueCaptureAddressCreateInfoKHR;
+    const void* pNext = nullptr;
+    uint64_t opaqueCaptureAddress;
+  };
+  static_assert( sizeof( BufferOpaqueCaptureAddressCreateInfoKHR ) == sizeof( VkBufferOpaqueCaptureAddressCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BufferOpaqueCaptureAddressCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct BufferViewCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ = VULKAN_HPP_NAMESPACE::BufferViewCreateFlags(),
+                                               VULKAN_HPP_NAMESPACE::Buffer buffer_ = VULKAN_HPP_NAMESPACE::Buffer(),
+                                               VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                               VULKAN_HPP_NAMESPACE::DeviceSize offset_ = 0,
+                                               VULKAN_HPP_NAMESPACE::DeviceSize range_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , buffer( buffer_ )
+      , format( format_ )
+      , offset( offset_ )
+      , range( range_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & operator=( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo ) - offsetof( BufferViewCreateInfo, pNext ) );
+      return *this;
+    }
+
+    BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    BufferViewCreateInfo& operator=( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    BufferViewCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BufferViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    BufferViewCreateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    BufferViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    BufferViewCreateInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    BufferViewCreateInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
+    {
+      range = range_;
+      return *this;
+    }
+
+    operator VkBufferViewCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferViewCreateInfo*>( this );
+    }
+
+    operator VkBufferViewCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferViewCreateInfo*>( this );
+    }
+
+    bool operator==( BufferViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( buffer == rhs.buffer )
+          && ( format == rhs.format )
+          && ( offset == rhs.offset )
+          && ( range == rhs.range );
+    }
+
+    bool operator!=( BufferViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferViewCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags;
+    VULKAN_HPP_NAMESPACE::Buffer buffer;
+    VULKAN_HPP_NAMESPACE::Format format;
+    VULKAN_HPP_NAMESPACE::DeviceSize offset;
+    VULKAN_HPP_NAMESPACE::DeviceSize range;
+  };
+  static_assert( sizeof( BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BufferViewCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct CalibratedTimestampInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoEXT( VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice ) VULKAN_HPP_NOEXCEPT
+      : timeDomain( timeDomain_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT & operator=( VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT ) - offsetof( CalibratedTimestampInfoEXT, pNext ) );
+      return *this;
+    }
+
+    CalibratedTimestampInfoEXT( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    CalibratedTimestampInfoEXT& operator=( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    CalibratedTimestampInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CalibratedTimestampInfoEXT & setTimeDomain( VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ ) VULKAN_HPP_NOEXCEPT
+    {
+      timeDomain = timeDomain_;
+      return *this;
+    }
+
+    operator VkCalibratedTimestampInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCalibratedTimestampInfoEXT*>( this );
+    }
+
+    operator VkCalibratedTimestampInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCalibratedTimestampInfoEXT*>( this );
+    }
+
+    bool operator==( CalibratedTimestampInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( timeDomain == rhs.timeDomain );
+    }
+
+    bool operator!=( CalibratedTimestampInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCalibratedTimestampInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain;
+  };
+  static_assert( sizeof( CalibratedTimestampInfoEXT ) == sizeof( VkCalibratedTimestampInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CalibratedTimestampInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct CheckpointDataNV
+  {
+    CheckpointDataNV( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe,
+                      void* pCheckpointMarker_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : stage( stage_ )
+      , pCheckpointMarker( pCheckpointMarker_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::CheckpointDataNV & operator=( VULKAN_HPP_NAMESPACE::CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CheckpointDataNV ) - offsetof( CheckpointDataNV, pNext ) );
+      return *this;
+    }
+
+    CheckpointDataNV( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    CheckpointDataNV& operator=( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CheckpointDataNV const *>(&rhs);
+      return *this;
+    }
+
+    operator VkCheckpointDataNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCheckpointDataNV*>( this );
+    }
+
+    operator VkCheckpointDataNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCheckpointDataNV*>( this );
+    }
+
+    bool operator==( CheckpointDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( stage == rhs.stage )
+          && ( pCheckpointMarker == rhs.pCheckpointMarker );
+    }
+
+    bool operator!=( CheckpointDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointDataNV;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage;
+    void* pCheckpointMarker;
+  };
+  static_assert( sizeof( CheckpointDataNV ) == sizeof( VkCheckpointDataNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CheckpointDataNV>::value, "struct wrapper is not a standard layout!" );
+
+  union ClearColorValue
+  {
+    ClearColorValue( const std::array<float,4>& float32_ = { { 0 } } )
+    {
+      memcpy( float32, float32_.data(), 4 * sizeof( float ) );
+    }
+
+    ClearColorValue( const std::array<int32_t,4>& int32_ )
+    {
+      memcpy( int32, int32_.data(), 4 * sizeof( int32_t ) );
+    }
+
+    ClearColorValue( const std::array<uint32_t,4>& uint32_ )
+    {
+      memcpy( uint32, uint32_.data(), 4 * sizeof( uint32_t ) );
+    }
+
+    ClearColorValue & setFloat32( std::array<float,4> float32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( float32, float32_.data(), 4 * sizeof( float ) );
+      return *this;
+    }
+
+    ClearColorValue & setInt32( std::array<int32_t,4> int32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( int32, int32_.data(), 4 * sizeof( int32_t ) );
+      return *this;
+    }
+
+    ClearColorValue & setUint32( std::array<uint32_t,4> uint32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( uint32, uint32_.data(), 4 * sizeof( uint32_t ) );
+      return *this;
+    }
+    operator VkClearColorValue const&() const
+    {
+      return *reinterpret_cast<const VkClearColorValue*>(this);
+    }
+
+    operator VkClearColorValue &()
+    {
+      return *reinterpret_cast<VkClearColorValue*>(this);
+    }
+
+    float float32[4];
+    int32_t int32[4];
+    uint32_t uint32[4];
+  };
+
+  struct ClearDepthStencilValue
+  {
+    VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( float depth_ = 0,
+                                                 uint32_t stencil_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : depth( depth_ )
+      , stencil( stencil_ )
+    {}
+
+    ClearDepthStencilValue( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ClearDepthStencilValue& operator=( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const *>(&rhs);
+      return *this;
+    }
+
+    ClearDepthStencilValue & setDepth( float depth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depth = depth_;
+      return *this;
+    }
+
+    ClearDepthStencilValue & setStencil( uint32_t stencil_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencil = stencil_;
+      return *this;
+    }
+
+    operator VkClearDepthStencilValue const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkClearDepthStencilValue*>( this );
+    }
+
+    operator VkClearDepthStencilValue &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkClearDepthStencilValue*>( this );
+    }
+
+    bool operator==( ClearDepthStencilValue const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( depth == rhs.depth )
+          && ( stencil == rhs.stencil );
+    }
+
+    bool operator!=( ClearDepthStencilValue const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    float depth;
+    uint32_t stencil;
+  };
+  static_assert( sizeof( ClearDepthStencilValue ) == sizeof( VkClearDepthStencilValue ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ClearDepthStencilValue>::value, "struct wrapper is not a standard layout!" );
+
+  union ClearValue
+  {
+    ClearValue( VULKAN_HPP_NAMESPACE::ClearColorValue color_ = VULKAN_HPP_NAMESPACE::ClearColorValue() )
+    {
+      color = color_;
+    }
+
+    ClearValue( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_ )
+    {
+      depthStencil = depthStencil_;
+    }
+
+    ClearValue & setColor( VULKAN_HPP_NAMESPACE::ClearColorValue color_ ) VULKAN_HPP_NOEXCEPT
+    {
+      color = color_;
+      return *this;
+    }
+
+    ClearValue & setDepthStencil( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthStencil = depthStencil_;
+      return *this;
+    }
+    operator VkClearValue const&() const
+    {
+      return *reinterpret_cast<const VkClearValue*>(this);
+    }
+
+    operator VkClearValue &()
+    {
+      return *reinterpret_cast<VkClearValue*>(this);
+    }
+
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+    VULKAN_HPP_NAMESPACE::ClearColorValue color;
+    VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil;
+#else
+    VkClearColorValue color;
+    VkClearDepthStencilValue depthStencil;
+#endif  /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+  };
+
+  struct ClearAttachment
+  {
+    ClearAttachment( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = VULKAN_HPP_NAMESPACE::ImageAspectFlags(),
+                     uint32_t colorAttachment_ = 0,
+                     VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = VULKAN_HPP_NAMESPACE::ClearValue() ) VULKAN_HPP_NOEXCEPT
+      : aspectMask( aspectMask_ )
+      , colorAttachment( colorAttachment_ )
+      , clearValue( clearValue_ )
+    {}
+
+    ClearAttachment( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ClearAttachment& operator=( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearAttachment const *>(&rhs);
+      return *this;
+    }
+
+    ClearAttachment & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+    ClearAttachment & setColorAttachment( uint32_t colorAttachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachment = colorAttachment_;
+      return *this;
+    }
+
+    ClearAttachment & setClearValue( VULKAN_HPP_NAMESPACE::ClearValue clearValue_ ) VULKAN_HPP_NOEXCEPT
+    {
+      clearValue = clearValue_;
+      return *this;
+    }
+
+    operator VkClearAttachment const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkClearAttachment*>( this );
+    }
+
+    operator VkClearAttachment &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkClearAttachment*>( this );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask;
+    uint32_t colorAttachment;
+    VULKAN_HPP_NAMESPACE::ClearValue clearValue;
+  };
+  static_assert( sizeof( ClearAttachment ) == sizeof( VkClearAttachment ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ClearAttachment>::value, "struct wrapper is not a standard layout!" );
+
+  struct ClearRect
+  {
+    VULKAN_HPP_CONSTEXPR ClearRect( VULKAN_HPP_NAMESPACE::Rect2D rect_ = VULKAN_HPP_NAMESPACE::Rect2D(),
+                                    uint32_t baseArrayLayer_ = 0,
+                                    uint32_t layerCount_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : rect( rect_ )
+      , baseArrayLayer( baseArrayLayer_ )
+      , layerCount( layerCount_ )
+    {}
+
+    ClearRect( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ClearRect& operator=( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearRect const *>(&rhs);
+      return *this;
+    }
+
+    ClearRect & setRect( VULKAN_HPP_NAMESPACE::Rect2D rect_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rect = rect_;
+      return *this;
+    }
+
+    ClearRect & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      baseArrayLayer = baseArrayLayer_;
+      return *this;
+    }
+
+    ClearRect & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layerCount = layerCount_;
+      return *this;
+    }
+
+    operator VkClearRect const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkClearRect*>( this );
+    }
+
+    operator VkClearRect &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkClearRect*>( this );
+    }
+
+    bool operator==( ClearRect const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( rect == rhs.rect )
+          && ( baseArrayLayer == rhs.baseArrayLayer )
+          && ( layerCount == rhs.layerCount );
+    }
+
+    bool operator!=( ClearRect const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::Rect2D rect;
+    uint32_t baseArrayLayer;
+    uint32_t layerCount;
+  };
+  static_assert( sizeof( ClearRect ) == sizeof( VkClearRect ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ClearRect>::value, "struct wrapper is not a standard layout!" );
+
+  struct IndirectCommandsTokenNVX
+  {
+    VULKAN_HPP_CONSTEXPR IndirectCommandsTokenNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX::ePipeline,
+                                                   VULKAN_HPP_NAMESPACE::Buffer buffer_ = VULKAN_HPP_NAMESPACE::Buffer(),
+                                                   VULKAN_HPP_NAMESPACE::DeviceSize offset_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : tokenType( tokenType_ )
+      , buffer( buffer_ )
+      , offset( offset_ )
+    {}
+
+    IndirectCommandsTokenNVX( VkIndirectCommandsTokenNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    IndirectCommandsTokenNVX& operator=( VkIndirectCommandsTokenNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsTokenNVX const *>(&rhs);
+      return *this;
+    }
+
+    IndirectCommandsTokenNVX & setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tokenType = tokenType_;
+      return *this;
+    }
+
+    IndirectCommandsTokenNVX & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    IndirectCommandsTokenNVX & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    operator VkIndirectCommandsTokenNVX const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkIndirectCommandsTokenNVX*>( this );
+    }
+
+    operator VkIndirectCommandsTokenNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkIndirectCommandsTokenNVX*>( this );
+    }
+
+    bool operator==( IndirectCommandsTokenNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( tokenType == rhs.tokenType )
+          && ( buffer == rhs.buffer )
+          && ( offset == rhs.offset );
+    }
+
+    bool operator!=( IndirectCommandsTokenNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType;
+    VULKAN_HPP_NAMESPACE::Buffer buffer;
+    VULKAN_HPP_NAMESPACE::DeviceSize offset;
+  };
+  static_assert( sizeof( IndirectCommandsTokenNVX ) == sizeof( VkIndirectCommandsTokenNVX ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<IndirectCommandsTokenNVX>::value, "struct wrapper is not a standard layout!" );
+
+  struct CmdProcessCommandsInfoNVX
+  {
+    VULKAN_HPP_CONSTEXPR CmdProcessCommandsInfoNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable_ = VULKAN_HPP_NAMESPACE::ObjectTableNVX(),
+                                                    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout_ = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX(),
+                                                    uint32_t indirectCommandsTokenCount_ = 0,
+                                                    const VULKAN_HPP_NAMESPACE::IndirectCommandsTokenNVX* pIndirectCommandsTokens_ = nullptr,
+                                                    uint32_t maxSequencesCount_ = 0,
+                                                    VULKAN_HPP_NAMESPACE::CommandBuffer targetCommandBuffer_ = VULKAN_HPP_NAMESPACE::CommandBuffer(),
+                                                    VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = VULKAN_HPP_NAMESPACE::Buffer(),
+                                                    VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = 0,
+                                                    VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = VULKAN_HPP_NAMESPACE::Buffer(),
+                                                    VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : objectTable( objectTable_ )
+      , indirectCommandsLayout( indirectCommandsLayout_ )
+      , indirectCommandsTokenCount( indirectCommandsTokenCount_ )
+      , pIndirectCommandsTokens( pIndirectCommandsTokens_ )
+      , maxSequencesCount( maxSequencesCount_ )
+      , targetCommandBuffer( targetCommandBuffer_ )
+      , sequencesCountBuffer( sequencesCountBuffer_ )
+      , sequencesCountOffset( sequencesCountOffset_ )
+      , sequencesIndexBuffer( sequencesIndexBuffer_ )
+      , sequencesIndexOffset( sequencesIndexOffset_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::CmdProcessCommandsInfoNVX & operator=( VULKAN_HPP_NAMESPACE::CmdProcessCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CmdProcessCommandsInfoNVX ) - offsetof( CmdProcessCommandsInfoNVX, pNext ) );
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX( VkCmdProcessCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    CmdProcessCommandsInfoNVX& operator=( VkCmdProcessCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CmdProcessCommandsInfoNVX const *>(&rhs);
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX & setObjectTable( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectTable = objectTable_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indirectCommandsLayout = indirectCommandsLayout_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX & setIndirectCommandsTokenCount( uint32_t indirectCommandsTokenCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indirectCommandsTokenCount = indirectCommandsTokenCount_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX & setPIndirectCommandsTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsTokenNVX* pIndirectCommandsTokens_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pIndirectCommandsTokens = pIndirectCommandsTokens_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX & setMaxSequencesCount( uint32_t maxSequencesCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxSequencesCount = maxSequencesCount_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX & setTargetCommandBuffer( VULKAN_HPP_NAMESPACE::CommandBuffer targetCommandBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      targetCommandBuffer = targetCommandBuffer_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX & setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sequencesCountBuffer = sequencesCountBuffer_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX & setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sequencesCountOffset = sequencesCountOffset_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX & setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sequencesIndexBuffer = sequencesIndexBuffer_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX & setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sequencesIndexOffset = sequencesIndexOffset_;
+      return *this;
+    }
+
+    operator VkCmdProcessCommandsInfoNVX const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>( this );
+    }
+
+    operator VkCmdProcessCommandsInfoNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCmdProcessCommandsInfoNVX*>( this );
+    }
+
+    bool operator==( CmdProcessCommandsInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectTable == rhs.objectTable )
+          && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
+          && ( indirectCommandsTokenCount == rhs.indirectCommandsTokenCount )
+          && ( pIndirectCommandsTokens == rhs.pIndirectCommandsTokens )
+          && ( maxSequencesCount == rhs.maxSequencesCount )
+          && ( targetCommandBuffer == rhs.targetCommandBuffer )
+          && ( sequencesCountBuffer == rhs.sequencesCountBuffer )
+          && ( sequencesCountOffset == rhs.sequencesCountOffset )
+          && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer )
+          && ( sequencesIndexOffset == rhs.sequencesIndexOffset );
+    }
+
+    bool operator!=( CmdProcessCommandsInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCmdProcessCommandsInfoNVX;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable;
+    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout;
+    uint32_t indirectCommandsTokenCount;
+    const VULKAN_HPP_NAMESPACE::IndirectCommandsTokenNVX* pIndirectCommandsTokens;
+    uint32_t maxSequencesCount;
+    VULKAN_HPP_NAMESPACE::CommandBuffer targetCommandBuffer;
+    VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer;
+    VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset;
+    VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer;
+    VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset;
+  };
+  static_assert( sizeof( CmdProcessCommandsInfoNVX ) == sizeof( VkCmdProcessCommandsInfoNVX ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CmdProcessCommandsInfoNVX>::value, "struct wrapper is not a standard layout!" );
+
+  struct CmdReserveSpaceForCommandsInfoNVX
+  {
+    VULKAN_HPP_CONSTEXPR CmdReserveSpaceForCommandsInfoNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable_ = VULKAN_HPP_NAMESPACE::ObjectTableNVX(),
+                                                            VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout_ = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX(),
+                                                            uint32_t maxSequencesCount_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : objectTable( objectTable_ )
+      , indirectCommandsLayout( indirectCommandsLayout_ )
+      , maxSequencesCount( maxSequencesCount_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::CmdReserveSpaceForCommandsInfoNVX & operator=( VULKAN_HPP_NAMESPACE::CmdReserveSpaceForCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CmdReserveSpaceForCommandsInfoNVX ) - offsetof( CmdReserveSpaceForCommandsInfoNVX, pNext ) );
+      return *this;
+    }
+
+    CmdReserveSpaceForCommandsInfoNVX( VkCmdReserveSpaceForCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    CmdReserveSpaceForCommandsInfoNVX& operator=( VkCmdReserveSpaceForCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CmdReserveSpaceForCommandsInfoNVX const *>(&rhs);
+      return *this;
+    }
+
+    CmdReserveSpaceForCommandsInfoNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CmdReserveSpaceForCommandsInfoNVX & setObjectTable( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectTable = objectTable_;
+      return *this;
+    }
+
+    CmdReserveSpaceForCommandsInfoNVX & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indirectCommandsLayout = indirectCommandsLayout_;
+      return *this;
+    }
+
+    CmdReserveSpaceForCommandsInfoNVX & setMaxSequencesCount( uint32_t maxSequencesCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxSequencesCount = maxSequencesCount_;
+      return *this;
+    }
+
+    operator VkCmdReserveSpaceForCommandsInfoNVX const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>( this );
+    }
+
+    operator VkCmdReserveSpaceForCommandsInfoNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCmdReserveSpaceForCommandsInfoNVX*>( this );
+    }
+
+    bool operator==( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectTable == rhs.objectTable )
+          && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
+          && ( maxSequencesCount == rhs.maxSequencesCount );
+    }
+
+    bool operator!=( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCmdReserveSpaceForCommandsInfoNVX;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable;
+    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout;
+    uint32_t maxSequencesCount;
+  };
+  static_assert( sizeof( CmdReserveSpaceForCommandsInfoNVX ) == sizeof( VkCmdReserveSpaceForCommandsInfoNVX ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CmdReserveSpaceForCommandsInfoNVX>::value, "struct wrapper is not a standard layout!" );
+
+  struct CoarseSampleLocationNV
+  {
+    VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( uint32_t pixelX_ = 0,
+                                                 uint32_t pixelY_ = 0,
+                                                 uint32_t sample_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : pixelX( pixelX_ )
+      , pixelY( pixelY_ )
+      , sample( sample_ )
+    {}
+
+    CoarseSampleLocationNV( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    CoarseSampleLocationNV& operator=( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV const *>(&rhs);
+      return *this;
+    }
+
+    CoarseSampleLocationNV & setPixelX( uint32_t pixelX_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pixelX = pixelX_;
+      return *this;
+    }
+
+    CoarseSampleLocationNV & setPixelY( uint32_t pixelY_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pixelY = pixelY_;
+      return *this;
+    }
+
+    CoarseSampleLocationNV & setSample( uint32_t sample_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sample = sample_;
+      return *this;
+    }
+
+    operator VkCoarseSampleLocationNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCoarseSampleLocationNV*>( this );
+    }
+
+    operator VkCoarseSampleLocationNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCoarseSampleLocationNV*>( this );
+    }
+
+    bool operator==( CoarseSampleLocationNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( pixelX == rhs.pixelX )
+          && ( pixelY == rhs.pixelY )
+          && ( sample == rhs.sample );
+    }
+
+    bool operator!=( CoarseSampleLocationNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint32_t pixelX;
+    uint32_t pixelY;
+    uint32_t sample;
+  };
+  static_assert( sizeof( CoarseSampleLocationNV ) == sizeof( VkCoarseSampleLocationNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CoarseSampleLocationNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct CoarseSampleOrderCustomNV
+  {
+    VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations,
+                                                    uint32_t sampleCount_ = 0,
+                                                    uint32_t sampleLocationCount_ = 0,
+                                                    const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* pSampleLocations_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : shadingRate( shadingRate_ )
+      , sampleCount( sampleCount_ )
+      , sampleLocationCount( sampleLocationCount_ )
+      , pSampleLocations( pSampleLocations_ )
+    {}
+
+    CoarseSampleOrderCustomNV( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    CoarseSampleOrderCustomNV& operator=( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV const *>(&rhs);
+      return *this;
+    }
+
+    CoarseSampleOrderCustomNV & setShadingRate( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRate = shadingRate_;
+      return *this;
+    }
+
+    CoarseSampleOrderCustomNV & setSampleCount( uint32_t sampleCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleCount = sampleCount_;
+      return *this;
+    }
+
+    CoarseSampleOrderCustomNV & setSampleLocationCount( uint32_t sampleLocationCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationCount = sampleLocationCount_;
+      return *this;
+    }
+
+    CoarseSampleOrderCustomNV & setPSampleLocations( const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* pSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSampleLocations = pSampleLocations_;
+      return *this;
+    }
+
+    operator VkCoarseSampleOrderCustomNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCoarseSampleOrderCustomNV*>( this );
+    }
+
+    operator VkCoarseSampleOrderCustomNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCoarseSampleOrderCustomNV*>( this );
+    }
+
+    bool operator==( CoarseSampleOrderCustomNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( shadingRate == rhs.shadingRate )
+          && ( sampleCount == rhs.sampleCount )
+          && ( sampleLocationCount == rhs.sampleLocationCount )
+          && ( pSampleLocations == rhs.pSampleLocations );
+    }
+
+    bool operator!=( CoarseSampleOrderCustomNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate;
+    uint32_t sampleCount;
+    uint32_t sampleLocationCount;
+    const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* pSampleLocations;
+  };
+  static_assert( sizeof( CoarseSampleOrderCustomNV ) == sizeof( VkCoarseSampleOrderCustomNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CoarseSampleOrderCustomNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct CommandBufferAllocateInfo
+  {
+    VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ = VULKAN_HPP_NAMESPACE::CommandPool(),
+                                                    VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary,
+                                                    uint32_t commandBufferCount_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : commandPool( commandPool_ )
+      , level( level_ )
+      , commandBufferCount( commandBufferCount_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & operator=( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo ) - offsetof( CommandBufferAllocateInfo, pNext ) );
+      return *this;
+    }
+
+    CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    CommandBufferAllocateInfo& operator=( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const *>(&rhs);
+      return *this;
+    }
+
+    CommandBufferAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CommandBufferAllocateInfo & setCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ ) VULKAN_HPP_NOEXCEPT
+    {
+      commandPool = commandPool_;
+      return *this;
+    }
+
+    CommandBufferAllocateInfo & setLevel( VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ ) VULKAN_HPP_NOEXCEPT
+    {
+      level = level_;
+      return *this;
+    }
+
+    CommandBufferAllocateInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      commandBufferCount = commandBufferCount_;
+      return *this;
+    }
+
+    operator VkCommandBufferAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCommandBufferAllocateInfo*>( this );
+    }
+
+    operator VkCommandBufferAllocateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCommandBufferAllocateInfo*>( this );
+    }
+
+    bool operator==( CommandBufferAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( commandPool == rhs.commandPool )
+          && ( level == rhs.level )
+          && ( commandBufferCount == rhs.commandBufferCount );
+    }
+
+    bool operator!=( CommandBufferAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferAllocateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::CommandPool commandPool;
+    VULKAN_HPP_NAMESPACE::CommandBufferLevel level;
+    uint32_t commandBufferCount;
+  };
+  static_assert( sizeof( CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CommandBufferAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct CommandBufferInheritanceInfo
+  {
+    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = VULKAN_HPP_NAMESPACE::RenderPass(),
+                                                       uint32_t subpass_ = 0,
+                                                       VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = VULKAN_HPP_NAMESPACE::Framebuffer(),
+                                                       VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ = 0,
+                                                       VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ = VULKAN_HPP_NAMESPACE::QueryControlFlags(),
+                                                       VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags() ) VULKAN_HPP_NOEXCEPT
+      : renderPass( renderPass_ )
+      , subpass( subpass_ )
+      , framebuffer( framebuffer_ )
+      , occlusionQueryEnable( occlusionQueryEnable_ )
+      , queryFlags( queryFlags_ )
+      , pipelineStatistics( pipelineStatistics_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo & operator=( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo ) - offsetof( CommandBufferInheritanceInfo, pNext ) );
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    CommandBufferInheritanceInfo& operator=( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo const *>(&rhs);
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      renderPass = renderPass_;
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpass = subpass_;
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      framebuffer = framebuffer_;
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo & setOcclusionQueryEnable( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      occlusionQueryEnable = occlusionQueryEnable_;
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo & setQueryFlags( VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queryFlags = queryFlags_;
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo & setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineStatistics = pipelineStatistics_;
+      return *this;
+    }
+
+    operator VkCommandBufferInheritanceInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCommandBufferInheritanceInfo*>( this );
+    }
+
+    operator VkCommandBufferInheritanceInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCommandBufferInheritanceInfo*>( this );
+    }
+
+    bool operator==( CommandBufferInheritanceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( renderPass == rhs.renderPass )
+          && ( subpass == rhs.subpass )
+          && ( framebuffer == rhs.framebuffer )
+          && ( occlusionQueryEnable == rhs.occlusionQueryEnable )
+          && ( queryFlags == rhs.queryFlags )
+          && ( pipelineStatistics == rhs.pipelineStatistics );
+    }
+
+    bool operator!=( CommandBufferInheritanceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    uint32_t subpass;
+    VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
+    VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable;
+    VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags;
+    VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics;
+  };
+  static_assert( sizeof( CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CommandBufferInheritanceInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct CommandBufferBeginInfo
+  {
+    VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ = VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags(),
+                                                 const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo* pInheritanceInfo_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , pInheritanceInfo( pInheritanceInfo_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & operator=( VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo ) - offsetof( CommandBufferBeginInfo, pNext ) );
+      return *this;
+    }
+
+    CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    CommandBufferBeginInfo& operator=( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo const *>(&rhs);
+      return *this;
+    }
+
+    CommandBufferBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CommandBufferBeginInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    CommandBufferBeginInfo & setPInheritanceInfo( const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo* pInheritanceInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInheritanceInfo = pInheritanceInfo_;
+      return *this;
+    }
+
+    operator VkCommandBufferBeginInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCommandBufferBeginInfo*>( this );
+    }
+
+    operator VkCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCommandBufferBeginInfo*>( this );
+    }
+
+    bool operator==( CommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pInheritanceInfo == rhs.pInheritanceInfo );
+    }
+
+    bool operator!=( CommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferBeginInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags;
+    const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo* pInheritanceInfo;
+  };
+  static_assert( sizeof( CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CommandBufferBeginInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct CommandBufferInheritanceConditionalRenderingInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : conditionalRenderingEnable( conditionalRenderingEnable_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT & operator=( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT ) - offsetof( CommandBufferInheritanceConditionalRenderingInfoEXT, pNext ) );
+      return *this;
+    }
+
+    CommandBufferInheritanceConditionalRenderingInfoEXT( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    CommandBufferInheritanceConditionalRenderingInfoEXT& operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    CommandBufferInheritanceConditionalRenderingInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CommandBufferInheritanceConditionalRenderingInfoEXT & setConditionalRenderingEnable( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      conditionalRenderingEnable = conditionalRenderingEnable_;
+      return *this;
+    }
+
+    operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCommandBufferInheritanceConditionalRenderingInfoEXT*>( this );
+    }
+
+    operator VkCommandBufferInheritanceConditionalRenderingInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCommandBufferInheritanceConditionalRenderingInfoEXT*>( this );
+    }
+
+    bool operator==( CommandBufferInheritanceConditionalRenderingInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( conditionalRenderingEnable == rhs.conditionalRenderingEnable );
+    }
+
+    bool operator!=( CommandBufferInheritanceConditionalRenderingInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable;
+  };
+  static_assert( sizeof( CommandBufferInheritanceConditionalRenderingInfoEXT ) == sizeof( VkCommandBufferInheritanceConditionalRenderingInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CommandBufferInheritanceConditionalRenderingInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct CommandPoolCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ = VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags(),
+                                                uint32_t queueFamilyIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , queueFamilyIndex( queueFamilyIndex_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & operator=( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo ) - offsetof( CommandPoolCreateInfo, pNext ) );
+      return *this;
+    }
+
+    CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    CommandPoolCreateInfo& operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    CommandPoolCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CommandPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    CommandPoolCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndex = queueFamilyIndex_;
+      return *this;
+    }
+
+    operator VkCommandPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCommandPoolCreateInfo*>( this );
+    }
+
+    operator VkCommandPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCommandPoolCreateInfo*>( this );
+    }
+
+    bool operator==( CommandPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( queueFamilyIndex == rhs.queueFamilyIndex );
+    }
+
+    bool operator!=( CommandPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandPoolCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags;
+    uint32_t queueFamilyIndex;
+  };
+  static_assert( sizeof( CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CommandPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct SpecializationMapEntry
+  {
+    VULKAN_HPP_CONSTEXPR SpecializationMapEntry( uint32_t constantID_ = 0,
+                                                 uint32_t offset_ = 0,
+                                                 size_t size_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : constantID( constantID_ )
+      , offset( offset_ )
+      , size( size_ )
+    {}
+
+    SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SpecializationMapEntry& operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationMapEntry const *>(&rhs);
+      return *this;
+    }
+
+    SpecializationMapEntry & setConstantID( uint32_t constantID_ ) VULKAN_HPP_NOEXCEPT
+    {
+      constantID = constantID_;
+      return *this;
+    }
+
+    SpecializationMapEntry & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    SpecializationMapEntry & setSize( size_t size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    operator VkSpecializationMapEntry const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSpecializationMapEntry*>( this );
+    }
+
+    operator VkSpecializationMapEntry &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSpecializationMapEntry*>( this );
+    }
+
+    bool operator==( SpecializationMapEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( constantID == rhs.constantID )
+          && ( offset == rhs.offset )
+          && ( size == rhs.size );
+    }
+
+    bool operator!=( SpecializationMapEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint32_t constantID;
+    uint32_t offset;
+    size_t size;
+  };
+  static_assert( sizeof( SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SpecializationMapEntry>::value, "struct wrapper is not a standard layout!" );
+
+  struct SpecializationInfo
+  {
+    VULKAN_HPP_CONSTEXPR SpecializationInfo( uint32_t mapEntryCount_ = 0,
+                                             const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries_ = nullptr,
+                                             size_t dataSize_ = 0,
+                                             const void* pData_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : mapEntryCount( mapEntryCount_ )
+      , pMapEntries( pMapEntries_ )
+      , dataSize( dataSize_ )
+      , pData( pData_ )
+    {}
+
+    SpecializationInfo( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SpecializationInfo& operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationInfo const *>(&rhs);
+      return *this;
+    }
+
+    SpecializationInfo & setMapEntryCount( uint32_t mapEntryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mapEntryCount = mapEntryCount_;
+      return *this;
+    }
+
+    SpecializationInfo & setPMapEntries( const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pMapEntries = pMapEntries_;
+      return *this;
+    }
+
+    SpecializationInfo & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dataSize = dataSize_;
+      return *this;
+    }
+
+    SpecializationInfo & setPData( const void* pData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pData = pData_;
+      return *this;
+    }
+
+    operator VkSpecializationInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSpecializationInfo*>( this );
+    }
+
+    operator VkSpecializationInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSpecializationInfo*>( this );
+    }
+
+    bool operator==( SpecializationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( mapEntryCount == rhs.mapEntryCount )
+          && ( pMapEntries == rhs.pMapEntries )
+          && ( dataSize == rhs.dataSize )
+          && ( pData == rhs.pData );
+    }
+
+    bool operator!=( SpecializationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint32_t mapEntryCount;
+    const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries;
+    size_t dataSize;
+    const void* pData;
+  };
+  static_assert( sizeof( SpecializationInfo ) == sizeof( VkSpecializationInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SpecializationInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineShaderStageCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags(),
+                                                        VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex,
+                                                        VULKAN_HPP_NAMESPACE::ShaderModule module_ = VULKAN_HPP_NAMESPACE::ShaderModule(),
+                                                        const char* pName_ = nullptr,
+                                                        const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , stage( stage_ )
+      , module( module_ )
+      , pName( pName_ )
+      , pSpecializationInfo( pSpecializationInfo_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo ) - offsetof( PipelineShaderStageCreateInfo, pNext ) );
+      return *this;
+    }
+
+    PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineShaderStageCreateInfo& operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    PipelineShaderStageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineShaderStageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineShaderStageCreateInfo & setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stage = stage_;
+      return *this;
+    }
+
+    PipelineShaderStageCreateInfo & setModule( VULKAN_HPP_NAMESPACE::ShaderModule module_ ) VULKAN_HPP_NOEXCEPT
+    {
+      module = module_;
+      return *this;
+    }
+
+    PipelineShaderStageCreateInfo & setPName( const char* pName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pName = pName_;
+      return *this;
+    }
+
+    PipelineShaderStageCreateInfo & setPSpecializationInfo( const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSpecializationInfo = pSpecializationInfo_;
+      return *this;
+    }
+
+    operator VkPipelineShaderStageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineShaderStageCreateInfo*>( this );
+    }
+
+    operator VkPipelineShaderStageCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineShaderStageCreateInfo*>( this );
+    }
+
+    bool operator==( PipelineShaderStageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( stage == rhs.stage )
+          && ( module == rhs.module )
+          && ( pName == rhs.pName )
+          && ( pSpecializationInfo == rhs.pSpecializationInfo );
+    }
+
+    bool operator!=( PipelineShaderStageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags;
+    VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage;
+    VULKAN_HPP_NAMESPACE::ShaderModule module;
+    const char* pName;
+    const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo;
+  };
+  static_assert( sizeof( PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineShaderStageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct ComputePipelineCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = VULKAN_HPP_NAMESPACE::PipelineCreateFlags(),
+                                                    VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo(),
+                                                    VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = VULKAN_HPP_NAMESPACE::PipelineLayout(),
+                                                    VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = VULKAN_HPP_NAMESPACE::Pipeline(),
+                                                    int32_t basePipelineIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , stage( stage_ )
+      , layout( layout_ )
+      , basePipelineHandle( basePipelineHandle_ )
+      , basePipelineIndex( basePipelineIndex_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo ) - offsetof( ComputePipelineCreateInfo, pNext ) );
+      return *this;
+    }
+
+    ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ComputePipelineCreateInfo& operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    ComputePipelineCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ComputePipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ComputePipelineCreateInfo & setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stage = stage_;
+      return *this;
+    }
+
+    ComputePipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layout = layout_;
+      return *this;
+    }
+
+    ComputePipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineHandle = basePipelineHandle_;
+      return *this;
+    }
+
+    ComputePipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineIndex = basePipelineIndex_;
+      return *this;
+    }
+
+    operator VkComputePipelineCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkComputePipelineCreateInfo*>( this );
+    }
+
+    operator VkComputePipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkComputePipelineCreateInfo*>( this );
+    }
+
+    bool operator==( ComputePipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( stage == rhs.stage )
+          && ( layout == rhs.layout )
+          && ( basePipelineHandle == rhs.basePipelineHandle )
+          && ( basePipelineIndex == rhs.basePipelineIndex );
+    }
+
+    bool operator!=( ComputePipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags;
+    VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage;
+    VULKAN_HPP_NAMESPACE::PipelineLayout layout;
+    VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle;
+    int32_t basePipelineIndex;
+  };
+  static_assert( sizeof( ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ComputePipelineCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct ConditionalRenderingBeginInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( VULKAN_HPP_NAMESPACE::Buffer buffer_ = VULKAN_HPP_NAMESPACE::Buffer(),
+                                                           VULKAN_HPP_NAMESPACE::DeviceSize offset_ = 0,
+                                                           VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT() ) VULKAN_HPP_NOEXCEPT
+      : buffer( buffer_ )
+      , offset( offset_ )
+      , flags( flags_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & operator=( VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT ) - offsetof( ConditionalRenderingBeginInfoEXT, pNext ) );
+      return *this;
+    }
+
+    ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ConditionalRenderingBeginInfoEXT& operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    ConditionalRenderingBeginInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ConditionalRenderingBeginInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    ConditionalRenderingBeginInfoEXT & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    ConditionalRenderingBeginInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    operator VkConditionalRenderingBeginInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkConditionalRenderingBeginInfoEXT*>( this );
+    }
+
+    operator VkConditionalRenderingBeginInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkConditionalRenderingBeginInfoEXT*>( this );
+    }
+
+    bool operator==( ConditionalRenderingBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( buffer == rhs.buffer )
+          && ( offset == rhs.offset )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( ConditionalRenderingBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Buffer buffer;
+    VULKAN_HPP_NAMESPACE::DeviceSize offset;
+    VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags;
+  };
+  static_assert( sizeof( ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ConditionalRenderingBeginInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct ConformanceVersionKHR
+  {
+    VULKAN_HPP_CONSTEXPR ConformanceVersionKHR( uint8_t major_ = 0,
+                                                uint8_t minor_ = 0,
+                                                uint8_t subminor_ = 0,
+                                                uint8_t patch_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : major( major_ )
+      , minor( minor_ )
+      , subminor( subminor_ )
+      , patch( patch_ )
+    {}
+
+    ConformanceVersionKHR( VkConformanceVersionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ConformanceVersionKHR& operator=( VkConformanceVersionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConformanceVersionKHR const *>(&rhs);
+      return *this;
+    }
+
+    ConformanceVersionKHR & setMajor( uint8_t major_ ) VULKAN_HPP_NOEXCEPT
+    {
+      major = major_;
+      return *this;
+    }
+
+    ConformanceVersionKHR & setMinor( uint8_t minor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minor = minor_;
+      return *this;
+    }
+
+    ConformanceVersionKHR & setSubminor( uint8_t subminor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subminor = subminor_;
+      return *this;
+    }
+
+    ConformanceVersionKHR & setPatch( uint8_t patch_ ) VULKAN_HPP_NOEXCEPT
+    {
+      patch = patch_;
+      return *this;
+    }
+
+    operator VkConformanceVersionKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkConformanceVersionKHR*>( this );
+    }
+
+    operator VkConformanceVersionKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkConformanceVersionKHR*>( this );
+    }
+
+    bool operator==( ConformanceVersionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( major == rhs.major )
+          && ( minor == rhs.minor )
+          && ( subminor == rhs.subminor )
+          && ( patch == rhs.patch );
+    }
+
+    bool operator!=( ConformanceVersionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint8_t major;
+    uint8_t minor;
+    uint8_t subminor;
+    uint8_t patch;
+  };
+  static_assert( sizeof( ConformanceVersionKHR ) == sizeof( VkConformanceVersionKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ConformanceVersionKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct CooperativeMatrixPropertiesNV
+  {
+    VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( uint32_t MSize_ = 0,
+                                                        uint32_t NSize_ = 0,
+                                                        uint32_t KSize_ = 0,
+                                                        VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16,
+                                                        VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16,
+                                                        VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16,
+                                                        VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16,
+                                                        VULKAN_HPP_NAMESPACE::ScopeNV scope_ = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice ) VULKAN_HPP_NOEXCEPT
+      : MSize( MSize_ )
+      , NSize( NSize_ )
+      , KSize( KSize_ )
+      , AType( AType_ )
+      , BType( BType_ )
+      , CType( CType_ )
+      , DType( DType_ )
+      , scope( scope_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV & operator=( VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV ) - offsetof( CooperativeMatrixPropertiesNV, pNext ) );
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    CooperativeMatrixPropertiesNV& operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV const *>(&rhs);
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV & setMSize( uint32_t MSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      MSize = MSize_;
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV & setNSize( uint32_t NSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      NSize = NSize_;
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV & setKSize( uint32_t KSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      KSize = KSize_;
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV & setAType( VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      AType = AType_;
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV & setBType( VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      BType = BType_;
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV & setCType( VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      CType = CType_;
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV & setDType( VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      DType = DType_;
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV & setScope( VULKAN_HPP_NAMESPACE::ScopeNV scope_ ) VULKAN_HPP_NOEXCEPT
+    {
+      scope = scope_;
+      return *this;
+    }
+
+    operator VkCooperativeMatrixPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCooperativeMatrixPropertiesNV*>( this );
+    }
+
+    operator VkCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCooperativeMatrixPropertiesNV*>( this );
+    }
+
+    bool operator==( CooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( MSize == rhs.MSize )
+          && ( NSize == rhs.NSize )
+          && ( KSize == rhs.KSize )
+          && ( AType == rhs.AType )
+          && ( BType == rhs.BType )
+          && ( CType == rhs.CType )
+          && ( DType == rhs.DType )
+          && ( scope == rhs.scope );
+    }
+
+    bool operator!=( CooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixPropertiesNV;
+    void* pNext = nullptr;
+    uint32_t MSize;
+    uint32_t NSize;
+    uint32_t KSize;
+    VULKAN_HPP_NAMESPACE::ComponentTypeNV AType;
+    VULKAN_HPP_NAMESPACE::ComponentTypeNV BType;
+    VULKAN_HPP_NAMESPACE::ComponentTypeNV CType;
+    VULKAN_HPP_NAMESPACE::ComponentTypeNV DType;
+    VULKAN_HPP_NAMESPACE::ScopeNV scope;
+  };
+  static_assert( sizeof( CooperativeMatrixPropertiesNV ) == sizeof( VkCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CooperativeMatrixPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct CopyDescriptorSet
+  {
+    VULKAN_HPP_CONSTEXPR CopyDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ = VULKAN_HPP_NAMESPACE::DescriptorSet(),
+                                            uint32_t srcBinding_ = 0,
+                                            uint32_t srcArrayElement_ = 0,
+                                            VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = VULKAN_HPP_NAMESPACE::DescriptorSet(),
+                                            uint32_t dstBinding_ = 0,
+                                            uint32_t dstArrayElement_ = 0,
+                                            uint32_t descriptorCount_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : srcSet( srcSet_ )
+      , srcBinding( srcBinding_ )
+      , srcArrayElement( srcArrayElement_ )
+      , dstSet( dstSet_ )
+      , dstBinding( dstBinding_ )
+      , dstArrayElement( dstArrayElement_ )
+      , descriptorCount( descriptorCount_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::CopyDescriptorSet & operator=( VULKAN_HPP_NAMESPACE::CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CopyDescriptorSet ) - offsetof( CopyDescriptorSet, pNext ) );
+      return *this;
+    }
+
+    CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    CopyDescriptorSet& operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyDescriptorSet const *>(&rhs);
+      return *this;
+    }
+
+    CopyDescriptorSet & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CopyDescriptorSet & setSrcSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSet = srcSet_;
+      return *this;
+    }
+
+    CopyDescriptorSet & setSrcBinding( uint32_t srcBinding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcBinding = srcBinding_;
+      return *this;
+    }
+
+    CopyDescriptorSet & setSrcArrayElement( uint32_t srcArrayElement_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcArrayElement = srcArrayElement_;
+      return *this;
+    }
+
+    CopyDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSet = dstSet_;
+      return *this;
+    }
+
+    CopyDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstBinding = dstBinding_;
+      return *this;
+    }
+
+    CopyDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstArrayElement = dstArrayElement_;
+      return *this;
+    }
+
+    CopyDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = descriptorCount_;
+      return *this;
+    }
+
+    operator VkCopyDescriptorSet const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCopyDescriptorSet*>( this );
+    }
+
+    operator VkCopyDescriptorSet &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyDescriptorSet*>( this );
+    }
+
+    bool operator==( CopyDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcSet == rhs.srcSet )
+          && ( srcBinding == rhs.srcBinding )
+          && ( srcArrayElement == rhs.srcArrayElement )
+          && ( dstSet == rhs.dstSet )
+          && ( dstBinding == rhs.dstBinding )
+          && ( dstArrayElement == rhs.dstArrayElement )
+          && ( descriptorCount == rhs.descriptorCount );
+    }
+
+    bool operator!=( CopyDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyDescriptorSet;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DescriptorSet srcSet;
+    uint32_t srcBinding;
+    uint32_t srcArrayElement;
+    VULKAN_HPP_NAMESPACE::DescriptorSet dstSet;
+    uint32_t dstBinding;
+    uint32_t dstArrayElement;
+    uint32_t descriptorCount;
+  };
+  static_assert( sizeof( CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CopyDescriptorSet>::value, "struct wrapper is not a standard layout!" );
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+  struct D3D12FenceSubmitInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( uint32_t waitSemaphoreValuesCount_ = 0,
+                                                  const uint64_t* pWaitSemaphoreValues_ = nullptr,
+                                                  uint32_t signalSemaphoreValuesCount_ = 0,
+                                                  const uint64_t* pSignalSemaphoreValues_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : waitSemaphoreValuesCount( waitSemaphoreValuesCount_ )
+      , pWaitSemaphoreValues( pWaitSemaphoreValues_ )
+      , signalSemaphoreValuesCount( signalSemaphoreValuesCount_ )
+      , pSignalSemaphoreValues( pSignalSemaphoreValues_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR & operator=( VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR ) - offsetof( D3D12FenceSubmitInfoKHR, pNext ) );
+      return *this;
+    }
+
+    D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    D3D12FenceSubmitInfoKHR& operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    D3D12FenceSubmitInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    D3D12FenceSubmitInfoKHR & setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreValuesCount = waitSemaphoreValuesCount_;
+      return *this;
+    }
+
+    D3D12FenceSubmitInfoKHR & setPWaitSemaphoreValues( const uint64_t* pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pWaitSemaphoreValues = pWaitSemaphoreValues_;
+      return *this;
+    }
+
+    D3D12FenceSubmitInfoKHR & setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreValuesCount = signalSemaphoreValuesCount_;
+      return *this;
+    }
+
+    D3D12FenceSubmitInfoKHR & setPSignalSemaphoreValues( const uint64_t* pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSignalSemaphoreValues = pSignalSemaphoreValues_;
+      return *this;
+    }
+
+    operator VkD3D12FenceSubmitInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkD3D12FenceSubmitInfoKHR*>( this );
+    }
+
+    operator VkD3D12FenceSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkD3D12FenceSubmitInfoKHR*>( this );
+    }
+
+    bool operator==( D3D12FenceSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount )
+          && ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues )
+          && ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount )
+          && ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
+    }
+
+    bool operator!=( D3D12FenceSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR;
+    const void* pNext = nullptr;
+    uint32_t waitSemaphoreValuesCount;
+    const uint64_t* pWaitSemaphoreValues;
+    uint32_t signalSemaphoreValuesCount;
+    const uint64_t* pSignalSemaphoreValues;
+  };
+  static_assert( sizeof( D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<D3D12FenceSubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct DebugMarkerMarkerInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( const char* pMarkerName_ = nullptr,
+                                                      std::array<float,4> const& color_ = { { 0 } } ) VULKAN_HPP_NOEXCEPT
+      : pMarkerName( pMarkerName_ )
+      , color{}
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<float,4,4>::copy( color, color_ );
+    }
+
+    VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT ) - offsetof( DebugMarkerMarkerInfoEXT, pNext ) );
+      return *this;
+    }
+
+    DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DebugMarkerMarkerInfoEXT& operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    DebugMarkerMarkerInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugMarkerMarkerInfoEXT & setPMarkerName( const char* pMarkerName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pMarkerName = pMarkerName_;
+      return *this;
+    }
+
+    DebugMarkerMarkerInfoEXT & setColor( std::array<float,4> color_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( color, color_.data(), 4 * sizeof( float ) );
+      return *this;
+    }
+
+    operator VkDebugMarkerMarkerInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( this );
+    }
+
+    operator VkDebugMarkerMarkerInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( this );
+    }
+
+    bool operator==( DebugMarkerMarkerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pMarkerName == rhs.pMarkerName )
+          && ( memcmp( color, rhs.color, 4 * sizeof( float ) ) == 0 );
+    }
+
+    bool operator!=( DebugMarkerMarkerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT;
+    const void* pNext = nullptr;
+    const char* pMarkerName;
+    float color[4];
+  };
+  static_assert( sizeof( DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugMarkerMarkerInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct DebugMarkerObjectNameInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown,
+                                                       uint64_t object_ = 0,
+                                                       const char* pObjectName_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : objectType( objectType_ )
+      , object( object_ )
+      , pObjectName( pObjectName_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT ) - offsetof( DebugMarkerObjectNameInfoEXT, pNext ) );
+      return *this;
+    }
+
+    DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DebugMarkerObjectNameInfoEXT& operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    DebugMarkerObjectNameInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugMarkerObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectType = objectType_;
+      return *this;
+    }
+
+    DebugMarkerObjectNameInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT
+    {
+      object = object_;
+      return *this;
+    }
+
+    DebugMarkerObjectNameInfoEXT & setPObjectName( const char* pObjectName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pObjectName = pObjectName_;
+      return *this;
+    }
+
+    operator VkDebugMarkerObjectNameInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>( this );
+    }
+
+    operator VkDebugMarkerObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugMarkerObjectNameInfoEXT*>( this );
+    }
+
+    bool operator==( DebugMarkerObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectType == rhs.objectType )
+          && ( object == rhs.object )
+          && ( pObjectName == rhs.pObjectName );
+    }
+
+    bool operator!=( DebugMarkerObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType;
+    uint64_t object;
+    const char* pObjectName;
+  };
+  static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugMarkerObjectNameInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct DebugMarkerObjectTagInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown,
+                                                      uint64_t object_ = 0,
+                                                      uint64_t tagName_ = 0,
+                                                      size_t tagSize_ = 0,
+                                                      const void* pTag_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : objectType( objectType_ )
+      , object( object_ )
+      , tagName( tagName_ )
+      , tagSize( tagSize_ )
+      , pTag( pTag_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT ) - offsetof( DebugMarkerObjectTagInfoEXT, pNext ) );
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DebugMarkerObjectTagInfoEXT& operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectType = objectType_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT
+    {
+      object = object_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagName = tagName_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagSize = tagSize_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT & setPTag( const void* pTag_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTag = pTag_;
+      return *this;
+    }
+
+    operator VkDebugMarkerObjectTagInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>( this );
+    }
+
+    operator VkDebugMarkerObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugMarkerObjectTagInfoEXT*>( this );
+    }
+
+    bool operator==( DebugMarkerObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectType == rhs.objectType )
+          && ( object == rhs.object )
+          && ( tagName == rhs.tagName )
+          && ( tagSize == rhs.tagSize )
+          && ( pTag == rhs.pTag );
+    }
+
+    bool operator!=( DebugMarkerObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType;
+    uint64_t object;
+    uint64_t tagName;
+    size_t tagSize;
+    const void* pTag;
+  };
+  static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugMarkerObjectTagInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct DebugReportCallbackCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ = VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT(),
+                                                           PFN_vkDebugReportCallbackEXT pfnCallback_ = nullptr,
+                                                           void* pUserData_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , pfnCallback( pfnCallback_ )
+      , pUserData( pUserData_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT ) - offsetof( DebugReportCallbackCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DebugReportCallbackCreateInfoEXT& operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    DebugReportCallbackCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugReportCallbackCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DebugReportCallbackCreateInfoEXT & setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnCallback = pfnCallback_;
+      return *this;
+    }
+
+    DebugReportCallbackCreateInfoEXT & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUserData = pUserData_;
+      return *this;
+    }
+
+    operator VkDebugReportCallbackCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( this );
+    }
+
+    operator VkDebugReportCallbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugReportCallbackCreateInfoEXT*>( this );
+    }
+
+    bool operator==( DebugReportCallbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pfnCallback == rhs.pfnCallback )
+          && ( pUserData == rhs.pUserData );
+    }
+
+    bool operator!=( DebugReportCallbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags;
+    PFN_vkDebugReportCallbackEXT pfnCallback;
+    void* pUserData;
+  };
+  static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugReportCallbackCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct DebugUtilsLabelEXT
+  {
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( const char* pLabelName_ = nullptr,
+                                                std::array<float,4> const& color_ = { { 0 } } ) VULKAN_HPP_NOEXCEPT
+      : pLabelName( pLabelName_ )
+      , color{}
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<float,4,4>::copy( color, color_ );
+    }
+
+    VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & operator=( VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT ) - offsetof( DebugUtilsLabelEXT, pNext ) );
+      return *this;
+    }
+
+    DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DebugUtilsLabelEXT& operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT const *>(&rhs);
+      return *this;
+    }
+
+    DebugUtilsLabelEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugUtilsLabelEXT & setPLabelName( const char* pLabelName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pLabelName = pLabelName_;
+      return *this;
+    }
+
+    DebugUtilsLabelEXT & setColor( std::array<float,4> color_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( color, color_.data(), 4 * sizeof( float ) );
+      return *this;
+    }
+
+    operator VkDebugUtilsLabelEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugUtilsLabelEXT*>( this );
+    }
+
+    operator VkDebugUtilsLabelEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugUtilsLabelEXT*>( this );
+    }
+
+    bool operator==( DebugUtilsLabelEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pLabelName == rhs.pLabelName )
+          && ( memcmp( color, rhs.color, 4 * sizeof( float ) ) == 0 );
+    }
+
+    bool operator!=( DebugUtilsLabelEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsLabelEXT;
+    const void* pNext = nullptr;
+    const char* pLabelName;
+    float color[4];
+  };
+  static_assert( sizeof( DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugUtilsLabelEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct DebugUtilsObjectNameInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown,
+                                                      uint64_t objectHandle_ = 0,
+                                                      const char* pObjectName_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : objectType( objectType_ )
+      , objectHandle( objectHandle_ )
+      , pObjectName( pObjectName_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT ) - offsetof( DebugUtilsObjectNameInfoEXT, pNext ) );
+      return *this;
+    }
+
+    DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DebugUtilsObjectNameInfoEXT& operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    DebugUtilsObjectNameInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugUtilsObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectType = objectType_;
+      return *this;
+    }
+
+    DebugUtilsObjectNameInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectHandle = objectHandle_;
+      return *this;
+    }
+
+    DebugUtilsObjectNameInfoEXT & setPObjectName( const char* pObjectName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pObjectName = pObjectName_;
+      return *this;
+    }
+
+    operator VkDebugUtilsObjectNameInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT*>( this );
+    }
+
+    operator VkDebugUtilsObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugUtilsObjectNameInfoEXT*>( this );
+    }
+
+    bool operator==( DebugUtilsObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectType == rhs.objectType )
+          && ( objectHandle == rhs.objectHandle )
+          && ( pObjectName == rhs.pObjectName );
+    }
+
+    bool operator!=( DebugUtilsObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ObjectType objectType;
+    uint64_t objectHandle;
+    const char* pObjectName;
+  };
+  static_assert( sizeof( DebugUtilsObjectNameInfoEXT ) == sizeof( VkDebugUtilsObjectNameInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugUtilsObjectNameInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct DebugUtilsMessengerCallbackDataEXT
+  {
+    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCallbackDataEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT(),
+                                                             const char* pMessageIdName_ = nullptr,
+                                                             int32_t messageIdNumber_ = 0,
+                                                             const char* pMessage_ = nullptr,
+                                                             uint32_t queueLabelCount_ = 0,
+                                                             const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels_ = nullptr,
+                                                             uint32_t cmdBufLabelCount_ = 0,
+                                                             const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels_ = nullptr,
+                                                             uint32_t objectCount_ = 0,
+                                                             const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , pMessageIdName( pMessageIdName_ )
+      , messageIdNumber( messageIdNumber_ )
+      , pMessage( pMessage_ )
+      , queueLabelCount( queueLabelCount_ )
+      , pQueueLabels( pQueueLabels_ )
+      , cmdBufLabelCount( cmdBufLabelCount_ )
+      , pCmdBufLabels( pCmdBufLabels_ )
+      , objectCount( objectCount_ )
+      , pObjects( pObjects_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & operator=( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT ) - offsetof( DebugUtilsMessengerCallbackDataEXT, pNext ) );
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT& operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT const *>(&rhs);
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setPMessageIdName( const char* pMessageIdName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pMessageIdName = pMessageIdName_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setMessageIdNumber( int32_t messageIdNumber_ ) VULKAN_HPP_NOEXCEPT
+    {
+      messageIdNumber = messageIdNumber_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setPMessage( const char* pMessage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pMessage = pMessage_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setQueueLabelCount( uint32_t queueLabelCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueLabelCount = queueLabelCount_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setPQueueLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueueLabels = pQueueLabels_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setCmdBufLabelCount( uint32_t cmdBufLabelCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      cmdBufLabelCount = cmdBufLabelCount_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setPCmdBufLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCmdBufLabels = pCmdBufLabels_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectCount = objectCount_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setPObjects( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pObjects = pObjects_;
+      return *this;
+    }
+
+    operator VkDebugUtilsMessengerCallbackDataEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT*>( this );
+    }
+
+    operator VkDebugUtilsMessengerCallbackDataEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugUtilsMessengerCallbackDataEXT*>( this );
+    }
+
+    bool operator==( DebugUtilsMessengerCallbackDataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pMessageIdName == rhs.pMessageIdName )
+          && ( messageIdNumber == rhs.messageIdNumber )
+          && ( pMessage == rhs.pMessage )
+          && ( queueLabelCount == rhs.queueLabelCount )
+          && ( pQueueLabels == rhs.pQueueLabels )
+          && ( cmdBufLabelCount == rhs.cmdBufLabelCount )
+          && ( pCmdBufLabels == rhs.pCmdBufLabels )
+          && ( objectCount == rhs.objectCount )
+          && ( pObjects == rhs.pObjects );
+    }
+
+    bool operator!=( DebugUtilsMessengerCallbackDataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags;
+    const char* pMessageIdName;
+    int32_t messageIdNumber;
+    const char* pMessage;
+    uint32_t queueLabelCount;
+    const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels;
+    uint32_t cmdBufLabelCount;
+    const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels;
+    uint32_t objectCount;
+    const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects;
+  };
+  static_assert( sizeof( DebugUtilsMessengerCallbackDataEXT ) == sizeof( VkDebugUtilsMessengerCallbackDataEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugUtilsMessengerCallbackDataEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct DebugUtilsMessengerCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT(),
+                                                           VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT(),
+                                                           VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT(),
+                                                           PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = nullptr,
+                                                           void* pUserData_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , messageSeverity( messageSeverity_ )
+      , messageType( messageType_ )
+      , pfnUserCallback( pfnUserCallback_ )
+      , pUserData( pUserData_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT ) - offsetof( DebugUtilsMessengerCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DebugUtilsMessengerCreateInfoEXT& operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    DebugUtilsMessengerCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCreateInfoEXT & setMessageSeverity( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) VULKAN_HPP_NOEXCEPT
+    {
+      messageSeverity = messageSeverity_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCreateInfoEXT & setMessageType( VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      messageType = messageType_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCreateInfoEXT & setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnUserCallback = pfnUserCallback_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCreateInfoEXT & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUserData = pUserData_;
+      return *this;
+    }
+
+    operator VkDebugUtilsMessengerCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT*>( this );
+    }
+
+    operator VkDebugUtilsMessengerCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugUtilsMessengerCreateInfoEXT*>( this );
+    }
+
+    bool operator==( DebugUtilsMessengerCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( messageSeverity == rhs.messageSeverity )
+          && ( messageType == rhs.messageType )
+          && ( pfnUserCallback == rhs.pfnUserCallback )
+          && ( pUserData == rhs.pUserData );
+    }
+
+    bool operator!=( DebugUtilsMessengerCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags;
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity;
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType;
+    PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback;
+    void* pUserData;
+  };
+  static_assert( sizeof( DebugUtilsMessengerCreateInfoEXT ) == sizeof( VkDebugUtilsMessengerCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugUtilsMessengerCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct DebugUtilsObjectTagInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown,
+                                                     uint64_t objectHandle_ = 0,
+                                                     uint64_t tagName_ = 0,
+                                                     size_t tagSize_ = 0,
+                                                     const void* pTag_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : objectType( objectType_ )
+      , objectHandle( objectHandle_ )
+      , tagName( tagName_ )
+      , tagSize( tagSize_ )
+      , pTag( pTag_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT ) - offsetof( DebugUtilsObjectTagInfoEXT, pNext ) );
+      return *this;
+    }
+
+    DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DebugUtilsObjectTagInfoEXT& operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    DebugUtilsObjectTagInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugUtilsObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectType = objectType_;
+      return *this;
+    }
+
+    DebugUtilsObjectTagInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectHandle = objectHandle_;
+      return *this;
+    }
+
+    DebugUtilsObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagName = tagName_;
+      return *this;
+    }
+
+    DebugUtilsObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagSize = tagSize_;
+      return *this;
+    }
+
+    DebugUtilsObjectTagInfoEXT & setPTag( const void* pTag_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTag = pTag_;
+      return *this;
+    }
+
+    operator VkDebugUtilsObjectTagInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT*>( this );
+    }
+
+    operator VkDebugUtilsObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugUtilsObjectTagInfoEXT*>( this );
+    }
+
+    bool operator==( DebugUtilsObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectType == rhs.objectType )
+          && ( objectHandle == rhs.objectHandle )
+          && ( tagName == rhs.tagName )
+          && ( tagSize == rhs.tagSize )
+          && ( pTag == rhs.pTag );
+    }
+
+    bool operator!=( DebugUtilsObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ObjectType objectType;
+    uint64_t objectHandle;
+    uint64_t tagName;
+    size_t tagSize;
+    const void* pTag;
+  };
+  static_assert( sizeof( DebugUtilsObjectTagInfoEXT ) == sizeof( VkDebugUtilsObjectTagInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugUtilsObjectTagInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct DedicatedAllocationBufferCreateInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : dedicatedAllocation( dedicatedAllocation_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV ) - offsetof( DedicatedAllocationBufferCreateInfoNV, pNext ) );
+      return *this;
+    }
+
+    DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DedicatedAllocationBufferCreateInfoNV& operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    DedicatedAllocationBufferCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DedicatedAllocationBufferCreateInfoNV & setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dedicatedAllocation = dedicatedAllocation_;
+      return *this;
+    }
+
+    operator VkDedicatedAllocationBufferCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDedicatedAllocationBufferCreateInfoNV*>( this );
+    }
+
+    operator VkDedicatedAllocationBufferCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDedicatedAllocationBufferCreateInfoNV*>( this );
+    }
+
+    bool operator==( DedicatedAllocationBufferCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( dedicatedAllocation == rhs.dedicatedAllocation );
+    }
+
+    bool operator!=( DedicatedAllocationBufferCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationBufferCreateInfoNV;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation;
+  };
+  static_assert( sizeof( DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DedicatedAllocationBufferCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct DedicatedAllocationImageCreateInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : dedicatedAllocation( dedicatedAllocation_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV ) - offsetof( DedicatedAllocationImageCreateInfoNV, pNext ) );
+      return *this;
+    }
+
+    DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DedicatedAllocationImageCreateInfoNV& operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    DedicatedAllocationImageCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DedicatedAllocationImageCreateInfoNV & setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dedicatedAllocation = dedicatedAllocation_;
+      return *this;
+    }
+
+    operator VkDedicatedAllocationImageCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDedicatedAllocationImageCreateInfoNV*>( this );
+    }
+
+    operator VkDedicatedAllocationImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDedicatedAllocationImageCreateInfoNV*>( this );
+    }
+
+    bool operator==( DedicatedAllocationImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( dedicatedAllocation == rhs.dedicatedAllocation );
+    }
+
+    bool operator!=( DedicatedAllocationImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationImageCreateInfoNV;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation;
+  };
+  static_assert( sizeof( DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DedicatedAllocationImageCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct DedicatedAllocationMemoryAllocateInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::Image image_ = VULKAN_HPP_NAMESPACE::Image(),
+                                                                  VULKAN_HPP_NAMESPACE::Buffer buffer_ = VULKAN_HPP_NAMESPACE::Buffer() ) VULKAN_HPP_NOEXCEPT
+      : image( image_ )
+      , buffer( buffer_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV & operator=( VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV ) - offsetof( DedicatedAllocationMemoryAllocateInfoNV, pNext ) );
+      return *this;
+    }
+
+    DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DedicatedAllocationMemoryAllocateInfoNV& operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    DedicatedAllocationMemoryAllocateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DedicatedAllocationMemoryAllocateInfoNV & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    DedicatedAllocationMemoryAllocateInfoNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    operator VkDedicatedAllocationMemoryAllocateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDedicatedAllocationMemoryAllocateInfoNV*>( this );
+    }
+
+    operator VkDedicatedAllocationMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDedicatedAllocationMemoryAllocateInfoNV*>( this );
+    }
+
+    bool operator==( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( image == rhs.image )
+          && ( buffer == rhs.buffer );
+    }
+
+    bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Image image;
+    VULKAN_HPP_NAMESPACE::Buffer buffer;
+  };
+  static_assert( sizeof( DedicatedAllocationMemoryAllocateInfoNV ) == sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DedicatedAllocationMemoryAllocateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct DescriptorBufferInfo
+  {
+    VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = VULKAN_HPP_NAMESPACE::Buffer(),
+                                               VULKAN_HPP_NAMESPACE::DeviceSize offset_ = 0,
+                                               VULKAN_HPP_NAMESPACE::DeviceSize range_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : buffer( buffer_ )
+      , offset( offset_ )
+      , range( range_ )
+    {}
+
+    DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DescriptorBufferInfo& operator=( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo const *>(&rhs);
+      return *this;
+    }
+
+    DescriptorBufferInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    DescriptorBufferInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    DescriptorBufferInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
+    {
+      range = range_;
+      return *this;
+    }
+
+    operator VkDescriptorBufferInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorBufferInfo*>( this );
+    }
+
+    operator VkDescriptorBufferInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorBufferInfo*>( this );
+    }
+
+    bool operator==( DescriptorBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( buffer == rhs.buffer )
+          && ( offset == rhs.offset )
+          && ( range == rhs.range );
+    }
+
+    bool operator!=( DescriptorBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::Buffer buffer;
+    VULKAN_HPP_NAMESPACE::DeviceSize offset;
+    VULKAN_HPP_NAMESPACE::DeviceSize range;
+  };
+  static_assert( sizeof( DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorBufferInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct DescriptorImageInfo
+  {
+    VULKAN_HPP_CONSTEXPR DescriptorImageInfo( VULKAN_HPP_NAMESPACE::Sampler sampler_ = VULKAN_HPP_NAMESPACE::Sampler(),
+                                              VULKAN_HPP_NAMESPACE::ImageView imageView_ = VULKAN_HPP_NAMESPACE::ImageView(),
+                                              VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
+      : sampler( sampler_ )
+      , imageView( imageView_ )
+      , imageLayout( imageLayout_ )
+    {}
+
+    DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DescriptorImageInfo& operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorImageInfo const *>(&rhs);
+      return *this;
+    }
+
+    DescriptorImageInfo & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampler = sampler_;
+      return *this;
+    }
+
+    DescriptorImageInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageView = imageView_;
+      return *this;
+    }
+
+    DescriptorImageInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageLayout = imageLayout_;
+      return *this;
+    }
+
+    operator VkDescriptorImageInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorImageInfo*>( this );
+    }
+
+    operator VkDescriptorImageInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorImageInfo*>( this );
+    }
+
+    bool operator==( DescriptorImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sampler == rhs.sampler )
+          && ( imageView == rhs.imageView )
+          && ( imageLayout == rhs.imageLayout );
+    }
+
+    bool operator!=( DescriptorImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::Sampler sampler;
+    VULKAN_HPP_NAMESPACE::ImageView imageView;
+    VULKAN_HPP_NAMESPACE::ImageLayout imageLayout;
+  };
+  static_assert( sizeof( DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorImageInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct DescriptorPoolSize
+  {
+    VULKAN_HPP_CONSTEXPR DescriptorPoolSize( VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
+                                             uint32_t descriptorCount_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : type( type_ )
+      , descriptorCount( descriptorCount_ )
+    {}
+
+    DescriptorPoolSize( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DescriptorPoolSize& operator=( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolSize const *>(&rhs);
+      return *this;
+    }
+
+    DescriptorPoolSize & setType( VULKAN_HPP_NAMESPACE::DescriptorType type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    DescriptorPoolSize & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = descriptorCount_;
+      return *this;
+    }
+
+    operator VkDescriptorPoolSize const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorPoolSize*>( this );
+    }
+
+    operator VkDescriptorPoolSize &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorPoolSize*>( this );
+    }
+
+    bool operator==( DescriptorPoolSize const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( type == rhs.type )
+          && ( descriptorCount == rhs.descriptorCount );
+    }
+
+    bool operator!=( DescriptorPoolSize const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::DescriptorType type;
+    uint32_t descriptorCount;
+  };
+  static_assert( sizeof( DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorPoolSize>::value, "struct wrapper is not a standard layout!" );
+
+  struct DescriptorPoolCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ = VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags(),
+                                                   uint32_t maxSets_ = 0,
+                                                   uint32_t poolSizeCount_ = 0,
+                                                   const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , maxSets( maxSets_ )
+      , poolSizeCount( poolSizeCount_ )
+      , pPoolSizes( pPoolSizes_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & operator=( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo ) - offsetof( DescriptorPoolCreateInfo, pNext ) );
+      return *this;
+    }
+
+    DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DescriptorPoolCreateInfo& operator=( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    DescriptorPoolCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DescriptorPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DescriptorPoolCreateInfo & setMaxSets( uint32_t maxSets_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxSets = maxSets_;
+      return *this;
+    }
+
+    DescriptorPoolCreateInfo & setPoolSizeCount( uint32_t poolSizeCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      poolSizeCount = poolSizeCount_;
+      return *this;
+    }
+
+    DescriptorPoolCreateInfo & setPPoolSizes( const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPoolSizes = pPoolSizes_;
+      return *this;
+    }
+
+    operator VkDescriptorPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorPoolCreateInfo*>( this );
+    }
+
+    operator VkDescriptorPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorPoolCreateInfo*>( this );
+    }
+
+    bool operator==( DescriptorPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( maxSets == rhs.maxSets )
+          && ( poolSizeCount == rhs.poolSizeCount )
+          && ( pPoolSizes == rhs.pPoolSizes );
+    }
+
+    bool operator!=( DescriptorPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags;
+    uint32_t maxSets;
+    uint32_t poolSizeCount;
+    const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes;
+  };
+  static_assert( sizeof( DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
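+
+  // Editor's note: illustrative usage sketch only, not part of the generated header.
+  // A pool is typically described by an array of DescriptorPoolSize entries that the
+  // create info points at; the counts below are arbitrary example values.
+  //
+  //   std::array<vk::DescriptorPoolSize, 2> poolSizes = {
+  //     vk::DescriptorPoolSize( vk::DescriptorType::eUniformBuffer, 16 ),
+  //     vk::DescriptorPoolSize( vk::DescriptorType::eCombinedImageSampler, 16 )
+  //   };
+  //   vk::DescriptorPoolCreateInfo poolInfo = vk::DescriptorPoolCreateInfo()
+  //       .setMaxSets( 32 )
+  //       .setPoolSizeCount( static_cast<uint32_t>( poolSizes.size() ) )
+  //       .setPPoolSizes( poolSizes.data() );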
+
+  struct DescriptorPoolInlineUniformBlockCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfoEXT( uint32_t maxInlineUniformBlockBindings_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : maxInlineUniformBlockBindings( maxInlineUniformBlockBindings_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfoEXT ) - offsetof( DescriptorPoolInlineUniformBlockCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    DescriptorPoolInlineUniformBlockCreateInfoEXT( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DescriptorPoolInlineUniformBlockCreateInfoEXT& operator=( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    DescriptorPoolInlineUniformBlockCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DescriptorPoolInlineUniformBlockCreateInfoEXT & setMaxInlineUniformBlockBindings( uint32_t maxInlineUniformBlockBindings_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxInlineUniformBlockBindings = maxInlineUniformBlockBindings_;
+      return *this;
+    }
+
+    operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorPoolInlineUniformBlockCreateInfoEXT*>( this );
+    }
+
+    operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorPoolInlineUniformBlockCreateInfoEXT*>( this );
+    }
+
+    bool operator==( DescriptorPoolInlineUniformBlockCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxInlineUniformBlockBindings == rhs.maxInlineUniformBlockBindings );
+    }
+
+    bool operator!=( DescriptorPoolInlineUniformBlockCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT;
+    const void* pNext = nullptr;
+    uint32_t maxInlineUniformBlockBindings;
+  };
+  static_assert( sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) == sizeof( VkDescriptorPoolInlineUniformBlockCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorPoolInlineUniformBlockCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct DescriptorSetAllocateInfo
+  {
+    VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ = VULKAN_HPP_NAMESPACE::DescriptorPool(),
+                                                    uint32_t descriptorSetCount_ = 0,
+                                                    const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : descriptorPool( descriptorPool_ )
+      , descriptorSetCount( descriptorSetCount_ )
+      , pSetLayouts( pSetLayouts_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & operator=( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo ) - offsetof( DescriptorSetAllocateInfo, pNext ) );
+      return *this;
+    }
+
+    DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DescriptorSetAllocateInfo& operator=( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const *>(&rhs);
+      return *this;
+    }
+
+    DescriptorSetAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DescriptorSetAllocateInfo & setDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorPool = descriptorPool_;
+      return *this;
+    }
+
+    DescriptorSetAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorSetCount = descriptorSetCount_;
+      return *this;
+    }
+
+    DescriptorSetAllocateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSetLayouts = pSetLayouts_;
+      return *this;
+    }
+
+    operator VkDescriptorSetAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetAllocateInfo*>( this );
+    }
+
+    operator VkDescriptorSetAllocateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetAllocateInfo*>( this );
+    }
+
+    bool operator==( DescriptorSetAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( descriptorPool == rhs.descriptorPool )
+          && ( descriptorSetCount == rhs.descriptorSetCount )
+          && ( pSetLayouts == rhs.pSetLayouts );
+    }
+
+    bool operator!=( DescriptorSetAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetAllocateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
+    uint32_t descriptorSetCount;
+    const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts;
+  };
+  static_assert( sizeof( DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorSetAllocateInfo>::value, "struct wrapper is not a standard layout!" );
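+
+  // Editor's note: illustrative usage sketch only, not part of the generated header.
+  // `pool` and `layout` are hypothetical handles created earlier; the same layout is
+  // repeated once per set being allocated.
+  //
+  //   std::vector<vk::DescriptorSetLayout> layouts( 3, layout );
+  //   vk::DescriptorSetAllocateInfo allocInfo = vk::DescriptorSetAllocateInfo()
+  //       .setDescriptorPool( pool )
+  //       .setDescriptorSetCount( static_cast<uint32_t>( layouts.size() ) )
+  //       .setPSetLayouts( layouts.data() );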
+
+  struct DescriptorSetLayoutBinding
+  {
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( uint32_t binding_ = 0,
+                                                     VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
+                                                     uint32_t descriptorCount_ = 0,
+                                                     VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = VULKAN_HPP_NAMESPACE::ShaderStageFlags(),
+                                                     const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : binding( binding_ )
+      , descriptorType( descriptorType_ )
+      , descriptorCount( descriptorCount_ )
+      , stageFlags( stageFlags_ )
+      , pImmutableSamplers( pImmutableSamplers_ )
+    {}
+
+    DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DescriptorSetLayoutBinding& operator=( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding const *>(&rhs);
+      return *this;
+    }
+
+    DescriptorSetLayoutBinding & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      binding = binding_;
+      return *this;
+    }
+
+    DescriptorSetLayoutBinding & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorType = descriptorType_;
+      return *this;
+    }
+
+    DescriptorSetLayoutBinding & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = descriptorCount_;
+      return *this;
+    }
+
+    DescriptorSetLayoutBinding & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageFlags = stageFlags_;
+      return *this;
+    }
+
+    DescriptorSetLayoutBinding & setPImmutableSamplers( const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pImmutableSamplers = pImmutableSamplers_;
+      return *this;
+    }
+
+    operator VkDescriptorSetLayoutBinding const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetLayoutBinding*>( this );
+    }
+
+    operator VkDescriptorSetLayoutBinding &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetLayoutBinding*>( this );
+    }
+
+    bool operator==( DescriptorSetLayoutBinding const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( binding == rhs.binding )
+          && ( descriptorType == rhs.descriptorType )
+          && ( descriptorCount == rhs.descriptorCount )
+          && ( stageFlags == rhs.stageFlags )
+          && ( pImmutableSamplers == rhs.pImmutableSamplers );
+    }
+
+    bool operator!=( DescriptorSetLayoutBinding const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint32_t binding;
+    VULKAN_HPP_NAMESPACE::DescriptorType descriptorType;
+    uint32_t descriptorCount;
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags;
+    const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers;
+  };
+  static_assert( sizeof( DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorSetLayoutBinding>::value, "struct wrapper is not a standard layout!" );
+
+  struct DescriptorSetLayoutBindingFlagsCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfoEXT( uint32_t bindingCount_ = 0,
+                                                                       const VULKAN_HPP_NAMESPACE::DescriptorBindingFlagsEXT* pBindingFlags_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : bindingCount( bindingCount_ )
+      , pBindingFlags( pBindingFlags_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfoEXT ) - offsetof( DescriptorSetLayoutBindingFlagsCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    DescriptorSetLayoutBindingFlagsCreateInfoEXT( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DescriptorSetLayoutBindingFlagsCreateInfoEXT& operator=( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    DescriptorSetLayoutBindingFlagsCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DescriptorSetLayoutBindingFlagsCreateInfoEXT & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bindingCount = bindingCount_;
+      return *this;
+    }
+
+    DescriptorSetLayoutBindingFlagsCreateInfoEXT & setPBindingFlags( const VULKAN_HPP_NAMESPACE::DescriptorBindingFlagsEXT* pBindingFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pBindingFlags = pBindingFlags_;
+      return *this;
+    }
+
+    operator VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT*>( this );
+    }
+
+    operator VkDescriptorSetLayoutBindingFlagsCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetLayoutBindingFlagsCreateInfoEXT*>( this );
+    }
+
+    bool operator==( DescriptorSetLayoutBindingFlagsCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( bindingCount == rhs.bindingCount )
+          && ( pBindingFlags == rhs.pBindingFlags );
+    }
+
+    bool operator!=( DescriptorSetLayoutBindingFlagsCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfoEXT;
+    const void* pNext = nullptr;
+    uint32_t bindingCount;
+    const VULKAN_HPP_NAMESPACE::DescriptorBindingFlagsEXT* pBindingFlags;
+  };
+  static_assert( sizeof( DescriptorSetLayoutBindingFlagsCreateInfoEXT ) == sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorSetLayoutBindingFlagsCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct DescriptorSetLayoutCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ = VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags(),
+                                                        uint32_t bindingCount_ = 0,
+                                                        const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , bindingCount( bindingCount_ )
+      , pBindings( pBindings_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & operator=( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo ) - offsetof( DescriptorSetLayoutCreateInfo, pNext ) );
+      return *this;
+    }
+
+    DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DescriptorSetLayoutCreateInfo& operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    DescriptorSetLayoutCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DescriptorSetLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DescriptorSetLayoutCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bindingCount = bindingCount_;
+      return *this;
+    }
+
+    DescriptorSetLayoutCreateInfo & setPBindings( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pBindings = pBindings_;
+      return *this;
+    }
+
+    operator VkDescriptorSetLayoutCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( this );
+    }
+
+    operator VkDescriptorSetLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetLayoutCreateInfo*>( this );
+    }
+
+    bool operator==( DescriptorSetLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( bindingCount == rhs.bindingCount )
+          && ( pBindings == rhs.pBindings );
+    }
+
+    bool operator!=( DescriptorSetLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags;
+    uint32_t bindingCount;
+    const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings;
+  };
+  static_assert( sizeof( DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorSetLayoutCreateInfo>::value, "struct wrapper is not a standard layout!" );
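+
+  // Editor's note: illustrative usage sketch only, not part of the generated header.
+  // A layout is built from one or more DescriptorSetLayoutBinding entries; binding 0
+  // below is an example uniform buffer visible to the vertex stage.
+  //
+  //   vk::DescriptorSetLayoutBinding uboBinding = vk::DescriptorSetLayoutBinding()
+  //       .setBinding( 0 )
+  //       .setDescriptorType( vk::DescriptorType::eUniformBuffer )
+  //       .setDescriptorCount( 1 )
+  //       .setStageFlags( vk::ShaderStageFlagBits::eVertex );
+  //   vk::DescriptorSetLayoutCreateInfo layoutInfo = vk::DescriptorSetLayoutCreateInfo()
+  //       .setBindingCount( 1 )
+  //       .setPBindings( &uboBinding );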
+
+  struct DescriptorSetLayoutSupport
+  {
+    DescriptorSetLayoutSupport( VULKAN_HPP_NAMESPACE::Bool32 supported_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : supported( supported_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & operator=( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport ) - offsetof( DescriptorSetLayoutSupport, pNext ) );
+      return *this;
+    }
+
+    DescriptorSetLayoutSupport( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DescriptorSetLayoutSupport& operator=( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport const *>(&rhs);
+      return *this;
+    }
+
+    operator VkDescriptorSetLayoutSupport const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetLayoutSupport*>( this );
+    }
+
+    operator VkDescriptorSetLayoutSupport &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetLayoutSupport*>( this );
+    }
+
+    bool operator==( DescriptorSetLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( supported == rhs.supported );
+    }
+
+    bool operator!=( DescriptorSetLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutSupport;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 supported;
+  };
+  static_assert( sizeof( DescriptorSetLayoutSupport ) == sizeof( VkDescriptorSetLayoutSupport ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorSetLayoutSupport>::value, "struct wrapper is not a standard layout!" );
+
+  struct DescriptorSetVariableDescriptorCountAllocateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfoEXT( uint32_t descriptorSetCount_ = 0,
+                                                                              const uint32_t* pDescriptorCounts_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : descriptorSetCount( descriptorSetCount_ )
+      , pDescriptorCounts( pDescriptorCounts_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfoEXT ) - offsetof( DescriptorSetVariableDescriptorCountAllocateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    DescriptorSetVariableDescriptorCountAllocateInfoEXT( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DescriptorSetVariableDescriptorCountAllocateInfoEXT& operator=( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    DescriptorSetVariableDescriptorCountAllocateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DescriptorSetVariableDescriptorCountAllocateInfoEXT & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorSetCount = descriptorSetCount_;
+      return *this;
+    }
+
+    DescriptorSetVariableDescriptorCountAllocateInfoEXT & setPDescriptorCounts( const uint32_t* pDescriptorCounts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDescriptorCounts = pDescriptorCounts_;
+      return *this;
+    }
+
+    operator VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountAllocateInfoEXT*>( this );
+    }
+
+    operator VkDescriptorSetVariableDescriptorCountAllocateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT*>( this );
+    }
+
+    bool operator==( DescriptorSetVariableDescriptorCountAllocateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( descriptorSetCount == rhs.descriptorSetCount )
+          && ( pDescriptorCounts == rhs.pDescriptorCounts );
+    }
+
+    bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfoEXT;
+    const void* pNext = nullptr;
+    uint32_t descriptorSetCount;
+    const uint32_t* pDescriptorCounts;
+  };
+  static_assert( sizeof( DescriptorSetVariableDescriptorCountAllocateInfoEXT ) == sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorSetVariableDescriptorCountAllocateInfoEXT>::value, "struct wrapper is not a standard layout!" );
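+
+  // Editor's note: illustrative usage sketch only, not part of the generated header.
+  // This EXT struct is chained into a DescriptorSetAllocateInfo via pNext to request a
+  // variable descriptor count per allocated set; `counts` holds one entry per set and
+  // `allocInfo` refers to the hypothetical allocate info from the earlier sketch.
+  //
+  //   uint32_t counts[] = { 64 };
+  //   vk::DescriptorSetVariableDescriptorCountAllocateInfoEXT variableCounts =
+  //       vk::DescriptorSetVariableDescriptorCountAllocateInfoEXT()
+  //           .setDescriptorSetCount( 1 )
+  //           .setPDescriptorCounts( counts );
+  //   allocInfo.setPNext( &variableCounts );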
+
+  struct DescriptorSetVariableDescriptorCountLayoutSupportEXT
+  {
+    DescriptorSetVariableDescriptorCountLayoutSupportEXT( uint32_t maxVariableDescriptorCount_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : maxVariableDescriptorCount( maxVariableDescriptorCount_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupportEXT & operator=( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupportEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupportEXT ) - offsetof( DescriptorSetVariableDescriptorCountLayoutSupportEXT, pNext ) );
+      return *this;
+    }
+
+    DescriptorSetVariableDescriptorCountLayoutSupportEXT( VkDescriptorSetVariableDescriptorCountLayoutSupportEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DescriptorSetVariableDescriptorCountLayoutSupportEXT& operator=( VkDescriptorSetVariableDescriptorCountLayoutSupportEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupportEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkDescriptorSetVariableDescriptorCountLayoutSupportEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountLayoutSupportEXT*>( this );
+    }
+
+    operator VkDescriptorSetVariableDescriptorCountLayoutSupportEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountLayoutSupportEXT*>( this );
+    }
+
+    bool operator==( DescriptorSetVariableDescriptorCountLayoutSupportEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount );
+    }
+
+    bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupportEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupportEXT;
+    void* pNext = nullptr;
+    uint32_t maxVariableDescriptorCount;
+  };
+  static_assert( sizeof( DescriptorSetVariableDescriptorCountLayoutSupportEXT ) == sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupportEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorSetVariableDescriptorCountLayoutSupportEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct DescriptorUpdateTemplateEntry
+  {
+    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( uint32_t dstBinding_ = 0,
+                                                        uint32_t dstArrayElement_ = 0,
+                                                        uint32_t descriptorCount_ = 0,
+                                                        VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
+                                                        size_t offset_ = 0,
+                                                        size_t stride_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : dstBinding( dstBinding_ )
+      , dstArrayElement( dstArrayElement_ )
+      , descriptorCount( descriptorCount_ )
+      , descriptorType( descriptorType_ )
+      , offset( offset_ )
+      , stride( stride_ )
+    {}
+
+    DescriptorUpdateTemplateEntry( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DescriptorUpdateTemplateEntry& operator=( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry const *>(&rhs);
+      return *this;
+    }
+
+    DescriptorUpdateTemplateEntry & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstBinding = dstBinding_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateEntry & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstArrayElement = dstArrayElement_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateEntry & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = descriptorCount_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateEntry & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorType = descriptorType_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateEntry & setOffset( size_t offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateEntry & setStride( size_t stride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stride = stride_;
+      return *this;
+    }
+
+    operator VkDescriptorUpdateTemplateEntry const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorUpdateTemplateEntry*>( this );
+    }
+
+    operator VkDescriptorUpdateTemplateEntry &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorUpdateTemplateEntry*>( this );
+    }
+
+    bool operator==( DescriptorUpdateTemplateEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( dstBinding == rhs.dstBinding )
+          && ( dstArrayElement == rhs.dstArrayElement )
+          && ( descriptorCount == rhs.descriptorCount )
+          && ( descriptorType == rhs.descriptorType )
+          && ( offset == rhs.offset )
+          && ( stride == rhs.stride );
+    }
+
+    bool operator!=( DescriptorUpdateTemplateEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint32_t dstBinding;
+    uint32_t dstArrayElement;
+    uint32_t descriptorCount;
+    VULKAN_HPP_NAMESPACE::DescriptorType descriptorType;
+    size_t offset;
+    size_t stride;
+  };
+  static_assert( sizeof( DescriptorUpdateTemplateEntry ) == sizeof( VkDescriptorUpdateTemplateEntry ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorUpdateTemplateEntry>::value, "struct wrapper is not a standard layout!" );
+
+  struct DescriptorUpdateTemplateCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags(),
+                                                             uint32_t descriptorUpdateEntryCount_ = 0,
+                                                             const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ = nullptr,
+                                                             VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet,
+                                                             VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = VULKAN_HPP_NAMESPACE::DescriptorSetLayout(),
+                                                             VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
+                                                             VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = VULKAN_HPP_NAMESPACE::PipelineLayout(),
+                                                             uint32_t set_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , descriptorUpdateEntryCount( descriptorUpdateEntryCount_ )
+      , pDescriptorUpdateEntries( pDescriptorUpdateEntries_ )
+      , templateType( templateType_ )
+      , descriptorSetLayout( descriptorSetLayout_ )
+      , pipelineBindPoint( pipelineBindPoint_ )
+      , pipelineLayout( pipelineLayout_ )
+      , set( set_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo ) - offsetof( DescriptorUpdateTemplateCreateInfo, pNext ) );
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DescriptorUpdateTemplateCreateInfo& operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorUpdateEntryCount = descriptorUpdateEntryCount_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfo & setPDescriptorUpdateEntries( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDescriptorUpdateEntries = pDescriptorUpdateEntries_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfo & setTemplateType( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      templateType = templateType_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfo & setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorSetLayout = descriptorSetLayout_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfo & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineBindPoint = pipelineBindPoint_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfo & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineLayout = pipelineLayout_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfo & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT
+    {
+      set = set_;
+      return *this;
+    }
+
+    operator VkDescriptorUpdateTemplateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( this );
+    }
+
+    operator VkDescriptorUpdateTemplateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorUpdateTemplateCreateInfo*>( this );
+    }
+
+    bool operator==( DescriptorUpdateTemplateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount )
+          && ( pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries )
+          && ( templateType == rhs.templateType )
+          && ( descriptorSetLayout == rhs.descriptorSetLayout )
+          && ( pipelineBindPoint == rhs.pipelineBindPoint )
+          && ( pipelineLayout == rhs.pipelineLayout )
+          && ( set == rhs.set );
+    }
+
+    bool operator!=( DescriptorUpdateTemplateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags;
+    uint32_t descriptorUpdateEntryCount;
+    const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries;
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType;
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout;
+    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint;
+    VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
+    uint32_t set;
+  };
+  static_assert( sizeof( DescriptorUpdateTemplateCreateInfo ) == sizeof( VkDescriptorUpdateTemplateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorUpdateTemplateCreateInfo>::value, "struct wrapper is not a standard layout!" );
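+
+  // Editor's note: illustrative usage sketch only, not part of the generated header.
+  // Each DescriptorUpdateTemplateEntry maps a binding to an offset/stride inside a
+  // user-defined data blob; `MyDescriptorData` and its `imageInfo` member are hypothetical.
+  //
+  //   vk::DescriptorUpdateTemplateEntry entry = vk::DescriptorUpdateTemplateEntry()
+  //       .setDstBinding( 0 )
+  //       .setDstArrayElement( 0 )
+  //       .setDescriptorCount( 1 )
+  //       .setDescriptorType( vk::DescriptorType::eCombinedImageSampler )
+  //       .setOffset( offsetof( MyDescriptorData, imageInfo ) )
+  //       .setStride( sizeof( MyDescriptorData ) );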
+
+  struct DeviceQueueCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags(),
+                                                uint32_t queueFamilyIndex_ = 0,
+                                                uint32_t queueCount_ = 0,
+                                                const float* pQueuePriorities_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , queueFamilyIndex( queueFamilyIndex_ )
+      , queueCount( queueCount_ )
+      , pQueuePriorities( pQueuePriorities_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo ) - offsetof( DeviceQueueCreateInfo, pNext ) );
+      return *this;
+    }
+
+    DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DeviceQueueCreateInfo& operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    DeviceQueueCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceQueueCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DeviceQueueCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndex = queueFamilyIndex_;
+      return *this;
+    }
+
+    DeviceQueueCreateInfo & setQueueCount( uint32_t queueCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueCount = queueCount_;
+      return *this;
+    }
+
+    DeviceQueueCreateInfo & setPQueuePriorities( const float* pQueuePriorities_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueuePriorities = pQueuePriorities_;
+      return *this;
+    }
+
+    operator VkDeviceQueueCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceQueueCreateInfo*>( this );
+    }
+
+    operator VkDeviceQueueCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceQueueCreateInfo*>( this );
+    }
+
+    bool operator==( DeviceQueueCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( queueFamilyIndex == rhs.queueFamilyIndex )
+          && ( queueCount == rhs.queueCount )
+          && ( pQueuePriorities == rhs.pQueuePriorities );
+    }
+
+    bool operator!=( DeviceQueueCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags;
+    uint32_t queueFamilyIndex;
+    uint32_t queueCount;
+    const float* pQueuePriorities;
+  };
+  static_assert( sizeof( DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceQueueCreateInfo>::value, "struct wrapper is not a standard layout!" );
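+
+  // Editor's note: illustrative usage sketch only, not part of the generated header.
+  // Requests one queue from a hypothetical `graphicsFamilyIndex` at default priority;
+  // the priority array must outlive the DeviceCreateInfo that references this struct.
+  //
+  //   float queuePriority = 1.0f;
+  //   vk::DeviceQueueCreateInfo queueInfo = vk::DeviceQueueCreateInfo()
+  //       .setQueueFamilyIndex( graphicsFamilyIndex )
+  //       .setQueueCount( 1 )
+  //       .setPQueuePriorities( &queuePriority );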
+
+  struct PhysicalDeviceFeatures
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 logicOp_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 wideLines_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 largePoints_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : robustBufferAccess( robustBufferAccess_ )
+      , fullDrawIndexUint32( fullDrawIndexUint32_ )
+      , imageCubeArray( imageCubeArray_ )
+      , independentBlend( independentBlend_ )
+      , geometryShader( geometryShader_ )
+      , tessellationShader( tessellationShader_ )
+      , sampleRateShading( sampleRateShading_ )
+      , dualSrcBlend( dualSrcBlend_ )
+      , logicOp( logicOp_ )
+      , multiDrawIndirect( multiDrawIndirect_ )
+      , drawIndirectFirstInstance( drawIndirectFirstInstance_ )
+      , depthClamp( depthClamp_ )
+      , depthBiasClamp( depthBiasClamp_ )
+      , fillModeNonSolid( fillModeNonSolid_ )
+      , depthBounds( depthBounds_ )
+      , wideLines( wideLines_ )
+      , largePoints( largePoints_ )
+      , alphaToOne( alphaToOne_ )
+      , multiViewport( multiViewport_ )
+      , samplerAnisotropy( samplerAnisotropy_ )
+      , textureCompressionETC2( textureCompressionETC2_ )
+      , textureCompressionASTC_LDR( textureCompressionASTC_LDR_ )
+      , textureCompressionBC( textureCompressionBC_ )
+      , occlusionQueryPrecise( occlusionQueryPrecise_ )
+      , pipelineStatisticsQuery( pipelineStatisticsQuery_ )
+      , vertexPipelineStoresAndAtomics( vertexPipelineStoresAndAtomics_ )
+      , fragmentStoresAndAtomics( fragmentStoresAndAtomics_ )
+      , shaderTessellationAndGeometryPointSize( shaderTessellationAndGeometryPointSize_ )
+      , shaderImageGatherExtended( shaderImageGatherExtended_ )
+      , shaderStorageImageExtendedFormats( shaderStorageImageExtendedFormats_ )
+      , shaderStorageImageMultisample( shaderStorageImageMultisample_ )
+      , shaderStorageImageReadWithoutFormat( shaderStorageImageReadWithoutFormat_ )
+      , shaderStorageImageWriteWithoutFormat( shaderStorageImageWriteWithoutFormat_ )
+      , shaderUniformBufferArrayDynamicIndexing( shaderUniformBufferArrayDynamicIndexing_ )
+      , shaderSampledImageArrayDynamicIndexing( shaderSampledImageArrayDynamicIndexing_ )
+      , shaderStorageBufferArrayDynamicIndexing( shaderStorageBufferArrayDynamicIndexing_ )
+      , shaderStorageImageArrayDynamicIndexing( shaderStorageImageArrayDynamicIndexing_ )
+      , shaderClipDistance( shaderClipDistance_ )
+      , shaderCullDistance( shaderCullDistance_ )
+      , shaderFloat64( shaderFloat64_ )
+      , shaderInt64( shaderInt64_ )
+      , shaderInt16( shaderInt16_ )
+      , shaderResourceResidency( shaderResourceResidency_ )
+      , shaderResourceMinLod( shaderResourceMinLod_ )
+      , sparseBinding( sparseBinding_ )
+      , sparseResidencyBuffer( sparseResidencyBuffer_ )
+      , sparseResidencyImage2D( sparseResidencyImage2D_ )
+      , sparseResidencyImage3D( sparseResidencyImage3D_ )
+      , sparseResidency2Samples( sparseResidency2Samples_ )
+      , sparseResidency4Samples( sparseResidency4Samples_ )
+      , sparseResidency8Samples( sparseResidency8Samples_ )
+      , sparseResidency16Samples( sparseResidency16Samples_ )
+      , sparseResidencyAliased( sparseResidencyAliased_ )
+      , variableMultisampleRate( variableMultisampleRate_ )
+      , inheritedQueries( inheritedQueries_ )
+    {}
+
+    PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceFeatures& operator=( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      robustBufferAccess = robustBufferAccess_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setFullDrawIndexUint32( VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fullDrawIndexUint32 = fullDrawIndexUint32_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setImageCubeArray( VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageCubeArray = imageCubeArray_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setIndependentBlend( VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ ) VULKAN_HPP_NOEXCEPT
+    {
+      independentBlend = independentBlend_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometryShader = geometryShader_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tessellationShader = tessellationShader_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSampleRateShading( VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleRateShading = sampleRateShading_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setDualSrcBlend( VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dualSrcBlend = dualSrcBlend_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setLogicOp( VULKAN_HPP_NAMESPACE::Bool32 logicOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      logicOp = logicOp_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setMultiDrawIndirect( VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiDrawIndirect = multiDrawIndirect_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setDrawIndirectFirstInstance( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drawIndirectFirstInstance = drawIndirectFirstInstance_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setDepthClamp( VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthClamp = depthClamp_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setDepthBiasClamp( VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBiasClamp = depthBiasClamp_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setFillModeNonSolid( VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fillModeNonSolid = fillModeNonSolid_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setDepthBounds( VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBounds = depthBounds_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setWideLines( VULKAN_HPP_NAMESPACE::Bool32 wideLines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      wideLines = wideLines_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setLargePoints( VULKAN_HPP_NAMESPACE::Bool32 largePoints_ ) VULKAN_HPP_NOEXCEPT
+    {
+      largePoints = largePoints_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setAlphaToOne( VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ ) VULKAN_HPP_NOEXCEPT
+    {
+      alphaToOne = alphaToOne_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setMultiViewport( VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiViewport = multiViewport_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSamplerAnisotropy( VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samplerAnisotropy = samplerAnisotropy_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setTextureCompressionETC2( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ ) VULKAN_HPP_NOEXCEPT
+    {
+      textureCompressionETC2 = textureCompressionETC2_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setTextureCompressionASTC_LDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ ) VULKAN_HPP_NOEXCEPT
+    {
+      textureCompressionASTC_LDR = textureCompressionASTC_LDR_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setTextureCompressionBC( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ ) VULKAN_HPP_NOEXCEPT
+    {
+      textureCompressionBC = textureCompressionBC_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setOcclusionQueryPrecise( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ ) VULKAN_HPP_NOEXCEPT
+    {
+      occlusionQueryPrecise = occlusionQueryPrecise_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setPipelineStatisticsQuery( VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineStatisticsQuery = pipelineStatisticsQuery_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setVertexPipelineStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setFragmentStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentStoresAndAtomics = fragmentStoresAndAtomics_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderTessellationAndGeometryPointSize( VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderImageGatherExtended( VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderImageGatherExtended = shaderImageGatherExtended_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderStorageImageExtendedFormats( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderStorageImageMultisample( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageImageMultisample = shaderStorageImageMultisample_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderStorageImageReadWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderStorageImageWriteWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderUniformBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderSampledImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderStorageBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderStorageImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderClipDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderClipDistance = shaderClipDistance_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderCullDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderCullDistance = shaderCullDistance_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderFloat64 = shaderFloat64_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderInt64( VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInt64 = shaderInt64_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderInt16( VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInt16 = shaderInt16_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderResourceResidency( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderResourceResidency = shaderResourceResidency_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setShaderResourceMinLod( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderResourceMinLod = shaderResourceMinLod_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseBinding( VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseBinding = sparseBinding_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseResidencyBuffer( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidencyBuffer = sparseResidencyBuffer_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseResidencyImage2D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidencyImage2D = sparseResidencyImage2D_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseResidencyImage3D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidencyImage3D = sparseResidencyImage3D_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseResidency2Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidency2Samples = sparseResidency2Samples_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseResidency4Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidency4Samples = sparseResidency4Samples_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseResidency8Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidency8Samples = sparseResidency8Samples_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseResidency16Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidency16Samples = sparseResidency16Samples_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseResidencyAliased( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidencyAliased = sparseResidencyAliased_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setVariableMultisampleRate( VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      variableMultisampleRate = variableMultisampleRate_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setInheritedQueries( VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inheritedQueries = inheritedQueries_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFeatures*>( this );
+    }
+
+    bool operator==( PhysicalDeviceFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( robustBufferAccess == rhs.robustBufferAccess )
+          && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 )
+          && ( imageCubeArray == rhs.imageCubeArray )
+          && ( independentBlend == rhs.independentBlend )
+          && ( geometryShader == rhs.geometryShader )
+          && ( tessellationShader == rhs.tessellationShader )
+          && ( sampleRateShading == rhs.sampleRateShading )
+          && ( dualSrcBlend == rhs.dualSrcBlend )
+          && ( logicOp == rhs.logicOp )
+          && ( multiDrawIndirect == rhs.multiDrawIndirect )
+          && ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance )
+          && ( depthClamp == rhs.depthClamp )
+          && ( depthBiasClamp == rhs.depthBiasClamp )
+          && ( fillModeNonSolid == rhs.fillModeNonSolid )
+          && ( depthBounds == rhs.depthBounds )
+          && ( wideLines == rhs.wideLines )
+          && ( largePoints == rhs.largePoints )
+          && ( alphaToOne == rhs.alphaToOne )
+          && ( multiViewport == rhs.multiViewport )
+          && ( samplerAnisotropy == rhs.samplerAnisotropy )
+          && ( textureCompressionETC2 == rhs.textureCompressionETC2 )
+          && ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR )
+          && ( textureCompressionBC == rhs.textureCompressionBC )
+          && ( occlusionQueryPrecise == rhs.occlusionQueryPrecise )
+          && ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery )
+          && ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics )
+          && ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics )
+          && ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize )
+          && ( shaderImageGatherExtended == rhs.shaderImageGatherExtended )
+          && ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats )
+          && ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample )
+          && ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat )
+          && ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat )
+          && ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing )
+          && ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing )
+          && ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing )
+          && ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing )
+          && ( shaderClipDistance == rhs.shaderClipDistance )
+          && ( shaderCullDistance == rhs.shaderCullDistance )
+          && ( shaderFloat64 == rhs.shaderFloat64 )
+          && ( shaderInt64 == rhs.shaderInt64 )
+          && ( shaderInt16 == rhs.shaderInt16 )
+          && ( shaderResourceResidency == rhs.shaderResourceResidency )
+          && ( shaderResourceMinLod == rhs.shaderResourceMinLod )
+          && ( sparseBinding == rhs.sparseBinding )
+          && ( sparseResidencyBuffer == rhs.sparseResidencyBuffer )
+          && ( sparseResidencyImage2D == rhs.sparseResidencyImage2D )
+          && ( sparseResidencyImage3D == rhs.sparseResidencyImage3D )
+          && ( sparseResidency2Samples == rhs.sparseResidency2Samples )
+          && ( sparseResidency4Samples == rhs.sparseResidency4Samples )
+          && ( sparseResidency8Samples == rhs.sparseResidency8Samples )
+          && ( sparseResidency16Samples == rhs.sparseResidency16Samples )
+          && ( sparseResidencyAliased == rhs.sparseResidencyAliased )
+          && ( variableMultisampleRate == rhs.variableMultisampleRate )
+          && ( inheritedQueries == rhs.inheritedQueries );
+    }
+
+    bool operator!=( PhysicalDeviceFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess;
+    VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32;
+    VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray;
+    VULKAN_HPP_NAMESPACE::Bool32 independentBlend;
+    VULKAN_HPP_NAMESPACE::Bool32 geometryShader;
+    VULKAN_HPP_NAMESPACE::Bool32 tessellationShader;
+    VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading;
+    VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend;
+    VULKAN_HPP_NAMESPACE::Bool32 logicOp;
+    VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect;
+    VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance;
+    VULKAN_HPP_NAMESPACE::Bool32 depthClamp;
+    VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp;
+    VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid;
+    VULKAN_HPP_NAMESPACE::Bool32 depthBounds;
+    VULKAN_HPP_NAMESPACE::Bool32 wideLines;
+    VULKAN_HPP_NAMESPACE::Bool32 largePoints;
+    VULKAN_HPP_NAMESPACE::Bool32 alphaToOne;
+    VULKAN_HPP_NAMESPACE::Bool32 multiViewport;
+    VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy;
+    VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2;
+    VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR;
+    VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC;
+    VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise;
+    VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery;
+    VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics;
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInt64;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInt16;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod;
+    VULKAN_HPP_NAMESPACE::Bool32 sparseBinding;
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer;
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D;
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D;
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples;
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples;
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples;
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples;
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased;
+    VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate;
+    VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries;
+  };
+  static_assert( sizeof( PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFeatures>::value, "struct wrapper is not a standard layout!" );
+
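Since every setter above returns *this, a feature set can be built as a fluent chain. A minimal usage sketch, assuming <vulkan/vulkan.hpp> is included and vk is the default VULKAN_HPP_NAMESPACE alias (the helper name is hypothetical, not part of the header):

    #include <vulkan/vulkan.hpp>

    // Hypothetical helper: request a couple of core features via the chained setters.
    inline vk::PhysicalDeviceFeatures makeEnabledFeatures()
    {
      vk::PhysicalDeviceFeatures features;
      features.setSamplerAnisotropy( VK_TRUE )
              .setFillModeNonSolid( VK_TRUE );
      return features;
    }
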
+  struct DeviceCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ = VULKAN_HPP_NAMESPACE::DeviceCreateFlags(),
+                                           uint32_t queueCreateInfoCount_ = 0,
+                                           const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos_ = nullptr,
+                                           uint32_t enabledLayerCount_ = 0,
+                                           const char* const* ppEnabledLayerNames_ = nullptr,
+                                           uint32_t enabledExtensionCount_ = 0,
+                                           const char* const* ppEnabledExtensionNames_ = nullptr,
+                                           const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , queueCreateInfoCount( queueCreateInfoCount_ )
+      , pQueueCreateInfos( pQueueCreateInfos_ )
+      , enabledLayerCount( enabledLayerCount_ )
+      , ppEnabledLayerNames( ppEnabledLayerNames_ )
+      , enabledExtensionCount( enabledExtensionCount_ )
+      , ppEnabledExtensionNames( ppEnabledExtensionNames_ )
+      , pEnabledFeatures( pEnabledFeatures_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DeviceCreateInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceCreateInfo ) - offsetof( DeviceCreateInfo, pNext ) );
+      return *this;
+    }
+
+    DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DeviceCreateInfo& operator=( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    DeviceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DeviceCreateInfo & setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueCreateInfoCount = queueCreateInfoCount_;
+      return *this;
+    }
+
+    DeviceCreateInfo & setPQueueCreateInfos( const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueueCreateInfos = pQueueCreateInfos_;
+      return *this;
+    }
+
+    DeviceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledLayerCount = enabledLayerCount_;
+      return *this;
+    }
+
+    DeviceCreateInfo & setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppEnabledLayerNames = ppEnabledLayerNames_;
+      return *this;
+    }
+
+    DeviceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledExtensionCount = enabledExtensionCount_;
+      return *this;
+    }
+
+    DeviceCreateInfo & setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppEnabledExtensionNames = ppEnabledExtensionNames_;
+      return *this;
+    }
+
+    DeviceCreateInfo & setPEnabledFeatures( const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pEnabledFeatures = pEnabledFeatures_;
+      return *this;
+    }
+
+    operator VkDeviceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceCreateInfo*>( this );
+    }
+
+    operator VkDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceCreateInfo*>( this );
+    }
+
+    bool operator==( DeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( queueCreateInfoCount == rhs.queueCreateInfoCount )
+          && ( pQueueCreateInfos == rhs.pQueueCreateInfos )
+          && ( enabledLayerCount == rhs.enabledLayerCount )
+          && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames )
+          && ( enabledExtensionCount == rhs.enabledExtensionCount )
+          && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames )
+          && ( pEnabledFeatures == rhs.pEnabledFeatures );
+    }
+
+    bool operator!=( DeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags;
+    uint32_t queueCreateInfoCount;
+    const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos;
+    uint32_t enabledLayerCount;
+    const char* const* ppEnabledLayerNames;
+    uint32_t enabledExtensionCount;
+    const char* const* ppEnabledExtensionNames;
+    const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures;
+  };
+  static_assert( sizeof( DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
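The enabled features and queue descriptions are then wired into DeviceCreateInfo through its pointer setters. A short sketch under the same assumptions; the queue family index and priority are placeholders, and the DeviceQueueCreateInfo constructor is assumed to follow the same generated (flags, queueFamilyIndex, queueCount, pQueuePriorities) pattern seen throughout this header:

    // Hypothetical sketch: describe one queue and the enabled features for device creation.
    inline void sketchDeviceCreateInfo()
    {
      float queuePriority = 1.0f;   // placeholder priority
      vk::DeviceQueueCreateInfo queueInfo( vk::DeviceQueueCreateFlags(),
                                           /*queueFamilyIndex=*/0, /*queueCount=*/1, &queuePriority );

      vk::PhysicalDeviceFeatures features;
      features.setSamplerAnisotropy( VK_TRUE );

      vk::DeviceCreateInfo createInfo;
      createInfo.setQueueCreateInfoCount( 1 )
                .setPQueueCreateInfos( &queueInfo )
                .setPEnabledFeatures( &features );

      // The conversion operator exposes the same memory as the plain C struct when needed.
      const VkDeviceCreateInfo & cInfo = createInfo;
      (void) cInfo;
    }
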
+  struct DeviceEventInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug ) VULKAN_HPP_NOEXCEPT
+      : deviceEvent( deviceEvent_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT ) - offsetof( DeviceEventInfoEXT, pNext ) );
+      return *this;
+    }
+
+    DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DeviceEventInfoEXT& operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    DeviceEventInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceEventInfoEXT & setDeviceEvent( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceEvent = deviceEvent_;
+      return *this;
+    }
+
+    operator VkDeviceEventInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceEventInfoEXT*>( this );
+    }
+
+    operator VkDeviceEventInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceEventInfoEXT*>( this );
+    }
+
+    bool operator==( DeviceEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceEvent == rhs.deviceEvent );
+    }
+
+    bool operator!=( DeviceEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceEventInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent;
+  };
+  static_assert( sizeof( DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct DeviceGeneratedCommandsFeaturesNVX
+  {
+    VULKAN_HPP_CONSTEXPR DeviceGeneratedCommandsFeaturesNVX( VULKAN_HPP_NAMESPACE::Bool32 computeBindingPointSupport_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : computeBindingPointSupport( computeBindingPointSupport_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsFeaturesNVX & operator=( VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsFeaturesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsFeaturesNVX ) - offsetof( DeviceGeneratedCommandsFeaturesNVX, pNext ) );
+      return *this;
+    }
+
+    DeviceGeneratedCommandsFeaturesNVX( VkDeviceGeneratedCommandsFeaturesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DeviceGeneratedCommandsFeaturesNVX& operator=( VkDeviceGeneratedCommandsFeaturesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsFeaturesNVX const *>(&rhs);
+      return *this;
+    }
+
+    DeviceGeneratedCommandsFeaturesNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceGeneratedCommandsFeaturesNVX & setComputeBindingPointSupport( VULKAN_HPP_NAMESPACE::Bool32 computeBindingPointSupport_ ) VULKAN_HPP_NOEXCEPT
+    {
+      computeBindingPointSupport = computeBindingPointSupport_;
+      return *this;
+    }
+
+    operator VkDeviceGeneratedCommandsFeaturesNVX const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGeneratedCommandsFeaturesNVX*>( this );
+    }
+
+    operator VkDeviceGeneratedCommandsFeaturesNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>( this );
+    }
+
+    bool operator==( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( computeBindingPointSupport == rhs.computeBindingPointSupport );
+    }
+
+    bool operator!=( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGeneratedCommandsFeaturesNVX;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 computeBindingPointSupport;
+  };
+  static_assert( sizeof( DeviceGeneratedCommandsFeaturesNVX ) == sizeof( VkDeviceGeneratedCommandsFeaturesNVX ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceGeneratedCommandsFeaturesNVX>::value, "struct wrapper is not a standard layout!" );
+
+  struct DeviceGeneratedCommandsLimitsNVX
+  {
+    VULKAN_HPP_CONSTEXPR DeviceGeneratedCommandsLimitsNVX( uint32_t maxIndirectCommandsLayoutTokenCount_ = 0,
+                                                           uint32_t maxObjectEntryCounts_ = 0,
+                                                           uint32_t minSequenceCountBufferOffsetAlignment_ = 0,
+                                                           uint32_t minSequenceIndexBufferOffsetAlignment_ = 0,
+                                                           uint32_t minCommandsTokenBufferOffsetAlignment_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : maxIndirectCommandsLayoutTokenCount( maxIndirectCommandsLayoutTokenCount_ )
+      , maxObjectEntryCounts( maxObjectEntryCounts_ )
+      , minSequenceCountBufferOffsetAlignment( minSequenceCountBufferOffsetAlignment_ )
+      , minSequenceIndexBufferOffsetAlignment( minSequenceIndexBufferOffsetAlignment_ )
+      , minCommandsTokenBufferOffsetAlignment( minCommandsTokenBufferOffsetAlignment_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX & operator=( VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX ) - offsetof( DeviceGeneratedCommandsLimitsNVX, pNext ) );
+      return *this;
+    }
+
+    DeviceGeneratedCommandsLimitsNVX( VkDeviceGeneratedCommandsLimitsNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DeviceGeneratedCommandsLimitsNVX& operator=( VkDeviceGeneratedCommandsLimitsNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX const *>(&rhs);
+      return *this;
+    }
+
+    DeviceGeneratedCommandsLimitsNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceGeneratedCommandsLimitsNVX & setMaxIndirectCommandsLayoutTokenCount( uint32_t maxIndirectCommandsLayoutTokenCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxIndirectCommandsLayoutTokenCount = maxIndirectCommandsLayoutTokenCount_;
+      return *this;
+    }
+
+    DeviceGeneratedCommandsLimitsNVX & setMaxObjectEntryCounts( uint32_t maxObjectEntryCounts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxObjectEntryCounts = maxObjectEntryCounts_;
+      return *this;
+    }
+
+    DeviceGeneratedCommandsLimitsNVX & setMinSequenceCountBufferOffsetAlignment( uint32_t minSequenceCountBufferOffsetAlignment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minSequenceCountBufferOffsetAlignment = minSequenceCountBufferOffsetAlignment_;
+      return *this;
+    }
+
+    DeviceGeneratedCommandsLimitsNVX & setMinSequenceIndexBufferOffsetAlignment( uint32_t minSequenceIndexBufferOffsetAlignment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minSequenceIndexBufferOffsetAlignment = minSequenceIndexBufferOffsetAlignment_;
+      return *this;
+    }
+
+    DeviceGeneratedCommandsLimitsNVX & setMinCommandsTokenBufferOffsetAlignment( uint32_t minCommandsTokenBufferOffsetAlignment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minCommandsTokenBufferOffsetAlignment = minCommandsTokenBufferOffsetAlignment_;
+      return *this;
+    }
+
+    operator VkDeviceGeneratedCommandsLimitsNVX const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGeneratedCommandsLimitsNVX*>( this );
+    }
+
+    operator VkDeviceGeneratedCommandsLimitsNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>( this );
+    }
+
+    bool operator==( DeviceGeneratedCommandsLimitsNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxIndirectCommandsLayoutTokenCount == rhs.maxIndirectCommandsLayoutTokenCount )
+          && ( maxObjectEntryCounts == rhs.maxObjectEntryCounts )
+          && ( minSequenceCountBufferOffsetAlignment == rhs.minSequenceCountBufferOffsetAlignment )
+          && ( minSequenceIndexBufferOffsetAlignment == rhs.minSequenceIndexBufferOffsetAlignment )
+          && ( minCommandsTokenBufferOffsetAlignment == rhs.minCommandsTokenBufferOffsetAlignment );
+    }
+
+    bool operator!=( DeviceGeneratedCommandsLimitsNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGeneratedCommandsLimitsNVX;
+    const void* pNext = nullptr;
+    uint32_t maxIndirectCommandsLayoutTokenCount;
+    uint32_t maxObjectEntryCounts;
+    uint32_t minSequenceCountBufferOffsetAlignment;
+    uint32_t minSequenceIndexBufferOffsetAlignment;
+    uint32_t minCommandsTokenBufferOffsetAlignment;
+  };
+  static_assert( sizeof( DeviceGeneratedCommandsLimitsNVX ) == sizeof( VkDeviceGeneratedCommandsLimitsNVX ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceGeneratedCommandsLimitsNVX>::value, "struct wrapper is not a standard layout!" );
+
+  struct DeviceGroupBindSparseInfo
+  {
+    VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( uint32_t resourceDeviceIndex_ = 0,
+                                                    uint32_t memoryDeviceIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : resourceDeviceIndex( resourceDeviceIndex_ )
+      , memoryDeviceIndex( memoryDeviceIndex_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo ) - offsetof( DeviceGroupBindSparseInfo, pNext ) );
+      return *this;
+    }
+
+    DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DeviceGroupBindSparseInfo& operator=( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo const *>(&rhs);
+      return *this;
+    }
+
+    DeviceGroupBindSparseInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceGroupBindSparseInfo & setResourceDeviceIndex( uint32_t resourceDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      resourceDeviceIndex = resourceDeviceIndex_;
+      return *this;
+    }
+
+    DeviceGroupBindSparseInfo & setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryDeviceIndex = memoryDeviceIndex_;
+      return *this;
+    }
+
+    operator VkDeviceGroupBindSparseInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupBindSparseInfo*>( this );
+    }
+
+    operator VkDeviceGroupBindSparseInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupBindSparseInfo*>( this );
+    }
+
+    bool operator==( DeviceGroupBindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( resourceDeviceIndex == rhs.resourceDeviceIndex )
+          && ( memoryDeviceIndex == rhs.memoryDeviceIndex );
+    }
+
+    bool operator!=( DeviceGroupBindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupBindSparseInfo;
+    const void* pNext = nullptr;
+    uint32_t resourceDeviceIndex;
+    uint32_t memoryDeviceIndex;
+  };
+  static_assert( sizeof( DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceGroupBindSparseInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct DeviceGroupCommandBufferBeginInfo
+  {
+    VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( uint32_t deviceMask_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : deviceMask( deviceMask_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo ) - offsetof( DeviceGroupCommandBufferBeginInfo, pNext ) );
+      return *this;
+    }
+
+    DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DeviceGroupCommandBufferBeginInfo& operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo const *>(&rhs);
+      return *this;
+    }
+
+    DeviceGroupCommandBufferBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceGroupCommandBufferBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceMask = deviceMask_;
+      return *this;
+    }
+
+    operator VkDeviceGroupCommandBufferBeginInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupCommandBufferBeginInfo*>( this );
+    }
+
+    operator VkDeviceGroupCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupCommandBufferBeginInfo*>( this );
+    }
+
+    bool operator==( DeviceGroupCommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceMask == rhs.deviceMask );
+    }
+
+    bool operator!=( DeviceGroupCommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo;
+    const void* pNext = nullptr;
+    uint32_t deviceMask;
+  };
+  static_assert( sizeof( DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceGroupCommandBufferBeginInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct DeviceGroupDeviceCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( uint32_t physicalDeviceCount_ = 0,
+                                                      const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : physicalDeviceCount( physicalDeviceCount_ )
+      , pPhysicalDevices( pPhysicalDevices_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo ) - offsetof( DeviceGroupDeviceCreateInfo, pNext ) );
+      return *this;
+    }
+
+    DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DeviceGroupDeviceCreateInfo& operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    DeviceGroupDeviceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceGroupDeviceCreateInfo & setPhysicalDeviceCount( uint32_t physicalDeviceCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      physicalDeviceCount = physicalDeviceCount_;
+      return *this;
+    }
+
+    DeviceGroupDeviceCreateInfo & setPPhysicalDevices( const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPhysicalDevices = pPhysicalDevices_;
+      return *this;
+    }
+
+    operator VkDeviceGroupDeviceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupDeviceCreateInfo*>( this );
+    }
+
+    operator VkDeviceGroupDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupDeviceCreateInfo*>( this );
+    }
+
+    bool operator==( DeviceGroupDeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( physicalDeviceCount == rhs.physicalDeviceCount )
+          && ( pPhysicalDevices == rhs.pPhysicalDevices );
+    }
+
+    bool operator!=( DeviceGroupDeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo;
+    const void* pNext = nullptr;
+    uint32_t physicalDeviceCount;
+    const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices;
+  };
+  static_assert( sizeof( DeviceGroupDeviceCreateInfo ) == sizeof( VkDeviceGroupDeviceCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceGroupDeviceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
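Extension structures such as DeviceGroupDeviceCreateInfo are not passed to device creation directly; they are attached to DeviceCreateInfo through the pNext chain. A hypothetical sketch (assumes <vector>, that groupMembers holds one enumerated device group, and that groupInfo outlives the create call):

    #include <vector>

    // Hypothetical sketch: chain DeviceGroupDeviceCreateInfo into DeviceCreateInfo via pNext.
    inline void attachDeviceGroup( const std::vector<vk::PhysicalDevice> & groupMembers,
                                   vk::DeviceGroupDeviceCreateInfo & groupInfo,
                                   vk::DeviceCreateInfo & createInfo )
    {
      groupInfo.setPhysicalDeviceCount( static_cast<uint32_t>( groupMembers.size() ) )
               .setPPhysicalDevices( groupMembers.data() );
      createInfo.setPNext( &groupInfo );   // extension structs are linked via the pNext chain
    }
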
+  struct DeviceGroupPresentCapabilitiesKHR
+  {
+    DeviceGroupPresentCapabilitiesKHR( std::array<uint32_t,VK_MAX_DEVICE_GROUP_SIZE> const& presentMask_ = { { 0 } },
+                                       VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR() ) VULKAN_HPP_NOEXCEPT
+      : presentMask{}
+      , modes( modes_ )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<uint32_t,VK_MAX_DEVICE_GROUP_SIZE,VK_MAX_DEVICE_GROUP_SIZE>::copy( presentMask, presentMask_ );
+    }
+
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR ) - offsetof( DeviceGroupPresentCapabilitiesKHR, pNext ) );
+      return *this;
+    }
+
+    DeviceGroupPresentCapabilitiesKHR( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DeviceGroupPresentCapabilitiesKHR& operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkDeviceGroupPresentCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupPresentCapabilitiesKHR*>( this );
+    }
+
+    operator VkDeviceGroupPresentCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>( this );
+    }
+
+    bool operator==( DeviceGroupPresentCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memcmp( presentMask, rhs.presentMask, VK_MAX_DEVICE_GROUP_SIZE * sizeof( uint32_t ) ) == 0 )
+          && ( modes == rhs.modes );
+    }
+
+    bool operator!=( DeviceGroupPresentCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
+    const void* pNext = nullptr;
+    uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE];
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
+  };
+  static_assert( sizeof( DeviceGroupPresentCapabilitiesKHR ) == sizeof( VkDeviceGroupPresentCapabilitiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceGroupPresentCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct DeviceGroupPresentInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( uint32_t swapchainCount_ = 0,
+                                                    const uint32_t* pDeviceMasks_ = nullptr,
+                                                    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal ) VULKAN_HPP_NOEXCEPT
+      : swapchainCount( swapchainCount_ )
+      , pDeviceMasks( pDeviceMasks_ )
+      , mode( mode_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR ) - offsetof( DeviceGroupPresentInfoKHR, pNext ) );
+      return *this;
+    }
+
+    DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DeviceGroupPresentInfoKHR& operator=( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    DeviceGroupPresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceGroupPresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = swapchainCount_;
+      return *this;
+    }
+
+    DeviceGroupPresentInfoKHR & setPDeviceMasks( const uint32_t* pDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDeviceMasks = pDeviceMasks_;
+      return *this;
+    }
+
+    DeviceGroupPresentInfoKHR & setMode( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mode = mode_;
+      return *this;
+    }
+
+    operator VkDeviceGroupPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupPresentInfoKHR*>( this );
+    }
+
+    operator VkDeviceGroupPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupPresentInfoKHR*>( this );
+    }
+
+    bool operator==( DeviceGroupPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchainCount == rhs.swapchainCount )
+          && ( pDeviceMasks == rhs.pDeviceMasks )
+          && ( mode == rhs.mode );
+    }
+
+    bool operator!=( DeviceGroupPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentInfoKHR;
+    const void* pNext = nullptr;
+    uint32_t swapchainCount;
+    const uint32_t* pDeviceMasks;
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode;
+  };
+  static_assert( sizeof( DeviceGroupPresentInfoKHR ) == sizeof( VkDeviceGroupPresentInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceGroupPresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct DeviceGroupRenderPassBeginInfo
+  {
+    VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_ = 0,
+                                                         uint32_t deviceRenderAreaCount_ = 0,
+                                                         const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : deviceMask( deviceMask_ )
+      , deviceRenderAreaCount( deviceRenderAreaCount_ )
+      , pDeviceRenderAreas( pDeviceRenderAreas_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo ) - offsetof( DeviceGroupRenderPassBeginInfo, pNext ) );
+      return *this;
+    }
+
+    DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DeviceGroupRenderPassBeginInfo& operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo const *>(&rhs);
+      return *this;
+    }
+
+    DeviceGroupRenderPassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceGroupRenderPassBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceMask = deviceMask_;
+      return *this;
+    }
+
+    DeviceGroupRenderPassBeginInfo & setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceRenderAreaCount = deviceRenderAreaCount_;
+      return *this;
+    }
+
+    DeviceGroupRenderPassBeginInfo & setPDeviceRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDeviceRenderAreas = pDeviceRenderAreas_;
+      return *this;
+    }
+
+    operator VkDeviceGroupRenderPassBeginInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupRenderPassBeginInfo*>( this );
+    }
+
+    operator VkDeviceGroupRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupRenderPassBeginInfo*>( this );
+    }
+
+    bool operator==( DeviceGroupRenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceMask == rhs.deviceMask )
+          && ( deviceRenderAreaCount == rhs.deviceRenderAreaCount )
+          && ( pDeviceRenderAreas == rhs.pDeviceRenderAreas );
+    }
+
+    bool operator!=( DeviceGroupRenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo;
+    const void* pNext = nullptr;
+    uint32_t deviceMask;
+    uint32_t deviceRenderAreaCount;
+    const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas;
+  };
+  static_assert( sizeof( DeviceGroupRenderPassBeginInfo ) == sizeof( VkDeviceGroupRenderPassBeginInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceGroupRenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct DeviceGroupSubmitInfo
+  {
+    VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( uint32_t waitSemaphoreCount_ = 0,
+                                                const uint32_t* pWaitSemaphoreDeviceIndices_ = nullptr,
+                                                uint32_t commandBufferCount_ = 0,
+                                                const uint32_t* pCommandBufferDeviceMasks_ = nullptr,
+                                                uint32_t signalSemaphoreCount_ = 0,
+                                                const uint32_t* pSignalSemaphoreDeviceIndices_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : waitSemaphoreCount( waitSemaphoreCount_ )
+      , pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ )
+      , commandBufferCount( commandBufferCount_ )
+      , pCommandBufferDeviceMasks( pCommandBufferDeviceMasks_ )
+      , signalSemaphoreCount( signalSemaphoreCount_ )
+      , pSignalSemaphoreDeviceIndices( pSignalSemaphoreDeviceIndices_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo ) - offsetof( DeviceGroupSubmitInfo, pNext ) );
+      return *this;
+    }
+
+    DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DeviceGroupSubmitInfo& operator=( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo const *>(&rhs);
+      return *this;
+    }
+
+    DeviceGroupSubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceGroupSubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreCount = waitSemaphoreCount_;
+      return *this;
+    }
+
+    DeviceGroupSubmitInfo & setPWaitSemaphoreDeviceIndices( const uint32_t* pWaitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_;
+      return *this;
+    }
+
+    DeviceGroupSubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      commandBufferCount = commandBufferCount_;
+      return *this;
+    }
+
+    DeviceGroupSubmitInfo & setPCommandBufferDeviceMasks( const uint32_t* pCommandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_;
+      return *this;
+    }
+
+    DeviceGroupSubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreCount = signalSemaphoreCount_;
+      return *this;
+    }
+
+    DeviceGroupSubmitInfo & setPSignalSemaphoreDeviceIndices( const uint32_t* pSignalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_;
+      return *this;
+    }
+
+    operator VkDeviceGroupSubmitInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupSubmitInfo*>( this );
+    }
+
+    operator VkDeviceGroupSubmitInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupSubmitInfo*>( this );
+    }
+
+    bool operator==( DeviceGroupSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+          && ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices )
+          && ( commandBufferCount == rhs.commandBufferCount )
+          && ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks )
+          && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
+          && ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices );
+    }
+
+    bool operator!=( DeviceGroupSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSubmitInfo;
+    const void* pNext = nullptr;
+    uint32_t waitSemaphoreCount;
+    const uint32_t* pWaitSemaphoreDeviceIndices;
+    uint32_t commandBufferCount;
+    const uint32_t* pCommandBufferDeviceMasks;
+    uint32_t signalSemaphoreCount;
+    const uint32_t* pSignalSemaphoreDeviceIndices;
+  };
+  static_assert( sizeof( DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceGroupSubmitInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct DeviceGroupSwapchainCreateInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR() ) VULKAN_HPP_NOEXCEPT
+      : modes( modes_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR ) - offsetof( DeviceGroupSwapchainCreateInfoKHR, pNext ) );
+      return *this;
+    }
+
+    DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DeviceGroupSwapchainCreateInfoKHR& operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    DeviceGroupSwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceGroupSwapchainCreateInfoKHR & setModes( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      modes = modes_;
+      return *this;
+    }
+
+    operator VkDeviceGroupSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupSwapchainCreateInfoKHR*>( this );
+    }
+
+    operator VkDeviceGroupSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupSwapchainCreateInfoKHR*>( this );
+    }
+
+    bool operator==( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( modes == rhs.modes );
+    }
+
+    bool operator!=( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
+  };
+  static_assert( sizeof( DeviceGroupSwapchainCreateInfoKHR ) == sizeof( VkDeviceGroupSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceGroupSwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct DeviceMemoryOpaqueCaptureAddressInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfoKHR( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = VULKAN_HPP_NAMESPACE::DeviceMemory() ) VULKAN_HPP_NOEXCEPT
+      : memory( memory_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfoKHR & operator=( VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfoKHR ) - offsetof( DeviceMemoryOpaqueCaptureAddressInfoKHR, pNext ) );
+      return *this;
+    }
+
+    DeviceMemoryOpaqueCaptureAddressInfoKHR( VkDeviceMemoryOpaqueCaptureAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DeviceMemoryOpaqueCaptureAddressInfoKHR& operator=( VkDeviceMemoryOpaqueCaptureAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    DeviceMemoryOpaqueCaptureAddressInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceMemoryOpaqueCaptureAddressInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    operator VkDeviceMemoryOpaqueCaptureAddressInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfoKHR*>( this );
+    }
+
+    operator VkDeviceMemoryOpaqueCaptureAddressInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceMemoryOpaqueCaptureAddressInfoKHR*>( this );
+    }
+
+    bool operator==( DeviceMemoryOpaqueCaptureAddressInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memory == rhs.memory );
+    }
+
+    bool operator!=( DeviceMemoryOpaqueCaptureAddressInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory;
+  };
+  static_assert( sizeof( DeviceMemoryOpaqueCaptureAddressInfoKHR ) == sizeof( VkDeviceMemoryOpaqueCaptureAddressInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceMemoryOpaqueCaptureAddressInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct DeviceMemoryOverallocationCreateInfoAMD
+  {
+    VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault ) VULKAN_HPP_NOEXCEPT
+      : overallocationBehavior( overallocationBehavior_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD & operator=( VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD ) - offsetof( DeviceMemoryOverallocationCreateInfoAMD, pNext ) );
+      return *this;
+    }
+
+    DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DeviceMemoryOverallocationCreateInfoAMD& operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD const *>(&rhs);
+      return *this;
+    }
+
+    DeviceMemoryOverallocationCreateInfoAMD & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceMemoryOverallocationCreateInfoAMD & setOverallocationBehavior( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ ) VULKAN_HPP_NOEXCEPT
+    {
+      overallocationBehavior = overallocationBehavior_;
+      return *this;
+    }
+
+    operator VkDeviceMemoryOverallocationCreateInfoAMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceMemoryOverallocationCreateInfoAMD*>( this );
+    }
+
+    operator VkDeviceMemoryOverallocationCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceMemoryOverallocationCreateInfoAMD*>( this );
+    }
+
+    bool operator==( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( overallocationBehavior == rhs.overallocationBehavior );
+    }
+
+    bool operator!=( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior;
+  };
+  static_assert( sizeof( DeviceMemoryOverallocationCreateInfoAMD ) == sizeof( VkDeviceMemoryOverallocationCreateInfoAMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceMemoryOverallocationCreateInfoAMD>::value, "struct wrapper is not a standard layout!" );
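+
+  // Illustrative usage sketch (editor addition, not generated code): like the other
+  // *CreateInfoAMD/EXT extension structs in this header, DeviceMemoryOverallocationCreateInfoAMD
+  // is chained into DeviceCreateInfo::pNext before device creation. The helper name and its
+  // parameters are hypothetical; the queue/extension setup of the create info is assumed to be
+  // done elsewhere.
+  inline void exampleChainOverallocationBehavior( DeviceCreateInfo & deviceCreateInfo,
+                                                  DeviceMemoryOverallocationCreateInfoAMD & overallocationInfo )
+  {
+    // Disallow driver-internal overallocation, then hook the struct into the pNext chain.
+    overallocationInfo.setOverallocationBehavior( MemoryOverallocationBehaviorAMD::eDisallowed );
+    deviceCreateInfo.setPNext( &overallocationInfo );
+  }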
+
+  struct DeviceQueueGlobalPriorityCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoEXT( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow ) VULKAN_HPP_NOEXCEPT
+      : globalPriority( globalPriority_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoEXT ) - offsetof( DeviceQueueGlobalPriorityCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    DeviceQueueGlobalPriorityCreateInfoEXT( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DeviceQueueGlobalPriorityCreateInfoEXT& operator=( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    DeviceQueueGlobalPriorityCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceQueueGlobalPriorityCreateInfoEXT & setGlobalPriority( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ ) VULKAN_HPP_NOEXCEPT
+    {
+      globalPriority = globalPriority_;
+      return *this;
+    }
+
+    operator VkDeviceQueueGlobalPriorityCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceQueueGlobalPriorityCreateInfoEXT*>( this );
+    }
+
+    operator VkDeviceQueueGlobalPriorityCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceQueueGlobalPriorityCreateInfoEXT*>( this );
+    }
+
+    bool operator==( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( globalPriority == rhs.globalPriority );
+    }
+
+    bool operator!=( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority;
+  };
+  static_assert( sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceQueueGlobalPriorityCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct DeviceQueueInfo2
+  {
+    VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags(),
+                                           uint32_t queueFamilyIndex_ = 0,
+                                           uint32_t queueIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , queueFamilyIndex( queueFamilyIndex_ )
+      , queueIndex( queueIndex_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & operator=( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 ) - offsetof( DeviceQueueInfo2, pNext ) );
+      return *this;
+    }
+
+    DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DeviceQueueInfo2& operator=( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const *>(&rhs);
+      return *this;
+    }
+
+    DeviceQueueInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceQueueInfo2 & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DeviceQueueInfo2 & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndex = queueFamilyIndex_;
+      return *this;
+    }
+
+    DeviceQueueInfo2 & setQueueIndex( uint32_t queueIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueIndex = queueIndex_;
+      return *this;
+    }
+
+    operator VkDeviceQueueInfo2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceQueueInfo2*>( this );
+    }
+
+    operator VkDeviceQueueInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceQueueInfo2*>( this );
+    }
+
+    bool operator==( DeviceQueueInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( queueFamilyIndex == rhs.queueFamilyIndex )
+          && ( queueIndex == rhs.queueIndex );
+    }
+
+    bool operator!=( DeviceQueueInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueInfo2;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags;
+    uint32_t queueFamilyIndex;
+    uint32_t queueIndex;
+  };
+  static_assert( sizeof( DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceQueueInfo2>::value, "struct wrapper is not a standard layout!" );
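+
+  // Illustrative usage sketch (editor addition, not generated code): DeviceQueueInfo2 is
+  // typically filled via the fluent setters above and passed to Device::getQueue2. The helper
+  // below and its parameters are hypothetical; `device` and `queueFamilyIndex` are assumed to
+  // come from device creation with a protected-capable queue.
+  inline Queue exampleGetProtectedQueue( Device device, uint32_t queueFamilyIndex )
+  {
+    DeviceQueueInfo2 queueInfo = DeviceQueueInfo2()
+                                     .setFlags( DeviceQueueCreateFlagBits::eProtected )
+                                     .setQueueFamilyIndex( queueFamilyIndex )
+                                     .setQueueIndex( 0 );
+    return device.getQueue2( queueInfo );
+  }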
+
+  struct DispatchIndirectCommand
+  {
+    VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( uint32_t x_ = 0,
+                                                  uint32_t y_ = 0,
+                                                  uint32_t z_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : x( x_ )
+      , y( y_ )
+      , z( z_ )
+    {}
+
+    DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DispatchIndirectCommand& operator=( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand const *>(&rhs);
+      return *this;
+    }
+
+    DispatchIndirectCommand & setX( uint32_t x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    DispatchIndirectCommand & setY( uint32_t y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+    DispatchIndirectCommand & setZ( uint32_t z_ ) VULKAN_HPP_NOEXCEPT
+    {
+      z = z_;
+      return *this;
+    }
+
+    operator VkDispatchIndirectCommand const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDispatchIndirectCommand*>( this );
+    }
+
+    operator VkDispatchIndirectCommand &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDispatchIndirectCommand*>( this );
+    }
+
+    bool operator==( DispatchIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( x == rhs.x )
+          && ( y == rhs.y )
+          && ( z == rhs.z );
+    }
+
+    bool operator!=( DispatchIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint32_t x;
+    uint32_t y;
+    uint32_t z;
+  };
+  static_assert( sizeof( DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DispatchIndirectCommand>::value, "struct wrapper is not a standard layout!" );
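+
+  // Illustrative usage sketch (editor addition, not generated code): DispatchIndirectCommand
+  // matches the layout consumed by vkCmdDispatchIndirect, so it is normally written into a
+  // buffer whose buffer/offset pair is handed to CommandBuffer::dispatchIndirect. The helper
+  // and its parameters are hypothetical; `mapped` is assumed to point at host-visible memory
+  // backing `indirectBuffer`, and `commandBuffer` is assumed to be in the recording state.
+  inline void exampleRecordIndirectDispatch( void * mapped, Buffer indirectBuffer, CommandBuffer commandBuffer )
+  {
+    DispatchIndirectCommand dispatch = DispatchIndirectCommand().setX( 64 ).setY( 1 ).setZ( 1 );
+    memcpy( mapped, &dispatch, sizeof( dispatch ) );      // write the command into the mapped buffer
+    commandBuffer.dispatchIndirect( indirectBuffer, 0 );  // read it back at offset 0 when the command executes
+  }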
+
+  struct DisplayEventInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut ) VULKAN_HPP_NOEXCEPT
+      : displayEvent( displayEvent_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT ) - offsetof( DisplayEventInfoEXT, pNext ) );
+      return *this;
+    }
+
+    DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplayEventInfoEXT& operator=( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    DisplayEventInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DisplayEventInfoEXT & setDisplayEvent( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displayEvent = displayEvent_;
+      return *this;
+    }
+
+    operator VkDisplayEventInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayEventInfoEXT*>( this );
+    }
+
+    operator VkDisplayEventInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayEventInfoEXT*>( this );
+    }
+
+    bool operator==( DisplayEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( displayEvent == rhs.displayEvent );
+    }
+
+    bool operator!=( DisplayEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayEventInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent;
+  };
+  static_assert( sizeof( DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplayModeParametersKHR
+  {
+    VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ = VULKAN_HPP_NAMESPACE::Extent2D(),
+                                                   uint32_t refreshRate_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : visibleRegion( visibleRegion_ )
+      , refreshRate( refreshRate_ )
+    {}
+
+    DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplayModeParametersKHR& operator=( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const *>(&rhs);
+      return *this;
+    }
+
+    DisplayModeParametersKHR & setVisibleRegion( VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      visibleRegion = visibleRegion_;
+      return *this;
+    }
+
+    DisplayModeParametersKHR & setRefreshRate( uint32_t refreshRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      refreshRate = refreshRate_;
+      return *this;
+    }
+
+    operator VkDisplayModeParametersKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayModeParametersKHR*>( this );
+    }
+
+    operator VkDisplayModeParametersKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayModeParametersKHR*>( this );
+    }
+
+    bool operator==( DisplayModeParametersKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( visibleRegion == rhs.visibleRegion )
+          && ( refreshRate == rhs.refreshRate );
+    }
+
+    bool operator!=( DisplayModeParametersKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::Extent2D visibleRegion;
+    uint32_t refreshRate;
+  };
+  static_assert( sizeof( DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayModeParametersKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplayModeCreateInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ = VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR(),
+                                                   VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR() ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , parameters( parameters_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR ) - offsetof( DisplayModeCreateInfoKHR, pNext ) );
+      return *this;
+    }
+
+    DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplayModeCreateInfoKHR& operator=( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    DisplayModeCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DisplayModeCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DisplayModeCreateInfoKHR & setParameters( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ ) VULKAN_HPP_NOEXCEPT
+    {
+      parameters = parameters_;
+      return *this;
+    }
+
+    operator VkDisplayModeCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( this );
+    }
+
+    operator VkDisplayModeCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayModeCreateInfoKHR*>( this );
+    }
+
+    bool operator==( DisplayModeCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( parameters == rhs.parameters );
+    }
+
+    bool operator!=( DisplayModeCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeCreateInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags;
+    VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters;
+  };
+  static_assert( sizeof( DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayModeCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplayModePropertiesKHR
+  {
+    DisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = VULKAN_HPP_NAMESPACE::DisplayModeKHR(),
+                              VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR() ) VULKAN_HPP_NOEXCEPT
+      : displayMode( displayMode_ )
+      , parameters( parameters_ )
+    {}
+
+    DisplayModePropertiesKHR( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplayModePropertiesKHR& operator=( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkDisplayModePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayModePropertiesKHR*>( this );
+    }
+
+    operator VkDisplayModePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayModePropertiesKHR*>( this );
+    }
+
+    bool operator==( DisplayModePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( displayMode == rhs.displayMode )
+          && ( parameters == rhs.parameters );
+    }
+
+    bool operator!=( DisplayModePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode;
+    VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters;
+  };
+  static_assert( sizeof( DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayModePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplayModeProperties2KHR
+  {
+    DisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties_ = VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR() ) VULKAN_HPP_NOEXCEPT
+      : displayModeProperties( displayModeProperties_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR & operator=( VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR ) - offsetof( DisplayModeProperties2KHR, pNext ) );
+      return *this;
+    }
+
+    DisplayModeProperties2KHR( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplayModeProperties2KHR& operator=( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkDisplayModeProperties2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayModeProperties2KHR*>( this );
+    }
+
+    operator VkDisplayModeProperties2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayModeProperties2KHR*>( this );
+    }
+
+    bool operator==( DisplayModeProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( displayModeProperties == rhs.displayModeProperties );
+    }
+
+    bool operator!=( DisplayModeProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeProperties2KHR;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties;
+  };
+  static_assert( sizeof( DisplayModeProperties2KHR ) == sizeof( VkDisplayModeProperties2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayModeProperties2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplayNativeHdrSurfaceCapabilitiesAMD
+  {
+    DisplayNativeHdrSurfaceCapabilitiesAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : localDimmingSupport( localDimmingSupport_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD ) - offsetof( DisplayNativeHdrSurfaceCapabilitiesAMD, pNext ) );
+      return *this;
+    }
+
+    DisplayNativeHdrSurfaceCapabilitiesAMD( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplayNativeHdrSurfaceCapabilitiesAMD& operator=( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD const *>(&rhs);
+      return *this;
+    }
+
+    operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayNativeHdrSurfaceCapabilitiesAMD*>( this );
+    }
+
+    operator VkDisplayNativeHdrSurfaceCapabilitiesAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayNativeHdrSurfaceCapabilitiesAMD*>( this );
+    }
+
+    bool operator==( DisplayNativeHdrSurfaceCapabilitiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( localDimmingSupport == rhs.localDimmingSupport );
+    }
+
+    bool operator!=( DisplayNativeHdrSurfaceCapabilitiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport;
+  };
+  static_assert( sizeof( DisplayNativeHdrSurfaceCapabilitiesAMD ) == sizeof( VkDisplayNativeHdrSurfaceCapabilitiesAMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayNativeHdrSurfaceCapabilitiesAMD>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplayPlaneCapabilitiesKHR
+  {
+    DisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha_ = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR(),
+                                 VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition_ = VULKAN_HPP_NAMESPACE::Offset2D(),
+                                 VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition_ = VULKAN_HPP_NAMESPACE::Offset2D(),
+                                 VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent_ = VULKAN_HPP_NAMESPACE::Extent2D(),
+                                 VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent_ = VULKAN_HPP_NAMESPACE::Extent2D(),
+                                 VULKAN_HPP_NAMESPACE::Offset2D minDstPosition_ = VULKAN_HPP_NAMESPACE::Offset2D(),
+                                 VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition_ = VULKAN_HPP_NAMESPACE::Offset2D(),
+                                 VULKAN_HPP_NAMESPACE::Extent2D minDstExtent_ = VULKAN_HPP_NAMESPACE::Extent2D(),
+                                 VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent_ = VULKAN_HPP_NAMESPACE::Extent2D() ) VULKAN_HPP_NOEXCEPT
+      : supportedAlpha( supportedAlpha_ )
+      , minSrcPosition( minSrcPosition_ )
+      , maxSrcPosition( maxSrcPosition_ )
+      , minSrcExtent( minSrcExtent_ )
+      , maxSrcExtent( maxSrcExtent_ )
+      , minDstPosition( minDstPosition_ )
+      , maxDstPosition( maxDstPosition_ )
+      , minDstExtent( minDstExtent_ )
+      , maxDstExtent( maxDstExtent_ )
+    {}
+
+    DisplayPlaneCapabilitiesKHR( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplayPlaneCapabilitiesKHR& operator=( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkDisplayPlaneCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPlaneCapabilitiesKHR*>( this );
+    }
+
+    operator VkDisplayPlaneCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( this );
+    }
+
+    bool operator==( DisplayPlaneCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( supportedAlpha == rhs.supportedAlpha )
+          && ( minSrcPosition == rhs.minSrcPosition )
+          && ( maxSrcPosition == rhs.maxSrcPosition )
+          && ( minSrcExtent == rhs.minSrcExtent )
+          && ( maxSrcExtent == rhs.maxSrcExtent )
+          && ( minDstPosition == rhs.minDstPosition )
+          && ( maxDstPosition == rhs.maxDstPosition )
+          && ( minDstExtent == rhs.minDstExtent )
+          && ( maxDstExtent == rhs.maxDstExtent );
+    }
+
+    bool operator!=( DisplayPlaneCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha;
+    VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition;
+    VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition;
+    VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent;
+    VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent;
+    VULKAN_HPP_NAMESPACE::Offset2D minDstPosition;
+    VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition;
+    VULKAN_HPP_NAMESPACE::Extent2D minDstExtent;
+    VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent;
+  };
+  static_assert( sizeof( DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayPlaneCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplayPlaneCapabilities2KHR
+  {
+    DisplayPlaneCapabilities2KHR( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities_ = VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR() ) VULKAN_HPP_NOEXCEPT
+      : capabilities( capabilities_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR & operator=( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR ) - offsetof( DisplayPlaneCapabilities2KHR, pNext ) );
+      return *this;
+    }
+
+    DisplayPlaneCapabilities2KHR( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplayPlaneCapabilities2KHR& operator=( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkDisplayPlaneCapabilities2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPlaneCapabilities2KHR*>( this );
+    }
+
+    operator VkDisplayPlaneCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPlaneCapabilities2KHR*>( this );
+    }
+
+    bool operator==( DisplayPlaneCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( capabilities == rhs.capabilities );
+    }
+
+    bool operator!=( DisplayPlaneCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneCapabilities2KHR;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities;
+  };
+  static_assert( sizeof( DisplayPlaneCapabilities2KHR ) == sizeof( VkDisplayPlaneCapabilities2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayPlaneCapabilities2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplayPlaneInfo2KHR
+  {
+    VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ = VULKAN_HPP_NAMESPACE::DisplayModeKHR(),
+                                               uint32_t planeIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : mode( mode_ )
+      , planeIndex( planeIndex_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & operator=( VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR ) - offsetof( DisplayPlaneInfo2KHR, pNext ) );
+      return *this;
+    }
+
+    DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplayPlaneInfo2KHR& operator=( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR const *>(&rhs);
+      return *this;
+    }
+
+    DisplayPlaneInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DisplayPlaneInfo2KHR & setMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mode = mode_;
+      return *this;
+    }
+
+    DisplayPlaneInfo2KHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      planeIndex = planeIndex_;
+      return *this;
+    }
+
+    operator VkDisplayPlaneInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPlaneInfo2KHR*>( this );
+    }
+
+    operator VkDisplayPlaneInfo2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPlaneInfo2KHR*>( this );
+    }
+
+    bool operator==( DisplayPlaneInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( mode == rhs.mode )
+          && ( planeIndex == rhs.planeIndex );
+    }
+
+    bool operator!=( DisplayPlaneInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneInfo2KHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
+    uint32_t planeIndex;
+  };
+  static_assert( sizeof( DisplayPlaneInfo2KHR ) == sizeof( VkDisplayPlaneInfo2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayPlaneInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplayPlanePropertiesKHR
+  {
+    DisplayPlanePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay_ = VULKAN_HPP_NAMESPACE::DisplayKHR(),
+                               uint32_t currentStackIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : currentDisplay( currentDisplay_ )
+      , currentStackIndex( currentStackIndex_ )
+    {}
+
+    DisplayPlanePropertiesKHR( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplayPlanePropertiesKHR& operator=( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkDisplayPlanePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPlanePropertiesKHR*>( this );
+    }
+
+    operator VkDisplayPlanePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPlanePropertiesKHR*>( this );
+    }
+
+    bool operator==( DisplayPlanePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( currentDisplay == rhs.currentDisplay )
+          && ( currentStackIndex == rhs.currentStackIndex );
+    }
+
+    bool operator!=( DisplayPlanePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay;
+    uint32_t currentStackIndex;
+  };
+  static_assert( sizeof( DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayPlanePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplayPlaneProperties2KHR
+  {
+    DisplayPlaneProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties_ = VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR() ) VULKAN_HPP_NOEXCEPT
+      : displayPlaneProperties( displayPlaneProperties_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR & operator=( VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR ) - offsetof( DisplayPlaneProperties2KHR, pNext ) );
+      return *this;
+    }
+
+    DisplayPlaneProperties2KHR( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplayPlaneProperties2KHR& operator=( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkDisplayPlaneProperties2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPlaneProperties2KHR*>( this );
+    }
+
+    operator VkDisplayPlaneProperties2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPlaneProperties2KHR*>( this );
+    }
+
+    bool operator==( DisplayPlaneProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( displayPlaneProperties == rhs.displayPlaneProperties );
+    }
+
+    bool operator!=( DisplayPlaneProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneProperties2KHR;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties;
+  };
+  static_assert( sizeof( DisplayPlaneProperties2KHR ) == sizeof( VkDisplayPlaneProperties2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayPlaneProperties2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplayPowerInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff ) VULKAN_HPP_NOEXCEPT
+      : powerState( powerState_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT ) - offsetof( DisplayPowerInfoEXT, pNext ) );
+      return *this;
+    }
+
+    DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplayPowerInfoEXT& operator=( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    DisplayPowerInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DisplayPowerInfoEXT & setPowerState( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      powerState = powerState_;
+      return *this;
+    }
+
+    operator VkDisplayPowerInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPowerInfoEXT*>( this );
+    }
+
+    operator VkDisplayPowerInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPowerInfoEXT*>( this );
+    }
+
+    bool operator==( DisplayPowerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( powerState == rhs.powerState );
+    }
+
+    bool operator!=( DisplayPowerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPowerInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState;
+  };
+  static_assert( sizeof( DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayPowerInfoEXT>::value, "struct wrapper is not a standard layout!" );
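+
+  // Illustrative usage sketch (editor addition, not generated code): DisplayPowerInfoEXT is
+  // passed to Device::displayPowerControlEXT (VK_EXT_display_control) to change a display's
+  // power state. The helper and its parameters are hypothetical; `device` and `display` are
+  // assumed to be valid handles obtained elsewhere.
+  inline void exampleTurnDisplayOff( Device device, DisplayKHR display )
+  {
+    DisplayPowerInfoEXT powerInfo = DisplayPowerInfoEXT().setPowerState( DisplayPowerStateEXT::eOff );
+    device.displayPowerControlEXT( display, powerInfo );
+  }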
+
+  struct DisplayPresentInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( VULKAN_HPP_NAMESPACE::Rect2D srcRect_ = VULKAN_HPP_NAMESPACE::Rect2D(),
+                                                VULKAN_HPP_NAMESPACE::Rect2D dstRect_ = VULKAN_HPP_NAMESPACE::Rect2D(),
+                                                VULKAN_HPP_NAMESPACE::Bool32 persistent_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : srcRect( srcRect_ )
+      , dstRect( dstRect_ )
+      , persistent( persistent_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR & operator=( VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR ) - offsetof( DisplayPresentInfoKHR, pNext ) );
+      return *this;
+    }
+
+    DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplayPresentInfoKHR& operator=( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    DisplayPresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DisplayPresentInfoKHR & setSrcRect( VULKAN_HPP_NAMESPACE::Rect2D srcRect_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcRect = srcRect_;
+      return *this;
+    }
+
+    DisplayPresentInfoKHR & setDstRect( VULKAN_HPP_NAMESPACE::Rect2D dstRect_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstRect = dstRect_;
+      return *this;
+    }
+
+    DisplayPresentInfoKHR & setPersistent( VULKAN_HPP_NAMESPACE::Bool32 persistent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      persistent = persistent_;
+      return *this;
+    }
+
+    operator VkDisplayPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPresentInfoKHR*>( this );
+    }
+
+    operator VkDisplayPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPresentInfoKHR*>( this );
+    }
+
+    bool operator==( DisplayPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcRect == rhs.srcRect )
+          && ( dstRect == rhs.dstRect )
+          && ( persistent == rhs.persistent );
+    }
+
+    bool operator!=( DisplayPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPresentInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Rect2D srcRect;
+    VULKAN_HPP_NAMESPACE::Rect2D dstRect;
+    VULKAN_HPP_NAMESPACE::Bool32 persistent;
+  };
+  static_assert( sizeof( DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayPresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplayPropertiesKHR
+  {
+    DisplayPropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display_ = VULKAN_HPP_NAMESPACE::DisplayKHR(),
+                          const char* displayName_ = nullptr,
+                          VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions_ = VULKAN_HPP_NAMESPACE::Extent2D(),
+                          VULKAN_HPP_NAMESPACE::Extent2D physicalResolution_ = VULKAN_HPP_NAMESPACE::Extent2D(),
+                          VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR(),
+                          VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible_ = 0,
+                          VULKAN_HPP_NAMESPACE::Bool32 persistentContent_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : display( display_ )
+      , displayName( displayName_ )
+      , physicalDimensions( physicalDimensions_ )
+      , physicalResolution( physicalResolution_ )
+      , supportedTransforms( supportedTransforms_ )
+      , planeReorderPossible( planeReorderPossible_ )
+      , persistentContent( persistentContent_ )
+    {}
+
+    DisplayPropertiesKHR( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplayPropertiesKHR& operator=( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkDisplayPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPropertiesKHR*>( this );
+    }
+
+    operator VkDisplayPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPropertiesKHR*>( this );
+    }
+
+    bool operator==( DisplayPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( display == rhs.display )
+          && ( displayName == rhs.displayName )
+          && ( physicalDimensions == rhs.physicalDimensions )
+          && ( physicalResolution == rhs.physicalResolution )
+          && ( supportedTransforms == rhs.supportedTransforms )
+          && ( planeReorderPossible == rhs.planeReorderPossible )
+          && ( persistentContent == rhs.persistentContent );
+    }
+
+    bool operator!=( DisplayPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::DisplayKHR display;
+    const char* displayName;
+    VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions;
+    VULKAN_HPP_NAMESPACE::Extent2D physicalResolution;
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms;
+    VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible;
+    VULKAN_HPP_NAMESPACE::Bool32 persistentContent;
+  };
+  static_assert( sizeof( DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplayProperties2KHR
+  {
+    DisplayProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties_ = VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR() ) VULKAN_HPP_NOEXCEPT
+      : displayProperties( displayProperties_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DisplayProperties2KHR & operator=( VULKAN_HPP_NAMESPACE::DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayProperties2KHR ) - offsetof( DisplayProperties2KHR, pNext ) );
+      return *this;
+    }
+
+    DisplayProperties2KHR( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplayProperties2KHR& operator=( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkDisplayProperties2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayProperties2KHR*>( this );
+    }
+
+    operator VkDisplayProperties2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayProperties2KHR*>( this );
+    }
+
+    bool operator==( DisplayProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( displayProperties == rhs.displayProperties );
+    }
+
+    bool operator!=( DisplayProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayProperties2KHR;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties;
+  };
+  static_assert( sizeof( DisplayProperties2KHR ) == sizeof( VkDisplayProperties2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayProperties2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplaySurfaceCreateInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ = VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR(),
+                                                      VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = VULKAN_HPP_NAMESPACE::DisplayModeKHR(),
+                                                      uint32_t planeIndex_ = 0,
+                                                      uint32_t planeStackIndex_ = 0,
+                                                      VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
+                                                      float globalAlpha_ = 0,
+                                                      VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque,
+                                                      VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = VULKAN_HPP_NAMESPACE::Extent2D() ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , displayMode( displayMode_ )
+      , planeIndex( planeIndex_ )
+      , planeStackIndex( planeStackIndex_ )
+      , transform( transform_ )
+      , globalAlpha( globalAlpha_ )
+      , alphaMode( alphaMode_ )
+      , imageExtent( imageExtent_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR ) - offsetof( DisplaySurfaceCreateInfoKHR, pNext ) );
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplaySurfaceCreateInfoKHR& operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR & setDisplayMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displayMode = displayMode_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      planeIndex = planeIndex_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR & setPlaneStackIndex( uint32_t planeStackIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      planeStackIndex = planeStackIndex_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
+    {
+      transform = transform_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR & setGlobalAlpha( float globalAlpha_ ) VULKAN_HPP_NOEXCEPT
+    {
+      globalAlpha = globalAlpha_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR & setAlphaMode( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      alphaMode = alphaMode_;
+      return *this;
+    }
+
+    DisplaySurfaceCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageExtent = imageExtent_;
+      return *this;
+    }
+
+    operator VkDisplaySurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( this );
+    }
+
+    operator VkDisplaySurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplaySurfaceCreateInfoKHR*>( this );
+    }
+
+    bool operator==( DisplaySurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( displayMode == rhs.displayMode )
+          && ( planeIndex == rhs.planeIndex )
+          && ( planeStackIndex == rhs.planeStackIndex )
+          && ( transform == rhs.transform )
+          && ( globalAlpha == rhs.globalAlpha )
+          && ( alphaMode == rhs.alphaMode )
+          && ( imageExtent == rhs.imageExtent );
+    }
+
+    bool operator!=( DisplaySurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags;
+    VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode;
+    uint32_t planeIndex;
+    uint32_t planeStackIndex;
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform;
+    float globalAlpha;
+    VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode;
+    VULKAN_HPP_NAMESPACE::Extent2D imageExtent;
+  };
+  static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplaySurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
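+
+  // Illustrative usage sketch (editor addition, not generated code): a DisplaySurfaceCreateInfoKHR
+  // built with the setters above is handed to Instance::createDisplayPlaneSurfaceKHR to obtain a
+  // surface for direct-to-display rendering. The helper and its parameters are hypothetical;
+  // `displayMode`, `planeIndex` and `extent` are assumed to come from the VK_KHR_display queries,
+  // and the default exceptions-enabled configuration is assumed for the return type.
+  inline SurfaceKHR exampleCreateDisplaySurface( Instance instance,
+                                                 DisplayModeKHR displayMode,
+                                                 uint32_t planeIndex,
+                                                 Extent2D extent )
+  {
+    DisplaySurfaceCreateInfoKHR createInfo = DisplaySurfaceCreateInfoKHR()
+                                                 .setDisplayMode( displayMode )
+                                                 .setPlaneIndex( planeIndex )
+                                                 .setTransform( SurfaceTransformFlagBitsKHR::eIdentity )
+                                                 .setAlphaMode( DisplayPlaneAlphaFlagBitsKHR::eOpaque )
+                                                 .setImageExtent( extent );
+    return instance.createDisplayPlaneSurfaceKHR( createInfo );
+  }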
+
+  struct DrawIndexedIndirectCommand
+  {
+    VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( uint32_t indexCount_ = 0,
+                                                     uint32_t instanceCount_ = 0,
+                                                     uint32_t firstIndex_ = 0,
+                                                     int32_t vertexOffset_ = 0,
+                                                     uint32_t firstInstance_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : indexCount( indexCount_ )
+      , instanceCount( instanceCount_ )
+      , firstIndex( firstIndex_ )
+      , vertexOffset( vertexOffset_ )
+      , firstInstance( firstInstance_ )
+    {}
+
+    DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DrawIndexedIndirectCommand& operator=( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand const *>(&rhs);
+      return *this;
+    }
+
+    DrawIndexedIndirectCommand & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexCount = indexCount_;
+      return *this;
+    }
+
+    DrawIndexedIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      instanceCount = instanceCount_;
+      return *this;
+    }
+
+    DrawIndexedIndirectCommand & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      firstIndex = firstIndex_;
+      return *this;
+    }
+
+    DrawIndexedIndirectCommand & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexOffset = vertexOffset_;
+      return *this;
+    }
+
+    DrawIndexedIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      firstInstance = firstInstance_;
+      return *this;
+    }
+
+    operator VkDrawIndexedIndirectCommand const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDrawIndexedIndirectCommand*>( this );
+    }
+
+    operator VkDrawIndexedIndirectCommand &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDrawIndexedIndirectCommand*>( this );
+    }
+
+    bool operator==( DrawIndexedIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( indexCount == rhs.indexCount )
+          && ( instanceCount == rhs.instanceCount )
+          && ( firstIndex == rhs.firstIndex )
+          && ( vertexOffset == rhs.vertexOffset )
+          && ( firstInstance == rhs.firstInstance );
+    }
+
+    bool operator!=( DrawIndexedIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint32_t indexCount;
+    uint32_t instanceCount;
+    uint32_t firstIndex;
+    int32_t vertexOffset;
+    uint32_t firstInstance;
+  };
+  static_assert( sizeof( DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DrawIndexedIndirectCommand>::value, "struct wrapper is not a standard layout!" );
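+
+  // Illustrative usage sketch (editor addition, not generated code): DrawIndexedIndirectCommand
+  // mirrors the layout read by vkCmdDrawIndexedIndirect; commands are written into a buffer and
+  // the buffer/offset/stride are passed to CommandBuffer::drawIndexedIndirect. The helper and its
+  // parameters are hypothetical; `mapped` is assumed to point at host-visible memory backing
+  // `indirectBuffer`, and `commandBuffer` is assumed to be recording inside a render pass with
+  // pipeline, vertex and index buffers already bound.
+  inline void exampleRecordIndexedIndirectDraw( void * mapped, Buffer indirectBuffer, CommandBuffer commandBuffer )
+  {
+    DrawIndexedIndirectCommand draw = DrawIndexedIndirectCommand()
+                                          .setIndexCount( 36 )
+                                          .setInstanceCount( 1 )
+                                          .setFirstIndex( 0 )
+                                          .setVertexOffset( 0 )
+                                          .setFirstInstance( 0 );
+    memcpy( mapped, &draw, sizeof( draw ) );  // one command at offset 0
+    commandBuffer.drawIndexedIndirect( indirectBuffer, 0, 1, sizeof( DrawIndexedIndirectCommand ) );
+  }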
+
+  struct DrawIndirectCommand
+  {
+    VULKAN_HPP_CONSTEXPR DrawIndirectCommand( uint32_t vertexCount_ = 0,
+                                              uint32_t instanceCount_ = 0,
+                                              uint32_t firstVertex_ = 0,
+                                              uint32_t firstInstance_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : vertexCount( vertexCount_ )
+      , instanceCount( instanceCount_ )
+      , firstVertex( firstVertex_ )
+      , firstInstance( firstInstance_ )
+    {}
+
+    DrawIndirectCommand( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DrawIndirectCommand& operator=( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawIndirectCommand const *>(&rhs);
+      return *this;
+    }
+
+    DrawIndirectCommand & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexCount = vertexCount_;
+      return *this;
+    }
+
+    DrawIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      instanceCount = instanceCount_;
+      return *this;
+    }
+
+    DrawIndirectCommand & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      firstVertex = firstVertex_;
+      return *this;
+    }
+
+    DrawIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      firstInstance = firstInstance_;
+      return *this;
+    }
+
+    operator VkDrawIndirectCommand const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDrawIndirectCommand*>( this );
+    }
+
+    operator VkDrawIndirectCommand &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDrawIndirectCommand*>( this );
+    }
+
+    bool operator==( DrawIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( vertexCount == rhs.vertexCount )
+          && ( instanceCount == rhs.instanceCount )
+          && ( firstVertex == rhs.firstVertex )
+          && ( firstInstance == rhs.firstInstance );
+    }
+
+    bool operator!=( DrawIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint32_t vertexCount;
+    uint32_t instanceCount;
+    uint32_t firstVertex;
+    uint32_t firstInstance;
+  };
+  static_assert( sizeof( DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DrawIndirectCommand>::value, "struct wrapper is not a standard layout!" );
+
+  struct DrawMeshTasksIndirectCommandNV
+  {
+    VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( uint32_t taskCount_ = 0,
+                                                         uint32_t firstTask_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : taskCount( taskCount_ )
+      , firstTask( firstTask_ )
+    {}
+
+    DrawMeshTasksIndirectCommandNV( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DrawMeshTasksIndirectCommandNV& operator=( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV const *>(&rhs);
+      return *this;
+    }
+
+    DrawMeshTasksIndirectCommandNV & setTaskCount( uint32_t taskCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      taskCount = taskCount_;
+      return *this;
+    }
+
+    DrawMeshTasksIndirectCommandNV & setFirstTask( uint32_t firstTask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      firstTask = firstTask_;
+      return *this;
+    }
+
+    operator VkDrawMeshTasksIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDrawMeshTasksIndirectCommandNV*>( this );
+    }
+
+    operator VkDrawMeshTasksIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDrawMeshTasksIndirectCommandNV*>( this );
+    }
+
+    bool operator==( DrawMeshTasksIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( taskCount == rhs.taskCount )
+          && ( firstTask == rhs.firstTask );
+    }
+
+    bool operator!=( DrawMeshTasksIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint32_t taskCount;
+    uint32_t firstTask;
+  };
+  static_assert( sizeof( DrawMeshTasksIndirectCommandNV ) == sizeof( VkDrawMeshTasksIndirectCommandNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DrawMeshTasksIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct DrmFormatModifierPropertiesEXT
+  {
+    DrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = 0,
+                                    uint32_t drmFormatModifierPlaneCount_ = 0,
+                                    VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures_ = VULKAN_HPP_NAMESPACE::FormatFeatureFlags() ) VULKAN_HPP_NOEXCEPT
+      : drmFormatModifier( drmFormatModifier_ )
+      , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ )
+      , drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ )
+    {}
+
+    DrmFormatModifierPropertiesEXT( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DrmFormatModifierPropertiesEXT& operator=( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkDrmFormatModifierPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDrmFormatModifierPropertiesEXT*>( this );
+    }
+
+    operator VkDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDrmFormatModifierPropertiesEXT*>( this );
+    }
+
+    bool operator==( DrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( drmFormatModifier == rhs.drmFormatModifier )
+          && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount )
+          && ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures );
+    }
+
+    bool operator!=( DrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint64_t drmFormatModifier;
+    uint32_t drmFormatModifierPlaneCount;
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures;
+  };
+  static_assert( sizeof( DrmFormatModifierPropertiesEXT ) == sizeof( VkDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DrmFormatModifierPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct DrmFormatModifierPropertiesListEXT
+  {
+    DrmFormatModifierPropertiesListEXT( uint32_t drmFormatModifierCount_ = 0,
+                                        VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : drmFormatModifierCount( drmFormatModifierCount_ )
+      , pDrmFormatModifierProperties( pDrmFormatModifierProperties_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT & operator=( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT ) - offsetof( DrmFormatModifierPropertiesListEXT, pNext ) );
+      return *this;
+    }
+
+    DrmFormatModifierPropertiesListEXT( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DrmFormatModifierPropertiesListEXT& operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkDrmFormatModifierPropertiesListEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDrmFormatModifierPropertiesListEXT*>( this );
+    }
+
+    operator VkDrmFormatModifierPropertiesListEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDrmFormatModifierPropertiesListEXT*>( this );
+    }
+
+    bool operator==( DrmFormatModifierPropertiesListEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( drmFormatModifierCount == rhs.drmFormatModifierCount )
+          && ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties );
+    }
+
+    bool operator!=( DrmFormatModifierPropertiesListEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesListEXT;
+    void* pNext = nullptr;
+    uint32_t drmFormatModifierCount;
+    VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties;
+  };
+  static_assert( sizeof( DrmFormatModifierPropertiesListEXT ) == sizeof( VkDrmFormatModifierPropertiesListEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DrmFormatModifierPropertiesListEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct EventCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR EventCreateInfo( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ = VULKAN_HPP_NAMESPACE::EventCreateFlags() ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::EventCreateInfo & operator=( VULKAN_HPP_NAMESPACE::EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::EventCreateInfo ) - offsetof( EventCreateInfo, pNext ) );
+      return *this;
+    }
+
+    EventCreateInfo( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    EventCreateInfo& operator=( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::EventCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    EventCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    EventCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    operator VkEventCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkEventCreateInfo*>( this );
+    }
+
+    operator VkEventCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkEventCreateInfo*>( this );
+    }
+
+    bool operator==( EventCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( EventCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eEventCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::EventCreateFlags flags;
+  };
+  static_assert( sizeof( EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<EventCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct ExportFenceCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags() ) VULKAN_HPP_NOEXCEPT
+      : handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo ) - offsetof( ExportFenceCreateInfo, pNext ) );
+      return *this;
+    }
+
+    ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ExportFenceCreateInfo& operator=( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    ExportFenceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportFenceCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+    operator VkExportFenceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportFenceCreateInfo*>( this );
+    }
+
+    operator VkExportFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportFenceCreateInfo*>( this );
+    }
+
+    bool operator==( ExportFenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExportFenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes;
+  };
+  static_assert( sizeof( ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExportFenceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+  struct ExportFenceWin32HandleInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr,
+                                                        DWORD dwAccess_ = 0,
+                                                        LPCWSTR name_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pAttributes( pAttributes_ )
+      , dwAccess( dwAccess_ )
+      , name( name_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR ) - offsetof( ExportFenceWin32HandleInfoKHR, pNext ) );
+      return *this;
+    }
+
+    ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ExportFenceWin32HandleInfoKHR& operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    ExportFenceWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportFenceWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttributes = pAttributes_;
+      return *this;
+    }
+
+    ExportFenceWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dwAccess = dwAccess_;
+      return *this;
+    }
+
+    ExportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
+    {
+      name = name_;
+      return *this;
+    }
+
+    operator VkExportFenceWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportFenceWin32HandleInfoKHR*>( this );
+    }
+
+    operator VkExportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportFenceWin32HandleInfoKHR*>( this );
+    }
+
+    bool operator==( ExportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pAttributes == rhs.pAttributes )
+          && ( dwAccess == rhs.dwAccess )
+          && ( name == rhs.name );
+    }
+
+    bool operator!=( ExportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceWin32HandleInfoKHR;
+    const void* pNext = nullptr;
+    const SECURITY_ATTRIBUTES* pAttributes;
+    DWORD dwAccess;
+    LPCWSTR name;
+  };
+  static_assert( sizeof( ExportFenceWin32HandleInfoKHR ) == sizeof( VkExportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExportFenceWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct ExportMemoryAllocateInfo
+  {
+    VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags() ) VULKAN_HPP_NOEXCEPT
+      : handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo & operator=( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo ) - offsetof( ExportMemoryAllocateInfo, pNext ) );
+      return *this;
+    }
+
+    ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ExportMemoryAllocateInfo& operator=( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo const *>(&rhs);
+      return *this;
+    }
+
+    ExportMemoryAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportMemoryAllocateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+    operator VkExportMemoryAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportMemoryAllocateInfo*>( this );
+    }
+
+    operator VkExportMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportMemoryAllocateInfo*>( this );
+    }
+
+    bool operator==( ExportMemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExportMemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes;
+  };
+  static_assert( sizeof( ExportMemoryAllocateInfo ) == sizeof( VkExportMemoryAllocateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExportMemoryAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct ExportMemoryAllocateInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV() ) VULKAN_HPP_NOEXCEPT
+      : handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV & operator=( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV ) - offsetof( ExportMemoryAllocateInfoNV, pNext ) );
+      return *this;
+    }
+
+    ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ExportMemoryAllocateInfoNV& operator=( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    ExportMemoryAllocateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportMemoryAllocateInfoNV & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+    operator VkExportMemoryAllocateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportMemoryAllocateInfoNV*>( this );
+    }
+
+    operator VkExportMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportMemoryAllocateInfoNV*>( this );
+    }
+
+    bool operator==( ExportMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExportMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfoNV;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes;
+  };
+  static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExportMemoryAllocateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+  struct ExportMemoryWin32HandleInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr,
+                                                         DWORD dwAccess_ = 0,
+                                                         LPCWSTR name_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pAttributes( pAttributes_ )
+      , dwAccess( dwAccess_ )
+      , name( name_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR ) - offsetof( ExportMemoryWin32HandleInfoKHR, pNext ) );
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ExportMemoryWin32HandleInfoKHR& operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttributes = pAttributes_;
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dwAccess = dwAccess_;
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
+    {
+      name = name_;
+      return *this;
+    }
+
+    operator VkExportMemoryWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportMemoryWin32HandleInfoKHR*>( this );
+    }
+
+    operator VkExportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportMemoryWin32HandleInfoKHR*>( this );
+    }
+
+    bool operator==( ExportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pAttributes == rhs.pAttributes )
+          && ( dwAccess == rhs.dwAccess )
+          && ( name == rhs.name );
+    }
+
+    bool operator!=( ExportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoKHR;
+    const void* pNext = nullptr;
+    const SECURITY_ATTRIBUTES* pAttributes;
+    DWORD dwAccess;
+    LPCWSTR name;
+  };
+  static_assert( sizeof( ExportMemoryWin32HandleInfoKHR ) == sizeof( VkExportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExportMemoryWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+  struct ExportMemoryWin32HandleInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr,
+                                                        DWORD dwAccess_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : pAttributes( pAttributes_ )
+      , dwAccess( dwAccess_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV & operator=( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV ) - offsetof( ExportMemoryWin32HandleInfoNV, pNext ) );
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ExportMemoryWin32HandleInfoNV& operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoNV & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttributes = pAttributes_;
+      return *this;
+    }
+
+    ExportMemoryWin32HandleInfoNV & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dwAccess = dwAccess_;
+      return *this;
+    }
+
+    operator VkExportMemoryWin32HandleInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportMemoryWin32HandleInfoNV*>( this );
+    }
+
+    operator VkExportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportMemoryWin32HandleInfoNV*>( this );
+    }
+
+    bool operator==( ExportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pAttributes == rhs.pAttributes )
+          && ( dwAccess == rhs.dwAccess );
+    }
+
+    bool operator!=( ExportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoNV;
+    const void* pNext = nullptr;
+    const SECURITY_ATTRIBUTES* pAttributes;
+    DWORD dwAccess;
+  };
+  static_assert( sizeof( ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExportMemoryWin32HandleInfoNV>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct ExportSemaphoreCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags() ) VULKAN_HPP_NOEXCEPT
+      : handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo ) - offsetof( ExportSemaphoreCreateInfo, pNext ) );
+      return *this;
+    }
+
+    ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ExportSemaphoreCreateInfo& operator=( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    ExportSemaphoreCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportSemaphoreCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+    operator VkExportSemaphoreCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportSemaphoreCreateInfo*>( this );
+    }
+
+    operator VkExportSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportSemaphoreCreateInfo*>( this );
+    }
+
+    bool operator==( ExportSemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExportSemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes;
+  };
+  static_assert( sizeof( ExportSemaphoreCreateInfo ) == sizeof( VkExportSemaphoreCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExportSemaphoreCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+  struct ExportSemaphoreWin32HandleInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr,
+                                                            DWORD dwAccess_ = 0,
+                                                            LPCWSTR name_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pAttributes( pAttributes_ )
+      , dwAccess( dwAccess_ )
+      , name( name_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR ) - offsetof( ExportSemaphoreWin32HandleInfoKHR, pNext ) );
+      return *this;
+    }
+
+    ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ExportSemaphoreWin32HandleInfoKHR& operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    ExportSemaphoreWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExportSemaphoreWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttributes = pAttributes_;
+      return *this;
+    }
+
+    ExportSemaphoreWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dwAccess = dwAccess_;
+      return *this;
+    }
+
+    ExportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
+    {
+      name = name_;
+      return *this;
+    }
+
+    operator VkExportSemaphoreWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportSemaphoreWin32HandleInfoKHR*>( this );
+    }
+
+    operator VkExportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportSemaphoreWin32HandleInfoKHR*>( this );
+    }
+
+    bool operator==( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pAttributes == rhs.pAttributes )
+          && ( dwAccess == rhs.dwAccess )
+          && ( name == rhs.name );
+    }
+
+    bool operator!=( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreWin32HandleInfoKHR;
+    const void* pNext = nullptr;
+    const SECURITY_ATTRIBUTES* pAttributes;
+    DWORD dwAccess;
+    LPCWSTR name;
+  };
+  static_assert( sizeof( ExportSemaphoreWin32HandleInfoKHR ) == sizeof( VkExportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExportSemaphoreWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct ExtensionProperties
+  {
+    ExtensionProperties( std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& extensionName_ = { { 0 } },
+                         uint32_t specVersion_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : extensionName{}
+      , specVersion( specVersion_ )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<char,VK_MAX_EXTENSION_NAME_SIZE,VK_MAX_EXTENSION_NAME_SIZE>::copy( extensionName, extensionName_ );
+    }
+
+    ExtensionProperties( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ExtensionProperties& operator=( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExtensionProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkExtensionProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExtensionProperties*>( this );
+    }
+
+    operator VkExtensionProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExtensionProperties*>( this );
+    }
+
+    bool operator==( ExtensionProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( memcmp( extensionName, rhs.extensionName, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 )
+          && ( specVersion == rhs.specVersion );
+    }
+
+    bool operator!=( ExtensionProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    char extensionName[VK_MAX_EXTENSION_NAME_SIZE];
+    uint32_t specVersion;
+  };
+  static_assert( sizeof( ExtensionProperties ) == sizeof( VkExtensionProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExtensionProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct ExternalMemoryProperties
+  {
+    ExternalMemoryProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures_ = VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags(),
+                              VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags(),
+                              VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags() ) VULKAN_HPP_NOEXCEPT
+      : externalMemoryFeatures( externalMemoryFeatures_ )
+      , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
+      , compatibleHandleTypes( compatibleHandleTypes_ )
+    {}
+
+    ExternalMemoryProperties( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ExternalMemoryProperties& operator=( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkExternalMemoryProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalMemoryProperties*>( this );
+    }
+
+    operator VkExternalMemoryProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalMemoryProperties*>( this );
+    }
+
+    bool operator==( ExternalMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( externalMemoryFeatures == rhs.externalMemoryFeatures )
+          && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+          && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
+    }
+
+    bool operator!=( ExternalMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures;
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes;
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes;
+  };
+  static_assert( sizeof( ExternalMemoryProperties ) == sizeof( VkExternalMemoryProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalMemoryProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct ExternalBufferProperties
+  {
+    ExternalBufferProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = VULKAN_HPP_NAMESPACE::ExternalMemoryProperties() ) VULKAN_HPP_NOEXCEPT
+      : externalMemoryProperties( externalMemoryProperties_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ExternalBufferProperties & operator=( VULKAN_HPP_NAMESPACE::ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalBufferProperties ) - offsetof( ExternalBufferProperties, pNext ) );
+      return *this;
+    }
+
+    ExternalBufferProperties( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ExternalBufferProperties& operator=( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalBufferProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkExternalBufferProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalBufferProperties*>( this );
+    }
+
+    operator VkExternalBufferProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalBufferProperties*>( this );
+    }
+
+    bool operator==( ExternalBufferProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( externalMemoryProperties == rhs.externalMemoryProperties );
+    }
+
+    bool operator!=( ExternalBufferProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalBufferProperties;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties;
+  };
+  static_assert( sizeof( ExternalBufferProperties ) == sizeof( VkExternalBufferProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalBufferProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct ExternalFenceProperties
+  {
+    ExternalFenceProperties( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags(),
+                             VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags(),
+                             VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures_ = VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags() ) VULKAN_HPP_NOEXCEPT
+      : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
+      , compatibleHandleTypes( compatibleHandleTypes_ )
+      , externalFenceFeatures( externalFenceFeatures_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ExternalFenceProperties & operator=( VULKAN_HPP_NAMESPACE::ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalFenceProperties ) - offsetof( ExternalFenceProperties, pNext ) );
+      return *this;
+    }
+
+    ExternalFenceProperties( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ExternalFenceProperties& operator=( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalFenceProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkExternalFenceProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalFenceProperties*>( this );
+    }
+
+    operator VkExternalFenceProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalFenceProperties*>( this );
+    }
+
+    bool operator==( ExternalFenceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+          && ( compatibleHandleTypes == rhs.compatibleHandleTypes )
+          && ( externalFenceFeatures == rhs.externalFenceFeatures );
+    }
+
+    bool operator!=( ExternalFenceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFenceProperties;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes;
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes;
+    VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures;
+  };
+  static_assert( sizeof( ExternalFenceProperties ) == sizeof( VkExternalFenceProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalFenceProperties>::value, "struct wrapper is not a standard layout!" );
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+  struct ExternalFormatANDROID
+  {
+    VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( uint64_t externalFormat_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : externalFormat( externalFormat_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ExternalFormatANDROID & operator=( VULKAN_HPP_NAMESPACE::ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalFormatANDROID ) - offsetof( ExternalFormatANDROID, pNext ) );
+      return *this;
+    }
+
+    ExternalFormatANDROID( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ExternalFormatANDROID& operator=( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalFormatANDROID const *>(&rhs);
+      return *this;
+    }
+
+    ExternalFormatANDROID & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExternalFormatANDROID & setExternalFormat( uint64_t externalFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      externalFormat = externalFormat_;
+      return *this;
+    }
+
+    operator VkExternalFormatANDROID const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalFormatANDROID*>( this );
+    }
+
+    operator VkExternalFormatANDROID &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalFormatANDROID*>( this );
+    }
+
+    bool operator==( ExternalFormatANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( externalFormat == rhs.externalFormat );
+    }
+
+    bool operator!=( ExternalFormatANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFormatANDROID;
+    void* pNext = nullptr;
+    uint64_t externalFormat;
+  };
+  static_assert( sizeof( ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalFormatANDROID>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  struct ExternalImageFormatProperties
+  {
+    ExternalImageFormatProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = VULKAN_HPP_NAMESPACE::ExternalMemoryProperties() ) VULKAN_HPP_NOEXCEPT
+      : externalMemoryProperties( externalMemoryProperties_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties & operator=( VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties ) - offsetof( ExternalImageFormatProperties, pNext ) );
+      return *this;
+    }
+
+    ExternalImageFormatProperties( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ExternalImageFormatProperties& operator=( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkExternalImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalImageFormatProperties*>( this );
+    }
+
+    operator VkExternalImageFormatProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalImageFormatProperties*>( this );
+    }
+
+    bool operator==( ExternalImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( externalMemoryProperties == rhs.externalMemoryProperties );
+    }
+
+    bool operator!=( ExternalImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalImageFormatProperties;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties;
+  };
+  static_assert( sizeof( ExternalImageFormatProperties ) == sizeof( VkExternalImageFormatProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageFormatProperties
+  {
+    ImageFormatProperties( VULKAN_HPP_NAMESPACE::Extent3D maxExtent_ = VULKAN_HPP_NAMESPACE::Extent3D(),
+                           uint32_t maxMipLevels_ = 0,
+                           uint32_t maxArrayLayers_ = 0,
+                           VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = VULKAN_HPP_NAMESPACE::SampleCountFlags(),
+                           VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : maxExtent( maxExtent_ )
+      , maxMipLevels( maxMipLevels_ )
+      , maxArrayLayers( maxArrayLayers_ )
+      , sampleCounts( sampleCounts_ )
+      , maxResourceSize( maxResourceSize_ )
+    {}
+
+    ImageFormatProperties( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageFormatProperties& operator=( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageFormatProperties*>( this );
+    }
+
+    operator VkImageFormatProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageFormatProperties*>( this );
+    }
+
+    bool operator==( ImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( maxExtent == rhs.maxExtent )
+          && ( maxMipLevels == rhs.maxMipLevels )
+          && ( maxArrayLayers == rhs.maxArrayLayers )
+          && ( sampleCounts == rhs.sampleCounts )
+          && ( maxResourceSize == rhs.maxResourceSize );
+    }
+
+    bool operator!=( ImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::Extent3D maxExtent;
+    uint32_t maxMipLevels;
+    uint32_t maxArrayLayers;
+    VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts;
+    VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize;
+  };
+  static_assert( sizeof( ImageFormatProperties ) == sizeof( VkImageFormatProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct ExternalImageFormatPropertiesNV
+  {
+    ExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = VULKAN_HPP_NAMESPACE::ImageFormatProperties(),
+                                     VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ = VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV(),
+                                     VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV(),
+                                     VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV() ) VULKAN_HPP_NOEXCEPT
+      : imageFormatProperties( imageFormatProperties_ )
+      , externalMemoryFeatures( externalMemoryFeatures_ )
+      , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
+      , compatibleHandleTypes( compatibleHandleTypes_ )
+    {}
+
+    ExternalImageFormatPropertiesNV( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ExternalImageFormatPropertiesNV& operator=( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV const *>(&rhs);
+      return *this;
+    }
+
+    operator VkExternalImageFormatPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalImageFormatPropertiesNV*>( this );
+    }
+
+    operator VkExternalImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalImageFormatPropertiesNV*>( this );
+    }
+
+    bool operator==( ExternalImageFormatPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( imageFormatProperties == rhs.imageFormatProperties )
+          && ( externalMemoryFeatures == rhs.externalMemoryFeatures )
+          && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+          && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
+    }
+
+    bool operator!=( ExternalImageFormatPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties;
+    VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures;
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes;
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes;
+  };
+  static_assert( sizeof( ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalImageFormatPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct ExternalMemoryBufferCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags() ) VULKAN_HPP_NOEXCEPT
+      : handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo ) - offsetof( ExternalMemoryBufferCreateInfo, pNext ) );
+      return *this;
+    }
+
+    ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ExternalMemoryBufferCreateInfo& operator=( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    ExternalMemoryBufferCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExternalMemoryBufferCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+    operator VkExternalMemoryBufferCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalMemoryBufferCreateInfo*>( this );
+    }
+
+    operator VkExternalMemoryBufferCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalMemoryBufferCreateInfo*>( this );
+    }
+
+    bool operator==( ExternalMemoryBufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExternalMemoryBufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryBufferCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes;
+  };
+  static_assert( sizeof( ExternalMemoryBufferCreateInfo ) == sizeof( VkExternalMemoryBufferCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalMemoryBufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct ExternalMemoryImageCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags() ) VULKAN_HPP_NOEXCEPT
+      : handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo ) - offsetof( ExternalMemoryImageCreateInfo, pNext ) );
+      return *this;
+    }
+
+    ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ExternalMemoryImageCreateInfo& operator=( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    ExternalMemoryImageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExternalMemoryImageCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+    operator VkExternalMemoryImageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalMemoryImageCreateInfo*>( this );
+    }
+
+    operator VkExternalMemoryImageCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalMemoryImageCreateInfo*>( this );
+    }
+
+    bool operator==( ExternalMemoryImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExternalMemoryImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes;
+  };
+  static_assert( sizeof( ExternalMemoryImageCreateInfo ) == sizeof( VkExternalMemoryImageCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalMemoryImageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct ExternalMemoryImageCreateInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV() ) VULKAN_HPP_NOEXCEPT
+      : handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV ) - offsetof( ExternalMemoryImageCreateInfoNV, pNext ) );
+      return *this;
+    }
+
+    ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ExternalMemoryImageCreateInfoNV& operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    ExternalMemoryImageCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExternalMemoryImageCreateInfoNV & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+    operator VkExternalMemoryImageCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalMemoryImageCreateInfoNV*>( this );
+    }
+
+    operator VkExternalMemoryImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalMemoryImageCreateInfoNV*>( this );
+    }
+
+    bool operator==( ExternalMemoryImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExternalMemoryImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfoNV;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes;
+  };
+  static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalMemoryImageCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct ExternalSemaphoreProperties
+  {
+    ExternalSemaphoreProperties( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags(),
+                                 VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags(),
+                                 VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags() ) VULKAN_HPP_NOEXCEPT
+      : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
+      , compatibleHandleTypes( compatibleHandleTypes_ )
+      , externalSemaphoreFeatures( externalSemaphoreFeatures_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties & operator=( VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties ) - offsetof( ExternalSemaphoreProperties, pNext ) );
+      return *this;
+    }
+
+    ExternalSemaphoreProperties( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ExternalSemaphoreProperties& operator=( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkExternalSemaphoreProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalSemaphoreProperties*>( this );
+    }
+
+    operator VkExternalSemaphoreProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalSemaphoreProperties*>( this );
+    }
+
+    bool operator==( ExternalSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+          && ( compatibleHandleTypes == rhs.compatibleHandleTypes )
+          && ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures );
+    }
+
+    bool operator!=( ExternalSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalSemaphoreProperties;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes;
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes;
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures;
+  };
+  static_assert( sizeof( ExternalSemaphoreProperties ) == sizeof( VkExternalSemaphoreProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalSemaphoreProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct FenceCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR FenceCreateInfo( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ = VULKAN_HPP_NAMESPACE::FenceCreateFlags() ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::FenceCreateInfo & operator=( VULKAN_HPP_NAMESPACE::FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FenceCreateInfo ) - offsetof( FenceCreateInfo, pNext ) );
+      return *this;
+    }
+
+    FenceCreateInfo( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    FenceCreateInfo& operator=( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    FenceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    FenceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    operator VkFenceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFenceCreateInfo*>( this );
+    }
+
+    operator VkFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFenceCreateInfo*>( this );
+    }
+
+    bool operator==( FenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( FenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::FenceCreateFlags flags;
+  };
+  static_assert( sizeof( FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FenceCreateInfo>::value, "struct wrapper is not a standard layout!" );
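+
+  // Usage sketch only (assuming the default VULKAN_HPP_NAMESPACE alias of "vk"): the
+  // create-info wrappers above are filled through their constructors or the chainable
+  // set*() members and then passed wherever the corresponding C struct is expected,
+  // via the implicit conversion operators, e.g.
+  //
+  //   vk::FenceCreateInfo fenceInfo = vk::FenceCreateInfo()
+  //                                       .setFlags( vk::FenceCreateFlagBits::eSignaled );
+  //   VkFenceCreateInfo const & cFenceInfo = fenceInfo;  // no copy, just a reinterpret_cast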
+
+  struct FenceGetFdInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = VULKAN_HPP_NAMESPACE::Fence(),
+                                            VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+      : fence( fence_ )
+      , handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & operator=( VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR ) - offsetof( FenceGetFdInfoKHR, pNext ) );
+      return *this;
+    }
+
+    FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    FenceGetFdInfoKHR& operator=( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    FenceGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    FenceGetFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fence = fence_;
+      return *this;
+    }
+
+    FenceGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    operator VkFenceGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFenceGetFdInfoKHR*>( this );
+    }
+
+    operator VkFenceGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFenceGetFdInfoKHR*>( this );
+    }
+
+    bool operator==( FenceGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fence == rhs.fence )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( FenceGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetFdInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType;
+  };
+  static_assert( sizeof( FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FenceGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+  struct FenceGetWin32HandleInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = VULKAN_HPP_NAMESPACE::Fence(),
+                                                     VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+      : fence( fence_ )
+      , handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR ) - offsetof( FenceGetWin32HandleInfoKHR, pNext ) );
+      return *this;
+    }
+
+    FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    FenceGetWin32HandleInfoKHR& operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    FenceGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    FenceGetWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fence = fence_;
+      return *this;
+    }
+
+    FenceGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    operator VkFenceGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>( this );
+    }
+
+    operator VkFenceGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFenceGetWin32HandleInfoKHR*>( this );
+    }
+
+    bool operator==( FenceGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fence == rhs.fence )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( FenceGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType;
+  };
+  static_assert( sizeof( FenceGetWin32HandleInfoKHR ) == sizeof( VkFenceGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FenceGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct FilterCubicImageViewImageFormatPropertiesEXT
+  {
+    FilterCubicImageViewImageFormatPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 filterCubic_ = 0,
+                                                  VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : filterCubic( filterCubic_ )
+      , filterCubicMinmax( filterCubicMinmax_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT ) - offsetof( FilterCubicImageViewImageFormatPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    FilterCubicImageViewImageFormatPropertiesEXT& operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkFilterCubicImageViewImageFormatPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFilterCubicImageViewImageFormatPropertiesEXT*>( this );
+    }
+
+    operator VkFilterCubicImageViewImageFormatPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFilterCubicImageViewImageFormatPropertiesEXT*>( this );
+    }
+
+    bool operator==( FilterCubicImageViewImageFormatPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( filterCubic == rhs.filterCubic )
+          && ( filterCubicMinmax == rhs.filterCubicMinmax );
+    }
+
+    bool operator!=( FilterCubicImageViewImageFormatPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 filterCubic;
+    VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax;
+  };
+  static_assert( sizeof( FilterCubicImageViewImageFormatPropertiesEXT ) == sizeof( VkFilterCubicImageViewImageFormatPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FilterCubicImageViewImageFormatPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct FormatProperties
+  {
+    FormatProperties( VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures_ = VULKAN_HPP_NAMESPACE::FormatFeatureFlags(),
+                      VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures_ = VULKAN_HPP_NAMESPACE::FormatFeatureFlags(),
+                      VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures_ = VULKAN_HPP_NAMESPACE::FormatFeatureFlags() ) VULKAN_HPP_NOEXCEPT
+      : linearTilingFeatures( linearTilingFeatures_ )
+      , optimalTilingFeatures( optimalTilingFeatures_ )
+      , bufferFeatures( bufferFeatures_ )
+    {}
+
+    FormatProperties( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    FormatProperties& operator=( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkFormatProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFormatProperties*>( this );
+    }
+
+    operator VkFormatProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFormatProperties*>( this );
+    }
+
+    bool operator==( FormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( linearTilingFeatures == rhs.linearTilingFeatures )
+          && ( optimalTilingFeatures == rhs.optimalTilingFeatures )
+          && ( bufferFeatures == rhs.bufferFeatures );
+    }
+
+    bool operator!=( FormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures;
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures;
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures;
+  };
+  static_assert( sizeof( FormatProperties ) == sizeof( VkFormatProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FormatProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct FormatProperties2
+  {
+    FormatProperties2( VULKAN_HPP_NAMESPACE::FormatProperties formatProperties_ = VULKAN_HPP_NAMESPACE::FormatProperties() ) VULKAN_HPP_NOEXCEPT
+      : formatProperties( formatProperties_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::FormatProperties2 & operator=( VULKAN_HPP_NAMESPACE::FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FormatProperties2 ) - offsetof( FormatProperties2, pNext ) );
+      return *this;
+    }
+
+    FormatProperties2( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    FormatProperties2& operator=( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties2 const *>(&rhs);
+      return *this;
+    }
+
+    operator VkFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFormatProperties2*>( this );
+    }
+
+    operator VkFormatProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFormatProperties2*>( this );
+    }
+
+    bool operator==( FormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( formatProperties == rhs.formatProperties );
+    }
+
+    bool operator!=( FormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties2;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::FormatProperties formatProperties;
+  };
+  static_assert( sizeof( FormatProperties2 ) == sizeof( VkFormatProperties2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FormatProperties2>::value, "struct wrapper is not a standard layout!" );
+
+  struct FramebufferAttachmentImageInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfoKHR( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = VULKAN_HPP_NAMESPACE::ImageCreateFlags(),
+                                                            VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = VULKAN_HPP_NAMESPACE::ImageUsageFlags(),
+                                                            uint32_t width_ = 0,
+                                                            uint32_t height_ = 0,
+                                                            uint32_t layerCount_ = 0,
+                                                            uint32_t viewFormatCount_ = 0,
+                                                            const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , usage( usage_ )
+      , width( width_ )
+      , height( height_ )
+      , layerCount( layerCount_ )
+      , viewFormatCount( viewFormatCount_ )
+      , pViewFormats( pViewFormats_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfoKHR & operator=( VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfoKHR ) - offsetof( FramebufferAttachmentImageInfoKHR, pNext ) );
+      return *this;
+    }
+
+    FramebufferAttachmentImageInfoKHR( VkFramebufferAttachmentImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    FramebufferAttachmentImageInfoKHR& operator=( VkFramebufferAttachmentImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    FramebufferAttachmentImageInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    FramebufferAttachmentImageInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    FramebufferAttachmentImageInfoKHR & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    FramebufferAttachmentImageInfoKHR & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    FramebufferAttachmentImageInfoKHR & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+
+    FramebufferAttachmentImageInfoKHR & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layerCount = layerCount_;
+      return *this;
+    }
+
+    FramebufferAttachmentImageInfoKHR & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewFormatCount = viewFormatCount_;
+      return *this;
+    }
+
+    FramebufferAttachmentImageInfoKHR & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewFormats = pViewFormats_;
+      return *this;
+    }
+
+    operator VkFramebufferAttachmentImageInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFramebufferAttachmentImageInfoKHR*>( this );
+    }
+
+    operator VkFramebufferAttachmentImageInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFramebufferAttachmentImageInfoKHR*>( this );
+    }
+
+    bool operator==( FramebufferAttachmentImageInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( usage == rhs.usage )
+          && ( width == rhs.width )
+          && ( height == rhs.height )
+          && ( layerCount == rhs.layerCount )
+          && ( viewFormatCount == rhs.viewFormatCount )
+          && ( pViewFormats == rhs.pViewFormats );
+    }
+
+    bool operator!=( FramebufferAttachmentImageInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentImageInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ImageCreateFlags flags;
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage;
+    uint32_t width;
+    uint32_t height;
+    uint32_t layerCount;
+    uint32_t viewFormatCount;
+    const VULKAN_HPP_NAMESPACE::Format* pViewFormats;
+  };
+  static_assert( sizeof( FramebufferAttachmentImageInfoKHR ) == sizeof( VkFramebufferAttachmentImageInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FramebufferAttachmentImageInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct FramebufferAttachmentsCreateInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfoKHR( uint32_t attachmentImageInfoCount_ = 0,
+                                                              const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfoKHR* pAttachmentImageInfos_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : attachmentImageInfoCount( attachmentImageInfoCount_ )
+      , pAttachmentImageInfos( pAttachmentImageInfos_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfoKHR ) - offsetof( FramebufferAttachmentsCreateInfoKHR, pNext ) );
+      return *this;
+    }
+
+    FramebufferAttachmentsCreateInfoKHR( VkFramebufferAttachmentsCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    FramebufferAttachmentsCreateInfoKHR& operator=( VkFramebufferAttachmentsCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    FramebufferAttachmentsCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    FramebufferAttachmentsCreateInfoKHR & setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentImageInfoCount = attachmentImageInfoCount_;
+      return *this;
+    }
+
+    FramebufferAttachmentsCreateInfoKHR & setPAttachmentImageInfos( const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfoKHR* pAttachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachmentImageInfos = pAttachmentImageInfos_;
+      return *this;
+    }
+
+    operator VkFramebufferAttachmentsCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFramebufferAttachmentsCreateInfoKHR*>( this );
+    }
+
+    operator VkFramebufferAttachmentsCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFramebufferAttachmentsCreateInfoKHR*>( this );
+    }
+
+    bool operator==( FramebufferAttachmentsCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( attachmentImageInfoCount == rhs.attachmentImageInfoCount )
+          && ( pAttachmentImageInfos == rhs.pAttachmentImageInfos );
+    }
+
+    bool operator!=( FramebufferAttachmentsCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentsCreateInfoKHR;
+    const void* pNext = nullptr;
+    uint32_t attachmentImageInfoCount;
+    const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfoKHR* pAttachmentImageInfos;
+  };
+  static_assert( sizeof( FramebufferAttachmentsCreateInfoKHR ) == sizeof( VkFramebufferAttachmentsCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FramebufferAttachmentsCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct FramebufferCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ = VULKAN_HPP_NAMESPACE::FramebufferCreateFlags(),
+                                                VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = VULKAN_HPP_NAMESPACE::RenderPass(),
+                                                uint32_t attachmentCount_ = 0,
+                                                const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ = nullptr,
+                                                uint32_t width_ = 0,
+                                                uint32_t height_ = 0,
+                                                uint32_t layers_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , renderPass( renderPass_ )
+      , attachmentCount( attachmentCount_ )
+      , pAttachments( pAttachments_ )
+      , width( width_ )
+      , height( height_ )
+      , layers( layers_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & operator=( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo ) - offsetof( FramebufferCreateInfo, pNext ) );
+      return *this;
+    }
+
+    FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    FramebufferCreateInfo& operator=( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    FramebufferCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    FramebufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    FramebufferCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      renderPass = renderPass_;
+      return *this;
+    }
+
+    FramebufferCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = attachmentCount_;
+      return *this;
+    }
+
+    FramebufferCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachments = pAttachments_;
+      return *this;
+    }
+
+    FramebufferCreateInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    FramebufferCreateInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+
+    FramebufferCreateInfo & setLayers( uint32_t layers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layers = layers_;
+      return *this;
+    }
+
+    operator VkFramebufferCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFramebufferCreateInfo*>( this );
+    }
+
+    operator VkFramebufferCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFramebufferCreateInfo*>( this );
+    }
+
+    bool operator==( FramebufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( renderPass == rhs.renderPass )
+          && ( attachmentCount == rhs.attachmentCount )
+          && ( pAttachments == rhs.pAttachments )
+          && ( width == rhs.width )
+          && ( height == rhs.height )
+          && ( layers == rhs.layers );
+    }
+
+    bool operator!=( FramebufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags;
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    uint32_t attachmentCount;
+    const VULKAN_HPP_NAMESPACE::ImageView* pAttachments;
+    uint32_t width;
+    uint32_t height;
+    uint32_t layers;
+  };
+  static_assert( sizeof( FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FramebufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
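+
+  // Usage sketch only (illustrative; renderPass and colorView are assumed to be handles
+  // created by the caller): FramebufferCreateInfo mirrors VkFramebufferCreateInfo field
+  // for field, so a single-attachment framebuffer description reads e.g.
+  //
+  //   vk::FramebufferCreateInfo framebufferInfo = vk::FramebufferCreateInfo()
+  //                                                   .setRenderPass( renderPass )
+  //                                                   .setAttachmentCount( 1 )
+  //                                                   .setPAttachments( &colorView )
+  //                                                   .setWidth( 1920 )
+  //                                                   .setHeight( 1080 )
+  //                                                   .setLayers( 1 );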
+
+  struct FramebufferMixedSamplesCombinationNV
+  {
+    FramebufferMixedSamplesCombinationNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge,
+                                          VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+                                          VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlags(),
+                                          VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlags() ) VULKAN_HPP_NOEXCEPT
+      : coverageReductionMode( coverageReductionMode_ )
+      , rasterizationSamples( rasterizationSamples_ )
+      , depthStencilSamples( depthStencilSamples_ )
+      , colorSamples( colorSamples_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV & operator=( VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV ) - offsetof( FramebufferMixedSamplesCombinationNV, pNext ) );
+      return *this;
+    }
+
+    FramebufferMixedSamplesCombinationNV( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    FramebufferMixedSamplesCombinationNV& operator=( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV const *>(&rhs);
+      return *this;
+    }
+
+    operator VkFramebufferMixedSamplesCombinationNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFramebufferMixedSamplesCombinationNV*>( this );
+    }
+
+    operator VkFramebufferMixedSamplesCombinationNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFramebufferMixedSamplesCombinationNV*>( this );
+    }
+
+    bool operator==( FramebufferMixedSamplesCombinationNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( coverageReductionMode == rhs.coverageReductionMode )
+          && ( rasterizationSamples == rhs.rasterizationSamples )
+          && ( depthStencilSamples == rhs.depthStencilSamples )
+          && ( colorSamples == rhs.colorSamples );
+    }
+
+    bool operator!=( FramebufferMixedSamplesCombinationNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferMixedSamplesCombinationNV;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode;
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples;
+    VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples;
+    VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples;
+  };
+  static_assert( sizeof( FramebufferMixedSamplesCombinationNV ) == sizeof( VkFramebufferMixedSamplesCombinationNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FramebufferMixedSamplesCombinationNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct VertexInputBindingDescription
+  {
+    VULKAN_HPP_CONSTEXPR VertexInputBindingDescription( uint32_t binding_ = 0,
+                                                        uint32_t stride_ = 0,
+                                                        VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex ) VULKAN_HPP_NOEXCEPT
+      : binding( binding_ )
+      , stride( stride_ )
+      , inputRate( inputRate_ )
+    {}
+
+    VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    VertexInputBindingDescription& operator=( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription const *>(&rhs);
+      return *this;
+    }
+
+    VertexInputBindingDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      binding = binding_;
+      return *this;
+    }
+
+    VertexInputBindingDescription & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stride = stride_;
+      return *this;
+    }
+
+    VertexInputBindingDescription & setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inputRate = inputRate_;
+      return *this;
+    }
+
+    operator VkVertexInputBindingDescription const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVertexInputBindingDescription*>( this );
+    }
+
+    operator VkVertexInputBindingDescription &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVertexInputBindingDescription*>( this );
+    }
+
+    bool operator==( VertexInputBindingDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( binding == rhs.binding )
+          && ( stride == rhs.stride )
+          && ( inputRate == rhs.inputRate );
+    }
+
+    bool operator!=( VertexInputBindingDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint32_t binding;
+    uint32_t stride;
+    VULKAN_HPP_NAMESPACE::VertexInputRate inputRate;
+  };
+  static_assert( sizeof( VertexInputBindingDescription ) == sizeof( VkVertexInputBindingDescription ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VertexInputBindingDescription>::value, "struct wrapper is not a standard layout!" );
+
+  struct VertexInputAttributeDescription
+  {
+    VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( uint32_t location_ = 0,
+                                                          uint32_t binding_ = 0,
+                                                          VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                                          uint32_t offset_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : location( location_ )
+      , binding( binding_ )
+      , format( format_ )
+      , offset( offset_ )
+    {}
+
+    VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    VertexInputAttributeDescription& operator=( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription const *>(&rhs);
+      return *this;
+    }
+
+    VertexInputAttributeDescription & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT
+    {
+      location = location_;
+      return *this;
+    }
+
+    VertexInputAttributeDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      binding = binding_;
+      return *this;
+    }
+
+    VertexInputAttributeDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    VertexInputAttributeDescription & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    operator VkVertexInputAttributeDescription const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVertexInputAttributeDescription*>( this );
+    }
+
+    operator VkVertexInputAttributeDescription &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVertexInputAttributeDescription*>( this );
+    }
+
+    bool operator==( VertexInputAttributeDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( location == rhs.location )
+          && ( binding == rhs.binding )
+          && ( format == rhs.format )
+          && ( offset == rhs.offset );
+    }
+
+    bool operator!=( VertexInputAttributeDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint32_t location;
+    uint32_t binding;
+    VULKAN_HPP_NAMESPACE::Format format;
+    uint32_t offset;
+  };
+  static_assert( sizeof( VertexInputAttributeDescription ) == sizeof( VkVertexInputAttributeDescription ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VertexInputAttributeDescription>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineVertexInputStateCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ = VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags(),
+                                                             uint32_t vertexBindingDescriptionCount_ = 0,
+                                                             const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions_ = nullptr,
+                                                             uint32_t vertexAttributeDescriptionCount_ = 0,
+                                                             const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , vertexBindingDescriptionCount( vertexBindingDescriptionCount_ )
+      , pVertexBindingDescriptions( pVertexBindingDescriptions_ )
+      , vertexAttributeDescriptionCount( vertexAttributeDescriptionCount_ )
+      , pVertexAttributeDescriptions( pVertexAttributeDescriptions_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo ) - offsetof( PipelineVertexInputStateCreateInfo, pNext ) );
+      return *this;
+    }
+
+    PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineVertexInputStateCreateInfo& operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    PipelineVertexInputStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineVertexInputStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineVertexInputStateCreateInfo & setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexBindingDescriptionCount = vertexBindingDescriptionCount_;
+      return *this;
+    }
+
+    PipelineVertexInputStateCreateInfo & setPVertexBindingDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVertexBindingDescriptions = pVertexBindingDescriptions_;
+      return *this;
+    }
+
+    PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_;
+      return *this;
+    }
+
+    PipelineVertexInputStateCreateInfo & setPVertexAttributeDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVertexAttributeDescriptions = pVertexAttributeDescriptions_;
+      return *this;
+    }
+
+    operator VkPipelineVertexInputStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineVertexInputStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineVertexInputStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineVertexInputStateCreateInfo*>( this );
+    }
+
+    bool operator==( PipelineVertexInputStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount )
+          && ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions )
+          && ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount )
+          && ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions );
+    }
+
+    bool operator!=( PipelineVertexInputStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags;
+    uint32_t vertexBindingDescriptionCount;
+    const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions;
+    uint32_t vertexAttributeDescriptionCount;
+    const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions;
+  };
+  static_assert( sizeof( PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineVertexInputStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
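+
+  // Usage sketch only (assuming the default "vk" namespace alias): a vertex layout is
+  // described by pairing arrays of VertexInputBindingDescription / VertexInputAttributeDescription
+  // with this create info, here for an interleaved position (vec3) + uv (vec2) vertex:
+  //
+  //   vk::VertexInputBindingDescription binding( 0, 5 * sizeof( float ), vk::VertexInputRate::eVertex );
+  //   vk::VertexInputAttributeDescription attributes[2] = {
+  //     vk::VertexInputAttributeDescription( 0, 0, vk::Format::eR32G32B32Sfloat, 0 ),
+  //     vk::VertexInputAttributeDescription( 1, 0, vk::Format::eR32G32Sfloat, 3 * sizeof( float ) )
+  //   };
+  //   vk::PipelineVertexInputStateCreateInfo vertexInput( {}, 1, &binding, 2, attributes );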
+
+  struct PipelineInputAssemblyStateCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ = VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags(),
+                                                               VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList,
+                                                               VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , topology( topology_ )
+      , primitiveRestartEnable( primitiveRestartEnable_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo ) - offsetof( PipelineInputAssemblyStateCreateInfo, pNext ) );
+      return *this;
+    }
+
+    PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineInputAssemblyStateCreateInfo& operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    PipelineInputAssemblyStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineInputAssemblyStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineInputAssemblyStateCreateInfo & setTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ ) VULKAN_HPP_NOEXCEPT
+    {
+      topology = topology_;
+      return *this;
+    }
+
+    PipelineInputAssemblyStateCreateInfo & setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      primitiveRestartEnable = primitiveRestartEnable_;
+      return *this;
+    }
+
+    operator VkPipelineInputAssemblyStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineInputAssemblyStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineInputAssemblyStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineInputAssemblyStateCreateInfo*>( this );
+    }
+
+    bool operator==( PipelineInputAssemblyStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( topology == rhs.topology )
+          && ( primitiveRestartEnable == rhs.primitiveRestartEnable );
+    }
+
+    bool operator!=( PipelineInputAssemblyStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags;
+    VULKAN_HPP_NAMESPACE::PrimitiveTopology topology;
+    VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable;
+  };
+  static_assert( sizeof( PipelineInputAssemblyStateCreateInfo ) == sizeof( VkPipelineInputAssemblyStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineInputAssemblyStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineTessellationStateCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ = VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags(),
+                                                              uint32_t patchControlPoints_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , patchControlPoints( patchControlPoints_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo ) - offsetof( PipelineTessellationStateCreateInfo, pNext ) );
+      return *this;
+    }
+
+    PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineTessellationStateCreateInfo& operator=( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    PipelineTessellationStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineTessellationStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineTessellationStateCreateInfo & setPatchControlPoints( uint32_t patchControlPoints_ ) VULKAN_HPP_NOEXCEPT
+    {
+      patchControlPoints = patchControlPoints_;
+      return *this;
+    }
+
+    operator VkPipelineTessellationStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineTessellationStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineTessellationStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineTessellationStateCreateInfo*>( this );
+    }
+
+    bool operator==( PipelineTessellationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( patchControlPoints == rhs.patchControlPoints );
+    }
+
+    bool operator!=( PipelineTessellationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationStateCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags;
+    uint32_t patchControlPoints;
+  };
+  static_assert( sizeof( PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineTessellationStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct Viewport
+  {
+    VULKAN_HPP_CONSTEXPR Viewport( float x_ = 0,
+                                   float y_ = 0,
+                                   float width_ = 0,
+                                   float height_ = 0,
+                                   float minDepth_ = 0,
+                                   float maxDepth_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : x( x_ )
+      , y( y_ )
+      , width( width_ )
+      , height( height_ )
+      , minDepth( minDepth_ )
+      , maxDepth( maxDepth_ )
+    {}
+
+    Viewport( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    Viewport& operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Viewport const *>(&rhs);
+      return *this;
+    }
+
+    Viewport & setX( float x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    Viewport & setY( float y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+    Viewport & setWidth( float width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    Viewport & setHeight( float height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+
+    Viewport & setMinDepth( float minDepth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minDepth = minDepth_;
+      return *this;
+    }
+
+    Viewport & setMaxDepth( float maxDepth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxDepth = maxDepth_;
+      return *this;
+    }
+
+    operator VkViewport const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkViewport*>( this );
+    }
+
+    operator VkViewport &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkViewport*>( this );
+    }
+
+    bool operator==( Viewport const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( x == rhs.x )
+          && ( y == rhs.y )
+          && ( width == rhs.width )
+          && ( height == rhs.height )
+          && ( minDepth == rhs.minDepth )
+          && ( maxDepth == rhs.maxDepth );
+    }
+
+    bool operator!=( Viewport const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    float x;
+    float y;
+    float width;
+    float height;
+    float minDepth;
+    float maxDepth;
+  };
+  static_assert( sizeof( Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Viewport>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineViewportStateCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ = VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags(),
+                                                          uint32_t viewportCount_ = 0,
+                                                          const VULKAN_HPP_NAMESPACE::Viewport* pViewports_ = nullptr,
+                                                          uint32_t scissorCount_ = 0,
+                                                          const VULKAN_HPP_NAMESPACE::Rect2D* pScissors_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , viewportCount( viewportCount_ )
+      , pViewports( pViewports_ )
+      , scissorCount( scissorCount_ )
+      , pScissors( pScissors_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo ) - offsetof( PipelineViewportStateCreateInfo, pNext ) );
+      return *this;
+    }
+
+    PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineViewportStateCreateInfo& operator=( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    PipelineViewportStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineViewportStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineViewportStateCreateInfo & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportCount = viewportCount_;
+      return *this;
+    }
+
+    PipelineViewportStateCreateInfo & setPViewports( const VULKAN_HPP_NAMESPACE::Viewport* pViewports_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewports = pViewports_;
+      return *this;
+    }
+
+    PipelineViewportStateCreateInfo & setScissorCount( uint32_t scissorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      scissorCount = scissorCount_;
+      return *this;
+    }
+
+    PipelineViewportStateCreateInfo & setPScissors( const VULKAN_HPP_NAMESPACE::Rect2D* pScissors_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pScissors = pScissors_;
+      return *this;
+    }
+
+    operator VkPipelineViewportStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineViewportStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineViewportStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineViewportStateCreateInfo*>( this );
+    }
+
+    bool operator==( PipelineViewportStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( viewportCount == rhs.viewportCount )
+          && ( pViewports == rhs.pViewports )
+          && ( scissorCount == rhs.scissorCount )
+          && ( pScissors == rhs.pScissors );
+    }
+
+    bool operator!=( PipelineViewportStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportStateCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags;
+    uint32_t viewportCount;
+    const VULKAN_HPP_NAMESPACE::Viewport* pViewports;
+    uint32_t scissorCount;
+    const VULKAN_HPP_NAMESPACE::Rect2D* pScissors;
+  };
+  static_assert( sizeof( PipelineViewportStateCreateInfo ) == sizeof( VkPipelineViewportStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineViewportStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineRasterizationStateCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ = VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags(),
+                                                               VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ = 0,
+                                                               VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ = 0,
+                                                               VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ = VULKAN_HPP_NAMESPACE::PolygonMode::eFill,
+                                                               VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ = VULKAN_HPP_NAMESPACE::CullModeFlags(),
+                                                               VULKAN_HPP_NAMESPACE::FrontFace frontFace_ = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise,
+                                                               VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ = 0,
+                                                               float depthBiasConstantFactor_ = 0,
+                                                               float depthBiasClamp_ = 0,
+                                                               float depthBiasSlopeFactor_ = 0,
+                                                               float lineWidth_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , depthClampEnable( depthClampEnable_ )
+      , rasterizerDiscardEnable( rasterizerDiscardEnable_ )
+      , polygonMode( polygonMode_ )
+      , cullMode( cullMode_ )
+      , frontFace( frontFace_ )
+      , depthBiasEnable( depthBiasEnable_ )
+      , depthBiasConstantFactor( depthBiasConstantFactor_ )
+      , depthBiasClamp( depthBiasClamp_ )
+      , depthBiasSlopeFactor( depthBiasSlopeFactor_ )
+      , lineWidth( lineWidth_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo ) - offsetof( PipelineRasterizationStateCreateInfo, pNext ) );
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineRasterizationStateCreateInfo& operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setDepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthClampEnable = depthClampEnable_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rasterizerDiscardEnable = rasterizerDiscardEnable_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setPolygonMode( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      polygonMode = polygonMode_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      cullMode = cullMode_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace_ ) VULKAN_HPP_NOEXCEPT
+    {
+      frontFace = frontFace_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBiasEnable = depthBiasEnable_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBiasConstantFactor = depthBiasConstantFactor_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setDepthBiasClamp( float depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBiasClamp = depthBiasClamp_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBiasSlopeFactor = depthBiasSlopeFactor_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setLineWidth( float lineWidth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      lineWidth = lineWidth_;
+      return *this;
+    }
+
+    operator VkPipelineRasterizationStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineRasterizationStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRasterizationStateCreateInfo*>( this );
+    }
+
+    bool operator==( PipelineRasterizationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( depthClampEnable == rhs.depthClampEnable )
+          && ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable )
+          && ( polygonMode == rhs.polygonMode )
+          && ( cullMode == rhs.cullMode )
+          && ( frontFace == rhs.frontFace )
+          && ( depthBiasEnable == rhs.depthBiasEnable )
+          && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor )
+          && ( depthBiasClamp == rhs.depthBiasClamp )
+          && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor )
+          && ( lineWidth == rhs.lineWidth );
+    }
+
+    bool operator!=( PipelineRasterizationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags;
+    VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable;
+    VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable;
+    VULKAN_HPP_NAMESPACE::PolygonMode polygonMode;
+    VULKAN_HPP_NAMESPACE::CullModeFlags cullMode;
+    VULKAN_HPP_NAMESPACE::FrontFace frontFace;
+    VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable;
+    float depthBiasConstantFactor;
+    float depthBiasClamp;
+    float depthBiasSlopeFactor;
+    float lineWidth;
+  };
+  static_assert( sizeof( PipelineRasterizationStateCreateInfo ) == sizeof( VkPipelineRasterizationStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineRasterizationStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineMultisampleStateCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ = VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags(),
+                                                             VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+                                                             VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ = 0,
+                                                             float minSampleShading_ = 0,
+                                                             const VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask_ = nullptr,
+                                                             VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ = 0,
+                                                             VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , rasterizationSamples( rasterizationSamples_ )
+      , sampleShadingEnable( sampleShadingEnable_ )
+      , minSampleShading( minSampleShading_ )
+      , pSampleMask( pSampleMask_ )
+      , alphaToCoverageEnable( alphaToCoverageEnable_ )
+      , alphaToOneEnable( alphaToOneEnable_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo ) - offsetof( PipelineMultisampleStateCreateInfo, pNext ) );
+      return *this;
+    }
+
+    PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineMultisampleStateCreateInfo& operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    PipelineMultisampleStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineMultisampleStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineMultisampleStateCreateInfo & setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rasterizationSamples = rasterizationSamples_;
+      return *this;
+    }
+
+    PipelineMultisampleStateCreateInfo & setSampleShadingEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleShadingEnable = sampleShadingEnable_;
+      return *this;
+    }
+
+    PipelineMultisampleStateCreateInfo & setMinSampleShading( float minSampleShading_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minSampleShading = minSampleShading_;
+      return *this;
+    }
+
+    PipelineMultisampleStateCreateInfo & setPSampleMask( const VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSampleMask = pSampleMask_;
+      return *this;
+    }
+
+    PipelineMultisampleStateCreateInfo & setAlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      alphaToCoverageEnable = alphaToCoverageEnable_;
+      return *this;
+    }
+
+    PipelineMultisampleStateCreateInfo & setAlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      alphaToOneEnable = alphaToOneEnable_;
+      return *this;
+    }
+
+    operator VkPipelineMultisampleStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineMultisampleStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineMultisampleStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineMultisampleStateCreateInfo*>( this );
+    }
+
+    bool operator==( PipelineMultisampleStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( rasterizationSamples == rhs.rasterizationSamples )
+          && ( sampleShadingEnable == rhs.sampleShadingEnable )
+          && ( minSampleShading == rhs.minSampleShading )
+          && ( pSampleMask == rhs.pSampleMask )
+          && ( alphaToCoverageEnable == rhs.alphaToCoverageEnable )
+          && ( alphaToOneEnable == rhs.alphaToOneEnable );
+    }
+
+    bool operator!=( PipelineMultisampleStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags;
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples;
+    VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable;
+    float minSampleShading;
+    const VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask;
+    VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable;
+    VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable;
+  };
+  static_assert( sizeof( PipelineMultisampleStateCreateInfo ) == sizeof( VkPipelineMultisampleStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineMultisampleStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct StencilOpState
+  {
+    VULKAN_HPP_CONSTEXPR StencilOpState( VULKAN_HPP_NAMESPACE::StencilOp failOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
+                                         VULKAN_HPP_NAMESPACE::StencilOp passOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
+                                         VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
+                                         VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
+                                         uint32_t compareMask_ = 0,
+                                         uint32_t writeMask_ = 0,
+                                         uint32_t reference_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : failOp( failOp_ )
+      , passOp( passOp_ )
+      , depthFailOp( depthFailOp_ )
+      , compareOp( compareOp_ )
+      , compareMask( compareMask_ )
+      , writeMask( writeMask_ )
+      , reference( reference_ )
+    {}
+
+    StencilOpState( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    StencilOpState& operator=( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StencilOpState const *>(&rhs);
+      return *this;
+    }
+
+    StencilOpState & setFailOp( VULKAN_HPP_NAMESPACE::StencilOp failOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      failOp = failOp_;
+      return *this;
+    }
+
+    StencilOpState & setPassOp( VULKAN_HPP_NAMESPACE::StencilOp passOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      passOp = passOp_;
+      return *this;
+    }
+
+    StencilOpState & setDepthFailOp( VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthFailOp = depthFailOp_;
+      return *this;
+    }
+
+    StencilOpState & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compareOp = compareOp_;
+      return *this;
+    }
+
+    StencilOpState & setCompareMask( uint32_t compareMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compareMask = compareMask_;
+      return *this;
+    }
+
+    StencilOpState & setWriteMask( uint32_t writeMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      writeMask = writeMask_;
+      return *this;
+    }
+
+    StencilOpState & setReference( uint32_t reference_ ) VULKAN_HPP_NOEXCEPT
+    {
+      reference = reference_;
+      return *this;
+    }
+
+    operator VkStencilOpState const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkStencilOpState*>( this );
+    }
+
+    operator VkStencilOpState &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkStencilOpState*>( this );
+    }
+
+    bool operator==( StencilOpState const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( failOp == rhs.failOp )
+          && ( passOp == rhs.passOp )
+          && ( depthFailOp == rhs.depthFailOp )
+          && ( compareOp == rhs.compareOp )
+          && ( compareMask == rhs.compareMask )
+          && ( writeMask == rhs.writeMask )
+          && ( reference == rhs.reference );
+    }
+
+    bool operator!=( StencilOpState const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::StencilOp failOp;
+    VULKAN_HPP_NAMESPACE::StencilOp passOp;
+    VULKAN_HPP_NAMESPACE::StencilOp depthFailOp;
+    VULKAN_HPP_NAMESPACE::CompareOp compareOp;
+    uint32_t compareMask;
+    uint32_t writeMask;
+    uint32_t reference;
+  };
+  static_assert( sizeof( StencilOpState ) == sizeof( VkStencilOpState ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<StencilOpState>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineDepthStencilStateCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ = VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags(),
+                                                              VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ = 0,
+                                                              VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ = 0,
+                                                              VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
+                                                              VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ = 0,
+                                                              VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ = 0,
+                                                              VULKAN_HPP_NAMESPACE::StencilOpState front_ = VULKAN_HPP_NAMESPACE::StencilOpState(),
+                                                              VULKAN_HPP_NAMESPACE::StencilOpState back_ = VULKAN_HPP_NAMESPACE::StencilOpState(),
+                                                              float minDepthBounds_ = 0,
+                                                              float maxDepthBounds_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , depthTestEnable( depthTestEnable_ )
+      , depthWriteEnable( depthWriteEnable_ )
+      , depthCompareOp( depthCompareOp_ )
+      , depthBoundsTestEnable( depthBoundsTestEnable_ )
+      , stencilTestEnable( stencilTestEnable_ )
+      , front( front_ )
+      , back( back_ )
+      , minDepthBounds( minDepthBounds_ )
+      , maxDepthBounds( maxDepthBounds_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo ) - offsetof( PipelineDepthStencilStateCreateInfo, pNext ) );
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineDepthStencilStateCreateInfo& operator=( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthTestEnable = depthTestEnable_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthWriteEnable = depthWriteEnable_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthCompareOp = depthCompareOp_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBoundsTestEnable = depthBoundsTestEnable_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilTestEnable = stencilTestEnable_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setFront( VULKAN_HPP_NAMESPACE::StencilOpState front_ ) VULKAN_HPP_NOEXCEPT
+    {
+      front = front_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setBack( VULKAN_HPP_NAMESPACE::StencilOpState back_ ) VULKAN_HPP_NOEXCEPT
+    {
+      back = back_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setMinDepthBounds( float minDepthBounds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minDepthBounds = minDepthBounds_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setMaxDepthBounds( float maxDepthBounds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxDepthBounds = maxDepthBounds_;
+      return *this;
+    }
+
+    operator VkPipelineDepthStencilStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineDepthStencilStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineDepthStencilStateCreateInfo*>( this );
+    }
+
+    bool operator==( PipelineDepthStencilStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( depthTestEnable == rhs.depthTestEnable )
+          && ( depthWriteEnable == rhs.depthWriteEnable )
+          && ( depthCompareOp == rhs.depthCompareOp )
+          && ( depthBoundsTestEnable == rhs.depthBoundsTestEnable )
+          && ( stencilTestEnable == rhs.stencilTestEnable )
+          && ( front == rhs.front )
+          && ( back == rhs.back )
+          && ( minDepthBounds == rhs.minDepthBounds )
+          && ( maxDepthBounds == rhs.maxDepthBounds );
+    }
+
+    bool operator!=( PipelineDepthStencilStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags;
+    VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable;
+    VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable;
+    VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp;
+    VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable;
+    VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable;
+    VULKAN_HPP_NAMESPACE::StencilOpState front;
+    VULKAN_HPP_NAMESPACE::StencilOpState back;
+    float minDepthBounds;
+    float maxDepthBounds;
+  };
+  static_assert( sizeof( PipelineDepthStencilStateCreateInfo ) == sizeof( VkPipelineDepthStencilStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineDepthStencilStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineColorBlendAttachmentState
+  {
+    VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ = 0,
+                                                            VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
+                                                            VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
+                                                            VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd,
+                                                            VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
+                                                            VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
+                                                            VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd,
+                                                            VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ = VULKAN_HPP_NAMESPACE::ColorComponentFlags() ) VULKAN_HPP_NOEXCEPT
+      : blendEnable( blendEnable_ )
+      , srcColorBlendFactor( srcColorBlendFactor_ )
+      , dstColorBlendFactor( dstColorBlendFactor_ )
+      , colorBlendOp( colorBlendOp_ )
+      , srcAlphaBlendFactor( srcAlphaBlendFactor_ )
+      , dstAlphaBlendFactor( dstAlphaBlendFactor_ )
+      , alphaBlendOp( alphaBlendOp_ )
+      , colorWriteMask( colorWriteMask_ )
+    {}
+
+    PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineColorBlendAttachmentState& operator=( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState const *>(&rhs);
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState & setBlendEnable( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      blendEnable = blendEnable_;
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState & setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcColorBlendFactor = srcColorBlendFactor_;
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState & setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstColorBlendFactor = dstColorBlendFactor_;
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState & setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorBlendOp = colorBlendOp_;
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState & setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAlphaBlendFactor = srcAlphaBlendFactor_;
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState & setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAlphaBlendFactor = dstAlphaBlendFactor_;
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState & setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      alphaBlendOp = alphaBlendOp_;
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState & setColorWriteMask( VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorWriteMask = colorWriteMask_;
+      return *this;
+    }
+
+    operator VkPipelineColorBlendAttachmentState const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineColorBlendAttachmentState*>( this );
+    }
+
+    operator VkPipelineColorBlendAttachmentState &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineColorBlendAttachmentState*>( this );
+    }
+
+    bool operator==( PipelineColorBlendAttachmentState const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( blendEnable == rhs.blendEnable )
+          && ( srcColorBlendFactor == rhs.srcColorBlendFactor )
+          && ( dstColorBlendFactor == rhs.dstColorBlendFactor )
+          && ( colorBlendOp == rhs.colorBlendOp )
+          && ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor )
+          && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor )
+          && ( alphaBlendOp == rhs.alphaBlendOp )
+          && ( colorWriteMask == rhs.colorWriteMask );
+    }
+
+    bool operator!=( PipelineColorBlendAttachmentState const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::Bool32 blendEnable;
+    VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor;
+    VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor;
+    VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp;
+    VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor;
+    VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor;
+    VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp;
+    VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask;
+  };
+  static_assert( sizeof( PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineColorBlendAttachmentState>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineColorBlendStateCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ = VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags(),
+                                                               VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ = 0,
+                                                               VULKAN_HPP_NAMESPACE::LogicOp logicOp_ = VULKAN_HPP_NAMESPACE::LogicOp::eClear,
+                                                               uint32_t attachmentCount_ = 0,
+                                                               const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments_ = nullptr,
+                                                               std::array<float,4> const& blendConstants_ = { { 0 } } ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , logicOpEnable( logicOpEnable_ )
+      , logicOp( logicOp_ )
+      , attachmentCount( attachmentCount_ )
+      , pAttachments( pAttachments_ )
+      , blendConstants{}
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<float,4,4>::copy( blendConstants, blendConstants_ );
+    }
+
+    VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo ) - offsetof( PipelineColorBlendStateCreateInfo, pNext ) );
+      return *this;
+    }
+
+    PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineColorBlendStateCreateInfo& operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    PipelineColorBlendStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineColorBlendStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineColorBlendStateCreateInfo & setLogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      logicOpEnable = logicOpEnable_;
+      return *this;
+    }
+
+    PipelineColorBlendStateCreateInfo & setLogicOp( VULKAN_HPP_NAMESPACE::LogicOp logicOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      logicOp = logicOp_;
+      return *this;
+    }
+
+    PipelineColorBlendStateCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = attachmentCount_;
+      return *this;
+    }
+
+    PipelineColorBlendStateCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachments = pAttachments_;
+      return *this;
+    }
+
+    PipelineColorBlendStateCreateInfo & setBlendConstants( std::array<float,4> blendConstants_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( blendConstants, blendConstants_.data(), 4 * sizeof( float ) );
+      return *this;
+    }
+
+    operator VkPipelineColorBlendStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineColorBlendStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineColorBlendStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineColorBlendStateCreateInfo*>( this );
+    }
+
+    bool operator==( PipelineColorBlendStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( logicOpEnable == rhs.logicOpEnable )
+          && ( logicOp == rhs.logicOp )
+          && ( attachmentCount == rhs.attachmentCount )
+          && ( pAttachments == rhs.pAttachments )
+          && ( memcmp( blendConstants, rhs.blendConstants, 4 * sizeof( float ) ) == 0 );
+    }
+
+    bool operator!=( PipelineColorBlendStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendStateCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags;
+    VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable;
+    VULKAN_HPP_NAMESPACE::LogicOp logicOp;
+    uint32_t attachmentCount;
+    const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments;
+    float blendConstants[4];
+  };
+  static_assert( sizeof( PipelineColorBlendStateCreateInfo ) == sizeof( VkPipelineColorBlendStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineColorBlendStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineDynamicStateCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ = VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags(),
+                                                         uint32_t dynamicStateCount_ = 0,
+                                                         const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , dynamicStateCount( dynamicStateCount_ )
+      , pDynamicStates( pDynamicStates_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo ) - offsetof( PipelineDynamicStateCreateInfo, pNext ) );
+      return *this;
+    }
+
+    PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineDynamicStateCreateInfo& operator=( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    PipelineDynamicStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineDynamicStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineDynamicStateCreateInfo & setDynamicStateCount( uint32_t dynamicStateCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dynamicStateCount = dynamicStateCount_;
+      return *this;
+    }
+
+    PipelineDynamicStateCreateInfo & setPDynamicStates( const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDynamicStates = pDynamicStates_;
+      return *this;
+    }
+
+    operator VkPipelineDynamicStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineDynamicStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineDynamicStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineDynamicStateCreateInfo*>( this );
+    }
+
+    bool operator==( PipelineDynamicStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( dynamicStateCount == rhs.dynamicStateCount )
+          && ( pDynamicStates == rhs.pDynamicStates );
+    }
+
+    bool operator!=( PipelineDynamicStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDynamicStateCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags;
+    uint32_t dynamicStateCount;
+    const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates;
+  };
+  static_assert( sizeof( PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineDynamicStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct GraphicsPipelineCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR GraphicsPipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = VULKAN_HPP_NAMESPACE::PipelineCreateFlags(),
+                                                     uint32_t stageCount_ = 0,
+                                                     const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = nullptr,
+                                                     const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ = nullptr,
+                                                     const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ = nullptr,
+                                                     const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ = nullptr,
+                                                     const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState_ = nullptr,
+                                                     const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState_ = nullptr,
+                                                     const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState_ = nullptr,
+                                                     const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState_ = nullptr,
+                                                     const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState_ = nullptr,
+                                                     const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState_ = nullptr,
+                                                     VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = VULKAN_HPP_NAMESPACE::PipelineLayout(),
+                                                     VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = VULKAN_HPP_NAMESPACE::RenderPass(),
+                                                     uint32_t subpass_ = 0,
+                                                     VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = VULKAN_HPP_NAMESPACE::Pipeline(),
+                                                     int32_t basePipelineIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , stageCount( stageCount_ )
+      , pStages( pStages_ )
+      , pVertexInputState( pVertexInputState_ )
+      , pInputAssemblyState( pInputAssemblyState_ )
+      , pTessellationState( pTessellationState_ )
+      , pViewportState( pViewportState_ )
+      , pRasterizationState( pRasterizationState_ )
+      , pMultisampleState( pMultisampleState_ )
+      , pDepthStencilState( pDepthStencilState_ )
+      , pColorBlendState( pColorBlendState_ )
+      , pDynamicState( pDynamicState_ )
+      , layout( layout_ )
+      , renderPass( renderPass_ )
+      , subpass( subpass_ )
+      , basePipelineHandle( basePipelineHandle_ )
+      , basePipelineIndex( basePipelineIndex_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & operator=( VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo ) - offsetof( GraphicsPipelineCreateInfo, pNext ) );
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    GraphicsPipelineCreateInfo& operator=( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageCount = stageCount_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStages = pStages_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVertexInputState = pVertexInputState_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setPInputAssemblyState( const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInputAssemblyState = pInputAssemblyState_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTessellationState = pTessellationState_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setPViewportState( const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewportState = pViewportState_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setPRasterizationState( const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRasterizationState = pRasterizationState_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setPMultisampleState( const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pMultisampleState = pMultisampleState_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setPDepthStencilState( const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDepthStencilState = pDepthStencilState_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setPColorBlendState( const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pColorBlendState = pColorBlendState_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setPDynamicState( const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDynamicState = pDynamicState_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layout = layout_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      renderPass = renderPass_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpass = subpass_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineHandle = basePipelineHandle_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineIndex = basePipelineIndex_;
+      return *this;
+    }
+
+    operator VkGraphicsPipelineCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( this );
+    }
+
+    operator VkGraphicsPipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkGraphicsPipelineCreateInfo*>( this );
+    }
+
+    bool operator==( GraphicsPipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( stageCount == rhs.stageCount )
+          && ( pStages == rhs.pStages )
+          && ( pVertexInputState == rhs.pVertexInputState )
+          && ( pInputAssemblyState == rhs.pInputAssemblyState )
+          && ( pTessellationState == rhs.pTessellationState )
+          && ( pViewportState == rhs.pViewportState )
+          && ( pRasterizationState == rhs.pRasterizationState )
+          && ( pMultisampleState == rhs.pMultisampleState )
+          && ( pDepthStencilState == rhs.pDepthStencilState )
+          && ( pColorBlendState == rhs.pColorBlendState )
+          && ( pDynamicState == rhs.pDynamicState )
+          && ( layout == rhs.layout )
+          && ( renderPass == rhs.renderPass )
+          && ( subpass == rhs.subpass )
+          && ( basePipelineHandle == rhs.basePipelineHandle )
+          && ( basePipelineIndex == rhs.basePipelineIndex );
+    }
+
+    bool operator!=( GraphicsPipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags;
+    uint32_t stageCount;
+    const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages;
+    const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState;
+    const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState;
+    const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState;
+    const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState;
+    const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState;
+    const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState;
+    const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState;
+    const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState;
+    const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState;
+    VULKAN_HPP_NAMESPACE::PipelineLayout layout;
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    uint32_t subpass;
+    VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle;
+    int32_t basePipelineIndex;
+  };
+  static_assert( sizeof( GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<GraphicsPipelineCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct XYColorEXT
+  {
+    VULKAN_HPP_CONSTEXPR XYColorEXT( float x_ = 0,
+                                     float y_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : x( x_ )
+      , y( y_ )
+    {}
+
+    XYColorEXT( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    XYColorEXT& operator=( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XYColorEXT const *>(&rhs);
+      return *this;
+    }
+
+    XYColorEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    XYColorEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+    operator VkXYColorEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkXYColorEXT*>( this );
+    }
+
+    operator VkXYColorEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkXYColorEXT*>( this );
+    }
+
+    bool operator==( XYColorEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( x == rhs.x )
+          && ( y == rhs.y );
+    }
+
+    bool operator!=( XYColorEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    float x;
+    float y;
+  };
+  static_assert( sizeof( XYColorEXT ) == sizeof( VkXYColorEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<XYColorEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct HdrMetadataEXT
+  {
+    VULKAN_HPP_CONSTEXPR HdrMetadataEXT( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ = VULKAN_HPP_NAMESPACE::XYColorEXT(),
+                                         VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ = VULKAN_HPP_NAMESPACE::XYColorEXT(),
+                                         VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ = VULKAN_HPP_NAMESPACE::XYColorEXT(),
+                                         VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ = VULKAN_HPP_NAMESPACE::XYColorEXT(),
+                                         float maxLuminance_ = 0,
+                                         float minLuminance_ = 0,
+                                         float maxContentLightLevel_ = 0,
+                                         float maxFrameAverageLightLevel_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : displayPrimaryRed( displayPrimaryRed_ )
+      , displayPrimaryGreen( displayPrimaryGreen_ )
+      , displayPrimaryBlue( displayPrimaryBlue_ )
+      , whitePoint( whitePoint_ )
+      , maxLuminance( maxLuminance_ )
+      , minLuminance( minLuminance_ )
+      , maxContentLightLevel( maxContentLightLevel_ )
+      , maxFrameAverageLightLevel( maxFrameAverageLightLevel_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::HdrMetadataEXT & operator=( VULKAN_HPP_NAMESPACE::HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::HdrMetadataEXT ) - offsetof( HdrMetadataEXT, pNext ) );
+      return *this;
+    }
+
+    HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    HdrMetadataEXT& operator=( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HdrMetadataEXT const *>(&rhs);
+      return *this;
+    }
+
+    HdrMetadataEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    HdrMetadataEXT & setDisplayPrimaryRed( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displayPrimaryRed = displayPrimaryRed_;
+      return *this;
+    }
+
+    HdrMetadataEXT & setDisplayPrimaryGreen( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displayPrimaryGreen = displayPrimaryGreen_;
+      return *this;
+    }
+
+    HdrMetadataEXT & setDisplayPrimaryBlue( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displayPrimaryBlue = displayPrimaryBlue_;
+      return *this;
+    }
+
+    HdrMetadataEXT & setWhitePoint( VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      whitePoint = whitePoint_;
+      return *this;
+    }
+
+    HdrMetadataEXT & setMaxLuminance( float maxLuminance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxLuminance = maxLuminance_;
+      return *this;
+    }
+
+    HdrMetadataEXT & setMinLuminance( float minLuminance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minLuminance = minLuminance_;
+      return *this;
+    }
+
+    HdrMetadataEXT & setMaxContentLightLevel( float maxContentLightLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxContentLightLevel = maxContentLightLevel_;
+      return *this;
+    }
+
+    HdrMetadataEXT & setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxFrameAverageLightLevel = maxFrameAverageLightLevel_;
+      return *this;
+    }
+
+    operator VkHdrMetadataEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkHdrMetadataEXT*>( this );
+    }
+
+    operator VkHdrMetadataEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkHdrMetadataEXT*>( this );
+    }
+
+    bool operator==( HdrMetadataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( displayPrimaryRed == rhs.displayPrimaryRed )
+          && ( displayPrimaryGreen == rhs.displayPrimaryGreen )
+          && ( displayPrimaryBlue == rhs.displayPrimaryBlue )
+          && ( whitePoint == rhs.whitePoint )
+          && ( maxLuminance == rhs.maxLuminance )
+          && ( minLuminance == rhs.minLuminance )
+          && ( maxContentLightLevel == rhs.maxContentLightLevel )
+          && ( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel );
+    }
+
+    bool operator!=( HdrMetadataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHdrMetadataEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed;
+    VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen;
+    VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue;
+    VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint;
+    float maxLuminance;
+    float minLuminance;
+    float maxContentLightLevel;
+    float maxFrameAverageLightLevel;
+  };
+  static_assert( sizeof( HdrMetadataEXT ) == sizeof( VkHdrMetadataEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<HdrMetadataEXT>::value, "struct wrapper is not a standard layout!" );
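+
+  // Usage sketch for the fluent setters above: the primaries/luminance numbers are
+  // placeholder BT.2020-style values, and `device` / `swapchain` are assumed valid
+  // handles with VK_EXT_hdr_metadata enabled.
+  //
+  //   vk::HdrMetadataEXT hdr = vk::HdrMetadataEXT()
+  //       .setDisplayPrimaryRed( vk::XYColorEXT( 0.680f, 0.320f ) )
+  //       .setDisplayPrimaryGreen( vk::XYColorEXT( 0.265f, 0.690f ) )
+  //       .setDisplayPrimaryBlue( vk::XYColorEXT( 0.150f, 0.060f ) )
+  //       .setWhitePoint( vk::XYColorEXT( 0.3127f, 0.3290f ) )
+  //       .setMaxLuminance( 1000.0f )
+  //       .setMinLuminance( 0.001f )
+  //       .setMaxContentLightLevel( 1000.0f )
+  //       .setMaxFrameAverageLightLevel( 400.0f );
+  //   device.setHdrMetadataEXT( swapchain, hdr );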
+
+  struct HeadlessSurfaceCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ = VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT() ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT ) - offsetof( HeadlessSurfaceCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    HeadlessSurfaceCreateInfoEXT( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    HeadlessSurfaceCreateInfoEXT& operator=( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    HeadlessSurfaceCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    HeadlessSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    operator VkHeadlessSurfaceCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT*>( this );
+    }
+
+    operator VkHeadlessSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkHeadlessSurfaceCreateInfoEXT*>( this );
+    }
+
+    bool operator==( HeadlessSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( HeadlessSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHeadlessSurfaceCreateInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags;
+  };
+  static_assert( sizeof( HeadlessSurfaceCreateInfoEXT ) == sizeof( VkHeadlessSurfaceCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<HeadlessSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+
+  struct IOSSurfaceCreateInfoMVK
+  {
+    VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ = VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK(),
+                                                  const void* pView_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , pView( pView_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & operator=( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK ) - offsetof( IOSSurfaceCreateInfoMVK, pNext ) );
+      return *this;
+    }
+
+    IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    IOSSurfaceCreateInfoMVK& operator=( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const *>(&rhs);
+      return *this;
+    }
+
+    IOSSurfaceCreateInfoMVK & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    IOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    IOSSurfaceCreateInfoMVK & setPView( const void* pView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pView = pView_;
+      return *this;
+    }
+
+    operator VkIOSSurfaceCreateInfoMVK const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( this );
+    }
+
+    operator VkIOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkIOSSurfaceCreateInfoMVK*>( this );
+    }
+
+    bool operator==( IOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pView == rhs.pView );
+    }
+
+    bool operator!=( IOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIosSurfaceCreateInfoMVK;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags;
+    const void* pView;
+  };
+  static_assert( sizeof( IOSSurfaceCreateInfoMVK ) == sizeof( VkIOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<IOSSurfaceCreateInfoMVK>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+  struct ImageBlit
+  {
+    VULKAN_HPP_CONSTEXPR_14 ImageBlit( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = VULKAN_HPP_NAMESPACE::ImageSubresourceLayers(),
+                                       std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const& srcOffsets_ = { { VULKAN_HPP_NAMESPACE::Offset3D() } },
+                                       VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = VULKAN_HPP_NAMESPACE::ImageSubresourceLayers(),
+                                       std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const& dstOffsets_ = { { VULKAN_HPP_NAMESPACE::Offset3D() } } ) VULKAN_HPP_NOEXCEPT
+      : srcSubresource( srcSubresource_ )
+      , srcOffsets{}
+      , dstSubresource( dstSubresource_ )
+      , dstOffsets{}
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<VULKAN_HPP_NAMESPACE::Offset3D,2,2>::copy( srcOffsets, srcOffsets_ );
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<VULKAN_HPP_NAMESPACE::Offset3D,2,2>::copy( dstOffsets, dstOffsets_ );
+    }
+
+    ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageBlit& operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageBlit const *>(&rhs);
+      return *this;
+    }
+
+    ImageBlit & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSubresource = srcSubresource_;
+      return *this;
+    }
+
+    ImageBlit & setSrcOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> srcOffsets_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( srcOffsets, srcOffsets_.data(), 2 * sizeof( VULKAN_HPP_NAMESPACE::Offset3D ) );
+      return *this;
+    }
+
+    ImageBlit & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSubresource = dstSubresource_;
+      return *this;
+    }
+
+    ImageBlit & setDstOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> dstOffsets_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( dstOffsets, dstOffsets_.data(), 2 * sizeof( VULKAN_HPP_NAMESPACE::Offset3D ) );
+      return *this;
+    }
+
+    operator VkImageBlit const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageBlit*>( this );
+    }
+
+    operator VkImageBlit &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageBlit*>( this );
+    }
+
+    bool operator==( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( srcSubresource == rhs.srcSubresource )
+          && ( memcmp( srcOffsets, rhs.srcOffsets, 2 * sizeof( VULKAN_HPP_NAMESPACE::Offset3D ) ) == 0 )
+          && ( dstSubresource == rhs.dstSubresource )
+          && ( memcmp( dstOffsets, rhs.dstOffsets, 2 * sizeof( VULKAN_HPP_NAMESPACE::Offset3D ) ) == 0 );
+    }
+
+    bool operator!=( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource;
+    VULKAN_HPP_NAMESPACE::Offset3D srcOffsets[2];
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource;
+    VULKAN_HPP_NAMESPACE::Offset3D dstOffsets[2];
+  };
+  static_assert( sizeof( ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageBlit>::value, "struct wrapper is not a standard layout!" );
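+
+  // Usage sketch: filling an ImageBlit region for CommandBuffer::blitImage; the
+  // extents (`srcWidth`, `srcHeight`, `dstWidth`, `dstHeight`) and mip levels are
+  // placeholders for the example.
+  //
+  //   vk::ImageBlit blit = vk::ImageBlit()
+  //       .setSrcSubresource( vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ) )
+  //       .setSrcOffsets( { { vk::Offset3D( 0, 0, 0 ), vk::Offset3D( srcWidth, srcHeight, 1 ) } } )
+  //       .setDstSubresource( vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 1, 0, 1 ) )
+  //       .setDstOffsets( { { vk::Offset3D( 0, 0, 0 ), vk::Offset3D( dstWidth, dstHeight, 1 ) } } );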
+
+  struct ImageCopy
+  {
+    VULKAN_HPP_CONSTEXPR ImageCopy( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = VULKAN_HPP_NAMESPACE::ImageSubresourceLayers(),
+                                    VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = VULKAN_HPP_NAMESPACE::Offset3D(),
+                                    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = VULKAN_HPP_NAMESPACE::ImageSubresourceLayers(),
+                                    VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = VULKAN_HPP_NAMESPACE::Offset3D(),
+                                    VULKAN_HPP_NAMESPACE::Extent3D extent_ = VULKAN_HPP_NAMESPACE::Extent3D() ) VULKAN_HPP_NOEXCEPT
+      : srcSubresource( srcSubresource_ )
+      , srcOffset( srcOffset_ )
+      , dstSubresource( dstSubresource_ )
+      , dstOffset( dstOffset_ )
+      , extent( extent_ )
+    {}
+
+    ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageCopy& operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCopy const *>(&rhs);
+      return *this;
+    }
+
+    ImageCopy & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSubresource = srcSubresource_;
+      return *this;
+    }
+
+    ImageCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcOffset = srcOffset_;
+      return *this;
+    }
+
+    ImageCopy & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSubresource = dstSubresource_;
+      return *this;
+    }
+
+    ImageCopy & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstOffset = dstOffset_;
+      return *this;
+    }
+
+    ImageCopy & setExtent( VULKAN_HPP_NAMESPACE::Extent3D extent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extent = extent_;
+      return *this;
+    }
+
+    operator VkImageCopy const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageCopy*>( this );
+    }
+
+    operator VkImageCopy &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageCopy*>( this );
+    }
+
+    bool operator==( ImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( srcSubresource == rhs.srcSubresource )
+          && ( srcOffset == rhs.srcOffset )
+          && ( dstSubresource == rhs.dstSubresource )
+          && ( dstOffset == rhs.dstOffset )
+          && ( extent == rhs.extent );
+    }
+
+    bool operator!=( ImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource;
+    VULKAN_HPP_NAMESPACE::Offset3D srcOffset;
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource;
+    VULKAN_HPP_NAMESPACE::Offset3D dstOffset;
+    VULKAN_HPP_NAMESPACE::Extent3D extent;
+  };
+  static_assert( sizeof( ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageCopy>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = VULKAN_HPP_NAMESPACE::ImageCreateFlags(),
+                                          VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D,
+                                          VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                          VULKAN_HPP_NAMESPACE::Extent3D extent_ = VULKAN_HPP_NAMESPACE::Extent3D(),
+                                          uint32_t mipLevels_ = 0,
+                                          uint32_t arrayLayers_ = 0,
+                                          VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+                                          VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal,
+                                          VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = VULKAN_HPP_NAMESPACE::ImageUsageFlags(),
+                                          VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
+                                          uint32_t queueFamilyIndexCount_ = 0,
+                                          const uint32_t* pQueueFamilyIndices_ = nullptr,
+                                          VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , imageType( imageType_ )
+      , format( format_ )
+      , extent( extent_ )
+      , mipLevels( mipLevels_ )
+      , arrayLayers( arrayLayers_ )
+      , samples( samples_ )
+      , tiling( tiling_ )
+      , usage( usage_ )
+      , sharingMode( sharingMode_ )
+      , queueFamilyIndexCount( queueFamilyIndexCount_ )
+      , pQueueFamilyIndices( pQueueFamilyIndices_ )
+      , initialLayout( initialLayout_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ImageCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageCreateInfo ) - offsetof( ImageCreateInfo, pNext ) );
+      return *this;
+    }
+
+    ImageCreateInfo( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageCreateInfo& operator=( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    ImageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ImageCreateInfo & setImageType( VULKAN_HPP_NAMESPACE::ImageType imageType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageType = imageType_;
+      return *this;
+    }
+
+    ImageCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    ImageCreateInfo & setExtent( VULKAN_HPP_NAMESPACE::Extent3D extent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extent = extent_;
+      return *this;
+    }
+
+    ImageCreateInfo & setMipLevels( uint32_t mipLevels_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mipLevels = mipLevels_;
+      return *this;
+    }
+
+    ImageCreateInfo & setArrayLayers( uint32_t arrayLayers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      arrayLayers = arrayLayers_;
+      return *this;
+    }
+
+    ImageCreateInfo & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samples = samples_;
+      return *this;
+    }
+
+    ImageCreateInfo & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tiling = tiling_;
+      return *this;
+    }
+
+    ImageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    ImageCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sharingMode = sharingMode_;
+      return *this;
+    }
+
+    ImageCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndexCount = queueFamilyIndexCount_;
+      return *this;
+    }
+
+    ImageCreateInfo & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueueFamilyIndices = pQueueFamilyIndices_;
+      return *this;
+    }
+
+    ImageCreateInfo & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialLayout = initialLayout_;
+      return *this;
+    }
+
+    operator VkImageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageCreateInfo*>( this );
+    }
+
+    operator VkImageCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageCreateInfo*>( this );
+    }
+
+    bool operator==( ImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( imageType == rhs.imageType )
+          && ( format == rhs.format )
+          && ( extent == rhs.extent )
+          && ( mipLevels == rhs.mipLevels )
+          && ( arrayLayers == rhs.arrayLayers )
+          && ( samples == rhs.samples )
+          && ( tiling == rhs.tiling )
+          && ( usage == rhs.usage )
+          && ( sharingMode == rhs.sharingMode )
+          && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
+          && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
+          && ( initialLayout == rhs.initialLayout );
+    }
+
+    bool operator!=( ImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ImageCreateFlags flags;
+    VULKAN_HPP_NAMESPACE::ImageType imageType;
+    VULKAN_HPP_NAMESPACE::Format format;
+    VULKAN_HPP_NAMESPACE::Extent3D extent;
+    uint32_t mipLevels;
+    uint32_t arrayLayers;
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples;
+    VULKAN_HPP_NAMESPACE::ImageTiling tiling;
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage;
+    VULKAN_HPP_NAMESPACE::SharingMode sharingMode;
+    uint32_t queueFamilyIndexCount;
+    const uint32_t* pQueueFamilyIndices;
+    VULKAN_HPP_NAMESPACE::ImageLayout initialLayout;
+  };
+  static_assert( sizeof( ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageCreateInfo>::value, "struct wrapper is not a standard layout!" );
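+
+  // Usage sketch: a typical 2D sampled/transfer-destination image built with the
+  // chained setters above; `width`, `height`, and the format are placeholder values,
+  // and `device` is an assumed valid vk::Device.
+  //
+  //   vk::ImageCreateInfo info = vk::ImageCreateInfo()
+  //       .setImageType( vk::ImageType::e2D )
+  //       .setFormat( vk::Format::eR8G8B8A8Unorm )
+  //       .setExtent( vk::Extent3D( width, height, 1 ) )
+  //       .setMipLevels( 1 )
+  //       .setArrayLayers( 1 )
+  //       .setSamples( vk::SampleCountFlagBits::e1 )
+  //       .setTiling( vk::ImageTiling::eOptimal )
+  //       .setUsage( vk::ImageUsageFlagBits::eSampled | vk::ImageUsageFlagBits::eTransferDst )
+  //       .setSharingMode( vk::SharingMode::eExclusive )
+  //       .setInitialLayout( vk::ImageLayout::eUndefined );
+  //   vk::Image image = device.createImage( info );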
+
+  struct SubresourceLayout
+  {
+    SubresourceLayout( VULKAN_HPP_NAMESPACE::DeviceSize offset_ = 0,
+                       VULKAN_HPP_NAMESPACE::DeviceSize size_ = 0,
+                       VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ = 0,
+                       VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ = 0,
+                       VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : offset( offset_ )
+      , size( size_ )
+      , rowPitch( rowPitch_ )
+      , arrayPitch( arrayPitch_ )
+      , depthPitch( depthPitch_ )
+    {}
+
+    SubresourceLayout( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SubresourceLayout& operator=( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubresourceLayout const *>(&rhs);
+      return *this;
+    }
+
+    operator VkSubresourceLayout const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubresourceLayout*>( this );
+    }
+
+    operator VkSubresourceLayout &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubresourceLayout*>( this );
+    }
+
+    bool operator==( SubresourceLayout const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( offset == rhs.offset )
+          && ( size == rhs.size )
+          && ( rowPitch == rhs.rowPitch )
+          && ( arrayPitch == rhs.arrayPitch )
+          && ( depthPitch == rhs.depthPitch );
+    }
+
+    bool operator!=( SubresourceLayout const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceSize offset;
+    VULKAN_HPP_NAMESPACE::DeviceSize size;
+    VULKAN_HPP_NAMESPACE::DeviceSize rowPitch;
+    VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch;
+    VULKAN_HPP_NAMESPACE::DeviceSize depthPitch;
+  };
+  static_assert( sizeof( SubresourceLayout ) == sizeof( VkSubresourceLayout ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubresourceLayout>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageDrmFormatModifierExplicitCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t drmFormatModifier_ = 0,
+                                                                      uint32_t drmFormatModifierPlaneCount_ = 0,
+                                                                      const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : drmFormatModifier( drmFormatModifier_ )
+      , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ )
+      , pPlaneLayouts( pPlaneLayouts_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT ) - offsetof( ImageDrmFormatModifierExplicitCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageDrmFormatModifierExplicitCreateInfoEXT& operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    ImageDrmFormatModifierExplicitCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drmFormatModifier = drmFormatModifier_;
+      return *this;
+    }
+
+    ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifierPlaneCount( uint32_t drmFormatModifierPlaneCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drmFormatModifierPlaneCount = drmFormatModifierPlaneCount_;
+      return *this;
+    }
+
+    ImageDrmFormatModifierExplicitCreateInfoEXT & setPPlaneLayouts( const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPlaneLayouts = pPlaneLayouts_;
+      return *this;
+    }
+
+    operator VkImageDrmFormatModifierExplicitCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageDrmFormatModifierExplicitCreateInfoEXT*>( this );
+    }
+
+    operator VkImageDrmFormatModifierExplicitCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageDrmFormatModifierExplicitCreateInfoEXT*>( this );
+    }
+
+    bool operator==( ImageDrmFormatModifierExplicitCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( drmFormatModifier == rhs.drmFormatModifier )
+          && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount )
+          && ( pPlaneLayouts == rhs.pPlaneLayouts );
+    }
+
+    bool operator!=( ImageDrmFormatModifierExplicitCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;
+    const void* pNext = nullptr;
+    uint64_t drmFormatModifier;
+    uint32_t drmFormatModifierPlaneCount;
+    const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts;
+  };
+  static_assert( sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierExplicitCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageDrmFormatModifierExplicitCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageDrmFormatModifierListCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( uint32_t drmFormatModifierCount_ = 0,
+                                                                  const uint64_t* pDrmFormatModifiers_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : drmFormatModifierCount( drmFormatModifierCount_ )
+      , pDrmFormatModifiers( pDrmFormatModifiers_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT ) - offsetof( ImageDrmFormatModifierListCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageDrmFormatModifierListCreateInfoEXT& operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    ImageDrmFormatModifierListCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drmFormatModifierCount = drmFormatModifierCount_;
+      return *this;
+    }
+
+    ImageDrmFormatModifierListCreateInfoEXT & setPDrmFormatModifiers( const uint64_t* pDrmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDrmFormatModifiers = pDrmFormatModifiers_;
+      return *this;
+    }
+
+    operator VkImageDrmFormatModifierListCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageDrmFormatModifierListCreateInfoEXT*>( this );
+    }
+
+    operator VkImageDrmFormatModifierListCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageDrmFormatModifierListCreateInfoEXT*>( this );
+    }
+
+    bool operator==( ImageDrmFormatModifierListCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( drmFormatModifierCount == rhs.drmFormatModifierCount )
+          && ( pDrmFormatModifiers == rhs.pDrmFormatModifiers );
+    }
+
+    bool operator!=( ImageDrmFormatModifierListCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT;
+    const void* pNext = nullptr;
+    uint32_t drmFormatModifierCount;
+    const uint64_t* pDrmFormatModifiers;
+  };
+  static_assert( sizeof( ImageDrmFormatModifierListCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierListCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageDrmFormatModifierListCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
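+
+  // Usage sketch: chaining the modifier list into an ImageCreateInfo via pNext when
+  // the image uses vk::ImageTiling::eDrmFormatModifierEXT; `modifierA` / `modifierB`
+  // stand in for modifiers previously queried from the driver.
+  //
+  //   uint64_t modifiers[] = { modifierA, modifierB };
+  //   vk::ImageDrmFormatModifierListCreateInfoEXT modifierList = vk::ImageDrmFormatModifierListCreateInfoEXT()
+  //       .setDrmFormatModifierCount( 2 )
+  //       .setPDrmFormatModifiers( modifiers );
+  //   imageCreateInfo.setTiling( vk::ImageTiling::eDrmFormatModifierEXT )
+  //                  .setPNext( &modifierList );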
+
+  struct ImageDrmFormatModifierPropertiesEXT
+  {
+    ImageDrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : drmFormatModifier( drmFormatModifier_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT ) - offsetof( ImageDrmFormatModifierPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    ImageDrmFormatModifierPropertiesEXT( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageDrmFormatModifierPropertiesEXT& operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkImageDrmFormatModifierPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageDrmFormatModifierPropertiesEXT*>( this );
+    }
+
+    operator VkImageDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT*>( this );
+    }
+
+    bool operator==( ImageDrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( drmFormatModifier == rhs.drmFormatModifier );
+    }
+
+    bool operator!=( ImageDrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierPropertiesEXT;
+    void* pNext = nullptr;
+    uint64_t drmFormatModifier;
+  };
+  static_assert( sizeof( ImageDrmFormatModifierPropertiesEXT ) == sizeof( VkImageDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageDrmFormatModifierPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageFormatListCreateInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfoKHR( uint32_t viewFormatCount_ = 0,
+                                                       const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : viewFormatCount( viewFormatCount_ )
+      , pViewFormats( pViewFormats_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfoKHR ) - offsetof( ImageFormatListCreateInfoKHR, pNext ) );
+      return *this;
+    }
+
+    ImageFormatListCreateInfoKHR( VkImageFormatListCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageFormatListCreateInfoKHR& operator=( VkImageFormatListCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    ImageFormatListCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageFormatListCreateInfoKHR & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewFormatCount = viewFormatCount_;
+      return *this;
+    }
+
+    ImageFormatListCreateInfoKHR & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewFormats = pViewFormats_;
+      return *this;
+    }
+
+    operator VkImageFormatListCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageFormatListCreateInfoKHR*>( this );
+    }
+
+    operator VkImageFormatListCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageFormatListCreateInfoKHR*>( this );
+    }
+
+    bool operator==( ImageFormatListCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( viewFormatCount == rhs.viewFormatCount )
+          && ( pViewFormats == rhs.pViewFormats );
+    }
+
+    bool operator!=( ImageFormatListCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatListCreateInfoKHR;
+    const void* pNext = nullptr;
+    uint32_t viewFormatCount;
+    const VULKAN_HPP_NAMESPACE::Format* pViewFormats;
+  };
+  static_assert( sizeof( ImageFormatListCreateInfoKHR ) == sizeof( VkImageFormatListCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageFormatListCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageFormatProperties2
+  {
+    ImageFormatProperties2( VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = VULKAN_HPP_NAMESPACE::ImageFormatProperties() ) VULKAN_HPP_NOEXCEPT
+      : imageFormatProperties( imageFormatProperties_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & operator=( VULKAN_HPP_NAMESPACE::ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageFormatProperties2 ) - offsetof( ImageFormatProperties2, pNext ) );
+      return *this;
+    }
+
+    ImageFormatProperties2( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageFormatProperties2& operator=( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatProperties2 const *>(&rhs);
+      return *this;
+    }
+
+    operator VkImageFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageFormatProperties2*>( this );
+    }
+
+    operator VkImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageFormatProperties2*>( this );
+    }
+
+    bool operator==( ImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( imageFormatProperties == rhs.imageFormatProperties );
+    }
+
+    bool operator!=( ImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatProperties2;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties;
+  };
+  static_assert( sizeof( ImageFormatProperties2 ) == sizeof( VkImageFormatProperties2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageFormatProperties2>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageSubresourceRange
+  {
+    VULKAN_HPP_CONSTEXPR ImageSubresourceRange( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = VULKAN_HPP_NAMESPACE::ImageAspectFlags(),
+                                                uint32_t baseMipLevel_ = 0,
+                                                uint32_t levelCount_ = 0,
+                                                uint32_t baseArrayLayer_ = 0,
+                                                uint32_t layerCount_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : aspectMask( aspectMask_ )
+      , baseMipLevel( baseMipLevel_ )
+      , levelCount( levelCount_ )
+      , baseArrayLayer( baseArrayLayer_ )
+      , layerCount( layerCount_ )
+    {}
+
+    ImageSubresourceRange( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageSubresourceRange& operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceRange const *>(&rhs);
+      return *this;
+    }
+
+    ImageSubresourceRange & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+    ImageSubresourceRange & setBaseMipLevel( uint32_t baseMipLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      baseMipLevel = baseMipLevel_;
+      return *this;
+    }
+
+    ImageSubresourceRange & setLevelCount( uint32_t levelCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      levelCount = levelCount_;
+      return *this;
+    }
+
+    ImageSubresourceRange & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      baseArrayLayer = baseArrayLayer_;
+      return *this;
+    }
+
+    ImageSubresourceRange & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layerCount = layerCount_;
+      return *this;
+    }
+
+    operator VkImageSubresourceRange const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageSubresourceRange*>( this );
+    }
+
+    operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageSubresourceRange*>( this );
+    }
+
+    bool operator==( ImageSubresourceRange const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( aspectMask == rhs.aspectMask )
+          && ( baseMipLevel == rhs.baseMipLevel )
+          && ( levelCount == rhs.levelCount )
+          && ( baseArrayLayer == rhs.baseArrayLayer )
+          && ( layerCount == rhs.layerCount );
+    }
+
+    bool operator!=( ImageSubresourceRange const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask;
+    uint32_t baseMipLevel;
+    uint32_t levelCount;
+    uint32_t baseArrayLayer;
+    uint32_t layerCount;
+  };
+  static_assert( sizeof( ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageSubresourceRange>::value, "struct wrapper is not a standard layout!" );
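+
+  // Usage sketch: the common "whole color image" range used for image views and
+  // barriers; VK_REMAINING_MIP_LEVELS / VK_REMAINING_ARRAY_LAYERS cover every level
+  // and layer of the image.
+  //
+  //   vk::ImageSubresourceRange fullRange( vk::ImageAspectFlagBits::eColor,
+  //                                        0, VK_REMAINING_MIP_LEVELS,
+  //                                        0, VK_REMAINING_ARRAY_LAYERS );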
+
+  struct ImageMemoryBarrier
+  {
+    VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = VULKAN_HPP_NAMESPACE::AccessFlags(),
+                                             VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = VULKAN_HPP_NAMESPACE::AccessFlags(),
+                                             VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+                                             VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+                                             uint32_t srcQueueFamilyIndex_ = 0,
+                                             uint32_t dstQueueFamilyIndex_ = 0,
+                                             VULKAN_HPP_NAMESPACE::Image image_ = VULKAN_HPP_NAMESPACE::Image(),
+                                             VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = VULKAN_HPP_NAMESPACE::ImageSubresourceRange() ) VULKAN_HPP_NOEXCEPT
+      : srcAccessMask( srcAccessMask_ )
+      , dstAccessMask( dstAccessMask_ )
+      , oldLayout( oldLayout_ )
+      , newLayout( newLayout_ )
+      , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
+      , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
+      , image( image_ )
+      , subresourceRange( subresourceRange_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ImageMemoryBarrier & operator=( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier ) - offsetof( ImageMemoryBarrier, pNext ) );
+      return *this;
+    }
+
+    ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageMemoryBarrier& operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier const *>(&rhs);
+      return *this;
+    }
+
+    ImageMemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      oldLayout = oldLayout_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      newLayout = newLayout_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcQueueFamilyIndex = srcQueueFamilyIndex_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstQueueFamilyIndex = dstQueueFamilyIndex_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subresourceRange = subresourceRange_;
+      return *this;
+    }
+
+    operator VkImageMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageMemoryBarrier*>( this );
+    }
+
+    operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageMemoryBarrier*>( this );
+    }
+
+    bool operator==( ImageMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstAccessMask == rhs.dstAccessMask )
+          && ( oldLayout == rhs.oldLayout )
+          && ( newLayout == rhs.newLayout )
+          && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
+          && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
+          && ( image == rhs.image )
+          && ( subresourceRange == rhs.subresourceRange );
+    }
+
+    bool operator!=( ImageMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask;
+    VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask;
+    VULKAN_HPP_NAMESPACE::ImageLayout oldLayout;
+    VULKAN_HPP_NAMESPACE::ImageLayout newLayout;
+    uint32_t srcQueueFamilyIndex;
+    uint32_t dstQueueFamilyIndex;
+    VULKAN_HPP_NAMESPACE::Image image;
+    VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange;
+  };
+  static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageMemoryBarrier>::value, "struct wrapper is not a standard layout!" );
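+
+  // Usage sketch: transitioning `image` (an assumed valid vk::Image) from eUndefined
+  // to eTransferDstOptimal before a copy; `commandBuffer` is an assumed valid
+  // vk::CommandBuffer in the recording state.
+  //
+  //   vk::ImageMemoryBarrier barrier = vk::ImageMemoryBarrier()
+  //       .setSrcAccessMask( vk::AccessFlags() )
+  //       .setDstAccessMask( vk::AccessFlagBits::eTransferWrite )
+  //       .setOldLayout( vk::ImageLayout::eUndefined )
+  //       .setNewLayout( vk::ImageLayout::eTransferDstOptimal )
+  //       .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
+  //       .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
+  //       .setImage( image )
+  //       .setSubresourceRange( vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );
+  //   commandBuffer.pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe,
+  //                                  vk::PipelineStageFlagBits::eTransfer,
+  //                                  vk::DependencyFlags(), nullptr, nullptr, barrier );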
+
+  struct ImageMemoryRequirementsInfo2
+  {
+    VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = VULKAN_HPP_NAMESPACE::Image() ) VULKAN_HPP_NOEXCEPT
+      : image( image_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & operator=( VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 ) - offsetof( ImageMemoryRequirementsInfo2, pNext ) );
+      return *this;
+    }
+
+    ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageMemoryRequirementsInfo2& operator=( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 const *>(&rhs);
+      return *this;
+    }
+
+    ImageMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    operator VkImageMemoryRequirementsInfo2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( this );
+    }
+
+    operator VkImageMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageMemoryRequirementsInfo2*>( this );
+    }
+
+    bool operator==( ImageMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( image == rhs.image );
+    }
+
+    bool operator!=( ImageMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryRequirementsInfo2;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Image image;
+  };
+  static_assert( sizeof( ImageMemoryRequirementsInfo2 ) == sizeof( VkImageMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+
+  struct ImagePipeSurfaceCreateInfoFUCHSIA
+  {
+    VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA(),
+                                                            zx_handle_t imagePipeHandle_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , imagePipeHandle( imagePipeHandle_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & operator=( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA ) - offsetof( ImagePipeSurfaceCreateInfoFUCHSIA, pNext ) );
+      return *this;
+    }
+
+    ImagePipeSurfaceCreateInfoFUCHSIA( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImagePipeSurfaceCreateInfoFUCHSIA& operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const *>(&rhs);
+      return *this;
+    }
+
+    ImagePipeSurfaceCreateInfoFUCHSIA & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImagePipeSurfaceCreateInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ImagePipeSurfaceCreateInfoFUCHSIA & setImagePipeHandle( zx_handle_t imagePipeHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imagePipeHandle = imagePipeHandle_;
+      return *this;
+    }
+
+    operator VkImagePipeSurfaceCreateInfoFUCHSIA const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA*>( this );
+    }
+
+    operator VkImagePipeSurfaceCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImagePipeSurfaceCreateInfoFUCHSIA*>( this );
+    }
+
+    bool operator==( ImagePipeSurfaceCreateInfoFUCHSIA const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( imagePipeHandle == rhs.imagePipeHandle );
+    }
+
+    bool operator!=( ImagePipeSurfaceCreateInfoFUCHSIA const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags;
+    zx_handle_t imagePipeHandle;
+  };
+  static_assert( sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) == sizeof( VkImagePipeSurfaceCreateInfoFUCHSIA ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImagePipeSurfaceCreateInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  struct ImagePlaneMemoryRequirementsInfo
+  {
+    VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT
+      : planeAspect( planeAspect_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo & operator=( VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo ) - offsetof( ImagePlaneMemoryRequirementsInfo, pNext ) );
+      return *this;
+    }
+
+    ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImagePlaneMemoryRequirementsInfo& operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo const *>(&rhs);
+      return *this;
+    }
+
+    ImagePlaneMemoryRequirementsInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImagePlaneMemoryRequirementsInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
+    {
+      planeAspect = planeAspect_;
+      return *this;
+    }
+
+    operator VkImagePlaneMemoryRequirementsInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImagePlaneMemoryRequirementsInfo*>( this );
+    }
+
+    operator VkImagePlaneMemoryRequirementsInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImagePlaneMemoryRequirementsInfo*>( this );
+    }
+
+    bool operator==( ImagePlaneMemoryRequirementsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( planeAspect == rhs.planeAspect );
+    }
+
+    bool operator!=( ImagePlaneMemoryRequirementsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect;
+  };
+  static_assert( sizeof( ImagePlaneMemoryRequirementsInfo ) == sizeof( VkImagePlaneMemoryRequirementsInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImagePlaneMemoryRequirementsInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageResolve
+  {
+    VULKAN_HPP_CONSTEXPR ImageResolve( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = VULKAN_HPP_NAMESPACE::ImageSubresourceLayers(),
+                                       VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = VULKAN_HPP_NAMESPACE::Offset3D(),
+                                       VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = VULKAN_HPP_NAMESPACE::ImageSubresourceLayers(),
+                                       VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = VULKAN_HPP_NAMESPACE::Offset3D(),
+                                       VULKAN_HPP_NAMESPACE::Extent3D extent_ = VULKAN_HPP_NAMESPACE::Extent3D() ) VULKAN_HPP_NOEXCEPT
+      : srcSubresource( srcSubresource_ )
+      , srcOffset( srcOffset_ )
+      , dstSubresource( dstSubresource_ )
+      , dstOffset( dstOffset_ )
+      , extent( extent_ )
+    {}
+
+    ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageResolve& operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageResolve const *>(&rhs);
+      return *this;
+    }
+
+    ImageResolve & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSubresource = srcSubresource_;
+      return *this;
+    }
+
+    ImageResolve & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcOffset = srcOffset_;
+      return *this;
+    }
+
+    ImageResolve & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSubresource = dstSubresource_;
+      return *this;
+    }
+
+    ImageResolve & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstOffset = dstOffset_;
+      return *this;
+    }
+
+    ImageResolve & setExtent( VULKAN_HPP_NAMESPACE::Extent3D extent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extent = extent_;
+      return *this;
+    }
+
+    operator VkImageResolve const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageResolve*>( this );
+    }
+
+    operator VkImageResolve &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageResolve*>( this );
+    }
+
+    bool operator==( ImageResolve const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( srcSubresource == rhs.srcSubresource )
+          && ( srcOffset == rhs.srcOffset )
+          && ( dstSubresource == rhs.dstSubresource )
+          && ( dstOffset == rhs.dstOffset )
+          && ( extent == rhs.extent );
+    }
+
+    bool operator!=( ImageResolve const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource;
+    VULKAN_HPP_NAMESPACE::Offset3D srcOffset;
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource;
+    VULKAN_HPP_NAMESPACE::Offset3D dstOffset;
+    VULKAN_HPP_NAMESPACE::Extent3D extent;
+  };
+  static_assert( sizeof( ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageResolve>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageSparseMemoryRequirementsInfo2
+  {
+    VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = VULKAN_HPP_NAMESPACE::Image() ) VULKAN_HPP_NOEXCEPT
+      : image( image_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & operator=( VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 ) - offsetof( ImageSparseMemoryRequirementsInfo2, pNext ) );
+      return *this;
+    }
+
+    ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageSparseMemoryRequirementsInfo2& operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 const *>(&rhs);
+      return *this;
+    }
+
+    ImageSparseMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageSparseMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    operator VkImageSparseMemoryRequirementsInfo2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( this );
+    }
+
+    operator VkImageSparseMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageSparseMemoryRequirementsInfo2*>( this );
+    }
+
+    bool operator==( ImageSparseMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( image == rhs.image );
+    }
+
+    bool operator!=( ImageSparseMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Image image;
+  };
+  static_assert( sizeof( ImageSparseMemoryRequirementsInfo2 ) == sizeof( VkImageSparseMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageSparseMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageStencilUsageCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfoEXT( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ = VULKAN_HPP_NAMESPACE::ImageUsageFlags() ) VULKAN_HPP_NOEXCEPT
+      : stencilUsage( stencilUsage_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfoEXT ) - offsetof( ImageStencilUsageCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    ImageStencilUsageCreateInfoEXT( VkImageStencilUsageCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageStencilUsageCreateInfoEXT& operator=( VkImageStencilUsageCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    ImageStencilUsageCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageStencilUsageCreateInfoEXT & setStencilUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilUsage = stencilUsage_;
+      return *this;
+    }
+
+    operator VkImageStencilUsageCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageStencilUsageCreateInfoEXT*>( this );
+    }
+
+    operator VkImageStencilUsageCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageStencilUsageCreateInfoEXT*>( this );
+    }
+
+    bool operator==( ImageStencilUsageCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( stencilUsage == rhs.stencilUsage );
+    }
+
+    bool operator!=( ImageStencilUsageCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageStencilUsageCreateInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage;
+  };
+  static_assert( sizeof( ImageStencilUsageCreateInfoEXT ) == sizeof( VkImageStencilUsageCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageStencilUsageCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageSwapchainCreateInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = VULKAN_HPP_NAMESPACE::SwapchainKHR() ) VULKAN_HPP_NOEXCEPT
+      : swapchain( swapchain_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR ) - offsetof( ImageSwapchainCreateInfoKHR, pNext ) );
+      return *this;
+    }
+
+    ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageSwapchainCreateInfoKHR& operator=( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    ImageSwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageSwapchainCreateInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchain = swapchain_;
+      return *this;
+    }
+
+    operator VkImageSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageSwapchainCreateInfoKHR*>( this );
+    }
+
+    operator VkImageSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageSwapchainCreateInfoKHR*>( this );
+    }
+
+    bool operator==( ImageSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchain == rhs.swapchain );
+    }
+
+    bool operator!=( ImageSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSwapchainCreateInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
+  };
+  static_assert( sizeof( ImageSwapchainCreateInfoKHR ) == sizeof( VkImageSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageSwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageViewASTCDecodeModeEXT
+  {
+    VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( VULKAN_HPP_NAMESPACE::Format decodeMode_ = VULKAN_HPP_NAMESPACE::Format::eUndefined ) VULKAN_HPP_NOEXCEPT
+      : decodeMode( decodeMode_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT & operator=( VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT ) - offsetof( ImageViewASTCDecodeModeEXT, pNext ) );
+      return *this;
+    }
+
+    ImageViewASTCDecodeModeEXT( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageViewASTCDecodeModeEXT& operator=( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT const *>(&rhs);
+      return *this;
+    }
+
+    ImageViewASTCDecodeModeEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageViewASTCDecodeModeEXT & setDecodeMode( VULKAN_HPP_NAMESPACE::Format decodeMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      decodeMode = decodeMode_;
+      return *this;
+    }
+
+    operator VkImageViewASTCDecodeModeEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageViewASTCDecodeModeEXT*>( this );
+    }
+
+    operator VkImageViewASTCDecodeModeEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageViewASTCDecodeModeEXT*>( this );
+    }
+
+    bool operator==( ImageViewASTCDecodeModeEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( decodeMode == rhs.decodeMode );
+    }
+
+    bool operator!=( ImageViewASTCDecodeModeEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAstcDecodeModeEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Format decodeMode;
+  };
+  static_assert( sizeof( ImageViewASTCDecodeModeEXT ) == sizeof( VkImageViewASTCDecodeModeEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageViewASTCDecodeModeEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageViewCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ = VULKAN_HPP_NAMESPACE::ImageViewCreateFlags(),
+                                              VULKAN_HPP_NAMESPACE::Image image_ = VULKAN_HPP_NAMESPACE::Image(),
+                                              VULKAN_HPP_NAMESPACE::ImageViewType viewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D,
+                                              VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                              VULKAN_HPP_NAMESPACE::ComponentMapping components_ = VULKAN_HPP_NAMESPACE::ComponentMapping(),
+                                              VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = VULKAN_HPP_NAMESPACE::ImageSubresourceRange() ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , image( image_ )
+      , viewType( viewType_ )
+      , format( format_ )
+      , components( components_ )
+      , subresourceRange( subresourceRange_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo ) - offsetof( ImageViewCreateInfo, pNext ) );
+      return *this;
+    }
+
+    ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageViewCreateInfo& operator=( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    ImageViewCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ImageViewCreateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    ImageViewCreateInfo & setViewType( VULKAN_HPP_NAMESPACE::ImageViewType viewType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewType = viewType_;
+      return *this;
+    }
+
+    ImageViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    ImageViewCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping components_ ) VULKAN_HPP_NOEXCEPT
+    {
+      components = components_;
+      return *this;
+    }
+
+    ImageViewCreateInfo & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subresourceRange = subresourceRange_;
+      return *this;
+    }
+
+    operator VkImageViewCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageViewCreateInfo*>( this );
+    }
+
+    operator VkImageViewCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageViewCreateInfo*>( this );
+    }
+
+    bool operator==( ImageViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( image == rhs.image )
+          && ( viewType == rhs.viewType )
+          && ( format == rhs.format )
+          && ( components == rhs.components )
+          && ( subresourceRange == rhs.subresourceRange );
+    }
+
+    bool operator!=( ImageViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags;
+    VULKAN_HPP_NAMESPACE::Image image;
+    VULKAN_HPP_NAMESPACE::ImageViewType viewType;
+    VULKAN_HPP_NAMESPACE::Format format;
+    VULKAN_HPP_NAMESPACE::ComponentMapping components;
+    VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange;
+  };
+  static_assert( sizeof( ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageViewCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageViewHandleInfoNVX
+  {
+    VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( VULKAN_HPP_NAMESPACE::ImageView imageView_ = VULKAN_HPP_NAMESPACE::ImageView(),
+                                                 VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
+                                                 VULKAN_HPP_NAMESPACE::Sampler sampler_ = VULKAN_HPP_NAMESPACE::Sampler() ) VULKAN_HPP_NOEXCEPT
+      : imageView( imageView_ )
+      , descriptorType( descriptorType_ )
+      , sampler( sampler_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & operator=( VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX ) - offsetof( ImageViewHandleInfoNVX, pNext ) );
+      return *this;
+    }
+
+    ImageViewHandleInfoNVX( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageViewHandleInfoNVX& operator=( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX const *>(&rhs);
+      return *this;
+    }
+
+    ImageViewHandleInfoNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageViewHandleInfoNVX & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageView = imageView_;
+      return *this;
+    }
+
+    ImageViewHandleInfoNVX & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorType = descriptorType_;
+      return *this;
+    }
+
+    ImageViewHandleInfoNVX & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampler = sampler_;
+      return *this;
+    }
+
+    operator VkImageViewHandleInfoNVX const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageViewHandleInfoNVX*>( this );
+    }
+
+    operator VkImageViewHandleInfoNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageViewHandleInfoNVX*>( this );
+    }
+
+    bool operator==( ImageViewHandleInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( imageView == rhs.imageView )
+          && ( descriptorType == rhs.descriptorType )
+          && ( sampler == rhs.sampler );
+    }
+
+    bool operator!=( ImageViewHandleInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewHandleInfoNVX;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ImageView imageView;
+    VULKAN_HPP_NAMESPACE::DescriptorType descriptorType;
+    VULKAN_HPP_NAMESPACE::Sampler sampler;
+  };
+  static_assert( sizeof( ImageViewHandleInfoNVX ) == sizeof( VkImageViewHandleInfoNVX ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageViewHandleInfoNVX>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageViewUsageCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = VULKAN_HPP_NAMESPACE::ImageUsageFlags() ) VULKAN_HPP_NOEXCEPT
+      : usage( usage_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo ) - offsetof( ImageViewUsageCreateInfo, pNext ) );
+      return *this;
+    }
+
+    ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageViewUsageCreateInfo& operator=( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    ImageViewUsageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageViewUsageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    operator VkImageViewUsageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageViewUsageCreateInfo*>( this );
+    }
+
+    operator VkImageViewUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageViewUsageCreateInfo*>( this );
+    }
+
+    bool operator==( ImageViewUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( usage == rhs.usage );
+    }
+
+    bool operator!=( ImageViewUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewUsageCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage;
+  };
+  static_assert( sizeof( ImageViewUsageCreateInfo ) == sizeof( VkImageViewUsageCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageViewUsageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+  struct ImportAndroidHardwareBufferInfoANDROID
+  {
+    VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( struct AHardwareBuffer* buffer_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : buffer( buffer_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID & operator=( VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID ) - offsetof( ImportAndroidHardwareBufferInfoANDROID, pNext ) );
+      return *this;
+    }
+
+    ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImportAndroidHardwareBufferInfoANDROID& operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID const *>(&rhs);
+      return *this;
+    }
+
+    ImportAndroidHardwareBufferInfoANDROID & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImportAndroidHardwareBufferInfoANDROID & setBuffer( struct AHardwareBuffer* buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    operator VkImportAndroidHardwareBufferInfoANDROID const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportAndroidHardwareBufferInfoANDROID*>( this );
+    }
+
+    operator VkImportAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportAndroidHardwareBufferInfoANDROID*>( this );
+    }
+
+    bool operator==( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( buffer == rhs.buffer );
+    }
+
+    bool operator!=( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportAndroidHardwareBufferInfoANDROID;
+    const void* pNext = nullptr;
+    struct AHardwareBuffer* buffer;
+  };
+  static_assert( sizeof( ImportAndroidHardwareBufferInfoANDROID ) == sizeof( VkImportAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImportAndroidHardwareBufferInfoANDROID>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  struct ImportFenceFdInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = VULKAN_HPP_NAMESPACE::Fence(),
+                                               VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = VULKAN_HPP_NAMESPACE::FenceImportFlags(),
+                                               VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd,
+                                               int fd_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : fence( fence_ )
+      , flags( flags_ )
+      , handleType( handleType_ )
+      , fd( fd_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR ) - offsetof( ImportFenceFdInfoKHR, pNext ) );
+      return *this;
+    }
+
+    ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImportFenceFdInfoKHR& operator=( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    ImportFenceFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImportFenceFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fence = fence_;
+      return *this;
+    }
+
+    ImportFenceFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ImportFenceFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    ImportFenceFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fd = fd_;
+      return *this;
+    }
+
+    operator VkImportFenceFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportFenceFdInfoKHR*>( this );
+    }
+
+    operator VkImportFenceFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportFenceFdInfoKHR*>( this );
+    }
+
+    bool operator==( ImportFenceFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fence == rhs.fence )
+          && ( flags == rhs.flags )
+          && ( handleType == rhs.handleType )
+          && ( fd == rhs.fd );
+    }
+
+    bool operator!=( ImportFenceFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceFdInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    VULKAN_HPP_NAMESPACE::FenceImportFlags flags;
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType;
+    int fd;
+  };
+  static_assert( sizeof( ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImportFenceFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+  struct ImportFenceWin32HandleInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = VULKAN_HPP_NAMESPACE::Fence(),
+                                                        VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = VULKAN_HPP_NAMESPACE::FenceImportFlags(),
+                                                        VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd,
+                                                        HANDLE handle_ = 0,
+                                                        LPCWSTR name_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : fence( fence_ )
+      , flags( flags_ )
+      , handleType( handleType_ )
+      , handle( handle_ )
+      , name( name_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR ) - offsetof( ImportFenceWin32HandleInfoKHR, pNext ) );
+      return *this;
+    }
+
+    ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImportFenceWin32HandleInfoKHR& operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    ImportFenceWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImportFenceWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fence = fence_;
+      return *this;
+    }
+
+    ImportFenceWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ImportFenceWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    ImportFenceWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handle = handle_;
+      return *this;
+    }
+
+    ImportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
+    {
+      name = name_;
+      return *this;
+    }
+
+    operator VkImportFenceWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportFenceWin32HandleInfoKHR*>( this );
+    }
+
+    operator VkImportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportFenceWin32HandleInfoKHR*>( this );
+    }
+
+    bool operator==( ImportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fence == rhs.fence )
+          && ( flags == rhs.flags )
+          && ( handleType == rhs.handleType )
+          && ( handle == rhs.handle )
+          && ( name == rhs.name );
+    }
+
+    bool operator!=( ImportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    VULKAN_HPP_NAMESPACE::FenceImportFlags flags;
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType;
+    HANDLE handle;
+    LPCWSTR name;
+  };
+  static_assert( sizeof( ImportFenceWin32HandleInfoKHR ) == sizeof( VkImportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImportFenceWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct ImportMemoryFdInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
+                                                int fd_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : handleType( handleType_ )
+      , fd( fd_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR ) - offsetof( ImportMemoryFdInfoKHR, pNext ) );
+      return *this;
+    }
+
+    ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImportMemoryFdInfoKHR& operator=( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    ImportMemoryFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImportMemoryFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    ImportMemoryFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fd = fd_;
+      return *this;
+    }
+
+    operator VkImportMemoryFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportMemoryFdInfoKHR*>( this );
+    }
+
+    operator VkImportMemoryFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportMemoryFdInfoKHR*>( this );
+    }
+
+    bool operator==( ImportMemoryFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType )
+          && ( fd == rhs.fd );
+    }
+
+    bool operator!=( ImportMemoryFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryFdInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType;
+    int fd;
+  };
+  static_assert( sizeof( ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImportMemoryFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImportMemoryHostPointerInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
+                                                         void* pHostPointer_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : handleType( handleType_ )
+      , pHostPointer( pHostPointer_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT & operator=( VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT ) - offsetof( ImportMemoryHostPointerInfoEXT, pNext ) );
+      return *this;
+    }
+
+    ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImportMemoryHostPointerInfoEXT& operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    ImportMemoryHostPointerInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImportMemoryHostPointerInfoEXT & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    ImportMemoryHostPointerInfoEXT & setPHostPointer( void* pHostPointer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pHostPointer = pHostPointer_;
+      return *this;
+    }
+
+    operator VkImportMemoryHostPointerInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportMemoryHostPointerInfoEXT*>( this );
+    }
+
+    operator VkImportMemoryHostPointerInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportMemoryHostPointerInfoEXT*>( this );
+    }
+
+    bool operator==( ImportMemoryHostPointerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType )
+          && ( pHostPointer == rhs.pHostPointer );
+    }
+
+    bool operator!=( ImportMemoryHostPointerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType;
+    void* pHostPointer;
+  };
+  static_assert( sizeof( ImportMemoryHostPointerInfoEXT ) == sizeof( VkImportMemoryHostPointerInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImportMemoryHostPointerInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+  struct ImportMemoryWin32HandleInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
+                                                         HANDLE handle_ = 0,
+                                                         LPCWSTR name_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : handleType( handleType_ )
+      , handle( handle_ )
+      , name( name_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR ) - offsetof( ImportMemoryWin32HandleInfoKHR, pNext ) );
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImportMemoryWin32HandleInfoKHR& operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handle = handle_;
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
+    {
+      name = name_;
+      return *this;
+    }
+
+    operator VkImportMemoryWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportMemoryWin32HandleInfoKHR*>( this );
+    }
+
+    operator VkImportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportMemoryWin32HandleInfoKHR*>( this );
+    }
+
+    bool operator==( ImportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType )
+          && ( handle == rhs.handle )
+          && ( name == rhs.name );
+    }
+
+    bool operator!=( ImportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType;
+    HANDLE handle;
+    LPCWSTR name;
+  };
+  static_assert( sizeof( ImportMemoryWin32HandleInfoKHR ) == sizeof( VkImportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImportMemoryWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+  struct ImportMemoryWin32HandleInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV(),
+                                                        HANDLE handle_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : handleType( handleType_ )
+      , handle( handle_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV & operator=( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV ) - offsetof( ImportMemoryWin32HandleInfoNV, pNext ) );
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImportMemoryWin32HandleInfoNV& operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoNV & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    ImportMemoryWin32HandleInfoNV & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handle = handle_;
+      return *this;
+    }
+
+    operator VkImportMemoryWin32HandleInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportMemoryWin32HandleInfoNV*>( this );
+    }
+
+    operator VkImportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportMemoryWin32HandleInfoNV*>( this );
+    }
+
+    bool operator==( ImportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType )
+          && ( handle == rhs.handle );
+    }
+
+    bool operator!=( ImportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoNV;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType;
+    HANDLE handle;
+  };
+  static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImportMemoryWin32HandleInfoNV>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct ImportSemaphoreFdInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = VULKAN_HPP_NAMESPACE::Semaphore(),
+                                                   VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = VULKAN_HPP_NAMESPACE::SemaphoreImportFlags(),
+                                                   VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
+                                                   int fd_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : semaphore( semaphore_ )
+      , flags( flags_ )
+      , handleType( handleType_ )
+      , fd( fd_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR ) - offsetof( ImportSemaphoreFdInfoKHR, pNext ) );
+      return *this;
+    }
+
+    ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImportSemaphoreFdInfoKHR& operator=( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    ImportSemaphoreFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImportSemaphoreFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphore = semaphore_;
+      return *this;
+    }
+
+    ImportSemaphoreFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ImportSemaphoreFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    ImportSemaphoreFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fd = fd_;
+      return *this;
+    }
+
+    operator VkImportSemaphoreFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportSemaphoreFdInfoKHR*>( this );
+    }
+
+    operator VkImportSemaphoreFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportSemaphoreFdInfoKHR*>( this );
+    }
+
+    bool operator==( ImportSemaphoreFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( semaphore == rhs.semaphore )
+          && ( flags == rhs.flags )
+          && ( handleType == rhs.handleType )
+          && ( fd == rhs.fd );
+    }
+
+    bool operator!=( ImportSemaphoreFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreFdInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore;
+    VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags;
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType;
+    int fd;
+  };
+  static_assert( sizeof( ImportSemaphoreFdInfoKHR ) == sizeof( VkImportSemaphoreFdInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImportSemaphoreFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+  struct ImportSemaphoreWin32HandleInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = VULKAN_HPP_NAMESPACE::Semaphore(),
+                                                            VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = VULKAN_HPP_NAMESPACE::SemaphoreImportFlags(),
+                                                            VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
+                                                            HANDLE handle_ = 0,
+                                                            LPCWSTR name_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : semaphore( semaphore_ )
+      , flags( flags_ )
+      , handleType( handleType_ )
+      , handle( handle_ )
+      , name( name_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR ) - offsetof( ImportSemaphoreWin32HandleInfoKHR, pNext ) );
+      return *this;
+    }
+
+    ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImportSemaphoreWin32HandleInfoKHR& operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    ImportSemaphoreWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImportSemaphoreWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphore = semaphore_;
+      return *this;
+    }
+
+    ImportSemaphoreWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ImportSemaphoreWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    ImportSemaphoreWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handle = handle_;
+      return *this;
+    }
+
+    ImportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
+    {
+      name = name_;
+      return *this;
+    }
+
+    operator VkImportSemaphoreWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR*>( this );
+    }
+
+    operator VkImportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportSemaphoreWin32HandleInfoKHR*>( this );
+    }
+
+    bool operator==( ImportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( semaphore == rhs.semaphore )
+          && ( flags == rhs.flags )
+          && ( handleType == rhs.handleType )
+          && ( handle == rhs.handle )
+          && ( name == rhs.name );
+    }
+
+    bool operator!=( ImportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore;
+    VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags;
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType;
+    HANDLE handle;
+    LPCWSTR name;
+  };
+  static_assert( sizeof( ImportSemaphoreWin32HandleInfoKHR ) == sizeof( VkImportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImportSemaphoreWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct IndirectCommandsLayoutTokenNVX
+  {
+    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX::ePipeline,
+                                                         uint32_t bindingUnit_ = 0,
+                                                         uint32_t dynamicCount_ = 0,
+                                                         uint32_t divisor_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : tokenType( tokenType_ )
+      , bindingUnit( bindingUnit_ )
+      , dynamicCount( dynamicCount_ )
+      , divisor( divisor_ )
+    {}
+
+    IndirectCommandsLayoutTokenNVX( VkIndirectCommandsLayoutTokenNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    IndirectCommandsLayoutTokenNVX& operator=( VkIndirectCommandsLayoutTokenNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNVX const *>(&rhs);
+      return *this;
+    }
+
+    IndirectCommandsLayoutTokenNVX & setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tokenType = tokenType_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutTokenNVX & setBindingUnit( uint32_t bindingUnit_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bindingUnit = bindingUnit_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutTokenNVX & setDynamicCount( uint32_t dynamicCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dynamicCount = dynamicCount_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutTokenNVX & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      divisor = divisor_;
+      return *this;
+    }
+
+    operator VkIndirectCommandsLayoutTokenNVX const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkIndirectCommandsLayoutTokenNVX*>( this );
+    }
+
+    operator VkIndirectCommandsLayoutTokenNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkIndirectCommandsLayoutTokenNVX*>( this );
+    }
+
+    bool operator==( IndirectCommandsLayoutTokenNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( tokenType == rhs.tokenType )
+          && ( bindingUnit == rhs.bindingUnit )
+          && ( dynamicCount == rhs.dynamicCount )
+          && ( divisor == rhs.divisor );
+    }
+
+    bool operator!=( IndirectCommandsLayoutTokenNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType;
+    uint32_t bindingUnit;
+    uint32_t dynamicCount;
+    uint32_t divisor;
+  };
+  static_assert( sizeof( IndirectCommandsLayoutTokenNVX ) == sizeof( VkIndirectCommandsLayoutTokenNVX ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<IndirectCommandsLayoutTokenNVX>::value, "struct wrapper is not a standard layout!" );
+
+  struct IndirectCommandsLayoutCreateInfoNVX
+  {
+    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNVX( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
+                                                              VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNVX flags_ = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNVX(),
+                                                              uint32_t tokenCount_ = 0,
+                                                              const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNVX* pTokens_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pipelineBindPoint( pipelineBindPoint_ )
+      , flags( flags_ )
+      , tokenCount( tokenCount_ )
+      , pTokens( pTokens_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNVX & operator=( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNVX ) - offsetof( IndirectCommandsLayoutCreateInfoNVX, pNext ) );
+      return *this;
+    }
+
+    IndirectCommandsLayoutCreateInfoNVX( VkIndirectCommandsLayoutCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    IndirectCommandsLayoutCreateInfoNVX& operator=( VkIndirectCommandsLayoutCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNVX const *>(&rhs);
+      return *this;
+    }
+
+    IndirectCommandsLayoutCreateInfoNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutCreateInfoNVX & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineBindPoint = pipelineBindPoint_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutCreateInfoNVX & setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutCreateInfoNVX & setTokenCount( uint32_t tokenCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tokenCount = tokenCount_;
+      return *this;
+    }
+
+    IndirectCommandsLayoutCreateInfoNVX & setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNVX* pTokens_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTokens = pTokens_;
+      return *this;
+    }
+
+    operator VkIndirectCommandsLayoutCreateInfoNVX const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( this );
+    }
+
+    operator VkIndirectCommandsLayoutCreateInfoNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkIndirectCommandsLayoutCreateInfoNVX*>( this );
+    }
+
+    bool operator==( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pipelineBindPoint == rhs.pipelineBindPoint )
+          && ( flags == rhs.flags )
+          && ( tokenCount == rhs.tokenCount )
+          && ( pTokens == rhs.pTokens );
+    }
+
+    bool operator!=( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNVX;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint;
+    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNVX flags;
+    uint32_t tokenCount;
+    const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNVX* pTokens;
+  };
+  static_assert( sizeof( IndirectCommandsLayoutCreateInfoNVX ) == sizeof( VkIndirectCommandsLayoutCreateInfoNVX ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<IndirectCommandsLayoutCreateInfoNVX>::value, "struct wrapper is not a standard layout!" );
+
+  struct InitializePerformanceApiInfoINTEL
+  {
+    VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( void* pUserData_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pUserData( pUserData_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & operator=( VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL ) - offsetof( InitializePerformanceApiInfoINTEL, pNext ) );
+      return *this;
+    }
+
+    InitializePerformanceApiInfoINTEL( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    InitializePerformanceApiInfoINTEL& operator=( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL const *>(&rhs);
+      return *this;
+    }
+
+    InitializePerformanceApiInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    InitializePerformanceApiInfoINTEL & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUserData = pUserData_;
+      return *this;
+    }
+
+    operator VkInitializePerformanceApiInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkInitializePerformanceApiInfoINTEL*>( this );
+    }
+
+    operator VkInitializePerformanceApiInfoINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkInitializePerformanceApiInfoINTEL*>( this );
+    }
+
+    bool operator==( InitializePerformanceApiInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pUserData == rhs.pUserData );
+    }
+
+    bool operator!=( InitializePerformanceApiInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInitializePerformanceApiInfoINTEL;
+    const void* pNext = nullptr;
+    void* pUserData;
+  };
+  static_assert( sizeof( InitializePerformanceApiInfoINTEL ) == sizeof( VkInitializePerformanceApiInfoINTEL ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<InitializePerformanceApiInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+
+  struct InputAttachmentAspectReference
+  {
+    VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( uint32_t subpass_ = 0,
+                                                         uint32_t inputAttachmentIndex_ = 0,
+                                                         VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = VULKAN_HPP_NAMESPACE::ImageAspectFlags() ) VULKAN_HPP_NOEXCEPT
+      : subpass( subpass_ )
+      , inputAttachmentIndex( inputAttachmentIndex_ )
+      , aspectMask( aspectMask_ )
+    {}
+
+    InputAttachmentAspectReference( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    InputAttachmentAspectReference& operator=( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference const *>(&rhs);
+      return *this;
+    }
+
+    InputAttachmentAspectReference & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpass = subpass_;
+      return *this;
+    }
+
+    InputAttachmentAspectReference & setInputAttachmentIndex( uint32_t inputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inputAttachmentIndex = inputAttachmentIndex_;
+      return *this;
+    }
+
+    InputAttachmentAspectReference & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+    operator VkInputAttachmentAspectReference const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkInputAttachmentAspectReference*>( this );
+    }
+
+    operator VkInputAttachmentAspectReference &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkInputAttachmentAspectReference*>( this );
+    }
+
+    bool operator==( InputAttachmentAspectReference const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( subpass == rhs.subpass )
+          && ( inputAttachmentIndex == rhs.inputAttachmentIndex )
+          && ( aspectMask == rhs.aspectMask );
+    }
+
+    bool operator!=( InputAttachmentAspectReference const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint32_t subpass;
+    uint32_t inputAttachmentIndex;
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask;
+  };
+  static_assert( sizeof( InputAttachmentAspectReference ) == sizeof( VkInputAttachmentAspectReference ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<InputAttachmentAspectReference>::value, "struct wrapper is not a standard layout!" );
+
+  struct InstanceCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ = VULKAN_HPP_NAMESPACE::InstanceCreateFlags(),
+                                             const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo_ = nullptr,
+                                             uint32_t enabledLayerCount_ = 0,
+                                             const char* const* ppEnabledLayerNames_ = nullptr,
+                                             uint32_t enabledExtensionCount_ = 0,
+                                             const char* const* ppEnabledExtensionNames_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , pApplicationInfo( pApplicationInfo_ )
+      , enabledLayerCount( enabledLayerCount_ )
+      , ppEnabledLayerNames( ppEnabledLayerNames_ )
+      , enabledExtensionCount( enabledExtensionCount_ )
+      , ppEnabledExtensionNames( ppEnabledExtensionNames_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::InstanceCreateInfo & operator=( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::InstanceCreateInfo ) - offsetof( InstanceCreateInfo, pNext ) );
+      return *this;
+    }
+
+    InstanceCreateInfo( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    InstanceCreateInfo& operator=( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InstanceCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    InstanceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    InstanceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    InstanceCreateInfo & setPApplicationInfo( const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pApplicationInfo = pApplicationInfo_;
+      return *this;
+    }
+
+    InstanceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledLayerCount = enabledLayerCount_;
+      return *this;
+    }
+
+    InstanceCreateInfo & setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppEnabledLayerNames = ppEnabledLayerNames_;
+      return *this;
+    }
+
+    InstanceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledExtensionCount = enabledExtensionCount_;
+      return *this;
+    }
+
+    InstanceCreateInfo & setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppEnabledExtensionNames = ppEnabledExtensionNames_;
+      return *this;
+    }
+
+    operator VkInstanceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkInstanceCreateInfo*>( this );
+    }
+
+    operator VkInstanceCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkInstanceCreateInfo*>( this );
+    }
+
+    bool operator==( InstanceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pApplicationInfo == rhs.pApplicationInfo )
+          && ( enabledLayerCount == rhs.enabledLayerCount )
+          && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames )
+          && ( enabledExtensionCount == rhs.enabledExtensionCount )
+          && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames );
+    }
+
+    bool operator!=( InstanceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInstanceCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags;
+    const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo;
+    uint32_t enabledLayerCount;
+    const char* const* ppEnabledLayerNames;
+    uint32_t enabledExtensionCount;
+    const char* const* ppEnabledExtensionNames;
+  };
+  static_assert( sizeof( InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<InstanceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
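+  // Usage sketch (illustrative comment only): creating an instance with this wrapper,
+  // assuming the default "vk" namespace alias and that exceptions are enabled, so the
+  // creation call returns the handle directly rather than a ResultValue:
+  //
+  //   vk::ApplicationInfo appInfo( "app", 1, "engine", 1, VK_API_VERSION_1_1 );
+  //   vk::InstanceCreateInfo createInfo( {}, &appInfo );
+  //   vk::Instance instance = vk::createInstance( createInfo );
+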
+  struct LayerProperties
+  {
+    LayerProperties( std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& layerName_ = { { 0 } },
+                     uint32_t specVersion_ = 0,
+                     uint32_t implementationVersion_ = 0,
+                     std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = { { 0 } } ) VULKAN_HPP_NOEXCEPT
+      : layerName{}
+      , specVersion( specVersion_ )
+      , implementationVersion( implementationVersion_ )
+      , description{}
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<char,VK_MAX_EXTENSION_NAME_SIZE,VK_MAX_EXTENSION_NAME_SIZE>::copy( layerName, layerName_ );
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( description, description_ );
+    }
+
+    LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    LayerProperties& operator=( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LayerProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkLayerProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkLayerProperties*>( this );
+    }
+
+    operator VkLayerProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkLayerProperties*>( this );
+    }
+
+    bool operator==( LayerProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( memcmp( layerName, rhs.layerName, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 )
+          && ( specVersion == rhs.specVersion )
+          && ( implementationVersion == rhs.implementationVersion )
+          && ( memcmp( description, rhs.description, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 );
+    }
+
+    bool operator!=( LayerProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    char layerName[VK_MAX_EXTENSION_NAME_SIZE];
+    uint32_t specVersion;
+    uint32_t implementationVersion;
+    char description[VK_MAX_DESCRIPTION_SIZE];
+  };
+  static_assert( sizeof( LayerProperties ) == sizeof( VkLayerProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<LayerProperties>::value, "struct wrapper is not a standard layout!" );
+
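+  // Usage sketch (illustrative comment only): LayerProperties is typically obtained
+  // through the corresponding free function rather than constructed directly
+  // (exceptions enabled):
+  //
+  //   std::vector<vk::LayerProperties> layers = vk::enumerateInstanceLayerProperties();
+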
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+
+  struct MacOSSurfaceCreateInfoMVK
+  {
+    VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ = VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK(),
+                                                    const void* pView_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , pView( pView_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & operator=( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK ) - offsetof( MacOSSurfaceCreateInfoMVK, pNext ) );
+      return *this;
+    }
+
+    MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    MacOSSurfaceCreateInfoMVK& operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const *>(&rhs);
+      return *this;
+    }
+
+    MacOSSurfaceCreateInfoMVK & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MacOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    MacOSSurfaceCreateInfoMVK & setPView( const void* pView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pView = pView_;
+      return *this;
+    }
+
+    operator VkMacOSSurfaceCreateInfoMVK const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>( this );
+    }
+
+    operator VkMacOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMacOSSurfaceCreateInfoMVK*>( this );
+    }
+
+    bool operator==( MacOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pView == rhs.pView );
+    }
+
+    bool operator!=( MacOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMacosSurfaceCreateInfoMVK;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags;
+    const void* pView;
+  };
+  static_assert( sizeof( MacOSSurfaceCreateInfoMVK ) == sizeof( VkMacOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MacOSSurfaceCreateInfoMVK>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  struct MappedMemoryRange
+  {
+    VULKAN_HPP_CONSTEXPR MappedMemoryRange( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = VULKAN_HPP_NAMESPACE::DeviceMemory(),
+                                            VULKAN_HPP_NAMESPACE::DeviceSize offset_ = 0,
+                                            VULKAN_HPP_NAMESPACE::DeviceSize size_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : memory( memory_ )
+      , offset( offset_ )
+      , size( size_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::MappedMemoryRange & operator=( VULKAN_HPP_NAMESPACE::MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MappedMemoryRange ) - offsetof( MappedMemoryRange, pNext ) );
+      return *this;
+    }
+
+    MappedMemoryRange( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    MappedMemoryRange& operator=( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MappedMemoryRange const *>(&rhs);
+      return *this;
+    }
+
+    MappedMemoryRange & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MappedMemoryRange & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    MappedMemoryRange & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    MappedMemoryRange & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    operator VkMappedMemoryRange const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMappedMemoryRange*>( this );
+    }
+
+    operator VkMappedMemoryRange &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMappedMemoryRange*>( this );
+    }
+
+    bool operator==( MappedMemoryRange const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memory == rhs.memory )
+          && ( offset == rhs.offset )
+          && ( size == rhs.size );
+    }
+
+    bool operator!=( MappedMemoryRange const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMappedMemoryRange;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory;
+    VULKAN_HPP_NAMESPACE::DeviceSize offset;
+    VULKAN_HPP_NAMESPACE::DeviceSize size;
+  };
+  static_assert( sizeof( MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MappedMemoryRange>::value, "struct wrapper is not a standard layout!" );
+
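+  // Usage sketch (illustrative comment only): flushing a host-visible, non-coherent
+  // mapping, assuming a valid vk::Device "device" and vk::DeviceMemory "memory":
+  //
+  //   vk::MappedMemoryRange range( memory, 0, VK_WHOLE_SIZE );
+  //   device.flushMappedMemoryRanges( range );
+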
+  struct MemoryAllocateFlagsInfo
+  {
+    VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ = VULKAN_HPP_NAMESPACE::MemoryAllocateFlags(),
+                                                  uint32_t deviceMask_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , deviceMask( deviceMask_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo & operator=( VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo ) - offsetof( MemoryAllocateFlagsInfo, pNext ) );
+      return *this;
+    }
+
+    MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    MemoryAllocateFlagsInfo& operator=( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo const *>(&rhs);
+      return *this;
+    }
+
+    MemoryAllocateFlagsInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryAllocateFlagsInfo & setFlags( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    MemoryAllocateFlagsInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceMask = deviceMask_;
+      return *this;
+    }
+
+    operator VkMemoryAllocateFlagsInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryAllocateFlagsInfo*>( this );
+    }
+
+    operator VkMemoryAllocateFlagsInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryAllocateFlagsInfo*>( this );
+    }
+
+    bool operator==( MemoryAllocateFlagsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( deviceMask == rhs.deviceMask );
+    }
+
+    bool operator!=( MemoryAllocateFlagsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateFlagsInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags;
+    uint32_t deviceMask;
+  };
+  static_assert( sizeof( MemoryAllocateFlagsInfo ) == sizeof( VkMemoryAllocateFlagsInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryAllocateFlagsInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct MemoryAllocateInfo
+  {
+    VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = 0,
+                                             uint32_t memoryTypeIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : allocationSize( allocationSize_ )
+      , memoryTypeIndex( memoryTypeIndex_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & operator=( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo ) - offsetof( MemoryAllocateInfo, pNext ) );
+      return *this;
+    }
+
+    MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    MemoryAllocateInfo& operator=( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const *>(&rhs);
+      return *this;
+    }
+
+    MemoryAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryAllocateInfo & setAllocationSize( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      allocationSize = allocationSize_;
+      return *this;
+    }
+
+    MemoryAllocateInfo & setMemoryTypeIndex( uint32_t memoryTypeIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryTypeIndex = memoryTypeIndex_;
+      return *this;
+    }
+
+    operator VkMemoryAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryAllocateInfo*>( this );
+    }
+
+    operator VkMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryAllocateInfo*>( this );
+    }
+
+    bool operator==( MemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( allocationSize == rhs.allocationSize )
+          && ( memoryTypeIndex == rhs.memoryTypeIndex );
+    }
+
+    bool operator!=( MemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DeviceSize allocationSize;
+    uint32_t memoryTypeIndex;
+  };
+  static_assert( sizeof( MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+
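+  // Usage sketch (illustrative comment only): allocating device memory with this
+  // wrapper, assuming a valid vk::Device "device", a size and memory type index taken
+  // from a prior vk::MemoryRequirements query, and exceptions enabled:
+  //
+  //   vk::MemoryAllocateInfo allocInfo( memoryRequirements.size, memoryTypeIndex );
+  //   // Optional: chain an extension struct (e.g. vk::MemoryDedicatedAllocateInfo or
+  //   // vk::MemoryAllocateFlagsInfo) through pNext before the call.
+  //   vk::DeviceMemory memory = device.allocateMemory( allocInfo );
+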
+  struct MemoryBarrier
+  {
+    VULKAN_HPP_CONSTEXPR MemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = VULKAN_HPP_NAMESPACE::AccessFlags(),
+                                        VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = VULKAN_HPP_NAMESPACE::AccessFlags() ) VULKAN_HPP_NOEXCEPT
+      : srcAccessMask( srcAccessMask_ )
+      , dstAccessMask( dstAccessMask_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::MemoryBarrier & operator=( VULKAN_HPP_NAMESPACE::MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryBarrier ) - offsetof( MemoryBarrier, pNext ) );
+      return *this;
+    }
+
+    MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    MemoryBarrier& operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryBarrier const *>(&rhs);
+      return *this;
+    }
+
+    MemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    MemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+
+    operator VkMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryBarrier*>( this );
+    }
+
+    operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryBarrier*>( this );
+    }
+
+    bool operator==( MemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstAccessMask == rhs.dstAccessMask );
+    }
+
+    bool operator!=( MemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask;
+    VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask;
+  };
+  static_assert( sizeof( MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryBarrier>::value, "struct wrapper is not a standard layout!" );
+
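+  // Usage sketch (illustrative comment only): recording a global memory barrier into
+  // an assumed vk::CommandBuffer "commandBuffer":
+  //
+  //   vk::MemoryBarrier barrier( vk::AccessFlagBits::eTransferWrite,
+  //                              vk::AccessFlagBits::eShaderRead );
+  //   commandBuffer.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,
+  //                                  vk::PipelineStageFlagBits::eFragmentShader,
+  //                                  {}, barrier, nullptr, nullptr );
+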
+  struct MemoryDedicatedAllocateInfo
+  {
+    VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( VULKAN_HPP_NAMESPACE::Image image_ = VULKAN_HPP_NAMESPACE::Image(),
+                                                      VULKAN_HPP_NAMESPACE::Buffer buffer_ = VULKAN_HPP_NAMESPACE::Buffer() ) VULKAN_HPP_NOEXCEPT
+      : image( image_ )
+      , buffer( buffer_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo & operator=( VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo ) - offsetof( MemoryDedicatedAllocateInfo, pNext ) );
+      return *this;
+    }
+
+    MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    MemoryDedicatedAllocateInfo& operator=( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo const *>(&rhs);
+      return *this;
+    }
+
+    MemoryDedicatedAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryDedicatedAllocateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    MemoryDedicatedAllocateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    operator VkMemoryDedicatedAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryDedicatedAllocateInfo*>( this );
+    }
+
+    operator VkMemoryDedicatedAllocateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryDedicatedAllocateInfo*>( this );
+    }
+
+    bool operator==( MemoryDedicatedAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( image == rhs.image )
+          && ( buffer == rhs.buffer );
+    }
+
+    bool operator!=( MemoryDedicatedAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedAllocateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Image image;
+    VULKAN_HPP_NAMESPACE::Buffer buffer;
+  };
+  static_assert( sizeof( MemoryDedicatedAllocateInfo ) == sizeof( VkMemoryDedicatedAllocateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryDedicatedAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct MemoryDedicatedRequirements
+  {
+    MemoryDedicatedRequirements( VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation_ = 0,
+                                 VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : prefersDedicatedAllocation( prefersDedicatedAllocation_ )
+      , requiresDedicatedAllocation( requiresDedicatedAllocation_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements & operator=( VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements ) - offsetof( MemoryDedicatedRequirements, pNext ) );
+      return *this;
+    }
+
+    MemoryDedicatedRequirements( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    MemoryDedicatedRequirements& operator=( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements const *>(&rhs);
+      return *this;
+    }
+
+    operator VkMemoryDedicatedRequirements const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryDedicatedRequirements*>( this );
+    }
+
+    operator VkMemoryDedicatedRequirements &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryDedicatedRequirements*>( this );
+    }
+
+    bool operator==( MemoryDedicatedRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation )
+          && ( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation );
+    }
+
+    bool operator!=( MemoryDedicatedRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedRequirements;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation;
+    VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation;
+  };
+  static_assert( sizeof( MemoryDedicatedRequirements ) == sizeof( VkMemoryDedicatedRequirements ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryDedicatedRequirements>::value, "struct wrapper is not a standard layout!" );
+
+  struct MemoryFdPropertiesKHR
+  {
+    MemoryFdPropertiesKHR( uint32_t memoryTypeBits_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : memoryTypeBits( memoryTypeBits_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR & operator=( VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR ) - offsetof( MemoryFdPropertiesKHR, pNext ) );
+      return *this;
+    }
+
+    MemoryFdPropertiesKHR( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    MemoryFdPropertiesKHR& operator=( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkMemoryFdPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryFdPropertiesKHR*>( this );
+    }
+
+    operator VkMemoryFdPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryFdPropertiesKHR*>( this );
+    }
+
+    bool operator==( MemoryFdPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryTypeBits == rhs.memoryTypeBits );
+    }
+
+    bool operator!=( MemoryFdPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryFdPropertiesKHR;
+    void* pNext = nullptr;
+    uint32_t memoryTypeBits;
+  };
+  static_assert( sizeof( MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryFdPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+  struct MemoryGetAndroidHardwareBufferInfoANDROID
+  {
+    VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = VULKAN_HPP_NAMESPACE::DeviceMemory() ) VULKAN_HPP_NOEXCEPT
+      : memory( memory_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & operator=( VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID ) - offsetof( MemoryGetAndroidHardwareBufferInfoANDROID, pNext ) );
+      return *this;
+    }
+
+    MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    MemoryGetAndroidHardwareBufferInfoANDROID& operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID const *>(&rhs);
+      return *this;
+    }
+
+    MemoryGetAndroidHardwareBufferInfoANDROID & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryGetAndroidHardwareBufferInfoANDROID & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    operator VkMemoryGetAndroidHardwareBufferInfoANDROID const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID*>( this );
+    }
+
+    operator VkMemoryGetAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryGetAndroidHardwareBufferInfoANDROID*>( this );
+    }
+
+    bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memory == rhs.memory );
+    }
+
+    bool operator!=( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory;
+  };
+  static_assert( sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) == sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryGetAndroidHardwareBufferInfoANDROID>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  struct MemoryGetFdInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = VULKAN_HPP_NAMESPACE::DeviceMemory(),
+                                             VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+      : memory( memory_ )
+      , handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & operator=( VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR ) - offsetof( MemoryGetFdInfoKHR, pNext ) );
+      return *this;
+    }
+
+    MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    MemoryGetFdInfoKHR& operator=( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    MemoryGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryGetFdInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    MemoryGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    operator VkMemoryGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryGetFdInfoKHR*>( this );
+    }
+
+    operator VkMemoryGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryGetFdInfoKHR*>( this );
+    }
+
+    bool operator==( MemoryGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memory == rhs.memory )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( MemoryGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetFdInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory;
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType;
+  };
+  static_assert( sizeof( MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
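+  // Usage sketch (illustrative comment only): exporting a POSIX fd for an allocation,
+  // assuming a valid vk::Device "device", a vk::DeviceMemory "memory" created with the
+  // matching export info, and VK_KHR_external_memory_fd enabled on the device:
+  //
+  //   vk::MemoryGetFdInfoKHR getFdInfo( memory, vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
+  //   int fd = device.getMemoryFdKHR( getFdInfo );
+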
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+  struct MemoryGetWin32HandleInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = VULKAN_HPP_NAMESPACE::DeviceMemory(),
+                                                      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+      : memory( memory_ )
+      , handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR ) - offsetof( MemoryGetWin32HandleInfoKHR, pNext ) );
+      return *this;
+    }
+
+    MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    MemoryGetWin32HandleInfoKHR& operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    MemoryGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryGetWin32HandleInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    MemoryGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    operator VkMemoryGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR*>( this );
+    }
+
+    operator VkMemoryGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryGetWin32HandleInfoKHR*>( this );
+    }
+
+    bool operator==( MemoryGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memory == rhs.memory )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( MemoryGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory;
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType;
+  };
+  static_assert( sizeof( MemoryGetWin32HandleInfoKHR ) == sizeof( VkMemoryGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct MemoryHeap
+  {
+    MemoryHeap( VULKAN_HPP_NAMESPACE::DeviceSize size_ = 0,
+                VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags_ = VULKAN_HPP_NAMESPACE::MemoryHeapFlags() ) VULKAN_HPP_NOEXCEPT
+      : size( size_ )
+      , flags( flags_ )
+    {}
+
+    MemoryHeap( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    MemoryHeap& operator=( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryHeap const *>(&rhs);
+      return *this;
+    }
+
+    operator VkMemoryHeap const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryHeap*>( this );
+    }
+
+    operator VkMemoryHeap &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryHeap*>( this );
+    }
+
+    bool operator==( MemoryHeap const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( size == rhs.size )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( MemoryHeap const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceSize size;
+    VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags;
+  };
+  static_assert( sizeof( MemoryHeap ) == sizeof( VkMemoryHeap ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryHeap>::value, "struct wrapper is not a standard layout!" );
+
+  struct MemoryHostPointerPropertiesEXT
+  {
+    MemoryHostPointerPropertiesEXT( uint32_t memoryTypeBits_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : memoryTypeBits( memoryTypeBits_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT ) - offsetof( MemoryHostPointerPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    MemoryHostPointerPropertiesEXT& operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkMemoryHostPointerPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryHostPointerPropertiesEXT*>( this );
+    }
+
+    operator VkMemoryHostPointerPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryHostPointerPropertiesEXT*>( this );
+    }
+
+    bool operator==( MemoryHostPointerPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryTypeBits == rhs.memoryTypeBits );
+    }
+
+    bool operator!=( MemoryHostPointerPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryHostPointerPropertiesEXT;
+    void* pNext = nullptr;
+    uint32_t memoryTypeBits;
+  };
+  static_assert( sizeof( MemoryHostPointerPropertiesEXT ) == sizeof( VkMemoryHostPointerPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryHostPointerPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct MemoryOpaqueCaptureAddressAllocateInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfoKHR( uint64_t opaqueCaptureAddress_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : opaqueCaptureAddress( opaqueCaptureAddress_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfoKHR ) - offsetof( MemoryOpaqueCaptureAddressAllocateInfoKHR, pNext ) );
+      return *this;
+    }
+
+    MemoryOpaqueCaptureAddressAllocateInfoKHR( VkMemoryOpaqueCaptureAddressAllocateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    MemoryOpaqueCaptureAddressAllocateInfoKHR& operator=( VkMemoryOpaqueCaptureAddressAllocateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    MemoryOpaqueCaptureAddressAllocateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryOpaqueCaptureAddressAllocateInfoKHR & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      opaqueCaptureAddress = opaqueCaptureAddress_;
+      return *this;
+    }
+
+    operator VkMemoryOpaqueCaptureAddressAllocateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryOpaqueCaptureAddressAllocateInfoKHR*>( this );
+    }
+
+    operator VkMemoryOpaqueCaptureAddressAllocateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryOpaqueCaptureAddressAllocateInfoKHR*>( this );
+    }
+
+    bool operator==( MemoryOpaqueCaptureAddressAllocateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress );
+    }
+
+    bool operator!=( MemoryOpaqueCaptureAddressAllocateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfoKHR;
+    const void* pNext = nullptr;
+    uint64_t opaqueCaptureAddress;
+  };
+  static_assert( sizeof( MemoryOpaqueCaptureAddressAllocateInfoKHR ) == sizeof( VkMemoryOpaqueCaptureAddressAllocateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryOpaqueCaptureAddressAllocateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct MemoryPriorityAllocateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( float priority_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : priority( priority_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT ) - offsetof( MemoryPriorityAllocateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    MemoryPriorityAllocateInfoEXT( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    MemoryPriorityAllocateInfoEXT& operator=( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    MemoryPriorityAllocateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryPriorityAllocateInfoEXT & setPriority( float priority_ ) VULKAN_HPP_NOEXCEPT
+    {
+      priority = priority_;
+      return *this;
+    }
+
+    operator VkMemoryPriorityAllocateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryPriorityAllocateInfoEXT*>( this );
+    }
+
+    operator VkMemoryPriorityAllocateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryPriorityAllocateInfoEXT*>( this );
+    }
+
+    bool operator==( MemoryPriorityAllocateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( priority == rhs.priority );
+    }
+
+    bool operator!=( MemoryPriorityAllocateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryPriorityAllocateInfoEXT;
+    const void* pNext = nullptr;
+    float priority;
+  };
+  static_assert( sizeof( MemoryPriorityAllocateInfoEXT ) == sizeof( VkMemoryPriorityAllocateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryPriorityAllocateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct MemoryRequirements
+  {
+    MemoryRequirements( VULKAN_HPP_NAMESPACE::DeviceSize size_ = 0,
+                        VULKAN_HPP_NAMESPACE::DeviceSize alignment_ = 0,
+                        uint32_t memoryTypeBits_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : size( size_ )
+      , alignment( alignment_ )
+      , memoryTypeBits( memoryTypeBits_ )
+    {}
+
+    MemoryRequirements( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    MemoryRequirements& operator=( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements const *>(&rhs);
+      return *this;
+    }
+
+    operator VkMemoryRequirements const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryRequirements*>( this );
+    }
+
+    operator VkMemoryRequirements &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryRequirements*>( this );
+    }
+
+    bool operator==( MemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( size == rhs.size )
+          && ( alignment == rhs.alignment )
+          && ( memoryTypeBits == rhs.memoryTypeBits );
+    }
+
+    bool operator!=( MemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceSize size;
+    VULKAN_HPP_NAMESPACE::DeviceSize alignment;
+    uint32_t memoryTypeBits;
+  };
+  static_assert( sizeof( MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryRequirements>::value, "struct wrapper is not a standard layout!" );
+
+  struct MemoryRequirements2
+  {
+    MemoryRequirements2( VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = VULKAN_HPP_NAMESPACE::MemoryRequirements() ) VULKAN_HPP_NOEXCEPT
+      : memoryRequirements( memoryRequirements_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & operator=( VULKAN_HPP_NAMESPACE::MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryRequirements2 ) - offsetof( MemoryRequirements2, pNext ) );
+      return *this;
+    }
+
+    MemoryRequirements2( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    MemoryRequirements2& operator=( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements2 const *>(&rhs);
+      return *this;
+    }
+
+    operator VkMemoryRequirements2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryRequirements2*>( this );
+    }
+
+    operator VkMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryRequirements2*>( this );
+    }
+
+    bool operator==( MemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryRequirements == rhs.memoryRequirements );
+    }
+
+    bool operator!=( MemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryRequirements2;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
+  };
+  static_assert( sizeof( MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryRequirements2>::value, "struct wrapper is not a standard layout!" );
+
+  struct MemoryType
+  {
+    MemoryType( VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags_ = VULKAN_HPP_NAMESPACE::MemoryPropertyFlags(),
+                uint32_t heapIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : propertyFlags( propertyFlags_ )
+      , heapIndex( heapIndex_ )
+    {}
+
+    MemoryType( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    MemoryType& operator=( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryType const *>(&rhs);
+      return *this;
+    }
+
+    operator VkMemoryType const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryType*>( this );
+    }
+
+    operator VkMemoryType &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryType*>( this );
+    }
+
+    bool operator==( MemoryType const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( propertyFlags == rhs.propertyFlags )
+          && ( heapIndex == rhs.heapIndex );
+    }
+
+    bool operator!=( MemoryType const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags;
+    uint32_t heapIndex;
+  };
+  static_assert( sizeof( MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryType>::value, "struct wrapper is not a standard layout!" );
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+  struct MemoryWin32HandlePropertiesKHR
+  {
+    MemoryWin32HandlePropertiesKHR( uint32_t memoryTypeBits_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : memoryTypeBits( memoryTypeBits_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR & operator=( VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR ) - offsetof( MemoryWin32HandlePropertiesKHR, pNext ) );
+      return *this;
+    }
+
+    MemoryWin32HandlePropertiesKHR( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    MemoryWin32HandlePropertiesKHR& operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkMemoryWin32HandlePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryWin32HandlePropertiesKHR*>( this );
+    }
+
+    operator VkMemoryWin32HandlePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryWin32HandlePropertiesKHR*>( this );
+    }
+
+    bool operator==( MemoryWin32HandlePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryTypeBits == rhs.memoryTypeBits );
+    }
+
+    bool operator!=( MemoryWin32HandlePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR;
+    void* pNext = nullptr;
+    uint32_t memoryTypeBits;
+  };
+  static_assert( sizeof( MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryWin32HandlePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+
+  struct MetalSurfaceCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ = VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT(),
+                                                    const CAMetalLayer* pLayer_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , pLayer( pLayer_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT ) - offsetof( MetalSurfaceCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    MetalSurfaceCreateInfoEXT( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    MetalSurfaceCreateInfoEXT& operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    MetalSurfaceCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MetalSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    MetalSurfaceCreateInfoEXT & setPLayer( const CAMetalLayer* pLayer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pLayer = pLayer_;
+      return *this;
+    }
+
+    operator VkMetalSurfaceCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMetalSurfaceCreateInfoEXT*>( this );
+    }
+
+    operator VkMetalSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMetalSurfaceCreateInfoEXT*>( this );
+    }
+
+    bool operator==( MetalSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pLayer == rhs.pLayer );
+    }
+
+    bool operator!=( MetalSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMetalSurfaceCreateInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags;
+    const CAMetalLayer* pLayer;
+  };
+  static_assert( sizeof( MetalSurfaceCreateInfoEXT ) == sizeof( VkMetalSurfaceCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MetalSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  struct MultisamplePropertiesEXT
+  {
+    MultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = VULKAN_HPP_NAMESPACE::Extent2D() ) VULKAN_HPP_NOEXCEPT
+      : maxSampleLocationGridSize( maxSampleLocationGridSize_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT ) - offsetof( MultisamplePropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    MultisamplePropertiesEXT( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    MultisamplePropertiesEXT& operator=( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkMultisamplePropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMultisamplePropertiesEXT*>( this );
+    }
+
+    operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMultisamplePropertiesEXT*>( this );
+    }
+
+    bool operator==( MultisamplePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize );
+    }
+
+    bool operator!=( MultisamplePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisamplePropertiesEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize;
+  };
+  static_assert( sizeof( MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MultisamplePropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct ObjectTableCreateInfoNVX
+  {
+    VULKAN_HPP_CONSTEXPR ObjectTableCreateInfoNVX( uint32_t objectCount_ = 0,
+                                                   const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes_ = nullptr,
+                                                   const uint32_t* pObjectEntryCounts_ = nullptr,
+                                                   const VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ = nullptr,
+                                                   uint32_t maxUniformBuffersPerDescriptor_ = 0,
+                                                   uint32_t maxStorageBuffersPerDescriptor_ = 0,
+                                                   uint32_t maxStorageImagesPerDescriptor_ = 0,
+                                                   uint32_t maxSampledImagesPerDescriptor_ = 0,
+                                                   uint32_t maxPipelineLayouts_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : objectCount( objectCount_ )
+      , pObjectEntryTypes( pObjectEntryTypes_ )
+      , pObjectEntryCounts( pObjectEntryCounts_ )
+      , pObjectEntryUsageFlags( pObjectEntryUsageFlags_ )
+      , maxUniformBuffersPerDescriptor( maxUniformBuffersPerDescriptor_ )
+      , maxStorageBuffersPerDescriptor( maxStorageBuffersPerDescriptor_ )
+      , maxStorageImagesPerDescriptor( maxStorageImagesPerDescriptor_ )
+      , maxSampledImagesPerDescriptor( maxSampledImagesPerDescriptor_ )
+      , maxPipelineLayouts( maxPipelineLayouts_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ObjectTableCreateInfoNVX & operator=( VULKAN_HPP_NAMESPACE::ObjectTableCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ObjectTableCreateInfoNVX ) - offsetof( ObjectTableCreateInfoNVX, pNext ) );
+      return *this;
+    }
+
+    ObjectTableCreateInfoNVX( VkObjectTableCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ObjectTableCreateInfoNVX& operator=( VkObjectTableCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ObjectTableCreateInfoNVX const *>(&rhs);
+      return *this;
+    }
+
+    ObjectTableCreateInfoNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ObjectTableCreateInfoNVX & setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectCount = objectCount_;
+      return *this;
+    }
+
+    ObjectTableCreateInfoNVX & setPObjectEntryTypes( const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pObjectEntryTypes = pObjectEntryTypes_;
+      return *this;
+    }
+
+    ObjectTableCreateInfoNVX & setPObjectEntryCounts( const uint32_t* pObjectEntryCounts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pObjectEntryCounts = pObjectEntryCounts_;
+      return *this;
+    }
+
+    ObjectTableCreateInfoNVX & setPObjectEntryUsageFlags( const VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pObjectEntryUsageFlags = pObjectEntryUsageFlags_;
+      return *this;
+    }
+
+    ObjectTableCreateInfoNVX & setMaxUniformBuffersPerDescriptor( uint32_t maxUniformBuffersPerDescriptor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxUniformBuffersPerDescriptor = maxUniformBuffersPerDescriptor_;
+      return *this;
+    }
+
+    ObjectTableCreateInfoNVX & setMaxStorageBuffersPerDescriptor( uint32_t maxStorageBuffersPerDescriptor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxStorageBuffersPerDescriptor = maxStorageBuffersPerDescriptor_;
+      return *this;
+    }
+
+    ObjectTableCreateInfoNVX & setMaxStorageImagesPerDescriptor( uint32_t maxStorageImagesPerDescriptor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxStorageImagesPerDescriptor = maxStorageImagesPerDescriptor_;
+      return *this;
+    }
+
+    ObjectTableCreateInfoNVX & setMaxSampledImagesPerDescriptor( uint32_t maxSampledImagesPerDescriptor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxSampledImagesPerDescriptor = maxSampledImagesPerDescriptor_;
+      return *this;
+    }
+
+    ObjectTableCreateInfoNVX & setMaxPipelineLayouts( uint32_t maxPipelineLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxPipelineLayouts = maxPipelineLayouts_;
+      return *this;
+    }
+
+    operator VkObjectTableCreateInfoNVX const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkObjectTableCreateInfoNVX*>( this );
+    }
+
+    operator VkObjectTableCreateInfoNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkObjectTableCreateInfoNVX*>( this );
+    }
+
+    bool operator==( ObjectTableCreateInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectCount == rhs.objectCount )
+          && ( pObjectEntryTypes == rhs.pObjectEntryTypes )
+          && ( pObjectEntryCounts == rhs.pObjectEntryCounts )
+          && ( pObjectEntryUsageFlags == rhs.pObjectEntryUsageFlags )
+          && ( maxUniformBuffersPerDescriptor == rhs.maxUniformBuffersPerDescriptor )
+          && ( maxStorageBuffersPerDescriptor == rhs.maxStorageBuffersPerDescriptor )
+          && ( maxStorageImagesPerDescriptor == rhs.maxStorageImagesPerDescriptor )
+          && ( maxSampledImagesPerDescriptor == rhs.maxSampledImagesPerDescriptor )
+          && ( maxPipelineLayouts == rhs.maxPipelineLayouts );
+    }
+
+    bool operator!=( ObjectTableCreateInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eObjectTableCreateInfoNVX;
+    const void* pNext = nullptr;
+    uint32_t objectCount;
+    const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes;
+    const uint32_t* pObjectEntryCounts;
+    const VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags;
+    uint32_t maxUniformBuffersPerDescriptor;
+    uint32_t maxStorageBuffersPerDescriptor;
+    uint32_t maxStorageImagesPerDescriptor;
+    uint32_t maxSampledImagesPerDescriptor;
+    uint32_t maxPipelineLayouts;
+  };
+  static_assert( sizeof( ObjectTableCreateInfoNVX ) == sizeof( VkObjectTableCreateInfoNVX ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ObjectTableCreateInfoNVX>::value, "struct wrapper is not a standard layout!" );
+
+  struct ObjectTableEntryNVX
+  {
+    VULKAN_HPP_CONSTEXPR ObjectTableEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet,
+                                              VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX() ) VULKAN_HPP_NOEXCEPT
+      : type( type_ )
+      , flags( flags_ )
+    {}
+
+    ObjectTableEntryNVX( VkObjectTableEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ObjectTableEntryNVX& operator=( VkObjectTableEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ObjectTableEntryNVX const *>(&rhs);
+      return *this;
+    }
+
+    ObjectTableEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    ObjectTableEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    operator VkObjectTableEntryNVX const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkObjectTableEntryNVX*>( this );
+    }
+
+    operator VkObjectTableEntryNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkObjectTableEntryNVX*>( this );
+    }
+
+    bool operator==( ObjectTableEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( type == rhs.type )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( ObjectTableEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type;
+    VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags;
+  };
+  static_assert( sizeof( ObjectTableEntryNVX ) == sizeof( VkObjectTableEntryNVX ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ObjectTableEntryNVX>::value, "struct wrapper is not a standard layout!" );
+
+  struct ObjectTableDescriptorSetEntryNVX
+  {
+    VULKAN_HPP_CONSTEXPR ObjectTableDescriptorSetEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet,
+                                                           VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX(),
+                                                           VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = VULKAN_HPP_NAMESPACE::PipelineLayout(),
+                                                           VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet_ = VULKAN_HPP_NAMESPACE::DescriptorSet() ) VULKAN_HPP_NOEXCEPT
+      : type( type_ )
+      , flags( flags_ )
+      , pipelineLayout( pipelineLayout_ )
+      , descriptorSet( descriptorSet_ )
+    {}
+
+    explicit ObjectTableDescriptorSetEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX,
+                                               VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = VULKAN_HPP_NAMESPACE::PipelineLayout(),
+                                               VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet_ = VULKAN_HPP_NAMESPACE::DescriptorSet() )
+      : type( objectTableEntryNVX.type )
+      , flags( objectTableEntryNVX.flags )
+      , pipelineLayout( pipelineLayout_ )
+      , descriptorSet( descriptorSet_ )
+    {}
+
+    ObjectTableDescriptorSetEntryNVX( VkObjectTableDescriptorSetEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ObjectTableDescriptorSetEntryNVX& operator=( VkObjectTableDescriptorSetEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ObjectTableDescriptorSetEntryNVX const *>(&rhs);
+      return *this;
+    }
+
+    ObjectTableDescriptorSetEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    ObjectTableDescriptorSetEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ObjectTableDescriptorSetEntryNVX & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineLayout = pipelineLayout_;
+      return *this;
+    }
+
+    ObjectTableDescriptorSetEntryNVX & setDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorSet = descriptorSet_;
+      return *this;
+    }
+
+    operator VkObjectTableDescriptorSetEntryNVX const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkObjectTableDescriptorSetEntryNVX*>( this );
+    }
+
+    operator VkObjectTableDescriptorSetEntryNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkObjectTableDescriptorSetEntryNVX*>( this );
+    }
+
+    bool operator==( ObjectTableDescriptorSetEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( type == rhs.type )
+          && ( flags == rhs.flags )
+          && ( pipelineLayout == rhs.pipelineLayout )
+          && ( descriptorSet == rhs.descriptorSet );
+    }
+
+    bool operator!=( ObjectTableDescriptorSetEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type;
+    VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags;
+    VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
+    VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet;
+  };
+  static_assert( sizeof( ObjectTableDescriptorSetEntryNVX ) == sizeof( VkObjectTableDescriptorSetEntryNVX ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ObjectTableDescriptorSetEntryNVX>::value, "struct wrapper is not a standard layout!" );
+
+  struct ObjectTableIndexBufferEntryNVX
+  {
+    VULKAN_HPP_CONSTEXPR ObjectTableIndexBufferEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet,
+                                                         VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX(),
+                                                         VULKAN_HPP_NAMESPACE::Buffer buffer_ = VULKAN_HPP_NAMESPACE::Buffer(),
+                                                         VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 ) VULKAN_HPP_NOEXCEPT
+      : type( type_ )
+      , flags( flags_ )
+      , buffer( buffer_ )
+      , indexType( indexType_ )
+    {}
+
+    explicit ObjectTableIndexBufferEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX,
+                                             VULKAN_HPP_NAMESPACE::Buffer buffer_ = VULKAN_HPP_NAMESPACE::Buffer(),
+                                             VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 )
+      : type( objectTableEntryNVX.type )
+      , flags( objectTableEntryNVX.flags )
+      , buffer( buffer_ )
+      , indexType( indexType_ )
+    {}
+
+    ObjectTableIndexBufferEntryNVX( VkObjectTableIndexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ObjectTableIndexBufferEntryNVX& operator=( VkObjectTableIndexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ObjectTableIndexBufferEntryNVX const *>(&rhs);
+      return *this;
+    }
+
+    ObjectTableIndexBufferEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    ObjectTableIndexBufferEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ObjectTableIndexBufferEntryNVX & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    ObjectTableIndexBufferEntryNVX & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexType = indexType_;
+      return *this;
+    }
+
+    operator VkObjectTableIndexBufferEntryNVX const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkObjectTableIndexBufferEntryNVX*>( this );
+    }
+
+    operator VkObjectTableIndexBufferEntryNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkObjectTableIndexBufferEntryNVX*>( this );
+    }
+
+    bool operator==( ObjectTableIndexBufferEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( type == rhs.type )
+          && ( flags == rhs.flags )
+          && ( buffer == rhs.buffer )
+          && ( indexType == rhs.indexType );
+    }
+
+    bool operator!=( ObjectTableIndexBufferEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type;
+    VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags;
+    VULKAN_HPP_NAMESPACE::Buffer buffer;
+    VULKAN_HPP_NAMESPACE::IndexType indexType;
+  };
+  static_assert( sizeof( ObjectTableIndexBufferEntryNVX ) == sizeof( VkObjectTableIndexBufferEntryNVX ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ObjectTableIndexBufferEntryNVX>::value, "struct wrapper is not a standard layout!" );
+
+  struct ObjectTablePipelineEntryNVX
+  {
+    VULKAN_HPP_CONSTEXPR ObjectTablePipelineEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet,
+                                                      VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX(),
+                                                      VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = VULKAN_HPP_NAMESPACE::Pipeline() ) VULKAN_HPP_NOEXCEPT
+      : type( type_ )
+      , flags( flags_ )
+      , pipeline( pipeline_ )
+    {}
+
+    explicit ObjectTablePipelineEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX,
+                                          VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = VULKAN_HPP_NAMESPACE::Pipeline() )
+      : type( objectTableEntryNVX.type )
+      , flags( objectTableEntryNVX.flags )
+      , pipeline( pipeline_ )
+    {}
+
+    ObjectTablePipelineEntryNVX( VkObjectTablePipelineEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ObjectTablePipelineEntryNVX& operator=( VkObjectTablePipelineEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ObjectTablePipelineEntryNVX const *>(&rhs);
+      return *this;
+    }
+
+    ObjectTablePipelineEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    ObjectTablePipelineEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ObjectTablePipelineEntryNVX & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipeline = pipeline_;
+      return *this;
+    }
+
+    operator VkObjectTablePipelineEntryNVX const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkObjectTablePipelineEntryNVX*>( this );
+    }
+
+    operator VkObjectTablePipelineEntryNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkObjectTablePipelineEntryNVX*>( this );
+    }
+
+    bool operator==( ObjectTablePipelineEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( type == rhs.type )
+          && ( flags == rhs.flags )
+          && ( pipeline == rhs.pipeline );
+    }
+
+    bool operator!=( ObjectTablePipelineEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type;
+    VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags;
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
+  };
+  static_assert( sizeof( ObjectTablePipelineEntryNVX ) == sizeof( VkObjectTablePipelineEntryNVX ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ObjectTablePipelineEntryNVX>::value, "struct wrapper is not a standard layout!" );
+
+  struct ObjectTablePushConstantEntryNVX
+  {
+    VULKAN_HPP_CONSTEXPR ObjectTablePushConstantEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet,
+                                                          VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX(),
+                                                          VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = VULKAN_HPP_NAMESPACE::PipelineLayout(),
+                                                          VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = VULKAN_HPP_NAMESPACE::ShaderStageFlags() ) VULKAN_HPP_NOEXCEPT
+      : type( type_ )
+      , flags( flags_ )
+      , pipelineLayout( pipelineLayout_ )
+      , stageFlags( stageFlags_ )
+    {}
+
+    explicit ObjectTablePushConstantEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX,
+                                              VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = VULKAN_HPP_NAMESPACE::PipelineLayout(),
+                                              VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = VULKAN_HPP_NAMESPACE::ShaderStageFlags() )
+      : type( objectTableEntryNVX.type )
+      , flags( objectTableEntryNVX.flags )
+      , pipelineLayout( pipelineLayout_ )
+      , stageFlags( stageFlags_ )
+    {}
+
+    ObjectTablePushConstantEntryNVX( VkObjectTablePushConstantEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ObjectTablePushConstantEntryNVX& operator=( VkObjectTablePushConstantEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ObjectTablePushConstantEntryNVX const *>(&rhs);
+      return *this;
+    }
+
+    ObjectTablePushConstantEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    ObjectTablePushConstantEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ObjectTablePushConstantEntryNVX & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineLayout = pipelineLayout_;
+      return *this;
+    }
+
+    ObjectTablePushConstantEntryNVX & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageFlags = stageFlags_;
+      return *this;
+    }
+
+    operator VkObjectTablePushConstantEntryNVX const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkObjectTablePushConstantEntryNVX*>( this );
+    }
+
+    operator VkObjectTablePushConstantEntryNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkObjectTablePushConstantEntryNVX*>( this );
+    }
+
+    bool operator==( ObjectTablePushConstantEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( type == rhs.type )
+          && ( flags == rhs.flags )
+          && ( pipelineLayout == rhs.pipelineLayout )
+          && ( stageFlags == rhs.stageFlags );
+    }
+
+    bool operator!=( ObjectTablePushConstantEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type;
+    VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags;
+    VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags;
+  };
+  static_assert( sizeof( ObjectTablePushConstantEntryNVX ) == sizeof( VkObjectTablePushConstantEntryNVX ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ObjectTablePushConstantEntryNVX>::value, "struct wrapper is not a standard layout!" );
+
+  struct ObjectTableVertexBufferEntryNVX
+  {
+    VULKAN_HPP_CONSTEXPR ObjectTableVertexBufferEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet,
+                                                          VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX(),
+                                                          VULKAN_HPP_NAMESPACE::Buffer buffer_ = VULKAN_HPP_NAMESPACE::Buffer() ) VULKAN_HPP_NOEXCEPT
+      : type( type_ )
+      , flags( flags_ )
+      , buffer( buffer_ )
+    {}
+
+    explicit ObjectTableVertexBufferEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX,
+                                              VULKAN_HPP_NAMESPACE::Buffer buffer_ = VULKAN_HPP_NAMESPACE::Buffer() )
+      : type( objectTableEntryNVX.type )
+      , flags( objectTableEntryNVX.flags )
+      , buffer( buffer_ )
+    {}
+
+    ObjectTableVertexBufferEntryNVX( VkObjectTableVertexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ObjectTableVertexBufferEntryNVX& operator=( VkObjectTableVertexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ObjectTableVertexBufferEntryNVX const *>(&rhs);
+      return *this;
+    }
+
+    ObjectTableVertexBufferEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    ObjectTableVertexBufferEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ObjectTableVertexBufferEntryNVX & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    operator VkObjectTableVertexBufferEntryNVX const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkObjectTableVertexBufferEntryNVX*>( this );
+    }
+
+    operator VkObjectTableVertexBufferEntryNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkObjectTableVertexBufferEntryNVX*>( this );
+    }
+
+    bool operator==( ObjectTableVertexBufferEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( type == rhs.type )
+          && ( flags == rhs.flags )
+          && ( buffer == rhs.buffer );
+    }
+
+    bool operator!=( ObjectTableVertexBufferEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type;
+    VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags;
+    VULKAN_HPP_NAMESPACE::Buffer buffer;
+  };
+  static_assert( sizeof( ObjectTableVertexBufferEntryNVX ) == sizeof( VkObjectTableVertexBufferEntryNVX ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ObjectTableVertexBufferEntryNVX>::value, "struct wrapper is not a standard layout!" );
+
+  struct PastPresentationTimingGOOGLE
+  {
+    PastPresentationTimingGOOGLE( uint32_t presentID_ = 0,
+                                  uint64_t desiredPresentTime_ = 0,
+                                  uint64_t actualPresentTime_ = 0,
+                                  uint64_t earliestPresentTime_ = 0,
+                                  uint64_t presentMargin_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : presentID( presentID_ )
+      , desiredPresentTime( desiredPresentTime_ )
+      , actualPresentTime( actualPresentTime_ )
+      , earliestPresentTime( earliestPresentTime_ )
+      , presentMargin( presentMargin_ )
+    {}
+
+    PastPresentationTimingGOOGLE( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PastPresentationTimingGOOGLE& operator=( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPastPresentationTimingGOOGLE const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPastPresentationTimingGOOGLE*>( this );
+    }
+
+    operator VkPastPresentationTimingGOOGLE &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPastPresentationTimingGOOGLE*>( this );
+    }
+
+    bool operator==( PastPresentationTimingGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( presentID == rhs.presentID )
+          && ( desiredPresentTime == rhs.desiredPresentTime )
+          && ( actualPresentTime == rhs.actualPresentTime )
+          && ( earliestPresentTime == rhs.earliestPresentTime )
+          && ( presentMargin == rhs.presentMargin );
+    }
+
+    bool operator!=( PastPresentationTimingGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint32_t presentID;
+    uint64_t desiredPresentTime;
+    uint64_t actualPresentTime;
+    uint64_t earliestPresentTime;
+    uint64_t presentMargin;
+  };
+  static_assert( sizeof( PastPresentationTimingGOOGLE ) == sizeof( VkPastPresentationTimingGOOGLE ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PastPresentationTimingGOOGLE>::value, "struct wrapper is not a standard layout!" );
+
+  struct PerformanceConfigurationAcquireInfoINTEL
+  {
+    VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated ) VULKAN_HPP_NOEXCEPT
+      : type( type_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & operator=( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL ) - offsetof( PerformanceConfigurationAcquireInfoINTEL, pNext ) );
+      return *this;
+    }
+
+    PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PerformanceConfigurationAcquireInfoINTEL& operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const *>(&rhs);
+      return *this;
+    }
+
+    PerformanceConfigurationAcquireInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PerformanceConfigurationAcquireInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    operator VkPerformanceConfigurationAcquireInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL*>( this );
+    }
+
+    operator VkPerformanceConfigurationAcquireInfoINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceConfigurationAcquireInfoINTEL*>( this );
+    }
+
+    bool operator==( PerformanceConfigurationAcquireInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( type == rhs.type );
+    }
+
+    bool operator!=( PerformanceConfigurationAcquireInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type;
+  };
+  static_assert( sizeof( PerformanceConfigurationAcquireInfoINTEL ) == sizeof( VkPerformanceConfigurationAcquireInfoINTEL ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PerformanceConfigurationAcquireInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+
+  struct PerformanceCounterDescriptionKHR
+  {
+    PerformanceCounterDescriptionKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_ = VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR(),
+                                      std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = { { 0 } },
+                                      std::array<char,VK_MAX_DESCRIPTION_SIZE> const& category_ = { { 0 } },
+                                      std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = { { 0 } } ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , name{}
+      , category{}
+      , description{}
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( name, name_ );
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( category, category_ );
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( description, description_ );
+    }
+
+    VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR & operator=( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR ) - offsetof( PerformanceCounterDescriptionKHR, pNext ) );
+      return *this;
+    }
+
+    PerformanceCounterDescriptionKHR( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PerformanceCounterDescriptionKHR& operator=( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPerformanceCounterDescriptionKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceCounterDescriptionKHR*>( this );
+    }
+
+    operator VkPerformanceCounterDescriptionKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceCounterDescriptionKHR*>( this );
+    }
+
+    bool operator==( PerformanceCounterDescriptionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( memcmp( name, rhs.name, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 )
+          && ( memcmp( category, rhs.category, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 )
+          && ( memcmp( description, rhs.description, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 );
+    }
+
+    bool operator!=( PerformanceCounterDescriptionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterDescriptionKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags;
+    char name[VK_MAX_DESCRIPTION_SIZE];
+    char category[VK_MAX_DESCRIPTION_SIZE];
+    char description[VK_MAX_DESCRIPTION_SIZE];
+  };
+  static_assert( sizeof( PerformanceCounterDescriptionKHR ) == sizeof( VkPerformanceCounterDescriptionKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PerformanceCounterDescriptionKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PerformanceCounterKHR
+  {
+    PerformanceCounterKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit_ = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric,
+                           VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope_ = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eVkQueryScopeCommandBuffer,
+                           VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage_ = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32,
+                           std::array<uint8_t,VK_UUID_SIZE> const& uuid_ = { { 0 } } ) VULKAN_HPP_NOEXCEPT
+      : unit( unit_ )
+      , scope( scope_ )
+      , storage( storage_ )
+      , uuid{}
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<uint8_t,VK_UUID_SIZE,VK_UUID_SIZE>::copy( uuid, uuid_ );
+    }
+
+    VULKAN_HPP_NAMESPACE::PerformanceCounterKHR & operator=( VULKAN_HPP_NAMESPACE::PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterKHR ) - offsetof( PerformanceCounterKHR, pNext ) );
+      return *this;
+    }
+
+    PerformanceCounterKHR( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PerformanceCounterKHR& operator=( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPerformanceCounterKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceCounterKHR*>( this );
+    }
+
+    operator VkPerformanceCounterKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceCounterKHR*>( this );
+    }
+
+    bool operator==( PerformanceCounterKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( unit == rhs.unit )
+          && ( scope == rhs.scope )
+          && ( storage == rhs.storage )
+          && ( memcmp( uuid, rhs.uuid, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 );
+    }
+
+    bool operator!=( PerformanceCounterKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit;
+    VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope;
+    VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage;
+    uint8_t uuid[VK_UUID_SIZE];
+  };
+  static_assert( sizeof( PerformanceCounterKHR ) == sizeof( VkPerformanceCounterKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PerformanceCounterKHR>::value, "struct wrapper is not a standard layout!" );
+
+  union PerformanceCounterResultKHR
+  {
+    PerformanceCounterResultKHR( int32_t int32_ = 0 )
+    {
+      int32 = int32_;
+    }
+
+    PerformanceCounterResultKHR( int64_t int64_ )
+    {
+      int64 = int64_;
+    }
+
+    PerformanceCounterResultKHR( uint32_t uint32_ )
+    {
+      uint32 = uint32_;
+    }
+
+    PerformanceCounterResultKHR( uint64_t uint64_ )
+    {
+      uint64 = uint64_;
+    }
+
+    PerformanceCounterResultKHR( float float32_ )
+    {
+      float32 = float32_;
+    }
+
+    PerformanceCounterResultKHR( double float64_ )
+    {
+      float64 = float64_;
+    }
+
+    PerformanceCounterResultKHR & setInt32( int32_t int32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      int32 = int32_;
+      return *this;
+    }
+
+    PerformanceCounterResultKHR & setInt64( int64_t int64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      int64 = int64_;
+      return *this;
+    }
+
+    PerformanceCounterResultKHR & setUint32( uint32_t uint32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uint32 = uint32_;
+      return *this;
+    }
+
+    PerformanceCounterResultKHR & setUint64( uint64_t uint64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uint64 = uint64_;
+      return *this;
+    }
+
+    PerformanceCounterResultKHR & setFloat32( float float32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      float32 = float32_;
+      return *this;
+    }
+
+    PerformanceCounterResultKHR & setFloat64( double float64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      float64 = float64_;
+      return *this;
+    }
+
+    operator VkPerformanceCounterResultKHR const&() const
+    {
+      return *reinterpret_cast<const VkPerformanceCounterResultKHR*>(this);
+    }
+
+    operator VkPerformanceCounterResultKHR &()
+    {
+      return *reinterpret_cast<VkPerformanceCounterResultKHR*>(this);
+    }
+
+    int32_t int32;
+    int64_t int64;
+    uint32_t uint32;
+    uint64_t uint64;
+    float float32;
+    double float64;
+  };
+
+  struct PerformanceMarkerInfoINTEL
+  {
+    VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( uint64_t marker_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : marker( marker_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & operator=( VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL ) - offsetof( PerformanceMarkerInfoINTEL, pNext ) );
+      return *this;
+    }
+
+    PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PerformanceMarkerInfoINTEL& operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const *>(&rhs);
+      return *this;
+    }
+
+    PerformanceMarkerInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PerformanceMarkerInfoINTEL & setMarker( uint64_t marker_ ) VULKAN_HPP_NOEXCEPT
+    {
+      marker = marker_;
+      return *this;
+    }
+
+    operator VkPerformanceMarkerInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceMarkerInfoINTEL*>( this );
+    }
+
+    operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceMarkerInfoINTEL*>( this );
+    }
+
+    bool operator==( PerformanceMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( marker == rhs.marker );
+    }
+
+    bool operator!=( PerformanceMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceMarkerInfoINTEL;
+    const void* pNext = nullptr;
+    uint64_t marker;
+  };
+  static_assert( sizeof( PerformanceMarkerInfoINTEL ) == sizeof( VkPerformanceMarkerInfoINTEL ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PerformanceMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+
+  struct PerformanceOverrideInfoINTEL
+  {
+    VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware,
+                                                       VULKAN_HPP_NAMESPACE::Bool32 enable_ = 0,
+                                                       uint64_t parameter_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : type( type_ )
+      , enable( enable_ )
+      , parameter( parameter_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & operator=( VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL ) - offsetof( PerformanceOverrideInfoINTEL, pNext ) );
+      return *this;
+    }
+
+    PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PerformanceOverrideInfoINTEL& operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL const *>(&rhs);
+      return *this;
+    }
+
+    PerformanceOverrideInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PerformanceOverrideInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    PerformanceOverrideInfoINTEL & setEnable( VULKAN_HPP_NAMESPACE::Bool32 enable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enable = enable_;
+      return *this;
+    }
+
+    PerformanceOverrideInfoINTEL & setParameter( uint64_t parameter_ ) VULKAN_HPP_NOEXCEPT
+    {
+      parameter = parameter_;
+      return *this;
+    }
+
+    operator VkPerformanceOverrideInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceOverrideInfoINTEL*>( this );
+    }
+
+    operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceOverrideInfoINTEL*>( this );
+    }
+
+    bool operator==( PerformanceOverrideInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( type == rhs.type )
+          && ( enable == rhs.enable )
+          && ( parameter == rhs.parameter );
+    }
+
+    bool operator!=( PerformanceOverrideInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type;
+    VULKAN_HPP_NAMESPACE::Bool32 enable;
+    uint64_t parameter;
+  };
+  static_assert( sizeof( PerformanceOverrideInfoINTEL ) == sizeof( VkPerformanceOverrideInfoINTEL ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PerformanceOverrideInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+
+  struct PerformanceQuerySubmitInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( uint32_t counterPassIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : counterPassIndex( counterPassIndex_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR & operator=( VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR ) - offsetof( PerformanceQuerySubmitInfoKHR, pNext ) );
+      return *this;
+    }
+
+    PerformanceQuerySubmitInfoKHR( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PerformanceQuerySubmitInfoKHR& operator=( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    PerformanceQuerySubmitInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PerformanceQuerySubmitInfoKHR & setCounterPassIndex( uint32_t counterPassIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      counterPassIndex = counterPassIndex_;
+      return *this;
+    }
+
+    operator VkPerformanceQuerySubmitInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceQuerySubmitInfoKHR*>( this );
+    }
+
+    operator VkPerformanceQuerySubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceQuerySubmitInfoKHR*>( this );
+    }
+
+    bool operator==( PerformanceQuerySubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( counterPassIndex == rhs.counterPassIndex );
+    }
+
+    bool operator!=( PerformanceQuerySubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceQuerySubmitInfoKHR;
+    const void* pNext = nullptr;
+    uint32_t counterPassIndex;
+  };
+  static_assert( sizeof( PerformanceQuerySubmitInfoKHR ) == sizeof( VkPerformanceQuerySubmitInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PerformanceQuerySubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PerformanceStreamMarkerInfoINTEL
+  {
+    VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( uint32_t marker_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : marker( marker_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & operator=( VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL ) - offsetof( PerformanceStreamMarkerInfoINTEL, pNext ) );
+      return *this;
+    }
+
+    PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PerformanceStreamMarkerInfoINTEL& operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL const *>(&rhs);
+      return *this;
+    }
+
+    PerformanceStreamMarkerInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PerformanceStreamMarkerInfoINTEL & setMarker( uint32_t marker_ ) VULKAN_HPP_NOEXCEPT
+    {
+      marker = marker_;
+      return *this;
+    }
+
+    operator VkPerformanceStreamMarkerInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL*>( this );
+    }
+
+    operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceStreamMarkerInfoINTEL*>( this );
+    }
+
+    bool operator==( PerformanceStreamMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( marker == rhs.marker );
+    }
+
+    bool operator!=( PerformanceStreamMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL;
+    const void* pNext = nullptr;
+    uint32_t marker;
+  };
+  static_assert( sizeof( PerformanceStreamMarkerInfoINTEL ) == sizeof( VkPerformanceStreamMarkerInfoINTEL ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PerformanceStreamMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+
+  union PerformanceValueDataINTEL
+  {
+    PerformanceValueDataINTEL( uint32_t value32_ = 0 )
+    {
+      value32 = value32_;
+    }
+
+    PerformanceValueDataINTEL( uint64_t value64_ )
+    {
+      value64 = value64_;
+    }
+
+    PerformanceValueDataINTEL( float valueFloat_ )
+    {
+      valueFloat = valueFloat_;
+    }
+
+    PerformanceValueDataINTEL( const char* valueString_ )
+    {
+      valueString = valueString_;
+    }
+
+    PerformanceValueDataINTEL & setValue32( uint32_t value32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      value32 = value32_;
+      return *this;
+    }
+
+    PerformanceValueDataINTEL & setValue64( uint64_t value64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      value64 = value64_;
+      return *this;
+    }
+
+    PerformanceValueDataINTEL & setValueFloat( float valueFloat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      valueFloat = valueFloat_;
+      return *this;
+    }
+
+    PerformanceValueDataINTEL & setValueBool( VULKAN_HPP_NAMESPACE::Bool32 valueBool_ ) VULKAN_HPP_NOEXCEPT
+    {
+      valueBool = valueBool_;
+      return *this;
+    }
+
+    PerformanceValueDataINTEL & setValueString( const char* valueString_ ) VULKAN_HPP_NOEXCEPT
+    {
+      valueString = valueString_;
+      return *this;
+    }
+
+    operator VkPerformanceValueDataINTEL const&() const
+    {
+      return *reinterpret_cast<const VkPerformanceValueDataINTEL*>(this);
+    }
+
+    operator VkPerformanceValueDataINTEL &()
+    {
+      return *reinterpret_cast<VkPerformanceValueDataINTEL*>(this);
+    }
+
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+    uint32_t value32;
+    uint64_t value64;
+    float valueFloat;
+    VULKAN_HPP_NAMESPACE::Bool32 valueBool;
+    const char* valueString;
+#else
+    uint32_t value32;
+    uint64_t value64;
+    float valueFloat;
+    VkBool32 valueBool;
+    const char* valueString;
+#endif  /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+  };
+
+  struct PerformanceValueINTEL
+  {
+    PerformanceValueINTEL( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32,
+                           VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ = VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL() ) VULKAN_HPP_NOEXCEPT
+      : type( type_ )
+      , data( data_ )
+    {}
+
+    PerformanceValueINTEL( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PerformanceValueINTEL& operator=( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL const *>(&rhs);
+      return *this;
+    }
+
+    PerformanceValueINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    PerformanceValueINTEL & setData( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ ) VULKAN_HPP_NOEXCEPT
+    {
+      data = data_;
+      return *this;
+    }
+
+    operator VkPerformanceValueINTEL const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceValueINTEL*>( this );
+    }
+
+    operator VkPerformanceValueINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceValueINTEL*>( this );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type;
+    VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data;
+  };
+  static_assert( sizeof( PerformanceValueINTEL ) == sizeof( VkPerformanceValueINTEL ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PerformanceValueINTEL>::value, "struct wrapper is not a standard layout!" );
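+
+  // Illustrative usage sketch, not part of the generated header (assumes the
+  // default VULKAN_HPP_NAMESPACE alias "vk"): PerformanceValueINTEL pairs the
+  // PerformanceValueDataINTEL union with the PerformanceValueTypeINTEL tag
+  // that indicates which union member is active, e.g.
+  //
+  //   vk::PerformanceValueINTEL value( vk::PerformanceValueTypeINTEL::eUint32,
+  //                                    vk::PerformanceValueDataINTEL( 42u ) );
+  //   VkPerformanceValueINTEL raw = value;  // implicit conversion to the C struct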
+
+  struct PhysicalDevice16BitStorageFeatures
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = 0,
+                                                             VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = 0,
+                                                             VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = 0,
+                                                             VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : storageBuffer16BitAccess( storageBuffer16BitAccess_ )
+      , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ )
+      , storagePushConstant16( storagePushConstant16_ )
+      , storageInputOutput16( storageInputOutput16_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures ) - offsetof( PhysicalDevice16BitStorageFeatures, pNext ) );
+      return *this;
+    }
+
+    PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDevice16BitStorageFeatures& operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDevice16BitStorageFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDevice16BitStorageFeatures & setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storageBuffer16BitAccess = storageBuffer16BitAccess_;
+      return *this;
+    }
+
+    PhysicalDevice16BitStorageFeatures & setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_;
+      return *this;
+    }
+
+    PhysicalDevice16BitStorageFeatures & setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storagePushConstant16 = storagePushConstant16_;
+      return *this;
+    }
+
+    PhysicalDevice16BitStorageFeatures & setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storageInputOutput16 = storageInputOutput16_;
+      return *this;
+    }
+
+    operator VkPhysicalDevice16BitStorageFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevice16BitStorageFeatures*>( this );
+    }
+
+    operator VkPhysicalDevice16BitStorageFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevice16BitStorageFeatures*>( this );
+    }
+
+    bool operator==( PhysicalDevice16BitStorageFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess )
+          && ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess )
+          && ( storagePushConstant16 == rhs.storagePushConstant16 )
+          && ( storageInputOutput16 == rhs.storageInputOutput16 );
+    }
+
+    bool operator!=( PhysicalDevice16BitStorageFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess;
+    VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess;
+    VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16;
+    VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16;
+  };
+  static_assert( sizeof( PhysicalDevice16BitStorageFeatures ) == sizeof( VkPhysicalDevice16BitStorageFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevice16BitStorageFeatures>::value, "struct wrapper is not a standard layout!" );
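+
+  // Illustrative usage sketch, not part of the generated header (assumes the
+  // default VULKAN_HPP_NAMESPACE alias "vk" and a valid vk::PhysicalDevice):
+  // feature structs such as this one are typically chained through pNext into
+  // PhysicalDeviceFeatures2 when querying support, e.g.
+  //
+  //   vk::PhysicalDevice16BitStorageFeatures storage16Features;
+  //   vk::PhysicalDeviceFeatures2 features2;
+  //   features2.pNext = &storage16Features;
+  //   physicalDevice.getFeatures2( &features2 );
+  //   bool supported = ( storage16Features.storageBuffer16BitAccess == VK_TRUE );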
+
+  struct PhysicalDevice8BitStorageFeaturesKHR
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = 0,
+                                                               VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = 0,
+                                                               VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : storageBuffer8BitAccess( storageBuffer8BitAccess_ )
+      , uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ )
+      , storagePushConstant8( storagePushConstant8_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeaturesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeaturesKHR ) - offsetof( PhysicalDevice8BitStorageFeaturesKHR, pNext ) );
+      return *this;
+    }
+
+    PhysicalDevice8BitStorageFeaturesKHR( VkPhysicalDevice8BitStorageFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDevice8BitStorageFeaturesKHR& operator=( VkPhysicalDevice8BitStorageFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeaturesKHR const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDevice8BitStorageFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDevice8BitStorageFeaturesKHR & setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storageBuffer8BitAccess = storageBuffer8BitAccess_;
+      return *this;
+    }
+
+    PhysicalDevice8BitStorageFeaturesKHR & setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_;
+      return *this;
+    }
+
+    PhysicalDevice8BitStorageFeaturesKHR & setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storagePushConstant8 = storagePushConstant8_;
+      return *this;
+    }
+
+    operator VkPhysicalDevice8BitStorageFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevice8BitStorageFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDevice8BitStorageFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevice8BitStorageFeaturesKHR*>( this );
+    }
+
+    bool operator==( PhysicalDevice8BitStorageFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess )
+          && ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess )
+          && ( storagePushConstant8 == rhs.storagePushConstant8 );
+    }
+
+    bool operator!=( PhysicalDevice8BitStorageFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice8BitStorageFeaturesKHR;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess;
+    VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess;
+    VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8;
+  };
+  static_assert( sizeof( PhysicalDevice8BitStorageFeaturesKHR ) == sizeof( VkPhysicalDevice8BitStorageFeaturesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevice8BitStorageFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceASTCDecodeFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : decodeModeSharedExponent( decodeModeSharedExponent_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT ) - offsetof( PhysicalDeviceASTCDecodeFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceASTCDecodeFeaturesEXT( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceASTCDecodeFeaturesEXT& operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceASTCDecodeFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceASTCDecodeFeaturesEXT & setDecodeModeSharedExponent( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      decodeModeSharedExponent = decodeModeSharedExponent_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceASTCDecodeFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceASTCDecodeFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceASTCDecodeFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceASTCDecodeFeaturesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceASTCDecodeFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( decodeModeSharedExponent == rhs.decodeModeSharedExponent );
+    }
+
+    bool operator!=( PhysicalDeviceASTCDecodeFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent;
+  };
+  static_assert( sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) == sizeof( VkPhysicalDeviceASTCDecodeFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceASTCDecodeFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : advancedBlendCoherentOperations( advancedBlendCoherentOperations_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) - offsetof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceBlendOperationAdvancedFeaturesEXT& operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setAdvancedBlendCoherentOperations( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      advancedBlendCoherentOperations = advancedBlendCoherentOperations_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations );
+    }
+
+    bool operator!=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations;
+  };
+  static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceBlendOperationAdvancedFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT
+  {
+    PhysicalDeviceBlendOperationAdvancedPropertiesEXT( uint32_t advancedBlendMaxColorAttachments_ = 0,
+                                                       VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend_ = 0,
+                                                       VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor_ = 0,
+                                                       VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor_ = 0,
+                                                       VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap_ = 0,
+                                                       VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : advancedBlendMaxColorAttachments( advancedBlendMaxColorAttachments_ )
+      , advancedBlendIndependentBlend( advancedBlendIndependentBlend_ )
+      , advancedBlendNonPremultipliedSrcColor( advancedBlendNonPremultipliedSrcColor_ )
+      , advancedBlendNonPremultipliedDstColor( advancedBlendNonPremultipliedDstColor_ )
+      , advancedBlendCorrelatedOverlap( advancedBlendCorrelatedOverlap_ )
+      , advancedBlendAllOperations( advancedBlendAllOperations_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) - offsetof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceBlendOperationAdvancedPropertiesEXT( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceBlendOperationAdvancedPropertiesEXT& operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments )
+          && ( advancedBlendIndependentBlend == rhs.advancedBlendIndependentBlend )
+          && ( advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor )
+          && ( advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor )
+          && ( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap )
+          && ( advancedBlendAllOperations == rhs.advancedBlendAllOperations );
+    }
+
+    bool operator!=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+    void* pNext = nullptr;
+    uint32_t advancedBlendMaxColorAttachments;
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend;
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor;
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor;
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap;
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations;
+  };
+  static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceBlendOperationAdvancedPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
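+
+  // Illustrative usage sketch, not part of the generated header (assumes the
+  // default VULKAN_HPP_NAMESPACE alias "vk" and a valid vk::PhysicalDevice):
+  // unlike the feature structs, properties structs carry no setters for the
+  // reported limits; the implementation fills them in when they are chained
+  // into PhysicalDeviceProperties2, e.g.
+  //
+  //   vk::PhysicalDeviceBlendOperationAdvancedPropertiesEXT blendProps;
+  //   vk::PhysicalDeviceProperties2 props2;
+  //   props2.pNext = &blendProps;
+  //   physicalDevice.getProperties2( &props2 );
+  //   uint32_t maxAttachments = blendProps.advancedBlendMaxColorAttachments;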
+
+  struct PhysicalDeviceBufferDeviceAddressFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = 0,
+                                                                       VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = 0,
+                                                                       VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : bufferDeviceAddress( bufferDeviceAddress_ )
+      , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ )
+      , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT ) - offsetof( PhysicalDeviceBufferDeviceAddressFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeaturesEXT( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeaturesEXT& operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddress = bufferDeviceAddress_;
+      return *this;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
+      return *this;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceBufferDeviceAddressFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( bufferDeviceAddress == rhs.bufferDeviceAddress )
+          && ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay )
+          && ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice );
+    }
+
+    bool operator!=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress;
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay;
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice;
+  };
+  static_assert( sizeof( PhysicalDeviceBufferDeviceAddressFeaturesEXT ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceBufferDeviceAddressFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceBufferDeviceAddressFeaturesKHR
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = 0,
+                                                                       VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = 0,
+                                                                       VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : bufferDeviceAddress( bufferDeviceAddress_ )
+      , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ )
+      , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesKHR ) - offsetof( PhysicalDeviceBufferDeviceAddressFeaturesKHR, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeaturesKHR( VkPhysicalDeviceBufferDeviceAddressFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeaturesKHR& operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesKHR const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeaturesKHR & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddress = bufferDeviceAddress_;
+      return *this;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeaturesKHR & setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
+      return *this;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeaturesKHR & setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceBufferDeviceAddressFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceBufferDeviceAddressFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeaturesKHR*>( this );
+    }
+
+    bool operator==( PhysicalDeviceBufferDeviceAddressFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( bufferDeviceAddress == rhs.bufferDeviceAddress )
+          && ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay )
+          && ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice );
+    }
+
+    bool operator!=( PhysicalDeviceBufferDeviceAddressFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesKHR;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress;
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay;
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice;
+  };
+  static_assert( sizeof( PhysicalDeviceBufferDeviceAddressFeaturesKHR ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeaturesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceBufferDeviceAddressFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceCoherentMemoryFeaturesAMD
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : deviceCoherentMemory( deviceCoherentMemory_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD ) - offsetof( PhysicalDeviceCoherentMemoryFeaturesAMD, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceCoherentMemoryFeaturesAMD( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceCoherentMemoryFeaturesAMD& operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceCoherentMemoryFeaturesAMD & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceCoherentMemoryFeaturesAMD & setDeviceCoherentMemory( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceCoherentMemory = deviceCoherentMemory_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCoherentMemoryFeaturesAMD*>( this );
+    }
+
+    operator VkPhysicalDeviceCoherentMemoryFeaturesAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCoherentMemoryFeaturesAMD*>( this );
+    }
+
+    bool operator==( PhysicalDeviceCoherentMemoryFeaturesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceCoherentMemory == rhs.deviceCoherentMemory );
+    }
+
+    bool operator!=( PhysicalDeviceCoherentMemoryFeaturesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory;
+  };
+  static_assert( sizeof( PhysicalDeviceCoherentMemoryFeaturesAMD ) == sizeof( VkPhysicalDeviceCoherentMemoryFeaturesAMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceCoherentMemoryFeaturesAMD>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceComputeShaderDerivativesFeaturesNV
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ = 0,
+                                                                           VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : computeDerivativeGroupQuads( computeDerivativeGroupQuads_ )
+      , computeDerivativeGroupLinear( computeDerivativeGroupLinear_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV ) - offsetof( PhysicalDeviceComputeShaderDerivativesFeaturesNV, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceComputeShaderDerivativesFeaturesNV( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceComputeShaderDerivativesFeaturesNV& operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceComputeShaderDerivativesFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceComputeShaderDerivativesFeaturesNV & setComputeDerivativeGroupQuads( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ ) VULKAN_HPP_NOEXCEPT
+    {
+      computeDerivativeGroupQuads = computeDerivativeGroupQuads_;
+      return *this;
+    }
+
+    PhysicalDeviceComputeShaderDerivativesFeaturesNV & setComputeDerivativeGroupLinear( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ ) VULKAN_HPP_NOEXCEPT
+    {
+      computeDerivativeGroupLinear = computeDerivativeGroupLinear_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceComputeShaderDerivativesFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV*>( this );
+    }
+
+    bool operator==( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( computeDerivativeGroupQuads == rhs.computeDerivativeGroupQuads )
+          && ( computeDerivativeGroupLinear == rhs.computeDerivativeGroupLinear );
+    }
+
+    bool operator!=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads;
+    VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear;
+  };
+  static_assert( sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) == sizeof( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceComputeShaderDerivativesFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceConditionalRenderingFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ = 0,
+                                                                        VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : conditionalRendering( conditionalRendering_ )
+      , inheritedConditionalRendering( inheritedConditionalRendering_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT ) - offsetof( PhysicalDeviceConditionalRenderingFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceConditionalRenderingFeaturesEXT& operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceConditionalRenderingFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceConditionalRenderingFeaturesEXT & setConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ ) VULKAN_HPP_NOEXCEPT
+    {
+      conditionalRendering = conditionalRendering_;
+      return *this;
+    }
+
+    PhysicalDeviceConditionalRenderingFeaturesEXT & setInheritedConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inheritedConditionalRendering = inheritedConditionalRendering_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceConditionalRenderingFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceConditionalRenderingFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceConditionalRenderingFeaturesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceConditionalRenderingFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( conditionalRendering == rhs.conditionalRendering )
+          && ( inheritedConditionalRendering == rhs.inheritedConditionalRendering );
+    }
+
+    bool operator!=( PhysicalDeviceConditionalRenderingFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering;
+    VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering;
+  };
+  static_assert( sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) == sizeof( VkPhysicalDeviceConditionalRenderingFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceConditionalRenderingFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceConservativeRasterizationPropertiesEXT
+  {
+    PhysicalDeviceConservativeRasterizationPropertiesEXT( float primitiveOverestimationSize_ = 0,
+                                                          float maxExtraPrimitiveOverestimationSize_ = 0,
+                                                          float extraPrimitiveOverestimationSizeGranularity_ = 0,
+                                                          VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation_ = 0,
+                                                          VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization_ = 0,
+                                                          VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized_ = 0,
+                                                          VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized_ = 0,
+                                                          VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable_ = 0,
+                                                          VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : primitiveOverestimationSize( primitiveOverestimationSize_ )
+      , maxExtraPrimitiveOverestimationSize( maxExtraPrimitiveOverestimationSize_ )
+      , extraPrimitiveOverestimationSizeGranularity( extraPrimitiveOverestimationSizeGranularity_ )
+      , primitiveUnderestimation( primitiveUnderestimation_ )
+      , conservativePointAndLineRasterization( conservativePointAndLineRasterization_ )
+      , degenerateTrianglesRasterized( degenerateTrianglesRasterized_ )
+      , degenerateLinesRasterized( degenerateLinesRasterized_ )
+      , fullyCoveredFragmentShaderInputVariable( fullyCoveredFragmentShaderInputVariable_ )
+      , conservativeRasterizationPostDepthCoverage( conservativeRasterizationPostDepthCoverage_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT ) - offsetof( PhysicalDeviceConservativeRasterizationPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceConservativeRasterizationPropertiesEXT( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceConservativeRasterizationPropertiesEXT& operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceConservativeRasterizationPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceConservativeRasterizationPropertiesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( primitiveOverestimationSize == rhs.primitiveOverestimationSize )
+          && ( maxExtraPrimitiveOverestimationSize == rhs.maxExtraPrimitiveOverestimationSize )
+          && ( extraPrimitiveOverestimationSizeGranularity == rhs.extraPrimitiveOverestimationSizeGranularity )
+          && ( primitiveUnderestimation == rhs.primitiveUnderestimation )
+          && ( conservativePointAndLineRasterization == rhs.conservativePointAndLineRasterization )
+          && ( degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized )
+          && ( degenerateLinesRasterized == rhs.degenerateLinesRasterized )
+          && ( fullyCoveredFragmentShaderInputVariable == rhs.fullyCoveredFragmentShaderInputVariable )
+          && ( conservativeRasterizationPostDepthCoverage == rhs.conservativeRasterizationPostDepthCoverage );
+    }
+
+    bool operator!=( PhysicalDeviceConservativeRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;
+    void* pNext = nullptr;
+    float primitiveOverestimationSize;
+    float maxExtraPrimitiveOverestimationSize;
+    float extraPrimitiveOverestimationSizeGranularity;
+    VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation;
+    VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization;
+    VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized;
+    VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized;
+    VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable;
+    VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage;
+  };
+  static_assert( sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceConservativeRasterizationPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceConservativeRasterizationPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceCooperativeMatrixFeaturesNV
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = 0,
+                                                                    VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : cooperativeMatrix( cooperativeMatrix_ )
+      , cooperativeMatrixRobustBufferAccess( cooperativeMatrixRobustBufferAccess_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV ) - offsetof( PhysicalDeviceCooperativeMatrixFeaturesNV, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceCooperativeMatrixFeaturesNV( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceCooperativeMatrixFeaturesNV& operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceCooperativeMatrixFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrix( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT
+    {
+      cooperativeMatrix = cooperativeMatrix_;
+      return *this;
+    }
+
+    PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrixRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceCooperativeMatrixFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixFeaturesNV*>( this );
+    }
+
+    bool operator==( PhysicalDeviceCooperativeMatrixFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( cooperativeMatrix == rhs.cooperativeMatrix )
+          && ( cooperativeMatrixRobustBufferAccess == rhs.cooperativeMatrixRobustBufferAccess );
+    }
+
+    bool operator!=( PhysicalDeviceCooperativeMatrixFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix;
+    VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess;
+  };
+  static_assert( sizeof( PhysicalDeviceCooperativeMatrixFeaturesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceCooperativeMatrixFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceCooperativeMatrixPropertiesNV
+  {
+    PhysicalDeviceCooperativeMatrixPropertiesNV( VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = VULKAN_HPP_NAMESPACE::ShaderStageFlags() ) VULKAN_HPP_NOEXCEPT
+      : cooperativeMatrixSupportedStages( cooperativeMatrixSupportedStages_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV ) - offsetof( PhysicalDeviceCooperativeMatrixPropertiesNV, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceCooperativeMatrixPropertiesNV& operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixPropertiesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixPropertiesNV*>( this );
+    }
+
+    bool operator==( PhysicalDeviceCooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages );
+    }
+
+    bool operator!=( PhysicalDeviceCooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages;
+  };
+  static_assert( sizeof( PhysicalDeviceCooperativeMatrixPropertiesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceCooperativeMatrixPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceCornerSampledImageFeaturesNV
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : cornerSampledImage( cornerSampledImage_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV ) - offsetof( PhysicalDeviceCornerSampledImageFeaturesNV, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceCornerSampledImageFeaturesNV& operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceCornerSampledImageFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceCornerSampledImageFeaturesNV & setCornerSampledImage( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      cornerSampledImage = cornerSampledImage_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceCornerSampledImageFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCornerSampledImageFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceCornerSampledImageFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCornerSampledImageFeaturesNV*>( this );
+    }
+
+    bool operator==( PhysicalDeviceCornerSampledImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( cornerSampledImage == rhs.cornerSampledImage );
+    }
+
+    bool operator!=( PhysicalDeviceCornerSampledImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage;
+  };
+  static_assert( sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) == sizeof( VkPhysicalDeviceCornerSampledImageFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceCornerSampledImageFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceCoverageReductionModeFeaturesNV
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : coverageReductionMode( coverageReductionMode_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV ) - offsetof( PhysicalDeviceCoverageReductionModeFeaturesNV, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceCoverageReductionModeFeaturesNV( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceCoverageReductionModeFeaturesNV& operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceCoverageReductionModeFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceCoverageReductionModeFeaturesNV & setCoverageReductionMode( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      coverageReductionMode = coverageReductionMode_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCoverageReductionModeFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceCoverageReductionModeFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCoverageReductionModeFeaturesNV*>( this );
+    }
+
+    bool operator==( PhysicalDeviceCoverageReductionModeFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( coverageReductionMode == rhs.coverageReductionMode );
+    }
+
+    bool operator!=( PhysicalDeviceCoverageReductionModeFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode;
+  };
+  static_assert( sizeof( PhysicalDeviceCoverageReductionModeFeaturesNV ) == sizeof( VkPhysicalDeviceCoverageReductionModeFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceCoverageReductionModeFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : dedicatedAllocationImageAliasing( dedicatedAllocationImageAliasing_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) - offsetof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV& operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setDedicatedAllocationImageAliasing( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dedicatedAllocationImageAliasing = dedicatedAllocationImageAliasing_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV*>( this );
+    }
+
+    bool operator==( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( dedicatedAllocationImageAliasing == rhs.dedicatedAllocationImageAliasing );
+    }
+
+    bool operator!=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing;
+  };
+  static_assert( sizeof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) == sizeof( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceDepthClipEnableFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : depthClipEnable( depthClipEnable_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT ) - offsetof( PhysicalDeviceDepthClipEnableFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceDepthClipEnableFeaturesEXT& operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceDepthClipEnableFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceDepthClipEnableFeaturesEXT & setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthClipEnable = depthClipEnable_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDepthClipEnableFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceDepthClipEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDepthClipEnableFeaturesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceDepthClipEnableFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( depthClipEnable == rhs.depthClipEnable );
+    }
+
+    bool operator!=( PhysicalDeviceDepthClipEnableFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable;
+  };
+  static_assert( sizeof( PhysicalDeviceDepthClipEnableFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClipEnableFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceDepthClipEnableFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceDepthStencilResolvePropertiesKHR
+  {
+    PhysicalDeviceDepthStencilResolvePropertiesKHR( VULKAN_HPP_NAMESPACE::ResolveModeFlagsKHR supportedDepthResolveModes_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagsKHR(),
+                                                    VULKAN_HPP_NAMESPACE::ResolveModeFlagsKHR supportedStencilResolveModes_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagsKHR(),
+                                                    VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = 0,
+                                                    VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : supportedDepthResolveModes( supportedDepthResolveModes_ )
+      , supportedStencilResolveModes( supportedStencilResolveModes_ )
+      , independentResolveNone( independentResolveNone_ )
+      , independentResolve( independentResolve_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolvePropertiesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolvePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolvePropertiesKHR ) - offsetof( PhysicalDeviceDepthStencilResolvePropertiesKHR, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceDepthStencilResolvePropertiesKHR( VkPhysicalDeviceDepthStencilResolvePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceDepthStencilResolvePropertiesKHR& operator=( VkPhysicalDeviceDepthStencilResolvePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolvePropertiesKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceDepthStencilResolvePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDepthStencilResolvePropertiesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceDepthStencilResolvePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDepthStencilResolvePropertiesKHR*>( this );
+    }
+
+    bool operator==( PhysicalDeviceDepthStencilResolvePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( supportedDepthResolveModes == rhs.supportedDepthResolveModes )
+          && ( supportedStencilResolveModes == rhs.supportedStencilResolveModes )
+          && ( independentResolveNone == rhs.independentResolveNone )
+          && ( independentResolve == rhs.independentResolve );
+    }
+
+    bool operator!=( PhysicalDeviceDepthStencilResolvePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthStencilResolvePropertiesKHR;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ResolveModeFlagsKHR supportedDepthResolveModes;
+    VULKAN_HPP_NAMESPACE::ResolveModeFlagsKHR supportedStencilResolveModes;
+    VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone;
+    VULKAN_HPP_NAMESPACE::Bool32 independentResolve;
+  };
+  static_assert( sizeof( PhysicalDeviceDepthStencilResolvePropertiesKHR ) == sizeof( VkPhysicalDeviceDepthStencilResolvePropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceDepthStencilResolvePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceDescriptorIndexingFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = 0,
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = 0,
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = 0,
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = 0,
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = 0,
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = 0,
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = 0,
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = 0,
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = 0,
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = 0,
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = 0,
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = 0,
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = 0,
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = 0,
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = 0,
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = 0,
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = 0,
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = 0,
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = 0,
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ )
+      , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ )
+      , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ )
+      , shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ )
+      , shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ )
+      , shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ )
+      , shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ )
+      , shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ )
+      , shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ )
+      , shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ )
+      , descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ )
+      , descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ )
+      , descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ )
+      , descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ )
+      , descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ )
+      , descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ )
+      , descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ )
+      , descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ )
+      , descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ )
+      , runtimeDescriptorArray( runtimeDescriptorArray_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeaturesEXT ) - offsetof( PhysicalDeviceDescriptorIndexingFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT( VkPhysicalDeviceDescriptorIndexingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT& operator=( VkPhysicalDeviceDescriptorIndexingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingUniformTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingStorageTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT
+    {
+      runtimeDescriptorArray = runtimeDescriptorArray_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceDescriptorIndexingFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceDescriptorIndexingFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingFeaturesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceDescriptorIndexingFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing )
+          && ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing )
+          && ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing )
+          && ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing )
+          && ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing )
+          && ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing )
+          && ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing )
+          && ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing )
+          && ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing )
+          && ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing )
+          && ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind )
+          && ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind )
+          && ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind )
+          && ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind )
+          && ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind )
+          && ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind )
+          && ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending )
+          && ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound )
+          && ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount )
+          && ( runtimeDescriptorArray == rhs.runtimeDescriptorArray );
+    }
+
+    bool operator!=( PhysicalDeviceDescriptorIndexingFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeaturesEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing;
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind;
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind;
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind;
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind;
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind;
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind;
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending;
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound;
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount;
+    VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray;
+  };
+  static_assert( sizeof( PhysicalDeviceDescriptorIndexingFeaturesEXT ) == sizeof( VkPhysicalDeviceDescriptorIndexingFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceDescriptorIndexingFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceDescriptorIndexingPropertiesEXT
+  {
+    PhysicalDeviceDescriptorIndexingPropertiesEXT( uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = 0,
+                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = 0,
+                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = 0,
+                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = 0,
+                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = 0,
+                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = 0,
+                                                   VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = 0,
+                                                   VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = 0,
+                                                   uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = 0,
+                                                   uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = 0,
+                                                   uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = 0,
+                                                   uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = 0,
+                                                   uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = 0,
+                                                   uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = 0,
+                                                   uint32_t maxPerStageUpdateAfterBindResources_ = 0,
+                                                   uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = 0,
+                                                   uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = 0,
+                                                   uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = 0,
+                                                   uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = 0,
+                                                   uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = 0,
+                                                   uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = 0,
+                                                   uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = 0,
+                                                   uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ )
+      , shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ )
+      , shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ )
+      , shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ )
+      , shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ )
+      , shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ )
+      , robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ )
+      , quadDivergentImplicitLod( quadDivergentImplicitLod_ )
+      , maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ )
+      , maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ )
+      , maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ )
+      , maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ )
+      , maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ )
+      , maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ )
+      , maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ )
+      , maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ )
+      , maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ )
+      , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ )
+      , maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ )
+      , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ )
+      , maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ )
+      , maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ )
+      , maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingPropertiesEXT ) - offsetof( PhysicalDeviceDescriptorIndexingPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingPropertiesEXT( VkPhysicalDeviceDescriptorIndexingPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceDescriptorIndexingPropertiesEXT& operator=( VkPhysicalDeviceDescriptorIndexingPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceDescriptorIndexingPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceDescriptorIndexingPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingPropertiesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceDescriptorIndexingPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools )
+          && ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative )
+          && ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative )
+          && ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative )
+          && ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative )
+          && ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative )
+          && ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind )
+          && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod )
+          && ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers )
+          && ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers )
+          && ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers )
+          && ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages )
+          && ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages )
+          && ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments )
+          && ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources )
+          && ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers )
+          && ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers )
+          && ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic )
+          && ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers )
+          && ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic )
+          && ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages )
+          && ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages )
+          && ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments );
+    }
+
+    bool operator!=( PhysicalDeviceDescriptorIndexingPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingPropertiesEXT;
+    void* pNext = nullptr;
+    uint32_t maxUpdateAfterBindDescriptorsInAllPools;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative;
+    VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind;
+    VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod;
+    uint32_t maxPerStageDescriptorUpdateAfterBindSamplers;
+    uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers;
+    uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers;
+    uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages;
+    uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages;
+    uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments;
+    uint32_t maxPerStageUpdateAfterBindResources;
+    uint32_t maxDescriptorSetUpdateAfterBindSamplers;
+    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers;
+    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
+    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers;
+    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
+    uint32_t maxDescriptorSetUpdateAfterBindSampledImages;
+    uint32_t maxDescriptorSetUpdateAfterBindStorageImages;
+    uint32_t maxDescriptorSetUpdateAfterBindInputAttachments;
+  };
+  static_assert( sizeof( PhysicalDeviceDescriptorIndexingPropertiesEXT ) == sizeof( VkPhysicalDeviceDescriptorIndexingPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceDescriptorIndexingPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceDiscardRectanglePropertiesEXT
+  {
+    PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : maxDiscardRectangles( maxDiscardRectangles_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT ) - offsetof( PhysicalDeviceDiscardRectanglePropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceDiscardRectanglePropertiesEXT& operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDiscardRectanglePropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceDiscardRectanglePropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDiscardRectanglePropertiesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxDiscardRectangles == rhs.maxDiscardRectangles );
+    }
+
+    bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;
+    void* pNext = nullptr;
+    uint32_t maxDiscardRectangles;
+  };
+  static_assert( sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) == sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceDiscardRectanglePropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceDriverPropertiesKHR
+  {
+    PhysicalDeviceDriverPropertiesKHR( VULKAN_HPP_NAMESPACE::DriverIdKHR driverID_ = VULKAN_HPP_NAMESPACE::DriverIdKHR::eAmdProprietary,
+                                       std::array<char,VK_MAX_DRIVER_NAME_SIZE_KHR> const& driverName_ = { { 0 } },
+                                       std::array<char,VK_MAX_DRIVER_INFO_SIZE_KHR> const& driverInfo_ = { { 0 } },
+                                       VULKAN_HPP_NAMESPACE::ConformanceVersionKHR conformanceVersion_ = VULKAN_HPP_NAMESPACE::ConformanceVersionKHR() ) VULKAN_HPP_NOEXCEPT
+      : driverID( driverID_ )
+      , driverName{}
+      , driverInfo{}
+      , conformanceVersion( conformanceVersion_ )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<char,VK_MAX_DRIVER_NAME_SIZE_KHR,VK_MAX_DRIVER_NAME_SIZE_KHR>::copy( driverName, driverName_ );
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<char,VK_MAX_DRIVER_INFO_SIZE_KHR,VK_MAX_DRIVER_INFO_SIZE_KHR>::copy( driverInfo, driverInfo_ );
+    }
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverPropertiesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverPropertiesKHR ) - offsetof( PhysicalDeviceDriverPropertiesKHR, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceDriverPropertiesKHR( VkPhysicalDeviceDriverPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceDriverPropertiesKHR& operator=( VkPhysicalDeviceDriverPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverPropertiesKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceDriverPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDriverPropertiesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceDriverPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDriverPropertiesKHR*>( this );
+    }
+
+    bool operator==( PhysicalDeviceDriverPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( driverID == rhs.driverID )
+          && ( memcmp( driverName, rhs.driverName, VK_MAX_DRIVER_NAME_SIZE_KHR * sizeof( char ) ) == 0 )
+          && ( memcmp( driverInfo, rhs.driverInfo, VK_MAX_DRIVER_INFO_SIZE_KHR * sizeof( char ) ) == 0 )
+          && ( conformanceVersion == rhs.conformanceVersion );
+    }
+
+    bool operator!=( PhysicalDeviceDriverPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDriverPropertiesKHR;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DriverIdKHR driverID;
+    char driverName[VK_MAX_DRIVER_NAME_SIZE_KHR];
+    char driverInfo[VK_MAX_DRIVER_INFO_SIZE_KHR];
+    VULKAN_HPP_NAMESPACE::ConformanceVersionKHR conformanceVersion;
+  };
+  static_assert( sizeof( PhysicalDeviceDriverPropertiesKHR ) == sizeof( VkPhysicalDeviceDriverPropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceDriverPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceExclusiveScissorFeaturesNV
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : exclusiveScissor( exclusiveScissor_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV ) - offsetof( PhysicalDeviceExclusiveScissorFeaturesNV, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceExclusiveScissorFeaturesNV& operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceExclusiveScissorFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceExclusiveScissorFeaturesNV & setExclusiveScissor( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      exclusiveScissor = exclusiveScissor_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceExclusiveScissorFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExclusiveScissorFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceExclusiveScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExclusiveScissorFeaturesNV*>( this );
+    }
+
+    bool operator==( PhysicalDeviceExclusiveScissorFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( exclusiveScissor == rhs.exclusiveScissor );
+    }
+
+    bool operator!=( PhysicalDeviceExclusiveScissorFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor;
+  };
+  static_assert( sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) == sizeof( VkPhysicalDeviceExclusiveScissorFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceExclusiveScissorFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceExternalBufferInfo
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = VULKAN_HPP_NAMESPACE::BufferCreateFlags(),
+                                                           VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = VULKAN_HPP_NAMESPACE::BufferUsageFlags(),
+                                                           VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , usage( usage_ )
+      , handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo ) - offsetof( PhysicalDeviceExternalBufferInfo, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceExternalBufferInfo& operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceExternalBufferInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceExternalBufferInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PhysicalDeviceExternalBufferInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    PhysicalDeviceExternalBufferInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceExternalBufferInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( this );
+    }
+
+    operator VkPhysicalDeviceExternalBufferInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExternalBufferInfo*>( this );
+    }
+
+    bool operator==( PhysicalDeviceExternalBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( usage == rhs.usage )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( PhysicalDeviceExternalBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::BufferCreateFlags flags;
+    VULKAN_HPP_NAMESPACE::BufferUsageFlags usage;
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType;
+  };
+  static_assert( sizeof( PhysicalDeviceExternalBufferInfo ) == sizeof( VkPhysicalDeviceExternalBufferInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceExternalBufferInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceExternalFenceInfo
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+      : handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo ) - offsetof( PhysicalDeviceExternalFenceInfo, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceExternalFenceInfo& operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceExternalFenceInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceExternalFenceInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceExternalFenceInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( this );
+    }
+
+    operator VkPhysicalDeviceExternalFenceInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExternalFenceInfo*>( this );
+    }
+
+    bool operator==( PhysicalDeviceExternalFenceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( PhysicalDeviceExternalFenceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType;
+  };
+  static_assert( sizeof( PhysicalDeviceExternalFenceInfo ) == sizeof( VkPhysicalDeviceExternalFenceInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceExternalFenceInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceExternalImageFormatInfo
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+      : handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo ) - offsetof( PhysicalDeviceExternalImageFormatInfo, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceExternalImageFormatInfo& operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceExternalImageFormatInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceExternalImageFormatInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceExternalImageFormatInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExternalImageFormatInfo*>( this );
+    }
+
+    operator VkPhysicalDeviceExternalImageFormatInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExternalImageFormatInfo*>( this );
+    }
+
+    bool operator==( PhysicalDeviceExternalImageFormatInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( PhysicalDeviceExternalImageFormatInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType;
+  };
+  static_assert( sizeof( PhysicalDeviceExternalImageFormatInfo ) == sizeof( VkPhysicalDeviceExternalImageFormatInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceExternalImageFormatInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceExternalMemoryHostPropertiesEXT
+  {
+    PhysicalDeviceExternalMemoryHostPropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : minImportedHostPointerAlignment( minImportedHostPointerAlignment_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT ) - offsetof( PhysicalDeviceExternalMemoryHostPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceExternalMemoryHostPropertiesEXT& operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExternalMemoryHostPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment );
+    }
+
+    bool operator!=( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment;
+  };
+  static_assert( sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) == sizeof( VkPhysicalDeviceExternalMemoryHostPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceExternalMemoryHostPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceExternalSemaphoreInfo
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+      : handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo ) - offsetof( PhysicalDeviceExternalSemaphoreInfo, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceExternalSemaphoreInfo& operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceExternalSemaphoreInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceExternalSemaphoreInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceExternalSemaphoreInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( this );
+    }
+
+    operator VkPhysicalDeviceExternalSemaphoreInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExternalSemaphoreInfo*>( this );
+    }
+
+    bool operator==( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType;
+  };
+  static_assert( sizeof( PhysicalDeviceExternalSemaphoreInfo ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceExternalSemaphoreInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceFeatures2
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures() ) VULKAN_HPP_NOEXCEPT
+      : features( features_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 ) - offsetof( PhysicalDeviceFeatures2, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceFeatures2& operator=( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceFeatures2 & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures2 & setFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ ) VULKAN_HPP_NOEXCEPT
+    {
+      features = features_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceFeatures2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFeatures2*>( this );
+    }
+
+    operator VkPhysicalDeviceFeatures2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFeatures2*>( this );
+    }
+
+    bool operator==( PhysicalDeviceFeatures2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( features == rhs.features );
+    }
+
+    bool operator!=( PhysicalDeviceFeatures2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFeatures2;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features;
+  };
+  static_assert( sizeof( PhysicalDeviceFeatures2 ) == sizeof( VkPhysicalDeviceFeatures2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFeatures2>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceFloatControlsPropertiesKHR
+  {
+    PhysicalDeviceFloatControlsPropertiesKHR( VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependenceKHR denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependenceKHR::e32BitOnly,
+                                              VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependenceKHR roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependenceKHR::e32BitOnly,
+                                              VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = 0,
+                                              VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = 0,
+                                              VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = 0,
+                                              VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = 0,
+                                              VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = 0,
+                                              VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = 0,
+                                              VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = 0,
+                                              VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = 0,
+                                              VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = 0,
+                                              VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = 0,
+                                              VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = 0,
+                                              VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = 0,
+                                              VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = 0,
+                                              VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = 0,
+                                              VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : denormBehaviorIndependence( denormBehaviorIndependence_ )
+      , roundingModeIndependence( roundingModeIndependence_ )
+      , shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ )
+      , shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ )
+      , shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ )
+      , shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ )
+      , shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ )
+      , shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ )
+      , shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ )
+      , shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ )
+      , shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ )
+      , shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ )
+      , shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ )
+      , shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ )
+      , shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ )
+      , shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ )
+      , shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsPropertiesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsPropertiesKHR ) - offsetof( PhysicalDeviceFloatControlsPropertiesKHR, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceFloatControlsPropertiesKHR( VkPhysicalDeviceFloatControlsPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceFloatControlsPropertiesKHR& operator=( VkPhysicalDeviceFloatControlsPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsPropertiesKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceFloatControlsPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFloatControlsPropertiesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceFloatControlsPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFloatControlsPropertiesKHR*>( this );
+    }
+
+    bool operator==( PhysicalDeviceFloatControlsPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( denormBehaviorIndependence == rhs.denormBehaviorIndependence )
+          && ( roundingModeIndependence == rhs.roundingModeIndependence )
+          && ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 )
+          && ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 )
+          && ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 )
+          && ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 )
+          && ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 )
+          && ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 )
+          && ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 )
+          && ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 )
+          && ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 )
+          && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 )
+          && ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 )
+          && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 )
+          && ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 )
+          && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 )
+          && ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 );
+    }
+
+    bool operator!=( PhysicalDeviceFloatControlsPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFloatControlsPropertiesKHR;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependenceKHR denormBehaviorIndependence;
+    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependenceKHR roundingModeIndependence;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64;
+  };
+  static_assert( sizeof( PhysicalDeviceFloatControlsPropertiesKHR ) == sizeof( VkPhysicalDeviceFloatControlsPropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFloatControlsPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceFragmentDensityMapFeaturesEXT
+  {
+    PhysicalDeviceFragmentDensityMapFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : fragmentDensityMap( fragmentDensityMap_ )
+      , fragmentDensityMapDynamic( fragmentDensityMapDynamic_ )
+      , fragmentDensityMapNonSubsampledImages( fragmentDensityMapNonSubsampledImages_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT ) - offsetof( PhysicalDeviceFragmentDensityMapFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceFragmentDensityMapFeaturesEXT( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceFragmentDensityMapFeaturesEXT& operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapFeaturesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceFragmentDensityMapFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fragmentDensityMap == rhs.fragmentDensityMap )
+          && ( fragmentDensityMapDynamic == rhs.fragmentDensityMapDynamic )
+          && ( fragmentDensityMapNonSubsampledImages == rhs.fragmentDensityMapNonSubsampledImages );
+    }
+
+    bool operator!=( PhysicalDeviceFragmentDensityMapFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap;
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic;
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages;
+  };
+  static_assert( sizeof( PhysicalDeviceFragmentDensityMapFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFragmentDensityMapFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceFragmentDensityMapPropertiesEXT
+  {
+    PhysicalDeviceFragmentDensityMapPropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize_ = VULKAN_HPP_NAMESPACE::Extent2D(),
+                                                   VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize_ = VULKAN_HPP_NAMESPACE::Extent2D(),
+                                                   VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : minFragmentDensityTexelSize( minFragmentDensityTexelSize_ )
+      , maxFragmentDensityTexelSize( maxFragmentDensityTexelSize_ )
+      , fragmentDensityInvocations( fragmentDensityInvocations_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT ) - offsetof( PhysicalDeviceFragmentDensityMapPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceFragmentDensityMapPropertiesEXT( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceFragmentDensityMapPropertiesEXT& operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapPropertiesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceFragmentDensityMapPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( minFragmentDensityTexelSize == rhs.minFragmentDensityTexelSize )
+          && ( maxFragmentDensityTexelSize == rhs.maxFragmentDensityTexelSize )
+          && ( fragmentDensityInvocations == rhs.fragmentDensityInvocations );
+    }
+
+    bool operator!=( PhysicalDeviceFragmentDensityMapPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize;
+    VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize;
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations;
+  };
+  static_assert( sizeof( PhysicalDeviceFragmentDensityMapPropertiesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFragmentDensityMapPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : fragmentShaderBarycentric( fragmentShaderBarycentric_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) - offsetof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceFragmentShaderBarycentricFeaturesNV& operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShaderBarycentricFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShaderBarycentricFeaturesNV & setFragmentShaderBarycentric( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentShaderBarycentric = fragmentShaderBarycentric_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV*>( this );
+    }
+
+    bool operator==( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fragmentShaderBarycentric == rhs.fragmentShaderBarycentric );
+    }
+
+    bool operator!=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric;
+  };
+  static_assert( sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) == sizeof( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFragmentShaderBarycentricFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ = 0,
+                                                                           VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ = 0,
+                                                                           VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : fragmentShaderSampleInterlock( fragmentShaderSampleInterlock_ )
+      , fragmentShaderPixelInterlock( fragmentShaderPixelInterlock_ )
+      , fragmentShaderShadingRateInterlock( fragmentShaderShadingRateInterlock_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) - offsetof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceFragmentShaderInterlockFeaturesEXT& operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderSampleInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentShaderSampleInterlock = fragmentShaderSampleInterlock_;
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderPixelInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentShaderPixelInterlock = fragmentShaderPixelInterlock_;
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderShadingRateInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentShaderShadingRateInterlock = fragmentShaderShadingRateInterlock_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fragmentShaderSampleInterlock == rhs.fragmentShaderSampleInterlock )
+          && ( fragmentShaderPixelInterlock == rhs.fragmentShaderPixelInterlock )
+          && ( fragmentShaderShadingRateInterlock == rhs.fragmentShaderShadingRateInterlock );
+    }
+
+    bool operator!=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock;
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock;
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock;
+  };
+  static_assert( sizeof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFragmentShaderInterlockFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceGroupProperties
+  {
+    PhysicalDeviceGroupProperties( uint32_t physicalDeviceCount_ = 0,
+                                   std::array<VULKAN_HPP_NAMESPACE::PhysicalDevice,VK_MAX_DEVICE_GROUP_SIZE> const& physicalDevices_ = { { VULKAN_HPP_NAMESPACE::PhysicalDevice() } },
+                                   VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : physicalDeviceCount( physicalDeviceCount_ )
+      , physicalDevices{}
+      , subsetAllocation( subsetAllocation_ )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<VULKAN_HPP_NAMESPACE::PhysicalDevice,VK_MAX_DEVICE_GROUP_SIZE,VK_MAX_DEVICE_GROUP_SIZE>::copy( physicalDevices, physicalDevices_ );
+    }
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties ) - offsetof( PhysicalDeviceGroupProperties, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceGroupProperties( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceGroupProperties& operator=( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceGroupProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceGroupProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceGroupProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceGroupProperties*>( this );
+    }
+
+    bool operator==( PhysicalDeviceGroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( physicalDeviceCount == rhs.physicalDeviceCount )
+          && ( memcmp( physicalDevices, rhs.physicalDevices, VK_MAX_DEVICE_GROUP_SIZE * sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) ) == 0 )
+          && ( subsetAllocation == rhs.subsetAllocation );
+    }
+
+    bool operator!=( PhysicalDeviceGroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGroupProperties;
+    void* pNext = nullptr;
+    uint32_t physicalDeviceCount;
+    VULKAN_HPP_NAMESPACE::PhysicalDevice physicalDevices[VK_MAX_DEVICE_GROUP_SIZE];
+    VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation;
+  };
+  static_assert( sizeof( PhysicalDeviceGroupProperties ) == sizeof( VkPhysicalDeviceGroupProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceGroupProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceHostQueryResetFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : hostQueryReset( hostQueryReset_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeaturesEXT ) - offsetof( PhysicalDeviceHostQueryResetFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceHostQueryResetFeaturesEXT( VkPhysicalDeviceHostQueryResetFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceHostQueryResetFeaturesEXT& operator=( VkPhysicalDeviceHostQueryResetFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceHostQueryResetFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceHostQueryResetFeaturesEXT & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hostQueryReset = hostQueryReset_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceHostQueryResetFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceHostQueryResetFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceHostQueryResetFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceHostQueryResetFeaturesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceHostQueryResetFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( hostQueryReset == rhs.hostQueryReset );
+    }
+
+    bool operator!=( PhysicalDeviceHostQueryResetFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostQueryResetFeaturesEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset;
+  };
+  static_assert( sizeof( PhysicalDeviceHostQueryResetFeaturesEXT ) == sizeof( VkPhysicalDeviceHostQueryResetFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceHostQueryResetFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceIDProperties
+  {
+    PhysicalDeviceIDProperties( std::array<uint8_t,VK_UUID_SIZE> const& deviceUUID_ = { { 0 } },
+                                std::array<uint8_t,VK_UUID_SIZE> const& driverUUID_ = { { 0 } },
+                                std::array<uint8_t,VK_LUID_SIZE> const& deviceLUID_ = { { 0 } },
+                                uint32_t deviceNodeMask_ = 0,
+                                VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : deviceUUID{}
+      , driverUUID{}
+      , deviceLUID{}
+      , deviceNodeMask( deviceNodeMask_ )
+      , deviceLUIDValid( deviceLUIDValid_ )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<uint8_t,VK_UUID_SIZE,VK_UUID_SIZE>::copy( deviceUUID, deviceUUID_ );
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<uint8_t,VK_UUID_SIZE,VK_UUID_SIZE>::copy( driverUUID, driverUUID_ );
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<uint8_t,VK_LUID_SIZE,VK_LUID_SIZE>::copy( deviceLUID, deviceLUID_ );
+    }
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties ) - offsetof( PhysicalDeviceIDProperties, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceIDProperties( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceIDProperties& operator=( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceIDProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceIDProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceIDProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceIDProperties*>( this );
+    }
+
+    bool operator==( PhysicalDeviceIDProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memcmp( deviceUUID, rhs.deviceUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 )
+          && ( memcmp( driverUUID, rhs.driverUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 )
+          && ( memcmp( deviceLUID, rhs.deviceLUID, VK_LUID_SIZE * sizeof( uint8_t ) ) == 0 )
+          && ( deviceNodeMask == rhs.deviceNodeMask )
+          && ( deviceLUIDValid == rhs.deviceLUIDValid );
+    }
+
+    bool operator!=( PhysicalDeviceIDProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIdProperties;
+    void* pNext = nullptr;
+    uint8_t deviceUUID[VK_UUID_SIZE];
+    uint8_t driverUUID[VK_UUID_SIZE];
+    uint8_t deviceLUID[VK_LUID_SIZE];
+    uint32_t deviceNodeMask;
+    VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid;
+  };
+  static_assert( sizeof( PhysicalDeviceIDProperties ) == sizeof( VkPhysicalDeviceIDProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceIDProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceImageDrmFormatModifierInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_ = 0,
+                                                                      VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
+                                                                      uint32_t queueFamilyIndexCount_ = 0,
+                                                                      const uint32_t* pQueueFamilyIndices_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : drmFormatModifier( drmFormatModifier_ )
+      , sharingMode( sharingMode_ )
+      , queueFamilyIndexCount( queueFamilyIndexCount_ )
+      , pQueueFamilyIndices( pQueueFamilyIndices_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT ) - offsetof( PhysicalDeviceImageDrmFormatModifierInfoEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceImageDrmFormatModifierInfoEXT& operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceImageDrmFormatModifierInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceImageDrmFormatModifierInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drmFormatModifier = drmFormatModifier_;
+      return *this;
+    }
+
+    PhysicalDeviceImageDrmFormatModifierInfoEXT & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sharingMode = sharingMode_;
+      return *this;
+    }
+
+    PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndexCount = queueFamilyIndexCount_;
+      return *this;
+    }
+
+    PhysicalDeviceImageDrmFormatModifierInfoEXT & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueueFamilyIndices = pQueueFamilyIndices_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceImageDrmFormatModifierInfoEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceImageDrmFormatModifierInfoEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceImageDrmFormatModifierInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( drmFormatModifier == rhs.drmFormatModifier )
+          && ( sharingMode == rhs.sharingMode )
+          && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
+          && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
+    }
+
+    bool operator!=( PhysicalDeviceImageDrmFormatModifierInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT;
+    const void* pNext = nullptr;
+    uint64_t drmFormatModifier;
+    VULKAN_HPP_NAMESPACE::SharingMode sharingMode;
+    uint32_t queueFamilyIndexCount;
+    const uint32_t* pQueueFamilyIndices;
+  };
+  static_assert( sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) == sizeof( VkPhysicalDeviceImageDrmFormatModifierInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceImageDrmFormatModifierInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceImageFormatInfo2
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                                         VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D,
+                                                         VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal,
+                                                         VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = VULKAN_HPP_NAMESPACE::ImageUsageFlags(),
+                                                         VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = VULKAN_HPP_NAMESPACE::ImageCreateFlags() ) VULKAN_HPP_NOEXCEPT
+      : format( format_ )
+      , type( type_ )
+      , tiling( tiling_ )
+      , usage( usage_ )
+      , flags( flags_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 ) - offsetof( PhysicalDeviceImageFormatInfo2, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceImageFormatInfo2& operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceImageFormatInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    PhysicalDeviceImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    PhysicalDeviceImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tiling = tiling_;
+      return *this;
+    }
+
+    PhysicalDeviceImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    PhysicalDeviceImageFormatInfo2 & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceImageFormatInfo2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( this );
+    }
+
+    operator VkPhysicalDeviceImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceImageFormatInfo2*>( this );
+    }
+
+    bool operator==( PhysicalDeviceImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( format == rhs.format )
+          && ( type == rhs.type )
+          && ( tiling == rhs.tiling )
+          && ( usage == rhs.usage )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( PhysicalDeviceImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Format format;
+    VULKAN_HPP_NAMESPACE::ImageType type;
+    VULKAN_HPP_NAMESPACE::ImageTiling tiling;
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage;
+    VULKAN_HPP_NAMESPACE::ImageCreateFlags flags;
+  };
+  static_assert( sizeof( PhysicalDeviceImageFormatInfo2 ) == sizeof( VkPhysicalDeviceImageFormatInfo2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceImageFormatInfo2>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceImageViewImageFormatInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D ) VULKAN_HPP_NOEXCEPT
+      : imageViewType( imageViewType_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT ) - offsetof( PhysicalDeviceImageViewImageFormatInfoEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceImageViewImageFormatInfoEXT& operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceImageViewImageFormatInfoEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceImageViewImageFormatInfoEXT & setImageViewType( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageViewType = imageViewType_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceImageViewImageFormatInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceImageViewImageFormatInfoEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceImageViewImageFormatInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceImageViewImageFormatInfoEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceImageViewImageFormatInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( imageViewType == rhs.imageViewType );
+    }
+
+    bool operator!=( PhysicalDeviceImageViewImageFormatInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ImageViewType imageViewType;
+  };
+  static_assert( sizeof( PhysicalDeviceImageViewImageFormatInfoEXT ) == sizeof( VkPhysicalDeviceImageViewImageFormatInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceImageViewImageFormatInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceImagelessFramebufferFeaturesKHR
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : imagelessFramebuffer( imagelessFramebuffer_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeaturesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeaturesKHR ) - offsetof( PhysicalDeviceImagelessFramebufferFeaturesKHR, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceImagelessFramebufferFeaturesKHR( VkPhysicalDeviceImagelessFramebufferFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceImagelessFramebufferFeaturesKHR& operator=( VkPhysicalDeviceImagelessFramebufferFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeaturesKHR const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceImagelessFramebufferFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceImagelessFramebufferFeaturesKHR & setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imagelessFramebuffer = imagelessFramebuffer_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceImagelessFramebufferFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceImagelessFramebufferFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceImagelessFramebufferFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceImagelessFramebufferFeaturesKHR*>( this );
+    }
+
+    bool operator==( PhysicalDeviceImagelessFramebufferFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( imagelessFramebuffer == rhs.imagelessFramebuffer );
+    }
+
+    bool operator!=( PhysicalDeviceImagelessFramebufferFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImagelessFramebufferFeaturesKHR;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer;
+  };
+  static_assert( sizeof( PhysicalDeviceImagelessFramebufferFeaturesKHR ) == sizeof( VkPhysicalDeviceImagelessFramebufferFeaturesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceImagelessFramebufferFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceIndexTypeUint8FeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : indexTypeUint8( indexTypeUint8_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT ) - offsetof( PhysicalDeviceIndexTypeUint8FeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceIndexTypeUint8FeaturesEXT( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceIndexTypeUint8FeaturesEXT& operator=( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceIndexTypeUint8FeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceIndexTypeUint8FeaturesEXT & setIndexTypeUint8( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexTypeUint8 = indexTypeUint8_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceIndexTypeUint8FeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceIndexTypeUint8FeaturesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceIndexTypeUint8FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( indexTypeUint8 == rhs.indexTypeUint8 );
+    }
+
+    bool operator!=( PhysicalDeviceIndexTypeUint8FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8;
+  };
+  static_assert( sizeof( PhysicalDeviceIndexTypeUint8FeaturesEXT ) == sizeof( VkPhysicalDeviceIndexTypeUint8FeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceIndexTypeUint8FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceInlineUniformBlockFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = 0,
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : inlineUniformBlock( inlineUniformBlock_ )
+      , descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeaturesEXT ) - offsetof( PhysicalDeviceInlineUniformBlockFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceInlineUniformBlockFeaturesEXT( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceInlineUniformBlockFeaturesEXT& operator=( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceInlineUniformBlockFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceInlineUniformBlockFeaturesEXT & setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inlineUniformBlock = inlineUniformBlock_;
+      return *this;
+    }
+
+    PhysicalDeviceInlineUniformBlockFeaturesEXT & setDescriptorBindingInlineUniformBlockUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceInlineUniformBlockFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceInlineUniformBlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockFeaturesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceInlineUniformBlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( inlineUniformBlock == rhs.inlineUniformBlock )
+          && ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind );
+    }
+
+    bool operator!=( PhysicalDeviceInlineUniformBlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock;
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind;
+  };
+  static_assert( sizeof( PhysicalDeviceInlineUniformBlockFeaturesEXT ) == sizeof( VkPhysicalDeviceInlineUniformBlockFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceInlineUniformBlockFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceInlineUniformBlockPropertiesEXT
+  {
+    PhysicalDeviceInlineUniformBlockPropertiesEXT( uint32_t maxInlineUniformBlockSize_ = 0,
+                                                   uint32_t maxPerStageDescriptorInlineUniformBlocks_ = 0,
+                                                   uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = 0,
+                                                   uint32_t maxDescriptorSetInlineUniformBlocks_ = 0,
+                                                   uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : maxInlineUniformBlockSize( maxInlineUniformBlockSize_ )
+      , maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ )
+      , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ )
+      , maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ )
+      , maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockPropertiesEXT ) - offsetof( PhysicalDeviceInlineUniformBlockPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceInlineUniformBlockPropertiesEXT( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceInlineUniformBlockPropertiesEXT& operator=( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceInlineUniformBlockPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceInlineUniformBlockPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockPropertiesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceInlineUniformBlockPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize )
+          && ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks )
+          && ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks )
+          && ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks )
+          && ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks );
+    }
+
+    bool operator!=( PhysicalDeviceInlineUniformBlockPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT;
+    void* pNext = nullptr;
+    uint32_t maxInlineUniformBlockSize;
+    uint32_t maxPerStageDescriptorInlineUniformBlocks;
+    uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks;
+    uint32_t maxDescriptorSetInlineUniformBlocks;
+    uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks;
+  };
+  static_assert( sizeof( PhysicalDeviceInlineUniformBlockPropertiesEXT ) == sizeof( VkPhysicalDeviceInlineUniformBlockPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceInlineUniformBlockPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceLimits
+  {
+    PhysicalDeviceLimits( uint32_t maxImageDimension1D_ = 0,
+                          uint32_t maxImageDimension2D_ = 0,
+                          uint32_t maxImageDimension3D_ = 0,
+                          uint32_t maxImageDimensionCube_ = 0,
+                          uint32_t maxImageArrayLayers_ = 0,
+                          uint32_t maxTexelBufferElements_ = 0,
+                          uint32_t maxUniformBufferRange_ = 0,
+                          uint32_t maxStorageBufferRange_ = 0,
+                          uint32_t maxPushConstantsSize_ = 0,
+                          uint32_t maxMemoryAllocationCount_ = 0,
+                          uint32_t maxSamplerAllocationCount_ = 0,
+                          VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity_ = 0,
+                          VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize_ = 0,
+                          uint32_t maxBoundDescriptorSets_ = 0,
+                          uint32_t maxPerStageDescriptorSamplers_ = 0,
+                          uint32_t maxPerStageDescriptorUniformBuffers_ = 0,
+                          uint32_t maxPerStageDescriptorStorageBuffers_ = 0,
+                          uint32_t maxPerStageDescriptorSampledImages_ = 0,
+                          uint32_t maxPerStageDescriptorStorageImages_ = 0,
+                          uint32_t maxPerStageDescriptorInputAttachments_ = 0,
+                          uint32_t maxPerStageResources_ = 0,
+                          uint32_t maxDescriptorSetSamplers_ = 0,
+                          uint32_t maxDescriptorSetUniformBuffers_ = 0,
+                          uint32_t maxDescriptorSetUniformBuffersDynamic_ = 0,
+                          uint32_t maxDescriptorSetStorageBuffers_ = 0,
+                          uint32_t maxDescriptorSetStorageBuffersDynamic_ = 0,
+                          uint32_t maxDescriptorSetSampledImages_ = 0,
+                          uint32_t maxDescriptorSetStorageImages_ = 0,
+                          uint32_t maxDescriptorSetInputAttachments_ = 0,
+                          uint32_t maxVertexInputAttributes_ = 0,
+                          uint32_t maxVertexInputBindings_ = 0,
+                          uint32_t maxVertexInputAttributeOffset_ = 0,
+                          uint32_t maxVertexInputBindingStride_ = 0,
+                          uint32_t maxVertexOutputComponents_ = 0,
+                          uint32_t maxTessellationGenerationLevel_ = 0,
+                          uint32_t maxTessellationPatchSize_ = 0,
+                          uint32_t maxTessellationControlPerVertexInputComponents_ = 0,
+                          uint32_t maxTessellationControlPerVertexOutputComponents_ = 0,
+                          uint32_t maxTessellationControlPerPatchOutputComponents_ = 0,
+                          uint32_t maxTessellationControlTotalOutputComponents_ = 0,
+                          uint32_t maxTessellationEvaluationInputComponents_ = 0,
+                          uint32_t maxTessellationEvaluationOutputComponents_ = 0,
+                          uint32_t maxGeometryShaderInvocations_ = 0,
+                          uint32_t maxGeometryInputComponents_ = 0,
+                          uint32_t maxGeometryOutputComponents_ = 0,
+                          uint32_t maxGeometryOutputVertices_ = 0,
+                          uint32_t maxGeometryTotalOutputComponents_ = 0,
+                          uint32_t maxFragmentInputComponents_ = 0,
+                          uint32_t maxFragmentOutputAttachments_ = 0,
+                          uint32_t maxFragmentDualSrcAttachments_ = 0,
+                          uint32_t maxFragmentCombinedOutputResources_ = 0,
+                          uint32_t maxComputeSharedMemorySize_ = 0,
+                          std::array<uint32_t,3> const& maxComputeWorkGroupCount_ = { { 0 } },
+                          uint32_t maxComputeWorkGroupInvocations_ = 0,
+                          std::array<uint32_t,3> const& maxComputeWorkGroupSize_ = { { 0 } },
+                          uint32_t subPixelPrecisionBits_ = 0,
+                          uint32_t subTexelPrecisionBits_ = 0,
+                          uint32_t mipmapPrecisionBits_ = 0,
+                          uint32_t maxDrawIndexedIndexValue_ = 0,
+                          uint32_t maxDrawIndirectCount_ = 0,
+                          float maxSamplerLodBias_ = 0,
+                          float maxSamplerAnisotropy_ = 0,
+                          uint32_t maxViewports_ = 0,
+                          std::array<uint32_t,2> const& maxViewportDimensions_ = { { 0 } },
+                          std::array<float,2> const& viewportBoundsRange_ = { { 0 } },
+                          uint32_t viewportSubPixelBits_ = 0,
+                          size_t minMemoryMapAlignment_ = 0,
+                          VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment_ = 0,
+                          VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment_ = 0,
+                          VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment_ = 0,
+                          int32_t minTexelOffset_ = 0,
+                          uint32_t maxTexelOffset_ = 0,
+                          int32_t minTexelGatherOffset_ = 0,
+                          uint32_t maxTexelGatherOffset_ = 0,
+                          float minInterpolationOffset_ = 0,
+                          float maxInterpolationOffset_ = 0,
+                          uint32_t subPixelInterpolationOffsetBits_ = 0,
+                          uint32_t maxFramebufferWidth_ = 0,
+                          uint32_t maxFramebufferHeight_ = 0,
+                          uint32_t maxFramebufferLayers_ = 0,
+                          VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts_ = VULKAN_HPP_NAMESPACE::SampleCountFlags(),
+                          VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts_ = VULKAN_HPP_NAMESPACE::SampleCountFlags(),
+                          VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts_ = VULKAN_HPP_NAMESPACE::SampleCountFlags(),
+                          VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts_ = VULKAN_HPP_NAMESPACE::SampleCountFlags(),
+                          uint32_t maxColorAttachments_ = 0,
+                          VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts_ = VULKAN_HPP_NAMESPACE::SampleCountFlags(),
+                          VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts_ = VULKAN_HPP_NAMESPACE::SampleCountFlags(),
+                          VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts_ = VULKAN_HPP_NAMESPACE::SampleCountFlags(),
+                          VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts_ = VULKAN_HPP_NAMESPACE::SampleCountFlags(),
+                          VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts_ = VULKAN_HPP_NAMESPACE::SampleCountFlags(),
+                          uint32_t maxSampleMaskWords_ = 0,
+                          VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics_ = 0,
+                          float timestampPeriod_ = 0,
+                          uint32_t maxClipDistances_ = 0,
+                          uint32_t maxCullDistances_ = 0,
+                          uint32_t maxCombinedClipAndCullDistances_ = 0,
+                          uint32_t discreteQueuePriorities_ = 0,
+                          std::array<float,2> const& pointSizeRange_ = { { 0 } },
+                          std::array<float,2> const& lineWidthRange_ = { { 0 } },
+                          float pointSizeGranularity_ = 0,
+                          float lineWidthGranularity_ = 0,
+                          VULKAN_HPP_NAMESPACE::Bool32 strictLines_ = 0,
+                          VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations_ = 0,
+                          VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment_ = 0,
+                          VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment_ = 0,
+                          VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : maxImageDimension1D( maxImageDimension1D_ )
+      , maxImageDimension2D( maxImageDimension2D_ )
+      , maxImageDimension3D( maxImageDimension3D_ )
+      , maxImageDimensionCube( maxImageDimensionCube_ )
+      , maxImageArrayLayers( maxImageArrayLayers_ )
+      , maxTexelBufferElements( maxTexelBufferElements_ )
+      , maxUniformBufferRange( maxUniformBufferRange_ )
+      , maxStorageBufferRange( maxStorageBufferRange_ )
+      , maxPushConstantsSize( maxPushConstantsSize_ )
+      , maxMemoryAllocationCount( maxMemoryAllocationCount_ )
+      , maxSamplerAllocationCount( maxSamplerAllocationCount_ )
+      , bufferImageGranularity( bufferImageGranularity_ )
+      , sparseAddressSpaceSize( sparseAddressSpaceSize_ )
+      , maxBoundDescriptorSets( maxBoundDescriptorSets_ )
+      , maxPerStageDescriptorSamplers( maxPerStageDescriptorSamplers_ )
+      , maxPerStageDescriptorUniformBuffers( maxPerStageDescriptorUniformBuffers_ )
+      , maxPerStageDescriptorStorageBuffers( maxPerStageDescriptorStorageBuffers_ )
+      , maxPerStageDescriptorSampledImages( maxPerStageDescriptorSampledImages_ )
+      , maxPerStageDescriptorStorageImages( maxPerStageDescriptorStorageImages_ )
+      , maxPerStageDescriptorInputAttachments( maxPerStageDescriptorInputAttachments_ )
+      , maxPerStageResources( maxPerStageResources_ )
+      , maxDescriptorSetSamplers( maxDescriptorSetSamplers_ )
+      , maxDescriptorSetUniformBuffers( maxDescriptorSetUniformBuffers_ )
+      , maxDescriptorSetUniformBuffersDynamic( maxDescriptorSetUniformBuffersDynamic_ )
+      , maxDescriptorSetStorageBuffers( maxDescriptorSetStorageBuffers_ )
+      , maxDescriptorSetStorageBuffersDynamic( maxDescriptorSetStorageBuffersDynamic_ )
+      , maxDescriptorSetSampledImages( maxDescriptorSetSampledImages_ )
+      , maxDescriptorSetStorageImages( maxDescriptorSetStorageImages_ )
+      , maxDescriptorSetInputAttachments( maxDescriptorSetInputAttachments_ )
+      , maxVertexInputAttributes( maxVertexInputAttributes_ )
+      , maxVertexInputBindings( maxVertexInputBindings_ )
+      , maxVertexInputAttributeOffset( maxVertexInputAttributeOffset_ )
+      , maxVertexInputBindingStride( maxVertexInputBindingStride_ )
+      , maxVertexOutputComponents( maxVertexOutputComponents_ )
+      , maxTessellationGenerationLevel( maxTessellationGenerationLevel_ )
+      , maxTessellationPatchSize( maxTessellationPatchSize_ )
+      , maxTessellationControlPerVertexInputComponents( maxTessellationControlPerVertexInputComponents_ )
+      , maxTessellationControlPerVertexOutputComponents( maxTessellationControlPerVertexOutputComponents_ )
+      , maxTessellationControlPerPatchOutputComponents( maxTessellationControlPerPatchOutputComponents_ )
+      , maxTessellationControlTotalOutputComponents( maxTessellationControlTotalOutputComponents_ )
+      , maxTessellationEvaluationInputComponents( maxTessellationEvaluationInputComponents_ )
+      , maxTessellationEvaluationOutputComponents( maxTessellationEvaluationOutputComponents_ )
+      , maxGeometryShaderInvocations( maxGeometryShaderInvocations_ )
+      , maxGeometryInputComponents( maxGeometryInputComponents_ )
+      , maxGeometryOutputComponents( maxGeometryOutputComponents_ )
+      , maxGeometryOutputVertices( maxGeometryOutputVertices_ )
+      , maxGeometryTotalOutputComponents( maxGeometryTotalOutputComponents_ )
+      , maxFragmentInputComponents( maxFragmentInputComponents_ )
+      , maxFragmentOutputAttachments( maxFragmentOutputAttachments_ )
+      , maxFragmentDualSrcAttachments( maxFragmentDualSrcAttachments_ )
+      , maxFragmentCombinedOutputResources( maxFragmentCombinedOutputResources_ )
+      , maxComputeSharedMemorySize( maxComputeSharedMemorySize_ )
+      , maxComputeWorkGroupCount{}
+      , maxComputeWorkGroupInvocations( maxComputeWorkGroupInvocations_ )
+      , maxComputeWorkGroupSize{}
+      , subPixelPrecisionBits( subPixelPrecisionBits_ )
+      , subTexelPrecisionBits( subTexelPrecisionBits_ )
+      , mipmapPrecisionBits( mipmapPrecisionBits_ )
+      , maxDrawIndexedIndexValue( maxDrawIndexedIndexValue_ )
+      , maxDrawIndirectCount( maxDrawIndirectCount_ )
+      , maxSamplerLodBias( maxSamplerLodBias_ )
+      , maxSamplerAnisotropy( maxSamplerAnisotropy_ )
+      , maxViewports( maxViewports_ )
+      , maxViewportDimensions{}
+      , viewportBoundsRange{}
+      , viewportSubPixelBits( viewportSubPixelBits_ )
+      , minMemoryMapAlignment( minMemoryMapAlignment_ )
+      , minTexelBufferOffsetAlignment( minTexelBufferOffsetAlignment_ )
+      , minUniformBufferOffsetAlignment( minUniformBufferOffsetAlignment_ )
+      , minStorageBufferOffsetAlignment( minStorageBufferOffsetAlignment_ )
+      , minTexelOffset( minTexelOffset_ )
+      , maxTexelOffset( maxTexelOffset_ )
+      , minTexelGatherOffset( minTexelGatherOffset_ )
+      , maxTexelGatherOffset( maxTexelGatherOffset_ )
+      , minInterpolationOffset( minInterpolationOffset_ )
+      , maxInterpolationOffset( maxInterpolationOffset_ )
+      , subPixelInterpolationOffsetBits( subPixelInterpolationOffsetBits_ )
+      , maxFramebufferWidth( maxFramebufferWidth_ )
+      , maxFramebufferHeight( maxFramebufferHeight_ )
+      , maxFramebufferLayers( maxFramebufferLayers_ )
+      , framebufferColorSampleCounts( framebufferColorSampleCounts_ )
+      , framebufferDepthSampleCounts( framebufferDepthSampleCounts_ )
+      , framebufferStencilSampleCounts( framebufferStencilSampleCounts_ )
+      , framebufferNoAttachmentsSampleCounts( framebufferNoAttachmentsSampleCounts_ )
+      , maxColorAttachments( maxColorAttachments_ )
+      , sampledImageColorSampleCounts( sampledImageColorSampleCounts_ )
+      , sampledImageIntegerSampleCounts( sampledImageIntegerSampleCounts_ )
+      , sampledImageDepthSampleCounts( sampledImageDepthSampleCounts_ )
+      , sampledImageStencilSampleCounts( sampledImageStencilSampleCounts_ )
+      , storageImageSampleCounts( storageImageSampleCounts_ )
+      , maxSampleMaskWords( maxSampleMaskWords_ )
+      , timestampComputeAndGraphics( timestampComputeAndGraphics_ )
+      , timestampPeriod( timestampPeriod_ )
+      , maxClipDistances( maxClipDistances_ )
+      , maxCullDistances( maxCullDistances_ )
+      , maxCombinedClipAndCullDistances( maxCombinedClipAndCullDistances_ )
+      , discreteQueuePriorities( discreteQueuePriorities_ )
+      , pointSizeRange{}
+      , lineWidthRange{}
+      , pointSizeGranularity( pointSizeGranularity_ )
+      , lineWidthGranularity( lineWidthGranularity_ )
+      , strictLines( strictLines_ )
+      , standardSampleLocations( standardSampleLocations_ )
+      , optimalBufferCopyOffsetAlignment( optimalBufferCopyOffsetAlignment_ )
+      , optimalBufferCopyRowPitchAlignment( optimalBufferCopyRowPitchAlignment_ )
+      , nonCoherentAtomSize( nonCoherentAtomSize_ )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<uint32_t,3,3>::copy( maxComputeWorkGroupCount, maxComputeWorkGroupCount_ );
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<uint32_t,3,3>::copy( maxComputeWorkGroupSize, maxComputeWorkGroupSize_ );
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<uint32_t,2,2>::copy( maxViewportDimensions, maxViewportDimensions_ );
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<float,2,2>::copy( viewportBoundsRange, viewportBoundsRange_ );
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<float,2,2>::copy( pointSizeRange, pointSizeRange_ );
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<float,2,2>::copy( lineWidthRange, lineWidthRange_ );
+    }
+
+    PhysicalDeviceLimits( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceLimits& operator=( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceLimits const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceLimits*>( this );
+    }
+
+    operator VkPhysicalDeviceLimits &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceLimits*>( this );
+    }
+
+    bool operator==( PhysicalDeviceLimits const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( maxImageDimension1D == rhs.maxImageDimension1D )
+          && ( maxImageDimension2D == rhs.maxImageDimension2D )
+          && ( maxImageDimension3D == rhs.maxImageDimension3D )
+          && ( maxImageDimensionCube == rhs.maxImageDimensionCube )
+          && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
+          && ( maxTexelBufferElements == rhs.maxTexelBufferElements )
+          && ( maxUniformBufferRange == rhs.maxUniformBufferRange )
+          && ( maxStorageBufferRange == rhs.maxStorageBufferRange )
+          && ( maxPushConstantsSize == rhs.maxPushConstantsSize )
+          && ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount )
+          && ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount )
+          && ( bufferImageGranularity == rhs.bufferImageGranularity )
+          && ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize )
+          && ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets )
+          && ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers )
+          && ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers )
+          && ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers )
+          && ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages )
+          && ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages )
+          && ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments )
+          && ( maxPerStageResources == rhs.maxPerStageResources )
+          && ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers )
+          && ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers )
+          && ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic )
+          && ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers )
+          && ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic )
+          && ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages )
+          && ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages )
+          && ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments )
+          && ( maxVertexInputAttributes == rhs.maxVertexInputAttributes )
+          && ( maxVertexInputBindings == rhs.maxVertexInputBindings )
+          && ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset )
+          && ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride )
+          && ( maxVertexOutputComponents == rhs.maxVertexOutputComponents )
+          && ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel )
+          && ( maxTessellationPatchSize == rhs.maxTessellationPatchSize )
+          && ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents )
+          && ( maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents )
+          && ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents )
+          && ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents )
+          && ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents )
+          && ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents )
+          && ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations )
+          && ( maxGeometryInputComponents == rhs.maxGeometryInputComponents )
+          && ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents )
+          && ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices )
+          && ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents )
+          && ( maxFragmentInputComponents == rhs.maxFragmentInputComponents )
+          && ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments )
+          && ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments )
+          && ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources )
+          && ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize )
+          && ( memcmp( maxComputeWorkGroupCount, rhs.maxComputeWorkGroupCount, 3 * sizeof( uint32_t ) ) == 0 )
+          && ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations )
+          && ( memcmp( maxComputeWorkGroupSize, rhs.maxComputeWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 )
+          && ( subPixelPrecisionBits == rhs.subPixelPrecisionBits )
+          && ( subTexelPrecisionBits == rhs.subTexelPrecisionBits )
+          && ( mipmapPrecisionBits == rhs.mipmapPrecisionBits )
+          && ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue )
+          && ( maxDrawIndirectCount == rhs.maxDrawIndirectCount )
+          && ( maxSamplerLodBias == rhs.maxSamplerLodBias )
+          && ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy )
+          && ( maxViewports == rhs.maxViewports )
+          && ( memcmp( maxViewportDimensions, rhs.maxViewportDimensions, 2 * sizeof( uint32_t ) ) == 0 )
+          && ( memcmp( viewportBoundsRange, rhs.viewportBoundsRange, 2 * sizeof( float ) ) == 0 )
+          && ( viewportSubPixelBits == rhs.viewportSubPixelBits )
+          && ( minMemoryMapAlignment == rhs.minMemoryMapAlignment )
+          && ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment )
+          && ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment )
+          && ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment )
+          && ( minTexelOffset == rhs.minTexelOffset )
+          && ( maxTexelOffset == rhs.maxTexelOffset )
+          && ( minTexelGatherOffset == rhs.minTexelGatherOffset )
+          && ( maxTexelGatherOffset == rhs.maxTexelGatherOffset )
+          && ( minInterpolationOffset == rhs.minInterpolationOffset )
+          && ( maxInterpolationOffset == rhs.maxInterpolationOffset )
+          && ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits )
+          && ( maxFramebufferWidth == rhs.maxFramebufferWidth )
+          && ( maxFramebufferHeight == rhs.maxFramebufferHeight )
+          && ( maxFramebufferLayers == rhs.maxFramebufferLayers )
+          && ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts )
+          && ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts )
+          && ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts )
+          && ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts )
+          && ( maxColorAttachments == rhs.maxColorAttachments )
+          && ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts )
+          && ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts )
+          && ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts )
+          && ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts )
+          && ( storageImageSampleCounts == rhs.storageImageSampleCounts )
+          && ( maxSampleMaskWords == rhs.maxSampleMaskWords )
+          && ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics )
+          && ( timestampPeriod == rhs.timestampPeriod )
+          && ( maxClipDistances == rhs.maxClipDistances )
+          && ( maxCullDistances == rhs.maxCullDistances )
+          && ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances )
+          && ( discreteQueuePriorities == rhs.discreteQueuePriorities )
+          && ( memcmp( pointSizeRange, rhs.pointSizeRange, 2 * sizeof( float ) ) == 0 )
+          && ( memcmp( lineWidthRange, rhs.lineWidthRange, 2 * sizeof( float ) ) == 0 )
+          && ( pointSizeGranularity == rhs.pointSizeGranularity )
+          && ( lineWidthGranularity == rhs.lineWidthGranularity )
+          && ( strictLines == rhs.strictLines )
+          && ( standardSampleLocations == rhs.standardSampleLocations )
+          && ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment )
+          && ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment )
+          && ( nonCoherentAtomSize == rhs.nonCoherentAtomSize );
+    }
+
+    bool operator!=( PhysicalDeviceLimits const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint32_t maxImageDimension1D;
+    uint32_t maxImageDimension2D;
+    uint32_t maxImageDimension3D;
+    uint32_t maxImageDimensionCube;
+    uint32_t maxImageArrayLayers;
+    uint32_t maxTexelBufferElements;
+    uint32_t maxUniformBufferRange;
+    uint32_t maxStorageBufferRange;
+    uint32_t maxPushConstantsSize;
+    uint32_t maxMemoryAllocationCount;
+    uint32_t maxSamplerAllocationCount;
+    VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity;
+    VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize;
+    uint32_t maxBoundDescriptorSets;
+    uint32_t maxPerStageDescriptorSamplers;
+    uint32_t maxPerStageDescriptorUniformBuffers;
+    uint32_t maxPerStageDescriptorStorageBuffers;
+    uint32_t maxPerStageDescriptorSampledImages;
+    uint32_t maxPerStageDescriptorStorageImages;
+    uint32_t maxPerStageDescriptorInputAttachments;
+    uint32_t maxPerStageResources;
+    uint32_t maxDescriptorSetSamplers;
+    uint32_t maxDescriptorSetUniformBuffers;
+    uint32_t maxDescriptorSetUniformBuffersDynamic;
+    uint32_t maxDescriptorSetStorageBuffers;
+    uint32_t maxDescriptorSetStorageBuffersDynamic;
+    uint32_t maxDescriptorSetSampledImages;
+    uint32_t maxDescriptorSetStorageImages;
+    uint32_t maxDescriptorSetInputAttachments;
+    uint32_t maxVertexInputAttributes;
+    uint32_t maxVertexInputBindings;
+    uint32_t maxVertexInputAttributeOffset;
+    uint32_t maxVertexInputBindingStride;
+    uint32_t maxVertexOutputComponents;
+    uint32_t maxTessellationGenerationLevel;
+    uint32_t maxTessellationPatchSize;
+    uint32_t maxTessellationControlPerVertexInputComponents;
+    uint32_t maxTessellationControlPerVertexOutputComponents;
+    uint32_t maxTessellationControlPerPatchOutputComponents;
+    uint32_t maxTessellationControlTotalOutputComponents;
+    uint32_t maxTessellationEvaluationInputComponents;
+    uint32_t maxTessellationEvaluationOutputComponents;
+    uint32_t maxGeometryShaderInvocations;
+    uint32_t maxGeometryInputComponents;
+    uint32_t maxGeometryOutputComponents;
+    uint32_t maxGeometryOutputVertices;
+    uint32_t maxGeometryTotalOutputComponents;
+    uint32_t maxFragmentInputComponents;
+    uint32_t maxFragmentOutputAttachments;
+    uint32_t maxFragmentDualSrcAttachments;
+    uint32_t maxFragmentCombinedOutputResources;
+    uint32_t maxComputeSharedMemorySize;
+    uint32_t maxComputeWorkGroupCount[3];
+    uint32_t maxComputeWorkGroupInvocations;
+    uint32_t maxComputeWorkGroupSize[3];
+    uint32_t subPixelPrecisionBits;
+    uint32_t subTexelPrecisionBits;
+    uint32_t mipmapPrecisionBits;
+    uint32_t maxDrawIndexedIndexValue;
+    uint32_t maxDrawIndirectCount;
+    float maxSamplerLodBias;
+    float maxSamplerAnisotropy;
+    uint32_t maxViewports;
+    uint32_t maxViewportDimensions[2];
+    float viewportBoundsRange[2];
+    uint32_t viewportSubPixelBits;
+    size_t minMemoryMapAlignment;
+    VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment;
+    VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment;
+    VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment;
+    int32_t minTexelOffset;
+    uint32_t maxTexelOffset;
+    int32_t minTexelGatherOffset;
+    uint32_t maxTexelGatherOffset;
+    float minInterpolationOffset;
+    float maxInterpolationOffset;
+    uint32_t subPixelInterpolationOffsetBits;
+    uint32_t maxFramebufferWidth;
+    uint32_t maxFramebufferHeight;
+    uint32_t maxFramebufferLayers;
+    VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts;
+    VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts;
+    VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts;
+    VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts;
+    uint32_t maxColorAttachments;
+    VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts;
+    VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts;
+    VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts;
+    VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts;
+    VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts;
+    uint32_t maxSampleMaskWords;
+    VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics;
+    float timestampPeriod;
+    uint32_t maxClipDistances;
+    uint32_t maxCullDistances;
+    uint32_t maxCombinedClipAndCullDistances;
+    uint32_t discreteQueuePriorities;
+    float pointSizeRange[2];
+    float lineWidthRange[2];
+    float pointSizeGranularity;
+    float lineWidthGranularity;
+    VULKAN_HPP_NAMESPACE::Bool32 strictLines;
+    VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations;
+    VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment;
+    VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment;
+    VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize;
+  };
+  static_assert( sizeof( PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceLimits>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceLineRasterizationFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = 0,
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = 0,
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = 0,
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = 0,
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = 0,
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : rectangularLines( rectangularLines_ )
+      , bresenhamLines( bresenhamLines_ )
+      , smoothLines( smoothLines_ )
+      , stippledRectangularLines( stippledRectangularLines_ )
+      , stippledBresenhamLines( stippledBresenhamLines_ )
+      , stippledSmoothLines( stippledSmoothLines_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT ) - offsetof( PhysicalDeviceLineRasterizationFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceLineRasterizationFeaturesEXT( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceLineRasterizationFeaturesEXT& operator=( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceLineRasterizationFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceLineRasterizationFeaturesEXT & setRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rectangularLines = rectangularLines_;
+      return *this;
+    }
+
+    PhysicalDeviceLineRasterizationFeaturesEXT & setBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bresenhamLines = bresenhamLines_;
+      return *this;
+    }
+
+    PhysicalDeviceLineRasterizationFeaturesEXT & setSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      smoothLines = smoothLines_;
+      return *this;
+    }
+
+    PhysicalDeviceLineRasterizationFeaturesEXT & setStippledRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stippledRectangularLines = stippledRectangularLines_;
+      return *this;
+    }
+
+    PhysicalDeviceLineRasterizationFeaturesEXT & setStippledBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stippledBresenhamLines = stippledBresenhamLines_;
+      return *this;
+    }
+
+    PhysicalDeviceLineRasterizationFeaturesEXT & setStippledSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stippledSmoothLines = stippledSmoothLines_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceLineRasterizationFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceLineRasterizationFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceLineRasterizationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceLineRasterizationFeaturesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceLineRasterizationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( rectangularLines == rhs.rectangularLines )
+          && ( bresenhamLines == rhs.bresenhamLines )
+          && ( smoothLines == rhs.smoothLines )
+          && ( stippledRectangularLines == rhs.stippledRectangularLines )
+          && ( stippledBresenhamLines == rhs.stippledBresenhamLines )
+          && ( stippledSmoothLines == rhs.stippledSmoothLines );
+    }
+
+    bool operator!=( PhysicalDeviceLineRasterizationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 rectangularLines;
+    VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines;
+    VULKAN_HPP_NAMESPACE::Bool32 smoothLines;
+    VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines;
+    VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines;
+    VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines;
+  };
+  static_assert( sizeof( PhysicalDeviceLineRasterizationFeaturesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceLineRasterizationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceLineRasterizationPropertiesEXT
+  {
+    PhysicalDeviceLineRasterizationPropertiesEXT( uint32_t lineSubPixelPrecisionBits_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : lineSubPixelPrecisionBits( lineSubPixelPrecisionBits_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT ) - offsetof( PhysicalDeviceLineRasterizationPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceLineRasterizationPropertiesEXT( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceLineRasterizationPropertiesEXT& operator=( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceLineRasterizationPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceLineRasterizationPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceLineRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceLineRasterizationPropertiesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceLineRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( lineSubPixelPrecisionBits == rhs.lineSubPixelPrecisionBits );
+    }
+
+    bool operator!=( PhysicalDeviceLineRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT;
+    void* pNext = nullptr;
+    uint32_t lineSubPixelPrecisionBits;
+  };
+  static_assert( sizeof( PhysicalDeviceLineRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceLineRasterizationPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceMaintenance3Properties
+  {
+    PhysicalDeviceMaintenance3Properties( uint32_t maxPerSetDescriptors_ = 0,
+                                          VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : maxPerSetDescriptors( maxPerSetDescriptors_ )
+      , maxMemoryAllocationSize( maxMemoryAllocationSize_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties ) - offsetof( PhysicalDeviceMaintenance3Properties, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceMaintenance3Properties( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceMaintenance3Properties& operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMaintenance3Properties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMaintenance3Properties*>( this );
+    }
+
+    operator VkPhysicalDeviceMaintenance3Properties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMaintenance3Properties*>( this );
+    }
+
+    bool operator==( PhysicalDeviceMaintenance3Properties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors )
+          && ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize );
+    }
+
+    bool operator!=( PhysicalDeviceMaintenance3Properties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance3Properties;
+    void* pNext = nullptr;
+    uint32_t maxPerSetDescriptors;
+    VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize;
+  };
+  static_assert( sizeof( PhysicalDeviceMaintenance3Properties ) == sizeof( VkPhysicalDeviceMaintenance3Properties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMaintenance3Properties>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceMemoryBudgetPropertiesEXT
+  {
+    PhysicalDeviceMemoryBudgetPropertiesEXT( std::array<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS> const& heapBudget_ = { { 0 } },
+                                             std::array<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS> const& heapUsage_ = { { 0 } } ) VULKAN_HPP_NOEXCEPT
+      : heapBudget{}
+      , heapUsage{}
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS,VK_MAX_MEMORY_HEAPS>::copy( heapBudget, heapBudget_ );
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS,VK_MAX_MEMORY_HEAPS>::copy( heapUsage, heapUsage_ );
+    }
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT ) - offsetof( PhysicalDeviceMemoryBudgetPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceMemoryBudgetPropertiesEXT( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceMemoryBudgetPropertiesEXT& operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMemoryBudgetPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceMemoryBudgetPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMemoryBudgetPropertiesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceMemoryBudgetPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memcmp( heapBudget, rhs.heapBudget, VK_MAX_MEMORY_HEAPS * sizeof( VULKAN_HPP_NAMESPACE::DeviceSize ) ) == 0 )
+          && ( memcmp( heapUsage, rhs.heapUsage, VK_MAX_MEMORY_HEAPS * sizeof( VULKAN_HPP_NAMESPACE::DeviceSize ) ) == 0 );
+    }
+
+    bool operator!=( PhysicalDeviceMemoryBudgetPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DeviceSize heapBudget[VK_MAX_MEMORY_HEAPS];
+    VULKAN_HPP_NAMESPACE::DeviceSize heapUsage[VK_MAX_MEMORY_HEAPS];
+  };
+  static_assert( sizeof( PhysicalDeviceMemoryBudgetPropertiesEXT ) == sizeof( VkPhysicalDeviceMemoryBudgetPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMemoryBudgetPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceMemoryPriorityFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : memoryPriority( memoryPriority_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT ) - offsetof( PhysicalDeviceMemoryPriorityFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceMemoryPriorityFeaturesEXT( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceMemoryPriorityFeaturesEXT& operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceMemoryPriorityFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceMemoryPriorityFeaturesEXT & setMemoryPriority( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryPriority = memoryPriority_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMemoryPriorityFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceMemoryPriorityFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMemoryPriorityFeaturesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceMemoryPriorityFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryPriority == rhs.memoryPriority );
+    }
+
+    bool operator!=( PhysicalDeviceMemoryPriorityFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 memoryPriority;
+  };
+  static_assert( sizeof( PhysicalDeviceMemoryPriorityFeaturesEXT ) == sizeof( VkPhysicalDeviceMemoryPriorityFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMemoryPriorityFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceMemoryProperties
+  {
+    PhysicalDeviceMemoryProperties( uint32_t memoryTypeCount_ = 0,
+                                    std::array<VULKAN_HPP_NAMESPACE::MemoryType,VK_MAX_MEMORY_TYPES> const& memoryTypes_ = { { VULKAN_HPP_NAMESPACE::MemoryType() } },
+                                    uint32_t memoryHeapCount_ = 0,
+                                    std::array<VULKAN_HPP_NAMESPACE::MemoryHeap,VK_MAX_MEMORY_HEAPS> const& memoryHeaps_ = { { VULKAN_HPP_NAMESPACE::MemoryHeap() } } ) VULKAN_HPP_NOEXCEPT
+      : memoryTypeCount( memoryTypeCount_ )
+      , memoryTypes{}
+      , memoryHeapCount( memoryHeapCount_ )
+      , memoryHeaps{}
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<VULKAN_HPP_NAMESPACE::MemoryType,VK_MAX_MEMORY_TYPES,VK_MAX_MEMORY_TYPES>::copy( memoryTypes, memoryTypes_ );
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<VULKAN_HPP_NAMESPACE::MemoryHeap,VK_MAX_MEMORY_HEAPS,VK_MAX_MEMORY_HEAPS>::copy( memoryHeaps, memoryHeaps_ );
+    }
+
+    PhysicalDeviceMemoryProperties( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceMemoryProperties& operator=( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMemoryProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceMemoryProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( this );
+    }
+
+    bool operator==( PhysicalDeviceMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( memoryTypeCount == rhs.memoryTypeCount )
+          && ( memcmp( memoryTypes, rhs.memoryTypes, VK_MAX_MEMORY_TYPES * sizeof( VULKAN_HPP_NAMESPACE::MemoryType ) ) == 0 )
+          && ( memoryHeapCount == rhs.memoryHeapCount )
+          && ( memcmp( memoryHeaps, rhs.memoryHeaps, VK_MAX_MEMORY_HEAPS * sizeof( VULKAN_HPP_NAMESPACE::MemoryHeap ) ) == 0 );
+    }
+
+    bool operator!=( PhysicalDeviceMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint32_t memoryTypeCount;
+    VULKAN_HPP_NAMESPACE::MemoryType memoryTypes[VK_MAX_MEMORY_TYPES];
+    uint32_t memoryHeapCount;
+    VULKAN_HPP_NAMESPACE::MemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS];
+  };
+  static_assert( sizeof( PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMemoryProperties>::value, "struct wrapper is not a standard layout!" );
+
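For reference, a minimal sketch (not part of the imported diff) of how the layout-compatible wrapper above interoperates with the C API through its conversion operators; the helper name is illustrative and a valid VkPhysicalDevice plus a linked Vulkan loader are assumed:

  #include <vulkan/vulkan.hpp>

  // Fill a vk:: wrapper directly from the C entry point; the non-const conversion
  // operator exposes the same storage as a VkPhysicalDeviceMemoryProperties reference.
  void fillMemoryProperties( VkPhysicalDevice physicalDevice,
                             vk::PhysicalDeviceMemoryProperties & out )
  {
    VkPhysicalDeviceMemoryProperties & cView = out;
    vkGetPhysicalDeviceMemoryProperties( physicalDevice, &cView );
  }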
+  struct PhysicalDeviceMemoryProperties2
+  {
+    PhysicalDeviceMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties() ) VULKAN_HPP_NOEXCEPT
+      : memoryProperties( memoryProperties_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 ) - offsetof( PhysicalDeviceMemoryProperties2, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceMemoryProperties2( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceMemoryProperties2& operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMemoryProperties2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties2*>( this );
+    }
+
+    operator VkPhysicalDeviceMemoryProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( this );
+    }
+
+    bool operator==( PhysicalDeviceMemoryProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryProperties == rhs.memoryProperties );
+    }
+
+    bool operator!=( PhysicalDeviceMemoryProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties;
+  };
+  static_assert( sizeof( PhysicalDeviceMemoryProperties2 ) == sizeof( VkPhysicalDeviceMemoryProperties2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMemoryProperties2>::value, "struct wrapper is not a standard layout!" );
+
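A short usage sketch for the two memory-property structs above, assuming a valid vk::PhysicalDevice, a Vulkan 1.1 loader, and the default dispatcher (the helper name is illustrative):

  #include <vulkan/vulkan.hpp>

  // Count device-local heaps reported through the Vulkan 1.1 query.
  uint32_t countDeviceLocalHeaps( vk::PhysicalDevice physicalDevice )
  {
    vk::PhysicalDeviceMemoryProperties2 memProps2 = physicalDevice.getMemoryProperties2();
    vk::PhysicalDeviceMemoryProperties const & memProps = memProps2.memoryProperties;
    uint32_t count = 0;
    for ( uint32_t i = 0; i < memProps.memoryHeapCount; ++i )
    {
      if ( memProps.memoryHeaps[i].flags & vk::MemoryHeapFlagBits::eDeviceLocal )
      {
        ++count;
      }
    }
    return count;
  }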
+  struct PhysicalDeviceMeshShaderFeaturesNV
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = 0,
+                                                             VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : taskShader( taskShader_ )
+      , meshShader( meshShader_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV ) - offsetof( PhysicalDeviceMeshShaderFeaturesNV, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceMeshShaderFeaturesNV( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceMeshShaderFeaturesNV& operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceMeshShaderFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceMeshShaderFeaturesNV & setTaskShader( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      taskShader = taskShader_;
+      return *this;
+    }
+
+    PhysicalDeviceMeshShaderFeaturesNV & setMeshShader( VULKAN_HPP_NAMESPACE::Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      meshShader = meshShader_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMeshShaderFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMeshShaderFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceMeshShaderFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesNV*>( this );
+    }
+
+    bool operator==( PhysicalDeviceMeshShaderFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( taskShader == rhs.taskShader )
+          && ( meshShader == rhs.meshShader );
+    }
+
+    bool operator!=( PhysicalDeviceMeshShaderFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 taskShader;
+    VULKAN_HPP_NAMESPACE::Bool32 meshShader;
+  };
+  static_assert( sizeof( PhysicalDeviceMeshShaderFeaturesNV ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMeshShaderFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
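A minimal sketch of enabling the features above at device creation time; it assumes queue family 0 is suitable, the device advertises VK_NV_mesh_shader, and exceptions are enabled, and the helper name is illustrative:

  #include <vulkan/vulkan.hpp>

  // Chain PhysicalDeviceMeshShaderFeaturesNV into DeviceCreateInfo::pNext so the
  // task/mesh shader stages are enabled on the created device.
  vk::Device createMeshShaderDevice( vk::PhysicalDevice physicalDevice )
  {
    float priority = 1.0f;
    vk::DeviceQueueCreateInfo queueInfo( {}, /*queueFamilyIndex=*/0, /*queueCount=*/1, &priority );
    const char * extensions[] = { VK_NV_MESH_SHADER_EXTENSION_NAME };

    vk::PhysicalDeviceMeshShaderFeaturesNV meshFeatures( VK_TRUE, VK_TRUE );

    vk::DeviceCreateInfo deviceInfo( {}, 1, &queueInfo, 0, nullptr, 1, extensions );
    deviceInfo.setPNext( &meshFeatures );
    return physicalDevice.createDevice( deviceInfo );
  }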
+  struct PhysicalDeviceMeshShaderPropertiesNV
+  {
+    PhysicalDeviceMeshShaderPropertiesNV( uint32_t maxDrawMeshTasksCount_ = 0,
+                                          uint32_t maxTaskWorkGroupInvocations_ = 0,
+                                          std::array<uint32_t,3> const& maxTaskWorkGroupSize_ = { { 0 } },
+                                          uint32_t maxTaskTotalMemorySize_ = 0,
+                                          uint32_t maxTaskOutputCount_ = 0,
+                                          uint32_t maxMeshWorkGroupInvocations_ = 0,
+                                          std::array<uint32_t,3> const& maxMeshWorkGroupSize_ = { { 0 } },
+                                          uint32_t maxMeshTotalMemorySize_ = 0,
+                                          uint32_t maxMeshOutputVertices_ = 0,
+                                          uint32_t maxMeshOutputPrimitives_ = 0,
+                                          uint32_t maxMeshMultiviewViewCount_ = 0,
+                                          uint32_t meshOutputPerVertexGranularity_ = 0,
+                                          uint32_t meshOutputPerPrimitiveGranularity_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : maxDrawMeshTasksCount( maxDrawMeshTasksCount_ )
+      , maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ )
+      , maxTaskWorkGroupSize{}
+      , maxTaskTotalMemorySize( maxTaskTotalMemorySize_ )
+      , maxTaskOutputCount( maxTaskOutputCount_ )
+      , maxMeshWorkGroupInvocations( maxMeshWorkGroupInvocations_ )
+      , maxMeshWorkGroupSize{}
+      , maxMeshTotalMemorySize( maxMeshTotalMemorySize_ )
+      , maxMeshOutputVertices( maxMeshOutputVertices_ )
+      , maxMeshOutputPrimitives( maxMeshOutputPrimitives_ )
+      , maxMeshMultiviewViewCount( maxMeshMultiviewViewCount_ )
+      , meshOutputPerVertexGranularity( meshOutputPerVertexGranularity_ )
+      , meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<uint32_t,3,3>::copy( maxTaskWorkGroupSize, maxTaskWorkGroupSize_ );
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<uint32_t,3,3>::copy( maxMeshWorkGroupSize, maxMeshWorkGroupSize_ );
+    }
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV ) - offsetof( PhysicalDeviceMeshShaderPropertiesNV, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceMeshShaderPropertiesNV( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceMeshShaderPropertiesNV& operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMeshShaderPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMeshShaderPropertiesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceMeshShaderPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMeshShaderPropertiesNV*>( this );
+    }
+
+    bool operator==( PhysicalDeviceMeshShaderPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxDrawMeshTasksCount == rhs.maxDrawMeshTasksCount )
+          && ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations )
+          && ( memcmp( maxTaskWorkGroupSize, rhs.maxTaskWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 )
+          && ( maxTaskTotalMemorySize == rhs.maxTaskTotalMemorySize )
+          && ( maxTaskOutputCount == rhs.maxTaskOutputCount )
+          && ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations )
+          && ( memcmp( maxMeshWorkGroupSize, rhs.maxMeshWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 )
+          && ( maxMeshTotalMemorySize == rhs.maxMeshTotalMemorySize )
+          && ( maxMeshOutputVertices == rhs.maxMeshOutputVertices )
+          && ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives )
+          && ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount )
+          && ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity )
+          && ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity );
+    }
+
+    bool operator!=( PhysicalDeviceMeshShaderPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV;
+    void* pNext = nullptr;
+    uint32_t maxDrawMeshTasksCount;
+    uint32_t maxTaskWorkGroupInvocations;
+    uint32_t maxTaskWorkGroupSize[3];
+    uint32_t maxTaskTotalMemorySize;
+    uint32_t maxTaskOutputCount;
+    uint32_t maxMeshWorkGroupInvocations;
+    uint32_t maxMeshWorkGroupSize[3];
+    uint32_t maxMeshTotalMemorySize;
+    uint32_t maxMeshOutputVertices;
+    uint32_t maxMeshOutputPrimitives;
+    uint32_t maxMeshMultiviewViewCount;
+    uint32_t meshOutputPerVertexGranularity;
+    uint32_t meshOutputPerPrimitiveGranularity;
+  };
+  static_assert( sizeof( PhysicalDeviceMeshShaderPropertiesNV ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMeshShaderPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceMultiviewFeatures
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( VULKAN_HPP_NAMESPACE::Bool32 multiview_ = 0,
+                                                          VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = 0,
+                                                          VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : multiview( multiview_ )
+      , multiviewGeometryShader( multiviewGeometryShader_ )
+      , multiviewTessellationShader( multiviewTessellationShader_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures ) - offsetof( PhysicalDeviceMultiviewFeatures, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceMultiviewFeatures& operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceMultiviewFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceMultiviewFeatures & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiview = multiview_;
+      return *this;
+    }
+
+    PhysicalDeviceMultiviewFeatures & setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiviewGeometryShader = multiviewGeometryShader_;
+      return *this;
+    }
+
+    PhysicalDeviceMultiviewFeatures & setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiviewTessellationShader = multiviewTessellationShader_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMultiviewFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMultiviewFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceMultiviewFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMultiviewFeatures*>( this );
+    }
+
+    bool operator==( PhysicalDeviceMultiviewFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( multiview == rhs.multiview )
+          && ( multiviewGeometryShader == rhs.multiviewGeometryShader )
+          && ( multiviewTessellationShader == rhs.multiviewTessellationShader );
+    }
+
+    bool operator!=( PhysicalDeviceMultiviewFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 multiview;
+    VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader;
+    VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader;
+  };
+  static_assert( sizeof( PhysicalDeviceMultiviewFeatures ) == sizeof( VkPhysicalDeviceMultiviewFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMultiviewFeatures>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
+  {
+    PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : perViewPositionAllComponents( perViewPositionAllComponents_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) - offsetof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX& operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX*>( this );
+    }
+
+    operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX*>( this );
+    }
+
+    bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( perViewPositionAllComponents == rhs.perViewPositionAllComponents );
+    }
+
+    bool operator!=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents;
+  };
+  static_assert( sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) == sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceMultiviewProperties
+  {
+    PhysicalDeviceMultiviewProperties( uint32_t maxMultiviewViewCount_ = 0,
+                                       uint32_t maxMultiviewInstanceIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : maxMultiviewViewCount( maxMultiviewViewCount_ )
+      , maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties ) - offsetof( PhysicalDeviceMultiviewProperties, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceMultiviewProperties( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceMultiviewProperties& operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMultiviewProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMultiviewProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceMultiviewProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMultiviewProperties*>( this );
+    }
+
+    bool operator==( PhysicalDeviceMultiviewProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxMultiviewViewCount == rhs.maxMultiviewViewCount )
+          && ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex );
+    }
+
+    bool operator!=( PhysicalDeviceMultiviewProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewProperties;
+    void* pNext = nullptr;
+    uint32_t maxMultiviewViewCount;
+    uint32_t maxMultiviewInstanceIndex;
+  };
+  static_assert( sizeof( PhysicalDeviceMultiviewProperties ) == sizeof( VkPhysicalDeviceMultiviewProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMultiviewProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDevicePCIBusInfoPropertiesEXT
+  {
+    PhysicalDevicePCIBusInfoPropertiesEXT( uint32_t pciDomain_ = 0,
+                                           uint32_t pciBus_ = 0,
+                                           uint32_t pciDevice_ = 0,
+                                           uint32_t pciFunction_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : pciDomain( pciDomain_ )
+      , pciBus( pciBus_ )
+      , pciDevice( pciDevice_ )
+      , pciFunction( pciFunction_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT ) - offsetof( PhysicalDevicePCIBusInfoPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDevicePCIBusInfoPropertiesEXT( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDevicePCIBusInfoPropertiesEXT& operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDevicePCIBusInfoPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePCIBusInfoPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDevicePCIBusInfoPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePCIBusInfoPropertiesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDevicePCIBusInfoPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pciDomain == rhs.pciDomain )
+          && ( pciBus == rhs.pciBus )
+          && ( pciDevice == rhs.pciDevice )
+          && ( pciFunction == rhs.pciFunction );
+    }
+
+    bool operator!=( PhysicalDevicePCIBusInfoPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;
+    void* pNext = nullptr;
+    uint32_t pciDomain;
+    uint32_t pciBus;
+    uint32_t pciDevice;
+    uint32_t pciFunction;
+  };
+  static_assert( sizeof( PhysicalDevicePCIBusInfoPropertiesEXT ) == sizeof( VkPhysicalDevicePCIBusInfoPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePCIBusInfoPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDevicePerformanceQueryFeaturesKHR
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ = 0,
+                                                                    VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : performanceCounterQueryPools( performanceCounterQueryPools_ )
+      , performanceCounterMultipleQueryPools( performanceCounterMultipleQueryPools_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR ) - offsetof( PhysicalDevicePerformanceQueryFeaturesKHR, pNext ) );
+      return *this;
+    }
+
+    PhysicalDevicePerformanceQueryFeaturesKHR( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDevicePerformanceQueryFeaturesKHR& operator=( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDevicePerformanceQueryFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDevicePerformanceQueryFeaturesKHR & setPerformanceCounterQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ ) VULKAN_HPP_NOEXCEPT
+    {
+      performanceCounterQueryPools = performanceCounterQueryPools_;
+      return *this;
+    }
+
+    PhysicalDevicePerformanceQueryFeaturesKHR & setPerformanceCounterMultipleQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ ) VULKAN_HPP_NOEXCEPT
+    {
+      performanceCounterMultipleQueryPools = performanceCounterMultipleQueryPools_;
+      return *this;
+    }
+
+    operator VkPhysicalDevicePerformanceQueryFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePerformanceQueryFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDevicePerformanceQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePerformanceQueryFeaturesKHR*>( this );
+    }
+
+    bool operator==( PhysicalDevicePerformanceQueryFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( performanceCounterQueryPools == rhs.performanceCounterQueryPools )
+          && ( performanceCounterMultipleQueryPools == rhs.performanceCounterMultipleQueryPools );
+    }
+
+    bool operator!=( PhysicalDevicePerformanceQueryFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools;
+    VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools;
+  };
+  static_assert( sizeof( PhysicalDevicePerformanceQueryFeaturesKHR ) == sizeof( VkPhysicalDevicePerformanceQueryFeaturesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePerformanceQueryFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDevicePerformanceQueryPropertiesKHR
+  {
+    PhysicalDevicePerformanceQueryPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : allowCommandBufferQueryCopies( allowCommandBufferQueryCopies_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR ) - offsetof( PhysicalDevicePerformanceQueryPropertiesKHR, pNext ) );
+      return *this;
+    }
+
+    PhysicalDevicePerformanceQueryPropertiesKHR( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDevicePerformanceQueryPropertiesKHR& operator=( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDevicePerformanceQueryPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePerformanceQueryPropertiesKHR*>( this );
+    }
+
+    operator VkPhysicalDevicePerformanceQueryPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePerformanceQueryPropertiesKHR*>( this );
+    }
+
+    bool operator==( PhysicalDevicePerformanceQueryPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( allowCommandBufferQueryCopies == rhs.allowCommandBufferQueryCopies );
+    }
+
+    bool operator!=( PhysicalDevicePerformanceQueryPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies;
+  };
+  static_assert( sizeof( PhysicalDevicePerformanceQueryPropertiesKHR ) == sizeof( VkPhysicalDevicePerformanceQueryPropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePerformanceQueryPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : pipelineExecutableInfo( pipelineExecutableInfo_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) - offsetof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR, pNext ) );
+      return *this;
+    }
+
+    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR& operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPipelineExecutableInfo( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineExecutableInfo = pipelineExecutableInfo_;
+      return *this;
+    }
+
+    operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR*>( this );
+    }
+
+    bool operator==( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pipelineExecutableInfo == rhs.pipelineExecutableInfo );
+    }
+
+    bool operator!=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo;
+  };
+  static_assert( sizeof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) == sizeof( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDevicePointClippingProperties
+  {
+    PhysicalDevicePointClippingProperties( VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes ) VULKAN_HPP_NOEXCEPT
+      : pointClippingBehavior( pointClippingBehavior_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties ) - offsetof( PhysicalDevicePointClippingProperties, pNext ) );
+      return *this;
+    }
+
+    PhysicalDevicePointClippingProperties( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDevicePointClippingProperties& operator=( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDevicePointClippingProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePointClippingProperties*>( this );
+    }
+
+    operator VkPhysicalDevicePointClippingProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePointClippingProperties*>( this );
+    }
+
+    bool operator==( PhysicalDevicePointClippingProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pointClippingBehavior == rhs.pointClippingBehavior );
+    }
+
+    bool operator!=( PhysicalDevicePointClippingProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePointClippingProperties;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior;
+  };
+  static_assert( sizeof( PhysicalDevicePointClippingProperties ) == sizeof( VkPhysicalDevicePointClippingProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePointClippingProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceSparseProperties
+  {
+    PhysicalDeviceSparseProperties( VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape_ = 0,
+                                    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = 0,
+                                    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_ = 0,
+                                    VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_ = 0,
+                                    VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : residencyStandard2DBlockShape( residencyStandard2DBlockShape_ )
+      , residencyStandard2DMultisampleBlockShape( residencyStandard2DMultisampleBlockShape_ )
+      , residencyStandard3DBlockShape( residencyStandard3DBlockShape_ )
+      , residencyAlignedMipSize( residencyAlignedMipSize_ )
+      , residencyNonResidentStrict( residencyNonResidentStrict_ )
+    {}
+
+    PhysicalDeviceSparseProperties( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceSparseProperties& operator=( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceSparseProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSparseProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceSparseProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSparseProperties*>( this );
+    }
+
+    bool operator==( PhysicalDeviceSparseProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape )
+          && ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape )
+          && ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape )
+          && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize )
+          && ( residencyNonResidentStrict == rhs.residencyNonResidentStrict );
+    }
+
+    bool operator!=( PhysicalDeviceSparseProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape;
+    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape;
+    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape;
+    VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize;
+    VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict;
+  };
+  static_assert( sizeof( PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSparseProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceProperties
+  {
+    PhysicalDeviceProperties( uint32_t apiVersion_ = 0,
+                              uint32_t driverVersion_ = 0,
+                              uint32_t vendorID_ = 0,
+                              uint32_t deviceID_ = 0,
+                              VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther,
+                              std::array<char,VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> const& deviceName_ = { { 0 } },
+                              std::array<uint8_t,VK_UUID_SIZE> const& pipelineCacheUUID_ = { { 0 } },
+                              VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits(),
+                              VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties() ) VULKAN_HPP_NOEXCEPT
+      : apiVersion( apiVersion_ )
+      , driverVersion( driverVersion_ )
+      , vendorID( vendorID_ )
+      , deviceID( deviceID_ )
+      , deviceType( deviceType_ )
+      , deviceName{}
+      , pipelineCacheUUID{}
+      , limits( limits_ )
+      , sparseProperties( sparseProperties_ )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<char,VK_MAX_PHYSICAL_DEVICE_NAME_SIZE,VK_MAX_PHYSICAL_DEVICE_NAME_SIZE>::copy( deviceName, deviceName_ );
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<uint8_t,VK_UUID_SIZE,VK_UUID_SIZE>::copy( pipelineCacheUUID, pipelineCacheUUID_ );
+    }
+
+    PhysicalDeviceProperties( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceProperties& operator=( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProperties*>( this );
+    }
+
+    bool operator==( PhysicalDeviceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( apiVersion == rhs.apiVersion )
+          && ( driverVersion == rhs.driverVersion )
+          && ( vendorID == rhs.vendorID )
+          && ( deviceID == rhs.deviceID )
+          && ( deviceType == rhs.deviceType )
+          && ( memcmp( deviceName, rhs.deviceName, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE * sizeof( char ) ) == 0 )
+          && ( memcmp( pipelineCacheUUID, rhs.pipelineCacheUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 )
+          && ( limits == rhs.limits )
+          && ( sparseProperties == rhs.sparseProperties );
+    }
+
+    bool operator!=( PhysicalDeviceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint32_t apiVersion;
+    uint32_t driverVersion;
+    uint32_t vendorID;
+    uint32_t deviceID;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType;
+    char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE];
+    uint8_t pipelineCacheUUID[VK_UUID_SIZE];
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties;
+  };
+  static_assert( sizeof( PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceProperties2
+  {
+    PhysicalDeviceProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties() ) VULKAN_HPP_NOEXCEPT
+      : properties( properties_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 ) - offsetof( PhysicalDeviceProperties2, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceProperties2( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceProperties2& operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceProperties2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProperties2*>( this );
+    }
+
+    operator VkPhysicalDeviceProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProperties2*>( this );
+    }
+
+    bool operator==( PhysicalDeviceProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( properties == rhs.properties );
+    }
+
+    bool operator!=( PhysicalDeviceProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProperties2;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties;
+  };
+  static_assert( sizeof( PhysicalDeviceProperties2 ) == sizeof( VkPhysicalDeviceProperties2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceProperties2>::value, "struct wrapper is not a standard layout!" );
+
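For reference, a sketch of the usual pNext-chaining pattern with PhysicalDeviceProperties2, here pulling the multiview limits shown earlier; a Vulkan 1.1 physical device is assumed and the helper name is illustrative:

  #include <vulkan/vulkan.hpp>

  // Query core properties together with an extension property struct in one call;
  // the extension struct is hooked into the chain through pNext.
  uint32_t queryMaxMultiviewViewCount( vk::PhysicalDevice physicalDevice )
  {
    vk::PhysicalDeviceMultiviewProperties multiviewProps;
    vk::PhysicalDeviceProperties2 props2;
    props2.pNext = &multiviewProps;
    physicalDevice.getProperties2( &props2 );
    return multiviewProps.maxMultiviewViewCount;
  }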
+  struct PhysicalDeviceProtectedMemoryFeatures
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : protectedMemory( protectedMemory_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures ) - offsetof( PhysicalDeviceProtectedMemoryFeatures, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceProtectedMemoryFeatures& operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceProtectedMemoryFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceProtectedMemoryFeatures & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      protectedMemory = protectedMemory_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceProtectedMemoryFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceProtectedMemoryFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryFeatures*>( this );
+    }
+
+    bool operator==( PhysicalDeviceProtectedMemoryFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( protectedMemory == rhs.protectedMemory );
+    }
+
+    bool operator!=( PhysicalDeviceProtectedMemoryFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 protectedMemory;
+  };
+  static_assert( sizeof( PhysicalDeviceProtectedMemoryFeatures ) == sizeof( VkPhysicalDeviceProtectedMemoryFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceProtectedMemoryFeatures>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceProtectedMemoryProperties
+  {
+    PhysicalDeviceProtectedMemoryProperties( VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : protectedNoFault( protectedNoFault_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties ) - offsetof( PhysicalDeviceProtectedMemoryProperties, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceProtectedMemoryProperties& operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceProtectedMemoryProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceProtectedMemoryProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties*>( this );
+    }
+
+    bool operator==( PhysicalDeviceProtectedMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( protectedNoFault == rhs.protectedNoFault );
+    }
+
+    bool operator!=( PhysicalDeviceProtectedMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryProperties;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault;
+  };
+  static_assert( sizeof( PhysicalDeviceProtectedMemoryProperties ) == sizeof( VkPhysicalDeviceProtectedMemoryProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceProtectedMemoryProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDevicePushDescriptorPropertiesKHR
+  {
+    PhysicalDevicePushDescriptorPropertiesKHR( uint32_t maxPushDescriptors_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : maxPushDescriptors( maxPushDescriptors_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR ) - offsetof( PhysicalDevicePushDescriptorPropertiesKHR, pNext ) );
+      return *this;
+    }
+
+    PhysicalDevicePushDescriptorPropertiesKHR( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDevicePushDescriptorPropertiesKHR& operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDevicePushDescriptorPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePushDescriptorPropertiesKHR*>( this );
+    }
+
+    operator VkPhysicalDevicePushDescriptorPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePushDescriptorPropertiesKHR*>( this );
+    }
+
+    bool operator==( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxPushDescriptors == rhs.maxPushDescriptors );
+    }
+
+    bool operator!=( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR;
+    void* pNext = nullptr;
+    uint32_t maxPushDescriptors;
+  };
+  static_assert( sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) == sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePushDescriptorPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceRayTracingPropertiesNV
+  {
+    PhysicalDeviceRayTracingPropertiesNV( uint32_t shaderGroupHandleSize_ = 0,
+                                          uint32_t maxRecursionDepth_ = 0,
+                                          uint32_t maxShaderGroupStride_ = 0,
+                                          uint32_t shaderGroupBaseAlignment_ = 0,
+                                          uint64_t maxGeometryCount_ = 0,
+                                          uint64_t maxInstanceCount_ = 0,
+                                          uint64_t maxTriangleCount_ = 0,
+                                          uint32_t maxDescriptorSetAccelerationStructures_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : shaderGroupHandleSize( shaderGroupHandleSize_ )
+      , maxRecursionDepth( maxRecursionDepth_ )
+      , maxShaderGroupStride( maxShaderGroupStride_ )
+      , shaderGroupBaseAlignment( shaderGroupBaseAlignment_ )
+      , maxGeometryCount( maxGeometryCount_ )
+      , maxInstanceCount( maxInstanceCount_ )
+      , maxTriangleCount( maxTriangleCount_ )
+      , maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV ) - offsetof( PhysicalDeviceRayTracingPropertiesNV, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceRayTracingPropertiesNV( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceRayTracingPropertiesNV& operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceRayTracingPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRayTracingPropertiesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceRayTracingPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRayTracingPropertiesNV*>( this );
+    }
+
+    bool operator==( PhysicalDeviceRayTracingPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize )
+          && ( maxRecursionDepth == rhs.maxRecursionDepth )
+          && ( maxShaderGroupStride == rhs.maxShaderGroupStride )
+          && ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment )
+          && ( maxGeometryCount == rhs.maxGeometryCount )
+          && ( maxInstanceCount == rhs.maxInstanceCount )
+          && ( maxTriangleCount == rhs.maxTriangleCount )
+          && ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures );
+    }
+
+    bool operator!=( PhysicalDeviceRayTracingPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesNV;
+    void* pNext = nullptr;
+    uint32_t shaderGroupHandleSize;
+    uint32_t maxRecursionDepth;
+    uint32_t maxShaderGroupStride;
+    uint32_t shaderGroupBaseAlignment;
+    uint64_t maxGeometryCount;
+    uint64_t maxInstanceCount;
+    uint64_t maxTriangleCount;
+    uint32_t maxDescriptorSetAccelerationStructures;
+  };
+  static_assert( sizeof( PhysicalDeviceRayTracingPropertiesNV ) == sizeof( VkPhysicalDeviceRayTracingPropertiesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceRayTracingPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : representativeFragmentTest( representativeFragmentTest_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) - offsetof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceRepresentativeFragmentTestFeaturesNV& operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setRepresentativeFragmentTest( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ ) VULKAN_HPP_NOEXCEPT
+    {
+      representativeFragmentTest = representativeFragmentTest_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV*>( this );
+    }
+
+    bool operator==( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( representativeFragmentTest == rhs.representativeFragmentTest );
+    }
+
+    bool operator!=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest;
+  };
+  static_assert( sizeof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) == sizeof( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceRepresentativeFragmentTestFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceSampleLocationsPropertiesEXT
+  {
+    PhysicalDeviceSampleLocationsPropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts_ = VULKAN_HPP_NAMESPACE::SampleCountFlags(),
+                                                VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = VULKAN_HPP_NAMESPACE::Extent2D(),
+                                                std::array<float,2> const& sampleLocationCoordinateRange_ = { { 0 } },
+                                                uint32_t sampleLocationSubPixelBits_ = 0,
+                                                VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : sampleLocationSampleCounts( sampleLocationSampleCounts_ )
+      , maxSampleLocationGridSize( maxSampleLocationGridSize_ )
+      , sampleLocationCoordinateRange{}
+      , sampleLocationSubPixelBits( sampleLocationSubPixelBits_ )
+      , variableSampleLocations( variableSampleLocations_ )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<float,2,2>::copy( sampleLocationCoordinateRange, sampleLocationCoordinateRange_ );
+    }
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT ) - offsetof( PhysicalDeviceSampleLocationsPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceSampleLocationsPropertiesEXT( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceSampleLocationsPropertiesEXT& operator=( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceSampleLocationsPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSampleLocationsPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceSampleLocationsPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSampleLocationsPropertiesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceSampleLocationsPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( sampleLocationSampleCounts == rhs.sampleLocationSampleCounts )
+          && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize )
+          && ( memcmp( sampleLocationCoordinateRange, rhs.sampleLocationCoordinateRange, 2 * sizeof( float ) ) == 0 )
+          && ( sampleLocationSubPixelBits == rhs.sampleLocationSubPixelBits )
+          && ( variableSampleLocations == rhs.variableSampleLocations );
+    }
+
+    bool operator!=( PhysicalDeviceSampleLocationsPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts;
+    VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize;
+    float sampleLocationCoordinateRange[2];
+    uint32_t sampleLocationSubPixelBits;
+    VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations;
+  };
+  static_assert( sizeof( PhysicalDeviceSampleLocationsPropertiesEXT ) == sizeof( VkPhysicalDeviceSampleLocationsPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSampleLocationsPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceSamplerFilterMinmaxPropertiesEXT
+  {
+    PhysicalDeviceSamplerFilterMinmaxPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = 0,
+                                                    VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ )
+      , filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxPropertiesEXT ) - offsetof( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceSamplerFilterMinmaxPropertiesEXT( VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceSamplerFilterMinmaxPropertiesEXT& operator=( VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats )
+          && ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping );
+    }
+
+    bool operator!=( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats;
+    VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping;
+  };
+  static_assert( sizeof( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT ) == sizeof( VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSamplerFilterMinmaxPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceSamplerYcbcrConversionFeatures
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : samplerYcbcrConversion( samplerYcbcrConversion_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures ) - offsetof( PhysicalDeviceSamplerYcbcrConversionFeatures, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceSamplerYcbcrConversionFeatures& operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceSamplerYcbcrConversionFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceSamplerYcbcrConversionFeatures & setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samplerYcbcrConversion = samplerYcbcrConversion_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSamplerYcbcrConversionFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceSamplerYcbcrConversionFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSamplerYcbcrConversionFeatures*>( this );
+    }
+
+    bool operator==( PhysicalDeviceSamplerYcbcrConversionFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion );
+    }
+
+    bool operator!=( PhysicalDeviceSamplerYcbcrConversionFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion;
+  };
+  static_assert( sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) == sizeof( VkPhysicalDeviceSamplerYcbcrConversionFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSamplerYcbcrConversionFeatures>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceScalarBlockLayoutFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : scalarBlockLayout( scalarBlockLayout_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeaturesEXT ) - offsetof( PhysicalDeviceScalarBlockLayoutFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceScalarBlockLayoutFeaturesEXT( VkPhysicalDeviceScalarBlockLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceScalarBlockLayoutFeaturesEXT& operator=( VkPhysicalDeviceScalarBlockLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceScalarBlockLayoutFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceScalarBlockLayoutFeaturesEXT & setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      scalarBlockLayout = scalarBlockLayout_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceScalarBlockLayoutFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceScalarBlockLayoutFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceScalarBlockLayoutFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceScalarBlockLayoutFeaturesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceScalarBlockLayoutFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( scalarBlockLayout == rhs.scalarBlockLayout );
+    }
+
+    bool operator!=( PhysicalDeviceScalarBlockLayoutFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceScalarBlockLayoutFeaturesEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout;
+  };
+  static_assert( sizeof( PhysicalDeviceScalarBlockLayoutFeaturesEXT ) == sizeof( VkPhysicalDeviceScalarBlockLayoutFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceScalarBlockLayoutFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : separateDepthStencilLayouts( separateDepthStencilLayouts_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR ) - offsetof( PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR( VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR& operator=( VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR & setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      separateDepthStencilLayouts = separateDepthStencilLayouts_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR*>( this );
+    }
+
+    bool operator==( PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts );
+    }
+
+    bool operator!=( PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts;
+  };
+  static_assert( sizeof( PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR ) == sizeof( VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceShaderAtomicInt64FeaturesKHR
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = 0,
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : shaderBufferInt64Atomics( shaderBufferInt64Atomics_ )
+      , shaderSharedInt64Atomics( shaderSharedInt64Atomics_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64FeaturesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64FeaturesKHR ) - offsetof( PhysicalDeviceShaderAtomicInt64FeaturesKHR, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceShaderAtomicInt64FeaturesKHR( VkPhysicalDeviceShaderAtomicInt64FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceShaderAtomicInt64FeaturesKHR& operator=( VkPhysicalDeviceShaderAtomicInt64FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64FeaturesKHR const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceShaderAtomicInt64FeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderAtomicInt64FeaturesKHR & setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferInt64Atomics = shaderBufferInt64Atomics_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderAtomicInt64FeaturesKHR & setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedInt64Atomics = shaderSharedInt64Atomics_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShaderAtomicInt64FeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicInt64FeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderAtomicInt64FeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderAtomicInt64FeaturesKHR*>( this );
+    }
+
+    bool operator==( PhysicalDeviceShaderAtomicInt64FeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics )
+          && ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics );
+    }
+
+    bool operator!=( PhysicalDeviceShaderAtomicInt64FeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicInt64FeaturesKHR;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics;
+  };
+  static_assert( sizeof( PhysicalDeviceShaderAtomicInt64FeaturesKHR ) == sizeof( VkPhysicalDeviceShaderAtomicInt64FeaturesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderAtomicInt64FeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceShaderClockFeaturesKHR
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ = 0,
+                                                               VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : shaderSubgroupClock( shaderSubgroupClock_ )
+      , shaderDeviceClock( shaderDeviceClock_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR ) - offsetof( PhysicalDeviceShaderClockFeaturesKHR, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceShaderClockFeaturesKHR( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceShaderClockFeaturesKHR& operator=( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceShaderClockFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderClockFeaturesKHR & setShaderSubgroupClock( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSubgroupClock = shaderSubgroupClock_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderClockFeaturesKHR & setShaderDeviceClock( VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderDeviceClock = shaderDeviceClock_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShaderClockFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderClockFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderClockFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderClockFeaturesKHR*>( this );
+    }
+
+    bool operator==( PhysicalDeviceShaderClockFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderSubgroupClock == rhs.shaderSubgroupClock )
+          && ( shaderDeviceClock == rhs.shaderDeviceClock );
+    }
+
+    bool operator!=( PhysicalDeviceShaderClockFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock;
+  };
+  static_assert( sizeof( PhysicalDeviceShaderClockFeaturesKHR ) == sizeof( VkPhysicalDeviceShaderClockFeaturesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderClockFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceShaderCoreProperties2AMD
+  {
+    PhysicalDeviceShaderCoreProperties2AMD( VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures_ = VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD(),
+                                            uint32_t activeComputeUnitCount_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : shaderCoreFeatures( shaderCoreFeatures_ )
+      , activeComputeUnitCount( activeComputeUnitCount_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD ) - offsetof( PhysicalDeviceShaderCoreProperties2AMD, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceShaderCoreProperties2AMD( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceShaderCoreProperties2AMD& operator=( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShaderCoreProperties2AMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderCoreProperties2AMD*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderCoreProperties2AMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderCoreProperties2AMD*>( this );
+    }
+
+    bool operator==( PhysicalDeviceShaderCoreProperties2AMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderCoreFeatures == rhs.shaderCoreFeatures )
+          && ( activeComputeUnitCount == rhs.activeComputeUnitCount );
+    }
+
+    bool operator!=( PhysicalDeviceShaderCoreProperties2AMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures;
+    uint32_t activeComputeUnitCount;
+  };
+  static_assert( sizeof( PhysicalDeviceShaderCoreProperties2AMD ) == sizeof( VkPhysicalDeviceShaderCoreProperties2AMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderCoreProperties2AMD>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceShaderCorePropertiesAMD
+  {
+    PhysicalDeviceShaderCorePropertiesAMD( uint32_t shaderEngineCount_ = 0,
+                                           uint32_t shaderArraysPerEngineCount_ = 0,
+                                           uint32_t computeUnitsPerShaderArray_ = 0,
+                                           uint32_t simdPerComputeUnit_ = 0,
+                                           uint32_t wavefrontsPerSimd_ = 0,
+                                           uint32_t wavefrontSize_ = 0,
+                                           uint32_t sgprsPerSimd_ = 0,
+                                           uint32_t minSgprAllocation_ = 0,
+                                           uint32_t maxSgprAllocation_ = 0,
+                                           uint32_t sgprAllocationGranularity_ = 0,
+                                           uint32_t vgprsPerSimd_ = 0,
+                                           uint32_t minVgprAllocation_ = 0,
+                                           uint32_t maxVgprAllocation_ = 0,
+                                           uint32_t vgprAllocationGranularity_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : shaderEngineCount( shaderEngineCount_ )
+      , shaderArraysPerEngineCount( shaderArraysPerEngineCount_ )
+      , computeUnitsPerShaderArray( computeUnitsPerShaderArray_ )
+      , simdPerComputeUnit( simdPerComputeUnit_ )
+      , wavefrontsPerSimd( wavefrontsPerSimd_ )
+      , wavefrontSize( wavefrontSize_ )
+      , sgprsPerSimd( sgprsPerSimd_ )
+      , minSgprAllocation( minSgprAllocation_ )
+      , maxSgprAllocation( maxSgprAllocation_ )
+      , sgprAllocationGranularity( sgprAllocationGranularity_ )
+      , vgprsPerSimd( vgprsPerSimd_ )
+      , minVgprAllocation( minVgprAllocation_ )
+      , maxVgprAllocation( maxVgprAllocation_ )
+      , vgprAllocationGranularity( vgprAllocationGranularity_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD ) - offsetof( PhysicalDeviceShaderCorePropertiesAMD, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceShaderCorePropertiesAMD( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceShaderCorePropertiesAMD& operator=( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShaderCorePropertiesAMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderCorePropertiesAMD*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderCorePropertiesAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderCorePropertiesAMD*>( this );
+    }
+
+    bool operator==( PhysicalDeviceShaderCorePropertiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderEngineCount == rhs.shaderEngineCount )
+          && ( shaderArraysPerEngineCount == rhs.shaderArraysPerEngineCount )
+          && ( computeUnitsPerShaderArray == rhs.computeUnitsPerShaderArray )
+          && ( simdPerComputeUnit == rhs.simdPerComputeUnit )
+          && ( wavefrontsPerSimd == rhs.wavefrontsPerSimd )
+          && ( wavefrontSize == rhs.wavefrontSize )
+          && ( sgprsPerSimd == rhs.sgprsPerSimd )
+          && ( minSgprAllocation == rhs.minSgprAllocation )
+          && ( maxSgprAllocation == rhs.maxSgprAllocation )
+          && ( sgprAllocationGranularity == rhs.sgprAllocationGranularity )
+          && ( vgprsPerSimd == rhs.vgprsPerSimd )
+          && ( minVgprAllocation == rhs.minVgprAllocation )
+          && ( maxVgprAllocation == rhs.maxVgprAllocation )
+          && ( vgprAllocationGranularity == rhs.vgprAllocationGranularity );
+    }
+
+    bool operator!=( PhysicalDeviceShaderCorePropertiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD;
+    void* pNext = nullptr;
+    uint32_t shaderEngineCount;
+    uint32_t shaderArraysPerEngineCount;
+    uint32_t computeUnitsPerShaderArray;
+    uint32_t simdPerComputeUnit;
+    uint32_t wavefrontsPerSimd;
+    uint32_t wavefrontSize;
+    uint32_t sgprsPerSimd;
+    uint32_t minSgprAllocation;
+    uint32_t maxSgprAllocation;
+    uint32_t sgprAllocationGranularity;
+    uint32_t vgprsPerSimd;
+    uint32_t minVgprAllocation;
+    uint32_t maxVgprAllocation;
+    uint32_t vgprAllocationGranularity;
+  };
+  static_assert( sizeof( PhysicalDeviceShaderCorePropertiesAMD ) == sizeof( VkPhysicalDeviceShaderCorePropertiesAMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderCorePropertiesAMD>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ) - offsetof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT& operator=( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & setShaderDemoteToHelperInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation );
+    }
+
+    bool operator!=( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation;
+  };
+  static_assert( sizeof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceShaderDrawParametersFeatures
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : shaderDrawParameters( shaderDrawParameters_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures ) - offsetof( PhysicalDeviceShaderDrawParametersFeatures, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceShaderDrawParametersFeatures( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceShaderDrawParametersFeatures& operator=( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceShaderDrawParametersFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderDrawParametersFeatures & setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderDrawParameters = shaderDrawParameters_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShaderDrawParametersFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderDrawParametersFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderDrawParametersFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderDrawParametersFeatures*>( this );
+    }
+
+    bool operator==( PhysicalDeviceShaderDrawParametersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderDrawParameters == rhs.shaderDrawParameters );
+    }
+
+    bool operator!=( PhysicalDeviceShaderDrawParametersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters;
+  };
+  static_assert( sizeof( PhysicalDeviceShaderDrawParametersFeatures ) == sizeof( VkPhysicalDeviceShaderDrawParametersFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderDrawParametersFeatures>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceShaderFloat16Int8FeaturesKHR
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = 0,
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : shaderFloat16( shaderFloat16_ )
+      , shaderInt8( shaderInt8_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8FeaturesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8FeaturesKHR ) - offsetof( PhysicalDeviceShaderFloat16Int8FeaturesKHR, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceShaderFloat16Int8FeaturesKHR( VkPhysicalDeviceShaderFloat16Int8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceShaderFloat16Int8FeaturesKHR& operator=( VkPhysicalDeviceShaderFloat16Int8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8FeaturesKHR const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceShaderFloat16Int8FeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderFloat16Int8FeaturesKHR & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderFloat16 = shaderFloat16_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderFloat16Int8FeaturesKHR & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInt8 = shaderInt8_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShaderFloat16Int8FeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderFloat16Int8FeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderFloat16Int8FeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderFloat16Int8FeaturesKHR*>( this );
+    }
+
+    bool operator==( PhysicalDeviceShaderFloat16Int8FeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderFloat16 == rhs.shaderFloat16 )
+          && ( shaderInt8 == rhs.shaderInt8 );
+    }
+
+    bool operator!=( PhysicalDeviceShaderFloat16Int8FeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloat16Int8FeaturesKHR;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInt8;
+  };
+  static_assert( sizeof( PhysicalDeviceShaderFloat16Int8FeaturesKHR ) == sizeof( VkPhysicalDeviceShaderFloat16Int8FeaturesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderFloat16Int8FeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceShaderImageFootprintFeaturesNV
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : imageFootprint( imageFootprint_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV ) - offsetof( PhysicalDeviceShaderImageFootprintFeaturesNV, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceShaderImageFootprintFeaturesNV( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceShaderImageFootprintFeaturesNV& operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceShaderImageFootprintFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderImageFootprintFeaturesNV & setImageFootprint( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageFootprint = imageFootprint_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShaderImageFootprintFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderImageFootprintFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderImageFootprintFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderImageFootprintFeaturesNV*>( this );
+    }
+
+    bool operator==( PhysicalDeviceShaderImageFootprintFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( imageFootprint == rhs.imageFootprint );
+    }
+
+    bool operator!=( PhysicalDeviceShaderImageFootprintFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 imageFootprint;
+  };
+  static_assert( sizeof( PhysicalDeviceShaderImageFootprintFeaturesNV ) == sizeof( VkPhysicalDeviceShaderImageFootprintFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderImageFootprintFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : shaderIntegerFunctions2( shaderIntegerFunctions2_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) - offsetof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL& operator=( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setShaderIntegerFunctions2( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderIntegerFunctions2 = shaderIntegerFunctions2_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL*>( this );
+    }
+
+    bool operator==( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderIntegerFunctions2 == rhs.shaderIntegerFunctions2 );
+    }
+
+    bool operator!=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2;
+  };
+  static_assert( sizeof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) == sizeof( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceShaderSMBuiltinsFeaturesNV
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : shaderSMBuiltins( shaderSMBuiltins_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV ) - offsetof( PhysicalDeviceShaderSMBuiltinsFeaturesNV, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceShaderSMBuiltinsFeaturesNV( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceShaderSMBuiltinsFeaturesNV& operator=( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceShaderSMBuiltinsFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderSMBuiltinsFeaturesNV & setShaderSMBuiltins( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSMBuiltins = shaderSMBuiltins_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV*>( this );
+    }
+
+    bool operator==( PhysicalDeviceShaderSMBuiltinsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderSMBuiltins == rhs.shaderSMBuiltins );
+    }
+
+    bool operator!=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins;
+  };
+  static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsFeaturesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderSMBuiltinsFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceShaderSMBuiltinsPropertiesNV
+  {
+    PhysicalDeviceShaderSMBuiltinsPropertiesNV( uint32_t shaderSMCount_ = 0,
+                                                uint32_t shaderWarpsPerSM_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : shaderSMCount( shaderSMCount_ )
+      , shaderWarpsPerSM( shaderWarpsPerSM_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV ) - offsetof( PhysicalDeviceShaderSMBuiltinsPropertiesNV, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceShaderSMBuiltinsPropertiesNV( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceShaderSMBuiltinsPropertiesNV& operator=( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsPropertiesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsPropertiesNV*>( this );
+    }
+
+    bool operator==( PhysicalDeviceShaderSMBuiltinsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderSMCount == rhs.shaderSMCount )
+          && ( shaderWarpsPerSM == rhs.shaderWarpsPerSM );
+    }
+
+    bool operator!=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV;
+    void* pNext = nullptr;
+    uint32_t shaderSMCount;
+    uint32_t shaderWarpsPerSM;
+  };
+  static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsPropertiesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderSMBuiltinsPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR ) - offsetof( PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR( VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR& operator=( VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR & setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR*>( this );
+    }
+
+    bool operator==( PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes );
+    }
+
+    bool operator!=( PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes;
+  };
+  static_assert( sizeof( PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR ) == sizeof( VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceShadingRateImageFeaturesNV
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ = 0,
+                                                                   VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : shadingRateImage( shadingRateImage_ )
+      , shadingRateCoarseSampleOrder( shadingRateCoarseSampleOrder_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV ) - offsetof( PhysicalDeviceShadingRateImageFeaturesNV, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceShadingRateImageFeaturesNV( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceShadingRateImageFeaturesNV& operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceShadingRateImageFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateImage( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRateImage = shadingRateImage_;
+      return *this;
+    }
+
+    PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateCoarseSampleOrder( VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRateCoarseSampleOrder = shadingRateCoarseSampleOrder_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShadingRateImageFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShadingRateImageFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceShadingRateImageFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShadingRateImageFeaturesNV*>( this );
+    }
+
+    bool operator==( PhysicalDeviceShadingRateImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shadingRateImage == rhs.shadingRateImage )
+          && ( shadingRateCoarseSampleOrder == rhs.shadingRateCoarseSampleOrder );
+    }
+
+    bool operator!=( PhysicalDeviceShadingRateImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage;
+    VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder;
+  };
+  static_assert( sizeof( PhysicalDeviceShadingRateImageFeaturesNV ) == sizeof( VkPhysicalDeviceShadingRateImageFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShadingRateImageFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceShadingRateImagePropertiesNV
+  {
+    PhysicalDeviceShadingRateImagePropertiesNV( VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize_ = VULKAN_HPP_NAMESPACE::Extent2D(),
+                                                uint32_t shadingRatePaletteSize_ = 0,
+                                                uint32_t shadingRateMaxCoarseSamples_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : shadingRateTexelSize( shadingRateTexelSize_ )
+      , shadingRatePaletteSize( shadingRatePaletteSize_ )
+      , shadingRateMaxCoarseSamples( shadingRateMaxCoarseSamples_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV ) - offsetof( PhysicalDeviceShadingRateImagePropertiesNV, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceShadingRateImagePropertiesNV( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceShadingRateImagePropertiesNV& operator=( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShadingRateImagePropertiesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShadingRateImagePropertiesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceShadingRateImagePropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShadingRateImagePropertiesNV*>( this );
+    }
+
+    bool operator==( PhysicalDeviceShadingRateImagePropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shadingRateTexelSize == rhs.shadingRateTexelSize )
+          && ( shadingRatePaletteSize == rhs.shadingRatePaletteSize )
+          && ( shadingRateMaxCoarseSamples == rhs.shadingRateMaxCoarseSamples );
+    }
+
+    bool operator!=( PhysicalDeviceShadingRateImagePropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize;
+    uint32_t shadingRatePaletteSize;
+    uint32_t shadingRateMaxCoarseSamples;
+  };
+  static_assert( sizeof( PhysicalDeviceShadingRateImagePropertiesNV ) == sizeof( VkPhysicalDeviceShadingRateImagePropertiesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceShadingRateImagePropertiesNV>::value, "struct wrapper is not a standard layout!" );
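+  // Illustrative usage sketch (comment only, not generated code; assumes `physicalDevice`
+  // is a valid vk::PhysicalDevice): read-only property structs like the one above are
+  // filled by chaining them into PhysicalDeviceProperties2.
+  //
+  //   vk::PhysicalDeviceShadingRateImagePropertiesNV shadingRateProps;
+  //   vk::PhysicalDeviceProperties2 props2;
+  //   props2.pNext = &shadingRateProps;
+  //   physicalDevice.getProperties2( &props2 );
+  //   vk::Extent2D texelSize = shadingRateProps.shadingRateTexelSize;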
+
+  struct PhysicalDeviceSparseImageFormatInfo2
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                                               VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D,
+                                                               VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+                                                               VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = VULKAN_HPP_NAMESPACE::ImageUsageFlags(),
+                                                               VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal ) VULKAN_HPP_NOEXCEPT
+      : format( format_ )
+      , type( type_ )
+      , samples( samples_ )
+      , usage( usage_ )
+      , tiling( tiling_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 ) - offsetof( PhysicalDeviceSparseImageFormatInfo2, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceSparseImageFormatInfo2( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceSparseImageFormatInfo2& operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceSparseImageFormatInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceSparseImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    PhysicalDeviceSparseImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    PhysicalDeviceSparseImageFormatInfo2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samples = samples_;
+      return *this;
+    }
+
+    PhysicalDeviceSparseImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    PhysicalDeviceSparseImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tiling = tiling_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceSparseImageFormatInfo2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( this );
+    }
+
+    operator VkPhysicalDeviceSparseImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSparseImageFormatInfo2*>( this );
+    }
+
+    bool operator==( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( format == rhs.format )
+          && ( type == rhs.type )
+          && ( samples == rhs.samples )
+          && ( usage == rhs.usage )
+          && ( tiling == rhs.tiling );
+    }
+
+    bool operator!=( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSparseImageFormatInfo2;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Format format;
+    VULKAN_HPP_NAMESPACE::ImageType type;
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples;
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage;
+    VULKAN_HPP_NAMESPACE::ImageTiling tiling;
+  };
+  static_assert( sizeof( PhysicalDeviceSparseImageFormatInfo2 ) == sizeof( VkPhysicalDeviceSparseImageFormatInfo2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSparseImageFormatInfo2>::value, "struct wrapper is not a standard layout!" );
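+  // Illustrative usage sketch (comment only, not generated code; assumes `physicalDevice`
+  // is a valid vk::PhysicalDevice): this input struct supports fluent setters and is
+  // consumed by PhysicalDevice::getSparseImageFormatProperties2.
+  //
+  //   auto formatInfo = vk::PhysicalDeviceSparseImageFormatInfo2()
+  //                         .setFormat( vk::Format::eR8G8B8A8Unorm )
+  //                         .setType( vk::ImageType::e2D )
+  //                         .setSamples( vk::SampleCountFlagBits::e1 )
+  //                         .setUsage( vk::ImageUsageFlagBits::eSampled )
+  //                         .setTiling( vk::ImageTiling::eOptimal );
+  //   auto sparseProps = physicalDevice.getSparseImageFormatProperties2( formatInfo );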
+
+  struct PhysicalDeviceSubgroupProperties
+  {
+    PhysicalDeviceSubgroupProperties( uint32_t subgroupSize_ = 0,
+                                      VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages_ = VULKAN_HPP_NAMESPACE::ShaderStageFlags(),
+                                      VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations_ = VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags(),
+                                      VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : subgroupSize( subgroupSize_ )
+      , supportedStages( supportedStages_ )
+      , supportedOperations( supportedOperations_ )
+      , quadOperationsInAllStages( quadOperationsInAllStages_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties ) - offsetof( PhysicalDeviceSubgroupProperties, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceSubgroupProperties( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceSubgroupProperties& operator=( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceSubgroupProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSubgroupProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceSubgroupProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSubgroupProperties*>( this );
+    }
+
+    bool operator==( PhysicalDeviceSubgroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( subgroupSize == rhs.subgroupSize )
+          && ( supportedStages == rhs.supportedStages )
+          && ( supportedOperations == rhs.supportedOperations )
+          && ( quadOperationsInAllStages == rhs.quadOperationsInAllStages );
+    }
+
+    bool operator!=( PhysicalDeviceSubgroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupProperties;
+    void* pNext = nullptr;
+    uint32_t subgroupSize;
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages;
+    VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations;
+    VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages;
+  };
+  static_assert( sizeof( PhysicalDeviceSubgroupProperties ) == sizeof( VkPhysicalDeviceSubgroupProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSubgroupProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceSubgroupSizeControlFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = 0,
+                                                                       VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : subgroupSizeControl( subgroupSizeControl_ )
+      , computeFullSubgroups( computeFullSubgroups_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeaturesEXT ) - offsetof( PhysicalDeviceSubgroupSizeControlFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceSubgroupSizeControlFeaturesEXT( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceSubgroupSizeControlFeaturesEXT& operator=( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceSubgroupSizeControlFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceSubgroupSizeControlFeaturesEXT & setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subgroupSizeControl = subgroupSizeControl_;
+      return *this;
+    }
+
+    PhysicalDeviceSubgroupSizeControlFeaturesEXT & setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT
+    {
+      computeFullSubgroups = computeFullSubgroups_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceSubgroupSizeControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlFeaturesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceSubgroupSizeControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( subgroupSizeControl == rhs.subgroupSizeControl )
+          && ( computeFullSubgroups == rhs.computeFullSubgroups );
+    }
+
+    bool operator!=( PhysicalDeviceSubgroupSizeControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl;
+    VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups;
+  };
+  static_assert( sizeof( PhysicalDeviceSubgroupSizeControlFeaturesEXT ) == sizeof( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSubgroupSizeControlFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceSubgroupSizeControlPropertiesEXT
+  {
+    PhysicalDeviceSubgroupSizeControlPropertiesEXT( uint32_t minSubgroupSize_ = 0,
+                                                    uint32_t maxSubgroupSize_ = 0,
+                                                    uint32_t maxComputeWorkgroupSubgroups_ = 0,
+                                                    VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = VULKAN_HPP_NAMESPACE::ShaderStageFlags() ) VULKAN_HPP_NOEXCEPT
+      : minSubgroupSize( minSubgroupSize_ )
+      , maxSubgroupSize( maxSubgroupSize_ )
+      , maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ )
+      , requiredSubgroupSizeStages( requiredSubgroupSizeStages_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlPropertiesEXT ) - offsetof( PhysicalDeviceSubgroupSizeControlPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceSubgroupSizeControlPropertiesEXT( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceSubgroupSizeControlPropertiesEXT& operator=( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceSubgroupSizeControlPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlPropertiesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceSubgroupSizeControlPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( minSubgroupSize == rhs.minSubgroupSize )
+          && ( maxSubgroupSize == rhs.maxSubgroupSize )
+          && ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups )
+          && ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages );
+    }
+
+    bool operator!=( PhysicalDeviceSubgroupSizeControlPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT;
+    void* pNext = nullptr;
+    uint32_t minSubgroupSize;
+    uint32_t maxSubgroupSize;
+    uint32_t maxComputeWorkgroupSubgroups;
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages;
+  };
+  static_assert( sizeof( PhysicalDeviceSubgroupSizeControlPropertiesEXT ) == sizeof( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSubgroupSizeControlPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceSurfaceInfo2KHR
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = VULKAN_HPP_NAMESPACE::SurfaceKHR() ) VULKAN_HPP_NOEXCEPT
+      : surface( surface_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR ) - offsetof( PhysicalDeviceSurfaceInfo2KHR, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceSurfaceInfo2KHR& operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceSurfaceInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceSurfaceInfo2KHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
+    {
+      surface = surface_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceSurfaceInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( this );
+    }
+
+    operator VkPhysicalDeviceSurfaceInfo2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSurfaceInfo2KHR*>( this );
+    }
+
+    bool operator==( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( surface == rhs.surface );
+    }
+
+    bool operator!=( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSurfaceInfo2KHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+  };
+  static_assert( sizeof( PhysicalDeviceSurfaceInfo2KHR ) == sizeof( VkPhysicalDeviceSurfaceInfo2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSurfaceInfo2KHR>::value, "struct wrapper is not a standard layout!" );
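+  // Illustrative usage sketch (comment only, not generated code; assumes `physicalDevice`
+  // and `surface` are valid handles): this input struct feeds the
+  // VK_KHR_get_surface_capabilities2 queries.
+  //
+  //   auto surfaceInfo = vk::PhysicalDeviceSurfaceInfo2KHR().setSurface( surface );
+  //   vk::SurfaceCapabilities2KHR caps = physicalDevice.getSurfaceCapabilities2KHR( surfaceInfo );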
+
+  struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : texelBufferAlignment( texelBufferAlignment_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) - offsetof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceTexelBufferAlignmentFeaturesEXT& operator=( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setTexelBufferAlignment( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      texelBufferAlignment = texelBufferAlignment_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( texelBufferAlignment == rhs.texelBufferAlignment );
+    }
+
+    bool operator!=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment;
+  };
+  static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) == sizeof( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceTexelBufferAlignmentFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceTexelBufferAlignmentPropertiesEXT
+  {
+    PhysicalDeviceTexelBufferAlignmentPropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = 0,
+                                                     VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = 0,
+                                                     VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = 0,
+                                                     VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ )
+      , storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ )
+      , uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ )
+      , uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentPropertiesEXT ) - offsetof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceTexelBufferAlignmentPropertiesEXT( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceTexelBufferAlignmentPropertiesEXT& operator=( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes )
+          && ( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment )
+          && ( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes )
+          && ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment );
+    }
+
+    bool operator!=( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes;
+    VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment;
+    VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes;
+    VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment;
+  };
+  static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT ) == sizeof( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceTexelBufferAlignmentPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : textureCompressionASTC_HDR( textureCompressionASTC_HDR_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ) - offsetof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT& operator=( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT
+    {
+      textureCompressionASTC_HDR = textureCompressionASTC_HDR_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR );
+    }
+
+    bool operator!=( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR;
+  };
+  static_assert( sizeof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ) == sizeof( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceTimelineSemaphoreFeaturesKHR
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : timelineSemaphore( timelineSemaphore_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeaturesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeaturesKHR ) - offsetof( PhysicalDeviceTimelineSemaphoreFeaturesKHR, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceTimelineSemaphoreFeaturesKHR( VkPhysicalDeviceTimelineSemaphoreFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceTimelineSemaphoreFeaturesKHR& operator=( VkPhysicalDeviceTimelineSemaphoreFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeaturesKHR const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceTimelineSemaphoreFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceTimelineSemaphoreFeaturesKHR & setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT
+    {
+      timelineSemaphore = timelineSemaphore_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceTimelineSemaphoreFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceTimelineSemaphoreFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreFeaturesKHR*>( this );
+    }
+
+    bool operator==( PhysicalDeviceTimelineSemaphoreFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( timelineSemaphore == rhs.timelineSemaphore );
+    }
+
+    bool operator!=( PhysicalDeviceTimelineSemaphoreFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreFeaturesKHR;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore;
+  };
+  static_assert( sizeof( PhysicalDeviceTimelineSemaphoreFeaturesKHR ) == sizeof( VkPhysicalDeviceTimelineSemaphoreFeaturesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceTimelineSemaphoreFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
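+  // Illustrative usage sketch (comment only, not generated code; assumes `deviceCreateInfo`
+  // is a vk::DeviceCreateInfo being prepared): besides being queried, feature structs are
+  // also chained into DeviceCreateInfo::pNext to enable the feature at device creation.
+  //
+  //   vk::PhysicalDeviceTimelineSemaphoreFeaturesKHR timelineFeatures( VK_TRUE );
+  //   deviceCreateInfo.setPNext( &timelineFeatures );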
+
+  struct PhysicalDeviceTimelineSemaphorePropertiesKHR
+  {
+    PhysicalDeviceTimelineSemaphorePropertiesKHR( uint64_t maxTimelineSemaphoreValueDifference_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphorePropertiesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphorePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphorePropertiesKHR ) - offsetof( PhysicalDeviceTimelineSemaphorePropertiesKHR, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceTimelineSemaphorePropertiesKHR( VkPhysicalDeviceTimelineSemaphorePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceTimelineSemaphorePropertiesKHR& operator=( VkPhysicalDeviceTimelineSemaphorePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphorePropertiesKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceTimelineSemaphorePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphorePropertiesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceTimelineSemaphorePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphorePropertiesKHR*>( this );
+    }
+
+    bool operator==( PhysicalDeviceTimelineSemaphorePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference );
+    }
+
+    bool operator!=( PhysicalDeviceTimelineSemaphorePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphorePropertiesKHR;
+    void* pNext = nullptr;
+    uint64_t maxTimelineSemaphoreValueDifference;
+  };
+  static_assert( sizeof( PhysicalDeviceTimelineSemaphorePropertiesKHR ) == sizeof( VkPhysicalDeviceTimelineSemaphorePropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceTimelineSemaphorePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceToolPropertiesEXT
+  {
+    PhysicalDeviceToolPropertiesEXT( std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& name_ = { { 0 } },
+                                     std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& version_ = { { 0 } },
+                                     VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT purposes_ = VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT(),
+                                     std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = { { 0 } },
+                                     std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& layer_ = { { 0 } } ) VULKAN_HPP_NOEXCEPT
+      : name{}
+      , version{}
+      , purposes( purposes_ )
+      , description{}
+      , layer{}
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<char,VK_MAX_EXTENSION_NAME_SIZE,VK_MAX_EXTENSION_NAME_SIZE>::copy( name, name_ );
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<char,VK_MAX_EXTENSION_NAME_SIZE,VK_MAX_EXTENSION_NAME_SIZE>::copy( version, version_ );
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( description, description_ );
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<char,VK_MAX_EXTENSION_NAME_SIZE,VK_MAX_EXTENSION_NAME_SIZE>::copy( layer, layer_ );
+    }
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT ) - offsetof( PhysicalDeviceToolPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceToolPropertiesEXT( VkPhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceToolPropertiesEXT& operator=( VkPhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceToolPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceToolPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceToolPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceToolPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memcmp( name, rhs.name, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 )
+          && ( memcmp( version, rhs.version, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 )
+          && ( purposes == rhs.purposes )
+          && ( memcmp( description, rhs.description, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 )
+          && ( memcmp( layer, rhs.layer, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 );
+    }
+
+    bool operator!=( PhysicalDeviceToolPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceToolPropertiesEXT;
+    void* pNext = nullptr;
+    char name[VK_MAX_EXTENSION_NAME_SIZE];
+    char version[VK_MAX_EXTENSION_NAME_SIZE];
+    VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT purposes;
+    char description[VK_MAX_DESCRIPTION_SIZE];
+    char layer[VK_MAX_EXTENSION_NAME_SIZE];
+  };
+  static_assert( sizeof( PhysicalDeviceToolPropertiesEXT ) == sizeof( VkPhysicalDeviceToolPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceToolPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
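+  // Illustrative usage sketch (comment only, not generated code; assumes `physicalDevice`
+  // is valid and the VK_EXT_tooling_info entry points are resolved by the active dispatcher):
+  // tool properties are enumerated rather than constructed by the application.
+  //
+  //   auto tools = physicalDevice.getToolPropertiesEXT();
+  //   for ( vk::PhysicalDeviceToolPropertiesEXT const & tool : tools )
+  //   {
+  //     // tool.name and tool.purposes describe the attached layer or tool.
+  //   }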
+
+  struct PhysicalDeviceTransformFeedbackFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ = 0,
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : transformFeedback( transformFeedback_ )
+      , geometryStreams( geometryStreams_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT ) - offsetof( PhysicalDeviceTransformFeedbackFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceTransformFeedbackFeaturesEXT& operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceTransformFeedbackFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceTransformFeedbackFeaturesEXT & setTransformFeedback( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ ) VULKAN_HPP_NOEXCEPT
+    {
+      transformFeedback = transformFeedback_;
+      return *this;
+    }
+
+    PhysicalDeviceTransformFeedbackFeaturesEXT & setGeometryStreams( VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometryStreams = geometryStreams_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceTransformFeedbackFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceTransformFeedbackFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceTransformFeedbackFeaturesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceTransformFeedbackFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( transformFeedback == rhs.transformFeedback )
+          && ( geometryStreams == rhs.geometryStreams );
+    }
+
+    bool operator!=( PhysicalDeviceTransformFeedbackFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 transformFeedback;
+    VULKAN_HPP_NAMESPACE::Bool32 geometryStreams;
+  };
+  static_assert( sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceTransformFeedbackFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceTransformFeedbackPropertiesEXT
+  {
+    PhysicalDeviceTransformFeedbackPropertiesEXT( uint32_t maxTransformFeedbackStreams_ = 0,
+                                                  uint32_t maxTransformFeedbackBuffers_ = 0,
+                                                  VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize_ = 0,
+                                                  uint32_t maxTransformFeedbackStreamDataSize_ = 0,
+                                                  uint32_t maxTransformFeedbackBufferDataSize_ = 0,
+                                                  uint32_t maxTransformFeedbackBufferDataStride_ = 0,
+                                                  VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries_ = 0,
+                                                  VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles_ = 0,
+                                                  VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect_ = 0,
+                                                  VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : maxTransformFeedbackStreams( maxTransformFeedbackStreams_ )
+      , maxTransformFeedbackBuffers( maxTransformFeedbackBuffers_ )
+      , maxTransformFeedbackBufferSize( maxTransformFeedbackBufferSize_ )
+      , maxTransformFeedbackStreamDataSize( maxTransformFeedbackStreamDataSize_ )
+      , maxTransformFeedbackBufferDataSize( maxTransformFeedbackBufferDataSize_ )
+      , maxTransformFeedbackBufferDataStride( maxTransformFeedbackBufferDataStride_ )
+      , transformFeedbackQueries( transformFeedbackQueries_ )
+      , transformFeedbackStreamsLinesTriangles( transformFeedbackStreamsLinesTriangles_ )
+      , transformFeedbackRasterizationStreamSelect( transformFeedbackRasterizationStreamSelect_ )
+      , transformFeedbackDraw( transformFeedbackDraw_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT ) - offsetof( PhysicalDeviceTransformFeedbackPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceTransformFeedbackPropertiesEXT( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceTransformFeedbackPropertiesEXT& operator=( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceTransformFeedbackPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceTransformFeedbackPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceTransformFeedbackPropertiesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceTransformFeedbackPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxTransformFeedbackStreams == rhs.maxTransformFeedbackStreams )
+          && ( maxTransformFeedbackBuffers == rhs.maxTransformFeedbackBuffers )
+          && ( maxTransformFeedbackBufferSize == rhs.maxTransformFeedbackBufferSize )
+          && ( maxTransformFeedbackStreamDataSize == rhs.maxTransformFeedbackStreamDataSize )
+          && ( maxTransformFeedbackBufferDataSize == rhs.maxTransformFeedbackBufferDataSize )
+          && ( maxTransformFeedbackBufferDataStride == rhs.maxTransformFeedbackBufferDataStride )
+          && ( transformFeedbackQueries == rhs.transformFeedbackQueries )
+          && ( transformFeedbackStreamsLinesTriangles == rhs.transformFeedbackStreamsLinesTriangles )
+          && ( transformFeedbackRasterizationStreamSelect == rhs.transformFeedbackRasterizationStreamSelect )
+          && ( transformFeedbackDraw == rhs.transformFeedbackDraw );
+    }
+
+    bool operator!=( PhysicalDeviceTransformFeedbackPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT;
+    void* pNext = nullptr;
+    uint32_t maxTransformFeedbackStreams;
+    uint32_t maxTransformFeedbackBuffers;
+    VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize;
+    uint32_t maxTransformFeedbackStreamDataSize;
+    uint32_t maxTransformFeedbackBufferDataSize;
+    uint32_t maxTransformFeedbackBufferDataStride;
+    VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries;
+    VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles;
+    VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect;
+    VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw;
+  };
+  static_assert( sizeof( PhysicalDeviceTransformFeedbackPropertiesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceTransformFeedbackPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : uniformBufferStandardLayout( uniformBufferStandardLayout_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR ) - offsetof( PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR( VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR& operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR & setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uniformBufferStandardLayout = uniformBufferStandardLayout_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR*>( this );
+    }
+
+    bool operator==( PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout );
+    }
+
+    bool operator!=( PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout;
+  };
+  static_assert( sizeof( PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR ) == sizeof( VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceVariablePointersFeatures
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = 0,
+                                                                 VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : variablePointersStorageBuffer( variablePointersStorageBuffer_ )
+      , variablePointers( variablePointers_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures ) - offsetof( PhysicalDeviceVariablePointersFeatures, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceVariablePointersFeatures( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceVariablePointersFeatures& operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceVariablePointersFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceVariablePointersFeatures & setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      variablePointersStorageBuffer = variablePointersStorageBuffer_;
+      return *this;
+    }
+
+    PhysicalDeviceVariablePointersFeatures & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      variablePointers = variablePointers_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceVariablePointersFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVariablePointersFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceVariablePointersFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVariablePointersFeatures*>( this );
+    }
+
+    bool operator==( PhysicalDeviceVariablePointersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer )
+          && ( variablePointers == rhs.variablePointers );
+    }
+
+    bool operator!=( PhysicalDeviceVariablePointersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVariablePointersFeatures;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer;
+    VULKAN_HPP_NAMESPACE::Bool32 variablePointers;
+  };
+  static_assert( sizeof( PhysicalDeviceVariablePointersFeatures ) == sizeof( VkPhysicalDeviceVariablePointersFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceVariablePointersFeatures>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = 0,
+                                                                          VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : vertexAttributeInstanceRateDivisor( vertexAttributeInstanceRateDivisor_ )
+      , vertexAttributeInstanceRateZeroDivisor( vertexAttributeInstanceRateZeroDivisor_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) - offsetof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceVertexAttributeDivisorFeaturesEXT& operator=( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_;
+      return *this;
+    }
+
+    PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateZeroDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor )
+          && ( vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor );
+    }
+
+    bool operator!=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor;
+    VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor;
+  };
+  static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceVertexAttributeDivisorFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT
+  {
+    PhysicalDeviceVertexAttributeDivisorPropertiesEXT( uint32_t maxVertexAttribDivisor_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : maxVertexAttribDivisor( maxVertexAttribDivisor_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) - offsetof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceVertexAttributeDivisorPropertiesEXT& operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor );
+    }
+
+    bool operator!=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;
+    void* pNext = nullptr;
+    uint32_t maxVertexAttribDivisor;
+  };
+  static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceVertexAttributeDivisorPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceVulkanMemoryModelFeaturesKHR
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = 0,
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = 0,
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : vulkanMemoryModel( vulkanMemoryModel_ )
+      , vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ )
+      , vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeaturesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeaturesKHR ) - offsetof( PhysicalDeviceVulkanMemoryModelFeaturesKHR, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceVulkanMemoryModelFeaturesKHR( VkPhysicalDeviceVulkanMemoryModelFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceVulkanMemoryModelFeaturesKHR& operator=( VkPhysicalDeviceVulkanMemoryModelFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeaturesKHR const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceVulkanMemoryModelFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkanMemoryModelFeaturesKHR & setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vulkanMemoryModel = vulkanMemoryModel_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkanMemoryModelFeaturesKHR & setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_;
+      return *this;
+    }
+
+    PhysicalDeviceVulkanMemoryModelFeaturesKHR & setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceVulkanMemoryModelFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVulkanMemoryModelFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceVulkanMemoryModelFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeaturesKHR*>( this );
+    }
+
+    bool operator==( PhysicalDeviceVulkanMemoryModelFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( vulkanMemoryModel == rhs.vulkanMemoryModel )
+          && ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope )
+          && ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains );
+    }
+
+    bool operator!=( PhysicalDeviceVulkanMemoryModelFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkanMemoryModelFeaturesKHR;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel;
+    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope;
+    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains;
+  };
+  static_assert( sizeof( PhysicalDeviceVulkanMemoryModelFeaturesKHR ) == sizeof( VkPhysicalDeviceVulkanMemoryModelFeaturesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceVulkanMemoryModelFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceYcbcrImageArraysFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : ycbcrImageArrays( ycbcrImageArrays_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT ) - offsetof( PhysicalDeviceYcbcrImageArraysFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceYcbcrImageArraysFeaturesEXT( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceYcbcrImageArraysFeaturesEXT& operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceYcbcrImageArraysFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceYcbcrImageArraysFeaturesEXT & setYcbcrImageArrays( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ycbcrImageArrays = ycbcrImageArrays_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceYcbcrImageArraysFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT*>( this );
+    }
+
+    bool operator==( PhysicalDeviceYcbcrImageArraysFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( ycbcrImageArrays == rhs.ycbcrImageArrays );
+    }
+
+    bool operator!=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays;
+  };
+  static_assert( sizeof( PhysicalDeviceYcbcrImageArraysFeaturesEXT ) == sizeof( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceYcbcrImageArraysFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineCacheCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ = VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags(),
+                                                  size_t initialDataSize_ = 0,
+                                                  const void* pInitialData_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , initialDataSize( initialDataSize_ )
+      , pInitialData( pInitialData_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo ) - offsetof( PipelineCacheCreateInfo, pNext ) );
+      return *this;
+    }
+
+    PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineCacheCreateInfo& operator=( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    PipelineCacheCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineCacheCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineCacheCreateInfo & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialDataSize = initialDataSize_;
+      return *this;
+    }
+
+    PipelineCacheCreateInfo & setPInitialData( const void* pInitialData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInitialData = pInitialData_;
+      return *this;
+    }
+
+    operator VkPipelineCacheCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCacheCreateInfo*>( this );
+    }
+
+    operator VkPipelineCacheCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCacheCreateInfo*>( this );
+    }
+
+    bool operator==( PipelineCacheCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( initialDataSize == rhs.initialDataSize )
+          && ( pInitialData == rhs.pInitialData );
+    }
+
+    bool operator!=( PipelineCacheCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCacheCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags;
+    size_t initialDataSize;
+    const void* pInitialData;
+  };
+  static_assert( sizeof( PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineCacheCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
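+  // A minimal usage sketch for PipelineCacheCreateInfo (illustrative only; assumes a valid
+  // vk::Device `device`, previously serialized cache bytes in a std::vector<uint8_t> `blob`,
+  // and the default exception-enabled vulkan.hpp configuration):
+  //
+  //   vk::PipelineCacheCreateInfo cacheInfo;
+  //   cacheInfo.setInitialDataSize( blob.size() ).setPInitialData( blob.data() );
+  //   vk::PipelineCache cache = device.createPipelineCache( cacheInfo );
+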
+  struct PipelineColorBlendAdvancedStateCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = 0,
+                                                                       VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = 0,
+                                                                       VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated ) VULKAN_HPP_NOEXCEPT
+      : srcPremultiplied( srcPremultiplied_ )
+      , dstPremultiplied( dstPremultiplied_ )
+      , blendOverlap( blendOverlap_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT ) - offsetof( PipelineColorBlendAdvancedStateCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineColorBlendAdvancedStateCreateInfoEXT& operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    PipelineColorBlendAdvancedStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineColorBlendAdvancedStateCreateInfoEXT & setSrcPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcPremultiplied = srcPremultiplied_;
+      return *this;
+    }
+
+    PipelineColorBlendAdvancedStateCreateInfoEXT & setDstPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstPremultiplied = dstPremultiplied_;
+      return *this;
+    }
+
+    PipelineColorBlendAdvancedStateCreateInfoEXT & setBlendOverlap( VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ ) VULKAN_HPP_NOEXCEPT
+    {
+      blendOverlap = blendOverlap_;
+      return *this;
+    }
+
+    operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineColorBlendAdvancedStateCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineColorBlendAdvancedStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineColorBlendAdvancedStateCreateInfoEXT*>( this );
+    }
+
+    bool operator==( PipelineColorBlendAdvancedStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcPremultiplied == rhs.srcPremultiplied )
+          && ( dstPremultiplied == rhs.dstPremultiplied )
+          && ( blendOverlap == rhs.blendOverlap );
+    }
+
+    bool operator!=( PipelineColorBlendAdvancedStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied;
+    VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied;
+    VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap;
+  };
+  static_assert( sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) == sizeof( VkPipelineColorBlendAdvancedStateCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineColorBlendAdvancedStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineCompilerControlCreateInfoAMD
+  {
+    VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ = VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD() ) VULKAN_HPP_NOEXCEPT
+      : compilerControlFlags( compilerControlFlags_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD & operator=( VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD ) - offsetof( PipelineCompilerControlCreateInfoAMD, pNext ) );
+      return *this;
+    }
+
+    PipelineCompilerControlCreateInfoAMD( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineCompilerControlCreateInfoAMD& operator=( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD const *>(&rhs);
+      return *this;
+    }
+
+    PipelineCompilerControlCreateInfoAMD & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineCompilerControlCreateInfoAMD & setCompilerControlFlags( VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compilerControlFlags = compilerControlFlags_;
+      return *this;
+    }
+
+    operator VkPipelineCompilerControlCreateInfoAMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCompilerControlCreateInfoAMD*>( this );
+    }
+
+    operator VkPipelineCompilerControlCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCompilerControlCreateInfoAMD*>( this );
+    }
+
+    bool operator==( PipelineCompilerControlCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( compilerControlFlags == rhs.compilerControlFlags );
+    }
+
+    bool operator!=( PipelineCompilerControlCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCompilerControlCreateInfoAMD;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags;
+  };
+  static_assert( sizeof( PipelineCompilerControlCreateInfoAMD ) == sizeof( VkPipelineCompilerControlCreateInfoAMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineCompilerControlCreateInfoAMD>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineCoverageModulationStateCreateInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ = VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV(),
+                                                                      VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone,
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ = 0,
+                                                                      uint32_t coverageModulationTableCount_ = 0,
+                                                                      const float* pCoverageModulationTable_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , coverageModulationMode( coverageModulationMode_ )
+      , coverageModulationTableEnable( coverageModulationTableEnable_ )
+      , coverageModulationTableCount( coverageModulationTableCount_ )
+      , pCoverageModulationTable( pCoverageModulationTable_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV ) - offsetof( PipelineCoverageModulationStateCreateInfoNV, pNext ) );
+      return *this;
+    }
+
+    PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineCoverageModulationStateCreateInfoNV& operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    PipelineCoverageModulationStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineCoverageModulationStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationMode( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      coverageModulationMode = coverageModulationMode_;
+      return *this;
+    }
+
+    PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      coverageModulationTableEnable = coverageModulationTableEnable_;
+      return *this;
+    }
+
+    PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableCount( uint32_t coverageModulationTableCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      coverageModulationTableCount = coverageModulationTableCount_;
+      return *this;
+    }
+
+    PipelineCoverageModulationStateCreateInfoNV & setPCoverageModulationTable( const float* pCoverageModulationTable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCoverageModulationTable = pCoverageModulationTable_;
+      return *this;
+    }
+
+    operator VkPipelineCoverageModulationStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCoverageModulationStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineCoverageModulationStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCoverageModulationStateCreateInfoNV*>( this );
+    }
+
+    bool operator==( PipelineCoverageModulationStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( coverageModulationMode == rhs.coverageModulationMode )
+          && ( coverageModulationTableEnable == rhs.coverageModulationTableEnable )
+          && ( coverageModulationTableCount == rhs.coverageModulationTableCount )
+          && ( pCoverageModulationTable == rhs.pCoverageModulationTable );
+    }
+
+    bool operator!=( PipelineCoverageModulationStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageModulationStateCreateInfoNV;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags;
+    VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode;
+    VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable;
+    uint32_t coverageModulationTableCount;
+    const float* pCoverageModulationTable;
+  };
+  static_assert( sizeof( PipelineCoverageModulationStateCreateInfoNV ) == sizeof( VkPipelineCoverageModulationStateCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineCoverageModulationStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineCoverageReductionStateCreateInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ = VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV(),
+                                                                     VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , coverageReductionMode( coverageReductionMode_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV ) - offsetof( PipelineCoverageReductionStateCreateInfoNV, pNext ) );
+      return *this;
+    }
+
+    PipelineCoverageReductionStateCreateInfoNV( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineCoverageReductionStateCreateInfoNV& operator=( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    PipelineCoverageReductionStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineCoverageReductionStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineCoverageReductionStateCreateInfoNV & setCoverageReductionMode( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      coverageReductionMode = coverageReductionMode_;
+      return *this;
+    }
+
+    operator VkPipelineCoverageReductionStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCoverageReductionStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineCoverageReductionStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCoverageReductionStateCreateInfoNV*>( this );
+    }
+
+    bool operator==( PipelineCoverageReductionStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( coverageReductionMode == rhs.coverageReductionMode );
+    }
+
+    bool operator!=( PipelineCoverageReductionStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageReductionStateCreateInfoNV;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags;
+    VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode;
+  };
+  static_assert( sizeof( PipelineCoverageReductionStateCreateInfoNV ) == sizeof( VkPipelineCoverageReductionStateCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineCoverageReductionStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineCoverageToColorStateCreateInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ = VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV(),
+                                                                   VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ = 0,
+                                                                   uint32_t coverageToColorLocation_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , coverageToColorEnable( coverageToColorEnable_ )
+      , coverageToColorLocation( coverageToColorLocation_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV ) - offsetof( PipelineCoverageToColorStateCreateInfoNV, pNext ) );
+      return *this;
+    }
+
+    PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineCoverageToColorStateCreateInfoNV& operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    PipelineCoverageToColorStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineCoverageToColorStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineCoverageToColorStateCreateInfoNV & setCoverageToColorEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      coverageToColorEnable = coverageToColorEnable_;
+      return *this;
+    }
+
+    PipelineCoverageToColorStateCreateInfoNV & setCoverageToColorLocation( uint32_t coverageToColorLocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      coverageToColorLocation = coverageToColorLocation_;
+      return *this;
+    }
+
+    operator VkPipelineCoverageToColorStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCoverageToColorStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineCoverageToColorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCoverageToColorStateCreateInfoNV*>( this );
+    }
+
+    bool operator==( PipelineCoverageToColorStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( coverageToColorEnable == rhs.coverageToColorEnable )
+          && ( coverageToColorLocation == rhs.coverageToColorLocation );
+    }
+
+    bool operator!=( PipelineCoverageToColorStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageToColorStateCreateInfoNV;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags;
+    VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable;
+    uint32_t coverageToColorLocation;
+  };
+  static_assert( sizeof( PipelineCoverageToColorStateCreateInfoNV ) == sizeof( VkPipelineCoverageToColorStateCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineCoverageToColorStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineCreationFeedbackEXT
+  {
+    PipelineCreationFeedbackEXT( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT flags_ = VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT(),
+                                 uint64_t duration_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , duration( duration_ )
+    {}
+
+    PipelineCreationFeedbackEXT( VkPipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineCreationFeedbackEXT& operator=( VkPipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPipelineCreationFeedbackEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCreationFeedbackEXT*>( this );
+    }
+
+    operator VkPipelineCreationFeedbackEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCreationFeedbackEXT*>( this );
+    }
+
+    bool operator==( PipelineCreationFeedbackEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( flags == rhs.flags )
+          && ( duration == rhs.duration );
+    }
+
+    bool operator!=( PipelineCreationFeedbackEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT flags;
+    uint64_t duration;
+  };
+  static_assert( sizeof( PipelineCreationFeedbackEXT ) == sizeof( VkPipelineCreationFeedbackEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineCreationFeedbackEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineCreationFeedbackCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback_ = nullptr,
+                                                                uint32_t pipelineStageCreationFeedbackCount_ = 0,
+                                                                VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pPipelineCreationFeedback( pPipelineCreationFeedback_ )
+      , pipelineStageCreationFeedbackCount( pipelineStageCreationFeedbackCount_ )
+      , pPipelineStageCreationFeedbacks( pPipelineStageCreationFeedbacks_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfoEXT ) - offsetof( PipelineCreationFeedbackCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    PipelineCreationFeedbackCreateInfoEXT( VkPipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineCreationFeedbackCreateInfoEXT& operator=( VkPipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    PipelineCreationFeedbackCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineCreationFeedbackCreateInfoEXT & setPPipelineCreationFeedback( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPipelineCreationFeedback = pPipelineCreationFeedback_;
+      return *this;
+    }
+
+    PipelineCreationFeedbackCreateInfoEXT & setPipelineStageCreationFeedbackCount( uint32_t pipelineStageCreationFeedbackCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineStageCreationFeedbackCount = pipelineStageCreationFeedbackCount_;
+      return *this;
+    }
+
+    PipelineCreationFeedbackCreateInfoEXT & setPPipelineStageCreationFeedbacks( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPipelineStageCreationFeedbacks = pPipelineStageCreationFeedbacks_;
+      return *this;
+    }
+
+    operator VkPipelineCreationFeedbackCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCreationFeedbackCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineCreationFeedbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCreationFeedbackCreateInfoEXT*>( this );
+    }
+
+    bool operator==( PipelineCreationFeedbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pPipelineCreationFeedback == rhs.pPipelineCreationFeedback )
+          && ( pipelineStageCreationFeedbackCount == rhs.pipelineStageCreationFeedbackCount )
+          && ( pPipelineStageCreationFeedbacks == rhs.pPipelineStageCreationFeedbacks );
+    }
+
+    bool operator!=( PipelineCreationFeedbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreationFeedbackCreateInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback;
+    uint32_t pipelineStageCreationFeedbackCount;
+    VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks;
+  };
+  static_assert( sizeof( PipelineCreationFeedbackCreateInfoEXT ) == sizeof( VkPipelineCreationFeedbackCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineCreationFeedbackCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
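+  // A minimal usage sketch for PipelineCreationFeedbackCreateInfoEXT (illustrative only;
+  // assumes a vk::GraphicsPipelineCreateInfo `pipelineInfo` already filled out with a
+  // single shader stage, and that VK_EXT_pipeline_creation_feedback is enabled):
+  //
+  //   vk::PipelineCreationFeedbackEXT overallFeedback;
+  //   vk::PipelineCreationFeedbackEXT stageFeedback;
+  //   vk::PipelineCreationFeedbackCreateInfoEXT feedbackInfo( &overallFeedback, 1, &stageFeedback );
+  //   pipelineInfo.setPNext( &feedbackInfo );
+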
+  struct PipelineDiscardRectangleStateCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ = VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT(),
+                                                                     VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive,
+                                                                     uint32_t discardRectangleCount_ = 0,
+                                                                     const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , discardRectangleMode( discardRectangleMode_ )
+      , discardRectangleCount( discardRectangleCount_ )
+      , pDiscardRectangles( pDiscardRectangles_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT ) - offsetof( PipelineDiscardRectangleStateCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineDiscardRectangleStateCreateInfoEXT& operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    PipelineDiscardRectangleStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineDiscardRectangleStateCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangleMode( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      discardRectangleMode = discardRectangleMode_;
+      return *this;
+    }
+
+    PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangleCount( uint32_t discardRectangleCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      discardRectangleCount = discardRectangleCount_;
+      return *this;
+    }
+
+    PipelineDiscardRectangleStateCreateInfoEXT & setPDiscardRectangles( const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDiscardRectangles = pDiscardRectangles_;
+      return *this;
+    }
+
+    operator VkPipelineDiscardRectangleStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineDiscardRectangleStateCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineDiscardRectangleStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineDiscardRectangleStateCreateInfoEXT*>( this );
+    }
+
+    bool operator==( PipelineDiscardRectangleStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( discardRectangleMode == rhs.discardRectangleMode )
+          && ( discardRectangleCount == rhs.discardRectangleCount )
+          && ( pDiscardRectangles == rhs.pDiscardRectangles );
+    }
+
+    bool operator!=( PipelineDiscardRectangleStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags;
+    VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode;
+    uint32_t discardRectangleCount;
+    const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles;
+  };
+  static_assert( sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) == sizeof( VkPipelineDiscardRectangleStateCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineDiscardRectangleStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineExecutableInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = VULKAN_HPP_NAMESPACE::Pipeline(),
+                                                    uint32_t executableIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : pipeline( pipeline_ )
+      , executableIndex( executableIndex_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR ) - offsetof( PipelineExecutableInfoKHR, pNext ) );
+      return *this;
+    }
+
+    PipelineExecutableInfoKHR( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineExecutableInfoKHR& operator=( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    PipelineExecutableInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineExecutableInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipeline = pipeline_;
+      return *this;
+    }
+
+    PipelineExecutableInfoKHR & setExecutableIndex( uint32_t executableIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      executableIndex = executableIndex_;
+      return *this;
+    }
+
+    operator VkPipelineExecutableInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineExecutableInfoKHR*>( this );
+    }
+
+    operator VkPipelineExecutableInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineExecutableInfoKHR*>( this );
+    }
+
+    bool operator==( PipelineExecutableInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pipeline == rhs.pipeline )
+          && ( executableIndex == rhs.executableIndex );
+    }
+
+    bool operator!=( PipelineExecutableInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
+    uint32_t executableIndex;
+  };
+  static_assert( sizeof( PipelineExecutableInfoKHR ) == sizeof( VkPipelineExecutableInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineExecutableInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineExecutableInternalRepresentationKHR
+  {
+    PipelineExecutableInternalRepresentationKHR( std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = { { 0 } },
+                                                 std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = { { 0 } },
+                                                 VULKAN_HPP_NAMESPACE::Bool32 isText_ = 0,
+                                                 size_t dataSize_ = 0,
+                                                 void* pData_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : name{}
+      , description{}
+      , isText( isText_ )
+      , dataSize( dataSize_ )
+      , pData( pData_ )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( name, name_ );
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( description, description_ );
+    }
+
+    VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR ) - offsetof( PipelineExecutableInternalRepresentationKHR, pNext ) );
+      return *this;
+    }
+
+    PipelineExecutableInternalRepresentationKHR( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineExecutableInternalRepresentationKHR& operator=( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPipelineExecutableInternalRepresentationKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineExecutableInternalRepresentationKHR*>( this );
+    }
+
+    operator VkPipelineExecutableInternalRepresentationKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR*>( this );
+    }
+
+    bool operator==( PipelineExecutableInternalRepresentationKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memcmp( name, rhs.name, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 )
+          && ( memcmp( description, rhs.description, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 )
+          && ( isText == rhs.isText )
+          && ( dataSize == rhs.dataSize )
+          && ( pData == rhs.pData );
+    }
+
+    bool operator!=( PipelineExecutableInternalRepresentationKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInternalRepresentationKHR;
+    void* pNext = nullptr;
+    char name[VK_MAX_DESCRIPTION_SIZE];
+    char description[VK_MAX_DESCRIPTION_SIZE];
+    VULKAN_HPP_NAMESPACE::Bool32 isText;
+    size_t dataSize;
+    void* pData;
+  };
+  static_assert( sizeof( PipelineExecutableInternalRepresentationKHR ) == sizeof( VkPipelineExecutableInternalRepresentationKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineExecutableInternalRepresentationKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineExecutablePropertiesKHR
+  {
+    PipelineExecutablePropertiesKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags stages_ = VULKAN_HPP_NAMESPACE::ShaderStageFlags(),
+                                     std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = { { 0 } },
+                                     std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = { { 0 } },
+                                     uint32_t subgroupSize_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : stages( stages_ )
+      , name{}
+      , description{}
+      , subgroupSize( subgroupSize_ )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( name, name_ );
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( description, description_ );
+    }
+
+    VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR ) - offsetof( PipelineExecutablePropertiesKHR, pNext ) );
+      return *this;
+    }
+
+    PipelineExecutablePropertiesKHR( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineExecutablePropertiesKHR& operator=( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPipelineExecutablePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineExecutablePropertiesKHR*>( this );
+    }
+
+    operator VkPipelineExecutablePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineExecutablePropertiesKHR*>( this );
+    }
+
+    bool operator==( PipelineExecutablePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( stages == rhs.stages )
+          && ( memcmp( name, rhs.name, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 )
+          && ( memcmp( description, rhs.description, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 )
+          && ( subgroupSize == rhs.subgroupSize );
+    }
+
+    bool operator!=( PipelineExecutablePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutablePropertiesKHR;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags stages;
+    char name[VK_MAX_DESCRIPTION_SIZE];
+    char description[VK_MAX_DESCRIPTION_SIZE];
+    uint32_t subgroupSize;
+  };
+  static_assert( sizeof( PipelineExecutablePropertiesKHR ) == sizeof( VkPipelineExecutablePropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineExecutablePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  union PipelineExecutableStatisticValueKHR
+  {
+    operator VkPipelineExecutableStatisticValueKHR const&() const
+    {
+      return *reinterpret_cast<const VkPipelineExecutableStatisticValueKHR*>(this);
+    }
+
+    operator VkPipelineExecutableStatisticValueKHR &()
+    {
+      return *reinterpret_cast<VkPipelineExecutableStatisticValueKHR*>(this);
+    }
+
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+    VULKAN_HPP_NAMESPACE::Bool32 b32;
+    int64_t i64;
+    uint64_t u64;
+    double f64;
+#else
+    VkBool32 b32;
+    int64_t i64;
+    uint64_t u64;
+    double f64;
+#endif  /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+  };
+
+  struct PipelineExecutableStatisticKHR
+  {
+    PipelineExecutableStatisticKHR( std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = { { 0 } },
+                                    std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = { { 0 } },
+                                    VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format_ = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32,
+                                    VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value_ = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR() ) VULKAN_HPP_NOEXCEPT
+      : name{}
+      , description{}
+      , format( format_ )
+      , value( value_ )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( name, name_ );
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( description, description_ );
+    }
+
+    VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR ) - offsetof( PipelineExecutableStatisticKHR, pNext ) );
+      return *this;
+    }
+
+    PipelineExecutableStatisticKHR( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineExecutableStatisticKHR& operator=( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPipelineExecutableStatisticKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineExecutableStatisticKHR*>( this );
+    }
+
+    operator VkPipelineExecutableStatisticKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineExecutableStatisticKHR*>( this );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableStatisticKHR;
+    void* pNext = nullptr;
+    char name[VK_MAX_DESCRIPTION_SIZE];
+    char description[VK_MAX_DESCRIPTION_SIZE];
+    VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format;
+    VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value;
+  };
+  static_assert( sizeof( PipelineExecutableStatisticKHR ) == sizeof( VkPipelineExecutableStatisticKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineExecutableStatisticKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR PipelineInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = VULKAN_HPP_NAMESPACE::Pipeline() ) VULKAN_HPP_NOEXCEPT
+      : pipeline( pipeline_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineInfoKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineInfoKHR ) - offsetof( PipelineInfoKHR, pNext ) );
+      return *this;
+    }
+
+    PipelineInfoKHR( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineInfoKHR& operator=( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    PipelineInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipeline = pipeline_;
+      return *this;
+    }
+
+    operator VkPipelineInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineInfoKHR*>( this );
+    }
+
+    operator VkPipelineInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineInfoKHR*>( this );
+    }
+
+    bool operator==( PipelineInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pipeline == rhs.pipeline );
+    }
+
+    bool operator!=( PipelineInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
+  };
+  static_assert( sizeof( PipelineInfoKHR ) == sizeof( VkPipelineInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PushConstantRange
+  {
+    VULKAN_HPP_CONSTEXPR PushConstantRange( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = VULKAN_HPP_NAMESPACE::ShaderStageFlags(),
+                                            uint32_t offset_ = 0,
+                                            uint32_t size_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : stageFlags( stageFlags_ )
+      , offset( offset_ )
+      , size( size_ )
+    {}
+
+    PushConstantRange( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PushConstantRange& operator=( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PushConstantRange const *>(&rhs);
+      return *this;
+    }
+
+    PushConstantRange & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageFlags = stageFlags_;
+      return *this;
+    }
+
+    PushConstantRange & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    PushConstantRange & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    operator VkPushConstantRange const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPushConstantRange*>( this );
+    }
+
+    operator VkPushConstantRange &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPushConstantRange*>( this );
+    }
+
+    bool operator==( PushConstantRange const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( stageFlags == rhs.stageFlags )
+          && ( offset == rhs.offset )
+          && ( size == rhs.size );
+    }
+
+    bool operator!=( PushConstantRange const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags;
+    uint32_t offset;
+    uint32_t size;
+  };
+  static_assert( sizeof( PushConstantRange ) == sizeof( VkPushConstantRange ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PushConstantRange>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineLayoutCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ = VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags(),
+                                                   uint32_t setLayoutCount_ = 0,
+                                                   const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ = nullptr,
+                                                   uint32_t pushConstantRangeCount_ = 0,
+                                                   const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , setLayoutCount( setLayoutCount_ )
+      , pSetLayouts( pSetLayouts_ )
+      , pushConstantRangeCount( pushConstantRangeCount_ )
+      , pPushConstantRanges( pPushConstantRanges_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo ) - offsetof( PipelineLayoutCreateInfo, pNext ) );
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineLayoutCreateInfo& operator=( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      setLayoutCount = setLayoutCount_;
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSetLayouts = pSetLayouts_;
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pushConstantRangeCount = pushConstantRangeCount_;
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo & setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPushConstantRanges = pPushConstantRanges_;
+      return *this;
+    }
+
+    operator VkPipelineLayoutCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineLayoutCreateInfo*>( this );
+    }
+
+    operator VkPipelineLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineLayoutCreateInfo*>( this );
+    }
+
+    bool operator==( PipelineLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( setLayoutCount == rhs.setLayoutCount )
+          && ( pSetLayouts == rhs.pSetLayouts )
+          && ( pushConstantRangeCount == rhs.pushConstantRangeCount )
+          && ( pPushConstantRanges == rhs.pPushConstantRanges );
+    }
+
+    bool operator!=( PipelineLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLayoutCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags;
+    uint32_t setLayoutCount;
+    const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts;
+    uint32_t pushConstantRangeCount;
+    const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges;
+  };
+  static_assert( sizeof( PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineLayoutCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineRasterizationConservativeStateCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT(),
+                                                                              VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled,
+                                                                              float extraPrimitiveOverestimationSize_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , conservativeRasterizationMode( conservativeRasterizationMode_ )
+      , extraPrimitiveOverestimationSize( extraPrimitiveOverestimationSize_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT ) - offsetof( PipelineRasterizationConservativeStateCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    PipelineRasterizationConservativeStateCreateInfoEXT( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineRasterizationConservativeStateCreateInfoEXT& operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    PipelineRasterizationConservativeStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineRasterizationConservativeStateCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineRasterizationConservativeStateCreateInfoEXT & setConservativeRasterizationMode( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      conservativeRasterizationMode = conservativeRasterizationMode_;
+      return *this;
+    }
+
+    PipelineRasterizationConservativeStateCreateInfoEXT & setExtraPrimitiveOverestimationSize( float extraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extraPrimitiveOverestimationSize = extraPrimitiveOverestimationSize_;
+      return *this;
+    }
+
+    operator VkPipelineRasterizationConservativeStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationConservativeStateCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineRasterizationConservativeStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRasterizationConservativeStateCreateInfoEXT*>( this );
+    }
+
+    bool operator==( PipelineRasterizationConservativeStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( conservativeRasterizationMode == rhs.conservativeRasterizationMode )
+          && ( extraPrimitiveOverestimationSize == rhs.extraPrimitiveOverestimationSize );
+    }
+
+    bool operator!=( PipelineRasterizationConservativeStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags;
+    VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode;
+    float extraPrimitiveOverestimationSize;
+  };
+  static_assert( sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationConservativeStateCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineRasterizationConservativeStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineRasterizationDepthClipStateCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT(),
+                                                                           VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , depthClipEnable( depthClipEnable_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT ) - offsetof( PipelineRasterizationDepthClipStateCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    PipelineRasterizationDepthClipStateCreateInfoEXT( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineRasterizationDepthClipStateCreateInfoEXT& operator=( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    PipelineRasterizationDepthClipStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineRasterizationDepthClipStateCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineRasterizationDepthClipStateCreateInfoEXT & setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthClipEnable = depthClipEnable_;
+      return *this;
+    }
+
+    operator VkPipelineRasterizationDepthClipStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationDepthClipStateCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineRasterizationDepthClipStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRasterizationDepthClipStateCreateInfoEXT*>( this );
+    }
+
+    bool operator==( PipelineRasterizationDepthClipStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( depthClipEnable == rhs.depthClipEnable );
+    }
+
+    bool operator!=( PipelineRasterizationDepthClipStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags;
+    VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable;
+  };
+  static_assert( sizeof( PipelineRasterizationDepthClipStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationDepthClipStateCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineRasterizationDepthClipStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineRasterizationLineStateCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ = VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault,
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ = 0,
+                                                                      uint32_t lineStippleFactor_ = 0,
+                                                                      uint16_t lineStipplePattern_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : lineRasterizationMode( lineRasterizationMode_ )
+      , stippledLineEnable( stippledLineEnable_ )
+      , lineStippleFactor( lineStippleFactor_ )
+      , lineStipplePattern( lineStipplePattern_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT ) - offsetof( PipelineRasterizationLineStateCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    PipelineRasterizationLineStateCreateInfoEXT( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineRasterizationLineStateCreateInfoEXT& operator=( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    PipelineRasterizationLineStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineRasterizationLineStateCreateInfoEXT & setLineRasterizationMode( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      lineRasterizationMode = lineRasterizationMode_;
+      return *this;
+    }
+
+    PipelineRasterizationLineStateCreateInfoEXT & setStippledLineEnable( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stippledLineEnable = stippledLineEnable_;
+      return *this;
+    }
+
+    PipelineRasterizationLineStateCreateInfoEXT & setLineStippleFactor( uint32_t lineStippleFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      lineStippleFactor = lineStippleFactor_;
+      return *this;
+    }
+
+    PipelineRasterizationLineStateCreateInfoEXT & setLineStipplePattern( uint16_t lineStipplePattern_ ) VULKAN_HPP_NOEXCEPT
+    {
+      lineStipplePattern = lineStipplePattern_;
+      return *this;
+    }
+
+    operator VkPipelineRasterizationLineStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationLineStateCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineRasterizationLineStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRasterizationLineStateCreateInfoEXT*>( this );
+    }
+
+    bool operator==( PipelineRasterizationLineStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( lineRasterizationMode == rhs.lineRasterizationMode )
+          && ( stippledLineEnable == rhs.stippledLineEnable )
+          && ( lineStippleFactor == rhs.lineStippleFactor )
+          && ( lineStipplePattern == rhs.lineStipplePattern );
+    }
+
+    bool operator!=( PipelineRasterizationLineStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode;
+    VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable;
+    uint32_t lineStippleFactor;
+    uint16_t lineStipplePattern;
+  };
+  static_assert( sizeof( PipelineRasterizationLineStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationLineStateCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineRasterizationLineStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineRasterizationStateRasterizationOrderAMD
+  {
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD( VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict ) VULKAN_HPP_NOEXCEPT
+      : rasterizationOrder( rasterizationOrder_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD & operator=( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD ) - offsetof( PipelineRasterizationStateRasterizationOrderAMD, pNext ) );
+      return *this;
+    }
+
+    PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineRasterizationStateRasterizationOrderAMD& operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD const *>(&rhs);
+      return *this;
+    }
+
+    PipelineRasterizationStateRasterizationOrderAMD & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineRasterizationStateRasterizationOrderAMD & setRasterizationOrder( VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rasterizationOrder = rasterizationOrder_;
+      return *this;
+    }
+
+    operator VkPipelineRasterizationStateRasterizationOrderAMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD*>( this );
+    }
+
+    operator VkPipelineRasterizationStateRasterizationOrderAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRasterizationStateRasterizationOrderAMD*>( this );
+    }
+
+    bool operator==( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( rasterizationOrder == rhs.rasterizationOrder );
+    }
+
+    bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder;
+  };
+  static_assert( sizeof( PipelineRasterizationStateRasterizationOrderAMD ) == sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineRasterizationStateRasterizationOrderAMD>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineRasterizationStateStreamCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ = VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT(),
+                                                                        uint32_t rasterizationStream_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , rasterizationStream( rasterizationStream_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT ) - offsetof( PipelineRasterizationStateStreamCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    PipelineRasterizationStateStreamCreateInfoEXT( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineRasterizationStateStreamCreateInfoEXT& operator=( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    PipelineRasterizationStateStreamCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineRasterizationStateStreamCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineRasterizationStateStreamCreateInfoEXT & setRasterizationStream( uint32_t rasterizationStream_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rasterizationStream = rasterizationStream_;
+      return *this;
+    }
+
+    operator VkPipelineRasterizationStateStreamCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationStateStreamCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineRasterizationStateStreamCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRasterizationStateStreamCreateInfoEXT*>( this );
+    }
+
+    bool operator==( PipelineRasterizationStateStreamCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( rasterizationStream == rhs.rasterizationStream );
+    }
+
+    bool operator!=( PipelineRasterizationStateStreamCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags;
+    uint32_t rasterizationStream;
+  };
+  static_assert( sizeof( PipelineRasterizationStateStreamCreateInfoEXT ) == sizeof( VkPipelineRasterizationStateStreamCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineRasterizationStateStreamCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineRepresentativeFragmentTestStateCreateInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : representativeFragmentTestEnable( representativeFragmentTestEnable_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV ) - offsetof( PipelineRepresentativeFragmentTestStateCreateInfoNV, pNext ) );
+      return *this;
+    }
+
+    PipelineRepresentativeFragmentTestStateCreateInfoNV( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineRepresentativeFragmentTestStateCreateInfoNV& operator=( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    PipelineRepresentativeFragmentTestStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineRepresentativeFragmentTestStateCreateInfoNV & setRepresentativeFragmentTestEnable( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      representativeFragmentTestEnable = representativeFragmentTestEnable_;
+      return *this;
+    }
+
+    operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineRepresentativeFragmentTestStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRepresentativeFragmentTestStateCreateInfoNV*>( this );
+    }
+
+    bool operator==( PipelineRepresentativeFragmentTestStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( representativeFragmentTestEnable == rhs.representativeFragmentTestEnable );
+    }
+
+    bool operator!=( PipelineRepresentativeFragmentTestStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable;
+  };
+  static_assert( sizeof( PipelineRepresentativeFragmentTestStateCreateInfoNV ) == sizeof( VkPipelineRepresentativeFragmentTestStateCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineRepresentativeFragmentTestStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineSampleLocationsStateCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ = 0,
+                                                                    VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT() ) VULKAN_HPP_NOEXCEPT
+      : sampleLocationsEnable( sampleLocationsEnable_ )
+      , sampleLocationsInfo( sampleLocationsInfo_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT ) - offsetof( PipelineSampleLocationsStateCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineSampleLocationsStateCreateInfoEXT& operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    PipelineSampleLocationsStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationsEnable = sampleLocationsEnable_;
+      return *this;
+    }
+
+    PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationsInfo = sampleLocationsInfo_;
+      return *this;
+    }
+
+    operator VkPipelineSampleLocationsStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineSampleLocationsStateCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineSampleLocationsStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineSampleLocationsStateCreateInfoEXT*>( this );
+    }
+
+    bool operator==( PipelineSampleLocationsStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( sampleLocationsEnable == rhs.sampleLocationsEnable )
+          && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
+    }
+
+    bool operator!=( PipelineSampleLocationsStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable;
+    VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo;
+  };
+  static_assert( sizeof( PipelineSampleLocationsStateCreateInfoEXT ) == sizeof( VkPipelineSampleLocationsStateCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineSampleLocationsStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT
+  {
+    PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( uint32_t requiredSubgroupSize_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : requiredSubgroupSize( requiredSubgroupSize_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ) - offsetof( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT& operator=( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT*>( this );
+    }
+
+    bool operator==( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( requiredSubgroupSize == rhs.requiredSubgroupSize );
+    }
+
+    bool operator!=( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT;
+    void* pNext = nullptr;
+    uint32_t requiredSubgroupSize;
+  };
+  static_assert( sizeof( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ) == sizeof( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineTessellationDomainOriginStateCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft ) VULKAN_HPP_NOEXCEPT
+      : domainOrigin( domainOrigin_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo ) - offsetof( PipelineTessellationDomainOriginStateCreateInfo, pNext ) );
+      return *this;
+    }
+
+    PipelineTessellationDomainOriginStateCreateInfo( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineTessellationDomainOriginStateCreateInfo& operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    PipelineTessellationDomainOriginStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineTessellationDomainOriginStateCreateInfo & setDomainOrigin( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ ) VULKAN_HPP_NOEXCEPT
+    {
+      domainOrigin = domainOrigin_;
+      return *this;
+    }
+
+    operator VkPipelineTessellationDomainOriginStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineTessellationDomainOriginStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineTessellationDomainOriginStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineTessellationDomainOriginStateCreateInfo*>( this );
+    }
+
+    bool operator==( PipelineTessellationDomainOriginStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( domainOrigin == rhs.domainOrigin );
+    }
+
+    bool operator!=( PipelineTessellationDomainOriginStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin;
+  };
+  static_assert( sizeof( PipelineTessellationDomainOriginStateCreateInfo ) == sizeof( VkPipelineTessellationDomainOriginStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineTessellationDomainOriginStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct VertexInputBindingDivisorDescriptionEXT
+  {
+    VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT( uint32_t binding_ = 0,
+                                                                  uint32_t divisor_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : binding( binding_ )
+      , divisor( divisor_ )
+    {}
+
+    VertexInputBindingDivisorDescriptionEXT( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    VertexInputBindingDivisorDescriptionEXT& operator=( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT const *>(&rhs);
+      return *this;
+    }
+
+    VertexInputBindingDivisorDescriptionEXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      binding = binding_;
+      return *this;
+    }
+
+    VertexInputBindingDivisorDescriptionEXT & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      divisor = divisor_;
+      return *this;
+    }
+
+    operator VkVertexInputBindingDivisorDescriptionEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVertexInputBindingDivisorDescriptionEXT*>( this );
+    }
+
+    operator VkVertexInputBindingDivisorDescriptionEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVertexInputBindingDivisorDescriptionEXT*>( this );
+    }
+
+    bool operator==( VertexInputBindingDivisorDescriptionEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( binding == rhs.binding )
+          && ( divisor == rhs.divisor );
+    }
+
+    bool operator!=( VertexInputBindingDivisorDescriptionEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint32_t binding;
+    uint32_t divisor;
+  };
+  static_assert( sizeof( VertexInputBindingDivisorDescriptionEXT ) == sizeof( VkVertexInputBindingDivisorDescriptionEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VertexInputBindingDivisorDescriptionEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineVertexInputDivisorStateCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT( uint32_t vertexBindingDivisorCount_ = 0,
+                                                                       const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : vertexBindingDivisorCount( vertexBindingDivisorCount_ )
+      , pVertexBindingDivisors( pVertexBindingDivisors_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT ) - offsetof( PipelineVertexInputDivisorStateCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    PipelineVertexInputDivisorStateCreateInfoEXT( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineVertexInputDivisorStateCreateInfoEXT& operator=( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    PipelineVertexInputDivisorStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineVertexInputDivisorStateCreateInfoEXT & setVertexBindingDivisorCount( uint32_t vertexBindingDivisorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexBindingDivisorCount = vertexBindingDivisorCount_;
+      return *this;
+    }
+
+    PipelineVertexInputDivisorStateCreateInfoEXT & setPVertexBindingDivisors( const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVertexBindingDivisors = pVertexBindingDivisors_;
+      return *this;
+    }
+
+    operator VkPipelineVertexInputDivisorStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineVertexInputDivisorStateCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineVertexInputDivisorStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineVertexInputDivisorStateCreateInfoEXT*>( this );
+    }
+
+    bool operator==( PipelineVertexInputDivisorStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( vertexBindingDivisorCount == rhs.vertexBindingDivisorCount )
+          && ( pVertexBindingDivisors == rhs.pVertexBindingDivisors );
+    }
+
+    bool operator!=( PipelineVertexInputDivisorStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT;
+    const void* pNext = nullptr;
+    uint32_t vertexBindingDivisorCount;
+    const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors;
+  };
+  static_assert( sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) == sizeof( VkPipelineVertexInputDivisorStateCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineVertexInputDivisorStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineViewportCoarseSampleOrderStateCreateInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault,
+                                                                             uint32_t customSampleOrderCount_ = 0,
+                                                                             const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : sampleOrderType( sampleOrderType_ )
+      , customSampleOrderCount( customSampleOrderCount_ )
+      , pCustomSampleOrders( pCustomSampleOrders_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV ) - offsetof( PipelineViewportCoarseSampleOrderStateCreateInfoNV, pNext ) );
+      return *this;
+    }
+
+    PipelineViewportCoarseSampleOrderStateCreateInfoNV( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineViewportCoarseSampleOrderStateCreateInfoNV& operator=( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineViewportCoarseSampleOrderStateCreateInfoNV & setSampleOrderType( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleOrderType = sampleOrderType_;
+      return *this;
+    }
+
+    PipelineViewportCoarseSampleOrderStateCreateInfoNV & setCustomSampleOrderCount( uint32_t customSampleOrderCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      customSampleOrderCount = customSampleOrderCount_;
+      return *this;
+    }
+
+    PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPCustomSampleOrders( const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCustomSampleOrders = pCustomSampleOrders_;
+      return *this;
+    }
+
+    operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineViewportCoarseSampleOrderStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineViewportCoarseSampleOrderStateCreateInfoNV*>( this );
+    }
+
+    bool operator==( PipelineViewportCoarseSampleOrderStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( sampleOrderType == rhs.sampleOrderType )
+          && ( customSampleOrderCount == rhs.customSampleOrderCount )
+          && ( pCustomSampleOrders == rhs.pCustomSampleOrders );
+    }
+
+    bool operator!=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType;
+    uint32_t customSampleOrderCount;
+    const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders;
+  };
+  static_assert( sizeof( PipelineViewportCoarseSampleOrderStateCreateInfoNV ) == sizeof( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineViewportCoarseSampleOrderStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineViewportExclusiveScissorStateCreateInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( uint32_t exclusiveScissorCount_ = 0,
+                                                                            const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : exclusiveScissorCount( exclusiveScissorCount_ )
+      , pExclusiveScissors( pExclusiveScissors_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV ) - offsetof( PipelineViewportExclusiveScissorStateCreateInfoNV, pNext ) );
+      return *this;
+    }
+
+    PipelineViewportExclusiveScissorStateCreateInfoNV( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineViewportExclusiveScissorStateCreateInfoNV& operator=( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    PipelineViewportExclusiveScissorStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineViewportExclusiveScissorStateCreateInfoNV & setExclusiveScissorCount( uint32_t exclusiveScissorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      exclusiveScissorCount = exclusiveScissorCount_;
+      return *this;
+    }
+
+    PipelineViewportExclusiveScissorStateCreateInfoNV & setPExclusiveScissors( const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pExclusiveScissors = pExclusiveScissors_;
+      return *this;
+    }
+
+    operator VkPipelineViewportExclusiveScissorStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineViewportExclusiveScissorStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineViewportExclusiveScissorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineViewportExclusiveScissorStateCreateInfoNV*>( this );
+    }
+
+    bool operator==( PipelineViewportExclusiveScissorStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( exclusiveScissorCount == rhs.exclusiveScissorCount )
+          && ( pExclusiveScissors == rhs.pExclusiveScissors );
+    }
+
+    bool operator!=( PipelineViewportExclusiveScissorStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV;
+    const void* pNext = nullptr;
+    uint32_t exclusiveScissorCount;
+    const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors;
+  };
+  static_assert( sizeof( PipelineViewportExclusiveScissorStateCreateInfoNV ) == sizeof( VkPipelineViewportExclusiveScissorStateCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineViewportExclusiveScissorStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct ShadingRatePaletteNV
+  {
+    VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( uint32_t shadingRatePaletteEntryCount_ = 0,
+                                               const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : shadingRatePaletteEntryCount( shadingRatePaletteEntryCount_ )
+      , pShadingRatePaletteEntries( pShadingRatePaletteEntries_ )
+    {}
+
+    ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ShadingRatePaletteNV& operator=( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV const *>(&rhs);
+      return *this;
+    }
+
+    ShadingRatePaletteNV & setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRatePaletteEntryCount = shadingRatePaletteEntryCount_;
+      return *this;
+    }
+
+    ShadingRatePaletteNV & setPShadingRatePaletteEntries( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pShadingRatePaletteEntries = pShadingRatePaletteEntries_;
+      return *this;
+    }
+
+    operator VkShadingRatePaletteNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkShadingRatePaletteNV*>( this );
+    }
+
+    operator VkShadingRatePaletteNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkShadingRatePaletteNV*>( this );
+    }
+
+    bool operator==( ShadingRatePaletteNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount )
+          && ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries );
+    }
+
+    bool operator!=( ShadingRatePaletteNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint32_t shadingRatePaletteEntryCount;
+    const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries;
+  };
+  static_assert( sizeof( ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ShadingRatePaletteNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineViewportShadingRateImageStateCreateInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ = 0,
+                                                                            uint32_t viewportCount_ = 0,
+                                                                            const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : shadingRateImageEnable( shadingRateImageEnable_ )
+      , viewportCount( viewportCount_ )
+      , pShadingRatePalettes( pShadingRatePalettes_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV ) - offsetof( PipelineViewportShadingRateImageStateCreateInfoNV, pNext ) );
+      return *this;
+    }
+
+    PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineViewportShadingRateImageStateCreateInfoNV& operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    PipelineViewportShadingRateImageStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRateImageEnable( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRateImageEnable = shadingRateImageEnable_;
+      return *this;
+    }
+
+    PipelineViewportShadingRateImageStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportCount = viewportCount_;
+      return *this;
+    }
+
+    PipelineViewportShadingRateImageStateCreateInfoNV & setPShadingRatePalettes( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pShadingRatePalettes = pShadingRatePalettes_;
+      return *this;
+    }
+
+    operator VkPipelineViewportShadingRateImageStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineViewportShadingRateImageStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineViewportShadingRateImageStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineViewportShadingRateImageStateCreateInfoNV*>( this );
+    }
+
+    bool operator==( PipelineViewportShadingRateImageStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shadingRateImageEnable == rhs.shadingRateImageEnable )
+          && ( viewportCount == rhs.viewportCount )
+          && ( pShadingRatePalettes == rhs.pShadingRatePalettes );
+    }
+
+    bool operator!=( PipelineViewportShadingRateImageStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable;
+    uint32_t viewportCount;
+    const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes;
+  };
+  static_assert( sizeof( PipelineViewportShadingRateImageStateCreateInfoNV ) == sizeof( VkPipelineViewportShadingRateImageStateCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineViewportShadingRateImageStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
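+  // Illustrative usage sketch (not part of the generated header): with
+  // VK_NV_shading_rate_image enabled, a shading-rate palette per viewport is
+  // chained into vk::PipelineViewportStateCreateInfo::pNext; 'entryCount',
+  // 'entries' and 'viewportState' are placeholders owned by the caller.
+  //
+  //   vk::ShadingRatePaletteNV palette( entryCount, entries );
+  //   vk::PipelineViewportShadingRateImageStateCreateInfoNV shadingRateState( VK_TRUE, 1, &palette );
+  //   viewportState.setPNext( &shadingRateState );
+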
+  struct ViewportSwizzleNV
+  {
+    VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
+                                            VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
+                                            VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
+                                            VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX ) VULKAN_HPP_NOEXCEPT
+      : x( x_ )
+      , y( y_ )
+      , z( z_ )
+      , w( w_ )
+    {}
+
+    ViewportSwizzleNV( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ViewportSwizzleNV& operator=( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportSwizzleNV const *>(&rhs);
+      return *this;
+    }
+
+    ViewportSwizzleNV & setX( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    ViewportSwizzleNV & setY( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+    ViewportSwizzleNV & setZ( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ ) VULKAN_HPP_NOEXCEPT
+    {
+      z = z_;
+      return *this;
+    }
+
+    ViewportSwizzleNV & setW( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ ) VULKAN_HPP_NOEXCEPT
+    {
+      w = w_;
+      return *this;
+    }
+
+    operator VkViewportSwizzleNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkViewportSwizzleNV*>( this );
+    }
+
+    operator VkViewportSwizzleNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkViewportSwizzleNV*>( this );
+    }
+
+    bool operator==( ViewportSwizzleNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( x == rhs.x )
+          && ( y == rhs.y )
+          && ( z == rhs.z )
+          && ( w == rhs.w );
+    }
+
+    bool operator!=( ViewportSwizzleNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x;
+    VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y;
+    VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z;
+    VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w;
+  };
+  static_assert( sizeof( ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ViewportSwizzleNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineViewportSwizzleStateCreateInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ = VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV(),
+                                                                   uint32_t viewportCount_ = 0,
+                                                                   const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV* pViewportSwizzles_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , viewportCount( viewportCount_ )
+      , pViewportSwizzles( pViewportSwizzles_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV ) - offsetof( PipelineViewportSwizzleStateCreateInfoNV, pNext ) );
+      return *this;
+    }
+
+    PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineViewportSwizzleStateCreateInfoNV& operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    PipelineViewportSwizzleStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineViewportSwizzleStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineViewportSwizzleStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportCount = viewportCount_;
+      return *this;
+    }
+
+    PipelineViewportSwizzleStateCreateInfoNV & setPViewportSwizzles( const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV* pViewportSwizzles_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewportSwizzles = pViewportSwizzles_;
+      return *this;
+    }
+
+    operator VkPipelineViewportSwizzleStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineViewportSwizzleStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineViewportSwizzleStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineViewportSwizzleStateCreateInfoNV*>( this );
+    }
+
+    bool operator==( PipelineViewportSwizzleStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( viewportCount == rhs.viewportCount )
+          && ( pViewportSwizzles == rhs.pViewportSwizzles );
+    }
+
+    bool operator!=( PipelineViewportSwizzleStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags;
+    uint32_t viewportCount;
+    const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV* pViewportSwizzles;
+  };
+  static_assert( sizeof( PipelineViewportSwizzleStateCreateInfoNV ) == sizeof( VkPipelineViewportSwizzleStateCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineViewportSwizzleStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
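+  // Illustrative usage sketch (not part of the generated header): with
+  // VK_NV_viewport_swizzle enabled, a per-viewport swizzle array is chained
+  // into vk::PipelineViewportStateCreateInfo::pNext at pipeline creation;
+  // 'viewportState' is a placeholder owned by the caller.
+  //
+  //   vk::ViewportSwizzleNV swizzle( vk::ViewportCoordinateSwizzleNV::ePositiveX,
+  //                                  vk::ViewportCoordinateSwizzleNV::ePositiveY,
+  //                                  vk::ViewportCoordinateSwizzleNV::ePositiveZ,
+  //                                  vk::ViewportCoordinateSwizzleNV::ePositiveW );
+  //   vk::PipelineViewportSwizzleStateCreateInfoNV swizzleState( {}, 1, &swizzle );
+  //   viewportState.setPNext( &swizzleState );
+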
+  struct ViewportWScalingNV
+  {
+    VULKAN_HPP_CONSTEXPR ViewportWScalingNV( float xcoeff_ = 0,
+                                             float ycoeff_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : xcoeff( xcoeff_ )
+      , ycoeff( ycoeff_ )
+    {}
+
+    ViewportWScalingNV( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ViewportWScalingNV& operator=( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportWScalingNV const *>(&rhs);
+      return *this;
+    }
+
+    ViewportWScalingNV & setXcoeff( float xcoeff_ ) VULKAN_HPP_NOEXCEPT
+    {
+      xcoeff = xcoeff_;
+      return *this;
+    }
+
+    ViewportWScalingNV & setYcoeff( float ycoeff_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ycoeff = ycoeff_;
+      return *this;
+    }
+
+    operator VkViewportWScalingNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkViewportWScalingNV*>( this );
+    }
+
+    operator VkViewportWScalingNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkViewportWScalingNV*>( this );
+    }
+
+    bool operator==( ViewportWScalingNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( xcoeff == rhs.xcoeff )
+          && ( ycoeff == rhs.ycoeff );
+    }
+
+    bool operator!=( ViewportWScalingNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    float xcoeff;
+    float ycoeff;
+  };
+  static_assert( sizeof( ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ViewportWScalingNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineViewportWScalingStateCreateInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ = 0,
+                                                                    uint32_t viewportCount_ = 0,
+                                                                    const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : viewportWScalingEnable( viewportWScalingEnable_ )
+      , viewportCount( viewportCount_ )
+      , pViewportWScalings( pViewportWScalings_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV ) - offsetof( PipelineViewportWScalingStateCreateInfoNV, pNext ) );
+      return *this;
+    }
+
+    PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineViewportWScalingStateCreateInfoNV& operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    PipelineViewportWScalingStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineViewportWScalingStateCreateInfoNV & setViewportWScalingEnable( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportWScalingEnable = viewportWScalingEnable_;
+      return *this;
+    }
+
+    PipelineViewportWScalingStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportCount = viewportCount_;
+      return *this;
+    }
+
+    PipelineViewportWScalingStateCreateInfoNV & setPViewportWScalings( const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewportWScalings = pViewportWScalings_;
+      return *this;
+    }
+
+    operator VkPipelineViewportWScalingStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineViewportWScalingStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineViewportWScalingStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineViewportWScalingStateCreateInfoNV*>( this );
+    }
+
+    bool operator==( PipelineViewportWScalingStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( viewportWScalingEnable == rhs.viewportWScalingEnable )
+          && ( viewportCount == rhs.viewportCount )
+          && ( pViewportWScalings == rhs.pViewportWScalings );
+    }
+
+    bool operator!=( PipelineViewportWScalingStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportWScalingStateCreateInfoNV;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable;
+    uint32_t viewportCount;
+    const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings;
+  };
+  static_assert( sizeof( PipelineViewportWScalingStateCreateInfoNV ) == sizeof( VkPipelineViewportWScalingStateCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineViewportWScalingStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
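+  // Illustrative usage sketch (not part of the generated header): with
+  // VK_NV_clip_space_w_scaling enabled, per-viewport W-scaling coefficients
+  // are chained into vk::PipelineViewportStateCreateInfo::pNext;
+  // 'viewportState' is a placeholder owned by the caller.
+  //
+  //   vk::ViewportWScalingNV wScaling( 0.5f, 0.5f );
+  //   vk::PipelineViewportWScalingStateCreateInfoNV wScalingState( VK_TRUE, 1, &wScaling );
+  //   viewportState.setPNext( &wScalingState );
+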
+#ifdef VK_USE_PLATFORM_GGP
+
+  struct PresentFrameTokenGGP
+  {
+    VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( GgpFrameToken frameToken_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : frameToken( frameToken_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP & operator=( VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP ) - offsetof( PresentFrameTokenGGP, pNext ) );
+      return *this;
+    }
+
+    PresentFrameTokenGGP( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PresentFrameTokenGGP& operator=( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP const *>(&rhs);
+      return *this;
+    }
+
+    PresentFrameTokenGGP & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PresentFrameTokenGGP & setFrameToken( GgpFrameToken frameToken_ ) VULKAN_HPP_NOEXCEPT
+    {
+      frameToken = frameToken_;
+      return *this;
+    }
+
+    operator VkPresentFrameTokenGGP const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPresentFrameTokenGGP*>( this );
+    }
+
+    operator VkPresentFrameTokenGGP &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPresentFrameTokenGGP*>( this );
+    }
+
+    bool operator==( PresentFrameTokenGGP const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( frameToken == rhs.frameToken );
+    }
+
+    bool operator!=( PresentFrameTokenGGP const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentFrameTokenGGP;
+    const void* pNext = nullptr;
+    GgpFrameToken frameToken;
+  };
+  static_assert( sizeof( PresentFrameTokenGGP ) == sizeof( VkPresentFrameTokenGGP ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PresentFrameTokenGGP>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  struct PresentInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR PresentInfoKHR( uint32_t waitSemaphoreCount_ = 0,
+                                         const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = nullptr,
+                                         uint32_t swapchainCount_ = 0,
+                                         const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains_ = nullptr,
+                                         const uint32_t* pImageIndices_ = nullptr,
+                                         VULKAN_HPP_NAMESPACE::Result* pResults_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : waitSemaphoreCount( waitSemaphoreCount_ )
+      , pWaitSemaphores( pWaitSemaphores_ )
+      , swapchainCount( swapchainCount_ )
+      , pSwapchains( pSwapchains_ )
+      , pImageIndices( pImageIndices_ )
+      , pResults( pResults_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PresentInfoKHR & operator=( VULKAN_HPP_NAMESPACE::PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PresentInfoKHR ) - offsetof( PresentInfoKHR, pNext ) );
+      return *this;
+    }
+
+    PresentInfoKHR( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PresentInfoKHR& operator=( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    PresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PresentInfoKHR & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreCount = waitSemaphoreCount_;
+      return *this;
+    }
+
+    PresentInfoKHR & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pWaitSemaphores = pWaitSemaphores_;
+      return *this;
+    }
+
+    PresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = swapchainCount_;
+      return *this;
+    }
+
+    PresentInfoKHR & setPSwapchains( const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSwapchains = pSwapchains_;
+      return *this;
+    }
+
+    PresentInfoKHR & setPImageIndices( const uint32_t* pImageIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pImageIndices = pImageIndices_;
+      return *this;
+    }
+
+    PresentInfoKHR & setPResults( VULKAN_HPP_NAMESPACE::Result* pResults_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pResults = pResults_;
+      return *this;
+    }
+
+    operator VkPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPresentInfoKHR*>( this );
+    }
+
+    operator VkPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPresentInfoKHR*>( this );
+    }
+
+    bool operator==( PresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+          && ( pWaitSemaphores == rhs.pWaitSemaphores )
+          && ( swapchainCount == rhs.swapchainCount )
+          && ( pSwapchains == rhs.pSwapchains )
+          && ( pImageIndices == rhs.pImageIndices )
+          && ( pResults == rhs.pResults );
+    }
+
+    bool operator!=( PresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentInfoKHR;
+    const void* pNext = nullptr;
+    uint32_t waitSemaphoreCount;
+    const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores;
+    uint32_t swapchainCount;
+    const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains;
+    const uint32_t* pImageIndices;
+    VULKAN_HPP_NAMESPACE::Result* pResults;
+  };
+  static_assert( sizeof( PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
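+  // Illustrative usage sketch (not part of the generated header): the setters
+  // return *this, so a present request can be built fluently and handed to
+  // vk::Queue::presentKHR; the semaphore, swapchain and image index are
+  // placeholders owned by the caller.
+  //
+  //   vk::PresentInfoKHR presentInfo = vk::PresentInfoKHR()
+  //       .setWaitSemaphoreCount( 1 )
+  //       .setPWaitSemaphores( &renderFinishedSemaphore )
+  //       .setSwapchainCount( 1 )
+  //       .setPSwapchains( &swapchain )
+  //       .setPImageIndices( &imageIndex );
+  //   presentQueue.presentKHR( presentInfo );
+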
+  struct RectLayerKHR
+  {
+    VULKAN_HPP_CONSTEXPR RectLayerKHR( VULKAN_HPP_NAMESPACE::Offset2D offset_ = VULKAN_HPP_NAMESPACE::Offset2D(),
+                                       VULKAN_HPP_NAMESPACE::Extent2D extent_ = VULKAN_HPP_NAMESPACE::Extent2D(),
+                                       uint32_t layer_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : offset( offset_ )
+      , extent( extent_ )
+      , layer( layer_ )
+    {}
+
+    explicit RectLayerKHR( Rect2D const& rect2D,
+                           uint32_t layer_ = 0 )
+      : offset( rect2D.offset )
+      , extent( rect2D.extent )
+      , layer( layer_ )
+    {}
+
+    RectLayerKHR( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    RectLayerKHR& operator=( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RectLayerKHR const *>(&rhs);
+      return *this;
+    }
+
+    RectLayerKHR & setOffset( VULKAN_HPP_NAMESPACE::Offset2D offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    RectLayerKHR & setExtent( VULKAN_HPP_NAMESPACE::Extent2D extent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extent = extent_;
+      return *this;
+    }
+
+    RectLayerKHR & setLayer( uint32_t layer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layer = layer_;
+      return *this;
+    }
+
+    operator VkRectLayerKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRectLayerKHR*>( this );
+    }
+
+    operator VkRectLayerKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRectLayerKHR*>( this );
+    }
+
+    bool operator==( RectLayerKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( offset == rhs.offset )
+          && ( extent == rhs.extent )
+          && ( layer == rhs.layer );
+    }
+
+    bool operator!=( RectLayerKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::Offset2D offset;
+    VULKAN_HPP_NAMESPACE::Extent2D extent;
+    uint32_t layer;
+  };
+  static_assert( sizeof( RectLayerKHR ) == sizeof( VkRectLayerKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RectLayerKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PresentRegionKHR
+  {
+    VULKAN_HPP_CONSTEXPR PresentRegionKHR( uint32_t rectangleCount_ = 0,
+                                           const VULKAN_HPP_NAMESPACE::RectLayerKHR* pRectangles_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : rectangleCount( rectangleCount_ )
+      , pRectangles( pRectangles_ )
+    {}
+
+    PresentRegionKHR( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PresentRegionKHR& operator=( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentRegionKHR const *>(&rhs);
+      return *this;
+    }
+
+    PresentRegionKHR & setRectangleCount( uint32_t rectangleCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rectangleCount = rectangleCount_;
+      return *this;
+    }
+
+    PresentRegionKHR & setPRectangles( const VULKAN_HPP_NAMESPACE::RectLayerKHR* pRectangles_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRectangles = pRectangles_;
+      return *this;
+    }
+
+    operator VkPresentRegionKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPresentRegionKHR*>( this );
+    }
+
+    operator VkPresentRegionKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPresentRegionKHR*>( this );
+    }
+
+    bool operator==( PresentRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( rectangleCount == rhs.rectangleCount )
+          && ( pRectangles == rhs.pRectangles );
+    }
+
+    bool operator!=( PresentRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint32_t rectangleCount;
+    const VULKAN_HPP_NAMESPACE::RectLayerKHR* pRectangles;
+  };
+  static_assert( sizeof( PresentRegionKHR ) == sizeof( VkPresentRegionKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PresentRegionKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PresentRegionsKHR
+  {
+    VULKAN_HPP_CONSTEXPR PresentRegionsKHR( uint32_t swapchainCount_ = 0,
+                                            const VULKAN_HPP_NAMESPACE::PresentRegionKHR* pRegions_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : swapchainCount( swapchainCount_ )
+      , pRegions( pRegions_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PresentRegionsKHR & operator=( VULKAN_HPP_NAMESPACE::PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PresentRegionsKHR ) - offsetof( PresentRegionsKHR, pNext ) );
+      return *this;
+    }
+
+    PresentRegionsKHR( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PresentRegionsKHR& operator=( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentRegionsKHR const *>(&rhs);
+      return *this;
+    }
+
+    PresentRegionsKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PresentRegionsKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = swapchainCount_;
+      return *this;
+    }
+
+    PresentRegionsKHR & setPRegions( const VULKAN_HPP_NAMESPACE::PresentRegionKHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRegions = pRegions_;
+      return *this;
+    }
+
+    operator VkPresentRegionsKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPresentRegionsKHR*>( this );
+    }
+
+    operator VkPresentRegionsKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPresentRegionsKHR*>( this );
+    }
+
+    bool operator==( PresentRegionsKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchainCount == rhs.swapchainCount )
+          && ( pRegions == rhs.pRegions );
+    }
+
+    bool operator!=( PresentRegionsKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentRegionsKHR;
+    const void* pNext = nullptr;
+    uint32_t swapchainCount;
+    const VULKAN_HPP_NAMESPACE::PresentRegionKHR* pRegions;
+  };
+  static_assert( sizeof( PresentRegionsKHR ) == sizeof( VkPresentRegionsKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PresentRegionsKHR>::value, "struct wrapper is not a standard layout!" );
+
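+  // Illustrative usage sketch (not part of the generated header): with
+  // VK_KHR_incremental_present enabled, the dirty rectangles for each
+  // swapchain are chained into vk::PresentInfoKHR::pNext; 'presentInfo' is a
+  // placeholder built by the caller.
+  //
+  //   vk::RectLayerKHR dirtyRect( vk::Offset2D( 0, 0 ), vk::Extent2D( 64, 64 ), 0 );
+  //   vk::PresentRegionKHR region( 1, &dirtyRect );
+  //   vk::PresentRegionsKHR regions( 1, &region );
+  //   presentInfo.setPNext( &regions );
+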
+  struct PresentTimeGOOGLE
+  {
+    VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( uint32_t presentID_ = 0,
+                                            uint64_t desiredPresentTime_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : presentID( presentID_ )
+      , desiredPresentTime( desiredPresentTime_ )
+    {}
+
+    PresentTimeGOOGLE( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PresentTimeGOOGLE& operator=( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE const *>(&rhs);
+      return *this;
+    }
+
+    PresentTimeGOOGLE & setPresentID( uint32_t presentID_ ) VULKAN_HPP_NOEXCEPT
+    {
+      presentID = presentID_;
+      return *this;
+    }
+
+    PresentTimeGOOGLE & setDesiredPresentTime( uint64_t desiredPresentTime_ ) VULKAN_HPP_NOEXCEPT
+    {
+      desiredPresentTime = desiredPresentTime_;
+      return *this;
+    }
+
+    operator VkPresentTimeGOOGLE const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPresentTimeGOOGLE*>( this );
+    }
+
+    operator VkPresentTimeGOOGLE &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPresentTimeGOOGLE*>( this );
+    }
+
+    bool operator==( PresentTimeGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( presentID == rhs.presentID )
+          && ( desiredPresentTime == rhs.desiredPresentTime );
+    }
+
+    bool operator!=( PresentTimeGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint32_t presentID;
+    uint64_t desiredPresentTime;
+  };
+  static_assert( sizeof( PresentTimeGOOGLE ) == sizeof( VkPresentTimeGOOGLE ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PresentTimeGOOGLE>::value, "struct wrapper is not a standard layout!" );
+
+  struct PresentTimesInfoGOOGLE
+  {
+    VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( uint32_t swapchainCount_ = 0,
+                                                 const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE* pTimes_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : swapchainCount( swapchainCount_ )
+      , pTimes( pTimes_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE & operator=( VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE ) - offsetof( PresentTimesInfoGOOGLE, pNext ) );
+      return *this;
+    }
+
+    PresentTimesInfoGOOGLE( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PresentTimesInfoGOOGLE& operator=( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE const *>(&rhs);
+      return *this;
+    }
+
+    PresentTimesInfoGOOGLE & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PresentTimesInfoGOOGLE & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = swapchainCount_;
+      return *this;
+    }
+
+    PresentTimesInfoGOOGLE & setPTimes( const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE* pTimes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTimes = pTimes_;
+      return *this;
+    }
+
+    operator VkPresentTimesInfoGOOGLE const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPresentTimesInfoGOOGLE*>( this );
+    }
+
+    operator VkPresentTimesInfoGOOGLE &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPresentTimesInfoGOOGLE*>( this );
+    }
+
+    bool operator==( PresentTimesInfoGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchainCount == rhs.swapchainCount )
+          && ( pTimes == rhs.pTimes );
+    }
+
+    bool operator!=( PresentTimesInfoGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentTimesInfoGOOGLE;
+    const void* pNext = nullptr;
+    uint32_t swapchainCount;
+    const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE* pTimes;
+  };
+  static_assert( sizeof( PresentTimesInfoGOOGLE ) == sizeof( VkPresentTimesInfoGOOGLE ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PresentTimesInfoGOOGLE>::value, "struct wrapper is not a standard layout!" );
+
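+  // Illustrative usage sketch (not part of the generated header): with
+  // VK_GOOGLE_display_timing enabled, a desired presentation time per
+  // swapchain is chained into vk::PresentInfoKHR::pNext; 'presentId',
+  // 'desiredTimeNs' and 'presentInfo' are placeholders owned by the caller.
+  //
+  //   vk::PresentTimeGOOGLE time( presentId, desiredTimeNs );
+  //   vk::PresentTimesInfoGOOGLE timesInfo( 1, &time );
+  //   presentInfo.setPNext( &timesInfo );
+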
+  struct ProtectedSubmitInfo
+  {
+    VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : protectedSubmit( protectedSubmit_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo & operator=( VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo ) - offsetof( ProtectedSubmitInfo, pNext ) );
+      return *this;
+    }
+
+    ProtectedSubmitInfo( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ProtectedSubmitInfo& operator=( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo const *>(&rhs);
+      return *this;
+    }
+
+    ProtectedSubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ProtectedSubmitInfo & setProtectedSubmit( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ ) VULKAN_HPP_NOEXCEPT
+    {
+      protectedSubmit = protectedSubmit_;
+      return *this;
+    }
+
+    operator VkProtectedSubmitInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkProtectedSubmitInfo*>( this );
+    }
+
+    operator VkProtectedSubmitInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkProtectedSubmitInfo*>( this );
+    }
+
+    bool operator==( ProtectedSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( protectedSubmit == rhs.protectedSubmit );
+    }
+
+    bool operator!=( ProtectedSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eProtectedSubmitInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit;
+  };
+  static_assert( sizeof( ProtectedSubmitInfo ) == sizeof( VkProtectedSubmitInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ProtectedSubmitInfo>::value, "struct wrapper is not a standard layout!" );
+
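+  // Illustrative usage sketch (not part of the generated header): marking a
+  // queue submission as protected (Vulkan 1.1) by chaining this struct into
+  // vk::SubmitInfo::pNext; 'submitInfo' is a placeholder built by the caller.
+  //
+  //   vk::ProtectedSubmitInfo protectedInfo( VK_TRUE );
+  //   submitInfo.setPNext( &protectedInfo );
+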
+  struct QueryPoolCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ = VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags(),
+                                              VULKAN_HPP_NAMESPACE::QueryType queryType_ = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion,
+                                              uint32_t queryCount_ = 0,
+                                              VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags() ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , queryType( queryType_ )
+      , queryCount( queryCount_ )
+      , pipelineStatistics( pipelineStatistics_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & operator=( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo ) - offsetof( QueryPoolCreateInfo, pNext ) );
+      return *this;
+    }
+
+    QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    QueryPoolCreateInfo& operator=( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    QueryPoolCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    QueryPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    QueryPoolCreateInfo & setQueryType( VULKAN_HPP_NAMESPACE::QueryType queryType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queryType = queryType_;
+      return *this;
+    }
+
+    QueryPoolCreateInfo & setQueryCount( uint32_t queryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queryCount = queryCount_;
+      return *this;
+    }
+
+    QueryPoolCreateInfo & setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineStatistics = pipelineStatistics_;
+      return *this;
+    }
+
+    operator VkQueryPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueryPoolCreateInfo*>( this );
+    }
+
+    operator VkQueryPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueryPoolCreateInfo*>( this );
+    }
+
+    bool operator==( QueryPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( queryType == rhs.queryType )
+          && ( queryCount == rhs.queryCount )
+          && ( pipelineStatistics == rhs.pipelineStatistics );
+    }
+
+    bool operator!=( QueryPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags;
+    VULKAN_HPP_NAMESPACE::QueryType queryType;
+    uint32_t queryCount;
+    VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics;
+  };
+  static_assert( sizeof( QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<QueryPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
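+  // Illustrative usage sketch (not part of the generated header): creating a
+  // small timestamp query pool through vk::Device::createQueryPool; 'device'
+  // is a placeholder handle owned by the caller.
+  //
+  //   vk::QueryPoolCreateInfo queryPoolInfo = vk::QueryPoolCreateInfo()
+  //       .setQueryType( vk::QueryType::eTimestamp )
+  //       .setQueryCount( 2 );
+  //   vk::QueryPool queryPool = device.createQueryPool( queryPoolInfo );
+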
+  struct QueryPoolCreateInfoINTEL
+  {
+    VULKAN_HPP_CONSTEXPR QueryPoolCreateInfoINTEL( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual ) VULKAN_HPP_NOEXCEPT
+      : performanceCountersSampling( performanceCountersSampling_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::QueryPoolCreateInfoINTEL & operator=( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfoINTEL ) - offsetof( QueryPoolCreateInfoINTEL, pNext ) );
+      return *this;
+    }
+
+    QueryPoolCreateInfoINTEL( VkQueryPoolCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    QueryPoolCreateInfoINTEL& operator=( VkQueryPoolCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfoINTEL const *>(&rhs);
+      return *this;
+    }
+
+    QueryPoolCreateInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    QueryPoolCreateInfoINTEL & setPerformanceCountersSampling( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ ) VULKAN_HPP_NOEXCEPT
+    {
+      performanceCountersSampling = performanceCountersSampling_;
+      return *this;
+    }
+
+    operator VkQueryPoolCreateInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueryPoolCreateInfoINTEL*>( this );
+    }
+
+    operator VkQueryPoolCreateInfoINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueryPoolCreateInfoINTEL*>( this );
+    }
+
+    bool operator==( QueryPoolCreateInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( performanceCountersSampling == rhs.performanceCountersSampling );
+    }
+
+    bool operator!=( QueryPoolCreateInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfoINTEL;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling;
+  };
+  static_assert( sizeof( QueryPoolCreateInfoINTEL ) == sizeof( VkQueryPoolCreateInfoINTEL ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<QueryPoolCreateInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+
+  struct QueryPoolPerformanceCreateInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_ = 0,
+                                                            uint32_t counterIndexCount_ = 0,
+                                                            const uint32_t* pCounterIndices_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : queueFamilyIndex( queueFamilyIndex_ )
+      , counterIndexCount( counterIndexCount_ )
+      , pCounterIndices( pCounterIndices_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR ) - offsetof( QueryPoolPerformanceCreateInfoKHR, pNext ) );
+      return *this;
+    }
+
+    QueryPoolPerformanceCreateInfoKHR( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    QueryPoolPerformanceCreateInfoKHR& operator=( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    QueryPoolPerformanceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    QueryPoolPerformanceCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndex = queueFamilyIndex_;
+      return *this;
+    }
+
+    QueryPoolPerformanceCreateInfoKHR & setCounterIndexCount( uint32_t counterIndexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      counterIndexCount = counterIndexCount_;
+      return *this;
+    }
+
+    QueryPoolPerformanceCreateInfoKHR & setPCounterIndices( const uint32_t* pCounterIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCounterIndices = pCounterIndices_;
+      return *this;
+    }
+
+    operator VkQueryPoolPerformanceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR*>( this );
+    }
+
+    operator VkQueryPoolPerformanceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueryPoolPerformanceCreateInfoKHR*>( this );
+    }
+
+    bool operator==( QueryPoolPerformanceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( queueFamilyIndex == rhs.queueFamilyIndex )
+          && ( counterIndexCount == rhs.counterIndexCount )
+          && ( pCounterIndices == rhs.pCounterIndices );
+    }
+
+    bool operator!=( QueryPoolPerformanceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceCreateInfoKHR;
+    const void* pNext = nullptr;
+    uint32_t queueFamilyIndex;
+    uint32_t counterIndexCount;
+    const uint32_t* pCounterIndices;
+  };
+  static_assert( sizeof( QueryPoolPerformanceCreateInfoKHR ) == sizeof( VkQueryPoolPerformanceCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<QueryPoolPerformanceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
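+  // Illustrative usage sketch (not part of the generated header): with
+  // VK_KHR_performance_query enabled, this struct is chained into
+  // vk::QueryPoolCreateInfo::pNext and the pool uses ePerformanceQueryKHR;
+  // 'queueFamilyIndex', 'counterCount', 'counterIndices' and 'device' are
+  // placeholders owned by the caller.
+  //
+  //   vk::QueryPoolPerformanceCreateInfoKHR perfInfo( queueFamilyIndex, counterCount, counterIndices );
+  //   vk::QueryPoolCreateInfo poolInfo;
+  //   poolInfo.setPNext( &perfInfo )
+  //           .setQueryType( vk::QueryType::ePerformanceQueryKHR )
+  //           .setQueryCount( 1 );
+  //   vk::QueryPool perfPool = device.createQueryPool( poolInfo );
+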
+  struct QueueFamilyCheckpointPropertiesNV
+  {
+    QueueFamilyCheckpointPropertiesNV( VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask_ = VULKAN_HPP_NAMESPACE::PipelineStageFlags() ) VULKAN_HPP_NOEXCEPT
+      : checkpointExecutionStageMask( checkpointExecutionStageMask_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV & operator=( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV ) - offsetof( QueueFamilyCheckpointPropertiesNV, pNext ) );
+      return *this;
+    }
+
+    QueueFamilyCheckpointPropertiesNV( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    QueueFamilyCheckpointPropertiesNV& operator=( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV const *>(&rhs);
+      return *this;
+    }
+
+    operator VkQueueFamilyCheckpointPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueueFamilyCheckpointPropertiesNV*>( this );
+    }
+
+    operator VkQueueFamilyCheckpointPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueueFamilyCheckpointPropertiesNV*>( this );
+    }
+
+    bool operator==( QueueFamilyCheckpointPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask );
+    }
+
+    bool operator!=( QueueFamilyCheckpointPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointPropertiesNV;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask;
+  };
+  static_assert( sizeof( QueueFamilyCheckpointPropertiesNV ) == sizeof( VkQueueFamilyCheckpointPropertiesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<QueueFamilyCheckpointPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct QueueFamilyProperties
+  {
+    QueueFamilyProperties( VULKAN_HPP_NAMESPACE::QueueFlags queueFlags_ = VULKAN_HPP_NAMESPACE::QueueFlags(),
+                           uint32_t queueCount_ = 0,
+                           uint32_t timestampValidBits_ = 0,
+                           VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity_ = VULKAN_HPP_NAMESPACE::Extent3D() ) VULKAN_HPP_NOEXCEPT
+      : queueFlags( queueFlags_ )
+      , queueCount( queueCount_ )
+      , timestampValidBits( timestampValidBits_ )
+      , minImageTransferGranularity( minImageTransferGranularity_ )
+    {}
+
+    QueueFamilyProperties( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    QueueFamilyProperties& operator=( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkQueueFamilyProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueueFamilyProperties*>( this );
+    }
+
+    operator VkQueueFamilyProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueueFamilyProperties*>( this );
+    }
+
+    bool operator==( QueueFamilyProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( queueFlags == rhs.queueFlags )
+          && ( queueCount == rhs.queueCount )
+          && ( timestampValidBits == rhs.timestampValidBits )
+          && ( minImageTransferGranularity == rhs.minImageTransferGranularity );
+    }
+
+    bool operator!=( QueueFamilyProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::QueueFlags queueFlags;
+    uint32_t queueCount;
+    uint32_t timestampValidBits;
+    VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity;
+  };
+  static_assert( sizeof( QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<QueueFamilyProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct QueueFamilyProperties2
+  {
+    QueueFamilyProperties2( VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties_ = VULKAN_HPP_NAMESPACE::QueueFamilyProperties() ) VULKAN_HPP_NOEXCEPT
+      : queueFamilyProperties( queueFamilyProperties_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 & operator=( VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 ) - offsetof( QueueFamilyProperties2, pNext ) );
+      return *this;
+    }
+
+    QueueFamilyProperties2( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    QueueFamilyProperties2& operator=( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 const *>(&rhs);
+      return *this;
+    }
+
+    operator VkQueueFamilyProperties2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueueFamilyProperties2*>( this );
+    }
+
+    operator VkQueueFamilyProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueueFamilyProperties2*>( this );
+    }
+
+    bool operator==( QueueFamilyProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( queueFamilyProperties == rhs.queueFamilyProperties );
+    }
+
+    bool operator!=( QueueFamilyProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyProperties2;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties;
+  };
+  static_assert( sizeof( QueueFamilyProperties2 ) == sizeof( VkQueueFamilyProperties2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<QueueFamilyProperties2>::value, "struct wrapper is not a standard layout!" );
+
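+  // Illustrative usage sketch (not part of the generated header): enumerating
+  // queue families through the Vulkan 1.1 entry point; 'physicalDevice' is a
+  // placeholder handle owned by the caller.
+  //
+  //   std::vector<vk::QueueFamilyProperties2> families =
+  //       physicalDevice.getQueueFamilyProperties2();
+  //   for ( auto const & f : families )
+  //   {
+  //     if ( f.queueFamilyProperties.queueFlags & vk::QueueFlagBits::eGraphics )
+  //     {
+  //       // graphics-capable family found
+  //     }
+  //   }
+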
+  struct RayTracingShaderGroupCreateInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV::eGeneral,
+                                                            uint32_t generalShader_ = 0,
+                                                            uint32_t closestHitShader_ = 0,
+                                                            uint32_t anyHitShader_ = 0,
+                                                            uint32_t intersectionShader_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : type( type_ )
+      , generalShader( generalShader_ )
+      , closestHitShader( closestHitShader_ )
+      , anyHitShader( anyHitShader_ )
+      , intersectionShader( intersectionShader_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV ) - offsetof( RayTracingShaderGroupCreateInfoNV, pNext ) );
+      return *this;
+    }
+
+    RayTracingShaderGroupCreateInfoNV( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    RayTracingShaderGroupCreateInfoNV& operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    RayTracingShaderGroupCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RayTracingShaderGroupCreateInfoNV & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    RayTracingShaderGroupCreateInfoNV & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      generalShader = generalShader_;
+      return *this;
+    }
+
+    RayTracingShaderGroupCreateInfoNV & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      closestHitShader = closestHitShader_;
+      return *this;
+    }
+
+    RayTracingShaderGroupCreateInfoNV & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      anyHitShader = anyHitShader_;
+      return *this;
+    }
+
+    RayTracingShaderGroupCreateInfoNV & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      intersectionShader = intersectionShader_;
+      return *this;
+    }
+
+    operator VkRayTracingShaderGroupCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRayTracingShaderGroupCreateInfoNV*>( this );
+    }
+
+    operator VkRayTracingShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRayTracingShaderGroupCreateInfoNV*>( this );
+    }
+
+    bool operator==( RayTracingShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( type == rhs.type )
+          && ( generalShader == rhs.generalShader )
+          && ( closestHitShader == rhs.closestHitShader )
+          && ( anyHitShader == rhs.anyHitShader )
+          && ( intersectionShader == rhs.intersectionShader );
+    }
+
+    bool operator!=( RayTracingShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoNV;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV type;
+    uint32_t generalShader;
+    uint32_t closestHitShader;
+    uint32_t anyHitShader;
+    uint32_t intersectionShader;
+  };
+  static_assert( sizeof( RayTracingShaderGroupCreateInfoNV ) == sizeof( VkRayTracingShaderGroupCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RayTracingShaderGroupCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct RayTracingPipelineCreateInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = VULKAN_HPP_NAMESPACE::PipelineCreateFlags(),
+                                                         uint32_t stageCount_ = 0,
+                                                         const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = nullptr,
+                                                         uint32_t groupCount_ = 0,
+                                                         const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups_ = nullptr,
+                                                         uint32_t maxRecursionDepth_ = 0,
+                                                         VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = VULKAN_HPP_NAMESPACE::PipelineLayout(),
+                                                         VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = VULKAN_HPP_NAMESPACE::Pipeline(),
+                                                         int32_t basePipelineIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , stageCount( stageCount_ )
+      , pStages( pStages_ )
+      , groupCount( groupCount_ )
+      , pGroups( pGroups_ )
+      , maxRecursionDepth( maxRecursionDepth_ )
+      , layout( layout_ )
+      , basePipelineHandle( basePipelineHandle_ )
+      , basePipelineIndex( basePipelineIndex_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV ) - offsetof( RayTracingPipelineCreateInfoNV, pNext ) );
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    RayTracingPipelineCreateInfoNV& operator=( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageCount = stageCount_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStages = pStages_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      groupCount = groupCount_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV & setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pGroups = pGroups_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV & setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxRecursionDepth = maxRecursionDepth_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layout = layout_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineHandle = basePipelineHandle_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineIndex = basePipelineIndex_;
+      return *this;
+    }
+
+    operator VkRayTracingPipelineCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( this );
+    }
+
+    operator VkRayTracingPipelineCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRayTracingPipelineCreateInfoNV*>( this );
+    }
+
+    bool operator==( RayTracingPipelineCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( stageCount == rhs.stageCount )
+          && ( pStages == rhs.pStages )
+          && ( groupCount == rhs.groupCount )
+          && ( pGroups == rhs.pGroups )
+          && ( maxRecursionDepth == rhs.maxRecursionDepth )
+          && ( layout == rhs.layout )
+          && ( basePipelineHandle == rhs.basePipelineHandle )
+          && ( basePipelineIndex == rhs.basePipelineIndex );
+    }
+
+    bool operator!=( RayTracingPipelineCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoNV;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags;
+    uint32_t stageCount;
+    const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages;
+    uint32_t groupCount;
+    const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups;
+    uint32_t maxRecursionDepth;
+    VULKAN_HPP_NAMESPACE::PipelineLayout layout;
+    VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle;
+    int32_t basePipelineIndex;
+  };
+  static_assert( sizeof( RayTracingPipelineCreateInfoNV ) == sizeof( VkRayTracingPipelineCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RayTracingPipelineCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct RefreshCycleDurationGOOGLE
+  {
+    RefreshCycleDurationGOOGLE( uint64_t refreshDuration_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : refreshDuration( refreshDuration_ )
+    {}
+
+    RefreshCycleDurationGOOGLE( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    RefreshCycleDurationGOOGLE& operator=( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE const *>(&rhs);
+      return *this;
+    }
+
+    operator VkRefreshCycleDurationGOOGLE const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRefreshCycleDurationGOOGLE*>( this );
+    }
+
+    operator VkRefreshCycleDurationGOOGLE &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRefreshCycleDurationGOOGLE*>( this );
+    }
+
+    bool operator==( RefreshCycleDurationGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( refreshDuration == rhs.refreshDuration );
+    }
+
+    bool operator!=( RefreshCycleDurationGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint64_t refreshDuration;
+  };
+  static_assert( sizeof( RefreshCycleDurationGOOGLE ) == sizeof( VkRefreshCycleDurationGOOGLE ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RefreshCycleDurationGOOGLE>::value, "struct wrapper is not a standard layout!" );
+
+  struct RenderPassAttachmentBeginInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfoKHR( uint32_t attachmentCount_ = 0,
+                                                           const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : attachmentCount( attachmentCount_ )
+      , pAttachments( pAttachments_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfoKHR & operator=( VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfoKHR ) - offsetof( RenderPassAttachmentBeginInfoKHR, pNext ) );
+      return *this;
+    }
+
+    RenderPassAttachmentBeginInfoKHR( VkRenderPassAttachmentBeginInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    RenderPassAttachmentBeginInfoKHR& operator=( VkRenderPassAttachmentBeginInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    RenderPassAttachmentBeginInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassAttachmentBeginInfoKHR & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = attachmentCount_;
+      return *this;
+    }
+
+    RenderPassAttachmentBeginInfoKHR & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachments = pAttachments_;
+      return *this;
+    }
+
+    operator VkRenderPassAttachmentBeginInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassAttachmentBeginInfoKHR*>( this );
+    }
+
+    operator VkRenderPassAttachmentBeginInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassAttachmentBeginInfoKHR*>( this );
+    }
+
+    bool operator==( RenderPassAttachmentBeginInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( attachmentCount == rhs.attachmentCount )
+          && ( pAttachments == rhs.pAttachments );
+    }
+
+    bool operator!=( RenderPassAttachmentBeginInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassAttachmentBeginInfoKHR;
+    const void* pNext = nullptr;
+    uint32_t attachmentCount;
+    const VULKAN_HPP_NAMESPACE::ImageView* pAttachments;
+  };
+  static_assert( sizeof( RenderPassAttachmentBeginInfoKHR ) == sizeof( VkRenderPassAttachmentBeginInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassAttachmentBeginInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct RenderPassBeginInfo
+  {
+    VULKAN_HPP_CONSTEXPR RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = VULKAN_HPP_NAMESPACE::RenderPass(),
+                                              VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = VULKAN_HPP_NAMESPACE::Framebuffer(),
+                                              VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = VULKAN_HPP_NAMESPACE::Rect2D(),
+                                              uint32_t clearValueCount_ = 0,
+                                              const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : renderPass( renderPass_ )
+      , framebuffer( framebuffer_ )
+      , renderArea( renderArea_ )
+      , clearValueCount( clearValueCount_ )
+      , pClearValues( pClearValues_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & operator=( VULKAN_HPP_NAMESPACE::RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassBeginInfo ) - offsetof( RenderPassBeginInfo, pNext ) );
+      return *this;
+    }
+
+    RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    RenderPassBeginInfo& operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo const *>(&rhs);
+      return *this;
+    }
+
+    RenderPassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassBeginInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      renderPass = renderPass_;
+      return *this;
+    }
+
+    RenderPassBeginInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      framebuffer = framebuffer_;
+      return *this;
+    }
+
+    RenderPassBeginInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D renderArea_ ) VULKAN_HPP_NOEXCEPT
+    {
+      renderArea = renderArea_;
+      return *this;
+    }
+
+    RenderPassBeginInfo & setClearValueCount( uint32_t clearValueCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      clearValueCount = clearValueCount_;
+      return *this;
+    }
+
+    RenderPassBeginInfo & setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pClearValues = pClearValues_;
+      return *this;
+    }
+
+    operator VkRenderPassBeginInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassBeginInfo*>( this );
+    }
+
+    operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassBeginInfo*>( this );
+    }
+
+    bool operator==( RenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( renderPass == rhs.renderPass )
+          && ( framebuffer == rhs.framebuffer )
+          && ( renderArea == rhs.renderArea )
+          && ( clearValueCount == rhs.clearValueCount )
+          && ( pClearValues == rhs.pClearValues );
+    }
+
+    bool operator!=( RenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassBeginInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
+    VULKAN_HPP_NAMESPACE::Rect2D renderArea;
+    uint32_t clearValueCount;
+    const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues;
+  };
+  static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct SubpassDescription
+  {
+    VULKAN_HPP_CONSTEXPR SubpassDescription( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags(),
+                                             VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
+                                             uint32_t inputAttachmentCount_ = 0,
+                                             const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments_ = nullptr,
+                                             uint32_t colorAttachmentCount_ = 0,
+                                             const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments_ = nullptr,
+                                             const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments_ = nullptr,
+                                             const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment_ = nullptr,
+                                             uint32_t preserveAttachmentCount_ = 0,
+                                             const uint32_t* pPreserveAttachments_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , pipelineBindPoint( pipelineBindPoint_ )
+      , inputAttachmentCount( inputAttachmentCount_ )
+      , pInputAttachments( pInputAttachments_ )
+      , colorAttachmentCount( colorAttachmentCount_ )
+      , pColorAttachments( pColorAttachments_ )
+      , pResolveAttachments( pResolveAttachments_ )
+      , pDepthStencilAttachment( pDepthStencilAttachment_ )
+      , preserveAttachmentCount( preserveAttachmentCount_ )
+      , pPreserveAttachments( pPreserveAttachments_ )
+    {}
+
+    SubpassDescription( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SubpassDescription& operator=( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescription const *>(&rhs);
+      return *this;
+    }
+
+    SubpassDescription & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    SubpassDescription & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineBindPoint = pipelineBindPoint_;
+      return *this;
+    }
+
+    SubpassDescription & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inputAttachmentCount = inputAttachmentCount_;
+      return *this;
+    }
+
+    SubpassDescription & setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInputAttachments = pInputAttachments_;
+      return *this;
+    }
+
+    SubpassDescription & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachmentCount = colorAttachmentCount_;
+      return *this;
+    }
+
+    SubpassDescription & setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pColorAttachments = pColorAttachments_;
+      return *this;
+    }
+
+    SubpassDescription & setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pResolveAttachments = pResolveAttachments_;
+      return *this;
+    }
+
+    SubpassDescription & setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDepthStencilAttachment = pDepthStencilAttachment_;
+      return *this;
+    }
+
+    SubpassDescription & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      preserveAttachmentCount = preserveAttachmentCount_;
+      return *this;
+    }
+
+    SubpassDescription & setPPreserveAttachments( const uint32_t* pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPreserveAttachments = pPreserveAttachments_;
+      return *this;
+    }
+
+    operator VkSubpassDescription const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassDescription*>( this );
+    }
+
+    operator VkSubpassDescription &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassDescription*>( this );
+    }
+
+    bool operator==( SubpassDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( flags == rhs.flags )
+          && ( pipelineBindPoint == rhs.pipelineBindPoint )
+          && ( inputAttachmentCount == rhs.inputAttachmentCount )
+          && ( pInputAttachments == rhs.pInputAttachments )
+          && ( colorAttachmentCount == rhs.colorAttachmentCount )
+          && ( pColorAttachments == rhs.pColorAttachments )
+          && ( pResolveAttachments == rhs.pResolveAttachments )
+          && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment )
+          && ( preserveAttachmentCount == rhs.preserveAttachmentCount )
+          && ( pPreserveAttachments == rhs.pPreserveAttachments );
+    }
+
+    bool operator!=( SubpassDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags;
+    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint;
+    uint32_t inputAttachmentCount;
+    const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments;
+    uint32_t colorAttachmentCount;
+    const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments;
+    const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments;
+    const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment;
+    uint32_t preserveAttachmentCount;
+    const uint32_t* pPreserveAttachments;
+  };
+  static_assert( sizeof( SubpassDescription ) == sizeof( VkSubpassDescription ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubpassDescription>::value, "struct wrapper is not a standard layout!" );
+
+  struct SubpassDependency
+  {
+    VULKAN_HPP_CONSTEXPR SubpassDependency( uint32_t srcSubpass_ = 0,
+                                            uint32_t dstSubpass_ = 0,
+                                            VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = VULKAN_HPP_NAMESPACE::PipelineStageFlags(),
+                                            VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = VULKAN_HPP_NAMESPACE::PipelineStageFlags(),
+                                            VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = VULKAN_HPP_NAMESPACE::AccessFlags(),
+                                            VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = VULKAN_HPP_NAMESPACE::AccessFlags(),
+                                            VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = VULKAN_HPP_NAMESPACE::DependencyFlags() ) VULKAN_HPP_NOEXCEPT
+      : srcSubpass( srcSubpass_ )
+      , dstSubpass( dstSubpass_ )
+      , srcStageMask( srcStageMask_ )
+      , dstStageMask( dstStageMask_ )
+      , srcAccessMask( srcAccessMask_ )
+      , dstAccessMask( dstAccessMask_ )
+      , dependencyFlags( dependencyFlags_ )
+    {}
+
+    SubpassDependency( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SubpassDependency& operator=( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDependency const *>(&rhs);
+      return *this;
+    }
+
+    SubpassDependency & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSubpass = srcSubpass_;
+      return *this;
+    }
+
+    SubpassDependency & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSubpass = dstSubpass_;
+      return *this;
+    }
+
+    SubpassDependency & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcStageMask = srcStageMask_;
+      return *this;
+    }
+
+    SubpassDependency & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstStageMask = dstStageMask_;
+      return *this;
+    }
+
+    SubpassDependency & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    SubpassDependency & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+
+    SubpassDependency & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyFlags = dependencyFlags_;
+      return *this;
+    }
+
+    operator VkSubpassDependency const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassDependency*>( this );
+    }
+
+    operator VkSubpassDependency &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassDependency*>( this );
+    }
+
+    bool operator==( SubpassDependency const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( srcSubpass == rhs.srcSubpass )
+          && ( dstSubpass == rhs.dstSubpass )
+          && ( srcStageMask == rhs.srcStageMask )
+          && ( dstStageMask == rhs.dstStageMask )
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstAccessMask == rhs.dstAccessMask )
+          && ( dependencyFlags == rhs.dependencyFlags );
+    }
+
+    bool operator!=( SubpassDependency const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint32_t srcSubpass;
+    uint32_t dstSubpass;
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask;
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask;
+    VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask;
+    VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask;
+    VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags;
+  };
+  static_assert( sizeof( SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubpassDependency>::value, "struct wrapper is not a standard layout!" );
+
+  struct RenderPassCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = VULKAN_HPP_NAMESPACE::RenderPassCreateFlags(),
+                                               uint32_t attachmentCount_ = 0,
+                                               const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments_ = nullptr,
+                                               uint32_t subpassCount_ = 0,
+                                               const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses_ = nullptr,
+                                               uint32_t dependencyCount_ = 0,
+                                               const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , attachmentCount( attachmentCount_ )
+      , pAttachments( pAttachments_ )
+      , subpassCount( subpassCount_ )
+      , pSubpasses( pSubpasses_ )
+      , dependencyCount( dependencyCount_ )
+      , pDependencies( pDependencies_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & operator=( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo ) - offsetof( RenderPassCreateInfo, pNext ) );
+      return *this;
+    }
+
+    RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    RenderPassCreateInfo& operator=( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    RenderPassCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    RenderPassCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = attachmentCount_;
+      return *this;
+    }
+
+    RenderPassCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachments = pAttachments_;
+      return *this;
+    }
+
+    RenderPassCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpassCount = subpassCount_;
+      return *this;
+    }
+
+    RenderPassCreateInfo & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSubpasses = pSubpasses_;
+      return *this;
+    }
+
+    RenderPassCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyCount = dependencyCount_;
+      return *this;
+    }
+
+    RenderPassCreateInfo & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDependencies = pDependencies_;
+      return *this;
+    }
+
+    operator VkRenderPassCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassCreateInfo*>( this );
+    }
+
+    operator VkRenderPassCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassCreateInfo*>( this );
+    }
+
+    bool operator==( RenderPassCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( attachmentCount == rhs.attachmentCount )
+          && ( pAttachments == rhs.pAttachments )
+          && ( subpassCount == rhs.subpassCount )
+          && ( pSubpasses == rhs.pSubpasses )
+          && ( dependencyCount == rhs.dependencyCount )
+          && ( pDependencies == rhs.pDependencies );
+    }
+
+    bool operator!=( RenderPassCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags;
+    uint32_t attachmentCount;
+    const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments;
+    uint32_t subpassCount;
+    const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses;
+    uint32_t dependencyCount;
+    const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies;
+  };
+  static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct SubpassDescription2KHR
+  {
+    VULKAN_HPP_CONSTEXPR SubpassDescription2KHR( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags(),
+                                                 VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
+                                                 uint32_t viewMask_ = 0,
+                                                 uint32_t inputAttachmentCount_ = 0,
+                                                 const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pInputAttachments_ = nullptr,
+                                                 uint32_t colorAttachmentCount_ = 0,
+                                                 const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pColorAttachments_ = nullptr,
+                                                 const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pResolveAttachments_ = nullptr,
+                                                 const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pDepthStencilAttachment_ = nullptr,
+                                                 uint32_t preserveAttachmentCount_ = 0,
+                                                 const uint32_t* pPreserveAttachments_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , pipelineBindPoint( pipelineBindPoint_ )
+      , viewMask( viewMask_ )
+      , inputAttachmentCount( inputAttachmentCount_ )
+      , pInputAttachments( pInputAttachments_ )
+      , colorAttachmentCount( colorAttachmentCount_ )
+      , pColorAttachments( pColorAttachments_ )
+      , pResolveAttachments( pResolveAttachments_ )
+      , pDepthStencilAttachment( pDepthStencilAttachment_ )
+      , preserveAttachmentCount( preserveAttachmentCount_ )
+      , pPreserveAttachments( pPreserveAttachments_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SubpassDescription2KHR & operator=( VULKAN_HPP_NAMESPACE::SubpassDescription2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SubpassDescription2KHR ) - offsetof( SubpassDescription2KHR, pNext ) );
+      return *this;
+    }
+
+    SubpassDescription2KHR( VkSubpassDescription2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SubpassDescription2KHR& operator=( VkSubpassDescription2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescription2KHR const *>(&rhs);
+      return *this;
+    }
+
+    SubpassDescription2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SubpassDescription2KHR & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    SubpassDescription2KHR & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineBindPoint = pipelineBindPoint_;
+      return *this;
+    }
+
+    SubpassDescription2KHR & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewMask = viewMask_;
+      return *this;
+    }
+
+    SubpassDescription2KHR & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inputAttachmentCount = inputAttachmentCount_;
+      return *this;
+    }
+
+    SubpassDescription2KHR & setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInputAttachments = pInputAttachments_;
+      return *this;
+    }
+
+    SubpassDescription2KHR & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachmentCount = colorAttachmentCount_;
+      return *this;
+    }
+
+    SubpassDescription2KHR & setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pColorAttachments = pColorAttachments_;
+      return *this;
+    }
+
+    SubpassDescription2KHR & setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pResolveAttachments = pResolveAttachments_;
+      return *this;
+    }
+
+    SubpassDescription2KHR & setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDepthStencilAttachment = pDepthStencilAttachment_;
+      return *this;
+    }
+
+    SubpassDescription2KHR & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      preserveAttachmentCount = preserveAttachmentCount_;
+      return *this;
+    }
+
+    SubpassDescription2KHR & setPPreserveAttachments( const uint32_t* pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPreserveAttachments = pPreserveAttachments_;
+      return *this;
+    }
+
+    operator VkSubpassDescription2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassDescription2KHR*>( this );
+    }
+
+    operator VkSubpassDescription2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassDescription2KHR*>( this );
+    }
+
+    bool operator==( SubpassDescription2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pipelineBindPoint == rhs.pipelineBindPoint )
+          && ( viewMask == rhs.viewMask )
+          && ( inputAttachmentCount == rhs.inputAttachmentCount )
+          && ( pInputAttachments == rhs.pInputAttachments )
+          && ( colorAttachmentCount == rhs.colorAttachmentCount )
+          && ( pColorAttachments == rhs.pColorAttachments )
+          && ( pResolveAttachments == rhs.pResolveAttachments )
+          && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment )
+          && ( preserveAttachmentCount == rhs.preserveAttachmentCount )
+          && ( pPreserveAttachments == rhs.pPreserveAttachments );
+    }
+
+    bool operator!=( SubpassDescription2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescription2KHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags;
+    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint;
+    uint32_t viewMask;
+    uint32_t inputAttachmentCount;
+    const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pInputAttachments;
+    uint32_t colorAttachmentCount;
+    const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pColorAttachments;
+    const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pResolveAttachments;
+    const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pDepthStencilAttachment;
+    uint32_t preserveAttachmentCount;
+    const uint32_t* pPreserveAttachments;
+  };
+  static_assert( sizeof( SubpassDescription2KHR ) == sizeof( VkSubpassDescription2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubpassDescription2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct SubpassDependency2KHR
+  {
+    VULKAN_HPP_CONSTEXPR SubpassDependency2KHR( uint32_t srcSubpass_ = 0,
+                                                uint32_t dstSubpass_ = 0,
+                                                VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = VULKAN_HPP_NAMESPACE::PipelineStageFlags(),
+                                                VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = VULKAN_HPP_NAMESPACE::PipelineStageFlags(),
+                                                VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = VULKAN_HPP_NAMESPACE::AccessFlags(),
+                                                VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = VULKAN_HPP_NAMESPACE::AccessFlags(),
+                                                VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = VULKAN_HPP_NAMESPACE::DependencyFlags(),
+                                                int32_t viewOffset_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : srcSubpass( srcSubpass_ )
+      , dstSubpass( dstSubpass_ )
+      , srcStageMask( srcStageMask_ )
+      , dstStageMask( dstStageMask_ )
+      , srcAccessMask( srcAccessMask_ )
+      , dstAccessMask( dstAccessMask_ )
+      , dependencyFlags( dependencyFlags_ )
+      , viewOffset( viewOffset_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SubpassDependency2KHR & operator=( VULKAN_HPP_NAMESPACE::SubpassDependency2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SubpassDependency2KHR ) - offsetof( SubpassDependency2KHR, pNext ) );
+      return *this;
+    }
+
+    SubpassDependency2KHR( VkSubpassDependency2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SubpassDependency2KHR& operator=( VkSubpassDependency2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDependency2KHR const *>(&rhs);
+      return *this;
+    }
+
+    SubpassDependency2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SubpassDependency2KHR & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSubpass = srcSubpass_;
+      return *this;
+    }
+
+    SubpassDependency2KHR & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSubpass = dstSubpass_;
+      return *this;
+    }
+
+    SubpassDependency2KHR & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcStageMask = srcStageMask_;
+      return *this;
+    }
+
+    SubpassDependency2KHR & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstStageMask = dstStageMask_;
+      return *this;
+    }
+
+    SubpassDependency2KHR & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    SubpassDependency2KHR & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+
+    SubpassDependency2KHR & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyFlags = dependencyFlags_;
+      return *this;
+    }
+
+    SubpassDependency2KHR & setViewOffset( int32_t viewOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewOffset = viewOffset_;
+      return *this;
+    }
+
+    operator VkSubpassDependency2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassDependency2KHR*>( this );
+    }
+
+    operator VkSubpassDependency2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassDependency2KHR*>( this );
+    }
+
+    bool operator==( SubpassDependency2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcSubpass == rhs.srcSubpass )
+          && ( dstSubpass == rhs.dstSubpass )
+          && ( srcStageMask == rhs.srcStageMask )
+          && ( dstStageMask == rhs.dstStageMask )
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstAccessMask == rhs.dstAccessMask )
+          && ( dependencyFlags == rhs.dependencyFlags )
+          && ( viewOffset == rhs.viewOffset );
+    }
+
+    bool operator!=( SubpassDependency2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDependency2KHR;
+    const void* pNext = nullptr;
+    uint32_t srcSubpass;
+    uint32_t dstSubpass;
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask;
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask;
+    VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask;
+    VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask;
+    VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags;
+    int32_t viewOffset;
+  };
+  static_assert( sizeof( SubpassDependency2KHR ) == sizeof( VkSubpassDependency2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubpassDependency2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct RenderPassCreateInfo2KHR
+  {
+    VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2KHR( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = VULKAN_HPP_NAMESPACE::RenderPassCreateFlags(),
+                                                   uint32_t attachmentCount_ = 0,
+                                                   const VULKAN_HPP_NAMESPACE::AttachmentDescription2KHR* pAttachments_ = nullptr,
+                                                   uint32_t subpassCount_ = 0,
+                                                   const VULKAN_HPP_NAMESPACE::SubpassDescription2KHR* pSubpasses_ = nullptr,
+                                                   uint32_t dependencyCount_ = 0,
+                                                   const VULKAN_HPP_NAMESPACE::SubpassDependency2KHR* pDependencies_ = nullptr,
+                                                   uint32_t correlatedViewMaskCount_ = 0,
+                                                   const uint32_t* pCorrelatedViewMasks_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , attachmentCount( attachmentCount_ )
+      , pAttachments( pAttachments_ )
+      , subpassCount( subpassCount_ )
+      , pSubpasses( pSubpasses_ )
+      , dependencyCount( dependencyCount_ )
+      , pDependencies( pDependencies_ )
+      , correlatedViewMaskCount( correlatedViewMaskCount_ )
+      , pCorrelatedViewMasks( pCorrelatedViewMasks_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2KHR & operator=( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2KHR ) - offsetof( RenderPassCreateInfo2KHR, pNext ) );
+      return *this;
+    }
+
+    RenderPassCreateInfo2KHR( VkRenderPassCreateInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    RenderPassCreateInfo2KHR& operator=( VkRenderPassCreateInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2KHR const *>(&rhs);
+      return *this;
+    }
+
+    RenderPassCreateInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassCreateInfo2KHR & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    RenderPassCreateInfo2KHR & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = attachmentCount_;
+      return *this;
+    }
+
+    RenderPassCreateInfo2KHR & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription2KHR* pAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachments = pAttachments_;
+      return *this;
+    }
+
+    RenderPassCreateInfo2KHR & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpassCount = subpassCount_;
+      return *this;
+    }
+
+    RenderPassCreateInfo2KHR & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription2KHR* pSubpasses_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSubpasses = pSubpasses_;
+      return *this;
+    }
+
+    RenderPassCreateInfo2KHR & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyCount = dependencyCount_;
+      return *this;
+    }
+
+    RenderPassCreateInfo2KHR & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency2KHR* pDependencies_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDependencies = pDependencies_;
+      return *this;
+    }
+
+    RenderPassCreateInfo2KHR & setCorrelatedViewMaskCount( uint32_t correlatedViewMaskCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      correlatedViewMaskCount = correlatedViewMaskCount_;
+      return *this;
+    }
+
+    RenderPassCreateInfo2KHR & setPCorrelatedViewMasks( const uint32_t* pCorrelatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCorrelatedViewMasks = pCorrelatedViewMasks_;
+      return *this;
+    }
+
+    operator VkRenderPassCreateInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassCreateInfo2KHR*>( this );
+    }
+
+    operator VkRenderPassCreateInfo2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassCreateInfo2KHR*>( this );
+    }
+
+    bool operator==( RenderPassCreateInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( attachmentCount == rhs.attachmentCount )
+          && ( pAttachments == rhs.pAttachments )
+          && ( subpassCount == rhs.subpassCount )
+          && ( pSubpasses == rhs.pSubpasses )
+          && ( dependencyCount == rhs.dependencyCount )
+          && ( pDependencies == rhs.pDependencies )
+          && ( correlatedViewMaskCount == rhs.correlatedViewMaskCount )
+          && ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks );
+    }
+
+    bool operator!=( RenderPassCreateInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo2KHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags;
+    uint32_t attachmentCount;
+    const VULKAN_HPP_NAMESPACE::AttachmentDescription2KHR* pAttachments;
+    uint32_t subpassCount;
+    const VULKAN_HPP_NAMESPACE::SubpassDescription2KHR* pSubpasses;
+    uint32_t dependencyCount;
+    const VULKAN_HPP_NAMESPACE::SubpassDependency2KHR* pDependencies;
+    uint32_t correlatedViewMaskCount;
+    const uint32_t* pCorrelatedViewMasks;
+  };
+  static_assert( sizeof( RenderPassCreateInfo2KHR ) == sizeof( VkRenderPassCreateInfo2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassCreateInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct RenderPassFragmentDensityMapCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ = VULKAN_HPP_NAMESPACE::AttachmentReference() ) VULKAN_HPP_NOEXCEPT
+      : fragmentDensityMapAttachment( fragmentDensityMapAttachment_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT ) - offsetof( RenderPassFragmentDensityMapCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    RenderPassFragmentDensityMapCreateInfoEXT( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    RenderPassFragmentDensityMapCreateInfoEXT& operator=( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    RenderPassFragmentDensityMapCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassFragmentDensityMapCreateInfoEXT & setFragmentDensityMapAttachment( VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentDensityMapAttachment = fragmentDensityMapAttachment_;
+      return *this;
+    }
+
+    operator VkRenderPassFragmentDensityMapCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassFragmentDensityMapCreateInfoEXT*>( this );
+    }
+
+    operator VkRenderPassFragmentDensityMapCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassFragmentDensityMapCreateInfoEXT*>( this );
+    }
+
+    bool operator==( RenderPassFragmentDensityMapCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fragmentDensityMapAttachment == rhs.fragmentDensityMapAttachment );
+    }
+
+    bool operator!=( RenderPassFragmentDensityMapCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment;
+  };
+  static_assert( sizeof( RenderPassFragmentDensityMapCreateInfoEXT ) == sizeof( VkRenderPassFragmentDensityMapCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassFragmentDensityMapCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct RenderPassInputAttachmentAspectCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( uint32_t aspectReferenceCount_ = 0,
+                                                                    const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference* pAspectReferences_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : aspectReferenceCount( aspectReferenceCount_ )
+      , pAspectReferences( pAspectReferences_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo & operator=( VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo ) - offsetof( RenderPassInputAttachmentAspectCreateInfo, pNext ) );
+      return *this;
+    }
+
+    RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    RenderPassInputAttachmentAspectCreateInfo& operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    RenderPassInputAttachmentAspectCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassInputAttachmentAspectCreateInfo & setAspectReferenceCount( uint32_t aspectReferenceCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectReferenceCount = aspectReferenceCount_;
+      return *this;
+    }
+
+    RenderPassInputAttachmentAspectCreateInfo & setPAspectReferences( const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference* pAspectReferences_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAspectReferences = pAspectReferences_;
+      return *this;
+    }
+
+    operator VkRenderPassInputAttachmentAspectCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassInputAttachmentAspectCreateInfo*>( this );
+    }
+
+    operator VkRenderPassInputAttachmentAspectCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassInputAttachmentAspectCreateInfo*>( this );
+    }
+
+    bool operator==( RenderPassInputAttachmentAspectCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( aspectReferenceCount == rhs.aspectReferenceCount )
+          && ( pAspectReferences == rhs.pAspectReferences );
+    }
+
+    bool operator!=( RenderPassInputAttachmentAspectCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassInputAttachmentAspectCreateInfo;
+    const void* pNext = nullptr;
+    uint32_t aspectReferenceCount;
+    const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference* pAspectReferences;
+  };
+  static_assert( sizeof( RenderPassInputAttachmentAspectCreateInfo ) == sizeof( VkRenderPassInputAttachmentAspectCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassInputAttachmentAspectCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct RenderPassMultiviewCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( uint32_t subpassCount_ = 0,
+                                                        const uint32_t* pViewMasks_ = nullptr,
+                                                        uint32_t dependencyCount_ = 0,
+                                                        const int32_t* pViewOffsets_ = nullptr,
+                                                        uint32_t correlationMaskCount_ = 0,
+                                                        const uint32_t* pCorrelationMasks_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : subpassCount( subpassCount_ )
+      , pViewMasks( pViewMasks_ )
+      , dependencyCount( dependencyCount_ )
+      , pViewOffsets( pViewOffsets_ )
+      , correlationMaskCount( correlationMaskCount_ )
+      , pCorrelationMasks( pCorrelationMasks_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo & operator=( VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo ) - offsetof( RenderPassMultiviewCreateInfo, pNext ) );
+      return *this;
+    }
+
+    RenderPassMultiviewCreateInfo( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    RenderPassMultiviewCreateInfo& operator=( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    RenderPassMultiviewCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassMultiviewCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpassCount = subpassCount_;
+      return *this;
+    }
+
+    RenderPassMultiviewCreateInfo & setPViewMasks( const uint32_t* pViewMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewMasks = pViewMasks_;
+      return *this;
+    }
+
+    RenderPassMultiviewCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyCount = dependencyCount_;
+      return *this;
+    }
+
+    RenderPassMultiviewCreateInfo & setPViewOffsets( const int32_t* pViewOffsets_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewOffsets = pViewOffsets_;
+      return *this;
+    }
+
+    RenderPassMultiviewCreateInfo & setCorrelationMaskCount( uint32_t correlationMaskCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      correlationMaskCount = correlationMaskCount_;
+      return *this;
+    }
+
+    RenderPassMultiviewCreateInfo & setPCorrelationMasks( const uint32_t* pCorrelationMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCorrelationMasks = pCorrelationMasks_;
+      return *this;
+    }
+
+    operator VkRenderPassMultiviewCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassMultiviewCreateInfo*>( this );
+    }
+
+    operator VkRenderPassMultiviewCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassMultiviewCreateInfo*>( this );
+    }
+
+    bool operator==( RenderPassMultiviewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( subpassCount == rhs.subpassCount )
+          && ( pViewMasks == rhs.pViewMasks )
+          && ( dependencyCount == rhs.dependencyCount )
+          && ( pViewOffsets == rhs.pViewOffsets )
+          && ( correlationMaskCount == rhs.correlationMaskCount )
+          && ( pCorrelationMasks == rhs.pCorrelationMasks );
+    }
+
+    bool operator!=( RenderPassMultiviewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassMultiviewCreateInfo;
+    const void* pNext = nullptr;
+    uint32_t subpassCount;
+    const uint32_t* pViewMasks;
+    uint32_t dependencyCount;
+    const int32_t* pViewOffsets;
+    uint32_t correlationMaskCount;
+    const uint32_t* pCorrelationMasks;
+  };
+  static_assert( sizeof( RenderPassMultiviewCreateInfo ) == sizeof( VkRenderPassMultiviewCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassMultiviewCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct SubpassSampleLocationsEXT
+  {
+    VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( uint32_t subpassIndex_ = 0,
+                                                    VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT() ) VULKAN_HPP_NOEXCEPT
+      : subpassIndex( subpassIndex_ )
+      , sampleLocationsInfo( sampleLocationsInfo_ )
+    {}
+
+    SubpassSampleLocationsEXT( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SubpassSampleLocationsEXT& operator=( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT const *>(&rhs);
+      return *this;
+    }
+
+    SubpassSampleLocationsEXT & setSubpassIndex( uint32_t subpassIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpassIndex = subpassIndex_;
+      return *this;
+    }
+
+    SubpassSampleLocationsEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationsInfo = sampleLocationsInfo_;
+      return *this;
+    }
+
+    operator VkSubpassSampleLocationsEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassSampleLocationsEXT*>( this );
+    }
+
+    operator VkSubpassSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassSampleLocationsEXT*>( this );
+    }
+
+    bool operator==( SubpassSampleLocationsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( subpassIndex == rhs.subpassIndex )
+          && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
+    }
+
+    bool operator!=( SubpassSampleLocationsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint32_t subpassIndex;
+    VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo;
+  };
+  static_assert( sizeof( SubpassSampleLocationsEXT ) == sizeof( VkSubpassSampleLocationsEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubpassSampleLocationsEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct RenderPassSampleLocationsBeginInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( uint32_t attachmentInitialSampleLocationsCount_ = 0,
+                                                                const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ = nullptr,
+                                                                uint32_t postSubpassSampleLocationsCount_ = 0,
+                                                                const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : attachmentInitialSampleLocationsCount( attachmentInitialSampleLocationsCount_ )
+      , pAttachmentInitialSampleLocations( pAttachmentInitialSampleLocations_ )
+      , postSubpassSampleLocationsCount( postSubpassSampleLocationsCount_ )
+      , pPostSubpassSampleLocations( pPostSubpassSampleLocations_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT & operator=( VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT ) - offsetof( RenderPassSampleLocationsBeginInfoEXT, pNext ) );
+      return *this;
+    }
+
+    RenderPassSampleLocationsBeginInfoEXT( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    RenderPassSampleLocationsBeginInfoEXT& operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    RenderPassSampleLocationsBeginInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassSampleLocationsBeginInfoEXT & setAttachmentInitialSampleLocationsCount( uint32_t attachmentInitialSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentInitialSampleLocationsCount = attachmentInitialSampleLocationsCount_;
+      return *this;
+    }
+
+    RenderPassSampleLocationsBeginInfoEXT & setPAttachmentInitialSampleLocations( const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachmentInitialSampleLocations = pAttachmentInitialSampleLocations_;
+      return *this;
+    }
+
+    RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocationsCount( uint32_t postSubpassSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      postSubpassSampleLocationsCount = postSubpassSampleLocationsCount_;
+      return *this;
+    }
+
+    RenderPassSampleLocationsBeginInfoEXT & setPPostSubpassSampleLocations( const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPostSubpassSampleLocations = pPostSubpassSampleLocations_;
+      return *this;
+    }
+
+    operator VkRenderPassSampleLocationsBeginInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassSampleLocationsBeginInfoEXT*>( this );
+    }
+
+    operator VkRenderPassSampleLocationsBeginInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassSampleLocationsBeginInfoEXT*>( this );
+    }
+
+    bool operator==( RenderPassSampleLocationsBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( attachmentInitialSampleLocationsCount == rhs.attachmentInitialSampleLocationsCount )
+          && ( pAttachmentInitialSampleLocations == rhs.pAttachmentInitialSampleLocations )
+          && ( postSubpassSampleLocationsCount == rhs.postSubpassSampleLocationsCount )
+          && ( pPostSubpassSampleLocations == rhs.pPostSubpassSampleLocations );
+    }
+
+    bool operator!=( RenderPassSampleLocationsBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT;
+    const void* pNext = nullptr;
+    uint32_t attachmentInitialSampleLocationsCount;
+    const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations;
+    uint32_t postSubpassSampleLocationsCount;
+    const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT* pPostSubpassSampleLocations;
+  };
+  static_assert( sizeof( RenderPassSampleLocationsBeginInfoEXT ) == sizeof( VkRenderPassSampleLocationsBeginInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassSampleLocationsBeginInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct SamplerCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR SamplerCreateInfo( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ = VULKAN_HPP_NAMESPACE::SamplerCreateFlags(),
+                                            VULKAN_HPP_NAMESPACE::Filter magFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
+                                            VULKAN_HPP_NAMESPACE::Filter minFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
+                                            VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest,
+                                            VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
+                                            VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
+                                            VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
+                                            float mipLodBias_ = 0,
+                                            VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ = 0,
+                                            float maxAnisotropy_ = 0,
+                                            VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ = 0,
+                                            VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
+                                            float minLod_ = 0,
+                                            float maxLod_ = 0,
+                                            VULKAN_HPP_NAMESPACE::BorderColor borderColor_ = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack,
+                                            VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , magFilter( magFilter_ )
+      , minFilter( minFilter_ )
+      , mipmapMode( mipmapMode_ )
+      , addressModeU( addressModeU_ )
+      , addressModeV( addressModeV_ )
+      , addressModeW( addressModeW_ )
+      , mipLodBias( mipLodBias_ )
+      , anisotropyEnable( anisotropyEnable_ )
+      , maxAnisotropy( maxAnisotropy_ )
+      , compareEnable( compareEnable_ )
+      , compareOp( compareOp_ )
+      , minLod( minLod_ )
+      , maxLod( maxLod_ )
+      , borderColor( borderColor_ )
+      , unnormalizedCoordinates( unnormalizedCoordinates_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SamplerCreateInfo & operator=( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SamplerCreateInfo ) - offsetof( SamplerCreateInfo, pNext ) );
+      return *this;
+    }
+
+    SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SamplerCreateInfo& operator=( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    SamplerCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setMagFilter( VULKAN_HPP_NAMESPACE::Filter magFilter_ ) VULKAN_HPP_NOEXCEPT
+    {
+      magFilter = magFilter_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setMinFilter( VULKAN_HPP_NAMESPACE::Filter minFilter_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minFilter = minFilter_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setMipmapMode( VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mipmapMode = mipmapMode_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setAddressModeU( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ ) VULKAN_HPP_NOEXCEPT
+    {
+      addressModeU = addressModeU_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setAddressModeV( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ ) VULKAN_HPP_NOEXCEPT
+    {
+      addressModeV = addressModeV_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setAddressModeW( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ ) VULKAN_HPP_NOEXCEPT
+    {
+      addressModeW = addressModeW_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setMipLodBias( float mipLodBias_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mipLodBias = mipLodBias_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setAnisotropyEnable( VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      anisotropyEnable = anisotropyEnable_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setMaxAnisotropy( float maxAnisotropy_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxAnisotropy = maxAnisotropy_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setCompareEnable( VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compareEnable = compareEnable_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compareOp = compareOp_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minLod = minLod_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setMaxLod( float maxLod_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxLod = maxLod_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setBorderColor( VULKAN_HPP_NAMESPACE::BorderColor borderColor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      borderColor = borderColor_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setUnnormalizedCoordinates( VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ ) VULKAN_HPP_NOEXCEPT
+    {
+      unnormalizedCoordinates = unnormalizedCoordinates_;
+      return *this;
+    }
+
+    operator VkSamplerCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerCreateInfo*>( this );
+    }
+
+    operator VkSamplerCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerCreateInfo*>( this );
+    }
+
+    bool operator==( SamplerCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( magFilter == rhs.magFilter )
+          && ( minFilter == rhs.minFilter )
+          && ( mipmapMode == rhs.mipmapMode )
+          && ( addressModeU == rhs.addressModeU )
+          && ( addressModeV == rhs.addressModeV )
+          && ( addressModeW == rhs.addressModeW )
+          && ( mipLodBias == rhs.mipLodBias )
+          && ( anisotropyEnable == rhs.anisotropyEnable )
+          && ( maxAnisotropy == rhs.maxAnisotropy )
+          && ( compareEnable == rhs.compareEnable )
+          && ( compareOp == rhs.compareOp )
+          && ( minLod == rhs.minLod )
+          && ( maxLod == rhs.maxLod )
+          && ( borderColor == rhs.borderColor )
+          && ( unnormalizedCoordinates == rhs.unnormalizedCoordinates );
+    }
+
+    bool operator!=( SamplerCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags;
+    VULKAN_HPP_NAMESPACE::Filter magFilter;
+    VULKAN_HPP_NAMESPACE::Filter minFilter;
+    VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode;
+    VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU;
+    VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV;
+    VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW;
+    float mipLodBias;
+    VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable;
+    float maxAnisotropy;
+    VULKAN_HPP_NAMESPACE::Bool32 compareEnable;
+    VULKAN_HPP_NAMESPACE::CompareOp compareOp;
+    float minLod;
+    float maxLod;
+    VULKAN_HPP_NAMESPACE::BorderColor borderColor;
+    VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates;
+  };
+  static_assert( sizeof( SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SamplerCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
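+  // A minimal usage sketch for the fluent setters on SamplerCreateInfo, assuming a
+  // valid vk::Device named `device` and the createSampler wrapper generated
+  // elsewhere in this header:
+  //
+  //   vk::SamplerCreateInfo samplerInfo = vk::SamplerCreateInfo()
+  //       .setMagFilter( vk::Filter::eLinear )
+  //       .setMinFilter( vk::Filter::eLinear )
+  //       .setMipmapMode( vk::SamplerMipmapMode::eLinear )
+  //       .setAddressModeU( vk::SamplerAddressMode::eRepeat )
+  //       .setAddressModeV( vk::SamplerAddressMode::eRepeat )
+  //       .setAddressModeW( vk::SamplerAddressMode::eRepeat )
+  //       .setMaxLod( VK_LOD_CLAMP_NONE );
+  //   vk::Sampler sampler = device.createSampler( samplerInfo );
+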
+  struct SamplerReductionModeCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfoEXT( VULKAN_HPP_NAMESPACE::SamplerReductionModeEXT reductionMode_ = VULKAN_HPP_NAMESPACE::SamplerReductionModeEXT::eWeightedAverage ) VULKAN_HPP_NOEXCEPT
+      : reductionMode( reductionMode_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfoEXT ) - offsetof( SamplerReductionModeCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    SamplerReductionModeCreateInfoEXT( VkSamplerReductionModeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SamplerReductionModeCreateInfoEXT& operator=( VkSamplerReductionModeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    SamplerReductionModeCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SamplerReductionModeCreateInfoEXT & setReductionMode( VULKAN_HPP_NAMESPACE::SamplerReductionModeEXT reductionMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      reductionMode = reductionMode_;
+      return *this;
+    }
+
+    operator VkSamplerReductionModeCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerReductionModeCreateInfoEXT*>( this );
+    }
+
+    operator VkSamplerReductionModeCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerReductionModeCreateInfoEXT*>( this );
+    }
+
+    bool operator==( SamplerReductionModeCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( reductionMode == rhs.reductionMode );
+    }
+
+    bool operator!=( SamplerReductionModeCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerReductionModeCreateInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::SamplerReductionModeEXT reductionMode;
+  };
+  static_assert( sizeof( SamplerReductionModeCreateInfoEXT ) == sizeof( VkSamplerReductionModeCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SamplerReductionModeCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct SamplerYcbcrConversionCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                                           VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity,
+                                                           VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull,
+                                                           VULKAN_HPP_NAMESPACE::ComponentMapping components_ = VULKAN_HPP_NAMESPACE::ComponentMapping(),
+                                                           VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
+                                                           VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
+                                                           VULKAN_HPP_NAMESPACE::Filter chromaFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
+                                                           VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : format( format_ )
+      , ycbcrModel( ycbcrModel_ )
+      , ycbcrRange( ycbcrRange_ )
+      , components( components_ )
+      , xChromaOffset( xChromaOffset_ )
+      , yChromaOffset( yChromaOffset_ )
+      , chromaFilter( chromaFilter_ )
+      , forceExplicitReconstruction( forceExplicitReconstruction_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & operator=( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo ) - offsetof( SamplerYcbcrConversionCreateInfo, pNext ) );
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SamplerYcbcrConversionCreateInfo& operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ycbcrModel = ycbcrModel_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ycbcrRange = ycbcrRange_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping components_ ) VULKAN_HPP_NOEXCEPT
+    {
+      components = components_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      xChromaOffset = xChromaOffset_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      yChromaOffset = yChromaOffset_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setChromaFilter( VULKAN_HPP_NAMESPACE::Filter chromaFilter_ ) VULKAN_HPP_NOEXCEPT
+    {
+      chromaFilter = chromaFilter_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setForceExplicitReconstruction( VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ ) VULKAN_HPP_NOEXCEPT
+    {
+      forceExplicitReconstruction = forceExplicitReconstruction_;
+      return *this;
+    }
+
+    operator VkSamplerYcbcrConversionCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( this );
+    }
+
+    operator VkSamplerYcbcrConversionCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerYcbcrConversionCreateInfo*>( this );
+    }
+
+    bool operator==( SamplerYcbcrConversionCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( format == rhs.format )
+          && ( ycbcrModel == rhs.ycbcrModel )
+          && ( ycbcrRange == rhs.ycbcrRange )
+          && ( components == rhs.components )
+          && ( xChromaOffset == rhs.xChromaOffset )
+          && ( yChromaOffset == rhs.yChromaOffset )
+          && ( chromaFilter == rhs.chromaFilter )
+          && ( forceExplicitReconstruction == rhs.forceExplicitReconstruction );
+    }
+
+    bool operator!=( SamplerYcbcrConversionCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Format format;
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel;
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange;
+    VULKAN_HPP_NAMESPACE::ComponentMapping components;
+    VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset;
+    VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset;
+    VULKAN_HPP_NAMESPACE::Filter chromaFilter;
+    VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction;
+  };
+  static_assert( sizeof( SamplerYcbcrConversionCreateInfo ) == sizeof( VkSamplerYcbcrConversionCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SamplerYcbcrConversionCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct SamplerYcbcrConversionImageFormatProperties
+  {
+    SamplerYcbcrConversionImageFormatProperties( uint32_t combinedImageSamplerDescriptorCount_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : combinedImageSamplerDescriptorCount( combinedImageSamplerDescriptorCount_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties & operator=( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties ) - offsetof( SamplerYcbcrConversionImageFormatProperties, pNext ) );
+      return *this;
+    }
+
+    SamplerYcbcrConversionImageFormatProperties( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SamplerYcbcrConversionImageFormatProperties& operator=( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkSamplerYcbcrConversionImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerYcbcrConversionImageFormatProperties*>( this );
+    }
+
+    operator VkSamplerYcbcrConversionImageFormatProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties*>( this );
+    }
+
+    bool operator==( SamplerYcbcrConversionImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( combinedImageSamplerDescriptorCount == rhs.combinedImageSamplerDescriptorCount );
+    }
+
+    bool operator!=( SamplerYcbcrConversionImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties;
+    void* pNext = nullptr;
+    uint32_t combinedImageSamplerDescriptorCount;
+  };
+  static_assert( sizeof( SamplerYcbcrConversionImageFormatProperties ) == sizeof( VkSamplerYcbcrConversionImageFormatProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SamplerYcbcrConversionImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct SamplerYcbcrConversionInfo
+  {
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion() ) VULKAN_HPP_NOEXCEPT
+      : conversion( conversion_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo & operator=( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo ) - offsetof( SamplerYcbcrConversionInfo, pNext ) );
+      return *this;
+    }
+
+    SamplerYcbcrConversionInfo( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SamplerYcbcrConversionInfo& operator=( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo const *>(&rhs);
+      return *this;
+    }
+
+    SamplerYcbcrConversionInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionInfo & setConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      conversion = conversion_;
+      return *this;
+    }
+
+    operator VkSamplerYcbcrConversionInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerYcbcrConversionInfo*>( this );
+    }
+
+    operator VkSamplerYcbcrConversionInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerYcbcrConversionInfo*>( this );
+    }
+
+    bool operator==( SamplerYcbcrConversionInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( conversion == rhs.conversion );
+    }
+
+    bool operator!=( SamplerYcbcrConversionInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion;
+  };
+  static_assert( sizeof( SamplerYcbcrConversionInfo ) == sizeof( VkSamplerYcbcrConversionInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SamplerYcbcrConversionInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct SemaphoreCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ = VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags() ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & operator=( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo ) - offsetof( SemaphoreCreateInfo, pNext ) );
+      return *this;
+    }
+
+    SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SemaphoreCreateInfo& operator=( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    SemaphoreCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SemaphoreCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    operator VkSemaphoreCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSemaphoreCreateInfo*>( this );
+    }
+
+    operator VkSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSemaphoreCreateInfo*>( this );
+    }
+
+    bool operator==( SemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( SemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags;
+  };
+  static_assert( sizeof( SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SemaphoreCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct SemaphoreGetFdInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = VULKAN_HPP_NAMESPACE::Semaphore(),
+                                                VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+      : semaphore( semaphore_ )
+      , handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & operator=( VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR ) - offsetof( SemaphoreGetFdInfoKHR, pNext ) );
+      return *this;
+    }
+
+    SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SemaphoreGetFdInfoKHR& operator=( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    SemaphoreGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SemaphoreGetFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphore = semaphore_;
+      return *this;
+    }
+
+    SemaphoreGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    operator VkSemaphoreGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSemaphoreGetFdInfoKHR*>( this );
+    }
+
+    operator VkSemaphoreGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSemaphoreGetFdInfoKHR*>( this );
+    }
+
+    bool operator==( SemaphoreGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( semaphore == rhs.semaphore )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( SemaphoreGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetFdInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore;
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType;
+  };
+  static_assert( sizeof( SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SemaphoreGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+  struct SemaphoreGetWin32HandleInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = VULKAN_HPP_NAMESPACE::Semaphore(),
+                                                         VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+      : semaphore( semaphore_ )
+      , handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR ) - offsetof( SemaphoreGetWin32HandleInfoKHR, pNext ) );
+      return *this;
+    }
+
+    SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SemaphoreGetWin32HandleInfoKHR& operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    SemaphoreGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SemaphoreGetWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphore = semaphore_;
+      return *this;
+    }
+
+    SemaphoreGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    operator VkSemaphoreGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR*>( this );
+    }
+
+    operator VkSemaphoreGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSemaphoreGetWin32HandleInfoKHR*>( this );
+    }
+
+    bool operator==( SemaphoreGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( semaphore == rhs.semaphore )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( SemaphoreGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore;
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType;
+  };
+  static_assert( sizeof( SemaphoreGetWin32HandleInfoKHR ) == sizeof( VkSemaphoreGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SemaphoreGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct SemaphoreSignalInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR SemaphoreSignalInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = VULKAN_HPP_NAMESPACE::Semaphore(),
+                                                 uint64_t value_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : semaphore( semaphore_ )
+      , value( value_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SemaphoreSignalInfoKHR & operator=( VULKAN_HPP_NAMESPACE::SemaphoreSignalInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SemaphoreSignalInfoKHR ) - offsetof( SemaphoreSignalInfoKHR, pNext ) );
+      return *this;
+    }
+
+    SemaphoreSignalInfoKHR( VkSemaphoreSignalInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SemaphoreSignalInfoKHR& operator=( VkSemaphoreSignalInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreSignalInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    SemaphoreSignalInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SemaphoreSignalInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphore = semaphore_;
+      return *this;
+    }
+
+    SemaphoreSignalInfoKHR & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT
+    {
+      value = value_;
+      return *this;
+    }
+
+    operator VkSemaphoreSignalInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSemaphoreSignalInfoKHR*>( this );
+    }
+
+    operator VkSemaphoreSignalInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSemaphoreSignalInfoKHR*>( this );
+    }
+
+    bool operator==( SemaphoreSignalInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( semaphore == rhs.semaphore )
+          && ( value == rhs.value );
+    }
+
+    bool operator!=( SemaphoreSignalInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSignalInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore;
+    uint64_t value;
+  };
+  static_assert( sizeof( SemaphoreSignalInfoKHR ) == sizeof( VkSemaphoreSignalInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SemaphoreSignalInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct SemaphoreTypeCreateInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfoKHR( VULKAN_HPP_NAMESPACE::SemaphoreTypeKHR semaphoreType_ = VULKAN_HPP_NAMESPACE::SemaphoreTypeKHR::eBinary,
+                                                     uint64_t initialValue_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : semaphoreType( semaphoreType_ )
+      , initialValue( initialValue_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfoKHR ) - offsetof( SemaphoreTypeCreateInfoKHR, pNext ) );
+      return *this;
+    }
+
+    SemaphoreTypeCreateInfoKHR( VkSemaphoreTypeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SemaphoreTypeCreateInfoKHR& operator=( VkSemaphoreTypeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    SemaphoreTypeCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SemaphoreTypeCreateInfoKHR & setSemaphoreType( VULKAN_HPP_NAMESPACE::SemaphoreTypeKHR semaphoreType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphoreType = semaphoreType_;
+      return *this;
+    }
+
+    SemaphoreTypeCreateInfoKHR & setInitialValue( uint64_t initialValue_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialValue = initialValue_;
+      return *this;
+    }
+
+    operator VkSemaphoreTypeCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSemaphoreTypeCreateInfoKHR*>( this );
+    }
+
+    operator VkSemaphoreTypeCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSemaphoreTypeCreateInfoKHR*>( this );
+    }
+
+    bool operator==( SemaphoreTypeCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( semaphoreType == rhs.semaphoreType )
+          && ( initialValue == rhs.initialValue );
+    }
+
+    bool operator!=( SemaphoreTypeCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreTypeCreateInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::SemaphoreTypeKHR semaphoreType;
+    uint64_t initialValue;
+  };
+  static_assert( sizeof( SemaphoreTypeCreateInfoKHR ) == sizeof( VkSemaphoreTypeCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SemaphoreTypeCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct SemaphoreWaitInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR SemaphoreWaitInfoKHR( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlagsKHR flags_ = VULKAN_HPP_NAMESPACE::SemaphoreWaitFlagsKHR(),
+                                               uint32_t semaphoreCount_ = 0,
+                                               const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores_ = nullptr,
+                                               const uint64_t* pValues_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , semaphoreCount( semaphoreCount_ )
+      , pSemaphores( pSemaphores_ )
+      , pValues( pValues_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SemaphoreWaitInfoKHR & operator=( VULKAN_HPP_NAMESPACE::SemaphoreWaitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SemaphoreWaitInfoKHR ) - offsetof( SemaphoreWaitInfoKHR, pNext ) );
+      return *this;
+    }
+
+    SemaphoreWaitInfoKHR( VkSemaphoreWaitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SemaphoreWaitInfoKHR& operator=( VkSemaphoreWaitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    SemaphoreWaitInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SemaphoreWaitInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    SemaphoreWaitInfoKHR & setSemaphoreCount( uint32_t semaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphoreCount = semaphoreCount_;
+      return *this;
+    }
+
+    SemaphoreWaitInfoKHR & setPSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSemaphores = pSemaphores_;
+      return *this;
+    }
+
+    SemaphoreWaitInfoKHR & setPValues( const uint64_t* pValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pValues = pValues_;
+      return *this;
+    }
+
+    operator VkSemaphoreWaitInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSemaphoreWaitInfoKHR*>( this );
+    }
+
+    operator VkSemaphoreWaitInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSemaphoreWaitInfoKHR*>( this );
+    }
+
+    bool operator==( SemaphoreWaitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( semaphoreCount == rhs.semaphoreCount )
+          && ( pSemaphores == rhs.pSemaphores )
+          && ( pValues == rhs.pValues );
+    }
+
+    bool operator!=( SemaphoreWaitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreWaitInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::SemaphoreWaitFlagsKHR flags;
+    uint32_t semaphoreCount;
+    const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores;
+    const uint64_t* pValues;
+  };
+  static_assert( sizeof( SemaphoreWaitInfoKHR ) == sizeof( VkSemaphoreWaitInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SemaphoreWaitInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
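+  // A minimal timeline-semaphore sketch using the structures above, assuming a
+  // vk::Device `device` with VK_KHR_timeline_semaphore enabled and the
+  // createSemaphore / signalSemaphoreKHR / waitSemaphoresKHR wrappers generated
+  // elsewhere in this header:
+  //
+  //   vk::SemaphoreTypeCreateInfoKHR typeInfo( vk::SemaphoreTypeKHR::eTimeline, 0 );
+  //   vk::SemaphoreCreateInfo createInfo;
+  //   createInfo.setPNext( &typeInfo );
+  //   vk::Semaphore timeline = device.createSemaphore( createInfo );
+  //
+  //   device.signalSemaphoreKHR( vk::SemaphoreSignalInfoKHR( timeline, 1 ) );  // set payload to 1
+  //   uint64_t waitValue = 1;
+  //   vk::SemaphoreWaitInfoKHR waitInfo( {}, 1, &timeline, &waitValue );
+  //   device.waitSemaphoresKHR( waitInfo, UINT64_MAX );                        // block until payload >= 1
+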
+  struct ShaderModuleCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ = VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags(),
+                                                 size_t codeSize_ = 0,
+                                                 const uint32_t* pCode_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , codeSize( codeSize_ )
+      , pCode( pCode_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo ) - offsetof( ShaderModuleCreateInfo, pNext ) );
+      return *this;
+    }
+
+    ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ShaderModuleCreateInfo& operator=( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    ShaderModuleCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ShaderModuleCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ShaderModuleCreateInfo & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      codeSize = codeSize_;
+      return *this;
+    }
+
+    ShaderModuleCreateInfo & setPCode( const uint32_t* pCode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCode = pCode_;
+      return *this;
+    }
+
+    operator VkShaderModuleCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkShaderModuleCreateInfo*>( this );
+    }
+
+    operator VkShaderModuleCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkShaderModuleCreateInfo*>( this );
+    }
+
+    bool operator==( ShaderModuleCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( codeSize == rhs.codeSize )
+          && ( pCode == rhs.pCode );
+    }
+
+    bool operator!=( ShaderModuleCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleCreateInfo;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags;
+    size_t codeSize;
+    const uint32_t* pCode;
+  };
+  static_assert( sizeof( ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ShaderModuleCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
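+  // A minimal usage sketch for ShaderModuleCreateInfo, assuming a SPIR-V word
+  // buffer `spirv` (std::vector<uint32_t>) and a vk::Device `device`; note that
+  // codeSize is given in bytes while pCode points at 32-bit words:
+  //
+  //   vk::ShaderModuleCreateInfo moduleInfo( {}, spirv.size() * sizeof( uint32_t ), spirv.data() );
+  //   vk::ShaderModule shaderModule = device.createShaderModule( moduleInfo );
+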
+  struct ShaderModuleValidationCacheCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ = VULKAN_HPP_NAMESPACE::ValidationCacheEXT() ) VULKAN_HPP_NOEXCEPT
+      : validationCache( validationCache_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT ) - offsetof( ShaderModuleValidationCacheCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ShaderModuleValidationCacheCreateInfoEXT& operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    ShaderModuleValidationCacheCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ShaderModuleValidationCacheCreateInfoEXT & setValidationCache( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ ) VULKAN_HPP_NOEXCEPT
+    {
+      validationCache = validationCache_;
+      return *this;
+    }
+
+    operator VkShaderModuleValidationCacheCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkShaderModuleValidationCacheCreateInfoEXT*>( this );
+    }
+
+    operator VkShaderModuleValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkShaderModuleValidationCacheCreateInfoEXT*>( this );
+    }
+
+    bool operator==( ShaderModuleValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( validationCache == rhs.validationCache );
+    }
+
+    bool operator!=( ShaderModuleValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleValidationCacheCreateInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
+  };
+  static_assert( sizeof( ShaderModuleValidationCacheCreateInfoEXT ) == sizeof( VkShaderModuleValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ShaderModuleValidationCacheCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
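+  // Editorial note on the pattern shared by the wrappers in this section: operator=( <wrapper> const & )
+  // deliberately starts its memcpy at pNext, so the const sType member (already initialised to the
+  // correct StructureType) is never overwritten, while the offsetof use and the reinterpret_cast
+  // based Vk* conversions are valid because the static_asserts confirm each wrapper has the same
+  // size and standard layout as the corresponding C struct.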
+
+  struct ShaderResourceUsageAMD
+  {
+    ShaderResourceUsageAMD( uint32_t numUsedVgprs_ = 0,
+                            uint32_t numUsedSgprs_ = 0,
+                            uint32_t ldsSizePerLocalWorkGroup_ = 0,
+                            size_t ldsUsageSizeInBytes_ = 0,
+                            size_t scratchMemUsageInBytes_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : numUsedVgprs( numUsedVgprs_ )
+      , numUsedSgprs( numUsedSgprs_ )
+      , ldsSizePerLocalWorkGroup( ldsSizePerLocalWorkGroup_ )
+      , ldsUsageSizeInBytes( ldsUsageSizeInBytes_ )
+      , scratchMemUsageInBytes( scratchMemUsageInBytes_ )
+    {}
+
+    ShaderResourceUsageAMD( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ShaderResourceUsageAMD& operator=( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD const *>(&rhs);
+      return *this;
+    }
+
+    operator VkShaderResourceUsageAMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkShaderResourceUsageAMD*>( this );
+    }
+
+    operator VkShaderResourceUsageAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkShaderResourceUsageAMD*>( this );
+    }
+
+    bool operator==( ShaderResourceUsageAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( numUsedVgprs == rhs.numUsedVgprs )
+          && ( numUsedSgprs == rhs.numUsedSgprs )
+          && ( ldsSizePerLocalWorkGroup == rhs.ldsSizePerLocalWorkGroup )
+          && ( ldsUsageSizeInBytes == rhs.ldsUsageSizeInBytes )
+          && ( scratchMemUsageInBytes == rhs.scratchMemUsageInBytes );
+    }
+
+    bool operator!=( ShaderResourceUsageAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint32_t numUsedVgprs;
+    uint32_t numUsedSgprs;
+    uint32_t ldsSizePerLocalWorkGroup;
+    size_t ldsUsageSizeInBytes;
+    size_t scratchMemUsageInBytes;
+  };
+  static_assert( sizeof( ShaderResourceUsageAMD ) == sizeof( VkShaderResourceUsageAMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ShaderResourceUsageAMD>::value, "struct wrapper is not a standard layout!" );
+
+  struct ShaderStatisticsInfoAMD
+  {
+    ShaderStatisticsInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask_ = VULKAN_HPP_NAMESPACE::ShaderStageFlags(),
+                             VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage_ = VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD(),
+                             uint32_t numPhysicalVgprs_ = 0,
+                             uint32_t numPhysicalSgprs_ = 0,
+                             uint32_t numAvailableVgprs_ = 0,
+                             uint32_t numAvailableSgprs_ = 0,
+                             std::array<uint32_t,3> const& computeWorkGroupSize_ = { { 0 } } ) VULKAN_HPP_NOEXCEPT
+      : shaderStageMask( shaderStageMask_ )
+      , resourceUsage( resourceUsage_ )
+      , numPhysicalVgprs( numPhysicalVgprs_ )
+      , numPhysicalSgprs( numPhysicalSgprs_ )
+      , numAvailableVgprs( numAvailableVgprs_ )
+      , numAvailableSgprs( numAvailableSgprs_ )
+      , computeWorkGroupSize{}
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpressionArrayCopy<uint32_t,3,3>::copy( computeWorkGroupSize, computeWorkGroupSize_ );
+    }
+
+    ShaderStatisticsInfoAMD( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ShaderStatisticsInfoAMD& operator=( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD const *>(&rhs);
+      return *this;
+    }
+
+    operator VkShaderStatisticsInfoAMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkShaderStatisticsInfoAMD*>( this );
+    }
+
+    operator VkShaderStatisticsInfoAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkShaderStatisticsInfoAMD*>( this );
+    }
+
+    bool operator==( ShaderStatisticsInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( shaderStageMask == rhs.shaderStageMask )
+          && ( resourceUsage == rhs.resourceUsage )
+          && ( numPhysicalVgprs == rhs.numPhysicalVgprs )
+          && ( numPhysicalSgprs == rhs.numPhysicalSgprs )
+          && ( numAvailableVgprs == rhs.numAvailableVgprs )
+          && ( numAvailableSgprs == rhs.numAvailableSgprs )
+          && ( memcmp( computeWorkGroupSize, rhs.computeWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 );
+    }
+
+    bool operator!=( ShaderStatisticsInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask;
+    VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage;
+    uint32_t numPhysicalVgprs;
+    uint32_t numPhysicalSgprs;
+    uint32_t numAvailableVgprs;
+    uint32_t numAvailableSgprs;
+    uint32_t computeWorkGroupSize[3];
+  };
+  static_assert( sizeof( ShaderStatisticsInfoAMD ) == sizeof( VkShaderStatisticsInfoAMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ShaderStatisticsInfoAMD>::value, "struct wrapper is not a standard layout!" );
+
+  struct SharedPresentSurfaceCapabilitiesKHR
+  {
+    SharedPresentSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags_ = VULKAN_HPP_NAMESPACE::ImageUsageFlags() ) VULKAN_HPP_NOEXCEPT
+      : sharedPresentSupportedUsageFlags( sharedPresentSupportedUsageFlags_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR & operator=( VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR ) - offsetof( SharedPresentSurfaceCapabilitiesKHR, pNext ) );
+      return *this;
+    }
+
+    SharedPresentSurfaceCapabilitiesKHR( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SharedPresentSurfaceCapabilitiesKHR& operator=( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkSharedPresentSurfaceCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSharedPresentSurfaceCapabilitiesKHR*>( this );
+    }
+
+    operator VkSharedPresentSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSharedPresentSurfaceCapabilitiesKHR*>( this );
+    }
+
+    bool operator==( SharedPresentSurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( sharedPresentSupportedUsageFlags == rhs.sharedPresentSupportedUsageFlags );
+    }
+
+    bool operator!=( SharedPresentSurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSharedPresentSurfaceCapabilitiesKHR;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags;
+  };
+  static_assert( sizeof( SharedPresentSurfaceCapabilitiesKHR ) == sizeof( VkSharedPresentSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SharedPresentSurfaceCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct SparseImageFormatProperties
+  {
+    SparseImageFormatProperties( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = VULKAN_HPP_NAMESPACE::ImageAspectFlags(),
+                                 VULKAN_HPP_NAMESPACE::Extent3D imageGranularity_ = VULKAN_HPP_NAMESPACE::Extent3D(),
+                                 VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags_ = VULKAN_HPP_NAMESPACE::SparseImageFormatFlags() ) VULKAN_HPP_NOEXCEPT
+      : aspectMask( aspectMask_ )
+      , imageGranularity( imageGranularity_ )
+      , flags( flags_ )
+    {}
+
+    SparseImageFormatProperties( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SparseImageFormatProperties& operator=( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkSparseImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSparseImageFormatProperties*>( this );
+    }
+
+    operator VkSparseImageFormatProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSparseImageFormatProperties*>( this );
+    }
+
+    bool operator==( SparseImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( aspectMask == rhs.aspectMask )
+          && ( imageGranularity == rhs.imageGranularity )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( SparseImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask;
+    VULKAN_HPP_NAMESPACE::Extent3D imageGranularity;
+    VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags;
+  };
+  static_assert( sizeof( SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SparseImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct SparseImageFormatProperties2
+  {
+    SparseImageFormatProperties2( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties_ = VULKAN_HPP_NAMESPACE::SparseImageFormatProperties() ) VULKAN_HPP_NOEXCEPT
+      : properties( properties_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 & operator=( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 ) - offsetof( SparseImageFormatProperties2, pNext ) );
+      return *this;
+    }
+
+    SparseImageFormatProperties2( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SparseImageFormatProperties2& operator=( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 const *>(&rhs);
+      return *this;
+    }
+
+    operator VkSparseImageFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSparseImageFormatProperties2*>( this );
+    }
+
+    operator VkSparseImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSparseImageFormatProperties2*>( this );
+    }
+
+    bool operator==( SparseImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( properties == rhs.properties );
+    }
+
+    bool operator!=( SparseImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageFormatProperties2;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties;
+  };
+  static_assert( sizeof( SparseImageFormatProperties2 ) == sizeof( VkSparseImageFormatProperties2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SparseImageFormatProperties2>::value, "struct wrapper is not a standard layout!" );
+
+  struct SparseImageMemoryRequirements
+  {
+    SparseImageMemoryRequirements( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties_ = VULKAN_HPP_NAMESPACE::SparseImageFormatProperties(),
+                                   uint32_t imageMipTailFirstLod_ = 0,
+                                   VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize_ = 0,
+                                   VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset_ = 0,
+                                   VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : formatProperties( formatProperties_ )
+      , imageMipTailFirstLod( imageMipTailFirstLod_ )
+      , imageMipTailSize( imageMipTailSize_ )
+      , imageMipTailOffset( imageMipTailOffset_ )
+      , imageMipTailStride( imageMipTailStride_ )
+    {}
+
+    SparseImageMemoryRequirements( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SparseImageMemoryRequirements& operator=( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements const *>(&rhs);
+      return *this;
+    }
+
+    operator VkSparseImageMemoryRequirements const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSparseImageMemoryRequirements*>( this );
+    }
+
+    operator VkSparseImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSparseImageMemoryRequirements*>( this );
+    }
+
+    bool operator==( SparseImageMemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( formatProperties == rhs.formatProperties )
+          && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod )
+          && ( imageMipTailSize == rhs.imageMipTailSize )
+          && ( imageMipTailOffset == rhs.imageMipTailOffset )
+          && ( imageMipTailStride == rhs.imageMipTailStride );
+    }
+
+    bool operator!=( SparseImageMemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties;
+    uint32_t imageMipTailFirstLod;
+    VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize;
+    VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset;
+    VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride;
+  };
+  static_assert( sizeof( SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SparseImageMemoryRequirements>::value, "struct wrapper is not a standard layout!" );
+
+  struct SparseImageMemoryRequirements2
+  {
+    SparseImageMemoryRequirements2( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements_ = VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements() ) VULKAN_HPP_NOEXCEPT
+      : memoryRequirements( memoryRequirements_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 & operator=( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 ) - offsetof( SparseImageMemoryRequirements2, pNext ) );
+      return *this;
+    }
+
+    SparseImageMemoryRequirements2( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SparseImageMemoryRequirements2& operator=( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 const *>(&rhs);
+      return *this;
+    }
+
+    operator VkSparseImageMemoryRequirements2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSparseImageMemoryRequirements2*>( this );
+    }
+
+    operator VkSparseImageMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSparseImageMemoryRequirements2*>( this );
+    }
+
+    bool operator==( SparseImageMemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryRequirements == rhs.memoryRequirements );
+    }
+
+    bool operator!=( SparseImageMemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageMemoryRequirements2;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements;
+  };
+  static_assert( sizeof( SparseImageMemoryRequirements2 ) == sizeof( VkSparseImageMemoryRequirements2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SparseImageMemoryRequirements2>::value, "struct wrapper is not a standard layout!" );
+
+#ifdef VK_USE_PLATFORM_GGP
+
+  struct StreamDescriptorSurfaceCreateInfoGGP
+  {
+    VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ = VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP(),
+                                                               GgpStreamDescriptor streamDescriptor_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , streamDescriptor( streamDescriptor_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & operator=( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP ) - offsetof( StreamDescriptorSurfaceCreateInfoGGP, pNext ) );
+      return *this;
+    }
+
+    StreamDescriptorSurfaceCreateInfoGGP( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    StreamDescriptorSurfaceCreateInfoGGP& operator=( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const *>(&rhs);
+      return *this;
+    }
+
+    StreamDescriptorSurfaceCreateInfoGGP & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    StreamDescriptorSurfaceCreateInfoGGP & setFlags( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    StreamDescriptorSurfaceCreateInfoGGP & setStreamDescriptor( GgpStreamDescriptor streamDescriptor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      streamDescriptor = streamDescriptor_;
+      return *this;
+    }
+
+    operator VkStreamDescriptorSurfaceCreateInfoGGP const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP*>( this );
+    }
+
+    operator VkStreamDescriptorSurfaceCreateInfoGGP &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkStreamDescriptorSurfaceCreateInfoGGP*>( this );
+    }
+
+    bool operator==( StreamDescriptorSurfaceCreateInfoGGP const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( streamDescriptor == rhs.streamDescriptor );
+    }
+
+    bool operator!=( StreamDescriptorSurfaceCreateInfoGGP const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags;
+    GgpStreamDescriptor streamDescriptor;
+  };
+  static_assert( sizeof( StreamDescriptorSurfaceCreateInfoGGP ) == sizeof( VkStreamDescriptorSurfaceCreateInfoGGP ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<StreamDescriptorSurfaceCreateInfoGGP>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  struct SubmitInfo
+  {
+    VULKAN_HPP_CONSTEXPR SubmitInfo( uint32_t waitSemaphoreCount_ = 0,
+                                     const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = nullptr,
+                                     const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask_ = nullptr,
+                                     uint32_t commandBufferCount_ = 0,
+                                     const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers_ = nullptr,
+                                     uint32_t signalSemaphoreCount_ = 0,
+                                     const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : waitSemaphoreCount( waitSemaphoreCount_ )
+      , pWaitSemaphores( pWaitSemaphores_ )
+      , pWaitDstStageMask( pWaitDstStageMask_ )
+      , commandBufferCount( commandBufferCount_ )
+      , pCommandBuffers( pCommandBuffers_ )
+      , signalSemaphoreCount( signalSemaphoreCount_ )
+      , pSignalSemaphores( pSignalSemaphores_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SubmitInfo & operator=( VULKAN_HPP_NAMESPACE::SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SubmitInfo ) - offsetof( SubmitInfo, pNext ) );
+      return *this;
+    }
+
+    SubmitInfo( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SubmitInfo& operator=( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubmitInfo const *>(&rhs);
+      return *this;
+    }
+
+    SubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreCount = waitSemaphoreCount_;
+      return *this;
+    }
+
+    SubmitInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pWaitSemaphores = pWaitSemaphores_;
+      return *this;
+    }
+
+    SubmitInfo & setPWaitDstStageMask( const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pWaitDstStageMask = pWaitDstStageMask_;
+      return *this;
+    }
+
+    SubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      commandBufferCount = commandBufferCount_;
+      return *this;
+    }
+
+    SubmitInfo & setPCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCommandBuffers = pCommandBuffers_;
+      return *this;
+    }
+
+    SubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreCount = signalSemaphoreCount_;
+      return *this;
+    }
+
+    SubmitInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSignalSemaphores = pSignalSemaphores_;
+      return *this;
+    }
+
+    operator VkSubmitInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubmitInfo*>( this );
+    }
+
+    operator VkSubmitInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubmitInfo*>( this );
+    }
+
+    bool operator==( SubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+          && ( pWaitSemaphores == rhs.pWaitSemaphores )
+          && ( pWaitDstStageMask == rhs.pWaitDstStageMask )
+          && ( commandBufferCount == rhs.commandBufferCount )
+          && ( pCommandBuffers == rhs.pCommandBuffers )
+          && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
+          && ( pSignalSemaphores == rhs.pSignalSemaphores );
+    }
+
+    bool operator!=( SubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo;
+    const void* pNext = nullptr;
+    uint32_t waitSemaphoreCount;
+    const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores;
+    const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask;
+    uint32_t commandBufferCount;
+    const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers;
+    uint32_t signalSemaphoreCount;
+    const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores;
+  };
+  static_assert( sizeof( SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubmitInfo>::value, "struct wrapper is not a standard layout!" );
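+  // Editorial note: a minimal submission sketch for the wrapper above, kept inside a comment so the
+  // generated header is unchanged. It assumes a valid VkQueue `queue`, a recorded
+  // VULKAN_HPP_NAMESPACE::CommandBuffer `commandBuffer`, and a VkFence `fence`; all three names
+  // are illustrative only:
+  //
+  //   VULKAN_HPP_NAMESPACE::SubmitInfo submitInfo;
+  //   submitInfo.setCommandBufferCount( 1 )
+  //             .setPCommandBuffers( &commandBuffer );
+  //   VkSubmitInfo const & cSubmitInfo = submitInfo;  // operator VkSubmitInfo const&
+  //   vkQueueSubmit( queue, 1, &cSubmitInfo, fence );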
+
+  struct SubpassBeginInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR SubpassBeginInfoKHR( VULKAN_HPP_NAMESPACE::SubpassContents contents_ = VULKAN_HPP_NAMESPACE::SubpassContents::eInline ) VULKAN_HPP_NOEXCEPT
+      : contents( contents_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SubpassBeginInfoKHR & operator=( VULKAN_HPP_NAMESPACE::SubpassBeginInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SubpassBeginInfoKHR ) - offsetof( SubpassBeginInfoKHR, pNext ) );
+      return *this;
+    }
+
+    SubpassBeginInfoKHR( VkSubpassBeginInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SubpassBeginInfoKHR& operator=( VkSubpassBeginInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassBeginInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    SubpassBeginInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SubpassBeginInfoKHR & setContents( VULKAN_HPP_NAMESPACE::SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT
+    {
+      contents = contents_;
+      return *this;
+    }
+
+    operator VkSubpassBeginInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassBeginInfoKHR*>( this );
+    }
+
+    operator VkSubpassBeginInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassBeginInfoKHR*>( this );
+    }
+
+    bool operator==( SubpassBeginInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( contents == rhs.contents );
+    }
+
+    bool operator!=( SubpassBeginInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassBeginInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::SubpassContents contents;
+  };
+  static_assert( sizeof( SubpassBeginInfoKHR ) == sizeof( VkSubpassBeginInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubpassBeginInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct SubpassDescriptionDepthStencilResolveKHR
+  {
+    VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolveKHR( VULKAN_HPP_NAMESPACE::ResolveModeFlagBitsKHR depthResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBitsKHR::eNone,
+                                                                   VULKAN_HPP_NAMESPACE::ResolveModeFlagBitsKHR stencilResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBitsKHR::eNone,
+                                                                   const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pDepthStencilResolveAttachment_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : depthResolveMode( depthResolveMode_ )
+      , stencilResolveMode( stencilResolveMode_ )
+      , pDepthStencilResolveAttachment( pDepthStencilResolveAttachment_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolveKHR & operator=( VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolveKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolveKHR ) - offsetof( SubpassDescriptionDepthStencilResolveKHR, pNext ) );
+      return *this;
+    }
+
+    SubpassDescriptionDepthStencilResolveKHR( VkSubpassDescriptionDepthStencilResolveKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SubpassDescriptionDepthStencilResolveKHR& operator=( VkSubpassDescriptionDepthStencilResolveKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolveKHR const *>(&rhs);
+      return *this;
+    }
+
+    SubpassDescriptionDepthStencilResolveKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SubpassDescriptionDepthStencilResolveKHR & setDepthResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBitsKHR depthResolveMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthResolveMode = depthResolveMode_;
+      return *this;
+    }
+
+    SubpassDescriptionDepthStencilResolveKHR & setStencilResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBitsKHR stencilResolveMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilResolveMode = stencilResolveMode_;
+      return *this;
+    }
+
+    SubpassDescriptionDepthStencilResolveKHR & setPDepthStencilResolveAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pDepthStencilResolveAttachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDepthStencilResolveAttachment = pDepthStencilResolveAttachment_;
+      return *this;
+    }
+
+    operator VkSubpassDescriptionDepthStencilResolveKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassDescriptionDepthStencilResolveKHR*>( this );
+    }
+
+    operator VkSubpassDescriptionDepthStencilResolveKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassDescriptionDepthStencilResolveKHR*>( this );
+    }
+
+    bool operator==( SubpassDescriptionDepthStencilResolveKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( depthResolveMode == rhs.depthResolveMode )
+          && ( stencilResolveMode == rhs.stencilResolveMode )
+          && ( pDepthStencilResolveAttachment == rhs.pDepthStencilResolveAttachment );
+    }
+
+    bool operator!=( SubpassDescriptionDepthStencilResolveKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescriptionDepthStencilResolveKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ResolveModeFlagBitsKHR depthResolveMode;
+    VULKAN_HPP_NAMESPACE::ResolveModeFlagBitsKHR stencilResolveMode;
+    const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pDepthStencilResolveAttachment;
+  };
+  static_assert( sizeof( SubpassDescriptionDepthStencilResolveKHR ) == sizeof( VkSubpassDescriptionDepthStencilResolveKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubpassDescriptionDepthStencilResolveKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct SubpassEndInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR SubpassEndInfoKHR() VULKAN_HPP_NOEXCEPT
+    {}
+
+    VULKAN_HPP_NAMESPACE::SubpassEndInfoKHR & operator=( VULKAN_HPP_NAMESPACE::SubpassEndInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SubpassEndInfoKHR ) - offsetof( SubpassEndInfoKHR, pNext ) );
+      return *this;
+    }
+
+    SubpassEndInfoKHR( VkSubpassEndInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SubpassEndInfoKHR& operator=( VkSubpassEndInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassEndInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    SubpassEndInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    operator VkSubpassEndInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassEndInfoKHR*>( this );
+    }
+
+    operator VkSubpassEndInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassEndInfoKHR*>( this );
+    }
+
+    bool operator==( SubpassEndInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext );
+    }
+
+    bool operator!=( SubpassEndInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassEndInfoKHR;
+    const void* pNext = nullptr;
+  };
+  static_assert( sizeof( SubpassEndInfoKHR ) == sizeof( VkSubpassEndInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubpassEndInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct SurfaceCapabilities2EXT
+  {
+    SurfaceCapabilities2EXT( uint32_t minImageCount_ = 0,
+                             uint32_t maxImageCount_ = 0,
+                             VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = VULKAN_HPP_NAMESPACE::Extent2D(),
+                             VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = VULKAN_HPP_NAMESPACE::Extent2D(),
+                             VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = VULKAN_HPP_NAMESPACE::Extent2D(),
+                             uint32_t maxImageArrayLayers_ = 0,
+                             VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR(),
+                             VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
+                             VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR(),
+                             VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = VULKAN_HPP_NAMESPACE::ImageUsageFlags(),
+                             VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters_ = VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT() ) VULKAN_HPP_NOEXCEPT
+      : minImageCount( minImageCount_ )
+      , maxImageCount( maxImageCount_ )
+      , currentExtent( currentExtent_ )
+      , minImageExtent( minImageExtent_ )
+      , maxImageExtent( maxImageExtent_ )
+      , maxImageArrayLayers( maxImageArrayLayers_ )
+      , supportedTransforms( supportedTransforms_ )
+      , currentTransform( currentTransform_ )
+      , supportedCompositeAlpha( supportedCompositeAlpha_ )
+      , supportedUsageFlags( supportedUsageFlags_ )
+      , supportedSurfaceCounters( supportedSurfaceCounters_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT & operator=( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT ) - offsetof( SurfaceCapabilities2EXT, pNext ) );
+      return *this;
+    }
+
+    SurfaceCapabilities2EXT( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SurfaceCapabilities2EXT& operator=( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkSurfaceCapabilities2EXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceCapabilities2EXT*>( this );
+    }
+
+    operator VkSurfaceCapabilities2EXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceCapabilities2EXT*>( this );
+    }
+
+    bool operator==( SurfaceCapabilities2EXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( minImageCount == rhs.minImageCount )
+          && ( maxImageCount == rhs.maxImageCount )
+          && ( currentExtent == rhs.currentExtent )
+          && ( minImageExtent == rhs.minImageExtent )
+          && ( maxImageExtent == rhs.maxImageExtent )
+          && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
+          && ( supportedTransforms == rhs.supportedTransforms )
+          && ( currentTransform == rhs.currentTransform )
+          && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
+          && ( supportedUsageFlags == rhs.supportedUsageFlags )
+          && ( supportedSurfaceCounters == rhs.supportedSurfaceCounters );
+    }
+
+    bool operator!=( SurfaceCapabilities2EXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2EXT;
+    void* pNext = nullptr;
+    uint32_t minImageCount;
+    uint32_t maxImageCount;
+    VULKAN_HPP_NAMESPACE::Extent2D currentExtent;
+    VULKAN_HPP_NAMESPACE::Extent2D minImageExtent;
+    VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent;
+    uint32_t maxImageArrayLayers;
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms;
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform;
+    VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha;
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags;
+    VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters;
+  };
+  static_assert( sizeof( SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceCapabilities2EXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct SurfaceCapabilitiesKHR
+  {
+    SurfaceCapabilitiesKHR( uint32_t minImageCount_ = 0,
+                            uint32_t maxImageCount_ = 0,
+                            VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = VULKAN_HPP_NAMESPACE::Extent2D(),
+                            VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = VULKAN_HPP_NAMESPACE::Extent2D(),
+                            VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = VULKAN_HPP_NAMESPACE::Extent2D(),
+                            uint32_t maxImageArrayLayers_ = 0,
+                            VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR(),
+                            VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
+                            VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR(),
+                            VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = VULKAN_HPP_NAMESPACE::ImageUsageFlags() ) VULKAN_HPP_NOEXCEPT
+      : minImageCount( minImageCount_ )
+      , maxImageCount( maxImageCount_ )
+      , currentExtent( currentExtent_ )
+      , minImageExtent( minImageExtent_ )
+      , maxImageExtent( maxImageExtent_ )
+      , maxImageArrayLayers( maxImageArrayLayers_ )
+      , supportedTransforms( supportedTransforms_ )
+      , currentTransform( currentTransform_ )
+      , supportedCompositeAlpha( supportedCompositeAlpha_ )
+      , supportedUsageFlags( supportedUsageFlags_ )
+    {}
+
+    SurfaceCapabilitiesKHR( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SurfaceCapabilitiesKHR& operator=( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkSurfaceCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceCapabilitiesKHR*>( this );
+    }
+
+    operator VkSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceCapabilitiesKHR*>( this );
+    }
+
+    bool operator==( SurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( minImageCount == rhs.minImageCount )
+          && ( maxImageCount == rhs.maxImageCount )
+          && ( currentExtent == rhs.currentExtent )
+          && ( minImageExtent == rhs.minImageExtent )
+          && ( maxImageExtent == rhs.maxImageExtent )
+          && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
+          && ( supportedTransforms == rhs.supportedTransforms )
+          && ( currentTransform == rhs.currentTransform )
+          && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
+          && ( supportedUsageFlags == rhs.supportedUsageFlags );
+    }
+
+    bool operator!=( SurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    uint32_t minImageCount;
+    uint32_t maxImageCount;
+    VULKAN_HPP_NAMESPACE::Extent2D currentExtent;
+    VULKAN_HPP_NAMESPACE::Extent2D minImageExtent;
+    VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent;
+    uint32_t maxImageArrayLayers;
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms;
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform;
+    VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha;
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags;
+  };
+  static_assert( sizeof( SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct SurfaceCapabilities2KHR
+  {
+    SurfaceCapabilities2KHR( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities_ = VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR() ) VULKAN_HPP_NOEXCEPT
+      : surfaceCapabilities( surfaceCapabilities_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & operator=( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR ) - offsetof( SurfaceCapabilities2KHR, pNext ) );
+      return *this;
+    }
+
+    SurfaceCapabilities2KHR( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SurfaceCapabilities2KHR& operator=( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkSurfaceCapabilities2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceCapabilities2KHR*>( this );
+    }
+
+    operator VkSurfaceCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceCapabilities2KHR*>( this );
+    }
+
+    bool operator==( SurfaceCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( surfaceCapabilities == rhs.surfaceCapabilities );
+    }
+
+    bool operator!=( SurfaceCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2KHR;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities;
+  };
+  static_assert( sizeof( SurfaceCapabilities2KHR ) == sizeof( VkSurfaceCapabilities2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceCapabilities2KHR>::value, "struct wrapper is not a standard layout!" );
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+  struct SurfaceCapabilitiesFullScreenExclusiveEXT
+  {
+    VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : fullScreenExclusiveSupported( fullScreenExclusiveSupported_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT ) - offsetof( SurfaceCapabilitiesFullScreenExclusiveEXT, pNext ) );
+      return *this;
+    }
+
+    SurfaceCapabilitiesFullScreenExclusiveEXT( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SurfaceCapabilitiesFullScreenExclusiveEXT& operator=( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT const *>(&rhs);
+      return *this;
+    }
+
+    SurfaceCapabilitiesFullScreenExclusiveEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SurfaceCapabilitiesFullScreenExclusiveEXT & setFullScreenExclusiveSupported( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fullScreenExclusiveSupported = fullScreenExclusiveSupported_;
+      return *this;
+    }
+
+    operator VkSurfaceCapabilitiesFullScreenExclusiveEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceCapabilitiesFullScreenExclusiveEXT*>( this );
+    }
+
+    operator VkSurfaceCapabilitiesFullScreenExclusiveEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceCapabilitiesFullScreenExclusiveEXT*>( this );
+    }
+
+    bool operator==( SurfaceCapabilitiesFullScreenExclusiveEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fullScreenExclusiveSupported == rhs.fullScreenExclusiveSupported );
+    }
+
+    bool operator!=( SurfaceCapabilitiesFullScreenExclusiveEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported;
+  };
+  static_assert( sizeof( SurfaceCapabilitiesFullScreenExclusiveEXT ) == sizeof( VkSurfaceCapabilitiesFullScreenExclusiveEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceCapabilitiesFullScreenExclusiveEXT>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct SurfaceFormatKHR
+  {
+    SurfaceFormatKHR( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                      VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear ) VULKAN_HPP_NOEXCEPT
+      : format( format_ )
+      , colorSpace( colorSpace_ )
+    {}
+
+    SurfaceFormatKHR( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SurfaceFormatKHR& operator=( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkSurfaceFormatKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceFormatKHR*>( this );
+    }
+
+    operator VkSurfaceFormatKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceFormatKHR*>( this );
+    }
+
+    bool operator==( SurfaceFormatKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( format == rhs.format )
+          && ( colorSpace == rhs.colorSpace );
+    }
+
+    bool operator!=( SurfaceFormatKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::Format format;
+    VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace;
+  };
+  static_assert( sizeof( SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceFormatKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct SurfaceFormat2KHR
+  {
+    SurfaceFormat2KHR( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat_ = VULKAN_HPP_NAMESPACE::SurfaceFormatKHR() ) VULKAN_HPP_NOEXCEPT
+      : surfaceFormat( surfaceFormat_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR & operator=( VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR ) - offsetof( SurfaceFormat2KHR, pNext ) );
+      return *this;
+    }
+
+    SurfaceFormat2KHR( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SurfaceFormat2KHR& operator=( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkSurfaceFormat2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceFormat2KHR*>( this );
+    }
+
+    operator VkSurfaceFormat2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceFormat2KHR*>( this );
+    }
+
+    bool operator==( SurfaceFormat2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( surfaceFormat == rhs.surfaceFormat );
+    }
+
+    bool operator!=( SurfaceFormat2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFormat2KHR;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat;
+  };
+  static_assert( sizeof( SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceFormat2KHR>::value, "struct wrapper is not a standard layout!" );
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+  struct SurfaceFullScreenExclusiveInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault ) VULKAN_HPP_NOEXCEPT
+      : fullScreenExclusive( fullScreenExclusive_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT & operator=( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT ) - offsetof( SurfaceFullScreenExclusiveInfoEXT, pNext ) );
+      return *this;
+    }
+
+    SurfaceFullScreenExclusiveInfoEXT( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SurfaceFullScreenExclusiveInfoEXT& operator=( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    SurfaceFullScreenExclusiveInfoEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SurfaceFullScreenExclusiveInfoEXT & setFullScreenExclusive( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fullScreenExclusive = fullScreenExclusive_;
+      return *this;
+    }
+
+    operator VkSurfaceFullScreenExclusiveInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceFullScreenExclusiveInfoEXT*>( this );
+    }
+
+    operator VkSurfaceFullScreenExclusiveInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceFullScreenExclusiveInfoEXT*>( this );
+    }
+
+    bool operator==( SurfaceFullScreenExclusiveInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fullScreenExclusive == rhs.fullScreenExclusive );
+    }
+
+    bool operator!=( SurfaceFullScreenExclusiveInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveInfoEXT;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive;
+  };
+  static_assert( sizeof( SurfaceFullScreenExclusiveInfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceFullScreenExclusiveInfoEXT>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+  struct SurfaceFullScreenExclusiveWin32InfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( HMONITOR hmonitor_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : hmonitor( hmonitor_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT & operator=( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT ) - offsetof( SurfaceFullScreenExclusiveWin32InfoEXT, pNext ) );
+      return *this;
+    }
+
+    SurfaceFullScreenExclusiveWin32InfoEXT( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SurfaceFullScreenExclusiveWin32InfoEXT& operator=( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    SurfaceFullScreenExclusiveWin32InfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SurfaceFullScreenExclusiveWin32InfoEXT & setHmonitor( HMONITOR hmonitor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hmonitor = hmonitor_;
+      return *this;
+    }
+
+    operator VkSurfaceFullScreenExclusiveWin32InfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceFullScreenExclusiveWin32InfoEXT*>( this );
+    }
+
+    operator VkSurfaceFullScreenExclusiveWin32InfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceFullScreenExclusiveWin32InfoEXT*>( this );
+    }
+
+    bool operator==( SurfaceFullScreenExclusiveWin32InfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( hmonitor == rhs.hmonitor );
+    }
+
+    bool operator!=( SurfaceFullScreenExclusiveWin32InfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT;
+    const void* pNext = nullptr;
+    HMONITOR hmonitor;
+  };
+  static_assert( sizeof( SurfaceFullScreenExclusiveWin32InfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveWin32InfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceFullScreenExclusiveWin32InfoEXT>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct SurfaceProtectedCapabilitiesKHR
+  {
+    VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : supportsProtected( supportsProtected_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR & operator=( VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR ) - offsetof( SurfaceProtectedCapabilitiesKHR, pNext ) );
+      return *this;
+    }
+
+    SurfaceProtectedCapabilitiesKHR( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SurfaceProtectedCapabilitiesKHR& operator=( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR const *>(&rhs);
+      return *this;
+    }
+
+    SurfaceProtectedCapabilitiesKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SurfaceProtectedCapabilitiesKHR & setSupportsProtected( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ ) VULKAN_HPP_NOEXCEPT
+    {
+      supportsProtected = supportsProtected_;
+      return *this;
+    }
+
+    operator VkSurfaceProtectedCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceProtectedCapabilitiesKHR*>( this );
+    }
+
+    operator VkSurfaceProtectedCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceProtectedCapabilitiesKHR*>( this );
+    }
+
+    bool operator==( SurfaceProtectedCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( supportsProtected == rhs.supportsProtected );
+    }
+
+    bool operator!=( SurfaceProtectedCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceProtectedCapabilitiesKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 supportsProtected;
+  };
+  static_assert( sizeof( SurfaceProtectedCapabilitiesKHR ) == sizeof( VkSurfaceProtectedCapabilitiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceProtectedCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct SwapchainCounterCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ = VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT() ) VULKAN_HPP_NOEXCEPT
+      : surfaceCounters( surfaceCounters_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT ) - offsetof( SwapchainCounterCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SwapchainCounterCreateInfoEXT& operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    SwapchainCounterCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SwapchainCounterCreateInfoEXT & setSurfaceCounters( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ ) VULKAN_HPP_NOEXCEPT
+    {
+      surfaceCounters = surfaceCounters_;
+      return *this;
+    }
+
+    operator VkSwapchainCounterCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSwapchainCounterCreateInfoEXT*>( this );
+    }
+
+    operator VkSwapchainCounterCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSwapchainCounterCreateInfoEXT*>( this );
+    }
+
+    bool operator==( SwapchainCounterCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( surfaceCounters == rhs.surfaceCounters );
+    }
+
+    bool operator!=( SwapchainCounterCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCounterCreateInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters;
+  };
+  static_assert( sizeof( SwapchainCounterCreateInfoEXT ) == sizeof( VkSwapchainCounterCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SwapchainCounterCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct SwapchainCreateInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ = VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR(),
+                                                 VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = VULKAN_HPP_NAMESPACE::SurfaceKHR(),
+                                                 uint32_t minImageCount_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                                 VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear,
+                                                 VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = VULKAN_HPP_NAMESPACE::Extent2D(),
+                                                 uint32_t imageArrayLayers_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = VULKAN_HPP_NAMESPACE::ImageUsageFlags(),
+                                                 VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
+                                                 uint32_t queueFamilyIndexCount_ = 0,
+                                                 const uint32_t* pQueueFamilyIndices_ = nullptr,
+                                                 VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
+                                                 VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque,
+                                                 VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 clipped_ = 0,
+                                                 VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = VULKAN_HPP_NAMESPACE::SwapchainKHR() ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , surface( surface_ )
+      , minImageCount( minImageCount_ )
+      , imageFormat( imageFormat_ )
+      , imageColorSpace( imageColorSpace_ )
+      , imageExtent( imageExtent_ )
+      , imageArrayLayers( imageArrayLayers_ )
+      , imageUsage( imageUsage_ )
+      , imageSharingMode( imageSharingMode_ )
+      , queueFamilyIndexCount( queueFamilyIndexCount_ )
+      , pQueueFamilyIndices( pQueueFamilyIndices_ )
+      , preTransform( preTransform_ )
+      , compositeAlpha( compositeAlpha_ )
+      , presentMode( presentMode_ )
+      , clipped( clipped_ )
+      , oldSwapchain( oldSwapchain_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR ) - offsetof( SwapchainCreateInfoKHR, pNext ) );
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SwapchainCreateInfoKHR& operator=( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
+    {
+      surface = surface_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setMinImageCount( uint32_t minImageCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minImageCount = minImageCount_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageFormat = imageFormat_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setImageColorSpace( VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageColorSpace = imageColorSpace_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageExtent = imageExtent_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setImageArrayLayers( uint32_t imageArrayLayers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageArrayLayers = imageArrayLayers_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageUsage = imageUsage_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setImageSharingMode( VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageSharingMode = imageSharingMode_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndexCount = queueFamilyIndexCount_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueueFamilyIndices = pQueueFamilyIndices_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setPreTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ ) VULKAN_HPP_NOEXCEPT
+    {
+      preTransform = preTransform_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setCompositeAlpha( VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compositeAlpha = compositeAlpha_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      presentMode = presentMode_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setClipped( VULKAN_HPP_NAMESPACE::Bool32 clipped_ ) VULKAN_HPP_NOEXCEPT
+    {
+      clipped = clipped_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setOldSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ ) VULKAN_HPP_NOEXCEPT
+    {
+      oldSwapchain = oldSwapchain_;
+      return *this;
+    }
+
+    operator VkSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSwapchainCreateInfoKHR*>( this );
+    }
+
+    operator VkSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSwapchainCreateInfoKHR*>( this );
+    }
+
+    bool operator==( SwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( surface == rhs.surface )
+          && ( minImageCount == rhs.minImageCount )
+          && ( imageFormat == rhs.imageFormat )
+          && ( imageColorSpace == rhs.imageColorSpace )
+          && ( imageExtent == rhs.imageExtent )
+          && ( imageArrayLayers == rhs.imageArrayLayers )
+          && ( imageUsage == rhs.imageUsage )
+          && ( imageSharingMode == rhs.imageSharingMode )
+          && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
+          && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
+          && ( preTransform == rhs.preTransform )
+          && ( compositeAlpha == rhs.compositeAlpha )
+          && ( presentMode == rhs.presentMode )
+          && ( clipped == rhs.clipped )
+          && ( oldSwapchain == rhs.oldSwapchain );
+    }
+
+    bool operator!=( SwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCreateInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags;
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    uint32_t minImageCount;
+    VULKAN_HPP_NAMESPACE::Format imageFormat;
+    VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace;
+    VULKAN_HPP_NAMESPACE::Extent2D imageExtent;
+    uint32_t imageArrayLayers;
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage;
+    VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode;
+    uint32_t queueFamilyIndexCount;
+    const uint32_t* pQueueFamilyIndices;
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform;
+    VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha;
+    VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode;
+    VULKAN_HPP_NAMESPACE::Bool32 clipped;
+    VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain;
+  };
+  static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct SwapchainDisplayNativeHdrCreateInfoAMD
+  {
+    VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : localDimmingEnable( localDimmingEnable_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD & operator=( VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD ) - offsetof( SwapchainDisplayNativeHdrCreateInfoAMD, pNext ) );
+      return *this;
+    }
+
+    SwapchainDisplayNativeHdrCreateInfoAMD( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SwapchainDisplayNativeHdrCreateInfoAMD& operator=( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD const *>(&rhs);
+      return *this;
+    }
+
+    SwapchainDisplayNativeHdrCreateInfoAMD & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SwapchainDisplayNativeHdrCreateInfoAMD & setLocalDimmingEnable( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      localDimmingEnable = localDimmingEnable_;
+      return *this;
+    }
+
+    operator VkSwapchainDisplayNativeHdrCreateInfoAMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSwapchainDisplayNativeHdrCreateInfoAMD*>( this );
+    }
+
+    operator VkSwapchainDisplayNativeHdrCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSwapchainDisplayNativeHdrCreateInfoAMD*>( this );
+    }
+
+    bool operator==( SwapchainDisplayNativeHdrCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( localDimmingEnable == rhs.localDimmingEnable );
+    }
+
+    bool operator!=( SwapchainDisplayNativeHdrCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable;
+  };
+  static_assert( sizeof( SwapchainDisplayNativeHdrCreateInfoAMD ) == sizeof( VkSwapchainDisplayNativeHdrCreateInfoAMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SwapchainDisplayNativeHdrCreateInfoAMD>::value, "struct wrapper is not a standard layout!" );
+
+  struct TextureLODGatherFormatPropertiesAMD
+  {
+    TextureLODGatherFormatPropertiesAMD( VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : supportsTextureGatherLODBiasAMD( supportsTextureGatherLODBiasAMD_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD & operator=( VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD ) - offsetof( TextureLODGatherFormatPropertiesAMD, pNext ) );
+      return *this;
+    }
+
+    TextureLODGatherFormatPropertiesAMD( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    TextureLODGatherFormatPropertiesAMD& operator=( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD const *>(&rhs);
+      return *this;
+    }
+
+    operator VkTextureLODGatherFormatPropertiesAMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkTextureLODGatherFormatPropertiesAMD*>( this );
+    }
+
+    operator VkTextureLODGatherFormatPropertiesAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkTextureLODGatherFormatPropertiesAMD*>( this );
+    }
+
+    bool operator==( TextureLODGatherFormatPropertiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( supportsTextureGatherLODBiasAMD == rhs.supportsTextureGatherLODBiasAMD );
+    }
+
+    bool operator!=( TextureLODGatherFormatPropertiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTextureLodGatherFormatPropertiesAMD;
+    void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD;
+  };
+  static_assert( sizeof( TextureLODGatherFormatPropertiesAMD ) == sizeof( VkTextureLODGatherFormatPropertiesAMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<TextureLODGatherFormatPropertiesAMD>::value, "struct wrapper is not a standard layout!" );
+
+  struct TimelineSemaphoreSubmitInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfoKHR( uint32_t waitSemaphoreValueCount_ = 0,
+                                                         const uint64_t* pWaitSemaphoreValues_ = nullptr,
+                                                         uint32_t signalSemaphoreValueCount_ = 0,
+                                                         const uint64_t* pSignalSemaphoreValues_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : waitSemaphoreValueCount( waitSemaphoreValueCount_ )
+      , pWaitSemaphoreValues( pWaitSemaphoreValues_ )
+      , signalSemaphoreValueCount( signalSemaphoreValueCount_ )
+      , pSignalSemaphoreValues( pSignalSemaphoreValues_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfoKHR & operator=( VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfoKHR ) - offsetof( TimelineSemaphoreSubmitInfoKHR, pNext ) );
+      return *this;
+    }
+
+    TimelineSemaphoreSubmitInfoKHR( VkTimelineSemaphoreSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    TimelineSemaphoreSubmitInfoKHR& operator=( VkTimelineSemaphoreSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    TimelineSemaphoreSubmitInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    TimelineSemaphoreSubmitInfoKHR & setWaitSemaphoreValueCount( uint32_t waitSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreValueCount = waitSemaphoreValueCount_;
+      return *this;
+    }
+
+    TimelineSemaphoreSubmitInfoKHR & setPWaitSemaphoreValues( const uint64_t* pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pWaitSemaphoreValues = pWaitSemaphoreValues_;
+      return *this;
+    }
+
+    TimelineSemaphoreSubmitInfoKHR & setSignalSemaphoreValueCount( uint32_t signalSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreValueCount = signalSemaphoreValueCount_;
+      return *this;
+    }
+
+    TimelineSemaphoreSubmitInfoKHR & setPSignalSemaphoreValues( const uint64_t* pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSignalSemaphoreValues = pSignalSemaphoreValues_;
+      return *this;
+    }
+
+    operator VkTimelineSemaphoreSubmitInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkTimelineSemaphoreSubmitInfoKHR*>( this );
+    }
+
+    operator VkTimelineSemaphoreSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkTimelineSemaphoreSubmitInfoKHR*>( this );
+    }
+
+    bool operator==( TimelineSemaphoreSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( waitSemaphoreValueCount == rhs.waitSemaphoreValueCount )
+          && ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues )
+          && ( signalSemaphoreValueCount == rhs.signalSemaphoreValueCount )
+          && ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
+    }
+
+    bool operator!=( TimelineSemaphoreSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTimelineSemaphoreSubmitInfoKHR;
+    const void* pNext = nullptr;
+    uint32_t waitSemaphoreValueCount;
+    const uint64_t* pWaitSemaphoreValues;
+    uint32_t signalSemaphoreValueCount;
+    const uint64_t* pSignalSemaphoreValues;
+  };
+  static_assert( sizeof( TimelineSemaphoreSubmitInfoKHR ) == sizeof( VkTimelineSemaphoreSubmitInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<TimelineSemaphoreSubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct ValidationCacheCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ = VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT(),
+                                                       size_t initialDataSize_ = 0,
+                                                       const void* pInitialData_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , initialDataSize( initialDataSize_ )
+      , pInitialData( pInitialData_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT ) - offsetof( ValidationCacheCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ValidationCacheCreateInfoEXT& operator=( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    ValidationCacheCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ValidationCacheCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ValidationCacheCreateInfoEXT & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialDataSize = initialDataSize_;
+      return *this;
+    }
+
+    ValidationCacheCreateInfoEXT & setPInitialData( const void* pInitialData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInitialData = pInitialData_;
+      return *this;
+    }
+
+    operator VkValidationCacheCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkValidationCacheCreateInfoEXT*>( this );
+    }
+
+    operator VkValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkValidationCacheCreateInfoEXT*>( this );
+    }
+
+    bool operator==( ValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( initialDataSize == rhs.initialDataSize )
+          && ( pInitialData == rhs.pInitialData );
+    }
+
+    bool operator!=( ValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationCacheCreateInfoEXT;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags;
+    size_t initialDataSize;
+    const void* pInitialData;
+  };
+  static_assert( sizeof( ValidationCacheCreateInfoEXT ) == sizeof( VkValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ValidationCacheCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct ValidationFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( uint32_t enabledValidationFeatureCount_ = 0,
+                                                const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT* pEnabledValidationFeatures_ = nullptr,
+                                                uint32_t disabledValidationFeatureCount_ = 0,
+                                                const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT* pDisabledValidationFeatures_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : enabledValidationFeatureCount( enabledValidationFeatureCount_ )
+      , pEnabledValidationFeatures( pEnabledValidationFeatures_ )
+      , disabledValidationFeatureCount( disabledValidationFeatureCount_ )
+      , pDisabledValidationFeatures( pDisabledValidationFeatures_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT ) - offsetof( ValidationFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    ValidationFeaturesEXT( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ValidationFeaturesEXT& operator=( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    ValidationFeaturesEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ValidationFeaturesEXT & setEnabledValidationFeatureCount( uint32_t enabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledValidationFeatureCount = enabledValidationFeatureCount_;
+      return *this;
+    }
+
+    ValidationFeaturesEXT & setPEnabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT* pEnabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pEnabledValidationFeatures = pEnabledValidationFeatures_;
+      return *this;
+    }
+
+    ValidationFeaturesEXT & setDisabledValidationFeatureCount( uint32_t disabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      disabledValidationFeatureCount = disabledValidationFeatureCount_;
+      return *this;
+    }
+
+    ValidationFeaturesEXT & setPDisabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT* pDisabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDisabledValidationFeatures = pDisabledValidationFeatures_;
+      return *this;
+    }
+
+    operator VkValidationFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkValidationFeaturesEXT*>( this );
+    }
+
+    operator VkValidationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkValidationFeaturesEXT*>( this );
+    }
+
+    bool operator==( ValidationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( enabledValidationFeatureCount == rhs.enabledValidationFeatureCount )
+          && ( pEnabledValidationFeatures == rhs.pEnabledValidationFeatures )
+          && ( disabledValidationFeatureCount == rhs.disabledValidationFeatureCount )
+          && ( pDisabledValidationFeatures == rhs.pDisabledValidationFeatures );
+    }
+
+    bool operator!=( ValidationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFeaturesEXT;
+    const void* pNext = nullptr;
+    uint32_t enabledValidationFeatureCount;
+    const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT* pEnabledValidationFeatures;
+    uint32_t disabledValidationFeatureCount;
+    const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT* pDisabledValidationFeatures;
+  };
+  static_assert( sizeof( ValidationFeaturesEXT ) == sizeof( VkValidationFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ValidationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct ValidationFlagsEXT
+  {
+    VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( uint32_t disabledValidationCheckCount_ = 0,
+                                             const VULKAN_HPP_NAMESPACE::ValidationCheckEXT* pDisabledValidationChecks_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : disabledValidationCheckCount( disabledValidationCheckCount_ )
+      , pDisabledValidationChecks( pDisabledValidationChecks_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ValidationFlagsEXT & operator=( VULKAN_HPP_NAMESPACE::ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ValidationFlagsEXT ) - offsetof( ValidationFlagsEXT, pNext ) );
+      return *this;
+    }
+
+    ValidationFlagsEXT( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ValidationFlagsEXT& operator=( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationFlagsEXT const *>(&rhs);
+      return *this;
+    }
+
+    ValidationFlagsEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ValidationFlagsEXT & setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      disabledValidationCheckCount = disabledValidationCheckCount_;
+      return *this;
+    }
+
+    ValidationFlagsEXT & setPDisabledValidationChecks( const VULKAN_HPP_NAMESPACE::ValidationCheckEXT* pDisabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDisabledValidationChecks = pDisabledValidationChecks_;
+      return *this;
+    }
+
+    operator VkValidationFlagsEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkValidationFlagsEXT*>( this );
+    }
+
+    operator VkValidationFlagsEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkValidationFlagsEXT*>( this );
+    }
+
+    bool operator==( ValidationFlagsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( disabledValidationCheckCount == rhs.disabledValidationCheckCount )
+          && ( pDisabledValidationChecks == rhs.pDisabledValidationChecks );
+    }
+
+    bool operator!=( ValidationFlagsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFlagsEXT;
+    const void* pNext = nullptr;
+    uint32_t disabledValidationCheckCount;
+    const VULKAN_HPP_NAMESPACE::ValidationCheckEXT* pDisabledValidationChecks;
+  };
+  static_assert( sizeof( ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ValidationFlagsEXT>::value, "struct wrapper is not a standard layout!" );
+
+#ifdef VK_USE_PLATFORM_VI_NN
+
+  struct ViSurfaceCreateInfoNN
+  {
+    VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ = VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN(),
+                                                void* window_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , window( window_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & operator=( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN ) - offsetof( ViSurfaceCreateInfoNN, pNext ) );
+      return *this;
+    }
+
+    ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ViSurfaceCreateInfoNN& operator=( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const *>(&rhs);
+      return *this;
+    }
+
+    ViSurfaceCreateInfoNN & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ViSurfaceCreateInfoNN & setFlags( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ViSurfaceCreateInfoNN & setWindow( void* window_ ) VULKAN_HPP_NOEXCEPT
+    {
+      window = window_;
+      return *this;
+    }
+
+    operator VkViSurfaceCreateInfoNN const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkViSurfaceCreateInfoNN*>( this );
+    }
+
+    operator VkViSurfaceCreateInfoNN &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkViSurfaceCreateInfoNN*>( this );
+    }
+
+    bool operator==( ViSurfaceCreateInfoNN const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( window == rhs.window );
+    }
+
+    bool operator!=( ViSurfaceCreateInfoNN const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eViSurfaceCreateInfoNN;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags;
+    void* window;
+  };
+  static_assert( sizeof( ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ViSurfaceCreateInfoNN>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+
+  struct WaylandSurfaceCreateInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ = VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR(),
+                                                      struct wl_display* display_ = nullptr,
+                                                      struct wl_surface* surface_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , display( display_ )
+      , surface( surface_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR ) - offsetof( WaylandSurfaceCreateInfoKHR, pNext ) );
+      return *this;
+    }
+
+    WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    WaylandSurfaceCreateInfoKHR& operator=( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    WaylandSurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    WaylandSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    WaylandSurfaceCreateInfoKHR & setDisplay( struct wl_display* display_ ) VULKAN_HPP_NOEXCEPT
+    {
+      display = display_;
+      return *this;
+    }
+
+    WaylandSurfaceCreateInfoKHR & setSurface( struct wl_surface* surface_ ) VULKAN_HPP_NOEXCEPT
+    {
+      surface = surface_;
+      return *this;
+    }
+
+    operator VkWaylandSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( this );
+    }
+
+    operator VkWaylandSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWaylandSurfaceCreateInfoKHR*>( this );
+    }
+
+    bool operator==( WaylandSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( display == rhs.display )
+          && ( surface == rhs.surface );
+    }
+
+    bool operator!=( WaylandSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWaylandSurfaceCreateInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags;
+    struct wl_display* display;
+    struct wl_surface* surface;
+  };
+  static_assert( sizeof( WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<WaylandSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+  struct Win32KeyedMutexAcquireReleaseInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( uint32_t acquireCount_ = 0,
+                                                               const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ = nullptr,
+                                                               const uint64_t* pAcquireKeys_ = nullptr,
+                                                               const uint32_t* pAcquireTimeouts_ = nullptr,
+                                                               uint32_t releaseCount_ = 0,
+                                                               const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ = nullptr,
+                                                               const uint64_t* pReleaseKeys_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : acquireCount( acquireCount_ )
+      , pAcquireSyncs( pAcquireSyncs_ )
+      , pAcquireKeys( pAcquireKeys_ )
+      , pAcquireTimeouts( pAcquireTimeouts_ )
+      , releaseCount( releaseCount_ )
+      , pReleaseSyncs( pReleaseSyncs_ )
+      , pReleaseKeys( pReleaseKeys_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR & operator=( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR ) - offsetof( Win32KeyedMutexAcquireReleaseInfoKHR, pNext ) );
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR& operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = acquireCount_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireSyncs = pAcquireSyncs_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireKeys( const uint64_t* pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireKeys = pAcquireKeys_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireTimeouts( const uint32_t* pAcquireTimeouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireTimeouts = pAcquireTimeouts_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      releaseCount = releaseCount_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReleaseSyncs = pReleaseSyncs_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseKeys( const uint64_t* pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReleaseKeys = pReleaseKeys_;
+      return *this;
+    }
+
+    operator VkWin32KeyedMutexAcquireReleaseInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoKHR*>( this );
+    }
+
+    operator VkWin32KeyedMutexAcquireReleaseInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoKHR*>( this );
+    }
+
+    bool operator==( Win32KeyedMutexAcquireReleaseInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( acquireCount == rhs.acquireCount )
+          && ( pAcquireSyncs == rhs.pAcquireSyncs )
+          && ( pAcquireKeys == rhs.pAcquireKeys )
+          && ( pAcquireTimeouts == rhs.pAcquireTimeouts )
+          && ( releaseCount == rhs.releaseCount )
+          && ( pReleaseSyncs == rhs.pReleaseSyncs )
+          && ( pReleaseKeys == rhs.pReleaseKeys );
+    }
+
+    bool operator!=( Win32KeyedMutexAcquireReleaseInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;
+    const void* pNext = nullptr;
+    uint32_t acquireCount;
+    const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs;
+    const uint64_t* pAcquireKeys;
+    const uint32_t* pAcquireTimeouts;
+    uint32_t releaseCount;
+    const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs;
+    const uint64_t* pReleaseKeys;
+  };
+  static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Win32KeyedMutexAcquireReleaseInfoKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+  struct Win32KeyedMutexAcquireReleaseInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( uint32_t acquireCount_ = 0,
+                                                              const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ = nullptr,
+                                                              const uint64_t* pAcquireKeys_ = nullptr,
+                                                              const uint32_t* pAcquireTimeoutMilliseconds_ = nullptr,
+                                                              uint32_t releaseCount_ = 0,
+                                                              const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ = nullptr,
+                                                              const uint64_t* pReleaseKeys_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : acquireCount( acquireCount_ )
+      , pAcquireSyncs( pAcquireSyncs_ )
+      , pAcquireKeys( pAcquireKeys_ )
+      , pAcquireTimeoutMilliseconds( pAcquireTimeoutMilliseconds_ )
+      , releaseCount( releaseCount_ )
+      , pReleaseSyncs( pReleaseSyncs_ )
+      , pReleaseKeys( pReleaseKeys_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV & operator=( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV ) - offsetof( Win32KeyedMutexAcquireReleaseInfoNV, pNext ) );
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV& operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV & setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = acquireCount_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireSyncs = pAcquireSyncs_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireKeys( const uint64_t* pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireKeys = pAcquireKeys_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireTimeoutMilliseconds( const uint32_t* pAcquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireTimeoutMilliseconds = pAcquireTimeoutMilliseconds_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      releaseCount = releaseCount_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReleaseSyncs = pReleaseSyncs_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseKeys( const uint64_t* pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReleaseKeys = pReleaseKeys_;
+      return *this;
+    }
+
+    operator VkWin32KeyedMutexAcquireReleaseInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoNV*>( this );
+    }
+
+    operator VkWin32KeyedMutexAcquireReleaseInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoNV*>( this );
+    }
+
+    bool operator==( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( acquireCount == rhs.acquireCount )
+          && ( pAcquireSyncs == rhs.pAcquireSyncs )
+          && ( pAcquireKeys == rhs.pAcquireKeys )
+          && ( pAcquireTimeoutMilliseconds == rhs.pAcquireTimeoutMilliseconds )
+          && ( releaseCount == rhs.releaseCount )
+          && ( pReleaseSyncs == rhs.pReleaseSyncs )
+          && ( pReleaseKeys == rhs.pReleaseKeys );
+    }
+
+    bool operator!=( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
+    const void* pNext = nullptr;
+    uint32_t acquireCount;
+    const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs;
+    const uint64_t* pAcquireKeys;
+    const uint32_t* pAcquireTimeoutMilliseconds;
+    uint32_t releaseCount;
+    const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs;
+    const uint64_t* pReleaseKeys;
+  };
+  static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Win32KeyedMutexAcquireReleaseInfoNV>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+  struct Win32SurfaceCreateInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ = VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR(),
+                                                    HINSTANCE hinstance_ = 0,
+                                                    HWND hwnd_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , hinstance( hinstance_ )
+      , hwnd( hwnd_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR ) - offsetof( Win32SurfaceCreateInfoKHR, pNext ) );
+      return *this;
+    }
+
+    Win32SurfaceCreateInfoKHR( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    Win32SurfaceCreateInfoKHR& operator=( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    Win32SurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    Win32SurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    Win32SurfaceCreateInfoKHR & setHinstance( HINSTANCE hinstance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hinstance = hinstance_;
+      return *this;
+    }
+
+    Win32SurfaceCreateInfoKHR & setHwnd( HWND hwnd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hwnd = hwnd_;
+      return *this;
+    }
+
+    operator VkWin32SurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( this );
+    }
+
+    operator VkWin32SurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWin32SurfaceCreateInfoKHR*>( this );
+    }
+
+    bool operator==( Win32SurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( hinstance == rhs.hinstance )
+          && ( hwnd == rhs.hwnd );
+    }
+
+    bool operator!=( Win32SurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32SurfaceCreateInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags;
+    HINSTANCE hinstance;
+    HWND hwnd;
+  };
+  static_assert( sizeof( Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Win32SurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct WriteDescriptorSet
+  {
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = VULKAN_HPP_NAMESPACE::DescriptorSet(),
+                                             uint32_t dstBinding_ = 0,
+                                             uint32_t dstArrayElement_ = 0,
+                                             uint32_t descriptorCount_ = 0,
+                                             VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
+                                             const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ = nullptr,
+                                             const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ = nullptr,
+                                             const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : dstSet( dstSet_ )
+      , dstBinding( dstBinding_ )
+      , dstArrayElement( dstArrayElement_ )
+      , descriptorCount( descriptorCount_ )
+      , descriptorType( descriptorType_ )
+      , pImageInfo( pImageInfo_ )
+      , pBufferInfo( pBufferInfo_ )
+      , pTexelBufferView( pTexelBufferView_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::WriteDescriptorSet & operator=( VULKAN_HPP_NAMESPACE::WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSet ) - offsetof( WriteDescriptorSet, pNext ) );
+      return *this;
+    }
+
+    WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    WriteDescriptorSet& operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSet const *>(&rhs);
+      return *this;
+    }
+
+    WriteDescriptorSet & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    WriteDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSet = dstSet_;
+      return *this;
+    }
+
+    WriteDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstBinding = dstBinding_;
+      return *this;
+    }
+
+    WriteDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstArrayElement = dstArrayElement_;
+      return *this;
+    }
+
+    WriteDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = descriptorCount_;
+      return *this;
+    }
+
+    WriteDescriptorSet & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorType = descriptorType_;
+      return *this;
+    }
+
+    WriteDescriptorSet & setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pImageInfo = pImageInfo_;
+      return *this;
+    }
+
+    WriteDescriptorSet & setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pBufferInfo = pBufferInfo_;
+      return *this;
+    }
+
+    WriteDescriptorSet & setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTexelBufferView = pTexelBufferView_;
+      return *this;
+    }
+
+    operator VkWriteDescriptorSet const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWriteDescriptorSet*>( this );
+    }
+
+    operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWriteDescriptorSet*>( this );
+    }
+
+    bool operator==( WriteDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( dstSet == rhs.dstSet )
+          && ( dstBinding == rhs.dstBinding )
+          && ( dstArrayElement == rhs.dstArrayElement )
+          && ( descriptorCount == rhs.descriptorCount )
+          && ( descriptorType == rhs.descriptorType )
+          && ( pImageInfo == rhs.pImageInfo )
+          && ( pBufferInfo == rhs.pBufferInfo )
+          && ( pTexelBufferView == rhs.pTexelBufferView );
+    }
+
+    bool operator!=( WriteDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSet;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::DescriptorSet dstSet;
+    uint32_t dstBinding;
+    uint32_t dstArrayElement;
+    uint32_t descriptorCount;
+    VULKAN_HPP_NAMESPACE::DescriptorType descriptorType;
+    const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo;
+    const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo;
+    const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView;
+  };
+  static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<WriteDescriptorSet>::value, "struct wrapper is not a standard layout!" );
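+
+  // Usage sketch (illustrative only; `set` and `imageInfo` are assumed to be a
+  // valid vk::DescriptorSet and vk::DescriptorImageInfo, and the default "vk"
+  // namespace is assumed): the fluent setters above let a descriptor write be
+  // assembled in a single expression, e.g.
+  //
+  //   vk::WriteDescriptorSet write = vk::WriteDescriptorSet()
+  //       .setDstSet( set )
+  //       .setDstBinding( 0 )
+  //       .setDescriptorCount( 1 )
+  //       .setDescriptorType( vk::DescriptorType::eCombinedImageSampler )
+  //       .setPImageInfo( &imageInfo );
+  //
+  // which can then be handed to Device::updateDescriptorSets.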
+
+  struct WriteDescriptorSetAccelerationStructureNV
+  {
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV( uint32_t accelerationStructureCount_ = 0,
+                                                                    const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : accelerationStructureCount( accelerationStructureCount_ )
+      , pAccelerationStructures( pAccelerationStructures_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV & operator=( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV ) - offsetof( WriteDescriptorSetAccelerationStructureNV, pNext ) );
+      return *this;
+    }
+
+    WriteDescriptorSetAccelerationStructureNV( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    WriteDescriptorSetAccelerationStructureNV& operator=( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV const *>(&rhs);
+      return *this;
+    }
+
+    WriteDescriptorSetAccelerationStructureNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    WriteDescriptorSetAccelerationStructureNV & setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructureCount = accelerationStructureCount_;
+      return *this;
+    }
+
+    WriteDescriptorSetAccelerationStructureNV & setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAccelerationStructures = pAccelerationStructures_;
+      return *this;
+    }
+
+    operator VkWriteDescriptorSetAccelerationStructureNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureNV*>( this );
+    }
+
+    operator VkWriteDescriptorSetAccelerationStructureNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWriteDescriptorSetAccelerationStructureNV*>( this );
+    }
+
+    bool operator==( WriteDescriptorSetAccelerationStructureNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( accelerationStructureCount == rhs.accelerationStructureCount )
+          && ( pAccelerationStructures == rhs.pAccelerationStructures );
+    }
+
+    bool operator!=( WriteDescriptorSetAccelerationStructureNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureNV;
+    const void* pNext = nullptr;
+    uint32_t accelerationStructureCount;
+    const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures;
+  };
+  static_assert( sizeof( WriteDescriptorSetAccelerationStructureNV ) == sizeof( VkWriteDescriptorSetAccelerationStructureNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<WriteDescriptorSetAccelerationStructureNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct WriteDescriptorSetInlineUniformBlockEXT
+  {
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlockEXT( uint32_t dataSize_ = 0,
+                                                                  const void* pData_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : dataSize( dataSize_ )
+      , pData( pData_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlockEXT & operator=( VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlockEXT ) - offsetof( WriteDescriptorSetInlineUniformBlockEXT, pNext ) );
+      return *this;
+    }
+
+    WriteDescriptorSetInlineUniformBlockEXT( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    WriteDescriptorSetInlineUniformBlockEXT& operator=( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlockEXT const *>(&rhs);
+      return *this;
+    }
+
+    WriteDescriptorSetInlineUniformBlockEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    WriteDescriptorSetInlineUniformBlockEXT & setDataSize( uint32_t dataSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dataSize = dataSize_;
+      return *this;
+    }
+
+    WriteDescriptorSetInlineUniformBlockEXT & setPData( const void* pData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pData = pData_;
+      return *this;
+    }
+
+    operator VkWriteDescriptorSetInlineUniformBlockEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWriteDescriptorSetInlineUniformBlockEXT*>( this );
+    }
+
+    operator VkWriteDescriptorSetInlineUniformBlockEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWriteDescriptorSetInlineUniformBlockEXT*>( this );
+    }
+
+    bool operator==( WriteDescriptorSetInlineUniformBlockEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( dataSize == rhs.dataSize )
+          && ( pData == rhs.pData );
+    }
+
+    bool operator!=( WriteDescriptorSetInlineUniformBlockEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetInlineUniformBlockEXT;
+    const void* pNext = nullptr;
+    uint32_t dataSize;
+    const void* pData;
+  };
+  static_assert( sizeof( WriteDescriptorSetInlineUniformBlockEXT ) == sizeof( VkWriteDescriptorSetInlineUniformBlockEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<WriteDescriptorSetInlineUniformBlockEXT>::value, "struct wrapper is not a standard layout!" );
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+
+  struct XcbSurfaceCreateInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ = VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR(),
+                                                  xcb_connection_t* connection_ = nullptr,
+                                                  xcb_window_t window_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , connection( connection_ )
+      , window( window_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR ) - offsetof( XcbSurfaceCreateInfoKHR, pNext ) );
+      return *this;
+    }
+
+    XcbSurfaceCreateInfoKHR( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    XcbSurfaceCreateInfoKHR& operator=( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    XcbSurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    XcbSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    XcbSurfaceCreateInfoKHR & setConnection( xcb_connection_t* connection_ ) VULKAN_HPP_NOEXCEPT
+    {
+      connection = connection_;
+      return *this;
+    }
+
+    XcbSurfaceCreateInfoKHR & setWindow( xcb_window_t window_ ) VULKAN_HPP_NOEXCEPT
+    {
+      window = window_;
+      return *this;
+    }
+
+    operator VkXcbSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( this );
+    }
+
+    operator VkXcbSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkXcbSurfaceCreateInfoKHR*>( this );
+    }
+
+    bool operator==( XcbSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( connection == rhs.connection )
+          && ( window == rhs.window );
+    }
+
+    bool operator!=( XcbSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXcbSurfaceCreateInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags;
+    xcb_connection_t* connection;
+    xcb_window_t window;
+  };
+  static_assert( sizeof( XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<XcbSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+
+  struct XlibSurfaceCreateInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ = VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR(),
+                                                   Display* dpy_ = nullptr,
+                                                   Window window_ = 0 ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , dpy( dpy_ )
+      , window( window_ )
+    {}
+
+    VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR ) - offsetof( XlibSurfaceCreateInfoKHR, pNext ) );
+      return *this;
+    }
+
+    XlibSurfaceCreateInfoKHR( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    XlibSurfaceCreateInfoKHR& operator=( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    XlibSurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    XlibSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    XlibSurfaceCreateInfoKHR & setDpy( Display* dpy_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dpy = dpy_;
+      return *this;
+    }
+
+    XlibSurfaceCreateInfoKHR & setWindow( Window window_ ) VULKAN_HPP_NOEXCEPT
+    {
+      window = window_;
+      return *this;
+    }
+
+    operator VkXlibSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( this );
+    }
+
+    operator VkXlibSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkXlibSurfaceCreateInfoKHR*>( this );
+    }
+
+    bool operator==( XlibSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( dpy == rhs.dpy )
+          && ( window == rhs.window );
+    }
+
+    bool operator!=( XlibSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXlibSurfaceCreateInfoKHR;
+    const void* pNext = nullptr;
+    VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags;
+    Display* dpy;
+    Window window;
+  };
+  static_assert( sizeof( XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<XlibSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Instance* pInstance, Dispatch const &d)
+  {
+    return static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkInstance*>( pInstance ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d )
+  {
+    VULKAN_HPP_NAMESPACE::Instance instance;
+    Result result = static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkInstance*>( &instance ) ) );
+    return createResultValue( result, instance, VULKAN_HPP_NAMESPACE_STRING"::createInstance" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Instance,Dispatch>>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d )
+  {
+    VULKAN_HPP_NAMESPACE::Instance instance;
+    Result result = static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkInstance*>( &instance ) ) );
+
+    ObjectDestroy<NoParent,Dispatch> deleter( allocator, d );
+    return createResultValue<Instance,Dispatch>( result, instance, VULKAN_HPP_NAMESPACE_STRING"::createInstanceUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
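+
+  // The instance-creation wrappers above show the pattern used throughout this
+  // header: a raw-pointer overload mirroring the C entry point, an enhanced
+  // overload returning ResultValueType<Instance>::type, and a *Unique variant
+  // whose UniqueHandle<Instance> releases the instance through ObjectDestroy.
+  // Illustrative call (assuming the default dispatcher, allocator and "vk"
+  // namespace):
+  //
+  //   vk::InstanceCreateInfo createInfo;
+  //   vk::Instance instance = vk::createInstance( createInfo );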
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d)
+  {
+    return static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Dispatch const &d )
+  {
+    std::vector<ExtensionProperties,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceExtensionProperties" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Allocator const& vectorAllocator, Dispatch const &d )
+  {
+    std::vector<ExtensionProperties,Allocator> properties( vectorAllocator );
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceExtensionProperties" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
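+
+  // Note: the enhanced enumerate* wrappers (here and below) follow the standard
+  // two-call Vulkan pattern: query the count with a null array, size the vector,
+  // fetch, and retry while the implementation reports eIncomplete (the count may
+  // change between calls); on success the vector is trimmed to the final count.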
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d)
+  {
+    return static_cast<Result>( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties(Dispatch const &d )
+  {
+    std::vector<LayerProperties,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceLayerProperties" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d )
+  {
+    std::vector<LayerProperties,Allocator> properties( vectorAllocator );
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceLayerProperties" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const &d)
+  {
+    return static_cast<Result>( d.vkEnumerateInstanceVersion( pApiVersion ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<uint32_t>::type enumerateInstanceVersion(Dispatch const &d )
+  {
+    uint32_t apiVersion;
+    Result result = static_cast<Result>( d.vkEnumerateInstanceVersion( &apiVersion ) );
+    return createResultValue( result, apiVersion, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceVersion" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo*>( pBeginInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type CommandBuffer::begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo*>( &beginInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::begin" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT*>( pConditionalRenderingBegin ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT*>( &conditionalRenderingBegin ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT*>( pLabelInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT*>( &labelInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfoKHR* pSubpassBeginInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfoKHR*>( pSubpassBeginInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfoKHR & subpassBeginInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfoKHR*>( &subpassBeginInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast<const VkBuffer*>( pCounterBuffers ), reinterpret_cast<const VkDeviceSize*>( pCounterBufferOffsets ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> counterBufferOffsets, Dispatch const &d ) const
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( counterBuffers.size() == counterBufferOffsets.size() );
+#else
+    if ( counterBuffers.size() != counterBufferOffsets.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
+    }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+    d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBuffers.size() , reinterpret_cast<const VkBuffer*>( counterBuffers.data() ), reinterpret_cast<const VkDeviceSize*>( counterBufferOffsets.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
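+
+  // Note: ArrayProxy overloads that take parallel arrays (such as
+  // beginTransformFeedbackEXT above) validate that the proxies have matching
+  // sizes; with VULKAN_HPP_NO_EXCEPTIONS defined the check is an assert,
+  // otherwise a LogicError is thrown, which is why these overloads are not
+  // marked VULKAN_HPP_NOEXCEPT.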
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ), dynamicOffsetCount, pDynamicOffsets );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets, ArrayProxy<const uint32_t> dynamicOffsets, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ), dynamicOffsets.size() , dynamicOffsets.data() );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkIndexType>( indexType ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkIndexType>( indexType ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer*>( pBuffers ), reinterpret_cast<const VkDeviceSize*>( pOffsets ), reinterpret_cast<const VkDeviceSize*>( pSizes ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> offsets, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> sizes, Dispatch const &d ) const
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
+#else
+    if ( buffers.size() != offsets.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" );
+    }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( buffers.size() == sizes.size() );
+#else
+    if ( buffers.size() != sizes.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" );
+    }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( offsets.size() == sizes.size() );
+#else
+    if ( offsets.size() != sizes.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindTransformFeedbackBuffersEXT: offsets.size() != sizes.size()" );
+    }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+    d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast<const VkBuffer*>( buffers.data() ), reinterpret_cast<const VkDeviceSize*>( offsets.data() ), reinterpret_cast<const VkDeviceSize*>( sizes.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer*>( pBuffers ), reinterpret_cast<const VkDeviceSize*>( pOffsets ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> offsets, Dispatch const &d ) const
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
+#else
+    if ( buffers.size() != offsets.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
+    }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+    d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast<const VkBuffer*>( buffers.data() ), reinterpret_cast<const VkDeviceSize*>( offsets.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit* pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageBlit*>( pRegions ), static_cast<VkFilter>( filter ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageBlit*>( regions.data() ), static_cast<VkFilter>( filter ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast<const VkAccelerationStructureInfoNV*>( pInfo ), static_cast<VkBuffer>( instanceData ), static_cast<VkDeviceSize>( instanceOffset ), static_cast<VkBool32>( update ), static_cast<VkAccelerationStructureNV>( dst ), static_cast<VkAccelerationStructureNV>( src ), static_cast<VkBuffer>( scratch ), static_cast<VkDeviceSize>( scratchOffset ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast<const VkAccelerationStructureInfoNV*>( &info ), static_cast<VkBuffer>( instanceData ), static_cast<VkDeviceSize>( instanceOffset ), static_cast<VkBool32>( update ), static_cast<VkAccelerationStructureNV>( dst ), static_cast<VkAccelerationStructureNV>( src ), static_cast<VkBuffer>( scratch ), static_cast<VkDeviceSize>( scratchOffset ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment* pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect* pRects, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdClearAttachments( m_commandBuffer, attachmentCount, reinterpret_cast<const VkClearAttachment*>( pAttachments ), rectCount, reinterpret_cast<const VkClearRect*>( pRects ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> attachments, ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> rects, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdClearAttachments( m_commandBuffer, attachments.size() , reinterpret_cast<const VkClearAttachment*>( attachments.data() ), rects.size() , reinterpret_cast<const VkClearRect*>( rects.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue* pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue*>( pColor ), rangeCount, reinterpret_cast<const VkImageSubresourceRange*>( pRanges ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> ranges, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue*>( &color ), ranges.size() , reinterpret_cast<const VkImageSubresourceRange*>( ranges.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue*>( pDepthStencil ), rangeCount, reinterpret_cast<const VkImageSubresourceRange*>( pRanges ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> ranges, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue*>( &depthStencil ), ranges.size() , reinterpret_cast<const VkImageSubresourceRange*>( ranges.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeNV mode, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast<VkAccelerationStructureNV>( dst ), static_cast<VkAccelerationStructureNV>( src ), static_cast<VkCopyAccelerationStructureModeNV>( mode ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeNV mode, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast<VkAccelerationStructureNV>( dst ), static_cast<VkAccelerationStructureNV>( src ), static_cast<VkCopyAccelerationStructureModeNV>( mode ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferCopy*>( pRegions ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferCopy*>( regions.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageCopy*>( pRegions ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageCopy*>( regions.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( pMarkerInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( &markerInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDebugMarkerEndEXT( m_commandBuffer );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDebugMarkerEndEXT( m_commandBuffer );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( pMarkerInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( &markerInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, instanceCount, firstInstance, static_cast<VkBuffer>( counterBuffer ), static_cast<VkDeviceSize>( counterBufferOffset ), counterOffset, vertexStride );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, instanceCount, firstInstance, static_cast<VkBuffer>( counterBuffer ), static_cast<VkDeviceSize>( counterBufferOffset ), counterOffset, vertexStride );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndConditionalRenderingEXT( m_commandBuffer );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndConditionalRenderingEXT( m_commandBuffer );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndRenderPass( m_commandBuffer );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndRenderPass( m_commandBuffer );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfoKHR* pSubpassEndInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfoKHR*>( pSubpassEndInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const SubpassEndInfoKHR & subpassEndInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfoKHR*>( &subpassEndInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast<const VkBuffer*>( pCounterBuffers ), reinterpret_cast<const VkDeviceSize*>( pCounterBufferOffsets ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> counterBufferOffsets, Dispatch const &d ) const
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( counterBuffers.size() == counterBufferOffsets.size() );
+#else
+    if ( counterBuffers.size() != counterBufferOffsets.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
+    }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+    d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBuffers.size() , reinterpret_cast<const VkBuffer*>( counterBuffers.data() ), reinterpret_cast<const VkDeviceSize*>( counterBufferOffsets.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( size ), data );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( size ), data );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT*>( pLabelInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT*>( &labelInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfoKHR* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfoKHR* pSubpassEndInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfoKHR*>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfoKHR*>( pSubpassEndInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const SubpassBeginInfoKHR & subpassBeginInfo, const SubpassEndInfoKHR & subpassEndInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfoKHR*>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfoKHR*>( &subpassEndInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::processCommandsNVX( const VULKAN_HPP_NAMESPACE::CmdProcessCommandsInfoNVX* pProcessCommandsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>( pProcessCommandsInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>( &processCommandsInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename T, typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> values, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, values.size() * sizeof( T ) , reinterpret_cast<const void*>( values.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set, descriptorWriteCount, reinterpret_cast<const VkWriteDescriptorSet*>( pDescriptorWrites ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> descriptorWrites, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set, descriptorWrites.size() , reinterpret_cast<const VkWriteDescriptorSet*>( descriptorWrites.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::reserveSpaceForCommandsNVX( const VULKAN_HPP_NAMESPACE::CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>( pReserveSpaceInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>( &reserveSpaceInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageResolve*>( pRegions ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageResolve*>( regions.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), customSampleOrderCount, reinterpret_cast<const VkCoarseSampleOrderCustomNV*>( pCustomSampleOrders ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> customSampleOrders, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), customSampleOrders.size() , reinterpret_cast<const VkCoarseSampleOrderCustomNV*>( customSampleOrders.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast<const VkRect2D*>( pDiscardRectangles ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> discardRectangles, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangles.size() , reinterpret_cast<const VkRect2D*>( discardRectangles.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkRect2D*>( pExclusiveScissors ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> exclusiveScissors, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size() , reinterpret_cast<const VkRect2D*>( exclusiveScissors.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetLineWidth( m_commandBuffer, lineWidth );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetLineWidth( m_commandBuffer, lineWidth );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL*>( pMarkerInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type CommandBuffer::setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL*>( &markerInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceMarkerINTEL" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL*>( pOverrideInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type CommandBuffer::setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL*>( &overrideInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceOverrideINTEL" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL*>( pMarkerInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type CommandBuffer::setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL*>( &markerInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceStreamMarkerINTEL" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT*>( pSampleLocationsInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT*>( &sampleLocationsInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D*>( pScissors ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> scissors, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size() , reinterpret_cast<const VkRect2D*>( scissors.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport*>( pViewports ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> viewports, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size() , reinterpret_cast<const VkViewport*>( viewports.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkShadingRatePaletteNV*>( pShadingRatePalettes ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> shadingRatePalettes, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, shadingRatePalettes.size() , reinterpret_cast<const VkShadingRatePaletteNV*>( shadingRatePalettes.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportWScalingNV*>( pViewportWScalings ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> viewportWScalings, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportWScalings.size() , reinterpret_cast<const VkViewportWScalingNV*>( viewportWScalings.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdTraceRaysNV( m_commandBuffer, static_cast<VkBuffer>( raygenShaderBindingTableBuffer ), static_cast<VkDeviceSize>( raygenShaderBindingOffset ), static_cast<VkBuffer>( missShaderBindingTableBuffer ), static_cast<VkDeviceSize>( missShaderBindingOffset ), static_cast<VkDeviceSize>( missShaderBindingStride ), static_cast<VkBuffer>( hitShaderBindingTableBuffer ), static_cast<VkDeviceSize>( hitShaderBindingOffset ), static_cast<VkDeviceSize>( hitShaderBindingStride ), static_cast<VkBuffer>( callableShaderBindingTableBuffer ), static_cast<VkDeviceSize>( callableShaderBindingOffset ), static_cast<VkDeviceSize>( callableShaderBindingStride ), width, height, depth );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdTraceRaysNV( m_commandBuffer, static_cast<VkBuffer>( raygenShaderBindingTableBuffer ), static_cast<VkDeviceSize>( raygenShaderBindingOffset ), static_cast<VkBuffer>( missShaderBindingTableBuffer ), static_cast<VkDeviceSize>( missShaderBindingOffset ), static_cast<VkDeviceSize>( missShaderBindingStride ), static_cast<VkBuffer>( hitShaderBindingTableBuffer ), static_cast<VkDeviceSize>( hitShaderBindingOffset ), static_cast<VkDeviceSize>( hitShaderBindingStride ), static_cast<VkBuffer>( callableShaderBindingTableBuffer ), static_cast<VkDeviceSize>( callableShaderBindingOffset ), static_cast<VkDeviceSize>( callableShaderBindingStride ), width, height, depth );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( dataSize ), pData );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename T, typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy<const T> data, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), data.size() * sizeof( T ) , reinterpret_cast<const void*>( data.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdWaitEvents( m_commandBuffer, eventCount, reinterpret_cast<const VkEvent*>( pEvents ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdWaitEvents( m_commandBuffer, events.size() , reinterpret_cast<const VkEvent*>( events.data() ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructureCount, reinterpret_cast<const VkAccelerationStructureNV*>( pAccelerationStructures ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructures.size() , reinterpret_cast<const VkAccelerationStructureNV*>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result CommandBuffer::end(Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type CommandBuffer::end(Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::end" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::reset" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireFullScreenExclusiveModeEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR*>( pAcquireInfo ), pImageIndex ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const &d ) const
+  {
+    uint32_t imageIndex;
+    Result result = static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR*>( &acquireInfo ), &imageIndex ) );
+    return createResultValue( result, imageIndex, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireNextImage2KHR", { Result::eSuccess, Result::eTimeout, Result::eNotReady, Result::eSuboptimalKHR } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, uint32_t* pImageIndex, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), pImageIndex ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const
+  {
+    uint32_t imageIndex;
+    Result result = static_cast<Result>( d.vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), &imageIndex ) );
+    return createResultValue( result, imageIndex, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireNextImageKHR", { Result::eSuccess, Result::eTimeout, Result::eNotReady, Result::eSuboptimalKHR } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL* pConfiguration, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( m_device, reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL*>( pAcquireInfo ), reinterpret_cast<VkPerformanceConfigurationINTEL*>( pConfiguration ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type Device::acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration;
+    Result result = static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( m_device, reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL*>( &acquireInfo ), reinterpret_cast<VkPerformanceConfigurationINTEL*>( &configuration ) ) );
+    return createResultValue( result, configuration, VULKAN_HPP_NAMESPACE_STRING"::Device::acquirePerformanceConfigurationINTEL" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR* pInfo, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR*>( pInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR*>( &info ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireProfilingLockKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( pAllocateInfo ), reinterpret_cast<VkCommandBuffer*>( pCommandBuffers ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<CommandBuffer,Allocator>>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d ) const
+  {
+    std::vector<CommandBuffer,Allocator> commandBuffers( allocateInfo.commandBufferCount );
+    Result result = static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>( commandBuffers.data() ) ) );
+    return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateCommandBuffers" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<CommandBuffer,Allocator>>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<CommandBuffer,Allocator> commandBuffers( allocateInfo.commandBufferCount, vectorAllocator );
+    Result result = static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>( commandBuffers.data() ) ) );
+    return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateCommandBuffers" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<CommandBuffer,Dispatch>,Allocator>>::type Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d ) const
+  {
+    static_assert( sizeof( CommandBuffer ) <= sizeof( UniqueHandle<CommandBuffer, Dispatch> ), "CommandBuffer is greater than UniqueHandle<CommandBuffer, Dispatch>!" );
+    std::vector<UniqueHandle<CommandBuffer, Dispatch>, Allocator> commandBuffers;
+    commandBuffers.reserve( allocateInfo.commandBufferCount );
+    CommandBuffer* buffer = reinterpret_cast<CommandBuffer*>( reinterpret_cast<char*>( commandBuffers.data() ) + allocateInfo.commandBufferCount * ( sizeof( UniqueHandle<CommandBuffer, Dispatch> ) - sizeof( CommandBuffer ) ) );
+    Result result = static_cast<Result>(d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>( buffer ) ) );
+    if (result == VULKAN_HPP_NAMESPACE::Result::eSuccess)
+    {
+      PoolFree<Device,CommandPool,Dispatch> deleter( *this, allocateInfo.commandPool, d );
+      for ( size_t i=0 ; i<allocateInfo.commandBufferCount ; i++ )
+      {
+        commandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( buffer[i], deleter ) );
+      }
+    }
+
+    return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<CommandBuffer,Dispatch>,Allocator>>::type Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    static_assert( sizeof( CommandBuffer ) <= sizeof( UniqueHandle<CommandBuffer, Dispatch> ), "CommandBuffer is greater than UniqueHandle<CommandBuffer, Dispatch>!" );
+    std::vector<UniqueHandle<CommandBuffer, Dispatch>, Allocator> commandBuffers( vectorAllocator );
+    commandBuffers.reserve( allocateInfo.commandBufferCount );
+    CommandBuffer* buffer = reinterpret_cast<CommandBuffer*>( reinterpret_cast<char*>( commandBuffers.data() ) + allocateInfo.commandBufferCount * ( sizeof( UniqueHandle<CommandBuffer, Dispatch> ) - sizeof( CommandBuffer ) ) );
+    Result result = static_cast<Result>(d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>( buffer ) ) );
+    if (result == VULKAN_HPP_NAMESPACE::Result::eSuccess)
+    {
+      PoolFree<Device,CommandPool,Dispatch> deleter( *this, allocateInfo.commandPool, d );
+      for ( size_t i=0 ; i<allocateInfo.commandBufferCount ; i++ )
+      {
+        commandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( buffer[i], deleter ) );
+      }
+    }
+
+    return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( pAllocateInfo ), reinterpret_cast<VkDescriptorSet*>( pDescriptorSets ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DescriptorSet,Allocator>>::type Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d ) const
+  {
+    std::vector<DescriptorSet,Allocator> descriptorSets( allocateInfo.descriptorSetCount );
+    Result result = static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>( descriptorSets.data() ) ) );
+    return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateDescriptorSets" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DescriptorSet,Allocator>>::type Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<DescriptorSet,Allocator> descriptorSets( allocateInfo.descriptorSetCount, vectorAllocator );
+    Result result = static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>( descriptorSets.data() ) ) );
+    return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateDescriptorSets" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<DescriptorSet,Dispatch>,Allocator>>::type Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d ) const
+  {
+    static_assert( sizeof( DescriptorSet ) <= sizeof( UniqueHandle<DescriptorSet, Dispatch> ), "DescriptorSet is greater than UniqueHandle<DescriptorSet, Dispatch>!" );
+    std::vector<UniqueHandle<DescriptorSet, Dispatch>, Allocator> descriptorSets;
+    descriptorSets.reserve( allocateInfo.descriptorSetCount );
+    DescriptorSet* buffer = reinterpret_cast<DescriptorSet*>( reinterpret_cast<char*>( descriptorSets.data() ) + allocateInfo.descriptorSetCount * ( sizeof( UniqueHandle<DescriptorSet, Dispatch> ) - sizeof( DescriptorSet ) ) );
+    Result result = static_cast<Result>(d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>( buffer ) ) );
+    if (result == VULKAN_HPP_NAMESPACE::Result::eSuccess)
+    {
+      PoolFree<Device,DescriptorPool,Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
+      for ( size_t i=0 ; i<allocateInfo.descriptorSetCount ; i++ )
+      {
+        descriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( buffer[i], deleter ) );
+      }
+    }
+
+    return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<DescriptorSet,Dispatch>,Allocator>>::type Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    static_assert( sizeof( DescriptorSet ) <= sizeof( UniqueHandle<DescriptorSet, Dispatch> ), "DescriptorSet is greater than UniqueHandle<DescriptorSet, Dispatch>!" );
+    std::vector<UniqueHandle<DescriptorSet, Dispatch>, Allocator> descriptorSets( vectorAllocator );
+    descriptorSets.reserve( allocateInfo.descriptorSetCount );
+    DescriptorSet* buffer = reinterpret_cast<DescriptorSet*>( reinterpret_cast<char*>( descriptorSets.data() ) + allocateInfo.descriptorSetCount * ( sizeof( UniqueHandle<DescriptorSet, Dispatch> ) - sizeof( DescriptorSet ) ) );
+    Result result = static_cast<Result>(d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>( buffer ) ) );
+    if (result == VULKAN_HPP_NAMESPACE::Result::eSuccess)
+    {
+      PoolFree<Device,DescriptorPool,Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
+      for ( size_t i=0 ; i<allocateInfo.descriptorSetCount ; i++ )
+      {
+        descriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( buffer[i], deleter ) );
+      }
+    }
+
+    return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo* pAllocateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeviceMemory* pMemory, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo*>( pAllocateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDeviceMemory*>( pMemory ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type Device::allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory;
+    Result result = static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo*>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDeviceMemory*>( &memory ) ) );
+    return createResultValue( result, memory, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateMemory" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DeviceMemory,Dispatch>>::type Device::allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory;
+    Result result = static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo*>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDeviceMemory*>( &memory ) ) );
+
+    ObjectFree<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<DeviceMemory,Dispatch>( result, memory, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateMemoryUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV* pBindInfos, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkBindAccelerationStructureMemoryNV( m_device, bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV*>( pBindInfos ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindAccelerationStructureMemoryNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> bindInfos, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkBindAccelerationStructureMemoryNV( m_device, bindInfos.size() , reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV*>( bindInfos.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindAccelerationStructureMemoryNV" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo*>( pBindInfos ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindBufferMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> bindInfos, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfos.size() , reinterpret_cast<const VkBindBufferMemoryInfo*>( bindInfos.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory2" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo*>( pBindInfos ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindBufferMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> bindInfos, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfos.size() , reinterpret_cast<const VkBindBufferMemoryInfo*>( bindInfos.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo*>( pBindInfos ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindImageMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> bindInfos, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfos.size() , reinterpret_cast<const VkBindImageMemoryInfo*>( bindInfos.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory2" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo*>( pBindInfos ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindImageMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> bindInfos, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfos.size() , reinterpret_cast<const VkBindImageMemoryInfo*>( bindInfos.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::compileDeferredNV" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructure, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoNV*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkAccelerationStructureNV*>( pAccelerationStructure ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type Device::createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
+    Result result = static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoNV*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkAccelerationStructureNV*>( &accelerationStructure ) ) );
+    return createResultValue( result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING"::Device::createAccelerationStructureNV" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<AccelerationStructureNV,Dispatch>>::type Device::createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
+    Result result = static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoNV*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkAccelerationStructureNV*>( &accelerationStructure ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<AccelerationStructureNV,Dispatch>( result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING"::Device::createAccelerationStructureNVUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Buffer* pBuffer, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkBuffer*>( pBuffer ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type Device::createBuffer( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Buffer buffer;
+    Result result = static_cast<Result>( d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBuffer*>( &buffer ) ) );
+    return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createBuffer" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Buffer,Dispatch>>::type Device::createBufferUnique( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Buffer buffer;
+    Result result = static_cast<Result>( d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBuffer*>( &buffer ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<Buffer,Dispatch>( result, buffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createBufferUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::BufferView* pView, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkBufferView*>( pView ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type Device::createBufferView( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::BufferView view;
+    Result result = static_cast<Result>( d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBufferView*>( &view ) ) );
+    return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createBufferView" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<BufferView,Dispatch>>::type Device::createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::BufferView view;
+    Result result = static_cast<Result>( d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBufferView*>( &view ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<BufferView,Dispatch>( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createBufferViewUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::CommandPool* pCommandPool, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkCommandPool*>( pCommandPool ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type Device::createCommandPool( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::CommandPool commandPool;
+    Result result = static_cast<Result>( d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkCommandPool*>( &commandPool ) ) );
+    return createResultValue( result, commandPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createCommandPool" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<CommandPool,Dispatch>>::type Device::createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::CommandPool commandPool;
+    Result result = static_cast<Result>( d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkCommandPool*>( &commandPool ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<CommandPool,Dispatch>( result, commandPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createCommandPoolUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkComputePipelineCreateInfo*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipeline*>( pPipelines ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    std::vector<Pipeline,Allocator> pipelines( createInfos.size() );
+    Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelines" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<Pipeline,Allocator> pipelines( createInfos.size(), vectorAllocator );
+    Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelines" );
+  }
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<Pipeline>::type Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    Pipeline pipeline;
+    Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkComputePipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+    return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipeline" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle<Pipeline, Dispatch> ), "Pipeline is greater than UniqueHandle<Pipeline, Dispatch>!" );
+    std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> pipelines;
+    pipelines.reserve( createInfos.size() );
+    Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle<Pipeline, Dispatch> ) - sizeof( Pipeline ) ) );
+    Result result = static_cast<Result>(d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
+    if (result == VULKAN_HPP_NAMESPACE::Result::eSuccess)
+    {
+      ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+      for ( size_t i=0 ; i<createInfos.size() ; i++ )
+      {
+        pipelines.push_back( UniqueHandle<Pipeline, Dispatch>( buffer[i], deleter ) );
+      }
+    }
+
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle<Pipeline, Dispatch> ), "Pipeline is greater than UniqueHandle<Pipeline, Dispatch>!" );
+    std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> pipelines( vectorAllocator );
+    pipelines.reserve( createInfos.size() );
+    Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle<Pipeline, Dispatch> ) - sizeof( Pipeline ) ) );
+    Result result = static_cast<Result>(d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
+    if (result == VULKAN_HPP_NAMESPACE::Result::eSuccess)
+    {
+      ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+      for ( size_t i=0 ; i<createInfos.size() ; i++ )
+      {
+        pipelines.push_back( UniqueHandle<Pipeline, Dispatch>( buffer[i], deleter ) );
+      }
+    }
+
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique" );
+  }
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    Pipeline pipeline;
+    Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkComputePipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<Pipeline,Dispatch>( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelineUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool* pDescriptorPool, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorPool*>( pDescriptorPool ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type Device::createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
+    Result result = static_cast<Result>( d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorPool*>( &descriptorPool ) ) );
+    return createResultValue( result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorPool" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DescriptorPool,Dispatch>>::type Device::createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
+    Result result = static_cast<Result>( d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorPool*>( &descriptorPool ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<DescriptorPool,Dispatch>( result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorPoolUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayout, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorSetLayout*>( pSetLayout ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
+    Result result = static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorSetLayout*>( &setLayout ) ) );
+    return createResultValue( result, setLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorSetLayout" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DescriptorSetLayout,Dispatch>>::type Device::createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
+    Result result = static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorSetLayout*>( &setLayout ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<DescriptorSetLayout,Dispatch>( result, setLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorSetLayoutUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorUpdateTemplate*>( pDescriptorUpdateTemplate ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type Device::createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
+    Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate*>( &descriptorUpdateTemplate ) ) );
+    return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplate" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DescriptorUpdateTemplate,Dispatch>>::type Device::createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
+    Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate*>( &descriptorUpdateTemplate ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<DescriptorUpdateTemplate,Dispatch>( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplateUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorUpdateTemplate*>( pDescriptorUpdateTemplate ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
+    Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate*>( &descriptorUpdateTemplate ) ) );
+    return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplateKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DescriptorUpdateTemplate,Dispatch>>::type Device::createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
+    Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate*>( &descriptorUpdateTemplate ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<DescriptorUpdateTemplate,Dispatch>( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplateKHRUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Event* pEvent, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkEvent*>( pEvent ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type Device::createEvent( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Event event;
+    Result result = static_cast<Result>( d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkEvent*>( &event ) ) );
+    return createResultValue( result, event, VULKAN_HPP_NAMESPACE_STRING"::Device::createEvent" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Event,Dispatch>>::type Device::createEventUnique( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Event event;
+    Result result = static_cast<Result>( d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkEvent*>( &event ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<Event,Dispatch>( result, event, VULKAN_HPP_NAMESPACE_STRING"::Device::createEventUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFence*>( pFence ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::createFence( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    Result result = static_cast<Result>( d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence*>( &fence ) ) );
+    return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::createFence" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Fence,Dispatch>>::type Device::createFenceUnique( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    Result result = static_cast<Result>( d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence*>( &fence ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<Fence,Dispatch>( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::createFenceUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer* pFramebuffer, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFramebuffer*>( pFramebuffer ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type Device::createFramebuffer( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
+    Result result = static_cast<Result>( d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFramebuffer*>( &framebuffer ) ) );
+    return createResultValue( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createFramebuffer" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Framebuffer,Dispatch>>::type Device::createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
+    Result result = static_cast<Result>( d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFramebuffer*>( &framebuffer ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<Framebuffer,Dispatch>( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createFramebufferUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipeline*>( pPipelines ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    std::vector<Pipeline,Allocator> pipelines( createInfos.size() );
+    Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelines" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<Pipeline,Allocator> pipelines( createInfos.size(), vectorAllocator );
+    Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelines" );
+  }
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<Pipeline>::type Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    Pipeline pipeline;
+    Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+    return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipeline" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle<Pipeline, Dispatch> ), "Pipeline is greater than UniqueHandle<Pipeline, Dispatch>!" );
+    std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> pipelines;
+    pipelines.reserve( createInfos.size() );
+    Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle<Pipeline, Dispatch> ) - sizeof( Pipeline ) ) );
+    Result result = static_cast<Result>(d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
+    if (result == VULKAN_HPP_NAMESPACE::Result::eSuccess)
+    {
+      ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+      for ( size_t i=0 ; i<createInfos.size() ; i++ )
+      {
+        pipelines.push_back( UniqueHandle<Pipeline, Dispatch>( buffer[i], deleter ) );
+      }
+    }
+
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle<Pipeline, Dispatch> ), "Pipeline is greater than UniqueHandle<Pipeline, Dispatch>!" );
+    std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> pipelines( vectorAllocator );
+    pipelines.reserve( createInfos.size() );
+    Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle<Pipeline, Dispatch> ) - sizeof( Pipeline ) ) );
+    Result result = static_cast<Result>(d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
+    if (result == VULKAN_HPP_NAMESPACE::Result::eSuccess)
+    {
+      ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+      for ( size_t i=0 ; i<createInfos.size() ; i++ )
+      {
+        pipelines.push_back( UniqueHandle<Pipeline, Dispatch>( buffer[i], deleter ) );
+      }
+    }
+
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique" );
+  }
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    Pipeline pipeline;
+    Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<Pipeline,Dispatch>( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelineUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Image* pImage, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkImage*>( pImage ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type Device::createImage( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Image image;
+    Result result = static_cast<Result>( d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImage*>( &image ) ) );
+    return createResultValue( result, image, VULKAN_HPP_NAMESPACE_STRING"::Device::createImage" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Image,Dispatch>>::type Device::createImageUnique( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Image image;
+    Result result = static_cast<Result>( d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImage*>( &image ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<Image,Dispatch>( result, image, VULKAN_HPP_NAMESPACE_STRING"::Device::createImageUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ImageView* pView, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkImageView*>( pView ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type Device::createImageView( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ImageView view;
+    Result result = static_cast<Result>( d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImageView*>( &view ) ) );
+    return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createImageView" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<ImageView,Dispatch>>::type Device::createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ImageView view;
+    Result result = static_cast<Result>( d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImageView*>( &view ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<ImageView,Dispatch>( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createImageViewUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNVX( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX* pIndirectCommandsLayout, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( pIndirectCommandsLayout ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX>::type Device::createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout;
+    Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( &indirectCommandsLayout ) ) );
+    return createResultValue( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNVX" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<IndirectCommandsLayoutNVX,Dispatch>>::type Device::createIndirectCommandsLayoutNVXUnique( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout;
+    Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( &indirectCommandsLayout ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<IndirectCommandsLayoutNVX,Dispatch>( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNVXUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createObjectTableNVX( const VULKAN_HPP_NAMESPACE::ObjectTableCreateInfoNVX* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ObjectTableNVX* pObjectTable, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkObjectTableNVX*>( pObjectTable ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::ObjectTableNVX>::type Device::createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable;
+    Result result = static_cast<Result>( d.vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkObjectTableNVX*>( &objectTable ) ) );
+    return createResultValue( result, objectTable, VULKAN_HPP_NAMESPACE_STRING"::Device::createObjectTableNVX" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<ObjectTableNVX,Dispatch>>::type Device::createObjectTableNVXUnique( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable;
+    Result result = static_cast<Result>( d.vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkObjectTableNVX*>( &objectTable ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<ObjectTableNVX,Dispatch>( result, objectTable, VULKAN_HPP_NAMESPACE_STRING"::Device::createObjectTableNVXUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineCache* pPipelineCache, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipelineCache*>( pPipelineCache ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type Device::createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
+    Result result = static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineCache*>( &pipelineCache ) ) );
+    return createResultValue( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineCache" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<PipelineCache,Dispatch>>::type Device::createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
+    Result result = static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineCache*>( &pipelineCache ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<PipelineCache,Dispatch>( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineCacheUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout* pPipelineLayout, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipelineLayout*>( pPipelineLayout ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type Device::createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
+    Result result = static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineLayout*>( &pipelineLayout ) ) );
+    return createResultValue( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineLayout" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<PipelineLayout,Dispatch>>::type Device::createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
+    Result result = static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineLayout*>( &pipelineLayout ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<PipelineLayout,Dispatch>( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineLayoutUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::QueryPool* pQueryPool, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkQueryPool*>( pQueryPool ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type Device::createQueryPool( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::QueryPool queryPool;
+    Result result = static_cast<Result>( d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkQueryPool*>( &queryPool ) ) );
+    return createResultValue( result, queryPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createQueryPool" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<QueryPool,Dispatch>>::type Device::createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::QueryPool queryPool;
+    Result result = static_cast<Result>( d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkQueryPool*>( &queryPool ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<QueryPool,Dispatch>( result, queryPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createQueryPoolUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipeline*>( pPipelines ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    std::vector<Pipeline,Allocator> pipelines( createInfos.size() );
+    Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesNV" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<Pipeline,Allocator> pipelines( createInfos.size(), vectorAllocator );
+    Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesNV" );
+  }
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<Pipeline>::type Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    Pipeline pipeline;
+    Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+    return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineNV" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle<Pipeline, Dispatch> ), "Pipeline is greater than UniqueHandle<Pipeline, Dispatch>!" );
+    std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> pipelines;
+    pipelines.reserve( createInfos.size() );
+    Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle<Pipeline, Dispatch> ) - sizeof( Pipeline ) ) );
+    Result result = static_cast<Result>(d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
+    if (result == VULKAN_HPP_NAMESPACE::Result::eSuccess)
+    {
+      ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+      for ( size_t i=0 ; i<createInfos.size() ; i++ )
+      {
+        pipelines.push_back( UniqueHandle<Pipeline, Dispatch>( buffer[i], deleter ) );
+      }
+    }
+
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle<Pipeline, Dispatch> ), "Pipeline is greater than UniqueHandle<Pipeline, Dispatch>!" );
+    std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> pipelines( vectorAllocator );
+    pipelines.reserve( createInfos.size() );
+    Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle<Pipeline, Dispatch> ) - sizeof( Pipeline ) ) );
+    Result result = static_cast<Result>(d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
+    if (result == VULKAN_HPP_NAMESPACE::Result::eSuccess)
+    {
+      ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+      for ( size_t i=0 ; i<createInfos.size() ; i++ )
+      {
+        pipelines.push_back( UniqueHandle<Pipeline, Dispatch>( buffer[i], deleter ) );
+      }
+    }
+
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique" );
+  }
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    Pipeline pipeline;
+    Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<Pipeline,Dispatch>( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineNVUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkRenderPass*>( pRenderPass ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    Result result = static_cast<Result>( d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
+    return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type Device::createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    Result result = static_cast<Result>( d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<RenderPass,Dispatch>( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPassUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2KHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2KHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkRenderPass*>( pRenderPass ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass2KHR( const RenderPassCreateInfo2KHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    Result result = static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2KHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
+    return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass2KHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type Device::createRenderPass2KHRUnique( const RenderPassCreateInfo2KHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    Result result = static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2KHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<RenderPass,Dispatch>( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass2KHRUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Sampler* pSampler, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSampler*>( pSampler ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type Device::createSampler( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Sampler sampler;
+    Result result = static_cast<Result>( d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSampler*>( &sampler ) ) );
+    return createResultValue( result, sampler, VULKAN_HPP_NAMESPACE_STRING"::Device::createSampler" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Sampler,Dispatch>>::type Device::createSamplerUnique( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Sampler sampler;
+    Result result = static_cast<Result>( d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSampler*>( &sampler ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<Sampler,Dispatch>( result, sampler, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSamplerYcbcrConversion*>( pYcbcrConversion ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type Device::createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
+    Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion*>( &ycbcrConversion ) ) );
+    return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversion" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SamplerYcbcrConversion,Dispatch>>::type Device::createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
+    Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion*>( &ycbcrConversion ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SamplerYcbcrConversion,Dispatch>( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversionUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSamplerYcbcrConversion*>( pYcbcrConversion ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type Device::createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
+    Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion*>( &ycbcrConversion ) ) );
+    return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversionKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SamplerYcbcrConversion,Dispatch>>::type Device::createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
+    Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion*>( &ycbcrConversion ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SamplerYcbcrConversion,Dispatch>( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversionKHRUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Semaphore* pSemaphore, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSemaphore*>( pSemaphore ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type Device::createSemaphore( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore;
+    Result result = static_cast<Result>( d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSemaphore*>( &semaphore ) ) );
+    return createResultValue( result, semaphore, VULKAN_HPP_NAMESPACE_STRING"::Device::createSemaphore" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Semaphore,Dispatch>>::type Device::createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore;
+    Result result = static_cast<Result>( d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSemaphore*>( &semaphore ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<Semaphore,Dispatch>( result, semaphore, VULKAN_HPP_NAMESPACE_STRING"::Device::createSemaphoreUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ShaderModule* pShaderModule, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkShaderModule*>( pShaderModule ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type Device::createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
+    Result result = static_cast<Result>( d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkShaderModule*>( &shaderModule ) ) );
+    return createResultValue( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING"::Device::createShaderModule" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<ShaderModule,Dispatch>>::type Device::createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
+    Result result = static_cast<Result>( d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkShaderModule*>( &shaderModule ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<ShaderModule,Dispatch>( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING"::Device::createShaderModuleUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, swapchainCount, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSwapchainKHR*>( pSwapchains ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type Device::createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    std::vector<SwapchainKHR,Allocator> swapchains( createInfos.size() );
+    Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( swapchains.data() ) ) );
+    return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainsKHR" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type Device::createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<SwapchainKHR,Allocator> swapchains( createInfos.size(), vectorAllocator );
+    Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( swapchains.data() ) ) );
+    return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainsKHR" );
+  }
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<SwapchainKHR>::type Device::createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    SwapchainKHR swapchain;
+    Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, 1 , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
+    return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    static_assert( sizeof( SwapchainKHR ) <= sizeof( UniqueHandle<SwapchainKHR, Dispatch> ), "SwapchainKHR is greater than UniqueHandle<SwapchainKHR, Dispatch>!" );
+    std::vector<UniqueHandle<SwapchainKHR, Dispatch>, Allocator> swapchainKHRs;
+    swapchainKHRs.reserve( createInfos.size() );
+    SwapchainKHR* buffer = reinterpret_cast<SwapchainKHR*>( reinterpret_cast<char*>( swapchainKHRs.data() ) + createInfos.size() * ( sizeof( UniqueHandle<SwapchainKHR, Dispatch> ) - sizeof( SwapchainKHR ) ) );
+    Result result = static_cast<Result>(d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( buffer ) ) );
+    if (result == VULKAN_HPP_NAMESPACE::Result::eSuccess)
+    {
+      ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+      for ( size_t i=0 ; i<createInfos.size() ; i++ )
+      {
+        swapchainKHRs.push_back( UniqueHandle<SwapchainKHR, Dispatch>( buffer[i], deleter ) );
+      }
+    }
+
+    return createResultValue( result, swapchainKHRs, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    static_assert( sizeof( SwapchainKHR ) <= sizeof( UniqueHandle<SwapchainKHR, Dispatch> ), "SwapchainKHR is greater than UniqueHandle<SwapchainKHR, Dispatch>!" );
+    std::vector<UniqueHandle<SwapchainKHR, Dispatch>, Allocator> swapchainKHRs( vectorAllocator );
+    swapchainKHRs.reserve( createInfos.size() );
+    SwapchainKHR* buffer = reinterpret_cast<SwapchainKHR*>( reinterpret_cast<char*>( swapchainKHRs.data() ) + createInfos.size() * ( sizeof( UniqueHandle<SwapchainKHR, Dispatch> ) - sizeof( SwapchainKHR ) ) );
+    Result result = static_cast<Result>(d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( buffer ) ) );
+    if (result == VULKAN_HPP_NAMESPACE::Result::eSuccess)
+    {
+      ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+      for ( size_t i=0 ; i<createInfos.size() ; i++ )
+      {
+        swapchainKHRs.push_back( UniqueHandle<SwapchainKHR, Dispatch>( buffer[i], deleter ) );
+      }
+    }
+
+    return createResultValue( result, swapchainKHRs, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
+  }
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SwapchainKHR,Dispatch>>::type Device::createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    SwapchainKHR swapchain;
+    Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, 1 , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SwapchainKHR,Dispatch>( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainKHRUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchain, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSwapchainKHR*>( pSwapchain ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type Device::createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
+    Result result = static_cast<Result>( d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
+    return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSwapchainKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SwapchainKHR,Dispatch>>::type Device::createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
+    Result result = static_cast<Result>( d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SwapchainKHR,Dispatch>( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSwapchainKHRUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pValidationCache, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast<const VkValidationCacheCreateInfoEXT*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkValidationCacheEXT*>( pValidationCache ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type Device::createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
+    Result result = static_cast<Result>( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast<const VkValidationCacheCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkValidationCacheEXT*>( &validationCache ) ) );
+    return createResultValue( result, validationCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createValidationCacheEXT" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<ValidationCacheEXT,Dispatch>>::type Device::createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
+    Result result = static_cast<Result>( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast<const VkValidationCacheCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkValidationCacheEXT*>( &validationCache ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<ValidationCacheEXT,Dispatch>( result, validationCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createValidationCacheEXTUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>( pNameInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>( &nameInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::debugMarkerSetObjectNameEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>( pTagInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>( &tagInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::debugMarkerSetObjectTagEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyObjectTableNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyObjectTableNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::waitIdle(Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::waitIdle(Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitIdle" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT*>( pDisplayPowerInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT*>( &displayPowerInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::displayPowerControlEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange*>( pMemoryRanges ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::flushMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> memoryRanges, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast<const VkMappedMemoryRange*>( memoryRanges.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::flushMappedMemoryRanges" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::freeDescriptorSets" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::free" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, void* pData, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), dataSize, pData ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename T, typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, ArrayProxy<T> data, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getAccelerationStructureHandleNV" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV*>( pInfo ), reinterpret_cast<VkMemoryRequirements2KHR*>( pMemoryRequirements ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements;
+    d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV*>( &info ), reinterpret_cast<VkMemoryRequirements2KHR*>( &memoryRequirements ) );
+    return memoryRequirements;
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR& memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR>();
+    d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV*>( &info ), reinterpret_cast<VkMemoryRequirements2KHR*>( &memoryRequirements ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties;
+    Result result = static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( &properties ) ) );
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getAndroidHardwareBufferPropertiesANDROID" );
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d ) const
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID& properties = structureChain.template get<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>();
+    Result result = static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( &properties ) ) );
+    return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::Device::getAndroidHardwareBufferPropertiesANDROID" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfoKHR*>( pInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const BufferDeviceAddressInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfoKHR*>( &info ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfoKHR*>( pInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const BufferDeviceAddressInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfoKHR*>( &info ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements*>( pMemoryRequirements ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
+    d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements*>( &memoryRequirements ) );
+    return memoryRequirements;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( pInfo ), reinterpret_cast<VkMemoryRequirements2*>( pMemoryRequirements ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
+    return memoryRequirements;
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2& memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( pInfo ), reinterpret_cast<VkMemoryRequirements2*>( pMemoryRequirements ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
+    return memoryRequirements;
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2& memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfoKHR*>( pInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfoKHR*>( &info ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsEXT( uint32_t timestampCount, const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetCalibratedTimestampsEXT( m_device, timestampCount, reinterpret_cast<const VkCalibratedTimestampInfoEXT*>( pTimestampInfos ), pTimestamps, pMaxDeviation ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<uint64_t>::type Device::getCalibratedTimestampsEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> timestampInfos, ArrayProxy<uint64_t> timestamps, Dispatch const &d ) const
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( timestampInfos.size() == timestamps.size() );
+#else
+    if ( timestampInfos.size() != timestamps.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkDevice::getCalibratedTimestampsEXT: timestampInfos.size() != timestamps.size()" );
+    }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+    uint64_t maxDeviation;
+    Result result = static_cast<Result>( d.vkGetCalibratedTimestampsEXT( m_device, timestampInfos.size() , reinterpret_cast<const VkCalibratedTimestampInfoEXT*>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) );
+    return createResultValue( result, maxDeviation, VULKAN_HPP_NAMESPACE_STRING"::Device::getCalibratedTimestampsEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( pSupport ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
+    d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( &support ) );
+    return support;
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport& support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
+    d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( &support ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( pSupport ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
+    d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( &support ) );
+    return support;
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport& support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
+    d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( &support ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags*>( pPeerMemoryFeatures ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
+    d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags*>( &peerMemoryFeatures ) );
+    return peerMemoryFeatures;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags*>( pPeerMemoryFeatures ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
+    d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags*>( &peerMemoryFeatures ) );
+    return peerMemoryFeatures;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>( pDeviceGroupPresentCapabilities ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type Device::getGroupPresentCapabilitiesKHR(Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities;
+    Result result = static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>( &deviceGroupPresentCapabilities ) ) );
+    return createResultValue( result, deviceGroupPresentCapabilities, VULKAN_HPP_NAMESPACE_STRING"::Device::getGroupPresentCapabilitiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT( m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( pSurfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR*>( pModes ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type Device::getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
+    Result result = static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT( m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR*>( &modes ) ) );
+    return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING"::Device::getGroupSurfacePresentModes2EXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR*>( pModes ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
+    Result result = static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR*>( &modes ) ) );
+    return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING"::Device::getGroupSurfacePresentModesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize* pCommittedMemoryInBytes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize*>( pCommittedMemoryInBytes ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes;
+    d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize*>( &committedMemoryInBytes ) );
+    return committedMemoryInBytes;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfoKHR*>( pInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const DeviceMemoryOpaqueCaptureAddressInfoKHR & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfoKHR*>( &info ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char* pName, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetDeviceProcAddr( m_device, pName );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetDeviceProcAddr( m_device, name.c_str() );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue*>( pQueue ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::Queue queue;
+    d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue*>( &queue ) );
+    return queue;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2* pQueueInfo, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2*>( pQueueInfo ), reinterpret_cast<VkQueue*>( pQueue ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::Queue queue;
+    d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2*>( &queueInfo ), reinterpret_cast<VkQueue*>( &queue ) );
+    return queue;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getEventStatus", { Result::eEventSet, Result::eEventReset } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR*>( pGetFdInfo ), pFd ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<int>::type Device::getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const &d ) const
+  {
+    int fd;
+    Result result = static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR*>( &getFdInfo ), &fd ) );
+    return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING"::Device::getFenceFdKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getFenceStatus", { Result::eSuccess, Result::eNotReady } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>( pGetWin32HandleInfo ), pHandle ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<HANDLE>::type Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const
+  {
+    HANDLE handle;
+    Result result = static_cast<Result>( d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>( &getWin32HandleInfo ), &handle ) );
+    return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getFenceWin32HandleKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT* pProperties, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties;
+    Result result = static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT*>( &properties ) ) );
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getImageDrmFormatModifierPropertiesEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements*>( pMemoryRequirements ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
+    d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements*>( &memoryRequirements ) );
+    return memoryRequirements;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( pInfo ), reinterpret_cast<VkMemoryRequirements2*>( pMemoryRequirements ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
+    return memoryRequirements;
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2& memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( pInfo ), reinterpret_cast<VkMemoryRequirements2*>( pMemoryRequirements ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
+    return memoryRequirements;
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2& memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements* pSparseMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements*>( pSparseMemoryRequirements ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements,Allocator> Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d ) const
+  {
+    std::vector<SparseImageMemoryRequirements,Allocator> sparseMemoryRequirements;
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements*>( sparseMemoryRequirements.data() ) );
+    return sparseMemoryRequirements;
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements,Allocator> Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<SparseImageMemoryRequirements,Allocator> sparseMemoryRequirements( vectorAllocator );
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements*>( sparseMemoryRequirements.data() ) );
+    return sparseMemoryRequirements;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2*>( pSparseMemoryRequirements ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2,Allocator> Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d ) const
+  {
+    std::vector<SparseImageMemoryRequirements2,Allocator> sparseMemoryRequirements;
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2*>( sparseMemoryRequirements.data() ) );
+    return sparseMemoryRequirements;
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2,Allocator> Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<SparseImageMemoryRequirements2,Allocator> sparseMemoryRequirements( vectorAllocator );
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2*>( sparseMemoryRequirements.data() ) );
+    return sparseMemoryRequirements;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2*>( pSparseMemoryRequirements ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2,Allocator> Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d ) const
+  {
+    std::vector<SparseImageMemoryRequirements2,Allocator> sparseMemoryRequirements;
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2*>( sparseMemoryRequirements.data() ) );
+    return sparseMemoryRequirements;
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2,Allocator> Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<SparseImageMemoryRequirements2,Allocator> sparseMemoryRequirements( vectorAllocator );
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2*>( sparseMemoryRequirements.data() ) );
+    return sparseMemoryRequirements;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource* pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout* pLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource*>( pSubresource ), reinterpret_cast<VkSubresourceLayout*>( pLayout ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const ImageSubresource & subresource, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::SubresourceLayout layout;
+    d.vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource*>( &subresource ), reinterpret_cast<VkSubresourceLayout*>( &layout ) );
+    return layout;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
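+
+  // Usage sketch (editorial comment, not generated code): querying the layout of a
+  // linearly tiled image so its mapped memory can be addressed directly. Assumes a
+  // valid vk::Device `device` and a vk::Image `image` created with eLinear tiling.
+  //   vk::SubresourceLayout layout = device.getImageSubresourceLayout(
+  //       image, vk::ImageSubresource( vk::ImageAspectFlagBits::eColor, 0, 0 ) );
+  //   // layout.offset and layout.rowPitch locate a texel in the mapped pointer.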
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX*>( pInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const ImageViewHandleInfoNVX & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX*>( &info ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID*>( pInfo ), pBuffer ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<struct AHardwareBuffer*>::type Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d ) const
+  {
+    struct AHardwareBuffer* buffer;
+    Result result = static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID*>( &info ), &buffer ) );
+    return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryAndroidHardwareBufferANDROID" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR*>( pGetFdInfo ), pFd ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<int>::type Device::getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d ) const
+  {
+    int fd;
+    Result result = static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR*>( &getFdInfo ), &fd ) );
+    return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryFdKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
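+
+  // Usage sketch (editorial comment, not generated code): exporting a POSIX fd for a
+  // memory allocation created with export info (VK_KHR_external_memory_fd). With
+  // exceptions enabled, the enhanced overload returns the fd and throws on failure.
+  //   int fd = device.getMemoryFdKHR(
+  //       vk::MemoryGetFdInfoKHR( memory, vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) );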
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR*>( pMemoryFdProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties;
+    Result result = static_cast<Result>( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR*>( &memoryFdProperties ) ) );
+    return createResultValue( result, memoryFdProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryFdPropertiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), pHostPointer, reinterpret_cast<VkMemoryHostPointerPropertiesEXT*>( pMemoryHostPointerProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties;
+    Result result = static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), pHostPointer, reinterpret_cast<VkMemoryHostPointerPropertiesEXT*>( &memoryHostPointerProperties ) ) );
+    return createResultValue( result, memoryHostPointerProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryHostPointerPropertiesEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
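+
+  // Usage sketch (editorial comment, not generated code): VK_EXT_external_memory_host,
+  // asking which memory types can import a host allocation `hostPtr`.
+  //   vk::MemoryHostPointerPropertiesEXT props = device.getMemoryHostPointerPropertiesEXT(
+  //       vk::ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT, hostPtr );
+  //   // props.memoryTypeBits then constrains the memory type used for the import.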
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR*>( pGetWin32HandleInfo ), pHandle ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<HANDLE>::type Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const
+  {
+    HANDLE handle;
+    Result result = static_cast<Result>( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR*>( &getWin32HandleInfo ), &handle ) );
+    return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryWin32HandleKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), pHandle ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<HANDLE>::type Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d ) const
+  {
+    HANDLE handle;
+    Result result = static_cast<Result>( d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle ) );
+    return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryWin32HandleNV" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), handle, reinterpret_cast<VkMemoryWin32HandlePropertiesKHR*>( pMemoryWin32HandleProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties;
+    Result result = static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), handle, reinterpret_cast<VkMemoryWin32HandlePropertiesKHR*>( &memoryWin32HandleProperties ) ) );
+    return createResultValue( result, memoryWin32HandleProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryWin32HandlePropertiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), pPresentationTimingCount, reinterpret_cast<VkPastPresentationTimingGOOGLE*>( pPresentationTimings ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
+  {
+    std::vector<PastPresentationTimingGOOGLE,Allocator> presentationTimings;
+    uint32_t presentationTimingCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && presentationTimingCount )
+      {
+        presentationTimings.resize( presentationTimingCount );
+        result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, reinterpret_cast<VkPastPresentationTimingGOOGLE*>( presentationTimings.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
+      presentationTimings.resize( presentationTimingCount );
+    }
+    return createResultValue( result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING"::Device::getPastPresentationTimingGOOGLE" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<PastPresentationTimingGOOGLE,Allocator> presentationTimings( vectorAllocator );
+    uint32_t presentationTimingCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && presentationTimingCount )
+      {
+        presentationTimings.resize( presentationTimingCount );
+        result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, reinterpret_cast<VkPastPresentationTimingGOOGLE*>( presentationTimings.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
+      presentationTimings.resize( presentationTimingCount );
+    }
+    return createResultValue( result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING"::Device::getPastPresentationTimingGOOGLE" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
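+
+  // Usage sketch (editorial comment, not generated code): VK_GOOGLE_display_timing. The
+  // enhanced overload hides the count/fill enumeration loop above and returns the vector.
+  //   std::vector<vk::PastPresentationTimingGOOGLE> timings =
+  //       device.getPastPresentationTimingGOOGLE( swapchain );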
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, VULKAN_HPP_NAMESPACE::PerformanceValueINTEL* pValue, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetPerformanceParameterINTEL( m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL*>( pValue ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value;
+    Result result = static_cast<Result>( d.vkGetPerformanceParameterINTEL( m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL*>( &value ) ) );
+    return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING"::Device::getPerformanceParameterINTEL" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
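+
+  // Usage sketch (editorial comment, not generated code): VK_INTEL_performance_query.
+  //   vk::PerformanceValueINTEL hwCounters = device.getPerformanceParameterINTEL(
+  //       vk::PerformanceParameterTypeINTEL::eHwCountersSupported );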
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const &d ) const
+  {
+    std::vector<uint8_t,Allocator> data;
+    size_t dataSize;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
+      if ( ( result == Result::eSuccess ) && dataSize )
+      {
+        data.resize( dataSize );
+        result = static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void*>( data.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( dataSize <= data.size() );
+      data.resize( dataSize );
+    }
+    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineCacheData" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<uint8_t,Allocator> data( vectorAllocator );
+    size_t dataSize;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
+      if ( ( result == Result::eSuccess ) && dataSize )
+      {
+        data.resize( dataSize );
+        result = static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void*>( data.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( dataSize <= data.size() );
+      data.resize( dataSize );
+    }
+    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineCacheData" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
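+
+  // Usage sketch (editorial comment, not generated code): capturing the pipeline cache blob
+  // so it can be persisted and handed back via vk::PipelineCacheCreateInfo on a later run.
+  //   std::vector<uint8_t> blob = device.getPipelineCacheData( pipelineCache );
+  //   // e.g. write blob.data() / blob.size() to disk here.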
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR* pInternalRepresentations, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( pExecutableInfo ), pInternalRepresentationCount, reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR*>( pInternalRepresentations ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,Allocator>>::type Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d ) const
+  {
+    std::vector<PipelineExecutableInternalRepresentationKHR,Allocator> internalRepresentations;
+    uint32_t internalRepresentationCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( &executableInfo ), &internalRepresentationCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && internalRepresentationCount )
+      {
+        internalRepresentations.resize( internalRepresentationCount );
+        result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( &executableInfo ), &internalRepresentationCount, reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR*>( internalRepresentations.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
+      internalRepresentations.resize( internalRepresentationCount );
+    }
+    return createResultValue( result, internalRepresentations, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableInternalRepresentationsKHR" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,Allocator>>::type Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<PipelineExecutableInternalRepresentationKHR,Allocator> internalRepresentations( vectorAllocator );
+    uint32_t internalRepresentationCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( &executableInfo ), &internalRepresentationCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && internalRepresentationCount )
+      {
+        internalRepresentations.resize( internalRepresentationCount );
+        result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( &executableInfo ), &internalRepresentationCount, reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR*>( internalRepresentations.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
+      internalRepresentations.resize( internalRepresentationCount );
+    }
+    return createResultValue( result, internalRepresentations, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableInternalRepresentationsKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR* pProperties, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR*>( pPipelineInfo ), pExecutableCount, reinterpret_cast<VkPipelineExecutablePropertiesKHR*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR,Allocator>>::type Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const &d ) const
+  {
+    std::vector<PipelineExecutablePropertiesKHR,Allocator> properties;
+    uint32_t executableCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR*>( &pipelineInfo ), &executableCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && executableCount )
+      {
+        properties.resize( executableCount );
+        result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR*>( &pipelineInfo ), &executableCount, reinterpret_cast<VkPipelineExecutablePropertiesKHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( executableCount <= properties.size() );
+      properties.resize( executableCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutablePropertiesKHR" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR,Allocator>>::type Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<PipelineExecutablePropertiesKHR,Allocator> properties( vectorAllocator );
+    uint32_t executableCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR*>( &pipelineInfo ), &executableCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && executableCount )
+      {
+        properties.resize( executableCount );
+        result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR*>( &pipelineInfo ), &executableCount, reinterpret_cast<VkPipelineExecutablePropertiesKHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( executableCount <= properties.size() );
+      properties.resize( executableCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutablePropertiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
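+
+  // Usage sketch (editorial comment, not generated code): VK_KHR_pipeline_executable_properties,
+  // listing the executables the driver compiled for `pipeline`; the pipeline should have been
+  // created with eCaptureStatisticsKHR for the statistics queries below to return data.
+  //   std::vector<vk::PipelineExecutablePropertiesKHR> executables =
+  //       device.getPipelineExecutablePropertiesKHR( vk::PipelineInfoKHR( pipeline ) );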
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR* pStatistics, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( pExecutableInfo ), pStatisticCount, reinterpret_cast<VkPipelineExecutableStatisticKHR*>( pStatistics ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutableStatisticKHR,Allocator>>::type Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d ) const
+  {
+    std::vector<PipelineExecutableStatisticKHR,Allocator> statistics;
+    uint32_t statisticCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( &executableInfo ), &statisticCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && statisticCount )
+      {
+        statistics.resize( statisticCount );
+        result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( &executableInfo ), &statisticCount, reinterpret_cast<VkPipelineExecutableStatisticKHR*>( statistics.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
+      statistics.resize( statisticCount );
+    }
+    return createResultValue( result, statistics, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableStatisticsKHR" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutableStatisticKHR,Allocator>>::type Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<PipelineExecutableStatisticKHR,Allocator> statistics( vectorAllocator );
+    uint32_t statisticCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( &executableInfo ), &statisticCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && statisticCount )
+      {
+        statistics.resize( statisticCount );
+        result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( &executableInfo ), &statisticCount, reinterpret_cast<VkPipelineExecutableStatisticKHR*>( statistics.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
+      statistics.resize( statisticCount );
+    }
+    return createResultValue( result, statistics, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableStatisticsKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, dataSize, pData, static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename T, typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy<T> data, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
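+
+  // Usage sketch (editorial comment, not generated code): reading two 64-bit query results
+  // through the ArrayProxy overload; dataSize is derived from the proxy (size() * sizeof(T)),
+  // and eNotReady is a non-error success code here, so the Result is returned rather than thrown.
+  //   std::array<uint64_t, 2> results;
+  //   vk::Result status = device.getQueryPoolResults(
+  //       queryPool, 0, 2, vk::ArrayProxy<uint64_t>( results ),
+  //       sizeof( uint64_t ), vk::QueryResultFlagBits::e64 );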
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename T, typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> data, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getRayTracingShaderGroupHandlesNV" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE*>( pDisplayTimingProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties;
+    Result result = static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE*>( &displayTimingProperties ) ) );
+    return createResultValue( result, displayTimingProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getRefreshCycleDurationGOOGLE" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, VULKAN_HPP_NAMESPACE::Extent2D* pGranularity, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D*>( pGranularity ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::Extent2D granularity;
+    d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D*>( &granularity ) );
+    return granularity;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<uint64_t>::type Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d ) const
+  {
+    uint64_t value;
+    Result result = static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
+    return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING"::Device::getSemaphoreCounterValueKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
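+
+  // Usage sketch (editorial comment, not generated code): VK_KHR_timeline_semaphore, polling
+  // the current counter value of a timeline semaphore without blocking.
+  //   uint64_t value = device.getSemaphoreCounterValueKHR( timelineSemaphore );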
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR*>( pGetFdInfo ), pFd ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<int>::type Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d ) const
+  {
+    int fd;
+    Result result = static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR*>( &getFdInfo ), &fd ) );
+    return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING"::Device::getSemaphoreFdKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR*>( pGetWin32HandleInfo ), pHandle ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<HANDLE>::type Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const
+  {
+    HANDLE handle;
+    Result result = static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR*>( &getWin32HandleInfo ), &handle ) );
+    return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getSemaphoreWin32HandleKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), pInfoSize, pInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Dispatch const &d ) const
+  {
+    std::vector<uint8_t,Allocator> info;
+    size_t infoSize;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), &infoSize, nullptr ) );
+      if ( ( result == Result::eSuccess ) && infoSize )
+      {
+        info.resize( infoSize );
+        result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), &infoSize, reinterpret_cast<void*>( info.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( infoSize <= info.size() );
+      info.resize( infoSize );
+    }
+    return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING"::Device::getShaderInfoAMD" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<uint8_t,Allocator> info( vectorAllocator );
+    size_t infoSize;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), &infoSize, nullptr ) );
+      if ( ( result == Result::eSuccess ) && infoSize )
+      {
+        info.resize( infoSize );
+        result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), &infoSize, reinterpret_cast<void*>( info.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( infoSize <= info.size() );
+      info.resize( infoSize );
+    }
+    return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING"::Device::getShaderInfoAMD" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
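+
+  // Usage sketch (editorial comment, not generated code): VK_AMD_shader_info, fetching the
+  // driver's disassembly blob for the fragment stage of `pipeline`.
+  //   std::vector<uint8_t> disassembly = device.getShaderInfoAMD(
+  //       pipeline, vk::ShaderStageFlagBits::eFragment, vk::ShaderInfoTypeAMD::eDisassembly );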
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<uint64_t>::type Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const &d ) const
+  {
+    uint64_t counterValue;
+    Result result = static_cast<Result>( d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue ) );
+    return createResultValue( result, counterValue, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainCounterEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VULKAN_HPP_NAMESPACE::Image* pSwapchainImages, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast<VkImage*>( pSwapchainImages ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image,Allocator>>::type Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
+  {
+    std::vector<Image,Allocator> swapchainImages;
+    uint32_t swapchainImageCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && swapchainImageCount )
+      {
+        swapchainImages.resize( swapchainImageCount );
+        result = static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage*>( swapchainImages.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
+      swapchainImages.resize( swapchainImageCount );
+    }
+    return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainImagesKHR" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image,Allocator>>::type Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<Image,Allocator> swapchainImages( vectorAllocator );
+    uint32_t swapchainImageCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && swapchainImageCount )
+      {
+        swapchainImages.resize( swapchainImageCount );
+        result = static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage*>( swapchainImages.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
+      swapchainImages.resize( swapchainImageCount );
+    }
+    return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainImagesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
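+
+  // Usage sketch (editorial comment, not generated code): the enhanced overload wraps the
+  // usual two-call pattern and returns the swapchain images directly.
+  //   std::vector<vk::Image> swapchainImages = device.getSwapchainImagesKHR( swapchain );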
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainStatusKHR", { Result::eSuccess, Result::eSuboptimalKHR } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), pDataSize, pData ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const &d ) const
+  {
+    std::vector<uint8_t,Allocator> data;
+    size_t dataSize;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr ) );
+      if ( ( result == Result::eSuccess ) && dataSize )
+      {
+        data.resize( dataSize );
+        result = static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void*>( data.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( dataSize <= data.size() );
+      data.resize( dataSize );
+    }
+    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getValidationCacheDataEXT" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<uint8_t,Allocator> data( vectorAllocator );
+    size_t dataSize;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr ) );
+      if ( ( result == Result::eSuccess ) && dataSize )
+      {
+        data.resize( dataSize );
+        result = static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void*>( data.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( dataSize <= data.size() );
+      data.resize( dataSize );
+    }
+    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getValidationCacheDataEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR*>( pImportFenceFdInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR*>( &importFenceFdInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importFenceFdKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR*>( pImportFenceWin32HandleInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR*>( &importFenceWin32HandleInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importFenceWin32HandleKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR*>( pImportSemaphoreFdInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR*>( &importSemaphoreFdInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importSemaphoreFdKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR*>( pImportSemaphoreWin32HandleInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR*>( &importSemaphoreWin32HandleInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importSemaphoreWin32HandleKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL*>( pInitializeInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL*>( &initializeInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::initializePerformanceApiINTEL" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange*>( pMemoryRanges ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::invalidateMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> memoryRanges, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast<const VkMappedMemoryRange*>( memoryRanges.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::invalidateMappedMemoryRanges" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void** ppData, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( offset ), static_cast<VkDeviceSize>( size ), static_cast<VkMemoryMapFlags>( flags ), ppData ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void*>::type Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, Dispatch const &d ) const
+  {
+    void* pData;
+    Result result = static_cast<Result>( d.vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( offset ), static_cast<VkDeviceSize>( size ), static_cast<VkMemoryMapFlags>( flags ), &pData ) );
+    return createResultValue( result, pData, VULKAN_HPP_NAMESPACE_STRING"::Device::mapMemory" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
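+
+  // Usage sketch (editorial comment, not generated code): mapping a host-visible allocation,
+  // invalidating it before the host reads device writes (required on non-coherent heaps), then
+  // unmapping. Assumes `memory`, `size` and a destination buffer `dst` exist.
+  //   void * mapped = device.mapMemory( memory, 0, size );
+  //   device.invalidateMappedMemoryRanges( vk::MappedMemoryRange( memory, 0, VK_WHOLE_SIZE ) );
+  //   std::memcpy( dst, mapped, static_cast<size_t>( size ) );
+  //   device.unmapMemory( memory );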
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::PipelineCache* pSrcCaches, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache*>( pSrcCaches ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> srcCaches, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCaches.size() , reinterpret_cast<const VkPipelineCache*>( srcCaches.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::mergePipelineCaches" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pSrcCaches, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkMergeValidationCachesEXT( m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCacheCount, reinterpret_cast<const VkValidationCacheEXT*>( pSrcCaches ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> srcCaches, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkMergeValidationCachesEXT( m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCaches.size() , reinterpret_cast<const VkValidationCacheEXT*>( srcCaches.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::mergeValidationCachesEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT* pDeviceEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT*>( pDeviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFence*>( pFence ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    Result result = static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT*>( &deviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence*>( &fence ) ) );
+    return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerEventEXT" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Fence,Dispatch>>::type Device::registerEventEXTUnique( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    Result result = static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT*>( &deviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence*>( &fence ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<Fence,Dispatch>( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerEventEXTUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
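+
+  // Usage sketch for registerEventEXTUnique above, assuming a valid `device` and a
+  // filled DeviceEventInfoEXT `eventInfo`: the returned UniqueHandle owns the Fence
+  // and destroys it via the ObjectDestroy deleter when it goes out of scope.
+  //
+  //   auto fence = device.registerEventEXTUnique( eventInfo );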
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT* pDisplayEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT*>( pDisplayEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFence*>( pFence ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    Result result = static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT*>( &displayEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence*>( &fence ) ) );
+    return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerDisplayEventEXT" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Fence,Dispatch>>::type Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    Result result = static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT*>( &displayEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence*>( &fence ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<Fence,Dispatch>( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerDisplayEventEXTUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::registerObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, uint32_t objectCount, const VULKAN_HPP_NAMESPACE::ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkRegisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectCount, reinterpret_cast<const VkObjectTableEntryNVX* const*>( ppObjectTableEntries ), pObjectIndices ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::registerObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, ArrayProxy<const VULKAN_HPP_NAMESPACE::ObjectTableEntryNVX* const> pObjectTableEntries, ArrayProxy<const uint32_t> objectIndices, Dispatch const &d ) const
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( pObjectTableEntries.size() == objectIndices.size() );
+#else
+    if ( pObjectTableEntries.size() != objectIndices.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkDevice::registerObjectsNVX: pObjectTableEntries.size() != objectIndices.size()" );
+    }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+    Result result = static_cast<Result>( d.vkRegisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), pObjectTableEntries.size() , reinterpret_cast<const VkObjectTableEntryNVX* const*>( pObjectTableEntries.data() ), objectIndices.data() ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::registerObjectsNVX" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::releaseFullScreenExclusiveModeEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::releasePerformanceConfigurationINTEL" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkReleaseProfilingLockKHR( m_device );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkReleaseProfilingLockKHR( m_device );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
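+
+  // Note that both branches above are identical apart from whitespace: for a void
+  // command with no array or output parameters there is nothing for the enhanced
+  // overload to add, so the same body appears in both modes.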
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetCommandPool" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetDescriptorPool" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetEvent" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence*>( pFences ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::resetFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> fences, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkResetFences( m_device, fences.size() , reinterpret_cast<const VkFence*>( fences.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetFences" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkResetQueryPoolEXT( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkResetQueryPoolEXT( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT*>( pNameInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT*>( &nameInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setDebugUtilsObjectNameEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT*>( pTagInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT*>( &tagInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setDebugUtilsObjectTagEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setEvent" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT* pMetadata, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkSetHdrMetadataEXT( m_device, swapchainCount, reinterpret_cast<const VkSwapchainKHR*>( pSwapchains ), reinterpret_cast<const VkHdrMetadataEXT*>( pMetadata ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains, ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> metadata, Dispatch const &d ) const
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() );
+#else
+    if ( swapchains.size() != metadata.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkDevice::setHdrMetadataEXT: swapchains.size() != metadata.size()" );
+    }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+    d.vkSetHdrMetadataEXT( m_device, swapchains.size() , reinterpret_cast<const VkSwapchainKHR*>( swapchains.data() ), reinterpret_cast<const VkHdrMetadataEXT*>( metadata.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
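+
+  // The enhanced overload above requires its two ArrayProxy arguments to have the
+  // same length: a mismatch throws LogicError, or is checked by VULKAN_HPP_ASSERT
+  // when VULKAN_HPP_NO_EXCEPTIONS is defined. Sketch, assuming one valid swapchain
+  // of `device` and a filled HdrMetadataEXT:
+  //
+  //   std::array<VULKAN_HPP_NAMESPACE::SwapchainKHR, 1> chains = { swapchain };
+  //   std::array<VULKAN_HPP_NAMESPACE::HdrMetadataEXT, 1> metadata = { hdrMetadata };
+  //   device.setHdrMetadataEXT( chains, metadata );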
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkSetLocalDimmingAMD( m_device, static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkSetLocalDimmingAMD( m_device, static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfoKHR* pSignalInfo, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfoKHR*>( pSignalInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::signalSemaphoreKHR( const SemaphoreSignalInfoKHR & signalInfo, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfoKHR*>( &signalInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::signalSemaphoreKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkUninitializePerformanceApiINTEL( m_device );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkUninitializePerformanceApiINTEL( m_device );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::unregisterObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, uint32_t objectCount, const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkUnregisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectCount, reinterpret_cast<const VkObjectEntryTypeNVX*>( pObjectEntryTypes ), pObjectIndices ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::unregisterObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, ArrayProxy<const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX> objectEntryTypes, ArrayProxy<const uint32_t> objectIndices, Dispatch const &d ) const
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( objectEntryTypes.size() == objectIndices.size() );
+#else
+    if ( objectEntryTypes.size() != objectIndices.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkDevice::unregisterObjectsNVX: objectEntryTypes.size() != objectIndices.size()" );
+    }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+    Result result = static_cast<Result>( d.vkUnregisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectEntryTypes.size() , reinterpret_cast<const VkObjectEntryTypeNVX*>( objectEntryTypes.data() ), objectIndices.data() ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::unregisterObjectsNVX" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkUpdateDescriptorSetWithTemplateKHR( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkUpdateDescriptorSetWithTemplateKHR( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VULKAN_HPP_NAMESPACE::CopyDescriptorSet* pDescriptorCopies, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkUpdateDescriptorSets( m_device, descriptorWriteCount, reinterpret_cast<const VkWriteDescriptorSet*>( pDescriptorWrites ), descriptorCopyCount, reinterpret_cast<const VkCopyDescriptorSet*>( pDescriptorCopies ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSets( ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> descriptorWrites, ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> descriptorCopies, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkUpdateDescriptorSets( m_device, descriptorWrites.size() , reinterpret_cast<const VkWriteDescriptorSet*>( descriptorWrites.data() ), descriptorCopies.size() , reinterpret_cast<const VkCopyDescriptorSet*>( descriptorCopies.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkWaitForFences( m_device, fenceCount, reinterpret_cast<const VkFence*>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::waitForFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkWaitForFences( m_device, fences.size() , reinterpret_cast<const VkFence*>( fences.data() ), static_cast<VkBool32>( waitAll ), timeout ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitForFences", { Result::eSuccess, Result::eTimeout } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
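+
+  // Unlike most enhanced overloads, waitForFences still returns a Result, because
+  // eTimeout is passed to createResultValue as an additional success code alongside
+  // eSuccess. Sketch, assuming `fences` holds valid fences and `timeoutNs` is a
+  // timeout in nanoseconds:
+  //
+  //   VULKAN_HPP_NAMESPACE::Result r = device.waitForFences( fences, VK_TRUE, timeoutNs );
+  //   if ( r == VULKAN_HPP_NAMESPACE::Result::eTimeout ) { /* fences not yet signaled */ }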
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfoKHR* pWaitInfo, uint64_t timeout, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfoKHR*>( pWaitInfo ), timeout ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const SemaphoreWaitInfoKHR & waitInfo, uint64_t timeout, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfoKHR*>( &waitInfo ), timeout ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitSemaphoresKHR", { Result::eSuccess, Result::eTimeout } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createAndroidSurfaceKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createAndroidSurfaceKHRUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT* pCallback, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDebugReportCallbackEXT*>( pCallback ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type Instance::createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
+    Result result = static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDebugReportCallbackEXT*>( &callback ) ) );
+    return createResultValue( result, callback, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugReportCallbackEXT" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DebugReportCallbackEXT,Dispatch>>::type Instance::createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
+    Result result = static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDebugReportCallbackEXT*>( &callback ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<DebugReportCallbackEXT,Dispatch>( result, callback, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugReportCallbackEXTUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT* pMessenger, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDebugUtilsMessengerEXT*>( pMessenger ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type Instance::createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
+    Result result = static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDebugUtilsMessengerEXT*>( &messenger ) ) );
+    return createResultValue( result, messenger, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugUtilsMessengerEXT" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DebugUtilsMessengerEXT,Dispatch>>::type Instance::createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
+    Result result = static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDebugUtilsMessengerEXT*>( &messenger ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<DebugUtilsMessengerEXT,Dispatch>( result, messenger, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugUtilsMessengerEXTUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
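+
+  // vkCreateDebugUtilsMessengerEXT is an extension entry point, so the dispatcher
+  // passed as `d` needs to have it loaded (for example a DispatchLoaderDynamic
+  // initialized from this instance). Sketch, assuming `dispatcher` is such a loader
+  // and `createInfo` a filled DebugUtilsMessengerCreateInfoEXT:
+  //
+  //   auto messenger = instance.createDebugUtilsMessengerEXTUnique( createInfo, nullptr, dispatcher );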
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDisplayPlaneSurfaceKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDisplayPlaneSurfaceKHRUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createHeadlessSurfaceEXT" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createHeadlessSurfaceEXTUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createIOSSurfaceMVK" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createIOSSurfaceMVKUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createImagePipeSurfaceFUCHSIA" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createImagePipeSurfaceFUCHSIAUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMacOSSurfaceMVK" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMacOSSurfaceMVKUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast<const VkMetalSurfaceCreateInfoEXT*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast<const VkMetalSurfaceCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMetalSurfaceEXT" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast<const VkMetalSurfaceCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMetalSurfaceEXTUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+#ifdef VK_USE_PLATFORM_GGP
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createStreamDescriptorSurfaceGGP" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createStreamDescriptorSurfaceGGPUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_GGP*/
+
+#ifdef VK_USE_PLATFORM_VI_NN
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createViSurfaceNN" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createViSurfaceNNUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWaylandSurfaceKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWaylandSurfaceKHRUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWin32SurfaceKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWin32SurfaceKHRUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
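Usage sketch for the Win32 surface wrappers above (illustrative only, not part of the imported header; assumes a valid vk::Instance named instance plus HINSTANCE/HWND handles obtained from the application):

    #ifdef VK_USE_PLATFORM_WIN32_KHR
    vk::Win32SurfaceCreateInfoKHR surfaceInfo;
    surfaceInfo.hinstance = hinstance;   // application-provided HINSTANCE (assumed)
    surfaceInfo.hwnd      = hwnd;        // application-provided HWND (assumed)
    // In enhanced mode the wrapper returns the handle directly and throws vk::SystemError on failure.
    vk::SurfaceKHR surface = instance.createWin32SurfaceKHR( surfaceInfo );
    #endif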
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXcbSurfaceKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXcbSurfaceKHRUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXlibSurfaceKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXlibSurfaceKHRUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDebugReportMessageEXT( m_instance, static_cast<VkDebugReportFlagsEXT>( flags ), static_cast<VkDebugReportObjectTypeEXT>( objectType ), object, location, messageCode, pLayerPrefix, pMessage );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const &d ) const
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( layerPrefix.size() == message.size() );
+#else
+    if ( layerPrefix.size() != message.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkInstance::debugReportMessageEXT: layerPrefix.size() != message.size()" );
+    }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+    d.vkDebugReportMessageEXT( m_instance, static_cast<VkDebugReportFlagsEXT>( flags ), static_cast<VkDebugReportObjectTypeEXT>( objectType ), object, location, messageCode, layerPrefix.c_str(), message.c_str() );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
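Note on the std::string overload above: as generated, it insists that layerPrefix and message have the same length, asserting or throwing vk::LogicError otherwise, so callers typically use the raw-pointer overload. Illustrative call, assuming an instance created with VK_EXT_debug_report enabled and a dispatcher that has loaded vkDebugReportMessageEXT:

    instance.debugReportMessageEXT( vk::DebugReportFlagBitsEXT::eInformation,
                                    vk::DebugReportObjectTypeEXT::eUnknown,
                                    0 /*object*/, 0 /*location*/, 0 /*messageCode*/,
                                    "app", "swapchain recreated" );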
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
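Teardown sketch for the destroy wrappers above (illustrative; surface and instance as in the earlier sketches). Vulkan requires instance-level children such as surfaces to be destroyed before the instance itself:

    instance.destroySurfaceKHR( surface );   // equivalently: instance.destroy( surface );
    instance.destroy();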
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( pPhysicalDeviceGroupProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type Instance::enumeratePhysicalDeviceGroups(Dispatch const &d ) const
+  {
+    std::vector<PhysicalDeviceGroupProperties,Allocator> physicalDeviceGroupProperties;
+    uint32_t physicalDeviceGroupCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
+      {
+        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( physicalDeviceGroupProperties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+    }
+    return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroups" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type Instance::enumeratePhysicalDeviceGroups(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<PhysicalDeviceGroupProperties,Allocator> physicalDeviceGroupProperties( vectorAllocator );
+    uint32_t physicalDeviceGroupCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
+      {
+        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( physicalDeviceGroupProperties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+    }
+    return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroups" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( pPhysicalDeviceGroupProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type Instance::enumeratePhysicalDeviceGroupsKHR(Dispatch const &d ) const
+  {
+    std::vector<PhysicalDeviceGroupProperties,Allocator> physicalDeviceGroupProperties;
+    uint32_t physicalDeviceGroupCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
+      {
+        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( physicalDeviceGroupProperties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+    }
+    return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroupsKHR" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type Instance::enumeratePhysicalDeviceGroupsKHR(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<PhysicalDeviceGroupProperties,Allocator> physicalDeviceGroupProperties( vectorAllocator );
+    uint32_t physicalDeviceGroupCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
+      {
+        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( physicalDeviceGroupProperties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+    }
+    return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroupsKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
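Illustrative enhanced-mode call for the device-group enumeration above (assumes the same instance as before):

    std::vector<vk::PhysicalDeviceGroupProperties> groups = instance.enumeratePhysicalDeviceGroups();
    // Each element describes up to VK_MAX_DEVICE_GROUP_SIZE physical devices that
    // can back a single logical device.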
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice*>( pPhysicalDevices ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type Instance::enumeratePhysicalDevices(Dispatch const &d ) const
+  {
+    std::vector<PhysicalDevice,Allocator> physicalDevices;
+    uint32_t physicalDeviceCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && physicalDeviceCount )
+      {
+        physicalDevices.resize( physicalDeviceCount );
+        result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice*>( physicalDevices.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
+      physicalDevices.resize( physicalDeviceCount );
+    }
+    return createResultValue( result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDevices" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type Instance::enumeratePhysicalDevices(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<PhysicalDevice,Allocator> physicalDevices( vectorAllocator );
+    uint32_t physicalDeviceCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && physicalDeviceCount )
+      {
+        physicalDevices.resize( physicalDeviceCount );
+        result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice*>( physicalDevices.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
+      physicalDevices.resize( physicalDeviceCount );
+    }
+    return createResultValue( result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDevices" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
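Illustrative enhanced-mode call for enumeratePhysicalDevices above, which hides the VK_INCOMPLETE retry loop (assumes the same instance as before):

    std::vector<vk::PhysicalDevice> physicalDevices = instance.enumeratePhysicalDevices();
    for ( vk::PhysicalDevice const & pd : physicalDevices )
    {
      vk::PhysicalDeviceProperties props = pd.getProperties();
      // pick a device based on props.deviceType, props.limits, queue families, etc.
    }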
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char* pName, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetInstanceProcAddr( m_instance, pName );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetInstanceProcAddr( m_instance, name.c_str() );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
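Illustrative use of getProcAddr above to fetch an extension entry point (the PFN type comes from vulkan_core.h; the result may be nullptr if the extension is unavailable):

    auto pfnCreateDebugUtilsMessengerEXT =
        reinterpret_cast<PFN_vkCreateDebugUtilsMessengerEXT>(
            instance.getProcAddr( "vkCreateDebugUtilsMessengerEXT" ) );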
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT*>( pCallbackData ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT*>( &callbackData ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display* dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<Display>::type PhysicalDevice::acquireXlibDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const
+  {
+    Display dpy;
+    Result result = static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) ) );
+    return createResultValue( result, dpy, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::acquireXlibDisplayEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Device* pDevice, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDevice*>( pDevice ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type PhysicalDevice::createDevice( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Device device;
+    Result result = static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDevice*>( &device ) ) );
+    return createResultValue( result, device, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::createDevice" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Device,Dispatch>>::type PhysicalDevice::createDeviceUnique( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Device device;
+    Result result = static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDevice*>( &device ) ) );
+
+    ObjectDestroy<NoParent,Dispatch> deleter( allocator, d );
+    return createResultValue<Device,Dispatch>( result, device, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::createDeviceUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
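Illustrative enhanced-mode device creation using the wrappers above (assumes a previously selected physicalDevice and queueFamilyIndex):

    float queuePriority = 1.0f;
    vk::DeviceQueueCreateInfo queueInfo( vk::DeviceQueueCreateFlags(), queueFamilyIndex, 1, &queuePriority );
    vk::DeviceCreateInfo deviceInfo( vk::DeviceCreateFlags(), 1, &queueInfo );
    vk::Device device = physicalDevice.createDevice( deviceInfo );   // throws vk::SystemError on failure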
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DisplayModeKHR* pMode, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDisplayModeKHR*>( pMode ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
+    Result result = static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDisplayModeKHR*>( &mode ) ) );
+    return createResultValue( result, mode, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::createDisplayModeKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Dispatch const &d ) const
+  {
+    std::vector<ExtensionProperties,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceExtensionProperties" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<ExtensionProperties,Allocator> properties( vectorAllocator );
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceExtensionProperties" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
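Illustrative check for a specific device extension via the wrapper above (needs <algorithm> and <cstring>; assumes the physicalDevice from the earlier sketch):

    std::vector<vk::ExtensionProperties> extensions = physicalDevice.enumerateDeviceExtensionProperties();
    bool hasSwapchain = std::any_of( extensions.begin(), extensions.end(),
        []( vk::ExtensionProperties const & e )
        { return strcmp( e.extensionName, VK_KHR_SWAPCHAIN_EXTENSION_NAME ) == 0; } );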
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type PhysicalDevice::enumerateDeviceLayerProperties(Dispatch const &d ) const
+  {
+    std::vector<LayerProperties,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceLayerProperties" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type PhysicalDevice::enumerateDeviceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<LayerProperties,Allocator> properties( vectorAllocator );
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceLayerProperties" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, uint32_t* pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR* pCounters, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR* pCounterDescriptions, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, pCounterCount, reinterpret_cast<VkPerformanceCounterKHR*>( pCounters ), reinterpret_cast<VkPerformanceCounterDescriptionKHR*>( pCounterDescriptions ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR,Allocator>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> counters, Dispatch const &d ) const
+  {
+    std::vector<PerformanceCounterDescriptionKHR,Allocator> counterDescriptions;
+    uint32_t counterCount;
+    Result result;
+    do
+    {
+      counterCount = counters.size();
+      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR*>( counters.data() ), nullptr ) );
+      if ( ( result == Result::eSuccess ) && counterCount )
+      {
+        counterDescriptions.resize( counterCount );
+        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR*>( counters.data() ), reinterpret_cast<VkPerformanceCounterDescriptionKHR*>( counterDescriptions.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( counterCount <= counterDescriptions.size() );
+      counterDescriptions.resize( counterCount );
+    }
+    return createResultValue( result, counterDescriptions, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR,Allocator>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> counters, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<PerformanceCounterDescriptionKHR,Allocator> counterDescriptions( vectorAllocator );
+    uint32_t counterCount;
+    Result result;
+    do
+    {
+      counterCount = counters.size();
+      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR*>( counters.data() ), nullptr ) );
+      if ( ( result == Result::eSuccess ) && counterCount )
+      {
+        counterDescriptions.resize( counterCount );
+        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR*>( counters.data() ), reinterpret_cast<VkPerformanceCounterDescriptionKHR*>( counterDescriptions.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( counterCount <= counterDescriptions.size() );
+      counterDescriptions.resize( counterCount );
+    }
+    return createResultValue( result, counterDescriptions, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR* pProperties, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModeProperties2KHR*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const
+  {
+    std::vector<DisplayModeProperties2KHR,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModeProperties2KHR" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<DisplayModeProperties2KHR,Allocator> properties( vectorAllocator );
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModeProperties2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR* pProperties, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModePropertiesKHR*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const
+  {
+    std::vector<DisplayModePropertiesKHR,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModePropertiesKHR" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<DisplayModePropertiesKHR,Allocator> properties( vectorAllocator );
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModePropertiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR* pDisplayPlaneInfo, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR* pCapabilities, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkDisplayPlaneInfo2KHR*>( pDisplayPlaneInfo ), reinterpret_cast<VkDisplayPlaneCapabilities2KHR*>( pCapabilities ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type PhysicalDevice::getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities;
+    Result result = static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkDisplayPlaneInfo2KHR*>( &displayPlaneInfo ), reinterpret_cast<VkDisplayPlaneCapabilities2KHR*>( &capabilities ) ) );
+    return createResultValue( result, capabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneCapabilities2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( pCapabilities ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities;
+    Result result = static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( &capabilities ) ) );
+    return createResultValue( result, capabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplays, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR*>( pDisplays ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const &d ) const
+  {
+    std::vector<DisplayKHR,Allocator> displays;
+    uint32_t displayCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && displayCount )
+      {
+        displays.resize( displayCount );
+        result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR*>( displays.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( displayCount <= displays.size() );
+      displays.resize( displayCount );
+    }
+    return createResultValue( result, displays, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<DisplayKHR,Allocator> displays( vectorAllocator );
+    uint32_t displayCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && displayCount )
+      {
+        displays.resize( displayCount );
+        result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR*>( displays.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( displayCount <= displays.size() );
+      displays.resize( displayCount );
+    }
+    return createResultValue( result, displays, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainEXT* pTimeDomains, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainEXT*>( pTimeDomains ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<TimeDomainEXT,Allocator>>::type PhysicalDevice::getCalibrateableTimeDomainsEXT(Dispatch const &d ) const
+  {
+    std::vector<TimeDomainEXT,Allocator> timeDomains;
+    uint32_t timeDomainCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && timeDomainCount )
+      {
+        timeDomains.resize( timeDomainCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT*>( timeDomains.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
+      timeDomains.resize( timeDomainCount );
+    }
+    return createResultValue( result, timeDomains, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<TimeDomainEXT,Allocator>>::type PhysicalDevice::getCalibrateableTimeDomainsEXT(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<TimeDomainEXT,Allocator> timeDomains( vectorAllocator );
+    uint32_t timeDomainCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && timeDomainCount )
+      {
+        timeDomains.resize( timeDomainCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT*>( timeDomains.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
+      timeDomains.resize( timeDomainCount );
+    }
+    return createResultValue( result, timeDomains, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesNV( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV* pProperties, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV,Allocator>>::type PhysicalDevice::getCooperativeMatrixPropertiesNV(Dispatch const &d ) const
+  {
+    std::vector<CooperativeMatrixPropertiesNV,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV,Allocator>>::type PhysicalDevice::getCooperativeMatrixPropertiesNV(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<CooperativeMatrixPropertiesNV,Allocator> properties( vectorAllocator );
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR* pProperties, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlaneProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayPlaneProperties2KHR(Dispatch const &d ) const
+  {
+    std::vector<DisplayPlaneProperties2KHR,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneProperties2KHR" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlaneProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayPlaneProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<DisplayPlaneProperties2KHR,Allocator> properties( vectorAllocator );
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneProperties2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR* pProperties, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayPlanePropertiesKHR(Dispatch const &d ) const
+  {
+    std::vector<DisplayPlanePropertiesKHR,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlanePropertiesKHR" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayPlanePropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<DisplayPlanePropertiesKHR,Allocator> properties( vectorAllocator );
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlanePropertiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR* pProperties, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayProperties2KHR*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayProperties2KHR(Dispatch const &d ) const
+  {
+    std::vector<DisplayProperties2KHR,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayProperties2KHR" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<DisplayProperties2KHR,Allocator> properties( vectorAllocator );
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayProperties2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR* pProperties, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayPropertiesKHR(Dispatch const &d ) const
+  {
+    std::vector<DisplayPropertiesKHR,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPropertiesKHR" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayPropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<DisplayPropertiesKHR,Allocator> properties( vectorAllocator );
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPropertiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( pExternalBufferInfo ), reinterpret_cast<VkExternalBufferProperties*>( pExternalBufferProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties PhysicalDevice::getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
+    d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( &externalBufferInfo ), reinterpret_cast<VkExternalBufferProperties*>( &externalBufferProperties ) );
+    return externalBufferProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( pExternalBufferInfo ), reinterpret_cast<VkExternalBufferProperties*>( pExternalBufferProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties PhysicalDevice::getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
+    d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( &externalBufferInfo ), reinterpret_cast<VkExternalBufferProperties*>( &externalBufferProperties ) );
+    return externalBufferProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( pExternalFenceInfo ), reinterpret_cast<VkExternalFenceProperties*>( pExternalFenceProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties PhysicalDevice::getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
+    d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( &externalFenceInfo ), reinterpret_cast<VkExternalFenceProperties*>( &externalFenceProperties ) );
+    return externalFenceProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( pExternalFenceInfo ), reinterpret_cast<VkExternalFenceProperties*>( pExternalFenceProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties PhysicalDevice::getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
+    d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( &externalFenceInfo ), reinterpret_cast<VkExternalFenceProperties*>( &externalFenceProperties ) );
+    return externalFenceProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), reinterpret_cast<VkExternalImageFormatPropertiesNV*>( pExternalImageFormatProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties;
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), reinterpret_cast<VkExternalImageFormatPropertiesNV*>( &externalImageFormatProperties ) ) );
+    return createResultValue( result, externalImageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getExternalImageFormatPropertiesNV" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( pExternalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties*>( pExternalSemaphoreProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
+    d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( &externalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties*>( &externalSemaphoreProperties ) );
+    return externalSemaphoreProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( pExternalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties*>( pExternalSemaphoreProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
+    d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( &externalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties*>( &externalSemaphoreProperties ) );
+    return externalSemaphoreProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures*>( pFeatures ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures PhysicalDevice::getFeatures(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features;
+    d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures*>( &features ) );
+    return features;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( pFeatures ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
+    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( &features ) );
+    return features;
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2& features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
+    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( &features ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( pFeatures ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
+    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( &features ) );
+    return features;
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2& features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
+    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( &features ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties* pFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties*>( pFormatProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::FormatProperties formatProperties;
+    d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties*>( &formatProperties ) );
+    return formatProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( pFormatProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
+    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( &formatProperties ) );
+    return formatProperties;
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::FormatProperties2& formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
+    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( &formatProperties ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( pFormatProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
+    d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( &formatProperties ) );
+    return formatProperties;
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::FormatProperties2& formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
+    d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( &formatProperties ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getGeneratedCommandsPropertiesNVX( VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsFeaturesNVX* pFeatures, VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX* pLimits, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>( pFeatures ), reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>( pLimits ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX PhysicalDevice::getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX limits;
+    d.vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>( &features ), reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>( &limits ) );
+    return limits;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties* pImageFormatProperties, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties*>( pImageFormatProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties;
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties*>( &imageFormatProperties ) ) );
+    return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( pImageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( pImageFormatProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( &imageFormatProperties ) ) );
+    return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2" );
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2& imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( &imageFormatProperties ) ) );
+    return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( pImageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( pImageFormatProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( &imageFormatProperties ) ) );
+    return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2KHR" );
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2& imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( &imageFormatProperties ) ) );
+    return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties* pMemoryProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( pMemoryProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties PhysicalDevice::getMemoryProperties(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties;
+    d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( &memoryProperties ) );
+    return memoryProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( pMemoryProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
+    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( &memoryProperties ) );
+    return memoryProperties;
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2& memoryProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
+    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( &memoryProperties ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( pMemoryProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
+    d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( &memoryProperties ) );
+    return memoryProperties;
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2& memoryProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
+    d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( &memoryProperties ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT* pMultisampleProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceMultisamplePropertiesEXT( m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT*>( pMultisampleProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties;
+    d.vkGetPhysicalDeviceMultisamplePropertiesEXT( m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT*>( &multisampleProperties ) );
+    return multisampleProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pRectCount, VULKAN_HPP_NAMESPACE::Rect2D* pRects, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D*>( pRects ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D,Allocator>>::type PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
+  {
+    std::vector<Rect2D,Allocator> rects;
+    uint32_t rectCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && rectCount )
+      {
+        rects.resize( rectCount );
+        result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D*>( rects.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( rectCount <= rects.size() );
+      rects.resize( rectCount );
+    }
+    return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getPresentRectanglesKHR" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D,Allocator>>::type PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<Rect2D,Allocator> rects( vectorAllocator );
+    uint32_t rectCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && rectCount )
+      {
+        rects.resize( rectCount );
+        result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D*>( rects.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( rectCount <= rects.size() );
+      rects.resize( rectCount );
+    }
+    return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getPresentRectanglesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties*>( pProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties PhysicalDevice::getProperties(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties;
+    d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties*>( &properties ) );
+    return properties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( pProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
+    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( &properties ) );
+    return properties;
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2& properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
+    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( &properties ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( pProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
+    d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( &properties ) );
+    return properties;
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2& properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
+    d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( &properties ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR*>( pPerformanceQueryCreateInfo ), pNumPasses );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    uint32_t numPasses;
+    d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR*>( &performanceQueryCreateInfo ), &numPasses );
+    return numPasses;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties* pQueueFamilyProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties*>( pQueueFamilyProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties,Allocator> PhysicalDevice::getQueueFamilyProperties(Dispatch const &d ) const
+  {
+    std::vector<QueueFamilyProperties,Allocator> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties*>( queueFamilyProperties.data() ) );
+    return queueFamilyProperties;
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties,Allocator> PhysicalDevice::getQueueFamilyProperties(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<QueueFamilyProperties,Allocator> queueFamilyProperties( vectorAllocator );
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties*>( queueFamilyProperties.data() ) );
+    return queueFamilyProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( pQueueFamilyProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2,Allocator> PhysicalDevice::getQueueFamilyProperties2(Dispatch const &d ) const
+  {
+    std::vector<QueueFamilyProperties2,Allocator> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
+    return queueFamilyProperties;
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2,Allocator> PhysicalDevice::getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<QueueFamilyProperties2,Allocator> queueFamilyProperties( vectorAllocator );
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
+    return queueFamilyProperties;
+  }
+  template<typename StructureChain, typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<StructureChain,Allocator> PhysicalDevice::getQueueFamilyProperties2(Dispatch const &d ) const
+  {
+    std::vector<StructureChain,Allocator> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> localVector( queueFamilyPropertyCount );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+    {
+      localVector[i].pNext = queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+    }
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( localVector.data() ) );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+    {
+      queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = localVector[i];
+    }
+    return queueFamilyProperties;
+  }
+  template<typename StructureChain, typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<StructureChain,Allocator> PhysicalDevice::getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<StructureChain,Allocator> queueFamilyProperties( vectorAllocator );
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> localVector( queueFamilyPropertyCount );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+    {
+      localVector[i].pNext = queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+    }
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( localVector.data() ) );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+    {
+      queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = localVector[i];
+    }
+    return queueFamilyProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( pQueueFamilyProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2,Allocator> PhysicalDevice::getQueueFamilyProperties2KHR(Dispatch const &d ) const
+  {
+    std::vector<QueueFamilyProperties2,Allocator> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
+    return queueFamilyProperties;
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2,Allocator> PhysicalDevice::getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<QueueFamilyProperties2,Allocator> queueFamilyProperties( vectorAllocator );
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
+    return queueFamilyProperties;
+  }
+  template<typename StructureChain, typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<StructureChain,Allocator> PhysicalDevice::getQueueFamilyProperties2KHR(Dispatch const &d ) const
+  {
+    std::vector<StructureChain,Allocator> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> localVector( queueFamilyPropertyCount );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+    {
+      localVector[i].pNext = queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+    }
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( localVector.data() ) );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+    {
+      queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = localVector[i];
+    }
+    return queueFamilyProperties;
+  }
+  template<typename StructureChain, typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<StructureChain,Allocator> PhysicalDevice::getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<StructureChain,Allocator> queueFamilyProperties( vectorAllocator );
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> localVector( queueFamilyPropertyCount );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+    {
+      localVector[i].pNext = queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+    }
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( localVector.data() ) );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+    {
+      queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = localVector[i];
+    }
+    return queueFamilyProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
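  // Illustrative sketch, not part of the imported header: the enhanced-mode
  // overloads above fold Vulkan's count/fill enumeration into a single call that
  // returns a std::vector. Hypothetical helper, assuming <vulkan/vulkan.hpp> is
  // included and the dispatcher can reach the Vulkan 1.1 entry point:
  //
  //   inline uint32_t pickGraphicsQueueFamily( vk::PhysicalDevice physicalDevice )
  //   {
  //     std::vector<vk::QueueFamilyProperties2> families = physicalDevice.getQueueFamilyProperties2();
  //     for ( uint32_t i = 0; i < static_cast<uint32_t>( families.size() ); i++ )
  //     {
  //       if ( families[i].queueFamilyProperties.queueFlags & vk::QueueFlagBits::eGraphics )
  //       {
  //         return i;  // first family that exposes graphics work
  //       }
  //     }
  //     return ~0u;  // no graphics-capable family on this device
  //   }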
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties*>( pProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties,Allocator> PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Dispatch const &d ) const
+  {
+    std::vector<SparseImageFormatProperties,Allocator> properties;
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties*>( properties.data() ) );
+    return properties;
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties,Allocator> PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<SparseImageFormatProperties,Allocator> properties( vectorAllocator );
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties*>( properties.data() ) );
+    return properties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( pFormatInfo ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties2*>( pProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2,Allocator> PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d ) const
+  {
+    std::vector<SparseImageFormatProperties2,Allocator> properties;
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2*>( properties.data() ) );
+    return properties;
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2,Allocator> PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<SparseImageFormatProperties2,Allocator> properties( vectorAllocator );
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2*>( properties.data() ) );
+    return properties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( pFormatInfo ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties2*>( pProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2,Allocator> PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d ) const
+  {
+    std::vector<SparseImageFormatProperties2,Allocator> properties;
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2*>( properties.data() ) );
+    return properties;
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2,Allocator> PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<SparseImageFormatProperties2,Allocator> properties( vectorAllocator );
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2*>( properties.data() ) );
+    return properties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t* pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV* pCombinations, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, pCombinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV*>( pCombinations ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<FramebufferMixedSamplesCombinationNV,Allocator>>::type PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(Dispatch const &d ) const
+  {
+    std::vector<FramebufferMixedSamplesCombinationNV,Allocator> combinations;
+    uint32_t combinationCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && combinationCount )
+      {
+        combinations.resize( combinationCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV*>( combinations.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
+      combinations.resize( combinationCount );
+    }
+    return createResultValue( result, combinations, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<FramebufferMixedSamplesCombinationNV,Allocator>>::type PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<FramebufferMixedSamplesCombinationNV,Allocator> combinations( vectorAllocator );
+    uint32_t combinationCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && combinationCount )
+      {
+        combinations.resize( combinationCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV*>( combinations.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
+      combinations.resize( combinationCount );
+    }
+    return createResultValue( result, combinations, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT*>( pSurfaceCapabilities ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities;
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT*>( &surfaceCapabilities ) ) );
+    return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2EXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( pSurfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR*>( pSurfaceCapabilities ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities;
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR*>( &surfaceCapabilities ) ) );
+    return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2KHR" );
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR& surfaceCapabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>();
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR*>( &surfaceCapabilities ) ) );
+    return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
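  // Illustrative sketch, not part of the imported header: the StructureChain
  // overload above lets extension structures ride along on the same query. The
  // pairing below (SurfaceCapabilities2KHR + DisplayNativeHdrSurfaceCapabilitiesAMD)
  // is one of the chains declared valid later in this file; it assumes the
  // VK_EXT_get_surface_capabilities2 and VK_AMD_display_native_hdr extensions are
  // enabled and that the dispatcher can resolve their entry points.
  //
  //   inline bool supportsLocalDimming( vk::PhysicalDevice physicalDevice, vk::SurfaceKHR surface )
  //   {
  //     vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo( surface );
  //     auto chain = physicalDevice.getSurfaceCapabilities2KHR<vk::SurfaceCapabilities2KHR,
  //                                                            vk::DisplayNativeHdrSurfaceCapabilitiesAMD>( surfaceInfo );
  //     return chain.get<vk::DisplayNativeHdrSurfaceCapabilitiesAMD>().localDimmingSupport == VK_TRUE;
  //   }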
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR*>( pSurfaceCapabilities ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities;
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR*>( &surfaceCapabilities ) ) );
+    return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilitiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( pSurfaceInfo ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR*>( pSurfaceFormats ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
+  {
+    std::vector<SurfaceFormat2KHR,Allocator> surfaceFormats;
+    uint32_t surfaceFormatCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &surfaceFormatCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && surfaceFormatCount )
+      {
+        surfaceFormats.resize( surfaceFormatCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR*>( surfaceFormats.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+      surfaceFormats.resize( surfaceFormatCount );
+    }
+    return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormats2KHR" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<SurfaceFormat2KHR,Allocator> surfaceFormats( vectorAllocator );
+    uint32_t surfaceFormatCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &surfaceFormatCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && surfaceFormatCount )
+      {
+        surfaceFormats.resize( surfaceFormatCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR*>( surfaceFormats.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+      surfaceFormats.resize( surfaceFormatCount );
+    }
+    return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormats2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR*>( pSurfaceFormats ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
+  {
+    std::vector<SurfaceFormatKHR,Allocator> surfaceFormats;
+    uint32_t surfaceFormatCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && surfaceFormatCount )
+      {
+        surfaceFormats.resize( surfaceFormatCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR*>( surfaceFormats.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+      surfaceFormats.resize( surfaceFormatCount );
+    }
+    return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormatsKHR" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<SurfaceFormatKHR,Allocator> surfaceFormats( vectorAllocator );
+    uint32_t surfaceFormatCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && surfaceFormatCount )
+      {
+        surfaceFormats.resize( surfaceFormatCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR*>( surfaceFormats.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+      surfaceFormats.resize( surfaceFormatCount );
+    }
+    return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormatsKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
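  // Illustrative sketch, not part of the imported header: the enhanced-mode
  // getSurfaceFormatsKHR above runs the VK_INCOMPLETE retry loop internally and
  // reports failures through createResultValue. A typical swapchain format pick,
  // assuming physicalDevice and surface are valid and VK_KHR_surface is enabled:
  //
  //   inline vk::SurfaceFormatKHR pickSurfaceFormat( vk::PhysicalDevice physicalDevice, vk::SurfaceKHR surface )
  //   {
  //     std::vector<vk::SurfaceFormatKHR> formats = physicalDevice.getSurfaceFormatsKHR( surface );
  //     for ( vk::SurfaceFormatKHR const & format : formats )
  //     {
  //       if ( ( format.format == vk::Format::eB8G8R8A8Unorm ) && ( format.colorSpace == vk::ColorSpaceKHR::eSrgbNonlinear ) )
  //       {
  //         return format;  // preferred 8-bit BGRA / sRGB pair
  //       }
  //     }
  //     return formats.front();  // the spec guarantees at least one reported format
  //   }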
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( pSurfaceInfo ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR*>( pPresentModes ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
+  {
+    std::vector<PresentModeKHR,Allocator> presentModes;
+    uint32_t presentModeCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &presentModeCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && presentModeCount )
+      {
+        presentModes.resize( presentModeCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &presentModeCount, reinterpret_cast<VkPresentModeKHR*>( presentModes.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+      presentModes.resize( presentModeCount );
+    }
+    return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModes2EXT" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<PresentModeKHR,Allocator> presentModes( vectorAllocator );
+    uint32_t presentModeCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &presentModeCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && presentModeCount )
+      {
+        presentModes.resize( presentModeCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &presentModeCount, reinterpret_cast<VkPresentModeKHR*>( presentModes.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+      presentModes.resize( presentModeCount );
+    }
+    return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModes2EXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR*>( pPresentModes ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
+  {
+    std::vector<PresentModeKHR,Allocator> presentModes;
+    uint32_t presentModeCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && presentModeCount )
+      {
+        presentModes.resize( presentModeCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR*>( presentModes.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+      presentModes.resize( presentModeCount );
+    }
+    return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModesKHR" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<PresentModeKHR,Allocator> presentModes( vectorAllocator );
+    uint32_t presentModeCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && presentModeCount )
+      {
+        presentModes.resize( presentModeCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR*>( presentModes.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+      presentModes.resize( presentModeCount );
+    }
+    return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::Bool32* pSupported, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32*>( pSupported ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Bool32 supported;
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32*>( &supported ) ) );
+    return createResultValue( result, supported, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceSupportKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
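  // Illustrative sketch, not part of the imported header: getSurfaceSupportKHR
  // above is the per-queue-family presentation check; in enhanced mode it returns
  // vk::Bool32 directly and signals errors via createResultValue. Hypothetical
  // wrapper, assuming valid handles:
  //
  //   inline bool canPresent( vk::PhysicalDevice physicalDevice, vk::SurfaceKHR surface, uint32_t queueFamilyIndex )
  //   {
  //     return physicalDevice.getSurfaceSupportKHR( queueFamilyIndex, surface ) == VK_TRUE;
  //   }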
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getToolPropertiesEXT( uint32_t* pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT* pToolProperties, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT*>( pToolProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceToolPropertiesEXT,Allocator>>::type PhysicalDevice::getToolPropertiesEXT(Dispatch const &d ) const
+  {
+    std::vector<PhysicalDeviceToolPropertiesEXT,Allocator> toolProperties;
+    uint32_t toolCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && toolCount )
+      {
+        toolProperties.resize( toolCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT*>( toolProperties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
+      toolProperties.resize( toolCount );
+    }
+    return createResultValue( result, toolProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getToolPropertiesEXT" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceToolPropertiesEXT,Allocator>>::type PhysicalDevice::getToolPropertiesEXT(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<PhysicalDeviceToolPropertiesEXT,Allocator> toolProperties( vectorAllocator );
+    uint32_t toolCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && toolCount )
+      {
+        toolProperties.resize( toolCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT*>( toolProperties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
+      toolProperties.resize( toolCount );
+    }
+    return createResultValue( result, toolProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getToolPropertiesEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Bool32>( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Bool32>( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Bool32>( d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Bool32>( d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplay, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast<VkDisplayKHR*>( pDisplay ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DisplayKHR display;
+    Result result = static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR*>( &display ) ) );
+    return createResultValue( result, display, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getRandROutputDisplayEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::releaseDisplayEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t* pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointDataNV* pCheckpointData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetQueueCheckpointDataNV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV*>( pCheckpointData ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<CheckpointDataNV,Allocator> Queue::getCheckpointDataNV(Dispatch const &d ) const
+  {
+    std::vector<CheckpointDataNV,Allocator> checkpointData;
+    uint32_t checkpointDataCount;
+    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
+    checkpointData.resize( checkpointDataCount );
+    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV*>( checkpointData.data() ) );
+    return checkpointData;
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<CheckpointDataNV,Allocator> Queue::getCheckpointDataNV(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<CheckpointDataNV,Allocator> checkpointData( vectorAllocator );
+    uint32_t checkpointDataCount;
+    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
+    checkpointData.resize( checkpointDataCount );
+    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV*>( checkpointData.data() ) );
+    return checkpointData;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT*>( pLabelInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT*>( &labelInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindSparseInfo* pBindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo*>( pBindInfo ), static_cast<VkFence>( fence ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Queue::bindSparse( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkQueueBindSparse( m_queue, bindInfo.size() , reinterpret_cast<const VkBindSparseInfo*>( bindInfo.data() ), static_cast<VkFence>( fence ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::bindSparse" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkQueueEndDebugUtilsLabelEXT( m_queue );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkQueueEndDebugUtilsLabelEXT( m_queue );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT*>( pLabelInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT*>( &labelInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR* pPresentInfo, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( pPresentInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR & presentInfo, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( &presentInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::presentKHR", { Result::eSuccess, Result::eSuboptimalKHR } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::setPerformanceConfigurationINTEL" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo* pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo*>( pSubmits ), static_cast<VkFence>( fence ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Queue::submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkQueueSubmit( m_queue, submits.size() , reinterpret_cast<const VkSubmitInfo*>( submits.data() ), static_cast<VkFence>( fence ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::submit" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Queue::waitIdle(Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Queue::waitIdle(Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::waitIdle" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
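  // Illustrative sketch, not part of the imported header: the enhanced-mode Queue
  // wrappers above take ArrayProxy arguments (so a single SubmitInfo can be passed
  // by reference) and convert VkResult into exceptions or ResultValue. Hypothetical
  // helper, assuming the command buffer has already been recorded:
  //
  //   inline void submitAndWait( vk::Queue queue, vk::CommandBuffer commandBuffer )
  //   {
  //     vk::SubmitInfo submitInfo;
  //     submitInfo.commandBufferCount = 1;
  //     submitInfo.pCommandBuffers    = &commandBuffer;
  //     queue.submit( submitInfo, vk::Fence() );  // null fence: nothing to signal
  //     queue.waitIdle();                         // block until the submission retires
  //   }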
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  template <> struct isStructureChainValid<AndroidHardwareBufferPropertiesANDROID, AndroidHardwareBufferFormatPropertiesANDROID>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  template <> struct isStructureChainValid<ImageFormatProperties2, AndroidHardwareBufferUsageANDROID>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+  template <> struct isStructureChainValid<AttachmentDescription2KHR, AttachmentDescriptionStencilLayoutKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<AttachmentReference2KHR, AttachmentReferenceStencilLayoutKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<BindBufferMemoryInfo, BindBufferMemoryDeviceGroupInfo>{ enum { value = true }; };
+  template <> struct isStructureChainValid<BindImageMemoryInfo, BindImageMemoryDeviceGroupInfo>{ enum { value = true }; };
+  template <> struct isStructureChainValid<BindImageMemoryInfo, BindImageMemorySwapchainInfoKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<BindImageMemoryInfo, BindImagePlaneMemoryInfo>{ enum { value = true }; };
+  template <> struct isStructureChainValid<BufferCreateInfo, BufferDeviceAddressCreateInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<BufferCreateInfo, BufferOpaqueCaptureAddressCreateInfoKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<CommandBufferInheritanceInfo, CommandBufferInheritanceConditionalRenderingInfoEXT>{ enum { value = true }; };
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <> struct isStructureChainValid<SubmitInfo, D3D12FenceSubmitInfoKHR>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  template <> struct isStructureChainValid<InstanceCreateInfo, DebugReportCallbackCreateInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<InstanceCreateInfo, DebugUtilsMessengerCreateInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<BufferCreateInfo, DedicatedAllocationBufferCreateInfoNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<ImageCreateInfo, DedicatedAllocationImageCreateInfoNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<MemoryAllocateInfo, DedicatedAllocationMemoryAllocateInfoNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DescriptorPoolCreateInfo, DescriptorPoolInlineUniformBlockCreateInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DescriptorSetLayoutCreateInfo, DescriptorSetLayoutBindingFlagsCreateInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DescriptorSetAllocateInfo, DescriptorSetVariableDescriptorCountAllocateInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DescriptorSetLayoutSupport, DescriptorSetVariableDescriptorCountLayoutSupportEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<BindSparseInfo, DeviceGroupBindSparseInfo>{ enum { value = true }; };
+  template <> struct isStructureChainValid<CommandBufferBeginInfo, DeviceGroupCommandBufferBeginInfo>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, DeviceGroupDeviceCreateInfo>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PresentInfoKHR, DeviceGroupPresentInfoKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<RenderPassBeginInfo, DeviceGroupRenderPassBeginInfo>{ enum { value = true }; };
+  template <> struct isStructureChainValid<SubmitInfo, DeviceGroupSubmitInfo>{ enum { value = true }; };
+  template <> struct isStructureChainValid<SwapchainCreateInfoKHR, DeviceGroupSwapchainCreateInfoKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, DeviceMemoryOverallocationCreateInfoAMD>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceQueueCreateInfo, DeviceQueueGlobalPriorityCreateInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<SurfaceCapabilities2KHR, DisplayNativeHdrSurfaceCapabilitiesAMD>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PresentInfoKHR, DisplayPresentInfoKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<FormatProperties2, DrmFormatModifierPropertiesListEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<FenceCreateInfo, ExportFenceCreateInfo>{ enum { value = true }; };
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <> struct isStructureChainValid<FenceCreateInfo, ExportFenceWin32HandleInfoKHR>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  template <> struct isStructureChainValid<MemoryAllocateInfo, ExportMemoryAllocateInfo>{ enum { value = true }; };
+  template <> struct isStructureChainValid<MemoryAllocateInfo, ExportMemoryAllocateInfoNV>{ enum { value = true }; };
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <> struct isStructureChainValid<MemoryAllocateInfo, ExportMemoryWin32HandleInfoKHR>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <> struct isStructureChainValid<MemoryAllocateInfo, ExportMemoryWin32HandleInfoNV>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  template <> struct isStructureChainValid<SemaphoreCreateInfo, ExportSemaphoreCreateInfo>{ enum { value = true }; };
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <> struct isStructureChainValid<SemaphoreCreateInfo, ExportSemaphoreWin32HandleInfoKHR>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  template <> struct isStructureChainValid<ImageCreateInfo, ExternalFormatANDROID>{ enum { value = true }; };
+  template <> struct isStructureChainValid<SamplerYcbcrConversionCreateInfo, ExternalFormatANDROID>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+  template <> struct isStructureChainValid<ImageFormatProperties2, ExternalImageFormatProperties>{ enum { value = true }; };
+  template <> struct isStructureChainValid<BufferCreateInfo, ExternalMemoryBufferCreateInfo>{ enum { value = true }; };
+  template <> struct isStructureChainValid<ImageCreateInfo, ExternalMemoryImageCreateInfo>{ enum { value = true }; };
+  template <> struct isStructureChainValid<ImageCreateInfo, ExternalMemoryImageCreateInfoNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<ImageFormatProperties2, FilterCubicImageViewImageFormatPropertiesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<FramebufferCreateInfo, FramebufferAttachmentsCreateInfoKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<ImageCreateInfo, ImageDrmFormatModifierExplicitCreateInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<ImageCreateInfo, ImageDrmFormatModifierListCreateInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<ImageCreateInfo, ImageFormatListCreateInfoKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<SwapchainCreateInfoKHR, ImageFormatListCreateInfoKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceImageFormatInfo2, ImageFormatListCreateInfoKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<ImageMemoryRequirementsInfo2, ImagePlaneMemoryRequirementsInfo>{ enum { value = true }; };
+  template <> struct isStructureChainValid<ImageCreateInfo, ImageStencilUsageCreateInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceImageFormatInfo2, ImageStencilUsageCreateInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<ImageCreateInfo, ImageSwapchainCreateInfoKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<ImageViewCreateInfo, ImageViewASTCDecodeModeEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<ImageViewCreateInfo, ImageViewUsageCreateInfo>{ enum { value = true }; };
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  template <> struct isStructureChainValid<MemoryAllocateInfo, ImportAndroidHardwareBufferInfoANDROID>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+  template <> struct isStructureChainValid<MemoryAllocateInfo, ImportMemoryFdInfoKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<MemoryAllocateInfo, ImportMemoryHostPointerInfoEXT>{ enum { value = true }; };
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <> struct isStructureChainValid<MemoryAllocateInfo, ImportMemoryWin32HandleInfoKHR>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <> struct isStructureChainValid<MemoryAllocateInfo, ImportMemoryWin32HandleInfoNV>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  template <> struct isStructureChainValid<MemoryAllocateInfo, MemoryAllocateFlagsInfo>{ enum { value = true }; };
+  template <> struct isStructureChainValid<MemoryAllocateInfo, MemoryDedicatedAllocateInfo>{ enum { value = true }; };
+  template <> struct isStructureChainValid<MemoryRequirements2, MemoryDedicatedRequirements>{ enum { value = true }; };
+  template <> struct isStructureChainValid<MemoryAllocateInfo, MemoryOpaqueCaptureAddressAllocateInfoKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<MemoryAllocateInfo, MemoryPriorityAllocateInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<SubmitInfo, PerformanceQuerySubmitInfoKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDevice16BitStorageFeatures>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDevice16BitStorageFeatures>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDevice8BitStorageFeaturesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDevice8BitStorageFeaturesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceASTCDecodeFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceASTCDecodeFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceBlendOperationAdvancedFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceBlendOperationAdvancedFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceBlendOperationAdvancedPropertiesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceBufferDeviceAddressFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceBufferDeviceAddressFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceBufferDeviceAddressFeaturesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceBufferDeviceAddressFeaturesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceCoherentMemoryFeaturesAMD>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceCoherentMemoryFeaturesAMD>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceComputeShaderDerivativesFeaturesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceComputeShaderDerivativesFeaturesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceConditionalRenderingFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceConditionalRenderingFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceConservativeRasterizationPropertiesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceCooperativeMatrixFeaturesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceCooperativeMatrixFeaturesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceCooperativeMatrixPropertiesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceCornerSampledImageFeaturesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceCornerSampledImageFeaturesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceCoverageReductionModeFeaturesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceCoverageReductionModeFeaturesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceDepthClipEnableFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceDepthClipEnableFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceDepthStencilResolvePropertiesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceDescriptorIndexingFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceDescriptorIndexingFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceDescriptorIndexingPropertiesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceDiscardRectanglePropertiesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceDriverPropertiesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceExclusiveScissorFeaturesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceExclusiveScissorFeaturesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceImageFormatInfo2, PhysicalDeviceExternalImageFormatInfo>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceExternalMemoryHostPropertiesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceFloatControlsPropertiesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceFragmentDensityMapFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceFragmentDensityMapFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceFragmentDensityMapPropertiesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceFragmentShaderBarycentricFeaturesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceFragmentShaderBarycentricFeaturesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceFragmentShaderInterlockFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceFragmentShaderInterlockFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceHostQueryResetFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceHostQueryResetFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceIDProperties>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceImageFormatInfo2, PhysicalDeviceImageDrmFormatModifierInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceImageFormatInfo2, PhysicalDeviceImageViewImageFormatInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceImagelessFramebufferFeaturesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceImagelessFramebufferFeaturesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceIndexTypeUint8FeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceIndexTypeUint8FeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceInlineUniformBlockFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceInlineUniformBlockFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceInlineUniformBlockPropertiesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceLineRasterizationFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceLineRasterizationFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceLineRasterizationPropertiesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceMaintenance3Properties>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceMemoryProperties2, PhysicalDeviceMemoryBudgetPropertiesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceMemoryPriorityFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceMemoryPriorityFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceMeshShaderFeaturesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceMeshShaderFeaturesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceMeshShaderPropertiesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceMultiviewFeatures>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceMultiviewFeatures>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceMultiviewProperties>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDevicePCIBusInfoPropertiesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDevicePerformanceQueryFeaturesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDevicePerformanceQueryFeaturesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDevicePerformanceQueryPropertiesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDevicePointClippingProperties>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceProtectedMemoryFeatures>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceProtectedMemoryFeatures>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceProtectedMemoryProperties>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDevicePushDescriptorPropertiesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceRayTracingPropertiesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceRepresentativeFragmentTestFeaturesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceRepresentativeFragmentTestFeaturesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceSampleLocationsPropertiesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceSamplerFilterMinmaxPropertiesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceSamplerYcbcrConversionFeatures>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceSamplerYcbcrConversionFeatures>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceScalarBlockLayoutFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceScalarBlockLayoutFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderAtomicInt64FeaturesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderAtomicInt64FeaturesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderClockFeaturesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderClockFeaturesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceShaderCoreProperties2AMD>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceShaderCorePropertiesAMD>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderDrawParametersFeatures>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderDrawParametersFeatures>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderFloat16Int8FeaturesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderFloat16Int8FeaturesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderImageFootprintFeaturesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderImageFootprintFeaturesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderSMBuiltinsFeaturesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderSMBuiltinsFeaturesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceShaderSMBuiltinsPropertiesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShadingRateImageFeaturesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShadingRateImageFeaturesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceShadingRateImagePropertiesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceSubgroupProperties>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceSubgroupSizeControlFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceSubgroupSizeControlFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceSubgroupSizeControlPropertiesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceTexelBufferAlignmentFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceTexelBufferAlignmentFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceTexelBufferAlignmentPropertiesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceTimelineSemaphoreFeaturesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceTimelineSemaphoreFeaturesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceTimelineSemaphorePropertiesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceTransformFeedbackFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceTransformFeedbackFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceTransformFeedbackPropertiesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceVariablePointersFeatures>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceVariablePointersFeatures>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceVertexAttributeDivisorFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceVertexAttributeDivisorFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceVertexAttributeDivisorPropertiesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceVulkanMemoryModelFeaturesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceVulkanMemoryModelFeaturesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceYcbcrImageArraysFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceYcbcrImageArraysFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PipelineColorBlendStateCreateInfo, PipelineColorBlendAdvancedStateCreateInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<GraphicsPipelineCreateInfo, PipelineCompilerControlCreateInfoAMD>{ enum { value = true }; };
+  template <> struct isStructureChainValid<ComputePipelineCreateInfo, PipelineCompilerControlCreateInfoAMD>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PipelineMultisampleStateCreateInfo, PipelineCoverageModulationStateCreateInfoNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PipelineMultisampleStateCreateInfo, PipelineCoverageReductionStateCreateInfoNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PipelineMultisampleStateCreateInfo, PipelineCoverageToColorStateCreateInfoNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<GraphicsPipelineCreateInfo, PipelineCreationFeedbackCreateInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<ComputePipelineCreateInfo, PipelineCreationFeedbackCreateInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<RayTracingPipelineCreateInfoNV, PipelineCreationFeedbackCreateInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<GraphicsPipelineCreateInfo, PipelineDiscardRectangleStateCreateInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PipelineRasterizationStateCreateInfo, PipelineRasterizationConservativeStateCreateInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PipelineRasterizationStateCreateInfo, PipelineRasterizationDepthClipStateCreateInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PipelineRasterizationStateCreateInfo, PipelineRasterizationLineStateCreateInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PipelineRasterizationStateCreateInfo, PipelineRasterizationStateRasterizationOrderAMD>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PipelineRasterizationStateCreateInfo, PipelineRasterizationStateStreamCreateInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<GraphicsPipelineCreateInfo, PipelineRepresentativeFragmentTestStateCreateInfoNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PipelineMultisampleStateCreateInfo, PipelineSampleLocationsStateCreateInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PipelineShaderStageCreateInfo, PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PipelineTessellationStateCreateInfo, PipelineTessellationDomainOriginStateCreateInfo>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PipelineVertexInputStateCreateInfo, PipelineVertexInputDivisorStateCreateInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PipelineViewportStateCreateInfo, PipelineViewportCoarseSampleOrderStateCreateInfoNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PipelineViewportStateCreateInfo, PipelineViewportExclusiveScissorStateCreateInfoNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PipelineViewportStateCreateInfo, PipelineViewportShadingRateImageStateCreateInfoNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PipelineViewportStateCreateInfo, PipelineViewportSwizzleStateCreateInfoNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PipelineViewportStateCreateInfo, PipelineViewportWScalingStateCreateInfoNV>{ enum { value = true }; };
+#ifdef VK_USE_PLATFORM_GGP
+  template <> struct isStructureChainValid<PresentInfoKHR, PresentFrameTokenGGP>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_GGP*/
+  template <> struct isStructureChainValid<PresentInfoKHR, PresentRegionsKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PresentInfoKHR, PresentTimesInfoGOOGLE>{ enum { value = true }; };
+  template <> struct isStructureChainValid<SubmitInfo, ProtectedSubmitInfo>{ enum { value = true }; };
+  template <> struct isStructureChainValid<QueryPoolCreateInfo, QueryPoolPerformanceCreateInfoKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<QueueFamilyProperties2, QueueFamilyCheckpointPropertiesNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<RenderPassBeginInfo, RenderPassAttachmentBeginInfoKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<RenderPassCreateInfo, RenderPassFragmentDensityMapCreateInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<RenderPassCreateInfo2KHR, RenderPassFragmentDensityMapCreateInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<RenderPassCreateInfo, RenderPassInputAttachmentAspectCreateInfo>{ enum { value = true }; };
+  template <> struct isStructureChainValid<RenderPassCreateInfo, RenderPassMultiviewCreateInfo>{ enum { value = true }; };
+  template <> struct isStructureChainValid<RenderPassBeginInfo, RenderPassSampleLocationsBeginInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<ImageMemoryBarrier, SampleLocationsInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<SamplerCreateInfo, SamplerReductionModeCreateInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<ImageFormatProperties2, SamplerYcbcrConversionImageFormatProperties>{ enum { value = true }; };
+  template <> struct isStructureChainValid<SamplerCreateInfo, SamplerYcbcrConversionInfo>{ enum { value = true }; };
+  template <> struct isStructureChainValid<ImageViewCreateInfo, SamplerYcbcrConversionInfo>{ enum { value = true }; };
+  template <> struct isStructureChainValid<SemaphoreCreateInfo, SemaphoreTypeCreateInfoKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<PhysicalDeviceExternalSemaphoreInfo, SemaphoreTypeCreateInfoKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<ShaderModuleCreateInfo, ShaderModuleValidationCacheCreateInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<SurfaceCapabilities2KHR, SharedPresentSurfaceCapabilitiesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<SubpassDescription2KHR, SubpassDescriptionDepthStencilResolveKHR>{ enum { value = true }; };
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <> struct isStructureChainValid<SurfaceCapabilities2KHR, SurfaceCapabilitiesFullScreenExclusiveEXT>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <> struct isStructureChainValid<PhysicalDeviceSurfaceInfo2KHR, SurfaceFullScreenExclusiveInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<SwapchainCreateInfoKHR, SurfaceFullScreenExclusiveInfoEXT>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <> struct isStructureChainValid<PhysicalDeviceSurfaceInfo2KHR, SurfaceFullScreenExclusiveWin32InfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<SwapchainCreateInfoKHR, SurfaceFullScreenExclusiveWin32InfoEXT>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  template <> struct isStructureChainValid<SurfaceCapabilities2KHR, SurfaceProtectedCapabilitiesKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<SwapchainCreateInfoKHR, SwapchainCounterCreateInfoEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<SwapchainCreateInfoKHR, SwapchainDisplayNativeHdrCreateInfoAMD>{ enum { value = true }; };
+  template <> struct isStructureChainValid<ImageFormatProperties2, TextureLODGatherFormatPropertiesAMD>{ enum { value = true }; };
+  template <> struct isStructureChainValid<SubmitInfo, TimelineSemaphoreSubmitInfoKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<BindSparseInfo, TimelineSemaphoreSubmitInfoKHR>{ enum { value = true }; };
+  template <> struct isStructureChainValid<InstanceCreateInfo, ValidationFeaturesEXT>{ enum { value = true }; };
+  template <> struct isStructureChainValid<InstanceCreateInfo, ValidationFlagsEXT>{ enum { value = true }; };
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <> struct isStructureChainValid<SubmitInfo, Win32KeyedMutexAcquireReleaseInfoKHR>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <> struct isStructureChainValid<SubmitInfo, Win32KeyedMutexAcquireReleaseInfoNV>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  template <> struct isStructureChainValid<WriteDescriptorSet, WriteDescriptorSetAccelerationStructureNV>{ enum { value = true }; };
+  template <> struct isStructureChainValid<WriteDescriptorSet, WriteDescriptorSetInlineUniformBlockEXT>{ enum { value = true }; };
+
+#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+  class DynamicLoader
+  {
+  public:
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    DynamicLoader() VULKAN_HPP_NOEXCEPT : m_success( false )
+#else
+    DynamicLoader() : m_success( false )
+#endif
+    {
+#if defined(__linux__)
+      m_library = dlopen( "libvulkan.so", RTLD_NOW | RTLD_LOCAL );
+#elif defined(__APPLE__)
+      m_library = dlopen( "libvulkan.dylib", RTLD_NOW | RTLD_LOCAL );
+#elif defined(_WIN32)
+      m_library = LoadLibrary( TEXT( "vulkan-1.dll" ) );
+#else
+      assert( false && "unsupported platform" );
+#endif
+
+      m_success = m_library != 0;
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+      if ( !m_success )
+      {
+        // NOTE there should be an InitializationFailedError, but msvc insists that the symbol does not exist within the scope of this function.
+        throw std::runtime_error( "Failed to load vulkan library!" );
+      }
+#endif
+    }
+
+    ~DynamicLoader() VULKAN_HPP_NOEXCEPT
+    {
+      if ( m_library )
+      {
+#if defined(__linux__) || defined(__APPLE__)
+        dlclose( m_library );
+#elif defined(_WIN32)
+        FreeLibrary( m_library );
+#endif
+      }
+    }
+
+    template <typename T>
+    T getProcAddress( const char* function ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined(__linux__) || defined(__APPLE__)
+      return (T)dlsym( m_library, function );
+#elif defined(_WIN32)
+      return (T)GetProcAddress( m_library, function );
+#endif
+    }
+
+    bool success() const VULKAN_HPP_NOEXCEPT { return m_success; }
+
+  private:
+    bool m_success;
+#if defined(__linux__) || defined(__APPLE__)
+    void *m_library;
+#elif defined(_WIN32)
+    HMODULE m_library;
+#else
+#error unsupported platform
+#endif
+  };
+#endif
+
+  class DispatchLoaderDynamic
+  {
+  public:
+    PFN_vkCreateInstance vkCreateInstance = 0;
+    PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0;
+    PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0;
+    PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0;
+    PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0;
+    PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0;
+    PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0;
+    PFN_vkCmdBeginQuery vkCmdBeginQuery = 0;
+    PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0;
+    PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0;
+    PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0;
+    PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0;
+    PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0;
+    PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0;
+    PFN_vkCmdBindPipeline vkCmdBindPipeline = 0;
+    PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0;
+    PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0;
+    PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0;
+    PFN_vkCmdBlitImage vkCmdBlitImage = 0;
+    PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0;
+    PFN_vkCmdClearAttachments vkCmdClearAttachments = 0;
+    PFN_vkCmdClearColorImage vkCmdClearColorImage = 0;
+    PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0;
+    PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0;
+    PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0;
+    PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0;
+    PFN_vkCmdCopyImage vkCmdCopyImage = 0;
+    PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0;
+    PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0;
+    PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0;
+    PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0;
+    PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0;
+    PFN_vkCmdDispatch vkCmdDispatch = 0;
+    PFN_vkCmdDispatchBase vkCmdDispatchBase = 0;
+    PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0;
+    PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0;
+    PFN_vkCmdDraw vkCmdDraw = 0;
+    PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0;
+    PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0;
+    PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0;
+    PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0;
+    PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0;
+    PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0;
+    PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0;
+    PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0;
+    PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0;
+    PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0;
+    PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0;
+    PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0;
+    PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0;
+    PFN_vkCmdEndQuery vkCmdEndQuery = 0;
+    PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0;
+    PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0;
+    PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0;
+    PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0;
+    PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0;
+    PFN_vkCmdFillBuffer vkCmdFillBuffer = 0;
+    PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0;
+    PFN_vkCmdNextSubpass vkCmdNextSubpass = 0;
+    PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0;
+    PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0;
+    PFN_vkCmdProcessCommandsNVX vkCmdProcessCommandsNVX = 0;
+    PFN_vkCmdPushConstants vkCmdPushConstants = 0;
+    PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0;
+    PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0;
+    PFN_vkCmdReserveSpaceForCommandsNVX vkCmdReserveSpaceForCommandsNVX = 0;
+    PFN_vkCmdResetEvent vkCmdResetEvent = 0;
+    PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0;
+    PFN_vkCmdResolveImage vkCmdResolveImage = 0;
+    PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0;
+    PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0;
+    PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0;
+    PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0;
+    PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0;
+    PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0;
+    PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0;
+    PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0;
+    PFN_vkCmdSetEvent vkCmdSetEvent = 0;
+    PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0;
+    PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0;
+    PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0;
+    PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0;
+    PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0;
+    PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0;
+    PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0;
+    PFN_vkCmdSetScissor vkCmdSetScissor = 0;
+    PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0;
+    PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0;
+    PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0;
+    PFN_vkCmdSetViewport vkCmdSetViewport = 0;
+    PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0;
+    PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0;
+    PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0;
+    PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0;
+    PFN_vkCmdWaitEvents vkCmdWaitEvents = 0;
+    PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0;
+    PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0;
+    PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0;
+    PFN_vkEndCommandBuffer vkEndCommandBuffer = 0;
+    PFN_vkResetCommandBuffer vkResetCommandBuffer = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0;
+    PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0;
+    PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0;
+    PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0;
+    PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0;
+    PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0;
+    PFN_vkAllocateMemory vkAllocateMemory = 0;
+    PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0;
+    PFN_vkBindBufferMemory vkBindBufferMemory = 0;
+    PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0;
+    PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0;
+    PFN_vkBindImageMemory vkBindImageMemory = 0;
+    PFN_vkBindImageMemory2 vkBindImageMemory2 = 0;
+    PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0;
+    PFN_vkCompileDeferredNV vkCompileDeferredNV = 0;
+    PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0;
+    PFN_vkCreateBuffer vkCreateBuffer = 0;
+    PFN_vkCreateBufferView vkCreateBufferView = 0;
+    PFN_vkCreateCommandPool vkCreateCommandPool = 0;
+    PFN_vkCreateComputePipelines vkCreateComputePipelines = 0;
+    PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0;
+    PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0;
+    PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0;
+    PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0;
+    PFN_vkCreateEvent vkCreateEvent = 0;
+    PFN_vkCreateFence vkCreateFence = 0;
+    PFN_vkCreateFramebuffer vkCreateFramebuffer = 0;
+    PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0;
+    PFN_vkCreateImage vkCreateImage = 0;
+    PFN_vkCreateImageView vkCreateImageView = 0;
+    PFN_vkCreateIndirectCommandsLayoutNVX vkCreateIndirectCommandsLayoutNVX = 0;
+    PFN_vkCreateObjectTableNVX vkCreateObjectTableNVX = 0;
+    PFN_vkCreatePipelineCache vkCreatePipelineCache = 0;
+    PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0;
+    PFN_vkCreateQueryPool vkCreateQueryPool = 0;
+    PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0;
+    PFN_vkCreateRenderPass vkCreateRenderPass = 0;
+    PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0;
+    PFN_vkCreateSampler vkCreateSampler = 0;
+    PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0;
+    PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0;
+    PFN_vkCreateSemaphore vkCreateSemaphore = 0;
+    PFN_vkCreateShaderModule vkCreateShaderModule = 0;
+    PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0;
+    PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0;
+    PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0;
+    PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0;
+    PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0;
+    PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0;
+    PFN_vkDestroyBuffer vkDestroyBuffer = 0;
+    PFN_vkDestroyBufferView vkDestroyBufferView = 0;
+    PFN_vkDestroyCommandPool vkDestroyCommandPool = 0;
+    PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0;
+    PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0;
+    PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0;
+    PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0;
+    PFN_vkDestroyDevice vkDestroyDevice = 0;
+    PFN_vkDestroyEvent vkDestroyEvent = 0;
+    PFN_vkDestroyFence vkDestroyFence = 0;
+    PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0;
+    PFN_vkDestroyImage vkDestroyImage = 0;
+    PFN_vkDestroyImageView vkDestroyImageView = 0;
+    PFN_vkDestroyIndirectCommandsLayoutNVX vkDestroyIndirectCommandsLayoutNVX = 0;
+    PFN_vkDestroyObjectTableNVX vkDestroyObjectTableNVX = 0;
+    PFN_vkDestroyPipeline vkDestroyPipeline = 0;
+    PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0;
+    PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0;
+    PFN_vkDestroyQueryPool vkDestroyQueryPool = 0;
+    PFN_vkDestroyRenderPass vkDestroyRenderPass = 0;
+    PFN_vkDestroySampler vkDestroySampler = 0;
+    PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0;
+    PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0;
+    PFN_vkDestroySemaphore vkDestroySemaphore = 0;
+    PFN_vkDestroyShaderModule vkDestroyShaderModule = 0;
+    PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0;
+    PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0;
+    PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0;
+    PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0;
+    PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0;
+    PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0;
+    PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0;
+    PFN_vkFreeMemory vkFreeMemory = 0;
+    PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0;
+    PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0;
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+    PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0;
+    PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0;
+    PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0;
+    PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0;
+    PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0;
+    PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0;
+    PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0;
+    PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0;
+    PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0;
+    PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0;
+    PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0;
+    PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0;
+    PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0;
+    PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0;
+    PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0;
+    PFN_vkGetDeviceQueue vkGetDeviceQueue = 0;
+    PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0;
+    PFN_vkGetEventStatus vkGetEventStatus = 0;
+    PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0;
+    PFN_vkGetFenceStatus vkGetFenceStatus = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0;
+    PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0;
+    PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0;
+    PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0;
+    PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0;
+    PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0;
+    PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0;
+    PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0;
+    PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0;
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+    PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0;
+    PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0;
+    PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0;
+    PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0;
+    PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0;
+    PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0;
+    PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0;
+    PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0;
+    PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0;
+    PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0;
+    PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0;
+    PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0;
+    PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0;
+    PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0;
+    PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0;
+    PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0;
+    PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0;
+    PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0;
+    PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0;
+    PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0;
+    PFN_vkMapMemory vkMapMemory = 0;
+    PFN_vkMergePipelineCaches vkMergePipelineCaches = 0;
+    PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0;
+    PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0;
+    PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0;
+    PFN_vkRegisterObjectsNVX vkRegisterObjectsNVX = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0;
+    PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0;
+    PFN_vkResetCommandPool vkResetCommandPool = 0;
+    PFN_vkResetDescriptorPool vkResetDescriptorPool = 0;
+    PFN_vkResetEvent vkResetEvent = 0;
+    PFN_vkResetFences vkResetFences = 0;
+    PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0;
+    PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0;
+    PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0;
+    PFN_vkSetEvent vkSetEvent = 0;
+    PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0;
+    PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0;
+    PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0;
+    PFN_vkTrimCommandPool vkTrimCommandPool = 0;
+    PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0;
+    PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0;
+    PFN_vkUnmapMemory vkUnmapMemory = 0;
+    PFN_vkUnregisterObjectsNVX vkUnregisterObjectsNVX = 0;
+    PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0;
+    PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0;
+    PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0;
+    PFN_vkWaitForFences vkWaitForFences = 0;
+    PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0;
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+    PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0;
+    PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0;
+    PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0;
+    PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0;
+#ifdef VK_USE_PLATFORM_IOS_MVK
+    PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0;
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+#ifdef VK_USE_PLATFORM_FUCHSIA
+    PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+    PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0;
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+#ifdef VK_USE_PLATFORM_METAL_EXT
+    PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0;
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+#ifdef VK_USE_PLATFORM_GGP
+    PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0;
+#endif /*VK_USE_PLATFORM_GGP*/
+#ifdef VK_USE_PLATFORM_VI_NN
+    PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0;
+#endif /*VK_USE_PLATFORM_VI_NN*/
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0;
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0;
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0;
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+    PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0;
+    PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0;
+    PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0;
+    PFN_vkDestroyInstance vkDestroyInstance = 0;
+    PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0;
+    PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0;
+    PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0;
+    PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0;
+    PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0;
+    PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0;
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0;
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+    PFN_vkCreateDevice vkCreateDevice = 0;
+    PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0;
+    PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0;
+    PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0;
+    PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0;
+    PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0;
+    PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0;
+    PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0;
+    PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0;
+    PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0;
+    PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0;
+    PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0;
+    PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0;
+    PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0;
+    PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0;
+    PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0;
+    PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0;
+    PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0;
+    PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0;
+    PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0;
+    PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0;
+    PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0;
+    PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0;
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0;
+    PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0;
+    PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0;
+    PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX = 0;
+    PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0;
+    PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0;
+    PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0;
+    PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0;
+    PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0;
+    PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0;
+    PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0;
+    PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0;
+    PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0;
+    PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0;
+    PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0;
+    PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0;
+    PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0;
+    PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0;
+    PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0;
+    PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0;
+    PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0;
+    PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0;
+    PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0;
+    PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0;
+    PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0;
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0;
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0;
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0;
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0;
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+    PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0;
+    PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0;
+    PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0;
+    PFN_vkQueueBindSparse vkQueueBindSparse = 0;
+    PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0;
+    PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0;
+    PFN_vkQueuePresentKHR vkQueuePresentKHR = 0;
+    PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0;
+    PFN_vkQueueSubmit vkQueueSubmit = 0;
+    PFN_vkQueueWaitIdle vkQueueWaitIdle = 0;
+
+  public:
+    DispatchLoaderDynamic() VULKAN_HPP_NOEXCEPT = default;
+
+#if !defined(VK_NO_PROTOTYPES)
+    // This interface is designed to be used for per-device function pointers in combination with a linked vulkan library.
+    DispatchLoaderDynamic(VULKAN_HPP_NAMESPACE::Instance const& instance, VULKAN_HPP_NAMESPACE::Device const& device) VULKAN_HPP_NOEXCEPT
+    {
+      init(instance, device);
+    }
+
+    // This interface is designed to be used for per-device function pointers in combination with a linked vulkan library.
+    void init(VULKAN_HPP_NAMESPACE::Instance const& instance, VULKAN_HPP_NAMESPACE::Device const& device) VULKAN_HPP_NOEXCEPT
+    {
+      init(static_cast<VkInstance>(instance), ::vkGetInstanceProcAddr, static_cast<VkDevice>(device), device ? ::vkGetDeviceProcAddr : nullptr);
+    }
+#endif // !defined(VK_NO_PROTOTYPES)
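+
+    // Illustrative usage sketch (comment only, assuming handles created elsewhere by the
+    // application): with a linked vulkan library, a dispatcher carrying per-device
+    // function pointers can be constructed directly from the handles, e.g.
+    //   VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic dispatch( instance, device );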
+
+    DispatchLoaderDynamic(PFN_vkGetInstanceProcAddr getInstanceProcAddr) VULKAN_HPP_NOEXCEPT
+    {
+      init(getInstanceProcAddr);
+    }
+
+    void init( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT(getInstanceProcAddr);
+
+      vkGetInstanceProcAddr = getInstanceProcAddr;
+      vkCreateInstance = PFN_vkCreateInstance( vkGetInstanceProcAddr( NULL, "vkCreateInstance" ) );
+      vkEnumerateInstanceExtensionProperties = PFN_vkEnumerateInstanceExtensionProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) );
+      vkEnumerateInstanceLayerProperties = PFN_vkEnumerateInstanceLayerProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) );
+      vkEnumerateInstanceVersion = PFN_vkEnumerateInstanceVersion( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceVersion" ) );
+    }
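+
+    // Note: with a null instance, vkGetInstanceProcAddr can only resolve the global
+    // entry points loaded above (vkCreateInstance and the vkEnumerateInstance* calls);
+    // instance- and device-level pointers are filled in by the init overloads below
+    // once an instance (and optionally a device) exists.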
+
+    // This interface does not require a linked vulkan library.
+    DispatchLoaderDynamic( VkInstance instance, PFN_vkGetInstanceProcAddr getInstanceProcAddr, VkDevice device = VK_NULL_HANDLE, PFN_vkGetDeviceProcAddr getDeviceProcAddr = nullptr ) VULKAN_HPP_NOEXCEPT
+    {
+      init( instance, getInstanceProcAddr, device, getDeviceProcAddr );
+    }
+
+    // This interface does not require a linked vulkan library.
+    void init( VkInstance instance, PFN_vkGetInstanceProcAddr getInstanceProcAddr, VkDevice device = VK_NULL_HANDLE, PFN_vkGetDeviceProcAddr /*getDeviceProcAddr*/ = nullptr ) VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT(instance && getInstanceProcAddr);
+      vkGetInstanceProcAddr = getInstanceProcAddr;
+      init( VULKAN_HPP_NAMESPACE::Instance(instance) );
+      if (device) {
+        init( VULKAN_HPP_NAMESPACE::Device(device) );
+      }
+    }
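+
+    // Illustrative usage sketch (comment only), assuming the application obtained
+    // vkGetInstanceProcAddr itself (for example from a dynamically loaded vulkan
+    // library) rather than linking against one:
+    //   VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic dispatch;
+    //   dispatch.init( getInstanceProcAddr );             // global entry points only
+    //   /* ... create a VkInstance via dispatch.vkCreateInstance ... */
+    //   dispatch.init( instance, getInstanceProcAddr );   // instance/device entry points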
+
+    void init( VULKAN_HPP_NAMESPACE::Instance instanceCpp ) VULKAN_HPP_NOEXCEPT
+    {
+      VkInstance instance = static_cast<VkInstance>(instanceCpp);
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+      vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+      vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) );
+      vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) );
+      vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) );
+      vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) );
+#ifdef VK_USE_PLATFORM_IOS_MVK
+      vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) );
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+#ifdef VK_USE_PLATFORM_FUCHSIA
+      vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+      vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) );
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+#ifdef VK_USE_PLATFORM_METAL_EXT
+      vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) );
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+#ifdef VK_USE_PLATFORM_GGP
+      vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) );
+#endif /*VK_USE_PLATFORM_GGP*/
+#ifdef VK_USE_PLATFORM_VI_NN
+      vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) );
+#endif /*VK_USE_PLATFORM_VI_NN*/
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+      vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) );
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_XCB_KHR
+      vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) );
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+      vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) );
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+      vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) );
+      vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) );
+      vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) );
+      vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) );
+      vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) );
+      vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) );
+      vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) );
+      vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) );
+      vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) );
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+      vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) );
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+      vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) );
+      vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) );
+      vkEnumerateDeviceExtensionProperties = PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) );
+      vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) );
+      vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) );
+      vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) );
+      vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) );
+      vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) );
+      vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) );
+      vkGetDisplayPlaneSupportedDisplaysKHR = PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) );
+      vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) );
+      vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) );
+      vkGetPhysicalDeviceDisplayPlaneProperties2KHR = PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) );
+      vkGetPhysicalDeviceDisplayPlanePropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) );
+      vkGetPhysicalDeviceDisplayProperties2KHR = PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) );
+      vkGetPhysicalDeviceDisplayPropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) );
+      vkGetPhysicalDeviceExternalBufferProperties = PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) );
+      vkGetPhysicalDeviceExternalBufferPropertiesKHR = PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) );
+      vkGetPhysicalDeviceExternalFenceProperties = PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) );
+      vkGetPhysicalDeviceExternalFencePropertiesKHR = PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) );
+      vkGetPhysicalDeviceExternalImageFormatPropertiesNV = PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) );
+      vkGetPhysicalDeviceExternalSemaphoreProperties = PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) );
+      vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) );
+      vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) );
+      vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) );
+      vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) );
+      vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) );
+      vkGetPhysicalDeviceFormatProperties2 = PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) );
+      vkGetPhysicalDeviceFormatProperties2KHR = PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) );
+      vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX = PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX" ) );
+      vkGetPhysicalDeviceImageFormatProperties = PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) );
+      vkGetPhysicalDeviceImageFormatProperties2 = PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) );
+      vkGetPhysicalDeviceImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) );
+      vkGetPhysicalDeviceMemoryProperties = PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) );
+      vkGetPhysicalDeviceMemoryProperties2 = PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) );
+      vkGetPhysicalDeviceMemoryProperties2KHR = PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) );
+      vkGetPhysicalDeviceMultisamplePropertiesEXT = PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) );
+      vkGetPhysicalDevicePresentRectanglesKHR = PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) );
+      vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) );
+      vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) );
+      vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) );
+      vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) );
+      vkGetPhysicalDeviceQueueFamilyProperties = PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) );
+      vkGetPhysicalDeviceQueueFamilyProperties2 = PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) );
+      vkGetPhysicalDeviceQueueFamilyProperties2KHR = PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) );
+      vkGetPhysicalDeviceSparseImageFormatProperties = PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) );
+      vkGetPhysicalDeviceSparseImageFormatProperties2 = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) );
+      vkGetPhysicalDeviceSparseImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) );
+      vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) );
+      vkGetPhysicalDeviceSurfaceCapabilities2EXT = PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) );
+      vkGetPhysicalDeviceSurfaceCapabilities2KHR = PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) );
+      vkGetPhysicalDeviceSurfaceCapabilitiesKHR = PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) );
+      vkGetPhysicalDeviceSurfaceFormats2KHR = PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) );
+      vkGetPhysicalDeviceSurfaceFormatsKHR = PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetPhysicalDeviceSurfacePresentModes2EXT = PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkGetPhysicalDeviceSurfacePresentModesKHR = PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) );
+      vkGetPhysicalDeviceSurfaceSupportKHR = PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) );
+      vkGetPhysicalDeviceToolPropertiesEXT = PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) );
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+      vkGetPhysicalDeviceWaylandPresentationSupportKHR = PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) );
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetPhysicalDeviceWin32PresentationSupportKHR = PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_XCB_KHR
+      vkGetPhysicalDeviceXcbPresentationSupportKHR = PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) );
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+      vkGetPhysicalDeviceXlibPresentationSupportKHR = PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) );
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+      vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) );
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+      vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) );
+      vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetInstanceProcAddr( instance, "vkBeginCommandBuffer" ) );
+      vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginConditionalRenderingEXT" ) );
+      vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginDebugUtilsLabelEXT" ) );
+      vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetInstanceProcAddr( instance, "vkCmdBeginQuery" ) );
+      vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginQueryIndexedEXT" ) );
+      vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass" ) );
+      vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2KHR" ) );
+      vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginTransformFeedbackEXT" ) );
+      vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) );
+      vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer" ) );
+      vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetInstanceProcAddr( instance, "vkCmdBindPipeline" ) );
+      vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetInstanceProcAddr( instance, "vkCmdBindShadingRateImageNV" ) );
+      vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) );
+      vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers" ) );
+      vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetInstanceProcAddr( instance, "vkCmdBlitImage" ) );
+      vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureNV" ) );
+      vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetInstanceProcAddr( instance, "vkCmdClearAttachments" ) );
+      vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetInstanceProcAddr( instance, "vkCmdClearColorImage" ) );
+      vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetInstanceProcAddr( instance, "vkCmdClearDepthStencilImage" ) );
+      vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureNV" ) );
+      vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer" ) );
+      vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage" ) );
+      vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetInstanceProcAddr( instance, "vkCmdCopyImage" ) );
+      vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer" ) );
+      vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetInstanceProcAddr( instance, "vkCmdCopyQueryPoolResults" ) );
+      vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerBeginEXT" ) );
+      vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) );
+      vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerInsertEXT" ) );
+      vkCmdDispatch = PFN_vkCmdDispatch( vkGetInstanceProcAddr( instance, "vkCmdDispatch" ) );
+      vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetInstanceProcAddr( instance, "vkCmdDispatchBase" ) );
+      vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetInstanceProcAddr( instance, "vkCmdDispatchBaseKHR" ) );
+      vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetInstanceProcAddr( instance, "vkCmdDispatchIndirect" ) );
+      vkCmdDraw = PFN_vkCmdDraw( vkGetInstanceProcAddr( instance, "vkCmdDraw" ) );
+      vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexed" ) );
+      vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirect" ) );
+      vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountAMD" ) );
+      vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) );
+      vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirect" ) );
+      vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) );
+      vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) );
+      vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountKHR" ) );
+      vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) );
+      vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectNV" ) );
+      vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksNV" ) );
+      vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdEndConditionalRenderingEXT" ) );
+      vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdEndDebugUtilsLabelEXT" ) );
+      vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetInstanceProcAddr( instance, "vkCmdEndQuery" ) );
+      vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) );
+      vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) );
+      vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) );
+      vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) );
+      vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) );
+      vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetInstanceProcAddr( instance, "vkCmdFillBuffer" ) );
+      vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) );
+      vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) );
+      vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2KHR" ) );
+      vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) );
+      vkCmdProcessCommandsNVX = PFN_vkCmdProcessCommandsNVX( vkGetInstanceProcAddr( instance, "vkCmdProcessCommandsNVX" ) );
+      vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) );
+      vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) );
+      vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) );
+      vkCmdReserveSpaceForCommandsNVX = PFN_vkCmdReserveSpaceForCommandsNVX( vkGetInstanceProcAddr( instance, "vkCmdReserveSpaceForCommandsNVX" ) );
+      vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, "vkCmdResetEvent" ) );
+      vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) );
+      vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) );
+      vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetInstanceProcAddr( instance, "vkCmdSetBlendConstants" ) );
+      vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) );
+      vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoarseSampleOrderNV" ) );
+      vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias" ) );
+      vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBounds" ) );
+      vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMask" ) );
+      vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMaskKHR" ) );
+      vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) );
+      vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetInstanceProcAddr( instance, "vkCmdSetEvent" ) );
+      vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) );
+      vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEXT" ) );
+      vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetInstanceProcAddr( instance, "vkCmdSetLineWidth" ) );
+      vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceMarkerINTEL" ) );
+      vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceOverrideINTEL" ) );
+      vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceStreamMarkerINTEL" ) );
+      vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) );
+      vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetInstanceProcAddr( instance, "vkCmdSetScissor" ) );
+      vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilCompareMask" ) );
+      vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetInstanceProcAddr( instance, "vkCmdSetStencilReference" ) );
+      vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilWriteMask" ) );
+      vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetInstanceProcAddr( instance, "vkCmdSetViewport" ) );
+      vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) );
+      vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingNV" ) );
+      vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) );
+      vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdateBuffer" ) );
+      vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) );
+      vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
+      vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) );
+      vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) );
+      vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetInstanceProcAddr( instance, "vkEndCommandBuffer" ) );
+      vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetInstanceProcAddr( instance, "vkResetCommandBuffer" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkAcquireFullScreenExclusiveModeEXT" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImage2KHR" ) );
+      vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImageKHR" ) );
+      vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkAcquirePerformanceConfigurationINTEL" ) );
+      vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkAcquireProfilingLockKHR" ) );
+      vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetInstanceProcAddr( instance, "vkAllocateCommandBuffers" ) );
+      vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) );
+      vkAllocateMemory = PFN_vkAllocateMemory( vkGetInstanceProcAddr( instance, "vkAllocateMemory" ) );
+      vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryNV" ) );
+      vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetInstanceProcAddr( instance, "vkBindBufferMemory" ) );
+      vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2" ) );
+      vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2KHR" ) );
+      vkBindImageMemory = PFN_vkBindImageMemory( vkGetInstanceProcAddr( instance, "vkBindImageMemory" ) );
+      vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) );
+      vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindImageMemory2KHR" ) );
+      vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetInstanceProcAddr( instance, "vkCompileDeferredNV" ) );
+      vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureNV" ) );
+      vkCreateBuffer = PFN_vkCreateBuffer( vkGetInstanceProcAddr( instance, "vkCreateBuffer" ) );
+      vkCreateBufferView = PFN_vkCreateBufferView( vkGetInstanceProcAddr( instance, "vkCreateBufferView" ) );
+      vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetInstanceProcAddr( instance, "vkCreateCommandPool" ) );
+      vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetInstanceProcAddr( instance, "vkCreateComputePipelines" ) );
+      vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetInstanceProcAddr( instance, "vkCreateDescriptorPool" ) );
+      vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkCreateDescriptorSetLayout" ) );
+      vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) );
+      vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplateKHR" ) );
+      vkCreateEvent = PFN_vkCreateEvent( vkGetInstanceProcAddr( instance, "vkCreateEvent" ) );
+      vkCreateFence = PFN_vkCreateFence( vkGetInstanceProcAddr( instance, "vkCreateFence" ) );
+      vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetInstanceProcAddr( instance, "vkCreateFramebuffer" ) );
+      vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetInstanceProcAddr( instance, "vkCreateGraphicsPipelines" ) );
+      vkCreateImage = PFN_vkCreateImage( vkGetInstanceProcAddr( instance, "vkCreateImage" ) );
+      vkCreateImageView = PFN_vkCreateImageView( vkGetInstanceProcAddr( instance, "vkCreateImageView" ) );
+      vkCreateIndirectCommandsLayoutNVX = PFN_vkCreateIndirectCommandsLayoutNVX( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNVX" ) );
+      vkCreateObjectTableNVX = PFN_vkCreateObjectTableNVX( vkGetInstanceProcAddr( instance, "vkCreateObjectTableNVX" ) );
+      vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetInstanceProcAddr( instance, "vkCreatePipelineCache" ) );
+      vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetInstanceProcAddr( instance, "vkCreatePipelineLayout" ) );
+      vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetInstanceProcAddr( instance, "vkCreateQueryPool" ) );
+      vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesNV" ) );
+      vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetInstanceProcAddr( instance, "vkCreateRenderPass" ) );
+      vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2KHR" ) );
+      vkCreateSampler = PFN_vkCreateSampler( vkGetInstanceProcAddr( instance, "vkCreateSampler" ) );
+      vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) );
+      vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversionKHR" ) );
+      vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetInstanceProcAddr( instance, "vkCreateSemaphore" ) );
+      vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetInstanceProcAddr( instance, "vkCreateShaderModule" ) );
+      vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetInstanceProcAddr( instance, "vkCreateSharedSwapchainsKHR" ) );
+      vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetInstanceProcAddr( instance, "vkCreateSwapchainKHR" ) );
+      vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkCreateValidationCacheEXT" ) );
+      vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectNameEXT" ) );
+      vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectTagEXT" ) );
+      vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureNV" ) );
+      vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetInstanceProcAddr( instance, "vkDestroyBuffer" ) );
+      vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetInstanceProcAddr( instance, "vkDestroyBufferView" ) );
+      vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetInstanceProcAddr( instance, "vkDestroyCommandPool" ) );
+      vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorPool" ) );
+      vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorSetLayout" ) );
+      vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) );
+      vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplateKHR" ) );
+      vkDestroyDevice = PFN_vkDestroyDevice( vkGetInstanceProcAddr( instance, "vkDestroyDevice" ) );
+      vkDestroyEvent = PFN_vkDestroyEvent( vkGetInstanceProcAddr( instance, "vkDestroyEvent" ) );
+      vkDestroyFence = PFN_vkDestroyFence( vkGetInstanceProcAddr( instance, "vkDestroyFence" ) );
+      vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetInstanceProcAddr( instance, "vkDestroyFramebuffer" ) );
+      vkDestroyImage = PFN_vkDestroyImage( vkGetInstanceProcAddr( instance, "vkDestroyImage" ) );
+      vkDestroyImageView = PFN_vkDestroyImageView( vkGetInstanceProcAddr( instance, "vkDestroyImageView" ) );
+      vkDestroyIndirectCommandsLayoutNVX = PFN_vkDestroyIndirectCommandsLayoutNVX( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNVX" ) );
+      vkDestroyObjectTableNVX = PFN_vkDestroyObjectTableNVX( vkGetInstanceProcAddr( instance, "vkDestroyObjectTableNVX" ) );
+      vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetInstanceProcAddr( instance, "vkDestroyPipeline" ) );
+      vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetInstanceProcAddr( instance, "vkDestroyPipelineCache" ) );
+      vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetInstanceProcAddr( instance, "vkDestroyPipelineLayout" ) );
+      vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetInstanceProcAddr( instance, "vkDestroyQueryPool" ) );
+      vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetInstanceProcAddr( instance, "vkDestroyRenderPass" ) );
+      vkDestroySampler = PFN_vkDestroySampler( vkGetInstanceProcAddr( instance, "vkDestroySampler" ) );
+      vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversion" ) );
+      vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversionKHR" ) );
+      vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetInstanceProcAddr( instance, "vkDestroySemaphore" ) );
+      vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetInstanceProcAddr( instance, "vkDestroyShaderModule" ) );
+      vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetInstanceProcAddr( instance, "vkDestroySwapchainKHR" ) );
+      vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkDestroyValidationCacheEXT" ) );
+      vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetInstanceProcAddr( instance, "vkDeviceWaitIdle" ) );
+      vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetInstanceProcAddr( instance, "vkDisplayPowerControlEXT" ) );
+      vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkFlushMappedMemoryRanges" ) );
+      vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetInstanceProcAddr( instance, "vkFreeCommandBuffers" ) );
+      vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetInstanceProcAddr( instance, "vkFreeDescriptorSets" ) );
+      vkFreeMemory = PFN_vkFreeMemory( vkGetInstanceProcAddr( instance, "vkFreeMemory" ) );
+      vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureHandleNV" ) );
+      vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsNV" ) );
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+      vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetInstanceProcAddr( instance, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+      vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressEXT" ) );
+      vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressKHR" ) );
+      vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements" ) );
+      vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) );
+      vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2KHR" ) );
+      vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddressKHR" ) );
+      vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsEXT" ) );
+      vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) );
+      vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) );
+      vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) );
+      vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) );
+      vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModes2EXT" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
+      vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryCommitment" ) );
+      vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
+      vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) );
+      vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue" ) );
+      vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue2" ) );
+      vkGetEventStatus = PFN_vkGetEventStatus( vkGetInstanceProcAddr( instance, "vkGetEventStatus" ) );
+      vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetInstanceProcAddr( instance, "vkGetFenceFdKHR" ) );
+      vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetInstanceProcAddr( instance, "vkGetFenceStatus" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetFenceWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetImageDrmFormatModifierPropertiesEXT" ) );
+      vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements" ) );
+      vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) );
+      vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2KHR" ) );
+      vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements" ) );
+      vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2" ) );
+      vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2KHR" ) );
+      vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout" ) );
+      vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) );
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+      vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+      vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdKHR" ) );
+      vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdPropertiesKHR" ) );
+      vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetMemoryHostPointerPropertiesEXT" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleNV" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandlePropertiesKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetInstanceProcAddr( instance, "vkGetPastPresentationTimingGOOGLE" ) );
+      vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetInstanceProcAddr( instance, "vkGetPerformanceParameterINTEL" ) );
+      vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetInstanceProcAddr( instance, "vkGetPipelineCacheData" ) );
+      vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
+      vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutablePropertiesKHR" ) );
+      vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) );
+      vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetInstanceProcAddr( instance, "vkGetQueryPoolResults" ) );
+      vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) );
+      vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) );
+      vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderAreaGranularity" ) );
+      vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValueKHR" ) );
+      vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreFdKHR" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetInstanceProcAddr( instance, "vkGetShaderInfoAMD" ) );
+      vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetInstanceProcAddr( instance, "vkGetSwapchainCounterEXT" ) );
+      vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainImagesKHR" ) );
+      vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainStatusKHR" ) );
+      vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetInstanceProcAddr( instance, "vkGetValidationCacheDataEXT" ) );
+      vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetInstanceProcAddr( instance, "vkImportFenceFdKHR" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportFenceWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreFdKHR" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkInitializePerformanceApiINTEL" ) );
+      vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkInvalidateMappedMemoryRanges" ) );
+      vkMapMemory = PFN_vkMapMemory( vkGetInstanceProcAddr( instance, "vkMapMemory" ) );
+      vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetInstanceProcAddr( instance, "vkMergePipelineCaches" ) );
+      vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetInstanceProcAddr( instance, "vkMergeValidationCachesEXT" ) );
+      vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) );
+      vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) );
+      vkRegisterObjectsNVX = PFN_vkRegisterObjectsNVX( vkGetInstanceProcAddr( instance, "vkRegisterObjectsNVX" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkReleasePerformanceConfigurationINTEL" ) );
+      vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkReleaseProfilingLockKHR" ) );
+      vkResetCommandPool = PFN_vkResetCommandPool( vkGetInstanceProcAddr( instance, "vkResetCommandPool" ) );
+      vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetInstanceProcAddr( instance, "vkResetDescriptorPool" ) );
+      vkResetEvent = PFN_vkResetEvent( vkGetInstanceProcAddr( instance, "vkResetEvent" ) );
+      vkResetFences = PFN_vkResetFences( vkGetInstanceProcAddr( instance, "vkResetFences" ) );
+      vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetInstanceProcAddr( instance, "vkResetQueryPoolEXT" ) );
+      vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectNameEXT" ) );
+      vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectTagEXT" ) );
+      vkSetEvent = PFN_vkSetEvent( vkGetInstanceProcAddr( instance, "vkSetEvent" ) );
+      vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetInstanceProcAddr( instance, "vkSetHdrMetadataEXT" ) );
+      vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetInstanceProcAddr( instance, "vkSetLocalDimmingAMD" ) );
+      vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetInstanceProcAddr( instance, "vkSignalSemaphoreKHR" ) );
+      vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) );
+      vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetInstanceProcAddr( instance, "vkTrimCommandPoolKHR" ) );
+      vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkUninitializePerformanceApiINTEL" ) );
+      vkUnmapMemory = PFN_vkUnmapMemory( vkGetInstanceProcAddr( instance, "vkUnmapMemory" ) );
+      vkUnregisterObjectsNVX = PFN_vkUnregisterObjectsNVX( vkGetInstanceProcAddr( instance, "vkUnregisterObjectsNVX" ) );
+      vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) );
+      vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) );
+      vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) );
+      vkWaitForFences = PFN_vkWaitForFences( vkGetInstanceProcAddr( instance, "vkWaitForFences" ) );
+      vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetInstanceProcAddr( instance, "vkWaitSemaphoresKHR" ) );
+      vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) );
+      vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) );
+      vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetInstanceProcAddr( instance, "vkQueueBindSparse" ) );
+      vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueEndDebugUtilsLabelEXT" ) );
+      vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueInsertDebugUtilsLabelEXT" ) );
+      vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetInstanceProcAddr( instance, "vkQueuePresentKHR" ) );
+      vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) );
+      vkQueueSubmit = PFN_vkQueueSubmit( vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) );
+      vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) );
+    }
+
+    void init( VULKAN_HPP_NAMESPACE::Device deviceCpp ) VULKAN_HPP_NOEXCEPT
+    {
+      VkDevice device = static_cast<VkDevice>(deviceCpp);
+      vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) );
+      vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) );
+      vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) );
+      vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) );
+      vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) );
+      vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) );
+      vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) );
+      vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) );
+      vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) );
+      vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) );
+      vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) );
+      vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) );
+      vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) );
+      vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) );
+      vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) );
+      vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) );
+      vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) );
+      vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) );
+      vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) );
+      vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) );
+      vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) );
+      vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) );
+      vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) );
+      vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) );
+      vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) );
+      vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) );
+      vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) );
+      vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) );
+      vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) );
+      vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) );
+      vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) );
+      vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) );
+      vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) );
+      vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) );
+      vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) );
+      vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) );
+      vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) );
+      vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) );
+      vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) );
+      vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) );
+      vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) );
+      vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) );
+      vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) );
+      vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) );
+      vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) );
+      vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) );
+      vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) );
+      vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) );
+      vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) );
+      vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) );
+      vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) );
+      vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) );
+      vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) );
+      vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) );
+      vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) );
+      vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) );
+      vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) );
+      vkCmdProcessCommandsNVX = PFN_vkCmdProcessCommandsNVX( vkGetDeviceProcAddr( device, "vkCmdProcessCommandsNVX" ) );
+      vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) );
+      vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) );
+      vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) );
+      vkCmdReserveSpaceForCommandsNVX = PFN_vkCmdReserveSpaceForCommandsNVX( vkGetDeviceProcAddr( device, "vkCmdReserveSpaceForCommandsNVX" ) );
+      vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) );
+      vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) );
+      vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) );
+      vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) );
+      vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) );
+      vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) );
+      vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) );
+      vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) );
+      vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) );
+      vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) );
+      vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) );
+      vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) );
+      vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) );
+      vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) );
+      vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) );
+      vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) );
+      vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) );
+      vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) );
+      vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) );
+      vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) );
+      vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) );
+      vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) );
+      vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) );
+      vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) );
+      vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) );
+      vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) );
+      vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) );
+      vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) );
+      vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) );
+      vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
+      vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) );
+      vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) );
+      vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) );
+      vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) );
+      vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) );
+      vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) );
+      vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) );
+      vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) );
+      vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) );
+      vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) );
+      vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) );
+      vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) );
+      vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) );
+      vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) );
+      vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) );
+      vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) );
+      vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) );
+      vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) );
+      vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) );
+      vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) );
+      vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) );
+      vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) );
+      vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) );
+      vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) );
+      vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) );
+      vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) );
+      vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) );
+      vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) );
+      vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) );
+      vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) );
+      vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) );
+      vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) );
+      vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) );
+      vkCreateIndirectCommandsLayoutNVX = PFN_vkCreateIndirectCommandsLayoutNVX( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNVX" ) );
+      vkCreateObjectTableNVX = PFN_vkCreateObjectTableNVX( vkGetDeviceProcAddr( device, "vkCreateObjectTableNVX" ) );
+      vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) );
+      vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) );
+      vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) );
+      vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) );
+      vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) );
+      vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) );
+      vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) );
+      vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) );
+      vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) );
+      vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) );
+      vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) );
+      vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) );
+      vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) );
+      vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) );
+      vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) );
+      vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) );
+      vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) );
+      vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) );
+      vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) );
+      vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) );
+      vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) );
+      vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) );
+      vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) );
+      vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) );
+      vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) );
+      vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) );
+      vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) );
+      vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) );
+      vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) );
+      vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) );
+      vkDestroyIndirectCommandsLayoutNVX = PFN_vkDestroyIndirectCommandsLayoutNVX( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNVX" ) );
+      vkDestroyObjectTableNVX = PFN_vkDestroyObjectTableNVX( vkGetDeviceProcAddr( device, "vkDestroyObjectTableNVX" ) );
+      vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) );
+      vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) );
+      vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) );
+      vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) );
+      vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) );
+      vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) );
+      vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) );
+      vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) );
+      vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) );
+      vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) );
+      vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) );
+      vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) );
+      vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) );
+      vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) );
+      vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) );
+      vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) );
+      vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) );
+      vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) );
+      vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) );
+      vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) );
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+      vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+      vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) );
+      vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) );
+      vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) );
+      vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) );
+      vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) );
+      vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) );
+      vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) );
+      vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) );
+      vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) );
+      vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) );
+      vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) );
+      vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
+      vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) );
+      vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
+      vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) );
+      vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) );
+      vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) );
+      vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) );
+      vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) );
+      vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) );
+      vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) );
+      vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) );
+      vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) );
+      vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) );
+      vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) );
+      vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) );
+      vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) );
+      vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) );
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+      vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+      vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) );
+      vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) );
+      vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) );
+      vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) );
+      vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) );
+      vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
+      vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) );
+      vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) );
+      vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) );
+      vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) );
+      vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) );
+      vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) );
+      vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) );
+      vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) );
+      vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) );
+      vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) );
+      vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) );
+      vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) );
+      vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) );
+      vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) );
+      vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) );
+      vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) );
+      vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) );
+      vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) );
+      vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) );
+      vkRegisterObjectsNVX = PFN_vkRegisterObjectsNVX( vkGetDeviceProcAddr( device, "vkRegisterObjectsNVX" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) );
+      vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) );
+      vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) );
+      vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) );
+      vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) );
+      vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) );
+      vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) );
+      vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) );
+      vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) );
+      vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) );
+      vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) );
+      vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) );
+      vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) );
+      vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) );
+      vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) );
+      vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) );
+      vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) );
+      vkUnregisterObjectsNVX = PFN_vkUnregisterObjectsNVX( vkGetDeviceProcAddr( device, "vkUnregisterObjectsNVX" ) );
+      vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) );
+      vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) );
+      vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) );
+      vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) );
+      vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) );
+      vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) );
+      vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) );
+      vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) );
+      vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) );
+      vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) );
+      vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) );
+      vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) );
+      vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) );
+      vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) );
+    }
+  };
+
+} // namespace VULKAN_HPP_NAMESPACE
+#endif
diff --git a/src/third_party/vulkan-headers/src/include/vulkan/vulkan_android.h b/src/third_party/vulkan-headers/src/include/vulkan/vulkan_android.h
new file mode 100644
index 0000000..9b8d3e2
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/include/vulkan/vulkan_android.h
@@ -0,0 +1,122 @@
+#ifndef VULKAN_ANDROID_H_
+#define VULKAN_ANDROID_H_ 1
+
+/*
+** Copyright (c) 2015-2019 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_KHR_android_surface 1
+struct ANativeWindow;
+#define VK_KHR_ANDROID_SURFACE_SPEC_VERSION 6
+#define VK_KHR_ANDROID_SURFACE_EXTENSION_NAME "VK_KHR_android_surface"
+typedef VkFlags VkAndroidSurfaceCreateFlagsKHR;
+typedef struct VkAndroidSurfaceCreateInfoKHR {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkAndroidSurfaceCreateFlagsKHR    flags;
+    struct ANativeWindow*             window;
+} VkAndroidSurfaceCreateInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateAndroidSurfaceKHR)(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateAndroidSurfaceKHR(
+    VkInstance                                  instance,
+    const VkAndroidSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif
+
+
+#define VK_ANDROID_external_memory_android_hardware_buffer 1
+struct AHardwareBuffer;
+#define VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION 3
+#define VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME "VK_ANDROID_external_memory_android_hardware_buffer"
+typedef struct VkAndroidHardwareBufferUsageANDROID {
+    VkStructureType    sType;
+    void*              pNext;
+    uint64_t           androidHardwareBufferUsage;
+} VkAndroidHardwareBufferUsageANDROID;
+
+typedef struct VkAndroidHardwareBufferPropertiesANDROID {
+    VkStructureType    sType;
+    void*              pNext;
+    VkDeviceSize       allocationSize;
+    uint32_t           memoryTypeBits;
+} VkAndroidHardwareBufferPropertiesANDROID;
+
+typedef struct VkAndroidHardwareBufferFormatPropertiesANDROID {
+    VkStructureType                  sType;
+    void*                            pNext;
+    VkFormat                         format;
+    uint64_t                         externalFormat;
+    VkFormatFeatureFlags             formatFeatures;
+    VkComponentMapping               samplerYcbcrConversionComponents;
+    VkSamplerYcbcrModelConversion    suggestedYcbcrModel;
+    VkSamplerYcbcrRange              suggestedYcbcrRange;
+    VkChromaLocation                 suggestedXChromaOffset;
+    VkChromaLocation                 suggestedYChromaOffset;
+} VkAndroidHardwareBufferFormatPropertiesANDROID;
+
+typedef struct VkImportAndroidHardwareBufferInfoANDROID {
+    VkStructureType            sType;
+    const void*                pNext;
+    struct AHardwareBuffer*    buffer;
+} VkImportAndroidHardwareBufferInfoANDROID;
+
+typedef struct VkMemoryGetAndroidHardwareBufferInfoANDROID {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkDeviceMemory     memory;
+} VkMemoryGetAndroidHardwareBufferInfoANDROID;
+
+typedef struct VkExternalFormatANDROID {
+    VkStructureType    sType;
+    void*              pNext;
+    uint64_t           externalFormat;
+} VkExternalFormatANDROID;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetAndroidHardwareBufferPropertiesANDROID)(VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryAndroidHardwareBufferANDROID)(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetAndroidHardwareBufferPropertiesANDROID(
+    VkDevice                                    device,
+    const struct AHardwareBuffer*               buffer,
+    VkAndroidHardwareBufferPropertiesANDROID*   pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryAndroidHardwareBufferANDROID(
+    VkDevice                                    device,
+    const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
+    struct AHardwareBuffer**                    pBuffer);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/src/third_party/vulkan-headers/src/include/vulkan/vulkan_core.h b/src/third_party/vulkan-headers/src/include/vulkan/vulkan_core.h
new file mode 100644
index 0000000..7bfacbe
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/include/vulkan/vulkan_core.h
@@ -0,0 +1,10271 @@
+#ifndef VULKAN_CORE_H_
+#define VULKAN_CORE_H_ 1
+
+/*
+** Copyright (c) 2015-2019 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_VERSION_1_0 1
+#include "vk_platform.h"
+#define VK_MAKE_VERSION(major, minor, patch) \
+    (((major) << 22) | ((minor) << 12) | (patch))
+
+// DEPRECATED: This define has been removed. Specific version defines (e.g. VK_API_VERSION_1_0), or the VK_MAKE_VERSION macro, should be used instead.
+//#define VK_API_VERSION VK_MAKE_VERSION(1, 0, 0) // Patch version should always be set to 0
+
+// Vulkan 1.0 version number
+#define VK_API_VERSION_1_0 VK_MAKE_VERSION(1, 0, 0)// Patch version should always be set to 0
+
+#define VK_VERSION_MAJOR(version) ((uint32_t)(version) >> 22)
+#define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3ff)
+#define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xfff)
+// Version of this file
+#define VK_HEADER_VERSION 130
+
+
+#define VK_NULL_HANDLE 0
+
+
+#define VK_DEFINE_HANDLE(object) typedef struct object##_T* object;
+
+
+#if !defined(VK_DEFINE_NON_DISPATCHABLE_HANDLE)
+#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
+        #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef struct object##_T *object;
+#else
+        #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef uint64_t object;
+#endif
+#endif
+
+typedef uint32_t VkFlags;
+typedef uint32_t VkBool32;
+typedef uint64_t VkDeviceSize;
+typedef uint32_t VkSampleMask;
+VK_DEFINE_HANDLE(VkInstance)
+VK_DEFINE_HANDLE(VkPhysicalDevice)
+VK_DEFINE_HANDLE(VkDevice)
+VK_DEFINE_HANDLE(VkQueue)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSemaphore)
+VK_DEFINE_HANDLE(VkCommandBuffer)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFence)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeviceMemory)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBuffer)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImage)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkEvent)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkQueryPool)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferView)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImageView)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkShaderModule)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineCache)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineLayout)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkRenderPass)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipeline)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSetLayout)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSampler)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorPool)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSet)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFramebuffer)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCommandPool)
+#define VK_LOD_CLAMP_NONE                 1000.0f
+#define VK_REMAINING_MIP_LEVELS           (~0U)
+#define VK_REMAINING_ARRAY_LAYERS         (~0U)
+#define VK_WHOLE_SIZE                     (~0ULL)
+#define VK_ATTACHMENT_UNUSED              (~0U)
+#define VK_TRUE                           1
+#define VK_FALSE                          0
+#define VK_QUEUE_FAMILY_IGNORED           (~0U)
+#define VK_SUBPASS_EXTERNAL               (~0U)
+#define VK_MAX_PHYSICAL_DEVICE_NAME_SIZE  256
+#define VK_UUID_SIZE                      16
+#define VK_MAX_MEMORY_TYPES               32
+#define VK_MAX_MEMORY_HEAPS               16
+#define VK_MAX_EXTENSION_NAME_SIZE        256
+#define VK_MAX_DESCRIPTION_SIZE           256
+
+typedef enum VkPipelineCacheHeaderVersion {
+    VK_PIPELINE_CACHE_HEADER_VERSION_ONE = 1,
+    VK_PIPELINE_CACHE_HEADER_VERSION_BEGIN_RANGE = VK_PIPELINE_CACHE_HEADER_VERSION_ONE,
+    VK_PIPELINE_CACHE_HEADER_VERSION_END_RANGE = VK_PIPELINE_CACHE_HEADER_VERSION_ONE,
+    VK_PIPELINE_CACHE_HEADER_VERSION_RANGE_SIZE = (VK_PIPELINE_CACHE_HEADER_VERSION_ONE - VK_PIPELINE_CACHE_HEADER_VERSION_ONE + 1),
+    VK_PIPELINE_CACHE_HEADER_VERSION_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineCacheHeaderVersion;
+
+typedef enum VkResult {
+    VK_SUCCESS = 0,
+    VK_NOT_READY = 1,
+    VK_TIMEOUT = 2,
+    VK_EVENT_SET = 3,
+    VK_EVENT_RESET = 4,
+    VK_INCOMPLETE = 5,
+    VK_ERROR_OUT_OF_HOST_MEMORY = -1,
+    VK_ERROR_OUT_OF_DEVICE_MEMORY = -2,
+    VK_ERROR_INITIALIZATION_FAILED = -3,
+    VK_ERROR_DEVICE_LOST = -4,
+    VK_ERROR_MEMORY_MAP_FAILED = -5,
+    VK_ERROR_LAYER_NOT_PRESENT = -6,
+    VK_ERROR_EXTENSION_NOT_PRESENT = -7,
+    VK_ERROR_FEATURE_NOT_PRESENT = -8,
+    VK_ERROR_INCOMPATIBLE_DRIVER = -9,
+    VK_ERROR_TOO_MANY_OBJECTS = -10,
+    VK_ERROR_FORMAT_NOT_SUPPORTED = -11,
+    VK_ERROR_FRAGMENTED_POOL = -12,
+    VK_ERROR_OUT_OF_POOL_MEMORY = -1000069000,
+    VK_ERROR_INVALID_EXTERNAL_HANDLE = -1000072003,
+    VK_ERROR_SURFACE_LOST_KHR = -1000000000,
+    VK_ERROR_NATIVE_WINDOW_IN_USE_KHR = -1000000001,
+    VK_SUBOPTIMAL_KHR = 1000001003,
+    VK_ERROR_OUT_OF_DATE_KHR = -1000001004,
+    VK_ERROR_INCOMPATIBLE_DISPLAY_KHR = -1000003001,
+    VK_ERROR_VALIDATION_FAILED_EXT = -1000011001,
+    VK_ERROR_INVALID_SHADER_NV = -1000012000,
+    VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT = -1000158000,
+    VK_ERROR_FRAGMENTATION_EXT = -1000161000,
+    VK_ERROR_NOT_PERMITTED_EXT = -1000174001,
+    VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT = -1000255000,
+    VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR = -1000244000,
+    VK_ERROR_OUT_OF_POOL_MEMORY_KHR = VK_ERROR_OUT_OF_POOL_MEMORY,
+    VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR = VK_ERROR_INVALID_EXTERNAL_HANDLE,
+    VK_ERROR_INVALID_DEVICE_ADDRESS_EXT = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR,
+    VK_RESULT_BEGIN_RANGE = VK_ERROR_FRAGMENTED_POOL,
+    VK_RESULT_END_RANGE = VK_INCOMPLETE,
+    VK_RESULT_RANGE_SIZE = (VK_INCOMPLETE - VK_ERROR_FRAGMENTED_POOL + 1),
+    VK_RESULT_MAX_ENUM = 0x7FFFFFFF
+} VkResult;
+
+typedef enum VkStructureType {
+    VK_STRUCTURE_TYPE_APPLICATION_INFO = 0,
+    VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO = 1,
+    VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO = 2,
+    VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO = 3,
+    VK_STRUCTURE_TYPE_SUBMIT_INFO = 4,
+    VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO = 5,
+    VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE = 6,
+    VK_STRUCTURE_TYPE_BIND_SPARSE_INFO = 7,
+    VK_STRUCTURE_TYPE_FENCE_CREATE_INFO = 8,
+    VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO = 9,
+    VK_STRUCTURE_TYPE_EVENT_CREATE_INFO = 10,
+    VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO = 11,
+    VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO = 12,
+    VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO = 13,
+    VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO = 14,
+    VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO = 15,
+    VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO = 16,
+    VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO = 17,
+    VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO = 18,
+    VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO = 19,
+    VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO = 20,
+    VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO = 21,
+    VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO = 22,
+    VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO = 23,
+    VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO = 24,
+    VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO = 25,
+    VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO = 26,
+    VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO = 27,
+    VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO = 28,
+    VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO = 29,
+    VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO = 30,
+    VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO = 31,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO = 32,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO = 33,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO = 34,
+    VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET = 35,
+    VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET = 36,
+    VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO = 37,
+    VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO = 38,
+    VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO = 39,
+    VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO = 40,
+    VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO = 41,
+    VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO = 42,
+    VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO = 43,
+    VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER = 44,
+    VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER = 45,
+    VK_STRUCTURE_TYPE_MEMORY_BARRIER = 46,
+    VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO = 47,
+    VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO = 48,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES = 1000094000,
+    VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO = 1000157000,
+    VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO = 1000157001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES = 1000083000,
+    VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS = 1000127000,
+    VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO = 1000127001,
+    VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO = 1000060000,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO = 1000060003,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO = 1000060004,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO = 1000060005,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO = 1000060006,
+    VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO = 1000060013,
+    VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO = 1000060014,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES = 1000070000,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO = 1000070001,
+    VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2 = 1000146000,
+    VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2 = 1000146001,
+    VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2 = 1000146002,
+    VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2 = 1000146003,
+    VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2 = 1000146004,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2 = 1000059000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2 = 1000059001,
+    VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2 = 1000059002,
+    VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2 = 1000059003,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2 = 1000059004,
+    VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2 = 1000059005,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2 = 1000059006,
+    VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2 = 1000059007,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2 = 1000059008,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES = 1000117000,
+    VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO = 1000117001,
+    VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO = 1000117002,
+    VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO = 1000117003,
+    VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO = 1000053000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES = 1000053001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES = 1000053002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES = 1000120000,
+    VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO = 1000145000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES = 1000145001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES = 1000145002,
+    VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2 = 1000145003,
+    VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO = 1000156000,
+    VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO = 1000156001,
+    VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO = 1000156002,
+    VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO = 1000156003,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES = 1000156004,
+    VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES = 1000156005,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO = 1000085000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO = 1000071000,
+    VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES = 1000071001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO = 1000071002,
+    VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES = 1000071003,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES = 1000071004,
+    VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO = 1000072000,
+    VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO = 1000072001,
+    VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO = 1000072002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO = 1000112000,
+    VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES = 1000112001,
+    VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO = 1000113000,
+    VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO = 1000077000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO = 1000076000,
+    VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES = 1000076001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES = 1000168000,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT = 1000168001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES = 1000063000,
+    VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR = 1000001000,
+    VK_STRUCTURE_TYPE_PRESENT_INFO_KHR = 1000001001,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR = 1000060007,
+    VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR = 1000060008,
+    VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR = 1000060009,
+    VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR = 1000060010,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR = 1000060011,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR = 1000060012,
+    VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR = 1000002000,
+    VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR = 1000002001,
+    VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR = 1000003000,
+    VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR = 1000004000,
+    VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR = 1000005000,
+    VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR = 1000006000,
+    VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR = 1000008000,
+    VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR = 1000009000,
+    VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT = 1000011000,
+    VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD = 1000018000,
+    VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT = 1000022000,
+    VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT = 1000022001,
+    VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT = 1000022002,
+    VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV = 1000026000,
+    VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV = 1000026001,
+    VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV = 1000026002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT = 1000028000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT = 1000028001,
+    VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT = 1000028002,
+    VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX = 1000030000,
+    VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD = 1000041000,
+    VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP = 1000049000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV = 1000050000,
+    VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV = 1000056000,
+    VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV = 1000056001,
+    VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV = 1000057000,
+    VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV = 1000057001,
+    VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV = 1000058000,
+    VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT = 1000061000,
+    VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN = 1000062000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT = 1000066000,
+    VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT = 1000067000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT = 1000067001,
+    VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073000,
+    VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073001,
+    VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR = 1000073002,
+    VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR = 1000073003,
+    VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR = 1000074000,
+    VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR = 1000074001,
+    VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR = 1000074002,
+    VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR = 1000075000,
+    VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR = 1000078000,
+    VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR = 1000078001,
+    VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR = 1000078002,
+    VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR = 1000078003,
+    VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR = 1000079000,
+    VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR = 1000079001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR = 1000080000,
+    VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT = 1000081000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT = 1000081001,
+    VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT = 1000081002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR = 1000082000,
+    VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR = 1000084000,
+    VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX = 1000086000,
+    VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX = 1000086001,
+    VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX = 1000086002,
+    VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX = 1000086003,
+    VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX = 1000086004,
+    VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX = 1000086005,
+    VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV = 1000087000,
+    VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT = 1000090000,
+    VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT = 1000091000,
+    VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT = 1000091001,
+    VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT = 1000091002,
+    VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT = 1000091003,
+    VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE = 1000092000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX = 1000097000,
+    VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV = 1000098000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT = 1000099000,
+    VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT = 1000099001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT = 1000101000,
+    VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT = 1000101001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT = 1000102000,
+    VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT = 1000102001,
+    VK_STRUCTURE_TYPE_HDR_METADATA_EXT = 1000105000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR = 1000108000,
+    VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR = 1000108001,
+    VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR = 1000108002,
+    VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR = 1000108003,
+    VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR = 1000109000,
+    VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR = 1000109001,
+    VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR = 1000109002,
+    VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR = 1000109003,
+    VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR = 1000109004,
+    VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR = 1000109005,
+    VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR = 1000109006,
+    VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR = 1000111000,
+    VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114000,
+    VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114001,
+    VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR = 1000114002,
+    VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR = 1000115000,
+    VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR = 1000115001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR = 1000116000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR = 1000116001,
+    VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR = 1000116002,
+    VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR = 1000116003,
+    VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR = 1000116004,
+    VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR = 1000116005,
+    VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR = 1000116006,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR = 1000119000,
+    VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR = 1000119001,
+    VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR = 1000119002,
+    VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR = 1000121000,
+    VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR = 1000121001,
+    VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR = 1000121002,
+    VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR = 1000121003,
+    VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR = 1000121004,
+    VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK = 1000122000,
+    VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK = 1000123000,
+    VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT = 1000128000,
+    VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT = 1000128001,
+    VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT = 1000128002,
+    VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT = 1000128003,
+    VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT = 1000128004,
+    VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID = 1000129000,
+    VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID = 1000129001,
+    VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID = 1000129002,
+    VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID = 1000129003,
+    VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID = 1000129004,
+    VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID = 1000129005,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT = 1000130000,
+    VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT = 1000130001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT = 1000138000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT = 1000138001,
+    VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT = 1000138002,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT = 1000138003,
+    VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT = 1000143000,
+    VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT = 1000143001,
+    VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT = 1000143002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT = 1000143003,
+    VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT = 1000143004,
+    VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR = 1000147000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT = 1000148000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT = 1000148001,
+    VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT = 1000148002,
+    VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV = 1000149000,
+    VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV = 1000152000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV = 1000154000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV = 1000154001,
+    VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT = 1000158000,
+    VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT = 1000158001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT = 1000158002,
+    VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT = 1000158003,
+    VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT = 1000158004,
+    VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT = 1000158005,
+    VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160000,
+    VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160001,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT = 1000161000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT = 1000161001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT = 1000161002,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT = 1000161003,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT = 1000161004,
+    VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV = 1000164000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV = 1000164001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV = 1000164002,
+    VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV = 1000164005,
+    VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV = 1000165000,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV = 1000165001,
+    VK_STRUCTURE_TYPE_GEOMETRY_NV = 1000165003,
+    VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV = 1000165004,
+    VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV = 1000165005,
+    VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV = 1000165006,
+    VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV = 1000165007,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV = 1000165008,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV = 1000165009,
+    VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV = 1000165011,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV = 1000165012,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV = 1000166000,
+    VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV = 1000166001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT = 1000170000,
+    VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT = 1000170001,
+    VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT = 1000174000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR = 1000175000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR = 1000177000,
+    VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT = 1000178000,
+    VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT = 1000178001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT = 1000178002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR = 1000180000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR = 1000181000,
+    VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD = 1000183000,
+    VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT = 1000184000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD = 1000185000,
+    VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD = 1000189000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT = 1000190000,
+    VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT = 1000190001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT = 1000190002,
+    VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP = 1000191000,
+    VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT = 1000192000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR = 1000196000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR = 1000197000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR = 1000199000,
+    VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR = 1000199001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV = 1000201000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV = 1000202000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV = 1000202001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV = 1000203000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV = 1000204000,
+    VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV = 1000205000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV = 1000205002,
+    VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV = 1000206000,
+    VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV = 1000206001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR = 1000207000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR = 1000207001,
+    VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR = 1000207002,
+    VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR = 1000207003,
+    VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR = 1000207004,
+    VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR = 1000207005,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL = 1000209000,
+    VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL = 1000210000,
+    VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL = 1000210001,
+    VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL = 1000210002,
+    VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL = 1000210003,
+    VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL = 1000210004,
+    VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL = 1000210005,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR = 1000211000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT = 1000212000,
+    VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD = 1000213000,
+    VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD = 1000213001,
+    VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA = 1000214000,
+    VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT = 1000217000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT = 1000218000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT = 1000218001,
+    VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT = 1000218002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT = 1000221000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT = 1000225000,
+    VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT = 1000225001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT = 1000225002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD = 1000227000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD = 1000229000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT = 1000237000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT = 1000238000,
+    VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT = 1000238001,
+    VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR = 1000239000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV = 1000240000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR = 1000241000,
+    VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR = 1000241001,
+    VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR = 1000241002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT = 1000244000,
+    VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT = 1000244002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT = 1000245000,
+    VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT = 1000246000,
+    VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT = 1000247000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV = 1000249000,
+    VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV = 1000249001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV = 1000249002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV = 1000250000,
+    VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV = 1000250001,
+    VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV = 1000250002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT = 1000251000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT = 1000252000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR = 1000253000,
+    VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT = 1000255000,
+    VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT = 1000255002,
+    VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT = 1000255001,
+    VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT = 1000256000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR = 1000257000,
+    VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR = 1000244001,
+    VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR = 1000257002,
+    VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR = 1000257003,
+    VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR = 1000257004,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT = 1000259000,
+    VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT = 1000259001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT = 1000259002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT = 1000261000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT = 1000265000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR = 1000269000,
+    VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR = 1000269001,
+    VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR = 1000269002,
+    VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR = 1000269003,
+    VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR = 1000269004,
+    VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR = 1000269005,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT = 1000276000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT = 1000281000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT = 1000281001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES,
+    VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT,
+    VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
+    VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
+    VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
+    VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2,
+    VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2,
+    VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO,
+    VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO,
+    VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
+    VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO,
+    VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES,
+    VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO,
+    VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO,
+    VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES,
+    VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO,
+    VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES,
+    VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES,
+    VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO,
+    VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES,
+    VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
+    VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
+    VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2,
+    VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
+    VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2,
+    VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
+    VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2,
+    VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO,
+    VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO,
+    VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO,
+    VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES,
+    VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES,
+    VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO,
+    VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT,
+    VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR,
+    VK_STRUCTURE_TYPE_BEGIN_RANGE = VK_STRUCTURE_TYPE_APPLICATION_INFO,
+    VK_STRUCTURE_TYPE_END_RANGE = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_RANGE_SIZE = (VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO - VK_STRUCTURE_TYPE_APPLICATION_INFO + 1),
+    VK_STRUCTURE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkStructureType;
+
+typedef enum VkSystemAllocationScope {
+    VK_SYSTEM_ALLOCATION_SCOPE_COMMAND = 0,
+    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT = 1,
+    VK_SYSTEM_ALLOCATION_SCOPE_CACHE = 2,
+    VK_SYSTEM_ALLOCATION_SCOPE_DEVICE = 3,
+    VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE = 4,
+    VK_SYSTEM_ALLOCATION_SCOPE_BEGIN_RANGE = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND,
+    VK_SYSTEM_ALLOCATION_SCOPE_END_RANGE = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE,
+    VK_SYSTEM_ALLOCATION_SCOPE_RANGE_SIZE = (VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE - VK_SYSTEM_ALLOCATION_SCOPE_COMMAND + 1),
+    VK_SYSTEM_ALLOCATION_SCOPE_MAX_ENUM = 0x7FFFFFFF
+} VkSystemAllocationScope;
+
+typedef enum VkInternalAllocationType {
+    VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE = 0,
+    VK_INTERNAL_ALLOCATION_TYPE_BEGIN_RANGE = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE,
+    VK_INTERNAL_ALLOCATION_TYPE_END_RANGE = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE,
+    VK_INTERNAL_ALLOCATION_TYPE_RANGE_SIZE = (VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE - VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE + 1),
+    VK_INTERNAL_ALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkInternalAllocationType;
+
+typedef enum VkFormat {
+    VK_FORMAT_UNDEFINED = 0,
+    VK_FORMAT_R4G4_UNORM_PACK8 = 1,
+    VK_FORMAT_R4G4B4A4_UNORM_PACK16 = 2,
+    VK_FORMAT_B4G4R4A4_UNORM_PACK16 = 3,
+    VK_FORMAT_R5G6B5_UNORM_PACK16 = 4,
+    VK_FORMAT_B5G6R5_UNORM_PACK16 = 5,
+    VK_FORMAT_R5G5B5A1_UNORM_PACK16 = 6,
+    VK_FORMAT_B5G5R5A1_UNORM_PACK16 = 7,
+    VK_FORMAT_A1R5G5B5_UNORM_PACK16 = 8,
+    VK_FORMAT_R8_UNORM = 9,
+    VK_FORMAT_R8_SNORM = 10,
+    VK_FORMAT_R8_USCALED = 11,
+    VK_FORMAT_R8_SSCALED = 12,
+    VK_FORMAT_R8_UINT = 13,
+    VK_FORMAT_R8_SINT = 14,
+    VK_FORMAT_R8_SRGB = 15,
+    VK_FORMAT_R8G8_UNORM = 16,
+    VK_FORMAT_R8G8_SNORM = 17,
+    VK_FORMAT_R8G8_USCALED = 18,
+    VK_FORMAT_R8G8_SSCALED = 19,
+    VK_FORMAT_R8G8_UINT = 20,
+    VK_FORMAT_R8G8_SINT = 21,
+    VK_FORMAT_R8G8_SRGB = 22,
+    VK_FORMAT_R8G8B8_UNORM = 23,
+    VK_FORMAT_R8G8B8_SNORM = 24,
+    VK_FORMAT_R8G8B8_USCALED = 25,
+    VK_FORMAT_R8G8B8_SSCALED = 26,
+    VK_FORMAT_R8G8B8_UINT = 27,
+    VK_FORMAT_R8G8B8_SINT = 28,
+    VK_FORMAT_R8G8B8_SRGB = 29,
+    VK_FORMAT_B8G8R8_UNORM = 30,
+    VK_FORMAT_B8G8R8_SNORM = 31,
+    VK_FORMAT_B8G8R8_USCALED = 32,
+    VK_FORMAT_B8G8R8_SSCALED = 33,
+    VK_FORMAT_B8G8R8_UINT = 34,
+    VK_FORMAT_B8G8R8_SINT = 35,
+    VK_FORMAT_B8G8R8_SRGB = 36,
+    VK_FORMAT_R8G8B8A8_UNORM = 37,
+    VK_FORMAT_R8G8B8A8_SNORM = 38,
+    VK_FORMAT_R8G8B8A8_USCALED = 39,
+    VK_FORMAT_R8G8B8A8_SSCALED = 40,
+    VK_FORMAT_R8G8B8A8_UINT = 41,
+    VK_FORMAT_R8G8B8A8_SINT = 42,
+    VK_FORMAT_R8G8B8A8_SRGB = 43,
+    VK_FORMAT_B8G8R8A8_UNORM = 44,
+    VK_FORMAT_B8G8R8A8_SNORM = 45,
+    VK_FORMAT_B8G8R8A8_USCALED = 46,
+    VK_FORMAT_B8G8R8A8_SSCALED = 47,
+    VK_FORMAT_B8G8R8A8_UINT = 48,
+    VK_FORMAT_B8G8R8A8_SINT = 49,
+    VK_FORMAT_B8G8R8A8_SRGB = 50,
+    VK_FORMAT_A8B8G8R8_UNORM_PACK32 = 51,
+    VK_FORMAT_A8B8G8R8_SNORM_PACK32 = 52,
+    VK_FORMAT_A8B8G8R8_USCALED_PACK32 = 53,
+    VK_FORMAT_A8B8G8R8_SSCALED_PACK32 = 54,
+    VK_FORMAT_A8B8G8R8_UINT_PACK32 = 55,
+    VK_FORMAT_A8B8G8R8_SINT_PACK32 = 56,
+    VK_FORMAT_A8B8G8R8_SRGB_PACK32 = 57,
+    VK_FORMAT_A2R10G10B10_UNORM_PACK32 = 58,
+    VK_FORMAT_A2R10G10B10_SNORM_PACK32 = 59,
+    VK_FORMAT_A2R10G10B10_USCALED_PACK32 = 60,
+    VK_FORMAT_A2R10G10B10_SSCALED_PACK32 = 61,
+    VK_FORMAT_A2R10G10B10_UINT_PACK32 = 62,
+    VK_FORMAT_A2R10G10B10_SINT_PACK32 = 63,
+    VK_FORMAT_A2B10G10R10_UNORM_PACK32 = 64,
+    VK_FORMAT_A2B10G10R10_SNORM_PACK32 = 65,
+    VK_FORMAT_A2B10G10R10_USCALED_PACK32 = 66,
+    VK_FORMAT_A2B10G10R10_SSCALED_PACK32 = 67,
+    VK_FORMAT_A2B10G10R10_UINT_PACK32 = 68,
+    VK_FORMAT_A2B10G10R10_SINT_PACK32 = 69,
+    VK_FORMAT_R16_UNORM = 70,
+    VK_FORMAT_R16_SNORM = 71,
+    VK_FORMAT_R16_USCALED = 72,
+    VK_FORMAT_R16_SSCALED = 73,
+    VK_FORMAT_R16_UINT = 74,
+    VK_FORMAT_R16_SINT = 75,
+    VK_FORMAT_R16_SFLOAT = 76,
+    VK_FORMAT_R16G16_UNORM = 77,
+    VK_FORMAT_R16G16_SNORM = 78,
+    VK_FORMAT_R16G16_USCALED = 79,
+    VK_FORMAT_R16G16_SSCALED = 80,
+    VK_FORMAT_R16G16_UINT = 81,
+    VK_FORMAT_R16G16_SINT = 82,
+    VK_FORMAT_R16G16_SFLOAT = 83,
+    VK_FORMAT_R16G16B16_UNORM = 84,
+    VK_FORMAT_R16G16B16_SNORM = 85,
+    VK_FORMAT_R16G16B16_USCALED = 86,
+    VK_FORMAT_R16G16B16_SSCALED = 87,
+    VK_FORMAT_R16G16B16_UINT = 88,
+    VK_FORMAT_R16G16B16_SINT = 89,
+    VK_FORMAT_R16G16B16_SFLOAT = 90,
+    VK_FORMAT_R16G16B16A16_UNORM = 91,
+    VK_FORMAT_R16G16B16A16_SNORM = 92,
+    VK_FORMAT_R16G16B16A16_USCALED = 93,
+    VK_FORMAT_R16G16B16A16_SSCALED = 94,
+    VK_FORMAT_R16G16B16A16_UINT = 95,
+    VK_FORMAT_R16G16B16A16_SINT = 96,
+    VK_FORMAT_R16G16B16A16_SFLOAT = 97,
+    VK_FORMAT_R32_UINT = 98,
+    VK_FORMAT_R32_SINT = 99,
+    VK_FORMAT_R32_SFLOAT = 100,
+    VK_FORMAT_R32G32_UINT = 101,
+    VK_FORMAT_R32G32_SINT = 102,
+    VK_FORMAT_R32G32_SFLOAT = 103,
+    VK_FORMAT_R32G32B32_UINT = 104,
+    VK_FORMAT_R32G32B32_SINT = 105,
+    VK_FORMAT_R32G32B32_SFLOAT = 106,
+    VK_FORMAT_R32G32B32A32_UINT = 107,
+    VK_FORMAT_R32G32B32A32_SINT = 108,
+    VK_FORMAT_R32G32B32A32_SFLOAT = 109,
+    VK_FORMAT_R64_UINT = 110,
+    VK_FORMAT_R64_SINT = 111,
+    VK_FORMAT_R64_SFLOAT = 112,
+    VK_FORMAT_R64G64_UINT = 113,
+    VK_FORMAT_R64G64_SINT = 114,
+    VK_FORMAT_R64G64_SFLOAT = 115,
+    VK_FORMAT_R64G64B64_UINT = 116,
+    VK_FORMAT_R64G64B64_SINT = 117,
+    VK_FORMAT_R64G64B64_SFLOAT = 118,
+    VK_FORMAT_R64G64B64A64_UINT = 119,
+    VK_FORMAT_R64G64B64A64_SINT = 120,
+    VK_FORMAT_R64G64B64A64_SFLOAT = 121,
+    VK_FORMAT_B10G11R11_UFLOAT_PACK32 = 122,
+    VK_FORMAT_E5B9G9R9_UFLOAT_PACK32 = 123,
+    VK_FORMAT_D16_UNORM = 124,
+    VK_FORMAT_X8_D24_UNORM_PACK32 = 125,
+    VK_FORMAT_D32_SFLOAT = 126,
+    VK_FORMAT_S8_UINT = 127,
+    VK_FORMAT_D16_UNORM_S8_UINT = 128,
+    VK_FORMAT_D24_UNORM_S8_UINT = 129,
+    VK_FORMAT_D32_SFLOAT_S8_UINT = 130,
+    VK_FORMAT_BC1_RGB_UNORM_BLOCK = 131,
+    VK_FORMAT_BC1_RGB_SRGB_BLOCK = 132,
+    VK_FORMAT_BC1_RGBA_UNORM_BLOCK = 133,
+    VK_FORMAT_BC1_RGBA_SRGB_BLOCK = 134,
+    VK_FORMAT_BC2_UNORM_BLOCK = 135,
+    VK_FORMAT_BC2_SRGB_BLOCK = 136,
+    VK_FORMAT_BC3_UNORM_BLOCK = 137,
+    VK_FORMAT_BC3_SRGB_BLOCK = 138,
+    VK_FORMAT_BC4_UNORM_BLOCK = 139,
+    VK_FORMAT_BC4_SNORM_BLOCK = 140,
+    VK_FORMAT_BC5_UNORM_BLOCK = 141,
+    VK_FORMAT_BC5_SNORM_BLOCK = 142,
+    VK_FORMAT_BC6H_UFLOAT_BLOCK = 143,
+    VK_FORMAT_BC6H_SFLOAT_BLOCK = 144,
+    VK_FORMAT_BC7_UNORM_BLOCK = 145,
+    VK_FORMAT_BC7_SRGB_BLOCK = 146,
+    VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK = 147,
+    VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK = 148,
+    VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK = 149,
+    VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK = 150,
+    VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK = 151,
+    VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK = 152,
+    VK_FORMAT_EAC_R11_UNORM_BLOCK = 153,
+    VK_FORMAT_EAC_R11_SNORM_BLOCK = 154,
+    VK_FORMAT_EAC_R11G11_UNORM_BLOCK = 155,
+    VK_FORMAT_EAC_R11G11_SNORM_BLOCK = 156,
+    VK_FORMAT_ASTC_4x4_UNORM_BLOCK = 157,
+    VK_FORMAT_ASTC_4x4_SRGB_BLOCK = 158,
+    VK_FORMAT_ASTC_5x4_UNORM_BLOCK = 159,
+    VK_FORMAT_ASTC_5x4_SRGB_BLOCK = 160,
+    VK_FORMAT_ASTC_5x5_UNORM_BLOCK = 161,
+    VK_FORMAT_ASTC_5x5_SRGB_BLOCK = 162,
+    VK_FORMAT_ASTC_6x5_UNORM_BLOCK = 163,
+    VK_FORMAT_ASTC_6x5_SRGB_BLOCK = 164,
+    VK_FORMAT_ASTC_6x6_UNORM_BLOCK = 165,
+    VK_FORMAT_ASTC_6x6_SRGB_BLOCK = 166,
+    VK_FORMAT_ASTC_8x5_UNORM_BLOCK = 167,
+    VK_FORMAT_ASTC_8x5_SRGB_BLOCK = 168,
+    VK_FORMAT_ASTC_8x6_UNORM_BLOCK = 169,
+    VK_FORMAT_ASTC_8x6_SRGB_BLOCK = 170,
+    VK_FORMAT_ASTC_8x8_UNORM_BLOCK = 171,
+    VK_FORMAT_ASTC_8x8_SRGB_BLOCK = 172,
+    VK_FORMAT_ASTC_10x5_UNORM_BLOCK = 173,
+    VK_FORMAT_ASTC_10x5_SRGB_BLOCK = 174,
+    VK_FORMAT_ASTC_10x6_UNORM_BLOCK = 175,
+    VK_FORMAT_ASTC_10x6_SRGB_BLOCK = 176,
+    VK_FORMAT_ASTC_10x8_UNORM_BLOCK = 177,
+    VK_FORMAT_ASTC_10x8_SRGB_BLOCK = 178,
+    VK_FORMAT_ASTC_10x10_UNORM_BLOCK = 179,
+    VK_FORMAT_ASTC_10x10_SRGB_BLOCK = 180,
+    VK_FORMAT_ASTC_12x10_UNORM_BLOCK = 181,
+    VK_FORMAT_ASTC_12x10_SRGB_BLOCK = 182,
+    VK_FORMAT_ASTC_12x12_UNORM_BLOCK = 183,
+    VK_FORMAT_ASTC_12x12_SRGB_BLOCK = 184,
+    VK_FORMAT_G8B8G8R8_422_UNORM = 1000156000,
+    VK_FORMAT_B8G8R8G8_422_UNORM = 1000156001,
+    VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM = 1000156002,
+    VK_FORMAT_G8_B8R8_2PLANE_420_UNORM = 1000156003,
+    VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM = 1000156004,
+    VK_FORMAT_G8_B8R8_2PLANE_422_UNORM = 1000156005,
+    VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM = 1000156006,
+    VK_FORMAT_R10X6_UNORM_PACK16 = 1000156007,
+    VK_FORMAT_R10X6G10X6_UNORM_2PACK16 = 1000156008,
+    VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16 = 1000156009,
+    VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16 = 1000156010,
+    VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16 = 1000156011,
+    VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16 = 1000156012,
+    VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16 = 1000156013,
+    VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16 = 1000156014,
+    VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16 = 1000156015,
+    VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16 = 1000156016,
+    VK_FORMAT_R12X4_UNORM_PACK16 = 1000156017,
+    VK_FORMAT_R12X4G12X4_UNORM_2PACK16 = 1000156018,
+    VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16 = 1000156019,
+    VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16 = 1000156020,
+    VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16 = 1000156021,
+    VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16 = 1000156022,
+    VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16 = 1000156023,
+    VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16 = 1000156024,
+    VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16 = 1000156025,
+    VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16 = 1000156026,
+    VK_FORMAT_G16B16G16R16_422_UNORM = 1000156027,
+    VK_FORMAT_B16G16R16G16_422_UNORM = 1000156028,
+    VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM = 1000156029,
+    VK_FORMAT_G16_B16R16_2PLANE_420_UNORM = 1000156030,
+    VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM = 1000156031,
+    VK_FORMAT_G16_B16R16_2PLANE_422_UNORM = 1000156032,
+    VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM = 1000156033,
+    VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG = 1000054000,
+    VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG = 1000054001,
+    VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG = 1000054002,
+    VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG = 1000054003,
+    VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG = 1000054004,
+    VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG = 1000054005,
+    VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG = 1000054006,
+    VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG = 1000054007,
+    VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT = 1000066000,
+    VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT = 1000066001,
+    VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT = 1000066002,
+    VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT = 1000066003,
+    VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT = 1000066004,
+    VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT = 1000066005,
+    VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT = 1000066006,
+    VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT = 1000066007,
+    VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT = 1000066008,
+    VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT = 1000066009,
+    VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT = 1000066010,
+    VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT = 1000066011,
+    VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT = 1000066012,
+    VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT = 1000066013,
+    VK_FORMAT_G8B8G8R8_422_UNORM_KHR = VK_FORMAT_G8B8G8R8_422_UNORM,
+    VK_FORMAT_B8G8R8G8_422_UNORM_KHR = VK_FORMAT_B8G8R8G8_422_UNORM,
+    VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM,
+    VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
+    VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM,
+    VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM,
+    VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM,
+    VK_FORMAT_R10X6_UNORM_PACK16_KHR = VK_FORMAT_R10X6_UNORM_PACK16,
+    VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16,
+    VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16,
+    VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16,
+    VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16,
+    VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16,
+    VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16,
+    VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16,
+    VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16,
+    VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16,
+    VK_FORMAT_R12X4_UNORM_PACK16_KHR = VK_FORMAT_R12X4_UNORM_PACK16,
+    VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16,
+    VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16,
+    VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16,
+    VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16,
+    VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16,
+    VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16,
+    VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16,
+    VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16,
+    VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16,
+    VK_FORMAT_G16B16G16R16_422_UNORM_KHR = VK_FORMAT_G16B16G16R16_422_UNORM,
+    VK_FORMAT_B16G16R16G16_422_UNORM_KHR = VK_FORMAT_B16G16R16G16_422_UNORM,
+    VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM,
+    VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM,
+    VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM,
+    VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM,
+    VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM,
+    VK_FORMAT_BEGIN_RANGE = VK_FORMAT_UNDEFINED,
+    VK_FORMAT_END_RANGE = VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
+    VK_FORMAT_RANGE_SIZE = (VK_FORMAT_ASTC_12x12_SRGB_BLOCK - VK_FORMAT_UNDEFINED + 1),
+    VK_FORMAT_MAX_ENUM = 0x7FFFFFFF
+} VkFormat;
+
+typedef enum VkImageType {
+    VK_IMAGE_TYPE_1D = 0,
+    VK_IMAGE_TYPE_2D = 1,
+    VK_IMAGE_TYPE_3D = 2,
+    VK_IMAGE_TYPE_BEGIN_RANGE = VK_IMAGE_TYPE_1D,
+    VK_IMAGE_TYPE_END_RANGE = VK_IMAGE_TYPE_3D,
+    VK_IMAGE_TYPE_RANGE_SIZE = (VK_IMAGE_TYPE_3D - VK_IMAGE_TYPE_1D + 1),
+    VK_IMAGE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkImageType;
+
+typedef enum VkImageTiling {
+    VK_IMAGE_TILING_OPTIMAL = 0,
+    VK_IMAGE_TILING_LINEAR = 1,
+    VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT = 1000158000,
+    VK_IMAGE_TILING_BEGIN_RANGE = VK_IMAGE_TILING_OPTIMAL,
+    VK_IMAGE_TILING_END_RANGE = VK_IMAGE_TILING_LINEAR,
+    VK_IMAGE_TILING_RANGE_SIZE = (VK_IMAGE_TILING_LINEAR - VK_IMAGE_TILING_OPTIMAL + 1),
+    VK_IMAGE_TILING_MAX_ENUM = 0x7FFFFFFF
+} VkImageTiling;
+
+typedef enum VkPhysicalDeviceType {
+    VK_PHYSICAL_DEVICE_TYPE_OTHER = 0,
+    VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU = 1,
+    VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU = 2,
+    VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU = 3,
+    VK_PHYSICAL_DEVICE_TYPE_CPU = 4,
+    VK_PHYSICAL_DEVICE_TYPE_BEGIN_RANGE = VK_PHYSICAL_DEVICE_TYPE_OTHER,
+    VK_PHYSICAL_DEVICE_TYPE_END_RANGE = VK_PHYSICAL_DEVICE_TYPE_CPU,
+    VK_PHYSICAL_DEVICE_TYPE_RANGE_SIZE = (VK_PHYSICAL_DEVICE_TYPE_CPU - VK_PHYSICAL_DEVICE_TYPE_OTHER + 1),
+    VK_PHYSICAL_DEVICE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkPhysicalDeviceType;
+
+typedef enum VkQueryType {
+    VK_QUERY_TYPE_OCCLUSION = 0,
+    VK_QUERY_TYPE_PIPELINE_STATISTICS = 1,
+    VK_QUERY_TYPE_TIMESTAMP = 2,
+    VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT = 1000028004,
+    VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR = 1000116000,
+    VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV = 1000165000,
+    VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL = 1000210000,
+    VK_QUERY_TYPE_BEGIN_RANGE = VK_QUERY_TYPE_OCCLUSION,
+    VK_QUERY_TYPE_END_RANGE = VK_QUERY_TYPE_TIMESTAMP,
+    VK_QUERY_TYPE_RANGE_SIZE = (VK_QUERY_TYPE_TIMESTAMP - VK_QUERY_TYPE_OCCLUSION + 1),
+    VK_QUERY_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkQueryType;
+
+typedef enum VkSharingMode {
+    VK_SHARING_MODE_EXCLUSIVE = 0,
+    VK_SHARING_MODE_CONCURRENT = 1,
+    VK_SHARING_MODE_BEGIN_RANGE = VK_SHARING_MODE_EXCLUSIVE,
+    VK_SHARING_MODE_END_RANGE = VK_SHARING_MODE_CONCURRENT,
+    VK_SHARING_MODE_RANGE_SIZE = (VK_SHARING_MODE_CONCURRENT - VK_SHARING_MODE_EXCLUSIVE + 1),
+    VK_SHARING_MODE_MAX_ENUM = 0x7FFFFFFF
+} VkSharingMode;
+
+typedef enum VkImageLayout {
+    VK_IMAGE_LAYOUT_UNDEFINED = 0,
+    VK_IMAGE_LAYOUT_GENERAL = 1,
+    VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL = 2,
+    VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL = 3,
+    VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL = 4,
+    VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL = 5,
+    VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL = 6,
+    VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL = 7,
+    VK_IMAGE_LAYOUT_PREINITIALIZED = 8,
+    VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL = 1000117000,
+    VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL = 1000117001,
+    VK_IMAGE_LAYOUT_PRESENT_SRC_KHR = 1000001002,
+    VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR = 1000111000,
+    VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV = 1000164003,
+    VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT = 1000218000,
+    VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR = 1000241000,
+    VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR = 1000241001,
+    VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR = 1000241002,
+    VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR = 1000241003,
+    VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL,
+    VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL,
+    VK_IMAGE_LAYOUT_BEGIN_RANGE = VK_IMAGE_LAYOUT_UNDEFINED,
+    VK_IMAGE_LAYOUT_END_RANGE = VK_IMAGE_LAYOUT_PREINITIALIZED,
+    VK_IMAGE_LAYOUT_RANGE_SIZE = (VK_IMAGE_LAYOUT_PREINITIALIZED - VK_IMAGE_LAYOUT_UNDEFINED + 1),
+    VK_IMAGE_LAYOUT_MAX_ENUM = 0x7FFFFFFF
+} VkImageLayout;
+
+typedef enum VkImageViewType {
+    VK_IMAGE_VIEW_TYPE_1D = 0,
+    VK_IMAGE_VIEW_TYPE_2D = 1,
+    VK_IMAGE_VIEW_TYPE_3D = 2,
+    VK_IMAGE_VIEW_TYPE_CUBE = 3,
+    VK_IMAGE_VIEW_TYPE_1D_ARRAY = 4,
+    VK_IMAGE_VIEW_TYPE_2D_ARRAY = 5,
+    VK_IMAGE_VIEW_TYPE_CUBE_ARRAY = 6,
+    VK_IMAGE_VIEW_TYPE_BEGIN_RANGE = VK_IMAGE_VIEW_TYPE_1D,
+    VK_IMAGE_VIEW_TYPE_END_RANGE = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY,
+    VK_IMAGE_VIEW_TYPE_RANGE_SIZE = (VK_IMAGE_VIEW_TYPE_CUBE_ARRAY - VK_IMAGE_VIEW_TYPE_1D + 1),
+    VK_IMAGE_VIEW_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkImageViewType;
+
+typedef enum VkComponentSwizzle {
+    VK_COMPONENT_SWIZZLE_IDENTITY = 0,
+    VK_COMPONENT_SWIZZLE_ZERO = 1,
+    VK_COMPONENT_SWIZZLE_ONE = 2,
+    VK_COMPONENT_SWIZZLE_R = 3,
+    VK_COMPONENT_SWIZZLE_G = 4,
+    VK_COMPONENT_SWIZZLE_B = 5,
+    VK_COMPONENT_SWIZZLE_A = 6,
+    VK_COMPONENT_SWIZZLE_BEGIN_RANGE = VK_COMPONENT_SWIZZLE_IDENTITY,
+    VK_COMPONENT_SWIZZLE_END_RANGE = VK_COMPONENT_SWIZZLE_A,
+    VK_COMPONENT_SWIZZLE_RANGE_SIZE = (VK_COMPONENT_SWIZZLE_A - VK_COMPONENT_SWIZZLE_IDENTITY + 1),
+    VK_COMPONENT_SWIZZLE_MAX_ENUM = 0x7FFFFFFF
+} VkComponentSwizzle;
+
+typedef enum VkVertexInputRate {
+    VK_VERTEX_INPUT_RATE_VERTEX = 0,
+    VK_VERTEX_INPUT_RATE_INSTANCE = 1,
+    VK_VERTEX_INPUT_RATE_BEGIN_RANGE = VK_VERTEX_INPUT_RATE_VERTEX,
+    VK_VERTEX_INPUT_RATE_END_RANGE = VK_VERTEX_INPUT_RATE_INSTANCE,
+    VK_VERTEX_INPUT_RATE_RANGE_SIZE = (VK_VERTEX_INPUT_RATE_INSTANCE - VK_VERTEX_INPUT_RATE_VERTEX + 1),
+    VK_VERTEX_INPUT_RATE_MAX_ENUM = 0x7FFFFFFF
+} VkVertexInputRate;
+
+typedef enum VkPrimitiveTopology {
+    VK_PRIMITIVE_TOPOLOGY_POINT_LIST = 0,
+    VK_PRIMITIVE_TOPOLOGY_LINE_LIST = 1,
+    VK_PRIMITIVE_TOPOLOGY_LINE_STRIP = 2,
+    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST = 3,
+    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP = 4,
+    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN = 5,
+    VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY = 6,
+    VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY = 7,
+    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY = 8,
+    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY = 9,
+    VK_PRIMITIVE_TOPOLOGY_PATCH_LIST = 10,
+    VK_PRIMITIVE_TOPOLOGY_BEGIN_RANGE = VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
+    VK_PRIMITIVE_TOPOLOGY_END_RANGE = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST,
+    VK_PRIMITIVE_TOPOLOGY_RANGE_SIZE = (VK_PRIMITIVE_TOPOLOGY_PATCH_LIST - VK_PRIMITIVE_TOPOLOGY_POINT_LIST + 1),
+    VK_PRIMITIVE_TOPOLOGY_MAX_ENUM = 0x7FFFFFFF
+} VkPrimitiveTopology;
+
+typedef enum VkPolygonMode {
+    VK_POLYGON_MODE_FILL = 0,
+    VK_POLYGON_MODE_LINE = 1,
+    VK_POLYGON_MODE_POINT = 2,
+    VK_POLYGON_MODE_FILL_RECTANGLE_NV = 1000153000,
+    VK_POLYGON_MODE_BEGIN_RANGE = VK_POLYGON_MODE_FILL,
+    VK_POLYGON_MODE_END_RANGE = VK_POLYGON_MODE_POINT,
+    VK_POLYGON_MODE_RANGE_SIZE = (VK_POLYGON_MODE_POINT - VK_POLYGON_MODE_FILL + 1),
+    VK_POLYGON_MODE_MAX_ENUM = 0x7FFFFFFF
+} VkPolygonMode;
+
+typedef enum VkFrontFace {
+    VK_FRONT_FACE_COUNTER_CLOCKWISE = 0,
+    VK_FRONT_FACE_CLOCKWISE = 1,
+    VK_FRONT_FACE_BEGIN_RANGE = VK_FRONT_FACE_COUNTER_CLOCKWISE,
+    VK_FRONT_FACE_END_RANGE = VK_FRONT_FACE_CLOCKWISE,
+    VK_FRONT_FACE_RANGE_SIZE = (VK_FRONT_FACE_CLOCKWISE - VK_FRONT_FACE_COUNTER_CLOCKWISE + 1),
+    VK_FRONT_FACE_MAX_ENUM = 0x7FFFFFFF
+} VkFrontFace;
+
+typedef enum VkCompareOp {
+    VK_COMPARE_OP_NEVER = 0,
+    VK_COMPARE_OP_LESS = 1,
+    VK_COMPARE_OP_EQUAL = 2,
+    VK_COMPARE_OP_LESS_OR_EQUAL = 3,
+    VK_COMPARE_OP_GREATER = 4,
+    VK_COMPARE_OP_NOT_EQUAL = 5,
+    VK_COMPARE_OP_GREATER_OR_EQUAL = 6,
+    VK_COMPARE_OP_ALWAYS = 7,
+    VK_COMPARE_OP_BEGIN_RANGE = VK_COMPARE_OP_NEVER,
+    VK_COMPARE_OP_END_RANGE = VK_COMPARE_OP_ALWAYS,
+    VK_COMPARE_OP_RANGE_SIZE = (VK_COMPARE_OP_ALWAYS - VK_COMPARE_OP_NEVER + 1),
+    VK_COMPARE_OP_MAX_ENUM = 0x7FFFFFFF
+} VkCompareOp;
+
+typedef enum VkStencilOp {
+    VK_STENCIL_OP_KEEP = 0,
+    VK_STENCIL_OP_ZERO = 1,
+    VK_STENCIL_OP_REPLACE = 2,
+    VK_STENCIL_OP_INCREMENT_AND_CLAMP = 3,
+    VK_STENCIL_OP_DECREMENT_AND_CLAMP = 4,
+    VK_STENCIL_OP_INVERT = 5,
+    VK_STENCIL_OP_INCREMENT_AND_WRAP = 6,
+    VK_STENCIL_OP_DECREMENT_AND_WRAP = 7,
+    VK_STENCIL_OP_BEGIN_RANGE = VK_STENCIL_OP_KEEP,
+    VK_STENCIL_OP_END_RANGE = VK_STENCIL_OP_DECREMENT_AND_WRAP,
+    VK_STENCIL_OP_RANGE_SIZE = (VK_STENCIL_OP_DECREMENT_AND_WRAP - VK_STENCIL_OP_KEEP + 1),
+    VK_STENCIL_OP_MAX_ENUM = 0x7FFFFFFF
+} VkStencilOp;
+
+typedef enum VkLogicOp {
+    VK_LOGIC_OP_CLEAR = 0,
+    VK_LOGIC_OP_AND = 1,
+    VK_LOGIC_OP_AND_REVERSE = 2,
+    VK_LOGIC_OP_COPY = 3,
+    VK_LOGIC_OP_AND_INVERTED = 4,
+    VK_LOGIC_OP_NO_OP = 5,
+    VK_LOGIC_OP_XOR = 6,
+    VK_LOGIC_OP_OR = 7,
+    VK_LOGIC_OP_NOR = 8,
+    VK_LOGIC_OP_EQUIVALENT = 9,
+    VK_LOGIC_OP_INVERT = 10,
+    VK_LOGIC_OP_OR_REVERSE = 11,
+    VK_LOGIC_OP_COPY_INVERTED = 12,
+    VK_LOGIC_OP_OR_INVERTED = 13,
+    VK_LOGIC_OP_NAND = 14,
+    VK_LOGIC_OP_SET = 15,
+    VK_LOGIC_OP_BEGIN_RANGE = VK_LOGIC_OP_CLEAR,
+    VK_LOGIC_OP_END_RANGE = VK_LOGIC_OP_SET,
+    VK_LOGIC_OP_RANGE_SIZE = (VK_LOGIC_OP_SET - VK_LOGIC_OP_CLEAR + 1),
+    VK_LOGIC_OP_MAX_ENUM = 0x7FFFFFFF
+} VkLogicOp;
+
+typedef enum VkBlendFactor {
+    VK_BLEND_FACTOR_ZERO = 0,
+    VK_BLEND_FACTOR_ONE = 1,
+    VK_BLEND_FACTOR_SRC_COLOR = 2,
+    VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR = 3,
+    VK_BLEND_FACTOR_DST_COLOR = 4,
+    VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR = 5,
+    VK_BLEND_FACTOR_SRC_ALPHA = 6,
+    VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA = 7,
+    VK_BLEND_FACTOR_DST_ALPHA = 8,
+    VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA = 9,
+    VK_BLEND_FACTOR_CONSTANT_COLOR = 10,
+    VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR = 11,
+    VK_BLEND_FACTOR_CONSTANT_ALPHA = 12,
+    VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA = 13,
+    VK_BLEND_FACTOR_SRC_ALPHA_SATURATE = 14,
+    VK_BLEND_FACTOR_SRC1_COLOR = 15,
+    VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR = 16,
+    VK_BLEND_FACTOR_SRC1_ALPHA = 17,
+    VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA = 18,
+    VK_BLEND_FACTOR_BEGIN_RANGE = VK_BLEND_FACTOR_ZERO,
+    VK_BLEND_FACTOR_END_RANGE = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA,
+    VK_BLEND_FACTOR_RANGE_SIZE = (VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA - VK_BLEND_FACTOR_ZERO + 1),
+    VK_BLEND_FACTOR_MAX_ENUM = 0x7FFFFFFF
+} VkBlendFactor;
+
+typedef enum VkBlendOp {
+    VK_BLEND_OP_ADD = 0,
+    VK_BLEND_OP_SUBTRACT = 1,
+    VK_BLEND_OP_REVERSE_SUBTRACT = 2,
+    VK_BLEND_OP_MIN = 3,
+    VK_BLEND_OP_MAX = 4,
+    VK_BLEND_OP_ZERO_EXT = 1000148000,
+    VK_BLEND_OP_SRC_EXT = 1000148001,
+    VK_BLEND_OP_DST_EXT = 1000148002,
+    VK_BLEND_OP_SRC_OVER_EXT = 1000148003,
+    VK_BLEND_OP_DST_OVER_EXT = 1000148004,
+    VK_BLEND_OP_SRC_IN_EXT = 1000148005,
+    VK_BLEND_OP_DST_IN_EXT = 1000148006,
+    VK_BLEND_OP_SRC_OUT_EXT = 1000148007,
+    VK_BLEND_OP_DST_OUT_EXT = 1000148008,
+    VK_BLEND_OP_SRC_ATOP_EXT = 1000148009,
+    VK_BLEND_OP_DST_ATOP_EXT = 1000148010,
+    VK_BLEND_OP_XOR_EXT = 1000148011,
+    VK_BLEND_OP_MULTIPLY_EXT = 1000148012,
+    VK_BLEND_OP_SCREEN_EXT = 1000148013,
+    VK_BLEND_OP_OVERLAY_EXT = 1000148014,
+    VK_BLEND_OP_DARKEN_EXT = 1000148015,
+    VK_BLEND_OP_LIGHTEN_EXT = 1000148016,
+    VK_BLEND_OP_COLORDODGE_EXT = 1000148017,
+    VK_BLEND_OP_COLORBURN_EXT = 1000148018,
+    VK_BLEND_OP_HARDLIGHT_EXT = 1000148019,
+    VK_BLEND_OP_SOFTLIGHT_EXT = 1000148020,
+    VK_BLEND_OP_DIFFERENCE_EXT = 1000148021,
+    VK_BLEND_OP_EXCLUSION_EXT = 1000148022,
+    VK_BLEND_OP_INVERT_EXT = 1000148023,
+    VK_BLEND_OP_INVERT_RGB_EXT = 1000148024,
+    VK_BLEND_OP_LINEARDODGE_EXT = 1000148025,
+    VK_BLEND_OP_LINEARBURN_EXT = 1000148026,
+    VK_BLEND_OP_VIVIDLIGHT_EXT = 1000148027,
+    VK_BLEND_OP_LINEARLIGHT_EXT = 1000148028,
+    VK_BLEND_OP_PINLIGHT_EXT = 1000148029,
+    VK_BLEND_OP_HARDMIX_EXT = 1000148030,
+    VK_BLEND_OP_HSL_HUE_EXT = 1000148031,
+    VK_BLEND_OP_HSL_SATURATION_EXT = 1000148032,
+    VK_BLEND_OP_HSL_COLOR_EXT = 1000148033,
+    VK_BLEND_OP_HSL_LUMINOSITY_EXT = 1000148034,
+    VK_BLEND_OP_PLUS_EXT = 1000148035,
+    VK_BLEND_OP_PLUS_CLAMPED_EXT = 1000148036,
+    VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT = 1000148037,
+    VK_BLEND_OP_PLUS_DARKER_EXT = 1000148038,
+    VK_BLEND_OP_MINUS_EXT = 1000148039,
+    VK_BLEND_OP_MINUS_CLAMPED_EXT = 1000148040,
+    VK_BLEND_OP_CONTRAST_EXT = 1000148041,
+    VK_BLEND_OP_INVERT_OVG_EXT = 1000148042,
+    VK_BLEND_OP_RED_EXT = 1000148043,
+    VK_BLEND_OP_GREEN_EXT = 1000148044,
+    VK_BLEND_OP_BLUE_EXT = 1000148045,
+    VK_BLEND_OP_BEGIN_RANGE = VK_BLEND_OP_ADD,
+    VK_BLEND_OP_END_RANGE = VK_BLEND_OP_MAX,
+    VK_BLEND_OP_RANGE_SIZE = (VK_BLEND_OP_MAX - VK_BLEND_OP_ADD + 1),
+    VK_BLEND_OP_MAX_ENUM = 0x7FFFFFFF
+} VkBlendOp;
+
+typedef enum VkDynamicState {
+    VK_DYNAMIC_STATE_VIEWPORT = 0,
+    VK_DYNAMIC_STATE_SCISSOR = 1,
+    VK_DYNAMIC_STATE_LINE_WIDTH = 2,
+    VK_DYNAMIC_STATE_DEPTH_BIAS = 3,
+    VK_DYNAMIC_STATE_BLEND_CONSTANTS = 4,
+    VK_DYNAMIC_STATE_DEPTH_BOUNDS = 5,
+    VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK = 6,
+    VK_DYNAMIC_STATE_STENCIL_WRITE_MASK = 7,
+    VK_DYNAMIC_STATE_STENCIL_REFERENCE = 8,
+    VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV = 1000087000,
+    VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT = 1000099000,
+    VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT = 1000143000,
+    VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV = 1000164004,
+    VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV = 1000164006,
+    VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV = 1000205001,
+    VK_DYNAMIC_STATE_LINE_STIPPLE_EXT = 1000259000,
+    VK_DYNAMIC_STATE_BEGIN_RANGE = VK_DYNAMIC_STATE_VIEWPORT,
+    VK_DYNAMIC_STATE_END_RANGE = VK_DYNAMIC_STATE_STENCIL_REFERENCE,
+    VK_DYNAMIC_STATE_RANGE_SIZE = (VK_DYNAMIC_STATE_STENCIL_REFERENCE - VK_DYNAMIC_STATE_VIEWPORT + 1),
+    VK_DYNAMIC_STATE_MAX_ENUM = 0x7FFFFFFF
+} VkDynamicState;
+
+typedef enum VkFilter {
+    VK_FILTER_NEAREST = 0,
+    VK_FILTER_LINEAR = 1,
+    VK_FILTER_CUBIC_IMG = 1000015000,
+    VK_FILTER_CUBIC_EXT = VK_FILTER_CUBIC_IMG,
+    VK_FILTER_BEGIN_RANGE = VK_FILTER_NEAREST,
+    VK_FILTER_END_RANGE = VK_FILTER_LINEAR,
+    VK_FILTER_RANGE_SIZE = (VK_FILTER_LINEAR - VK_FILTER_NEAREST + 1),
+    VK_FILTER_MAX_ENUM = 0x7FFFFFFF
+} VkFilter;
+
+typedef enum VkSamplerMipmapMode {
+    VK_SAMPLER_MIPMAP_MODE_NEAREST = 0,
+    VK_SAMPLER_MIPMAP_MODE_LINEAR = 1,
+    VK_SAMPLER_MIPMAP_MODE_BEGIN_RANGE = VK_SAMPLER_MIPMAP_MODE_NEAREST,
+    VK_SAMPLER_MIPMAP_MODE_END_RANGE = VK_SAMPLER_MIPMAP_MODE_LINEAR,
+    VK_SAMPLER_MIPMAP_MODE_RANGE_SIZE = (VK_SAMPLER_MIPMAP_MODE_LINEAR - VK_SAMPLER_MIPMAP_MODE_NEAREST + 1),
+    VK_SAMPLER_MIPMAP_MODE_MAX_ENUM = 0x7FFFFFFF
+} VkSamplerMipmapMode;
+
+typedef enum VkSamplerAddressMode {
+    VK_SAMPLER_ADDRESS_MODE_REPEAT = 0,
+    VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT = 1,
+    VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE = 2,
+    VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER = 3,
+    VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE = 4,
+    VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,
+    VK_SAMPLER_ADDRESS_MODE_BEGIN_RANGE = VK_SAMPLER_ADDRESS_MODE_REPEAT,
+    VK_SAMPLER_ADDRESS_MODE_END_RANGE = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
+    VK_SAMPLER_ADDRESS_MODE_RANGE_SIZE = (VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER - VK_SAMPLER_ADDRESS_MODE_REPEAT + 1),
+    VK_SAMPLER_ADDRESS_MODE_MAX_ENUM = 0x7FFFFFFF
+} VkSamplerAddressMode;
+
+typedef enum VkBorderColor {
+    VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK = 0,
+    VK_BORDER_COLOR_INT_TRANSPARENT_BLACK = 1,
+    VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK = 2,
+    VK_BORDER_COLOR_INT_OPAQUE_BLACK = 3,
+    VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE = 4,
+    VK_BORDER_COLOR_INT_OPAQUE_WHITE = 5,
+    VK_BORDER_COLOR_BEGIN_RANGE = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
+    VK_BORDER_COLOR_END_RANGE = VK_BORDER_COLOR_INT_OPAQUE_WHITE,
+    VK_BORDER_COLOR_RANGE_SIZE = (VK_BORDER_COLOR_INT_OPAQUE_WHITE - VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK + 1),
+    VK_BORDER_COLOR_MAX_ENUM = 0x7FFFFFFF
+} VkBorderColor;
+
+typedef enum VkDescriptorType {
+    VK_DESCRIPTOR_TYPE_SAMPLER = 0,
+    VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER = 1,
+    VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE = 2,
+    VK_DESCRIPTOR_TYPE_STORAGE_IMAGE = 3,
+    VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER = 4,
+    VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER = 5,
+    VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER = 6,
+    VK_DESCRIPTOR_TYPE_STORAGE_BUFFER = 7,
+    VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC = 8,
+    VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC = 9,
+    VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT = 10,
+    VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT = 1000138000,
+    VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV = 1000165000,
+    VK_DESCRIPTOR_TYPE_BEGIN_RANGE = VK_DESCRIPTOR_TYPE_SAMPLER,
+    VK_DESCRIPTOR_TYPE_END_RANGE = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT,
+    VK_DESCRIPTOR_TYPE_RANGE_SIZE = (VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT - VK_DESCRIPTOR_TYPE_SAMPLER + 1),
+    VK_DESCRIPTOR_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkDescriptorType;
+
+typedef enum VkAttachmentLoadOp {
+    VK_ATTACHMENT_LOAD_OP_LOAD = 0,
+    VK_ATTACHMENT_LOAD_OP_CLEAR = 1,
+    VK_ATTACHMENT_LOAD_OP_DONT_CARE = 2,
+    VK_ATTACHMENT_LOAD_OP_BEGIN_RANGE = VK_ATTACHMENT_LOAD_OP_LOAD,
+    VK_ATTACHMENT_LOAD_OP_END_RANGE = VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+    VK_ATTACHMENT_LOAD_OP_RANGE_SIZE = (VK_ATTACHMENT_LOAD_OP_DONT_CARE - VK_ATTACHMENT_LOAD_OP_LOAD + 1),
+    VK_ATTACHMENT_LOAD_OP_MAX_ENUM = 0x7FFFFFFF
+} VkAttachmentLoadOp;
+
+typedef enum VkAttachmentStoreOp {
+    VK_ATTACHMENT_STORE_OP_STORE = 0,
+    VK_ATTACHMENT_STORE_OP_DONT_CARE = 1,
+    VK_ATTACHMENT_STORE_OP_BEGIN_RANGE = VK_ATTACHMENT_STORE_OP_STORE,
+    VK_ATTACHMENT_STORE_OP_END_RANGE = VK_ATTACHMENT_STORE_OP_DONT_CARE,
+    VK_ATTACHMENT_STORE_OP_RANGE_SIZE = (VK_ATTACHMENT_STORE_OP_DONT_CARE - VK_ATTACHMENT_STORE_OP_STORE + 1),
+    VK_ATTACHMENT_STORE_OP_MAX_ENUM = 0x7FFFFFFF
+} VkAttachmentStoreOp;
+
+typedef enum VkPipelineBindPoint {
+    VK_PIPELINE_BIND_POINT_GRAPHICS = 0,
+    VK_PIPELINE_BIND_POINT_COMPUTE = 1,
+    VK_PIPELINE_BIND_POINT_RAY_TRACING_NV = 1000165000,
+    VK_PIPELINE_BIND_POINT_BEGIN_RANGE = VK_PIPELINE_BIND_POINT_GRAPHICS,
+    VK_PIPELINE_BIND_POINT_END_RANGE = VK_PIPELINE_BIND_POINT_COMPUTE,
+    VK_PIPELINE_BIND_POINT_RANGE_SIZE = (VK_PIPELINE_BIND_POINT_COMPUTE - VK_PIPELINE_BIND_POINT_GRAPHICS + 1),
+    VK_PIPELINE_BIND_POINT_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineBindPoint;
+
+typedef enum VkCommandBufferLevel {
+    VK_COMMAND_BUFFER_LEVEL_PRIMARY = 0,
+    VK_COMMAND_BUFFER_LEVEL_SECONDARY = 1,
+    VK_COMMAND_BUFFER_LEVEL_BEGIN_RANGE = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
+    VK_COMMAND_BUFFER_LEVEL_END_RANGE = VK_COMMAND_BUFFER_LEVEL_SECONDARY,
+    VK_COMMAND_BUFFER_LEVEL_RANGE_SIZE = (VK_COMMAND_BUFFER_LEVEL_SECONDARY - VK_COMMAND_BUFFER_LEVEL_PRIMARY + 1),
+    VK_COMMAND_BUFFER_LEVEL_MAX_ENUM = 0x7FFFFFFF
+} VkCommandBufferLevel;
+
+typedef enum VkIndexType {
+    VK_INDEX_TYPE_UINT16 = 0,
+    VK_INDEX_TYPE_UINT32 = 1,
+    VK_INDEX_TYPE_NONE_NV = 1000165000,
+    VK_INDEX_TYPE_UINT8_EXT = 1000265000,
+    VK_INDEX_TYPE_BEGIN_RANGE = VK_INDEX_TYPE_UINT16,
+    VK_INDEX_TYPE_END_RANGE = VK_INDEX_TYPE_UINT32,
+    VK_INDEX_TYPE_RANGE_SIZE = (VK_INDEX_TYPE_UINT32 - VK_INDEX_TYPE_UINT16 + 1),
+    VK_INDEX_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkIndexType;
+
+typedef enum VkSubpassContents {
+    VK_SUBPASS_CONTENTS_INLINE = 0,
+    VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS = 1,
+    VK_SUBPASS_CONTENTS_BEGIN_RANGE = VK_SUBPASS_CONTENTS_INLINE,
+    VK_SUBPASS_CONTENTS_END_RANGE = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS,
+    VK_SUBPASS_CONTENTS_RANGE_SIZE = (VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS - VK_SUBPASS_CONTENTS_INLINE + 1),
+    VK_SUBPASS_CONTENTS_MAX_ENUM = 0x7FFFFFFF
+} VkSubpassContents;
+
+typedef enum VkObjectType {
+    VK_OBJECT_TYPE_UNKNOWN = 0,
+    VK_OBJECT_TYPE_INSTANCE = 1,
+    VK_OBJECT_TYPE_PHYSICAL_DEVICE = 2,
+    VK_OBJECT_TYPE_DEVICE = 3,
+    VK_OBJECT_TYPE_QUEUE = 4,
+    VK_OBJECT_TYPE_SEMAPHORE = 5,
+    VK_OBJECT_TYPE_COMMAND_BUFFER = 6,
+    VK_OBJECT_TYPE_FENCE = 7,
+    VK_OBJECT_TYPE_DEVICE_MEMORY = 8,
+    VK_OBJECT_TYPE_BUFFER = 9,
+    VK_OBJECT_TYPE_IMAGE = 10,
+    VK_OBJECT_TYPE_EVENT = 11,
+    VK_OBJECT_TYPE_QUERY_POOL = 12,
+    VK_OBJECT_TYPE_BUFFER_VIEW = 13,
+    VK_OBJECT_TYPE_IMAGE_VIEW = 14,
+    VK_OBJECT_TYPE_SHADER_MODULE = 15,
+    VK_OBJECT_TYPE_PIPELINE_CACHE = 16,
+    VK_OBJECT_TYPE_PIPELINE_LAYOUT = 17,
+    VK_OBJECT_TYPE_RENDER_PASS = 18,
+    VK_OBJECT_TYPE_PIPELINE = 19,
+    VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT = 20,
+    VK_OBJECT_TYPE_SAMPLER = 21,
+    VK_OBJECT_TYPE_DESCRIPTOR_POOL = 22,
+    VK_OBJECT_TYPE_DESCRIPTOR_SET = 23,
+    VK_OBJECT_TYPE_FRAMEBUFFER = 24,
+    VK_OBJECT_TYPE_COMMAND_POOL = 25,
+    VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION = 1000156000,
+    VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE = 1000085000,
+    VK_OBJECT_TYPE_SURFACE_KHR = 1000000000,
+    VK_OBJECT_TYPE_SWAPCHAIN_KHR = 1000001000,
+    VK_OBJECT_TYPE_DISPLAY_KHR = 1000002000,
+    VK_OBJECT_TYPE_DISPLAY_MODE_KHR = 1000002001,
+    VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT = 1000011000,
+    VK_OBJECT_TYPE_OBJECT_TABLE_NVX = 1000086000,
+    VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX = 1000086001,
+    VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT = 1000128000,
+    VK_OBJECT_TYPE_VALIDATION_CACHE_EXT = 1000160000,
+    VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV = 1000165000,
+    VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL = 1000210000,
+    VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE,
+    VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,
+    VK_OBJECT_TYPE_BEGIN_RANGE = VK_OBJECT_TYPE_UNKNOWN,
+    VK_OBJECT_TYPE_END_RANGE = VK_OBJECT_TYPE_COMMAND_POOL,
+    VK_OBJECT_TYPE_RANGE_SIZE = (VK_OBJECT_TYPE_COMMAND_POOL - VK_OBJECT_TYPE_UNKNOWN + 1),
+    VK_OBJECT_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkObjectType;
+
+typedef enum VkVendorId {
+    VK_VENDOR_ID_VIV = 0x10001,
+    VK_VENDOR_ID_VSI = 0x10002,
+    VK_VENDOR_ID_KAZAN = 0x10003,
+    VK_VENDOR_ID_BEGIN_RANGE = VK_VENDOR_ID_VIV,
+    VK_VENDOR_ID_END_RANGE = VK_VENDOR_ID_KAZAN,
+    VK_VENDOR_ID_RANGE_SIZE = (VK_VENDOR_ID_KAZAN - VK_VENDOR_ID_VIV + 1),
+    VK_VENDOR_ID_MAX_ENUM = 0x7FFFFFFF
+} VkVendorId;
+typedef VkFlags VkInstanceCreateFlags;
+
+typedef enum VkFormatFeatureFlagBits {
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT = 0x00000001,
+    VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT = 0x00000002,
+    VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT = 0x00000004,
+    VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000008,
+    VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT = 0x00000010,
+    VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT = 0x00000020,
+    VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT = 0x00000040,
+    VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT = 0x00000080,
+    VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT = 0x00000100,
+    VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000200,
+    VK_FORMAT_FEATURE_BLIT_SRC_BIT = 0x00000400,
+    VK_FORMAT_FEATURE_BLIT_DST_BIT = 0x00000800,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT = 0x00001000,
+    VK_FORMAT_FEATURE_TRANSFER_SRC_BIT = 0x00004000,
+    VK_FORMAT_FEATURE_TRANSFER_DST_BIT = 0x00008000,
+    VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT = 0x00020000,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT = 0x00040000,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT = 0x00080000,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT = 0x00100000,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT = 0x00200000,
+    VK_FORMAT_FEATURE_DISJOINT_BIT = 0x00400000,
+    VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT = 0x00800000,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG = 0x00002000,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT = 0x00010000,
+    VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x01000000,
+    VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT,
+    VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
+    VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT,
+    VK_FORMAT_FEATURE_DISJOINT_BIT_KHR = VK_FORMAT_FEATURE_DISJOINT_BIT,
+    VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG,
+    VK_FORMAT_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkFormatFeatureFlagBits;
+typedef VkFlags VkFormatFeatureFlags;
+
+typedef enum VkImageUsageFlagBits {
+    VK_IMAGE_USAGE_TRANSFER_SRC_BIT = 0x00000001,
+    VK_IMAGE_USAGE_TRANSFER_DST_BIT = 0x00000002,
+    VK_IMAGE_USAGE_SAMPLED_BIT = 0x00000004,
+    VK_IMAGE_USAGE_STORAGE_BIT = 0x00000008,
+    VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT = 0x00000010,
+    VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000020,
+    VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT = 0x00000040,
+    VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT = 0x00000080,
+    VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV = 0x00000100,
+    VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x00000200,
+    VK_IMAGE_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkImageUsageFlagBits;
+typedef VkFlags VkImageUsageFlags;
+
+typedef enum VkImageCreateFlagBits {
+    VK_IMAGE_CREATE_SPARSE_BINDING_BIT = 0x00000001,
+    VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002,
+    VK_IMAGE_CREATE_SPARSE_ALIASED_BIT = 0x00000004,
+    VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT = 0x00000008,
+    VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT = 0x00000010,
+    VK_IMAGE_CREATE_ALIAS_BIT = 0x00000400,
+    VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT = 0x00000040,
+    VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT = 0x00000020,
+    VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT = 0x00000080,
+    VK_IMAGE_CREATE_EXTENDED_USAGE_BIT = 0x00000100,
+    VK_IMAGE_CREATE_PROTECTED_BIT = 0x00000800,
+    VK_IMAGE_CREATE_DISJOINT_BIT = 0x00000200,
+    VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV = 0x00002000,
+    VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT = 0x00001000,
+    VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT = 0x00004000,
+    VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT,
+    VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT,
+    VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT,
+    VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT,
+    VK_IMAGE_CREATE_DISJOINT_BIT_KHR = VK_IMAGE_CREATE_DISJOINT_BIT,
+    VK_IMAGE_CREATE_ALIAS_BIT_KHR = VK_IMAGE_CREATE_ALIAS_BIT,
+    VK_IMAGE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkImageCreateFlagBits;
+typedef VkFlags VkImageCreateFlags;
+
+typedef enum VkSampleCountFlagBits {
+    VK_SAMPLE_COUNT_1_BIT = 0x00000001,
+    VK_SAMPLE_COUNT_2_BIT = 0x00000002,
+    VK_SAMPLE_COUNT_4_BIT = 0x00000004,
+    VK_SAMPLE_COUNT_8_BIT = 0x00000008,
+    VK_SAMPLE_COUNT_16_BIT = 0x00000010,
+    VK_SAMPLE_COUNT_32_BIT = 0x00000020,
+    VK_SAMPLE_COUNT_64_BIT = 0x00000040,
+    VK_SAMPLE_COUNT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSampleCountFlagBits;
+typedef VkFlags VkSampleCountFlags;
+
+typedef enum VkQueueFlagBits {
+    VK_QUEUE_GRAPHICS_BIT = 0x00000001,
+    VK_QUEUE_COMPUTE_BIT = 0x00000002,
+    VK_QUEUE_TRANSFER_BIT = 0x00000004,
+    VK_QUEUE_SPARSE_BINDING_BIT = 0x00000008,
+    VK_QUEUE_PROTECTED_BIT = 0x00000010,
+    VK_QUEUE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkQueueFlagBits;
+typedef VkFlags VkQueueFlags;
+
+typedef enum VkMemoryPropertyFlagBits {
+    VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT = 0x00000001,
+    VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT = 0x00000002,
+    VK_MEMORY_PROPERTY_HOST_COHERENT_BIT = 0x00000004,
+    VK_MEMORY_PROPERTY_HOST_CACHED_BIT = 0x00000008,
+    VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT = 0x00000010,
+    VK_MEMORY_PROPERTY_PROTECTED_BIT = 0x00000020,
+    VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD = 0x00000040,
+    VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD = 0x00000080,
+    VK_MEMORY_PROPERTY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkMemoryPropertyFlagBits;
+typedef VkFlags VkMemoryPropertyFlags;
+
+typedef enum VkMemoryHeapFlagBits {
+    VK_MEMORY_HEAP_DEVICE_LOCAL_BIT = 0x00000001,
+    VK_MEMORY_HEAP_MULTI_INSTANCE_BIT = 0x00000002,
+    VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT,
+    VK_MEMORY_HEAP_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkMemoryHeapFlagBits;
+typedef VkFlags VkMemoryHeapFlags;
+typedef VkFlags VkDeviceCreateFlags;
+
+typedef enum VkDeviceQueueCreateFlagBits {
+    VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT = 0x00000001,
+    VK_DEVICE_QUEUE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkDeviceQueueCreateFlagBits;
+typedef VkFlags VkDeviceQueueCreateFlags;
+
+typedef enum VkPipelineStageFlagBits {
+    VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT = 0x00000001,
+    VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT = 0x00000002,
+    VK_PIPELINE_STAGE_VERTEX_INPUT_BIT = 0x00000004,
+    VK_PIPELINE_STAGE_VERTEX_SHADER_BIT = 0x00000008,
+    VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT = 0x00000010,
+    VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT = 0x00000020,
+    VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT = 0x00000040,
+    VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT = 0x00000080,
+    VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT = 0x00000100,
+    VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT = 0x00000200,
+    VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT = 0x00000400,
+    VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT = 0x00000800,
+    VK_PIPELINE_STAGE_TRANSFER_BIT = 0x00001000,
+    VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT = 0x00002000,
+    VK_PIPELINE_STAGE_HOST_BIT = 0x00004000,
+    VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT = 0x00008000,
+    VK_PIPELINE_STAGE_ALL_COMMANDS_BIT = 0x00010000,
+    VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT = 0x01000000,
+    VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00040000,
+    VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX = 0x00020000,
+    VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV = 0x00400000,
+    VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV = 0x00200000,
+    VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV = 0x02000000,
+    VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV = 0x00080000,
+    VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV = 0x00100000,
+    VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT = 0x00800000,
+    VK_PIPELINE_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineStageFlagBits;
+typedef VkFlags VkPipelineStageFlags;
+typedef VkFlags VkMemoryMapFlags;
+
+typedef enum VkImageAspectFlagBits {
+    VK_IMAGE_ASPECT_COLOR_BIT = 0x00000001,
+    VK_IMAGE_ASPECT_DEPTH_BIT = 0x00000002,
+    VK_IMAGE_ASPECT_STENCIL_BIT = 0x00000004,
+    VK_IMAGE_ASPECT_METADATA_BIT = 0x00000008,
+    VK_IMAGE_ASPECT_PLANE_0_BIT = 0x00000010,
+    VK_IMAGE_ASPECT_PLANE_1_BIT = 0x00000020,
+    VK_IMAGE_ASPECT_PLANE_2_BIT = 0x00000040,
+    VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT = 0x00000080,
+    VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT = 0x00000100,
+    VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT = 0x00000200,
+    VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT = 0x00000400,
+    VK_IMAGE_ASPECT_PLANE_0_BIT_KHR = VK_IMAGE_ASPECT_PLANE_0_BIT,
+    VK_IMAGE_ASPECT_PLANE_1_BIT_KHR = VK_IMAGE_ASPECT_PLANE_1_BIT,
+    VK_IMAGE_ASPECT_PLANE_2_BIT_KHR = VK_IMAGE_ASPECT_PLANE_2_BIT,
+    VK_IMAGE_ASPECT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkImageAspectFlagBits;
+typedef VkFlags VkImageAspectFlags;
+
+typedef enum VkSparseImageFormatFlagBits {
+    VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT = 0x00000001,
+    VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT = 0x00000002,
+    VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT = 0x00000004,
+    VK_SPARSE_IMAGE_FORMAT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSparseImageFormatFlagBits;
+typedef VkFlags VkSparseImageFormatFlags;
+
+typedef enum VkSparseMemoryBindFlagBits {
+    VK_SPARSE_MEMORY_BIND_METADATA_BIT = 0x00000001,
+    VK_SPARSE_MEMORY_BIND_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSparseMemoryBindFlagBits;
+typedef VkFlags VkSparseMemoryBindFlags;
+
+typedef enum VkFenceCreateFlagBits {
+    VK_FENCE_CREATE_SIGNALED_BIT = 0x00000001,
+    VK_FENCE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkFenceCreateFlagBits;
+typedef VkFlags VkFenceCreateFlags;
+typedef VkFlags VkSemaphoreCreateFlags;
+typedef VkFlags VkEventCreateFlags;
+typedef VkFlags VkQueryPoolCreateFlags;
+
+typedef enum VkQueryPipelineStatisticFlagBits {
+    VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT = 0x00000001,
+    VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT = 0x00000002,
+    VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT = 0x00000004,
+    VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT = 0x00000008,
+    VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT = 0x00000010,
+    VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT = 0x00000020,
+    VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT = 0x00000040,
+    VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT = 0x00000080,
+    VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT = 0x00000100,
+    VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT = 0x00000200,
+    VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT = 0x00000400,
+    VK_QUERY_PIPELINE_STATISTIC_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkQueryPipelineStatisticFlagBits;
+typedef VkFlags VkQueryPipelineStatisticFlags;
+
+typedef enum VkQueryResultFlagBits {
+    VK_QUERY_RESULT_64_BIT = 0x00000001,
+    VK_QUERY_RESULT_WAIT_BIT = 0x00000002,
+    VK_QUERY_RESULT_WITH_AVAILABILITY_BIT = 0x00000004,
+    VK_QUERY_RESULT_PARTIAL_BIT = 0x00000008,
+    VK_QUERY_RESULT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkQueryResultFlagBits;
+typedef VkFlags VkQueryResultFlags;
+
+typedef enum VkBufferCreateFlagBits {
+    VK_BUFFER_CREATE_SPARSE_BINDING_BIT = 0x00000001,
+    VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002,
+    VK_BUFFER_CREATE_SPARSE_ALIASED_BIT = 0x00000004,
+    VK_BUFFER_CREATE_PROTECTED_BIT = 0x00000008,
+    VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = 0x00000010,
+    VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR,
+    VK_BUFFER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkBufferCreateFlagBits;
+typedef VkFlags VkBufferCreateFlags;
+
+typedef enum VkBufferUsageFlagBits {
+    VK_BUFFER_USAGE_TRANSFER_SRC_BIT = 0x00000001,
+    VK_BUFFER_USAGE_TRANSFER_DST_BIT = 0x00000002,
+    VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000004,
+    VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT = 0x00000008,
+    VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT = 0x00000010,
+    VK_BUFFER_USAGE_STORAGE_BUFFER_BIT = 0x00000020,
+    VK_BUFFER_USAGE_INDEX_BUFFER_BIT = 0x00000040,
+    VK_BUFFER_USAGE_VERTEX_BUFFER_BIT = 0x00000080,
+    VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT = 0x00000100,
+    VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT = 0x00000800,
+    VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT = 0x00001000,
+    VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00000200,
+    VK_BUFFER_USAGE_RAY_TRACING_BIT_NV = 0x00000400,
+    VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR = 0x00020000,
+    VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR,
+    VK_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkBufferUsageFlagBits;
+typedef VkFlags VkBufferUsageFlags;
+typedef VkFlags VkBufferViewCreateFlags;
+
+typedef enum VkImageViewCreateFlagBits {
+    VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT = 0x00000001,
+    VK_IMAGE_VIEW_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkImageViewCreateFlagBits;
+typedef VkFlags VkImageViewCreateFlags;
+
+typedef enum VkShaderModuleCreateFlagBits {
+    VK_SHADER_MODULE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkShaderModuleCreateFlagBits;
+typedef VkFlags VkShaderModuleCreateFlags;
+typedef VkFlags VkPipelineCacheCreateFlags;
+
+typedef enum VkPipelineCreateFlagBits {
+    VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT = 0x00000001,
+    VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT = 0x00000002,
+    VK_PIPELINE_CREATE_DERIVATIVE_BIT = 0x00000004,
+    VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT = 0x00000008,
+    VK_PIPELINE_CREATE_DISPATCH_BASE_BIT = 0x00000010,
+    VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV = 0x00000020,
+    VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR = 0x00000040,
+    VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR = 0x00000080,
+    VK_PIPELINE_CREATE_DISPATCH_BASE = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT,
+    VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT,
+    VK_PIPELINE_CREATE_DISPATCH_BASE_KHR = VK_PIPELINE_CREATE_DISPATCH_BASE,
+    VK_PIPELINE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineCreateFlagBits;
+typedef VkFlags VkPipelineCreateFlags;
+
+typedef enum VkPipelineShaderStageCreateFlagBits {
+    VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT = 0x00000001,
+    VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT = 0x00000002,
+    VK_PIPELINE_SHADER_STAGE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineShaderStageCreateFlagBits;
+typedef VkFlags VkPipelineShaderStageCreateFlags;
+
+typedef enum VkShaderStageFlagBits {
+    VK_SHADER_STAGE_VERTEX_BIT = 0x00000001,
+    VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT = 0x00000002,
+    VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT = 0x00000004,
+    VK_SHADER_STAGE_GEOMETRY_BIT = 0x00000008,
+    VK_SHADER_STAGE_FRAGMENT_BIT = 0x00000010,
+    VK_SHADER_STAGE_COMPUTE_BIT = 0x00000020,
+    VK_SHADER_STAGE_ALL_GRAPHICS = 0x0000001F,
+    VK_SHADER_STAGE_ALL = 0x7FFFFFFF,
+    VK_SHADER_STAGE_RAYGEN_BIT_NV = 0x00000100,
+    VK_SHADER_STAGE_ANY_HIT_BIT_NV = 0x00000200,
+    VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV = 0x00000400,
+    VK_SHADER_STAGE_MISS_BIT_NV = 0x00000800,
+    VK_SHADER_STAGE_INTERSECTION_BIT_NV = 0x00001000,
+    VK_SHADER_STAGE_CALLABLE_BIT_NV = 0x00002000,
+    VK_SHADER_STAGE_TASK_BIT_NV = 0x00000040,
+    VK_SHADER_STAGE_MESH_BIT_NV = 0x00000080,
+    VK_SHADER_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkShaderStageFlagBits;
+typedef VkFlags VkPipelineVertexInputStateCreateFlags;
+typedef VkFlags VkPipelineInputAssemblyStateCreateFlags;
+typedef VkFlags VkPipelineTessellationStateCreateFlags;
+typedef VkFlags VkPipelineViewportStateCreateFlags;
+typedef VkFlags VkPipelineRasterizationStateCreateFlags;
+
+typedef enum VkCullModeFlagBits {
+    VK_CULL_MODE_NONE = 0,
+    VK_CULL_MODE_FRONT_BIT = 0x00000001,
+    VK_CULL_MODE_BACK_BIT = 0x00000002,
+    VK_CULL_MODE_FRONT_AND_BACK = 0x00000003,
+    VK_CULL_MODE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkCullModeFlagBits;
+typedef VkFlags VkCullModeFlags;
+typedef VkFlags VkPipelineMultisampleStateCreateFlags;
+typedef VkFlags VkPipelineDepthStencilStateCreateFlags;
+typedef VkFlags VkPipelineColorBlendStateCreateFlags;
+
+typedef enum VkColorComponentFlagBits {
+    VK_COLOR_COMPONENT_R_BIT = 0x00000001,
+    VK_COLOR_COMPONENT_G_BIT = 0x00000002,
+    VK_COLOR_COMPONENT_B_BIT = 0x00000004,
+    VK_COLOR_COMPONENT_A_BIT = 0x00000008,
+    VK_COLOR_COMPONENT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkColorComponentFlagBits;
+typedef VkFlags VkColorComponentFlags;
+typedef VkFlags VkPipelineDynamicStateCreateFlags;
+typedef VkFlags VkPipelineLayoutCreateFlags;
+typedef VkFlags VkShaderStageFlags;
+
+typedef enum VkSamplerCreateFlagBits {
+    VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT = 0x00000001,
+    VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT = 0x00000002,
+    VK_SAMPLER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSamplerCreateFlagBits;
+typedef VkFlags VkSamplerCreateFlags;
+
+typedef enum VkDescriptorSetLayoutCreateFlagBits {
+    VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR = 0x00000001,
+    VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT = 0x00000002,
+    VK_DESCRIPTOR_SET_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkDescriptorSetLayoutCreateFlagBits;
+typedef VkFlags VkDescriptorSetLayoutCreateFlags;
+
+typedef enum VkDescriptorPoolCreateFlagBits {
+    VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT = 0x00000001,
+    VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT = 0x00000002,
+    VK_DESCRIPTOR_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkDescriptorPoolCreateFlagBits;
+typedef VkFlags VkDescriptorPoolCreateFlags;
+typedef VkFlags VkDescriptorPoolResetFlags;
+
+typedef enum VkFramebufferCreateFlagBits {
+    VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR = 0x00000001,
+    VK_FRAMEBUFFER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkFramebufferCreateFlagBits;
+typedef VkFlags VkFramebufferCreateFlags;
+
+typedef enum VkRenderPassCreateFlagBits {
+    VK_RENDER_PASS_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkRenderPassCreateFlagBits;
+typedef VkFlags VkRenderPassCreateFlags;
+
+typedef enum VkAttachmentDescriptionFlagBits {
+    VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT = 0x00000001,
+    VK_ATTACHMENT_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkAttachmentDescriptionFlagBits;
+typedef VkFlags VkAttachmentDescriptionFlags;
+
+typedef enum VkSubpassDescriptionFlagBits {
+    VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX = 0x00000001,
+    VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX = 0x00000002,
+    VK_SUBPASS_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSubpassDescriptionFlagBits;
+typedef VkFlags VkSubpassDescriptionFlags;
+
+typedef enum VkAccessFlagBits {
+    VK_ACCESS_INDIRECT_COMMAND_READ_BIT = 0x00000001,
+    VK_ACCESS_INDEX_READ_BIT = 0x00000002,
+    VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT = 0x00000004,
+    VK_ACCESS_UNIFORM_READ_BIT = 0x00000008,
+    VK_ACCESS_INPUT_ATTACHMENT_READ_BIT = 0x00000010,
+    VK_ACCESS_SHADER_READ_BIT = 0x00000020,
+    VK_ACCESS_SHADER_WRITE_BIT = 0x00000040,
+    VK_ACCESS_COLOR_ATTACHMENT_READ_BIT = 0x00000080,
+    VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT = 0x00000100,
+    VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 0x00000200,
+    VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 0x00000400,
+    VK_ACCESS_TRANSFER_READ_BIT = 0x00000800,
+    VK_ACCESS_TRANSFER_WRITE_BIT = 0x00001000,
+    VK_ACCESS_HOST_READ_BIT = 0x00002000,
+    VK_ACCESS_HOST_WRITE_BIT = 0x00004000,
+    VK_ACCESS_MEMORY_READ_BIT = 0x00008000,
+    VK_ACCESS_MEMORY_WRITE_BIT = 0x00010000,
+    VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT = 0x02000000,
+    VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 0x04000000,
+    VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 0x08000000,
+    VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT = 0x00100000,
+    VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX = 0x00020000,
+    VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX = 0x00040000,
+    VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000,
+    VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV = 0x00800000,
+    VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV = 0x00200000,
+    VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV = 0x00400000,
+    VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0x01000000,
+    VK_ACCESS_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkAccessFlagBits;
+typedef VkFlags VkAccessFlags;
+
+typedef enum VkDependencyFlagBits {
+    VK_DEPENDENCY_BY_REGION_BIT = 0x00000001,
+    VK_DEPENDENCY_DEVICE_GROUP_BIT = 0x00000004,
+    VK_DEPENDENCY_VIEW_LOCAL_BIT = 0x00000002,
+    VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR = VK_DEPENDENCY_VIEW_LOCAL_BIT,
+    VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR = VK_DEPENDENCY_DEVICE_GROUP_BIT,
+    VK_DEPENDENCY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkDependencyFlagBits;
+typedef VkFlags VkDependencyFlags;
+
+typedef enum VkCommandPoolCreateFlagBits {
+    VK_COMMAND_POOL_CREATE_TRANSIENT_BIT = 0x00000001,
+    VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT = 0x00000002,
+    VK_COMMAND_POOL_CREATE_PROTECTED_BIT = 0x00000004,
+    VK_COMMAND_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkCommandPoolCreateFlagBits;
+typedef VkFlags VkCommandPoolCreateFlags;
+
+typedef enum VkCommandPoolResetFlagBits {
+    VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT = 0x00000001,
+    VK_COMMAND_POOL_RESET_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkCommandPoolResetFlagBits;
+typedef VkFlags VkCommandPoolResetFlags;
+
+typedef enum VkCommandBufferUsageFlagBits {
+    VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT = 0x00000001,
+    VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT = 0x00000002,
+    VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT = 0x00000004,
+    VK_COMMAND_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkCommandBufferUsageFlagBits;
+typedef VkFlags VkCommandBufferUsageFlags;
+
+typedef enum VkQueryControlFlagBits {
+    VK_QUERY_CONTROL_PRECISE_BIT = 0x00000001,
+    VK_QUERY_CONTROL_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkQueryControlFlagBits;
+typedef VkFlags VkQueryControlFlags;
+
+typedef enum VkCommandBufferResetFlagBits {
+    VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT = 0x00000001,
+    VK_COMMAND_BUFFER_RESET_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkCommandBufferResetFlagBits;
+typedef VkFlags VkCommandBufferResetFlags;
+
+typedef enum VkStencilFaceFlagBits {
+    VK_STENCIL_FACE_FRONT_BIT = 0x00000001,
+    VK_STENCIL_FACE_BACK_BIT = 0x00000002,
+    VK_STENCIL_FACE_FRONT_AND_BACK = 0x00000003,
+    VK_STENCIL_FRONT_AND_BACK = VK_STENCIL_FACE_FRONT_AND_BACK,
+    VK_STENCIL_FACE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkStencilFaceFlagBits;
+typedef VkFlags VkStencilFaceFlags;
+typedef struct VkApplicationInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    const char*        pApplicationName;
+    uint32_t           applicationVersion;
+    const char*        pEngineName;
+    uint32_t           engineVersion;
+    uint32_t           apiVersion;
+} VkApplicationInfo;
+
+typedef struct VkInstanceCreateInfo {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkInstanceCreateFlags       flags;
+    const VkApplicationInfo*    pApplicationInfo;
+    uint32_t                    enabledLayerCount;
+    const char* const*          ppEnabledLayerNames;
+    uint32_t                    enabledExtensionCount;
+    const char* const*          ppEnabledExtensionNames;
+} VkInstanceCreateInfo;
+
+typedef void* (VKAPI_PTR *PFN_vkAllocationFunction)(
+    void*                                       pUserData,
+    size_t                                      size,
+    size_t                                      alignment,
+    VkSystemAllocationScope                     allocationScope);
+
+typedef void* (VKAPI_PTR *PFN_vkReallocationFunction)(
+    void*                                       pUserData,
+    void*                                       pOriginal,
+    size_t                                      size,
+    size_t                                      alignment,
+    VkSystemAllocationScope                     allocationScope);
+
+typedef void (VKAPI_PTR *PFN_vkFreeFunction)(
+    void*                                       pUserData,
+    void*                                       pMemory);
+
+typedef void (VKAPI_PTR *PFN_vkInternalAllocationNotification)(
+    void*                                       pUserData,
+    size_t                                      size,
+    VkInternalAllocationType                    allocationType,
+    VkSystemAllocationScope                     allocationScope);
+
+typedef void (VKAPI_PTR *PFN_vkInternalFreeNotification)(
+    void*                                       pUserData,
+    size_t                                      size,
+    VkInternalAllocationType                    allocationType,
+    VkSystemAllocationScope                     allocationScope);
+
+typedef struct VkAllocationCallbacks {
+    void*                                   pUserData;
+    PFN_vkAllocationFunction                pfnAllocation;
+    PFN_vkReallocationFunction              pfnReallocation;
+    PFN_vkFreeFunction                      pfnFree;
+    PFN_vkInternalAllocationNotification    pfnInternalAllocation;
+    PFN_vkInternalFreeNotification          pfnInternalFree;
+} VkAllocationCallbacks;
+
+typedef struct VkPhysicalDeviceFeatures {
+    VkBool32    robustBufferAccess;
+    VkBool32    fullDrawIndexUint32;
+    VkBool32    imageCubeArray;
+    VkBool32    independentBlend;
+    VkBool32    geometryShader;
+    VkBool32    tessellationShader;
+    VkBool32    sampleRateShading;
+    VkBool32    dualSrcBlend;
+    VkBool32    logicOp;
+    VkBool32    multiDrawIndirect;
+    VkBool32    drawIndirectFirstInstance;
+    VkBool32    depthClamp;
+    VkBool32    depthBiasClamp;
+    VkBool32    fillModeNonSolid;
+    VkBool32    depthBounds;
+    VkBool32    wideLines;
+    VkBool32    largePoints;
+    VkBool32    alphaToOne;
+    VkBool32    multiViewport;
+    VkBool32    samplerAnisotropy;
+    VkBool32    textureCompressionETC2;
+    VkBool32    textureCompressionASTC_LDR;
+    VkBool32    textureCompressionBC;
+    VkBool32    occlusionQueryPrecise;
+    VkBool32    pipelineStatisticsQuery;
+    VkBool32    vertexPipelineStoresAndAtomics;
+    VkBool32    fragmentStoresAndAtomics;
+    VkBool32    shaderTessellationAndGeometryPointSize;
+    VkBool32    shaderImageGatherExtended;
+    VkBool32    shaderStorageImageExtendedFormats;
+    VkBool32    shaderStorageImageMultisample;
+    VkBool32    shaderStorageImageReadWithoutFormat;
+    VkBool32    shaderStorageImageWriteWithoutFormat;
+    VkBool32    shaderUniformBufferArrayDynamicIndexing;
+    VkBool32    shaderSampledImageArrayDynamicIndexing;
+    VkBool32    shaderStorageBufferArrayDynamicIndexing;
+    VkBool32    shaderStorageImageArrayDynamicIndexing;
+    VkBool32    shaderClipDistance;
+    VkBool32    shaderCullDistance;
+    VkBool32    shaderFloat64;
+    VkBool32    shaderInt64;
+    VkBool32    shaderInt16;
+    VkBool32    shaderResourceResidency;
+    VkBool32    shaderResourceMinLod;
+    VkBool32    sparseBinding;
+    VkBool32    sparseResidencyBuffer;
+    VkBool32    sparseResidencyImage2D;
+    VkBool32    sparseResidencyImage3D;
+    VkBool32    sparseResidency2Samples;
+    VkBool32    sparseResidency4Samples;
+    VkBool32    sparseResidency8Samples;
+    VkBool32    sparseResidency16Samples;
+    VkBool32    sparseResidencyAliased;
+    VkBool32    variableMultisampleRate;
+    VkBool32    inheritedQueries;
+} VkPhysicalDeviceFeatures;
+
+typedef struct VkFormatProperties {
+    VkFormatFeatureFlags    linearTilingFeatures;
+    VkFormatFeatureFlags    optimalTilingFeatures;
+    VkFormatFeatureFlags    bufferFeatures;
+} VkFormatProperties;
+
+typedef struct VkExtent3D {
+    uint32_t    width;
+    uint32_t    height;
+    uint32_t    depth;
+} VkExtent3D;
+
+typedef struct VkImageFormatProperties {
+    VkExtent3D            maxExtent;
+    uint32_t              maxMipLevels;
+    uint32_t              maxArrayLayers;
+    VkSampleCountFlags    sampleCounts;
+    VkDeviceSize          maxResourceSize;
+} VkImageFormatProperties;
+
+typedef struct VkPhysicalDeviceLimits {
+    uint32_t              maxImageDimension1D;
+    uint32_t              maxImageDimension2D;
+    uint32_t              maxImageDimension3D;
+    uint32_t              maxImageDimensionCube;
+    uint32_t              maxImageArrayLayers;
+    uint32_t              maxTexelBufferElements;
+    uint32_t              maxUniformBufferRange;
+    uint32_t              maxStorageBufferRange;
+    uint32_t              maxPushConstantsSize;
+    uint32_t              maxMemoryAllocationCount;
+    uint32_t              maxSamplerAllocationCount;
+    VkDeviceSize          bufferImageGranularity;
+    VkDeviceSize          sparseAddressSpaceSize;
+    uint32_t              maxBoundDescriptorSets;
+    uint32_t              maxPerStageDescriptorSamplers;
+    uint32_t              maxPerStageDescriptorUniformBuffers;
+    uint32_t              maxPerStageDescriptorStorageBuffers;
+    uint32_t              maxPerStageDescriptorSampledImages;
+    uint32_t              maxPerStageDescriptorStorageImages;
+    uint32_t              maxPerStageDescriptorInputAttachments;
+    uint32_t              maxPerStageResources;
+    uint32_t              maxDescriptorSetSamplers;
+    uint32_t              maxDescriptorSetUniformBuffers;
+    uint32_t              maxDescriptorSetUniformBuffersDynamic;
+    uint32_t              maxDescriptorSetStorageBuffers;
+    uint32_t              maxDescriptorSetStorageBuffersDynamic;
+    uint32_t              maxDescriptorSetSampledImages;
+    uint32_t              maxDescriptorSetStorageImages;
+    uint32_t              maxDescriptorSetInputAttachments;
+    uint32_t              maxVertexInputAttributes;
+    uint32_t              maxVertexInputBindings;
+    uint32_t              maxVertexInputAttributeOffset;
+    uint32_t              maxVertexInputBindingStride;
+    uint32_t              maxVertexOutputComponents;
+    uint32_t              maxTessellationGenerationLevel;
+    uint32_t              maxTessellationPatchSize;
+    uint32_t              maxTessellationControlPerVertexInputComponents;
+    uint32_t              maxTessellationControlPerVertexOutputComponents;
+    uint32_t              maxTessellationControlPerPatchOutputComponents;
+    uint32_t              maxTessellationControlTotalOutputComponents;
+    uint32_t              maxTessellationEvaluationInputComponents;
+    uint32_t              maxTessellationEvaluationOutputComponents;
+    uint32_t              maxGeometryShaderInvocations;
+    uint32_t              maxGeometryInputComponents;
+    uint32_t              maxGeometryOutputComponents;
+    uint32_t              maxGeometryOutputVertices;
+    uint32_t              maxGeometryTotalOutputComponents;
+    uint32_t              maxFragmentInputComponents;
+    uint32_t              maxFragmentOutputAttachments;
+    uint32_t              maxFragmentDualSrcAttachments;
+    uint32_t              maxFragmentCombinedOutputResources;
+    uint32_t              maxComputeSharedMemorySize;
+    uint32_t              maxComputeWorkGroupCount[3];
+    uint32_t              maxComputeWorkGroupInvocations;
+    uint32_t              maxComputeWorkGroupSize[3];
+    uint32_t              subPixelPrecisionBits;
+    uint32_t              subTexelPrecisionBits;
+    uint32_t              mipmapPrecisionBits;
+    uint32_t              maxDrawIndexedIndexValue;
+    uint32_t              maxDrawIndirectCount;
+    float                 maxSamplerLodBias;
+    float                 maxSamplerAnisotropy;
+    uint32_t              maxViewports;
+    uint32_t              maxViewportDimensions[2];
+    float                 viewportBoundsRange[2];
+    uint32_t              viewportSubPixelBits;
+    size_t                minMemoryMapAlignment;
+    VkDeviceSize          minTexelBufferOffsetAlignment;
+    VkDeviceSize          minUniformBufferOffsetAlignment;
+    VkDeviceSize          minStorageBufferOffsetAlignment;
+    int32_t               minTexelOffset;
+    uint32_t              maxTexelOffset;
+    int32_t               minTexelGatherOffset;
+    uint32_t              maxTexelGatherOffset;
+    float                 minInterpolationOffset;
+    float                 maxInterpolationOffset;
+    uint32_t              subPixelInterpolationOffsetBits;
+    uint32_t              maxFramebufferWidth;
+    uint32_t              maxFramebufferHeight;
+    uint32_t              maxFramebufferLayers;
+    VkSampleCountFlags    framebufferColorSampleCounts;
+    VkSampleCountFlags    framebufferDepthSampleCounts;
+    VkSampleCountFlags    framebufferStencilSampleCounts;
+    VkSampleCountFlags    framebufferNoAttachmentsSampleCounts;
+    uint32_t              maxColorAttachments;
+    VkSampleCountFlags    sampledImageColorSampleCounts;
+    VkSampleCountFlags    sampledImageIntegerSampleCounts;
+    VkSampleCountFlags    sampledImageDepthSampleCounts;
+    VkSampleCountFlags    sampledImageStencilSampleCounts;
+    VkSampleCountFlags    storageImageSampleCounts;
+    uint32_t              maxSampleMaskWords;
+    VkBool32              timestampComputeAndGraphics;
+    float                 timestampPeriod;
+    uint32_t              maxClipDistances;
+    uint32_t              maxCullDistances;
+    uint32_t              maxCombinedClipAndCullDistances;
+    uint32_t              discreteQueuePriorities;
+    float                 pointSizeRange[2];
+    float                 lineWidthRange[2];
+    float                 pointSizeGranularity;
+    float                 lineWidthGranularity;
+    VkBool32              strictLines;
+    VkBool32              standardSampleLocations;
+    VkDeviceSize          optimalBufferCopyOffsetAlignment;
+    VkDeviceSize          optimalBufferCopyRowPitchAlignment;
+    VkDeviceSize          nonCoherentAtomSize;
+} VkPhysicalDeviceLimits;
+
+typedef struct VkPhysicalDeviceSparseProperties {
+    VkBool32    residencyStandard2DBlockShape;
+    VkBool32    residencyStandard2DMultisampleBlockShape;
+    VkBool32    residencyStandard3DBlockShape;
+    VkBool32    residencyAlignedMipSize;
+    VkBool32    residencyNonResidentStrict;
+} VkPhysicalDeviceSparseProperties;
+
+typedef struct VkPhysicalDeviceProperties {
+    uint32_t                            apiVersion;
+    uint32_t                            driverVersion;
+    uint32_t                            vendorID;
+    uint32_t                            deviceID;
+    VkPhysicalDeviceType                deviceType;
+    char                                deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE];
+    uint8_t                             pipelineCacheUUID[VK_UUID_SIZE];
+    VkPhysicalDeviceLimits              limits;
+    VkPhysicalDeviceSparseProperties    sparseProperties;
+} VkPhysicalDeviceProperties;
+
+typedef struct VkQueueFamilyProperties {
+    VkQueueFlags    queueFlags;
+    uint32_t        queueCount;
+    uint32_t        timestampValidBits;
+    VkExtent3D      minImageTransferGranularity;
+} VkQueueFamilyProperties;
+
+typedef struct VkMemoryType {
+    VkMemoryPropertyFlags    propertyFlags;
+    uint32_t                 heapIndex;
+} VkMemoryType;
+
+typedef struct VkMemoryHeap {
+    VkDeviceSize         size;
+    VkMemoryHeapFlags    flags;
+} VkMemoryHeap;
+
+typedef struct VkPhysicalDeviceMemoryProperties {
+    uint32_t        memoryTypeCount;
+    VkMemoryType    memoryTypes[VK_MAX_MEMORY_TYPES];
+    uint32_t        memoryHeapCount;
+    VkMemoryHeap    memoryHeaps[VK_MAX_MEMORY_HEAPS];
+} VkPhysicalDeviceMemoryProperties;
+
+typedef void (VKAPI_PTR *PFN_vkVoidFunction)(void);
+typedef struct VkDeviceQueueCreateInfo {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkDeviceQueueCreateFlags    flags;
+    uint32_t                    queueFamilyIndex;
+    uint32_t                    queueCount;
+    const float*                pQueuePriorities;
+} VkDeviceQueueCreateInfo;
+
+typedef struct VkDeviceCreateInfo {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    VkDeviceCreateFlags                flags;
+    uint32_t                           queueCreateInfoCount;
+    const VkDeviceQueueCreateInfo*     pQueueCreateInfos;
+    uint32_t                           enabledLayerCount;
+    const char* const*                 ppEnabledLayerNames;
+    uint32_t                           enabledExtensionCount;
+    const char* const*                 ppEnabledExtensionNames;
+    const VkPhysicalDeviceFeatures*    pEnabledFeatures;
+} VkDeviceCreateInfo;
+
+typedef struct VkExtensionProperties {
+    char        extensionName[VK_MAX_EXTENSION_NAME_SIZE];
+    uint32_t    specVersion;
+} VkExtensionProperties;
+
+typedef struct VkLayerProperties {
+    char        layerName[VK_MAX_EXTENSION_NAME_SIZE];
+    uint32_t    specVersion;
+    uint32_t    implementationVersion;
+    char        description[VK_MAX_DESCRIPTION_SIZE];
+} VkLayerProperties;
+
+typedef struct VkSubmitInfo {
+    VkStructureType                sType;
+    const void*                    pNext;
+    uint32_t                       waitSemaphoreCount;
+    const VkSemaphore*             pWaitSemaphores;
+    const VkPipelineStageFlags*    pWaitDstStageMask;
+    uint32_t                       commandBufferCount;
+    const VkCommandBuffer*         pCommandBuffers;
+    uint32_t                       signalSemaphoreCount;
+    const VkSemaphore*             pSignalSemaphores;
+} VkSubmitInfo;
+
+typedef struct VkMemoryAllocateInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkDeviceSize       allocationSize;
+    uint32_t           memoryTypeIndex;
+} VkMemoryAllocateInfo;
+
+typedef struct VkMappedMemoryRange {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkDeviceMemory     memory;
+    VkDeviceSize       offset;
+    VkDeviceSize       size;
+} VkMappedMemoryRange;
+
+typedef struct VkMemoryRequirements {
+    VkDeviceSize    size;
+    VkDeviceSize    alignment;
+    uint32_t        memoryTypeBits;
+} VkMemoryRequirements;
+
+typedef struct VkSparseImageFormatProperties {
+    VkImageAspectFlags          aspectMask;
+    VkExtent3D                  imageGranularity;
+    VkSparseImageFormatFlags    flags;
+} VkSparseImageFormatProperties;
+
+typedef struct VkSparseImageMemoryRequirements {
+    VkSparseImageFormatProperties    formatProperties;
+    uint32_t                         imageMipTailFirstLod;
+    VkDeviceSize                     imageMipTailSize;
+    VkDeviceSize                     imageMipTailOffset;
+    VkDeviceSize                     imageMipTailStride;
+} VkSparseImageMemoryRequirements;
+
+typedef struct VkSparseMemoryBind {
+    VkDeviceSize               resourceOffset;
+    VkDeviceSize               size;
+    VkDeviceMemory             memory;
+    VkDeviceSize               memoryOffset;
+    VkSparseMemoryBindFlags    flags;
+} VkSparseMemoryBind;
+
+typedef struct VkSparseBufferMemoryBindInfo {
+    VkBuffer                     buffer;
+    uint32_t                     bindCount;
+    const VkSparseMemoryBind*    pBinds;
+} VkSparseBufferMemoryBindInfo;
+
+typedef struct VkSparseImageOpaqueMemoryBindInfo {
+    VkImage                      image;
+    uint32_t                     bindCount;
+    const VkSparseMemoryBind*    pBinds;
+} VkSparseImageOpaqueMemoryBindInfo;
+
+typedef struct VkImageSubresource {
+    VkImageAspectFlags    aspectMask;
+    uint32_t              mipLevel;
+    uint32_t              arrayLayer;
+} VkImageSubresource;
+
+typedef struct VkOffset3D {
+    int32_t    x;
+    int32_t    y;
+    int32_t    z;
+} VkOffset3D;
+
+typedef struct VkSparseImageMemoryBind {
+    VkImageSubresource         subresource;
+    VkOffset3D                 offset;
+    VkExtent3D                 extent;
+    VkDeviceMemory             memory;
+    VkDeviceSize               memoryOffset;
+    VkSparseMemoryBindFlags    flags;
+} VkSparseImageMemoryBind;
+
+typedef struct VkSparseImageMemoryBindInfo {
+    VkImage                           image;
+    uint32_t                          bindCount;
+    const VkSparseImageMemoryBind*    pBinds;
+} VkSparseImageMemoryBindInfo;
+
+typedef struct VkBindSparseInfo {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    uint32_t                                    waitSemaphoreCount;
+    const VkSemaphore*                          pWaitSemaphores;
+    uint32_t                                    bufferBindCount;
+    const VkSparseBufferMemoryBindInfo*         pBufferBinds;
+    uint32_t                                    imageOpaqueBindCount;
+    const VkSparseImageOpaqueMemoryBindInfo*    pImageOpaqueBinds;
+    uint32_t                                    imageBindCount;
+    const VkSparseImageMemoryBindInfo*          pImageBinds;
+    uint32_t                                    signalSemaphoreCount;
+    const VkSemaphore*                          pSignalSemaphores;
+} VkBindSparseInfo;
+
+typedef struct VkFenceCreateInfo {
+    VkStructureType       sType;
+    const void*           pNext;
+    VkFenceCreateFlags    flags;
+} VkFenceCreateInfo;
+
+typedef struct VkSemaphoreCreateInfo {
+    VkStructureType           sType;
+    const void*               pNext;
+    VkSemaphoreCreateFlags    flags;
+} VkSemaphoreCreateInfo;
+
+typedef struct VkEventCreateInfo {
+    VkStructureType       sType;
+    const void*           pNext;
+    VkEventCreateFlags    flags;
+} VkEventCreateInfo;
+
+typedef struct VkQueryPoolCreateInfo {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkQueryPoolCreateFlags           flags;
+    VkQueryType                      queryType;
+    uint32_t                         queryCount;
+    VkQueryPipelineStatisticFlags    pipelineStatistics;
+} VkQueryPoolCreateInfo;
+
+typedef struct VkBufferCreateInfo {
+    VkStructureType        sType;
+    const void*            pNext;
+    VkBufferCreateFlags    flags;
+    VkDeviceSize           size;
+    VkBufferUsageFlags     usage;
+    VkSharingMode          sharingMode;
+    uint32_t               queueFamilyIndexCount;
+    const uint32_t*        pQueueFamilyIndices;
+} VkBufferCreateInfo;
+
+typedef struct VkBufferViewCreateInfo {
+    VkStructureType            sType;
+    const void*                pNext;
+    VkBufferViewCreateFlags    flags;
+    VkBuffer                   buffer;
+    VkFormat                   format;
+    VkDeviceSize               offset;
+    VkDeviceSize               range;
+} VkBufferViewCreateInfo;
+
+typedef struct VkImageCreateInfo {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkImageCreateFlags       flags;
+    VkImageType              imageType;
+    VkFormat                 format;
+    VkExtent3D               extent;
+    uint32_t                 mipLevels;
+    uint32_t                 arrayLayers;
+    VkSampleCountFlagBits    samples;
+    VkImageTiling            tiling;
+    VkImageUsageFlags        usage;
+    VkSharingMode            sharingMode;
+    uint32_t                 queueFamilyIndexCount;
+    const uint32_t*          pQueueFamilyIndices;
+    VkImageLayout            initialLayout;
+} VkImageCreateInfo;
+
+typedef struct VkSubresourceLayout {
+    VkDeviceSize    offset;
+    VkDeviceSize    size;
+    VkDeviceSize    rowPitch;
+    VkDeviceSize    arrayPitch;
+    VkDeviceSize    depthPitch;
+} VkSubresourceLayout;
+
+typedef struct VkComponentMapping {
+    VkComponentSwizzle    r;
+    VkComponentSwizzle    g;
+    VkComponentSwizzle    b;
+    VkComponentSwizzle    a;
+} VkComponentMapping;
+
+typedef struct VkImageSubresourceRange {
+    VkImageAspectFlags    aspectMask;
+    uint32_t              baseMipLevel;
+    uint32_t              levelCount;
+    uint32_t              baseArrayLayer;
+    uint32_t              layerCount;
+} VkImageSubresourceRange;
+
+typedef struct VkImageViewCreateInfo {
+    VkStructureType            sType;
+    const void*                pNext;
+    VkImageViewCreateFlags     flags;
+    VkImage                    image;
+    VkImageViewType            viewType;
+    VkFormat                   format;
+    VkComponentMapping         components;
+    VkImageSubresourceRange    subresourceRange;
+} VkImageViewCreateInfo;
+
+typedef struct VkShaderModuleCreateInfo {
+    VkStructureType              sType;
+    const void*                  pNext;
+    VkShaderModuleCreateFlags    flags;
+    size_t                       codeSize;
+    const uint32_t*              pCode;
+} VkShaderModuleCreateInfo;
+
+typedef struct VkPipelineCacheCreateInfo {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkPipelineCacheCreateFlags    flags;
+    size_t                        initialDataSize;
+    const void*                   pInitialData;
+} VkPipelineCacheCreateInfo;
+
+typedef struct VkSpecializationMapEntry {
+    uint32_t    constantID;
+    uint32_t    offset;
+    size_t      size;
+} VkSpecializationMapEntry;
+
+typedef struct VkSpecializationInfo {
+    uint32_t                           mapEntryCount;
+    const VkSpecializationMapEntry*    pMapEntries;
+    size_t                             dataSize;
+    const void*                        pData;
+} VkSpecializationInfo;
+
+typedef struct VkPipelineShaderStageCreateInfo {
+    VkStructureType                     sType;
+    const void*                         pNext;
+    VkPipelineShaderStageCreateFlags    flags;
+    VkShaderStageFlagBits               stage;
+    VkShaderModule                      module;
+    const char*                         pName;
+    const VkSpecializationInfo*         pSpecializationInfo;
+} VkPipelineShaderStageCreateInfo;
+
+typedef struct VkVertexInputBindingDescription {
+    uint32_t             binding;
+    uint32_t             stride;
+    VkVertexInputRate    inputRate;
+} VkVertexInputBindingDescription;
+
+typedef struct VkVertexInputAttributeDescription {
+    uint32_t    location;
+    uint32_t    binding;
+    VkFormat    format;
+    uint32_t    offset;
+} VkVertexInputAttributeDescription;
+
+typedef struct VkPipelineVertexInputStateCreateInfo {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkPipelineVertexInputStateCreateFlags       flags;
+    uint32_t                                    vertexBindingDescriptionCount;
+    const VkVertexInputBindingDescription*      pVertexBindingDescriptions;
+    uint32_t                                    vertexAttributeDescriptionCount;
+    const VkVertexInputAttributeDescription*    pVertexAttributeDescriptions;
+} VkPipelineVertexInputStateCreateInfo;
+
+typedef struct VkPipelineInputAssemblyStateCreateInfo {
+    VkStructureType                            sType;
+    const void*                                pNext;
+    VkPipelineInputAssemblyStateCreateFlags    flags;
+    VkPrimitiveTopology                        topology;
+    VkBool32                                   primitiveRestartEnable;
+} VkPipelineInputAssemblyStateCreateInfo;
+
+typedef struct VkPipelineTessellationStateCreateInfo {
+    VkStructureType                           sType;
+    const void*                               pNext;
+    VkPipelineTessellationStateCreateFlags    flags;
+    uint32_t                                  patchControlPoints;
+} VkPipelineTessellationStateCreateInfo;
+
+typedef struct VkViewport {
+    float    x;
+    float    y;
+    float    width;
+    float    height;
+    float    minDepth;
+    float    maxDepth;
+} VkViewport;
+
+typedef struct VkOffset2D {
+    int32_t    x;
+    int32_t    y;
+} VkOffset2D;
+
+typedef struct VkExtent2D {
+    uint32_t    width;
+    uint32_t    height;
+} VkExtent2D;
+
+typedef struct VkRect2D {
+    VkOffset2D    offset;
+    VkExtent2D    extent;
+} VkRect2D;
+
+typedef struct VkPipelineViewportStateCreateInfo {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkPipelineViewportStateCreateFlags    flags;
+    uint32_t                              viewportCount;
+    const VkViewport*                     pViewports;
+    uint32_t                              scissorCount;
+    const VkRect2D*                       pScissors;
+} VkPipelineViewportStateCreateInfo;
+
+typedef struct VkPipelineRasterizationStateCreateInfo {
+    VkStructureType                            sType;
+    const void*                                pNext;
+    VkPipelineRasterizationStateCreateFlags    flags;
+    VkBool32                                   depthClampEnable;
+    VkBool32                                   rasterizerDiscardEnable;
+    VkPolygonMode                              polygonMode;
+    VkCullModeFlags                            cullMode;
+    VkFrontFace                                frontFace;
+    VkBool32                                   depthBiasEnable;
+    float                                      depthBiasConstantFactor;
+    float                                      depthBiasClamp;
+    float                                      depthBiasSlopeFactor;
+    float                                      lineWidth;
+} VkPipelineRasterizationStateCreateInfo;
+
+typedef struct VkPipelineMultisampleStateCreateInfo {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkPipelineMultisampleStateCreateFlags    flags;
+    VkSampleCountFlagBits                    rasterizationSamples;
+    VkBool32                                 sampleShadingEnable;
+    float                                    minSampleShading;
+    const VkSampleMask*                      pSampleMask;
+    VkBool32                                 alphaToCoverageEnable;
+    VkBool32                                 alphaToOneEnable;
+} VkPipelineMultisampleStateCreateInfo;
+
+typedef struct VkStencilOpState {
+    VkStencilOp    failOp;
+    VkStencilOp    passOp;
+    VkStencilOp    depthFailOp;
+    VkCompareOp    compareOp;
+    uint32_t       compareMask;
+    uint32_t       writeMask;
+    uint32_t       reference;
+} VkStencilOpState;
+
+typedef struct VkPipelineDepthStencilStateCreateInfo {
+    VkStructureType                           sType;
+    const void*                               pNext;
+    VkPipelineDepthStencilStateCreateFlags    flags;
+    VkBool32                                  depthTestEnable;
+    VkBool32                                  depthWriteEnable;
+    VkCompareOp                               depthCompareOp;
+    VkBool32                                  depthBoundsTestEnable;
+    VkBool32                                  stencilTestEnable;
+    VkStencilOpState                          front;
+    VkStencilOpState                          back;
+    float                                     minDepthBounds;
+    float                                     maxDepthBounds;
+} VkPipelineDepthStencilStateCreateInfo;
+
+typedef struct VkPipelineColorBlendAttachmentState {
+    VkBool32                 blendEnable;
+    VkBlendFactor            srcColorBlendFactor;
+    VkBlendFactor            dstColorBlendFactor;
+    VkBlendOp                colorBlendOp;
+    VkBlendFactor            srcAlphaBlendFactor;
+    VkBlendFactor            dstAlphaBlendFactor;
+    VkBlendOp                alphaBlendOp;
+    VkColorComponentFlags    colorWriteMask;
+} VkPipelineColorBlendAttachmentState;
+
+typedef struct VkPipelineColorBlendStateCreateInfo {
+    VkStructureType                               sType;
+    const void*                                   pNext;
+    VkPipelineColorBlendStateCreateFlags          flags;
+    VkBool32                                      logicOpEnable;
+    VkLogicOp                                     logicOp;
+    uint32_t                                      attachmentCount;
+    const VkPipelineColorBlendAttachmentState*    pAttachments;
+    float                                         blendConstants[4];
+} VkPipelineColorBlendStateCreateInfo;
+
+typedef struct VkPipelineDynamicStateCreateInfo {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkPipelineDynamicStateCreateFlags    flags;
+    uint32_t                             dynamicStateCount;
+    const VkDynamicState*                pDynamicStates;
+} VkPipelineDynamicStateCreateInfo;
+
+typedef struct VkGraphicsPipelineCreateInfo {
+    VkStructureType                                  sType;
+    const void*                                      pNext;
+    VkPipelineCreateFlags                            flags;
+    uint32_t                                         stageCount;
+    const VkPipelineShaderStageCreateInfo*           pStages;
+    const VkPipelineVertexInputStateCreateInfo*      pVertexInputState;
+    const VkPipelineInputAssemblyStateCreateInfo*    pInputAssemblyState;
+    const VkPipelineTessellationStateCreateInfo*     pTessellationState;
+    const VkPipelineViewportStateCreateInfo*         pViewportState;
+    const VkPipelineRasterizationStateCreateInfo*    pRasterizationState;
+    const VkPipelineMultisampleStateCreateInfo*      pMultisampleState;
+    const VkPipelineDepthStencilStateCreateInfo*     pDepthStencilState;
+    const VkPipelineColorBlendStateCreateInfo*       pColorBlendState;
+    const VkPipelineDynamicStateCreateInfo*          pDynamicState;
+    VkPipelineLayout                                 layout;
+    VkRenderPass                                     renderPass;
+    uint32_t                                         subpass;
+    VkPipeline                                       basePipelineHandle;
+    int32_t                                          basePipelineIndex;
+} VkGraphicsPipelineCreateInfo;
+
+typedef struct VkComputePipelineCreateInfo {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    VkPipelineCreateFlags              flags;
+    VkPipelineShaderStageCreateInfo    stage;
+    VkPipelineLayout                   layout;
+    VkPipeline                         basePipelineHandle;
+    int32_t                            basePipelineIndex;
+} VkComputePipelineCreateInfo;
+
+typedef struct VkPushConstantRange {
+    VkShaderStageFlags    stageFlags;
+    uint32_t              offset;
+    uint32_t              size;
+} VkPushConstantRange;
+
+typedef struct VkPipelineLayoutCreateInfo {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkPipelineLayoutCreateFlags     flags;
+    uint32_t                        setLayoutCount;
+    const VkDescriptorSetLayout*    pSetLayouts;
+    uint32_t                        pushConstantRangeCount;
+    const VkPushConstantRange*      pPushConstantRanges;
+} VkPipelineLayoutCreateInfo;
+
+typedef struct VkSamplerCreateInfo {
+    VkStructureType         sType;
+    const void*             pNext;
+    VkSamplerCreateFlags    flags;
+    VkFilter                magFilter;
+    VkFilter                minFilter;
+    VkSamplerMipmapMode     mipmapMode;
+    VkSamplerAddressMode    addressModeU;
+    VkSamplerAddressMode    addressModeV;
+    VkSamplerAddressMode    addressModeW;
+    float                   mipLodBias;
+    VkBool32                anisotropyEnable;
+    float                   maxAnisotropy;
+    VkBool32                compareEnable;
+    VkCompareOp             compareOp;
+    float                   minLod;
+    float                   maxLod;
+    VkBorderColor           borderColor;
+    VkBool32                unnormalizedCoordinates;
+} VkSamplerCreateInfo;
+
+typedef struct VkDescriptorSetLayoutBinding {
+    uint32_t              binding;
+    VkDescriptorType      descriptorType;
+    uint32_t              descriptorCount;
+    VkShaderStageFlags    stageFlags;
+    const VkSampler*      pImmutableSamplers;
+} VkDescriptorSetLayoutBinding;
+
+typedef struct VkDescriptorSetLayoutCreateInfo {
+    VkStructureType                        sType;
+    const void*                            pNext;
+    VkDescriptorSetLayoutCreateFlags       flags;
+    uint32_t                               bindingCount;
+    const VkDescriptorSetLayoutBinding*    pBindings;
+} VkDescriptorSetLayoutCreateInfo;
+
+typedef struct VkDescriptorPoolSize {
+    VkDescriptorType    type;
+    uint32_t            descriptorCount;
+} VkDescriptorPoolSize;
+
+typedef struct VkDescriptorPoolCreateInfo {
+    VkStructureType                sType;
+    const void*                    pNext;
+    VkDescriptorPoolCreateFlags    flags;
+    uint32_t                       maxSets;
+    uint32_t                       poolSizeCount;
+    const VkDescriptorPoolSize*    pPoolSizes;
+} VkDescriptorPoolCreateInfo;
+
+typedef struct VkDescriptorSetAllocateInfo {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkDescriptorPool                descriptorPool;
+    uint32_t                        descriptorSetCount;
+    const VkDescriptorSetLayout*    pSetLayouts;
+} VkDescriptorSetAllocateInfo;
+
+typedef struct VkDescriptorImageInfo {
+    VkSampler        sampler;
+    VkImageView      imageView;
+    VkImageLayout    imageLayout;
+} VkDescriptorImageInfo;
+
+typedef struct VkDescriptorBufferInfo {
+    VkBuffer        buffer;
+    VkDeviceSize    offset;
+    VkDeviceSize    range;
+} VkDescriptorBufferInfo;
+
+typedef struct VkWriteDescriptorSet {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkDescriptorSet                  dstSet;
+    uint32_t                         dstBinding;
+    uint32_t                         dstArrayElement;
+    uint32_t                         descriptorCount;
+    VkDescriptorType                 descriptorType;
+    const VkDescriptorImageInfo*     pImageInfo;
+    const VkDescriptorBufferInfo*    pBufferInfo;
+    const VkBufferView*              pTexelBufferView;
+} VkWriteDescriptorSet;
+
+typedef struct VkCopyDescriptorSet {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkDescriptorSet    srcSet;
+    uint32_t           srcBinding;
+    uint32_t           srcArrayElement;
+    VkDescriptorSet    dstSet;
+    uint32_t           dstBinding;
+    uint32_t           dstArrayElement;
+    uint32_t           descriptorCount;
+} VkCopyDescriptorSet;
+
+typedef struct VkFramebufferCreateInfo {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkFramebufferCreateFlags    flags;
+    VkRenderPass                renderPass;
+    uint32_t                    attachmentCount;
+    const VkImageView*          pAttachments;
+    uint32_t                    width;
+    uint32_t                    height;
+    uint32_t                    layers;
+} VkFramebufferCreateInfo;
+
+typedef struct VkAttachmentDescription {
+    VkAttachmentDescriptionFlags    flags;
+    VkFormat                        format;
+    VkSampleCountFlagBits           samples;
+    VkAttachmentLoadOp              loadOp;
+    VkAttachmentStoreOp             storeOp;
+    VkAttachmentLoadOp              stencilLoadOp;
+    VkAttachmentStoreOp             stencilStoreOp;
+    VkImageLayout                   initialLayout;
+    VkImageLayout                   finalLayout;
+} VkAttachmentDescription;
+
+typedef struct VkAttachmentReference {
+    uint32_t         attachment;
+    VkImageLayout    layout;
+} VkAttachmentReference;
+
+typedef struct VkSubpassDescription {
+    VkSubpassDescriptionFlags       flags;
+    VkPipelineBindPoint             pipelineBindPoint;
+    uint32_t                        inputAttachmentCount;
+    const VkAttachmentReference*    pInputAttachments;
+    uint32_t                        colorAttachmentCount;
+    const VkAttachmentReference*    pColorAttachments;
+    const VkAttachmentReference*    pResolveAttachments;
+    const VkAttachmentReference*    pDepthStencilAttachment;
+    uint32_t                        preserveAttachmentCount;
+    const uint32_t*                 pPreserveAttachments;
+} VkSubpassDescription;
+
+typedef struct VkSubpassDependency {
+    uint32_t                srcSubpass;
+    uint32_t                dstSubpass;
+    VkPipelineStageFlags    srcStageMask;
+    VkPipelineStageFlags    dstStageMask;
+    VkAccessFlags           srcAccessMask;
+    VkAccessFlags           dstAccessMask;
+    VkDependencyFlags       dependencyFlags;
+} VkSubpassDependency;
+
+typedef struct VkRenderPassCreateInfo {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkRenderPassCreateFlags           flags;
+    uint32_t                          attachmentCount;
+    const VkAttachmentDescription*    pAttachments;
+    uint32_t                          subpassCount;
+    const VkSubpassDescription*       pSubpasses;
+    uint32_t                          dependencyCount;
+    const VkSubpassDependency*        pDependencies;
+} VkRenderPassCreateInfo;
+
+typedef struct VkCommandPoolCreateInfo {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkCommandPoolCreateFlags    flags;
+    uint32_t                    queueFamilyIndex;
+} VkCommandPoolCreateInfo;
+
+typedef struct VkCommandBufferAllocateInfo {
+    VkStructureType         sType;
+    const void*             pNext;
+    VkCommandPool           commandPool;
+    VkCommandBufferLevel    level;
+    uint32_t                commandBufferCount;
+} VkCommandBufferAllocateInfo;
+
+typedef struct VkCommandBufferInheritanceInfo {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkRenderPass                     renderPass;
+    uint32_t                         subpass;
+    VkFramebuffer                    framebuffer;
+    VkBool32                         occlusionQueryEnable;
+    VkQueryControlFlags              queryFlags;
+    VkQueryPipelineStatisticFlags    pipelineStatistics;
+} VkCommandBufferInheritanceInfo;
+
+typedef struct VkCommandBufferBeginInfo {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkCommandBufferUsageFlags                flags;
+    const VkCommandBufferInheritanceInfo*    pInheritanceInfo;
+} VkCommandBufferBeginInfo;
+
+typedef struct VkBufferCopy {
+    VkDeviceSize    srcOffset;
+    VkDeviceSize    dstOffset;
+    VkDeviceSize    size;
+} VkBufferCopy;
+
+typedef struct VkImageSubresourceLayers {
+    VkImageAspectFlags    aspectMask;
+    uint32_t              mipLevel;
+    uint32_t              baseArrayLayer;
+    uint32_t              layerCount;
+} VkImageSubresourceLayers;
+
+typedef struct VkImageCopy {
+    VkImageSubresourceLayers    srcSubresource;
+    VkOffset3D                  srcOffset;
+    VkImageSubresourceLayers    dstSubresource;
+    VkOffset3D                  dstOffset;
+    VkExtent3D                  extent;
+} VkImageCopy;
+
+typedef struct VkImageBlit {
+    VkImageSubresourceLayers    srcSubresource;
+    VkOffset3D                  srcOffsets[2];
+    VkImageSubresourceLayers    dstSubresource;
+    VkOffset3D                  dstOffsets[2];
+} VkImageBlit;
+
+typedef struct VkBufferImageCopy {
+    VkDeviceSize                bufferOffset;
+    uint32_t                    bufferRowLength;
+    uint32_t                    bufferImageHeight;
+    VkImageSubresourceLayers    imageSubresource;
+    VkOffset3D                  imageOffset;
+    VkExtent3D                  imageExtent;
+} VkBufferImageCopy;
+
+typedef union VkClearColorValue {
+    float       float32[4];
+    int32_t     int32[4];
+    uint32_t    uint32[4];
+} VkClearColorValue;
+
+typedef struct VkClearDepthStencilValue {
+    float       depth;
+    uint32_t    stencil;
+} VkClearDepthStencilValue;
+
+typedef union VkClearValue {
+    VkClearColorValue           color;
+    VkClearDepthStencilValue    depthStencil;
+} VkClearValue;
+
+typedef struct VkClearAttachment {
+    VkImageAspectFlags    aspectMask;
+    uint32_t              colorAttachment;
+    VkClearValue          clearValue;
+} VkClearAttachment;
+
+typedef struct VkClearRect {
+    VkRect2D    rect;
+    uint32_t    baseArrayLayer;
+    uint32_t    layerCount;
+} VkClearRect;
+
+typedef struct VkImageResolve {
+    VkImageSubresourceLayers    srcSubresource;
+    VkOffset3D                  srcOffset;
+    VkImageSubresourceLayers    dstSubresource;
+    VkOffset3D                  dstOffset;
+    VkExtent3D                  extent;
+} VkImageResolve;
+
+typedef struct VkMemoryBarrier {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkAccessFlags      srcAccessMask;
+    VkAccessFlags      dstAccessMask;
+} VkMemoryBarrier;
+
+typedef struct VkBufferMemoryBarrier {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkAccessFlags      srcAccessMask;
+    VkAccessFlags      dstAccessMask;
+    uint32_t           srcQueueFamilyIndex;
+    uint32_t           dstQueueFamilyIndex;
+    VkBuffer           buffer;
+    VkDeviceSize       offset;
+    VkDeviceSize       size;
+} VkBufferMemoryBarrier;
+
+typedef struct VkImageMemoryBarrier {
+    VkStructureType            sType;
+    const void*                pNext;
+    VkAccessFlags              srcAccessMask;
+    VkAccessFlags              dstAccessMask;
+    VkImageLayout              oldLayout;
+    VkImageLayout              newLayout;
+    uint32_t                   srcQueueFamilyIndex;
+    uint32_t                   dstQueueFamilyIndex;
+    VkImage                    image;
+    VkImageSubresourceRange    subresourceRange;
+} VkImageMemoryBarrier;
+
+typedef struct VkRenderPassBeginInfo {
+    VkStructureType        sType;
+    const void*            pNext;
+    VkRenderPass           renderPass;
+    VkFramebuffer          framebuffer;
+    VkRect2D               renderArea;
+    uint32_t               clearValueCount;
+    const VkClearValue*    pClearValues;
+} VkRenderPassBeginInfo;
+
+typedef struct VkDispatchIndirectCommand {
+    uint32_t    x;
+    uint32_t    y;
+    uint32_t    z;
+} VkDispatchIndirectCommand;
+
+typedef struct VkDrawIndexedIndirectCommand {
+    uint32_t    indexCount;
+    uint32_t    instanceCount;
+    uint32_t    firstIndex;
+    int32_t     vertexOffset;
+    uint32_t    firstInstance;
+} VkDrawIndexedIndirectCommand;
+
+typedef struct VkDrawIndirectCommand {
+    uint32_t    vertexCount;
+    uint32_t    instanceCount;
+    uint32_t    firstVertex;
+    uint32_t    firstInstance;
+} VkDrawIndirectCommand;
+
+typedef struct VkBaseOutStructure {
+    VkStructureType               sType;
+    struct VkBaseOutStructure*    pNext;
+} VkBaseOutStructure;
+
+typedef struct VkBaseInStructure {
+    VkStructureType                    sType;
+    const struct VkBaseInStructure*    pNext;
+} VkBaseInStructure;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateInstance)(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance);
+typedef void (VKAPI_PTR *PFN_vkDestroyInstance)(VkInstance instance, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDevices)(VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties);
+typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetInstanceProcAddr)(VkInstance instance, const char* pName);
+typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetDeviceProcAddr)(VkDevice device, const char* pName);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDevice)(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice);
+typedef void (VKAPI_PTR *PFN_vkDestroyDevice)(VkDevice device, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceExtensionProperties)(const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkEnumerateDeviceExtensionProperties)(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceLayerProperties)(uint32_t* pPropertyCount, VkLayerProperties* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkEnumerateDeviceLayerProperties)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceQueue)(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue);
+typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence);
+typedef VkResult (VKAPI_PTR *PFN_vkQueueWaitIdle)(VkQueue queue);
+typedef VkResult (VKAPI_PTR *PFN_vkDeviceWaitIdle)(VkDevice device);
+typedef VkResult (VKAPI_PTR *PFN_vkAllocateMemory)(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory);
+typedef void (VKAPI_PTR *PFN_vkFreeMemory)(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkMapMemory)(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData);
+typedef void (VKAPI_PTR *PFN_vkUnmapMemory)(VkDevice device, VkDeviceMemory memory);
+typedef VkResult (VKAPI_PTR *PFN_vkFlushMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges);
+typedef VkResult (VKAPI_PTR *PFN_vkInvalidateMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceMemoryCommitment)(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes);
+typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory)(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset);
+typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory)(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset);
+typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements)(VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements)(VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements)(VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkQueueBindSparse)(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateFence)(VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence);
+typedef void (VKAPI_PTR *PFN_vkDestroyFence)(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkResetFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences);
+typedef VkResult (VKAPI_PTR *PFN_vkGetFenceStatus)(VkDevice device, VkFence fence);
+typedef VkResult (VKAPI_PTR *PFN_vkWaitForFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateSemaphore)(VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore);
+typedef void (VKAPI_PTR *PFN_vkDestroySemaphore)(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateEvent)(VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent);
+typedef void (VKAPI_PTR *PFN_vkDestroyEvent)(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkGetEventStatus)(VkDevice device, VkEvent event);
+typedef VkResult (VKAPI_PTR *PFN_vkSetEvent)(VkDevice device, VkEvent event);
+typedef VkResult (VKAPI_PTR *PFN_vkResetEvent)(VkDevice device, VkEvent event);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateQueryPool)(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool);
+typedef void (VKAPI_PTR *PFN_vkDestroyQueryPool)(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkGetQueryPoolResults)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateBuffer)(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer);
+typedef void (VKAPI_PTR *PFN_vkDestroyBuffer)(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateBufferView)(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView);
+typedef void (VKAPI_PTR *PFN_vkDestroyBufferView)(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateImage)(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage);
+typedef void (VKAPI_PTR *PFN_vkDestroyImage)(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout)(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateImageView)(VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView);
+typedef void (VKAPI_PTR *PFN_vkDestroyImageView)(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateShaderModule)(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule);
+typedef void (VKAPI_PTR *PFN_vkDestroyShaderModule)(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineCache)(VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache);
+typedef void (VKAPI_PTR *PFN_vkDestroyPipelineCache)(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineCacheData)(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData);
+typedef VkResult (VKAPI_PTR *PFN_vkMergePipelineCaches)(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateGraphicsPipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateComputePipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines);
+typedef void (VKAPI_PTR *PFN_vkDestroyPipeline)(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineLayout)(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout);
+typedef void (VKAPI_PTR *PFN_vkDestroyPipelineLayout)(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateSampler)(VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler);
+typedef void (VKAPI_PTR *PFN_vkDestroySampler)(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorSetLayout)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout);
+typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorSetLayout)(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorPool)(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool);
+typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkResetDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags);
+typedef VkResult (VKAPI_PTR *PFN_vkAllocateDescriptorSets)(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets);
+typedef VkResult (VKAPI_PTR *PFN_vkFreeDescriptorSets)(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets);
+typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSets)(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateFramebuffer)(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer);
+typedef void (VKAPI_PTR *PFN_vkDestroyFramebuffer)(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass)(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass);
+typedef void (VKAPI_PTR *PFN_vkDestroyRenderPass)(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkGetRenderAreaGranularity)(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateCommandPool)(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool);
+typedef void (VKAPI_PTR *PFN_vkDestroyCommandPool)(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkResetCommandPool)(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags);
+typedef VkResult (VKAPI_PTR *PFN_vkAllocateCommandBuffers)(VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers);
+typedef void (VKAPI_PTR *PFN_vkFreeCommandBuffers)(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers);
+typedef VkResult (VKAPI_PTR *PFN_vkBeginCommandBuffer)(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkEndCommandBuffer)(VkCommandBuffer commandBuffer);
+typedef VkResult (VKAPI_PTR *PFN_vkResetCommandBuffer)(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags);
+typedef void (VKAPI_PTR *PFN_vkCmdBindPipeline)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline);
+typedef void (VKAPI_PTR *PFN_vkCmdSetViewport)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports);
+typedef void (VKAPI_PTR *PFN_vkCmdSetScissor)(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors);
+typedef void (VKAPI_PTR *PFN_vkCmdSetLineWidth)(VkCommandBuffer commandBuffer, float lineWidth);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBias)(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor);
+typedef void (VKAPI_PTR *PFN_vkCmdSetBlendConstants)(VkCommandBuffer commandBuffer, const float blendConstants[4]);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBounds)(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds);
+typedef void (VKAPI_PTR *PFN_vkCmdSetStencilCompareMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask);
+typedef void (VKAPI_PTR *PFN_vkCmdSetStencilWriteMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask);
+typedef void (VKAPI_PTR *PFN_vkCmdSetStencilReference)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference);
+typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorSets)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets);
+typedef void (VKAPI_PTR *PFN_vkCmdBindIndexBuffer)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType);
+typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets);
+typedef void (VKAPI_PTR *PFN_vkCmdDraw)(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexed)(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdDispatch)(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ);
+typedef void (VKAPI_PTR *PFN_vkCmdDispatchIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions);
+typedef void (VKAPI_PTR *PFN_vkCmdBlitImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions);
+typedef void (VKAPI_PTR *PFN_vkCmdUpdateBuffer)(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData);
+typedef void (VKAPI_PTR *PFN_vkCmdFillBuffer)(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data);
+typedef void (VKAPI_PTR *PFN_vkCmdClearColorImage)(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges);
+typedef void (VKAPI_PTR *PFN_vkCmdClearDepthStencilImage)(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges);
+typedef void (VKAPI_PTR *PFN_vkCmdClearAttachments)(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects);
+typedef void (VKAPI_PTR *PFN_vkCmdResolveImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions);
+typedef void (VKAPI_PTR *PFN_vkCmdSetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask);
+typedef void (VKAPI_PTR *PFN_vkCmdResetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask);
+typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers);
+typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier)(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers);
+typedef void (VKAPI_PTR *PFN_vkCmdBeginQuery)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags);
+typedef void (VKAPI_PTR *PFN_vkCmdEndQuery)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query);
+typedef void (VKAPI_PTR *PFN_vkCmdResetQueryPool)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount);
+typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyQueryPoolResults)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags);
+typedef void (VKAPI_PTR *PFN_vkCmdPushConstants)(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues);
+typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents);
+typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass)(VkCommandBuffer commandBuffer, VkSubpassContents contents);
+typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass)(VkCommandBuffer commandBuffer);
+typedef void (VKAPI_PTR *PFN_vkCmdExecuteCommands)(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance(
+    const VkInstanceCreateInfo*                 pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkInstance*                                 pInstance);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyInstance(
+    VkInstance                                  instance,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDevices(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceCount,
+    VkPhysicalDevice*                           pPhysicalDevices);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures*                   pFeatures);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties*                         pFormatProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageCreateFlags                          flags,
+    VkImageFormatProperties*                    pImageFormatProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties*                 pProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties*                    pQueueFamilyProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties*           pMemoryProperties);
+
+VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(
+    VkInstance                                  instance,
+    const char*                                 pName);
+
+VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(
+    VkDevice                                    device,
+    const char*                                 pName);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDeviceCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDevice*                                   pDevice);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDevice(
+    VkDevice                                    device,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(
+    const char*                                 pLayerName,
+    uint32_t*                                   pPropertyCount,
+    VkExtensionProperties*                      pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const char*                                 pLayerName,
+    uint32_t*                                   pPropertyCount,
+    VkExtensionProperties*                      pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(
+    uint32_t*                                   pPropertyCount,
+    VkLayerProperties*                          pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkLayerProperties*                          pProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue(
+    VkDevice                                    device,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t                                    queueIndex,
+    VkQueue*                                    pQueue);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit(
+    VkQueue                                     queue,
+    uint32_t                                    submitCount,
+    const VkSubmitInfo*                         pSubmits,
+    VkFence                                     fence);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle(
+    VkQueue                                     queue);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle(
+    VkDevice                                    device);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAllocateMemory(
+    VkDevice                                    device,
+    const VkMemoryAllocateInfo*                 pAllocateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDeviceMemory*                             pMemory);
+
+VKAPI_ATTR void VKAPI_CALL vkFreeMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                offset,
+    VkDeviceSize                                size,
+    VkMemoryMapFlags                            flags,
+    void**                                      ppData);
+
+VKAPI_ATTR void VKAPI_CALL vkUnmapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkFlushMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkInvalidateMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceMemoryCommitment(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize*                               pCommittedMemoryInBytes);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset);
+
+VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkMemoryRequirements*                       pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkMemoryRequirements*                       pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements*            pSparseMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkSampleCountFlagBits                       samples,
+    VkImageUsageFlags                           usage,
+    VkImageTiling                               tiling,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties*              pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkQueueBindSparse(
+    VkQueue                                     queue,
+    uint32_t                                    bindInfoCount,
+    const VkBindSparseInfo*                     pBindInfo,
+    VkFence                                     fence);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateFence(
+    VkDevice                                    device,
+    const VkFenceCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyFence(
+    VkDevice                                    device,
+    VkFence                                     fence,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkResetFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceStatus(
+    VkDevice                                    device,
+    VkFence                                     fence);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkWaitForFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences,
+    VkBool32                                    waitAll,
+    uint64_t                                    timeout);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateSemaphore(
+    VkDevice                                    device,
+    const VkSemaphoreCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSemaphore*                                pSemaphore);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroySemaphore(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateEvent(
+    VkDevice                                    device,
+    const VkEventCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkEvent*                                    pEvent);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyEvent(
+    VkDevice                                    device,
+    VkEvent                                     event,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetEventStatus(
+    VkDevice                                    device,
+    VkEvent                                     event);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkSetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkResetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateQueryPool(
+    VkDevice                                    device,
+    const VkQueryPoolCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkQueryPool*                                pQueryPool);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyQueryPool(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetQueryPoolResults(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    size_t                                      dataSize,
+    void*                                       pData,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateBuffer(
+    VkDevice                                    device,
+    const VkBufferCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBuffer*                                   pBuffer);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyBuffer(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferView(
+    VkDevice                                    device,
+    const VkBufferViewCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBufferView*                               pView);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyBufferView(
+    VkDevice                                    device,
+    VkBufferView                                bufferView,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateImage(
+    VkDevice                                    device,
+    const VkImageCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImage*                                    pImage);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyImage(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkImageSubresource*                   pSubresource,
+    VkSubresourceLayout*                        pLayout);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateImageView(
+    VkDevice                                    device,
+    const VkImageViewCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImageView*                                pView);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyImageView(
+    VkDevice                                    device,
+    VkImageView                                 imageView,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateShaderModule(
+    VkDevice                                    device,
+    const VkShaderModuleCreateInfo*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkShaderModule*                             pShaderModule);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyShaderModule(
+    VkDevice                                    device,
+    VkShaderModule                              shaderModule,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineCache(
+    VkDevice                                    device,
+    const VkPipelineCacheCreateInfo*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineCache*                            pPipelineCache);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineCache(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineCacheData(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    size_t*                                     pDataSize,
+    void*                                       pData);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkMergePipelineCaches(
+    VkDevice                                    device,
+    VkPipelineCache                             dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkPipelineCache*                      pSrcCaches);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateGraphicsPipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkGraphicsPipelineCreateInfo*         pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateComputePipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkComputePipelineCreateInfo*          pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyPipeline(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineLayout(
+    VkDevice                                    device,
+    const VkPipelineLayoutCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineLayout*                           pPipelineLayout);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineLayout(
+    VkDevice                                    device,
+    VkPipelineLayout                            pipelineLayout,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateSampler(
+    VkDevice                                    device,
+    const VkSamplerCreateInfo*                  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSampler*                                  pSampler);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroySampler(
+    VkDevice                                    device,
+    VkSampler                                   sampler,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorSetLayout(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorSetLayout*                      pSetLayout);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorSetLayout(
+    VkDevice                                    device,
+    VkDescriptorSetLayout                       descriptorSetLayout,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorPool(
+    VkDevice                                    device,
+    const VkDescriptorPoolCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorPool*                           pDescriptorPool);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkResetDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    VkDescriptorPoolResetFlags                  flags);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAllocateDescriptorSets(
+    VkDevice                                    device,
+    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
+    VkDescriptorSet*                            pDescriptorSets);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkFreeDescriptorSets(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets);
+
+VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSets(
+    VkDevice                                    device,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites,
+    uint32_t                                    descriptorCopyCount,
+    const VkCopyDescriptorSet*                  pDescriptorCopies);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateFramebuffer(
+    VkDevice                                    device,
+    const VkFramebufferCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFramebuffer*                              pFramebuffer);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyFramebuffer(
+    VkDevice                                    device,
+    VkFramebuffer                               framebuffer,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyRenderPass(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkGetRenderAreaGranularity(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    VkExtent2D*                                 pGranularity);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateCommandPool(
+    VkDevice                                    device,
+    const VkCommandPoolCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkCommandPool*                              pCommandPool);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolResetFlags                     flags);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAllocateCommandBuffers(
+    VkDevice                                    device,
+    const VkCommandBufferAllocateInfo*          pAllocateInfo,
+    VkCommandBuffer*                            pCommandBuffers);
+
+VKAPI_ATTR void VKAPI_CALL vkFreeCommandBuffers(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBeginCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    const VkCommandBufferBeginInfo*             pBeginInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEndCommandBuffer(
+    VkCommandBuffer                             commandBuffer);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkCommandBufferResetFlags                   flags);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindPipeline(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipeline                                  pipeline);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetViewport(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewport*                           pViewports);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetScissor(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstScissor,
+    uint32_t                                    scissorCount,
+    const VkRect2D*                             pScissors);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetLineWidth(
+    VkCommandBuffer                             commandBuffer,
+    float                                       lineWidth);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias(
+    VkCommandBuffer                             commandBuffer,
+    float                                       depthBiasConstantFactor,
+    float                                       depthBiasClamp,
+    float                                       depthBiasSlopeFactor);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetBlendConstants(
+    VkCommandBuffer                             commandBuffer,
+    const float                                 blendConstants[4]);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBounds(
+    VkCommandBuffer                             commandBuffer,
+    float                                       minDepthBounds,
+    float                                       maxDepthBounds);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilCompareMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    compareMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilWriteMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    writeMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilReference(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    reference);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    firstSet,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets,
+    uint32_t                                    dynamicOffsetCount,
+    const uint32_t*                             pDynamicOffsets);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkIndexType                                 indexType);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDraw(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    vertexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstVertex,
+    uint32_t                                    firstInstance);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexed(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    indexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstIndex,
+    int32_t                                     vertexOffset,
+    uint32_t                                    firstInstance);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDispatch(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDispatchIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferCopy*                         pRegions);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageCopy*                          pRegions);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageBlit*                          pRegions,
+    VkFilter                                    filter);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdUpdateBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                dataSize,
+    const void*                                 pData);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdFillBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                size,
+    uint32_t                                    data);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdClearColorImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearColorValue*                    pColor,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdClearDepthStencilImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearDepthStencilValue*             pDepthStencil,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdClearAttachments(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    attachmentCount,
+    const VkClearAttachment*                    pAttachments,
+    uint32_t                                    rectCount,
+    const VkClearRect*                          pRects);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageResolve*                       pRegions);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    eventCount,
+    const VkEvent*                              pEvents,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    VkDependencyFlags                           dependencyFlags,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdResetQueryPool(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyQueryPoolResults(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineLayout                            layout,
+    VkShaderStageFlags                          stageFlags,
+    uint32_t                                    offset,
+    uint32_t                                    size,
+    const void*                                 pValues);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    VkSubpassContents                           contents);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass(
+    VkCommandBuffer                             commandBuffer,
+    VkSubpassContents                           contents);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass(
+    VkCommandBuffer                             commandBuffer);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers);
+#endif
+
+
+#define VK_VERSION_1_1 1
+// Vulkan 1.1 version number
+#define VK_API_VERSION_1_1 VK_MAKE_VERSION(1, 1, 0)// Patch version should always be set to 0
+
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSamplerYcbcrConversion)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorUpdateTemplate)
+#define VK_MAX_DEVICE_GROUP_SIZE          32
+#define VK_LUID_SIZE                      8
+#define VK_QUEUE_FAMILY_EXTERNAL          (~0U-1)
+
+typedef enum VkPointClippingBehavior {
+    VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES = 0,
+    VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY = 1,
+    VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES_KHR = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES,
+    VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY_KHR = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY,
+    VK_POINT_CLIPPING_BEHAVIOR_BEGIN_RANGE = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES,
+    VK_POINT_CLIPPING_BEHAVIOR_END_RANGE = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY,
+    VK_POINT_CLIPPING_BEHAVIOR_RANGE_SIZE = (VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY - VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES + 1),
+    VK_POINT_CLIPPING_BEHAVIOR_MAX_ENUM = 0x7FFFFFFF
+} VkPointClippingBehavior;
+
+typedef enum VkTessellationDomainOrigin {
+    VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT = 0,
+    VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT = 1,
+    VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT_KHR = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT,
+    VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT_KHR = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT,
+    VK_TESSELLATION_DOMAIN_ORIGIN_BEGIN_RANGE = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT,
+    VK_TESSELLATION_DOMAIN_ORIGIN_END_RANGE = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT,
+    VK_TESSELLATION_DOMAIN_ORIGIN_RANGE_SIZE = (VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT - VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT + 1),
+    VK_TESSELLATION_DOMAIN_ORIGIN_MAX_ENUM = 0x7FFFFFFF
+} VkTessellationDomainOrigin;
+
+typedef enum VkSamplerYcbcrModelConversion {
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY = 0,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY = 1,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709 = 2,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601 = 3,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020 = 4,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_BEGIN_RANGE = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_END_RANGE = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_RANGE_SIZE = (VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020 - VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY + 1),
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_MAX_ENUM = 0x7FFFFFFF
+} VkSamplerYcbcrModelConversion;
+
+typedef enum VkSamplerYcbcrRange {
+    VK_SAMPLER_YCBCR_RANGE_ITU_FULL = 0,
+    VK_SAMPLER_YCBCR_RANGE_ITU_NARROW = 1,
+    VK_SAMPLER_YCBCR_RANGE_ITU_FULL_KHR = VK_SAMPLER_YCBCR_RANGE_ITU_FULL,
+    VK_SAMPLER_YCBCR_RANGE_ITU_NARROW_KHR = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW,
+    VK_SAMPLER_YCBCR_RANGE_BEGIN_RANGE = VK_SAMPLER_YCBCR_RANGE_ITU_FULL,
+    VK_SAMPLER_YCBCR_RANGE_END_RANGE = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW,
+    VK_SAMPLER_YCBCR_RANGE_RANGE_SIZE = (VK_SAMPLER_YCBCR_RANGE_ITU_NARROW - VK_SAMPLER_YCBCR_RANGE_ITU_FULL + 1),
+    VK_SAMPLER_YCBCR_RANGE_MAX_ENUM = 0x7FFFFFFF
+} VkSamplerYcbcrRange;
+
+typedef enum VkChromaLocation {
+    VK_CHROMA_LOCATION_COSITED_EVEN = 0,
+    VK_CHROMA_LOCATION_MIDPOINT = 1,
+    VK_CHROMA_LOCATION_COSITED_EVEN_KHR = VK_CHROMA_LOCATION_COSITED_EVEN,
+    VK_CHROMA_LOCATION_MIDPOINT_KHR = VK_CHROMA_LOCATION_MIDPOINT,
+    VK_CHROMA_LOCATION_BEGIN_RANGE = VK_CHROMA_LOCATION_COSITED_EVEN,
+    VK_CHROMA_LOCATION_END_RANGE = VK_CHROMA_LOCATION_MIDPOINT,
+    VK_CHROMA_LOCATION_RANGE_SIZE = (VK_CHROMA_LOCATION_MIDPOINT - VK_CHROMA_LOCATION_COSITED_EVEN + 1),
+    VK_CHROMA_LOCATION_MAX_ENUM = 0x7FFFFFFF
+} VkChromaLocation;
+
+typedef enum VkDescriptorUpdateTemplateType {
+    VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET = 0,
+    VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR = 1,
+    VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET,
+    VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_BEGIN_RANGE = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET,
+    VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_END_RANGE = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET,
+    VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_RANGE_SIZE = (VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET + 1),
+    VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkDescriptorUpdateTemplateType;
+
+typedef enum VkSubgroupFeatureFlagBits {
+    VK_SUBGROUP_FEATURE_BASIC_BIT = 0x00000001,
+    VK_SUBGROUP_FEATURE_VOTE_BIT = 0x00000002,
+    VK_SUBGROUP_FEATURE_ARITHMETIC_BIT = 0x00000004,
+    VK_SUBGROUP_FEATURE_BALLOT_BIT = 0x00000008,
+    VK_SUBGROUP_FEATURE_SHUFFLE_BIT = 0x00000010,
+    VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT = 0x00000020,
+    VK_SUBGROUP_FEATURE_CLUSTERED_BIT = 0x00000040,
+    VK_SUBGROUP_FEATURE_QUAD_BIT = 0x00000080,
+    VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV = 0x00000100,
+    VK_SUBGROUP_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSubgroupFeatureFlagBits;
+typedef VkFlags VkSubgroupFeatureFlags;
+
+typedef enum VkPeerMemoryFeatureFlagBits {
+    VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT = 0x00000001,
+    VK_PEER_MEMORY_FEATURE_COPY_DST_BIT = 0x00000002,
+    VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT = 0x00000004,
+    VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT = 0x00000008,
+    VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT_KHR = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT,
+    VK_PEER_MEMORY_FEATURE_COPY_DST_BIT_KHR = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT,
+    VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT_KHR = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT,
+    VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT_KHR = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT,
+    VK_PEER_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkPeerMemoryFeatureFlagBits;
+typedef VkFlags VkPeerMemoryFeatureFlags;
+
+typedef enum VkMemoryAllocateFlagBits {
+    VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT = 0x00000001,
+    VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR = 0x00000002,
+    VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = 0x00000004,
+    VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT_KHR = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT,
+    VK_MEMORY_ALLOCATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkMemoryAllocateFlagBits;
+typedef VkFlags VkMemoryAllocateFlags;
+typedef VkFlags VkCommandPoolTrimFlags;
+typedef VkFlags VkDescriptorUpdateTemplateCreateFlags;
+
+typedef enum VkExternalMemoryHandleTypeFlagBits {
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT = 0x00000008,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT = 0x00000010,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT = 0x00000020,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT = 0x00000040,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT = 0x00000200,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID = 0x00000400,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT = 0x00000080,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT = 0x00000100,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkExternalMemoryHandleTypeFlagBits;
+typedef VkFlags VkExternalMemoryHandleTypeFlags;
+
+typedef enum VkExternalMemoryFeatureFlagBits {
+    VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT = 0x00000001,
+    VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT = 0x00000002,
+    VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT = 0x00000004,
+    VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT,
+    VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT,
+    VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT,
+    VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkExternalMemoryFeatureFlagBits;
+typedef VkFlags VkExternalMemoryFeatureFlags;
+
+typedef enum VkExternalFenceHandleTypeFlagBits {
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001,
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002,
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004,
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT = 0x00000008,
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT,
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT,
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkExternalFenceHandleTypeFlagBits;
+typedef VkFlags VkExternalFenceHandleTypeFlags;
+
+typedef enum VkExternalFenceFeatureFlagBits {
+    VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT = 0x00000001,
+    VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT = 0x00000002,
+    VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT,
+    VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT,
+    VK_EXTERNAL_FENCE_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkExternalFenceFeatureFlagBits;
+typedef VkFlags VkExternalFenceFeatureFlags;
+
+typedef enum VkFenceImportFlagBits {
+    VK_FENCE_IMPORT_TEMPORARY_BIT = 0x00000001,
+    VK_FENCE_IMPORT_TEMPORARY_BIT_KHR = VK_FENCE_IMPORT_TEMPORARY_BIT,
+    VK_FENCE_IMPORT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkFenceImportFlagBits;
+typedef VkFlags VkFenceImportFlags;
+
+typedef enum VkSemaphoreImportFlagBits {
+    VK_SEMAPHORE_IMPORT_TEMPORARY_BIT = 0x00000001,
+    VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT,
+    VK_SEMAPHORE_IMPORT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSemaphoreImportFlagBits;
+typedef VkFlags VkSemaphoreImportFlags;
+
+typedef enum VkExternalSemaphoreHandleTypeFlagBits {
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT = 0x00000008,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT = 0x00000010,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkExternalSemaphoreHandleTypeFlagBits;
+typedef VkFlags VkExternalSemaphoreHandleTypeFlags;
+
+typedef enum VkExternalSemaphoreFeatureFlagBits {
+    VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT = 0x00000001,
+    VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT = 0x00000002,
+    VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT,
+    VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT,
+    VK_EXTERNAL_SEMAPHORE_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkExternalSemaphoreFeatureFlagBits;
+typedef VkFlags VkExternalSemaphoreFeatureFlags;
+typedef struct VkPhysicalDeviceSubgroupProperties {
+    VkStructureType           sType;
+    void*                     pNext;
+    uint32_t                  subgroupSize;
+    VkShaderStageFlags        supportedStages;
+    VkSubgroupFeatureFlags    supportedOperations;
+    VkBool32                  quadOperationsInAllStages;
+} VkPhysicalDeviceSubgroupProperties;
+
+typedef struct VkBindBufferMemoryInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBuffer           buffer;
+    VkDeviceMemory     memory;
+    VkDeviceSize       memoryOffset;
+} VkBindBufferMemoryInfo;
+
+typedef struct VkBindImageMemoryInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkImage            image;
+    VkDeviceMemory     memory;
+    VkDeviceSize       memoryOffset;
+} VkBindImageMemoryInfo;
+
+typedef struct VkPhysicalDevice16BitStorageFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           storageBuffer16BitAccess;
+    VkBool32           uniformAndStorageBuffer16BitAccess;
+    VkBool32           storagePushConstant16;
+    VkBool32           storageInputOutput16;
+} VkPhysicalDevice16BitStorageFeatures;
+
+typedef struct VkMemoryDedicatedRequirements {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           prefersDedicatedAllocation;
+    VkBool32           requiresDedicatedAllocation;
+} VkMemoryDedicatedRequirements;
+
+typedef struct VkMemoryDedicatedAllocateInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkImage            image;
+    VkBuffer           buffer;
+} VkMemoryDedicatedAllocateInfo;
+
+typedef struct VkMemoryAllocateFlagsInfo {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkMemoryAllocateFlags    flags;
+    uint32_t                 deviceMask;
+} VkMemoryAllocateFlagsInfo;
+
+typedef struct VkDeviceGroupRenderPassBeginInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           deviceMask;
+    uint32_t           deviceRenderAreaCount;
+    const VkRect2D*    pDeviceRenderAreas;
+} VkDeviceGroupRenderPassBeginInfo;
+
+typedef struct VkDeviceGroupCommandBufferBeginInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           deviceMask;
+} VkDeviceGroupCommandBufferBeginInfo;
+
+typedef struct VkDeviceGroupSubmitInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           waitSemaphoreCount;
+    const uint32_t*    pWaitSemaphoreDeviceIndices;
+    uint32_t           commandBufferCount;
+    const uint32_t*    pCommandBufferDeviceMasks;
+    uint32_t           signalSemaphoreCount;
+    const uint32_t*    pSignalSemaphoreDeviceIndices;
+} VkDeviceGroupSubmitInfo;
+
+typedef struct VkDeviceGroupBindSparseInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           resourceDeviceIndex;
+    uint32_t           memoryDeviceIndex;
+} VkDeviceGroupBindSparseInfo;
+
+typedef struct VkBindBufferMemoryDeviceGroupInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           deviceIndexCount;
+    const uint32_t*    pDeviceIndices;
+} VkBindBufferMemoryDeviceGroupInfo;
+
+typedef struct VkBindImageMemoryDeviceGroupInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           deviceIndexCount;
+    const uint32_t*    pDeviceIndices;
+    uint32_t           splitInstanceBindRegionCount;
+    const VkRect2D*    pSplitInstanceBindRegions;
+} VkBindImageMemoryDeviceGroupInfo;
+
+typedef struct VkPhysicalDeviceGroupProperties {
+    VkStructureType     sType;
+    void*               pNext;
+    uint32_t            physicalDeviceCount;
+    VkPhysicalDevice    physicalDevices[VK_MAX_DEVICE_GROUP_SIZE];
+    VkBool32            subsetAllocation;
+} VkPhysicalDeviceGroupProperties;
+
+typedef struct VkDeviceGroupDeviceCreateInfo {
+    VkStructureType            sType;
+    const void*                pNext;
+    uint32_t                   physicalDeviceCount;
+    const VkPhysicalDevice*    pPhysicalDevices;
+} VkDeviceGroupDeviceCreateInfo;
+
+typedef struct VkBufferMemoryRequirementsInfo2 {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBuffer           buffer;
+} VkBufferMemoryRequirementsInfo2;
+
+typedef struct VkImageMemoryRequirementsInfo2 {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkImage            image;
+} VkImageMemoryRequirementsInfo2;
+
+typedef struct VkImageSparseMemoryRequirementsInfo2 {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkImage            image;
+} VkImageSparseMemoryRequirementsInfo2;
+
+typedef struct VkMemoryRequirements2 {
+    VkStructureType         sType;
+    void*                   pNext;
+    VkMemoryRequirements    memoryRequirements;
+} VkMemoryRequirements2;
+
+typedef VkMemoryRequirements2 VkMemoryRequirements2KHR;
+
+typedef struct VkSparseImageMemoryRequirements2 {
+    VkStructureType                    sType;
+    void*                              pNext;
+    VkSparseImageMemoryRequirements    memoryRequirements;
+} VkSparseImageMemoryRequirements2;
+
+typedef struct VkPhysicalDeviceFeatures2 {
+    VkStructureType             sType;
+    void*                       pNext;
+    VkPhysicalDeviceFeatures    features;
+} VkPhysicalDeviceFeatures2;
+
+typedef struct VkPhysicalDeviceProperties2 {
+    VkStructureType               sType;
+    void*                         pNext;
+    VkPhysicalDeviceProperties    properties;
+} VkPhysicalDeviceProperties2;
+
+typedef struct VkFormatProperties2 {
+    VkStructureType       sType;
+    void*                 pNext;
+    VkFormatProperties    formatProperties;
+} VkFormatProperties2;
+
+typedef struct VkImageFormatProperties2 {
+    VkStructureType            sType;
+    void*                      pNext;
+    VkImageFormatProperties    imageFormatProperties;
+} VkImageFormatProperties2;
+
+typedef struct VkPhysicalDeviceImageFormatInfo2 {
+    VkStructureType       sType;
+    const void*           pNext;
+    VkFormat              format;
+    VkImageType           type;
+    VkImageTiling         tiling;
+    VkImageUsageFlags     usage;
+    VkImageCreateFlags    flags;
+} VkPhysicalDeviceImageFormatInfo2;
+
+typedef struct VkQueueFamilyProperties2 {
+    VkStructureType            sType;
+    void*                      pNext;
+    VkQueueFamilyProperties    queueFamilyProperties;
+} VkQueueFamilyProperties2;
+
+typedef struct VkPhysicalDeviceMemoryProperties2 {
+    VkStructureType                     sType;
+    void*                               pNext;
+    VkPhysicalDeviceMemoryProperties    memoryProperties;
+} VkPhysicalDeviceMemoryProperties2;
+
+typedef struct VkSparseImageFormatProperties2 {
+    VkStructureType                  sType;
+    void*                            pNext;
+    VkSparseImageFormatProperties    properties;
+} VkSparseImageFormatProperties2;
+
+typedef struct VkPhysicalDeviceSparseImageFormatInfo2 {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkFormat                 format;
+    VkImageType              type;
+    VkSampleCountFlagBits    samples;
+    VkImageUsageFlags        usage;
+    VkImageTiling            tiling;
+} VkPhysicalDeviceSparseImageFormatInfo2;
+
+typedef struct VkPhysicalDevicePointClippingProperties {
+    VkStructureType            sType;
+    void*                      pNext;
+    VkPointClippingBehavior    pointClippingBehavior;
+} VkPhysicalDevicePointClippingProperties;
+
+typedef struct VkInputAttachmentAspectReference {
+    uint32_t              subpass;
+    uint32_t              inputAttachmentIndex;
+    VkImageAspectFlags    aspectMask;
+} VkInputAttachmentAspectReference;
+
+typedef struct VkRenderPassInputAttachmentAspectCreateInfo {
+    VkStructureType                            sType;
+    const void*                                pNext;
+    uint32_t                                   aspectReferenceCount;
+    const VkInputAttachmentAspectReference*    pAspectReferences;
+} VkRenderPassInputAttachmentAspectCreateInfo;
+
+typedef struct VkImageViewUsageCreateInfo {
+    VkStructureType      sType;
+    const void*          pNext;
+    VkImageUsageFlags    usage;
+} VkImageViewUsageCreateInfo;
+
+typedef struct VkPipelineTessellationDomainOriginStateCreateInfo {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkTessellationDomainOrigin    domainOrigin;
+} VkPipelineTessellationDomainOriginStateCreateInfo;
+
+typedef struct VkRenderPassMultiviewCreateInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           subpassCount;
+    const uint32_t*    pViewMasks;
+    uint32_t           dependencyCount;
+    const int32_t*     pViewOffsets;
+    uint32_t           correlationMaskCount;
+    const uint32_t*    pCorrelationMasks;
+} VkRenderPassMultiviewCreateInfo;
+
+typedef struct VkPhysicalDeviceMultiviewFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           multiview;
+    VkBool32           multiviewGeometryShader;
+    VkBool32           multiviewTessellationShader;
+} VkPhysicalDeviceMultiviewFeatures;
+
+typedef struct VkPhysicalDeviceMultiviewProperties {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxMultiviewViewCount;
+    uint32_t           maxMultiviewInstanceIndex;
+} VkPhysicalDeviceMultiviewProperties;
+
+typedef struct VkPhysicalDeviceVariablePointersFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           variablePointersStorageBuffer;
+    VkBool32           variablePointers;
+} VkPhysicalDeviceVariablePointersFeatures;
+
+typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointerFeatures;
+
+typedef struct VkPhysicalDeviceProtectedMemoryFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           protectedMemory;
+} VkPhysicalDeviceProtectedMemoryFeatures;
+
+typedef struct VkPhysicalDeviceProtectedMemoryProperties {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           protectedNoFault;
+} VkPhysicalDeviceProtectedMemoryProperties;
+
+typedef struct VkDeviceQueueInfo2 {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkDeviceQueueCreateFlags    flags;
+    uint32_t                    queueFamilyIndex;
+    uint32_t                    queueIndex;
+} VkDeviceQueueInfo2;
+
+typedef struct VkProtectedSubmitInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           protectedSubmit;
+} VkProtectedSubmitInfo;
+
+typedef struct VkSamplerYcbcrConversionCreateInfo {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkFormat                         format;
+    VkSamplerYcbcrModelConversion    ycbcrModel;
+    VkSamplerYcbcrRange              ycbcrRange;
+    VkComponentMapping               components;
+    VkChromaLocation                 xChromaOffset;
+    VkChromaLocation                 yChromaOffset;
+    VkFilter                         chromaFilter;
+    VkBool32                         forceExplicitReconstruction;
+} VkSamplerYcbcrConversionCreateInfo;
+
+typedef struct VkSamplerYcbcrConversionInfo {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkSamplerYcbcrConversion    conversion;
+} VkSamplerYcbcrConversionInfo;
+
+typedef struct VkBindImagePlaneMemoryInfo {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkImageAspectFlagBits    planeAspect;
+} VkBindImagePlaneMemoryInfo;
+
+typedef struct VkImagePlaneMemoryRequirementsInfo {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkImageAspectFlagBits    planeAspect;
+} VkImagePlaneMemoryRequirementsInfo;
+
+typedef struct VkPhysicalDeviceSamplerYcbcrConversionFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           samplerYcbcrConversion;
+} VkPhysicalDeviceSamplerYcbcrConversionFeatures;
+
+typedef struct VkSamplerYcbcrConversionImageFormatProperties {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           combinedImageSamplerDescriptorCount;
+} VkSamplerYcbcrConversionImageFormatProperties;
+
+typedef struct VkDescriptorUpdateTemplateEntry {
+    uint32_t            dstBinding;
+    uint32_t            dstArrayElement;
+    uint32_t            descriptorCount;
+    VkDescriptorType    descriptorType;
+    size_t              offset;
+    size_t              stride;
+} VkDescriptorUpdateTemplateEntry;
+
+typedef struct VkDescriptorUpdateTemplateCreateInfo {
+    VkStructureType                           sType;
+    const void*                               pNext;
+    VkDescriptorUpdateTemplateCreateFlags     flags;
+    uint32_t                                  descriptorUpdateEntryCount;
+    const VkDescriptorUpdateTemplateEntry*    pDescriptorUpdateEntries;
+    VkDescriptorUpdateTemplateType            templateType;
+    VkDescriptorSetLayout                     descriptorSetLayout;
+    VkPipelineBindPoint                       pipelineBindPoint;
+    VkPipelineLayout                          pipelineLayout;
+    uint32_t                                  set;
+} VkDescriptorUpdateTemplateCreateInfo;
+
+typedef struct VkExternalMemoryProperties {
+    VkExternalMemoryFeatureFlags       externalMemoryFeatures;
+    VkExternalMemoryHandleTypeFlags    exportFromImportedHandleTypes;
+    VkExternalMemoryHandleTypeFlags    compatibleHandleTypes;
+} VkExternalMemoryProperties;
+
+typedef struct VkPhysicalDeviceExternalImageFormatInfo {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkExternalMemoryHandleTypeFlagBits    handleType;
+} VkPhysicalDeviceExternalImageFormatInfo;
+
+typedef struct VkExternalImageFormatProperties {
+    VkStructureType               sType;
+    void*                         pNext;
+    VkExternalMemoryProperties    externalMemoryProperties;
+} VkExternalImageFormatProperties;
+
+typedef struct VkPhysicalDeviceExternalBufferInfo {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkBufferCreateFlags                   flags;
+    VkBufferUsageFlags                    usage;
+    VkExternalMemoryHandleTypeFlagBits    handleType;
+} VkPhysicalDeviceExternalBufferInfo;
+
+typedef struct VkExternalBufferProperties {
+    VkStructureType               sType;
+    void*                         pNext;
+    VkExternalMemoryProperties    externalMemoryProperties;
+} VkExternalBufferProperties;
+
+typedef struct VkPhysicalDeviceIDProperties {
+    VkStructureType    sType;
+    void*              pNext;
+    uint8_t            deviceUUID[VK_UUID_SIZE];
+    uint8_t            driverUUID[VK_UUID_SIZE];
+    uint8_t            deviceLUID[VK_LUID_SIZE];
+    uint32_t           deviceNodeMask;
+    VkBool32           deviceLUIDValid;
+} VkPhysicalDeviceIDProperties;
+
+typedef struct VkExternalMemoryImageCreateInfo {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    VkExternalMemoryHandleTypeFlags    handleTypes;
+} VkExternalMemoryImageCreateInfo;
+
+typedef struct VkExternalMemoryBufferCreateInfo {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    VkExternalMemoryHandleTypeFlags    handleTypes;
+} VkExternalMemoryBufferCreateInfo;
+
+typedef struct VkExportMemoryAllocateInfo {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    VkExternalMemoryHandleTypeFlags    handleTypes;
+} VkExportMemoryAllocateInfo;
+
+typedef struct VkPhysicalDeviceExternalFenceInfo {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkExternalFenceHandleTypeFlagBits    handleType;
+} VkPhysicalDeviceExternalFenceInfo;
+
+typedef struct VkExternalFenceProperties {
+    VkStructureType                   sType;
+    void*                             pNext;
+    VkExternalFenceHandleTypeFlags    exportFromImportedHandleTypes;
+    VkExternalFenceHandleTypeFlags    compatibleHandleTypes;
+    VkExternalFenceFeatureFlags       externalFenceFeatures;
+} VkExternalFenceProperties;
+
+typedef struct VkExportFenceCreateInfo {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkExternalFenceHandleTypeFlags    handleTypes;
+} VkExportFenceCreateInfo;
+
+typedef struct VkExportSemaphoreCreateInfo {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkExternalSemaphoreHandleTypeFlags    handleTypes;
+} VkExportSemaphoreCreateInfo;
+
+typedef struct VkPhysicalDeviceExternalSemaphoreInfo {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkExternalSemaphoreHandleTypeFlagBits    handleType;
+} VkPhysicalDeviceExternalSemaphoreInfo;
+
+typedef struct VkExternalSemaphoreProperties {
+    VkStructureType                       sType;
+    void*                                 pNext;
+    VkExternalSemaphoreHandleTypeFlags    exportFromImportedHandleTypes;
+    VkExternalSemaphoreHandleTypeFlags    compatibleHandleTypes;
+    VkExternalSemaphoreFeatureFlags       externalSemaphoreFeatures;
+} VkExternalSemaphoreProperties;
+
+typedef struct VkPhysicalDeviceMaintenance3Properties {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxPerSetDescriptors;
+    VkDeviceSize       maxMemoryAllocationSize;
+} VkPhysicalDeviceMaintenance3Properties;
+
+typedef struct VkDescriptorSetLayoutSupport {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           supported;
+} VkDescriptorSetLayoutSupport;
+
+typedef struct VkPhysicalDeviceShaderDrawParametersFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderDrawParameters;
+} VkPhysicalDeviceShaderDrawParametersFeatures;
+
+typedef VkPhysicalDeviceShaderDrawParametersFeatures VkPhysicalDeviceShaderDrawParameterFeatures;
+
+typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceVersion)(uint32_t* pApiVersion);
+typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory2)(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos);
+typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory2)(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceGroupPeerMemoryFeatures)(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDeviceMask)(VkCommandBuffer commandBuffer, uint32_t deviceMask);
+typedef void (VKAPI_PTR *PFN_vkCmdDispatchBase)(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ);
+typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceGroups)(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties);
+typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements2)(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements2)(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements2)(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties2)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties2)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties2)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties2)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties);
+typedef void (VKAPI_PTR *PFN_vkTrimCommandPool)(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceQueue2)(VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateSamplerYcbcrConversion)(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion);
+typedef void (VKAPI_PTR *PFN_vkDestroySamplerYcbcrConversion)(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorUpdateTemplate)(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate);
+typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorUpdateTemplate)(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSetWithTemplate)(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalBufferProperties)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalFenceProperties)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalSemaphoreProperties)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties);
+typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSupport)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceVersion(
+    uint32_t*                                   pApiVersion);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeatures(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMask(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBase(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroups(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures2*                  pFeatures);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties2*                pProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties2*                        pFormatProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceImageFormatInfo2*     pImageFormatInfo,
+    VkImageFormatProperties2*                   pImageFormatProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties2*                   pQueueFamilyProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties2*          pMemoryProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties2*             pProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkTrimCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue2(
+    VkDevice                                    device,
+    const VkDeviceQueueInfo2*                   pQueueInfo,
+    VkQueue*                                    pQueue);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversion(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversion(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplate(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalBufferInfo*   pExternalBufferInfo,
+    VkExternalBufferProperties*                 pExternalBufferProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFenceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalFenceInfo*    pExternalFenceInfo,
+    VkExternalFenceProperties*                  pExternalFenceProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphoreProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
+    VkExternalSemaphoreProperties*              pExternalSemaphoreProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupport(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport);
+#endif
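+
+/*
+ * Illustrative usage sketch, not part of the generated Vulkan declarations
+ * above: querying an optional Vulkan 1.1 feature through the pNext chain of
+ * vkGetPhysicalDeviceFeatures2. Assumes VK_NO_PROTOTYPES is not defined and
+ * that the caller already holds a valid VkPhysicalDevice.
+ */
+#ifndef VK_NO_PROTOTYPES
+static VkBool32 exampleHasSamplerYcbcrConversion(VkPhysicalDevice physicalDevice)
+{
+    VkPhysicalDeviceSamplerYcbcrConversionFeatures ycbcrFeatures;
+    VkPhysicalDeviceFeatures2 features2;
+
+    ycbcrFeatures.sType =
+        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES;
+    ycbcrFeatures.pNext = NULL;
+    ycbcrFeatures.samplerYcbcrConversion = VK_FALSE;
+
+    features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
+    features2.pNext = &ycbcrFeatures;
+    /* features2.features is an output; the implementation fills it in. */
+
+    vkGetPhysicalDeviceFeatures2(physicalDevice, &features2);
+    return ycbcrFeatures.samplerYcbcrConversion;
+}
+#endif  /* VK_NO_PROTOTYPES */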
+
+
+#define VK_KHR_surface 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSurfaceKHR)
+#define VK_KHR_SURFACE_SPEC_VERSION       25
+#define VK_KHR_SURFACE_EXTENSION_NAME     "VK_KHR_surface"
+
+typedef enum VkColorSpaceKHR {
+    VK_COLOR_SPACE_SRGB_NONLINEAR_KHR = 0,
+    VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT = 1000104001,
+    VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT = 1000104002,
+    VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT = 1000104003,
+    VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT = 1000104004,
+    VK_COLOR_SPACE_BT709_LINEAR_EXT = 1000104005,
+    VK_COLOR_SPACE_BT709_NONLINEAR_EXT = 1000104006,
+    VK_COLOR_SPACE_BT2020_LINEAR_EXT = 1000104007,
+    VK_COLOR_SPACE_HDR10_ST2084_EXT = 1000104008,
+    VK_COLOR_SPACE_DOLBYVISION_EXT = 1000104009,
+    VK_COLOR_SPACE_HDR10_HLG_EXT = 1000104010,
+    VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT = 1000104011,
+    VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT = 1000104012,
+    VK_COLOR_SPACE_PASS_THROUGH_EXT = 1000104013,
+    VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT = 1000104014,
+    VK_COLOR_SPACE_DISPLAY_NATIVE_AMD = 1000213000,
+    VK_COLORSPACE_SRGB_NONLINEAR_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,
+    VK_COLOR_SPACE_DCI_P3_LINEAR_EXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT,
+    VK_COLOR_SPACE_BEGIN_RANGE_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,
+    VK_COLOR_SPACE_END_RANGE_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,
+    VK_COLOR_SPACE_RANGE_SIZE_KHR = (VK_COLOR_SPACE_SRGB_NONLINEAR_KHR - VK_COLOR_SPACE_SRGB_NONLINEAR_KHR + 1),
+    VK_COLOR_SPACE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkColorSpaceKHR;
+
+typedef enum VkPresentModeKHR {
+    VK_PRESENT_MODE_IMMEDIATE_KHR = 0,
+    VK_PRESENT_MODE_MAILBOX_KHR = 1,
+    VK_PRESENT_MODE_FIFO_KHR = 2,
+    VK_PRESENT_MODE_FIFO_RELAXED_KHR = 3,
+    VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR = 1000111000,
+    VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR = 1000111001,
+    VK_PRESENT_MODE_BEGIN_RANGE_KHR = VK_PRESENT_MODE_IMMEDIATE_KHR,
+    VK_PRESENT_MODE_END_RANGE_KHR = VK_PRESENT_MODE_FIFO_RELAXED_KHR,
+    VK_PRESENT_MODE_RANGE_SIZE_KHR = (VK_PRESENT_MODE_FIFO_RELAXED_KHR - VK_PRESENT_MODE_IMMEDIATE_KHR + 1),
+    VK_PRESENT_MODE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkPresentModeKHR;
+
+typedef enum VkSurfaceTransformFlagBitsKHR {
+    VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR = 0x00000001,
+    VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR = 0x00000002,
+    VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR = 0x00000004,
+    VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR = 0x00000008,
+    VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR = 0x00000010,
+    VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR = 0x00000020,
+    VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR = 0x00000040,
+    VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR = 0x00000080,
+    VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR = 0x00000100,
+    VK_SURFACE_TRANSFORM_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkSurfaceTransformFlagBitsKHR;
+typedef VkFlags VkSurfaceTransformFlagsKHR;
+
+typedef enum VkCompositeAlphaFlagBitsKHR {
+    VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR = 0x00000001,
+    VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR = 0x00000002,
+    VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR = 0x00000004,
+    VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR = 0x00000008,
+    VK_COMPOSITE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkCompositeAlphaFlagBitsKHR;
+typedef VkFlags VkCompositeAlphaFlagsKHR;
+typedef struct VkSurfaceCapabilitiesKHR {
+    uint32_t                         minImageCount;
+    uint32_t                         maxImageCount;
+    VkExtent2D                       currentExtent;
+    VkExtent2D                       minImageExtent;
+    VkExtent2D                       maxImageExtent;
+    uint32_t                         maxImageArrayLayers;
+    VkSurfaceTransformFlagsKHR       supportedTransforms;
+    VkSurfaceTransformFlagBitsKHR    currentTransform;
+    VkCompositeAlphaFlagsKHR         supportedCompositeAlpha;
+    VkImageUsageFlags                supportedUsageFlags;
+} VkSurfaceCapabilitiesKHR;
+
+typedef struct VkSurfaceFormatKHR {
+    VkFormat           format;
+    VkColorSpaceKHR    colorSpace;
+} VkSurfaceFormatKHR;
+
+typedef void (VKAPI_PTR *PFN_vkDestroySurfaceKHR)(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormatsKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfacePresentModesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR(
+    VkInstance                                  instance,
+    VkSurfaceKHR                                surface,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    VkSurfaceKHR                                surface,
+    VkBool32*                                   pSupported);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilitiesKHR*                   pSurfaceCapabilities);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormatKHR*                         pSurfaceFormats);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes);
+#endif
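+
+/*
+ * Illustrative usage sketch, not part of the generated declarations above:
+ * the standard count-then-fetch pattern for VK_KHR_surface, assuming a valid
+ * physical device, queue family index, and VkSurfaceKHR, with
+ * VK_NO_PROTOTYPES not defined. The kExampleMaxSurfaceFormats bound is a
+ * made-up limit that just avoids heap allocation in the sketch.
+ */
+#ifndef VK_NO_PROTOTYPES
+enum { kExampleMaxSurfaceFormats = 32 };
+
+static VkResult exampleQuerySurface(VkPhysicalDevice physicalDevice,
+                                    uint32_t queueFamilyIndex,
+                                    VkSurfaceKHR surface)
+{
+    VkBool32 presentSupported = VK_FALSE;
+    VkSurfaceCapabilitiesKHR capabilities;
+    VkSurfaceFormatKHR formats[kExampleMaxSurfaceFormats];
+    uint32_t formatCount = 0;
+    VkResult result;
+
+    /* Can this queue family present to the surface at all? */
+    result = vkGetPhysicalDeviceSurfaceSupportKHR(physicalDevice,
+                                                  queueFamilyIndex, surface,
+                                                  &presentSupported);
+    if (result != VK_SUCCESS || !presentSupported) {
+        return result;
+    }
+
+    /* Image counts, extents, and usage limits for swapchain creation. */
+    result = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface,
+                                                       &capabilities);
+    if (result != VK_SUCCESS) {
+        return result;
+    }
+
+    /* First call sizes the array, second call fills it. */
+    result = vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface,
+                                                  &formatCount, NULL);
+    if (result != VK_SUCCESS) {
+        return result;
+    }
+    if (formatCount > kExampleMaxSurfaceFormats) {
+        formatCount = kExampleMaxSurfaceFormats;
+    }
+    return vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface,
+                                                &formatCount, formats);
+}
+#endif  /* VK_NO_PROTOTYPES */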
+
+
+#define VK_KHR_swapchain 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSwapchainKHR)
+#define VK_KHR_SWAPCHAIN_SPEC_VERSION     70
+#define VK_KHR_SWAPCHAIN_EXTENSION_NAME   "VK_KHR_swapchain"
+
+typedef enum VkSwapchainCreateFlagBitsKHR {
+    VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR = 0x00000001,
+    VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR = 0x00000002,
+    VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR = 0x00000004,
+    VK_SWAPCHAIN_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkSwapchainCreateFlagBitsKHR;
+typedef VkFlags VkSwapchainCreateFlagsKHR;
+
+typedef enum VkDeviceGroupPresentModeFlagBitsKHR {
+    VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR = 0x00000001,
+    VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR = 0x00000002,
+    VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR = 0x00000004,
+    VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR = 0x00000008,
+    VK_DEVICE_GROUP_PRESENT_MODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkDeviceGroupPresentModeFlagBitsKHR;
+typedef VkFlags VkDeviceGroupPresentModeFlagsKHR;
+typedef struct VkSwapchainCreateInfoKHR {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkSwapchainCreateFlagsKHR        flags;
+    VkSurfaceKHR                     surface;
+    uint32_t                         minImageCount;
+    VkFormat                         imageFormat;
+    VkColorSpaceKHR                  imageColorSpace;
+    VkExtent2D                       imageExtent;
+    uint32_t                         imageArrayLayers;
+    VkImageUsageFlags                imageUsage;
+    VkSharingMode                    imageSharingMode;
+    uint32_t                         queueFamilyIndexCount;
+    const uint32_t*                  pQueueFamilyIndices;
+    VkSurfaceTransformFlagBitsKHR    preTransform;
+    VkCompositeAlphaFlagBitsKHR      compositeAlpha;
+    VkPresentModeKHR                 presentMode;
+    VkBool32                         clipped;
+    VkSwapchainKHR                   oldSwapchain;
+} VkSwapchainCreateInfoKHR;
+
+typedef struct VkPresentInfoKHR {
+    VkStructureType          sType;
+    const void*              pNext;
+    uint32_t                 waitSemaphoreCount;
+    const VkSemaphore*       pWaitSemaphores;
+    uint32_t                 swapchainCount;
+    const VkSwapchainKHR*    pSwapchains;
+    const uint32_t*          pImageIndices;
+    VkResult*                pResults;
+} VkPresentInfoKHR;
+
+typedef struct VkImageSwapchainCreateInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkSwapchainKHR     swapchain;
+} VkImageSwapchainCreateInfoKHR;
+
+typedef struct VkBindImageMemorySwapchainInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkSwapchainKHR     swapchain;
+    uint32_t           imageIndex;
+} VkBindImageMemorySwapchainInfoKHR;
+
+typedef struct VkAcquireNextImageInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkSwapchainKHR     swapchain;
+    uint64_t           timeout;
+    VkSemaphore        semaphore;
+    VkFence            fence;
+    uint32_t           deviceMask;
+} VkAcquireNextImageInfoKHR;
+
+typedef struct VkDeviceGroupPresentCapabilitiesKHR {
+    VkStructureType                     sType;
+    const void*                         pNext;
+    uint32_t                            presentMask[VK_MAX_DEVICE_GROUP_SIZE];
+    VkDeviceGroupPresentModeFlagsKHR    modes;
+} VkDeviceGroupPresentCapabilitiesKHR;
+
+typedef struct VkDeviceGroupPresentInfoKHR {
+    VkStructureType                        sType;
+    const void*                            pNext;
+    uint32_t                               swapchainCount;
+    const uint32_t*                        pDeviceMasks;
+    VkDeviceGroupPresentModeFlagBitsKHR    mode;
+} VkDeviceGroupPresentInfoKHR;
+
+typedef struct VkDeviceGroupSwapchainCreateInfoKHR {
+    VkStructureType                     sType;
+    const void*                         pNext;
+    VkDeviceGroupPresentModeFlagsKHR    modes;
+} VkDeviceGroupSwapchainCreateInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateSwapchainKHR)(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain);
+typedef void (VKAPI_PTR *PFN_vkDestroySwapchainKHR)(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainImagesKHR)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages);
+typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImageKHR)(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex);
+typedef VkResult (VKAPI_PTR *PFN_vkQueuePresentKHR)(VkQueue queue, const VkPresentInfoKHR* pPresentInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupPresentCapabilitiesKHR)(VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupSurfacePresentModesKHR)(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDevicePresentRectanglesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects);
+typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImage2KHR)(VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR(
+    VkDevice                                    device,
+    const VkSwapchainCreateInfoKHR*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchain);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pSwapchainImageCount,
+    VkImage*                                    pSwapchainImages);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint64_t                                    timeout,
+    VkSemaphore                                 semaphore,
+    VkFence                                     fence,
+    uint32_t*                                   pImageIndex);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR(
+    VkQueue                                     queue,
+    const VkPresentInfoKHR*                     pPresentInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupPresentCapabilitiesKHR(
+    VkDevice                                    device,
+    VkDeviceGroupPresentCapabilitiesKHR*        pDeviceGroupPresentCapabilities);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHR(
+    VkDevice                                    device,
+    VkSurfaceKHR                                surface,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pRectCount,
+    VkRect2D*                                   pRects);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImage2KHR(
+    VkDevice                                    device,
+    const VkAcquireNextImageInfoKHR*            pAcquireInfo,
+    uint32_t*                                   pImageIndex);
+#endif
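+
+/*
+ * Illustrative usage sketch, not part of the generated declarations above:
+ * creating a VkSwapchainKHR from previously queried surface data, then
+ * acquiring and presenting one image. All handles, the chosen format, and
+ * the extent are assumed to come from the caller; VK_NO_PROTOTYPES is
+ * assumed to be undefined.
+ */
+#ifndef VK_NO_PROTOTYPES
+static VkResult exampleCreateSwapchain(VkDevice device,
+                                       VkSurfaceKHR surface,
+                                       VkSurfaceFormatKHR surfaceFormat,
+                                       VkExtent2D extent,
+                                       uint32_t minImageCount,
+                                       VkSwapchainKHR* pSwapchain)
+{
+    VkSwapchainCreateInfoKHR createInfo;
+
+    createInfo.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
+    createInfo.pNext = NULL;
+    createInfo.flags = 0;
+    createInfo.surface = surface;
+    createInfo.minImageCount = minImageCount;
+    createInfo.imageFormat = surfaceFormat.format;
+    createInfo.imageColorSpace = surfaceFormat.colorSpace;
+    createInfo.imageExtent = extent;
+    createInfo.imageArrayLayers = 1;
+    createInfo.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+    createInfo.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    createInfo.queueFamilyIndexCount = 0;
+    createInfo.pQueueFamilyIndices = NULL;
+    createInfo.preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
+    createInfo.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
+    createInfo.presentMode = VK_PRESENT_MODE_FIFO_KHR;  /* always available */
+    createInfo.clipped = VK_TRUE;
+    createInfo.oldSwapchain = VK_NULL_HANDLE;
+
+    return vkCreateSwapchainKHR(device, &createInfo, NULL, pSwapchain);
+}
+
+static VkResult examplePresentOneImage(VkDevice device,
+                                       VkQueue presentQueue,
+                                       VkSwapchainKHR swapchain,
+                                       VkSemaphore imageReady,
+                                       VkSemaphore renderDone)
+{
+    uint32_t imageIndex = 0;
+    VkPresentInfoKHR presentInfo;
+    VkResult result;
+
+    /* Wait indefinitely for the next image index; imageReady is signaled
+       once the image itself may be written. */
+    result = vkAcquireNextImageKHR(device, swapchain, UINT64_MAX, imageReady,
+                                   VK_NULL_HANDLE, &imageIndex);
+    if (result != VK_SUCCESS) {
+        return result;
+    }
+
+    /* Rendering that waits on imageReady and signals renderDone goes here. */
+
+    presentInfo.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
+    presentInfo.pNext = NULL;
+    presentInfo.waitSemaphoreCount = 1;
+    presentInfo.pWaitSemaphores = &renderDone;
+    presentInfo.swapchainCount = 1;
+    presentInfo.pSwapchains = &swapchain;
+    presentInfo.pImageIndices = &imageIndex;
+    presentInfo.pResults = NULL;
+
+    return vkQueuePresentKHR(presentQueue, &presentInfo);
+}
+#endif  /* VK_NO_PROTOTYPES */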
+
+
+#define VK_KHR_display 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayKHR)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayModeKHR)
+#define VK_KHR_DISPLAY_SPEC_VERSION       23
+#define VK_KHR_DISPLAY_EXTENSION_NAME     "VK_KHR_display"
+
+typedef enum VkDisplayPlaneAlphaFlagBitsKHR {
+    VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR = 0x00000001,
+    VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR = 0x00000002,
+    VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR = 0x00000004,
+    VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR = 0x00000008,
+    VK_DISPLAY_PLANE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkDisplayPlaneAlphaFlagBitsKHR;
+typedef VkFlags VkDisplayPlaneAlphaFlagsKHR;
+typedef VkFlags VkDisplayModeCreateFlagsKHR;
+typedef VkFlags VkDisplaySurfaceCreateFlagsKHR;
+typedef struct VkDisplayPropertiesKHR {
+    VkDisplayKHR                  display;
+    const char*                   displayName;
+    VkExtent2D                    physicalDimensions;
+    VkExtent2D                    physicalResolution;
+    VkSurfaceTransformFlagsKHR    supportedTransforms;
+    VkBool32                      planeReorderPossible;
+    VkBool32                      persistentContent;
+} VkDisplayPropertiesKHR;
+
+typedef struct VkDisplayModeParametersKHR {
+    VkExtent2D    visibleRegion;
+    uint32_t      refreshRate;
+} VkDisplayModeParametersKHR;
+
+typedef struct VkDisplayModePropertiesKHR {
+    VkDisplayModeKHR              displayMode;
+    VkDisplayModeParametersKHR    parameters;
+} VkDisplayModePropertiesKHR;
+
+typedef struct VkDisplayModeCreateInfoKHR {
+    VkStructureType                sType;
+    const void*                    pNext;
+    VkDisplayModeCreateFlagsKHR    flags;
+    VkDisplayModeParametersKHR     parameters;
+} VkDisplayModeCreateInfoKHR;
+
+typedef struct VkDisplayPlaneCapabilitiesKHR {
+    VkDisplayPlaneAlphaFlagsKHR    supportedAlpha;
+    VkOffset2D                     minSrcPosition;
+    VkOffset2D                     maxSrcPosition;
+    VkExtent2D                     minSrcExtent;
+    VkExtent2D                     maxSrcExtent;
+    VkOffset2D                     minDstPosition;
+    VkOffset2D                     maxDstPosition;
+    VkExtent2D                     minDstExtent;
+    VkExtent2D                     maxDstExtent;
+} VkDisplayPlaneCapabilitiesKHR;
+
+typedef struct VkDisplayPlanePropertiesKHR {
+    VkDisplayKHR    currentDisplay;
+    uint32_t        currentStackIndex;
+} VkDisplayPlanePropertiesKHR;
+
+typedef struct VkDisplaySurfaceCreateInfoKHR {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkDisplaySurfaceCreateFlagsKHR    flags;
+    VkDisplayModeKHR                  displayMode;
+    uint32_t                          planeIndex;
+    uint32_t                          planeStackIndex;
+    VkSurfaceTransformFlagBitsKHR     transform;
+    float                             globalAlpha;
+    VkDisplayPlaneAlphaFlagBitsKHR    alphaMode;
+    VkExtent2D                        imageExtent;
+} VkDisplaySurfaceCreateInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneSupportedDisplaysKHR)(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModePropertiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayModeKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayPlaneSurfaceKHR)(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPropertiesKHR*                     pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPlanePropertiesKHR*                pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneSupportedDisplaysKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    planeIndex,
+    uint32_t*                                   pDisplayCount,
+    VkDisplayKHR*                               pDisplays);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModePropertiesKHR*                 pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayModeKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    const VkDisplayModeCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDisplayModeKHR*                           pMode);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayModeKHR                            mode,
+    uint32_t                                    planeIndex,
+    VkDisplayPlaneCapabilitiesKHR*              pCapabilities);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR(
+    VkInstance                                  instance,
+    const VkDisplaySurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif
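+
+/*
+ * Illustrative usage sketch, not part of the generated declarations above:
+ * counting the displays and display planes exposed through VK_KHR_display,
+ * using the usual count-then-fetch pattern (only the count step is shown).
+ * Assumes VK_NO_PROTOTYPES is not defined.
+ */
+#ifndef VK_NO_PROTOTYPES
+static VkResult exampleCountDisplays(VkPhysicalDevice physicalDevice,
+                                     uint32_t* pDisplayCount,
+                                     uint32_t* pPlaneCount)
+{
+    VkResult result;
+
+    /* Passing NULL for the properties array just returns the count. */
+    result = vkGetPhysicalDeviceDisplayPropertiesKHR(physicalDevice,
+                                                     pDisplayCount, NULL);
+    if (result != VK_SUCCESS) {
+        return result;
+    }
+    return vkGetPhysicalDeviceDisplayPlanePropertiesKHR(physicalDevice,
+                                                        pPlaneCount, NULL);
+}
+#endif  /* VK_NO_PROTOTYPES */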
+
+
+#define VK_KHR_display_swapchain 1
+#define VK_KHR_DISPLAY_SWAPCHAIN_SPEC_VERSION 10
+#define VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME "VK_KHR_display_swapchain"
+typedef struct VkDisplayPresentInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkRect2D           srcRect;
+    VkRect2D           dstRect;
+    VkBool32           persistent;
+} VkDisplayPresentInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateSharedSwapchainsKHR)(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateSharedSwapchainsKHR(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainCreateInfoKHR*             pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchains);
+#endif
+
+
+#define VK_KHR_sampler_mirror_clamp_to_edge 1
+#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION 3
+#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME "VK_KHR_sampler_mirror_clamp_to_edge"
+
+
+#define VK_KHR_multiview 1
+#define VK_KHR_MULTIVIEW_SPEC_VERSION     1
+#define VK_KHR_MULTIVIEW_EXTENSION_NAME   "VK_KHR_multiview"
+typedef VkRenderPassMultiviewCreateInfo VkRenderPassMultiviewCreateInfoKHR;
+
+typedef VkPhysicalDeviceMultiviewFeatures VkPhysicalDeviceMultiviewFeaturesKHR;
+
+typedef VkPhysicalDeviceMultiviewProperties VkPhysicalDeviceMultiviewPropertiesKHR;
+
+
+
+#define VK_KHR_get_physical_device_properties2 1
+#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION 2
+#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_physical_device_properties2"
+typedef VkPhysicalDeviceFeatures2 VkPhysicalDeviceFeatures2KHR;
+
+typedef VkPhysicalDeviceProperties2 VkPhysicalDeviceProperties2KHR;
+
+typedef VkFormatProperties2 VkFormatProperties2KHR;
+
+typedef VkImageFormatProperties2 VkImageFormatProperties2KHR;
+
+typedef VkPhysicalDeviceImageFormatInfo2 VkPhysicalDeviceImageFormatInfo2KHR;
+
+typedef VkQueueFamilyProperties2 VkQueueFamilyProperties2KHR;
+
+typedef VkPhysicalDeviceMemoryProperties2 VkPhysicalDeviceMemoryProperties2KHR;
+
+typedef VkSparseImageFormatProperties2 VkSparseImageFormatProperties2KHR;
+
+typedef VkPhysicalDeviceSparseImageFormatInfo2 VkPhysicalDeviceSparseImageFormatInfo2KHR;
+
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties2KHR)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures2*                  pFeatures);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties2*                pProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties2*                        pFormatProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceImageFormatInfo2*     pImageFormatInfo,
+    VkImageFormatProperties2*                   pImageFormatProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties2*                   pQueueFamilyProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties2*          pMemoryProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties2*             pProperties);
+#endif
+
+
+#define VK_KHR_device_group 1
+#define VK_KHR_DEVICE_GROUP_SPEC_VERSION  4
+#define VK_KHR_DEVICE_GROUP_EXTENSION_NAME "VK_KHR_device_group"
+typedef VkPeerMemoryFeatureFlags VkPeerMemoryFeatureFlagsKHR;
+
+typedef VkPeerMemoryFeatureFlagBits VkPeerMemoryFeatureFlagBitsKHR;
+
+typedef VkMemoryAllocateFlags VkMemoryAllocateFlagsKHR;
+
+typedef VkMemoryAllocateFlagBits VkMemoryAllocateFlagBitsKHR;
+
+typedef VkMemoryAllocateFlagsInfo VkMemoryAllocateFlagsInfoKHR;
+
+typedef VkDeviceGroupRenderPassBeginInfo VkDeviceGroupRenderPassBeginInfoKHR;
+
+typedef VkDeviceGroupCommandBufferBeginInfo VkDeviceGroupCommandBufferBeginInfoKHR;
+
+typedef VkDeviceGroupSubmitInfo VkDeviceGroupSubmitInfoKHR;
+
+typedef VkDeviceGroupBindSparseInfo VkDeviceGroupBindSparseInfoKHR;
+
+typedef VkBindBufferMemoryDeviceGroupInfo VkBindBufferMemoryDeviceGroupInfoKHR;
+
+typedef VkBindImageMemoryDeviceGroupInfo VkBindImageMemoryDeviceGroupInfoKHR;
+
+typedef void (VKAPI_PTR *PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR)(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDeviceMaskKHR)(VkCommandBuffer commandBuffer, uint32_t deviceMask);
+typedef void (VKAPI_PTR *PFN_vkCmdDispatchBaseKHR)(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeaturesKHR(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMaskKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBaseKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ);
+#endif
+
+
+#define VK_KHR_shader_draw_parameters 1
+#define VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION 1
+#define VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME "VK_KHR_shader_draw_parameters"
+
+
+#define VK_KHR_maintenance1 1
+#define VK_KHR_MAINTENANCE1_SPEC_VERSION  2
+#define VK_KHR_MAINTENANCE1_EXTENSION_NAME "VK_KHR_maintenance1"
+typedef VkCommandPoolTrimFlags VkCommandPoolTrimFlagsKHR;
+
+typedef void (VKAPI_PTR *PFN_vkTrimCommandPoolKHR)(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkTrimCommandPoolKHR(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags);
+#endif
+
+
+#define VK_KHR_device_group_creation 1
+#define VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION 1
+#define VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME "VK_KHR_device_group_creation"
+#define VK_MAX_DEVICE_GROUP_SIZE_KHR      VK_MAX_DEVICE_GROUP_SIZE
+typedef VkPhysicalDeviceGroupProperties VkPhysicalDeviceGroupPropertiesKHR;
+
+typedef VkDeviceGroupDeviceCreateInfo VkDeviceGroupDeviceCreateInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceGroupsKHR)(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroupsKHR(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties);
+#endif
+
+
+#define VK_KHR_external_memory_capabilities 1
+#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_memory_capabilities"
+#define VK_LUID_SIZE_KHR                  VK_LUID_SIZE
+typedef VkExternalMemoryHandleTypeFlags VkExternalMemoryHandleTypeFlagsKHR;
+
+typedef VkExternalMemoryHandleTypeFlagBits VkExternalMemoryHandleTypeFlagBitsKHR;
+
+typedef VkExternalMemoryFeatureFlags VkExternalMemoryFeatureFlagsKHR;
+
+typedef VkExternalMemoryFeatureFlagBits VkExternalMemoryFeatureFlagBitsKHR;
+
+typedef VkExternalMemoryProperties VkExternalMemoryPropertiesKHR;
+
+typedef VkPhysicalDeviceExternalImageFormatInfo VkPhysicalDeviceExternalImageFormatInfoKHR;
+
+typedef VkExternalImageFormatProperties VkExternalImageFormatPropertiesKHR;
+
+typedef VkPhysicalDeviceExternalBufferInfo VkPhysicalDeviceExternalBufferInfoKHR;
+
+typedef VkExternalBufferProperties VkExternalBufferPropertiesKHR;
+
+typedef VkPhysicalDeviceIDProperties VkPhysicalDeviceIDPropertiesKHR;
+
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferPropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalBufferInfo*   pExternalBufferInfo,
+    VkExternalBufferProperties*                 pExternalBufferProperties);
+#endif
+
+
+#define VK_KHR_external_memory 1
+#define VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME "VK_KHR_external_memory"
+#define VK_QUEUE_FAMILY_EXTERNAL_KHR      VK_QUEUE_FAMILY_EXTERNAL
+typedef VkExternalMemoryImageCreateInfo VkExternalMemoryImageCreateInfoKHR;
+
+typedef VkExternalMemoryBufferCreateInfo VkExternalMemoryBufferCreateInfoKHR;
+
+typedef VkExportMemoryAllocateInfo VkExportMemoryAllocateInfoKHR;
+
+
+
+#define VK_KHR_external_memory_fd 1
+#define VK_KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME "VK_KHR_external_memory_fd"
+typedef struct VkImportMemoryFdInfoKHR {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkExternalMemoryHandleTypeFlagBits    handleType;
+    int                                   fd;
+} VkImportMemoryFdInfoKHR;
+
+typedef struct VkMemoryFdPropertiesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           memoryTypeBits;
+} VkMemoryFdPropertiesKHR;
+
+typedef struct VkMemoryGetFdInfoKHR {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkDeviceMemory                        memory;
+    VkExternalMemoryHandleTypeFlagBits    handleType;
+} VkMemoryGetFdInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdKHR)(VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd);
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdPropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdKHR(
+    VkDevice                                    device,
+    const VkMemoryGetFdInfoKHR*                 pGetFdInfo,
+    int*                                        pFd);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdPropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    int                                         fd,
+    VkMemoryFdPropertiesKHR*                    pMemoryFdProperties);
+#endif
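+
+/*
+ * Illustrative usage sketch, not part of the generated declarations above:
+ * exporting a POSIX file descriptor from a VkDeviceMemory allocation that
+ * was created with an opaque-fd export handle type. Assumes the extension
+ * is enabled on the device and VK_NO_PROTOTYPES is not defined.
+ */
+#ifndef VK_NO_PROTOTYPES
+static VkResult exampleExportMemoryFd(VkDevice device,
+                                      VkDeviceMemory memory,
+                                      int* pFd)
+{
+    VkMemoryGetFdInfoKHR getFdInfo;
+
+    getFdInfo.sType = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR;
+    getFdInfo.pNext = NULL;
+    getFdInfo.memory = memory;
+    getFdInfo.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;
+
+    /* Ownership of *pFd transfers to the caller on success. */
+    return vkGetMemoryFdKHR(device, &getFdInfo, pFd);
+}
+#endif  /* VK_NO_PROTOTYPES */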
+
+
+#define VK_KHR_external_semaphore_capabilities 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_semaphore_capabilities"
+typedef VkExternalSemaphoreHandleTypeFlags VkExternalSemaphoreHandleTypeFlagsKHR;
+
+typedef VkExternalSemaphoreHandleTypeFlagBits VkExternalSemaphoreHandleTypeFlagBitsKHR;
+
+typedef VkExternalSemaphoreFeatureFlags VkExternalSemaphoreFeatureFlagsKHR;
+
+typedef VkExternalSemaphoreFeatureFlagBits VkExternalSemaphoreFeatureFlagBitsKHR;
+
+typedef VkPhysicalDeviceExternalSemaphoreInfo VkPhysicalDeviceExternalSemaphoreInfoKHR;
+
+typedef VkExternalSemaphoreProperties VkExternalSemaphorePropertiesKHR;
+
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
+    VkExternalSemaphoreProperties*              pExternalSemaphoreProperties);
+#endif
+
+
+#define VK_KHR_external_semaphore 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_KHR_external_semaphore"
+typedef VkSemaphoreImportFlags VkSemaphoreImportFlagsKHR;
+
+typedef VkSemaphoreImportFlagBits VkSemaphoreImportFlagBitsKHR;
+
+typedef VkExportSemaphoreCreateInfo VkExportSemaphoreCreateInfoKHR;
+
+
+
+#define VK_KHR_external_semaphore_fd 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME "VK_KHR_external_semaphore_fd"
+typedef struct VkImportSemaphoreFdInfoKHR {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkSemaphore                              semaphore;
+    VkSemaphoreImportFlags                   flags;
+    VkExternalSemaphoreHandleTypeFlagBits    handleType;
+    int                                      fd;
+} VkImportSemaphoreFdInfoKHR;
+
+typedef struct VkSemaphoreGetFdInfoKHR {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkSemaphore                              semaphore;
+    VkExternalSemaphoreHandleTypeFlagBits    handleType;
+} VkSemaphoreGetFdInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreFdKHR)(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreFdKHR)(VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreFdInfoKHR*           pImportSemaphoreFdInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetFdInfoKHR*              pGetFdInfo,
+    int*                                        pFd);
+#endif
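+
+/*
+ * Illustrative usage sketch, not part of the generated declarations above:
+ * importing a POSIX file descriptor into an existing VkSemaphore. Assumes
+ * the extension is enabled and VK_NO_PROTOTYPES is not defined; on success
+ * the implementation takes ownership of the descriptor.
+ */
+#ifndef VK_NO_PROTOTYPES
+static VkResult exampleImportSemaphoreFd(VkDevice device,
+                                         VkSemaphore semaphore,
+                                         int fd)
+{
+    VkImportSemaphoreFdInfoKHR importInfo;
+
+    importInfo.sType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR;
+    importInfo.pNext = NULL;
+    importInfo.semaphore = semaphore;
+    importInfo.flags = 0;  /* permanent (non-temporary) import */
+    importInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT;
+    importInfo.fd = fd;
+
+    return vkImportSemaphoreFdKHR(device, &importInfo);
+}
+#endif  /* VK_NO_PROTOTYPES */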
+
+
+#define VK_KHR_push_descriptor 1
+#define VK_KHR_PUSH_DESCRIPTOR_SPEC_VERSION 2
+#define VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME "VK_KHR_push_descriptor"
+typedef struct VkPhysicalDevicePushDescriptorPropertiesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxPushDescriptors;
+} VkPhysicalDevicePushDescriptorPropertiesKHR;
+
+typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetKHR)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites);
+typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplateKHR)(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplateKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    const void*                                 pData);
+#endif
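+
+/*
+ * Illustrative usage sketch, not part of the generated declarations above:
+ * pushing a single uniform-buffer binding directly into a command buffer
+ * with VK_KHR_push_descriptor instead of allocating a descriptor set. The
+ * pipeline layout is assumed to use a set-0 layout created with
+ * VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, and
+ * VK_NO_PROTOTYPES is assumed to be undefined.
+ */
+#ifndef VK_NO_PROTOTYPES
+static void examplePushUniformBuffer(VkCommandBuffer commandBuffer,
+                                     VkPipelineLayout pipelineLayout,
+                                     VkBuffer uniformBuffer,
+                                     VkDeviceSize range)
+{
+    VkDescriptorBufferInfo bufferInfo;
+    VkWriteDescriptorSet write;
+
+    bufferInfo.buffer = uniformBuffer;
+    bufferInfo.offset = 0;
+    bufferInfo.range = range;
+
+    write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    write.pNext = NULL;
+    write.dstSet = VK_NULL_HANDLE;  /* ignored for push descriptors */
+    write.dstBinding = 0;
+    write.dstArrayElement = 0;
+    write.descriptorCount = 1;
+    write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    write.pImageInfo = NULL;
+    write.pBufferInfo = &bufferInfo;
+    write.pTexelBufferView = NULL;
+
+    vkCmdPushDescriptorSetKHR(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS,
+                              pipelineLayout, 0, 1, &write);
+}
+#endif  /* VK_NO_PROTOTYPES */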
+
+
+#define VK_KHR_shader_float16_int8 1
+#define VK_KHR_SHADER_FLOAT16_INT8_SPEC_VERSION 1
+#define VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME "VK_KHR_shader_float16_int8"
+typedef struct VkPhysicalDeviceShaderFloat16Int8FeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderFloat16;
+    VkBool32           shaderInt8;
+} VkPhysicalDeviceShaderFloat16Int8FeaturesKHR;
+
+typedef VkPhysicalDeviceShaderFloat16Int8FeaturesKHR VkPhysicalDeviceFloat16Int8FeaturesKHR;
+
+
+
+#define VK_KHR_16bit_storage 1
+#define VK_KHR_16BIT_STORAGE_SPEC_VERSION 1
+#define VK_KHR_16BIT_STORAGE_EXTENSION_NAME "VK_KHR_16bit_storage"
+typedef VkPhysicalDevice16BitStorageFeatures VkPhysicalDevice16BitStorageFeaturesKHR;
+
+
+
+#define VK_KHR_incremental_present 1
+#define VK_KHR_INCREMENTAL_PRESENT_SPEC_VERSION 1
+#define VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME "VK_KHR_incremental_present"
+typedef struct VkRectLayerKHR {
+    VkOffset2D    offset;
+    VkExtent2D    extent;
+    uint32_t      layer;
+} VkRectLayerKHR;
+
+typedef struct VkPresentRegionKHR {
+    uint32_t                 rectangleCount;
+    const VkRectLayerKHR*    pRectangles;
+} VkPresentRegionKHR;
+
+typedef struct VkPresentRegionsKHR {
+    VkStructureType              sType;
+    const void*                  pNext;
+    uint32_t                     swapchainCount;
+    const VkPresentRegionKHR*    pRegions;
+} VkPresentRegionsKHR;
+
+
+
+#define VK_KHR_descriptor_update_template 1
+typedef VkDescriptorUpdateTemplate VkDescriptorUpdateTemplateKHR;
+
+#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION 1
+#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME "VK_KHR_descriptor_update_template"
+typedef VkDescriptorUpdateTemplateType VkDescriptorUpdateTemplateTypeKHR;
+
+typedef VkDescriptorUpdateTemplateCreateFlags VkDescriptorUpdateTemplateCreateFlagsKHR;
+
+typedef VkDescriptorUpdateTemplateEntry VkDescriptorUpdateTemplateEntryKHR;
+
+typedef VkDescriptorUpdateTemplateCreateInfo VkDescriptorUpdateTemplateCreateInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorUpdateTemplateKHR)(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate);
+typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorUpdateTemplateKHR)(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSetWithTemplateKHR)(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData);
+#endif
+
+
+#define VK_KHR_imageless_framebuffer 1
+#define VK_KHR_IMAGELESS_FRAMEBUFFER_SPEC_VERSION 1
+#define VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME "VK_KHR_imageless_framebuffer"
+typedef struct VkPhysicalDeviceImagelessFramebufferFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           imagelessFramebuffer;
+} VkPhysicalDeviceImagelessFramebufferFeaturesKHR;
+
+typedef struct VkFramebufferAttachmentImageInfoKHR {
+    VkStructureType       sType;
+    const void*           pNext;
+    VkImageCreateFlags    flags;
+    VkImageUsageFlags     usage;
+    uint32_t              width;
+    uint32_t              height;
+    uint32_t              layerCount;
+    uint32_t              viewFormatCount;
+    const VkFormat*       pViewFormats;
+} VkFramebufferAttachmentImageInfoKHR;
+
+typedef struct VkFramebufferAttachmentsCreateInfoKHR {
+    VkStructureType                               sType;
+    const void*                                   pNext;
+    uint32_t                                      attachmentImageInfoCount;
+    const VkFramebufferAttachmentImageInfoKHR*    pAttachmentImageInfos;
+} VkFramebufferAttachmentsCreateInfoKHR;
+
+typedef struct VkRenderPassAttachmentBeginInfoKHR {
+    VkStructureType       sType;
+    const void*           pNext;
+    uint32_t              attachmentCount;
+    const VkImageView*    pAttachments;
+} VkRenderPassAttachmentBeginInfoKHR;
+
+
+
+#define VK_KHR_create_renderpass2 1
+#define VK_KHR_CREATE_RENDERPASS_2_SPEC_VERSION 1
+#define VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME "VK_KHR_create_renderpass2"
+typedef struct VkAttachmentDescription2KHR {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkAttachmentDescriptionFlags    flags;
+    VkFormat                        format;
+    VkSampleCountFlagBits           samples;
+    VkAttachmentLoadOp              loadOp;
+    VkAttachmentStoreOp             storeOp;
+    VkAttachmentLoadOp              stencilLoadOp;
+    VkAttachmentStoreOp             stencilStoreOp;
+    VkImageLayout                   initialLayout;
+    VkImageLayout                   finalLayout;
+} VkAttachmentDescription2KHR;
+
+typedef struct VkAttachmentReference2KHR {
+    VkStructureType       sType;
+    const void*           pNext;
+    uint32_t              attachment;
+    VkImageLayout         layout;
+    VkImageAspectFlags    aspectMask;
+} VkAttachmentReference2KHR;
+
+typedef struct VkSubpassDescription2KHR {
+    VkStructureType                     sType;
+    const void*                         pNext;
+    VkSubpassDescriptionFlags           flags;
+    VkPipelineBindPoint                 pipelineBindPoint;
+    uint32_t                            viewMask;
+    uint32_t                            inputAttachmentCount;
+    const VkAttachmentReference2KHR*    pInputAttachments;
+    uint32_t                            colorAttachmentCount;
+    const VkAttachmentReference2KHR*    pColorAttachments;
+    const VkAttachmentReference2KHR*    pResolveAttachments;
+    const VkAttachmentReference2KHR*    pDepthStencilAttachment;
+    uint32_t                            preserveAttachmentCount;
+    const uint32_t*                     pPreserveAttachments;
+} VkSubpassDescription2KHR;
+
+typedef struct VkSubpassDependency2KHR {
+    VkStructureType         sType;
+    const void*             pNext;
+    uint32_t                srcSubpass;
+    uint32_t                dstSubpass;
+    VkPipelineStageFlags    srcStageMask;
+    VkPipelineStageFlags    dstStageMask;
+    VkAccessFlags           srcAccessMask;
+    VkAccessFlags           dstAccessMask;
+    VkDependencyFlags       dependencyFlags;
+    int32_t                 viewOffset;
+} VkSubpassDependency2KHR;
+
+typedef struct VkRenderPassCreateInfo2KHR {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkRenderPassCreateFlags               flags;
+    uint32_t                              attachmentCount;
+    const VkAttachmentDescription2KHR*    pAttachments;
+    uint32_t                              subpassCount;
+    const VkSubpassDescription2KHR*       pSubpasses;
+    uint32_t                              dependencyCount;
+    const VkSubpassDependency2KHR*        pDependencies;
+    uint32_t                              correlatedViewMaskCount;
+    const uint32_t*                       pCorrelatedViewMasks;
+} VkRenderPassCreateInfo2KHR;
+
+typedef struct VkSubpassBeginInfoKHR {
+    VkStructureType      sType;
+    const void*          pNext;
+    VkSubpassContents    contents;
+} VkSubpassBeginInfoKHR;
+
+typedef struct VkSubpassEndInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+} VkSubpassEndInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass2KHR)(VkDevice device, const VkRenderPassCreateInfo2KHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass);
+typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass2KHR)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo*      pRenderPassBegin, const VkSubpassBeginInfoKHR*      pSubpassBeginInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass2KHR)(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR*      pSubpassBeginInfo, const VkSubpassEndInfoKHR*        pSubpassEndInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass2KHR)(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR*        pSubpassEndInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass2KHR(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo2KHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo);
+#endif
+
+
+#define VK_KHR_shared_presentable_image 1
+#define VK_KHR_SHARED_PRESENTABLE_IMAGE_SPEC_VERSION 1
+#define VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME "VK_KHR_shared_presentable_image"
+typedef struct VkSharedPresentSurfaceCapabilitiesKHR {
+    VkStructureType      sType;
+    void*                pNext;
+    VkImageUsageFlags    sharedPresentSupportedUsageFlags;
+} VkSharedPresentSurfaceCapabilitiesKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainStatusKHR)(VkDevice device, VkSwapchainKHR swapchain);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainStatusKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain);
+#endif
+
+
+#define VK_KHR_external_fence_capabilities 1
+#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_fence_capabilities"
+typedef VkExternalFenceHandleTypeFlags VkExternalFenceHandleTypeFlagsKHR;
+
+typedef VkExternalFenceHandleTypeFlagBits VkExternalFenceHandleTypeFlagBitsKHR;
+
+typedef VkExternalFenceFeatureFlags VkExternalFenceFeatureFlagsKHR;
+
+typedef VkExternalFenceFeatureFlagBits VkExternalFenceFeatureFlagBitsKHR;
+
+typedef VkPhysicalDeviceExternalFenceInfo VkPhysicalDeviceExternalFenceInfoKHR;
+
+typedef VkExternalFenceProperties VkExternalFencePropertiesKHR;
+
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFencePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalFenceInfo*    pExternalFenceInfo,
+    VkExternalFenceProperties*                  pExternalFenceProperties);
+#endif
+
+
+#define VK_KHR_external_fence 1
+#define VK_KHR_EXTERNAL_FENCE_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME "VK_KHR_external_fence"
+typedef VkFenceImportFlags VkFenceImportFlagsKHR;
+
+typedef VkFenceImportFlagBits VkFenceImportFlagBitsKHR;
+
+typedef VkExportFenceCreateInfo VkExportFenceCreateInfoKHR;
+
+
+
+#define VK_KHR_external_fence_fd 1
+#define VK_KHR_EXTERNAL_FENCE_FD_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME "VK_KHR_external_fence_fd"
+typedef struct VkImportFenceFdInfoKHR {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkFence                              fence;
+    VkFenceImportFlags                   flags;
+    VkExternalFenceHandleTypeFlagBits    handleType;
+    int                                  fd;
+} VkImportFenceFdInfoKHR;
+
+typedef struct VkFenceGetFdInfoKHR {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkFence                              fence;
+    VkExternalFenceHandleTypeFlagBits    handleType;
+} VkFenceGetFdInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkImportFenceFdKHR)(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkGetFenceFdKHR)(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceFdKHR(
+    VkDevice                                    device,
+    const VkImportFenceFdInfoKHR*               pImportFenceFdInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceFdKHR(
+    VkDevice                                    device,
+    const VkFenceGetFdInfoKHR*                  pGetFdInfo,
+    int*                                        pFd);
+#endif
+
+
+#define VK_KHR_performance_query 1
+#define VK_KHR_PERFORMANCE_QUERY_SPEC_VERSION 1
+#define VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME "VK_KHR_performance_query"
+
+typedef enum VkPerformanceCounterUnitKHR {
+    VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR = 0,
+    VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR = 1,
+    VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR = 2,
+    VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR = 3,
+    VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR = 4,
+    VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR = 5,
+    VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR = 6,
+    VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR = 7,
+    VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR = 8,
+    VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR = 9,
+    VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR = 10,
+    VK_PERFORMANCE_COUNTER_UNIT_BEGIN_RANGE_KHR = VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR,
+    VK_PERFORMANCE_COUNTER_UNIT_END_RANGE_KHR = VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR,
+    VK_PERFORMANCE_COUNTER_UNIT_RANGE_SIZE_KHR = (VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR - VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR + 1),
+    VK_PERFORMANCE_COUNTER_UNIT_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkPerformanceCounterUnitKHR;
+
+typedef enum VkPerformanceCounterScopeKHR {
+    VK_QUERY_SCOPE_COMMAND_BUFFER_KHR = 0,
+    VK_QUERY_SCOPE_RENDER_PASS_KHR = 1,
+    VK_QUERY_SCOPE_COMMAND_KHR = 2,
+    VK_PERFORMANCE_COUNTER_SCOPE_BEGIN_RANGE_KHR = VK_QUERY_SCOPE_COMMAND_BUFFER_KHR,
+    VK_PERFORMANCE_COUNTER_SCOPE_END_RANGE_KHR = VK_QUERY_SCOPE_COMMAND_KHR,
+    VK_PERFORMANCE_COUNTER_SCOPE_RANGE_SIZE_KHR = (VK_QUERY_SCOPE_COMMAND_KHR - VK_QUERY_SCOPE_COMMAND_BUFFER_KHR + 1),
+    VK_PERFORMANCE_COUNTER_SCOPE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkPerformanceCounterScopeKHR;
+
+typedef enum VkPerformanceCounterStorageKHR {
+    VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR = 0,
+    VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR = 1,
+    VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR = 2,
+    VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR = 3,
+    VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR = 4,
+    VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR = 5,
+    VK_PERFORMANCE_COUNTER_STORAGE_BEGIN_RANGE_KHR = VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR,
+    VK_PERFORMANCE_COUNTER_STORAGE_END_RANGE_KHR = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR,
+    VK_PERFORMANCE_COUNTER_STORAGE_RANGE_SIZE_KHR = (VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR - VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR + 1),
+    VK_PERFORMANCE_COUNTER_STORAGE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkPerformanceCounterStorageKHR;
+
+typedef enum VkPerformanceCounterDescriptionFlagBitsKHR {
+    VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR = 0x00000001,
+    VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR = 0x00000002,
+    VK_PERFORMANCE_COUNTER_DESCRIPTION_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkPerformanceCounterDescriptionFlagBitsKHR;
+typedef VkFlags VkPerformanceCounterDescriptionFlagsKHR;
+
+typedef enum VkAcquireProfilingLockFlagBitsKHR {
+    VK_ACQUIRE_PROFILING_LOCK_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkAcquireProfilingLockFlagBitsKHR;
+typedef VkFlags VkAcquireProfilingLockFlagsKHR;
+typedef struct VkPhysicalDevicePerformanceQueryFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           performanceCounterQueryPools;
+    VkBool32           performanceCounterMultipleQueryPools;
+} VkPhysicalDevicePerformanceQueryFeaturesKHR;
+
+typedef struct VkPhysicalDevicePerformanceQueryPropertiesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           allowCommandBufferQueryCopies;
+} VkPhysicalDevicePerformanceQueryPropertiesKHR;
+
+typedef struct VkPerformanceCounterKHR {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkPerformanceCounterUnitKHR       unit;
+    VkPerformanceCounterScopeKHR      scope;
+    VkPerformanceCounterStorageKHR    storage;
+    uint8_t                           uuid[VK_UUID_SIZE];
+} VkPerformanceCounterKHR;
+
+typedef struct VkPerformanceCounterDescriptionKHR {
+    VkStructureType                            sType;
+    const void*                                pNext;
+    VkPerformanceCounterDescriptionFlagsKHR    flags;
+    char                                       name[VK_MAX_DESCRIPTION_SIZE];
+    char                                       category[VK_MAX_DESCRIPTION_SIZE];
+    char                                       description[VK_MAX_DESCRIPTION_SIZE];
+} VkPerformanceCounterDescriptionKHR;
+
+typedef struct VkQueryPoolPerformanceCreateInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           queueFamilyIndex;
+    uint32_t           counterIndexCount;
+    const uint32_t*    pCounterIndices;
+} VkQueryPoolPerformanceCreateInfoKHR;
+
+typedef union VkPerformanceCounterResultKHR {
+    int32_t     int32;
+    int64_t     int64;
+    uint32_t    uint32;
+    uint64_t    uint64;
+    float       float32;
+    double      float64;
+} VkPerformanceCounterResultKHR;
+
+typedef struct VkAcquireProfilingLockInfoKHR {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkAcquireProfilingLockFlagsKHR    flags;
+    uint64_t                          timeout;
+} VkAcquireProfilingLockInfoKHR;
+
+typedef struct VkPerformanceQuerySubmitInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           counterPassIndex;
+} VkPerformanceQuerySubmitInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters, VkPerformanceCounterDescriptionKHR* pCounterDescriptions);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR)(VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses);
+typedef VkResult (VKAPI_PTR *PFN_vkAcquireProfilingLockKHR)(VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo);
+typedef void (VKAPI_PTR *PFN_vkReleaseProfilingLockKHR)(VkDevice device);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t*                                   pCounterCount,
+    VkPerformanceCounterKHR*                    pCounters,
+    VkPerformanceCounterDescriptionKHR*         pCounterDescriptions);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkQueryPoolPerformanceCreateInfoKHR*  pPerformanceQueryCreateInfo,
+    uint32_t*                                   pNumPasses);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAcquireProfilingLockKHR(
+    VkDevice                                    device,
+    const VkAcquireProfilingLockInfoKHR*        pInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkReleaseProfilingLockKHR(
+    VkDevice                                    device);
+#endif
+
+
+#define VK_KHR_maintenance2 1
+#define VK_KHR_MAINTENANCE2_SPEC_VERSION  1
+#define VK_KHR_MAINTENANCE2_EXTENSION_NAME "VK_KHR_maintenance2"
+typedef VkPointClippingBehavior VkPointClippingBehaviorKHR;
+
+typedef VkTessellationDomainOrigin VkTessellationDomainOriginKHR;
+
+typedef VkPhysicalDevicePointClippingProperties VkPhysicalDevicePointClippingPropertiesKHR;
+
+typedef VkRenderPassInputAttachmentAspectCreateInfo VkRenderPassInputAttachmentAspectCreateInfoKHR;
+
+typedef VkInputAttachmentAspectReference VkInputAttachmentAspectReferenceKHR;
+
+typedef VkImageViewUsageCreateInfo VkImageViewUsageCreateInfoKHR;
+
+typedef VkPipelineTessellationDomainOriginStateCreateInfo VkPipelineTessellationDomainOriginStateCreateInfoKHR;
+
+
+
+#define VK_KHR_get_surface_capabilities2 1
+#define VK_KHR_GET_SURFACE_CAPABILITIES_2_SPEC_VERSION 1
+#define VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME "VK_KHR_get_surface_capabilities2"
+typedef struct VkPhysicalDeviceSurfaceInfo2KHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkSurfaceKHR       surface;
+} VkPhysicalDeviceSurfaceInfo2KHR;
+
+typedef struct VkSurfaceCapabilities2KHR {
+    VkStructureType             sType;
+    void*                       pNext;
+    VkSurfaceCapabilitiesKHR    surfaceCapabilities;
+} VkSurfaceCapabilities2KHR;
+
+typedef struct VkSurfaceFormat2KHR {
+    VkStructureType       sType;
+    void*                 pNext;
+    VkSurfaceFormatKHR    surfaceFormat;
+} VkSurfaceFormat2KHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormats2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkSurfaceCapabilities2KHR*                  pSurfaceCapabilities);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormats2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormat2KHR*                        pSurfaceFormats);
+#endif
+
+
+#define VK_KHR_variable_pointers 1
+#define VK_KHR_VARIABLE_POINTERS_SPEC_VERSION 1
+#define VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME "VK_KHR_variable_pointers"
+typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointerFeaturesKHR;
+
+typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointersFeaturesKHR;
+
+
+
+#define VK_KHR_get_display_properties2 1
+#define VK_KHR_GET_DISPLAY_PROPERTIES_2_SPEC_VERSION 1
+#define VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_display_properties2"
+typedef struct VkDisplayProperties2KHR {
+    VkStructureType           sType;
+    void*                     pNext;
+    VkDisplayPropertiesKHR    displayProperties;
+} VkDisplayProperties2KHR;
+
+typedef struct VkDisplayPlaneProperties2KHR {
+    VkStructureType                sType;
+    void*                          pNext;
+    VkDisplayPlanePropertiesKHR    displayPlaneProperties;
+} VkDisplayPlaneProperties2KHR;
+
+typedef struct VkDisplayModeProperties2KHR {
+    VkStructureType               sType;
+    void*                         pNext;
+    VkDisplayModePropertiesKHR    displayModeProperties;
+} VkDisplayModeProperties2KHR;
+
+typedef struct VkDisplayPlaneInfo2KHR {
+    VkStructureType     sType;
+    const void*         pNext;
+    VkDisplayModeKHR    mode;
+    uint32_t            planeIndex;
+} VkDisplayPlaneInfo2KHR;
+
+typedef struct VkDisplayPlaneCapabilities2KHR {
+    VkStructureType                  sType;
+    void*                            pNext;
+    VkDisplayPlaneCapabilitiesKHR    capabilities;
+} VkDisplayPlaneCapabilities2KHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModeProperties2KHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayProperties2KHR*                    pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPlaneProperties2KHR*               pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModeProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModeProperties2KHR*                pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilities2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDisplayPlaneInfo2KHR*               pDisplayPlaneInfo,
+    VkDisplayPlaneCapabilities2KHR*             pCapabilities);
+#endif
+
+
+#define VK_KHR_dedicated_allocation 1
+#define VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION 3
+#define VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_KHR_dedicated_allocation"
+typedef VkMemoryDedicatedRequirements VkMemoryDedicatedRequirementsKHR;
+
+typedef VkMemoryDedicatedAllocateInfo VkMemoryDedicatedAllocateInfoKHR;
+
+
+
+#define VK_KHR_storage_buffer_storage_class 1
+#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION 1
+#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME "VK_KHR_storage_buffer_storage_class"
+
+
+#define VK_KHR_relaxed_block_layout 1
+#define VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION 1
+#define VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME "VK_KHR_relaxed_block_layout"
+
+
+#define VK_KHR_get_memory_requirements2 1
+#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION 1
+#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME "VK_KHR_get_memory_requirements2"
+typedef VkBufferMemoryRequirementsInfo2 VkBufferMemoryRequirementsInfo2KHR;
+
+typedef VkImageMemoryRequirementsInfo2 VkImageMemoryRequirementsInfo2KHR;
+
+typedef VkImageSparseMemoryRequirementsInfo2 VkImageSparseMemoryRequirementsInfo2KHR;
+
+typedef VkSparseImageMemoryRequirements2 VkSparseImageMemoryRequirements2KHR;
+
+typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements2KHR)(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements2KHR)(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements2KHR)(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements);
+#endif
+
+
+#define VK_KHR_image_format_list 1
+#define VK_KHR_IMAGE_FORMAT_LIST_SPEC_VERSION 1
+#define VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME "VK_KHR_image_format_list"
+typedef struct VkImageFormatListCreateInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           viewFormatCount;
+    const VkFormat*    pViewFormats;
+} VkImageFormatListCreateInfoKHR;
+
+
+
+#define VK_KHR_sampler_ycbcr_conversion 1
+typedef VkSamplerYcbcrConversion VkSamplerYcbcrConversionKHR;
+
+#define VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION 14
+#define VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME "VK_KHR_sampler_ycbcr_conversion"
+typedef VkSamplerYcbcrModelConversion VkSamplerYcbcrModelConversionKHR;
+
+typedef VkSamplerYcbcrRange VkSamplerYcbcrRangeKHR;
+
+typedef VkChromaLocation VkChromaLocationKHR;
+
+typedef VkSamplerYcbcrConversionCreateInfo VkSamplerYcbcrConversionCreateInfoKHR;
+
+typedef VkSamplerYcbcrConversionInfo VkSamplerYcbcrConversionInfoKHR;
+
+typedef VkBindImagePlaneMemoryInfo VkBindImagePlaneMemoryInfoKHR;
+
+typedef VkImagePlaneMemoryRequirementsInfo VkImagePlaneMemoryRequirementsInfoKHR;
+
+typedef VkPhysicalDeviceSamplerYcbcrConversionFeatures VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR;
+
+typedef VkSamplerYcbcrConversionImageFormatProperties VkSamplerYcbcrConversionImageFormatPropertiesKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateSamplerYcbcrConversionKHR)(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion);
+typedef void (VKAPI_PTR *PFN_vkDestroySamplerYcbcrConversionKHR)(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator);
+#endif
+
+
+#define VK_KHR_bind_memory2 1
+#define VK_KHR_BIND_MEMORY_2_SPEC_VERSION 1
+#define VK_KHR_BIND_MEMORY_2_EXTENSION_NAME "VK_KHR_bind_memory2"
+typedef VkBindBufferMemoryInfo VkBindBufferMemoryInfoKHR;
+
+typedef VkBindImageMemoryInfo VkBindImageMemoryInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos);
+typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos);
+#endif
+
+
+#define VK_KHR_maintenance3 1
+#define VK_KHR_MAINTENANCE3_SPEC_VERSION  1
+#define VK_KHR_MAINTENANCE3_EXTENSION_NAME "VK_KHR_maintenance3"
+typedef VkPhysicalDeviceMaintenance3Properties VkPhysicalDeviceMaintenance3PropertiesKHR;
+
+typedef VkDescriptorSetLayoutSupport VkDescriptorSetLayoutSupportKHR;
+
+typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSupportKHR)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupportKHR(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport);
+#endif
+
+
+#define VK_KHR_draw_indirect_count 1
+#define VK_KHR_DRAW_INDIRECT_COUNT_SPEC_VERSION 1
+#define VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_KHR_draw_indirect_count"
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCountKHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountKHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+#endif
+
+
+#define VK_KHR_shader_subgroup_extended_types 1
+#define VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_SPEC_VERSION 1
+#define VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME "VK_KHR_shader_subgroup_extended_types"
+typedef struct VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderSubgroupExtendedTypes;
+} VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR;
+
+
+
+#define VK_KHR_8bit_storage 1
+#define VK_KHR_8BIT_STORAGE_SPEC_VERSION  1
+#define VK_KHR_8BIT_STORAGE_EXTENSION_NAME "VK_KHR_8bit_storage"
+typedef struct VkPhysicalDevice8BitStorageFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           storageBuffer8BitAccess;
+    VkBool32           uniformAndStorageBuffer8BitAccess;
+    VkBool32           storagePushConstant8;
+} VkPhysicalDevice8BitStorageFeaturesKHR;
+
+
+
+#define VK_KHR_shader_atomic_int64 1
+#define VK_KHR_SHADER_ATOMIC_INT64_SPEC_VERSION 1
+#define VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME "VK_KHR_shader_atomic_int64"
+typedef struct VkPhysicalDeviceShaderAtomicInt64FeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderBufferInt64Atomics;
+    VkBool32           shaderSharedInt64Atomics;
+} VkPhysicalDeviceShaderAtomicInt64FeaturesKHR;
+
+
+
+#define VK_KHR_shader_clock 1
+#define VK_KHR_SHADER_CLOCK_SPEC_VERSION  1
+#define VK_KHR_SHADER_CLOCK_EXTENSION_NAME "VK_KHR_shader_clock"
+typedef struct VkPhysicalDeviceShaderClockFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderSubgroupClock;
+    VkBool32           shaderDeviceClock;
+} VkPhysicalDeviceShaderClockFeaturesKHR;
+
+
+
+#define VK_KHR_driver_properties 1
+#define VK_MAX_DRIVER_NAME_SIZE_KHR       256
+#define VK_MAX_DRIVER_INFO_SIZE_KHR       256
+#define VK_KHR_DRIVER_PROPERTIES_SPEC_VERSION 1
+#define VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME "VK_KHR_driver_properties"
+
+typedef enum VkDriverIdKHR {
+    VK_DRIVER_ID_AMD_PROPRIETARY_KHR = 1,
+    VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR = 2,
+    VK_DRIVER_ID_MESA_RADV_KHR = 3,
+    VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR = 4,
+    VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR = 5,
+    VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR = 6,
+    VK_DRIVER_ID_IMAGINATION_PROPRIETARY_KHR = 7,
+    VK_DRIVER_ID_QUALCOMM_PROPRIETARY_KHR = 8,
+    VK_DRIVER_ID_ARM_PROPRIETARY_KHR = 9,
+    VK_DRIVER_ID_GOOGLE_SWIFTSHADER_KHR = 10,
+    VK_DRIVER_ID_GGP_PROPRIETARY_KHR = 11,
+    VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR = 12,
+    VK_DRIVER_ID_BEGIN_RANGE_KHR = VK_DRIVER_ID_AMD_PROPRIETARY_KHR,
+    VK_DRIVER_ID_END_RANGE_KHR = VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR,
+    VK_DRIVER_ID_RANGE_SIZE_KHR = (VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR - VK_DRIVER_ID_AMD_PROPRIETARY_KHR + 1),
+    VK_DRIVER_ID_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkDriverIdKHR;
+typedef struct VkConformanceVersionKHR {
+    uint8_t    major;
+    uint8_t    minor;
+    uint8_t    subminor;
+    uint8_t    patch;
+} VkConformanceVersionKHR;
+
+typedef struct VkPhysicalDeviceDriverPropertiesKHR {
+    VkStructureType            sType;
+    void*                      pNext;
+    VkDriverIdKHR              driverID;
+    char                       driverName[VK_MAX_DRIVER_NAME_SIZE_KHR];
+    char                       driverInfo[VK_MAX_DRIVER_INFO_SIZE_KHR];
+    VkConformanceVersionKHR    conformanceVersion;
+} VkPhysicalDeviceDriverPropertiesKHR;
+
+
+
+#define VK_KHR_shader_float_controls 1
+#define VK_KHR_SHADER_FLOAT_CONTROLS_SPEC_VERSION 4
+#define VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME "VK_KHR_shader_float_controls"
+
+typedef enum VkShaderFloatControlsIndependenceKHR {
+    VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR = 0,
+    VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR = 1,
+    VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR = 2,
+    VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_BEGIN_RANGE_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR,
+    VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_END_RANGE_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR,
+    VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_RANGE_SIZE_KHR = (VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR - VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR + 1),
+    VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkShaderFloatControlsIndependenceKHR;
+typedef struct VkPhysicalDeviceFloatControlsPropertiesKHR {
+    VkStructureType                         sType;
+    void*                                   pNext;
+    VkShaderFloatControlsIndependenceKHR    denormBehaviorIndependence;
+    VkShaderFloatControlsIndependenceKHR    roundingModeIndependence;
+    VkBool32                                shaderSignedZeroInfNanPreserveFloat16;
+    VkBool32                                shaderSignedZeroInfNanPreserveFloat32;
+    VkBool32                                shaderSignedZeroInfNanPreserveFloat64;
+    VkBool32                                shaderDenormPreserveFloat16;
+    VkBool32                                shaderDenormPreserveFloat32;
+    VkBool32                                shaderDenormPreserveFloat64;
+    VkBool32                                shaderDenormFlushToZeroFloat16;
+    VkBool32                                shaderDenormFlushToZeroFloat32;
+    VkBool32                                shaderDenormFlushToZeroFloat64;
+    VkBool32                                shaderRoundingModeRTEFloat16;
+    VkBool32                                shaderRoundingModeRTEFloat32;
+    VkBool32                                shaderRoundingModeRTEFloat64;
+    VkBool32                                shaderRoundingModeRTZFloat16;
+    VkBool32                                shaderRoundingModeRTZFloat32;
+    VkBool32                                shaderRoundingModeRTZFloat64;
+} VkPhysicalDeviceFloatControlsPropertiesKHR;
+
+
+
+#define VK_KHR_depth_stencil_resolve 1
+#define VK_KHR_DEPTH_STENCIL_RESOLVE_SPEC_VERSION 1
+#define VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME "VK_KHR_depth_stencil_resolve"
+
+typedef enum VkResolveModeFlagBitsKHR {
+    VK_RESOLVE_MODE_NONE_KHR = 0,
+    VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR = 0x00000001,
+    VK_RESOLVE_MODE_AVERAGE_BIT_KHR = 0x00000002,
+    VK_RESOLVE_MODE_MIN_BIT_KHR = 0x00000004,
+    VK_RESOLVE_MODE_MAX_BIT_KHR = 0x00000008,
+    VK_RESOLVE_MODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkResolveModeFlagBitsKHR;
+typedef VkFlags VkResolveModeFlagsKHR;
+typedef struct VkSubpassDescriptionDepthStencilResolveKHR {
+    VkStructureType                     sType;
+    const void*                         pNext;
+    VkResolveModeFlagBitsKHR            depthResolveMode;
+    VkResolveModeFlagBitsKHR            stencilResolveMode;
+    const VkAttachmentReference2KHR*    pDepthStencilResolveAttachment;
+} VkSubpassDescriptionDepthStencilResolveKHR;
+
+typedef struct VkPhysicalDeviceDepthStencilResolvePropertiesKHR {
+    VkStructureType          sType;
+    void*                    pNext;
+    VkResolveModeFlagsKHR    supportedDepthResolveModes;
+    VkResolveModeFlagsKHR    supportedStencilResolveModes;
+    VkBool32                 independentResolveNone;
+    VkBool32                 independentResolve;
+} VkPhysicalDeviceDepthStencilResolvePropertiesKHR;
+
+
+
+#define VK_KHR_swapchain_mutable_format 1
+#define VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION 1
+#define VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME "VK_KHR_swapchain_mutable_format"
+
+
+#define VK_KHR_timeline_semaphore 1
+#define VK_KHR_TIMELINE_SEMAPHORE_SPEC_VERSION 2
+#define VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME "VK_KHR_timeline_semaphore"
+
+typedef enum VkSemaphoreTypeKHR {
+    VK_SEMAPHORE_TYPE_BINARY_KHR = 0,
+    VK_SEMAPHORE_TYPE_TIMELINE_KHR = 1,
+    VK_SEMAPHORE_TYPE_BEGIN_RANGE_KHR = VK_SEMAPHORE_TYPE_BINARY_KHR,
+    VK_SEMAPHORE_TYPE_END_RANGE_KHR = VK_SEMAPHORE_TYPE_TIMELINE_KHR,
+    VK_SEMAPHORE_TYPE_RANGE_SIZE_KHR = (VK_SEMAPHORE_TYPE_TIMELINE_KHR - VK_SEMAPHORE_TYPE_BINARY_KHR + 1),
+    VK_SEMAPHORE_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkSemaphoreTypeKHR;
+
+typedef enum VkSemaphoreWaitFlagBitsKHR {
+    VK_SEMAPHORE_WAIT_ANY_BIT_KHR = 0x00000001,
+    VK_SEMAPHORE_WAIT_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkSemaphoreWaitFlagBitsKHR;
+typedef VkFlags VkSemaphoreWaitFlagsKHR;
+typedef struct VkPhysicalDeviceTimelineSemaphoreFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           timelineSemaphore;
+} VkPhysicalDeviceTimelineSemaphoreFeaturesKHR;
+
+typedef struct VkPhysicalDeviceTimelineSemaphorePropertiesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    uint64_t           maxTimelineSemaphoreValueDifference;
+} VkPhysicalDeviceTimelineSemaphorePropertiesKHR;
+
+typedef struct VkSemaphoreTypeCreateInfoKHR {
+    VkStructureType       sType;
+    const void*           pNext;
+    VkSemaphoreTypeKHR    semaphoreType;
+    uint64_t              initialValue;
+} VkSemaphoreTypeCreateInfoKHR;
+
+typedef struct VkTimelineSemaphoreSubmitInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           waitSemaphoreValueCount;
+    const uint64_t*    pWaitSemaphoreValues;
+    uint32_t           signalSemaphoreValueCount;
+    const uint64_t*    pSignalSemaphoreValues;
+} VkTimelineSemaphoreSubmitInfoKHR;
+
+typedef struct VkSemaphoreWaitInfoKHR {
+    VkStructureType            sType;
+    const void*                pNext;
+    VkSemaphoreWaitFlagsKHR    flags;
+    uint32_t                   semaphoreCount;
+    const VkSemaphore*         pSemaphores;
+    const uint64_t*            pValues;
+} VkSemaphoreWaitInfoKHR;
+
+typedef struct VkSemaphoreSignalInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkSemaphore        semaphore;
+    uint64_t           value;
+} VkSemaphoreSignalInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreCounterValueKHR)(VkDevice device, VkSemaphore semaphore, uint64_t* pValue);
+typedef VkResult (VKAPI_PTR *PFN_vkWaitSemaphoresKHR)(VkDevice device, const VkSemaphoreWaitInfoKHR* pWaitInfo, uint64_t timeout);
+typedef VkResult (VKAPI_PTR *PFN_vkSignalSemaphoreKHR)(VkDevice device, const VkSemaphoreSignalInfoKHR* pSignalInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreCounterValueKHR(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    uint64_t*                                   pValue);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkWaitSemaphoresKHR(
+    VkDevice                                    device,
+    const VkSemaphoreWaitInfoKHR*               pWaitInfo,
+    uint64_t                                    timeout);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkSignalSemaphoreKHR(
+    VkDevice                                    device,
+    const VkSemaphoreSignalInfoKHR*             pSignalInfo);
+#endif
+
+
+#define VK_KHR_vulkan_memory_model 1
+#define VK_KHR_VULKAN_MEMORY_MODEL_SPEC_VERSION 3
+#define VK_KHR_VULKAN_MEMORY_MODEL_EXTENSION_NAME "VK_KHR_vulkan_memory_model"
+typedef struct VkPhysicalDeviceVulkanMemoryModelFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           vulkanMemoryModel;
+    VkBool32           vulkanMemoryModelDeviceScope;
+    VkBool32           vulkanMemoryModelAvailabilityVisibilityChains;
+} VkPhysicalDeviceVulkanMemoryModelFeaturesKHR;
+
+
+
+#define VK_KHR_spirv_1_4 1
+#define VK_KHR_SPIRV_1_4_SPEC_VERSION     1
+#define VK_KHR_SPIRV_1_4_EXTENSION_NAME   "VK_KHR_spirv_1_4"
+
+
+#define VK_KHR_surface_protected_capabilities 1
+#define VK_KHR_SURFACE_PROTECTED_CAPABILITIES_SPEC_VERSION 1
+#define VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME "VK_KHR_surface_protected_capabilities"
+typedef struct VkSurfaceProtectedCapabilitiesKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           supportsProtected;
+} VkSurfaceProtectedCapabilitiesKHR;
+
+
+
+#define VK_KHR_separate_depth_stencil_layouts 1
+#define VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_SPEC_VERSION 1
+#define VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME "VK_KHR_separate_depth_stencil_layouts"
+typedef struct VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           separateDepthStencilLayouts;
+} VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR;
+
+typedef struct VkAttachmentReferenceStencilLayoutKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkImageLayout      stencilLayout;
+} VkAttachmentReferenceStencilLayoutKHR;
+
+typedef struct VkAttachmentDescriptionStencilLayoutKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkImageLayout      stencilInitialLayout;
+    VkImageLayout      stencilFinalLayout;
+} VkAttachmentDescriptionStencilLayoutKHR;
+
+
+
+#define VK_KHR_uniform_buffer_standard_layout 1
+#define VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_SPEC_VERSION 1
+#define VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME "VK_KHR_uniform_buffer_standard_layout"
+typedef struct VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           uniformBufferStandardLayout;
+} VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR;
+
+
+
+#define VK_KHR_buffer_device_address 1
+typedef uint64_t VkDeviceAddress;
+#define VK_KHR_BUFFER_DEVICE_ADDRESS_SPEC_VERSION 1
+#define VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME "VK_KHR_buffer_device_address"
+typedef struct VkPhysicalDeviceBufferDeviceAddressFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           bufferDeviceAddress;
+    VkBool32           bufferDeviceAddressCaptureReplay;
+    VkBool32           bufferDeviceAddressMultiDevice;
+} VkPhysicalDeviceBufferDeviceAddressFeaturesKHR;
+
+typedef struct VkBufferDeviceAddressInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBuffer           buffer;
+} VkBufferDeviceAddressInfoKHR;
+
+typedef struct VkBufferOpaqueCaptureAddressCreateInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint64_t           opaqueCaptureAddress;
+} VkBufferOpaqueCaptureAddressCreateInfoKHR;
+
+typedef struct VkMemoryOpaqueCaptureAddressAllocateInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint64_t           opaqueCaptureAddress;
+} VkMemoryOpaqueCaptureAddressAllocateInfoKHR;
+
+typedef struct VkDeviceMemoryOpaqueCaptureAddressInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkDeviceMemory     memory;
+} VkDeviceMemoryOpaqueCaptureAddressInfoKHR;
+
+typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddressKHR)(VkDevice device, const VkBufferDeviceAddressInfoKHR* pInfo);
+typedef uint64_t (VKAPI_PTR *PFN_vkGetBufferOpaqueCaptureAddressKHR)(VkDevice device, const VkBufferDeviceAddressInfoKHR* pInfo);
+typedef uint64_t (VKAPI_PTR *PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR)(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo);
+
+VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo);
+
+VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo);
+#endif
+
+
+#define VK_KHR_pipeline_executable_properties 1
+#define VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_SPEC_VERSION 1
+#define VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME "VK_KHR_pipeline_executable_properties"
+
+typedef enum VkPipelineExecutableStatisticFormatKHR {
+    VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR = 0,
+    VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR = 1,
+    VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR = 2,
+    VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR = 3,
+    VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BEGIN_RANGE_KHR = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR,
+    VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_END_RANGE_KHR = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR,
+    VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_RANGE_SIZE_KHR = (VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR - VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR + 1),
+    VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkPipelineExecutableStatisticFormatKHR;
+typedef struct VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           pipelineExecutableInfo;
+} VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
+
+typedef struct VkPipelineInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkPipeline         pipeline;
+} VkPipelineInfoKHR;
+
+typedef struct VkPipelineExecutablePropertiesKHR {
+    VkStructureType       sType;
+    void*                 pNext;
+    VkShaderStageFlags    stages;
+    char                  name[VK_MAX_DESCRIPTION_SIZE];
+    char                  description[VK_MAX_DESCRIPTION_SIZE];
+    uint32_t              subgroupSize;
+} VkPipelineExecutablePropertiesKHR;
+
+typedef struct VkPipelineExecutableInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkPipeline         pipeline;
+    uint32_t           executableIndex;
+} VkPipelineExecutableInfoKHR;
+
+typedef union VkPipelineExecutableStatisticValueKHR {
+    VkBool32    b32;
+    int64_t     i64;
+    uint64_t    u64;
+    double      f64;
+} VkPipelineExecutableStatisticValueKHR;
+
+typedef struct VkPipelineExecutableStatisticKHR {
+    VkStructureType                           sType;
+    void*                                     pNext;
+    char                                      name[VK_MAX_DESCRIPTION_SIZE];
+    char                                      description[VK_MAX_DESCRIPTION_SIZE];
+    VkPipelineExecutableStatisticFormatKHR    format;
+    VkPipelineExecutableStatisticValueKHR     value;
+} VkPipelineExecutableStatisticKHR;
+
+typedef struct VkPipelineExecutableInternalRepresentationKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    char               name[VK_MAX_DESCRIPTION_SIZE];
+    char               description[VK_MAX_DESCRIPTION_SIZE];
+    VkBool32           isText;
+    size_t             dataSize;
+    void*              pData;
+} VkPipelineExecutableInternalRepresentationKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutablePropertiesKHR)(VkDevice                        device, const VkPipelineInfoKHR*        pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutableStatisticsKHR)(VkDevice                        device, const VkPipelineExecutableInfoKHR*  pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutableInternalRepresentationsKHR)(VkDevice                        device, const VkPipelineExecutableInfoKHR*  pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutablePropertiesKHR(
+    VkDevice                                    device,
+    const VkPipelineInfoKHR*                    pPipelineInfo,
+    uint32_t*                                   pExecutableCount,
+    VkPipelineExecutablePropertiesKHR*          pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutableStatisticsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pStatisticCount,
+    VkPipelineExecutableStatisticKHR*           pStatistics);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutableInternalRepresentationsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pInternalRepresentationCount,
+    VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations);
+#endif
+
+
+#define VK_EXT_debug_report 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugReportCallbackEXT)
+#define VK_EXT_DEBUG_REPORT_SPEC_VERSION  9
+#define VK_EXT_DEBUG_REPORT_EXTENSION_NAME "VK_EXT_debug_report"
+
+typedef enum VkDebugReportObjectTypeEXT {
+    VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT = 0,
+    VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT = 1,
+    VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT = 2,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT = 3,
+    VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT = 4,
+    VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT = 5,
+    VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT = 6,
+    VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT = 7,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT = 8,
+    VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT = 9,
+    VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT = 10,
+    VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT = 11,
+    VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT = 12,
+    VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT = 13,
+    VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT = 14,
+    VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT = 15,
+    VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT = 16,
+    VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT = 17,
+    VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT = 18,
+    VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT = 19,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT = 20,
+    VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT = 21,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT = 22,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT = 23,
+    VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT = 24,
+    VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT = 25,
+    VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT = 26,
+    VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT = 27,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT = 28,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT = 29,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT = 30,
+    VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT = 31,
+    VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT = 32,
+    VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT = 33,
+    VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT = 1000156000,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT = 1000085000,
+    VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT = 1000165000,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT,
+    VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT,
+    VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT,
+    VK_DEBUG_REPORT_OBJECT_TYPE_BEGIN_RANGE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+    VK_DEBUG_REPORT_OBJECT_TYPE_END_RANGE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT,
+    VK_DEBUG_REPORT_OBJECT_TYPE_RANGE_SIZE_EXT = (VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT - VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT + 1),
+    VK_DEBUG_REPORT_OBJECT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDebugReportObjectTypeEXT;
+
+typedef enum VkDebugReportFlagBitsEXT {
+    VK_DEBUG_REPORT_INFORMATION_BIT_EXT = 0x00000001,
+    VK_DEBUG_REPORT_WARNING_BIT_EXT = 0x00000002,
+    VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT = 0x00000004,
+    VK_DEBUG_REPORT_ERROR_BIT_EXT = 0x00000008,
+    VK_DEBUG_REPORT_DEBUG_BIT_EXT = 0x00000010,
+    VK_DEBUG_REPORT_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDebugReportFlagBitsEXT;
+typedef VkFlags VkDebugReportFlagsEXT;
+typedef VkBool32 (VKAPI_PTR *PFN_vkDebugReportCallbackEXT)(
+    VkDebugReportFlagsEXT                       flags,
+    VkDebugReportObjectTypeEXT                  objectType,
+    uint64_t                                    object,
+    size_t                                      location,
+    int32_t                                     messageCode,
+    const char*                                 pLayerPrefix,
+    const char*                                 pMessage,
+    void*                                       pUserData);
+
+typedef struct VkDebugReportCallbackCreateInfoEXT {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkDebugReportFlagsEXT           flags;
+    PFN_vkDebugReportCallbackEXT    pfnCallback;
+    void*                           pUserData;
+} VkDebugReportCallbackCreateInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugReportCallbackEXT)(VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback);
+typedef void (VKAPI_PTR *PFN_vkDestroyDebugReportCallbackEXT)(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkDebugReportMessageEXT)(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    const VkDebugReportCallbackCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugReportCallbackEXT*                   pCallback);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    VkDebugReportCallbackEXT                    callback,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT(
+    VkInstance                                  instance,
+    VkDebugReportFlagsEXT                       flags,
+    VkDebugReportObjectTypeEXT                  objectType,
+    uint64_t                                    object,
+    size_t                                      location,
+    int32_t                                     messageCode,
+    const char*                                 pLayerPrefix,
+    const char*                                 pMessage);
+#endif
+
+
+#define VK_NV_glsl_shader 1
+#define VK_NV_GLSL_SHADER_SPEC_VERSION    1
+#define VK_NV_GLSL_SHADER_EXTENSION_NAME  "VK_NV_glsl_shader"
+
+
+#define VK_EXT_depth_range_unrestricted 1
+#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION 1
+#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME "VK_EXT_depth_range_unrestricted"
+
+
+#define VK_IMG_filter_cubic 1
+#define VK_IMG_FILTER_CUBIC_SPEC_VERSION  1
+#define VK_IMG_FILTER_CUBIC_EXTENSION_NAME "VK_IMG_filter_cubic"
+
+
+#define VK_AMD_rasterization_order 1
+#define VK_AMD_RASTERIZATION_ORDER_SPEC_VERSION 1
+#define VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME "VK_AMD_rasterization_order"
+
+typedef enum VkRasterizationOrderAMD {
+    VK_RASTERIZATION_ORDER_STRICT_AMD = 0,
+    VK_RASTERIZATION_ORDER_RELAXED_AMD = 1,
+    VK_RASTERIZATION_ORDER_BEGIN_RANGE_AMD = VK_RASTERIZATION_ORDER_STRICT_AMD,
+    VK_RASTERIZATION_ORDER_END_RANGE_AMD = VK_RASTERIZATION_ORDER_RELAXED_AMD,
+    VK_RASTERIZATION_ORDER_RANGE_SIZE_AMD = (VK_RASTERIZATION_ORDER_RELAXED_AMD - VK_RASTERIZATION_ORDER_STRICT_AMD + 1),
+    VK_RASTERIZATION_ORDER_MAX_ENUM_AMD = 0x7FFFFFFF
+} VkRasterizationOrderAMD;
+typedef struct VkPipelineRasterizationStateRasterizationOrderAMD {
+    VkStructureType            sType;
+    const void*                pNext;
+    VkRasterizationOrderAMD    rasterizationOrder;
+} VkPipelineRasterizationStateRasterizationOrderAMD;
+
+
+
+#define VK_AMD_shader_trinary_minmax 1
+#define VK_AMD_SHADER_TRINARY_MINMAX_SPEC_VERSION 1
+#define VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME "VK_AMD_shader_trinary_minmax"
+
+
+#define VK_AMD_shader_explicit_vertex_parameter 1
+#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION 1
+#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME "VK_AMD_shader_explicit_vertex_parameter"
+
+
+#define VK_EXT_debug_marker 1
+#define VK_EXT_DEBUG_MARKER_SPEC_VERSION  4
+#define VK_EXT_DEBUG_MARKER_EXTENSION_NAME "VK_EXT_debug_marker"
+typedef struct VkDebugMarkerObjectNameInfoEXT {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkDebugReportObjectTypeEXT    objectType;
+    uint64_t                      object;
+    const char*                   pObjectName;
+} VkDebugMarkerObjectNameInfoEXT;
+
+typedef struct VkDebugMarkerObjectTagInfoEXT {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkDebugReportObjectTypeEXT    objectType;
+    uint64_t                      object;
+    uint64_t                      tagName;
+    size_t                        tagSize;
+    const void*                   pTag;
+} VkDebugMarkerObjectTagInfoEXT;
+
+typedef struct VkDebugMarkerMarkerInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    const char*        pMarkerName;
+    float              color[4];
+} VkDebugMarkerMarkerInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectTagEXT)(VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectNameEXT)(VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerBeginEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerEndEXT)(VkCommandBuffer commandBuffer);
+typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerInsertEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectTagEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectTagInfoEXT*        pTagInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectNameEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectNameInfoEXT*       pNameInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerBeginEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerEndEXT(
+    VkCommandBuffer                             commandBuffer);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerInsertEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo);
+#endif
+
+
+#define VK_AMD_gcn_shader 1
+#define VK_AMD_GCN_SHADER_SPEC_VERSION    1
+#define VK_AMD_GCN_SHADER_EXTENSION_NAME  "VK_AMD_gcn_shader"
+
+
+#define VK_NV_dedicated_allocation 1
+#define VK_NV_DEDICATED_ALLOCATION_SPEC_VERSION 1
+#define VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_NV_dedicated_allocation"
+typedef struct VkDedicatedAllocationImageCreateInfoNV {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           dedicatedAllocation;
+} VkDedicatedAllocationImageCreateInfoNV;
+
+typedef struct VkDedicatedAllocationBufferCreateInfoNV {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           dedicatedAllocation;
+} VkDedicatedAllocationBufferCreateInfoNV;
+
+typedef struct VkDedicatedAllocationMemoryAllocateInfoNV {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkImage            image;
+    VkBuffer           buffer;
+} VkDedicatedAllocationMemoryAllocateInfoNV;
+
+
+
+#define VK_EXT_transform_feedback 1
+#define VK_EXT_TRANSFORM_FEEDBACK_SPEC_VERSION 1
+#define VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME "VK_EXT_transform_feedback"
+typedef VkFlags VkPipelineRasterizationStateStreamCreateFlagsEXT;
+typedef struct VkPhysicalDeviceTransformFeedbackFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           transformFeedback;
+    VkBool32           geometryStreams;
+} VkPhysicalDeviceTransformFeedbackFeaturesEXT;
+
+typedef struct VkPhysicalDeviceTransformFeedbackPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxTransformFeedbackStreams;
+    uint32_t           maxTransformFeedbackBuffers;
+    VkDeviceSize       maxTransformFeedbackBufferSize;
+    uint32_t           maxTransformFeedbackStreamDataSize;
+    uint32_t           maxTransformFeedbackBufferDataSize;
+    uint32_t           maxTransformFeedbackBufferDataStride;
+    VkBool32           transformFeedbackQueries;
+    VkBool32           transformFeedbackStreamsLinesTriangles;
+    VkBool32           transformFeedbackRasterizationStreamSelect;
+    VkBool32           transformFeedbackDraw;
+} VkPhysicalDeviceTransformFeedbackPropertiesEXT;
+
+typedef struct VkPipelineRasterizationStateStreamCreateInfoEXT {
+    VkStructureType                                     sType;
+    const void*                                         pNext;
+    VkPipelineRasterizationStateStreamCreateFlagsEXT    flags;
+    uint32_t                                            rasterizationStream;
+} VkPipelineRasterizationStateStreamCreateInfoEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdBindTransformFeedbackBuffersEXT)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes);
+typedef void (VKAPI_PTR *PFN_vkCmdBeginTransformFeedbackEXT)(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets);
+typedef void (VKAPI_PTR *PFN_vkCmdEndTransformFeedbackEXT)(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets);
+typedef void (VKAPI_PTR *PFN_vkCmdBeginQueryIndexedEXT)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index);
+typedef void (VKAPI_PTR *PFN_vkCmdEndQueryIndexedEXT)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectByteCountEXT)(VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdBindTransformFeedbackBuffersEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets,
+    const VkDeviceSize*                         pSizes);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags,
+    uint32_t                                    index);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    uint32_t                                    index);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectByteCountEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstInstance,
+    VkBuffer                                    counterBuffer,
+    VkDeviceSize                                counterBufferOffset,
+    uint32_t                                    counterOffset,
+    uint32_t                                    vertexStride);
+#endif
+
+
+#define VK_NVX_image_view_handle 1
+#define VK_NVX_IMAGE_VIEW_HANDLE_SPEC_VERSION 1
+#define VK_NVX_IMAGE_VIEW_HANDLE_EXTENSION_NAME "VK_NVX_image_view_handle"
+typedef struct VkImageViewHandleInfoNVX {
+    VkStructureType     sType;
+    const void*         pNext;
+    VkImageView         imageView;
+    VkDescriptorType    descriptorType;
+    VkSampler           sampler;
+} VkImageViewHandleInfoNVX;
+
+typedef uint32_t (VKAPI_PTR *PFN_vkGetImageViewHandleNVX)(VkDevice device, const VkImageViewHandleInfoNVX* pInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR uint32_t VKAPI_CALL vkGetImageViewHandleNVX(
+    VkDevice                                    device,
+    const VkImageViewHandleInfoNVX*             pInfo);
+#endif
+
+
+#define VK_AMD_draw_indirect_count 1
+#define VK_AMD_DRAW_INDIRECT_COUNT_SPEC_VERSION 2
+#define VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_AMD_draw_indirect_count"
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+#endif
+
+
+#define VK_AMD_negative_viewport_height 1
+#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION 1
+#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME "VK_AMD_negative_viewport_height"
+
+
+#define VK_AMD_gpu_shader_half_float 1
+#define VK_AMD_GPU_SHADER_HALF_FLOAT_SPEC_VERSION 2
+#define VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME "VK_AMD_gpu_shader_half_float"
+
+
+#define VK_AMD_shader_ballot 1
+#define VK_AMD_SHADER_BALLOT_SPEC_VERSION 1
+#define VK_AMD_SHADER_BALLOT_EXTENSION_NAME "VK_AMD_shader_ballot"
+
+
+#define VK_AMD_texture_gather_bias_lod 1
+#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_SPEC_VERSION 1
+#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME "VK_AMD_texture_gather_bias_lod"
+typedef struct VkTextureLODGatherFormatPropertiesAMD {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           supportsTextureGatherLODBiasAMD;
+} VkTextureLODGatherFormatPropertiesAMD;
+
+
+
+#define VK_AMD_shader_info 1
+#define VK_AMD_SHADER_INFO_SPEC_VERSION   1
+#define VK_AMD_SHADER_INFO_EXTENSION_NAME "VK_AMD_shader_info"
+
+typedef enum VkShaderInfoTypeAMD {
+    VK_SHADER_INFO_TYPE_STATISTICS_AMD = 0,
+    VK_SHADER_INFO_TYPE_BINARY_AMD = 1,
+    VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD = 2,
+    VK_SHADER_INFO_TYPE_BEGIN_RANGE_AMD = VK_SHADER_INFO_TYPE_STATISTICS_AMD,
+    VK_SHADER_INFO_TYPE_END_RANGE_AMD = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD,
+    VK_SHADER_INFO_TYPE_RANGE_SIZE_AMD = (VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD - VK_SHADER_INFO_TYPE_STATISTICS_AMD + 1),
+    VK_SHADER_INFO_TYPE_MAX_ENUM_AMD = 0x7FFFFFFF
+} VkShaderInfoTypeAMD;
+typedef struct VkShaderResourceUsageAMD {
+    uint32_t    numUsedVgprs;
+    uint32_t    numUsedSgprs;
+    uint32_t    ldsSizePerLocalWorkGroup;
+    size_t      ldsUsageSizeInBytes;
+    size_t      scratchMemUsageInBytes;
+} VkShaderResourceUsageAMD;
+
+typedef struct VkShaderStatisticsInfoAMD {
+    VkShaderStageFlags          shaderStageMask;
+    VkShaderResourceUsageAMD    resourceUsage;
+    uint32_t                    numPhysicalVgprs;
+    uint32_t                    numPhysicalSgprs;
+    uint32_t                    numAvailableVgprs;
+    uint32_t                    numAvailableSgprs;
+    uint32_t                    computeWorkGroupSize[3];
+} VkShaderStatisticsInfoAMD;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetShaderInfoAMD)(VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetShaderInfoAMD(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    VkShaderStageFlagBits                       shaderStage,
+    VkShaderInfoTypeAMD                         infoType,
+    size_t*                                     pInfoSize,
+    void*                                       pInfo);
+#endif
+
+
+#define VK_AMD_shader_image_load_store_lod 1
+#define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_SPEC_VERSION 1
+#define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_EXTENSION_NAME "VK_AMD_shader_image_load_store_lod"
+
+
+#define VK_NV_corner_sampled_image 1
+#define VK_NV_CORNER_SAMPLED_IMAGE_SPEC_VERSION 2
+#define VK_NV_CORNER_SAMPLED_IMAGE_EXTENSION_NAME "VK_NV_corner_sampled_image"
+typedef struct VkPhysicalDeviceCornerSampledImageFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           cornerSampledImage;
+} VkPhysicalDeviceCornerSampledImageFeaturesNV;
+
+
+
+#define VK_IMG_format_pvrtc 1
+#define VK_IMG_FORMAT_PVRTC_SPEC_VERSION  1
+#define VK_IMG_FORMAT_PVRTC_EXTENSION_NAME "VK_IMG_format_pvrtc"
+
+
+#define VK_NV_external_memory_capabilities 1
+#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1
+#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_NV_external_memory_capabilities"
+
+typedef enum VkExternalMemoryHandleTypeFlagBitsNV {
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV = 0x00000001,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV = 0x00000002,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV = 0x00000004,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV = 0x00000008,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkExternalMemoryHandleTypeFlagBitsNV;
+typedef VkFlags VkExternalMemoryHandleTypeFlagsNV;
+
+typedef enum VkExternalMemoryFeatureFlagBitsNV {
+    VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV = 0x00000001,
+    VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV = 0x00000002,
+    VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV = 0x00000004,
+    VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkExternalMemoryFeatureFlagBitsNV;
+typedef VkFlags VkExternalMemoryFeatureFlagsNV;
+typedef struct VkExternalImageFormatPropertiesNV {
+    VkImageFormatProperties              imageFormatProperties;
+    VkExternalMemoryFeatureFlagsNV       externalMemoryFeatures;
+    VkExternalMemoryHandleTypeFlagsNV    exportFromImportedHandleTypes;
+    VkExternalMemoryHandleTypeFlagsNV    compatibleHandleTypes;
+} VkExternalImageFormatPropertiesNV;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceExternalImageFormatPropertiesNV(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageCreateFlags                          flags,
+    VkExternalMemoryHandleTypeFlagsNV           externalHandleType,
+    VkExternalImageFormatPropertiesNV*          pExternalImageFormatProperties);
+#endif
+
+
+#define VK_NV_external_memory 1
+#define VK_NV_EXTERNAL_MEMORY_SPEC_VERSION 1
+#define VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME "VK_NV_external_memory"
+typedef struct VkExternalMemoryImageCreateInfoNV {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkExternalMemoryHandleTypeFlagsNV    handleTypes;
+} VkExternalMemoryImageCreateInfoNV;
+
+typedef struct VkExportMemoryAllocateInfoNV {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkExternalMemoryHandleTypeFlagsNV    handleTypes;
+} VkExportMemoryAllocateInfoNV;
+
+
+
+#define VK_EXT_validation_flags 1
+#define VK_EXT_VALIDATION_FLAGS_SPEC_VERSION 2
+#define VK_EXT_VALIDATION_FLAGS_EXTENSION_NAME "VK_EXT_validation_flags"
+
+typedef enum VkValidationCheckEXT {
+    VK_VALIDATION_CHECK_ALL_EXT = 0,
+    VK_VALIDATION_CHECK_SHADERS_EXT = 1,
+    VK_VALIDATION_CHECK_BEGIN_RANGE_EXT = VK_VALIDATION_CHECK_ALL_EXT,
+    VK_VALIDATION_CHECK_END_RANGE_EXT = VK_VALIDATION_CHECK_SHADERS_EXT,
+    VK_VALIDATION_CHECK_RANGE_SIZE_EXT = (VK_VALIDATION_CHECK_SHADERS_EXT - VK_VALIDATION_CHECK_ALL_EXT + 1),
+    VK_VALIDATION_CHECK_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkValidationCheckEXT;
+typedef struct VkValidationFlagsEXT {
+    VkStructureType                sType;
+    const void*                    pNext;
+    uint32_t                       disabledValidationCheckCount;
+    const VkValidationCheckEXT*    pDisabledValidationChecks;
+} VkValidationFlagsEXT;
+
+
+
+#define VK_EXT_shader_subgroup_ballot 1
+#define VK_EXT_SHADER_SUBGROUP_BALLOT_SPEC_VERSION 1
+#define VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME "VK_EXT_shader_subgroup_ballot"
+
+
+#define VK_EXT_shader_subgroup_vote 1
+#define VK_EXT_SHADER_SUBGROUP_VOTE_SPEC_VERSION 1
+#define VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME "VK_EXT_shader_subgroup_vote"
+
+
+#define VK_EXT_texture_compression_astc_hdr 1
+#define VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_SPEC_VERSION 1
+#define VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_EXTENSION_NAME "VK_EXT_texture_compression_astc_hdr"
+typedef struct VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           textureCompressionASTC_HDR;
+} VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT;
+
+
+
+#define VK_EXT_astc_decode_mode 1
+#define VK_EXT_ASTC_DECODE_MODE_SPEC_VERSION 1
+#define VK_EXT_ASTC_DECODE_MODE_EXTENSION_NAME "VK_EXT_astc_decode_mode"
+typedef struct VkImageViewASTCDecodeModeEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkFormat           decodeMode;
+} VkImageViewASTCDecodeModeEXT;
+
+typedef struct VkPhysicalDeviceASTCDecodeFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           decodeModeSharedExponent;
+} VkPhysicalDeviceASTCDecodeFeaturesEXT;
+
+
+
+#define VK_EXT_conditional_rendering 1
+#define VK_EXT_CONDITIONAL_RENDERING_SPEC_VERSION 2
+#define VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME "VK_EXT_conditional_rendering"
+
+typedef enum VkConditionalRenderingFlagBitsEXT {
+    VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT = 0x00000001,
+    VK_CONDITIONAL_RENDERING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkConditionalRenderingFlagBitsEXT;
+typedef VkFlags VkConditionalRenderingFlagsEXT;
+typedef struct VkConditionalRenderingBeginInfoEXT {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkBuffer                          buffer;
+    VkDeviceSize                      offset;
+    VkConditionalRenderingFlagsEXT    flags;
+} VkConditionalRenderingBeginInfoEXT;
+
+typedef struct VkPhysicalDeviceConditionalRenderingFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           conditionalRendering;
+    VkBool32           inheritedConditionalRendering;
+} VkPhysicalDeviceConditionalRenderingFeaturesEXT;
+
+typedef struct VkCommandBufferInheritanceConditionalRenderingInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           conditionalRenderingEnable;
+} VkCommandBufferInheritanceConditionalRenderingInfoEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdBeginConditionalRenderingEXT)(VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin);
+typedef void (VKAPI_PTR *PFN_vkCmdEndConditionalRenderingEXT)(VkCommandBuffer commandBuffer);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkConditionalRenderingBeginInfoEXT*   pConditionalRenderingBegin);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer);
+#endif
+
+
+#define VK_NVX_device_generated_commands 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkObjectTableNVX)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNVX)
+#define VK_NVX_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 3
+#define VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME "VK_NVX_device_generated_commands"
+
+typedef enum VkIndirectCommandsTokenTypeNVX {
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX = 0,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX = 1,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX = 2,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX = 3,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX = 4,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX = 5,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX = 6,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX = 7,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_BEGIN_RANGE_NVX = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_END_RANGE_NVX = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_RANGE_SIZE_NVX = (VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX - VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX + 1),
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_NVX = 0x7FFFFFFF
+} VkIndirectCommandsTokenTypeNVX;
+
+typedef enum VkObjectEntryTypeNVX {
+    VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX = 0,
+    VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX = 1,
+    VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX = 2,
+    VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX = 3,
+    VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX = 4,
+    VK_OBJECT_ENTRY_TYPE_BEGIN_RANGE_NVX = VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX,
+    VK_OBJECT_ENTRY_TYPE_END_RANGE_NVX = VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX,
+    VK_OBJECT_ENTRY_TYPE_RANGE_SIZE_NVX = (VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX - VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX + 1),
+    VK_OBJECT_ENTRY_TYPE_MAX_ENUM_NVX = 0x7FFFFFFF
+} VkObjectEntryTypeNVX;
+
+typedef enum VkIndirectCommandsLayoutUsageFlagBitsNVX {
+    VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX = 0x00000001,
+    VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX = 0x00000002,
+    VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX = 0x00000004,
+    VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX = 0x00000008,
+    VK_INDIRECT_COMMANDS_LAYOUT_USAGE_FLAG_BITS_MAX_ENUM_NVX = 0x7FFFFFFF
+} VkIndirectCommandsLayoutUsageFlagBitsNVX;
+typedef VkFlags VkIndirectCommandsLayoutUsageFlagsNVX;
+
+typedef enum VkObjectEntryUsageFlagBitsNVX {
+    VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX = 0x00000001,
+    VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX = 0x00000002,
+    VK_OBJECT_ENTRY_USAGE_FLAG_BITS_MAX_ENUM_NVX = 0x7FFFFFFF
+} VkObjectEntryUsageFlagBitsNVX;
+typedef VkFlags VkObjectEntryUsageFlagsNVX;
+typedef struct VkDeviceGeneratedCommandsFeaturesNVX {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           computeBindingPointSupport;
+} VkDeviceGeneratedCommandsFeaturesNVX;
+
+typedef struct VkDeviceGeneratedCommandsLimitsNVX {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           maxIndirectCommandsLayoutTokenCount;
+    uint32_t           maxObjectEntryCounts;
+    uint32_t           minSequenceCountBufferOffsetAlignment;
+    uint32_t           minSequenceIndexBufferOffsetAlignment;
+    uint32_t           minCommandsTokenBufferOffsetAlignment;
+} VkDeviceGeneratedCommandsLimitsNVX;
+
+typedef struct VkIndirectCommandsTokenNVX {
+    VkIndirectCommandsTokenTypeNVX    tokenType;
+    VkBuffer                          buffer;
+    VkDeviceSize                      offset;
+} VkIndirectCommandsTokenNVX;
+
+typedef struct VkIndirectCommandsLayoutTokenNVX {
+    VkIndirectCommandsTokenTypeNVX    tokenType;
+    uint32_t                          bindingUnit;
+    uint32_t                          dynamicCount;
+    uint32_t                          divisor;
+} VkIndirectCommandsLayoutTokenNVX;
+
+typedef struct VkIndirectCommandsLayoutCreateInfoNVX {
+    VkStructureType                            sType;
+    const void*                                pNext;
+    VkPipelineBindPoint                        pipelineBindPoint;
+    VkIndirectCommandsLayoutUsageFlagsNVX      flags;
+    uint32_t                                   tokenCount;
+    const VkIndirectCommandsLayoutTokenNVX*    pTokens;
+} VkIndirectCommandsLayoutCreateInfoNVX;
+
+typedef struct VkCmdProcessCommandsInfoNVX {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkObjectTableNVX                     objectTable;
+    VkIndirectCommandsLayoutNVX          indirectCommandsLayout;
+    uint32_t                             indirectCommandsTokenCount;
+    const VkIndirectCommandsTokenNVX*    pIndirectCommandsTokens;
+    uint32_t                             maxSequencesCount;
+    VkCommandBuffer                      targetCommandBuffer;
+    VkBuffer                             sequencesCountBuffer;
+    VkDeviceSize                         sequencesCountOffset;
+    VkBuffer                             sequencesIndexBuffer;
+    VkDeviceSize                         sequencesIndexOffset;
+} VkCmdProcessCommandsInfoNVX;
+
+typedef struct VkCmdReserveSpaceForCommandsInfoNVX {
+    VkStructureType                sType;
+    const void*                    pNext;
+    VkObjectTableNVX               objectTable;
+    VkIndirectCommandsLayoutNVX    indirectCommandsLayout;
+    uint32_t                       maxSequencesCount;
+} VkCmdReserveSpaceForCommandsInfoNVX;
+
+typedef struct VkObjectTableCreateInfoNVX {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    uint32_t                             objectCount;
+    const VkObjectEntryTypeNVX*          pObjectEntryTypes;
+    const uint32_t*                      pObjectEntryCounts;
+    const VkObjectEntryUsageFlagsNVX*    pObjectEntryUsageFlags;
+    uint32_t                             maxUniformBuffersPerDescriptor;
+    uint32_t                             maxStorageBuffersPerDescriptor;
+    uint32_t                             maxStorageImagesPerDescriptor;
+    uint32_t                             maxSampledImagesPerDescriptor;
+    uint32_t                             maxPipelineLayouts;
+} VkObjectTableCreateInfoNVX;
+
+typedef struct VkObjectTableEntryNVX {
+    VkObjectEntryTypeNVX          type;
+    VkObjectEntryUsageFlagsNVX    flags;
+} VkObjectTableEntryNVX;
+
+typedef struct VkObjectTablePipelineEntryNVX {
+    VkObjectEntryTypeNVX          type;
+    VkObjectEntryUsageFlagsNVX    flags;
+    VkPipeline                    pipeline;
+} VkObjectTablePipelineEntryNVX;
+
+typedef struct VkObjectTableDescriptorSetEntryNVX {
+    VkObjectEntryTypeNVX          type;
+    VkObjectEntryUsageFlagsNVX    flags;
+    VkPipelineLayout              pipelineLayout;
+    VkDescriptorSet               descriptorSet;
+} VkObjectTableDescriptorSetEntryNVX;
+
+typedef struct VkObjectTableVertexBufferEntryNVX {
+    VkObjectEntryTypeNVX          type;
+    VkObjectEntryUsageFlagsNVX    flags;
+    VkBuffer                      buffer;
+} VkObjectTableVertexBufferEntryNVX;
+
+typedef struct VkObjectTableIndexBufferEntryNVX {
+    VkObjectEntryTypeNVX          type;
+    VkObjectEntryUsageFlagsNVX    flags;
+    VkBuffer                      buffer;
+    VkIndexType                   indexType;
+} VkObjectTableIndexBufferEntryNVX;
+
+typedef struct VkObjectTablePushConstantEntryNVX {
+    VkObjectEntryTypeNVX          type;
+    VkObjectEntryUsageFlagsNVX    flags;
+    VkPipelineLayout              pipelineLayout;
+    VkShaderStageFlags            stageFlags;
+} VkObjectTablePushConstantEntryNVX;
+
+typedef void (VKAPI_PTR *PFN_vkCmdProcessCommandsNVX)(VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdReserveSpaceForCommandsNVX)(VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectCommandsLayoutNVX)(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout);
+typedef void (VKAPI_PTR *PFN_vkDestroyIndirectCommandsLayoutNVX)(VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateObjectTableNVX)(VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable);
+typedef void (VKAPI_PTR *PFN_vkDestroyObjectTableNVX)(VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkRegisterObjectsNVX)(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const*    ppObjectTableEntries, const uint32_t* pObjectIndices);
+typedef VkResult (VKAPI_PTR *PFN_vkUnregisterObjectsNVX)(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX)(VkPhysicalDevice physicalDevice, VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, VkDeviceGeneratedCommandsLimitsNVX* pLimits);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdProcessCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdProcessCommandsInfoNVX*          pProcessCommandsInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdReserveSpaceForCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdReserveSpaceForCommandsInfoNVX*  pReserveSpaceInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkIndirectCommandsLayoutNVX*                pIndirectCommandsLayout);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    VkIndirectCommandsLayoutNVX                 indirectCommandsLayout,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateObjectTableNVX(
+    VkDevice                                    device,
+    const VkObjectTableCreateInfoNVX*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkObjectTableNVX*                           pObjectTable);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyObjectTableNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkRegisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectTableEntryNVX* const*         ppObjectTableEntries,
+    const uint32_t*                             pObjectIndices);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkUnregisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectEntryTypeNVX*                 pObjectEntryTypes,
+    const uint32_t*                             pObjectIndices);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX(
+    VkPhysicalDevice                            physicalDevice,
+    VkDeviceGeneratedCommandsFeaturesNVX*       pFeatures,
+    VkDeviceGeneratedCommandsLimitsNVX*         pLimits);
+#endif
+
+
+#define VK_NV_clip_space_w_scaling 1
+#define VK_NV_CLIP_SPACE_W_SCALING_SPEC_VERSION 1
+#define VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME "VK_NV_clip_space_w_scaling"
+typedef struct VkViewportWScalingNV {
+    float    xcoeff;
+    float    ycoeff;
+} VkViewportWScalingNV;
+
+typedef struct VkPipelineViewportWScalingStateCreateInfoNV {
+    VkStructureType                sType;
+    const void*                    pNext;
+    VkBool32                       viewportWScalingEnable;
+    uint32_t                       viewportCount;
+    const VkViewportWScalingNV*    pViewportWScalings;
+} VkPipelineViewportWScalingStateCreateInfoNV;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWScalingNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWScalingNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewportWScalingNV*                 pViewportWScalings);
+#endif
+
+
+#define VK_EXT_direct_mode_display 1
+#define VK_EXT_DIRECT_MODE_DISPLAY_SPEC_VERSION 1
+#define VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME "VK_EXT_direct_mode_display"
+typedef VkResult (VKAPI_PTR *PFN_vkReleaseDisplayEXT)(VkPhysicalDevice physicalDevice, VkDisplayKHR display);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkReleaseDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display);
+#endif
+
+
+#define VK_EXT_display_surface_counter 1
+#define VK_EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION 1
+#define VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME "VK_EXT_display_surface_counter"
+
+typedef enum VkSurfaceCounterFlagBitsEXT {
+    VK_SURFACE_COUNTER_VBLANK_EXT = 0x00000001,
+    VK_SURFACE_COUNTER_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkSurfaceCounterFlagBitsEXT;
+typedef VkFlags VkSurfaceCounterFlagsEXT;
+typedef struct VkSurfaceCapabilities2EXT {
+    VkStructureType                  sType;
+    void*                            pNext;
+    uint32_t                         minImageCount;
+    uint32_t                         maxImageCount;
+    VkExtent2D                       currentExtent;
+    VkExtent2D                       minImageExtent;
+    VkExtent2D                       maxImageExtent;
+    uint32_t                         maxImageArrayLayers;
+    VkSurfaceTransformFlagsKHR       supportedTransforms;
+    VkSurfaceTransformFlagBitsKHR    currentTransform;
+    VkCompositeAlphaFlagsKHR         supportedCompositeAlpha;
+    VkImageUsageFlags                supportedUsageFlags;
+    VkSurfaceCounterFlagsEXT         supportedSurfaceCounters;
+} VkSurfaceCapabilities2EXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilities2EXT*                  pSurfaceCapabilities);
+#endif
+
+
+#define VK_EXT_display_control 1
+#define VK_EXT_DISPLAY_CONTROL_SPEC_VERSION 1
+#define VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME "VK_EXT_display_control"
+
+typedef enum VkDisplayPowerStateEXT {
+    VK_DISPLAY_POWER_STATE_OFF_EXT = 0,
+    VK_DISPLAY_POWER_STATE_SUSPEND_EXT = 1,
+    VK_DISPLAY_POWER_STATE_ON_EXT = 2,
+    VK_DISPLAY_POWER_STATE_BEGIN_RANGE_EXT = VK_DISPLAY_POWER_STATE_OFF_EXT,
+    VK_DISPLAY_POWER_STATE_END_RANGE_EXT = VK_DISPLAY_POWER_STATE_ON_EXT,
+    VK_DISPLAY_POWER_STATE_RANGE_SIZE_EXT = (VK_DISPLAY_POWER_STATE_ON_EXT - VK_DISPLAY_POWER_STATE_OFF_EXT + 1),
+    VK_DISPLAY_POWER_STATE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDisplayPowerStateEXT;
+
+typedef enum VkDeviceEventTypeEXT {
+    VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT = 0,
+    VK_DEVICE_EVENT_TYPE_BEGIN_RANGE_EXT = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT,
+    VK_DEVICE_EVENT_TYPE_END_RANGE_EXT = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT,
+    VK_DEVICE_EVENT_TYPE_RANGE_SIZE_EXT = (VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT - VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT + 1),
+    VK_DEVICE_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDeviceEventTypeEXT;
+
+typedef enum VkDisplayEventTypeEXT {
+    VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT = 0,
+    VK_DISPLAY_EVENT_TYPE_BEGIN_RANGE_EXT = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT,
+    VK_DISPLAY_EVENT_TYPE_END_RANGE_EXT = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT,
+    VK_DISPLAY_EVENT_TYPE_RANGE_SIZE_EXT = (VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT - VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT + 1),
+    VK_DISPLAY_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDisplayEventTypeEXT;
+typedef struct VkDisplayPowerInfoEXT {
+    VkStructureType           sType;
+    const void*               pNext;
+    VkDisplayPowerStateEXT    powerState;
+} VkDisplayPowerInfoEXT;
+
+typedef struct VkDeviceEventInfoEXT {
+    VkStructureType         sType;
+    const void*             pNext;
+    VkDeviceEventTypeEXT    deviceEvent;
+} VkDeviceEventInfoEXT;
+
+typedef struct VkDisplayEventInfoEXT {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkDisplayEventTypeEXT    displayEvent;
+} VkDisplayEventInfoEXT;
+
+typedef struct VkSwapchainCounterCreateInfoEXT {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkSurfaceCounterFlagsEXT    surfaceCounters;
+} VkSwapchainCounterCreateInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkDisplayPowerControlEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkRegisterDeviceEventEXT)(VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence);
+typedef VkResult (VKAPI_PTR *PFN_vkRegisterDisplayEventEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence);
+typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainCounterEXT)(VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkDisplayPowerControlEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayPowerInfoEXT*                pDisplayPowerInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDeviceEventEXT(
+    VkDevice                                    device,
+    const VkDeviceEventInfoEXT*                 pDeviceEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDisplayEventEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayEventInfoEXT*                pDisplayEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainCounterEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkSurfaceCounterFlagBitsEXT                 counter,
+    uint64_t*                                   pCounterValue);
+#endif
+
+
+#define VK_GOOGLE_display_timing 1
+#define VK_GOOGLE_DISPLAY_TIMING_SPEC_VERSION 1
+#define VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME "VK_GOOGLE_display_timing"
+typedef struct VkRefreshCycleDurationGOOGLE {
+    uint64_t    refreshDuration;
+} VkRefreshCycleDurationGOOGLE;
+
+typedef struct VkPastPresentationTimingGOOGLE {
+    uint32_t    presentID;
+    uint64_t    desiredPresentTime;
+    uint64_t    actualPresentTime;
+    uint64_t    earliestPresentTime;
+    uint64_t    presentMargin;
+} VkPastPresentationTimingGOOGLE;
+
+typedef struct VkPresentTimeGOOGLE {
+    uint32_t    presentID;
+    uint64_t    desiredPresentTime;
+} VkPresentTimeGOOGLE;
+
+typedef struct VkPresentTimesInfoGOOGLE {
+    VkStructureType               sType;
+    const void*                   pNext;
+    uint32_t                      swapchainCount;
+    const VkPresentTimeGOOGLE*    pTimes;
+} VkPresentTimesInfoGOOGLE;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetRefreshCycleDurationGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPastPresentationTimingGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetRefreshCycleDurationGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkRefreshCycleDurationGOOGLE*               pDisplayTimingProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPastPresentationTimingGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pPresentationTimingCount,
+    VkPastPresentationTimingGOOGLE*             pPresentationTimings);
+#endif
+
+
+#define VK_NV_sample_mask_override_coverage 1
+#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_SPEC_VERSION 1
+#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME "VK_NV_sample_mask_override_coverage"
+
+
+#define VK_NV_geometry_shader_passthrough 1
+#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_SPEC_VERSION 1
+#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME "VK_NV_geometry_shader_passthrough"
+
+
+#define VK_NV_viewport_array2 1
+#define VK_NV_VIEWPORT_ARRAY2_SPEC_VERSION 1
+#define VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME "VK_NV_viewport_array2"
+
+
+#define VK_NVX_multiview_per_view_attributes 1
+#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION 1
+#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME "VK_NVX_multiview_per_view_attributes"
+typedef struct VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           perViewPositionAllComponents;
+} VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
+
+
+
+#define VK_NV_viewport_swizzle 1
+#define VK_NV_VIEWPORT_SWIZZLE_SPEC_VERSION 1
+#define VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME "VK_NV_viewport_swizzle"
+
+typedef enum VkViewportCoordinateSwizzleNV {
+    VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV = 0,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV = 1,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV = 2,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV = 3,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV = 4,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV = 5,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV = 6,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV = 7,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_BEGIN_RANGE_NV = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_END_RANGE_NV = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_RANGE_SIZE_NV = (VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV + 1),
+    VK_VIEWPORT_COORDINATE_SWIZZLE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkViewportCoordinateSwizzleNV;
+typedef VkFlags VkPipelineViewportSwizzleStateCreateFlagsNV;
+typedef struct VkViewportSwizzleNV {
+    VkViewportCoordinateSwizzleNV    x;
+    VkViewportCoordinateSwizzleNV    y;
+    VkViewportCoordinateSwizzleNV    z;
+    VkViewportCoordinateSwizzleNV    w;
+} VkViewportSwizzleNV;
+
+typedef struct VkPipelineViewportSwizzleStateCreateInfoNV {
+    VkStructureType                                sType;
+    const void*                                    pNext;
+    VkPipelineViewportSwizzleStateCreateFlagsNV    flags;
+    uint32_t                                       viewportCount;
+    const VkViewportSwizzleNV*                     pViewportSwizzles;
+} VkPipelineViewportSwizzleStateCreateInfoNV;
+
+
+
+#define VK_EXT_discard_rectangles 1
+#define VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION 1
+#define VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME "VK_EXT_discard_rectangles"
+
+typedef enum VkDiscardRectangleModeEXT {
+    VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT = 0,
+    VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT = 1,
+    VK_DISCARD_RECTANGLE_MODE_BEGIN_RANGE_EXT = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT,
+    VK_DISCARD_RECTANGLE_MODE_END_RANGE_EXT = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT,
+    VK_DISCARD_RECTANGLE_MODE_RANGE_SIZE_EXT = (VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT - VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT + 1),
+    VK_DISCARD_RECTANGLE_MODE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDiscardRectangleModeEXT;
+typedef VkFlags VkPipelineDiscardRectangleStateCreateFlagsEXT;
+typedef struct VkPhysicalDeviceDiscardRectanglePropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxDiscardRectangles;
+} VkPhysicalDeviceDiscardRectanglePropertiesEXT;
+
+typedef struct VkPipelineDiscardRectangleStateCreateInfoEXT {
+    VkStructureType                                  sType;
+    const void*                                      pNext;
+    VkPipelineDiscardRectangleStateCreateFlagsEXT    flags;
+    VkDiscardRectangleModeEXT                        discardRectangleMode;
+    uint32_t                                         discardRectangleCount;
+    const VkRect2D*                                  pDiscardRectangles;
+} VkPipelineDiscardRectangleStateCreateInfoEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetDiscardRectangleEXT)(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDiscardRectangleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstDiscardRectangle,
+    uint32_t                                    discardRectangleCount,
+    const VkRect2D*                             pDiscardRectangles);
+#endif
+
+
+#define VK_EXT_conservative_rasterization 1
+#define VK_EXT_CONSERVATIVE_RASTERIZATION_SPEC_VERSION 1
+#define VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME "VK_EXT_conservative_rasterization"
+
+typedef enum VkConservativeRasterizationModeEXT {
+    VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT = 0,
+    VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT = 1,
+    VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT = 2,
+    VK_CONSERVATIVE_RASTERIZATION_MODE_BEGIN_RANGE_EXT = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT,
+    VK_CONSERVATIVE_RASTERIZATION_MODE_END_RANGE_EXT = VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT,
+    VK_CONSERVATIVE_RASTERIZATION_MODE_RANGE_SIZE_EXT = (VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT - VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT + 1),
+    VK_CONSERVATIVE_RASTERIZATION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkConservativeRasterizationModeEXT;
+typedef VkFlags VkPipelineRasterizationConservativeStateCreateFlagsEXT;
+typedef struct VkPhysicalDeviceConservativeRasterizationPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    float              primitiveOverestimationSize;
+    float              maxExtraPrimitiveOverestimationSize;
+    float              extraPrimitiveOverestimationSizeGranularity;
+    VkBool32           primitiveUnderestimation;
+    VkBool32           conservativePointAndLineRasterization;
+    VkBool32           degenerateTrianglesRasterized;
+    VkBool32           degenerateLinesRasterized;
+    VkBool32           fullyCoveredFragmentShaderInputVariable;
+    VkBool32           conservativeRasterizationPostDepthCoverage;
+} VkPhysicalDeviceConservativeRasterizationPropertiesEXT;
+
+typedef struct VkPipelineRasterizationConservativeStateCreateInfoEXT {
+    VkStructureType                                           sType;
+    const void*                                               pNext;
+    VkPipelineRasterizationConservativeStateCreateFlagsEXT    flags;
+    VkConservativeRasterizationModeEXT                        conservativeRasterizationMode;
+    float                                                     extraPrimitiveOverestimationSize;
+} VkPipelineRasterizationConservativeStateCreateInfoEXT;
+
+
+
+#define VK_EXT_depth_clip_enable 1
+#define VK_EXT_DEPTH_CLIP_ENABLE_SPEC_VERSION 1
+#define VK_EXT_DEPTH_CLIP_ENABLE_EXTENSION_NAME "VK_EXT_depth_clip_enable"
+typedef VkFlags VkPipelineRasterizationDepthClipStateCreateFlagsEXT;
+typedef struct VkPhysicalDeviceDepthClipEnableFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           depthClipEnable;
+} VkPhysicalDeviceDepthClipEnableFeaturesEXT;
+
+typedef struct VkPipelineRasterizationDepthClipStateCreateInfoEXT {
+    VkStructureType                                        sType;
+    const void*                                            pNext;
+    VkPipelineRasterizationDepthClipStateCreateFlagsEXT    flags;
+    VkBool32                                               depthClipEnable;
+} VkPipelineRasterizationDepthClipStateCreateInfoEXT;
+
+
+
+#define VK_EXT_swapchain_colorspace 1
+#define VK_EXT_SWAPCHAIN_COLOR_SPACE_SPEC_VERSION 4
+#define VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME "VK_EXT_swapchain_colorspace"
+
+
+#define VK_EXT_hdr_metadata 1
+#define VK_EXT_HDR_METADATA_SPEC_VERSION  2
+#define VK_EXT_HDR_METADATA_EXTENSION_NAME "VK_EXT_hdr_metadata"
+typedef struct VkXYColorEXT {
+    float    x;
+    float    y;
+} VkXYColorEXT;
+
+typedef struct VkHdrMetadataEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkXYColorEXT       displayPrimaryRed;
+    VkXYColorEXT       displayPrimaryGreen;
+    VkXYColorEXT       displayPrimaryBlue;
+    VkXYColorEXT       whitePoint;
+    float              maxLuminance;
+    float              minLuminance;
+    float              maxContentLightLevel;
+    float              maxFrameAverageLightLevel;
+} VkHdrMetadataEXT;
+
+typedef void (VKAPI_PTR *PFN_vkSetHdrMetadataEXT)(VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkSetHdrMetadataEXT(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainKHR*                       pSwapchains,
+    const VkHdrMetadataEXT*                     pMetadata);
+#endif
+
+
+#define VK_EXT_external_memory_dma_buf 1
+#define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_SPEC_VERSION 1
+#define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME "VK_EXT_external_memory_dma_buf"
+
+
+#define VK_EXT_queue_family_foreign 1
+#define VK_EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION 1
+#define VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME "VK_EXT_queue_family_foreign"
+#define VK_QUEUE_FAMILY_FOREIGN_EXT       (~0U-2)
+
+
+#define VK_EXT_debug_utils 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugUtilsMessengerEXT)
+#define VK_EXT_DEBUG_UTILS_SPEC_VERSION   1
+#define VK_EXT_DEBUG_UTILS_EXTENSION_NAME "VK_EXT_debug_utils"
+typedef VkFlags VkDebugUtilsMessengerCallbackDataFlagsEXT;
+typedef VkFlags VkDebugUtilsMessengerCreateFlagsEXT;
+
+typedef enum VkDebugUtilsMessageSeverityFlagBitsEXT {
+    VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT = 0x00000001,
+    VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT = 0x00000010,
+    VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT = 0x00000100,
+    VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT = 0x00001000,
+    VK_DEBUG_UTILS_MESSAGE_SEVERITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDebugUtilsMessageSeverityFlagBitsEXT;
+typedef VkFlags VkDebugUtilsMessageSeverityFlagsEXT;
+
+typedef enum VkDebugUtilsMessageTypeFlagBitsEXT {
+    VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT = 0x00000001,
+    VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT = 0x00000002,
+    VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT = 0x00000004,
+    VK_DEBUG_UTILS_MESSAGE_TYPE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDebugUtilsMessageTypeFlagBitsEXT;
+typedef VkFlags VkDebugUtilsMessageTypeFlagsEXT;
+typedef struct VkDebugUtilsObjectNameInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkObjectType       objectType;
+    uint64_t           objectHandle;
+    const char*        pObjectName;
+} VkDebugUtilsObjectNameInfoEXT;
+
+typedef struct VkDebugUtilsObjectTagInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkObjectType       objectType;
+    uint64_t           objectHandle;
+    uint64_t           tagName;
+    size_t             tagSize;
+    const void*        pTag;
+} VkDebugUtilsObjectTagInfoEXT;
+
+typedef struct VkDebugUtilsLabelEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    const char*        pLabelName;
+    float              color[4];
+} VkDebugUtilsLabelEXT;
+
+typedef struct VkDebugUtilsMessengerCallbackDataEXT {
+    VkStructureType                              sType;
+    const void*                                  pNext;
+    VkDebugUtilsMessengerCallbackDataFlagsEXT    flags;
+    const char*                                  pMessageIdName;
+    int32_t                                      messageIdNumber;
+    const char*                                  pMessage;
+    uint32_t                                     queueLabelCount;
+    const VkDebugUtilsLabelEXT*                  pQueueLabels;
+    uint32_t                                     cmdBufLabelCount;
+    const VkDebugUtilsLabelEXT*                  pCmdBufLabels;
+    uint32_t                                     objectCount;
+    const VkDebugUtilsObjectNameInfoEXT*         pObjects;
+} VkDebugUtilsMessengerCallbackDataEXT;
+
+typedef VkBool32 (VKAPI_PTR *PFN_vkDebugUtilsMessengerCallbackEXT)(
+    VkDebugUtilsMessageSeverityFlagBitsEXT           messageSeverity,
+    VkDebugUtilsMessageTypeFlagsEXT                  messageTypes,
+    const VkDebugUtilsMessengerCallbackDataEXT*      pCallbackData,
+    void*                                            pUserData);
+
+typedef struct VkDebugUtilsMessengerCreateInfoEXT {
+    VkStructureType                         sType;
+    const void*                             pNext;
+    VkDebugUtilsMessengerCreateFlagsEXT     flags;
+    VkDebugUtilsMessageSeverityFlagsEXT     messageSeverity;
+    VkDebugUtilsMessageTypeFlagsEXT         messageType;
+    PFN_vkDebugUtilsMessengerCallbackEXT    pfnUserCallback;
+    void*                                   pUserData;
+} VkDebugUtilsMessengerCreateInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectNameEXT)(VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectTagEXT)(VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo);
+typedef void (VKAPI_PTR *PFN_vkQueueBeginDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo);
+typedef void (VKAPI_PTR *PFN_vkQueueEndDebugUtilsLabelEXT)(VkQueue queue);
+typedef void (VKAPI_PTR *PFN_vkQueueInsertDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdBeginDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdEndDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer);
+typedef void (VKAPI_PTR *PFN_vkCmdInsertDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugUtilsMessengerEXT)(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger);
+typedef void (VKAPI_PTR *PFN_vkDestroyDebugUtilsMessengerEXT)(VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkSubmitDebugUtilsMessageEXT)(VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectNameEXT(
+    VkDevice                                    device,
+    const VkDebugUtilsObjectNameInfoEXT*        pNameInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectTagEXT(
+    VkDevice                                    device,
+    const VkDebugUtilsObjectTagInfoEXT*         pTagInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkQueueBeginDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkQueueEndDebugUtilsLabelEXT(
+    VkQueue                                     queue);
+
+VKAPI_ATTR void VKAPI_CALL vkQueueInsertDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdInsertDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    const VkDebugUtilsMessengerCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugUtilsMessengerEXT*                   pMessenger);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessengerEXT                    messenger,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkSubmitDebugUtilsMessageEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessageSeverityFlagBitsEXT      messageSeverity,
+    VkDebugUtilsMessageTypeFlagsEXT             messageTypes,
+    const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData);
+#endif
+
+
+#define VK_EXT_sampler_filter_minmax 1
+#define VK_EXT_SAMPLER_FILTER_MINMAX_SPEC_VERSION 2
+#define VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME "VK_EXT_sampler_filter_minmax"
+
+typedef enum VkSamplerReductionModeEXT {
+    VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT = 0,
+    VK_SAMPLER_REDUCTION_MODE_MIN_EXT = 1,
+    VK_SAMPLER_REDUCTION_MODE_MAX_EXT = 2,
+    VK_SAMPLER_REDUCTION_MODE_BEGIN_RANGE_EXT = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT,
+    VK_SAMPLER_REDUCTION_MODE_END_RANGE_EXT = VK_SAMPLER_REDUCTION_MODE_MAX_EXT,
+    VK_SAMPLER_REDUCTION_MODE_RANGE_SIZE_EXT = (VK_SAMPLER_REDUCTION_MODE_MAX_EXT - VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT + 1),
+    VK_SAMPLER_REDUCTION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkSamplerReductionModeEXT;
+typedef struct VkSamplerReductionModeCreateInfoEXT {
+    VkStructureType              sType;
+    const void*                  pNext;
+    VkSamplerReductionModeEXT    reductionMode;
+} VkSamplerReductionModeCreateInfoEXT;
+
+typedef struct VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           filterMinmaxSingleComponentFormats;
+    VkBool32           filterMinmaxImageComponentMapping;
+} VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT;
+
+
+
+#define VK_AMD_gpu_shader_int16 1
+#define VK_AMD_GPU_SHADER_INT16_SPEC_VERSION 2
+#define VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME "VK_AMD_gpu_shader_int16"
+
+
+#define VK_AMD_mixed_attachment_samples 1
+#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION 1
+#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME "VK_AMD_mixed_attachment_samples"
+
+
+#define VK_AMD_shader_fragment_mask 1
+#define VK_AMD_SHADER_FRAGMENT_MASK_SPEC_VERSION 1
+#define VK_AMD_SHADER_FRAGMENT_MASK_EXTENSION_NAME "VK_AMD_shader_fragment_mask"
+
+
+#define VK_EXT_inline_uniform_block 1
+#define VK_EXT_INLINE_UNIFORM_BLOCK_SPEC_VERSION 1
+#define VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME "VK_EXT_inline_uniform_block"
+typedef struct VkPhysicalDeviceInlineUniformBlockFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           inlineUniformBlock;
+    VkBool32           descriptorBindingInlineUniformBlockUpdateAfterBind;
+} VkPhysicalDeviceInlineUniformBlockFeaturesEXT;
+
+typedef struct VkPhysicalDeviceInlineUniformBlockPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxInlineUniformBlockSize;
+    uint32_t           maxPerStageDescriptorInlineUniformBlocks;
+    uint32_t           maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks;
+    uint32_t           maxDescriptorSetInlineUniformBlocks;
+    uint32_t           maxDescriptorSetUpdateAfterBindInlineUniformBlocks;
+} VkPhysicalDeviceInlineUniformBlockPropertiesEXT;
+
+typedef struct VkWriteDescriptorSetInlineUniformBlockEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           dataSize;
+    const void*        pData;
+} VkWriteDescriptorSetInlineUniformBlockEXT;
+
+typedef struct VkDescriptorPoolInlineUniformBlockCreateInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           maxInlineUniformBlockBindings;
+} VkDescriptorPoolInlineUniformBlockCreateInfoEXT;
+
+
+
+#define VK_EXT_shader_stencil_export 1
+#define VK_EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION 1
+#define VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME "VK_EXT_shader_stencil_export"
+
+
+#define VK_EXT_sample_locations 1
+#define VK_EXT_SAMPLE_LOCATIONS_SPEC_VERSION 1
+#define VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME "VK_EXT_sample_locations"
+typedef struct VkSampleLocationEXT {
+    float    x;
+    float    y;
+} VkSampleLocationEXT;
+
+typedef struct VkSampleLocationsInfoEXT {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkSampleCountFlagBits         sampleLocationsPerPixel;
+    VkExtent2D                    sampleLocationGridSize;
+    uint32_t                      sampleLocationsCount;
+    const VkSampleLocationEXT*    pSampleLocations;
+} VkSampleLocationsInfoEXT;
+
+typedef struct VkAttachmentSampleLocationsEXT {
+    uint32_t                    attachmentIndex;
+    VkSampleLocationsInfoEXT    sampleLocationsInfo;
+} VkAttachmentSampleLocationsEXT;
+
+typedef struct VkSubpassSampleLocationsEXT {
+    uint32_t                    subpassIndex;
+    VkSampleLocationsInfoEXT    sampleLocationsInfo;
+} VkSubpassSampleLocationsEXT;
+
+typedef struct VkRenderPassSampleLocationsBeginInfoEXT {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    uint32_t                                 attachmentInitialSampleLocationsCount;
+    const VkAttachmentSampleLocationsEXT*    pAttachmentInitialSampleLocations;
+    uint32_t                                 postSubpassSampleLocationsCount;
+    const VkSubpassSampleLocationsEXT*       pPostSubpassSampleLocations;
+} VkRenderPassSampleLocationsBeginInfoEXT;
+
+typedef struct VkPipelineSampleLocationsStateCreateInfoEXT {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkBool32                    sampleLocationsEnable;
+    VkSampleLocationsInfoEXT    sampleLocationsInfo;
+} VkPipelineSampleLocationsStateCreateInfoEXT;
+
+typedef struct VkPhysicalDeviceSampleLocationsPropertiesEXT {
+    VkStructureType       sType;
+    void*                 pNext;
+    VkSampleCountFlags    sampleLocationSampleCounts;
+    VkExtent2D            maxSampleLocationGridSize;
+    float                 sampleLocationCoordinateRange[2];
+    uint32_t              sampleLocationSubPixelBits;
+    VkBool32              variableSampleLocations;
+} VkPhysicalDeviceSampleLocationsPropertiesEXT;
+
+typedef struct VkMultisamplePropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkExtent2D         maxSampleLocationGridSize;
+} VkMultisamplePropertiesEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetSampleLocationsEXT)(VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT)(VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleLocationsEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkSampleLocationsInfoEXT*             pSampleLocationsInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMultisamplePropertiesEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSampleCountFlagBits                       samples,
+    VkMultisamplePropertiesEXT*                 pMultisampleProperties);
+#endif
+
+
+#define VK_EXT_blend_operation_advanced 1
+#define VK_EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION 2
+#define VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME "VK_EXT_blend_operation_advanced"
+
+typedef enum VkBlendOverlapEXT {
+    VK_BLEND_OVERLAP_UNCORRELATED_EXT = 0,
+    VK_BLEND_OVERLAP_DISJOINT_EXT = 1,
+    VK_BLEND_OVERLAP_CONJOINT_EXT = 2,
+    VK_BLEND_OVERLAP_BEGIN_RANGE_EXT = VK_BLEND_OVERLAP_UNCORRELATED_EXT,
+    VK_BLEND_OVERLAP_END_RANGE_EXT = VK_BLEND_OVERLAP_CONJOINT_EXT,
+    VK_BLEND_OVERLAP_RANGE_SIZE_EXT = (VK_BLEND_OVERLAP_CONJOINT_EXT - VK_BLEND_OVERLAP_UNCORRELATED_EXT + 1),
+    VK_BLEND_OVERLAP_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkBlendOverlapEXT;
+typedef struct VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           advancedBlendCoherentOperations;
+} VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+
+typedef struct VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           advancedBlendMaxColorAttachments;
+    VkBool32           advancedBlendIndependentBlend;
+    VkBool32           advancedBlendNonPremultipliedSrcColor;
+    VkBool32           advancedBlendNonPremultipliedDstColor;
+    VkBool32           advancedBlendCorrelatedOverlap;
+    VkBool32           advancedBlendAllOperations;
+} VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+
+typedef struct VkPipelineColorBlendAdvancedStateCreateInfoEXT {
+    VkStructureType      sType;
+    const void*          pNext;
+    VkBool32             srcPremultiplied;
+    VkBool32             dstPremultiplied;
+    VkBlendOverlapEXT    blendOverlap;
+} VkPipelineColorBlendAdvancedStateCreateInfoEXT;
+
+
+
+#define VK_NV_fragment_coverage_to_color 1
+#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_SPEC_VERSION 1
+#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME "VK_NV_fragment_coverage_to_color"
+typedef VkFlags VkPipelineCoverageToColorStateCreateFlagsNV;
+typedef struct VkPipelineCoverageToColorStateCreateInfoNV {
+    VkStructureType                                sType;
+    const void*                                    pNext;
+    VkPipelineCoverageToColorStateCreateFlagsNV    flags;
+    VkBool32                                       coverageToColorEnable;
+    uint32_t                                       coverageToColorLocation;
+} VkPipelineCoverageToColorStateCreateInfoNV;
+
+
+
+#define VK_NV_framebuffer_mixed_samples 1
+#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION 1
+#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME "VK_NV_framebuffer_mixed_samples"
+
+typedef enum VkCoverageModulationModeNV {
+    VK_COVERAGE_MODULATION_MODE_NONE_NV = 0,
+    VK_COVERAGE_MODULATION_MODE_RGB_NV = 1,
+    VK_COVERAGE_MODULATION_MODE_ALPHA_NV = 2,
+    VK_COVERAGE_MODULATION_MODE_RGBA_NV = 3,
+    VK_COVERAGE_MODULATION_MODE_BEGIN_RANGE_NV = VK_COVERAGE_MODULATION_MODE_NONE_NV,
+    VK_COVERAGE_MODULATION_MODE_END_RANGE_NV = VK_COVERAGE_MODULATION_MODE_RGBA_NV,
+    VK_COVERAGE_MODULATION_MODE_RANGE_SIZE_NV = (VK_COVERAGE_MODULATION_MODE_RGBA_NV - VK_COVERAGE_MODULATION_MODE_NONE_NV + 1),
+    VK_COVERAGE_MODULATION_MODE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkCoverageModulationModeNV;
+typedef VkFlags VkPipelineCoverageModulationStateCreateFlagsNV;
+typedef struct VkPipelineCoverageModulationStateCreateInfoNV {
+    VkStructureType                                   sType;
+    const void*                                       pNext;
+    VkPipelineCoverageModulationStateCreateFlagsNV    flags;
+    VkCoverageModulationModeNV                        coverageModulationMode;
+    VkBool32                                          coverageModulationTableEnable;
+    uint32_t                                          coverageModulationTableCount;
+    const float*                                      pCoverageModulationTable;
+} VkPipelineCoverageModulationStateCreateInfoNV;
+
+
+
+#define VK_NV_fill_rectangle 1
+#define VK_NV_FILL_RECTANGLE_SPEC_VERSION 1
+#define VK_NV_FILL_RECTANGLE_EXTENSION_NAME "VK_NV_fill_rectangle"
+
+
+#define VK_NV_shader_sm_builtins 1
+#define VK_NV_SHADER_SM_BUILTINS_SPEC_VERSION 1
+#define VK_NV_SHADER_SM_BUILTINS_EXTENSION_NAME "VK_NV_shader_sm_builtins"
+typedef struct VkPhysicalDeviceShaderSMBuiltinsPropertiesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           shaderSMCount;
+    uint32_t           shaderWarpsPerSM;
+} VkPhysicalDeviceShaderSMBuiltinsPropertiesNV;
+
+typedef struct VkPhysicalDeviceShaderSMBuiltinsFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderSMBuiltins;
+} VkPhysicalDeviceShaderSMBuiltinsFeaturesNV;
+
+
+
+#define VK_EXT_post_depth_coverage 1
+#define VK_EXT_POST_DEPTH_COVERAGE_SPEC_VERSION 1
+#define VK_EXT_POST_DEPTH_COVERAGE_EXTENSION_NAME "VK_EXT_post_depth_coverage"
+
+
+#define VK_EXT_image_drm_format_modifier 1
+#define VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_SPEC_VERSION 1
+#define VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME "VK_EXT_image_drm_format_modifier"
+typedef struct VkDrmFormatModifierPropertiesEXT {
+    uint64_t                drmFormatModifier;
+    uint32_t                drmFormatModifierPlaneCount;
+    VkFormatFeatureFlags    drmFormatModifierTilingFeatures;
+} VkDrmFormatModifierPropertiesEXT;
+
+typedef struct VkDrmFormatModifierPropertiesListEXT {
+    VkStructureType                      sType;
+    void*                                pNext;
+    uint32_t                             drmFormatModifierCount;
+    VkDrmFormatModifierPropertiesEXT*    pDrmFormatModifierProperties;
+} VkDrmFormatModifierPropertiesListEXT;
+
+typedef struct VkPhysicalDeviceImageDrmFormatModifierInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint64_t           drmFormatModifier;
+    VkSharingMode      sharingMode;
+    uint32_t           queueFamilyIndexCount;
+    const uint32_t*    pQueueFamilyIndices;
+} VkPhysicalDeviceImageDrmFormatModifierInfoEXT;
+
+typedef struct VkImageDrmFormatModifierListCreateInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           drmFormatModifierCount;
+    const uint64_t*    pDrmFormatModifiers;
+} VkImageDrmFormatModifierListCreateInfoEXT;
+
+typedef struct VkImageDrmFormatModifierExplicitCreateInfoEXT {
+    VkStructureType               sType;
+    const void*                   pNext;
+    uint64_t                      drmFormatModifier;
+    uint32_t                      drmFormatModifierPlaneCount;
+    const VkSubresourceLayout*    pPlaneLayouts;
+} VkImageDrmFormatModifierExplicitCreateInfoEXT;
+
+typedef struct VkImageDrmFormatModifierPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint64_t           drmFormatModifier;
+} VkImageDrmFormatModifierPropertiesEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetImageDrmFormatModifierPropertiesEXT)(VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetImageDrmFormatModifierPropertiesEXT(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkImageDrmFormatModifierPropertiesEXT*      pProperties);
+#endif
+
+
+#define VK_EXT_validation_cache 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkValidationCacheEXT)
+#define VK_EXT_VALIDATION_CACHE_SPEC_VERSION 1
+#define VK_EXT_VALIDATION_CACHE_EXTENSION_NAME "VK_EXT_validation_cache"
+
+typedef enum VkValidationCacheHeaderVersionEXT {
+    VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT = 1,
+    VK_VALIDATION_CACHE_HEADER_VERSION_BEGIN_RANGE_EXT = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT,
+    VK_VALIDATION_CACHE_HEADER_VERSION_END_RANGE_EXT = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT,
+    VK_VALIDATION_CACHE_HEADER_VERSION_RANGE_SIZE_EXT = (VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT - VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT + 1),
+    VK_VALIDATION_CACHE_HEADER_VERSION_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkValidationCacheHeaderVersionEXT;
+typedef VkFlags VkValidationCacheCreateFlagsEXT;
+typedef struct VkValidationCacheCreateInfoEXT {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    VkValidationCacheCreateFlagsEXT    flags;
+    size_t                             initialDataSize;
+    const void*                        pInitialData;
+} VkValidationCacheCreateInfoEXT;
+
+typedef struct VkShaderModuleValidationCacheCreateInfoEXT {
+    VkStructureType         sType;
+    const void*             pNext;
+    VkValidationCacheEXT    validationCache;
+} VkShaderModuleValidationCacheCreateInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateValidationCacheEXT)(VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache);
+typedef void (VKAPI_PTR *PFN_vkDestroyValidationCacheEXT)(VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkMergeValidationCachesEXT)(VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches);
+typedef VkResult (VKAPI_PTR *PFN_vkGetValidationCacheDataEXT)(VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateValidationCacheEXT(
+    VkDevice                                    device,
+    const VkValidationCacheCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkValidationCacheEXT*                       pValidationCache);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyValidationCacheEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkMergeValidationCachesEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkValidationCacheEXT*                 pSrcCaches);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetValidationCacheDataEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    size_t*                                     pDataSize,
+    void*                                       pData);
+#endif
+
+
+#define VK_EXT_descriptor_indexing 1
+#define VK_EXT_DESCRIPTOR_INDEXING_SPEC_VERSION 2
+#define VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME "VK_EXT_descriptor_indexing"
+
+typedef enum VkDescriptorBindingFlagBitsEXT {
+    VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT = 0x00000001,
+    VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT = 0x00000002,
+    VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT = 0x00000004,
+    VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT = 0x00000008,
+    VK_DESCRIPTOR_BINDING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDescriptorBindingFlagBitsEXT;
+typedef VkFlags VkDescriptorBindingFlagsEXT;
+typedef struct VkDescriptorSetLayoutBindingFlagsCreateInfoEXT {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    uint32_t                              bindingCount;
+    const VkDescriptorBindingFlagsEXT*    pBindingFlags;
+} VkDescriptorSetLayoutBindingFlagsCreateInfoEXT;
+
+typedef struct VkPhysicalDeviceDescriptorIndexingFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderInputAttachmentArrayDynamicIndexing;
+    VkBool32           shaderUniformTexelBufferArrayDynamicIndexing;
+    VkBool32           shaderStorageTexelBufferArrayDynamicIndexing;
+    VkBool32           shaderUniformBufferArrayNonUniformIndexing;
+    VkBool32           shaderSampledImageArrayNonUniformIndexing;
+    VkBool32           shaderStorageBufferArrayNonUniformIndexing;
+    VkBool32           shaderStorageImageArrayNonUniformIndexing;
+    VkBool32           shaderInputAttachmentArrayNonUniformIndexing;
+    VkBool32           shaderUniformTexelBufferArrayNonUniformIndexing;
+    VkBool32           shaderStorageTexelBufferArrayNonUniformIndexing;
+    VkBool32           descriptorBindingUniformBufferUpdateAfterBind;
+    VkBool32           descriptorBindingSampledImageUpdateAfterBind;
+    VkBool32           descriptorBindingStorageImageUpdateAfterBind;
+    VkBool32           descriptorBindingStorageBufferUpdateAfterBind;
+    VkBool32           descriptorBindingUniformTexelBufferUpdateAfterBind;
+    VkBool32           descriptorBindingStorageTexelBufferUpdateAfterBind;
+    VkBool32           descriptorBindingUpdateUnusedWhilePending;
+    VkBool32           descriptorBindingPartiallyBound;
+    VkBool32           descriptorBindingVariableDescriptorCount;
+    VkBool32           runtimeDescriptorArray;
+} VkPhysicalDeviceDescriptorIndexingFeaturesEXT;
+
+typedef struct VkPhysicalDeviceDescriptorIndexingPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxUpdateAfterBindDescriptorsInAllPools;
+    VkBool32           shaderUniformBufferArrayNonUniformIndexingNative;
+    VkBool32           shaderSampledImageArrayNonUniformIndexingNative;
+    VkBool32           shaderStorageBufferArrayNonUniformIndexingNative;
+    VkBool32           shaderStorageImageArrayNonUniformIndexingNative;
+    VkBool32           shaderInputAttachmentArrayNonUniformIndexingNative;
+    VkBool32           robustBufferAccessUpdateAfterBind;
+    VkBool32           quadDivergentImplicitLod;
+    uint32_t           maxPerStageDescriptorUpdateAfterBindSamplers;
+    uint32_t           maxPerStageDescriptorUpdateAfterBindUniformBuffers;
+    uint32_t           maxPerStageDescriptorUpdateAfterBindStorageBuffers;
+    uint32_t           maxPerStageDescriptorUpdateAfterBindSampledImages;
+    uint32_t           maxPerStageDescriptorUpdateAfterBindStorageImages;
+    uint32_t           maxPerStageDescriptorUpdateAfterBindInputAttachments;
+    uint32_t           maxPerStageUpdateAfterBindResources;
+    uint32_t           maxDescriptorSetUpdateAfterBindSamplers;
+    uint32_t           maxDescriptorSetUpdateAfterBindUniformBuffers;
+    uint32_t           maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
+    uint32_t           maxDescriptorSetUpdateAfterBindStorageBuffers;
+    uint32_t           maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
+    uint32_t           maxDescriptorSetUpdateAfterBindSampledImages;
+    uint32_t           maxDescriptorSetUpdateAfterBindStorageImages;
+    uint32_t           maxDescriptorSetUpdateAfterBindInputAttachments;
+} VkPhysicalDeviceDescriptorIndexingPropertiesEXT;
+
+typedef struct VkDescriptorSetVariableDescriptorCountAllocateInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           descriptorSetCount;
+    const uint32_t*    pDescriptorCounts;
+} VkDescriptorSetVariableDescriptorCountAllocateInfoEXT;
+
+typedef struct VkDescriptorSetVariableDescriptorCountLayoutSupportEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxVariableDescriptorCount;
+} VkDescriptorSetVariableDescriptorCountLayoutSupportEXT;
+
+
+
+#define VK_EXT_shader_viewport_index_layer 1
+#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_SPEC_VERSION 1
+#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME "VK_EXT_shader_viewport_index_layer"
+
+
+#define VK_NV_shading_rate_image 1
+#define VK_NV_SHADING_RATE_IMAGE_SPEC_VERSION 3
+#define VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME "VK_NV_shading_rate_image"
+
+typedef enum VkShadingRatePaletteEntryNV {
+    VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV = 0,
+    VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV = 1,
+    VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV = 2,
+    VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV = 3,
+    VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV = 4,
+    VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV = 5,
+    VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV = 6,
+    VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV = 7,
+    VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV = 8,
+    VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV = 9,
+    VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV = 10,
+    VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV = 11,
+    VK_SHADING_RATE_PALETTE_ENTRY_BEGIN_RANGE_NV = VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV,
+    VK_SHADING_RATE_PALETTE_ENTRY_END_RANGE_NV = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV,
+    VK_SHADING_RATE_PALETTE_ENTRY_RANGE_SIZE_NV = (VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV - VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV + 1),
+    VK_SHADING_RATE_PALETTE_ENTRY_MAX_ENUM_NV = 0x7FFFFFFF
+} VkShadingRatePaletteEntryNV;
+
+typedef enum VkCoarseSampleOrderTypeNV {
+    VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV = 0,
+    VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV = 1,
+    VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV = 2,
+    VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV = 3,
+    VK_COARSE_SAMPLE_ORDER_TYPE_BEGIN_RANGE_NV = VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV,
+    VK_COARSE_SAMPLE_ORDER_TYPE_END_RANGE_NV = VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV,
+    VK_COARSE_SAMPLE_ORDER_TYPE_RANGE_SIZE_NV = (VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV - VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV + 1),
+    VK_COARSE_SAMPLE_ORDER_TYPE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkCoarseSampleOrderTypeNV;
+typedef struct VkShadingRatePaletteNV {
+    uint32_t                              shadingRatePaletteEntryCount;
+    const VkShadingRatePaletteEntryNV*    pShadingRatePaletteEntries;
+} VkShadingRatePaletteNV;
+
+typedef struct VkPipelineViewportShadingRateImageStateCreateInfoNV {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkBool32                         shadingRateImageEnable;
+    uint32_t                         viewportCount;
+    const VkShadingRatePaletteNV*    pShadingRatePalettes;
+} VkPipelineViewportShadingRateImageStateCreateInfoNV;
+
+typedef struct VkPhysicalDeviceShadingRateImageFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shadingRateImage;
+    VkBool32           shadingRateCoarseSampleOrder;
+} VkPhysicalDeviceShadingRateImageFeaturesNV;
+
+typedef struct VkPhysicalDeviceShadingRateImagePropertiesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkExtent2D         shadingRateTexelSize;
+    uint32_t           shadingRatePaletteSize;
+    uint32_t           shadingRateMaxCoarseSamples;
+} VkPhysicalDeviceShadingRateImagePropertiesNV;
+
+typedef struct VkCoarseSampleLocationNV {
+    uint32_t    pixelX;
+    uint32_t    pixelY;
+    uint32_t    sample;
+} VkCoarseSampleLocationNV;
+
+typedef struct VkCoarseSampleOrderCustomNV {
+    VkShadingRatePaletteEntryNV        shadingRate;
+    uint32_t                           sampleCount;
+    uint32_t                           sampleLocationCount;
+    const VkCoarseSampleLocationNV*    pSampleLocations;
+} VkCoarseSampleOrderCustomNV;
+
+typedef struct VkPipelineViewportCoarseSampleOrderStateCreateInfoNV {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkCoarseSampleOrderTypeNV             sampleOrderType;
+    uint32_t                              customSampleOrderCount;
+    const VkCoarseSampleOrderCustomNV*    pCustomSampleOrders;
+} VkPipelineViewportCoarseSampleOrderStateCreateInfoNV;
+
+typedef void (VKAPI_PTR *PFN_vkCmdBindShadingRateImageNV)(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout);
+typedef void (VKAPI_PTR *PFN_vkCmdSetViewportShadingRatePaletteNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes);
+typedef void (VKAPI_PTR *PFN_vkCmdSetCoarseSampleOrderNV)(VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdBindShadingRateImageNV(
+    VkCommandBuffer                             commandBuffer,
+    VkImageView                                 imageView,
+    VkImageLayout                               imageLayout);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportShadingRatePaletteNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkShadingRatePaletteNV*               pShadingRatePalettes);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCoarseSampleOrderNV(
+    VkCommandBuffer                             commandBuffer,
+    VkCoarseSampleOrderTypeNV                   sampleOrderType,
+    uint32_t                                    customSampleOrderCount,
+    const VkCoarseSampleOrderCustomNV*          pCustomSampleOrders);
+#endif
+
+
+#define VK_NV_ray_tracing 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureNV)
+#define VK_NV_RAY_TRACING_SPEC_VERSION    3
+#define VK_NV_RAY_TRACING_EXTENSION_NAME  "VK_NV_ray_tracing"
+#define VK_SHADER_UNUSED_NV               (~0U)
+
+typedef enum VkAccelerationStructureTypeNV {
+    VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV = 0,
+    VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV = 1,
+    VK_ACCELERATION_STRUCTURE_TYPE_BEGIN_RANGE_NV = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV,
+    VK_ACCELERATION_STRUCTURE_TYPE_END_RANGE_NV = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV,
+    VK_ACCELERATION_STRUCTURE_TYPE_RANGE_SIZE_NV = (VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV - VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV + 1),
+    VK_ACCELERATION_STRUCTURE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkAccelerationStructureTypeNV;
+
+typedef enum VkRayTracingShaderGroupTypeNV {
+    VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV = 0,
+    VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV = 1,
+    VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV = 2,
+    VK_RAY_TRACING_SHADER_GROUP_TYPE_BEGIN_RANGE_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV,
+    VK_RAY_TRACING_SHADER_GROUP_TYPE_END_RANGE_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV,
+    VK_RAY_TRACING_SHADER_GROUP_TYPE_RANGE_SIZE_NV = (VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV - VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV + 1),
+    VK_RAY_TRACING_SHADER_GROUP_TYPE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkRayTracingShaderGroupTypeNV;
+
+typedef enum VkGeometryTypeNV {
+    VK_GEOMETRY_TYPE_TRIANGLES_NV = 0,
+    VK_GEOMETRY_TYPE_AABBS_NV = 1,
+    VK_GEOMETRY_TYPE_BEGIN_RANGE_NV = VK_GEOMETRY_TYPE_TRIANGLES_NV,
+    VK_GEOMETRY_TYPE_END_RANGE_NV = VK_GEOMETRY_TYPE_AABBS_NV,
+    VK_GEOMETRY_TYPE_RANGE_SIZE_NV = (VK_GEOMETRY_TYPE_AABBS_NV - VK_GEOMETRY_TYPE_TRIANGLES_NV + 1),
+    VK_GEOMETRY_TYPE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkGeometryTypeNV;
+
+typedef enum VkCopyAccelerationStructureModeNV {
+    VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV = 0,
+    VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV = 1,
+    VK_COPY_ACCELERATION_STRUCTURE_MODE_BEGIN_RANGE_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV,
+    VK_COPY_ACCELERATION_STRUCTURE_MODE_END_RANGE_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV,
+    VK_COPY_ACCELERATION_STRUCTURE_MODE_RANGE_SIZE_NV = (VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV - VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV + 1),
+    VK_COPY_ACCELERATION_STRUCTURE_MODE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkCopyAccelerationStructureModeNV;
+
+typedef enum VkAccelerationStructureMemoryRequirementsTypeNV {
+    VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV = 0,
+    VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV = 1,
+    VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV = 2,
+    VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BEGIN_RANGE_NV = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV,
+    VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_END_RANGE_NV = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV,
+    VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_RANGE_SIZE_NV = (VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV + 1),
+    VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkAccelerationStructureMemoryRequirementsTypeNV;
+
+typedef enum VkGeometryFlagBitsNV {
+    VK_GEOMETRY_OPAQUE_BIT_NV = 0x00000001,
+    VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV = 0x00000002,
+    VK_GEOMETRY_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkGeometryFlagBitsNV;
+typedef VkFlags VkGeometryFlagsNV;
+
+typedef enum VkGeometryInstanceFlagBitsNV {
+    VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV = 0x00000001,
+    VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV = 0x00000002,
+    VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV = 0x00000004,
+    VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV = 0x00000008,
+    VK_GEOMETRY_INSTANCE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkGeometryInstanceFlagBitsNV;
+typedef VkFlags VkGeometryInstanceFlagsNV;
+
+typedef enum VkBuildAccelerationStructureFlagBitsNV {
+    VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV = 0x00000001,
+    VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV = 0x00000002,
+    VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV = 0x00000004,
+    VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV = 0x00000008,
+    VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV = 0x00000010,
+    VK_BUILD_ACCELERATION_STRUCTURE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkBuildAccelerationStructureFlagBitsNV;
+typedef VkFlags VkBuildAccelerationStructureFlagsNV;
+typedef struct VkRayTracingShaderGroupCreateInfoNV {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkRayTracingShaderGroupTypeNV    type;
+    uint32_t                         generalShader;
+    uint32_t                         closestHitShader;
+    uint32_t                         anyHitShader;
+    uint32_t                         intersectionShader;
+} VkRayTracingShaderGroupCreateInfoNV;
+
+typedef struct VkRayTracingPipelineCreateInfoNV {
+    VkStructureType                               sType;
+    const void*                                   pNext;
+    VkPipelineCreateFlags                         flags;
+    uint32_t                                      stageCount;
+    const VkPipelineShaderStageCreateInfo*        pStages;
+    uint32_t                                      groupCount;
+    const VkRayTracingShaderGroupCreateInfoNV*    pGroups;
+    uint32_t                                      maxRecursionDepth;
+    VkPipelineLayout                              layout;
+    VkPipeline                                    basePipelineHandle;
+    int32_t                                       basePipelineIndex;
+} VkRayTracingPipelineCreateInfoNV;
+
+typedef struct VkGeometryTrianglesNV {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBuffer           vertexData;
+    VkDeviceSize       vertexOffset;
+    uint32_t           vertexCount;
+    VkDeviceSize       vertexStride;
+    VkFormat           vertexFormat;
+    VkBuffer           indexData;
+    VkDeviceSize       indexOffset;
+    uint32_t           indexCount;
+    VkIndexType        indexType;
+    VkBuffer           transformData;
+    VkDeviceSize       transformOffset;
+} VkGeometryTrianglesNV;
+
+typedef struct VkGeometryAABBNV {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBuffer           aabbData;
+    uint32_t           numAABBs;
+    uint32_t           stride;
+    VkDeviceSize       offset;
+} VkGeometryAABBNV;
+
+typedef struct VkGeometryDataNV {
+    VkGeometryTrianglesNV    triangles;
+    VkGeometryAABBNV         aabbs;
+} VkGeometryDataNV;
+
+typedef struct VkGeometryNV {
+    VkStructureType      sType;
+    const void*          pNext;
+    VkGeometryTypeNV     geometryType;
+    VkGeometryDataNV     geometry;
+    VkGeometryFlagsNV    flags;
+} VkGeometryNV;
+
+typedef struct VkAccelerationStructureInfoNV {
+    VkStructureType                        sType;
+    const void*                            pNext;
+    VkAccelerationStructureTypeNV          type;
+    VkBuildAccelerationStructureFlagsNV    flags;
+    uint32_t                               instanceCount;
+    uint32_t                               geometryCount;
+    const VkGeometryNV*                    pGeometries;
+} VkAccelerationStructureInfoNV;
+
+typedef struct VkAccelerationStructureCreateInfoNV {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkDeviceSize                     compactedSize;
+    VkAccelerationStructureInfoNV    info;
+} VkAccelerationStructureCreateInfoNV;
+
+typedef struct VkBindAccelerationStructureMemoryInfoNV {
+    VkStructureType              sType;
+    const void*                  pNext;
+    VkAccelerationStructureNV    accelerationStructure;
+    VkDeviceMemory               memory;
+    VkDeviceSize                 memoryOffset;
+    uint32_t                     deviceIndexCount;
+    const uint32_t*              pDeviceIndices;
+} VkBindAccelerationStructureMemoryInfoNV;
+
+typedef struct VkWriteDescriptorSetAccelerationStructureNV {
+    VkStructureType                     sType;
+    const void*                         pNext;
+    uint32_t                            accelerationStructureCount;
+    const VkAccelerationStructureNV*    pAccelerationStructures;
+} VkWriteDescriptorSetAccelerationStructureNV;
+
+typedef struct VkAccelerationStructureMemoryRequirementsInfoNV {
+    VkStructureType                                    sType;
+    const void*                                        pNext;
+    VkAccelerationStructureMemoryRequirementsTypeNV    type;
+    VkAccelerationStructureNV                          accelerationStructure;
+} VkAccelerationStructureMemoryRequirementsInfoNV;
+
+typedef struct VkPhysicalDeviceRayTracingPropertiesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           shaderGroupHandleSize;
+    uint32_t           maxRecursionDepth;
+    uint32_t           maxShaderGroupStride;
+    uint32_t           shaderGroupBaseAlignment;
+    uint64_t           maxGeometryCount;
+    uint64_t           maxInstanceCount;
+    uint64_t           maxTriangleCount;
+    uint32_t           maxDescriptorSetAccelerationStructures;
+} VkPhysicalDeviceRayTracingPropertiesNV;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateAccelerationStructureNV)(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure);
+typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkGetAccelerationStructureMemoryRequirementsNV)(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements);
+typedef VkResult (VKAPI_PTR *PFN_vkBindAccelerationStructureMemoryNV)(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos);
+typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructureNV)(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureNV)(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode);
+typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysNV)(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateRayTracingPipelinesNV)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines);
+typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupHandlesNV)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData);
+typedef VkResult (VKAPI_PTR *PFN_vkGetAccelerationStructureHandleNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData);
+typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesNV)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery);
+typedef VkResult (VKAPI_PTR *PFN_vkCompileDeferredNV)(VkDevice device, VkPipeline pipeline, uint32_t shader);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateAccelerationStructureNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureCreateInfoNV*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkAccelerationStructureNV*                  pAccelerationStructure);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyAccelerationStructureNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureMemoryRequirementsNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
+    VkMemoryRequirements2KHR*                   pMemoryRequirements);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindAccelerationStructureMemoryNV(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindAccelerationStructureMemoryInfoNV* pBindInfos);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    const VkAccelerationStructureInfoNV*        pInfo,
+    VkBuffer                                    instanceData,
+    VkDeviceSize                                instanceOffset,
+    VkBool32                                    update,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkBuffer                                    scratch,
+    VkDeviceSize                                scratchOffset);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkCopyAccelerationStructureModeNV           mode);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    raygenShaderBindingTableBuffer,
+    VkDeviceSize                                raygenShaderBindingOffset,
+    VkBuffer                                    missShaderBindingTableBuffer,
+    VkDeviceSize                                missShaderBindingOffset,
+    VkDeviceSize                                missShaderBindingStride,
+    VkBuffer                                    hitShaderBindingTableBuffer,
+    VkDeviceSize                                hitShaderBindingOffset,
+    VkDeviceSize                                hitShaderBindingStride,
+    VkBuffer                                    callableShaderBindingTableBuffer,
+    VkDeviceSize                                callableShaderBindingOffset,
+    VkDeviceSize                                callableShaderBindingStride,
+    uint32_t                                    width,
+    uint32_t                                    height,
+    uint32_t                                    depth);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateRayTracingPipelinesNV(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkRayTracingPipelineCreateInfoNV*     pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    firstGroup,
+    uint32_t                                    groupCount,
+    size_t                                      dataSize,
+    void*                                       pData);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetAccelerationStructureHandleNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    size_t                                      dataSize,
+    void*                                       pData);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWriteAccelerationStructuresPropertiesNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    accelerationStructureCount,
+    const VkAccelerationStructureNV*            pAccelerationStructures,
+    VkQueryType                                 queryType,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCompileDeferredNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    shader);
+#endif
+
+
+#define VK_NV_representative_fragment_test 1
+#define VK_NV_REPRESENTATIVE_FRAGMENT_TEST_SPEC_VERSION 2
+#define VK_NV_REPRESENTATIVE_FRAGMENT_TEST_EXTENSION_NAME "VK_NV_representative_fragment_test"
+typedef struct VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           representativeFragmentTest;
+} VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV;
+
+typedef struct VkPipelineRepresentativeFragmentTestStateCreateInfoNV {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           representativeFragmentTestEnable;
+} VkPipelineRepresentativeFragmentTestStateCreateInfoNV;
+
+
+
+#define VK_EXT_filter_cubic 1
+#define VK_EXT_FILTER_CUBIC_SPEC_VERSION  2
+#define VK_EXT_FILTER_CUBIC_EXTENSION_NAME "VK_EXT_filter_cubic"
+typedef struct VkPhysicalDeviceImageViewImageFormatInfoEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkImageViewType    imageViewType;
+} VkPhysicalDeviceImageViewImageFormatInfoEXT;
+
+typedef struct VkFilterCubicImageViewImageFormatPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           filterCubic;
+    VkBool32           filterCubicMinmax;
+} VkFilterCubicImageViewImageFormatPropertiesEXT;
+
+
+
+#define VK_EXT_global_priority 1
+#define VK_EXT_GLOBAL_PRIORITY_SPEC_VERSION 2
+#define VK_EXT_GLOBAL_PRIORITY_EXTENSION_NAME "VK_EXT_global_priority"
+
+typedef enum VkQueueGlobalPriorityEXT {
+    VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT = 128,
+    VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT = 256,
+    VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT = 512,
+    VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT = 1024,
+    VK_QUEUE_GLOBAL_PRIORITY_BEGIN_RANGE_EXT = VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT,
+    VK_QUEUE_GLOBAL_PRIORITY_END_RANGE_EXT = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT,
+    VK_QUEUE_GLOBAL_PRIORITY_RANGE_SIZE_EXT = (VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT - VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT + 1),
+    VK_QUEUE_GLOBAL_PRIORITY_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkQueueGlobalPriorityEXT;
+typedef struct VkDeviceQueueGlobalPriorityCreateInfoEXT {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkQueueGlobalPriorityEXT    globalPriority;
+} VkDeviceQueueGlobalPriorityCreateInfoEXT;
+
+
+
+#define VK_EXT_external_memory_host 1
+#define VK_EXT_EXTERNAL_MEMORY_HOST_SPEC_VERSION 1
+#define VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME "VK_EXT_external_memory_host"
+typedef struct VkImportMemoryHostPointerInfoEXT {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkExternalMemoryHandleTypeFlagBits    handleType;
+    void*                                 pHostPointer;
+} VkImportMemoryHostPointerInfoEXT;
+
+typedef struct VkMemoryHostPointerPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           memoryTypeBits;
+} VkMemoryHostPointerPropertiesEXT;
+
+typedef struct VkPhysicalDeviceExternalMemoryHostPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkDeviceSize       minImportedHostPointerAlignment;
+} VkPhysicalDeviceExternalMemoryHostPropertiesEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryHostPointerPropertiesEXT)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryHostPointerPropertiesEXT(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    const void*                                 pHostPointer,
+    VkMemoryHostPointerPropertiesEXT*           pMemoryHostPointerProperties);
+#endif
+
+
+#define VK_AMD_buffer_marker 1
+#define VK_AMD_BUFFER_MARKER_SPEC_VERSION 1
+#define VK_AMD_BUFFER_MARKER_EXTENSION_NAME "VK_AMD_buffer_marker"
+typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarkerAMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarkerAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    uint32_t                                    marker);
+#endif
+
+
+#define VK_AMD_pipeline_compiler_control 1
+#define VK_AMD_PIPELINE_COMPILER_CONTROL_SPEC_VERSION 1
+#define VK_AMD_PIPELINE_COMPILER_CONTROL_EXTENSION_NAME "VK_AMD_pipeline_compiler_control"
+
+typedef enum VkPipelineCompilerControlFlagBitsAMD {
+    VK_PIPELINE_COMPILER_CONTROL_FLAG_BITS_MAX_ENUM_AMD = 0x7FFFFFFF
+} VkPipelineCompilerControlFlagBitsAMD;
+typedef VkFlags VkPipelineCompilerControlFlagsAMD;
+typedef struct VkPipelineCompilerControlCreateInfoAMD {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkPipelineCompilerControlFlagsAMD    compilerControlFlags;
+} VkPipelineCompilerControlCreateInfoAMD;
+
+
+
+#define VK_EXT_calibrated_timestamps 1
+#define VK_EXT_CALIBRATED_TIMESTAMPS_SPEC_VERSION 1
+#define VK_EXT_CALIBRATED_TIMESTAMPS_EXTENSION_NAME "VK_EXT_calibrated_timestamps"
+
+typedef enum VkTimeDomainEXT {
+    VK_TIME_DOMAIN_DEVICE_EXT = 0,
+    VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT = 1,
+    VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT = 2,
+    VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT = 3,
+    VK_TIME_DOMAIN_BEGIN_RANGE_EXT = VK_TIME_DOMAIN_DEVICE_EXT,
+    VK_TIME_DOMAIN_END_RANGE_EXT = VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT,
+    VK_TIME_DOMAIN_RANGE_SIZE_EXT = (VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT - VK_TIME_DOMAIN_DEVICE_EXT + 1),
+    VK_TIME_DOMAIN_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkTimeDomainEXT;
+typedef struct VkCalibratedTimestampInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkTimeDomainEXT    timeDomain;
+} VkCalibratedTimestampInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT)(VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains);
+typedef VkResult (VKAPI_PTR *PFN_vkGetCalibratedTimestampsEXT)(VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pTimeDomainCount,
+    VkTimeDomainEXT*                            pTimeDomains);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetCalibratedTimestampsEXT(
+    VkDevice                                    device,
+    uint32_t                                    timestampCount,
+    const VkCalibratedTimestampInfoEXT*         pTimestampInfos,
+    uint64_t*                                   pTimestamps,
+    uint64_t*                                   pMaxDeviation);
+#endif
+
+
+#define VK_AMD_shader_core_properties 1
+#define VK_AMD_SHADER_CORE_PROPERTIES_SPEC_VERSION 2
+#define VK_AMD_SHADER_CORE_PROPERTIES_EXTENSION_NAME "VK_AMD_shader_core_properties"
+typedef struct VkPhysicalDeviceShaderCorePropertiesAMD {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           shaderEngineCount;
+    uint32_t           shaderArraysPerEngineCount;
+    uint32_t           computeUnitsPerShaderArray;
+    uint32_t           simdPerComputeUnit;
+    uint32_t           wavefrontsPerSimd;
+    uint32_t           wavefrontSize;
+    uint32_t           sgprsPerSimd;
+    uint32_t           minSgprAllocation;
+    uint32_t           maxSgprAllocation;
+    uint32_t           sgprAllocationGranularity;
+    uint32_t           vgprsPerSimd;
+    uint32_t           minVgprAllocation;
+    uint32_t           maxVgprAllocation;
+    uint32_t           vgprAllocationGranularity;
+} VkPhysicalDeviceShaderCorePropertiesAMD;
+
+
+
+#define VK_AMD_memory_overallocation_behavior 1
+#define VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_SPEC_VERSION 1
+#define VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_EXTENSION_NAME "VK_AMD_memory_overallocation_behavior"
+
+typedef enum VkMemoryOverallocationBehaviorAMD {
+    VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD = 0,
+    VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD = 1,
+    VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD = 2,
+    VK_MEMORY_OVERALLOCATION_BEHAVIOR_BEGIN_RANGE_AMD = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD,
+    VK_MEMORY_OVERALLOCATION_BEHAVIOR_END_RANGE_AMD = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD,
+    VK_MEMORY_OVERALLOCATION_BEHAVIOR_RANGE_SIZE_AMD = (VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD - VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD + 1),
+    VK_MEMORY_OVERALLOCATION_BEHAVIOR_MAX_ENUM_AMD = 0x7FFFFFFF
+} VkMemoryOverallocationBehaviorAMD;
+typedef struct VkDeviceMemoryOverallocationCreateInfoAMD {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkMemoryOverallocationBehaviorAMD    overallocationBehavior;
+} VkDeviceMemoryOverallocationCreateInfoAMD;
+
+
+
+#define VK_EXT_vertex_attribute_divisor 1
+#define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION 3
+#define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME "VK_EXT_vertex_attribute_divisor"
+typedef struct VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxVertexAttribDivisor;
+} VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT;
+
+typedef struct VkVertexInputBindingDivisorDescriptionEXT {
+    uint32_t    binding;
+    uint32_t    divisor;
+} VkVertexInputBindingDivisorDescriptionEXT;
+
+typedef struct VkPipelineVertexInputDivisorStateCreateInfoEXT {
+    VkStructureType                                     sType;
+    const void*                                         pNext;
+    uint32_t                                            vertexBindingDivisorCount;
+    const VkVertexInputBindingDivisorDescriptionEXT*    pVertexBindingDivisors;
+} VkPipelineVertexInputDivisorStateCreateInfoEXT;
+
+typedef struct VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           vertexAttributeInstanceRateDivisor;
+    VkBool32           vertexAttributeInstanceRateZeroDivisor;
+} VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT;
+
+
+
+#define VK_EXT_pipeline_creation_feedback 1
+#define VK_EXT_PIPELINE_CREATION_FEEDBACK_SPEC_VERSION 1
+#define VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME "VK_EXT_pipeline_creation_feedback"
+
+typedef enum VkPipelineCreationFeedbackFlagBitsEXT {
+    VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT = 0x00000001,
+    VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT = 0x00000002,
+    VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT = 0x00000004,
+    VK_PIPELINE_CREATION_FEEDBACK_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkPipelineCreationFeedbackFlagBitsEXT;
+typedef VkFlags VkPipelineCreationFeedbackFlagsEXT;
+typedef struct VkPipelineCreationFeedbackEXT {
+    VkPipelineCreationFeedbackFlagsEXT    flags;
+    uint64_t                              duration;
+} VkPipelineCreationFeedbackEXT;
+
+typedef struct VkPipelineCreationFeedbackCreateInfoEXT {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkPipelineCreationFeedbackEXT*    pPipelineCreationFeedback;
+    uint32_t                          pipelineStageCreationFeedbackCount;
+    VkPipelineCreationFeedbackEXT*    pPipelineStageCreationFeedbacks;
+} VkPipelineCreationFeedbackCreateInfoEXT;
+
+
+
+#define VK_NV_shader_subgroup_partitioned 1
+#define VK_NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION 1
+#define VK_NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME "VK_NV_shader_subgroup_partitioned"
+
+
+#define VK_NV_compute_shader_derivatives 1
+#define VK_NV_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION 1
+#define VK_NV_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME "VK_NV_compute_shader_derivatives"
+typedef struct VkPhysicalDeviceComputeShaderDerivativesFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           computeDerivativeGroupQuads;
+    VkBool32           computeDerivativeGroupLinear;
+} VkPhysicalDeviceComputeShaderDerivativesFeaturesNV;
+
+
+
+#define VK_NV_mesh_shader 1
+#define VK_NV_MESH_SHADER_SPEC_VERSION    1
+#define VK_NV_MESH_SHADER_EXTENSION_NAME  "VK_NV_mesh_shader"
+typedef struct VkPhysicalDeviceMeshShaderFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           taskShader;
+    VkBool32           meshShader;
+} VkPhysicalDeviceMeshShaderFeaturesNV;
+
+typedef struct VkPhysicalDeviceMeshShaderPropertiesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxDrawMeshTasksCount;
+    uint32_t           maxTaskWorkGroupInvocations;
+    uint32_t           maxTaskWorkGroupSize[3];
+    uint32_t           maxTaskTotalMemorySize;
+    uint32_t           maxTaskOutputCount;
+    uint32_t           maxMeshWorkGroupInvocations;
+    uint32_t           maxMeshWorkGroupSize[3];
+    uint32_t           maxMeshTotalMemorySize;
+    uint32_t           maxMeshOutputVertices;
+    uint32_t           maxMeshOutputPrimitives;
+    uint32_t           maxMeshMultiviewViewCount;
+    uint32_t           meshOutputPerVertexGranularity;
+    uint32_t           meshOutputPerPrimitiveGranularity;
+} VkPhysicalDeviceMeshShaderPropertiesNV;
+
+typedef struct VkDrawMeshTasksIndirectCommandNV {
+    uint32_t    taskCount;
+    uint32_t    firstTask;
+} VkDrawMeshTasksIndirectCommandNV;
+
+typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksNV)(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectNV)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectCountNV)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    taskCount,
+    uint32_t                                    firstTask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectCountNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+#endif
+
+
+#define VK_NV_fragment_shader_barycentric 1
+#define VK_NV_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION 1
+#define VK_NV_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME "VK_NV_fragment_shader_barycentric"
+typedef struct VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           fragmentShaderBarycentric;
+} VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV;
+
+
+
+#define VK_NV_shader_image_footprint 1
+#define VK_NV_SHADER_IMAGE_FOOTPRINT_SPEC_VERSION 2
+#define VK_NV_SHADER_IMAGE_FOOTPRINT_EXTENSION_NAME "VK_NV_shader_image_footprint"
+typedef struct VkPhysicalDeviceShaderImageFootprintFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           imageFootprint;
+} VkPhysicalDeviceShaderImageFootprintFeaturesNV;
+
+
+
+#define VK_NV_scissor_exclusive 1
+#define VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION 1
+#define VK_NV_SCISSOR_EXCLUSIVE_EXTENSION_NAME "VK_NV_scissor_exclusive"
+typedef struct VkPipelineViewportExclusiveScissorStateCreateInfoNV {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           exclusiveScissorCount;
+    const VkRect2D*    pExclusiveScissors;
+} VkPipelineViewportExclusiveScissorStateCreateInfoNV;
+
+typedef struct VkPhysicalDeviceExclusiveScissorFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           exclusiveScissor;
+} VkPhysicalDeviceExclusiveScissorFeaturesNV;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetExclusiveScissorNV)(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetExclusiveScissorNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstExclusiveScissor,
+    uint32_t                                    exclusiveScissorCount,
+    const VkRect2D*                             pExclusiveScissors);
+#endif
+
+
+#define VK_NV_device_diagnostic_checkpoints 1
+#define VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_SPEC_VERSION 2
+#define VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_EXTENSION_NAME "VK_NV_device_diagnostic_checkpoints"
+typedef struct VkQueueFamilyCheckpointPropertiesNV {
+    VkStructureType         sType;
+    void*                   pNext;
+    VkPipelineStageFlags    checkpointExecutionStageMask;
+} VkQueueFamilyCheckpointPropertiesNV;
+
+typedef struct VkCheckpointDataNV {
+    VkStructureType            sType;
+    void*                      pNext;
+    VkPipelineStageFlagBits    stage;
+    void*                      pCheckpointMarker;
+} VkCheckpointDataNV;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetCheckpointNV)(VkCommandBuffer commandBuffer, const void* pCheckpointMarker);
+typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointDataNV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCheckpointNV(
+    VkCommandBuffer                             commandBuffer,
+    const void*                                 pCheckpointMarker);
+
+VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointDataNV(
+    VkQueue                                     queue,
+    uint32_t*                                   pCheckpointDataCount,
+    VkCheckpointDataNV*                         pCheckpointData);
+#endif
+
+
+#define VK_INTEL_shader_integer_functions2 1
+#define VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_SPEC_VERSION 1
+#define VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_EXTENSION_NAME "VK_INTEL_shader_integer_functions2"
+typedef struct VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderIntegerFunctions2;
+} VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
+
+
+
+#define VK_INTEL_performance_query 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPerformanceConfigurationINTEL)
+#define VK_INTEL_PERFORMANCE_QUERY_SPEC_VERSION 1
+#define VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME "VK_INTEL_performance_query"
+
+typedef enum VkPerformanceConfigurationTypeINTEL {
+    VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL = 0,
+    VK_PERFORMANCE_CONFIGURATION_TYPE_BEGIN_RANGE_INTEL = VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL,
+    VK_PERFORMANCE_CONFIGURATION_TYPE_END_RANGE_INTEL = VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL,
+    VK_PERFORMANCE_CONFIGURATION_TYPE_RANGE_SIZE_INTEL = (VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL - VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL + 1),
+    VK_PERFORMANCE_CONFIGURATION_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF
+} VkPerformanceConfigurationTypeINTEL;
+
+typedef enum VkQueryPoolSamplingModeINTEL {
+    VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL = 0,
+    VK_QUERY_POOL_SAMPLING_MODE_BEGIN_RANGE_INTEL = VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL,
+    VK_QUERY_POOL_SAMPLING_MODE_END_RANGE_INTEL = VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL,
+    VK_QUERY_POOL_SAMPLING_MODE_RANGE_SIZE_INTEL = (VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL - VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL + 1),
+    VK_QUERY_POOL_SAMPLING_MODE_MAX_ENUM_INTEL = 0x7FFFFFFF
+} VkQueryPoolSamplingModeINTEL;
+
+typedef enum VkPerformanceOverrideTypeINTEL {
+    VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL = 0,
+    VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL = 1,
+    VK_PERFORMANCE_OVERRIDE_TYPE_BEGIN_RANGE_INTEL = VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL,
+    VK_PERFORMANCE_OVERRIDE_TYPE_END_RANGE_INTEL = VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL,
+    VK_PERFORMANCE_OVERRIDE_TYPE_RANGE_SIZE_INTEL = (VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL - VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL + 1),
+    VK_PERFORMANCE_OVERRIDE_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF
+} VkPerformanceOverrideTypeINTEL;
+
+typedef enum VkPerformanceParameterTypeINTEL {
+    VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL = 0,
+    VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL = 1,
+    VK_PERFORMANCE_PARAMETER_TYPE_BEGIN_RANGE_INTEL = VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL,
+    VK_PERFORMANCE_PARAMETER_TYPE_END_RANGE_INTEL = VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL,
+    VK_PERFORMANCE_PARAMETER_TYPE_RANGE_SIZE_INTEL = (VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL - VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL + 1),
+    VK_PERFORMANCE_PARAMETER_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF
+} VkPerformanceParameterTypeINTEL;
+
+typedef enum VkPerformanceValueTypeINTEL {
+    VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL = 0,
+    VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL = 1,
+    VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL = 2,
+    VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL = 3,
+    VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL = 4,
+    VK_PERFORMANCE_VALUE_TYPE_BEGIN_RANGE_INTEL = VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL,
+    VK_PERFORMANCE_VALUE_TYPE_END_RANGE_INTEL = VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL,
+    VK_PERFORMANCE_VALUE_TYPE_RANGE_SIZE_INTEL = (VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL - VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL + 1),
+    VK_PERFORMANCE_VALUE_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF
+} VkPerformanceValueTypeINTEL;
+typedef union VkPerformanceValueDataINTEL {
+    uint32_t       value32;
+    uint64_t       value64;
+    float          valueFloat;
+    VkBool32       valueBool;
+    const char*    valueString;
+} VkPerformanceValueDataINTEL;
+
+typedef struct VkPerformanceValueINTEL {
+    VkPerformanceValueTypeINTEL    type;
+    VkPerformanceValueDataINTEL    data;
+} VkPerformanceValueINTEL;
+
+typedef struct VkInitializePerformanceApiInfoINTEL {
+    VkStructureType    sType;
+    const void*        pNext;
+    void*              pUserData;
+} VkInitializePerformanceApiInfoINTEL;
+
+typedef struct VkQueryPoolCreateInfoINTEL {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkQueryPoolSamplingModeINTEL    performanceCountersSampling;
+} VkQueryPoolCreateInfoINTEL;
+
+typedef struct VkPerformanceMarkerInfoINTEL {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint64_t           marker;
+} VkPerformanceMarkerInfoINTEL;
+
+typedef struct VkPerformanceStreamMarkerInfoINTEL {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           marker;
+} VkPerformanceStreamMarkerInfoINTEL;
+
+typedef struct VkPerformanceOverrideInfoINTEL {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkPerformanceOverrideTypeINTEL    type;
+    VkBool32                          enable;
+    uint64_t                          parameter;
+} VkPerformanceOverrideInfoINTEL;
+
+typedef struct VkPerformanceConfigurationAcquireInfoINTEL {
+    VkStructureType                        sType;
+    const void*                            pNext;
+    VkPerformanceConfigurationTypeINTEL    type;
+} VkPerformanceConfigurationAcquireInfoINTEL;
+
+typedef VkResult (VKAPI_PTR *PFN_vkInitializePerformanceApiINTEL)(VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo);
+typedef void (VKAPI_PTR *PFN_vkUninitializePerformanceApiINTEL)(VkDevice device);
+typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceMarkerINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceStreamMarkerINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceOverrideINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkAcquirePerformanceConfigurationINTEL)(VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration);
+typedef VkResult (VKAPI_PTR *PFN_vkReleasePerformanceConfigurationINTEL)(VkDevice device, VkPerformanceConfigurationINTEL configuration);
+typedef VkResult (VKAPI_PTR *PFN_vkQueueSetPerformanceConfigurationINTEL)(VkQueue queue, VkPerformanceConfigurationINTEL configuration);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPerformanceParameterINTEL)(VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkInitializePerformanceApiINTEL(
+    VkDevice                                    device,
+    const VkInitializePerformanceApiInfoINTEL*  pInitializeInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkUninitializePerformanceApiINTEL(
+    VkDevice                                    device);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceMarkerInfoINTEL*         pMarkerInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceStreamMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceStreamMarkerInfoINTEL*   pMarkerInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceOverrideINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceOverrideInfoINTEL*       pOverrideInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAcquirePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo,
+    VkPerformanceConfigurationINTEL*            pConfiguration);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkReleasePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    VkPerformanceConfigurationINTEL             configuration);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkQueueSetPerformanceConfigurationINTEL(
+    VkQueue                                     queue,
+    VkPerformanceConfigurationINTEL             configuration);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPerformanceParameterINTEL(
+    VkDevice                                    device,
+    VkPerformanceParameterTypeINTEL             parameter,
+    VkPerformanceValueINTEL*                    pValue);
+#endif
+
+
+#define VK_EXT_pci_bus_info 1
+#define VK_EXT_PCI_BUS_INFO_SPEC_VERSION  2
+#define VK_EXT_PCI_BUS_INFO_EXTENSION_NAME "VK_EXT_pci_bus_info"
+typedef struct VkPhysicalDevicePCIBusInfoPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           pciDomain;
+    uint32_t           pciBus;
+    uint32_t           pciDevice;
+    uint32_t           pciFunction;
+} VkPhysicalDevicePCIBusInfoPropertiesEXT;
+
+
+
+#define VK_AMD_display_native_hdr 1
+#define VK_AMD_DISPLAY_NATIVE_HDR_SPEC_VERSION 1
+#define VK_AMD_DISPLAY_NATIVE_HDR_EXTENSION_NAME "VK_AMD_display_native_hdr"
+typedef struct VkDisplayNativeHdrSurfaceCapabilitiesAMD {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           localDimmingSupport;
+} VkDisplayNativeHdrSurfaceCapabilitiesAMD;
+
+typedef struct VkSwapchainDisplayNativeHdrCreateInfoAMD {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           localDimmingEnable;
+} VkSwapchainDisplayNativeHdrCreateInfoAMD;
+
+typedef void (VKAPI_PTR *PFN_vkSetLocalDimmingAMD)(VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkSetLocalDimmingAMD(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapChain,
+    VkBool32                                    localDimmingEnable);
+#endif
+
+
+#define VK_EXT_fragment_density_map 1
+#define VK_EXT_FRAGMENT_DENSITY_MAP_SPEC_VERSION 1
+#define VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME "VK_EXT_fragment_density_map"
+typedef struct VkPhysicalDeviceFragmentDensityMapFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           fragmentDensityMap;
+    VkBool32           fragmentDensityMapDynamic;
+    VkBool32           fragmentDensityMapNonSubsampledImages;
+} VkPhysicalDeviceFragmentDensityMapFeaturesEXT;
+
+typedef struct VkPhysicalDeviceFragmentDensityMapPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkExtent2D         minFragmentDensityTexelSize;
+    VkExtent2D         maxFragmentDensityTexelSize;
+    VkBool32           fragmentDensityInvocations;
+} VkPhysicalDeviceFragmentDensityMapPropertiesEXT;
+
+typedef struct VkRenderPassFragmentDensityMapCreateInfoEXT {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkAttachmentReference    fragmentDensityMapAttachment;
+} VkRenderPassFragmentDensityMapCreateInfoEXT;
+
+
+
+#define VK_EXT_scalar_block_layout 1
+#define VK_EXT_SCALAR_BLOCK_LAYOUT_SPEC_VERSION 1
+#define VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME "VK_EXT_scalar_block_layout"
+typedef struct VkPhysicalDeviceScalarBlockLayoutFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           scalarBlockLayout;
+} VkPhysicalDeviceScalarBlockLayoutFeaturesEXT;
+
+
+
+#define VK_GOOGLE_hlsl_functionality1 1
+#define VK_GOOGLE_HLSL_FUNCTIONALITY1_SPEC_VERSION 1
+#define VK_GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME "VK_GOOGLE_hlsl_functionality1"
+
+
+#define VK_GOOGLE_decorate_string 1
+#define VK_GOOGLE_DECORATE_STRING_SPEC_VERSION 1
+#define VK_GOOGLE_DECORATE_STRING_EXTENSION_NAME "VK_GOOGLE_decorate_string"
+
+
+#define VK_EXT_subgroup_size_control 1
+#define VK_EXT_SUBGROUP_SIZE_CONTROL_SPEC_VERSION 2
+#define VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME "VK_EXT_subgroup_size_control"
+typedef struct VkPhysicalDeviceSubgroupSizeControlFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           subgroupSizeControl;
+    VkBool32           computeFullSubgroups;
+} VkPhysicalDeviceSubgroupSizeControlFeaturesEXT;
+
+typedef struct VkPhysicalDeviceSubgroupSizeControlPropertiesEXT {
+    VkStructureType       sType;
+    void*                 pNext;
+    uint32_t              minSubgroupSize;
+    uint32_t              maxSubgroupSize;
+    uint32_t              maxComputeWorkgroupSubgroups;
+    VkShaderStageFlags    requiredSubgroupSizeStages;
+} VkPhysicalDeviceSubgroupSizeControlPropertiesEXT;
+
+typedef struct VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           requiredSubgroupSize;
+} VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT;
+
+
+
+#define VK_AMD_shader_core_properties2 1
+#define VK_AMD_SHADER_CORE_PROPERTIES_2_SPEC_VERSION 1
+#define VK_AMD_SHADER_CORE_PROPERTIES_2_EXTENSION_NAME "VK_AMD_shader_core_properties2"
+
+typedef enum VkShaderCorePropertiesFlagBitsAMD {
+    VK_SHADER_CORE_PROPERTIES_FLAG_BITS_MAX_ENUM_AMD = 0x7FFFFFFF
+} VkShaderCorePropertiesFlagBitsAMD;
+typedef VkFlags VkShaderCorePropertiesFlagsAMD;
+typedef struct VkPhysicalDeviceShaderCoreProperties2AMD {
+    VkStructureType                   sType;
+    void*                             pNext;
+    VkShaderCorePropertiesFlagsAMD    shaderCoreFeatures;
+    uint32_t                          activeComputeUnitCount;
+} VkPhysicalDeviceShaderCoreProperties2AMD;
+
+
+
+#define VK_AMD_device_coherent_memory 1
+#define VK_AMD_DEVICE_COHERENT_MEMORY_SPEC_VERSION 1
+#define VK_AMD_DEVICE_COHERENT_MEMORY_EXTENSION_NAME "VK_AMD_device_coherent_memory"
+typedef struct VkPhysicalDeviceCoherentMemoryFeaturesAMD {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           deviceCoherentMemory;
+} VkPhysicalDeviceCoherentMemoryFeaturesAMD;
+
+
+
+#define VK_EXT_memory_budget 1
+#define VK_EXT_MEMORY_BUDGET_SPEC_VERSION 1
+#define VK_EXT_MEMORY_BUDGET_EXTENSION_NAME "VK_EXT_memory_budget"
+typedef struct VkPhysicalDeviceMemoryBudgetPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkDeviceSize       heapBudget[VK_MAX_MEMORY_HEAPS];
+    VkDeviceSize       heapUsage[VK_MAX_MEMORY_HEAPS];
+} VkPhysicalDeviceMemoryBudgetPropertiesEXT;
+
+
+
+#define VK_EXT_memory_priority 1
+#define VK_EXT_MEMORY_PRIORITY_SPEC_VERSION 1
+#define VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME "VK_EXT_memory_priority"
+typedef struct VkPhysicalDeviceMemoryPriorityFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           memoryPriority;
+} VkPhysicalDeviceMemoryPriorityFeaturesEXT;
+
+typedef struct VkMemoryPriorityAllocateInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    float              priority;
+} VkMemoryPriorityAllocateInfoEXT;
+
+
+
+#define VK_NV_dedicated_allocation_image_aliasing 1
+#define VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_SPEC_VERSION 1
+#define VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_EXTENSION_NAME "VK_NV_dedicated_allocation_image_aliasing"
+typedef struct VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           dedicatedAllocationImageAliasing;
+} VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
+
+
+
+#define VK_EXT_buffer_device_address 1
+#define VK_EXT_BUFFER_DEVICE_ADDRESS_SPEC_VERSION 2
+#define VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME "VK_EXT_buffer_device_address"
+typedef struct VkPhysicalDeviceBufferDeviceAddressFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           bufferDeviceAddress;
+    VkBool32           bufferDeviceAddressCaptureReplay;
+    VkBool32           bufferDeviceAddressMultiDevice;
+} VkPhysicalDeviceBufferDeviceAddressFeaturesEXT;
+
+typedef VkPhysicalDeviceBufferDeviceAddressFeaturesEXT VkPhysicalDeviceBufferAddressFeaturesEXT;
+
+typedef VkBufferDeviceAddressInfoKHR VkBufferDeviceAddressInfoEXT;
+
+typedef struct VkBufferDeviceAddressCreateInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkDeviceAddress    deviceAddress;
+} VkBufferDeviceAddressCreateInfoEXT;
+
+typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddressEXT)(VkDevice device, const VkBufferDeviceAddressInfoKHR* pInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddressEXT(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo);
+#endif
+
+
+#define VK_EXT_tooling_info 1
+#define VK_EXT_TOOLING_INFO_SPEC_VERSION  1
+#define VK_EXT_TOOLING_INFO_EXTENSION_NAME "VK_EXT_tooling_info"
+
+typedef enum VkToolPurposeFlagBitsEXT {
+    VK_TOOL_PURPOSE_VALIDATION_BIT_EXT = 0x00000001,
+    VK_TOOL_PURPOSE_PROFILING_BIT_EXT = 0x00000002,
+    VK_TOOL_PURPOSE_TRACING_BIT_EXT = 0x00000004,
+    VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT_EXT = 0x00000008,
+    VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT_EXT = 0x00000010,
+    VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT = 0x00000020,
+    VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT = 0x00000040,
+    VK_TOOL_PURPOSE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkToolPurposeFlagBitsEXT;
+typedef VkFlags VkToolPurposeFlagsEXT;
+typedef struct VkPhysicalDeviceToolPropertiesEXT {
+    VkStructureType          sType;
+    void*                    pNext;
+    char                     name[VK_MAX_EXTENSION_NAME_SIZE];
+    char                     version[VK_MAX_EXTENSION_NAME_SIZE];
+    VkToolPurposeFlagsEXT    purposes;
+    char                     description[VK_MAX_DESCRIPTION_SIZE];
+    char                     layer[VK_MAX_EXTENSION_NAME_SIZE];
+} VkPhysicalDeviceToolPropertiesEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceToolPropertiesEXT)(VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolPropertiesEXT* pToolProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceToolPropertiesEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pToolCount,
+    VkPhysicalDeviceToolPropertiesEXT*          pToolProperties);
+#endif
+
+
+#define VK_EXT_separate_stencil_usage 1
+#define VK_EXT_SEPARATE_STENCIL_USAGE_SPEC_VERSION 1
+#define VK_EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME "VK_EXT_separate_stencil_usage"
+typedef struct VkImageStencilUsageCreateInfoEXT {
+    VkStructureType      sType;
+    const void*          pNext;
+    VkImageUsageFlags    stencilUsage;
+} VkImageStencilUsageCreateInfoEXT;
+
+
+
+#define VK_EXT_validation_features 1
+#define VK_EXT_VALIDATION_FEATURES_SPEC_VERSION 2
+#define VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME "VK_EXT_validation_features"
+
+typedef enum VkValidationFeatureEnableEXT {
+    VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT = 0,
+    VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT = 1,
+    VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT = 2,
+    VK_VALIDATION_FEATURE_ENABLE_BEGIN_RANGE_EXT = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT,
+    VK_VALIDATION_FEATURE_ENABLE_END_RANGE_EXT = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT,
+    VK_VALIDATION_FEATURE_ENABLE_RANGE_SIZE_EXT = (VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT - VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT + 1),
+    VK_VALIDATION_FEATURE_ENABLE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkValidationFeatureEnableEXT;
+
+typedef enum VkValidationFeatureDisableEXT {
+    VK_VALIDATION_FEATURE_DISABLE_ALL_EXT = 0,
+    VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT = 1,
+    VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT = 2,
+    VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT = 3,
+    VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT = 4,
+    VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT = 5,
+    VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT = 6,
+    VK_VALIDATION_FEATURE_DISABLE_BEGIN_RANGE_EXT = VK_VALIDATION_FEATURE_DISABLE_ALL_EXT,
+    VK_VALIDATION_FEATURE_DISABLE_END_RANGE_EXT = VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT,
+    VK_VALIDATION_FEATURE_DISABLE_RANGE_SIZE_EXT = (VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT - VK_VALIDATION_FEATURE_DISABLE_ALL_EXT + 1),
+    VK_VALIDATION_FEATURE_DISABLE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkValidationFeatureDisableEXT;
+typedef struct VkValidationFeaturesEXT {
+    VkStructureType                         sType;
+    const void*                             pNext;
+    uint32_t                                enabledValidationFeatureCount;
+    const VkValidationFeatureEnableEXT*     pEnabledValidationFeatures;
+    uint32_t                                disabledValidationFeatureCount;
+    const VkValidationFeatureDisableEXT*    pDisabledValidationFeatures;
+} VkValidationFeaturesEXT;
+
+
+
+#define VK_NV_cooperative_matrix 1
+#define VK_NV_COOPERATIVE_MATRIX_SPEC_VERSION 1
+#define VK_NV_COOPERATIVE_MATRIX_EXTENSION_NAME "VK_NV_cooperative_matrix"
+
+typedef enum VkComponentTypeNV {
+    VK_COMPONENT_TYPE_FLOAT16_NV = 0,
+    VK_COMPONENT_TYPE_FLOAT32_NV = 1,
+    VK_COMPONENT_TYPE_FLOAT64_NV = 2,
+    VK_COMPONENT_TYPE_SINT8_NV = 3,
+    VK_COMPONENT_TYPE_SINT16_NV = 4,
+    VK_COMPONENT_TYPE_SINT32_NV = 5,
+    VK_COMPONENT_TYPE_SINT64_NV = 6,
+    VK_COMPONENT_TYPE_UINT8_NV = 7,
+    VK_COMPONENT_TYPE_UINT16_NV = 8,
+    VK_COMPONENT_TYPE_UINT32_NV = 9,
+    VK_COMPONENT_TYPE_UINT64_NV = 10,
+    VK_COMPONENT_TYPE_BEGIN_RANGE_NV = VK_COMPONENT_TYPE_FLOAT16_NV,
+    VK_COMPONENT_TYPE_END_RANGE_NV = VK_COMPONENT_TYPE_UINT64_NV,
+    VK_COMPONENT_TYPE_RANGE_SIZE_NV = (VK_COMPONENT_TYPE_UINT64_NV - VK_COMPONENT_TYPE_FLOAT16_NV + 1),
+    VK_COMPONENT_TYPE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkComponentTypeNV;
+
+typedef enum VkScopeNV {
+    VK_SCOPE_DEVICE_NV = 1,
+    VK_SCOPE_WORKGROUP_NV = 2,
+    VK_SCOPE_SUBGROUP_NV = 3,
+    VK_SCOPE_QUEUE_FAMILY_NV = 5,
+    VK_SCOPE_BEGIN_RANGE_NV = VK_SCOPE_DEVICE_NV,
+    VK_SCOPE_END_RANGE_NV = VK_SCOPE_QUEUE_FAMILY_NV,
+    VK_SCOPE_RANGE_SIZE_NV = (VK_SCOPE_QUEUE_FAMILY_NV - VK_SCOPE_DEVICE_NV + 1),
+    VK_SCOPE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkScopeNV;
+typedef struct VkCooperativeMatrixPropertiesNV {
+    VkStructureType      sType;
+    void*                pNext;
+    uint32_t             MSize;
+    uint32_t             NSize;
+    uint32_t             KSize;
+    VkComponentTypeNV    AType;
+    VkComponentTypeNV    BType;
+    VkComponentTypeNV    CType;
+    VkComponentTypeNV    DType;
+    VkScopeNV            scope;
+} VkCooperativeMatrixPropertiesNV;
+
+typedef struct VkPhysicalDeviceCooperativeMatrixFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           cooperativeMatrix;
+    VkBool32           cooperativeMatrixRobustBufferAccess;
+} VkPhysicalDeviceCooperativeMatrixFeaturesNV;
+
+typedef struct VkPhysicalDeviceCooperativeMatrixPropertiesNV {
+    VkStructureType       sType;
+    void*                 pNext;
+    VkShaderStageFlags    cooperativeMatrixSupportedStages;
+} VkPhysicalDeviceCooperativeMatrixPropertiesNV;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesNV* pProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkCooperativeMatrixPropertiesNV*            pProperties);
+#endif
+
+
+#define VK_NV_coverage_reduction_mode 1
+#define VK_NV_COVERAGE_REDUCTION_MODE_SPEC_VERSION 1
+#define VK_NV_COVERAGE_REDUCTION_MODE_EXTENSION_NAME "VK_NV_coverage_reduction_mode"
+
+typedef enum VkCoverageReductionModeNV {
+    VK_COVERAGE_REDUCTION_MODE_MERGE_NV = 0,
+    VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV = 1,
+    VK_COVERAGE_REDUCTION_MODE_BEGIN_RANGE_NV = VK_COVERAGE_REDUCTION_MODE_MERGE_NV,
+    VK_COVERAGE_REDUCTION_MODE_END_RANGE_NV = VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV,
+    VK_COVERAGE_REDUCTION_MODE_RANGE_SIZE_NV = (VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV - VK_COVERAGE_REDUCTION_MODE_MERGE_NV + 1),
+    VK_COVERAGE_REDUCTION_MODE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkCoverageReductionModeNV;
+typedef VkFlags VkPipelineCoverageReductionStateCreateFlagsNV;
+typedef struct VkPhysicalDeviceCoverageReductionModeFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           coverageReductionMode;
+} VkPhysicalDeviceCoverageReductionModeFeaturesNV;
+
+typedef struct VkPipelineCoverageReductionStateCreateInfoNV {
+    VkStructureType                                  sType;
+    const void*                                      pNext;
+    VkPipelineCoverageReductionStateCreateFlagsNV    flags;
+    VkCoverageReductionModeNV                        coverageReductionMode;
+} VkPipelineCoverageReductionStateCreateInfoNV;
+
+typedef struct VkFramebufferMixedSamplesCombinationNV {
+    VkStructureType              sType;
+    void*                        pNext;
+    VkCoverageReductionModeNV    coverageReductionMode;
+    VkSampleCountFlagBits        rasterizationSamples;
+    VkSampleCountFlags           depthStencilSamples;
+    VkSampleCountFlags           colorSamples;
+} VkFramebufferMixedSamplesCombinationNV;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV)(VkPhysicalDevice physicalDevice, uint32_t* pCombinationCount, VkFramebufferMixedSamplesCombinationNV* pCombinations);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pCombinationCount,
+    VkFramebufferMixedSamplesCombinationNV*     pCombinations);
+#endif
+
+
+#define VK_EXT_fragment_shader_interlock 1
+#define VK_EXT_FRAGMENT_SHADER_INTERLOCK_SPEC_VERSION 1
+#define VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME "VK_EXT_fragment_shader_interlock"
+typedef struct VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           fragmentShaderSampleInterlock;
+    VkBool32           fragmentShaderPixelInterlock;
+    VkBool32           fragmentShaderShadingRateInterlock;
+} VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT;
+
+
+
+#define VK_EXT_ycbcr_image_arrays 1
+#define VK_EXT_YCBCR_IMAGE_ARRAYS_SPEC_VERSION 1
+#define VK_EXT_YCBCR_IMAGE_ARRAYS_EXTENSION_NAME "VK_EXT_ycbcr_image_arrays"
+typedef struct VkPhysicalDeviceYcbcrImageArraysFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           ycbcrImageArrays;
+} VkPhysicalDeviceYcbcrImageArraysFeaturesEXT;
+
+
+
+#define VK_EXT_headless_surface 1
+#define VK_EXT_HEADLESS_SURFACE_SPEC_VERSION 1
+#define VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME "VK_EXT_headless_surface"
+typedef VkFlags VkHeadlessSurfaceCreateFlagsEXT;
+typedef struct VkHeadlessSurfaceCreateInfoEXT {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    VkHeadlessSurfaceCreateFlagsEXT    flags;
+} VkHeadlessSurfaceCreateInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateHeadlessSurfaceEXT)(VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateHeadlessSurfaceEXT(
+    VkInstance                                  instance,
+    const VkHeadlessSurfaceCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif
+
+
+#define VK_EXT_line_rasterization 1
+#define VK_EXT_LINE_RASTERIZATION_SPEC_VERSION 1
+#define VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME "VK_EXT_line_rasterization"
+
+typedef enum VkLineRasterizationModeEXT {
+    VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT = 0,
+    VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT = 1,
+    VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT = 2,
+    VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT = 3,
+    VK_LINE_RASTERIZATION_MODE_BEGIN_RANGE_EXT = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT,
+    VK_LINE_RASTERIZATION_MODE_END_RANGE_EXT = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT,
+    VK_LINE_RASTERIZATION_MODE_RANGE_SIZE_EXT = (VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT - VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT + 1),
+    VK_LINE_RASTERIZATION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkLineRasterizationModeEXT;
+typedef struct VkPhysicalDeviceLineRasterizationFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           rectangularLines;
+    VkBool32           bresenhamLines;
+    VkBool32           smoothLines;
+    VkBool32           stippledRectangularLines;
+    VkBool32           stippledBresenhamLines;
+    VkBool32           stippledSmoothLines;
+} VkPhysicalDeviceLineRasterizationFeaturesEXT;
+
+typedef struct VkPhysicalDeviceLineRasterizationPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           lineSubPixelPrecisionBits;
+} VkPhysicalDeviceLineRasterizationPropertiesEXT;
+
+typedef struct VkPipelineRasterizationLineStateCreateInfoEXT {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkLineRasterizationModeEXT    lineRasterizationMode;
+    VkBool32                      stippledLineEnable;
+    uint32_t                      lineStippleFactor;
+    uint16_t                      lineStipplePattern;
+} VkPipelineRasterizationLineStateCreateInfoEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetLineStippleEXT)(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    lineStippleFactor,
+    uint16_t                                    lineStipplePattern);
+#endif
+
+
+#define VK_EXT_host_query_reset 1
+#define VK_EXT_HOST_QUERY_RESET_SPEC_VERSION 1
+#define VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME "VK_EXT_host_query_reset"
+typedef struct VkPhysicalDeviceHostQueryResetFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           hostQueryReset;
+} VkPhysicalDeviceHostQueryResetFeaturesEXT;
+
+typedef void (VKAPI_PTR *PFN_vkResetQueryPoolEXT)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkResetQueryPoolEXT(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount);
+#endif
+
+
+#define VK_EXT_index_type_uint8 1
+#define VK_EXT_INDEX_TYPE_UINT8_SPEC_VERSION 1
+#define VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME "VK_EXT_index_type_uint8"
+typedef struct VkPhysicalDeviceIndexTypeUint8FeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           indexTypeUint8;
+} VkPhysicalDeviceIndexTypeUint8FeaturesEXT;
+
+
+
+#define VK_EXT_shader_demote_to_helper_invocation 1
+#define VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_SPEC_VERSION 1
+#define VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME "VK_EXT_shader_demote_to_helper_invocation"
+typedef struct VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderDemoteToHelperInvocation;
+} VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT;
+
+
+
+#define VK_EXT_texel_buffer_alignment 1
+#define VK_EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION 1
+#define VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME "VK_EXT_texel_buffer_alignment"
+typedef struct VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           texelBufferAlignment;
+} VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT;
+
+typedef struct VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkDeviceSize       storageTexelBufferOffsetAlignmentBytes;
+    VkBool32           storageTexelBufferOffsetSingleTexelAlignment;
+    VkDeviceSize       uniformTexelBufferOffsetAlignmentBytes;
+    VkBool32           uniformTexelBufferOffsetSingleTexelAlignment;
+} VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT;
+
+
+
+#define VK_GOOGLE_user_type 1
+#define VK_GOOGLE_USER_TYPE_SPEC_VERSION  1
+#define VK_GOOGLE_USER_TYPE_EXTENSION_NAME "VK_GOOGLE_user_type"
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/src/third_party/vulkan-headers/src/include/vulkan/vulkan_fuchsia.h b/src/third_party/vulkan-headers/src/include/vulkan/vulkan_fuchsia.h
new file mode 100644
index 0000000..81ebe55
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/include/vulkan/vulkan_fuchsia.h
@@ -0,0 +1,57 @@
+#ifndef VULKAN_FUCHSIA_H_
+#define VULKAN_FUCHSIA_H_ 1
+
+/*
+** Copyright (c) 2015-2019 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_FUCHSIA_imagepipe_surface 1
+#define VK_FUCHSIA_IMAGEPIPE_SURFACE_SPEC_VERSION 1
+#define VK_FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME "VK_FUCHSIA_imagepipe_surface"
+typedef VkFlags VkImagePipeSurfaceCreateFlagsFUCHSIA;
+typedef struct VkImagePipeSurfaceCreateInfoFUCHSIA {
+    VkStructureType                         sType;
+    const void*                             pNext;
+    VkImagePipeSurfaceCreateFlagsFUCHSIA    flags;
+    zx_handle_t                             imagePipeHandle;
+} VkImagePipeSurfaceCreateInfoFUCHSIA;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateImagePipeSurfaceFUCHSIA)(VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateImagePipeSurfaceFUCHSIA(
+    VkInstance                                  instance,
+    const VkImagePipeSurfaceCreateInfoFUCHSIA*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/src/third_party/vulkan-headers/src/include/vulkan/vulkan_ggp.h b/src/third_party/vulkan-headers/src/include/vulkan/vulkan_ggp.h
new file mode 100644
index 0000000..fd30613
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/include/vulkan/vulkan_ggp.h
@@ -0,0 +1,68 @@
+#ifndef VULKAN_GGP_H_
+#define VULKAN_GGP_H_ 1
+
+/*
+** Copyright (c) 2015-2019 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_GGP_stream_descriptor_surface 1
+#define VK_GGP_STREAM_DESCRIPTOR_SURFACE_SPEC_VERSION 1
+#define VK_GGP_STREAM_DESCRIPTOR_SURFACE_EXTENSION_NAME "VK_GGP_stream_descriptor_surface"
+typedef VkFlags VkStreamDescriptorSurfaceCreateFlagsGGP;
+typedef struct VkStreamDescriptorSurfaceCreateInfoGGP {
+    VkStructureType                            sType;
+    const void*                                pNext;
+    VkStreamDescriptorSurfaceCreateFlagsGGP    flags;
+    GgpStreamDescriptor                        streamDescriptor;
+} VkStreamDescriptorSurfaceCreateInfoGGP;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateStreamDescriptorSurfaceGGP)(VkInstance instance, const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateStreamDescriptorSurfaceGGP(
+    VkInstance                                  instance,
+    const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif
+
+
+#define VK_GGP_frame_token 1
+#define VK_GGP_FRAME_TOKEN_SPEC_VERSION   1
+#define VK_GGP_FRAME_TOKEN_EXTENSION_NAME "VK_GGP_frame_token"
+typedef struct VkPresentFrameTokenGGP {
+    VkStructureType    sType;
+    const void*        pNext;
+    GgpFrameToken      frameToken;
+} VkPresentFrameTokenGGP;
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/src/third_party/vulkan-headers/src/include/vulkan/vulkan_ios.h b/src/third_party/vulkan-headers/src/include/vulkan/vulkan_ios.h
new file mode 100644
index 0000000..72ef1a8
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/include/vulkan/vulkan_ios.h
@@ -0,0 +1,57 @@
+#ifndef VULKAN_IOS_H_
+#define VULKAN_IOS_H_ 1
+
+/*
+** Copyright (c) 2015-2019 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_MVK_ios_surface 1
+#define VK_MVK_IOS_SURFACE_SPEC_VERSION   2
+#define VK_MVK_IOS_SURFACE_EXTENSION_NAME "VK_MVK_ios_surface"
+typedef VkFlags VkIOSSurfaceCreateFlagsMVK;
+typedef struct VkIOSSurfaceCreateInfoMVK {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkIOSSurfaceCreateFlagsMVK    flags;
+    const void*                   pView;
+} VkIOSSurfaceCreateInfoMVK;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateIOSSurfaceMVK)(VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateIOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkIOSSurfaceCreateInfoMVK*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/src/third_party/vulkan-headers/src/include/vulkan/vulkan_macos.h b/src/third_party/vulkan-headers/src/include/vulkan/vulkan_macos.h
new file mode 100644
index 0000000..e6e5dea
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/include/vulkan/vulkan_macos.h
@@ -0,0 +1,57 @@
+#ifndef VULKAN_MACOS_H_
+#define VULKAN_MACOS_H_ 1
+
+/*
+** Copyright (c) 2015-2019 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_MVK_macos_surface 1
+#define VK_MVK_MACOS_SURFACE_SPEC_VERSION 2
+#define VK_MVK_MACOS_SURFACE_EXTENSION_NAME "VK_MVK_macos_surface"
+typedef VkFlags VkMacOSSurfaceCreateFlagsMVK;
+typedef struct VkMacOSSurfaceCreateInfoMVK {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkMacOSSurfaceCreateFlagsMVK    flags;
+    const void*                     pView;
+} VkMacOSSurfaceCreateInfoMVK;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateMacOSSurfaceMVK)(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateMacOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkMacOSSurfaceCreateInfoMVK*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/src/third_party/vulkan-headers/src/include/vulkan/vulkan_metal.h b/src/third_party/vulkan-headers/src/include/vulkan/vulkan_metal.h
new file mode 100644
index 0000000..3dec68c
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/include/vulkan/vulkan_metal.h
@@ -0,0 +1,64 @@
+#ifndef VULKAN_METAL_H_
+#define VULKAN_METAL_H_ 1
+
+/*
+** Copyright (c) 2015-2019 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_EXT_metal_surface 1
+
+#ifdef __OBJC__
+@class CAMetalLayer;
+#else
+typedef void CAMetalLayer;
+#endif
+
+#define VK_EXT_METAL_SURFACE_SPEC_VERSION 1
+#define VK_EXT_METAL_SURFACE_EXTENSION_NAME "VK_EXT_metal_surface"
+typedef VkFlags VkMetalSurfaceCreateFlagsEXT;
+typedef struct VkMetalSurfaceCreateInfoEXT {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkMetalSurfaceCreateFlagsEXT    flags;
+    const CAMetalLayer*             pLayer;
+} VkMetalSurfaceCreateInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateMetalSurfaceEXT)(VkInstance instance, const VkMetalSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateMetalSurfaceEXT(
+    VkInstance                                  instance,
+    const VkMetalSurfaceCreateInfoEXT*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/src/third_party/vulkan-headers/src/include/vulkan/vulkan_vi.h b/src/third_party/vulkan-headers/src/include/vulkan/vulkan_vi.h
new file mode 100644
index 0000000..6fb66f9
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/include/vulkan/vulkan_vi.h
@@ -0,0 +1,57 @@
+#ifndef VULKAN_VI_H_
+#define VULKAN_VI_H_ 1
+
+/*
+** Copyright (c) 2015-2019 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_NN_vi_surface 1
+#define VK_NN_VI_SURFACE_SPEC_VERSION     1
+#define VK_NN_VI_SURFACE_EXTENSION_NAME   "VK_NN_vi_surface"
+typedef VkFlags VkViSurfaceCreateFlagsNN;
+typedef struct VkViSurfaceCreateInfoNN {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkViSurfaceCreateFlagsNN    flags;
+    void*                       window;
+} VkViSurfaceCreateInfoNN;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateViSurfaceNN)(VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateViSurfaceNN(
+    VkInstance                                  instance,
+    const VkViSurfaceCreateInfoNN*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/src/third_party/vulkan-headers/src/include/vulkan/vulkan_wayland.h b/src/third_party/vulkan-headers/src/include/vulkan/vulkan_wayland.h
new file mode 100644
index 0000000..599d05b
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/include/vulkan/vulkan_wayland.h
@@ -0,0 +1,64 @@
+#ifndef VULKAN_WAYLAND_H_
+#define VULKAN_WAYLAND_H_ 1
+
+/*
+** Copyright (c) 2015-2019 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_KHR_wayland_surface 1
+#define VK_KHR_WAYLAND_SURFACE_SPEC_VERSION 6
+#define VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME "VK_KHR_wayland_surface"
+typedef VkFlags VkWaylandSurfaceCreateFlagsKHR;
+typedef struct VkWaylandSurfaceCreateInfoKHR {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkWaylandSurfaceCreateFlagsKHR    flags;
+    struct wl_display*                display;
+    struct wl_surface*                surface;
+} VkWaylandSurfaceCreateInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateWaylandSurfaceKHR)(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateWaylandSurfaceKHR(
+    VkInstance                                  instance,
+    const VkWaylandSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWaylandPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    struct wl_display*                          display);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/src/third_party/vulkan-headers/src/include/vulkan/vulkan_win32.h b/src/third_party/vulkan-headers/src/include/vulkan/vulkan_win32.h
new file mode 100644
index 0000000..20a1dc0
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/include/vulkan/vulkan_win32.h
@@ -0,0 +1,328 @@
+#ifndef VULKAN_WIN32_H_
+#define VULKAN_WIN32_H_ 1
+
+/*
+** Copyright (c) 2015-2019 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_KHR_win32_surface 1
+#define VK_KHR_WIN32_SURFACE_SPEC_VERSION 6
+#define VK_KHR_WIN32_SURFACE_EXTENSION_NAME "VK_KHR_win32_surface"
+typedef VkFlags VkWin32SurfaceCreateFlagsKHR;
+typedef struct VkWin32SurfaceCreateInfoKHR {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkWin32SurfaceCreateFlagsKHR    flags;
+    HINSTANCE                       hinstance;
+    HWND                            hwnd;
+} VkWin32SurfaceCreateInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateWin32SurfaceKHR)(VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR(
+    VkInstance                                  instance,
+    const VkWin32SurfaceCreateInfoKHR*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWin32PresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex);
+#endif
+
+
+#define VK_KHR_external_memory_win32 1
+#define VK_KHR_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_KHR_external_memory_win32"
+typedef struct VkImportMemoryWin32HandleInfoKHR {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkExternalMemoryHandleTypeFlagBits    handleType;
+    HANDLE                                handle;
+    LPCWSTR                               name;
+} VkImportMemoryWin32HandleInfoKHR;
+
+typedef struct VkExportMemoryWin32HandleInfoKHR {
+    VkStructureType               sType;
+    const void*                   pNext;
+    const SECURITY_ATTRIBUTES*    pAttributes;
+    DWORD                         dwAccess;
+    LPCWSTR                       name;
+} VkExportMemoryWin32HandleInfoKHR;
+
+typedef struct VkMemoryWin32HandlePropertiesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           memoryTypeBits;
+} VkMemoryWin32HandlePropertiesKHR;
+
+typedef struct VkMemoryGetWin32HandleInfoKHR {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkDeviceMemory                        memory;
+    VkExternalMemoryHandleTypeFlagBits    handleType;
+} VkMemoryGetWin32HandleInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandleKHR)(VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle);
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandlePropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandleKHR(
+    VkDevice                                    device,
+    const VkMemoryGetWin32HandleInfoKHR*        pGetWin32HandleInfo,
+    HANDLE*                                     pHandle);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandlePropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    HANDLE                                      handle,
+    VkMemoryWin32HandlePropertiesKHR*           pMemoryWin32HandleProperties);
+#endif
+
+
+#define VK_KHR_win32_keyed_mutex 1
+#define VK_KHR_WIN32_KEYED_MUTEX_SPEC_VERSION 1
+#define VK_KHR_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_KHR_win32_keyed_mutex"
+typedef struct VkWin32KeyedMutexAcquireReleaseInfoKHR {
+    VkStructureType          sType;
+    const void*              pNext;
+    uint32_t                 acquireCount;
+    const VkDeviceMemory*    pAcquireSyncs;
+    const uint64_t*          pAcquireKeys;
+    const uint32_t*          pAcquireTimeouts;
+    uint32_t                 releaseCount;
+    const VkDeviceMemory*    pReleaseSyncs;
+    const uint64_t*          pReleaseKeys;
+} VkWin32KeyedMutexAcquireReleaseInfoKHR;
+
+
+
+#define VK_KHR_external_semaphore_win32 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_WIN32_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME "VK_KHR_external_semaphore_win32"
+typedef struct VkImportSemaphoreWin32HandleInfoKHR {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkSemaphore                              semaphore;
+    VkSemaphoreImportFlags                   flags;
+    VkExternalSemaphoreHandleTypeFlagBits    handleType;
+    HANDLE                                   handle;
+    LPCWSTR                                  name;
+} VkImportSemaphoreWin32HandleInfoKHR;
+
+typedef struct VkExportSemaphoreWin32HandleInfoKHR {
+    VkStructureType               sType;
+    const void*                   pNext;
+    const SECURITY_ATTRIBUTES*    pAttributes;
+    DWORD                         dwAccess;
+    LPCWSTR                       name;
+} VkExportSemaphoreWin32HandleInfoKHR;
+
+typedef struct VkD3D12FenceSubmitInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           waitSemaphoreValuesCount;
+    const uint64_t*    pWaitSemaphoreValues;
+    uint32_t           signalSemaphoreValuesCount;
+    const uint64_t*    pSignalSemaphoreValues;
+} VkD3D12FenceSubmitInfoKHR;
+
+typedef struct VkSemaphoreGetWin32HandleInfoKHR {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkSemaphore                              semaphore;
+    VkExternalSemaphoreHandleTypeFlagBits    handleType;
+} VkSemaphoreGetWin32HandleInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreWin32HandleKHR)(VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreWin32HandleKHR)(VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreWin32HandleInfoKHR*  pImportSemaphoreWin32HandleInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetWin32HandleInfoKHR*     pGetWin32HandleInfo,
+    HANDLE*                                     pHandle);
+#endif
+
+
+#define VK_KHR_external_fence_win32 1
+#define VK_KHR_EXTERNAL_FENCE_WIN32_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME "VK_KHR_external_fence_win32"
+typedef struct VkImportFenceWin32HandleInfoKHR {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkFence                              fence;
+    VkFenceImportFlags                   flags;
+    VkExternalFenceHandleTypeFlagBits    handleType;
+    HANDLE                               handle;
+    LPCWSTR                              name;
+} VkImportFenceWin32HandleInfoKHR;
+
+typedef struct VkExportFenceWin32HandleInfoKHR {
+    VkStructureType               sType;
+    const void*                   pNext;
+    const SECURITY_ATTRIBUTES*    pAttributes;
+    DWORD                         dwAccess;
+    LPCWSTR                       name;
+} VkExportFenceWin32HandleInfoKHR;
+
+typedef struct VkFenceGetWin32HandleInfoKHR {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkFence                              fence;
+    VkExternalFenceHandleTypeFlagBits    handleType;
+} VkFenceGetWin32HandleInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkImportFenceWin32HandleKHR)(VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkGetFenceWin32HandleKHR)(VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportFenceWin32HandleInfoKHR*      pImportFenceWin32HandleInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkFenceGetWin32HandleInfoKHR*         pGetWin32HandleInfo,
+    HANDLE*                                     pHandle);
+#endif
+
+
+#define VK_NV_external_memory_win32 1
+#define VK_NV_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1
+#define VK_NV_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_NV_external_memory_win32"
+typedef struct VkImportMemoryWin32HandleInfoNV {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkExternalMemoryHandleTypeFlagsNV    handleType;
+    HANDLE                               handle;
+} VkImportMemoryWin32HandleInfoNV;
+
+typedef struct VkExportMemoryWin32HandleInfoNV {
+    VkStructureType               sType;
+    const void*                   pNext;
+    const SECURITY_ATTRIBUTES*    pAttributes;
+    DWORD                         dwAccess;
+} VkExportMemoryWin32HandleInfoNV;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandleNV)(VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandleNV(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkExternalMemoryHandleTypeFlagsNV           handleType,
+    HANDLE*                                     pHandle);
+#endif
+
+
+#define VK_NV_win32_keyed_mutex 1
+#define VK_NV_WIN32_KEYED_MUTEX_SPEC_VERSION 2
+#define VK_NV_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_NV_win32_keyed_mutex"
+typedef struct VkWin32KeyedMutexAcquireReleaseInfoNV {
+    VkStructureType          sType;
+    const void*              pNext;
+    uint32_t                 acquireCount;
+    const VkDeviceMemory*    pAcquireSyncs;
+    const uint64_t*          pAcquireKeys;
+    const uint32_t*          pAcquireTimeoutMilliseconds;
+    uint32_t                 releaseCount;
+    const VkDeviceMemory*    pReleaseSyncs;
+    const uint64_t*          pReleaseKeys;
+} VkWin32KeyedMutexAcquireReleaseInfoNV;
+
+
+
+#define VK_EXT_full_screen_exclusive 1
+#define VK_EXT_FULL_SCREEN_EXCLUSIVE_SPEC_VERSION 4
+#define VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME "VK_EXT_full_screen_exclusive"
+
+typedef enum VkFullScreenExclusiveEXT {
+    VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT = 0,
+    VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT = 1,
+    VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT = 2,
+    VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT = 3,
+    VK_FULL_SCREEN_EXCLUSIVE_BEGIN_RANGE_EXT = VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT,
+    VK_FULL_SCREEN_EXCLUSIVE_END_RANGE_EXT = VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT,
+    VK_FULL_SCREEN_EXCLUSIVE_RANGE_SIZE_EXT = (VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT - VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT + 1),
+    VK_FULL_SCREEN_EXCLUSIVE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkFullScreenExclusiveEXT;
+typedef struct VkSurfaceFullScreenExclusiveInfoEXT {
+    VkStructureType             sType;
+    void*                       pNext;
+    VkFullScreenExclusiveEXT    fullScreenExclusive;
+} VkSurfaceFullScreenExclusiveInfoEXT;
+
+typedef struct VkSurfaceCapabilitiesFullScreenExclusiveEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           fullScreenExclusiveSupported;
+} VkSurfaceCapabilitiesFullScreenExclusiveEXT;
+
+typedef struct VkSurfaceFullScreenExclusiveWin32InfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    HMONITOR           hmonitor;
+} VkSurfaceFullScreenExclusiveWin32InfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes);
+typedef VkResult (VKAPI_PTR *PFN_vkAcquireFullScreenExclusiveModeEXT)(VkDevice device, VkSwapchainKHR swapchain);
+typedef VkResult (VKAPI_PTR *PFN_vkReleaseFullScreenExclusiveModeEXT)(VkDevice device, VkSwapchainKHR swapchain);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupSurfacePresentModes2EXT)(VkDevice device, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkDeviceGroupPresentModeFlagsKHR* pModes);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModes2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAcquireFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkReleaseFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModes2EXT(
+    VkDevice                                    device,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/src/third_party/vulkan-headers/src/include/vulkan/vulkan_xcb.h b/src/third_party/vulkan-headers/src/include/vulkan/vulkan_xcb.h
new file mode 100644
index 0000000..4cc0bc0
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/include/vulkan/vulkan_xcb.h
@@ -0,0 +1,65 @@
+#ifndef VULKAN_XCB_H_
+#define VULKAN_XCB_H_ 1
+
+/*
+** Copyright (c) 2015-2019 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_KHR_xcb_surface 1
+#define VK_KHR_XCB_SURFACE_SPEC_VERSION   6
+#define VK_KHR_XCB_SURFACE_EXTENSION_NAME "VK_KHR_xcb_surface"
+typedef VkFlags VkXcbSurfaceCreateFlagsKHR;
+typedef struct VkXcbSurfaceCreateInfoKHR {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkXcbSurfaceCreateFlagsKHR    flags;
+    xcb_connection_t*             connection;
+    xcb_window_t                  window;
+} VkXcbSurfaceCreateInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateXcbSurfaceKHR)(VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXcbSurfaceCreateInfoKHR*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXcbPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    xcb_connection_t*                           connection,
+    xcb_visualid_t                              visual_id);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/src/third_party/vulkan-headers/src/include/vulkan/vulkan_xlib.h b/src/third_party/vulkan-headers/src/include/vulkan/vulkan_xlib.h
new file mode 100644
index 0000000..ee2b48a
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/include/vulkan/vulkan_xlib.h
@@ -0,0 +1,65 @@
+#ifndef VULKAN_XLIB_H_
+#define VULKAN_XLIB_H_ 1
+
+/*
+** Copyright (c) 2015-2019 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_KHR_xlib_surface 1
+#define VK_KHR_XLIB_SURFACE_SPEC_VERSION  6
+#define VK_KHR_XLIB_SURFACE_EXTENSION_NAME "VK_KHR_xlib_surface"
+typedef VkFlags VkXlibSurfaceCreateFlagsKHR;
+typedef struct VkXlibSurfaceCreateInfoKHR {
+    VkStructureType                sType;
+    const void*                    pNext;
+    VkXlibSurfaceCreateFlagsKHR    flags;
+    Display*                       dpy;
+    Window                         window;
+} VkXlibSurfaceCreateInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateXlibSurfaceKHR)(VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateXlibSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXlibSurfaceCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXlibPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    Display*                                    dpy,
+    VisualID                                    visualID);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/src/third_party/vulkan-headers/src/include/vulkan/vulkan_xlib_xrandr.h b/src/third_party/vulkan-headers/src/include/vulkan/vulkan_xlib_xrandr.h
new file mode 100644
index 0000000..08c4fd7
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/include/vulkan/vulkan_xlib_xrandr.h
@@ -0,0 +1,55 @@
+#ifndef VULKAN_XLIB_XRANDR_H_
+#define VULKAN_XLIB_XRANDR_H_ 1
+
+/*
+** Copyright (c) 2015-2019 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_EXT_acquire_xlib_display 1
+#define VK_EXT_ACQUIRE_XLIB_DISPLAY_SPEC_VERSION 1
+#define VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME "VK_EXT_acquire_xlib_display"
+typedef VkResult (VKAPI_PTR *PFN_vkAcquireXlibDisplayEXT)(VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display);
+typedef VkResult (VKAPI_PTR *PFN_vkGetRandROutputDisplayEXT)(VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkAcquireXlibDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    VkDisplayKHR                                display);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetRandROutputDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    RROutput                                    rrOutput,
+    VkDisplayKHR*                               pDisplay);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/src/third_party/vulkan-headers/src/registry/cgenerator.py b/src/third_party/vulkan-headers/src/registry/cgenerator.py
new file mode 100644
index 0000000..a416e7d
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/registry/cgenerator.py
@@ -0,0 +1,412 @@
+#!/usr/bin/python3 -i
+#
+# Copyright (c) 2013-2019 The Khronos Group Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import re
+from generator import (GeneratorOptions, OutputGenerator, noneStr,
+                       regSortFeatures, write)
+
+
+class CGeneratorOptions(GeneratorOptions):
+    """CGeneratorOptions - subclass of GeneratorOptions.
+
+    Adds options used by COutputGenerator objects during C language header
+    generation."""
+
+    def __init__(self,
+                 prefixText="",
+                 genFuncPointers=True,
+                 protectFile=True,
+                 protectFeature=True,
+                 protectProto=None,
+                 protectProtoStr=None,
+                 apicall='',
+                 apientry='',
+                 apientryp='',
+                 indentFuncProto=True,
+                 indentFuncPointer=False,
+                 alignFuncParam=0,
+                 genEnumBeginEndRange=False,
+                 genAliasMacro=False,
+                 aliasMacro='',
+                 **kwargs
+                 ):
+        """Constructor.
+        Additional parameters beyond parent class:
+
+        - prefixText - list of strings to prefix generated header with
+        (usually a copyright statement + calling convention macros).
+        - protectFile - True if multiple inclusion protection should be
+        generated (based on the filename) around the entire header.
+        - protectFeature - True if #ifndef..#endif protection should be
+        generated around a feature interface in the header file.
+        - genFuncPointers - True if function pointer typedefs should be
+        generated
+        - protectProto - If conditional protection should be generated
+        around prototype declarations, set to either '#ifdef'
+        to require opt-in (#ifdef protectProtoStr) or '#ifndef'
+        to require opt-out (#ifndef protectProtoStr). Otherwise
+        set to None.
+        - protectProtoStr - #ifdef/#ifndef symbol to use around prototype
+        declarations, if protectProto is set
+        - apicall - string to use for the function declaration prefix,
+        such as APICALL on Windows.
+        - apientry - string to use for the calling convention macro,
+        in typedefs, such as APIENTRY.
+        - apientryp - string to use for the calling convention macro
+        in function pointer typedefs, such as APIENTRYP.
+        - indentFuncProto - True if prototype declarations should put each
+        parameter on a separate line
+        - indentFuncPointer - True if typedefed function pointers should put each
+        parameter on a separate line
+        - alignFuncParam - if nonzero and parameters are being put on a
+        separate line, align parameter names at the specified column
+        - genEnumBeginEndRange - True if BEGIN_RANGE / END_RANGE macros should
+        be generated for enumerated types
+        - genAliasMacro - True if the OpenXR alias macro should be generated
+        for aliased types (it is unclear in what other circumstances this is useful)
+        - aliasMacro - alias macro to inject when genAliasMacro is True"""
+        GeneratorOptions.__init__(self, **kwargs)
+
+        self.prefixText = prefixText
+        """list of strings to prefix generated header with (usually a copyright statement + calling convention macros)."""
+
+        self.genFuncPointers = genFuncPointers
+        """True if function pointer typedefs should be generated"""
+
+        self.protectFile = protectFile
+        """True if multiple inclusion protection should be generated (based on the filename) around the entire header."""
+
+        self.protectFeature = protectFeature
+        """True if #ifndef..#endif protection should be generated around a feature interface in the header file."""
+
+        self.protectProto = protectProto
+        """If conditional protection should be generated around prototype declarations, set to either '#ifdef' to require opt-in (#ifdef protectProtoStr) or '#ifndef' to require opt-out (#ifndef protectProtoStr). Otherwise set to None."""
+
+        self.protectProtoStr = protectProtoStr
+        """#ifdef/#ifndef symbol to use around prototype declarations, if protectProto is set"""
+
+        self.apicall = apicall
+        """string to use for the function declaration prefix, such as APICALL on Windows."""
+
+        self.apientry = apientry
+        """string to use for the calling convention macro, in typedefs, such as APIENTRY."""
+
+        self.apientryp = apientryp
+        """string to use for the calling convention macro in function pointer typedefs, such as APIENTRYP."""
+
+        self.indentFuncProto = indentFuncProto
+        """True if prototype declarations should put each parameter on a separate line"""
+
+        self.indentFuncPointer = indentFuncPointer
+        """True if typedefed function pointers should put each parameter on a separate line"""
+
+        self.alignFuncParam = alignFuncParam
+        """if nonzero and parameters are being put on a separate line, align parameter names at the specified column"""
+
+        self.genEnumBeginEndRange = genEnumBeginEndRange
+        """True if BEGIN_RANGE / END_RANGE macros should be generated for enumerated types"""
+
+        self.genAliasMacro = genAliasMacro
+        """True if the OpenXR alias macro should be generated for aliased types (unclear what other circumstances this is useful)"""
+
+        self.aliasMacro = aliasMacro
+        """alias macro to inject when genAliasMacro is True"""
+
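+# Illustrative usage sketch (the values below are assumptions that mirror how
+# Vulkan headers like the ones above are typically produced; they are not
+# defined by this file): a driver script builds a CGeneratorOptions and hands
+# a COutputGenerator to the registry, which calls the begin*/gen*/end* hooks.
+#
+#   opts = CGeneratorOptions(
+#       filename='vulkan_core.h',           # header guard becomes VULKAN_CORE_H_
+#       apiname='vulkan',
+#       protectFile=True,
+#       protectFeature=False,
+#       protectProto='#ifndef',             # wrap prototypes in VK_NO_PROTOTYPES
+#       protectProtoStr='VK_NO_PROTOTYPES',
+#       apicall='VKAPI_ATTR ',
+#       apientry='VKAPI_CALL ',
+#       apientryp='VKAPI_PTR *',
+#       alignFuncParam=48)
+#   gen = COutputGenerator()
+#   # registry.setGenerator(gen); registry.apiGen(opts)  # emits the header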
+
+class COutputGenerator(OutputGenerator):
+    """Generates C-language API interfaces."""
+
+    # This is an ordered list of sections in the header file.
+    TYPE_SECTIONS = ['include', 'define', 'basetype', 'handle', 'enum',
+                     'group', 'bitmask', 'funcpointer', 'struct']
+    ALL_SECTIONS = TYPE_SECTIONS + ['commandPointer', 'command']
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        # Internal state - accumulators for different inner block text
+        self.sections = {section: [] for section in self.ALL_SECTIONS}
+        self.feature_not_empty = False
+        self.may_alias = None
+
+    def beginFile(self, genOpts):
+        OutputGenerator.beginFile(self, genOpts)
+        # C-specific
+        #
+        # Multiple inclusion protection & C++ wrappers.
+        if genOpts.protectFile and self.genOpts.filename:
+            headerSym = re.sub(r'\.h', '_h_',
+                               os.path.basename(self.genOpts.filename)).upper()
+            write('#ifndef', headerSym, file=self.outFile)
+            write('#define', headerSym, '1', file=self.outFile)
+            self.newline()
+
+        # User-supplied prefix text, if any (list of strings)
+        if genOpts.prefixText:
+            for s in genOpts.prefixText:
+                write(s, file=self.outFile)
+
+        # C++ extern wrapper - after prefix lines so they can add includes.
+        self.newline()
+        write('#ifdef __cplusplus', file=self.outFile)
+        write('extern "C" {', file=self.outFile)
+        write('#endif', file=self.outFile)
+        self.newline()
+
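+    # For orientation: for a header such as vulkan_win32.h this hook produces
+    # the preamble seen at the top of every generated header, roughly
+    #
+    #   #ifndef VULKAN_WIN32_H_
+    #   #define VULKAN_WIN32_H_ 1
+    #   <prefixText: copyright block and "generated from the ... Registry" note>
+    #   #ifdef __cplusplus
+    #   extern "C" {
+    #   #endif
+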
+    def endFile(self):
+        # C-specific
+        # Finish C++ wrapper and multiple inclusion protection
+        self.newline()
+        write('#ifdef __cplusplus', file=self.outFile)
+        write('}', file=self.outFile)
+        write('#endif', file=self.outFile)
+        if self.genOpts.protectFile and self.genOpts.filename:
+            self.newline()
+            write('#endif', file=self.outFile)
+        # Finish processing in superclass
+        OutputGenerator.endFile(self)
+
+    def beginFeature(self, interface, emit):
+        # Start processing in superclass
+        OutputGenerator.beginFeature(self, interface, emit)
+        # C-specific
+        # Accumulate includes, defines, types, enums, function pointer typedefs,
+        # and function prototypes separately for this feature. They're only
+        # printed in endFeature().
+        self.sections = {section: [] for section in self.ALL_SECTIONS}
+        self.feature_not_empty = False
+
+    def endFeature(self):
+        "Actually write the interface to the output file."
+        # C-specific
+        if self.emit:
+            if self.feature_not_empty:
+                if self.genOpts.conventions.writeFeature(self.featureExtraProtect, self.genOpts.filename):
+                    self.newline()
+                    if self.genOpts.protectFeature:
+                        write('#ifndef', self.featureName, file=self.outFile)
+                    # If type declarations are needed by other features based on
+                    # this one, it may be necessary to suppress the ExtraProtect,
+                    # or move it below the 'for section...' loop.
+                    if self.featureExtraProtect is not None:
+                        write('#ifdef', self.featureExtraProtect, file=self.outFile)
+                    self.newline()
+                    write('#define', self.featureName, '1', file=self.outFile)
+                    for section in self.TYPE_SECTIONS:
+                        contents = self.sections[section]
+                        if contents:
+                            write('\n'.join(contents), file=self.outFile)
+                    if self.genOpts.genFuncPointers and self.sections['commandPointer']:
+                        write('\n'.join(self.sections['commandPointer']), file=self.outFile)
+                        self.newline()
+                    if self.sections['command']:
+                        if self.genOpts.protectProto:
+                            write(self.genOpts.protectProto,
+                                  self.genOpts.protectProtoStr, file=self.outFile)
+                        write('\n'.join(self.sections['command']), end='', file=self.outFile)
+                        if self.genOpts.protectProto:
+                            write('#endif', file=self.outFile)
+                        else:
+                            self.newline()
+                    if self.featureExtraProtect is not None:
+                        write('#endif /*', self.featureExtraProtect, '*/', file=self.outFile)
+                    if self.genOpts.protectFeature:
+                        write('#endif /*', self.featureName, '*/', file=self.outFile)
+        # Finish processing in superclass
+        OutputGenerator.endFeature(self)
+
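+    # Illustrative shape of one emitted feature block, using the
+    # VK_KHR_win32_surface extension from vulkan_win32.h as the example:
+    #
+    #   #define VK_KHR_win32_surface 1                              <- featureName define
+    #   ...typedefs/enums/structs...                                <- TYPE_SECTIONS, in order
+    #   typedef VkResult (VKAPI_PTR *PFN_vkCreateWin32SurfaceKHR)(...);   <- 'commandPointer'
+    #   #ifndef VK_NO_PROTOTYPES                                    <- protectProto/protectProtoStr
+    #   VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR(...);      <- 'command'
+    #   #endif
+    #
+    # When protectFeature or featureExtraProtect are set, the whole block is
+    # additionally wrapped in '#ifndef VK_KHR_win32_surface' and/or a platform
+    # '#ifdef' guard.
+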
+    def appendSection(self, section, text):
+        "Append a definition to the specified section"
+        # self.sections[section].append('SECTION: ' + section + '\n')
+        self.sections[section].append(text)
+        self.feature_not_empty = True
+
+    def genType(self, typeinfo, name, alias):
+        "Generate type."
+        OutputGenerator.genType(self, typeinfo, name, alias)
+        typeElem = typeinfo.elem
+
+        # Vulkan:
+        # Determine the category of the type, and the type section to add
+        # its definition to.
+        # 'funcpointer' is added to the 'struct' section as a workaround for
+        # internal issue #877, since structures and function pointer types
+        # can have cross-dependencies.
+        category = typeElem.get('category')
+        if category == 'funcpointer':
+            section = 'struct'
+        else:
+            section = category
+
+        if category in ('struct', 'union'):
+            # If the type is a struct type, generate it using the
+            # special-purpose generator.
+            self.genStruct(typeinfo, name, alias)
+        else:
+            # OpenXR: previously this code was not under 'else:' and simply fell through.
+            if alias:
+                # If the type is an alias, just emit a typedef declaration
+                body = 'typedef ' + alias + ' ' + name + ';\n'
+            else:
+                # Replace <apientry /> tags with an APIENTRY-style string
+                # (from self.genOpts). Copy other text through unchanged.
+                # If the resulting text is an empty string, don't emit it.
+                body = noneStr(typeElem.text)
+                for elem in typeElem:
+                    if elem.tag == 'apientry':
+                        body += self.genOpts.apientry + noneStr(elem.tail)
+                    else:
+                        body += noneStr(elem.text) + noneStr(elem.tail)
+            if body:
+                # Add extra newline after multi-line entries.
+                if '\n' in body[0:-1]:
+                    body += '\n'
+                self.appendSection(section, body)
+
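+    # Summary of the section routing above: 'struct'/'union' types are delegated
+    # to genStruct(); 'funcpointer' types are filed under 'struct' (the issue
+    # #877 workaround); every other category goes to the section of its own name.
+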
+    def genProtectString(self, protect_str):
+        """Generate protection string.
+
+        Protection strings are the strings defining the OS/Platform/Graphics
+        requirements for a given OpenXR command.  When generating the
+        language header files, we need to make sure the items specific to a
+        graphics API or OS platform are properly wrapped in #ifs."""
+        protect_if_str = ''
+        protect_end_str = ''
+        if not protect_str:
+            return (protect_if_str, protect_end_str)
+
+        if ',' in protect_str:
+            protect_list = protect_str.split(",")
+            protect_defs = ('defined(%s)' % d for d in protect_list)
+            protect_def_str = ' && '.join(protect_defs)
+            protect_if_str = '#if %s\n' % protect_def_str
+            protect_end_str = '#endif // %s\n' % protect_def_str
+        else:
+            protect_if_str = '#ifdef %s\n' % protect_str
+            protect_end_str = '#endif // %s\n' % protect_str
+
+        return (protect_if_str, protect_end_str)
+
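+    # Examples of the mapping (the macro names are illustrative Vulkan platform guards):
+    #   genProtectString('VK_USE_PLATFORM_WIN32_KHR')
+    #     -> ('#ifdef VK_USE_PLATFORM_WIN32_KHR\n',
+    #         '#endif // VK_USE_PLATFORM_WIN32_KHR\n')
+    #   genProtectString('VK_USE_PLATFORM_XLIB_KHR,VK_USE_PLATFORM_XCB_KHR')
+    #     -> ('#if defined(VK_USE_PLATFORM_XLIB_KHR) && defined(VK_USE_PLATFORM_XCB_KHR)\n',
+    #         '#endif // defined(VK_USE_PLATFORM_XLIB_KHR) && defined(VK_USE_PLATFORM_XCB_KHR)\n')
+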
+    def typeMayAlias(self, typeName):
+        if not self.may_alias:
+            # First time we've asked if a type may alias.
+            # So, let's populate the set of all names of types that may.
+
+            # Everyone with an explicit mayalias="true"
+            self.may_alias = set(typeName
+                                 for typeName, data in self.registry.typedict.items()
+                                 if data.elem.get('mayalias') == 'true')
+
+            # Every type mentioned in some other type's parentstruct attribute.
+            parent_structs = (otherType.elem.get('parentstruct')
+                              for otherType in self.registry.typedict.values())
+            self.may_alias.update(set(x for x in parent_structs
+                                      if x is not None))
+        return typeName in self.may_alias
+
+    def genStruct(self, typeinfo, typeName, alias):
+        """Generate struct (e.g. C "struct" type).
+
+        This is a special case of the <type> tag where the contents are
+        interpreted as a set of <member> tags instead of freeform C
+        type declarations. The <member> tags are just like <param>
+        tags - they are a declaration of a struct or union member.
+        Only simple member declarations are supported (no nested
+        structs etc.)
+
+        If alias is not None, then this struct aliases another; just
+        generate a typedef of that alias."""
+        OutputGenerator.genStruct(self, typeinfo, typeName, alias)
+
+        typeElem = typeinfo.elem
+
+        if alias:
+            body = 'typedef ' + alias + ' ' + typeName + ';\n'
+        else:
+            body = ''
+            (protect_begin, protect_end) = self.genProtectString(typeElem.get('protect'))
+            if protect_begin:
+                body += protect_begin
+            body += 'typedef ' + typeElem.get('category')
+
+            # This is an OpenXR-specific alternative where aliasing refers
+            # to an inheritance hierarchy of types rather than C-level type
+            # aliases.
+            if self.genOpts.genAliasMacro and self.typeMayAlias(typeName):
+                body += ' ' + self.genOpts.aliasMacro
+
+            body += ' ' + typeName + ' {\n'
+
+            targetLen = self.getMaxCParamTypeLength(typeinfo)
+            for member in typeElem.findall('.//member'):
+                body += self.makeCParamDecl(member, targetLen + 4)
+                body += ';\n'
+            body += '} ' + typeName + ';\n'
+            if protect_end:
+                body += protect_end
+
+        self.appendSection('struct', body)
+
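+    # Illustrative output, using VkWin32SurfaceCreateInfoKHR from vulkan_win32.h
+    # as the example (member columns come from getMaxCParamTypeLength() and
+    # makeCParamDecl(); a 'protect' attribute would add the #if/#endif wrapper):
+    #
+    #   typedef struct VkWin32SurfaceCreateInfoKHR {
+    #       VkStructureType                 sType;
+    #       const void*                     pNext;
+    #       VkWin32SurfaceCreateFlagsKHR    flags;
+    #       HINSTANCE                       hinstance;
+    #       HWND                            hwnd;
+    #   } VkWin32SurfaceCreateInfoKHR;
+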
+    def genGroup(self, groupinfo, groupName, alias=None):
+        """Generate groups (e.g. C "enum" type).
+
+        These are concatenated together with other types.
+
+        If alias is not None, it is the name of another group type
+        which aliases this type; just generate that alias."""
+        OutputGenerator.genGroup(self, groupinfo, groupName, alias)
+        groupElem = groupinfo.elem
+
+        # After either enumerated type or alias paths, add the declaration
+        # to the appropriate section for the group being defined.
+        if groupElem.get('type') == 'bitmask':
+            section = 'bitmask'
+        else:
+            section = 'group'
+
+        if alias:
+            # If the group name is aliased, just emit a typedef declaration
+            # for the alias.
+            body = 'typedef ' + alias + ' ' + groupName + ';\n'
+            self.appendSection(section, body)
+        else:
+            (section, body) = self.buildEnumCDecl(self.genOpts.genEnumBeginEndRange, groupinfo, groupName)
+            self.appendSection(section, "\n" + body)
+
+    def genEnum(self, enuminfo, name, alias):
+        """Generate enumerants.
+
+        <enum> tags may specify their values in several ways, but are usually
+        just integers."""
+        OutputGenerator.genEnum(self, enuminfo, name, alias)
+        (_, strVal) = self.enumToValue(enuminfo.elem, False)
+        body = '#define ' + name.ljust(33) + ' ' + strVal
+        self.appendSection('enum', body)
+
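+    # e.g. the xcb surface extension's spec-version enum comes out as
+    # (name field left-justified to 33 characters, matching vulkan_xcb.h above):
+    #   #define VK_KHR_XCB_SURFACE_SPEC_VERSION   6
+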
+    def genCmd(self, cmdinfo, name, alias):
+        "Command generation"
+        OutputGenerator.genCmd(self, cmdinfo, name, alias)
+
+        # if alias:
+        #     prefix = '// ' + name + ' is an alias of command ' + alias + '\n'
+        # else:
+        #     prefix = ''
+
+        prefix = ''
+        decls = self.makeCDecls(cmdinfo.elem)
+        self.appendSection('command', prefix + decls[0] + '\n')
+        if self.genOpts.genFuncPointers:
+            self.appendSection('commandPointer', decls[1])
diff --git a/src/third_party/vulkan-headers/src/registry/conventions.py b/src/third_party/vulkan-headers/src/registry/conventions.py
new file mode 100644
index 0000000..e0c3b83
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/registry/conventions.py
@@ -0,0 +1,323 @@
+#!/usr/bin/python3 -i
+#
+# Copyright (c) 2013-2019 The Khronos Group Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Base class for working-group-specific style conventions,
+# used in generation.
+
+from enum import Enum
+
+# Type categories that respond "False" to isStructAlwaysValid
+# basetype is home to typedefs like ..Bool32
+CATEGORIES_REQUIRING_VALIDATION = set(('handle',
+                                       'enum',
+                                       'bitmask',
+                                       'basetype',
+                                       None))
+
+# These are basic C types pulled in via openxr_platform_defines.h
+TYPES_KNOWN_ALWAYS_VALID = set(('char',
+                                'float',
+                                'int8_t', 'uint8_t',
+                                'int32_t', 'uint32_t',
+                                'int64_t', 'uint64_t',
+                                'size_t',
+                                'uintptr_t',
+                                'int',
+                                ))
+
+
+class ProseListFormats(Enum):
+    """A connective, possibly with a quantifier."""
+    AND = 0
+    EACH_AND = 1
+    OR = 2
+    ANY_OR = 3
+
+    @classmethod
+    def from_string(cls, s):
+        if s == 'or':
+            return cls.OR
+        if s == 'and':
+            return cls.AND
+        return None
+
+    @property
+    def connective(self):
+        if self in (ProseListFormats.OR, ProseListFormats.ANY_OR):
+            return 'or'
+        return 'and'
+
+    def quantifier(self, n):
+        """Return the desired quantifier for a list of a given length."""
+        if self == ProseListFormats.ANY_OR:
+            if n > 1:
+                return 'any of '
+        elif self == ProseListFormats.EACH_AND:
+            if n > 2:
+                return 'each of '
+            if n == 2:
+                return 'both of '
+        return ''
+
+
+class ConventionsBase:
+    """WG-specific conventions."""
+
+    def __init__(self):
+        self._command_prefix = None
+        self._type_prefix = None
+
+    def formatExtension(self, name):
+        """Mark up a name as an extension for the spec.
+
+        Must implement."""
+        raise NotImplementedError
+
+    @property
+    def null(self):
+        """Preferred spelling of NULL."""
+        raise NotImplementedError
+
+    def makeProseList(self, elements, fmt=ProseListFormats.AND, with_verb=False, *args, **kwargs):
+        """Make a (comma-separated) list for use in prose.
+
+        Adds a connective (by default, 'and')
+        before the last element if there are more than 1.
+
+        Adds the right one of "is" or "are" to the end if with_verb is true.
+
+        Optionally adds a quantifier (like 'any') before a list of 2 or more,
+        if specified by fmt.
+
+        Override with a different method or different call to
+        _implMakeProseList if you want to add a comma for two elements,
+        or not use a serial comma.
+        """
+        return self._implMakeProseList(elements, fmt, with_verb, *args, **kwargs)
+
+    @property
+    def struct_macro(self):
+        """Get the appropriate format macro for a structure.
+
+        May override.
+        """
+        return 'slink:'
+
+    @property
+    def external_macro(self):
+        """Get the appropriate format macro for an external type like uint32_t.
+
+        May override.
+        """
+        return 'code:'
+
+    def makeStructName(self, name):
+        """Prepend the appropriate format macro for a structure to a structure type name.
+
+        Uses struct_macro, so just override that if you want to change behavior.
+        """
+        return self.struct_macro + name
+
+    def makeExternalTypeName(self, name):
+        """Prepend the appropriate format macro for an external type like uint32_t to a type name.
+
+        Uses external_macro, so just override that if you want to change behavior.
+        """
+        return self.external_macro + name
+
+    def _implMakeProseList(self, elements, fmt, with_verb, comma_for_two_elts=False, serial_comma=True):
+        """Internal-use implementation to make a (comma-separated) list for use in prose.
+
+        Adds a connective (by default, 'and')
+        before the last element if there are more than 1,
+        and only includes commas if there are more than 2
+        (if comma_for_two_elts is False).
+
+        Adds the right one of "is" or "are" to the end if with_verb is true.
+
+        Optionally adds a quantifier (like 'any') before a list of 2 or more,
+        if specified by fmt.
+
+        Don't edit these defaults, override self.makeProseList().
+        """
+        assert(serial_comma)  # didn't implement what we didn't need
+        if isinstance(fmt, str):
+            fmt = ProseListFormats.from_string(fmt)
+
+        my_elts = list(elements)
+        if len(my_elts) > 1:
+            my_elts[-1] = '{} {}'.format(fmt.connective, my_elts[-1])
+
+        if not comma_for_two_elts and len(my_elts) <= 2:
+            prose = ' '.join(my_elts)
+        else:
+            prose = ', '.join(my_elts)
+
+        quantifier = fmt.quantifier(len(my_elts))
+
+        parts = [quantifier, prose]
+
+        if with_verb:
+            if len(my_elts) > 1:
+                parts.append(' are')
+            else:
+                parts.append(' is')
+        return ''.join(parts)
+
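+    # Editorial note (illustrative, not from upstream): with the defaults
+    # above, _implMakeProseList(['a', 'b', 'c'], ProseListFormats.ANY_OR,
+    # with_verb=True) yields "any of a, b, or c are", while two elements
+    # under the default AND format give just "a and b"; the serial comma
+    # only appears once there are three or more items.
+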
+    @property
+    def file_suffix(self):
+        """Return suffix of generated Asciidoctor files"""
+        raise NotImplementedError
+
+    def api_name(self, spectype=None):
+        """Return API or specification name for citations in ref pages.
+
+        spectype is the spec this refpage is for.
+        'api' (the default value) is the main API Specification.
+        If an unrecognized spectype is given, returns None.
+
+        Must implement."""
+        raise NotImplementedError
+
+    def should_insert_may_alias_macro(self, genOpts):
+        """Return true if we should insert a "may alias" macro in this file.
+
+        Only used by OpenXR right now."""
+        return False
+
+    @property
+    def command_prefix(self):
+        """Return the expected prefix of commands/functions.
+
+        Implemented in terms of api_prefix."""
+        if not self._command_prefix:
+            self._command_prefix = self.api_prefix[:].replace('_', '').lower()
+        return self._command_prefix
+
+    @property
+    def type_prefix(self):
+        """Return the expected prefix of type names.
+
+        Implemented in terms of command_prefix (and in turn, api_prefix)."""
+        if not self._type_prefix:
+            self._type_prefix = ''.join(
+                (self.command_prefix[0:1].upper(), self.command_prefix[1:]))
+        return self._type_prefix
+
+    @property
+    def api_prefix(self):
+        """Return API token prefix.
+
+        Typically two uppercase letters followed by an underscore.
+
+        Must implement."""
+        raise NotImplementedError
+
+    @property
+    def api_version_prefix(self):
+        """Return API core version token prefix.
+
+        Implemented in terms of api_prefix.
+
+        May override."""
+        return self.api_prefix + 'VERSION_'
+
+    @property
+    def KHR_prefix(self):
+        """Return extension name prefix for KHR extensions.
+
+        Implemented in terms of api_prefix.
+
+        May override."""
+        return self.api_prefix + 'KHR_'
+
+    @property
+    def EXT_prefix(self):
+        """Return extension name prefix for EXT extensions.
+
+        Implemented in terms of api_prefix.
+
+        May override."""
+        return self.api_prefix + 'EXT_'
+
+    def writeFeature(self, featureExtraProtect, filename):
+        """Return True if OutputGenerator.endFeature should write this feature.
+
+        Defaults to always True.
+        Used in COutputGenerator.
+
+        May override."""
+        return True
+
+    def requires_error_validation(self, return_type):
+        """Return True if the return_type element is an API result code
+        requiring error validation.
+
+        Defaults to always False.
+
+        May override."""
+        return False
+
+    @property
+    def required_errors(self):
+        """Return a list of required error codes for validation.
+
+        Defaults to an empty list.
+
+        May override."""
+        return []
+
+    def is_voidpointer_alias(self, tag, text, tail):
+        """Return True if the declaration components (tag,text,tail) of an
+        element represents a void * type.
+
+        Defaults to a reasonable implementation.
+
+        May override."""
+        return tag == 'type' and text == 'void' and tail.startswith('*')
+
+    def make_voidpointer_alias(self, tail):
+        """Reformat a void * declaration to include the API alias macro.
+
+        Defaults to a no-op.
+
+        Must override if you actually want to use this feature in your project."""
+        return tail
+
+    def category_requires_validation(self, category):
+        """Return True if the given type 'category' always requires validation.
+
+        Defaults to a reasonable implementation.
+
+        May override."""
+        return category in CATEGORIES_REQUIRING_VALIDATION
+
+    def type_always_valid(self, typename):
+        """Return True if the given type name is always valid (never requires validation).
+
+        This is for things like integers.
+
+        Defaults to a reasonable implementation.
+
+        May override."""
+        return typename in TYPES_KNOWN_ALWAYS_VALID
+
+    @property
+    def should_skip_checking_codes(self):
+        """Return True if more than the basic validation of return codes should
+        be skipped for a command."""
+
+        return False
diff --git a/src/third_party/vulkan-headers/src/registry/generator.py b/src/third_party/vulkan-headers/src/registry/generator.py
new file mode 100644
index 0000000..08179b1
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/registry/generator.py
@@ -0,0 +1,917 @@
+#!/usr/bin/python3 -i
+#
+# Copyright (c) 2013-2019 The Khronos Group Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Base class for source/header/doc generators, as well as some utility functions."""
+
+from __future__ import unicode_literals
+
+import io
+import os
+import pdb
+import re
+import sys
+try:
+    from pathlib import Path
+except ImportError:
+    from pathlib2 import Path
+
+from spec_tools.util import getElemName, getElemType
+
+
+def write(*args, **kwargs):
+    file = kwargs.pop('file', sys.stdout)
+    end = kwargs.pop('end', '\n')
+    file.write(' '.join(str(arg) for arg in args))
+    file.write(end)
+
+
+def noneStr(s):
+    """Return string argument, or "" if argument is None.
+
+    Used in converting etree Elements into text.
+    s - string to convert"""
+    if s:
+        return s
+    return ""
+
+def enquote(s):
+    """Return string argument with surrounding quotes,
+      for serialization into Python code."""
+    if s:
+        return "'{}'".format(s)
+    return None
+
+def regSortCategoryKey(feature):
+    """Primary sort key for regSortFeatures.
+
+    Sorts by category of the feature name string:
+
+    - Core API features (those defined with a `<feature>` tag)
+    - ARB/KHR/OES (Khronos extensions)
+    - other       (EXT/vendor extensions)
+
+    This may need to change for some APIs"""
+
+    if feature.elem.tag == 'feature':
+        return 0
+    if (feature.category == 'ARB'
+        or feature.category == 'KHR'
+            or feature.category == 'OES'):
+        return 1
+
+    return 2
+
+def regSortOrderKey(feature):
+    """Secondary sort key for regSortFeatures.
+    Sorts by sortorder attribute."""
+
+    return feature.sortorder
+
+def regSortFeatureVersionKey(feature):
+    """Tertiary sort key for regSortFeatures.
+
+    Sorts by feature version.
+    `<extension>` elements all have version number 0."""
+
+    return float(feature.versionNumber)
+
+def regSortExtensionNumberKey(feature):
+    """Last sort key for regSortFeatures.
+
+    Sorts by extension number.
+    `<feature>` elements all have extension number 0."""
+
+    return int(feature.number)
+
+def regSortFeatures(featureList):
+    """Default sort procedure for features.
+
+    - Sorts by primary key of feature category ('feature' or 'extension'),
+    - then by sort order within the category
+    - then by version number (for features)
+    - then by extension number (for extensions)"""
+    featureList.sort(key=regSortExtensionNumberKey)
+    featureList.sort(key=regSortFeatureVersionKey)
+    featureList.sort(key=regSortOrderKey)
+    featureList.sort(key=regSortCategoryKey)
+
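+# Editorial note (illustrative, not from upstream): because Python's sort is
+# stable, running the four sorts above from least to most significant key
+# leaves core <feature> tags first, then ARB/KHR/OES extensions, then all
+# other extensions, with each group further ordered by sortorder, version
+# number, and extension number.
+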
+class GeneratorOptions:
+    """Base class for options used during header/documentation production.
+
+    These options are target language independent, and used by
+    Registry.apiGen() and by base OutputGenerator objects."""
+
+    def __init__(self,
+                 conventions=None,
+                 filename=None,
+                 directory='.',
+                 apiname=None,
+                 profile=None,
+                 versions='.*',
+                 emitversions='.*',
+                 defaultExtensions=None,
+                 addExtensions=None,
+                 removeExtensions=None,
+                 emitExtensions=None,
+                 sortProcedure=regSortFeatures):
+        """Constructor.
+
+        Arguments:
+
+        - conventions - may be mandatory for some generators:
+        an object that implements ConventionsBase
+        - filename - basename of file to generate, or None to write to stdout.
+        - directory - directory in which to generate filename
+        - apiname - string matching `<api>` 'apiname' attribute, e.g. 'gl'.
+        - profile - string specifying API profile, e.g. 'core', or None.
+        - versions - regex matching API versions to process interfaces for.
+        Normally `'.*'` or `'[0-9][.][0-9]'` to match all defined versions.
+        - emitversions - regex matching API versions to actually emit
+        interfaces for (though all requested versions are considered
+        when deciding which interfaces to generate). For GL 4.3 glext.h,
+        this might be `'1[.][2-5]|[2-4][.][0-9]'`.
+        - defaultExtensions - If not None, a string which must in its
+        entirety match the pattern in the "supported" attribute of
+        the `<extension>`. Defaults to None. Usually the same as apiname.
+        - addExtensions - regex matching names of additional extensions
+        to include. Defaults to None.
+        - removeExtensions - regex matching names of extensions to
+        remove (after defaultExtensions and addExtensions). Defaults
+        to None.
+        - emitExtensions - regex matching names of extensions to actually emit
+        interfaces for (though all requested versions are considered when
+        deciding which interfaces to generate).
+        - sortProcedure - takes a list of FeatureInfo objects and sorts
+        them in place to a preferred order in the generated output.
+        Default is core API versions, ARB/KHR/OES extensions, all other
+        extensions, by core API version number or extension number in each
+        group.
+
+        The regex patterns can be None or empty, in which case they match
+        nothing."""
+        self.conventions = conventions
+        """may be mandatory for some generators:
+        an object that implements ConventionsBase"""
+
+        self.filename = filename
+        "basename of file to generate, or None to write to stdout."
+
+        self.directory = directory
+        "directory in which to generate filename"
+
+        self.apiname = apiname
+        "string matching `<api>` 'apiname' attribute, e.g. 'gl'."
+
+        self.profile = profile
+        "string specifying API profile , e.g. 'core', or None."
+
+        self.versions = self.emptyRegex(versions)
+        """regex matching API versions to process interfaces for.
+        Normally `'.*'` or `'[0-9][.][0-9]'` to match all defined versions."""
+
+        self.emitversions = self.emptyRegex(emitversions)
+        """regex matching API versions to actually emit
+        interfaces for (though all requested versions are considered
+        when deciding which interfaces to generate). For GL 4.3 glext.h,
+        this might be `'1[.][2-5]|[2-4][.][0-9]'`."""
+
+        self.defaultExtensions = defaultExtensions
+        """If not None, a string which must in its
+        entirety match the pattern in the "supported" attribute of
+        the `<extension>`. Defaults to None. Usually the same as apiname."""
+
+        self.addExtensions = self.emptyRegex(addExtensions)
+        """regex matching names of additional extensions
+        to include. Defaults to None."""
+
+        self.removeExtensions = self.emptyRegex(removeExtensions)
+        """regex matching names of extensions to
+        remove (after defaultExtensions and addExtensions). Defaults
+        to None."""
+
+        self.emitExtensions = self.emptyRegex(emitExtensions)
+        """regex matching names of extensions to actually emit
+        interfaces for (though all requested versions are considered when
+        deciding which interfaces to generate)."""
+
+        self.sortProcedure = sortProcedure
+        """takes a list of FeatureInfo objects and sorts
+        them in place to a preferred order in the generated output.
+        Default is core API versions, ARB/KHR/OES extensions, all
+        other extensions, by core API version number or extension number
+        within each group."""
+
+    def emptyRegex(self, pat):
+        """Substitute a regular expression which matches no version
+        or extension names for None or the empty string."""
+        if not pat:
+            return '_nomatch_^'
+
+        return pat
+
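+    # Editorial note (illustrative, not from upstream): '_nomatch_^' cannot
+    # match any version or extension name, since '^' cannot succeed after a
+    # literal prefix, so passing None or '' for a regex option yields a
+    # pattern that matches nothing, as the constructor docstring states.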
+
+class OutputGenerator:
+    """Generate specified API interfaces in a specific style, such as a C header.
+
+    Base class for generating API interfaces.
+    Manages basic logic, logging, and output file control.
+    Derived classes actually generate formatted output.
+    """
+
+    # categoryToPath - map XML 'category' to include file directory name
+    categoryToPath = {
+        'bitmask': 'flags',
+        'enum': 'enums',
+        'funcpointer': 'funcpointers',
+        'handle': 'handles',
+        'define': 'defines',
+        'basetype': 'basetypes',
+    }
+
+    def __init__(self, errFile=sys.stderr, warnFile=sys.stderr, diagFile=sys.stdout):
+        """Constructor
+
+        - errFile, warnFile, diagFile - file handles to write errors,
+          warnings, diagnostics to. May be None to not write."""
+        self.outFile = None
+        self.errFile = errFile
+        self.warnFile = warnFile
+        self.diagFile = diagFile
+        # Internal state
+        self.featureName = None
+        self.genOpts = None
+        self.registry = None
+        # Used for extension enum value generation
+        self.extBase = 1000000000
+        self.extBlockSize = 1000
+        self.madeDirs = {}
+
+    def logMsg(self, level, *args):
+        """Write a message of different categories to different
+        destinations.
+
+        - `level`
+          - 'diag' (diagnostic, voluminous)
+          - 'warn' (warning)
+          - 'error' (fatal error - raises exception after logging)
+
+        - `*args` - print()-style arguments to direct to corresponding log"""
+        if level == 'error':
+            strfile = io.StringIO()
+            write('ERROR:', *args, file=strfile)
+            if self.errFile is not None:
+                write(strfile.getvalue(), file=self.errFile)
+            raise UserWarning(strfile.getvalue())
+        elif level == 'warn':
+            if self.warnFile is not None:
+                write('WARNING:', *args, file=self.warnFile)
+        elif level == 'diag':
+            if self.diagFile is not None:
+                write('DIAG:', *args, file=self.diagFile)
+        else:
+            raise UserWarning(
+                '*** FATAL ERROR in Generator.logMsg: unknown level:' + level)
+
+    def enumToValue(self, elem, needsNum):
+        """Parse and convert an `<enum>` tag into a value.
+
+        Returns a list:
+
+        - first element - integer representation of the value, or None
+          if needsNum is False. The value must be a legal number
+          if needsNum is True.
+        - second element - string representation of the value
+
+        There are several possible representations of values.
+
+        - A 'value' attribute simply contains the value.
+        - A 'bitpos' attribute defines a value by specifying the bit
+          position which is set in that value.
+        - An 'offset','extbase','extends' triplet specifies a value
+          as an offset to a base value defined by the specified
+          'extbase' extension name, which is then cast to the
+          typename specified by 'extends'. This requires probing
+          the registry database, and embeds knowledge of the
+          API extension enum scheme in this function.
+        - An 'alias' attribute contains the name of another enum
+          which this is an alias of. The other enum must be
+          declared first when emitting this enum."""
+        name = elem.get('name')
+        numVal = None
+        if 'value' in elem.keys():
+            value = elem.get('value')
+            # print('About to translate value =', value, 'type =', type(value))
+            if needsNum:
+                numVal = int(value, 0)
+            # If there's a non-integer, numeric 'type' attribute (e.g. 'u' or
+            # 'ull'), append it to the string value.
+            # t = enuminfo.elem.get('type')
+            # if t is not None and t != '' and t != 'i' and t != 's':
+            #     value += enuminfo.type
+            self.logMsg('diag', 'Enum', name, '-> value [', numVal, ',', value, ']')
+            return [numVal, value]
+        if 'bitpos' in elem.keys():
+            value = elem.get('bitpos')
+            bitpos = int(value, 0)
+            numVal = 1 << bitpos
+            value = '0x%08x' % numVal
+            if bitpos >= 32:
+                value = value + 'ULL'
+            self.logMsg('diag', 'Enum', name, '-> bitpos [', numVal, ',', value, ']')
+            return [numVal, value]
+        if 'offset' in elem.keys():
+            # Obtain values in the mapping from the attributes
+            enumNegative = False
+            offset = int(elem.get('offset'), 0)
+            extnumber = int(elem.get('extnumber'), 0)
+            extends = elem.get('extends')
+            if 'dir' in elem.keys():
+                enumNegative = True
+            self.logMsg('diag', 'Enum', name, 'offset =', offset,
+                        'extnumber =', extnumber, 'extends =', extends,
+                        'enumNegative =', enumNegative)
+            # Now determine the actual enumerant value, as defined
+            # in the "Layers and Extensions" appendix of the spec.
+            numVal = self.extBase + (extnumber - 1) * self.extBlockSize + offset
+            if enumNegative:
+                numVal *= -1
+            value = '%d' % numVal
+            # More logic needed!
+            self.logMsg('diag', 'Enum', name, '-> offset [', numVal, ',', value, ']')
+            return [numVal, value]
+        if 'alias' in elem.keys():
+            return [None, elem.get('alias')]
+        return [None, None]
+
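+    # Editorial note (illustrative, not from upstream): with the defaults
+    # extBase = 1000000000 and extBlockSize = 1000, enumToValue resolves an
+    # <enum> with extnumber="123" and offset="4" to
+    # 1000000000 + (123 - 1) * 1000 + 4 = 1000122004, and a 'dir' attribute
+    # flips the sign; a bitpos="5" entry becomes 1 << 5, emitted as
+    # '0x00000020', gaining a 'ULL' suffix once bitpos reaches 32.
+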
+    def checkDuplicateEnums(self, enums):
+        """Sanity check enumerated values.
+
+        -  enums - list of `<enum>` Elements
+
+        returns the list with duplicates stripped"""
+        # Dictionaries indexed by name and numeric value.
+        # Entries are [ Element, numVal, strVal ] matching name or value
+
+        nameMap = {}
+        valueMap = {}
+
+        stripped = []
+        for elem in enums:
+            name = elem.get('name')
+            (numVal, strVal) = self.enumToValue(elem, True)
+
+            if name in nameMap:
+                # Duplicate name found; check values
+                (name2, numVal2, strVal2) = nameMap[name]
+
+                # Duplicate enum values for the same name are benign. This
+                # happens when defining the same enum conditionally in
+                # several extension blocks.
+                if (strVal2 == strVal or (numVal is not None
+                                          and numVal == numVal2)):
+                    pass
+                    # self.logMsg('info', 'checkDuplicateEnums: Duplicate enum (' + name +
+                    #             ') found with the same value:' + strVal)
+                else:
+                    self.logMsg('warn', 'checkDuplicateEnums: Duplicate enum (' + name
+                                + ') found with different values:' + strVal
+                                + ' and ' + strVal2)
+
+                # Don't add the duplicate to the returned list
+                continue
+            elif numVal in valueMap:
+                # Duplicate value found (such as an alias); report it, but
+                # still add this enum to the list.
+                (name2, numVal2, strVal2) = valueMap[numVal]
+
+                try:
+                    self.logMsg('warn', 'Two enums found with the same value: ' +
+                                name + ' = ' + name2.get('name') + ' = ' + strVal)
+                except:
+                    pdb.set_trace()
+
+            # Track this enum to detect follow-on duplicates
+            nameMap[name] = [elem, numVal, strVal]
+            if numVal is not None:
+                valueMap[numVal] = [elem, numVal, strVal]
+
+            # Add this enum to the list
+            stripped.append(elem)
+
+        # Return the list
+        return stripped
+
+    def buildEnumCDecl(self, expand, groupinfo, groupName):
+        """Generate the C declaration for an enum"""
+        groupElem = groupinfo.elem
+
+        if self.genOpts.conventions.constFlagBits and groupElem.get('type') == 'bitmask':
+            return self.buildEnumCDecl_Bitmask(groupinfo, groupName)
+        else:
+            return self.buildEnumCDecl_Enum(expand, groupinfo, groupName)
+
+    def buildEnumCDecl_Bitmask(self, groupinfo, groupName):
+        """Generate the C declaration for an "enum" that is actually a
+        set of flag bits"""
+        groupElem = groupinfo.elem
+        flagTypeName = groupinfo.flagType.elem.get('name')
+
+        # Prefix
+        body = "// Flag bits for " + flagTypeName + "\n"
+
+        # Loop over the nested 'enum' tags.
+        for elem in groupElem.findall('enum'):
+            # Convert the value to an integer and use that to track min/max.
+            # Values of form -(number) are accepted but nothing more complex.
+            # Should catch exceptions here for more complex constructs. Not yet.
+            (_, strVal) = self.enumToValue(elem, True)
+            name = elem.get('name')
+            body += "static const {} {} = {};\n".format(flagTypeName, name, strVal)
+
+        # Postfix
+
+        return ("bitmask", body)
+
+    def buildEnumCDecl_Enum(self, expand, groupinfo, groupName):
+        """Generate the C declaration for an enumerated type"""
+        groupElem = groupinfo.elem
+
+        # Break the group name into prefix and suffix portions for range
+        # enum generation
+        expandName = re.sub(r'([0-9a-z_])([A-Z0-9])', r'\1_\2', groupName).upper()
+        expandPrefix = expandName
+        expandSuffix = ''
+        expandSuffixMatch = re.search(r'[A-Z][A-Z]+$', groupName)
+        if expandSuffixMatch:
+            expandSuffix = '_' + expandSuffixMatch.group()
+            # Strip off the suffix from the prefix
+            expandPrefix = expandName.rsplit(expandSuffix, 1)[0]
+
+        # Prefix
+        body = ["typedef enum %s {" % groupName]
+
+        # @@ Should use the type="bitmask" attribute instead
+        isEnum = ('FLAG_BITS' not in expandPrefix)
+
+        # Get a list of nested 'enum' tags.
+        enums = groupElem.findall('enum')
+
+        # Check for and report duplicates, and return a list with them
+        # removed.
+        enums = self.checkDuplicateEnums(enums)
+
+        # Loop over the nested 'enum' tags. Keep track of the minimum and
+        # maximum numeric values, if they can be determined; but only for
+        # core API enumerants, not extension enumerants. This is inferred
+        # by looking for 'extends' attributes.
+        minName = None
+
+        # Accumulate non-numeric enumerant values separately and append
+        # them following the numeric values, to allow for aliases.
+        # NOTE: this doesn't do a topological sort yet, so aliases of
+        # aliases can still get in the wrong order.
+        aliasText = []
+
+        for elem in enums:
+            # Convert the value to an integer and use that to track min/max.
+            # Values of form -(number) are accepted but nothing more complex.
+            # Should catch exceptions here for more complex constructs. Not yet.
+            (numVal, strVal) = self.enumToValue(elem, True)
+            name = elem.get('name')
+
+            # Extension enumerants are only included if they are required
+            if self.isEnumRequired(elem):
+                decl = "    {} = {},".format(name, strVal)
+                if numVal is not None:
+                    body.append(decl)
+                else:
+                    aliasText.append(decl)
+
+            # Don't track min/max for non-numbers (numVal is None)
+            if isEnum and numVal is not None and elem.get('extends') is None:
+                if minName is None:
+                    minName = maxName = name
+                    minValue = maxValue = numVal
+                elif numVal < minValue:
+                    minName = name
+                    minValue = numVal
+                elif numVal > maxValue:
+                    maxName = name
+                    maxValue = numVal
+
+        # Now append the non-numeric enumerant values
+        body.extend(aliasText)
+
+        # Generate min/max value tokens and a range-padding enum. Need some
+        # additional padding to generate correct names...
+        if isEnum and expand:
+            body.extend(("    {}_BEGIN_RANGE{} = {},".format(expandPrefix, expandSuffix, minName),
+                         "    {}_END_RANGE{} = {},".format(
+                             expandPrefix, expandSuffix, maxName),
+                         "    {}_RANGE_SIZE{} = ({} - {} + 1),".format(expandPrefix, expandSuffix, maxName, minName)))
+
+        body.append("    {}_MAX_ENUM{} = 0x7FFFFFFF".format(
+            expandPrefix, expandSuffix))
+
+        # Postfix
+        body.append("} %s;" % groupName)
+
+        # Determine appropriate section for this declaration
+        if groupElem.get('type') == 'bitmask':
+            section = 'bitmask'
+        else:
+            section = 'group'
+
+        return (section, '\n'.join(body))
+
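+    # Editorial note (illustrative, not from upstream): when 'expand' is
+    # requested, buildEnumCDecl_Enum emits synthetic range tokens for a core
+    # group such as VkImageViewType (VK_IMAGE_VIEW_TYPE_BEGIN_RANGE,
+    # VK_IMAGE_VIEW_TYPE_END_RANGE, VK_IMAGE_VIEW_TYPE_RANGE_SIZE) in
+    # addition to VK_IMAGE_VIEW_TYPE_MAX_ENUM = 0x7FFFFFFF, which is always
+    # emitted and conventionally keeps the enum 32 bits wide.
+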
+    def makeDir(self, path):
+        """Create a directory, if not already done.
+
+        Generally called from derived generators creating hierarchies."""
+        self.logMsg('diag', 'OutputGenerator::makeDir(' + path + ')')
+        if path not in self.madeDirs:
+            # This can get race conditions with multiple writers, see
+            # https://stackoverflow.com/questions/273192/
+            if not os.path.exists(path):
+                os.makedirs(path)
+            self.madeDirs[path] = None
+
+    def beginFile(self, genOpts):
+        """Start a new interface file
+
+        - genOpts - GeneratorOptions controlling what's generated and how"""
+        self.genOpts = genOpts
+        self.should_insert_may_alias_macro = \
+            self.genOpts.conventions.should_insert_may_alias_macro(self.genOpts)
+
+        self.conventions = genOpts.conventions
+
+        # Open specified output file. Not done in constructor since a
+        # Generator can be used without writing to a file.
+        if self.genOpts.filename is not None:
+            if sys.platform == 'win32':
+                directory = Path(self.genOpts.directory)
+                if not Path.exists(directory):
+                    os.makedirs(directory)
+                self.outFile = (directory / self.genOpts.filename).open('w', encoding='utf-8')
+            else:
+                filename = self.genOpts.directory + '/' + self.genOpts.filename
+                self.outFile = io.open(filename, 'w', encoding='utf-8')
+        else:
+            self.outFile = sys.stdout
+
+    def endFile(self):
+        if self.errFile:
+            self.errFile.flush()
+        if self.warnFile:
+            self.warnFile.flush()
+        if self.diagFile:
+            self.diagFile.flush()
+        self.outFile.flush()
+        if self.outFile != sys.stdout and self.outFile != sys.stderr:
+            self.outFile.close()
+        self.genOpts = None
+
+    def beginFeature(self, interface, emit):
+        """Write interface for a feature and tag generated features as having been done.
+
+        - interface - element for the `<version>` / `<extension>` to generate
+        - emit - actually write to the header only when True"""
+        self.emit = emit
+        self.featureName = interface.get('name')
+        # If there's an additional 'protect' attribute in the feature, save it
+        self.featureExtraProtect = interface.get('protect')
+
+    def endFeature(self):
+        """Finish an interface file, closing it when done.
+
+        Derived classes are responsible for emitting the feature."""
+        self.featureName = None
+        self.featureExtraProtect = None
+
+    def validateFeature(self, featureType, featureName):
+        """Validate we're generating something only inside a `<feature>` tag"""
+        if self.featureName is None:
+            raise UserWarning('Attempt to generate', featureType,
+                              featureName, 'when not in feature')
+
+    def genType(self, typeinfo, name, alias):
+        """Generate interface for a type
+
+        - typeinfo - TypeInfo for a type
+
+        Extend to generate as desired in your derived class."""
+        self.validateFeature('type', name)
+
+    def genStruct(self, typeinfo, typeName, alias):
+        """Generate interface for a C "struct" type.
+
+        - typeinfo - TypeInfo for a type interpreted as a struct
+
+        Extend to generate as desired in your derived class."""
+        self.validateFeature('struct', typeName)
+
+        # The mixed-mode <member> tags may contain no-op <comment> tags.
+        # It is convenient to remove them here where all output generators
+        # will benefit.
+        for member in typeinfo.elem.findall('.//member'):
+            for comment in member.findall('comment'):
+                member.remove(comment)
+
+    def genGroup(self, groupinfo, groupName, alias):
+        """Generate interface for a group of enums (C "enum")
+
+        - groupinfo - GroupInfo for a group.
+
+        Extend to generate as desired in your derived class."""
+
+        self.validateFeature('group', groupName)
+
+    def genEnum(self, enuminfo, typeName, alias):
+        """Generate interface for an enum (constant).
+
+        - enuminfo - EnumInfo for an enum
+        - name - enum name
+
+        Extend to generate as desired in your derived class."""
+        self.validateFeature('enum', typeName)
+
+    def genCmd(self, cmd, cmdinfo, alias):
+        """Generate interface for a command.
+
+        - cmdinfo - CmdInfo for a command
+
+        Extend to generate as desired in your derived class."""
+        self.validateFeature('command', cmdinfo)
+
+    def makeProtoName(self, name, tail):
+        """Turn a `<proto>` `<name>` into C-language prototype
+        and typedef declarations for that name.
+
+        - name - contents of `<name>` tag
+        - tail - whatever text follows that tag in the Element"""
+        return self.genOpts.apientry + name + tail
+
+    def makeTypedefName(self, name, tail):
+        """Make the function-pointer typedef name for a command."""
+        return '(' + self.genOpts.apientryp + 'PFN_' + name + tail + ')'
+
+    def makeCParamDecl(self, param, aligncol):
+        """Return a string which is an indented, formatted
+        declaration for a `<param>` or `<member>` block (e.g. function parameter
+        or structure/union member).
+
+        - param - Element (`<param>` or `<member>`) to format
+        - aligncol - if non-zero, attempt to align the nested `<name>` element
+          at this column"""
+        indent = '    '
+        paramdecl = indent + noneStr(param.text)
+        for elem in param:
+            text = noneStr(elem.text)
+            tail = noneStr(elem.tail)
+
+            if self.should_insert_may_alias_macro and self.genOpts.conventions.is_voidpointer_alias(elem.tag, text, tail):
+                # OpenXR-specific macro insertion - but not in apiinc for the spec
+                tail = self.genOpts.conventions.make_voidpointer_alias(tail)
+            if elem.tag == 'name' and aligncol > 0:
+                self.logMsg('diag', 'Aligning parameter', elem.text, 'to column', self.genOpts.alignFuncParam)
+                # Align at specified column, if possible
+                paramdecl = paramdecl.rstrip()
+                oldLen = len(paramdecl)
+                # This works around a problem where very long type names -
+                # longer than the alignment column - would run into the tail
+                # text.
+                paramdecl = paramdecl.ljust(aligncol - 1) + ' '
+                newLen = len(paramdecl)
+                self.logMsg('diag', 'Adjust length of parameter decl from', oldLen, 'to', newLen, ':', paramdecl)
+            paramdecl += text + tail
+        if aligncol == 0:
+            # Squeeze out multiple spaces other than the indentation
+            paramdecl = indent + ' '.join(paramdecl.split())
+        return paramdecl
+
+    def getCParamTypeLength(self, param):
+        """Return the length of the type field is an indented, formatted
+        declaration for a `<param>` or `<member>` block (e.g. function parameter
+        or structure/union member).
+
+        - param - Element (`<param>` or `<member>`) to identify"""
+        # Allow for missing <name> tag
+        newLen = 0
+        paramdecl = '    ' + noneStr(param.text)
+        for elem in param:
+            text = noneStr(elem.text)
+            tail = noneStr(elem.tail)
+
+            if self.should_insert_may_alias_macro and self.genOpts.conventions.is_voidpointer_alias(elem.tag, text, tail):
+                # OpenXR-specific macro insertion
+                tail = self.genOpts.conventions.make_voidpointer_alias(tail)
+            if elem.tag == 'name':
+                # Align at specified column, if possible
+                newLen = len(paramdecl.rstrip())
+                self.logMsg('diag', 'Identifying length of', elem.text, 'as', newLen)
+            paramdecl += text + tail
+
+        return newLen
+
+    def getMaxCParamTypeLength(self, info):
+        """Return the length of the longest type field for a member/parameter.
+
+        - info - TypeInfo or CommandInfo.
+        """
+        lengths = (self.getCParamTypeLength(member)
+                   for member in info.getMembers())
+        return max(lengths)
+
+    def getHandleParent(self, typename):
+        """Get the parent of a handle object."""
+        info = self.registry.typedict.get(typename)
+        if info is None:
+            return None
+
+        elem = info.elem
+        if elem is not None:
+            return elem.get('parent')
+
+        return None
+
+    def iterateHandleAncestors(self, typename):
+        """Iterate through the ancestors of a handle type."""
+        current = self.getHandleParent(typename)
+        while current is not None:
+            yield current
+            current = self.getHandleParent(current)
+
+    def getHandleAncestors(self, typename):
+        """Get the ancestors of a handle object."""
+        return list(self.iterateHandleAncestors(typename))
+
+    def getTypeCategory(self, typename):
+        """Get the category of a type."""
+        info = self.registry.typedict.get(typename)
+        if info is None:
+            return None
+
+        elem = info.elem
+        if elem is not None:
+            return elem.get('category')
+        return None
+
+    def isStructAlwaysValid(self, structname):
+        """Try to do check if a structure is always considered valid (i.e. there's no rules to its acceptance)."""
+        # A conventions object is required for this call.
+        if not self.conventions:
+            raise RuntimeError("To use isStructAlwaysValid, be sure your options include a Conventions object.")
+
+        if self.conventions.type_always_valid(structname):
+            return True
+
+        category = self.getTypeCategory(structname)
+        if self.conventions.category_requires_validation(category):
+            return False
+
+        info = self.registry.typedict.get(structname)
+        assert(info is not None)
+
+        members = info.getMembers()
+
+        for member in members:
+            member_name = getElemName(member)
+            if member_name in (self.conventions.structtype_member_name,
+                               self.conventions.nextpointer_member_name):
+                return False
+
+            if member.get('noautovalidity'):
+                return False
+
+            member_type = getElemType(member)
+
+            if member_type in ('void', 'char') or self.paramIsArray(member) or self.paramIsPointer(member):
+                return False
+
+            if self.conventions.type_always_valid(member_type):
+                continue
+
+            member_category = self.getTypeCategory(member_type)
+
+            if self.conventions.category_requires_validation(member_category):
+                return False
+
+            if member_category in ('struct', 'union'):
+                if self.isStructAlwaysValid(member_type) is False:
+                    return False
+
+        return True
+
+    def isEnumRequired(self, elem):
+        """Return True if this `<enum>` element is
+        required, False otherwise
+
+        - elem - `<enum>` element to test"""
+        required = elem.get('required') is not None
+        self.logMsg('diag', 'isEnumRequired:', elem.get('name'),
+                    '->', required)
+        return required
+
+        # @@@ This code is overridden by equivalent code now run in
+        # @@@ Registry.generateFeature
+
+        required = False
+
+        extname = elem.get('extname')
+        if extname is not None:
+            # 'supported' attribute was injected when the <enum> element was
+            # moved into the <enums> group in Registry.parseTree()
+            if self.genOpts.defaultExtensions == elem.get('supported'):
+                required = True
+            elif re.match(self.genOpts.addExtensions, extname) is not None:
+                required = True
+        elif elem.get('version') is not None:
+            required = re.match(self.genOpts.emitversions, elem.get('version')) is not None
+        else:
+            required = True
+
+        return required
+
+    def makeCDecls(self, cmd):
+        """Return C prototype and function pointer typedef for a
+        `<command>` Element, as a two-element list of strings.
+
+        - cmd - Element containing a `<command>` tag"""
+        proto = cmd.find('proto')
+        params = cmd.findall('param')
+        # Begin accumulating prototype and typedef strings
+        pdecl = self.genOpts.apicall
+        tdecl = 'typedef '
+
+        # Insert the function return type/name.
+        # For prototypes, add APIENTRY macro before the name
+        # For typedefs, add (APIENTRY *<name>) around the name and
+        #   use the PFN_cmdnameproc naming convention.
+        # Done by walking the tree for <proto> element by element.
+        # etree has elem.text followed by (elem[i], elem[i].tail)
+        #   for each child element and any following text
+        # Leading text
+        pdecl += noneStr(proto.text)
+        tdecl += noneStr(proto.text)
+        # For each child element, if it's a <name> wrap in appropriate
+        # declaration. Otherwise append its contents and tail contents.
+        for elem in proto:
+            text = noneStr(elem.text)
+            tail = noneStr(elem.tail)
+            if elem.tag == 'name':
+                pdecl += self.makeProtoName(text, tail)
+                tdecl += self.makeTypedefName(text, tail)
+            else:
+                pdecl += text + tail
+                tdecl += text + tail
+
+        if self.genOpts.alignFuncParam == 0:
+            # Squeeze out multiple spaces - there is no indentation
+            pdecl = ' '.join(pdecl.split())
+            tdecl = ' '.join(tdecl.split())
+
+        # Now add the parameter declaration list, which is identical
+        # for prototypes and typedefs. Concatenate all the text from
+        # a <param> node without the tags. No tree walking required
+        # since all tags are ignored.
+        # Uses: self.indentFuncProto
+        # self.indentFuncPointer
+        # self.alignFuncParam
+        n = len(params)
+        # Indented parameters
+        if n > 0:
+            indentdecl = '(\n'
+            indentdecl += ',\n'.join(self.makeCParamDecl(p, self.genOpts.alignFuncParam)
+                                     for p in params)
+            indentdecl += ');'
+        else:
+            indentdecl = '(void);'
+        # Non-indented parameters
+        paramdecl = '('
+        if n > 0:
+            paramnames = (''.join(t for t in p.itertext())
+                          for p in params)
+            paramdecl += ', '.join(paramnames)
+        else:
+            paramdecl += 'void'
+        paramdecl += ");"
+        return [pdecl + indentdecl, tdecl + paramdecl]
+
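+    # Editorial note (illustrative, not from upstream): with the Vulkan
+    # options used by genvk.py (apicall='VKAPI_ATTR ', apientry='VKAPI_CALL ',
+    # apientryp='VKAPI_PTR *'), makeCDecls turns a command such as
+    # vkCreateInstance into a prototype starting
+    # 'VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance(' and a typedef
+    # starting 'typedef VkResult (VKAPI_PTR *PFN_vkCreateInstance)('.
+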
+    def newline(self):
+        """Print a newline to the output file (utility function)"""
+        write('', file=self.outFile)
+
+    def setRegistry(self, registry):
+        self.registry = registry
diff --git a/src/third_party/vulkan-headers/src/registry/genvk.py b/src/third_party/vulkan-headers/src/registry/genvk.py
new file mode 100755
index 0000000..1311a87
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/registry/genvk.py
@@ -0,0 +1,552 @@
+#!/usr/bin/python3
+#
+# Copyright (c) 2013-2019 The Khronos Group Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import pdb
+import re
+import sys
+import time
+import xml.etree.ElementTree as etree
+
+from cgenerator import CGeneratorOptions, COutputGenerator
+from docgenerator import DocGeneratorOptions, DocOutputGenerator
+from extensionmetadocgenerator import (ExtensionMetaDocGeneratorOptions,
+                                       ExtensionMetaDocOutputGenerator)
+from generator import write
+from hostsyncgenerator import HostSynchronizationOutputGenerator
+from pygenerator import PyOutputGenerator
+from reg import Registry
+from validitygenerator import ValidityOutputGenerator
+from vkconventions import VulkanConventions
+
+
+# Simple timer functions
+startTime = None
+
+
+def startTimer(timeit):
+    global startTime
+    if timeit:
+        startTime = time.process_time()
+
+
+def endTimer(timeit, msg):
+    global startTime
+    if timeit:
+        endTime = time.process_time()
+        write(msg, endTime - startTime, file=sys.stderr)
+        startTime = None
+
+
+def makeREstring(strings, default=None, strings_are_regex=False):
+    """Turn a list of strings into a regexp string matching exactly those strings."""
+    if strings or default is None:
+        if not strings_are_regex:
+            strings = (re.escape(s) for s in strings)
+        return '^(' + '|'.join(strings) + ')$'
+    return default
+
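+# Editorial note (illustrative, not from upstream): makeREstring(['VK_KHR_xcb_surface'])
+# returns '^(VK_KHR_xcb_surface)$', makeREstring([], '.*') falls back to the
+# default '.*', and strings_are_regex=True joins the entries unescaped, as is
+# done for the platform header patterns below.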
+
+def makeGenOpts(args):
+    """Returns a directory of [ generator function, generator options ] indexed
+    by specified short names. The generator options incorporate the following
+    parameters:
+
+    args is an parsed argument object; see below for the fields that are used."""
+    global genOpts
+    genOpts = {}
+
+    # Default class of extensions to include, or None
+    defaultExtensions = args.defaultExtensions
+
+    # Additional extensions to include (list of extensions)
+    extensions = args.extension
+
+    # Extensions to remove (list of extensions)
+    removeExtensions = args.removeExtensions
+
+    # Extensions to emit (list of extensions)
+    emitExtensions = args.emitExtensions
+
+    # Features to include (list of features)
+    features = args.feature
+
+    # Whether to generate inclusion protection (guard macros) in output headers
+    protect = args.protect
+
+    # Output target directory
+    directory = args.directory
+
+    # Descriptive names for various regexp patterns used to select
+    # versions and extensions
+    allFeatures = allExtensions = r'.*'
+
+    # Turn lists of names/patterns into matching regular expressions
+    addExtensionsPat     = makeREstring(extensions, None)
+    removeExtensionsPat  = makeREstring(removeExtensions, None)
+    emitExtensionsPat    = makeREstring(emitExtensions, allExtensions)
+    featuresPat          = makeREstring(features, allFeatures)
+
+    # Copyright text prefixing all headers (list of strings).
+    prefixStrings = [
+        '/*',
+        '** Copyright (c) 2015-2019 The Khronos Group Inc.',
+        '**',
+        '** Licensed under the Apache License, Version 2.0 (the "License");',
+        '** you may not use this file except in compliance with the License.',
+        '** You may obtain a copy of the License at',
+        '**',
+        '**     http://www.apache.org/licenses/LICENSE-2.0',
+        '**',
+        '** Unless required by applicable law or agreed to in writing, software',
+        '** distributed under the License is distributed on an "AS IS" BASIS,',
+        '** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.',
+        '** See the License for the specific language governing permissions and',
+        '** limitations under the License.',
+        '*/',
+        ''
+    ]
+
+    # Text specific to Vulkan headers
+    vkPrefixStrings = [
+        '/*',
+        '** This header is generated from the Khronos Vulkan XML API Registry.',
+        '**',
+        '*/',
+        ''
+    ]
+
+    # Defaults for generating re-inclusion protection wrappers (or not)
+    protectFile = protect
+
+    # An API style conventions object
+    conventions = VulkanConventions()
+
+    # API include files for spec and ref pages
+    # Overwrites include subdirectories in spec source tree
+    # The generated include files do not include the calling convention
+    # macros (apientry etc.), unlike the header files.
+    # Because the 1.0 core branch includes ref pages for extensions,
+    # all the extension interfaces need to be generated, even though
+    # none are used by the core spec itself.
+    genOpts['apiinc'] = [
+          DocOutputGenerator,
+          DocGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'timeMarker',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = None,
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            apicall           = '',
+            apientry          = '',
+            apientryp         = '*',
+            alignFuncParam    = 48,
+            expandEnumerants  = False)
+        ]
+
+    # API names to validate man/api spec includes & links
+    genOpts['vkapi.py'] = [
+          PyOutputGenerator,
+          DocGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'vkapi.py',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = None,
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat)
+        ]
+
+    # API validity files for spec
+    genOpts['validinc'] = [
+          ValidityOutputGenerator,
+          DocGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'timeMarker',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = None,
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat)
+        ]
+
+    # API host sync table files for spec
+    genOpts['hostsyncinc'] = [
+          HostSynchronizationOutputGenerator,
+          DocGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'timeMarker',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = None,
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat)
+        ]
+
+    # Extension metainformation for spec extension appendices
+    genOpts['extinc'] = [
+          ExtensionMetaDocOutputGenerator,
+          ExtensionMetaDocGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'timeMarker',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = None,
+            defaultExtensions = defaultExtensions,
+            addExtensions     = None,
+            removeExtensions  = None,
+            emitExtensions    = emitExtensionsPat)
+        ]
+
+    # Platform extensions, in their own header files
+    # Each element of the platforms[] array defines information for
+    # generating a single platform:
+    #   [0] is the generated header file name
+    #   [1] is the set of platform extensions to generate
+    #   [2] is additional extensions whose interfaces should be considered,
+    #   but suppressed in the output, to avoid duplicate definitions of
+    #   dependent types like VkDisplayKHR and VkSurfaceKHR which come from
+    #   non-platform extensions.
+
+    # Track all platform extensions, for exclusion from vulkan_core.h
+    allPlatformExtensions = []
+
+    # Extensions suppressed for all platforms.
+    # Covers common WSI extension types.
+    commonSuppressExtensions = [ 'VK_KHR_display', 'VK_KHR_swapchain' ]
+
+    platforms = [
+        [ 'vulkan_android.h',     [ 'VK_KHR_android_surface',
+                                    'VK_ANDROID_external_memory_android_hardware_buffer'
+                                                                  ], commonSuppressExtensions ],
+        [ 'vulkan_fuchsia.h',     [ 'VK_FUCHSIA_imagepipe_surface'], commonSuppressExtensions ],
+        [ 'vulkan_ggp.h',         [ 'VK_GGP_stream_descriptor_surface',
+                                    'VK_GGP_frame_token'          ], commonSuppressExtensions ],
+        [ 'vulkan_ios.h',         [ 'VK_MVK_ios_surface'          ], commonSuppressExtensions ],
+        [ 'vulkan_macos.h',       [ 'VK_MVK_macos_surface'        ], commonSuppressExtensions ],
+        [ 'vulkan_vi.h',          [ 'VK_NN_vi_surface'            ], commonSuppressExtensions ],
+        [ 'vulkan_wayland.h',     [ 'VK_KHR_wayland_surface'      ], commonSuppressExtensions ],
+        [ 'vulkan_win32.h',       [ 'VK_.*_win32(|_.*)', 'VK_EXT_full_screen_exclusive' ],
+                                                                     commonSuppressExtensions +
+                                                                     [ 'VK_KHR_external_semaphore',
+                                                                       'VK_KHR_external_memory_capabilities',
+                                                                       'VK_KHR_external_fence',
+                                                                       'VK_KHR_external_fence_capabilities',
+                                                                       'VK_KHR_get_surface_capabilities2',
+                                                                       'VK_NV_external_memory_capabilities',
+                                                                     ] ],
+        [ 'vulkan_xcb.h',         [ 'VK_KHR_xcb_surface'          ], commonSuppressExtensions ],
+        [ 'vulkan_xlib.h',        [ 'VK_KHR_xlib_surface'         ], commonSuppressExtensions ],
+        [ 'vulkan_xlib_xrandr.h', [ 'VK_EXT_acquire_xlib_display' ], commonSuppressExtensions ],
+        [ 'vulkan_metal.h',       [ 'VK_EXT_metal_surface'        ], commonSuppressExtensions ],
+    ]
+
+    for platform in platforms:
+        headername = platform[0]
+
+        allPlatformExtensions += platform[1]
+
+        addPlatformExtensionsRE = makeREstring(
+            platform[1] + platform[2], strings_are_regex=True)
+        emitPlatformExtensionsRE = makeREstring(
+            platform[1], strings_are_regex=True)
+
+        opts = CGeneratorOptions(
+            conventions       = conventions,
+            filename          = headername,
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = None,
+            defaultExtensions = None,
+            addExtensions     = addPlatformExtensionsRE,
+            removeExtensions  = None,
+            emitExtensions    = emitPlatformExtensionsRE,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            genFuncPointers   = True,
+            protectFile       = protectFile,
+            protectFeature    = False,
+            protectProto      = '#ifndef',
+            protectProtoStr   = 'VK_NO_PROTOTYPES',
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48,
+            genEnumBeginEndRange = True)
+
+        genOpts[headername] = [ COutputGenerator, opts ]
+
+    # Header for core API + extensions.
+    # To generate just the core API,
+    # change to 'defaultExtensions = None' below.
+    #
+    # By default this adds all enabled, non-platform extensions.
+    # It removes all platform extensions (from the platform headers options
+    # constructed above) as well as any explicitly specified removals.
+
+    removeExtensionsPat = makeREstring(
+        allPlatformExtensions + removeExtensions, None, strings_are_regex=True)
+
+    genOpts['vulkan_core.h'] = [
+          COutputGenerator,
+          CGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'vulkan_core.h',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = defaultExtensions,
+            addExtensions     = None,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            genFuncPointers   = True,
+            protectFile       = protectFile,
+            protectFeature    = False,
+            protectProto      = '#ifndef',
+            protectProtoStr   = 'VK_NO_PROTOTYPES',
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48,
+            genEnumBeginEndRange = True)
+        ]
+
+    # Unused - vulkan10.h target.
+    # It is possible to generate a header with just the Vulkan 1.0 +
+    # extension interfaces defined, but since the promoted KHR extensions
+    # are now defined in terms of the 1.1 interfaces, such a header is very
+    # similar to vulkan_core.h.
+    genOpts['vulkan10.h'] = [
+          COutputGenerator,
+          CGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'vulkan10.h',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = 'VK_VERSION_1_0',
+            emitversions      = 'VK_VERSION_1_0',
+            defaultExtensions = defaultExtensions,
+            addExtensions     = None,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            genFuncPointers   = True,
+            protectFile       = protectFile,
+            protectFeature    = False,
+            protectProto      = '#ifndef',
+            protectProtoStr   = 'VK_NO_PROTOTYPES',
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48)
+        ]
+
+    genOpts['alias.h'] = [
+          COutputGenerator,
+          CGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'alias.h',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = defaultExtensions,
+            addExtensions     = None,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = None,
+            genFuncPointers   = False,
+            protectFile       = False,
+            protectFeature    = False,
+            protectProto      = '',
+            protectProtoStr   = '',
+            apicall           = '',
+            apientry          = '',
+            apientryp         = '',
+            alignFuncParam    = 36)
+        ]
+
+
+def genTarget(args):
+    """Generate a target based on the options in the matching genOpts{} object.
+
+    This is encapsulated in a function so it can be profiled and/or timed.
+    The args parameter is a parsed argument object containing the following
+    fields that are used:
+
+    - target - target to generate
+    - directory - directory to generate it in
+    - protect - True if re-inclusion wrappers should be created
+    - extensions - list of additional extensions to include in generated interfaces"""
+    # Create generator options with specified parameters
+    makeGenOpts(args)
+
+    if args.target in genOpts:
+        createGenerator = genOpts[args.target][0]
+        options = genOpts[args.target][1]
+
+        if not args.quiet:
+            write('* Building', options.filename, file=sys.stderr)
+            write('* options.versions          =', options.versions, file=sys.stderr)
+            write('* options.emitversions      =', options.emitversions, file=sys.stderr)
+            write('* options.defaultExtensions =', options.defaultExtensions, file=sys.stderr)
+            write('* options.addExtensions     =', options.addExtensions, file=sys.stderr)
+            write('* options.removeExtensions  =', options.removeExtensions, file=sys.stderr)
+            write('* options.emitExtensions    =', options.emitExtensions, file=sys.stderr)
+
+        startTimer(args.time)
+        gen = createGenerator(errFile=errWarn,
+                              warnFile=errWarn,
+                              diagFile=diag)
+        reg.setGenerator(gen)
+        reg.apiGen(options)
+
+        if not args.quiet:
+            write('* Generated', options.filename, file=sys.stderr)
+        endTimer(args.time, '* Time to generate ' + options.filename + ' =')
+    else:
+        write('No generator options for unknown target:',
+              args.target, file=sys.stderr)
+
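+# Illustrative sketch of driving genTarget() programmatically rather than via
+# the command line. It assumes the module-level 'reg', 'errWarn', and 'diag'
+# objects created in the __main__ block below have already been set up; the
+# field values shown are examples only.
+#
+#   import argparse
+#   args = argparse.Namespace(
+#       target='vulkan_core.h', directory='.', protect=True,
+#       feature=[], extension=[], removeExtensions=[], emitExtensions=[],
+#       defaultExtensions='vulkan', quiet=True, time=False)
+#   genTarget(args)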
+
+# -feature name
+# -extension name
+# For both, "name" may be a single name, or a space-separated list
+# of names, or a regular expression.
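+#
+# Example invocations (illustrative; the registry path and output directory
+# are assumptions):
+#
+#   python3 genvk.py -registry vk.xml -o include/vulkan vulkan_core.h
+#   python3 genvk.py -extension 'VK_KHR_.*' -o out vulkan_core.h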
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+
+    parser.add_argument('-defaultExtensions', action='store',
+                        default='vulkan',
+                        help='Specify a single class of extensions to add to targets')
+    parser.add_argument('-extension', action='append',
+                        default=[],
+                        help='Specify an extension or extensions to add to targets')
+    parser.add_argument('-removeExtensions', action='append',
+                        default=[],
+                        help='Specify an extension or extensions to remove from targets')
+    parser.add_argument('-emitExtensions', action='append',
+                        default=[],
+                        help='Specify an extension or extensions to emit in targets')
+    parser.add_argument('-feature', action='append',
+                        default=[],
+                        help='Specify a core API feature name or names to add to targets')
+    parser.add_argument('-debug', action='store_true',
+                        help='Enable debugging')
+    parser.add_argument('-dump', action='store_true',
+                        help='Enable dump to stderr')
+    parser.add_argument('-diagfile', action='store',
+                        default=None,
+                        help='Write diagnostics to specified file')
+    parser.add_argument('-errfile', action='store',
+                        default=None,
+                        help='Write errors and warnings to specified file instead of stderr')
+    parser.add_argument('-noprotect', dest='protect', action='store_false',
+                        help='Disable inclusion protection in output headers')
+    parser.add_argument('-profile', action='store_true',
+                        help='Enable profiling')
+    parser.add_argument('-registry', action='store',
+                        default='vk.xml',
+                        help='Use specified registry file instead of vk.xml')
+    parser.add_argument('-time', action='store_true',
+                        help='Enable timing')
+    parser.add_argument('-validate', action='store_true',
+                        help='Enable group validation')
+    parser.add_argument('-o', action='store', dest='directory',
+                        default='.',
+                        help='Create target and related files in specified directory')
+    parser.add_argument('target', metavar='target', nargs='?',
+                        help='Specify target')
+    parser.add_argument('-quiet', action='store_true', default=True,
+                        help='Suppress script output during normal execution.')
+    parser.add_argument('-verbose', action='store_false', dest='quiet', default=True,
+                        help='Enable script output during normal execution.')
+
+    args = parser.parse_args()
+
+    # This splits arguments which are space-separated lists
+    args.feature = [name for arg in args.feature for name in arg.split()]
+    args.extension = [name for arg in args.extension for name in arg.split()]
+
+    # Load & parse registry
+    reg = Registry()
+
+    startTimer(args.time)
+    tree = etree.parse(args.registry)
+    endTimer(args.time, '* Time to make ElementTree =')
+
+    if args.debug:
+        pdb.run('reg.loadElementTree(tree)')
+    else:
+        startTimer(args.time)
+        reg.loadElementTree(tree)
+        endTimer(args.time, '* Time to parse ElementTree =')
+
+    if args.validate:
+        reg.validateGroups()
+
+    if args.dump:
+        write('* Dumping registry to regdump.txt', file=sys.stderr)
+        reg.dumpReg(filehandle=open('regdump.txt', 'w', encoding='utf-8'))
+
+    # create error/warning & diagnostic files
+    if args.errfile:
+        errWarn = open(args.errfile, 'w', encoding='utf-8')
+    else:
+        errWarn = sys.stderr
+
+    if args.diagfile:
+        diag = open(args.diagfile, 'w', encoding='utf-8')
+    else:
+        diag = None
+
+    if args.debug:
+        pdb.run('genTarget(args)')
+    elif args.profile:
+        import cProfile
+        import pstats
+        cProfile.run('genTarget(args)', 'profile.txt')
+        p = pstats.Stats('profile.txt')
+        p.strip_dirs().sort_stats('time').print_stats(50)
+    else:
+        genTarget(args)
diff --git a/src/third_party/vulkan-headers/src/registry/reg.py b/src/third_party/vulkan-headers/src/registry/reg.py
new file mode 100755
index 0000000..d684abc
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/registry/reg.py
@@ -0,0 +1,1210 @@
+#!/usr/bin/python3 -i
+#
+# Copyright (c) 2013-2019 The Khronos Group Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Types and classes for manipulating an API registry."""
+
+import copy
+import re
+import sys
+import xml.etree.ElementTree as etree
+from collections import defaultdict, namedtuple
+from generator import OutputGenerator, write
+
+def matchAPIProfile(api, profile, elem):
+    """Return whether an API and profile
+    being generated matches an element's profile
+
+    - api - string naming the API to match
+    - profile - string naming the profile to match
+    - elem - Element which (may) have 'api' and 'profile'
+      attributes to match to.
+
+    If an attribute is not present in the Element, the corresponding API
+      or profile always matches.
+
+    Otherwise, the attribute must exactly match the API or profile.
+
+    Thus, if 'profile' = core:
+
+    - `<remove>`  with no attribute will match
+    - `<remove profile="core">` will match
+    - `<remove profile="compatibility">` will not match
+
+    Possible match conditions:
+
+    ```
+      Requested   Element
+      Profile     Profile
+      ---------   --------
+      None        None        Always matches
+      'string'    None        Always matches
+      None        'string'    Does not match. Can't generate multiple APIs
+                              or profiles, so if an API/profile constraint
+                              is present, it must be asked for explicitly.
+      'string'    'string'    Strings must match
+    ```
+
+    ** In the future, we will allow regexes for the attributes,
+    not just strings, so that `api="^(gl|gles2)"` will match. Even
+    this isn't really quite enough; we might prefer something
+    like `"gl(core)|gles1(common-lite)"`."""
+    # Match 'api', if present
+    elem_api = elem.get('api')
+    if elem_api:
+        if api is None:
+            raise UserWarning("No API requested, but 'api' attribute is present with value '"
+                              + elem_api + "'")
+        elif api != elem_api:
+            # Requested API doesn't match attribute
+            return False
+    elem_profile = elem.get('profile')
+    if elem_profile:
+        if profile is None:
+            raise UserWarning("No profile requested, but 'profile' attribute is present with value '"
+                              + elem_profile + "'")
+        elif profile != elem_profile:
+            # Requested profile doesn't match attribute
+            return False
+    return True
+
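+# Illustrative sketch of the match rules above; the element fragments are
+# assumptions for demonstration only:
+#
+#   elem = etree.fromstring('<remove profile="core"/>')
+#   matchAPIProfile('vulkan', 'core', elem)           # True
+#   matchAPIProfile('vulkan', 'compatibility', elem)  # False
+#   matchAPIProfile('vulkan', None, elem)             # raises UserWarning
+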
+# def printKeys(msg, elem):
+#     """Print all the keys in an Element - only for diagnostics"""
+#     print('printKeys:', msg, file=sys.stderr)
+#     for key in elem.keys():
+#         print('    {} -> {}'.format(key, elem.get(key)), file=sys.stderr)
+
+class BaseInfo:
+    """Base class for information about a registry feature
+    (type/group/enum/command/API/extension).
+
+    Represents the state of a registry feature, used during API generation.
+    """
+
+    def __init__(self, elem):
+        self.required = False
+        """should this feature be defined during header generation
+        (has it been removed by a profile or version)?"""
+
+        self.declared = False
+        "has this feature been defined already?"
+
+        self.elem = elem
+        "etree Element for this feature"
+
+    def resetState(self):
+        """Reset required/declared to initial values. Used
+        prior to generating a new API interface."""
+        self.required = False
+        self.declared = False
+
+    def compareKeys(self, info, key, required = False):
+        """Return True if self.elem and info.elem have the same attribute
+           value for key.
+           If 'required' is not True, also returns True if neither element
+           has an attribute value for key."""
+
+        if required and key not in self.elem.keys():
+            return False
+        return self.elem.get(key) == info.elem.get(key)
+
+    def compareElem(self, info, infoName):
+        """Return True if self.elem and info.elem have the same definition.
+        info - the other object
+        infoName - 'type' / 'group' / 'enum' / 'command' / 'feature' /
+                   'extension'"""
+
+        if infoName == 'enum':
+            if self.compareKeys(info, 'extends'):
+                # Either both extend the same type, or no type
+                if (self.compareKeys(info, 'value', required = True) or
+                    self.compareKeys(info, 'bitpos', required = True)):
+                    # If both specify the same value or bit position,
+                    # they're equal
+                    return True
+                elif (self.compareKeys(info, 'extends') and
+                      self.compareKeys(info, 'extnumber') and
+                      self.compareKeys(info, 'offset') and
+                      self.compareKeys(info, 'dir')):
+                    # If both specify the same relative offset, they're equal
+                    return True
+                else:
+                    return False
+            else:
+                # The same enum can't extend two different types
+                return False
+        else:
+            # Non-<enum>s should never be redefined
+            return False
+
+
+class TypeInfo(BaseInfo):
+    """Registry information about a type. No additional state
+      beyond BaseInfo is required."""
+
+    def __init__(self, elem):
+        BaseInfo.__init__(self, elem)
+        self.additionalValidity = []
+        self.removedValidity = []
+
+    def getMembers(self):
+        """Get a collection of all member elements for this type, if any."""
+        return self.elem.findall('member')
+
+    def resetState(self):
+        BaseInfo.resetState(self)
+        self.additionalValidity = []
+        self.removedValidity = []
+
+
+class GroupInfo(BaseInfo):
+    """Registry information about a group of related enums
+    in an <enums> block, generally corresponding to a C "enum" type."""
+
+    def __init__(self, elem):
+        BaseInfo.__init__(self, elem)
+
+
+class EnumInfo(BaseInfo):
+    """Registry information about an enum"""
+
+    def __init__(self, elem):
+        BaseInfo.__init__(self, elem)
+        self.type = elem.get('type')
+        """numeric type of the value of the <enum> tag
+        ( '' for GLint, 'u' for GLuint, 'ull' for GLuint64 )"""
+        if self.type is None:
+            self.type = ''
+
+
+class CmdInfo(BaseInfo):
+    """Registry information about a command"""
+
+    def __init__(self, elem):
+        BaseInfo.__init__(self, elem)
+        self.additionalValidity = []
+        self.removedValidity = []
+
+    def getParams(self):
+        """Get a collection of all param elements for this command, if any."""
+        return self.elem.findall('param')
+
+    def resetState(self):
+        BaseInfo.resetState(self)
+        self.additionalValidity = []
+        self.removedValidity = []
+
+class FeatureInfo(BaseInfo):
+    """Registry information about an API <feature>
+    or <extension>."""
+
+    def __init__(self, elem):
+        BaseInfo.__init__(self, elem)
+        self.name = elem.get('name')
+        "feature name string (e.g. 'VK_KHR_surface')"
+
+        self.emit = False
+        "has this feature been defined already?"
+
+        self.sortorder = int(elem.get('sortorder', 0))
+        """explicit numeric sort key within feature and extension groups.
+        Defaults to 0."""
+
+        if elem.tag == 'feature':
+            # Element category (vendor) is meaningless for <feature>
+            self.category = 'VERSION'
+            "category, e.g. VERSION or khr/vendor tag"
+
+            self.version = elem.get('name')
+            """feature name string"""
+
+            self.versionNumber = elem.get('number')
+            """versionNumber - API version number, taken from the 'number'
+               attribute of <feature>. Extensions do not have API version
+               numbers and are assigned number 0."""
+
+            self.number = "0"
+            self.supported = None
+        else:
+            # Extract vendor portion of VK_<vendor>_<name>
+            self.category = self.name.split('_', 2)[1]
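+            # e.g. (illustration) 'VK_KHR_surface' -> category 'KHR'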
+            self.version = "0"
+            self.versionNumber = "0"
+            self.number = elem.get('number')
+            """extension number, used for ordering and for assigning
+            enumerant offsets. <feature> features do not have extension
+            numbers and are assigned number 0."""
+
+            # If there's no 'number' attribute, use 0, so sorting works
+            if self.number is None:
+                self.number = 0
+            self.supported = elem.get('supported')
+
+
+class Registry:
+    """Object representing an API registry, loaded from an XML file."""
+
+    def __init__(self):
+        self.tree = None
+        "ElementTree containing the root `<registry>`"
+
+        self.typedict = {}
+        "dictionary of TypeInfo objects keyed by type name"
+
+        self.groupdict = {}
+        "dictionary of GroupInfo objects keyed by group name"
+
+        self.enumdict = {}
+        "dictionary of EnumInfo objects keyed by enum name"
+
+        self.cmddict = {}
+        "dictionary of CmdInfo objects keyed by command name"
+
+        self.apidict = {}
+        "dictionary of FeatureInfo objects for `<feature>` elements keyed by API name"
+
+        self.extensions = []
+        "list of `<extension>` Elements"
+
+        self.extdict = {}
+        "dictionary of FeatureInfo objects for `<extension>` elements keyed by extension name"
+
+        # A default output generator, so commands prior to apiGen can report
+        # errors via the generator object.
+        self.gen = OutputGenerator()
+        "OutputGenerator object used to write headers / messages"
+
+        self.genOpts = None
+        """GeneratorOptions object used to control which
+        features to write and how to format them"""
+
+        self.emitFeatures = False
+        """True to actually emit features for a version / extension,
+        or False to just treat them as emitted"""
+
+        self.breakPat = None
+        "regexp pattern to break on when generatng names"
+        # self.breakPat     = re.compile('VkFenceImportFlagBits.*')
+
+        self.requiredextensions = []  # Hack - can remove it after validity generator goes away
+
+        # ** Global types for automatic source generation **
+        # Length Member data
+        self.commandextensiontuple = namedtuple('commandextensiontuple',
+                                                ['command',        # The name of the command being modified
+                                                 'value',          # The value to append to the command
+                                                 'extension'])     # The name of the extension that added it
+        self.validextensionstructs = defaultdict(list)
+        self.commandextensionsuccesses = []
+        self.commandextensionerrors = []
+
+        self.filename     = None
+
+    def loadElementTree(self, tree):
+        """Load ElementTree into a Registry object and parse it."""
+        self.tree = tree
+        self.parseTree()
+
+    def loadFile(self, file):
+        """Load an API registry XML file into a Registry object and parse it"""
+        self.filename = file
+        self.tree = etree.parse(file)
+        self.parseTree()
+
+    def setGenerator(self, gen):
+        """Specify output generator object.
+
+        `None` restores the default generator."""
+        self.gen = gen
+        self.gen.setRegistry(self)
+
+    def addElementInfo(self, elem, info, infoName, dictionary):
+        """Add information about an element to the corresponding dictionary.
+
+        Intended for internal use only.
+
+        - elem - `<type>`/`<enums>`/`<enum>`/`<command>`/`<feature>`/`<extension>` Element
+        - info - corresponding {Type|Group|Enum|Cmd|Feature}Info object
+        - infoName - 'type' / 'group' / 'enum' / 'command' / 'feature' / 'extension'
+        - dictionary - self.{type|group|enum|cmd|api|ext}dict
+
+        If the Element has an 'api' attribute, the dictionary key is the
+        tuple (name,api). If not, the key is the name. 'name' is an
+        attribute of the Element"""
+        # self.gen.logMsg('diag', 'Adding ElementInfo.required =',
+        #     info.required, 'name =', elem.get('name'))
+        api = elem.get('api')
+        if api:
+            key = (elem.get('name'), api)
+        else:
+            key = elem.get('name')
+        if key in dictionary:
+            if not dictionary[key].compareElem(info, infoName):
+                self.gen.logMsg('warn', 'Attempt to redefine', key,
+                                '(this should not happen)')
+                # printKeys('old element', dictionary[key].elem)
+                # printKeys('new element', info.elem)
+            else:
+                # Benign redefinition - intentional cases exist.
+                pass
+        else:
+            dictionary[key] = info
+
+    def lookupElementInfo(self, fname, dictionary):
+        """Find a {Type|Enum|Cmd}Info object by name.
+
+        Intended for internal use only.
+
+        If an object qualified by API name exists, use that.
+
+        - fname - name of type / enum / command
+        - dictionary - self.{type|enum|cmd}dict"""
+        key = (fname, self.genOpts.apiname)
+        if key in dictionary:
+            # self.gen.logMsg('diag', 'Found API-specific element for feature', fname)
+            return dictionary[key]
+        if fname in dictionary:
+            # self.gen.logMsg('diag', 'Found generic element for feature', fname)
+            return dictionary[fname]
+
+        return None
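+
+        # Illustrative sketch of the key scheme shared with addElementInfo()
+        # ('Foo' is a placeholder name):
+        #   <type name="Foo"/>              -> typedict['Foo']
+        #   <type name="Foo" api="vulkan"/> -> typedict[('Foo', 'vulkan')]
+        # The API-qualified entry, if present, takes precedence here.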
+
+    def breakOnName(self, regexp):
+        """Specify a feature name regexp to break on when generating features."""
+        self.breakPat = re.compile(regexp)
+
+    def parseTree(self):
+        """Parse the registry Element, once created"""
+        # This must be the Element for the root <registry>
+        self.reg = self.tree.getroot()
+
+        # Create dictionary of registry types from toplevel <types> tags
+        # and add 'name' attribute to each <type> tag (where missing)
+        # based on its <name> element.
+        #
+        # There's usually one <types> block; more are OK
+        # Required <type> attributes: 'name' or nested <name> tag contents
+        self.typedict = {}
+        for type_elem in self.reg.findall('types/type'):
+            # If the <type> doesn't already have a 'name' attribute, set
+            # it from contents of its <name> tag.
+            if type_elem.get('name') is None:
+                type_elem.set('name', type_elem.find('name').text)
+            self.addElementInfo(type_elem, TypeInfo(type_elem), 'type', self.typedict)
+
+        # Create dictionary of registry enum groups from <enums> tags.
+        #
+        # Required <enums> attributes: 'name'. If no name is given, one is
+        # generated, but that group can't be identified and turned into an
+        # enum type definition - it's just a container for <enum> tags.
+        self.groupdict = {}
+        for group in self.reg.findall('enums'):
+            self.addElementInfo(group, GroupInfo(group), 'group', self.groupdict)
+
+        # Create dictionary of registry enums from <enum> tags
+        #
+        # <enums> tags usually define different namespaces for the values
+        #   defined in those tags, but the actual names all share the
+        #   same dictionary.
+        # Required <enum> attributes: 'name', 'value'
+        # For <enums> containers which have type="enum" or type="bitmask",
+        # all contained <enum>s are tagged as required. This is a stopgap until
+        # a better scheme for tagging core and extension enums is created.
+        self.enumdict = {}
+        for enums in self.reg.findall('enums'):
+            required = (enums.get('type') is not None)
+            for enum in enums.findall('enum'):
+                enumInfo = EnumInfo(enum)
+                enumInfo.required = required
+                self.addElementInfo(enum, enumInfo, 'enum', self.enumdict)
+
+        # Create dictionary of registry commands from <command> tags
+        # and add 'name' attribute to each <command> tag (where missing)
+        # based on its <proto><name> element.
+        #
+        # There's usually only one <commands> block; more are OK.
+        # Required <command> attributes: 'name' or <proto><name> tag contents
+        self.cmddict = {}
+        # List of commands which alias others. Contains
+        #   [ command name, alias name, element ]
+        # for each alias
+        cmdAlias = []
+        for cmd in self.reg.findall('commands/command'):
+            # If the <command> doesn't already have a 'name' attribute, set
+            # it from contents of its <proto><name> tag.
+            name = cmd.get('name')
+            if name is None:
+                # Element.set() returns None, so extract the name first and
+                # then set it as an attribute.
+                name = cmd.find('proto/name').text
+                cmd.set('name', name)
+            ci = CmdInfo(cmd)
+            self.addElementInfo(cmd, ci, 'command', self.cmddict)
+            alias = cmd.get('alias')
+            if alias:
+                cmdAlias.append([name, alias, cmd])
+
+        # Now loop over aliases, injecting a copy of the aliased command's
+        # Element with the aliased prototype name replaced with the command
+        # name - if it exists.
+        for (name, alias, cmd) in cmdAlias:
+            if alias in self.cmddict:
+                # @ pdb.set_trace()
+                aliasInfo = self.cmddict[alias]
+                cmdElem = copy.deepcopy(aliasInfo.elem)
+                cmdElem.find('proto/name').text = name
+                cmdElem.set('name', name)
+                cmdElem.set('alias', alias)
+                ci = CmdInfo(cmdElem)
+                # Replace the dictionary entry for the CmdInfo element
+                self.cmddict[name] = ci
+
+                # @  newString = etree.tostring(base, encoding="unicode").replace(aliasValue, aliasName)
+                # @elem.append(etree.fromstring(replacement))
+            else:
+                self.gen.logMsg('warn', 'No matching <command> found for command',
+                                cmd.get('name'), 'alias', alias)
+
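+        # Illustrative sketch of the alias expansion above (command names are
+        # real Vulkan examples, used here only for illustration): given
+        #   <command name="vkGetBufferMemoryRequirements2KHR"
+        #            alias="vkGetBufferMemoryRequirements2"/>
+        # the <command> element for vkGetBufferMemoryRequirements2 is
+        # deep-copied, its <proto><name> is rewritten to the KHR name, and the
+        # copy is stored in cmddict under the KHR name.
+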
+        # Create dictionaries of API and extension interfaces
+        #   from toplevel <api> and <extension> tags.
+        self.apidict = {}
+        for feature in self.reg.findall('feature'):
+            featureInfo = FeatureInfo(feature)
+            self.addElementInfo(feature, featureInfo, 'feature', self.apidict)
+
+            # Add additional enums defined only in <feature> tags
+            # to the corresponding core type.
+            # When seen here, the <enum> element, processed to contain the
+            # numeric enum value, is added to the corresponding <enums>
+            # element, as well as adding to the enum dictionary. It is
+            # *removed* from the <require> element it is introduced in.
+            # Not doing this will cause spurious genEnum()
+            # calls to be made in output generation, and it's easier
+            # to handle here than in genEnum().
+            #
+            # In lxml.etree, an Element can have only one parent, so the
+            # append() operation also removes the element. But in Python's
+            # ElementTree package, an Element can have multiple parents. So
+            # it must be explicitly removed from the <require> tag, leading
+            # to the nested loop traversal of <require>/<enum> elements
+            # below.
+            #
+            # This code also adds a 'version' attribute containing the
+            # api version.
+            #
+            # For <enum> tags which are actually just constants, if there's
+            # no 'extends' tag but there is a 'value' or 'bitpos' tag, just
+            # add an EnumInfo record to the dictionary. That works because
+            # output generation of constants is purely dependency-based, and
+            # doesn't need to iterate through the XML tags.
+            for elem in feature.findall('require'):
+                for enum in elem.findall('enum'):
+                    addEnumInfo = False
+                    groupName = enum.get('extends')
+                    if groupName is not None:
+                        # self.gen.logMsg('diag', 'Found extension enum',
+                        #     enum.get('name'))
+                        # Add version number attribute to the <enum> element
+                        enum.set('version', featureInfo.version)
+                        # Look up the GroupInfo with matching groupName
+                        if groupName in self.groupdict:
+                            # self.gen.logMsg('diag', 'Matching group',
+                            #     groupName, 'found, adding element...')
+                            gi = self.groupdict[groupName]
+                            gi.elem.append(enum)
+                            # Remove element from parent <require> tag
+                            # This should be a no-op in lxml.etree
+                            try:
+                                elem.remove(enum)
+                            except ValueError:
+                                # Must be lxml.etree
+                                pass
+                        else:
+                            self.gen.logMsg('warn', 'NO matching group',
+                                            groupName, 'for enum', enum.get('name'), 'found.')
+                        addEnumInfo = True
+                    elif enum.get('value') or enum.get('bitpos') or enum.get('alias'):
+                        # self.gen.logMsg('diag', 'Adding extension constant "enum"',
+                        #     enum.get('name'))
+                        addEnumInfo = True
+                    if addEnumInfo:
+                        enumInfo = EnumInfo(enum)
+                        self.addElementInfo(enum, enumInfo, 'enum', self.enumdict)
+
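+        # Illustrative sketch of the relocation described above; the names are
+        # assumptions for demonstration only. Given
+        #
+        #   <feature api="vulkan" name="VK_VERSION_1_1" number="1.1">
+        #     <require>
+        #       <enum extends="VkStructureType" name="VK_STRUCTURE_TYPE_EXAMPLE" .../>
+        #     </require>
+        #   </feature>
+        #
+        # the <enum> gains version="VK_VERSION_1_1", is appended to the
+        # <enums name="VkStructureType"> group, and is removed from <require>.
+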
+        self.extensions = self.reg.findall('extensions/extension')
+        self.extdict = {}
+        for feature in self.extensions:
+            featureInfo = FeatureInfo(feature)
+            self.addElementInfo(feature, featureInfo, 'extension', self.extdict)
+
+            # Add additional enums defined only in <extension> tags
+            # to the corresponding core type.
+            # Algorithm matches that of enums in a "feature" tag as above.
+            #
+            # This code also adds an 'extnumber' attribute containing the
+            # extension number, used for enumerant value calculation.
+            for elem in feature.findall('require'):
+                for enum in elem.findall('enum'):
+                    addEnumInfo = False
+                    groupName = enum.get('extends')
+                    if groupName is not None:
+                        # self.gen.logMsg('diag', 'Found extension enum',
+                        #     enum.get('name'))
+
+                        # Add <extension> block's extension number attribute to
+                        # the <enum> element unless specified explicitly, such
+                        # as when redefining an enum in another extension.
+                        extnumber = enum.get('extnumber')
+                        if not extnumber:
+                            enum.set('extnumber', featureInfo.number)
+
+                        enum.set('extname', featureInfo.name)
+                        enum.set('supported', featureInfo.supported)
+                        # Look up the GroupInfo with matching groupName
+                        if groupName in self.groupdict:
+                            # self.gen.logMsg('diag', 'Matching group',
+                            #     groupName, 'found, adding element...')
+                            gi = self.groupdict[groupName]
+                            gi.elem.append(enum)
+                            # Remove element from parent <require> tag
+                            # This should be a no-op in lxml.etree
+                            try:
+                                elem.remove(enum)
+                            except ValueError:
+                                # Must be lxml.etree
+                                pass
+                        else:
+                            self.gen.logMsg('warn', 'NO matching group',
+                                            groupName, 'for enum', enum.get('name'), 'found.')
+                        addEnumInfo = True
+                    elif enum.get('value') or enum.get('bitpos') or enum.get('alias'):
+                        # self.gen.logMsg('diag', 'Adding extension constant "enum"',
+                        #     enum.get('name'))
+                        addEnumInfo = True
+                    if addEnumInfo:
+                        enumInfo = EnumInfo(enum)
+                        self.addElementInfo(enum, enumInfo, 'enum', self.enumdict)
+
+        # Construct a "validextensionstructs" list for parent structures
+        # based on "structextends" attributes in child structures
+        disabled_types = []
+        for disabled_ext in self.reg.findall('extensions/extension[@supported="disabled"]'):
+            for type_elem in disabled_ext.findall("*/type"):
+                disabled_types.append(type_elem.get('name'))
+        for type_elem in self.reg.findall('types/type'):
+            if type_elem.get('name') not in disabled_types:
+                parentStructs = type_elem.get('structextends')
+                if parentStructs is not None:
+                    for parent in parentStructs.split(','):
+                        # self.gen.logMsg('diag', type.get('name'), 'extends', parent)
+                        self.validextensionstructs[parent].append(type_elem.get('name'))
+        # Sort the lists so they don't depend on the XML order
+        for parent in self.validextensionstructs:
+            self.validextensionstructs[parent].sort()
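+
+        # Illustrative sketch (struct names are assumptions): a child struct
+        # declared with structextends="VkDeviceCreateInfo", e.g.
+        #   <type category="struct" name="VkFooFeaturesEXT"
+        #         structextends="VkDeviceCreateInfo"> ... </type>
+        # yields validextensionstructs['VkDeviceCreateInfo'] == ['VkFooFeaturesEXT'],
+        # unless the defining extension is marked supported="disabled".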
+
+    def dumpReg(self, maxlen=120, filehandle=sys.stdout):
+        """Dump all the dictionaries constructed from the Registry object.
+
+        Diagnostic to dump the dictionaries to specified file handle (default stdout).
+        Truncates type / enum / command elements to maxlen characters (default 120)"""
+        write('***************************************', file=filehandle)
+        write('    ** Dumping Registry contents **',     file=filehandle)
+        write('***************************************', file=filehandle)
+        write('// Types', file=filehandle)
+        for name in self.typedict:
+            tobj = self.typedict[name]
+            write('    Type', name, '->', etree.tostring(tobj.elem)[0:maxlen], file=filehandle)
+        write('// Groups', file=filehandle)
+        for name in self.groupdict:
+            gobj = self.groupdict[name]
+            write('    Group', name, '->', etree.tostring(gobj.elem)[0:maxlen], file=filehandle)
+        write('// Enums', file=filehandle)
+        for name in self.enumdict:
+            eobj = self.enumdict[name]
+            write('    Enum', name, '->', etree.tostring(eobj.elem)[0:maxlen], file=filehandle)
+        write('// Commands', file=filehandle)
+        for name in self.cmddict:
+            cobj = self.cmddict[name]
+            write('    Command', name, '->', etree.tostring(cobj.elem)[0:maxlen], file=filehandle)
+        write('// APIs', file=filehandle)
+        for key in self.apidict:
+            write('    API Version ', key, '->',
+                  etree.tostring(self.apidict[key].elem)[0:maxlen], file=filehandle)
+        write('// Extensions', file=filehandle)
+        for key in self.extdict:
+            write('    Extension', key, '->',
+                  etree.tostring(self.extdict[key].elem)[0:maxlen], file=filehandle)
+
+    def markTypeRequired(self, typename, required):
+        """Require (along with its dependencies) or remove (but not its dependencies) a type.
+
+        - typename - name of type
+        - required - boolean (to tag features as required or not)
+        """
+        self.gen.logMsg('diag', 'tagging type:', typename, '-> required =', required)
+        # Get TypeInfo object for <type> tag corresponding to typename
+        typeinfo = self.lookupElementInfo(typename, self.typedict)
+        if typeinfo is not None:
+            if required:
+                # Tag type dependencies in 'alias' and 'requires' attributes as
+                # required. This DOES NOT un-tag dependencies in a <remove>
+                # tag. See comments in markRequired() below for the reason.
+                for attrib_name in ['requires', 'alias']:
+                    depname = typeinfo.elem.get(attrib_name)
+                    if depname:
+                        self.gen.logMsg('diag', 'Generating dependent type',
+                                        depname, 'for', attrib_name, 'type', typename)
+                        # Don't recurse on self-referential structures.
+                        if typename != depname:
+                            self.markTypeRequired(depname, required)
+                        else:
+                            self.gen.logMsg('diag', 'type', typename, 'is self-referential')
+                # Tag types used in defining this type (e.g. in nested
+                # <type> tags)
+                # Look for <type> in entire <command> tree,
+                # not just immediate children
+                for subtype in typeinfo.elem.findall('.//type'):
+                    self.gen.logMsg('diag', 'markRequired: type requires dependent <type>', subtype.text)
+                    if typename != subtype.text:
+                        self.markTypeRequired(subtype.text, required)
+                    else:
+                        self.gen.logMsg('diag', 'type', typename, 'is self-referential')
+                # Tag enums used in defining this type, for example in
+                #   <member><name>member</name>[<enum>MEMBER_SIZE</enum>]</member>
+                for subenum in typeinfo.elem.findall('.//enum'):
+                    self.gen.logMsg('diag', 'markRequired: type requires dependent <enum>', subenum.text)
+                    self.markEnumRequired(subenum.text, required)
+                # Tag type dependency in 'bitvalues' attributes as
+                # required. This ensures that the bit values for a flag
+                # are emitted
+                depType = typeinfo.elem.get('bitvalues')
+                if depType:
+                    self.gen.logMsg('diag', 'Generating bitflag type',
+                                    depType, 'for type', typename)
+                    self.markTypeRequired(depType, required)
+                    group = self.lookupElementInfo(depType, self.groupdict)
+                    if group is not None:
+                        group.flagType = typeinfo
+
+            typeinfo.required = required
+        elif '.h' not in typename:
+            self.gen.logMsg('warn', 'type:', typename, 'IS NOT DEFINED')
+
+    def markEnumRequired(self, enumname, required):
+        """Mark an enum as required or not.
+
+        - enumname - name of enum
+        - required - boolean (to tag features as required or not)"""
+        self.gen.logMsg('diag', 'tagging enum:', enumname, '-> required =', required)
+        enum = self.lookupElementInfo(enumname, self.enumdict)
+        if enum is not None:
+            enum.required = required
+            # Tag enum dependencies in 'alias' attribute as required
+            depname = enum.elem.get('alias')
+            if depname:
+                self.gen.logMsg('diag', 'Generating dependent enum',
+                                depname, 'for alias', enumname, 'required =', enum.required)
+                self.markEnumRequired(depname, required)
+        else:
+            self.gen.logMsg('warn', 'enum:', enumname, 'IS NOT DEFINED')
+
+    def markCmdRequired(self, cmdname, required):
+        """Mark a command as required or not.
+
+        - cmdname - name of command
+        - required - boolean (to tag features as required or not)"""
+        self.gen.logMsg('diag', 'tagging command:', cmdname, '-> required =', required)
+        cmd = self.lookupElementInfo(cmdname, self.cmddict)
+        if cmd is not None:
+            cmd.required = required
+            # Tag command dependencies in 'alias' attribute as required
+            depname = cmd.elem.get('alias')
+            if depname:
+                self.gen.logMsg('diag', 'Generating dependent command',
+                                depname, 'for alias', cmdname)
+                self.markCmdRequired(depname, required)
+            # Tag all parameter types of this command as required.
+            # This DOES NOT remove types of commands in a <remove>
+            # tag, because many other commands may use the same type.
+            # We could be more clever and reference count types,
+            # instead of using a boolean.
+            if required:
+                # Look for <type> in entire <command> tree,
+                # not just immediate children
+                for type_elem in cmd.elem.findall('.//type'):
+                    self.gen.logMsg('diag', 'markRequired: command implicitly requires dependent type', type_elem.text)
+                    self.markTypeRequired(type_elem.text, required)
+        else:
+            self.gen.logMsg('warn', 'command:', cmdname, 'IS NOT DEFINED')
+
+    def markRequired(self, featurename, feature, required):
+        """Require or remove features specified in the Element.
+
+        - featurename - name of the feature
+        - feature - Element for `<require>` or `<remove>` tag
+        - required - boolean (to tag features as required or not)"""
+        self.gen.logMsg('diag', 'markRequired (feature = <too long to print>, required =', required, ')')
+
+        # Loop over types, enums, and commands in the tag
+        # @@ It would be possible to respect 'api' and 'profile' attributes
+        #  in individual features, but that's not done yet.
+        for typeElem in feature.findall('type'):
+            self.markTypeRequired(typeElem.get('name'), required)
+        for enumElem in feature.findall('enum'):
+            self.markEnumRequired(enumElem.get('name'), required)
+        for cmdElem in feature.findall('command'):
+            self.markCmdRequired(cmdElem.get('name'), required)
+
+        # Extensions may need to extend existing commands or other items in the future.
+        # So, look for extend tags.
+        for extendElem in feature.findall('extend'):
+            extendType = extendElem.get('type')
+            if extendType == 'command':
+                commandName = extendElem.get('name')
+                successExtends = extendElem.get('successcodes')
+                if successExtends is not None:
+                    for success in successExtends.split(','):
+                        self.commandextensionsuccesses.append(self.commandextensiontuple(command=commandName,
+                                                                                         value=success,
+                                                                                         extension=featurename))
+                errorExtends = extendElem.get('errorcodes')
+                if errorExtends is not None:
+                    for error in errorExtends.split(','):
+                        self.commandextensionerrors.append(self.commandextensiontuple(command=commandName,
+                                                                                      value=error,
+                                                                                      extension=featurename))
+            else:
+                self.gen.logMsg('warn', 'extend type:', extendType, 'IS NOT SUPPORTED')
+
+    def requireAndRemoveFeatures(self, interface, featurename, api, profile):
+        """Process `<require>` and `<remove>` tags for a `<version>` or `<extension>`.
+
+        - interface - Element for `<version>` or `<extension>`, containing
+          `<require>` and `<remove>` tags
+        - featurename - name of the feature
+        - api - string specifying API name being generated
+        - profile - string specifying API profile being generated"""
+        # <require> marks things that are required by this version/profile
+        for feature in interface.findall('require'):
+            if matchAPIProfile(api, profile, feature):
+                self.markRequired(featurename, feature, True)
+        # <remove> marks things that are removed by this version/profile
+        for feature in interface.findall('remove'):
+            if matchAPIProfile(api, profile, feature):
+                self.markRequired(featurename, feature, False)
+
+    def assignAdditionalValidity(self, interface, api, profile):
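+        """Add and remove additional validity (usage) information.
+
+        For <require>/<remove> tags matching the given API and profile, copies
+        their <usage> tags into the additionalValidity / removedValidity lists
+        of the named commands and structs."""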
+        # Loop over all usage inside all <require> tags.
+        for feature in interface.findall('require'):
+            if matchAPIProfile(api, profile, feature):
+                for v in feature.findall('usage'):
+                    if v.get('command'):
+                        self.cmddict[v.get('command')].additionalValidity.append(copy.deepcopy(v))
+                    if v.get('struct'):
+                        self.typedict[v.get('struct')].additionalValidity.append(copy.deepcopy(v))
+
+        # Loop over all usage inside all <remove> tags.
+        for feature in interface.findall('remove'):
+            if matchAPIProfile(api, profile, feature):
+                for v in feature.findall('usage'):
+                    if v.get('command'):
+                        self.cmddict[v.get('command')].removedValidity.append(copy.deepcopy(v))
+                    if v.get('struct'):
+                        self.typedict[v.get('struct')].removedValidity.append(copy.deepcopy(v))
+
+    def generateFeature(self, fname, ftype, dictionary):
+        """Generate a single type / enum group / enum / command,
+        and all its dependencies as needed.
+
+        - fname - name of feature (`<type>`/`<enum>`/`<command>`)
+        - ftype - type of feature, 'type' | 'enum' | 'command'
+        - dictionary - of *Info objects - self.{type|enum|cmd}dict"""
+        # @ # Break to debugger on matching name pattern
+        # @ if self.breakPat and re.match(self.breakPat, fname):
+        # @    pdb.set_trace()
+
+        self.gen.logMsg('diag', 'generateFeature: generating', ftype, fname)
+        f = self.lookupElementInfo(fname, dictionary)
+        if f is None:
+            # No such feature. This is an error, but reported earlier
+            self.gen.logMsg('diag', 'No entry found for feature', fname,
+                            'returning!')
+            return
+
+        # If feature isn't required, or has already been declared, return
+        if not f.required:
+            self.gen.logMsg('diag', 'Skipping', ftype, fname, '(not required)')
+            return
+        if f.declared:
+            self.gen.logMsg('diag', 'Skipping', ftype, fname, '(already declared)')
+            return
+        # Always mark feature declared, as though actually emitted
+        f.declared = True
+
+        # Determine if this is an alias, and of what, if so
+        alias = f.elem.get('alias')
+        if alias:
+            self.gen.logMsg('diag', fname, 'is an alias of', alias)
+
+        # Pull in dependent declaration(s) of the feature.
+        # For types, there may be one type in the 'requires' attribute of
+        #   the element, one in the 'alias' attribute, and many in
+        #   embedded <type> and <enum> tags within the element.
+        # For commands, there may be many in <type> tags within the element.
+        # For enums, no dependencies are allowed (though perhaps if you
+        #   have a uint64 enum, it should require that type).
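+        # For example (names are illustrative): generating the type
+        # 'VkDeviceCreateInfo' first generates the types named in its nested
+        # <member><type> tags (VkStructureType, VkDeviceCreateFlags,
+        # VkDeviceQueueCreateInfo, ...) so the emitted header is self-contained.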
+        genProc = None
+        followupFeature = None
+        if ftype == 'type':
+            genProc = self.gen.genType
+
+            # Generate type dependencies in 'alias' and 'requires' attributes
+            if alias:
+                self.generateFeature(alias, 'type', self.typedict)
+            requires = f.elem.get('requires')
+            if requires:
+                self.gen.logMsg('diag', 'Generating required dependent type',
+                                requires)
+                self.generateFeature(requires, 'type', self.typedict)
+
+            # Generate types used in defining this type (e.g. in nested
+            # <type> tags)
+            # Look for <type> in entire <command> tree,
+            # not just immediate children
+            for subtype in f.elem.findall('.//type'):
+                self.gen.logMsg('diag', 'Generating required dependent <type>',
+                                subtype.text)
+                self.generateFeature(subtype.text, 'type', self.typedict)
+
+            # Generate enums used in defining this type, for example in
+            #   <member><name>member</name>[<enum>MEMBER_SIZE</enum>]</member>
+            for subtype in f.elem.findall('.//enum'):
+                self.gen.logMsg('diag', 'Generating required dependent <enum>',
+                                subtype.text)
+                self.generateFeature(subtype.text, 'enum', self.enumdict)
+
+            # If the type is an enum group, look up the corresponding
+            # group in the group dictionary and generate that instead.
+            if f.elem.get('category') == 'enum':
+                self.gen.logMsg('diag', 'Type', fname, 'is an enum group, so generate that instead')
+                group = self.lookupElementInfo(fname, self.groupdict)
+                if alias is not None:
+                    # An alias of another group name.
+                    # Pass to genGroup with 'alias' parameter = aliased name
+                    self.gen.logMsg('diag', 'Generating alias', fname,
+                                    'for enumerated type', alias)
+                    # Now, pass the *aliased* GroupInfo to the genGroup, but
+                    # with an additional parameter which is the alias name.
+                    genProc = self.gen.genGroup
+                    f = self.lookupElementInfo(alias, self.groupdict)
+                elif group is None:
+                    self.gen.logMsg('warn', 'Skipping enum type', fname,
+                                    ': No matching enumerant group')
+                    return
+                else:
+                    genProc = self.gen.genGroup
+                    f = group
+
+                    # @ The enum group is not ready for generation. At this
+                    # @   point, it contains all <enum> tags injected by
+                    # @   <extension> tags without any verification of whether
+                    # @   they're required or not. It may also contain
+                    # @   duplicates injected by multiple consistent
+                    # @   definitions of an <enum>.
+
+                    # @ Pass over each enum, marking its enumdict[] entry as
+                    # @ required or not. Mark aliases of enums as required,
+                    # @ too.
+
+                    enums = group.elem.findall('enum')
+
+                    self.gen.logMsg('diag', 'generateFeature: checking enums for group', fname)
+
+                    # Check for required enums, including aliases
+                    # LATER - Check for, report, and remove duplicates?
+                    enumAliases = []
+                    for elem in enums:
+                        name = elem.get('name')
+
+                        required = False
+
+                        extname = elem.get('extname')
+                        version = elem.get('version')
+                        if extname is not None:
+                            # 'supported' attribute was injected when the <enum> element was
+                            # moved into the <enums> group in Registry.parseTree()
+                            if self.genOpts.defaultExtensions == elem.get('supported'):
+                                required = True
+                            elif re.match(self.genOpts.addExtensions, extname) is not None:
+                                required = True
+                        elif version is not None:
+                            required = re.match(self.genOpts.emitversions, version) is not None
+                        else:
+                            required = True
+
+                        self.gen.logMsg('diag', '* required =', required, 'for', name)
+                        if required:
+                            # Mark this element as required (in the element, not the EnumInfo)
+                            elem.set('required', 'true')
+                            # If it's an alias, track that for later use
+                            enumAlias = elem.get('alias')
+                            if enumAlias:
+                                enumAliases.append(enumAlias)
+                    for elem in enums:
+                        name = elem.get('name')
+                        if name in enumAliases:
+                            elem.set('required', 'true')
+                            self.gen.logMsg('diag', '* also need to require alias', name)
+            if f.elem.get('category') == 'bitmask':
+                followupFeature = f.elem.get('bitvalues')
+        elif ftype == 'command':
+            # Generate command dependencies in 'alias' attribute
+            if alias:
+                self.generateFeature(alias, 'command', self.cmddict)
+
+            genProc = self.gen.genCmd
+            for type_elem in f.elem.findall('.//type'):
+                depname = type_elem.text
+                self.gen.logMsg('diag', 'Generating required parameter type',
+                                depname)
+                self.generateFeature(depname, 'type', self.typedict)
+        elif ftype == 'enum':
+            # Generate enum dependencies in 'alias' attribute
+            if alias:
+                self.generateFeature(alias, 'enum', self.enumdict)
+            genProc = self.gen.genEnum
+
+        # Actually generate the type only if emitting declarations
+        if self.emitFeatures:
+            self.gen.logMsg('diag', 'Emitting', ftype, 'decl for', fname)
+            genProc(f, fname, alias)
+        else:
+            self.gen.logMsg('diag', 'Skipping', ftype, fname,
+                            '(should not be emitted)')
+
+        if followupFeature:
+            self.gen.logMsg('diag', 'Generating required bitvalues <enum>',
+                            followupFeature)
+            self.generateFeature(followupFeature, "type", self.typedict)
+
+    def generateRequiredInterface(self, interface):
+        """Generate all interfaces required by an API version or extension.
+
+        - interface - Element for `<version>` or `<extension>`"""
+
+        # Loop over all features inside all <require> tags.
+        for features in interface.findall('require'):
+            for t in features.findall('type'):
+                self.generateFeature(t.get('name'), 'type', self.typedict)
+            for e in features.findall('enum'):
+                self.generateFeature(e.get('name'), 'enum', self.enumdict)
+            for c in features.findall('command'):
+                self.generateFeature(c.get('name'), 'command', self.cmddict)
+
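For readers unfamiliar with the registry XML, the sketch below (not part of the imported sources) shows the shape of a <require> block and mirrors the findall() walk performed by generateRequiredInterface above; the element names are illustrative, modeled on vk.xml.

import xml.etree.ElementTree as ET

# An illustrative <require> block shaped like those found in vk.xml.
require = ET.fromstring("""
<require>
  <type name="VkInstanceCreateInfo"/>
  <enum name="VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO"/>
  <command name="vkCreateInstance"/>
</require>
""")

# Walk it the same way generateRequiredInterface does.
for t in require.findall('type'):
    print('type    ->', t.get('name'))
for e in require.findall('enum'):
    print('enum    ->', e.get('name'))
for c in require.findall('command'):
    print('command ->', c.get('name'))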
+    def apiGen(self, genOpts):
+        """Generate interface for specified versions
+
+        - genOpts - GeneratorOptions object with parameters used
+          by the Generator object."""
+        self.gen.logMsg('diag', '*******************************************')
+        self.gen.logMsg('diag', '  Registry.apiGen file:', genOpts.filename,
+                        'api:', genOpts.apiname,
+                        'profile:', genOpts.profile)
+        self.gen.logMsg('diag', '*******************************************')
+
+        self.genOpts = genOpts
+        # Reset required/declared flags for all features
+        self.apiReset()
+
+        # Compile regexps used to select versions & extensions
+        regVersions = re.compile(self.genOpts.versions)
+        regEmitVersions = re.compile(self.genOpts.emitversions)
+        regAddExtensions = re.compile(self.genOpts.addExtensions)
+        regRemoveExtensions = re.compile(self.genOpts.removeExtensions)
+        regEmitExtensions = re.compile(self.genOpts.emitExtensions)
+
+        # Get all matching API feature names & add to list of FeatureInfo
+        # Note we used to select on feature version attributes, not names.
+        features = []
+        apiMatch = False
+        for key in self.apidict:
+            fi = self.apidict[key]
+            api = fi.elem.get('api')
+            if api == self.genOpts.apiname:
+                apiMatch = True
+                if regVersions.match(fi.name):
+                    # Matches API & version #s being generated. Mark for
+                    # emission and add to the features[] list.
+                    # @@ Could use 'declared' instead of 'emit'?
+                    fi.emit = (regEmitVersions.match(fi.name) is not None)
+                    features.append(fi)
+                    if not fi.emit:
+                        self.gen.logMsg('diag', 'NOT tagging feature api =', api,
+                                        'name =', fi.name, 'version =', fi.version,
+                                        'for emission (does not match emitversions pattern)')
+                    else:
+                        self.gen.logMsg('diag', 'Including feature api =', api,
+                                        'name =', fi.name, 'version =', fi.version,
+                                        'for emission (matches emitversions pattern)')
+                else:
+                    self.gen.logMsg('diag', 'NOT including feature api =', api,
+                                    'name =', fi.name, 'version =', fi.version,
+                                    '(does not match requested versions)')
+            else:
+                self.gen.logMsg('diag', 'NOT including feature api =', api,
+                                'name =', fi.name,
+                                '(does not match requested API)')
+        if not apiMatch:
+            self.gen.logMsg('warn', 'No matching API versions found!')
+
+        # Get all matching extensions, in order by their extension number,
+        # and add to the list of features.
+        # Start with extensions tagged with 'api' pattern matching the API
+        # being generated. Add extensions matching the pattern specified in
+        # regAddExtensions, then remove extensions matching the pattern
+        # specified in regRemoveExtensions.
+        for (extName, ei) in sorted(self.extdict.items(), key=lambda x: x[1].number if x[1].number is not None else '0'):
+            extName = ei.name
+            include = False
+
+            # Include extension if defaultExtensions is not None and if the
+            # 'supported' attribute matches defaultExtensions. The regexp in
+            # 'supported' must exactly match defaultExtensions, so bracket
+            # it with ^(pat)$.
+            pat = '^(' + ei.elem.get('supported') + ')$'
+            if (self.genOpts.defaultExtensions
+                    and re.match(pat, self.genOpts.defaultExtensions)):
+                self.gen.logMsg('diag', 'Including extension',
+                                extName, "(defaultExtensions matches the 'supported' attribute)")
+                include = True
+
+            # Include additional extensions if the extension name matches
+            # the regexp specified in the generator options. This allows
+            # forcing extensions into an interface even if they're not
+            # tagged appropriately in the registry.
+            if regAddExtensions.match(extName) is not None:
+                self.gen.logMsg('diag', 'Including extension',
+                                extName, '(matches explicitly requested extensions to add)')
+                include = True
+            # Remove extensions if the name matches the regexp specified
+            # in generator options. This allows forcing removal of
+            # extensions from an interface even if they're tagged that
+            # way in the registry.
+            if regRemoveExtensions.match(extName) is not None:
+                self.gen.logMsg('diag', 'Removing extension',
+                                extName, '(matches explicitly requested extensions to remove)')
+                include = False
+
+            # If the extension is to be included, add it to the
+            # extension features list.
+            if include:
+                ei.emit = (regEmitExtensions.match(extName) is not None)
+                features.append(ei)
+                if not ei.emit:
+                    self.gen.logMsg('diag', 'NOT tagging extension',
+                                    extName,
+                                    'for emission (does not match emitextensions pattern)')
+
+                # Hack - can be removed when validity generator goes away
+                # (Jon) I'm not sure what this does, or if it should respect
+                # the ei.emit flag above.
+                self.requiredextensions.append(extName)
+            else:
+                self.gen.logMsg('diag', 'NOT including extension',
+                                extName, '(does not match api attribute or explicitly requested extensions)')
+
+        # Sort the features list, if a sort procedure is defined
+        if self.genOpts.sortProcedure:
+            self.genOpts.sortProcedure(features)
+
+        # Pass 1: loop over requested API versions and extensions tagging
+        #   types/commands/features as required (in a <require> block) or no
+        #   longer required (in a <remove> block). It is possible to remove
+        #   a feature in one version and restore it later by requiring it in
+        #   a later version.
+        # If a profile other than 'None' is being generated, it must
+        #   match the profile attribute (if any) of the <require> and
+        #   <remove> tags.
+        self.gen.logMsg('diag', 'PASS 1: TAG FEATURES')
+        for f in features:
+            self.gen.logMsg('diag', 'PASS 1: Tagging required and removed features for',
+                            f.name)
+            self.requireAndRemoveFeatures(f.elem, f.name, self.genOpts.apiname, self.genOpts.profile)
+            self.assignAdditionalValidity(f.elem, self.genOpts.apiname, self.genOpts.profile)
+
+        # Pass 2: loop over specified API versions and extensions printing
+        #   declarations for required things which haven't already been
+        #   generated.
+        self.gen.logMsg('diag', 'PASS 2: GENERATE INTERFACES FOR FEATURES')
+        self.gen.beginFile(self.genOpts)
+        for f in features:
+            self.gen.logMsg('diag', 'PASS 2: Generating interface for',
+                            f.name)
+            emit = self.emitFeatures = f.emit
+            if not emit:
+                self.gen.logMsg('diag', 'PASS 2: NOT declaring feature',
+                                f.elem.get('name'), 'because it is not tagged for emission')
+            # Generate the interface (or just tag its elements as having been
+            # emitted, if they haven't been).
+            self.gen.beginFeature(f.elem, emit)
+            self.generateRequiredInterface(f.elem)
+            self.gen.endFeature()
+        self.gen.endFile()
+
+    def apiReset(self):
+        """Reset type/enum/command dictionaries before generating another API.
+
+        Use between apiGen() calls to reset internal state."""
+        for datatype in self.typedict:
+            self.typedict[datatype].resetState()
+        for enum in self.enumdict:
+            self.enumdict[enum].resetState()
+        for cmd in self.cmddict:
+            self.cmddict[cmd].resetState()
+        for cmd in self.apidict:
+            self.apidict[cmd].resetState()
+
+    def validateGroups(self):
+        """Validate `group=` attributes on `<param>` and `<proto>` tags.
+
+        Check that `group=` attributes match actual groups"""
+        # Keep track of group names not in <group> tags
+        badGroup = {}
+        self.gen.logMsg('diag', 'VALIDATING GROUP ATTRIBUTES')
+        for cmd in self.reg.findall('commands/command'):
+            proto = cmd.find('proto')
+            # funcname = cmd.find('proto/name').text
+            group = proto.get('group')
+            if group is not None and group not in self.groupdict:
+                # self.gen.logMsg('diag', '*** Command ', funcname, ' has UNKNOWN return group ', group)
+                if group not in badGroup:
+                    badGroup[group] = 1
+                else:
+                    badGroup[group] = badGroup[group] + 1
+
+            for param in cmd.findall('param'):
+                pname = param.find('name')
+                if pname is not None:
+                    pname = pname.text
+                else:
+                    pname = param.get('name')
+                group = param.get('group')
+                if group is not None and group not in self.groupdict:
+                    # self.gen.logMsg('diag', '*** Command ', funcname, ' param ', pname, ' has UNKNOWN group ', group)
+                    if group not in badGroup:
+                        badGroup[group] = 1
+                    else:
+                        badGroup[group] = badGroup[group] + 1
+
+        if badGroup:
+            self.gen.logMsg('diag', 'SUMMARY OF UNRECOGNIZED GROUPS')
+            for key in sorted(badGroup.keys()):
+                self.gen.logMsg('diag', '    ', key, ' occurred ', badGroup[key], ' times')
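To make the two-pass flow above concrete, here is a minimal driver sketch. It assumes the genvk.py-style entry points shipped alongside this file (Registry.loadFile/setGenerator/apiGen, CGeneratorOptions, COutputGenerator, VulkanConventions) and that the option names map onto the regexps compiled at the top of apiGen(); treat it as an illustration, not the canonical invocation.

from reg import Registry
from cgenerator import CGeneratorOptions, COutputGenerator
from vkconventions import VulkanConventions

# Option values are examples; each regexp-valued option feeds one of the
# re.compile() calls in apiGen() (versions, emitversions, add/remove/emitExtensions).
opts = CGeneratorOptions(
    conventions=VulkanConventions(),
    filename='vulkan_core.h',
    directory='.',
    apiname='vulkan',
    profile=None,
    versions='.*',               # which <feature> names to include
    emitversions='.*',           # which included versions to actually emit
    defaultExtensions='vulkan',  # matched against each extension's 'supported' attribute
    addExtensions='^()$',        # force-include nothing extra
    removeExtensions='^()$',     # force-exclude nothing
    emitExtensions='.*')         # emit every included extension

reg = Registry()
reg.loadFile('vk.xml')               # parse the registry XML
reg.setGenerator(COutputGenerator())
reg.apiGen(opts)                     # pass 1 tags features, pass 2 emits declarations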
diff --git a/src/third_party/vulkan-headers/src/registry/spec_tools/util.py b/src/third_party/vulkan-headers/src/registry/spec_tools/util.py
new file mode 100644
index 0000000..2463290
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/registry/spec_tools/util.py
@@ -0,0 +1,68 @@
+"""Utility functions not closely tied to other spec_tools types."""
+# Copyright (c) 2018-2019 Collabora, Ltd.
+# Copyright (c) 2013-2019 The Khronos Group Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def getElemName(elem, default=None):
+    """Get the name associated with an element, either a name child or name attribute."""
+    name_elem = elem.find('name')
+    if name_elem is not None:
+        return name_elem.text
+    # Fallback if there is no child.
+    return elem.get('name', default)
+
+
+def getElemType(elem, default=None):
+    """Get the type associated with an element, either a type child or type attribute."""
+    type_elem = elem.find('type')
+    if type_elem is not None:
+        return type_elem.text
+    # Fallback if there is no child.
+    return elem.get('type', default)
+
+
+def findFirstWithPredicate(collection, pred):
+    """Return the first element that satisfies the predicate, or None if none exist.
+
+    NOTE: Some places where this is used might be better served by changing to a dictionary.
+    """
+    for elt in collection:
+        if pred(elt):
+            return elt
+    return None
+
+
+def findNamedElem(elems, name):
+    """Traverse a collection of elements with 'name' nodes or attributes, looking for and returning one with the right name.
+
+    NOTE: Many places where this is used might be better served by changing to a dictionary.
+    """
+    return findFirstWithPredicate(elems, lambda elem: getElemName(elem) == name)
+
+
+def findTypedElem(elems, typename):
+    """Traverse a collection of elements with 'type' nodes or attributes, looking for and returning one with the right typename.
+
+    NOTE: Many places where this is used might be better served by changing to a dictionary.
+    """
+    return findFirstWithPredicate(elems, lambda elem: getElemType(elem) == typename)
+
+
+def findNamedObject(collection, name):
+    """Traverse a collection of elements with 'name' attributes, looking for and returning one with the right name.
+
+    NOTE: Many places where this is used might be better served by changing to a dictionary.
+    """
+    return findFirstWithPredicate(collection, lambda elt: elt.name == name)
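A small usage sketch for the helpers above; it assumes this registry directory is on sys.path so that spec_tools.util is importable, and the <member> fragments are illustrative.

import xml.etree.ElementTree as ET
from spec_tools.util import getElemName, getElemType, findNamedElem

members = [
    ET.fromstring('<member><type>VkStructureType</type> <name>sType</name></member>'),
    ET.fromstring('<member><type>void</type>* <name>pNext</name></member>'),
]

print(getElemName(members[0]))                        # 'sType' (from the <name> child)
print(getElemType(members[1]))                        # 'void'  (from the <type> child)
print(findNamedElem(members, 'pNext') is members[1])  # True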
diff --git a/src/third_party/vulkan-headers/src/registry/validusage.json b/src/third_party/vulkan-headers/src/registry/validusage.json
new file mode 100644
index 0000000..3e4f118
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/registry/validusage.json
@@ -0,0 +1,26246 @@
+{
+  "version info": {
+    "schema version": 2,
+    "api version": "1.1.130",
+    "comment": "from git branch: github-master commit: 86113f72290ca5998fcae798ee180bf587eca2a0",
+    "date": "2019-12-07 14:23:36Z"
+  },
+  "validation": {
+    "vkGetInstanceProcAddr": {
+      "core": [
+        {
+          "vuid": "VUID-vkGetInstanceProcAddr-instance-parameter",
+          "text": " If <code>instance</code> is not <code>NULL</code>, <code>instance</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkInstance\">VkInstance</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetInstanceProcAddr-pName-parameter",
+          "text": " <code>pName</code> <strong class=\"purple\">must</strong> be a null-terminated UTF-8 string"
+        }
+      ]
+    },
+    "vkGetDeviceProcAddr": {
+      "core": [
+        {
+          "vuid": "VUID-vkGetDeviceProcAddr-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetDeviceProcAddr-pName-parameter",
+          "text": " <code>pName</code> <strong class=\"purple\">must</strong> be a null-terminated UTF-8 string"
+        }
+      ]
+    },
+    "vkEnumerateInstanceVersion": {
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkEnumerateInstanceVersion-pApiVersion-parameter",
+          "text": " <code>pApiVersion</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        }
+      ]
+    },
+    "vkCreateInstance": {
+      "core": [
+        {
+          "vuid": "VUID-vkCreateInstance-ppEnabledExtensionNames-01388",
+          "text": " All <a href=\"#extendingvulkan-extensions-extensiondependencies\">required extensions</a> for each extension in the <a href=\"#VkInstanceCreateInfo\">VkInstanceCreateInfo</a>::<code>ppEnabledExtensionNames</code> list <strong class=\"purple\">must</strong> also be present in that list."
+        },
+        {
+          "vuid": "VUID-vkCreateInstance-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkInstanceCreateInfo\">VkInstanceCreateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateInstance-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateInstance-pInstance-parameter",
+          "text": " <code>pInstance</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkInstance\">VkInstance</a> handle"
+        }
+      ]
+    },
+    "VkInstanceCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkInstanceCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkInstanceCreateInfo-pNext-pNext",
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkDebugReportCallbackCreateInfoEXT\">VkDebugReportCallbackCreateInfoEXT</a>, <a href=\"#VkDebugUtilsMessengerCreateInfoEXT\">VkDebugUtilsMessengerCreateInfoEXT</a>, <a href=\"#VkValidationFeaturesEXT\">VkValidationFeaturesEXT</a>, or <a href=\"#VkValidationFlagsEXT\">VkValidationFlagsEXT</a>"
+        },
+        {
+          "vuid": "VUID-VkInstanceCreateInfo-sType-unique",
+          "text": " Each <code>sType</code> member in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
+        },
+        {
+          "vuid": "VUID-VkInstanceCreateInfo-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkInstanceCreateInfo-pApplicationInfo-parameter",
+          "text": " If <code>pApplicationInfo</code> is not <code>NULL</code>, <code>pApplicationInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkApplicationInfo\">VkApplicationInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-VkInstanceCreateInfo-ppEnabledLayerNames-parameter",
+          "text": " If <code>enabledLayerCount</code> is not <code>0</code>, <code>ppEnabledLayerNames</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>enabledLayerCount</code> null-terminated UTF-8 strings"
+        },
+        {
+          "vuid": "VUID-VkInstanceCreateInfo-ppEnabledExtensionNames-parameter",
+          "text": " If <code>enabledExtensionCount</code> is not <code>0</code>, <code>ppEnabledExtensionNames</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>enabledExtensionCount</code> null-terminated UTF-8 strings"
+        }
+      ]
+    },
+    "VkValidationFlagsEXT": {
+      "(VK_EXT_validation_flags)": [
+        {
+          "vuid": "VUID-VkValidationFlagsEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkValidationFlagsEXT-pDisabledValidationChecks-parameter",
+          "text": " <code>pDisabledValidationChecks</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>disabledValidationCheckCount</code> valid <a href=\"#VkValidationCheckEXT\">VkValidationCheckEXT</a> values"
+        },
+        {
+          "vuid": "VUID-VkValidationFlagsEXT-disabledValidationCheckCount-arraylength",
+          "text": " <code>disabledValidationCheckCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkValidationFeaturesEXT": {
+      "(VK_EXT_validation_features)": [
+        {
+          "vuid": "VUID-VkValidationFeaturesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkValidationFeaturesEXT-pEnabledValidationFeatures-parameter",
+          "text": " If <code>enabledValidationFeatureCount</code> is not <code>0</code>, <code>pEnabledValidationFeatures</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>enabledValidationFeatureCount</code> valid <a href=\"#VkValidationFeatureEnableEXT\">VkValidationFeatureEnableEXT</a> values"
+        },
+        {
+          "vuid": "VUID-VkValidationFeaturesEXT-pDisabledValidationFeatures-parameter",
+          "text": " If <code>disabledValidationFeatureCount</code> is not <code>0</code>, <code>pDisabledValidationFeatures</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>disabledValidationFeatureCount</code> valid <a href=\"#VkValidationFeatureDisableEXT\">VkValidationFeatureDisableEXT</a> values"
+        }
+      ]
+    },
+    "VkApplicationInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkApplicationInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_APPLICATION_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkApplicationInfo-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkApplicationInfo-pApplicationName-parameter",
+          "text": " If <code>pApplicationName</code> is not <code>NULL</code>, <code>pApplicationName</code> <strong class=\"purple\">must</strong> be a null-terminated UTF-8 string"
+        },
+        {
+          "vuid": "VUID-VkApplicationInfo-pEngineName-parameter",
+          "text": " If <code>pEngineName</code> is not <code>NULL</code>, <code>pEngineName</code> <strong class=\"purple\">must</strong> be a null-terminated UTF-8 string"
+        }
+      ]
+    },
+    "vkDestroyInstance": {
+      "core": [
+        {
+          "vuid": "VUID-vkDestroyInstance-instance-00629",
+          "text": " All child objects created using <code>instance</code> <strong class=\"purple\">must</strong> have been destroyed prior to destroying <code>instance</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroyInstance-instance-00630",
+          "text": " If <code>VkAllocationCallbacks</code> were provided when <code>instance</code> was created, a compatible set of callbacks <strong class=\"purple\">must</strong> be provided here"
+        },
+        {
+          "vuid": "VUID-vkDestroyInstance-instance-00631",
+          "text": " If no <code>VkAllocationCallbacks</code> were provided when <code>instance</code> was created, <code>pAllocator</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroyInstance-instance-parameter",
+          "text": " If <code>instance</code> is not <code>NULL</code>, <code>instance</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkInstance\">VkInstance</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyInstance-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        }
+      ]
+    },
+    "vkEnumeratePhysicalDevices": {
+      "core": [
+        {
+          "vuid": "VUID-vkEnumeratePhysicalDevices-instance-parameter",
+          "text": " <code>instance</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkInstance\">VkInstance</a> handle"
+        },
+        {
+          "vuid": "VUID-vkEnumeratePhysicalDevices-pPhysicalDeviceCount-parameter",
+          "text": " <code>pPhysicalDeviceCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkEnumeratePhysicalDevices-pPhysicalDevices-parameter",
+          "text": " If the value referenced by <code>pPhysicalDeviceCount</code> is not <code>0</code>, and <code>pPhysicalDevices</code> is not <code>NULL</code>, <code>pPhysicalDevices</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pPhysicalDeviceCount</code> <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handles"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceProperties": {
+      "core": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceProperties-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceProperties-pProperties-parameter",
+          "text": " <code>pProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkPhysicalDeviceProperties\">VkPhysicalDeviceProperties</a> structure"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceProperties2": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceProperties2-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceProperties2-pProperties-parameter",
+          "text": " <code>pProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkPhysicalDeviceProperties2\">VkPhysicalDeviceProperties2</a> structure"
+        }
+      ]
+    },
+    "VkPhysicalDeviceProperties2": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceProperties2-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2</code>"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceProperties2-pNext-pNext",
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT\">VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT</a>, <a href=\"#VkPhysicalDeviceConservativeRasterizationPropertiesEXT\">VkPhysicalDeviceConservativeRasterizationPropertiesEXT</a>, <a href=\"#VkPhysicalDeviceCooperativeMatrixPropertiesNV\">VkPhysicalDeviceCooperativeMatrixPropertiesNV</a>, <a href=\"#VkPhysicalDeviceDepthStencilResolvePropertiesKHR\">VkPhysicalDeviceDepthStencilResolvePropertiesKHR</a>, <a href=\"#VkPhysicalDeviceDescriptorIndexingPropertiesEXT\">VkPhysicalDeviceDescriptorIndexingPropertiesEXT</a>, <a href=\"#VkPhysicalDeviceDiscardRectanglePropertiesEXT\">VkPhysicalDeviceDiscardRectanglePropertiesEXT</a>, <a href=\"#VkPhysicalDeviceDriverPropertiesKHR\">VkPhysicalDeviceDriverPropertiesKHR</a>, <a href=\"#VkPhysicalDeviceExternalMemoryHostPropertiesEXT\">VkPhysicalDeviceExternalMemoryHostPropertiesEXT</a>, <a href=\"#VkPhysicalDeviceFloatControlsPropertiesKHR\">VkPhysicalDeviceFloatControlsPropertiesKHR</a>, <a href=\"#VkPhysicalDeviceFragmentDensityMapPropertiesEXT\">VkPhysicalDeviceFragmentDensityMapPropertiesEXT</a>, <a href=\"#VkPhysicalDeviceIDProperties\">VkPhysicalDeviceIDProperties</a>, <a href=\"#VkPhysicalDeviceInlineUniformBlockPropertiesEXT\">VkPhysicalDeviceInlineUniformBlockPropertiesEXT</a>, <a href=\"#VkPhysicalDeviceLineRasterizationPropertiesEXT\">VkPhysicalDeviceLineRasterizationPropertiesEXT</a>, <a href=\"#VkPhysicalDeviceMaintenance3Properties\">VkPhysicalDeviceMaintenance3Properties</a>, <a href=\"#VkPhysicalDeviceMeshShaderPropertiesNV\">VkPhysicalDeviceMeshShaderPropertiesNV</a>, <a href=\"#VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX\">VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX</a>, <a href=\"#VkPhysicalDeviceMultiviewProperties\">VkPhysicalDeviceMultiviewProperties</a>, <a href=\"#VkPhysicalDevicePCIBusInfoPropertiesEXT\">VkPhysicalDevicePCIBusInfoPropertiesEXT</a>, <a href=\"#VkPhysicalDevicePerformanceQueryPropertiesKHR\">VkPhysicalDevicePerformanceQueryPropertiesKHR</a>, <a href=\"#VkPhysicalDevicePointClippingProperties\">VkPhysicalDevicePointClippingProperties</a>, <a href=\"#VkPhysicalDeviceProtectedMemoryProperties\">VkPhysicalDeviceProtectedMemoryProperties</a>, <a href=\"#VkPhysicalDevicePushDescriptorPropertiesKHR\">VkPhysicalDevicePushDescriptorPropertiesKHR</a>, <a href=\"#VkPhysicalDeviceRayTracingPropertiesNV\">VkPhysicalDeviceRayTracingPropertiesNV</a>, <a href=\"#VkPhysicalDeviceSampleLocationsPropertiesEXT\">VkPhysicalDeviceSampleLocationsPropertiesEXT</a>, <a href=\"#VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT\">VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT</a>, <a href=\"#VkPhysicalDeviceShaderCoreProperties2AMD\">VkPhysicalDeviceShaderCoreProperties2AMD</a>, <a href=\"#VkPhysicalDeviceShaderCorePropertiesAMD\">VkPhysicalDeviceShaderCorePropertiesAMD</a>, <a href=\"#VkPhysicalDeviceShaderSMBuiltinsPropertiesNV\">VkPhysicalDeviceShaderSMBuiltinsPropertiesNV</a>, <a href=\"#VkPhysicalDeviceShadingRateImagePropertiesNV\">VkPhysicalDeviceShadingRateImagePropertiesNV</a>, <a href=\"#VkPhysicalDeviceSubgroupProperties\">VkPhysicalDeviceSubgroupProperties</a>, <a href=\"#VkPhysicalDeviceSubgroupSizeControlPropertiesEXT\">VkPhysicalDeviceSubgroupSizeControlPropertiesEXT</a>, <a 
href=\"#VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT\">VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT</a>, <a href=\"#VkPhysicalDeviceTimelineSemaphorePropertiesKHR\">VkPhysicalDeviceTimelineSemaphorePropertiesKHR</a>, <a href=\"#VkPhysicalDeviceTransformFeedbackPropertiesEXT\">VkPhysicalDeviceTransformFeedbackPropertiesEXT</a>, or <a href=\"#VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT\">VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT</a>"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceProperties2-sType-unique",
+          "text": " Each <code>sType</code> member in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
+        }
+      ]
+    },
+    "VkPhysicalDeviceIDProperties": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)+(VK_VERSION_1_1,VK_KHR_external_memory_capabilities,VK_KHR_external_semaphore_capabilities,VK_KHR_external_fence_capabilities)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceIDProperties-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceDriverPropertiesKHR": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)+(VK_KHR_driver_properties)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceDriverPropertiesKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR</code>"
+        }
+      ]
+    },
+    "VkPhysicalDevicePCIBusInfoPropertiesEXT": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)+(VK_EXT_pci_bus_info)": [
+        {
+          "vuid": "VUID-VkPhysicalDevicePCIBusInfoPropertiesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT</code>"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceQueueFamilyProperties": {
+      "core": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceQueueFamilyProperties-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceQueueFamilyProperties-pQueueFamilyPropertyCount-parameter",
+          "text": " <code>pQueueFamilyPropertyCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceQueueFamilyProperties-pQueueFamilyProperties-parameter",
+          "text": " If the value referenced by <code>pQueueFamilyPropertyCount</code> is not <code>0</code>, and <code>pQueueFamilyProperties</code> is not <code>NULL</code>, <code>pQueueFamilyProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pQueueFamilyPropertyCount</code> <a href=\"#VkQueueFamilyProperties\">VkQueueFamilyProperties</a> structures"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceQueueFamilyProperties2": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceQueueFamilyProperties2-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceQueueFamilyProperties2-pQueueFamilyPropertyCount-parameter",
+          "text": " <code>pQueueFamilyPropertyCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceQueueFamilyProperties2-pQueueFamilyProperties-parameter",
+          "text": " If the value referenced by <code>pQueueFamilyPropertyCount</code> is not <code>0</code>, and <code>pQueueFamilyProperties</code> is not <code>NULL</code>, <code>pQueueFamilyProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pQueueFamilyPropertyCount</code> <a href=\"#VkQueueFamilyProperties2\">VkQueueFamilyProperties2</a> structures"
+        }
+      ]
+    },
+    "VkQueueFamilyProperties2": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [
+        {
+          "vuid": "VUID-VkQueueFamilyProperties2-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2</code>"
+        },
+        {
+          "vuid": "VUID-VkQueueFamilyProperties2-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkQueueFamilyCheckpointPropertiesNV\">VkQueueFamilyCheckpointPropertiesNV</a>"
+        }
+      ]
+    },
+    "VkQueueFamilyCheckpointPropertiesNV": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)+(VK_NV_device_diagnostic_checkpoints)": [
+        {
+          "vuid": "VUID-VkQueueFamilyCheckpointPropertiesNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV</code>"
+        }
+      ]
+    },
+    "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR": {
+      "(VK_KHR_performance_query)": [
+        {
+          "vuid": "VUID-vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR-pCounterCount-parameter",
+          "text": " <code>pCounterCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR-pCounters-parameter",
+          "text": " If the value referenced by <code>pCounterCount</code> is not <code>0</code>, and <code>pCounters</code> is not <code>NULL</code>, <code>pCounters</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pCounterCount</code> <a href=\"#VkPerformanceCounterKHR\">VkPerformanceCounterKHR</a> structures"
+        },
+        {
+          "vuid": "VUID-vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR-pCounterDescriptions-parameter",
+          "text": " If the value referenced by <code>pCounterCount</code> is not <code>0</code>, and <code>pCounterDescriptions</code> is not <code>NULL</code>, <code>pCounterDescriptions</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pCounterCount</code> <a href=\"#VkPerformanceCounterDescriptionKHR\">VkPerformanceCounterDescriptionKHR</a> structures"
+        }
+      ]
+    },
+    "VkPerformanceCounterKHR": {
+      "(VK_KHR_performance_query)": [
+        {
+          "vuid": "VUID-VkPerformanceCounterKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkPerformanceCounterKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "VkPerformanceCounterDescriptionKHR": {
+      "(VK_KHR_performance_query)": [
+        {
+          "vuid": "VUID-VkPerformanceCounterDescriptionKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkPerformanceCounterDescriptionKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "vkEnumeratePhysicalDeviceGroups": {
+      "(VK_VERSION_1_1,VK_KHR_device_group_creation)": [
+        {
+          "vuid": "VUID-vkEnumeratePhysicalDeviceGroups-instance-parameter",
+          "text": " <code>instance</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkInstance\">VkInstance</a> handle"
+        },
+        {
+          "vuid": "VUID-vkEnumeratePhysicalDeviceGroups-pPhysicalDeviceGroupCount-parameter",
+          "text": " <code>pPhysicalDeviceGroupCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkEnumeratePhysicalDeviceGroups-pPhysicalDeviceGroupProperties-parameter",
+          "text": " If the value referenced by <code>pPhysicalDeviceGroupCount</code> is not <code>0</code>, and <code>pPhysicalDeviceGroupProperties</code> is not <code>NULL</code>, <code>pPhysicalDeviceGroupProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pPhysicalDeviceGroupCount</code> <a href=\"#VkPhysicalDeviceGroupProperties\">VkPhysicalDeviceGroupProperties</a> structures"
+        }
+      ]
+    },
+    "VkPhysicalDeviceGroupProperties": {
+      "(VK_VERSION_1_1,VK_KHR_device_group_creation)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceGroupProperties-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES</code>"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceGroupProperties-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "vkCreateDevice": {
+      "core": [
+        {
+          "vuid": "VUID-vkCreateDevice-ppEnabledExtensionNames-01387",
+          "text": " All <a href=\"#extendingvulkan-extensions-extensiondependencies\">required extensions</a> for each extension in the <a href=\"#VkDeviceCreateInfo\">VkDeviceCreateInfo</a>::<code>ppEnabledExtensionNames</code> list <strong class=\"purple\">must</strong> also be present in that list."
+        },
+        {
+          "vuid": "VUID-vkCreateDevice-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateDevice-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkDeviceCreateInfo\">VkDeviceCreateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateDevice-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateDevice-pDevice-parameter",
+          "text": " <code>pDevice</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkDevice\">VkDevice</a> handle"
+        }
+      ]
+    },
+    "VkDeviceCreateInfo": {
+      "!(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-VkDeviceCreateInfo-queueFamilyIndex-00372",
+          "text": " The <code>queueFamilyIndex</code> member of each element of <code>pQueueCreateInfos</code> <strong class=\"purple\">must</strong> be unique within <code>pQueueCreateInfos</code>"
+        }
+      ],
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-VkDeviceCreateInfo-queueFamilyIndex-02802",
+          "text": " The <code>queueFamilyIndex</code> member of each element of <code>pQueueCreateInfos</code> <strong class=\"purple\">must</strong> be unique within <code>pQueueCreateInfos</code>, except that two members can share the same <code>queueFamilyIndex</code> if one is a protected-capable queue and one is not a protected-capable queue."
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [
+        {
+          "vuid": "VUID-VkDeviceCreateInfo-pNext-00373",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkPhysicalDeviceFeatures2\">VkPhysicalDeviceFeatures2</a> structure, then <code>pEnabledFeatures</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ],
+      "(VK_AMD_negative_viewport_height)+(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-VkDeviceCreateInfo-ppEnabledExtensionNames-01840",
+          "text": " <code>ppEnabledExtensionNames</code> <strong class=\"purple\">must</strong> not contain <code><a href=\"#VK_AMD_negative_viewport_height\">VK_AMD_negative_viewport_height</a></code>"
+        }
+      ],
+      "(VK_AMD_negative_viewport_height)+!(VK_VERSION_1_1)+(VK_KHR_maintenance1)": [
+        {
+          "vuid": "VUID-VkDeviceCreateInfo-ppEnabledExtensionNames-00374",
+          "text": " <code>ppEnabledExtensionNames</code> <strong class=\"purple\">must</strong> not contain both <code><a href=\"#VK_KHR_maintenance1\">VK_KHR_maintenance1</a></code> and <code><a href=\"#VK_AMD_negative_viewport_height\">VK_AMD_negative_viewport_height</a></code>"
+        }
+      ],
+      "(VK_EXT_buffer_device_address+VK_KHR_buffer_device_address)": [
+        {
+          "vuid": "VUID-VkDeviceCreateInfo-ppEnabledExtensionNames-03328",
+          "text": " <code>ppEnabledExtensionNames</code> <strong class=\"purple\">must</strong> not contain both <code><a href=\"#VK_KHR_buffer_device_address\">VK_KHR_buffer_device_address</a></code> and <code><a href=\"#VK_EXT_buffer_device_address\">VK_EXT_buffer_device_address</a></code>"
+        }
+      ],
+      "core": [
+        {
+          "vuid": "VUID-VkDeviceCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkDeviceCreateInfo-pNext-pNext",
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkDeviceGroupDeviceCreateInfo\">VkDeviceGroupDeviceCreateInfo</a>, <a href=\"#VkDeviceMemoryOverallocationCreateInfoAMD\">VkDeviceMemoryOverallocationCreateInfoAMD</a>, <a href=\"#VkPhysicalDevice16BitStorageFeatures\">VkPhysicalDevice16BitStorageFeatures</a>, <a href=\"#VkPhysicalDevice8BitStorageFeaturesKHR\">VkPhysicalDevice8BitStorageFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceASTCDecodeFeaturesEXT\">VkPhysicalDeviceASTCDecodeFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT\">VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceBufferDeviceAddressFeaturesEXT\">VkPhysicalDeviceBufferDeviceAddressFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceBufferDeviceAddressFeaturesKHR\">VkPhysicalDeviceBufferDeviceAddressFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceCoherentMemoryFeaturesAMD\">VkPhysicalDeviceCoherentMemoryFeaturesAMD</a>, <a href=\"#VkPhysicalDeviceComputeShaderDerivativesFeaturesNV\">VkPhysicalDeviceComputeShaderDerivativesFeaturesNV</a>, <a href=\"#VkPhysicalDeviceConditionalRenderingFeaturesEXT\">VkPhysicalDeviceConditionalRenderingFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceCooperativeMatrixFeaturesNV\">VkPhysicalDeviceCooperativeMatrixFeaturesNV</a>, <a href=\"#VkPhysicalDeviceCornerSampledImageFeaturesNV\">VkPhysicalDeviceCornerSampledImageFeaturesNV</a>, <a href=\"#VkPhysicalDeviceCoverageReductionModeFeaturesNV\">VkPhysicalDeviceCoverageReductionModeFeaturesNV</a>, <a href=\"#VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV\">VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV</a>, <a href=\"#VkPhysicalDeviceDepthClipEnableFeaturesEXT\">VkPhysicalDeviceDepthClipEnableFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceDescriptorIndexingFeaturesEXT\">VkPhysicalDeviceDescriptorIndexingFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceExclusiveScissorFeaturesNV\">VkPhysicalDeviceExclusiveScissorFeaturesNV</a>, <a href=\"#VkPhysicalDeviceFeatures2\">VkPhysicalDeviceFeatures2</a>, <a href=\"#VkPhysicalDeviceFragmentDensityMapFeaturesEXT\">VkPhysicalDeviceFragmentDensityMapFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV\">VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV</a>, <a href=\"#VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT\">VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceHostQueryResetFeaturesEXT\">VkPhysicalDeviceHostQueryResetFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceImagelessFramebufferFeaturesKHR\">VkPhysicalDeviceImagelessFramebufferFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceIndexTypeUint8FeaturesEXT\">VkPhysicalDeviceIndexTypeUint8FeaturesEXT</a>, <a href=\"#VkPhysicalDeviceInlineUniformBlockFeaturesEXT\">VkPhysicalDeviceInlineUniformBlockFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceLineRasterizationFeaturesEXT\">VkPhysicalDeviceLineRasterizationFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceMemoryPriorityFeaturesEXT\">VkPhysicalDeviceMemoryPriorityFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceMeshShaderFeaturesNV\">VkPhysicalDeviceMeshShaderFeaturesNV</a>, <a href=\"#VkPhysicalDeviceMultiviewFeatures\">VkPhysicalDeviceMultiviewFeatures</a>, <a href=\"#VkPhysicalDevicePerformanceQueryFeaturesKHR\">VkPhysicalDevicePerformanceQueryFeaturesKHR</a>, <a 
href=\"#VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR\">VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceProtectedMemoryFeatures\">VkPhysicalDeviceProtectedMemoryFeatures</a>, <a href=\"#VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV\">VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV</a>, <a href=\"#VkPhysicalDeviceSamplerYcbcrConversionFeatures\">VkPhysicalDeviceSamplerYcbcrConversionFeatures</a>, <a href=\"#VkPhysicalDeviceScalarBlockLayoutFeaturesEXT\">VkPhysicalDeviceScalarBlockLayoutFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR\">VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceShaderAtomicInt64FeaturesKHR\">VkPhysicalDeviceShaderAtomicInt64FeaturesKHR</a>, <a href=\"#VkPhysicalDeviceShaderClockFeaturesKHR\">VkPhysicalDeviceShaderClockFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT\">VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceShaderDrawParametersFeatures\">VkPhysicalDeviceShaderDrawParametersFeatures</a>, <a href=\"#VkPhysicalDeviceShaderFloat16Int8FeaturesKHR\">VkPhysicalDeviceShaderFloat16Int8FeaturesKHR</a>, <a href=\"#VkPhysicalDeviceShaderImageFootprintFeaturesNV\">VkPhysicalDeviceShaderImageFootprintFeaturesNV</a>, <a href=\"#VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL\">VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL</a>, <a href=\"#VkPhysicalDeviceShaderSMBuiltinsFeaturesNV\">VkPhysicalDeviceShaderSMBuiltinsFeaturesNV</a>, <a href=\"#VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR\">VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceShadingRateImageFeaturesNV\">VkPhysicalDeviceShadingRateImageFeaturesNV</a>, <a href=\"#VkPhysicalDeviceSubgroupSizeControlFeaturesEXT\">VkPhysicalDeviceSubgroupSizeControlFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT\">VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT\">VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceTimelineSemaphoreFeaturesKHR\">VkPhysicalDeviceTimelineSemaphoreFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceTransformFeedbackFeaturesEXT\">VkPhysicalDeviceTransformFeedbackFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR\">VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR</a>, <a href=\"#VkPhysicalDeviceVariablePointersFeatures\">VkPhysicalDeviceVariablePointersFeatures</a>, <a href=\"#VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT\">VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT</a>, <a href=\"#VkPhysicalDeviceVulkanMemoryModelFeaturesKHR\">VkPhysicalDeviceVulkanMemoryModelFeaturesKHR</a>, or <a href=\"#VkPhysicalDeviceYcbcrImageArraysFeaturesEXT\">VkPhysicalDeviceYcbcrImageArraysFeaturesEXT</a>"
+        },
+        {
+          "vuid": "VUID-VkDeviceCreateInfo-sType-unique",
+          "text": " Each <code>sType</code> member in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
+        },
+        {
+          "vuid": "VUID-VkDeviceCreateInfo-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkDeviceCreateInfo-pQueueCreateInfos-parameter",
+          "text": " <code>pQueueCreateInfos</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>queueCreateInfoCount</code> valid <a href=\"#VkDeviceQueueCreateInfo\">VkDeviceQueueCreateInfo</a> structures"
+        },
+        {
+          "vuid": "VUID-VkDeviceCreateInfo-ppEnabledLayerNames-parameter",
+          "text": " If <code>enabledLayerCount</code> is not <code>0</code>, <code>ppEnabledLayerNames</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>enabledLayerCount</code> null-terminated UTF-8 strings"
+        },
+        {
+          "vuid": "VUID-VkDeviceCreateInfo-ppEnabledExtensionNames-parameter",
+          "text": " If <code>enabledExtensionCount</code> is not <code>0</code>, <code>ppEnabledExtensionNames</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>enabledExtensionCount</code> null-terminated UTF-8 strings"
+        },
+        {
+          "vuid": "VUID-VkDeviceCreateInfo-pEnabledFeatures-parameter",
+          "text": " If <code>pEnabledFeatures</code> is not <code>NULL</code>, <code>pEnabledFeatures</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkPhysicalDeviceFeatures\">VkPhysicalDeviceFeatures</a> structure"
+        },
+        {
+          "vuid": "VUID-VkDeviceCreateInfo-queueCreateInfoCount-arraylength",
+          "text": " <code>queueCreateInfoCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkDeviceGroupDeviceCreateInfo": {
+      "(VK_VERSION_1_1,VK_KHR_device_group_creation)": [
+        {
+          "vuid": "VUID-VkDeviceGroupDeviceCreateInfo-pPhysicalDevices-00375",
+          "text": " Each element of <code>pPhysicalDevices</code> <strong class=\"purple\">must</strong> be unique"
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupDeviceCreateInfo-pPhysicalDevices-00376",
+          "text": " All elements of <code>pPhysicalDevices</code> <strong class=\"purple\">must</strong> be in the same device group as enumerated by <a href=\"#vkEnumeratePhysicalDeviceGroups\">vkEnumeratePhysicalDeviceGroups</a>"
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupDeviceCreateInfo-physicalDeviceCount-00377",
+          "text": " If <code>physicalDeviceCount</code> is not <code>0</code>, the <code>physicalDevice</code> parameter of <a href=\"#vkCreateDevice\">vkCreateDevice</a> <strong class=\"purple\">must</strong> be an element of <code>pPhysicalDevices</code>."
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupDeviceCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupDeviceCreateInfo-pPhysicalDevices-parameter",
+          "text": " If <code>physicalDeviceCount</code> is not <code>0</code>, <code>pPhysicalDevices</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>physicalDeviceCount</code> valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handles"
+        }
+      ]
+    },
+    "VkDeviceMemoryOverallocationCreateInfoAMD": {
+      "(VK_AMD_memory_overallocation_behavior)": [
+        {
+          "vuid": "VUID-VkDeviceMemoryOverallocationCreateInfoAMD-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD</code>"
+        },
+        {
+          "vuid": "VUID-VkDeviceMemoryOverallocationCreateInfoAMD-overallocationBehavior-parameter",
+          "text": " <code>overallocationBehavior</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkMemoryOverallocationBehaviorAMD\">VkMemoryOverallocationBehaviorAMD</a> value"
+        }
+      ]
+    },
+    "vkDestroyDevice": {
+      "core": [
+        {
+          "vuid": "VUID-vkDestroyDevice-device-00378",
+          "text": " All child objects created on <code>device</code> <strong class=\"purple\">must</strong> have been destroyed prior to destroying <code>device</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroyDevice-device-00379",
+          "text": " If <code>VkAllocationCallbacks</code> were provided when <code>device</code> was created, a compatible set of callbacks <strong class=\"purple\">must</strong> be provided here"
+        },
+        {
+          "vuid": "VUID-vkDestroyDevice-device-00380",
+          "text": " If no <code>VkAllocationCallbacks</code> were provided when <code>device</code> was created, <code>pAllocator</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroyDevice-device-parameter",
+          "text": " If <code>device</code> is not <code>NULL</code>, <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyDevice-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        }
+      ]
+    },
+    "VkDeviceQueueCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkDeviceQueueCreateInfo-queueFamilyIndex-00381",
+          "text": " <code>queueFamilyIndex</code> <strong class=\"purple\">must</strong> be less than <code>pQueueFamilyPropertyCount</code> returned by <code>vkGetPhysicalDeviceQueueFamilyProperties</code>"
+        },
+        {
+          "vuid": "VUID-VkDeviceQueueCreateInfo-queueCount-00382",
+          "text": " <code>queueCount</code> <strong class=\"purple\">must</strong> be less than or equal to the <code>queueCount</code> member of the <code>VkQueueFamilyProperties</code> structure, as returned by <code>vkGetPhysicalDeviceQueueFamilyProperties</code> in the <code>pQueueFamilyProperties</code>[queueFamilyIndex]"
+        },
+        {
+          "vuid": "VUID-VkDeviceQueueCreateInfo-pQueuePriorities-00383",
+          "text": " Each element of <code>pQueuePriorities</code> <strong class=\"purple\">must</strong> be between <code>0.0</code> and <code>1.0</code> inclusive"
+        },
+        {
+          "vuid": "VUID-VkDeviceQueueCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkDeviceQueueCreateInfo-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkDeviceQueueGlobalPriorityCreateInfoEXT\">VkDeviceQueueGlobalPriorityCreateInfoEXT</a>"
+        },
+        {
+          "vuid": "VUID-VkDeviceQueueCreateInfo-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkDeviceQueueCreateFlagBits\">VkDeviceQueueCreateFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkDeviceQueueCreateInfo-pQueuePriorities-parameter",
+          "text": " <code>pQueuePriorities</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>queueCount</code> <code>float</code> values"
+        },
+        {
+          "vuid": "VUID-VkDeviceQueueCreateInfo-queueCount-arraylength",
+          "text": " <code>queueCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkDeviceQueueGlobalPriorityCreateInfoEXT": {
+      "(VK_EXT_global_priority)": [
+        {
+          "vuid": "VUID-VkDeviceQueueGlobalPriorityCreateInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkDeviceQueueGlobalPriorityCreateInfoEXT-globalPriority-parameter",
+          "text": " <code>globalPriority</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkQueueGlobalPriorityEXT\">VkQueueGlobalPriorityEXT</a> value"
+        }
+      ]
+    },
+    "vkGetDeviceQueue": {
+      "core": [
+        {
+          "vuid": "VUID-vkGetDeviceQueue-queueFamilyIndex-00384",
+          "text": " <code>queueFamilyIndex</code> <strong class=\"purple\">must</strong> be one of the queue family indices specified when <code>device</code> was created, via the <code>VkDeviceQueueCreateInfo</code> structure"
+        },
+        {
+          "vuid": "VUID-vkGetDeviceQueue-queueIndex-00385",
+          "text": " <code>queueIndex</code> <strong class=\"purple\">must</strong> be less than the number of queues created for the specified queue family index when <code>device</code> was created, via the <code>queueCount</code> member of the <code>VkDeviceQueueCreateInfo</code> structure"
+        },
+        {
+          "vuid": "VUID-vkGetDeviceQueue-flags-01841",
+          "text": " <a href=\"#VkDeviceQueueCreateInfo\">VkDeviceQueueCreateInfo</a>::<code>flags</code> <strong class=\"purple\">must</strong> have been set to zero when <code>device</code> was created"
+        },
+        {
+          "vuid": "VUID-vkGetDeviceQueue-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetDeviceQueue-pQueue-parameter",
+          "text": " <code>pQueue</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkQueue\">VkQueue</a> handle"
+        }
+      ]
+    },
+    "vkGetDeviceQueue2": {
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkGetDeviceQueue2-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetDeviceQueue2-pQueueInfo-parameter",
+          "text": " <code>pQueueInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkDeviceQueueInfo2\">VkDeviceQueueInfo2</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetDeviceQueue2-pQueue-parameter",
+          "text": " <code>pQueue</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkQueue\">VkQueue</a> handle"
+        }
+      ]
+    },
+    "VkDeviceQueueInfo2": {
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-VkDeviceQueueInfo2-queueFamilyIndex-01842",
+          "text": " <code>queueFamilyIndex</code> <strong class=\"purple\">must</strong> be one of the queue family indices specified when <code>device</code> was created, via the <code>VkDeviceQueueCreateInfo</code> structure"
+        },
+        {
+          "vuid": "VUID-VkDeviceQueueInfo2-queueIndex-01843",
+          "text": " <code>queueIndex</code> <strong class=\"purple\">must</strong> be less than the number of queues created for the specified queue family index and <a href=\"#VkDeviceQueueCreateFlags\">VkDeviceQueueCreateFlags</a> member <code>flags</code> equal to this <code>flags</code> value when <code>device</code> was created, via the <code>queueCount</code> member of the <code>VkDeviceQueueCreateInfo</code> structure"
+        },
+        {
+          "vuid": "VUID-VkDeviceQueueInfo2-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2</code>"
+        },
+        {
+          "vuid": "VUID-VkDeviceQueueInfo2-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkDeviceQueueInfo2-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkDeviceQueueCreateFlagBits\">VkDeviceQueueCreateFlagBits</a> values"
+        }
+      ]
+    },
+    "vkCreateCommandPool": {
+      "core": [
+        {
+          "vuid": "VUID-vkCreateCommandPool-queueFamilyIndex-01937",
+          "text": " <code>pCreateInfo</code>-&gt;queueFamilyIndex <strong class=\"purple\">must</strong> be the index of a queue family available in the logical device <code>device</code>."
+        },
+        {
+          "vuid": "VUID-vkCreateCommandPool-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateCommandPool-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkCommandPoolCreateInfo\">VkCommandPoolCreateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateCommandPool-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateCommandPool-pCommandPool-parameter",
+          "text": " <code>pCommandPool</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkCommandPool\">VkCommandPool</a> handle"
+        }
+      ]
+    },
+    "VkCommandPoolCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkCommandPoolCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkCommandPoolCreateInfo-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkCommandPoolCreateInfo-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkCommandPoolCreateFlagBits\">VkCommandPoolCreateFlagBits</a> values"
+        }
+      ]
+    },
+    "vkTrimCommandPool": {
+      "(VK_VERSION_1_1,VK_KHR_maintenance1)": [
+        {
+          "vuid": "VUID-vkTrimCommandPool-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkTrimCommandPool-commandPool-parameter",
+          "text": " <code>commandPool</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandPool\">VkCommandPool</a> handle"
+        },
+        {
+          "vuid": "VUID-vkTrimCommandPool-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkTrimCommandPool-commandPool-parent",
+          "text": " <code>commandPool</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkResetCommandPool": {
+      "core": [
+        {
+          "vuid": "VUID-vkResetCommandPool-commandPool-00040",
+          "text": " All <code>VkCommandBuffer</code> objects allocated from <code>commandPool</code> <strong class=\"purple\">must</strong> not be in the <a href=\"#commandbuffers-lifecycle\">pending state</a>"
+        },
+        {
+          "vuid": "VUID-vkResetCommandPool-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkResetCommandPool-commandPool-parameter",
+          "text": " <code>commandPool</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandPool\">VkCommandPool</a> handle"
+        },
+        {
+          "vuid": "VUID-vkResetCommandPool-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkCommandPoolResetFlagBits\">VkCommandPoolResetFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-vkResetCommandPool-commandPool-parent",
+          "text": " <code>commandPool</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkDestroyCommandPool": {
+      "core": [
+        {
+          "vuid": "VUID-vkDestroyCommandPool-commandPool-00041",
+          "text": " All <code>VkCommandBuffer</code> objects allocated from <code>commandPool</code> <strong class=\"purple\">must</strong> not be in the <a href=\"#commandbuffers-lifecycle\">pending state</a>."
+        },
+        {
+          "vuid": "VUID-vkDestroyCommandPool-commandPool-00042",
+          "text": " If <code>VkAllocationCallbacks</code> were provided when <code>commandPool</code> was created, a compatible set of callbacks <strong class=\"purple\">must</strong> be provided here"
+        },
+        {
+          "vuid": "VUID-vkDestroyCommandPool-commandPool-00043",
+          "text": " If no <code>VkAllocationCallbacks</code> were provided when <code>commandPool</code> was created, <code>pAllocator</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroyCommandPool-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyCommandPool-commandPool-parameter",
+          "text": " If <code>commandPool</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>commandPool</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandPool\">VkCommandPool</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyCommandPool-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkDestroyCommandPool-commandPool-parent",
+          "text": " If <code>commandPool</code> is a valid handle, it <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkAllocateCommandBuffers": {
+      "core": [
+        {
+          "vuid": "VUID-vkAllocateCommandBuffers-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkAllocateCommandBuffers-pAllocateInfo-parameter",
+          "text": " <code>pAllocateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkCommandBufferAllocateInfo\">VkCommandBufferAllocateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkAllocateCommandBuffers-pCommandBuffers-parameter",
+          "text": " <code>pCommandBuffers</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pAllocateInfo</code>::commandBufferCount <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handles"
+        },
+        {
+          "vuid": "VUID-vkAllocateCommandBuffers-pAllocateInfo::commandBufferCount-arraylength",
+          "text": " The value referenced by <code>pAllocateInfo</code>::<code>commandBufferCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkCommandBufferAllocateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkCommandBufferAllocateInfo-commandBufferCount-00044",
+          "text": " <code>commandBufferCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkCommandBufferAllocateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkCommandBufferAllocateInfo-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkCommandBufferAllocateInfo-commandPool-parameter",
+          "text": " <code>commandPool</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandPool\">VkCommandPool</a> handle"
+        },
+        {
+          "vuid": "VUID-VkCommandBufferAllocateInfo-level-parameter",
+          "text": " <code>level</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBufferLevel\">VkCommandBufferLevel</a> value"
+        }
+      ]
+    },
+    "vkResetCommandBuffer": {
+      "core": [
+        {
+          "vuid": "VUID-vkResetCommandBuffer-commandBuffer-00045",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> not be in the <a href=\"#commandbuffers-lifecycle\">pending state</a>"
+        },
+        {
+          "vuid": "VUID-vkResetCommandBuffer-commandBuffer-00046",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> have been allocated from a pool that was created with the <code>VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkResetCommandBuffer-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkResetCommandBuffer-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkCommandBufferResetFlagBits\">VkCommandBufferResetFlagBits</a> values"
+        }
+      ]
+    },
+    "vkFreeCommandBuffers": {
+      "core": [
+        {
+          "vuid": "VUID-vkFreeCommandBuffers-pCommandBuffers-00047",
+          "text": " All elements of <code>pCommandBuffers</code> <strong class=\"purple\">must</strong> not be in the <a href=\"#commandbuffers-lifecycle\">pending state</a>"
+        },
+        {
+          "vuid": "VUID-vkFreeCommandBuffers-pCommandBuffers-00048",
+          "text": " <code>pCommandBuffers</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>commandBufferCount</code> <code>VkCommandBuffer</code> handles, each element of which <strong class=\"purple\">must</strong> either be a valid handle or <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkFreeCommandBuffers-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkFreeCommandBuffers-commandPool-parameter",
+          "text": " <code>commandPool</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandPool\">VkCommandPool</a> handle"
+        },
+        {
+          "vuid": "VUID-vkFreeCommandBuffers-commandBufferCount-arraylength",
+          "text": " <code>commandBufferCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkFreeCommandBuffers-commandPool-parent",
+          "text": " <code>commandPool</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        },
+        {
+          "vuid": "VUID-vkFreeCommandBuffers-pCommandBuffers-parent",
+          "text": " Each element of <code>pCommandBuffers</code> that is a valid handle <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>commandPool</code>"
+        }
+      ]
+    },
+    "vkBeginCommandBuffer": {
+      "core": [
+        {
+          "vuid": "VUID-vkBeginCommandBuffer-commandBuffer-00049",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> not be in the <a href=\"#commandbuffers-lifecycle\">recording or pending state</a>."
+        },
+        {
+          "vuid": "VUID-vkBeginCommandBuffer-commandBuffer-00050",
+          "text": " If <code>commandBuffer</code> was allocated from a <a href=\"#VkCommandPool\">VkCommandPool</a> which did not have the <code>VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT</code> flag set, <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">initial state</a>."
+        },
+        {
+          "vuid": "VUID-vkBeginCommandBuffer-commandBuffer-00051",
+          "text": " If <code>commandBuffer</code> is a secondary command buffer, the <code>pInheritanceInfo</code> member of <code>pBeginInfo</code> <strong class=\"purple\">must</strong> be a valid <code>VkCommandBufferInheritanceInfo</code> structure"
+        },
+        {
+          "vuid": "VUID-vkBeginCommandBuffer-commandBuffer-00052",
+          "text": " If <code>commandBuffer</code> is a secondary command buffer and either the <code>occlusionQueryEnable</code> member of the <code>pInheritanceInfo</code> member of <code>pBeginInfo</code> is <code>VK_FALSE</code>, or the precise occlusion queries feature is not enabled, the <code>queryFlags</code> member of the <code>pInheritanceInfo</code> member <code>pBeginInfo</code> <strong class=\"purple\">must</strong> not contain <code>VK_QUERY_CONTROL_PRECISE_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkBeginCommandBuffer-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkBeginCommandBuffer-pBeginInfo-parameter",
+          "text": " <code>pBeginInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkCommandBufferBeginInfo\">VkCommandBufferBeginInfo</a> structure"
+        }
+      ]
+    },
+    "VkCommandBufferBeginInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkCommandBufferBeginInfo-flags-00053",
+          "text": " If <code>flags</code> contains <code>VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT</code>, the <code>renderPass</code> member of <code>pInheritanceInfo</code> <strong class=\"purple\">must</strong> be a valid <code>VkRenderPass</code>"
+        },
+        {
+          "vuid": "VUID-VkCommandBufferBeginInfo-flags-00054",
+          "text": " If <code>flags</code> contains <code>VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT</code>, the <code>subpass</code> member of <code>pInheritanceInfo</code> <strong class=\"purple\">must</strong> be a valid subpass index within the <code>renderPass</code> member of <code>pInheritanceInfo</code>"
+        },
+        {
+          "vuid": "VUID-VkCommandBufferBeginInfo-flags-00055",
+          "text": " If <code>flags</code> contains <code>VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT</code>, the <code>framebuffer</code> member of <code>pInheritanceInfo</code> <strong class=\"purple\">must</strong> be either <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, or a valid <code>VkFramebuffer</code> that is compatible with the <code>renderPass</code> member of <code>pInheritanceInfo</code>"
+        },
+        {
+          "vuid": "VUID-VkCommandBufferBeginInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkCommandBufferBeginInfo-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkDeviceGroupCommandBufferBeginInfo\">VkDeviceGroupCommandBufferBeginInfo</a>"
+        },
+        {
+          "vuid": "VUID-VkCommandBufferBeginInfo-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkCommandBufferUsageFlagBits\">VkCommandBufferUsageFlagBits</a> values"
+        }
+      ]
+    },
+    "VkCommandBufferInheritanceInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkCommandBufferInheritanceInfo-occlusionQueryEnable-00056",
+          "text": " If the <a href=\"#features-inheritedQueries\">inherited queries</a> feature is not enabled, <code>occlusionQueryEnable</code> <strong class=\"purple\">must</strong> be <code>VK_FALSE</code>"
+        },
+        {
+          "vuid": "VUID-VkCommandBufferInheritanceInfo-queryFlags-00057",
+          "text": " If the <a href=\"#features-inheritedQueries\">inherited queries</a> feature is enabled, <code>queryFlags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkQueryControlFlagBits\">VkQueryControlFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkCommandBufferInheritanceInfo-queryFlags-02788",
+          "text": " If the <a href=\"#features-inheritedQueries\">inherited queries</a> feature is not enabled, <code>queryFlags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkCommandBufferInheritanceInfo-pipelineStatistics-02789",
+          "text": " If the <a href=\"#features-pipelineStatisticsQuery\">pipeline statistics queries</a> feature is enabled, <code>pipelineStatistics</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkQueryPipelineStatisticFlagBits\">VkQueryPipelineStatisticFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkCommandBufferInheritanceInfo-pipelineStatistics-00058",
+          "text": " If the <a href=\"#features-pipelineStatisticsQuery\">pipeline statistics queries</a> feature is not enabled, <code>pipelineStatistics</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkCommandBufferInheritanceInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkCommandBufferInheritanceInfo-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkCommandBufferInheritanceConditionalRenderingInfoEXT\">VkCommandBufferInheritanceConditionalRenderingInfoEXT</a>"
+        },
+        {
+          "vuid": "VUID-VkCommandBufferInheritanceInfo-commonparent",
+          "text": " Both of <code>framebuffer</code>, and <code>renderPass</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
+    "VkCommandBufferInheritanceConditionalRenderingInfoEXT": {
+      "(VK_EXT_conditional_rendering)": [
+        {
+          "vuid": "VUID-VkCommandBufferInheritanceConditionalRenderingInfoEXT-conditionalRenderingEnable-01977",
+          "text": " If the <a href=\"#features-inheritedConditionalRendering\">inherited conditional rendering</a> feature is not enabled, <code>conditionalRenderingEnable</code> <strong class=\"purple\">must</strong> be <code>VK_FALSE</code>"
+        },
+        {
+          "vuid": "VUID-VkCommandBufferInheritanceConditionalRenderingInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT</code>"
+        }
+      ]
+    },
+    "vkEndCommandBuffer": {
+      "core": [
+        {
+          "vuid": "VUID-vkEndCommandBuffer-commandBuffer-00059",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>."
+        },
+        {
+          "vuid": "VUID-vkEndCommandBuffer-commandBuffer-00060",
+          "text": " If <code>commandBuffer</code> is a primary command buffer, there <strong class=\"purple\">must</strong> not be an active render pass instance"
+        },
+        {
+          "vuid": "VUID-vkEndCommandBuffer-commandBuffer-00061",
+          "text": " All queries made <a href=\"#queries-operation-active\">active</a> during the recording of <code>commandBuffer</code> <strong class=\"purple\">must</strong> have been made inactive"
+        },
+        {
+          "vuid": "VUID-vkEndCommandBuffer-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        }
+      ],
+      "(VK_EXT_conditional_rendering)": [
+        {
+          "vuid": "VUID-vkEndCommandBuffer-None-01978",
+          "text": " Conditional rendering must not be <a href=\"#active-conditional-rendering\">active</a>"
+        }
+      ],
+      "(VK_EXT_debug_utils)": [
+        {
+          "vuid": "VUID-vkEndCommandBuffer-commandBuffer-01815",
+          "text": " If <code>commandBuffer</code> is a secondary command buffer, there <strong class=\"purple\">must</strong> not be an outstanding <a href=\"#vkCmdBeginDebugUtilsLabelEXT\">vkCmdBeginDebugUtilsLabelEXT</a> command recorded to <code>commandBuffer</code> that has not previously been ended by a call to <a href=\"#vkCmdEndDebugUtilsLabelEXT\">vkCmdEndDebugUtilsLabelEXT</a>."
+        }
+      ],
+      "(VK_EXT_debug_marker)": [
+        {
+          "vuid": "VUID-vkEndCommandBuffer-commandBuffer-00062",
+          "text": " If <code>commandBuffer</code> is a secondary command buffer, there <strong class=\"purple\">must</strong> not be an outstanding <a href=\"#vkCmdDebugMarkerBeginEXT\">vkCmdDebugMarkerBeginEXT</a> command recorded to <code>commandBuffer</code> that has not previously been ended by a call to <a href=\"#vkCmdDebugMarkerEndEXT\">vkCmdDebugMarkerEndEXT</a>."
+        }
+      ]
+    },
+    "vkQueueSubmit": {
+      "core": [
+        {
+          "vuid": "VUID-vkQueueSubmit-fence-00063",
+          "text": " If <code>fence</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>fence</code> <strong class=\"purple\">must</strong> be unsignaled"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit-fence-00064",
+          "text": " If <code>fence</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>fence</code> <strong class=\"purple\">must</strong> not be associated with any other queue command that has not yet completed execution on that queue"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit-pCommandBuffers-00065",
+          "text": " Any calls to <a href=\"#vkCmdSetEvent\">vkCmdSetEvent</a>, <a href=\"#vkCmdResetEvent\">vkCmdResetEvent</a> or <a href=\"#vkCmdWaitEvents\">vkCmdWaitEvents</a> that have been recorded into any of the command buffer elements of the <code>pCommandBuffers</code> member of any element of <code>pSubmits</code>, <strong class=\"purple\">must</strong> not reference any <a href=\"#VkEvent\">VkEvent</a> that is referenced by any of those commands in a command buffer that has been submitted to another queue and is still in the <em>pending state</em>"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit-pWaitDstStageMask-00066",
+          "text": " Any stage flag included in any element of the <code>pWaitDstStageMask</code> member of any element of <code>pSubmits</code> <strong class=\"purple\">must</strong> be a pipeline stage supported by one of the capabilities of <code>queue</code>, as specified in the <a href=\"#synchronization-pipeline-stages-supported\">table of supported pipeline stages</a>"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit-pSignalSemaphores-00067",
+          "text": " Each element of the <code>pSignalSemaphores</code> member of any element of <code>pSubmits</code> <strong class=\"purple\">must</strong> be unsignaled when the semaphore signal operation it defines is executed on the device"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit-pWaitSemaphores-00068",
+          "text": " When a semaphore wait operation referring to a binary semaphore defined by any element of the <code>pWaitSemaphores</code> member of any element of <code>pSubmits</code> executes on <code>queue</code>, there <strong class=\"purple\">must</strong> be no other queues waiting on the same semaphore"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit-pCommandBuffers-00070",
+          "text": " Each element of the <code>pCommandBuffers</code> member of each element of <code>pSubmits</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">pending or executable state</a>"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit-pCommandBuffers-00071",
+          "text": " If any element of the <code>pCommandBuffers</code> member of any element of <code>pSubmits</code> was not recorded with the <code>VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT</code>, it <strong class=\"purple\">must</strong> not be in the <a href=\"#commandbuffers-lifecycle\">pending state</a>"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit-pCommandBuffers-00072",
+          "text": " Any <a href=\"#commandbuffers-secondary\">secondary command buffers recorded</a> into any element of the <code>pCommandBuffers</code> member of any element of <code>pSubmits</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">pending or executable state</a>"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit-pCommandBuffers-00073",
+          "text": " If any <a href=\"#commandbuffers-secondary\">secondary command buffers recorded</a> into any element of the <code>pCommandBuffers</code> member of any element of <code>pSubmits</code> was not recorded with the <code>VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT</code>, it <strong class=\"purple\">must</strong> not be in the <a href=\"#commandbuffers-lifecycle\">pending state</a>"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit-pCommandBuffers-00074",
+          "text": " Each element of the <code>pCommandBuffers</code> member of each element of <code>pSubmits</code> <strong class=\"purple\">must</strong> have been allocated from a <code>VkCommandPool</code> that was created for the same queue family <code>queue</code> belongs to"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit-pSubmits-02207",
+          "text": " If any element of <code>pSubmits</code>-&gt;pCommandBuffers includes a <a href=\"#synchronization-queue-transfers-acquire\">Queue Family Transfer Acquire Operation</a>, there <strong class=\"purple\">must</strong> exist a previously submitted <a href=\"#synchronization-queue-transfers-release\">Queue Family Transfer Release Operation</a> on a queue in the queue family identified by the acquire operation, with parameters matching the acquire operation as defined in the definition of such <a href=\"#synchronization-queue-transfers-acquire\">acquire operations</a>, and which happens before the acquire operation"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit-pSubmits-02808",
+          "text": " Any resource created with <code>VK_SHARING_MODE_EXCLUSIVE</code> that is read by an operation specified by <code>pSubmits</code> <strong class=\"purple\">must</strong> not be owned by any queue family other than the one which <code>queue</code> belongs to, at the time it is executed"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit-queue-parameter",
+          "text": " <code>queue</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkQueue\">VkQueue</a> handle"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit-pSubmits-parameter",
+          "text": " If <code>submitCount</code> is not <code>0</code>, <code>pSubmits</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>submitCount</code> valid <a href=\"#VkSubmitInfo\">VkSubmitInfo</a> structures"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit-fence-parameter",
+          "text": " If <code>fence</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>fence</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFence\">VkFence</a> handle"
+        },
+        {
+          "vuid": "VUID-vkQueueSubmit-commonparent",
+          "text": " Both of <code>fence</code>, and <code>queue</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "!(VK_KHR_timeline_semaphore)": [
+        {
+          "vuid": "VUID-vkQueueSubmit-pWaitSemaphores-00069",
+          "text": " All elements of the <code>pWaitSemaphores</code> member of all elements of <code>pSubmits</code> <strong class=\"purple\">must</strong> be semaphores that are signaled, or have <a href=\"#synchronization-semaphores-signaling\">semaphore signal operations</a> previously submitted for execution"
+        }
+      ],
+      "(VK_KHR_timeline_semaphore)": [
+        {
+          "vuid": "VUID-vkQueueSubmit-pWaitSemaphores-03238",
+          "text": " All elements of the <code>pWaitSemaphores</code> member of all elements of <code>pSubmits</code> created with a <a href=\"#VkSemaphoreTypeKHR\">VkSemaphoreTypeKHR</a> of <code>VK_SEMAPHORE_TYPE_BINARY_KHR</code> <strong class=\"purple\">must</strong> reference a semaphore signal operation that has been submitted for execution and any semaphore signal operations on which it depends (if any) <strong class=\"purple\">must</strong> have also been submitted for execution"
+        }
+      ],
+      "(VK_KHR_performance_query)": [
+        {
+          "vuid": "VUID-vkQueueSubmit-pCommandBuffers-03220",
+          "text": " If a command recorded into any element of <code>pCommandBuffers</code> was a <a href=\"#vkCmdBeginQuery\">vkCmdBeginQuery</a> whose <code>queryPool</code> was created with a <code>queryType</code> of <code>VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR</code>, the <a href=\"#profiling-lock\">profiling lock</a> <strong class=\"purple\">must</strong> have been held continuously on the <code>VkDevice</code> that <code>queue</code> was retrieved from, throughout recording of those command buffers"
+        }
+      ]
+    },
+    "VkSubmitInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkSubmitInfo-pCommandBuffers-00075",
+          "text": " Each element of <code>pCommandBuffers</code> <strong class=\"purple\">must</strong> not have been allocated with <code>VK_COMMAND_BUFFER_LEVEL_SECONDARY</code>"
+        },
+        {
+          "vuid": "VUID-VkSubmitInfo-pWaitDstStageMask-00076",
+          "text": " If the <a href=\"#features-geometryShader\">geometry shaders</a> feature is not enabled, each element of <code>pWaitDstStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkSubmitInfo-pWaitDstStageMask-00077",
+          "text": " If the <a href=\"#features-tessellationShader\">tessellation shaders</a> feature is not enabled, each element of <code>pWaitDstStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT</code> or <code>VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkSubmitInfo-pWaitDstStageMask-00078",
+          "text": " Each element of <code>pWaitDstStageMask</code> <strong class=\"purple\">must</strong> not include <code>VK_PIPELINE_STAGE_HOST_BIT</code>."
+        },
+        {
+          "vuid": "VUID-VkSubmitInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SUBMIT_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkSubmitInfo-pNext-pNext",
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkD3D12FenceSubmitInfoKHR\">VkD3D12FenceSubmitInfoKHR</a>, <a href=\"#VkDeviceGroupSubmitInfo\">VkDeviceGroupSubmitInfo</a>, <a href=\"#VkPerformanceQuerySubmitInfoKHR\">VkPerformanceQuerySubmitInfoKHR</a>, <a href=\"#VkProtectedSubmitInfo\">VkProtectedSubmitInfo</a>, <a href=\"#VkTimelineSemaphoreSubmitInfoKHR\">VkTimelineSemaphoreSubmitInfoKHR</a>, <a href=\"#VkWin32KeyedMutexAcquireReleaseInfoKHR\">VkWin32KeyedMutexAcquireReleaseInfoKHR</a>, or <a href=\"#VkWin32KeyedMutexAcquireReleaseInfoNV\">VkWin32KeyedMutexAcquireReleaseInfoNV</a>"
+        },
+        {
+          "vuid": "VUID-VkSubmitInfo-sType-unique",
+          "text": " Each <code>sType</code> member in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
+        },
+        {
+          "vuid": "VUID-VkSubmitInfo-pWaitSemaphores-parameter",
+          "text": " If <code>waitSemaphoreCount</code> is not <code>0</code>, <code>pWaitSemaphores</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>waitSemaphoreCount</code> valid <a href=\"#VkSemaphore\">VkSemaphore</a> handles"
+        },
+        {
+          "vuid": "VUID-VkSubmitInfo-pWaitDstStageMask-parameter",
+          "text": " If <code>waitSemaphoreCount</code> is not <code>0</code>, <code>pWaitDstStageMask</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>waitSemaphoreCount</code> valid combinations of <a href=\"#VkPipelineStageFlagBits\">VkPipelineStageFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkSubmitInfo-pWaitDstStageMask-requiredbitmask",
+          "text": " Each element of <code>pWaitDstStageMask</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkSubmitInfo-pCommandBuffers-parameter",
+          "text": " If <code>commandBufferCount</code> is not <code>0</code>, <code>pCommandBuffers</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>commandBufferCount</code> valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handles"
+        },
+        {
+          "vuid": "VUID-VkSubmitInfo-pSignalSemaphores-parameter",
+          "text": " If <code>signalSemaphoreCount</code> is not <code>0</code>, <code>pSignalSemaphores</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>signalSemaphoreCount</code> valid <a href=\"#VkSemaphore\">VkSemaphore</a> handles"
+        },
+        {
+          "vuid": "VUID-VkSubmitInfo-commonparent",
+          "text": " Each of the elements of <code>pCommandBuffers</code>, the elements of <code>pSignalSemaphores</code>, and the elements of <code>pWaitSemaphores</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_KHR_timeline_semaphore)": [
+        {
+          "vuid": "VUID-VkSubmitInfo-pWaitSemaphores-03239",
+          "text": " If any element of <code>pWaitSemaphores</code> or <code>pSignalSemaphores</code> was created with a <a href=\"#VkSemaphoreTypeKHR\">VkSemaphoreTypeKHR</a> of <code>VK_SEMAPHORE_TYPE_TIMELINE_KHR</code>, then the <code>pNext</code> chain <strong class=\"purple\">must</strong> include a <a href=\"#VkTimelineSemaphoreSubmitInfoKHR\">VkTimelineSemaphoreSubmitInfoKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-VkSubmitInfo-pNext-03240",
+          "text": " If the <code>pNext</code> chain of this structure includes a <a href=\"#VkTimelineSemaphoreSubmitInfoKHR\">VkTimelineSemaphoreSubmitInfoKHR</a> structure and any element of <code>pWaitSemaphores</code> was created with a <a href=\"#VkSemaphoreTypeKHR\">VkSemaphoreTypeKHR</a> of <code>VK_SEMAPHORE_TYPE_TIMELINE_KHR</code>, then its <code>waitSemaphoreValueCount</code> member <strong class=\"purple\">must</strong> equal <code>waitSemaphoreCount</code>"
+        },
+        {
+          "vuid": "VUID-VkSubmitInfo-pNext-03241",
+          "text": " If the <code>pNext</code> chain of this structure includes a <a href=\"#VkTimelineSemaphoreSubmitInfoKHR\">VkTimelineSemaphoreSubmitInfoKHR</a> structure and any element of <code>pSignalSemaphores</code> was created with a <a href=\"#VkSemaphoreTypeKHR\">VkSemaphoreTypeKHR</a> of <code>VK_SEMAPHORE_TYPE_TIMELINE_KHR</code>, then its <code>signalSemaphoreValueCount</code> member <strong class=\"purple\">must</strong> equal <code>signalSemaphoreCount</code>"
+        },
+        {
+          "vuid": "VUID-VkSubmitInfo-pSignalSemaphores-03242",
+          "text": " For each element of <code>pSignalSemaphores</code> created with a <a href=\"#VkSemaphoreTypeKHR\">VkSemaphoreTypeKHR</a> of <code>VK_SEMAPHORE_TYPE_TIMELINE_KHR</code> the corresponding element of <a href=\"#VkTimelineSemaphoreSubmitInfoKHR\">VkTimelineSemaphoreSubmitInfoKHR</a>::pSignalSemaphoreValues <strong class=\"purple\">must</strong> have a value greater than the current value of the semaphore when the <a href=\"#synchronization-semaphores-signaling\">semaphore signal operation</a> is executed"
+        },
+        {
+          "vuid": "VUID-VkSubmitInfo-pWaitSemaphores-03243",
+          "text": " For each element of <code>pWaitSemaphores</code> created with a <a href=\"#VkSemaphoreTypeKHR\">VkSemaphoreTypeKHR</a> of <code>VK_SEMAPHORE_TYPE_TIMELINE_KHR</code> the corresponding element of <a href=\"#VkTimelineSemaphoreSubmitInfoKHR\">VkTimelineSemaphoreSubmitInfoKHR</a>::pWaitSemaphoreValues <strong class=\"purple\">must</strong> have a value which does not differ from the current value of the semaphore or the value of any outstanding semaphore wait or signal operation on that semaphore by more than <a href=\"#limits-maxTimelineSemaphoreValueDifference\"><code>maxTimelineSemaphoreValueDifference</code></a>."
+        },
+        {
+          "vuid": "VUID-VkSubmitInfo-pSignalSemaphores-03244",
+          "text": " For each element of <code>pSignalSemaphores</code> created with a <a href=\"#VkSemaphoreTypeKHR\">VkSemaphoreTypeKHR</a> of <code>VK_SEMAPHORE_TYPE_TIMELINE_KHR</code> the corresponding element of <a href=\"#VkTimelineSemaphoreSubmitInfoKHR\">VkTimelineSemaphoreSubmitInfoKHR</a>::pSignalSemaphoreValues <strong class=\"purple\">must</strong> have a value which does not differ from the current value of the semaphore or the value of any outstanding semaphore wait or signal operation on that semaphore by more than <a href=\"#limits-maxTimelineSemaphoreValueDifference\"><code>maxTimelineSemaphoreValueDifference</code></a>."
+        }
+      ],
+      "(VK_NV_mesh_shader)": [
+        {
+          "vuid": "VUID-VkSubmitInfo-pWaitDstStageMask-02089",
+          "text": " If the <a href=\"#features-meshShader\">mesh shaders</a> feature is not enabled, each element of <code>pWaitDstStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkSubmitInfo-pWaitDstStageMask-02090",
+          "text": " If the <a href=\"#features-taskShader\">task shaders</a> feature is not enabled, each element of <code>pWaitDstStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV</code>"
+        }
+      ]
+    },
+    "VkTimelineSemaphoreSubmitInfoKHR": {
+      "(VK_KHR_timeline_semaphore)": [
+        {
+          "vuid": "VUID-VkTimelineSemaphoreSubmitInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkTimelineSemaphoreSubmitInfoKHR-pWaitSemaphoreValues-parameter",
+          "text": " If <code>waitSemaphoreValueCount</code> is not <code>0</code>, and <code>pWaitSemaphoreValues</code> is not <code>NULL</code>, <code>pWaitSemaphoreValues</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>waitSemaphoreValueCount</code> <code>uint64_t</code> values"
+        },
+        {
+          "vuid": "VUID-VkTimelineSemaphoreSubmitInfoKHR-pSignalSemaphoreValues-parameter",
+          "text": " If <code>signalSemaphoreValueCount</code> is not <code>0</code>, and <code>pSignalSemaphoreValues</code> is not <code>NULL</code>, <code>pSignalSemaphoreValues</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>signalSemaphoreValueCount</code> <code>uint64_t</code> values"
+        }
+      ]
+    },
+    "VkD3D12FenceSubmitInfoKHR": {
+      "(VK_KHR_external_semaphore_win32)": [
+        {
+          "vuid": "VUID-VkD3D12FenceSubmitInfoKHR-waitSemaphoreValuesCount-00079",
+          "text": " <code>waitSemaphoreValuesCount</code> <strong class=\"purple\">must</strong> be the same value as <code>VkSubmitInfo</code>::<code>waitSemaphoreCount</code>, where <code>VkSubmitInfo</code> is in the <code>pNext</code> chain of this <code>VkD3D12FenceSubmitInfoKHR</code> structure."
+        },
+        {
+          "vuid": "VUID-VkD3D12FenceSubmitInfoKHR-signalSemaphoreValuesCount-00080",
+          "text": " <code>signalSemaphoreValuesCount</code> <strong class=\"purple\">must</strong> be the same value as <code>VkSubmitInfo</code>::<code>signalSemaphoreCount</code>, where <code>VkSubmitInfo</code> is in the <code>pNext</code> chain of this <code>VkD3D12FenceSubmitInfoKHR</code> structure."
+        },
+        {
+          "vuid": "VUID-VkD3D12FenceSubmitInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkD3D12FenceSubmitInfoKHR-pWaitSemaphoreValues-parameter",
+          "text": " If <code>waitSemaphoreValuesCount</code> is not <code>0</code>, and <code>pWaitSemaphoreValues</code> is not <code>NULL</code>, <code>pWaitSemaphoreValues</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>waitSemaphoreValuesCount</code> <code>uint64_t</code> values"
+        },
+        {
+          "vuid": "VUID-VkD3D12FenceSubmitInfoKHR-pSignalSemaphoreValues-parameter",
+          "text": " If <code>signalSemaphoreValuesCount</code> is not <code>0</code>, and <code>pSignalSemaphoreValues</code> is not <code>NULL</code>, <code>pSignalSemaphoreValues</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>signalSemaphoreValuesCount</code> <code>uint64_t</code> values"
+        }
+      ]
+    },
+    "VkWin32KeyedMutexAcquireReleaseInfoKHR": {
+      "(VK_KHR_win32_keyed_mutex)": [
+        {
+          "vuid": "VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pAcquireSyncs-00081",
+          "text": " Each member of <code>pAcquireSyncs</code> and <code>pReleaseSyncs</code> <strong class=\"purple\">must</strong> be a device memory object imported by setting <a href=\"#VkImportMemoryWin32HandleInfoKHR\">VkImportMemoryWin32HandleInfoKHR</a>::<code>handleType</code> to <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT</code> or <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT</code>."
+        },
+        {
+          "vuid": "VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pAcquireSyncs-parameter",
+          "text": " If <code>acquireCount</code> is not <code>0</code>, <code>pAcquireSyncs</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>acquireCount</code> valid <a href=\"#VkDeviceMemory\">VkDeviceMemory</a> handles"
+        },
+        {
+          "vuid": "VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pAcquireKeys-parameter",
+          "text": " If <code>acquireCount</code> is not <code>0</code>, <code>pAcquireKeys</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>acquireCount</code> <code>uint64_t</code> values"
+        },
+        {
+          "vuid": "VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pAcquireTimeouts-parameter",
+          "text": " If <code>acquireCount</code> is not <code>0</code>, <code>pAcquireTimeouts</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>acquireCount</code> <code>uint32_t</code> values"
+        },
+        {
+          "vuid": "VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pReleaseSyncs-parameter",
+          "text": " If <code>releaseCount</code> is not <code>0</code>, <code>pReleaseSyncs</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>releaseCount</code> valid <a href=\"#VkDeviceMemory\">VkDeviceMemory</a> handles"
+        },
+        {
+          "vuid": "VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pReleaseKeys-parameter",
+          "text": " If <code>releaseCount</code> is not <code>0</code>, <code>pReleaseKeys</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>releaseCount</code> <code>uint64_t</code> values"
+        },
+        {
+          "vuid": "VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-commonparent",
+          "text": " Both of the elements of <code>pAcquireSyncs</code>, and the elements of <code>pReleaseSyncs</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
+    "VkWin32KeyedMutexAcquireReleaseInfoNV": {
+      "(VK_NV_win32_keyed_mutex)": [
+        {
+          "vuid": "VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pAcquireSyncs-parameter",
+          "text": " If <code>acquireCount</code> is not <code>0</code>, <code>pAcquireSyncs</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>acquireCount</code> valid <a href=\"#VkDeviceMemory\">VkDeviceMemory</a> handles"
+        },
+        {
+          "vuid": "VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pAcquireKeys-parameter",
+          "text": " If <code>acquireCount</code> is not <code>0</code>, <code>pAcquireKeys</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>acquireCount</code> <code>uint64_t</code> values"
+        },
+        {
+          "vuid": "VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pAcquireTimeoutMilliseconds-parameter",
+          "text": " If <code>acquireCount</code> is not <code>0</code>, <code>pAcquireTimeoutMilliseconds</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>acquireCount</code> <code>uint32_t</code> values"
+        },
+        {
+          "vuid": "VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pReleaseSyncs-parameter",
+          "text": " If <code>releaseCount</code> is not <code>0</code>, <code>pReleaseSyncs</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>releaseCount</code> valid <a href=\"#VkDeviceMemory\">VkDeviceMemory</a> handles"
+        },
+        {
+          "vuid": "VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pReleaseKeys-parameter",
+          "text": " If <code>releaseCount</code> is not <code>0</code>, <code>pReleaseKeys</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>releaseCount</code> <code>uint64_t</code> values"
+        },
+        {
+          "vuid": "VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-commonparent",
+          "text": " Both of the elements of <code>pAcquireSyncs</code>, and the elements of <code>pReleaseSyncs</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
+    "VkProtectedSubmitInfo": {
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-VkProtectedSubmitInfo-protectedSubmit-01816",
+          "text": " If the protected memory feature is not enabled, <code>protectedSubmit</code> <strong class=\"purple\">must</strong> not be <code>VK_TRUE</code>."
+        },
+        {
+          "vuid": "VUID-VkProtectedSubmitInfo-protectedSubmit-01817",
+          "text": " If <code>protectedSubmit</code> is <code>VK_TRUE</code>, then each element of the <code>pCommandBuffers</code> array <strong class=\"purple\">must</strong> be a protected command buffer."
+        },
+        {
+          "vuid": "VUID-VkProtectedSubmitInfo-protectedSubmit-01818",
+          "text": " If <code>protectedSubmit</code> is <code>VK_FALSE</code>, then each element of the <code>pCommandBuffers</code> array <strong class=\"purple\">must</strong> be an unprotected command buffer."
+        },
+        {
+          "vuid": "VUID-VkProtectedSubmitInfo-pNext-01819",
+          "text": " If the <code>VkSubmitInfo</code>::<code>pNext</code> chain does not include a <code>VkProtectedSubmitInfo</code> structure, then each element of the command buffer of the <code>pCommandBuffers</code> array <strong class=\"purple\">must</strong> be an unprotected command buffer."
+        },
+        {
+          "vuid": "VUID-VkProtectedSubmitInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO</code>"
+        }
+      ]
+    },
+    "VkDeviceGroupSubmitInfo": {
+      "(VK_VERSION_1_1,VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-VkDeviceGroupSubmitInfo-waitSemaphoreCount-00082",
+          "text": " <code>waitSemaphoreCount</code> <strong class=\"purple\">must</strong> equal <a href=\"#VkSubmitInfo\">VkSubmitInfo</a>::<code>waitSemaphoreCount</code>"
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupSubmitInfo-commandBufferCount-00083",
+          "text": " <code>commandBufferCount</code> <strong class=\"purple\">must</strong> equal <a href=\"#VkSubmitInfo\">VkSubmitInfo</a>::<code>commandBufferCount</code>"
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupSubmitInfo-signalSemaphoreCount-00084",
+          "text": " <code>signalSemaphoreCount</code> <strong class=\"purple\">must</strong> equal <a href=\"#VkSubmitInfo\">VkSubmitInfo</a>::<code>signalSemaphoreCount</code>"
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupSubmitInfo-pWaitSemaphoreDeviceIndices-00085",
+          "text": " All elements of <code>pWaitSemaphoreDeviceIndices</code> and <code>pSignalSemaphoreDeviceIndices</code> <strong class=\"purple\">must</strong> be valid device indices"
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupSubmitInfo-pCommandBufferDeviceMasks-00086",
+          "text": " All elements of <code>pCommandBufferDeviceMasks</code> <strong class=\"purple\">must</strong> be valid device masks"
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupSubmitInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupSubmitInfo-pWaitSemaphoreDeviceIndices-parameter",
+          "text": " If <code>waitSemaphoreCount</code> is not <code>0</code>, <code>pWaitSemaphoreDeviceIndices</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>waitSemaphoreCount</code> <code>uint32_t</code> values"
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupSubmitInfo-pCommandBufferDeviceMasks-parameter",
+          "text": " If <code>commandBufferCount</code> is not <code>0</code>, <code>pCommandBufferDeviceMasks</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>commandBufferCount</code> <code>uint32_t</code> values"
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupSubmitInfo-pSignalSemaphoreDeviceIndices-parameter",
+          "text": " If <code>signalSemaphoreCount</code> is not <code>0</code>, <code>pSignalSemaphoreDeviceIndices</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>signalSemaphoreCount</code> <code>uint32_t</code> values"
+        }
+      ]
+    },
+    "VkPerformanceQuerySubmitInfoKHR": {
+      "(VK_KHR_performance_query)": [
+        {
+          "vuid": "VUID-VkPerformanceQuerySubmitInfoKHR-counterPassIndex-03221",
+          "text": " <code>counterPassIndex</code> <strong class=\"purple\">must</strong> be less than the number of counter passes required by any queries within the batch. The required number of counter passes for a performance query is obtained by calling <a href=\"#vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR\">vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR</a>"
+        },
+        {
+          "vuid": "VUID-VkPerformanceQuerySubmitInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR</code>"
+        }
+      ]
+    },
+    "vkCmdExecuteCommands": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdExecuteCommands-commandBuffer-00087",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> have been allocated with a <code>level</code> of <code>VK_COMMAND_BUFFER_LEVEL_PRIMARY</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdExecuteCommands-pCommandBuffers-00088",
+          "text": " Each element of <code>pCommandBuffers</code> <strong class=\"purple\">must</strong> have been allocated with a <code>level</code> of <code>VK_COMMAND_BUFFER_LEVEL_SECONDARY</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdExecuteCommands-pCommandBuffers-00089",
+          "text": " Each element of <code>pCommandBuffers</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">pending or executable state</a>."
+        },
+        {
+          "vuid": "VUID-vkCmdExecuteCommands-pCommandBuffers-00090",
+          "text": " If any element of <code>pCommandBuffers</code> was not recorded with the <code>VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT</code> flag, and it was recorded into any other primary command buffer, that primary command buffer <strong class=\"purple\">must</strong> not be in the <a href=\"#commandbuffers-lifecycle\">pending state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdExecuteCommands-pCommandBuffers-00091",
+          "text": " If any element of <code>pCommandBuffers</code> was not recorded with the <code>VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT</code> flag, it <strong class=\"purple\">must</strong> not be in the <a href=\"#commandbuffers-lifecycle\">pending state</a>."
+        },
+        {
+          "vuid": "VUID-vkCmdExecuteCommands-pCommandBuffers-00092",
+          "text": " If any element of <code>pCommandBuffers</code> was not recorded with the <code>VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT</code> flag, it <strong class=\"purple\">must</strong> not have already been recorded to <code>commandBuffer</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdExecuteCommands-pCommandBuffers-00093",
+          "text": " If any element of <code>pCommandBuffers</code> was not recorded with the <code>VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT</code> flag, it <strong class=\"purple\">must</strong> not appear more than once in <code>pCommandBuffers</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdExecuteCommands-pCommandBuffers-00094",
+          "text": " Each element of <code>pCommandBuffers</code> <strong class=\"purple\">must</strong> have been allocated from a <code>VkCommandPool</code> that was created for the same queue family as the <code>VkCommandPool</code> from which <code>commandBuffer</code> was allocated"
+        },
+        {
+          "vuid": "VUID-vkCmdExecuteCommands-contents-00095",
+          "text": " If <code>vkCmdExecuteCommands</code> is being called within a render pass instance, that render pass instance <strong class=\"purple\">must</strong> have been begun with the <code>contents</code> parameter of <code>vkCmdBeginRenderPass</code> set to <code>VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdExecuteCommands-pCommandBuffers-00096",
+          "text": " If <code>vkCmdExecuteCommands</code> is being called within a render pass instance, each element of <code>pCommandBuffers</code> <strong class=\"purple\">must</strong> have been recorded with the <code>VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdExecuteCommands-pCommandBuffers-00097",
+          "text": " If <code>vkCmdExecuteCommands</code> is being called within a render pass instance, each element of <code>pCommandBuffers</code> <strong class=\"purple\">must</strong> have been recorded with <code>VkCommandBufferInheritanceInfo</code>::<code>subpass</code> set to the index of the subpass which the given command buffer will be executed in"
+        },
+        {
+          "vuid": "VUID-vkCmdExecuteCommands-pInheritanceInfo-00098",
+          "text": " If <code>vkCmdExecuteCommands</code> is being called within a render pass instance, the render passes specified in the <code>pBeginInfo</code>-&gt;pInheritanceInfo-&gt;renderPass members of the <a href=\"#vkBeginCommandBuffer\">vkBeginCommandBuffer</a> commands used to begin recording each element of <code>pCommandBuffers</code> <strong class=\"purple\">must</strong> be <a href=\"#renderpass-compatibility\">compatible</a> with the current render pass."
+        },
+        {
+          "vuid": "VUID-vkCmdExecuteCommands-pCommandBuffers-00099",
+          "text": " If <code>vkCmdExecuteCommands</code> is being called within a render pass instance, and any element of <code>pCommandBuffers</code> was recorded with <a href=\"#VkCommandBufferInheritanceInfo\">VkCommandBufferInheritanceInfo</a>::<code>framebuffer</code> not equal to <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, that <code>VkFramebuffer</code> <strong class=\"purple\">must</strong> match the <code>VkFramebuffer</code> used in the current render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdExecuteCommands-pCommandBuffers-00100",
+          "text": " If <code>vkCmdExecuteCommands</code> is not being called within a render pass instance, each element of <code>pCommandBuffers</code> <strong class=\"purple\">must</strong> not have been recorded with the <code>VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdExecuteCommands-commandBuffer-00101",
+          "text": " If the <a href=\"#features-inheritedQueries\">inherited queries</a> feature is not enabled, <code>commandBuffer</code> <strong class=\"purple\">must</strong> not have any queries <a href=\"#queries-operation-active\">active</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdExecuteCommands-commandBuffer-00102",
+          "text": " If <code>commandBuffer</code> has a <code>VK_QUERY_TYPE_OCCLUSION</code> query <a href=\"#queries-operation-active\">active</a>, then each element of <code>pCommandBuffers</code> <strong class=\"purple\">must</strong> have been recorded with <code>VkCommandBufferInheritanceInfo</code>::<code>occlusionQueryEnable</code> set to <code>VK_TRUE</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdExecuteCommands-commandBuffer-00103",
+          "text": " If <code>commandBuffer</code> has a <code>VK_QUERY_TYPE_OCCLUSION</code> query <a href=\"#queries-operation-active\">active</a>, then each element of <code>pCommandBuffers</code> <strong class=\"purple\">must</strong> have been recorded with <code>VkCommandBufferInheritanceInfo</code>::<code>queryFlags</code> having all bits set that are set for the query"
+        },
+        {
+          "vuid": "VUID-vkCmdExecuteCommands-commandBuffer-00104",
+          "text": " If <code>commandBuffer</code> has a <code>VK_QUERY_TYPE_PIPELINE_STATISTICS</code> query <a href=\"#queries-operation-active\">active</a>, then each element of <code>pCommandBuffers</code> <strong class=\"purple\">must</strong> have been recorded with <code>VkCommandBufferInheritanceInfo</code>::<code>pipelineStatistics</code> having all bits set that are set in the <code>VkQueryPool</code> the query uses"
+        },
+        {
+          "vuid": "VUID-vkCmdExecuteCommands-pCommandBuffers-00105",
+          "text": " Each element of <code>pCommandBuffers</code> <strong class=\"purple\">must</strong> not begin any query types that are <a href=\"#queries-operation-active\">active</a> in <code>commandBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdExecuteCommands-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdExecuteCommands-pCommandBuffers-parameter",
+          "text": " <code>pCommandBuffers</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>commandBufferCount</code> valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handles"
+        },
+        {
+          "vuid": "VUID-vkCmdExecuteCommands-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdExecuteCommands-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support transfer, graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdExecuteCommands-bufferlevel",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a primary <code>VkCommandBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdExecuteCommands-commandBufferCount-arraylength",
+          "text": " <code>commandBufferCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdExecuteCommands-commonparent",
+          "text": " Both of <code>commandBuffer</code>, and the elements of <code>pCommandBuffers</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdExecuteCommands-commandBuffer-01820",
+          "text": " If <code>commandBuffer</code> is a protected command buffer, then each element of <code>pCommandBuffers</code> <strong class=\"purple\">must</strong> be a protected command buffer."
+        },
+        {
+          "vuid": "VUID-vkCmdExecuteCommands-commandBuffer-01821",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, then each element of <code>pCommandBuffers</code> <strong class=\"purple\">must</strong> be an unprotected command buffer."
+        }
+      ],
+      "(VK_EXT_transform_feedback)": [
+        {
+          "vuid": "VUID-vkCmdExecuteCommands-None-02286",
+          "text": " This command <strong class=\"purple\">must</strong> not be recorded when transform feedback is active"
+        }
+      ]
+    },
+    "VkDeviceGroupCommandBufferBeginInfo": {
+      "(VK_VERSION_1_1,VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-VkDeviceGroupCommandBufferBeginInfo-deviceMask-00106",
+          "text": " <code>deviceMask</code> <strong class=\"purple\">must</strong> be a valid device mask value"
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupCommandBufferBeginInfo-deviceMask-00107",
+          "text": " <code>deviceMask</code> <strong class=\"purple\">must</strong> not be zero"
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupCommandBufferBeginInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO</code>"
+        }
+      ]
+    },
+    "vkCmdSetDeviceMask": {
+      "(VK_VERSION_1_1,VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-vkCmdSetDeviceMask-deviceMask-00108",
+          "text": " <code>deviceMask</code> <strong class=\"purple\">must</strong> be a valid device mask value"
+        },
+        {
+          "vuid": "VUID-vkCmdSetDeviceMask-deviceMask-00109",
+          "text": " <code>deviceMask</code> <strong class=\"purple\">must</strong> not be zero"
+        },
+        {
+          "vuid": "VUID-vkCmdSetDeviceMask-deviceMask-00110",
+          "text": " <code>deviceMask</code> <strong class=\"purple\">must</strong> not include any set bits that were not in the <a href=\"#VkDeviceGroupCommandBufferBeginInfo\">VkDeviceGroupCommandBufferBeginInfo</a>::<code>deviceMask</code> value when the command buffer began recording."
+        },
+        {
+          "vuid": "VUID-vkCmdSetDeviceMask-deviceMask-00111",
+          "text": " If <code>vkCmdSetDeviceMask</code> is called inside a render pass instance, <code>deviceMask</code> <strong class=\"purple\">must</strong> not include any set bits that were not in the <a href=\"#VkDeviceGroupRenderPassBeginInfo\">VkDeviceGroupRenderPassBeginInfo</a>::<code>deviceMask</code> value when the render pass instance began recording."
+        },
+        {
+          "vuid": "VUID-vkCmdSetDeviceMask-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdSetDeviceMask-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetDeviceMask-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, compute, or transfer operations"
+        }
+      ]
+    },
+    "vkCreateFence": {
+      "core": [
+        {
+          "vuid": "VUID-vkCreateFence-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateFence-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkFenceCreateInfo\">VkFenceCreateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateFence-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateFence-pFence-parameter",
+          "text": " <code>pFence</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkFence\">VkFence</a> handle"
+        }
+      ]
+    },
+    "VkFenceCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkFenceCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_FENCE_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkFenceCreateInfo-pNext-pNext",
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkExportFenceCreateInfo\">VkExportFenceCreateInfo</a> or <a href=\"#VkExportFenceWin32HandleInfoKHR\">VkExportFenceWin32HandleInfoKHR</a>"
+        },
+        {
+          "vuid": "VUID-VkFenceCreateInfo-sType-unique",
+          "text": " Each <code>sType</code> member in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
+        },
+        {
+          "vuid": "VUID-VkFenceCreateInfo-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkFenceCreateFlagBits\">VkFenceCreateFlagBits</a> values"
+        }
+      ]
+    },
+    "VkExportFenceCreateInfo": {
+      "(VK_VERSION_1_1,VK_KHR_external_fence)": [
+        {
+          "vuid": "VUID-VkExportFenceCreateInfo-handleTypes-01446",
+          "text": " The bits in <code>handleTypes</code> must be supported and compatible, as reported by <a href=\"#VkExternalFenceProperties\">VkExternalFenceProperties</a>."
+        },
+        {
+          "vuid": "VUID-VkExportFenceCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkExportFenceCreateInfo-handleTypes-parameter",
+          "text": " <code>handleTypes</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkExternalFenceHandleTypeFlagBits\">VkExternalFenceHandleTypeFlagBits</a> values"
+        }
+      ]
+    },
+    "VkExportFenceWin32HandleInfoKHR": {
+      "(VK_KHR_external_fence_win32)": [
+        {
+          "vuid": "VUID-VkExportFenceWin32HandleInfoKHR-handleTypes-01447",
+          "text": " If <a href=\"#VkExportFenceCreateInfo\">VkExportFenceCreateInfo</a>::<code>handleTypes</code> does not include <code>VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT</code>, a <code>VkExportFenceWin32HandleInfoKHR</code> structure <strong class=\"purple\">must</strong> not be included in the <code>pNext</code> chain of <a href=\"#VkFenceCreateInfo\">VkFenceCreateInfo</a>."
+        },
+        {
+          "vuid": "VUID-VkExportFenceWin32HandleInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkExportFenceWin32HandleInfoKHR-pAttributes-parameter",
+          "text": " If <code>pAttributes</code> is not <code>NULL</code>, <code>pAttributes</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <code>SECURITY_ATTRIBUTES</code> value"
+        }
+      ]
+    },
+    "vkGetFenceWin32HandleKHR": {
+      "(VK_KHR_external_fence_win32)": [
+        {
+          "vuid": "VUID-vkGetFenceWin32HandleKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetFenceWin32HandleKHR-pGetWin32HandleInfo-parameter",
+          "text": " <code>pGetWin32HandleInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkFenceGetWin32HandleInfoKHR\">VkFenceGetWin32HandleInfoKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetFenceWin32HandleKHR-pHandle-parameter",
+          "text": " <code>pHandle</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>HANDLE</code> value"
+        }
+      ]
+    },
+    "VkFenceGetWin32HandleInfoKHR": {
+      "(VK_KHR_external_fence_win32)": [
+        {
+          "vuid": "VUID-VkFenceGetWin32HandleInfoKHR-handleType-01448",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> have been included in <a href=\"#VkExportFenceCreateInfo\">VkExportFenceCreateInfo</a>::<code>handleTypes</code> when the <code>fence</code>&#8217;s current payload was created."
+        },
+        {
+          "vuid": "VUID-VkFenceGetWin32HandleInfoKHR-handleType-01449",
+          "text": " If <code>handleType</code> is defined as an NT handle, <a href=\"#vkGetFenceWin32HandleKHR\">vkGetFenceWin32HandleKHR</a> <strong class=\"purple\">must</strong> be called no more than once for each valid unique combination of <code>fence</code> and <code>handleType</code>."
+        },
+        {
+          "vuid": "VUID-VkFenceGetWin32HandleInfoKHR-fence-01450",
+          "text": " <code>fence</code> <strong class=\"purple\">must</strong> not currently have its payload replaced by an imported payload as described below in <a href=\"#synchronization-fences-importing\">Importing Fence Payloads</a> unless that imported payload&#8217;s handle type was included in <a href=\"#VkExternalFenceProperties\">VkExternalFenceProperties</a>::<code>exportFromImportedHandleTypes</code> for <code>handleType</code>."
+        },
+        {
+          "vuid": "VUID-VkFenceGetWin32HandleInfoKHR-handleType-01451",
+          "text": " If <code>handleType</code> refers to a handle type with copy payload transference semantics, <code>fence</code> <strong class=\"purple\">must</strong> be signaled, or have an associated <a href=\"#synchronization-fences-signaling\">fence signal operation</a> pending execution."
+        },
+        {
+          "vuid": "VUID-VkFenceGetWin32HandleInfoKHR-handleType-01452",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> be defined as an NT handle or a global share handle."
+        },
+        {
+          "vuid": "VUID-VkFenceGetWin32HandleInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkFenceGetWin32HandleInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkFenceGetWin32HandleInfoKHR-fence-parameter",
+          "text": " <code>fence</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFence\">VkFence</a> handle"
+        },
+        {
+          "vuid": "VUID-VkFenceGetWin32HandleInfoKHR-handleType-parameter",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkExternalFenceHandleTypeFlagBits\">VkExternalFenceHandleTypeFlagBits</a> value"
+        }
+      ]
+    },
+    "vkGetFenceFdKHR": {
+      "(VK_KHR_external_fence_fd)": [
+        {
+          "vuid": "VUID-vkGetFenceFdKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetFenceFdKHR-pGetFdInfo-parameter",
+          "text": " <code>pGetFdInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkFenceGetFdInfoKHR\">VkFenceGetFdInfoKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetFenceFdKHR-pFd-parameter",
+          "text": " <code>pFd</code> <strong class=\"purple\">must</strong> be a valid pointer to an <code>int</code> value"
+        }
+      ]
+    },
+    "VkFenceGetFdInfoKHR": {
+      "(VK_KHR_external_fence_fd)": [
+        {
+          "vuid": "VUID-VkFenceGetFdInfoKHR-handleType-01453",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> have been included in <a href=\"#VkExportFenceCreateInfo\">VkExportFenceCreateInfo</a>::<code>handleTypes</code> when <code>fence</code>&#8217;s current payload was created."
+        },
+        {
+          "vuid": "VUID-VkFenceGetFdInfoKHR-handleType-01454",
+          "text": " If <code>handleType</code> refers to a handle type with copy payload transference semantics, <code>fence</code> <strong class=\"purple\">must</strong> be signaled, or have an associated <a href=\"#synchronization-fences-signaling\">fence signal operation</a> pending execution."
+        },
+        {
+          "vuid": "VUID-VkFenceGetFdInfoKHR-fence-01455",
+          "text": " <code>fence</code> <strong class=\"purple\">must</strong> not currently have its payload replaced by an imported payload as described below in <a href=\"#synchronization-fences-importing\">Importing Fence Payloads</a> unless that imported payload&#8217;s handle type was included in <a href=\"#VkExternalFenceProperties\">VkExternalFenceProperties</a>::<code>exportFromImportedHandleTypes</code> for <code>handleType</code>."
+        },
+        {
+          "vuid": "VUID-VkFenceGetFdInfoKHR-handleType-01456",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> be defined as a POSIX file descriptor handle."
+        },
+        {
+          "vuid": "VUID-VkFenceGetFdInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkFenceGetFdInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkFenceGetFdInfoKHR-fence-parameter",
+          "text": " <code>fence</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFence\">VkFence</a> handle"
+        },
+        {
+          "vuid": "VUID-VkFenceGetFdInfoKHR-handleType-parameter",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkExternalFenceHandleTypeFlagBits\">VkExternalFenceHandleTypeFlagBits</a> value"
+        }
+      ]
+    },
+    "vkDestroyFence": {
+      "core": [
+        {
+          "vuid": "VUID-vkDestroyFence-fence-01120",
+          "text": " All <a href=\"#devsandqueues-submission\">queue submission</a> commands that refer to <code>fence</code> <strong class=\"purple\">must</strong> have completed execution"
+        },
+        {
+          "vuid": "VUID-vkDestroyFence-fence-01121",
+          "text": " If <code>VkAllocationCallbacks</code> were provided when <code>fence</code> was created, a compatible set of callbacks <strong class=\"purple\">must</strong> be provided here"
+        },
+        {
+          "vuid": "VUID-vkDestroyFence-fence-01122",
+          "text": " If no <code>VkAllocationCallbacks</code> were provided when <code>fence</code> was created, <code>pAllocator</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroyFence-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyFence-fence-parameter",
+          "text": " If <code>fence</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>fence</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFence\">VkFence</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyFence-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkDestroyFence-fence-parent",
+          "text": " If <code>fence</code> is a valid handle, it <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkGetFenceStatus": {
+      "core": [
+        {
+          "vuid": "VUID-vkGetFenceStatus-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetFenceStatus-fence-parameter",
+          "text": " <code>fence</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFence\">VkFence</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetFenceStatus-fence-parent",
+          "text": " <code>fence</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkResetFences": {
+      "core": [
+        {
+          "vuid": "VUID-vkResetFences-pFences-01123",
+          "text": " Each element of <code>pFences</code> <strong class=\"purple\">must</strong> not be currently associated with any queue command that has not yet completed execution on that queue"
+        },
+        {
+          "vuid": "VUID-vkResetFences-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkResetFences-pFences-parameter",
+          "text": " <code>pFences</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>fenceCount</code> valid <a href=\"#VkFence\">VkFence</a> handles"
+        },
+        {
+          "vuid": "VUID-vkResetFences-fenceCount-arraylength",
+          "text": " <code>fenceCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkResetFences-pFences-parent",
+          "text": " Each element of <code>pFences</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkWaitForFences": {
+      "core": [
+        {
+          "vuid": "VUID-vkWaitForFences-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkWaitForFences-pFences-parameter",
+          "text": " <code>pFences</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>fenceCount</code> valid <a href=\"#VkFence\">VkFence</a> handles"
+        },
+        {
+          "vuid": "VUID-vkWaitForFences-fenceCount-arraylength",
+          "text": " <code>fenceCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkWaitForFences-pFences-parent",
+          "text": " Each element of <code>pFences</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkRegisterDeviceEventEXT": {
+      "(VK_EXT_display_control)": [
+        {
+          "vuid": "VUID-vkRegisterDeviceEventEXT-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkRegisterDeviceEventEXT-pDeviceEventInfo-parameter",
+          "text": " <code>pDeviceEventInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkDeviceEventInfoEXT\">VkDeviceEventInfoEXT</a> structure"
+        },
+        {
+          "vuid": "VUID-vkRegisterDeviceEventEXT-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkRegisterDeviceEventEXT-pFence-parameter",
+          "text": " <code>pFence</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkFence\">VkFence</a> handle"
+        }
+      ]
+    },
+    "VkDeviceEventInfoEXT": {
+      "(VK_EXT_display_control)": [
+        {
+          "vuid": "VUID-VkDeviceEventInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkDeviceEventInfoEXT-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkDeviceEventInfoEXT-deviceEvent-parameter",
+          "text": " <code>deviceEvent</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDeviceEventTypeEXT\">VkDeviceEventTypeEXT</a> value"
+        }
+      ]
+    },
+    "vkRegisterDisplayEventEXT": {
+      "(VK_EXT_display_control)": [
+        {
+          "vuid": "VUID-vkRegisterDisplayEventEXT-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkRegisterDisplayEventEXT-display-parameter",
+          "text": " <code>display</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDisplayKHR\">VkDisplayKHR</a> handle"
+        },
+        {
+          "vuid": "VUID-vkRegisterDisplayEventEXT-pDisplayEventInfo-parameter",
+          "text": " <code>pDisplayEventInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkDisplayEventInfoEXT\">VkDisplayEventInfoEXT</a> structure"
+        },
+        {
+          "vuid": "VUID-vkRegisterDisplayEventEXT-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkRegisterDisplayEventEXT-pFence-parameter",
+          "text": " <code>pFence</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkFence\">VkFence</a> handle"
+        }
+      ]
+    },
+    "VkDisplayEventInfoEXT": {
+      "(VK_EXT_display_control)": [
+        {
+          "vuid": "VUID-VkDisplayEventInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkDisplayEventInfoEXT-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkDisplayEventInfoEXT-displayEvent-parameter",
+          "text": " <code>displayEvent</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDisplayEventTypeEXT\">VkDisplayEventTypeEXT</a> value"
+        }
+      ]
+    },
+    "vkImportFenceWin32HandleKHR": {
+      "(VK_KHR_external_fence_win32)": [
+        {
+          "vuid": "VUID-vkImportFenceWin32HandleKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkImportFenceWin32HandleKHR-pImportFenceWin32HandleInfo-parameter",
+          "text": " <code>pImportFenceWin32HandleInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkImportFenceWin32HandleInfoKHR\">VkImportFenceWin32HandleInfoKHR</a> structure"
+        }
+      ]
+    },
+    "VkImportFenceWin32HandleInfoKHR": {
+      "(VK_KHR_external_fence_win32)": [
+        {
+          "vuid": "VUID-VkImportFenceWin32HandleInfoKHR-handleType-01457",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> be a value included in the <a href=\"#synchronization-fence-handletypes-win32\">Handle Types Supported by VkImportFenceWin32HandleInfoKHR</a> table."
+        },
+        {
+          "vuid": "VUID-VkImportFenceWin32HandleInfoKHR-handleType-01459",
+          "text": " If <code>handleType</code> is not <code>VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT</code>, <code>name</code> <strong class=\"purple\">must</strong> be <code>NULL</code>."
+        },
+        {
+          "vuid": "VUID-VkImportFenceWin32HandleInfoKHR-handleType-01460",
+          "text": " If <code>handleType</code> is not <code>0</code> and <code>handle</code> is <code>NULL</code>, <code>name</code> <strong class=\"purple\">must</strong> name a valid synchronization primitive of the type specified by <code>handleType</code>."
+        },
+        {
+          "vuid": "VUID-VkImportFenceWin32HandleInfoKHR-handleType-01461",
+          "text": " If <code>handleType</code> is not <code>0</code> and <code>name</code> is <code>NULL</code>, <code>handle</code> <strong class=\"purple\">must</strong> be a valid handle of the type specified by <code>handleType</code>."
+        },
+        {
+          "vuid": "VUID-VkImportFenceWin32HandleInfoKHR-handle-01462",
+          "text": " If <code>handle</code> is not <code>NULL</code>, <code>name</code> must be <code>NULL</code>."
+        },
+        {
+          "vuid": "VUID-VkImportFenceWin32HandleInfoKHR-handle-01539",
+          "text": " If <code>handle</code> is not <code>NULL</code>, it <strong class=\"purple\">must</strong> obey any requirements listed for <code>handleType</code> in <a href=\"#external-fence-handle-types-compatibility\">external fence handle types compatibility</a>."
+        },
+        {
+          "vuid": "VUID-VkImportFenceWin32HandleInfoKHR-name-01540",
+          "text": " If <code>name</code> is not <code>NULL</code>, it <strong class=\"purple\">must</strong> obey any requirements listed for <code>handleType</code> in <a href=\"#external-fence-handle-types-compatibility\">external fence handle types compatibility</a>."
+        },
+        {
+          "vuid": "VUID-VkImportFenceWin32HandleInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImportFenceWin32HandleInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkImportFenceWin32HandleInfoKHR-fence-parameter",
+          "text": " <code>fence</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFence\">VkFence</a> handle"
+        },
+        {
+          "vuid": "VUID-VkImportFenceWin32HandleInfoKHR-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkFenceImportFlagBits\">VkFenceImportFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkImportFenceWin32HandleInfoKHR-handleType-parameter",
+          "text": " If <code>handleType</code> is not <code>0</code>, <code>handleType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkExternalFenceHandleTypeFlagBits\">VkExternalFenceHandleTypeFlagBits</a> value"
+        }
+      ]
+    },
+    "vkImportFenceFdKHR": {
+      "(VK_KHR_external_fence_fd)": [
+        {
+          "vuid": "VUID-vkImportFenceFdKHR-fence-01463",
+          "text": " <code>fence</code> <strong class=\"purple\">must</strong> not be associated with any queue command that has not yet completed execution on that queue"
+        },
+        {
+          "vuid": "VUID-vkImportFenceFdKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkImportFenceFdKHR-pImportFenceFdInfo-parameter",
+          "text": " <code>pImportFenceFdInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkImportFenceFdInfoKHR\">VkImportFenceFdInfoKHR</a> structure"
+        }
+      ]
+    },
+    "VkImportFenceFdInfoKHR": {
+      "(VK_KHR_external_fence_fd)": [
+        {
+          "vuid": "VUID-VkImportFenceFdInfoKHR-handleType-01464",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> be a value included in the <a href=\"#synchronization-fence-handletypes-fd\">Handle Types Supported by VkImportFenceFdInfoKHR</a> table."
+        },
+        {
+          "vuid": "VUID-VkImportFenceFdInfoKHR-fd-01541",
+          "text": " <code>fd</code> <strong class=\"purple\">must</strong> obey any requirements listed for <code>handleType</code> in <a href=\"#external-fence-handle-types-compatibility\">external fence handle types compatibility</a>."
+        },
+        {
+          "vuid": "VUID-VkImportFenceFdInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImportFenceFdInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkImportFenceFdInfoKHR-fence-parameter",
+          "text": " <code>fence</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFence\">VkFence</a> handle"
+        },
+        {
+          "vuid": "VUID-VkImportFenceFdInfoKHR-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkFenceImportFlagBits\">VkFenceImportFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkImportFenceFdInfoKHR-handleType-parameter",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkExternalFenceHandleTypeFlagBits\">VkExternalFenceHandleTypeFlagBits</a> value"
+        }
+      ]
+    },
+    "vkCreateSemaphore": {
+      "core": [
+        {
+          "vuid": "VUID-vkCreateSemaphore-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateSemaphore-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkSemaphoreCreateInfo\">VkSemaphoreCreateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateSemaphore-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateSemaphore-pSemaphore-parameter",
+          "text": " <code>pSemaphore</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkSemaphore\">VkSemaphore</a> handle"
+        }
+      ]
+    },
+    "VkSemaphoreCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkSemaphoreCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkSemaphoreCreateInfo-pNext-pNext",
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkExportSemaphoreCreateInfo\">VkExportSemaphoreCreateInfo</a>, <a href=\"#VkExportSemaphoreWin32HandleInfoKHR\">VkExportSemaphoreWin32HandleInfoKHR</a>, or <a href=\"#VkSemaphoreTypeCreateInfoKHR\">VkSemaphoreTypeCreateInfoKHR</a>"
+        },
+        {
+          "vuid": "VUID-VkSemaphoreCreateInfo-sType-unique",
+          "text": " Each <code>sType</code> member in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
+        },
+        {
+          "vuid": "VUID-VkSemaphoreCreateInfo-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        }
+      ]
+    },
+    "VkSemaphoreTypeCreateInfoKHR": {
+      "(VK_KHR_timeline_semaphore)": [
+        {
+          "vuid": "VUID-VkSemaphoreTypeCreateInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkSemaphoreTypeCreateInfoKHR-semaphoreType-parameter",
+          "text": " <code>semaphoreType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSemaphoreTypeKHR\">VkSemaphoreTypeKHR</a> value"
+        },
+        {
+          "vuid": "VUID-VkSemaphoreTypeCreateInfoKHR-timelineSemaphore-03252",
+          "text": " If the <a href=\"#features-timelineSemaphore\"><code>timelineSemaphore</code></a> feature is not enabled, <code>semaphoreType</code> <strong class=\"purple\">must</strong> not equal <code>VK_SEMAPHORE_TYPE_TIMELINE_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkSemaphoreTypeCreateInfoKHR-semaphoreType-03279",
+          "text": " If <code>semaphoreType</code> is <code>VK_SEMAPHORE_TYPE_BINARY_KHR</code>, <code>initialValue</code> <strong class=\"purple\">must</strong> be zero."
+        }
+      ]
+    },
+    "VkExportSemaphoreCreateInfo": {
+      "(VK_VERSION_1_1,VK_KHR_external_semaphore)": [
+        {
+          "vuid": "VUID-VkExportSemaphoreCreateInfo-handleTypes-01124",
+          "text": " The bits in <code>handleTypes</code> <strong class=\"purple\">must</strong> be supported and compatible, as reported by <a href=\"#VkExternalSemaphoreProperties\">VkExternalSemaphoreProperties</a>."
+        },
+        {
+          "vuid": "VUID-VkExportSemaphoreCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkExportSemaphoreCreateInfo-handleTypes-parameter",
+          "text": " <code>handleTypes</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkExternalSemaphoreHandleTypeFlagBits\">VkExternalSemaphoreHandleTypeFlagBits</a> values"
+        }
+      ]
+    },
+    "VkExportSemaphoreWin32HandleInfoKHR": {
+      "(VK_KHR_external_semaphore_win32)": [
+        {
+          "vuid": "VUID-VkExportSemaphoreWin32HandleInfoKHR-handleTypes-01125",
+          "text": " If <a href=\"#VkExportSemaphoreCreateInfo\">VkExportSemaphoreCreateInfo</a>::<code>handleTypes</code> does not include <code>VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT</code> or <code>VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT</code>, <code>VkExportSemaphoreWin32HandleInfoKHR</code> <strong class=\"purple\">must</strong> not be included in the <code>pNext</code> chain of <a href=\"#VkSemaphoreCreateInfo\">VkSemaphoreCreateInfo</a>."
+        },
+        {
+          "vuid": "VUID-VkExportSemaphoreWin32HandleInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkExportSemaphoreWin32HandleInfoKHR-pAttributes-parameter",
+          "text": " If <code>pAttributes</code> is not <code>NULL</code>, <code>pAttributes</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <code>SECURITY_ATTRIBUTES</code> value"
+        }
+      ]
+    },
+    "vkGetSemaphoreWin32HandleKHR": {
+      "(VK_KHR_external_semaphore_win32)": [
+        {
+          "vuid": "VUID-vkGetSemaphoreWin32HandleKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetSemaphoreWin32HandleKHR-pGetWin32HandleInfo-parameter",
+          "text": " <code>pGetWin32HandleInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkSemaphoreGetWin32HandleInfoKHR\">VkSemaphoreGetWin32HandleInfoKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetSemaphoreWin32HandleKHR-pHandle-parameter",
+          "text": " <code>pHandle</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>HANDLE</code> value"
+        }
+      ]
+    },
+    "VkSemaphoreGetWin32HandleInfoKHR": {
+      "(VK_KHR_external_semaphore_win32)": [
+        {
+          "vuid": "VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-01126",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> have been included in <a href=\"#VkExportSemaphoreCreateInfo\">VkExportSemaphoreCreateInfo</a>::<code>handleTypes</code> when the <code>semaphore</code>&#8217;s current payload was created."
+        },
+        {
+          "vuid": "VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-01127",
+          "text": " If <code>handleType</code> is defined as an NT handle, <a href=\"#vkGetSemaphoreWin32HandleKHR\">vkGetSemaphoreWin32HandleKHR</a> <strong class=\"purple\">must</strong> be called no more than once for each valid unique combination of <code>semaphore</code> and <code>handleType</code>."
+        },
+        {
+          "vuid": "VUID-VkSemaphoreGetWin32HandleInfoKHR-semaphore-01128",
+          "text": " <code>semaphore</code> <strong class=\"purple\">must</strong> not currently have its payload replaced by an imported payload as described below in <a href=\"#synchronization-semaphores-importing\">Importing Semaphore Payloads</a> unless that imported payload&#8217;s handle type was included in <a href=\"#VkExternalSemaphoreProperties\">VkExternalSemaphoreProperties</a>::<code>exportFromImportedHandleTypes</code> for <code>handleType</code>."
+        },
+        {
+          "vuid": "VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-01129",
+          "text": " If <code>handleType</code> refers to a handle type with copy payload transference semantics, as defined below in <a href=\"#synchronization-semaphores-importing\">Importing Semaphore Payloads</a>, there <strong class=\"purple\">must</strong> be no queue waiting on <code>semaphore</code>."
+        },
+        {
+          "vuid": "VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-01130",
+          "text": " If <code>handleType</code> refers to a handle type with copy payload transference semantics, <code>semaphore</code> <strong class=\"purple\">must</strong> be signaled, or have an associated <a href=\"#synchronization-semaphores-signaling\">semaphore signal operation</a> pending execution."
+        },
+        {
+          "vuid": "VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-01131",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> be defined as an NT handle or a global share handle."
+        },
+        {
+          "vuid": "VUID-VkSemaphoreGetWin32HandleInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkSemaphoreGetWin32HandleInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkSemaphoreGetWin32HandleInfoKHR-semaphore-parameter",
+          "text": " <code>semaphore</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSemaphore\">VkSemaphore</a> handle"
+        },
+        {
+          "vuid": "VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-parameter",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkExternalSemaphoreHandleTypeFlagBits\">VkExternalSemaphoreHandleTypeFlagBits</a> value"
+        }
+      ]
+    },
+    "vkGetSemaphoreFdKHR": {
+      "(VK_KHR_external_semaphore_fd)": [
+        {
+          "vuid": "VUID-vkGetSemaphoreFdKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetSemaphoreFdKHR-pGetFdInfo-parameter",
+          "text": " <code>pGetFdInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkSemaphoreGetFdInfoKHR\">VkSemaphoreGetFdInfoKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetSemaphoreFdKHR-pFd-parameter",
+          "text": " <code>pFd</code> <strong class=\"purple\">must</strong> be a valid pointer to an <code>int</code> value"
+        }
+      ]
+    },
+    "VkSemaphoreGetFdInfoKHR": {
+      "(VK_KHR_external_semaphore_fd)": [
+        {
+          "vuid": "VUID-VkSemaphoreGetFdInfoKHR-handleType-01132",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> have been included in <a href=\"#VkExportSemaphoreCreateInfo\">VkExportSemaphoreCreateInfo</a>::<code>handleTypes</code> when <code>semaphore</code>&#8217;s current payload was created."
+        },
+        {
+          "vuid": "VUID-VkSemaphoreGetFdInfoKHR-semaphore-01133",
+          "text": " <code>semaphore</code> <strong class=\"purple\">must</strong> not currently have its payload replaced by an imported payload as described below in <a href=\"#synchronization-semaphores-importing\">Importing Semaphore Payloads</a> unless that imported payload&#8217;s handle type was included in <a href=\"#VkExternalSemaphoreProperties\">VkExternalSemaphoreProperties</a>::<code>exportFromImportedHandleTypes</code> for <code>handleType</code>."
+        },
+        {
+          "vuid": "VUID-VkSemaphoreGetFdInfoKHR-handleType-01134",
+          "text": " If <code>handleType</code> refers to a handle type with copy payload transference semantics, as defined below in <a href=\"#synchronization-semaphores-importing\">Importing Semaphore Payloads</a>, there <strong class=\"purple\">must</strong> be no queue waiting on <code>semaphore</code>."
+        },
+        {
+          "vuid": "VUID-VkSemaphoreGetFdInfoKHR-handleType-01135",
+          "text": " If <code>handleType</code> refers to a handle type with copy payload transference semantics, <code>semaphore</code> <strong class=\"purple\">must</strong> be signaled, or have an associated <a href=\"#synchronization-semaphores-signaling\">semaphore signal operation</a> pending execution."
+        },
+        {
+          "vuid": "VUID-VkSemaphoreGetFdInfoKHR-handleType-01136",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> be defined as a POSIX file descriptor handle."
+        },
+        {
+          "vuid": "VUID-VkSemaphoreGetFdInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkSemaphoreGetFdInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkSemaphoreGetFdInfoKHR-semaphore-parameter",
+          "text": " <code>semaphore</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSemaphore\">VkSemaphore</a> handle"
+        },
+        {
+          "vuid": "VUID-VkSemaphoreGetFdInfoKHR-handleType-parameter",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkExternalSemaphoreHandleTypeFlagBits\">VkExternalSemaphoreHandleTypeFlagBits</a> value"
+        }
+      ],
+      "(VK_KHR_external_semaphore_fd)+(VK_KHR_timeline_semaphore)": [
+        {
+          "vuid": "VUID-VkSemaphoreGetFdInfoKHR-handleType-03253",
+          "text": " If <code>handleType</code> refers to a handle type with copy payload transference semantics, <code>semaphore</code> <strong class=\"purple\">must</strong> have been created with a <a href=\"#VkSemaphoreTypeKHR\">VkSemaphoreTypeKHR</a> of <code>VK_SEMAPHORE_TYPE_BINARY_KHR</code>."
+        },
+        {
+          "vuid": "VUID-VkSemaphoreGetFdInfoKHR-handleType-03254",
+          "text": " If <code>handleType</code> refers to a handle type with copy payload transference semantics, <code>semaphore</code> <strong class=\"purple\">must</strong> have an associated semaphore signal operation that has been submitted for execution and any semaphore signal operations on which it depends (if any) <strong class=\"purple\">must</strong> have also been submitted for execution."
+        }
+      ]
+    },
+    "vkDestroySemaphore": {
+      "core": [
+        {
+          "vuid": "VUID-vkDestroySemaphore-semaphore-01137",
+          "text": " All submitted batches that refer to <code>semaphore</code> <strong class=\"purple\">must</strong> have completed execution"
+        },
+        {
+          "vuid": "VUID-vkDestroySemaphore-semaphore-01138",
+          "text": " If <code>VkAllocationCallbacks</code> were provided when <code>semaphore</code> was created, a compatible set of callbacks <strong class=\"purple\">must</strong> be provided here"
+        },
+        {
+          "vuid": "VUID-vkDestroySemaphore-semaphore-01139",
+          "text": " If no <code>VkAllocationCallbacks</code> were provided when <code>semaphore</code> was created, <code>pAllocator</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroySemaphore-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroySemaphore-semaphore-parameter",
+          "text": " If <code>semaphore</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>semaphore</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSemaphore\">VkSemaphore</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroySemaphore-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkDestroySemaphore-semaphore-parent",
+          "text": " If <code>semaphore</code> is a valid handle, it <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkGetSemaphoreCounterValueKHR": {
+      "(VK_KHR_timeline_semaphore)": [
+        {
+          "vuid": "VUID-vkGetSemaphoreCounterValueKHR-semaphore-03255",
+          "text": " <code>semaphore</code> <strong class=\"purple\">must</strong> have been created with a <a href=\"#VkSemaphoreTypeKHR\">VkSemaphoreTypeKHR</a> of <code>VK_SEMAPHORE_TYPE_TIMELINE_KHR</code>"
+        },
+        {
+          "vuid": "VUID-vkGetSemaphoreCounterValueKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetSemaphoreCounterValueKHR-semaphore-parameter",
+          "text": " <code>semaphore</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSemaphore\">VkSemaphore</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetSemaphoreCounterValueKHR-pValue-parameter",
+          "text": " <code>pValue</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint64_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetSemaphoreCounterValueKHR-semaphore-parent",
+          "text": " <code>semaphore</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkWaitSemaphoresKHR": {
+      "(VK_KHR_timeline_semaphore)": [
+        {
+          "vuid": "VUID-vkWaitSemaphoresKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkWaitSemaphoresKHR-pWaitInfo-parameter",
+          "text": " <code>pWaitInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkSemaphoreWaitInfoKHR\">VkSemaphoreWaitInfoKHR</a> structure"
+        }
+      ]
+    },
+    "VkSemaphoreWaitInfoKHR": {
+      "(VK_KHR_timeline_semaphore)": [
+        {
+          "vuid": "VUID-VkSemaphoreWaitInfoKHR-pSemaphores-03256",
+          "text": " All of the elements of <code>pSemaphores</code> <strong class=\"purple\">must</strong> reference a semaphore that was created with a <a href=\"#VkSemaphoreTypeKHR\">VkSemaphoreTypeKHR</a> of <code>VK_SEMAPHORE_TYPE_TIMELINE_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkSemaphoreWaitInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkSemaphoreWaitInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkSemaphoreWaitInfoKHR-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkSemaphoreWaitFlagBitsKHR\">VkSemaphoreWaitFlagBitsKHR</a> values"
+        },
+        {
+          "vuid": "VUID-VkSemaphoreWaitInfoKHR-pSemaphores-parameter",
+          "text": " <code>pSemaphores</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>semaphoreCount</code> valid <a href=\"#VkSemaphore\">VkSemaphore</a> handles"
+        },
+        {
+          "vuid": "VUID-VkSemaphoreWaitInfoKHR-pValues-parameter",
+          "text": " <code>pValues</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>semaphoreCount</code> <code>uint64_t</code> values"
+        },
+        {
+          "vuid": "VUID-VkSemaphoreWaitInfoKHR-semaphoreCount-arraylength",
+          "text": " <code>semaphoreCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "vkSignalSemaphoreKHR": {
+      "(VK_KHR_timeline_semaphore)": [
+        {
+          "vuid": "VUID-vkSignalSemaphoreKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkSignalSemaphoreKHR-pSignalInfo-parameter",
+          "text": " <code>pSignalInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkSemaphoreSignalInfoKHR\">VkSemaphoreSignalInfoKHR</a> structure"
+        }
+      ]
+    },
+    "VkSemaphoreSignalInfoKHR": {
+      "(VK_KHR_timeline_semaphore)": [
+        {
+          "vuid": "VUID-VkSemaphoreSignalInfoKHR-semaphore-03257",
+          "text": " <code>semaphore</code> <strong class=\"purple\">must</strong> have been created with a <a href=\"#VkSemaphoreTypeKHR\">VkSemaphoreTypeKHR</a> of <code>VK_SEMAPHORE_TYPE_TIMELINE_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkSemaphoreSignalInfoKHR-value-03258",
+          "text": " <code>value</code> <strong class=\"purple\">must</strong> have a value greater than the current value of the semaphore"
+        },
+        {
+          "vuid": "VUID-VkSemaphoreSignalInfoKHR-value-03259",
+          "text": " <code>value</code> <strong class=\"purple\">must</strong> be less than the value of any pending semaphore signal operations"
+        },
+        {
+          "vuid": "VUID-VkSemaphoreSignalInfoKHR-value-03260",
+          "text": " <code>value</code> <strong class=\"purple\">must</strong> have a value which does not differ from the current value of the semaphore or the value of any outstanding semaphore wait or signal operation on <code>semaphore</code> by more than <a href=\"#limits-maxTimelineSemaphoreValueDifference\"><code>maxTimelineSemaphoreValueDifference</code></a>."
+        },
+        {
+          "vuid": "VUID-VkSemaphoreSignalInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkSemaphoreSignalInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkSemaphoreSignalInfoKHR-semaphore-parameter",
+          "text": " <code>semaphore</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSemaphore\">VkSemaphore</a> handle"
+        }
+      ]
+    },
+    "vkImportSemaphoreWin32HandleKHR": {
+      "(VK_KHR_external_semaphore_win32)": [
+        {
+          "vuid": "VUID-vkImportSemaphoreWin32HandleKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkImportSemaphoreWin32HandleKHR-pImportSemaphoreWin32HandleInfo-parameter",
+          "text": " <code>pImportSemaphoreWin32HandleInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkImportSemaphoreWin32HandleInfoKHR\">VkImportSemaphoreWin32HandleInfoKHR</a> structure"
+        }
+      ]
+    },
+    "VkImportSemaphoreWin32HandleInfoKHR": {
+      "(VK_KHR_external_semaphore_win32)": [
+        {
+          "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-01140",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> be a value included in the <a href=\"#synchronization-semaphore-handletypes-win32\">Handle Types Supported by VkImportSemaphoreWin32HandleInfoKHR</a> table."
+        },
+        {
+          "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-01466",
+          "text": " If <code>handleType</code> is not <code>VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT</code> or <code>VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT</code>, <code>name</code> <strong class=\"purple\">must</strong> be <code>NULL</code>."
+        },
+        {
+          "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-01467",
+          "text": " If <code>handleType</code> is not <code>0</code> and <code>handle</code> is <code>NULL</code>, <code>name</code> <strong class=\"purple\">must</strong> name a valid synchronization primitive of the type specified by <code>handleType</code>."
+        },
+        {
+          "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-01468",
+          "text": " If <code>handleType</code> is not <code>0</code> and <code>name</code> is <code>NULL</code>, <code>handle</code> <strong class=\"purple\">must</strong> be a valid handle of the type specified by <code>handleType</code>."
+        },
+        {
+          "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-handle-01469",
+          "text": " If <code>handle</code> is not <code>NULL</code>, <code>name</code> must be <code>NULL</code>."
+        },
+        {
+          "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-handle-01542",
+          "text": " If <code>handle</code> is not <code>NULL</code>, it <strong class=\"purple\">must</strong> obey any requirements listed for <code>handleType</code> in <a href=\"#external-semaphore-handle-types-compatibility\">external semaphore handle types compatibility</a>."
+        },
+        {
+          "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-name-01543",
+          "text": " If <code>name</code> is not <code>NULL</code>, it <strong class=\"purple\">must</strong> obey any requirements listed for <code>handleType</code> in <a href=\"#external-semaphore-handle-types-compatibility\">external semaphore handle types compatibility</a>."
+        },
+        {
+          "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-03261",
+          "text": " If <code>handleType</code> is <code>VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT</code> or <code>VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT</code>, the <a href=\"#VkSemaphoreCreateInfo\">VkSemaphoreCreateInfo</a>::<code>flags</code> field <strong class=\"purple\">must</strong> match that of the semaphore from which <code>handle</code> or <code>name</code> was exported."
+        },
+        {
+          "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-semaphore-parameter",
+          "text": " <code>semaphore</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSemaphore\">VkSemaphore</a> handle"
+        },
+        {
+          "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkSemaphoreImportFlagBits\">VkSemaphoreImportFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-parameter",
+          "text": " If <code>handleType</code> is not <code>0</code>, <code>handleType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkExternalSemaphoreHandleTypeFlagBits\">VkExternalSemaphoreHandleTypeFlagBits</a> value"
+        }
+      ],
+      "(VK_KHR_external_semaphore_win32)+(VK_KHR_timeline_semaphore)": [
+        {
+          "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-03262",
+          "text": " If <code>handleType</code> is <code>VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT</code> or <code>VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT</code>, the <a href=\"#VkSemaphoreTypeCreateInfoKHR\">VkSemaphoreTypeCreateInfoKHR</a>::<code>semaphoreType</code> field <strong class=\"purple\">must</strong> match that of the semaphore from which <code>handle</code> or <code>name</code> was exported."
+        },
+        {
+          "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-flags-03322",
+          "text": " If <code>flags</code> contains <code>VK_SEMAPHORE_IMPORT_TEMPORARY_BIT</code>, the <a href=\"#VkSemaphoreTypeCreateInfoKHR\">VkSemaphoreTypeCreateInfoKHR</a>::<code>semaphoreType</code> field of the semaphore from which <code>handle</code> or <code>name</code> was exported <strong class=\"purple\">must</strong> not be <code>VK_SEMAPHORE_TYPE_TIMELINE_KHR</code>."
+        }
+      ]
+    },
+    "vkImportSemaphoreFdKHR": {
+      "(VK_KHR_external_semaphore_fd)": [
+        {
+          "vuid": "VUID-vkImportSemaphoreFdKHR-semaphore-01142",
+          "text": " <code>semaphore</code> <strong class=\"purple\">must</strong> not be associated with any queue command that has not yet completed execution on that queue"
+        },
+        {
+          "vuid": "VUID-vkImportSemaphoreFdKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkImportSemaphoreFdKHR-pImportSemaphoreFdInfo-parameter",
+          "text": " <code>pImportSemaphoreFdInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkImportSemaphoreFdInfoKHR\">VkImportSemaphoreFdInfoKHR</a> structure"
+        }
+      ]
+    },
+    "VkImportSemaphoreFdInfoKHR": {
+      "(VK_KHR_external_semaphore_fd)": [
+        {
+          "vuid": "VUID-VkImportSemaphoreFdInfoKHR-handleType-01143",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> be a value included in the <a href=\"#synchronization-semaphore-handletypes-fd\">Handle Types Supported by VkImportSemaphoreFdInfoKHR</a> table."
+        },
+        {
+          "vuid": "VUID-VkImportSemaphoreFdInfoKHR-fd-01544",
+          "text": " <code>fd</code> <strong class=\"purple\">must</strong> obey any requirements listed for <code>handleType</code> in <a href=\"#external-semaphore-handle-types-compatibility\">external semaphore handle types compatibility</a>."
+        },
+        {
+          "vuid": "VUID-VkImportSemaphoreFdInfoKHR-handleType-03263",
+          "text": " If <code>handleType</code> is <code>VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT</code>, the <a href=\"#VkSemaphoreCreateInfo\">VkSemaphoreCreateInfo</a>::<code>flags</code> field <strong class=\"purple\">must</strong> match that of the semaphore from which <code>fd</code> was exported."
+        },
+        {
+          "vuid": "VUID-VkImportSemaphoreFdInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImportSemaphoreFdInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkImportSemaphoreFdInfoKHR-semaphore-parameter",
+          "text": " <code>semaphore</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSemaphore\">VkSemaphore</a> handle"
+        },
+        {
+          "vuid": "VUID-VkImportSemaphoreFdInfoKHR-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkSemaphoreImportFlagBits\">VkSemaphoreImportFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkImportSemaphoreFdInfoKHR-handleType-parameter",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkExternalSemaphoreHandleTypeFlagBits\">VkExternalSemaphoreHandleTypeFlagBits</a> value"
+        }
+      ],
+      "(VK_KHR_external_semaphore_fd)+(VK_KHR_timeline_semaphore)": [
+        {
+          "vuid": "VUID-VkImportSemaphoreFdInfoKHR-handleType-03264",
+          "text": " If <code>handleType</code> is <code>VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT</code>, the <a href=\"#VkSemaphoreTypeCreateInfoKHR\">VkSemaphoreTypeCreateInfoKHR</a>::<code>semaphoreType</code> field <strong class=\"purple\">must</strong> match that of the semaphore from which <code>fd</code> was exported."
+        },
+        {
+          "vuid": "VUID-VkImportSemaphoreFdInfoKHR-flags-03323",
+          "text": " If <code>flags</code> contains <code>VK_SEMAPHORE_IMPORT_TEMPORARY_BIT</code>, the <a href=\"#VkSemaphoreTypeCreateInfoKHR\">VkSemaphoreTypeCreateInfoKHR</a>::<code>semaphoreType</code> field of the semaphore from which <code>fd</code> was exported <strong class=\"purple\">must</strong> not be <code>VK_SEMAPHORE_TYPE_TIMELINE_KHR</code>."
+        }
+      ]
+    },
+    "vkCreateEvent": {
+      "core": [
+        {
+          "vuid": "VUID-vkCreateEvent-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateEvent-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkEventCreateInfo\">VkEventCreateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateEvent-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateEvent-pEvent-parameter",
+          "text": " <code>pEvent</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkEvent\">VkEvent</a> handle"
+        }
+      ]
+    },
+    "VkEventCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkEventCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_EVENT_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkEventCreateInfo-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkEventCreateInfo-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        }
+      ]
+    },
+    "vkDestroyEvent": {
+      "core": [
+        {
+          "vuid": "VUID-vkDestroyEvent-event-01145",
+          "text": " All submitted commands that refer to <code>event</code> <strong class=\"purple\">must</strong> have completed execution"
+        },
+        {
+          "vuid": "VUID-vkDestroyEvent-event-01146",
+          "text": " If <code>VkAllocationCallbacks</code> were provided when <code>event</code> was created, a compatible set of callbacks <strong class=\"purple\">must</strong> be provided here"
+        },
+        {
+          "vuid": "VUID-vkDestroyEvent-event-01147",
+          "text": " If no <code>VkAllocationCallbacks</code> were provided when <code>event</code> was created, <code>pAllocator</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroyEvent-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyEvent-event-parameter",
+          "text": " If <code>event</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>event</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkEvent\">VkEvent</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyEvent-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkDestroyEvent-event-parent",
+          "text": " If <code>event</code> is a valid handle, it <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkGetEventStatus": {
+      "core": [
+        {
+          "vuid": "VUID-vkGetEventStatus-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetEventStatus-event-parameter",
+          "text": " <code>event</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkEvent\">VkEvent</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetEventStatus-event-parent",
+          "text": " <code>event</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkSetEvent": {
+      "core": [
+        {
+          "vuid": "VUID-vkSetEvent-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkSetEvent-event-parameter",
+          "text": " <code>event</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkEvent\">VkEvent</a> handle"
+        },
+        {
+          "vuid": "VUID-vkSetEvent-event-parent",
+          "text": " <code>event</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkResetEvent": {
+      "core": [
+        {
+          "vuid": "VUID-vkResetEvent-event-01148",
+          "text": " <code>event</code> <strong class=\"purple\">must</strong> not be waited on by a <code>vkCmdWaitEvents</code> command that is currently executing"
+        },
+        {
+          "vuid": "VUID-vkResetEvent-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkResetEvent-event-parameter",
+          "text": " <code>event</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkEvent\">VkEvent</a> handle"
+        },
+        {
+          "vuid": "VUID-vkResetEvent-event-parent",
+          "text": " <code>event</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkCmdSetEvent": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdSetEvent-stageMask-01149",
+          "text": " <code>stageMask</code> <strong class=\"purple\">must</strong> not include <code>VK_PIPELINE_STAGE_HOST_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetEvent-stageMask-01150",
+          "text": " If the <a href=\"#features-geometryShader\">geometry shaders</a> feature is not enabled, <code>stageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetEvent-stageMask-01151",
+          "text": " If the <a href=\"#features-tessellationShader\">tessellation shaders</a> feature is not enabled, <code>stageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT</code> or <code>VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetEvent-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdSetEvent-event-parameter",
+          "text": " <code>event</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkEvent\">VkEvent</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdSetEvent-stageMask-parameter",
+          "text": " <code>stageMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineStageFlagBits\">VkPipelineStageFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-vkCmdSetEvent-stageMask-requiredbitmask",
+          "text": " <code>stageMask</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetEvent-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetEvent-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdSetEvent-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called outside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdSetEvent-commonparent",
+          "text": " Both of <code>commandBuffer</code>, and <code>event</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-vkCmdSetEvent-commandBuffer-01152",
+          "text": " <code>commandBuffer</code>&#8217;s current device mask <strong class=\"purple\">must</strong> include exactly one physical device."
+        }
+      ],
+      "(VK_NV_mesh_shader)": [
+        {
+          "vuid": "VUID-vkCmdSetEvent-stageMask-02107",
+          "text": " If the <a href=\"#features-meshShader\">mesh shaders</a> feature is not enabled, <code>stageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetEvent-stageMask-02108",
+          "text": " If the <a href=\"#features-taskShader\">task shaders</a> feature is not enabled, <code>stageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV</code>"
+        }
+      ]
+    },
+    "vkCmdResetEvent": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdResetEvent-stageMask-01153",
+          "text": " <code>stageMask</code> <strong class=\"purple\">must</strong> not include <code>VK_PIPELINE_STAGE_HOST_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdResetEvent-stageMask-01154",
+          "text": " If the <a href=\"#features-geometryShader\">geometry shaders</a> feature is not enabled, <code>stageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdResetEvent-stageMask-01155",
+          "text": " If the <a href=\"#features-tessellationShader\">tessellation shaders</a> feature is not enabled, <code>stageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT</code> or <code>VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdResetEvent-event-01156",
+          "text": " When this command executes, <code>event</code> <strong class=\"purple\">must</strong> not be waited on by a <code>vkCmdWaitEvents</code> command that is currently executing"
+        },
+        {
+          "vuid": "VUID-vkCmdResetEvent-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdResetEvent-event-parameter",
+          "text": " <code>event</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkEvent\">VkEvent</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdResetEvent-stageMask-parameter",
+          "text": " <code>stageMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineStageFlagBits\">VkPipelineStageFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-vkCmdResetEvent-stageMask-requiredbitmask",
+          "text": " <code>stageMask</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdResetEvent-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdResetEvent-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdResetEvent-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called outside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdResetEvent-commonparent",
+          "text": " Both of <code>commandBuffer</code>, and <code>event</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-vkCmdResetEvent-commandBuffer-01157",
+          "text": " <code>commandBuffer</code>&#8217;s current device mask <strong class=\"purple\">must</strong> include exactly one physical device."
+        }
+      ],
+      "(VK_NV_mesh_shader)": [
+        {
+          "vuid": "VUID-vkCmdResetEvent-stageMask-02109",
+          "text": " If the <a href=\"#features-meshShader\">mesh shaders</a> feature is not enabled, <code>stageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdResetEvent-stageMask-02110",
+          "text": " If the <a href=\"#features-taskShader\">task shaders</a> feature is not enabled, <code>stageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV</code>"
+        }
+      ]
+    },
+    "vkCmdWaitEvents": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdWaitEvents-srcStageMask-01158",
+          "text": " <code>srcStageMask</code> <strong class=\"purple\">must</strong> be the bitwise OR of the <code>stageMask</code> parameter used in previous calls to <code>vkCmdSetEvent</code> with any of the members of <code>pEvents</code> and <code>VK_PIPELINE_STAGE_HOST_BIT</code> if any of the members of <code>pEvents</code> was set using <code>vkSetEvent</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-srcStageMask-01159",
+          "text": " If the <a href=\"#features-geometryShader\">geometry shaders</a> feature is not enabled, <code>srcStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-dstStageMask-01160",
+          "text": " If the <a href=\"#features-geometryShader\">geometry shaders</a> feature is not enabled, <code>dstStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-srcStageMask-01161",
+          "text": " If the <a href=\"#features-tessellationShader\">tessellation shaders</a> feature is not enabled, <code>srcStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT</code> or <code>VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-dstStageMask-01162",
+          "text": " If the <a href=\"#features-tessellationShader\">tessellation shaders</a> feature is not enabled, <code>dstStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT</code> or <code>VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-pEvents-01163",
+          "text": " If <code>pEvents</code> includes one or more events that will be signaled by <code>vkSetEvent</code> after <code>commandBuffer</code> has been submitted to a queue, then <code>vkCmdWaitEvents</code> <strong class=\"purple\">must</strong> not be called inside a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-srcStageMask-01164",
+          "text": " Any pipeline stage included in <code>srcStageMask</code> or <code>dstStageMask</code> <strong class=\"purple\">must</strong> be supported by the capabilities of the queue family specified by the <code>queueFamilyIndex</code> member of the <a href=\"#VkCommandPoolCreateInfo\">VkCommandPoolCreateInfo</a> structure that was used to create the <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from, as specified in the <a href=\"#synchronization-pipeline-stages-supported\">table of supported pipeline stages</a>."
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-pMemoryBarriers-01165",
+          "text": " Each element of <code>pMemoryBarriers</code>, <code>pBufferMemoryBarriers</code> or <code>pImageMemoryBarriers</code> <strong class=\"purple\">must</strong> not have any access flag included in its <code>srcAccessMask</code> member if that bit is not supported by any of the pipeline stages in <code>srcStageMask</code>, as specified in the <a href=\"#synchronization-access-types-supported\">table of supported access types</a>."
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-pMemoryBarriers-01166",
+          "text": " Each element of <code>pMemoryBarriers</code>, <code>pBufferMemoryBarriers</code> or <code>pImageMemoryBarriers</code> <strong class=\"purple\">must</strong> not have any access flag included in its <code>dstAccessMask</code> member if that bit is not supported by any of the pipeline stages in <code>dstStageMask</code>, as specified in the <a href=\"#synchronization-access-types-supported\">table of supported access types</a>."
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-srcQueueFamilyIndex-02803",
+          "text": " The <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> members of any element of <code>pBufferMemoryBarriers</code> or <code>pImageMemoryBarriers</code> <strong class=\"purple\">must</strong> be equal."
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-srcAccessMask-02809",
+          "text": " The <code>srcAccessMask</code> member of each element of <code>pMemoryBarriers</code> <strong class=\"purple\">must</strong> only include access flags that are supported by one or more of the pipeline stages in <code>srcStageMask</code>, as specified in the <a href=\"#synchronization-access-types-supported\">table of supported access types</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-dstAccessMask-02810",
+          "text": " The <code>dstAccessMask</code> member of each element of <code>pMemoryBarriers</code> <strong class=\"purple\">must</strong> only include access flags that are supported by one or more of the pipeline stages in <code>dstStageMask</code>, as specified in the <a href=\"#synchronization-access-types-supported\">table of supported access types</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-srcAccessMask-02811",
+          "text": " The <code>srcAccessMask</code> member of each element of <code>pBufferMemoryBarriers</code> <strong class=\"purple\">must</strong> only include access flags that are supported by one or more of the pipeline stages in <code>srcStageMask</code>, as specified in the <a href=\"#synchronization-access-types-supported\">table of supported access types</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-dstAccessMask-02812",
+          "text": " The <code>dstAccessMask</code> member of each element of <code>pBufferMemoryBarriers</code> <strong class=\"purple\">must</strong> only include access flags that are supported by one or more of the pipeline stages in <code>dstStageMask</code>, as specified in the <a href=\"#synchronization-access-types-supported\">table of supported access types</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-srcAccessMask-02813",
+          "text": " The <code>srcAccessMask</code> member of each element of <code>pImageMemoryBarriers</code> <strong class=\"purple\">must</strong> only include access flags that are supported by one or more of the pipeline stages in <code>srcStageMask</code>, as specified in the <a href=\"#synchronization-access-types-supported\">table of supported access types</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-dstAccessMask-02814",
+          "text": " The <code>dstAccessMask</code> member of any element of <code>pImageMemoryBarriers</code> <strong class=\"purple\">must</strong> only include access flags that are supported by one or more of the pipeline stages in <code>dstStageMask</code>, as specified in the <a href=\"#synchronization-access-types-supported\">table of supported access types</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-pEvents-parameter",
+          "text": " <code>pEvents</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>eventCount</code> valid <a href=\"#VkEvent\">VkEvent</a> handles"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-srcStageMask-parameter",
+          "text": " <code>srcStageMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineStageFlagBits\">VkPipelineStageFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-srcStageMask-requiredbitmask",
+          "text": " <code>srcStageMask</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-dstStageMask-parameter",
+          "text": " <code>dstStageMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineStageFlagBits\">VkPipelineStageFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-dstStageMask-requiredbitmask",
+          "text": " <code>dstStageMask</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-pMemoryBarriers-parameter",
+          "text": " If <code>memoryBarrierCount</code> is not <code>0</code>, <code>pMemoryBarriers</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>memoryBarrierCount</code> valid <a href=\"#VkMemoryBarrier\">VkMemoryBarrier</a> structures"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-pBufferMemoryBarriers-parameter",
+          "text": " If <code>bufferMemoryBarrierCount</code> is not <code>0</code>, <code>pBufferMemoryBarriers</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>bufferMemoryBarrierCount</code> valid <a href=\"#VkBufferMemoryBarrier\">VkBufferMemoryBarrier</a> structures"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-pImageMemoryBarriers-parameter",
+          "text": " If <code>imageMemoryBarrierCount</code> is not <code>0</code>, <code>pImageMemoryBarriers</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>imageMemoryBarrierCount</code> valid <a href=\"#VkImageMemoryBarrier\">VkImageMemoryBarrier</a> structures"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-eventCount-arraylength",
+          "text": " <code>eventCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-commonparent",
+          "text": " Both of <code>commandBuffer</code>, and the elements of <code>pEvents</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-vkCmdWaitEvents-commandBuffer-01167",
+          "text": " <code>commandBuffer</code>&#8217;s current device mask <strong class=\"purple\">must</strong> include exactly one physical device."
+        }
+      ],
+      "(VK_NV_mesh_shader)": [
+        {
+          "vuid": "VUID-vkCmdWaitEvents-srcStageMask-02111",
+          "text": " If the <a href=\"#features-meshShader\">mesh shaders</a> feature is not enabled, <code>srcStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-srcStageMask-02112",
+          "text": " If the <a href=\"#features-taskShader\">task shaders</a> feature is not enabled, <code>srcStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-dstStageMask-02113",
+          "text": " If the <a href=\"#features-meshShader\">mesh shaders</a> feature is not enabled, <code>dstStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWaitEvents-dstStageMask-02114",
+          "text": " If the <a href=\"#features-taskShader\">task shaders</a> feature is not enabled, <code>dstStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV</code>"
+        }
+      ]
+    },
+    "vkCmdPipelineBarrier": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-srcStageMask-01168",
+          "text": " If the <a href=\"#features-geometryShader\">geometry shaders</a> feature is not enabled, <code>srcStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-dstStageMask-01169",
+          "text": " If the <a href=\"#features-geometryShader\">geometry shaders</a> feature is not enabled, <code>dstStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-srcStageMask-01170",
+          "text": " If the <a href=\"#features-tessellationShader\">tessellation shaders</a> feature is not enabled, <code>srcStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT</code> or <code>VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-dstStageMask-01171",
+          "text": " If the <a href=\"#features-tessellationShader\">tessellation shaders</a> feature is not enabled, <code>dstStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT</code> or <code>VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-pDependencies-02285",
+          "text": " If <code>vkCmdPipelineBarrier</code> is called within a render pass instance, the render pass <strong class=\"purple\">must</strong> have been created with at least one <code>VkSubpassDependency</code> instance in <code>VkRenderPassCreateInfo</code>::<code>pDependencies</code> that expresses a dependency from the current subpass to itself, and for which <code>srcStageMask</code> contains a subset of the bit values in <code>VkSubpassDependency</code>::<code>srcStageMask</code>, <code>dstStageMask</code> contains a subset of the bit values in <code>VkSubpassDependency</code>::<code>dstStageMask</code>, <code>dependencyFlags</code> is equal to <code>VkSubpassDependency</code>::<code>dependencyFlags</code>, <code>srcAccessMask</code> member of each element of <code>pMemoryBarriers</code> and <code>pImageMemoryBarriers</code> contains a subset of the bit values in <code>VkSubpassDependency</code>::<code>srcAccessMask</code>, and <code>dstAccessMask</code> member of each element of <code>pMemoryBarriers</code> and <code>pImageMemoryBarriers</code> contains a subset of the bit values in <code>VkSubpassDependency</code>::<code>dstAccessMask</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-bufferMemoryBarrierCount-01178",
+          "text": " If <code>vkCmdPipelineBarrier</code> is called within a render pass instance, <code>bufferMemoryBarrierCount</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-oldLayout-01181",
+          "text": " If <code>vkCmdPipelineBarrier</code> is called within a render pass instance, the <code>oldLayout</code> and <code>newLayout</code> members of an element of <code>pImageMemoryBarriers</code> <strong class=\"purple\">must</strong> be equal"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-srcQueueFamilyIndex-01182",
+          "text": " If <code>vkCmdPipelineBarrier</code> is called within a render pass instance, the <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> members of any element of <code>pImageMemoryBarriers</code> <strong class=\"purple\">must</strong> be <code>VK_QUEUE_FAMILY_IGNORED</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-srcStageMask-01183",
+          "text": " Any pipeline stage included in <code>srcStageMask</code> or <code>dstStageMask</code> <strong class=\"purple\">must</strong> be supported by the capabilities of the queue family specified by the <code>queueFamilyIndex</code> member of the <a href=\"#VkCommandPoolCreateInfo\">VkCommandPoolCreateInfo</a> structure that was used to create the <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from, as specified in the <a href=\"#synchronization-pipeline-stages-supported\">table of supported pipeline stages</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-srcAccessMask-02815",
+          "text": " The <code>srcAccessMask</code> member of each element of <code>pMemoryBarriers</code> <strong class=\"purple\">must</strong> only include access flags that are supported by one or more of the pipeline stages in <code>srcStageMask</code>, as specified in the <a href=\"#synchronization-access-types-supported\">table of supported access types</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-dstAccessMask-02816",
+          "text": " The <code>dstAccessMask</code> member of each element of <code>pMemoryBarriers</code> <strong class=\"purple\">must</strong> only include access flags that are supported by one or more of the pipeline stages in <code>dstStageMask</code>, as specified in the <a href=\"#synchronization-access-types-supported\">table of supported access types</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-pBufferMemoryBarriers-02817",
+          "text": " For any element of <code>pBufferMemoryBarriers</code>, if its <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> members are equal, or if its <code>srcQueueFamilyIndex</code> is the queue family index that was used to create the command pool that <code>commandBuffer</code> was allocated from, then its <code>srcAccessMask</code> member <strong class=\"purple\">must</strong> only contain access flags that are supported by one or more of the pipeline stages in <code>srcStageMask</code>, as specified in the <a href=\"#synchronization-access-types-supported\">table of supported access types</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-pBufferMemoryBarriers-02818",
+          "text": " For any element of <code>pBufferMemoryBarriers</code>, if its <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> members are equal, or if its <code>dstQueueFamilyIndex</code> is the queue family index that was used to create the command pool that <code>commandBuffer</code> was allocated from, then its <code>dstAccessMask</code> member <strong class=\"purple\">must</strong> only contain access flags that are supported by one or more of the pipeline stages in <code>dstStageMask</code>, as specified in the <a href=\"#synchronization-access-types-supported\">table of supported access types</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-pImageMemoryBarriers-02819",
+          "text": " For any element of <code>pImageMemoryBarriers</code>, if its <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> members are equal, or if its <code>srcQueueFamilyIndex</code> is the queue family index that was used to create the command pool that <code>commandBuffer</code> was allocated from, then its <code>srcAccessMask</code> member <strong class=\"purple\">must</strong> only contain access flags that are supported by one or more of the pipeline stages in <code>srcStageMask</code>, as specified in the <a href=\"#synchronization-access-types-supported\">table of supported access types</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-pImageMemoryBarriers-02820",
+          "text": " For any element of <code>pImageMemoryBarriers</code>, if its <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> members are equal, or if its <code>dstQueueFamilyIndex</code> is the queue family index that was used to create the command pool that <code>commandBuffer</code> was allocated from, then its <code>dstAccessMask</code> member <strong class=\"purple\">must</strong> only contain access flags that are supported by one or more of the pipeline stages in <code>dstStageMask</code>, as specified in the <a href=\"#synchronization-access-types-supported\">table of supported access types</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-srcStageMask-parameter",
+          "text": " <code>srcStageMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineStageFlagBits\">VkPipelineStageFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-srcStageMask-requiredbitmask",
+          "text": " <code>srcStageMask</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-dstStageMask-parameter",
+          "text": " <code>dstStageMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineStageFlagBits\">VkPipelineStageFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-dstStageMask-requiredbitmask",
+          "text": " <code>dstStageMask</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-dependencyFlags-parameter",
+          "text": " <code>dependencyFlags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkDependencyFlagBits\">VkDependencyFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-pMemoryBarriers-parameter",
+          "text": " If <code>memoryBarrierCount</code> is not <code>0</code>, <code>pMemoryBarriers</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>memoryBarrierCount</code> valid <a href=\"#VkMemoryBarrier\">VkMemoryBarrier</a> structures"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-pBufferMemoryBarriers-parameter",
+          "text": " If <code>bufferMemoryBarrierCount</code> is not <code>0</code>, <code>pBufferMemoryBarriers</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>bufferMemoryBarrierCount</code> valid <a href=\"#VkBufferMemoryBarrier\">VkBufferMemoryBarrier</a> structures"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-pImageMemoryBarriers-parameter",
+          "text": " If <code>imageMemoryBarrierCount</code> is not <code>0</code>, <code>pImageMemoryBarriers</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>imageMemoryBarrierCount</code> valid <a href=\"#VkImageMemoryBarrier\">VkImageMemoryBarrier</a> structures"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support transfer, graphics, or compute operations"
+        }
+      ],
+      "(VK_KHR_depth_stencil_resolve)": [
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-image-02635",
+          "text": " If <code>vkCmdPipelineBarrier</code> is called within a render pass instance, the <code>image</code> member of any element of <code>pImageMemoryBarriers</code> <strong class=\"purple\">must</strong> be equal to one of the elements of <code>pAttachments</code> that the current <code>framebuffer</code> was created with, that is also referred to by one of the elements of the <code>pColorAttachments</code>, <code>pResolveAttachments</code> or <code>pDepthStencilAttachment</code> members of the <code>VkSubpassDescription</code> instance or by the <code>pDepthStencilResolveAttachment</code> member of the <code>VkSubpassDescriptionDepthStencilResolveKHR</code> structure that the current subpass was created with"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-oldLayout-02636",
+          "text": " If <code>vkCmdPipelineBarrier</code> is called within a render pass instance, the <code>oldLayout</code> and <code>newLayout</code> members of any element of <code>pImageMemoryBarriers</code> <strong class=\"purple\">must</strong> be equal to the <code>layout</code> member of an element of the <code>pColorAttachments</code>, <code>pResolveAttachments</code> or <code>pDepthStencilAttachment</code> members of the <code>VkSubpassDescription</code> instance or by the <code>pDepthStencilResolveAttachment</code> member of the <code>VkSubpassDescriptionDepthStencilResolveKHR</code> structure that the current subpass was created with, that refers to the same <code>image</code>"
+        }
+      ],
+      "!(VK_KHR_depth_stencil_resolve)": [
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-image-02637",
+          "text": " If <code>vkCmdPipelineBarrier</code> is called within a render pass instance, the <code>image</code> member of any element of <code>pImageMemoryBarriers</code> <strong class=\"purple\">must</strong> be equal to one of the elements of <code>pAttachments</code> that the current <code>framebuffer</code> was created with, that is also referred to by one of the elements of the <code>pColorAttachments</code>, <code>pResolveAttachments</code> or <code>pDepthStencilAttachment</code> members of the <code>VkSubpassDescription</code> instance that the current subpass was created with"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-oldLayout-02638",
+          "text": " If <code>vkCmdPipelineBarrier</code> is called within a render pass instance, the <code>oldLayout</code> and <code>newLayout</code> members of any element of <code>pImageMemoryBarriers</code> <strong class=\"purple\">must</strong> be equal to the <code>layout</code> member of an element of the <code>pColorAttachments</code>, <code>pResolveAttachments</code> or <code>pDepthStencilAttachment</code> members of the <code>VkSubpassDescription</code> instance that the current subpass was created with, that refers to the same <code>image</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-dependencyFlags-01186",
+          "text": " If <code>vkCmdPipelineBarrier</code> is called outside of a render pass instance, <code>dependencyFlags</code> <strong class=\"purple\">must</strong> not include <code>VK_DEPENDENCY_VIEW_LOCAL_BIT</code>"
+        }
+      ],
+      "(VK_NV_mesh_shader)": [
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-srcStageMask-02115",
+          "text": " If the <a href=\"#features-meshShader\">mesh shaders</a> feature is not enabled, <code>srcStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-srcStageMask-02116",
+          "text": " If the <a href=\"#features-taskShader\">task shaders</a> feature is not enabled, <code>srcStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-dstStageMask-02117",
+          "text": " If the <a href=\"#features-meshShader\">mesh shaders</a> feature is not enabled, <code>dstStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-dstStageMask-02118",
+          "text": " If the <a href=\"#features-taskShader\">task shaders</a> feature is not enabled, <code>dstStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV</code>"
+        }
+      ]
+    },
+    "VkMemoryBarrier": {
+      "core": [
+        {
+          "vuid": "VUID-VkMemoryBarrier-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_MEMORY_BARRIER</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier-srcAccessMask-parameter",
+          "text": " <code>srcAccessMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkAccessFlagBits\">VkAccessFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier-dstAccessMask-parameter",
+          "text": " <code>dstAccessMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkAccessFlagBits\">VkAccessFlagBits</a> values"
+        }
+      ]
+    },
+    "VkBufferMemoryBarrier": {
+      "core": [
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier-offset-01187",
+          "text": " <code>offset</code> <strong class=\"purple\">must</strong> be less than the size of <code>buffer</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier-size-01188",
+          "text": " If <code>size</code> is not equal to <code>VK_WHOLE_SIZE</code>, <code>size</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier-size-01189",
+          "text": " If <code>size</code> is not equal to <code>VK_WHOLE_SIZE</code>, <code>size</code> <strong class=\"purple\">must</strong> be less than or equal to than the size of <code>buffer</code> minus <code>offset</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier-buffer-01196",
+          "text": " If <code>buffer</code> was created with a sharing mode of <code>VK_SHARING_MODE_EXCLUSIVE</code>, and <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> are not <code>VK_QUEUE_FAMILY_IGNORED</code>, at least one of them <strong class=\"purple\">must</strong> be the same as the family of the queue that will execute this barrier"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier-buffer-01931",
+          "text": " If <code>buffer</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier-buffer-parameter",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        }
+      ],
+      "!(VK_VERSION_1_1,VK_KHR_external_memory)": [
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier-buffer-01190",
+          "text": " If <code>buffer</code> was created with a sharing mode of <code>VK_SHARING_MODE_CONCURRENT</code>, <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> <strong class=\"purple\">must</strong> both be <code>VK_QUEUE_FAMILY_IGNORED</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier-buffer-01192",
+          "text": " If <code>buffer</code> was created with a sharing mode of <code>VK_SHARING_MODE_EXCLUSIVE</code>, <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> <strong class=\"purple\">must</strong> either both be <code>VK_QUEUE_FAMILY_IGNORED</code>, or both be a valid queue family (see <a href=\"#devsandqueues-queueprops\">Queue Family Properties</a>)"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_external_memory)": [
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier-buffer-01191",
+          "text": " If <code>buffer</code> was created with a sharing mode of <code>VK_SHARING_MODE_CONCURRENT</code>, at least one of <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> <strong class=\"purple\">must</strong> be <code>VK_QUEUE_FAMILY_IGNORED</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier-buffer-01763",
+          "text": " If <code>buffer</code> was created with a sharing mode of <code>VK_SHARING_MODE_CONCURRENT</code>, and one of <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> is <code>VK_QUEUE_FAMILY_IGNORED</code>, the other <strong class=\"purple\">must</strong> be <code>VK_QUEUE_FAMILY_IGNORED</code> or a special queue family reserved for external memory ownership transfers, as described in <a href=\"#synchronization-queue-transfers\">Queue Family Ownership Transfer</a>."
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier-buffer-01193",
+          "text": " If <code>buffer</code> was created with a sharing mode of <code>VK_SHARING_MODE_EXCLUSIVE</code> and <code>srcQueueFamilyIndex</code> is <code>VK_QUEUE_FAMILY_IGNORED</code>, <code>dstQueueFamilyIndex</code> <strong class=\"purple\">must</strong> also be <code>VK_QUEUE_FAMILY_IGNORED</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier-buffer-01764",
+          "text": " If <code>buffer</code> was created with a sharing mode of <code>VK_SHARING_MODE_EXCLUSIVE</code> and <code>srcQueueFamilyIndex</code> is not <code>VK_QUEUE_FAMILY_IGNORED</code>, it <strong class=\"purple\">must</strong> be a valid queue family or a special queue family reserved for external memory transfers, as described in <a href=\"#synchronization-queue-transfers\">Queue Family Ownership Transfer</a>."
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier-buffer-01765",
+          "text": " If <code>buffer</code> was created with a sharing mode of <code>VK_SHARING_MODE_EXCLUSIVE</code> and <code>dstQueueFamilyIndex</code> is not <code>VK_QUEUE_FAMILY_IGNORED</code>, it <strong class=\"purple\">must</strong> be a valid queue family or a special queue family reserved for external memory transfers, as described in <a href=\"#synchronization-queue-transfers\">Queue Family Ownership Transfer</a>."
+        }
+      ]
+    },
+    "VkImageMemoryBarrier": {
+      "core": [
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01197",
+          "text": " <code>oldLayout</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_LAYOUT_UNDEFINED</code> or the current layout of the image subresources affected by the barrier"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-newLayout-01198",
+          "text": " <code>newLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_UNDEFINED</code> or <code>VK_IMAGE_LAYOUT_PREINITIALIZED</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-image-01205",
+          "text": " If <code>image</code> was created with a sharing mode of <code>VK_SHARING_MODE_EXCLUSIVE</code>, and <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> are not <code>VK_QUEUE_FAMILY_IGNORED</code>, at least one of them <strong class=\"purple\">must</strong> be the same as the family of the queue that will execute this barrier"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-subresourceRange-01486",
+          "text": " <code>subresourceRange.baseMipLevel</code> <strong class=\"purple\">must</strong> be less than the <code>mipLevels</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>image</code> was created"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-subresourceRange-01724",
+          "text": " If <code>subresourceRange.levelCount</code> is not <code>VK_REMAINING_MIP_LEVELS</code>, <span class=\"eq\"><code>subresourceRange.baseMipLevel</code> &#43; <code>subresourceRange.levelCount</code></span> <strong class=\"purple\">must</strong> be less than or equal to the <code>mipLevels</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>image</code> was created"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-subresourceRange-01488",
+          "text": " <code>subresourceRange.baseArrayLayer</code> <strong class=\"purple\">must</strong> be less than the <code>arrayLayers</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>image</code> was created"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-subresourceRange-01725",
+          "text": " If <code>subresourceRange.layerCount</code> is not <code>VK_REMAINING_ARRAY_LAYERS</code>, <span class=\"eq\"><code>subresourceRange.baseArrayLayer</code> &#43; <code>subresourceRange.layerCount</code></span> <strong class=\"purple\">must</strong> be less than or equal to the <code>arrayLayers</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>image</code> was created"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01208",
+          "text": " If either <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT</code> set"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01209",
+          "text": " If either <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code> set"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01210",
+          "text": " If either <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code> set"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01211",
+          "text": " If either <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_SAMPLED_BIT</code> or <code>VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT</code> set"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01212",
+          "text": " If either <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_TRANSFER_SRC_BIT</code> set"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01213",
+          "text": " If either <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_TRANSFER_DST_BIT</code> set"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-image-01932",
+          "text": " If <code>image</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkSampleLocationsInfoEXT\">VkSampleLocationsInfoEXT</a>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-oldLayout-parameter",
+          "text": " <code>oldLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageLayout\">VkImageLayout</a> value"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-newLayout-parameter",
+          "text": " <code>newLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageLayout\">VkImageLayout</a> value"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-image-parameter",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImage\">VkImage</a> handle"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-subresourceRange-parameter",
+          "text": " <code>subresourceRange</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageSubresourceRange\">VkImageSubresourceRange</a> structure"
+        }
+      ],
+      "!(VK_VERSION_1_1,VK_KHR_external_memory)": [
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-image-01199",
+          "text": " If <code>image</code> was created with a sharing mode of <code>VK_SHARING_MODE_CONCURRENT</code>, <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> <strong class=\"purple\">must</strong> both be <code>VK_QUEUE_FAMILY_IGNORED</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-image-01200",
+          "text": " If <code>image</code> was created with a sharing mode of <code>VK_SHARING_MODE_EXCLUSIVE</code>, <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> <strong class=\"purple\">must</strong> either both be <code>VK_QUEUE_FAMILY_IGNORED</code>, or both be a valid queue family (see <a href=\"#devsandqueues-queueprops\">Queue Family Properties</a>)."
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_external_memory)": [
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-image-01381",
+          "text": " If <code>image</code> was created with a sharing mode of <code>VK_SHARING_MODE_CONCURRENT</code>, at least one of <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> <strong class=\"purple\">must</strong> be <code>VK_QUEUE_FAMILY_IGNORED</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-image-01766",
+          "text": " If <code>image</code> was created with a sharing mode of <code>VK_SHARING_MODE_CONCURRENT</code>, and one of <code>srcQueueFamilyIndex</code> and <code>dstQueueFamilyIndex</code> is <code>VK_QUEUE_FAMILY_IGNORED</code>, the other <strong class=\"purple\">must</strong> be <code>VK_QUEUE_FAMILY_IGNORED</code> or a special queue family reserved for external memory transfers, as described in <a href=\"#synchronization-queue-transfers\">Queue Family Ownership Transfer</a>."
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-image-01201",
+          "text": " If <code>image</code> was created with a sharing mode of <code>VK_SHARING_MODE_EXCLUSIVE</code> and <code>srcQueueFamilyIndex</code> is <code>VK_QUEUE_FAMILY_IGNORED</code>, <code>dstQueueFamilyIndex</code> <strong class=\"purple\">must</strong> also be <code>VK_QUEUE_FAMILY_IGNORED</code>."
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-image-01767",
+          "text": " If <code>image</code> was created with a sharing mode of <code>VK_SHARING_MODE_EXCLUSIVE</code> and <code>srcQueueFamilyIndex</code> is not <code>VK_QUEUE_FAMILY_IGNORED</code>, it <strong class=\"purple\">must</strong> be a valid queue family or a special queue family reserved for external memory transfers, as described in <a href=\"#synchronization-queue-transfers\">Queue Family Ownership Transfer</a>."
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-image-01768",
+          "text": " If <code>image</code> was created with a sharing mode of <code>VK_SHARING_MODE_EXCLUSIVE</code> and <code>dstQueueFamilyIndex</code> is not <code>VK_QUEUE_FAMILY_IGNORED</code>, it <strong class=\"purple\">must</strong> be a valid queue family or a special queue family reserved for external memory transfers, as described in <a href=\"#synchronization-queue-transfers\">Queue Family Ownership Transfer</a>."
+        }
+      ],
+      "(VK_KHR_separate_depth_stencil_layouts)": [
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-image-03319",
+          "text": " If <code>image</code> has a depth/stencil format with both depth and stencil and the <a href=\"#features-separateDepthStencilLayouts\">separateDepthStencilLayouts</a> feature is enabled, then the <code>aspectMask</code> member of <code>subresourceRange</code> <strong class=\"purple\">must</strong> include either or both <code>VK_IMAGE_ASPECT_DEPTH_BIT</code> and <code>VK_IMAGE_ASPECT_STENCIL_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-image-03320",
+          "text": " If <code>image</code> has a depth/stencil format with both depth and stencil and the <a href=\"#features-separateDepthStencilLayouts\">separateDepthStencilLayouts</a> feature is not enabled, then the <code>aspectMask</code> member of <code>subresourceRange</code> <strong class=\"purple\">must</strong> include both <code>VK_IMAGE_ASPECT_DEPTH_BIT</code> and <code>VK_IMAGE_ASPECT_STENCIL_BIT</code>"
+        }
+      ],
+      "!(VK_KHR_separate_depth_stencil_layouts)": [
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-image-01207",
+          "text": " If <code>image</code> has a depth/stencil format with both depth and stencil components, then the <code>aspectMask</code> member of <code>subresourceRange</code> <strong class=\"purple\">must</strong> include both <code>VK_IMAGE_ASPECT_DEPTH_BIT</code> and <code>VK_IMAGE_ASPECT_STENCIL_BIT</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-image-01671",
+          "text": " If <code>image</code> has a single-plane color format or is not <em>disjoint</em>, then the <code>aspectMask</code> member of <code>subresourceRange</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_ASPECT_COLOR_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-image-01672",
+          "text": " If <code>image</code> has a multi-planar format and the image is <em>disjoint</em>, then the <code>aspectMask</code> member of <code>subresourceRange</code> <strong class=\"purple\">must</strong> include either at least one of <code>VK_IMAGE_ASPECT_PLANE_0_BIT</code>, <code>VK_IMAGE_ASPECT_PLANE_1_BIT</code>, and <code>VK_IMAGE_ASPECT_PLANE_2_BIT</code>; or <strong class=\"purple\">must</strong> include <code>VK_IMAGE_ASPECT_COLOR_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-image-01673",
+          "text": " If <code>image</code> has a multi-planar format with only two planes, then the <code>aspectMask</code> member of <code>subresourceRange</code> <strong class=\"purple\">must</strong> not include <code>VK_IMAGE_ASPECT_PLANE_2_BIT</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_maintenance2)": [
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01658",
+          "text": " If either <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code> set"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01659",
+          "text": " If either <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code> set"
+        }
+      ],
+      "(VK_NV_shading_rate_image)": [
+        {
+          "vuid": "VUID-VkImageMemoryBarrier-oldLayout-02088",
+          "text": " If either <code>oldLayout</code> or <code>newLayout</code> is <code>VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV</code> then <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV</code> set"
+        }
+      ]
+    },
+    "vkQueueWaitIdle": {
+      "core": [
+        {
+          "vuid": "VUID-vkQueueWaitIdle-queue-parameter",
+          "text": " <code>queue</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkQueue\">VkQueue</a> handle"
+        }
+      ]
+    },
+    "vkDeviceWaitIdle": {
+      "core": [
+        {
+          "vuid": "VUID-vkDeviceWaitIdle-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        }
+      ]
+    },
+    "vkGetCalibratedTimestampsEXT": {
+      "(VK_EXT_calibrated_timestamps)": [
+        {
+          "vuid": "VUID-vkGetCalibratedTimestampsEXT-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetCalibratedTimestampsEXT-pTimestampInfos-parameter",
+          "text": " <code>pTimestampInfos</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>timestampCount</code> valid <a href=\"#VkCalibratedTimestampInfoEXT\">VkCalibratedTimestampInfoEXT</a> structures"
+        },
+        {
+          "vuid": "VUID-vkGetCalibratedTimestampsEXT-pTimestamps-parameter",
+          "text": " <code>pTimestamps</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>timestampCount</code> <code>uint64_t</code> values"
+        },
+        {
+          "vuid": "VUID-vkGetCalibratedTimestampsEXT-pMaxDeviation-parameter",
+          "text": " <code>pMaxDeviation</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint64_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetCalibratedTimestampsEXT-timestampCount-arraylength",
+          "text": " <code>timestampCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkCalibratedTimestampInfoEXT": {
+      "(VK_EXT_calibrated_timestamps)": [
+        {
+          "vuid": "VUID-VkCalibratedTimestampInfoEXT-timeDomain-02354",
+          "text": " <code>timeDomain</code> <strong class=\"purple\">must</strong> be one of the <a href=\"#VkTimeDomainEXT\">VkTimeDomainEXT</a> values returned by <a href=\"#vkGetPhysicalDeviceCalibrateableTimeDomainsEXT\">vkGetPhysicalDeviceCalibrateableTimeDomainsEXT</a>"
+        },
+        {
+          "vuid": "VUID-VkCalibratedTimestampInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkCalibratedTimestampInfoEXT-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkCalibratedTimestampInfoEXT-timeDomain-parameter",
+          "text": " <code>timeDomain</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkTimeDomainEXT\">VkTimeDomainEXT</a> value"
+        }
+      ]
+    },
+    "vkCreateRenderPass": {
+      "core": [
+        {
+          "vuid": "VUID-vkCreateRenderPass-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateRenderPass-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkRenderPassCreateInfo\">VkRenderPassCreateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateRenderPass-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateRenderPass-pRenderPass-parameter",
+          "text": " <code>pRenderPass</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkRenderPass\">VkRenderPass</a> handle"
+        }
+      ]
+    },
+    "VkRenderPassCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo-attachment-00834",
+          "text": " If the <code>attachment</code> member of any element of <code>pInputAttachments</code>, <code>pColorAttachments</code>, <code>pResolveAttachments</code> or <code>pDepthStencilAttachment</code>, or any element of <code>pPreserveAttachments</code> in any element of <code>pSubpasses</code> is not <code>VK_ATTACHMENT_UNUSED</code>, it <strong class=\"purple\">must</strong> be less than <code>attachmentCount</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo-pAttachments-00836",
+          "text": " For any member of <code>pAttachments</code> with a <code>loadOp</code> equal to <code>VK_ATTACHMENT_LOAD_OP_CLEAR</code>, the first use of that attachment <strong class=\"purple\">must</strong> not specify a <code>layout</code> equal to <code>VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL</code> or <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL</code>."
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo-pAttachments-02511",
+          "text": " For any member of <code>pAttachments</code> with a <code>stencilLoadOp</code> equal to <code>VK_ATTACHMENT_LOAD_OP_CLEAR</code>, the first use of that attachment <strong class=\"purple\">must</strong> not specify a <code>layout</code> equal to <code>VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL</code> or <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL</code>."
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo-pDependencies-00837",
+          "text": " For any element of <code>pDependencies</code>, if the <code>srcSubpass</code> is not <code>VK_SUBPASS_EXTERNAL</code>, all stage flags included in the <code>srcStageMask</code> member of that dependency <strong class=\"purple\">must</strong> be a pipeline stage supported by the <a href=\"#synchronization-pipeline-stages-types\">pipeline</a> identified by the <code>pipelineBindPoint</code> member of the source subpass"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo-pDependencies-00838",
+          "text": " For any element of <code>pDependencies</code>, if the <code>dstSubpass</code> is not <code>VK_SUBPASS_EXTERNAL</code>, all stage flags included in the <code>dstStageMask</code> member of that dependency <strong class=\"purple\">must</strong> be a pipeline stage supported by the <a href=\"#synchronization-pipeline-stages-types\">pipeline</a> identified by the <code>pipelineBindPoint</code> member of the destination subpass"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo-srcSubpass-02517",
+          "text": " The <code>srcSubpass</code> member of each element of <code>pDependencies</code> <strong class=\"purple\">must</strong> be less than <code>subpassCount</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo-dstSubpass-02518",
+          "text": " The <code>dstSubpass</code> member of each element of <code>pDependencies</code> <strong class=\"purple\">must</strong> be less than <code>subpassCount</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo-pNext-pNext",
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkRenderPassFragmentDensityMapCreateInfoEXT\">VkRenderPassFragmentDensityMapCreateInfoEXT</a>, <a href=\"#VkRenderPassInputAttachmentAspectCreateInfo\">VkRenderPassInputAttachmentAspectCreateInfo</a>, or <a href=\"#VkRenderPassMultiviewCreateInfo\">VkRenderPassMultiviewCreateInfo</a>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo-sType-unique",
+          "text": " Each <code>sType</code> member in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo-pAttachments-parameter",
+          "text": " If <code>attachmentCount</code> is not <code>0</code>, <code>pAttachments</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>attachmentCount</code> valid <a href=\"#VkAttachmentDescription\">VkAttachmentDescription</a> structures"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo-pSubpasses-parameter",
+          "text": " <code>pSubpasses</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>subpassCount</code> valid <a href=\"#VkSubpassDescription\">VkSubpassDescription</a> structures"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo-pDependencies-parameter",
+          "text": " If <code>dependencyCount</code> is not <code>0</code>, <code>pDependencies</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>dependencyCount</code> valid <a href=\"#VkSubpassDependency\">VkSubpassDependency</a> structures"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo-subpassCount-arraylength",
+          "text": " <code>subpassCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_maintenance2)": [
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo-pAttachments-01566",
+          "text": " For any member of <code>pAttachments</code> with a <code>loadOp</code> equal to <code>VK_ATTACHMENT_LOAD_OP_CLEAR</code>, the first use of that attachment <strong class=\"purple\">must</strong> not specify a <code>layout</code> equal to <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL</code>."
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo-pAttachments-01567",
+          "text": " For any member of <code>pAttachments</code> with a <code>stencilLoadOp</code> equal to <code>VK_ATTACHMENT_LOAD_OP_CLEAR</code>, the first use of that attachment <strong class=\"purple\">must</strong> not specify a <code>layout</code> equal to <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL</code>."
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo-pNext-01926",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkRenderPassInputAttachmentAspectCreateInfo\">VkRenderPassInputAttachmentAspectCreateInfo</a> structure, the <code>subpass</code> member of each element of its <code>pAspectReferences</code> member <strong class=\"purple\">must</strong> be less than <code>subpassCount</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo-pNext-01927",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkRenderPassInputAttachmentAspectCreateInfo\">VkRenderPassInputAttachmentAspectCreateInfo</a> structure, the <code>inputAttachmentIndex</code> member of each element of its <code>pAspectReferences</code> member <strong class=\"purple\">must</strong> be less than the value of <code>inputAttachmentCount</code> in the member of <code>pSubpasses</code> identified by its <code>subpass</code> member"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo-pNext-01963",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkRenderPassInputAttachmentAspectCreateInfo\">VkRenderPassInputAttachmentAspectCreateInfo</a> structure, for any element of the <code>pInputAttachments</code> member of any element of <code>pSubpasses</code> where the <code>attachment</code> member is not <code>VK_ATTACHMENT_UNUSED</code>, the <code>aspectMask</code> member of the corresponding element of <a href=\"#VkRenderPassInputAttachmentAspectCreateInfo\">VkRenderPassInputAttachmentAspectCreateInfo</a>::<code>pAspectReferences</code> <strong class=\"purple\">must</strong> only include aspects that are present in images of the format specified by the element of <code>pAttachments</code> at <code>attachment</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo-pNext-01928",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkRenderPassMultiviewCreateInfo\">VkRenderPassMultiviewCreateInfo</a> structure, and its <code>subpassCount</code> member is not zero, that member <strong class=\"purple\">must</strong> be equal to the value of <code>subpassCount</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo-pNext-01929",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkRenderPassMultiviewCreateInfo\">VkRenderPassMultiviewCreateInfo</a> structure, if its <code>dependencyCount</code> member is not zero, it <strong class=\"purple\">must</strong> be equal to <code>dependencyCount</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo-pNext-01930",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkRenderPassMultiviewCreateInfo\">VkRenderPassMultiviewCreateInfo</a> structure, for each non-zero element of <code>pViewOffsets</code>, the <code>srcSubpass</code> and <code>dstSubpass</code> members of <code>pDependencies</code> at the same index <strong class=\"purple\">must</strong> not be equal"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo-pNext-02512",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkRenderPassMultiviewCreateInfo\">VkRenderPassMultiviewCreateInfo</a> structure, for any element of <code>pDependencies</code> with a <code>dependencyFlags</code> member that does not include <code>VK_DEPENDENCY_VIEW_LOCAL_BIT</code>, the corresponding element of the <code>pViewOffsets</code> member of that <a href=\"#VkRenderPassMultiviewCreateInfo\">VkRenderPassMultiviewCreateInfo</a> instance <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo-pNext-02513",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkRenderPassMultiviewCreateInfo\">VkRenderPassMultiviewCreateInfo</a> structure, elements of its <code>pViewMasks</code> member <strong class=\"purple\">must</strong> either all be <code>0</code>, or all not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo-pNext-02514",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkRenderPassMultiviewCreateInfo\">VkRenderPassMultiviewCreateInfo</a> structure, and each element of its <code>pViewMasks</code> member is <code>0</code>, the <code>dependencyFlags</code> member of each element of <code>pDependencies</code> <strong class=\"purple\">must</strong> not include <code>VK_DEPENDENCY_VIEW_LOCAL_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo-pNext-02515",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkRenderPassMultiviewCreateInfo\">VkRenderPassMultiviewCreateInfo</a> structure, and each element of its <code>pViewMasks</code> member is <code>0</code>, <code>correlatedViewMaskCount</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo-pNext-02516",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkRenderPassMultiviewCreateInfo\">VkRenderPassMultiviewCreateInfo</a> structure, each element of its <code>pViewMask</code> member <strong class=\"purple\">must</strong> not have a bit set at an index greater than or equal to <a href=\"#VkPhysicalDeviceLimits\">VkPhysicalDeviceLimits</a>::<code>maxFramebufferLayers</code>"
+        }
+      ]
+    },
+    "VkRenderPassMultiviewCreateInfo": {
+      "(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-VkRenderPassMultiviewCreateInfo-pCorrelationMasks-00841",
+          "text": " Each view index <strong class=\"purple\">must</strong> not be set in more than one element of <code>pCorrelationMasks</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassMultiviewCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassMultiviewCreateInfo-pViewMasks-parameter",
+          "text": " If <code>subpassCount</code> is not <code>0</code>, <code>pViewMasks</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>subpassCount</code> <code>uint32_t</code> values"
+        },
+        {
+          "vuid": "VUID-VkRenderPassMultiviewCreateInfo-pViewOffsets-parameter",
+          "text": " If <code>dependencyCount</code> is not <code>0</code>, <code>pViewOffsets</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>dependencyCount</code> <code>int32_t</code> values"
+        },
+        {
+          "vuid": "VUID-VkRenderPassMultiviewCreateInfo-pCorrelationMasks-parameter",
+          "text": " If <code>correlationMaskCount</code> is not <code>0</code>, <code>pCorrelationMasks</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>correlationMaskCount</code> <code>uint32_t</code> values"
+        }
+      ]
+    },
+    "VkRenderPassFragmentDensityMapCreateInfoEXT": {
+      "(VK_EXT_fragment_density_map)": [
+        {
+          "vuid": "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02547",
+          "text": " If <code>fragmentDensityMapAttachment</code> is not <code>VK_ATTACHMENT_UNUSED</code>, <code>fragmentDensityMapAttachment</code> <strong class=\"purple\">must</strong> be less than <code>VkRenderPassCreateInfo</code>::<code>attachmentCount</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02548",
+          "text": " If <code>fragmentDensityMapAttachment</code> is not <code>VK_ATTACHMENT_UNUSED</code>, <code>fragmentDensityMapAttachment</code> <strong class=\"purple\">must</strong> not be an element of <code>VkSubpassDescription</code>::<code>pInputAttachments</code>, <code>VkSubpassDescription</code>::<code>pColorAttachments</code>, <code>VkSubpassDescription</code>::<code>pResolveAttachments</code>, <code>VkSubpassDescription</code>::<code>pDepthStencilAttachment</code>, or <code>VkSubpassDescription</code>::<code>pPreserveAttachments</code> for any subpass"
+        },
+        {
+          "vuid": "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02549",
+          "text": " If <code>fragmentDensityMapAttachment</code> is not <code>VK_ATTACHMENT_UNUSED</code>, <code>layout</code> <strong class=\"purple\">must</strong> be equal to <code>VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT</code>, or <code>VK_IMAGE_LAYOUT_GENERAL</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02550",
+          "text": " If <code>fragmentDensityMapAttachment</code> is not <code>VK_ATTACHMENT_UNUSED</code>, <code>fragmentDensityMapAttachment</code> <strong class=\"purple\">must</strong> reference an attachment with a <code>loadOp</code> equal to <code>VK_ATTACHMENT_LOAD_OP_LOAD</code> or <code>VK_ATTACHMENT_LOAD_OP_DONT_CARE</code>."
+        },
+        {
+          "vuid": "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02551",
+          "text": " If <code>fragmentDensityMapAttachment</code> is not <code>VK_ATTACHMENT_UNUSED</code>, <code>fragmentDensityMapAttachment</code> <strong class=\"purple\">must</strong> reference an attachment with a <code>storeOp</code> equal to <code>VK_ATTACHMENT_STORE_OP_DONT_CARE</code>."
+        },
+        {
+          "vuid": "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-parameter",
+          "text": " <code>fragmentDensityMapAttachment</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkAttachmentReference\">VkAttachmentReference</a> structure"
+        }
+      ]
+    },
+    "VkAttachmentDescription": {
+      "core": [
+        {
+          "vuid": "VUID-VkAttachmentDescription-finalLayout-00843",
+          "text": " <code>finalLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_UNDEFINED</code> or <code>VK_IMAGE_LAYOUT_PREINITIALIZED</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription-format-03280",
+          "text": " If <code>format</code> is a color format, <code>initialLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL</code>, or <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription-format-03281",
+          "text": " If <code>format</code> is a depth/stencil format, <code>initialLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription-format-03282",
+          "text": " If <code>format</code> is a color format, name:finalLayout <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL</code>, or <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription-format-03283",
+          "text": " If <code>format</code> is a depth/stencil format, <code>finalLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkAttachmentDescriptionFlagBits\">VkAttachmentDescriptionFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription-format-parameter",
+          "text": " <code>format</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFormat\">VkFormat</a> value"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription-samples-parameter",
+          "text": " <code>samples</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSampleCountFlagBits\">VkSampleCountFlagBits</a> value"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription-loadOp-parameter",
+          "text": " <code>loadOp</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkAttachmentLoadOp\">VkAttachmentLoadOp</a> value"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription-storeOp-parameter",
+          "text": " <code>storeOp</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkAttachmentStoreOp\">VkAttachmentStoreOp</a> value"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription-stencilLoadOp-parameter",
+          "text": " <code>stencilLoadOp</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkAttachmentLoadOp\">VkAttachmentLoadOp</a> value"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription-stencilStoreOp-parameter",
+          "text": " <code>stencilStoreOp</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkAttachmentStoreOp\">VkAttachmentStoreOp</a> value"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription-initialLayout-parameter",
+          "text": " <code>initialLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageLayout\">VkImageLayout</a> value"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription-finalLayout-parameter",
+          "text": " <code>finalLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageLayout\">VkImageLayout</a> value"
+        }
+      ],
+      "(VK_KHR_separate_depth_stencil_layouts)": [
+        {
+          "vuid": "VUID-VkAttachmentDescription-separateDepthStencilLayouts-03284",
+          "text": " If the <a href=\"#features-separateDepthStencilLayouts\"><code>separateDepthStencilLayouts</code></a> feature is not enabled, <code>initialLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR</code>, or <code>VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription-separateDepthStencilLayouts-03285",
+          "text": " If the <a href=\"#features-separateDepthStencilLayouts\"><code>separateDepthStencilLayouts</code></a> feature is not enabled, <code>finalLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR</code>, or <code>VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription-format-03286",
+          "text": " If <code>format</code> is a color format, <code>initialLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR</code>, or <code>VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription-format-03287",
+          "text": " If <code>format</code> is a color format, <code>finalLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR</code>, or <code>VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription-format-03288",
+          "text": " If <code>format</code> is a depth/stencil format which includes both depth and stencil aspects, <code>initialLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR</code>, or <code>VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription-format-03289",
+          "text": " If <code>format</code> is a depth/stencil format which includes both depth and stencil aspects, <code>finalLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR</code>, or <code>VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription-format-03290",
+          "text": " If <code>format</code> is a depth/stencil format which includes only the depth aspect, <code>initialLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR</code> or <code>VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription-format-03291",
+          "text": " If <code>format</code> is a depth/stencil format which includes only the depth aspect, <code>finalLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR</code> or <code>VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription-format-03292",
+          "text": " If <code>format</code> is a depth/stencil format which includes only the stencil aspect, <code>initialLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR</code> or <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription-format-03293",
+          "text": " If <code>format</code> is a depth/stencil format which includes only the stencil aspect, <code>finalLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR</code> or <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR</code>"
+        }
+      ]
+    },
+    "VkRenderPassInputAttachmentAspectCreateInfo": {
+      "(VK_VERSION_1_1,VK_KHR_maintenance2)": [
+        {
+          "vuid": "VUID-VkRenderPassInputAttachmentAspectCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassInputAttachmentAspectCreateInfo-pAspectReferences-parameter",
+          "text": " <code>pAspectReferences</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>aspectReferenceCount</code> valid <a href=\"#VkInputAttachmentAspectReference\">VkInputAttachmentAspectReference</a> structures"
+        },
+        {
+          "vuid": "VUID-VkRenderPassInputAttachmentAspectCreateInfo-aspectReferenceCount-arraylength",
+          "text": " <code>aspectReferenceCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkInputAttachmentAspectReference": {
+      "(VK_VERSION_1_1,VK_KHR_maintenance2)": [
+        {
+          "vuid": "VUID-VkInputAttachmentAspectReference-aspectMask-01964",
+          "text": " <code>aspectMask</code> <strong class=\"purple\">must</strong> not include <code>VK_IMAGE_ASPECT_METADATA_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkInputAttachmentAspectReference-aspectMask-parameter",
+          "text": " <code>aspectMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkImageAspectFlagBits\">VkImageAspectFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkInputAttachmentAspectReference-aspectMask-requiredbitmask",
+          "text": " <code>aspectMask</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_maintenance2)+(VK_EXT_image_drm_format_modifier)": [
+        {
+          "vuid": "VUID-VkInputAttachmentAspectReference-aspectMask-02250",
+          "text": " <code>aspectMask</code> <strong class=\"purple\">must</strong> not include <code>VK_IMAGE_ASPECT_MEMORY_PLANE_i_BIT_EXT</code> for any index <code>i</code>."
+        }
+      ]
+    },
+    "VkSubpassDescription": {
+      "core": [
+        {
+          "vuid": "VUID-VkSubpassDescription-pipelineBindPoint-00844",
+          "text": " <code>pipelineBindPoint</code> <strong class=\"purple\">must</strong> be <code>VK_PIPELINE_BIND_POINT_GRAPHICS</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription-colorAttachmentCount-00845",
+          "text": " <code>colorAttachmentCount</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxColorAttachments</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription-loadOp-00846",
+          "text": " If the first use of an attachment in this render pass is as an input attachment, and the attachment is not also used as a color or depth/stencil attachment in the same subpass, then <code>loadOp</code> <strong class=\"purple\">must</strong> not be <code>VK_ATTACHMENT_LOAD_OP_CLEAR</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription-pResolveAttachments-00847",
+          "text": " If <code>pResolveAttachments</code> is not <code>NULL</code>, for each resolve attachment that is not <code>VK_ATTACHMENT_UNUSED</code>, the corresponding color attachment <strong class=\"purple\">must</strong> not be <code>VK_ATTACHMENT_UNUSED</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription-pResolveAttachments-00848",
+          "text": " If <code>pResolveAttachments</code> is not <code>NULL</code>, for each resolve attachment that is not <code>VK_ATTACHMENT_UNUSED</code>, the corresponding color attachment <strong class=\"purple\">must</strong> not have a sample count of <code>VK_SAMPLE_COUNT_1_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription-pResolveAttachments-00849",
+          "text": " If <code>pResolveAttachments</code> is not <code>NULL</code>, each resolve attachment that is not <code>VK_ATTACHMENT_UNUSED</code> <strong class=\"purple\">must</strong> have a sample count of <code>VK_SAMPLE_COUNT_1_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription-pResolveAttachments-00850",
+          "text": " If <code>pResolveAttachments</code> is not <code>NULL</code>, each resolve attachment that is not <code>VK_ATTACHMENT_UNUSED</code> <strong class=\"purple\">must</strong> have the same <a href=\"#VkFormat\">VkFormat</a> as its corresponding color attachment"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription-pColorAttachments-01417",
+          "text": " All attachments in <code>pColorAttachments</code> that are not <code>VK_ATTACHMENT_UNUSED</code> <strong class=\"purple\">must</strong> have the same sample count"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription-pInputAttachments-02647",
+          "text": " All attachments in <code>pInputAttachments</code> that are not <code>VK_ATTACHMENT_UNUSED</code> <strong class=\"purple\">must</strong> have formats whose features contain at least one of <code>VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT</code> or <code>VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT</code>."
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription-pColorAttachments-02648",
+          "text": " All attachments in <code>pColorAttachments</code> that are not <code>VK_ATTACHMENT_UNUSED</code> <strong class=\"purple\">must</strong> have formats whose features contain <code>VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription-pResolveAttachments-02649",
+          "text": " All attachments in <code>pResolveAttachments</code> that are not <code>VK_ATTACHMENT_UNUSED</code> <strong class=\"purple\">must</strong> have formats whose features contain <code>VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription-pDepthStencilAttachment-02650",
+          "text": " If <code>pDepthStencilAttachment</code> is not <code>NULL</code> and the attachment is not <code>VK_ATTACHMENT_UNUSED</code> then it <strong class=\"purple\">must</strong> have a format whose features contain <code>VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription-pDepthStencilAttachment-01418",
+          "text": " If neither the <code>VK_AMD_mixed_attachment_samples</code> nor the <code>VK_NV_framebuffer_mixed_samples</code> extensions are enabled, and if <code>pDepthStencilAttachment</code> is not <code>VK_ATTACHMENT_UNUSED</code> and any attachments in <code>pColorAttachments</code> are not <code>VK_ATTACHMENT_UNUSED</code>, they <strong class=\"purple\">must</strong> have the same sample count"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription-attachment-00853",
+          "text": " The <code>attachment</code> member of each element of <code>pPreserveAttachments</code> <strong class=\"purple\">must</strong> not be <code>VK_ATTACHMENT_UNUSED</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription-pPreserveAttachments-00854",
+          "text": " Each element of <code>pPreserveAttachments</code> <strong class=\"purple\">must</strong> not also be an element of any other member of the subpass description"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription-layout-02519",
+          "text": " If any attachment is used by more than one <a href=\"#VkAttachmentReference\">VkAttachmentReference</a> member, then each use <strong class=\"purple\">must</strong> use the same <code>layout</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkSubpassDescriptionFlagBits\">VkSubpassDescriptionFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription-pipelineBindPoint-parameter",
+          "text": " <code>pipelineBindPoint</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipelineBindPoint\">VkPipelineBindPoint</a> value"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription-pInputAttachments-parameter",
+          "text": " If <code>inputAttachmentCount</code> is not <code>0</code>, <code>pInputAttachments</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>inputAttachmentCount</code> valid <a href=\"#VkAttachmentReference\">VkAttachmentReference</a> structures"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription-pColorAttachments-parameter",
+          "text": " If <code>colorAttachmentCount</code> is not <code>0</code>, <code>pColorAttachments</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>colorAttachmentCount</code> valid <a href=\"#VkAttachmentReference\">VkAttachmentReference</a> structures"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription-pResolveAttachments-parameter",
+          "text": " If <code>colorAttachmentCount</code> is not <code>0</code>, and <code>pResolveAttachments</code> is not <code>NULL</code>, <code>pResolveAttachments</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>colorAttachmentCount</code> valid <a href=\"#VkAttachmentReference\">VkAttachmentReference</a> structures"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription-pDepthStencilAttachment-parameter",
+          "text": " If <code>pDepthStencilAttachment</code> is not <code>NULL</code>, <code>pDepthStencilAttachment</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAttachmentReference\">VkAttachmentReference</a> structure"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription-pPreserveAttachments-parameter",
+          "text": " If <code>preserveAttachmentCount</code> is not <code>0</code>, <code>pPreserveAttachments</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>preserveAttachmentCount</code> <code>uint32_t</code> values"
+        }
+      ],
+      "(VK_AMD_mixed_attachment_samples)": [
+        {
+          "vuid": "VUID-VkSubpassDescription-pColorAttachments-01506",
+          "text": " If the <code>VK_AMD_mixed_attachment_samples</code> extension is enabled, and all attachments in <code>pColorAttachments</code> that are not <code>VK_ATTACHMENT_UNUSED</code> <strong class=\"purple\">must</strong> have a sample count that is smaller than or equal to the sample count of <code>pDepthStencilAttachment</code> if it is not <code>VK_ATTACHMENT_UNUSED</code>"
+        }
+      ],
+      "(VK_NVX_multiview_per_view_attributes)": [
+        {
+          "vuid": "VUID-VkSubpassDescription-flags-00856",
+          "text": " If <code>flags</code> includes <code>VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX</code>, it <strong class=\"purple\">must</strong> also include <code>VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX</code>."
+        }
+      ]
+    },
+    "VkAttachmentReference": {
+      "core": [
+        {
+          "vuid": "VUID-VkAttachmentReference-layout-00857",
+          "text": " If <code>attachment</code> is not <code>VK_ATTACHMENT_UNUSED</code>, <code>layout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_UNDEFINED</code>, <code>VK_IMAGE_LAYOUT_PREINITIALIZED</code>, <code>VK_IMAGE_LAYOUT_PRESENT_SRC_KHR</code>, <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR</code>, or <code>VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentReference-layout-parameter",
+          "text": " <code>layout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageLayout\">VkImageLayout</a> value"
+        }
+      ]
+    },
+    "VkSubpassDependency": {
+      "core": [
+        {
+          "vuid": "VUID-VkSubpassDependency-srcStageMask-00860",
+          "text": " If the <a href=\"#features-geometryShader\">geometry shaders</a> feature is not enabled, <code>srcStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency-dstStageMask-00861",
+          "text": " If the <a href=\"#features-geometryShader\">geometry shaders</a> feature is not enabled, <code>dstStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency-srcStageMask-00862",
+          "text": " If the <a href=\"#features-tessellationShader\">tessellation shaders</a> feature is not enabled, <code>srcStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT</code> or <code>VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency-dstStageMask-00863",
+          "text": " If the <a href=\"#features-tessellationShader\">tessellation shaders</a> feature is not enabled, <code>dstStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT</code> or <code>VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency-srcSubpass-00864",
+          "text": " <code>srcSubpass</code> <strong class=\"purple\">must</strong> be less than or equal to <code>dstSubpass</code>, unless one of them is <code>VK_SUBPASS_EXTERNAL</code>, to avoid cyclic dependencies and ensure a valid execution order"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency-srcSubpass-00865",
+          "text": " <code>srcSubpass</code> and <code>dstSubpass</code> <strong class=\"purple\">must</strong> not both be equal to <code>VK_SUBPASS_EXTERNAL</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency-srcSubpass-00867",
+          "text": " If <code>srcSubpass</code> is equal to <code>dstSubpass</code> and not all of the stages in <code>srcStageMask</code> and <code>dstStageMask</code> are <a href=\"#synchronization-framebuffer-regions\">framebuffer-space stages</a>, the <a href=\"#synchronization-pipeline-stages-order\">logically latest</a> pipeline stage in <code>srcStageMask</code> <strong class=\"purple\">must</strong> be <a href=\"#synchronization-pipeline-stages-order\">logically earlier</a> than or equal to the <a href=\"#synchronization-pipeline-stages-order\">logically earliest</a> pipeline stage in <code>dstStageMask</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency-srcAccessMask-00868",
+          "text": " Any access flag included in <code>srcAccessMask</code> <strong class=\"purple\">must</strong> be supported by one of the pipeline stages in <code>srcStageMask</code>, as specified in the <a href=\"#synchronization-access-types-supported\">table of supported access types</a>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency-dstAccessMask-00869",
+          "text": " Any access flag included in <code>dstAccessMask</code> <strong class=\"purple\">must</strong> be supported by one of the pipeline stages in <code>dstStageMask</code>, as specified in the <a href=\"#synchronization-access-types-supported\">table of supported access types</a>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency-srcSubpass-02243",
+          "text": " If <code>srcSubpass</code> equals <code>dstSubpass</code>, and <code>srcStageMask</code> and <code>dstStageMask</code> both include a <a href=\"#synchronization-framebuffer-regions\">framebuffer-space stage</a>, then <code>dependencyFlags</code> <strong class=\"purple\">must</strong> include <code>VK_DEPENDENCY_BY_REGION_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency-srcStageMask-parameter",
+          "text": " <code>srcStageMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineStageFlagBits\">VkPipelineStageFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency-srcStageMask-requiredbitmask",
+          "text": " <code>srcStageMask</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency-dstStageMask-parameter",
+          "text": " <code>dstStageMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineStageFlagBits\">VkPipelineStageFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency-dstStageMask-requiredbitmask",
+          "text": " <code>dstStageMask</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency-srcAccessMask-parameter",
+          "text": " <code>srcAccessMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkAccessFlagBits\">VkAccessFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency-dstAccessMask-parameter",
+          "text": " <code>dstAccessMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkAccessFlagBits\">VkAccessFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency-dependencyFlags-parameter",
+          "text": " <code>dependencyFlags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkDependencyFlagBits\">VkDependencyFlagBits</a> values"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-VkSubpassDependency-dependencyFlags-02520",
+          "text": " If <code>dependencyFlags</code> includes <code>VK_DEPENDENCY_VIEW_LOCAL_BIT</code>, <code>srcSubpass</code> <strong class=\"purple\">must</strong> not be equal to <code>VK_SUBPASS_EXTERNAL</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency-dependencyFlags-02521",
+          "text": " If <code>dependencyFlags</code> includes <code>VK_DEPENDENCY_VIEW_LOCAL_BIT</code>, <code>dstSubpass</code> <strong class=\"purple\">must</strong> not be equal to <code>VK_SUBPASS_EXTERNAL</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency-srcSubpass-00872",
+          "text": " If <code>srcSubpass</code> equals <code>dstSubpass</code> and that subpass has more than one bit set in the view mask, then <code>dependencyFlags</code> <strong class=\"purple\">must</strong> include <code>VK_DEPENDENCY_VIEW_LOCAL_BIT</code>"
+        }
+      ],
+      "(VK_NV_mesh_shader)": [
+        {
+          "vuid": "VUID-VkSubpassDependency-srcStageMask-02099",
+          "text": " If the <a href=\"#features-meshShader\">mesh shaders</a> feature is not enabled, <code>srcStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency-srcStageMask-02100",
+          "text": " If the <a href=\"#features-taskShader\">task shaders</a> feature is not enabled, <code>srcStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency-dstStageMask-02101",
+          "text": " If the <a href=\"#features-meshShader\">mesh shaders</a> feature is not enabled, <code>dstStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency-dstStageMask-02102",
+          "text": " If the <a href=\"#features-taskShader\">task shaders</a> feature is not enabled, <code>dstStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV</code>"
+        }
+      ]
+    },
+    "vkCreateRenderPass2KHR": {
+      "(VK_KHR_create_renderpass2)": [
+        {
+          "vuid": "VUID-vkCreateRenderPass2KHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateRenderPass2KHR-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkRenderPassCreateInfo2KHR\">VkRenderPassCreateInfo2KHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateRenderPass2KHR-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateRenderPass2KHR-pRenderPass-parameter",
+          "text": " <code>pRenderPass</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkRenderPass\">VkRenderPass</a> handle"
+        }
+      ]
+    },
+    "VkRenderPassCreateInfo2KHR": {
+      "(VK_KHR_create_renderpass2)": [
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo2KHR-None-03049",
+          "text": " If any two subpasses operate on attachments with overlapping ranges of the same <code>VkDeviceMemory</code> object, and at least one subpass writes to that area of <code>VkDeviceMemory</code>, a subpass dependency <strong class=\"purple\">must</strong> be included (either directly or via some intermediate subpasses) between them"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo2KHR-attachment-03050",
+          "text": " If the <code>attachment</code> member of any element of <code>pInputAttachments</code>, <code>pColorAttachments</code>, <code>pResolveAttachments</code> or <code>pDepthStencilAttachment</code>, or the attachment indexed by any element of <code>pPreserveAttachments</code> in any given element of <code>pSubpasses</code> is bound to a range of a <code>VkDeviceMemory</code> object that overlaps with any other attachment in any subpass (including the same subpass), the <code>VkAttachmentDescription2KHR</code> structures describing them <strong class=\"purple\">must</strong> include <code>VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT</code> in <code>flags</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo2KHR-attachment-03051",
+          "text": " If the <code>attachment</code> member of any element of <code>pInputAttachments</code>, <code>pColorAttachments</code>, <code>pResolveAttachments</code> or <code>pDepthStencilAttachment</code>, or any element of <code>pPreserveAttachments</code> in any given element of <code>pSubpasses</code> is not <code>VK_ATTACHMENT_UNUSED</code>, it <strong class=\"purple\">must</strong> be less than <code>attachmentCount</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo2KHR-pAttachments-02522",
+          "text": " For any member of <code>pAttachments</code> with a <code>loadOp</code> equal to <code>VK_ATTACHMENT_LOAD_OP_CLEAR</code>, the first use of that attachment <strong class=\"purple\">must</strong> not specify a <code>layout</code> equal to <code>VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL</code>, or <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo2KHR-pAttachments-02523",
+          "text": " For any member of <code>pAttachments</code> with a <code>stencilLoadOp</code> equal to <code>VK_ATTACHMENT_LOAD_OP_CLEAR</code>, the first use of that attachment <strong class=\"purple\">must</strong> not specify a <code>layout</code> equal to <code>VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL</code>, or <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL</code>."
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03054",
+          "text": " For any element of <code>pDependencies</code>, if the <code>srcSubpass</code> is not <code>VK_SUBPASS_EXTERNAL</code>, all stage flags included in the <code>srcStageMask</code> member of that dependency <strong class=\"purple\">must</strong> be a pipeline stage supported by the <a href=\"#synchronization-pipeline-stages-types\">pipeline</a> identified by the <code>pipelineBindPoint</code> member of the source subpass"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03055",
+          "text": " For any element of <code>pDependencies</code>, if the <code>dstSubpass</code> is not <code>VK_SUBPASS_EXTERNAL</code>, all stage flags included in the <code>dstStageMask</code> member of that dependency <strong class=\"purple\">must</strong> be a pipeline stage supported by the <a href=\"#synchronization-pipeline-stages-types\">pipeline</a> identified by the <code>pipelineBindPoint</code> member of the destination subpass"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo2KHR-pCorrelatedViewMasks-03056",
+          "text": " The set of bits included in any element of <code>pCorrelatedViewMasks</code> <strong class=\"purple\">must</strong> not overlap with the set of bits included in any other element of <code>pCorrelatedViewMasks</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo2KHR-viewMask-03057",
+          "text": " If the <a href=\"#VkSubpassDescription2KHR\">VkSubpassDescription2KHR</a>::<code>viewMask</code> member of all elements of <code>pSubpasses</code> is <code>0</code>, <code>correlatedViewMaskCount</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo2KHR-viewMask-03058",
+          "text": " The <a href=\"#VkSubpassDescription2KHR\">VkSubpassDescription2KHR</a>::<code>viewMask</code> member of all elements of <code>pSubpasses</code> <strong class=\"purple\">must</strong> either all be <code>0</code>, or all not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo2KHR-viewMask-03059",
+          "text": " If the <a href=\"#VkSubpassDescription2KHR\">VkSubpassDescription2KHR</a>::<code>viewMask</code> member of all elements of <code>pSubpasses</code> is <code>0</code>, the <code>dependencyFlags</code> member of any element of <code>pDependencies</code> <strong class=\"purple\">must</strong> not include <code>VK_DEPENDENCY_VIEW_LOCAL_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03060",
+          "text": " For any element of <code>pDependencies</code> where its <code>srcSubpass</code> member equals its <code>dstSubpass</code> member, if the <code>viewMask</code> member of the corresponding element of <code>pSubpasses</code> includes more than one bit, its <code>dependencyFlags</code> member <strong class=\"purple\">must</strong> include <code>VK_DEPENDENCY_VIEW_LOCAL_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo2KHR-viewMask-02524",
+          "text": " The <code>viewMask</code> member <strong class=\"purple\">must</strong> not have a bit set at an index greater than or equal to <a href=\"#VkPhysicalDeviceLimits\">VkPhysicalDeviceLimits</a>::<code>maxFramebufferLayers</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo2KHR-attachment-02525",
+          "text": " If the <code>attachment</code> member of any element of the <code>pInputAttachments</code> member of any element of <code>pSubpasses</code> is not <code>VK_ATTACHMENT_UNUSED</code>, the <code>aspectMask</code> member of that element of <code>pInputAttachments</code> <strong class=\"purple\">must</strong> only include aspects that are present in images of the format specified by the element of <code>pAttachments</code> specified by <code>attachment</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo2KHR-srcSubpass-02526",
+          "text": " The <code>srcSubpass</code> member of each element of <code>pDependencies</code> <strong class=\"purple\">must</strong> be less than <code>subpassCount</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo2KHR-dstSubpass-02527",
+          "text": " The <code>dstSubpass</code> member of each element of <code>pDependencies</code> <strong class=\"purple\">must</strong> be less than <code>subpassCount</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo2KHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo2KHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkRenderPassFragmentDensityMapCreateInfoEXT\">VkRenderPassFragmentDensityMapCreateInfoEXT</a>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo2KHR-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo2KHR-pAttachments-parameter",
+          "text": " If <code>attachmentCount</code> is not <code>0</code>, <code>pAttachments</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>attachmentCount</code> valid <a href=\"#VkAttachmentDescription2KHR\">VkAttachmentDescription2KHR</a> structures"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo2KHR-pSubpasses-parameter",
+          "text": " <code>pSubpasses</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>subpassCount</code> valid <a href=\"#VkSubpassDescription2KHR\">VkSubpassDescription2KHR</a> structures"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo2KHR-pDependencies-parameter",
+          "text": " If <code>dependencyCount</code> is not <code>0</code>, <code>pDependencies</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>dependencyCount</code> valid <a href=\"#VkSubpassDependency2KHR\">VkSubpassDependency2KHR</a> structures"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo2KHR-pCorrelatedViewMasks-parameter",
+          "text": " If <code>correlatedViewMaskCount</code> is not <code>0</code>, <code>pCorrelatedViewMasks</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>correlatedViewMaskCount</code> <code>uint32_t</code> values"
+        },
+        {
+          "vuid": "VUID-VkRenderPassCreateInfo2KHR-subpassCount-arraylength",
+          "text": " <code>subpassCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkAttachmentDescription2KHR": {
+      "(VK_KHR_create_renderpass2)": [
+        {
+          "vuid": "VUID-VkAttachmentDescription2KHR-finalLayout-03061",
+          "text": " <code>finalLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_UNDEFINED</code> or <code>VK_IMAGE_LAYOUT_PREINITIALIZED</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription2KHR-format-03294",
+          "text": " If <code>format</code> is a color format, name:initialLayout <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL</code>, or <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription2KHR-format-03295",
+          "text": " If <code>format</code> is a depth/stencil format, name:initialLayout <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription2KHR-format-03296",
+          "text": " If <code>format</code> is a color format, name:finalLayout <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL</code>, or <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription2KHR-format-03297",
+          "text": " If <code>format</code> is a depth/stencil format, name:finalLayout <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription2KHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription2KHR-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkAttachmentDescriptionFlagBits\">VkAttachmentDescriptionFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription2KHR-format-parameter",
+          "text": " <code>format</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFormat\">VkFormat</a> value"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription2KHR-samples-parameter",
+          "text": " <code>samples</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSampleCountFlagBits\">VkSampleCountFlagBits</a> value"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription2KHR-loadOp-parameter",
+          "text": " <code>loadOp</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkAttachmentLoadOp\">VkAttachmentLoadOp</a> value"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription2KHR-storeOp-parameter",
+          "text": " <code>storeOp</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkAttachmentStoreOp\">VkAttachmentStoreOp</a> value"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription2KHR-stencilLoadOp-parameter",
+          "text": " <code>stencilLoadOp</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkAttachmentLoadOp\">VkAttachmentLoadOp</a> value"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription2KHR-stencilStoreOp-parameter",
+          "text": " <code>stencilStoreOp</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkAttachmentStoreOp\">VkAttachmentStoreOp</a> value"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription2KHR-initialLayout-parameter",
+          "text": " <code>initialLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageLayout\">VkImageLayout</a> value"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription2KHR-finalLayout-parameter",
+          "text": " <code>finalLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageLayout\">VkImageLayout</a> value"
+        }
+      ],
+      "(VK_KHR_create_renderpass2)+(VK_KHR_separate_depth_stencil_layouts)": [
+        {
+          "vuid": "VUID-VkAttachmentDescription2KHR-separateDepthStencilLayouts-03298",
+          "text": " If the <a href=\"#features-separateDepthStencilLayouts\"><code>separateDepthStencilLayouts</code></a> feature is not enabled, <code>initialLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR</code> or <code>VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription2KHR-separateDepthStencilLayouts-03299",
+          "text": " If the <a href=\"#features-separateDepthStencilLayouts\"><code>separateDepthStencilLayouts</code></a> feature is not enabled, <code>finalLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR</code> or <code>VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription2KHR-format-03300",
+          "text": " If <code>format</code> is a color format, <code>initialLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR</code> or <code>VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription2KHR-format-03301",
+          "text": " If <code>format</code> is a color format, <code>finalLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR</code> or <code>VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription2KHR-format-03302",
+          "text": " If <code>format</code> is a depth/stencil format which includes both depth and stencil aspects, and <code>initialLayout</code> is <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR</code> or <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR</code>, the <code>pNext</code> chain <strong class=\"purple\">must</strong> include a <a href=\"#VkAttachmentDescriptionStencilLayoutKHR\">VkAttachmentDescriptionStencilLayoutKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription2KHR-format-03303",
+          "text": " If <code>format</code> is a depth/stencil format which includes both depth and stencil aspects, and <code>finalLayout</code> is <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR</code> or <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR</code>, the <code>pNext</code> chain <strong class=\"purple\">must</strong> include a <a href=\"#VkAttachmentDescriptionStencilLayoutKHR\">VkAttachmentDescriptionStencilLayoutKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription2KHR-format-03304",
+          "text": " If <code>format</code> is a depth/stencil format which includes only the depth aspect, <code>initialLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR</code> or <code>VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription2KHR-format-03305",
+          "text": " If <code>format</code> is a depth/stencil format which includes only the depth aspect, <code>finalLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR</code> or <code>VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription2KHR-format-03306",
+          "text": " If <code>format</code> is a depth/stencil format which includes only the stencil aspect, <code>initialLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR</code> or <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescription2KHR-format-03307",
+          "text": " If <code>format</code> is a depth/stencil format which includes only the stencil aspect, <code>finalLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR</code> or <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR</code>"
+        }
+      ]
+    },
+    "VkAttachmentDescriptionStencilLayoutKHR": {
+      "(VK_KHR_create_renderpass2)+(VK_KHR_separate_depth_stencil_layouts)": [
+        {
+          "vuid": "VUID-VkAttachmentDescriptionStencilLayoutKHR-stencilInitialLayout-03308",
+          "text": " <code>stencilInitialLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL</code>, or <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescriptionStencilLayoutKHR-stencilFinalLayout-03309",
+          "text": " <code>stencilFinalLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL</code>, or <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescriptionStencilLayoutKHR-stencilFinalLayout-03310",
+          "text": " <code>stencilFinalLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_UNDEFINED</code> or <code>VK_IMAGE_LAYOUT_PREINITIALIZED</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescriptionStencilLayoutKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescriptionStencilLayoutKHR-stencilInitialLayout-parameter",
+          "text": " <code>stencilInitialLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageLayout\">VkImageLayout</a> value"
+        },
+        {
+          "vuid": "VUID-VkAttachmentDescriptionStencilLayoutKHR-stencilFinalLayout-parameter",
+          "text": " <code>stencilFinalLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageLayout\">VkImageLayout</a> value"
+        }
+      ]
+    },
+    "VkSubpassDescription2KHR": {
+      "(VK_KHR_create_renderpass2)": [
+        {
+          "vuid": "VUID-VkSubpassDescription2KHR-pipelineBindPoint-03062",
+          "text": " <code>pipelineBindPoint</code> <strong class=\"purple\">must</strong> be <code>VK_PIPELINE_BIND_POINT_GRAPHICS</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription2KHR-colorAttachmentCount-03063",
+          "text": " <code>colorAttachmentCount</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxColorAttachments</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription2KHR-loadOp-03064",
+          "text": " If the first use of an attachment in this render pass is as an input attachment, and the attachment is not also used as a color or depth/stencil attachment in the same subpass, then <code>loadOp</code> <strong class=\"purple\">must</strong> not be <code>VK_ATTACHMENT_LOAD_OP_CLEAR</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription2KHR-pResolveAttachments-03065",
+          "text": " If <code>pResolveAttachments</code> is not <code>NULL</code>, for each resolve attachment that does not have the value <code>VK_ATTACHMENT_UNUSED</code>, the corresponding color attachment <strong class=\"purple\">must</strong> not have the value <code>VK_ATTACHMENT_UNUSED</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription2KHR-pResolveAttachments-03066",
+          "text": " If <code>pResolveAttachments</code> is not <code>NULL</code>, for each resolve attachment that is not <code>VK_ATTACHMENT_UNUSED</code>, the corresponding color attachment <strong class=\"purple\">must</strong> not have a sample count of <code>VK_SAMPLE_COUNT_1_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription2KHR-pResolveAttachments-03067",
+          "text": " If <code>pResolveAttachments</code> is not <code>NULL</code>, each resolve attachment that is not <code>VK_ATTACHMENT_UNUSED</code> <strong class=\"purple\">must</strong> have a sample count of <code>VK_SAMPLE_COUNT_1_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription2KHR-pResolveAttachments-03068",
+          "text": " Any given element of <code>pResolveAttachments</code> <strong class=\"purple\">must</strong> have the same <a href=\"#VkFormat\">VkFormat</a> as its corresponding color attachment"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription2KHR-pColorAttachments-03069",
+          "text": " All attachments in <code>pColorAttachments</code> that are not <code>VK_ATTACHMENT_UNUSED</code> <strong class=\"purple\">must</strong> have the same sample count"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription2KHR-pDepthStencilAttachment-03071",
+          "text": " If neither the <code>VK_AMD_mixed_attachment_samples</code> nor the <code>VK_NV_framebuffer_mixed_samples</code> extensions are enabled, and if <code>pDepthStencilAttachment</code> is not <code>VK_ATTACHMENT_UNUSED</code> and any attachments in <code>pColorAttachments</code> are not <code>VK_ATTACHMENT_UNUSED</code>, they <strong class=\"purple\">must</strong> have the same sample count"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription2KHR-attachment-03073",
+          "text": " The <code>attachment</code> member of any element of <code>pPreserveAttachments</code> <strong class=\"purple\">must</strong> not be <code>VK_ATTACHMENT_UNUSED</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription2KHR-pPreserveAttachments-03074",
+          "text": " Any given element of <code>pPreserveAttachments</code> <strong class=\"purple\">must</strong> not also be an element of any other member of the subpass description"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription2KHR-layout-02528",
+          "text": " If any attachment is used by more than one <a href=\"#VkAttachmentReference\">VkAttachmentReference</a> member, then each use <strong class=\"purple\">must</strong> use the same <code>layout</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription2KHR-attachment-02799",
+          "text": " If the <code>attachment</code> member of any element of <code>pInputAttachments</code> is not <code>VK_ATTACHMENT_UNUSED</code>, then the <code>aspectMask</code> member <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkImageAspectFlagBits\">VkImageAspectFlagBits</a>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription2KHR-attachment-02800",
+          "text": " If the <code>attachment</code> member of any element of <code>pInputAttachments</code> is not <code>VK_ATTACHMENT_UNUSED</code>, then the <code>aspectMask</code> member <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription2KHR-attachment-02801",
+          "text": " If the <code>attachment</code> member of any element of <code>pInputAttachments</code> is not <code>VK_ATTACHMENT_UNUSED</code>, then the <code>aspectMask</code> member <strong class=\"purple\">must</strong> not include <code>VK_IMAGE_ASPECT_METADATA_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription2KHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription2KHR-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkSubpassDescriptionFlagBits\">VkSubpassDescriptionFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription2KHR-pipelineBindPoint-parameter",
+          "text": " <code>pipelineBindPoint</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipelineBindPoint\">VkPipelineBindPoint</a> value"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription2KHR-pInputAttachments-parameter",
+          "text": " If <code>inputAttachmentCount</code> is not <code>0</code>, <code>pInputAttachments</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>inputAttachmentCount</code> valid <a href=\"#VkAttachmentReference2KHR\">VkAttachmentReference2KHR</a> structures"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription2KHR-pColorAttachments-parameter",
+          "text": " If <code>colorAttachmentCount</code> is not <code>0</code>, <code>pColorAttachments</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>colorAttachmentCount</code> valid <a href=\"#VkAttachmentReference2KHR\">VkAttachmentReference2KHR</a> structures"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription2KHR-pResolveAttachments-parameter",
+          "text": " If <code>colorAttachmentCount</code> is not <code>0</code>, and <code>pResolveAttachments</code> is not <code>NULL</code>, <code>pResolveAttachments</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>colorAttachmentCount</code> valid <a href=\"#VkAttachmentReference2KHR\">VkAttachmentReference2KHR</a> structures"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription2KHR-pDepthStencilAttachment-parameter",
+          "text": " If <code>pDepthStencilAttachment</code> is not <code>NULL</code>, <code>pDepthStencilAttachment</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAttachmentReference2KHR\">VkAttachmentReference2KHR</a> structure"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescription2KHR-pPreserveAttachments-parameter",
+          "text": " If <code>preserveAttachmentCount</code> is not <code>0</code>, <code>pPreserveAttachments</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>preserveAttachmentCount</code> <code>uint32_t</code> values"
+        }
+      ],
+      "(VK_KHR_create_renderpass2)+(VK_AMD_mixed_attachment_samples)": [
+        {
+          "vuid": "VUID-VkSubpassDescription2KHR-pColorAttachments-03070",
+          "text": " If the <code>VK_AMD_mixed_attachment_samples</code> extension is enabled, all attachments in <code>pColorAttachments</code> that are not <code>VK_ATTACHMENT_UNUSED</code> <strong class=\"purple\">must</strong> have a sample count that is smaller than or equal to the sample count of <code>pDepthStencilAttachment</code> if it is not <code>VK_ATTACHMENT_UNUSED</code>"
+        }
+      ],
+      "(VK_KHR_create_renderpass2)+(VK_NVX_multiview_per_view_attributes)": [
+        {
+          "vuid": "VUID-VkSubpassDescription2KHR-flags-03076",
+          "text": " If <code>flags</code> includes <code>VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX</code>, it <strong class=\"purple\">must</strong> also include <code>VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX</code>."
+        }
+      ]
+    },
+    "VkSubpassDescriptionDepthStencilResolveKHR": {
+      "(VK_KHR_create_renderpass2)+(VK_KHR_depth_stencil_resolve)": [
+        {
+          "vuid": "VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03177",
+          "text": " If <code>pDepthStencilResolveAttachment</code> is not <code>NULL</code> and does not have the value <code>VK_ATTACHMENT_UNUSED</code>, <code>pDepthStencilAttachment</code> <strong class=\"purple\">must</strong> not have the value <code>VK_ATTACHMENT_UNUSED</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03178",
+          "text": " If <code>pDepthStencilResolveAttachment</code> is not <code>NULL</code> and does not have the value <code>VK_ATTACHMENT_UNUSED</code>, <code>depthResolveMode</code> and <code>stencilResolveMode</code> <strong class=\"purple\">must</strong> not both be <code>VK_RESOLVE_MODE_NONE_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03179",
+          "text": " If <code>pDepthStencilResolveAttachment</code> is not <code>NULL</code> and does not have the value <code>VK_ATTACHMENT_UNUSED</code>, <code>pDepthStencilAttachment</code> <strong class=\"purple\">must</strong> not have a sample count of <code>VK_SAMPLE_COUNT_1_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03180",
+          "text": " If <code>pDepthStencilResolveAttachment</code> is not <code>NULL</code> and does not have the value <code>VK_ATTACHMENT_UNUSED</code>, <code>pDepthStencilResolveAttachment</code> <strong class=\"purple\">must</strong> have a sample count of <code>VK_SAMPLE_COUNT_1_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-02651",
+          "text": " If <code>pDepthStencilResolveAttachment</code> is not <code>NULL</code> and does not have the value <code>VK_ATTACHMENT_UNUSED</code> then it <strong class=\"purple\">must</strong> have a format whose features contain <code>VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03181",
+          "text": " If the <a href=\"#VkFormat\">VkFormat</a> of <code>pDepthStencilResolveAttachment</code> has a depth component, then the <a href=\"#VkFormat\">VkFormat</a> of <code>pDepthStencilAttachment</code> <strong class=\"purple\">must</strong> have a depth component with the same number of bits and numerical type"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03182",
+          "text": " If the <a href=\"#VkFormat\">VkFormat</a> of <code>pDepthStencilResolveAttachment</code> has a stencil component, then the <a href=\"#VkFormat\">VkFormat</a> of <code>pDepthStencilAttachment</code> <strong class=\"purple\">must</strong> have a stencil component with the same number of bits and numerical type"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescriptionDepthStencilResolveKHR-depthResolveMode-03183",
+          "text": " The value of <code>depthResolveMode</code> <strong class=\"purple\">must</strong> be one of the bits set in <a href=\"#VkPhysicalDeviceDepthStencilResolvePropertiesKHR\">VkPhysicalDeviceDepthStencilResolvePropertiesKHR</a>::<code>supportedDepthResolveModes</code> or <code>VK_RESOLVE_MODE_NONE_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescriptionDepthStencilResolveKHR-stencilResolveMode-03184",
+          "text": " The value of <code>stencilResolveMode</code> <strong class=\"purple\">must</strong> be one of the bits set in <a href=\"#VkPhysicalDeviceDepthStencilResolvePropertiesKHR\">VkPhysicalDeviceDepthStencilResolvePropertiesKHR</a>::<code>supportedStencilResolveModes</code> or <code>VK_RESOLVE_MODE_NONE_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03185",
+          "text": " If the <a href=\"#VkFormat\">VkFormat</a> of <code>pDepthStencilResolveAttachment</code> has both depth and stencil components, <a href=\"#VkPhysicalDeviceDepthStencilResolvePropertiesKHR\">VkPhysicalDeviceDepthStencilResolvePropertiesKHR</a>::<code>independentResolve</code> is <code>VK_FALSE</code>, and <a href=\"#VkPhysicalDeviceDepthStencilResolvePropertiesKHR\">VkPhysicalDeviceDepthStencilResolvePropertiesKHR</a>::<code>independentResolveNone</code> is <code>VK_FALSE</code>, then the values of <code>depthResolveMode</code> and <code>stencilResolveMode</code> <strong class=\"purple\">must</strong> be identical"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03186",
+          "text": " If the <a href=\"#VkFormat\">VkFormat</a> of <code>pDepthStencilResolveAttachment</code> has both depth and stencil components, <a href=\"#VkPhysicalDeviceDepthStencilResolvePropertiesKHR\">VkPhysicalDeviceDepthStencilResolvePropertiesKHR</a>::<code>independentResolve</code> is <code>VK_FALSE</code> and <a href=\"#VkPhysicalDeviceDepthStencilResolvePropertiesKHR\">VkPhysicalDeviceDepthStencilResolvePropertiesKHR</a>::<code>independentResolveNone</code> is <code>VK_TRUE</code>, then the values of <code>depthResolveMode</code> and <code>stencilResolveMode</code> <strong class=\"purple\">must</strong> be identical or one of them <strong class=\"purple\">must</strong> be <code>VK_RESOLVE_MODE_NONE_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescriptionDepthStencilResolveKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescriptionDepthStencilResolveKHR-depthResolveMode-parameter",
+          "text": " <code>depthResolveMode</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkResolveModeFlagBitsKHR\">VkResolveModeFlagBitsKHR</a> value"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescriptionDepthStencilResolveKHR-stencilResolveMode-parameter",
+          "text": " <code>stencilResolveMode</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkResolveModeFlagBitsKHR\">VkResolveModeFlagBitsKHR</a> value"
+        },
+        {
+          "vuid": "VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-parameter",
+          "text": " If <code>pDepthStencilResolveAttachment</code> is not <code>NULL</code>, <code>pDepthStencilResolveAttachment</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAttachmentReference2KHR\">VkAttachmentReference2KHR</a> structure"
+        }
+      ]
+    },
+    "VkAttachmentReference2KHR": {
+      "(VK_KHR_create_renderpass2)": [
+        {
+          "vuid": "VUID-VkAttachmentReference2KHR-layout-03077",
+          "text": " If <code>attachment</code> is not <code>VK_ATTACHMENT_UNUSED</code>, <code>layout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_UNDEFINED</code>, <code>VK_IMAGE_LAYOUT_PREINITIALIZED</code>, or <code>VK_IMAGE_LAYOUT_PRESENT_SRC_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentReference2KHR-attachment-03311",
+          "text": " If <code>attachment</code> is not <code>VK_ATTACHMENT_UNUSED</code>, and <code>aspectMask</code> does not include <code>VK_IMAGE_ASPECT_STENCIL_BIT</code> or <code>VK_IMAGE_ASPECT_DEPTH_BIT</code>, <code>layout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL</code>, or <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentReference2KHR-attachment-03312",
+          "text": " If <code>attachment</code> is not <code>VK_ATTACHMENT_UNUSED</code>, and <code>aspectMask</code> does not include <code>VK_IMAGE_ASPECT_COLOR_BIT</code>, <code>layout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentReference2KHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentReference2KHR-layout-parameter",
+          "text": " <code>layout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageLayout\">VkImageLayout</a> value"
+        }
+      ],
+      "(VK_KHR_create_renderpass2)+(VK_KHR_separate_depth_stencil_layouts)": [
+        {
+          "vuid": "VUID-VkAttachmentReference2KHR-separateDepthStencilLayouts-03313",
+          "text": " If the <a href=\"#features-separateDepthStencilLayouts\"><code>separateDepthStencilLayouts</code></a> feature is not enabled, and <code>attachment</code> is not <code>VK_ATTACHMENT_UNUSED</code>, <code>layout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR</code>, or <code>VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR</code>,"
+        },
+        {
+          "vuid": "VUID-VkAttachmentReference2KHR-attachment-03314",
+          "text": " If <code>attachment</code> is not <code>VK_ATTACHMENT_UNUSED</code>, and <code>aspectMask</code> includes <code>VK_IMAGE_ASPECT_COLOR_BIT</code>, <code>layout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR</code>, or <code>VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR</code>,"
+        },
+        {
+          "vuid": "VUID-VkAttachmentReference2KHR-attachment-03315",
+          "text": " If <code>attachment</code> is not <code>VK_ATTACHMENT_UNUSED</code>, and <code>aspectMask</code> includes both <code>VK_IMAGE_ASPECT_DEPTH_BIT</code> and <code>VK_IMAGE_ASPECT_STENCIL_BIT</code>, and <code>layout</code> is <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR</code> or <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR</code>, the <code>pNext</code> chain <strong class=\"purple\">must</strong> include a <a href=\"#VkAttachmentReferenceStencilLayoutKHR\">VkAttachmentReferenceStencilLayoutKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-VkAttachmentReference2KHR-attachment-03316",
+          "text": " If <code>attachment</code> is not <code>VK_ATTACHMENT_UNUSED</code>, and <code>aspectMask</code> includes only <code>VK_IMAGE_ASPECT_DEPTH_BIT</code> then <code>layout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR</code>, or <code>VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentReference2KHR-attachment-03317",
+          "text": " If <code>attachment</code> is not <code>VK_ATTACHMENT_UNUSED</code>, and <code>aspectMask</code> includes only <code>VK_IMAGE_ASPECT_STENCIL_BIT</code> then <code>layout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR</code>, or <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR</code>"
+        }
+      ]
+    },
+    "VkAttachmentReferenceStencilLayoutKHR": {
+      "(VK_KHR_create_renderpass2)+(VK_KHR_separate_depth_stencil_layouts)": [
+        {
+          "vuid": "VUID-VkAttachmentReferenceStencilLayoutKHR-stencilLayout-03318",
+          "text": " <code>stencilLayout</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_LAYOUT_UNDEFINED</code>, <code>VK_IMAGE_LAYOUT_PREINITIALIZED</code>, <code>VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR</code>, <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL</code>, or <code>VK_IMAGE_LAYOUT_PRESENT_SRC_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentReferenceStencilLayoutKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkAttachmentReferenceStencilLayoutKHR-stencilLayout-parameter",
+          "text": " <code>stencilLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageLayout\">VkImageLayout</a> value"
+        }
+      ]
+    },
+    "VkSubpassDependency2KHR": {
+      "(VK_KHR_create_renderpass2)": [
+        {
+          "vuid": "VUID-VkSubpassDependency2KHR-srcStageMask-03080",
+          "text": " If the <a href=\"#features-geometryShader\">geometry shaders</a> feature is not enabled, <code>srcStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency2KHR-dstStageMask-03081",
+          "text": " If the <a href=\"#features-geometryShader\">geometry shaders</a> feature is not enabled, <code>dstStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency2KHR-srcStageMask-03082",
+          "text": " If the <a href=\"#features-tessellationShader\">tessellation shaders</a> feature is not enabled, <code>srcStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT</code> or <code>VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency2KHR-dstStageMask-03083",
+          "text": " If the <a href=\"#features-tessellationShader\">tessellation shaders</a> feature is not enabled, <code>dstStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT</code> or <code>VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency2KHR-srcSubpass-03084",
+          "text": " <code>srcSubpass</code> <strong class=\"purple\">must</strong> be less than or equal to <code>dstSubpass</code>, unless one of them is <code>VK_SUBPASS_EXTERNAL</code>, to avoid cyclic dependencies and ensure a valid execution order"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency2KHR-srcSubpass-03085",
+          "text": " <code>srcSubpass</code> and <code>dstSubpass</code> <strong class=\"purple\">must</strong> not both be equal to <code>VK_SUBPASS_EXTERNAL</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency2KHR-srcSubpass-03087",
+          "text": " If <code>srcSubpass</code> is equal to <code>dstSubpass</code> and not all of the stages in <code>srcStageMask</code> and <code>dstStageMask</code> are <a href=\"#synchronization-framebuffer-regions\">framebuffer-space stages</a>, the <a href=\"#synchronization-pipeline-stages-order\">logically latest</a> pipeline stage in <code>srcStageMask</code> <strong class=\"purple\">must</strong> be <a href=\"#synchronization-pipeline-stages-order\">logically earlier</a> than or equal to the <a href=\"#synchronization-pipeline-stages-order\">logically earliest</a> pipeline stage in <code>dstStageMask</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency2KHR-srcAccessMask-03088",
+          "text": " Any access flag included in <code>srcAccessMask</code> <strong class=\"purple\">must</strong> be supported by one of the pipeline stages in <code>srcStageMask</code>, as specified in the <a href=\"#synchronization-access-types-supported\">table of supported access types</a>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency2KHR-dstAccessMask-03089",
+          "text": " Any access flag included in <code>dstAccessMask</code> <strong class=\"purple\">must</strong> be supported by one of the pipeline stages in <code>dstStageMask</code>, as specified in the <a href=\"#synchronization-access-types-supported\">table of supported access types</a>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency2KHR-dependencyFlags-03090",
+          "text": " If <code>dependencyFlags</code> includes <code>VK_DEPENDENCY_VIEW_LOCAL_BIT</code>, <code>srcSubpass</code> <strong class=\"purple\">must</strong> not be equal to <code>VK_SUBPASS_EXTERNAL</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency2KHR-dependencyFlags-03091",
+          "text": " If <code>dependencyFlags</code> includes <code>VK_DEPENDENCY_VIEW_LOCAL_BIT</code>, <code>dstSubpass</code> <strong class=\"purple\">must</strong> not be equal to <code>VK_SUBPASS_EXTERNAL</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency2KHR-srcSubpass-02245",
+          "text": " If <code>srcSubpass</code> equals <code>dstSubpass</code>, and <code>srcStageMask</code> and <code>dstStageMask</code> both include a <a href=\"#synchronization-framebuffer-regions\">framebuffer-space stage</a>, then <code>dependencyFlags</code> <strong class=\"purple\">must</strong> include <code>VK_DEPENDENCY_BY_REGION_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency2KHR-viewOffset-02530",
+          "text": " If <code>viewOffset</code> is not equal to <code>0</code>, <code>srcSubpass</code> <strong class=\"purple\">must</strong> not be equal to <code>dstSubpass</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency2KHR-dependencyFlags-03092",
+          "text": " If <code>dependencyFlags</code> does not include <code>VK_DEPENDENCY_VIEW_LOCAL_BIT</code>, <code>viewOffset</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency2KHR-viewOffset-03093",
+          "text": " If <code>viewOffset</code> is not <code>0</code>, <code>srcSubpass</code> <strong class=\"purple\">must</strong> not be equal to <code>dstSubpass</code>."
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency2KHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency2KHR-srcStageMask-parameter",
+          "text": " <code>srcStageMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineStageFlagBits\">VkPipelineStageFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency2KHR-srcStageMask-requiredbitmask",
+          "text": " <code>srcStageMask</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency2KHR-dstStageMask-parameter",
+          "text": " <code>dstStageMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineStageFlagBits\">VkPipelineStageFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency2KHR-dstStageMask-requiredbitmask",
+          "text": " <code>dstStageMask</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency2KHR-srcAccessMask-parameter",
+          "text": " <code>srcAccessMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkAccessFlagBits\">VkAccessFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency2KHR-dstAccessMask-parameter",
+          "text": " <code>dstAccessMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkAccessFlagBits\">VkAccessFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency2KHR-dependencyFlags-parameter",
+          "text": " <code>dependencyFlags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkDependencyFlagBits\">VkDependencyFlagBits</a> values"
+        }
+      ],
+      "(VK_KHR_create_renderpass2)+(VK_NV_mesh_shader)": [
+        {
+          "vuid": "VUID-VkSubpassDependency2KHR-srcStageMask-02103",
+          "text": " If the <a href=\"#features-meshShader\">mesh shaders</a> feature is not enabled, <code>srcStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency2KHR-srcStageMask-02104",
+          "text": " If the <a href=\"#features-taskShader\">task shaders</a> feature is not enabled, <code>srcStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency2KHR-dstStageMask-02105",
+          "text": " If the <a href=\"#features-meshShader\">mesh shaders</a> feature is not enabled, <code>dstStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassDependency2KHR-dstStageMask-02106",
+          "text": " If the <a href=\"#features-taskShader\">task shaders</a> feature is not enabled, <code>dstStageMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV</code>"
+        }
+      ]
+    },
+    "vkDestroyRenderPass": {
+      "core": [
+        {
+          "vuid": "VUID-vkDestroyRenderPass-renderPass-00873",
+          "text": " All submitted commands that refer to <code>renderPass</code> <strong class=\"purple\">must</strong> have completed execution"
+        },
+        {
+          "vuid": "VUID-vkDestroyRenderPass-renderPass-00874",
+          "text": " If <code>VkAllocationCallbacks</code> were provided when <code>renderPass</code> was created, a compatible set of callbacks <strong class=\"purple\">must</strong> be provided here"
+        },
+        {
+          "vuid": "VUID-vkDestroyRenderPass-renderPass-00875",
+          "text": " If no <code>VkAllocationCallbacks</code> were provided when <code>renderPass</code> was created, <code>pAllocator</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroyRenderPass-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyRenderPass-renderPass-parameter",
+          "text": " If <code>renderPass</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>renderPass</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkRenderPass\">VkRenderPass</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyRenderPass-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkDestroyRenderPass-renderPass-parent",
+          "text": " If <code>renderPass</code> is a valid handle, it <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkCreateFramebuffer": {
+      "core": [
+        {
+          "vuid": "VUID-vkCreateFramebuffer-pCreateInfo-02777",
+          "text": " If <code>pCreateInfo</code>-&gt;flags does not include <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, and <code>attachmentCount</code> is not <code>0</code>, each element of <code>pCreateInfo</code>-&gt;pAttachments <strong class=\"purple\">must</strong> have been created on <code>device</code>"
+        },
+        {
+          "vuid": "VUID-vkCreateFramebuffer-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateFramebuffer-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkFramebufferCreateInfo\">VkFramebufferCreateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateFramebuffer-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateFramebuffer-pFramebuffer-parameter",
+          "text": " <code>pFramebuffer</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkFramebuffer\">VkFramebuffer</a> handle"
+        }
+      ]
+    },
+    "VkFramebufferCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-attachmentCount-00876",
+          "text": " <code>attachmentCount</code> <strong class=\"purple\">must</strong> be equal to the attachment count specified in <code>renderPass</code>"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-flags-02778",
+          "text": " If <code>flags</code> does not include <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, and <code>attachmentCount</code> is not <code>0</code>, <code>pAttachments</code> must be a valid pointer to an array of <code>attachmentCount</code> valid <a href=\"#VkImageView\">VkImageView</a> handles"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-00877",
+          "text": " If <code>flags</code> does not include <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, each element of <code>pAttachments</code> that is used as a color attachment or resolve attachment by <code>renderPass</code> <strong class=\"purple\">must</strong> have been created with a <code>usage</code> value including <code>VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-02633",
+          "text": " If <code>flags</code> does not include <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, each element of <code>pAttachments</code> that is used as a depth/stencil attachment by <code>renderPass</code> <strong class=\"purple\">must</strong> have been created with a <code>usage</code> value including <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-00879",
+          "text": " If <code>flags</code> does not include <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, each element of <code>pAttachments</code> that is used as an input attachment by <code>renderPass</code> <strong class=\"purple\">must</strong> have been created with a <code>usage</code> value including <code>VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-00880",
+          "text": " If <code>flags</code> does not include <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, each element of <code>pAttachments</code> <strong class=\"purple\">must</strong> have been created with a <a href=\"#VkFormat\">VkFormat</a> value that matches the <a href=\"#VkFormat\">VkFormat</a> specified by the corresponding <code>VkAttachmentDescription</code> in <code>renderPass</code>"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-00881",
+          "text": " If <code>flags</code> does not include <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, each element of <code>pAttachments</code> <strong class=\"purple\">must</strong> have been created with a <code>samples</code> value that matches the <code>samples</code> value specified by the corresponding <code>VkAttachmentDescription</code> in <code>renderPass</code>"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-00883",
+          "text": " If <code>flags</code> does not include <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, each element of <code>pAttachments</code> <strong class=\"purple\">must</strong> only specify a single mip level"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-00884",
+          "text": " If <code>flags</code> does not include <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, each element of <code>pAttachments</code> <strong class=\"purple\">must</strong> have been created with the identity swizzle"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-width-00885",
+          "text": " <code>width</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>."
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-width-00886",
+          "text": " <code>width</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxFramebufferWidth</code>"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-height-00887",
+          "text": " <code>height</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>."
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-height-00888",
+          "text": " <code>height</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxFramebufferHeight</code>"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-layers-00889",
+          "text": " <code>layers</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>."
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-layers-00890",
+          "text": " <code>layers</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxFramebufferLayers</code>"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-flags-03188",
+          "text": " If <code>flags</code> does not include <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, and <code>attachmentCount</code> is not 0, <code>pAttachments</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>attachmentCount</code> valid <a href=\"#VkImageView\">VkImageView</a> handles"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkFramebufferAttachmentsCreateInfoKHR\">VkFramebufferAttachmentsCreateInfoKHR</a>"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkFramebufferCreateFlagBits\">VkFramebufferCreateFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-renderPass-parameter",
+          "text": " <code>renderPass</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkRenderPass\">VkRenderPass</a> handle"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-commonparent",
+          "text": " Both of <code>renderPass</code>, and the elements of <code>pAttachments</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_KHR_depth_stencil_resolve)": [
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-02634",
+          "text": " If <code>flags</code> does not include <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, each element of <code>pAttachments</code> that is used as a depth/stencil resolve attachment by <code>renderPass</code> <strong class=\"purple\">must</strong> have been created with a <code>usage</code> value including <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>"
+        }
+      ],
+      "(VK_EXT_fragment_density_map)": [
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-02552",
+          "text": " Each element of <code>pAttachments</code> that is used as a fragment density map attachment by <code>renderPass</code> <strong class=\"purple\">must</strong> not have been created with a <code>flags</code> value including <code>VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT</code>."
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-renderPass-02553",
+          "text": " If <code>renderPass</code> has a fragment density map attachment and <a href=\"#features-nonsubsampledimages\">non-subsample image feature</a> is not enabled, each element of <code>pAttachments</code> <strong class=\"purple\">must</strong> have been created with a <code>flags</code> value including <code>VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT</code> unless that element is the fragment density map attachment."
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-02554",
+          "text": " If <code>flags</code> does not include <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, each element of <code>pAttachments</code> <strong class=\"purple\">must</strong> have dimensions at least as large as the corresponding framebuffer dimension except for any element that is referenced by <code>fragmentDensityMapAttachment</code>"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-02555",
+          "text": " If <code>flags</code> does not include <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, an element of <code>pAttachments</code> that is referenced by <code>fragmentDensityMapAttachment</code> <strong class=\"purple\">must</strong> have a width at least as large as \\(\\lceil{\\frac{width}{maxFragmentDensityTexelSize_{width}}}\\rceil\\)"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-02556",
+          "text": " If <code>flags</code> does not include <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, an element of <code>pAttachments</code> that is referenced by <code>fragmentDensityMapAttachment</code> <strong class=\"purple\">must</strong> have a height at least as large as \\(\\lceil{\\frac{height}{maxFragmentDensityTexelSize_{height}}}\\rceil\\)"
+        }
+      ],
+      "!(VK_EXT_fragment_density_map)": [
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-00882",
+          "text": " If <code>flags</code> does not include <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, each element of <code>pAttachments</code> <strong class=\"purple\">must</strong> have dimensions at least as large as the corresponding framebuffer dimension"
+        }
+      ],
+      "!(VK_EXT_fragment_density_map)+(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-renderPass-02743",
+          "text": " If <code>renderPass</code> was specified with non-zero view masks, each element of <code>pAttachments</code> <strong class=\"purple\">must</strong> have a <code>layerCount</code> greater than the index of the most significant bit set in any of those view masks"
+        }
+      ],
+      "(VK_EXT_fragment_density_map)+!(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-02744",
+          "text": " An element of <code>pAttachments</code> that is referenced by <code>fragmentDensityMapAttachment</code> <strong class=\"purple\">must</strong> have a <code>layerCount</code> equal to <code>1</code>"
+        }
+      ],
+      "(VK_EXT_fragment_density_map)+(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-renderPass-02745",
+          "text": " If <code>renderPass</code> was specified with non-zero view masks, each element of <code>pAttachments</code> that is not referenced by <code>fragmentDensityMapAttachment</code> <strong class=\"purple\">must</strong> have a <code>layerCount</code> greater than the index of the most significant bit set in any of those view masks"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-renderPass-02746",
+          "text": " If <code>renderPass</code> was specified with non-zero view masks, each element of <code>pAttachments</code> that is referenced by <code>fragmentDensityMapAttachment</code> <strong class=\"purple\">must</strong> have a <code>layerCount</code> equal to <code>1</code> or greater than the index of the most significant bit set in any of those view masks"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-renderPass-02747",
+          "text": " If <code>renderPass</code> was not specified with non-zero view masks, each element of <code>pAttachments</code> that is referenced by <code>fragmentDensityMapAttachment</code> <strong class=\"purple\">must</strong> have a <code>layerCount</code> equal to <code>1</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-renderPass-02531",
+          "text": " If <code>renderPass</code> was specified with non-zero view masks, <code>layers</code> <strong class=\"purple\">must</strong> be <code>1</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_maintenance1)": [
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-00891",
+          "text": " If <code>flags</code> does not include <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, each element of <code>pAttachments</code> that is a 2D or 2D array image view taken from a 3D image <strong class=\"purple\">must</strong> not be a depth/stencil format"
+        }
+      ],
+      "(VK_KHR_imageless_framebuffer)": [
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-flags-03189",
+          "text": " If the <a href=\"#features-imagelessFramebuffer\">imageless framebuffer</a> feature is not enabled, <code>flags</code> <strong class=\"purple\">must</strong> not include <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-flags-03190",
+          "text": " If <code>flags</code> includes <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, the <code>pNext</code> chain <strong class=\"purple\">must</strong> include a <a href=\"#VkFramebufferAttachmentsCreateInfoKHR\">VkFramebufferAttachmentsCreateInfoKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-flags-03191",
+          "text": " If <code>flags</code> includes <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, the <code>attachmentImageInfoCount</code> member of a <a href=\"#VkFramebufferAttachmentsCreateInfoKHR\">VkFramebufferAttachmentsCreateInfoKHR</a> structure included in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be equal to either zero or <code>attachmentCount</code>"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-flags-03201",
+          "text": " If <code>flags</code> includes <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, the <code>usage</code> member of any element of the <code>pAttachmentImageInfos</code> member of a <a href=\"#VkFramebufferAttachmentsCreateInfoKHR\">VkFramebufferAttachmentsCreateInfoKHR</a> structure included in the <code>pNext</code> chain that refers to an attachment used as a color attachment or resolve attachment by <code>renderPass</code> <strong class=\"purple\">must</strong> include <code>VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-flags-03202",
+          "text": " If <code>flags</code> includes <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, the <code>usage</code> member of any element of the <code>pAttachmentImageInfos</code> member of a <a href=\"#VkFramebufferAttachmentsCreateInfoKHR\">VkFramebufferAttachmentsCreateInfoKHR</a> structure included in the <code>pNext</code> chain that refers to an attachment used as a depth/stencil attachment by <code>renderPass</code> <strong class=\"purple\">must</strong> include <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-flags-03204",
+          "text": " If <code>flags</code> includes <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, the <code>usage</code> member of any element of the <code>pAttachmentImageInfos</code> member of a <a href=\"#VkFramebufferAttachmentsCreateInfoKHR\">VkFramebufferAttachmentsCreateInfoKHR</a> structure included in the <code>pNext</code> chain that refers to an attachment used as an input attachment by <code>renderPass</code> <strong class=\"purple\">must</strong> include <code>VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-flags-03205",
+          "text": " If <code>flags</code> includes <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, at least one element of the <code>pViewFormats</code> member of any element of the <code>pAttachmentImageInfos</code> member of a <a href=\"#VkFramebufferAttachmentsCreateInfoKHR\">VkFramebufferAttachmentsCreateInfoKHR</a> structure included in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be equal to the corresponding value of <a href=\"#VkAttachmentDescription\">VkAttachmentDescription</a>::<code>format</code> used to create <code>renderPass</code>"
+        }
+      ],
+      "(VK_KHR_imageless_framebuffer)+!(VK_EXT_fragment_density_map)": [
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-flags-03192",
+          "text": " If <code>flags</code> includes <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, the <code>width</code> member of any element of the <code>pAttachmentImageInfos</code> member of a <a href=\"#VkFramebufferAttachmentsCreateInfoKHR\">VkFramebufferAttachmentsCreateInfoKHR</a> structure included in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be greater than or equal to <code>width</code>"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-flags-03193",
+          "text": " If <code>flags</code> includes <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, the <code>height</code> member of any element of the <code>pAttachmentImageInfos</code> member of a <a href=\"#VkFramebufferAttachmentsCreateInfoKHR\">VkFramebufferAttachmentsCreateInfoKHR</a> structure included in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be greater than or equal to <code>height</code>"
+        }
+      ],
+      "(VK_KHR_imageless_framebuffer)+(VK_EXT_fragment_density_map)": [
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-flags-03194",
+          "text": " If <code>flags</code> includes <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, the <code>width</code> member of any element of the <code>pAttachmentImageInfos</code> member of a <a href=\"#VkFramebufferAttachmentsCreateInfoKHR\">VkFramebufferAttachmentsCreateInfoKHR</a> structure included in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be greater than or equal to <code>width</code>, except for any element that is referenced by <a href=\"#VkRenderPassFragmentDensityMapCreateInfoEXT\">VkRenderPassFragmentDensityMapCreateInfoEXT</a>::<code>fragmentDensityMapAttachment</code> in <code>renderPass</code>"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-flags-03195",
+          "text": " If <code>flags</code> includes <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, the <code>height</code> member of any element of the <code>pAttachmentImageInfos</code> member of a <a href=\"#VkFramebufferAttachmentsCreateInfoKHR\">VkFramebufferAttachmentsCreateInfoKHR</a> structure included in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be greater than or equal to <code>height</code>, except for any element that is referenced by <a href=\"#VkRenderPassFragmentDensityMapCreateInfoEXT\">VkRenderPassFragmentDensityMapCreateInfoEXT</a>::<code>fragmentDensityMapAttachment</code> in <code>renderPass</code>"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-flags-03196",
+          "text": " If <code>flags</code> includes <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, the <code>width</code> member of any element of the <code>pAttachmentImageInfos</code> member of a <a href=\"#VkFramebufferAttachmentsCreateInfoKHR\">VkFramebufferAttachmentsCreateInfoKHR</a> structure included in the <code>pNext</code> chain that is referenced by <a href=\"#VkRenderPassFragmentDensityMapCreateInfoEXT\">VkRenderPassFragmentDensityMapCreateInfoEXT</a>::<code>fragmentDensityMapAttachment</code> in <code>renderPass</code> <strong class=\"purple\">must</strong> be greater than or equal to \\(\\lceil{\\frac{width}{maxFragmentDensityTexelSize_{width}}}\\rceil\\)"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-flags-03197",
+          "text": " If <code>flags</code> includes <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, the <code>height</code> member of any element of the <code>pAttachmentImageInfos</code> member of a <a href=\"#VkFramebufferAttachmentsCreateInfoKHR\">VkFramebufferAttachmentsCreateInfoKHR</a> structure included in the <code>pNext</code> chain that is referenced by <a href=\"#VkRenderPassFragmentDensityMapCreateInfoEXT\">VkRenderPassFragmentDensityMapCreateInfoEXT</a>::<code>fragmentDensityMapAttachment</code> in <code>renderPass</code> <strong class=\"purple\">must</strong> be greater than or equal to \\(\\lceil{\\frac{height}{maxFragmentDensityTexelSize_{height}}}\\rceil\\)"
+        }
+      ],
+      "(VK_KHR_imageless_framebuffer)+(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-renderPass-03198",
+          "text": " If multiview is enabled for <code>renderPass</code>, and <code>flags</code> includes <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, the <code>layerCount</code> member of any element of the <code>pAttachmentImageInfos</code> member of a <a href=\"#VkFramebufferAttachmentsCreateInfoKHR\">VkFramebufferAttachmentsCreateInfoKHR</a> structure included in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be greater than the maximum bit index set in the view mask in the subpasses in which it is used in <code>renderPass</code>"
+        },
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-renderPass-03199",
+          "text": " If multiview is not enabled for <code>renderPass</code>, and <code>flags</code> includes <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, the <code>layerCount</code> member of any element of the <code>pAttachmentImageInfos</code> member of a <a href=\"#VkFramebufferAttachmentsCreateInfoKHR\">VkFramebufferAttachmentsCreateInfoKHR</a> structure included in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be greater than or equal to <code>layers</code>"
+        }
+      ],
+      "(VK_KHR_imageless_framebuffer)+!(VK_VERSION_1_1+VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-flags-03200",
+          "text": " If <code>flags</code> includes <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, the <code>layerCount</code> member of any element of the <code>pAttachmentImageInfos</code> member of a <a href=\"#VkFramebufferAttachmentsCreateInfoKHR\">VkFramebufferAttachmentsCreateInfoKHR</a> structure included in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be greater than or equal to <code>layers</code>"
+        }
+      ],
+      "(VK_KHR_imageless_framebuffer)+(VK_KHR_depth_stencil_resolve)": [
+        {
+          "vuid": "VUID-VkFramebufferCreateInfo-flags-03203",
+          "text": " If <code>flags</code> includes <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, the <code>usage</code> member of any element of the <code>pAttachmentImageInfos</code> member of a <a href=\"#VkFramebufferAttachmentsCreateInfoKHR\">VkFramebufferAttachmentsCreateInfoKHR</a> structure included in the <code>pNext</code> chain that refers to an attachment used as a depth/stencil resolve attachment by <code>renderPass</code> <strong class=\"purple\">must</strong> include <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>"
+        }
+      ]
+    },
+    "VkFramebufferAttachmentsCreateInfoKHR": {
+      "(VK_KHR_imageless_framebuffer)": [
+        {
+          "vuid": "VUID-VkFramebufferAttachmentsCreateInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkFramebufferAttachmentsCreateInfoKHR-pAttachmentImageInfos-parameter",
+          "text": " If <code>attachmentImageInfoCount</code> is not <code>0</code>, <code>pAttachmentImageInfos</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>attachmentImageInfoCount</code> valid <a href=\"#VkFramebufferAttachmentImageInfoKHR\">VkFramebufferAttachmentImageInfoKHR</a> structures"
+        }
+      ]
+    },
+    "VkFramebufferAttachmentImageInfoKHR": {
+      "(VK_KHR_imageless_framebuffer)": [
+        {
+          "vuid": "VUID-VkFramebufferAttachmentImageInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkFramebufferAttachmentImageInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkFramebufferAttachmentImageInfoKHR-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkImageCreateFlagBits\">VkImageCreateFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkFramebufferAttachmentImageInfoKHR-usage-parameter",
+          "text": " <code>usage</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkImageUsageFlagBits\">VkImageUsageFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkFramebufferAttachmentImageInfoKHR-usage-requiredbitmask",
+          "text": " <code>usage</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkFramebufferAttachmentImageInfoKHR-pViewFormats-parameter",
+          "text": " If <code>viewFormatCount</code> is not <code>0</code>, <code>pViewFormats</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>viewFormatCount</code> valid <a href=\"#VkFormat\">VkFormat</a> values"
+        }
+      ]
+    },
+    "vkDestroyFramebuffer": {
+      "core": [
+        {
+          "vuid": "VUID-vkDestroyFramebuffer-framebuffer-00892",
+          "text": " All submitted commands that refer to <code>framebuffer</code> <strong class=\"purple\">must</strong> have completed execution"
+        },
+        {
+          "vuid": "VUID-vkDestroyFramebuffer-framebuffer-00893",
+          "text": " If <code>VkAllocationCallbacks</code> were provided when <code>framebuffer</code> was created, a compatible set of callbacks <strong class=\"purple\">must</strong> be provided here"
+        },
+        {
+          "vuid": "VUID-vkDestroyFramebuffer-framebuffer-00894",
+          "text": " If no <code>VkAllocationCallbacks</code> were provided when <code>framebuffer</code> was created, <code>pAllocator</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroyFramebuffer-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyFramebuffer-framebuffer-parameter",
+          "text": " If <code>framebuffer</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>framebuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFramebuffer\">VkFramebuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyFramebuffer-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkDestroyFramebuffer-framebuffer-parent",
+          "text": " If <code>framebuffer</code> is a valid handle, it <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkCmdBeginRenderPass": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass-initialLayout-00895",
+          "text": " If any of the <code>initialLayout</code> or <code>finalLayout</code> member of the <code>VkAttachmentDescription</code> structures or the <code>layout</code> member of the <code>VkAttachmentReference</code> structures specified when creating the render pass specified in the <code>renderPass</code> member of <code>pRenderPassBegin</code> is <code>VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL</code> then the corresponding attachment image view of the framebuffer specified in the <code>framebuffer</code> member of <code>pRenderPassBegin</code> <strong class=\"purple\">must</strong> have been created with a <code>usage</code> value including <code>VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass-initialLayout-00897",
+          "text": " If any of the <code>initialLayout</code> or <code>finalLayout</code> member of the <code>VkAttachmentDescription</code> structures or the <code>layout</code> member of the <code>VkAttachmentReference</code> structures specified when creating the render pass specified in the <code>renderPass</code> member of <code>pRenderPassBegin</code> is <code>VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL</code> then the corresponding attachment image view of the framebuffer specified in the <code>framebuffer</code> member of <code>pRenderPassBegin</code> <strong class=\"purple\">must</strong> have been created with a <code>usage</code> value including <code>VK_IMAGE_USAGE_SAMPLED_BIT</code> or <code>VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass-initialLayout-00898",
+          "text": " If any of the <code>initialLayout</code> or <code>finalLayout</code> member of the <code>VkAttachmentDescription</code> structures or the <code>layout</code> member of the <code>VkAttachmentReference</code> structures specified when creating the render pass specified in the <code>renderPass</code> member of <code>pRenderPassBegin</code> is <code>VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL</code> then the corresponding attachment image view of the framebuffer specified in the <code>framebuffer</code> member of <code>pRenderPassBegin</code> <strong class=\"purple\">must</strong> have been created with a <code>usage</code> value including <code>VK_IMAGE_USAGE_TRANSFER_SRC_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass-initialLayout-00899",
+          "text": " If any of the <code>initialLayout</code> or <code>finalLayout</code> member of the <code>VkAttachmentDescription</code> structures or the <code>layout</code> member of the <code>VkAttachmentReference</code> structures specified when creating the render pass specified in the <code>renderPass</code> member of <code>pRenderPassBegin</code> is <code>VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL</code> then the corresponding attachment image view of the framebuffer specified in the <code>framebuffer</code> member of <code>pRenderPassBegin</code> <strong class=\"purple\">must</strong> have been created with a <code>usage</code> value including <code>VK_IMAGE_USAGE_TRANSFER_DST_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass-initialLayout-00900",
+          "text": " If any of the <code>initialLayout</code> members of the <code>VkAttachmentDescription</code> structures specified when creating the render pass specified in the <code>renderPass</code> member of <code>pRenderPassBegin</code> is not <code>VK_IMAGE_LAYOUT_UNDEFINED</code>, then each such <code>initialLayout</code> <strong class=\"purple\">must</strong> be equal to the current layout of the corresponding attachment image subresource of the framebuffer specified in the <code>framebuffer</code> member of <code>pRenderPassBegin</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass-srcStageMask-00901",
+          "text": " The <code>srcStageMask</code> and <code>dstStageMask</code> members of any element of the <code>pDependencies</code> member of <a href=\"#VkRenderPassCreateInfo\">VkRenderPassCreateInfo</a> used to create <code>renderPass</code> <strong class=\"purple\">must</strong> be supported by the capabilities of the queue family identified by the <code>queueFamilyIndex</code> member of the <a href=\"#VkCommandPoolCreateInfo\">VkCommandPoolCreateInfo</a> used to create the command pool which <code>commandBuffer</code> was allocated from"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass-framebuffer-02532",
+          "text": " For any attachment in <code>framebuffer</code> that is used by <code>renderPass</code> and is bound to memory locations that are also bound to another attachment used by <code>renderPass</code>, and if at least one of those uses causes either attachment to be written to, both attachments <strong class=\"purple\">must</strong> have had the <code>VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT</code> set"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass-pRenderPassBegin-parameter",
+          "text": " <code>pRenderPassBegin</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkRenderPassBeginInfo\">VkRenderPassBeginInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass-contents-parameter",
+          "text": " <code>contents</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSubpassContents\">VkSubpassContents</a> value"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called outside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass-bufferlevel",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a primary <code>VkCommandBuffer</code>"
+        }
+      ],
+      "!(VK_VERSION_1_1,VK_KHR_maintenance2)": [
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass-initialLayout-00896",
+          "text": " If any of the <code>initialLayout</code> or <code>finalLayout</code> member of the <code>VkAttachmentDescription</code> structures or the <code>layout</code> member of the <code>VkAttachmentReference</code> structures specified when creating the render pass specified in the <code>renderPass</code> member of <code>pRenderPassBegin</code> is <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL</code>, or <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL</code> then the corresponding attachment image view of the framebuffer specified in the <code>framebuffer</code> member of <code>pRenderPassBegin</code> <strong class=\"purple\">must</strong> have been created with a <code>usage</code> value including <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_maintenance2)": [
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass-initialLayout-01758",
+          "text": " If any of the <code>initialLayout</code> or <code>finalLayout</code> member of the <code>VkAttachmentDescription</code> structures or the <code>layout</code> member of the <code>VkAttachmentReference</code> structures specified when creating the render pass specified in the <code>renderPass</code> member of <code>pRenderPassBegin</code> is <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL</code>, or <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL</code> then the corresponding attachment image view of the framebuffer specified in the <code>framebuffer</code> member of <code>pRenderPassBegin</code> <strong class=\"purple\">must</strong> have been created with a <code>usage</code> value including <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>"
+        }
+      ]
+    },
+    "vkCmdBeginRenderPass2KHR": {
+      "(VK_KHR_create_renderpass2)": [
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass2KHR-framebuffer-02779",
+          "text": " Both the <code>framebuffer</code> and <code>renderPass</code> members of <code>pRenderPassBegin</code> <strong class=\"purple\">must</strong> have been created on the same <a href=\"#VkDevice\">VkDevice</a> that <code>commandBuffer</code> was allocated on"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03094",
+          "text": " If any of the <code>initialLayout</code> or <code>finalLayout</code> member of the <code>VkAttachmentDescription</code> structures or the <code>layout</code> member of the <code>VkAttachmentReference</code> structures specified when creating the render pass specified in the <code>renderPass</code> member of <code>pRenderPassBegin</code> is <code>VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL</code> then the corresponding attachment image view of the framebuffer specified in the <code>framebuffer</code> member of <code>pRenderPassBegin</code> <strong class=\"purple\">must</strong> have been created with a <code>usage</code> value including <code>VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03096",
+          "text": " If any of the <code>initialLayout</code> or <code>finalLayout</code> member of the <code>VkAttachmentDescription</code> structures or the <code>layout</code> member of the <code>VkAttachmentReference</code> structures specified when creating the render pass specified in the <code>renderPass</code> member of <code>pRenderPassBegin</code> is <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL</code>, or <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL</code> then the corresponding attachment image view of the framebuffer specified in the <code>framebuffer</code> member of <code>pRenderPassBegin</code> <strong class=\"purple\">must</strong> have been created with a <code>usage</code> value including <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03097",
+          "text": " If any of the <code>initialLayout</code> or <code>finalLayout</code> member of the <code>VkAttachmentDescription</code> structures or the <code>layout</code> member of the <code>VkAttachmentReference</code> structures specified when creating the render pass specified in the <code>renderPass</code> member of <code>pRenderPassBegin</code> is <code>VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL</code> then the corresponding attachment image view of the framebuffer specified in the <code>framebuffer</code> member of <code>pRenderPassBegin</code> <strong class=\"purple\">must</strong> have been created with a <code>usage</code> value including <code>VK_IMAGE_USAGE_SAMPLED_BIT</code> or <code>VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03098",
+          "text": " If any of the <code>initialLayout</code> or <code>finalLayout</code> member of the <code>VkAttachmentDescription</code> structures or the <code>layout</code> member of the <code>VkAttachmentReference</code> structures specified when creating the render pass specified in the <code>renderPass</code> member of <code>pRenderPassBegin</code> is <code>VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL</code> then the corresponding attachment image view of the framebuffer specified in the <code>framebuffer</code> member of <code>pRenderPassBegin</code> <strong class=\"purple\">must</strong> have been created with a <code>usage</code> value including <code>VK_IMAGE_USAGE_TRANSFER_SRC_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03099",
+          "text": " If any of the <code>initialLayout</code> or <code>finalLayout</code> member of the <code>VkAttachmentDescription</code> structures or the <code>layout</code> member of the <code>VkAttachmentReference</code> structures specified when creating the render pass specified in the <code>renderPass</code> member of <code>pRenderPassBegin</code> is <code>VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL</code> then the corresponding attachment image view of the framebuffer specified in the <code>framebuffer</code> member of <code>pRenderPassBegin</code> <strong class=\"purple\">must</strong> have been created with a <code>usage</code> value including <code>VK_IMAGE_USAGE_TRANSFER_DST_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03100",
+          "text": " If any of the <code>initialLayout</code> members of the <code>VkAttachmentDescription</code> structures specified when creating the render pass specified in the <code>renderPass</code> member of <code>pRenderPassBegin</code> is not <code>VK_IMAGE_LAYOUT_UNDEFINED</code>, then each such <code>initialLayout</code> <strong class=\"purple\">must</strong> be equal to the current layout of the corresponding attachment image subresource of the framebuffer specified in the <code>framebuffer</code> member of <code>pRenderPassBegin</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass2KHR-srcStageMask-03101",
+          "text": " The <code>srcStageMask</code> and <code>dstStageMask</code> members of any element of the <code>pDependencies</code> member of <a href=\"#VkRenderPassCreateInfo\">VkRenderPassCreateInfo</a> used to create <code>renderPass</code> <strong class=\"purple\">must</strong> be supported by the capabilities of the queue family identified by the <code>queueFamilyIndex</code> member of the <a href=\"#VkCommandPoolCreateInfo\">VkCommandPoolCreateInfo</a> used to create the command pool which <code>commandBuffer</code> was allocated from"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass2KHR-framebuffer-02533",
+          "text": " For any attachment in <code>framebuffer</code> that is used by <code>renderPass</code> and is bound to memory locations that are also bound to another attachment used by <code>renderPass</code>, and if at least one of those uses causes either attachment to be written to, both attachments <strong class=\"purple\">must</strong> have had the <code>VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT</code> set"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass2KHR-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass2KHR-pRenderPassBegin-parameter",
+          "text": " <code>pRenderPassBegin</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkRenderPassBeginInfo\">VkRenderPassBeginInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass2KHR-pSubpassBeginInfo-parameter",
+          "text": " <code>pSubpassBeginInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkSubpassBeginInfoKHR\">VkSubpassBeginInfoKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass2KHR-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass2KHR-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass2KHR-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called outside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginRenderPass2KHR-bufferlevel",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a primary <code>VkCommandBuffer</code>"
+        }
+      ]
+    },
+    "VkRenderPassBeginInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkRenderPassBeginInfo-clearValueCount-00902",
+          "text": " <code>clearValueCount</code> <strong class=\"purple\">must</strong> be greater than the largest attachment index in <code>renderPass</code> that specifies a <code>loadOp</code> (or <code>stencilLoadOp</code>, if the attachment has a depth/stencil format) of <code>VK_ATTACHMENT_LOAD_OP_CLEAR</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassBeginInfo-renderPass-00904",
+          "text": " <code>renderPass</code> <strong class=\"purple\">must</strong> be <a href=\"#renderpass-compatibility\">compatible</a> with the <code>renderPass</code> member of the <code>VkFramebufferCreateInfo</code> structure specified when creating <code>framebuffer</code>."
+        },
+        {
+          "vuid": "VUID-VkRenderPassBeginInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassBeginInfo-pNext-pNext",
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkDeviceGroupRenderPassBeginInfo\">VkDeviceGroupRenderPassBeginInfo</a>, <a href=\"#VkRenderPassAttachmentBeginInfoKHR\">VkRenderPassAttachmentBeginInfoKHR</a>, or <a href=\"#VkRenderPassSampleLocationsBeginInfoEXT\">VkRenderPassSampleLocationsBeginInfoEXT</a>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassBeginInfo-sType-unique",
+          "text": " Each <code>sType</code> member in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
+        },
+        {
+          "vuid": "VUID-VkRenderPassBeginInfo-renderPass-parameter",
+          "text": " <code>renderPass</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkRenderPass\">VkRenderPass</a> handle"
+        },
+        {
+          "vuid": "VUID-VkRenderPassBeginInfo-framebuffer-parameter",
+          "text": " <code>framebuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFramebuffer\">VkFramebuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-VkRenderPassBeginInfo-pClearValues-parameter",
+          "text": " If <code>clearValueCount</code> is not <code>0</code>, <code>pClearValues</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>clearValueCount</code> <a href=\"#VkClearValue\">VkClearValue</a> unions"
+        },
+        {
+          "vuid": "VUID-VkRenderPassBeginInfo-commonparent",
+          "text": " Both of <code>framebuffer</code>, and <code>renderPass</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_KHR_imageless_framebuffer)": [
+        {
+          "vuid": "VUID-VkRenderPassBeginInfo-framebuffer-03207",
+          "text": " If <code>framebuffer</code> was created with a <a href=\"#VkFramebufferCreateInfo\">VkFramebufferCreateInfo</a>::<code>flags</code> value that did not include <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, and the <code>pNext</code> chain includes a <a href=\"#VkRenderPassAttachmentBeginInfoKHR\">VkRenderPassAttachmentBeginInfoKHR</a> structure, its <code>attachmentCount</code> <strong class=\"purple\">must</strong> be zero"
+        },
+        {
+          "vuid": "VUID-VkRenderPassBeginInfo-framebuffer-03208",
+          "text": " If <code>framebuffer</code> was created with a <a href=\"#VkFramebufferCreateInfo\">VkFramebufferCreateInfo</a>::<code>flags</code> value that included <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, the <code>attachmentCount</code> of a <a href=\"#VkRenderPassAttachmentBeginInfoKHR\">VkRenderPassAttachmentBeginInfoKHR</a> structure included in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be equal to the value of <a href=\"#VkFramebufferAttachmentsCreateInfoKHR\">VkFramebufferAttachmentsCreateInfoKHR</a>::<code>attachmentImageInfoCount</code> used to create <code>framebuffer</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassBeginInfo-framebuffer-02780",
+          "text": " If <code>framebuffer</code> was created with a <a href=\"#VkFramebufferCreateInfo\">VkFramebufferCreateInfo</a>::<code>flags</code> value that included <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, each element of the <code>pAttachments</code> member of a <a href=\"#VkRenderPassAttachmentBeginInfoKHR\">VkRenderPassAttachmentBeginInfoKHR</a> structure included in the <code>pNext</code> chain <strong class=\"purple\">must</strong> have been created on the same <a href=\"#VkDevice\">VkDevice</a> as <code>framebuffer</code> and <code>renderPass</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassBeginInfo-framebuffer-03209",
+          "text": " If <code>framebuffer</code> was created with a <a href=\"#VkFramebufferCreateInfo\">VkFramebufferCreateInfo</a>::<code>flags</code> value that included <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, each element of the <code>pAttachments</code> member of a <a href=\"#VkRenderPassAttachmentBeginInfoKHR\">VkRenderPassAttachmentBeginInfoKHR</a> structure included in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be a <a href=\"#VkImageView\">VkImageView</a> of an image created with a value of <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a>::<code>flags</code> equal to the <code>flags</code> member of the corresponding element of <a href=\"#VkFramebufferAttachmentsCreateInfoKHR\">VkFramebufferAttachmentsCreateInfoKHR</a>::<code>pAttachments</code> used to create <code>framebuffer</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassBeginInfo-framebuffer-03210",
+          "text": " If <code>framebuffer</code> was created with a <a href=\"#VkFramebufferCreateInfo\">VkFramebufferCreateInfo</a>::<code>flags</code> value that included <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, each element of the <code>pAttachments</code> member of a <a href=\"#VkRenderPassAttachmentBeginInfoKHR\">VkRenderPassAttachmentBeginInfoKHR</a> structure included in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be a <a href=\"#VkImageView\">VkImageView</a> of an image created with a value of <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a>::<code>usage</code> equal to the <code>usage</code> member of the corresponding element of <a href=\"#VkFramebufferAttachmentsCreateInfoKHR\">VkFramebufferAttachmentsCreateInfoKHR</a>::<code>pAttachments</code> used to create <code>framebuffer</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassBeginInfo-framebuffer-03211",
+          "text": " If <code>framebuffer</code> was created with a <a href=\"#VkFramebufferCreateInfo\">VkFramebufferCreateInfo</a>::<code>flags</code> value that included <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, each element of the <code>pAttachments</code> member of a <a href=\"#VkRenderPassAttachmentBeginInfoKHR\">VkRenderPassAttachmentBeginInfoKHR</a> structure included in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be a <a href=\"#VkImageView\">VkImageView</a> with a width equal to the <code>width</code> member of the corresponding element of <a href=\"#VkFramebufferAttachmentsCreateInfoKHR\">VkFramebufferAttachmentsCreateInfoKHR</a>::<code>pAttachments</code> used to create <code>framebuffer</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassBeginInfo-framebuffer-03212",
+          "text": " If <code>framebuffer</code> was created with a <a href=\"#VkFramebufferCreateInfo\">VkFramebufferCreateInfo</a>::<code>flags</code> value that included <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, each element of the <code>pAttachments</code> member of a <a href=\"#VkRenderPassAttachmentBeginInfoKHR\">VkRenderPassAttachmentBeginInfoKHR</a> structure included in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be a <a href=\"#VkImageView\">VkImageView</a> with a height equal to the <code>height</code> member of the corresponding element of <a href=\"#VkFramebufferAttachmentsCreateInfoKHR\">VkFramebufferAttachmentsCreateInfoKHR</a>::<code>pAttachments</code> used to create <code>framebuffer</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassBeginInfo-framebuffer-03213",
+          "text": " If <code>framebuffer</code> was created with a <a href=\"#VkFramebufferCreateInfo\">VkFramebufferCreateInfo</a>::<code>flags</code> value that included <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, each element of the <code>pAttachments</code> member of a <a href=\"#VkRenderPassAttachmentBeginInfoKHR\">VkRenderPassAttachmentBeginInfoKHR</a> structure included in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be a <a href=\"#VkImageView\">VkImageView</a> of an image created with a value of <a href=\"#VkImageViewCreateInfo\">VkImageViewCreateInfo</a>::<code>subresourceRange.layerCount</code> equal to the <code>layerCount</code> member of the corresponding element of <a href=\"#VkFramebufferAttachmentsCreateInfoKHR\">VkFramebufferAttachmentsCreateInfoKHR</a>::<code>pAttachments</code> used to create <code>framebuffer</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassBeginInfo-framebuffer-03214",
+          "text": " If <code>framebuffer</code> was created with a <a href=\"#VkFramebufferCreateInfo\">VkFramebufferCreateInfo</a>::<code>flags</code> value that included <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, each element of the <code>pAttachments</code> member of a <a href=\"#VkRenderPassAttachmentBeginInfoKHR\">VkRenderPassAttachmentBeginInfoKHR</a> structure included in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be a <a href=\"#VkImageView\">VkImageView</a> of an image created with a value of <a href=\"#VkImageFormatListCreateInfoKHR\">VkImageFormatListCreateInfoKHR</a>::<code>viewFormatCount</code> equal to the <code>viewFormatCount</code> member of the corresponding element of <a href=\"#VkFramebufferAttachmentsCreateInfoKHR\">VkFramebufferAttachmentsCreateInfoKHR</a>::<code>pAttachments</code> used to create <code>framebuffer</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassBeginInfo-framebuffer-03215",
+          "text": " If <code>framebuffer</code> was created with a <a href=\"#VkFramebufferCreateInfo\">VkFramebufferCreateInfo</a>::<code>flags</code> value that included <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, each element of the <code>pAttachments</code> member of a <a href=\"#VkRenderPassAttachmentBeginInfoKHR\">VkRenderPassAttachmentBeginInfoKHR</a> structure included in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be a <a href=\"#VkImageView\">VkImageView</a> of an image created with a set of elements in <a href=\"#VkImageFormatListCreateInfoKHR\">VkImageFormatListCreateInfoKHR</a>::<code>pViewFormats</code> equal to the set of elements in the <code>pViewFormats</code> member of the corresponding element of <a href=\"#VkFramebufferAttachmentsCreateInfoKHR\">VkFramebufferAttachmentsCreateInfoKHR</a>::<code>pAttachments</code> used to create <code>framebuffer</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassBeginInfo-framebuffer-03216",
+          "text": " If <code>framebuffer</code> was created with a <a href=\"#VkFramebufferCreateInfo\">VkFramebufferCreateInfo</a>::<code>flags</code> value that included <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, each element of the <code>pAttachments</code> member of a <a href=\"#VkRenderPassAttachmentBeginInfoKHR\">VkRenderPassAttachmentBeginInfoKHR</a> structure included in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be a <a href=\"#VkImageView\">VkImageView</a> of an image created with a value of <a href=\"#VkImageViewCreateInfo\">VkImageViewCreateInfo</a>::<code>format</code> equal to the corresponding value of <a href=\"#VkAttachmentDescription\">VkAttachmentDescription</a>::<code>format</code> in <code>renderPass</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassBeginInfo-framebuffer-03217",
+          "text": " If <code>framebuffer</code> was created with a <a href=\"#VkFramebufferCreateInfo\">VkFramebufferCreateInfo</a>::<code>flags</code> value that included <code>VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR</code>, each element of the <code>pAttachments</code> member of a <a href=\"#VkRenderPassAttachmentBeginInfoKHR\">VkRenderPassAttachmentBeginInfoKHR</a> structure included in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be a <a href=\"#VkImageView\">VkImageView</a> of an image created with a value of <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a>::<code>samples</code> equal to the corresponding value of <a href=\"#VkAttachmentDescription\">VkAttachmentDescription</a>::<code>samples</code> in <code>renderPass</code>"
+        }
+      ]
+    },
+    "VkRenderPassSampleLocationsBeginInfoEXT": {
+      "(VK_EXT_sample_locations)": [
+        {
+          "vuid": "VUID-VkRenderPassSampleLocationsBeginInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassSampleLocationsBeginInfoEXT-pAttachmentInitialSampleLocations-parameter",
+          "text": " If <code>attachmentInitialSampleLocationsCount</code> is not <code>0</code>, <code>pAttachmentInitialSampleLocations</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>attachmentInitialSampleLocationsCount</code> valid <a href=\"#VkAttachmentSampleLocationsEXT\">VkAttachmentSampleLocationsEXT</a> structures"
+        },
+        {
+          "vuid": "VUID-VkRenderPassSampleLocationsBeginInfoEXT-pPostSubpassSampleLocations-parameter",
+          "text": " If <code>postSubpassSampleLocationsCount</code> is not <code>0</code>, <code>pPostSubpassSampleLocations</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>postSubpassSampleLocationsCount</code> valid <a href=\"#VkSubpassSampleLocationsEXT\">VkSubpassSampleLocationsEXT</a> structures"
+        }
+      ]
+    },
+    "VkAttachmentSampleLocationsEXT": {
+      "(VK_EXT_sample_locations)": [
+        {
+          "vuid": "VUID-VkAttachmentSampleLocationsEXT-attachmentIndex-01531",
+          "text": " <code>attachmentIndex</code> <strong class=\"purple\">must</strong> be less than the <code>attachmentCount</code> specified in <a href=\"#VkRenderPassCreateInfo\">VkRenderPassCreateInfo</a> the render pass specified by <a href=\"#VkRenderPassBeginInfo\">VkRenderPassBeginInfo</a>::<code>renderPass</code> was created with"
+        },
+        {
+          "vuid": "VUID-VkAttachmentSampleLocationsEXT-sampleLocationsInfo-parameter",
+          "text": " <code>sampleLocationsInfo</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSampleLocationsInfoEXT\">VkSampleLocationsInfoEXT</a> structure"
+        }
+      ]
+    },
+    "VkSubpassSampleLocationsEXT": {
+      "(VK_EXT_sample_locations)": [
+        {
+          "vuid": "VUID-VkSubpassSampleLocationsEXT-subpassIndex-01532",
+          "text": " <code>subpassIndex</code> <strong class=\"purple\">must</strong> be less than the <code>subpassCount</code> specified in <a href=\"#VkRenderPassCreateInfo\">VkRenderPassCreateInfo</a> the render pass specified by <a href=\"#VkRenderPassBeginInfo\">VkRenderPassBeginInfo</a>::<code>renderPass</code> was created with"
+        },
+        {
+          "vuid": "VUID-VkSubpassSampleLocationsEXT-sampleLocationsInfo-parameter",
+          "text": " <code>sampleLocationsInfo</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSampleLocationsInfoEXT\">VkSampleLocationsInfoEXT</a> structure"
+        }
+      ]
+    },
+    "VkSubpassBeginInfoKHR": {
+      "(VK_KHR_create_renderpass2)": [
+        {
+          "vuid": "VUID-VkSubpassBeginInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassBeginInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassBeginInfoKHR-contents-parameter",
+          "text": " <code>contents</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSubpassContents\">VkSubpassContents</a> value"
+        }
+      ]
+    },
+    "VkDeviceGroupRenderPassBeginInfo": {
+      "(VK_VERSION_1_1,VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00905",
+          "text": " <code>deviceMask</code> <strong class=\"purple\">must</strong> be a valid device mask value"
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00906",
+          "text": " <code>deviceMask</code> <strong class=\"purple\">must</strong> not be zero"
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00907",
+          "text": " <code>deviceMask</code> <strong class=\"purple\">must</strong> be a subset of the command buffer&#8217;s initial device mask"
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupRenderPassBeginInfo-deviceRenderAreaCount-00908",
+          "text": " <code>deviceRenderAreaCount</code> <strong class=\"purple\">must</strong> either be zero or equal to the number of physical devices in the logical device."
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupRenderPassBeginInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupRenderPassBeginInfo-pDeviceRenderAreas-parameter",
+          "text": " If <code>deviceRenderAreaCount</code> is not <code>0</code>, <code>pDeviceRenderAreas</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>deviceRenderAreaCount</code> <a href=\"#VkRect2D\">VkRect2D</a> structures"
+        }
+      ]
+    },
+    "VkRenderPassAttachmentBeginInfoKHR": {
+      "(VK_KHR_imageless_framebuffer)": [
+        {
+          "vuid": "VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-03218",
+          "text": " Each element of <code>pAttachments</code> <strong class=\"purple\">must</strong> only specify a single mip level"
+        },
+        {
+          "vuid": "VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-03219",
+          "text": " Each element of <code>pAttachments</code> <strong class=\"purple\">must</strong> have been created with the identity swizzle"
+        },
+        {
+          "vuid": "VUID-VkRenderPassAttachmentBeginInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-parameter",
+          "text": " If <code>attachmentCount</code> is not <code>0</code>, <code>pAttachments</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>attachmentCount</code> valid <a href=\"#VkImageView\">VkImageView</a> handles"
+        }
+      ]
+    },
+    "vkGetRenderAreaGranularity": {
+      "core": [
+        {
+          "vuid": "VUID-vkGetRenderAreaGranularity-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetRenderAreaGranularity-renderPass-parameter",
+          "text": " <code>renderPass</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkRenderPass\">VkRenderPass</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetRenderAreaGranularity-pGranularity-parameter",
+          "text": " <code>pGranularity</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkExtent2D\">VkExtent2D</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetRenderAreaGranularity-renderPass-parent",
+          "text": " <code>renderPass</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkCmdNextSubpass": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdNextSubpass-None-00909",
+          "text": " The current subpass index <strong class=\"purple\">must</strong> be less than the number of subpasses in the render pass minus one"
+        },
+        {
+          "vuid": "VUID-vkCmdNextSubpass-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdNextSubpass-contents-parameter",
+          "text": " <code>contents</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSubpassContents\">VkSubpassContents</a> value"
+        },
+        {
+          "vuid": "VUID-vkCmdNextSubpass-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdNextSubpass-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdNextSubpass-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called inside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdNextSubpass-bufferlevel",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a primary <code>VkCommandBuffer</code>"
+        }
+      ],
+      "(VK_EXT_transform_feedback)": [
+        {
+          "vuid": "VUID-vkCmdNextSubpass-None-02349",
+          "text": " This command <strong class=\"purple\">must</strong> not be recorded when transform feedback is active"
+        }
+      ]
+    },
+    "vkCmdNextSubpass2KHR": {
+      "(VK_KHR_create_renderpass2)": [
+        {
+          "vuid": "VUID-vkCmdNextSubpass2KHR-None-03102",
+          "text": " The current subpass index <strong class=\"purple\">must</strong> be less than the number of subpasses in the render pass minus one"
+        },
+        {
+          "vuid": "VUID-vkCmdNextSubpass2KHR-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdNextSubpass2KHR-pSubpassBeginInfo-parameter",
+          "text": " <code>pSubpassBeginInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkSubpassBeginInfoKHR\">VkSubpassBeginInfoKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCmdNextSubpass2KHR-pSubpassEndInfo-parameter",
+          "text": " <code>pSubpassEndInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkSubpassEndInfoKHR\">VkSubpassEndInfoKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCmdNextSubpass2KHR-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdNextSubpass2KHR-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdNextSubpass2KHR-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called inside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdNextSubpass2KHR-bufferlevel",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a primary <code>VkCommandBuffer</code>"
+        }
+      ],
+      "(VK_KHR_create_renderpass2)+(VK_EXT_transform_feedback)": [
+        {
+          "vuid": "VUID-vkCmdNextSubpass2KHR-None-02350",
+          "text": " This command <strong class=\"purple\">must</strong> not be recorded when transform feedback is active"
+        }
+      ]
+    },
+    "vkCmdEndRenderPass": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdEndRenderPass-None-00910",
+          "text": " The current subpass index <strong class=\"purple\">must</strong> be equal to the number of subpasses in the render pass minus one"
+        },
+        {
+          "vuid": "VUID-vkCmdEndRenderPass-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdEndRenderPass-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdEndRenderPass-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdEndRenderPass-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called inside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdEndRenderPass-bufferlevel",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a primary <code>VkCommandBuffer</code>"
+        }
+      ],
+      "(VK_EXT_transform_feedback)": [
+        {
+          "vuid": "VUID-vkCmdEndRenderPass-None-02351",
+          "text": " This command <strong class=\"purple\">must</strong> not be recorded when transform feedback is active"
+        }
+      ]
+    },
+    "vkCmdEndRenderPass2KHR": {
+      "(VK_KHR_create_renderpass2)": [
+        {
+          "vuid": "VUID-vkCmdEndRenderPass2KHR-None-03103",
+          "text": " The current subpass index <strong class=\"purple\">must</strong> be equal to the number of subpasses in the render pass minus one"
+        },
+        {
+          "vuid": "VUID-vkCmdEndRenderPass2KHR-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdEndRenderPass2KHR-pSubpassEndInfo-parameter",
+          "text": " <code>pSubpassEndInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkSubpassEndInfoKHR\">VkSubpassEndInfoKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCmdEndRenderPass2KHR-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdEndRenderPass2KHR-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdEndRenderPass2KHR-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called inside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdEndRenderPass2KHR-bufferlevel",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a primary <code>VkCommandBuffer</code>"
+        }
+      ],
+      "(VK_KHR_create_renderpass2)+(VK_EXT_transform_feedback)": [
+        {
+          "vuid": "VUID-vkCmdEndRenderPass2KHR-None-02352",
+          "text": " This command <strong class=\"purple\">must</strong> not be recorded when transform feedback is active"
+        }
+      ]
+    },
+    "VkSubpassEndInfoKHR": {
+      "(VK_KHR_create_renderpass2)": [
+        {
+          "vuid": "VUID-VkSubpassEndInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkSubpassEndInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "vkCreateShaderModule": {
+      "core": [
+        {
+          "vuid": "VUID-vkCreateShaderModule-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateShaderModule-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkShaderModuleCreateInfo\">VkShaderModuleCreateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateShaderModule-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateShaderModule-pShaderModule-parameter",
+          "text": " <code>pShaderModule</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkShaderModule\">VkShaderModule</a> handle"
+        }
+      ]
+    },
+    "VkShaderModuleCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkShaderModuleCreateInfo-codeSize-01085",
+          "text": " <code>codeSize</code> <strong class=\"purple\">must</strong> be greater than 0"
+        },
+        {
+          "vuid": "VUID-VkShaderModuleCreateInfo-pCode-01089",
+          "text": " <code>pCode</code> <strong class=\"purple\">must</strong> declare the <code>Shader</code> capability for SPIR-V code"
+        },
+        {
+          "vuid": "VUID-VkShaderModuleCreateInfo-pCode-01090",
+          "text": " <code>pCode</code> <strong class=\"purple\">must</strong> not declare any capability that is not supported by the API, as described by the <a href=\"#spirvenv-module-validation\">Capabilities</a> section of the <a href=\"#spirvenv-capabilities\">SPIR-V Environment</a> appendix"
+        },
+        {
+          "vuid": "VUID-VkShaderModuleCreateInfo-pCode-01091",
+          "text": " If <code>pCode</code> declares any of the capabilities listed as <strong class=\"purple\">optional</strong> in the <a href=\"#spirvenv-capabilities-table\">SPIR-V Environment</a> appendix, the corresponding feature(s) <strong class=\"purple\">must</strong> be enabled."
+        },
+        {
+          "vuid": "VUID-VkShaderModuleCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkShaderModuleCreateInfo-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkShaderModuleValidationCacheCreateInfoEXT\">VkShaderModuleValidationCacheCreateInfoEXT</a>"
+        },
+        {
+          "vuid": "VUID-VkShaderModuleCreateInfo-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkShaderModuleCreateInfo-pCode-parameter",
+          "text": " <code>pCode</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of \\(\\textrm{codeSize} \\over 4\\) <code>uint32_t</code> values"
+        }
+      ],
+      "!(VK_NV_glsl_shader)": [
+        {
+          "vuid": "VUID-VkShaderModuleCreateInfo-codeSize-01086",
+          "text": " <code>codeSize</code> <strong class=\"purple\">must</strong> be a multiple of 4"
+        },
+        {
+          "vuid": "VUID-VkShaderModuleCreateInfo-pCode-01087",
+          "text": " <code>pCode</code> <strong class=\"purple\">must</strong> point to valid SPIR-V code, formatted and packed as described by the <a href=\"#spirv-spec\">Khronos SPIR-V Specification</a>"
+        },
+        {
+          "vuid": "VUID-VkShaderModuleCreateInfo-pCode-01088",
+          "text": " <code>pCode</code> <strong class=\"purple\">must</strong> adhere to the validation rules described by the <a href=\"#spirvenv-module-validation\">Validation Rules within a Module</a> section of the <a href=\"#spirvenv-capabilities\">SPIR-V Environment</a> appendix"
+        }
+      ],
+      "(VK_NV_glsl_shader)": [
+        {
+          "vuid": "VUID-VkShaderModuleCreateInfo-pCode-01376",
+          "text": " If <code>pCode</code> is a pointer to SPIR-V code, <code>codeSize</code> <strong class=\"purple\">must</strong> be a multiple of 4"
+        },
+        {
+          "vuid": "VUID-VkShaderModuleCreateInfo-pCode-01377",
+          "text": " <code>pCode</code> <strong class=\"purple\">must</strong> point to either valid SPIR-V code, formatted and packed as described by the <a href=\"#spirv-spec\">Khronos SPIR-V Specification</a> or valid GLSL code which <strong class=\"purple\">must</strong> be written to the <code>GL_KHR_vulkan_glsl</code> extension specification"
+        },
+        {
+          "vuid": "VUID-VkShaderModuleCreateInfo-pCode-01378",
+          "text": " If <code>pCode</code> is a pointer to SPIR-V code, that code <strong class=\"purple\">must</strong> adhere to the validation rules described by the <a href=\"#spirvenv-module-validation\">Validation Rules within a Module</a> section of the <a href=\"#spirvenv-capabilities\">SPIR-V Environment</a> appendix"
+        },
+        {
+          "vuid": "VUID-VkShaderModuleCreateInfo-pCode-01379",
+          "text": " If <code>pCode</code> is a pointer to GLSL code, it <strong class=\"purple\">must</strong> be valid GLSL code written to the <code>GL_KHR_vulkan_glsl</code> GLSL extension specification"
+        }
+      ]
+    },
+    "VkShaderModuleValidationCacheCreateInfoEXT": {
+      "(VK_EXT_validation_cache)": [
+        {
+          "vuid": "VUID-VkShaderModuleValidationCacheCreateInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkShaderModuleValidationCacheCreateInfoEXT-validationCache-parameter",
+          "text": " <code>validationCache</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkValidationCacheEXT\">VkValidationCacheEXT</a> handle"
+        }
+      ]
+    },
+    "vkDestroyShaderModule": {
+      "core": [
+        {
+          "vuid": "VUID-vkDestroyShaderModule-shaderModule-01092",
+          "text": " If <code>VkAllocationCallbacks</code> were provided when <code>shaderModule</code> was created, a compatible set of callbacks <strong class=\"purple\">must</strong> be provided here"
+        },
+        {
+          "vuid": "VUID-vkDestroyShaderModule-shaderModule-01093",
+          "text": " If no <code>VkAllocationCallbacks</code> were provided when <code>shaderModule</code> was created, <code>pAllocator</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroyShaderModule-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyShaderModule-shaderModule-parameter",
+          "text": " If <code>shaderModule</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>shaderModule</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkShaderModule\">VkShaderModule</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyShaderModule-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkDestroyShaderModule-shaderModule-parent",
+          "text": " If <code>shaderModule</code> is a valid handle, it <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV": {
+      "(VK_NV_cooperative_matrix)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceCooperativeMatrixPropertiesNV-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceCooperativeMatrixPropertiesNV-pPropertyCount-parameter",
+          "text": " <code>pPropertyCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceCooperativeMatrixPropertiesNV-pProperties-parameter",
+          "text": " If the value referenced by <code>pPropertyCount</code> is not <code>0</code>, and <code>pProperties</code> is not <code>NULL</code>, <code>pProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pPropertyCount</code> <a href=\"#VkCooperativeMatrixPropertiesNV\">VkCooperativeMatrixPropertiesNV</a> structures"
+        }
+      ]
+    },
+    "VkCooperativeMatrixPropertiesNV": {
+      "(VK_NV_cooperative_matrix)": [
+        {
+          "vuid": "VUID-VkCooperativeMatrixPropertiesNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkCooperativeMatrixPropertiesNV-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkCooperativeMatrixPropertiesNV-AType-parameter",
+          "text": " <code>AType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkComponentTypeNV\">VkComponentTypeNV</a> value"
+        },
+        {
+          "vuid": "VUID-VkCooperativeMatrixPropertiesNV-BType-parameter",
+          "text": " <code>BType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkComponentTypeNV\">VkComponentTypeNV</a> value"
+        },
+        {
+          "vuid": "VUID-VkCooperativeMatrixPropertiesNV-CType-parameter",
+          "text": " <code>CType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkComponentTypeNV\">VkComponentTypeNV</a> value"
+        },
+        {
+          "vuid": "VUID-VkCooperativeMatrixPropertiesNV-DType-parameter",
+          "text": " <code>DType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkComponentTypeNV\">VkComponentTypeNV</a> value"
+        },
+        {
+          "vuid": "VUID-VkCooperativeMatrixPropertiesNV-scope-parameter",
+          "text": " <code>scope</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkScopeNV\">VkScopeNV</a> value"
+        }
+      ]
+    },
+    "vkCreateValidationCacheEXT": {
+      "(VK_EXT_validation_cache)": [
+        {
+          "vuid": "VUID-vkCreateValidationCacheEXT-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateValidationCacheEXT-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkValidationCacheCreateInfoEXT\">VkValidationCacheCreateInfoEXT</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateValidationCacheEXT-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateValidationCacheEXT-pValidationCache-parameter",
+          "text": " <code>pValidationCache</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkValidationCacheEXT\">VkValidationCacheEXT</a> handle"
+        }
+      ]
+    },
+    "VkValidationCacheCreateInfoEXT": {
+      "(VK_EXT_validation_cache)": [
+        {
+          "vuid": "VUID-VkValidationCacheCreateInfoEXT-initialDataSize-01534",
+          "text": " If <code>initialDataSize</code> is not <code>0</code>, it <strong class=\"purple\">must</strong> be equal to the size of <code>pInitialData</code>, as returned by <code>vkGetValidationCacheDataEXT</code> when <code>pInitialData</code> was originally retrieved"
+        },
+        {
+          "vuid": "VUID-VkValidationCacheCreateInfoEXT-initialDataSize-01535",
+          "text": " If <code>initialDataSize</code> is not <code>0</code>, <code>pInitialData</code> <strong class=\"purple\">must</strong> have been retrieved from a previous call to <code>vkGetValidationCacheDataEXT</code>"
+        },
+        {
+          "vuid": "VUID-VkValidationCacheCreateInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkValidationCacheCreateInfoEXT-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkValidationCacheCreateInfoEXT-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkValidationCacheCreateInfoEXT-pInitialData-parameter",
+          "text": " If <code>initialDataSize</code> is not <code>0</code>, <code>pInitialData</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>initialDataSize</code> bytes"
+        }
+      ]
+    },
+    "vkMergeValidationCachesEXT": {
+      "(VK_EXT_validation_cache)": [
+        {
+          "vuid": "VUID-vkMergeValidationCachesEXT-dstCache-01536",
+          "text": " <code>dstCache</code> <strong class=\"purple\">must</strong> not appear in the list of source caches"
+        },
+        {
+          "vuid": "VUID-vkMergeValidationCachesEXT-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkMergeValidationCachesEXT-dstCache-parameter",
+          "text": " <code>dstCache</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkValidationCacheEXT\">VkValidationCacheEXT</a> handle"
+        },
+        {
+          "vuid": "VUID-vkMergeValidationCachesEXT-pSrcCaches-parameter",
+          "text": " <code>pSrcCaches</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>srcCacheCount</code> valid <a href=\"#VkValidationCacheEXT\">VkValidationCacheEXT</a> handles"
+        },
+        {
+          "vuid": "VUID-vkMergeValidationCachesEXT-srcCacheCount-arraylength",
+          "text": " <code>srcCacheCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkMergeValidationCachesEXT-dstCache-parent",
+          "text": " <code>dstCache</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        },
+        {
+          "vuid": "VUID-vkMergeValidationCachesEXT-pSrcCaches-parent",
+          "text": " Each element of <code>pSrcCaches</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkGetValidationCacheDataEXT": {
+      "(VK_EXT_validation_cache)": [
+        {
+          "vuid": "VUID-vkGetValidationCacheDataEXT-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetValidationCacheDataEXT-validationCache-parameter",
+          "text": " <code>validationCache</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkValidationCacheEXT\">VkValidationCacheEXT</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetValidationCacheDataEXT-pDataSize-parameter",
+          "text": " <code>pDataSize</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>size_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetValidationCacheDataEXT-pData-parameter",
+          "text": " If the value referenced by <code>pDataSize</code> is not <code>0</code>, and <code>pData</code> is not <code>NULL</code>, <code>pData</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pDataSize</code> bytes"
+        },
+        {
+          "vuid": "VUID-vkGetValidationCacheDataEXT-validationCache-parent",
+          "text": " <code>validationCache</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkDestroyValidationCacheEXT": {
+      "(VK_EXT_validation_cache)": [
+        {
+          "vuid": "VUID-vkDestroyValidationCacheEXT-validationCache-01537",
+          "text": " If <code>VkAllocationCallbacks</code> were provided when <code>validationCache</code> was created, a compatible set of callbacks <strong class=\"purple\">must</strong> be provided here"
+        },
+        {
+          "vuid": "VUID-vkDestroyValidationCacheEXT-validationCache-01538",
+          "text": " If no <code>VkAllocationCallbacks</code> were provided when <code>validationCache</code> was created, <code>pAllocator</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroyValidationCacheEXT-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyValidationCacheEXT-validationCache-parameter",
+          "text": " If <code>validationCache</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>validationCache</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkValidationCacheEXT\">VkValidationCacheEXT</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyValidationCacheEXT-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkDestroyValidationCacheEXT-validationCache-parent",
+          "text": " If <code>validationCache</code> is a valid handle, it <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkCreateComputePipelines": {
+      "core": [
+        {
+          "vuid": "VUID-vkCreateComputePipelines-flags-00695",
+          "text": " If the <code>flags</code> member of any element of <code>pCreateInfos</code> contains the <code>VK_PIPELINE_CREATE_DERIVATIVE_BIT</code> flag, and the <code>basePipelineIndex</code> member of that same element is not <code>-1</code>, <code>basePipelineIndex</code> <strong class=\"purple\">must</strong> be less than the index into <code>pCreateInfos</code> that corresponds to that element"
+        },
+        {
+          "vuid": "VUID-vkCreateComputePipelines-flags-00696",
+          "text": " If the <code>flags</code> member of any element of <code>pCreateInfos</code> contains the <code>VK_PIPELINE_CREATE_DERIVATIVE_BIT</code> flag, the base pipeline <strong class=\"purple\">must</strong> have been created with the <code>VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT</code> flag set"
+        },
+        {
+          "vuid": "VUID-vkCreateComputePipelines-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateComputePipelines-pipelineCache-parameter",
+          "text": " If <code>pipelineCache</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>pipelineCache</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipelineCache\">VkPipelineCache</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateComputePipelines-pCreateInfos-parameter",
+          "text": " <code>pCreateInfos</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>createInfoCount</code> valid <a href=\"#VkComputePipelineCreateInfo\">VkComputePipelineCreateInfo</a> structures"
+        },
+        {
+          "vuid": "VUID-vkCreateComputePipelines-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateComputePipelines-pPipelines-parameter",
+          "text": " <code>pPipelines</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>createInfoCount</code> <a href=\"#VkPipeline\">VkPipeline</a> handles"
+        },
+        {
+          "vuid": "VUID-vkCreateComputePipelines-createInfoCount-arraylength",
+          "text": " <code>createInfoCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCreateComputePipelines-pipelineCache-parent",
+          "text": " If <code>pipelineCache</code> is a valid handle, it <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "VkComputePipelineCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkComputePipelineCreateInfo-flags-00697",
+          "text": " If <code>flags</code> contains the <code>VK_PIPELINE_CREATE_DERIVATIVE_BIT</code> flag, and <code>basePipelineIndex</code> is -1, <code>basePipelineHandle</code> <strong class=\"purple\">must</strong> be a valid handle to a compute <code>VkPipeline</code>"
+        },
+        {
+          "vuid": "VUID-VkComputePipelineCreateInfo-flags-00698",
+          "text": " If <code>flags</code> contains the <code>VK_PIPELINE_CREATE_DERIVATIVE_BIT</code> flag, and <code>basePipelineHandle</code> is <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>basePipelineIndex</code> <strong class=\"purple\">must</strong> be a valid index into the calling command&#8217;s <code>pCreateInfos</code> parameter"
+        },
+        {
+          "vuid": "VUID-VkComputePipelineCreateInfo-flags-00699",
+          "text": " If <code>flags</code> contains the <code>VK_PIPELINE_CREATE_DERIVATIVE_BIT</code> flag, and <code>basePipelineIndex</code> is not -1, <code>basePipelineHandle</code> <strong class=\"purple\">must</strong> be <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>"
+        },
+        {
+          "vuid": "VUID-VkComputePipelineCreateInfo-flags-00700",
+          "text": " If <code>flags</code> contains the <code>VK_PIPELINE_CREATE_DERIVATIVE_BIT</code> flag, and <code>basePipelineHandle</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>basePipelineIndex</code> <strong class=\"purple\">must</strong> be -1"
+        },
+        {
+          "vuid": "VUID-VkComputePipelineCreateInfo-stage-00701",
+          "text": " The <code>stage</code> member of <code>stage</code> <strong class=\"purple\">must</strong> be <code>VK_SHADER_STAGE_COMPUTE_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkComputePipelineCreateInfo-stage-00702",
+          "text": " The shader code for the entry point identified by <code>stage</code> and the rest of the state identified by this structure <strong class=\"purple\">must</strong> adhere to the pipeline linking rules described in the <a href=\"#interfaces\">Shader Interfaces</a> chapter"
+        },
+        {
+          "vuid": "VUID-VkComputePipelineCreateInfo-layout-00703",
+          "text": " <code>layout</code> <strong class=\"purple\">must</strong> be <a href=\"#descriptorsets-pipelinelayout-consistency\">consistent</a> with the layout of the compute shader specified in <code>stage</code>"
+        },
+        {
+          "vuid": "VUID-VkComputePipelineCreateInfo-layout-01687",
+          "text": " The number of resources in <code>layout</code> accessible to the compute shader stage <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxPerStageResources</code>"
+        },
+        {
+          "vuid": "VUID-VkComputePipelineCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkComputePipelineCreateInfo-pNext-pNext",
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkPipelineCompilerControlCreateInfoAMD\">VkPipelineCompilerControlCreateInfoAMD</a> or <a href=\"#VkPipelineCreationFeedbackCreateInfoEXT\">VkPipelineCreationFeedbackCreateInfoEXT</a>"
+        },
+        {
+          "vuid": "VUID-VkComputePipelineCreateInfo-sType-unique",
+          "text": " Each <code>sType</code> member in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
+        },
+        {
+          "vuid": "VUID-VkComputePipelineCreateInfo-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineCreateFlagBits\">VkPipelineCreateFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkComputePipelineCreateInfo-stage-parameter",
+          "text": " <code>stage</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipelineShaderStageCreateInfo\">VkPipelineShaderStageCreateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-VkComputePipelineCreateInfo-layout-parameter",
+          "text": " <code>layout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipelineLayout\">VkPipelineLayout</a> handle"
+        },
+        {
+          "vuid": "VUID-VkComputePipelineCreateInfo-commonparent",
+          "text": " Both of <code>basePipelineHandle</code>, and <code>layout</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
+    "VkPipelineShaderStageCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-00704",
+          "text": " If the <a href=\"#features-geometryShader\">geometry shaders</a> feature is not enabled, <code>stage</code> <strong class=\"purple\">must</strong> not be <code>VK_SHADER_STAGE_GEOMETRY_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-00705",
+          "text": " If the <a href=\"#features-tessellationShader\">tessellation shaders</a> feature is not enabled, <code>stage</code> <strong class=\"purple\">must</strong> not be <code>VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT</code> or <code>VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-00706",
+          "text": " <code>stage</code> <strong class=\"purple\">must</strong> not be <code>VK_SHADER_STAGE_ALL_GRAPHICS</code>, or <code>VK_SHADER_STAGE_ALL</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-pName-00707",
+          "text": " <code>pName</code> <strong class=\"purple\">must</strong> be the name of an <code>OpEntryPoint</code> in <code>module</code> with an execution model that matches <code>stage</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-maxClipDistances-00708",
+          "text": " If the identified entry point includes any variable in its interface that is declared with the <code>ClipDistance</code> <code>BuiltIn</code> decoration, that variable <strong class=\"purple\">must</strong> not have an array size greater than <code>VkPhysicalDeviceLimits</code>::<code>maxClipDistances</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-maxCullDistances-00709",
+          "text": " If the identified entry point includes any variable in its interface that is declared with the <code>CullDistance</code> <code>BuiltIn</code> decoration, that variable <strong class=\"purple\">must</strong> not have an array size greater than <code>VkPhysicalDeviceLimits</code>::<code>maxCullDistances</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-maxCombinedClipAndCullDistances-00710",
+          "text": " If the identified entry point includes any variables in its interface that are declared with the <code>ClipDistance</code> or <code>CullDistance</code> <code>BuiltIn</code> decoration, those variables <strong class=\"purple\">must</strong> not have array sizes which sum to more than <code>VkPhysicalDeviceLimits</code>::<code>maxCombinedClipAndCullDistances</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-maxSampleMaskWords-00711",
+          "text": " If the identified entry point includes any variable in its interface that is declared with the <code>SampleMask</code> <code>BuiltIn</code> decoration, that variable <strong class=\"purple\">must</strong> not have an array size greater than <code>VkPhysicalDeviceLimits</code>::<code>maxSampleMaskWords</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-00712",
+          "text": " If <code>stage</code> is <code>VK_SHADER_STAGE_VERTEX_BIT</code>, the identified entry point <strong class=\"purple\">must</strong> not include any input variable in its interface that is decorated with <code>CullDistance</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-00713",
+          "text": " If <code>stage</code> is <code>VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT</code> or <code>VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT</code>, and the identified entry point has an <code>OpExecutionMode</code> instruction that specifies a patch size with <code>OutputVertices</code>, the patch size <strong class=\"purple\">must</strong> be greater than <code>0</code> and less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxTessellationPatchSize</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-00714",
+          "text": " If <code>stage</code> is <code>VK_SHADER_STAGE_GEOMETRY_BIT</code>, the identified entry point <strong class=\"purple\">must</strong> have an <code>OpExecutionMode</code> instruction that specifies a maximum output vertex count that is greater than <code>0</code> and less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxGeometryOutputVertices</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-00715",
+          "text": " If <code>stage</code> is <code>VK_SHADER_STAGE_GEOMETRY_BIT</code>, the identified entry point <strong class=\"purple\">must</strong> have an <code>OpExecutionMode</code> instruction that specifies an invocation count that is greater than <code>0</code> and less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxGeometryShaderInvocations</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-02596",
+          "text": " If <code>stage</code> is a vertex processing stage, and the identified entry point writes to <code>Layer</code> for any primitive, it <strong class=\"purple\">must</strong> write the same value to <code>Layer</code> for all vertices of a given primitive"
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-02597",
+          "text": " If <code>stage</code> is a vertex processing stage, and the identified entry point writes to <code>ViewportIndex</code> for any primitive, it <strong class=\"purple\">must</strong> write the same value to <code>ViewportIndex</code> for all vertices of a given primitive"
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-00718",
+          "text": " If <code>stage</code> is <code>VK_SHADER_STAGE_FRAGMENT_BIT</code>, the identified entry point <strong class=\"purple\">must</strong> not include any output variables in its interface decorated with <code>CullDistance</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-00719",
+          "text": " If <code>stage</code> is <code>VK_SHADER_STAGE_FRAGMENT_BIT</code>, and the identified entry point writes to <code>FragDepth</code> in any execution path, it <strong class=\"purple\">must</strong> write to <code>FragDepth</code> in all execution paths"
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT\">VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT</a>"
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineShaderStageCreateFlagBits\">VkPipelineShaderStageCreateFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-parameter",
+          "text": " <code>stage</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkShaderStageFlagBits\">VkShaderStageFlagBits</a> value"
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-module-parameter",
+          "text": " <code>module</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkShaderModule\">VkShaderModule</a> handle"
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-pName-parameter",
+          "text": " <code>pName</code> <strong class=\"purple\">must</strong> be a null-terminated UTF-8 string"
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-pSpecializationInfo-parameter",
+          "text": " If <code>pSpecializationInfo</code> is not <code>NULL</code>, <code>pSpecializationInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkSpecializationInfo\">VkSpecializationInfo</a> structure"
+        }
+      ],
+      "(VK_NV_mesh_shader)": [
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-02091",
+          "text": " If the <a href=\"#features-meshShader\">mesh shader</a> feature is not enabled, <code>stage</code> <strong class=\"purple\">must</strong> not be <code>VK_SHADER_STAGE_MESH_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-02092",
+          "text": " If the <a href=\"#features-taskShader\">task shader</a> feature is not enabled, <code>stage</code> <strong class=\"purple\">must</strong> not be <code>VK_SHADER_STAGE_TASK_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-02093",
+          "text": " If <code>stage</code> is <code>VK_SHADER_STAGE_MESH_BIT_NV</code>, the identified entry point <strong class=\"purple\">must</strong> have an <code>OpExecutionMode</code> instruction that specifies a maximum output vertex count, <code>OutputVertices</code>, that is greater than <code>0</code> and less than or equal to <code>VkPhysicalDeviceMeshShaderPropertiesNV</code>::<code>maxMeshOutputVertices</code>."
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-02094",
+          "text": " If <code>stage</code> is <code>VK_SHADER_STAGE_MESH_BIT_NV</code>, the identified entry point <strong class=\"purple\">must</strong> have an <code>OpExecutionMode</code> instruction that specifies a maximum output primitive count, <code>OutputPrimitivesNV</code>, that is greater than <code>0</code> and less than or equal to <code>VkPhysicalDeviceMeshShaderPropertiesNV</code>::<code>maxMeshOutputPrimitives</code>."
+        }
+      ],
+      "(VK_EXT_shader_stencil_export)": [
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-01511",
+          "text": " If <code>stage</code> is <code>VK_SHADER_STAGE_FRAGMENT_BIT</code>, and the identified entry point writes to <code>FragStencilRefEXT</code> in any execution path, it <strong class=\"purple\">must</strong> write to <code>FragStencilRefEXT</code> in all execution paths"
+        }
+      ],
+      "(VK_EXT_subgroup_size_control)": [
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-flags-02784",
+          "text": " If <code>flags</code> has the <code>VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT</code> flag set, the <a href=\"#features-subgroupSizeControl\"><code>subgroupSizeControl</code></a> feature <strong class=\"purple\">must</strong> be enabled."
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-flags-02785",
+          "text": " If <code>flags</code> has the <code>VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT</code> flag set, the <a href=\"#features-computeFullSubgroups\"><code>computeFullSubgroups</code></a> feature <strong class=\"purple\">must</strong> be enabled."
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-pNext-02754",
+          "text": " If a <a href=\"#VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT\">VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT</a> structure is included in the <code>pNext</code> chain, <code>flags</code> <strong class=\"purple\">must</strong> not have the <code>VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT</code> flag set."
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-pNext-02755",
+          "text": " If a <a href=\"#VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT\">VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT</a> structure is included in the <code>pNext</code> chain, the <a href=\"#features-subgroupSizeControl\"><code>subgroupSizeControl</code></a> feature <strong class=\"purple\">must</strong> be enabled, and <code>stage</code> <strong class=\"purple\">must</strong> be a valid bit specified in <a href=\"#limits-required-subgroup-size-stages\"><code>requiredSubgroupSizeStages</code></a>."
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-pNext-02756",
+          "text": " If a <a href=\"#VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT\">VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT</a> structure is included in the <code>pNext</code> chain and <code>stage</code> is <code>VK_SHADER_STAGE_COMPUTE_BIT</code>, the local workgroup size of the shader <strong class=\"purple\">must</strong> be less than or equal to the product of <a href=\"#VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT\">VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT</a>::<code>requiredSubgroupSize</code> and <a href=\"#limits-max-subgroups-per-workgroup\"><code>maxComputeWorkgroupSubgroups</code></a>."
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-pNext-02757",
+          "text": " If a <a href=\"#VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT\">VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT</a> structure is included in the <code>pNext</code> chain, and <code>flags</code> has the <code>VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT</code> flag set, the local workgroup size in the X dimension of the pipeline <strong class=\"purple\">must</strong> be a multiple of <a href=\"#VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT\">VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT</a>::<code>requiredSubgroupSize</code>."
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-flags-02758",
+          "text": " If <code>flags</code> has both the <code>VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT</code> and <code>VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT</code> flags set, the local workgroup size in the X dimension of the pipeline <strong class=\"purple\">must</strong> be a multiple of <a href=\"#limits-max-subgroup-size\"><code>maxSubgroupSize</code></a>."
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageCreateInfo-flags-02759",
+          "text": " If <code>flags</code> has the <code>VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT</code> flag set and <code>flags</code> does not have the <code>VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT</code> flag set and no <a href=\"#VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT\">VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT</a> structure is included in the <code>pNext</code> chain, the local workgroup size in the X dimension of the pipeline <strong class=\"purple\">must</strong> be a multiple of <a href=\"#limits-subgroup-size\"><code>subgroupSize</code></a>."
+        }
+      ]
+    },
+    "VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT": {
+      "(VK_EXT_subgroup_size_control)": [
+        {
+          "vuid": "VUID-VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT-requiredSubgroupSize-02760",
+          "text": " <code>requiredSubgroupSize</code> <strong class=\"purple\">must</strong> be a power-of-two integer."
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT-requiredSubgroupSize-02761",
+          "text": " <code>requiredSubgroupSize</code> <strong class=\"purple\">must</strong> be greater or equal to <a href=\"#limits-min-subgroup-size\">minSubgroupSize</a>."
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT-requiredSubgroupSize-02762",
+          "text": " <code>requiredSubgroupSize</code> <strong class=\"purple\">must</strong> be less than or equal to <a href=\"#limits-max-subgroup-size\">maxSubgroupSize</a>."
+        },
+        {
+          "vuid": "VUID-VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT</code>"
+        }
+      ]
+    },
+    "vkCreateGraphicsPipelines": {
+      "core": [
+        {
+          "vuid": "VUID-vkCreateGraphicsPipelines-flags-00720",
+          "text": " If the <code>flags</code> member of any element of <code>pCreateInfos</code> contains the <code>VK_PIPELINE_CREATE_DERIVATIVE_BIT</code> flag, and the <code>basePipelineIndex</code> member of that same element is not <code>-1</code>, <code>basePipelineIndex</code> <strong class=\"purple\">must</strong> be less than the index into <code>pCreateInfos</code> that corresponds to that element"
+        },
+        {
+          "vuid": "VUID-vkCreateGraphicsPipelines-flags-00721",
+          "text": " If the <code>flags</code> member of any element of <code>pCreateInfos</code> contains the <code>VK_PIPELINE_CREATE_DERIVATIVE_BIT</code> flag, the base pipeline <strong class=\"purple\">must</strong> have been created with the <code>VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT</code> flag set"
+        },
+        {
+          "vuid": "VUID-vkCreateGraphicsPipelines-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateGraphicsPipelines-pipelineCache-parameter",
+          "text": " If <code>pipelineCache</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>pipelineCache</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipelineCache\">VkPipelineCache</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateGraphicsPipelines-pCreateInfos-parameter",
+          "text": " <code>pCreateInfos</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>createInfoCount</code> valid <a href=\"#VkGraphicsPipelineCreateInfo\">VkGraphicsPipelineCreateInfo</a> structures"
+        },
+        {
+          "vuid": "VUID-vkCreateGraphicsPipelines-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateGraphicsPipelines-pPipelines-parameter",
+          "text": " <code>pPipelines</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>createInfoCount</code> <a href=\"#VkPipeline\">VkPipeline</a> handles"
+        },
+        {
+          "vuid": "VUID-vkCreateGraphicsPipelines-createInfoCount-arraylength",
+          "text": " <code>createInfoCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCreateGraphicsPipelines-pipelineCache-parent",
+          "text": " If <code>pipelineCache</code> is a valid handle, it <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "VkGraphicsPipelineCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-00722",
+          "text": " If <code>flags</code> contains the <code>VK_PIPELINE_CREATE_DERIVATIVE_BIT</code> flag, and <code>basePipelineIndex</code> is -1, <code>basePipelineHandle</code> <strong class=\"purple\">must</strong> be a valid handle to a graphics <code>VkPipeline</code>"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-00723",
+          "text": " If <code>flags</code> contains the <code>VK_PIPELINE_CREATE_DERIVATIVE_BIT</code> flag, and <code>basePipelineHandle</code> is <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>basePipelineIndex</code> <strong class=\"purple\">must</strong> be a valid index into the calling command&#8217;s <code>pCreateInfos</code> parameter"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-00724",
+          "text": " If <code>flags</code> contains the <code>VK_PIPELINE_CREATE_DERIVATIVE_BIT</code> flag, and <code>basePipelineIndex</code> is not -1, <code>basePipelineHandle</code> <strong class=\"purple\">must</strong> be <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-00725",
+          "text": " If <code>flags</code> contains the <code>VK_PIPELINE_CREATE_DERIVATIVE_BIT</code> flag, and <code>basePipelineHandle</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>basePipelineIndex</code> <strong class=\"purple\">must</strong> be -1"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-stage-00726",
+          "text": " The <code>stage</code> member of each element of <code>pStages</code> <strong class=\"purple\">must</strong> be unique"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-stage-00728",
+          "text": " The <code>stage</code> member of each element of <code>pStages</code> <strong class=\"purple\">must</strong> not be <code>VK_SHADER_STAGE_COMPUTE_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00729",
+          "text": " If <code>pStages</code> includes a tessellation control shader stage, it <strong class=\"purple\">must</strong> include a tessellation evaluation shader stage"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00730",
+          "text": " If <code>pStages</code> includes a tessellation evaluation shader stage, it <strong class=\"purple\">must</strong> include a tessellation control shader stage"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00731",
+          "text": " If <code>pStages</code> includes a tessellation control shader stage and a tessellation evaluation shader stage, <code>pTessellationState</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <code>VkPipelineTessellationStateCreateInfo</code> structure"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00732",
+          "text": " If <code>pStages</code> includes tessellation shader stages, the shader code of at least one stage <strong class=\"purple\">must</strong> contain an <code>OpExecutionMode</code> instruction that specifies the type of subdivision in the pipeline"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00733",
+          "text": " If <code>pStages</code> includes tessellation shader stages, and the shader code of both stages contain an <code>OpExecutionMode</code> instruction that specifies the type of subdivision in the pipeline, they <strong class=\"purple\">must</strong> both specify the same subdivision mode"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00734",
+          "text": " If <code>pStages</code> includes tessellation shader stages, the shader code of at least one stage <strong class=\"purple\">must</strong> contain an <code>OpExecutionMode</code> instruction that specifies the output patch size in the pipeline"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00735",
+          "text": " If <code>pStages</code> includes tessellation shader stages, and the shader code of both contain an <code>OpExecutionMode</code> instruction that specifies the out patch size in the pipeline, they <strong class=\"purple\">must</strong> both specify the same patch size"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00736",
+          "text": " If <code>pStages</code> includes tessellation shader stages, the <code>topology</code> member of <code>pInputAssembly</code> <strong class=\"purple\">must</strong> be <code>VK_PRIMITIVE_TOPOLOGY_PATCH_LIST</code>"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-topology-00737",
+          "text": " If the <code>topology</code> member of <code>pInputAssembly</code> is <code>VK_PRIMITIVE_TOPOLOGY_PATCH_LIST</code>, <code>pStages</code> <strong class=\"purple\">must</strong> include tessellation shader stages"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00738",
+          "text": " If <code>pStages</code> includes a geometry shader stage, and does not include any tessellation shader stages, its shader code <strong class=\"purple\">must</strong> contain an <code>OpExecutionMode</code> instruction that specifies an input primitive type that is <a href=\"#shaders-geometry-execution\">compatible</a> with the primitive topology specified in <code>pInputAssembly</code>"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00739",
+          "text": " If <code>pStages</code> includes a geometry shader stage, and also includes tessellation shader stages, its shader code <strong class=\"purple\">must</strong> contain an <code>OpExecutionMode</code> instruction that specifies an input primitive type that is <a href=\"#shaders-geometry-execution\">compatible</a> with the primitive topology that is output by the tessellation stages"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00740",
+          "text": " If <code>pStages</code> includes a fragment shader stage and a geometry shader stage, and the fragment shader code reads from an input variable that is decorated with <code>PrimitiveID</code>, then the geometry shader code <strong class=\"purple\">must</strong> write to a matching output variable, decorated with <code>PrimitiveID</code>, in all execution paths"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00741",
+          "text": " If <code>pStages</code> includes a fragment shader stage, its shader code <strong class=\"purple\">must</strong> not read from any input attachment that is defined as <code>VK_ATTACHMENT_UNUSED</code> in <code>subpass</code>"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00742",
+          "text": " The shader code for the entry points identified by <code>pStages</code>, and the rest of the state identified by this structure <strong class=\"purple\">must</strong> adhere to the pipeline linking rules described in the <a href=\"#interfaces\">Shader Interfaces</a> chapter"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-blendEnable-02023",
+          "text": " If rasterization is not disabled and the subpass uses color attachments, then for each color attachment in the subpass the <code>blendEnable</code> member of the corresponding element of the <code>pAttachment</code> member of <code>pColorBlendState</code> <strong class=\"purple\">must</strong> be <code>VK_FALSE</code> if the attached image&#8217;s <a href=\"#resources-image-format-features\">format features</a> does not contain <code>VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT</code>."
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-attachmentCount-00746",
+          "text": " If rasterization is not disabled and the subpass uses color attachments, the <code>attachmentCount</code> member of <code>pColorBlendState</code> <strong class=\"purple\">must</strong> be equal to the <code>colorAttachmentCount</code> used to create <code>subpass</code>"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747",
+          "text": " If no element of the <code>pDynamicStates</code> member of <code>pDynamicState</code> is <code>VK_DYNAMIC_STATE_VIEWPORT</code>, the <code>pViewports</code> member of <code>pViewportState</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pViewportState</code>-&gt;viewportCount valid <code>VkViewport</code> structures"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748",
+          "text": " If no element of the <code>pDynamicStates</code> member of <code>pDynamicState</code> is <code>VK_DYNAMIC_STATE_SCISSOR</code>, the <code>pScissors</code> member of <code>pViewportState</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pViewportState</code>-&gt;scissorCount <code>VkRect2D</code> structures"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00749",
+          "text": " If the wide lines feature is not enabled, and no element of the <code>pDynamicStates</code> member of <code>pDynamicState</code> is <code>VK_DYNAMIC_STATE_LINE_WIDTH</code>, the <code>lineWidth</code> member of <code>pRasterizationState</code> <strong class=\"purple\">must</strong> be <code>1.0</code>"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00750",
+          "text": " If the <code>rasterizerDiscardEnable</code> member of <code>pRasterizationState</code> is <code>VK_FALSE</code>, <code>pViewportState</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <code>VkPipelineViewportStateCreateInfo</code> structure"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00751",
+          "text": " If the <code>rasterizerDiscardEnable</code> member of <code>pRasterizationState</code> is <code>VK_FALSE</code>, <code>pMultisampleState</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <code>VkPipelineMultisampleStateCreateInfo</code> structure"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00752",
+          "text": " If the <code>rasterizerDiscardEnable</code> member of <code>pRasterizationState</code> is <code>VK_FALSE</code>, and <code>subpass</code> uses a depth/stencil attachment, <code>pDepthStencilState</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <code>VkPipelineDepthStencilStateCreateInfo</code> structure"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00753",
+          "text": " If the <code>rasterizerDiscardEnable</code> member of <code>pRasterizationState</code> is <code>VK_FALSE</code>, and <code>subpass</code> uses color attachments, <code>pColorBlendState</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <code>VkPipelineColorBlendStateCreateInfo</code> structure"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00754",
+          "text": " If the depth bias clamping feature is not enabled, no element of the <code>pDynamicStates</code> member of <code>pDynamicState</code> is <code>VK_DYNAMIC_STATE_DEPTH_BIAS</code>, and the <code>depthBiasEnable</code> member of <code>pRasterizationState</code> is <code>VK_TRUE</code>, the <code>depthBiasClamp</code> member of <code>pRasterizationState</code> <strong class=\"purple\">must</strong> be <code>0.0</code>"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-layout-00756",
+          "text": " <code>layout</code> <strong class=\"purple\">must</strong> be <a href=\"#descriptorsets-pipelinelayout-consistency\">consistent</a> with all shaders specified in <code>pStages</code>"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-00757",
+          "text": " If neither the <code>VK_AMD_mixed_attachment_samples</code> nor the <code>VK_NV_framebuffer_mixed_samples</code> extensions are enabled, and if <code>subpass</code> uses color and/or depth/stencil attachments, then the <code>rasterizationSamples</code> member of <code>pMultisampleState</code> <strong class=\"purple\">must</strong> be the same as the sample count for those subpass attachments"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-00758",
+          "text": " If <code>subpass</code> does not use any color and/or depth/stencil attachments, then the <code>rasterizationSamples</code> member of <code>pMultisampleState</code> <strong class=\"purple\">must</strong> follow the rules for a <a href=\"#renderpass-noattachments\">zero-attachment subpass</a>"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-00759",
+          "text": " <code>subpass</code> <strong class=\"purple\">must</strong> be a valid subpass within <code>renderPass</code>"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-layout-01688",
+          "text": " The number of resources in <code>layout</code> accessible to each shader stage that is used by the pipeline <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxPerStageResources</code>"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-02097",
+          "text": " If <code>pStages</code> includes a vertex shader stage, <code>pVertexInputState</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkPipelineVertexInputStateCreateInfo\">VkPipelineVertexInputStateCreateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-02098",
+          "text": " If <code>pStages</code> includes a vertex shader stage, <code>pInputAssemblyState</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkPipelineInputAssemblyStateCreateInfo\">VkPipelineInputAssemblyStateCreateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pNext-pNext",
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkPipelineCompilerControlCreateInfoAMD\">VkPipelineCompilerControlCreateInfoAMD</a>, <a href=\"#VkPipelineCreationFeedbackCreateInfoEXT\">VkPipelineCreationFeedbackCreateInfoEXT</a>, <a href=\"#VkPipelineDiscardRectangleStateCreateInfoEXT\">VkPipelineDiscardRectangleStateCreateInfoEXT</a>, or <a href=\"#VkPipelineRepresentativeFragmentTestStateCreateInfoNV\">VkPipelineRepresentativeFragmentTestStateCreateInfoNV</a>"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-sType-unique",
+          "text": " Each <code>sType</code> member in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineCreateFlagBits\">VkPipelineCreateFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-parameter",
+          "text": " <code>pStages</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>stageCount</code> valid <a href=\"#VkPipelineShaderStageCreateInfo\">VkPipelineShaderStageCreateInfo</a> structures"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pRasterizationState-parameter",
+          "text": " <code>pRasterizationState</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkPipelineRasterizationStateCreateInfo\">VkPipelineRasterizationStateCreateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicState-parameter",
+          "text": " If <code>pDynamicState</code> is not <code>NULL</code>, <code>pDynamicState</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkPipelineDynamicStateCreateInfo\">VkPipelineDynamicStateCreateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-layout-parameter",
+          "text": " <code>layout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipelineLayout\">VkPipelineLayout</a> handle"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-renderPass-parameter",
+          "text": " <code>renderPass</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkRenderPass\">VkRenderPass</a> handle"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-stageCount-arraylength",
+          "text": " <code>stageCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-commonparent",
+          "text": " Each of <code>basePipelineHandle</code>, <code>layout</code>, and <code>renderPass</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "!(VK_NV_mesh_shader)": [
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-stage-00727",
+          "text": " The <code>stage</code> member of one element of <code>pStages</code> <strong class=\"purple\">must</strong> be <code>VK_SHADER_STAGE_VERTEX_BIT</code>"
+        }
+      ],
+      "(VK_NV_mesh_shader)": [
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-02095",
+          "text": " The geometric shader stages provided in <code>pStages</code> <strong class=\"purple\">must</strong> be either from the mesh shading pipeline (<code>stage</code> is <code>VK_SHADER_STAGE_TASK_BIT_NV</code> or <code>VK_SHADER_STAGE_MESH_BIT_NV</code>) or from the primitive shading pipeline (<code>stage</code> is <code>VK_SHADER_STAGE_VERTEX_BIT</code>, <code>VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT</code>, <code>VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT</code>, or <code>VK_SHADER_STAGE_GEOMETRY_BIT</code>)."
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-stage-02096",
+          "text": " The <code>stage</code> member of one element of <code>pStages</code> <strong class=\"purple\">must</strong> be either <code>VK_SHADER_STAGE_VERTEX_BIT</code> or <code>VK_SHADER_STAGE_MESH_BIT_NV</code>."
+        }
+      ],
+      "!(VK_VERSION_1_1,VK_KHR_maintenance2)": [
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-00743",
+          "text": " If rasterization is not disabled and <code>subpass</code> uses a depth/stencil attachment in <code>renderPass</code> that has a layout of <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL</code> in the <code>VkAttachmentReference</code> defined by <code>subpass</code>, the <code>depthWriteEnable</code> member of <code>pDepthStencilState</code> <strong class=\"purple\">must</strong> be <code>VK_FALSE</code>"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-00744",
+          "text": " If rasterization is not disabled and <code>subpass</code> uses a depth/stencil attachment in <code>renderPass</code> that has a layout of <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL</code> in the <code>VkAttachmentReference</code> defined by <code>subpass</code>, the <code>failOp</code>, <code>passOp</code> and <code>depthFailOp</code> members of each of the <code>front</code> and <code>back</code> members of <code>pDepthStencilState</code> <strong class=\"purple\">must</strong> be <code>VK_STENCIL_OP_KEEP</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_maintenance2)": [
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-01756",
+          "text": " If rasterization is not disabled and <code>subpass</code> uses a depth/stencil attachment in <code>renderPass</code> that has a layout of <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL</code> or <code>VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL</code> in the <code>VkAttachmentReference</code> defined by <code>subpass</code>, the <code>depthWriteEnable</code> member of <code>pDepthStencilState</code> <strong class=\"purple\">must</strong> be <code>VK_FALSE</code>"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-01757",
+          "text": " If rasterization is not disabled and <code>subpass</code> uses a depth/stencil attachment in <code>renderPass</code> that has a layout of <code>VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL</code> or <code>VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL</code> in the <code>VkAttachmentReference</code> defined by <code>subpass</code>, the <code>failOp</code>, <code>passOp</code> and <code>depthFailOp</code> members of each of the <code>front</code> and <code>back</code> members of <code>pDepthStencilState</code> <strong class=\"purple\">must</strong> be <code>VK_STENCIL_OP_KEEP</code>"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-01565",
+          "text": " If <code>pStages</code> includes a fragment shader stage and an input attachment was referenced by the <a href=\"#VkRenderPassInputAttachmentAspectCreateInfo\">VkRenderPassInputAttachmentAspectCreateInfo</a> at <code>renderPass</code> create time, its shader code <strong class=\"purple\">must</strong> not read from any aspect that was not specified in the <code>aspectMask</code> of the corresponding <a href=\"#VkInputAttachmentAspectReference\">VkInputAttachmentAspectReference</a> structure."
+        }
+      ],
+      "!(VK_EXT_depth_range_unrestricted)": [
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00755",
+          "text": " If no element of the <code>pDynamicStates</code> member of <code>pDynamicState</code> is <code>VK_DYNAMIC_STATE_DEPTH_BOUNDS</code>, and the <code>depthBoundsTestEnable</code> member of <code>pDepthStencilState</code> is <code>VK_TRUE</code>, the <code>minDepthBounds</code> and <code>maxDepthBounds</code> members of <code>pDepthStencilState</code> <strong class=\"purple\">must</strong> be between <code>0.0</code> and <code>1.0</code>, inclusive"
+        }
+      ],
+      "(VK_EXT_depth_range_unrestricted)": [
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-02510",
+          "text": " If the <code><a href=\"#VK_EXT_depth_range_unrestricted\">VK_EXT_depth_range_unrestricted</a></code> extension is not enabled and no element of the <code>pDynamicStates</code> member of <code>pDynamicState</code> is <code>VK_DYNAMIC_STATE_DEPTH_BOUNDS</code>, and the <code>depthBoundsTestEnable</code> member of <code>pDepthStencilState</code> is <code>VK_TRUE</code>, the <code>minDepthBounds</code> and <code>maxDepthBounds</code> members of <code>pDepthStencilState</code> <strong class=\"purple\">must</strong> be between <code>0.0</code> and <code>1.0</code>, inclusive"
+        }
+      ],
+      "(VK_EXT_sample_locations)": [
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-01521",
+          "text": " If no element of the <code>pDynamicStates</code> member of <code>pDynamicState</code> is <code>VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT</code>, and the <code>sampleLocationsEnable</code> member of a <a href=\"#VkPipelineSampleLocationsStateCreateInfoEXT\">VkPipelineSampleLocationsStateCreateInfoEXT</a> structure included in the <code>pNext</code> chain of <code>pMultisampleState</code> is <code>VK_TRUE</code>, <code>sampleLocationsInfo.sampleLocationGridSize.width</code> <strong class=\"purple\">must</strong> evenly divide <a href=\"#VkMultisamplePropertiesEXT\">VkMultisamplePropertiesEXT</a>::<code>sampleLocationGridSize.width</code> as returned by <a href=\"#vkGetPhysicalDeviceMultisamplePropertiesEXT\">vkGetPhysicalDeviceMultisamplePropertiesEXT</a> with a <code>samples</code> parameter equaling <code>rasterizationSamples</code>"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-01522",
+          "text": " If no element of the <code>pDynamicStates</code> member of <code>pDynamicState</code> is <code>VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT</code>, and the <code>sampleLocationsEnable</code> member of a <a href=\"#VkPipelineSampleLocationsStateCreateInfoEXT\">VkPipelineSampleLocationsStateCreateInfoEXT</a> structure included in the <code>pNext</code> chain of <code>pMultisampleState</code> is <code>VK_TRUE</code>, <code>sampleLocationsInfo.sampleLocationGridSize.height</code> <strong class=\"purple\">must</strong> evenly divide <a href=\"#VkMultisamplePropertiesEXT\">VkMultisamplePropertiesEXT</a>::<code>sampleLocationGridSize.height</code> as returned by <a href=\"#vkGetPhysicalDeviceMultisamplePropertiesEXT\">vkGetPhysicalDeviceMultisamplePropertiesEXT</a> with a <code>samples</code> parameter equaling <code>rasterizationSamples</code>"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-01523",
+          "text": " If no element of the <code>pDynamicStates</code> member of <code>pDynamicState</code> is <code>VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT</code>, and the <code>sampleLocationsEnable</code> member of a <a href=\"#VkPipelineSampleLocationsStateCreateInfoEXT\">VkPipelineSampleLocationsStateCreateInfoEXT</a> structure included in the <code>pNext</code> chain of <code>pMultisampleState</code> is <code>VK_TRUE</code>, <code>sampleLocationsInfo.sampleLocationsPerPixel</code> <strong class=\"purple\">must</strong> equal <code>rasterizationSamples</code>"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-sampleLocationsEnable-01524",
+          "text": " If the <code>sampleLocationsEnable</code> member of a <a href=\"#VkPipelineSampleLocationsStateCreateInfoEXT\">VkPipelineSampleLocationsStateCreateInfoEXT</a> structure included in the <code>pNext</code> chain of <code>pMultisampleState</code> is <code>VK_TRUE</code>, the fragment shader code <strong class=\"purple\">must</strong> not statically use the extended instruction <code>InterpolateAtSample</code>"
+        }
+      ],
+      "(VK_AMD_mixed_attachment_samples)": [
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-01505",
+          "text": " If the <code>VK_AMD_mixed_attachment_samples</code> extension is enabled, and if <code>subpass</code> uses color and/or depth/stencil attachments, then the <code>rasterizationSamples</code> member of <code>pMultisampleState</code> <strong class=\"purple\">must</strong> equal the maximum of the sample counts of those subpass attachments"
+        }
+      ],
+      "(VK_NV_framebuffer_mixed_samples)": [
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-01411",
+          "text": " If the <code>VK_NV_framebuffer_mixed_samples</code> extension is enabled, and if <code>subpass</code> has a depth/stencil attachment and depth test, stencil test, or depth bounds test are enabled, then the <code>rasterizationSamples</code> member of <code>pMultisampleState</code> <strong class=\"purple\">must</strong> be the same as the sample count of the depth/stencil attachment"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-01412",
+          "text": " If the <code>VK_NV_framebuffer_mixed_samples</code> extension is enabled, and if <code>subpass</code> has any color attachments, then the <code>rasterizationSamples</code> member of <code>pMultisampleState</code> <strong class=\"purple\">must</strong> be greater than or equal to the sample count for those subpass attachments"
+        }
+      ],
+      "(VK_NV_coverage_reduction_mode)": [
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-coverageReductionMode-02722",
+          "text": " If the <code>VK_NV_coverage_reduction_mode</code> extension is enabled, the coverage reduction mode specified by <a href=\"#VkPipelineCoverageReductionStateCreateInfoNV\">VkPipelineCoverageReductionStateCreateInfoNV</a>::<code>coverageReductionMode</code>, the <code>rasterizationSamples</code> member of <code>pMultisampleState</code> and the sample counts for the color and depth/stencil attachments (if the subpass has them) <strong class=\"purple\">must</strong> be a valid combination returned by <code>vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-renderPass-00760",
+          "text": " If the <code>renderPass</code> has multiview enabled and <code>subpass</code> has more than one bit set in the view mask and <code>multiviewTessellationShader</code> is not enabled, then <code>pStages</code> <strong class=\"purple\">must</strong> not include tessellation shaders."
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-renderPass-00761",
+          "text": " If the <code>renderPass</code> has multiview enabled and <code>subpass</code> has more than one bit set in the view mask and <code>multiviewGeometryShader</code> is not enabled, then <code>pStages</code> <strong class=\"purple\">must</strong> not include a geometry shader."
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-renderPass-00762",
+          "text": " If the <code>renderPass</code> has multiview enabled and <code>subpass</code> has more than one bit set in the view mask, shaders in the pipeline <strong class=\"purple\">must</strong> not write to the <code>Layer</code> built-in output"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-renderPass-00763",
+          "text": " If the <code>renderPass</code> has multiview enabled, then all shaders <strong class=\"purple\">must</strong> not include variables decorated with the <code>Layer</code> built-in decoration in their interfaces."
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-00764",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> not contain the <code>VK_PIPELINE_CREATE_DISPATCH_BASE</code> flag."
+        }
+      ],
+      "(VK_NV_clip_space_w_scaling)": [
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-01715",
+          "text": " If no element of the <code>pDynamicStates</code> member of <code>pDynamicState</code> is <code>VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV</code>, and the <code>viewportWScalingEnable</code> member of a <a href=\"#VkPipelineViewportWScalingStateCreateInfoNV\">VkPipelineViewportWScalingStateCreateInfoNV</a> structure, included in the <code>pNext</code> chain of <code>pViewportState</code>, is <code>VK_TRUE</code>, the <code>pViewportWScalings</code> member of the <a href=\"#VkPipelineViewportWScalingStateCreateInfoNV\">VkPipelineViewportWScalingStateCreateInfoNV</a> <strong class=\"purple\">must</strong> be a pointer to an array of <a href=\"#VkPipelineViewportWScalingStateCreateInfoNV\">VkPipelineViewportWScalingStateCreateInfoNV</a>::<code>viewportCount</code> valid <a href=\"#VkViewportWScalingNV\">VkViewportWScalingNV</a> structures"
+        }
+      ],
+      "(VK_EXT_transform_feedback)": [
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-02317",
+          "text": " The <code>Xfb</code> execution mode <strong class=\"purple\">can</strong> be specified by only one shader stage in <code>pStages</code>"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-02318",
+          "text": " If any shader stage in <code>pStages</code> specifies <code>Xfb</code> execution mode it <strong class=\"purple\">must</strong> be the last vertex processing stage"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-rasterizationStream-02319",
+          "text": " If a <code>VkPipelineRasterizationStateStreamCreateInfoEXT</code>::<code>rasterizationStream</code> value other than zero is specified, all variables in the output interface of the entry point being compiled decorated with <code>Position</code>, <code>PointSize</code>, <code>ClipDistance</code>, or <code>CullDistance</code> <strong class=\"purple\">must</strong> all be decorated with identical <code>Stream</code> values that match the <code>rasterizationStream</code>"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-rasterizationStream-02320",
+          "text": " If <code>VkPipelineRasterizationStateStreamCreateInfoEXT</code>::<code>rasterizationStream</code> is zero, or not specified, all variables in the output interface of the entry point being compiled decorated with <code>Position</code>, <code>PointSize</code>, <code>ClipDistance</code>, or <code>CullDistance</code> <strong class=\"purple\">must</strong> all be decorated with a <code>Stream</code> value of zero, or <strong class=\"purple\">must</strong> not specify the <code>Stream</code> decoration"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-geometryStreams-02321",
+          "text": " If the last vertex processing stage is a geometry shader, and that geometry shader uses the <code>GeometryStreams</code> capability, then <code>VkPhysicalDeviceTransformFeedbackFeaturesEXT</code>::<code>geometryStreams</code> feature <strong class=\"purple\">must</strong> be enabled"
+        }
+      ],
+      "(VK_EXT_transform_feedback)+(VK_NV_mesh_shader)": [
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-None-02322",
+          "text": " If there are any mesh shader stages in the pipeline there <strong class=\"purple\">must</strong> not be any shader stage in the pipeline with a <code>Xfb</code> execution mode."
+        }
+      ],
+      "(VK_EXT_line_rasterization)": [
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-lineRasterizationMode-02766",
+          "text": " If the <code>lineRasterizationMode</code> member of a <a href=\"#VkPipelineRasterizationLineStateCreateInfoEXT\">VkPipelineRasterizationLineStateCreateInfoEXT</a> structure included in the <code>pNext</code> chain of <code>pRasterizationState</code> is <code>VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT</code> or <code>VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT</code> and if rasterization is enabled, then the <code>alphaToCoverageEnable</code>, <code>alphaToOneEnable</code>, and <code>sampleShadingEnable</code> members of <code>pMultisampleState</code> <strong class=\"purple\">must</strong> all be <code>VK_FALSE</code>"
+        },
+        {
+          "vuid": "VUID-VkGraphicsPipelineCreateInfo-stippledLineEnable-02767",
+          "text": " If the <code>stippledLineEnable</code> member of <a href=\"#VkPipelineRasterizationLineStateCreateInfoEXT\">VkPipelineRasterizationLineStateCreateInfoEXT</a> is <code>VK_TRUE</code> and no element of the <code>pDynamicStates</code> member of <code>pDynamicState</code> is <code>VK_DYNAMIC_STATE_LINE_STIPPLE_EXT</code>, then the <code>lineStippleFactor</code> member of <a href=\"#VkPipelineRasterizationLineStateCreateInfoEXT\">VkPipelineRasterizationLineStateCreateInfoEXT</a> <strong class=\"purple\">must</strong> be in the range <span class=\"eq\">[1,256]</span>"
+        }
+      ]
+    },
+    "VkPipelineDynamicStateCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkPipelineDynamicStateCreateInfo-pDynamicStates-01442",
+          "text": " Each element of <code>pDynamicStates</code> <strong class=\"purple\">must</strong> be unique"
+        },
+        {
+          "vuid": "VUID-VkPipelineDynamicStateCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineDynamicStateCreateInfo-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineDynamicStateCreateInfo-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineDynamicStateCreateInfo-pDynamicStates-parameter",
+          "text": " If <code>dynamicStateCount</code> is not <code>0</code>, <code>pDynamicStates</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>dynamicStateCount</code> valid <a href=\"#VkDynamicState\">VkDynamicState</a> values"
+        }
+      ]
+    },
+    "vkDestroyPipeline": {
+      "core": [
+        {
+          "vuid": "VUID-vkDestroyPipeline-pipeline-00765",
+          "text": " All submitted commands that refer to <code>pipeline</code> <strong class=\"purple\">must</strong> have completed execution"
+        },
+        {
+          "vuid": "VUID-vkDestroyPipeline-pipeline-00766",
+          "text": " If <code>VkAllocationCallbacks</code> were provided when <code>pipeline</code> was created, a compatible set of callbacks <strong class=\"purple\">must</strong> be provided here"
+        },
+        {
+          "vuid": "VUID-vkDestroyPipeline-pipeline-00767",
+          "text": " If no <code>VkAllocationCallbacks</code> were provided when <code>pipeline</code> was created, <code>pAllocator</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroyPipeline-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyPipeline-pipeline-parameter",
+          "text": " If <code>pipeline</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>pipeline</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipeline\">VkPipeline</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyPipeline-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkDestroyPipeline-pipeline-parent",
+          "text": " If <code>pipeline</code> is a valid handle, it <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkCreatePipelineCache": {
+      "core": [
+        {
+          "vuid": "VUID-vkCreatePipelineCache-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreatePipelineCache-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkPipelineCacheCreateInfo\">VkPipelineCacheCreateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreatePipelineCache-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreatePipelineCache-pPipelineCache-parameter",
+          "text": " <code>pPipelineCache</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkPipelineCache\">VkPipelineCache</a> handle"
+        }
+      ]
+    },
+    "VkPipelineCacheCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkPipelineCacheCreateInfo-initialDataSize-00768",
+          "text": " If <code>initialDataSize</code> is not <code>0</code>, it <strong class=\"purple\">must</strong> be equal to the size of <code>pInitialData</code>, as returned by <code>vkGetPipelineCacheData</code> when <code>pInitialData</code> was originally retrieved"
+        },
+        {
+          "vuid": "VUID-VkPipelineCacheCreateInfo-initialDataSize-00769",
+          "text": " If <code>initialDataSize</code> is not <code>0</code>, <code>pInitialData</code> <strong class=\"purple\">must</strong> have been retrieved from a previous call to <code>vkGetPipelineCacheData</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineCacheCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineCacheCreateInfo-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineCacheCreateInfo-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineCacheCreateInfo-pInitialData-parameter",
+          "text": " If <code>initialDataSize</code> is not <code>0</code>, <code>pInitialData</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>initialDataSize</code> bytes"
+        }
+      ]
+    },
+    "vkMergePipelineCaches": {
+      "core": [
+        {
+          "vuid": "VUID-vkMergePipelineCaches-dstCache-00770",
+          "text": " <code>dstCache</code> <strong class=\"purple\">must</strong> not appear in the list of source caches"
+        },
+        {
+          "vuid": "VUID-vkMergePipelineCaches-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkMergePipelineCaches-dstCache-parameter",
+          "text": " <code>dstCache</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipelineCache\">VkPipelineCache</a> handle"
+        },
+        {
+          "vuid": "VUID-vkMergePipelineCaches-pSrcCaches-parameter",
+          "text": " <code>pSrcCaches</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>srcCacheCount</code> valid <a href=\"#VkPipelineCache\">VkPipelineCache</a> handles"
+        },
+        {
+          "vuid": "VUID-vkMergePipelineCaches-srcCacheCount-arraylength",
+          "text": " <code>srcCacheCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkMergePipelineCaches-dstCache-parent",
+          "text": " <code>dstCache</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        },
+        {
+          "vuid": "VUID-vkMergePipelineCaches-pSrcCaches-parent",
+          "text": " Each element of <code>pSrcCaches</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkGetPipelineCacheData": {
+      "core": [
+        {
+          "vuid": "VUID-vkGetPipelineCacheData-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPipelineCacheData-pipelineCache-parameter",
+          "text": " <code>pipelineCache</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipelineCache\">VkPipelineCache</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPipelineCacheData-pDataSize-parameter",
+          "text": " <code>pDataSize</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>size_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetPipelineCacheData-pData-parameter",
+          "text": " If the value referenced by <code>pDataSize</code> is not <code>0</code>, and <code>pData</code> is not <code>NULL</code>, <code>pData</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pDataSize</code> bytes"
+        },
+        {
+          "vuid": "VUID-vkGetPipelineCacheData-pipelineCache-parent",
+          "text": " <code>pipelineCache</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkDestroyPipelineCache": {
+      "core": [
+        {
+          "vuid": "VUID-vkDestroyPipelineCache-pipelineCache-00771",
+          "text": " If <code>VkAllocationCallbacks</code> were provided when <code>pipelineCache</code> was created, a compatible set of callbacks <strong class=\"purple\">must</strong> be provided here"
+        },
+        {
+          "vuid": "VUID-vkDestroyPipelineCache-pipelineCache-00772",
+          "text": " If no <code>VkAllocationCallbacks</code> were provided when <code>pipelineCache</code> was created, <code>pAllocator</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroyPipelineCache-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyPipelineCache-pipelineCache-parameter",
+          "text": " If <code>pipelineCache</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>pipelineCache</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipelineCache\">VkPipelineCache</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyPipelineCache-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkDestroyPipelineCache-pipelineCache-parent",
+          "text": " If <code>pipelineCache</code> is a valid handle, it <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "VkSpecializationInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkSpecializationInfo-offset-00773",
+          "text": " The <code>offset</code> member of each element of <code>pMapEntries</code> <strong class=\"purple\">must</strong> be less than <code>dataSize</code>"
+        },
+        {
+          "vuid": "VUID-VkSpecializationInfo-pMapEntries-00774",
+          "text": " The <code>size</code> member of each element of <code>pMapEntries</code> <strong class=\"purple\">must</strong> be less than or equal to <code>dataSize</code> minus <code>offset</code>"
+        },
+        {
+          "vuid": "VUID-VkSpecializationInfo-pMapEntries-parameter",
+          "text": " If <code>mapEntryCount</code> is not <code>0</code>, <code>pMapEntries</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>mapEntryCount</code> valid <a href=\"#VkSpecializationMapEntry\">VkSpecializationMapEntry</a> structures"
+        },
+        {
+          "vuid": "VUID-VkSpecializationInfo-pData-parameter",
+          "text": " If <code>dataSize</code> is not <code>0</code>, <code>pData</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>dataSize</code> bytes"
+        }
+      ]
+    },
+    "VkSpecializationMapEntry": {
+      "core": [
+        {
+          "vuid": "VUID-VkSpecializationMapEntry-constantID-00776",
+          "text": " For a <code>constantID</code> specialization constant declared in a shader, <code>size</code> <strong class=\"purple\">must</strong> match the byte size of the <code>constantID</code>. If the specialization constant is of type <code>boolean</code>, <code>size</code> <strong class=\"purple\">must</strong> be the byte size of <code>VkBool32</code>"
+        }
+      ]
+    },
+    "vkCmdBindPipeline": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdBindPipeline-pipelineBindPoint-00777",
+          "text": " If <code>pipelineBindPoint</code> is <code>VK_PIPELINE_BIND_POINT_COMPUTE</code>, the <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdBindPipeline-pipelineBindPoint-00778",
+          "text": " If <code>pipelineBindPoint</code> is <code>VK_PIPELINE_BIND_POINT_GRAPHICS</code>, the <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdBindPipeline-pipelineBindPoint-00779",
+          "text": " If <code>pipelineBindPoint</code> is <code>VK_PIPELINE_BIND_POINT_COMPUTE</code>, <code>pipeline</code> <strong class=\"purple\">must</strong> be a compute pipeline"
+        },
+        {
+          "vuid": "VUID-vkCmdBindPipeline-pipelineBindPoint-00780",
+          "text": " If <code>pipelineBindPoint</code> is <code>VK_PIPELINE_BIND_POINT_GRAPHICS</code>, <code>pipeline</code> <strong class=\"purple\">must</strong> be a graphics pipeline"
+        },
+        {
+          "vuid": "VUID-vkCmdBindPipeline-pipeline-00781",
+          "text": " If the <a href=\"#features-variableMultisampleRate\">variable multisample rate</a> feature is not supported, <code>pipeline</code> is a graphics pipeline, the current subpass has no attachments, and this is not the first call to this function with a graphics pipeline after transitioning to the current subpass, then the sample count specified by this pipeline <strong class=\"purple\">must</strong> match that set in the previous pipeline"
+        },
+        {
+          "vuid": "VUID-vkCmdBindPipeline-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdBindPipeline-pipelineBindPoint-parameter",
+          "text": " <code>pipelineBindPoint</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipelineBindPoint\">VkPipelineBindPoint</a> value"
+        },
+        {
+          "vuid": "VUID-vkCmdBindPipeline-pipeline-parameter",
+          "text": " <code>pipeline</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipeline\">VkPipeline</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdBindPipeline-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdBindPipeline-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdBindPipeline-commonparent",
+          "text": " Both of <code>commandBuffer</code>, and <code>pipeline</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_EXT_sample_locations)": [
+        {
+          "vuid": "VUID-vkCmdBindPipeline-variableSampleLocations-01525",
+          "text": " If <a href=\"#VkPhysicalDeviceSampleLocationsPropertiesEXT\">VkPhysicalDeviceSampleLocationsPropertiesEXT</a>::<code>variableSampleLocations</code> is <code>VK_FALSE</code>, and <code>pipeline</code> is a graphics pipeline created with a <a href=\"#VkPipelineSampleLocationsStateCreateInfoEXT\">VkPipelineSampleLocationsStateCreateInfoEXT</a> structure having its <code>sampleLocationsEnable</code> member set to <code>VK_TRUE</code> but without <code>VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT</code> enabled then the current render pass instance <strong class=\"purple\">must</strong> have been begun by specifying a <a href=\"#VkRenderPassSampleLocationsBeginInfoEXT\">VkRenderPassSampleLocationsBeginInfoEXT</a> structure whose <code>pPostSubpassSampleLocations</code> member contains an element with a <code>subpassIndex</code> matching the current subpass index and the <code>sampleLocationsInfo</code> member of that element <strong class=\"purple\">must</strong> match the <code>sampleLocationsInfo</code> specified in <a href=\"#VkPipelineSampleLocationsStateCreateInfoEXT\">VkPipelineSampleLocationsStateCreateInfoEXT</a> when the pipeline was created"
+        }
+      ],
+      "(VK_EXT_transform_feedback)": [
+        {
+          "vuid": "VUID-vkCmdBindPipeline-None-02323",
+          "text": " This command <strong class=\"purple\">must</strong> not be recorded when transform feedback is active"
+        }
+      ],
+      "(VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-vkCmdBindPipeline-pipelineBindPoint-02391",
+          "text": " If <code>pipelineBindPoint</code> is <code>VK_PIPELINE_BIND_POINT_RAY_TRACING_NV</code>, the <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdBindPipeline-pipelineBindPoint-02392",
+          "text": " If <code>pipelineBindPoint</code> is <code>VK_PIPELINE_BIND_POINT_RAY_TRACING_NV</code>, the <code>pipeline</code> <strong class=\"purple\">must</strong> be a ray tracing pipeline"
+        }
+      ]
+    },
+    "vkGetPipelineExecutablePropertiesKHR": {
+      "(VK_KHR_pipeline_executable_properties)": [
+        {
+          "vuid": "VUID-vkGetPipelineExecutablePropertiesKHR-pipelineExecutableInfo-03270",
+          "text": " <a href=\"#features-pipelineExecutableInfo\"><code>pipelineExecutableInfo</code></a> <strong class=\"purple\">must</strong> be enabled."
+        },
+        {
+          "vuid": "VUID-vkGetPipelineExecutablePropertiesKHR-pipeline-03271",
+          "text": " <code>pipeline</code> member of <code>pPipelineInfo</code> <strong class=\"purple\">must</strong> have been created with <code>device</code>."
+        },
+        {
+          "vuid": "VUID-vkGetPipelineExecutablePropertiesKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPipelineExecutablePropertiesKHR-pPipelineInfo-parameter",
+          "text": " <code>pPipelineInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkPipelineInfoKHR\">VkPipelineInfoKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetPipelineExecutablePropertiesKHR-pExecutableCount-parameter",
+          "text": " <code>pExecutableCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetPipelineExecutablePropertiesKHR-pProperties-parameter",
+          "text": " If the value referenced by <code>pExecutableCount</code> is not <code>0</code>, and <code>pProperties</code> is not <code>NULL</code>, <code>pProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pExecutableCount</code> <a href=\"#VkPipelineExecutablePropertiesKHR\">VkPipelineExecutablePropertiesKHR</a> structures"
+        }
+      ]
+    },
+    "VkPipelineInfoKHR": {
+      "(VK_KHR_pipeline_executable_properties)": [
+        {
+          "vuid": "VUID-VkPipelineInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineInfoKHR-pipeline-parameter",
+          "text": " <code>pipeline</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipeline\">VkPipeline</a> handle"
+        }
+      ]
+    },
+    "VkPipelineExecutablePropertiesKHR": {
+      "(VK_KHR_pipeline_executable_properties)": [
+        {
+          "vuid": "VUID-VkPipelineExecutablePropertiesKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineExecutablePropertiesKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "vkGetPipelineExecutableStatisticsKHR": {
+      "(VK_KHR_pipeline_executable_properties)": [
+        {
+          "vuid": "VUID-vkGetPipelineExecutableStatisticsKHR-pipelineExecutableInfo-03272",
+          "text": " <a href=\"#features-pipelineExecutableInfo\"><code>pipelineExecutableInfo</code></a> <strong class=\"purple\">must</strong> be enabled."
+        },
+        {
+          "vuid": "VUID-vkGetPipelineExecutableStatisticsKHR-pipeline-03273",
+          "text": " <code>pipeline</code> member of <code>pExecutableInfo</code> <strong class=\"purple\">must</strong> have been created with <code>device</code>."
+        },
+        {
+          "vuid": "VUID-vkGetPipelineExecutableStatisticsKHR-pipeline-03274",
+          "text": " <code>pipeline</code> member of <code>pExecutableInfo</code> <strong class=\"purple\">must</strong> have been created with <code>VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR</code> set in the <code>flags</code> field of <a href=\"#VkGraphicsPipelineCreateInfo\">VkGraphicsPipelineCreateInfo</a> or <a href=\"#VkComputePipelineCreateInfo\">VkComputePipelineCreateInfo</a>."
+        },
+        {
+          "vuid": "VUID-vkGetPipelineExecutableStatisticsKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPipelineExecutableStatisticsKHR-pExecutableInfo-parameter",
+          "text": " <code>pExecutableInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkPipelineExecutableInfoKHR\">VkPipelineExecutableInfoKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetPipelineExecutableStatisticsKHR-pStatisticCount-parameter",
+          "text": " <code>pStatisticCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetPipelineExecutableStatisticsKHR-pStatistics-parameter",
+          "text": " If the value referenced by <code>pStatisticCount</code> is not <code>0</code>, and <code>pStatistics</code> is not <code>NULL</code>, <code>pStatistics</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pStatisticCount</code> <a href=\"#VkPipelineExecutableStatisticKHR\">VkPipelineExecutableStatisticKHR</a> structures"
+        }
+      ]
+    },
+    "VkPipelineExecutableInfoKHR": {
+      "(VK_KHR_pipeline_executable_properties)": [
+        {
+          "vuid": "VUID-VkPipelineExecutableInfoKHR-executableIndex-03275",
+          "text": " <code>executableIndex</code> <strong class=\"purple\">must</strong> be less than the number of executables associated with <code>pipeline</code> as returned in the <code>pExecutableCount</code> parameter of <code>vkGetPipelineExecutablePropertiesKHR</code>."
+        },
+        {
+          "vuid": "VUID-VkPipelineExecutableInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineExecutableInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineExecutableInfoKHR-pipeline-parameter",
+          "text": " <code>pipeline</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipeline\">VkPipeline</a> handle"
+        }
+      ]
+    },
+    "VkPipelineExecutableStatisticKHR": {
+      "(VK_KHR_pipeline_executable_properties)": [
+        {
+          "vuid": "VUID-VkPipelineExecutableStatisticKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineExecutableStatisticKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "vkGetPipelineExecutableInternalRepresentationsKHR": {
+      "(VK_KHR_pipeline_executable_properties)": [
+        {
+          "vuid": "VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pipelineExecutableInfo-03276",
+          "text": " <a href=\"#features-pipelineExecutableInfo\"><code>pipelineExecutableInfo</code></a> <strong class=\"purple\">must</strong> be enabled."
+        },
+        {
+          "vuid": "VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pipeline-03277",
+          "text": " <code>pipeline</code> member of <code>pExecutableInfo</code> <strong class=\"purple\">must</strong> have been created with <code>device</code>."
+        },
+        {
+          "vuid": "VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pipeline-03278",
+          "text": " <code>pipeline</code> member of <code>pExecutableInfo</code> <strong class=\"purple\">must</strong> have been created with <code>VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR</code> set in the <code>flags</code> field of <a href=\"#VkGraphicsPipelineCreateInfo\">VkGraphicsPipelineCreateInfo</a> or <a href=\"#VkComputePipelineCreateInfo\">VkComputePipelineCreateInfo</a>."
+        },
+        {
+          "vuid": "VUID-vkGetPipelineExecutableInternalRepresentationsKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pExecutableInfo-parameter",
+          "text": " <code>pExecutableInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkPipelineExecutableInfoKHR\">VkPipelineExecutableInfoKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pInternalRepresentationCount-parameter",
+          "text": " <code>pInternalRepresentationCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pInternalRepresentations-parameter",
+          "text": " If the value referenced by <code>pInternalRepresentationCount</code> is not <code>0</code>, and <code>pInternalRepresentations</code> is not <code>NULL</code>, <code>pInternalRepresentations</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pInternalRepresentationCount</code> <a href=\"#VkPipelineExecutableInternalRepresentationKHR\">VkPipelineExecutableInternalRepresentationKHR</a> structures"
+        }
+      ]
+    },
+    "VkPipelineExecutableInternalRepresentationKHR": {
+      "(VK_KHR_pipeline_executable_properties)": [
+        {
+          "vuid": "VUID-VkPipelineExecutableInternalRepresentationKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineExecutableInternalRepresentationKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "vkGetShaderInfoAMD": {
+      "(VK_AMD_shader_info)": [
+        {
+          "vuid": "VUID-vkGetShaderInfoAMD-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetShaderInfoAMD-pipeline-parameter",
+          "text": " <code>pipeline</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipeline\">VkPipeline</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetShaderInfoAMD-shaderStage-parameter",
+          "text": " <code>shaderStage</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkShaderStageFlagBits\">VkShaderStageFlagBits</a> value"
+        },
+        {
+          "vuid": "VUID-vkGetShaderInfoAMD-infoType-parameter",
+          "text": " <code>infoType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkShaderInfoTypeAMD\">VkShaderInfoTypeAMD</a> value"
+        },
+        {
+          "vuid": "VUID-vkGetShaderInfoAMD-pInfoSize-parameter",
+          "text": " <code>pInfoSize</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>size_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetShaderInfoAMD-pInfo-parameter",
+          "text": " If the value referenced by <code>pInfoSize</code> is not <code>0</code>, and <code>pInfo</code> is not <code>NULL</code>, <code>pInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pInfoSize</code> bytes"
+        },
+        {
+          "vuid": "VUID-vkGetShaderInfoAMD-pipeline-parent",
+          "text": " <code>pipeline</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "VkPipelineCompilerControlCreateInfoAMD": {
+      "(VK_AMD_pipeline_compiler_control)": [
+        {
+          "vuid": "VUID-VkPipelineCompilerControlCreateInfoAMD-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineCompilerControlCreateInfoAMD-compilerControlFlags-zerobitmask",
+          "text": " <code>compilerControlFlags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        }
+      ]
+    },
+    "vkCreateRayTracingPipelinesNV": {
+      "(VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-vkCreateRayTracingPipelinesNV-flags-02402",
+          "text": " If the <code>flags</code> member of any element of <code>pCreateInfos</code> contains the <code>VK_PIPELINE_CREATE_DERIVATIVE_BIT</code> flag, and the <code>basePipelineIndex</code> member of that same element is not <code>-1</code>, <code>basePipelineIndex</code> <strong class=\"purple\">must</strong> be less than the index into <code>pCreateInfos</code> that corresponds to that element"
+        },
+        {
+          "vuid": "VUID-vkCreateRayTracingPipelinesNV-flags-02403",
+          "text": " If the <code>flags</code> member of any element of <code>pCreateInfos</code> contains the <code>VK_PIPELINE_CREATE_DERIVATIVE_BIT</code> flag, the base pipeline <strong class=\"purple\">must</strong> have been created with the <code>VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT</code> flag set"
+        },
+        {
+          "vuid": "VUID-vkCreateRayTracingPipelinesNV-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateRayTracingPipelinesNV-pipelineCache-parameter",
+          "text": " If <code>pipelineCache</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>pipelineCache</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipelineCache\">VkPipelineCache</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateRayTracingPipelinesNV-pCreateInfos-parameter",
+          "text": " <code>pCreateInfos</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>createInfoCount</code> valid <a href=\"#VkRayTracingPipelineCreateInfoNV\">VkRayTracingPipelineCreateInfoNV</a> structures"
+        },
+        {
+          "vuid": "VUID-vkCreateRayTracingPipelinesNV-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateRayTracingPipelinesNV-pPipelines-parameter",
+          "text": " <code>pPipelines</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>createInfoCount</code> <a href=\"#VkPipeline\">VkPipeline</a> handles"
+        },
+        {
+          "vuid": "VUID-vkCreateRayTracingPipelinesNV-createInfoCount-arraylength",
+          "text": " <code>createInfoCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCreateRayTracingPipelinesNV-pipelineCache-parent",
+          "text": " If <code>pipelineCache</code> is a valid handle, it <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "VkRayTracingPipelineCreateInfoNV": {
+      "(VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-02404",
+          "text": " If <code>flags</code> contains the <code>VK_PIPELINE_CREATE_DERIVATIVE_BIT</code> flag, and <code>basePipelineIndex</code> is <code>-1</code>, <code>basePipelineHandle</code> <strong class=\"purple\">must</strong> be a valid handle to a ray tracing <code>VkPipeline</code>"
+        },
+        {
+          "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-02405",
+          "text": " If <code>flags</code> contains the <code>VK_PIPELINE_CREATE_DERIVATIVE_BIT</code> flag, and <code>basePipelineHandle</code> is <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>basePipelineIndex</code> <strong class=\"purple\">must</strong> be a valid index into the calling command&#8217;s <code>pCreateInfos</code> parameter"
+        },
+        {
+          "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-02406",
+          "text": " If <code>flags</code> contains the <code>VK_PIPELINE_CREATE_DERIVATIVE_BIT</code> flag, and <code>basePipelineIndex</code> is not <code>-1</code>, <code>basePipelineHandle</code> <strong class=\"purple\">must</strong> be <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>"
+        },
+        {
+          "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-02407",
+          "text": " If <code>flags</code> contains the <code>VK_PIPELINE_CREATE_DERIVATIVE_BIT</code> flag, and <code>basePipelineHandle</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>basePipelineIndex</code> <strong class=\"purple\">must</strong> be <code>-1</code>"
+        },
+        {
+          "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-stage-02408",
+          "text": " The <code>stage</code> member of one element of <code>pStages</code> <strong class=\"purple\">must</strong> be <code>VK_SHADER_STAGE_RAYGEN_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-pStages-02409",
+          "text": " The shader code for the entry points identified by <code>pStages</code>, and the rest of the state identified by this structure <strong class=\"purple\">must</strong> adhere to the pipeline linking rules described in the <a href=\"#interfaces\">Shader Interfaces</a> chapter"
+        },
+        {
+          "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-layout-02410",
+          "text": " <code>layout</code> <strong class=\"purple\">must</strong> be <a href=\"#descriptorsets-pipelinelayout-consistency\">consistent</a> with all shaders specified in <code>pStages</code>"
+        },
+        {
+          "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-layout-02411",
+          "text": " The number of resources in <code>layout</code> accessible to each shader stage that is used by the pipeline <strong class=\"purple\">must</strong> be less than or equal to <a href=\"#VkPhysicalDeviceLimits\">VkPhysicalDeviceLimits</a>::<code>maxPerStageResources</code>"
+        },
+        {
+          "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-maxRecursionDepth-02412",
+          "text": " <code>maxRecursionDepth</code> <strong class=\"purple\">must</strong> be less than or equal to <a href=\"#VkPhysicalDeviceRayTracingPropertiesNV\">VkPhysicalDeviceRayTracingPropertiesNV</a>::<code>maxRecursionDepth</code>"
+        },
+        {
+          "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkPipelineCreationFeedbackCreateInfoEXT\">VkPipelineCreationFeedbackCreateInfoEXT</a>"
+        },
+        {
+          "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkPipelineCreateFlagBits\">VkPipelineCreateFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-pStages-parameter",
+          "text": " <code>pStages</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>stageCount</code> valid <a href=\"#VkPipelineShaderStageCreateInfo\">VkPipelineShaderStageCreateInfo</a> structures"
+        },
+        {
+          "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-pGroups-parameter",
+          "text": " <code>pGroups</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>groupCount</code> valid <a href=\"#VkRayTracingShaderGroupCreateInfoNV\">VkRayTracingShaderGroupCreateInfoNV</a> structures"
+        },
+        {
+          "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-layout-parameter",
+          "text": " <code>layout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipelineLayout\">VkPipelineLayout</a> handle"
+        },
+        {
+          "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-stageCount-arraylength",
+          "text": " <code>stageCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-groupCount-arraylength",
+          "text": " <code>groupCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-commonparent",
+          "text": " Both of <code>basePipelineHandle</code>, and <code>layout</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
+    "VkRayTracingShaderGroupCreateInfoNV": {
+      "(VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-type-02413",
+          "text": " If <code>type</code> is <code>VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV</code> then <code>generalShader</code> <strong class=\"purple\">must</strong> be a valid index into <code>pStages</code> referring to a shader of <code>VK_SHADER_STAGE_RAYGEN_BIT_NV</code>, <code>VK_SHADER_STAGE_MISS_BIT_NV</code>, or <code>VK_SHADER_STAGE_CALLABLE_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-type-02414",
+          "text": " If <code>type</code> is <code>VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV</code> then <code>closestHitShader</code>, <code>anyHitShader</code>, and <code>intersectionShader</code> <strong class=\"purple\">must</strong> be <code>VK_SHADER_UNUSED_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-type-02415",
+          "text": " If <code>type</code> is <code>VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV</code> then <code>intersectionShader</code> <strong class=\"purple\">must</strong> be a valid index into <code>pStages</code> referring to a shader of <code>VK_SHADER_STAGE_INTERSECTION_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-type-02416",
+          "text": " If <code>type</code> is <code>VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV</code> then <code>intersectionShader</code> <strong class=\"purple\">must</strong> be <code>VK_SHADER_UNUSED_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-closestHitShader-02417",
+          "text": " <code>closestHitShader</code> <strong class=\"purple\">must</strong> be either <code>VK_SHADER_UNUSED_NV</code> or a valid index into <code>pStages</code> referring to a shader of <code>VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-anyHitShader-02418",
+          "text": " <code>anyHitShader</code> <strong class=\"purple\">must</strong> be either <code>VK_SHADER_UNUSED_NV</code> or a valid index into <code>pStages</code> referring to a shader of <code>VK_SHADER_STAGE_ANY_HIT_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkRayTracingShaderGroupCreateInfoNV-type-parameter",
+          "text": " <code>type</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkRayTracingShaderGroupTypeNV\">VkRayTracingShaderGroupTypeNV</a> value"
+        }
+      ]
+    },
+    "vkGetRayTracingShaderGroupHandlesNV": {
+      "(VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-vkGetRayTracingShaderGroupHandlesNV-firstGroup-02419",
+          "text": " The sum of <code>firstGroup</code> and <code>groupCount</code> <strong class=\"purple\">must</strong> be less than the number of shader groups in <code>pipeline</code>."
+        },
+        {
+          "vuid": "VUID-vkGetRayTracingShaderGroupHandlesNV-dataSize-02420",
+          "text": " <code>dataSize</code> <strong class=\"purple\">must</strong> be at least <span class=\"eq\"><code>VkPhysicalDeviceRayTracingPropertiesNV</code>::<code>shaderGroupHandleSize</code> {times} <code>groupCount</code></span>"
+        },
+        {
+          "vuid": "VUID-vkGetRayTracingShaderGroupHandlesNV-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetRayTracingShaderGroupHandlesNV-pipeline-parameter",
+          "text": " <code>pipeline</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipeline\">VkPipeline</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetRayTracingShaderGroupHandlesNV-pData-parameter",
+          "text": " <code>pData</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>dataSize</code> bytes"
+        },
+        {
+          "vuid": "VUID-vkGetRayTracingShaderGroupHandlesNV-dataSize-arraylength",
+          "text": " <code>dataSize</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkGetRayTracingShaderGroupHandlesNV-pipeline-parent",
+          "text": " <code>pipeline</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkCompileDeferredNV": {
+      "(VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-vkCompileDeferredNV-pipeline-02237",
+          "text": " <code>pipeline</code> <strong class=\"purple\">must</strong> have been created with <code>VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-vkCompileDeferredNV-shader-02238",
+          "text": " <code>shader</code> <strong class=\"purple\">must</strong> not have been called as a deferred compile before"
+        },
+        {
+          "vuid": "VUID-vkCompileDeferredNV-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCompileDeferredNV-pipeline-parameter",
+          "text": " <code>pipeline</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipeline\">VkPipeline</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCompileDeferredNV-pipeline-parent",
+          "text": " <code>pipeline</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "VkPipelineCreationFeedbackCreateInfoEXT": {
+      "(VK_EXT_pipeline_creation_feedback)": [
+        {
+          "vuid": "VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02668",
+          "text": " When chained to <a href=\"#VkGraphicsPipelineCreateInfo\">VkGraphicsPipelineCreateInfo</a>, <a href=\"#VkPipelineCreationFeedbackCreateInfoEXT\">VkPipelineCreationFeedbackCreateInfoEXT</a>::<code>pipelineStageCreationFeedbackCount</code> <strong class=\"purple\">must</strong> equal <a href=\"#VkGraphicsPipelineCreateInfo\">VkGraphicsPipelineCreateInfo</a>::<code>stageCount</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02669",
+          "text": " When chained to <a href=\"#VkComputePipelineCreateInfo\">VkComputePipelineCreateInfo</a>, <a href=\"#VkPipelineCreationFeedbackCreateInfoEXT\">VkPipelineCreationFeedbackCreateInfoEXT</a>::<code>pipelineStageCreationFeedbackCount</code> <strong class=\"purple\">must</strong> equal 1"
+        },
+        {
+          "vuid": "VUID-VkPipelineCreationFeedbackCreateInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineCreationFeedbackCreateInfoEXT-pPipelineCreationFeedback-parameter",
+          "text": " <code>pPipelineCreationFeedback</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkPipelineCreationFeedbackEXT\">VkPipelineCreationFeedbackEXT</a> structure"
+        },
+        {
+          "vuid": "VUID-VkPipelineCreationFeedbackCreateInfoEXT-pPipelineStageCreationFeedbacks-parameter",
+          "text": " <code>pPipelineStageCreationFeedbacks</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pipelineStageCreationFeedbackCount</code> <a href=\"#VkPipelineCreationFeedbackEXT\">VkPipelineCreationFeedbackEXT</a> structures"
+        },
+        {
+          "vuid": "VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-arraylength",
+          "text": " <code>pipelineStageCreationFeedbackCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ],
+      "(VK_EXT_pipeline_creation_feedback)+(VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02670",
+          "text": " When chained to <a href=\"#VkRayTracingPipelineCreateInfoNV\">VkRayTracingPipelineCreateInfoNV</a>, <a href=\"#VkPipelineCreationFeedbackCreateInfoEXT\">VkPipelineCreationFeedbackCreateInfoEXT</a>::<code>pipelineStageCreationFeedbackCount</code> <strong class=\"purple\">must</strong> equal <a href=\"#VkRayTracingPipelineCreateInfoNV\">VkRayTracingPipelineCreateInfoNV</a>::<code>stageCount</code>"
+        }
+      ]
+    },
+    "VkAllocationCallbacks": {
+      "core": [
+        {
+          "vuid": "VUID-VkAllocationCallbacks-pfnAllocation-00632",
+          "text": " <code>pfnAllocation</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid user-defined <a href=\"#PFN_vkAllocationFunction\">PFN_vkAllocationFunction</a>"
+        },
+        {
+          "vuid": "VUID-VkAllocationCallbacks-pfnReallocation-00633",
+          "text": " <code>pfnReallocation</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid user-defined <a href=\"#PFN_vkReallocationFunction\">PFN_vkReallocationFunction</a>"
+        },
+        {
+          "vuid": "VUID-VkAllocationCallbacks-pfnFree-00634",
+          "text": " <code>pfnFree</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid user-defined <a href=\"#PFN_vkFreeFunction\">PFN_vkFreeFunction</a>"
+        },
+        {
+          "vuid": "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635",
+          "text": " If either of <code>pfnInternalAllocation</code> or <code>pfnInternalFree</code> is not <code>NULL</code>, both <strong class=\"purple\">must</strong> be valid callbacks"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceMemoryProperties": {
+      "core": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceMemoryProperties-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceMemoryProperties-pMemoryProperties-parameter",
+          "text": " <code>pMemoryProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkPhysicalDeviceMemoryProperties\">VkPhysicalDeviceMemoryProperties</a> structure"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceMemoryProperties2": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceMemoryProperties2-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceMemoryProperties2-pMemoryProperties-parameter",
+          "text": " <code>pMemoryProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkPhysicalDeviceMemoryProperties2\">VkPhysicalDeviceMemoryProperties2</a> structure"
+        }
+      ]
+    },
+    "VkPhysicalDeviceMemoryProperties2": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceMemoryProperties2-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2</code>"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceMemoryProperties2-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkPhysicalDeviceMemoryBudgetPropertiesEXT\">VkPhysicalDeviceMemoryBudgetPropertiesEXT</a>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceMemoryBudgetPropertiesEXT": {
+      "(VK_EXT_memory_budget)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceMemoryBudgetPropertiesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT</code>"
+        }
+      ]
+    },
+    "vkAllocateMemory": {
+      "core": [
+        {
+          "vuid": "VUID-vkAllocateMemory-pAllocateInfo-01713",
+          "text": " <code>pAllocateInfo</code>-&gt;allocationSize <strong class=\"purple\">must</strong> be less than or equal to <a href=\"#VkPhysicalDeviceMemoryProperties\">VkPhysicalDeviceMemoryProperties</a>::<code>memoryHeaps</code>[memindex].size where <code>memindex</code> = <a href=\"#VkPhysicalDeviceMemoryProperties\">VkPhysicalDeviceMemoryProperties</a>::<code>memoryTypes</code>[pAllocateInfo-&gt;memoryTypeIndex].heapIndex as returned by <a href=\"#vkGetPhysicalDeviceMemoryProperties\">vkGetPhysicalDeviceMemoryProperties</a> for the <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> that <code>device</code> was created from."
+        },
+        {
+          "vuid": "VUID-vkAllocateMemory-pAllocateInfo-01714",
+          "text": " <code>pAllocateInfo</code>-&gt;memoryTypeIndex <strong class=\"purple\">must</strong> be less than <a href=\"#VkPhysicalDeviceMemoryProperties\">VkPhysicalDeviceMemoryProperties</a>::<code>memoryTypeCount</code> as returned by <a href=\"#vkGetPhysicalDeviceMemoryProperties\">vkGetPhysicalDeviceMemoryProperties</a> for the <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> that <code>device</code> was created from."
+        },
+        {
+          "vuid": "VUID-vkAllocateMemory-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkAllocateMemory-pAllocateInfo-parameter",
+          "text": " <code>pAllocateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkMemoryAllocateInfo\">VkMemoryAllocateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkAllocateMemory-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkAllocateMemory-pMemory-parameter",
+          "text": " <code>pMemory</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkDeviceMemory\">VkDeviceMemory</a> handle"
+        }
+      ],
+      "(VK_AMD_device_coherent_memory)": [
+        {
+          "vuid": "VUID-vkAllocateMemory-deviceCoherentMemory-02790",
+          "text": " If the <a href=\"#features-deviceCoherentMemory\"><code>deviceCoherentMemory</code></a> feature is not enabled, <code>pAllocateInfo</code>-&gt;memoryTypeIndex <strong class=\"purple\">must</strong> not identify a memory type supporting <code>VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD</code>"
+        }
+      ]
+    },
+    "VkMemoryAllocateInfo": {
+      "!(VK_ANDROID_external_memory_android_hardware_buffer)": [
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-allocationSize-00638",
+          "text": " <code>allocationSize</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ],
+      "(VK_KHR_external_memory)+(VK_KHR_dedicated_allocation,VK_NV_dedicated_allocation)": [
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-pNext-00639",
+          "text": " If the <code>pNext</code> chain includes a <code>VkExportMemoryAllocateInfo</code> structure, and any of the handle types specified in <code>VkExportMemoryAllocateInfo</code>::<code>handleTypes</code> require a dedicated allocation, as reported by <a href=\"#vkGetPhysicalDeviceImageFormatProperties2\">vkGetPhysicalDeviceImageFormatProperties2</a> in <code>VkExternalImageFormatProperties</code>::<code>externalMemoryProperties.externalMemoryFeatures</code> or <code>VkExternalBufferProperties</code>::<code>externalMemoryProperties.externalMemoryFeatures</code>, the <code>pNext</code> chain <strong class=\"purple\">must</strong> include a <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a> structure"
+        }
+      ],
+      "(VK_KHR_external_memory)+(VK_NV_external_memory)": [
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-pNext-00640",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkExportMemoryAllocateInfo\">VkExportMemoryAllocateInfo</a> structure, it <strong class=\"purple\">must</strong> not include a <a href=\"#VkExportMemoryAllocateInfoNV\">VkExportMemoryAllocateInfoNV</a> or <a href=\"#VkExportMemoryWin32HandleInfoNV\">VkExportMemoryWin32HandleInfoNV</a> structure."
+        }
+      ],
+      "(VK_KHR_external_memory_win32+VK_NV_external_memory_win32)": [
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-pNext-00641",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkImportMemoryWin32HandleInfoKHR\">VkImportMemoryWin32HandleInfoKHR</a> structure, it <strong class=\"purple\">must</strong> not include a <a href=\"#VkImportMemoryWin32HandleInfoNV\">VkImportMemoryWin32HandleInfoNV</a> structure."
+        }
+      ],
+      "(VK_KHR_external_memory_fd)": [
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-allocationSize-01742",
+          "text": " If the parameters define an import operation, the external handle specified was created by the Vulkan API, and the external handle type is <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR</code>, then the values of <code>allocationSize</code> and <code>memoryTypeIndex</code> <strong class=\"purple\">must</strong> match those specified when the memory object being imported was created."
+        },
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-memoryTypeIndex-00648",
+          "text": " If the parameters define an import operation and the external handle is a POSIX file descriptor created outside of the Vulkan API, the value of <code>memoryTypeIndex</code> <strong class=\"purple\">must</strong> be one of those returned by <a href=\"#vkGetMemoryFdPropertiesKHR\">vkGetMemoryFdPropertiesKHR</a>."
+        }
+      ],
+      "(VK_KHR_external_memory+VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-None-00643",
+          "text": " If the parameters define an import operation and the external handle specified was created by the Vulkan API, the device mask specified by <a href=\"#VkMemoryAllocateFlagsInfo\">VkMemoryAllocateFlagsInfo</a> <strong class=\"purple\">must</strong> match that specified when the memory object being imported was allocated."
+        },
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-None-00644",
+          "text": " If the parameters define an import operation and the external handle specified was created by the Vulkan API, the list of physical devices that comprise the logical device passed to <a href=\"#vkAllocateMemory\">vkAllocateMemory</a> <strong class=\"purple\">must</strong> match the list of physical devices that comprise the logical device on which the memory was originally allocated."
+        }
+      ],
+      "(VK_KHR_external_memory_win32)": [
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-memoryTypeIndex-00645",
+          "text": " If the parameters define an import operation and the external handle is an NT handle or a global share handle created outside of the Vulkan API, the value of <code>memoryTypeIndex</code> <strong class=\"purple\">must</strong> be one of those returned by <a href=\"#vkGetMemoryWin32HandlePropertiesKHR\">vkGetMemoryWin32HandlePropertiesKHR</a>."
+        },
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-allocationSize-01743",
+          "text": " If the parameters define an import operation, the external handle was created by the Vulkan API, and the external handle type is <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR</code> or <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR</code>, then the values of <code>allocationSize</code> and <code>memoryTypeIndex</code> <strong class=\"purple\">must</strong> match those specified when the memory object being imported was created."
+        },
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-allocationSize-00646",
+          "text": " If the parameters define an import operation and the external handle type is <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT</code>, <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT</code>, or <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT</code>, <code>allocationSize</code> <strong class=\"purple\">must</strong> match the size reported in the memory requirements of the <code>image</code> or <code>buffer</code> member of the <code>VkDedicatedAllocationMemoryAllocateInfoNV</code> structure included in the <code>pNext</code> chain."
+        },
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-allocationSize-00647",
+          "text": " If the parameters define an import operation and the external handle type is <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT</code>, <code>allocationSize</code> <strong class=\"purple\">must</strong> match the size specified when creating the Direct3D 12 heap from which the external handle was extracted."
+        }
+      ],
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-memoryTypeIndex-01872",
+          "text": " If the protected memory feature is not enabled, the <code>VkMemoryAllocateInfo</code>::<code>memoryTypeIndex</code> <strong class=\"purple\">must</strong> not indicate a memory type that reports <code>VK_MEMORY_PROPERTY_PROTECTED_BIT</code>."
+        }
+      ],
+      "(VK_EXT_external_memory_host)": [
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-memoryTypeIndex-01744",
+          "text": " If the parameters define an import operation and the external handle is a host pointer, the value of <code>memoryTypeIndex</code> <strong class=\"purple\">must</strong> be one of those returned by <a href=\"#vkGetMemoryHostPointerPropertiesEXT\">vkGetMemoryHostPointerPropertiesEXT</a>"
+        },
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-allocationSize-01745",
+          "text": " If the parameters define an import operation and the external handle is a host pointer, <code>allocationSize</code> <strong class=\"purple\">must</strong> be an integer multiple of <code>VkPhysicalDeviceExternalMemoryHostPropertiesEXT</code>::<code>minImportedHostPointerAlignment</code>"
+        }
+      ],
+      "(VK_EXT_external_memory_host)+(VK_NV_dedicated_allocation)": [
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-pNext-02805",
+          "text": " If the parameters define an import operation and the external handle is a host pointer, the <code>pNext</code> chain <strong class=\"purple\">must</strong> not include a <a href=\"#VkDedicatedAllocationMemoryAllocateInfoNV\">VkDedicatedAllocationMemoryAllocateInfoNV</a> structure with either its <code>image</code> or <code>buffer</code> field set to a value other than <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>."
+        }
+      ],
+      "(VK_EXT_external_memory_host)+(VK_KHR_dedicated_allocation)": [
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-pNext-02806",
+          "text": " If the parameters define an import operation and the external handle is a host pointer, the <code>pNext</code> chain <strong class=\"purple\">must</strong> not include a <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a> structure with either its <code>image</code> or <code>buffer</code> field set to a value other than <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>."
+        }
+      ],
+      "(VK_ANDROID_external_memory_android_hardware_buffer)": [
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-allocationSize-02383",
+          "text": " If the parameters define an import operation and the external handle type is <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID</code>, <code>allocationSize</code> <strong class=\"purple\">must</strong> be the size returned by <a href=\"#vkGetAndroidHardwareBufferPropertiesANDROID\">vkGetAndroidHardwareBufferPropertiesANDROID</a> for the Android hardware buffer."
+        },
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-pNext-02384",
+          "text": " If the parameters define an import operation and the external handle type is <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID</code>, and the <code>pNext</code> chain does not include a <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a> structure or <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a>::<code>image</code> is <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, the Android hardware buffer <strong class=\"purple\">must</strong> have a <code>AHardwareBuffer_Desc</code>::<code>format</code> of <code>AHARDWAREBUFFER_FORMAT_BLOB</code> and a <code>AHardwareBuffer_Desc</code>::<code>usage</code> that includes <code>AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER</code>."
+        },
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-memoryTypeIndex-02385",
+          "text": " If the parameters define an import operation and the external handle type is <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID</code>, <code>memoryTypeIndex</code> <strong class=\"purple\">must</strong> be one of those returned by <a href=\"#vkGetAndroidHardwareBufferPropertiesANDROID\">vkGetAndroidHardwareBufferPropertiesANDROID</a> for the Android hardware buffer."
+        },
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-pNext-01874",
+          "text": " If the parameters do not define an import operation, and the <code>pNext</code> chain includes a <code>VkExportMemoryAllocateInfo</code> structure with <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID</code> included in its <code>handleTypes</code> member, and the <code>pNext</code> chain includes a <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a> structure with <code>image</code> not equal to <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, then <code>allocationSize</code> <strong class=\"purple\">must</strong> be <code>0</code>, otherwise <code>allocationSize</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>."
+        },
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-pNext-02386",
+          "text": " If the parameters define an import operation, the external handle is an Android hardware buffer, and the <code>pNext</code> chain includes a <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a> with <code>image</code> that is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, the Android hardware buffer&#8217;s <a href=\"#AHardwareBuffer\">AHardwareBuffer</a>::<code>usage</code> <strong class=\"purple\">must</strong> include at least one of <code>AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT</code> or <code>AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE</code>."
+        },
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-pNext-02387",
+          "text": " If the parameters define an import operation, the external handle is an Android hardware buffer, and the <code>pNext</code> chain includes a <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a> with <code>image</code> that is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, the format of <code>image</code> <strong class=\"purple\">must</strong> be <code>VK_FORMAT_UNDEFINED</code> or the format returned by <a href=\"#vkGetAndroidHardwareBufferPropertiesANDROID\">vkGetAndroidHardwareBufferPropertiesANDROID</a> in <a href=\"#VkAndroidHardwareBufferFormatPropertiesANDROID\">VkAndroidHardwareBufferFormatPropertiesANDROID</a>::<code>format</code> for the Android hardware buffer."
+        },
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-pNext-02388",
+          "text": " If the parameters define an import operation, the external handle is an Android hardware buffer, and the <code>pNext</code> chain includes a <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a> structure with <code>image</code> that is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, the width, height, and array layer dimensions of <code>image</code> and the Android hardware buffer&#8217;s <code>AHardwareBuffer_Desc</code> <strong class=\"purple\">must</strong> be identical."
+        },
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-pNext-02389",
+          "text": " If the parameters define an import operation, the external handle is an Android hardware buffer, and the <code>pNext</code> chain includes a <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a> structure with <code>image</code> that is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, and the Android hardware buffer&#8217;s <a href=\"#AHardwareBuffer\">AHardwareBuffer</a>::<code>usage</code> includes <code>AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE</code>, the <code>image</code> <strong class=\"purple\">must</strong> have a complete mipmap chain."
+        },
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-pNext-02586",
+          "text": " If the parameters define an import operation, the external handle is an Android hardware buffer, and the <code>pNext</code> chain includes a <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a> structure with <code>image</code> that is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, and the Android hardware buffer&#8217;s <a href=\"#AHardwareBuffer\">AHardwareBuffer</a>::<code>usage</code> does not include <code>AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE</code>, the <code>image</code> <strong class=\"purple\">must</strong> have exactly one mipmap level."
+        },
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-pNext-02390",
+          "text": " If the parameters define an import operation, the external handle is an Android hardware buffer, and the <code>pNext</code> chain includes a <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a> structure with <code>image</code> that is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, each bit set in the usage of <code>image</code> <strong class=\"purple\">must</strong> be listed in <a href=\"#memory-external-android-hardware-buffer-usage\">AHardwareBuffer Usage Equivalence</a>, and if there is a corresponding <code>AHARDWAREBUFFER_USAGE</code> bit listed that bit <strong class=\"purple\">must</strong> be included in the Android hardware buffer&#8217;s <code>AHardwareBuffer_Desc</code>::<code>usage</code>."
+        }
+      ],
+      "(VK_KHR_buffer_device_address)": [
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-opaqueCaptureAddress-03329",
+          "text": " If <a href=\"#VkMemoryOpaqueCaptureAddressAllocateInfoKHR\">VkMemoryOpaqueCaptureAddressAllocateInfoKHR</a>::<code>opaqueCaptureAddress</code> is not zero, <code>VkMemoryAllocateFlagsInfo</code>::<code>flags</code> <strong class=\"purple\">must</strong> include <code>VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-flags-03330",
+          "text": " If <code>VkMemoryAllocateFlagsInfo</code>::<code>flags</code> includes <code>VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR</code>, the <a href=\"#features-bufferDeviceAddressCaptureReplay\">bufferDeviceAddressCaptureReplay</a> feature <strong class=\"purple\">must</strong> be enabled"
+        },
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-flags-03331",
+          "text": " If <code>VkMemoryAllocateFlagsInfo</code>::<code>flags</code> includes <code>VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR</code>, the <a href=\"#features-bufferDeviceAddress\">bufferDeviceAddress</a> feature <strong class=\"purple\">must</strong> be enabled"
+        },
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-opaqueCaptureAddress-03333",
+          "text": " If the parameters define an import operation, <a href=\"#VkMemoryOpaqueCaptureAddressAllocateInfoKHR\">VkMemoryOpaqueCaptureAddressAllocateInfoKHR</a>::<code>opaqueCaptureAddress</code> <strong class=\"purple\">must</strong> be zero"
+        }
+      ],
+      "(VK_KHR_buffer_device_address)+(VK_EXT_external_memory_host)": [
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-pNext-03332",
+          "text": " If the <code>pNext</code> chain includes a <code>VkImportMemoryHostPointerInfoEXT</code> structure, <a href=\"#VkMemoryOpaqueCaptureAddressAllocateInfoKHR\">VkMemoryOpaqueCaptureAddressAllocateInfoKHR</a>::<code>opaqueCaptureAddress</code> <strong class=\"purple\">must</strong> be zero"
+        }
+      ],
+      "core": [
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-pNext-pNext",
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkDedicatedAllocationMemoryAllocateInfoNV\">VkDedicatedAllocationMemoryAllocateInfoNV</a>, <a href=\"#VkExportMemoryAllocateInfo\">VkExportMemoryAllocateInfo</a>, <a href=\"#VkExportMemoryAllocateInfoNV\">VkExportMemoryAllocateInfoNV</a>, <a href=\"#VkExportMemoryWin32HandleInfoKHR\">VkExportMemoryWin32HandleInfoKHR</a>, <a href=\"#VkExportMemoryWin32HandleInfoNV\">VkExportMemoryWin32HandleInfoNV</a>, <a href=\"#VkImportAndroidHardwareBufferInfoANDROID\">VkImportAndroidHardwareBufferInfoANDROID</a>, <a href=\"#VkImportMemoryFdInfoKHR\">VkImportMemoryFdInfoKHR</a>, <a href=\"#VkImportMemoryHostPointerInfoEXT\">VkImportMemoryHostPointerInfoEXT</a>, <a href=\"#VkImportMemoryWin32HandleInfoKHR\">VkImportMemoryWin32HandleInfoKHR</a>, <a href=\"#VkImportMemoryWin32HandleInfoNV\">VkImportMemoryWin32HandleInfoNV</a>, <a href=\"#VkMemoryAllocateFlagsInfo\">VkMemoryAllocateFlagsInfo</a>, <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a>, <a href=\"#VkMemoryOpaqueCaptureAddressAllocateInfoKHR\">VkMemoryOpaqueCaptureAddressAllocateInfoKHR</a>, or <a href=\"#VkMemoryPriorityAllocateInfoEXT\">VkMemoryPriorityAllocateInfoEXT</a>"
+        },
+        {
+          "vuid": "VUID-VkMemoryAllocateInfo-sType-unique",
+          "text": " Each <code>sType</code> member in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
+        }
+      ]
+    },
+    "VkMemoryDedicatedAllocateInfo": {
+      "(VK_VERSION_1_1,VK_KHR_dedicated_allocation)": [
+        {
+          "vuid": "VUID-VkMemoryDedicatedAllocateInfo-image-01432",
+          "text": " At least one of <code>image</code> and <code>buffer</code> <strong class=\"purple\">must</strong> be <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>"
+        },
+        {
+          "vuid": "VUID-VkMemoryDedicatedAllocateInfo-image-01433",
+          "text": " If <code>image</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>VkMemoryAllocateInfo</code>::<code>allocationSize</code> <strong class=\"purple\">must</strong> equal the <code>VkMemoryRequirements</code>::<code>size</code> of the image"
+        },
+        {
+          "vuid": "VUID-VkMemoryDedicatedAllocateInfo-image-01434",
+          "text": " If <code>image</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>image</code> <strong class=\"purple\">must</strong> have been created without <code>VK_IMAGE_CREATE_SPARSE_BINDING_BIT</code> set in <code>VkImageCreateInfo</code>::<code>flags</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryDedicatedAllocateInfo-buffer-01435",
+          "text": " If <code>buffer</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>VkMemoryAllocateInfo</code>::<code>allocationSize</code> <strong class=\"purple\">must</strong> equal the <code>VkMemoryRequirements</code>::<code>size</code> of the buffer"
+        },
+        {
+          "vuid": "VUID-VkMemoryDedicatedAllocateInfo-buffer-01436",
+          "text": " If <code>buffer</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>buffer</code> <strong class=\"purple\">must</strong> have been created without <code>VK_BUFFER_CREATE_SPARSE_BINDING_BIT</code> set in <a href=\"#VkBufferCreateInfo\">VkBufferCreateInfo</a>::<code>flags</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryDedicatedAllocateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryDedicatedAllocateInfo-image-parameter",
+          "text": " If <code>image</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>image</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImage\">VkImage</a> handle"
+        },
+        {
+          "vuid": "VUID-VkMemoryDedicatedAllocateInfo-buffer-parameter",
+          "text": " If <code>buffer</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>buffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-VkMemoryDedicatedAllocateInfo-commonparent",
+          "text": " Both of <code>buffer</code>, and <code>image</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_dedicated_allocation)+(VK_KHR_external_memory_win32)": [
+        {
+          "vuid": "VUID-VkMemoryDedicatedAllocateInfo-image-01876",
+          "text": " If <code>image</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a> and <a href=\"#VkMemoryAllocateInfo\">VkMemoryAllocateInfo</a> defines a memory import operation with handle type <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT</code>, <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT</code>, <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT</code>, <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT</code>, <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT</code>, or <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT</code>, and the external handle was created by the Vulkan API, then the memory being imported <strong class=\"purple\">must</strong> also be a dedicated image allocation and <code>image</code> must be identical to the image associated with the imported memory."
+        },
+        {
+          "vuid": "VUID-VkMemoryDedicatedAllocateInfo-buffer-01877",
+          "text": " If <code>buffer</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a> and <a href=\"#VkMemoryAllocateInfo\">VkMemoryAllocateInfo</a> defines a memory import operation with handle type <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT</code>, <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT</code>, <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT</code>, <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT</code>, <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT</code>, or <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT</code>, and the external handle was created by the Vulkan API, then the memory being imported <strong class=\"purple\">must</strong> also be a dedicated buffer allocation and <code>buffer</code> must be identical to the buffer associated with the imported memory."
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_dedicated_allocation)+(VK_KHR_external_memory_fd)": [
+        {
+          "vuid": "VUID-VkMemoryDedicatedAllocateInfo-image-01878",
+          "text": " If <code>image</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a> and <a href=\"#VkMemoryAllocateInfo\">VkMemoryAllocateInfo</a> defines a memory import operation with handle type <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT</code>, the memory being imported <strong class=\"purple\">must</strong> also be a dedicated image allocation and <code>image</code> must be identical to the image associated with the imported memory."
+        },
+        {
+          "vuid": "VUID-VkMemoryDedicatedAllocateInfo-buffer-01879",
+          "text": " If <code>buffer</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a> and <a href=\"#VkMemoryAllocateInfo\">VkMemoryAllocateInfo</a> defines a memory import operation with handle type <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT</code>, the memory being imported <strong class=\"purple\">must</strong> also be a dedicated buffer allocation and <code>buffer</code> must be identical to the buffer associated with the imported memory."
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_dedicated_allocation)+(VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-VkMemoryDedicatedAllocateInfo-image-01797",
+          "text": " If <code>image</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>image</code> <strong class=\"purple\">must</strong> not have been created with <code>VK_IMAGE_CREATE_DISJOINT_BIT</code> set in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a>::<code>flags</code>"
+        }
+      ]
+    },
+    "VkDedicatedAllocationMemoryAllocateInfoNV": {
+      "(VK_NV_dedicated_allocation)": [
+        {
+          "vuid": "VUID-VkDedicatedAllocationMemoryAllocateInfoNV-image-00649",
+          "text": " At least one of <code>image</code> and <code>buffer</code> <strong class=\"purple\">must</strong> be <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>"
+        },
+        {
+          "vuid": "VUID-VkDedicatedAllocationMemoryAllocateInfoNV-image-00650",
+          "text": " If <code>image</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, the image <strong class=\"purple\">must</strong> have been created with <code>VkDedicatedAllocationImageCreateInfoNV</code>::<code>dedicatedAllocation</code> equal to <code>VK_TRUE</code>"
+        },
+        {
+          "vuid": "VUID-VkDedicatedAllocationMemoryAllocateInfoNV-buffer-00651",
+          "text": " If <code>buffer</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, the buffer <strong class=\"purple\">must</strong> have been created with <code>VkDedicatedAllocationBufferCreateInfoNV</code>::<code>dedicatedAllocation</code> equal to <code>VK_TRUE</code>"
+        },
+        {
+          "vuid": "VUID-VkDedicatedAllocationMemoryAllocateInfoNV-image-00652",
+          "text": " If <code>image</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>VkMemoryAllocateInfo</code>::<code>allocationSize</code> <strong class=\"purple\">must</strong> equal the <code>VkMemoryRequirements</code>::<code>size</code> of the image"
+        },
+        {
+          "vuid": "VUID-VkDedicatedAllocationMemoryAllocateInfoNV-buffer-00653",
+          "text": " If <code>buffer</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>VkMemoryAllocateInfo</code>::<code>allocationSize</code> <strong class=\"purple\">must</strong> equal the <code>VkMemoryRequirements</code>::<code>size</code> of the buffer"
+        },
+        {
+          "vuid": "VUID-VkDedicatedAllocationMemoryAllocateInfoNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkDedicatedAllocationMemoryAllocateInfoNV-image-parameter",
+          "text": " If <code>image</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>image</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImage\">VkImage</a> handle"
+        },
+        {
+          "vuid": "VUID-VkDedicatedAllocationMemoryAllocateInfoNV-buffer-parameter",
+          "text": " If <code>buffer</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>buffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-VkDedicatedAllocationMemoryAllocateInfoNV-commonparent",
+          "text": " Both of <code>buffer</code>, and <code>image</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_NV_dedicated_allocation)+(VK_KHR_external_memory_win32,VK_KHR_external_memory_fd)": [
+        {
+          "vuid": "VUID-VkDedicatedAllocationMemoryAllocateInfoNV-image-00654",
+          "text": " If <code>image</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a> and <a href=\"#VkMemoryAllocateInfo\">VkMemoryAllocateInfo</a> defines a memory import operation, the memory being imported <strong class=\"purple\">must</strong> also be a dedicated image allocation and <code>image</code> <strong class=\"purple\">must</strong> be identical to the image associated with the imported memory."
+        },
+        {
+          "vuid": "VUID-VkDedicatedAllocationMemoryAllocateInfoNV-buffer-00655",
+          "text": " If <code>buffer</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a> and <a href=\"#VkMemoryAllocateInfo\">VkMemoryAllocateInfo</a> defines a memory import operation, the memory being imported <strong class=\"purple\">must</strong> also be a dedicated buffer allocation and <code>buffer</code> <strong class=\"purple\">must</strong> be identical to the buffer associated with the imported memory."
+        }
+      ]
+    },
+    "VkMemoryPriorityAllocateInfoEXT": {
+      "(VK_EXT_memory_priority)": [
+        {
+          "vuid": "VUID-VkMemoryPriorityAllocateInfoEXT-priority-02602",
+          "text": " <code>priority</code> <strong class=\"purple\">must</strong> be between <code>0</code> and <code>1</code>, inclusive"
+        },
+        {
+          "vuid": "VUID-VkMemoryPriorityAllocateInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT</code>"
+        }
+      ]
+    },
+    "VkExportMemoryAllocateInfo": {
+      "(VK_VERSION_1_1,VK_KHR_external_memory)": [
+        {
+          "vuid": "VUID-VkExportMemoryAllocateInfo-handleTypes-00656",
+          "text": " The bits in <code>handleTypes</code> <strong class=\"purple\">must</strong> be supported and compatible, as reported by <a href=\"#VkExternalImageFormatProperties\">VkExternalImageFormatProperties</a> or <a href=\"#VkExternalBufferProperties\">VkExternalBufferProperties</a>."
+        },
+        {
+          "vuid": "VUID-VkExportMemoryAllocateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkExportMemoryAllocateInfo-handleTypes-parameter",
+          "text": " <code>handleTypes</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkExternalMemoryHandleTypeFlagBits\">VkExternalMemoryHandleTypeFlagBits</a> values"
+        }
+      ]
+    },
+    "VkExportMemoryWin32HandleInfoKHR": {
+      "(VK_KHR_external_memory_win32)": [
+        {
+          "vuid": "VUID-VkExportMemoryWin32HandleInfoKHR-handleTypes-00657",
+          "text": " If <a href=\"#VkExportMemoryAllocateInfo\">VkExportMemoryAllocateInfo</a>::<code>handleTypes</code> does not include <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT</code>, <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT</code>, <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT</code>, or <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT</code>, a <code>VkExportMemoryWin32HandleInfoKHR</code> structure <strong class=\"purple\">must</strong> not be included in the <code>pNext</code> chain of <a href=\"#VkMemoryAllocateInfo\">VkMemoryAllocateInfo</a>."
+        },
+        {
+          "vuid": "VUID-VkExportMemoryWin32HandleInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkExportMemoryWin32HandleInfoKHR-pAttributes-parameter",
+          "text": " If <code>pAttributes</code> is not <code>NULL</code>, <code>pAttributes</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <code>SECURITY_ATTRIBUTES</code> value"
+        }
+      ]
+    },
+    "VkImportMemoryWin32HandleInfoKHR": {
+      "(VK_KHR_external_memory_win32)": [
+        {
+          "vuid": "VUID-VkImportMemoryWin32HandleInfoKHR-handleType-00658",
+          "text": " If <code>handleType</code> is not <code>0</code>, it <strong class=\"purple\">must</strong> be supported for import, as reported by <a href=\"#VkExternalImageFormatProperties\">VkExternalImageFormatProperties</a> or <a href=\"#VkExternalBufferProperties\">VkExternalBufferProperties</a>."
+        },
+        {
+          "vuid": "VUID-VkImportMemoryWin32HandleInfoKHR-handle-00659",
+          "text": " The memory from which <code>handle</code> was exported, or the memory named by <code>name</code> <strong class=\"purple\">must</strong> have been created on the same underlying physical device as <code>device</code>."
+        },
+        {
+          "vuid": "VUID-VkImportMemoryWin32HandleInfoKHR-handleType-00660",
+          "text": " If <code>handleType</code> is not <code>0</code>, it <strong class=\"purple\">must</strong> be defined as an NT handle or a global share handle."
+        },
+        {
+          "vuid": "VUID-VkImportMemoryWin32HandleInfoKHR-handleType-01439",
+          "text": " If <code>handleType</code> is not <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT</code>, <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT</code>, <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT</code>, or <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT</code>, <code>name</code> <strong class=\"purple\">must</strong> be <code>NULL</code>."
+        },
+        {
+          "vuid": "VUID-VkImportMemoryWin32HandleInfoKHR-handleType-01440",
+          "text": " If <code>handleType</code> is not <code>0</code> and <code>handle</code> is <code>NULL</code>, <code>name</code> <strong class=\"purple\">must</strong> name a valid memory resource of the type specified by <code>handleType</code>."
+        },
+        {
+          "vuid": "VUID-VkImportMemoryWin32HandleInfoKHR-handleType-00661",
+          "text": " If <code>handleType</code> is not <code>0</code> and <code>name</code> is <code>NULL</code>, <code>handle</code> <strong class=\"purple\">must</strong> be a valid handle of the type specified by <code>handleType</code>."
+        },
+        {
+          "vuid": "VUID-VkImportMemoryWin32HandleInfoKHR-handle-01441",
+          "text": " If <code>handle</code> is not <code>NULL</code>, <code>name</code> <strong class=\"purple\">must</strong> be <code>NULL</code>."
+        },
+        {
+          "vuid": "VUID-VkImportMemoryWin32HandleInfoKHR-handle-01518",
+          "text": " If <code>handle</code> is not <code>NULL</code>, it <strong class=\"purple\">must</strong> obey any requirements listed for <code>handleType</code> in <a href=\"#external-memory-handle-types-compatibility\">external memory handle types compatibility</a>."
+        },
+        {
+          "vuid": "VUID-VkImportMemoryWin32HandleInfoKHR-name-01519",
+          "text": " If <code>name</code> is not <code>NULL</code>, it <strong class=\"purple\">must</strong> obey any requirements listed for <code>handleType</code> in <a href=\"#external-memory-handle-types-compatibility\">external memory handle types compatibility</a>."
+        },
+        {
+          "vuid": "VUID-VkImportMemoryWin32HandleInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImportMemoryWin32HandleInfoKHR-handleType-parameter",
+          "text": " If <code>handleType</code> is not <code>0</code>, <code>handleType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkExternalMemoryHandleTypeFlagBits\">VkExternalMemoryHandleTypeFlagBits</a> value"
+        }
+      ]
+    },
+    "vkGetMemoryWin32HandleKHR": {
+      "(VK_KHR_external_memory_win32)": [
+        {
+          "vuid": "VUID-vkGetMemoryWin32HandleKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetMemoryWin32HandleKHR-pGetWin32HandleInfo-parameter",
+          "text": " <code>pGetWin32HandleInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkMemoryGetWin32HandleInfoKHR\">VkMemoryGetWin32HandleInfoKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetMemoryWin32HandleKHR-pHandle-parameter",
+          "text": " <code>pHandle</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>HANDLE</code> value"
+        }
+      ]
+    },
+    "VkMemoryGetWin32HandleInfoKHR": {
+      "(VK_KHR_external_memory_win32)": [
+        {
+          "vuid": "VUID-VkMemoryGetWin32HandleInfoKHR-handleType-00662",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> have been included in <a href=\"#VkExportMemoryAllocateInfo\">VkExportMemoryAllocateInfo</a>::<code>handleTypes</code> when <code>memory</code> was created."
+        },
+        {
+          "vuid": "VUID-VkMemoryGetWin32HandleInfoKHR-handleType-00663",
+          "text": " If <code>handleType</code> is defined as an NT handle, <a href=\"#vkGetMemoryWin32HandleKHR\">vkGetMemoryWin32HandleKHR</a> <strong class=\"purple\">must</strong> be called no more than once for each valid unique combination of <code>memory</code> and <code>handleType</code>."
+        },
+        {
+          "vuid": "VUID-VkMemoryGetWin32HandleInfoKHR-handleType-00664",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> be defined as an NT handle or a global share handle."
+        },
+        {
+          "vuid": "VUID-VkMemoryGetWin32HandleInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryGetWin32HandleInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryGetWin32HandleInfoKHR-memory-parameter",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDeviceMemory\">VkDeviceMemory</a> handle"
+        },
+        {
+          "vuid": "VUID-VkMemoryGetWin32HandleInfoKHR-handleType-parameter",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkExternalMemoryHandleTypeFlagBits\">VkExternalMemoryHandleTypeFlagBits</a> value"
+        }
+      ]
+    },
+    "vkGetMemoryWin32HandlePropertiesKHR": {
+      "(VK_KHR_external_memory_win32)": [
+        {
+          "vuid": "VUID-vkGetMemoryWin32HandlePropertiesKHR-handle-00665",
+          "text": " <code>handle</code> <strong class=\"purple\">must</strong> be an external memory handle created outside of the Vulkan API."
+        },
+        {
+          "vuid": "VUID-vkGetMemoryWin32HandlePropertiesKHR-handleType-00666",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> not be one of the handle types defined as opaque."
+        },
+        {
+          "vuid": "VUID-vkGetMemoryWin32HandlePropertiesKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetMemoryWin32HandlePropertiesKHR-handleType-parameter",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkExternalMemoryHandleTypeFlagBits\">VkExternalMemoryHandleTypeFlagBits</a> value"
+        },
+        {
+          "vuid": "VUID-vkGetMemoryWin32HandlePropertiesKHR-pMemoryWin32HandleProperties-parameter",
+          "text": " <code>pMemoryWin32HandleProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkMemoryWin32HandlePropertiesKHR\">VkMemoryWin32HandlePropertiesKHR</a> structure"
+        }
+      ]
+    },
+    "VkMemoryWin32HandlePropertiesKHR": {
+      "(VK_KHR_external_memory_win32)": [
+        {
+          "vuid": "VUID-VkMemoryWin32HandlePropertiesKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryWin32HandlePropertiesKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "VkImportMemoryFdInfoKHR": {
+      "(VK_KHR_external_memory_fd)": [
+        {
+          "vuid": "VUID-VkImportMemoryFdInfoKHR-handleType-00667",
+          "text": " If <code>handleType</code> is not <code>0</code>, it <strong class=\"purple\">must</strong> be supported for import, as reported by <a href=\"#VkExternalImageFormatProperties\">VkExternalImageFormatProperties</a> or <a href=\"#VkExternalBufferProperties\">VkExternalBufferProperties</a>."
+        },
+        {
+          "vuid": "VUID-VkImportMemoryFdInfoKHR-fd-00668",
+          "text": " The memory from which <code>fd</code> was exported <strong class=\"purple\">must</strong> have been created on the same underlying physical device as <code>device</code>."
+        },
+        {
+          "vuid": "VUID-VkImportMemoryFdInfoKHR-handleType-00669",
+          "text": " If <code>handleType</code> is not <code>0</code>, it <strong class=\"purple\">must</strong> be defined as a POSIX file descriptor handle."
+        },
+        {
+          "vuid": "VUID-VkImportMemoryFdInfoKHR-handleType-00670",
+          "text": " If <code>handleType</code> is not <code>0</code>, <code>fd</code> <strong class=\"purple\">must</strong> be a valid handle of the type specified by <code>handleType</code>."
+        },
+        {
+          "vuid": "VUID-VkImportMemoryFdInfoKHR-fd-01746",
+          "text": " The memory represented by <code>fd</code> <strong class=\"purple\">must</strong> have been created from a physical device and driver that is compatible with <code>device</code> and <code>handleType</code>, as described in <a href=\"#external-memory-handle-types-compatibility\">External memory handle types compatibility</a>."
+        },
+        {
+          "vuid": "VUID-VkImportMemoryFdInfoKHR-fd-01520",
+          "text": " <code>fd</code> <strong class=\"purple\">must</strong> obey any requirements listed for <code>handleType</code> in <a href=\"#external-memory-handle-types-compatibility\">external memory handle types compatibility</a>."
+        },
+        {
+          "vuid": "VUID-VkImportMemoryFdInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImportMemoryFdInfoKHR-handleType-parameter",
+          "text": " If <code>handleType</code> is not <code>0</code>, <code>handleType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkExternalMemoryHandleTypeFlagBits\">VkExternalMemoryHandleTypeFlagBits</a> value"
+        }
+      ]
+    },
+    "vkGetMemoryFdKHR": {
+      "(VK_KHR_external_memory_fd)": [
+        {
+          "vuid": "VUID-vkGetMemoryFdKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetMemoryFdKHR-pGetFdInfo-parameter",
+          "text": " <code>pGetFdInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkMemoryGetFdInfoKHR\">VkMemoryGetFdInfoKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetMemoryFdKHR-pFd-parameter",
+          "text": " <code>pFd</code> <strong class=\"purple\">must</strong> be a valid pointer to an <code>int</code> value"
+        }
+      ]
+    },
+    "VkMemoryGetFdInfoKHR": {
+      "(VK_KHR_external_memory_fd)": [
+        {
+          "vuid": "VUID-VkMemoryGetFdInfoKHR-handleType-00671",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> have been included in <a href=\"#VkExportMemoryAllocateInfo\">VkExportMemoryAllocateInfo</a>::<code>handleTypes</code> when <code>memory</code> was created."
+        },
+        {
+          "vuid": "VUID-VkMemoryGetFdInfoKHR-handleType-00672",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> be defined as a POSIX file descriptor handle."
+        },
+        {
+          "vuid": "VUID-VkMemoryGetFdInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryGetFdInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryGetFdInfoKHR-memory-parameter",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDeviceMemory\">VkDeviceMemory</a> handle"
+        },
+        {
+          "vuid": "VUID-VkMemoryGetFdInfoKHR-handleType-parameter",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkExternalMemoryHandleTypeFlagBits\">VkExternalMemoryHandleTypeFlagBits</a> value"
+        }
+      ]
+    },
+    "vkGetMemoryFdPropertiesKHR": {
+      "(VK_KHR_external_memory_fd)": [
+        {
+          "vuid": "VUID-vkGetMemoryFdPropertiesKHR-fd-00673",
+          "text": " <code>fd</code> <strong class=\"purple\">must</strong> be an external memory handle created outside of the Vulkan API."
+        },
+        {
+          "vuid": "VUID-vkGetMemoryFdPropertiesKHR-handleType-00674",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> not be <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR</code>."
+        },
+        {
+          "vuid": "VUID-vkGetMemoryFdPropertiesKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetMemoryFdPropertiesKHR-handleType-parameter",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkExternalMemoryHandleTypeFlagBits\">VkExternalMemoryHandleTypeFlagBits</a> value"
+        },
+        {
+          "vuid": "VUID-vkGetMemoryFdPropertiesKHR-pMemoryFdProperties-parameter",
+          "text": " <code>pMemoryFdProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkMemoryFdPropertiesKHR\">VkMemoryFdPropertiesKHR</a> structure"
+        }
+      ]
+    },
+    "VkMemoryFdPropertiesKHR": {
+      "(VK_KHR_external_memory_fd)": [
+        {
+          "vuid": "VUID-VkMemoryFdPropertiesKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryFdPropertiesKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "VkImportMemoryHostPointerInfoEXT": {
+      "(VK_EXT_external_memory_host)": [
+        {
+          "vuid": "VUID-VkImportMemoryHostPointerInfoEXT-handleType-01747",
+          "text": " If <code>handleType</code> is not <code>0</code>, it <strong class=\"purple\">must</strong> be supported for import, as reported in <a href=\"#VkExternalMemoryProperties\">VkExternalMemoryProperties</a>"
+        },
+        {
+          "vuid": "VUID-VkImportMemoryHostPointerInfoEXT-handleType-01748",
+          "text": " If <code>handleType</code> is not <code>0</code>, it <strong class=\"purple\">must</strong> be <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT</code> or <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkImportMemoryHostPointerInfoEXT-pHostPointer-01749",
+          "text": " <code>pHostPointer</code> <strong class=\"purple\">must</strong> be a pointer aligned to an integer multiple of <code>VkPhysicalDeviceExternalMemoryHostPropertiesEXT</code>::<code>minImportedHostPointerAlignment</code>"
+        },
+        {
+          "vuid": "VUID-VkImportMemoryHostPointerInfoEXT-handleType-01750",
+          "text": " If <code>handleType</code> is <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT</code>, <code>pHostPointer</code> <strong class=\"purple\">must</strong> be a pointer to <code>allocationSize</code> number of bytes of host memory, where <code>allocationSize</code> is the member of the <code>VkMemoryAllocateInfo</code> structure this structure is chained to"
+        },
+        {
+          "vuid": "VUID-VkImportMemoryHostPointerInfoEXT-handleType-01751",
+          "text": " If <code>handleType</code> is <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT</code>, <code>pHostPointer</code> <strong class=\"purple\">must</strong> be a pointer to <code>allocationSize</code> number of bytes of host mapped foreign memory, where <code>allocationSize</code> is the member of the <code>VkMemoryAllocateInfo</code> structure this structure is chained to"
+        },
+        {
+          "vuid": "VUID-VkImportMemoryHostPointerInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkImportMemoryHostPointerInfoEXT-handleType-parameter",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkExternalMemoryHandleTypeFlagBits\">VkExternalMemoryHandleTypeFlagBits</a> value"
+        }
+      ]
+    },
+    "vkGetMemoryHostPointerPropertiesEXT": {
+      "(VK_EXT_external_memory_host)": [
+        {
+          "vuid": "VUID-vkGetMemoryHostPointerPropertiesEXT-handleType-01752",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> be <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT</code> or <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-vkGetMemoryHostPointerPropertiesEXT-pHostPointer-01753",
+          "text": " <code>pHostPointer</code> <strong class=\"purple\">must</strong> be a pointer aligned to an integer multiple of <code>VkPhysicalDeviceExternalMemoryHostPropertiesEXT</code>::<code>minImportedHostPointerAlignment</code>"
+        },
+        {
+          "vuid": "VUID-vkGetMemoryHostPointerPropertiesEXT-handleType-01754",
+          "text": " If <code>handleType</code> is <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT</code>, <code>pHostPointer</code> <strong class=\"purple\">must</strong> be a pointer to host memory"
+        },
+        {
+          "vuid": "VUID-vkGetMemoryHostPointerPropertiesEXT-handleType-01755",
+          "text": " If <code>handleType</code> is <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT</code>, <code>pHostPointer</code> <strong class=\"purple\">must</strong> be a pointer to host mapped foreign memory"
+        },
+        {
+          "vuid": "VUID-vkGetMemoryHostPointerPropertiesEXT-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetMemoryHostPointerPropertiesEXT-handleType-parameter",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkExternalMemoryHandleTypeFlagBits\">VkExternalMemoryHandleTypeFlagBits</a> value"
+        },
+        {
+          "vuid": "VUID-vkGetMemoryHostPointerPropertiesEXT-pMemoryHostPointerProperties-parameter",
+          "text": " <code>pMemoryHostPointerProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkMemoryHostPointerPropertiesEXT\">VkMemoryHostPointerPropertiesEXT</a> structure"
+        }
+      ]
+    },
+    "VkMemoryHostPointerPropertiesEXT": {
+      "(VK_EXT_external_memory_host)": [
+        {
+          "vuid": "VUID-VkMemoryHostPointerPropertiesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryHostPointerPropertiesEXT-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "VkImportAndroidHardwareBufferInfoANDROID": {
+      "(VK_ANDROID_external_memory_android_hardware_buffer)": [
+        {
+          "vuid": "VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-01880",
+          "text": " If <code>buffer</code> is not <code>NULL</code>, Android hardware buffers <strong class=\"purple\">must</strong> be supported for import, as reported by <a href=\"#VkExternalImageFormatProperties\">VkExternalImageFormatProperties</a> or <a href=\"#VkExternalBufferProperties\">VkExternalBufferProperties</a>."
+        },
+        {
+          "vuid": "VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-01881",
+          "text": " If <code>buffer</code> is not <code>NULL</code>, it <strong class=\"purple\">must</strong> be a valid Android hardware buffer object with <code>AHardwareBuffer_Desc</code>::<code>format</code> and <code>AHardwareBuffer_Desc</code>::<code>usage</code> compatible with Vulkan as described in <a href=\"#memory-external-android-hardware-buffer\">Android Hardware Buffers</a>."
+        },
+        {
+          "vuid": "VUID-VkImportAndroidHardwareBufferInfoANDROID-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID</code>"
+        },
+        {
+          "vuid": "VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-parameter",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> be a valid pointer to an <code>AHardwareBuffer</code> value"
+        }
+      ]
+    },
+    "vkGetMemoryAndroidHardwareBufferANDROID": {
+      "(VK_ANDROID_external_memory_android_hardware_buffer)": [
+        {
+          "vuid": "VUID-vkGetMemoryAndroidHardwareBufferANDROID-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetMemoryAndroidHardwareBufferANDROID-pInfo-parameter",
+          "text": " <code>pInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkMemoryGetAndroidHardwareBufferInfoANDROID\">VkMemoryGetAndroidHardwareBufferInfoANDROID</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetMemoryAndroidHardwareBufferANDROID-pBuffer-parameter",
+          "text": " <code>pBuffer</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid pointer to an <code>AHardwareBuffer</code> value"
+        }
+      ]
+    },
+    "VkMemoryGetAndroidHardwareBufferInfoANDROID": {
+      "(VK_ANDROID_external_memory_android_hardware_buffer)": [
+        {
+          "vuid": "VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-handleTypes-01882",
+          "text": " <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID</code> <strong class=\"purple\">must</strong> have been included in <a href=\"#VkExportMemoryAllocateInfo\">VkExportMemoryAllocateInfo</a>::<code>handleTypes</code> when <code>memory</code> was created."
+        },
+        {
+          "vuid": "VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-pNext-01883",
+          "text": " If the <code>pNext</code> chain of the <a href=\"#VkMemoryAllocateInfo\">VkMemoryAllocateInfo</a> used to allocate <code>memory</code> included a <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a> with non-<code>NULL</code> <code>image</code> member, then that <code>image</code> <strong class=\"purple\">must</strong> already be bound to <code>memory</code>."
+        },
+        {
+          "vuid": "VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-memory-parameter",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDeviceMemory\">VkDeviceMemory</a> handle"
+        }
+      ]
+    },
+    "vkGetAndroidHardwareBufferPropertiesANDROID": {
+      "(VK_ANDROID_external_memory_android_hardware_buffer)": [
+        {
+          "vuid": "VUID-vkGetAndroidHardwareBufferPropertiesANDROID-buffer-01884",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> be a valid Android hardware buffer object with at least one of the <code>AHARDWAREBUFFER_USAGE_GPU_</code>* flags in its <code>AHardwareBuffer_Desc</code>::<code>usage</code>"
+        },
+        {
+          "vuid": "VUID-vkGetAndroidHardwareBufferPropertiesANDROID-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetAndroidHardwareBufferPropertiesANDROID-buffer-parameter",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <code>AHardwareBuffer</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetAndroidHardwareBufferPropertiesANDROID-pProperties-parameter",
+          "text": " <code>pProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkAndroidHardwareBufferPropertiesANDROID\">VkAndroidHardwareBufferPropertiesANDROID</a> structure"
+        }
+      ]
+    },
+    "VkAndroidHardwareBufferPropertiesANDROID": {
+      "(VK_ANDROID_external_memory_android_hardware_buffer)": [
+        {
+          "vuid": "VUID-VkAndroidHardwareBufferPropertiesANDROID-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID</code>"
+        },
+        {
+          "vuid": "VUID-VkAndroidHardwareBufferPropertiesANDROID-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkAndroidHardwareBufferFormatPropertiesANDROID\">VkAndroidHardwareBufferFormatPropertiesANDROID</a>"
+        }
+      ]
+    },
+    "VkAndroidHardwareBufferFormatPropertiesANDROID": {
+      "(VK_ANDROID_external_memory_android_hardware_buffer)": [
+        {
+          "vuid": "VUID-VkAndroidHardwareBufferFormatPropertiesANDROID-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID</code>"
+        }
+      ]
+    },
+    "VkExportMemoryAllocateInfoNV": {
+      "(VK_NV_external_memory)": [
+        {
+          "vuid": "VUID-VkExportMemoryAllocateInfoNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkExportMemoryAllocateInfoNV-handleTypes-parameter",
+          "text": " <code>handleTypes</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkExternalMemoryHandleTypeFlagBitsNV\">VkExternalMemoryHandleTypeFlagBitsNV</a> values"
+        }
+      ]
+    },
+    "VkExportMemoryWin32HandleInfoNV": {
+      "(VK_NV_external_memory_win32)": [
+        {
+          "vuid": "VUID-VkExportMemoryWin32HandleInfoNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkExportMemoryWin32HandleInfoNV-pAttributes-parameter",
+          "text": " If <code>pAttributes</code> is not <code>NULL</code>, <code>pAttributes</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <code>SECURITY_ATTRIBUTES</code> value"
+        }
+      ]
+    },
+    "VkImportMemoryWin32HandleInfoNV": {
+      "(VK_NV_external_memory_win32)": [
+        {
+          "vuid": "VUID-VkImportMemoryWin32HandleInfoNV-handleType-01327",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> not have more than one bit set."
+        },
+        {
+          "vuid": "VUID-VkImportMemoryWin32HandleInfoNV-handle-01328",
+          "text": " <code>handle</code> <strong class=\"purple\">must</strong> be a valid handle to memory, obtained as specified by <code>handleType</code>."
+        },
+        {
+          "vuid": "VUID-VkImportMemoryWin32HandleInfoNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkImportMemoryWin32HandleInfoNV-handleType-parameter",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkExternalMemoryHandleTypeFlagBitsNV\">VkExternalMemoryHandleTypeFlagBitsNV</a> values"
+        }
+      ]
+    },
+    "vkGetMemoryWin32HandleNV": {
+      "(VK_NV_external_memory_win32)": [
+        {
+          "vuid": "VUID-vkGetMemoryWin32HandleNV-handleType-01326",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> be a flag specified in <a href=\"#VkExportMemoryAllocateInfoNV\">VkExportMemoryAllocateInfoNV</a>::<code>handleTypes</code> when allocating <code>memory</code>"
+        },
+        {
+          "vuid": "VUID-vkGetMemoryWin32HandleNV-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetMemoryWin32HandleNV-memory-parameter",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDeviceMemory\">VkDeviceMemory</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetMemoryWin32HandleNV-handleType-parameter",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkExternalMemoryHandleTypeFlagBitsNV\">VkExternalMemoryHandleTypeFlagBitsNV</a> values"
+        },
+        {
+          "vuid": "VUID-vkGetMemoryWin32HandleNV-handleType-requiredbitmask",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkGetMemoryWin32HandleNV-pHandle-parameter",
+          "text": " <code>pHandle</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>HANDLE</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetMemoryWin32HandleNV-memory-parent",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "VkMemoryAllocateFlagsInfo": {
+      "(VK_VERSION_1_1,VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-VkMemoryAllocateFlagsInfo-deviceMask-00675",
+          "text": " If <code>VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT</code> is set, <code>deviceMask</code> <strong class=\"purple\">must</strong> be a valid device mask."
+        },
+        {
+          "vuid": "VUID-VkMemoryAllocateFlagsInfo-deviceMask-00676",
+          "text": " If <code>VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT</code> is set, <code>deviceMask</code> <strong class=\"purple\">must</strong> not be zero"
+        },
+        {
+          "vuid": "VUID-VkMemoryAllocateFlagsInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryAllocateFlagsInfo-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkMemoryAllocateFlagBits\">VkMemoryAllocateFlagBits</a> values"
+        }
+      ]
+    },
+    "VkMemoryOpaqueCaptureAddressAllocateInfoKHR": {
+      "(VK_KHR_buffer_device_address)": [
+        {
+          "vuid": "VUID-VkMemoryOpaqueCaptureAddressAllocateInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR</code>"
+        }
+      ]
+    },
+    "vkFreeMemory": {
+      "core": [
+        {
+          "vuid": "VUID-vkFreeMemory-memory-00677",
+          "text": " All submitted commands that refer to <code>memory</code> (via images or buffers) <strong class=\"purple\">must</strong> have completed execution"
+        },
+        {
+          "vuid": "VUID-vkFreeMemory-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkFreeMemory-memory-parameter",
+          "text": " If <code>memory</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>memory</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDeviceMemory\">VkDeviceMemory</a> handle"
+        },
+        {
+          "vuid": "VUID-vkFreeMemory-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkFreeMemory-memory-parent",
+          "text": " If <code>memory</code> is a valid handle, it <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkMapMemory": {
+      "core": [
+        {
+          "vuid": "VUID-vkMapMemory-memory-00678",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> not be currently host mapped"
+        },
+        {
+          "vuid": "VUID-vkMapMemory-offset-00679",
+          "text": " <code>offset</code> <strong class=\"purple\">must</strong> be less than the size of <code>memory</code>"
+        },
+        {
+          "vuid": "VUID-vkMapMemory-size-00680",
+          "text": " If <code>size</code> is not equal to <code>VK_WHOLE_SIZE</code>, <code>size</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkMapMemory-size-00681",
+          "text": " If <code>size</code> is not equal to <code>VK_WHOLE_SIZE</code>, <code>size</code> <strong class=\"purple\">must</strong> be less than or equal to the size of the <code>memory</code> minus <code>offset</code>"
+        },
+        {
+          "vuid": "VUID-vkMapMemory-memory-00682",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> have been created with a memory type that reports <code>VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkMapMemory-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkMapMemory-memory-parameter",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDeviceMemory\">VkDeviceMemory</a> handle"
+        },
+        {
+          "vuid": "VUID-vkMapMemory-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkMapMemory-ppData-parameter",
+          "text": " <code>ppData</code> <strong class=\"purple\">must</strong> be a valid pointer to a pointer value"
+        },
+        {
+          "vuid": "VUID-vkMapMemory-memory-parent",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ],
+      "(VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-vkMapMemory-memory-00683",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> not have been allocated with multiple instances."
+        }
+      ]
+    },
+    "vkFlushMappedMemoryRanges": {
+      "core": [
+        {
+          "vuid": "VUID-vkFlushMappedMemoryRanges-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkFlushMappedMemoryRanges-pMemoryRanges-parameter",
+          "text": " <code>pMemoryRanges</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>memoryRangeCount</code> valid <a href=\"#VkMappedMemoryRange\">VkMappedMemoryRange</a> structures"
+        },
+        {
+          "vuid": "VUID-vkFlushMappedMemoryRanges-memoryRangeCount-arraylength",
+          "text": " <code>memoryRangeCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "vkInvalidateMappedMemoryRanges": {
+      "core": [
+        {
+          "vuid": "VUID-vkInvalidateMappedMemoryRanges-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkInvalidateMappedMemoryRanges-pMemoryRanges-parameter",
+          "text": " <code>pMemoryRanges</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>memoryRangeCount</code> valid <a href=\"#VkMappedMemoryRange\">VkMappedMemoryRange</a> structures"
+        },
+        {
+          "vuid": "VUID-vkInvalidateMappedMemoryRanges-memoryRangeCount-arraylength",
+          "text": " <code>memoryRangeCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkMappedMemoryRange": {
+      "core": [
+        {
+          "vuid": "VUID-VkMappedMemoryRange-memory-00684",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> be currently host mapped"
+        },
+        {
+          "vuid": "VUID-VkMappedMemoryRange-size-00685",
+          "text": " If <code>size</code> is not equal to <code>VK_WHOLE_SIZE</code>, <code>offset</code> and <code>size</code> <strong class=\"purple\">must</strong> specify a range contained within the currently mapped range of <code>memory</code>"
+        },
+        {
+          "vuid": "VUID-VkMappedMemoryRange-size-00686",
+          "text": " If <code>size</code> is equal to <code>VK_WHOLE_SIZE</code>, <code>offset</code> <strong class=\"purple\">must</strong> be within the currently mapped range of <code>memory</code>"
+        },
+        {
+          "vuid": "VUID-VkMappedMemoryRange-size-01389",
+          "text": " If <code>size</code> is equal to <code>VK_WHOLE_SIZE</code>, the end of the current mapping of <code>memory</code> <strong class=\"purple\">must</strong> be a multiple of <a href=\"#VkPhysicalDeviceLimits\">VkPhysicalDeviceLimits</a>::<code>nonCoherentAtomSize</code> bytes from the beginning of the memory object."
+        },
+        {
+          "vuid": "VUID-VkMappedMemoryRange-offset-00687",
+          "text": " <code>offset</code> <strong class=\"purple\">must</strong> be a multiple of <a href=\"#VkPhysicalDeviceLimits\">VkPhysicalDeviceLimits</a>::<code>nonCoherentAtomSize</code>"
+        },
+        {
+          "vuid": "VUID-VkMappedMemoryRange-size-01390",
+          "text": " If <code>size</code> is not equal to <code>VK_WHOLE_SIZE</code>, <code>size</code> <strong class=\"purple\">must</strong> either be a multiple of <a href=\"#VkPhysicalDeviceLimits\">VkPhysicalDeviceLimits</a>::<code>nonCoherentAtomSize</code>, or <code>offset</code> plus <code>size</code> <strong class=\"purple\">must</strong> equal the size of <code>memory</code>."
+        },
+        {
+          "vuid": "VUID-VkMappedMemoryRange-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE</code>"
+        },
+        {
+          "vuid": "VUID-VkMappedMemoryRange-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkMappedMemoryRange-memory-parameter",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDeviceMemory\">VkDeviceMemory</a> handle"
+        }
+      ]
+    },
+    "vkUnmapMemory": {
+      "core": [
+        {
+          "vuid": "VUID-vkUnmapMemory-memory-00689",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> be currently host mapped"
+        },
+        {
+          "vuid": "VUID-vkUnmapMemory-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkUnmapMemory-memory-parameter",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDeviceMemory\">VkDeviceMemory</a> handle"
+        },
+        {
+          "vuid": "VUID-vkUnmapMemory-memory-parent",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkGetDeviceMemoryCommitment": {
+      "core": [
+        {
+          "vuid": "VUID-vkGetDeviceMemoryCommitment-memory-00690",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> have been created with a memory type that reports <code>VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkGetDeviceMemoryCommitment-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetDeviceMemoryCommitment-memory-parameter",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDeviceMemory\">VkDeviceMemory</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetDeviceMemoryCommitment-pCommittedMemoryInBytes-parameter",
+          "text": " <code>pCommittedMemoryInBytes</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>VkDeviceSize</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetDeviceMemoryCommitment-memory-parent",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkGetDeviceGroupPeerMemoryFeatures": {
+      "(VK_VERSION_1_1,VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-vkGetDeviceGroupPeerMemoryFeatures-heapIndex-00691",
+          "text": " <code>heapIndex</code> <strong class=\"purple\">must</strong> be less than <code>memoryHeapCount</code>"
+        },
+        {
+          "vuid": "VUID-vkGetDeviceGroupPeerMemoryFeatures-localDeviceIndex-00692",
+          "text": " <code>localDeviceIndex</code> <strong class=\"purple\">must</strong> be a valid device index"
+        },
+        {
+          "vuid": "VUID-vkGetDeviceGroupPeerMemoryFeatures-remoteDeviceIndex-00693",
+          "text": " <code>remoteDeviceIndex</code> <strong class=\"purple\">must</strong> be a valid device index"
+        },
+        {
+          "vuid": "VUID-vkGetDeviceGroupPeerMemoryFeatures-localDeviceIndex-00694",
+          "text": " <code>localDeviceIndex</code> <strong class=\"purple\">must</strong> not equal <code>remoteDeviceIndex</code>"
+        },
+        {
+          "vuid": "VUID-vkGetDeviceGroupPeerMemoryFeatures-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetDeviceGroupPeerMemoryFeatures-pPeerMemoryFeatures-parameter",
+          "text": " <code>pPeerMemoryFeatures</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkPeerMemoryFeatureFlags\">VkPeerMemoryFeatureFlags</a> value"
+        }
+      ]
+    },
+    "vkGetDeviceMemoryOpaqueCaptureAddressKHR": {
+      "(VK_KHR_buffer_device_address)": [
+        {
+          "vuid": "VUID-vkGetDeviceMemoryOpaqueCaptureAddressKHR-None-03334",
+          "text": " The <a href=\"#features-bufferDeviceAddress\">bufferDeviceAddress</a> feature <strong class=\"purple\">must</strong> be enabled"
+        },
+        {
+          "vuid": "VUID-vkGetDeviceMemoryOpaqueCaptureAddressKHR-device-03335",
+          "text": " If <code>device</code> was created with multiple physical devices, then the <a href=\"#features-bufferDeviceAddressMultiDevice\">bufferDeviceAddressMultiDevice</a> feature <strong class=\"purple\">must</strong> be enabled"
+        },
+        {
+          "vuid": "VUID-vkGetDeviceMemoryOpaqueCaptureAddressKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetDeviceMemoryOpaqueCaptureAddressKHR-pInfo-parameter",
+          "text": " <code>pInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkDeviceMemoryOpaqueCaptureAddressInfoKHR\">VkDeviceMemoryOpaqueCaptureAddressInfoKHR</a> structure"
+        }
+      ]
+    },
+    "VkDeviceMemoryOpaqueCaptureAddressInfoKHR": {
+      "(VK_KHR_buffer_device_address)": [
+        {
+          "vuid": "VUID-VkDeviceMemoryOpaqueCaptureAddressInfoKHR-memory-03336",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> have been allocated with <code>VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkDeviceMemoryOpaqueCaptureAddressInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkDeviceMemoryOpaqueCaptureAddressInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkDeviceMemoryOpaqueCaptureAddressInfoKHR-memory-parameter",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDeviceMemory\">VkDeviceMemory</a> handle"
+        }
+      ]
+    },
+    "vkCreateBuffer": {
+      "core": [
+        {
+          "vuid": "VUID-vkCreateBuffer-flags-00911",
+          "text": " If the <code>flags</code> member of <code>pCreateInfo</code> includes <code>VK_BUFFER_CREATE_SPARSE_BINDING_BIT</code>, creating this <code>VkBuffer</code> <strong class=\"purple\">must</strong> not cause the total required sparse memory for all currently valid sparse resources on the device to exceed <code>VkPhysicalDeviceLimits</code>::<code>sparseAddressSpaceSize</code>"
+        },
+        {
+          "vuid": "VUID-vkCreateBuffer-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateBuffer-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkBufferCreateInfo\">VkBufferCreateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateBuffer-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateBuffer-pBuffer-parameter",
+          "text": " <code>pBuffer</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        }
+      ]
+    },
+    "VkBufferCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkBufferCreateInfo-size-00912",
+          "text": " <code>size</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferCreateInfo-sharingMode-00913",
+          "text": " If <code>sharingMode</code> is <code>VK_SHARING_MODE_CONCURRENT</code>, <code>pQueueFamilyIndices</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>queueFamilyIndexCount</code> <code>uint32_t</code> values"
+        },
+        {
+          "vuid": "VUID-VkBufferCreateInfo-sharingMode-00914",
+          "text": " If <code>sharingMode</code> is <code>VK_SHARING_MODE_CONCURRENT</code>, <code>queueFamilyIndexCount</code> <strong class=\"purple\">must</strong> be greater than <code>1</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferCreateInfo-flags-00915",
+          "text": " If the <a href=\"#features-sparseBinding\">sparse bindings</a> feature is not enabled, <code>flags</code> <strong class=\"purple\">must</strong> not contain <code>VK_BUFFER_CREATE_SPARSE_BINDING_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferCreateInfo-flags-00916",
+          "text": " If the <a href=\"#features-sparseResidencyBuffer\">sparse buffer residency</a> feature is not enabled, <code>flags</code> <strong class=\"purple\">must</strong> not contain <code>VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferCreateInfo-flags-00917",
+          "text": " If the <a href=\"#features-sparseResidencyAliased\">sparse aliased residency</a> feature is not enabled, <code>flags</code> <strong class=\"purple\">must</strong> not contain <code>VK_BUFFER_CREATE_SPARSE_ALIASED_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferCreateInfo-flags-00918",
+          "text": " If <code>flags</code> contains <code>VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT</code> or <code>VK_BUFFER_CREATE_SPARSE_ALIASED_BIT</code>, it <strong class=\"purple\">must</strong> also contain <code>VK_BUFFER_CREATE_SPARSE_BINDING_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferCreateInfo-pNext-pNext",
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkBufferDeviceAddressCreateInfoEXT\">VkBufferDeviceAddressCreateInfoEXT</a>, <a href=\"#VkBufferOpaqueCaptureAddressCreateInfoKHR\">VkBufferOpaqueCaptureAddressCreateInfoKHR</a>, <a href=\"#VkDedicatedAllocationBufferCreateInfoNV\">VkDedicatedAllocationBufferCreateInfoNV</a>, or <a href=\"#VkExternalMemoryBufferCreateInfo\">VkExternalMemoryBufferCreateInfo</a>"
+        },
+        {
+          "vuid": "VUID-VkBufferCreateInfo-sType-unique",
+          "text": " Each <code>sType</code> member in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
+        },
+        {
+          "vuid": "VUID-VkBufferCreateInfo-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkBufferCreateFlagBits\">VkBufferCreateFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkBufferCreateInfo-usage-parameter",
+          "text": " <code>usage</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkBufferUsageFlagBits\">VkBufferUsageFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkBufferCreateInfo-usage-requiredbitmask",
+          "text": " <code>usage</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferCreateInfo-sharingMode-parameter",
+          "text": " <code>sharingMode</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSharingMode\">VkSharingMode</a> value"
+        }
+      ],
+      "!(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [
+        {
+          "vuid": "VUID-VkBufferCreateInfo-sharingMode-01391",
+          "text": " If <code>sharingMode</code> is <code>VK_SHARING_MODE_CONCURRENT</code>, each element of <code>pQueueFamilyIndices</code> <strong class=\"purple\">must</strong> be unique and <strong class=\"purple\">must</strong> be less than <code>pQueueFamilyPropertyCount</code> returned by <a href=\"#vkGetPhysicalDeviceQueueFamilyProperties\">vkGetPhysicalDeviceQueueFamilyProperties</a> for the <code>physicalDevice</code> that was used to create <code>device</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [
+        {
+          "vuid": "VUID-VkBufferCreateInfo-sharingMode-01419",
+          "text": " If <code>sharingMode</code> is <code>VK_SHARING_MODE_CONCURRENT</code>, each element of <code>pQueueFamilyIndices</code> <strong class=\"purple\">must</strong> be unique and <strong class=\"purple\">must</strong> be less than <code>pQueueFamilyPropertyCount</code> returned by either <a href=\"#vkGetPhysicalDeviceQueueFamilyProperties\">vkGetPhysicalDeviceQueueFamilyProperties</a> or <a href=\"#vkGetPhysicalDeviceQueueFamilyProperties2\">vkGetPhysicalDeviceQueueFamilyProperties2</a> for the <code>physicalDevice</code> that was used to create <code>device</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_external_memory)": [
+        {
+          "vuid": "VUID-VkBufferCreateInfo-pNext-00920",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkExternalMemoryBufferCreateInfo\">VkExternalMemoryBufferCreateInfo</a> structure, its <code>handleTypes</code> member <strong class=\"purple\">must</strong> only contain bits that are also in <a href=\"#VkExternalBufferProperties\">VkExternalBufferProperties</a>::<code>externalMemoryProperties.compatibleHandleTypes</code>, as returned by <a href=\"#vkGetPhysicalDeviceExternalBufferProperties\">vkGetPhysicalDeviceExternalBufferProperties</a> with <code>pExternalBufferInfo</code>-&gt;handleType equal to any one of the handle types specified in <a href=\"#VkExternalMemoryBufferCreateInfo\">VkExternalMemoryBufferCreateInfo</a>::<code>handleTypes</code>"
+        }
+      ],
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-VkBufferCreateInfo-flags-01887",
+          "text": " If the protected memory feature is not enabled, <code>flags</code> <strong class=\"purple\">must</strong> not contain <code>VK_BUFFER_CREATE_PROTECTED_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferCreateInfo-None-01888",
+          "text": " If any of the bits <code>VK_BUFFER_CREATE_SPARSE_BINDING_BIT</code>, <code>VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT</code>, or <code>VK_BUFFER_CREATE_SPARSE_ALIASED_BIT</code> are set, <code>VK_BUFFER_CREATE_PROTECTED_BIT</code> <strong class=\"purple\">must</strong> not also be set"
+        }
+      ],
+      "(VK_NV_dedicated_allocation)": [
+        {
+          "vuid": "VUID-VkBufferCreateInfo-pNext-01571",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkDedicatedAllocationBufferCreateInfoNV\">VkDedicatedAllocationBufferCreateInfoNV</a> structure, and the <code>dedicatedAllocation</code> member of the chained structure is <code>VK_TRUE</code>, then <code>flags</code> <strong class=\"purple\">must</strong> not include <code>VK_BUFFER_CREATE_SPARSE_BINDING_BIT</code>, <code>VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT</code>, or <code>VK_BUFFER_CREATE_SPARSE_ALIASED_BIT</code>"
+        }
+      ],
+      "(VK_EXT_buffer_device_address,VK_KHR_buffer_device_address)+(VK_EXT_buffer_device_address)": [
+        {
+          "vuid": "VUID-VkBufferCreateInfo-deviceAddress-02604",
+          "text": " If <a href=\"#VkBufferDeviceAddressCreateInfoEXT\">VkBufferDeviceAddressCreateInfoEXT</a>::<code>deviceAddress</code> is not zero, <code>flags</code> <strong class=\"purple\">must</strong> include <code>VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR</code>"
+        }
+      ],
+      "(VK_EXT_buffer_device_address,VK_KHR_buffer_device_address)+(VK_KHR_buffer_device_address)": [
+        {
+          "vuid": "VUID-VkBufferCreateInfo-opaqueCaptureAddress-03337",
+          "text": " If <a href=\"#VkBufferOpaqueCaptureAddressCreateInfoKHR\">VkBufferOpaqueCaptureAddressCreateInfoKHR</a>::<code>opaqueCaptureAddress</code> is not zero, <code>flags</code> <strong class=\"purple\">must</strong> include <code>VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR</code>"
+        }
+      ],
+      "(VK_EXT_buffer_device_address,VK_KHR_buffer_device_address)": [
+        {
+          "vuid": "VUID-VkBufferCreateInfo-flags-03338",
+          "text": " If <code>flags</code> includes <code>VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR</code>, the <a href=\"#features-bufferDeviceAddressCaptureReplay\">bufferDeviceAddressCaptureReplay</a> or <a href=\"#features-bufferDeviceAddressCaptureReplayEXT\"><code>VkPhysicalDeviceBufferDeviceAddressFeaturesEXT</code>::<code>bufferDeviceAddressCaptureReplay</code></a> feature <strong class=\"purple\">must</strong> be enabled"
+        }
+      ]
+    },
+    "VkDedicatedAllocationBufferCreateInfoNV": {
+      "(VK_NV_dedicated_allocation)": [
+        {
+          "vuid": "VUID-VkDedicatedAllocationBufferCreateInfoNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV</code>"
+        }
+      ]
+    },
+    "VkExternalMemoryBufferCreateInfo": {
+      "(VK_VERSION_1_1,VK_KHR_external_memory)": [
+        {
+          "vuid": "VUID-VkExternalMemoryBufferCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkExternalMemoryBufferCreateInfo-handleTypes-parameter",
+          "text": " <code>handleTypes</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkExternalMemoryHandleTypeFlagBits\">VkExternalMemoryHandleTypeFlagBits</a> values"
+        }
+      ]
+    },
+    "VkBufferOpaqueCaptureAddressCreateInfoKHR": {
+      "(VK_KHR_buffer_device_address)": [
+        {
+          "vuid": "VUID-VkBufferOpaqueCaptureAddressCreateInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR</code>"
+        }
+      ]
+    },
+    "VkBufferDeviceAddressCreateInfoEXT": {
+      "(VK_EXT_buffer_device_address)": [
+        {
+          "vuid": "VUID-VkBufferDeviceAddressCreateInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT</code>"
+        }
+      ]
+    },
+    "vkDestroyBuffer": {
+      "core": [
+        {
+          "vuid": "VUID-vkDestroyBuffer-buffer-00922",
+          "text": " All submitted commands that refer to <code>buffer</code>, either directly or via a <code>VkBufferView</code>, <strong class=\"purple\">must</strong> have completed execution"
+        },
+        {
+          "vuid": "VUID-vkDestroyBuffer-buffer-00923",
+          "text": " If <code>VkAllocationCallbacks</code> were provided when <code>buffer</code> was created, a compatible set of callbacks <strong class=\"purple\">must</strong> be provided here"
+        },
+        {
+          "vuid": "VUID-vkDestroyBuffer-buffer-00924",
+          "text": " If no <code>VkAllocationCallbacks</code> were provided when <code>buffer</code> was created, <code>pAllocator</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroyBuffer-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyBuffer-buffer-parameter",
+          "text": " If <code>buffer</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>buffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyBuffer-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkDestroyBuffer-buffer-parent",
+          "text": " If <code>buffer</code> is a valid handle, it <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkCreateBufferView": {
+      "core": [
+        {
+          "vuid": "VUID-vkCreateBufferView-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateBufferView-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkBufferViewCreateInfo\">VkBufferViewCreateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateBufferView-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateBufferView-pView-parameter",
+          "text": " <code>pView</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkBufferView\">VkBufferView</a> handle"
+        }
+      ]
+    },
+    "VkBufferViewCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkBufferViewCreateInfo-offset-00925",
+          "text": " <code>offset</code> <strong class=\"purple\">must</strong> be less than the size of <code>buffer</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferViewCreateInfo-range-00928",
+          "text": " If <code>range</code> is not equal to <code>VK_WHOLE_SIZE</code>, <code>range</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferViewCreateInfo-range-00929",
+          "text": " If <code>range</code> is not equal to <code>VK_WHOLE_SIZE</code>, <code>range</code> <strong class=\"purple\">must</strong> be an integer multiple of the texel block size of <code>format</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferViewCreateInfo-range-00930",
+          "text": " If <code>range</code> is not equal to <code>VK_WHOLE_SIZE</code>, <code>range</code> divided by the texel block size of <code>format</code>, multiplied by the number of texels per texel block for that format (as defined in the <a href=\"#formats-compatibility\">Compatible Formats</a> table), <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxTexelBufferElements</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferViewCreateInfo-offset-00931",
+          "text": " If <code>range</code> is not equal to <code>VK_WHOLE_SIZE</code>, the sum of <code>offset</code> and <code>range</code> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>buffer</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferViewCreateInfo-buffer-00932",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> have been created with a <code>usage</code> value containing at least one of <code>VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT</code> or <code>VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferViewCreateInfo-buffer-00933",
+          "text": " If <code>buffer</code> was created with <code>usage</code> containing <code>VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT</code>, <code>format</code> <strong class=\"purple\">must</strong> be supported for uniform texel buffers, as specified by the <code>VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT</code> flag in <code>VkFormatProperties</code>::<code>bufferFeatures</code> returned by <code>vkGetPhysicalDeviceFormatProperties</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferViewCreateInfo-buffer-00934",
+          "text": " If <code>buffer</code> was created with <code>usage</code> containing <code>VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT</code>, <code>format</code> <strong class=\"purple\">must</strong> be supported for storage texel buffers, as specified by the <code>VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT</code> flag in <code>VkFormatProperties</code>::<code>bufferFeatures</code> returned by <code>vkGetPhysicalDeviceFormatProperties</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferViewCreateInfo-buffer-00935",
+          "text": " If <code>buffer</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-VkBufferViewCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferViewCreateInfo-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferViewCreateInfo-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferViewCreateInfo-buffer-parameter",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-VkBufferViewCreateInfo-format-parameter",
+          "text": " <code>format</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFormat\">VkFormat</a> value"
+        }
+      ],
+      "!(VK_EXT_texel_buffer_alignment)": [
+        {
+          "vuid": "VUID-VkBufferViewCreateInfo-offset-00926",
+          "text": " <code>offset</code> <strong class=\"purple\">must</strong> be a multiple of <code>VkPhysicalDeviceLimits</code>::<code>minTexelBufferOffsetAlignment</code>"
+        }
+      ],
+      "(VK_EXT_texel_buffer_alignment)": [
+        {
+          "vuid": "VUID-VkBufferViewCreateInfo-offset-02749",
+          "text": " If the <a href=\"#features-texelBufferAlignment\">texelBufferAlignment</a> feature is not enabled, <code>offset</code> <strong class=\"purple\">must</strong> be a multiple of <code>VkPhysicalDeviceLimits</code>::<code>minTexelBufferOffsetAlignment</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferViewCreateInfo-buffer-02750",
+          "text": " If the <a href=\"#features-texelBufferAlignment\">texelBufferAlignment</a> feature is enabled and if <code>buffer</code> was created with <code>usage</code> containing <code>VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT</code>, <code>offset</code> <strong class=\"purple\">must</strong> be a multiple of the lesser of <a href=\"#VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT\">VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT</a>::<code>storageTexelBufferOffsetAlignmentBytes</code> or, if <a href=\"#VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT\">VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT</a>::<code>storageTexelBufferOffsetSingleTexelAlignment</code> is <code>VK_TRUE</code>, the size of a texel of the requested <code>format</code>. If the size of a texel is a multiple of three bytes, then the size of a single component of <code>format</code> is used instead"
+        },
+        {
+          "vuid": "VUID-VkBufferViewCreateInfo-buffer-02751",
+          "text": " If the <a href=\"#features-texelBufferAlignment\">texelBufferAlignment</a> feature is enabled and if <code>buffer</code> was created with <code>usage</code> containing <code>VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT</code>, <code>offset</code> <strong class=\"purple\">must</strong> be a multiple of the lesser of <a href=\"#VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT\">VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT</a>::<code>uniformTexelBufferOffsetAlignmentBytes</code> or, if <a href=\"#VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT\">VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT</a>::<code>uniformTexelBufferOffsetSingleTexelAlignment</code> is <code>VK_TRUE</code>, the size of a texel of the requested <code>format</code>. If the size of a texel is a multiple of three bytes, then the size of a single component of <code>format</code> is used instead"
+        }
+      ]
+    },
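Read together, the VkBufferViewCreateInfo rules above reduce to: align offset to VkPhysicalDeviceLimits::minTexelBufferOffsetAlignment (the texel-buffer-alignment extension relaxes this), keep offset plus range inside the buffer, and create the buffer with a texel-buffer usage bit. A minimal C++ sketch of a conforming call; it assumes a valid VkPhysicalDevice/VkDevice and a VkBuffer already created with VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, and the helper name is hypothetical:

#include <vulkan/vulkan.h>

// Hypothetical helper: creates a texel buffer view whose offset is rounded up
// to minTexelBufferOffsetAlignment (VUID-VkBufferViewCreateInfo-offset-00926).
VkBufferView CreateTexelBufferView(VkPhysicalDevice gpu, VkDevice device,
                                   VkBuffer buffer, VkFormat format,
                                   VkDeviceSize desired_offset) {
  VkPhysicalDeviceProperties props{};
  vkGetPhysicalDeviceProperties(gpu, &props);
  // The alignment limit is guaranteed to be a power of two.
  const VkDeviceSize align = props.limits.minTexelBufferOffsetAlignment;
  VkBufferViewCreateInfo info{};
  info.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
  info.buffer = buffer;          // must be a valid VkBuffer handle
  info.format = format;          // must be a valid VkFormat value
  info.offset = (desired_offset + align - 1) & ~(align - 1);
  info.range = VK_WHOLE_SIZE;    // sidesteps the explicit range rules above
  VkBufferView view = VK_NULL_HANDLE;
  if (vkCreateBufferView(device, &info, /*pAllocator=*/nullptr, &view) != VK_SUCCESS) {
    return VK_NULL_HANDLE;
  }
  return view;
}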
+    "vkDestroyBufferView": {
+      "core": [
+        {
+          "vuid": "VUID-vkDestroyBufferView-bufferView-00936",
+          "text": " All submitted commands that refer to <code>bufferView</code> <strong class=\"purple\">must</strong> have completed execution"
+        },
+        {
+          "vuid": "VUID-vkDestroyBufferView-bufferView-00937",
+          "text": " If <code>VkAllocationCallbacks</code> were provided when <code>bufferView</code> was created, a compatible set of callbacks <strong class=\"purple\">must</strong> be provided here"
+        },
+        {
+          "vuid": "VUID-vkDestroyBufferView-bufferView-00938",
+          "text": " If no <code>VkAllocationCallbacks</code> were provided when <code>bufferView</code> was created, <code>pAllocator</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroyBufferView-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyBufferView-bufferView-parameter",
+          "text": " If <code>bufferView</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>bufferView</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBufferView\">VkBufferView</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyBufferView-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkDestroyBufferView-bufferView-parent",
+          "text": " If <code>bufferView</code> is a valid handle, it <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
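In practice the vkDestroyBufferView rules mean two things: do not destroy a view the GPU may still be reading, and mirror the allocator convention that was used at creation time. A hedged sketch, assuming the view was created with pAllocator == NULL and using vkDeviceWaitIdle as the bluntest way to satisfy the completed-execution requirement; the function name is hypothetical:

#include <vulkan/vulkan.h>

void DestroyBufferViewWhenIdle(VkDevice device, VkBufferView view) {
  // Coarse but sufficient: all submitted commands that refer to |view| have
  // completed execution afterwards (VUID-vkDestroyBufferView-bufferView-00936).
  vkDeviceWaitIdle(device);
  // The view was created with pAllocator == NULL in this sketch, so NULL is
  // required here as well (VUID-vkDestroyBufferView-bufferView-00938).
  vkDestroyBufferView(device, view, nullptr);
}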
+    "vkCreateImage": {
+      "core": [
+        {
+          "vuid": "VUID-vkCreateImage-flags-00939",
+          "text": " If the <code>flags</code> member of <code>pCreateInfo</code> includes <code>VK_IMAGE_CREATE_SPARSE_BINDING_BIT</code>, creating this <code>VkImage</code> <strong class=\"purple\">must</strong> not cause the total required sparse memory for all currently valid sparse resources on the device to exceed <code>VkPhysicalDeviceLimits</code>::<code>sparseAddressSpaceSize</code>"
+        },
+        {
+          "vuid": "VUID-vkCreateImage-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateImage-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateImage-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateImage-pImage-parameter",
+          "text": " <code>pImage</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkImage\">VkImage</a> handle"
+        }
+      ]
+    },
+    "VkImageCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkImageCreateInfo-imageCreateMaxMipLevels-02251",
+          "text": " Each of the following values (as described in <a href=\"#resources-image-creation-limits\">Image Creation Limits</a>) <strong class=\"purple\">must</strong> not be undefined <code>imageCreateMaxMipLevels</code>, <code>imageCreateMaxArrayLayers</code>, <code>imageCreateMaxExtent</code>, and <code>imageCreateSampleCounts</code>."
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-sharingMode-00941",
+          "text": " If <code>sharingMode</code> is <code>VK_SHARING_MODE_CONCURRENT</code>, <code>pQueueFamilyIndices</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>queueFamilyIndexCount</code> <code>uint32_t</code> values"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-sharingMode-00942",
+          "text": " If <code>sharingMode</code> is <code>VK_SHARING_MODE_CONCURRENT</code>, <code>queueFamilyIndexCount</code> <strong class=\"purple\">must</strong> be greater than <code>1</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-extent-00944",
+          "text": " <code>extent.width</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>."
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-extent-00945",
+          "text": " <code>extent.height</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>."
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-extent-00946",
+          "text": " <code>extent.depth</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>."
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-mipLevels-00947",
+          "text": " <code>mipLevels</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-arrayLayers-00948",
+          "text": " <code>arrayLayers</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-flags-00949",
+          "text": " If <code>flags</code> contains <code>VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT</code>, <code>imageType</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_TYPE_2D</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-extent-02252",
+          "text": " <code>extent.width</code> <strong class=\"purple\">must</strong> be less than or equal to <code>imageCreateMaxExtent.width</code> (as defined in <a href=\"#resources-image-creation-limits\">Image Creation Limits</a>)."
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-extent-02253",
+          "text": " <code>extent.height</code> <strong class=\"purple\">must</strong> be less than or equal to <code>imageCreateMaxExtent.height</code> (as defined in <a href=\"#resources-image-creation-limits\">Image Creation Limits</a>)."
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-extent-02254",
+          "text": " <code>extent.depth</code> <strong class=\"purple\">must</strong> be less than or equal to <code>imageCreateMaxExtent.depth</code> (as defined in <a href=\"#resources-image-creation-limits\">Image Creation Limits</a>)."
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-imageType-00954",
+          "text": " If <code>imageType</code> is <code>VK_IMAGE_TYPE_2D</code> and <code>flags</code> contains <code>VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT</code>, <code>extent.width</code> and <code>extent.height</code> <strong class=\"purple\">must</strong> be equal and <code>arrayLayers</code> <strong class=\"purple\">must</strong> be greater than or equal to 6"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-imageType-00956",
+          "text": " If <code>imageType</code> is <code>VK_IMAGE_TYPE_1D</code>, both <code>extent.height</code> and <code>extent.depth</code> <strong class=\"purple\">must</strong> be <code>1</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-imageType-00957",
+          "text": " If <code>imageType</code> is <code>VK_IMAGE_TYPE_2D</code>, <code>extent.depth</code> <strong class=\"purple\">must</strong> be <code>1</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-mipLevels-00958",
+          "text": " <code>mipLevels</code> <strong class=\"purple\">must</strong> be less than or equal to the number of levels in the complete mipmap chain based on <span class=\"eq\"><code>extent.width</code></span>, <span class=\"eq\"><code>extent.height</code></span>, and <span class=\"eq\"><code>extent.depth</code></span>."
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-mipLevels-02255",
+          "text": " <code>mipLevels</code> <strong class=\"purple\">must</strong> be less than or equal to <code>imageCreateMaxMipLevels</code> (as defined in <a href=\"#resources-image-creation-limits\">Image Creation Limits</a>)."
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-arrayLayers-02256",
+          "text": " <code>arrayLayers</code> <strong class=\"purple\">must</strong> be less than or equal to <code>imageCreateMaxArrayLayers</code> (as defined in <a href=\"#resources-image-creation-limits\">Image Creation Limits</a>)."
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-imageType-00961",
+          "text": " If <code>imageType</code> is <code>VK_IMAGE_TYPE_3D</code>, <code>arrayLayers</code> <strong class=\"purple\">must</strong> be <code>1</code>."
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-samples-02257",
+          "text": " If <code>samples</code> is not <code>VK_SAMPLE_COUNT_1_BIT</code>, then <code>imageType</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_TYPE_2D</code>, <code>flags</code> <strong class=\"purple\">must</strong> not contain <code>VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT</code>, <code>mipLevels</code> <strong class=\"purple\">must</strong> be equal to <code>1</code>, and <code>imageCreateMaybeLinear</code> (as defined in <a href=\"#resources-image-creation-limits\">Image Creation Limits</a>) <strong class=\"purple\">must</strong> be <code>false</code>,"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-usage-00963",
+          "text": " If <code>usage</code> includes <code>VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT</code>, then bits other than <code>VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT</code>, <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>, and <code>VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT</code> <strong class=\"purple\">must</strong> not be set"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-usage-00964",
+          "text": " If <code>usage</code> includes <code>VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT</code>, <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>, <code>VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT</code>, or <code>VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT</code>, <code>extent.width</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxFramebufferWidth</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-usage-00965",
+          "text": " If <code>usage</code> includes <code>VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT</code>, <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>, <code>VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT</code>, or <code>VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT</code>, <code>extent.height</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxFramebufferHeight</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-usage-00966",
+          "text": " If <code>usage</code> includes <code>VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT</code>, <code>usage</code> <strong class=\"purple\">must</strong> also contain at least one of <code>VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT</code>, <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>, or <code>VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT</code>."
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-samples-02258",
+          "text": " <code>samples</code> <strong class=\"purple\">must</strong> be a bit value that is set in <code>imageCreateSampleCounts</code> (as defined in <a href=\"#resources-image-creation-limits\">Image Creation Limits</a>)."
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-usage-00968",
+          "text": " If the <a href=\"#features-shaderStorageImageMultisample\">multisampled storage images</a> feature is not enabled, and <code>usage</code> contains <code>VK_IMAGE_USAGE_STORAGE_BIT</code>, <code>samples</code> <strong class=\"purple\">must</strong> be <code>VK_SAMPLE_COUNT_1_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-flags-00969",
+          "text": " If the <a href=\"#features-sparseBinding\">sparse bindings</a> feature is not enabled, <code>flags</code> <strong class=\"purple\">must</strong> not contain <code>VK_IMAGE_CREATE_SPARSE_BINDING_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-flags-01924",
+          "text": " If the <a href=\"#features-sparseResidencyAliased\">sparse aliased residency</a> feature is not enabled, <code>flags</code> <strong class=\"purple\">must</strong> not contain <code>VK_IMAGE_CREATE_SPARSE_ALIASED_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-imageType-00970",
+          "text": " If <code>imageType</code> is <code>VK_IMAGE_TYPE_1D</code>, <code>flags</code> <strong class=\"purple\">must</strong> not contain <code>VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-imageType-00971",
+          "text": " If the <a href=\"#features-sparseResidencyImage2D\">sparse residency for 2D images</a> feature is not enabled, and <code>imageType</code> is <code>VK_IMAGE_TYPE_2D</code>, <code>flags</code> <strong class=\"purple\">must</strong> not contain <code>VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-imageType-00972",
+          "text": " If the <a href=\"#features-sparseResidencyImage3D\">sparse residency for 3D images</a> feature is not enabled, and <code>imageType</code> is <code>VK_IMAGE_TYPE_3D</code>, <code>flags</code> <strong class=\"purple\">must</strong> not contain <code>VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-imageType-00973",
+          "text": " If the <a href=\"#features-sparseResidency2Samples\">sparse residency for images with 2 samples</a> feature is not enabled, <code>imageType</code> is <code>VK_IMAGE_TYPE_2D</code>, and <code>samples</code> is <code>VK_SAMPLE_COUNT_2_BIT</code>, <code>flags</code> <strong class=\"purple\">must</strong> not contain <code>VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-imageType-00974",
+          "text": " If the <a href=\"#features-sparseResidency4Samples\">sparse residency for images with 4 samples</a> feature is not enabled, <code>imageType</code> is <code>VK_IMAGE_TYPE_2D</code>, and <code>samples</code> is <code>VK_SAMPLE_COUNT_4_BIT</code>, <code>flags</code> <strong class=\"purple\">must</strong> not contain <code>VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-imageType-00975",
+          "text": " If the <a href=\"#features-sparseResidency8Samples\">sparse residency for images with 8 samples</a> feature is not enabled, <code>imageType</code> is <code>VK_IMAGE_TYPE_2D</code>, and <code>samples</code> is <code>VK_SAMPLE_COUNT_8_BIT</code>, <code>flags</code> <strong class=\"purple\">must</strong> not contain <code>VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-imageType-00976",
+          "text": " If the <a href=\"#features-sparseResidency16Samples\">sparse residency for images with 16 samples</a> feature is not enabled, <code>imageType</code> is <code>VK_IMAGE_TYPE_2D</code>, and <code>samples</code> is <code>VK_SAMPLE_COUNT_16_BIT</code>, <code>flags</code> <strong class=\"purple\">must</strong> not contain <code>VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-flags-00987",
+          "text": " If <code>flags</code> contains <code>VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT</code> or <code>VK_IMAGE_CREATE_SPARSE_ALIASED_BIT</code>, it <strong class=\"purple\">must</strong> also contain <code>VK_IMAGE_CREATE_SPARSE_BINDING_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-None-01925",
+          "text": " If any of the bits <code>VK_IMAGE_CREATE_SPARSE_BINDING_BIT</code>, <code>VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT</code>, or <code>VK_IMAGE_CREATE_SPARSE_ALIASED_BIT</code> are set, <code>VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT</code> <strong class=\"purple\">must</strong> not also be set"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-initialLayout-00993",
+          "text": " <code>initialLayout</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_LAYOUT_UNDEFINED</code> or <code>VK_IMAGE_LAYOUT_PREINITIALIZED</code>."
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-pNext-pNext",
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkDedicatedAllocationImageCreateInfoNV\">VkDedicatedAllocationImageCreateInfoNV</a>, <a href=\"#VkExternalFormatANDROID\">VkExternalFormatANDROID</a>, <a href=\"#VkExternalMemoryImageCreateInfo\">VkExternalMemoryImageCreateInfo</a>, <a href=\"#VkExternalMemoryImageCreateInfoNV\">VkExternalMemoryImageCreateInfoNV</a>, <a href=\"#VkImageDrmFormatModifierExplicitCreateInfoEXT\">VkImageDrmFormatModifierExplicitCreateInfoEXT</a>, <a href=\"#VkImageDrmFormatModifierListCreateInfoEXT\">VkImageDrmFormatModifierListCreateInfoEXT</a>, <a href=\"#VkImageFormatListCreateInfoKHR\">VkImageFormatListCreateInfoKHR</a>, <a href=\"#VkImageStencilUsageCreateInfoEXT\">VkImageStencilUsageCreateInfoEXT</a>, or <a href=\"#VkImageSwapchainCreateInfoKHR\">VkImageSwapchainCreateInfoKHR</a>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-sType-unique",
+          "text": " Each <code>sType</code> member in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkImageCreateFlagBits\">VkImageCreateFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-imageType-parameter",
+          "text": " <code>imageType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageType\">VkImageType</a> value"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-format-parameter",
+          "text": " <code>format</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFormat\">VkFormat</a> value"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-samples-parameter",
+          "text": " <code>samples</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSampleCountFlagBits\">VkSampleCountFlagBits</a> value"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-tiling-parameter",
+          "text": " <code>tiling</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageTiling\">VkImageTiling</a> value"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-usage-parameter",
+          "text": " <code>usage</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkImageUsageFlagBits\">VkImageUsageFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-usage-requiredbitmask",
+          "text": " <code>usage</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-sharingMode-parameter",
+          "text": " <code>sharingMode</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSharingMode\">VkSharingMode</a> value"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-initialLayout-parameter",
+          "text": " <code>initialLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageLayout\">VkImageLayout</a> value"
+        }
+      ],
+      "!(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [
+        {
+          "vuid": "VUID-VkImageCreateInfo-sharingMode-01392",
+          "text": " If <code>sharingMode</code> is <code>VK_SHARING_MODE_CONCURRENT</code>, each element of <code>pQueueFamilyIndices</code> <strong class=\"purple\">must</strong> be unique and <strong class=\"purple\">must</strong> be less than <code>pQueueFamilyPropertyCount</code> returned by <a href=\"#vkGetPhysicalDeviceQueueFamilyProperties\">vkGetPhysicalDeviceQueueFamilyProperties</a> for the <code>physicalDevice</code> that was used to create <code>device</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [
+        {
+          "vuid": "VUID-VkImageCreateInfo-sharingMode-01420",
+          "text": " If <code>sharingMode</code> is <code>VK_SHARING_MODE_CONCURRENT</code>, each element of <code>pQueueFamilyIndices</code> <strong class=\"purple\">must</strong> be unique and <strong class=\"purple\">must</strong> be less than <code>pQueueFamilyPropertyCount</code> returned by either <a href=\"#vkGetPhysicalDeviceQueueFamilyProperties\">vkGetPhysicalDeviceQueueFamilyProperties</a> or <a href=\"#vkGetPhysicalDeviceQueueFamilyProperties2\">vkGetPhysicalDeviceQueueFamilyProperties2</a> for the <code>physicalDevice</code> that was used to create <code>device</code>"
+        }
+      ],
+      "!(VK_ANDROID_external_memory_android_hardware_buffer)": [
+        {
+          "vuid": "VUID-VkImageCreateInfo-format-00943",
+          "text": " <code>format</code> <strong class=\"purple\">must</strong> not be <code>VK_FORMAT_UNDEFINED</code>"
+        }
+      ],
+      "(VK_ANDROID_external_memory_android_hardware_buffer)": [
+        {
+          "vuid": "VUID-VkImageCreateInfo-pNext-01974",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkExternalFormatANDROID\">VkExternalFormatANDROID</a> structure, and its <code>externalFormat</code> member is non-zero the <code>format</code> <strong class=\"purple\">must</strong> be <code>VK_FORMAT_UNDEFINED</code>."
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-pNext-01975",
+          "text": " If the <code>pNext</code> chain does not include a <a href=\"#VkExternalFormatANDROID\">VkExternalFormatANDROID</a> structure, or does and its <code>externalFormat</code> member is <code>0</code>, the <code>format</code> <strong class=\"purple\">must</strong> not be <code>VK_FORMAT_UNDEFINED</code>."
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-pNext-02393",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkExternalMemoryImageCreateInfo\">VkExternalMemoryImageCreateInfo</a> structure whose <code>handleTypes</code> member includes <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID</code>, <code>imageType</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_TYPE_2D</code>."
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-pNext-02394",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkExternalMemoryImageCreateInfo\">VkExternalMemoryImageCreateInfo</a> structure whose <code>handleTypes</code> member includes <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID</code>, <code>mipLevels</code> <strong class=\"purple\">must</strong> either be <code>1</code> or equal to the number of levels in the complete mipmap chain based on <span class=\"eq\"><code>extent.width</code></span>, <span class=\"eq\"><code>extent.height</code></span>, and <span class=\"eq\"><code>extent.depth</code></span>."
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-pNext-02396",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkExternalFormatANDROID\">VkExternalFormatANDROID</a> structure whose <code>externalFormat</code> member is not <code>0</code>, <code>flags</code> <strong class=\"purple\">must</strong> not include <code>VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT</code>."
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-pNext-02397",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkExternalFormatANDROID\">VkExternalFormatANDROID</a> structure whose <code>externalFormat</code> member is not <code>0</code>, <code>usage</code> <strong class=\"purple\">must</strong> not include any usages except <code>VK_IMAGE_USAGE_SAMPLED_BIT</code>."
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-pNext-02398",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkExternalFormatANDROID\">VkExternalFormatANDROID</a> structure whose <code>externalFormat</code> member is not <code>0</code>, <code>tiling</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_TILING_OPTIMAL</code>."
+        }
+      ],
+      "(VK_EXT_fragment_density_map)": [
+        {
+          "vuid": "VUID-VkImageCreateInfo-flags-02557",
+          "text": " If <code>flags</code> contains <code>VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT</code>, <code>imageType</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_TYPE_2D</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-samples-02558",
+          "text": " If <code>samples</code> is not <code>VK_SAMPLE_COUNT_1_BIT</code>, <code>usage</code> <strong class=\"purple\">must</strong> not contain <code>VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-usage-02559",
+          "text": " If <code>usage</code> includes <code>VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT</code>, <code>extent.width</code> <strong class=\"purple\">must</strong> be less than or equal to \\(\\lceil{\\frac{maxFramebufferWidth}{minFragmentDensityTexelSize_{width}}}\\rceil\\)"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-usage-02560",
+          "text": " If <code>usage</code> includes <code>VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT</code>, <code>extent.height</code> <strong class=\"purple\">must</strong> be less than or equal to \\(\\lceil{\\frac{maxFramebufferHeight}{minFragmentDensityTexelSize_{height}}}\\rceil\\)"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-flags-02565",
+          "text": " If <code>flags</code> contains <code>VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT</code>, <code>tiling</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_TILING_OPTIMAL</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-flags-02566",
+          "text": " If <code>flags</code> contains <code>VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT</code>, <code>imageType</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_TYPE_2D</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-flags-02567",
+          "text": " If <code>flags</code> contains <code>VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT</code>, <code>flags</code> <strong class=\"purple\">must</strong> not contain <code>VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-flags-02568",
+          "text": " If <code>flags</code> contains <code>VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT</code>, <code>mipLevels</code> <strong class=\"purple\">must</strong> be <code>1</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_maintenance1)": [
+        {
+          "vuid": "VUID-VkImageCreateInfo-flags-00950",
+          "text": " If <code>flags</code> contains <code>VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT</code>, <code>imageType</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_TYPE_3D</code>"
+        }
+      ],
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-VkImageCreateInfo-flags-01890",
+          "text": " If the protected memory feature is not enabled, <code>flags</code> <strong class=\"purple\">must</strong> not contain <code>VK_IMAGE_CREATE_PROTECTED_BIT</code>."
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-None-01891",
+          "text": " If any of the bits <code>VK_IMAGE_CREATE_SPARSE_BINDING_BIT</code>, <code>VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT</code>, or <code>VK_IMAGE_CREATE_SPARSE_ALIASED_BIT</code> are set, <code>VK_IMAGE_CREATE_PROTECTED_BIT</code> <strong class=\"purple\">must</strong> not also be set."
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_external_memory)+(VK_NV_external_memory)": [
+        {
+          "vuid": "VUID-VkImageCreateInfo-pNext-00988",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkExternalMemoryImageCreateInfoNV\">VkExternalMemoryImageCreateInfoNV</a> structure, it <strong class=\"purple\">must</strong> not contain a <a href=\"#VkExternalMemoryImageCreateInfo\">VkExternalMemoryImageCreateInfo</a> structure."
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_external_memory)": [
+        {
+          "vuid": "VUID-VkImageCreateInfo-pNext-00990",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkExternalMemoryImageCreateInfo\">VkExternalMemoryImageCreateInfo</a> structure, its <code>handleTypes</code> member <strong class=\"purple\">must</strong> only contain bits that are also in <a href=\"#VkExternalImageFormatProperties\">VkExternalImageFormatProperties</a>::<code>externalMemoryProperties.compatibleHandleTypes</code>, as returned by <a href=\"#vkGetPhysicalDeviceImageFormatProperties2\">vkGetPhysicalDeviceImageFormatProperties2</a> with <code>format</code>, <code>imageType</code>, <code>tiling</code>, <code>usage</code>, and <code>flags</code> equal to those in this structure, and with a <a href=\"#VkPhysicalDeviceExternalImageFormatInfo\">VkPhysicalDeviceExternalImageFormatInfo</a> structure included in the <code>pNext</code> chain, with a <code>handleType</code> equal to any one of the handle types specified in <a href=\"#VkExternalMemoryImageCreateInfo\">VkExternalMemoryImageCreateInfo</a>::<code>handleTypes</code>"
+        }
+      ],
+      "(VK_NV_external_memory+VK_NV_external_memory_capabilities)": [
+        {
+          "vuid": "VUID-VkImageCreateInfo-pNext-00991",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkExternalMemoryImageCreateInfoNV\">VkExternalMemoryImageCreateInfoNV</a> structure, its <code>handleTypes</code> member <strong class=\"purple\">must</strong> only contain bits that are also in <a href=\"#VkExternalImageFormatPropertiesNV\">VkExternalImageFormatPropertiesNV</a>::<code>externalMemoryProperties.compatibleHandleTypes</code>, as returned by <a href=\"#vkGetPhysicalDeviceExternalImageFormatPropertiesNV\">vkGetPhysicalDeviceExternalImageFormatPropertiesNV</a> with <code>format</code>, <code>imageType</code>, <code>tiling</code>, <code>usage</code>, and <code>flags</code> equal to those in this structure, and with <code>externalHandleType</code> equal to any one of the handle types specified in <a href=\"#VkExternalMemoryImageCreateInfoNV\">VkExternalMemoryImageCreateInfoNV</a>::<code>handleTypes</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-VkImageCreateInfo-physicalDeviceCount-01421",
+          "text": " If the logical device was created with <a href=\"#VkDeviceGroupDeviceCreateInfo\">VkDeviceGroupDeviceCreateInfo</a>::<code>physicalDeviceCount</code> equal to 1, <code>flags</code> <strong class=\"purple\">must</strong> not contain <code>VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-flags-02259",
+          "text": " If <code>flags</code> contains <code>VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT</code>, then <code>mipLevels</code> <strong class=\"purple\">must</strong> be one, <code>arrayLayers</code> <strong class=\"purple\">must</strong> be one, <code>imageType</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_TYPE_2D</code>. and <code>imageCreateMaybeLinear</code> (as defined in <a href=\"#resources-image-creation-limits\">Image Creation Limits</a>) <strong class=\"purple\">must</strong> be <code>false</code>."
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_maintenance2)": [
+        {
+          "vuid": "VUID-VkImageCreateInfo-flags-01572",
+          "text": " If <code>flags</code> contains <code>VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT</code>, then <code>format</code> <strong class=\"purple\">must</strong> be a <a href=\"#appendix-compressedtex-bc\">block-compressed image format</a>, an <a href=\"#appendix-compressedtex-etc2\">ETC compressed image format</a>, or an <a href=\"#appendix-compressedtex-astc\">ASTC compressed image format</a>."
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-flags-01573",
+          "text": " If <code>flags</code> contains <code>VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT</code>, then <code>flags</code> <strong class=\"purple\">must</strong> also contain <code>VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT</code>."
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_external_memory,VK_NV_external_memory)": [
+        {
+          "vuid": "VUID-VkImageCreateInfo-pNext-01443",
+          "text": "     If the <code>pNext</code> chain includes a ifdef::VK_VERSION_1_1,VK_KHR_external_memory[<a href=\"#VkExternalMemoryImageCreateInfo\">VkExternalMemoryImageCreateInfo</a>]"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-VkImageCreateInfo-format-02561",
+          "text": " If the image <code>format</code> is one of those listed in <a href=\"#formats-requiring-sampler-ycbcr-conversion\">Formats requiring sampler Y&#8217;C<sub>B</sub>C<sub>R</sub> conversion for <code>VK_IMAGE_ASPECT_COLOR_BIT</code> image views</a>, then <code>mipLevels</code> <strong class=\"purple\">must</strong> be 1"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-format-02562",
+          "text": " If the image <code>format</code> is one of those listed in <a href=\"#formats-requiring-sampler-ycbcr-conversion\">Formats requiring sampler Y&#8217;C<sub>B</sub>C<sub>R</sub> conversion for <code>VK_IMAGE_ASPECT_COLOR_BIT</code> image views</a>, <code>samples</code> must be <code>VK_SAMPLE_COUNT_1_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-format-02563",
+          "text": " If the image <code>format</code> is one of those listed in <a href=\"#formats-requiring-sampler-ycbcr-conversion\">Formats requiring sampler Y&#8217;C<sub>B</sub>C<sub>R</sub> conversion for <code>VK_IMAGE_ASPECT_COLOR_BIT</code> image views</a>, <code>imageType</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_TYPE_2D</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-imageCreateFormatFeatures-02260",
+          "text": " If <code>format</code> is a <em>multi-planar</em> format, and if <code>imageCreateFormatFeatures</code> (as defined in <a href=\"#resources-image-creation-limits\">Image Creation Limits</a>) does not contain <code>VK_FORMAT_FEATURE_DISJOINT_BIT</code>, then <code>flags</code> <strong class=\"purple\">must</strong> not contain <code>VK_IMAGE_CREATE_DISJOINT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-format-01577",
+          "text": " If <code>format</code> is not a <em>multi-planar</em> format, and <code>flags</code> does not include <code>VK_IMAGE_CREATE_ALIAS_BIT</code>, <code>flags</code> <strong class=\"purple\">must</strong> not contain <code>VK_IMAGE_CREATE_DISJOINT_BIT</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)+(VK_EXT_ycbcr_image_arrays)": [
+        {
+          "vuid": "VUID-VkImageCreateInfo-format-02653",
+          "text": " If the image <code>format</code> is one of those listed in <a href=\"#formats-requiring-sampler-ycbcr-conversion\">Formats requiring sampler Y&#8217;C<sub>B</sub>C<sub>R</sub> conversion for <code>VK_IMAGE_ASPECT_COLOR_BIT</code> image views</a>, and the <code>ycbcrImageArrays</code> feature is not enabled, <code>arrayLayers</code> <strong class=\"purple\">must</strong> be 1"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)+!(VK_EXT_ycbcr_image_arrays)": [
+        {
+          "vuid": "VUID-VkImageCreateInfo-format-02564",
+          "text": " If the image <code>format</code> is one of those listed in <a href=\"#formats-requiring-sampler-ycbcr-conversion\">Formats requiring sampler Y&#8217;C<sub>B</sub>C<sub>R</sub> conversion for <code>VK_IMAGE_ASPECT_COLOR_BIT</code> image views</a>, <code>arrayLayers</code> <strong class=\"purple\">must</strong> be 1"
+        }
+      ],
+      "(VK_EXT_image_drm_format_modifier)": [
+        {
+          "vuid": "VUID-VkImageCreateInfo-tiling-02261",
+          "text": " If <code>tiling</code> is <code>VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT</code>, then the <code>pNext</code> chain <strong class=\"purple\">must</strong> include exactly one of <a href=\"#VkImageDrmFormatModifierListCreateInfoEXT\">VkImageDrmFormatModifierListCreateInfoEXT</a> or <a href=\"#VkImageDrmFormatModifierExplicitCreateInfoEXT\">VkImageDrmFormatModifierExplicitCreateInfoEXT</a> structures"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-pNext-02262",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkImageDrmFormatModifierListCreateInfoEXT\">VkImageDrmFormatModifierListCreateInfoEXT</a> or <a href=\"#VkImageDrmFormatModifierExplicitCreateInfoEXT\">VkImageDrmFormatModifierExplicitCreateInfoEXT</a> structure, then <code>tiling</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-tiling-02353",
+          "text": " If <code>tiling</code> is <code>VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT</code> and <code>flags</code> contains <code>VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT</code>, then the <code>pNext</code> chain <strong class=\"purple\">must</strong> include an <a href=\"#VkImageFormatListCreateInfoKHR\">VkImageFormatListCreateInfoKHR</a> structure with non-zero <code>viewFormatCount</code>"
+        }
+      ],
+      "(VK_EXT_sample_locations)": [
+        {
+          "vuid": "VUID-VkImageCreateInfo-flags-01533",
+          "text": " If <code>flags</code> contains <code>VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT</code> <code>format</code> <strong class=\"purple\">must</strong> be a depth or depth/stencil format"
+        }
+      ],
+      "(VK_EXT_separate_stencil_usage)": [
+        {
+          "vuid": "VUID-VkImageCreateInfo-format-02795",
+          "text": " If <code>format</code> is a depth-stencil format, <code>usage</code> includes <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>, and the <code>pNext</code> chain includes a <a href=\"#VkImageStencilUsageCreateInfoEXT\">VkImageStencilUsageCreateInfoEXT</a> structure, then its <a href=\"#VkImageStencilUsageCreateInfoEXT\">VkImageStencilUsageCreateInfoEXT</a>::<code>stencilUsage</code> member <strong class=\"purple\">must</strong> also include <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-format-02796",
+          "text": " If <code>format</code> is a depth-stencil format, <code>usage</code> does not include <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>, and the <code>pNext</code> chain includes a <a href=\"#VkImageStencilUsageCreateInfoEXT\">VkImageStencilUsageCreateInfoEXT</a> structure, then its <a href=\"#VkImageStencilUsageCreateInfoEXT\">VkImageStencilUsageCreateInfoEXT</a>::<code>stencilUsage</code> member <strong class=\"purple\">must</strong> also not include <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-format-02797",
+          "text": " If <code>format</code> is a depth-stencil format, <code>usage</code> includes <code>VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT</code>, and the <code>pNext</code> chain includes a <a href=\"#VkImageStencilUsageCreateInfoEXT\">VkImageStencilUsageCreateInfoEXT</a> structure, then its <a href=\"#VkImageStencilUsageCreateInfoEXT\">VkImageStencilUsageCreateInfoEXT</a>::<code>stencilUsage</code> member <strong class=\"purple\">must</strong> also include <code>VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-format-02798",
+          "text": " If <code>format</code> is a depth-stencil format, <code>usage</code> does not include <code>VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT</code>, and the <code>pNext</code> chain includes a <a href=\"#VkImageStencilUsageCreateInfoEXT\">VkImageStencilUsageCreateInfoEXT</a> structure, then its <a href=\"#VkImageStencilUsageCreateInfoEXT\">VkImageStencilUsageCreateInfoEXT</a>::<code>stencilUsage</code> member <strong class=\"purple\">must</strong> also not include <code>VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-Format-02536",
+          "text": " If <code>Format</code> is a depth-stencil format and the <code>pNext</code> chain includes a <a href=\"#VkImageStencilUsageCreateInfoEXT\">VkImageStencilUsageCreateInfoEXT</a> structure with its <code>stencilUsage</code> member including <code>VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT</code>, <code>extent.width</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxFramebufferWidth</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-format-02537",
+          "text": " If <code>format</code> is a depth-stencil format and the <code>pNext</code> chain includes a <a href=\"#VkImageStencilUsageCreateInfoEXT\">VkImageStencilUsageCreateInfoEXT</a> structure with its <code>stencilUsage</code> member including <code>VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT</code>, <code>extent.height</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxFramebufferHeight</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-format-02538",
+          "text": " If the <a href=\"#features-shaderStorageImageMultisample\">multisampled storage images</a> feature is not enabled, <code>format</code> is a depth-stencil format and the <code>pNext</code> chain includes a <a href=\"#VkImageStencilUsageCreateInfoEXT\">VkImageStencilUsageCreateInfoEXT</a> structure with its <code>stencilUsage</code> including <code>VK_IMAGE_USAGE_STORAGE_BIT</code>, <code>samples</code> <strong class=\"purple\">must</strong> be <code>VK_SAMPLE_COUNT_1_BIT</code>"
+        }
+      ],
+      "(VK_NV_corner_sampled_image)": [
+        {
+          "vuid": "VUID-VkImageCreateInfo-flags-02050",
+          "text": " If <code>flags</code> contains <code>VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV</code>, <code>imageType</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_TYPE_2D</code> or <code>VK_IMAGE_TYPE_3D</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-flags-02051",
+          "text": " If <code>flags</code> contains <code>VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV</code>, it <strong class=\"purple\">must</strong> not contain <code>VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT</code> and the <code>format</code> <strong class=\"purple\">must</strong> not be a depth/stencil format"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-flags-02052",
+          "text": " If <code>flags</code> contains <code>VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV</code> and <code>imageType</code> is <code>VK_IMAGE_TYPE_2D</code>, <code>extent.width</code> and <code>extent.height</code> <strong class=\"purple\">must</strong> be greater than <code>1</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-flags-02053",
+          "text": " If <code>flags</code> contains <code>VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV</code> and <code>imageType</code> is <code>VK_IMAGE_TYPE_3D</code>, <code>extent.width</code>, <code>extent.height</code>, and <code>extent.depth</code> <strong class=\"purple\">must</strong> be greater than <code>1</code>"
+        }
+      ],
+      "(VK_NV_shading_rate_image)": [
+        {
+          "vuid": "VUID-VkImageCreateInfo-imageType-02082",
+          "text": " If <code>usage</code> includes <code>VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV</code>, <code>imageType</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_TYPE_2D</code>."
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-samples-02083",
+          "text": " If <code>usage</code> includes <code>VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV</code>, <code>samples</code> <strong class=\"purple\">must</strong> be <code>VK_SAMPLE_COUNT_1_BIT</code>."
+        },
+        {
+          "vuid": "VUID-VkImageCreateInfo-tiling-02084",
+          "text": " If <code>usage</code> includes <code>VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV</code>, <code>tiling</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_TILING_OPTIMAL</code>."
+        }
+      ]
+    },
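Several of the VkImageCreateInfo rules above interact: cube compatibility forces VK_IMAGE_TYPE_2D, a square extent, and at least six array layers; multisampling excludes cube compatibility and forces mipLevels == 1; and usage must be non-zero. A minimal C++ sketch that satisfies the core rules for a sampled cube map; it assumes a valid VkDevice and that the chosen format and extent fall inside the Image Creation Limits reported by the implementation, and the helper name is hypothetical:

#include <vulkan/vulkan.h>

VkImage CreateCubeCompatibleImage(VkDevice device, uint32_t edge) {
  VkImageCreateInfo info{};
  info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
  info.flags = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;  // requires VK_IMAGE_TYPE_2D (00949)
  info.imageType = VK_IMAGE_TYPE_2D;
  info.format = VK_FORMAT_R8G8B8A8_UNORM;            // must not be VK_FORMAT_UNDEFINED
  info.extent = {edge, edge, 1};                     // width == height, depth == 1 (00954, 00957)
  info.mipLevels = 1;
  info.arrayLayers = 6;                              // >= 6 for cube compatibility (00954)
  info.samples = VK_SAMPLE_COUNT_1_BIT;              // cube compatibility excludes multisampling (02257)
  info.tiling = VK_IMAGE_TILING_OPTIMAL;
  info.usage = VK_IMAGE_USAGE_SAMPLED_BIT |
               VK_IMAGE_USAGE_TRANSFER_DST_BIT;      // usage must not be 0
  info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;      // no pQueueFamilyIndices needed
  info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;    // UNDEFINED or PREINITIALIZED (00993)
  VkImage image = VK_NULL_HANDLE;
  if (vkCreateImage(device, &info, nullptr, &image) != VK_SUCCESS) {
    return VK_NULL_HANDLE;
  }
  return image;
}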
+    "VkImageStencilUsageCreateInfoEXT": {
+      "(VK_EXT_separate_stencil_usage)": [
+        {
+          "vuid": "VUID-VkImageStencilUsageCreateInfoEXT-stencilUsage-02539",
+          "text": " If <code>stencilUsage</code> includes <code>VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT</code>, it <strong class=\"purple\">must</strong> not include bits other than <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code> or <code>VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageStencilUsageCreateInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageStencilUsageCreateInfoEXT-stencilUsage-parameter",
+          "text": " <code>stencilUsage</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkImageUsageFlagBits\">VkImageUsageFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkImageStencilUsageCreateInfoEXT-stencilUsage-requiredbitmask",
+          "text": " <code>stencilUsage</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        }
+      ]
+    },
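VkImageStencilUsageCreateInfoEXT is consumed through VkImageCreateInfo::pNext, and the separate-stencil-usage rules above (together with VUID-VkImageCreateInfo-format-02795) require the stencil usage to stay consistent with the image's own depth/stencil usage. A hedged sketch of chaining it while creating a depth/stencil attachment image, assuming VK_EXT_separate_stencil_usage is enabled and that info already describes a depth-stencil image whose usage includes VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT; the function name is hypothetical:

#include <vulkan/vulkan.h>

VkResult CreateDepthImageWithStencilUsage(VkDevice device, VkImageCreateInfo info,
                                          VkImage* out_image) {
  VkImageStencilUsageCreateInfoEXT stencil_usage{};
  stencil_usage.sType = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT;
  // Must not be 0, and mirrors the image's DEPTH_STENCIL_ATTACHMENT usage so
  // VUID-VkImageCreateInfo-format-02795 holds for this configuration.
  stencil_usage.stencilUsage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
  stencil_usage.pNext = info.pNext;
  info.pNext = &stencil_usage;
  return vkCreateImage(device, &info, nullptr, out_image);
}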
+    "VkDedicatedAllocationImageCreateInfoNV": {
+      "(VK_NV_dedicated_allocation)": [
+        {
+          "vuid": "VUID-VkDedicatedAllocationImageCreateInfoNV-dedicatedAllocation-00994",
+          "text": " If <code>dedicatedAllocation</code> is <code>VK_TRUE</code>, <code>VkImageCreateInfo</code>::<code>flags</code> <strong class=\"purple\">must</strong> not include <code>VK_IMAGE_CREATE_SPARSE_BINDING_BIT</code>, <code>VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT</code>, or <code>VK_IMAGE_CREATE_SPARSE_ALIASED_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkDedicatedAllocationImageCreateInfoNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV</code>"
+        }
+      ]
+    },
+    "VkExternalMemoryImageCreateInfo": {
+      "(VK_VERSION_1_1,VK_KHR_external_memory)": [
+        {
+          "vuid": "VUID-VkExternalMemoryImageCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkExternalMemoryImageCreateInfo-handleTypes-parameter",
+          "text": " <code>handleTypes</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkExternalMemoryHandleTypeFlagBits\">VkExternalMemoryHandleTypeFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkExternalMemoryImageCreateInfo-handleTypes-requiredbitmask",
+          "text": " <code>handleTypes</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        }
+      ]
+    },
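As the pNext-00990 rule above spells out, handleTypes has to be pre-validated against vkGetPhysicalDeviceImageFormatProperties2 for the exact image parameters being used. A hedged sketch of chaining the structure for an opaque-fd export, assuming that compatibility query has already succeeded; the helper name is hypothetical:

#include <vulkan/vulkan.h>

void ChainOpaqueFdExport(VkImageCreateInfo* info,
                         VkExternalMemoryImageCreateInfo* external) {
  external->sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO;
  // Must not be 0, and must only contain bits reported compatible by
  // vkGetPhysicalDeviceImageFormatProperties2 for this create-info.
  external->handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;
  external->pNext = info->pNext;
  info->pNext = external;
}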
+    "VkExternalMemoryImageCreateInfoNV": {
+      "(VK_NV_external_memory)": [
+        {
+          "vuid": "VUID-VkExternalMemoryImageCreateInfoNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkExternalMemoryImageCreateInfoNV-handleTypes-parameter",
+          "text": " <code>handleTypes</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkExternalMemoryHandleTypeFlagBitsNV\">VkExternalMemoryHandleTypeFlagBitsNV</a> values"
+        }
+      ]
+    },
+    "VkExternalFormatANDROID": {
+      "(VK_ANDROID_external_memory_android_hardware_buffer)": [
+        {
+          "vuid": "VUID-VkExternalFormatANDROID-externalFormat-01894",
+          "text": " <code>externalFormat</code> <strong class=\"purple\">must</strong> be <code>0</code> or a value returned in the <code>externalFormat</code> member of <a href=\"#VkAndroidHardwareBufferFormatPropertiesANDROID\">VkAndroidHardwareBufferFormatPropertiesANDROID</a> by an earlier call to <a href=\"#vkGetAndroidHardwareBufferPropertiesANDROID\">vkGetAndroidHardwareBufferPropertiesANDROID</a>"
+        },
+        {
+          "vuid": "VUID-VkExternalFormatANDROID-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID</code>"
+        }
+      ]
+    },
+    "VkImageSwapchainCreateInfoKHR": {
+      "(VK_VERSION_1_1,VK_KHR_device_group)+(VK_KHR_swapchain)": [
+        {
+          "vuid": "VUID-VkImageSwapchainCreateInfoKHR-swapchain-00995",
+          "text": " If <code>swapchain</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, the fields of <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> <strong class=\"purple\">must</strong> match the <a href=\"#swapchain-wsi-image-create-info\">implied image creation parameters</a> of the swapchain"
+        },
+        {
+          "vuid": "VUID-VkImageSwapchainCreateInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageSwapchainCreateInfoKHR-swapchain-parameter",
+          "text": " If <code>swapchain</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>swapchain</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSwapchainKHR\">VkSwapchainKHR</a> handle"
+        }
+      ]
+    },
+    "VkImageFormatListCreateInfoKHR": {
+      "(VK_KHR_image_format_list)": [
+        {
+          "vuid": "VUID-VkImageFormatListCreateInfoKHR-viewFormatCount-01578",
+          "text": " If <code>viewFormatCount</code> is not <code>0</code>, all of the formats in the <code>pViewFormats</code> array <strong class=\"purple\">must</strong> be compatible with the format specified in the <code>format</code> field of <code>VkImageCreateInfo</code>, as described in the <a href=\"#formats-compatibility\">compatibility table</a>."
+        },
+        {
+          "vuid": "VUID-VkImageFormatListCreateInfoKHR-flags-01579",
+          "text": " If <code>VkImageCreateInfo</code>::<code>flags</code> does not contain <code>VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT</code>, <code>viewFormatCount</code> <strong class=\"purple\">must</strong> be <code>0</code> or <code>1</code>."
+        },
+        {
+          "vuid": "VUID-VkImageFormatListCreateInfoKHR-viewFormatCount-01580",
+          "text": " If <code>viewFormatCount</code> is not <code>0</code>, <code>VkImageCreateInfo</code>::<code>format</code> <strong class=\"purple\">must</strong> be in <code>pViewFormats</code>."
+        },
+        {
+          "vuid": "VUID-VkImageFormatListCreateInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkImageFormatListCreateInfoKHR-pViewFormats-parameter",
+          "text": " If <code>viewFormatCount</code> is not <code>0</code>, <code>pViewFormats</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>viewFormatCount</code> valid <a href=\"#VkFormat\">VkFormat</a> values"
+        }
+      ]
+    },
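The format-list rules above pair with VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT: listing more than one view format requires the mutable-format flag, and the image's own format has to appear in pViewFormats. A hedged sketch using an example UNORM/SRGB pair from the same compatibility class; the helper name is hypothetical:

#include <vulkan/vulkan.h>

void ChainViewFormats(VkImageCreateInfo* info,
                      VkImageFormatListCreateInfoKHR* list) {
  // The image's own format appears in pViewFormats (VUID ...-viewFormatCount-01580),
  // and both formats share a compatibility class (VUID ...-viewFormatCount-01578).
  static const VkFormat kViewFormats[] = {VK_FORMAT_R8G8B8A8_UNORM,
                                          VK_FORMAT_R8G8B8A8_SRGB};
  info->format = VK_FORMAT_R8G8B8A8_UNORM;
  info->flags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;  // required when viewFormatCount > 1
  list->sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR;
  list->viewFormatCount = 2;
  list->pViewFormats = kViewFormats;
  list->pNext = info->pNext;
  info->pNext = list;
}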
+    "VkImageDrmFormatModifierListCreateInfoEXT": {
+      "(VK_EXT_image_drm_format_modifier)": [
+        {
+          "vuid": "VUID-VkImageDrmFormatModifierListCreateInfoEXT-pDrmFormatModifiers-02263",
+          "text": " Each <em>modifier</em> in <code>pDrmFormatModifiers</code> must be compatible with the parameters in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> and its <code>pNext</code> chain, as determined by querying <a href=\"#VkPhysicalDeviceImageFormatInfo2\">VkPhysicalDeviceImageFormatInfo2</a> extended with <a href=\"#VkPhysicalDeviceImageDrmFormatModifierInfoEXT\">VkPhysicalDeviceImageDrmFormatModifierInfoEXT</a>."
+        },
+        {
+          "vuid": "VUID-VkImageDrmFormatModifierListCreateInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageDrmFormatModifierListCreateInfoEXT-pDrmFormatModifiers-parameter",
+          "text": " <code>pDrmFormatModifiers</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>drmFormatModifierCount</code> <code>uint64_t</code> values"
+        },
+        {
+          "vuid": "VUID-VkImageDrmFormatModifierListCreateInfoEXT-drmFormatModifierCount-arraylength",
+          "text": " <code>drmFormatModifierCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkImageDrmFormatModifierExplicitCreateInfoEXT": {
+      "(VK_EXT_image_drm_format_modifier)": [
+        {
+          "vuid": "VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-drmFormatModifier-02264",
+          "text": " <code>drmFormatModifier</code> must be compatible with the parameters in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> and its <code>pNext</code> chain, as determined by querying <a href=\"#VkPhysicalDeviceImageFormatInfo2\">VkPhysicalDeviceImageFormatInfo2</a> extended with <a href=\"#VkPhysicalDeviceImageDrmFormatModifierInfoEXT\">VkPhysicalDeviceImageDrmFormatModifierInfoEXT</a>."
+        },
+        {
+          "vuid": "VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-drmFormatModifierPlaneCount-02265",
+          "text": " <code>drmFormatModifierPlaneCount</code> <strong class=\"purple\">must</strong> be equal to the <a href=\"#VkDrmFormatModifierPropertiesEXT\">VkDrmFormatModifierPropertiesEXT</a>::<code>drmFormatModifierPlaneCount</code> associated with <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a>::<code>format</code> and <code>drmFormatModifier</code>, as found by querying <a href=\"#VkDrmFormatModifierPropertiesListEXT\">VkDrmFormatModifierPropertiesListEXT</a>."
+        },
+        {
+          "vuid": "VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-size-02267",
+          "text": " For each element of <code>pPlaneLayouts</code>, <code>size</code> <strong class=\"purple\">must</strong> be 0"
+        },
+        {
+          "vuid": "VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-arrayPitch-02268",
+          "text": " For each element of <code>pPlaneLayouts</code>, <code>arrayPitch</code> <strong class=\"purple\">must</strong> be 0 if <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a>::<code>arrayLayers</code> is 1."
+        },
+        {
+          "vuid": "VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-depthPitch-02269",
+          "text": " For each element of <code>pPlaneLayouts</code>, <code>depthPitch</code> <strong class=\"purple\">must</strong> be 0 if <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a>::<code>extent.depth</code> is 1."
+        },
+        {
+          "vuid": "VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-pPlaneLayouts-parameter",
+          "text": " If <code>drmFormatModifierPlaneCount</code> is not <code>0</code>, <code>pPlaneLayouts</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>drmFormatModifierPlaneCount</code> <a href=\"#VkSubresourceLayout\">VkSubresourceLayout</a> structures"
+        }
+      ]
+    },
+    "vkGetImageSubresourceLayout": {
+      "!(VK_EXT_image_drm_format_modifier)": [
+        {
+          "vuid": "VUID-vkGetImageSubresourceLayout-image-00996",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>tiling</code> equal to <code>VK_IMAGE_TILING_LINEAR</code>"
+        }
+      ],
+      "(VK_EXT_image_drm_format_modifier)": [
+        {
+          "vuid": "VUID-vkGetImageSubresourceLayout-image-02270",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>tiling</code> equal to <code>VK_IMAGE_TILING_LINEAR</code> or <code>VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT</code>"
+        },
+        {
+          "vuid": "VUID-vkGetImageSubresourceLayout-tiling-02271",
+          "text": "  If the <code>tiling</code> of the <code>image</code> is  <code>VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT</code>, then the <code>aspectMask</code>  member of <code>pSubresource</code> <strong class=\"purple\">must</strong> be  <code>VK_IMAGE_ASPECT_MEMORY_PLANE_i_BIT_EXT</code> and the index <code>i</code> <strong class=\"purple\">must</strong>  be less than the  <a href=\"#VkDrmFormatModifierPropertiesEXT\"><code>drmFormatModifierPlaneCount</code></a>  associated with the image&#8217;s <a href=\"#VkImageCreateInfo\"><code>format</code></a> and <a href=\"#VkImageDrmFormatModifierPropertiesEXT\"><code>drmFormatModifier</code></a>."
+        }
+      ],
+      "core": [
+        {
+          "vuid": "VUID-vkGetImageSubresourceLayout-aspectMask-00997",
+          "text": " The <code>aspectMask</code> member of <code>pSubresource</code> <strong class=\"purple\">must</strong> only have a single bit set"
+        },
+        {
+          "vuid": "VUID-vkGetImageSubresourceLayout-mipLevel-01716",
+          "text": " The <code>mipLevel</code> member of <code>pSubresource</code> <strong class=\"purple\">must</strong> be less than the <code>mipLevels</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>image</code> was created"
+        },
+        {
+          "vuid": "VUID-vkGetImageSubresourceLayout-arrayLayer-01717",
+          "text": " The <code>arrayLayer</code> member of <code>pSubresource</code> <strong class=\"purple\">must</strong> be less than the <code>arrayLayers</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>image</code> was created"
+        },
+        {
+          "vuid": "VUID-vkGetImageSubresourceLayout-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetImageSubresourceLayout-image-parameter",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImage\">VkImage</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetImageSubresourceLayout-pSubresource-parameter",
+          "text": " <code>pSubresource</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkImageSubresource\">VkImageSubresource</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetImageSubresourceLayout-pLayout-parameter",
+          "text": " <code>pLayout</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkSubresourceLayout\">VkSubresourceLayout</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetImageSubresourceLayout-image-parent",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-vkGetImageSubresourceLayout-format-01581",
+          "text": " If the <code>tiling</code> of the <code>image</code> is <code>VK_IMAGE_TILING_LINEAR</code> and its <code>format</code> is a <a href=\"#formats-requiring-sampler-ycbcr-conversion\">multi-planar format</a> with two planes, the <code>aspectMask</code> member of <code>pSubresource</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_ASPECT_PLANE_0_BIT</code> or <code>VK_IMAGE_ASPECT_PLANE_1_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkGetImageSubresourceLayout-format-01582",
+          "text": " If the <code>tiling</code> of the <code>image</code> is <code>VK_IMAGE_TILING_LINEAR</code> and its <code>format</code> is a <a href=\"#formats-requiring-sampler-ycbcr-conversion\">multi-planar format</a> with three planes, the <code>aspectMask</code> member of <code>pSubresource</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_ASPECT_PLANE_0_BIT</code>, <code>VK_IMAGE_ASPECT_PLANE_1_BIT</code> or <code>VK_IMAGE_ASPECT_PLANE_2_BIT</code>"
+        }
+      ],
+      "(VK_ANDROID_external_memory_android_hardware_buffer)": [
+        {
+          "vuid": "VUID-vkGetImageSubresourceLayout-image-01895",
+          "text": " If <code>image</code> was created with the <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID</code> external memory handle type, then <code>image</code> <strong class=\"purple\">must</strong> be bound to memory."
+        }
+      ]
+    },
+    "VkImageSubresource": {
+      "core": [
+        {
+          "vuid": "VUID-VkImageSubresource-aspectMask-parameter",
+          "text": " <code>aspectMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkImageAspectFlagBits\">VkImageAspectFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkImageSubresource-aspectMask-requiredbitmask",
+          "text": " <code>aspectMask</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        }
+      ]
+    },
+    "vkGetImageDrmFormatModifierPropertiesEXT": {
+      "(VK_EXT_image_drm_format_modifier)": [
+        {
+          "vuid": "VUID-vkGetImageDrmFormatModifierPropertiesEXT-image-02272",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> have been created with <a href=\"#VkImageCreateInfo\"><code>tiling</code></a> equal to <code>VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT</code>."
+        },
+        {
+          "vuid": "VUID-vkGetImageDrmFormatModifierPropertiesEXT-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetImageDrmFormatModifierPropertiesEXT-image-parameter",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImage\">VkImage</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetImageDrmFormatModifierPropertiesEXT-pProperties-parameter",
+          "text": " <code>pProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkImageDrmFormatModifierPropertiesEXT\">VkImageDrmFormatModifierPropertiesEXT</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetImageDrmFormatModifierPropertiesEXT-image-parent",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "VkImageDrmFormatModifierPropertiesEXT": {
+      "(VK_EXT_image_drm_format_modifier)": [
+        {
+          "vuid": "VUID-VkImageDrmFormatModifierPropertiesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageDrmFormatModifierPropertiesEXT-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "vkDestroyImage": {
+      "core": [
+        {
+          "vuid": "VUID-vkDestroyImage-image-01000",
+          "text": " All submitted commands that refer to <code>image</code>, either directly or via a <code>VkImageView</code>, <strong class=\"purple\">must</strong> have completed execution"
+        },
+        {
+          "vuid": "VUID-vkDestroyImage-image-01001",
+          "text": " If <code>VkAllocationCallbacks</code> were provided when <code>image</code> was created, a compatible set of callbacks <strong class=\"purple\">must</strong> be provided here"
+        },
+        {
+          "vuid": "VUID-vkDestroyImage-image-01002",
+          "text": " If no <code>VkAllocationCallbacks</code> were provided when <code>image</code> was created, <code>pAllocator</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroyImage-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyImage-image-parameter",
+          "text": " If <code>image</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>image</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImage\">VkImage</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyImage-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkDestroyImage-image-parent",
+          "text": " If <code>image</code> is a valid handle, it <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkCreateImageView": {
+      "core": [
+        {
+          "vuid": "VUID-vkCreateImageView-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateImageView-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkImageViewCreateInfo\">VkImageViewCreateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateImageView-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateImageView-pView-parameter",
+          "text": " <code>pView</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkImageView\">VkImageView</a> handle"
+        }
+      ]
+    },
+    "VkImageViewCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-image-01003",
+          "text": " If <code>image</code> was not created with <code>VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT</code> then <code>viewType</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_VIEW_TYPE_CUBE</code> or <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>"
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-viewType-01004",
+          "text": " If the <a href=\"#features-imageCubeArray\">image cubemap arrays</a> feature is not enabled, <code>viewType</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>"
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-None-02273",
+          "text": " The <a href=\"#resources-image-view-format-features\">format features</a> of the resultant image view <strong class=\"purple\">must</strong> contain at least one bit."
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-usage-02274",
+          "text": " If <code>usage</code> contains <code>VK_IMAGE_USAGE_SAMPLED_BIT</code>, then the <a href=\"#resources-image-view-format-features\">format features</a> of the resultant image view <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT</code>."
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-usage-02275",
+          "text": " If <code>usage</code> contains <code>VK_IMAGE_USAGE_STORAGE_BIT</code>, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT</code>."
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-usage-02276",
+          "text": " If <code>usage</code> contains <code>VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT</code>, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT</code>."
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-usage-02277",
+          "text": " If <code>usage</code> contains <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT</code>."
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-usage-02652",
+          "text": " If <code>usage</code> contains <code>VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT</code>, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain at least one of <code>VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT</code> or <code>VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT</code>."
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-subresourceRange-01478",
+          "text": " <code>subresourceRange.baseMipLevel</code> <strong class=\"purple\">must</strong> be less than the <code>mipLevels</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>image</code> was created"
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-subresourceRange-01718",
+          "text": " If <code>subresourceRange.levelCount</code> is not <code>VK_REMAINING_MIP_LEVELS</code>, <span class=\"eq\"><code>subresourceRange.baseMipLevel</code> &#43; <code>subresourceRange.levelCount</code></span> <strong class=\"purple\">must</strong> be less than or equal to the <code>mipLevels</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>image</code> was created"
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-image-01018",
+          "text": " If <code>image</code> was created with the <code>VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT</code> flag, <code>format</code> <strong class=\"purple\">must</strong> be compatible with the <code>format</code> used to create <code>image</code>, as defined in <a href=\"#formats-compatibility-classes\">Format Compatibility Classes</a>"
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-image-01020",
+          "text": " If <code>image</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-subResourceRange-01021",
+          "text": " <code>subresourceRange</code> and <code>viewType</code> <strong class=\"purple\">must</strong> be compatible with the image, as described in the <a href=\"#resources-image-views-compatibility\">compatibility table</a>"
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-pNext-pNext",
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkImageViewASTCDecodeModeEXT\">VkImageViewASTCDecodeModeEXT</a>, <a href=\"#VkImageViewUsageCreateInfo\">VkImageViewUsageCreateInfo</a>, or <a href=\"#VkSamplerYcbcrConversionInfo\">VkSamplerYcbcrConversionInfo</a>"
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-sType-unique",
+          "text": " Each <code>sType</code> member in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkImageViewCreateFlagBits\">VkImageViewCreateFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-image-parameter",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImage\">VkImage</a> handle"
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-viewType-parameter",
+          "text": " <code>viewType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageViewType\">VkImageViewType</a> value"
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-format-parameter",
+          "text": " <code>format</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFormat\">VkFormat</a> value"
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-components-parameter",
+          "text": " <code>components</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkComponentMapping\">VkComponentMapping</a> structure"
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-subresourceRange-parameter",
+          "text": " <code>subresourceRange</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageSubresourceRange\">VkImageSubresourceRange</a> structure"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_maintenance1)": [
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-image-01005",
+          "text": " If <code>image</code> was created with <code>VK_IMAGE_TYPE_3D</code> but without <code>VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT</code> set then <code>viewType</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_VIEW_TYPE_2D</code> or <code>VK_IMAGE_VIEW_TYPE_2D_ARRAY</code>"
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-image-01482",
+          "text": " If <code>image</code> is not a 3D image created with <code>VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT</code> set, or <code>viewType</code> is not <code>VK_IMAGE_VIEW_TYPE_2D</code> or <code>VK_IMAGE_VIEW_TYPE_2D_ARRAY</code>, <code>subresourceRange.baseArrayLayer</code> <strong class=\"purple\">must</strong> be less than the <code>arrayLayers</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>image</code> was created"
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-subresourceRange-01483",
+          "text": " If <code>subresourceRange.layerCount</code> is not <code>VK_REMAINING_ARRAY_LAYERS</code>, <code>image</code> is not a 3D image created with <code>VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT</code> set, or <code>viewType</code> is not <code>VK_IMAGE_VIEW_TYPE_2D</code> or <code>VK_IMAGE_VIEW_TYPE_2D_ARRAY</code>, <code>subresourceRange.layerCount</code> <strong class=\"purple\">must</strong> be non-zero and <span class=\"eq\"><code>subresourceRange.baseArrayLayer</code> &#43; <code>subresourceRange.layerCount</code></span> <strong class=\"purple\">must</strong> be less than or equal to the <code>arrayLayers</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>image</code> was created"
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-image-02724",
+          "text": " If <code>image</code> is a 3D image created with <code>VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT</code> set, and <code>viewType</code> is <code>VK_IMAGE_VIEW_TYPE_2D</code> or <code>VK_IMAGE_VIEW_TYPE_2D_ARRAY</code>, <code>subresourceRange.baseArrayLayer</code> <strong class=\"purple\">must</strong> be less than the depth computed from <code>baseMipLevel</code> and <code>extent.depth</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>image</code> was created, according to the formula defined in <a href=\"#resources-image-miplevel-sizing\">Image Miplevel Sizing</a>."
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-subresourceRange-02725",
+          "text": " If <code>subresourceRange.layerCount</code> is not <code>VK_REMAINING_ARRAY_LAYERS</code>, <code>image</code> is a 3D image created with <code>VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT</code> set, and <code>viewType</code> is <code>VK_IMAGE_VIEW_TYPE_2D</code> or <code>VK_IMAGE_VIEW_TYPE_2D_ARRAY</code>, <code>subresourceRange.layerCount</code> <strong class=\"purple\">must</strong> be non-zero and <span class=\"eq\"><code>subresourceRange.baseArrayLayer</code> &#43; <code>subresourceRange.layerCount</code></span> <strong class=\"purple\">must</strong> be less than or equal to the depth computed from <code>baseMipLevel</code> and <code>extent.depth</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>image</code> was created, according to the formula defined in <a href=\"#resources-image-miplevel-sizing\">Image Miplevel Sizing</a>."
+        }
+      ],
+      "!(VK_EXT_fragment_density_map)+!(VK_NV_shading_rate_image)": [
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-image-01007",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> have been created with a <code>usage</code> value containing at least one of <code>VK_IMAGE_USAGE_SAMPLED_BIT</code>, <code>VK_IMAGE_USAGE_STORAGE_BIT</code>, <code>VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT</code>, <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>, or <code>VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT</code>"
+        }
+      ],
+      "!(VK_EXT_fragment_density_map)+(VK_NV_shading_rate_image)": [
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-image-02085",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> have been created with a <code>usage</code> value containing at least one of <code>VK_IMAGE_USAGE_SAMPLED_BIT</code>, <code>VK_IMAGE_USAGE_STORAGE_BIT</code>, <code>VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT</code>, <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>, <code>VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT</code>, or <code>VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV</code>"
+        }
+      ],
+      "(VK_EXT_fragment_density_map)+!(VK_NV_shading_rate_image)": [
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-image-02569",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> have been created with a <code>usage</code> value containing at least one of <code>VK_IMAGE_USAGE_SAMPLED_BIT</code>, <code>VK_IMAGE_USAGE_STORAGE_BIT</code>, <code>VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT</code>, <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>, <code>VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT</code>, or <code>VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT</code>"
+        }
+      ],
+      "(VK_EXT_fragment_density_map)+(VK_NV_shading_rate_image)": [
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-image-02570",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> have been created with a <code>usage</code> value containing at least one of <code>VK_IMAGE_USAGE_SAMPLED_BIT</code>, <code>VK_IMAGE_USAGE_STORAGE_BIT</code>, <code>VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT</code>, <code>VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT</code>, <code>VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT</code>, <code>VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV</code>, or <code>VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT</code>"
+        }
+      ],
+      "(VK_EXT_fragment_density_map)": [
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-image-02571",
+          "text": " If <code>image</code> was created with <code>usage</code> containing <code>VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT</code>, <code>subresourceRange.levelCount</code> <strong class=\"purple\">must</strong> be <code>1</code>"
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-flags-02572",
+          "text": " If <a href=\"#features-fragmentdensitymapdynamic\">dynamic fragment density map</a> feature is not enabled, <code>flags</code> <strong class=\"purple\">must</strong> not contain <code>VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-image-02573",
+          "text": " If <a href=\"#features-fragmentdensitymapdynamic\">dynamic fragment density map</a> feature is not enabled and <code>image</code> was created with <code>usage</code> containing <code>VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT</code>, <code>flags</code> <strong class=\"purple\">must</strong> not contain any of <code>VK_IMAGE_CREATE_PROTECTED_BIT</code>, <code>VK_IMAGE_CREATE_SPARSE_BINDING_BIT</code>, <code>VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT</code>, or <code>VK_IMAGE_CREATE_SPARSE_ALIASED_BIT</code>"
+        }
+      ],
+      "!(VK_VERSION_1_1,VK_KHR_maintenance1)": [
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-subresourceRange-01480",
+          "text": " <code>subresourceRange.baseArrayLayer</code> <strong class=\"purple\">must</strong> be less than the <code>arrayLayers</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>image</code> was created"
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-subresourceRange-01719",
+          "text": " If <code>subresourceRange.layerCount</code> is not <code>VK_REMAINING_ARRAY_LAYERS</code>, <span class=\"eq\"><code>subresourceRange.baseArrayLayer</code> &#43; <code>subresourceRange.layerCount</code></span> <strong class=\"purple\">must</strong> be less than or equal to the <code>arrayLayers</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>image</code> was created"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_maintenance2)+!(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-image-01759",
+          "text": " If <code>image</code> was created with the <code>VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT</code> flag, but without the <code>VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT</code> flag, <code>format</code> <strong class=\"purple\">must</strong> be compatible with the <code>format</code> used to create <code>image</code>, as defined in <a href=\"#formats-compatibility-classes\">Format Compatibility Classes</a>"
+        }
+      ],
+      "!(VK_VERSION_1_1,VK_KHR_maintenance2)+(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-image-01760",
+          "text": " If <code>image</code> was created with the <code>VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT</code> flag, and if the <code>format</code> of the <code>image</code> is not a <a href=\"#formats-requiring-sampler-ycbcr-conversion\">multi-planar</a> format, <code>format</code> <strong class=\"purple\">must</strong> be compatible with the <code>format</code> used to create <code>image</code>, as defined in <a href=\"#formats-compatibility-classes\">Format Compatibility Classes</a>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_maintenance2)+(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-image-01761",
+          "text": " If <code>image</code> was created with the <code>VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT</code> flag, but without the <code>VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT</code> flag, and if the <code>format</code> of the <code>image</code> is not a <a href=\"#formats-requiring-sampler-ycbcr-conversion\">multi-planar</a> format, <code>format</code> <strong class=\"purple\">must</strong> be compatible with the <code>format</code> used to create <code>image</code>, as defined in <a href=\"#formats-compatibility-classes\">Format Compatibility Classes</a>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_maintenance2)": [
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-image-01583",
+          "text": " If <code>image</code> was created with the <code>VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT</code> flag, <code>format</code> <strong class=\"purple\">must</strong> be compatible with, or <strong class=\"purple\">must</strong> be an uncompressed format that is size-compatible with, the <code>format</code> used to create <code>image</code>."
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-image-01584",
+          "text": " If <code>image</code> was created with the <code>VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT</code> flag, the <code>levelCount</code> and <code>layerCount</code> members of <code>subresourceRange</code> <strong class=\"purple\">must</strong> both be <code>1</code>."
+        }
+      ],
+      "(VK_KHR_image_format_list)": [
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-pNext-01585",
+          "text": " If a <code>VkImageFormatListCreateInfoKHR</code> structure was included in the <code>pNext</code> chain of the <code>VkImageCreateInfo</code> structure used when creating <code>image</code> and the <code>viewFormatCount</code> field of <code>VkImageFormatListCreateInfoKHR</code> is not zero then <code>format</code> <strong class=\"purple\">must</strong> be one of the formats in <code>VkImageFormatListCreateInfoKHR</code>::<code>pViewFormats</code>."
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-image-01586",
+          "text": " If <code>image</code> was created with the <code>VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT</code> flag, if the <code>format</code> of the <code>image</code> is a <a href=\"#formats-requiring-sampler-ycbcr-conversion\">multi-planar</a> format, and if <code>subresourceRange.aspectMask</code> is one of <code>VK_IMAGE_ASPECT_PLANE_0_BIT</code>, <code>VK_IMAGE_ASPECT_PLANE_1_BIT</code>, or <code>VK_IMAGE_ASPECT_PLANE_2_BIT</code>, then <code>format</code> <strong class=\"purple\">must</strong> be compatible with the <a href=\"#VkFormat\">VkFormat</a> for the plane of the <code>image</code> <code>format</code> indicated by <code>subresourceRange.aspectMask</code>, as defined in <a href=\"#formats-compatible-planes\">Compatible formats of planes of multi-planar formats</a>"
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-image-01762",
+          "text": " If <code>image</code> was not created with the <code>VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT</code> flag, or if the <code>format</code> of the <code>image</code> is a <a href=\"#formats-requiring-sampler-ycbcr-conversion\">multi-planar</a> format and if <code>subresourceRange.aspectMask</code> is <code>VK_IMAGE_ASPECT_COLOR_BIT</code>, <code>format</code> <strong class=\"purple\">must</strong> be identical to the <code>format</code> used to create <code>image</code>"
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-pNext-01970",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkSamplerYcbcrConversionInfo\">VkSamplerYcbcrConversionInfo</a> structure with a <code>conversion</code> value other than <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, all members of <code>components</code> <strong class=\"purple\">must</strong> have the value <code>VK_COMPONENT_SWIZZLE_IDENTITY</code>."
+        }
+      ],
+      "!(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-image-01019",
+          "text": " If <code>image</code> was not created with the <code>VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT</code> flag, <code>format</code> <strong class=\"purple\">must</strong> be identical to the <code>format</code> used to create <code>image</code>"
+        }
+      ],
+      "(VK_ANDROID_external_memory_android_hardware_buffer)": [
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-image-02399",
+          "text": " If <code>image</code> has an <a href=\"#memory-external-android-hardware-buffer-external-formats\">external format</a>, <code>format</code> <strong class=\"purple\">must</strong> be <code>VK_FORMAT_UNDEFINED</code>."
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-image-02400",
+          "text": " If <code>image</code> has an <a href=\"#memory-external-android-hardware-buffer-external-formats\">external format</a>, the <code>pNext</code> chain <strong class=\"purple\">must</strong> include a <a href=\"#VkSamplerYcbcrConversionInfo\">VkSamplerYcbcrConversionInfo</a> structure with a <code>conversion</code> object created with the same external format as <code>image</code>."
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-image-02401",
+          "text": " If <code>image</code> has an <a href=\"#memory-external-android-hardware-buffer-external-formats\">external format</a>, all members of <code>components</code> <strong class=\"purple\">must</strong> be <code>VK_COMPONENT_SWIZZLE_IDENTITY</code>."
+        }
+      ],
+      "(VK_NV_shading_rate_image)": [
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-image-02086",
+          "text": " If <code>image</code> was created with <code>usage</code> containing <code>VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV</code>, <code>viewType</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_VIEW_TYPE_2D</code> or <code>VK_IMAGE_VIEW_TYPE_2D_ARRAY</code>"
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-image-02087",
+          "text": " If <code>image</code> was created with <code>usage</code> containing <code>VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV</code>, <code>format</code> <strong class=\"purple\">must</strong> be <code>VK_FORMAT_R8_UINT</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_maintenance2)+!(VK_EXT_separate_stencil_usage)": [
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-pNext-02661",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkImageViewUsageCreateInfo\">VkImageViewUsageCreateInfo</a> structure, its <code>usage</code> member <strong class=\"purple\">must</strong> not include any bits that were not set in the <code>usage</code> member of the <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> structure used to create <code>image</code>."
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_maintenance2)+(VK_EXT_separate_stencil_usage)": [
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-pNext-02662",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkImageViewUsageCreateInfo\">VkImageViewUsageCreateInfo</a> structure, and <code>image</code> was not created with a <a href=\"#VkImageStencilUsageCreateInfoEXT\">VkImageStencilUsageCreateInfoEXT</a> structure included in the <code>pNext</code> chain of <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a>, its <code>usage</code> member <strong class=\"purple\">must</strong> not include any bits that were not set in the <code>usage</code> member of the <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> structure used to create <code>image</code>"
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-pNext-02663",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkImageViewUsageCreateInfo\">VkImageViewUsageCreateInfo</a> structure, <code>image</code> was created with a <a href=\"#VkImageStencilUsageCreateInfoEXT\">VkImageStencilUsageCreateInfoEXT</a> structure included in the <code>pNext</code> chain of <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a>, and <code>subResourceRange.aspectMask</code> includes <code>VK_IMAGE_ASPECT_STENCIL_BIT</code>, the <code>usage</code> member of the <a href=\"#VkImageViewUsageCreateInfo\">VkImageViewUsageCreateInfo</a> instance <strong class=\"purple\">must</strong> not include any bits that were not set in the <code>usage</code> member of the <a href=\"#VkImageStencilUsageCreateInfoEXT\">VkImageStencilUsageCreateInfoEXT</a> structure used to create <code>image</code>"
+        },
+        {
+          "vuid": "VUID-VkImageViewCreateInfo-pNext-02664",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkImageViewUsageCreateInfo\">VkImageViewUsageCreateInfo</a> structure, <code>image</code> was created with a <a href=\"#VkImageStencilUsageCreateInfoEXT\">VkImageStencilUsageCreateInfoEXT</a> structure included in the <code>pNext</code> chain of <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a>, and <code>subResourceRange.aspectMask</code> includes bits other than <code>VK_IMAGE_ASPECT_STENCIL_BIT</code>, the <code>usage</code> member of the <a href=\"#VkImageViewUsageCreateInfo\">VkImageViewUsageCreateInfo</a> structure <strong class=\"purple\">must</strong> not include any bits that were not set in the <code>usage</code> member of the <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> structure used to create <code>image</code>"
+        }
+      ]
+    },
+    "VkImageViewUsageCreateInfo": {
+      "(VK_VERSION_1_1,VK_KHR_maintenance2)": [
+        {
+          "vuid": "VUID-VkImageViewUsageCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkImageViewUsageCreateInfo-usage-parameter",
+          "text": " <code>usage</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkImageUsageFlagBits\">VkImageUsageFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkImageViewUsageCreateInfo-usage-requiredbitmask",
+          "text": " <code>usage</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        }
+      ]
+    },
+    "VkImageSubresourceRange": {
+      "core": [
+        {
+          "vuid": "VUID-VkImageSubresourceRange-levelCount-01720",
+          "text": " If <code>levelCount</code> is not <code>VK_REMAINING_MIP_LEVELS</code>, it <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkImageSubresourceRange-layerCount-01721",
+          "text": " If <code>layerCount</code> is not <code>VK_REMAINING_ARRAY_LAYERS</code>, it <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkImageSubresourceRange-aspectMask-parameter",
+          "text": " <code>aspectMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkImageAspectFlagBits\">VkImageAspectFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkImageSubresourceRange-aspectMask-requiredbitmask",
+          "text": " <code>aspectMask</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-VkImageSubresourceRange-aspectMask-01670",
+          "text": " If <code>aspectMask</code> includes <code>VK_IMAGE_ASPECT_COLOR_BIT</code>, then it <strong class=\"purple\">must</strong> not include any of <code>VK_IMAGE_ASPECT_PLANE_0_BIT</code>, <code>VK_IMAGE_ASPECT_PLANE_1_BIT</code>, or <code>VK_IMAGE_ASPECT_PLANE_2_BIT</code>"
+        }
+      ],
+      "(VK_EXT_image_drm_format_modifier)": [
+        {
+          "vuid": "VUID-VkImageSubresourceRange-aspectMask-02278",
+          "text": " <code>aspectMask</code> <strong class=\"purple\">must</strong> not include <code>VK_IMAGE_ASPECT_MEMORY_PLANE_i_BIT_EXT</code> for any index <code>i</code>"
+        }
+      ]
+    },
+    "VkComponentMapping": {
+      "core": [
+        {
+          "vuid": "VUID-VkComponentMapping-r-parameter",
+          "text": " <code>r</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkComponentSwizzle\">VkComponentSwizzle</a> value"
+        },
+        {
+          "vuid": "VUID-VkComponentMapping-g-parameter",
+          "text": " <code>g</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkComponentSwizzle\">VkComponentSwizzle</a> value"
+        },
+        {
+          "vuid": "VUID-VkComponentMapping-b-parameter",
+          "text": " <code>b</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkComponentSwizzle\">VkComponentSwizzle</a> value"
+        },
+        {
+          "vuid": "VUID-VkComponentMapping-a-parameter",
+          "text": " <code>a</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkComponentSwizzle\">VkComponentSwizzle</a> value"
+        }
+      ]
+    },
+    "VkImageViewASTCDecodeModeEXT": {
+      "(VK_EXT_astc_decode_mode)": [
+        {
+          "vuid": "VUID-VkImageViewASTCDecodeModeEXT-decodeMode-02230",
+          "text": " <code>decodeMode</code> <strong class=\"purple\">must</strong> be one of <code>VK_FORMAT_R16G16B16A16_SFLOAT</code>, <code>VK_FORMAT_R8G8B8A8_UNORM</code>, or <code>VK_FORMAT_E5B9G9R9_UFLOAT_PACK32</code>"
+        },
+        {
+          "vuid": "VUID-VkImageViewASTCDecodeModeEXT-decodeMode-02231",
+          "text": " If the <a href=\"#features-astc-decodeModeSharedExponent\"><code>decodeModeSharedExponent</code></a> feature is not enabled, <code>decodeMode</code> <strong class=\"purple\">must</strong> not be <code>VK_FORMAT_E5B9G9R9_UFLOAT_PACK32</code>"
+        },
+        {
+          "vuid": "VUID-VkImageViewASTCDecodeModeEXT-decodeMode-02232",
+          "text": " If <code>decodeMode</code> is <code>VK_FORMAT_R8G8B8A8_UNORM</code> the image view <strong class=\"purple\">must</strong> not include blocks using any of the ASTC HDR modes"
+        },
+        {
+          "vuid": "VUID-VkImageViewASTCDecodeModeEXT-format-02233",
+          "text": " <code>format</code> of the image view <strong class=\"purple\">must</strong> be one of <code>VK_FORMAT_ASTC_4x4_UNORM_BLOCK</code>, <code>VK_FORMAT_ASTC_4x4_SRGB_BLOCK</code>, <code>VK_FORMAT_ASTC_5x4_UNORM_BLOCK</code>, <code>VK_FORMAT_ASTC_5x4_SRGB_BLOCK</code>, <code>VK_FORMAT_ASTC_5x5_UNORM_BLOCK</code>, <code>VK_FORMAT_ASTC_5x5_SRGB_BLOCK</code>, <code>VK_FORMAT_ASTC_6x5_UNORM_BLOCK</code>, <code>VK_FORMAT_ASTC_6x5_SRGB_BLOCK</code>, <code>VK_FORMAT_ASTC_6x6_UNORM_BLOCK</code>, <code>VK_FORMAT_ASTC_6x6_SRGB_BLOCK</code>, <code>VK_FORMAT_ASTC_8x5_UNORM_BLOCK</code>, <code>VK_FORMAT_ASTC_8x5_SRGB_BLOCK</code>, <code>VK_FORMAT_ASTC_8x6_UNORM_BLOCK</code>, <code>VK_FORMAT_ASTC_8x6_SRGB_BLOCK</code>, <code>VK_FORMAT_ASTC_8x8_UNORM_BLOCK</code>, <code>VK_FORMAT_ASTC_8x8_SRGB_BLOCK</code>, <code>VK_FORMAT_ASTC_10x5_UNORM_BLOCK</code>, <code>VK_FORMAT_ASTC_10x5_SRGB_BLOCK</code>, <code>VK_FORMAT_ASTC_10x6_UNORM_BLOCK</code>, <code>VK_FORMAT_ASTC_10x6_SRGB_BLOCK</code>, <code>VK_FORMAT_ASTC_10x8_UNORM_BLOCK</code>, <code>VK_FORMAT_ASTC_10x8_SRGB_BLOCK</code>, <code>VK_FORMAT_ASTC_10x10_UNORM_BLOCK</code>, <code>VK_FORMAT_ASTC_10x10_SRGB_BLOCK</code>, <code>VK_FORMAT_ASTC_12x10_UNORM_BLOCK</code>, <code>VK_FORMAT_ASTC_12x10_SRGB_BLOCK</code>, <code>VK_FORMAT_ASTC_12x12_UNORM_BLOCK</code>, or <code>VK_FORMAT_ASTC_12x12_SRGB_BLOCK</code>"
+        },
+        {
+          "vuid": "VUID-VkImageViewASTCDecodeModeEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageViewASTCDecodeModeEXT-decodeMode-parameter",
+          "text": " <code>decodeMode</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFormat\">VkFormat</a> value"
+        }
+      ]
+    },
+    "vkDestroyImageView": {
+      "core": [
+        {
+          "vuid": "VUID-vkDestroyImageView-imageView-01026",
+          "text": " All submitted commands that refer to <code>imageView</code> <strong class=\"purple\">must</strong> have completed execution"
+        },
+        {
+          "vuid": "VUID-vkDestroyImageView-imageView-01027",
+          "text": " If <code>VkAllocationCallbacks</code> were provided when <code>imageView</code> was created, a compatible set of callbacks <strong class=\"purple\">must</strong> be provided here"
+        },
+        {
+          "vuid": "VUID-vkDestroyImageView-imageView-01028",
+          "text": " If no <code>VkAllocationCallbacks</code> were provided when <code>imageView</code> was created, <code>pAllocator</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroyImageView-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyImageView-imageView-parameter",
+          "text": " If <code>imageView</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>imageView</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageView\">VkImageView</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyImageView-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkDestroyImageView-imageView-parent",
+          "text": " If <code>imageView</code> is a valid handle, it <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkGetImageViewHandleNVX": {
+      "(VK_NVX_image_view_handle)": [
+        {
+          "vuid": "VUID-vkGetImageViewHandleNVX-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetImageViewHandleNVX-pInfo-parameter",
+          "text": " <code>pInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkImageViewHandleInfoNVX\">VkImageViewHandleInfoNVX</a> structure"
+        }
+      ]
+    },
+    "VkImageViewHandleInfoNVX": {
+      "(VK_NVX_image_view_handle)": [
+        {
+          "vuid": "VUID-VkImageViewHandleInfoNVX-descriptorType-02654",
+          "text": " <code>descriptorType</code> <strong class=\"purple\">must</strong> be <code>VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE</code>, <code>VK_DESCRIPTOR_TYPE_STORAGE_IMAGE</code>, or <code>VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER</code>"
+        },
+        {
+          "vuid": "VUID-VkImageViewHandleInfoNVX-sampler-02655",
+          "text": " <code>sampler</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSampler\">VkSampler</a> if <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER</code>"
+        },
+        {
+          "vuid": "VUID-VkImageViewHandleInfoNVX-imageView-02656",
+          "text": " If descriptorType is <code>VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE</code> or <code>VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER</code>, the image that <code>imageView</code> was created from <strong class=\"purple\">must</strong> have been created with the <code>VK_IMAGE_USAGE_SAMPLED_BIT</code> usage bit set"
+        },
+        {
+          "vuid": "VUID-VkImageViewHandleInfoNVX-imageView-02657",
+          "text": " If descriptorType is <code>VK_DESCRIPTOR_TYPE_STORAGE_IMAGE</code>, the image that <code>imageView</code> was created from <strong class=\"purple\">must</strong> have been created with the <code>VK_IMAGE_USAGE_STORAGE_BIT</code> usage bit set"
+        },
+        {
+          "vuid": "VUID-VkImageViewHandleInfoNVX-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX</code>"
+        },
+        {
+          "vuid": "VUID-VkImageViewHandleInfoNVX-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkImageViewHandleInfoNVX-imageView-parameter",
+          "text": " <code>imageView</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageView\">VkImageView</a> handle"
+        },
+        {
+          "vuid": "VUID-VkImageViewHandleInfoNVX-descriptorType-parameter",
+          "text": " <code>descriptorType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDescriptorType\">VkDescriptorType</a> value"
+        },
+        {
+          "vuid": "VUID-VkImageViewHandleInfoNVX-sampler-parameter",
+          "text": " If <code>sampler</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>sampler</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSampler\">VkSampler</a> handle"
+        },
+        {
+          "vuid": "VUID-VkImageViewHandleInfoNVX-commonparent",
+          "text": " Both of <code>imageView</code>, and <code>sampler</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
+    "vkGetBufferMemoryRequirements": {
+      "core": [
+        {
+          "vuid": "VUID-vkGetBufferMemoryRequirements-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetBufferMemoryRequirements-buffer-parameter",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetBufferMemoryRequirements-pMemoryRequirements-parameter",
+          "text": " <code>pMemoryRequirements</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkMemoryRequirements\">VkMemoryRequirements</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetBufferMemoryRequirements-buffer-parent",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkGetImageMemoryRequirements": {
+      "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-vkGetImageMemoryRequirements-image-01588",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> not have been created with the <code>VK_IMAGE_CREATE_DISJOINT_BIT</code> flag set"
+        }
+      ],
+      "core": [
+        {
+          "vuid": "VUID-vkGetImageMemoryRequirements-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetImageMemoryRequirements-image-parameter",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImage\">VkImage</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetImageMemoryRequirements-pMemoryRequirements-parameter",
+          "text": " <code>pMemoryRequirements</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkMemoryRequirements\">VkMemoryRequirements</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetImageMemoryRequirements-image-parent",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkGetBufferMemoryRequirements2": {
+      "(VK_VERSION_1_1,VK_KHR_get_memory_requirements2)": [
+        {
+          "vuid": "VUID-vkGetBufferMemoryRequirements2-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetBufferMemoryRequirements2-pInfo-parameter",
+          "text": " <code>pInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkBufferMemoryRequirementsInfo2\">VkBufferMemoryRequirementsInfo2</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetBufferMemoryRequirements2-pMemoryRequirements-parameter",
+          "text": " <code>pMemoryRequirements</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkMemoryRequirements2\">VkMemoryRequirements2</a> structure"
+        }
+      ]
+    },
+    "VkBufferMemoryRequirementsInfo2": {
+      "(VK_VERSION_1_1,VK_KHR_get_memory_requirements2)": [
+        {
+          "vuid": "VUID-VkBufferMemoryRequirementsInfo2-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryRequirementsInfo2-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryRequirementsInfo2-buffer-parameter",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        }
+      ]
+    },
+    "vkGetImageMemoryRequirements2": {
+      "(VK_VERSION_1_1,VK_KHR_get_memory_requirements2)": [
+        {
+          "vuid": "VUID-vkGetImageMemoryRequirements2-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetImageMemoryRequirements2-pInfo-parameter",
+          "text": " <code>pInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkImageMemoryRequirementsInfo2\">VkImageMemoryRequirementsInfo2</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetImageMemoryRequirements2-pMemoryRequirements-parameter",
+          "text": " <code>pMemoryRequirements</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkMemoryRequirements2\">VkMemoryRequirements2</a> structure"
+        }
+      ]
+    },
+    "VkImageMemoryRequirementsInfo2": {
+      "(VK_VERSION_1_1,VK_KHR_get_memory_requirements2)+(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-VkImageMemoryRequirementsInfo2-image-01589",
+          "text": " If <code>image</code> was created with a <em>multi-planar</em> format and the <code>VK_IMAGE_CREATE_DISJOINT_BIT</code> flag, there <strong class=\"purple\">must</strong> be a <a href=\"#VkImagePlaneMemoryRequirementsInfo\">VkImagePlaneMemoryRequirementsInfo</a> included in the <code>pNext</code> chain of the <a href=\"#VkImageMemoryRequirementsInfo2\">VkImageMemoryRequirementsInfo2</a> structure"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryRequirementsInfo2-image-01590",
+          "text": " If <code>image</code> was not created with the <code>VK_IMAGE_CREATE_DISJOINT_BIT</code> flag, there <strong class=\"purple\">must</strong> not be a <a href=\"#VkImagePlaneMemoryRequirementsInfo\">VkImagePlaneMemoryRequirementsInfo</a> included in the <code>pNext</code> chain of the <a href=\"#VkImageMemoryRequirementsInfo2\">VkImageMemoryRequirementsInfo2</a> structure"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_get_memory_requirements2)+(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)+(VK_EXT_image_drm_format_modifier)": [
+        {
+          "vuid": "VUID-VkImageMemoryRequirementsInfo2-image-02279",
+          "text": " If <code>image</code> was created with <code>VK_IMAGE_CREATE_DISJOINT_BIT</code> and with <code>VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT</code>, then there <strong class=\"purple\">must</strong> be a <a href=\"#VkImagePlaneMemoryRequirementsInfo\">VkImagePlaneMemoryRequirementsInfo</a> included in the <code>pNext</code> chain of the <a href=\"#VkImageMemoryRequirementsInfo2\">VkImageMemoryRequirementsInfo2</a> structure"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryRequirementsInfo2-image-02280",
+          "text": " If <code>image</code> was created with a single-plane format and with any <code>tiling</code> other than <code>VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT</code>, then there <strong class=\"purple\">must</strong> not be a <a href=\"#VkImagePlaneMemoryRequirementsInfo\">VkImagePlaneMemoryRequirementsInfo</a> included in the <code>pNext</code> chain of the <a href=\"#VkImageMemoryRequirementsInfo2\">VkImageMemoryRequirementsInfo2</a> structure"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_get_memory_requirements2)+(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)+!(VK_EXT_image_drm_format_modifier)": [
+        {
+          "vuid": "VUID-VkImageMemoryRequirementsInfo2-image-01591",
+          "text": " If <code>image</code> was created with a single-plane format, there <strong class=\"purple\">must</strong> not be a <a href=\"#VkImagePlaneMemoryRequirementsInfo\">VkImagePlaneMemoryRequirementsInfo</a> included in the <code>pNext</code> chain of the <a href=\"#VkImageMemoryRequirementsInfo2\">VkImageMemoryRequirementsInfo2</a> structure"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_get_memory_requirements2)+(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)+(VK_ANDROID_external_memory_android_hardware_buffer)": [
+        {
+          "vuid": "VUID-VkImageMemoryRequirementsInfo2-image-01897",
+          "text": " If <code>image</code> was created with the <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID</code> external memory handle type, then <code>image</code> <strong class=\"purple\">must</strong> be bound to memory."
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_get_memory_requirements2)": [
+        {
+          "vuid": "VUID-VkImageMemoryRequirementsInfo2-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2</code>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryRequirementsInfo2-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkImagePlaneMemoryRequirementsInfo\">VkImagePlaneMemoryRequirementsInfo</a>"
+        },
+        {
+          "vuid": "VUID-VkImageMemoryRequirementsInfo2-image-parameter",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImage\">VkImage</a> handle"
+        }
+      ]
+    },
+    "VkImagePlaneMemoryRequirementsInfo": {
+      "(VK_VERSION_1_1,VK_KHR_get_memory_requirements2)+(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-VkImagePlaneMemoryRequirementsInfo-planeAspect-02281",
+          "text": " If the image&#8217;s tiling is <code>VK_IMAGE_TILING_LINEAR</code> or <code>VK_IMAGE_TILING_OPTIMAL</code>, then <code>planeAspect</code> <strong class=\"purple\">must</strong> be a single valid <em>format plane</em> for the image. (That is, for a two-plane image <code>planeAspect</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_ASPECT_PLANE_0_BIT</code> or <code>VK_IMAGE_ASPECT_PLANE_1_BIT</code>, and for a three-plane image <code>planeAspect</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_ASPECT_PLANE_0_BIT</code>, <code>VK_IMAGE_ASPECT_PLANE_1_BIT</code> or <code>VK_IMAGE_ASPECT_PLANE_2_BIT</code>)."
+        },
+        {
+          "vuid": "VUID-VkImagePlaneMemoryRequirementsInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkImagePlaneMemoryRequirementsInfo-planeAspect-parameter",
+          "text": " <code>planeAspect</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageAspectFlagBits\">VkImageAspectFlagBits</a> value"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_get_memory_requirements2)+(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)+(VK_EXT_image_drm_format_modifier)": [
+        {
+          "vuid": "VUID-VkImagePlaneMemoryRequirementsInfo-planeAspect-02282",
+          "text": "  If the image&#8217;s tiling is <code>VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT</code>,  then <code>planeAspect</code> <strong class=\"purple\">must</strong> be a single valid <em>memory plane</em> for the  image.  (That is, <code>aspectMask</code> <strong class=\"purple\">must</strong> specify a plane index that is less than  the  <a href=\"#VkDrmFormatModifierPropertiesEXT\"><code>drmFormatModifierPlaneCount</code></a>  associated with the image&#8217;s <a href=\"#VkImageCreateInfo\"><code>format</code></a> and <a href=\"#VkImageDrmFormatModifierPropertiesEXT\"><code>drmFormatModifier</code></a>.)"
+        }
+      ]
+    },
+    "VkMemoryRequirements2": {
+      "(VK_VERSION_1_1,VK_KHR_get_memory_requirements2)": [
+        {
+          "vuid": "VUID-VkMemoryRequirements2-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2</code>"
+        },
+        {
+          "vuid": "VUID-VkMemoryRequirements2-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkMemoryDedicatedRequirements\">VkMemoryDedicatedRequirements</a>"
+        }
+      ]
+    },
+    "VkMemoryDedicatedRequirements": {
+      "(VK_VERSION_1_1,VK_KHR_dedicated_allocation)": [
+        {
+          "vuid": "VUID-VkMemoryDedicatedRequirements-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS</code>"
+        }
+      ]
+    },
+    "vkBindBufferMemory": {
+      "core": [
+        {
+          "vuid": "VUID-vkBindBufferMemory-buffer-01029",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> not already be backed by a memory object"
+        },
+        {
+          "vuid": "VUID-vkBindBufferMemory-buffer-01030",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> not have been created with any sparse memory binding flags"
+        },
+        {
+          "vuid": "VUID-vkBindBufferMemory-memoryOffset-01031",
+          "text": " <code>memoryOffset</code> <strong class=\"purple\">must</strong> be less than the size of <code>memory</code>"
+        },
+        {
+          "vuid": "VUID-vkBindBufferMemory-memory-01035",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> have been allocated using one of the memory types allowed in the <code>memoryTypeBits</code> member of the <code>VkMemoryRequirements</code> structure returned from a call to <code>vkGetBufferMemoryRequirements</code> with <code>buffer</code>"
+        },
+        {
+          "vuid": "VUID-vkBindBufferMemory-memoryOffset-01036",
+          "text": " <code>memoryOffset</code> <strong class=\"purple\">must</strong> be an integer multiple of the <code>alignment</code> member of the <code>VkMemoryRequirements</code> structure returned from a call to <code>vkGetBufferMemoryRequirements</code> with <code>buffer</code>"
+        },
+        {
+          "vuid": "VUID-vkBindBufferMemory-size-01037",
+          "text": " The <code>size</code> member of the <code>VkMemoryRequirements</code> structure returned from a call to <code>vkGetBufferMemoryRequirements</code> with <code>buffer</code> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>memory</code> minus <code>memoryOffset</code>"
+        },
+        {
+          "vuid": "VUID-vkBindBufferMemory-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkBindBufferMemory-buffer-parameter",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkBindBufferMemory-memory-parameter",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDeviceMemory\">VkDeviceMemory</a> handle"
+        },
+        {
+          "vuid": "VUID-vkBindBufferMemory-buffer-parent",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        },
+        {
+          "vuid": "VUID-vkBindBufferMemory-memory-parent",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_dedicated_allocation)": [
+        {
+          "vuid": "VUID-vkBindBufferMemory-buffer-01444",
+          "text": " If <code>buffer</code> requires a dedicated allocation(as reported by <a href=\"#vkGetBufferMemoryRequirements2\">vkGetBufferMemoryRequirements2</a> in <a href=\"#VkMemoryDedicatedRequirements\">VkMemoryDedicatedRequirements</a>::requiresDedicatedAllocation for <code>buffer</code>), <code>memory</code> <strong class=\"purple\">must</strong> have been created with <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a>::<code>buffer</code> equal to <code>buffer</code>"
+        },
+        {
+          "vuid": "VUID-vkBindBufferMemory-memory-01508",
+          "text": " If the <code>VkMemoryAllocateInfo</code> provided when <code>memory</code> was allocated included a <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a> structure in its <code>pNext</code> chain, and <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a>::<code>buffer</code> was not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, then <code>buffer</code> <strong class=\"purple\">must</strong> equal <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a>::<code>buffer</code>, and <code>memoryOffset</code> <strong class=\"purple\">must</strong> be zero."
+        }
+      ],
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkBindBufferMemory-None-01898",
+          "text": " If buffer was created with the <code>VK_BUFFER_CREATE_PROTECTED_BIT</code> bit set, the buffer <strong class=\"purple\">must</strong> be bound to a memory object allocated with a memory type that reports <code>VK_MEMORY_PROPERTY_PROTECTED_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkBindBufferMemory-None-01899",
+          "text": " If buffer was created with the <code>VK_BUFFER_CREATE_PROTECTED_BIT</code> bit not set, the buffer <strong class=\"purple\">must</strong> not be bound to a memory object created with a memory type that reports <code>VK_MEMORY_PROPERTY_PROTECTED_BIT</code>"
+        }
+      ],
+      "(VK_NV_dedicated_allocation)": [
+        {
+          "vuid": "VUID-vkBindBufferMemory-buffer-01038",
+          "text": " If <code>buffer</code> was created with <a href=\"#VkDedicatedAllocationBufferCreateInfoNV\">VkDedicatedAllocationBufferCreateInfoNV</a>::<code>dedicatedAllocation</code> equal to <code>VK_TRUE</code>, <code>memory</code> <strong class=\"purple\">must</strong> have been created with <a href=\"#VkDedicatedAllocationMemoryAllocateInfoNV\">VkDedicatedAllocationMemoryAllocateInfoNV</a>::<code>buffer</code> equal to a buffer handle created with identical creation parameters to <code>buffer</code> and <code>memoryOffset</code> <strong class=\"purple\">must</strong> be zero"
+        }
+      ],
+      "(VK_NV_dedicated_allocation)+!(VK_VERSION_1_1,VK_KHR_dedicated_allocation)": [
+        {
+          "vuid": "VUID-vkBindBufferMemory-buffer-01039",
+          "text": " If <code>buffer</code> was not created with <a href=\"#VkDedicatedAllocationBufferCreateInfoNV\">VkDedicatedAllocationBufferCreateInfoNV</a>::<code>dedicatedAllocation</code> equal to <code>VK_TRUE</code>, <code>memory</code> <strong class=\"purple\">must</strong> not have been allocated dedicated for a specific buffer or image"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_external_memory)": [
+        {
+          "vuid": "VUID-vkBindBufferMemory-memory-02726",
+          "text": " If the value of <a href=\"#VkExportMemoryAllocateInfo\">VkExportMemoryAllocateInfo</a>::<code>handleTypes</code> used to allocate <code>memory</code> is not <code>0</code>, it <strong class=\"purple\">must</strong> include at least one of the handles set in <a href=\"#VkExternalMemoryBufferCreateInfo\">VkExternalMemoryBufferCreateInfo</a>::<code>handleTypes</code> when <code>buffer</code> was created"
+        },
+        {
+          "vuid": "VUID-vkBindBufferMemory-memory-02727",
+          "text": " If <code>memory</code> was created by a memory import operation, the external handle type of the imported memory <strong class=\"purple\">must</strong> also have been set in <a href=\"#VkExternalMemoryBufferCreateInfo\">VkExternalMemoryBufferCreateInfo</a>::<code>handleTypes</code> when <code>buffer</code> was created"
+        }
+      ],
+      "(VK_KHR_buffer_device_address)": [
+        {
+          "vuid": "VUID-vkBindBufferMemory-bufferDeviceAddress-03339",
+          "text": " If the <a href=\"#VkPhysicalDeviceBufferDeviceAddressFeaturesKHR\">VkPhysicalDeviceBufferDeviceAddressFeaturesKHR</a>::<code>bufferDeviceAddress</code> feature is enabled and <code>buffer</code> was created with the <code>VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR</code> bit set, <code>memory</code> <strong class=\"purple\">must</strong> have been allocated with the <code>VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR</code> bit set"
+        }
+      ]
+    },
+    "vkBindBufferMemory2": {
+      "(VK_VERSION_1_1,VK_KHR_bind_memory2)": [
+        {
+          "vuid": "VUID-vkBindBufferMemory2-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkBindBufferMemory2-pBindInfos-parameter",
+          "text": " <code>pBindInfos</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>bindInfoCount</code> valid <a href=\"#VkBindBufferMemoryInfo\">VkBindBufferMemoryInfo</a> structures"
+        },
+        {
+          "vuid": "VUID-vkBindBufferMemory2-bindInfoCount-arraylength",
+          "text": " <code>bindInfoCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkBindBufferMemoryInfo": {
+      "(VK_VERSION_1_1,VK_KHR_bind_memory2)": [
+        {
+          "vuid": "VUID-VkBindBufferMemoryInfo-buffer-01593",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> not already be backed by a memory object"
+        },
+        {
+          "vuid": "VUID-VkBindBufferMemoryInfo-buffer-01594",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> not have been created with any sparse memory binding flags"
+        },
+        {
+          "vuid": "VUID-VkBindBufferMemoryInfo-memoryOffset-01595",
+          "text": " <code>memoryOffset</code> <strong class=\"purple\">must</strong> be less than the size of <code>memory</code>"
+        },
+        {
+          "vuid": "VUID-VkBindBufferMemoryInfo-memory-01599",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> have been allocated using one of the memory types allowed in the <code>memoryTypeBits</code> member of the <code>VkMemoryRequirements</code> structure returned from a call to <code>vkGetBufferMemoryRequirements</code> with <code>buffer</code>"
+        },
+        {
+          "vuid": "VUID-VkBindBufferMemoryInfo-memoryOffset-01600",
+          "text": " <code>memoryOffset</code> <strong class=\"purple\">must</strong> be an integer multiple of the <code>alignment</code> member of the <code>VkMemoryRequirements</code> structure returned from a call to <code>vkGetBufferMemoryRequirements</code> with <code>buffer</code>"
+        },
+        {
+          "vuid": "VUID-VkBindBufferMemoryInfo-size-01601",
+          "text": " The <code>size</code> member of the <code>VkMemoryRequirements</code> structure returned from a call to <code>vkGetBufferMemoryRequirements</code> with <code>buffer</code> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>memory</code> minus <code>memoryOffset</code>"
+        },
+        {
+          "vuid": "VUID-VkBindBufferMemoryInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkBindBufferMemoryInfo-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkBindBufferMemoryDeviceGroupInfo\">VkBindBufferMemoryDeviceGroupInfo</a>"
+        },
+        {
+          "vuid": "VUID-VkBindBufferMemoryInfo-buffer-parameter",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-VkBindBufferMemoryInfo-memory-parameter",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDeviceMemory\">VkDeviceMemory</a> handle"
+        },
+        {
+          "vuid": "VUID-VkBindBufferMemoryInfo-commonparent",
+          "text": " Both of <code>buffer</code>, and <code>memory</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_dedicated_allocation)": [
+        {
+          "vuid": "VUID-VkBindBufferMemoryInfo-buffer-01602",
+          "text": " If <code>buffer</code> requires a dedicated allocation(as reported by <a href=\"#vkGetBufferMemoryRequirements2\">vkGetBufferMemoryRequirements2</a> in <a href=\"#VkMemoryDedicatedRequirements\">VkMemoryDedicatedRequirements</a>::requiresDedicatedAllocation for <code>buffer</code>), <code>memory</code> <strong class=\"purple\">must</strong> have been created with <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a>::<code>buffer</code> equal to <code>buffer</code> and <code>memoryOffset</code> <strong class=\"purple\">must</strong> be zero"
+        },
+        {
+          "vuid": "VUID-VkBindBufferMemoryInfo-memory-01900",
+          "text": " If the <code>VkMemoryAllocateInfo</code> provided when <code>memory</code> was allocated included a <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a> structure in its <code>pNext</code> chain, and <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a>::<code>buffer</code> was not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, then <code>buffer</code> <strong class=\"purple\">must</strong> equal <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a>::<code>buffer</code> and <code>memoryOffset</code> <strong class=\"purple\">must</strong> be zero."
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_NV_dedicated_allocation)": [
+        {
+          "vuid": "VUID-VkBindBufferMemoryInfo-buffer-01603",
+          "text": " If <code>buffer</code> was created with <a href=\"#VkDedicatedAllocationBufferCreateInfoNV\">VkDedicatedAllocationBufferCreateInfoNV</a>::<code>dedicatedAllocation</code> equal to <code>VK_TRUE</code>, <code>memory</code> <strong class=\"purple\">must</strong> have been created with <a href=\"#VkDedicatedAllocationMemoryAllocateInfoNV\">VkDedicatedAllocationMemoryAllocateInfoNV</a>::<code>buffer</code> equal to <code>buffer</code> and <code>memoryOffset</code> <strong class=\"purple\">must</strong> be zero"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_NV_dedicated_allocation)+!(VK_VERSION_1_1,VK_KHR_dedicated_allocation)": [
+        {
+          "vuid": "VUID-VkBindBufferMemoryInfo-buffer-01604",
+          "text": " If <code>buffer</code> was not created with <a href=\"#VkDedicatedAllocationBufferCreateInfoNV\">VkDedicatedAllocationBufferCreateInfoNV</a>::<code>dedicatedAllocation</code> equal to <code>VK_TRUE</code>, <code>memory</code> <strong class=\"purple\">must</strong> not have been allocated dedicated for a specific buffer or image"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-VkBindBufferMemoryInfo-pNext-01605",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkBindBufferMemoryDeviceGroupInfo\">VkBindBufferMemoryDeviceGroupInfo</a> structure, all instances of <code>memory</code> specified by <a href=\"#VkBindBufferMemoryDeviceGroupInfo\">VkBindBufferMemoryDeviceGroupInfo</a>::<code>pDeviceIndices</code> <strong class=\"purple\">must</strong> have been allocated"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_external_memory)": [
+        {
+          "vuid": "VUID-VkBindBufferMemoryInfo-handleTypes-02791",
+          "text": " If the value of <a href=\"#VkExportMemoryAllocateInfo\">VkExportMemoryAllocateInfo</a>::<code>handleTypes</code> used to allocate <code>memory</code> is not <code>0</code>, it <strong class=\"purple\">must</strong> include at least one of the handles set in <a href=\"#VkExternalMemoryBufferCreateInfo\">VkExternalMemoryBufferCreateInfo</a>::<code>handleTypes</code> when <code>buffer</code> was created"
+        },
+        {
+          "vuid": "VUID-VkBindBufferMemoryInfo-memory-02792",
+          "text": " If <code>memory</code> was created by a memory import operation, the external handle type of the imported memory <strong class=\"purple\">must</strong> also have been set in <a href=\"#VkExternalMemoryBufferCreateInfo\">VkExternalMemoryBufferCreateInfo</a>::<code>handleTypes</code> when <code>buffer</code> was created"
+        }
+      ]
+    },
+    "VkBindBufferMemoryDeviceGroupInfo": {
+      "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-VkBindBufferMemoryDeviceGroupInfo-deviceIndexCount-01606",
+          "text": " <code>deviceIndexCount</code> <strong class=\"purple\">must</strong> either be zero or equal to the number of physical devices in the logical device"
+        },
+        {
+          "vuid": "VUID-VkBindBufferMemoryDeviceGroupInfo-pDeviceIndices-01607",
+          "text": " All elements of <code>pDeviceIndices</code> <strong class=\"purple\">must</strong> be valid device indices"
+        },
+        {
+          "vuid": "VUID-VkBindBufferMemoryDeviceGroupInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkBindBufferMemoryDeviceGroupInfo-pDeviceIndices-parameter",
+          "text": " If <code>deviceIndexCount</code> is not <code>0</code>, <code>pDeviceIndices</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>deviceIndexCount</code> <code>uint32_t</code> values"
+        }
+      ]
+    },
+    "vkBindImageMemory": {
+      "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-vkBindImageMemory-image-01608",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> not have been created with the <code>VK_IMAGE_CREATE_DISJOINT_BIT</code> set."
+        }
+      ],
+      "core": [
+        {
+          "vuid": "VUID-vkBindImageMemory-image-01044",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> not already be backed by a memory object"
+        },
+        {
+          "vuid": "VUID-vkBindImageMemory-image-01045",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> not have been created with any sparse memory binding flags"
+        },
+        {
+          "vuid": "VUID-vkBindImageMemory-memoryOffset-01046",
+          "text": " <code>memoryOffset</code> <strong class=\"purple\">must</strong> be less than the size of <code>memory</code>"
+        },
+        {
+          "vuid": "VUID-vkBindImageMemory-memory-01047",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> have been allocated using one of the memory types allowed in the <code>memoryTypeBits</code> member of the <code>VkMemoryRequirements</code> structure returned from a call to <code>vkGetImageMemoryRequirements</code> with <code>image</code>"
+        },
+        {
+          "vuid": "VUID-vkBindImageMemory-memoryOffset-01048",
+          "text": " <code>memoryOffset</code> <strong class=\"purple\">must</strong> be an integer multiple of the <code>alignment</code> member of the <code>VkMemoryRequirements</code> structure returned from a call to <code>vkGetImageMemoryRequirements</code> with <code>image</code>"
+        },
+        {
+          "vuid": "VUID-vkBindImageMemory-size-01049",
+          "text": " The <code>size</code> member of the <code>VkMemoryRequirements</code> structure returned from a call to <code>vkGetImageMemoryRequirements</code> with <code>image</code> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>memory</code> minus <code>memoryOffset</code>"
+        },
+        {
+          "vuid": "VUID-vkBindImageMemory-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkBindImageMemory-image-parameter",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImage\">VkImage</a> handle"
+        },
+        {
+          "vuid": "VUID-vkBindImageMemory-memory-parameter",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDeviceMemory\">VkDeviceMemory</a> handle"
+        },
+        {
+          "vuid": "VUID-vkBindImageMemory-image-parent",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        },
+        {
+          "vuid": "VUID-vkBindImageMemory-memory-parent",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_dedicated_allocation)": [
+        {
+          "vuid": "VUID-vkBindImageMemory-image-01445",
+          "text": " If <code>image</code> requires a dedicated allocation (as reported by <a href=\"#vkGetImageMemoryRequirements2\">vkGetImageMemoryRequirements2</a> in <a href=\"#VkMemoryDedicatedRequirements\">VkMemoryDedicatedRequirements</a>::requiresDedicatedAllocation for <code>image</code>), <code>memory</code> <strong class=\"purple\">must</strong> have been created with <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a>::<code>image</code> equal to <code>image</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_dedicated_allocation)+!(VK_NV_dedicated_allocation_image_aliasing)": [
+        {
+          "vuid": "VUID-vkBindImageMemory-memory-01509",
+          "text": " If the <code>VkMemoryAllocateInfo</code> provided when <code>memory</code> was allocated included a <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a> structure in its <code>pNext</code> chain, and <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a>::<code>image</code> was not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, then <code>image</code> <strong class=\"purple\">must</strong> equal <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a>::<code>image</code> and <code>memoryOffset</code> <strong class=\"purple\">must</strong> be zero"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_dedicated_allocation)+(VK_NV_dedicated_allocation_image_aliasing)": [
+        {
+          "vuid": "VUID-vkBindImageMemory-memory-02628",
+          "text": " If the <a href=\"#features-dedicatedAllocationImageAliasing\">dedicated allocation image aliasing</a> feature is not enabled, and the <code>VkMemoryAllocateInfo</code> provided when <code>memory</code> was allocated included a <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a> structure in its <code>pNext</code> chain, and <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a>::<code>image</code> was not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, then <code>image</code> <strong class=\"purple\">must</strong> equal <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a>::<code>image</code> and <code>memoryOffset</code> <strong class=\"purple\">must</strong> be zero."
+        },
+        {
+          "vuid": "VUID-vkBindImageMemory-memory-02629",
+          "text": " If the <a href=\"#features-dedicatedAllocationImageAliasing\">dedicated allocation image aliasing</a> feature is enabled, and the <code>VkMemoryAllocateInfo</code> provided when <code>memory</code> was allocated included a <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a> structure in its <code>pNext</code> chain, and <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a>::<code>image</code> was not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, then <code>memoryOffset</code> <strong class=\"purple\">must</strong> be zero, and <code>image</code> <strong class=\"purple\">must</strong> be either equal to <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a>::<code>image</code> or an image that was created using the same parameters in <code>VkImageCreateInfo</code>, with the exception that <code>extent</code> and <code>arrayLayers</code> <strong class=\"purple\">may</strong> differ subject to the following restrictions: every dimension in the <code>extent</code> parameter of the image being bound <strong class=\"purple\">must</strong> be equal to or smaller than the original image for which the allocation was created; and the <code>arrayLayers</code> parameter of the image being bound <strong class=\"purple\">must</strong> be equal to or smaller than the original image for which the allocation was created."
+        }
+      ],
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkBindImageMemory-None-01901",
+          "text": " If image was created with the <code>VK_IMAGE_CREATE_PROTECTED_BIT</code> bit set, the image <strong class=\"purple\">must</strong> be bound to a memory object allocated with a memory type that reports <code>VK_MEMORY_PROPERTY_PROTECTED_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkBindImageMemory-None-01902",
+          "text": " If image was created with the <code>VK_IMAGE_CREATE_PROTECTED_BIT</code> bit not set, the image <strong class=\"purple\">must</strong> not be bound to a memory object created with a memory type that reports <code>VK_MEMORY_PROPERTY_PROTECTED_BIT</code>"
+        }
+      ],
+      "(VK_NV_dedicated_allocation)": [
+        {
+          "vuid": "VUID-vkBindImageMemory-image-01050",
+          "text": " If <code>image</code> was created with <a href=\"#VkDedicatedAllocationImageCreateInfoNV\">VkDedicatedAllocationImageCreateInfoNV</a>::<code>dedicatedAllocation</code> equal to <code>VK_TRUE</code>, <code>memory</code> <strong class=\"purple\">must</strong> have been created with <a href=\"#VkDedicatedAllocationMemoryAllocateInfoNV\">VkDedicatedAllocationMemoryAllocateInfoNV</a>::<code>image</code> equal to an image handle created with identical creation parameters to <code>image</code> and <code>memoryOffset</code> <strong class=\"purple\">must</strong> be zero"
+        }
+      ],
+      "(VK_NV_dedicated_allocation)+!(VK_VERSION_1_1,VK_KHR_dedicated_allocation)": [
+        {
+          "vuid": "VUID-vkBindImageMemory-image-01051",
+          "text": " If <code>image</code> was not created with <a href=\"#VkDedicatedAllocationImageCreateInfoNV\">VkDedicatedAllocationImageCreateInfoNV</a>::<code>dedicatedAllocation</code> equal to <code>VK_TRUE</code>, <code>memory</code> <strong class=\"purple\">must</strong> not have been allocated dedicated for a specific buffer or image"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_external_memory)": [
+        {
+          "vuid": "VUID-vkBindImageMemory-memory-02728",
+          "text": " If the value of <a href=\"#VkExportMemoryAllocateInfo\">VkExportMemoryAllocateInfo</a>::<code>handleTypes</code> used to allocate <code>memory</code> is not <code>0</code>, it <strong class=\"purple\">must</strong> include at least one of the handles set in <a href=\"#VkExternalMemoryImageCreateInfo\">VkExternalMemoryImageCreateInfo</a>::<code>handleTypes</code> when <code>image</code> was created"
+        },
+        {
+          "vuid": "VUID-vkBindImageMemory-memory-02729",
+          "text": " If <code>memory</code> was created by a memory import operation, the external handle type of the imported memory <strong class=\"purple\">must</strong> also have been set in <a href=\"#VkExternalMemoryImageCreateInfo\">VkExternalMemoryImageCreateInfo</a>::<code>handleTypes</code> when <code>image</code> was created"
+        }
+      ]
+    },
+    "vkBindImageMemory2": {
+      "(VK_VERSION_1_1,VK_KHR_bind_memory2)": [
+        {
+          "vuid": "VUID-vkBindImageMemory2-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkBindImageMemory2-pBindInfos-parameter",
+          "text": " <code>pBindInfos</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>bindInfoCount</code> valid <a href=\"#VkBindImageMemoryInfo\">VkBindImageMemoryInfo</a> structures"
+        },
+        {
+          "vuid": "VUID-vkBindImageMemory2-bindInfoCount-arraylength",
+          "text": " <code>bindInfoCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkBindImageMemoryInfo": {
+      "(VK_VERSION_1_1,VK_KHR_bind_memory2)": [
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-image-01609",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> not already be backed by a memory object"
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-image-01610",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> not have been created with any sparse memory binding flags"
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-memoryOffset-01611",
+          "text": " <code>memoryOffset</code> <strong class=\"purple\">must</strong> be less than the size of <code>memory</code>"
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-pNext-pNext",
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkBindImageMemoryDeviceGroupInfo\">VkBindImageMemoryDeviceGroupInfo</a>, <a href=\"#VkBindImageMemorySwapchainInfoKHR\">VkBindImageMemorySwapchainInfoKHR</a>, or <a href=\"#VkBindImagePlaneMemoryInfo\">VkBindImagePlaneMemoryInfo</a>"
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-sType-unique",
+          "text": " Each <code>sType</code> member in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-image-parameter",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImage\">VkImage</a> handle"
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-commonparent",
+          "text": " Both of <code>image</code>, and <code>memory</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_bind_memory2)+!(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-memory-01612",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> have been allocated using one of the memory types allowed in the <code>memoryTypeBits</code> member of the <a href=\"#VkMemoryRequirements\">VkMemoryRequirements</a> structure returned from a call to <a href=\"#vkGetImageMemoryRequirements\">vkGetImageMemoryRequirements</a> with <code>image</code>"
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-memoryOffset-01613",
+          "text": " <code>memoryOffset</code> <strong class=\"purple\">must</strong> be an integer multiple of the <code>alignment</code> member of the <a href=\"#VkMemoryRequirements\">VkMemoryRequirements</a> structure returned from a call to <a href=\"#vkGetImageMemoryRequirements\">vkGetImageMemoryRequirements</a> with <code>image</code>"
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-memory-01614",
+          "text": " The difference of the size of <code>memory</code> and <code>memoryOffset</code> <strong class=\"purple\">must</strong> be greater than or equal to the <code>size</code> member of the <a href=\"#VkMemoryRequirements\">VkMemoryRequirements</a> structure returned from a call to <a href=\"#vkGetImageMemoryRequirements\">vkGetImageMemoryRequirements</a> with the same <code>image</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-pNext-01615",
+          "text": " If the <code>pNext</code> chain does not include a <a href=\"#VkBindImagePlaneMemoryInfo\">VkBindImagePlaneMemoryInfo</a> structure, <code>memory</code> <strong class=\"purple\">must</strong> have been allocated using one of the memory types allowed in the <code>memoryTypeBits</code> member of the <a href=\"#VkMemoryRequirements\">VkMemoryRequirements</a> structure returned from a call to <a href=\"#vkGetImageMemoryRequirements2\">vkGetImageMemoryRequirements2</a> with <code>image</code>"
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-pNext-01616",
+          "text": " If the <code>pNext</code> chain does not include a <a href=\"#VkBindImagePlaneMemoryInfo\">VkBindImagePlaneMemoryInfo</a> structure, <code>memoryOffset</code> <strong class=\"purple\">must</strong> be an integer multiple of the <code>alignment</code> member of the <a href=\"#VkMemoryRequirements\">VkMemoryRequirements</a> structure returned from a call to <a href=\"#vkGetImageMemoryRequirements2\">vkGetImageMemoryRequirements2</a> with <code>image</code>"
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-pNext-01617",
+          "text": " If the <code>pNext</code> chain does not include a <a href=\"#VkBindImagePlaneMemoryInfo\">VkBindImagePlaneMemoryInfo</a> structure, the difference of the size of <code>memory</code> and <code>memoryOffset</code> <strong class=\"purple\">must</strong> be greater than or equal to the <code>size</code> member of the <a href=\"#VkMemoryRequirements\">VkMemoryRequirements</a> structure returned from a call to <a href=\"#vkGetImageMemoryRequirements2\">vkGetImageMemoryRequirements2</a> with the same <code>image</code>"
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-pNext-01618",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkBindImagePlaneMemoryInfo\">VkBindImagePlaneMemoryInfo</a> structure, <code>image</code> <strong class=\"purple\">must</strong> have been created with the <code>VK_IMAGE_CREATE_DISJOINT_BIT</code> bit set."
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-pNext-01619",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkBindImagePlaneMemoryInfo\">VkBindImagePlaneMemoryInfo</a> structure, <code>memory</code> <strong class=\"purple\">must</strong> have been allocated using one of the memory types allowed in the <code>memoryTypeBits</code> member of the <a href=\"#VkMemoryRequirements\">VkMemoryRequirements</a> structure returned from a call to <a href=\"#vkGetImageMemoryRequirements2\">vkGetImageMemoryRequirements2</a> with <code>image</code> and the correct <code>planeAspect</code> for this plane in the <a href=\"#VkImagePlaneMemoryRequirementsInfo\">VkImagePlaneMemoryRequirementsInfo</a> structure included in the <a href=\"#VkImageMemoryRequirementsInfo2\">VkImageMemoryRequirementsInfo2</a> structure&#8217;s <code>pNext</code> chain"
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-pNext-01620",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkBindImagePlaneMemoryInfo\">VkBindImagePlaneMemoryInfo</a> structure, <code>memoryOffset</code> <strong class=\"purple\">must</strong> be an integer multiple of the <code>alignment</code> member of the <a href=\"#VkMemoryRequirements\">VkMemoryRequirements</a> structure returned from a call to <a href=\"#vkGetImageMemoryRequirements2\">vkGetImageMemoryRequirements2</a> with <code>image</code> and the correct <code>planeAspect</code> for this plane in the <a href=\"#VkImagePlaneMemoryRequirementsInfo\">VkImagePlaneMemoryRequirementsInfo</a> structure included in the <a href=\"#VkImageMemoryRequirementsInfo2\">VkImageMemoryRequirementsInfo2</a> structure&#8217;s <code>pNext</code> chain"
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-pNext-01621",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkBindImagePlaneMemoryInfo\">VkBindImagePlaneMemoryInfo</a> structure, the difference of the size of <code>memory</code> and <code>memoryOffset</code> <strong class=\"purple\">must</strong> be greater than or equal to the <code>size</code> member of the <a href=\"#VkMemoryRequirements\">VkMemoryRequirements</a> structure returned from a call to <a href=\"#vkGetImageMemoryRequirements2\">vkGetImageMemoryRequirements2</a> with the same <code>image</code> and the correct <code>planeAspect</code> for this plane in the <a href=\"#VkImagePlaneMemoryRequirementsInfo\">VkImagePlaneMemoryRequirementsInfo</a> structure included in the <a href=\"#VkImageMemoryRequirementsInfo2\">VkImageMemoryRequirementsInfo2</a> structure&#8217;s <code>pNext</code> chain"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_dedicated_allocation)": [
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-image-01622",
+          "text": " If <code>image</code> requires a dedicated allocation (as reported by <a href=\"#vkGetImageMemoryRequirements2\">vkGetImageMemoryRequirements2</a> in <a href=\"#VkMemoryDedicatedRequirements\">VkMemoryDedicatedRequirements</a>::requiresDedicatedAllocation for <code>image</code>), <code>memory</code> <strong class=\"purple\">must</strong> have been created with <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a>::<code>image</code> equal to <code>image</code> and <code>memoryOffset</code> <strong class=\"purple\">must</strong> be zero"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_dedicated_allocation)+!(VK_NV_dedicated_allocation_image_aliasing)": [
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-memory-01903",
+          "text": " If the <code>VkMemoryAllocateInfo</code> provided when <code>memory</code> was allocated included a <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a> structure in its <code>pNext</code> chain, and <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a>::<code>image</code> was not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, then <code>image</code> <strong class=\"purple\">must</strong> equal <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a>::<code>image</code> and <code>memoryOffset</code> <strong class=\"purple\">must</strong> be zero."
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_dedicated_allocation)+(VK_NV_dedicated_allocation_image_aliasing)": [
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-memory-02630",
+          "text": " If the <a href=\"#features-dedicatedAllocationImageAliasing\">dedicated allocation image aliasing</a> feature is not enabled, and the <code>VkMemoryAllocateInfo</code> provided when <code>memory</code> was allocated included a <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a> structure in its <code>pNext</code> chain, and <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a>::<code>image</code> was not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, then <code>image</code> <strong class=\"purple\">must</strong> equal <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a>::<code>image</code> and <code>memoryOffset</code> <strong class=\"purple\">must</strong> be zero."
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-memory-02631",
+          "text": " If the <a href=\"#features-dedicatedAllocationImageAliasing\">dedicated allocation image aliasing</a> feature is enabled, and the <code>VkMemoryAllocateInfo</code> provided when <code>memory</code> was allocated included a <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a> structure in its <code>pNext</code> chain, and <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a>::<code>image</code> was not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, then <code>memoryOffset</code> <strong class=\"purple\">must</strong> be zero, and <code>image</code> <strong class=\"purple\">must</strong> be either equal to <a href=\"#VkMemoryDedicatedAllocateInfo\">VkMemoryDedicatedAllocateInfo</a>::<code>image</code> or an image that was created using the same parameters in <code>VkImageCreateInfo</code>, with the exception that <code>extent</code> and <code>arrayLayers</code> <strong class=\"purple\">may</strong> differ subject to the following restrictions: every dimension in the <code>extent</code> parameter of the image being bound <strong class=\"purple\">must</strong> be equal to or smaller than the original image for which the allocation was created; and the <code>arrayLayers</code> parameter of the image being bound <strong class=\"purple\">must</strong> be equal to or smaller than the original image for which the allocation was created."
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_NV_dedicated_allocation)": [
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-image-01623",
+          "text": " If <code>image</code> was created with <a href=\"#VkDedicatedAllocationImageCreateInfoNV\">VkDedicatedAllocationImageCreateInfoNV</a>::<code>dedicatedAllocation</code> equal to <code>VK_TRUE</code>, <code>memory</code> <strong class=\"purple\">must</strong> have been created with <a href=\"#VkDedicatedAllocationMemoryAllocateInfoNV\">VkDedicatedAllocationMemoryAllocateInfoNV</a>::<code>image</code> equal to <code>image</code> and <code>memoryOffset</code> <strong class=\"purple\">must</strong> be zero"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_NV_dedicated_allocation)+!(VK_VERSION_1_1,VK_KHR_dedicated_allocation)": [
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-image-01624",
+          "text": " If <code>image</code> was not created with <a href=\"#VkDedicatedAllocationImageCreateInfoNV\">VkDedicatedAllocationImageCreateInfoNV</a>::<code>dedicatedAllocation</code> equal to <code>VK_TRUE</code>, <code>memory</code> <strong class=\"purple\">must</strong> not have been allocated dedicated for a specific buffer or image"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_bind_memory2)+!(VK_VERSION_1_1+VK_KHR_swapchain)+!(VK_KHR_device_group+VK_KHR_swapchain)": [
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-memory-01625",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDeviceMemory\">VkDeviceMemory</a> handle"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-pNext-01626",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkBindImageMemoryDeviceGroupInfo\">VkBindImageMemoryDeviceGroupInfo</a> structure, all instances of <code>memory</code> specified by <a href=\"#VkBindImageMemoryDeviceGroupInfo\">VkBindImageMemoryDeviceGroupInfo</a>::<code>pDeviceIndices</code> <strong class=\"purple\">must</strong> have been allocated"
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-pNext-01627",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkBindImageMemoryDeviceGroupInfo\">VkBindImageMemoryDeviceGroupInfo</a> structure, and <a href=\"#VkBindImageMemoryDeviceGroupInfo\">VkBindImageMemoryDeviceGroupInfo</a>::<code>splitInstanceBindRegionCount</code> is not zero, then <code>image</code> <strong class=\"purple\">must</strong> have been created with the <code>VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT</code> bit set"
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-pNext-01628",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkBindImageMemoryDeviceGroupInfo\">VkBindImageMemoryDeviceGroupInfo</a> structure, all elements of <a href=\"#VkBindImageMemoryDeviceGroupInfo\">VkBindImageMemoryDeviceGroupInfo</a>::<code>pSplitInstanceBindRegions</code> <strong class=\"purple\">must</strong> be valid rectangles contained within the dimensions of <code>image</code>"
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-pNext-01629",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkBindImageMemoryDeviceGroupInfo\">VkBindImageMemoryDeviceGroupInfo</a> structure, the union of the areas of all elements of <a href=\"#VkBindImageMemoryDeviceGroupInfo\">VkBindImageMemoryDeviceGroupInfo</a>::<code>pSplitInstanceBindRegions</code> that correspond to the same instance of <code>image</code> <strong class=\"purple\">must</strong> cover the entire image."
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_device_group)+(VK_KHR_swapchain)": [
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-image-01630",
+          "text": " If <code>image</code> was created with a valid swapchain handle in <a href=\"#VkImageSwapchainCreateInfoKHR\">VkImageSwapchainCreateInfoKHR</a>::<code>swapchain</code>, then the <code>pNext</code> chain <strong class=\"purple\">must</strong> include a <a href=\"#VkBindImageMemorySwapchainInfoKHR\">VkBindImageMemorySwapchainInfoKHR</a> structure containing the same swapchain handle."
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-pNext-01631",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkBindImageMemorySwapchainInfoKHR\">VkBindImageMemorySwapchainInfoKHR</a> structure, <code>memory</code> <strong class=\"purple\">must</strong> be <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>"
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-pNext-01632",
+          "text": " If the <code>pNext</code> chain does not include a <a href=\"#VkBindImageMemorySwapchainInfoKHR\">VkBindImageMemorySwapchainInfoKHR</a> structure, <code>memory</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDeviceMemory\">VkDeviceMemory</a> handle"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_external_memory)": [
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-handleTypes-02793",
+          "text": " If the value of <a href=\"#VkExportMemoryAllocateInfo\">VkExportMemoryAllocateInfo</a>::<code>handleTypes</code> used to allocate <code>memory</code> is not <code>0</code>, it <strong class=\"purple\">must</strong> include at least one of the handles set in <a href=\"#VkExternalMemoryImageCreateInfo\">VkExternalMemoryImageCreateInfo</a>::<code>handleTypes</code> when <code>image</code> was created"
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryInfo-memory-02794",
+          "text": " If <code>memory</code> was created by a memory import operation, the external handle type of the imported memory <strong class=\"purple\">must</strong> also have been set in <a href=\"#VkExternalMemoryImageCreateInfo\">VkExternalMemoryImageCreateInfo</a>::<code>handleTypes</code> when <code>image</code> was created"
+        }
+      ]
+    },
+    "VkBindImageMemoryDeviceGroupInfo": {
+      "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-VkBindImageMemoryDeviceGroupInfo-deviceIndexCount-01633",
+          "text": " At least one of <code>deviceIndexCount</code> and <code>splitInstanceBindRegionCount</code> <strong class=\"purple\">must</strong> be zero."
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryDeviceGroupInfo-deviceIndexCount-01634",
+          "text": " <code>deviceIndexCount</code> <strong class=\"purple\">must</strong> either be zero or equal to the number of physical devices in the logical device"
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryDeviceGroupInfo-pDeviceIndices-01635",
+          "text": " All elements of <code>pDeviceIndices</code> <strong class=\"purple\">must</strong> be valid device indices."
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryDeviceGroupInfo-splitInstanceBindRegionCount-01636",
+          "text": " <code>splitInstanceBindRegionCount</code> <strong class=\"purple\">must</strong> either be zero or equal to the number of physical devices in the logical device squared"
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryDeviceGroupInfo-pSplitInstanceBindRegions-01637",
+          "text": " Elements of <code>pSplitInstanceBindRegions</code> that correspond to the same instance of an image <strong class=\"purple\">must</strong> not overlap."
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryDeviceGroupInfo-offset-01638",
+          "text": " The <code>offset.x</code> member of any element of <code>pSplitInstanceBindRegions</code> <strong class=\"purple\">must</strong> be a multiple of the sparse image block width (<code>VkSparseImageFormatProperties</code>::<code>imageGranularity.width</code>) of all non-metadata aspects of the image"
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryDeviceGroupInfo-offset-01639",
+          "text": " The <code>offset.y</code> member of any element of <code>pSplitInstanceBindRegions</code> <strong class=\"purple\">must</strong> be a multiple of the sparse image block height (<code>VkSparseImageFormatProperties</code>::<code>imageGranularity.height</code>) of all non-metadata aspects of the image"
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryDeviceGroupInfo-extent-01640",
+          "text": " The <code>extent.width</code> member of any element of <code>pSplitInstanceBindRegions</code> <strong class=\"purple\">must</strong> either be a multiple of the sparse image block width of all non-metadata aspects of the image, or else <code>extent.width</code> &#43; <code>offset.x</code> <strong class=\"purple\">must</strong> equal the width of the image subresource"
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryDeviceGroupInfo-extent-01641",
+          "text": " The <code>extent.height</code> member of any element of <code>pSplitInstanceBindRegions</code> <strong class=\"purple\">must</strong> either be a multiple of the sparse image block height of all non-metadata aspects of the image, or else <code>extent.height</code> &#43; <code>offset.y</code> <strong class=\"purple\">must</strong> equal the width of the image subresource"
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryDeviceGroupInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryDeviceGroupInfo-pDeviceIndices-parameter",
+          "text": " If <code>deviceIndexCount</code> is not <code>0</code>, <code>pDeviceIndices</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>deviceIndexCount</code> <code>uint32_t</code> values"
+        },
+        {
+          "vuid": "VUID-VkBindImageMemoryDeviceGroupInfo-pSplitInstanceBindRegions-parameter",
+          "text": " If <code>splitInstanceBindRegionCount</code> is not <code>0</code>, <code>pSplitInstanceBindRegions</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>splitInstanceBindRegionCount</code> <a href=\"#VkRect2D\">VkRect2D</a> structures"
+        }
+      ]
+    },
+    "VkBindImageMemorySwapchainInfoKHR": {
+      "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_device_group)+(VK_KHR_swapchain)": [
+        {
+          "vuid": "VUID-VkBindImageMemorySwapchainInfoKHR-imageIndex-01644",
+          "text": " <code>imageIndex</code> <strong class=\"purple\">must</strong> be less than the number of images in <code>swapchain</code>"
+        },
+        {
+          "vuid": "VUID-VkBindImageMemorySwapchainInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBindImageMemorySwapchainInfoKHR-swapchain-parameter",
+          "text": " <code>swapchain</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSwapchainKHR\">VkSwapchainKHR</a> handle"
+        }
+      ]
+    },
+    "VkBindImagePlaneMemoryInfo": {
+      "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-VkBindImagePlaneMemoryInfo-planeAspect-02283",
+          "text": " If the image&#8217;s tiling is <code>VK_IMAGE_TILING_LINEAR</code> or <code>VK_IMAGE_TILING_OPTIMAL</code>, then <code>planeAspect</code> <strong class=\"purple\">must</strong> be a single valid <em>format plane</em> for the image. (That is, <code>planeAspect</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_ASPECT_PLANE_0_BIT</code> or <code>VK_IMAGE_ASPECT_PLANE_1_BIT</code> for &#8220;<code>_2PLANE</code>&#8221; formats and <code>planeAspect</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_ASPECT_PLANE_0_BIT</code>, <code>VK_IMAGE_ASPECT_PLANE_1_BIT</code>, or <code>VK_IMAGE_ASPECT_PLANE_2_BIT</code> for &#8220;<code>_3PLANE</code>&#8221; formats.)"
+        },
+        {
+          "vuid": "VUID-VkBindImagePlaneMemoryInfo-None-01643",
+          "text": " A single call to <a href=\"#vkBindImageMemory2\">vkBindImageMemory2</a> <strong class=\"purple\">must</strong> bind all or none of the planes of an image (i.e. bindings to all planes of an image <strong class=\"purple\">must</strong> be made in a single <a href=\"#vkBindImageMemory2\">vkBindImageMemory2</a> call), as separate bindings"
+        },
+        {
+          "vuid": "VUID-VkBindImagePlaneMemoryInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkBindImagePlaneMemoryInfo-planeAspect-parameter",
+          "text": " <code>planeAspect</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageAspectFlagBits\">VkImageAspectFlagBits</a> value"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)+(VK_EXT_image_drm_format_modifier)": [
+        {
+          "vuid": "VUID-VkBindImagePlaneMemoryInfo-planeAspect-02284",
+          "text": "  If the image&#8217;s tiling is <code>VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT</code>,  then <code>planeAspect</code> <strong class=\"purple\">must</strong> be a single valid <em>memory plane</em> for the  image.  (That is, <code>aspectMask</code> <strong class=\"purple\">must</strong> specify a plane index that is less than  the  <a href=\"#VkDrmFormatModifierPropertiesEXT\"><code>drmFormatModifierPlaneCount</code></a>  associated with the image&#8217;s <a href=\"#VkImageCreateInfo\"><code>format</code></a> and <a href=\"#VkImageDrmFormatModifierPropertiesEXT\"><code>drmFormatModifier</code></a>.)"
+        }
+      ]
+    },
+    "vkCreateAccelerationStructureNV": {
+      "(VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-vkCreateAccelerationStructureNV-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateAccelerationStructureNV-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAccelerationStructureCreateInfoNV\">VkAccelerationStructureCreateInfoNV</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateAccelerationStructureNV-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateAccelerationStructureNV-pAccelerationStructure-parameter",
+          "text": " <code>pAccelerationStructure</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkAccelerationStructureNV\">VkAccelerationStructureNV</a> handle"
+        }
+      ]
+    },
+    "VkAccelerationStructureCreateInfoNV": {
+      "(VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-VkAccelerationStructureCreateInfoNV-compactedSize-02421",
+          "text": " If <code>compactedSize</code> is not <code>0</code> then both <code>info.geometryCount</code> and <code>info.instanceCount</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkAccelerationStructureCreateInfoNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkAccelerationStructureCreateInfoNV-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkAccelerationStructureCreateInfoNV-info-parameter",
+          "text": " <code>info</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkAccelerationStructureInfoNV\">VkAccelerationStructureInfoNV</a> structure"
+        }
+      ]
+    },
+    "VkAccelerationStructureInfoNV": {
+      "(VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-VkAccelerationStructureInfoNV-geometryCount-02422",
+          "text": " <code>geometryCount</code> <strong class=\"purple\">must</strong> be less than or equal to <a href=\"#VkPhysicalDeviceRayTracingPropertiesNV\">VkPhysicalDeviceRayTracingPropertiesNV</a>::<code>maxGeometryCount</code>"
+        },
+        {
+          "vuid": "VUID-VkAccelerationStructureInfoNV-instanceCount-02423",
+          "text": " <code>instanceCount</code> <strong class=\"purple\">must</strong> be less than or equal to <a href=\"#VkPhysicalDeviceRayTracingPropertiesNV\">VkPhysicalDeviceRayTracingPropertiesNV</a>::<code>maxInstanceCount</code>"
+        },
+        {
+          "vuid": "VUID-VkAccelerationStructureInfoNV-maxTriangleCount-02424",
+          "text": " The total number of triangles in all geometries <strong class=\"purple\">must</strong> be less than or equal to <a href=\"#VkPhysicalDeviceRayTracingPropertiesNV\">VkPhysicalDeviceRayTracingPropertiesNV</a>::<code>maxTriangleCount</code>"
+        },
+        {
+          "vuid": "VUID-VkAccelerationStructureInfoNV-type-02425",
+          "text": " If <code>type</code> is <code>VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV</code> then <code>geometryCount</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkAccelerationStructureInfoNV-type-02426",
+          "text": " If <code>type</code> is <code>VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV</code> then <code>instanceCount</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkAccelerationStructureInfoNV-type-02786",
+          "text": " If <code>type</code> is <code>VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV</code> then the <code>geometryType</code> member of each geometry in <code>pGeometries</code> <strong class=\"purple\">must</strong> be the same"
+        },
+        {
+          "vuid": "VUID-VkAccelerationStructureInfoNV-flags-02592",
+          "text": " If <code>flags</code> has the <code>VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV</code> bit set, then it <strong class=\"purple\">must</strong> not have the <code>VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV</code> bit set"
+        },
+        {
+          "vuid": "VUID-VkAccelerationStructureInfoNV-scratch-02781",
+          "text": " <code>scratch</code> <strong class=\"purple\">must</strong> have been created with <code>VK_BUFFER_USAGE_RAY_TRACING_BIT_NV</code> usage flag"
+        },
+        {
+          "vuid": "VUID-VkAccelerationStructureInfoNV-instanceData-02782",
+          "text": " If <code>instanceData</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>instanceData</code> <strong class=\"purple\">must</strong> have been created with <code>VK_BUFFER_USAGE_RAY_TRACING_BIT_NV</code> usage flag"
+        },
+        {
+          "vuid": "VUID-VkAccelerationStructureInfoNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkAccelerationStructureInfoNV-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkAccelerationStructureInfoNV-type-parameter",
+          "text": " <code>type</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkAccelerationStructureTypeNV\">VkAccelerationStructureTypeNV</a> value"
+        },
+        {
+          "vuid": "VUID-VkAccelerationStructureInfoNV-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkBuildAccelerationStructureFlagBitsNV\">VkBuildAccelerationStructureFlagBitsNV</a> values"
+        },
+        {
+          "vuid": "VUID-VkAccelerationStructureInfoNV-pGeometries-parameter",
+          "text": " If <code>geometryCount</code> is not <code>0</code>, <code>pGeometries</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>geometryCount</code> valid <a href=\"#VkGeometryNV\">VkGeometryNV</a> structures"
+        }
+      ]
+    },
+    "VkGeometryNV": {
+      "(VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-VkGeometryNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_GEOMETRY_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkGeometryNV-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkGeometryNV-geometryType-parameter",
+          "text": " <code>geometryType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkGeometryTypeNV\">VkGeometryTypeNV</a> value"
+        },
+        {
+          "vuid": "VUID-VkGeometryNV-geometry-parameter",
+          "text": " <code>geometry</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkGeometryDataNV\">VkGeometryDataNV</a> structure"
+        },
+        {
+          "vuid": "VUID-VkGeometryNV-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkGeometryFlagBitsNV\">VkGeometryFlagBitsNV</a> values"
+        }
+      ]
+    },
+    "VkGeometryDataNV": {
+      "(VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-VkGeometryDataNV-triangles-parameter",
+          "text": " <code>triangles</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkGeometryTrianglesNV\">VkGeometryTrianglesNV</a> structure"
+        },
+        {
+          "vuid": "VUID-VkGeometryDataNV-aabbs-parameter",
+          "text": " <code>aabbs</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkGeometryAABBNV\">VkGeometryAABBNV</a> structure"
+        }
+      ]
+    },
+    "VkGeometryTrianglesNV": {
+      "(VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-VkGeometryTrianglesNV-vertexOffset-02428",
+          "text": " <code>vertexOffset</code> <strong class=\"purple\">must</strong> be less than the size of <code>vertexData</code>"
+        },
+        {
+          "vuid": "VUID-VkGeometryTrianglesNV-vertexOffset-02429",
+          "text": " <code>vertexOffset</code> <strong class=\"purple\">must</strong> be a multiple of the component size of <code>vertexFormat</code>"
+        },
+        {
+          "vuid": "VUID-VkGeometryTrianglesNV-vertexFormat-02430",
+          "text": " <code>vertexFormat</code> <strong class=\"purple\">must</strong> be one of <code>VK_FORMAT_R32G32B32_SFLOAT</code>, <code>VK_FORMAT_R32G32_SFLOAT</code>, <code>VK_FORMAT_R16G16B16_SFLOAT</code>, <code>VK_FORMAT_R16G16_SFLOAT</code>, <code>VK_FORMAT_R16G16_SNORM</code>, or <code>VK_FORMAT_R16G16B16_SNORM</code>"
+        },
+        {
+          "vuid": "VUID-VkGeometryTrianglesNV-indexOffset-02431",
+          "text": " <code>indexOffset</code> <strong class=\"purple\">must</strong> be less than the size of <code>indexData</code>"
+        },
+        {
+          "vuid": "VUID-VkGeometryTrianglesNV-indexOffset-02432",
+          "text": " <code>indexOffset</code> <strong class=\"purple\">must</strong> be a multiple of the element size of <code>indexType</code>"
+        },
+        {
+          "vuid": "VUID-VkGeometryTrianglesNV-indexType-02433",
+          "text": " <code>indexType</code> <strong class=\"purple\">must</strong> be <code>VK_INDEX_TYPE_UINT16</code>, <code>VK_INDEX_TYPE_UINT32</code>, or <code>VK_INDEX_TYPE_NONE_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkGeometryTrianglesNV-indexData-02434",
+          "text": " <code>indexData</code> <strong class=\"purple\">must</strong> be <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a> if <code>indexType</code> is <code>VK_INDEX_TYPE_NONE_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkGeometryTrianglesNV-indexData-02435",
+          "text": " <code>indexData</code> <strong class=\"purple\">must</strong> be a valid <code>VkBuffer</code> handle if <code>indexType</code> is not <code>VK_INDEX_TYPE_NONE_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkGeometryTrianglesNV-indexCount-02436",
+          "text": " <code>indexCount</code> <strong class=\"purple\">must</strong> be <code>0</code> if <code>indexType</code> is <code>VK_INDEX_TYPE_NONE_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkGeometryTrianglesNV-transformOffset-02437",
+          "text": " <code>transformOffset</code> <strong class=\"purple\">must</strong> be less than the size of <code>transformData</code>"
+        },
+        {
+          "vuid": "VUID-VkGeometryTrianglesNV-transformOffset-02438",
+          "text": " <code>transformOffset</code> <strong class=\"purple\">must</strong> be a multiple of <code>16</code>"
+        },
+        {
+          "vuid": "VUID-VkGeometryTrianglesNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkGeometryTrianglesNV-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkGeometryTrianglesNV-vertexData-parameter",
+          "text": " If <code>vertexData</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>vertexData</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-VkGeometryTrianglesNV-vertexFormat-parameter",
+          "text": " <code>vertexFormat</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFormat\">VkFormat</a> value"
+        },
+        {
+          "vuid": "VUID-VkGeometryTrianglesNV-indexData-parameter",
+          "text": " If <code>indexData</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>indexData</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-VkGeometryTrianglesNV-indexType-parameter",
+          "text": " <code>indexType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkIndexType\">VkIndexType</a> value"
+        },
+        {
+          "vuid": "VUID-VkGeometryTrianglesNV-transformData-parameter",
+          "text": " If <code>transformData</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>transformData</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-VkGeometryTrianglesNV-commonparent",
+          "text": " Each of <code>indexData</code>, <code>transformData</code>, and <code>vertexData</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
+    "VkGeometryAABBNV": {
+      "(VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-VkGeometryAABBNV-offset-02439",
+          "text": " <code>offset</code> <strong class=\"purple\">must</strong> be less than the size of <code>aabbData</code>"
+        },
+        {
+          "vuid": "VUID-VkGeometryAABBNV-offset-02440",
+          "text": " <code>offset</code> <strong class=\"purple\">must</strong> be a multiple of <code>8</code>"
+        },
+        {
+          "vuid": "VUID-VkGeometryAABBNV-stride-02441",
+          "text": " <code>stride</code> <strong class=\"purple\">must</strong> be a multiple of <code>8</code>"
+        },
+        {
+          "vuid": "VUID-VkGeometryAABBNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkGeometryAABBNV-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkGeometryAABBNV-aabbData-parameter",
+          "text": " If <code>aabbData</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>aabbData</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        }
+      ]
+    },
+    "vkDestroyAccelerationStructureNV": {
+      "(VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-vkDestroyAccelerationStructureNV-accelerationStructure-02442",
+          "text": " All submitted commands that refer to <code>accelerationStructure</code> <strong class=\"purple\">must</strong> have completed execution"
+        },
+        {
+          "vuid": "VUID-vkDestroyAccelerationStructureNV-accelerationStructure-02443",
+          "text": " If <code>VkAllocationCallbacks</code> were provided when <code>accelerationStructure</code> was created, a compatible set of callbacks <strong class=\"purple\">must</strong> be provided here"
+        },
+        {
+          "vuid": "VUID-vkDestroyAccelerationStructureNV-accelerationStructure-02444",
+          "text": " If no <code>VkAllocationCallbacks</code> were provided when <code>accelerationStructure</code> was created, <code>pAllocator</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroyAccelerationStructureNV-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyAccelerationStructureNV-accelerationStructure-parameter",
+          "text": " <code>accelerationStructure</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkAccelerationStructureNV\">VkAccelerationStructureNV</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyAccelerationStructureNV-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkDestroyAccelerationStructureNV-accelerationStructure-parent",
+          "text": " <code>accelerationStructure</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkGetAccelerationStructureMemoryRequirementsNV": {
+      "(VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-vkGetAccelerationStructureMemoryRequirementsNV-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetAccelerationStructureMemoryRequirementsNV-pInfo-parameter",
+          "text": " <code>pInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAccelerationStructureMemoryRequirementsInfoNV\">VkAccelerationStructureMemoryRequirementsInfoNV</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetAccelerationStructureMemoryRequirementsNV-pMemoryRequirements-parameter",
+          "text": " <code>pMemoryRequirements</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkMemoryRequirements2KHR\">VkMemoryRequirements2KHR</a> structure"
+        }
+      ]
+    },
+    "VkAccelerationStructureMemoryRequirementsInfoNV": {
+      "(VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-VkAccelerationStructureMemoryRequirementsInfoNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkAccelerationStructureMemoryRequirementsInfoNV-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkAccelerationStructureMemoryRequirementsInfoNV-type-parameter",
+          "text": " <code>type</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkAccelerationStructureMemoryRequirementsTypeNV\">VkAccelerationStructureMemoryRequirementsTypeNV</a> value"
+        },
+        {
+          "vuid": "VUID-VkAccelerationStructureMemoryRequirementsInfoNV-accelerationStructure-parameter",
+          "text": " <code>accelerationStructure</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkAccelerationStructureNV\">VkAccelerationStructureNV</a> handle"
+        }
+      ]
+    },
+    "vkBindAccelerationStructureMemoryNV": {
+      "(VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-vkBindAccelerationStructureMemoryNV-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkBindAccelerationStructureMemoryNV-pBindInfos-parameter",
+          "text": " <code>pBindInfos</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>bindInfoCount</code> valid <a href=\"#VkBindAccelerationStructureMemoryInfoNV\">VkBindAccelerationStructureMemoryInfoNV</a> structures"
+        },
+        {
+          "vuid": "VUID-vkBindAccelerationStructureMemoryNV-bindInfoCount-arraylength",
+          "text": " <code>bindInfoCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkBindAccelerationStructureMemoryInfoNV": {
+      "(VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-accelerationStructure-02450",
+          "text": " <code>accelerationStructure</code> <strong class=\"purple\">must</strong> not already be backed by a memory object"
+        },
+        {
+          "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-memoryOffset-02451",
+          "text": " <code>memoryOffset</code> <strong class=\"purple\">must</strong> be less than the size of <code>memory</code>"
+        },
+        {
+          "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-memory-02593",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> have been allocated using one of the memory types allowed in the <code>memoryTypeBits</code> member of the <a href=\"#VkMemoryRequirements\">VkMemoryRequirements</a> structure returned from a call to <a href=\"#vkGetAccelerationStructureMemoryRequirementsNV\">vkGetAccelerationStructureMemoryRequirementsNV</a> with <code>accelerationStructure</code> and <code>type</code> of <code>VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-memoryOffset-02594",
+          "text": " <code>memoryOffset</code> <strong class=\"purple\">must</strong> be an integer multiple of the <code>alignment</code> member of the <a href=\"#VkMemoryRequirements\">VkMemoryRequirements</a> structure returned from a call to <a href=\"#vkGetAccelerationStructureMemoryRequirementsNV\">vkGetAccelerationStructureMemoryRequirementsNV</a> with <code>accelerationStructure</code> and <code>type</code> of <code>VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-size-02595",
+          "text": " The <code>size</code> member of the <code>VkMemoryRequirements</code> structure returned from a call to <a href=\"#vkGetAccelerationStructureMemoryRequirementsNV\">vkGetAccelerationStructureMemoryRequirementsNV</a> with <code>accelerationStructure</code> and <code>type</code> of <code>VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV</code> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>memory</code> minus <code>memoryOffset</code>"
+        },
+        {
+          "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-accelerationStructure-parameter",
+          "text": " <code>accelerationStructure</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkAccelerationStructureNV\">VkAccelerationStructureNV</a> handle"
+        },
+        {
+          "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-memory-parameter",
+          "text": " <code>memory</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDeviceMemory\">VkDeviceMemory</a> handle"
+        },
+        {
+          "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-pDeviceIndices-parameter",
+          "text": " If <code>deviceIndexCount</code> is not <code>0</code>, <code>pDeviceIndices</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>deviceIndexCount</code> <code>uint32_t</code> values"
+        },
+        {
+          "vuid": "VUID-VkBindAccelerationStructureMemoryInfoNV-commonparent",
+          "text": " Both of <code>accelerationStructure</code>, and <code>memory</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
+    "vkGetAccelerationStructureHandleNV": {
+      "(VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-vkGetAccelerationStructureHandleNV-dataSize-02240",
+          "text": " <code>dataSize</code> <strong class=\"purple\">must</strong> be large enough to contain the result of the query, as described above"
+        },
+        {
+          "vuid": "VUID-vkGetAccelerationStructureHandleNV-accelerationStructure-02787",
+          "text": " <code>accelerationStructure</code> <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object via <a href=\"#vkBindAccelerationStructureMemoryNV\">vkBindAccelerationStructureMemoryNV</a>"
+        },
+        {
+          "vuid": "VUID-vkGetAccelerationStructureHandleNV-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetAccelerationStructureHandleNV-accelerationStructure-parameter",
+          "text": " <code>accelerationStructure</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkAccelerationStructureNV\">VkAccelerationStructureNV</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetAccelerationStructureHandleNV-pData-parameter",
+          "text": " <code>pData</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>dataSize</code> bytes"
+        },
+        {
+          "vuid": "VUID-vkGetAccelerationStructureHandleNV-dataSize-arraylength",
+          "text": " <code>dataSize</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkGetAccelerationStructureHandleNV-accelerationStructure-parent",
+          "text": " <code>accelerationStructure</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkCreateSampler": {
+      "core": [
+        {
+          "vuid": "VUID-vkCreateSampler-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateSampler-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkSamplerCreateInfo\">VkSamplerCreateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateSampler-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateSampler-pSampler-parameter",
+          "text": " <code>pSampler</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkSampler\">VkSampler</a> handle"
+        }
+      ]
+    },
+    "VkSamplerCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-mipLodBias-01069",
+          "text": " The absolute value of <code>mipLodBias</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxSamplerLodBias</code>"
+        },
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-maxLod-01973",
+          "text": " <code>maxLod</code> <strong class=\"purple\">must</strong> be greater than or equal to <code>minLod</code>"
+        },
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-anisotropyEnable-01070",
+          "text": " If the <a href=\"#features-samplerAnisotropy\">anisotropic sampling</a> feature is not enabled, <code>anisotropyEnable</code> <strong class=\"purple\">must</strong> be <code>VK_FALSE</code>"
+        },
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-anisotropyEnable-01071",
+          "text": " If <code>anisotropyEnable</code> is <code>VK_TRUE</code>, <code>maxAnisotropy</code> <strong class=\"purple\">must</strong> be between <code>1.0</code> and <code>VkPhysicalDeviceLimits</code>::<code>maxSamplerAnisotropy</code>, inclusive"
+        },
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01072",
+          "text": " If <code>unnormalizedCoordinates</code> is <code>VK_TRUE</code>, <code>minFilter</code> and <code>magFilter</code> <strong class=\"purple\">must</strong> be equal"
+        },
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01073",
+          "text": " If <code>unnormalizedCoordinates</code> is <code>VK_TRUE</code>, <code>mipmapMode</code> <strong class=\"purple\">must</strong> be <code>VK_SAMPLER_MIPMAP_MODE_NEAREST</code>"
+        },
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01074",
+          "text": " If <code>unnormalizedCoordinates</code> is <code>VK_TRUE</code>, <code>minLod</code> and <code>maxLod</code> <strong class=\"purple\">must</strong> be zero"
+        },
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01075",
+          "text": " If <code>unnormalizedCoordinates</code> is <code>VK_TRUE</code>, <code>addressModeU</code> and <code>addressModeV</code> <strong class=\"purple\">must</strong> each be either <code>VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE</code> or <code>VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER</code>"
+        },
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01076",
+          "text": " If <code>unnormalizedCoordinates</code> is <code>VK_TRUE</code>, <code>anisotropyEnable</code> <strong class=\"purple\">must</strong> be <code>VK_FALSE</code>"
+        },
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01077",
+          "text": " If <code>unnormalizedCoordinates</code> is <code>VK_TRUE</code>, <code>compareEnable</code> <strong class=\"purple\">must</strong> be <code>VK_FALSE</code>"
+        },
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-addressModeU-01078",
+          "text": " If any of <code>addressModeU</code>, <code>addressModeV</code> or <code>addressModeW</code> are <code>VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER</code>, <code>borderColor</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBorderColor\">VkBorderColor</a> value"
+        },
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-compareEnable-01080",
+          "text": " If <code>compareEnable</code> is <code>VK_TRUE</code>, <code>compareOp</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCompareOp\">VkCompareOp</a> value"
+        },
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-pNext-pNext",
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkSamplerReductionModeCreateInfoEXT\">VkSamplerReductionModeCreateInfoEXT</a> or <a href=\"#VkSamplerYcbcrConversionInfo\">VkSamplerYcbcrConversionInfo</a>"
+        },
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-sType-unique",
+          "text": " Each <code>sType</code> member in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
+        },
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkSamplerCreateFlagBits\">VkSamplerCreateFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-magFilter-parameter",
+          "text": " <code>magFilter</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFilter\">VkFilter</a> value"
+        },
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-minFilter-parameter",
+          "text": " <code>minFilter</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFilter\">VkFilter</a> value"
+        },
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-mipmapMode-parameter",
+          "text": " <code>mipmapMode</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSamplerMipmapMode\">VkSamplerMipmapMode</a> value"
+        },
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-addressModeU-parameter",
+          "text": " <code>addressModeU</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSamplerAddressMode\">VkSamplerAddressMode</a> value"
+        },
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-addressModeV-parameter",
+          "text": " <code>addressModeV</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSamplerAddressMode\">VkSamplerAddressMode</a> value"
+        },
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-addressModeW-parameter",
+          "text": " <code>addressModeW</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSamplerAddressMode\">VkSamplerAddressMode</a> value"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-minFilter-01645",
+          "text": " If <a href=\"#samplers-YCbCr-conversion\">sampler Y&#8217;C<sub>B</sub>C<sub>R</sub> conversion</a> is enabled and <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT</code> is not set for the format, <code>minFilter</code> and <code>magFilter</code> <strong class=\"purple\">must</strong> be equal to the sampler Y&#8217;C<sub>B</sub>C<sub>R</sub> conversion&#8217;s <code>chromaFilter</code>"
+        },
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-addressModeU-01646",
+          "text": " If <a href=\"#samplers-YCbCr-conversion\">sampler Y&#8217;C<sub>B</sub>C<sub>R</sub> conversion</a> is enabled, <code>addressModeU</code>, <code>addressModeV</code>, and <code>addressModeW</code> <strong class=\"purple\">must</strong> be <code>VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE</code>, <code>anisotropyEnable</code> <strong class=\"purple\">must</strong> be <code>VK_FALSE</code>, and <code>unnormalizedCoordinates</code> <strong class=\"purple\">must</strong> be <code>VK_FALSE</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)+(VK_EXT_sampler_filter_minmax)": [
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-None-01647",
+          "text": " The sampler reduction mode <strong class=\"purple\">must</strong> be set to <code>VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT</code> if <a href=\"#samplers-YCbCr-conversion\">sampler Y&#8217;C<sub>B</sub>C<sub>R</sub> conversion</a> is enabled"
+        }
+      ],
+      "(VK_KHR_sampler_mirror_clamp_to_edge)": [
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-addressModeU-01079",
+          "text": " If the <code><a href=\"#VK_KHR_sampler_mirror_clamp_to_edge\">VK_KHR_sampler_mirror_clamp_to_edge</a></code> extension is not enabled, <code>addressModeU</code>, <code>addressModeV</code> and <code>addressModeW</code> <strong class=\"purple\">must</strong> not be <code>VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE</code>"
+        }
+      ],
+      "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-magFilter-01081",
+          "text": " If either <code>magFilter</code> or <code>minFilter</code> is <code>VK_FILTER_CUBIC_EXT</code>, <code>anisotropyEnable</code> <strong class=\"purple\">must</strong> be <code>VK_FALSE</code>"
+        }
+      ],
+      "(VK_IMG_filter_cubic+VK_EXT_sampler_filter_minmax)+!(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-magFilter-01422",
+          "text": " If either <code>magFilter</code> or <code>minFilter</code> is <code>VK_FILTER_CUBIC_EXT</code>, the <code>reductionMode</code> member of <a href=\"#VkSamplerReductionModeCreateInfoEXT\">VkSamplerReductionModeCreateInfoEXT</a> <strong class=\"purple\">must</strong> be <code>VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT</code>"
+        }
+      ],
+      "(VK_EXT_sampler_filter_minmax)": [
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-compareEnable-01423",
+          "text": " If <code>compareEnable</code> is <code>VK_TRUE</code>, the <code>reductionMode</code> member of <a href=\"#VkSamplerReductionModeCreateInfoEXT\">VkSamplerReductionModeCreateInfoEXT</a> <strong class=\"purple\">must</strong> be <code>VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT</code>"
+        }
+      ],
+      "(VK_EXT_fragment_density_map)": [
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-flags-02574",
+          "text": " If <code>flags</code> includes <code>VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT</code>, then <code>minFilter</code> and <code>magFilter</code> <strong class=\"purple\">must</strong> be equal."
+        },
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-flags-02575",
+          "text": " If <code>flags</code> includes <code>VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT</code>, then <code>mipmapMode</code> <strong class=\"purple\">must</strong> be <code>VK_SAMPLER_MIPMAP_MODE_NEAREST</code>."
+        },
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-flags-02576",
+          "text": " If <code>flags</code> includes <code>VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT</code>, then <code>minLod</code> and <code>maxLod</code> <strong class=\"purple\">must</strong> be zero."
+        },
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-flags-02577",
+          "text": " If <code>flags</code> includes <code>VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT</code>, then <code>addressModeU</code> and <code>addressModeV</code> <strong class=\"purple\">must</strong> each be either <code>VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE</code> or <code>VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER</code>."
+        },
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-flags-02578",
+          "text": " If <code>flags</code> includes <code>VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT</code>, then <code>anisotropyEnable</code> <strong class=\"purple\">must</strong> be <code>VK_FALSE</code>."
+        },
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-flags-02579",
+          "text": " If <code>flags</code> includes <code>VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT</code>, then <code>compareEnable</code> <strong class=\"purple\">must</strong> be <code>VK_FALSE</code>."
+        },
+        {
+          "vuid": "VUID-VkSamplerCreateInfo-flags-02580",
+          "text": " If <code>flags</code> includes <code>VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT</code>, then <code>unnormalizedCoordinates</code> <strong class=\"purple\">must</strong> be <code>VK_FALSE</code>."
+        }
+      ]
+    },
+    "VkSamplerReductionModeCreateInfoEXT": {
+      "(VK_EXT_sampler_filter_minmax)": [
+        {
+          "vuid": "VUID-VkSamplerReductionModeCreateInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkSamplerReductionModeCreateInfoEXT-reductionMode-parameter",
+          "text": " <code>reductionMode</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSamplerReductionModeEXT\">VkSamplerReductionModeEXT</a> value"
+        }
+      ]
+    },
+    "vkDestroySampler": {
+      "core": [
+        {
+          "vuid": "VUID-vkDestroySampler-sampler-01082",
+          "text": " All submitted commands that refer to <code>sampler</code> <strong class=\"purple\">must</strong> have completed execution"
+        },
+        {
+          "vuid": "VUID-vkDestroySampler-sampler-01083",
+          "text": " If <code>VkAllocationCallbacks</code> were provided when <code>sampler</code> was created, a compatible set of callbacks <strong class=\"purple\">must</strong> be provided here"
+        },
+        {
+          "vuid": "VUID-vkDestroySampler-sampler-01084",
+          "text": " If no <code>VkAllocationCallbacks</code> were provided when <code>sampler</code> was created, <code>pAllocator</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroySampler-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroySampler-sampler-parameter",
+          "text": " If <code>sampler</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>sampler</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSampler\">VkSampler</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroySampler-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkDestroySampler-sampler-parent",
+          "text": " If <code>sampler</code> is a valid handle, it <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "VkSamplerYcbcrConversionInfo": {
+      "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-VkSamplerYcbcrConversionInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkSamplerYcbcrConversionInfo-conversion-parameter",
+          "text": " <code>conversion</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSamplerYcbcrConversion\">VkSamplerYcbcrConversion</a> handle"
+        }
+      ]
+    },
+    "vkCreateSamplerYcbcrConversion": {
+      "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-vkCreateSamplerYcbcrConversion-None-01648",
+          "text": " The <a href=\"#features-sampler-YCbCr-conversion\">sampler Y&#8217;C<sub>B</sub>C<sub>R</sub> conversion feature</a> <strong class=\"purple\">must</strong> be enabled"
+        },
+        {
+          "vuid": "VUID-vkCreateSamplerYcbcrConversion-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateSamplerYcbcrConversion-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkSamplerYcbcrConversionCreateInfo\">VkSamplerYcbcrConversionCreateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateSamplerYcbcrConversion-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateSamplerYcbcrConversion-pYcbcrConversion-parameter",
+          "text": " <code>pYcbcrConversion</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkSamplerYcbcrConversion\">VkSamplerYcbcrConversion</a> handle"
+        }
+      ]
+    },
+    "VkSamplerYcbcrConversionCreateInfo": {
+      "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)+!(VK_ANDROID_external_memory_android_hardware_buffer)": [
+        {
+          "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-format-01649",
+          "text": " <code>format</code> <strong class=\"purple\">must</strong> not be <code>VK_FORMAT_UNDEFINED</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)+(VK_ANDROID_external_memory_android_hardware_buffer)": [
+        {
+          "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-format-01904",
+          "text": " If an external format conversion is being created, <code>format</code> <strong class=\"purple\">must</strong> be <code>VK_FORMAT_UNDEFINED</code>, otherwise it <strong class=\"purple\">must</strong> not be <code>VK_FORMAT_UNDEFINED</code>."
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-format-01650",
+          "text": " <code>format</code> <strong class=\"purple\">must</strong> support <code>VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT</code> or <code>VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-xChromaOffset-01651",
+          "text": " If the format does not support <code>VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT</code>, <code>xChromaOffset</code> and <code>yChromaOffset</code> <strong class=\"purple\">must</strong> not be <code>VK_CHROMA_LOCATION_COSITED_EVEN</code>"
+        },
+        {
+          "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-xChromaOffset-01652",
+          "text": " If the format does not support <code>VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT</code>, <code>xChromaOffset</code> and <code>yChromaOffset</code> <strong class=\"purple\">must</strong> not be <code>VK_CHROMA_LOCATION_MIDPOINT</code>"
+        },
+        {
+          "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-format-01653",
+          "text": " <code>format</code> <strong class=\"purple\">must</strong> represent unsigned normalized values (i.e. the format must be a <code>UNORM</code> format)"
+        },
+        {
+          "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-components-02581",
+          "text": " If the format has a <code>_422</code> or <code>_420</code> suffix, then <code>components.g</code> <strong class=\"purple\">must</strong> be <code>VK_COMPONENT_SWIZZLE_IDENTITY</code>"
+        },
+        {
+          "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-components-02582",
+          "text": " If the format has a <code>_422</code> or <code>_420</code> suffix, then <code>components.a</code> <strong class=\"purple\">must</strong> be <code>VK_COMPONENT_SWIZZLE_IDENTITY</code>, <code>VK_COMPONENT_SWIZZLE_ONE</code>, or <code>VK_COMPONENT_SWIZZLE_ZERO</code>"
+        },
+        {
+          "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-components-02583",
+          "text": " If the format has a <code>_422</code> or <code>_420</code> suffix, then <code>components.r</code> <strong class=\"purple\">must</strong> be <code>VK_COMPONENT_SWIZZLE_IDENTITY</code> or <code>VK_COMPONENT_SWIZZLE_B</code>"
+        },
+        {
+          "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-components-02584",
+          "text": " If the format has a <code>_422</code> or <code>_420</code> suffix, then <code>components.b</code> <strong class=\"purple\">must</strong> be <code>VK_COMPONENT_SWIZZLE_IDENTITY</code> or <code>VK_COMPONENT_SWIZZLE_R</code>"
+        },
+        {
+          "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-components-02585",
+          "text": " If the format has a <code>_422</code> or <code>_420</code> suffix, and if either <code>components.r</code> or <code>components.b</code> is <code>VK_COMPONENT_SWIZZLE_IDENTITY</code>, both values <strong class=\"purple\">must</strong> be <code>VK_COMPONENT_SWIZZLE_IDENTITY</code>"
+        },
+        {
+          "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrModel-01655",
+          "text": " If <code>ycbcrModel</code> is not <code>VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY</code>, then <code>components.r</code>, <code>components.g</code>, and <code>components.b</code> <strong class=\"purple\">must</strong> correspond to channels of the <code>format</code>; that is, <code>components.r</code>, <code>components.g</code>, and <code>components.b</code> <strong class=\"purple\">must</strong> not be <code>VK_COMPONENT_SWIZZLE_ZERO</code> or <code>VK_COMPONENT_SWIZZLE_ONE</code>, and <strong class=\"purple\">must</strong> not correspond to a channel which contains zero or one as a consequence of <a href=\"#textures-conversion-to-rgba\">conversion to RGBA</a>"
+        },
+        {
+          "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrRange-02748",
+          "text": " If <code>ycbcrRange</code> is <code>VK_SAMPLER_YCBCR_RANGE_ITU_NARROW</code> then the R, G and B channels obtained by applying the <code>component</code> swizzle to <code>format</code> <strong class=\"purple\">must</strong> each have a bit-depth greater than or equal to 8."
+        },
+        {
+          "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-forceExplicitReconstruction-01656",
+          "text": " If the format does not support <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT</code>, <code>forceExplicitReconstruction</code> <strong class=\"purple\">must</strong> be FALSE"
+        },
+        {
+          "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-chromaFilter-01657",
+          "text": " If the format does not support <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT</code>, <code>chromaFilter</code> <strong class=\"purple\">must</strong> be <code>VK_FILTER_NEAREST</code>"
+        },
+        {
+          "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkExternalFormatANDROID\">VkExternalFormatANDROID</a>"
+        },
+        {
+          "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-format-parameter",
+          "text": " <code>format</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFormat\">VkFormat</a> value"
+        },
+        {
+          "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrModel-parameter",
+          "text": " <code>ycbcrModel</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSamplerYcbcrModelConversion\">VkSamplerYcbcrModelConversion</a> value"
+        },
+        {
+          "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrRange-parameter",
+          "text": " <code>ycbcrRange</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSamplerYcbcrRange\">VkSamplerYcbcrRange</a> value"
+        },
+        {
+          "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-components-parameter",
+          "text": " <code>components</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkComponentMapping\">VkComponentMapping</a> structure"
+        },
+        {
+          "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-xChromaOffset-parameter",
+          "text": " <code>xChromaOffset</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkChromaLocation\">VkChromaLocation</a> value"
+        },
+        {
+          "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-yChromaOffset-parameter",
+          "text": " <code>yChromaOffset</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkChromaLocation\">VkChromaLocation</a> value"
+        },
+        {
+          "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-chromaFilter-parameter",
+          "text": " <code>chromaFilter</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFilter\">VkFilter</a> value"
+        }
+      ]
+    },
+    "vkDestroySamplerYcbcrConversion": {
+      "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-vkDestroySamplerYcbcrConversion-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroySamplerYcbcrConversion-ycbcrConversion-parameter",
+          "text": " If <code>ycbcrConversion</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>ycbcrConversion</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSamplerYcbcrConversion\">VkSamplerYcbcrConversion</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroySamplerYcbcrConversion-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkDestroySamplerYcbcrConversion-ycbcrConversion-parent",
+          "text": " If <code>ycbcrConversion</code> is a valid handle, it <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkCreateDescriptorSetLayout": {
+      "core": [
+        {
+          "vuid": "VUID-vkCreateDescriptorSetLayout-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateDescriptorSetLayout-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkDescriptorSetLayoutCreateInfo\">VkDescriptorSetLayoutCreateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateDescriptorSetLayout-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateDescriptorSetLayout-pSetLayout-parameter",
+          "text": " <code>pSetLayout</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkDescriptorSetLayout\">VkDescriptorSetLayout</a> handle"
+        }
+      ]
+    },
+    "VkDescriptorSetLayoutCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-binding-00279",
+          "text": " The <a href=\"#VkDescriptorSetLayoutBinding\">VkDescriptorSetLayoutBinding</a>::<code>binding</code> members of the elements of the <code>pBindings</code> array <strong class=\"purple\">must</strong> each have different values."
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkDescriptorSetLayoutBindingFlagsCreateInfoEXT\">VkDescriptorSetLayoutBindingFlagsCreateInfoEXT</a>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkDescriptorSetLayoutCreateFlagBits\">VkDescriptorSetLayoutCreateFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-pBindings-parameter",
+          "text": " If <code>bindingCount</code> is not <code>0</code>, <code>pBindings</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>bindingCount</code> valid <a href=\"#VkDescriptorSetLayoutBinding\">VkDescriptorSetLayoutBinding</a> structures"
+        }
+      ],
+      "(VK_KHR_push_descriptor)": [
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-flags-00280",
+          "text": " If <code>flags</code> contains <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR</code>, then all elements of <code>pBindings</code> <strong class=\"purple\">must</strong> not have a <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC</code> or <code>VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-flags-00281",
+          "text": " If <code>flags</code> contains <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR</code>, then the total number of elements of all bindings <strong class=\"purple\">must</strong> be less than or equal to <a href=\"#VkPhysicalDevicePushDescriptorPropertiesKHR\">VkPhysicalDevicePushDescriptorPropertiesKHR</a>::<code>maxPushDescriptors</code>"
+        }
+      ],
+      "(VK_KHR_push_descriptor)+(VK_EXT_inline_uniform_block)": [
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-flags-02208",
+          "text": " If <code>flags</code> contains <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR</code>, then all elements of <code>pBindings</code> <strong class=\"purple\">must</strong> not have a <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT</code>"
+        }
+      ],
+      "(VK_EXT_descriptor_indexing)": [
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-flags-03000",
+          "text": " If any binding has the <code>VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT</code> bit set, <code>flags</code> <strong class=\"purple\">must</strong> include <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-descriptorType-03001",
+          "text": " If any binding has the <code>VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT</code> bit set, then all bindings <strong class=\"purple\">must</strong> not have <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC</code> or <code>VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC</code>"
+        }
+      ]
+    },
+    "VkDescriptorSetLayoutBinding": {
+      "core": [
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutBinding-descriptorType-00282",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_SAMPLER</code> or <code>VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER</code>, and <code>descriptorCount</code> is not <code>0</code> and <code>pImmutableSamplers</code> is not <code>NULL</code>, <code>pImmutableSamplers</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>descriptorCount</code> valid <code>VkSampler</code> handles"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutBinding-descriptorCount-00283",
+          "text": " If <code>descriptorCount</code> is not <code>0</code>, <code>stageFlags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkShaderStageFlagBits\">VkShaderStageFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutBinding-descriptorType-01510",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT</code> and <code>descriptorCount</code> is not <code>0</code>, then <code>stageFlags</code> <strong class=\"purple\">must</strong> be <code>0</code> or <code>VK_SHADER_STAGE_FRAGMENT_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutBinding-descriptorType-parameter",
+          "text": " <code>descriptorType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDescriptorType\">VkDescriptorType</a> value"
+        }
+      ],
+      "(VK_EXT_inline_uniform_block)": [
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutBinding-descriptorType-02209",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT</code> then <code>descriptorCount</code> <strong class=\"purple\">must</strong> be a multiple of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutBinding-descriptorType-02210",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT</code> then <code>descriptorCount</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceInlineUniformBlockPropertiesEXT</code>::<code>maxInlineUniformBlockSize</code>"
+        }
+      ]
+    },
+    "VkDescriptorSetLayoutBindingFlagsCreateInfoEXT": {
+      "(VK_EXT_descriptor_indexing)": [
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-bindingCount-03002",
+          "text": " If <code>bindingCount</code> is not zero, <code>bindingCount</code> <strong class=\"purple\">must</strong> equal <a href=\"#VkDescriptorSetLayoutCreateInfo\">VkDescriptorSetLayoutCreateInfo</a>::<code>bindingCount</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-pBindingFlags-03004",
+          "text": " If an element of <code>pBindingFlags</code> includes <code>VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT</code>, then all other elements of <a href=\"#VkDescriptorSetLayoutCreateInfo\">VkDescriptorSetLayoutCreateInfo</a>::<code>pBindings</code> <strong class=\"purple\">must</strong> have a smaller value of <code>binding</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingUniformBufferUpdateAfterBind-03005",
+          "text": " If <a href=\"#VkPhysicalDeviceDescriptorIndexingFeaturesEXT\">VkPhysicalDeviceDescriptorIndexingFeaturesEXT</a>::<code>descriptorBindingUniformBufferUpdateAfterBind</code> is not enabled, all bindings with descriptor type <code>VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER</code> <strong class=\"purple\">must</strong> not use <code>VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingSampledImageUpdateAfterBind-03006",
+          "text": " If <a href=\"#VkPhysicalDeviceDescriptorIndexingFeaturesEXT\">VkPhysicalDeviceDescriptorIndexingFeaturesEXT</a>::<code>descriptorBindingSampledImageUpdateAfterBind</code> is not enabled, all bindings with descriptor type <code>VK_DESCRIPTOR_TYPE_SAMPLER</code>, <code>VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER</code>, or <code>VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE</code> <strong class=\"purple\">must</strong> not use <code>VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingStorageImageUpdateAfterBind-03007",
+          "text": " If <a href=\"#VkPhysicalDeviceDescriptorIndexingFeaturesEXT\">VkPhysicalDeviceDescriptorIndexingFeaturesEXT</a>::<code>descriptorBindingStorageImageUpdateAfterBind</code> is not enabled, all bindings with descriptor type <code>VK_DESCRIPTOR_TYPE_STORAGE_IMAGE</code> <strong class=\"purple\">must</strong> not use <code>VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingStorageBufferUpdateAfterBind-03008",
+          "text": " If <a href=\"#VkPhysicalDeviceDescriptorIndexingFeaturesEXT\">VkPhysicalDeviceDescriptorIndexingFeaturesEXT</a>::<code>descriptorBindingStorageBufferUpdateAfterBind</code> is not enabled, all bindings with descriptor type <code>VK_DESCRIPTOR_TYPE_STORAGE_BUFFER</code> <strong class=\"purple\">must</strong> not use <code>VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingUniformTexelBufferUpdateAfterBind-03009",
+          "text": " If <a href=\"#VkPhysicalDeviceDescriptorIndexingFeaturesEXT\">VkPhysicalDeviceDescriptorIndexingFeaturesEXT</a>::<code>descriptorBindingUniformTexelBufferUpdateAfterBind</code> is not enabled, all bindings with descriptor type <code>VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER</code> <strong class=\"purple\">must</strong> not use <code>VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingStorageTexelBufferUpdateAfterBind-03010",
+          "text": " If <a href=\"#VkPhysicalDeviceDescriptorIndexingFeaturesEXT\">VkPhysicalDeviceDescriptorIndexingFeaturesEXT</a>::<code>descriptorBindingStorageTexelBufferUpdateAfterBind</code> is not enabled, all bindings with descriptor type <code>VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER</code> <strong class=\"purple\">must</strong> not use <code>VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-None-03011",
+          "text": " All bindings with descriptor type <code>VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT</code>, <code>VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC</code>, or <code>VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC</code> <strong class=\"purple\">must</strong> not use <code>VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingUpdateUnusedWhilePending-03012",
+          "text": " If <a href=\"#VkPhysicalDeviceDescriptorIndexingFeaturesEXT\">VkPhysicalDeviceDescriptorIndexingFeaturesEXT</a>::<code>descriptorBindingUpdateUnusedWhilePending</code> is not enabled, all elements of <code>pBindingFlags</code> <strong class=\"purple\">must</strong> not include <code>VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingPartiallyBound-03013",
+          "text": " If <a href=\"#VkPhysicalDeviceDescriptorIndexingFeaturesEXT\">VkPhysicalDeviceDescriptorIndexingFeaturesEXT</a>::<code>descriptorBindingPartiallyBound</code> is not enabled, all elements of <code>pBindingFlags</code> <strong class=\"purple\">must</strong> not include <code>VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingVariableDescriptorCount-03014",
+          "text": " If <a href=\"#VkPhysicalDeviceDescriptorIndexingFeaturesEXT\">VkPhysicalDeviceDescriptorIndexingFeaturesEXT</a>::<code>descriptorBindingVariableDescriptorCount</code> is not enabled, all elements of <code>pBindingFlags</code> <strong class=\"purple\">must</strong> not include <code>VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-pBindingFlags-03015",
+          "text": " If an element of <code>pBindingFlags</code> includes <code>VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT</code>, that element&#8217;s <code>descriptorType</code> <strong class=\"purple\">must</strong> not be <code>VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC</code> or <code>VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-pBindingFlags-parameter",
+          "text": " If <code>bindingCount</code> is not <code>0</code>, and <code>pBindingFlags</code> is not <code>NULL</code>, <code>pBindingFlags</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>bindingCount</code> valid combinations of <a href=\"#VkDescriptorBindingFlagBitsEXT\">VkDescriptorBindingFlagBitsEXT</a> values"
+        }
+      ],
+      "(VK_EXT_descriptor_indexing)+(VK_KHR_push_descriptor)": [
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-flags-03003",
+          "text": " If <a href=\"#VkDescriptorSetLayoutCreateInfo\">VkDescriptorSetLayoutCreateInfo</a>::<code>flags</code> includes <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR</code>, then all elements of <code>pBindingFlags</code> <strong class=\"purple\">must</strong> not include <code>VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT</code>, <code>VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT</code>, or <code>VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT</code>"
+        }
+      ],
+      "(VK_EXT_descriptor_indexing)+(VK_EXT_inline_uniform_block)": [
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingInlineUniformBlockUpdateAfterBind-02211",
+          "text": " If <a href=\"#VkPhysicalDeviceInlineUniformBlockFeaturesEXT\">VkPhysicalDeviceInlineUniformBlockFeaturesEXT</a>::<code>descriptorBindingInlineUniformBlockUpdateAfterBind</code> is not enabled, all bindings with descriptor type <code>VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT</code> <strong class=\"purple\">must</strong> not use <code>VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT</code>"
+        }
+      ]
+    },
+    "vkGetDescriptorSetLayoutSupport": {
+      "(VK_VERSION_1_1,VK_KHR_maintenance3)": [
+        {
+          "vuid": "VUID-vkGetDescriptorSetLayoutSupport-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetDescriptorSetLayoutSupport-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkDescriptorSetLayoutCreateInfo\">VkDescriptorSetLayoutCreateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetDescriptorSetLayoutSupport-pSupport-parameter",
+          "text": " <code>pSupport</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkDescriptorSetLayoutSupport\">VkDescriptorSetLayoutSupport</a> structure"
+        }
+      ]
+    },
+    "VkDescriptorSetLayoutSupport": {
+      "(VK_VERSION_1_1,VK_KHR_maintenance3)": [
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutSupport-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetLayoutSupport-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkDescriptorSetVariableDescriptorCountLayoutSupportEXT\">VkDescriptorSetVariableDescriptorCountLayoutSupportEXT</a>"
+        }
+      ]
+    },
+    "VkDescriptorSetVariableDescriptorCountLayoutSupportEXT": {
+      "(VK_EXT_descriptor_indexing)": [
+        {
+          "vuid": "VUID-VkDescriptorSetVariableDescriptorCountLayoutSupportEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT</code>"
+        }
+      ]
+    },
+    "vkDestroyDescriptorSetLayout": {
+      "core": [
+        {
+          "vuid": "VUID-vkDestroyDescriptorSetLayout-descriptorSetLayout-00284",
+          "text": " If <code>VkAllocationCallbacks</code> were provided when <code>descriptorSetLayout</code> was created, a compatible set of callbacks <strong class=\"purple\">must</strong> be provided here"
+        },
+        {
+          "vuid": "VUID-vkDestroyDescriptorSetLayout-descriptorSetLayout-00285",
+          "text": " If no <code>VkAllocationCallbacks</code> were provided when <code>descriptorSetLayout</code> was created, <code>pAllocator</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroyDescriptorSetLayout-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyDescriptorSetLayout-descriptorSetLayout-parameter",
+          "text": " If <code>descriptorSetLayout</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>descriptorSetLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDescriptorSetLayout\">VkDescriptorSetLayout</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyDescriptorSetLayout-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkDestroyDescriptorSetLayout-descriptorSetLayout-parent",
+          "text": " If <code>descriptorSetLayout</code> is a valid handle, it <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkCreatePipelineLayout": {
+      "core": [
+        {
+          "vuid": "VUID-vkCreatePipelineLayout-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreatePipelineLayout-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkPipelineLayoutCreateInfo\">VkPipelineLayoutCreateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreatePipelineLayout-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreatePipelineLayout-pPipelineLayout-parameter",
+          "text": " <code>pPipelineLayout</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkPipelineLayout\">VkPipelineLayout</a> handle"
+        }
+      ]
+    },
+    "VkPipelineLayoutCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-setLayoutCount-00286",
+          "text": " <code>setLayoutCount</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxBoundDescriptorSets</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-pPushConstantRanges-00292",
+          "text": " Any two elements of <code>pPushConstantRanges</code> <strong class=\"purple\">must</strong> not include the same stage in <code>stageFlags</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-parameter",
+          "text": " If <code>setLayoutCount</code> is not <code>0</code>, <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>setLayoutCount</code> valid <a href=\"#VkDescriptorSetLayout\">VkDescriptorSetLayout</a> handles"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-pPushConstantRanges-parameter",
+          "text": " If <code>pushConstantRangeCount</code> is not <code>0</code>, <code>pPushConstantRanges</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pushConstantRangeCount</code> valid <a href=\"#VkPushConstantRange\">VkPushConstantRange</a> structures"
+        }
+      ],
+      "!(VK_EXT_descriptor_indexing)": [
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00287",
+          "text": " The total number of descriptors of the type <code>VK_DESCRIPTOR_TYPE_SAMPLER</code> and <code>VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER</code> accessible to any shader stage across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxPerStageDescriptorSamplers</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00288",
+          "text": " The total number of descriptors of the type <code>VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER</code> and <code>VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC</code> accessible to any shader stage across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxPerStageDescriptorUniformBuffers</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00289",
+          "text": " The total number of descriptors of the type <code>VK_DESCRIPTOR_TYPE_STORAGE_BUFFER</code> and <code>VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC</code> accessible to any shader stage across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxPerStageDescriptorStorageBuffers</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00290",
+          "text": " The total number of descriptors of the type <code>VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER</code>, <code>VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE</code>, and <code>VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER</code> accessible to any shader stage across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxPerStageDescriptorSampledImages</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00291",
+          "text": " The total number of descriptors of the type <code>VK_DESCRIPTOR_TYPE_STORAGE_IMAGE</code>, and <code>VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER</code> accessible to any shader stage across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxPerStageDescriptorStorageImages</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01676",
+          "text": " The total number of descriptors of the type <code>VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT</code> accessible to any given shader stage across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxPerStageDescriptorInputAttachments</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01677",
+          "text": " The total number of descriptors of the type <code>VK_DESCRIPTOR_TYPE_SAMPLER</code> and <code>VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER</code> accessible across all shader stages and across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxDescriptorSetSamplers</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01678",
+          "text": " The total number of descriptors of the type <code>VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER</code> accessible across all shader stages and across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxDescriptorSetUniformBuffers</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01679",
+          "text": " The total number of descriptors of the type <code>VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC</code> accessible across all shader stages and across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxDescriptorSetUniformBuffersDynamic</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01680",
+          "text": " The total number of descriptors of the type <code>VK_DESCRIPTOR_TYPE_STORAGE_BUFFER</code> accessible across all shader stages and across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxDescriptorSetStorageBuffers</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01681",
+          "text": " The total number of descriptors of the type <code>VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC</code> accessible across all shader stages and across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxDescriptorSetStorageBuffersDynamic</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01682",
+          "text": " The total number of descriptors of the type <code>VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER</code>, <code>VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE</code>, and <code>VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER</code> accessible across all shader stages and across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxDescriptorSetSampledImages</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01683",
+          "text": " The total number of descriptors of the type <code>VK_DESCRIPTOR_TYPE_STORAGE_IMAGE</code>, and <code>VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER</code> accessible across all shader stages and across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxDescriptorSetStorageImages</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01684",
+          "text": " The total number of descriptors of the type <code>VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT</code> accessible across all shader stages and across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxDescriptorSetInputAttachments</code>"
+        }
+      ],
+      "!(VK_EXT_descriptor_indexing)+(VK_EXT_inline_uniform_block)": [
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-02212",
+          "text": " The total number of bindings with a <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT</code> accessible to any given shader stage across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceInlineUniformBlockPropertiesEXT</code>::<code>maxPerStageDescriptorInlineUniformBlocks</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-02213",
+          "text": " The total number of bindings with a <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT</code> accessible across all shader stages and across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceInlineUniformBlockPropertiesEXT</code>::<code>maxDescriptorSetInlineUniformBlocks</code>"
+        }
+      ],
+      "(VK_EXT_descriptor_indexing)": [
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03016",
+          "text": " The total number of descriptors in descriptor set layouts created without the <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT</code> bit set with a <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_SAMPLER</code> and <code>VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER</code> accessible to any given shader stage across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxPerStageDescriptorSamplers</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03017",
+          "text": " The total number of descriptors in descriptor set layouts created without the <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT</code> bit set with a <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER</code> and <code>VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC</code> accessible to any given shader stage across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxPerStageDescriptorUniformBuffers</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03018",
+          "text": " The total number of descriptors in descriptor set layouts created without the <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT</code> bit set with a <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_STORAGE_BUFFER</code> and <code>VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC</code> accessible to any given shader stage across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxPerStageDescriptorStorageBuffers</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03019",
+          "text": " The total number of descriptors in descriptor set layouts created without the <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT</code> bit set with a <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER</code>, <code>VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE</code>, and <code>VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER</code> accessible to any given shader stage across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxPerStageDescriptorSampledImages</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03020",
+          "text": " The total number of descriptors in descriptor set layouts created without the <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT</code> bit set with a <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_STORAGE_IMAGE</code>, and <code>VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER</code> accessible to any given shader stage across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxPerStageDescriptorStorageImages</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03021",
+          "text": " The total number of descriptors in descriptor set layouts created without the <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT</code> bit set with a <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT</code> accessible to any given shader stage across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxPerStageDescriptorInputAttachments</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03022",
+          "text": " The total number of descriptors with a <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_SAMPLER</code> and <code>VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER</code> accessible to any given shader stage across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceDescriptorIndexingPropertiesEXT</code>::<code>maxPerStageDescriptorUpdateAfterBindSamplers</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03023",
+          "text": " The total number of descriptors with a <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER</code> and <code>VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC</code> accessible to any given shader stage across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceDescriptorIndexingPropertiesEXT</code>::<code>maxPerStageDescriptorUpdateAfterBindUniformBuffers</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03024",
+          "text": " The total number of descriptors with a <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_STORAGE_BUFFER</code> and <code>VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC</code> accessible to any given shader stage across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceDescriptorIndexingPropertiesEXT</code>::<code>maxPerStageDescriptorUpdateAfterBindStorageBuffers</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03025",
+          "text": " The total number of descriptors with a <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER</code>, <code>VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE</code>, and <code>VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER</code> accessible to any given shader stage across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceDescriptorIndexingPropertiesEXT</code>::<code>maxPerStageDescriptorUpdateAfterBindSampledImages</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03026",
+          "text": " The total number of descriptors with a <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_STORAGE_IMAGE</code>, and <code>VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER</code> accessible to any given shader stage across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceDescriptorIndexingPropertiesEXT</code>::<code>maxPerStageDescriptorUpdateAfterBindStorageImages</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03027",
+          "text": " The total number of descriptors with a <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT</code> accessible to any given shader stage across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceDescriptorIndexingPropertiesEXT</code>::<code>maxPerStageDescriptorUpdateAfterBindInputAttachments</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03028",
+          "text": " The total number of descriptors in descriptor set layouts created without the <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT</code> bit set with a <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_SAMPLER</code> and <code>VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER</code> accessible across all shader stages and across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxDescriptorSetSamplers</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03029",
+          "text": " The total number of descriptors in descriptor set layouts created without the <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT</code> bit set with a <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER</code> accessible across all shader stages and across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxDescriptorSetUniformBuffers</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03030",
+          "text": " The total number of descriptors in descriptor set layouts created without the <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT</code> bit set with a <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC</code> accessible across all shader stages and across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxDescriptorSetUniformBuffersDynamic</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03031",
+          "text": " The total number of descriptors in descriptor set layouts created without the <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT</code> bit set with a <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_STORAGE_BUFFER</code> accessible across all shader stages and across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxDescriptorSetStorageBuffers</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03032",
+          "text": " The total number of descriptors in descriptor set layouts created without the <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT</code> bit set with a <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC</code> accessible across all shader stages and across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxDescriptorSetStorageBuffersDynamic</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03033",
+          "text": " The total number of descriptors in descriptor set layouts created without the <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT</code> bit set with a <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER</code>, <code>VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE</code>, and <code>VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER</code> accessible across all shader stages and across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxDescriptorSetSampledImages</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03034",
+          "text": " The total number of descriptors in descriptor set layouts created without the <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT</code> bit set with a <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_STORAGE_IMAGE</code>, and <code>VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER</code> accessible across all shader stages and across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxDescriptorSetStorageImages</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03035",
+          "text": " The total number of descriptors in descriptor set layouts created without the <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT</code> bit set with a <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT</code> accessible across all shader stages and across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxDescriptorSetInputAttachments</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03036",
+          "text": " The total number of descriptors of the type <code>VK_DESCRIPTOR_TYPE_SAMPLER</code> and <code>VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER</code> accessible across all shader stages and across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceDescriptorIndexingPropertiesEXT</code>::<code>maxDescriptorSetUpdateAfterBindSamplers</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03037",
+          "text": " The total number of descriptors of the type <code>VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER</code> accessible across all shader stages and across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceDescriptorIndexingPropertiesEXT</code>::<code>maxDescriptorSetUpdateAfterBindUniformBuffers</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03038",
+          "text": " The total number of descriptors of the type <code>VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC</code> accessible across all shader stages and across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceDescriptorIndexingPropertiesEXT</code>::<code>maxDescriptorSetUpdateAfterBindUniformBuffersDynamic</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03039",
+          "text": " The total number of descriptors of the type <code>VK_DESCRIPTOR_TYPE_STORAGE_BUFFER</code> accessible across all shader stages and across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceDescriptorIndexingPropertiesEXT</code>::<code>maxDescriptorSetUpdateAfterBindStorageBuffers</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03040",
+          "text": " The total number of descriptors of the type <code>VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC</code> accessible across all shader stages and across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceDescriptorIndexingPropertiesEXT</code>::<code>maxDescriptorSetUpdateAfterBindStorageBuffersDynamic</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03041",
+          "text": " The total number of descriptors of the type <code>VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER</code>, <code>VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE</code>, and <code>VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER</code> accessible across all shader stages and across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceDescriptorIndexingPropertiesEXT</code>::<code>maxDescriptorSetUpdateAfterBindSampledImages</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03042",
+          "text": " The total number of descriptors of the type <code>VK_DESCRIPTOR_TYPE_STORAGE_IMAGE</code>, and <code>VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER</code> accessible across all shader stages and across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceDescriptorIndexingPropertiesEXT</code>::<code>maxDescriptorSetUpdateAfterBindStorageImages</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03043",
+          "text": " The total number of descriptors of the type <code>VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT</code> accessible across all shader stages and across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceDescriptorIndexingPropertiesEXT</code>::<code>maxDescriptorSetUpdateAfterBindInputAttachments</code>"
+        }
+      ],
+      "(VK_EXT_descriptor_indexing)+(VK_EXT_inline_uniform_block)": [
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-02214",
+          "text": " The total number of bindings in descriptor set layouts created without the <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT</code> bit set with a <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT</code> accessible to any given shader stage across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceInlineUniformBlockPropertiesEXT</code>::<code>maxPerStageDescriptorInlineUniformBlocks</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-02215",
+          "text": " The total number of bindings with a <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT</code> accessible to any given shader stage across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceInlineUniformBlockPropertiesEXT</code>::<code>maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-02216",
+          "text": " The total number of bindings in descriptor set layouts created without the <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT</code> bit set with a <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT</code> accessible across all shader stages and across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceInlineUniformBlockPropertiesEXT</code>::<code>maxDescriptorSetInlineUniformBlocks</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-02217",
+          "text": " The total number of bindings with a <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT</code> accessible across all shader stages and across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceInlineUniformBlockPropertiesEXT</code>::<code>maxDescriptorSetUpdateAfterBindInlineUniformBlocks</code>"
+        }
+      ],
+      "(VK_KHR_push_descriptor)": [
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00293",
+          "text": " <code>pSetLayouts</code> <strong class=\"purple\">must</strong> not contain more than one descriptor set layout that was created with <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR</code> set"
+        }
+      ],
+      "(VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-02381",
+          "text": " The total number of bindings with a <code>descriptorType</code> of <code>VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV</code> accessible across all shader stages and across all elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceRayTracingPropertiesNV</code>::<code>maxDescriptorSetAccelerationStructures</code>"
+        }
+      ]
+    },
+    "VkPushConstantRange": {
+      "core": [
+        {
+          "vuid": "VUID-VkPushConstantRange-offset-00294",
+          "text": " <code>offset</code> <strong class=\"purple\">must</strong> be less than <code>VkPhysicalDeviceLimits</code>::<code>maxPushConstantsSize</code>"
+        },
+        {
+          "vuid": "VUID-VkPushConstantRange-offset-00295",
+          "text": " <code>offset</code> <strong class=\"purple\">must</strong> be a multiple of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-VkPushConstantRange-size-00296",
+          "text": " <code>size</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkPushConstantRange-size-00297",
+          "text": " <code>size</code> <strong class=\"purple\">must</strong> be a multiple of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-VkPushConstantRange-size-00298",
+          "text": " <code>size</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxPushConstantsSize</code> minus <code>offset</code>"
+        },
+        {
+          "vuid": "VUID-VkPushConstantRange-stageFlags-parameter",
+          "text": " <code>stageFlags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkShaderStageFlagBits\">VkShaderStageFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkPushConstantRange-stageFlags-requiredbitmask",
+          "text": " <code>stageFlags</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        }
+      ]
+    },
+    "vkDestroyPipelineLayout": {
+      "core": [
+        {
+          "vuid": "VUID-vkDestroyPipelineLayout-pipelineLayout-00299",
+          "text": " If <code>VkAllocationCallbacks</code> were provided when <code>pipelineLayout</code> was created, a compatible set of callbacks <strong class=\"purple\">must</strong> be provided here"
+        },
+        {
+          "vuid": "VUID-vkDestroyPipelineLayout-pipelineLayout-00300",
+          "text": " If no <code>VkAllocationCallbacks</code> were provided when <code>pipelineLayout</code> was created, <code>pAllocator</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroyPipelineLayout-pipelineLayout-02004",
+          "text": " <code>pipelineLayout</code> <strong class=\"purple\">must</strong> not have been passed to any <code>vkCmd*</code> command for any command buffers that are still in the <a href=\"#commandbuffers-lifecycle\">recording state</a> when <code>vkDestroyPipelineLayout</code> is called"
+        },
+        {
+          "vuid": "VUID-vkDestroyPipelineLayout-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyPipelineLayout-pipelineLayout-parameter",
+          "text": " If <code>pipelineLayout</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>pipelineLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipelineLayout\">VkPipelineLayout</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyPipelineLayout-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkDestroyPipelineLayout-pipelineLayout-parent",
+          "text": " If <code>pipelineLayout</code> is a valid handle, it <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkCreateDescriptorPool": {
+      "core": [
+        {
+          "vuid": "VUID-vkCreateDescriptorPool-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateDescriptorPool-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkDescriptorPoolCreateInfo\">VkDescriptorPoolCreateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateDescriptorPool-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateDescriptorPool-pDescriptorPool-parameter",
+          "text": " <code>pDescriptorPool</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkDescriptorPool\">VkDescriptorPool</a> handle"
+        }
+      ]
+    },
+    "VkDescriptorPoolCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkDescriptorPoolCreateInfo-maxSets-00301",
+          "text": " <code>maxSets</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorPoolCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorPoolCreateInfo-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkDescriptorPoolInlineUniformBlockCreateInfoEXT\">VkDescriptorPoolInlineUniformBlockCreateInfoEXT</a>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorPoolCreateInfo-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkDescriptorPoolCreateFlagBits\">VkDescriptorPoolCreateFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkDescriptorPoolCreateInfo-pPoolSizes-parameter",
+          "text": " <code>pPoolSizes</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>poolSizeCount</code> valid <a href=\"#VkDescriptorPoolSize\">VkDescriptorPoolSize</a> structures"
+        },
+        {
+          "vuid": "VUID-VkDescriptorPoolCreateInfo-poolSizeCount-arraylength",
+          "text": " <code>poolSizeCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkDescriptorPoolInlineUniformBlockCreateInfoEXT": {
+      "(VK_EXT_inline_uniform_block)": [
+        {
+          "vuid": "VUID-VkDescriptorPoolInlineUniformBlockCreateInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT</code>"
+        }
+      ]
+    },
+    "VkDescriptorPoolSize": {
+      "core": [
+        {
+          "vuid": "VUID-VkDescriptorPoolSize-descriptorCount-00302",
+          "text": " <code>descriptorCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorPoolSize-type-parameter",
+          "text": " <code>type</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDescriptorType\">VkDescriptorType</a> value"
+        }
+      ],
+      "(VK_EXT_inline_uniform_block)": [
+        {
+          "vuid": "VUID-VkDescriptorPoolSize-type-02218",
+          "text": " If <code>type</code> is <code>VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT</code> then <code>descriptorCount</code> <strong class=\"purple\">must</strong> be a multiple of <code>4</code>"
+        }
+      ]
+    },
+    "vkDestroyDescriptorPool": {
+      "core": [
+        {
+          "vuid": "VUID-vkDestroyDescriptorPool-descriptorPool-00303",
+          "text": " All submitted commands that refer to <code>descriptorPool</code> (via any allocated descriptor sets) <strong class=\"purple\">must</strong> have completed execution"
+        },
+        {
+          "vuid": "VUID-vkDestroyDescriptorPool-descriptorPool-00304",
+          "text": " If <code>VkAllocationCallbacks</code> were provided when <code>descriptorPool</code> was created, a compatible set of callbacks <strong class=\"purple\">must</strong> be provided here"
+        },
+        {
+          "vuid": "VUID-vkDestroyDescriptorPool-descriptorPool-00305",
+          "text": " If no <code>VkAllocationCallbacks</code> were provided when <code>descriptorPool</code> was created, <code>pAllocator</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroyDescriptorPool-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyDescriptorPool-descriptorPool-parameter",
+          "text": " If <code>descriptorPool</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>descriptorPool</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDescriptorPool\">VkDescriptorPool</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyDescriptorPool-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkDestroyDescriptorPool-descriptorPool-parent",
+          "text": " If <code>descriptorPool</code> is a valid handle, it <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkAllocateDescriptorSets": {
+      "core": [
+        {
+          "vuid": "VUID-vkAllocateDescriptorSets-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkAllocateDescriptorSets-pAllocateInfo-parameter",
+          "text": " <code>pAllocateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkDescriptorSetAllocateInfo\">VkDescriptorSetAllocateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkAllocateDescriptorSets-pDescriptorSets-parameter",
+          "text": " <code>pDescriptorSets</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pAllocateInfo</code>::descriptorSetCount <a href=\"#VkDescriptorSet\">VkDescriptorSet</a> handles"
+        },
+        {
+          "vuid": "VUID-vkAllocateDescriptorSets-pAllocateInfo::descriptorSetCount-arraylength",
+          "text": " The value referenced by <code>pAllocateInfo</code>::<code>descriptorSetCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkDescriptorSetAllocateInfo": {
+      "!(VK_VERSION_1_1,VK_KHR_maintenance1)": [
+        {
+          "vuid": "VUID-VkDescriptorSetAllocateInfo-descriptorSetCount-00306",
+          "text": " <code>descriptorSetCount</code> <strong class=\"purple\">must</strong> not be greater than the number of sets that are currently available for allocation in <code>descriptorPool</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetAllocateInfo-descriptorPool-00307",
+          "text": " <code>descriptorPool</code> <strong class=\"purple\">must</strong> have enough free descriptor capacity remaining to allocate the descriptor sets of the specified layouts"
+        }
+      ],
+      "(VK_KHR_push_descriptor)": [
+        {
+          "vuid": "VUID-VkDescriptorSetAllocateInfo-pSetLayouts-00308",
+          "text": " Each element of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> not have been created with <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR</code> set"
+        }
+      ],
+      "(VK_EXT_descriptor_indexing)": [
+        {
+          "vuid": "VUID-VkDescriptorSetAllocateInfo-pSetLayouts-03044",
+          "text": " If any element of <code>pSetLayouts</code> was created with the <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT</code> bit set, <code>descriptorPool</code> <strong class=\"purple\">must</strong> have been created with the <code>VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT</code> flag set"
+        }
+      ],
+      "core": [
+        {
+          "vuid": "VUID-VkDescriptorSetAllocateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetAllocateInfo-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkDescriptorSetVariableDescriptorCountAllocateInfoEXT\">VkDescriptorSetVariableDescriptorCountAllocateInfoEXT</a>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetAllocateInfo-descriptorPool-parameter",
+          "text": " <code>descriptorPool</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDescriptorPool\">VkDescriptorPool</a> handle"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetAllocateInfo-pSetLayouts-parameter",
+          "text": " <code>pSetLayouts</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>descriptorSetCount</code> valid <a href=\"#VkDescriptorSetLayout\">VkDescriptorSetLayout</a> handles"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetAllocateInfo-descriptorSetCount-arraylength",
+          "text": " <code>descriptorSetCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetAllocateInfo-commonparent",
+          "text": " Both of <code>descriptorPool</code>, and the elements of <code>pSetLayouts</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
+    "VkDescriptorSetVariableDescriptorCountAllocateInfoEXT": {
+      "(VK_EXT_descriptor_indexing)": [
+        {
+          "vuid": "VUID-VkDescriptorSetVariableDescriptorCountAllocateInfoEXT-descriptorSetCount-03045",
+          "text": " If <code>descriptorSetCount</code> is not zero, <code>descriptorSetCount</code> <strong class=\"purple\">must</strong> equal <a href=\"#VkDescriptorSetAllocateInfo\">VkDescriptorSetAllocateInfo</a>::<code>descriptorSetCount</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetVariableDescriptorCountAllocateInfoEXT-pSetLayouts-03046",
+          "text": " If <a href=\"#VkDescriptorSetAllocateInfo\">VkDescriptorSetAllocateInfo</a>::<code>pSetLayouts</code>[i] has a variable descriptor count binding, then <code>pDescriptorCounts</code>[i] <strong class=\"purple\">must</strong> be less than or equal to the descriptor count specified for that binding when the descriptor set layout was created."
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetVariableDescriptorCountAllocateInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorSetVariableDescriptorCountAllocateInfoEXT-pDescriptorCounts-parameter",
+          "text": " If <code>descriptorSetCount</code> is not <code>0</code>, <code>pDescriptorCounts</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>descriptorSetCount</code> <code>uint32_t</code> values"
+        }
+      ]
+    },
+    "vkFreeDescriptorSets": {
+      "core": [
+        {
+          "vuid": "VUID-vkFreeDescriptorSets-pDescriptorSets-00309",
+          "text": " All submitted commands that refer to any element of <code>pDescriptorSets</code> <strong class=\"purple\">must</strong> have completed execution"
+        },
+        {
+          "vuid": "VUID-vkFreeDescriptorSets-pDescriptorSets-00310",
+          "text": " <code>pDescriptorSets</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>descriptorSetCount</code> <code>VkDescriptorSet</code> handles, each element of which <strong class=\"purple\">must</strong> either be a valid handle or <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>"
+        },
+        {
+          "vuid": "VUID-vkFreeDescriptorSets-pDescriptorSets-00311",
+          "text": " Each valid handle in <code>pDescriptorSets</code> <strong class=\"purple\">must</strong> have been allocated from <code>descriptorPool</code>"
+        },
+        {
+          "vuid": "VUID-vkFreeDescriptorSets-descriptorPool-00312",
+          "text": " <code>descriptorPool</code> <strong class=\"purple\">must</strong> have been created with the <code>VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT</code> flag"
+        },
+        {
+          "vuid": "VUID-vkFreeDescriptorSets-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkFreeDescriptorSets-descriptorPool-parameter",
+          "text": " <code>descriptorPool</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDescriptorPool\">VkDescriptorPool</a> handle"
+        },
+        {
+          "vuid": "VUID-vkFreeDescriptorSets-descriptorSetCount-arraylength",
+          "text": " <code>descriptorSetCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkFreeDescriptorSets-descriptorPool-parent",
+          "text": " <code>descriptorPool</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        },
+        {
+          "vuid": "VUID-vkFreeDescriptorSets-pDescriptorSets-parent",
+          "text": " Each element of <code>pDescriptorSets</code> that is a valid handle <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>descriptorPool</code>"
+        }
+      ]
+    },
+    "vkResetDescriptorPool": {
+      "core": [
+        {
+          "vuid": "VUID-vkResetDescriptorPool-descriptorPool-00313",
+          "text": " All uses of <code>descriptorPool</code> (via any allocated descriptor sets) <strong class=\"purple\">must</strong> have completed execution"
+        },
+        {
+          "vuid": "VUID-vkResetDescriptorPool-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkResetDescriptorPool-descriptorPool-parameter",
+          "text": " <code>descriptorPool</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDescriptorPool\">VkDescriptorPool</a> handle"
+        },
+        {
+          "vuid": "VUID-vkResetDescriptorPool-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkResetDescriptorPool-descriptorPool-parent",
+          "text": " <code>descriptorPool</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkUpdateDescriptorSets": {
+      "!(VK_EXT_descriptor_indexing)": [
+        {
+          "vuid": "VUID-vkUpdateDescriptorSets-dstSet-00314",
+          "text": " The <code>dstSet</code> member of each element of <code>pDescriptorWrites</code> or <code>pDescriptorCopies</code> <strong class=\"purple\">must</strong> not be used by any command that was recorded to a command buffer which is in the <a href=\"#commandbuffers-lifecycle\">pending state</a>."
+        }
+      ],
+      "(VK_EXT_descriptor_indexing)": [
+        {
+          "vuid": "VUID-vkUpdateDescriptorSets-None-03047",
+          "text": " Descriptor bindings updated by this command which were created without the <code>VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT</code> or <code>VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT</code> bits set <strong class=\"purple\">must</strong> not be used by any command that was recorded to a command buffer which is in the <a href=\"#commandbuffers-lifecycle\">pending state</a>."
+        }
+      ],
+      "core": [
+        {
+          "vuid": "VUID-vkUpdateDescriptorSets-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkUpdateDescriptorSets-pDescriptorWrites-parameter",
+          "text": " If <code>descriptorWriteCount</code> is not <code>0</code>, <code>pDescriptorWrites</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>descriptorWriteCount</code> valid <a href=\"#VkWriteDescriptorSet\">VkWriteDescriptorSet</a> structures"
+        },
+        {
+          "vuid": "VUID-vkUpdateDescriptorSets-pDescriptorCopies-parameter",
+          "text": " If <code>descriptorCopyCount</code> is not <code>0</code>, <code>pDescriptorCopies</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>descriptorCopyCount</code> valid <a href=\"#VkCopyDescriptorSet\">VkCopyDescriptorSet</a> structures"
+        }
+      ]
+    },
+    "VkWriteDescriptorSet": {
+      "core": [
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-dstBinding-00315",
+          "text": " <code>dstBinding</code> <strong class=\"purple\">must</strong> be less than or equal to the maximum value of <code>binding</code> of all <a href=\"#VkDescriptorSetLayoutBinding\">VkDescriptorSetLayoutBinding</a> structures specified when <code>dstSet</code>&#8217;s descriptor set layout was created"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-dstBinding-00316",
+          "text": " <code>dstBinding</code> <strong class=\"purple\">must</strong> be a binding with a non-zero <code>descriptorCount</code>"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorCount-00317",
+          "text": " All consecutive bindings updated via a single <code>VkWriteDescriptorSet</code> structure, except those with a <code>descriptorCount</code> of zero, <strong class=\"purple\">must</strong> have identical <code>descriptorType</code> and <code>stageFlags</code>."
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorCount-00318",
+          "text": " All consecutive bindings updated via a single <code>VkWriteDescriptorSet</code> structure, except those with a <code>descriptorCount</code> of zero, <strong class=\"purple\">must</strong> all either use immutable samplers or <strong class=\"purple\">must</strong> all not use immutable samplers."
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00319",
+          "text": " <code>descriptorType</code> <strong class=\"purple\">must</strong> match the type of <code>dstBinding</code> within <code>dstSet</code>"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-dstSet-00320",
+          "text": " <code>dstSet</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDescriptorSet\">VkDescriptorSet</a> handle"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-dstArrayElement-00321",
+          "text": " The sum of <code>dstArrayElement</code> and <code>descriptorCount</code> <strong class=\"purple\">must</strong> be less than or equal to the number of array elements in the descriptor set binding specified by <code>dstBinding</code>, and all applicable consecutive bindings, as described by <a href=\"#descriptorsets-updates-consecutive\">consecutive binding updates</a>"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00322",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_SAMPLER</code>, <code>VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER</code>, <code>VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE</code>, <code>VK_DESCRIPTOR_TYPE_STORAGE_IMAGE</code>, or <code>VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT</code>, <code>pImageInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>descriptorCount</code> valid <code>VkDescriptorImageInfo</code> structures"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00323",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER</code> or <code>VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER</code>, <code>pTexelBufferView</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>descriptorCount</code> valid <code>VkBufferView</code> handles"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00324",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER</code>, <code>VK_DESCRIPTOR_TYPE_STORAGE_BUFFER</code>, <code>VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC</code>, or <code>VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC</code>, <code>pBufferInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>descriptorCount</code> valid <code>VkDescriptorBufferInfo</code> structures"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00325",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_SAMPLER</code> or <code>VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER</code>, and <code>dstSet</code> was not allocated with a layout that included immutable samplers for <code>dstBinding</code> with <code>descriptorType</code>, the <code>sampler</code> member of each element of <code>pImageInfo</code> <strong class=\"purple\">must</strong> be a valid <code>VkSampler</code> object"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00326",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER</code>, <code>VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE</code>, <code>VK_DESCRIPTOR_TYPE_STORAGE_IMAGE</code>, or <code>VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT</code>, the <code>imageView</code> and <code>imageLayout</code> members of each element of <code>pImageInfo</code> <strong class=\"purple\">must</strong> be a valid <code>VkImageView</code> and <a href=\"#VkImageLayout\">VkImageLayout</a>, respectively"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-01946",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE</code>, then the <code>imageView</code> member of each <code>pImageInfo</code> element <strong class=\"purple\">must</strong> have been created without a <code>VkSamplerYcbcrConversionInfo</code> structure in its <code>pNext</code> chain"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-02738",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER</code>, and if any element of <code>pImageInfo</code> has a <code>imageView</code> member that was created with a <code>VkSamplerYcbcrConversionInfo</code> structure in its <code>pNext</code> chain, then <code>dstSet</code> <strong class=\"purple\">must</strong> have been allocated with a layout that included immutable samplers for <code>dstBinding</code>, and the corresponding immutable sampler <strong class=\"purple\">must</strong> have been created with an <em>identically defined</em> <code>VkSamplerYcbcrConversionInfo</code> object"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-01948",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER</code>, and <code>dstSet</code> was allocated with a layout that included immutable samplers for <code>dstBinding</code>, then the <code>imageView</code> member of each element of <code>pImageInfo</code> which corresponds to an immutable sampler that enables <a href=\"#samplers-YCbCr-conversion\">sampler Y&#8217;C<sub>B</sub>C<sub>R</sub> conversion</a> <strong class=\"purple\">must</strong> have been created with a <code>VkSamplerYcbcrConversionInfo</code> structure in its <code>pNext</code> chain with an <em>identically defined</em> <code>VkSamplerYcbcrConversionInfo</code> to the corresponding immutable sampler"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-01402",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_STORAGE_IMAGE</code>, for each descriptor that will be accessed via load or store operations the <code>imageLayout</code> member for corresponding elements of <code>pImageInfo</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_LAYOUT_GENERAL</code>"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00327",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER</code> or <code>VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC</code>, the <code>offset</code> member of each element of <code>pBufferInfo</code> <strong class=\"purple\">must</strong> be a multiple of <code>VkPhysicalDeviceLimits</code>::<code>minUniformBufferOffsetAlignment</code>"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00328",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_STORAGE_BUFFER</code> or <code>VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC</code>, the <code>offset</code> member of each element of <code>pBufferInfo</code> <strong class=\"purple\">must</strong> be a multiple of <code>VkPhysicalDeviceLimits</code>::<code>minStorageBufferOffsetAlignment</code>"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00329",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER</code>, <code>VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC</code>, <code>VK_DESCRIPTOR_TYPE_STORAGE_BUFFER</code>, or <code>VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC</code>, and the <code>buffer</code> member of any element of <code>pBufferInfo</code> is the handle of a non-sparse buffer, then that buffer <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00330",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER</code> or <code>VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC</code>, the <code>buffer</code> member of each element of <code>pBufferInfo</code> <strong class=\"purple\">must</strong> have been created with <code>VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT</code> set"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00331",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_STORAGE_BUFFER</code> or <code>VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC</code>, the <code>buffer</code> member of each element of <code>pBufferInfo</code> <strong class=\"purple\">must</strong> have been created with <code>VK_BUFFER_USAGE_STORAGE_BUFFER_BIT</code> set"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00332",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER</code> or <code>VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC</code>, the <code>range</code> member of each element of <code>pBufferInfo</code>, or the effective range if <code>range</code> is <code>VK_WHOLE_SIZE</code>, <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxUniformBufferRange</code>"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00333",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_STORAGE_BUFFER</code> or <code>VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC</code>, the <code>range</code> member of each element of <code>pBufferInfo</code>, or the effective range if <code>range</code> is <code>VK_WHOLE_SIZE</code>, <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxStorageBufferRange</code>"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00334",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER</code>, the <code>VkBuffer</code> that each element of <code>pTexelBufferView</code> was created from <strong class=\"purple\">must</strong> have been created with <code>VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT</code> set"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00335",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER</code>, the <code>VkBuffer</code> that each element of <code>pTexelBufferView</code> was created from <strong class=\"purple\">must</strong> have been created with <code>VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT</code> set"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00336",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_STORAGE_IMAGE</code> or <code>VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT</code>, the <code>imageView</code> member of each element of <code>pImageInfo</code> <strong class=\"purple\">must</strong> have been created with the identity swizzle"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00337",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE</code> or <code>VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER</code>, the <code>imageView</code> member of each element of <code>pImageInfo</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_SAMPLED_BIT</code> set"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-01403",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE</code> or <code>VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER</code>, the <code>imageLayout</code> member of each element of <code>pImageInfo</code> <strong class=\"purple\">must</strong> be a member of the list given in <a href=\"#descriptorsets-sampledimage\">Sampled Image</a> or <a href=\"#descriptorsets-combinedimagesampler\">Combined Image Sampler</a>, corresponding to its type"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00338",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT</code>, the <code>imageView</code> member of each element of <code>pImageInfo</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT</code> set"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00339",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_STORAGE_IMAGE</code>, the <code>imageView</code> member of each element of <code>pImageInfo</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_STORAGE_BIT</code> set"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-02752",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_SAMPLER</code>, then <code>dstSet</code> <strong class=\"purple\">must</strong> not have been allocated with a layout that included immutable samplers for <code>dstBinding</code>"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET</code>"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-pNext-pNext",
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkWriteDescriptorSetAccelerationStructureNV\">VkWriteDescriptorSetAccelerationStructureNV</a> or <a href=\"#VkWriteDescriptorSetInlineUniformBlockEXT\">VkWriteDescriptorSetInlineUniformBlockEXT</a>"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-sType-unique",
+          "text": " Each <code>sType</code> member in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-parameter",
+          "text": " <code>descriptorType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDescriptorType\">VkDescriptorType</a> value"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorCount-arraylength",
+          "text": " <code>descriptorCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-commonparent",
+          "text": " Both of <code>dstSet</code>, and the elements of <code>pTexelBufferView</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_EXT_inline_uniform_block)": [
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-02219",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT</code>, <code>dstArrayElement</code> <strong class=\"purple\">must</strong> be an integer multiple of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-02220",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT</code>, <code>descriptorCount</code> <strong class=\"purple\">must</strong> be an integer multiple of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-02221",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT</code>, the <code>pNext</code> chain <strong class=\"purple\">must</strong> include a <a href=\"#VkWriteDescriptorSetInlineUniformBlockEXT\">VkWriteDescriptorSetInlineUniformBlockEXT</a> structure whose <code>dataSize</code> member equals <code>descriptorCount</code>"
+        }
+      ],
+      "(VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorType-02382",
+          "text": " If <code>descriptorType</code> is <code>VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV</code>, the <code>pNext</code> chain <strong class=\"purple\">must</strong> include a <a href=\"#VkWriteDescriptorSetAccelerationStructureNV\">VkWriteDescriptorSetAccelerationStructureNV</a> structure whose <code>accelerationStructureCount</code> member equals <code>descriptorCount</code>"
+        }
+      ],
+      "(VK_EXT_descriptor_indexing)": [
+        {
+          "vuid": "VUID-VkWriteDescriptorSet-descriptorCount-03048",
+          "text": " All consecutive bindings updated via a single <code>VkWriteDescriptorSet</code> structure, except those with a <code>descriptorCount</code> of zero, <strong class=\"purple\">must</strong> have identical <a href=\"#VkDescriptorBindingFlagBitsEXT\">VkDescriptorBindingFlagBitsEXT</a>."
+        }
+      ]
+    },
+    "VkDescriptorBufferInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkDescriptorBufferInfo-offset-00340",
+          "text": " <code>offset</code> <strong class=\"purple\">must</strong> be less than the size of <code>buffer</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorBufferInfo-range-00341",
+          "text": " If <code>range</code> is not equal to <code>VK_WHOLE_SIZE</code>, <code>range</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorBufferInfo-range-00342",
+          "text": " If <code>range</code> is not equal to <code>VK_WHOLE_SIZE</code>, <code>range</code> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>buffer</code> minus <code>offset</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorBufferInfo-buffer-parameter",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        }
+      ]
+    },
+    "VkDescriptorImageInfo": {
+      "(VK_VERSION_1_1,VK_KHR_maintenance1)": [
+        {
+          "vuid": "VUID-VkDescriptorImageInfo-imageView-00343",
+          "text": " <code>imageView</code> <strong class=\"purple\">must</strong> not be 2D or 2D array image view created from a 3D image"
+        }
+      ],
+      "core": [
+        {
+          "vuid": "VUID-VkDescriptorImageInfo-imageView-01976",
+          "text": " If <code>imageView</code> is created from a depth/stencil image, the <code>aspectMask</code> used to create the <code>imageView</code> <strong class=\"purple\">must</strong> include either <code>VK_IMAGE_ASPECT_DEPTH_BIT</code> or <code>VK_IMAGE_ASPECT_STENCIL_BIT</code> but not both."
+        },
+        {
+          "vuid": "VUID-VkDescriptorImageInfo-imageLayout-00344",
+          "text": " <code>imageLayout</code> <strong class=\"purple\">must</strong> match the actual <a href=\"#VkImageLayout\">VkImageLayout</a> of each subresource accessible from <code>imageView</code> at the time this descriptor is accessed as defined by the <a href=\"#resources-image-layouts-matching-rule\">image layout matching rules</a>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorImageInfo-commonparent",
+          "text": " Both of <code>imageView</code>, and <code>sampler</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-VkDescriptorImageInfo-sampler-01564",
+          "text": " If <code>sampler</code> is used and the <a href=\"#VkFormat\">VkFormat</a> of the image is a <a href=\"#formats-requiring-sampler-ycbcr-conversion\">multi-planar format</a>, the image <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT</code>, and the <code>aspectMask</code> of the <code>imageView</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_ASPECT_PLANE_0_BIT</code>, <code>VK_IMAGE_ASPECT_PLANE_1_BIT</code> or (for three-plane formats only) <code>VK_IMAGE_ASPECT_PLANE_2_BIT</code>"
+        }
+      ]
+    },
+    "VkWriteDescriptorSetInlineUniformBlockEXT": {
+      "(VK_EXT_inline_uniform_block)": [
+        {
+          "vuid": "VUID-VkWriteDescriptorSetInlineUniformBlockEXT-dataSize-02222",
+          "text": " <code>dataSize</code> <strong class=\"purple\">must</strong> be an integer multiple of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSetInlineUniformBlockEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSetInlineUniformBlockEXT-pData-parameter",
+          "text": " <code>pData</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>dataSize</code> bytes"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSetInlineUniformBlockEXT-dataSize-arraylength",
+          "text": " <code>dataSize</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkWriteDescriptorSetAccelerationStructureNV": {
+      "(VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-VkWriteDescriptorSetAccelerationStructureNV-accelerationStructureCount-02236",
+          "text": " <code>accelerationStructureCount</code> <strong class=\"purple\">must</strong> be equal to <code>descriptorCount</code> in the extended structure"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSetAccelerationStructureNV-pAccelerationStructures-02764",
+          "text": " Each acceleration structure in <code>pAccelerationStructures</code> must have been created with <code>VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSetAccelerationStructureNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSetAccelerationStructureNV-pAccelerationStructures-parameter",
+          "text": " <code>pAccelerationStructures</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>accelerationStructureCount</code> valid <a href=\"#VkAccelerationStructureNV\">VkAccelerationStructureNV</a> handles"
+        },
+        {
+          "vuid": "VUID-VkWriteDescriptorSetAccelerationStructureNV-accelerationStructureCount-arraylength",
+          "text": " <code>accelerationStructureCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkCopyDescriptorSet": {
+      "core": [
+        {
+          "vuid": "VUID-VkCopyDescriptorSet-srcBinding-00345",
+          "text": " <code>srcBinding</code> <strong class=\"purple\">must</strong> be a valid binding within <code>srcSet</code>"
+        },
+        {
+          "vuid": "VUID-VkCopyDescriptorSet-srcArrayElement-00346",
+          "text": " The sum of <code>srcArrayElement</code> and <code>descriptorCount</code> <strong class=\"purple\">must</strong> be less than or equal to the number of array elements in the descriptor set binding specified by <code>srcBinding</code>, and all applicable consecutive bindings, as described by <a href=\"#descriptorsets-updates-consecutive\">consecutive binding updates</a>"
+        },
+        {
+          "vuid": "VUID-VkCopyDescriptorSet-dstBinding-00347",
+          "text": " <code>dstBinding</code> <strong class=\"purple\">must</strong> be a valid binding within <code>dstSet</code>"
+        },
+        {
+          "vuid": "VUID-VkCopyDescriptorSet-dstArrayElement-00348",
+          "text": " The sum of <code>dstArrayElement</code> and <code>descriptorCount</code> <strong class=\"purple\">must</strong> be less than or equal to the number of array elements in the descriptor set binding specified by <code>dstBinding</code>, and all applicable consecutive bindings, as described by <a href=\"#descriptorsets-updates-consecutive\">consecutive binding updates</a>"
+        },
+        {
+          "vuid": "VUID-VkCopyDescriptorSet-dstBinding-02632",
+          "text": " The type of <code>dstBinding</code> within <code>dstSet</code> <strong class=\"purple\">must</strong> be equal to the type of <code>srcBinding</code> within <code>srcSet</code>"
+        },
+        {
+          "vuid": "VUID-VkCopyDescriptorSet-srcSet-00349",
+          "text": " If <code>srcSet</code> is equal to <code>dstSet</code>, then the source and destination ranges of descriptors <strong class=\"purple\">must</strong> not overlap, where the ranges <strong class=\"purple\">may</strong> include array elements from consecutive bindings as described by <a href=\"#descriptorsets-updates-consecutive\">consecutive binding updates</a>"
+        },
+        {
+          "vuid": "VUID-VkCopyDescriptorSet-dstBinding-02753",
+          "text": " If the descriptor type of the descriptor set binding specified by <code>dstBinding</code> is <code>VK_DESCRIPTOR_TYPE_SAMPLER</code>, then <code>dstSet</code> <strong class=\"purple\">must</strong> not have been allocated with a layout that included immutable samplers for <code>dstBinding</code>"
+        },
+        {
+          "vuid": "VUID-VkCopyDescriptorSet-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET</code>"
+        },
+        {
+          "vuid": "VUID-VkCopyDescriptorSet-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkCopyDescriptorSet-srcSet-parameter",
+          "text": " <code>srcSet</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDescriptorSet\">VkDescriptorSet</a> handle"
+        },
+        {
+          "vuid": "VUID-VkCopyDescriptorSet-dstSet-parameter",
+          "text": " <code>dstSet</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDescriptorSet\">VkDescriptorSet</a> handle"
+        },
+        {
+          "vuid": "VUID-VkCopyDescriptorSet-commonparent",
+          "text": " Both of <code>dstSet</code>, and <code>srcSet</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_EXT_inline_uniform_block)": [
+        {
+          "vuid": "VUID-VkCopyDescriptorSet-srcBinding-02223",
+          "text": " If the descriptor type of the descriptor set binding specified by <code>srcBinding</code> is <code>VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT</code>, <code>srcArrayElement</code> <strong class=\"purple\">must</strong> be an integer multiple of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-VkCopyDescriptorSet-dstBinding-02224",
+          "text": " If the descriptor type of the descriptor set binding specified by <code>dstBinding</code> is <code>VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT</code>, <code>dstArrayElement</code> <strong class=\"purple\">must</strong> be an integer multiple of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-VkCopyDescriptorSet-srcBinding-02225",
+          "text": " If the descriptor type of the descriptor set binding specified by either <code>srcBinding</code> or <code>dstBinding</code> is <code>VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT</code>, <code>descriptorCount</code> <strong class=\"purple\">must</strong> be an integer multiple of <code>4</code>"
+        }
+      ],
+      "(VK_EXT_descriptor_indexing)": [
+        {
+          "vuid": "VUID-VkCopyDescriptorSet-srcSet-01918",
+          "text": " If <code>srcSet</code>&#8217;s layout was created with the <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT</code> flag set, then <code>dstSet</code>&#8217;s layout <strong class=\"purple\">must</strong> also have been created with the <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT</code> flag set"
+        },
+        {
+          "vuid": "VUID-VkCopyDescriptorSet-srcSet-01919",
+          "text": " If <code>srcSet</code>&#8217;s layout was created without the <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT</code> flag set, then <code>dstSet</code>&#8217;s layout <strong class=\"purple\">must</strong> also have been created without the <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT</code> flag set"
+        },
+        {
+          "vuid": "VUID-VkCopyDescriptorSet-srcSet-01920",
+          "text": " If the descriptor pool from which <code>srcSet</code> was allocated was created with the <code>VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT</code> flag set, then the descriptor pool from which <code>dstSet</code> was allocated <strong class=\"purple\">must</strong> also have been created with the <code>VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT</code> flag set"
+        },
+        {
+          "vuid": "VUID-VkCopyDescriptorSet-srcSet-01921",
+          "text": " If the descriptor pool from which <code>srcSet</code> was allocated was created without the <code>VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT</code> flag set, then the descriptor pool from which <code>dstSet</code> was allocated <strong class=\"purple\">must</strong> also have been created without the <code>VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT</code> flag set"
+        }
+      ]
+    },
+    "vkCreateDescriptorUpdateTemplate": {
+      "(VK_VERSION_1_1,VK_KHR_descriptor_update_template)": [
+        {
+          "vuid": "VUID-vkCreateDescriptorUpdateTemplate-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateDescriptorUpdateTemplate-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkDescriptorUpdateTemplateCreateInfo\">VkDescriptorUpdateTemplateCreateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateDescriptorUpdateTemplate-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateDescriptorUpdateTemplate-pDescriptorUpdateTemplate-parameter",
+          "text": " <code>pDescriptorUpdateTemplate</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkDescriptorUpdateTemplate\">VkDescriptorUpdateTemplate</a> handle"
+        }
+      ]
+    },
+    "VkDescriptorUpdateTemplateCreateInfo": {
+      "(VK_VERSION_1_1,VK_KHR_descriptor_update_template)": [
+        {
+          "vuid": "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00350",
+          "text": " If <code>templateType</code> is <code>VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET</code>, <code>descriptorSetLayout</code> <strong class=\"purple\">must</strong> be a valid <code>VkDescriptorSetLayout</code> handle"
+        },
+        {
+          "vuid": "VUID-VkDescriptorUpdateTemplateCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorUpdateTemplateCreateInfo-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorUpdateTemplateCreateInfo-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorUpdateTemplateCreateInfo-pDescriptorUpdateEntries-parameter",
+          "text": " <code>pDescriptorUpdateEntries</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>descriptorUpdateEntryCount</code> valid <a href=\"#VkDescriptorUpdateTemplateEntry\">VkDescriptorUpdateTemplateEntry</a> structures"
+        },
+        {
+          "vuid": "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-parameter",
+          "text": " <code>templateType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDescriptorUpdateTemplateType\">VkDescriptorUpdateTemplateType</a> value"
+        },
+        {
+          "vuid": "VUID-VkDescriptorUpdateTemplateCreateInfo-descriptorUpdateEntryCount-arraylength",
+          "text": " <code>descriptorUpdateEntryCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorUpdateTemplateCreateInfo-commonparent",
+          "text": " Both of <code>descriptorSetLayout</code>, and <code>pipelineLayout</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_descriptor_update_template)+(VK_KHR_push_descriptor)": [
+        {
+          "vuid": "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00351",
+          "text": " If <code>templateType</code> is <code>VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR</code>, <code>pipelineBindPoint</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipelineBindPoint\">VkPipelineBindPoint</a> value"
+        },
+        {
+          "vuid": "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00352",
+          "text": " If <code>templateType</code> is <code>VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR</code>, <code>pipelineLayout</code> <strong class=\"purple\">must</strong> be a valid <code>VkPipelineLayout</code> handle"
+        },
+        {
+          "vuid": "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00353",
+          "text": " If <code>templateType</code> is <code>VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR</code>, <code>set</code> <strong class=\"purple\">must</strong> be the unique set number in the pipeline layout that uses a descriptor set layout that was created with <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR</code>"
+        }
+      ]
+    },
+    "VkDescriptorUpdateTemplateEntry": {
+      "(VK_VERSION_1_1,VK_KHR_descriptor_update_template)": [
+        {
+          "vuid": "VUID-VkDescriptorUpdateTemplateEntry-dstBinding-00354",
+          "text": " <code>dstBinding</code> <strong class=\"purple\">must</strong> be a valid binding in the descriptor set layout implicitly specified when using a descriptor update template to update descriptors."
+        },
+        {
+          "vuid": "VUID-VkDescriptorUpdateTemplateEntry-dstArrayElement-00355",
+          "text": " <code>dstArrayElement</code> and <code>descriptorCount</code> <strong class=\"purple\">must</strong> be less than or equal to the number of array elements in the descriptor set binding implicitly specified when using a descriptor update template to update descriptors, and all applicable consecutive bindings, as described by <a href=\"#descriptorsets-updates-consecutive\">consecutive binding updates</a>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorUpdateTemplateEntry-descriptorType-parameter",
+          "text": " <code>descriptorType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDescriptorType\">VkDescriptorType</a> value"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_descriptor_update_template)+(VK_EXT_inline_uniform_block)": [
+        {
+          "vuid": "VUID-VkDescriptorUpdateTemplateEntry-descriptor-02226",
+          "text": " If <code>descriptor</code> type is <code>VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT</code>, <code>dstArrayElement</code> <strong class=\"purple\">must</strong> be an integer multiple of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-VkDescriptorUpdateTemplateEntry-descriptor-02227",
+          "text": " If <code>descriptor</code> type is <code>VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT</code>, <code>descriptorCount</code> <strong class=\"purple\">must</strong> be an integer multiple of <code>4</code>"
+        }
+      ]
+    },
+    "vkDestroyDescriptorUpdateTemplate": {
+      "(VK_VERSION_1_1,VK_KHR_descriptor_update_template)": [
+        {
+          "vuid": "VUID-vkDestroyDescriptorUpdateTemplate-descriptorSetLayout-00356",
+          "text": " If <code>VkAllocationCallbacks</code> were provided when <code>descriptorSetLayout</code> was created, a compatible set of callbacks <strong class=\"purple\">must</strong> be provided here"
+        },
+        {
+          "vuid": "VUID-vkDestroyDescriptorUpdateTemplate-descriptorSetLayout-00357",
+          "text": " If no <code>VkAllocationCallbacks</code> were provided when <code>descriptorSetLayout</code> was created, <code>pAllocator</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroyDescriptorUpdateTemplate-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyDescriptorUpdateTemplate-descriptorUpdateTemplate-parameter",
+          "text": " If <code>descriptorUpdateTemplate</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>descriptorUpdateTemplate</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDescriptorUpdateTemplate\">VkDescriptorUpdateTemplate</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyDescriptorUpdateTemplate-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkDestroyDescriptorUpdateTemplate-descriptorUpdateTemplate-parent",
+          "text": " If <code>descriptorUpdateTemplate</code> is a valid handle, it <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkUpdateDescriptorSetWithTemplate": {
+      "(VK_VERSION_1_1,VK_KHR_descriptor_update_template)": [
+        {
+          "vuid": "VUID-vkUpdateDescriptorSetWithTemplate-pData-01685",
+          "text": " <code>pData</code> <strong class=\"purple\">must</strong> be a valid pointer to a memory containing one or more valid instances of <a href=\"#VkDescriptorImageInfo\">VkDescriptorImageInfo</a>, <a href=\"#VkDescriptorBufferInfo\">VkDescriptorBufferInfo</a>, or <a href=\"#VkBufferView\">VkBufferView</a> in a layout defined by <code>descriptorUpdateTemplate</code> when it was created with <a href=\"#vkCreateDescriptorUpdateTemplate\">vkCreateDescriptorUpdateTemplate</a>"
+        },
+        {
+          "vuid": "VUID-vkUpdateDescriptorSetWithTemplate-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkUpdateDescriptorSetWithTemplate-descriptorSet-parameter",
+          "text": " <code>descriptorSet</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDescriptorSet\">VkDescriptorSet</a> handle"
+        },
+        {
+          "vuid": "VUID-vkUpdateDescriptorSetWithTemplate-descriptorUpdateTemplate-parameter",
+          "text": " <code>descriptorUpdateTemplate</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDescriptorUpdateTemplate\">VkDescriptorUpdateTemplate</a> handle"
+        },
+        {
+          "vuid": "VUID-vkUpdateDescriptorSetWithTemplate-descriptorUpdateTemplate-parent",
+          "text": " <code>descriptorUpdateTemplate</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkCmdBindDescriptorSets": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdBindDescriptorSets-pDescriptorSets-00358",
+          "text": " Each element of <code>pDescriptorSets</code> <strong class=\"purple\">must</strong> have been allocated with a <code>VkDescriptorSetLayout</code> that matches (is the same as, or identically defined as) the <code>VkDescriptorSetLayout</code> at set <em>n</em> in <code>layout</code>, where <em>n</em> is the sum of <code>firstSet</code> and the index into <code>pDescriptorSets</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBindDescriptorSets-dynamicOffsetCount-00359",
+          "text": " <code>dynamicOffsetCount</code> <strong class=\"purple\">must</strong> be equal to the total number of dynamic descriptors in <code>pDescriptorSets</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBindDescriptorSets-firstSet-00360",
+          "text": " The sum of <code>firstSet</code> and <code>descriptorSetCount</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPipelineLayoutCreateInfo</code>::<code>setLayoutCount</code> provided when <code>layout</code> was created"
+        },
+        {
+          "vuid": "VUID-vkCmdBindDescriptorSets-pipelineBindPoint-00361",
+          "text": " <code>pipelineBindPoint</code> <strong class=\"purple\">must</strong> be supported by the <code>commandBuffer</code>&#8217;s parent <code>VkCommandPool</code>&#8217;s queue family"
+        },
+        {
+          "vuid": "VUID-vkCmdBindDescriptorSets-pDynamicOffsets-01971",
+          "text": " Each element of <code>pDynamicOffsets</code> which corresponds to a descriptor binding with type <code>VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC</code> <strong class=\"purple\">must</strong> be a multiple of <code>VkPhysicalDeviceLimits</code>::<code>minUniformBufferOffsetAlignment</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBindDescriptorSets-pDynamicOffsets-01972",
+          "text": " Each element of <code>pDynamicOffsets</code> which corresponds to a descriptor binding with type <code>VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC</code> <strong class=\"purple\">must</strong> be a multiple of <code>VkPhysicalDeviceLimits</code>::<code>minStorageBufferOffsetAlignment</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBindDescriptorSets-pDescriptorSets-01979",
+          "text": " For each dynamic uniform or storage buffer binding in <code>pDescriptorSets</code>, the sum of the effective offset, as defined above, and the range of the binding <strong class=\"purple\">must</strong> be less than or equal to the size of the buffer"
+        },
+        {
+          "vuid": "VUID-vkCmdBindDescriptorSets-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdBindDescriptorSets-pipelineBindPoint-parameter",
+          "text": " <code>pipelineBindPoint</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipelineBindPoint\">VkPipelineBindPoint</a> value"
+        },
+        {
+          "vuid": "VUID-vkCmdBindDescriptorSets-layout-parameter",
+          "text": " <code>layout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipelineLayout\">VkPipelineLayout</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdBindDescriptorSets-pDescriptorSets-parameter",
+          "text": " <code>pDescriptorSets</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>descriptorSetCount</code> valid <a href=\"#VkDescriptorSet\">VkDescriptorSet</a> handles"
+        },
+        {
+          "vuid": "VUID-vkCmdBindDescriptorSets-pDynamicOffsets-parameter",
+          "text": " If <code>dynamicOffsetCount</code> is not <code>0</code>, <code>pDynamicOffsets</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>dynamicOffsetCount</code> <code>uint32_t</code> values"
+        },
+        {
+          "vuid": "VUID-vkCmdBindDescriptorSets-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdBindDescriptorSets-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdBindDescriptorSets-descriptorSetCount-arraylength",
+          "text": " <code>descriptorSetCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBindDescriptorSets-commonparent",
+          "text": " Each of <code>commandBuffer</code>, <code>layout</code>, and the elements of <code>pDescriptorSets</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
+    "vkCmdPushDescriptorSetKHR": {
+      "(VK_KHR_push_descriptor)": [
+        {
+          "vuid": "VUID-vkCmdPushDescriptorSetKHR-pipelineBindPoint-00363",
+          "text": " <code>pipelineBindPoint</code> <strong class=\"purple\">must</strong> be supported by the <code>commandBuffer</code>&#8217;s parent <code>VkCommandPool</code>&#8217;s queue family"
+        },
+        {
+          "vuid": "VUID-vkCmdPushDescriptorSetKHR-set-00364",
+          "text": " <code>set</code> <strong class=\"purple\">must</strong> be less than <code>VkPipelineLayoutCreateInfo</code>::<code>setLayoutCount</code> provided when <code>layout</code> was created"
+        },
+        {
+          "vuid": "VUID-vkCmdPushDescriptorSetKHR-set-00365",
+          "text": " <code>set</code> <strong class=\"purple\">must</strong> be the unique set number in the pipeline layout that uses a descriptor set layout that was created with <code>VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdPushDescriptorSetKHR-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdPushDescriptorSetKHR-pipelineBindPoint-parameter",
+          "text": " <code>pipelineBindPoint</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipelineBindPoint\">VkPipelineBindPoint</a> value"
+        },
+        {
+          "vuid": "VUID-vkCmdPushDescriptorSetKHR-layout-parameter",
+          "text": " <code>layout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipelineLayout\">VkPipelineLayout</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdPushDescriptorSetKHR-pDescriptorWrites-parameter",
+          "text": " <code>pDescriptorWrites</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>descriptorWriteCount</code> valid <a href=\"#VkWriteDescriptorSet\">VkWriteDescriptorSet</a> structures"
+        },
+        {
+          "vuid": "VUID-vkCmdPushDescriptorSetKHR-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdPushDescriptorSetKHR-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdPushDescriptorSetKHR-descriptorWriteCount-arraylength",
+          "text": " <code>descriptorWriteCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdPushDescriptorSetKHR-commonparent",
+          "text": " Both of <code>commandBuffer</code>, and <code>layout</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
+    "vkCmdPushDescriptorSetWithTemplateKHR": {
+      "(VK_KHR_push_descriptor)+(VK_VERSION_1_1,VK_KHR_descriptor_update_template)": [
+        {
+          "vuid": "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-00366",
+          "text": " The <code>pipelineBindPoint</code> specified during the creation of the descriptor update template <strong class=\"purple\">must</strong> be supported by the <code>commandBuffer</code>&#8217;s parent <code>VkCommandPool</code>&#8217;s queue family"
+        },
+        {
+          "vuid": "VUID-vkCmdPushDescriptorSetWithTemplateKHR-pData-01686",
+          "text": " <code>pData</code> <strong class=\"purple\">must</strong> be a valid pointer to a memory containing one or more valid instances of <a href=\"#VkDescriptorImageInfo\">VkDescriptorImageInfo</a>, <a href=\"#VkDescriptorBufferInfo\">VkDescriptorBufferInfo</a>, or <a href=\"#VkBufferView\">VkBufferView</a> in a layout defined by <code>descriptorUpdateTemplate</code> when it was created with <a href=\"#vkCreateDescriptorUpdateTemplateKHR\">vkCreateDescriptorUpdateTemplateKHR</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdPushDescriptorSetWithTemplateKHR-descriptorUpdateTemplate-parameter",
+          "text": " <code>descriptorUpdateTemplate</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDescriptorUpdateTemplate\">VkDescriptorUpdateTemplate</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdPushDescriptorSetWithTemplateKHR-layout-parameter",
+          "text": " <code>layout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipelineLayout\">VkPipelineLayout</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commonparent",
+          "text": " Each of <code>commandBuffer</code>, <code>descriptorUpdateTemplate</code>, and <code>layout</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
+    "vkCmdPushConstants": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdPushConstants-offset-01795",
+          "text": " For each byte in the range specified by <code>offset</code> and <code>size</code> and for each shader stage in <code>stageFlags</code>, there <strong class=\"purple\">must</strong> be a push constant range in <code>layout</code> that includes that byte and that stage"
+        },
+        {
+          "vuid": "VUID-vkCmdPushConstants-offset-01796",
+          "text": " For each byte in the range specified by <code>offset</code> and <code>size</code> and for each push constant range that overlaps that byte, <code>stageFlags</code> <strong class=\"purple\">must</strong> include all stages in that push constant range&#8217;s <a href=\"#VkPushConstantRange\">VkPushConstantRange</a>::<code>stageFlags</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdPushConstants-offset-00368",
+          "text": " <code>offset</code> <strong class=\"purple\">must</strong> be a multiple of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdPushConstants-size-00369",
+          "text": " <code>size</code> <strong class=\"purple\">must</strong> be a multiple of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdPushConstants-offset-00370",
+          "text": " <code>offset</code> <strong class=\"purple\">must</strong> be less than <code>VkPhysicalDeviceLimits</code>::<code>maxPushConstantsSize</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdPushConstants-size-00371",
+          "text": " <code>size</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxPushConstantsSize</code> minus <code>offset</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdPushConstants-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdPushConstants-layout-parameter",
+          "text": " <code>layout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipelineLayout\">VkPipelineLayout</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdPushConstants-stageFlags-parameter",
+          "text": " <code>stageFlags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkShaderStageFlagBits\">VkShaderStageFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-vkCmdPushConstants-stageFlags-requiredbitmask",
+          "text": " <code>stageFlags</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdPushConstants-pValues-parameter",
+          "text": " <code>pValues</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>size</code> bytes"
+        },
+        {
+          "vuid": "VUID-vkCmdPushConstants-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdPushConstants-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdPushConstants-size-arraylength",
+          "text": " <code>size</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdPushConstants-commonparent",
+          "text": " Both of <code>commandBuffer</code>, and <code>layout</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
+    "vkGetBufferDeviceAddressKHR": {
+      "(VK_EXT_buffer_device_address,VK_KHR_buffer_device_address)": [
+        {
+          "vuid": "VUID-vkGetBufferDeviceAddressKHR-bufferDeviceAddress-03324",
+          "text": " The <a href=\"#features-bufferDeviceAddress\">bufferDeviceAddress</a> or <a href=\"#features-bufferDeviceAddressEXT\"><code>VkPhysicalDeviceBufferDeviceAddressFeaturesEXT</code>::<code>bufferDeviceAddress</code></a> feature <strong class=\"purple\">must</strong> be enabled"
+        },
+        {
+          "vuid": "VUID-vkGetBufferDeviceAddressKHR-device-03325",
+          "text": " If <code>device</code> was created with multiple physical devices, then the <a href=\"#features-bufferDeviceAddressMultiDevice\">bufferDeviceAddressMultiDevice</a> or <a href=\"#features-bufferDeviceAddressMultiDeviceEXT\"><code>VkPhysicalDeviceBufferDeviceAddressFeaturesEXT</code>::<code>bufferDeviceAddressMultiDevice</code></a> feature <strong class=\"purple\">must</strong> be enabled"
+        },
+        {
+          "vuid": "VUID-vkGetBufferDeviceAddressKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetBufferDeviceAddressKHR-pInfo-parameter",
+          "text": " <code>pInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkBufferDeviceAddressInfoKHR\">VkBufferDeviceAddressInfoKHR</a> structure"
+        }
+      ]
+    },
+    "VkBufferDeviceAddressInfoKHR": {
+      "(VK_EXT_buffer_device_address,VK_KHR_buffer_device_address)": [
+        {
+          "vuid": "VUID-VkBufferDeviceAddressInfoKHR-buffer-02600",
+          "text": " If <code>buffer</code> is non-sparse and was not created with the <code>VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR</code> flag, then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-VkBufferDeviceAddressInfoKHR-buffer-02601",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> have been created with <code>VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferDeviceAddressInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferDeviceAddressInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferDeviceAddressInfoKHR-buffer-parameter",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        }
+      ]
+    },
+    "vkGetBufferOpaqueCaptureAddressKHR": {
+      "(VK_KHR_buffer_device_address)": [
+        {
+          "vuid": "VUID-vkGetBufferOpaqueCaptureAddressKHR-None-03326",
+          "text": " The <a href=\"#features-bufferDeviceAddress\">bufferDeviceAddress</a> feature <strong class=\"purple\">must</strong> be enabled"
+        },
+        {
+          "vuid": "VUID-vkGetBufferOpaqueCaptureAddressKHR-device-03327",
+          "text": " If <code>device</code> was created with multiple physical devices, then the <a href=\"#features-bufferDeviceAddressMultiDevice\">bufferDeviceAddressMultiDevice</a> feature <strong class=\"purple\">must</strong> be enabled"
+        },
+        {
+          "vuid": "VUID-vkGetBufferOpaqueCaptureAddressKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetBufferOpaqueCaptureAddressKHR-pInfo-parameter",
+          "text": " <code>pInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkBufferDeviceAddressInfoKHR\">VkBufferDeviceAddressInfoKHR</a> structure"
+        }
+      ]
+    },
+    "vkCreateQueryPool": {
+      "core": [
+        {
+          "vuid": "VUID-vkCreateQueryPool-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateQueryPool-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkQueryPoolCreateInfo\">VkQueryPoolCreateInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateQueryPool-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateQueryPool-pQueryPool-parameter",
+          "text": " <code>pQueryPool</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkQueryPool\">VkQueryPool</a> handle"
+        }
+      ]
+    },
+    "VkQueryPoolCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkQueryPoolCreateInfo-queryType-00791",
+          "text": " If the <a href=\"#features-pipelineStatisticsQuery\">pipeline statistics queries</a> feature is not enabled, <code>queryType</code> <strong class=\"purple\">must</strong> not be <code>VK_QUERY_TYPE_PIPELINE_STATISTICS</code>"
+        },
+        {
+          "vuid": "VUID-VkQueryPoolCreateInfo-queryType-00792",
+          "text": " If <code>queryType</code> is <code>VK_QUERY_TYPE_PIPELINE_STATISTICS</code>, <code>pipelineStatistics</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkQueryPipelineStatisticFlagBits\">VkQueryPipelineStatisticFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkQueryPoolCreateInfo-queryCount-02763",
+          "text": " <code>queryCount</code> <strong class=\"purple\">must</strong> be greater than 0"
+        },
+        {
+          "vuid": "VUID-VkQueryPoolCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkQueryPoolCreateInfo-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkQueryPoolPerformanceCreateInfoKHR\">VkQueryPoolPerformanceCreateInfoKHR</a>"
+        },
+        {
+          "vuid": "VUID-VkQueryPoolCreateInfo-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkQueryPoolCreateInfo-queryType-parameter",
+          "text": " <code>queryType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkQueryType\">VkQueryType</a> value"
+        }
+      ],
+      "(VK_KHR_performance_query)": [
+        {
+          "vuid": "VUID-VkQueryPoolCreateInfo-queryType-03222",
+          "text": " If <code>queryType</code> is <code>VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR</code>, the <code>pNext</code> chain <strong class=\"purple\">must</strong> include a structure of type <a href=\"#VkQueryPoolPerformanceCreateInfoKHR\">VkQueryPoolPerformanceCreateInfoKHR</a>"
+        }
+      ]
+    },
+    "VkQueryPoolPerformanceCreateInfoKHR": {
+      "(VK_KHR_performance_query)": [
+        {
+          "vuid": "VUID-VkQueryPoolPerformanceCreateInfoKHR-queueFamilyIndex-03236",
+          "text": " <code>queueFamilyIndex</code> <strong class=\"purple\">must</strong> be a valid queue family index of the device"
+        },
+        {
+          "vuid": "VUID-VkQueryPoolPerformanceCreateInfoKHR-performanceCounterQueryPools-03237",
+          "text": " The <a href=\"#features-features-performanceCounterQueryPools\"><code>performanceCounterQueryPools</code></a> feature <strong class=\"purple\">must</strong> be enabled"
+        },
+        {
+          "vuid": "VUID-VkQueryPoolPerformanceCreateInfoKHR-pCounterIndices-03321",
+          "text": " Each element of <code>pCounterIndices</code> <strong class=\"purple\">must</strong> be in the range of counters reported by <code>vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR</code> for the queue family specified in <code>queueFamilyIndex</code>"
+        },
+        {
+          "vuid": "VUID-VkQueryPoolPerformanceCreateInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkQueryPoolPerformanceCreateInfoKHR-pCounterIndices-parameter",
+          "text": " <code>pCounterIndices</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>counterIndexCount</code> <code>uint32_t</code> values"
+        },
+        {
+          "vuid": "VUID-VkQueryPoolPerformanceCreateInfoKHR-counterIndexCount-arraylength",
+          "text": " <code>counterIndexCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR": {
+      "(VK_KHR_performance_query)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR-pPerformanceQueryCreateInfo-parameter",
+          "text": " <code>pPerformanceQueryCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkQueryPoolPerformanceCreateInfoKHR\">VkQueryPoolPerformanceCreateInfoKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR-pNumPasses-parameter",
+          "text": " <code>pNumPasses</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        }
+      ]
+    },
+    "vkDestroyQueryPool": {
+      "core": [
+        {
+          "vuid": "VUID-vkDestroyQueryPool-queryPool-00793",
+          "text": " All submitted commands that refer to <code>queryPool</code> <strong class=\"purple\">must</strong> have completed execution"
+        },
+        {
+          "vuid": "VUID-vkDestroyQueryPool-queryPool-00794",
+          "text": " If <code>VkAllocationCallbacks</code> were provided when <code>queryPool</code> was created, a compatible set of callbacks <strong class=\"purple\">must</strong> be provided here"
+        },
+        {
+          "vuid": "VUID-vkDestroyQueryPool-queryPool-00795",
+          "text": " If no <code>VkAllocationCallbacks</code> were provided when <code>queryPool</code> was created, <code>pAllocator</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroyQueryPool-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyQueryPool-queryPool-parameter",
+          "text": " If <code>queryPool</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>queryPool</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkQueryPool\">VkQueryPool</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyQueryPool-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkDestroyQueryPool-queryPool-parent",
+          "text": " If <code>queryPool</code> is a valid handle, it <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkCmdResetQueryPool": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdResetQueryPool-firstQuery-00796",
+          "text": " <code>firstQuery</code> <strong class=\"purple\">must</strong> be less than the number of queries in <code>queryPool</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdResetQueryPool-firstQuery-00797",
+          "text": " The sum of <code>firstQuery</code> and <code>queryCount</code> <strong class=\"purple\">must</strong> be less than or equal to the number of queries in <code>queryPool</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdResetQueryPool-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdResetQueryPool-queryPool-parameter",
+          "text": " <code>queryPool</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkQueryPool\">VkQueryPool</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdResetQueryPool-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdResetQueryPool-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdResetQueryPool-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called outside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdResetQueryPool-commonparent",
+          "text": " Both of <code>commandBuffer</code>, and <code>queryPool</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
+    "vkResetQueryPoolEXT": {
+      "(VK_EXT_host_query_reset)": [
+        {
+          "vuid": "VUID-vkResetQueryPoolEXT-None-02665",
+          "text": " The <a href=\"#features-hostQueryReset\">hostQueryReset</a> feature <strong class=\"purple\">must</strong> be enabled"
+        },
+        {
+          "vuid": "VUID-vkResetQueryPoolEXT-firstQuery-02666",
+          "text": " <code>firstQuery</code> <strong class=\"purple\">must</strong> be less than the number of queries in <code>queryPool</code>"
+        },
+        {
+          "vuid": "VUID-vkResetQueryPoolEXT-firstQuery-02667",
+          "text": " The sum of <code>firstQuery</code> and <code>queryCount</code> <strong class=\"purple\">must</strong> be less than or equal to the number of queries in <code>queryPool</code>"
+        },
+        {
+          "vuid": "VUID-vkResetQueryPoolEXT-firstQuery-02741",
+          "text": " Submitted commands that refer to the range specified by <code>firstQuery</code> and <code>queryCount</code> in <code>queryPool</code> <strong class=\"purple\">must</strong> have completed execution"
+        },
+        {
+          "vuid": "VUID-vkResetQueryPoolEXT-firstQuery-02742",
+          "text": " The range of queries specified by <code>firstQuery</code> and <code>queryCount</code> in <code>queryPool</code> <strong class=\"purple\">must</strong> not be in use by calls to <a href=\"#vkGetQueryPoolResults\">vkGetQueryPoolResults</a> or <code>vkResetQueryPoolEXT</code> in other threads"
+        },
+        {
+          "vuid": "VUID-vkResetQueryPoolEXT-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkResetQueryPoolEXT-queryPool-parameter",
+          "text": " <code>queryPool</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkQueryPool\">VkQueryPool</a> handle"
+        },
+        {
+          "vuid": "VUID-vkResetQueryPoolEXT-queryPool-parent",
+          "text": " <code>queryPool</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkCmdBeginQuery": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdBeginQuery-queryPool-01922",
+          "text": " <code>queryPool</code> <strong class=\"purple\">must</strong> have been created with a <code>queryType</code> that differs from that of any queries that are <a href=\"#queries-operation-active\">active</a> within <code>commandBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQuery-None-00807",
+          "text": " All queries used by the command <strong class=\"purple\">must</strong> be unavailable"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQuery-queryType-02804",
+          "text": " The <code>queryType</code> used to create <code>queryPool</code> <strong class=\"purple\">must</strong> not be <code>VK_QUERY_TYPE_TIMESTAMP</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQuery-queryType-00800",
+          "text": " If the <a href=\"#features-occlusionQueryPrecise\">precise occlusion queries</a> feature is not enabled, or the <code>queryType</code> used to create <code>queryPool</code> was not <code>VK_QUERY_TYPE_OCCLUSION</code>, <code>flags</code> <strong class=\"purple\">must</strong> not contain <code>VK_QUERY_CONTROL_PRECISE_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQuery-query-00802",
+          "text": " <code>query</code> <strong class=\"purple\">must</strong> be less than the number of queries in <code>queryPool</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQuery-queryType-00803",
+          "text": " If the <code>queryType</code> used to create <code>queryPool</code> was <code>VK_QUERY_TYPE_OCCLUSION</code>, the <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQuery-queryType-00804",
+          "text": " If the <code>queryType</code> used to create <code>queryPool</code> was <code>VK_QUERY_TYPE_PIPELINE_STATISTICS</code> and any of the <code>pipelineStatistics</code> indicate graphics operations, the <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQuery-queryType-00805",
+          "text": " If the <code>queryType</code> used to create <code>queryPool</code> was <code>VK_QUERY_TYPE_PIPELINE_STATISTICS</code> and any of the <code>pipelineStatistics</code> indicate compute operations, the <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQuery-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQuery-queryPool-parameter",
+          "text": " <code>queryPool</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkQueryPool\">VkQueryPool</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQuery-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkQueryControlFlagBits\">VkQueryControlFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQuery-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQuery-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQuery-commonparent",
+          "text": " Both of <code>commandBuffer</code>, and <code>queryPool</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdBeginQuery-commandBuffer-01885",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> not be a protected command buffer"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-vkCmdBeginQuery-query-00808",
+          "text": " If called within a render pass instance, the sum of <code>query</code> and the number of bits set in the current subpass&#8217;s view mask <strong class=\"purple\">must</strong> be less than or equal to the number of queries in <code>queryPool</code>"
+        }
+      ],
+      "(VK_EXT_transform_feedback)": [
+        {
+          "vuid": "VUID-vkCmdBeginQuery-queryType-02327",
+          "text": " If the <code>queryType</code> used to create <code>queryPool</code> was <code>VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT</code> the <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQuery-queryType-02328",
+          "text": " If the <code>queryType</code> used to create <code>queryPool</code> was <code>VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT</code> then <code>VkPhysicalDeviceTransformFeedbackPropertiesEXT</code>::<code>transformFeedbackQueries</code> <strong class=\"purple\">must</strong> be supported"
+        }
+      ],
+      "(VK_KHR_performance_query)": [
+        {
+          "vuid": "VUID-vkCmdBeginQuery-queryPool-03223",
+          "text": " If <code>queryPool</code> was created with a <code>queryType</code> of <code>VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR</code>, the <a href=\"#profiling-lock\">profiling lock</a> <strong class=\"purple\">must</strong> have been held before <a href=\"#vkBeginCommandBuffer\">vkBeginCommandBuffer</a> was called on <code>commandBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQuery-queryPool-03224",
+          "text": " If <code>queryPool</code> was created with a <code>queryType</code> of <code>VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR</code> and one of the counters used to create <code>queryPool</code> was <code>VK_QUERY_SCOPE_COMMAND_BUFFER_KHR</code>, the query begin <strong class=\"purple\">must</strong> be the first recorded command in <code>commandBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQuery-queryPool-03225",
+          "text": " If <code>queryPool</code> was created with a <code>queryType</code> of <code>VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR</code> and one of the counters used to create <code>queryPool</code> was <code>VK_QUERY_SCOPE_RENDER_PASS_KHR</code>, the begin command <strong class=\"purple\">must</strong> not be recorded within a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQuery-queryPool-03226",
+          "text": " If <code>queryPool</code> was created with a <code>queryType</code> of <code>VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR</code> and another query pool with a <code>queryType</code> <code>VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR</code> has been used within <code>commandBuffer</code>, its parent primary command buffer or secondary command buffer recorded within the same parent primary command buffer as <code>commandBuffer</code>, the <a href=\"#features-features-performanceCounterMultipleQueryPools\"><code>performanceCounterMultipleQueryPools</code></a> feature <strong class=\"purple\">must</strong> be enabled"
+        }
+      ]
+    },
+    "vkCmdBeginQueryIndexedEXT": {
+      "(VK_EXT_transform_feedback)": [
+        {
+          "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryPool-01922",
+          "text": " <code>queryPool</code> <strong class=\"purple\">must</strong> have been created with a <code>queryType</code> that differs from that of any queries that are <a href=\"#queries-operation-active\">active</a> within <code>commandBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQueryIndexedEXT-None-00807",
+          "text": " All queries used by the command <strong class=\"purple\">must</strong> be unavailable"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryType-02804",
+          "text": " The <code>queryType</code> used to create <code>queryPool</code> <strong class=\"purple\">must</strong> not be <code>VK_QUERY_TYPE_TIMESTAMP</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryType-00800",
+          "text": " If the <a href=\"#features-occlusionQueryPrecise\">precise occlusion queries</a> feature is not enabled, or the <code>queryType</code> used to create <code>queryPool</code> was not <code>VK_QUERY_TYPE_OCCLUSION</code>, <code>flags</code> <strong class=\"purple\">must</strong> not contain <code>VK_QUERY_CONTROL_PRECISE_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQueryIndexedEXT-query-00802",
+          "text": " <code>query</code> <strong class=\"purple\">must</strong> be less than the number of queries in <code>queryPool</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryType-00803",
+          "text": " If the <code>queryType</code> used to create <code>queryPool</code> was <code>VK_QUERY_TYPE_OCCLUSION</code>, the <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryType-00804",
+          "text": " If the <code>queryType</code> used to create <code>queryPool</code> was <code>VK_QUERY_TYPE_PIPELINE_STATISTICS</code> and any of the <code>pipelineStatistics</code> indicate graphics operations, the <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryType-00805",
+          "text": " If the <code>queryType</code> used to create <code>queryPool</code> was <code>VK_QUERY_TYPE_PIPELINE_STATISTICS</code> and any of the <code>pipelineStatistics</code> indicate compute operations, the <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryType-02338",
+          "text": " If the <code>queryType</code> used to create <code>queryPool</code> was <code>VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT</code> the <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryType-02339",
+          "text": " If the <code>queryType</code> used to create <code>queryPool</code> was <code>VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT</code> the <code>index</code> parameter <strong class=\"purple\">must</strong> be less than <code>VkPhysicalDeviceTransformFeedbackPropertiesEXT</code>::<code>maxTransformFeedbackStreams</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryType-02340",
+          "text": " If the <code>queryType</code> used to create <code>queryPool</code> was not <code>VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT</code> the <code>index</code> <strong class=\"purple\">must</strong> be zero"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryType-02341",
+          "text": " If the <code>queryType</code> used to create <code>queryPool</code> was <code>VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT</code> then <code>VkPhysicalDeviceTransformFeedbackPropertiesEXT</code>::<code>transformFeedbackQueries</code> <strong class=\"purple\">must</strong> be supported"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryPool-parameter",
+          "text": " <code>queryPool</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkQueryPool\">VkQueryPool</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQueryIndexedEXT-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkQueryControlFlagBits\">VkQueryControlFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQueryIndexedEXT-commonparent",
+          "text": " Both of <code>commandBuffer</code>, and <code>queryPool</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_EXT_transform_feedback)+(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-01885",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> not be a protected command buffer"
+        }
+      ],
+      "(VK_EXT_transform_feedback)+(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-vkCmdBeginQueryIndexedEXT-query-00808",
+          "text": " If called within a render pass instance, the sum of <code>query</code> and the number of bits set in the current subpass&#8217;s view mask <strong class=\"purple\">must</strong> be less than or equal to the number of queries in <code>queryPool</code>"
+        }
+      ],
+      "(VK_EXT_transform_feedback)+(VK_KHR_performance_query)": [
+        {
+          "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryPool-03223",
+          "text": " If <code>queryPool</code> was created with a <code>queryType</code> of <code>VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR</code>, the <a href=\"#profiling-lock\">profiling lock</a> <strong class=\"purple\">must</strong> have been held before <a href=\"#vkBeginCommandBuffer\">vkBeginCommandBuffer</a> was called on <code>commandBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryPool-03224",
+          "text": " If <code>queryPool</code> was created with a <code>queryType</code> of <code>VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR</code> and one of the counters used to create <code>queryPool</code> was <code>VK_QUERY_SCOPE_COMMAND_BUFFER_KHR</code>, the query begin <strong class=\"purple\">must</strong> be the first recorded command in <code>commandBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryPool-03225",
+          "text": " If <code>queryPool</code> was created with a <code>queryType</code> of <code>VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR</code> and one of the counters used to create <code>queryPool</code> was <code>VK_QUERY_SCOPE_RENDER_PASS_KHR</code>, the begin command <strong class=\"purple\">must</strong> not be recorded within a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryPool-03226",
+          "text": " If <code>queryPool</code> was created with a <code>queryType</code> of <code>VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR</code> and another query pool with a <code>queryType</code> <code>VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR</code> has been used within <code>commandBuffer</code>, its parent primary command buffer or secondary command buffer recorded within the same parent primary command buffer as <code>commandBuffer</code>, the <a href=\"#features-features-performanceCounterMultipleQueryPools\"><code>performanceCounterMultipleQueryPools</code></a> feature <strong class=\"purple\">must</strong> be enabled"
+        }
+      ]
+    },
+    "vkCmdEndQuery": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdEndQuery-None-01923",
+          "text": " All queries used by the command <strong class=\"purple\">must</strong> be <a href=\"#queries-operation-active\">active</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdEndQuery-query-00810",
+          "text": " <code>query</code> <strong class=\"purple\">must</strong> be less than the number of queries in <code>queryPool</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdEndQuery-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdEndQuery-queryPool-parameter",
+          "text": " <code>queryPool</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkQueryPool\">VkQueryPool</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdEndQuery-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdEndQuery-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdEndQuery-commonparent",
+          "text": " Both of <code>commandBuffer</code>, and <code>queryPool</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdEndQuery-commandBuffer-01886",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> not be a protected command buffer"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-vkCmdEndQuery-query-00812",
+          "text": " If <code>vkCmdEndQuery</code> is called within a render pass instance, the sum of <code>query</code> and the number of bits set in the current subpass&#8217;s view mask <strong class=\"purple\">must</strong> be less than or equal to the number of queries in <code>queryPool</code>"
+        }
+      ],
+      "(VK_KHR_performance_query)": [
+        {
+          "vuid": "VUID-vkCmdEndQuery-queryPool-03227",
+          "text": " If <code>queryPool</code> was created with a <code>queryType</code> of <code>VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR</code> and one or more of the counters used to create <code>queryPool</code> was <code>VK_QUERY_SCOPE_COMMAND_BUFFER_KHR</code>, the <a href=\"#vkCmdEndQuery\">vkCmdEndQuery</a> <strong class=\"purple\">must</strong> be the last recorded command in <code>commandBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdEndQuery-queryPool-03228",
+          "text": " If <code>queryPool</code> was created with a <code>queryType</code> of <code>VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR</code> and one or more of the counters used to create <code>queryPool</code> was <code>VK_QUERY_SCOPE_RENDER_PASS_KHR</code>, the <a href=\"#vkCmdEndQuery\">vkCmdEndQuery</a> <strong class=\"purple\">must</strong> not be recorded within a render pass instance"
+        }
+      ]
+    },
+    "vkCmdEndQueryIndexedEXT": {
+      "(VK_EXT_transform_feedback)": [
+        {
+          "vuid": "VUID-vkCmdEndQueryIndexedEXT-None-02342",
+          "text": " All queries used by the command <strong class=\"purple\">must</strong> be <a href=\"#queries-operation-active\">active</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdEndQueryIndexedEXT-query-02343",
+          "text": " <code>query</code> <strong class=\"purple\">must</strong> be less than the number of queries in <code>queryPool</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdEndQueryIndexedEXT-queryType-02346",
+          "text": " If the <code>queryType</code> used to create <code>queryPool</code> was <code>VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT</code> the <code>index</code> parameter <strong class=\"purple\">must</strong> be less than <code>VkPhysicalDeviceTransformFeedbackPropertiesEXT</code>::<code>maxTransformFeedbackStreams</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdEndQueryIndexedEXT-queryType-02347",
+          "text": " If the <code>queryType</code> used to create <code>queryPool</code> was not <code>VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT</code> the <code>index</code> <strong class=\"purple\">must</strong> be zero"
+        },
+        {
+          "vuid": "VUID-vkCmdEndQueryIndexedEXT-queryType-02723",
+          "text": " If the <code>queryType</code> used to create <code>queryPool</code> was <code>VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT</code> <code>index</code> <strong class=\"purple\">must</strong> equal the <code>index</code> used to begin the query"
+        },
+        {
+          "vuid": "VUID-vkCmdEndQueryIndexedEXT-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdEndQueryIndexedEXT-queryPool-parameter",
+          "text": " <code>queryPool</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkQueryPool\">VkQueryPool</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdEndQueryIndexedEXT-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdEndQueryIndexedEXT-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdEndQueryIndexedEXT-commonparent",
+          "text": " Both of <code>commandBuffer</code>, and <code>queryPool</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_EXT_transform_feedback)+(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdEndQueryIndexedEXT-commandBuffer-02344",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> not be a protected command buffer"
+        }
+      ],
+      "(VK_EXT_transform_feedback)+(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-vkCmdEndQueryIndexedEXT-query-02345",
+          "text": " If <code>vkCmdEndQueryIndexedEXT</code> is called within a render pass instance, the sum of <code>query</code> and the number of bits set in the current subpass&#8217;s view mask <strong class=\"purple\">must</strong> be less than or equal to the number of queries in <code>queryPool</code>"
+        }
+      ]
+    },
+    "vkGetQueryPoolResults": {
+      "core": [
+        {
+          "vuid": "VUID-vkGetQueryPoolResults-firstQuery-00813",
+          "text": " <code>firstQuery</code> <strong class=\"purple\">must</strong> be less than the number of queries in <code>queryPool</code>"
+        },
+        {
+          "vuid": "VUID-vkGetQueryPoolResults-flags-02827",
+          "text": " If <code>VK_QUERY_RESULT_64_BIT</code> is not set in <code>flags</code>, then <code>pData</code> and <code>stride</code> <strong class=\"purple\">must</strong> be multiples of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-vkGetQueryPoolResults-flags-00815",
+          "text": " If <code>VK_QUERY_RESULT_64_BIT</code> is set in <code>flags</code> then <code>pData</code> and <code>stride</code> <strong class=\"purple\">must</strong> be multiples of <code>8</code>"
+        },
+        {
+          "vuid": "VUID-vkGetQueryPoolResults-firstQuery-00816",
+          "text": " The sum of <code>firstQuery</code> and <code>queryCount</code> <strong class=\"purple\">must</strong> be less than or equal to the number of queries in <code>queryPool</code>"
+        },
+        {
+          "vuid": "VUID-vkGetQueryPoolResults-dataSize-00817",
+          "text": " <code>dataSize</code> <strong class=\"purple\">must</strong> be large enough to contain the result of each query, as described <a href=\"#queries-operation-memorylayout\">here</a>"
+        },
+        {
+          "vuid": "VUID-vkGetQueryPoolResults-queryType-00818",
+          "text": " If the <code>queryType</code> used to create <code>queryPool</code> was <code>VK_QUERY_TYPE_TIMESTAMP</code>, <code>flags</code> <strong class=\"purple\">must</strong> not contain <code>VK_QUERY_RESULT_PARTIAL_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkGetQueryPoolResults-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetQueryPoolResults-queryPool-parameter",
+          "text": " <code>queryPool</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkQueryPool\">VkQueryPool</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetQueryPoolResults-pData-parameter",
+          "text": " <code>pData</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>dataSize</code> bytes"
+        },
+        {
+          "vuid": "VUID-vkGetQueryPoolResults-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkQueryResultFlagBits\">VkQueryResultFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-vkGetQueryPoolResults-dataSize-arraylength",
+          "text": " <code>dataSize</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkGetQueryPoolResults-queryPool-parent",
+          "text": " <code>queryPool</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ],
+      "(VK_KHR_performance_query)": [
+        {
+          "vuid": "VUID-vkGetQueryPoolResults-flags-02828",
+          "text": " If <code>VK_QUERY_RESULT_64_BIT</code> is not set in <code>flags</code> and the <code>queryType</code> used to create <code>queryPool</code> was not <code>VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR</code>, then <code>pData</code> and <code>stride</code> <strong class=\"purple\">must</strong> be multiples of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-vkGetQueryPoolResults-queryType-03229",
+          "text": " If the <code>queryType</code> used to create <code>queryPool</code> was <code>VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR</code>, then <code>pData</code> and <code>stride</code> <strong class=\"purple\">must</strong> be multiples of the size of <a href=\"#VkPerformanceCounterResultKHR\">VkPerformanceCounterResultKHR</a>"
+        },
+        {
+          "vuid": "VUID-vkGetQueryPoolResults-queryType-03230",
+          "text": " If the <code>queryType</code> used to create <code>queryPool</code> was <code>VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR</code>, <code>flags</code> <strong class=\"purple\">must</strong> not contain <code>VK_QUERY_RESULT_WITH_AVAILABILITY_BIT</code>, <code>VK_QUERY_RESULT_PARTIAL_BIT</code> or <code>VK_QUERY_RESULT_64_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkGetQueryPoolResults-queryType-03231",
+          "text": " If the <code>queryType</code> used to create <code>queryPool</code> was <code>VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR</code>, the <code>queryPool</code> <strong class=\"purple\">must</strong> have been recorded once for each pass as retrieved via a call to <a href=\"#vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR\">vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR</a>"
+        }
+      ]
+    },
+    "vkCmdCopyQueryPoolResults": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdCopyQueryPoolResults-dstOffset-00819",
+          "text": " <code>dstOffset</code> <strong class=\"purple\">must</strong> be less than the size of <code>dstBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyQueryPoolResults-firstQuery-00820",
+          "text": " <code>firstQuery</code> <strong class=\"purple\">must</strong> be less than the number of queries in <code>queryPool</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyQueryPoolResults-firstQuery-00821",
+          "text": " The sum of <code>firstQuery</code> and <code>queryCount</code> <strong class=\"purple\">must</strong> be less than or equal to the number of queries in <code>queryPool</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyQueryPoolResults-flags-00822",
+          "text": " If <code>VK_QUERY_RESULT_64_BIT</code> is not set in <code>flags</code> then <code>dstOffset</code> and <code>stride</code> <strong class=\"purple\">must</strong> be multiples of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyQueryPoolResults-flags-00823",
+          "text": " If <code>VK_QUERY_RESULT_64_BIT</code> is set in <code>flags</code> then <code>dstOffset</code> and <code>stride</code> <strong class=\"purple\">must</strong> be multiples of <code>8</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyQueryPoolResults-dstBuffer-00824",
+          "text": " <code>dstBuffer</code> <strong class=\"purple\">must</strong> have enough storage, from <code>dstOffset</code>, to contain the result of each query, as described <a href=\"#queries-operation-memorylayout\">here</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyQueryPoolResults-dstBuffer-00825",
+          "text": " <code>dstBuffer</code> <strong class=\"purple\">must</strong> have been created with <code>VK_BUFFER_USAGE_TRANSFER_DST_BIT</code> usage flag"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyQueryPoolResults-dstBuffer-00826",
+          "text": " If <code>dstBuffer</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyQueryPoolResults-queryType-00827",
+          "text": " If the <code>queryType</code> used to create <code>queryPool</code> was <code>VK_QUERY_TYPE_TIMESTAMP</code>, <code>flags</code> <strong class=\"purple\">must</strong> not contain <code>VK_QUERY_RESULT_PARTIAL_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyQueryPoolResults-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyQueryPoolResults-queryPool-parameter",
+          "text": " <code>queryPool</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkQueryPool\">VkQueryPool</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyQueryPoolResults-dstBuffer-parameter",
+          "text": " <code>dstBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyQueryPoolResults-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkQueryResultFlagBits\">VkQueryResultFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyQueryPoolResults-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyQueryPoolResults-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyQueryPoolResults-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called outside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyQueryPoolResults-commonparent",
+          "text": " Each of <code>commandBuffer</code>, <code>dstBuffer</code>, and <code>queryPool</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_KHR_performance_query)": [
+        {
+          "vuid": "VUID-vkCmdCopyQueryPoolResults-queryType-03232",
+          "text": " If the <code>queryType</code> used to create <code>queryPool</code> was <code>VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR</code>, <a href=\"#VkPhysicalDevicePerformanceQueryPropertiesKHR\">VkPhysicalDevicePerformanceQueryPropertiesKHR</a>::<code>allowCommandBufferQueryCopies</code> <strong class=\"purple\">must</strong> be <code>VK_TRUE</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyQueryPoolResults-queryType-03233",
+          "text": " If the <code>queryType</code> used to create <code>queryPool</code> was <code>VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR</code>, <code>flags</code> <strong class=\"purple\">must</strong> not contain <code>VK_QUERY_RESULT_WITH_AVAILABILITY_BIT</code>, <code>VK_QUERY_RESULT_PARTIAL_BIT</code> or <code>VK_QUERY_RESULT_64_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyQueryPoolResults-queryType-03234",
+          "text": " If the <code>queryType</code> used to create <code>queryPool</code> was <code>VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR</code>, the <code>queryPool</code> <strong class=\"purple\">must</strong> have been submitted once for each pass as retrieved via a call to <a href=\"#vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR\">vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR</a>"
+        }
+      ],
+      "(VK_INTEL_performance_query)": [
+        {
+          "vuid": "VUID-vkCmdCopyQueryPoolResults-queryType-02734",
+          "text": " <a href=\"#vkCmdCopyQueryPoolResults\">vkCmdCopyQueryPoolResults</a> <strong class=\"purple\">must</strong> not be called if the <code>queryType</code> used to create <code>queryPool</code> was <code>VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL</code>"
+        }
+      ]
+    },
+    "vkCmdWriteTimestamp": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp-queryPool-01416",
+          "text": " <code>queryPool</code> <strong class=\"purple\">must</strong> have been created with a <code>queryType</code> of <code>VK_QUERY_TYPE_TIMESTAMP</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp-queryPool-00828",
+          "text": " The query identified by <code>queryPool</code> and <code>query</code> <strong class=\"purple\">must</strong> be <em>unavailable</em>"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp-timestampValidBits-00829",
+          "text": " The command pool&#8217;s queue family <strong class=\"purple\">must</strong> support a non-zero <code>timestampValidBits</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp-pipelineStage-parameter",
+          "text": " <code>pipelineStage</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipelineStageFlagBits\">VkPipelineStageFlagBits</a> value"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp-queryPool-parameter",
+          "text": " <code>queryPool</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkQueryPool\">VkQueryPool</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support transfer, graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp-commonparent",
+          "text": " Both of <code>commandBuffer</code>, and <code>queryPool</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp-None-00830",
+          "text": " All queries used by the command <strong class=\"purple\">must</strong> be unavailable"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteTimestamp-query-00831",
+          "text": " If <code>vkCmdWriteTimestamp</code> is called within a render pass instance, the sum of <code>query</code> and the number of bits set in the current subpass&#8217;s view mask <strong class=\"purple\">must</strong> be less than or equal to the number of queries in <code>queryPool</code>"
+        }
+      ]
+    },
+    "vkAcquireProfilingLockKHR": {
+      "(VK_KHR_performance_query)": [
+        {
+          "vuid": "VUID-vkAcquireProfilingLockKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkAcquireProfilingLockKHR-pInfo-parameter",
+          "text": " <code>pInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAcquireProfilingLockInfoKHR\">VkAcquireProfilingLockInfoKHR</a> structure"
+        }
+      ]
+    },
+    "VkAcquireProfilingLockInfoKHR": {
+      "(VK_KHR_performance_query)": [
+        {
+          "vuid": "VUID-VkAcquireProfilingLockInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkAcquireProfilingLockInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkAcquireProfilingLockInfoKHR-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        }
+      ]
+    },
+    "vkReleaseProfilingLockKHR": {
+      "(VK_KHR_performance_query)": [
+        {
+          "vuid": "VUID-vkReleaseProfilingLockKHR-device-03235",
+          "text": " The profiling lock of <code>device</code> <strong class=\"purple\">must</strong> have been held via a previous successful call to <a href=\"#vkAcquireProfilingLockKHR\">vkAcquireProfilingLockKHR</a>"
+        },
+        {
+          "vuid": "VUID-vkReleaseProfilingLockKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        }
+      ]
+    },
+    "vkInitializePerformanceApiINTEL": {
+      "(VK_INTEL_performance_query)+(VK_INTEL_performance_query)": [
+        {
+          "vuid": "VUID-vkInitializePerformanceApiINTEL-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkInitializePerformanceApiINTEL-pInitializeInfo-parameter",
+          "text": " <code>pInitializeInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkInitializePerformanceApiInfoINTEL\">VkInitializePerformanceApiInfoINTEL</a> structure"
+        }
+      ]
+    },
+    "VkInitializePerformanceApiInfoINTEL": {
+      "(VK_INTEL_performance_query)+(VK_INTEL_performance_query)": [
+        {
+          "vuid": "VUID-VkInitializePerformanceApiInfoINTEL-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL</code>"
+        },
+        {
+          "vuid": "VUID-VkInitializePerformanceApiInfoINTEL-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "vkUninitializePerformanceApiINTEL": {
+      "(VK_INTEL_performance_query)+(VK_INTEL_performance_query)": [
+        {
+          "vuid": "VUID-vkUninitializePerformanceApiINTEL-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        }
+      ]
+    },
+    "vkGetPerformanceParameterINTEL": {
+      "(VK_INTEL_performance_query)+(VK_INTEL_performance_query)": [
+        {
+          "vuid": "VUID-vkGetPerformanceParameterINTEL-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPerformanceParameterINTEL-parameter-parameter",
+          "text": " <code>parameter</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPerformanceParameterTypeINTEL\">VkPerformanceParameterTypeINTEL</a> value"
+        },
+        {
+          "vuid": "VUID-vkGetPerformanceParameterINTEL-pValue-parameter",
+          "text": " <code>pValue</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkPerformanceValueINTEL\">VkPerformanceValueINTEL</a> structure"
+        }
+      ]
+    },
+    "VkPerformanceValueINTEL": {
+      "(VK_INTEL_performance_query)+(VK_INTEL_performance_query)": [
+        {
+          "vuid": "VUID-VkPerformanceValueINTEL-type-parameter",
+          "text": " <code>type</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPerformanceValueTypeINTEL\">VkPerformanceValueTypeINTEL</a> value"
+        },
+        {
+          "vuid": "VUID-VkPerformanceValueINTEL-data-parameter",
+          "text": " <code>data</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPerformanceValueDataINTEL\">VkPerformanceValueDataINTEL</a> union"
+        }
+      ]
+    },
+    "VkPerformanceValueDataINTEL": {
+      "(VK_INTEL_performance_query)+(VK_INTEL_performance_query)": [
+        {
+          "vuid": "VUID-VkPerformanceValueDataINTEL-valueString-parameter",
+          "text": " <code>valueString</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid"
+        }
+      ]
+    },
+    "VkQueryPoolCreateInfoINTEL": {
+      "(VK_INTEL_performance_query)+(VK_INTEL_performance_query)": [
+        {
+          "vuid": "VUID-VkQueryPoolCreateInfoINTEL-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL</code>"
+        },
+        {
+          "vuid": "VUID-VkQueryPoolCreateInfoINTEL-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkQueryPoolCreateInfoINTEL-performanceCountersSampling-parameter",
+          "text": " <code>performanceCountersSampling</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkQueryPoolSamplingModeINTEL\">VkQueryPoolSamplingModeINTEL</a> value"
+        }
+      ]
+    },
+    "vkCmdSetPerformanceMarkerINTEL": {
+      "(VK_INTEL_performance_query)+(VK_INTEL_performance_query)": [
+        {
+          "vuid": "VUID-vkCmdSetPerformanceMarkerINTEL-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdSetPerformanceMarkerINTEL-pMarkerInfo-parameter",
+          "text": " <code>pMarkerInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkPerformanceMarkerInfoINTEL\">VkPerformanceMarkerInfoINTEL</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCmdSetPerformanceMarkerINTEL-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetPerformanceMarkerINTEL-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, compute, or transfer operations"
+        }
+      ]
+    },
+    "VkPerformanceMarkerInfoINTEL": {
+      "(VK_INTEL_performance_query)+(VK_INTEL_performance_query)": [
+        {
+          "vuid": "VUID-VkPerformanceMarkerInfoINTEL-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL</code>"
+        },
+        {
+          "vuid": "VUID-VkPerformanceMarkerInfoINTEL-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "vkCmdSetPerformanceStreamMarkerINTEL": {
+      "(VK_INTEL_performance_query)+(VK_INTEL_performance_query)": [
+        {
+          "vuid": "VUID-vkCmdSetPerformanceStreamMarkerINTEL-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdSetPerformanceStreamMarkerINTEL-pMarkerInfo-parameter",
+          "text": " <code>pMarkerInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkPerformanceStreamMarkerInfoINTEL\">VkPerformanceStreamMarkerInfoINTEL</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCmdSetPerformanceStreamMarkerINTEL-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetPerformanceStreamMarkerINTEL-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, compute, or transfer operations"
+        }
+      ]
+    },
+    "VkPerformanceStreamMarkerInfoINTEL": {
+      "(VK_INTEL_performance_query)+(VK_INTEL_performance_query)": [
+        {
+          "vuid": "VUID-VkPerformanceStreamMarkerInfoINTEL-marker-02735",
+          "text": " The value written by the application into <code>marker</code> <strong class=\"purple\">must</strong> only used the valid bits as reported by <a href=\"#vkGetPerformanceParameterINTEL\">vkGetPerformanceParameterINTEL</a> with the <code>VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL</code>."
+        },
+        {
+          "vuid": "VUID-VkPerformanceStreamMarkerInfoINTEL-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL</code>"
+        },
+        {
+          "vuid": "VUID-VkPerformanceStreamMarkerInfoINTEL-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "vkCmdSetPerformanceOverrideINTEL": {
+      "(VK_INTEL_performance_query)+(VK_INTEL_performance_query)": [
+        {
+          "vuid": "VUID-vkCmdSetPerformanceOverrideINTEL-pOverrideInfo-02736",
+          "text": " <code>pOverrideInfo</code> <strong class=\"purple\">must</strong> not be used with a <a href=\"#VkPerformanceOverrideTypeINTEL\">VkPerformanceOverrideTypeINTEL</a> that is not reported available by <code>vkGetPerformanceParameterINTEL</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdSetPerformanceOverrideINTEL-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdSetPerformanceOverrideINTEL-pOverrideInfo-parameter",
+          "text": " <code>pOverrideInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkPerformanceOverrideInfoINTEL\">VkPerformanceOverrideInfoINTEL</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCmdSetPerformanceOverrideINTEL-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetPerformanceOverrideINTEL-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, compute, or transfer operations"
+        }
+      ]
+    },
+    "VkPerformanceOverrideInfoINTEL": {
+      "(VK_INTEL_performance_query)+(VK_INTEL_performance_query)": [
+        {
+          "vuid": "VUID-VkPerformanceOverrideInfoINTEL-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL</code>"
+        },
+        {
+          "vuid": "VUID-VkPerformanceOverrideInfoINTEL-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkPerformanceOverrideInfoINTEL-type-parameter",
+          "text": " <code>type</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPerformanceOverrideTypeINTEL\">VkPerformanceOverrideTypeINTEL</a> value"
+        }
+      ]
+    },
+    "vkAcquirePerformanceConfigurationINTEL": {
+      "(VK_INTEL_performance_query)+(VK_INTEL_performance_query)": [
+        {
+          "vuid": "VUID-vkAcquirePerformanceConfigurationINTEL-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkAcquirePerformanceConfigurationINTEL-pAcquireInfo-parameter",
+          "text": " <code>pAcquireInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkPerformanceConfigurationAcquireInfoINTEL\">VkPerformanceConfigurationAcquireInfoINTEL</a> structure"
+        },
+        {
+          "vuid": "VUID-vkAcquirePerformanceConfigurationINTEL-pConfiguration-parameter",
+          "text": " <code>pConfiguration</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkPerformanceConfigurationINTEL\">VkPerformanceConfigurationINTEL</a> handle"
+        }
+      ]
+    },
+    "VkPerformanceConfigurationAcquireInfoINTEL": {
+      "(VK_INTEL_performance_query)+(VK_INTEL_performance_query)": [
+        {
+          "vuid": "VUID-VkPerformanceConfigurationAcquireInfoINTEL-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL</code>"
+        },
+        {
+          "vuid": "VUID-VkPerformanceConfigurationAcquireInfoINTEL-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkPerformanceConfigurationAcquireInfoINTEL-type-parameter",
+          "text": " <code>type</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPerformanceConfigurationTypeINTEL\">VkPerformanceConfigurationTypeINTEL</a> value"
+        }
+      ]
+    },
+    "vkQueueSetPerformanceConfigurationINTEL": {
+      "(VK_INTEL_performance_query)+(VK_INTEL_performance_query)": [
+        {
+          "vuid": "VUID-vkQueueSetPerformanceConfigurationINTEL-queue-parameter",
+          "text": " <code>queue</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkQueue\">VkQueue</a> handle"
+        },
+        {
+          "vuid": "VUID-vkQueueSetPerformanceConfigurationINTEL-configuration-parameter",
+          "text": " <code>configuration</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPerformanceConfigurationINTEL\">VkPerformanceConfigurationINTEL</a> handle"
+        },
+        {
+          "vuid": "VUID-vkQueueSetPerformanceConfigurationINTEL-commonparent",
+          "text": " Both of <code>configuration</code>, and <code>queue</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
+    "vkReleasePerformanceConfigurationINTEL": {
+      "(VK_INTEL_performance_query)+(VK_INTEL_performance_query)": [
+        {
+          "vuid": "VUID-vkReleasePerformanceConfigurationINTEL-configuration-02737",
+          "text": " <code>configuration</code> <strong class=\"purple\">must</strong> not be released before all command buffers submitted while the configuration was set are in <a href=\"#commandbuffers-lifecycle\">pending state</a>."
+        },
+        {
+          "vuid": "VUID-vkReleasePerformanceConfigurationINTEL-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkReleasePerformanceConfigurationINTEL-configuration-parameter",
+          "text": " <code>configuration</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPerformanceConfigurationINTEL\">VkPerformanceConfigurationINTEL</a> handle"
+        },
+        {
+          "vuid": "VUID-vkReleasePerformanceConfigurationINTEL-configuration-parent",
+          "text": " <code>configuration</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkCmdClearColorImage": {
+      "(VK_VERSION_1_1,VK_KHR_maintenance1)": [
+        {
+          "vuid": "VUID-vkCmdClearColorImage-image-01993",
+          "text": " The <a href=\"#resources-image-format-features\">format features</a> of <code>image</code> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_TRANSFER_DST_BIT</code>."
+        }
+      ],
+      "core": [
+        {
+          "vuid": "VUID-vkCmdClearColorImage-image-00002",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_TRANSFER_DST_BIT</code> usage flag"
+        },
+        {
+          "vuid": "VUID-vkCmdClearColorImage-image-00003",
+          "text": " If <code>image</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdClearColorImage-imageLayout-00004",
+          "text": " <code>imageLayout</code> <strong class=\"purple\">must</strong> specify the layout of the image subresource ranges of <code>image</code> specified in <code>pRanges</code> at the time this command is executed on a <code>VkDevice</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdClearColorImage-aspectMask-02498",
+          "text": " The <a href=\"#VkImageSubresourceRange\">VkImageSubresourceRange</a>::<code>aspectMask</code> members of the elements of the <code>pRanges</code> array <strong class=\"purple\">must</strong> each only include <code>VK_IMAGE_ASPECT_COLOR_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdClearColorImage-baseMipLevel-01470",
+          "text": " The <a href=\"#VkImageSubresourceRange\">VkImageSubresourceRange</a>::<code>baseMipLevel</code> members of the elements of the <code>pRanges</code> array <strong class=\"purple\">must</strong> each be less than the <code>mipLevels</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>image</code> was created"
+        },
+        {
+          "vuid": "VUID-vkCmdClearColorImage-pRanges-01692",
+          "text": " For each <a href=\"#VkImageSubresourceRange\">VkImageSubresourceRange</a> element of <code>pRanges</code>, if the <code>levelCount</code> member is not <code>VK_REMAINING_MIP_LEVELS</code>, then <span class=\"eq\"><code>baseMipLevel</code> &#43; <code>levelCount</code></span> <strong class=\"purple\">must</strong> be less than the <code>mipLevels</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>image</code> was created"
+        },
+        {
+          "vuid": "VUID-vkCmdClearColorImage-baseArrayLayer-01472",
+          "text": " The <a href=\"#VkImageSubresourceRange\">VkImageSubresourceRange</a>::<code>baseArrayLayer</code> members of the elements of the <code>pRanges</code> array <strong class=\"purple\">must</strong> each be less than the <code>arrayLayers</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>image</code> was created"
+        },
+        {
+          "vuid": "VUID-vkCmdClearColorImage-pRanges-01693",
+          "text": " For each <a href=\"#VkImageSubresourceRange\">VkImageSubresourceRange</a> element of <code>pRanges</code>, if the <code>layerCount</code> member is not <code>VK_REMAINING_ARRAY_LAYERS</code>, then <span class=\"eq\"><code>baseArrayLayer</code> &#43; <code>layerCount</code></span> <strong class=\"purple\">must</strong> be less than the <code>arrayLayers</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>image</code> was created"
+        },
+        {
+          "vuid": "VUID-vkCmdClearColorImage-image-00007",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> not have a compressed or depth/stencil format"
+        },
+        {
+          "vuid": "VUID-vkCmdClearColorImage-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdClearColorImage-image-parameter",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImage\">VkImage</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdClearColorImage-imageLayout-parameter",
+          "text": " <code>imageLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageLayout\">VkImageLayout</a> value"
+        },
+        {
+          "vuid": "VUID-vkCmdClearColorImage-pColor-parameter",
+          "text": " <code>pColor</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkClearColorValue\">VkClearColorValue</a> union"
+        },
+        {
+          "vuid": "VUID-vkCmdClearColorImage-pRanges-parameter",
+          "text": " <code>pRanges</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>rangeCount</code> valid <a href=\"#VkImageSubresourceRange\">VkImageSubresourceRange</a> structures"
+        },
+        {
+          "vuid": "VUID-vkCmdClearColorImage-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdClearColorImage-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdClearColorImage-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called outside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdClearColorImage-rangeCount-arraylength",
+          "text": " <code>rangeCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdClearColorImage-commonparent",
+          "text": " Both of <code>commandBuffer</code>, and <code>image</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-vkCmdClearColorImage-image-01545",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> not use a format listed in <a href=\"#formats-requiring-sampler-ycbcr-conversion\">Formats requiring sampler Y&#8217;C<sub>B</sub>C<sub>R</sub> conversion for <code>VK_IMAGE_ASPECT_COLOR_BIT</code> image views</a>"
+        }
+      ],
+      "!(VK_KHR_shared_presentable_image)": [
+        {
+          "vuid": "VUID-vkCmdClearColorImage-imageLayout-00005",
+          "text": " <code>imageLayout</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL</code> or <code>VK_IMAGE_LAYOUT_GENERAL</code>"
+        }
+      ],
+      "(VK_KHR_shared_presentable_image)": [
+        {
+          "vuid": "VUID-vkCmdClearColorImage-imageLayout-01394",
+          "text": " <code>imageLayout</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_GENERAL</code>, or <code>VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR</code>"
+        }
+      ],
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdClearColorImage-commandBuffer-01805",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, then <code>image</code> <strong class=\"purple\">must</strong> not be a protected image"
+        },
+        {
+          "vuid": "VUID-vkCmdClearColorImage-commandBuffer-01806",
+          "text": " If <code>commandBuffer</code> is a protected command buffer, then <code>image</code> <strong class=\"purple\">must</strong> not be an unprotected image"
+        }
+      ]
+    },
+    "vkCmdClearDepthStencilImage": {
+      "(VK_VERSION_1_1,VK_KHR_maintenance1)": [
+        {
+          "vuid": "VUID-vkCmdClearDepthStencilImage-image-01994",
+          "text": " The <a href=\"#resources-image-format-features\">format features</a> of <code>image</code> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_TRANSFER_DST_BIT</code>."
+        }
+      ],
+      "!(VK_EXT_separate_stencil_usage)": [
+        {
+          "vuid": "VUID-vkCmdClearDepthStencilImage-image-00009",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_TRANSFER_DST_BIT</code> usage flag"
+        }
+      ],
+      "(VK_EXT_separate_stencil_usage)": [
+        {
+          "vuid": "VUID-vkCmdClearDepthStencilImage-pRanges-02658",
+          "text": " If any element of <code>pRanges.aspect</code> includes <code>VK_IMAGE_ASPECT_STENCIL_BIT</code>, and <code>image</code> was created with <a href=\"#VkImageStencilUsageCreateInfoEXT\">separate stencil usage</a>, <code>VK_IMAGE_USAGE_TRANSFER_DST_BIT</code> <strong class=\"purple\">must</strong> have been included in the <a href=\"#VkImageStencilUsageCreateInfoEXT\">VkImageStencilUsageCreateInfoEXT</a>::<code>stencilUsage</code> used to create <code>image</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdClearDepthStencilImage-pRanges-02659",
+          "text": " If any element of <code>pRanges.aspect</code> includes <code>VK_IMAGE_ASPECT_STENCIL_BIT</code>, and <code>image</code> was not created with <a href=\"#VkImageStencilUsageCreateInfoEXT\">separate stencil usage</a>, <code>VK_IMAGE_USAGE_TRANSFER_DST_BIT</code> <strong class=\"purple\">must</strong> have been included in the <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a>::<code>usage</code> used to create <code>image</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdClearDepthStencilImage-pRanges-02660",
+          "text": " If any element of <code>pRanges.aspect</code> includes <code>VK_IMAGE_ASPECT_DEPTH_BIT</code>, <code>VK_IMAGE_USAGE_TRANSFER_DST_BIT</code> <strong class=\"purple\">must</strong> have been included in the <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a>::<code>usage</code> used to create <code>image</code>"
+        }
+      ],
+      "core": [
+        {
+          "vuid": "VUID-vkCmdClearDepthStencilImage-image-00010",
+          "text": " If <code>image</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdClearDepthStencilImage-imageLayout-00011",
+          "text": " <code>imageLayout</code> <strong class=\"purple\">must</strong> specify the layout of the image subresource ranges of <code>image</code> specified in <code>pRanges</code> at the time this command is executed on a <code>VkDevice</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdClearDepthStencilImage-imageLayout-00012",
+          "text": " <code>imageLayout</code> <strong class=\"purple\">must</strong> be either of <code>VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL</code> or <code>VK_IMAGE_LAYOUT_GENERAL</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdClearDepthStencilImage-aspectMask-02824",
+          "text": " The <a href=\"#VkImageSubresourceRange\">VkImageSubresourceRange</a>::<code>aspectMask</code> member of each element of the <code>pRanges</code> array <strong class=\"purple\">must</strong> not include bits other than <code>VK_IMAGE_ASPECT_DEPTH_BIT</code> or <code>VK_IMAGE_ASPECT_STENCIL_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdClearDepthStencilImage-image-02825",
+          "text": " If the <code>image</code>&#8217;s format does not have a stencil component, then the <a href=\"#VkImageSubresourceRange\">VkImageSubresourceRange</a>::<code>aspectMask</code> member of each element of the <code>pRanges</code> array <strong class=\"purple\">must</strong> not include the <code>VK_IMAGE_ASPECT_STENCIL_BIT</code> bit"
+        },
+        {
+          "vuid": "VUID-vkCmdClearDepthStencilImage-image-02826",
+          "text": " If the <code>image</code>&#8217;s format does not have a depth component, then the <a href=\"#VkImageSubresourceRange\">VkImageSubresourceRange</a>::<code>aspectMask</code> member of each element of the <code>pRanges</code> array <strong class=\"purple\">must</strong> not include the <code>VK_IMAGE_ASPECT_DEPTH_BIT</code> bit"
+        },
+        {
+          "vuid": "VUID-vkCmdClearDepthStencilImage-baseMipLevel-01474",
+          "text": " The <a href=\"#VkImageSubresourceRange\">VkImageSubresourceRange</a>::<code>baseMipLevel</code> members of the elements of the <code>pRanges</code> array <strong class=\"purple\">must</strong> each be less than the <code>mipLevels</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>image</code> was created"
+        },
+        {
+          "vuid": "VUID-vkCmdClearDepthStencilImage-pRanges-01694",
+          "text": " For each <a href=\"#VkImageSubresourceRange\">VkImageSubresourceRange</a> element of <code>pRanges</code>, if the <code>levelCount</code> member is not <code>VK_REMAINING_MIP_LEVELS</code>, then <span class=\"eq\"><code>baseMipLevel</code> &#43; <code>levelCount</code></span> <strong class=\"purple\">must</strong> be less than the <code>mipLevels</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>image</code> was created"
+        },
+        {
+          "vuid": "VUID-vkCmdClearDepthStencilImage-baseArrayLayer-01476",
+          "text": " The <a href=\"#VkImageSubresourceRange\">VkImageSubresourceRange</a>::<code>baseArrayLayer</code> members of the elements of the <code>pRanges</code> array <strong class=\"purple\">must</strong> each be less than the <code>arrayLayers</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>image</code> was created"
+        },
+        {
+          "vuid": "VUID-vkCmdClearDepthStencilImage-pRanges-01695",
+          "text": " For each <a href=\"#VkImageSubresourceRange\">VkImageSubresourceRange</a> element of <code>pRanges</code>, if the <code>layerCount</code> member is not <code>VK_REMAINING_ARRAY_LAYERS</code>, then <span class=\"eq\"><code>baseArrayLayer</code> &#43; <code>layerCount</code></span> <strong class=\"purple\">must</strong> be less than the <code>arrayLayers</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>image</code> was created"
+        },
+        {
+          "vuid": "VUID-vkCmdClearDepthStencilImage-image-00014",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> have a depth/stencil format"
+        },
+        {
+          "vuid": "VUID-vkCmdClearDepthStencilImage-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdClearDepthStencilImage-image-parameter",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImage\">VkImage</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdClearDepthStencilImage-imageLayout-parameter",
+          "text": " <code>imageLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageLayout\">VkImageLayout</a> value"
+        },
+        {
+          "vuid": "VUID-vkCmdClearDepthStencilImage-pDepthStencil-parameter",
+          "text": " <code>pDepthStencil</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkClearDepthStencilValue\">VkClearDepthStencilValue</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCmdClearDepthStencilImage-pRanges-parameter",
+          "text": " <code>pRanges</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>rangeCount</code> valid <a href=\"#VkImageSubresourceRange\">VkImageSubresourceRange</a> structures"
+        },
+        {
+          "vuid": "VUID-vkCmdClearDepthStencilImage-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdClearDepthStencilImage-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdClearDepthStencilImage-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called outside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdClearDepthStencilImage-rangeCount-arraylength",
+          "text": " <code>rangeCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdClearDepthStencilImage-commonparent",
+          "text": " Both of <code>commandBuffer</code>, and <code>image</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdClearDepthStencilImage-commandBuffer-01807",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, then <code>image</code> <strong class=\"purple\">must</strong> not be a protected image"
+        },
+        {
+          "vuid": "VUID-vkCmdClearDepthStencilImage-commandBuffer-01808",
+          "text": " If <code>commandBuffer</code> is a protected command buffer, then <code>image</code> <strong class=\"purple\">must</strong> not be an unprotected image"
+        }
+      ]
+    },
+    "vkCmdClearAttachments": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdClearAttachments-aspectMask-02501",
+          "text": " If the <code>aspectMask</code> member of any element of <code>pAttachments</code> contains <code>VK_IMAGE_ASPECT_COLOR_BIT</code>, then the <code>colorAttachment</code> member of that element <strong class=\"purple\">must</strong> either refer to a color attachment which is <code>VK_ATTACHMENT_UNUSED</code>, or <strong class=\"purple\">must</strong> be a valid color attachment."
+        },
+        {
+          "vuid": "VUID-vkCmdClearAttachments-aspectMask-02502",
+          "text": " If the <code>aspectMask</code> member of any element of <code>pAttachments</code> contains <code>VK_IMAGE_ASPECT_DEPTH_BIT</code>, then the current subpass' depth/stencil attachment <strong class=\"purple\">must</strong> either be <code>VK_ATTACHMENT_UNUSED</code>, or <strong class=\"purple\">must</strong> have a depth component"
+        },
+        {
+          "vuid": "VUID-vkCmdClearAttachments-aspectMask-02503",
+          "text": " If the <code>aspectMask</code> member of any element of <code>pAttachments</code> contains <code>VK_IMAGE_ASPECT_STENCIL_BIT</code>, then the current subpass' depth/stencil attachment <strong class=\"purple\">must</strong> either be <code>VK_ATTACHMENT_UNUSED</code>, or <strong class=\"purple\">must</strong> have a stencil component"
+        },
+        {
+          "vuid": "VUID-vkCmdClearAttachments-rect-02682",
+          "text": " The <code>rect</code> member of each element of <code>pRects</code> <strong class=\"purple\">must</strong> have an <code>extent.width</code> greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdClearAttachments-rect-02683",
+          "text": " The <code>rect</code> member of each element of <code>pRects</code> <strong class=\"purple\">must</strong> have an <code>extent.height</code> greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdClearAttachments-pRects-00016",
+          "text": " The rectangular region specified by each element of <code>pRects</code> <strong class=\"purple\">must</strong> be contained within the render area of the current render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdClearAttachments-pRects-00017",
+          "text": " The layers specified by each element of <code>pRects</code> <strong class=\"purple\">must</strong> be contained within every attachment that <code>pAttachments</code> refers to"
+        },
+        {
+          "vuid": "VUID-vkCmdClearAttachments-layerCount-01934",
+          "text": " The <code>layerCount</code> member of each element of <code>pRects</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdClearAttachments-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdClearAttachments-pAttachments-parameter",
+          "text": " <code>pAttachments</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>attachmentCount</code> valid <a href=\"#VkClearAttachment\">VkClearAttachment</a> structures"
+        },
+        {
+          "vuid": "VUID-vkCmdClearAttachments-pRects-parameter",
+          "text": " <code>pRects</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>rectCount</code> <a href=\"#VkClearRect\">VkClearRect</a> structures"
+        },
+        {
+          "vuid": "VUID-vkCmdClearAttachments-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdClearAttachments-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdClearAttachments-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called inside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdClearAttachments-attachmentCount-arraylength",
+          "text": " <code>attachmentCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdClearAttachments-rectCount-arraylength",
+          "text": " <code>rectCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ],
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdClearAttachments-commandBuffer-02504",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, then each attachment to be cleared <strong class=\"purple\">must</strong> not be a protected image."
+        },
+        {
+          "vuid": "VUID-vkCmdClearAttachments-commandBuffer-02505",
+          "text": " If <code>commandBuffer</code> is a protected command buffer, then each attachment to be cleared <strong class=\"purple\">must</strong> not be an unprotected image."
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-vkCmdClearAttachments-baseArrayLayer-00018",
+          "text": " If the render pass instance this is recorded in uses multiview, then <code>baseArrayLayer</code> <strong class=\"purple\">must</strong> be zero and <code>layerCount</code> <strong class=\"purple\">must</strong> be one."
+        }
+      ]
+    },
+    "VkClearAttachment": {
+      "core": [
+        {
+          "vuid": "VUID-VkClearAttachment-aspectMask-00019",
+          "text": " If <code>aspectMask</code> includes <code>VK_IMAGE_ASPECT_COLOR_BIT</code>, it <strong class=\"purple\">must</strong> not include <code>VK_IMAGE_ASPECT_DEPTH_BIT</code> or <code>VK_IMAGE_ASPECT_STENCIL_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkClearAttachment-aspectMask-00020",
+          "text": " <code>aspectMask</code> <strong class=\"purple\">must</strong> not include <code>VK_IMAGE_ASPECT_METADATA_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkClearAttachment-clearValue-00021",
+          "text": " <code>clearValue</code> <strong class=\"purple\">must</strong> be a valid <code>VkClearValue</code> union"
+        },
+        {
+          "vuid": "VUID-VkClearAttachment-aspectMask-parameter",
+          "text": " <code>aspectMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkImageAspectFlagBits\">VkImageAspectFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkClearAttachment-aspectMask-requiredbitmask",
+          "text": " <code>aspectMask</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        }
+      ],
+      "(VK_EXT_image_drm_format_modifier)": [
+        {
+          "vuid": "VUID-VkClearAttachment-aspectMask-02246",
+          "text": " <code>aspectMask</code> <strong class=\"purple\">must</strong> not include <code>VK_IMAGE_ASPECT_MEMORY_PLANE_i_BIT_EXT</code> for any index <code>i</code>."
+        }
+      ]
+    },
+    "VkClearDepthStencilValue": {
+      "(VK_EXT_depth_range_unrestricted)": [
+        {
+          "vuid": "VUID-VkClearDepthStencilValue-depth-00022",
+          "text": " Unless the <code><a href=\"#VK_EXT_depth_range_unrestricted\">VK_EXT_depth_range_unrestricted</a></code> extension is enabled <code>depth</code> <strong class=\"purple\">must</strong> be between <code>0.0</code> and <code>1.0</code>, inclusive"
+        }
+      ],
+      "!(VK_EXT_depth_range_unrestricted)": [
+        {
+          "vuid": "VUID-VkClearDepthStencilValue-depth-02506",
+          "text": " <code>depth</code> <strong class=\"purple\">must</strong> be between <code>0.0</code> and <code>1.0</code>, inclusive"
+        }
+      ]
+    },
+    "vkCmdFillBuffer": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdFillBuffer-dstOffset-00024",
+          "text": " <code>dstOffset</code> <strong class=\"purple\">must</strong> be less than the size of <code>dstBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdFillBuffer-dstOffset-00025",
+          "text": " <code>dstOffset</code> <strong class=\"purple\">must</strong> be a multiple of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdFillBuffer-size-00026",
+          "text": " If <code>size</code> is not equal to <code>VK_WHOLE_SIZE</code>, <code>size</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdFillBuffer-size-00027",
+          "text": " If <code>size</code> is not equal to <code>VK_WHOLE_SIZE</code>, <code>size</code> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>dstBuffer</code> minus <code>dstOffset</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdFillBuffer-size-00028",
+          "text": " If <code>size</code> is not equal to <code>VK_WHOLE_SIZE</code>, <code>size</code> <strong class=\"purple\">must</strong> be a multiple of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdFillBuffer-dstBuffer-00029",
+          "text": " <code>dstBuffer</code> <strong class=\"purple\">must</strong> have been created with <code>VK_BUFFER_USAGE_TRANSFER_DST_BIT</code> usage flag"
+        },
+        {
+          "vuid": "VUID-vkCmdFillBuffer-dstBuffer-00031",
+          "text": " If <code>dstBuffer</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdFillBuffer-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdFillBuffer-dstBuffer-parameter",
+          "text": " <code>dstBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdFillBuffer-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdFillBuffer-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support transfer, graphics or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdFillBuffer-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called outside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdFillBuffer-commonparent",
+          "text": " Both of <code>commandBuffer</code>, and <code>dstBuffer</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "!(VK_VERSION_1_1,VK_KHR_maintenance1)": [
+        {
+          "vuid": "VUID-vkCmdFillBuffer-commandBuffer-00030",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics or compute operations"
+        }
+      ],
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdFillBuffer-commandBuffer-01811",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, then <code>dstBuffer</code> <strong class=\"purple\">must</strong> not be a protected buffer"
+        },
+        {
+          "vuid": "VUID-vkCmdFillBuffer-commandBuffer-01812",
+          "text": " If <code>commandBuffer</code> is a protected command buffer, then <code>dstBuffer</code> <strong class=\"purple\">must</strong> not be an unprotected buffer"
+        }
+      ]
+    },
+    "vkCmdUpdateBuffer": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdUpdateBuffer-dstOffset-00032",
+          "text": " <code>dstOffset</code> <strong class=\"purple\">must</strong> be less than the size of <code>dstBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdUpdateBuffer-dataSize-00033",
+          "text": " <code>dataSize</code> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>dstBuffer</code> minus <code>dstOffset</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdUpdateBuffer-dstBuffer-00034",
+          "text": " <code>dstBuffer</code> <strong class=\"purple\">must</strong> have been created with <code>VK_BUFFER_USAGE_TRANSFER_DST_BIT</code> usage flag"
+        },
+        {
+          "vuid": "VUID-vkCmdUpdateBuffer-dstBuffer-00035",
+          "text": " If <code>dstBuffer</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdUpdateBuffer-dstOffset-00036",
+          "text": " <code>dstOffset</code> <strong class=\"purple\">must</strong> be a multiple of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdUpdateBuffer-dataSize-00037",
+          "text": " <code>dataSize</code> <strong class=\"purple\">must</strong> be less than or equal to <code>65536</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdUpdateBuffer-dataSize-00038",
+          "text": " <code>dataSize</code> <strong class=\"purple\">must</strong> be a multiple of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdUpdateBuffer-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdUpdateBuffer-dstBuffer-parameter",
+          "text": " <code>dstBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdUpdateBuffer-pData-parameter",
+          "text": " <code>pData</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>dataSize</code> bytes"
+        },
+        {
+          "vuid": "VUID-vkCmdUpdateBuffer-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdUpdateBuffer-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support transfer, graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdUpdateBuffer-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called outside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdUpdateBuffer-dataSize-arraylength",
+          "text": " <code>dataSize</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdUpdateBuffer-commonparent",
+          "text": " Both of <code>commandBuffer</code>, and <code>dstBuffer</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdUpdateBuffer-commandBuffer-01813",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, then <code>dstBuffer</code> <strong class=\"purple\">must</strong> not be a protected buffer"
+        },
+        {
+          "vuid": "VUID-vkCmdUpdateBuffer-commandBuffer-01814",
+          "text": " If <code>commandBuffer</code> is a protected command buffer, then <code>dstBuffer</code> <strong class=\"purple\">must</strong> not be an unprotected buffer"
+        }
+      ]
+    },
+    "vkCmdCopyBuffer": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdCopyBuffer-srcOffset-00113",
+          "text": " The <code>srcOffset</code> member of each element of <code>pRegions</code> <strong class=\"purple\">must</strong> be less than the size of <code>srcBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBuffer-dstOffset-00114",
+          "text": " The <code>dstOffset</code> member of each element of <code>pRegions</code> <strong class=\"purple\">must</strong> be less than the size of <code>dstBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBuffer-size-00115",
+          "text": " The <code>size</code> member of each element of <code>pRegions</code> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>srcBuffer</code> minus <code>srcOffset</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBuffer-size-00116",
+          "text": " The <code>size</code> member of each element of <code>pRegions</code> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>dstBuffer</code> minus <code>dstOffset</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBuffer-pRegions-00117",
+          "text": " The union of the source regions, and the union of the destination regions, specified by the elements of <code>pRegions</code>, <strong class=\"purple\">must</strong> not overlap in memory"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBuffer-srcBuffer-00118",
+          "text": " <code>srcBuffer</code> <strong class=\"purple\">must</strong> have been created with <code>VK_BUFFER_USAGE_TRANSFER_SRC_BIT</code> usage flag"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBuffer-srcBuffer-00119",
+          "text": " If <code>srcBuffer</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBuffer-dstBuffer-00120",
+          "text": " <code>dstBuffer</code> <strong class=\"purple\">must</strong> have been created with <code>VK_BUFFER_USAGE_TRANSFER_DST_BIT</code> usage flag"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBuffer-dstBuffer-00121",
+          "text": " If <code>dstBuffer</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBuffer-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBuffer-srcBuffer-parameter",
+          "text": " <code>srcBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBuffer-dstBuffer-parameter",
+          "text": " <code>dstBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBuffer-pRegions-parameter",
+          "text": " <code>pRegions</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>regionCount</code> valid <a href=\"#VkBufferCopy\">VkBufferCopy</a> structures"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBuffer-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBuffer-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support transfer, graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBuffer-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called outside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBuffer-regionCount-arraylength",
+          "text": " <code>regionCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBuffer-commonparent",
+          "text": " Each of <code>commandBuffer</code>, <code>dstBuffer</code>, and <code>srcBuffer</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdCopyBuffer-commandBuffer-01822",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, then <code>srcBuffer</code> <strong class=\"purple\">must</strong> not be a protected buffer"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBuffer-commandBuffer-01823",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, then <code>dstBuffer</code> <strong class=\"purple\">must</strong> not be a protected buffer"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBuffer-commandBuffer-01824",
+          "text": " If <code>commandBuffer</code> is a protected command buffer, then <code>dstBuffer</code> <strong class=\"purple\">must</strong> not be an unprotected buffer"
+        }
+      ]
+    },
+    "VkBufferCopy": {
+      "core": [
+        {
+          "vuid": "VUID-VkBufferCopy-size-01988",
+          "text": " The <code>size</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "vkCmdCopyImage": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdCopyImage-pRegions-00122",
+          "text": " The source region specified by each element of <code>pRegions</code> <strong class=\"purple\">must</strong> be a region that is contained within <code>srcImage</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-pRegions-00123",
+          "text": " The destination region specified by each element of <code>pRegions</code> <strong class=\"purple\">must</strong> be a region that is contained within <code>dstImage</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-pRegions-00124",
+          "text": " The union of all source regions, and the union of all destination regions, specified by the elements of <code>pRegions</code>, <strong class=\"purple\">must</strong> not overlap in memory"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-srcImage-00126",
+          "text": " <code>srcImage</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_TRANSFER_SRC_BIT</code> usage flag"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-srcImageLayout-00128",
+          "text": " <code>srcImageLayout</code> <strong class=\"purple\">must</strong> specify the layout of the image subresources of <code>srcImage</code> specified in <code>pRegions</code> at the time this command is executed on a <code>VkDevice</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-dstImage-00131",
+          "text": " <code>dstImage</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_TRANSFER_DST_BIT</code> usage flag"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-dstImageLayout-00133",
+          "text": " <code>dstImageLayout</code> <strong class=\"purple\">must</strong> specify the layout of the image subresources of <code>dstImage</code> specified in <code>pRegions</code> at the time this command is executed on a <code>VkDevice</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-srcImage-00136",
+          "text": " The sample count of <code>srcImage</code> and <code>dstImage</code> <strong class=\"purple\">must</strong> match"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-srcSubresource-01696",
+          "text": " The <code>srcSubresource.mipLevel</code> member of each element of <code>pRegions</code> <strong class=\"purple\">must</strong> be less than the <code>mipLevels</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>srcImage</code> was created"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-dstSubresource-01697",
+          "text": " The <code>dstSubresource.mipLevel</code> member of each element of <code>pRegions</code> <strong class=\"purple\">must</strong> be less than the <code>mipLevels</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>dstImage</code> was created"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-srcSubresource-01698",
+          "text": " The <span class=\"eq\"><code>srcSubresource.baseArrayLayer</code> &#43; <code>srcSubresource.layerCount</code></span> of each element of <code>pRegions</code> <strong class=\"purple\">must</strong> be less than or equal to the <code>arrayLayers</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>srcImage</code> was created"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-dstSubresource-01699",
+          "text": " The <span class=\"eq\"><code>dstSubresource.baseArrayLayer</code> &#43; <code>dstSubresource.layerCount</code></span> of each element of <code>pRegions</code> <strong class=\"purple\">must</strong> be less than or equal to the <code>arrayLayers</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>dstImage</code> was created"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-srcOffset-01783",
+          "text": " The <code>srcOffset</code> and <code>extent</code> members of each element of <code>pRegions</code> <strong class=\"purple\">must</strong> respect the image transfer granularity requirements of <code>commandBuffer</code>&#8217;s command pool&#8217;s queue family, as described in <a href=\"#VkQueueFamilyProperties\">VkQueueFamilyProperties</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-dstOffset-01784",
+          "text": " The <code>dstOffset</code> and <code>extent</code> members of each element of <code>pRegions</code> <strong class=\"purple\">must</strong> respect the image transfer granularity requirements of <code>commandBuffer</code>&#8217;s command pool&#8217;s queue family, as described in <a href=\"#VkQueueFamilyProperties\">VkQueueFamilyProperties</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-srcImage-parameter",
+          "text": " <code>srcImage</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImage\">VkImage</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-srcImageLayout-parameter",
+          "text": " <code>srcImageLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageLayout\">VkImageLayout</a> value"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-dstImage-parameter",
+          "text": " <code>dstImage</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImage\">VkImage</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-dstImageLayout-parameter",
+          "text": " <code>dstImageLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageLayout\">VkImageLayout</a> value"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-pRegions-parameter",
+          "text": " <code>pRegions</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>regionCount</code> valid <a href=\"#VkImageCopy\">VkImageCopy</a> structures"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support transfer, graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called outside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-regionCount-arraylength",
+          "text": " <code>regionCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-commonparent",
+          "text": " Each of <code>commandBuffer</code>, <code>dstImage</code>, and <code>srcImage</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_maintenance1)": [
+        {
+          "vuid": "VUID-vkCmdCopyImage-srcImage-01995",
+          "text": " The <a href=\"#resources-image-format-features\">format features</a> of <code>srcImage</code> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_TRANSFER_SRC_BIT</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-dstImage-01996",
+          "text": " The <a href=\"#resources-image-format-features\">format features</a> of <code>dstImage</code> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_TRANSFER_DST_BIT</code>."
+        }
+      ],
+      "!(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-vkCmdCopyImage-srcImage-00127",
+          "text": " If <code>srcImage</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-dstImage-00132",
+          "text": " If <code>dstImage</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-srcImage-00135",
+          "text": " The <a href=\"#VkFormat\">VkFormat</a> of each of <code>srcImage</code> and <code>dstImage</code> <strong class=\"purple\">must</strong> be compatible, as defined <a href=\"#copies-images-format-compatibility\">above</a>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-vkCmdCopyImage-srcImage-01546",
+          "text": " If <code>srcImage</code> is non-sparse then the image or <em>disjoint</em> plane to be copied <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-dstImage-01547",
+          "text": " If <code>dstImage</code> is non-sparse then the image or <em>disjoint</em> plane that is the destination of the copy <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-srcImage-01548",
+          "text": " If the <a href=\"#VkFormat\">VkFormat</a> of each of <code>srcImage</code> and <code>dstImage</code> is not a <a href=\"#formats-requiring-sampler-ycbcr-conversion\"><em>multi-planar format</em></a>, the <a href=\"#VkFormat\">VkFormat</a> of each of <code>srcImage</code> and <code>dstImage</code> <strong class=\"purple\">must</strong> be compatible, as defined <a href=\"#copies-images-format-compatibility\">above</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-None-01549",
+          "text": " In a copy to or from a plane of a <a href=\"#formats-requiring-sampler-ycbcr-conversion\">multi-planar image</a>, the <a href=\"#VkFormat\">VkFormat</a> of the image and plane <strong class=\"purple\">must</strong> be compatible according to <a href=\"#formats-compatible-planes\">the description of compatible planes</a> for the plane being copied"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-aspectMask-01550",
+          "text": " When a copy is performed to or from an image with a <a href=\"#formats-requiring-sampler-ycbcr-conversion\">multi-planar format</a>, the <code>aspectMask</code> of the <code>srcSubresource</code> and/or <code>dstSubresource</code> that refers to the multi-planar image <strong class=\"purple\">must</strong> be <code>VK_IMAGE_ASPECT_PLANE_0_BIT</code>, <code>VK_IMAGE_ASPECT_PLANE_1_BIT</code>, or <code>VK_IMAGE_ASPECT_PLANE_2_BIT</code> (with <code>VK_IMAGE_ASPECT_PLANE_2_BIT</code> valid only for a <a href=\"#VkFormat\">VkFormat</a> with three planes)"
+        }
+      ],
+      "!(VK_KHR_shared_presentable_image)": [
+        {
+          "vuid": "VUID-vkCmdCopyImage-srcImageLayout-00129",
+          "text": " <code>srcImageLayout</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL</code> or <code>VK_IMAGE_LAYOUT_GENERAL</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-dstImageLayout-00134",
+          "text": " <code>dstImageLayout</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL</code> or <code>VK_IMAGE_LAYOUT_GENERAL</code>"
+        }
+      ],
+      "(VK_KHR_shared_presentable_image)": [
+        {
+          "vuid": "VUID-vkCmdCopyImage-srcImageLayout-01917",
+          "text": " <code>srcImageLayout</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_GENERAL</code>, or <code>VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-dstImageLayout-01395",
+          "text": " <code>dstImageLayout</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_GENERAL</code>, or <code>VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR</code>"
+        }
+      ],
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdCopyImage-commandBuffer-01825",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, then <code>srcImage</code> <strong class=\"purple\">must</strong> not be a protected image"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-commandBuffer-01826",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, then <code>dstImage</code> <strong class=\"purple\">must</strong> not be a protected image"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImage-commandBuffer-01827",
+          "text": " If <code>commandBuffer</code> is a protected command buffer, then <code>dstImage</code> <strong class=\"purple\">must</strong> not be an unprotected image"
+        }
+      ],
+      "(VK_EXT_fragment_density_map)": [
+        {
+          "vuid": "VUID-vkCmdCopyImage-dstImage-02542",
+          "text": " <code>dstImage</code> and <code>srcImage</code> <strong class=\"purple\">must</strong> not have been created with <code>flags</code> containing <code>VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT</code>"
+        }
+      ]
+    },
+    "VkImageCopy": {
+      "!(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-VkImageCopy-aspectMask-00137",
+          "text": " The <code>aspectMask</code> member of <code>srcSubresource</code> and <code>dstSubresource</code> <strong class=\"purple\">must</strong> match"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-srcOffset-00157",
+          "text": " If the calling command&#8217;s <code>srcImage</code> is a compressed image, all members of <code>srcOffset</code> <strong class=\"purple\">must</strong> be a multiple of the corresponding dimensions of the compressed texel block"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-extent-00158",
+          "text": " If the calling command&#8217;s <code>srcImage</code> is a compressed image, <code>extent.width</code> <strong class=\"purple\">must</strong> be a multiple of the compressed texel block width or <span class=\"eq\">(<code>extent.width</code> &#43; <code>srcOffset.x</code>)</span> <strong class=\"purple\">must</strong> equal the source image subresource width"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-extent-00159",
+          "text": " If the calling command&#8217;s <code>srcImage</code> is a compressed image, <code>extent.height</code> <strong class=\"purple\">must</strong> be a multiple of the compressed texel block height or <span class=\"eq\">(<code>extent.height</code> &#43; <code>srcOffset.y</code>)</span> <strong class=\"purple\">must</strong> equal the source image subresource height"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-extent-00160",
+          "text": " If the calling command&#8217;s <code>srcImage</code> is a compressed image, <code>extent.depth</code> <strong class=\"purple\">must</strong> be a multiple of the compressed texel block depth or <span class=\"eq\">(<code>extent.depth</code> &#43; <code>srcOffset.z</code>)</span> <strong class=\"purple\">must</strong> equal the source image subresource depth"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-dstOffset-00162",
+          "text": " If the calling command&#8217;s <code>dstImage</code> is a compressed format image, all members of <code>dstOffset</code> <strong class=\"purple\">must</strong> be a multiple of the corresponding dimensions of the compressed texel block"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-extent-00163",
+          "text": " If the calling command&#8217;s <code>dstImage</code> is a compressed format image, <code>extent.width</code> <strong class=\"purple\">must</strong> be a multiple of the compressed texel block width or <span class=\"eq\">(<code>extent.width</code> &#43; <code>dstOffset.x</code>)</span> <strong class=\"purple\">must</strong> equal the destination image subresource width"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-extent-00164",
+          "text": " If the calling command&#8217;s <code>dstImage</code> is a compressed format image, <code>extent.height</code> <strong class=\"purple\">must</strong> be a multiple of the compressed texel block height or <span class=\"eq\">(<code>extent.height</code> &#43; <code>dstOffset.y</code>)</span> <strong class=\"purple\">must</strong> equal the destination image subresource height"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-extent-00165",
+          "text": " If the calling command&#8217;s <code>dstImage</code> is a compressed format image, <code>extent.depth</code> <strong class=\"purple\">must</strong> be a multiple of the compressed texel block depth or <span class=\"eq\">(<code>extent.depth</code> &#43; <code>dstOffset.z</code>)</span> <strong class=\"purple\">must</strong> equal the destination image subresource depth"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-VkImageCopy-srcImage-01551",
+          "text": " If neither the calling command&#8217;s <code>srcImage</code> nor the calling command&#8217;s <code>dstImage</code> has a <a href=\"#formats-requiring-sampler-ycbcr-conversion\">multi-planar image format</a> then the <code>aspectMask</code> member of <code>srcSubresource</code> and <code>dstSubresource</code> <strong class=\"purple\">must</strong> match"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-srcImage-01552",
+          "text": " If the calling command&#8217;s <code>srcImage</code> has a <a href=\"#VkFormat\">VkFormat</a> with <a href=\"#formats-requiring-sampler-ycbcr-conversion\">two planes</a> then the <code>srcSubresource</code> <code>aspectMask</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_ASPECT_PLANE_0_BIT</code> or <code>VK_IMAGE_ASPECT_PLANE_1_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-srcImage-01553",
+          "text": " If the calling command&#8217;s <code>srcImage</code> has a <a href=\"#VkFormat\">VkFormat</a> with <a href=\"#formats-requiring-sampler-ycbcr-conversion\">three planes</a> then the <code>srcSubresource</code> <code>aspectMask</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_ASPECT_PLANE_0_BIT</code>, <code>VK_IMAGE_ASPECT_PLANE_1_BIT</code>, or <code>VK_IMAGE_ASPECT_PLANE_2_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-dstImage-01554",
+          "text": " If the calling command&#8217;s <code>dstImage</code> has a <a href=\"#VkFormat\">VkFormat</a> with <a href=\"#formats-requiring-sampler-ycbcr-conversion\">two planes</a> then the <code>dstSubresource</code> <code>aspectMask</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_ASPECT_PLANE_0_BIT</code> or <code>VK_IMAGE_ASPECT_PLANE_1_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-dstImage-01555",
+          "text": " If the calling command&#8217;s <code>dstImage</code> has a <a href=\"#VkFormat\">VkFormat</a> with <a href=\"#formats-requiring-sampler-ycbcr-conversion\">three planes</a> then the <code>dstSubresource</code> <code>aspectMask</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_ASPECT_PLANE_0_BIT</code>, <code>VK_IMAGE_ASPECT_PLANE_1_BIT</code>, or <code>VK_IMAGE_ASPECT_PLANE_2_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-srcImage-01556",
+          "text": " If the calling command&#8217;s <code>srcImage</code> has a <a href=\"#formats-requiring-sampler-ycbcr-conversion\">multi-planar image format</a> and the <code>dstImage</code> does not have a multi-planar image format, the <code>dstSubresource</code> <code>aspectMask</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_ASPECT_COLOR_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-dstImage-01557",
+          "text": " If the calling command&#8217;s <code>dstImage</code> has a <a href=\"#formats-requiring-sampler-ycbcr-conversion\">multi-planar image format</a> and the <code>srcImage</code> does not have a multi-planar image format, the <code>srcSubresource</code> <code>aspectMask</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_ASPECT_COLOR_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-srcImage-01727",
+          "text": " If the calling command&#8217;s <code>srcImage</code> is a compressed image, or a <em>single-plane</em>, &#8220;<code>_422</code>&#8221; image format, all members of <code>srcOffset</code> <strong class=\"purple\">must</strong> be a multiple of the corresponding dimensions of the compressed texel block"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-srcImage-01728",
+          "text": " If the calling command&#8217;s <code>srcImage</code> is a compressed image, or a <em>single-plane</em>, &#8220;<code>_422</code>&#8221; image format, <code>extent.width</code> <strong class=\"purple\">must</strong> be a multiple of the compressed texel block width or <span class=\"eq\">(<code>extent.width</code> &#43; <code>srcOffset.x</code>)</span> <strong class=\"purple\">must</strong> equal the source image subresource width"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-srcImage-01729",
+          "text": " If the calling command&#8217;s <code>srcImage</code> is a compressed image, or a <em>single-plane</em>, &#8220;<code>_422</code>&#8221; image format, <code>extent.height</code> <strong class=\"purple\">must</strong> be a multiple of the compressed texel block height or <span class=\"eq\">(<code>extent.height</code> &#43; <code>srcOffset.y</code>)</span> <strong class=\"purple\">must</strong> equal the source image subresource height"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-srcImage-01730",
+          "text": " If the calling command&#8217;s <code>srcImage</code> is a compressed image, or a <em>single-plane</em>, &#8220;<code>_422</code>&#8221; image format, <code>extent.depth</code> <strong class=\"purple\">must</strong> be a multiple of the compressed texel block depth or <span class=\"eq\">(<code>extent.depth</code> &#43; <code>srcOffset.z</code>)</span> <strong class=\"purple\">must</strong> equal the source image subresource depth"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-dstImage-01731",
+          "text": " If the calling command&#8217;s <code>dstImage</code> is a compressed format image, or a <em>single-plane</em>, &#8220;<code>_422</code>&#8221; image format, all members of <code>dstOffset</code> <strong class=\"purple\">must</strong> be a multiple of the corresponding dimensions of the compressed texel block"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-dstImage-01732",
+          "text": " If the calling command&#8217;s <code>dstImage</code> is a compressed format image, or a <em>single-plane</em>, &#8220;<code>_422</code>&#8221; image format, <code>extent.width</code> <strong class=\"purple\">must</strong> be a multiple of the compressed texel block width or <span class=\"eq\">(<code>extent.width</code> &#43; <code>dstOffset.x</code>)</span> <strong class=\"purple\">must</strong> equal the destination image subresource width"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-dstImage-01733",
+          "text": " If the calling command&#8217;s <code>dstImage</code> is a compressed format image, or a <em>single-plane</em>, &#8220;<code>_422</code>&#8221; image format, <code>extent.height</code> <strong class=\"purple\">must</strong> be a multiple of the compressed texel block height or <span class=\"eq\">(<code>extent.height</code> &#43; <code>dstOffset.y</code>)</span> <strong class=\"purple\">must</strong> equal the destination image subresource height"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-dstImage-01734",
+          "text": " If the calling command&#8217;s <code>dstImage</code> is a compressed format image, or a <em>single-plane</em>, &#8220;<code>_422</code>&#8221; image format, <code>extent.depth</code> <strong class=\"purple\">must</strong> be a multiple of the compressed texel block depth or <span class=\"eq\">(<code>extent.depth</code> &#43; <code>dstOffset.z</code>)</span> <strong class=\"purple\">must</strong> equal the destination image subresource depth"
+        }
+      ],
+      "!(VK_VERSION_1_1,VK_KHR_maintenance1)": [
+        {
+          "vuid": "VUID-VkImageCopy-layerCount-00138",
+          "text": " The <code>layerCount</code> member of <code>srcSubresource</code> and <code>dstSubresource</code> <strong class=\"purple\">must</strong> match"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-srcImage-00139",
+          "text": " If either of the calling command&#8217;s <code>srcImage</code> or <code>dstImage</code> parameters are of <a href=\"#VkImageType\">VkImageType</a> <code>VK_IMAGE_TYPE_3D</code>, the <code>baseArrayLayer</code> and <code>layerCount</code> members of both <code>srcSubresource</code> and <code>dstSubresource</code> <strong class=\"purple\">must</strong> be <code>0</code> and <code>1</code>, respectively"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-srcImage-01789",
+          "text": " If the calling command&#8217;s <code>srcImage</code> or <code>dstImage</code> is of type <code>VK_IMAGE_TYPE_2D</code>, then <code>extent.depth</code> <strong class=\"purple\">must</strong> be <code>1</code>."
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_maintenance1)": [
+        {
+          "vuid": "VUID-VkImageCopy-extent-00140",
+          "text": " The number of slices of the <code>extent</code> (for 3D) or layers of the <code>srcSubresource</code> (for non-3D) <strong class=\"purple\">must</strong> match the number of slices of the <code>extent</code> (for 3D) or layers of the <code>dstSubresource</code> (for non-3D)"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-srcImage-00141",
+          "text": " If either of the calling command&#8217;s <code>srcImage</code> or <code>dstImage</code> parameters are of <a href=\"#VkImageType\">VkImageType</a> <code>VK_IMAGE_TYPE_3D</code>, the <code>baseArrayLayer</code> and <code>layerCount</code> members of the corresponding subresource <strong class=\"purple\">must</strong> be <code>0</code> and <code>1</code>, respectively"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-srcImage-01790",
+          "text": " If both <code>srcImage</code> and <code>dstImage</code> are of type <code>VK_IMAGE_TYPE_2D</code> then <code>extent.depth</code> <strong class=\"purple\">must</strong> be <code>1</code>."
+        },
+        {
+          "vuid": "VUID-VkImageCopy-srcImage-01791",
+          "text": " If the calling command&#8217;s <code>srcImage</code> is of type <code>VK_IMAGE_TYPE_2D</code>, and the <code>dstImage</code> is of type <code>VK_IMAGE_TYPE_3D</code>, then <code>extent.depth</code> <strong class=\"purple\">must</strong> equal to the <code>layerCount</code> member of <code>srcSubresource</code>."
+        },
+        {
+          "vuid": "VUID-VkImageCopy-dstImage-01792",
+          "text": " If the calling command&#8217;s <code>dstImage</code> is of type <code>VK_IMAGE_TYPE_2D</code>, and the <code>srcImage</code> is of type <code>VK_IMAGE_TYPE_3D</code>, then <code>extent.depth</code> <strong class=\"purple\">must</strong> equal to the <code>layerCount</code> member of <code>dstSubresource</code>."
+        }
+      ],
+      "core": [
+        {
+          "vuid": "VUID-VkImageCopy-aspectMask-00142",
+          "text": " The <code>aspectMask</code> member of <code>srcSubresource</code> <strong class=\"purple\">must</strong> specify aspects present in the calling command&#8217;s <code>srcImage</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-aspectMask-00143",
+          "text": " The <code>aspectMask</code> member of <code>dstSubresource</code> <strong class=\"purple\">must</strong> specify aspects present in the calling command&#8217;s <code>dstImage</code>"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-srcOffset-00144",
+          "text": " <code>srcOffset.x</code> and <span class=\"eq\">(<code>extent.width</code> &#43; <code>srcOffset.x</code>)</span> <strong class=\"purple\">must</strong> both be greater than or equal to <code>0</code> and less than or equal to the source image subresource width"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-srcOffset-00145",
+          "text": " <code>srcOffset.y</code> and <span class=\"eq\">(<code>extent.height</code> &#43; <code>srcOffset.y</code>)</span> <strong class=\"purple\">must</strong> both be greater than or equal to <code>0</code> and less than or equal to the source image subresource height"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-srcImage-00146",
+          "text": " If the calling command&#8217;s <code>srcImage</code> is of type <code>VK_IMAGE_TYPE_1D</code>, then <code>srcOffset.y</code> <strong class=\"purple\">must</strong> be <code>0</code> and <code>extent.height</code> <strong class=\"purple\">must</strong> be <code>1</code>."
+        },
+        {
+          "vuid": "VUID-VkImageCopy-srcOffset-00147",
+          "text": " <code>srcOffset.z</code> and <span class=\"eq\">(<code>extent.depth</code> &#43; <code>srcOffset.z</code>)</span> <strong class=\"purple\">must</strong> both be greater than or equal to <code>0</code> and less than or equal to the source image subresource depth"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-srcImage-01785",
+          "text": " If the calling command&#8217;s <code>srcImage</code> is of type <code>VK_IMAGE_TYPE_1D</code>, then <code>srcOffset.z</code> <strong class=\"purple\">must</strong> be <code>0</code> and <code>extent.depth</code> <strong class=\"purple\">must</strong> be <code>1</code>."
+        },
+        {
+          "vuid": "VUID-VkImageCopy-dstImage-01786",
+          "text": " If the calling command&#8217;s <code>dstImage</code> is of type <code>VK_IMAGE_TYPE_1D</code>, then <code>dstOffset.z</code> <strong class=\"purple\">must</strong> be <code>0</code> and <code>extent.depth</code> <strong class=\"purple\">must</strong> be <code>1</code>."
+        },
+        {
+          "vuid": "VUID-VkImageCopy-srcImage-01787",
+          "text": " If the calling command&#8217;s <code>srcImage</code> is of type <code>VK_IMAGE_TYPE_2D</code>, then <code>srcOffset.z</code> <strong class=\"purple\">must</strong> be <code>0</code>."
+        },
+        {
+          "vuid": "VUID-VkImageCopy-dstImage-01788",
+          "text": " If the calling command&#8217;s <code>dstImage</code> is of type <code>VK_IMAGE_TYPE_2D</code>, then <code>dstOffset.z</code> <strong class=\"purple\">must</strong> be <code>0</code>."
+        },
+        {
+          "vuid": "VUID-VkImageCopy-dstOffset-00150",
+          "text": " <code>dstOffset.x</code> and <span class=\"eq\">(<code>extent.width</code> &#43; <code>dstOffset.x</code>)</span> <strong class=\"purple\">must</strong> both be greater than or equal to <code>0</code> and less than or equal to the destination image subresource width"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-dstOffset-00151",
+          "text": " <code>dstOffset.y</code> and <span class=\"eq\">(<code>extent.height</code> &#43; <code>dstOffset.y</code>)</span> <strong class=\"purple\">must</strong> both be greater than or equal to <code>0</code> and less than or equal to the destination image subresource height"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-dstImage-00152",
+          "text": " If the calling command&#8217;s <code>dstImage</code> is of type <code>VK_IMAGE_TYPE_1D</code>, then <code>dstOffset.y</code> <strong class=\"purple\">must</strong> be <code>0</code> and <code>extent.height</code> <strong class=\"purple\">must</strong> be <code>1</code>."
+        },
+        {
+          "vuid": "VUID-VkImageCopy-dstOffset-00153",
+          "text": " <code>dstOffset.z</code> and <span class=\"eq\">(<code>extent.depth</code> &#43; <code>dstOffset.z</code>)</span> <strong class=\"purple\">must</strong> both be greater than or equal to <code>0</code> and less than or equal to the destination image subresource depth"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-srcSubresource-parameter",
+          "text": " <code>srcSubresource</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageSubresourceLayers\">VkImageSubresourceLayers</a> structure"
+        },
+        {
+          "vuid": "VUID-VkImageCopy-dstSubresource-parameter",
+          "text": " <code>dstSubresource</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageSubresourceLayers\">VkImageSubresourceLayers</a> structure"
+        }
+      ]
+    },
+    "VkImageSubresourceLayers": {
+      "core": [
+        {
+          "vuid": "VUID-VkImageSubresourceLayers-aspectMask-00167",
+          "text": " If <code>aspectMask</code> contains <code>VK_IMAGE_ASPECT_COLOR_BIT</code>, it <strong class=\"purple\">must</strong> not contain either of <code>VK_IMAGE_ASPECT_DEPTH_BIT</code> or <code>VK_IMAGE_ASPECT_STENCIL_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageSubresourceLayers-aspectMask-00168",
+          "text": " <code>aspectMask</code> <strong class=\"purple\">must</strong> not contain <code>VK_IMAGE_ASPECT_METADATA_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageSubresourceLayers-layerCount-01700",
+          "text": " <code>layerCount</code> <strong class=\"purple\">must</strong> be greater than 0"
+        },
+        {
+          "vuid": "VUID-VkImageSubresourceLayers-aspectMask-parameter",
+          "text": " <code>aspectMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkImageAspectFlagBits\">VkImageAspectFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkImageSubresourceLayers-aspectMask-requiredbitmask",
+          "text": " <code>aspectMask</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        }
+      ],
+      "(VK_EXT_image_drm_format_modifier)": [
+        {
+          "vuid": "VUID-VkImageSubresourceLayers-aspectMask-02247",
+          "text": " <code>aspectMask</code> <strong class=\"purple\">must</strong> not include <code>VK_IMAGE_ASPECT_MEMORY_PLANE_i_BIT_EXT</code> for any index <code>i</code>."
+        }
+      ]
+    },
+    "vkCmdCopyBufferToImage": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdCopyBufferToImage-pRegions-00171",
+          "text": " <code>srcBuffer</code> <strong class=\"purple\">must</strong> be large enough to contain all buffer locations that are accessed according to <a href=\"#copies-buffers-images-addressing\">Buffer and Image Addressing</a>, for each element of <code>pRegions</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBufferToImage-pRegions-00172",
+          "text": " The image region specified by each element of <code>pRegions</code> <strong class=\"purple\">must</strong> be a region that is contained within <code>dstImage</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBufferToImage-pRegions-00173",
+          "text": " The union of all source regions, and the union of all destination regions, specified by the elements of <code>pRegions</code>, <strong class=\"purple\">must</strong> not overlap in memory"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBufferToImage-srcBuffer-00174",
+          "text": " <code>srcBuffer</code> <strong class=\"purple\">must</strong> have been created with <code>VK_BUFFER_USAGE_TRANSFER_SRC_BIT</code> usage flag"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBufferToImage-srcBuffer-00176",
+          "text": " If <code>srcBuffer</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-00177",
+          "text": " <code>dstImage</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_TRANSFER_DST_BIT</code> usage flag"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-00178",
+          "text": " If <code>dstImage</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-00179",
+          "text": " <code>dstImage</code> <strong class=\"purple\">must</strong> have a sample count equal to <code>VK_SAMPLE_COUNT_1_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBufferToImage-dstImageLayout-00180",
+          "text": " <code>dstImageLayout</code> <strong class=\"purple\">must</strong> specify the layout of the image subresources of <code>dstImage</code> specified in <code>pRegions</code> at the time this command is executed on a <code>VkDevice</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBufferToImage-imageSubresource-01701",
+          "text": " The <code>imageSubresource.mipLevel</code> member of each element of <code>pRegions</code> <strong class=\"purple\">must</strong> be less than the <code>mipLevels</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>dstImage</code> was created"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBufferToImage-imageSubresource-01702",
+          "text": " The <span class=\"eq\"><code>imageSubresource.baseArrayLayer</code> &#43; <code>imageSubresource.layerCount</code></span> of each element of <code>pRegions</code> <strong class=\"purple\">must</strong> be less than or equal to the <code>arrayLayers</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>dstImage</code> was created"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBufferToImage-imageOffset-01793",
+          "text": " The <code>imageOffset</code> and <code>imageExtent</code> members of each element of <code>pRegions</code> <strong class=\"purple\">must</strong> respect the image transfer granularity requirements of <code>commandBuffer</code>&#8217;s command pool&#8217;s queue family, as described in <a href=\"#VkQueueFamilyProperties\">VkQueueFamilyProperties</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBufferToImage-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBufferToImage-srcBuffer-parameter",
+          "text": " <code>srcBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-parameter",
+          "text": " <code>dstImage</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImage\">VkImage</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBufferToImage-dstImageLayout-parameter",
+          "text": " <code>dstImageLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageLayout\">VkImageLayout</a> value"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBufferToImage-pRegions-parameter",
+          "text": " <code>pRegions</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>regionCount</code> valid <a href=\"#VkBufferImageCopy\">VkBufferImageCopy</a> structures"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBufferToImage-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBufferToImage-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support transfer, graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBufferToImage-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called outside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBufferToImage-regionCount-arraylength",
+          "text": " <code>regionCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBufferToImage-commonparent",
+          "text": " Each of <code>commandBuffer</code>, <code>dstImage</code>, and <code>srcBuffer</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_maintenance1)": [
+        {
+          "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-01997",
+          "text": " The <a href=\"#resources-image-format-features\">format features</a> of <code>dstImage</code> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_TRANSFER_DST_BIT</code>."
+        }
+      ],
+      "!(VK_KHR_shared_presentable_image)": [
+        {
+          "vuid": "VUID-vkCmdCopyBufferToImage-dstImageLayout-00181",
+          "text": " <code>dstImageLayout</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL</code> or <code>VK_IMAGE_LAYOUT_GENERAL</code>"
+        }
+      ],
+      "(VK_KHR_shared_presentable_image)": [
+        {
+          "vuid": "VUID-vkCmdCopyBufferToImage-dstImageLayout-01396",
+          "text": " <code>dstImageLayout</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL</code>, <code>VK_IMAGE_LAYOUT_GENERAL</code>, or <code>VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR</code>"
+        }
+      ],
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdCopyBufferToImage-commandBuffer-01828",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, then <code>srcBuffer</code> <strong class=\"purple\">must</strong> not be a protected buffer"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBufferToImage-commandBuffer-01829",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, then <code>dstImage</code> <strong class=\"purple\">must</strong> not be a protected image"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyBufferToImage-commandBuffer-01830",
+          "text": " If <code>commandBuffer</code> is a protected command buffer, then <code>dstImage</code> <strong class=\"purple\">must</strong> not be an unprotected image"
+        }
+      ],
+      "(VK_EXT_fragment_density_map)": [
+        {
+          "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-02543",
+          "text": " <code>dstImage</code> <strong class=\"purple\">must</strong> not have been created with <code>flags</code> containing <code>VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT</code>"
+        }
+      ]
+    },
+    "vkCmdCopyImageToBuffer": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdCopyImageToBuffer-pRegions-00182",
+          "text": " The image region specified by each element of <code>pRegions</code> <strong class=\"purple\">must</strong> be a region that is contained within <code>srcImage</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImageToBuffer-pRegions-00183",
+          "text": " <code>dstBuffer</code> <strong class=\"purple\">must</strong> be large enough to contain all buffer locations that are accessed according to <a href=\"#copies-buffers-images-addressing\">Buffer and Image Addressing</a>, for each element of <code>pRegions</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImageToBuffer-pRegions-00184",
+          "text": " The union of all source regions, and the union of all destination regions, specified by the elements of <code>pRegions</code>, <strong class=\"purple\">must</strong> not overlap in memory"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-00186",
+          "text": " <code>srcImage</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_TRANSFER_SRC_BIT</code> usage flag"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-00187",
+          "text": " If <code>srcImage</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-00188",
+          "text": " <code>srcImage</code> <strong class=\"purple\">must</strong> have a sample count equal to <code>VK_SAMPLE_COUNT_1_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImageToBuffer-srcImageLayout-00189",
+          "text": " <code>srcImageLayout</code> <strong class=\"purple\">must</strong> specify the layout of the image subresources of <code>srcImage</code> specified in <code>pRegions</code> at the time this command is executed on a <code>VkDevice</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImageToBuffer-dstBuffer-00191",
+          "text": " <code>dstBuffer</code> <strong class=\"purple\">must</strong> have been created with <code>VK_BUFFER_USAGE_TRANSFER_DST_BIT</code> usage flag"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImageToBuffer-dstBuffer-00192",
+          "text": " If <code>dstBuffer</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImageToBuffer-imageSubresource-01703",
+          "text": " The <code>imageSubresource.mipLevel</code> member of each element of <code>pRegions</code> <strong class=\"purple\">must</strong> be less than the <code>mipLevels</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>srcImage</code> was created"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImageToBuffer-imageSubresource-01704",
+          "text": " The <span class=\"eq\"><code>imageSubresource.baseArrayLayer</code> &#43; <code>imageSubresource.layerCount</code></span> of each element of <code>pRegions</code> <strong class=\"purple\">must</strong> be less than or equal to the <code>arrayLayers</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>srcImage</code> was created"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImageToBuffer-imageOffset-01794",
+          "text": " The <code>imageOffset</code> and <code>imageExtent</code> members of each element of <code>pRegions</code> <strong class=\"purple\">must</strong> respect the image transfer granularity requirements of <code>commandBuffer</code>&#8217;s command pool&#8217;s queue family, as described in <a href=\"#VkQueueFamilyProperties\">VkQueueFamilyProperties</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImageToBuffer-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-parameter",
+          "text": " <code>srcImage</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImage\">VkImage</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImageToBuffer-srcImageLayout-parameter",
+          "text": " <code>srcImageLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageLayout\">VkImageLayout</a> value"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImageToBuffer-dstBuffer-parameter",
+          "text": " <code>dstBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImageToBuffer-pRegions-parameter",
+          "text": " <code>pRegions</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>regionCount</code> valid <a href=\"#VkBufferImageCopy\">VkBufferImageCopy</a> structures"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImageToBuffer-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImageToBuffer-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support transfer, graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImageToBuffer-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called outside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImageToBuffer-regionCount-arraylength",
+          "text": " <code>regionCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImageToBuffer-commonparent",
+          "text": " Each of <code>commandBuffer</code>, <code>dstBuffer</code>, and <code>srcImage</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_maintenance1)": [
+        {
+          "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-01998",
+          "text": " The <a href=\"#resources-image-format-features\">format features</a> of <code>srcImage</code> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_TRANSFER_SRC_BIT</code>."
+        }
+      ],
+      "!(VK_KHR_shared_presentable_image)": [
+        {
+          "vuid": "VUID-vkCmdCopyImageToBuffer-srcImageLayout-00190",
+          "text": " <code>srcImageLayout</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL</code> or <code>VK_IMAGE_LAYOUT_GENERAL</code>"
+        }
+      ],
+      "(VK_KHR_shared_presentable_image)": [
+        {
+          "vuid": "VUID-vkCmdCopyImageToBuffer-srcImageLayout-01397",
+          "text": " <code>srcImageLayout</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR</code>, <code>VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL</code> or <code>VK_IMAGE_LAYOUT_GENERAL</code>"
+        }
+      ],
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdCopyImageToBuffer-commandBuffer-01831",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, then <code>srcImage</code> <strong class=\"purple\">must</strong> not be a protected image"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImageToBuffer-commandBuffer-01832",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, then <code>dstBuffer</code> <strong class=\"purple\">must</strong> not be a protected buffer"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyImageToBuffer-commandBuffer-01833",
+          "text": " If <code>commandBuffer</code> is a protected command buffer, then <code>dstBuffer</code> <strong class=\"purple\">must</strong> not be an unprotected buffer"
+        }
+      ],
+      "(VK_EXT_fragment_density_map)": [
+        {
+          "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-02544",
+          "text": " <code>srcImage</code> <strong class=\"purple\">must</strong> not have been created with <code>flags</code> containing <code>VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT</code>"
+        }
+      ]
+    },
+    "VkBufferImageCopy": {
+      "!(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-VkBufferImageCopy-bufferOffset-00193",
+          "text": " If the calling command&#8217;s <code>VkImage</code> parameter&#8217;s format is not a depth/stencil format, then <code>bufferOffset</code> <strong class=\"purple\">must</strong> be a multiple of the format&#8217;s texel block size."
+        },
+        {
+          "vuid": "VUID-VkBufferImageCopy-bufferRowLength-00203",
+          "text": " If the calling command&#8217;s <code>VkImage</code> parameter is a compressed image, <code>bufferRowLength</code> <strong class=\"purple\">must</strong> be a multiple of the compressed texel block width"
+        },
+        {
+          "vuid": "VUID-VkBufferImageCopy-bufferImageHeight-00204",
+          "text": " If the calling command&#8217;s <code>VkImage</code> parameter is a compressed image, <code>bufferImageHeight</code> <strong class=\"purple\">must</strong> be a multiple of the compressed texel block height"
+        },
+        {
+          "vuid": "VUID-VkBufferImageCopy-imageOffset-00205",
+          "text": " If the calling command&#8217;s <code>VkImage</code> parameter is a compressed image, all members of <code>imageOffset</code> <strong class=\"purple\">must</strong> be a multiple of the corresponding dimensions of the compressed texel block"
+        },
+        {
+          "vuid": "VUID-VkBufferImageCopy-bufferOffset-00206",
+          "text": " If the calling command&#8217;s <code>VkImage</code> parameter is a compressed image, <code>bufferOffset</code> <strong class=\"purple\">must</strong> be a multiple of the compressed texel block size in bytes"
+        },
+        {
+          "vuid": "VUID-VkBufferImageCopy-imageExtent-00207",
+          "text": " If the calling command&#8217;s <code>VkImage</code> parameter is a compressed image, <code>imageExtent.width</code> <strong class=\"purple\">must</strong> be a multiple of the compressed texel block width or <span class=\"eq\">(<code>imageExtent.width</code> &#43; <code>imageOffset.x</code>)</span> <strong class=\"purple\">must</strong> equal the image subresource width"
+        },
+        {
+          "vuid": "VUID-VkBufferImageCopy-imageExtent-00208",
+          "text": " If the calling command&#8217;s <code>VkImage</code> parameter is a compressed image, <code>imageExtent.height</code> <strong class=\"purple\">must</strong> be a multiple of the compressed texel block height or <span class=\"eq\">(<code>imageExtent.height</code> &#43; <code>imageOffset.y</code>)</span> <strong class=\"purple\">must</strong> equal the image subresource height"
+        },
+        {
+          "vuid": "VUID-VkBufferImageCopy-imageExtent-00209",
+          "text": " If the calling command&#8217;s <code>VkImage</code> parameter is a compressed image, <code>imageExtent.depth</code> <strong class=\"purple\">must</strong> be a multiple of the compressed texel block depth or <span class=\"eq\">(<code>imageExtent.depth</code> &#43; <code>imageOffset.z</code>)</span> <strong class=\"purple\">must</strong> equal the image subresource depth"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-VkBufferImageCopy-bufferOffset-01558",
+          "text": " If the calling command&#8217;s <code>VkImage</code> parameter&#8217;s format is not a depth/stencil format or a <a href=\"#formats-requiring-sampler-ycbcr-conversion\">multi-planar format</a>, then <code>bufferOffset</code> <strong class=\"purple\">must</strong> be a multiple of the format&#8217;s texel block size."
+        },
+        {
+          "vuid": "VUID-VkBufferImageCopy-bufferOffset-01559",
+          "text": " If the calling command&#8217;s <code>VkImage</code> parameter&#8217;s format is a <a href=\"#formats-requiring-sampler-ycbcr-conversion\">multi-planar format</a>, then <code>bufferOffset</code> <strong class=\"purple\">must</strong> be a multiple of the element size of the compatible format for the format and the <code>aspectMask</code> of the <code>imageSubresource</code> as defined in <a href=\"#formats-compatible-planes\">Compatible formats of planes of multi-planar formats</a>"
+        },
+        {
+          "vuid": "VUID-VkBufferImageCopy-None-01735",
+          "text": " If the calling command&#8217;s <code>VkImage</code> parameter is a compressed image, or a <em>single-plane</em>, &#8220;<code>_422</code>&#8221; image format, <code>bufferRowLength</code> <strong class=\"purple\">must</strong> be a multiple of the compressed texel block width"
+        },
+        {
+          "vuid": "VUID-VkBufferImageCopy-None-01736",
+          "text": " If the calling command&#8217;s <code>VkImage</code> parameter is a compressed image, or a <em>single-plane</em>, &#8220;<code>_422</code>&#8221; image format, <code>bufferImageHeight</code> <strong class=\"purple\">must</strong> be a multiple of the compressed texel block height"
+        },
+        {
+          "vuid": "VUID-VkBufferImageCopy-None-01737",
+          "text": " If the calling command&#8217;s <code>VkImage</code> parameter is a compressed image, or a <em>single-plane</em>, &#8220;<code>_422</code>&#8221; image format, all members of <code>imageOffset</code> <strong class=\"purple\">must</strong> be a multiple of the corresponding dimensions of the compressed texel block"
+        },
+        {
+          "vuid": "VUID-VkBufferImageCopy-None-01738",
+          "text": " If the calling command&#8217;s <code>VkImage</code> parameter is a compressed image, or a <em>single-plane</em>, &#8220;<code>_422</code>&#8221; image format, <code>bufferOffset</code> <strong class=\"purple\">must</strong> be a multiple of the compressed texel block size in bytes"
+        },
+        {
+          "vuid": "VUID-VkBufferImageCopy-None-01739",
+          "text": " If the calling command&#8217;s <code>VkImage</code> parameter is a compressed image, or a <em>single-plane</em>, &#8220;<code>_422</code>&#8221; image format, <code>imageExtent.width</code> <strong class=\"purple\">must</strong> be a multiple of the compressed texel block width or <span class=\"eq\">(<code>imageExtent.width</code> &#43; <code>imageOffset.x</code>)</span> <strong class=\"purple\">must</strong> equal the image subresource width"
+        },
+        {
+          "vuid": "VUID-VkBufferImageCopy-None-01740",
+          "text": " If the calling command&#8217;s <code>VkImage</code> parameter is a compressed image, or a <em>single-plane</em>, &#8220;<code>_422</code>&#8221; image format, <code>imageExtent.height</code> <strong class=\"purple\">must</strong> be a multiple of the compressed texel block height or <span class=\"eq\">(<code>imageExtent.height</code> &#43; <code>imageOffset.y</code>)</span> <strong class=\"purple\">must</strong> equal the image subresource height"
+        },
+        {
+          "vuid": "VUID-VkBufferImageCopy-None-01741",
+          "text": " If the calling command&#8217;s <code>VkImage</code> parameter is a compressed image, or a <em>single-plane</em>, &#8220;<code>_422</code>&#8221; image format, <code>imageExtent.depth</code> <strong class=\"purple\">must</strong> be a multiple of the compressed texel block depth or <span class=\"eq\">(<code>imageExtent.depth</code> &#43; <code>imageOffset.z</code>)</span> <strong class=\"purple\">must</strong> equal the image subresource depth"
+        },
+        {
+          "vuid": "VUID-VkBufferImageCopy-aspectMask-01560",
+          "text": " If the calling command&#8217;s <code>VkImage</code> parameter&#8217;s format is a <a href=\"#formats-requiring-sampler-ycbcr-conversion\">multi-planar format</a>, then the <code>aspectMask</code> member of <code>imageSubresource</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_ASPECT_PLANE_0_BIT</code>, <code>VK_IMAGE_ASPECT_PLANE_1_BIT</code>, or <code>VK_IMAGE_ASPECT_PLANE_2_BIT</code> (with <code>VK_IMAGE_ASPECT_PLANE_2_BIT</code> valid only for image formats with three planes)"
+        }
+      ],
+      "core": [
+        {
+          "vuid": "VUID-VkBufferImageCopy-bufferOffset-00194",
+          "text": " <code>bufferOffset</code> <strong class=\"purple\">must</strong> be a multiple of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferImageCopy-bufferRowLength-00195",
+          "text": " <code>bufferRowLength</code> <strong class=\"purple\">must</strong> be <code>0</code>, or greater than or equal to the <code>width</code> member of <code>imageExtent</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferImageCopy-bufferImageHeight-00196",
+          "text": " <code>bufferImageHeight</code> <strong class=\"purple\">must</strong> be <code>0</code>, or greater than or equal to the <code>height</code> member of <code>imageExtent</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferImageCopy-imageOffset-00197",
+          "text": " <code>imageOffset.x</code> and <span class=\"eq\">(<code>imageExtent.width</code> &#43; <code>imageOffset.x</code>)</span> <strong class=\"purple\">must</strong> both be greater than or equal to <code>0</code> and less than or equal to the image subresource width"
+        },
+        {
+          "vuid": "VUID-VkBufferImageCopy-imageOffset-00198",
+          "text": " <code>imageOffset.y</code> and <span class=\"eq\">(imageExtent.height &#43; <code>imageOffset.y</code>)</span> <strong class=\"purple\">must</strong> both be greater than or equal to <code>0</code> and less than or equal to the image subresource height"
+        },
+        {
+          "vuid": "VUID-VkBufferImageCopy-srcImage-00199",
+          "text": " If the calling command&#8217;s <code>srcImage</code> (<a href=\"#vkCmdCopyImageToBuffer\">vkCmdCopyImageToBuffer</a>) or <code>dstImage</code> (<a href=\"#vkCmdCopyBufferToImage\">vkCmdCopyBufferToImage</a>) is of type <code>VK_IMAGE_TYPE_1D</code>, then <code>imageOffset.y</code> <strong class=\"purple\">must</strong> be <code>0</code> and <code>imageExtent.height</code> <strong class=\"purple\">must</strong> be <code>1</code>."
+        },
+        {
+          "vuid": "VUID-VkBufferImageCopy-imageOffset-00200",
+          "text": " <code>imageOffset.z</code> and <span class=\"eq\">(imageExtent.depth &#43; <code>imageOffset.z</code>)</span> <strong class=\"purple\">must</strong> both be greater than or equal to <code>0</code> and less than or equal to the image subresource depth"
+        },
+        {
+          "vuid": "VUID-VkBufferImageCopy-srcImage-00201",
+          "text": " If the calling command&#8217;s <code>srcImage</code> (<a href=\"#vkCmdCopyImageToBuffer\">vkCmdCopyImageToBuffer</a>) or <code>dstImage</code> (<a href=\"#vkCmdCopyBufferToImage\">vkCmdCopyBufferToImage</a>) is of type <code>VK_IMAGE_TYPE_1D</code> or <code>VK_IMAGE_TYPE_2D</code>, then <code>imageOffset.z</code> <strong class=\"purple\">must</strong> be <code>0</code> and <code>imageExtent.depth</code> <strong class=\"purple\">must</strong> be <code>1</code>"
+        },
+        {
+          "vuid": "VUID-VkBufferImageCopy-aspectMask-00211",
+          "text": " The <code>aspectMask</code> member of <code>imageSubresource</code> <strong class=\"purple\">must</strong> specify aspects present in the calling command&#8217;s <code>VkImage</code> parameter"
+        },
+        {
+          "vuid": "VUID-VkBufferImageCopy-aspectMask-00212",
+          "text": " The <code>aspectMask</code> member of <code>imageSubresource</code> <strong class=\"purple\">must</strong> only have a single bit set"
+        },
+        {
+          "vuid": "VUID-VkBufferImageCopy-baseArrayLayer-00213",
+          "text": " If the calling command&#8217;s <code>VkImage</code> parameter is of <a href=\"#VkImageType\">VkImageType</a> <code>VK_IMAGE_TYPE_3D</code>, the <code>baseArrayLayer</code> and <code>layerCount</code> members of <code>imageSubresource</code> <strong class=\"purple\">must</strong> be <code>0</code> and <code>1</code>, respectively"
+        },
+        {
+          "vuid": "VUID-VkBufferImageCopy-imageSubresource-parameter",
+          "text": " <code>imageSubresource</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageSubresourceLayers\">VkImageSubresourceLayers</a> structure"
+        }
+      ],
+      "!(VK_EXT_depth_range_unrestricted)": [
+        {
+          "vuid": "VUID-VkBufferImageCopy-None-00214",
+          "text": " When copying to the depth aspect of an image subresource, the data in the source buffer <strong class=\"purple\">must</strong> be in the range <span class=\"eq\">[0,1]</span>"
+        }
+      ]
+    },
+    "vkCmdBlitImage": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdBlitImage-pRegions-00215",
+          "text": " The source region specified by each element of <code>pRegions</code> <strong class=\"purple\">must</strong> be a region that is contained within <code>srcImage</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-pRegions-00216",
+          "text": " The destination region specified by each element of <code>pRegions</code> <strong class=\"purple\">must</strong> be a region that is contained within <code>dstImage</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-pRegions-00217",
+          "text": " The union of all destination regions, specified by the elements of <code>pRegions</code>, <strong class=\"purple\">must</strong> not overlap in memory with any texel that <strong class=\"purple\">may</strong> be sampled during the blit operation"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-srcImage-01999",
+          "text": " The <a href=\"#resources-image-format-features\">format features</a> of <code>srcImage</code> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_BLIT_SRC_BIT</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-srcImage-00219",
+          "text": " <code>srcImage</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_TRANSFER_SRC_BIT</code> usage flag"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-srcImage-00220",
+          "text": " If <code>srcImage</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-srcImageLayout-00221",
+          "text": " <code>srcImageLayout</code> <strong class=\"purple\">must</strong> specify the layout of the image subresources of <code>srcImage</code> specified in <code>pRegions</code> at the time this command is executed on a <code>VkDevice</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-dstImage-02000",
+          "text": " The <a href=\"#resources-image-format-features\">format features</a> of <code>dstImage</code> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_BLIT_DST_BIT</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-dstImage-00224",
+          "text": " <code>dstImage</code> <strong class=\"purple\">must</strong> have been created with <code>VK_IMAGE_USAGE_TRANSFER_DST_BIT</code> usage flag"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-dstImage-00225",
+          "text": " If <code>dstImage</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-dstImageLayout-00226",
+          "text": " <code>dstImageLayout</code> <strong class=\"purple\">must</strong> specify the layout of the image subresources of <code>dstImage</code> specified in <code>pRegions</code> at the time this command is executed on a <code>VkDevice</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-srcImage-00228",
+          "text": " The sample count of <code>srcImage</code> and <code>dstImage</code> <strong class=\"purple\">must</strong> both be equal to <code>VK_SAMPLE_COUNT_1_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-srcImage-00229",
+          "text": " If either of <code>srcImage</code> or <code>dstImage</code> was created with a signed integer <a href=\"#VkFormat\">VkFormat</a>, the other <strong class=\"purple\">must</strong> also have been created with a signed integer <a href=\"#VkFormat\">VkFormat</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-srcImage-00230",
+          "text": " If either of <code>srcImage</code> or <code>dstImage</code> was created with an unsigned integer <a href=\"#VkFormat\">VkFormat</a>, the other <strong class=\"purple\">must</strong> also have been created with an unsigned integer <a href=\"#VkFormat\">VkFormat</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-srcImage-00231",
+          "text": " If either of <code>srcImage</code> or <code>dstImage</code> was created with a depth/stencil format, the other <strong class=\"purple\">must</strong> have exactly the same format"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-srcImage-00232",
+          "text": " If <code>srcImage</code> was created with a depth/stencil format, <code>filter</code> <strong class=\"purple\">must</strong> be <code>VK_FILTER_NEAREST</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-srcImage-00233",
+          "text": " <code>srcImage</code> <strong class=\"purple\">must</strong> have been created with a <code>samples</code> value of <code>VK_SAMPLE_COUNT_1_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-dstImage-00234",
+          "text": " <code>dstImage</code> <strong class=\"purple\">must</strong> have been created with a <code>samples</code> value of <code>VK_SAMPLE_COUNT_1_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-filter-02001",
+          "text": " If <code>filter</code> is <code>VK_FILTER_LINEAR</code>, then the <a href=\"#resources-image-format-features\">format features</a> of <code>srcImage</code> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-srcSubresource-01705",
+          "text": " The <code>srcSubresource.mipLevel</code> member of each element of <code>pRegions</code> <strong class=\"purple\">must</strong> be less than the <code>mipLevels</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>srcImage</code> was created"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-dstSubresource-01706",
+          "text": " The <code>dstSubresource.mipLevel</code> member of each element of <code>pRegions</code> <strong class=\"purple\">must</strong> be less than the <code>mipLevels</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>dstImage</code> was created"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-srcSubresource-01707",
+          "text": " The <span class=\"eq\"><code>srcSubresource.baseArrayLayer</code> &#43; <code>srcSubresource.layerCount</code></span> of each element of <code>pRegions</code> <strong class=\"purple\">must</strong> be less than or equal to the <code>arrayLayers</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>srcImage</code> was created"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-dstSubresource-01708",
+          "text": " The <span class=\"eq\"><code>dstSubresource.baseArrayLayer</code> &#43; <code>dstSubresource.layerCount</code></span> of each element of <code>pRegions</code> <strong class=\"purple\">must</strong> be less than or equal to the <code>arrayLayers</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>dstImage</code> was created"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-srcImage-parameter",
+          "text": " <code>srcImage</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImage\">VkImage</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-srcImageLayout-parameter",
+          "text": " <code>srcImageLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageLayout\">VkImageLayout</a> value"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-dstImage-parameter",
+          "text": " <code>dstImage</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImage\">VkImage</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-dstImageLayout-parameter",
+          "text": " <code>dstImageLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageLayout\">VkImageLayout</a> value"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-pRegions-parameter",
+          "text": " <code>pRegions</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>regionCount</code> valid <a href=\"#VkImageBlit\">VkImageBlit</a> structures"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-filter-parameter",
+          "text": " <code>filter</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFilter\">VkFilter</a> value"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called outside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-regionCount-arraylength",
+          "text": " <code>regionCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-commonparent",
+          "text": " Each of <code>commandBuffer</code>, <code>dstImage</code>, and <code>srcImage</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-vkCmdBlitImage-srcImage-01561",
+          "text": " <code>srcImage</code> <strong class=\"purple\">must</strong> not use a format listed in <a href=\"#formats-requiring-sampler-ycbcr-conversion\">Formats requiring sampler Y&#8217;C<sub>B</sub>C<sub>R</sub> conversion for <code>VK_IMAGE_ASPECT_COLOR_BIT</code> image views</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-dstImage-01562",
+          "text": " <code>dstImage</code> <strong class=\"purple\">must</strong> not use a format listed in <a href=\"#formats-requiring-sampler-ycbcr-conversion\">Formats requiring sampler Y&#8217;C<sub>B</sub>C<sub>R</sub> conversion for <code>VK_IMAGE_ASPECT_COLOR_BIT</code> image views</a>"
+        }
+      ],
+      "!(VK_KHR_shared_presentable_image)": [
+        {
+          "vuid": "VUID-vkCmdBlitImage-srcImageLayout-00222",
+          "text": " <code>srcImageLayout</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL</code> or <code>VK_IMAGE_LAYOUT_GENERAL</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-dstImageLayout-00227",
+          "text": " <code>dstImageLayout</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL</code> or <code>VK_IMAGE_LAYOUT_GENERAL</code>"
+        }
+      ],
+      "(VK_KHR_shared_presentable_image)": [
+        {
+          "vuid": "VUID-vkCmdBlitImage-srcImageLayout-01398",
+          "text": " <code>srcImageLayout</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR</code>, <code>VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL</code> or <code>VK_IMAGE_LAYOUT_GENERAL</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-dstImageLayout-01399",
+          "text": " <code>dstImageLayout</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR</code>, <code>VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL</code> or <code>VK_IMAGE_LAYOUT_GENERAL</code>"
+        }
+      ],
+      "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdBlitImage-filter-02002",
+          "text": " If <code>filter</code> is <code>VK_FILTER_CUBIC_EXT</code>, then the <a href=\"#resources-image-format-features\">format features</a> of <code>srcImage</code> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-filter-00237",
+          "text": " If <code>filter</code> is <code>VK_FILTER_CUBIC_EXT</code>, <code>srcImage</code> <strong class=\"purple\">must</strong> have a <a href=\"#VkImageType\">VkImageType</a> of <code>VK_IMAGE_TYPE_2D</code>"
+        }
+      ],
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdBlitImage-commandBuffer-01834",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, then <code>srcImage</code> <strong class=\"purple\">must</strong> not be a protected image"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-commandBuffer-01835",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, then <code>dstImage</code> <strong class=\"purple\">must</strong> not be a protected image"
+        },
+        {
+          "vuid": "VUID-vkCmdBlitImage-commandBuffer-01836",
+          "text": " If <code>commandBuffer</code> is a protected command buffer, then <code>dstImage</code> <strong class=\"purple\">must</strong> not be an unprotected image"
+        }
+      ],
+      "(VK_EXT_fragment_density_map)": [
+        {
+          "vuid": "VUID-vkCmdBlitImage-dstImage-02545",
+          "text": " <code>dstImage</code> and <code>srcImage</code> <strong class=\"purple\">must</strong> not have been created with <code>flags</code> containing <code>VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT</code>"
+        }
+      ]
+    },
+    "VkImageBlit": {
+      "core": [
+        {
+          "vuid": "VUID-VkImageBlit-aspectMask-00238",
+          "text": " The <code>aspectMask</code> member of <code>srcSubresource</code> and <code>dstSubresource</code> <strong class=\"purple\">must</strong> match"
+        },
+        {
+          "vuid": "VUID-VkImageBlit-layerCount-00239",
+          "text": " The <code>layerCount</code> member of <code>srcSubresource</code> and <code>dstSubresource</code> <strong class=\"purple\">must</strong> match"
+        },
+        {
+          "vuid": "VUID-VkImageBlit-srcImage-00240",
+          "text": " If either of the calling command&#8217;s <code>srcImage</code> or <code>dstImage</code> parameters are of <a href=\"#VkImageType\">VkImageType</a> <code>VK_IMAGE_TYPE_3D</code>, the <code>baseArrayLayer</code> and <code>layerCount</code> members of both <code>srcSubresource</code> and <code>dstSubresource</code> <strong class=\"purple\">must</strong> be <code>0</code> and <code>1</code>, respectively"
+        },
+        {
+          "vuid": "VUID-VkImageBlit-aspectMask-00241",
+          "text": " The <code>aspectMask</code> member of <code>srcSubresource</code> <strong class=\"purple\">must</strong> specify aspects present in the calling command&#8217;s <code>srcImage</code>"
+        },
+        {
+          "vuid": "VUID-VkImageBlit-aspectMask-00242",
+          "text": " The <code>aspectMask</code> member of <code>dstSubresource</code> <strong class=\"purple\">must</strong> specify aspects present in the calling command&#8217;s <code>dstImage</code>"
+        },
+        {
+          "vuid": "VUID-VkImageBlit-srcOffset-00243",
+          "text": " <code>srcOffset</code>[0].x and <code>srcOffset</code>[1].x <strong class=\"purple\">must</strong> both be greater than or equal to <code>0</code> and less than or equal to the source image subresource width"
+        },
+        {
+          "vuid": "VUID-VkImageBlit-srcOffset-00244",
+          "text": " <code>srcOffset</code>[0].y and <code>srcOffset</code>[1].y <strong class=\"purple\">must</strong> both be greater than or equal to <code>0</code> and less than or equal to the source image subresource height"
+        },
+        {
+          "vuid": "VUID-VkImageBlit-srcImage-00245",
+          "text": " If the calling command&#8217;s <code>srcImage</code> is of type <code>VK_IMAGE_TYPE_1D</code>, then <code>srcOffset</code>[0].y <strong class=\"purple\">must</strong> be <code>0</code> and <code>srcOffset</code>[1].y <strong class=\"purple\">must</strong> be <code>1</code>."
+        },
+        {
+          "vuid": "VUID-VkImageBlit-srcOffset-00246",
+          "text": " <code>srcOffset</code>[0].z and <code>srcOffset</code>[1].z <strong class=\"purple\">must</strong> both be greater than or equal to <code>0</code> and less than or equal to the source image subresource depth"
+        },
+        {
+          "vuid": "VUID-VkImageBlit-srcImage-00247",
+          "text": " If the calling command&#8217;s <code>srcImage</code> is of type <code>VK_IMAGE_TYPE_1D</code> or <code>VK_IMAGE_TYPE_2D</code>, then <code>srcOffset</code>[0].z <strong class=\"purple\">must</strong> be <code>0</code> and <code>srcOffset</code>[1].z <strong class=\"purple\">must</strong> be <code>1</code>."
+        },
+        {
+          "vuid": "VUID-VkImageBlit-dstOffset-00248",
+          "text": " <code>dstOffset</code>[0].x and <code>dstOffset</code>[1].x <strong class=\"purple\">must</strong> both be greater than or equal to <code>0</code> and less than or equal to the destination image subresource width"
+        },
+        {
+          "vuid": "VUID-VkImageBlit-dstOffset-00249",
+          "text": " <code>dstOffset</code>[0].y and <code>dstOffset</code>[1].y <strong class=\"purple\">must</strong> both be greater than or equal to <code>0</code> and less than or equal to the destination image subresource height"
+        },
+        {
+          "vuid": "VUID-VkImageBlit-dstImage-00250",
+          "text": " If the calling command&#8217;s <code>dstImage</code> is of type <code>VK_IMAGE_TYPE_1D</code>, then <code>dstOffset</code>[0].y <strong class=\"purple\">must</strong> be <code>0</code> and <code>dstOffset</code>[1].y <strong class=\"purple\">must</strong> be <code>1</code>."
+        },
+        {
+          "vuid": "VUID-VkImageBlit-dstOffset-00251",
+          "text": " <code>dstOffset</code>[0].z and <code>dstOffset</code>[1].z <strong class=\"purple\">must</strong> both be greater than or equal to <code>0</code> and less than or equal to the destination image subresource depth"
+        },
+        {
+          "vuid": "VUID-VkImageBlit-dstImage-00252",
+          "text": " If the calling command&#8217;s <code>dstImage</code> is of type <code>VK_IMAGE_TYPE_1D</code> or <code>VK_IMAGE_TYPE_2D</code>, then <code>dstOffset</code>[0].z <strong class=\"purple\">must</strong> be <code>0</code> and <code>dstOffset</code>[1].z <strong class=\"purple\">must</strong> be <code>1</code>."
+        },
+        {
+          "vuid": "VUID-VkImageBlit-srcSubresource-parameter",
+          "text": " <code>srcSubresource</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageSubresourceLayers\">VkImageSubresourceLayers</a> structure"
+        },
+        {
+          "vuid": "VUID-VkImageBlit-dstSubresource-parameter",
+          "text": " <code>dstSubresource</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageSubresourceLayers\">VkImageSubresourceLayers</a> structure"
+        }
+      ]
+    },
+    "vkCmdResolveImage": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdResolveImage-pRegions-00253",
+          "text": " The source region specified by each element of <code>pRegions</code> <strong class=\"purple\">must</strong> be a region that is contained within <code>srcImage</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdResolveImage-pRegions-00254",
+          "text": " The destination region specified by each element of <code>pRegions</code> <strong class=\"purple\">must</strong> be a region that is contained within <code>dstImage</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdResolveImage-pRegions-00255",
+          "text": " The union of all source regions, and the union of all destination regions, specified by the elements of <code>pRegions</code>, <strong class=\"purple\">must</strong> not overlap in memory"
+        },
+        {
+          "vuid": "VUID-vkCmdResolveImage-srcImage-00256",
+          "text": " If <code>srcImage</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdResolveImage-srcImage-00257",
+          "text": " <code>srcImage</code> <strong class=\"purple\">must</strong> have a sample count equal to any valid sample count value other than <code>VK_SAMPLE_COUNT_1_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdResolveImage-dstImage-00258",
+          "text": " If <code>dstImage</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdResolveImage-dstImage-00259",
+          "text": " <code>dstImage</code> <strong class=\"purple\">must</strong> have a sample count equal to <code>VK_SAMPLE_COUNT_1_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdResolveImage-srcImageLayout-00260",
+          "text": " <code>srcImageLayout</code> <strong class=\"purple\">must</strong> specify the layout of the image subresources of <code>srcImage</code> specified in <code>pRegions</code> at the time this command is executed on a <code>VkDevice</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdResolveImage-dstImageLayout-00262",
+          "text": " <code>dstImageLayout</code> <strong class=\"purple\">must</strong> specify the layout of the image subresources of <code>dstImage</code> specified in <code>pRegions</code> at the time this command is executed on a <code>VkDevice</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdResolveImage-dstImage-02003",
+          "text": " The <a href=\"#resources-image-format-features\">format features</a> of <code>dstImage</code> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdResolveImage-srcImage-01386",
+          "text": " <code>srcImage</code> and <code>dstImage</code> <strong class=\"purple\">must</strong> have been created with the same image format"
+        },
+        {
+          "vuid": "VUID-vkCmdResolveImage-srcSubresource-01709",
+          "text": " The <code>srcSubresource.mipLevel</code> member of each element of <code>pRegions</code> <strong class=\"purple\">must</strong> be less than the <code>mipLevels</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>srcImage</code> was created"
+        },
+        {
+          "vuid": "VUID-vkCmdResolveImage-dstSubresource-01710",
+          "text": " The <code>dstSubresource.mipLevel</code> member of each element of <code>pRegions</code> <strong class=\"purple\">must</strong> be less than the <code>mipLevels</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>dstImage</code> was created"
+        },
+        {
+          "vuid": "VUID-vkCmdResolveImage-srcSubresource-01711",
+          "text": " The <span class=\"eq\"><code>srcSubresource.baseArrayLayer</code> &#43; <code>srcSubresource.layerCount</code></span> of each element of <code>pRegions</code> <strong class=\"purple\">must</strong> be less than or equal to the <code>arrayLayers</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>srcImage</code> was created"
+        },
+        {
+          "vuid": "VUID-vkCmdResolveImage-dstSubresource-01712",
+          "text": " The <span class=\"eq\"><code>dstSubresource.baseArrayLayer</code> &#43; <code>dstSubresource.layerCount</code></span> of each element of <code>pRegions</code> <strong class=\"purple\">must</strong> be less than or equal to the <code>arrayLayers</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>dstImage</code> was created"
+        },
+        {
+          "vuid": "VUID-vkCmdResolveImage-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdResolveImage-srcImage-parameter",
+          "text": " <code>srcImage</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImage\">VkImage</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdResolveImage-srcImageLayout-parameter",
+          "text": " <code>srcImageLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageLayout\">VkImageLayout</a> value"
+        },
+        {
+          "vuid": "VUID-vkCmdResolveImage-dstImage-parameter",
+          "text": " <code>dstImage</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImage\">VkImage</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdResolveImage-dstImageLayout-parameter",
+          "text": " <code>dstImageLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageLayout\">VkImageLayout</a> value"
+        },
+        {
+          "vuid": "VUID-vkCmdResolveImage-pRegions-parameter",
+          "text": " <code>pRegions</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>regionCount</code> valid <a href=\"#VkImageResolve\">VkImageResolve</a> structures"
+        },
+        {
+          "vuid": "VUID-vkCmdResolveImage-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdResolveImage-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdResolveImage-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called outside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdResolveImage-regionCount-arraylength",
+          "text": " <code>regionCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdResolveImage-commonparent",
+          "text": " Each of <code>commandBuffer</code>, <code>dstImage</code>, and <code>srcImage</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "!(VK_KHR_shared_presentable_image)": [
+        {
+          "vuid": "VUID-vkCmdResolveImage-srcImageLayout-00261",
+          "text": " <code>srcImageLayout</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL</code> or <code>VK_IMAGE_LAYOUT_GENERAL</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdResolveImage-dstImageLayout-00263",
+          "text": " <code>dstImageLayout</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL</code> or <code>VK_IMAGE_LAYOUT_GENERAL</code>"
+        }
+      ],
+      "(VK_KHR_shared_presentable_image)": [
+        {
+          "vuid": "VUID-vkCmdResolveImage-srcImageLayout-01400",
+          "text": " <code>srcImageLayout</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR</code>, <code>VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL</code> or <code>VK_IMAGE_LAYOUT_GENERAL</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdResolveImage-dstImageLayout-01401",
+          "text": " <code>dstImageLayout</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR</code>, <code>VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL</code> or <code>VK_IMAGE_LAYOUT_GENERAL</code>"
+        }
+      ],
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdResolveImage-commandBuffer-01837",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, then <code>srcImage</code> <strong class=\"purple\">must</strong> not be a protected image"
+        },
+        {
+          "vuid": "VUID-vkCmdResolveImage-commandBuffer-01838",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, then <code>dstImage</code> <strong class=\"purple\">must</strong> not be a protected image"
+        },
+        {
+          "vuid": "VUID-vkCmdResolveImage-commandBuffer-01839",
+          "text": " If <code>commandBuffer</code> is a protected command buffer, then <code>dstImage</code> <strong class=\"purple\">must</strong> not be an unprotected image"
+        }
+      ],
+      "(VK_EXT_fragment_density_map)": [
+        {
+          "vuid": "VUID-vkCmdResolveImage-dstImage-02546",
+          "text": " <code>dstImage</code> and <code>srcImage</code> <strong class=\"purple\">must</strong> not have been created with <code>flags</code> containing <code>VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT</code>"
+        }
+      ]
+    },
+    "VkImageResolve": {
+      "core": [
+        {
+          "vuid": "VUID-VkImageResolve-aspectMask-00266",
+          "text": " The <code>aspectMask</code> member of <code>srcSubresource</code> and <code>dstSubresource</code> <strong class=\"purple\">must</strong> only contain <code>VK_IMAGE_ASPECT_COLOR_BIT</code>"
+        },
+        {
+          "vuid": "VUID-VkImageResolve-layerCount-00267",
+          "text": " The <code>layerCount</code> member of <code>srcSubresource</code> and <code>dstSubresource</code> <strong class=\"purple\">must</strong> match"
+        },
+        {
+          "vuid": "VUID-VkImageResolve-srcImage-00268",
+          "text": " If either of the calling command&#8217;s <code>srcImage</code> or <code>dstImage</code> parameters are of <a href=\"#VkImageType\">VkImageType</a> <code>VK_IMAGE_TYPE_3D</code>, the <code>baseArrayLayer</code> and <code>layerCount</code> members of both <code>srcSubresource</code> and <code>dstSubresource</code> <strong class=\"purple\">must</strong> be <code>0</code> and <code>1</code>, respectively"
+        },
+        {
+          "vuid": "VUID-VkImageResolve-srcOffset-00269",
+          "text": " <code>srcOffset.x</code> and <span class=\"eq\">(<code>extent.width</code> &#43; <code>srcOffset.x</code>)</span> <strong class=\"purple\">must</strong> both be greater than or equal to <code>0</code> and less than or equal to the source image subresource width"
+        },
+        {
+          "vuid": "VUID-VkImageResolve-srcOffset-00270",
+          "text": " <code>srcOffset.y</code> and <span class=\"eq\">(<code>extent.height</code> &#43; <code>srcOffset.y</code>)</span> <strong class=\"purple\">must</strong> both be greater than or equal to <code>0</code> and less than or equal to the source image subresource height"
+        },
+        {
+          "vuid": "VUID-VkImageResolve-srcImage-00271",
+          "text": " If the calling command&#8217;s <code>srcImage</code> is of type <code>VK_IMAGE_TYPE_1D</code>, then <code>srcOffset.y</code> <strong class=\"purple\">must</strong> be <code>0</code> and <code>extent.height</code> <strong class=\"purple\">must</strong> be <code>1</code>."
+        },
+        {
+          "vuid": "VUID-VkImageResolve-srcOffset-00272",
+          "text": " <code>srcOffset.z</code> and <span class=\"eq\">(<code>extent.depth</code> &#43; <code>srcOffset.z</code>)</span> <strong class=\"purple\">must</strong> both be greater than or equal to <code>0</code> and less than or equal to the source image subresource depth"
+        },
+        {
+          "vuid": "VUID-VkImageResolve-srcImage-00273",
+          "text": " If the calling command&#8217;s <code>srcImage</code> is of type <code>VK_IMAGE_TYPE_1D</code> or <code>VK_IMAGE_TYPE_2D</code>, then <code>srcOffset.z</code> <strong class=\"purple\">must</strong> be <code>0</code> and <code>extent.depth</code> <strong class=\"purple\">must</strong> be <code>1</code>."
+        },
+        {
+          "vuid": "VUID-VkImageResolve-dstOffset-00274",
+          "text": " <code>dstOffset.x</code> and <span class=\"eq\">(<code>extent.width</code> &#43; <code>dstOffset.x</code>)</span> <strong class=\"purple\">must</strong> both be greater than or equal to <code>0</code> and less than or equal to the destination image subresource width"
+        },
+        {
+          "vuid": "VUID-VkImageResolve-dstOffset-00275",
+          "text": " <code>dstOffset.y</code> and <span class=\"eq\">(<code>extent.height</code> &#43; <code>dstOffset.y</code>)</span> <strong class=\"purple\">must</strong> both be greater than or equal to <code>0</code> and less than or equal to the destination image subresource height"
+        },
+        {
+          "vuid": "VUID-VkImageResolve-dstImage-00276",
+          "text": " If the calling command&#8217;s <code>dstImage</code> is of type <code>VK_IMAGE_TYPE_1D</code>, then <code>dstOffset.y</code> <strong class=\"purple\">must</strong> be <code>0</code> and <code>extent.height</code> <strong class=\"purple\">must</strong> be <code>1</code>."
+        },
+        {
+          "vuid": "VUID-VkImageResolve-dstOffset-00277",
+          "text": " <code>dstOffset.z</code> and <span class=\"eq\">(<code>extent.depth</code> &#43; <code>dstOffset.z</code>)</span> <strong class=\"purple\">must</strong> both be greater than or equal to <code>0</code> and less than or equal to the destination image subresource depth"
+        },
+        {
+          "vuid": "VUID-VkImageResolve-dstImage-00278",
+          "text": " If the calling command&#8217;s <code>dstImage</code> is of type <code>VK_IMAGE_TYPE_1D</code> or <code>VK_IMAGE_TYPE_2D</code>, then <code>dstOffset.z</code> <strong class=\"purple\">must</strong> be <code>0</code> and <code>extent.depth</code> <strong class=\"purple\">must</strong> be <code>1</code>."
+        },
+        {
+          "vuid": "VUID-VkImageResolve-srcSubresource-parameter",
+          "text": " <code>srcSubresource</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageSubresourceLayers\">VkImageSubresourceLayers</a> structure"
+        },
+        {
+          "vuid": "VUID-VkImageResolve-dstSubresource-parameter",
+          "text": " <code>dstSubresource</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageSubresourceLayers\">VkImageSubresourceLayers</a> structure"
+        }
+      ]
+    },
+    "vkCmdWriteBufferMarkerAMD": {
+      "(VK_AMD_buffer_marker)": [
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarkerAMD-dstOffset-01798",
+          "text": " <code>dstOffset</code> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>dstBuffer</code> minus <code>4</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarkerAMD-dstBuffer-01799",
+          "text": " <code>dstBuffer</code> <strong class=\"purple\">must</strong> have been created with <code>VK_BUFFER_USAGE_TRANSFER_DST_BIT</code> usage flag"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarkerAMD-dstBuffer-01800",
+          "text": " If <code>dstBuffer</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarkerAMD-dstOffset-01801",
+          "text": " <code>dstOffset</code> <strong class=\"purple\">must</strong> be a multiple of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarkerAMD-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarkerAMD-pipelineStage-parameter",
+          "text": " <code>pipelineStage</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipelineStageFlagBits\">VkPipelineStageFlagBits</a> value"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarkerAMD-dstBuffer-parameter",
+          "text": " <code>dstBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarkerAMD-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarkerAMD-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support transfer, graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteBufferMarkerAMD-commonparent",
+          "text": " Both of <code>commandBuffer</code>, and <code>dstBuffer</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
+    "VkPipelineInputAssemblyStateCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00428",
+          "text": " If <code>topology</code> is <code>VK_PRIMITIVE_TOPOLOGY_POINT_LIST</code>, <code>VK_PRIMITIVE_TOPOLOGY_LINE_LIST</code>, <code>VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST</code>, <code>VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY</code>, <code>VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY</code> or <code>VK_PRIMITIVE_TOPOLOGY_PATCH_LIST</code>, <code>primitiveRestartEnable</code> <strong class=\"purple\">must</strong> be <code>VK_FALSE</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00429",
+          "text": " If the <a href=\"#features-geometryShader\">geometry shaders</a> feature is not enabled, <code>topology</code> <strong class=\"purple\">must</strong> not be any of <code>VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY</code>, <code>VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY</code>, <code>VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY</code> or <code>VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00430",
+          "text": " If the <a href=\"#features-tessellationShader\">tessellation shaders</a> feature is not enabled, <code>topology</code> <strong class=\"purple\">must</strong> not be <code>VK_PRIMITIVE_TOPOLOGY_PATCH_LIST</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineInputAssemblyStateCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineInputAssemblyStateCreateInfo-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineInputAssemblyStateCreateInfo-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-parameter",
+          "text": " <code>topology</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPrimitiveTopology\">VkPrimitiveTopology</a> value"
+        }
+      ]
+    },
+    "vkCmdBindIndexBuffer": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdBindIndexBuffer-offset-00431",
+          "text": " <code>offset</code> <strong class=\"purple\">must</strong> be less than the size of <code>buffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBindIndexBuffer-offset-00432",
+          "text": " The sum of <code>offset</code> and the address of the range of <code>VkDeviceMemory</code> object that is backing <code>buffer</code>, <strong class=\"purple\">must</strong> be a multiple of the type indicated by <code>indexType</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBindIndexBuffer-buffer-00433",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> have been created with the <code>VK_BUFFER_USAGE_INDEX_BUFFER_BIT</code> flag"
+        },
+        {
+          "vuid": "VUID-vkCmdBindIndexBuffer-buffer-00434",
+          "text": " If <code>buffer</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdBindIndexBuffer-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdBindIndexBuffer-buffer-parameter",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdBindIndexBuffer-indexType-parameter",
+          "text": " <code>indexType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkIndexType\">VkIndexType</a> value"
+        },
+        {
+          "vuid": "VUID-vkCmdBindIndexBuffer-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdBindIndexBuffer-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdBindIndexBuffer-commonparent",
+          "text": " Both of <code>buffer</code>, and <code>commandBuffer</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-vkCmdBindIndexBuffer-indexType-02507",
+          "text": " <code>indexType</code> <strong class=\"purple\">must</strong> not be <code>VK_INDEX_TYPE_NONE_NV</code>."
+        }
+      ],
+      "(VK_EXT_index_type_uint8)": [
+        {
+          "vuid": "VUID-vkCmdBindIndexBuffer-indexType-02765",
+          "text": " If <code>indexType</code> is <code>VK_INDEX_TYPE_UINT8_EXT</code>, the <a href=\"#features-indexTypeUint8\">indexTypeUint8</a> feature <strong class=\"purple\">must</strong> be enabled"
+        }
+      ]
+    },
+    "vkCmdDraw": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdDraw-None-02690",
+          "text": " If a <code>VkImageView</code> is sampled with <code>VK_FILTER_LINEAR</code> as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDraw-None-02691",
+          "text": " If a <code>VkImageView</code> is accessed using atomic operations as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDraw-None-02697",
+          "text": " For each set <em>n</em> that is statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command, a descriptor set <strong class=\"purple\">must</strong> have been bound to <em>n</em> at the same pipeline bind point, with a <code>VkPipelineLayout</code> that is compatible for set <em>n</em>, with the <code>VkPipelineLayout</code> used to create the current <code>VkPipeline</code>, as described in <a href=\"#descriptorsets-compatibility\">Pipeline Layout Compatibility</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDraw-None-02698",
+          "text": " For each push constant that is statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command, a push constant value <strong class=\"purple\">must</strong> have been set for the same pipeline bind point, with a <code>VkPipelineLayout</code> that is compatible for push constants, with the <code>VkPipelineLayout</code> used to create the current <code>VkPipeline</code>, as described in <a href=\"#descriptorsets-compatibility\">Pipeline Layout Compatibility</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDraw-None-02699",
+          "text": " Descriptors in each bound descriptor set, specified via <code>vkCmdBindDescriptorSets</code>, <strong class=\"purple\">must</strong> be valid if they are statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command"
+        },
+        {
+          "vuid": "VUID-vkCmdDraw-None-02700",
+          "text": " A valid pipeline <strong class=\"purple\">must</strong> be bound to the pipeline bind point used by this command"
+        },
+        {
+          "vuid": "VUID-vkCmdDraw-commandBuffer-02701",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command requires any dynamic state, that state <strong class=\"purple\">must</strong> have been set for <code>commandBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDraw-None-02702",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used to sample from any <code>VkImage</code> with a <code>VkImageView</code> of the type <code>VK_IMAGE_VIEW_TYPE_3D</code>, <code>VK_IMAGE_VIEW_TYPE_CUBE</code>, <code>VK_IMAGE_VIEW_TYPE_1D_ARRAY</code>, <code>VK_IMAGE_VIEW_TYPE_2D_ARRAY</code> or <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDraw-None-02703",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used with any of the SPIR-V <code>OpImageSample*</code> or <code>OpImageSparseSample*</code> instructions with <code>ImplicitLod</code>, <code>Dref</code> or <code>Proj</code> in their name, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDraw-None-02704",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used with any of the SPIR-V <code>OpImageSample*</code> or <code>OpImageSparseSample*</code> instructions that includes a LOD bias or any offset values, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDraw-None-02705",
+          "text": " If the <a href=\"#features-robustBufferAccess\">robust buffer access</a> feature is not enabled, and if the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a uniform buffer, it <strong class=\"purple\">must</strong> not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point"
+        },
+        {
+          "vuid": "VUID-vkCmdDraw-None-02706",
+          "text": " If the <a href=\"#features-robustBufferAccess\">robust buffer access</a> feature is not enabled, and if the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a storage buffer, it <strong class=\"purple\">must</strong> not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point"
+        },
+        {
+          "vuid": "VUID-vkCmdDraw-renderPass-02684",
+          "text": " The current render pass <strong class=\"purple\">must</strong> be <a href=\"#renderpass-compatibility\">compatible</a> with the <code>renderPass</code> member of the <code>VkGraphicsPipelineCreateInfo</code> structure specified when creating the <code>VkPipeline</code> bound to <code>VK_PIPELINE_BIND_POINT_GRAPHICS</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdDraw-subpass-02685",
+          "text": " The subpass index of the current render pass <strong class=\"purple\">must</strong> be equal to the <code>subpass</code> member of the <code>VkGraphicsPipelineCreateInfo</code> structure specified when creating the <code>VkPipeline</code> bound to <code>VK_PIPELINE_BIND_POINT_GRAPHICS</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdDraw-None-02686",
+          "text": " Every input attachment used by the current subpass <strong class=\"purple\">must</strong> be bound to the pipeline via a descriptor set"
+        },
+        {
+          "vuid": "VUID-vkCmdDraw-None-02687",
+          "text": " Image subresources used as attachments in the current render pass <strong class=\"purple\">must</strong> not be accessed in any way other than as an attachment by this command."
+        },
+        {
+          "vuid": "VUID-vkCmdDraw-None-02720",
+          "text": " All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point&#8217;s interface <strong class=\"purple\">must</strong> have valid buffers bound"
+        },
+        {
+          "vuid": "VUID-vkCmdDraw-None-02721",
+          "text": " For a given vertex buffer binding, any attribute data fetched <strong class=\"purple\">must</strong> be entirely contained within the corresponding vertex buffer binding, as described in <a href=\"#fxvertex-input\">Vertex Input Description</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDraw-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdDraw-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDraw-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdDraw-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called inside of a render pass instance"
+        }
+      ],
+      "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDraw-None-02692",
+          "text": " If a <code>VkImageView</code> is sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT</code>"
+        }
+      ],
+      "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDraw-None-02693",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command <strong class=\"purple\">must</strong> not have a <a href=\"#VkImageViewType\">VkImageViewType</a> of <code>VK_IMAGE_VIEW_TYPE_3D</code>, <code>VK_IMAGE_VIEW_TYPE_CUBE</code>, or <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>"
+        }
+      ],
+      "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDraw-filterCubic-02694",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command <strong class=\"purple\">must</strong> have a <a href=\"#VkImageViewType\">VkImageViewType</a> and format that supports cubic filtering, as specified by <code>VkFilterCubicImageViewImageFormatPropertiesEXT</code>::<code>filterCubic</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties2</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDraw-filterCubicMinmax-02695",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> with a reduction mode of either <code>VK_SAMPLER_REDUCTION_MODE_MIN_EXT</code> or <code>VK_SAMPLER_REDUCTION_MODE_MAX_EXT</code> as a result of this command <strong class=\"purple\">must</strong> have a <a href=\"#VkImageViewType\">VkImageViewType</a> and format that supports cubic filtering together with minmax filtering, as specified by <code>VkFilterCubicImageViewImageFormatPropertiesEXT</code>::<code>filterCubicMinmax</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties2</code>"
+        }
+      ],
+      "(VK_NV_corner_sampled_image)": [
+        {
+          "vuid": "VUID-vkCmdDraw-flags-02696",
+          "text": " Any <a href=\"#VkImage\">VkImage</a> created with a <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a>::<code>flags</code> containing <code>VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV</code> sampled as a result of this command <strong class=\"purple\">must</strong> only be sampled using a <a href=\"#VkSamplerAddressMode\">VkSamplerAddressMode</a> of <code>VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE</code>."
+        }
+      ],
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdDraw-commandBuffer-02707",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, any resource accessed by the <code>VkPipeline</code> object bound to the pipeline bind point used by this command <strong class=\"purple\">must</strong> not be a protected resource"
+        },
+        {
+          "vuid": "VUID-vkCmdDraw-commandBuffer-02712",
+          "text": " If <code>commandBuffer</code> is a protected command buffer, any resource written to by the <code>VkPipeline</code> object bound to the pipeline bind point used by this command <strong class=\"purple\">must</strong> not be an unprotected resource"
+        },
+        {
+          "vuid": "VUID-vkCmdDraw-commandBuffer-02713",
+          "text": " If <code>commandBuffer</code> is a protected command buffer, pipeline stages other than the framebuffer-space and compute stages in the <code>VkPipeline</code> object bound to the pipeline bind point <strong class=\"purple\">must</strong> not write to any resource"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-vkCmdDraw-maxMultiviewInstanceIndex-02688",
+          "text": " If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index <strong class=\"purple\">must</strong> be less than or equal to <a href=\"#VkPhysicalDeviceMultiviewProperties\">VkPhysicalDeviceMultiviewProperties</a>::<code>maxMultiviewInstanceIndex</code>."
+        }
+      ],
+      "(VK_EXT_sample_locations)": [
+        {
+          "vuid": "VUID-vkCmdDraw-sampleLocationsEnable-02689",
+          "text": " If the bound graphics pipeline was created with <a href=\"#VkPipelineSampleLocationsStateCreateInfoEXT\">VkPipelineSampleLocationsStateCreateInfoEXT</a>::<code>sampleLocationsEnable</code> set to <code>VK_TRUE</code> and the current subpass has a depth/stencil attachment, then that attachment <strong class=\"purple\">must</strong> have been created with the <code>VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT</code> bit set"
+        }
+      ]
+    },
+    "vkCmdDrawIndexed": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-None-02690",
+          "text": " If a <code>VkImageView</code> is sampled with <code>VK_FILTER_LINEAR</code> as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-None-02691",
+          "text": " If a <code>VkImageView</code> is accessed using atomic operations as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-None-02697",
+          "text": " For each set <em>n</em> that is statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command, a descriptor set <strong class=\"purple\">must</strong> have been bound to <em>n</em> at the same pipeline bind point, with a <code>VkPipelineLayout</code> that is compatible for set <em>n</em>, with the <code>VkPipelineLayout</code> used to create the current <code>VkPipeline</code>, as described in <a href=\"#descriptorsets-compatibility\">Pipeline Layout Compatibility</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-None-02698",
+          "text": " For each push constant that is statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command, a push constant value <strong class=\"purple\">must</strong> have been set for the same pipeline bind point, with a <code>VkPipelineLayout</code> that is compatible for push constants, with the <code>VkPipelineLayout</code> used to create the current <code>VkPipeline</code>, as described in <a href=\"#descriptorsets-compatibility\">Pipeline Layout Compatibility</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-None-02699",
+          "text": " Descriptors in each bound descriptor set, specified via <code>vkCmdBindDescriptorSets</code>, <strong class=\"purple\">must</strong> be valid if they are statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-None-02700",
+          "text": " A valid pipeline <strong class=\"purple\">must</strong> be bound to the pipeline bind point used by this command"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-commandBuffer-02701",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command requires any dynamic state, that state <strong class=\"purple\">must</strong> have been set for <code>commandBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-None-02702",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used to sample from any <code>VkImage</code> with a <code>VkImageView</code> of the type <code>VK_IMAGE_VIEW_TYPE_3D</code>, <code>VK_IMAGE_VIEW_TYPE_CUBE</code>, <code>VK_IMAGE_VIEW_TYPE_1D_ARRAY</code>, <code>VK_IMAGE_VIEW_TYPE_2D_ARRAY</code> or <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-None-02703",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used with any of the SPIR-V <code>OpImageSample*</code> or <code>OpImageSparseSample*</code> instructions with <code>ImplicitLod</code>, <code>Dref</code> or <code>Proj</code> in their name, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-None-02704",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used with any of the SPIR-V <code>OpImageSample*</code> or <code>OpImageSparseSample*</code> instructions that includes a LOD bias or any offset values, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-None-02705",
+          "text": " If the <a href=\"#features-robustBufferAccess\">robust buffer access</a> feature is not enabled, and if the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a uniform buffer, it <strong class=\"purple\">must</strong> not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-None-02706",
+          "text": " If the <a href=\"#features-robustBufferAccess\">robust buffer access</a> feature is not enabled, and if the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a storage buffer, it <strong class=\"purple\">must</strong> not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-renderPass-02684",
+          "text": " The current render pass <strong class=\"purple\">must</strong> be <a href=\"#renderpass-compatibility\">compatible</a> with the <code>renderPass</code> member of the <code>VkGraphicsPipelineCreateInfo</code> structure specified when creating the <code>VkPipeline</code> bound to <code>VK_PIPELINE_BIND_POINT_GRAPHICS</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-subpass-02685",
+          "text": " The subpass index of the current render pass <strong class=\"purple\">must</strong> be equal to the <code>subpass</code> member of the <code>VkGraphicsPipelineCreateInfo</code> structure specified when creating the <code>VkPipeline</code> bound to <code>VK_PIPELINE_BIND_POINT_GRAPHICS</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-None-02686",
+          "text": " Every input attachment used by the current subpass <strong class=\"purple\">must</strong> be bound to the pipeline via a descriptor set"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-None-02687",
+          "text": " Image subresources used as attachments in the current render pass <strong class=\"purple\">must</strong> not be accessed in any way other than as an attachment by this command."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-None-02720",
+          "text": " All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point&#8217;s interface <strong class=\"purple\">must</strong> have valid buffers bound"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-None-02721",
+          "text": " For a given vertex buffer binding, any attribute data fetched <strong class=\"purple\">must</strong> be entirely contained within the corresponding vertex buffer binding, as described in <a href=\"#fxvertex-input\">Vertex Input Description</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-indexSize-00463",
+          "text": " <span class=\"eq\">(<code>indexSize</code> * (<code>firstIndex</code> &#43; <code>indexCount</code>) &#43; <code>offset</code>)</span> <strong class=\"purple\">must</strong> be less than or equal to the size of the bound index buffer, with <code>indexSize</code> being based on the type specified by <code>indexType</code>, where the index buffer, <code>indexType</code>, and <code>offset</code> are specified via <code>vkCmdBindIndexBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called inside of a render pass instance"
+        }
+      ],
+      "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-None-02692",
+          "text": " If a <code>VkImageView</code> is sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT</code>"
+        }
+      ],
+      "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-None-02693",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command <strong class=\"purple\">must</strong> not have a <a href=\"#VkImageViewType\">VkImageViewType</a> of <code>VK_IMAGE_VIEW_TYPE_3D</code>, <code>VK_IMAGE_VIEW_TYPE_CUBE</code>, or <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>"
+        }
+      ],
+      "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-filterCubic-02694",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command <strong class=\"purple\">must</strong> have a <a href=\"#VkImageViewType\">VkImageViewType</a> and format that supports cubic filtering, as specified by <code>VkFilterCubicImageViewImageFormatPropertiesEXT</code>::<code>filterCubic</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties2</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-filterCubicMinmax-02695",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> with a reduction mode of either <code>VK_SAMPLER_REDUCTION_MODE_MIN_EXT</code> or <code>VK_SAMPLER_REDUCTION_MODE_MAX_EXT</code> as a result of this command <strong class=\"purple\">must</strong> have a <a href=\"#VkImageViewType\">VkImageViewType</a> and format that supports cubic filtering together with minmax filtering, as specified by <code>VkFilterCubicImageViewImageFormatPropertiesEXT</code>::<code>filterCubicMinmax</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties2</code>"
+        }
+      ],
+      "(VK_NV_corner_sampled_image)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-flags-02696",
+          "text": " Any <a href=\"#VkImage\">VkImage</a> created with a <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a>::<code>flags</code> containing <code>VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV</code> sampled as a result of this command <strong class=\"purple\">must</strong> only be sampled using a <a href=\"#VkSamplerAddressMode\">VkSamplerAddressMode</a> of <code>VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE</code>."
+        }
+      ],
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-commandBuffer-02707",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, any resource accessed by the <code>VkPipeline</code> object bound to the pipeline bind point used by this command <strong class=\"purple\">must</strong> not be a protected resource"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-commandBuffer-02712",
+          "text": " If <code>commandBuffer</code> is a protected command buffer, any resource written to by the <code>VkPipeline</code> object bound to the pipeline bind point used by this command <strong class=\"purple\">must</strong> not be an unprotected resource"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-commandBuffer-02713",
+          "text": " If <code>commandBuffer</code> is a protected command buffer, pipeline stages other than the framebuffer-space and compute stages in the <code>VkPipeline</code> object bound to the pipeline bind point <strong class=\"purple\">must</strong> not write to any resource"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-maxMultiviewInstanceIndex-02688",
+          "text": " If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index <strong class=\"purple\">must</strong> be less than or equal to <a href=\"#VkPhysicalDeviceMultiviewProperties\">VkPhysicalDeviceMultiviewProperties</a>::<code>maxMultiviewInstanceIndex</code>."
+        }
+      ],
+      "(VK_EXT_sample_locations)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndexed-sampleLocationsEnable-02689",
+          "text": " If the bound graphics pipeline was created with <a href=\"#VkPipelineSampleLocationsStateCreateInfoEXT\">VkPipelineSampleLocationsStateCreateInfoEXT</a>::<code>sampleLocationsEnable</code> set to <code>VK_TRUE</code> and the current subpass has a depth/stencil attachment, then that attachment <strong class=\"purple\">must</strong> have been created with the <code>VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT</code> bit set"
+        }
+      ]
+    },
+    "vkCmdDrawIndirect": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-None-02690",
+          "text": " If a <code>VkImageView</code> is sampled with <code>VK_FILTER_LINEAR</code> as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-None-02691",
+          "text": " If a <code>VkImageView</code> is accessed using atomic operations as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-None-02697",
+          "text": " For each set <em>n</em> that is statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command, a descriptor set <strong class=\"purple\">must</strong> have been bound to <em>n</em> at the same pipeline bind point, with a <code>VkPipelineLayout</code> that is compatible for set <em>n</em>, with the <code>VkPipelineLayout</code> used to create the current <code>VkPipeline</code>, as described in <a href=\"#descriptorsets-compatibility\">Pipeline Layout Compatibility</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-None-02698",
+          "text": " For each push constant that is statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command, a push constant value <strong class=\"purple\">must</strong> have been set for the same pipeline bind point, with a <code>VkPipelineLayout</code> that is compatible for push constants, with the <code>VkPipelineLayout</code> used to create the current <code>VkPipeline</code>, as described in <a href=\"#descriptorsets-compatibility\">Pipeline Layout Compatibility</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-None-02699",
+          "text": " Descriptors in each bound descriptor set, specified via <code>vkCmdBindDescriptorSets</code>, <strong class=\"purple\">must</strong> be valid if they are statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-None-02700",
+          "text": " A valid pipeline <strong class=\"purple\">must</strong> be bound to the pipeline bind point used by this command"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-commandBuffer-02701",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command requires any dynamic state, that state <strong class=\"purple\">must</strong> have been set for <code>commandBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-None-02702",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used to sample from any <code>VkImage</code> with a <code>VkImageView</code> of the type <code>VK_IMAGE_VIEW_TYPE_3D</code>, <code>VK_IMAGE_VIEW_TYPE_CUBE</code>, <code>VK_IMAGE_VIEW_TYPE_1D_ARRAY</code>, <code>VK_IMAGE_VIEW_TYPE_2D_ARRAY</code> or <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-None-02703",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used with any of the SPIR-V <code>OpImageSample*</code> or <code>OpImageSparseSample*</code> instructions with <code>ImplicitLod</code>, <code>Dref</code> or <code>Proj</code> in their name, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-None-02704",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used with any of the SPIR-V <code>OpImageSample*</code> or <code>OpImageSparseSample*</code> instructions that includes a LOD bias or any offset values, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-None-02705",
+          "text": " If the <a href=\"#features-robustBufferAccess\">robust buffer access</a> feature is not enabled, and if the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a uniform buffer, it <strong class=\"purple\">must</strong> not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-None-02706",
+          "text": " If the <a href=\"#features-robustBufferAccess\">robust buffer access</a> feature is not enabled, and if the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a storage buffer, it <strong class=\"purple\">must</strong> not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-renderPass-02684",
+          "text": " The current render pass <strong class=\"purple\">must</strong> be <a href=\"#renderpass-compatibility\">compatible</a> with the <code>renderPass</code> member of the <code>VkGraphicsPipelineCreateInfo</code> structure specified when creating the <code>VkPipeline</code> bound to <code>VK_PIPELINE_BIND_POINT_GRAPHICS</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-subpass-02685",
+          "text": " The subpass index of the current render pass <strong class=\"purple\">must</strong> be equal to the <code>subpass</code> member of the <code>VkGraphicsPipelineCreateInfo</code> structure specified when creating the <code>VkPipeline</code> bound to <code>VK_PIPELINE_BIND_POINT_GRAPHICS</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-None-02686",
+          "text": " Every input attachment used by the current subpass <strong class=\"purple\">must</strong> be bound to the pipeline via a descriptor set"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-None-02687",
+          "text": " Image subresources used as attachments in the current render pass <strong class=\"purple\">must</strong> not be accessed in any way other than as an attachment by this command."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-None-02720",
+          "text": " All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point&#8217;s interface <strong class=\"purple\">must</strong> have valid buffers bound"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-None-02721",
+          "text": " For a given vertex buffer binding, any attribute data fetched <strong class=\"purple\">must</strong> be entirely contained within the corresponding vertex buffer binding, as described in <a href=\"#fxvertex-input\">Vertex Input Description</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-buffer-02708",
+          "text": " If <code>buffer</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-buffer-02709",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> have been created with the <code>VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT</code> bit set"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-offset-02710",
+          "text": " <code>offset</code> <strong class=\"purple\">must</strong> be a multiple of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-drawCount-02718",
+          "text": " If the <a href=\"#features-multiDrawIndirect\">multi-draw indirect</a> feature is not enabled, <code>drawCount</code> <strong class=\"purple\">must</strong> be <code>0</code> or <code>1</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-drawCount-02719",
+          "text": " <code>drawCount</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxDrawIndirectCount</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-firstInstance-00478",
+          "text": " If the <a href=\"#features-drawIndirectFirstInstance\">drawIndirectFirstInstance</a> feature is not enabled, all the <code>firstInstance</code> members of the <code>VkDrawIndirectCommand</code> structures accessed by this command <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-drawCount-00476",
+          "text": " If <code>drawCount</code> is greater than <code>1</code>, <code>stride</code> <strong class=\"purple\">must</strong> be a multiple of <code>4</code> and <strong class=\"purple\">must</strong> be greater than or equal to <code>sizeof</code>(<code>VkDrawIndirectCommand</code>)"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-drawCount-00487",
+          "text": " If <code>drawCount</code> is equal to <code>1</code>, <span class=\"eq\">(<code>offset</code> &#43; <code>sizeof</code>(<a href=\"#VkDrawIndirectCommand\">VkDrawIndirectCommand</a>))</span> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>buffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-drawCount-00488",
+          "text": " If <code>drawCount</code> is greater than <code>1</code>, <span class=\"eq\">(<code>stride</code> {times} (<code>drawCount</code> - 1) &#43; <code>offset</code> &#43; <code>sizeof</code>(<a href=\"#VkDrawIndirectCommand\">VkDrawIndirectCommand</a>))</span> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>buffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-buffer-parameter",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called inside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-commonparent",
+          "text": " Both of <code>buffer</code>, and <code>commandBuffer</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-None-02692",
+          "text": " If a <code>VkImageView</code> is sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT</code>"
+        }
+      ],
+      "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-None-02693",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command <strong class=\"purple\">must</strong> not have a <a href=\"#VkImageViewType\">VkImageViewType</a> of <code>VK_IMAGE_VIEW_TYPE_3D</code>, <code>VK_IMAGE_VIEW_TYPE_CUBE</code>, or <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>"
+        }
+      ],
+      "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-filterCubic-02694",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command <strong class=\"purple\">must</strong> have a <a href=\"#VkImageViewType\">VkImageViewType</a> and format that supports cubic filtering, as specified by <code>VkFilterCubicImageViewImageFormatPropertiesEXT</code>::<code>filterCubic</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties2</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-filterCubicMinmax-02695",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> with a reduction mode of either <code>VK_SAMPLER_REDUCTION_MODE_MIN_EXT</code> or <code>VK_SAMPLER_REDUCTION_MODE_MAX_EXT</code> as a result of this command <strong class=\"purple\">must</strong> have a <a href=\"#VkImageViewType\">VkImageViewType</a> and format that supports cubic filtering together with minmax filtering, as specified by <code>VkFilterCubicImageViewImageFormatPropertiesEXT</code>::<code>filterCubicMinmax</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties2</code>"
+        }
+      ],
+      "(VK_NV_corner_sampled_image)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-flags-02696",
+          "text": " Any <a href=\"#VkImage\">VkImage</a> created with a <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a>::<code>flags</code> containing <code>VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV</code> sampled as a result of this command <strong class=\"purple\">must</strong> only be sampled using a <a href=\"#VkSamplerAddressMode\">VkSamplerAddressMode</a> of <code>VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE</code>."
+        }
+      ],
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-commandBuffer-02707",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, any resource accessed by the <code>VkPipeline</code> object bound to the pipeline bind point used by this command <strong class=\"purple\">must</strong> not be a protected resource"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-commandBuffer-02711",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> not be a protected command buffer"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-maxMultiviewInstanceIndex-02688",
+          "text": " If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index <strong class=\"purple\">must</strong> be less than or equal to <a href=\"#VkPhysicalDeviceMultiviewProperties\">VkPhysicalDeviceMultiviewProperties</a>::<code>maxMultiviewInstanceIndex</code>."
+        }
+      ],
+      "(VK_EXT_sample_locations)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndirect-sampleLocationsEnable-02689",
+          "text": " If the bound graphics pipeline was created with <a href=\"#VkPipelineSampleLocationsStateCreateInfoEXT\">VkPipelineSampleLocationsStateCreateInfoEXT</a>::<code>sampleLocationsEnable</code> set to <code>VK_TRUE</code> and the current subpass has a depth/stencil attachment, then that attachment <strong class=\"purple\">must</strong> have been created with the <code>VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT</code> bit set"
+        }
+      ]
+    },
+    "VkDrawIndirectCommand": {
+      "core": [
+        {
+          "vuid": "VUID-VkDrawIndirectCommand-None-00500",
+          "text": " For a given vertex buffer binding, any attribute data fetched <strong class=\"purple\">must</strong> be entirely contained within the corresponding vertex buffer binding, as described in <a href=\"#fxvertex-input\">Vertex Input Description</a>"
+        },
+        {
+          "vuid": "VUID-VkDrawIndirectCommand-firstInstance-00501",
+          "text": " If the <a href=\"#features-drawIndirectFirstInstance\">drawIndirectFirstInstance</a> feature is not enabled, <code>firstInstance</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        }
+      ]
+    },
+    "vkCmdDrawIndirectCountKHR": {
+      "(VK_KHR_draw_indirect_count)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-None-02690",
+          "text": " If a <code>VkImageView</code> is sampled with <code>VK_FILTER_LINEAR</code> as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-None-02691",
+          "text": " If a <code>VkImageView</code> is accessed using atomic operations as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-None-02697",
+          "text": " For each set <em>n</em> that is statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command, a descriptor set <strong class=\"purple\">must</strong> have been bound to <em>n</em> at the same pipeline bind point, with a <code>VkPipelineLayout</code> that is compatible for set <em>n</em>, with the <code>VkPipelineLayout</code> used to create the current <code>VkPipeline</code>, as described in <a href=\"#descriptorsets-compatibility\">Pipeline Layout Compatibility</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-None-02698",
+          "text": " For each push constant that is statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command, a push constant value <strong class=\"purple\">must</strong> have been set for the same pipeline bind point, with a <code>VkPipelineLayout</code> that is compatible for push constants, with the <code>VkPipelineLayout</code> used to create the current <code>VkPipeline</code>, as described in <a href=\"#descriptorsets-compatibility\">Pipeline Layout Compatibility</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-None-02699",
+          "text": " Descriptors in each bound descriptor set, specified via <code>vkCmdBindDescriptorSets</code>, <strong class=\"purple\">must</strong> be valid if they are statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-None-02700",
+          "text": " A valid pipeline <strong class=\"purple\">must</strong> be bound to the pipeline bind point used by this command"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-commandBuffer-02701",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command requires any dynamic state, that state <strong class=\"purple\">must</strong> have been set for <code>commandBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-None-02702",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used to sample from any <code>VkImage</code> with a <code>VkImageView</code> of the type <code>VK_IMAGE_VIEW_TYPE_3D</code>, <code>VK_IMAGE_VIEW_TYPE_CUBE</code>, <code>VK_IMAGE_VIEW_TYPE_1D_ARRAY</code>, <code>VK_IMAGE_VIEW_TYPE_2D_ARRAY</code> or <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-None-02703",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used with any of the SPIR-V <code>OpImageSample*</code> or <code>OpImageSparseSample*</code> instructions with <code>ImplicitLod</code>, <code>Dref</code> or <code>Proj</code> in their name, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-None-02704",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used with any of the SPIR-V <code>OpImageSample*</code> or <code>OpImageSparseSample*</code> instructions that includes a LOD bias or any offset values, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-None-02705",
+          "text": " If the <a href=\"#features-robustBufferAccess\">robust buffer access</a> feature is not enabled, and if the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a uniform buffer, it <strong class=\"purple\">must</strong> not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-None-02706",
+          "text": " If the <a href=\"#features-robustBufferAccess\">robust buffer access</a> feature is not enabled, and if the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a storage buffer, it <strong class=\"purple\">must</strong> not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-renderPass-02684",
+          "text": " The current render pass <strong class=\"purple\">must</strong> be <a href=\"#renderpass-compatibility\">compatible</a> with the <code>renderPass</code> member of the <code>VkGraphicsPipelineCreateInfo</code> structure specified when creating the <code>VkPipeline</code> bound to <code>VK_PIPELINE_BIND_POINT_GRAPHICS</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-subpass-02685",
+          "text": " The subpass index of the current render pass <strong class=\"purple\">must</strong> be equal to the <code>subpass</code> member of the <code>VkGraphicsPipelineCreateInfo</code> structure specified when creating the <code>VkPipeline</code> bound to <code>VK_PIPELINE_BIND_POINT_GRAPHICS</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-None-02686",
+          "text": " Every input attachment used by the current subpass <strong class=\"purple\">must</strong> be bound to the pipeline via a descriptor set"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-None-02687",
+          "text": " Image subresources used as attachments in the current render pass <strong class=\"purple\">must</strong> not be accessed in any way other than as an attachment by this command."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-None-02720",
+          "text": " All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point&#8217;s interface <strong class=\"purple\">must</strong> have valid buffers bound"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-None-02721",
+          "text": " For a given vertex buffer binding, any attribute data fetched <strong class=\"purple\">must</strong> be entirely contained within the corresponding vertex buffer binding, as described in <a href=\"#fxvertex-input\">Vertex Input Description</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-buffer-02708",
+          "text": " If <code>buffer</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-buffer-02709",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> have been created with the <code>VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT</code> bit set"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-offset-02710",
+          "text": " <code>offset</code> <strong class=\"purple\">must</strong> be a multiple of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-countBuffer-02714",
+          "text": " If <code>countBuffer</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-countBuffer-02715",
+          "text": " <code>countBuffer</code> <strong class=\"purple\">must</strong> have been created with the <code>VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT</code> bit set"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-countBufferOffset-02716",
+          "text": " <code>countBufferOffset</code> <strong class=\"purple\">must</strong> be a multiple of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-countBuffer-02717",
+          "text": " The count stored in <code>countBuffer</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxDrawIndirectCount</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-stride-03110",
+          "text": " <code>stride</code> <strong class=\"purple\">must</strong> be a multiple of <code>4</code> and <strong class=\"purple\">must</strong> be greater than or equal to sizeof(<code>VkDrawIndirectCommand</code>)"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-maxDrawCount-03111",
+          "text": " If <code>maxDrawCount</code> is greater than or equal to <code>1</code>, <span class=\"eq\">(<code>stride</code> {times} (<code>maxDrawCount</code> - 1) &#43; <code>offset</code> &#43; sizeof(<code>VkDrawIndirectCommand</code>))</span> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>buffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-countBuffer-03121",
+          "text": " If the count stored in <code>countBuffer</code> is equal to <code>1</code>, <span class=\"eq\">(<code>offset</code> &#43; sizeof(<code>VkDrawIndirectCommand</code>))</span> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>buffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-countBuffer-03122",
+          "text": " If the count stored in <code>countBuffer</code> is greater than <code>1</code>, <span class=\"eq\">(<code>stride</code> {times} (<code>drawCount</code> - 1) &#43; <code>offset</code> &#43; sizeof(<code>VkDrawIndirectCommand</code>))</span> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>buffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-buffer-parameter",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-countBuffer-parameter",
+          "text": " <code>countBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called inside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-commonparent",
+          "text": " Each of <code>buffer</code>, <code>commandBuffer</code>, and <code>countBuffer</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_KHR_draw_indirect_count)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-None-02692",
+          "text": " If a <code>VkImageView</code> is sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT</code>"
+        }
+      ],
+      "(VK_KHR_draw_indirect_count)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-None-02693",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command <strong class=\"purple\">must</strong> not have a <a href=\"#VkImageViewType\">VkImageViewType</a> of <code>VK_IMAGE_VIEW_TYPE_3D</code>, <code>VK_IMAGE_VIEW_TYPE_CUBE</code>, or <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>"
+        }
+      ],
+      "(VK_KHR_draw_indirect_count)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-filterCubic-02694",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command <strong class=\"purple\">must</strong> have a <a href=\"#VkImageViewType\">VkImageViewType</a> and format that supports cubic filtering, as specified by <code>VkFilterCubicImageViewImageFormatPropertiesEXT</code>::<code>filterCubic</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties2</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-filterCubicMinmax-02695",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> with a reduction mode of either <code>VK_SAMPLER_REDUCTION_MODE_MIN_EXT</code> or <code>VK_SAMPLER_REDUCTION_MODE_MAX_EXT</code> as a result of this command <strong class=\"purple\">must</strong> have a <a href=\"#VkImageViewType\">VkImageViewType</a> and format that supports cubic filtering together with minmax filtering, as specified by <code>VkFilterCubicImageViewImageFormatPropertiesEXT</code>::<code>filterCubicMinmax</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties2</code>"
+        }
+      ],
+      "(VK_KHR_draw_indirect_count)+(VK_NV_corner_sampled_image)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-flags-02696",
+          "text": " Any <a href=\"#VkImage\">VkImage</a> created with a <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a>::<code>flags</code> containing <code>VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV</code> sampled as a result of this command <strong class=\"purple\">must</strong> only be sampled using a <a href=\"#VkSamplerAddressMode\">VkSamplerAddressMode</a> of <code>VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE</code>."
+        }
+      ],
+      "(VK_KHR_draw_indirect_count)+(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-commandBuffer-02707",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, any resource accessed by the <code>VkPipeline</code> object bound to the pipeline bind point used by this command <strong class=\"purple\">must</strong> not be a protected resource"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-commandBuffer-02711",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> not be a protected command buffer"
+        }
+      ],
+      "(VK_KHR_draw_indirect_count)+(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-maxMultiviewInstanceIndex-02688",
+          "text": " If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index <strong class=\"purple\">must</strong> be less than or equal to <a href=\"#VkPhysicalDeviceMultiviewProperties\">VkPhysicalDeviceMultiviewProperties</a>::<code>maxMultiviewInstanceIndex</code>."
+        }
+      ],
+      "(VK_KHR_draw_indirect_count)+(VK_EXT_sample_locations)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndirectCountKHR-sampleLocationsEnable-02689",
+          "text": " If the bound graphics pipeline was created with <a href=\"#VkPipelineSampleLocationsStateCreateInfoEXT\">VkPipelineSampleLocationsStateCreateInfoEXT</a>::<code>sampleLocationsEnable</code> set to <code>VK_TRUE</code> and the current subpass has a depth/stencil attachment, then that attachment <strong class=\"purple\">must</strong> have been created with the <code>VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT</code> bit set"
+        }
+      ]
+    },
+    "vkCmdDrawIndexedIndirect": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-None-02690",
+          "text": " If a <code>VkImageView</code> is sampled with <code>VK_FILTER_LINEAR</code> as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-None-02691",
+          "text": " If a <code>VkImageView</code> is accessed using atomic operations as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-None-02697",
+          "text": " For each set <em>n</em> that is statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command, a descriptor set <strong class=\"purple\">must</strong> have been bound to <em>n</em> at the same pipeline bind point, with a <code>VkPipelineLayout</code> that is compatible for set <em>n</em>, with the <code>VkPipelineLayout</code> used to create the current <code>VkPipeline</code>, as described in <a href=\"#descriptorsets-compatibility\">Pipeline Layout Compatibility</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-None-02698",
+          "text": " For each push constant that is statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command, a push constant value <strong class=\"purple\">must</strong> have been set for the same pipeline bind point, with a <code>VkPipelineLayout</code> that is compatible for push constants, with the <code>VkPipelineLayout</code> used to create the current <code>VkPipeline</code>, as described in <a href=\"#descriptorsets-compatibility\">Pipeline Layout Compatibility</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-None-02699",
+          "text": " Descriptors in each bound descriptor set, specified via <code>vkCmdBindDescriptorSets</code>, <strong class=\"purple\">must</strong> be valid if they are statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-None-02700",
+          "text": " A valid pipeline <strong class=\"purple\">must</strong> be bound to the pipeline bind point used by this command"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-commandBuffer-02701",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command requires any dynamic state, that state <strong class=\"purple\">must</strong> have been set for <code>commandBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-None-02702",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used to sample from any <code>VkImage</code> with a <code>VkImageView</code> of the type <code>VK_IMAGE_VIEW_TYPE_3D</code>, <code>VK_IMAGE_VIEW_TYPE_CUBE</code>, <code>VK_IMAGE_VIEW_TYPE_1D_ARRAY</code>, <code>VK_IMAGE_VIEW_TYPE_2D_ARRAY</code> or <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-None-02703",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used with any of the SPIR-V <code>OpImageSample*</code> or <code>OpImageSparseSample*</code> instructions with <code>ImplicitLod</code>, <code>Dref</code> or <code>Proj</code> in their name, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-None-02704",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used with any of the SPIR-V <code>OpImageSample*</code> or <code>OpImageSparseSample*</code> instructions that includes a LOD bias or any offset values, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-None-02705",
+          "text": " If the <a href=\"#features-robustBufferAccess\">robust buffer access</a> feature is not enabled, and if the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a uniform buffer, it <strong class=\"purple\">must</strong> not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-None-02706",
+          "text": " If the <a href=\"#features-robustBufferAccess\">robust buffer access</a> feature is not enabled, and if the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a storage buffer, it <strong class=\"purple\">must</strong> not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-renderPass-02684",
+          "text": " The current render pass <strong class=\"purple\">must</strong> be <a href=\"#renderpass-compatibility\">compatible</a> with the <code>renderPass</code> member of the <code>VkGraphicsPipelineCreateInfo</code> structure specified when creating the <code>VkPipeline</code> bound to <code>VK_PIPELINE_BIND_POINT_GRAPHICS</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-subpass-02685",
+          "text": " The subpass index of the current render pass <strong class=\"purple\">must</strong> be equal to the <code>subpass</code> member of the <code>VkGraphicsPipelineCreateInfo</code> structure specified when creating the <code>VkPipeline</code> bound to <code>VK_PIPELINE_BIND_POINT_GRAPHICS</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-None-02686",
+          "text": " Every input attachment used by the current subpass <strong class=\"purple\">must</strong> be bound to the pipeline via a descriptor set"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-None-02687",
+          "text": " Image subresources used as attachments in the current render pass <strong class=\"purple\">must</strong> not be accessed in any way other than as an attachment by this command."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-None-02720",
+          "text": " All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point&#8217;s interface <strong class=\"purple\">must</strong> have valid buffers bound"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-None-02721",
+          "text": " For a given vertex buffer binding, any attribute data fetched <strong class=\"purple\">must</strong> be entirely contained within the corresponding vertex buffer binding, as described in <a href=\"#fxvertex-input\">Vertex Input Description</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-buffer-02708",
+          "text": " If <code>buffer</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-buffer-02709",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> have been created with the <code>VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT</code> bit set"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-offset-02710",
+          "text": " <code>offset</code> <strong class=\"purple\">must</strong> be a multiple of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-drawCount-02718",
+          "text": " If the <a href=\"#features-multiDrawIndirect\">multi-draw indirect</a> feature is not enabled, <code>drawCount</code> <strong class=\"purple\">must</strong> be <code>0</code> or <code>1</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-drawCount-02719",
+          "text": " <code>drawCount</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxDrawIndirectCount</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-drawCount-00528",
+          "text": " If <code>drawCount</code> is greater than <code>1</code>, <code>stride</code> <strong class=\"purple\">must</strong> be a multiple of <code>4</code> and <strong class=\"purple\">must</strong> be greater than or equal to <code>sizeof</code>(<code>VkDrawIndexedIndirectCommand</code>)"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-firstInstance-00530",
+          "text": " If the <a href=\"#features-drawIndirectFirstInstance\">drawIndirectFirstInstance</a> feature is not enabled, all the <code>firstInstance</code> members of the <code>VkDrawIndexedIndirectCommand</code> structures accessed by this command <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-drawCount-00539",
+          "text": " If <code>drawCount</code> is equal to <code>1</code>, <span class=\"eq\">(<code>offset</code> &#43; <code>sizeof</code>(<code>VkDrawIndexedIndirectCommand</code>))</span> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>buffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-drawCount-00540",
+          "text": " If <code>drawCount</code> is greater than <code>1</code>, <span class=\"eq\">(<code>stride</code> {times} (<code>drawCount</code> - 1) &#43; <code>offset</code> &#43; <code>sizeof</code>(<code>VkDrawIndexedIndirectCommand</code>))</span> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>buffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-buffer-parameter",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called inside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-commonparent",
+          "text": " Both of <code>buffer</code>, and <code>commandBuffer</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-None-02692",
+          "text": " If a <code>VkImageView</code> is sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT</code>"
+        }
+      ],
+      "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-None-02693",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command <strong class=\"purple\">must</strong> not have a <a href=\"#VkImageViewType\">VkImageViewType</a> of <code>VK_IMAGE_VIEW_TYPE_3D</code>, <code>VK_IMAGE_VIEW_TYPE_CUBE</code>, or <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>"
+        }
+      ],
+      "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-filterCubic-02694",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command <strong class=\"purple\">must</strong> have a <a href=\"#VkImageViewType\">VkImageViewType</a> and format that supports cubic filtering, as specified by <code>VkFilterCubicImageViewImageFormatPropertiesEXT</code>::<code>filterCubic</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties2</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-filterCubicMinmax-02695",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> with a reduction mode of either <code>VK_SAMPLER_REDUCTION_MODE_MIN_EXT</code> or <code>VK_SAMPLER_REDUCTION_MODE_MAX_EXT</code> as a result of this command <strong class=\"purple\">must</strong> have a <a href=\"#VkImageViewType\">VkImageViewType</a> and format that supports cubic filtering together with minmax filtering, as specified by <code>VkFilterCubicImageViewImageFormatPropertiesEXT</code>::<code>filterCubicMinmax</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties2</code>"
+        }
+      ],
+      "(VK_NV_corner_sampled_image)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-flags-02696",
+          "text": " Any <a href=\"#VkImage\">VkImage</a> created with a <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a>::<code>flags</code> containing <code>VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV</code> sampled as a result of this command <strong class=\"purple\">must</strong> only be sampled using a <a href=\"#VkSamplerAddressMode\">VkSamplerAddressMode</a> of <code>VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE</code>."
+        }
+      ],
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-commandBuffer-02707",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, any resource accessed by the <code>VkPipeline</code> object bound to the pipeline bind point used by this command <strong class=\"purple\">must</strong> not be a protected resource"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-commandBuffer-02711",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> not be a protected command buffer"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-maxMultiviewInstanceIndex-02688",
+          "text": " If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index <strong class=\"purple\">must</strong> be less than or equal to <a href=\"#VkPhysicalDeviceMultiviewProperties\">VkPhysicalDeviceMultiviewProperties</a>::<code>maxMultiviewInstanceIndex</code>."
+        }
+      ],
+      "(VK_EXT_sample_locations)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirect-sampleLocationsEnable-02689",
+          "text": " If the bound graphics pipeline was created with <a href=\"#VkPipelineSampleLocationsStateCreateInfoEXT\">VkPipelineSampleLocationsStateCreateInfoEXT</a>::<code>sampleLocationsEnable</code> set to <code>VK_TRUE</code> and the current subpass has a depth/stencil attachment, then that attachment <strong class=\"purple\">must</strong> have been created with the <code>VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT</code> bit set"
+        }
+      ]
+    },
+    "VkDrawIndexedIndirectCommand": {
+      "core": [
+        {
+          "vuid": "VUID-VkDrawIndexedIndirectCommand-None-00552",
+          "text": " For a given vertex buffer binding, any attribute data fetched <strong class=\"purple\">must</strong> be entirely contained within the corresponding vertex buffer binding, as described in <a href=\"#fxvertex-input\">Vertex Input Description</a>"
+        },
+        {
+          "vuid": "VUID-VkDrawIndexedIndirectCommand-indexSize-00553",
+          "text": " <span class=\"eq\">(<code>indexSize</code> * (<code>firstIndex</code> &#43; <code>indexCount</code>) &#43; <code>offset</code>)</span> <strong class=\"purple\">must</strong> be less than or equal to the size of the bound index buffer, with <code>indexSize</code> being based on the type specified by <code>indexType</code>, where the index buffer, <code>indexType</code>, and <code>offset</code> are specified via <code>vkCmdBindIndexBuffer</code>"
+        },
+        {
+          "vuid": "VUID-VkDrawIndexedIndirectCommand-firstInstance-00554",
+          "text": " If the <a href=\"#features-drawIndirectFirstInstance\">drawIndirectFirstInstance</a> feature is not enabled, <code>firstInstance</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        }
+      ]
+    },
+    "vkCmdDrawIndexedIndirectCountKHR": {
+      "(VK_KHR_draw_indirect_count)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-None-02690",
+          "text": " If a <code>VkImageView</code> is sampled with <code>VK_FILTER_LINEAR</code> as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-None-02691",
+          "text": " If a <code>VkImageView</code> is accessed using atomic operations as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-None-02697",
+          "text": " For each set <em>n</em> that is statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command, a descriptor set <strong class=\"purple\">must</strong> have been bound to <em>n</em> at the same pipeline bind point, with a <code>VkPipelineLayout</code> that is compatible for set <em>n</em>, with the <code>VkPipelineLayout</code> used to create the current <code>VkPipeline</code>, as described in <a href=\"#descriptorsets-compatibility\">Pipeline Layout Compatibility</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-None-02698",
+          "text": " For each push constant that is statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command, a push constant value <strong class=\"purple\">must</strong> have been set for the same pipeline bind point, with a <code>VkPipelineLayout</code> that is compatible for push constants, with the <code>VkPipelineLayout</code> used to create the current <code>VkPipeline</code>, as described in <a href=\"#descriptorsets-compatibility\">Pipeline Layout Compatibility</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-None-02699",
+          "text": " Descriptors in each bound descriptor set, specified via <code>vkCmdBindDescriptorSets</code>, <strong class=\"purple\">must</strong> be valid if they are statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-None-02700",
+          "text": " A valid pipeline <strong class=\"purple\">must</strong> be bound to the pipeline bind point used by this command"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-02701",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command requires any dynamic state, that state <strong class=\"purple\">must</strong> have been set for <code>commandBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-None-02702",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used to sample from any <code>VkImage</code> with a <code>VkImageView</code> of the type <code>VK_IMAGE_VIEW_TYPE_3D</code>, <code>VK_IMAGE_VIEW_TYPE_CUBE</code>, <code>VK_IMAGE_VIEW_TYPE_1D_ARRAY</code>, <code>VK_IMAGE_VIEW_TYPE_2D_ARRAY</code> or <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-None-02703",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used with any of the SPIR-V <code>OpImageSample*</code> or <code>OpImageSparseSample*</code> instructions with <code>ImplicitLod</code>, <code>Dref</code> or <code>Proj</code> in their name, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-None-02704",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used with any of the SPIR-V <code>OpImageSample*</code> or <code>OpImageSparseSample*</code> instructions that includes a LOD bias or any offset values, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-None-02705",
+          "text": " If the <a href=\"#features-robustBufferAccess\">robust buffer access</a> feature is not enabled, and if the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a uniform buffer, it <strong class=\"purple\">must</strong> not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-None-02706",
+          "text": " If the <a href=\"#features-robustBufferAccess\">robust buffer access</a> feature is not enabled, and if the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a storage buffer, it <strong class=\"purple\">must</strong> not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-renderPass-02684",
+          "text": " The current render pass <strong class=\"purple\">must</strong> be <a href=\"#renderpass-compatibility\">compatible</a> with the <code>renderPass</code> member of the <code>VkGraphicsPipelineCreateInfo</code> structure specified when creating the <code>VkPipeline</code> bound to <code>VK_PIPELINE_BIND_POINT_GRAPHICS</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-subpass-02685",
+          "text": " The subpass index of the current render pass <strong class=\"purple\">must</strong> be equal to the <code>subpass</code> member of the <code>VkGraphicsPipelineCreateInfo</code> structure specified when creating the <code>VkPipeline</code> bound to <code>VK_PIPELINE_BIND_POINT_GRAPHICS</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-None-02686",
+          "text": " Every input attachment used by the current subpass <strong class=\"purple\">must</strong> be bound to the pipeline via a descriptor set"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-None-02687",
+          "text": " Image subresources used as attachments in the current render pass <strong class=\"purple\">must</strong> not be accessed in any way other than as an attachment by this command."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-None-02720",
+          "text": " All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point&#8217;s interface <strong class=\"purple\">must</strong> have valid buffers bound"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-None-02721",
+          "text": " For a given vertex buffer binding, any attribute data fetched <strong class=\"purple\">must</strong> be entirely contained within the corresponding vertex buffer binding, as described in <a href=\"#fxvertex-input\">Vertex Input Description</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-02708",
+          "text": " If <code>buffer</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-02709",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> have been created with the <code>VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT</code> bit set"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-offset-02710",
+          "text": " <code>offset</code> <strong class=\"purple\">must</strong> be a multiple of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-02714",
+          "text": " If <code>countBuffer</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-02715",
+          "text": " <code>countBuffer</code> <strong class=\"purple\">must</strong> have been created with the <code>VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT</code> bit set"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-countBufferOffset-02716",
+          "text": " <code>countBufferOffset</code> <strong class=\"purple\">must</strong> be a multiple of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-02717",
+          "text": " The count stored in <code>countBuffer</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxDrawIndirectCount</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-stride-03142",
+          "text": " <code>stride</code> <strong class=\"purple\">must</strong> be a multiple of <code>4</code> and <strong class=\"purple\">must</strong> be greater than or equal to sizeof(<code>VkDrawIndexedIndirectCommand</code>)"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-maxDrawCount-03143",
+          "text": " If <code>maxDrawCount</code> is greater than or equal to <code>1</code>, <span class=\"eq\">(<code>stride</code> {times} (<code>maxDrawCount</code> - 1) &#43; <code>offset</code> &#43; sizeof(<code>VkDrawIndexedIndirectCommand</code>))</span> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>buffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-03153",
+          "text": " If count stored in <code>countBuffer</code> is equal to <code>1</code>, <span class=\"eq\">(<code>offset</code> &#43; sizeof(<code>VkDrawIndexedIndirectCommand</code>))</span> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>buffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-03154",
+          "text": " If count stored in <code>countBuffer</code> is greater than <code>1</code>, <span class=\"eq\">(<code>stride</code> {times} (<code>drawCount</code> - 1) &#43; <code>offset</code> &#43; sizeof(<code>VkDrawIndexedIndirectCommand</code>))</span> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>buffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-parameter",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-parameter",
+          "text": " <code>countBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called inside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-commonparent",
+          "text": " Each of <code>buffer</code>, <code>commandBuffer</code>, and <code>countBuffer</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_KHR_draw_indirect_count)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-None-02692",
+          "text": " If a <code>VkImageView</code> is sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT</code>"
+        }
+      ],
+      "(VK_KHR_draw_indirect_count)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-None-02693",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command <strong class=\"purple\">must</strong> not have a <a href=\"#VkImageViewType\">VkImageViewType</a> of <code>VK_IMAGE_VIEW_TYPE_3D</code>, <code>VK_IMAGE_VIEW_TYPE_CUBE</code>, or <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>"
+        }
+      ],
+      "(VK_KHR_draw_indirect_count)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-filterCubic-02694",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command <strong class=\"purple\">must</strong> have a <a href=\"#VkImageViewType\">VkImageViewType</a> and format that supports cubic filtering, as specified by <code>VkFilterCubicImageViewImageFormatPropertiesEXT</code>::<code>filterCubic</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties2</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-filterCubicMinmax-02695",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> with a reduction mode of either <code>VK_SAMPLER_REDUCTION_MODE_MIN_EXT</code> or <code>VK_SAMPLER_REDUCTION_MODE_MAX_EXT</code> as a result of this command <strong class=\"purple\">must</strong> have a <a href=\"#VkImageViewType\">VkImageViewType</a> and format that supports cubic filtering together with minmax filtering, as specified by <code>VkFilterCubicImageViewImageFormatPropertiesEXT</code>::<code>filterCubicMinmax</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties2</code>"
+        }
+      ],
+      "(VK_KHR_draw_indirect_count)+(VK_NV_corner_sampled_image)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-flags-02696",
+          "text": " Any <a href=\"#VkImage\">VkImage</a> created with a <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a>::<code>flags</code> containing <code>VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV</code> sampled as a result of this command <strong class=\"purple\">must</strong> only be sampled using a <a href=\"#VkSamplerAddressMode\">VkSamplerAddressMode</a> of <code>VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE</code>."
+        }
+      ],
+      "(VK_KHR_draw_indirect_count)+(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-02707",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, any resource accessed by the <code>VkPipeline</code> object bound to the pipeline bind point used by this command <strong class=\"purple\">must</strong> not be a protected resource"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-02711",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> not be a protected command buffer"
+        }
+      ],
+      "(VK_KHR_draw_indirect_count)+(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-maxMultiviewInstanceIndex-02688",
+          "text": " If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index <strong class=\"purple\">must</strong> be less than or equal to <a href=\"#VkPhysicalDeviceMultiviewProperties\">VkPhysicalDeviceMultiviewProperties</a>::<code>maxMultiviewInstanceIndex</code>."
+        }
+      ],
+      "(VK_KHR_draw_indirect_count)+(VK_EXT_sample_locations)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndexedIndirectCountKHR-sampleLocationsEnable-02689",
+          "text": " If the bound graphics pipeline was created with <a href=\"#VkPipelineSampleLocationsStateCreateInfoEXT\">VkPipelineSampleLocationsStateCreateInfoEXT</a>::<code>sampleLocationsEnable</code> set to <code>VK_TRUE</code> and the current subpass has a depth/stencil attachment, then that attachment <strong class=\"purple\">must</strong> have been created with the <code>VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT</code> bit set"
+        }
+      ]
+    },
+    "vkCmdDrawIndirectByteCountEXT": {
+      "(VK_EXT_transform_feedback)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-02690",
+          "text": " If a <code>VkImageView</code> is sampled with <code>VK_FILTER_LINEAR</code> as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-02691",
+          "text": " If a <code>VkImageView</code> is accessed using atomic operations as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-02697",
+          "text": " For each set <em>n</em> that is statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command, a descriptor set <strong class=\"purple\">must</strong> have been bound to <em>n</em> at the same pipeline bind point, with a <code>VkPipelineLayout</code> that is compatible for set <em>n</em>, with the <code>VkPipelineLayout</code> used to create the current <code>VkPipeline</code>, as described in <a href=\"#descriptorsets-compatibility\">Pipeline Layout Compatibility</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-02698",
+          "text": " For each push constant that is statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command, a push constant value <strong class=\"purple\">must</strong> have been set for the same pipeline bind point, with a <code>VkPipelineLayout</code> that is compatible for push constants, with the <code>VkPipelineLayout</code> used to create the current <code>VkPipeline</code>, as described in <a href=\"#descriptorsets-compatibility\">Pipeline Layout Compatibility</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-02699",
+          "text": " Descriptors in each bound descriptor set, specified via <code>vkCmdBindDescriptorSets</code>, <strong class=\"purple\">must</strong> be valid if they are statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-02700",
+          "text": " A valid pipeline <strong class=\"purple\">must</strong> be bound to the pipeline bind point used by this command"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-02701",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command requires any dynamic state, that state <strong class=\"purple\">must</strong> have been set for <code>commandBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-02702",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used to sample from any <code>VkImage</code> with a <code>VkImageView</code> of the type <code>VK_IMAGE_VIEW_TYPE_3D</code>, <code>VK_IMAGE_VIEW_TYPE_CUBE</code>, <code>VK_IMAGE_VIEW_TYPE_1D_ARRAY</code>, <code>VK_IMAGE_VIEW_TYPE_2D_ARRAY</code> or <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-02703",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used with any of the SPIR-V <code>OpImageSample*</code> or <code>OpImageSparseSample*</code> instructions with <code>ImplicitLod</code>, <code>Dref</code> or <code>Proj</code> in their name, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-02704",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used with any of the SPIR-V <code>OpImageSample*</code> or <code>OpImageSparseSample*</code> instructions that includes a LOD bias or any offset values, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-02705",
+          "text": " If the <a href=\"#features-robustBufferAccess\">robust buffer access</a> feature is not enabled, and if the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a uniform buffer, it <strong class=\"purple\">must</strong> not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-02706",
+          "text": " If the <a href=\"#features-robustBufferAccess\">robust buffer access</a> feature is not enabled, and if the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a storage buffer, it <strong class=\"purple\">must</strong> not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-renderPass-02684",
+          "text": " The current render pass <strong class=\"purple\">must</strong> be <a href=\"#renderpass-compatibility\">compatible</a> with the <code>renderPass</code> member of the <code>VkGraphicsPipelineCreateInfo</code> structure specified when creating the <code>VkPipeline</code> bound to <code>VK_PIPELINE_BIND_POINT_GRAPHICS</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-subpass-02685",
+          "text": " The subpass index of the current render pass <strong class=\"purple\">must</strong> be equal to the <code>subpass</code> member of the <code>VkGraphicsPipelineCreateInfo</code> structure specified when creating the <code>VkPipeline</code> bound to <code>VK_PIPELINE_BIND_POINT_GRAPHICS</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-02686",
+          "text": " Every input attachment used by the current subpass <strong class=\"purple\">must</strong> be bound to the pipeline via a descriptor set"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-02687",
+          "text": " Image subresources used as attachments in the current render pass <strong class=\"purple\">must</strong> not be accessed in any way other than as an attachment by this command."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-02720",
+          "text": " All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point&#8217;s interface <strong class=\"purple\">must</strong> have valid buffers bound"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-02721",
+          "text": " For a given vertex buffer binding, any attribute data fetched <strong class=\"purple\">must</strong> be entirely contained within the corresponding vertex buffer binding, as described in <a href=\"#fxvertex-input\">Vertex Input Description</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-transformFeedback-02287",
+          "text": " <code>VkPhysicalDeviceTransformFeedbackFeaturesEXT</code>::<code>transformFeedback</code> <strong class=\"purple\">must</strong> be enabled"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-transformFeedbackDraw-02288",
+          "text": " The implementation <strong class=\"purple\">must</strong> support <code>VkPhysicalDeviceTransformFeedbackPropertiesEXT</code>::<code>transformFeedbackDraw</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-vertexStride-02289",
+          "text": " <code>vertexStride</code> <strong class=\"purple\">must</strong> be greater than 0 and less than or equal to <a href=\"#VkPhysicalDeviceLimits\">VkPhysicalDeviceLimits</a>::<code>maxTransformFeedbackBufferDataStride</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-counterBuffer-02290",
+          "text": " <code>counterBuffer</code> <strong class=\"purple\">must</strong> have been created with the <code>VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT</code> bit set"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-counterBuffer-parameter",
+          "text": " <code>counterBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called inside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-commonparent",
+          "text": " Both of <code>commandBuffer</code>, and <code>counterBuffer</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_EXT_transform_feedback)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-02692",
+          "text": " If a <code>VkImageView</code> is sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT</code>"
+        }
+      ],
+      "(VK_EXT_transform_feedback)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-02693",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command <strong class=\"purple\">must</strong> not have a <a href=\"#VkImageViewType\">VkImageViewType</a> of <code>VK_IMAGE_VIEW_TYPE_3D</code>, <code>VK_IMAGE_VIEW_TYPE_CUBE</code>, or <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>"
+        }
+      ],
+      "(VK_EXT_transform_feedback)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-filterCubic-02694",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command <strong class=\"purple\">must</strong> have a <a href=\"#VkImageViewType\">VkImageViewType</a> and format that supports cubic filtering, as specified by <code>VkFilterCubicImageViewImageFormatPropertiesEXT</code>::<code>filterCubic</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties2</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-filterCubicMinmax-02695",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> with a reduction mode of either <code>VK_SAMPLER_REDUCTION_MODE_MIN_EXT</code> or <code>VK_SAMPLER_REDUCTION_MODE_MAX_EXT</code> as a result of this command <strong class=\"purple\">must</strong> have a <a href=\"#VkImageViewType\">VkImageViewType</a> and format that supports cubic filtering together with minmax filtering, as specified by <code>VkFilterCubicImageViewImageFormatPropertiesEXT</code>::<code>filterCubicMinmax</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties2</code>"
+        }
+      ],
+      "(VK_EXT_transform_feedback)+(VK_NV_corner_sampled_image)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-flags-02696",
+          "text": " Any <a href=\"#VkImage\">VkImage</a> created with a <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a>::<code>flags</code> containing <code>VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV</code> sampled as a result of this command <strong class=\"purple\">must</strong> only be sampled using a <a href=\"#VkSamplerAddressMode\">VkSamplerAddressMode</a> of <code>VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE</code>."
+        }
+      ],
+      "(VK_EXT_transform_feedback)+(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-02707",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, any resource accessed by the <code>VkPipeline</code> object bound to the pipeline bind point used by this command <strong class=\"purple\">must</strong> not be a protected resource"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-02646",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> not be a protected command buffer"
+        }
+      ],
+      "(VK_EXT_transform_feedback)+(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-maxMultiviewInstanceIndex-02688",
+          "text": " If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index <strong class=\"purple\">must</strong> be less than or equal to <a href=\"#VkPhysicalDeviceMultiviewProperties\">VkPhysicalDeviceMultiviewProperties</a>::<code>maxMultiviewInstanceIndex</code>."
+        }
+      ],
+      "(VK_EXT_transform_feedback)+(VK_EXT_sample_locations)": [
+        {
+          "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-sampleLocationsEnable-02689",
+          "text": " If the bound graphics pipeline was created with <a href=\"#VkPipelineSampleLocationsStateCreateInfoEXT\">VkPipelineSampleLocationsStateCreateInfoEXT</a>::<code>sampleLocationsEnable</code> set to <code>VK_TRUE</code> and the current subpass has a depth/stencil attachment, then that attachment <strong class=\"purple\">must</strong> have been created with the <code>VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT</code> bit set"
+        }
+      ]
+    },
+    "vkCmdBeginConditionalRenderingEXT": {
+      "(VK_EXT_conditional_rendering)": [
+        {
+          "vuid": "VUID-vkCmdBeginConditionalRenderingEXT-None-01980",
+          "text": " Conditional rendering <strong class=\"purple\">must</strong> not already be <a href=\"#active-conditional-rendering\">active</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginConditionalRenderingEXT-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginConditionalRenderingEXT-pConditionalRenderingBegin-parameter",
+          "text": " <code>pConditionalRenderingBegin</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkConditionalRenderingBeginInfoEXT\">VkConditionalRenderingBeginInfoEXT</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginConditionalRenderingEXT-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginConditionalRenderingEXT-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, or compute operations"
+        }
+      ]
+    },
+    "VkConditionalRenderingBeginInfoEXT": {
+      "(VK_EXT_conditional_rendering)": [
+        {
+          "vuid": "VUID-VkConditionalRenderingBeginInfoEXT-buffer-01981",
+          "text": " If <code>buffer</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-VkConditionalRenderingBeginInfoEXT-buffer-01982",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> have been created with the <code>VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT</code> bit set"
+        },
+        {
+          "vuid": "VUID-VkConditionalRenderingBeginInfoEXT-offset-01983",
+          "text": " <code>offset</code> <strong class=\"purple\">must</strong> be less than the size of <code>buffer</code> by at least 32 bits."
+        },
+        {
+          "vuid": "VUID-VkConditionalRenderingBeginInfoEXT-offset-01984",
+          "text": " <code>offset</code> <strong class=\"purple\">must</strong> be a multiple of 4"
+        },
+        {
+          "vuid": "VUID-VkConditionalRenderingBeginInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkConditionalRenderingBeginInfoEXT-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkConditionalRenderingBeginInfoEXT-buffer-parameter",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-VkConditionalRenderingBeginInfoEXT-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkConditionalRenderingFlagBitsEXT\">VkConditionalRenderingFlagBitsEXT</a> values"
+        }
+      ]
+    },
+    "vkCmdEndConditionalRenderingEXT": {
+      "(VK_EXT_conditional_rendering)": [
+        {
+          "vuid": "VUID-vkCmdEndConditionalRenderingEXT-None-01985",
+          "text": " Conditional rendering <strong class=\"purple\">must</strong> be <a href=\"#active-conditional-rendering\">active</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdEndConditionalRenderingEXT-None-01986",
+          "text": " If conditional rendering was made <a href=\"#active-conditional-rendering\">active</a> outside of a render pass instance, it must not be ended inside a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdEndConditionalRenderingEXT-None-01987",
+          "text": " If conditional rendering was made <a href=\"#active-conditional-rendering\">active</a> within a subpass it must be ended in the same subpass"
+        },
+        {
+          "vuid": "VUID-vkCmdEndConditionalRenderingEXT-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdEndConditionalRenderingEXT-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdEndConditionalRenderingEXT-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, or compute operations"
+        }
+      ]
+    },
+    "vkCmdDrawMeshTasksNV": {
+      "(VK_NV_mesh_shader)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksNV-None-02690",
+          "text": " If a <code>VkImageView</code> is sampled with <code>VK_FILTER_LINEAR</code> as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksNV-None-02691",
+          "text": " If a <code>VkImageView</code> is accessed using atomic operations as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksNV-None-02697",
+          "text": " For each set <em>n</em> that is statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command, a descriptor set <strong class=\"purple\">must</strong> have been bound to <em>n</em> at the same pipeline bind point, with a <code>VkPipelineLayout</code> that is compatible for set <em>n</em>, with the <code>VkPipelineLayout</code> used to create the current <code>VkPipeline</code>, as described in <a href=\"#descriptorsets-compatibility\">Pipeline Layout Compatibility</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksNV-None-02698",
+          "text": " For each push constant that is statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command, a push constant value <strong class=\"purple\">must</strong> have been set for the same pipeline bind point, with a <code>VkPipelineLayout</code> that is compatible for push constants, with the <code>VkPipelineLayout</code> used to create the current <code>VkPipeline</code>, as described in <a href=\"#descriptorsets-compatibility\">Pipeline Layout Compatibility</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksNV-None-02699",
+          "text": " Descriptors in each bound descriptor set, specified via <code>vkCmdBindDescriptorSets</code>, <strong class=\"purple\">must</strong> be valid if they are statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksNV-None-02700",
+          "text": " A valid pipeline <strong class=\"purple\">must</strong> be bound to the pipeline bind point used by this command"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksNV-commandBuffer-02701",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command requires any dynamic state, that state <strong class=\"purple\">must</strong> have been set for <code>commandBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksNV-None-02702",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used to sample from any <code>VkImage</code> with a <code>VkImageView</code> of the type <code>VK_IMAGE_VIEW_TYPE_3D</code>, <code>VK_IMAGE_VIEW_TYPE_CUBE</code>, <code>VK_IMAGE_VIEW_TYPE_1D_ARRAY</code>, <code>VK_IMAGE_VIEW_TYPE_2D_ARRAY</code> or <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksNV-None-02703",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used with any of the SPIR-V <code>OpImageSample*</code> or <code>OpImageSparseSample*</code> instructions with <code>ImplicitLod</code>, <code>Dref</code> or <code>Proj</code> in their name, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksNV-None-02704",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used with any of the SPIR-V <code>OpImageSample*</code> or <code>OpImageSparseSample*</code> instructions that includes a LOD bias or any offset values, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksNV-None-02705",
+          "text": " If the <a href=\"#features-robustBufferAccess\">robust buffer access</a> feature is not enabled, and if the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a uniform buffer, it <strong class=\"purple\">must</strong> not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksNV-None-02706",
+          "text": " If the <a href=\"#features-robustBufferAccess\">robust buffer access</a> feature is not enabled, and if the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a storage buffer, it <strong class=\"purple\">must</strong> not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksNV-renderPass-02684",
+          "text": " The current render pass <strong class=\"purple\">must</strong> be <a href=\"#renderpass-compatibility\">compatible</a> with the <code>renderPass</code> member of the <code>VkGraphicsPipelineCreateInfo</code> structure specified when creating the <code>VkPipeline</code> bound to <code>VK_PIPELINE_BIND_POINT_GRAPHICS</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksNV-subpass-02685",
+          "text": " The subpass index of the current render pass <strong class=\"purple\">must</strong> be equal to the <code>subpass</code> member of the <code>VkGraphicsPipelineCreateInfo</code> structure specified when creating the <code>VkPipeline</code> bound to <code>VK_PIPELINE_BIND_POINT_GRAPHICS</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksNV-None-02686",
+          "text": " Every input attachment used by the current subpass <strong class=\"purple\">must</strong> be bound to the pipeline via a descriptor set"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksNV-None-02687",
+          "text": " Image subresources used as attachments in the current render pass <strong class=\"purple\">must</strong> not be accessed in any way other than as an attachment by this command."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksNV-taskCount-02119",
+          "text": " <code>taskCount</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceMeshShaderPropertiesNV</code>::<code>maxDrawMeshTasksCount</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksNV-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksNV-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksNV-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksNV-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called inside of a render pass instance"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksNV-None-02692",
+          "text": " If a <code>VkImageView</code> is sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT</code>"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksNV-None-02693",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command <strong class=\"purple\">must</strong> not have a <a href=\"#VkImageViewType\">VkImageViewType</a> of <code>VK_IMAGE_VIEW_TYPE_3D</code>, <code>VK_IMAGE_VIEW_TYPE_CUBE</code>, or <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksNV-filterCubic-02694",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command <strong class=\"purple\">must</strong> have a <a href=\"#VkImageViewType\">VkImageViewType</a> and format that supports cubic filtering, as specified by <code>VkFilterCubicImageViewImageFormatPropertiesEXT</code>::<code>filterCubic</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties2</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksNV-filterCubicMinmax-02695",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> with a reduction mode of either <code>VK_SAMPLER_REDUCTION_MODE_MIN_EXT</code> or <code>VK_SAMPLER_REDUCTION_MODE_MAX_EXT</code> as a result of this command <strong class=\"purple\">must</strong> have a <a href=\"#VkImageViewType\">VkImageViewType</a> and format that supports cubic filtering together with minmax filtering, as specified by <code>VkFilterCubicImageViewImageFormatPropertiesEXT</code>::<code>filterCubicMinmax</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties2</code>"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_NV_corner_sampled_image)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksNV-flags-02696",
+          "text": " Any <a href=\"#VkImage\">VkImage</a> created with a <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a>::<code>flags</code> containing <code>VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV</code> sampled as a result of this command <strong class=\"purple\">must</strong> only be sampled using a <a href=\"#VkSamplerAddressMode\">VkSamplerAddressMode</a> of <code>VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE</code>."
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksNV-commandBuffer-02707",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, any resource accessed by the <code>VkPipeline</code> object bound to the pipeline bind point used by this command <strong class=\"purple\">must</strong> not be a protected resource"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksNV-maxMultiviewInstanceIndex-02688",
+          "text": " If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index <strong class=\"purple\">must</strong> be less than or equal to <a href=\"#VkPhysicalDeviceMultiviewProperties\">VkPhysicalDeviceMultiviewProperties</a>::<code>maxMultiviewInstanceIndex</code>."
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_EXT_sample_locations)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksNV-sampleLocationsEnable-02689",
+          "text": " If the bound graphics pipeline was created with <a href=\"#VkPipelineSampleLocationsStateCreateInfoEXT\">VkPipelineSampleLocationsStateCreateInfoEXT</a>::<code>sampleLocationsEnable</code> set to <code>VK_TRUE</code> and the current subpass has a depth/stencil attachment, then that attachment <strong class=\"purple\">must</strong> have been created with the <code>VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT</code> bit set"
+        }
+      ]
+    },
+    "vkCmdDrawMeshTasksIndirectNV": {
+      "(VK_NV_mesh_shader)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-02690",
+          "text": " If a <code>VkImageView</code> is sampled with <code>VK_FILTER_LINEAR</code> as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-02691",
+          "text": " If a <code>VkImageView</code> is accessed using atomic operations as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-02697",
+          "text": " For each set <em>n</em> that is statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command, a descriptor set <strong class=\"purple\">must</strong> have been bound to <em>n</em> at the same pipeline bind point, with a <code>VkPipelineLayout</code> that is compatible for set <em>n</em>, with the <code>VkPipelineLayout</code> used to create the current <code>VkPipeline</code>, as described in <a href=\"#descriptorsets-compatibility\">Pipeline Layout Compatibility</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-02698",
+          "text": " For each push constant that is statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command, a push constant value <strong class=\"purple\">must</strong> have been set for the same pipeline bind point, with a <code>VkPipelineLayout</code> that is compatible for push constants, with the <code>VkPipelineLayout</code> used to create the current <code>VkPipeline</code>, as described in <a href=\"#descriptorsets-compatibility\">Pipeline Layout Compatibility</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-02699",
+          "text": " Descriptors in each bound descriptor set, specified via <code>vkCmdBindDescriptorSets</code>, <strong class=\"purple\">must</strong> be valid if they are statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-02700",
+          "text": " A valid pipeline <strong class=\"purple\">must</strong> be bound to the pipeline bind point used by this command"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-02701",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command requires any dynamic state, that state <strong class=\"purple\">must</strong> have been set for <code>commandBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-02702",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used to sample from any <code>VkImage</code> with a <code>VkImageView</code> of the type <code>VK_IMAGE_VIEW_TYPE_3D</code>, <code>VK_IMAGE_VIEW_TYPE_CUBE</code>, <code>VK_IMAGE_VIEW_TYPE_1D_ARRAY</code>, <code>VK_IMAGE_VIEW_TYPE_2D_ARRAY</code> or <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-02703",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used with any of the SPIR-V <code>OpImageSample*</code> or <code>OpImageSparseSample*</code> instructions with <code>ImplicitLod</code>, <code>Dref</code> or <code>Proj</code> in their name, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-02704",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used with any of the SPIR-V <code>OpImageSample*</code> or <code>OpImageSparseSample*</code> instructions that includes a LOD bias or any offset values, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-02705",
+          "text": " If the <a href=\"#features-robustBufferAccess\">robust buffer access</a> feature is not enabled, and if the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a uniform buffer, it <strong class=\"purple\">must</strong> not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-02706",
+          "text": " If the <a href=\"#features-robustBufferAccess\">robust buffer access</a> feature is not enabled, and if the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a storage buffer, it <strong class=\"purple\">must</strong> not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-renderPass-02684",
+          "text": " The current render pass <strong class=\"purple\">must</strong> be <a href=\"#renderpass-compatibility\">compatible</a> with the <code>renderPass</code> member of the <code>VkGraphicsPipelineCreateInfo</code> structure specified when creating the <code>VkPipeline</code> bound to <code>VK_PIPELINE_BIND_POINT_GRAPHICS</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-subpass-02685",
+          "text": " The subpass index of the current render pass <strong class=\"purple\">must</strong> be equal to the <code>subpass</code> member of the <code>VkGraphicsPipelineCreateInfo</code> structure specified when creating the <code>VkPipeline</code> bound to <code>VK_PIPELINE_BIND_POINT_GRAPHICS</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-02686",
+          "text": " Every input attachment used by the current subpass <strong class=\"purple\">must</strong> be bound to the pipeline via a descriptor set"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-02687",
+          "text": " Image subresources used as attachments in the current render pass <strong class=\"purple\">must</strong> not be accessed in any way other than as an attachment by this command."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-buffer-02708",
+          "text": " If <code>buffer</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-buffer-02709",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> have been created with the <code>VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT</code> bit set"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-offset-02710",
+          "text": " <code>offset</code> <strong class=\"purple\">must</strong> be a multiple of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02718",
+          "text": " If the <a href=\"#features-multiDrawIndirect\">multi-draw indirect</a> feature is not enabled, <code>drawCount</code> <strong class=\"purple\">must</strong> be <code>0</code> or <code>1</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02719",
+          "text": " <code>drawCount</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxDrawIndirectCount</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02146",
+          "text": " If <code>drawCount</code> is greater than <code>1</code>, <code>stride</code> <strong class=\"purple\">must</strong> be a multiple of <code>4</code> and <strong class=\"purple\">must</strong> be greater than or equal to <code>sizeof</code>(<code>VkDrawMeshTasksIndirectCommandNV</code>)"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02156",
+          "text": " If <code>drawCount</code> is equal to <code>1</code>, <span class=\"eq\">(<code>offset</code> &#43; <code>sizeof</code>(<a href=\"#VkDrawMeshTasksIndirectCommandNV\">VkDrawMeshTasksIndirectCommandNV</a>))</span> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>buffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02157",
+          "text": " If <code>drawCount</code> is greater than <code>1</code>, <span class=\"eq\">(<code>stride</code> {times} (<code>drawCount</code> - 1) &#43; <code>offset</code> &#43; <code>sizeof</code>(<a href=\"#VkDrawMeshTasksIndirectCommandNV\">VkDrawMeshTasksIndirectCommandNV</a>))</span> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>buffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-buffer-parameter",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called inside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-commonparent",
+          "text": " Both of <code>buffer</code>, and <code>commandBuffer</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-02692",
+          "text": " If a <code>VkImageView</code> is sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT</code>"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-02693",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command <strong class=\"purple\">must</strong> not have a <a href=\"#VkImageViewType\">VkImageViewType</a> of <code>VK_IMAGE_VIEW_TYPE_3D</code>, <code>VK_IMAGE_VIEW_TYPE_CUBE</code>, or <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-filterCubic-02694",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command <strong class=\"purple\">must</strong> have a <a href=\"#VkImageViewType\">VkImageViewType</a> and format that supports cubic filtering, as specified by <code>VkFilterCubicImageViewImageFormatPropertiesEXT</code>::<code>filterCubic</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties2</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-filterCubicMinmax-02695",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> with a reduction mode of either <code>VK_SAMPLER_REDUCTION_MODE_MIN_EXT</code> or <code>VK_SAMPLER_REDUCTION_MODE_MAX_EXT</code> as a result of this command <strong class=\"purple\">must</strong> have a <a href=\"#VkImageViewType\">VkImageViewType</a> and format that supports cubic filtering together with minmax filtering, as specified by <code>VkFilterCubicImageViewImageFormatPropertiesEXT</code>::<code>filterCubicMinmax</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties2</code>"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_NV_corner_sampled_image)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-flags-02696",
+          "text": " Any <a href=\"#VkImage\">VkImage</a> created with a <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a>::<code>flags</code> containing <code>VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV</code> sampled as a result of this command <strong class=\"purple\">must</strong> only be sampled using a <a href=\"#VkSamplerAddressMode\">VkSamplerAddressMode</a> of <code>VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE</code>."
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-02707",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, any resource accessed by the <code>VkPipeline</code> object bound to the pipeline bind point used by this command <strong class=\"purple\">must</strong> not be a protected resource"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-02711",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> not be a protected command buffer"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-maxMultiviewInstanceIndex-02688",
+          "text": " If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index <strong class=\"purple\">must</strong> be less than or equal to <a href=\"#VkPhysicalDeviceMultiviewProperties\">VkPhysicalDeviceMultiviewProperties</a>::<code>maxMultiviewInstanceIndex</code>."
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_EXT_sample_locations)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-sampleLocationsEnable-02689",
+          "text": " If the bound graphics pipeline was created with <a href=\"#VkPipelineSampleLocationsStateCreateInfoEXT\">VkPipelineSampleLocationsStateCreateInfoEXT</a>::<code>sampleLocationsEnable</code> set to <code>VK_TRUE</code> and the current subpass has a depth/stencil attachment, then that attachment <strong class=\"purple\">must</strong> have been created with the <code>VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT</code> bit set"
+        }
+      ]
+    },
+    "VkDrawMeshTasksIndirectCommandNV": {
+      "(VK_NV_mesh_shader)": [
+        {
+          "vuid": "VUID-VkDrawMeshTasksIndirectCommandNV-taskCount-02175",
+          "text": " <code>taskCount</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceMeshShaderPropertiesNV</code>::<code>maxDrawMeshTasksCount</code>"
+        }
+      ]
+    },
+    "vkCmdDrawMeshTasksIndirectCountNV": {
+      "(VK_NV_mesh_shader)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02690",
+          "text": " If a <code>VkImageView</code> is sampled with <code>VK_FILTER_LINEAR</code> as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02691",
+          "text": " If a <code>VkImageView</code> is accessed using atomic operations as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02697",
+          "text": " For each set <em>n</em> that is statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command, a descriptor set <strong class=\"purple\">must</strong> have been bound to <em>n</em> at the same pipeline bind point, with a <code>VkPipelineLayout</code> that is compatible for set <em>n</em>, with the <code>VkPipelineLayout</code> used to create the current <code>VkPipeline</code>, as described in <a href=\"#descriptorsets-compatibility\">Pipeline Layout Compatibility</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02698",
+          "text": " For each push constant that is statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command, a push constant value <strong class=\"purple\">must</strong> have been set for the same pipeline bind point, with a <code>VkPipelineLayout</code> that is compatible for push constants, with the <code>VkPipelineLayout</code> used to create the current <code>VkPipeline</code>, as described in <a href=\"#descriptorsets-compatibility\">Pipeline Layout Compatibility</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02699",
+          "text": " Descriptors in each bound descriptor set, specified via <code>vkCmdBindDescriptorSets</code>, <strong class=\"purple\">must</strong> be valid if they are statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02700",
+          "text": " A valid pipeline <strong class=\"purple\">must</strong> be bound to the pipeline bind point used by this command"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-02701",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command requires any dynamic state, that state <strong class=\"purple\">must</strong> have been set for <code>commandBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02702",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used to sample from any <code>VkImage</code> with a <code>VkImageView</code> of the type <code>VK_IMAGE_VIEW_TYPE_3D</code>, <code>VK_IMAGE_VIEW_TYPE_CUBE</code>, <code>VK_IMAGE_VIEW_TYPE_1D_ARRAY</code>, <code>VK_IMAGE_VIEW_TYPE_2D_ARRAY</code> or <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02703",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used with any of the SPIR-V <code>OpImageSample*</code> or <code>OpImageSparseSample*</code> instructions with <code>ImplicitLod</code>, <code>Dref</code> or <code>Proj</code> in their name, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02704",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used with any of the SPIR-V <code>OpImageSample*</code> or <code>OpImageSparseSample*</code> instructions that includes a LOD bias or any offset values, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02705",
+          "text": " If the <a href=\"#features-robustBufferAccess\">robust buffer access</a> feature is not enabled, and if the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a uniform buffer, it <strong class=\"purple\">must</strong> not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02706",
+          "text": " If the <a href=\"#features-robustBufferAccess\">robust buffer access</a> feature is not enabled, and if the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a storage buffer, it <strong class=\"purple\">must</strong> not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-renderPass-02684",
+          "text": " The current render pass <strong class=\"purple\">must</strong> be <a href=\"#renderpass-compatibility\">compatible</a> with the <code>renderPass</code> member of the <code>VkGraphicsPipelineCreateInfo</code> structure specified when creating the <code>VkPipeline</code> bound to <code>VK_PIPELINE_BIND_POINT_GRAPHICS</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-subpass-02685",
+          "text": " The subpass index of the current render pass <strong class=\"purple\">must</strong> be equal to the <code>subpass</code> member of the <code>VkGraphicsPipelineCreateInfo</code> structure specified when creating the <code>VkPipeline</code> bound to <code>VK_PIPELINE_BIND_POINT_GRAPHICS</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02686",
+          "text": " Every input attachment used by the current subpass <strong class=\"purple\">must</strong> be bound to the pipeline via a descriptor set"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02687",
+          "text": " Image subresources used as attachments in the current render pass <strong class=\"purple\">must</strong> not be accessed in any way other than as an attachment by this command."
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-buffer-02708",
+          "text": " If <code>buffer</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-buffer-02709",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> have been created with the <code>VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT</code> bit set"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-offset-02710",
+          "text": " <code>offset</code> <strong class=\"purple\">must</strong> be a multiple of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02714",
+          "text": " If <code>countBuffer</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02715",
+          "text": " <code>countBuffer</code> <strong class=\"purple\">must</strong> have been created with the <code>VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT</code> bit set"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-countBufferOffset-02716",
+          "text": " <code>countBufferOffset</code> <strong class=\"purple\">must</strong> be a multiple of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02717",
+          "text": " The count stored in <code>countBuffer</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxDrawIndirectCount</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-stride-02182",
+          "text": " <code>stride</code> <strong class=\"purple\">must</strong> be a multiple of <code>4</code> and <strong class=\"purple\">must</strong> be greater than or equal to <code>sizeof</code>(<code>VkDrawMeshTasksIndirectCommandNV</code>)"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-maxDrawCount-02183",
+          "text": " If <code>maxDrawCount</code> is greater than or equal to <code>1</code>, <span class=\"eq\">(<code>stride</code> {times} (<code>maxDrawCount</code> - 1) &#43; <code>offset</code> &#43; <code>sizeof</code>(<code>VkDrawMeshTasksIndirectCommandNV</code>))</span> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>buffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02191",
+          "text": " If the count stored in <code>countBuffer</code> is equal to <code>1</code>, <span class=\"eq\">(<code>offset</code> &#43; <code>sizeof</code>(<code>VkDrawMeshTasksIndirectCommandNV</code>))</span> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>buffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02192",
+          "text": " If the count stored in <code>countBuffer</code> is greater than <code>1</code>, <span class=\"eq\">(<code>stride</code> {times} (<code>drawCount</code> - 1) &#43; <code>offset</code> &#43; <code>sizeof</code>(<code>VkDrawMeshTasksIndirectCommandNV</code>))</span> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>buffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-buffer-parameter",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-parameter",
+          "text": " <code>countBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called inside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-commonparent",
+          "text": " Each of <code>buffer</code>, <code>commandBuffer</code>, and <code>countBuffer</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02692",
+          "text": " If a <code>VkImageView</code> is sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT</code>"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02693",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command <strong class=\"purple\">must</strong> not have a <a href=\"#VkImageViewType\">VkImageViewType</a> of <code>VK_IMAGE_VIEW_TYPE_3D</code>, <code>VK_IMAGE_VIEW_TYPE_CUBE</code>, or <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-filterCubic-02694",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command <strong class=\"purple\">must</strong> have a <a href=\"#VkImageViewType\">VkImageViewType</a> and format that supports cubic filtering, as specified by <code>VkFilterCubicImageViewImageFormatPropertiesEXT</code>::<code>filterCubic</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties2</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-filterCubicMinmax-02695",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> with a reduction mode of either <code>VK_SAMPLER_REDUCTION_MODE_MIN_EXT</code> or <code>VK_SAMPLER_REDUCTION_MODE_MAX_EXT</code> as a result of this command <strong class=\"purple\">must</strong> have a <a href=\"#VkImageViewType\">VkImageViewType</a> and format that supports cubic filtering together with minmax filtering, as specified by <code>VkFilterCubicImageViewImageFormatPropertiesEXT</code>::<code>filterCubicMinmax</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties2</code>"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_NV_corner_sampled_image)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-flags-02696",
+          "text": " Any <a href=\"#VkImage\">VkImage</a> created with a <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a>::<code>flags</code> containing <code>VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV</code> sampled as a result of this command <strong class=\"purple\">must</strong> only be sampled using a <a href=\"#VkSamplerAddressMode\">VkSamplerAddressMode</a> of <code>VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE</code>."
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-02707",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, any resource accessed by the <code>VkPipeline</code> object bound to the pipeline bind point used by this command <strong class=\"purple\">must</strong> not be a protected resource"
+        },
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-02711",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> not be a protected command buffer"
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-maxMultiviewInstanceIndex-02688",
+          "text": " If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index <strong class=\"purple\">must</strong> be less than or equal to <a href=\"#VkPhysicalDeviceMultiviewProperties\">VkPhysicalDeviceMultiviewProperties</a>::<code>maxMultiviewInstanceIndex</code>."
+        }
+      ],
+      "(VK_NV_mesh_shader)+(VK_EXT_sample_locations)": [
+        {
+          "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-sampleLocationsEnable-02689",
+          "text": " If the bound graphics pipeline was created with <a href=\"#VkPipelineSampleLocationsStateCreateInfoEXT\">VkPipelineSampleLocationsStateCreateInfoEXT</a>::<code>sampleLocationsEnable</code> set to <code>VK_TRUE</code> and the current subpass has a depth/stencil attachment, then that attachment <strong class=\"purple\">must</strong> have been created with the <code>VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT</code> bit set"
+        }
+      ]
+    },
+    "VkPipelineVertexInputStateCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkPipelineVertexInputStateCreateInfo-vertexBindingDescriptionCount-00613",
+          "text": " <code>vertexBindingDescriptionCount</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxVertexInputBindings</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineVertexInputStateCreateInfo-vertexAttributeDescriptionCount-00614",
+          "text": " <code>vertexAttributeDescriptionCount</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxVertexInputAttributes</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineVertexInputStateCreateInfo-binding-00615",
+          "text": " For every <code>binding</code> specified by each element of <code>pVertexAttributeDescriptions</code>, a <code>VkVertexInputBindingDescription</code> <strong class=\"purple\">must</strong> exist in <code>pVertexBindingDescriptions</code> with the same value of <code>binding</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineVertexInputStateCreateInfo-pVertexBindingDescriptions-00616",
+          "text": " All elements of <code>pVertexBindingDescriptions</code> <strong class=\"purple\">must</strong> describe distinct binding numbers"
+        },
+        {
+          "vuid": "VUID-VkPipelineVertexInputStateCreateInfo-pVertexAttributeDescriptions-00617",
+          "text": " All elements of <code>pVertexAttributeDescriptions</code> <strong class=\"purple\">must</strong> describe distinct attribute locations"
+        },
+        {
+          "vuid": "VUID-VkPipelineVertexInputStateCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineVertexInputStateCreateInfo-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkPipelineVertexInputDivisorStateCreateInfoEXT\">VkPipelineVertexInputDivisorStateCreateInfoEXT</a>"
+        },
+        {
+          "vuid": "VUID-VkPipelineVertexInputStateCreateInfo-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineVertexInputStateCreateInfo-pVertexBindingDescriptions-parameter",
+          "text": " If <code>vertexBindingDescriptionCount</code> is not <code>0</code>, <code>pVertexBindingDescriptions</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>vertexBindingDescriptionCount</code> valid <a href=\"#VkVertexInputBindingDescription\">VkVertexInputBindingDescription</a> structures"
+        },
+        {
+          "vuid": "VUID-VkPipelineVertexInputStateCreateInfo-pVertexAttributeDescriptions-parameter",
+          "text": " If <code>vertexAttributeDescriptionCount</code> is not <code>0</code>, <code>pVertexAttributeDescriptions</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>vertexAttributeDescriptionCount</code> valid <a href=\"#VkVertexInputAttributeDescription\">VkVertexInputAttributeDescription</a> structures"
+        }
+      ]
+    },
+    "VkVertexInputBindingDescription": {
+      "core": [
+        {
+          "vuid": "VUID-VkVertexInputBindingDescription-binding-00618",
+          "text": " <code>binding</code> <strong class=\"purple\">must</strong> be less than <code>VkPhysicalDeviceLimits</code>::<code>maxVertexInputBindings</code>"
+        },
+        {
+          "vuid": "VUID-VkVertexInputBindingDescription-stride-00619",
+          "text": " <code>stride</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxVertexInputBindingStride</code>"
+        },
+        {
+          "vuid": "VUID-VkVertexInputBindingDescription-inputRate-parameter",
+          "text": " <code>inputRate</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkVertexInputRate\">VkVertexInputRate</a> value"
+        }
+      ]
+    },
+    "VkVertexInputAttributeDescription": {
+      "core": [
+        {
+          "vuid": "VUID-VkVertexInputAttributeDescription-location-00620",
+          "text": " <code>location</code> <strong class=\"purple\">must</strong> be less than <code>VkPhysicalDeviceLimits</code>::<code>maxVertexInputAttributes</code>"
+        },
+        {
+          "vuid": "VUID-VkVertexInputAttributeDescription-binding-00621",
+          "text": " <code>binding</code> <strong class=\"purple\">must</strong> be less than <code>VkPhysicalDeviceLimits</code>::<code>maxVertexInputBindings</code>"
+        },
+        {
+          "vuid": "VUID-VkVertexInputAttributeDescription-offset-00622",
+          "text": " <code>offset</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxVertexInputAttributeOffset</code>"
+        },
+        {
+          "vuid": "VUID-VkVertexInputAttributeDescription-format-00623",
+          "text": " <code>format</code> <strong class=\"purple\">must</strong> be allowed as a vertex buffer format, as specified by the <code>VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT</code> flag in <code>VkFormatProperties</code>::<code>bufferFeatures</code> returned by <code>vkGetPhysicalDeviceFormatProperties</code>"
+        },
+        {
+          "vuid": "VUID-VkVertexInputAttributeDescription-format-parameter",
+          "text": " <code>format</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFormat\">VkFormat</a> value"
+        }
+      ]
+    },
+    "vkCmdBindVertexBuffers": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdBindVertexBuffers-firstBinding-00624",
+          "text": " <code>firstBinding</code> <strong class=\"purple\">must</strong> be less than <code>VkPhysicalDeviceLimits</code>::<code>maxVertexInputBindings</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBindVertexBuffers-firstBinding-00625",
+          "text": " The sum of <code>firstBinding</code> and <code>bindingCount</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxVertexInputBindings</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBindVertexBuffers-pOffsets-00626",
+          "text": " All elements of <code>pOffsets</code> <strong class=\"purple\">must</strong> be less than the size of the corresponding element in <code>pBuffers</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBindVertexBuffers-pBuffers-00627",
+          "text": " All elements of <code>pBuffers</code> <strong class=\"purple\">must</strong> have been created with the <code>VK_BUFFER_USAGE_VERTEX_BUFFER_BIT</code> flag"
+        },
+        {
+          "vuid": "VUID-vkCmdBindVertexBuffers-pBuffers-00628",
+          "text": " Each element of <code>pBuffers</code> that is non-sparse <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdBindVertexBuffers-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdBindVertexBuffers-pBuffers-parameter",
+          "text": " <code>pBuffers</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>bindingCount</code> valid <a href=\"#VkBuffer\">VkBuffer</a> handles"
+        },
+        {
+          "vuid": "VUID-vkCmdBindVertexBuffers-pOffsets-parameter",
+          "text": " <code>pOffsets</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>bindingCount</code> <code>VkDeviceSize</code> values"
+        },
+        {
+          "vuid": "VUID-vkCmdBindVertexBuffers-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdBindVertexBuffers-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdBindVertexBuffers-bindingCount-arraylength",
+          "text": " <code>bindingCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBindVertexBuffers-commonparent",
+          "text": " Both of <code>commandBuffer</code>, and the elements of <code>pBuffers</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
+    "VkPipelineVertexInputDivisorStateCreateInfoEXT": {
+      "(VK_EXT_vertex_attribute_divisor)": [
+        {
+          "vuid": "VUID-VkPipelineVertexInputDivisorStateCreateInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineVertexInputDivisorStateCreateInfoEXT-pVertexBindingDivisors-parameter",
+          "text": " <code>pVertexBindingDivisors</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>vertexBindingDivisorCount</code> <a href=\"#VkVertexInputBindingDivisorDescriptionEXT\">VkVertexInputBindingDivisorDescriptionEXT</a> structures"
+        },
+        {
+          "vuid": "VUID-VkPipelineVertexInputDivisorStateCreateInfoEXT-vertexBindingDivisorCount-arraylength",
+          "text": " <code>vertexBindingDivisorCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkVertexInputBindingDivisorDescriptionEXT": {
+      "(VK_EXT_vertex_attribute_divisor)": [
+        {
+          "vuid": "VUID-VkVertexInputBindingDivisorDescriptionEXT-binding-01869",
+          "text": " <code>binding</code> <strong class=\"purple\">must</strong> be less than <code>VkPhysicalDeviceLimits</code>::<code>maxVertexInputBindings</code>"
+        },
+        {
+          "vuid": "VUID-VkVertexInputBindingDivisorDescriptionEXT-vertexAttributeInstanceRateZeroDivisor-02228",
+          "text": " If the <code>vertexAttributeInstanceRateZeroDivisor</code> feature is not enabled, <code>divisor</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkVertexInputBindingDivisorDescriptionEXT-vertexAttributeInstanceRateDivisor-02229",
+          "text": " If the <code>vertexAttributeInstanceRateDivisor</code> feature is not enabled, <code>divisor</code> <strong class=\"purple\">must</strong> be <code>1</code>"
+        },
+        {
+          "vuid": "VUID-VkVertexInputBindingDivisorDescriptionEXT-divisor-01870",
+          "text": " <code>divisor</code> <strong class=\"purple\">must</strong> be a value between <code>0</code> and <code>VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT</code>::<code>maxVertexAttribDivisor</code>, inclusive."
+        },
+        {
+          "vuid": "VUID-VkVertexInputBindingDivisorDescriptionEXT-inputRate-01871",
+          "text": " <a href=\"#VkVertexInputBindingDescription\">VkVertexInputBindingDescription</a>::<code>inputRate</code> <strong class=\"purple\">must</strong> be of type <code>VK_VERTEX_INPUT_RATE_INSTANCE</code> for this <code>binding</code>."
+        }
+      ]
+    },
+    "VkPipelineTessellationStateCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkPipelineTessellationStateCreateInfo-patchControlPoints-01214",
+          "text": " <code>patchControlPoints</code> <strong class=\"purple\">must</strong> be greater than zero and less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxTessellationPatchSize</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineTessellationStateCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineTessellationStateCreateInfo-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkPipelineTessellationDomainOriginStateCreateInfo\">VkPipelineTessellationDomainOriginStateCreateInfo</a>"
+        },
+        {
+          "vuid": "VUID-VkPipelineTessellationStateCreateInfo-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        }
+      ]
+    },
+    "VkPipelineTessellationDomainOriginStateCreateInfo": {
+      "(VK_VERSION_1_1,VK_KHR_maintenance2)": [
+        {
+          "vuid": "VUID-VkPipelineTessellationDomainOriginStateCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineTessellationDomainOriginStateCreateInfo-domainOrigin-parameter",
+          "text": " <code>domainOrigin</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkTessellationDomainOrigin\">VkTessellationDomainOrigin</a> value"
+        }
+      ]
+    },
+    "vkCmdBindTransformFeedbackBuffersEXT": {
+      "(VK_EXT_transform_feedback)": [
+        {
+          "vuid": "VUID-vkCmdBindTransformFeedbackBuffersEXT-transformFeedback-02355",
+          "text": " <code>VkPhysicalDeviceTransformFeedbackFeaturesEXT</code>::<code>transformFeedback</code> <strong class=\"purple\">must</strong> be enabled"
+        },
+        {
+          "vuid": "VUID-vkCmdBindTransformFeedbackBuffersEXT-firstBinding-02356",
+          "text": " <code>firstBinding</code> <strong class=\"purple\">must</strong> be less than <code>VkPhysicalDeviceTransformFeedbackPropertiesEXT</code>::<code>maxTransformFeedbackBuffers</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBindTransformFeedbackBuffersEXT-firstBinding-02357",
+          "text": " The sum of <code>firstBinding</code> and <code>bindingCount</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceTransformFeedbackPropertiesEXT</code>::<code>maxTransformFeedbackBuffers</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBindTransformFeedbackBuffersEXT-pOffsets-02358",
+          "text": " All elements of <code>pOffsets</code> <strong class=\"purple\">must</strong> be less than the size of the corresponding element in <code>pBuffers</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBindTransformFeedbackBuffersEXT-pOffsets-02359",
+          "text": " All elements of <code>pOffsets</code> <strong class=\"purple\">must</strong> be a multiple of 4"
+        },
+        {
+          "vuid": "VUID-vkCmdBindTransformFeedbackBuffersEXT-pBuffers-02360",
+          "text": " All elements of <code>pBuffers</code> <strong class=\"purple\">must</strong> have been created with the <code>VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT</code> flag"
+        },
+        {
+          "vuid": "VUID-vkCmdBindTransformFeedbackBuffersEXT-pSize-02361",
+          "text": " If the optional <code>pSize</code> array is specified, each element of <code>pSizes</code> <strong class=\"purple\">must</strong> either be <code>VK_WHOLE_SIZE</code>, or be less than or equal to <code>VkPhysicalDeviceTransformFeedbackPropertiesEXT</code>::<code>maxTransformFeedbackBufferSize</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBindTransformFeedbackBuffersEXT-pSizes-02362",
+          "text": " All elements of <code>pSizes</code> <strong class=\"purple\">must</strong> be less than or equal to the size of the corresponding buffer in <code>pBuffers</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBindTransformFeedbackBuffersEXT-pOffsets-02363",
+          "text": " All elements of <code>pOffsets</code> plus <code>pSizes</code>, where the <code>pSizes</code>, element is not <code>VK_WHOLE_SIZE</code>, <strong class=\"purple\">must</strong> be less than or equal to the size of the corresponding element in <code>pBuffers</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBindTransformFeedbackBuffersEXT-pBuffers-02364",
+          "text": " Each element of <code>pBuffers</code> that is non-sparse <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdBindTransformFeedbackBuffersEXT-None-02365",
+          "text": " Transform feedback <strong class=\"purple\">must</strong> not be active when the <code>vkCmdBindTransformFeedbackBuffersEXT</code> command is recorded"
+        },
+        {
+          "vuid": "VUID-vkCmdBindTransformFeedbackBuffersEXT-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdBindTransformFeedbackBuffersEXT-pBuffers-parameter",
+          "text": " <code>pBuffers</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>bindingCount</code> valid <a href=\"#VkBuffer\">VkBuffer</a> handles"
+        },
+        {
+          "vuid": "VUID-vkCmdBindTransformFeedbackBuffersEXT-pOffsets-parameter",
+          "text": " <code>pOffsets</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>bindingCount</code> <code>VkDeviceSize</code> values"
+        },
+        {
+          "vuid": "VUID-vkCmdBindTransformFeedbackBuffersEXT-pSizes-parameter",
+          "text": " If <code>pSizes</code> is not <code>NULL</code>, <code>pSizes</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>bindingCount</code> <code>VkDeviceSize</code> values"
+        },
+        {
+          "vuid": "VUID-vkCmdBindTransformFeedbackBuffersEXT-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdBindTransformFeedbackBuffersEXT-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdBindTransformFeedbackBuffersEXT-bindingCount-arraylength",
+          "text": " If <code>pSizes</code> is not <code>NULL</code>, <code>bindingCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBindTransformFeedbackBuffersEXT-commonparent",
+          "text": " Both of <code>commandBuffer</code>, and the elements of <code>pBuffers</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
+    "vkCmdBeginTransformFeedbackEXT": {
+      "(VK_EXT_transform_feedback)": [
+        {
+          "vuid": "VUID-vkCmdBeginTransformFeedbackEXT-transformFeedback-02366",
+          "text": " <code>VkPhysicalDeviceTransformFeedbackFeaturesEXT</code>::<code>transformFeedback</code> <strong class=\"purple\">must</strong> be enabled"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginTransformFeedbackEXT-None-02367",
+          "text": " Transform feedback <strong class=\"purple\">must</strong> not be active"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginTransformFeedbackEXT-firstCounterBuffer-02368",
+          "text": " <code>firstCounterBuffer</code> <strong class=\"purple\">must</strong> be less than <code>VkPhysicalDeviceTransformFeedbackPropertiesEXT</code>::<code>maxTransformFeedbackBuffers</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginTransformFeedbackEXT-firstCounterBuffer-02369",
+          "text": " The sum of <code>firstCounterBuffer</code> and <code>counterBufferCount</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceTransformFeedbackPropertiesEXT</code>::<code>maxTransformFeedbackBuffers</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginTransformFeedbackEXT-counterBufferCount-02607",
+          "text": " If <code>counterBufferCount</code> is not <code>0</code>, and <code>pCounterBuffers</code> is not <code>NULL</code>, <code>pCounterBuffers</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>counterBufferCount</code> <code>VkBuffer</code> handles that are either valid or <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginTransformFeedbackEXT-pCounterBufferOffsets-02370",
+          "text": " For each buffer handle in the array, if it is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a> it <strong class=\"purple\">must</strong> reference a buffer large enough to hold 4 bytes at the corresponding offset from the <code>pCounterBufferOffsets</code> array"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginTransformFeedbackEXT-pCounterBuffer-02371",
+          "text": " If <code>pCounterBuffer</code> is <code>NULL</code>, then <code>pCounterBufferOffsets</code> <strong class=\"purple\">must</strong> also be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginTransformFeedbackEXT-pCounterBuffers-02372",
+          "text": " For each buffer handle in the <code>pCounterBuffers</code> array that is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a> it <strong class=\"purple\">must</strong> have been created with a <code>usage</code> value containing <code>VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginTransformFeedbackEXT-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginTransformFeedbackEXT-pCounterBufferOffsets-parameter",
+          "text": " If <code>counterBufferCount</code> is not <code>0</code>, and <code>pCounterBufferOffsets</code> is not <code>NULL</code>, <code>pCounterBufferOffsets</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>counterBufferCount</code> <code>VkDeviceSize</code> values"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginTransformFeedbackEXT-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginTransformFeedbackEXT-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginTransformFeedbackEXT-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called inside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginTransformFeedbackEXT-commonparent",
+          "text": " Both of <code>commandBuffer</code>, and the elements of <code>pCounterBuffers</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_EXT_transform_feedback)+(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-vkCmdBeginTransformFeedbackEXT-None-02373",
+          "text": " Transform feedback <strong class=\"purple\">must</strong> not be made active in a render pass instance with multiview enabled"
+        }
+      ]
+    },
+    "vkCmdEndTransformFeedbackEXT": {
+      "(VK_EXT_transform_feedback)": [
+        {
+          "vuid": "VUID-vkCmdEndTransformFeedbackEXT-transformFeedback-02374",
+          "text": " <code>VkPhysicalDeviceTransformFeedbackFeaturesEXT</code>::<code>transformFeedback</code> <strong class=\"purple\">must</strong> be enabled"
+        },
+        {
+          "vuid": "VUID-vkCmdEndTransformFeedbackEXT-None-02375",
+          "text": " Transform feedback <strong class=\"purple\">must</strong> be active"
+        },
+        {
+          "vuid": "VUID-vkCmdEndTransformFeedbackEXT-firstCounterBuffer-02376",
+          "text": " <code>firstCounterBuffer</code> <strong class=\"purple\">must</strong> be less than <code>VkPhysicalDeviceTransformFeedbackPropertiesEXT</code>::<code>maxTransformFeedbackBuffers</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdEndTransformFeedbackEXT-firstCounterBuffer-02377",
+          "text": " The sum of <code>firstCounterBuffer</code> and <code>counterBufferCount</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceTransformFeedbackPropertiesEXT</code>::<code>maxTransformFeedbackBuffers</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdEndTransformFeedbackEXT-counterBufferCount-02608",
+          "text": " If <code>counterBufferCount</code> is not <code>0</code>, and <code>pCounterBuffers</code> is not <code>NULL</code>, <code>pCounterBuffers</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>counterBufferCount</code> <code>VkBuffer</code> handles that are either valid or <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdEndTransformFeedbackEXT-pCounterBufferOffsets-02378",
+          "text": " For each buffer handle in the array, if it is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a> it <strong class=\"purple\">must</strong> reference a buffer large enough to hold 4 bytes at the corresponding offset from the <code>pCounterBufferOffsets</code> array"
+        },
+        {
+          "vuid": "VUID-vkCmdEndTransformFeedbackEXT-pCounterBuffer-02379",
+          "text": " If <code>pCounterBuffer</code> is <code>NULL</code>, then <code>pCounterBufferOffsets</code> <strong class=\"purple\">must</strong> also be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdEndTransformFeedbackEXT-pCounterBuffers-02380",
+          "text": " For each buffer handle in the <code>pCounterBuffers</code> array that is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a> it <strong class=\"purple\">must</strong> have been created with a <code>usage</code> value containing <code>VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdEndTransformFeedbackEXT-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdEndTransformFeedbackEXT-pCounterBufferOffsets-parameter",
+          "text": " If <code>counterBufferCount</code> is not <code>0</code>, and <code>pCounterBufferOffsets</code> is not <code>NULL</code>, <code>pCounterBufferOffsets</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>counterBufferCount</code> <code>VkDeviceSize</code> values"
+        },
+        {
+          "vuid": "VUID-vkCmdEndTransformFeedbackEXT-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdEndTransformFeedbackEXT-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdEndTransformFeedbackEXT-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called inside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdEndTransformFeedbackEXT-commonparent",
+          "text": " Both of <code>commandBuffer</code>, and the elements of <code>pCounterBuffers</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
+    "VkPipelineViewportSwizzleStateCreateInfoNV": {
+      "(VK_NV_viewport_swizzle)": [
+        {
+          "vuid": "VUID-VkPipelineViewportSwizzleStateCreateInfoNV-viewportCount-01215",
+          "text": " <code>viewportCount</code> <strong class=\"purple\">must</strong> match the <code>viewportCount</code> set in <code>VkPipelineViewportStateCreateInfo</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportSwizzleStateCreateInfoNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportSwizzleStateCreateInfoNV-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportSwizzleStateCreateInfoNV-pViewportSwizzles-parameter",
+          "text": " <code>pViewportSwizzles</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>viewportCount</code> valid <a href=\"#VkViewportSwizzleNV\">VkViewportSwizzleNV</a> structures"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportSwizzleStateCreateInfoNV-viewportCount-arraylength",
+          "text": " <code>viewportCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkViewportSwizzleNV": {
+      "(VK_NV_viewport_swizzle)": [
+        {
+          "vuid": "VUID-VkViewportSwizzleNV-x-parameter",
+          "text": " <code>x</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkViewportCoordinateSwizzleNV\">VkViewportCoordinateSwizzleNV</a> value"
+        },
+        {
+          "vuid": "VUID-VkViewportSwizzleNV-y-parameter",
+          "text": " <code>y</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkViewportCoordinateSwizzleNV\">VkViewportCoordinateSwizzleNV</a> value"
+        },
+        {
+          "vuid": "VUID-VkViewportSwizzleNV-z-parameter",
+          "text": " <code>z</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkViewportCoordinateSwizzleNV\">VkViewportCoordinateSwizzleNV</a> value"
+        },
+        {
+          "vuid": "VUID-VkViewportSwizzleNV-w-parameter",
+          "text": " <code>w</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkViewportCoordinateSwizzleNV\">VkViewportCoordinateSwizzleNV</a> value"
+        }
+      ]
+    },
+    "VkPipelineViewportWScalingStateCreateInfoNV": {
+      "(VK_NV_clip_space_w_scaling)": [
+        {
+          "vuid": "VUID-VkPipelineViewportWScalingStateCreateInfoNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportWScalingStateCreateInfoNV-viewportCount-arraylength",
+          "text": " <code>viewportCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "vkCmdSetViewportWScalingNV": {
+      "(VK_NV_clip_space_w_scaling)": [
+        {
+          "vuid": "VUID-vkCmdSetViewportWScalingNV-None-01322",
+          "text": " The bound graphics pipeline <strong class=\"purple\">must</strong> have been created with the <code>VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV</code> dynamic state enabled"
+        },
+        {
+          "vuid": "VUID-vkCmdSetViewportWScalingNV-firstViewport-01323",
+          "text": " <code>firstViewport</code> <strong class=\"purple\">must</strong> be less than <a href=\"#VkPhysicalDeviceLimits\">VkPhysicalDeviceLimits</a>::<code>maxViewports</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetViewportWScalingNV-firstViewport-01324",
+          "text": " The sum of <code>firstViewport</code> and <code>viewportCount</code> <strong class=\"purple\">must</strong> be between <code>1</code> and <a href=\"#VkPhysicalDeviceLimits\">VkPhysicalDeviceLimits</a>::<code>maxViewports</code>, inclusive"
+        },
+        {
+          "vuid": "VUID-vkCmdSetViewportWScalingNV-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdSetViewportWScalingNV-pViewportWScalings-parameter",
+          "text": " <code>pViewportWScalings</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>viewportCount</code> <a href=\"#VkViewportWScalingNV\">VkViewportWScalingNV</a> structures"
+        },
+        {
+          "vuid": "VUID-vkCmdSetViewportWScalingNV-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetViewportWScalingNV-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdSetViewportWScalingNV-viewportCount-arraylength",
+          "text": " <code>viewportCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkPipelineViewportStateCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
+          "text": " If the <a href=\"#features-multiViewport\">multiple viewports</a> feature is not enabled, <code>viewportCount</code> <strong class=\"purple\">must</strong> be <code>1</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
+          "text": " If the <a href=\"#features-multiViewport\">multiple viewports</a> feature is not enabled, <code>scissorCount</code> <strong class=\"purple\">must</strong> be <code>1</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportStateCreateInfo-viewportCount-01218",
+          "text": " <code>viewportCount</code> <strong class=\"purple\">must</strong> be between <code>1</code> and <code>VkPhysicalDeviceLimits</code>::<code>maxViewports</code>, inclusive"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01219",
+          "text": " <code>scissorCount</code> <strong class=\"purple\">must</strong> be between <code>1</code> and <code>VkPhysicalDeviceLimits</code>::<code>maxViewports</code>, inclusive"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220",
+          "text": " <code>scissorCount</code> and <code>viewportCount</code> <strong class=\"purple\">must</strong> be identical"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportStateCreateInfo-x-02821",
+          "text": " The <code>x</code> and <code>y</code> members of <code>offset</code> member of any element of <code>pScissors</code> <strong class=\"purple\">must</strong> be greater than or equal to <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportStateCreateInfo-offset-02822",
+          "text": " Evaluation of <span class=\"eq\">(<code>offset.x</code> &#43; <code>extent.width</code>)</span> <strong class=\"purple\">must</strong> not cause a signed integer addition overflow for any element of <code>pScissors</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportStateCreateInfo-offset-02823",
+          "text": " Evaluation of <span class=\"eq\">(<code>offset.y</code> &#43; <code>extent.height</code>)</span> <strong class=\"purple\">must</strong> not cause a signed integer addition overflow for any element of <code>pScissors</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportStateCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportStateCreateInfo-pNext-pNext",
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkPipelineViewportCoarseSampleOrderStateCreateInfoNV\">VkPipelineViewportCoarseSampleOrderStateCreateInfoNV</a>, <a href=\"#VkPipelineViewportExclusiveScissorStateCreateInfoNV\">VkPipelineViewportExclusiveScissorStateCreateInfoNV</a>, <a href=\"#VkPipelineViewportShadingRateImageStateCreateInfoNV\">VkPipelineViewportShadingRateImageStateCreateInfoNV</a>, <a href=\"#VkPipelineViewportSwizzleStateCreateInfoNV\">VkPipelineViewportSwizzleStateCreateInfoNV</a>, or <a href=\"#VkPipelineViewportWScalingStateCreateInfoNV\">VkPipelineViewportWScalingStateCreateInfoNV</a>"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportStateCreateInfo-sType-unique",
+          "text": " Each <code>sType</code> member in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportStateCreateInfo-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportStateCreateInfo-viewportCount-arraylength",
+          "text": " <code>viewportCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportStateCreateInfo-scissorCount-arraylength",
+          "text": " <code>scissorCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ],
+      "(VK_NV_clip_space_w_scaling)": [
+        {
+          "vuid": "VUID-VkPipelineViewportStateCreateInfo-viewportWScalingEnable-01726",
+          "text": " If the <code>viewportWScalingEnable</code> member of a <a href=\"#VkPipelineViewportWScalingStateCreateInfoNV\">VkPipelineViewportWScalingStateCreateInfoNV</a> structure included in the <code>pNext</code> chain is <code>VK_TRUE</code>, the <code>viewportCount</code> member of the <a href=\"#VkPipelineViewportWScalingStateCreateInfoNV\">VkPipelineViewportWScalingStateCreateInfoNV</a> structure <strong class=\"purple\">must</strong> be equal to <code>viewportCount</code>"
+        }
+      ]
+    },
+    "vkCmdSetViewport": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdSetViewport-None-01221",
+          "text": " The bound graphics pipeline <strong class=\"purple\">must</strong> have been created with the <code>VK_DYNAMIC_STATE_VIEWPORT</code> dynamic state enabled"
+        },
+        {
+          "vuid": "VUID-vkCmdSetViewport-firstViewport-01222",
+          "text": " <code>firstViewport</code> <strong class=\"purple\">must</strong> be less than <code>VkPhysicalDeviceLimits</code>::<code>maxViewports</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetViewport-firstViewport-01223",
+          "text": " The sum of <code>firstViewport</code> and <code>viewportCount</code> <strong class=\"purple\">must</strong> be between <code>1</code> and <code>VkPhysicalDeviceLimits</code>::<code>maxViewports</code>, inclusive"
+        },
+        {
+          "vuid": "VUID-vkCmdSetViewport-firstViewport-01224",
+          "text": " If the <a href=\"#features-multiViewport\">multiple viewports</a> feature is not enabled, <code>firstViewport</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetViewport-viewportCount-01225",
+          "text": " If the <a href=\"#features-multiViewport\">multiple viewports</a> feature is not enabled, <code>viewportCount</code> <strong class=\"purple\">must</strong> be <code>1</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetViewport-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdSetViewport-pViewports-parameter",
+          "text": " <code>pViewports</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>viewportCount</code> valid <a href=\"#VkViewport\">VkViewport</a> structures"
+        },
+        {
+          "vuid": "VUID-vkCmdSetViewport-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetViewport-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdSetViewport-viewportCount-arraylength",
+          "text": " <code>viewportCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkViewport": {
+      "core": [
+        {
+          "vuid": "VUID-VkViewport-width-01770",
+          "text": " <code>width</code> <strong class=\"purple\">must</strong> be greater than <code>0.0</code>"
+        },
+        {
+          "vuid": "VUID-VkViewport-width-01771",
+          "text": " <code>width</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxViewportDimensions</code>[0]"
+        },
+        {
+          "vuid": "VUID-VkViewport-height-01773",
+          "text": " The absolute value of <code>height</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxViewportDimensions</code>[1]"
+        },
+        {
+          "vuid": "VUID-VkViewport-x-01774",
+          "text": " <code>x</code> <strong class=\"purple\">must</strong> be greater than or equal to <code>viewportBoundsRange</code>[0]"
+        },
+        {
+          "vuid": "VUID-VkViewport-x-01232",
+          "text": " <span class=\"eq\">(<code>x</code> &#43; <code>width</code>)</span> <strong class=\"purple\">must</strong> be less than or equal to <code>viewportBoundsRange</code>[1]"
+        },
+        {
+          "vuid": "VUID-VkViewport-y-01775",
+          "text": " <code>y</code> <strong class=\"purple\">must</strong> be greater than or equal to <code>viewportBoundsRange</code>[0]"
+        },
+        {
+          "vuid": "VUID-VkViewport-y-01233",
+          "text": " <span class=\"eq\">(<code>y</code> &#43; <code>height</code>)</span> <strong class=\"purple\">must</strong> be less than or equal to <code>viewportBoundsRange</code>[1]"
+        }
+      ],
+      "!(VK_VERSION_1_1,VK_KHR_maintenance1,VK_AMD_negative_viewport_height)": [
+        {
+          "vuid": "VUID-VkViewport-height-01772",
+          "text": " <code>height</code> <strong class=\"purple\">must</strong> be greater than <code>0.0</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_maintenance1,VK_AMD_negative_viewport_height)": [
+        {
+          "vuid": "VUID-VkViewport-y-01776",
+          "text": " <code>y</code> <strong class=\"purple\">must</strong> be less than or equal to <code>viewportBoundsRange</code>[1]"
+        },
+        {
+          "vuid": "VUID-VkViewport-y-01777",
+          "text": " <span class=\"eq\">(<code>y</code> &#43; <code>height</code>)</span> <strong class=\"purple\">must</strong> be greater than or equal to <code>viewportBoundsRange</code>[0]"
+        }
+      ],
+      "(VK_EXT_depth_range_unrestricted)": [
+        {
+          "vuid": "VUID-VkViewport-minDepth-01234",
+          "text": " Unless <code><a href=\"#VK_EXT_depth_range_unrestricted\">VK_EXT_depth_range_unrestricted</a></code> extension is enabled <code>minDepth</code> <strong class=\"purple\">must</strong> be between <code>0.0</code> and <code>1.0</code>, inclusive"
+        },
+        {
+          "vuid": "VUID-VkViewport-maxDepth-01235",
+          "text": " Unless <code><a href=\"#VK_EXT_depth_range_unrestricted\">VK_EXT_depth_range_unrestricted</a></code> extension is enabled <code>maxDepth</code> <strong class=\"purple\">must</strong> be between <code>0.0</code> and <code>1.0</code>, inclusive"
+        }
+      ],
+      "!(VK_EXT_depth_range_unrestricted)": [
+        {
+          "vuid": "VUID-VkViewport-minDepth-02540",
+          "text": " <code>minDepth</code> <strong class=\"purple\">must</strong> be between <code>0.0</code> and <code>1.0</code>, inclusive"
+        },
+        {
+          "vuid": "VUID-VkViewport-maxDepth-02541",
+          "text": " <code>maxDepth</code> <strong class=\"purple\">must</strong> be between <code>0.0</code> and <code>1.0</code>, inclusive"
+        }
+      ]
+    },
+    "VkPipelineRasterizationStateCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkPipelineRasterizationStateCreateInfo-depthClampEnable-00782",
+          "text": " If the <a href=\"#features-depthClamp\">depth clamping</a> feature is not enabled, <code>depthClampEnable</code> <strong class=\"purple\">must</strong> be <code>VK_FALSE</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineRasterizationStateCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineRasterizationStateCreateInfo-pNext-pNext",
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkPipelineRasterizationConservativeStateCreateInfoEXT\">VkPipelineRasterizationConservativeStateCreateInfoEXT</a>, <a href=\"#VkPipelineRasterizationDepthClipStateCreateInfoEXT\">VkPipelineRasterizationDepthClipStateCreateInfoEXT</a>, <a href=\"#VkPipelineRasterizationLineStateCreateInfoEXT\">VkPipelineRasterizationLineStateCreateInfoEXT</a>, <a href=\"#VkPipelineRasterizationStateRasterizationOrderAMD\">VkPipelineRasterizationStateRasterizationOrderAMD</a>, or <a href=\"#VkPipelineRasterizationStateStreamCreateInfoEXT\">VkPipelineRasterizationStateStreamCreateInfoEXT</a>"
+        },
+        {
+          "vuid": "VUID-VkPipelineRasterizationStateCreateInfo-sType-unique",
+          "text": " Each <code>sType</code> member in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
+        },
+        {
+          "vuid": "VUID-VkPipelineRasterizationStateCreateInfo-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-parameter",
+          "text": " <code>polygonMode</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPolygonMode\">VkPolygonMode</a> value"
+        },
+        {
+          "vuid": "VUID-VkPipelineRasterizationStateCreateInfo-cullMode-parameter",
+          "text": " <code>cullMode</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkCullModeFlagBits\">VkCullModeFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkPipelineRasterizationStateCreateInfo-frontFace-parameter",
+          "text": " <code>frontFace</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFrontFace\">VkFrontFace</a> value"
+        }
+      ],
+      "!(VK_NV_fill_rectangle)": [
+        {
+          "vuid": "VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01413",
+          "text": " If the <a href=\"#features-fillModeNonSolid\">non-solid fill modes</a> feature is not enabled, <code>polygonMode</code> <strong class=\"purple\">must</strong> be <code>VK_POLYGON_MODE_FILL</code>"
+        }
+      ],
+      "(VK_NV_fill_rectangle)": [
+        {
+          "vuid": "VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01507",
+          "text": " If the <a href=\"#features-fillModeNonSolid\">non-solid fill modes</a> feature is not enabled, <code>polygonMode</code> <strong class=\"purple\">must</strong> be <code>VK_POLYGON_MODE_FILL</code> or <code>VK_POLYGON_MODE_FILL_RECTANGLE_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01414",
+          "text": " If the <code><a href=\"#VK_NV_fill_rectangle\">VK_NV_fill_rectangle</a></code> extension is not enabled, <code>polygonMode</code> <strong class=\"purple\">must</strong> not be <code>VK_POLYGON_MODE_FILL_RECTANGLE_NV</code>"
+        }
+      ]
+    },
+    "VkPipelineRasterizationDepthClipStateCreateInfoEXT": {
+      "(VK_EXT_depth_clip_enable)": [
+        {
+          "vuid": "VUID-VkPipelineRasterizationDepthClipStateCreateInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineRasterizationDepthClipStateCreateInfoEXT-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        }
+      ]
+    },
+    "VkPipelineMultisampleStateCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkPipelineMultisampleStateCreateInfo-sampleShadingEnable-00784",
+          "text": " If the <a href=\"#features-sampleRateShading\">sample rate shading</a> feature is not enabled, <code>sampleShadingEnable</code> <strong class=\"purple\">must</strong> be <code>VK_FALSE</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineMultisampleStateCreateInfo-alphaToOneEnable-00785",
+          "text": " If the <a href=\"#features-alphaToOne\">alpha to one</a> feature is not enabled, <code>alphaToOneEnable</code> <strong class=\"purple\">must</strong> be <code>VK_FALSE</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineMultisampleStateCreateInfo-minSampleShading-00786",
+          "text": " <code>minSampleShading</code> <strong class=\"purple\">must</strong> be in the range <span class=\"eq\">[0,1]</span>"
+        },
+        {
+          "vuid": "VUID-VkPipelineMultisampleStateCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineMultisampleStateCreateInfo-pNext-pNext",
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkPipelineCoverageModulationStateCreateInfoNV\">VkPipelineCoverageModulationStateCreateInfoNV</a>, <a href=\"#VkPipelineCoverageReductionStateCreateInfoNV\">VkPipelineCoverageReductionStateCreateInfoNV</a>, <a href=\"#VkPipelineCoverageToColorStateCreateInfoNV\">VkPipelineCoverageToColorStateCreateInfoNV</a>, or <a href=\"#VkPipelineSampleLocationsStateCreateInfoEXT\">VkPipelineSampleLocationsStateCreateInfoEXT</a>"
+        },
+        {
+          "vuid": "VUID-VkPipelineMultisampleStateCreateInfo-sType-unique",
+          "text": " Each <code>sType</code> member in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
+        },
+        {
+          "vuid": "VUID-VkPipelineMultisampleStateCreateInfo-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineMultisampleStateCreateInfo-rasterizationSamples-parameter",
+          "text": " <code>rasterizationSamples</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSampleCountFlagBits\">VkSampleCountFlagBits</a> value"
+        },
+        {
+          "vuid": "VUID-VkPipelineMultisampleStateCreateInfo-pSampleMask-parameter",
+          "text": " If <code>pSampleMask</code> is not <code>NULL</code>, <code>pSampleMask</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of \\(\\lceil{\\mathit{rasterizationSamples} \\over 32}\\rceil\\) <code>VkSampleMask</code> values"
+        }
+      ],
+      "(VK_NV_framebuffer_mixed_samples)": [
+        {
+          "vuid": "VUID-VkPipelineMultisampleStateCreateInfo-rasterizationSamples-01415",
+          "text": " If the <code>VK_NV_framebuffer_mixed_samples</code> extension is enabled, and if the subpass has any color attachments and <code>rasterizationSamples</code> is greater than the number of color samples, then <code>sampleShadingEnable</code> <strong class=\"purple\">must</strong> be <code>VK_FALSE</code>"
+        }
+      ]
+    },
+    "VkPipelineRasterizationStateStreamCreateInfoEXT": {
+      "(VK_EXT_transform_feedback)": [
+        {
+          "vuid": "VUID-VkPipelineRasterizationStateStreamCreateInfoEXT-geometryStreams-02324",
+          "text": " <code>VkPhysicalDeviceTransformFeedbackFeaturesEXT</code>::<code>geometryStreams</code> <strong class=\"purple\">must</strong> be enabled"
+        },
+        {
+          "vuid": "VUID-VkPipelineRasterizationStateStreamCreateInfoEXT-rasterizationStream-02325",
+          "text": " <code>rasterizationStream</code> <strong class=\"purple\">must</strong> be less than <a href=\"#VkPhysicalDeviceTransformFeedbackPropertiesEXT\">VkPhysicalDeviceTransformFeedbackPropertiesEXT</a>::<code>maxTransformFeedbackStreams</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineRasterizationStateStreamCreateInfoEXT-rasterizationStream-02326",
+          "text": " <code>rasterizationStream</code> <strong class=\"purple\">must</strong> be zero if <code>VkPhysicalDeviceTransformFeedbackPropertiesEXT</code>::<code>transformFeedbackRasterizationStreamSelect</code> is <code>VK_FALSE</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineRasterizationStateStreamCreateInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineRasterizationStateStreamCreateInfoEXT-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        }
+      ]
+    },
+    "VkPipelineRasterizationStateRasterizationOrderAMD": {
+      "(VK_AMD_rasterization_order)": [
+        {
+          "vuid": "VUID-VkPipelineRasterizationStateRasterizationOrderAMD-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineRasterizationStateRasterizationOrderAMD-rasterizationOrder-parameter",
+          "text": " <code>rasterizationOrder</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkRasterizationOrderAMD\">VkRasterizationOrderAMD</a> value"
+        }
+      ]
+    },
+    "VkPipelineSampleLocationsStateCreateInfoEXT": {
+      "(VK_EXT_sample_locations)": [
+        {
+          "vuid": "VUID-VkPipelineSampleLocationsStateCreateInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineSampleLocationsStateCreateInfoEXT-sampleLocationsInfo-parameter",
+          "text": " <code>sampleLocationsInfo</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSampleLocationsInfoEXT\">VkSampleLocationsInfoEXT</a> structure"
+        }
+      ]
+    },
+    "VkSampleLocationsInfoEXT": {
+      "(VK_EXT_sample_locations)": [
+        {
+          "vuid": "VUID-VkSampleLocationsInfoEXT-sampleLocationsPerPixel-01526",
+          "text": " <code>sampleLocationsPerPixel</code> <strong class=\"purple\">must</strong> be a bit value that is set in <a href=\"#VkPhysicalDeviceSampleLocationsPropertiesEXT\">VkPhysicalDeviceSampleLocationsPropertiesEXT</a>::<code>sampleLocationSampleCounts</code>"
+        },
+        {
+          "vuid": "VUID-VkSampleLocationsInfoEXT-sampleLocationsCount-01527",
+          "text": " <code>sampleLocationsCount</code> <strong class=\"purple\">must</strong> equal <span class=\"eq\"><code>sampleLocationsPerPixel</code> {times} <code>sampleLocationGridSize.width</code> {times} <code>sampleLocationGridSize.height</code></span>"
+        },
+        {
+          "vuid": "VUID-VkSampleLocationsInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkSampleLocationsInfoEXT-sampleLocationsPerPixel-parameter",
+          "text": " If <code>sampleLocationsPerPixel</code> is not <code>0</code>, <code>sampleLocationsPerPixel</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSampleCountFlagBits\">VkSampleCountFlagBits</a> value"
+        },
+        {
+          "vuid": "VUID-VkSampleLocationsInfoEXT-pSampleLocations-parameter",
+          "text": " If <code>sampleLocationsCount</code> is not <code>0</code>, <code>pSampleLocations</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>sampleLocationsCount</code> <a href=\"#VkSampleLocationEXT\">VkSampleLocationEXT</a> structures"
+        }
+      ]
+    },
+    "vkCmdSetSampleLocationsEXT": {
+      "(VK_EXT_sample_locations)": [
+        {
+          "vuid": "VUID-vkCmdSetSampleLocationsEXT-None-01528",
+          "text": " The bound graphics pipeline <strong class=\"purple\">must</strong> have been created with the <code>VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT</code> dynamic state enabled"
+        },
+        {
+          "vuid": "VUID-vkCmdSetSampleLocationsEXT-sampleLocationsPerPixel-01529",
+          "text": " The <code>sampleLocationsPerPixel</code> member of <code>pSampleLocationsInfo</code> <strong class=\"purple\">must</strong> equal the <code>rasterizationSamples</code> member of the <a href=\"#VkPipelineMultisampleStateCreateInfo\">VkPipelineMultisampleStateCreateInfo</a> structure the bound graphics pipeline has been created with"
+        },
+        {
+          "vuid": "VUID-vkCmdSetSampleLocationsEXT-variableSampleLocations-01530",
+          "text": " If <a href=\"#VkPhysicalDeviceSampleLocationsPropertiesEXT\">VkPhysicalDeviceSampleLocationsPropertiesEXT</a>::<code>variableSampleLocations</code> is <code>VK_FALSE</code> then the current render pass <strong class=\"purple\">must</strong> have been begun by specifying a <a href=\"#VkRenderPassSampleLocationsBeginInfoEXT\">VkRenderPassSampleLocationsBeginInfoEXT</a> structure whose <code>pPostSubpassSampleLocations</code> member contains an element with a <code>subpassIndex</code> matching the current subpass index and the <code>sampleLocationsInfo</code> member of that element <strong class=\"purple\">must</strong> match the sample locations state pointed to by <code>pSampleLocationsInfo</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetSampleLocationsEXT-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdSetSampleLocationsEXT-pSampleLocationsInfo-parameter",
+          "text": " <code>pSampleLocationsInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkSampleLocationsInfoEXT\">VkSampleLocationsInfoEXT</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCmdSetSampleLocationsEXT-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetSampleLocationsEXT-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        }
+      ]
+    },
+    "VkPipelineViewportShadingRateImageStateCreateInfoNV": {
+      "(VK_NV_shading_rate_image)": [
+        {
+          "vuid": "VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-viewportCount-02054",
+          "text": " If the <a href=\"#features-multiViewport\">multiple viewports</a> feature is not enabled, <code>viewportCount</code> <strong class=\"purple\">must</strong> be <code>0</code> or <code>1</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-viewportCount-02055",
+          "text": " <code>viewportCount</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxViewports</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-shadingRateImageEnable-02056",
+          "text": " If <code>shadingRateImageEnable</code> is <code>VK_TRUE</code>, <code>viewportCount</code> <strong class=\"purple\">must</strong> be equal to the <code>viewportCount</code> member of <code>VkPipelineViewportStateCreateInfo</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-pDynamicStates-02057",
+          "text": " If no element of the <code>pDynamicStates</code> member of <code>pDynamicState</code> is <code>VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV</code>, <code>pShadingRatePalettes</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>viewportCount</code> <code>VkShadingRatePaletteNV</code> structures"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-pShadingRatePalettes-parameter",
+          "text": " If <code>viewportCount</code> is not <code>0</code>, and <code>pShadingRatePalettes</code> is not <code>NULL</code>, <code>pShadingRatePalettes</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>viewportCount</code> valid <a href=\"#VkShadingRatePaletteNV\">VkShadingRatePaletteNV</a> structures"
+        }
+      ]
+    },
+    "vkCmdBindShadingRateImageNV": {
+      "(VK_NV_shading_rate_image)": [
+        {
+          "vuid": "VUID-vkCmdBindShadingRateImageNV-None-02058",
+          "text": " The <a href=\"#features-shadingRateImage\">shading rate image</a> feature <strong class=\"purple\">must</strong> be enabled."
+        },
+        {
+          "vuid": "VUID-vkCmdBindShadingRateImageNV-imageView-02059",
+          "text": " If <code>imageView</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, it <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageView\">VkImageView</a> handle of type <code>VK_IMAGE_VIEW_TYPE_2D</code> or <code>VK_IMAGE_VIEW_TYPE_2D_ARRAY</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdBindShadingRateImageNV-imageView-02060",
+          "text": " If <code>imageView</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, it <strong class=\"purple\">must</strong> have a format of <code>VK_FORMAT_R8_UINT</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdBindShadingRateImageNV-imageView-02061",
+          "text": " If <code>imageView</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, it <strong class=\"purple\">must</strong> have been created with a <code>usage</code> value including <code>VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBindShadingRateImageNV-imageView-02062",
+          "text": " If <code>imageView</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>imageLayout</code> <strong class=\"purple\">must</strong> match the actual <a href=\"#VkImageLayout\">VkImageLayout</a> of each subresource accessible from <code>imageView</code> at the time the subresource is accessed."
+        },
+        {
+          "vuid": "VUID-vkCmdBindShadingRateImageNV-imageLayout-02063",
+          "text": " If <code>imageView</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>imageLayout</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV</code> or <code>VK_IMAGE_LAYOUT_GENERAL</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdBindShadingRateImageNV-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdBindShadingRateImageNV-imageView-parameter",
+          "text": " If <code>imageView</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>imageView</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageView\">VkImageView</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdBindShadingRateImageNV-imageLayout-parameter",
+          "text": " <code>imageLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageLayout\">VkImageLayout</a> value"
+        },
+        {
+          "vuid": "VUID-vkCmdBindShadingRateImageNV-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdBindShadingRateImageNV-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdBindShadingRateImageNV-commonparent",
+          "text": " Both of <code>commandBuffer</code>, and <code>imageView</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
+    "vkCmdSetViewportShadingRatePaletteNV": {
+      "(VK_NV_shading_rate_image)": [
+        {
+          "vuid": "VUID-vkCmdSetViewportShadingRatePaletteNV-None-02064",
+          "text": " The <a href=\"#features-shadingRateImage\">shading rate image</a> feature <strong class=\"purple\">must</strong> be enabled."
+        },
+        {
+          "vuid": "VUID-vkCmdSetViewportShadingRatePaletteNV-None-02065",
+          "text": " The bound graphics pipeline <strong class=\"purple\">must</strong> have been created with the <code>VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV</code> dynamic state enabled"
+        },
+        {
+          "vuid": "VUID-vkCmdSetViewportShadingRatePaletteNV-firstViewport-02066",
+          "text": " <code>firstViewport</code> <strong class=\"purple\">must</strong> be less than <code>VkPhysicalDeviceLimits</code>::<code>maxViewports</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetViewportShadingRatePaletteNV-firstViewport-02067",
+          "text": " The sum of <code>firstViewport</code> and <code>viewportCount</code> <strong class=\"purple\">must</strong> be between <code>1</code> and <code>VkPhysicalDeviceLimits</code>::<code>maxViewports</code>, inclusive"
+        },
+        {
+          "vuid": "VUID-vkCmdSetViewportShadingRatePaletteNV-firstViewport-02068",
+          "text": " If the <a href=\"#features-multiViewport\">multiple viewports</a> feature is not enabled, <code>firstViewport</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetViewportShadingRatePaletteNV-viewportCount-02069",
+          "text": " If the <a href=\"#features-multiViewport\">multiple viewports</a> feature is not enabled, <code>viewportCount</code> <strong class=\"purple\">must</strong> be <code>1</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetViewportShadingRatePaletteNV-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdSetViewportShadingRatePaletteNV-pShadingRatePalettes-parameter",
+          "text": " <code>pShadingRatePalettes</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>viewportCount</code> valid <a href=\"#VkShadingRatePaletteNV\">VkShadingRatePaletteNV</a> structures"
+        },
+        {
+          "vuid": "VUID-vkCmdSetViewportShadingRatePaletteNV-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetViewportShadingRatePaletteNV-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdSetViewportShadingRatePaletteNV-viewportCount-arraylength",
+          "text": " <code>viewportCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkShadingRatePaletteNV": {
+      "(VK_NV_shading_rate_image)": [
+        {
+          "vuid": "VUID-VkShadingRatePaletteNV-shadingRatePaletteEntryCount-02071",
+          "text": " <code>shadingRatePaletteEntryCount</code> <strong class=\"purple\">must</strong> be between <code>1</code> and <code>VkPhysicalDeviceShadingRateImagePropertiesNV</code>::<code>shadingRatePaletteSize</code>, inclusive"
+        },
+        {
+          "vuid": "VUID-VkShadingRatePaletteNV-pShadingRatePaletteEntries-parameter",
+          "text": " <code>pShadingRatePaletteEntries</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>shadingRatePaletteEntryCount</code> valid <a href=\"#VkShadingRatePaletteEntryNV\">VkShadingRatePaletteEntryNV</a> values"
+        },
+        {
+          "vuid": "VUID-VkShadingRatePaletteNV-shadingRatePaletteEntryCount-arraylength",
+          "text": " <code>shadingRatePaletteEntryCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkPipelineViewportCoarseSampleOrderStateCreateInfoNV": {
+      "(VK_NV_shading_rate_image)": [
+        {
+          "vuid": "VUID-VkPipelineViewportCoarseSampleOrderStateCreateInfoNV-sampleOrderType-02072",
+          "text": " If <code>sampleOrderType</code> is not <code>VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV</code>, <code>customSamplerOrderCount</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportCoarseSampleOrderStateCreateInfoNV-pCustomSampleOrders-02234",
+          "text": " The array <code>pCustomSampleOrders</code> <strong class=\"purple\">must</strong> not contain two structures with matching values for both the <code>shadingRate</code> and <code>sampleCount</code> members."
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportCoarseSampleOrderStateCreateInfoNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportCoarseSampleOrderStateCreateInfoNV-sampleOrderType-parameter",
+          "text": " <code>sampleOrderType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCoarseSampleOrderTypeNV\">VkCoarseSampleOrderTypeNV</a> value"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportCoarseSampleOrderStateCreateInfoNV-pCustomSampleOrders-parameter",
+          "text": " If <code>customSampleOrderCount</code> is not <code>0</code>, <code>pCustomSampleOrders</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>customSampleOrderCount</code> valid <a href=\"#VkCoarseSampleOrderCustomNV\">VkCoarseSampleOrderCustomNV</a> structures"
+        }
+      ]
+    },
+    "VkCoarseSampleOrderCustomNV": {
+      "(VK_NV_shading_rate_image)": [
+        {
+          "vuid": "VUID-VkCoarseSampleOrderCustomNV-shadingRate-02073",
+          "text": " <code>shadingRate</code> <strong class=\"purple\">must</strong> be a shading rate that generates fragments with more than one pixel."
+        },
+        {
+          "vuid": "VUID-VkCoarseSampleOrderCustomNV-sampleCount-02074",
+          "text": " <code>sampleCount</code> <strong class=\"purple\">must</strong> correspond to a sample count enumerated in <a href=\"#VkSampleCountFlags\">VkSampleCountFlags</a> whose corresponding bit is set in <a href=\"#VkPhysicalDeviceLimits\">VkPhysicalDeviceLimits</a>::<code>framebufferNoAttachmentsSampleCounts</code>."
+        },
+        {
+          "vuid": "VUID-VkCoarseSampleOrderCustomNV-sampleLocationCount-02075",
+          "text": " <code>sampleLocationCount</code> <strong class=\"purple\">must</strong> be equal to the product of <code>sampleCount</code>, the fragment width for <code>shadingRate</code>, and the fragment height for <code>shadingRate</code>."
+        },
+        {
+          "vuid": "VUID-VkCoarseSampleOrderCustomNV-sampleLocationCount-02076",
+          "text": " <code>sampleLocationCount</code> <strong class=\"purple\">must</strong> be less than or equal to the value of <code>VkPhysicalDeviceShadingRateImagePropertiesNV</code>::<code>shadingRateMaxCoarseSamples</code>."
+        },
+        {
+          "vuid": "VUID-VkCoarseSampleOrderCustomNV-pSampleLocations-02077",
+          "text": " The array <code>pSampleLocations</code> <strong class=\"purple\">must</strong> contain exactly one entry for every combination of valid values for <code>pixelX</code>, <code>pixelY</code>, and <code>sample</code> in the structure <a href=\"#VkCoarseSampleOrderCustomNV\">VkCoarseSampleOrderCustomNV</a>."
+        },
+        {
+          "vuid": "VUID-VkCoarseSampleOrderCustomNV-shadingRate-parameter",
+          "text": " <code>shadingRate</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkShadingRatePaletteEntryNV\">VkShadingRatePaletteEntryNV</a> value"
+        },
+        {
+          "vuid": "VUID-VkCoarseSampleOrderCustomNV-pSampleLocations-parameter",
+          "text": " <code>pSampleLocations</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>sampleLocationCount</code> <a href=\"#VkCoarseSampleLocationNV\">VkCoarseSampleLocationNV</a> structures"
+        },
+        {
+          "vuid": "VUID-VkCoarseSampleOrderCustomNV-sampleLocationCount-arraylength",
+          "text": " <code>sampleLocationCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkCoarseSampleLocationNV": {
+      "(VK_NV_shading_rate_image)": [
+        {
+          "vuid": "VUID-VkCoarseSampleLocationNV-pixelX-02078",
+          "text": " <code>pixelX</code> <strong class=\"purple\">must</strong> be less than the width (in pixels) of the fragment."
+        },
+        {
+          "vuid": "VUID-VkCoarseSampleLocationNV-pixelY-02079",
+          "text": " <code>pixelY</code> <strong class=\"purple\">must</strong> be less than the height (in pixels) of the fragment."
+        },
+        {
+          "vuid": "VUID-VkCoarseSampleLocationNV-sample-02080",
+          "text": " <code>sample</code> <strong class=\"purple\">must</strong> be less than the number of coverage samples in each pixel belonging to the fragment."
+        }
+      ]
+    },
+    "vkCmdSetCoarseSampleOrderNV": {
+      "(VK_NV_shading_rate_image)": [
+        {
+          "vuid": "VUID-vkCmdSetCoarseSampleOrderNV-sampleOrderType-02081",
+          "text": " If <code>sampleOrderType</code> is not <code>VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV</code>, <code>customSamplerOrderCount</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetCoarseSampleOrderNV-pCustomSampleOrders-02235",
+          "text": " The array <code>pCustomSampleOrders</code> <strong class=\"purple\">must</strong> not contain two structures with matching values for both the <code>shadingRate</code> and <code>sampleCount</code> members."
+        },
+        {
+          "vuid": "VUID-vkCmdSetCoarseSampleOrderNV-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdSetCoarseSampleOrderNV-sampleOrderType-parameter",
+          "text": " <code>sampleOrderType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCoarseSampleOrderTypeNV\">VkCoarseSampleOrderTypeNV</a> value"
+        },
+        {
+          "vuid": "VUID-vkCmdSetCoarseSampleOrderNV-pCustomSampleOrders-parameter",
+          "text": " If <code>customSampleOrderCount</code> is not <code>0</code>, <code>pCustomSampleOrders</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>customSampleOrderCount</code> valid <a href=\"#VkCoarseSampleOrderCustomNV\">VkCoarseSampleOrderCustomNV</a> structures"
+        },
+        {
+          "vuid": "VUID-vkCmdSetCoarseSampleOrderNV-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetCoarseSampleOrderNV-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        }
+      ]
+    },
+    "VkPipelineRasterizationLineStateCreateInfoEXT": {
+      "(VK_EXT_line_rasterization)": [
+        {
+          "vuid": "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-02768",
+          "text": " If <code>lineRasterizationMode</code> is <code>VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT</code>, then the <a href=\"#features-rectangularLines\">rectangularLines</a> feature <strong class=\"purple\">must</strong> be enabled"
+        },
+        {
+          "vuid": "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-02769",
+          "text": " If <code>lineRasterizationMode</code> is <code>VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT</code>, then the <a href=\"#features-bresenhamLines\">bresenhamLines</a> feature <strong class=\"purple\">must</strong> be enabled"
+        },
+        {
+          "vuid": "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-02770",
+          "text": " If <code>lineRasterizationMode</code> is <code>VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT</code>, then the <a href=\"#features-bresenhamLines\">smoothLines</a> feature <strong class=\"purple\">must</strong> be enabled"
+        },
+        {
+          "vuid": "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02771",
+          "text": " If <code>stippledLineEnable</code> is <code>VK_TRUE</code> and <code>lineRasterizationMode</code> is <code>VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT</code>, then the <a href=\"#features-stippledRectangularLines\">stippledRectangularLines</a> feature <strong class=\"purple\">must</strong> be enabled"
+        },
+        {
+          "vuid": "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02772",
+          "text": " If <code>stippledLineEnable</code> is <code>VK_TRUE</code> and <code>lineRasterizationMode</code> is <code>VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT</code>, then the <a href=\"#features-stippledBresenhamLines\">stippledBresenhamLines</a> feature <strong class=\"purple\">must</strong> be enabled"
+        },
+        {
+          "vuid": "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02773",
+          "text": " If <code>stippledLineEnable</code> is <code>VK_TRUE</code> and <code>lineRasterizationMode</code> is <code>VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT</code>, then the <a href=\"#features-stippledSmoothLines\">stippledSmoothLines</a> feature <strong class=\"purple\">must</strong> be enabled"
+        },
+        {
+          "vuid": "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02774",
+          "text": " If <code>stippledLineEnable</code> is <code>VK_TRUE</code> and <code>lineRasterizationMode</code> is <code>VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT</code>, then the <a href=\"#features-stippledRectangularLines\">stippledRectangularLines</a> feature <strong class=\"purple\">must</strong> be enabled and <a href=\"#VkPhysicalDeviceLimits\">VkPhysicalDeviceLimits</a>::<code>strictLines</code> <strong class=\"purple\">must</strong> be <code>VK_TRUE</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-parameter",
+          "text": " <code>lineRasterizationMode</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkLineRasterizationModeEXT\">VkLineRasterizationModeEXT</a> value"
+        }
+      ]
+    },
+    "vkCmdSetLineWidth": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdSetLineWidth-None-00787",
+          "text": " The bound graphics pipeline <strong class=\"purple\">must</strong> have been created with the <code>VK_DYNAMIC_STATE_LINE_WIDTH</code> dynamic state enabled"
+        },
+        {
+          "vuid": "VUID-vkCmdSetLineWidth-lineWidth-00788",
+          "text": " If the <a href=\"#features-wideLines\">wide lines</a> feature is not enabled, <code>lineWidth</code> <strong class=\"purple\">must</strong> be <code>1.0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetLineWidth-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdSetLineWidth-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetLineWidth-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        }
+      ]
+    },
+    "vkCmdSetLineStippleEXT": {
+      "(VK_EXT_line_rasterization)": [
+        {
+          "vuid": "VUID-vkCmdSetLineStippleEXT-None-02775",
+          "text": " The bound graphics pipeline <strong class=\"purple\">must</strong> have been created with the <code>VK_DYNAMIC_STATE_LINE_STIPPLE_EXT</code> dynamic state enabled"
+        },
+        {
+          "vuid": "VUID-vkCmdSetLineStippleEXT-lineStippleFactor-02776",
+          "text": " <code>lineStippleFactor</code> <strong class=\"purple\">must</strong> be in the range <span class=\"eq\">[1,256]</span>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetLineStippleEXT-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdSetLineStippleEXT-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetLineStippleEXT-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        }
+      ]
+    },
+    "vkCmdSetDepthBias": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdSetDepthBias-None-00789",
+          "text": " The bound graphics pipeline <strong class=\"purple\">must</strong> have been created with the <code>VK_DYNAMIC_STATE_DEPTH_BIAS</code> dynamic state enabled"
+        },
+        {
+          "vuid": "VUID-vkCmdSetDepthBias-depthBiasClamp-00790",
+          "text": " If the <a href=\"#features-depthBiasClamp\">depth bias clamping</a> feature is not enabled, <code>depthBiasClamp</code> <strong class=\"purple\">must</strong> be <code>0.0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetDepthBias-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdSetDepthBias-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetDepthBias-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        }
+      ]
+    },
+    "VkPipelineRasterizationConservativeStateCreateInfoEXT": {
+      "(VK_EXT_conservative_rasterization)": [
+        {
+          "vuid": "VUID-VkPipelineRasterizationConservativeStateCreateInfoEXT-extraPrimitiveOverestimationSize-01769",
+          "text": " <code>extraPrimitiveOverestimationSize</code> <strong class=\"purple\">must</strong> be in the range of <code>0.0</code> to <code>VkPhysicalDeviceConservativeRasterizationPropertiesEXT</code>::<code>maxExtraPrimitiveOverestimationSize</code> inclusive"
+        },
+        {
+          "vuid": "VUID-VkPipelineRasterizationConservativeStateCreateInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineRasterizationConservativeStateCreateInfoEXT-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineRasterizationConservativeStateCreateInfoEXT-conservativeRasterizationMode-parameter",
+          "text": " <code>conservativeRasterizationMode</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkConservativeRasterizationModeEXT\">VkConservativeRasterizationModeEXT</a> value"
+        }
+      ]
+    },
+    "VkPipelineDiscardRectangleStateCreateInfoEXT": {
+      "(VK_EXT_discard_rectangles)": [
+        {
+          "vuid": "VUID-VkPipelineDiscardRectangleStateCreateInfoEXT-discardRectangleCount-00582",
+          "text": " <code>discardRectangleCount</code> <strong class=\"purple\">must</strong> be between <code>0</code> and <code>VkPhysicalDeviceDiscardRectanglePropertiesEXT</code>::<code>maxDiscardRectangles</code>, inclusive"
+        },
+        {
+          "vuid": "VUID-VkPipelineDiscardRectangleStateCreateInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineDiscardRectangleStateCreateInfoEXT-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineDiscardRectangleStateCreateInfoEXT-discardRectangleMode-parameter",
+          "text": " <code>discardRectangleMode</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDiscardRectangleModeEXT\">VkDiscardRectangleModeEXT</a> value"
+        }
+      ]
+    },
+    "vkCmdSetDiscardRectangleEXT": {
+      "(VK_EXT_discard_rectangles)": [
+        {
+          "vuid": "VUID-vkCmdSetDiscardRectangleEXT-None-00583",
+          "text": " The bound graphics pipeline <strong class=\"purple\">must</strong> have been created with the <code>VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT</code> dynamic state enabled"
+        },
+        {
+          "vuid": "VUID-vkCmdSetDiscardRectangleEXT-firstDiscardRectangle-00585",
+          "text": " The sum of <code>firstDiscardRectangle</code> and <code>discardRectangleCount</code> <strong class=\"purple\">must</strong> be less than or equal to <a href=\"#VkPhysicalDeviceDiscardRectanglePropertiesEXT\">VkPhysicalDeviceDiscardRectanglePropertiesEXT</a>::<code>maxDiscardRectangles</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetDiscardRectangleEXT-x-00587",
+          "text": " The <code>x</code> and <code>y</code> member of <code>offset</code> in each <a href=\"#VkRect2D\">VkRect2D</a> element of <code>pDiscardRectangles</code> <strong class=\"purple\">must</strong> be greater than or equal to <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetDiscardRectangleEXT-offset-00588",
+          "text": " Evaluation of <span class=\"eq\">(<code>offset.x</code> &#43; <code>extent.width</code>)</span> in each <a href=\"#VkRect2D\">VkRect2D</a> element of <code>pDiscardRectangles</code> <strong class=\"purple\">must</strong> not cause a signed integer addition overflow"
+        },
+        {
+          "vuid": "VUID-vkCmdSetDiscardRectangleEXT-offset-00589",
+          "text": " Evaluation of <span class=\"eq\">(<code>offset.y</code> &#43; <code>extent.height</code>)</span> in each <a href=\"#VkRect2D\">VkRect2D</a> element of <code>pDiscardRectangles</code> <strong class=\"purple\">must</strong> not cause a signed integer addition overflow"
+        },
+        {
+          "vuid": "VUID-vkCmdSetDiscardRectangleEXT-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdSetDiscardRectangleEXT-pDiscardRectangles-parameter",
+          "text": " <code>pDiscardRectangles</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>discardRectangleCount</code> <a href=\"#VkRect2D\">VkRect2D</a> structures"
+        },
+        {
+          "vuid": "VUID-vkCmdSetDiscardRectangleEXT-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetDiscardRectangleEXT-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdSetDiscardRectangleEXT-discardRectangleCount-arraylength",
+          "text": " <code>discardRectangleCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "vkCmdSetScissor": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdSetScissor-None-00590",
+          "text": " The bound graphics pipeline <strong class=\"purple\">must</strong> have been created with the <code>VK_DYNAMIC_STATE_SCISSOR</code> dynamic state enabled"
+        },
+        {
+          "vuid": "VUID-vkCmdSetScissor-firstScissor-00591",
+          "text": " <code>firstScissor</code> <strong class=\"purple\">must</strong> be less than <code>VkPhysicalDeviceLimits</code>::<code>maxViewports</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetScissor-firstScissor-00592",
+          "text": " The sum of <code>firstScissor</code> and <code>scissorCount</code> <strong class=\"purple\">must</strong> be between <code>1</code> and <code>VkPhysicalDeviceLimits</code>::<code>maxViewports</code>, inclusive"
+        },
+        {
+          "vuid": "VUID-vkCmdSetScissor-firstScissor-00593",
+          "text": " If the <a href=\"#features-multiViewport\">multiple viewports</a> feature is not enabled, <code>firstScissor</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetScissor-scissorCount-00594",
+          "text": " If the <a href=\"#features-multiViewport\">multiple viewports</a> feature is not enabled, <code>scissorCount</code> <strong class=\"purple\">must</strong> be <code>1</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetScissor-x-00595",
+          "text": " The <code>x</code> and <code>y</code> members of <code>offset</code> member of any element of <code>pScissors</code> <strong class=\"purple\">must</strong> be greater than or equal to <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetScissor-offset-00596",
+          "text": " Evaluation of <span class=\"eq\">(<code>offset.x</code> &#43; <code>extent.width</code>)</span> <strong class=\"purple\">must</strong> not cause a signed integer addition overflow for any element of <code>pScissors</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetScissor-offset-00597",
+          "text": " Evaluation of <span class=\"eq\">(<code>offset.y</code> &#43; <code>extent.height</code>)</span> <strong class=\"purple\">must</strong> not cause a signed integer addition overflow for any element of <code>pScissors</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetScissor-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdSetScissor-pScissors-parameter",
+          "text": " <code>pScissors</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>scissorCount</code> <a href=\"#VkRect2D\">VkRect2D</a> structures"
+        },
+        {
+          "vuid": "VUID-vkCmdSetScissor-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetScissor-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdSetScissor-scissorCount-arraylength",
+          "text": " <code>scissorCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkPipelineViewportExclusiveScissorStateCreateInfoNV": {
+      "(VK_NV_scissor_exclusive)": [
+        {
+          "vuid": "VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-exclusiveScissorCount-02027",
+          "text": " If the <a href=\"#features-multiViewport\">multiple viewports</a> feature is not enabled, <code>exclusiveScissorCount</code> <strong class=\"purple\">must</strong> be <code>0</code> or <code>1</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-exclusiveScissorCount-02028",
+          "text": " <code>exclusiveScissorCount</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxViewports</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-exclusiveScissorCount-02029",
+          "text": " <code>exclusiveScissorCount</code> <strong class=\"purple\">must</strong> be <code>0</code> or identical to the <code>viewportCount</code> member of <code>VkPipelineViewportStateCreateInfo</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-pDynamicStates-02030",
+          "text": " If no element of the <code>pDynamicStates</code> member of <code>pDynamicState</code> is <code>VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV</code> and <code>exclusiveScissorCount</code> is not <code>0</code>, <code>pExclusiveScissors</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>exclusiveScissorCount</code> <code>VkRect2D</code> structures"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-pExclusiveScissors-parameter",
+          "text": " If <code>exclusiveScissorCount</code> is not <code>0</code>, and <code>pExclusiveScissors</code> is not <code>NULL</code>, <code>pExclusiveScissors</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>exclusiveScissorCount</code> <a href=\"#VkRect2D\">VkRect2D</a> structures"
+        }
+      ]
+    },
+    "vkCmdSetExclusiveScissorNV": {
+      "(VK_NV_scissor_exclusive)": [
+        {
+          "vuid": "VUID-vkCmdSetExclusiveScissorNV-None-02031",
+          "text": " The <a href=\"#features-exclusiveScissor\">exclusive scissor</a> feature <strong class=\"purple\">must</strong> be enabled."
+        },
+        {
+          "vuid": "VUID-vkCmdSetExclusiveScissorNV-None-02032",
+          "text": " The bound graphics pipeline <strong class=\"purple\">must</strong> have been created with the <code>VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV</code> dynamic state enabled"
+        },
+        {
+          "vuid": "VUID-vkCmdSetExclusiveScissorNV-firstExclusiveScissor-02033",
+          "text": " <code>firstExclusiveScissor</code> <strong class=\"purple\">must</strong> be less than <code>VkPhysicalDeviceLimits</code>::<code>maxViewports</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetExclusiveScissorNV-firstExclusiveScissor-02034",
+          "text": " The sum of <code>firstExclusiveScissor</code> and <code>exclusiveScissorCount</code> <strong class=\"purple\">must</strong> be between <code>1</code> and <code>VkPhysicalDeviceLimits</code>::<code>maxViewports</code>, inclusive"
+        },
+        {
+          "vuid": "VUID-vkCmdSetExclusiveScissorNV-firstExclusiveScissor-02035",
+          "text": " If the <a href=\"#features-multiViewport\">multiple viewports</a> feature is not enabled, <code>firstExclusiveScissor</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetExclusiveScissorNV-exclusiveScissorCount-02036",
+          "text": " If the <a href=\"#features-multiViewport\">multiple viewports</a> feature is not enabled, <code>exclusiveScissorCount</code> <strong class=\"purple\">must</strong> be <code>1</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetExclusiveScissorNV-x-02037",
+          "text": " The <code>x</code> and <code>y</code> members of <code>offset</code> in each member of <code>pExclusiveScissors</code> <strong class=\"purple\">must</strong> be greater than or equal to <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetExclusiveScissorNV-offset-02038",
+          "text": " Evaluation of <span class=\"eq\">(<code>offset.x</code> &#43; <code>extent.width</code>)</span> for each member of <code>pExclusiveScissors</code> <strong class=\"purple\">must</strong> not cause a signed integer addition overflow"
+        },
+        {
+          "vuid": "VUID-vkCmdSetExclusiveScissorNV-offset-02039",
+          "text": " Evaluation of <span class=\"eq\">(<code>offset.y</code> &#43; <code>extent.height</code>)</span> for each member of <code>pExclusiveScissors</code> <strong class=\"purple\">must</strong> not cause a signed integer addition overflow"
+        },
+        {
+          "vuid": "VUID-vkCmdSetExclusiveScissorNV-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdSetExclusiveScissorNV-pExclusiveScissors-parameter",
+          "text": " <code>pExclusiveScissors</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>exclusiveScissorCount</code> <a href=\"#VkRect2D\">VkRect2D</a> structures"
+        },
+        {
+          "vuid": "VUID-vkCmdSetExclusiveScissorNV-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetExclusiveScissorNV-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        },
+        {
+          "vuid": "VUID-vkCmdSetExclusiveScissorNV-exclusiveScissorCount-arraylength",
+          "text": " <code>exclusiveScissorCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkPipelineDepthStencilStateCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkPipelineDepthStencilStateCreateInfo-depthBoundsTestEnable-00598",
+          "text": " If the <a href=\"#features-depthBounds\">depth bounds testing</a> feature is not enabled, <code>depthBoundsTestEnable</code> <strong class=\"purple\">must</strong> be <code>VK_FALSE</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineDepthStencilStateCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineDepthStencilStateCreateInfo-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineDepthStencilStateCreateInfo-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineDepthStencilStateCreateInfo-depthCompareOp-parameter",
+          "text": " <code>depthCompareOp</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCompareOp\">VkCompareOp</a> value"
+        },
+        {
+          "vuid": "VUID-VkPipelineDepthStencilStateCreateInfo-front-parameter",
+          "text": " <code>front</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkStencilOpState\">VkStencilOpState</a> structure"
+        },
+        {
+          "vuid": "VUID-VkPipelineDepthStencilStateCreateInfo-back-parameter",
+          "text": " <code>back</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkStencilOpState\">VkStencilOpState</a> structure"
+        }
+      ]
+    },
+    "vkCmdSetDepthBounds": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdSetDepthBounds-None-00599",
+          "text": " The bound graphics pipeline <strong class=\"purple\">must</strong> have been created with the <code>VK_DYNAMIC_STATE_DEPTH_BOUNDS</code> dynamic state enabled"
+        },
+        {
+          "vuid": "VUID-vkCmdSetDepthBounds-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdSetDepthBounds-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetDepthBounds-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        }
+      ],
+      "(VK_EXT_depth_range_unrestricted)": [
+        {
+          "vuid": "VUID-vkCmdSetDepthBounds-minDepthBounds-00600",
+          "text": " Unless the <code><a href=\"#VK_EXT_depth_range_unrestricted\">VK_EXT_depth_range_unrestricted</a></code> extension is enabled <code>minDepthBounds</code> <strong class=\"purple\">must</strong> be between <code>0.0</code> and <code>1.0</code>, inclusive"
+        },
+        {
+          "vuid": "VUID-vkCmdSetDepthBounds-maxDepthBounds-00601",
+          "text": " Unless the <code><a href=\"#VK_EXT_depth_range_unrestricted\">VK_EXT_depth_range_unrestricted</a></code> extension is enabled <code>maxDepthBounds</code> <strong class=\"purple\">must</strong> be between <code>0.0</code> and <code>1.0</code>, inclusive"
+        }
+      ],
+      "!(VK_EXT_depth_range_unrestricted)": [
+        {
+          "vuid": "VUID-vkCmdSetDepthBounds-minDepthBounds-02508",
+          "text": " <code>minDepthBounds</code> <strong class=\"purple\">must</strong> be between <code>0.0</code> and <code>1.0</code>, inclusive"
+        },
+        {
+          "vuid": "VUID-vkCmdSetDepthBounds-maxDepthBounds-02509",
+          "text": " <code>maxDepthBounds</code> <strong class=\"purple\">must</strong> be between <code>0.0</code> and <code>1.0</code>, inclusive"
+        }
+      ]
+    },
+    "VkStencilOpState": {
+      "core": [
+        {
+          "vuid": "VUID-VkStencilOpState-failOp-parameter",
+          "text": " <code>failOp</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkStencilOp\">VkStencilOp</a> value"
+        },
+        {
+          "vuid": "VUID-VkStencilOpState-passOp-parameter",
+          "text": " <code>passOp</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkStencilOp\">VkStencilOp</a> value"
+        },
+        {
+          "vuid": "VUID-VkStencilOpState-depthFailOp-parameter",
+          "text": " <code>depthFailOp</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkStencilOp\">VkStencilOp</a> value"
+        },
+        {
+          "vuid": "VUID-VkStencilOpState-compareOp-parameter",
+          "text": " <code>compareOp</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCompareOp\">VkCompareOp</a> value"
+        }
+      ]
+    },
+    "vkCmdSetStencilCompareMask": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdSetStencilCompareMask-None-00602",
+          "text": " The bound graphics pipeline <strong class=\"purple\">must</strong> have been created with the <code>VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK</code> dynamic state enabled"
+        },
+        {
+          "vuid": "VUID-vkCmdSetStencilCompareMask-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdSetStencilCompareMask-faceMask-parameter",
+          "text": " <code>faceMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkStencilFaceFlagBits\">VkStencilFaceFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-vkCmdSetStencilCompareMask-faceMask-requiredbitmask",
+          "text": " <code>faceMask</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetStencilCompareMask-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetStencilCompareMask-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        }
+      ]
+    },
+    "vkCmdSetStencilWriteMask": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdSetStencilWriteMask-None-00603",
+          "text": " The bound graphics pipeline <strong class=\"purple\">must</strong> have been created with the <code>VK_DYNAMIC_STATE_STENCIL_WRITE_MASK</code> dynamic state enabled"
+        },
+        {
+          "vuid": "VUID-vkCmdSetStencilWriteMask-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdSetStencilWriteMask-faceMask-parameter",
+          "text": " <code>faceMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkStencilFaceFlagBits\">VkStencilFaceFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-vkCmdSetStencilWriteMask-faceMask-requiredbitmask",
+          "text": " <code>faceMask</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetStencilWriteMask-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetStencilWriteMask-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        }
+      ]
+    },
+    "vkCmdSetStencilReference": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdSetStencilReference-None-00604",
+          "text": " The bound graphics pipeline <strong class=\"purple\">must</strong> have been created with the <code>VK_DYNAMIC_STATE_STENCIL_REFERENCE</code> dynamic state enabled"
+        },
+        {
+          "vuid": "VUID-vkCmdSetStencilReference-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdSetStencilReference-faceMask-parameter",
+          "text": " <code>faceMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkStencilFaceFlagBits\">VkStencilFaceFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-vkCmdSetStencilReference-faceMask-requiredbitmask",
+          "text": " <code>faceMask</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetStencilReference-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetStencilReference-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        }
+      ]
+    },
+    "VkPipelineRepresentativeFragmentTestStateCreateInfoNV": {
+      "(VK_NV_representative_fragment_test)": [
+        {
+          "vuid": "VUID-VkPipelineRepresentativeFragmentTestStateCreateInfoNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV</code>"
+        }
+      ]
+    },
+    "VkPipelineCoverageToColorStateCreateInfoNV": {
+      "(VK_NV_fragment_coverage_to_color)": [
+        {
+          "vuid": "VUID-VkPipelineCoverageToColorStateCreateInfoNV-coverageToColorEnable-01404",
+          "text": " If <code>coverageToColorEnable</code> is <code>VK_TRUE</code>, then the render pass subpass indicated by <a href=\"#VkGraphicsPipelineCreateInfo\">VkGraphicsPipelineCreateInfo</a>::<code>renderPass</code> and <a href=\"#VkGraphicsPipelineCreateInfo\">VkGraphicsPipelineCreateInfo</a>::<code>subpass</code> <strong class=\"purple\">must</strong> have a color attachment at the location selected by <code>coverageToColorLocation</code>, with a <a href=\"#VkFormat\">VkFormat</a> of <code>VK_FORMAT_R8_UINT</code>, <code>VK_FORMAT_R8_SINT</code>, <code>VK_FORMAT_R16_UINT</code>, <code>VK_FORMAT_R16_SINT</code>, <code>VK_FORMAT_R32_UINT</code>, or <code>VK_FORMAT_R32_SINT</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineCoverageToColorStateCreateInfoNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineCoverageToColorStateCreateInfoNV-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        }
+      ]
+    },
+    "VkPipelineCoverageReductionStateCreateInfoNV": {
+      "(VK_NV_framebuffer_mixed_samples)+(VK_NV_coverage_reduction_mode)": [
+        {
+          "vuid": "VUID-VkPipelineCoverageReductionStateCreateInfoNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineCoverageReductionStateCreateInfoNV-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineCoverageReductionStateCreateInfoNV-coverageReductionMode-parameter",
+          "text": " <code>coverageReductionMode</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCoverageReductionModeNV\">VkCoverageReductionModeNV</a> value"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV": {
+      "(VK_NV_framebuffer_mixed_samples)+(VK_NV_coverage_reduction_mode)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV-pCombinationCount-parameter",
+          "text": " <code>pCombinationCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV-pCombinations-parameter",
+          "text": " If the value referenced by <code>pCombinationCount</code> is not <code>0</code>, and <code>pCombinations</code> is not <code>NULL</code>, <code>pCombinations</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pCombinationCount</code> <a href=\"#VkFramebufferMixedSamplesCombinationNV\">VkFramebufferMixedSamplesCombinationNV</a> structures"
+        }
+      ]
+    },
+    "VkFramebufferMixedSamplesCombinationNV": {
+      "(VK_NV_framebuffer_mixed_samples)+(VK_NV_coverage_reduction_mode)": [
+        {
+          "vuid": "VUID-VkFramebufferMixedSamplesCombinationNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkFramebufferMixedSamplesCombinationNV-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "VkPipelineCoverageModulationStateCreateInfoNV": {
+      "(VK_NV_framebuffer_mixed_samples)": [
+        {
+          "vuid": "VUID-VkPipelineCoverageModulationStateCreateInfoNV-coverageModulationTableEnable-01405",
+          "text": " If <code>coverageModulationTableEnable</code> is <code>VK_TRUE</code>, <code>coverageModulationTableCount</code> <strong class=\"purple\">must</strong> be equal to the number of rasterization samples divided by the number of color samples in the subpass"
+        },
+        {
+          "vuid": "VUID-VkPipelineCoverageModulationStateCreateInfoNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineCoverageModulationStateCreateInfoNV-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineCoverageModulationStateCreateInfoNV-coverageModulationMode-parameter",
+          "text": " <code>coverageModulationMode</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCoverageModulationModeNV\">VkCoverageModulationModeNV</a> value"
+        }
+      ]
+    },
+    "VkPipelineColorBlendStateCreateInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkPipelineColorBlendStateCreateInfo-pAttachments-00605",
+          "text": " If the <a href=\"#features-independentBlend\">independent blending</a> feature is not enabled, all elements of <code>pAttachments</code> <strong class=\"purple\">must</strong> be identical"
+        },
+        {
+          "vuid": "VUID-VkPipelineColorBlendStateCreateInfo-logicOpEnable-00606",
+          "text": " If the <a href=\"#features-logicOp\">logic operations</a> feature is not enabled, <code>logicOpEnable</code> <strong class=\"purple\">must</strong> be <code>VK_FALSE</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineColorBlendStateCreateInfo-logicOpEnable-00607",
+          "text": " If <code>logicOpEnable</code> is <code>VK_TRUE</code>, <code>logicOp</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkLogicOp\">VkLogicOp</a> value"
+        },
+        {
+          "vuid": "VUID-VkPipelineColorBlendStateCreateInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineColorBlendStateCreateInfo-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkPipelineColorBlendAdvancedStateCreateInfoEXT\">VkPipelineColorBlendAdvancedStateCreateInfoEXT</a>"
+        },
+        {
+          "vuid": "VUID-VkPipelineColorBlendStateCreateInfo-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineColorBlendStateCreateInfo-pAttachments-parameter",
+          "text": " If <code>attachmentCount</code> is not <code>0</code>, <code>pAttachments</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>attachmentCount</code> valid <a href=\"#VkPipelineColorBlendAttachmentState\">VkPipelineColorBlendAttachmentState</a> structures"
+        }
+      ]
+    },
+    "VkPipelineColorBlendAttachmentState": {
+      "core": [
+        {
+          "vuid": "VUID-VkPipelineColorBlendAttachmentState-srcColorBlendFactor-00608",
+          "text": " If the <a href=\"#features-dualSrcBlend\">dual source blending</a> feature is not enabled, <code>srcColorBlendFactor</code> <strong class=\"purple\">must</strong> not be <code>VK_BLEND_FACTOR_SRC1_COLOR</code>, <code>VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR</code>, <code>VK_BLEND_FACTOR_SRC1_ALPHA</code>, or <code>VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineColorBlendAttachmentState-dstColorBlendFactor-00609",
+          "text": " If the <a href=\"#features-dualSrcBlend\">dual source blending</a> feature is not enabled, <code>dstColorBlendFactor</code> <strong class=\"purple\">must</strong> not be <code>VK_BLEND_FACTOR_SRC1_COLOR</code>, <code>VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR</code>, <code>VK_BLEND_FACTOR_SRC1_ALPHA</code>, or <code>VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineColorBlendAttachmentState-srcAlphaBlendFactor-00610",
+          "text": " If the <a href=\"#features-dualSrcBlend\">dual source blending</a> feature is not enabled, <code>srcAlphaBlendFactor</code> <strong class=\"purple\">must</strong> not be <code>VK_BLEND_FACTOR_SRC1_COLOR</code>, <code>VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR</code>, <code>VK_BLEND_FACTOR_SRC1_ALPHA</code>, or <code>VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineColorBlendAttachmentState-dstAlphaBlendFactor-00611",
+          "text": " If the <a href=\"#features-dualSrcBlend\">dual source blending</a> feature is not enabled, <code>dstAlphaBlendFactor</code> <strong class=\"purple\">must</strong> not be <code>VK_BLEND_FACTOR_SRC1_COLOR</code>, <code>VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR</code>, <code>VK_BLEND_FACTOR_SRC1_ALPHA</code>, or <code>VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineColorBlendAttachmentState-srcColorBlendFactor-parameter",
+          "text": " <code>srcColorBlendFactor</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBlendFactor\">VkBlendFactor</a> value"
+        },
+        {
+          "vuid": "VUID-VkPipelineColorBlendAttachmentState-dstColorBlendFactor-parameter",
+          "text": " <code>dstColorBlendFactor</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBlendFactor\">VkBlendFactor</a> value"
+        },
+        {
+          "vuid": "VUID-VkPipelineColorBlendAttachmentState-colorBlendOp-parameter",
+          "text": " <code>colorBlendOp</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBlendOp\">VkBlendOp</a> value"
+        },
+        {
+          "vuid": "VUID-VkPipelineColorBlendAttachmentState-srcAlphaBlendFactor-parameter",
+          "text": " <code>srcAlphaBlendFactor</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBlendFactor\">VkBlendFactor</a> value"
+        },
+        {
+          "vuid": "VUID-VkPipelineColorBlendAttachmentState-dstAlphaBlendFactor-parameter",
+          "text": " <code>dstAlphaBlendFactor</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBlendFactor\">VkBlendFactor</a> value"
+        },
+        {
+          "vuid": "VUID-VkPipelineColorBlendAttachmentState-alphaBlendOp-parameter",
+          "text": " <code>alphaBlendOp</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBlendOp\">VkBlendOp</a> value"
+        },
+        {
+          "vuid": "VUID-VkPipelineColorBlendAttachmentState-colorWriteMask-parameter",
+          "text": " <code>colorWriteMask</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkColorComponentFlagBits\">VkColorComponentFlagBits</a> values"
+        }
+      ],
+      "(VK_EXT_blend_operation_advanced)": [
+        {
+          "vuid": "VUID-VkPipelineColorBlendAttachmentState-colorBlendOp-01406",
+          "text": " If either of <code>colorBlendOp</code> or <code>alphaBlendOp</code> is an <a href=\"#framebuffer-blend-advanced\">advanced blend operation</a>, then <code>colorBlendOp</code> <strong class=\"purple\">must</strong> equal <code>alphaBlendOp</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineColorBlendAttachmentState-advancedBlendIndependentBlend-01407",
+          "text": " If <a href=\"#VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT\">VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT</a>::<code>advancedBlendIndependentBlend</code> is <code>VK_FALSE</code> and <code>colorBlendOp</code> is an <a href=\"#framebuffer-blend-advanced\">advanced blend operation</a>, then <code>colorBlendOp</code> <strong class=\"purple\">must</strong> be the same for all attachments."
+        },
+        {
+          "vuid": "VUID-VkPipelineColorBlendAttachmentState-advancedBlendIndependentBlend-01408",
+          "text": " If <a href=\"#VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT\">VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT</a>::<code>advancedBlendIndependentBlend</code> is <code>VK_FALSE</code> and <code>alphaBlendOp</code> is an <a href=\"#framebuffer-blend-advanced\">advanced blend operation</a>, then <code>alphaBlendOp</code> <strong class=\"purple\">must</strong> be the same for all attachments."
+        },
+        {
+          "vuid": "VUID-VkPipelineColorBlendAttachmentState-advancedBlendAllOperations-01409",
+          "text": " If <a href=\"#VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT\">VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT</a>::<code>advancedBlendAllOperations</code> is <code>VK_FALSE</code>, then <code>colorBlendOp</code> <strong class=\"purple\">must</strong> not be <code>VK_BLEND_OP_ZERO_EXT</code>, <code>VK_BLEND_OP_SRC_EXT</code>, <code>VK_BLEND_OP_DST_EXT</code>, <code>VK_BLEND_OP_SRC_OVER_EXT</code>, <code>VK_BLEND_OP_DST_OVER_EXT</code>, <code>VK_BLEND_OP_SRC_IN_EXT</code>, <code>VK_BLEND_OP_DST_IN_EXT</code>, <code>VK_BLEND_OP_SRC_OUT_EXT</code>, <code>VK_BLEND_OP_DST_OUT_EXT</code>, <code>VK_BLEND_OP_SRC_ATOP_EXT</code>, <code>VK_BLEND_OP_DST_ATOP_EXT</code>, <code>VK_BLEND_OP_XOR_EXT</code>, <code>VK_BLEND_OP_INVERT_EXT</code>, <code>VK_BLEND_OP_INVERT_RGB_EXT</code>, <code>VK_BLEND_OP_LINEARDODGE_EXT</code>, <code>VK_BLEND_OP_LINEARBURN_EXT</code>, <code>VK_BLEND_OP_VIVIDLIGHT_EXT</code>, <code>VK_BLEND_OP_LINEARLIGHT_EXT</code>, <code>VK_BLEND_OP_PINLIGHT_EXT</code>, <code>VK_BLEND_OP_HARDMIX_EXT</code>, <code>VK_BLEND_OP_PLUS_EXT</code>, <code>VK_BLEND_OP_PLUS_CLAMPED_EXT</code>, <code>VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT</code>, <code>VK_BLEND_OP_PLUS_DARKER_EXT</code>, <code>VK_BLEND_OP_MINUS_EXT</code>, <code>VK_BLEND_OP_MINUS_CLAMPED_EXT</code>, <code>VK_BLEND_OP_CONTRAST_EXT</code>, <code>VK_BLEND_OP_INVERT_OVG_EXT</code>, <code>VK_BLEND_OP_RED_EXT</code>, <code>VK_BLEND_OP_GREEN_EXT</code>, or <code>VK_BLEND_OP_BLUE_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineColorBlendAttachmentState-colorBlendOp-01410",
+          "text": " If <code>colorBlendOp</code> or <code>alphaBlendOp</code> is an <a href=\"#framebuffer-blend-advanced\">advanced blend operation</a>, then <a href=\"#VkSubpassDescription\">VkSubpassDescription</a>::<code>colorAttachmentCount</code> of the subpass this pipeline is compiled against <strong class=\"purple\">must</strong> be less than or equal to <a href=\"#VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT\">VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT</a>::advancedBlendMaxColorAttachments"
+        }
+      ]
+    },
+    "vkCmdSetBlendConstants": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdSetBlendConstants-None-00612",
+          "text": " The bound graphics pipeline <strong class=\"purple\">must</strong> have been created with the <code>VK_DYNAMIC_STATE_BLEND_CONSTANTS</code> dynamic state enabled"
+        },
+        {
+          "vuid": "VUID-vkCmdSetBlendConstants-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdSetBlendConstants-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetBlendConstants-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics operations"
+        }
+      ]
+    },
+    "VkPipelineColorBlendAdvancedStateCreateInfoEXT": {
+      "(VK_EXT_blend_operation_advanced)": [
+        {
+          "vuid": "VUID-VkPipelineColorBlendAdvancedStateCreateInfoEXT-srcPremultiplied-01424",
+          "text": " If the <a href=\"#limits-advancedBlendNonPremultipliedSrcColor\">non-premultiplied source color</a> property is not supported, <code>srcPremultiplied</code> <strong class=\"purple\">must</strong> be <code>VK_TRUE</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineColorBlendAdvancedStateCreateInfoEXT-dstPremultiplied-01425",
+          "text": " If the <a href=\"#limits-advancedBlendNonPremultipliedDstColor\">non-premultiplied destination color</a> property is not supported, <code>dstPremultiplied</code> <strong class=\"purple\">must</strong> be <code>VK_TRUE</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineColorBlendAdvancedStateCreateInfoEXT-blendOverlap-01426",
+          "text": " If the <a href=\"#limits-advancedBlendCorrelatedOverlap\">correlated overlap</a> property is not supported, <code>blendOverlap</code> <strong class=\"purple\">must</strong> be <code>VK_BLEND_OVERLAP_UNCORRELATED_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineColorBlendAdvancedStateCreateInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkPipelineColorBlendAdvancedStateCreateInfoEXT-blendOverlap-parameter",
+          "text": " <code>blendOverlap</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBlendOverlapEXT\">VkBlendOverlapEXT</a> value"
+        }
+      ]
+    },
+    "vkCmdDispatch": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdDispatch-None-02690",
+          "text": " If a <code>VkImageView</code> is sampled with <code>VK_FILTER_LINEAR</code> as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatch-None-02691",
+          "text": " If a <code>VkImageView</code> is accessed using atomic operations as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatch-None-02697",
+          "text": " For each set <em>n</em> that is statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command, a descriptor set <strong class=\"purple\">must</strong> have been bound to <em>n</em> at the same pipeline bind point, with a <code>VkPipelineLayout</code> that is compatible for set <em>n</em>, with the <code>VkPipelineLayout</code> used to create the current <code>VkPipeline</code>, as described in <a href=\"#descriptorsets-compatibility\">Pipeline Layout Compatibility</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatch-None-02698",
+          "text": " For each push constant that is statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command, a push constant value <strong class=\"purple\">must</strong> have been set for the same pipeline bind point, with a <code>VkPipelineLayout</code> that is compatible for push constants, with the <code>VkPipelineLayout</code> used to create the current <code>VkPipeline</code>, as described in <a href=\"#descriptorsets-compatibility\">Pipeline Layout Compatibility</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatch-None-02699",
+          "text": " Descriptors in each bound descriptor set, specified via <code>vkCmdBindDescriptorSets</code>, <strong class=\"purple\">must</strong> be valid if they are statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatch-None-02700",
+          "text": " A valid pipeline <strong class=\"purple\">must</strong> be bound to the pipeline bind point used by this command"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatch-commandBuffer-02701",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command requires any dynamic state, that state <strong class=\"purple\">must</strong> have been set for <code>commandBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatch-None-02702",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used to sample from any <code>VkImage</code> with a <code>VkImageView</code> of the type <code>VK_IMAGE_VIEW_TYPE_3D</code>, <code>VK_IMAGE_VIEW_TYPE_CUBE</code>, <code>VK_IMAGE_VIEW_TYPE_1D_ARRAY</code>, <code>VK_IMAGE_VIEW_TYPE_2D_ARRAY</code> or <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatch-None-02703",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used with any of the SPIR-V <code>OpImageSample*</code> or <code>OpImageSparseSample*</code> instructions with <code>ImplicitLod</code>, <code>Dref</code> or <code>Proj</code> in their name, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatch-None-02704",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used with any of the SPIR-V <code>OpImageSample*</code> or <code>OpImageSparseSample*</code> instructions that includes a LOD bias or any offset values, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatch-None-02705",
+          "text": " If the <a href=\"#features-robustBufferAccess\">robust buffer access</a> feature is not enabled, and if the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a uniform buffer, it <strong class=\"purple\">must</strong> not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatch-None-02706",
+          "text": " If the <a href=\"#features-robustBufferAccess\">robust buffer access</a> feature is not enabled, and if the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a storage buffer, it <strong class=\"purple\">must</strong> not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatch-groupCountX-00386",
+          "text": " <code>groupCountX</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxComputeWorkGroupCount</code>[0]"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatch-groupCountY-00387",
+          "text": " <code>groupCountY</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxComputeWorkGroupCount</code>[1]"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatch-groupCountZ-00388",
+          "text": " <code>groupCountZ</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxComputeWorkGroupCount</code>[2]"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatch-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatch-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatch-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatch-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called outside of a render pass instance"
+        }
+      ],
+      "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDispatch-None-02692",
+          "text": " If a <code>VkImageView</code> is sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT</code>"
+        }
+      ],
+      "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDispatch-None-02693",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command <strong class=\"purple\">must</strong> not have a <a href=\"#VkImageViewType\">VkImageViewType</a> of <code>VK_IMAGE_VIEW_TYPE_3D</code>, <code>VK_IMAGE_VIEW_TYPE_CUBE</code>, or <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>"
+        }
+      ],
+      "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDispatch-filterCubic-02694",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command <strong class=\"purple\">must</strong> have a <a href=\"#VkImageViewType\">VkImageViewType</a> and format that supports cubic filtering, as specified by <code>VkFilterCubicImageViewImageFormatPropertiesEXT</code>::<code>filterCubic</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties2</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatch-filterCubicMinmax-02695",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> with a reduction mode of either <code>VK_SAMPLER_REDUCTION_MODE_MIN_EXT</code> or <code>VK_SAMPLER_REDUCTION_MODE_MAX_EXT</code> as a result of this command <strong class=\"purple\">must</strong> have a <a href=\"#VkImageViewType\">VkImageViewType</a> and format that supports cubic filtering together with minmax filtering, as specified by <code>VkFilterCubicImageViewImageFormatPropertiesEXT</code>::<code>filterCubicMinmax</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties2</code>"
+        }
+      ],
+      "(VK_NV_corner_sampled_image)": [
+        {
+          "vuid": "VUID-vkCmdDispatch-flags-02696",
+          "text": " Any <a href=\"#VkImage\">VkImage</a> created with a <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a>::<code>flags</code> containing <code>VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV</code> sampled as a result of this command <strong class=\"purple\">must</strong> only be sampled using a <a href=\"#VkSamplerAddressMode\">VkSamplerAddressMode</a> of <code>VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE</code>."
+        }
+      ],
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdDispatch-commandBuffer-02707",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, any resource accessed by the <code>VkPipeline</code> object bound to the pipeline bind point used by this command <strong class=\"purple\">must</strong> not be a protected resource"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatch-commandBuffer-02712",
+          "text": " If <code>commandBuffer</code> is a protected command buffer, any resource written to by the <code>VkPipeline</code> object bound to the pipeline bind point used by this command <strong class=\"purple\">must</strong> not be an unprotected resource"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatch-commandBuffer-02713",
+          "text": " If <code>commandBuffer</code> is a protected command buffer, pipeline stages other than the framebuffer-space and compute stages in the <code>VkPipeline</code> object bound to the pipeline bind point <strong class=\"purple\">must</strong> not write to any resource"
+        }
+      ]
+    },
+    "vkCmdDispatchIndirect": {
+      "core": [
+        {
+          "vuid": "VUID-vkCmdDispatchIndirect-None-02690",
+          "text": " If a <code>VkImageView</code> is sampled with <code>VK_FILTER_LINEAR</code> as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchIndirect-None-02691",
+          "text": " If a <code>VkImageView</code> is accessed using atomic operations as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchIndirect-None-02697",
+          "text": " For each set <em>n</em> that is statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command, a descriptor set <strong class=\"purple\">must</strong> have been bound to <em>n</em> at the same pipeline bind point, with a <code>VkPipelineLayout</code> that is compatible for set <em>n</em>, with the <code>VkPipelineLayout</code> used to create the current <code>VkPipeline</code>, as described in <a href=\"#descriptorsets-compatibility\">Pipeline Layout Compatibility</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchIndirect-None-02698",
+          "text": " For each push constant that is statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command, a push constant value <strong class=\"purple\">must</strong> have been set for the same pipeline bind point, with a <code>VkPipelineLayout</code> that is compatible for push constants, with the <code>VkPipelineLayout</code> used to create the current <code>VkPipeline</code>, as described in <a href=\"#descriptorsets-compatibility\">Pipeline Layout Compatibility</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchIndirect-None-02699",
+          "text": " Descriptors in each bound descriptor set, specified via <code>vkCmdBindDescriptorSets</code>, <strong class=\"purple\">must</strong> be valid if they are statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchIndirect-None-02700",
+          "text": " A valid pipeline <strong class=\"purple\">must</strong> be bound to the pipeline bind point used by this command"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchIndirect-commandBuffer-02701",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command requires any dynamic state, that state <strong class=\"purple\">must</strong> have been set for <code>commandBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchIndirect-None-02702",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used to sample from any <code>VkImage</code> with a <code>VkImageView</code> of the type <code>VK_IMAGE_VIEW_TYPE_3D</code>, <code>VK_IMAGE_VIEW_TYPE_CUBE</code>, <code>VK_IMAGE_VIEW_TYPE_1D_ARRAY</code>, <code>VK_IMAGE_VIEW_TYPE_2D_ARRAY</code> or <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchIndirect-None-02703",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used with any of the SPIR-V <code>OpImageSample*</code> or <code>OpImageSparseSample*</code> instructions with <code>ImplicitLod</code>, <code>Dref</code> or <code>Proj</code> in their name, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchIndirect-None-02704",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used with any of the SPIR-V <code>OpImageSample*</code> or <code>OpImageSparseSample*</code> instructions that includes a LOD bias or any offset values, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchIndirect-None-02705",
+          "text": " If the <a href=\"#features-robustBufferAccess\">robust buffer access</a> feature is not enabled, and if the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a uniform buffer, it <strong class=\"purple\">must</strong> not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchIndirect-None-02706",
+          "text": " If the <a href=\"#features-robustBufferAccess\">robust buffer access</a> feature is not enabled, and if the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a storage buffer, it <strong class=\"purple\">must</strong> not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchIndirect-buffer-02708",
+          "text": " If <code>buffer</code> is non-sparse then it <strong class=\"purple\">must</strong> be bound completely and contiguously to a single <code>VkDeviceMemory</code> object"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchIndirect-buffer-02709",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> have been created with the <code>VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT</code> bit set"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchIndirect-offset-02710",
+          "text": " <code>offset</code> <strong class=\"purple\">must</strong> be a multiple of <code>4</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchIndirect-offset-00407",
+          "text": " The sum of <code>offset</code> and the size of <code>VkDispatchIndirectCommand</code> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>buffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchIndirect-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchIndirect-buffer-parameter",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchIndirect-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchIndirect-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchIndirect-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called outside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchIndirect-commonparent",
+          "text": " Both of <code>buffer</code>, and <code>commandBuffer</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDispatchIndirect-None-02692",
+          "text": " If a <code>VkImageView</code> is sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT</code>"
+        }
+      ],
+      "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDispatchIndirect-None-02693",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command <strong class=\"purple\">must</strong> not have a <a href=\"#VkImageViewType\">VkImageViewType</a> of <code>VK_IMAGE_VIEW_TYPE_3D</code>, <code>VK_IMAGE_VIEW_TYPE_CUBE</code>, or <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>"
+        }
+      ],
+      "(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDispatchIndirect-filterCubic-02694",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command <strong class=\"purple\">must</strong> have a <a href=\"#VkImageViewType\">VkImageViewType</a> and format that supports cubic filtering, as specified by <code>VkFilterCubicImageViewImageFormatPropertiesEXT</code>::<code>filterCubic</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties2</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchIndirect-filterCubicMinmax-02695",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> with a reduction mode of either <code>VK_SAMPLER_REDUCTION_MODE_MIN_EXT</code> or <code>VK_SAMPLER_REDUCTION_MODE_MAX_EXT</code> as a result of this command <strong class=\"purple\">must</strong> have a <a href=\"#VkImageViewType\">VkImageViewType</a> and format that supports cubic filtering together with minmax filtering, as specified by <code>VkFilterCubicImageViewImageFormatPropertiesEXT</code>::<code>filterCubicMinmax</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties2</code>"
+        }
+      ],
+      "(VK_NV_corner_sampled_image)": [
+        {
+          "vuid": "VUID-vkCmdDispatchIndirect-flags-02696",
+          "text": " Any <a href=\"#VkImage\">VkImage</a> created with a <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a>::<code>flags</code> containing <code>VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV</code> sampled as a result of this command <strong class=\"purple\">must</strong> only be sampled using a <a href=\"#VkSamplerAddressMode\">VkSamplerAddressMode</a> of <code>VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE</code>."
+        }
+      ],
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdDispatchIndirect-commandBuffer-02707",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, any resource accessed by the <code>VkPipeline</code> object bound to the pipeline bind point used by this command <strong class=\"purple\">must</strong> not be a protected resource"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchIndirect-commandBuffer-02711",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> not be a protected command buffer"
+        }
+      ]
+    },
+    "VkDispatchIndirectCommand": {
+      "core": [
+        {
+          "vuid": "VUID-VkDispatchIndirectCommand-x-00417",
+          "text": " <code>x</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxComputeWorkGroupCount</code>[0]"
+        },
+        {
+          "vuid": "VUID-VkDispatchIndirectCommand-y-00418",
+          "text": " <code>y</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxComputeWorkGroupCount</code>[1]"
+        },
+        {
+          "vuid": "VUID-VkDispatchIndirectCommand-z-00419",
+          "text": " <code>z</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxComputeWorkGroupCount</code>[2]"
+        }
+      ]
+    },
+    "vkCmdDispatchBase": {
+      "(VK_VERSION_1_1,VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-vkCmdDispatchBase-None-02690",
+          "text": " If a <code>VkImageView</code> is sampled with <code>VK_FILTER_LINEAR</code> as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchBase-None-02691",
+          "text": " If a <code>VkImageView</code> is accessed using atomic operations as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchBase-None-02697",
+          "text": " For each set <em>n</em> that is statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command, a descriptor set <strong class=\"purple\">must</strong> have been bound to <em>n</em> at the same pipeline bind point, with a <code>VkPipelineLayout</code> that is compatible for set <em>n</em>, with the <code>VkPipelineLayout</code> used to create the current <code>VkPipeline</code>, as described in <a href=\"#descriptorsets-compatibility\">Pipeline Layout Compatibility</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchBase-None-02698",
+          "text": " For each push constant that is statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command, a push constant value <strong class=\"purple\">must</strong> have been set for the same pipeline bind point, with a <code>VkPipelineLayout</code> that is compatible for push constants, with the <code>VkPipelineLayout</code> used to create the current <code>VkPipeline</code>, as described in <a href=\"#descriptorsets-compatibility\">Pipeline Layout Compatibility</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchBase-None-02699",
+          "text": " Descriptors in each bound descriptor set, specified via <code>vkCmdBindDescriptorSets</code>, <strong class=\"purple\">must</strong> be valid if they are statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchBase-None-02700",
+          "text": " A valid pipeline <strong class=\"purple\">must</strong> be bound to the pipeline bind point used by this command"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchBase-commandBuffer-02701",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command requires any dynamic state, that state <strong class=\"purple\">must</strong> have been set for <code>commandBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchBase-None-02702",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used to sample from any <code>VkImage</code> with a <code>VkImageView</code> of the type <code>VK_IMAGE_VIEW_TYPE_3D</code>, <code>VK_IMAGE_VIEW_TYPE_CUBE</code>, <code>VK_IMAGE_VIEW_TYPE_1D_ARRAY</code>, <code>VK_IMAGE_VIEW_TYPE_2D_ARRAY</code> or <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchBase-None-02703",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used with any of the SPIR-V <code>OpImageSample*</code> or <code>OpImageSparseSample*</code> instructions with <code>ImplicitLod</code>, <code>Dref</code> or <code>Proj</code> in their name, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchBase-None-02704",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used with any of the SPIR-V <code>OpImageSample*</code> or <code>OpImageSparseSample*</code> instructions that includes a LOD bias or any offset values, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchBase-None-02705",
+          "text": " If the <a href=\"#features-robustBufferAccess\">robust buffer access</a> feature is not enabled, and if the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a uniform buffer, it <strong class=\"purple\">must</strong> not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchBase-None-02706",
+          "text": " If the <a href=\"#features-robustBufferAccess\">robust buffer access</a> feature is not enabled, and if the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a storage buffer, it <strong class=\"purple\">must</strong> not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchBase-baseGroupX-00421",
+          "text": " <code>baseGroupX</code> <strong class=\"purple\">must</strong> be less than <code>VkPhysicalDeviceLimits</code>::<code>maxComputeWorkGroupCount</code>[0]"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchBase-baseGroupX-00422",
+          "text": " <code>baseGroupX</code> <strong class=\"purple\">must</strong> be less than <code>VkPhysicalDeviceLimits</code>::<code>maxComputeWorkGroupCount</code>[1]"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchBase-baseGroupZ-00423",
+          "text": " <code>baseGroupZ</code> <strong class=\"purple\">must</strong> be less than <code>VkPhysicalDeviceLimits</code>::<code>maxComputeWorkGroupCount</code>[2]"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchBase-groupCountX-00424",
+          "text": " <code>groupCountX</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxComputeWorkGroupCount</code>[0] minus <code>baseGroupX</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchBase-groupCountY-00425",
+          "text": " <code>groupCountY</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxComputeWorkGroupCount</code>[1] minus <code>baseGroupY</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchBase-groupCountZ-00426",
+          "text": " <code>groupCountZ</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxComputeWorkGroupCount</code>[2] minus <code>baseGroupZ</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchBase-baseGroupX-00427",
+          "text": " If any of <code>baseGroupX</code>, <code>baseGroupY</code>, or <code>baseGroupZ</code> are not zero, then the bound compute pipeline <strong class=\"purple\">must</strong> have been created with the <code>VK_PIPELINE_CREATE_DISPATCH_BASE</code> flag."
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchBase-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchBase-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchBase-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchBase-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called outside of a render pass instance"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_device_group)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDispatchBase-None-02692",
+          "text": " If a <code>VkImageView</code> is sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_device_group)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDispatchBase-None-02693",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command <strong class=\"purple\">must</strong> not have a <a href=\"#VkImageViewType\">VkImageViewType</a> of <code>VK_IMAGE_VIEW_TYPE_3D</code>, <code>VK_IMAGE_VIEW_TYPE_CUBE</code>, or <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_device_group)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdDispatchBase-filterCubic-02694",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command <strong class=\"purple\">must</strong> have a <a href=\"#VkImageViewType\">VkImageViewType</a> and format that supports cubic filtering, as specified by <code>VkFilterCubicImageViewImageFormatPropertiesEXT</code>::<code>filterCubic</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties2</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdDispatchBase-filterCubicMinmax-02695",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> with a reduction mode of either <code>VK_SAMPLER_REDUCTION_MODE_MIN_EXT</code> or <code>VK_SAMPLER_REDUCTION_MODE_MAX_EXT</code> as a result of this command <strong class=\"purple\">must</strong> have a <a href=\"#VkImageViewType\">VkImageViewType</a> and format that supports cubic filtering together with minmax filtering, as specified by <code>VkFilterCubicImageViewImageFormatPropertiesEXT</code>::<code>filterCubicMinmax</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties2</code>"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_device_group)+(VK_NV_corner_sampled_image)": [
+        {
+          "vuid": "VUID-vkCmdDispatchBase-flags-02696",
+          "text": " Any <a href=\"#VkImage\">VkImage</a> created with a <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a>::<code>flags</code> containing <code>VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV</code> sampled as a result of this command <strong class=\"purple\">must</strong> only be sampled using a <a href=\"#VkSamplerAddressMode\">VkSamplerAddressMode</a> of <code>VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE</code>."
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_device_group)+(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdDispatchBase-commandBuffer-02707",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, any resource accessed by the <code>VkPipeline</code> object bound to the pipeline bind point used by this command <strong class=\"purple\">must</strong> not be a protected resource"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX": {
+      "(VK_NVX_device_generated_commands)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX-pFeatures-parameter",
+          "text": " <code>pFeatures</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkDeviceGeneratedCommandsFeaturesNVX\">VkDeviceGeneratedCommandsFeaturesNVX</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX-pLimits-parameter",
+          "text": " <code>pLimits</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkDeviceGeneratedCommandsLimitsNVX\">VkDeviceGeneratedCommandsLimitsNVX</a> structure"
+        }
+      ]
+    },
+    "VkDeviceGeneratedCommandsFeaturesNVX": {
+      "(VK_NVX_device_generated_commands)": [
+        {
+          "vuid": "VUID-VkDeviceGeneratedCommandsFeaturesNVX-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX</code>"
+        },
+        {
+          "vuid": "VUID-VkDeviceGeneratedCommandsFeaturesNVX-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "VkDeviceGeneratedCommandsLimitsNVX": {
+      "(VK_NVX_device_generated_commands)": [
+        {
+          "vuid": "VUID-VkDeviceGeneratedCommandsLimitsNVX-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX</code>"
+        },
+        {
+          "vuid": "VUID-VkDeviceGeneratedCommandsLimitsNVX-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "vkCreateObjectTableNVX": {
+      "(VK_NVX_device_generated_commands)": [
+        {
+          "vuid": "VUID-vkCreateObjectTableNVX-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateObjectTableNVX-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkObjectTableCreateInfoNVX\">VkObjectTableCreateInfoNVX</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateObjectTableNVX-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateObjectTableNVX-pObjectTable-parameter",
+          "text": " <code>pObjectTable</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkObjectTableNVX\">VkObjectTableNVX</a> handle"
+        }
+      ]
+    },
+    "VkObjectTableCreateInfoNVX": {
+      "(VK_NVX_device_generated_commands)": [
+        {
+          "vuid": "VUID-VkObjectTableCreateInfoNVX-computeBindingPointSupport-01355",
+          "text": " If the <code>VkDeviceGeneratedCommandsFeaturesNVX</code>::<code>computeBindingPointSupport</code> feature is not enabled, <code>pObjectEntryUsageFlags</code> <strong class=\"purple\">must</strong> not contain <code>VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX</code>"
+        },
+        {
+          "vuid": "VUID-VkObjectTableCreateInfoNVX-pObjectEntryCounts-01356",
+          "text": " Any value within <code>pObjectEntryCounts</code> <strong class=\"purple\">must</strong> not exceed <code>VkDeviceGeneratedCommandsLimitsNVX</code>::<code>maxObjectEntryCounts</code>"
+        },
+        {
+          "vuid": "VUID-VkObjectTableCreateInfoNVX-maxUniformBuffersPerDescriptor-01357",
+          "text": " <code>maxUniformBuffersPerDescriptor</code> <strong class=\"purple\">must</strong> be within the limits supported by the device."
+        },
+        {
+          "vuid": "VUID-VkObjectTableCreateInfoNVX-maxStorageBuffersPerDescriptor-01358",
+          "text": " <code>maxStorageBuffersPerDescriptor</code> <strong class=\"purple\">must</strong> be within the limits supported by the device."
+        },
+        {
+          "vuid": "VUID-VkObjectTableCreateInfoNVX-maxStorageImagesPerDescriptor-01359",
+          "text": " <code>maxStorageImagesPerDescriptor</code> <strong class=\"purple\">must</strong> be within the limits supported by the device."
+        },
+        {
+          "vuid": "VUID-VkObjectTableCreateInfoNVX-maxSampledImagesPerDescriptor-01360",
+          "text": " <code>maxSampledImagesPerDescriptor</code> <strong class=\"purple\">must</strong> be within the limits supported by the device."
+        },
+        {
+          "vuid": "VUID-VkObjectTableCreateInfoNVX-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX</code>"
+        },
+        {
+          "vuid": "VUID-VkObjectTableCreateInfoNVX-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkObjectTableCreateInfoNVX-pObjectEntryTypes-parameter",
+          "text": " <code>pObjectEntryTypes</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>objectCount</code> valid <a href=\"#VkObjectEntryTypeNVX\">VkObjectEntryTypeNVX</a> values"
+        },
+        {
+          "vuid": "VUID-VkObjectTableCreateInfoNVX-pObjectEntryCounts-parameter",
+          "text": " <code>pObjectEntryCounts</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>objectCount</code> <code>uint32_t</code> values"
+        },
+        {
+          "vuid": "VUID-VkObjectTableCreateInfoNVX-pObjectEntryUsageFlags-parameter",
+          "text": " <code>pObjectEntryUsageFlags</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>objectCount</code> valid combinations of <a href=\"#VkObjectEntryUsageFlagBitsNVX\">VkObjectEntryUsageFlagBitsNVX</a> values"
+        },
+        {
+          "vuid": "VUID-VkObjectTableCreateInfoNVX-pObjectEntryUsageFlags-requiredbitmask",
+          "text": " Each element of <code>pObjectEntryUsageFlags</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkObjectTableCreateInfoNVX-objectCount-arraylength",
+          "text": " <code>objectCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "vkDestroyObjectTableNVX": {
+      "(VK_NVX_device_generated_commands)": [
+        {
+          "vuid": "VUID-vkDestroyObjectTableNVX-objectTable-01361",
+          "text": " All submitted commands that refer to <code>objectTable</code> <strong class=\"purple\">must</strong> have completed execution."
+        },
+        {
+          "vuid": "VUID-vkDestroyObjectTableNVX-objectTable-01362",
+          "text": " If <code>VkAllocationCallbacks</code> were provided when <code>objectTable</code> was created, a compatible set of callbacks <strong class=\"purple\">must</strong> be provided here."
+        },
+        {
+          "vuid": "VUID-vkDestroyObjectTableNVX-objectTable-01363",
+          "text": " If no <code>VkAllocationCallbacks</code> were provided when <code>objectTable</code> was created, <code>pAllocator</code> <strong class=\"purple\">must</strong> be <code>NULL</code>."
+        },
+        {
+          "vuid": "VUID-vkDestroyObjectTableNVX-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyObjectTableNVX-objectTable-parameter",
+          "text": " <code>objectTable</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkObjectTableNVX\">VkObjectTableNVX</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyObjectTableNVX-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkDestroyObjectTableNVX-objectTable-parent",
+          "text": " <code>objectTable</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkRegisterObjectsNVX": {
+      "(VK_NVX_device_generated_commands)": [
+        {
+          "vuid": "VUID-vkRegisterObjectsNVX-pObjectTableEntry-01364",
+          "text": " The contents of <code>pObjectTableEntry</code> <strong class=\"purple\">must</strong> yield plausible bindings supported by the device."
+        },
+        {
+          "vuid": "VUID-vkRegisterObjectsNVX-pObjectIndices-01365",
+          "text": " At any <code>pObjectIndices</code> there <strong class=\"purple\">must</strong> not be a registered resource already."
+        },
+        {
+          "vuid": "VUID-vkRegisterObjectsNVX-pObjectIndices-01366",
+          "text": " Any value inside <code>pObjectIndices</code> <strong class=\"purple\">must</strong> be below the appropriate <code>VkObjectTableCreateInfoNVX</code>::<code>pObjectEntryCounts</code> limits provided at <code>objectTable</code> creation time."
+        },
+        {
+          "vuid": "VUID-vkRegisterObjectsNVX-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkRegisterObjectsNVX-objectTable-parameter",
+          "text": " <code>objectTable</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkObjectTableNVX\">VkObjectTableNVX</a> handle"
+        },
+        {
+          "vuid": "VUID-vkRegisterObjectsNVX-ppObjectTableEntries-parameter",
+          "text": " <code>ppObjectTableEntries</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>objectCount</code> valid <a href=\"#VkObjectTableEntryNVX\">VkObjectTableEntryNVX</a> structures"
+        },
+        {
+          "vuid": "VUID-vkRegisterObjectsNVX-pObjectIndices-parameter",
+          "text": " <code>pObjectIndices</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>objectCount</code> <code>uint32_t</code> values"
+        },
+        {
+          "vuid": "VUID-vkRegisterObjectsNVX-objectCount-arraylength",
+          "text": " <code>objectCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkRegisterObjectsNVX-objectTable-parent",
+          "text": " <code>objectTable</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "VkObjectTableEntryNVX": {
+      "(VK_NVX_device_generated_commands)": [
+        {
+          "vuid": "VUID-VkObjectTableEntryNVX-computeBindingPointSupport-01367",
+          "text": " If the <code>VkDeviceGeneratedCommandsFeaturesNVX</code>::<code>computeBindingPointSupport</code> feature is not enabled, <code>flags</code> <strong class=\"purple\">must</strong> not contain <code>VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX</code>"
+        },
+        {
+          "vuid": "VUID-VkObjectTableEntryNVX-type-parameter",
+          "text": " <code>type</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkObjectEntryTypeNVX\">VkObjectEntryTypeNVX</a> value"
+        },
+        {
+          "vuid": "VUID-VkObjectTableEntryNVX-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkObjectEntryUsageFlagBitsNVX\">VkObjectEntryUsageFlagBitsNVX</a> values"
+        },
+        {
+          "vuid": "VUID-VkObjectTableEntryNVX-flags-requiredbitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        }
+      ]
+    },
+    "VkObjectTablePipelineEntryNVX": {
+      "(VK_NVX_device_generated_commands)": [
+        {
+          "vuid": "VUID-VkObjectTablePipelineEntryNVX-type-01368",
+          "text": " <code>type</code> <strong class=\"purple\">must</strong> be <code>VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX</code>"
+        },
+        {
+          "vuid": "VUID-VkObjectTablePipelineEntryNVX-type-parameter",
+          "text": " <code>type</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkObjectEntryTypeNVX\">VkObjectEntryTypeNVX</a> value"
+        },
+        {
+          "vuid": "VUID-VkObjectTablePipelineEntryNVX-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkObjectEntryUsageFlagBitsNVX\">VkObjectEntryUsageFlagBitsNVX</a> values"
+        },
+        {
+          "vuid": "VUID-VkObjectTablePipelineEntryNVX-flags-requiredbitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkObjectTablePipelineEntryNVX-pipeline-parameter",
+          "text": " <code>pipeline</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipeline\">VkPipeline</a> handle"
+        }
+      ]
+    },
+    "VkObjectTableDescriptorSetEntryNVX": {
+      "(VK_NVX_device_generated_commands)": [
+        {
+          "vuid": "VUID-VkObjectTableDescriptorSetEntryNVX-type-01369",
+          "text": " <code>type</code> <strong class=\"purple\">must</strong> be <code>VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX</code>"
+        },
+        {
+          "vuid": "VUID-VkObjectTableDescriptorSetEntryNVX-type-parameter",
+          "text": " <code>type</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkObjectEntryTypeNVX\">VkObjectEntryTypeNVX</a> value"
+        },
+        {
+          "vuid": "VUID-VkObjectTableDescriptorSetEntryNVX-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkObjectEntryUsageFlagBitsNVX\">VkObjectEntryUsageFlagBitsNVX</a> values"
+        },
+        {
+          "vuid": "VUID-VkObjectTableDescriptorSetEntryNVX-flags-requiredbitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkObjectTableDescriptorSetEntryNVX-pipelineLayout-parameter",
+          "text": " <code>pipelineLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipelineLayout\">VkPipelineLayout</a> handle"
+        },
+        {
+          "vuid": "VUID-VkObjectTableDescriptorSetEntryNVX-descriptorSet-parameter",
+          "text": " <code>descriptorSet</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDescriptorSet\">VkDescriptorSet</a> handle"
+        },
+        {
+          "vuid": "VUID-VkObjectTableDescriptorSetEntryNVX-commonparent",
+          "text": " Both of <code>descriptorSet</code>, and <code>pipelineLayout</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
+    "VkObjectTableVertexBufferEntryNVX": {
+      "(VK_NVX_device_generated_commands)": [
+        {
+          "vuid": "VUID-VkObjectTableVertexBufferEntryNVX-type-01370",
+          "text": " <code>type</code> <strong class=\"purple\">must</strong> be <code>VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX</code>"
+        },
+        {
+          "vuid": "VUID-VkObjectTableVertexBufferEntryNVX-type-parameter",
+          "text": " <code>type</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkObjectEntryTypeNVX\">VkObjectEntryTypeNVX</a> value"
+        },
+        {
+          "vuid": "VUID-VkObjectTableVertexBufferEntryNVX-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkObjectEntryUsageFlagBitsNVX\">VkObjectEntryUsageFlagBitsNVX</a> values"
+        },
+        {
+          "vuid": "VUID-VkObjectTableVertexBufferEntryNVX-flags-requiredbitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkObjectTableVertexBufferEntryNVX-buffer-parameter",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        }
+      ]
+    },
+    "VkObjectTableIndexBufferEntryNVX": {
+      "(VK_NVX_device_generated_commands)": [
+        {
+          "vuid": "VUID-VkObjectTableIndexBufferEntryNVX-type-01371",
+          "text": " <code>type</code> <strong class=\"purple\">must</strong> be <code>VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX</code>"
+        },
+        {
+          "vuid": "VUID-VkObjectTableIndexBufferEntryNVX-indexType-02783",
+          "text": " <code>indexType</code> <strong class=\"purple\">must</strong> be <code>VK_INDEX_TYPE_UINT16</code>, or <code>VK_INDEX_TYPE_UINT32</code>"
+        },
+        {
+          "vuid": "VUID-VkObjectTableIndexBufferEntryNVX-type-parameter",
+          "text": " <code>type</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkObjectEntryTypeNVX\">VkObjectEntryTypeNVX</a> value"
+        },
+        {
+          "vuid": "VUID-VkObjectTableIndexBufferEntryNVX-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkObjectEntryUsageFlagBitsNVX\">VkObjectEntryUsageFlagBitsNVX</a> values"
+        },
+        {
+          "vuid": "VUID-VkObjectTableIndexBufferEntryNVX-flags-requiredbitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkObjectTableIndexBufferEntryNVX-buffer-parameter",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-VkObjectTableIndexBufferEntryNVX-indexType-parameter",
+          "text": " <code>indexType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkIndexType\">VkIndexType</a> value"
+        }
+      ]
+    },
+    "VkObjectTablePushConstantEntryNVX": {
+      "(VK_NVX_device_generated_commands)": [
+        {
+          "vuid": "VUID-VkObjectTablePushConstantEntryNVX-type-01372",
+          "text": " <code>type</code> <strong class=\"purple\">must</strong> be <code>VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX</code>"
+        },
+        {
+          "vuid": "VUID-VkObjectTablePushConstantEntryNVX-type-parameter",
+          "text": " <code>type</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkObjectEntryTypeNVX\">VkObjectEntryTypeNVX</a> value"
+        },
+        {
+          "vuid": "VUID-VkObjectTablePushConstantEntryNVX-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkObjectEntryUsageFlagBitsNVX\">VkObjectEntryUsageFlagBitsNVX</a> values"
+        },
+        {
+          "vuid": "VUID-VkObjectTablePushConstantEntryNVX-flags-requiredbitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkObjectTablePushConstantEntryNVX-pipelineLayout-parameter",
+          "text": " <code>pipelineLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipelineLayout\">VkPipelineLayout</a> handle"
+        },
+        {
+          "vuid": "VUID-VkObjectTablePushConstantEntryNVX-stageFlags-parameter",
+          "text": " <code>stageFlags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkShaderStageFlagBits\">VkShaderStageFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkObjectTablePushConstantEntryNVX-stageFlags-requiredbitmask",
+          "text": " <code>stageFlags</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        }
+      ]
+    },
+    "vkUnregisterObjectsNVX": {
+      "(VK_NVX_device_generated_commands)": [
+        {
+          "vuid": "VUID-vkUnregisterObjectsNVX-pObjectIndices-01373",
+          "text": " At any <code>pObjectIndices</code> there <strong class=\"purple\">must</strong> be a registered resource already."
+        },
+        {
+          "vuid": "VUID-vkUnregisterObjectsNVX-pObjectEntryTypes-01374",
+          "text": " The <code>pObjectEntryTypes</code> of the resource at <code>pObjectIndices</code> <strong class=\"purple\">must</strong> match."
+        },
+        {
+          "vuid": "VUID-vkUnregisterObjectsNVX-None-01375",
+          "text": " All operations on the device using the registered resource <strong class=\"purple\">must</strong> have been completed."
+        },
+        {
+          "vuid": "VUID-vkUnregisterObjectsNVX-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkUnregisterObjectsNVX-objectTable-parameter",
+          "text": " <code>objectTable</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkObjectTableNVX\">VkObjectTableNVX</a> handle"
+        },
+        {
+          "vuid": "VUID-vkUnregisterObjectsNVX-pObjectEntryTypes-parameter",
+          "text": " <code>pObjectEntryTypes</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>objectCount</code> valid <a href=\"#VkObjectEntryTypeNVX\">VkObjectEntryTypeNVX</a> values"
+        },
+        {
+          "vuid": "VUID-vkUnregisterObjectsNVX-pObjectIndices-parameter",
+          "text": " <code>pObjectIndices</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>objectCount</code> <code>uint32_t</code> values"
+        },
+        {
+          "vuid": "VUID-vkUnregisterObjectsNVX-objectCount-arraylength",
+          "text": " <code>objectCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkUnregisterObjectsNVX-objectTable-parent",
+          "text": " <code>objectTable</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "VkIndirectCommandsLayoutTokenNVX": {
+      "(VK_NVX_device_generated_commands)": [
+        {
+          "vuid": "VUID-VkIndirectCommandsLayoutTokenNVX-bindingUnit-01342",
+          "text": " <code>bindingUnit</code> <strong class=\"purple\">must</strong> stay within device supported limits for the appropriate commands."
+        },
+        {
+          "vuid": "VUID-VkIndirectCommandsLayoutTokenNVX-dynamicCount-01343",
+          "text": " <code>dynamicCount</code> <strong class=\"purple\">must</strong> stay within device supported limits for the appropriate commands."
+        },
+        {
+          "vuid": "VUID-VkIndirectCommandsLayoutTokenNVX-divisor-01344",
+          "text": " <code>divisor</code> <strong class=\"purple\">must</strong> be greater than <code>0</code> and a power of two."
+        },
+        {
+          "vuid": "VUID-VkIndirectCommandsLayoutTokenNVX-tokenType-parameter",
+          "text": " <code>tokenType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkIndirectCommandsTokenTypeNVX\">VkIndirectCommandsTokenTypeNVX</a> value"
+        }
+      ]
+    },
+    "VkIndirectCommandsTokenNVX": {
+      "(VK_NVX_device_generated_commands)": [
+        {
+          "vuid": "VUID-VkIndirectCommandsTokenNVX-buffer-01345",
+          "text": " The <code>buffer</code>&#8217;s usage flag <strong class=\"purple\">must</strong> have the <code>VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT</code> bit set."
+        },
+        {
+          "vuid": "VUID-VkIndirectCommandsTokenNVX-offset-01346",
+          "text": " The <code>offset</code> <strong class=\"purple\">must</strong> be aligned to <code>VkDeviceGeneratedCommandsLimitsNVX</code>::<code>minCommandsTokenBufferOffsetAlignment</code>."
+        },
+        {
+          "vuid": "VUID-VkIndirectCommandsTokenNVX-tokenType-parameter",
+          "text": " <code>tokenType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkIndirectCommandsTokenTypeNVX\">VkIndirectCommandsTokenTypeNVX</a> value"
+        },
+        {
+          "vuid": "VUID-VkIndirectCommandsTokenNVX-buffer-parameter",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        }
+      ]
+    },
+    "vkCreateIndirectCommandsLayoutNVX": {
+      "(VK_NVX_device_generated_commands)": [
+        {
+          "vuid": "VUID-vkCreateIndirectCommandsLayoutNVX-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateIndirectCommandsLayoutNVX-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkIndirectCommandsLayoutCreateInfoNVX\">VkIndirectCommandsLayoutCreateInfoNVX</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateIndirectCommandsLayoutNVX-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateIndirectCommandsLayoutNVX-pIndirectCommandsLayout-parameter",
+          "text": " <code>pIndirectCommandsLayout</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkIndirectCommandsLayoutNVX\">VkIndirectCommandsLayoutNVX</a> handle"
+        }
+      ]
+    },
+    "VkIndirectCommandsLayoutCreateInfoNVX": {
+      "(VK_NVX_device_generated_commands)": [
+        {
+          "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-tokenCount-01347",
+          "text": " <code>tokenCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code> and below <code>VkDeviceGeneratedCommandsLimitsNVX</code>::<code>maxIndirectCommandsLayoutTokenCount</code>"
+        },
+        {
+          "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-computeBindingPointSupport-01348",
+          "text": " If the <code>VkDeviceGeneratedCommandsFeaturesNVX</code>::<code>computeBindingPointSupport</code> feature is not enabled, then <code>pipelineBindPoint</code> <strong class=\"purple\">must</strong> not be <code>VK_PIPELINE_BIND_POINT_COMPUTE</code>"
+        },
+        {
+          "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-pTokens-01349",
+          "text": " If <code>pTokens</code> contains an entry of <code>VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX</code> it <strong class=\"purple\">must</strong> be the first element of the array and there <strong class=\"purple\">must</strong> be only a single element of such token type."
+        },
+        {
+          "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-pTokens-01350",
+          "text": " All state binding tokens in <code>pTokens</code> <strong class=\"purple\">must</strong> occur prior work provoking tokens (<code>VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX</code>, <code>VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX</code>, <code>VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX</code>)."
+        },
+        {
+          "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-pTokens-01351",
+          "text": " The content of <code>pTokens</code> <strong class=\"purple\">must</strong> include one single work provoking token that is compatible with the <code>pipelineBindPoint</code>."
+        },
+        {
+          "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX</code>"
+        },
+        {
+          "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-pipelineBindPoint-parameter",
+          "text": " <code>pipelineBindPoint</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPipelineBindPoint\">VkPipelineBindPoint</a> value"
+        },
+        {
+          "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkIndirectCommandsLayoutUsageFlagBitsNVX\">VkIndirectCommandsLayoutUsageFlagBitsNVX</a> values"
+        },
+        {
+          "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-flags-requiredbitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-pTokens-parameter",
+          "text": " <code>pTokens</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>tokenCount</code> valid <a href=\"#VkIndirectCommandsLayoutTokenNVX\">VkIndirectCommandsLayoutTokenNVX</a> structures"
+        },
+        {
+          "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-tokenCount-arraylength",
+          "text": " <code>tokenCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "vkDestroyIndirectCommandsLayoutNVX": {
+      "(VK_NVX_device_generated_commands)": [
+        {
+          "vuid": "VUID-vkDestroyIndirectCommandsLayoutNVX-indirectCommandsLayout-01352",
+          "text": " All submitted commands that refer to <code>indirectCommandsLayout</code> <strong class=\"purple\">must</strong> have completed execution"
+        },
+        {
+          "vuid": "VUID-vkDestroyIndirectCommandsLayoutNVX-objectTable-01353",
+          "text": " If <code>VkAllocationCallbacks</code> were provided when <code>objectTable</code> was created, a compatible set of callbacks <strong class=\"purple\">must</strong> be provided here"
+        },
+        {
+          "vuid": "VUID-vkDestroyIndirectCommandsLayoutNVX-objectTable-01354",
+          "text": " If no <code>VkAllocationCallbacks</code> were provided when <code>objectTable</code> was created, <code>pAllocator</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroyIndirectCommandsLayoutNVX-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyIndirectCommandsLayoutNVX-indirectCommandsLayout-parameter",
+          "text": " <code>indirectCommandsLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkIndirectCommandsLayoutNVX\">VkIndirectCommandsLayoutNVX</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyIndirectCommandsLayoutNVX-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkDestroyIndirectCommandsLayoutNVX-indirectCommandsLayout-parent",
+          "text": " <code>indirectCommandsLayout</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkCmdReserveSpaceForCommandsNVX": {
+      "(VK_NVX_device_generated_commands)": [
+        {
+          "vuid": "VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-01329",
+          "text": " The provided <code>commandBuffer</code> <strong class=\"purple\">must</strong> not have had a prior space reservation since its creation or the last reset."
+        },
+        {
+          "vuid": "VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-01330",
+          "text": " The state of the <code>commandBuffer</code> <strong class=\"purple\">must</strong> be legal to execute all commands within the sequence provided by the <code>indirectCommandsLayout</code> member of <code>pProcessCommandsInfo</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdReserveSpaceForCommandsNVX-pReserveSpaceInfo-parameter",
+          "text": " <code>pReserveSpaceInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkCmdReserveSpaceForCommandsInfoNVX\">VkCmdReserveSpaceForCommandsInfoNVX</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdReserveSpaceForCommandsNVX-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called inside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdReserveSpaceForCommandsNVX-bufferlevel",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a secondary <code>VkCommandBuffer</code>"
+        }
+      ]
+    },
+    "VkCmdReserveSpaceForCommandsInfoNVX": {
+      "(VK_NVX_device_generated_commands)": [
+        {
+          "vuid": "VUID-VkCmdReserveSpaceForCommandsInfoNVX-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX</code>"
+        },
+        {
+          "vuid": "VUID-VkCmdReserveSpaceForCommandsInfoNVX-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkCmdReserveSpaceForCommandsInfoNVX-objectTable-parameter",
+          "text": " <code>objectTable</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkObjectTableNVX\">VkObjectTableNVX</a> handle"
+        },
+        {
+          "vuid": "VUID-VkCmdReserveSpaceForCommandsInfoNVX-indirectCommandsLayout-parameter",
+          "text": " <code>indirectCommandsLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkIndirectCommandsLayoutNVX\">VkIndirectCommandsLayoutNVX</a> handle"
+        },
+        {
+          "vuid": "VUID-VkCmdReserveSpaceForCommandsInfoNVX-commonparent",
+          "text": " Both of <code>indirectCommandsLayout</code>, and <code>objectTable</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
+    "vkCmdProcessCommandsNVX": {
+      "(VK_NVX_device_generated_commands)": [
+        {
+          "vuid": "VUID-vkCmdProcessCommandsNVX-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdProcessCommandsNVX-pProcessCommandsInfo-parameter",
+          "text": " <code>pProcessCommandsInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkCmdProcessCommandsInfoNVX\">VkCmdProcessCommandsInfoNVX</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCmdProcessCommandsNVX-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdProcessCommandsNVX-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, or compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdProcessCommandsNVX-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called inside of a render pass instance"
+        }
+      ]
+    },
+    "VkCmdProcessCommandsInfoNVX": {
+      "(VK_NVX_device_generated_commands)": [
+        {
+          "vuid": "VUID-VkCmdProcessCommandsInfoNVX-objectTable-01331",
+          "text": " The provided <code>objectTable</code> <strong class=\"purple\">must</strong> include all objects referenced by the generation process"
+        },
+        {
+          "vuid": "VUID-VkCmdProcessCommandsInfoNVX-indirectCommandsTokenCount-01332",
+          "text": " <code>indirectCommandsTokenCount</code> <strong class=\"purple\">must</strong> match the <code>indirectCommandsLayout</code>&#8217;s <code>tokenCount</code>"
+        },
+        {
+          "vuid": "VUID-VkCmdProcessCommandsInfoNVX-tokenType-01333",
+          "text": " The <code>tokenType</code> member of each entry in the <code>pIndirectCommandsTokens</code> array <strong class=\"purple\">must</strong> match the values used at creation time of <code>indirectCommandsLayout</code>"
+        },
+        {
+          "vuid": "VUID-VkCmdProcessCommandsInfoNVX-targetCommandBuffer-01334",
+          "text": " If <code>targetCommandBuffer</code> is provided, it <strong class=\"purple\">must</strong> have reserved command space"
+        },
+        {
+          "vuid": "VUID-VkCmdProcessCommandsInfoNVX-targetCommandBuffer-01335",
+          "text": " If <code>targetCommandBuffer</code> is provided, the <code>objectTable</code> <strong class=\"purple\">must</strong> match the reservation&#8217;s <code>objectTable</code> and <strong class=\"purple\">must</strong> have had all referenced objects registered at reservation time"
+        },
+        {
+          "vuid": "VUID-VkCmdProcessCommandsInfoNVX-targetCommandBuffer-01336",
+          "text": " If <code>targetCommandBuffer</code> is provided, the <code>indirectCommandsLayout</code> <strong class=\"purple\">must</strong> match the reservation&#8217;s <code>indirectCommandsLayout</code>"
+        },
+        {
+          "vuid": "VUID-VkCmdProcessCommandsInfoNVX-targetCommandBuffer-01337",
+          "text": " If <code>targetCommandBuffer</code> is provided, the <code>maxSequencesCount</code> <strong class=\"purple\">must</strong> not exceed the reservation&#8217;s <code>maxSequencesCount</code>"
+        },
+        {
+          "vuid": "VUID-VkCmdProcessCommandsInfoNVX-sequencesCountBuffer-01338",
+          "text": " If <code>sequencesCountBuffer</code> is used, its usage flag <strong class=\"purple\">must</strong> have the <code>VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT</code> bit set"
+        },
+        {
+          "vuid": "VUID-VkCmdProcessCommandsInfoNVX-sequencesCountBuffer-01339",
+          "text": " If <code>sequencesCountBuffer</code> is used, <code>sequencesCountOffset</code> <strong class=\"purple\">must</strong> be aligned to <code>VkDeviceGeneratedCommandsLimitsNVX</code>::<code>minSequenceCountBufferOffsetAlignment</code>"
+        },
+        {
+          "vuid": "VUID-VkCmdProcessCommandsInfoNVX-sequencesIndexBuffer-01340",
+          "text": " If <code>sequencesIndexBuffer</code> is used, its usage flag <strong class=\"purple\">must</strong> have the <code>VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT</code> bit set"
+        },
+        {
+          "vuid": "VUID-VkCmdProcessCommandsInfoNVX-sequencesIndexBuffer-01341",
+          "text": " If <code>sequencesIndexBuffer</code> is used, <code>sequencesIndexOffset</code> <strong class=\"purple\">must</strong> be aligned to <code>VkDeviceGeneratedCommandsLimitsNVX</code>::<code>minSequenceIndexBufferOffsetAlignment</code>"
+        },
+        {
+          "vuid": "VUID-VkCmdProcessCommandsInfoNVX-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX</code>"
+        },
+        {
+          "vuid": "VUID-VkCmdProcessCommandsInfoNVX-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkCmdProcessCommandsInfoNVX-objectTable-parameter",
+          "text": " <code>objectTable</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkObjectTableNVX\">VkObjectTableNVX</a> handle"
+        },
+        {
+          "vuid": "VUID-VkCmdProcessCommandsInfoNVX-indirectCommandsLayout-parameter",
+          "text": " <code>indirectCommandsLayout</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkIndirectCommandsLayoutNVX\">VkIndirectCommandsLayoutNVX</a> handle"
+        },
+        {
+          "vuid": "VUID-VkCmdProcessCommandsInfoNVX-pIndirectCommandsTokens-parameter",
+          "text": " <code>pIndirectCommandsTokens</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>indirectCommandsTokenCount</code> valid <a href=\"#VkIndirectCommandsTokenNVX\">VkIndirectCommandsTokenNVX</a> structures"
+        },
+        {
+          "vuid": "VUID-VkCmdProcessCommandsInfoNVX-targetCommandBuffer-parameter",
+          "text": " If <code>targetCommandBuffer</code> is not <code>NULL</code>, <code>targetCommandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-VkCmdProcessCommandsInfoNVX-sequencesCountBuffer-parameter",
+          "text": " If <code>sequencesCountBuffer</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>sequencesCountBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-VkCmdProcessCommandsInfoNVX-sequencesIndexBuffer-parameter",
+          "text": " If <code>sequencesIndexBuffer</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>sequencesIndexBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-VkCmdProcessCommandsInfoNVX-indirectCommandsTokenCount-arraylength",
+          "text": " <code>indirectCommandsTokenCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkCmdProcessCommandsInfoNVX-commonparent",
+          "text": " Each of <code>indirectCommandsLayout</code>, <code>objectTable</code>, <code>sequencesCountBuffer</code>, <code>sequencesIndexBuffer</code>, and <code>targetCommandBuffer</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceSparseImageFormatProperties": {
+      "core": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-samples-01094",
+          "text": " <code>samples</code> <strong class=\"purple\">must</strong> be a bit value that is set in <code>VkImageFormatProperties</code>::<code>sampleCounts</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties</code> with <code>format</code>, <code>type</code>, <code>tiling</code>, and <code>usage</code> equal to those in this command and <code>flags</code> equal to the value that is set in <code>VkImageCreateInfo</code>::<code>flags</code> when the image is created"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-format-parameter",
+          "text": " <code>format</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFormat\">VkFormat</a> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-type-parameter",
+          "text": " <code>type</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageType\">VkImageType</a> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-samples-parameter",
+          "text": " <code>samples</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSampleCountFlagBits\">VkSampleCountFlagBits</a> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-usage-parameter",
+          "text": " <code>usage</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkImageUsageFlagBits\">VkImageUsageFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-usage-requiredbitmask",
+          "text": " <code>usage</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-tiling-parameter",
+          "text": " <code>tiling</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageTiling\">VkImageTiling</a> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-pPropertyCount-parameter",
+          "text": " <code>pPropertyCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-pProperties-parameter",
+          "text": " If the value referenced by <code>pPropertyCount</code> is not <code>0</code>, and <code>pProperties</code> is not <code>NULL</code>, <code>pProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pPropertyCount</code> <a href=\"#VkSparseImageFormatProperties\">VkSparseImageFormatProperties</a> structures"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceSparseImageFormatProperties2": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-pFormatInfo-parameter",
+          "text": " <code>pFormatInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkPhysicalDeviceSparseImageFormatInfo2\">VkPhysicalDeviceSparseImageFormatInfo2</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-pPropertyCount-parameter",
+          "text": " <code>pPropertyCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-pProperties-parameter",
+          "text": " If the value referenced by <code>pPropertyCount</code> is not <code>0</code>, and <code>pProperties</code> is not <code>NULL</code>, <code>pProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pPropertyCount</code> <a href=\"#VkSparseImageFormatProperties2\">VkSparseImageFormatProperties2</a> structures"
+        }
+      ]
+    },
+    "VkPhysicalDeviceSparseImageFormatInfo2": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceSparseImageFormatInfo2-samples-01095",
+          "text": " <code>samples</code> <strong class=\"purple\">must</strong> be a bit value that is set in <code>VkImageFormatProperties</code>::<code>sampleCounts</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties</code> with <code>format</code>, <code>type</code>, <code>tiling</code>, and <code>usage</code> equal to those in this command and <code>flags</code> equal to the value that is set in <code>VkImageCreateInfo</code>::<code>flags</code> when the image is created"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceSparseImageFormatInfo2-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2</code>"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceSparseImageFormatInfo2-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceSparseImageFormatInfo2-format-parameter",
+          "text": " <code>format</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFormat\">VkFormat</a> value"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceSparseImageFormatInfo2-type-parameter",
+          "text": " <code>type</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageType\">VkImageType</a> value"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceSparseImageFormatInfo2-samples-parameter",
+          "text": " <code>samples</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSampleCountFlagBits\">VkSampleCountFlagBits</a> value"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceSparseImageFormatInfo2-usage-parameter",
+          "text": " <code>usage</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkImageUsageFlagBits\">VkImageUsageFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceSparseImageFormatInfo2-usage-requiredbitmask",
+          "text": " <code>usage</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceSparseImageFormatInfo2-tiling-parameter",
+          "text": " <code>tiling</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageTiling\">VkImageTiling</a> value"
+        }
+      ]
+    },
+    "VkSparseImageFormatProperties2": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [
+        {
+          "vuid": "VUID-VkSparseImageFormatProperties2-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2</code>"
+        },
+        {
+          "vuid": "VUID-VkSparseImageFormatProperties2-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "vkGetImageSparseMemoryRequirements": {
+      "core": [
+        {
+          "vuid": "VUID-vkGetImageSparseMemoryRequirements-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetImageSparseMemoryRequirements-image-parameter",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImage\">VkImage</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetImageSparseMemoryRequirements-pSparseMemoryRequirementCount-parameter",
+          "text": " <code>pSparseMemoryRequirementCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetImageSparseMemoryRequirements-pSparseMemoryRequirements-parameter",
+          "text": " If the value referenced by <code>pSparseMemoryRequirementCount</code> is not <code>0</code>, and <code>pSparseMemoryRequirements</code> is not <code>NULL</code>, <code>pSparseMemoryRequirements</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pSparseMemoryRequirementCount</code> <a href=\"#VkSparseImageMemoryRequirements\">VkSparseImageMemoryRequirements</a> structures"
+        },
+        {
+          "vuid": "VUID-vkGetImageSparseMemoryRequirements-image-parent",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        }
+      ]
+    },
+    "vkGetImageSparseMemoryRequirements2": {
+      "(VK_VERSION_1_1,VK_KHR_get_memory_requirements2)": [
+        {
+          "vuid": "VUID-vkGetImageSparseMemoryRequirements2-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetImageSparseMemoryRequirements2-pInfo-parameter",
+          "text": " <code>pInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkImageSparseMemoryRequirementsInfo2\">VkImageSparseMemoryRequirementsInfo2</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetImageSparseMemoryRequirements2-pSparseMemoryRequirementCount-parameter",
+          "text": " <code>pSparseMemoryRequirementCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetImageSparseMemoryRequirements2-pSparseMemoryRequirements-parameter",
+          "text": " If the value referenced by <code>pSparseMemoryRequirementCount</code> is not <code>0</code>, and <code>pSparseMemoryRequirements</code> is not <code>NULL</code>, <code>pSparseMemoryRequirements</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pSparseMemoryRequirementCount</code> <a href=\"#VkSparseImageMemoryRequirements2\">VkSparseImageMemoryRequirements2</a> structures"
+        }
+      ]
+    },
+    "VkImageSparseMemoryRequirementsInfo2": {
+      "(VK_VERSION_1_1,VK_KHR_get_memory_requirements2)": [
+        {
+          "vuid": "VUID-VkImageSparseMemoryRequirementsInfo2-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2</code>"
+        },
+        {
+          "vuid": "VUID-VkImageSparseMemoryRequirementsInfo2-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkImageSparseMemoryRequirementsInfo2-image-parameter",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImage\">VkImage</a> handle"
+        }
+      ]
+    },
+    "VkSparseImageMemoryRequirements2": {
+      "(VK_VERSION_1_1,VK_KHR_get_memory_requirements2)": [
+        {
+          "vuid": "VUID-VkSparseImageMemoryRequirements2-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2</code>"
+        },
+        {
+          "vuid": "VUID-VkSparseImageMemoryRequirements2-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "VkSparseMemoryBind": {
+      "core": [
+        {
+          "vuid": "VUID-VkSparseMemoryBind-memory-01096",
+          "text": " If <code>memory</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>memory</code> and <code>memoryOffset</code> <strong class=\"purple\">must</strong> match the memory requirements of the resource, as described in section <a href=\"#resources-association\">Resource Memory Association</a>"
+        },
+        {
+          "vuid": "VUID-VkSparseMemoryBind-memory-01097",
+          "text": " If <code>memory</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>memory</code> <strong class=\"purple\">must</strong> not have been created with a memory type that reports <code>VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT</code> bit set"
+        },
+        {
+          "vuid": "VUID-VkSparseMemoryBind-size-01098",
+          "text": " <code>size</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkSparseMemoryBind-resourceOffset-01099",
+          "text": " <code>resourceOffset</code> <strong class=\"purple\">must</strong> be less than the size of the resource"
+        },
+        {
+          "vuid": "VUID-VkSparseMemoryBind-size-01100",
+          "text": " <code>size</code> <strong class=\"purple\">must</strong> be less than or equal to the size of the resource minus <code>resourceOffset</code>"
+        },
+        {
+          "vuid": "VUID-VkSparseMemoryBind-memoryOffset-01101",
+          "text": " <code>memoryOffset</code> <strong class=\"purple\">must</strong> be less than the size of <code>memory</code>"
+        },
+        {
+          "vuid": "VUID-VkSparseMemoryBind-size-01102",
+          "text": " <code>size</code> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>memory</code> minus <code>memoryOffset</code>"
+        },
+        {
+          "vuid": "VUID-VkSparseMemoryBind-memory-parameter",
+          "text": " If <code>memory</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>memory</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDeviceMemory\">VkDeviceMemory</a> handle"
+        },
+        {
+          "vuid": "VUID-VkSparseMemoryBind-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkSparseMemoryBindFlagBits\">VkSparseMemoryBindFlagBits</a> values"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_external_memory)": [
+        {
+          "vuid": "VUID-VkSparseMemoryBind-memory-02730",
+          "text": " If <code>memory</code> was created with <a href=\"#VkExportMemoryAllocateInfo\">VkExportMemoryAllocateInfo</a>::<code>handleTypes</code> not equal to <code>0</code>, at least one handle type it contained <strong class=\"purple\">must</strong> also have been set in <a href=\"#VkExternalMemoryBufferCreateInfo\">VkExternalMemoryBufferCreateInfo</a>::<code>handleTypes</code> or <a href=\"#VkExternalMemoryImageCreateInfo\">VkExternalMemoryImageCreateInfo</a>::<code>handleTypes</code> when the resource was created."
+        },
+        {
+          "vuid": "VUID-VkSparseMemoryBind-memory-02731",
+          "text": " If <code>memory</code> was created by a memory import operation, the external handle type of the imported memory <strong class=\"purple\">must</strong> also have been set in <a href=\"#VkExternalMemoryBufferCreateInfo\">VkExternalMemoryBufferCreateInfo</a>::<code>handleTypes</code> or <a href=\"#VkExternalMemoryImageCreateInfo\">VkExternalMemoryImageCreateInfo</a>::<code>handleTypes</code> when the resource was created."
+        }
+      ]
+    },
+    "VkSparseBufferMemoryBindInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkSparseBufferMemoryBindInfo-buffer-parameter",
+          "text": " <code>buffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-VkSparseBufferMemoryBindInfo-pBinds-parameter",
+          "text": " <code>pBinds</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>bindCount</code> valid <a href=\"#VkSparseMemoryBind\">VkSparseMemoryBind</a> structures"
+        },
+        {
+          "vuid": "VUID-VkSparseBufferMemoryBindInfo-bindCount-arraylength",
+          "text": " <code>bindCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkSparseImageOpaqueMemoryBindInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkSparseImageOpaqueMemoryBindInfo-pBinds-01103",
+          "text": " If the <code>flags</code> member of any element of <code>pBinds</code> contains <code>VK_SPARSE_MEMORY_BIND_METADATA_BIT</code>, the binding range defined <strong class=\"purple\">must</strong> be within the mip tail region of the metadata aspect of <code>image</code>"
+        },
+        {
+          "vuid": "VUID-VkSparseImageOpaqueMemoryBindInfo-image-parameter",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImage\">VkImage</a> handle"
+        },
+        {
+          "vuid": "VUID-VkSparseImageOpaqueMemoryBindInfo-pBinds-parameter",
+          "text": " <code>pBinds</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>bindCount</code> valid <a href=\"#VkSparseMemoryBind\">VkSparseMemoryBind</a> structures"
+        },
+        {
+          "vuid": "VUID-VkSparseImageOpaqueMemoryBindInfo-bindCount-arraylength",
+          "text": " <code>bindCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkSparseImageMemoryBindInfo": {
+      "core": [
+        {
+          "vuid": "VUID-VkSparseImageMemoryBindInfo-subresource-01722",
+          "text": " The <code>subresource.mipLevel</code> member of each element of <code>pBinds</code> <strong class=\"purple\">must</strong> be less than the <code>mipLevels</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>image</code> was created"
+        },
+        {
+          "vuid": "VUID-VkSparseImageMemoryBindInfo-subresource-01723",
+          "text": " The <code>subresource.arrayLayer</code> member of each element of <code>pBinds</code> <strong class=\"purple\">must</strong> be less than the <code>arrayLayers</code> specified in <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a> when <code>image</code> was created"
+        },
+        {
+          "vuid": "VUID-VkSparseImageMemoryBindInfo-image-parameter",
+          "text": " <code>image</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImage\">VkImage</a> handle"
+        },
+        {
+          "vuid": "VUID-VkSparseImageMemoryBindInfo-pBinds-parameter",
+          "text": " <code>pBinds</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>bindCount</code> valid <a href=\"#VkSparseImageMemoryBind\">VkSparseImageMemoryBind</a> structures"
+        },
+        {
+          "vuid": "VUID-VkSparseImageMemoryBindInfo-bindCount-arraylength",
+          "text": " <code>bindCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkSparseImageMemoryBind": {
+      "core": [
+        {
+          "vuid": "VUID-VkSparseImageMemoryBind-memory-01104",
+          "text": " If the <a href=\"#features-sparseResidencyAliased\">sparse aliased residency</a> feature is not enabled, and if any other resources are bound to ranges of <code>memory</code>, the range of <code>memory</code> being bound <strong class=\"purple\">must</strong> not overlap with those bound ranges"
+        },
+        {
+          "vuid": "VUID-VkSparseImageMemoryBind-memory-01105",
+          "text": " <code>memory</code> and <code>memoryOffset</code> <strong class=\"purple\">must</strong> match the memory requirements of the calling command&#8217;s <code>image</code>, as described in section <a href=\"#resources-association\">Resource Memory Association</a>"
+        },
+        {
+          "vuid": "VUID-VkSparseImageMemoryBind-subresource-01106",
+          "text": " <code>subresource</code> <strong class=\"purple\">must</strong> be a valid image subresource for <code>image</code> (see <a href=\"#resources-image-views\">Image Views</a>)"
+        },
+        {
+          "vuid": "VUID-VkSparseImageMemoryBind-offset-01107",
+          "text": " <code>offset.x</code> <strong class=\"purple\">must</strong> be a multiple of the sparse image block width (<code>VkSparseImageFormatProperties</code>::<code>imageGranularity.width</code>) of the image"
+        },
+        {
+          "vuid": "VUID-VkSparseImageMemoryBind-extent-01108",
+          "text": " <code>extent.width</code> <strong class=\"purple\">must</strong> either be a multiple of the sparse image block width of the image, or else <span class=\"eq\">(<code>extent.width</code> &#43; <code>offset.x</code>)</span> <strong class=\"purple\">must</strong> equal the width of the image subresource"
+        },
+        {
+          "vuid": "VUID-VkSparseImageMemoryBind-offset-01109",
+          "text": " <code>offset.y</code> <strong class=\"purple\">must</strong> be a multiple of the sparse image block height (<code>VkSparseImageFormatProperties</code>::<code>imageGranularity.height</code>) of the image"
+        },
+        {
+          "vuid": "VUID-VkSparseImageMemoryBind-extent-01110",
+          "text": " <code>extent.height</code> <strong class=\"purple\">must</strong> either be a multiple of the sparse image block height of the image, or else <span class=\"eq\">(<code>extent.height</code> &#43; <code>offset.y</code>)</span> <strong class=\"purple\">must</strong> equal the height of the image subresource"
+        },
+        {
+          "vuid": "VUID-VkSparseImageMemoryBind-offset-01111",
+          "text": " <code>offset.z</code> <strong class=\"purple\">must</strong> be a multiple of the sparse image block depth (<code>VkSparseImageFormatProperties</code>::<code>imageGranularity.depth</code>) of the image"
+        },
+        {
+          "vuid": "VUID-VkSparseImageMemoryBind-extent-01112",
+          "text": " <code>extent.depth</code> <strong class=\"purple\">must</strong> either be a multiple of the sparse image block depth of the image, or else <span class=\"eq\">(<code>extent.depth</code> &#43; <code>offset.z</code>)</span> <strong class=\"purple\">must</strong> equal the depth of the image subresource"
+        },
+        {
+          "vuid": "VUID-VkSparseImageMemoryBind-subresource-parameter",
+          "text": " <code>subresource</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageSubresource\">VkImageSubresource</a> structure"
+        },
+        {
+          "vuid": "VUID-VkSparseImageMemoryBind-memory-parameter",
+          "text": " If <code>memory</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>memory</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDeviceMemory\">VkDeviceMemory</a> handle"
+        },
+        {
+          "vuid": "VUID-VkSparseImageMemoryBind-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkSparseMemoryBindFlagBits\">VkSparseMemoryBindFlagBits</a> values"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_external_memory)": [
+        {
+          "vuid": "VUID-VkSparseImageMemoryBind-memory-02732",
+          "text": " If <code>memory</code> was created with <a href=\"#VkExportMemoryAllocateInfo\">VkExportMemoryAllocateInfo</a>::<code>handleTypes</code> not equal to <code>0</code>, at least one handle type it contained <strong class=\"purple\">must</strong> also have been set in <a href=\"#VkExternalMemoryImageCreateInfo\">VkExternalMemoryImageCreateInfo</a>::<code>handleTypes</code> when the image was created."
+        },
+        {
+          "vuid": "VUID-VkSparseImageMemoryBind-memory-02733",
+          "text": " If <code>memory</code> was created by a memory import operation, the external handle type of the imported memory <strong class=\"purple\">must</strong> also have been set in <a href=\"#VkExternalMemoryImageCreateInfo\">VkExternalMemoryImageCreateInfo</a>::<code>handleTypes</code> when <code>image</code> was created."
+        }
+      ]
+    },
+    "vkQueueBindSparse": {
+      "core": [
+        {
+          "vuid": "VUID-vkQueueBindSparse-fence-01113",
+          "text": " If <code>fence</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>fence</code> <strong class=\"purple\">must</strong> be unsignaled"
+        },
+        {
+          "vuid": "VUID-vkQueueBindSparse-fence-01114",
+          "text": " If <code>fence</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>fence</code> <strong class=\"purple\">must</strong> not be associated with any other queue command that has not yet completed execution on that queue"
+        },
+        {
+          "vuid": "VUID-vkQueueBindSparse-pSignalSemaphores-01115",
+          "text": " Each element of the <code>pSignalSemaphores</code> member of each element of <code>pBindInfo</code> <strong class=\"purple\">must</strong> be unsignaled when the semaphore signal operation it defines is executed on the device"
+        },
+        {
+          "vuid": "VUID-vkQueueBindSparse-pWaitSemaphores-01116",
+          "text": " When a semaphore wait operation referring to a binary semaphore defined by any element of the <code>pWaitSemaphores</code> member of any element of <code>pBindInfo</code> executes on <code>queue</code>, there <strong class=\"purple\">must</strong> be no other queues waiting on the same semaphore."
+        },
+        {
+          "vuid": "VUID-vkQueueBindSparse-pWaitSemaphores-01117",
+          "text": " All elements of the <code>pWaitSemaphores</code> member of all elements of <code>pBindInfo</code> member referring to a binary semaphore <strong class=\"purple\">must</strong> be semaphores that are signaled, or have <a href=\"#synchronization-semaphores-signaling\">semaphore signal operations</a> previously submitted for execution."
+        },
+        {
+          "vuid": "VUID-vkQueueBindSparse-queue-parameter",
+          "text": " <code>queue</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkQueue\">VkQueue</a> handle"
+        },
+        {
+          "vuid": "VUID-vkQueueBindSparse-pBindInfo-parameter",
+          "text": " If <code>bindInfoCount</code> is not <code>0</code>, <code>pBindInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>bindInfoCount</code> valid <a href=\"#VkBindSparseInfo\">VkBindSparseInfo</a> structures"
+        },
+        {
+          "vuid": "VUID-vkQueueBindSparse-fence-parameter",
+          "text": " If <code>fence</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>fence</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFence\">VkFence</a> handle"
+        },
+        {
+          "vuid": "VUID-vkQueueBindSparse-queuetype",
+          "text": " The <code>queue</code> <strong class=\"purple\">must</strong> support sparse binding operations"
+        },
+        {
+          "vuid": "VUID-vkQueueBindSparse-commonparent",
+          "text": " Both of <code>fence</code>, and <code>queue</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_KHR_timeline_semaphore)": [
+        {
+          "vuid": "VUID-vkQueueBindSparse-pWaitSemaphores-03245",
+          "text": " All elements of the <code>pWaitSemaphores</code> member of all elements of <code>pBindInfo</code> created with a <a href=\"#VkSemaphoreTypeKHR\">VkSemaphoreTypeKHR</a> of <code>VK_SEMAPHORE_TYPE_BINARY_KHR</code> <strong class=\"purple\">must</strong> reference a semaphore signal operation that has been submitted for execution and any semaphore signal operations on which it depends (if any) <strong class=\"purple\">must</strong> have also been submitted for execution."
+        }
+      ]
+    },
+    "VkBindSparseInfo": {
+      "(VK_KHR_timeline_semaphore)": [
+        {
+          "vuid": "VUID-VkBindSparseInfo-pWaitSemaphores-03246",
+          "text": " If any element of <code>pWaitSemaphores</code> or <code>pSignalSemaphores</code> was created with a <a href=\"#VkSemaphoreTypeKHR\">VkSemaphoreTypeKHR</a> of <code>VK_SEMAPHORE_TYPE_TIMELINE_KHR</code> then the <code>pNext</code> chain <strong class=\"purple\">must</strong> include a <a href=\"#VkTimelineSemaphoreSubmitInfoKHR\">VkTimelineSemaphoreSubmitInfoKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-VkBindSparseInfo-pNext-03247",
+          "text": " If the <code>pNext</code> chain of this structure includes a <a href=\"#VkTimelineSemaphoreSubmitInfoKHR\">VkTimelineSemaphoreSubmitInfoKHR</a> structure and any element of <code>pWaitSemaphores</code> was created with a <a href=\"#VkSemaphoreTypeKHR\">VkSemaphoreTypeKHR</a> of <code>VK_SEMAPHORE_TYPE_TIMELINE_KHR</code> then its <code>waitSemaphoreValueCount</code> member <strong class=\"purple\">must</strong> equal <code>waitSemaphoreCount</code>"
+        },
+        {
+          "vuid": "VUID-VkBindSparseInfo-pNext-03248",
+          "text": " If the <code>pNext</code> chain of this structure includes a <a href=\"#VkTimelineSemaphoreSubmitInfoKHR\">VkTimelineSemaphoreSubmitInfoKHR</a> structure and any element of <code>pSignalSemaphores</code> was created with a <a href=\"#VkSemaphoreTypeKHR\">VkSemaphoreTypeKHR</a> of <code>VK_SEMAPHORE_TYPE_TIMELINE_KHR</code> then its <code>signalSemaphoreValueCount</code> member <strong class=\"purple\">must</strong> equal <code>signalSemaphoreCount</code>"
+        },
+        {
+          "vuid": "VUID-VkBindSparseInfo-pSignalSemaphores-03249",
+          "text": " For each element of <code>pSignalSemaphores</code> created with a <a href=\"#VkSemaphoreTypeKHR\">VkSemaphoreTypeKHR</a> of <code>VK_SEMAPHORE_TYPE_TIMELINE_KHR</code> the corresponding element of <a href=\"#VkTimelineSemaphoreSubmitInfoKHR\">VkTimelineSemaphoreSubmitInfoKHR</a>::pSignalSemaphoreValues <strong class=\"purple\">must</strong> have a value greater than the current value of the semaphore when the <a href=\"#synchronization-semaphores-signaling\">semaphore signal operation</a> is executed"
+        },
+        {
+          "vuid": "VUID-VkBindSparseInfo-pWaitSemaphores-03250",
+          "text": " For each element of <code>pWaitSemaphores</code> created with a <a href=\"#VkSemaphoreTypeKHR\">VkSemaphoreTypeKHR</a> of <code>VK_SEMAPHORE_TYPE_TIMELINE_KHR</code> the corresponding element of <a href=\"#VkTimelineSemaphoreSubmitInfoKHR\">VkTimelineSemaphoreSubmitInfoKHR</a>::pWaitSemaphoreValues <strong class=\"purple\">must</strong> have a value which does not differ from the current value of the semaphore or from the value of any outstanding semaphore wait or signal operation on that semaphore by more than <a href=\"#limits-maxTimelineSemaphoreValueDifference\"><code>maxTimelineSemaphoreValueDifference</code></a>."
+        },
+        {
+          "vuid": "VUID-VkBindSparseInfo-pSignalSemaphores-03251",
+          "text": " For each element of <code>pSignalSemaphores</code> created with a <a href=\"#VkSemaphoreTypeKHR\">VkSemaphoreTypeKHR</a> of <code>VK_SEMAPHORE_TYPE_TIMELINE_KHR</code> the corresponding element of <a href=\"#VkTimelineSemaphoreSubmitInfoKHR\">VkTimelineSemaphoreSubmitInfoKHR</a>::pSignalSemaphoreValues <strong class=\"purple\">must</strong> have a value which does not differ from the current value of the semaphore or from the value of any outstanding semaphore wait or signal operation on that semaphore by more than <a href=\"#limits-maxTimelineSemaphoreValueDifference\"><code>maxTimelineSemaphoreValueDifference</code></a>."
+        }
+      ],
+      "core": [
+        {
+          "vuid": "VUID-VkBindSparseInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_BIND_SPARSE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkBindSparseInfo-pNext-pNext",
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkDeviceGroupBindSparseInfo\">VkDeviceGroupBindSparseInfo</a> or <a href=\"#VkTimelineSemaphoreSubmitInfoKHR\">VkTimelineSemaphoreSubmitInfoKHR</a>"
+        },
+        {
+          "vuid": "VUID-VkBindSparseInfo-sType-unique",
+          "text": " Each <code>sType</code> member in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
+        },
+        {
+          "vuid": "VUID-VkBindSparseInfo-pWaitSemaphores-parameter",
+          "text": " If <code>waitSemaphoreCount</code> is not <code>0</code>, <code>pWaitSemaphores</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>waitSemaphoreCount</code> valid <a href=\"#VkSemaphore\">VkSemaphore</a> handles"
+        },
+        {
+          "vuid": "VUID-VkBindSparseInfo-pBufferBinds-parameter",
+          "text": " If <code>bufferBindCount</code> is not <code>0</code>, <code>pBufferBinds</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>bufferBindCount</code> valid <a href=\"#VkSparseBufferMemoryBindInfo\">VkSparseBufferMemoryBindInfo</a> structures"
+        },
+        {
+          "vuid": "VUID-VkBindSparseInfo-pImageOpaqueBinds-parameter",
+          "text": " If <code>imageOpaqueBindCount</code> is not <code>0</code>, <code>pImageOpaqueBinds</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>imageOpaqueBindCount</code> valid <a href=\"#VkSparseImageOpaqueMemoryBindInfo\">VkSparseImageOpaqueMemoryBindInfo</a> structures"
+        },
+        {
+          "vuid": "VUID-VkBindSparseInfo-pImageBinds-parameter",
+          "text": " If <code>imageBindCount</code> is not <code>0</code>, <code>pImageBinds</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>imageBindCount</code> valid <a href=\"#VkSparseImageMemoryBindInfo\">VkSparseImageMemoryBindInfo</a> structures"
+        },
+        {
+          "vuid": "VUID-VkBindSparseInfo-pSignalSemaphores-parameter",
+          "text": " If <code>signalSemaphoreCount</code> is not <code>0</code>, <code>pSignalSemaphores</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>signalSemaphoreCount</code> valid <a href=\"#VkSemaphore\">VkSemaphore</a> handles"
+        },
+        {
+          "vuid": "VUID-VkBindSparseInfo-commonparent",
+          "text": " Both of the elements of <code>pSignalSemaphores</code>, and the elements of <code>pWaitSemaphores</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
+    "VkDeviceGroupBindSparseInfo": {
+      "(VK_VERSION_1_1,VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-VkDeviceGroupBindSparseInfo-resourceDeviceIndex-01118",
+          "text": " <code>resourceDeviceIndex</code> and <code>memoryDeviceIndex</code> <strong class=\"purple\">must</strong> both be valid device indices."
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupBindSparseInfo-memoryDeviceIndex-01119",
+          "text": " Each memory allocation bound in this batch <strong class=\"purple\">must</strong> have allocated an instance for <code>memoryDeviceIndex</code>."
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupBindSparseInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO</code>"
+        }
+      ]
+    },
+    "vkCreateAndroidSurfaceKHR": {
+      "(VK_KHR_surface)+(VK_KHR_android_surface)": [
+        {
+          "vuid": "VUID-vkCreateAndroidSurfaceKHR-instance-parameter",
+          "text": " <code>instance</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkInstance\">VkInstance</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateAndroidSurfaceKHR-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAndroidSurfaceCreateInfoKHR\">VkAndroidSurfaceCreateInfoKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateAndroidSurfaceKHR-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateAndroidSurfaceKHR-pSurface-parameter",
+          "text": " <code>pSurface</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkSurfaceKHR\">VkSurfaceKHR</a> handle"
+        }
+      ]
+    },
+    "VkAndroidSurfaceCreateInfoKHR": {
+      "(VK_KHR_surface)+(VK_KHR_android_surface)": [
+        {
+          "vuid": "VUID-VkAndroidSurfaceCreateInfoKHR-window-01248",
+          "text": " <code>window</code> <strong class=\"purple\">must</strong> point to a valid Android <a href=\"#ANativeWindow\">ANativeWindow</a>."
+        },
+        {
+          "vuid": "VUID-VkAndroidSurfaceCreateInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkAndroidSurfaceCreateInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkAndroidSurfaceCreateInfoKHR-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        }
+      ]
+    },
+    "vkCreateWaylandSurfaceKHR": {
+      "(VK_KHR_surface)+(VK_KHR_wayland_surface)": [
+        {
+          "vuid": "VUID-vkCreateWaylandSurfaceKHR-instance-parameter",
+          "text": " <code>instance</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkInstance\">VkInstance</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateWaylandSurfaceKHR-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkWaylandSurfaceCreateInfoKHR\">VkWaylandSurfaceCreateInfoKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateWaylandSurfaceKHR-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateWaylandSurfaceKHR-pSurface-parameter",
+          "text": " <code>pSurface</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkSurfaceKHR\">VkSurfaceKHR</a> handle"
+        }
+      ]
+    },
+    "VkWaylandSurfaceCreateInfoKHR": {
+      "(VK_KHR_surface)+(VK_KHR_wayland_surface)": [
+        {
+          "vuid": "VUID-VkWaylandSurfaceCreateInfoKHR-display-01304",
+          "text": " <code>display</code> <strong class=\"purple\">must</strong> point to a valid Wayland <code>wl_display</code>."
+        },
+        {
+          "vuid": "VUID-VkWaylandSurfaceCreateInfoKHR-surface-01305",
+          "text": " <code>surface</code> <strong class=\"purple\">must</strong> point to a valid Wayland <code>wl_surface</code>."
+        },
+        {
+          "vuid": "VUID-VkWaylandSurfaceCreateInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkWaylandSurfaceCreateInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkWaylandSurfaceCreateInfoKHR-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        }
+      ]
+    },
+    "vkCreateWin32SurfaceKHR": {
+      "(VK_KHR_surface)+(VK_KHR_win32_surface)": [
+        {
+          "vuid": "VUID-vkCreateWin32SurfaceKHR-instance-parameter",
+          "text": " <code>instance</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkInstance\">VkInstance</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateWin32SurfaceKHR-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkWin32SurfaceCreateInfoKHR\">VkWin32SurfaceCreateInfoKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateWin32SurfaceKHR-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateWin32SurfaceKHR-pSurface-parameter",
+          "text": " <code>pSurface</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkSurfaceKHR\">VkSurfaceKHR</a> handle"
+        }
+      ]
+    },
+    "VkWin32SurfaceCreateInfoKHR": {
+      "(VK_KHR_surface)+(VK_KHR_win32_surface)": [
+        {
+          "vuid": "VUID-VkWin32SurfaceCreateInfoKHR-hinstance-01307",
+          "text": " <code>hinstance</code> <strong class=\"purple\">must</strong> be a valid Win32 <code>HINSTANCE</code>."
+        },
+        {
+          "vuid": "VUID-VkWin32SurfaceCreateInfoKHR-hwnd-01308",
+          "text": " <code>hwnd</code> <strong class=\"purple\">must</strong> be a valid Win32 <code>HWND</code>."
+        },
+        {
+          "vuid": "VUID-VkWin32SurfaceCreateInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkWin32SurfaceCreateInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkWin32SurfaceCreateInfoKHR-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        }
+      ]
+    },
+    "vkCreateXcbSurfaceKHR": {
+      "(VK_KHR_surface)+(VK_KHR_xcb_surface)": [
+        {
+          "vuid": "VUID-vkCreateXcbSurfaceKHR-instance-parameter",
+          "text": " <code>instance</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkInstance\">VkInstance</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateXcbSurfaceKHR-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkXcbSurfaceCreateInfoKHR\">VkXcbSurfaceCreateInfoKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateXcbSurfaceKHR-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateXcbSurfaceKHR-pSurface-parameter",
+          "text": " <code>pSurface</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkSurfaceKHR\">VkSurfaceKHR</a> handle"
+        }
+      ]
+    },
+    "VkXcbSurfaceCreateInfoKHR": {
+      "(VK_KHR_surface)+(VK_KHR_xcb_surface)": [
+        {
+          "vuid": "VUID-VkXcbSurfaceCreateInfoKHR-connection-01310",
+          "text": " <code>connection</code> <strong class=\"purple\">must</strong> point to a valid X11 <code>xcb_connection_t</code>."
+        },
+        {
+          "vuid": "VUID-VkXcbSurfaceCreateInfoKHR-window-01311",
+          "text": " <code>window</code> <strong class=\"purple\">must</strong> be a valid X11 <code>xcb_window_t</code>."
+        },
+        {
+          "vuid": "VUID-VkXcbSurfaceCreateInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkXcbSurfaceCreateInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkXcbSurfaceCreateInfoKHR-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        }
+      ]
+    },
+    "vkCreateXlibSurfaceKHR": {
+      "(VK_KHR_surface)+(VK_KHR_xlib_surface)": [
+        {
+          "vuid": "VUID-vkCreateXlibSurfaceKHR-instance-parameter",
+          "text": " <code>instance</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkInstance\">VkInstance</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateXlibSurfaceKHR-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkXlibSurfaceCreateInfoKHR\">VkXlibSurfaceCreateInfoKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateXlibSurfaceKHR-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateXlibSurfaceKHR-pSurface-parameter",
+          "text": " <code>pSurface</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkSurfaceKHR\">VkSurfaceKHR</a> handle"
+        }
+      ]
+    },
+    "VkXlibSurfaceCreateInfoKHR": {
+      "(VK_KHR_surface)+(VK_KHR_xlib_surface)": [
+        {
+          "vuid": "VUID-VkXlibSurfaceCreateInfoKHR-dpy-01313",
+          "text": " <code>dpy</code> <strong class=\"purple\">must</strong> point to a valid Xlib <code>Display</code>."
+        },
+        {
+          "vuid": "VUID-VkXlibSurfaceCreateInfoKHR-window-01314",
+          "text": " <code>window</code> <strong class=\"purple\">must</strong> be a valid Xlib <code>Window</code>."
+        },
+        {
+          "vuid": "VUID-VkXlibSurfaceCreateInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkXlibSurfaceCreateInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkXlibSurfaceCreateInfoKHR-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        }
+      ]
+    },
+    "vkCreateImagePipeSurfaceFUCHSIA": {
+      "(VK_KHR_surface)+(VK_FUCHSIA_imagepipe_surface)": [
+        {
+          "vuid": "VUID-vkCreateImagePipeSurfaceFUCHSIA-instance-parameter",
+          "text": " <code>instance</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkInstance\">VkInstance</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateImagePipeSurfaceFUCHSIA-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkImagePipeSurfaceCreateInfoFUCHSIA\">VkImagePipeSurfaceCreateInfoFUCHSIA</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateImagePipeSurfaceFUCHSIA-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateImagePipeSurfaceFUCHSIA-pSurface-parameter",
+          "text": " <code>pSurface</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkSurfaceKHR\">VkSurfaceKHR</a> handle"
+        }
+      ]
+    },
+    "VkImagePipeSurfaceCreateInfoFUCHSIA": {
+      "(VK_KHR_surface)+(VK_FUCHSIA_imagepipe_surface)": [
+        {
+          "vuid": "VUID-VkImagePipeSurfaceCreateInfoFUCHSIA-imagePipeHandle-00000",
+          "text": " <code>imagePipeHandle</code> <strong class=\"purple\">must</strong> be a valid <code>zx_handle_t</code>"
+        },
+        {
+          "vuid": "VUID-VkImagePipeSurfaceCreateInfoFUCHSIA-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA</code>"
+        },
+        {
+          "vuid": "VUID-VkImagePipeSurfaceCreateInfoFUCHSIA-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkImagePipeSurfaceCreateInfoFUCHSIA-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        }
+      ]
+    },
+    "vkCreateStreamDescriptorSurfaceGGP": {
+      "(VK_KHR_surface)+(VK_GGP_stream_descriptor_surface)": [
+        {
+          "vuid": "VUID-vkCreateStreamDescriptorSurfaceGGP-instance-parameter",
+          "text": " <code>instance</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkInstance\">VkInstance</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateStreamDescriptorSurfaceGGP-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkStreamDescriptorSurfaceCreateInfoGGP\">VkStreamDescriptorSurfaceCreateInfoGGP</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateStreamDescriptorSurfaceGGP-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateStreamDescriptorSurfaceGGP-pSurface-parameter",
+          "text": " <code>pSurface</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkSurfaceKHR\">VkSurfaceKHR</a> handle"
+        }
+      ]
+    },
+    "VkStreamDescriptorSurfaceCreateInfoGGP": {
+      "(VK_KHR_surface)+(VK_GGP_stream_descriptor_surface)": [
+        {
+          "vuid": "VUID-VkStreamDescriptorSurfaceCreateInfoGGP-streamDescriptor-02681",
+          "text": " <code>streamDescriptor</code> <strong class=\"purple\">must</strong> be a valid <code>GgpStreamDescriptor</code>"
+        },
+        {
+          "vuid": "VUID-VkStreamDescriptorSurfaceCreateInfoGGP-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP</code>"
+        },
+        {
+          "vuid": "VUID-VkStreamDescriptorSurfaceCreateInfoGGP-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkStreamDescriptorSurfaceCreateInfoGGP-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        }
+      ]
+    },
+    "vkCreateIOSSurfaceMVK": {
+      "(VK_KHR_surface)+(VK_MVK_ios_surface)": [
+        {
+          "vuid": "VUID-vkCreateIOSSurfaceMVK-instance-parameter",
+          "text": " <code>instance</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkInstance\">VkInstance</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateIOSSurfaceMVK-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkIOSSurfaceCreateInfoMVK\">VkIOSSurfaceCreateInfoMVK</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateIOSSurfaceMVK-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateIOSSurfaceMVK-pSurface-parameter",
+          "text": " <code>pSurface</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkSurfaceKHR\">VkSurfaceKHR</a> handle"
+        }
+      ]
+    },
+    "VkIOSSurfaceCreateInfoMVK": {
+      "(VK_KHR_surface)+(VK_MVK_ios_surface)": [
+        {
+          "vuid": "VUID-VkIOSSurfaceCreateInfoMVK-pView-01316",
+          "text": " <code>pView</code> <strong class=\"purple\">must</strong> be a valid <code>UIView</code> and <strong class=\"purple\">must</strong> be backed by a <code>CALayer</code> instance of type <a href=\"#CAMetalLayer\">CAMetalLayer</a>."
+        },
+        {
+          "vuid": "VUID-VkIOSSurfaceCreateInfoMVK-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK</code>"
+        },
+        {
+          "vuid": "VUID-VkIOSSurfaceCreateInfoMVK-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkIOSSurfaceCreateInfoMVK-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        }
+      ]
+    },
+    "vkCreateMacOSSurfaceMVK": {
+      "(VK_KHR_surface)+(VK_MVK_macos_surface)": [
+        {
+          "vuid": "VUID-vkCreateMacOSSurfaceMVK-instance-parameter",
+          "text": " <code>instance</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkInstance\">VkInstance</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateMacOSSurfaceMVK-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkMacOSSurfaceCreateInfoMVK\">VkMacOSSurfaceCreateInfoMVK</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateMacOSSurfaceMVK-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateMacOSSurfaceMVK-pSurface-parameter",
+          "text": " <code>pSurface</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkSurfaceKHR\">VkSurfaceKHR</a> handle"
+        }
+      ]
+    },
+    "VkMacOSSurfaceCreateInfoMVK": {
+      "(VK_KHR_surface)+(VK_MVK_macos_surface)": [
+        {
+          "vuid": "VUID-VkMacOSSurfaceCreateInfoMVK-pView-01317",
+          "text": " <code>pView</code> <strong class=\"purple\">must</strong> be a valid <code>NSView</code> and <strong class=\"purple\">must</strong> be backed by a <code>CALayer</code> instance of type <a href=\"#CAMetalLayer\">CAMetalLayer</a>."
+        },
+        {
+          "vuid": "VUID-VkMacOSSurfaceCreateInfoMVK-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK</code>"
+        },
+        {
+          "vuid": "VUID-VkMacOSSurfaceCreateInfoMVK-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkMacOSSurfaceCreateInfoMVK-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        }
+      ]
+    },
+    "vkCreateViSurfaceNN": {
+      "(VK_KHR_surface)+(VK_NN_vi_surface)": [
+        {
+          "vuid": "VUID-vkCreateViSurfaceNN-instance-parameter",
+          "text": " <code>instance</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkInstance\">VkInstance</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateViSurfaceNN-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkViSurfaceCreateInfoNN\">VkViSurfaceCreateInfoNN</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateViSurfaceNN-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateViSurfaceNN-pSurface-parameter",
+          "text": " <code>pSurface</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkSurfaceKHR\">VkSurfaceKHR</a> handle"
+        }
+      ]
+    },
+    "VkViSurfaceCreateInfoNN": {
+      "(VK_KHR_surface)+(VK_NN_vi_surface)": [
+        {
+          "vuid": "VUID-VkViSurfaceCreateInfoNN-window-01318",
+          "text": " <code>window</code> <strong class=\"purple\">must</strong> be a valid <code>nn</code>::<code>vi</code>::<code>NativeWindowHandle</code>"
+        },
+        {
+          "vuid": "VUID-VkViSurfaceCreateInfoNN-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN</code>"
+        },
+        {
+          "vuid": "VUID-VkViSurfaceCreateInfoNN-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkViSurfaceCreateInfoNN-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        }
+      ]
+    },
+    "vkCreateMetalSurfaceEXT": {
+      "(VK_KHR_surface)+(VK_EXT_metal_surface)": [
+        {
+          "vuid": "VUID-vkCreateMetalSurfaceEXT-instance-parameter",
+          "text": " <code>instance</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkInstance\">VkInstance</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateMetalSurfaceEXT-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkMetalSurfaceCreateInfoEXT\">VkMetalSurfaceCreateInfoEXT</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateMetalSurfaceEXT-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateMetalSurfaceEXT-pSurface-parameter",
+          "text": " <code>pSurface</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkSurfaceKHR\">VkSurfaceKHR</a> handle"
+        }
+      ]
+    },
+    "VkMetalSurfaceCreateInfoEXT": {
+      "(VK_KHR_surface)+(VK_EXT_metal_surface)": [
+        {
+          "vuid": "VUID-VkMetalSurfaceCreateInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkMetalSurfaceCreateInfoEXT-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkMetalSurfaceCreateInfoEXT-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        }
+      ]
+    },
+    "vkDestroySurfaceKHR": {
+      "(VK_KHR_surface)": [
+        {
+          "vuid": "VUID-vkDestroySurfaceKHR-surface-01266",
+          "text": " All <code>VkSwapchainKHR</code> objects created for <code>surface</code> <strong class=\"purple\">must</strong> have been destroyed prior to destroying <code>surface</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroySurfaceKHR-surface-01267",
+          "text": " If <code>VkAllocationCallbacks</code> were provided when <code>surface</code> was created, a compatible set of callbacks <strong class=\"purple\">must</strong> be provided here"
+        },
+        {
+          "vuid": "VUID-vkDestroySurfaceKHR-surface-01268",
+          "text": " If no <code>VkAllocationCallbacks</code> were provided when <code>surface</code> was created, <code>pAllocator</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroySurfaceKHR-instance-parameter",
+          "text": " <code>instance</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkInstance\">VkInstance</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroySurfaceKHR-surface-parameter",
+          "text": " If <code>surface</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>surface</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSurfaceKHR\">VkSurfaceKHR</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroySurfaceKHR-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkDestroySurfaceKHR-surface-parent",
+          "text": " If <code>surface</code> is a valid handle, it <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>instance</code>"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceDisplayPropertiesKHR": {
+      "(VK_KHR_surface)+(VK_KHR_display)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceDisplayPropertiesKHR-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceDisplayPropertiesKHR-pPropertyCount-parameter",
+          "text": " <code>pPropertyCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceDisplayPropertiesKHR-pProperties-parameter",
+          "text": " If the value referenced by <code>pPropertyCount</code> is not <code>0</code>, and <code>pProperties</code> is not <code>NULL</code>, <code>pProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pPropertyCount</code> <a href=\"#VkDisplayPropertiesKHR\">VkDisplayPropertiesKHR</a> structures"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceDisplayProperties2KHR": {
+      "(VK_KHR_surface)+(VK_KHR_display)+(VK_KHR_get_display_properties2)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceDisplayProperties2KHR-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceDisplayProperties2KHR-pPropertyCount-parameter",
+          "text": " <code>pPropertyCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceDisplayProperties2KHR-pProperties-parameter",
+          "text": " If the value referenced by <code>pPropertyCount</code> is not <code>0</code>, and <code>pProperties</code> is not <code>NULL</code>, <code>pProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pPropertyCount</code> <a href=\"#VkDisplayProperties2KHR\">VkDisplayProperties2KHR</a> structures"
+        }
+      ]
+    },
+    "VkDisplayProperties2KHR": {
+      "(VK_KHR_surface)+(VK_KHR_display)+(VK_KHR_get_display_properties2)": [
+        {
+          "vuid": "VUID-VkDisplayProperties2KHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkDisplayProperties2KHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "vkAcquireXlibDisplayEXT": {
+      "(VK_KHR_surface)+(VK_KHR_display)+(VK_EXT_direct_mode_display)+(VK_EXT_acquire_xlib_display)": [
+        {
+          "vuid": "VUID-vkAcquireXlibDisplayEXT-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkAcquireXlibDisplayEXT-dpy-parameter",
+          "text": " <code>dpy</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>Display</code> value"
+        },
+        {
+          "vuid": "VUID-vkAcquireXlibDisplayEXT-display-parameter",
+          "text": " <code>display</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDisplayKHR\">VkDisplayKHR</a> handle"
+        }
+      ]
+    },
+    "vkGetRandROutputDisplayEXT": {
+      "(VK_KHR_surface)+(VK_KHR_display)+(VK_EXT_direct_mode_display)+(VK_EXT_acquire_xlib_display)": [
+        {
+          "vuid": "VUID-vkGetRandROutputDisplayEXT-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetRandROutputDisplayEXT-dpy-parameter",
+          "text": " <code>dpy</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>Display</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetRandROutputDisplayEXT-pDisplay-parameter",
+          "text": " <code>pDisplay</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkDisplayKHR\">VkDisplayKHR</a> handle"
+        }
+      ]
+    },
+    "vkReleaseDisplayEXT": {
+      "(VK_KHR_surface)+(VK_KHR_display)+(VK_EXT_direct_mode_display)": [
+        {
+          "vuid": "VUID-vkReleaseDisplayEXT-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkReleaseDisplayEXT-display-parameter",
+          "text": " <code>display</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDisplayKHR\">VkDisplayKHR</a> handle"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceDisplayPlanePropertiesKHR": {
+      "(VK_KHR_surface)+(VK_KHR_display)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceDisplayPlanePropertiesKHR-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceDisplayPlanePropertiesKHR-pPropertyCount-parameter",
+          "text": " <code>pPropertyCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceDisplayPlanePropertiesKHR-pProperties-parameter",
+          "text": " If the value referenced by <code>pPropertyCount</code> is not <code>0</code>, and <code>pProperties</code> is not <code>NULL</code>, <code>pProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pPropertyCount</code> <a href=\"#VkDisplayPlanePropertiesKHR\">VkDisplayPlanePropertiesKHR</a> structures"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceDisplayPlaneProperties2KHR": {
+      "(VK_KHR_surface)+(VK_KHR_display)+(VK_KHR_get_display_properties2)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceDisplayPlaneProperties2KHR-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceDisplayPlaneProperties2KHR-pPropertyCount-parameter",
+          "text": " <code>pPropertyCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceDisplayPlaneProperties2KHR-pProperties-parameter",
+          "text": " If the value referenced by <code>pPropertyCount</code> is not <code>0</code>, and <code>pProperties</code> is not <code>NULL</code>, <code>pProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pPropertyCount</code> <a href=\"#VkDisplayPlaneProperties2KHR\">VkDisplayPlaneProperties2KHR</a> structures"
+        }
+      ]
+    },
+    "VkDisplayPlaneProperties2KHR": {
+      "(VK_KHR_surface)+(VK_KHR_display)+(VK_KHR_get_display_properties2)": [
+        {
+          "vuid": "VUID-VkDisplayPlaneProperties2KHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkDisplayPlaneProperties2KHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "vkGetDisplayPlaneSupportedDisplaysKHR": {
+      "(VK_KHR_surface)+(VK_KHR_display)": [
+        {
+          "vuid": "VUID-vkGetDisplayPlaneSupportedDisplaysKHR-planeIndex-01249",
+          "text": " <code>planeIndex</code> <strong class=\"purple\">must</strong> be less than the number of display planes supported by the device as determined by calling <code>vkGetPhysicalDeviceDisplayPlanePropertiesKHR</code>"
+        },
+        {
+          "vuid": "VUID-vkGetDisplayPlaneSupportedDisplaysKHR-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetDisplayPlaneSupportedDisplaysKHR-pDisplayCount-parameter",
+          "text": " <code>pDisplayCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetDisplayPlaneSupportedDisplaysKHR-pDisplays-parameter",
+          "text": " If the value referenced by <code>pDisplayCount</code> is not <code>0</code>, and <code>pDisplays</code> is not <code>NULL</code>, <code>pDisplays</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pDisplayCount</code> <a href=\"#VkDisplayKHR\">VkDisplayKHR</a> handles"
+        }
+      ]
+    },
+    "vkGetDisplayModePropertiesKHR": {
+      "(VK_KHR_surface)+(VK_KHR_display)": [
+        {
+          "vuid": "VUID-vkGetDisplayModePropertiesKHR-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetDisplayModePropertiesKHR-display-parameter",
+          "text": " <code>display</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDisplayKHR\">VkDisplayKHR</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetDisplayModePropertiesKHR-pPropertyCount-parameter",
+          "text": " <code>pPropertyCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetDisplayModePropertiesKHR-pProperties-parameter",
+          "text": " If the value referenced by <code>pPropertyCount</code> is not <code>0</code>, and <code>pProperties</code> is not <code>NULL</code>, <code>pProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pPropertyCount</code> <a href=\"#VkDisplayModePropertiesKHR\">VkDisplayModePropertiesKHR</a> structures"
+        }
+      ]
+    },
+    "vkGetDisplayModeProperties2KHR": {
+      "(VK_KHR_surface)+(VK_KHR_display)+(VK_KHR_get_display_properties2)": [
+        {
+          "vuid": "VUID-vkGetDisplayModeProperties2KHR-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetDisplayModeProperties2KHR-display-parameter",
+          "text": " <code>display</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDisplayKHR\">VkDisplayKHR</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetDisplayModeProperties2KHR-pPropertyCount-parameter",
+          "text": " <code>pPropertyCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetDisplayModeProperties2KHR-pProperties-parameter",
+          "text": " If the value referenced by <code>pPropertyCount</code> is not <code>0</code>, and <code>pProperties</code> is not <code>NULL</code>, <code>pProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pPropertyCount</code> <a href=\"#VkDisplayModeProperties2KHR\">VkDisplayModeProperties2KHR</a> structures"
+        }
+      ]
+    },
+    "VkDisplayModeProperties2KHR": {
+      "(VK_KHR_surface)+(VK_KHR_display)+(VK_KHR_get_display_properties2)": [
+        {
+          "vuid": "VUID-VkDisplayModeProperties2KHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkDisplayModeProperties2KHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "VkDisplayModeParametersKHR": {
+      "(VK_KHR_surface)+(VK_KHR_display)": [
+        {
+          "vuid": "VUID-VkDisplayModeParametersKHR-width-01990",
+          "text": " The <code>width</code> member of <code>visibleRegion</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkDisplayModeParametersKHR-height-01991",
+          "text": " The <code>height</code> member of <code>visibleRegion</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkDisplayModeParametersKHR-refreshRate-01992",
+          "text": " <code>refreshRate</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "vkCreateDisplayModeKHR": {
+      "(VK_KHR_surface)+(VK_KHR_display)": [
+        {
+          "vuid": "VUID-vkCreateDisplayModeKHR-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateDisplayModeKHR-display-parameter",
+          "text": " <code>display</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDisplayKHR\">VkDisplayKHR</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateDisplayModeKHR-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkDisplayModeCreateInfoKHR\">VkDisplayModeCreateInfoKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateDisplayModeKHR-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateDisplayModeKHR-pMode-parameter",
+          "text": " <code>pMode</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkDisplayModeKHR\">VkDisplayModeKHR</a> handle"
+        }
+      ]
+    },
+    "VkDisplayModeCreateInfoKHR": {
+      "(VK_KHR_surface)+(VK_KHR_display)": [
+        {
+          "vuid": "VUID-VkDisplayModeCreateInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkDisplayModeCreateInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkDisplayModeCreateInfoKHR-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkDisplayModeCreateInfoKHR-parameters-parameter",
+          "text": " <code>parameters</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDisplayModeParametersKHR\">VkDisplayModeParametersKHR</a> structure"
+        }
+      ]
+    },
+    "vkGetDisplayPlaneCapabilitiesKHR": {
+      "(VK_KHR_surface)+(VK_KHR_display)": [
+        {
+          "vuid": "VUID-vkGetDisplayPlaneCapabilitiesKHR-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetDisplayPlaneCapabilitiesKHR-mode-parameter",
+          "text": " <code>mode</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDisplayModeKHR\">VkDisplayModeKHR</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetDisplayPlaneCapabilitiesKHR-pCapabilities-parameter",
+          "text": " <code>pCapabilities</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkDisplayPlaneCapabilitiesKHR\">VkDisplayPlaneCapabilitiesKHR</a> structure"
+        }
+      ]
+    },
+    "vkGetDisplayPlaneCapabilities2KHR": {
+      "(VK_KHR_surface)+(VK_KHR_display)+(VK_KHR_get_display_properties2)": [
+        {
+          "vuid": "VUID-vkGetDisplayPlaneCapabilities2KHR-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetDisplayPlaneCapabilities2KHR-pDisplayPlaneInfo-parameter",
+          "text": " <code>pDisplayPlaneInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkDisplayPlaneInfo2KHR\">VkDisplayPlaneInfo2KHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetDisplayPlaneCapabilities2KHR-pCapabilities-parameter",
+          "text": " <code>pCapabilities</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkDisplayPlaneCapabilities2KHR\">VkDisplayPlaneCapabilities2KHR</a> structure"
+        }
+      ]
+    },
+    "VkDisplayPlaneInfo2KHR": {
+      "(VK_KHR_surface)+(VK_KHR_display)+(VK_KHR_get_display_properties2)": [
+        {
+          "vuid": "VUID-VkDisplayPlaneInfo2KHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkDisplayPlaneInfo2KHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkDisplayPlaneInfo2KHR-mode-parameter",
+          "text": " <code>mode</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDisplayModeKHR\">VkDisplayModeKHR</a> handle"
+        }
+      ]
+    },
+    "VkDisplayPlaneCapabilities2KHR": {
+      "(VK_KHR_surface)+(VK_KHR_display)+(VK_KHR_get_display_properties2)": [
+        {
+          "vuid": "VUID-VkDisplayPlaneCapabilities2KHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkDisplayPlaneCapabilities2KHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "vkDisplayPowerControlEXT": {
+      "(VK_KHR_surface)+(VK_KHR_display)+(VK_EXT_display_control)": [
+        {
+          "vuid": "VUID-vkDisplayPowerControlEXT-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDisplayPowerControlEXT-display-parameter",
+          "text": " <code>display</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDisplayKHR\">VkDisplayKHR</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDisplayPowerControlEXT-pDisplayPowerInfo-parameter",
+          "text": " <code>pDisplayPowerInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkDisplayPowerInfoEXT\">VkDisplayPowerInfoEXT</a> structure"
+        }
+      ]
+    },
+    "VkDisplayPowerInfoEXT": {
+      "(VK_KHR_surface)+(VK_KHR_display)+(VK_EXT_display_control)": [
+        {
+          "vuid": "VUID-VkDisplayPowerInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkDisplayPowerInfoEXT-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkDisplayPowerInfoEXT-powerState-parameter",
+          "text": " <code>powerState</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDisplayPowerStateEXT\">VkDisplayPowerStateEXT</a> value"
+        }
+      ]
+    },
+    "vkCreateDisplayPlaneSurfaceKHR": {
+      "(VK_KHR_surface)+(VK_KHR_display)": [
+        {
+          "vuid": "VUID-vkCreateDisplayPlaneSurfaceKHR-instance-parameter",
+          "text": " <code>instance</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkInstance\">VkInstance</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateDisplayPlaneSurfaceKHR-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkDisplaySurfaceCreateInfoKHR\">VkDisplaySurfaceCreateInfoKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateDisplayPlaneSurfaceKHR-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateDisplayPlaneSurfaceKHR-pSurface-parameter",
+          "text": " <code>pSurface</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkSurfaceKHR\">VkSurfaceKHR</a> handle"
+        }
+      ]
+    },
+    "VkDisplaySurfaceCreateInfoKHR": {
+      "(VK_KHR_surface)+(VK_KHR_display)": [
+        {
+          "vuid": "VUID-VkDisplaySurfaceCreateInfoKHR-planeIndex-01252",
+          "text": " <code>planeIndex</code> <strong class=\"purple\">must</strong> be less than the number of display planes supported by the device as determined by calling <code>vkGetPhysicalDeviceDisplayPlanePropertiesKHR</code>"
+        },
+        {
+          "vuid": "VUID-VkDisplaySurfaceCreateInfoKHR-planeReorderPossible-01253",
+          "text": " If the <code>planeReorderPossible</code> member of the <code>VkDisplayPropertiesKHR</code> structure returned by <code>vkGetPhysicalDeviceDisplayPropertiesKHR</code> for the display corresponding to <code>displayMode</code> is <code>VK_TRUE</code> then <code>planeStackIndex</code> <strong class=\"purple\">must</strong> be less than the number of display planes supported by the device as determined by calling <code>vkGetPhysicalDeviceDisplayPlanePropertiesKHR</code>; otherwise <code>planeStackIndex</code> <strong class=\"purple\">must</strong> equal the <code>currentStackIndex</code> member of <code>VkDisplayPlanePropertiesKHR</code> returned by <code>vkGetPhysicalDeviceDisplayPlanePropertiesKHR</code> for the display plane corresponding to <code>displayMode</code>"
+        },
+        {
+          "vuid": "VUID-VkDisplaySurfaceCreateInfoKHR-alphaMode-01254",
+          "text": " If <code>alphaMode</code> is <code>VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR</code> then <code>globalAlpha</code> <strong class=\"purple\">must</strong> be between <code>0</code> and <code>1</code>, inclusive"
+        },
+        {
+          "vuid": "VUID-VkDisplaySurfaceCreateInfoKHR-alphaMode-01255",
+          "text": " <code>alphaMode</code> <strong class=\"purple\">must</strong> be <code>0</code> or one of the bits present in the <code>supportedAlpha</code> member of <code>VkDisplayPlaneCapabilitiesKHR</code> returned by <code>vkGetDisplayPlaneCapabilitiesKHR</code> for the display plane corresponding to <code>displayMode</code>"
+        },
+        {
+          "vuid": "VUID-VkDisplaySurfaceCreateInfoKHR-width-01256",
+          "text": " The <code>width</code> and <code>height</code> members of <code>imageExtent</code> <strong class=\"purple\">must</strong> be less than the <code>maxImageDimensions2D</code> member of <code>VkPhysicalDeviceLimits</code>"
+        },
+        {
+          "vuid": "VUID-VkDisplaySurfaceCreateInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkDisplaySurfaceCreateInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkDisplaySurfaceCreateInfoKHR-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkDisplaySurfaceCreateInfoKHR-displayMode-parameter",
+          "text": " <code>displayMode</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDisplayModeKHR\">VkDisplayModeKHR</a> handle"
+        },
+        {
+          "vuid": "VUID-VkDisplaySurfaceCreateInfoKHR-transform-parameter",
+          "text": " <code>transform</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSurfaceTransformFlagBitsKHR\">VkSurfaceTransformFlagBitsKHR</a> value"
+        },
+        {
+          "vuid": "VUID-VkDisplaySurfaceCreateInfoKHR-alphaMode-parameter",
+          "text": " <code>alphaMode</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDisplayPlaneAlphaFlagBitsKHR\">VkDisplayPlaneAlphaFlagBitsKHR</a> value"
+        }
+      ]
+    },
+    "vkCreateHeadlessSurfaceEXT": {
+      "(VK_KHR_surface)+(VK_EXT_headless_surface)": [
+        {
+          "vuid": "VUID-vkCreateHeadlessSurfaceEXT-instance-parameter",
+          "text": " <code>instance</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkInstance\">VkInstance</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateHeadlessSurfaceEXT-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkHeadlessSurfaceCreateInfoEXT\">VkHeadlessSurfaceCreateInfoEXT</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateHeadlessSurfaceEXT-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateHeadlessSurfaceEXT-pSurface-parameter",
+          "text": " <code>pSurface</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkSurfaceKHR\">VkSurfaceKHR</a> handle"
+        }
+      ]
+    },
+    "VkHeadlessSurfaceCreateInfoEXT": {
+      "(VK_KHR_surface)+(VK_EXT_headless_surface)": [
+        {
+          "vuid": "VUID-VkHeadlessSurfaceCreateInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkHeadlessSurfaceCreateInfoEXT-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkHeadlessSurfaceCreateInfoEXT-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceSurfaceSupportKHR": {
+      "(VK_KHR_surface)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfaceSupportKHR-queueFamilyIndex-01269",
+          "text": " <code>queueFamilyIndex</code> <strong class=\"purple\">must</strong> be less than <code>pQueueFamilyPropertyCount</code> returned by <code>vkGetPhysicalDeviceQueueFamilyProperties</code> for the given <code>physicalDevice</code>"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfaceSupportKHR-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfaceSupportKHR-surface-parameter",
+          "text": " <code>surface</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSurfaceKHR\">VkSurfaceKHR</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfaceSupportKHR-pSupported-parameter",
+          "text": " <code>pSupported</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>VkBool32</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfaceSupportKHR-commonparent",
+          "text": " Both of <code>physicalDevice</code>, and <code>surface</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkInstance\">VkInstance</a>"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceWaylandPresentationSupportKHR": {
+      "(VK_KHR_surface)+(VK_KHR_wayland_surface)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceWaylandPresentationSupportKHR-queueFamilyIndex-01306",
+          "text": " <code>queueFamilyIndex</code> <strong class=\"purple\">must</strong> be less than <code>pQueueFamilyPropertyCount</code> returned by <code>vkGetPhysicalDeviceQueueFamilyProperties</code> for the given <code>physicalDevice</code>"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceWaylandPresentationSupportKHR-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceWaylandPresentationSupportKHR-display-parameter",
+          "text": " <code>display</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>wl_display</code> value"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceWin32PresentationSupportKHR": {
+      "(VK_KHR_surface)+(VK_KHR_win32_surface)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceWin32PresentationSupportKHR-queueFamilyIndex-01309",
+          "text": " <code>queueFamilyIndex</code> <strong class=\"purple\">must</strong> be less than <code>pQueueFamilyPropertyCount</code> returned by <code>vkGetPhysicalDeviceQueueFamilyProperties</code> for the given <code>physicalDevice</code>"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceWin32PresentationSupportKHR-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceXcbPresentationSupportKHR": {
+      "(VK_KHR_surface)+(VK_KHR_xcb_surface)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceXcbPresentationSupportKHR-queueFamilyIndex-01312",
+          "text": " <code>queueFamilyIndex</code> <strong class=\"purple\">must</strong> be less than <code>pQueueFamilyPropertyCount</code> returned by <code>vkGetPhysicalDeviceQueueFamilyProperties</code> for the given <code>physicalDevice</code>"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceXcbPresentationSupportKHR-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceXcbPresentationSupportKHR-connection-parameter",
+          "text": " <code>connection</code> <strong class=\"purple\">must</strong> be a valid pointer to an <code>xcb_connection_t</code> value"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceXlibPresentationSupportKHR": {
+      "(VK_KHR_surface)+(VK_KHR_xlib_surface)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceXlibPresentationSupportKHR-queueFamilyIndex-01315",
+          "text": " <code>queueFamilyIndex</code> <strong class=\"purple\">must</strong> be less than <code>pQueueFamilyPropertyCount</code> returned by <code>vkGetPhysicalDeviceQueueFamilyProperties</code> for the given <code>physicalDevice</code>"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceXlibPresentationSupportKHR-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceXlibPresentationSupportKHR-dpy-parameter",
+          "text": " <code>dpy</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>Display</code> value"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceSurfaceCapabilitiesKHR": {
+      "(VK_KHR_surface)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfaceCapabilitiesKHR-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfaceCapabilitiesKHR-surface-parameter",
+          "text": " <code>surface</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSurfaceKHR\">VkSurfaceKHR</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfaceCapabilitiesKHR-pSurfaceCapabilities-parameter",
+          "text": " <code>pSurfaceCapabilities</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkSurfaceCapabilitiesKHR\">VkSurfaceCapabilitiesKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfaceCapabilitiesKHR-commonparent",
+          "text": " Both of <code>physicalDevice</code>, and <code>surface</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkInstance\">VkInstance</a>"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceSurfaceCapabilities2KHR": {
+      "(VK_KHR_surface)+(VK_KHR_get_surface_capabilities2)+(VK_EXT_full_screen_exclusive+VK_KHR_win32_surface)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfaceCapabilities2KHR-pNext-02671",
+          "text": " If a <a href=\"#VkSurfaceCapabilitiesFullScreenExclusiveEXT\">VkSurfaceCapabilitiesFullScreenExclusiveEXT</a> structure is included in the <code>pNext</code> chain of <code>pSurfaceCapabilities</code>, a <a href=\"#VkSurfaceFullScreenExclusiveWin32InfoEXT\">VkSurfaceFullScreenExclusiveWin32InfoEXT</a> structure <strong class=\"purple\">must</strong> be included in the <code>pNext</code> chain of <code>pSurfaceInfo</code>."
+        }
+      ],
+      "(VK_KHR_surface)+(VK_KHR_get_surface_capabilities2)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfaceCapabilities2KHR-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfaceCapabilities2KHR-pSurfaceInfo-parameter",
+          "text": " <code>pSurfaceInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkPhysicalDeviceSurfaceInfo2KHR\">VkPhysicalDeviceSurfaceInfo2KHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfaceCapabilities2KHR-pSurfaceCapabilities-parameter",
+          "text": " <code>pSurfaceCapabilities</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkSurfaceCapabilities2KHR\">VkSurfaceCapabilities2KHR</a> structure"
+        }
+      ]
+    },
+    "VkPhysicalDeviceSurfaceInfo2KHR": {
+      "(VK_KHR_surface)+(VK_KHR_get_surface_capabilities2)+(VK_KHR_win32_surface+VK_EXT_full_screen_exclusive)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceSurfaceInfo2KHR-pNext-02672",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkSurfaceFullScreenExclusiveInfoEXT\">VkSurfaceFullScreenExclusiveInfoEXT</a> structure with its <code>fullScreenExclusive</code> member set to <code>VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT</code>, and <code>surface</code> was created using <a href=\"#vkCreateWin32SurfaceKHR\">vkCreateWin32SurfaceKHR</a>, a <a href=\"#VkSurfaceFullScreenExclusiveWin32InfoEXT\">VkSurfaceFullScreenExclusiveWin32InfoEXT</a> structure <strong class=\"purple\">must</strong> be included in the <code>pNext</code> chain"
+        }
+      ],
+      "(VK_KHR_surface)+(VK_KHR_get_surface_capabilities2)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceSurfaceInfo2KHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceSurfaceInfo2KHR-pNext-pNext",
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkSurfaceFullScreenExclusiveInfoEXT\">VkSurfaceFullScreenExclusiveInfoEXT</a> or <a href=\"#VkSurfaceFullScreenExclusiveWin32InfoEXT\">VkSurfaceFullScreenExclusiveWin32InfoEXT</a>"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceSurfaceInfo2KHR-sType-unique",
+          "text": " Each <code>sType</code> member in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceSurfaceInfo2KHR-surface-parameter",
+          "text": " <code>surface</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSurfaceKHR\">VkSurfaceKHR</a> handle"
+        }
+      ]
+    },
+    "VkSurfaceFullScreenExclusiveInfoEXT": {
+      "(VK_KHR_surface)+(VK_KHR_get_surface_capabilities2)+(VK_EXT_full_screen_exclusive)": [
+        {
+          "vuid": "VUID-VkSurfaceFullScreenExclusiveInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkSurfaceFullScreenExclusiveInfoEXT-fullScreenExclusive-parameter",
+          "text": " <code>fullScreenExclusive</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFullScreenExclusiveEXT\">VkFullScreenExclusiveEXT</a> value"
+        }
+      ]
+    },
+    "VkSurfaceFullScreenExclusiveWin32InfoEXT": {
+      "(VK_KHR_surface)+(VK_KHR_get_surface_capabilities2)+(VK_EXT_full_screen_exclusive)+(VK_KHR_win32_surface)": [
+        {
+          "vuid": "VUID-VkSurfaceFullScreenExclusiveWin32InfoEXT-hmonitor-02673",
+          "text": " <code>hmonitor</code> <strong class=\"purple\">must</strong> be a valid <code>HMONITOR</code>"
+        },
+        {
+          "vuid": "VUID-VkSurfaceFullScreenExclusiveWin32InfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT</code>"
+        }
+      ]
+    },
+    "VkSurfaceCapabilities2KHR": {
+      "(VK_KHR_surface)+(VK_KHR_get_surface_capabilities2)": [
+        {
+          "vuid": "VUID-VkSurfaceCapabilities2KHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkSurfaceCapabilities2KHR-pNext-pNext",
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkDisplayNativeHdrSurfaceCapabilitiesAMD\">VkDisplayNativeHdrSurfaceCapabilitiesAMD</a>, <a href=\"#VkSharedPresentSurfaceCapabilitiesKHR\">VkSharedPresentSurfaceCapabilitiesKHR</a>, <a href=\"#VkSurfaceCapabilitiesFullScreenExclusiveEXT\">VkSurfaceCapabilitiesFullScreenExclusiveEXT</a>, or <a href=\"#VkSurfaceProtectedCapabilitiesKHR\">VkSurfaceProtectedCapabilitiesKHR</a>"
+        },
+        {
+          "vuid": "VUID-VkSurfaceCapabilities2KHR-sType-unique",
+          "text": " Each <code>sType</code> member in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
+        }
+      ]
+    },
+    "VkSurfaceProtectedCapabilitiesKHR": {
+      "(VK_KHR_surface)+(VK_KHR_get_surface_capabilities2)+(VK_KHR_surface_protected_capabilities)": [
+        {
+          "vuid": "VUID-VkSurfaceProtectedCapabilitiesKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR</code>"
+        }
+      ]
+    },
+    "VkSharedPresentSurfaceCapabilitiesKHR": {
+      "(VK_KHR_surface)+(VK_KHR_get_surface_capabilities2)+(VK_KHR_shared_presentable_image)": [
+        {
+          "vuid": "VUID-VkSharedPresentSurfaceCapabilitiesKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR</code>"
+        }
+      ]
+    },
+    "VkDisplayNativeHdrSurfaceCapabilitiesAMD": {
+      "(VK_KHR_surface)+(VK_KHR_get_surface_capabilities2)+(VK_AMD_display_native_hdr)": [
+        {
+          "vuid": "VUID-VkDisplayNativeHdrSurfaceCapabilitiesAMD-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD</code>"
+        }
+      ]
+    },
+    "VkSurfaceCapabilitiesFullScreenExclusiveEXT": {
+      "(VK_KHR_surface)+(VK_KHR_get_surface_capabilities2)+(VK_EXT_full_screen_exclusive)": [
+        {
+          "vuid": "VUID-VkSurfaceCapabilitiesFullScreenExclusiveEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT</code>"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceSurfaceCapabilities2EXT": {
+      "(VK_KHR_surface)+(VK_EXT_display_surface_counter)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-surface-parameter",
+          "text": " <code>surface</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSurfaceKHR\">VkSurfaceKHR</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-pSurfaceCapabilities-parameter",
+          "text": " <code>pSurfaceCapabilities</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkSurfaceCapabilities2EXT\">VkSurfaceCapabilities2EXT</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-commonparent",
+          "text": " Both of <code>physicalDevice</code>, and <code>surface</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkInstance\">VkInstance</a>"
+        }
+      ]
+    },
+    "VkSurfaceCapabilities2EXT": {
+      "(VK_KHR_surface)+(VK_EXT_display_surface_counter)": [
+        {
+          "vuid": "VUID-VkSurfaceCapabilities2EXT-supportedSurfaceCounters-01246",
+          "text": " <code>supportedSurfaceCounters</code> <strong class=\"purple\">must</strong> not include <code>VK_SURFACE_COUNTER_VBLANK_EXT</code> unless the surface queried is a <a href=\"#wsi-display-surfaces\">display surface</a>."
+        },
+        {
+          "vuid": "VUID-VkSurfaceCapabilities2EXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkSurfaceCapabilities2EXT-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceSurfaceFormatsKHR": {
+      "(VK_KHR_surface)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-surface-02739",
+          "text": " <code>surface</code> must be supported by <code>physicalDevice</code>, as reported by <a href=\"#vkGetPhysicalDeviceSurfaceSupportKHR\">vkGetPhysicalDeviceSurfaceSupportKHR</a> or an equivalent platform-specific mechanism."
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-surface-parameter",
+          "text": " <code>surface</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSurfaceKHR\">VkSurfaceKHR</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-pSurfaceFormatCount-parameter",
+          "text": " <code>pSurfaceFormatCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-pSurfaceFormats-parameter",
+          "text": " If the value referenced by <code>pSurfaceFormatCount</code> is not <code>0</code>, and <code>pSurfaceFormats</code> is not <code>NULL</code>, <code>pSurfaceFormats</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pSurfaceFormatCount</code> <a href=\"#VkSurfaceFormatKHR\">VkSurfaceFormatKHR</a> structures"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-commonparent",
+          "text": " Both of <code>physicalDevice</code>, and <code>surface</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkInstance\">VkInstance</a>"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceSurfaceFormats2KHR": {
+      "(VK_KHR_surface)+(VK_KHR_get_surface_capabilities2)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-pSurfaceInfo-02740",
+          "text": " <code>pSurfaceInfo</code>-&gt;surface must be supported by <code>physicalDevice</code>, as reported by <a href=\"#vkGetPhysicalDeviceSurfaceSupportKHR\">vkGetPhysicalDeviceSurfaceSupportKHR</a> or an equivalent platform-specific mechanism."
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-pSurfaceInfo-parameter",
+          "text": " <code>pSurfaceInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkPhysicalDeviceSurfaceInfo2KHR\">VkPhysicalDeviceSurfaceInfo2KHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-pSurfaceFormatCount-parameter",
+          "text": " <code>pSurfaceFormatCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-pSurfaceFormats-parameter",
+          "text": " If the value referenced by <code>pSurfaceFormatCount</code> is not <code>0</code>, and <code>pSurfaceFormats</code> is not <code>NULL</code>, <code>pSurfaceFormats</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pSurfaceFormatCount</code> <a href=\"#VkSurfaceFormat2KHR\">VkSurfaceFormat2KHR</a> structures"
+        }
+      ]
+    },
+    "VkSurfaceFormat2KHR": {
+      "(VK_KHR_surface)+(VK_KHR_get_surface_capabilities2)": [
+        {
+          "vuid": "VUID-VkSurfaceFormat2KHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkSurfaceFormat2KHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceSurfacePresentModesKHR": {
+      "(VK_KHR_surface)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-surface-parameter",
+          "text": " <code>surface</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSurfaceKHR\">VkSurfaceKHR</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-pPresentModeCount-parameter",
+          "text": " <code>pPresentModeCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-pPresentModes-parameter",
+          "text": " If the value referenced by <code>pPresentModeCount</code> is not <code>0</code>, and <code>pPresentModes</code> is not <code>NULL</code>, <code>pPresentModes</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pPresentModeCount</code> <a href=\"#VkPresentModeKHR\">VkPresentModeKHR</a> values"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-commonparent",
+          "text": " Both of <code>physicalDevice</code>, and <code>surface</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkInstance\">VkInstance</a>"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceSurfacePresentModes2EXT": {
+      "(VK_KHR_surface)+(VK_EXT_full_screen_exclusive)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfacePresentModes2EXT-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfacePresentModes2EXT-pSurfaceInfo-parameter",
+          "text": " <code>pSurfaceInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkPhysicalDeviceSurfaceInfo2KHR\">VkPhysicalDeviceSurfaceInfo2KHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfacePresentModes2EXT-pPresentModeCount-parameter",
+          "text": " <code>pPresentModeCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceSurfacePresentModes2EXT-pPresentModes-parameter",
+          "text": " If the value referenced by <code>pPresentModeCount</code> is not <code>0</code>, and <code>pPresentModes</code> is not <code>NULL</code>, <code>pPresentModes</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pPresentModeCount</code> <a href=\"#VkPresentModeKHR\">VkPresentModeKHR</a> values"
+        }
+      ]
+    },
+    "vkAcquireFullScreenExclusiveModeEXT": {
+      "(VK_KHR_surface)+(VK_EXT_full_screen_exclusive)": [
+        {
+          "vuid": "VUID-vkAcquireFullScreenExclusiveModeEXT-swapchain-02674",
+          "text": " <code>swapchain</code> <strong class=\"purple\">must</strong> not be in the retired state"
+        },
+        {
+          "vuid": "VUID-vkAcquireFullScreenExclusiveModeEXT-swapchain-02675",
+          "text": " <code>swapchain</code> <strong class=\"purple\">must</strong> be a swapchain created with a <a href=\"#VkSurfaceFullScreenExclusiveInfoEXT\">VkSurfaceFullScreenExclusiveInfoEXT</a> structure, with <code>fullScreenExclusive</code> set to <code>VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT</code>"
+        },
+        {
+          "vuid": "VUID-vkAcquireFullScreenExclusiveModeEXT-swapchain-02676",
+          "text": " <code>swapchain</code> <strong class=\"purple\">must</strong> not currently have exclusive full-screen access"
+        },
+        {
+          "vuid": "VUID-vkAcquireFullScreenExclusiveModeEXT-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkAcquireFullScreenExclusiveModeEXT-swapchain-parameter",
+          "text": " <code>swapchain</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSwapchainKHR\">VkSwapchainKHR</a> handle"
+        },
+        {
+          "vuid": "VUID-vkAcquireFullScreenExclusiveModeEXT-commonparent",
+          "text": " Both of <code>device</code>, and <code>swapchain</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkInstance\">VkInstance</a>"
+        }
+      ]
+    },
+    "vkReleaseFullScreenExclusiveModeEXT": {
+      "(VK_KHR_surface)+(VK_EXT_full_screen_exclusive)": [
+        {
+          "vuid": "VUID-vkReleaseFullScreenExclusiveModeEXT-swapchain-02677",
+          "text": " <code>swapchain</code> <strong class=\"purple\">must</strong> not be in the retired state"
+        },
+        {
+          "vuid": "VUID-vkReleaseFullScreenExclusiveModeEXT-swapchain-02678",
+          "text": " <code>swapchain</code> <strong class=\"purple\">must</strong> be a swapchain created with a <a href=\"#VkSurfaceFullScreenExclusiveInfoEXT\">VkSurfaceFullScreenExclusiveInfoEXT</a> structure, with <code>fullScreenExclusive</code> set to <code>VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT</code>"
+        }
+      ]
+    },
+    "vkGetDeviceGroupPresentCapabilitiesKHR": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_VERSION_1_1,VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-vkGetDeviceGroupPresentCapabilitiesKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetDeviceGroupPresentCapabilitiesKHR-pDeviceGroupPresentCapabilities-parameter",
+          "text": " <code>pDeviceGroupPresentCapabilities</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkDeviceGroupPresentCapabilitiesKHR\">VkDeviceGroupPresentCapabilitiesKHR</a> structure"
+        }
+      ]
+    },
+    "VkDeviceGroupPresentCapabilitiesKHR": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_VERSION_1_1,VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-VkDeviceGroupPresentCapabilitiesKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupPresentCapabilitiesKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "vkGetDeviceGroupSurfacePresentModesKHR": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_VERSION_1_1,VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-vkGetDeviceGroupSurfacePresentModesKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetDeviceGroupSurfacePresentModesKHR-surface-parameter",
+          "text": " <code>surface</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSurfaceKHR\">VkSurfaceKHR</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetDeviceGroupSurfacePresentModesKHR-pModes-parameter",
+          "text": " <code>pModes</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkDeviceGroupPresentModeFlagsKHR\">VkDeviceGroupPresentModeFlagsKHR</a> value"
+        },
+        {
+          "vuid": "VUID-vkGetDeviceGroupSurfacePresentModesKHR-commonparent",
+          "text": " Both of <code>device</code>, and <code>surface</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkInstance\">VkInstance</a>"
+        }
+      ]
+    },
+    "vkGetDeviceGroupSurfacePresentModes2EXT": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_VERSION_1_1,VK_KHR_device_group)+(VK_EXT_full_screen_exclusive)": [
+        {
+          "vuid": "VUID-vkGetDeviceGroupSurfacePresentModes2EXT-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetDeviceGroupSurfacePresentModes2EXT-pSurfaceInfo-parameter",
+          "text": " <code>pSurfaceInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkPhysicalDeviceSurfaceInfo2KHR\">VkPhysicalDeviceSurfaceInfo2KHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetDeviceGroupSurfacePresentModes2EXT-pModes-parameter",
+          "text": " <code>pModes</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkDeviceGroupPresentModeFlagsKHR\">VkDeviceGroupPresentModeFlagsKHR</a> value"
+        }
+      ]
+    },
+    "vkGetPhysicalDevicePresentRectanglesKHR": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_VERSION_1_1,VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDevicePresentRectanglesKHR-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDevicePresentRectanglesKHR-surface-parameter",
+          "text": " <code>surface</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSurfaceKHR\">VkSurfaceKHR</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDevicePresentRectanglesKHR-pRectCount-parameter",
+          "text": " <code>pRectCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDevicePresentRectanglesKHR-pRects-parameter",
+          "text": " If the value referenced by <code>pRectCount</code> is not <code>0</code>, and <code>pRects</code> is not <code>NULL</code>, <code>pRects</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pRectCount</code> <a href=\"#VkRect2D\">VkRect2D</a> structures"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDevicePresentRectanglesKHR-commonparent",
+          "text": " Both of <code>physicalDevice</code>, and <code>surface</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkInstance\">VkInstance</a>"
+        }
+      ]
+    },
+    "vkGetRefreshCycleDurationGOOGLE": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_GOOGLE_display_timing)": [
+        {
+          "vuid": "VUID-vkGetRefreshCycleDurationGOOGLE-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetRefreshCycleDurationGOOGLE-swapchain-parameter",
+          "text": " <code>swapchain</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSwapchainKHR\">VkSwapchainKHR</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetRefreshCycleDurationGOOGLE-pDisplayTimingProperties-parameter",
+          "text": " <code>pDisplayTimingProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkRefreshCycleDurationGOOGLE\">VkRefreshCycleDurationGOOGLE</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetRefreshCycleDurationGOOGLE-commonparent",
+          "text": " Both of <code>device</code>, and <code>swapchain</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkInstance\">VkInstance</a>"
+        }
+      ]
+    },
+    "vkGetPastPresentationTimingGOOGLE": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_GOOGLE_display_timing)": [
+        {
+          "vuid": "VUID-vkGetPastPresentationTimingGOOGLE-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPastPresentationTimingGOOGLE-swapchain-parameter",
+          "text": " <code>swapchain</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSwapchainKHR\">VkSwapchainKHR</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPastPresentationTimingGOOGLE-pPresentationTimingCount-parameter",
+          "text": " <code>pPresentationTimingCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetPastPresentationTimingGOOGLE-pPresentationTimings-parameter",
+          "text": " If the value referenced by <code>pPresentationTimingCount</code> is not <code>0</code>, and <code>pPresentationTimings</code> is not <code>NULL</code>, <code>pPresentationTimings</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pPresentationTimingCount</code> <a href=\"#VkPastPresentationTimingGOOGLE\">VkPastPresentationTimingGOOGLE</a> structures"
+        },
+        {
+          "vuid": "VUID-vkGetPastPresentationTimingGOOGLE-commonparent",
+          "text": " Both of <code>device</code>, and <code>swapchain</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkInstance\">VkInstance</a>"
+        }
+      ]
+    },
+    "vkGetSwapchainStatusKHR": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_KHR_shared_presentable_image)": [
+        {
+          "vuid": "VUID-vkGetSwapchainStatusKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetSwapchainStatusKHR-swapchain-parameter",
+          "text": " <code>swapchain</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSwapchainKHR\">VkSwapchainKHR</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetSwapchainStatusKHR-commonparent",
+          "text": " Both of <code>device</code>, and <code>swapchain</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkInstance\">VkInstance</a>"
+        }
+      ]
+    },
+    "vkCreateSwapchainKHR": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)": [
+        {
+          "vuid": "VUID-vkCreateSwapchainKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateSwapchainKHR-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkSwapchainCreateInfoKHR\">VkSwapchainCreateInfoKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateSwapchainKHR-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateSwapchainKHR-pSwapchain-parameter",
+          "text": " <code>pSwapchain</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkSwapchainKHR\">VkSwapchainKHR</a> handle"
+        }
+      ]
+    },
+    "VkSwapchainCreateInfoKHR": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)": [
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-surface-01270",
+          "text": " <code>surface</code> <strong class=\"purple\">must</strong> be a surface that is supported by the device as determined using <a href=\"#vkGetPhysicalDeviceSurfaceSupportKHR\">vkGetPhysicalDeviceSurfaceSupportKHR</a>"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-minImageCount-01271",
+          "text": " <code>minImageCount</code> <strong class=\"purple\">must</strong> be greater than or equal to the value returned in the <code>minImageCount</code> member of the <code>VkSurfaceCapabilitiesKHR</code> structure returned by <a href=\"#vkGetPhysicalDeviceSurfaceCapabilitiesKHR\">vkGetPhysicalDeviceSurfaceCapabilitiesKHR</a> for the surface"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-minImageCount-01272",
+          "text": " <code>minImageCount</code> <strong class=\"purple\">must</strong> be less than or equal to the value returned in the <code>maxImageCount</code> member of the <code>VkSurfaceCapabilitiesKHR</code> structure returned by <code>vkGetPhysicalDeviceSurfaceCapabilitiesKHR</code> for the surface if the returned <code>maxImageCount</code> is not zero"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-imageFormat-01273",
+          "text": " <code>imageFormat</code> and <code>imageColorSpace</code> <strong class=\"purple\">must</strong> match the <code>format</code> and <code>colorSpace</code> members, respectively, of one of the <code>VkSurfaceFormatKHR</code> structures returned by <code>vkGetPhysicalDeviceSurfaceFormatsKHR</code> for the surface"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-imageExtent-01274",
+          "text": " <code>imageExtent</code> <strong class=\"purple\">must</strong> be between <code>minImageExtent</code> and <code>maxImageExtent</code>, inclusive, where <code>minImageExtent</code> and <code>maxImageExtent</code> are members of the <code>VkSurfaceCapabilitiesKHR</code> structure returned by <code>vkGetPhysicalDeviceSurfaceCapabilitiesKHR</code> for the surface"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-imageExtent-01689",
+          "text": " <code>imageExtent</code> members <code>width</code> and <code>height</code> <strong class=\"purple\">must</strong> both be non-zero"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-imageArrayLayers-01275",
+          "text": " <code>imageArrayLayers</code> <strong class=\"purple\">must</strong> be greater than <code>0</code> and less than or equal to the <code>maxImageArrayLayers</code> member of the <code>VkSurfaceCapabilitiesKHR</code> structure returned by <code>vkGetPhysicalDeviceSurfaceCapabilitiesKHR</code> for the surface"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-imageSharingMode-01277",
+          "text": " If <code>imageSharingMode</code> is <code>VK_SHARING_MODE_CONCURRENT</code>, <code>pQueueFamilyIndices</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>queueFamilyIndexCount</code> <code>uint32_t</code> values"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-imageSharingMode-01278",
+          "text": " If <code>imageSharingMode</code> is <code>VK_SHARING_MODE_CONCURRENT</code>, <code>queueFamilyIndexCount</code> <strong class=\"purple\">must</strong> be greater than <code>1</code>"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-preTransform-01279",
+          "text": " <code>preTransform</code> <strong class=\"purple\">must</strong> be one of the bits present in the <code>supportedTransforms</code> member of the <code>VkSurfaceCapabilitiesKHR</code> structure returned by <code>vkGetPhysicalDeviceSurfaceCapabilitiesKHR</code> for the surface"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-compositeAlpha-01280",
+          "text": " <code>compositeAlpha</code> <strong class=\"purple\">must</strong> be one of the bits present in the <code>supportedCompositeAlpha</code> member of the <code>VkSurfaceCapabilitiesKHR</code> structure returned by <code>vkGetPhysicalDeviceSurfaceCapabilitiesKHR</code> for the surface"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-presentMode-01281",
+          "text": " <code>presentMode</code> <strong class=\"purple\">must</strong> be one of the <a href=\"#VkPresentModeKHR\">VkPresentModeKHR</a> values returned by <code>vkGetPhysicalDeviceSurfacePresentModesKHR</code> for the surface"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-oldSwapchain-01933",
+          "text": " If <code>oldSwapchain</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>oldSwapchain</code> <strong class=\"purple\">must</strong> be a non-retired swapchain associated with native window referred to by <code>surface</code>"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-imageFormat-01778",
+          "text": " The <a href=\"#swapchain-wsi-image-create-info\">implied image creation parameters</a> of the swapchain <strong class=\"purple\">must</strong> be supported as reported by <a href=\"#vkGetPhysicalDeviceImageFormatProperties\">vkGetPhysicalDeviceImageFormatProperties</a>"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-pNext-pNext",
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkDeviceGroupSwapchainCreateInfoKHR\">VkDeviceGroupSwapchainCreateInfoKHR</a>, <a href=\"#VkImageFormatListCreateInfoKHR\">VkImageFormatListCreateInfoKHR</a>, <a href=\"#VkSurfaceFullScreenExclusiveInfoEXT\">VkSurfaceFullScreenExclusiveInfoEXT</a>, <a href=\"#VkSurfaceFullScreenExclusiveWin32InfoEXT\">VkSurfaceFullScreenExclusiveWin32InfoEXT</a>, <a href=\"#VkSwapchainCounterCreateInfoEXT\">VkSwapchainCounterCreateInfoEXT</a>, or <a href=\"#VkSwapchainDisplayNativeHdrCreateInfoAMD\">VkSwapchainDisplayNativeHdrCreateInfoAMD</a>"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-sType-unique",
+          "text": " Each <code>sType</code> member in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkSwapchainCreateFlagBitsKHR\">VkSwapchainCreateFlagBitsKHR</a> values"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-surface-parameter",
+          "text": " <code>surface</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSurfaceKHR\">VkSurfaceKHR</a> handle"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-imageFormat-parameter",
+          "text": " <code>imageFormat</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFormat\">VkFormat</a> value"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-imageColorSpace-parameter",
+          "text": " <code>imageColorSpace</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkColorSpaceKHR\">VkColorSpaceKHR</a> value"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-imageUsage-parameter",
+          "text": " <code>imageUsage</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkImageUsageFlagBits\">VkImageUsageFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-imageUsage-requiredbitmask",
+          "text": " <code>imageUsage</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-imageSharingMode-parameter",
+          "text": " <code>imageSharingMode</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSharingMode\">VkSharingMode</a> value"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-preTransform-parameter",
+          "text": " <code>preTransform</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSurfaceTransformFlagBitsKHR\">VkSurfaceTransformFlagBitsKHR</a> value"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-compositeAlpha-parameter",
+          "text": " <code>compositeAlpha</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCompositeAlphaFlagBitsKHR\">VkCompositeAlphaFlagBitsKHR</a> value"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-presentMode-parameter",
+          "text": " <code>presentMode</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPresentModeKHR\">VkPresentModeKHR</a> value"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-oldSwapchain-parameter",
+          "text": " If <code>oldSwapchain</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>oldSwapchain</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSwapchainKHR\">VkSwapchainKHR</a> handle"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-oldSwapchain-parent",
+          "text": " If <code>oldSwapchain</code> is a valid handle, it <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>surface</code>"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-commonparent",
+          "text": " Both of <code>oldSwapchain</code>, and <code>surface</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkInstance\">VkInstance</a>"
+        }
+      ],
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_KHR_shared_presentable_image)": [
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-minImageCount-01383",
+          "text": " <code>minImageCount</code> <strong class=\"purple\">must</strong> be <code>1</code> if <code>presentMode</code> is either <code>VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR</code> or <code>VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-presentMode-01427",
+          "text": " If <code>presentMode</code> is <code>VK_PRESENT_MODE_IMMEDIATE_KHR</code>, <code>VK_PRESENT_MODE_MAILBOX_KHR</code>, <code>VK_PRESENT_MODE_FIFO_KHR</code> or <code>VK_PRESENT_MODE_FIFO_RELAXED_KHR</code>, <code>imageUsage</code> <strong class=\"purple\">must</strong> be a subset of the supported usage flags present in the <code>supportedUsageFlags</code> member of the <a href=\"#VkSurfaceCapabilitiesKHR\">VkSurfaceCapabilitiesKHR</a> structure returned by <a href=\"#vkGetPhysicalDeviceSurfaceCapabilitiesKHR\">vkGetPhysicalDeviceSurfaceCapabilitiesKHR</a> for <code>surface</code>"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-imageUsage-01384",
+          "text": " If <code>presentMode</code> is <code>VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR</code> or <code>VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR</code>, <code>imageUsage</code> <strong class=\"purple\">must</strong> be a subset of the supported usage flags present in the <code>sharedPresentSupportedUsageFlags</code> member of the <a href=\"#VkSharedPresentSurfaceCapabilitiesKHR\">VkSharedPresentSurfaceCapabilitiesKHR</a> structure returned by <a href=\"#vkGetPhysicalDeviceSurfaceCapabilities2KHR\">vkGetPhysicalDeviceSurfaceCapabilities2KHR</a> for <code>surface</code>"
+        }
+      ],
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+!(VK_KHR_shared_presentable_image)": [
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-imageUsage-01276",
+          "text": " <code>imageUsage</code> <strong class=\"purple\">must</strong> be a subset of the supported usage flags present in the <code>supportedUsageFlags</code> member of the <code>VkSurfaceCapabilitiesKHR</code> structure returned by <code>vkGetPhysicalDeviceSurfaceCapabilitiesKHR</code> for the surface"
+        }
+      ],
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+!(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-imageSharingMode-01393",
+          "text": " If <code>imageSharingMode</code> is <code>VK_SHARING_MODE_CONCURRENT</code>, each element of <code>pQueueFamilyIndices</code> <strong class=\"purple\">must</strong> be unique and <strong class=\"purple\">must</strong> be less than <code>pQueueFamilyPropertyCount</code> returned by <a href=\"#vkGetPhysicalDeviceQueueFamilyProperties\">vkGetPhysicalDeviceQueueFamilyProperties</a> for the <code>physicalDevice</code> that was used to create <code>device</code>"
+        }
+      ],
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-imageSharingMode-01428",
+          "text": " If <code>imageSharingMode</code> is <code>VK_SHARING_MODE_CONCURRENT</code>, each element of <code>pQueueFamilyIndices</code> <strong class=\"purple\">must</strong> be unique and <strong class=\"purple\">must</strong> be less than <code>pQueueFamilyPropertyCount</code> returned by either <a href=\"#vkGetPhysicalDeviceQueueFamilyProperties\">vkGetPhysicalDeviceQueueFamilyProperties</a> or <a href=\"#vkGetPhysicalDeviceQueueFamilyProperties2\">vkGetPhysicalDeviceQueueFamilyProperties2</a> for the <code>physicalDevice</code> that was used to create <code>device</code>"
+        }
+      ],
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_VERSION_1_1,VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-physicalDeviceCount-01429",
+          "text": " If the logical device was created with <a href=\"#VkDeviceGroupDeviceCreateInfo\">VkDeviceGroupDeviceCreateInfo</a>::<code>physicalDeviceCount</code> equal to 1, <code>flags</code> <strong class=\"purple\">must</strong> not contain <code>VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR</code>"
+        }
+      ],
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_KHR_swapchain_mutable_format)": [
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-flags-03168",
+          "text": " If <code>flags</code> contains <code>VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR</code> then the <code>pNext</code> chain <strong class=\"purple\">must</strong> include a <a href=\"#VkImageFormatListCreateInfoKHR\">VkImageFormatListCreateInfoKHR</a> structure with a <code>viewFormatCount</code> greater than zero and <code>pViewFormats</code> <strong class=\"purple\">must</strong> have an element equal to <code>imageFormat</code>"
+        }
+      ],
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_KHR_surface_protected_capabilities)": [
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-flags-03187",
+          "text": " If <code>flags</code> contains <code>VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR</code>, then <code>VkSurfaceProtectedCapabilitiesKHR</code>::<code>supportsProtected</code> <strong class=\"purple\">must</strong> be <code>VK_TRUE</code> in the <a href=\"#VkSurfaceProtectedCapabilitiesKHR\">VkSurfaceProtectedCapabilitiesKHR</a> structure returned by <a href=\"#vkGetPhysicalDeviceSurfaceCapabilities2KHR\">vkGetPhysicalDeviceSurfaceCapabilities2KHR</a> for <code>surface</code>"
+        }
+      ],
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_EXT_full_screen_exclusive+VK_KHR_win32_surface)": [
+        {
+          "vuid": "VUID-VkSwapchainCreateInfoKHR-pNext-02679",
+          "text": " If the <code>pNext</code> chain includes a <a href=\"#VkSurfaceFullScreenExclusiveInfoEXT\">VkSurfaceFullScreenExclusiveInfoEXT</a> structure with its <code>fullScreenExclusive</code> member set to <code>VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT</code>, and <code>surface</code> was created using <a href=\"#vkCreateWin32SurfaceKHR\">vkCreateWin32SurfaceKHR</a>, a <a href=\"#VkSurfaceFullScreenExclusiveWin32InfoEXT\">VkSurfaceFullScreenExclusiveWin32InfoEXT</a> structure <strong class=\"purple\">must</strong> be included in the <code>pNext</code> chain"
+        }
+      ]
+    },
+    "VkDeviceGroupSwapchainCreateInfoKHR": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_VERSION_1_1,VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-VkDeviceGroupSwapchainCreateInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupSwapchainCreateInfoKHR-modes-parameter",
+          "text": " <code>modes</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkDeviceGroupPresentModeFlagBitsKHR\">VkDeviceGroupPresentModeFlagBitsKHR</a> values"
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupSwapchainCreateInfoKHR-modes-requiredbitmask",
+          "text": " <code>modes</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        }
+      ]
+    },
+    "VkSwapchainDisplayNativeHdrCreateInfoAMD": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_AMD_display_native_hdr)": [
+        {
+          "vuid": "VUID-VkSwapchainDisplayNativeHdrCreateInfoAMD-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD</code>"
+        },
+        {
+          "vuid": "VUID-VkSwapchainDisplayNativeHdrCreateInfoAMD-localDimmingEnable-XXXXX",
+          "text": " It is only valid to set <code>localDimmingEnable</code> to <code>VK_TRUE</code> if <a href=\"#VkDisplayNativeHdrSurfaceCapabilitiesAMD\">VkDisplayNativeHdrSurfaceCapabilitiesAMD</a>::<code>localDimmingSupport</code> is supported."
+        }
+      ]
+    },
+    "vkSetLocalDimmingAMD": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_AMD_display_native_hdr)": [
+        {
+          "vuid": "VUID-vkSetLocalDimmingAMD-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkSetLocalDimmingAMD-swapChain-parameter",
+          "text": " <code>swapChain</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSwapchainKHR\">VkSwapchainKHR</a> handle"
+        },
+        {
+          "vuid": "VUID-vkSetLocalDimmingAMD-commonparent",
+          "text": " Both of <code>device</code>, and <code>swapChain</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkInstance\">VkInstance</a>"
+        },
+        {
+          "vuid": "VUID-vkSetLocalDimmingAMD-XXXXX",
+          "text": " It is only valid to call <a href=\"#vkSetLocalDimmingAMD\">vkSetLocalDimmingAMD</a> if <a href=\"#VkDisplayNativeHdrSurfaceCapabilitiesAMD\">VkDisplayNativeHdrSurfaceCapabilitiesAMD</a>::<code>localDimmingSupport</code> is supported."
+        }
+      ]
+    },
+    "VkSwapchainCounterCreateInfoEXT": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_EXT_display_control)": [
+        {
+          "vuid": "VUID-VkSwapchainCounterCreateInfoEXT-surfaceCounters-01244",
+          "text": " The bits in <code>surfaceCounters</code> <strong class=\"purple\">must</strong> be supported by <a href=\"#VkSwapchainCreateInfoKHR\">VkSwapchainCreateInfoKHR</a>::<code>surface</code>, as reported by <a href=\"#vkGetPhysicalDeviceSurfaceCapabilities2EXT\">vkGetPhysicalDeviceSurfaceCapabilities2EXT</a>."
+        },
+        {
+          "vuid": "VUID-VkSwapchainCounterCreateInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkSwapchainCounterCreateInfoEXT-surfaceCounters-parameter",
+          "text": " <code>surfaceCounters</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkSurfaceCounterFlagBitsEXT\">VkSurfaceCounterFlagBitsEXT</a> values"
+        }
+      ]
+    },
+    "vkGetSwapchainCounterEXT": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_EXT_display_control)": [
+        {
+          "vuid": "VUID-vkGetSwapchainCounterEXT-swapchain-01245",
+          "text": " One or more present commands on <code>swapchain</code> <strong class=\"purple\">must</strong> have been processed by the presentation engine."
+        },
+        {
+          "vuid": "VUID-vkGetSwapchainCounterEXT-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetSwapchainCounterEXT-swapchain-parameter",
+          "text": " <code>swapchain</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSwapchainKHR\">VkSwapchainKHR</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetSwapchainCounterEXT-counter-parameter",
+          "text": " <code>counter</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSurfaceCounterFlagBitsEXT\">VkSurfaceCounterFlagBitsEXT</a> value"
+        },
+        {
+          "vuid": "VUID-vkGetSwapchainCounterEXT-pCounterValue-parameter",
+          "text": " <code>pCounterValue</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint64_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetSwapchainCounterEXT-commonparent",
+          "text": " Both of <code>device</code>, and <code>swapchain</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkInstance\">VkInstance</a>"
+        }
+      ]
+    },
+    "vkDestroySwapchainKHR": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)": [
+        {
+          "vuid": "VUID-vkDestroySwapchainKHR-swapchain-01282",
+          "text": " All uses of presentable images acquired from <code>swapchain</code> <strong class=\"purple\">must</strong> have completed execution"
+        },
+        {
+          "vuid": "VUID-vkDestroySwapchainKHR-swapchain-01283",
+          "text": " If <code>VkAllocationCallbacks</code> were provided when <code>swapchain</code> was created, a compatible set of callbacks <strong class=\"purple\">must</strong> be provided here"
+        },
+        {
+          "vuid": "VUID-vkDestroySwapchainKHR-swapchain-01284",
+          "text": " If no <code>VkAllocationCallbacks</code> were provided when <code>swapchain</code> was created, <code>pAllocator</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroySwapchainKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroySwapchainKHR-swapchain-parameter",
+          "text": " If <code>swapchain</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>swapchain</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSwapchainKHR\">VkSwapchainKHR</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroySwapchainKHR-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkDestroySwapchainKHR-commonparent",
+          "text": " Both of <code>device</code>, and <code>swapchain</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkInstance\">VkInstance</a>"
+        }
+      ]
+    },
+    "vkCreateSharedSwapchainsKHR": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_KHR_display_swapchain)": [
+        {
+          "vuid": "VUID-vkCreateSharedSwapchainsKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateSharedSwapchainsKHR-pCreateInfos-parameter",
+          "text": " <code>pCreateInfos</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>swapchainCount</code> valid <a href=\"#VkSwapchainCreateInfoKHR\">VkSwapchainCreateInfoKHR</a> structures"
+        },
+        {
+          "vuid": "VUID-vkCreateSharedSwapchainsKHR-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateSharedSwapchainsKHR-pSwapchains-parameter",
+          "text": " <code>pSwapchains</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>swapchainCount</code> <a href=\"#VkSwapchainKHR\">VkSwapchainKHR</a> handles"
+        },
+        {
+          "vuid": "VUID-vkCreateSharedSwapchainsKHR-swapchainCount-arraylength",
+          "text": " <code>swapchainCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "vkGetSwapchainImagesKHR": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)": [
+        {
+          "vuid": "VUID-vkGetSwapchainImagesKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetSwapchainImagesKHR-swapchain-parameter",
+          "text": " <code>swapchain</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSwapchainKHR\">VkSwapchainKHR</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetSwapchainImagesKHR-pSwapchainImageCount-parameter",
+          "text": " <code>pSwapchainImageCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetSwapchainImagesKHR-pSwapchainImages-parameter",
+          "text": " If the value referenced by <code>pSwapchainImageCount</code> is not <code>0</code>, and <code>pSwapchainImages</code> is not <code>NULL</code>, <code>pSwapchainImages</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pSwapchainImageCount</code> <a href=\"#VkImage\">VkImage</a> handles"
+        },
+        {
+          "vuid": "VUID-vkGetSwapchainImagesKHR-commonparent",
+          "text": " Both of <code>device</code>, and <code>swapchain</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkInstance\">VkInstance</a>"
+        }
+      ]
+    },
+    "vkAcquireNextImageKHR": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)": [
+        {
+          "vuid": "VUID-vkAcquireNextImageKHR-swapchain-01285",
+          "text": " <code>swapchain</code> <strong class=\"purple\">must</strong> not be in the retired state"
+        },
+        {
+          "vuid": "VUID-vkAcquireNextImageKHR-semaphore-01286",
+          "text": " If <code>semaphore</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a> it <strong class=\"purple\">must</strong> be unsignaled"
+        },
+        {
+          "vuid": "VUID-vkAcquireNextImageKHR-semaphore-01779",
+          "text": " If <code>semaphore</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a> it <strong class=\"purple\">must</strong> not have any uncompleted signal or wait operations pending"
+        },
+        {
+          "vuid": "VUID-vkAcquireNextImageKHR-fence-01287",
+          "text": " If <code>fence</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a> it <strong class=\"purple\">must</strong> be unsignaled and <strong class=\"purple\">must</strong> not be associated with any other queue command that has not yet completed execution on that queue"
+        },
+        {
+          "vuid": "VUID-vkAcquireNextImageKHR-semaphore-01780",
+          "text": " <code>semaphore</code> and <code>fence</code> <strong class=\"purple\">must</strong> not both be equal to <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>"
+        },
+        {
+          "vuid": "VUID-vkAcquireNextImageKHR-swapchain-01802",
+          "text": " If the number of currently acquired images is greater than the difference between the number of images in <code>swapchain</code> and the value of <a href=\"#VkSurfaceCapabilitiesKHR\">VkSurfaceCapabilitiesKHR</a>::<code>minImageCount</code> as returned by a call to <a href=\"#vkGetPhysicalDeviceSurfaceCapabilities2KHR\">vkGetPhysicalDeviceSurfaceCapabilities2KHR</a> with the <code>surface</code> used to create <code>swapchain</code>, <code>timeout</code> <strong class=\"purple\">must</strong> not be <code>UINT64_MAX</code>"
+        },
+        {
+          "vuid": "VUID-vkAcquireNextImageKHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkAcquireNextImageKHR-swapchain-parameter",
+          "text": " <code>swapchain</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSwapchainKHR\">VkSwapchainKHR</a> handle"
+        },
+        {
+          "vuid": "VUID-vkAcquireNextImageKHR-semaphore-parameter",
+          "text": " If <code>semaphore</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>semaphore</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSemaphore\">VkSemaphore</a> handle"
+        },
+        {
+          "vuid": "VUID-vkAcquireNextImageKHR-fence-parameter",
+          "text": " If <code>fence</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>fence</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFence\">VkFence</a> handle"
+        },
+        {
+          "vuid": "VUID-vkAcquireNextImageKHR-pImageIndex-parameter",
+          "text": " <code>pImageIndex</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkAcquireNextImageKHR-semaphore-parent",
+          "text": " If <code>semaphore</code> is a valid handle, it <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        },
+        {
+          "vuid": "VUID-vkAcquireNextImageKHR-fence-parent",
+          "text": " If <code>fence</code> is a valid handle, it <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>device</code>"
+        },
+        {
+          "vuid": "VUID-vkAcquireNextImageKHR-commonparent",
+          "text": " Both of <code>device</code>, and <code>swapchain</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkInstance\">VkInstance</a>"
+        }
+      ],
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_KHR_timeline_semaphore)": [
+        {
+          "vuid": "VUID-vkAcquireNextImageKHR-semaphore-03265",
+          "text": " <code>semaphore</code> <strong class=\"purple\">must</strong> have a <a href=\"#VkSemaphoreTypeKHR\">VkSemaphoreTypeKHR</a> of <code>VK_SEMAPHORE_TYPE_BINARY_KHR</code>"
+        }
+      ]
+    },
+    "vkAcquireNextImage2KHR": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_VERSION_1_1,VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-vkAcquireNextImage2KHR-swapchain-01803",
+          "text": " If the number of currently acquired images is greater than the difference between the number of images in the <code>swapchain</code> member of <code>pAcquireInfo</code> and the value of <a href=\"#VkSurfaceCapabilitiesKHR\">VkSurfaceCapabilitiesKHR</a>::<code>minImageCount</code> as returned by a call to <a href=\"#vkGetPhysicalDeviceSurfaceCapabilities2KHR\">vkGetPhysicalDeviceSurfaceCapabilities2KHR</a> with the <code>surface</code> used to create <code>swapchain</code>, the <code>timeout</code> member of <code>pAcquireInfo</code> <strong class=\"purple\">must</strong> not be <code>UINT64_MAX</code>"
+        },
+        {
+          "vuid": "VUID-vkAcquireNextImage2KHR-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkAcquireNextImage2KHR-pAcquireInfo-parameter",
+          "text": " <code>pAcquireInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAcquireNextImageInfoKHR\">VkAcquireNextImageInfoKHR</a> structure"
+        },
+        {
+          "vuid": "VUID-vkAcquireNextImage2KHR-pImageIndex-parameter",
+          "text": " <code>pImageIndex</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        }
+      ]
+    },
+    "VkAcquireNextImageInfoKHR": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_VERSION_1_1,VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-VkAcquireNextImageInfoKHR-swapchain-01675",
+          "text": " <code>swapchain</code> <strong class=\"purple\">must</strong> not be in the retired state"
+        },
+        {
+          "vuid": "VUID-VkAcquireNextImageInfoKHR-semaphore-01288",
+          "text": " If <code>semaphore</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a> it <strong class=\"purple\">must</strong> be unsignaled"
+        },
+        {
+          "vuid": "VUID-VkAcquireNextImageInfoKHR-semaphore-01781",
+          "text": " If <code>semaphore</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a> it <strong class=\"purple\">must</strong> not have any uncompleted signal or wait operations pending"
+        },
+        {
+          "vuid": "VUID-VkAcquireNextImageInfoKHR-fence-01289",
+          "text": " If <code>fence</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a> it <strong class=\"purple\">must</strong> be unsignaled and <strong class=\"purple\">must</strong> not be associated with any other queue command that has not yet completed execution on that queue"
+        },
+        {
+          "vuid": "VUID-VkAcquireNextImageInfoKHR-semaphore-01782",
+          "text": " <code>semaphore</code> and <code>fence</code> <strong class=\"purple\">must</strong> not both be equal to <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>"
+        },
+        {
+          "vuid": "VUID-VkAcquireNextImageInfoKHR-deviceMask-01290",
+          "text": " <code>deviceMask</code> <strong class=\"purple\">must</strong> be a valid device mask"
+        },
+        {
+          "vuid": "VUID-VkAcquireNextImageInfoKHR-deviceMask-01291",
+          "text": " <code>deviceMask</code> <strong class=\"purple\">must</strong> not be zero"
+        },
+        {
+          "vuid": "VUID-VkAcquireNextImageInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkAcquireNextImageInfoKHR-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkAcquireNextImageInfoKHR-swapchain-parameter",
+          "text": " <code>swapchain</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSwapchainKHR\">VkSwapchainKHR</a> handle"
+        },
+        {
+          "vuid": "VUID-VkAcquireNextImageInfoKHR-semaphore-parameter",
+          "text": " If <code>semaphore</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>semaphore</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSemaphore\">VkSemaphore</a> handle"
+        },
+        {
+          "vuid": "VUID-VkAcquireNextImageInfoKHR-fence-parameter",
+          "text": " If <code>fence</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>fence</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFence\">VkFence</a> handle"
+        },
+        {
+          "vuid": "VUID-VkAcquireNextImageInfoKHR-commonparent",
+          "text": " Each of <code>fence</code>, <code>semaphore</code>, and <code>swapchain</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkInstance\">VkInstance</a>"
+        }
+      ],
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_VERSION_1_1,VK_KHR_device_group)+(VK_KHR_timeline_semaphore)": [
+        {
+          "vuid": "VUID-VkAcquireNextImageInfoKHR-semaphore-03266",
+          "text": " <code>semaphore</code> <strong class=\"purple\">must</strong> have a <a href=\"#VkSemaphoreTypeKHR\">VkSemaphoreTypeKHR</a> of <code>VK_SEMAPHORE_TYPE_BINARY_KHR</code>"
+        }
+      ]
+    },
+    "vkQueuePresentKHR": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)": [
+        {
+          "vuid": "VUID-vkQueuePresentKHR-pSwapchains-01292",
+          "text": " Each element of <code>pSwapchains</code> member of <code>pPresentInfo</code> <strong class=\"purple\">must</strong> be a swapchain that is created for a surface for which presentation is supported from <code>queue</code> as determined using a call to <code>vkGetPhysicalDeviceSurfaceSupportKHR</code>"
+        },
+        {
+          "vuid": "VUID-vkQueuePresentKHR-pWaitSemaphores-01294",
+          "text": " When a semaphore wait operation referring to a binary semaphore defined by the elements of the <code>pWaitSemaphores</code> member of <code>pPresentInfo</code> executes on <code>queue</code>, there <strong class=\"purple\">must</strong> be no other queues waiting on the same semaphore."
+        },
+        {
+          "vuid": "VUID-vkQueuePresentKHR-pWaitSemaphores-01295",
+          "text": " All elements of the <code>pWaitSemaphores</code> member of <code>pPresentInfo</code> <strong class=\"purple\">must</strong> be semaphores that are signaled, or have <a href=\"#synchronization-semaphores-signaling\">semaphore signal operations</a> previously submitted for execution."
+        },
+        {
+          "vuid": "VUID-vkQueuePresentKHR-queue-parameter",
+          "text": " <code>queue</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkQueue\">VkQueue</a> handle"
+        },
+        {
+          "vuid": "VUID-vkQueuePresentKHR-pPresentInfo-parameter",
+          "text": " <code>pPresentInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkPresentInfoKHR\">VkPresentInfoKHR</a> structure"
+        }
+      ],
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_KHR_display_swapchain)": [
+        {
+          "vuid": "VUID-vkQueuePresentKHR-pSwapchains-01293",
+          "text": " If more than one member of <code>pSwapchains</code> was created from a display surface, all display surfaces referenced that refer to the same display <strong class=\"purple\">must</strong> use the same display mode"
+        }
+      ],
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_KHR_timeline_semaphore)": [
+        {
+          "vuid": "VUID-vkQueuePresentKHR-pWaitSemaphores-03267",
+          "text": " All elements of the <code>pWaitSemaphores</code> member of <code>pPresentInfo</code> <strong class=\"purple\">must</strong> be created with a <a href=\"#VkSemaphoreTypeKHR\">VkSemaphoreTypeKHR</a> of <code>VK_SEMAPHORE_TYPE_BINARY_KHR</code>."
+        },
+        {
+          "vuid": "VUID-vkQueuePresentKHR-pWaitSemaphores-03268",
+          "text": " All elements of the <code>pWaitSemaphores</code> member of <code>pPresentInfo</code> <strong class=\"purple\">must</strong> reference a semaphore signal operation that has been submitted for execution and any semaphore signal operations on which it depends (if any) <strong class=\"purple\">must</strong> have also been submitted for execution."
+        }
+      ]
+    },
+    "VkPresentInfoKHR": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+!(VK_KHR_shared_presentable_image)": [
+        {
+          "vuid": "VUID-VkPresentInfoKHR-pImageIndices-01296",
+          "text": " Each element of <code>pImageIndices</code> <strong class=\"purple\">must</strong> be the index of a presentable image acquired from the swapchain specified by the corresponding element of the <code>pSwapchains</code> array, and the presented image subresource <strong class=\"purple\">must</strong> be in the <code>VK_IMAGE_LAYOUT_PRESENT_SRC_KHR</code> layout at the time the operation is executed on a <code>VkDevice</code>"
+        }
+      ],
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_KHR_shared_presentable_image)": [
+        {
+          "vuid": "VUID-VkPresentInfoKHR-pImageIndices-01430",
+          "text": " Each element of <code>pImageIndices</code> <strong class=\"purple\">must</strong> be the index of a presentable image acquired from the swapchain specified by the corresponding element of the <code>pSwapchains</code> array, and the presented image subresource <strong class=\"purple\">must</strong> be in the <code>VK_IMAGE_LAYOUT_PRESENT_SRC_KHR</code> or <code>VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR</code> layout at the time the operation is executed on a <code>VkDevice</code>"
+        }
+      ],
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_KHR_timeline_semaphore)": [
+        {
+          "vuid": "VUID-VkPresentInfoKHR-pWaitSemaphores-03269",
+          "text": " All elements of the <code>pWaitSemaphores</code> <strong class=\"purple\">must</strong> have a <a href=\"#VkSemaphoreTypeKHR\">VkSemaphoreTypeKHR</a> of <code>VK_SEMAPHORE_TYPE_BINARY_KHR</code>"
+        }
+      ],
+      "(VK_KHR_surface)+(VK_KHR_swapchain)": [
+        {
+          "vuid": "VUID-VkPresentInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PRESENT_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkPresentInfoKHR-pNext-pNext",
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkDeviceGroupPresentInfoKHR\">VkDeviceGroupPresentInfoKHR</a>, <a href=\"#VkDisplayPresentInfoKHR\">VkDisplayPresentInfoKHR</a>, <a href=\"#VkPresentFrameTokenGGP\">VkPresentFrameTokenGGP</a>, <a href=\"#VkPresentRegionsKHR\">VkPresentRegionsKHR</a>, or <a href=\"#VkPresentTimesInfoGOOGLE\">VkPresentTimesInfoGOOGLE</a>"
+        },
+        {
+          "vuid": "VUID-VkPresentInfoKHR-sType-unique",
+          "text": " Each <code>sType</code> member in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
+        },
+        {
+          "vuid": "VUID-VkPresentInfoKHR-pWaitSemaphores-parameter",
+          "text": " If <code>waitSemaphoreCount</code> is not <code>0</code>, <code>pWaitSemaphores</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>waitSemaphoreCount</code> valid <a href=\"#VkSemaphore\">VkSemaphore</a> handles"
+        },
+        {
+          "vuid": "VUID-VkPresentInfoKHR-pSwapchains-parameter",
+          "text": " <code>pSwapchains</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>swapchainCount</code> valid <a href=\"#VkSwapchainKHR\">VkSwapchainKHR</a> handles"
+        },
+        {
+          "vuid": "VUID-VkPresentInfoKHR-pImageIndices-parameter",
+          "text": " <code>pImageIndices</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>swapchainCount</code> <code>uint32_t</code> values"
+        },
+        {
+          "vuid": "VUID-VkPresentInfoKHR-pResults-parameter",
+          "text": " If <code>pResults</code> is not <code>NULL</code>, <code>pResults</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>swapchainCount</code> <a href=\"#VkResult\">VkResult</a> values"
+        },
+        {
+          "vuid": "VUID-VkPresentInfoKHR-swapchainCount-arraylength",
+          "text": " <code>swapchainCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkPresentInfoKHR-commonparent",
+          "text": " Both of the elements of <code>pSwapchains</code>, and the elements of <code>pWaitSemaphores</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkInstance\">VkInstance</a>"
+        }
+      ]
+    },
+    "VkPresentRegionsKHR": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_KHR_incremental_present)": [
+        {
+          "vuid": "VUID-VkPresentRegionsKHR-swapchainCount-01260",
+          "text": " <code>swapchainCount</code> <strong class=\"purple\">must</strong> be the same value as <code>VkPresentInfoKHR</code>::<code>swapchainCount</code>, where <code>VkPresentInfoKHR</code> is included in the <code>pNext</code> chain of this <code>VkPresentRegionsKHR</code> structure"
+        },
+        {
+          "vuid": "VUID-VkPresentRegionsKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkPresentRegionsKHR-pRegions-parameter",
+          "text": " If <code>pRegions</code> is not <code>NULL</code>, <code>pRegions</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>swapchainCount</code> valid <a href=\"#VkPresentRegionKHR\">VkPresentRegionKHR</a> structures"
+        },
+        {
+          "vuid": "VUID-VkPresentRegionsKHR-swapchainCount-arraylength",
+          "text": " <code>swapchainCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkPresentRegionKHR": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_KHR_incremental_present)": [
+        {
+          "vuid": "VUID-VkPresentRegionKHR-pRectangles-parameter",
+          "text": " If <code>rectangleCount</code> is not <code>0</code>, and <code>pRectangles</code> is not <code>NULL</code>, <code>pRectangles</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>rectangleCount</code> valid <a href=\"#VkRectLayerKHR\">VkRectLayerKHR</a> structures"
+        }
+      ]
+    },
+    "VkRectLayerKHR": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_KHR_incremental_present)": [
+        {
+          "vuid": "VUID-VkRectLayerKHR-offset-01261",
+          "text": " The sum of <code>offset</code> and <code>extent</code> <strong class=\"purple\">must</strong> be no greater than the <code>imageExtent</code> member of the <code>VkSwapchainCreateInfoKHR</code> structure given to <a href=\"#vkCreateSwapchainKHR\">vkCreateSwapchainKHR</a>."
+        },
+        {
+          "vuid": "VUID-VkRectLayerKHR-layer-01262",
+          "text": " <code>layer</code> <strong class=\"purple\">must</strong> be less than <code>imageArrayLayers</code> member of the <code>VkSwapchainCreateInfoKHR</code> structure given to <a href=\"#vkCreateSwapchainKHR\">vkCreateSwapchainKHR</a>."
+        }
+      ]
+    },
+    "VkDisplayPresentInfoKHR": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_KHR_display_swapchain)": [
+        {
+          "vuid": "VUID-VkDisplayPresentInfoKHR-srcRect-01257",
+          "text": " <code>srcRect</code> <strong class=\"purple\">must</strong> specify a rectangular region that is a subset of the image being presented"
+        },
+        {
+          "vuid": "VUID-VkDisplayPresentInfoKHR-dstRect-01258",
+          "text": " <code>dstRect</code> <strong class=\"purple\">must</strong> specify a rectangular region that is a subset of the <code>visibleRegion</code> parameter of the display mode the swapchain being presented uses"
+        },
+        {
+          "vuid": "VUID-VkDisplayPresentInfoKHR-persistentContent-01259",
+          "text": " If the <code>persistentContent</code> member of the <code>VkDisplayPropertiesKHR</code> structure returned by <code>vkGetPhysicalDeviceDisplayPropertiesKHR</code> for the display the present operation targets then <code>persistent</code> <strong class=\"purple\">must</strong> be <code>VK_FALSE</code>"
+        },
+        {
+          "vuid": "VUID-VkDisplayPresentInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR</code>"
+        }
+      ]
+    },
+    "VkDeviceGroupPresentInfoKHR": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_VERSION_1_1,VK_KHR_device_group)": [
+        {
+          "vuid": "VUID-VkDeviceGroupPresentInfoKHR-swapchainCount-01297",
+          "text": " <code>swapchainCount</code> <strong class=\"purple\">must</strong> equal <code>0</code> or <a href=\"#VkPresentInfoKHR\">VkPresentInfoKHR</a>::<code>swapchainCount</code>"
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupPresentInfoKHR-mode-01298",
+          "text": " If <code>mode</code> is <code>VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR</code>, then each element of <code>pDeviceMasks</code> <strong class=\"purple\">must</strong> have exactly one bit set, and the corresponding element of <a href=\"#VkDeviceGroupPresentCapabilitiesKHR\">VkDeviceGroupPresentCapabilitiesKHR</a>::<code>presentMask</code> <strong class=\"purple\">must</strong> be non-zero"
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupPresentInfoKHR-mode-01299",
+          "text": " If <code>mode</code> is <code>VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR</code>, then each element of <code>pDeviceMasks</code> <strong class=\"purple\">must</strong> have exactly one bit set, and some physical device in the logical device <strong class=\"purple\">must</strong> include that bit in its <a href=\"#VkDeviceGroupPresentCapabilitiesKHR\">VkDeviceGroupPresentCapabilitiesKHR</a>::<code>presentMask</code>."
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupPresentInfoKHR-mode-01300",
+          "text": " If <code>mode</code> is <code>VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR</code>, then each element of <code>pDeviceMasks</code> <strong class=\"purple\">must</strong> have a value for which all set bits are set in one of the elements of <a href=\"#VkDeviceGroupPresentCapabilitiesKHR\">VkDeviceGroupPresentCapabilitiesKHR</a>::<code>presentMask</code>"
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupPresentInfoKHR-mode-01301",
+          "text": " If <code>mode</code> is <code>VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR</code>, then for each bit set in each element of <code>pDeviceMasks</code>, the corresponding element of <a href=\"#VkDeviceGroupPresentCapabilitiesKHR\">VkDeviceGroupPresentCapabilitiesKHR</a>::<code>presentMask</code> <strong class=\"purple\">must</strong> be non-zero"
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupPresentInfoKHR-pDeviceMasks-01302",
+          "text": " The value of each element of <code>pDeviceMasks</code> <strong class=\"purple\">must</strong> be equal to the device mask passed in <a href=\"#VkAcquireNextImageInfoKHR\">VkAcquireNextImageInfoKHR</a>::<code>deviceMask</code> when the image index was last acquired"
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupPresentInfoKHR-mode-01303",
+          "text": " <code>mode</code> <strong class=\"purple\">must</strong> have exactly one bit set, and that bit <strong class=\"purple\">must</strong> have been included in <a href=\"#VkDeviceGroupSwapchainCreateInfoKHR\">VkDeviceGroupSwapchainCreateInfoKHR</a>::<code>modes</code>"
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupPresentInfoKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR</code>"
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupPresentInfoKHR-pDeviceMasks-parameter",
+          "text": " If <code>swapchainCount</code> is not <code>0</code>, <code>pDeviceMasks</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>swapchainCount</code> <code>uint32_t</code> values"
+        },
+        {
+          "vuid": "VUID-VkDeviceGroupPresentInfoKHR-mode-parameter",
+          "text": " <code>mode</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDeviceGroupPresentModeFlagBitsKHR\">VkDeviceGroupPresentModeFlagBitsKHR</a> value"
+        }
+      ]
+    },
+    "VkPresentTimesInfoGOOGLE": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_GOOGLE_display_timing)": [
+        {
+          "vuid": "VUID-VkPresentTimesInfoGOOGLE-swapchainCount-01247",
+          "text": " <code>swapchainCount</code> <strong class=\"purple\">must</strong> be the same value as <code>VkPresentInfoKHR</code>::<code>swapchainCount</code>, where <code>VkPresentInfoKHR</code> is included in the <code>pNext</code> chain of this <code>VkPresentTimesInfoGOOGLE</code> structure."
+        },
+        {
+          "vuid": "VUID-VkPresentTimesInfoGOOGLE-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE</code>"
+        },
+        {
+          "vuid": "VUID-VkPresentTimesInfoGOOGLE-pTimes-parameter",
+          "text": " If <code>pTimes</code> is not <code>NULL</code>, <code>pTimes</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>swapchainCount</code> <a href=\"#VkPresentTimeGOOGLE\">VkPresentTimeGOOGLE</a> structures"
+        },
+        {
+          "vuid": "VUID-VkPresentTimesInfoGOOGLE-swapchainCount-arraylength",
+          "text": " <code>swapchainCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkPresentFrameTokenGGP": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_GGP_frame_token)": [
+        {
+          "vuid": "VUID-VkPresentFrameTokenGGP-frameToken-02680",
+          "text": " <code>frameToken</code> <strong class=\"purple\">must</strong> be a valid <code>GgpFrameToken</code>"
+        },
+        {
+          "vuid": "VUID-VkPresentFrameTokenGGP-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP</code>"
+        }
+      ]
+    },
+    "vkSetHdrMetadataEXT": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_EXT_hdr_metadata)": [
+        {
+          "vuid": "VUID-vkSetHdrMetadataEXT-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkSetHdrMetadataEXT-pSwapchains-parameter",
+          "text": " <code>pSwapchains</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>swapchainCount</code> valid <a href=\"#VkSwapchainKHR\">VkSwapchainKHR</a> handles"
+        },
+        {
+          "vuid": "VUID-vkSetHdrMetadataEXT-pMetadata-parameter",
+          "text": " <code>pMetadata</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>swapchainCount</code> valid <a href=\"#VkHdrMetadataEXT\">VkHdrMetadataEXT</a> structures"
+        },
+        {
+          "vuid": "VUID-vkSetHdrMetadataEXT-swapchainCount-arraylength",
+          "text": " <code>swapchainCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkSetHdrMetadataEXT-commonparent",
+          "text": " Both of <code>device</code>, and the elements of <code>pSwapchains</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkInstance\">VkInstance</a>"
+        }
+      ]
+    },
+    "VkHdrMetadataEXT": {
+      "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_EXT_hdr_metadata)": [
+        {
+          "vuid": "VUID-VkHdrMetadataEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_HDR_METADATA_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkHdrMetadataEXT-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "vkCmdTraceRaysNV": {
+      "(VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-None-02690",
+          "text": " If a <code>VkImageView</code> is sampled with <code>VK_FILTER_LINEAR</code> as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-None-02691",
+          "text": " If a <code>VkImageView</code> is accessed using atomic operations as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-None-02697",
+          "text": " For each set <em>n</em> that is statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command, a descriptor set <strong class=\"purple\">must</strong> have been bound to <em>n</em> at the same pipeline bind point, with a <code>VkPipelineLayout</code> that is compatible for set <em>n</em>, with the <code>VkPipelineLayout</code> used to create the current <code>VkPipeline</code>, as described in <a href=\"#descriptorsets-compatibility\">Pipeline Layout Compatibility</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-None-02698",
+          "text": " For each push constant that is statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command, a push constant value <strong class=\"purple\">must</strong> have been set for the same pipeline bind point, with a <code>VkPipelineLayout</code> that is compatible for push constants, with the <code>VkPipelineLayout</code> used to create the current <code>VkPipeline</code>, as described in <a href=\"#descriptorsets-compatibility\">Pipeline Layout Compatibility</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-None-02699",
+          "text": " Descriptors in each bound descriptor set, specified via <code>vkCmdBindDescriptorSets</code>, <strong class=\"purple\">must</strong> be valid if they are statically used by the <code>VkPipeline</code> bound to the pipeline bind point used by this command"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-None-02700",
+          "text": " A valid pipeline <strong class=\"purple\">must</strong> be bound to the pipeline bind point used by this command"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-commandBuffer-02701",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command requires any dynamic state, that state <strong class=\"purple\">must</strong> have been set for <code>commandBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-None-02702",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used to sample from any <code>VkImage</code> with a <code>VkImageView</code> of the type <code>VK_IMAGE_VIEW_TYPE_3D</code>, <code>VK_IMAGE_VIEW_TYPE_CUBE</code>, <code>VK_IMAGE_VIEW_TYPE_1D_ARRAY</code>, <code>VK_IMAGE_VIEW_TYPE_2D_ARRAY</code> or <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-None-02703",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used with any of the SPIR-V <code>OpImageSample*</code> or <code>OpImageSparseSample*</code> instructions with <code>ImplicitLod</code>, <code>Dref</code> or <code>Proj</code> in their name, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-None-02704",
+          "text": " If the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a <code>VkSampler</code> object that uses unnormalized coordinates, that sampler <strong class=\"purple\">must</strong> not be used with any of the SPIR-V <code>OpImageSample*</code> or <code>OpImageSparseSample*</code> instructions that includes a LOD bias or any offset values, in any shader stage"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-None-02705",
+          "text": " If the <a href=\"#features-robustBufferAccess\">robust buffer access</a> feature is not enabled, and if the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a uniform buffer, it <strong class=\"purple\">must</strong> not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-None-02706",
+          "text": " If the <a href=\"#features-robustBufferAccess\">robust buffer access</a> feature is not enabled, and if the <code>VkPipeline</code> object bound to the pipeline bind point used by this command accesses a storage buffer, it <strong class=\"purple\">must</strong> not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-raygenShaderBindingOffset-02455",
+          "text": " <code>raygenShaderBindingOffset</code> <strong class=\"purple\">must</strong> be less than the size of <code>raygenShaderBindingTableBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-raygenShaderBindingOffset-02456",
+          "text": " <code>raygenShaderBindingOffset</code> <strong class=\"purple\">must</strong> be a multiple of <code>VkPhysicalDeviceRayTracingPropertiesNV</code>::<code>shaderGroupBaseAlignment</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-missShaderBindingOffset-02457",
+          "text": " <code>missShaderBindingOffset</code> <strong class=\"purple\">must</strong> be less than the size of <code>missShaderBindingTableBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-missShaderBindingOffset-02458",
+          "text": " <code>missShaderBindingOffset</code> <strong class=\"purple\">must</strong> be a multiple of <code>VkPhysicalDeviceRayTracingPropertiesNV</code>::<code>shaderGroupBaseAlignment</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-hitShaderBindingOffset-02459",
+          "text": " <code>hitShaderBindingOffset</code> <strong class=\"purple\">must</strong> be less than the size of <code>hitShaderBindingTableBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-hitShaderBindingOffset-02460",
+          "text": " <code>hitShaderBindingOffset</code> <strong class=\"purple\">must</strong> be a multiple of <code>VkPhysicalDeviceRayTracingPropertiesNV</code>::<code>shaderGroupBaseAlignment</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-callableShaderBindingOffset-02461",
+          "text": " <code>callableShaderBindingOffset</code> <strong class=\"purple\">must</strong> be less than the size of <code>callableShaderBindingTableBuffer</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-callableShaderBindingOffset-02462",
+          "text": " <code>callableShaderBindingOffset</code> <strong class=\"purple\">must</strong> be a multiple of <code>VkPhysicalDeviceRayTracingPropertiesNV</code>::<code>shaderGroupBaseAlignment</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-missShaderBindingStride-02463",
+          "text": " <code>missShaderBindingStride</code> <strong class=\"purple\">must</strong> be a multiple of <code>VkPhysicalDeviceRayTracingPropertiesNV</code>::<code>shaderGroupHandleSize</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-hitShaderBindingStride-02464",
+          "text": " <code>hitShaderBindingStride</code> <strong class=\"purple\">must</strong> be a multiple of <code>VkPhysicalDeviceRayTracingPropertiesNV</code>::<code>shaderGroupHandleSize</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-callableShaderBindingStride-02465",
+          "text": " <code>callableShaderBindingStride</code> <strong class=\"purple\">must</strong> be a multiple of <code>VkPhysicalDeviceRayTracingPropertiesNV</code>::<code>shaderGroupHandleSize</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-missShaderBindingStride-02466",
+          "text": " <code>missShaderBindingStride</code> <strong class=\"purple\">must</strong> be a less than or equal to <code>VkPhysicalDeviceRayTracingPropertiesNV</code>::<code>maxShaderGroupStride</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-hitShaderBindingStride-02467",
+          "text": " <code>hitShaderBindingStride</code> <strong class=\"purple\">must</strong> be a less than or equal to <code>VkPhysicalDeviceRayTracingPropertiesNV</code>::<code>maxShaderGroupStride</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-callableShaderBindingStride-02468",
+          "text": " <code>callableShaderBindingStride</code> <strong class=\"purple\">must</strong> be a less than or equal to <code>VkPhysicalDeviceRayTracingPropertiesNV</code>::<code>maxShaderGroupStride</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-width-02469",
+          "text": " <code>width</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxComputeWorkGroupCount</code>[0]"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-height-02470",
+          "text": " <code>height</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxComputeWorkGroupCount</code>[1]"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-depth-02471",
+          "text": " <code>depth</code> <strong class=\"purple\">must</strong> be less than or equal to <code>VkPhysicalDeviceLimits</code>::<code>maxComputeWorkGroupCount</code>[2]"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-raygenShaderBindingTableBuffer-parameter",
+          "text": " <code>raygenShaderBindingTableBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-missShaderBindingTableBuffer-parameter",
+          "text": " If <code>missShaderBindingTableBuffer</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>missShaderBindingTableBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-hitShaderBindingTableBuffer-parameter",
+          "text": " If <code>hitShaderBindingTableBuffer</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>hitShaderBindingTableBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-callableShaderBindingTableBuffer-parameter",
+          "text": " If <code>callableShaderBindingTableBuffer</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>callableShaderBindingTableBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called outside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-commonparent",
+          "text": " Each of <code>callableShaderBindingTableBuffer</code>, <code>commandBuffer</code>, <code>hitShaderBindingTableBuffer</code>, <code>missShaderBindingTableBuffer</code>, and <code>raygenShaderBindingTableBuffer</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ],
+      "(VK_NV_ray_tracing)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-None-02692",
+          "text": " If a <code>VkImageView</code> is sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command, then the image view&#8217;s <a href=\"#resources-image-view-format-features\">format features</a> <strong class=\"purple\">must</strong> contain <code>VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT</code>"
+        }
+      ],
+      "(VK_NV_ray_tracing)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+!(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-None-02693",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command <strong class=\"purple\">must</strong> not have a <a href=\"#VkImageViewType\">VkImageViewType</a> of <code>VK_IMAGE_VIEW_TYPE_3D</code>, <code>VK_IMAGE_VIEW_TYPE_CUBE</code>, or <code>VK_IMAGE_VIEW_TYPE_CUBE_ARRAY</code>"
+        }
+      ],
+      "(VK_NV_ray_tracing)+(VK_IMG_filter_cubic,VK_EXT_filter_cubic)+(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-filterCubic-02694",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> as a result of this command <strong class=\"purple\">must</strong> have a <a href=\"#VkImageViewType\">VkImageViewType</a> and format that supports cubic filtering, as specified by <code>VkFilterCubicImageViewImageFormatPropertiesEXT</code>::<code>filterCubic</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties2</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-filterCubicMinmax-02695",
+          "text": " Any <a href=\"#VkImageView\">VkImageView</a> being sampled with <code>VK_FILTER_CUBIC_EXT</code> with a reduction mode of either <code>VK_SAMPLER_REDUCTION_MODE_MIN_EXT</code> or <code>VK_SAMPLER_REDUCTION_MODE_MAX_EXT</code> as a result of this command <strong class=\"purple\">must</strong> have a <a href=\"#VkImageViewType\">VkImageViewType</a> and format that supports cubic filtering together with minmax filtering, as specified by <code>VkFilterCubicImageViewImageFormatPropertiesEXT</code>::<code>filterCubicMinmax</code> returned by <code>vkGetPhysicalDeviceImageFormatProperties2</code>"
+        }
+      ],
+      "(VK_NV_ray_tracing)+(VK_NV_corner_sampled_image)": [
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-flags-02696",
+          "text": " Any <a href=\"#VkImage\">VkImage</a> created with a <a href=\"#VkImageCreateInfo\">VkImageCreateInfo</a>::<code>flags</code> containing <code>VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV</code> sampled as a result of this command <strong class=\"purple\">must</strong> only be sampled using a <a href=\"#VkSamplerAddressMode\">VkSamplerAddressMode</a> of <code>VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE</code>."
+        }
+      ],
+      "(VK_NV_ray_tracing)+(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-commandBuffer-02707",
+          "text": " If <code>commandBuffer</code> is an unprotected command buffer, any resource accessed by the <code>VkPipeline</code> object bound to the pipeline bind point used by this command <strong class=\"purple\">must</strong> not be a protected resource"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-commandBuffer-02712",
+          "text": " If <code>commandBuffer</code> is a protected command buffer, any resource written to by the <code>VkPipeline</code> object bound to the pipeline bind point used by this command <strong class=\"purple\">must</strong> not be an unprotected resource"
+        },
+        {
+          "vuid": "VUID-vkCmdTraceRaysNV-commandBuffer-02713",
+          "text": " If <code>commandBuffer</code> is a protected command buffer, pipeline stages other than the framebuffer-space and compute stages in the <code>VkPipeline</code> object bound to the pipeline bind point <strong class=\"purple\">must</strong> not write to any resource"
+        }
+      ]
+    },
+    "vkCmdBuildAccelerationStructureNV": {
+      "(VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-vkCmdBuildAccelerationStructureNV-geometryCount-02241",
+          "text": " <code>geometryCount</code> <strong class=\"purple\">must</strong> be less than or equal to <a href=\"#VkPhysicalDeviceRayTracingPropertiesNV\">VkPhysicalDeviceRayTracingPropertiesNV</a>::<code>maxGeometryCount</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBuildAccelerationStructureNV-dst-02488",
+          "text": " <code>dst</code> <strong class=\"purple\">must</strong> have been created with compatible <a href=\"#VkAccelerationStructureInfoNV\">VkAccelerationStructureInfoNV</a> where <a href=\"#VkAccelerationStructureInfoNV\">VkAccelerationStructureInfoNV</a>::<code>type</code> and <a href=\"#VkAccelerationStructureInfoNV\">VkAccelerationStructureInfoNV</a>::<code>flags</code> are identical, <a href=\"#VkAccelerationStructureInfoNV\">VkAccelerationStructureInfoNV</a>::<code>instanceCount</code> and <a href=\"#VkAccelerationStructureInfoNV\">VkAccelerationStructureInfoNV</a>::<code>geometryCount</code> for <code>dst</code> are greater than or equal to the build size and each geometry in <a href=\"#VkAccelerationStructureInfoNV\">VkAccelerationStructureInfoNV</a>::<code>pGeometries</code> for <code>dst</code> has greater than or equal to the number of vertices, indices, and AABBs."
+        },
+        {
+          "vuid": "VUID-vkCmdBuildAccelerationStructureNV-update-02489",
+          "text": " If <code>update</code> is <code>VK_TRUE</code>, <code>src</code> <strong class=\"purple\">must</strong> not be <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdBuildAccelerationStructureNV-update-02490",
+          "text": " If <code>update</code> is <code>VK_TRUE</code>, <code>src</code> <strong class=\"purple\">must</strong> have been built before with <code>VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV</code> set in <a href=\"#VkAccelerationStructureInfoNV\">VkAccelerationStructureInfoNV</a>::<code>flags</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBuildAccelerationStructureNV-update-02491",
+          "text": " If <code>update</code> is <code>VK_FALSE</code>, The <code>size</code> member of the <a href=\"#VkMemoryRequirements\">VkMemoryRequirements</a> structure returned from a call to <a href=\"#vkGetAccelerationStructureMemoryRequirementsNV\">vkGetAccelerationStructureMemoryRequirementsNV</a> with <a href=\"#VkAccelerationStructureMemoryRequirementsInfoNV\">VkAccelerationStructureMemoryRequirementsInfoNV</a>::<code>accelerationStructure</code> set to <code>dst</code> and <a href=\"#VkAccelerationStructureMemoryRequirementsInfoNV\">VkAccelerationStructureMemoryRequirementsInfoNV</a>::<code>type</code> set to <code>VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV</code> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>scratch</code> minus <code>scratchOffset</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBuildAccelerationStructureNV-update-02492",
+          "text": " If <code>update</code> is <code>VK_TRUE</code>, The <code>size</code> member of the <a href=\"#VkMemoryRequirements\">VkMemoryRequirements</a> structure returned from a call to <a href=\"#vkGetAccelerationStructureMemoryRequirementsNV\">vkGetAccelerationStructureMemoryRequirementsNV</a> with <a href=\"#VkAccelerationStructureMemoryRequirementsInfoNV\">VkAccelerationStructureMemoryRequirementsInfoNV</a>::<code>accelerationStructure</code> set to <code>dst</code> and <a href=\"#VkAccelerationStructureMemoryRequirementsInfoNV\">VkAccelerationStructureMemoryRequirementsInfoNV</a>::<code>type</code> set to <code>VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV</code> <strong class=\"purple\">must</strong> be less than or equal to the size of <code>scratch</code> minus <code>scratchOffset</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdBuildAccelerationStructureNV-pInfo-parameter",
+          "text": " <code>pInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAccelerationStructureInfoNV\">VkAccelerationStructureInfoNV</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCmdBuildAccelerationStructureNV-instanceData-parameter",
+          "text": " If <code>instanceData</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>instanceData</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdBuildAccelerationStructureNV-dst-parameter",
+          "text": " <code>dst</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkAccelerationStructureNV\">VkAccelerationStructureNV</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdBuildAccelerationStructureNV-src-parameter",
+          "text": " If <code>src</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>src</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkAccelerationStructureNV\">VkAccelerationStructureNV</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdBuildAccelerationStructureNV-scratch-parameter",
+          "text": " <code>scratch</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkBuffer\">VkBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdBuildAccelerationStructureNV-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called outside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdBuildAccelerationStructureNV-commonparent",
+          "text": " Each of <code>commandBuffer</code>, <code>dst</code>, <code>instanceData</code>, <code>scratch</code>, and <code>src</code> that are valid handles of non-ignored parameters <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
+    "vkCmdWriteAccelerationStructuresPropertiesNV": {
+      "(VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryType-02242",
+          "text": " <code>queryType</code> <strong class=\"purple\">must</strong> be <code>VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryPool-02493",
+          "text": " <code>queryPool</code> <strong class=\"purple\">must</strong> have been created with a <code>queryType</code> matching <code>queryType</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryPool-02494",
+          "text": " The queries identified by <code>queryPool</code> and <code>firstQuery</code> <strong class=\"purple\">must</strong> be <em>unavailable</em>"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-accelerationStructures-02495",
+          "text": " All acceleration structures in <code>accelerationStructures</code> <strong class=\"purple\">must</strong> have been built with <code>VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV</code> if <code>queryType</code> is <code>VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-pAccelerationStructures-parameter",
+          "text": " <code>pAccelerationStructures</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>accelerationStructureCount</code> valid <a href=\"#VkAccelerationStructureNV\">VkAccelerationStructureNV</a> handles"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryType-parameter",
+          "text": " <code>queryType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkQueryType\">VkQueryType</a> value"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryPool-parameter",
+          "text": " <code>queryPool</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkQueryPool\">VkQueryPool</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called outside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-accelerationStructureCount-arraylength",
+          "text": " <code>accelerationStructureCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commonparent",
+          "text": " Each of <code>commandBuffer</code>, <code>queryPool</code>, and the elements of <code>pAccelerationStructures</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
+    "vkCmdCopyAccelerationStructureNV": {
+      "(VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-vkCmdCopyAccelerationStructureNV-mode-02496",
+          "text": " <code>mode</code> <strong class=\"purple\">must</strong> be <code>VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV</code> or <code>VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyAccelerationStructureNV-src-02497",
+          "text": " <code>src</code> <strong class=\"purple\">must</strong> have been built with <code>VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV</code> if <code>mode</code> is <code>VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV</code>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyAccelerationStructureNV-dst-parameter",
+          "text": " <code>dst</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkAccelerationStructureNV\">VkAccelerationStructureNV</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyAccelerationStructureNV-src-parameter",
+          "text": " <code>src</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkAccelerationStructureNV\">VkAccelerationStructureNV</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyAccelerationStructureNV-mode-parameter",
+          "text": " <code>mode</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCopyAccelerationStructureModeNV\">VkCopyAccelerationStructureModeNV</a> value"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support compute operations"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyAccelerationStructureNV-renderpass",
+          "text": " This command <strong class=\"purple\">must</strong> only be called outside of a render pass instance"
+        },
+        {
+          "vuid": "VUID-vkCmdCopyAccelerationStructureNV-commonparent",
+          "text": " Each of <code>commandBuffer</code>, <code>dst</code>, and <code>src</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from the same <a href=\"#VkDevice\">VkDevice</a>"
+        }
+      ]
+    },
+    "vkEnumerateInstanceLayerProperties": {
+      "core": [
+        {
+          "vuid": "VUID-vkEnumerateInstanceLayerProperties-pPropertyCount-parameter",
+          "text": " <code>pPropertyCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkEnumerateInstanceLayerProperties-pProperties-parameter",
+          "text": " If the value referenced by <code>pPropertyCount</code> is not <code>0</code>, and <code>pProperties</code> is not <code>NULL</code>, <code>pProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pPropertyCount</code> <a href=\"#VkLayerProperties\">VkLayerProperties</a> structures"
+        }
+      ]
+    },
+    "vkEnumerateDeviceLayerProperties": {
+      "core": [
+        {
+          "vuid": "VUID-vkEnumerateDeviceLayerProperties-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkEnumerateDeviceLayerProperties-pPropertyCount-parameter",
+          "text": " <code>pPropertyCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkEnumerateDeviceLayerProperties-pProperties-parameter",
+          "text": " If the value referenced by <code>pPropertyCount</code> is not <code>0</code>, and <code>pProperties</code> is not <code>NULL</code>, <code>pProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pPropertyCount</code> <a href=\"#VkLayerProperties\">VkLayerProperties</a> structures"
+        }
+      ]
+    },
+    "vkEnumerateInstanceExtensionProperties": {
+      "core": [
+        {
+          "vuid": "VUID-vkEnumerateInstanceExtensionProperties-pLayerName-parameter",
+          "text": " If <code>pLayerName</code> is not <code>NULL</code>, <code>pLayerName</code> <strong class=\"purple\">must</strong> be a null-terminated UTF-8 string"
+        },
+        {
+          "vuid": "VUID-vkEnumerateInstanceExtensionProperties-pPropertyCount-parameter",
+          "text": " <code>pPropertyCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkEnumerateInstanceExtensionProperties-pProperties-parameter",
+          "text": " If the value referenced by <code>pPropertyCount</code> is not <code>0</code>, and <code>pProperties</code> is not <code>NULL</code>, <code>pProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pPropertyCount</code> <a href=\"#VkExtensionProperties\">VkExtensionProperties</a> structures"
+        }
+      ]
+    },
+    "vkEnumerateDeviceExtensionProperties": {
+      "core": [
+        {
+          "vuid": "VUID-vkEnumerateDeviceExtensionProperties-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkEnumerateDeviceExtensionProperties-pLayerName-parameter",
+          "text": " If <code>pLayerName</code> is not <code>NULL</code>, <code>pLayerName</code> <strong class=\"purple\">must</strong> be a null-terminated UTF-8 string"
+        },
+        {
+          "vuid": "VUID-vkEnumerateDeviceExtensionProperties-pPropertyCount-parameter",
+          "text": " <code>pPropertyCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkEnumerateDeviceExtensionProperties-pProperties-parameter",
+          "text": " If the value referenced by <code>pPropertyCount</code> is not <code>0</code>, and <code>pProperties</code> is not <code>NULL</code>, <code>pProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pPropertyCount</code> <a href=\"#VkExtensionProperties\">VkExtensionProperties</a> structures"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceFeatures": {
+      "core": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceFeatures-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceFeatures-pFeatures-parameter",
+          "text": " <code>pFeatures</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkPhysicalDeviceFeatures\">VkPhysicalDeviceFeatures</a> structure"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceFeatures2": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceFeatures2-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceFeatures2-pFeatures-parameter",
+          "text": " <code>pFeatures</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkPhysicalDeviceFeatures2\">VkPhysicalDeviceFeatures2</a> structure"
+        }
+      ]
+    },
+    "VkPhysicalDeviceFeatures2": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceFeatures2-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceVariablePointersFeatures": {
+      "(VK_VERSION_1_1,VK_KHR_variable_pointers)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceVariablePointersFeatures-variablePointers-01431",
+          "text": " If <code>variablePointers</code> is enabled then <code>variablePointersStorageBuffer</code> <strong class=\"purple\">must</strong> also be enabled."
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceVariablePointersFeatures-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceMultiviewFeatures": {
+      "(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceMultiviewFeatures-multiviewGeometryShader-00580",
+          "text": " If <code>multiviewGeometryShader</code> is enabled then <code>multiview</code> <strong class=\"purple\">must</strong> also be enabled."
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceMultiviewFeatures-multiviewTessellationShader-00581",
+          "text": " If <code>multiviewTessellationShader</code> is enabled then <code>multiview</code> <strong class=\"purple\">must</strong> also be enabled."
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceMultiviewFeatures-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceShaderAtomicInt64FeaturesKHR": {
+      "(VK_KHR_shader_atomic_int64)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceShaderAtomicInt64FeaturesKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR</code>"
+        }
+      ]
+    },
+    "VkPhysicalDevice8BitStorageFeaturesKHR": {
+      "(VK_KHR_8bit_storage)": [
+        {
+          "vuid": "VUID-VkPhysicalDevice8BitStorageFeaturesKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR</code>"
+        }
+      ]
+    },
+    "VkPhysicalDevice16BitStorageFeatures": {
+      "(VK_VERSION_1_1,VK_KHR_16bit_storage)": [
+        {
+          "vuid": "VUID-VkPhysicalDevice16BitStorageFeatures-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceShaderFloat16Int8FeaturesKHR": {
+      "(VK_KHR_shader_float16_int8)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceShaderFloat16Int8FeaturesKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceShaderClockFeaturesKHR": {
+      "(VK_KHR_shader_clock)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceShaderClockFeaturesKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceSamplerYcbcrConversionFeatures": {
+      "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceSamplerYcbcrConversionFeatures-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceProtectedMemoryFeatures": {
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceProtectedMemoryFeatures-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT": {
+      "(VK_EXT_blend_operation_advanced)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceConditionalRenderingFeaturesEXT": {
+      "(VK_EXT_conditional_rendering)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceConditionalRenderingFeaturesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceShaderDrawParametersFeatures": {
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceShaderDrawParametersFeatures-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceMeshShaderFeaturesNV": {
+      "(VK_NV_mesh_shader)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceMeshShaderFeaturesNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceDescriptorIndexingFeaturesEXT": {
+      "(VK_EXT_descriptor_indexing)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceDescriptorIndexingFeaturesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT": {
+      "(VK_EXT_vertex_attribute_divisor)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceASTCDecodeFeaturesEXT": {
+      "(VK_EXT_astc_decode_mode)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceASTCDecodeFeaturesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceTransformFeedbackFeaturesEXT": {
+      "(VK_EXT_transform_feedback)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceTransformFeedbackFeaturesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceVulkanMemoryModelFeaturesKHR": {
+      "(VK_KHR_vulkan_memory_model)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceVulkanMemoryModelFeaturesKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceInlineUniformBlockFeaturesEXT": {
+      "(VK_EXT_inline_uniform_block)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceInlineUniformBlockFeaturesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV": {
+      "(VK_NV_representative_fragment_test)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceExclusiveScissorFeaturesNV": {
+      "(VK_NV_scissor_exclusive)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceExclusiveScissorFeaturesNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceCornerSampledImageFeaturesNV": {
+      "(VK_NV_corner_sampled_image)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceCornerSampledImageFeaturesNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceComputeShaderDerivativesFeaturesNV": {
+      "(VK_NV_compute_shader_derivatives)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceComputeShaderDerivativesFeaturesNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV": {
+      "(VK_NV_fragment_shader_barycentric)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceShaderImageFootprintFeaturesNV": {
+      "(VK_NV_shader_image_footprint)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceShaderImageFootprintFeaturesNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceShadingRateImageFeaturesNV": {
+      "(VK_NV_shading_rate_image)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceShadingRateImageFeaturesNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceFragmentDensityMapFeaturesEXT": {
+      "(VK_EXT_fragment_density_map)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceFragmentDensityMapFeaturesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceScalarBlockLayoutFeaturesEXT": {
+      "(VK_EXT_scalar_block_layout)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceScalarBlockLayoutFeaturesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR": {
+      "(VK_KHR_uniform_buffer_standard_layout)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceDepthClipEnableFeaturesEXT": {
+      "(VK_EXT_depth_clip_enable)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceDepthClipEnableFeaturesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceMemoryPriorityFeaturesEXT": {
+      "(VK_EXT_memory_priority)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceMemoryPriorityFeaturesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceBufferDeviceAddressFeaturesKHR": {
+      "(VK_EXT_buffer_device_address,VK_KHR_buffer_device_address)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceBufferDeviceAddressFeaturesKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceBufferDeviceAddressFeaturesEXT": {
+      "(VK_EXT_buffer_device_address,VK_KHR_buffer_device_address)+(VK_EXT_buffer_device_address)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceBufferDeviceAddressFeaturesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV": {
+      "(VK_NV_dedicated_allocation_image_aliasing)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceImagelessFramebufferFeaturesKHR": {
+      "(VK_KHR_imageless_framebuffer)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceImagelessFramebufferFeaturesKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT": {
+      "(VK_EXT_fragment_shader_interlock)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceCooperativeMatrixFeaturesNV": {
+      "(VK_NV_cooperative_matrix)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceCooperativeMatrixFeaturesNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceYcbcrImageArraysFeaturesEXT": {
+      "(VK_EXT_ycbcr_image_arrays)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceYcbcrImageArraysFeaturesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR": {
+      "(VK_VERSION_1_1)+(VK_KHR_shader_subgroup_extended_types)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceHostQueryResetFeaturesEXT": {
+      "(VK_EXT_host_query_reset)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceHostQueryResetFeaturesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL": {
+      "(VK_INTEL_shader_integer_functions2)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceCoverageReductionModeFeaturesNV": {
+      "(VK_NV_coverage_reduction_mode)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceCoverageReductionModeFeaturesNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceTimelineSemaphoreFeaturesKHR": {
+      "(VK_KHR_timeline_semaphore)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceTimelineSemaphoreFeaturesKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceIndexTypeUint8FeaturesEXT": {
+      "(VK_EXT_index_type_uint8)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceIndexTypeUint8FeaturesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceShaderSMBuiltinsFeaturesNV": {
+      "(VK_NV_shader_sm_builtins)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceShaderSMBuiltinsFeaturesNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR": {
+      "(VK_KHR_separate_depth_stencil_layouts)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR</code>"
+        }
+      ]
+    },
+    "VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR": {
+      "(VK_KHR_pipeline_executable_properties)": [
+        {
+          "vuid": "VUID-VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT": {
+      "(VK_EXT_shader_demote_to_helper_invocation)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT": {
+      "(VK_EXT_texel_buffer_alignment)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT": {
+      "(VK_EXT_texture_compression_astc_hdr)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceLineRasterizationFeaturesEXT": {
+      "(VK_EXT_line_rasterization)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceLineRasterizationFeaturesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceSubgroupSizeControlFeaturesEXT": {
+      "(VK_EXT_subgroup_size_control)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceSubgroupSizeControlFeaturesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceCoherentMemoryFeaturesAMD": {
+      "(VK_AMD_device_coherent_memory)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceCoherentMemoryFeaturesAMD-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD</code>"
+        }
+      ]
+    },
+    "VkPhysicalDevicePushDescriptorPropertiesKHR": {
+      "(VK_KHR_push_descriptor)": [
+        {
+          "vuid": "VUID-VkPhysicalDevicePushDescriptorPropertiesKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceMultiviewProperties": {
+      "(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceMultiviewProperties-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceFloatControlsPropertiesKHR": {
+      "(VK_KHR_shader_float_controls)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceFloatControlsPropertiesKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceDiscardRectanglePropertiesEXT": {
+      "(VK_EXT_discard_rectangles)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceDiscardRectanglePropertiesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceSampleLocationsPropertiesEXT": {
+      "(VK_EXT_sample_locations)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceSampleLocationsPropertiesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceExternalMemoryHostPropertiesEXT": {
+      "(VK_EXT_external_memory_host)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceExternalMemoryHostPropertiesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX": {
+      "(VK_NVX_multiview_per_view_attributes)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX</code>"
+        }
+      ]
+    },
+    "VkPhysicalDevicePointClippingProperties": {
+      "(VK_VERSION_1_1,VK_KHR_maintenance2)": [
+        {
+          "vuid": "VUID-VkPhysicalDevicePointClippingProperties-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceSubgroupProperties": {
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceSubgroupProperties-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceSubgroupSizeControlPropertiesEXT": {
+      "(VK_VERSION_1_1)+(VK_EXT_subgroup_size_control)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceSubgroupSizeControlPropertiesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT": {
+      "(VK_EXT_blend_operation_advanced)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT": {
+      "(VK_EXT_vertex_attribute_divisor)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT": {
+      "(VK_EXT_sampler_filter_minmax)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceProtectedMemoryProperties": {
+      "(VK_VERSION_1_1)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceProtectedMemoryProperties-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceMaintenance3Properties": {
+      "(VK_VERSION_1_1,VK_KHR_maintenance3)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceMaintenance3Properties-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceMeshShaderPropertiesNV": {
+      "(VK_NV_mesh_shader)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceMeshShaderPropertiesNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceDescriptorIndexingPropertiesEXT": {
+      "(VK_EXT_descriptor_indexing)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceDescriptorIndexingPropertiesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceInlineUniformBlockPropertiesEXT": {
+      "(VK_EXT_inline_uniform_block)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceInlineUniformBlockPropertiesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceConservativeRasterizationPropertiesEXT": {
+      "(VK_EXT_conservative_rasterization)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceConservativeRasterizationPropertiesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceFragmentDensityMapPropertiesEXT": {
+      "(VK_EXT_fragment_density_map)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceFragmentDensityMapPropertiesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceShaderCorePropertiesAMD": {
+      "(VK_AMD_shader_core_properties)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceShaderCorePropertiesAMD-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceShaderCoreProperties2AMD": {
+      "(VK_AMD_shader_core_properties2)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceShaderCoreProperties2AMD-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceDepthStencilResolvePropertiesKHR": {
+      "(VK_KHR_depth_stencil_resolve)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceDepthStencilResolvePropertiesKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR</code>"
+        }
+      ]
+    },
+    "VkPhysicalDevicePerformanceQueryFeaturesKHR": {
+      "(VK_KHR_performance_query)": [
+        {
+          "vuid": "VUID-VkPhysicalDevicePerformanceQueryFeaturesKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR</code>"
+        }
+      ]
+    },
+    "VkPhysicalDevicePerformanceQueryPropertiesKHR": {
+      "(VK_KHR_performance_query)": [
+        {
+          "vuid": "VUID-VkPhysicalDevicePerformanceQueryPropertiesKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceShadingRateImagePropertiesNV": {
+      "(VK_NV_shading_rate_image)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceShadingRateImagePropertiesNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceTransformFeedbackPropertiesEXT": {
+      "(VK_EXT_transform_feedback)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceTransformFeedbackPropertiesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceRayTracingPropertiesNV": {
+      "(VK_NV_ray_tracing)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceRayTracingPropertiesNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceCooperativeMatrixPropertiesNV": {
+      "(VK_NV_cooperative_matrix)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceCooperativeMatrixPropertiesNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceShaderSMBuiltinsPropertiesNV": {
+      "(VK_NV_shader_sm_builtins)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceShaderSMBuiltinsPropertiesNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT": {
+      "(VK_EXT_texel_buffer_alignment)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceTimelineSemaphorePropertiesKHR": {
+      "(VK_KHR_timeline_semaphore)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceTimelineSemaphorePropertiesKHR-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceLineRasterizationPropertiesEXT": {
+      "(VK_EXT_line_rasterization)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceLineRasterizationPropertiesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT</code>"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceMultisamplePropertiesEXT": {
+      "(VK_EXT_sample_locations)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceMultisamplePropertiesEXT-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceMultisamplePropertiesEXT-samples-parameter",
+          "text": " <code>samples</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSampleCountFlagBits\">VkSampleCountFlagBits</a> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceMultisamplePropertiesEXT-pMultisampleProperties-parameter",
+          "text": " <code>pMultisampleProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkMultisamplePropertiesEXT\">VkMultisamplePropertiesEXT</a> structure"
+        }
+      ]
+    },
+    "VkMultisamplePropertiesEXT": {
+      "(VK_EXT_sample_locations)": [
+        {
+          "vuid": "VUID-VkMultisamplePropertiesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkMultisamplePropertiesEXT-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceFormatProperties": {
+      "core": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceFormatProperties-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceFormatProperties-format-parameter",
+          "text": " <code>format</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFormat\">VkFormat</a> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceFormatProperties-pFormatProperties-parameter",
+          "text": " <code>pFormatProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkFormatProperties\">VkFormatProperties</a> structure"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceFormatProperties2": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceFormatProperties2-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceFormatProperties2-format-parameter",
+          "text": " <code>format</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFormat\">VkFormat</a> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceFormatProperties2-pFormatProperties-parameter",
+          "text": " <code>pFormatProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkFormatProperties2\">VkFormatProperties2</a> structure"
+        }
+      ]
+    },
+    "VkFormatProperties2": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [
+        {
+          "vuid": "VUID-VkFormatProperties2-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2</code>"
+        },
+        {
+          "vuid": "VUID-VkFormatProperties2-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkDrmFormatModifierPropertiesListEXT\">VkDrmFormatModifierPropertiesListEXT</a>"
+        }
+      ]
+    },
+    "VkDrmFormatModifierPropertiesListEXT": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)+(VK_EXT_image_drm_format_modifier)": [
+        {
+          "vuid": "VUID-VkDrmFormatModifierPropertiesListEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT</code>"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceImageFormatProperties": {
+      "(VK_EXT_image_drm_format_modifier)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceImageFormatProperties-tiling-02248",
+          "text": " <code>tiling</code> <strong class=\"purple\">must</strong> not be <code>VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT</code>. (Use <a href=\"#vkGetPhysicalDeviceImageFormatProperties2\">vkGetPhysicalDeviceImageFormatProperties2</a> instead)."
+        }
+      ],
+      "core": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceImageFormatProperties-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceImageFormatProperties-format-parameter",
+          "text": " <code>format</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFormat\">VkFormat</a> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceImageFormatProperties-type-parameter",
+          "text": " <code>type</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageType\">VkImageType</a> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceImageFormatProperties-tiling-parameter",
+          "text": " <code>tiling</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageTiling\">VkImageTiling</a> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceImageFormatProperties-usage-parameter",
+          "text": " <code>usage</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkImageUsageFlagBits\">VkImageUsageFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceImageFormatProperties-usage-requiredbitmask",
+          "text": " <code>usage</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceImageFormatProperties-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkImageCreateFlagBits\">VkImageCreateFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceImageFormatProperties-pImageFormatProperties-parameter",
+          "text": " <code>pImageFormatProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkImageFormatProperties\">VkImageFormatProperties</a> structure"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceExternalImageFormatPropertiesNV": {
+      "(VK_NV_external_memory_capabilities)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-format-parameter",
+          "text": " <code>format</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFormat\">VkFormat</a> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-type-parameter",
+          "text": " <code>type</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageType\">VkImageType</a> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-tiling-parameter",
+          "text": " <code>tiling</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageTiling\">VkImageTiling</a> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-usage-parameter",
+          "text": " <code>usage</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkImageUsageFlagBits\">VkImageUsageFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-usage-requiredbitmask",
+          "text": " <code>usage</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkImageCreateFlagBits\">VkImageCreateFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-externalHandleType-parameter",
+          "text": " <code>externalHandleType</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkExternalMemoryHandleTypeFlagBitsNV\">VkExternalMemoryHandleTypeFlagBitsNV</a> values"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-pExternalImageFormatProperties-parameter",
+          "text": " <code>pExternalImageFormatProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkExternalImageFormatPropertiesNV\">VkExternalImageFormatPropertiesNV</a> structure"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceImageFormatProperties2": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)+(VK_ANDROID_external_memory_android_hardware_buffer)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceImageFormatProperties2-pNext-01868",
+          "text": " If the <code>pNext</code> chain of <code>pImageFormatProperties</code> includes a <a href=\"#VkAndroidHardwareBufferUsageANDROID\">VkAndroidHardwareBufferUsageANDROID</a> structure, the <code>pNext</code> chain of <code>pImageFormatInfo</code> <strong class=\"purple\">must</strong> include a <a href=\"#VkPhysicalDeviceExternalImageFormatInfo\">VkPhysicalDeviceExternalImageFormatInfo</a> structure with <code>handleType</code> set to <code>VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID</code>."
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceImageFormatProperties2-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceImageFormatProperties2-pImageFormatInfo-parameter",
+          "text": " <code>pImageFormatInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkPhysicalDeviceImageFormatInfo2\">VkPhysicalDeviceImageFormatInfo2</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceImageFormatProperties2-pImageFormatProperties-parameter",
+          "text": " <code>pImageFormatProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkImageFormatProperties2\">VkImageFormatProperties2</a> structure"
+        }
+      ]
+    },
+    "VkPhysicalDeviceImageFormatInfo2": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)+(VK_EXT_image_drm_format_modifier)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceImageFormatInfo2-tiling-02249",
+          "text": " <code>tiling</code> <strong class=\"purple\">must</strong> be <code>VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT</code> if and only if the <code>pNext</code> chain includes <a href=\"#VkPhysicalDeviceImageDrmFormatModifierInfoEXT\">VkPhysicalDeviceImageDrmFormatModifierInfoEXT</a>."
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceImageFormatInfo2-tiling-02313",
+          "text": " If <code>tiling</code> is <code>VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT</code> and <code>flags</code> contains <code>VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT</code>, then the <code>pNext</code> chain <strong class=\"purple\">must</strong> include <a href=\"#VkImageFormatListCreateInfoKHR\">VkImageFormatListCreateInfoKHR</a> with non-zero <code>viewFormatCount</code>."
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceImageFormatInfo2-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2</code>"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceImageFormatInfo2-pNext-pNext",
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkImageFormatListCreateInfoKHR\">VkImageFormatListCreateInfoKHR</a>, <a href=\"#VkImageStencilUsageCreateInfoEXT\">VkImageStencilUsageCreateInfoEXT</a>, <a href=\"#VkPhysicalDeviceExternalImageFormatInfo\">VkPhysicalDeviceExternalImageFormatInfo</a>, <a href=\"#VkPhysicalDeviceImageDrmFormatModifierInfoEXT\">VkPhysicalDeviceImageDrmFormatModifierInfoEXT</a>, or <a href=\"#VkPhysicalDeviceImageViewImageFormatInfoEXT\">VkPhysicalDeviceImageViewImageFormatInfoEXT</a>"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceImageFormatInfo2-sType-unique",
+          "text": " Each <code>sType</code> member in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceImageFormatInfo2-format-parameter",
+          "text": " <code>format</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkFormat\">VkFormat</a> value"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceImageFormatInfo2-type-parameter",
+          "text": " <code>type</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageType\">VkImageType</a> value"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceImageFormatInfo2-tiling-parameter",
+          "text": " <code>tiling</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageTiling\">VkImageTiling</a> value"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceImageFormatInfo2-usage-parameter",
+          "text": " <code>usage</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkImageUsageFlagBits\">VkImageUsageFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceImageFormatInfo2-usage-requiredbitmask",
+          "text": " <code>usage</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceImageFormatInfo2-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkImageCreateFlagBits\">VkImageCreateFlagBits</a> values"
+        }
+      ]
+    },
+    "VkImageFormatProperties2": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [
+        {
+          "vuid": "VUID-VkImageFormatProperties2-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2</code>"
+        },
+        {
+          "vuid": "VUID-VkImageFormatProperties2-pNext-pNext",
+          "text": " Each <code>pNext</code> member of any structure (including this one) in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be either <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkAndroidHardwareBufferUsageANDROID\">VkAndroidHardwareBufferUsageANDROID</a>, <a href=\"#VkExternalImageFormatProperties\">VkExternalImageFormatProperties</a>, <a href=\"#VkFilterCubicImageViewImageFormatPropertiesEXT\">VkFilterCubicImageViewImageFormatPropertiesEXT</a>, <a href=\"#VkSamplerYcbcrConversionImageFormatProperties\">VkSamplerYcbcrConversionImageFormatProperties</a>, or <a href=\"#VkTextureLODGatherFormatPropertiesAMD\">VkTextureLODGatherFormatPropertiesAMD</a>"
+        },
+        {
+          "vuid": "VUID-VkImageFormatProperties2-sType-unique",
+          "text": " Each <code>sType</code> member in the <code>pNext</code> chain <strong class=\"purple\">must</strong> be unique"
+        }
+      ]
+    },
+    "VkTextureLODGatherFormatPropertiesAMD": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)+(VK_AMD_texture_gather_bias_lod)": [
+        {
+          "vuid": "VUID-VkTextureLODGatherFormatPropertiesAMD-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceExternalImageFormatInfo": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)+(VK_VERSION_1_1,VK_KHR_external_memory_capabilities)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceExternalImageFormatInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceExternalImageFormatInfo-handleType-parameter",
+          "text": " If <code>handleType</code> is not <code>0</code>, <code>handleType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkExternalMemoryHandleTypeFlagBits\">VkExternalMemoryHandleTypeFlagBits</a> value"
+        }
+      ]
+    },
+    "VkExternalImageFormatProperties": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)+(VK_VERSION_1_1,VK_KHR_external_memory_capabilities)": [
+        {
+          "vuid": "VUID-VkExternalImageFormatProperties-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceImageDrmFormatModifierInfoEXT": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)+(VK_EXT_image_drm_format_modifier)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceImageDrmFormatModifierInfoEXT-sharingMode-02314",
+          "text": " If <code>sharingMode</code> is <code>VK_SHARING_MODE_CONCURRENT</code>, then <code>pQueueFamilyIndices</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>queueFamilyIndexCount</code> <code>uint32_t</code> values."
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceImageDrmFormatModifierInfoEXT-sharingMode-02315",
+          "text": " If <code>sharingMode</code> is <code>VK_SHARING_MODE_CONCURRENT</code>, then <code>queueFamilyIndexCount</code> <strong class=\"purple\">must</strong> be greater than <code>1</code>."
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceImageDrmFormatModifierInfoEXT-sharingMode-02316",
+          "text": " If <code>sharingMode</code> is <code>VK_SHARING_MODE_CONCURRENT</code>, each element of <code>pQueueFamilyIndices</code> <strong class=\"purple\">must</strong> be unique and <strong class=\"purple\">must</strong> be less than the <code>pQueueFamilyPropertyCount</code> returned by <a href=\"#vkGetPhysicalDeviceQueueFamilyProperties2\">vkGetPhysicalDeviceQueueFamilyProperties2</a> for the <code>physicalDevice</code> that was used to create <code>device</code>."
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceImageDrmFormatModifierInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceImageDrmFormatModifierInfoEXT-sharingMode-parameter",
+          "text": " <code>sharingMode</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkSharingMode\">VkSharingMode</a> value"
+        }
+      ]
+    },
+    "VkSamplerYcbcrConversionImageFormatProperties": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)+(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [
+        {
+          "vuid": "VUID-VkSamplerYcbcrConversionImageFormatProperties-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES</code>"
+        }
+      ]
+    },
+    "VkAndroidHardwareBufferUsageANDROID": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)+(VK_ANDROID_external_memory_android_hardware_buffer)": [
+        {
+          "vuid": "VUID-VkAndroidHardwareBufferUsageANDROID-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceImageViewImageFormatInfoEXT": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)+(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceImageViewImageFormatInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceImageViewImageFormatInfoEXT-imageViewType-parameter",
+          "text": " <code>imageViewType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkImageViewType\">VkImageViewType</a> value"
+        }
+      ]
+    },
+    "VkFilterCubicImageViewImageFormatPropertiesEXT": {
+      "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)+(VK_EXT_filter_cubic)": [
+        {
+          "vuid": "VUID-VkFilterCubicImageViewImageFormatPropertiesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkFilterCubicImageViewImageFormatPropertiesEXT-pNext-02627",
+          "text": " If the <code>pNext</code> chain of the <a href=\"#VkImageFormatProperties2\">VkImageFormatProperties2</a> structure includes a <a href=\"#VkFilterCubicImageViewImageFormatPropertiesEXT\">VkFilterCubicImageViewImageFormatPropertiesEXT</a> structure, the <code>pNext</code> chain of the <a href=\"#VkPhysicalDeviceImageFormatInfo2\">VkPhysicalDeviceImageFormatInfo2</a> structure <strong class=\"purple\">must</strong> include a <a href=\"#VkPhysicalDeviceImageViewImageFormatInfoEXT\">VkPhysicalDeviceImageViewImageFormatInfoEXT</a> structure with an <code>imageViewType</code> that is compatible with <code>imageType</code>."
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceExternalBufferProperties": {
+      "(VK_VERSION_1_1,VK_KHR_external_memory_capabilities)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceExternalBufferProperties-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceExternalBufferProperties-pExternalBufferInfo-parameter",
+          "text": " <code>pExternalBufferInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkPhysicalDeviceExternalBufferInfo\">VkPhysicalDeviceExternalBufferInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceExternalBufferProperties-pExternalBufferProperties-parameter",
+          "text": " <code>pExternalBufferProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkExternalBufferProperties\">VkExternalBufferProperties</a> structure"
+        }
+      ]
+    },
+    "VkPhysicalDeviceExternalBufferInfo": {
+      "(VK_VERSION_1_1,VK_KHR_external_memory_capabilities)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceExternalBufferInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceExternalBufferInfo-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceExternalBufferInfo-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkBufferCreateFlagBits\">VkBufferCreateFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceExternalBufferInfo-usage-parameter",
+          "text": " <code>usage</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkBufferUsageFlagBits\">VkBufferUsageFlagBits</a> values"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceExternalBufferInfo-usage-requiredbitmask",
+          "text": " <code>usage</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceExternalBufferInfo-handleType-parameter",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkExternalMemoryHandleTypeFlagBits\">VkExternalMemoryHandleTypeFlagBits</a> value"
+        }
+      ]
+    },
+    "VkExternalBufferProperties": {
+      "(VK_VERSION_1_1,VK_KHR_external_memory_capabilities)": [
+        {
+          "vuid": "VUID-VkExternalBufferProperties-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES</code>"
+        },
+        {
+          "vuid": "VUID-VkExternalBufferProperties-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceExternalSemaphoreProperties": {
+      "(VK_VERSION_1_1,VK_KHR_external_semaphore_capabilities)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceExternalSemaphoreProperties-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceExternalSemaphoreProperties-pExternalSemaphoreInfo-parameter",
+          "text": " <code>pExternalSemaphoreInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkPhysicalDeviceExternalSemaphoreInfo\">VkPhysicalDeviceExternalSemaphoreInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceExternalSemaphoreProperties-pExternalSemaphoreProperties-parameter",
+          "text": " <code>pExternalSemaphoreProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkExternalSemaphoreProperties\">VkExternalSemaphoreProperties</a> structure"
+        }
+      ]
+    },
+    "VkPhysicalDeviceExternalSemaphoreInfo": {
+      "(VK_VERSION_1_1,VK_KHR_external_semaphore_capabilities)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceExternalSemaphoreInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceExternalSemaphoreInfo-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code> or a pointer to a valid instance of <a href=\"#VkSemaphoreTypeCreateInfoKHR\">VkSemaphoreTypeCreateInfoKHR</a>"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceExternalSemaphoreInfo-handleType-parameter",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkExternalSemaphoreHandleTypeFlagBits\">VkExternalSemaphoreHandleTypeFlagBits</a> value"
+        }
+      ]
+    },
+    "VkExternalSemaphoreProperties": {
+      "(VK_VERSION_1_1,VK_KHR_external_semaphore_capabilities)": [
+        {
+          "vuid": "VUID-VkExternalSemaphoreProperties-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES</code>"
+        },
+        {
+          "vuid": "VUID-VkExternalSemaphoreProperties-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceExternalFenceProperties": {
+      "(VK_VERSION_1_1,VK_KHR_external_fence_capabilities)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceExternalFenceProperties-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceExternalFenceProperties-pExternalFenceInfo-parameter",
+          "text": " <code>pExternalFenceInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkPhysicalDeviceExternalFenceInfo\">VkPhysicalDeviceExternalFenceInfo</a> structure"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceExternalFenceProperties-pExternalFenceProperties-parameter",
+          "text": " <code>pExternalFenceProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkExternalFenceProperties\">VkExternalFenceProperties</a> structure"
+        }
+      ]
+    },
+    "VkPhysicalDeviceExternalFenceInfo": {
+      "(VK_VERSION_1_1,VK_KHR_external_fence_capabilities)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceExternalFenceInfo-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO</code>"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceExternalFenceInfo-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceExternalFenceInfo-handleType-parameter",
+          "text": " <code>handleType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkExternalFenceHandleTypeFlagBits\">VkExternalFenceHandleTypeFlagBits</a> value"
+        }
+      ]
+    },
+    "VkExternalFenceProperties": {
+      "(VK_VERSION_1_1,VK_KHR_external_fence_capabilities)": [
+        {
+          "vuid": "VUID-VkExternalFenceProperties-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES</code>"
+        },
+        {
+          "vuid": "VUID-VkExternalFenceProperties-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT": {
+      "(VK_EXT_calibrated_timestamps)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceCalibrateableTimeDomainsEXT-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceCalibrateableTimeDomainsEXT-pTimeDomainCount-parameter",
+          "text": " <code>pTimeDomainCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceCalibrateableTimeDomainsEXT-pTimeDomains-parameter",
+          "text": " If the value referenced by <code>pTimeDomainCount</code> is not <code>0</code>, and <code>pTimeDomains</code> is not <code>NULL</code>, <code>pTimeDomains</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pTimeDomainCount</code> <a href=\"#VkTimeDomainEXT\">VkTimeDomainEXT</a> values"
+        }
+      ]
+    },
+    "vkSetDebugUtilsObjectNameEXT": {
+      "(VK_EXT_debug_utils)": [
+        {
+          "vuid": "VUID-vkSetDebugUtilsObjectNameEXT-pNameInfo-02587",
+          "text": " <code>pNameInfo</code>-&gt;objectType <strong class=\"purple\">must</strong> not be <code>VK_OBJECT_TYPE_UNKNOWN</code>"
+        },
+        {
+          "vuid": "VUID-vkSetDebugUtilsObjectNameEXT-pNameInfo-02588",
+          "text": " <code>pNameInfo</code>-&gt;objectHandle <strong class=\"purple\">must</strong> not be <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>"
+        },
+        {
+          "vuid": "VUID-vkSetDebugUtilsObjectNameEXT-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkSetDebugUtilsObjectNameEXT-pNameInfo-parameter",
+          "text": " <code>pNameInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkDebugUtilsObjectNameInfoEXT\">VkDebugUtilsObjectNameInfoEXT</a> structure"
+        }
+      ]
+    },
+    "VkDebugUtilsObjectNameInfoEXT": {
+      "(VK_EXT_debug_utils)": [
+        {
+          "vuid": "VUID-VkDebugUtilsObjectNameInfoEXT-objectType-02589",
+          "text": " If <code>objectType</code> is <code>VK_OBJECT_TYPE_UNKNOWN</code>, <code>objectHandle</code> <strong class=\"purple\">must</strong> not be <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>"
+        },
+        {
+          "vuid": "VUID-VkDebugUtilsObjectNameInfoEXT-objectType-02590",
+          "text": " If <code>objectType</code> is not <code>VK_OBJECT_TYPE_UNKNOWN</code>, <code>objectHandle</code> <strong class=\"purple\">must</strong> be <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a> or a valid Vulkan handle of the type associated with <code>objectType</code> as defined in the <a href=\"#debugging-object-types\">VkObjectType and Vulkan Handle Relationship</a> table"
+        },
+        {
+          "vuid": "VUID-VkDebugUtilsObjectNameInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkDebugUtilsObjectNameInfoEXT-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkDebugUtilsObjectNameInfoEXT-objectType-parameter",
+          "text": " <code>objectType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkObjectType\">VkObjectType</a> value"
+        },
+        {
+          "vuid": "VUID-VkDebugUtilsObjectNameInfoEXT-pObjectName-parameter",
+          "text": " If <code>pObjectName</code> is not <code>NULL</code>, <code>pObjectName</code> <strong class=\"purple\">must</strong> be a null-terminated UTF-8 string"
+        }
+      ]
+    },
+    "vkSetDebugUtilsObjectTagEXT": {
+      "(VK_EXT_debug_utils)": [
+        {
+          "vuid": "VUID-vkSetDebugUtilsObjectTagEXT-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkSetDebugUtilsObjectTagEXT-pTagInfo-parameter",
+          "text": " <code>pTagInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkDebugUtilsObjectTagInfoEXT\">VkDebugUtilsObjectTagInfoEXT</a> structure"
+        }
+      ]
+    },
+    "VkDebugUtilsObjectTagInfoEXT": {
+      "(VK_EXT_debug_utils)": [
+        {
+          "vuid": "VUID-VkDebugUtilsObjectTagInfoEXT-objectType-01908",
+          "text": " <code>objectType</code> <strong class=\"purple\">must</strong> not be <code>VK_OBJECT_TYPE_UNKNOWN</code>"
+        },
+        {
+          "vuid": "VUID-VkDebugUtilsObjectTagInfoEXT-objectHandle-01910",
+          "text": " <code>objectHandle</code> <strong class=\"purple\">must</strong> be a valid Vulkan handle of the type associated with <code>objectType</code> as defined in the <a href=\"#debugging-object-types\">VkObjectType and Vulkan Handle Relationship</a> table"
+        },
+        {
+          "vuid": "VUID-VkDebugUtilsObjectTagInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkDebugUtilsObjectTagInfoEXT-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkDebugUtilsObjectTagInfoEXT-objectType-parameter",
+          "text": " <code>objectType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkObjectType\">VkObjectType</a> value"
+        },
+        {
+          "vuid": "VUID-VkDebugUtilsObjectTagInfoEXT-pTag-parameter",
+          "text": " <code>pTag</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>tagSize</code> bytes"
+        },
+        {
+          "vuid": "VUID-VkDebugUtilsObjectTagInfoEXT-tagSize-arraylength",
+          "text": " <code>tagSize</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "vkQueueBeginDebugUtilsLabelEXT": {
+      "(VK_EXT_debug_utils)": [
+        {
+          "vuid": "VUID-vkQueueBeginDebugUtilsLabelEXT-queue-parameter",
+          "text": " <code>queue</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkQueue\">VkQueue</a> handle"
+        },
+        {
+          "vuid": "VUID-vkQueueBeginDebugUtilsLabelEXT-pLabelInfo-parameter",
+          "text": " <code>pLabelInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkDebugUtilsLabelEXT\">VkDebugUtilsLabelEXT</a> structure"
+        }
+      ]
+    },
+    "VkDebugUtilsLabelEXT": {
+      "(VK_EXT_debug_utils)": [
+        {
+          "vuid": "VUID-VkDebugUtilsLabelEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkDebugUtilsLabelEXT-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkDebugUtilsLabelEXT-pLabelName-parameter",
+          "text": " <code>pLabelName</code> <strong class=\"purple\">must</strong> be a null-terminated UTF-8 string"
+        }
+      ]
+    },
+    "vkQueueEndDebugUtilsLabelEXT": {
+      "(VK_EXT_debug_utils)": [
+        {
+          "vuid": "VUID-vkQueueEndDebugUtilsLabelEXT-None-01911",
+          "text": " There <strong class=\"purple\">must</strong> be an outstanding <code>vkQueueBeginDebugUtilsLabelEXT</code> command prior to the <code>vkQueueEndDebugUtilsLabelEXT</code> on the queue"
+        },
+        {
+          "vuid": "VUID-vkQueueEndDebugUtilsLabelEXT-queue-parameter",
+          "text": " <code>queue</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkQueue\">VkQueue</a> handle"
+        }
+      ]
+    },
+    "vkQueueInsertDebugUtilsLabelEXT": {
+      "(VK_EXT_debug_utils)": [
+        {
+          "vuid": "VUID-vkQueueInsertDebugUtilsLabelEXT-queue-parameter",
+          "text": " <code>queue</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkQueue\">VkQueue</a> handle"
+        },
+        {
+          "vuid": "VUID-vkQueueInsertDebugUtilsLabelEXT-pLabelInfo-parameter",
+          "text": " <code>pLabelInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkDebugUtilsLabelEXT\">VkDebugUtilsLabelEXT</a> structure"
+        }
+      ]
+    },
+    "vkCmdBeginDebugUtilsLabelEXT": {
+      "(VK_EXT_debug_utils)": [
+        {
+          "vuid": "VUID-vkCmdBeginDebugUtilsLabelEXT-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginDebugUtilsLabelEXT-pLabelInfo-parameter",
+          "text": " <code>pLabelInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkDebugUtilsLabelEXT\">VkDebugUtilsLabelEXT</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginDebugUtilsLabelEXT-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdBeginDebugUtilsLabelEXT-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, or compute operations"
+        }
+      ]
+    },
+    "vkCmdEndDebugUtilsLabelEXT": {
+      "(VK_EXT_debug_utils)": [
+        {
+          "vuid": "VUID-vkCmdEndDebugUtilsLabelEXT-commandBuffer-01912",
+          "text": " There <strong class=\"purple\">must</strong> be an outstanding <code>vkCmdBeginDebugUtilsLabelEXT</code> command prior to the <code>vkCmdEndDebugUtilsLabelEXT</code> on the queue that <code>commandBuffer</code> is submitted to"
+        },
+        {
+          "vuid": "VUID-vkCmdEndDebugUtilsLabelEXT-commandBuffer-01913",
+          "text": " If <code>commandBuffer</code> is a secondary command buffer, there <strong class=\"purple\">must</strong> be an outstanding <code>vkCmdBeginDebugUtilsLabelEXT</code> command recorded to <code>commandBuffer</code> that has not previously been ended by a call to <code>vkCmdEndDebugUtilsLabelEXT</code>."
+        },
+        {
+          "vuid": "VUID-vkCmdEndDebugUtilsLabelEXT-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdEndDebugUtilsLabelEXT-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdEndDebugUtilsLabelEXT-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, or compute operations"
+        }
+      ]
+    },
+    "vkCmdInsertDebugUtilsLabelEXT": {
+      "(VK_EXT_debug_utils)": [
+        {
+          "vuid": "VUID-vkCmdInsertDebugUtilsLabelEXT-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdInsertDebugUtilsLabelEXT-pLabelInfo-parameter",
+          "text": " <code>pLabelInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkDebugUtilsLabelEXT\">VkDebugUtilsLabelEXT</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCmdInsertDebugUtilsLabelEXT-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdInsertDebugUtilsLabelEXT-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, or compute operations"
+        }
+      ]
+    },
+    "vkCreateDebugUtilsMessengerEXT": {
+      "(VK_EXT_debug_utils)": [
+        {
+          "vuid": "VUID-vkCreateDebugUtilsMessengerEXT-instance-parameter",
+          "text": " <code>instance</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkInstance\">VkInstance</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateDebugUtilsMessengerEXT-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkDebugUtilsMessengerCreateInfoEXT\">VkDebugUtilsMessengerCreateInfoEXT</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateDebugUtilsMessengerEXT-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateDebugUtilsMessengerEXT-pMessenger-parameter",
+          "text": " <code>pMessenger</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkDebugUtilsMessengerEXT\">VkDebugUtilsMessengerEXT</a> handle"
+        }
+      ]
+    },
+    "VkDebugUtilsMessengerCreateInfoEXT": {
+      "(VK_EXT_debug_utils)": [
+        {
+          "vuid": "VUID-VkDebugUtilsMessengerCreateInfoEXT-pfnUserCallback-01914",
+          "text": " <code>pfnUserCallback</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#PFN_vkDebugUtilsMessengerCallbackEXT\">PFN_vkDebugUtilsMessengerCallbackEXT</a>"
+        },
+        {
+          "vuid": "VUID-VkDebugUtilsMessengerCreateInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkDebugUtilsMessengerCreateInfoEXT-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkDebugUtilsMessengerCreateInfoEXT-messageSeverity-parameter",
+          "text": " <code>messageSeverity</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkDebugUtilsMessageSeverityFlagBitsEXT\">VkDebugUtilsMessageSeverityFlagBitsEXT</a> values"
+        },
+        {
+          "vuid": "VUID-VkDebugUtilsMessengerCreateInfoEXT-messageSeverity-requiredbitmask",
+          "text": " <code>messageSeverity</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkDebugUtilsMessengerCreateInfoEXT-messageType-parameter",
+          "text": " <code>messageType</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkDebugUtilsMessageTypeFlagBitsEXT\">VkDebugUtilsMessageTypeFlagBitsEXT</a> values"
+        },
+        {
+          "vuid": "VUID-VkDebugUtilsMessengerCreateInfoEXT-messageType-requiredbitmask",
+          "text": " <code>messageType</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkDebugUtilsMessengerCreateInfoEXT-pfnUserCallback-parameter",
+          "text": " <code>pfnUserCallback</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#PFN_vkDebugUtilsMessengerCallbackEXT\">PFN_vkDebugUtilsMessengerCallbackEXT</a> value"
+        }
+      ]
+    },
+    "VkDebugUtilsMessengerCallbackDataEXT": {
+      "(VK_EXT_debug_utils)": [
+        {
+          "vuid": "VUID-VkDebugUtilsMessengerCallbackDataEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkDebugUtilsMessengerCallbackDataEXT-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkDebugUtilsMessengerCallbackDataEXT-flags-zerobitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-VkDebugUtilsMessengerCallbackDataEXT-pMessageIdName-parameter",
+          "text": " If <code>pMessageIdName</code> is not <code>NULL</code>, <code>pMessageIdName</code> <strong class=\"purple\">must</strong> be a null-terminated UTF-8 string"
+        },
+        {
+          "vuid": "VUID-VkDebugUtilsMessengerCallbackDataEXT-pMessage-parameter",
+          "text": " <code>pMessage</code> <strong class=\"purple\">must</strong> be a null-terminated UTF-8 string"
+        },
+        {
+          "vuid": "VUID-VkDebugUtilsMessengerCallbackDataEXT-pQueueLabels-parameter",
+          "text": " If <code>queueLabelCount</code> is not <code>0</code>, <code>pQueueLabels</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>queueLabelCount</code> valid <a href=\"#VkDebugUtilsLabelEXT\">VkDebugUtilsLabelEXT</a> structures"
+        },
+        {
+          "vuid": "VUID-VkDebugUtilsMessengerCallbackDataEXT-pCmdBufLabels-parameter",
+          "text": " If <code>cmdBufLabelCount</code> is not <code>0</code>, <code>pCmdBufLabels</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>cmdBufLabelCount</code> valid <a href=\"#VkDebugUtilsLabelEXT\">VkDebugUtilsLabelEXT</a> structures"
+        },
+        {
+          "vuid": "VUID-VkDebugUtilsMessengerCallbackDataEXT-pObjects-parameter",
+          "text": " If <code>objectCount</code> is not <code>0</code>, <code>pObjects</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>objectCount</code> valid <a href=\"#VkDebugUtilsObjectNameInfoEXT\">VkDebugUtilsObjectNameInfoEXT</a> structures"
+        }
+      ]
+    },
+    "vkSubmitDebugUtilsMessageEXT": {
+      "(VK_EXT_debug_utils)": [
+        {
+          "vuid": "VUID-vkSubmitDebugUtilsMessageEXT-objectType-02591",
+          "text": " The <code>objectType</code> member of each element of <code>pCallbackData</code>-&gt;pObjects <strong class=\"purple\">must</strong> not be <code>VK_OBJECT_TYPE_UNKNOWN</code>"
+        },
+        {
+          "vuid": "VUID-vkSubmitDebugUtilsMessageEXT-instance-parameter",
+          "text": " <code>instance</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkInstance\">VkInstance</a> handle"
+        },
+        {
+          "vuid": "VUID-vkSubmitDebugUtilsMessageEXT-messageSeverity-parameter",
+          "text": " <code>messageSeverity</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDebugUtilsMessageSeverityFlagBitsEXT\">VkDebugUtilsMessageSeverityFlagBitsEXT</a> value"
+        },
+        {
+          "vuid": "VUID-vkSubmitDebugUtilsMessageEXT-messageTypes-parameter",
+          "text": " <code>messageTypes</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkDebugUtilsMessageTypeFlagBitsEXT\">VkDebugUtilsMessageTypeFlagBitsEXT</a> values"
+        },
+        {
+          "vuid": "VUID-vkSubmitDebugUtilsMessageEXT-messageTypes-requiredbitmask",
+          "text": " <code>messageTypes</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkSubmitDebugUtilsMessageEXT-pCallbackData-parameter",
+          "text": " <code>pCallbackData</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkDebugUtilsMessengerCallbackDataEXT\">VkDebugUtilsMessengerCallbackDataEXT</a> structure"
+        }
+      ]
+    },
+    "vkDestroyDebugUtilsMessengerEXT": {
+      "(VK_EXT_debug_utils)": [
+        {
+          "vuid": "VUID-vkDestroyDebugUtilsMessengerEXT-messenger-01915",
+          "text": " If <code>VkAllocationCallbacks</code> were provided when <code>messenger</code> was created, a compatible set of callbacks <strong class=\"purple\">must</strong> be provided here"
+        },
+        {
+          "vuid": "VUID-vkDestroyDebugUtilsMessengerEXT-messenger-01916",
+          "text": " If no <code>VkAllocationCallbacks</code> were provided when <code>messenger</code> was created, <code>pAllocator</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroyDebugUtilsMessengerEXT-instance-parameter",
+          "text": " <code>instance</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkInstance\">VkInstance</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyDebugUtilsMessengerEXT-messenger-parameter",
+          "text": " <code>messenger</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDebugUtilsMessengerEXT\">VkDebugUtilsMessengerEXT</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyDebugUtilsMessengerEXT-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkDestroyDebugUtilsMessengerEXT-messenger-parent",
+          "text": " <code>messenger</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>instance</code>"
+        }
+      ]
+    },
+    "vkDebugMarkerSetObjectNameEXT": {
+      "(VK_EXT_debug_marker)": [
+        {
+          "vuid": "VUID-vkDebugMarkerSetObjectNameEXT-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDebugMarkerSetObjectNameEXT-pNameInfo-parameter",
+          "text": " <code>pNameInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkDebugMarkerObjectNameInfoEXT\">VkDebugMarkerObjectNameInfoEXT</a> structure"
+        }
+      ]
+    },
+    "VkDebugMarkerObjectNameInfoEXT": {
+      "(VK_EXT_debug_marker)": [
+        {
+          "vuid": "VUID-VkDebugMarkerObjectNameInfoEXT-objectType-01490",
+          "text": " <code>objectType</code> <strong class=\"purple\">must</strong> not be <code>VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkDebugMarkerObjectNameInfoEXT-object-01491",
+          "text": " <code>object</code> <strong class=\"purple\">must</strong> not be <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>"
+        },
+        {
+          "vuid": "VUID-VkDebugMarkerObjectNameInfoEXT-object-01492",
+          "text": " <code>object</code> <strong class=\"purple\">must</strong> be a Vulkan object of the type associated with <code>objectType</code> as defined in <a href=\"#debug-report-object-types\">VkDebugReportObjectTypeEXT and Vulkan Handle Relationship</a>."
+        },
+        {
+          "vuid": "VUID-VkDebugMarkerObjectNameInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkDebugMarkerObjectNameInfoEXT-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkDebugMarkerObjectNameInfoEXT-objectType-parameter",
+          "text": " <code>objectType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDebugReportObjectTypeEXT\">VkDebugReportObjectTypeEXT</a> value"
+        },
+        {
+          "vuid": "VUID-VkDebugMarkerObjectNameInfoEXT-pObjectName-parameter",
+          "text": " <code>pObjectName</code> <strong class=\"purple\">must</strong> be a null-terminated UTF-8 string"
+        }
+      ]
+    },
+    "vkDebugMarkerSetObjectTagEXT": {
+      "(VK_EXT_debug_marker)": [
+        {
+          "vuid": "VUID-vkDebugMarkerSetObjectTagEXT-device-parameter",
+          "text": " <code>device</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDevice\">VkDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDebugMarkerSetObjectTagEXT-pTagInfo-parameter",
+          "text": " <code>pTagInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkDebugMarkerObjectTagInfoEXT\">VkDebugMarkerObjectTagInfoEXT</a> structure"
+        }
+      ]
+    },
+    "VkDebugMarkerObjectTagInfoEXT": {
+      "(VK_EXT_debug_marker)": [
+        {
+          "vuid": "VUID-VkDebugMarkerObjectTagInfoEXT-objectType-01493",
+          "text": " <code>objectType</code> <strong class=\"purple\">must</strong> not be <code>VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkDebugMarkerObjectTagInfoEXT-object-01494",
+          "text": " <code>object</code> <strong class=\"purple\">must</strong> not be <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>"
+        },
+        {
+          "vuid": "VUID-VkDebugMarkerObjectTagInfoEXT-object-01495",
+          "text": " <code>object</code> <strong class=\"purple\">must</strong> be a Vulkan object of the type associated with <code>objectType</code> as defined in <a href=\"#debug-report-object-types\">VkDebugReportObjectTypeEXT and Vulkan Handle Relationship</a>."
+        },
+        {
+          "vuid": "VUID-VkDebugMarkerObjectTagInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkDebugMarkerObjectTagInfoEXT-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkDebugMarkerObjectTagInfoEXT-objectType-parameter",
+          "text": " <code>objectType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDebugReportObjectTypeEXT\">VkDebugReportObjectTypeEXT</a> value"
+        },
+        {
+          "vuid": "VUID-VkDebugMarkerObjectTagInfoEXT-pTag-parameter",
+          "text": " <code>pTag</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>tagSize</code> bytes"
+        },
+        {
+          "vuid": "VUID-VkDebugMarkerObjectTagInfoEXT-tagSize-arraylength",
+          "text": " <code>tagSize</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "vkCmdDebugMarkerBeginEXT": {
+      "(VK_EXT_debug_marker)": [
+        {
+          "vuid": "VUID-vkCmdDebugMarkerBeginEXT-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdDebugMarkerBeginEXT-pMarkerInfo-parameter",
+          "text": " <code>pMarkerInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkDebugMarkerMarkerInfoEXT\">VkDebugMarkerMarkerInfoEXT</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCmdDebugMarkerBeginEXT-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDebugMarkerBeginEXT-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, or compute operations"
+        }
+      ]
+    },
+    "VkDebugMarkerMarkerInfoEXT": {
+      "(VK_EXT_debug_marker)": [
+        {
+          "vuid": "VUID-VkDebugMarkerMarkerInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkDebugMarkerMarkerInfoEXT-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-VkDebugMarkerMarkerInfoEXT-pMarkerName-parameter",
+          "text": " <code>pMarkerName</code> <strong class=\"purple\">must</strong> be a null-terminated UTF-8 string"
+        }
+      ]
+    },
+    "vkCmdDebugMarkerEndEXT": {
+      "(VK_EXT_debug_marker)": [
+        {
+          "vuid": "VUID-vkCmdDebugMarkerEndEXT-commandBuffer-01239",
+          "text": " There <strong class=\"purple\">must</strong> be an outstanding <a href=\"#vkCmdDebugMarkerBeginEXT\">vkCmdDebugMarkerBeginEXT</a> command prior to the <code>vkCmdDebugMarkerEndEXT</code> on the queue that <code>commandBuffer</code> is submitted to"
+        },
+        {
+          "vuid": "VUID-vkCmdDebugMarkerEndEXT-commandBuffer-01240",
+          "text": " If <code>commandBuffer</code> is a secondary command buffer, there <strong class=\"purple\">must</strong> be an outstanding <a href=\"#vkCmdDebugMarkerBeginEXT\">vkCmdDebugMarkerBeginEXT</a> command recorded to <code>commandBuffer</code> that has not previously been ended by a call to <a href=\"#vkCmdDebugMarkerEndEXT\">vkCmdDebugMarkerEndEXT</a>."
+        },
+        {
+          "vuid": "VUID-vkCmdDebugMarkerEndEXT-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdDebugMarkerEndEXT-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDebugMarkerEndEXT-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, or compute operations"
+        }
+      ]
+    },
+    "vkCmdDebugMarkerInsertEXT": {
+      "(VK_EXT_debug_marker)": [
+        {
+          "vuid": "VUID-vkCmdDebugMarkerInsertEXT-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdDebugMarkerInsertEXT-pMarkerInfo-parameter",
+          "text": " <code>pMarkerInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkDebugMarkerMarkerInfoEXT\">VkDebugMarkerMarkerInfoEXT</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCmdDebugMarkerInsertEXT-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdDebugMarkerInsertEXT-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, or compute operations"
+        }
+      ]
+    },
+    "vkCreateDebugReportCallbackEXT": {
+      "(VK_EXT_debug_report)": [
+        {
+          "vuid": "VUID-vkCreateDebugReportCallbackEXT-instance-parameter",
+          "text": " <code>instance</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkInstance\">VkInstance</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCreateDebugReportCallbackEXT-pCreateInfo-parameter",
+          "text": " <code>pCreateInfo</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkDebugReportCallbackCreateInfoEXT\">VkDebugReportCallbackCreateInfoEXT</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateDebugReportCallbackEXT-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkCreateDebugReportCallbackEXT-pCallback-parameter",
+          "text": " <code>pCallback</code> <strong class=\"purple\">must</strong> be a valid pointer to a <a href=\"#VkDebugReportCallbackEXT\">VkDebugReportCallbackEXT</a> handle"
+        }
+      ]
+    },
+    "VkDebugReportCallbackCreateInfoEXT": {
+      "(VK_EXT_debug_report)": [
+        {
+          "vuid": "VUID-VkDebugReportCallbackCreateInfoEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkDebugReportCallbackCreateInfoEXT-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkDebugReportFlagBitsEXT\">VkDebugReportFlagBitsEXT</a> values"
+        },
+        {
+          "vuid": "VUID-VkDebugReportCallbackCreateInfoEXT-pfnCallback-parameter",
+          "text": " <code>pfnCallback</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#PFN_vkDebugReportCallbackEXT\">PFN_vkDebugReportCallbackEXT</a> value"
+        }
+      ]
+    },
+    "vkDebugReportMessageEXT": {
+      "(VK_EXT_debug_report)": [
+        {
+          "vuid": "VUID-vkDebugReportMessageEXT-object-01241",
+          "text": " <code>object</code> <strong class=\"purple\">must</strong> be a Vulkan object or <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>"
+        },
+        {
+          "vuid": "VUID-vkDebugReportMessageEXT-objectType-01498",
+          "text": " If <code>objectType</code> is not <code>VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT</code> and <code>object</code> is not <a href=\"#VK_NULL_HANDLE\">VK_NULL_HANDLE</a>, <code>object</code> <strong class=\"purple\">must</strong> be a Vulkan object of the corresponding type associated with <code>objectType</code> as defined in <a href=\"#debug-report-object-types\">VkDebugReportObjectTypeEXT and Vulkan Handle Relationship</a>."
+        },
+        {
+          "vuid": "VUID-vkDebugReportMessageEXT-instance-parameter",
+          "text": " <code>instance</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkInstance\">VkInstance</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDebugReportMessageEXT-flags-parameter",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> be a valid combination of <a href=\"#VkDebugReportFlagBitsEXT\">VkDebugReportFlagBitsEXT</a> values"
+        },
+        {
+          "vuid": "VUID-vkDebugReportMessageEXT-flags-requiredbitmask",
+          "text": " <code>flags</code> <strong class=\"purple\">must</strong> not be <code>0</code>"
+        },
+        {
+          "vuid": "VUID-vkDebugReportMessageEXT-objectType-parameter",
+          "text": " <code>objectType</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDebugReportObjectTypeEXT\">VkDebugReportObjectTypeEXT</a> value"
+        },
+        {
+          "vuid": "VUID-vkDebugReportMessageEXT-pLayerPrefix-parameter",
+          "text": " <code>pLayerPrefix</code> <strong class=\"purple\">must</strong> be a null-terminated UTF-8 string"
+        },
+        {
+          "vuid": "VUID-vkDebugReportMessageEXT-pMessage-parameter",
+          "text": " <code>pMessage</code> <strong class=\"purple\">must</strong> be a null-terminated UTF-8 string"
+        }
+      ]
+    },
+    "vkDestroyDebugReportCallbackEXT": {
+      "(VK_EXT_debug_report)": [
+        {
+          "vuid": "VUID-vkDestroyDebugReportCallbackEXT-instance-01242",
+          "text": " If <code>VkAllocationCallbacks</code> were provided when <code>callback</code> was created, a compatible set of callbacks <strong class=\"purple\">must</strong> be provided here"
+        },
+        {
+          "vuid": "VUID-vkDestroyDebugReportCallbackEXT-instance-01243",
+          "text": " If no <code>VkAllocationCallbacks</code> were provided when <code>callback</code> was created, <code>pAllocator</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        },
+        {
+          "vuid": "VUID-vkDestroyDebugReportCallbackEXT-instance-parameter",
+          "text": " <code>instance</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkInstance\">VkInstance</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyDebugReportCallbackEXT-callback-parameter",
+          "text": " <code>callback</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkDebugReportCallbackEXT\">VkDebugReportCallbackEXT</a> handle"
+        },
+        {
+          "vuid": "VUID-vkDestroyDebugReportCallbackEXT-pAllocator-parameter",
+          "text": " If <code>pAllocator</code> is not <code>NULL</code>, <code>pAllocator</code> <strong class=\"purple\">must</strong> be a valid pointer to a valid <a href=\"#VkAllocationCallbacks\">VkAllocationCallbacks</a> structure"
+        },
+        {
+          "vuid": "VUID-vkDestroyDebugReportCallbackEXT-callback-parent",
+          "text": " <code>callback</code> <strong class=\"purple\">must</strong> have been created, allocated, or retrieved from <code>instance</code>"
+        }
+      ]
+    },
+    "vkCmdSetCheckpointNV": {
+      "(VK_NV_device_diagnostic_checkpoints)": [
+        {
+          "vuid": "VUID-vkCmdSetCheckpointNV-commandBuffer-parameter",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkCommandBuffer\">VkCommandBuffer</a> handle"
+        },
+        {
+          "vuid": "VUID-vkCmdSetCheckpointNV-commandBuffer-recording",
+          "text": " <code>commandBuffer</code> <strong class=\"purple\">must</strong> be in the <a href=\"#commandbuffers-lifecycle\">recording state</a>"
+        },
+        {
+          "vuid": "VUID-vkCmdSetCheckpointNV-commandBuffer-cmdpool",
+          "text": " The <code>VkCommandPool</code> that <code>commandBuffer</code> was allocated from <strong class=\"purple\">must</strong> support graphics, compute, or transfer operations"
+        }
+      ]
+    },
+    "vkGetQueueCheckpointDataNV": {
+      "(VK_NV_device_diagnostic_checkpoints)": [
+        {
+          "vuid": "VUID-vkGetQueueCheckpointDataNV-queue-02025",
+          "text": " The device that <code>queue</code> belongs to <strong class=\"purple\">must</strong> be in the lost state"
+        },
+        {
+          "vuid": "VUID-vkGetQueueCheckpointDataNV-queue-parameter",
+          "text": " <code>queue</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkQueue\">VkQueue</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetQueueCheckpointDataNV-pCheckpointDataCount-parameter",
+          "text": " <code>pCheckpointDataCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetQueueCheckpointDataNV-pCheckpointData-parameter",
+          "text": " If the value referenced by <code>pCheckpointDataCount</code> is not <code>0</code>, and <code>pCheckpointData</code> is not <code>NULL</code>, <code>pCheckpointData</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pCheckpointDataCount</code> <a href=\"#VkCheckpointDataNV\">VkCheckpointDataNV</a> structures"
+        }
+      ]
+    },
+    "VkCheckpointDataNV": {
+      "(VK_NV_device_diagnostic_checkpoints)": [
+        {
+          "vuid": "VUID-VkCheckpointDataNV-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV</code>"
+        },
+        {
+          "vuid": "VUID-VkCheckpointDataNV-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    },
+    "vkGetPhysicalDeviceToolPropertiesEXT": {
+      "(VK_EXT_tooling_info)": [
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceToolPropertiesEXT-physicalDevice-parameter",
+          "text": " <code>physicalDevice</code> <strong class=\"purple\">must</strong> be a valid <a href=\"#VkPhysicalDevice\">VkPhysicalDevice</a> handle"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceToolPropertiesEXT-pToolCount-parameter",
+          "text": " <code>pToolCount</code> <strong class=\"purple\">must</strong> be a valid pointer to a <code>uint32_t</code> value"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceToolPropertiesEXT-pToolProperties-parameter",
+          "text": " If <code>pToolProperties</code> is not <code>NULL</code>, <code>pToolProperties</code> <strong class=\"purple\">must</strong> be a valid pointer to an array of <code>pToolCount</code> <a href=\"#VkPhysicalDeviceToolPropertiesEXT\">VkPhysicalDeviceToolPropertiesEXT</a> structures"
+        },
+        {
+          "vuid": "VUID-vkGetPhysicalDeviceToolPropertiesEXT-pToolCount-arraylength",
+          "text": " If <code>pToolProperties</code> is not <code>NULL</code>, the value referenced by <code>pToolCount</code> <strong class=\"purple\">must</strong> be greater than <code>0</code>"
+        }
+      ]
+    },
+    "VkPhysicalDeviceToolPropertiesEXT": {
+      "(VK_EXT_tooling_info)": [
+        {
+          "vuid": "VUID-VkPhysicalDeviceToolPropertiesEXT-sType-sType",
+          "text": " <code>sType</code> <strong class=\"purple\">must</strong> be <code>VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT</code>"
+        },
+        {
+          "vuid": "VUID-VkPhysicalDeviceToolPropertiesEXT-pNext-pNext",
+          "text": " <code>pNext</code> <strong class=\"purple\">must</strong> be <code>NULL</code>"
+        }
+      ]
+    }
+  }
+}
\ No newline at end of file
diff --git a/src/third_party/vulkan-headers/src/registry/vk.xml b/src/third_party/vulkan-headers/src/registry/vk.xml
new file mode 100644
index 0000000..1a3d613
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/registry/vk.xml
@@ -0,0 +1,11952 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<registry>
+    <comment>
+Copyright (c) 2015-2019 The Khronos Group Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+---- Exceptions to the Apache 2.0 License: ----
+
+As an exception, if you use this Software to generate code and portions of
+this Software are embedded into the generated code as a result, you may
+redistribute such product without providing attribution as would otherwise
+be required by Sections 4(a), 4(b) and 4(d) of the License.
+
+In addition, if you combine or link code generated by this Software with
+software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+("`Combined Software`") and if a court of competent jurisdiction determines
+that the patent provision (Section 3), the indemnity provision (Section 9)
+or other Section of the License conflicts with the conditions of the
+applicable GPL or LGPL license, you may retroactively and prospectively
+choose to deem waived or otherwise exclude such Section(s) of the License,
+but only in their entirety and only with respect to the Combined Software.
+    </comment>
+
+    <comment>
+This file, vk.xml, is the Vulkan API Registry. It is a critically important
+and normative part of the Vulkan Specification, including a canonical
+machine-readable definition of the API, parameter and member validation
+language incorporated into the Specification and reference pages, and other
+material which is registered by Khronos, such as tags used by extension and
+layer authors. The authoritative public version of vk.xml is maintained in
+the master branch of the Khronos Vulkan GitHub project. The authoritative
+private version is maintained in the master branch of the member gitlab
+server.
+    </comment>
+
+    <platforms comment="Vulkan platform names, reserved for use with platform- and window system-specific extensions">
+        <platform name="xlib" protect="VK_USE_PLATFORM_XLIB_KHR" comment="X Window System, Xlib client library"/>
+        <platform name="xlib_xrandr" protect="VK_USE_PLATFORM_XLIB_XRANDR_EXT" comment="X Window System, Xlib client library, XRandR extension"/>
+        <platform name="xcb" protect="VK_USE_PLATFORM_XCB_KHR" comment="X Window System, Xcb client library"/>
+        <platform name="wayland" protect="VK_USE_PLATFORM_WAYLAND_KHR" comment="Wayland display server protocol"/>
+        <platform name="android" protect="VK_USE_PLATFORM_ANDROID_KHR" comment="Android OS"/>
+        <platform name="win32" protect="VK_USE_PLATFORM_WIN32_KHR" comment="Microsoft Win32 API (also refers to Win64 apps)"/>
+        <platform name="vi" protect="VK_USE_PLATFORM_VI_NN" comment="Nintendo Vi"/>
+        <platform name="ios" protect="VK_USE_PLATFORM_IOS_MVK" comment="Apple IOS"/>
+        <platform name="macos" protect="VK_USE_PLATFORM_MACOS_MVK" comment="Apple MacOS"/>
+        <platform name="metal" protect="VK_USE_PLATFORM_METAL_EXT" comment="Metal on CoreAnimation on Apple platforms"/>
+        <platform name="fuchsia" protect="VK_USE_PLATFORM_FUCHSIA" comment="Fuchsia"/>
+        <platform name="ggp" protect="VK_USE_PLATFORM_GGP" comment="Google Games Platform"/>
+    </platforms>
+
+    <tags comment="Vulkan vendor/author tags for extensions and layers">
+        <tag name="IMG"         author="Imagination Technologies"      contact="Michael Worcester @michaelworcester"/>
+        <tag name="AMD"         author="Advanced Micro Devices, Inc."  contact="Daniel Rakos @drakos-amd"/>
+        <tag name="AMDX"        author="Advanced Micro Devices, Inc."  contact="Daniel Rakos @drakos-amd"/>
+        <tag name="ARM"         author="ARM Limited"                   contact="Jan-Harald Fredriksen @janharaldfredriksen-arm"/>
+        <tag name="FSL"         author="Freescale Semiconductor, Inc." contact="Norbert Nopper @FslNopper"/>
+        <tag name="BRCM"        author="Broadcom Corporation"          contact="Graeme Leese @gnl21"/>
+        <tag name="NXP"         author="NXP Semiconductors N.V."       contact="Norbert Nopper @FslNopper"/>
+        <tag name="NV"          author="NVIDIA Corporation"            contact="Daniel Koch @dgkoch"/>
+        <tag name="NVX"         author="NVIDIA Corporation"            contact="Daniel Koch @dgkoch"/>
+        <tag name="VIV"         author="Vivante Corporation"           contact="Yanjun Zhang gitlab:@yanjunzhang"/>
+        <tag name="VSI"         author="VeriSilicon Holdings Co., Ltd." contact="Yanjun Zhang gitlab:@yanjunzhang"/>
+        <tag name="KDAB"        author="KDAB"                          contact="Sean Harmer @seanharmer"/>
+        <tag name="ANDROID"     author="Google LLC"                    contact="Jesse Hall @critsec"/>
+        <tag name="CHROMIUM"    author="Google LLC"                    contact="Jesse Hall @critsec"/>
+        <tag name="FUCHSIA"     author="Google LLC"                    contact="Craig Stout @cdotstout, Jesse Hall @critsec"/>
+        <tag name="GGP"         author="Google, LLC"                   contact="Jean-Francois Roy @jfroy, Hai Nguyen @chaoticbob, Jesse Hall @critsec"/>
+        <tag name="GOOGLE"      author="Google LLC"                    contact="Jesse Hall @critsec"/>
+        <tag name="QCOM"        author="Qualcomm Technologies, Inc."   contact="Maurice Ribble @mribble"/>
+        <tag name="LUNARG"      author="LunarG, Inc."                  contact="Karen Ghavam @karenghavam-lunarg"/>
+        <tag name="SAMSUNG"     author="Samsung Electronics Co., Ltd." contact="Alon Or-bach @alonorbach"/>
+        <tag name="SEC"         author="Samsung Electronics Co., Ltd." contact="Alon Or-bach @alonorbach"/>
+        <tag name="TIZEN"       author="Samsung Electronics Co., Ltd." contact="Alon Or-bach @alonorbach"/>
+        <tag name="RENDERDOC"   author="RenderDoc (renderdoc.org)"     contact="Baldur Karlsson @baldurk"/>
+        <tag name="NN"          author="Nintendo Co., Ltd."            contact="Yasuhiro Yoshioka gitlab:@yoshioka_yasuhiro"/>
+        <tag name="MVK"         author="The Brenwill Workshop Ltd."    contact="Bill Hollings @billhollings"/>
+        <tag name="KHR"         author="Khronos"                       contact="Tom Olson @tomolson"/>
+        <tag name="KHX"         author="Khronos"                       contact="Tom Olson @tomolson"/>
+        <tag name="EXT"         author="Multivendor"                   contact="Jon Leech @oddhack"/>
+        <tag name="MESA"        author="Mesa open source project"      contact="Chad Versace @chadversary, Daniel Stone @fooishbar, David Airlie @airlied, Jason Ekstrand @jekstrand"/>
+        <tag name="INTEL"       author="Intel Corporation"             contact="Slawek Grajewski @sgrajewski"/>
+    </tags>
+
+    <types comment="Vulkan type definitions">
+        <type name="vk_platform" category="include">#include "vk_platform.h"</type>
+
+            <comment>WSI extensions</comment>
+
+        <type category="include" name="X11/Xlib.h"/>
+        <type category="include" name="X11/extensions/Xrandr.h"/>
+        <type category="include" name="wayland-client.h"/>
+        <type category="include" name="windows.h"/>
+        <type category="include" name="xcb/xcb.h"/>
+        <type category="include" name="zircon/types.h"/>
+        <type category="include" name="ggp_c/vulkan_types.h"/>
+            <comment>
+                In the current header structure, each platform's interfaces
+                are confined to a platform-specific header (vulkan_xlib.h,
+                vulkan_win32.h, etc.). These headers are not self-contained,
+                and should not include native headers (X11/Xlib.h,
+                windows.h, etc.). Code should either include vulkan.h after
+                defining the appropriate VK_USE_PLATFORM_platform
+                macros, or include the required native headers prior to
+                explicitly including the corresponding platform header.
+
+                To accomplish this, the dependencies of native types require
+                native headers, but the XML defines the content for those
+                native headers as empty. The actual native header includes
+                can be restored by modifying the native header tags above
+                to #include the header file in the 'name' attribute.
+            </comment>
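+
+            <comment>
+                Illustrative sketch only, not part of the upstream registry
+                (the exact include paths below are assumptions): following the
+                note above, a Win32 build would typically define the platform
+                macro before including vulkan.h, e.g.
+
+                    #define VK_USE_PLATFORM_WIN32_KHR
+                    #include "vulkan/vulkan.h"
+
+                so that vulkan.h pulls in vulkan_win32.h, or alternatively
+                include the native header first and then the platform header
+                explicitly:
+
+                    #include "windows.h"
+                    #include "vulkan/vulkan.h"
+                    #include "vulkan/vulkan_win32.h"
+            </comment>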
+
+        <type requires="X11/Xlib.h" name="Display"/>
+        <type requires="X11/Xlib.h" name="VisualID"/>
+        <type requires="X11/Xlib.h" name="Window"/>
+        <type requires="X11/extensions/Xrandr.h" name="RROutput"/>
+        <type requires="wayland-client.h" name="wl_display"/>
+        <type requires="wayland-client.h" name="wl_surface"/>
+        <type requires="windows.h" name="HINSTANCE"/>
+        <type requires="windows.h" name="HWND"/>
+        <type requires="windows.h" name="HMONITOR"/>
+        <type requires="windows.h" name="HANDLE"/>
+        <type requires="windows.h" name="SECURITY_ATTRIBUTES"/>
+        <type requires="windows.h" name="DWORD"/>
+        <type requires="windows.h" name="LPCWSTR"/>
+        <type requires="xcb/xcb.h" name="xcb_connection_t"/>
+        <type requires="xcb/xcb.h" name="xcb_visualid_t"/>
+        <type requires="xcb/xcb.h" name="xcb_window_t"/>
+        <type requires="zircon/types.h" name="zx_handle_t"/>
+        <type requires="ggp_c/vulkan_types.h" name="GgpStreamDescriptor"/>
+        <type requires="ggp_c/vulkan_types.h" name="GgpFrameToken"/>
+
+        <type category="define">#define <name>VK_MAKE_VERSION</name>(major, minor, patch) \
+    (((major) &lt;&lt; 22) | ((minor) &lt;&lt; 12) | (patch))</type>
+        <type category="define">#define <name>VK_VERSION_MAJOR</name>(version) ((uint32_t)(version) &gt;&gt; 22)</type>
+        <type category="define">#define <name>VK_VERSION_MINOR</name>(version) (((uint32_t)(version) &gt;&gt; 12) &amp; 0x3ff)</type>
+        <type category="define">#define <name>VK_VERSION_PATCH</name>(version) ((uint32_t)(version) &amp; 0xfff)</type>
+
+        <type category="define">// DEPRECATED: This define has been removed. Specific version defines (e.g. VK_API_VERSION_1_0), or the VK_MAKE_VERSION macro, should be used instead.
+//#define <name>VK_API_VERSION</name> <type>VK_MAKE_VERSION</type>(1, 0, 0) // Patch version should always be set to 0</type>
+        <type category="define">// Vulkan 1.0 version number
+#define <name>VK_API_VERSION_1_0</name> <type>VK_MAKE_VERSION</type>(1, 0, 0)// Patch version should always be set to 0</type>
+        <type category="define">// Vulkan 1.1 version number
+#define <name>VK_API_VERSION_1_1</name> <type>VK_MAKE_VERSION</type>(1, 1, 0)// Patch version should always be set to 0</type>
+        <type category="define">// Version of this file
+#define <name>VK_HEADER_VERSION</name> 130</type>
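A quick worked check of the bit layout these defines encode (major in bits 31:22, minor in 21:12, patch in 11:0); a sketch assuming vulkan.h is on the include path:

    #include <assert.h>
    #include <vulkan/vulkan.h>

    void check_version_packing(void) {
        /* (1 << 22) | (1 << 12) | 0 == 4198400 */
        uint32_t v = VK_MAKE_VERSION(1, 1, 0);
        assert(v == 4198400u);
        assert(VK_VERSION_MAJOR(v) == 1);
        assert(VK_VERSION_MINOR(v) == 1);
        assert(VK_VERSION_PATCH(v) == 0);
    }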
+
+        <type category="define">
+#define <name>VK_DEFINE_HANDLE</name>(object) typedef struct object##_T* object;</type>
+
+        <type category="define" name="VK_DEFINE_NON_DISPATCHABLE_HANDLE">
+#if !defined(VK_DEFINE_NON_DISPATCHABLE_HANDLE)
+#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) &amp;&amp; !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
+        #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef struct object##_T *object;
+#else
+        #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef uint64_t object;
+#endif
+#endif</type>
+
+        <type category="define">
+#define <name>VK_NULL_HANDLE</name> 0</type>
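In generated headers, VK_DEFINE_HANDLE always produces an opaque struct pointer, while VK_DEFINE_NON_DISPATCHABLE_HANDLE produces a struct pointer on 64-bit targets and a uint64_t otherwise; VK_NULL_HANDLE (0) is the "no object" value in both cases. A small sketch:

    #include <vulkan/vulkan.h>

    /* Works whether VkBuffer is a pointer (64-bit build) or a uint64_t
       (32-bit build), since VK_NULL_HANDLE is just 0. */
    int buffer_is_valid(VkBuffer buffer) {
        return buffer != VK_NULL_HANDLE;
    }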
+
+        <type category="define">struct <name>ANativeWindow</name>;</type>
+        <type category="define">struct <name>AHardwareBuffer</name>;</type>
+        <type category="define">
+#ifdef __OBJC__
+@class CAMetalLayer;
+#else
+typedef void <name>CAMetalLayer</name>;
+#endif</type>
+
+        <type category="basetype">typedef <type>uint32_t</type> <name>VkSampleMask</name>;</type>
+        <type category="basetype">typedef <type>uint32_t</type> <name>VkBool32</name>;</type>
+        <type category="basetype">typedef <type>uint32_t</type> <name>VkFlags</name>;</type>
+        <type category="basetype">typedef <type>uint64_t</type> <name>VkDeviceSize</name>;</type>
+        <type category="basetype">typedef <type>uint64_t</type> <name>VkDeviceAddress</name>;</type>
+
+            <comment>Basic C types, pulled in via vk_platform.h</comment>
+        <type requires="vk_platform" name="void"/>
+        <type requires="vk_platform" name="char"/>
+        <type requires="vk_platform" name="float"/>
+        <type requires="vk_platform" name="double"/>
+        <type requires="vk_platform" name="uint8_t"/>
+        <type requires="vk_platform" name="uint16_t"/>
+        <type requires="vk_platform" name="uint32_t"/>
+        <type requires="vk_platform" name="uint64_t"/>
+        <type requires="vk_platform" name="int32_t"/>
+        <type requires="vk_platform" name="int64_t"/>
+        <type requires="vk_platform" name="size_t"/>
+        <type name="int"/>
+
+            <comment>Bitmask types</comment>
+        <type requires="VkFramebufferCreateFlagBits"      category="bitmask">typedef <type>VkFlags</type> <name>VkFramebufferCreateFlags</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkQueryPoolCreateFlags</name>;</type>
+        <type requires="VkRenderPassCreateFlagBits"       category="bitmask">typedef <type>VkFlags</type> <name>VkRenderPassCreateFlags</name>;</type>
+        <type requires="VkSamplerCreateFlagBits"          category="bitmask">typedef <type>VkFlags</type> <name>VkSamplerCreateFlags</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkPipelineLayoutCreateFlags</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkPipelineCacheCreateFlags</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkPipelineDepthStencilStateCreateFlags</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkPipelineDynamicStateCreateFlags</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkPipelineColorBlendStateCreateFlags</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkPipelineMultisampleStateCreateFlags</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkPipelineRasterizationStateCreateFlags</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkPipelineViewportStateCreateFlags</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkPipelineTessellationStateCreateFlags</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkPipelineInputAssemblyStateCreateFlags</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkPipelineVertexInputStateCreateFlags</name>;</type>
+        <type requires="VkPipelineShaderStageCreateFlagBits" category="bitmask">typedef <type>VkFlags</type> <name>VkPipelineShaderStageCreateFlags</name>;</type>
+        <type requires="VkDescriptorSetLayoutCreateFlagBits" category="bitmask">typedef <type>VkFlags</type> <name>VkDescriptorSetLayoutCreateFlags</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkBufferViewCreateFlags</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkInstanceCreateFlags</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkDeviceCreateFlags</name>;</type>
+        <type requires="VkDeviceQueueCreateFlagBits"      category="bitmask">typedef <type>VkFlags</type> <name>VkDeviceQueueCreateFlags</name>;</type>
+        <type requires="VkQueueFlagBits"                  category="bitmask">typedef <type>VkFlags</type> <name>VkQueueFlags</name>;</type>
+        <type requires="VkMemoryPropertyFlagBits"         category="bitmask">typedef <type>VkFlags</type> <name>VkMemoryPropertyFlags</name>;</type>
+        <type requires="VkMemoryHeapFlagBits"             category="bitmask">typedef <type>VkFlags</type> <name>VkMemoryHeapFlags</name>;</type>
+        <type requires="VkAccessFlagBits"                 category="bitmask">typedef <type>VkFlags</type> <name>VkAccessFlags</name>;</type>
+        <type requires="VkBufferUsageFlagBits"            category="bitmask">typedef <type>VkFlags</type> <name>VkBufferUsageFlags</name>;</type>
+        <type requires="VkBufferCreateFlagBits"           category="bitmask">typedef <type>VkFlags</type> <name>VkBufferCreateFlags</name>;</type>
+        <type requires="VkShaderStageFlagBits"            category="bitmask">typedef <type>VkFlags</type> <name>VkShaderStageFlags</name>;</type>
+        <type requires="VkImageUsageFlagBits"             category="bitmask">typedef <type>VkFlags</type> <name>VkImageUsageFlags</name>;</type>
+        <type requires="VkImageCreateFlagBits"            category="bitmask">typedef <type>VkFlags</type> <name>VkImageCreateFlags</name>;</type>
+        <type requires="VkImageViewCreateFlagBits"        category="bitmask">typedef <type>VkFlags</type> <name>VkImageViewCreateFlags</name>;</type>
+        <type requires="VkPipelineCreateFlagBits"         category="bitmask">typedef <type>VkFlags</type> <name>VkPipelineCreateFlags</name>;</type>
+        <type requires="VkColorComponentFlagBits"         category="bitmask">typedef <type>VkFlags</type> <name>VkColorComponentFlags</name>;</type>
+        <type requires="VkFenceCreateFlagBits"            category="bitmask">typedef <type>VkFlags</type> <name>VkFenceCreateFlags</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkSemaphoreCreateFlags</name>;</type>
+        <type requires="VkFormatFeatureFlagBits"          category="bitmask">typedef <type>VkFlags</type> <name>VkFormatFeatureFlags</name>;</type>
+        <type requires="VkQueryControlFlagBits"           category="bitmask">typedef <type>VkFlags</type> <name>VkQueryControlFlags</name>;</type>
+        <type requires="VkQueryResultFlagBits"            category="bitmask">typedef <type>VkFlags</type> <name>VkQueryResultFlags</name>;</type>
+        <type requires="VkShaderModuleCreateFlagBits"     category="bitmask">typedef <type>VkFlags</type> <name>VkShaderModuleCreateFlags</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkEventCreateFlags</name>;</type>
+        <type requires="VkCommandPoolCreateFlagBits"          category="bitmask">typedef <type>VkFlags</type> <name>VkCommandPoolCreateFlags</name>;</type>
+        <type requires="VkCommandPoolResetFlagBits"           category="bitmask">typedef <type>VkFlags</type> <name>VkCommandPoolResetFlags</name>;</type>
+        <type requires="VkCommandBufferResetFlagBits"         category="bitmask">typedef <type>VkFlags</type> <name>VkCommandBufferResetFlags</name>;</type>
+        <type requires="VkCommandBufferUsageFlagBits"         category="bitmask">typedef <type>VkFlags</type> <name>VkCommandBufferUsageFlags</name>;</type>
+        <type requires="VkQueryPipelineStatisticFlagBits" category="bitmask">typedef <type>VkFlags</type> <name>VkQueryPipelineStatisticFlags</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkMemoryMapFlags</name>;</type>
+        <type requires="VkImageAspectFlagBits"            category="bitmask">typedef <type>VkFlags</type> <name>VkImageAspectFlags</name>;</type>
+        <type requires="VkSparseMemoryBindFlagBits"       category="bitmask">typedef <type>VkFlags</type> <name>VkSparseMemoryBindFlags</name>;</type>
+        <type requires="VkSparseImageFormatFlagBits"      category="bitmask">typedef <type>VkFlags</type> <name>VkSparseImageFormatFlags</name>;</type>
+        <type requires="VkSubpassDescriptionFlagBits"     category="bitmask">typedef <type>VkFlags</type> <name>VkSubpassDescriptionFlags</name>;</type>
+        <type requires="VkPipelineStageFlagBits"          category="bitmask">typedef <type>VkFlags</type> <name>VkPipelineStageFlags</name>;</type>
+        <type requires="VkSampleCountFlagBits"            category="bitmask">typedef <type>VkFlags</type> <name>VkSampleCountFlags</name>;</type>
+        <type requires="VkAttachmentDescriptionFlagBits"  category="bitmask">typedef <type>VkFlags</type> <name>VkAttachmentDescriptionFlags</name>;</type>
+        <type requires="VkStencilFaceFlagBits"            category="bitmask">typedef <type>VkFlags</type> <name>VkStencilFaceFlags</name>;</type>
+        <type requires="VkCullModeFlagBits"               category="bitmask">typedef <type>VkFlags</type> <name>VkCullModeFlags</name>;</type>
+        <type requires="VkDescriptorPoolCreateFlagBits"   category="bitmask">typedef <type>VkFlags</type> <name>VkDescriptorPoolCreateFlags</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkDescriptorPoolResetFlags</name>;</type>
+        <type requires="VkDependencyFlagBits"             category="bitmask">typedef <type>VkFlags</type> <name>VkDependencyFlags</name>;</type>
+        <type requires="VkSubgroupFeatureFlagBits"        category="bitmask">typedef <type>VkFlags</type> <name>VkSubgroupFeatureFlags</name>;</type>
+        <type requires="VkIndirectCommandsLayoutUsageFlagBitsNVX"  category="bitmask">typedef <type>VkFlags</type> <name>VkIndirectCommandsLayoutUsageFlagsNVX</name>;</type>
+        <type requires="VkObjectEntryUsageFlagBitsNVX"             category="bitmask">typedef <type>VkFlags</type> <name>VkObjectEntryUsageFlagsNVX</name>;</type>
+        <type requires="VkGeometryFlagBitsNV"            category="bitmask">typedef <type>VkFlags</type> <name>VkGeometryFlagsNV</name>;</type>
+        <type requires="VkGeometryInstanceFlagBitsNV"    category="bitmask">typedef <type>VkFlags</type> <name>VkGeometryInstanceFlagsNV</name>;</type>
+        <type requires="VkBuildAccelerationStructureFlagBitsNV" category="bitmask">typedef <type>VkFlags</type> <name>VkBuildAccelerationStructureFlagsNV</name>;</type>
+
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkDescriptorUpdateTemplateCreateFlags</name>;</type>
+        <type                                             category="bitmask" name="VkDescriptorUpdateTemplateCreateFlagsKHR" alias="VkDescriptorUpdateTemplateCreateFlags"/>
+        <type requires="VkPipelineCreationFeedbackFlagBitsEXT" category="bitmask">typedef <type>VkFlags</type> <name>VkPipelineCreationFeedbackFlagsEXT</name>;</type>
+        <type requires="VkPerformanceCounterDescriptionFlagBitsKHR" category="bitmask">typedef <type>VkFlags</type> <name>VkPerformanceCounterDescriptionFlagsKHR</name>;</type>
+        <type requires="VkAcquireProfilingLockFlagBitsKHR"          category="bitmask">typedef <type>VkFlags</type> <name>VkAcquireProfilingLockFlagsKHR</name>;</type>
+        <type requires="VkSemaphoreWaitFlagBitsKHR"       category="bitmask">typedef <type>VkFlags</type> <name>VkSemaphoreWaitFlagsKHR</name>;</type>
+        <type requires="VkPipelineCompilerControlFlagBitsAMD" category="bitmask">typedef <type>VkFlags</type> <name>VkPipelineCompilerControlFlagsAMD</name>;</type>
+        <type requires="VkShaderCorePropertiesFlagBitsAMD" category="bitmask">typedef <type>VkFlags</type> <name>VkShaderCorePropertiesFlagsAMD</name>;</type>
+
+            <comment>WSI extensions</comment>
+        <type requires="VkCompositeAlphaFlagBitsKHR"      category="bitmask">typedef <type>VkFlags</type> <name>VkCompositeAlphaFlagsKHR</name>;</type>
+        <type requires="VkDisplayPlaneAlphaFlagBitsKHR"   category="bitmask">typedef <type>VkFlags</type> <name>VkDisplayPlaneAlphaFlagsKHR</name>;</type>
+        <type requires="VkSurfaceTransformFlagBitsKHR"    category="bitmask">typedef <type>VkFlags</type> <name>VkSurfaceTransformFlagsKHR</name>;</type>
+        <type requires="VkSwapchainCreateFlagBitsKHR"     category="bitmask">typedef <type>VkFlags</type> <name>VkSwapchainCreateFlagsKHR</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkDisplayModeCreateFlagsKHR</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkDisplaySurfaceCreateFlagsKHR</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkAndroidSurfaceCreateFlagsKHR</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkViSurfaceCreateFlagsNN</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkWaylandSurfaceCreateFlagsKHR</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkWin32SurfaceCreateFlagsKHR</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkXlibSurfaceCreateFlagsKHR</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkXcbSurfaceCreateFlagsKHR</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkIOSSurfaceCreateFlagsMVK</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkMacOSSurfaceCreateFlagsMVK</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkMetalSurfaceCreateFlagsEXT</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkImagePipeSurfaceCreateFlagsFUCHSIA</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkStreamDescriptorSurfaceCreateFlagsGGP</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkHeadlessSurfaceCreateFlagsEXT</name>;</type>
+        <type requires="VkPeerMemoryFeatureFlagBits"   category="bitmask">typedef <type>VkFlags</type> <name>VkPeerMemoryFeatureFlags</name>;</type>
+        <type                                             category="bitmask" name="VkPeerMemoryFeatureFlagsKHR"               alias="VkPeerMemoryFeatureFlags"/>
+        <type requires="VkMemoryAllocateFlagBits"      category="bitmask">typedef <type>VkFlags</type> <name>VkMemoryAllocateFlags</name>;</type>
+        <type                                             category="bitmask" name="VkMemoryAllocateFlagsKHR"                  alias="VkMemoryAllocateFlags"/>
+        <type requires="VkDeviceGroupPresentModeFlagBitsKHR" category="bitmask">typedef <type>VkFlags</type> <name>VkDeviceGroupPresentModeFlagsKHR</name>;</type>
+
+        <type requires="VkDebugReportFlagBitsEXT"      category="bitmask">typedef <type>VkFlags</type> <name>VkDebugReportFlagsEXT</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkCommandPoolTrimFlags</name>;</type>
+        <type                                             category="bitmask" name="VkCommandPoolTrimFlagsKHR"                 alias="VkCommandPoolTrimFlags"/>
+        <type requires="VkExternalMemoryHandleTypeFlagBitsNV" category="bitmask">typedef <type>VkFlags</type> <name>VkExternalMemoryHandleTypeFlagsNV</name>;</type>
+        <type requires="VkExternalMemoryFeatureFlagBitsNV" category="bitmask">typedef <type>VkFlags</type> <name>VkExternalMemoryFeatureFlagsNV</name>;</type>
+        <type requires="VkExternalMemoryHandleTypeFlagBits" category="bitmask">typedef <type>VkFlags</type> <name>VkExternalMemoryHandleTypeFlags</name>;</type>
+        <type                                             category="bitmask" name="VkExternalMemoryHandleTypeFlagsKHR"        alias="VkExternalMemoryHandleTypeFlags"/>
+        <type requires="VkExternalMemoryFeatureFlagBits" category="bitmask">typedef <type>VkFlags</type> <name>VkExternalMemoryFeatureFlags</name>;</type>
+        <type                                             category="bitmask" name="VkExternalMemoryFeatureFlagsKHR"           alias="VkExternalMemoryFeatureFlags"/>
+        <type requires="VkExternalSemaphoreHandleTypeFlagBits" category="bitmask">typedef <type>VkFlags</type> <name>VkExternalSemaphoreHandleTypeFlags</name>;</type>
+        <type                                             category="bitmask" name="VkExternalSemaphoreHandleTypeFlagsKHR"     alias="VkExternalSemaphoreHandleTypeFlags"/>
+        <type requires="VkExternalSemaphoreFeatureFlagBits" category="bitmask">typedef <type>VkFlags</type> <name>VkExternalSemaphoreFeatureFlags</name>;</type>
+        <type                                             category="bitmask" name="VkExternalSemaphoreFeatureFlagsKHR"        alias="VkExternalSemaphoreFeatureFlags"/>
+        <type requires="VkSemaphoreImportFlagBits" category="bitmask">typedef <type>VkFlags</type> <name>VkSemaphoreImportFlags</name>;</type>
+        <type                                             category="bitmask" name="VkSemaphoreImportFlagsKHR"                 alias="VkSemaphoreImportFlags"/>
+        <type requires="VkExternalFenceHandleTypeFlagBits" category="bitmask">typedef <type>VkFlags</type> <name>VkExternalFenceHandleTypeFlags</name>;</type>
+        <type                                             category="bitmask" name="VkExternalFenceHandleTypeFlagsKHR"         alias="VkExternalFenceHandleTypeFlags"/>
+        <type requires="VkExternalFenceFeatureFlagBits" category="bitmask">typedef <type>VkFlags</type> <name>VkExternalFenceFeatureFlags</name>;</type>
+        <type                                             category="bitmask" name="VkExternalFenceFeatureFlagsKHR"            alias="VkExternalFenceFeatureFlags"/>
+        <type requires="VkFenceImportFlagBits" category="bitmask">typedef <type>VkFlags</type> <name>VkFenceImportFlags</name>;</type>
+        <type                                             category="bitmask" name="VkFenceImportFlagsKHR"                     alias="VkFenceImportFlags"/>
+        <type requires="VkSurfaceCounterFlagBitsEXT"      category="bitmask">typedef <type>VkFlags</type> <name>VkSurfaceCounterFlagsEXT</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkPipelineViewportSwizzleStateCreateFlagsNV</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkPipelineDiscardRectangleStateCreateFlagsEXT</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkPipelineCoverageToColorStateCreateFlagsNV</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkPipelineCoverageModulationStateCreateFlagsNV</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkPipelineCoverageReductionStateCreateFlagsNV</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkValidationCacheCreateFlagsEXT</name>;</type>
+        <type requires="VkDebugUtilsMessageSeverityFlagBitsEXT"  category="bitmask">typedef <type>VkFlags</type> <name>VkDebugUtilsMessageSeverityFlagsEXT</name>;</type>
+        <type requires="VkDebugUtilsMessageTypeFlagBitsEXT"      category="bitmask">typedef <type>VkFlags</type> <name>VkDebugUtilsMessageTypeFlagsEXT</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkDebugUtilsMessengerCreateFlagsEXT</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkDebugUtilsMessengerCallbackDataFlagsEXT</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkPipelineRasterizationConservativeStateCreateFlagsEXT</name>;</type>
+        <type requires="VkDescriptorBindingFlagBitsEXT" category="bitmask">typedef <type>VkFlags</type> <name>VkDescriptorBindingFlagsEXT</name>;</type>
+        <type requires="VkConditionalRenderingFlagBitsEXT"   category="bitmask">typedef <type>VkFlags</type> <name>VkConditionalRenderingFlagsEXT</name>;</type>
+        <type requires="VkResolveModeFlagBitsKHR"         category="bitmask">typedef <type>VkFlags</type> <name>VkResolveModeFlagsKHR</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkPipelineRasterizationStateStreamCreateFlagsEXT</name>;</type>
+        <type                                             category="bitmask">typedef <type>VkFlags</type> <name>VkPipelineRasterizationDepthClipStateCreateFlagsEXT</name>;</type>
+        <type requires="VkSwapchainImageUsageFlagBitsANDROID" category="bitmask">typedef <type>VkFlags</type> <name>VkSwapchainImageUsageFlagsANDROID</name>;</type>
+        <type requires="VkToolPurposeFlagBitsEXT"         category="bitmask">typedef <type>VkFlags</type> <name>VkToolPurposeFlagsEXT</name>;</type>
+
+            <comment>Types which can be void pointers or class pointers, selected at compile time</comment>
+        <type category="handle"><type>VK_DEFINE_HANDLE</type>(<name>VkInstance</name>)</type>
+        <type category="handle" parent="VkInstance"><type>VK_DEFINE_HANDLE</type>(<name>VkPhysicalDevice</name>)</type>
+        <type category="handle" parent="VkPhysicalDevice"><type>VK_DEFINE_HANDLE</type>(<name>VkDevice</name>)</type>
+        <type category="handle" parent="VkDevice"><type>VK_DEFINE_HANDLE</type>(<name>VkQueue</name>)</type>
+        <type category="handle" parent="VkCommandPool"><type>VK_DEFINE_HANDLE</type>(<name>VkCommandBuffer</name>)</type>
+        <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkDeviceMemory</name>)</type>
+        <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkCommandPool</name>)</type>
+        <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkBuffer</name>)</type>
+        <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkBufferView</name>)</type>
+        <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkImage</name>)</type>
+        <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkImageView</name>)</type>
+        <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkShaderModule</name>)</type>
+        <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkPipeline</name>)</type>
+        <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkPipelineLayout</name>)</type>
+        <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkSampler</name>)</type>
+        <type category="handle" parent="VkDescriptorPool"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkDescriptorSet</name>)</type>
+        <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkDescriptorSetLayout</name>)</type>
+        <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkDescriptorPool</name>)</type>
+        <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkFence</name>)</type>
+        <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkSemaphore</name>)</type>
+        <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkEvent</name>)</type>
+        <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkQueryPool</name>)</type>
+        <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkFramebuffer</name>)</type>
+        <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkRenderPass</name>)</type>
+        <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkPipelineCache</name>)</type>
+        <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkObjectTableNVX</name>)</type>
+        <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkIndirectCommandsLayoutNVX</name>)</type>
+        <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkDescriptorUpdateTemplate</name>)</type>
+        <type category="handle" name="VkDescriptorUpdateTemplateKHR" alias="VkDescriptorUpdateTemplate"/>
+        <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkSamplerYcbcrConversion</name>)</type>
+        <type category="handle" name="VkSamplerYcbcrConversionKHR"   alias="VkSamplerYcbcrConversion"/>
+        <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkValidationCacheEXT</name>)</type>
+        <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkAccelerationStructureNV</name>)</type>
+        <type category="handle" parent="VkDevice"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkPerformanceConfigurationINTEL</name>)</type>
+
+            <comment>WSI extensions</comment>
+        <type category="handle"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkDisplayKHR</name>)</type>
+        <type category="handle" parent="VkPhysicalDevice,VkDisplayKHR"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkDisplayModeKHR</name>)</type>
+        <type category="handle" parent="VkInstance"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkSurfaceKHR</name>)</type>
+        <type category="handle" parent="VkSurfaceKHR"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkSwapchainKHR</name>)</type>
+        <type category="handle" parent="VkInstance"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkDebugReportCallbackEXT</name>)</type>
+        <type category="handle" parent="VkInstance"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkDebugUtilsMessengerEXT</name>)</type>
+
+            <comment>Types generated from corresponding enums tags below</comment>
+        <type name="VkAttachmentLoadOp" category="enum"/>
+        <type name="VkAttachmentStoreOp" category="enum"/>
+        <type name="VkBlendFactor" category="enum"/>
+        <type name="VkBlendOp" category="enum"/>
+        <type name="VkBorderColor" category="enum"/>
+        <type name="VkFramebufferCreateFlagBits" category="enum"/>
+        <type name="VkQueryPoolCreateFlagBits" category="enum"/>
+        <type name="VkRenderPassCreateFlagBits" category="enum"/>
+        <type name="VkSamplerCreateFlagBits" category="enum"/>
+        <type name="VkPipelineCacheHeaderVersion" category="enum"/>
+        <type name="VkPipelineLayoutCreateFlagBits" category="enum"/>
+        <type name="VkPipelineCacheCreateFlagBits" category="enum"/>
+        <type name="VkPipelineDepthStencilStateCreateFlagBits" category="enum"/>
+        <type name="VkPipelineDynamicStateCreateFlagBits" category="enum"/>
+        <type name="VkPipelineColorBlendStateCreateFlagBits" category="enum"/>
+        <type name="VkPipelineMultisampleStateCreateFlagBits" category="enum"/>
+        <type name="VkPipelineRasterizationStateCreateFlagBits" category="enum"/>
+        <type name="VkPipelineViewportStateCreateFlagBits" category="enum"/>
+        <type name="VkPipelineTessellationStateCreateFlagBits" category="enum"/>
+        <type name="VkPipelineInputAssemblyStateCreateFlagBits" category="enum"/>
+        <type name="VkPipelineVertexInputStateCreateFlagBits" category="enum"/>
+        <type name="VkPipelineShaderStageCreateFlagBits" category="enum"/>
+        <type name="VkDescriptorSetLayoutCreateFlagBits" category="enum"/>
+        <type name="VkBufferViewCreateFlagBits" category="enum"/>
+        <type name="VkInstanceCreateFlagBits" category="enum"/>
+        <type name="VkDeviceQueueCreateFlagBits" category="enum"/>
+        <type name="VkBufferCreateFlagBits" category="enum"/>
+        <type name="VkBufferUsageFlagBits" category="enum"/>
+        <type name="VkColorComponentFlagBits" category="enum"/>
+        <type name="VkComponentSwizzle" category="enum"/>
+        <type name="VkCommandPoolCreateFlagBits" category="enum"/>
+        <type name="VkCommandPoolResetFlagBits" category="enum"/>
+        <type name="VkCommandBufferResetFlagBits" category="enum"/>
+        <type name="VkCommandBufferLevel" category="enum"/>
+        <type name="VkCommandBufferUsageFlagBits" category="enum"/>
+        <type name="VkCompareOp" category="enum"/>
+        <type name="VkCullModeFlagBits" category="enum"/>
+        <type name="VkDescriptorType" category="enum"/>
+        <type name="VkDeviceCreateFlagBits" category="enum"/>
+        <type name="VkDynamicState" category="enum"/>
+        <type name="VkFenceCreateFlagBits" category="enum"/>
+        <type name="VkPolygonMode" category="enum"/>
+        <type name="VkFormat" category="enum"/>
+        <type name="VkFormatFeatureFlagBits" category="enum"/>
+        <type name="VkFrontFace" category="enum"/>
+        <type name="VkImageAspectFlagBits" category="enum"/>
+        <type name="VkImageCreateFlagBits" category="enum"/>
+        <type name="VkImageLayout" category="enum"/>
+        <type name="VkImageTiling" category="enum"/>
+        <type name="VkImageType" category="enum"/>
+        <type name="VkImageUsageFlagBits" category="enum"/>
+        <type name="VkImageViewCreateFlagBits" category="enum"/>
+        <type name="VkImageViewType" category="enum"/>
+        <type name="VkSharingMode" category="enum"/>
+        <type name="VkIndexType" category="enum"/>
+        <type name="VkLogicOp" category="enum"/>
+        <type name="VkMemoryHeapFlagBits" category="enum"/>
+        <type name="VkAccessFlagBits" category="enum"/>
+        <type name="VkMemoryPropertyFlagBits" category="enum"/>
+        <type name="VkPhysicalDeviceType" category="enum"/>
+        <type name="VkPipelineBindPoint" category="enum"/>
+        <type name="VkPipelineCreateFlagBits" category="enum"/>
+        <type name="VkPrimitiveTopology" category="enum"/>
+        <type name="VkQueryControlFlagBits" category="enum"/>
+        <type name="VkQueryPipelineStatisticFlagBits" category="enum"/>
+        <type name="VkQueryResultFlagBits" category="enum"/>
+        <type name="VkQueryType" category="enum"/>
+        <type name="VkQueueFlagBits" category="enum"/>
+        <type name="VkSubpassContents" category="enum"/>
+        <type name="VkResult" category="enum"/>
+        <type name="VkShaderStageFlagBits" category="enum"/>
+        <type name="VkSparseMemoryBindFlagBits" category="enum"/>
+        <type name="VkStencilFaceFlagBits" category="enum"/>
+        <type name="VkStencilOp" category="enum"/>
+        <type name="VkStructureType" category="enum"/>
+        <type name="VkSystemAllocationScope" category="enum"/>
+        <type name="VkInternalAllocationType" category="enum"/>
+        <type name="VkSamplerAddressMode" category="enum"/>
+        <type name="VkFilter" category="enum"/>
+        <type name="VkSamplerMipmapMode" category="enum"/>
+        <type name="VkVertexInputRate" category="enum"/>
+        <type name="VkPipelineStageFlagBits" category="enum"/>
+        <type name="VkSparseImageFormatFlagBits" category="enum"/>
+        <type name="VkSampleCountFlagBits" category="enum"/>
+        <type name="VkAttachmentDescriptionFlagBits" category="enum"/>
+        <type name="VkDescriptorPoolCreateFlagBits" category="enum"/>
+        <type name="VkDependencyFlagBits" category="enum"/>
+        <type name="VkObjectType" category="enum"/>
+        <type name="VkDescriptorBindingFlagBitsEXT" category="enum"/>
+        <type name="VkConditionalRenderingFlagBitsEXT" category="enum"/>
+
+        <comment>Extensions</comment>
+        <type name="VkIndirectCommandsLayoutUsageFlagBitsNVX" category="enum"/>
+        <type name="VkIndirectCommandsTokenTypeNVX" category="enum"/>
+        <type name="VkObjectEntryUsageFlagBitsNVX" category="enum"/>
+        <type name="VkObjectEntryTypeNVX" category="enum"/>
+        <type name="VkDescriptorUpdateTemplateType" category="enum"/>
+        <type category="enum" name="VkDescriptorUpdateTemplateTypeKHR"             alias="VkDescriptorUpdateTemplateType"/>
+        <type name="VkViewportCoordinateSwizzleNV" category="enum"/>
+        <type name="VkDiscardRectangleModeEXT" category="enum"/>
+        <type name="VkSubpassDescriptionFlagBits" category="enum"/>
+        <type name="VkPointClippingBehavior" category="enum"/>
+        <type category="enum" name="VkPointClippingBehaviorKHR"                    alias="VkPointClippingBehavior"/>
+        <type name="VkCoverageModulationModeNV" category="enum"/>
+        <type name="VkCoverageReductionModeNV" category="enum"/>
+        <type name="VkValidationCacheHeaderVersionEXT" category="enum"/>
+        <type name="VkShaderInfoTypeAMD" category="enum"/>
+        <type name="VkQueueGlobalPriorityEXT" category="enum"/>
+        <type name="VkTimeDomainEXT" category="enum"/>
+        <type name="VkConservativeRasterizationModeEXT" category="enum"/>
+        <type name="VkSemaphoreTypeKHR" category="enum"/>
+        <type name="VkResolveModeFlagBitsKHR" category="enum"/>
+        <type name="VkGeometryFlagBitsNV" category="enum"/>
+        <type name="VkGeometryInstanceFlagBitsNV" category="enum"/>
+        <type name="VkBuildAccelerationStructureFlagBitsNV" category="enum"/>
+        <type name="VkCopyAccelerationStructureModeNV" category="enum"/>
+        <type name="VkAccelerationStructureTypeNV" category="enum"/>
+        <type name="VkGeometryTypeNV" category="enum"/>
+        <type name="VkRayTracingShaderGroupTypeNV" category="enum"/>
+        <type name="VkAccelerationStructureMemoryRequirementsTypeNV" category="enum"/>
+        <type name="VkMemoryOverallocationBehaviorAMD" category="enum"/>
+        <type name="VkScopeNV" category="enum"/>
+        <type name="VkComponentTypeNV" category="enum"/>
+        <type name="VkPipelineCreationFeedbackFlagBitsEXT" category="enum"/>
+        <type name="VkPerformanceCounterScopeKHR" category="enum"/>
+        <type name="VkPerformanceCounterUnitKHR" category="enum"/>
+        <type name="VkPerformanceCounterStorageKHR" category="enum"/>
+        <type name="VkPerformanceCounterDescriptionFlagBitsKHR" category="enum"/>
+        <type name="VkAcquireProfilingLockFlagBitsKHR" category="enum"/>
+        <type name="VkSemaphoreWaitFlagBitsKHR" category="enum"/>
+        <type name="VkPerformanceConfigurationTypeINTEL" category="enum"/>
+        <type name="VkQueryPoolSamplingModeINTEL" category="enum"/>
+        <type name="VkPerformanceOverrideTypeINTEL" category="enum"/>
+        <type name="VkPerformanceParameterTypeINTEL" category="enum"/>
+        <type name="VkPerformanceValueTypeINTEL" category="enum"/>
+        <type name="VkLineRasterizationModeEXT" category="enum"/>
+        <type name="VkShaderModuleCreateFlagBits" category="enum"/>
+        <type name="VkPipelineCompilerControlFlagBitsAMD" category="enum"/>
+        <type name="VkShaderCorePropertiesFlagBitsAMD" category="enum"/>
+        <type name="VkToolPurposeFlagBitsEXT" category="enum"/>
+
+            <comment>WSI extensions</comment>
+        <type name="VkColorSpaceKHR" category="enum"/>
+        <type name="VkCompositeAlphaFlagBitsKHR" category="enum"/>
+        <type name="VkDisplayPlaneAlphaFlagBitsKHR" category="enum"/>
+        <type name="VkPresentModeKHR" category="enum"/>
+        <type name="VkSurfaceTransformFlagBitsKHR" category="enum"/>
+        <type name="VkDebugReportFlagBitsEXT" category="enum"/>
+        <type name="VkDebugReportObjectTypeEXT" category="enum"/>
+        <type name="VkRasterizationOrderAMD" category="enum"/>
+        <type name="VkExternalMemoryHandleTypeFlagBitsNV" category="enum"/>
+        <type name="VkExternalMemoryFeatureFlagBitsNV" category="enum"/>
+        <type name="VkValidationCheckEXT" category="enum"/>
+        <type name="VkValidationFeatureEnableEXT" category="enum"/>
+        <type name="VkValidationFeatureDisableEXT" category="enum"/>
+        <type name="VkExternalMemoryHandleTypeFlagBits" category="enum"/>
+        <type category="enum" name="VkExternalMemoryHandleTypeFlagBitsKHR"         alias="VkExternalMemoryHandleTypeFlagBits"/>
+        <type name="VkExternalMemoryFeatureFlagBits" category="enum"/>
+        <type category="enum" name="VkExternalMemoryFeatureFlagBitsKHR"            alias="VkExternalMemoryFeatureFlagBits"/>
+        <type name="VkExternalSemaphoreHandleTypeFlagBits" category="enum"/>
+        <type category="enum" name="VkExternalSemaphoreHandleTypeFlagBitsKHR"      alias="VkExternalSemaphoreHandleTypeFlagBits"/>
+        <type name="VkExternalSemaphoreFeatureFlagBits" category="enum"/>
+        <type category="enum" name="VkExternalSemaphoreFeatureFlagBitsKHR"         alias="VkExternalSemaphoreFeatureFlagBits"/>
+        <type name="VkSemaphoreImportFlagBits" category="enum"/>
+        <type category="enum" name="VkSemaphoreImportFlagBitsKHR"                  alias="VkSemaphoreImportFlagBits"/>
+        <type name="VkExternalFenceHandleTypeFlagBits" category="enum"/>
+        <type category="enum" name="VkExternalFenceHandleTypeFlagBitsKHR"          alias="VkExternalFenceHandleTypeFlagBits"/>
+        <type name="VkExternalFenceFeatureFlagBits" category="enum"/>
+        <type category="enum" name="VkExternalFenceFeatureFlagBitsKHR"             alias="VkExternalFenceFeatureFlagBits"/>
+        <type name="VkFenceImportFlagBits" category="enum"/>
+        <type category="enum" name="VkFenceImportFlagBitsKHR"                      alias="VkFenceImportFlagBits"/>
+        <type name="VkSurfaceCounterFlagBitsEXT" category="enum"/>
+        <type name="VkDisplayPowerStateEXT" category="enum"/>
+        <type name="VkDeviceEventTypeEXT" category="enum"/>
+        <type name="VkDisplayEventTypeEXT" category="enum"/>
+        <type name="VkPeerMemoryFeatureFlagBits" category="enum"/>
+        <type category="enum" name="VkPeerMemoryFeatureFlagBitsKHR"                alias="VkPeerMemoryFeatureFlagBits"/>
+        <type name="VkMemoryAllocateFlagBits" category="enum"/>
+        <type category="enum" name="VkMemoryAllocateFlagBitsKHR"                   alias="VkMemoryAllocateFlagBits"/>
+        <type name="VkDeviceGroupPresentModeFlagBitsKHR" category="enum"/>
+        <type name="VkSwapchainCreateFlagBitsKHR" category="enum"/>
+        <type name="VkSubgroupFeatureFlagBits" category="enum"/>
+        <type name="VkTessellationDomainOrigin" category="enum"/>
+        <type category="enum" name="VkTessellationDomainOriginKHR"                 alias="VkTessellationDomainOrigin"/>
+        <type name="VkSamplerYcbcrModelConversion" category="enum"/>
+        <type category="enum" name="VkSamplerYcbcrModelConversionKHR"              alias="VkSamplerYcbcrModelConversion"/>
+        <type name="VkSamplerYcbcrRange" category="enum"/>
+        <type category="enum" name="VkSamplerYcbcrRangeKHR"                        alias="VkSamplerYcbcrRange"/>
+        <type name="VkChromaLocation" category="enum"/>
+        <type category="enum" name="VkChromaLocationKHR"                           alias="VkChromaLocation"/>
+        <type name="VkSamplerReductionModeEXT" category="enum"/>
+        <type name="VkBlendOverlapEXT" category="enum"/>
+        <type name="VkDebugUtilsMessageSeverityFlagBitsEXT" category="enum"/>
+        <type name="VkDebugUtilsMessageTypeFlagBitsEXT" category="enum"/>
+        <type name="VkFullScreenExclusiveEXT" category="enum"/>
+        <type name="VkShaderFloatControlsIndependenceKHR" category="enum"/>
+        <type name="VkSwapchainImageUsageFlagBitsANDROID" category="enum"/>
+
+            <comment>Enumerated types in the header, but not used by the API</comment>
+        <type name="VkVendorId" category="enum"/>
+        <type name="VkDriverIdKHR" category="enum"/>
+        <type name="VkShadingRatePaletteEntryNV" category="enum"/>
+        <type name="VkCoarseSampleOrderTypeNV" category="enum"/>
+        <type name="VkPipelineExecutableStatisticFormatKHR" category="enum"/>
+
+        <comment>The PFN_vk*Function types are used by VkAllocationCallbacks below</comment>
+        <type category="funcpointer">typedef void (VKAPI_PTR *<name>PFN_vkInternalAllocationNotification</name>)(
+    <type>void</type>*                                       pUserData,
+    <type>size_t</type>                                      size,
+    <type>VkInternalAllocationType</type>                    allocationType,
+    <type>VkSystemAllocationScope</type>                     allocationScope);</type>
+        <type category="funcpointer">typedef void (VKAPI_PTR *<name>PFN_vkInternalFreeNotification</name>)(
+    <type>void</type>*                                       pUserData,
+    <type>size_t</type>                                      size,
+    <type>VkInternalAllocationType</type>                    allocationType,
+    <type>VkSystemAllocationScope</type>                     allocationScope);</type>
+        <type category="funcpointer">typedef void* (VKAPI_PTR *<name>PFN_vkReallocationFunction</name>)(
+    <type>void</type>*                                       pUserData,
+    <type>void</type>*                                       pOriginal,
+    <type>size_t</type>                                      size,
+    <type>size_t</type>                                      alignment,
+    <type>VkSystemAllocationScope</type>                     allocationScope);</type>
+        <type category="funcpointer">typedef void* (VKAPI_PTR *<name>PFN_vkAllocationFunction</name>)(
+    <type>void</type>*                                       pUserData,
+    <type>size_t</type>                                      size,
+    <type>size_t</type>                                      alignment,
+    <type>VkSystemAllocationScope</type>                     allocationScope);</type>
+        <type category="funcpointer">typedef void (VKAPI_PTR *<name>PFN_vkFreeFunction</name>)(
+    <type>void</type>*                                       pUserData,
+    <type>void</type>*                                       pMemory);</type>
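These function-pointer types are what an application stores in VkAllocationCallbacks (declared further below). A minimal sketch of the allocation and free hooks, assuming a C11 hosted environment for aligned_alloc; the function names are illustrative, and a complete VkAllocationCallbacks would also need a pfnReallocation implementation, which is omitted here:

    #include <stdlib.h>
    #include <vulkan/vulkan.h>

    static VKAPI_ATTR void* VKAPI_CALL my_alloc(void* pUserData, size_t size,
                                                size_t alignment,
                                                VkSystemAllocationScope scope) {
        (void)pUserData; (void)scope;
        /* alignment is a power of two; aligned_alloc wants size rounded up
           to a multiple of it. */
        size_t rounded = (size + alignment - 1) / alignment * alignment;
        return aligned_alloc(alignment, rounded);
    }

    static VKAPI_ATTR void VKAPI_CALL my_free(void* pUserData, void* pMemory) {
        (void)pUserData;
        free(pMemory);
    }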
+
+            <comment>The PFN_vkVoidFunction type is used by VkGet*ProcAddr below</comment>
+        <type category="funcpointer">typedef void (VKAPI_PTR *<name>PFN_vkVoidFunction</name>)(void);</type>
+
+            <comment>The PFN_vkDebugReportCallbackEXT type is used by the DEBUG_REPORT extension</comment>
+        <type category="funcpointer">typedef VkBool32 (VKAPI_PTR *<name>PFN_vkDebugReportCallbackEXT</name>)(
+    <type>VkDebugReportFlagsEXT</type>                       flags,
+    <type>VkDebugReportObjectTypeEXT</type>                  objectType,
+    <type>uint64_t</type>                                    object,
+    <type>size_t</type>                                      location,
+    <type>int32_t</type>                                     messageCode,
+    const <type>char</type>*                                 pLayerPrefix,
+    const <type>char</type>*                                 pMessage,
+    <type>void</type>*                                       pUserData);</type>
+
+            <comment>The PFN_vkDebugUtilsMessengerCallbackEXT type is used by the VK_EXT_debug_utils extension</comment>
+        <type category="funcpointer" requires="VkDebugUtilsMessengerCallbackDataEXT">typedef VkBool32 (VKAPI_PTR *<name>PFN_vkDebugUtilsMessengerCallbackEXT</name>)(
+    <type>VkDebugUtilsMessageSeverityFlagBitsEXT</type>           messageSeverity,
+    <type>VkDebugUtilsMessageTypeFlagsEXT</type>                  messageTypes,
+    const <type>VkDebugUtilsMessengerCallbackDataEXT</type>*      pCallbackData,
+    <type>void</type>*                                            pUserData);</type>
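A minimal sketch of a callback matching this signature (the function name is illustrative); it logs the message and returns VK_FALSE, which is what the extension expects from application callbacks:

    #include <stdio.h>
    #include <vulkan/vulkan.h>

    static VKAPI_ATTR VkBool32 VKAPI_CALL my_debug_callback(
            VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
            VkDebugUtilsMessageTypeFlagsEXT messageTypes,
            const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData,
            void* pUserData) {
        (void)messageTypes; (void)pUserData;
        fprintf(stderr, "[vulkan 0x%x] %s\n",
                (unsigned)messageSeverity, pCallbackData->pMessage);
        return VK_FALSE;
    }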
+
+            <comment>Struct types</comment>
+        <type category="struct" name="VkBaseOutStructure">
+            <member><type>VkStructureType</type> <name>sType</name></member>
+            <member>struct <type>VkBaseOutStructure</type>* <name>pNext</name></member>
+        </type>
+        <type category="struct" name="VkBaseInStructure">
+            <member><type>VkStructureType</type> <name>sType</name></member>
+            <member>const struct <type>VkBaseInStructure</type>* <name>pNext</name></member>
+        </type>
+        <type category="struct" name="VkOffset2D">
+            <member><type>int32_t</type>        <name>x</name></member>
+            <member><type>int32_t</type>        <name>y</name></member>
+        </type>
+        <type category="struct" name="VkOffset3D">
+            <member><type>int32_t</type>        <name>x</name></member>
+            <member><type>int32_t</type>        <name>y</name></member>
+            <member><type>int32_t</type>        <name>z</name></member>
+        </type>
+        <type category="struct" name="VkExtent2D">
+            <member><type>uint32_t</type>        <name>width</name></member>
+            <member><type>uint32_t</type>        <name>height</name></member>
+        </type>
+        <type category="struct" name="VkExtent3D">
+            <member><type>uint32_t</type>        <name>width</name></member>
+            <member><type>uint32_t</type>        <name>height</name></member>
+            <member><type>uint32_t</type>        <name>depth</name></member>
+        </type>
+        <type category="struct" name="VkViewport">
+            <member noautovalidity="true"><type>float</type> <name>x</name></member>
+            <member noautovalidity="true"><type>float</type> <name>y</name></member>
+            <member noautovalidity="true"><type>float</type> <name>width</name></member>
+            <member noautovalidity="true"><type>float</type> <name>height</name></member>
+            <member><type>float</type>                       <name>minDepth</name></member>
+            <member><type>float</type>                       <name>maxDepth</name></member>
+        </type>
+        <type category="struct" name="VkRect2D">
+            <member><type>VkOffset2D</type>     <name>offset</name></member>
+            <member><type>VkExtent2D</type>     <name>extent</name></member>
+        </type>
+        <type category="struct" name="VkClearRect">
+            <member><type>VkRect2D</type>       <name>rect</name></member>
+            <member><type>uint32_t</type>       <name>baseArrayLayer</name></member>
+            <member><type>uint32_t</type>       <name>layerCount</name></member>
+        </type>
+        <type category="struct" name="VkComponentMapping">
+            <member><type>VkComponentSwizzle</type> <name>r</name></member>
+            <member><type>VkComponentSwizzle</type> <name>g</name></member>
+            <member><type>VkComponentSwizzle</type> <name>b</name></member>
+            <member><type>VkComponentSwizzle</type> <name>a</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceProperties" returnedonly="true">
+            <member><type>uint32_t</type>       <name>apiVersion</name></member>
+            <member><type>uint32_t</type>       <name>driverVersion</name></member>
+            <member><type>uint32_t</type>       <name>vendorID</name></member>
+            <member><type>uint32_t</type>       <name>deviceID</name></member>
+            <member><type>VkPhysicalDeviceType</type> <name>deviceType</name></member>
+            <member><type>char</type>           <name>deviceName</name>[<enum>VK_MAX_PHYSICAL_DEVICE_NAME_SIZE</enum>]</member>
+            <member><type>uint8_t</type>        <name>pipelineCacheUUID</name>[<enum>VK_UUID_SIZE</enum>]</member>
+            <member><type>VkPhysicalDeviceLimits</type> <name>limits</name></member>
+            <member><type>VkPhysicalDeviceSparseProperties</type> <name>sparseProperties</name></member>
+        </type>
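A sketch of how this returnedonly struct is typically read back, assuming a VkInstance has already been created; apiVersion is decoded with the version macros defined earlier:

    #include <stdio.h>
    #include <vulkan/vulkan.h>

    void print_first_device(VkInstance instance) {
        uint32_t count = 0;
        vkEnumeratePhysicalDevices(instance, &count, NULL);
        if (count == 0)
            return;

        VkPhysicalDevice device = VK_NULL_HANDLE;
        count = 1;  /* only fetch the first device for this sketch */
        vkEnumeratePhysicalDevices(instance, &count, &device);

        VkPhysicalDeviceProperties props;
        vkGetPhysicalDeviceProperties(device, &props);
        printf("%s: Vulkan %u.%u.%u\n", props.deviceName,
               VK_VERSION_MAJOR(props.apiVersion),
               VK_VERSION_MINOR(props.apiVersion),
               VK_VERSION_PATCH(props.apiVersion));
    }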
+        <type category="struct" name="VkExtensionProperties" returnedonly="true">
+            <member><type>char</type>            <name>extensionName</name>[<enum>VK_MAX_EXTENSION_NAME_SIZE</enum>]<comment>extension name</comment></member>
+            <member><type>uint32_t</type>        <name>specVersion</name><comment>version of the extension specification implemented</comment></member>
+        </type>
+        <type category="struct" name="VkLayerProperties" returnedonly="true">
+            <member><type>char</type>            <name>layerName</name>[<enum>VK_MAX_EXTENSION_NAME_SIZE</enum>]<comment>layer name</comment></member>
+            <member><type>uint32_t</type>        <name>specVersion</name><comment>version of the layer specification implemented</comment></member>
+            <member><type>uint32_t</type>        <name>implementationVersion</name><comment>build or release version of the layer's library</comment></member>
+            <member><type>char</type>            <name>description</name>[<enum>VK_MAX_DESCRIPTION_SIZE</enum>]<comment>Free-form description of the layer</comment></member>
+        </type>
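Both of these returnedonly structs are filled via the usual two-call enumeration pattern. A sketch listing instance-level extensions; layers work the same way through vkEnumerateInstanceLayerProperties:

    #include <stdio.h>
    #include <stdlib.h>
    #include <vulkan/vulkan.h>

    void list_instance_extensions(void) {
        uint32_t count = 0;
        vkEnumerateInstanceExtensionProperties(NULL, &count, NULL);
        if (count == 0)
            return;

        VkExtensionProperties* props = calloc(count, sizeof(*props));
        if (!props)
            return;
        vkEnumerateInstanceExtensionProperties(NULL, &count, props);

        for (uint32_t i = 0; i < count; ++i)
            printf("%s (spec version %u)\n",
                   props[i].extensionName, (unsigned)props[i].specVersion);
        free(props);
    }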
+        <type category="struct" name="VkApplicationInfo">
+            <member values="VK_STRUCTURE_TYPE_APPLICATION_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*     <name>pNext</name></member>
+            <member optional="true" len="null-terminated">const <type>char</type>*     <name>pApplicationName</name></member>
+            <member><type>uint32_t</type>        <name>applicationVersion</name></member>
+            <member optional="true" len="null-terminated">const <type>char</type>*     <name>pEngineName</name></member>
+            <member><type>uint32_t</type>        <name>engineVersion</name></member>
+            <member><type>uint32_t</type>        <name>apiVersion</name></member>
+        </type>
+        <type category="struct" name="VkAllocationCallbacks">
+            <member optional="true"><type>void</type>*           <name>pUserData</name></member>
+            <member noautovalidity="true"><type>PFN_vkAllocationFunction</type>   <name>pfnAllocation</name></member>
+            <member noautovalidity="true"><type>PFN_vkReallocationFunction</type> <name>pfnReallocation</name></member>
+            <member noautovalidity="true"><type>PFN_vkFreeFunction</type>    <name>pfnFree</name></member>
+            <member optional="true" noautovalidity="true"><type>PFN_vkInternalAllocationNotification</type> <name>pfnInternalAllocation</name></member>
+            <member optional="true" noautovalidity="true"><type>PFN_vkInternalFreeNotification</type> <name>pfnInternalFree</name></member>
+        </type>
+        <type category="struct" name="VkDeviceQueueCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*     <name>pNext</name></member>
+            <member optional="true"><type>VkDeviceQueueCreateFlags</type>    <name>flags</name></member>
+            <member><type>uint32_t</type>        <name>queueFamilyIndex</name></member>
+            <member><type>uint32_t</type>        <name>queueCount</name></member>
+            <member len="queueCount">const <type>float</type>*    <name>pQueuePriorities</name></member>
+        </type>
+        <type category="struct" name="VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*     <name>pNext</name></member>
+            <member optional="true"><type>VkDeviceCreateFlags</type>    <name>flags</name></member>
+            <member><type>uint32_t</type>        <name>queueCreateInfoCount</name></member>
+            <member len="queueCreateInfoCount">const <type>VkDeviceQueueCreateInfo</type>* <name>pQueueCreateInfos</name></member>
+            <member optional="true"><type>uint32_t</type>               <name>enabledLayerCount</name></member>
+            <member len="enabledLayerCount,null-terminated">const <type>char</type>* const*      <name>ppEnabledLayerNames</name><comment>Ordered list of layer names to be enabled</comment></member>
+            <member optional="true"><type>uint32_t</type>               <name>enabledExtensionCount</name></member>
+            <member len="enabledExtensionCount,null-terminated">const <type>char</type>* const*      <name>ppEnabledExtensionNames</name></member>
+            <member optional="true">const <type>VkPhysicalDeviceFeatures</type>* <name>pEnabledFeatures</name></member>
+        </type>
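A sketch of chaining VkDeviceQueueCreateInfo into VkDeviceCreateInfo for vkCreateDevice, assuming a VkPhysicalDevice and a suitable queue family index have already been selected elsewhere:

    #include <vulkan/vulkan.h>

    VkDevice create_device(VkPhysicalDevice physical_device, uint32_t queue_family) {
        float priority = 1.0f;

        VkDeviceQueueCreateInfo queue_info = {0};
        queue_info.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
        queue_info.queueFamilyIndex = queue_family;
        queue_info.queueCount = 1;
        queue_info.pQueuePriorities = &priority;

        VkDeviceCreateInfo device_info = {0};
        device_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
        device_info.queueCreateInfoCount = 1;
        device_info.pQueueCreateInfos = &queue_info;
        /* No layers, extensions, or enabled features in this sketch. */

        VkDevice device = VK_NULL_HANDLE;
        if (vkCreateDevice(physical_device, &device_info, NULL, &device) != VK_SUCCESS)
            return VK_NULL_HANDLE;
        return device;
    }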
+        <type category="struct" name="VkInstanceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*     <name>pNext</name></member>
+            <member optional="true"><type>VkInstanceCreateFlags</type>  <name>flags</name></member>
+            <member optional="true">const <type>VkApplicationInfo</type>* <name>pApplicationInfo</name></member>
+            <member optional="true"><type>uint32_t</type>               <name>enabledLayerCount</name></member>
+            <member len="enabledLayerCount,null-terminated">const <type>char</type>* const*      <name>ppEnabledLayerNames</name><comment>Ordered list of layer names to be enabled</comment></member>
+            <member optional="true"><type>uint32_t</type>               <name>enabledExtensionCount</name></member>
+            <member len="enabledExtensionCount,null-terminated">const <type>char</type>* const*      <name>ppEnabledExtensionNames</name><comment>Extension names to be enabled</comment></member>
+        </type>
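+        <!-- A minimal C sketch (illustrative only, not part of the registry): it fills
+             VkApplicationInfo and VkInstanceCreateInfo as defined above and calls vkCreateInstance
+             with no layers, no extensions, and no custom allocator. The application name is a
+             placeholder and error handling is omitted.
+
+             #include <vulkan/vulkan.h>
+
+             VkInstance create_instance(void) {
+                 VkApplicationInfo app_info = {0};
+                 app_info.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
+                 app_info.pApplicationName = "sample-app";    /* placeholder name */
+                 app_info.apiVersion = VK_API_VERSION_1_0;
+
+                 VkInstanceCreateInfo create_info = {0};
+                 create_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
+                 create_info.pApplicationInfo = &app_info;    /* optional, may be NULL */
+
+                 VkInstance instance = VK_NULL_HANDLE;
+                 vkCreateInstance(&create_info, NULL, &instance);
+                 return instance;
+             }
+        -->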
+        <type category="struct" name="VkQueueFamilyProperties" returnedonly="true">
+            <member optional="true"><type>VkQueueFlags</type>           <name>queueFlags</name><comment>Queue flags</comment></member>
+            <member><type>uint32_t</type>               <name>queueCount</name></member>
+            <member><type>uint32_t</type>               <name>timestampValidBits</name></member>
+            <member><type>VkExtent3D</type>             <name>minImageTransferGranularity</name><comment>Minimum alignment requirement for image transfers</comment></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceMemoryProperties" returnedonly="true">
+            <member><type>uint32_t</type>               <name>memoryTypeCount</name></member>
+            <member><type>VkMemoryType</type>           <name>memoryTypes</name>[<enum>VK_MAX_MEMORY_TYPES</enum>]</member>
+            <member><type>uint32_t</type>               <name>memoryHeapCount</name></member>
+            <member><type>VkMemoryHeap</type>           <name>memoryHeaps</name>[<enum>VK_MAX_MEMORY_HEAPS</enum>]</member>
+        </type>
+        <type category="struct" name="VkMemoryAllocateInfo">
+            <member values="VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member><type>VkDeviceSize</type>           <name>allocationSize</name><comment>Size of memory allocation</comment></member>
+            <member><type>uint32_t</type>               <name>memoryTypeIndex</name><comment>Index of the memory type to allocate from</comment></member>
+        </type>
+        <type category="struct" name="VkMemoryRequirements" returnedonly="true">
+            <member><type>VkDeviceSize</type>           <name>size</name><comment>Specified in bytes</comment></member>
+            <member><type>VkDeviceSize</type>           <name>alignment</name><comment>Specified in bytes</comment></member>
+            <member><type>uint32_t</type>               <name>memoryTypeBits</name><comment>Bitmask of the allowed memory type indices into memoryTypes[] for this object</comment></member>
+        </type>
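+        <!-- A minimal C sketch (illustrative only, not part of the registry): it shows the usual
+             pattern of matching VkMemoryRequirements::memoryTypeBits against
+             VkPhysicalDeviceMemoryProperties to pick a memoryTypeIndex for VkMemoryAllocateInfo.
+             The requested property flag (DEVICE_LOCAL) is an assumption; error handling is omitted.
+
+             #include <vulkan/vulkan.h>
+
+             static uint32_t find_memory_type(const VkPhysicalDeviceMemoryProperties* props,
+                                              const VkMemoryRequirements* reqs,
+                                              VkMemoryPropertyFlags wanted) {
+                 for (uint32_t i = 0; i < props->memoryTypeCount; ++i) {
+                     int allowed = (reqs->memoryTypeBits & (1u << i)) != 0;
+                     int matches = (props->memoryTypes[i].propertyFlags & wanted) == wanted;
+                     if (allowed && matches) return i;
+                 }
+                 return UINT32_MAX;                           /* no suitable memory type */
+             }
+
+             VkDeviceMemory allocate_device_memory(VkDevice device,
+                                                   const VkPhysicalDeviceMemoryProperties* props,
+                                                   const VkMemoryRequirements* reqs) {
+                 VkMemoryAllocateInfo alloc_info = {0};
+                 alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+                 alloc_info.allocationSize = reqs->size;
+                 alloc_info.memoryTypeIndex =
+                     find_memory_type(props, reqs, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
+                 VkDeviceMemory memory = VK_NULL_HANDLE;
+                 vkAllocateMemory(device, &alloc_info, NULL, &memory);
+                 return memory;
+             }
+        -->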
+        <type category="struct" name="VkSparseImageFormatProperties" returnedonly="true">
+            <member optional="true"><type>VkImageAspectFlags</type>     <name>aspectMask</name></member>
+            <member><type>VkExtent3D</type>             <name>imageGranularity</name></member>
+            <member optional="true"><type>VkSparseImageFormatFlags</type> <name>flags</name></member>
+        </type>
+        <type category="struct" name="VkSparseImageMemoryRequirements" returnedonly="true">
+            <member><type>VkSparseImageFormatProperties</type> <name>formatProperties</name></member>
+            <member><type>uint32_t</type>               <name>imageMipTailFirstLod</name></member>
+            <member><type>VkDeviceSize</type>           <name>imageMipTailSize</name><comment>Specified in bytes, must be a multiple of sparse block size in bytes / alignment</comment></member>
+            <member><type>VkDeviceSize</type>           <name>imageMipTailOffset</name><comment>Specified in bytes, must be a multiple of sparse block size in bytes / alignment</comment></member>
+            <member><type>VkDeviceSize</type>           <name>imageMipTailStride</name><comment>Specified in bytes, must be a multiple of sparse block size in bytes / alignment</comment></member>
+        </type>
+        <type category="struct" name="VkMemoryType" returnedonly="true">
+            <member optional="true"><type>VkMemoryPropertyFlags</type>  <name>propertyFlags</name><comment>Memory properties of this memory type</comment></member>
+            <member><type>uint32_t</type>               <name>heapIndex</name><comment>Index of the memory heap that allocations of this memory type are taken from</comment></member>
+        </type>
+        <type category="struct" name="VkMemoryHeap" returnedonly="true">
+            <member><type>VkDeviceSize</type>           <name>size</name><comment>Available memory in the heap</comment></member>
+            <member optional="true"><type>VkMemoryHeapFlags</type>      <name>flags</name><comment>Flags for the heap</comment></member>
+        </type>
+        <type category="struct" name="VkMappedMemoryRange">
+            <member values="VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member><type>VkDeviceMemory</type>         <name>memory</name><comment>Mapped memory object</comment></member>
+            <member><type>VkDeviceSize</type>           <name>offset</name><comment>Offset within the memory object where the range starts</comment></member>
+            <member><type>VkDeviceSize</type>           <name>size</name><comment>Size of the range within the memory object</comment></member>
+        </type>
+        <type category="struct" name="VkFormatProperties" returnedonly="true">
+            <member optional="true"><type>VkFormatFeatureFlags</type>   <name>linearTilingFeatures</name><comment>Format features in case of linear tiling</comment></member>
+            <member optional="true"><type>VkFormatFeatureFlags</type>   <name>optimalTilingFeatures</name><comment>Format features in case of optimal tiling</comment></member>
+            <member optional="true"><type>VkFormatFeatureFlags</type>   <name>bufferFeatures</name><comment>Format features supported by buffers</comment></member>
+        </type>
+        <type category="struct" name="VkImageFormatProperties" returnedonly="true">
+            <member><type>VkExtent3D</type>             <name>maxExtent</name><comment>max image dimensions for this resource type</comment></member>
+            <member><type>uint32_t</type>               <name>maxMipLevels</name><comment>max number of mipmap levels for this resource type</comment></member>
+            <member><type>uint32_t</type>               <name>maxArrayLayers</name><comment>max array size for this resource type</comment></member>
+            <member optional="true"><type>VkSampleCountFlags</type>     <name>sampleCounts</name><comment>supported sample counts for this resource type</comment></member>
+            <member><type>VkDeviceSize</type>           <name>maxResourceSize</name><comment>max size (in bytes) of this resource type</comment></member>
+        </type>
+        <type category="struct" name="VkDescriptorBufferInfo">
+            <member><type>VkBuffer</type>               <name>buffer</name><comment>Buffer used for this descriptor slot.</comment></member>
+            <member><type>VkDeviceSize</type>           <name>offset</name><comment>Base offset from buffer start in bytes to update in the descriptor set.</comment></member>
+            <member><type>VkDeviceSize</type>           <name>range</name><comment>Size in bytes of the buffer resource for this descriptor update.</comment></member>
+        </type>
+        <type category="struct" name="VkDescriptorImageInfo">
+            <member noautovalidity="true"><type>VkSampler</type>       <name>sampler</name><comment>Sampler to write to the descriptor in case it is a SAMPLER or COMBINED_IMAGE_SAMPLER descriptor. Ignored otherwise.</comment></member>
+            <member noautovalidity="true"><type>VkImageView</type>     <name>imageView</name><comment>Image view to write to the descriptor in case it is a SAMPLED_IMAGE, STORAGE_IMAGE, COMBINED_IMAGE_SAMPLER, or INPUT_ATTACHMENT descriptor. Ignored otherwise.</comment></member>
+            <member noautovalidity="true"><type>VkImageLayout</type>   <name>imageLayout</name><comment>Layout the image is expected to be in when accessed using this descriptor (only used if imageView is not VK_NULL_HANDLE).</comment></member>
+        </type>
+        <type category="struct" name="VkWriteDescriptorSet">
+            <member values="VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member noautovalidity="true"><type>VkDescriptorSet</type>        <name>dstSet</name><comment>Destination descriptor set</comment></member>
+            <member><type>uint32_t</type>               <name>dstBinding</name><comment>Binding within the destination descriptor set to write</comment></member>
+            <member><type>uint32_t</type>               <name>dstArrayElement</name><comment>Array element within the destination binding to write</comment></member>
+            <member><type>uint32_t</type>               <name>descriptorCount</name><comment>Number of descriptors to write (determines the size of the pImageInfo, pBufferInfo, or pTexelBufferView array that is used)</comment></member>
+            <member><type>VkDescriptorType</type>       <name>descriptorType</name><comment>Descriptor type to write (determines which of pImageInfo, pBufferInfo, and pTexelBufferView is read)</comment></member>
+            <member noautovalidity="true" len="descriptorCount">const <type>VkDescriptorImageInfo</type>* <name>pImageInfo</name><comment>Sampler, image view, and layout for SAMPLER, COMBINED_IMAGE_SAMPLER, {SAMPLED,STORAGE}_IMAGE, and INPUT_ATTACHMENT descriptor types.</comment></member>
+            <member noautovalidity="true" len="descriptorCount">const <type>VkDescriptorBufferInfo</type>* <name>pBufferInfo</name><comment>Raw buffer, size, and offset for {UNIFORM,STORAGE}_BUFFER[_DYNAMIC] descriptor types.</comment></member>
+            <member noautovalidity="true" len="descriptorCount">const <type>VkBufferView</type>*    <name>pTexelBufferView</name><comment>Buffer view to write to the descriptor for {UNIFORM,STORAGE}_TEXEL_BUFFER descriptor types.</comment></member>
+        </type>
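+        <!-- A minimal C sketch (illustrative only, not part of the registry): it updates a single
+             UNIFORM_BUFFER descriptor with vkUpdateDescriptorSets, using the VkDescriptorBufferInfo
+             and VkWriteDescriptorSet structures above. Binding 0 is an assumed layout detail and
+             the set, buffer, and size are caller-provided.
+
+             #include <vulkan/vulkan.h>
+
+             void write_uniform_buffer(VkDevice device, VkDescriptorSet set,
+                                       VkBuffer buffer, VkDeviceSize size) {
+                 VkDescriptorBufferInfo buffer_info = {0};
+                 buffer_info.buffer = buffer;
+                 buffer_info.offset = 0;
+                 buffer_info.range = size;
+
+                 VkWriteDescriptorSet write = {0};
+                 write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+                 write.dstSet = set;
+                 write.dstBinding = 0;                        /* assumed binding in the layout */
+                 write.descriptorCount = 1;
+                 write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+                 write.pBufferInfo = &buffer_info;
+
+                 vkUpdateDescriptorSets(device, 1, &write, 0, NULL);
+             }
+        -->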
+        <type category="struct" name="VkCopyDescriptorSet">
+            <member values="VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member><type>VkDescriptorSet</type>        <name>srcSet</name><comment>Source descriptor set</comment></member>
+            <member><type>uint32_t</type>               <name>srcBinding</name><comment>Binding within the source descriptor set to copy from</comment></member>
+            <member><type>uint32_t</type>               <name>srcArrayElement</name><comment>Array element within the source binding to copy from</comment></member>
+            <member><type>VkDescriptorSet</type>        <name>dstSet</name><comment>Destination descriptor set</comment></member>
+            <member><type>uint32_t</type>               <name>dstBinding</name><comment>Binding within the destination descriptor set to copy to</comment></member>
+            <member><type>uint32_t</type>               <name>dstArrayElement</name><comment>Array element within the destination binding to copy to</comment></member>
+            <member><type>uint32_t</type>               <name>descriptorCount</name><comment>Number of descriptors to copy from the source binding to the destination binding</comment></member>
+        </type>
+        <type category="struct" name="VkBufferCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkBufferCreateFlags</type>    <name>flags</name><comment>Buffer creation flags</comment></member>
+            <member><type>VkDeviceSize</type>           <name>size</name><comment>Specified in bytes</comment></member>
+            <member><type>VkBufferUsageFlags</type>     <name>usage</name><comment>Buffer usage flags</comment></member>
+            <member><type>VkSharingMode</type>          <name>sharingMode</name></member>
+            <member optional="true"><type>uint32_t</type>               <name>queueFamilyIndexCount</name></member>
+            <member noautovalidity="true" len="queueFamilyIndexCount">const <type>uint32_t</type>*        <name>pQueueFamilyIndices</name></member>
+        </type>
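+        <!-- A minimal C sketch (illustrative only, not part of the registry): it creates an
+             exclusively-owned uniform buffer from VkBufferCreateInfo above. The usage flag is an
+             assumption; memory binding and error handling are left out.
+
+             #include <vulkan/vulkan.h>
+
+             VkBuffer create_uniform_buffer(VkDevice device, VkDeviceSize size) {
+                 VkBufferCreateInfo info = {0};
+                 info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+                 info.size = size;
+                 info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+                 info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;  /* no cross-family sharing */
+                 VkBuffer buffer = VK_NULL_HANDLE;
+                 vkCreateBuffer(device, &info, NULL, &buffer);
+                 return buffer;
+             }
+        -->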
+        <type category="struct" name="VkBufferViewCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkBufferViewCreateFlags</type><name>flags</name></member>
+            <member><type>VkBuffer</type>               <name>buffer</name></member>
+            <member><type>VkFormat</type>               <name>format</name><comment>Optionally specifies format of elements</comment></member>
+            <member><type>VkDeviceSize</type>           <name>offset</name><comment>Specified in bytes</comment></member>
+            <member><type>VkDeviceSize</type>           <name>range</name><comment>View size specified in bytes</comment></member>
+        </type>
+        <type category="struct" name="VkImageSubresource">
+            <member><type>VkImageAspectFlags</type>     <name>aspectMask</name></member>
+            <member><type>uint32_t</type>               <name>mipLevel</name></member>
+            <member><type>uint32_t</type>               <name>arrayLayer</name></member>
+        </type>
+        <type category="struct" name="VkImageSubresourceLayers">
+            <member><type>VkImageAspectFlags</type>     <name>aspectMask</name></member>
+            <member><type>uint32_t</type>               <name>mipLevel</name></member>
+            <member><type>uint32_t</type>               <name>baseArrayLayer</name></member>
+            <member><type>uint32_t</type>               <name>layerCount</name></member>
+        </type>
+        <type category="struct" name="VkImageSubresourceRange">
+            <member><type>VkImageAspectFlags</type>     <name>aspectMask</name></member>
+            <member><type>uint32_t</type>               <name>baseMipLevel</name></member>
+            <member><type>uint32_t</type>               <name>levelCount</name></member>
+            <member><type>uint32_t</type>               <name>baseArrayLayer</name></member>
+            <member><type>uint32_t</type>               <name>layerCount</name></member>
+        </type>
+        <type category="struct" name="VkMemoryBarrier">
+            <member values="VK_STRUCTURE_TYPE_MEMORY_BARRIER"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkAccessFlags</type>          <name>srcAccessMask</name><comment>Memory accesses from the source of the dependency to synchronize</comment></member>
+            <member optional="true"><type>VkAccessFlags</type>          <name>dstAccessMask</name><comment>Memory accesses from the destination of the dependency to synchronize</comment></member>
+        </type>
+        <type category="struct" name="VkBufferMemoryBarrier">
+            <member values="VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member noautovalidity="true"><type>VkAccessFlags</type>          <name>srcAccessMask</name><comment>Memory accesses from the source of the dependency to synchronize</comment></member>
+            <member noautovalidity="true"><type>VkAccessFlags</type>          <name>dstAccessMask</name><comment>Memory accesses from the destination of the dependency to synchronize</comment></member>
+            <member><type>uint32_t</type>               <name>srcQueueFamilyIndex</name><comment>Queue family to transition ownership from</comment></member>
+            <member><type>uint32_t</type>               <name>dstQueueFamilyIndex</name><comment>Queue family to transition ownership to</comment></member>
+            <member><type>VkBuffer</type>               <name>buffer</name><comment>Buffer to sync</comment></member>
+            <member><type>VkDeviceSize</type>           <name>offset</name><comment>Offset within the buffer to sync</comment></member>
+            <member><type>VkDeviceSize</type>           <name>size</name><comment>Amount of bytes to sync</comment></member>
+        </type>
+        <type category="struct" name="VkImageMemoryBarrier">
+            <member values="VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member noautovalidity="true"><type>VkAccessFlags</type>          <name>srcAccessMask</name><comment>Memory accesses from the source of the dependency to synchronize</comment></member>
+            <member noautovalidity="true"><type>VkAccessFlags</type>          <name>dstAccessMask</name><comment>Memory accesses from the destination of the dependency to synchronize</comment></member>
+            <member><type>VkImageLayout</type>          <name>oldLayout</name><comment>Current layout of the image</comment></member>
+            <member><type>VkImageLayout</type>          <name>newLayout</name><comment>New layout to transition the image to</comment></member>
+            <member><type>uint32_t</type>               <name>srcQueueFamilyIndex</name><comment>Queue family to transition ownership from</comment></member>
+            <member><type>uint32_t</type>               <name>dstQueueFamilyIndex</name><comment>Queue family to transition ownership to</comment></member>
+            <member><type>VkImage</type>                <name>image</name><comment>Image to sync</comment></member>
+            <member><type>VkImageSubresourceRange</type> <name>subresourceRange</name><comment>Subresource range to sync</comment></member>
+        </type>
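+        <!-- A minimal C sketch (illustrative only, not part of the registry): it records an image
+             layout transition with vkCmdPipelineBarrier using the VkImageMemoryBarrier and
+             VkImageSubresourceRange structures above. The chosen stages, access masks, and layouts
+             are one common combination (preparing a color image as a transfer destination).
+
+             #include <vulkan/vulkan.h>
+
+             void transition_to_transfer_dst(VkCommandBuffer cmd, VkImage image) {
+                 VkImageMemoryBarrier barrier = {0};
+                 barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+                 barrier.srcAccessMask = 0;
+                 barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
+                 barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+                 barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
+                 barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;  /* no ownership transfer */
+                 barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+                 barrier.image = image;
+                 barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+                 barrier.subresourceRange.levelCount = 1;
+                 barrier.subresourceRange.layerCount = 1;
+
+                 vkCmdPipelineBarrier(cmd,
+                                      VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
+                                      VK_PIPELINE_STAGE_TRANSFER_BIT,
+                                      0,             /* dependencyFlags */
+                                      0, NULL,       /* global memory barriers */
+                                      0, NULL,       /* buffer memory barriers */
+                                      1, &barrier);  /* image memory barriers */
+             }
+        -->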
+        <type category="struct" name="VkImageCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkImageCreateFlags</type>     <name>flags</name><comment>Image creation flags</comment></member>
+            <member><type>VkImageType</type>            <name>imageType</name></member>
+            <member><type>VkFormat</type>               <name>format</name></member>
+            <member><type>VkExtent3D</type>             <name>extent</name></member>
+            <member><type>uint32_t</type>               <name>mipLevels</name></member>
+            <member><type>uint32_t</type>               <name>arrayLayers</name></member>
+            <member><type>VkSampleCountFlagBits</type>  <name>samples</name></member>
+            <member><type>VkImageTiling</type>          <name>tiling</name></member>
+            <member><type>VkImageUsageFlags</type>      <name>usage</name><comment>Image usage flags</comment></member>
+            <member><type>VkSharingMode</type>          <name>sharingMode</name><comment>Cross-queue-family sharing mode</comment></member>
+            <member optional="true"><type>uint32_t</type>               <name>queueFamilyIndexCount</name><comment>Number of queue families to share across</comment></member>
+            <member noautovalidity="true" len="queueFamilyIndexCount">const <type>uint32_t</type>*        <name>pQueueFamilyIndices</name><comment>Array of queue family indices to share across</comment></member>
+            <member><type>VkImageLayout</type>          <name>initialLayout</name><comment>Initial image layout for all subresources</comment></member>
+        </type>
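+        <!-- A minimal C sketch (illustrative only, not part of the registry): it fills
+             VkImageCreateInfo above for a single-mip, single-layer 2D color image. The format,
+             tiling, and usage flags are assumptions chosen for a sampled texture.
+
+             #include <vulkan/vulkan.h>
+
+             VkImage create_2d_texture(VkDevice device, uint32_t width, uint32_t height) {
+                 VkImageCreateInfo info = {0};
+                 info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+                 info.imageType = VK_IMAGE_TYPE_2D;
+                 info.format = VK_FORMAT_R8G8B8A8_UNORM;
+                 info.extent.width = width;
+                 info.extent.height = height;
+                 info.extent.depth = 1;
+                 info.mipLevels = 1;
+                 info.arrayLayers = 1;
+                 info.samples = VK_SAMPLE_COUNT_1_BIT;
+                 info.tiling = VK_IMAGE_TILING_OPTIMAL;
+                 info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
+                 info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+                 info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+                 VkImage image = VK_NULL_HANDLE;
+                 vkCreateImage(device, &info, NULL, &image);
+                 return image;
+             }
+        -->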
+        <type category="struct" name="VkSubresourceLayout" returnedonly="true">
+            <member><type>VkDeviceSize</type>           <name>offset</name><comment>Specified in bytes</comment></member>
+            <member><type>VkDeviceSize</type>           <name>size</name><comment>Specified in bytes</comment></member>
+            <member><type>VkDeviceSize</type>           <name>rowPitch</name><comment>Specified in bytes</comment></member>
+            <member><type>VkDeviceSize</type>           <name>arrayPitch</name><comment>Specified in bytes</comment></member>
+            <member><type>VkDeviceSize</type>           <name>depthPitch</name><comment>Specified in bytes</comment></member>
+        </type>
+        <type category="struct" name="VkImageViewCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkImageViewCreateFlags</type> <name>flags</name></member>
+            <member><type>VkImage</type>                <name>image</name></member>
+            <member><type>VkImageViewType</type>        <name>viewType</name></member>
+            <member><type>VkFormat</type>               <name>format</name></member>
+            <member><type>VkComponentMapping</type>     <name>components</name></member>
+            <member><type>VkImageSubresourceRange</type> <name>subresourceRange</name></member>
+        </type>
+        <type category="struct" name="VkBufferCopy">
+            <member><type>VkDeviceSize</type>                       <name>srcOffset</name><comment>Specified in bytes</comment></member>
+            <member><type>VkDeviceSize</type>                       <name>dstOffset</name><comment>Specified in bytes</comment></member>
+            <member noautovalidity="true"><type>VkDeviceSize</type> <name>size</name><comment>Specified in bytes</comment></member>
+        </type>
+        <type category="struct" name="VkSparseMemoryBind">
+            <member><type>VkDeviceSize</type>           <name>resourceOffset</name><comment>Specified in bytes</comment></member>
+            <member><type>VkDeviceSize</type>           <name>size</name><comment>Specified in bytes</comment></member>
+            <member optional="true"><type>VkDeviceMemory</type>         <name>memory</name></member>
+            <member><type>VkDeviceSize</type>           <name>memoryOffset</name><comment>Specified in bytes</comment></member>
+            <member optional="true"><type>VkSparseMemoryBindFlags</type><name>flags</name></member>
+        </type>
+        <type category="struct" name="VkSparseImageMemoryBind">
+            <member><type>VkImageSubresource</type>     <name>subresource</name></member>
+            <member><type>VkOffset3D</type>             <name>offset</name></member>
+            <member><type>VkExtent3D</type>             <name>extent</name></member>
+            <member optional="true"><type>VkDeviceMemory</type>         <name>memory</name></member>
+            <member><type>VkDeviceSize</type>           <name>memoryOffset</name><comment>Specified in bytes</comment></member>
+            <member optional="true"><type>VkSparseMemoryBindFlags</type><name>flags</name></member>
+        </type>
+        <type category="struct" name="VkSparseBufferMemoryBindInfo">
+            <member><type>VkBuffer</type> <name>buffer</name></member>
+            <member><type>uint32_t</type>               <name>bindCount</name></member>
+            <member len="bindCount">const <type>VkSparseMemoryBind</type>* <name>pBinds</name></member>
+        </type>
+        <type category="struct" name="VkSparseImageOpaqueMemoryBindInfo">
+            <member><type>VkImage</type> <name>image</name></member>
+            <member><type>uint32_t</type>               <name>bindCount</name></member>
+            <member len="bindCount">const <type>VkSparseMemoryBind</type>* <name>pBinds</name></member>
+        </type>
+        <type category="struct" name="VkSparseImageMemoryBindInfo">
+            <member><type>VkImage</type> <name>image</name></member>
+            <member><type>uint32_t</type>               <name>bindCount</name></member>
+            <member len="bindCount">const <type>VkSparseImageMemoryBind</type>* <name>pBinds</name></member>
+        </type>
+        <type category="struct" name="VkBindSparseInfo">
+            <member values="VK_STRUCTURE_TYPE_BIND_SPARSE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>uint32_t</type>               <name>waitSemaphoreCount</name></member>
+            <member len="waitSemaphoreCount">const <type>VkSemaphore</type>*     <name>pWaitSemaphores</name></member>
+            <member optional="true"><type>uint32_t</type>               <name>bufferBindCount</name></member>
+            <member len="bufferBindCount">const <type>VkSparseBufferMemoryBindInfo</type>* <name>pBufferBinds</name></member>
+            <member optional="true"><type>uint32_t</type>               <name>imageOpaqueBindCount</name></member>
+            <member len="imageOpaqueBindCount">const <type>VkSparseImageOpaqueMemoryBindInfo</type>* <name>pImageOpaqueBinds</name></member>
+            <member optional="true"><type>uint32_t</type>               <name>imageBindCount</name></member>
+            <member len="imageBindCount">const <type>VkSparseImageMemoryBindInfo</type>* <name>pImageBinds</name></member>
+            <member optional="true"><type>uint32_t</type>               <name>signalSemaphoreCount</name></member>
+            <member len="signalSemaphoreCount">const <type>VkSemaphore</type>*     <name>pSignalSemaphores</name></member>
+        </type>
+        <type category="struct" name="VkImageCopy">
+            <member><type>VkImageSubresourceLayers</type> <name>srcSubresource</name></member>
+            <member><type>VkOffset3D</type>             <name>srcOffset</name><comment>Specified in pixels for both compressed and uncompressed images</comment></member>
+            <member><type>VkImageSubresourceLayers</type> <name>dstSubresource</name></member>
+            <member><type>VkOffset3D</type>             <name>dstOffset</name><comment>Specified in pixels for both compressed and uncompressed images</comment></member>
+            <member><type>VkExtent3D</type>             <name>extent</name><comment>Specified in pixels for both compressed and uncompressed images</comment></member>
+        </type>
+        <type category="struct" name="VkImageBlit">
+            <member><type>VkImageSubresourceLayers</type> <name>srcSubresource</name></member>
+            <member><type>VkOffset3D</type>             <name>srcOffsets</name>[2]<comment>Specified in pixels for both compressed and uncompressed images</comment></member>
+            <member><type>VkImageSubresourceLayers</type> <name>dstSubresource</name></member>
+            <member><type>VkOffset3D</type>             <name>dstOffsets</name>[2]<comment>Specified in pixels for both compressed and uncompressed images</comment></member>
+        </type>
+        <type category="struct" name="VkBufferImageCopy">
+            <member><type>VkDeviceSize</type>           <name>bufferOffset</name><comment>Specified in bytes</comment></member>
+            <member><type>uint32_t</type>               <name>bufferRowLength</name><comment>Specified in texels</comment></member>
+            <member><type>uint32_t</type>               <name>bufferImageHeight</name></member>
+            <member><type>VkImageSubresourceLayers</type> <name>imageSubresource</name></member>
+            <member><type>VkOffset3D</type>             <name>imageOffset</name><comment>Specified in pixels for both compressed and uncompressed images</comment></member>
+            <member><type>VkExtent3D</type>             <name>imageExtent</name><comment>Specified in pixels for both compressed and uncompressed images</comment></member>
+        </type>
+        <type category="struct" name="VkImageResolve">
+            <member><type>VkImageSubresourceLayers</type> <name>srcSubresource</name></member>
+            <member><type>VkOffset3D</type>             <name>srcOffset</name></member>
+            <member><type>VkImageSubresourceLayers</type> <name>dstSubresource</name></member>
+            <member><type>VkOffset3D</type>             <name>dstOffset</name></member>
+            <member><type>VkExtent3D</type>             <name>extent</name></member>
+        </type>
+        <type category="struct" name="VkShaderModuleCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkShaderModuleCreateFlags</type> <name>flags</name></member>
+            <member><type>size_t</type>                 <name>codeSize</name><comment>Specified in bytes</comment></member>
+            <member len="latexmath:[\textrm{codeSize} \over 4]" altlen="codeSize / 4">const <type>uint32_t</type>*            <name>pCode</name><comment>Binary code of size codeSize</comment></member>
+        </type>
+        <type category="struct" name="VkDescriptorSetLayoutBinding">
+            <member><type>uint32_t</type>               <name>binding</name><comment>Binding number for this entry</comment></member>
+            <member><type>VkDescriptorType</type>       <name>descriptorType</name><comment>Type of the descriptors in this binding</comment></member>
+            <member optional="true"><type>uint32_t</type> <name>descriptorCount</name><comment>Number of descriptors in this binding</comment></member>
+            <member noautovalidity="true"><type>VkShaderStageFlags</type>     <name>stageFlags</name><comment>Shader stages this binding is visible to</comment></member>
+            <member noautovalidity="true" optional="true" len="descriptorCount">const <type>VkSampler</type>*       <name>pImmutableSamplers</name><comment>Immutable samplers (used if descriptor type is SAMPLER or COMBINED_IMAGE_SAMPLER, is either NULL or contains count number of elements)</comment></member>
+        </type>
+        <type category="struct" name="VkDescriptorSetLayoutCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkDescriptorSetLayoutCreateFlags</type>    <name>flags</name></member>
+            <member optional="true"><type>uint32_t</type>               <name>bindingCount</name><comment>Number of bindings in the descriptor set layout</comment></member>
+            <member len="bindingCount">const <type>VkDescriptorSetLayoutBinding</type>* <name>pBindings</name><comment>Array of descriptor set layout bindings</comment></member>
+        </type>
+        <type category="struct" name="VkDescriptorPoolSize">
+            <member><type>VkDescriptorType</type>       <name>type</name></member>
+            <member><type>uint32_t</type>               <name>descriptorCount</name></member>
+        </type>
+        <type category="struct" name="VkDescriptorPoolCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkDescriptorPoolCreateFlags</type>  <name>flags</name></member>
+            <member><type>uint32_t</type>               <name>maxSets</name></member>
+            <member><type>uint32_t</type>               <name>poolSizeCount</name></member>
+            <member len="poolSizeCount">const <type>VkDescriptorPoolSize</type>* <name>pPoolSizes</name></member>
+        </type>
+        <type category="struct" name="VkDescriptorSetAllocateInfo">
+            <member values="VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member><type>VkDescriptorPool</type>       <name>descriptorPool</name></member>
+            <member><type>uint32_t</type>               <name>descriptorSetCount</name></member>
+            <member len="descriptorSetCount">const <type>VkDescriptorSetLayout</type>* <name>pSetLayouts</name></member>
+        </type>
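+        <!-- A minimal C sketch (illustrative only, not part of the registry): it chains the
+             descriptor structures above (layout binding, set layout, pool, allocate info) to
+             allocate one descriptor set holding a single uniform buffer visible to the vertex
+             stage. The layout and pool are leaked here for brevity; real code would keep and
+             destroy them.
+
+             #include <vulkan/vulkan.h>
+
+             VkDescriptorSet allocate_uniform_set(VkDevice device) {
+                 VkDescriptorSetLayoutBinding binding = {0};
+                 binding.binding = 0;
+                 binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+                 binding.descriptorCount = 1;
+                 binding.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
+
+                 VkDescriptorSetLayoutCreateInfo layout_info = {0};
+                 layout_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
+                 layout_info.bindingCount = 1;
+                 layout_info.pBindings = &binding;
+                 VkDescriptorSetLayout layout = VK_NULL_HANDLE;
+                 vkCreateDescriptorSetLayout(device, &layout_info, NULL, &layout);
+
+                 VkDescriptorPoolSize pool_size = {0};
+                 pool_size.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+                 pool_size.descriptorCount = 1;
+
+                 VkDescriptorPoolCreateInfo pool_info = {0};
+                 pool_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+                 pool_info.maxSets = 1;
+                 pool_info.poolSizeCount = 1;
+                 pool_info.pPoolSizes = &pool_size;
+                 VkDescriptorPool pool = VK_NULL_HANDLE;
+                 vkCreateDescriptorPool(device, &pool_info, NULL, &pool);
+
+                 VkDescriptorSetAllocateInfo alloc_info = {0};
+                 alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+                 alloc_info.descriptorPool = pool;
+                 alloc_info.descriptorSetCount = 1;
+                 alloc_info.pSetLayouts = &layout;
+                 VkDescriptorSet set = VK_NULL_HANDLE;
+                 vkAllocateDescriptorSets(device, &alloc_info, &set);
+                 return set;
+             }
+        -->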
+        <type category="struct" name="VkSpecializationMapEntry">
+            <member><type>uint32_t</type>                     <name>constantID</name><comment>The ID of the specialization constant in the SPIR-V module</comment></member>
+            <member><type>uint32_t</type>                     <name>offset</name><comment>Offset of the value in the data block</comment></member>
+            <member noautovalidity="true"><type>size_t</type> <name>size</name><comment>Size in bytes of the SpecConstant</comment></member>
+        </type>
+        <type category="struct" name="VkSpecializationInfo">
+            <member optional="true"><type>uint32_t</type>               <name>mapEntryCount</name><comment>Number of entries in the map</comment></member>
+            <member len="mapEntryCount">const <type>VkSpecializationMapEntry</type>* <name>pMapEntries</name><comment>Array of map entries</comment></member>
+            <member optional="true"><type>size_t</type>                 <name>dataSize</name><comment>Size in bytes of pData</comment></member>
+            <member len="dataSize">const <type>void</type>*            <name>pData</name><comment>Pointer to SpecConstant data</comment></member>
+        </type>
+        <type category="struct" name="VkPipelineShaderStageCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkPipelineShaderStageCreateFlags</type>    <name>flags</name></member>
+            <member><type>VkShaderStageFlagBits</type>  <name>stage</name><comment>Shader stage</comment></member>
+            <member><type>VkShaderModule</type>         <name>module</name><comment>Module containing entry point</comment></member>
+            <member len="null-terminated">const <type>char</type>*            <name>pName</name><comment>Null-terminated entry point name</comment></member>
+            <member optional="true">const <type>VkSpecializationInfo</type>* <name>pSpecializationInfo</name></member>
+        </type>
+        <type category="struct" name="VkComputePipelineCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkPipelineCreateFlags</type>  <name>flags</name><comment>Pipeline creation flags</comment></member>
+            <member><type>VkPipelineShaderStageCreateInfo</type> <name>stage</name></member>
+            <member><type>VkPipelineLayout</type>       <name>layout</name><comment>Interface layout of the pipeline</comment></member>
+            <member noautovalidity="true" optional="true"><type>VkPipeline</type>      <name>basePipelineHandle</name><comment>If VK_PIPELINE_CREATE_DERIVATIVE_BIT is set and this value is nonzero, it specifies the handle of the base pipeline this is a derivative of</comment></member>
+            <member><type>int32_t</type>                <name>basePipelineIndex</name><comment>If VK_PIPELINE_CREATE_DERIVATIVE_BIT is set and this value is not -1, it specifies an index into pCreateInfos of the base pipeline this is a derivative of</comment></member>
+        </type>
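+        <!-- A minimal C sketch (illustrative only, not part of the registry): it builds a compute
+             pipeline from VkPipelineShaderStageCreateInfo and VkComputePipelineCreateInfo above.
+             The shader module and pipeline layout are caller-provided, the "main" entry point name
+             is an assumption, and no pipeline cache is used.
+
+             #include <vulkan/vulkan.h>
+
+             VkPipeline create_compute_pipeline(VkDevice device, VkShaderModule shader,
+                                                VkPipelineLayout layout) {
+                 VkPipelineShaderStageCreateInfo stage = {0};
+                 stage.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+                 stage.stage = VK_SHADER_STAGE_COMPUTE_BIT;
+                 stage.module = shader;
+                 stage.pName = "main";                        /* assumed SPIR-V entry point */
+
+                 VkComputePipelineCreateInfo info = {0};
+                 info.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
+                 info.stage = stage;
+                 info.layout = layout;
+                 info.basePipelineIndex = -1;                 /* not a pipeline derivative */
+
+                 VkPipeline pipeline = VK_NULL_HANDLE;
+                 vkCreateComputePipelines(device, VK_NULL_HANDLE, 1, &info, NULL, &pipeline);
+                 return pipeline;
+             }
+        -->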
+        <type category="struct" name="VkVertexInputBindingDescription">
+            <member><type>uint32_t</type>               <name>binding</name><comment>Vertex buffer binding id</comment></member>
+            <member><type>uint32_t</type>               <name>stride</name><comment>Distance between vertices in bytes (0 = no advancement)</comment></member>
+            <member><type>VkVertexInputRate</type>      <name>inputRate</name><comment>The rate at which the vertex data is consumed</comment></member>
+        </type>
+        <type category="struct" name="VkVertexInputAttributeDescription">
+            <member><type>uint32_t</type>               <name>location</name><comment>location of the shader vertex attrib</comment></member>
+            <member><type>uint32_t</type>               <name>binding</name><comment>Vertex buffer binding id</comment></member>
+            <member><type>VkFormat</type>               <name>format</name><comment>format of source data</comment></member>
+            <member><type>uint32_t</type>               <name>offset</name><comment>Offset of first element in bytes from base of vertex</comment></member>
+        </type>
+        <type category="struct" name="VkPipelineVertexInputStateCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkPipelineVertexInputStateCreateFlags</type>    <name>flags</name></member>
+            <member optional="true"><type>uint32_t</type>               <name>vertexBindingDescriptionCount</name><comment>number of bindings</comment></member>
+            <member len="vertexBindingDescriptionCount">const <type>VkVertexInputBindingDescription</type>* <name>pVertexBindingDescriptions</name></member>
+            <member optional="true"><type>uint32_t</type>               <name>vertexAttributeDescriptionCount</name><comment>number of attributes</comment></member>
+            <member len="vertexAttributeDescriptionCount">const <type>VkVertexInputAttributeDescription</type>* <name>pVertexAttributeDescriptions</name></member>
+        </type>
+        <type category="struct" name="VkPipelineInputAssemblyStateCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkPipelineInputAssemblyStateCreateFlags</type>    <name>flags</name></member>
+            <member><type>VkPrimitiveTopology</type>    <name>topology</name></member>
+            <member><type>VkBool32</type>               <name>primitiveRestartEnable</name></member>
+        </type>
+        <type category="struct" name="VkPipelineTessellationStateCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkPipelineTessellationStateCreateFlags</type>    <name>flags</name></member>
+            <member><type>uint32_t</type>               <name>patchControlPoints</name></member>
+        </type>
+        <type category="struct" name="VkPipelineViewportStateCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkPipelineViewportStateCreateFlags</type>    <name>flags</name></member>
+            <member><type>uint32_t</type>               <name>viewportCount</name></member>
+            <member noautovalidity="true" optional="true" len="viewportCount">const <type>VkViewport</type>*      <name>pViewports</name></member>
+            <member><type>uint32_t</type>               <name>scissorCount</name></member>
+            <member noautovalidity="true" optional="true" len="scissorCount">const <type>VkRect2D</type>*        <name>pScissors</name></member>
+        </type>
+        <type category="struct" name="VkPipelineRasterizationStateCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>* <name>pNext</name></member>
+            <member optional="true"><type>VkPipelineRasterizationStateCreateFlags</type>    <name>flags</name></member>
+            <member><type>VkBool32</type>               <name>depthClampEnable</name></member>
+            <member><type>VkBool32</type>               <name>rasterizerDiscardEnable</name></member>
+            <member><type>VkPolygonMode</type>          <name>polygonMode</name><comment>optional (GL45)</comment></member>
+            <member optional="true"><type>VkCullModeFlags</type>        <name>cullMode</name></member>
+            <member><type>VkFrontFace</type>            <name>frontFace</name></member>
+            <member><type>VkBool32</type>               <name>depthBiasEnable</name></member>
+            <member><type>float</type>                  <name>depthBiasConstantFactor</name></member>
+            <member><type>float</type>                  <name>depthBiasClamp</name></member>
+            <member><type>float</type>                  <name>depthBiasSlopeFactor</name></member>
+            <member><type>float</type>                  <name>lineWidth</name></member>
+        </type>
+        <type category="struct" name="VkPipelineMultisampleStateCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkPipelineMultisampleStateCreateFlags</type>    <name>flags</name></member>
+            <member><type>VkSampleCountFlagBits</type>  <name>rasterizationSamples</name><comment>Number of samples used for rasterization</comment></member>
+            <member><type>VkBool32</type>               <name>sampleShadingEnable</name><comment>optional (GL45)</comment></member>
+            <member><type>float</type>                  <name>minSampleShading</name><comment>optional (GL45)</comment></member>
+            <member optional="true" len="latexmath:[\lceil{\mathit{rasterizationSamples} \over 32}\rceil]" altlen="(rasterizationSamples + 31) / 32">const <type>VkSampleMask</type>*    <name>pSampleMask</name><comment>Array of sampleMask words</comment></member>
+            <member><type>VkBool32</type>               <name>alphaToCoverageEnable</name></member>
+            <member><type>VkBool32</type>               <name>alphaToOneEnable</name></member>
+        </type>
+        <type category="struct" name="VkPipelineColorBlendAttachmentState">
+            <member><type>VkBool32</type>               <name>blendEnable</name></member>
+            <member><type>VkBlendFactor</type>          <name>srcColorBlendFactor</name></member>
+            <member><type>VkBlendFactor</type>          <name>dstColorBlendFactor</name></member>
+            <member><type>VkBlendOp</type>              <name>colorBlendOp</name></member>
+            <member><type>VkBlendFactor</type>          <name>srcAlphaBlendFactor</name></member>
+            <member><type>VkBlendFactor</type>          <name>dstAlphaBlendFactor</name></member>
+            <member><type>VkBlendOp</type>              <name>alphaBlendOp</name></member>
+            <member optional="true"><type>VkColorComponentFlags</type>  <name>colorWriteMask</name></member>
+        </type>
+        <type category="struct" name="VkPipelineColorBlendStateCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkPipelineColorBlendStateCreateFlags</type>    <name>flags</name></member>
+            <member><type>VkBool32</type>               <name>logicOpEnable</name></member>
+            <member noautovalidity="true"><type>VkLogicOp</type>              <name>logicOp</name></member>
+            <member optional="true"><type>uint32_t</type>               <name>attachmentCount</name><comment># of pAttachments</comment></member>
+            <member len="attachmentCount">const <type>VkPipelineColorBlendAttachmentState</type>* <name>pAttachments</name></member>
+            <member><type>float</type>                  <name>blendConstants</name>[4]</member>
+        </type>
+        <type category="struct" name="VkPipelineDynamicStateCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkPipelineDynamicStateCreateFlags</type>    <name>flags</name></member>
+            <member optional="true"><type>uint32_t</type>               <name>dynamicStateCount</name></member>
+            <member len="dynamicStateCount">const <type>VkDynamicState</type>*  <name>pDynamicStates</name></member>
+        </type>
+        <type category="struct" name="VkStencilOpState">
+            <member><type>VkStencilOp</type>            <name>failOp</name></member>
+            <member><type>VkStencilOp</type>            <name>passOp</name></member>
+            <member><type>VkStencilOp</type>            <name>depthFailOp</name></member>
+            <member><type>VkCompareOp</type>            <name>compareOp</name></member>
+            <member><type>uint32_t</type>               <name>compareMask</name></member>
+            <member><type>uint32_t</type>               <name>writeMask</name></member>
+            <member><type>uint32_t</type>               <name>reference</name></member>
+        </type>
+        <type category="struct" name="VkPipelineDepthStencilStateCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkPipelineDepthStencilStateCreateFlags</type>    <name>flags</name></member>
+            <member><type>VkBool32</type>               <name>depthTestEnable</name></member>
+            <member><type>VkBool32</type>               <name>depthWriteEnable</name></member>
+            <member><type>VkCompareOp</type>            <name>depthCompareOp</name></member>
+            <member><type>VkBool32</type>               <name>depthBoundsTestEnable</name><comment>optional (depth_bounds_test)</comment></member>
+            <member><type>VkBool32</type>               <name>stencilTestEnable</name></member>
+            <member><type>VkStencilOpState</type>       <name>front</name></member>
+            <member><type>VkStencilOpState</type>       <name>back</name></member>
+            <member><type>float</type>                  <name>minDepthBounds</name></member>
+            <member><type>float</type>                  <name>maxDepthBounds</name></member>
+        </type>
+        <type category="struct" name="VkGraphicsPipelineCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkPipelineCreateFlags</type>  <name>flags</name><comment>Pipeline creation flags</comment></member>
+            <member><type>uint32_t</type>               <name>stageCount</name></member>
+            <member len="stageCount">const <type>VkPipelineShaderStageCreateInfo</type>* <name>pStages</name><comment>One entry for each active shader stage</comment></member>
+            <member noautovalidity="true" optional="true">const <type>VkPipelineVertexInputStateCreateInfo</type>* <name>pVertexInputState</name></member>
+            <member noautovalidity="true" optional="true">const <type>VkPipelineInputAssemblyStateCreateInfo</type>* <name>pInputAssemblyState</name></member>
+            <member noautovalidity="true" optional="true">const <type>VkPipelineTessellationStateCreateInfo</type>* <name>pTessellationState</name></member>
+            <member noautovalidity="true" optional="true">const <type>VkPipelineViewportStateCreateInfo</type>* <name>pViewportState</name></member>
+            <member>const <type>VkPipelineRasterizationStateCreateInfo</type>* <name>pRasterizationState</name></member>
+            <member noautovalidity="true" optional="true">const <type>VkPipelineMultisampleStateCreateInfo</type>* <name>pMultisampleState</name></member>
+            <member noautovalidity="true" optional="true">const <type>VkPipelineDepthStencilStateCreateInfo</type>* <name>pDepthStencilState</name></member>
+            <member noautovalidity="true" optional="true">const <type>VkPipelineColorBlendStateCreateInfo</type>* <name>pColorBlendState</name></member>
+            <member optional="true">const <type>VkPipelineDynamicStateCreateInfo</type>* <name>pDynamicState</name></member>
+            <member><type>VkPipelineLayout</type>       <name>layout</name><comment>Interface layout of the pipeline</comment></member>
+            <member><type>VkRenderPass</type>           <name>renderPass</name></member>
+            <member><type>uint32_t</type>               <name>subpass</name></member>
+            <member noautovalidity="true" optional="true"><type>VkPipeline</type>      <name>basePipelineHandle</name><comment>If VK_PIPELINE_CREATE_DERIVATIVE_BIT is set and this value is nonzero, it specifies the handle of the base pipeline this is a derivative of</comment></member>
+            <member><type>int32_t</type>                <name>basePipelineIndex</name><comment>If VK_PIPELINE_CREATE_DERIVATIVE_BIT is set and this value is not -1, it specifies an index into pCreateInfos of the base pipeline this is a derivative of</comment></member>
+        </type>
+        <type category="struct" name="VkPipelineCacheCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkPipelineCacheCreateFlags</type>    <name>flags</name></member>
+            <member optional="true"><type>size_t</type>                 <name>initialDataSize</name><comment>Size of initial data to populate cache, in bytes</comment></member>
+            <member len="initialDataSize">const <type>void</type>*            <name>pInitialData</name><comment>Initial data to populate cache</comment></member>
+        </type>
+        <type category="struct" name="VkPushConstantRange">
+            <member><type>VkShaderStageFlags</type>     <name>stageFlags</name><comment>Which stages use the range</comment></member>
+            <member><type>uint32_t</type>               <name>offset</name><comment>Start of the range, in bytes</comment></member>
+            <member><type>uint32_t</type>               <name>size</name><comment>Size of the range, in bytes</comment></member>
+        </type>
+        <type category="struct" name="VkPipelineLayoutCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkPipelineLayoutCreateFlags</type>    <name>flags</name></member>
+            <member optional="true"><type>uint32_t</type>               <name>setLayoutCount</name><comment>Number of descriptor sets interfaced by the pipeline</comment></member>
+            <member len="setLayoutCount">const <type>VkDescriptorSetLayout</type>* <name>pSetLayouts</name><comment>Array of setCount number of descriptor set layout objects defining the layout of the</comment></member>
+            <member optional="true"><type>uint32_t</type>               <name>pushConstantRangeCount</name><comment>Number of push-constant ranges used by the pipeline</comment></member>
+            <member len="pushConstantRangeCount">const <type>VkPushConstantRange</type>* <name>pPushConstantRanges</name><comment>Array of pushConstantRangeCount number of ranges used by various shader stages</comment></member>
+        </type>
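+        <!-- A minimal C sketch (illustrative only, not part of the registry): it creates a pipeline
+             layout with one descriptor set layout and one push-constant range, exercising
+             VkPushConstantRange and VkPipelineLayoutCreateInfo above. The 64-byte range (e.g. a
+             4x4 float matrix for the vertex stage) is an assumption.
+
+             #include <vulkan/vulkan.h>
+
+             VkPipelineLayout create_pipeline_layout(VkDevice device,
+                                                     VkDescriptorSetLayout set_layout) {
+                 VkPushConstantRange range = {0};
+                 range.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
+                 range.offset = 0;
+                 range.size = 64;                             /* assumed: one 4x4 float matrix */
+
+                 VkPipelineLayoutCreateInfo info = {0};
+                 info.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+                 info.setLayoutCount = 1;
+                 info.pSetLayouts = &set_layout;
+                 info.pushConstantRangeCount = 1;
+                 info.pPushConstantRanges = &range;
+
+                 VkPipelineLayout layout = VK_NULL_HANDLE;
+                 vkCreatePipelineLayout(device, &info, NULL, &layout);
+                 return layout;
+             }
+        -->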
+        <type category="struct" name="VkSamplerCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkSamplerCreateFlags</type>   <name>flags</name></member>
+            <member><type>VkFilter</type>               <name>magFilter</name><comment>Filter mode for magnification</comment></member>
+            <member><type>VkFilter</type>               <name>minFilter</name><comment>Filter mode for minification</comment></member>
+            <member><type>VkSamplerMipmapMode</type>    <name>mipmapMode</name><comment>Mipmap selection mode</comment></member>
+            <member><type>VkSamplerAddressMode</type>   <name>addressModeU</name></member>
+            <member><type>VkSamplerAddressMode</type>   <name>addressModeV</name></member>
+            <member><type>VkSamplerAddressMode</type>   <name>addressModeW</name></member>
+            <member><type>float</type>                  <name>mipLodBias</name></member>
+            <member><type>VkBool32</type>               <name>anisotropyEnable</name></member>
+            <member><type>float</type>                  <name>maxAnisotropy</name></member>
+            <member><type>VkBool32</type>               <name>compareEnable</name></member>
+            <member noautovalidity="true"><type>VkCompareOp</type>            <name>compareOp</name></member>
+            <member><type>float</type>                  <name>minLod</name></member>
+            <member><type>float</type>                  <name>maxLod</name></member>
+            <member noautovalidity="true"><type>VkBorderColor</type>          <name>borderColor</name></member>
+            <member><type>VkBool32</type>               <name>unnormalizedCoordinates</name></member>
+        </type>
+        <type category="struct" name="VkCommandPoolCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkCommandPoolCreateFlags</type>   <name>flags</name><comment>Command pool creation flags</comment></member>
+            <member><type>uint32_t</type>               <name>queueFamilyIndex</name></member>
+        </type>
+        <type category="struct" name="VkCommandBufferAllocateInfo">
+            <member values="VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member><type>VkCommandPool</type>          <name>commandPool</name></member>
+            <member><type>VkCommandBufferLevel</type>   <name>level</name></member>
+            <member><type>uint32_t</type>               <name>commandBufferCount</name></member>
+        </type>
+        <type category="struct" name="VkCommandBufferInheritanceInfo">
+            <member values="VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true" noautovalidity="true"><type>VkRenderPass</type>    <name>renderPass</name><comment>Render pass for secondary command buffers</comment></member>
+            <member><type>uint32_t</type>               <name>subpass</name></member>
+            <member optional="true" noautovalidity="true"><type>VkFramebuffer</type>   <name>framebuffer</name><comment>Framebuffer for secondary command buffers</comment></member>
+            <member><type>VkBool32</type>               <name>occlusionQueryEnable</name><comment>Whether this secondary command buffer may be executed during an occlusion query</comment></member>
+            <member optional="true" noautovalidity="true"><type>VkQueryControlFlags</type>    <name>queryFlags</name><comment>Query flags used by this secondary command buffer, if executed during an occlusion query</comment></member>
+            <member optional="true" noautovalidity="true"><type>VkQueryPipelineStatisticFlags</type> <name>pipelineStatistics</name><comment>Pipeline statistics that may be counted for this secondary command buffer</comment></member>
+        </type>
+        <type category="struct" name="VkCommandBufferBeginInfo">
+            <member values="VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkCommandBufferUsageFlags</type>  <name>flags</name><comment>Command buffer usage flags</comment></member>
+            <member optional="true" noautovalidity="true">const <type>VkCommandBufferInheritanceInfo</type>*       <name>pInheritanceInfo</name><comment>Pointer to inheritance info for secondary command buffers</comment></member>
+        </type>
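
The three command-buffer structs above are usually used together: create a pool, allocate a primary command buffer from it, then begin recording. A sketch under those assumptions follows; the helper name and the transient-pool flag choice are illustrative, and the device and queue family index are assumed to come from the caller.

    #include <vulkan/vulkan.h>

    /* Create a pool, allocate one primary command buffer, and begin one-shot recording. */
    VkCommandBuffer begin_one_shot_commands(VkDevice device, uint32_t queue_family_index,
                                            VkCommandPool* out_pool) {
        VkCommandPoolCreateInfo pool_info = {0};
        pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
        pool_info.flags = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT;
        pool_info.queueFamilyIndex = queue_family_index;
        vkCreateCommandPool(device, &pool_info, NULL, out_pool);

        VkCommandBufferAllocateInfo alloc_info = {0};
        alloc_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
        alloc_info.commandPool = *out_pool;
        alloc_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
        alloc_info.commandBufferCount = 1;
        VkCommandBuffer cmd = VK_NULL_HANDLE;
        vkAllocateCommandBuffers(device, &alloc_info, &cmd);

        VkCommandBufferBeginInfo begin_info = {0};
        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
        begin_info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
        begin_info.pInheritanceInfo = NULL;  /* only meaningful for secondary command buffers */
        vkBeginCommandBuffer(cmd, &begin_info);
        return cmd;
    }
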
+        <type category="struct" name="VkRenderPassBeginInfo">
+            <member values="VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member><type>VkRenderPass</type>           <name>renderPass</name></member>
+            <member><type>VkFramebuffer</type>          <name>framebuffer</name></member>
+            <member><type>VkRect2D</type>               <name>renderArea</name></member>
+            <member optional="true"><type>uint32_t</type>               <name>clearValueCount</name></member>
+            <member len="clearValueCount">const <type>VkClearValue</type>*    <name>pClearValues</name></member>
+        </type>
+        <type category="union" name="VkClearColorValue" comment="// Union allowing specification of floating point, integer, or unsigned integer color data. Actual value selected is based on image/attachment being cleared.">
+            <member><type>float</type>                  <name>float32</name>[4]</member>
+            <member><type>int32_t</type>                <name>int32</name>[4]</member>
+            <member><type>uint32_t</type>               <name>uint32</name>[4]</member>
+        </type>
+        <type category="struct" name="VkClearDepthStencilValue">
+            <member><type>float</type>                  <name>depth</name></member>
+            <member><type>uint32_t</type>               <name>stencil</name></member>
+        </type>
+        <type category="union" name="VkClearValue" comment="// Union allowing specification of color or depth and stencil values. Actual value selected is based on attachment being cleared.">
+            <member><type>VkClearColorValue</type>      <name>color</name></member>
+            <member><type>VkClearDepthStencilValue</type> <name>depthStencil</name></member>
+        </type>
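
VkRenderPassBeginInfo above takes an array of the VkClearValue unions just defined, one per attachment that is cleared on load. A sketch of recording a pass that clears a single color attachment; the helper name is illustrative and the command buffer, render pass, and framebuffer handles are assumed to be created elsewhere.

    #include <vulkan/vulkan.h>

    /* Record a render pass that clears one color attachment to opaque black. */
    void begin_clear_pass(VkCommandBuffer cmd, VkRenderPass render_pass,
                          VkFramebuffer framebuffer, VkExtent2D extent) {
        VkClearValue clear;
        clear.color.float32[0] = 0.0f;
        clear.color.float32[1] = 0.0f;
        clear.color.float32[2] = 0.0f;
        clear.color.float32[3] = 1.0f;

        VkRenderPassBeginInfo begin = {0};
        begin.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
        begin.renderPass = render_pass;
        begin.framebuffer = framebuffer;
        begin.renderArea.offset.x = 0;
        begin.renderArea.offset.y = 0;
        begin.renderArea.extent = extent;
        begin.clearValueCount = 1;
        begin.pClearValues = &clear;

        vkCmdBeginRenderPass(cmd, &begin, VK_SUBPASS_CONTENTS_INLINE);
        /* ... draw commands would go here ... */
        vkCmdEndRenderPass(cmd);
    }
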
+        <type category="struct" name="VkClearAttachment">
+            <member><type>VkImageAspectFlags</type>     <name>aspectMask</name></member>
+            <member><type>uint32_t</type>               <name>colorAttachment</name></member>
+            <member><type>VkClearValue</type>           <name>clearValue</name></member>
+        </type>
+        <type category="struct" name="VkAttachmentDescription">
+            <member optional="true"><type>VkAttachmentDescriptionFlags</type> <name>flags</name></member>
+            <member><type>VkFormat</type>               <name>format</name></member>
+            <member><type>VkSampleCountFlagBits</type>  <name>samples</name></member>
+            <member><type>VkAttachmentLoadOp</type>     <name>loadOp</name><comment>Load operation for color or depth data</comment></member>
+            <member><type>VkAttachmentStoreOp</type>    <name>storeOp</name><comment>Store operation for color or depth data</comment></member>
+            <member><type>VkAttachmentLoadOp</type>     <name>stencilLoadOp</name><comment>Load operation for stencil data</comment></member>
+            <member><type>VkAttachmentStoreOp</type>    <name>stencilStoreOp</name><comment>Store operation for stencil data</comment></member>
+            <member><type>VkImageLayout</type>          <name>initialLayout</name></member>
+            <member><type>VkImageLayout</type>          <name>finalLayout</name></member>
+        </type>
+        <type category="struct" name="VkAttachmentReference">
+            <member><type>uint32_t</type>               <name>attachment</name></member>
+            <member><type>VkImageLayout</type>          <name>layout</name></member>
+        </type>
+        <type category="struct" name="VkSubpassDescription">
+            <member optional="true"><type>VkSubpassDescriptionFlags</type> <name>flags</name></member>
+            <member><type>VkPipelineBindPoint</type>    <name>pipelineBindPoint</name><comment>Must be VK_PIPELINE_BIND_POINT_GRAPHICS for now</comment></member>
+            <member optional="true"><type>uint32_t</type>               <name>inputAttachmentCount</name></member>
+            <member len="inputAttachmentCount">const <type>VkAttachmentReference</type>* <name>pInputAttachments</name></member>
+            <member optional="true"><type>uint32_t</type>               <name>colorAttachmentCount</name></member>
+            <member len="colorAttachmentCount">const <type>VkAttachmentReference</type>* <name>pColorAttachments</name></member>
+            <member optional="true" len="colorAttachmentCount">const <type>VkAttachmentReference</type>* <name>pResolveAttachments</name></member>
+            <member optional="true">const <type>VkAttachmentReference</type>* <name>pDepthStencilAttachment</name></member>
+            <member optional="true"><type>uint32_t</type>               <name>preserveAttachmentCount</name></member>
+            <member len="preserveAttachmentCount">const <type>uint32_t</type>* <name>pPreserveAttachments</name></member>
+        </type>
+        <type category="struct" name="VkSubpassDependency">
+            <member><type>uint32_t</type>               <name>srcSubpass</name></member>
+            <member><type>uint32_t</type>               <name>dstSubpass</name></member>
+            <member><type>VkPipelineStageFlags</type>   <name>srcStageMask</name></member>
+            <member><type>VkPipelineStageFlags</type>   <name>dstStageMask</name></member>
+            <member optional="true"><type>VkAccessFlags</type>          <name>srcAccessMask</name><comment>Memory accesses from the source of the dependency to synchronize</comment></member>
+            <member optional="true"><type>VkAccessFlags</type>          <name>dstAccessMask</name><comment>Memory accesses from the destination of the dependency to synchronize</comment></member>
+            <member optional="true"><type>VkDependencyFlags</type>      <name>dependencyFlags</name></member>
+        </type>
+        <type category="struct" name="VkRenderPassCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkRenderPassCreateFlags</type> <name>flags</name></member>
+            <member optional="true"><type>uint32_t</type>   <name>attachmentCount</name></member>
+            <member len="attachmentCount">const <type>VkAttachmentDescription</type>* <name>pAttachments</name></member>
+            <member><type>uint32_t</type>               <name>subpassCount</name></member>
+            <member len="subpassCount">const <type>VkSubpassDescription</type>* <name>pSubpasses</name></member>
+            <member optional="true"><type>uint32_t</type>       <name>dependencyCount</name></member>
+            <member len="dependencyCount">const <type>VkSubpassDependency</type>* <name>pDependencies</name></member>
+        </type>
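
A hedged sketch tying VkAttachmentDescription, VkAttachmentReference, VkSubpassDescription, and VkRenderPassCreateInfo together for a one-subpass, single-color-attachment pass; the helper name is illustrative, the format is supplied by the caller, and the present-source final layout assumes the attachment will be handed to a swapchain.

    #include <vulkan/vulkan.h>

    /* Build a one-subpass render pass around a single color attachment. */
    VkRenderPass create_color_pass(VkDevice device, VkFormat color_format) {
        VkAttachmentDescription color = {0};
        color.format = color_format;
        color.samples = VK_SAMPLE_COUNT_1_BIT;
        color.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
        color.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
        color.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
        color.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
        color.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
        color.finalLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;

        VkAttachmentReference color_ref = {0};
        color_ref.attachment = 0;  /* index into VkRenderPassCreateInfo::pAttachments */
        color_ref.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

        VkSubpassDescription subpass = {0};
        subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
        subpass.colorAttachmentCount = 1;
        subpass.pColorAttachments = &color_ref;

        VkRenderPassCreateInfo info = {0};
        info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
        info.attachmentCount = 1;
        info.pAttachments = &color;
        info.subpassCount = 1;
        info.pSubpasses = &subpass;

        VkRenderPass render_pass = VK_NULL_HANDLE;
        vkCreateRenderPass(device, &info, NULL, &render_pass);
        return render_pass;
    }
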
+        <type category="struct" name="VkEventCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_EVENT_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkEventCreateFlags</type>     <name>flags</name><comment>Event creation flags</comment></member>
+        </type>
+        <type category="struct" name="VkFenceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_FENCE_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkFenceCreateFlags</type>     <name>flags</name><comment>Fence creation flags</comment></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceFeatures">
+            <member><type>VkBool32</type>               <name>robustBufferAccess</name><comment>out of bounds buffer accesses are well defined</comment></member>
+            <member><type>VkBool32</type>               <name>fullDrawIndexUint32</name><comment>full 32-bit range of indices for indexed draw calls</comment></member>
+            <member><type>VkBool32</type>               <name>imageCubeArray</name><comment>image views which are arrays of cube maps</comment></member>
+            <member><type>VkBool32</type>               <name>independentBlend</name><comment>blending operations are controlled per-attachment</comment></member>
+            <member><type>VkBool32</type>               <name>geometryShader</name><comment>geometry stage</comment></member>
+            <member><type>VkBool32</type>               <name>tessellationShader</name><comment>tessellation control and evaluation stage</comment></member>
+            <member><type>VkBool32</type>               <name>sampleRateShading</name><comment>per-sample shading and interpolation</comment></member>
+            <member><type>VkBool32</type>               <name>dualSrcBlend</name><comment>blend operations which take two sources</comment></member>
+            <member><type>VkBool32</type>               <name>logicOp</name><comment>logic operations</comment></member>
+            <member><type>VkBool32</type>               <name>multiDrawIndirect</name><comment>multi draw indirect</comment></member>
+            <member><type>VkBool32</type>               <name>drawIndirectFirstInstance</name><comment>indirect draws can use non-zero firstInstance</comment></member>
+            <member><type>VkBool32</type>               <name>depthClamp</name><comment>depth clamping</comment></member>
+            <member><type>VkBool32</type>               <name>depthBiasClamp</name><comment>depth bias clamping</comment></member>
+            <member><type>VkBool32</type>               <name>fillModeNonSolid</name><comment>point and wireframe fill modes</comment></member>
+            <member><type>VkBool32</type>               <name>depthBounds</name><comment>depth bounds test</comment></member>
+            <member><type>VkBool32</type>               <name>wideLines</name><comment>lines with width greater than 1</comment></member>
+            <member><type>VkBool32</type>               <name>largePoints</name><comment>points with size greater than 1</comment></member>
+            <member><type>VkBool32</type>               <name>alphaToOne</name><comment>the fragment alpha component can be forced to maximum representable alpha value</comment></member>
+            <member><type>VkBool32</type>               <name>multiViewport</name><comment>viewport arrays</comment></member>
+            <member><type>VkBool32</type>               <name>samplerAnisotropy</name><comment>anisotropic sampler filtering</comment></member>
+            <member><type>VkBool32</type>               <name>textureCompressionETC2</name><comment>ETC texture compression formats</comment></member>
+            <member><type>VkBool32</type>               <name>textureCompressionASTC_LDR</name><comment>ASTC LDR texture compression formats</comment></member>
+            <member><type>VkBool32</type>               <name>textureCompressionBC</name><comment>BC1-7 texture compressed formats</comment></member>
+            <member><type>VkBool32</type>               <name>occlusionQueryPrecise</name><comment>precise occlusion queries returning actual sample counts</comment></member>
+            <member><type>VkBool32</type>               <name>pipelineStatisticsQuery</name><comment>pipeline statistics query</comment></member>
+            <member><type>VkBool32</type>               <name>vertexPipelineStoresAndAtomics</name><comment>stores and atomic ops on storage buffers and images are supported in vertex, tessellation, and geometry stages</comment></member>
+            <member><type>VkBool32</type>               <name>fragmentStoresAndAtomics</name><comment>stores and atomic ops on storage buffers and images are supported in the fragment stage</comment></member>
+            <member><type>VkBool32</type>               <name>shaderTessellationAndGeometryPointSize</name><comment>tessellation and geometry stages can export point size</comment></member>
+            <member><type>VkBool32</type>               <name>shaderImageGatherExtended</name><comment>image gather with run-time values and independent offsets</comment></member>
+            <member><type>VkBool32</type>               <name>shaderStorageImageExtendedFormats</name><comment>the extended set of formats can be used for storage images</comment></member>
+            <member><type>VkBool32</type>               <name>shaderStorageImageMultisample</name><comment>multisample images can be used for storage images</comment></member>
+            <member><type>VkBool32</type>               <name>shaderStorageImageReadWithoutFormat</name><comment>read from storage image does not require format qualifier</comment></member>
+            <member><type>VkBool32</type>               <name>shaderStorageImageWriteWithoutFormat</name><comment>write to storage image does not require format qualifier</comment></member>
+            <member><type>VkBool32</type>               <name>shaderUniformBufferArrayDynamicIndexing</name><comment>arrays of uniform buffers can be accessed with dynamically uniform indices</comment></member>
+            <member><type>VkBool32</type>               <name>shaderSampledImageArrayDynamicIndexing</name><comment>arrays of sampled images can be accessed with dynamically uniform indices</comment></member>
+            <member><type>VkBool32</type>               <name>shaderStorageBufferArrayDynamicIndexing</name><comment>arrays of storage buffers can be accessed with dynamically uniform indices</comment></member>
+            <member><type>VkBool32</type>               <name>shaderStorageImageArrayDynamicIndexing</name><comment>arrays of storage images can be accessed with dynamically uniform indices</comment></member>
+            <member><type>VkBool32</type>               <name>shaderClipDistance</name><comment>clip distance in shaders</comment></member>
+            <member><type>VkBool32</type>               <name>shaderCullDistance</name><comment>cull distance in shaders</comment></member>
+            <member><type>VkBool32</type>               <name>shaderFloat64</name><comment>64-bit floats (doubles) in shaders</comment></member>
+            <member><type>VkBool32</type>               <name>shaderInt64</name><comment>64-bit integers in shaders</comment></member>
+            <member><type>VkBool32</type>               <name>shaderInt16</name><comment>16-bit integers in shaders</comment></member>
+            <member><type>VkBool32</type>               <name>shaderResourceResidency</name><comment>shader can use texture operations that return resource residency information (requires sparseNonResident support)</comment></member>
+            <member><type>VkBool32</type>               <name>shaderResourceMinLod</name><comment>shader can use texture operations that specify minimum resource LOD</comment></member>
+            <member><type>VkBool32</type>               <name>sparseBinding</name><comment>Sparse resources support: Resource memory can be managed at opaque page level rather than object level</comment></member>
+            <member><type>VkBool32</type>               <name>sparseResidencyBuffer</name><comment>Sparse resources support: GPU can access partially resident buffers </comment></member>
+            <member><type>VkBool32</type>               <name>sparseResidencyImage2D</name><comment>Sparse resources support: GPU can access partially resident 2D (non-MSAA non-depth/stencil) images </comment></member>
+            <member><type>VkBool32</type>               <name>sparseResidencyImage3D</name><comment>Sparse resources support: GPU can access partially resident 3D images </comment></member>
+            <member><type>VkBool32</type>               <name>sparseResidency2Samples</name><comment>Sparse resources support: GPU can access partially resident MSAA 2D images with 2 samples</comment></member>
+            <member><type>VkBool32</type>               <name>sparseResidency4Samples</name><comment>Sparse resources support: GPU can access partially resident MSAA 2D images with 4 samples</comment></member>
+            <member><type>VkBool32</type>               <name>sparseResidency8Samples</name><comment>Sparse resources support: GPU can access partially resident MSAA 2D images with 8 samples</comment></member>
+            <member><type>VkBool32</type>               <name>sparseResidency16Samples</name><comment>Sparse resources support: GPU can access partially resident MSAA 2D images with 16 samples</comment></member>
+            <member><type>VkBool32</type>               <name>sparseResidencyAliased</name><comment>Sparse resources support: GPU can correctly access data aliased into multiple locations (opt-in)</comment></member>
+            <member><type>VkBool32</type>               <name>variableMultisampleRate</name><comment>multisample rate must be the same for all pipelines in a subpass</comment></member>
+            <member><type>VkBool32</type>               <name>inheritedQueries</name><comment>Queries may be inherited from primary to secondary command buffers</comment></member>
+        </type>
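
Applications normally read this structure with vkGetPhysicalDeviceFeatures and enable only the booleans they rely on when creating the device. A small sketch; the helper name and the particular features checked are illustrative assumptions.

    #include <vulkan/vulkan.h>

    /* Query the supported feature booleans and pick the subset the app relies on. */
    VkPhysicalDeviceFeatures choose_features(VkPhysicalDevice physical_device) {
        VkPhysicalDeviceFeatures supported;
        vkGetPhysicalDeviceFeatures(physical_device, &supported);

        VkPhysicalDeviceFeatures enabled = {0};  /* everything off by default */
        if (supported.samplerAnisotropy) {
            enabled.samplerAnisotropy = VK_TRUE;  /* anisotropic filtering, if available */
        }
        if (supported.textureCompressionBC) {
            enabled.textureCompressionBC = VK_TRUE;
        }
        /* The result would then be passed as VkDeviceCreateInfo::pEnabledFeatures. */
        return enabled;
    }
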
+        <type category="struct" name="VkPhysicalDeviceSparseProperties" returnedonly="true">
+            <member><type>VkBool32</type>               <name>residencyStandard2DBlockShape</name><comment>Sparse resources support: GPU will access all 2D (single sample) sparse resources using the standard sparse image block shapes (based on pixel format)</comment></member>
+            <member><type>VkBool32</type>               <name>residencyStandard2DMultisampleBlockShape</name><comment>Sparse resources support: GPU will access all 2D (multisample) sparse resources using the standard sparse image block shapes (based on pixel format)</comment></member>
+            <member><type>VkBool32</type>               <name>residencyStandard3DBlockShape</name><comment>Sparse resources support: GPU will access all 3D sparse resources using the standard sparse image block shapes (based on pixel format)</comment></member>
+            <member><type>VkBool32</type>               <name>residencyAlignedMipSize</name><comment>Sparse resources support: Images with mip level dimensions that are NOT a multiple of the sparse image block dimensions will be placed in the mip tail</comment></member>
+            <member><type>VkBool32</type>               <name>residencyNonResidentStrict</name><comment>Sparse resources support: GPU can consistently access non-resident regions of a resource, all reads return as if data is 0, writes are discarded</comment></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceLimits" returnedonly="true">
+                <comment>resource maximum sizes</comment>
+            <member><type>uint32_t</type>               <name>maxImageDimension1D</name><comment>max 1D image dimension</comment></member>
+            <member><type>uint32_t</type>               <name>maxImageDimension2D</name><comment>max 2D image dimension</comment></member>
+            <member><type>uint32_t</type>               <name>maxImageDimension3D</name><comment>max 3D image dimension</comment></member>
+            <member><type>uint32_t</type>               <name>maxImageDimensionCube</name><comment>max cubemap image dimension</comment></member>
+            <member><type>uint32_t</type>               <name>maxImageArrayLayers</name><comment>max layers for image arrays</comment></member>
+            <member><type>uint32_t</type>               <name>maxTexelBufferElements</name><comment>max texel buffer size (texels)</comment></member>
+            <member><type>uint32_t</type>               <name>maxUniformBufferRange</name><comment>max uniform buffer range (bytes)</comment></member>
+            <member><type>uint32_t</type>               <name>maxStorageBufferRange</name><comment>max storage buffer range (bytes)</comment></member>
+            <member><type>uint32_t</type>               <name>maxPushConstantsSize</name><comment>max size of the push constants pool (bytes)</comment></member>
+                <comment>memory limits</comment>
+            <member><type>uint32_t</type>               <name>maxMemoryAllocationCount</name><comment>max number of device memory allocations supported</comment></member>
+            <member><type>uint32_t</type>               <name>maxSamplerAllocationCount</name><comment>max number of samplers that can be allocated on a device</comment></member>
+            <member><type>VkDeviceSize</type>           <name>bufferImageGranularity</name><comment>Granularity (in bytes) at which buffers and images can be bound to adjacent memory for simultaneous usage</comment></member>
+            <member><type>VkDeviceSize</type>           <name>sparseAddressSpaceSize</name><comment>Total address space available for sparse allocations (bytes)</comment></member>
+                <comment>descriptor set limits</comment>
+            <member><type>uint32_t</type>               <name>maxBoundDescriptorSets</name><comment>max number of descriptors sets that can be bound to a pipeline</comment></member>
+            <member><type>uint32_t</type>               <name>maxPerStageDescriptorSamplers</name><comment>max number of samplers allowed per-stage in a descriptor set</comment></member>
+            <member><type>uint32_t</type>               <name>maxPerStageDescriptorUniformBuffers</name><comment>max number of uniform buffers allowed per-stage in a descriptor set</comment></member>
+            <member><type>uint32_t</type>               <name>maxPerStageDescriptorStorageBuffers</name><comment>max number of storage buffers allowed per-stage in a descriptor set</comment></member>
+            <member><type>uint32_t</type>               <name>maxPerStageDescriptorSampledImages</name><comment>max number of sampled images allowed per-stage in a descriptor set</comment></member>
+            <member><type>uint32_t</type>               <name>maxPerStageDescriptorStorageImages</name><comment>max number of storage images allowed per-stage in a descriptor set</comment></member>
+            <member><type>uint32_t</type>               <name>maxPerStageDescriptorInputAttachments</name><comment>max number of input attachments allowed per-stage in a descriptor set</comment></member>
+            <member><type>uint32_t</type>               <name>maxPerStageResources</name><comment>max number of resources allowed by a single stage</comment></member>
+            <member><type>uint32_t</type>               <name>maxDescriptorSetSamplers</name><comment>max number of samplers allowed in all stages in a descriptor set</comment></member>
+            <member><type>uint32_t</type>               <name>maxDescriptorSetUniformBuffers</name><comment>max number of uniform buffers allowed in all stages in a descriptor set</comment></member>
+            <member><type>uint32_t</type>               <name>maxDescriptorSetUniformBuffersDynamic</name><comment>max number of dynamic uniform buffers allowed in all stages in a descriptor set</comment></member>
+            <member><type>uint32_t</type>               <name>maxDescriptorSetStorageBuffers</name><comment>max number of storage buffers allowed in all stages in a descriptor set</comment></member>
+            <member><type>uint32_t</type>               <name>maxDescriptorSetStorageBuffersDynamic</name><comment>max number of dynamic storage buffers allowed in all stages in a descriptor set</comment></member>
+            <member><type>uint32_t</type>               <name>maxDescriptorSetSampledImages</name><comment>max number of sampled images allowed in all stages in a descriptor set</comment></member>
+            <member><type>uint32_t</type>               <name>maxDescriptorSetStorageImages</name><comment>max number of storage images allowed in all stages in a descriptor set</comment></member>
+            <member><type>uint32_t</type>               <name>maxDescriptorSetInputAttachments</name><comment>max number of input attachments allowed in all stages in a descriptor set</comment></member>
+                <comment>vertex stage limits</comment>
+            <member><type>uint32_t</type>               <name>maxVertexInputAttributes</name><comment>max number of vertex input attribute slots</comment></member>
+            <member><type>uint32_t</type>               <name>maxVertexInputBindings</name><comment>max number of vertex input binding slots</comment></member>
+            <member><type>uint32_t</type>               <name>maxVertexInputAttributeOffset</name><comment>max vertex input attribute offset added to vertex buffer offset</comment></member>
+            <member><type>uint32_t</type>               <name>maxVertexInputBindingStride</name><comment>max vertex input binding stride</comment></member>
+            <member><type>uint32_t</type>               <name>maxVertexOutputComponents</name><comment>max number of output components written by vertex shader</comment></member>
+                <comment>tessellation control stage limits</comment>
+            <member><type>uint32_t</type>               <name>maxTessellationGenerationLevel</name><comment>max level supported by tessellation primitive generator</comment></member>
+            <member><type>uint32_t</type>               <name>maxTessellationPatchSize</name><comment>max patch size (vertices)</comment></member>
+            <member><type>uint32_t</type>               <name>maxTessellationControlPerVertexInputComponents</name><comment>max number of input components per-vertex in TCS</comment></member>
+            <member><type>uint32_t</type>               <name>maxTessellationControlPerVertexOutputComponents</name><comment>max number of output components per-vertex in TCS</comment></member>
+            <member><type>uint32_t</type>               <name>maxTessellationControlPerPatchOutputComponents</name><comment>max number of output components per-patch in TCS</comment></member>
+            <member><type>uint32_t</type>               <name>maxTessellationControlTotalOutputComponents</name><comment>max total number of per-vertex and per-patch output components in TCS</comment></member>
+                <comment>tessellation evaluation stage limits</comment>
+            <member><type>uint32_t</type>               <name>maxTessellationEvaluationInputComponents</name><comment>max number of input components per vertex in TES</comment></member>
+            <member><type>uint32_t</type>               <name>maxTessellationEvaluationOutputComponents</name><comment>max number of output components per vertex in TES</comment></member>
+                <comment>geometry stage limits</comment>
+            <member><type>uint32_t</type>               <name>maxGeometryShaderInvocations</name><comment>max invocation count supported in geometry shader</comment></member>
+            <member><type>uint32_t</type>               <name>maxGeometryInputComponents</name><comment>max number of input components read in geometry stage</comment></member>
+            <member><type>uint32_t</type>               <name>maxGeometryOutputComponents</name><comment>max number of output components written in geometry stage</comment></member>
+            <member><type>uint32_t</type>               <name>maxGeometryOutputVertices</name><comment>max number of vertices that can be emitted in geometry stage</comment></member>
+            <member><type>uint32_t</type>               <name>maxGeometryTotalOutputComponents</name><comment>max total number of components (all vertices) written in geometry stage</comment></member>
+                <comment>fragment stage limits</comment>
+            <member><type>uint32_t</type>               <name>maxFragmentInputComponents</name><comment>max number of input components read in fragment stage</comment></member>
+            <member><type>uint32_t</type>               <name>maxFragmentOutputAttachments</name><comment>max number of output attachments written in fragment stage</comment></member>
+            <member><type>uint32_t</type>               <name>maxFragmentDualSrcAttachments</name><comment>max number of output attachments written when using dual source blending</comment></member>
+            <member><type>uint32_t</type>               <name>maxFragmentCombinedOutputResources</name><comment>max total number of storage buffers, storage images and output buffers</comment></member>
+                <comment>compute stage limits</comment>
+            <member><type>uint32_t</type>               <name>maxComputeSharedMemorySize</name><comment>max total storage size of work group local storage (bytes)</comment></member>
+            <member><type>uint32_t</type>               <name>maxComputeWorkGroupCount</name>[3]<comment>max num of compute work groups that may be dispatched by a single command (x,y,z)</comment></member>
+            <member><type>uint32_t</type>               <name>maxComputeWorkGroupInvocations</name><comment>max total compute invocations in a single local work group</comment></member>
+            <member><type>uint32_t</type>               <name>maxComputeWorkGroupSize</name>[3]<comment>max local size of a compute work group (x,y,z)</comment></member>
+            <member><type>uint32_t</type>               <name>subPixelPrecisionBits</name><comment>number of bits of subpixel precision in screen x and y</comment></member>
+            <member><type>uint32_t</type>               <name>subTexelPrecisionBits</name><comment>number of bits of precision for selecting texel weights</comment></member>
+            <member><type>uint32_t</type>               <name>mipmapPrecisionBits</name><comment>number of bits of precision for selecting mipmap weights</comment></member>
+            <member><type>uint32_t</type>               <name>maxDrawIndexedIndexValue</name><comment>max index value for indexed draw calls (for 32-bit indices)</comment></member>
+            <member><type>uint32_t</type>               <name>maxDrawIndirectCount</name><comment>max draw count for indirect draw calls</comment></member>
+            <member><type>float</type>                  <name>maxSamplerLodBias</name><comment>max absolute sampler LOD bias</comment></member>
+            <member><type>float</type>                  <name>maxSamplerAnisotropy</name><comment>max degree of sampler anisotropy</comment></member>
+            <member><type>uint32_t</type>               <name>maxViewports</name><comment>max number of active viewports</comment></member>
+            <member><type>uint32_t</type>               <name>maxViewportDimensions</name>[2]<comment>max viewport dimensions (x,y)</comment></member>
+            <member><type>float</type>                  <name>viewportBoundsRange</name>[2]<comment>viewport bounds range (min,max)</comment></member>
+            <member><type>uint32_t</type>               <name>viewportSubPixelBits</name><comment>number of bits of subpixel precision for viewport</comment></member>
+            <member><type>size_t</type>                 <name>minMemoryMapAlignment</name><comment>min required alignment of pointers returned by MapMemory (bytes)</comment></member>
+            <member><type>VkDeviceSize</type>           <name>minTexelBufferOffsetAlignment</name><comment>min required alignment for texel buffer offsets (bytes) </comment></member>
+            <member><type>VkDeviceSize</type>           <name>minUniformBufferOffsetAlignment</name><comment>min required alignment for uniform buffer sizes and offsets (bytes)</comment></member>
+            <member><type>VkDeviceSize</type>           <name>minStorageBufferOffsetAlignment</name><comment>min required alignment for storage buffer offsets (bytes)</comment></member>
+            <member><type>int32_t</type>                <name>minTexelOffset</name><comment>min texel offset for OpTextureSampleOffset</comment></member>
+            <member><type>uint32_t</type>               <name>maxTexelOffset</name><comment>max texel offset for OpTextureSampleOffset</comment></member>
+            <member><type>int32_t</type>                <name>minTexelGatherOffset</name><comment>min texel offset for OpTextureGatherOffset</comment></member>
+            <member><type>uint32_t</type>               <name>maxTexelGatherOffset</name><comment>max texel offset for OpTextureGatherOffset</comment></member>
+            <member><type>float</type>                  <name>minInterpolationOffset</name><comment>furthest negative offset for interpolateAtOffset</comment></member>
+            <member><type>float</type>                  <name>maxInterpolationOffset</name><comment>furthest positive offset for interpolateAtOffset</comment></member>
+            <member><type>uint32_t</type>               <name>subPixelInterpolationOffsetBits</name><comment>number of subpixel bits for interpolateAtOffset</comment></member>
+            <member><type>uint32_t</type>               <name>maxFramebufferWidth</name><comment>max width for a framebuffer</comment></member>
+            <member><type>uint32_t</type>               <name>maxFramebufferHeight</name><comment>max height for a framebuffer</comment></member>
+            <member><type>uint32_t</type>               <name>maxFramebufferLayers</name><comment>max layer count for a layered framebuffer</comment></member>
+            <member optional="true"><type>VkSampleCountFlags</type>     <name>framebufferColorSampleCounts</name><comment>supported color sample counts for a framebuffer</comment></member>
+            <member optional="true"><type>VkSampleCountFlags</type>     <name>framebufferDepthSampleCounts</name><comment>supported depth sample counts for a framebuffer</comment></member>
+            <member optional="true"><type>VkSampleCountFlags</type>     <name>framebufferStencilSampleCounts</name><comment>supported stencil sample counts for a framebuffer</comment></member>
+            <member optional="true"><type>VkSampleCountFlags</type>     <name>framebufferNoAttachmentsSampleCounts</name><comment>supported sample counts for a framebuffer with no attachments</comment></member>
+            <member><type>uint32_t</type>               <name>maxColorAttachments</name><comment>max number of color attachments per subpass</comment></member>
+            <member optional="true"><type>VkSampleCountFlags</type>     <name>sampledImageColorSampleCounts</name><comment>supported color sample counts for a non-integer sampled image</comment></member>
+            <member optional="true"><type>VkSampleCountFlags</type>     <name>sampledImageIntegerSampleCounts</name><comment>supported sample counts for an integer image</comment></member>
+            <member optional="true"><type>VkSampleCountFlags</type>     <name>sampledImageDepthSampleCounts</name><comment>supported depth sample counts for a sampled image</comment></member>
+            <member optional="true"><type>VkSampleCountFlags</type>     <name>sampledImageStencilSampleCounts</name><comment>supported stencil sample counts for a sampled image</comment></member>
+            <member optional="true"><type>VkSampleCountFlags</type>     <name>storageImageSampleCounts</name><comment>supported sample counts for a storage image</comment></member>
+            <member><type>uint32_t</type>               <name>maxSampleMaskWords</name><comment>max number of sample mask words</comment></member>
+            <member><type>VkBool32</type>               <name>timestampComputeAndGraphics</name><comment>timestamps on graphics and compute queues</comment></member>
+            <member><type>float</type>                  <name>timestampPeriod</name><comment>number of nanoseconds it takes for timestamp query value to increment by 1</comment></member>
+            <member><type>uint32_t</type>               <name>maxClipDistances</name><comment>max number of clip distances</comment></member>
+            <member><type>uint32_t</type>               <name>maxCullDistances</name><comment>max number of cull distances</comment></member>
+            <member><type>uint32_t</type>               <name>maxCombinedClipAndCullDistances</name><comment>max combined number of user clip and cull distances</comment></member>
+            <member><type>uint32_t</type>               <name>discreteQueuePriorities</name><comment>distinct queue priorities available </comment></member>
+            <member><type>float</type>                  <name>pointSizeRange</name>[2]<comment>range (min,max) of supported point sizes</comment></member>
+            <member><type>float</type>                  <name>lineWidthRange</name>[2]<comment>range (min,max) of supported line widths</comment></member>
+            <member><type>float</type>                  <name>pointSizeGranularity</name><comment>granularity of supported point sizes</comment></member>
+            <member><type>float</type>                  <name>lineWidthGranularity</name><comment>granularity of supported line widths</comment></member>
+            <member><type>VkBool32</type>               <name>strictLines</name><comment>line rasterization follows preferred rules</comment></member>
+            <member><type>VkBool32</type>               <name>standardSampleLocations</name><comment>supports standard sample locations for all supported sample counts</comment></member>
+            <member><type>VkDeviceSize</type>           <name>optimalBufferCopyOffsetAlignment</name><comment>optimal offset of buffer copies</comment></member>
+            <member><type>VkDeviceSize</type>           <name>optimalBufferCopyRowPitchAlignment</name><comment>optimal pitch of buffer copies</comment></member>
+            <member><type>VkDeviceSize</type>           <name>nonCoherentAtomSize</name><comment>minimum size and alignment for non-coherent host-mapped device memory access</comment></member>
+        </type>
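
These limits are returned inside VkPhysicalDeviceProperties via vkGetPhysicalDeviceProperties. A sketch of consuming one of them, rounding a uniform-buffer offset up to minUniformBufferOffsetAlignment; the helper name is illustrative.

    #include <vulkan/vulkan.h>

    /* Round a uniform-buffer offset up to the device's required alignment. */
    VkDeviceSize align_uniform_offset(VkPhysicalDevice physical_device, VkDeviceSize offset) {
        VkPhysicalDeviceProperties props;
        vkGetPhysicalDeviceProperties(physical_device, &props);

        VkDeviceSize alignment = props.limits.minUniformBufferOffsetAlignment;
        /* The alignment is a power of two, so the usual rounding trick applies. */
        return (offset + alignment - 1) & ~(alignment - 1);
    }
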
+        <type category="struct" name="VkSemaphoreCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkSemaphoreCreateFlags</type> <name>flags</name><comment>Semaphore creation flags</comment></member>
+        </type>
+        <type category="struct" name="VkQueryPoolCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkQueryPoolCreateFlags</type> <name>flags</name></member>
+            <member><type>VkQueryType</type>            <name>queryType</name></member>
+            <member><type>uint32_t</type>               <name>queryCount</name></member>
+            <member optional="true" noautovalidity="true"><type>VkQueryPipelineStatisticFlags</type> <name>pipelineStatistics</name><comment>Optional</comment></member>
+        </type>
+        <type category="struct" name="VkFramebufferCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkFramebufferCreateFlags</type>    <name>flags</name></member>
+            <member><type>VkRenderPass</type>           <name>renderPass</name></member>
+            <member optional="true"><type>uint32_t</type>               <name>attachmentCount</name></member>
+            <member noautovalidity="true" len="attachmentCount">const <type>VkImageView</type>*     <name>pAttachments</name></member>
+            <member><type>uint32_t</type>               <name>width</name></member>
+            <member><type>uint32_t</type>               <name>height</name></member>
+            <member><type>uint32_t</type>               <name>layers</name></member>
+        </type>
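
A sketch of wrapping a single color image view in a framebuffer compatible with an existing render pass; the helper name is illustrative, and the image view and extent are assumed to describe the color attachment created elsewhere.

    #include <vulkan/vulkan.h>

    /* Wrap one color image view in a framebuffer compatible with 'render_pass'. */
    VkFramebuffer create_color_framebuffer(VkDevice device, VkRenderPass render_pass,
                                           VkImageView color_view, VkExtent2D extent) {
        VkFramebufferCreateInfo info = {0};
        info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
        info.renderPass = render_pass;
        info.attachmentCount = 1;
        info.pAttachments = &color_view;
        info.width = extent.width;
        info.height = extent.height;
        info.layers = 1;

        VkFramebuffer framebuffer = VK_NULL_HANDLE;
        vkCreateFramebuffer(device, &info, NULL, &framebuffer);
        return framebuffer;
    }
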
+        <type category="struct" name="VkDrawIndirectCommand">
+            <member><type>uint32_t</type>                       <name>vertexCount</name></member>
+            <member><type>uint32_t</type>                       <name>instanceCount</name></member>
+            <member><type>uint32_t</type>                       <name>firstVertex</name></member>
+            <member noautovalidity="true"><type>uint32_t</type> <name>firstInstance</name></member>
+        </type>
+        <type category="struct" name="VkDrawIndexedIndirectCommand">
+            <member><type>uint32_t</type>                       <name>indexCount</name></member>
+            <member><type>uint32_t</type>                       <name>instanceCount</name></member>
+            <member><type>uint32_t</type>                       <name>firstIndex</name></member>
+            <member><type>int32_t</type>                        <name>vertexOffset</name></member>
+            <member noautovalidity="true"><type>uint32_t</type> <name>firstInstance</name></member>
+        </type>
+        <type category="struct" name="VkDispatchIndirectCommand">
+            <member noautovalidity="true"><type>uint32_t</type> <name>x</name></member>
+            <member noautovalidity="true"><type>uint32_t</type> <name>y</name></member>
+            <member noautovalidity="true"><type>uint32_t</type> <name>z</name></member>
+        </type>
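
The three *IndirectCommand structs above describe the in-buffer layout consumed by the indirect draw and dispatch commands rather than parameters passed directly. A minimal sketch, assuming the buffer already holds one VkDrawIndirectCommand at offset 0; the helper name is illustrative.

    #include <vulkan/vulkan.h>

    /* Issue a single indirect draw whose parameters live in 'args_buffer'. */
    void draw_indirect_once(VkCommandBuffer cmd, VkBuffer args_buffer) {
        /* The buffer is expected to contain one VkDrawIndirectCommand at offset 0,
         * i.e. vertexCount / instanceCount / firstVertex / firstInstance. */
        vkCmdDrawIndirect(cmd, args_buffer, /*offset=*/0, /*drawCount=*/1,
                          /*stride=*/sizeof(VkDrawIndirectCommand));
    }
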
+        <type category="struct" name="VkSubmitInfo">
+            <member values="VK_STRUCTURE_TYPE_SUBMIT_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>* <name>pNext</name></member>
+            <member optional="true"><type>uint32_t</type>       <name>waitSemaphoreCount</name></member>
+            <member len="waitSemaphoreCount">const <type>VkSemaphore</type>*     <name>pWaitSemaphores</name></member>
+            <member len="waitSemaphoreCount">const <type>VkPipelineStageFlags</type>*           <name>pWaitDstStageMask</name></member>
+            <member optional="true"><type>uint32_t</type>       <name>commandBufferCount</name></member>
+            <member len="commandBufferCount">const <type>VkCommandBuffer</type>*     <name>pCommandBuffers</name></member>
+            <member optional="true"><type>uint32_t</type>       <name>signalSemaphoreCount</name></member>
+            <member len="signalSemaphoreCount">const <type>VkSemaphore</type>*     <name>pSignalSemaphores</name></member>
+        </type>
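
VkSubmitInfo is consumed by vkQueueSubmit; a typical frame submission waits on an acquire semaphore, signals a render-finished semaphore, and fences completion. A sketch under those assumptions; the helper name is illustrative, and the queue, command buffer, semaphores (created with VkSemaphoreCreateInfo above), and fence are assumed to exist.

    #include <vulkan/vulkan.h>

    /* Submit one command buffer with a wait/signal semaphore pair and a fence. */
    void submit_frame(VkQueue queue, VkCommandBuffer cmd,
                      VkSemaphore wait_sem, VkSemaphore signal_sem, VkFence fence) {
        VkPipelineStageFlags wait_stage = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;

        VkSubmitInfo submit = {0};
        submit.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submit.waitSemaphoreCount = 1;
        submit.pWaitSemaphores = &wait_sem;
        submit.pWaitDstStageMask = &wait_stage;   /* one stage mask per wait semaphore */
        submit.commandBufferCount = 1;
        submit.pCommandBuffers = &cmd;
        submit.signalSemaphoreCount = 1;
        submit.pSignalSemaphores = &signal_sem;

        vkQueueSubmit(queue, 1, &submit, fence);  /* fence signals when execution finishes */
    }
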
+            <comment>WSI extensions</comment>
+        <type category="struct" name="VkDisplayPropertiesKHR" returnedonly="true">
+            <member><type>VkDisplayKHR</type>                     <name>display</name><comment>Handle of the display object</comment></member>
+            <member len="null-terminated">const <type>char</type>*                      <name>displayName</name><comment>Name of the display</comment></member>
+            <member><type>VkExtent2D</type>                       <name>physicalDimensions</name><comment>In millimeters?</comment></member>
+            <member><type>VkExtent2D</type>                       <name>physicalResolution</name><comment>Max resolution for CRT?</comment></member>
+            <member optional="true"><type>VkSurfaceTransformFlagsKHR</type>       <name>supportedTransforms</name><comment>one or more bits from VkSurfaceTransformFlagsKHR</comment></member>
+            <member><type>VkBool32</type>                         <name>planeReorderPossible</name><comment>VK_TRUE if the overlay plane's z-order can be changed on this display.</comment></member>
+            <member><type>VkBool32</type>                         <name>persistentContent</name><comment>VK_TRUE if this is a "smart" display that supports self-refresh/internal buffering.</comment></member>
+        </type>
+        <type category="struct" name="VkDisplayPlanePropertiesKHR" returnedonly="true">
+            <member><type>VkDisplayKHR</type>                     <name>currentDisplay</name><comment>Display the plane is currently associated with.  Will be VK_NULL_HANDLE if the plane is not in use.</comment></member>
+            <member><type>uint32_t</type>                         <name>currentStackIndex</name><comment>Current z-order of the plane.</comment></member>
+        </type>
+        <type category="struct" name="VkDisplayModeParametersKHR">
+            <member><type>VkExtent2D</type>                       <name>visibleRegion</name><comment>Visible scanout region.</comment></member>
+            <member noautovalidity="true"><type>uint32_t</type>   <name>refreshRate</name><comment>Number of times per second the display is updated.</comment></member>
+        </type>
+        <type category="struct" name="VkDisplayModePropertiesKHR" returnedonly="true">
+            <member><type>VkDisplayModeKHR</type>                 <name>displayMode</name><comment>Handle of this display mode.</comment></member>
+            <member><type>VkDisplayModeParametersKHR</type>       <name>parameters</name><comment>The parameters this mode uses.</comment></member>
+        </type>
+        <type category="struct" name="VkDisplayModeCreateInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>VkDisplayModeCreateFlagsKHR</type>      <name>flags</name></member>
+            <member><type>VkDisplayModeParametersKHR</type>       <name>parameters</name><comment>The parameters this mode uses.</comment></member>
+        </type>
+        <type category="struct" name="VkDisplayPlaneCapabilitiesKHR" returnedonly="true">
+            <member optional="true"><type>VkDisplayPlaneAlphaFlagsKHR</type>      <name>supportedAlpha</name><comment>Types of alpha blending supported, if any.</comment></member>
+            <member><type>VkOffset2D</type>                       <name>minSrcPosition</name><comment>Does the plane have any position and extent restrictions?</comment></member>
+            <member><type>VkOffset2D</type>                       <name>maxSrcPosition</name></member>
+            <member><type>VkExtent2D</type>                       <name>minSrcExtent</name></member>
+            <member><type>VkExtent2D</type>                       <name>maxSrcExtent</name></member>
+            <member><type>VkOffset2D</type>                       <name>minDstPosition</name></member>
+            <member><type>VkOffset2D</type>                       <name>maxDstPosition</name></member>
+            <member><type>VkExtent2D</type>                       <name>minDstExtent</name></member>
+            <member><type>VkExtent2D</type>                       <name>maxDstExtent</name></member>
+        </type>
+        <type category="struct" name="VkDisplaySurfaceCreateInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>VkDisplaySurfaceCreateFlagsKHR</type>   <name>flags</name></member>
+            <member><type>VkDisplayModeKHR</type>                 <name>displayMode</name><comment>The mode to use when displaying this surface</comment></member>
+            <member><type>uint32_t</type>                         <name>planeIndex</name><comment>The plane on which this surface appears.  Must be between 0 and the value returned by vkGetPhysicalDeviceDisplayPlanePropertiesKHR() in pPropertyCount.</comment></member>
+            <member><type>uint32_t</type>                         <name>planeStackIndex</name><comment>The z-order of the plane.</comment></member>
+            <member><type>VkSurfaceTransformFlagBitsKHR</type>    <name>transform</name><comment>Transform to apply to the images as part of the scanout operation</comment></member>
+            <member><type>float</type>                            <name>globalAlpha</name><comment>Global alpha value.  Must be between 0 and 1, inclusive.  Ignored if alphaMode is not VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR</comment></member>
+            <member><type>VkDisplayPlaneAlphaFlagBitsKHR</type>   <name>alphaMode</name><comment>What type of alpha blending to use.  Must be a bit from VkDisplayPlaneCapabilitiesKHR::supportedAlpha.</comment></member>
+            <member><type>VkExtent2D</type>                       <name>imageExtent</name><comment>size of the images to use with this surface</comment></member>
+        </type>
+        <type category="struct" name="VkDisplayPresentInfoKHR" structextends="VkPresentInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkRect2D</type>                         <name>srcRect</name><comment>Rectangle within the presentable image to read pixel data from when presenting to the display.</comment></member>
+            <member><type>VkRect2D</type>                         <name>dstRect</name><comment>Rectangle within the current display mode's visible region to display srcRectangle in.</comment></member>
+            <member><type>VkBool32</type>                         <name>persistent</name><comment>For smart displays, use buffered mode.  If the display properties member "persistentContent" is VK_FALSE, this member must always be VK_FALSE.</comment></member>
+        </type>
+        <type category="struct" name="VkSurfaceCapabilitiesKHR" returnedonly="true">
+            <member><type>uint32_t</type>                         <name>minImageCount</name><comment>Supported minimum number of images for the surface</comment></member>
+            <member><type>uint32_t</type>                         <name>maxImageCount</name><comment>Supported maximum number of images for the surface, 0 for unlimited</comment></member>
+            <member><type>VkExtent2D</type>                       <name>currentExtent</name><comment>Current image width and height for the surface, (0, 0) if undefined</comment></member>
+            <member><type>VkExtent2D</type>                       <name>minImageExtent</name><comment>Supported minimum image width and height for the surface</comment></member>
+            <member><type>VkExtent2D</type>                       <name>maxImageExtent</name><comment>Supported maximum image width and height for the surface</comment></member>
+            <member><type>uint32_t</type>                         <name>maxImageArrayLayers</name><comment>Supported maximum number of image layers for the surface</comment></member>
+            <member optional="true"><type>VkSurfaceTransformFlagsKHR</type>       <name>supportedTransforms</name><comment>1 or more bits representing the transforms supported</comment></member>
+            <member><type>VkSurfaceTransformFlagBitsKHR</type>    <name>currentTransform</name><comment>The surface's current transform relative to the device's natural orientation</comment></member>
+            <member optional="true"><type>VkCompositeAlphaFlagsKHR</type>         <name>supportedCompositeAlpha</name><comment>1 or more bits representing the alpha compositing modes supported</comment></member>
+            <member optional="true"><type>VkImageUsageFlags</type>                <name>supportedUsageFlags</name><comment>Supported image usage flags for the surface</comment></member>
+        </type>
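
VkSurfaceCapabilitiesKHR is filled by vkGetPhysicalDeviceSurfaceCapabilitiesKHR and typically drives the swapchain image count. A sketch; the helper name and the "minimum plus one spare" policy are illustrative assumptions.

    #include <vulkan/vulkan.h>

    /* Pick a swapchain image count within the surface's advertised range. */
    uint32_t choose_image_count(VkPhysicalDevice physical_device, VkSurfaceKHR surface) {
        VkSurfaceCapabilitiesKHR caps;
        vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physical_device, surface, &caps);

        uint32_t count = caps.minImageCount + 1;       /* one spare image for the app */
        if (caps.maxImageCount != 0 && count > caps.maxImageCount) {
            count = caps.maxImageCount;                /* 0 means "no upper limit" */
        }
        return count;
    }
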
+        <type category="struct" name="VkAndroidSurfaceCreateInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                                    <name>pNext</name></member>
+            <member optional="true"><type>VkAndroidSurfaceCreateFlagsKHR</type> <name>flags</name></member>
+            <member noautovalidity="true">struct <type>ANativeWindow</type>*    <name>window</name></member>
+        </type>
+        <type category="struct" name="VkViSurfaceCreateInfoNN">
+            <member values="VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>VkViSurfaceCreateFlagsNN</type>   <name>flags</name></member>
+            <member noautovalidity="true"><type>void</type>*                            <name>window</name></member>
+        </type>
+        <type category="struct" name="VkWaylandSurfaceCreateInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>VkWaylandSurfaceCreateFlagsKHR</type>   <name>flags</name></member>
+            <member noautovalidity="true">struct <type>wl_display</type>*               <name>display</name></member>
+            <member noautovalidity="true">struct <type>wl_surface</type>*               <name>surface</name></member>
+        </type>
+        <type category="struct" name="VkWin32SurfaceCreateInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>VkWin32SurfaceCreateFlagsKHR</type>   <name>flags</name></member>
+            <member><type>HINSTANCE</type>                        <name>hinstance</name></member>
+            <member><type>HWND</type>                             <name>hwnd</name></member>
+        </type>
+        <type category="struct" name="VkXlibSurfaceCreateInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>VkXlibSurfaceCreateFlagsKHR</type>   <name>flags</name></member>
+            <member noautovalidity="true"><type>Display</type>*                         <name>dpy</name></member>
+            <member><type>Window</type>                           <name>window</name></member>
+        </type>
+        <type category="struct" name="VkXcbSurfaceCreateInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>VkXcbSurfaceCreateFlagsKHR</type>   <name>flags</name></member>
+            <member noautovalidity="true"><type>xcb_connection_t</type>*                <name>connection</name></member>
+            <member><type>xcb_window_t</type>                     <name>window</name></member>
+        </type>
+        <type category="struct" name="VkImagePipeSurfaceCreateInfoFUCHSIA">
+            <member values="VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>VkImagePipeSurfaceCreateFlagsFUCHSIA</type>   <name>flags</name></member>
+            <member><type>zx_handle_t</type>                      <name>imagePipeHandle</name></member>
+        </type>
+        <type category="struct" name="VkStreamDescriptorSurfaceCreateInfoGGP">
+            <member values="VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>VkStreamDescriptorSurfaceCreateFlagsGGP</type> <name>flags</name></member>
+            <member><type>GgpStreamDescriptor</type>              <name>streamDescriptor</name></member>
+        </type>
+        <type category="struct" name="VkSurfaceFormatKHR" returnedonly="true">
+            <member><type>VkFormat</type>                         <name>format</name><comment>Supported pair of rendering format</comment></member>
+            <member><type>VkColorSpaceKHR</type>                  <name>colorSpace</name><comment>and color space for the surface</comment></member>
+        </type>
+        <type category="struct" name="VkSwapchainCreateInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>VkSwapchainCreateFlagsKHR</type>        <name>flags</name></member>
+            <member><type>VkSurfaceKHR</type>                     <name>surface</name><comment>The swapchain's target surface</comment></member>
+            <member><type>uint32_t</type>                         <name>minImageCount</name><comment>Minimum number of presentation images the application needs</comment></member>
+            <member><type>VkFormat</type>                         <name>imageFormat</name><comment>Format of the presentation images</comment></member>
+            <member><type>VkColorSpaceKHR</type>                  <name>imageColorSpace</name><comment>Colorspace of the presentation images</comment></member>
+            <member><type>VkExtent2D</type>                       <name>imageExtent</name><comment>Dimensions of the presentation images</comment></member>
+            <member><type>uint32_t</type>                         <name>imageArrayLayers</name><comment>Determines the number of views for multiview/stereo presentation</comment></member>
+            <member><type>VkImageUsageFlags</type>                <name>imageUsage</name><comment>Bits indicating how the presentation images will be used</comment></member>
+            <member><type>VkSharingMode</type>                    <name>imageSharingMode</name><comment>Sharing mode used for the presentation images</comment></member>
+            <member optional="true"><type>uint32_t</type>         <name>queueFamilyIndexCount</name><comment>Number of queue families having access to the images in case of concurrent sharing mode</comment></member>
+            <member noautovalidity="true" len="queueFamilyIndexCount">const <type>uint32_t</type>*                  <name>pQueueFamilyIndices</name><comment>Array of queue family indices having access to the images in case of concurrent sharing mode</comment></member>
+            <member><type>VkSurfaceTransformFlagBitsKHR</type>    <name>preTransform</name><comment>The transform, relative to the device's natural orientation, applied to the image content prior to presentation</comment></member>
+            <member><type>VkCompositeAlphaFlagBitsKHR</type>      <name>compositeAlpha</name><comment>The alpha blending mode used when compositing this surface with other surfaces in the window system</comment></member>
+            <member><type>VkPresentModeKHR</type>                 <name>presentMode</name><comment>Which presentation mode to use for presents on this swap chain</comment></member>
+            <member><type>VkBool32</type>                         <name>clipped</name><comment>Specifies whether presentable images may be affected by window clip regions</comment></member>
+            <member optional="true"><type>VkSwapchainKHR</type>   <name>oldSwapchain</name><comment>Existing swap chain to replace, if any</comment></member>
+        </type>
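+        <!-- Illustrative, non-normative C sketch of filling VkSwapchainCreateInfoKHR.
+             The helper name and the format, present mode and buffering values are
+             placeholder assumptions; device, surface and caps are expected to come
+             from the application's earlier instance/device/surface setup.
+
+             #include <vulkan/vulkan.h>
+
+             static VkSwapchainKHR createDefaultSwapchain(VkDevice device,
+                                                          VkSurfaceKHR surface,
+                                                          VkSurfaceCapabilitiesKHR caps) {
+                 VkSwapchainCreateInfoKHR createInfo = {0};
+                 createInfo.sType            = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
+                 createInfo.surface          = surface;
+                 createInfo.minImageCount    = caps.minImageCount + 1;    /* assumed extra image for latency */
+                 createInfo.imageFormat      = VK_FORMAT_B8G8R8A8_UNORM;  /* assumed supported surface format */
+                 createInfo.imageColorSpace  = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
+                 createInfo.imageExtent      = caps.currentExtent;
+                 createInfo.imageArrayLayers = 1;                         /* non-stereo presentation */
+                 createInfo.imageUsage       = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+                 createInfo.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE; /* single queue family */
+                 createInfo.preTransform     = caps.currentTransform;
+                 createInfo.compositeAlpha   = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
+                 createInfo.presentMode      = VK_PRESENT_MODE_FIFO_KHR;  /* always available */
+                 createInfo.clipped          = VK_TRUE;
+                 createInfo.oldSwapchain     = VK_NULL_HANDLE;
+                 VkSwapchainKHR swapchain = VK_NULL_HANDLE;
+                 vkCreateSwapchainKHR(device, &createInfo, NULL, &swapchain);
+                 return swapchain;
+             }
+        -->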
+        <type category="struct" name="VkPresentInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_PRESENT_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*  <name>pNext</name></member>
+            <member optional="true"><type>uint32_t</type>         <name>waitSemaphoreCount</name><comment>Number of semaphores to wait for before presenting</comment></member>
+            <member len="waitSemaphoreCount">const <type>VkSemaphore</type>* <name>pWaitSemaphores</name><comment>Semaphores to wait for before presenting</comment></member>
+            <member><type>uint32_t</type>                         <name>swapchainCount</name><comment>Number of swapchains to present in this call</comment></member>
+            <member len="swapchainCount">const <type>VkSwapchainKHR</type>* <name>pSwapchains</name><comment>Swapchains to present an image from</comment></member>
+            <member len="swapchainCount">const <type>uint32_t</type>* <name>pImageIndices</name><comment>Indices of which presentable images to present</comment></member>
+            <member optional="true" len="swapchainCount"><type>VkResult</type>* <name>pResults</name><comment>Optional (i.e. if non-NULL) VkResult for each swapchain</comment></member>
+        </type>
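+        <!-- Illustrative, non-normative C sketch of a single-swapchain present using
+             VkPresentInfoKHR; queue, swapchain, imageIndex and renderFinished are
+             assumed to come from the application's frame loop (the image index from
+             vkAcquireNextImageKHR, the semaphore from the rendering submission).
+
+             VkPresentInfoKHR presentInfo = {0};
+             presentInfo.sType              = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
+             presentInfo.waitSemaphoreCount = 1;
+             presentInfo.pWaitSemaphores    = &renderFinished;  /* wait until rendering is done */
+             presentInfo.swapchainCount     = 1;
+             presentInfo.pSwapchains        = &swapchain;
+             presentInfo.pImageIndices      = &imageIndex;
+             presentInfo.pResults           = NULL;             /* single swapchain: check the return value */
+             VkResult result = vkQueuePresentKHR(queue, &presentInfo);
+             (void)result;                                      /* e.g. handle VK_ERROR_OUT_OF_DATE_KHR here */
+        -->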
+        <type category="struct" name="VkDebugReportCallbackCreateInfoEXT" structextends="VkInstanceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>VkDebugReportFlagsEXT</type>            <name>flags</name><comment>Indicates which events call this callback</comment></member>
+            <member><type>PFN_vkDebugReportCallbackEXT</type>     <name>pfnCallback</name><comment>Function pointer of a callback function</comment></member>
+            <member optional="true"><type>void</type>*            <name>pUserData</name><comment>User data provided to callback function</comment></member>
+        </type>
+        <type category="struct" name="VkValidationFlagsEXT" structextends="VkInstanceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT"><type>VkStructureType</type>                  <name>sType</name><comment>Must be VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT</comment></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>uint32_t</type>                         <name>disabledValidationCheckCount</name><comment>Number of validation checks to disable</comment></member>
+            <member len="disabledValidationCheckCount">const <type>VkValidationCheckEXT</type>* <name>pDisabledValidationChecks</name><comment>Validation checks to disable</comment></member>
+        </type>
+        <type category="struct" name="VkValidationFeaturesEXT" structextends="VkInstanceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT"><type>VkStructureType</type>  <name>sType</name><comment>Must be VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT</comment></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>uint32_t</type>                         <name>enabledValidationFeatureCount</name><comment>Number of validation features to enable</comment></member>
+            <member len="enabledValidationFeatureCount">const <type>VkValidationFeatureEnableEXT</type>* <name>pEnabledValidationFeatures</name><comment>Validation features to enable</comment></member>
+            <member optional="true"><type>uint32_t</type>                         <name>disabledValidationFeatureCount</name><comment>Number of validation features to disable</comment></member>
+            <member len="disabledValidationFeatureCount">const <type>VkValidationFeatureDisableEXT</type>* <name>pDisabledValidationFeatures</name><comment>Validation features to disable</comment></member>
+        </type>
+        <type category="struct" name="VkPipelineRasterizationStateRasterizationOrderAMD" structextends="VkPipelineRasterizationStateCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkRasterizationOrderAMD</type>          <name>rasterizationOrder</name><comment>Rasterization order to use for the pipeline</comment></member>
+        </type>
+        <type category="struct" name="VkDebugMarkerObjectNameInfoEXT">
+            <member values="VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkDebugReportObjectTypeEXT</type>       <name>objectType</name><comment>The type of the object</comment></member>
+            <member><type>uint64_t</type>                         <name>object</name><comment>The handle of the object, cast to uint64_t</comment></member>
+            <member len="null-terminated">const <type>char</type>* <name>pObjectName</name><comment>Name to apply to the object</comment></member>
+        </type>
+        <type category="struct" name="VkDebugMarkerObjectTagInfoEXT">
+            <member values="VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkDebugReportObjectTypeEXT</type>       <name>objectType</name><comment>The type of the object</comment></member>
+            <member><type>uint64_t</type>                         <name>object</name><comment>The handle of the object, cast to uint64_t</comment></member>
+            <member><type>uint64_t</type>                         <name>tagName</name><comment>The name of the tag to set on the object</comment></member>
+            <member><type>size_t</type>                           <name>tagSize</name><comment>The length in bytes of the tag data</comment></member>
+            <member len="tagSize">const <type>void</type>*        <name>pTag</name><comment>Tag data to attach to the object</comment></member>
+        </type>
+        <type category="struct" name="VkDebugMarkerMarkerInfoEXT">
+            <member values="VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member len="null-terminated">const <type>char</type>* <name>pMarkerName</name><comment>Name of the debug marker</comment></member>
+            <member optional="true"><type>float</type>            <name>color</name>[4]<comment>Optional color for debug marker</comment></member>
+        </type>
+        <type category="struct" name="VkDedicatedAllocationImageCreateInfoNV" structextends="VkImageCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkBool32</type>                         <name>dedicatedAllocation</name><comment>Whether this image uses a dedicated allocation</comment></member>
+        </type>
+        <type category="struct" name="VkDedicatedAllocationBufferCreateInfoNV" structextends="VkBufferCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkBool32</type>                         <name>dedicatedAllocation</name><comment>Whether this buffer uses a dedicated allocation</comment></member>
+        </type>
+        <type category="struct" name="VkDedicatedAllocationMemoryAllocateInfoNV" structextends="VkMemoryAllocateInfo">
+            <member values="VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>VkImage</type>          <name>image</name><comment>Image that this allocation will be bound to</comment></member>
+            <member optional="true"><type>VkBuffer</type>         <name>buffer</name><comment>Buffer that this allocation will be bound to</comment></member>
+        </type>
+        <type category="struct" name="VkExternalImageFormatPropertiesNV" returnedonly="true">
+            <member><type>VkImageFormatProperties</type>          <name>imageFormatProperties</name></member>
+            <member optional="true"><type>VkExternalMemoryFeatureFlagsNV</type>   <name>externalMemoryFeatures</name></member>
+            <member optional="true"><type>VkExternalMemoryHandleTypeFlagsNV</type> <name>exportFromImportedHandleTypes</name></member>
+            <member optional="true"><type>VkExternalMemoryHandleTypeFlagsNV</type> <name>compatibleHandleTypes</name></member>
+        </type>
+        <type category="struct" name="VkExternalMemoryImageCreateInfoNV" structextends="VkImageCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>VkExternalMemoryHandleTypeFlagsNV</type> <name>handleTypes</name></member>
+        </type>
+        <type category="struct" name="VkExportMemoryAllocateInfoNV" structextends="VkMemoryAllocateInfo">
+            <member values="VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>VkExternalMemoryHandleTypeFlagsNV</type> <name>handleTypes</name></member>
+        </type>
+        <type category="struct" name="VkImportMemoryWin32HandleInfoNV" structextends="VkMemoryAllocateInfo">
+            <member values="VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>VkExternalMemoryHandleTypeFlagsNV</type> <name>handleType</name></member>
+            <member optional="true"><type>HANDLE</type>                           <name>handle</name></member>
+        </type>
+        <type category="struct" name="VkExportMemoryWin32HandleInfoNV" structextends="VkMemoryAllocateInfo">
+            <member values="VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true">const <type>SECURITY_ATTRIBUTES</type>*       <name>pAttributes</name></member>
+            <member optional="true"><type>DWORD</type>                            <name>dwAccess</name></member>
+        </type>
+        <type category="struct" name="VkWin32KeyedMutexAcquireReleaseInfoNV" structextends="VkSubmitInfo">
+            <member values="VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>uint32_t</type>                         <name>acquireCount</name></member>
+            <member len="acquireCount">const <type>VkDeviceMemory</type>*            <name>pAcquireSyncs</name></member>
+            <member len="acquireCount">const <type>uint64_t</type>*                  <name>pAcquireKeys</name></member>
+            <member len="acquireCount">const <type>uint32_t</type>*                  <name>pAcquireTimeoutMilliseconds</name></member>
+            <member optional="true"><type>uint32_t</type>                         <name>releaseCount</name></member>
+            <member len="releaseCount">const <type>VkDeviceMemory</type>*            <name>pReleaseSyncs</name></member>
+            <member len="releaseCount">const <type>uint64_t</type>*                  <name>pReleaseKeys</name></member>
+        </type>
+        <type category="struct" name="VkDeviceGeneratedCommandsFeaturesNVX">
+            <member values="VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkBool32</type>                         <name>computeBindingPointSupport</name></member>
+        </type>
+        <type category="struct" name="VkDeviceGeneratedCommandsLimitsNVX">
+            <member values="VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>uint32_t</type>                         <name>maxIndirectCommandsLayoutTokenCount</name></member>
+            <member><type>uint32_t</type>                         <name>maxObjectEntryCounts</name></member>
+            <member><type>uint32_t</type>                         <name>minSequenceCountBufferOffsetAlignment</name></member>
+            <member><type>uint32_t</type>                         <name>minSequenceIndexBufferOffsetAlignment</name></member>
+            <member><type>uint32_t</type>                         <name>minCommandsTokenBufferOffsetAlignment</name></member>
+        </type>
+        <type category="struct" name="VkIndirectCommandsTokenNVX">
+            <member><type>VkIndirectCommandsTokenTypeNVX</type>      <name>tokenType</name></member>
+            <member><type>VkBuffer</type>                         <name>buffer</name><comment>buffer containing tableEntries and additional data for indirectCommands</comment></member>
+            <member><type>VkDeviceSize</type>                     <name>offset</name><comment>offset from the base address of the buffer</comment></member>
+        </type>
+        <type category="struct" name="VkIndirectCommandsLayoutTokenNVX">
+            <member><type>VkIndirectCommandsTokenTypeNVX</type>      <name>tokenType</name></member>
+            <member><type>uint32_t</type>                         <name>bindingUnit</name><comment>Binding unit for vertex attribute / descriptor set, offset for pushconstants</comment></member>
+            <member><type>uint32_t</type>                         <name>dynamicCount</name><comment>Number of variable dynamic values for descriptor set / push constants</comment></member>
+            <member><type>uint32_t</type>                         <name>divisor</name><comment>Rate at which the array is advanced per element (must be a power of 2, minimum 1)</comment></member>
+        </type>
+        <type category="struct" name="VkIndirectCommandsLayoutCreateInfoNVX">
+            <member values="VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkPipelineBindPoint</type>                      <name>pipelineBindPoint</name></member>
+            <member><type>VkIndirectCommandsLayoutUsageFlagsNVX</type>    <name>flags</name></member>
+            <member><type>uint32_t</type>                                 <name>tokenCount</name></member>
+            <member len="tokenCount">const <type>VkIndirectCommandsLayoutTokenNVX</type>*  <name>pTokens</name></member>
+        </type>
+        <type category="struct" name="VkCmdProcessCommandsInfoNVX">
+            <member values="VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member externsync="true"><type>VkObjectTableNVX</type>                                         <name>objectTable</name></member>
+            <member><type>VkIndirectCommandsLayoutNVX</type>                              <name>indirectCommandsLayout</name></member>
+            <member><type>uint32_t</type>                                                 <name>indirectCommandsTokenCount</name></member>
+            <member len="indirectCommandsTokenCount">const <type>VkIndirectCommandsTokenNVX</type>*       <name>pIndirectCommandsTokens</name></member>
+            <member><type>uint32_t</type>                                                 <name>maxSequencesCount</name></member>
+            <member optional="true" externsync="true"><type>VkCommandBuffer</type>                          <name>targetCommandBuffer</name></member>
+            <member optional="true"><type>VkBuffer</type>                                 <name>sequencesCountBuffer</name></member>
+            <member optional="true"><type>VkDeviceSize</type>                             <name>sequencesCountOffset</name></member>
+            <member optional="true"><type>VkBuffer</type>                                 <name>sequencesIndexBuffer</name></member>
+            <member optional="true"><type>VkDeviceSize</type>                             <name>sequencesIndexOffset</name></member>
+        </type>
+        <type category="struct" name="VkCmdReserveSpaceForCommandsInfoNVX">
+            <member values="VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member externsync="true"><type>VkObjectTableNVX</type>                                         <name>objectTable</name></member>
+            <member><type>VkIndirectCommandsLayoutNVX</type>                              <name>indirectCommandsLayout</name></member>
+            <member><type>uint32_t</type>                                                 <name>maxSequencesCount</name></member>
+        </type>
+        <type category="struct" name="VkObjectTableCreateInfoNVX">
+            <member values="VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>uint32_t</type>                                          <name>objectCount</name></member>
+            <member len="objectCount">const <type>VkObjectEntryTypeNVX</type>*       <name>pObjectEntryTypes</name></member>
+            <member len="objectCount">const <type>uint32_t</type>*                   <name>pObjectEntryCounts</name></member>
+            <member len="objectCount">const <type>VkObjectEntryUsageFlagsNVX</type>* <name>pObjectEntryUsageFlags</name></member>
+
+            <member><type>uint32_t</type> <name>maxUniformBuffersPerDescriptor</name></member>
+            <member><type>uint32_t</type> <name>maxStorageBuffersPerDescriptor</name></member>
+            <member><type>uint32_t</type> <name>maxStorageImagesPerDescriptor</name></member>
+            <member><type>uint32_t</type> <name>maxSampledImagesPerDescriptor</name></member>
+            <member><type>uint32_t</type> <name>maxPipelineLayouts</name></member>
+        </type>
+        <type category="struct" name="VkObjectTableEntryNVX">
+            <member><type>VkObjectEntryTypeNVX</type>         <name>type</name></member>
+            <member><type>VkObjectEntryUsageFlagsNVX</type>   <name>flags</name></member>
+        </type>
+        <type category="struct" name="VkObjectTablePipelineEntryNVX">
+            <member><type>VkObjectEntryTypeNVX</type>         <name>type</name></member>
+            <member><type>VkObjectEntryUsageFlagsNVX</type>   <name>flags</name></member>
+            <member><type>VkPipeline</type>                   <name>pipeline</name></member>
+        </type>
+        <type category="struct" name="VkObjectTableDescriptorSetEntryNVX">
+            <member><type>VkObjectEntryTypeNVX</type>         <name>type</name></member>
+            <member><type>VkObjectEntryUsageFlagsNVX</type>   <name>flags</name></member>
+            <member><type>VkPipelineLayout</type>             <name>pipelineLayout</name></member>
+            <member><type>VkDescriptorSet</type>              <name>descriptorSet</name></member>
+        </type>
+        <type category="struct" name="VkObjectTableVertexBufferEntryNVX">
+            <member><type>VkObjectEntryTypeNVX</type>         <name>type</name></member>
+            <member><type>VkObjectEntryUsageFlagsNVX</type>   <name>flags</name></member>
+            <member><type>VkBuffer</type>                     <name>buffer</name></member>
+        </type>
+        <type category="struct" name="VkObjectTableIndexBufferEntryNVX">
+            <member><type>VkObjectEntryTypeNVX</type>         <name>type</name></member>
+            <member><type>VkObjectEntryUsageFlagsNVX</type>   <name>flags</name></member>
+            <member><type>VkBuffer</type>                     <name>buffer</name></member>
+            <member><type>VkIndexType</type>                  <name>indexType</name></member>
+        </type>
+        <type category="struct" name="VkObjectTablePushConstantEntryNVX">
+            <member><type>VkObjectEntryTypeNVX</type>         <name>type</name></member>
+            <member><type>VkObjectEntryUsageFlagsNVX</type>   <name>flags</name></member>
+            <member><type>VkPipelineLayout</type>             <name>pipelineLayout</name></member>
+            <member><type>VkShaderStageFlags</type>           <name>stageFlags</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceFeatures2" structextends="VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkPhysicalDeviceFeatures</type>         <name>features</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceFeatures2KHR"                            alias="VkPhysicalDeviceFeatures2"/>
+        <type category="struct" name="VkPhysicalDeviceProperties2" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkPhysicalDeviceProperties</type>       <name>properties</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceProperties2KHR"                          alias="VkPhysicalDeviceProperties2"/>
+        <type category="struct" name="VkFormatProperties2" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkFormatProperties</type>               <name>formatProperties</name></member>
+        </type>
+        <type category="struct" name="VkFormatProperties2KHR"                                  alias="VkFormatProperties2"/>
+        <type category="struct" name="VkImageFormatProperties2" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>* <name>pNext</name></member>
+            <member><type>VkImageFormatProperties</type>          <name>imageFormatProperties</name></member>
+        </type>
+        <type category="struct" name="VkImageFormatProperties2KHR"                             alias="VkImageFormatProperties2"/>
+        <type category="struct" name="VkPhysicalDeviceImageFormatInfo2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>* <name>pNext</name></member>
+            <member><type>VkFormat</type>                         <name>format</name></member>
+            <member><type>VkImageType</type>                      <name>type</name></member>
+            <member><type>VkImageTiling</type>                    <name>tiling</name></member>
+            <member><type>VkImageUsageFlags</type>                <name>usage</name></member>
+            <member optional="true"><type>VkImageCreateFlags</type> <name>flags</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceImageFormatInfo2KHR"                     alias="VkPhysicalDeviceImageFormatInfo2"/>
+        <type category="struct" name="VkQueueFamilyProperties2" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkQueueFamilyProperties</type>          <name>queueFamilyProperties</name></member>
+        </type>
+        <type category="struct" name="VkQueueFamilyProperties2KHR"                             alias="VkQueueFamilyProperties2"/>
+        <type category="struct" name="VkPhysicalDeviceMemoryProperties2" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkPhysicalDeviceMemoryProperties</type> <name>memoryProperties</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceMemoryProperties2KHR"                    alias="VkPhysicalDeviceMemoryProperties2"/>
+        <type category="struct" name="VkSparseImageFormatProperties2" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkSparseImageFormatProperties</type>    <name>properties</name></member>
+        </type>
+        <type category="struct" name="VkSparseImageFormatProperties2KHR"                       alias="VkSparseImageFormatProperties2"/>
+        <type category="struct" name="VkPhysicalDeviceSparseImageFormatInfo2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkFormat</type>                         <name>format</name></member>
+            <member><type>VkImageType</type>                      <name>type</name></member>
+            <member><type>VkSampleCountFlagBits</type>            <name>samples</name></member>
+            <member><type>VkImageUsageFlags</type>                <name>usage</name></member>
+            <member><type>VkImageTiling</type>                    <name>tiling</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceSparseImageFormatInfo2KHR"               alias="VkPhysicalDeviceSparseImageFormatInfo2"/>
+        <type category="struct" name="VkPhysicalDevicePushDescriptorPropertiesKHR" returnedonly="true" structextends="VkPhysicalDeviceProperties2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>uint32_t</type>                         <name>maxPushDescriptors</name></member>
+        </type>
+        <type category="struct" name="VkConformanceVersionKHR">
+            <member><type>uint8_t</type>                          <name>major</name></member>
+            <member><type>uint8_t</type>                          <name>minor</name></member>
+            <member><type>uint8_t</type>                          <name>subminor</name></member>
+            <member><type>uint8_t</type>                          <name>patch</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceDriverPropertiesKHR" structextends="VkPhysicalDeviceProperties2" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkDriverIdKHR</type>                    <name>driverID</name></member>
+            <member><type>char</type>                             <name>driverName</name>[<enum>VK_MAX_DRIVER_NAME_SIZE_KHR</enum>]</member>
+            <member><type>char</type>                             <name>driverInfo</name>[<enum>VK_MAX_DRIVER_INFO_SIZE_KHR</enum>]</member>
+            <member><type>VkConformanceVersionKHR</type>          <name>conformanceVersion</name></member>
+        </type>
+        <type category="struct" name="VkPresentRegionsKHR" structextends="VkPresentInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>uint32_t</type>                         <name>swapchainCount</name><comment>Copy of VkPresentInfoKHR::swapchainCount</comment></member>
+            <member len="swapchainCount" optional="true">const <type>VkPresentRegionKHR</type>*   <name>pRegions</name><comment>The regions that have changed</comment></member>
+        </type>
+        <type category="struct" name="VkPresentRegionKHR">
+            <member optional="true"><type>uint32_t</type>         <name>rectangleCount</name><comment>Number of rectangles in pRectangles</comment></member>
+            <member optional="true" len="rectangleCount">const <type>VkRectLayerKHR</type>*   <name>pRectangles</name><comment>Array of rectangles that have changed in a swapchain's image(s)</comment></member>
+        </type>
+        <type category="struct" name="VkRectLayerKHR">
+            <member><type>VkOffset2D</type>                       <name>offset</name><comment>upper-left corner of a rectangle that has not changed, in pixels of a presentation image</comment></member>
+            <member noautovalidity="true"><type>VkExtent2D</type> <name>extent</name><comment>Dimensions of a rectangle that has not changed, in pixels of a presentation image</comment></member>
+            <member><type>uint32_t</type>                         <name>layer</name><comment>Layer of a swapchain's image(s), for stereoscopic-3D images</comment></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceVariablePointersFeatures" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkBool32</type>                         <name>variablePointersStorageBuffer</name></member>
+            <member><type>VkBool32</type>                         <name>variablePointers</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceVariablePointersFeaturesKHR"             alias="VkPhysicalDeviceVariablePointersFeatures"/>
+        <type category="struct" name="VkPhysicalDeviceVariablePointerFeaturesKHR"              alias="VkPhysicalDeviceVariablePointersFeatures"/>
+        <type category="struct" name="VkPhysicalDeviceVariablePointerFeatures"                 alias="VkPhysicalDeviceVariablePointersFeatures"/>
+        <type category="struct" name="VkExternalMemoryProperties" returnedonly="true">
+            <member><type>VkExternalMemoryFeatureFlags</type>  <name>externalMemoryFeatures</name></member>
+            <member optional="true"><type>VkExternalMemoryHandleTypeFlags</type> <name>exportFromImportedHandleTypes</name></member>
+            <member><type>VkExternalMemoryHandleTypeFlags</type> <name>compatibleHandleTypes</name></member>
+        </type>
+        <type category="struct" name="VkExternalMemoryPropertiesKHR"                           alias="VkExternalMemoryProperties"/>
+        <type category="struct" name="VkPhysicalDeviceExternalImageFormatInfo"  structextends="VkPhysicalDeviceImageFormatInfo2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>VkExternalMemoryHandleTypeFlagBits</type> <name>handleType</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceExternalImageFormatInfoKHR"              alias="VkPhysicalDeviceExternalImageFormatInfo"/>
+        <type category="struct" name="VkExternalImageFormatProperties" returnedonly="true" structextends="VkImageFormatProperties2">
+            <member values="VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkExternalMemoryProperties</type> <name>externalMemoryProperties</name></member>
+        </type>
+        <type category="struct" name="VkExternalImageFormatPropertiesKHR"                      alias="VkExternalImageFormatProperties"/>
+        <type category="struct" name="VkPhysicalDeviceExternalBufferInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>VkBufferCreateFlags</type> <name>flags</name></member>
+            <member><type>VkBufferUsageFlags</type>               <name>usage</name></member>
+            <member><type>VkExternalMemoryHandleTypeFlagBits</type> <name>handleType</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceExternalBufferInfoKHR"                   alias="VkPhysicalDeviceExternalBufferInfo"/>
+        <type category="struct" name="VkExternalBufferProperties" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkExternalMemoryProperties</type>    <name>externalMemoryProperties</name></member>
+        </type>
+        <type category="struct" name="VkExternalBufferPropertiesKHR"                           alias="VkExternalBufferProperties"/>
+        <type category="struct" name="VkPhysicalDeviceIDProperties" returnedonly="true" structextends="VkPhysicalDeviceProperties2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>uint8_t</type>                          <name>deviceUUID</name>[<enum>VK_UUID_SIZE</enum>]</member>
+            <member><type>uint8_t</type>                          <name>driverUUID</name>[<enum>VK_UUID_SIZE</enum>]</member>
+            <member><type>uint8_t</type>                          <name>deviceLUID</name>[<enum>VK_LUID_SIZE</enum>]</member>
+            <member><type>uint32_t</type>                         <name>deviceNodeMask</name></member>
+            <member><type>VkBool32</type>                         <name>deviceLUIDValid</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceIDPropertiesKHR"                         alias="VkPhysicalDeviceIDProperties"/>
+        <type category="struct" name="VkExternalMemoryImageCreateInfo" structextends="VkImageCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkExternalMemoryHandleTypeFlags</type> <name>handleTypes</name></member>
+        </type>
+        <type category="struct" name="VkExternalMemoryImageCreateInfoKHR"                      alias="VkExternalMemoryImageCreateInfo"/>
+        <type category="struct" name="VkExternalMemoryBufferCreateInfo" structextends="VkBufferCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>VkExternalMemoryHandleTypeFlags</type> <name>handleTypes</name></member>
+        </type>
+        <type category="struct" name="VkExternalMemoryBufferCreateInfoKHR"                     alias="VkExternalMemoryBufferCreateInfo"/>
+        <type category="struct" name="VkExportMemoryAllocateInfo" structextends="VkMemoryAllocateInfo">
+            <member values="VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>VkExternalMemoryHandleTypeFlags</type> <name>handleTypes</name></member>
+        </type>
+        <type category="struct" name="VkExportMemoryAllocateInfoKHR"                           alias="VkExportMemoryAllocateInfo"/>
+        <type category="struct" name="VkImportMemoryWin32HandleInfoKHR" structextends="VkMemoryAllocateInfo">
+            <member values="VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>VkExternalMemoryHandleTypeFlagBits</type> <name>handleType</name></member>
+            <member optional="true"><type>HANDLE</type>           <name>handle</name></member>
+            <member optional="true"><type>LPCWSTR</type>          <name>name</name></member>
+        </type>
+        <type category="struct" name="VkExportMemoryWin32HandleInfoKHR" structextends="VkMemoryAllocateInfo">
+            <member values="VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true">const <type>SECURITY_ATTRIBUTES</type>* <name>pAttributes</name></member>
+            <member><type>DWORD</type>                            <name>dwAccess</name></member>
+            <member><type>LPCWSTR</type>                          <name>name</name></member>
+        </type>
+        <type category="struct" name="VkMemoryWin32HandlePropertiesKHR" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>uint32_t</type>                         <name>memoryTypeBits</name></member>
+        </type>
+        <type category="struct" name="VkMemoryGetWin32HandleInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkDeviceMemory</type>                   <name>memory</name></member>
+            <member><type>VkExternalMemoryHandleTypeFlagBits</type> <name>handleType</name></member>
+        </type>
+        <type category="struct" name="VkImportMemoryFdInfoKHR" structextends="VkMemoryAllocateInfo">
+            <member values="VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>VkExternalMemoryHandleTypeFlagBits</type> <name>handleType</name></member>
+            <member><type>int</type>                              <name>fd</name></member>
+        </type>
+        <type category="struct" name="VkMemoryFdPropertiesKHR" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>uint32_t</type>                         <name>memoryTypeBits</name></member>
+        </type>
+        <type category="struct" name="VkMemoryGetFdInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkDeviceMemory</type>                   <name>memory</name></member>
+            <member><type>VkExternalMemoryHandleTypeFlagBits</type> <name>handleType</name></member>
+        </type>
+        <type category="struct" name="VkWin32KeyedMutexAcquireReleaseInfoKHR" structextends="VkSubmitInfo">
+            <member values="VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>uint32_t</type>         <name>acquireCount</name></member>
+            <member len="acquireCount">const <type>VkDeviceMemory</type>* <name>pAcquireSyncs</name></member>
+            <member len="acquireCount">const <type>uint64_t</type>* <name>pAcquireKeys</name></member>
+            <member len="acquireCount">const <type>uint32_t</type>* <name>pAcquireTimeouts</name></member>
+            <member optional="true"><type>uint32_t</type>         <name>releaseCount</name></member>
+            <member len="releaseCount">const <type>VkDeviceMemory</type>* <name>pReleaseSyncs</name></member>
+            <member len="releaseCount">const <type>uint64_t</type>* <name>pReleaseKeys</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceExternalSemaphoreInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkExternalSemaphoreHandleTypeFlagBits</type> <name>handleType</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceExternalSemaphoreInfoKHR"                alias="VkPhysicalDeviceExternalSemaphoreInfo"/>
+        <type category="struct" name="VkExternalSemaphoreProperties" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkExternalSemaphoreHandleTypeFlags</type> <name>exportFromImportedHandleTypes</name></member>
+            <member><type>VkExternalSemaphoreHandleTypeFlags</type> <name>compatibleHandleTypes</name></member>
+            <member optional="true"><type>VkExternalSemaphoreFeatureFlags</type> <name>externalSemaphoreFeatures</name></member>
+        </type>
+        <type category="struct" name="VkExternalSemaphorePropertiesKHR"                        alias="VkExternalSemaphoreProperties"/>
+        <type category="struct" name="VkExportSemaphoreCreateInfo" structextends="VkSemaphoreCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>VkExternalSemaphoreHandleTypeFlags</type> <name>handleTypes</name></member>
+        </type>
+        <type category="struct" name="VkExportSemaphoreCreateInfoKHR"                          alias="VkExportSemaphoreCreateInfo"/>
+        <type category="struct" name="VkImportSemaphoreWin32HandleInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member externsync="true"><type>VkSemaphore</type>    <name>semaphore</name></member>
+            <member optional="true"><type>VkSemaphoreImportFlags</type> <name>flags</name></member>
+            <member optional="true"><type>VkExternalSemaphoreHandleTypeFlagBits</type> <name>handleType</name></member>
+            <member optional="true"><type>HANDLE</type>           <name>handle</name></member>
+            <member optional="true"><type>LPCWSTR</type>          <name>name</name></member>
+        </type>
+        <type category="struct" name="VkExportSemaphoreWin32HandleInfoKHR" structextends="VkSemaphoreCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true">const <type>SECURITY_ATTRIBUTES</type>*       <name>pAttributes</name></member>
+            <member><type>DWORD</type>                            <name>dwAccess</name></member>
+            <member><type>LPCWSTR</type>                          <name>name</name></member>
+        </type>
+        <type category="struct" name="VkD3D12FenceSubmitInfoKHR" structextends="VkSubmitInfo">
+            <member values="VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>uint32_t</type>         <name>waitSemaphoreValuesCount</name></member>
+            <member optional="true" len="waitSemaphoreValuesCount">const <type>uint64_t</type>* <name>pWaitSemaphoreValues</name></member>
+            <member optional="true"><type>uint32_t</type>         <name>signalSemaphoreValuesCount</name></member>
+            <member optional="true" len="signalSemaphoreValuesCount">const <type>uint64_t</type>* <name>pSignalSemaphoreValues</name></member>
+        </type>
+        <type category="struct" name="VkSemaphoreGetWin32HandleInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkSemaphore</type>                      <name>semaphore</name></member>
+            <member><type>VkExternalSemaphoreHandleTypeFlagBits</type> <name>handleType</name></member>
+        </type>
+        <type category="struct" name="VkImportSemaphoreFdInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member externsync="true"><type>VkSemaphore</type>    <name>semaphore</name></member>
+            <member optional="true"><type>VkSemaphoreImportFlags</type> <name>flags</name></member>
+            <member><type>VkExternalSemaphoreHandleTypeFlagBits</type> <name>handleType</name></member>
+            <member><type>int</type>                              <name>fd</name></member>
+        </type>
+        <type category="struct" name="VkSemaphoreGetFdInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkSemaphore</type>                      <name>semaphore</name></member>
+            <member><type>VkExternalSemaphoreHandleTypeFlagBits</type> <name>handleType</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceExternalFenceInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkExternalFenceHandleTypeFlagBits</type> <name>handleType</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceExternalFenceInfoKHR"                    alias="VkPhysicalDeviceExternalFenceInfo"/>
+        <type category="struct" name="VkExternalFenceProperties" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkExternalFenceHandleTypeFlags</type> <name>exportFromImportedHandleTypes</name></member>
+            <member><type>VkExternalFenceHandleTypeFlags</type> <name>compatibleHandleTypes</name></member>
+            <member optional="true"><type>VkExternalFenceFeatureFlags</type> <name>externalFenceFeatures</name></member>
+        </type>
+        <type category="struct" name="VkExternalFencePropertiesKHR"                            alias="VkExternalFenceProperties"/>
+        <type category="struct" name="VkExportFenceCreateInfo" structextends="VkFenceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>VkExternalFenceHandleTypeFlags</type> <name>handleTypes</name></member>
+        </type>
+        <type category="struct" name="VkExportFenceCreateInfoKHR"                              alias="VkExportFenceCreateInfo"/>
+        <type category="struct" name="VkImportFenceWin32HandleInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                                        <name>pNext</name></member>
+            <member externsync="true"><type>VkFence</type>                          <name>fence</name></member>
+            <member optional="true"><type>VkFenceImportFlags</type>              <name>flags</name></member>
+            <member optional="true"><type>VkExternalFenceHandleTypeFlagBits</type>  <name>handleType</name></member>
+            <member optional="true"><type>HANDLE</type>                             <name>handle</name></member>
+            <member optional="true"><type>LPCWSTR</type>                            <name>name</name></member>
+        </type>
+        <type category="struct" name="VkExportFenceWin32HandleInfoKHR" structextends="VkFenceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                                <name>pNext</name></member>
+            <member optional="true">const <type>SECURITY_ATTRIBUTES</type>* <name>pAttributes</name></member>
+            <member><type>DWORD</type>                                      <name>dwAccess</name></member>
+            <member><type>LPCWSTR</type>                                    <name>name</name></member>
+        </type>
+        <type category="struct" name="VkFenceGetWin32HandleInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkFence</type>                                <name>fence</name></member>
+            <member><type>VkExternalFenceHandleTypeFlagBits</type>   <name>handleType</name></member>
+        </type>
+        <type category="struct" name="VkImportFenceFdInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                            <name>pNext</name></member>
+            <member externsync="true"><type>VkFence</type>              <name>fence</name></member>
+            <member optional="true"><type>VkFenceImportFlags</type>  <name>flags</name></member>
+            <member><type>VkExternalFenceHandleTypeFlagBits</type>   <name>handleType</name></member>
+            <member><type>int</type>                                    <name>fd</name></member>
+        </type>
+        <type category="struct" name="VkFenceGetFdInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkFence</type>                                <name>fence</name></member>
+            <member><type>VkExternalFenceHandleTypeFlagBits</type>   <name>handleType</name></member>
+        </type>
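+        <!-- Illustrative usage sketch (not part of the registry data): exporting a semaphore
+             payload as a POSIX file descriptor with VkSemaphoreGetFdInfoKHR, assuming a
+             hypothetical VkDevice "device" and a VkSemaphore "semaphore" created with an
+             exportable opaque-fd handle type (VK_KHR_external_semaphore_fd enabled):
+
+                 VkSemaphoreGetFdInfoKHR getFdInfo = {0};
+                 getFdInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR;
+                 getFdInfo.semaphore = semaphore;
+                 getFdInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT;
+                 int fd = -1;
+                 VkResult result = vkGetSemaphoreFdKHR(device, &getFdInfo, &fd);
+                 /* The returned fd can later be imported via VkImportSemaphoreFdInfoKHR. */
+        -->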
+        <type category="struct" name="VkPhysicalDeviceMultiviewFeatures" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkBool32</type>                         <name>multiview</name><comment>Multiple views in a renderpass</comment></member>
+            <member><type>VkBool32</type>                         <name>multiviewGeometryShader</name><comment>Multiple views in a renderpass w/ geometry shader</comment></member>
+            <member><type>VkBool32</type>                         <name>multiviewTessellationShader</name><comment>Multiple views in a renderpass w/ tessellation shader</comment></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceMultiviewFeaturesKHR"                    alias="VkPhysicalDeviceMultiviewFeatures"/>
+        <type category="struct" name="VkPhysicalDeviceMultiviewProperties" returnedonly="true" structextends="VkPhysicalDeviceProperties2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>uint32_t</type>                         <name>maxMultiviewViewCount</name><comment>max number of views in a subpass</comment></member>
+            <member><type>uint32_t</type>                         <name>maxMultiviewInstanceIndex</name><comment>max instance index for a draw in a multiview subpass</comment></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceMultiviewPropertiesKHR"                  alias="VkPhysicalDeviceMultiviewProperties"/>
+        <type category="struct" name="VkRenderPassMultiviewCreateInfo" structextends="VkRenderPassCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO"><type>VkStructureType</type>        <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>uint32_t</type>               <name>subpassCount</name></member>
+            <member len="subpassCount">const <type>uint32_t</type>*     <name>pViewMasks</name></member>
+            <member optional="true"><type>uint32_t</type>               <name>dependencyCount</name></member>
+            <member len="dependencyCount">const <type>int32_t</type>*   <name>pViewOffsets</name></member>
+            <member optional="true"><type>uint32_t</type>               <name>correlationMaskCount</name></member>
+            <member len="correlationMaskCount">const <type>uint32_t</type>* <name>pCorrelationMasks</name></member>
+        </type>
+        <type category="struct" name="VkRenderPassMultiviewCreateInfoKHR"                      alias="VkRenderPassMultiviewCreateInfo"/>
+        <type category="struct" name="VkSurfaceCapabilities2EXT" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>uint32_t</type>                         <name>minImageCount</name><comment>Supported minimum number of images for the surface</comment></member>
+            <member><type>uint32_t</type>                         <name>maxImageCount</name><comment>Supported maximum number of images for the surface, 0 for unlimited</comment></member>
+            <member><type>VkExtent2D</type>                       <name>currentExtent</name><comment>Current image width and height for the surface, (0, 0) if undefined</comment></member>
+            <member><type>VkExtent2D</type>                       <name>minImageExtent</name><comment>Supported minimum image width and height for the surface</comment></member>
+            <member><type>VkExtent2D</type>                       <name>maxImageExtent</name><comment>Supported maximum image width and height for the surface</comment></member>
+            <member><type>uint32_t</type>                         <name>maxImageArrayLayers</name><comment>Supported maximum number of image layers for the surface</comment></member>
+            <member optional="true"><type>VkSurfaceTransformFlagsKHR</type>       <name>supportedTransforms</name><comment>1 or more bits representing the transforms supported</comment></member>
+            <member><type>VkSurfaceTransformFlagBitsKHR</type>    <name>currentTransform</name><comment>The surface's current transform relative to the device's natural orientation</comment></member>
+            <member optional="true"><type>VkCompositeAlphaFlagsKHR</type>         <name>supportedCompositeAlpha</name><comment>1 or more bits representing the alpha compositing modes supported</comment></member>
+            <member optional="true"><type>VkImageUsageFlags</type>                <name>supportedUsageFlags</name><comment>Supported image usage flags for the surface</comment></member>
+            <member optional="true"><type>VkSurfaceCounterFlagsEXT</type> <name>supportedSurfaceCounters</name></member>
+        </type>
+        <type category="struct" name="VkDisplayPowerInfoEXT">
+            <member values="VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkDisplayPowerStateEXT</type>           <name>powerState</name></member>
+        </type>
+        <type category="struct" name="VkDeviceEventInfoEXT">
+            <member values="VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkDeviceEventTypeEXT</type>             <name>deviceEvent</name></member>
+        </type>
+        <type category="struct" name="VkDisplayEventInfoEXT">
+            <member values="VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkDisplayEventTypeEXT</type>            <name>displayEvent</name></member>
+        </type>
+        <type category="struct" name="VkSwapchainCounterCreateInfoEXT" structextends="VkSwapchainCreateInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>VkSurfaceCounterFlagsEXT</type>         <name>surfaceCounters</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceGroupProperties" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>uint32_t</type>                         <name>physicalDeviceCount</name></member>
+            <member><type>VkPhysicalDevice</type>                 <name>physicalDevices</name>[<enum>VK_MAX_DEVICE_GROUP_SIZE</enum>]</member>
+            <member><type>VkBool32</type>                         <name>subsetAllocation</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceGroupPropertiesKHR"                      alias="VkPhysicalDeviceGroupProperties"/>
+        <type category="struct" name="VkMemoryAllocateFlagsInfo" structextends="VkMemoryAllocateInfo">
+            <member values="VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>VkMemoryAllocateFlags</type> <name>flags</name></member>
+            <member><type>uint32_t</type>                         <name>deviceMask</name></member>
+        </type>
+        <type category="struct" name="VkMemoryAllocateFlagsInfoKHR"                            alias="VkMemoryAllocateFlagsInfo"/>
+        <type category="struct" name="VkBindBufferMemoryInfo">
+            <member values="VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkBuffer</type>                         <name>buffer</name></member>
+            <member><type>VkDeviceMemory</type>                   <name>memory</name></member>
+            <member><type>VkDeviceSize</type>                     <name>memoryOffset</name></member>
+        </type>
+        <type category="struct" name="VkBindBufferMemoryInfoKHR"                               alias="VkBindBufferMemoryInfo"/>
+        <type category="struct" name="VkBindBufferMemoryDeviceGroupInfo" structextends="VkBindBufferMemoryInfo">
+            <member values="VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>uint32_t</type>         <name>deviceIndexCount</name></member>
+            <member len="deviceIndexCount">const <type>uint32_t</type>*  <name>pDeviceIndices</name></member>
+        </type>
+        <type category="struct" name="VkBindBufferMemoryDeviceGroupInfoKHR"                    alias="VkBindBufferMemoryDeviceGroupInfo"/>
+        <type category="struct" name="VkBindImageMemoryInfo">
+            <member values="VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkImage</type>                          <name>image</name></member>
+            <member noautovalidity="true"><type>VkDeviceMemory</type>                   <name>memory</name></member>
+            <member><type>VkDeviceSize</type>                     <name>memoryOffset</name></member>
+        </type>
+        <type category="struct" name="VkBindImageMemoryInfoKHR"                                alias="VkBindImageMemoryInfo"/>
+        <type category="struct" name="VkBindImageMemoryDeviceGroupInfo" structextends="VkBindImageMemoryInfo">
+            <member values="VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>uint32_t</type>         <name>deviceIndexCount</name></member>
+            <member len="deviceIndexCount">const <type>uint32_t</type>*  <name>pDeviceIndices</name></member>
+            <member optional="true"><type>uint32_t</type>         <name>splitInstanceBindRegionCount</name></member>
+            <member len="splitInstanceBindRegionCount">const <type>VkRect2D</type>*  <name>pSplitInstanceBindRegions</name></member>
+        </type>
+        <type category="struct" name="VkBindImageMemoryDeviceGroupInfoKHR"                     alias="VkBindImageMemoryDeviceGroupInfo"/>
+        <type category="struct" name="VkDeviceGroupRenderPassBeginInfo" structextends="VkRenderPassBeginInfo">
+            <member values="VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>uint32_t</type>                         <name>deviceMask</name></member>
+            <member optional="true"><type>uint32_t</type>         <name>deviceRenderAreaCount</name></member>
+            <member len="deviceRenderAreaCount">const <type>VkRect2D</type>*  <name>pDeviceRenderAreas</name></member>
+        </type>
+        <type category="struct" name="VkDeviceGroupRenderPassBeginInfoKHR"                     alias="VkDeviceGroupRenderPassBeginInfo"/>
+        <type category="struct" name="VkDeviceGroupCommandBufferBeginInfo" structextends="VkCommandBufferBeginInfo">
+            <member values="VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>uint32_t</type>                         <name>deviceMask</name></member>
+        </type>
+        <type category="struct" name="VkDeviceGroupCommandBufferBeginInfoKHR"                  alias="VkDeviceGroupCommandBufferBeginInfo"/>
+        <type category="struct" name="VkDeviceGroupSubmitInfo" structextends="VkSubmitInfo">
+            <member values="VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>uint32_t</type>         <name>waitSemaphoreCount</name></member>
+            <member len="waitSemaphoreCount">const <type>uint32_t</type>*    <name>pWaitSemaphoreDeviceIndices</name></member>
+            <member optional="true"><type>uint32_t</type>         <name>commandBufferCount</name></member>
+            <member len="commandBufferCount">const <type>uint32_t</type>*    <name>pCommandBufferDeviceMasks</name></member>
+            <member optional="true"><type>uint32_t</type>         <name>signalSemaphoreCount</name></member>
+            <member len="signalSemaphoreCount">const <type>uint32_t</type>*  <name>pSignalSemaphoreDeviceIndices</name></member>
+        </type>
+        <type category="struct" name="VkDeviceGroupSubmitInfoKHR"                              alias="VkDeviceGroupSubmitInfo"/>
+        <type category="struct" name="VkDeviceGroupBindSparseInfo" structextends="VkBindSparseInfo">
+            <member values="VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>uint32_t</type>                         <name>resourceDeviceIndex</name></member>
+            <member><type>uint32_t</type>                         <name>memoryDeviceIndex</name></member>
+        </type>
+        <type category="struct" name="VkDeviceGroupBindSparseInfoKHR"                          alias="VkDeviceGroupBindSparseInfo"/>
+        <type category="struct" name="VkDeviceGroupPresentCapabilitiesKHR" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>uint32_t</type>                         <name>presentMask</name>[<enum>VK_MAX_DEVICE_GROUP_SIZE</enum>]</member>
+            <member><type>VkDeviceGroupPresentModeFlagsKHR</type> <name>modes</name></member>
+        </type>
+        <type category="struct" name="VkImageSwapchainCreateInfoKHR" structextends="VkImageCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>VkSwapchainKHR</type>   <name>swapchain</name></member>
+        </type>
+        <type category="struct" name="VkBindImageMemorySwapchainInfoKHR" structextends="VkBindImageMemoryInfo">
+            <member values="VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member externsync="true"><type>VkSwapchainKHR</type> <name>swapchain</name></member>
+            <member><type>uint32_t</type>                         <name>imageIndex</name></member>
+        </type>
+        <type category="struct" name="VkAcquireNextImageInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member externsync="true"><type>VkSwapchainKHR</type> <name>swapchain</name></member>
+            <member><type>uint64_t</type>                         <name>timeout</name></member>
+            <member optional="true" externsync="true"><type>VkSemaphore</type> <name>semaphore</name></member>
+            <member optional="true" externsync="true"><type>VkFence</type> <name>fence</name></member>
+            <member><type>uint32_t</type>                         <name>deviceMask</name></member>
+        </type>
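+        <!-- Illustrative usage sketch (not part of the registry data): acquiring a swapchain
+             image with VkAcquireNextImageInfoKHR via vkAcquireNextImage2KHR, assuming a
+             hypothetical VkDevice "device", VkSwapchainKHR "swapchain", and VkSemaphore
+             "acquireSemaphore" (VK_KHR_swapchain plus Vulkan 1.1 device groups):
+
+                 VkAcquireNextImageInfoKHR acquireInfo = {0};
+                 acquireInfo.sType = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR;
+                 acquireInfo.swapchain = swapchain;
+                 acquireInfo.timeout = UINT64_MAX;            /* block until an image is free */
+                 acquireInfo.semaphore = acquireSemaphore;    /* signaled when ready for use */
+                 acquireInfo.fence = VK_NULL_HANDLE;
+                 acquireInfo.deviceMask = 0x1;                /* physical device 0 of the group */
+                 uint32_t imageIndex = 0;
+                 VkResult result = vkAcquireNextImage2KHR(device, &acquireInfo, &imageIndex);
+        -->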
+        <type category="struct" name="VkDeviceGroupPresentInfoKHR" structextends="VkPresentInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>uint32_t</type>         <name>swapchainCount</name></member>
+            <member len="swapchainCount">const <type>uint32_t</type>* <name>pDeviceMasks</name></member>
+            <member><type>VkDeviceGroupPresentModeFlagBitsKHR</type> <name>mode</name></member>
+        </type>
+        <type category="struct" name="VkDeviceGroupDeviceCreateInfo" structextends="VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>uint32_t</type>                         <name>physicalDeviceCount</name></member>
+            <member len="physicalDeviceCount">const <type>VkPhysicalDevice</type>*  <name>pPhysicalDevices</name></member>
+        </type>
+        <type category="struct" name="VkDeviceGroupDeviceCreateInfoKHR"                        alias="VkDeviceGroupDeviceCreateInfo"/>
+        <type category="struct" name="VkDeviceGroupSwapchainCreateInfoKHR" structextends="VkSwapchainCreateInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkDeviceGroupPresentModeFlagsKHR</type>                         <name>modes</name></member>
+        </type>
+        <type category="struct" name="VkDescriptorUpdateTemplateEntry">
+            <member><type>uint32_t</type>                         <name>dstBinding</name><comment>Binding within the destination descriptor set to write</comment></member>
+            <member><type>uint32_t</type>                         <name>dstArrayElement</name><comment>Array element within the destination binding to write</comment></member>
+            <member><type>uint32_t</type>                         <name>descriptorCount</name><comment>Number of descriptors to write</comment></member>
+            <member><type>VkDescriptorType</type>                 <name>descriptorType</name><comment>Descriptor type to write</comment></member>
+            <member><type>size_t</type>                           <name>offset</name><comment>Offset into pData where the descriptors to update are stored</comment></member>
+            <member><type>size_t</type>                           <name>stride</name><comment>Stride between two descriptors in pData when writing more than one descriptor</comment></member>
+        </type>
+        <type category="struct" name="VkDescriptorUpdateTemplateEntryKHR"                      alias="VkDescriptorUpdateTemplateEntry"/>
+        <type category="struct" name="VkDescriptorUpdateTemplateCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                               <name>pNext</name></member>
+            <member optional="true"><type>VkDescriptorUpdateTemplateCreateFlags</type>    <name>flags</name></member>
+            <member><type>uint32_t</type>                 <name>descriptorUpdateEntryCount</name><comment>Number of descriptor update entries to use for the update template</comment></member>
+            <member len="descriptorUpdateEntryCount">const <type>VkDescriptorUpdateTemplateEntry</type>* <name>pDescriptorUpdateEntries</name><comment>Descriptor update entries for the template</comment></member>
+            <member><type>VkDescriptorUpdateTemplateType</type> <name>templateType</name></member>
+            <member noautovalidity="true"><type>VkDescriptorSetLayout</type> <name>descriptorSetLayout</name></member>
+            <member noautovalidity="true"><type>VkPipelineBindPoint</type> <name>pipelineBindPoint</name></member>
+            <member noautovalidity="true"><type>VkPipelineLayout</type><name>pipelineLayout</name><comment>If used for push descriptors, this is the only allowed layout</comment></member>
+            <member noautovalidity="true"><type>uint32_t</type> <name>set</name></member>
+        </type>
+        <type category="struct" name="VkDescriptorUpdateTemplateCreateInfoKHR"                 alias="VkDescriptorUpdateTemplateCreateInfo"/>
+        <type category="struct" name="VkXYColorEXT" comment="Chromaticity coordinate">
+            <member><type>float</type>   <name>x</name></member>
+            <member><type>float</type>   <name>y</name></member>
+        </type>
+        <type category="struct" name="VkHdrMetadataEXT">
+                <comment>Display primary in chromaticity coordinates</comment>
+            <member values="VK_STRUCTURE_TYPE_HDR_METADATA_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*    <name>pNext</name></member>
+                <comment> From SMPTE 2086</comment>
+            <member noautovalidity="true"><type>VkXYColorEXT</type>   <name>displayPrimaryRed</name><comment>Display primary's Red</comment></member>
+            <member noautovalidity="true"><type>VkXYColorEXT</type>   <name>displayPrimaryGreen</name><comment>Display primary's Green</comment></member>
+            <member noautovalidity="true"><type>VkXYColorEXT</type>   <name>displayPrimaryBlue</name><comment>Display primary's Blue</comment></member>
+            <member noautovalidity="true"><type>VkXYColorEXT</type>   <name>whitePoint</name><comment>Display primary's Blue</comment></member>
+            <member noautovalidity="true"><type>float</type>          <name>maxLuminance</name><comment>Display maximum luminance</comment></member>
+            <member noautovalidity="true"><type>float</type>          <name>minLuminance</name><comment>Display minimum luminance</comment></member>
+                <comment> From CTA 861.3</comment>
+            <member noautovalidity="true"><type>float</type>          <name>maxContentLightLevel</name><comment>Content maximum luminance</comment></member>
+            <member noautovalidity="true"><type>float</type>          <name>maxFrameAverageLightLevel</name></member>
+        </type>
+        <type category="struct" name="VkDisplayNativeHdrSurfaceCapabilitiesAMD" returnedonly="true" structextends="VkSurfaceCapabilities2KHR">
+            <member values="VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*          <name>pNext</name></member>
+            <member><type>VkBool32</type>       <name>localDimmingSupport</name></member>
+        </type>
+        <type category="struct" name="VkSwapchainDisplayNativeHdrCreateInfoAMD" structextends="VkSwapchainCreateInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*    <name>pNext</name></member>
+            <member><type>VkBool32</type>       <name>localDimmingEnable</name></member>
+        </type>
+        <type category="struct" name="VkRefreshCycleDurationGOOGLE" returnedonly="true">
+            <member><type>uint64_t</type>                         <name>refreshDuration</name><comment>Number of nanoseconds from the start of one refresh cycle to the next</comment></member>
+        </type>
+        <type category="struct" name="VkPastPresentationTimingGOOGLE" returnedonly="true">
+            <member><type>uint32_t</type>                         <name>presentID</name><comment>Application-provided identifier, previously given to vkQueuePresentKHR</comment></member>
+            <member><type>uint64_t</type>                         <name>desiredPresentTime</name><comment>Earliest time an image should have been presented, previously given to vkQueuePresentKHR</comment></member>
+            <member><type>uint64_t</type>                         <name>actualPresentTime</name><comment>Time the image was actually displayed</comment></member>
+            <member><type>uint64_t</type>                         <name>earliestPresentTime</name><comment>Earliest time the image could have been displayed</comment></member>
+            <member><type>uint64_t</type>                         <name>presentMargin</name><comment>How early vkQueuePresentKHR was processed compared with how soon it needed to be processed to achieve earliestPresentTime</comment></member>
+        </type>
+        <type category="struct" name="VkPresentTimesInfoGOOGLE" structextends="VkPresentInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>uint32_t</type>                         <name>swapchainCount</name><comment>Copy of VkPresentInfoKHR::swapchainCount</comment></member>
+            <member len="swapchainCount" optional="true">const <type>VkPresentTimeGOOGLE</type>*   <name>pTimes</name><comment>The earliest times to present images</comment></member>
+        </type>
+        <type category="struct" name="VkPresentTimeGOOGLE">
+            <member><type>uint32_t</type>                         <name>presentID</name><comment>Application-provided identifier</comment></member>
+            <member><type>uint64_t</type>                         <name>desiredPresentTime</name><comment>Earliest time an image should be presented</comment></member>
+        </type>
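+        <!-- Illustrative usage sketch (not part of the registry data): requesting an earliest
+             present time through VK_GOOGLE_display_timing, assuming a hypothetical
+             VkPresentInfoKHR "presentInfo" already set up for one swapchain and a chosen
+             nanosecond timestamp "targetTime":
+
+                 VkPresentTimeGOOGLE presentTime = {0};
+                 presentTime.presentID = 42;                  /* application-chosen identifier */
+                 presentTime.desiredPresentTime = targetTime; /* do not display before this */
+
+                 VkPresentTimesInfoGOOGLE timesInfo = {0};
+                 timesInfo.sType = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE;
+                 timesInfo.swapchainCount = 1;                /* must match presentInfo */
+                 timesInfo.pTimes = &presentTime;
+                 timesInfo.pNext = presentInfo.pNext;
+                 presentInfo.pNext = &timesInfo;
+                 /* vkQueuePresentKHR(queue, &presentInfo); feedback is later available through
+                    vkGetPastPresentationTimingGOOGLE as VkPastPresentationTimingGOOGLE. */
+        -->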
+        <type category="struct" name="VkIOSSurfaceCreateInfoMVK">
+            <member values="VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                                    <name>pNext</name></member>
+            <member optional="true"><type>VkIOSSurfaceCreateFlagsMVK</type>     <name>flags</name></member>
+            <member noautovalidity="true">const <type>void</type>*                                    <name>pView</name></member>
+        </type>
+        <type category="struct" name="VkMacOSSurfaceCreateInfoMVK">
+            <member values="VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                                    <name>pNext</name></member>
+            <member optional="true"><type>VkMacOSSurfaceCreateFlagsMVK</type>   <name>flags</name></member>
+            <member noautovalidity="true">const <type>void</type>*                                    <name>pView</name></member>
+        </type>
+        <type category="struct" name="VkMetalSurfaceCreateInfoEXT">
+            <member values="VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                                    <name>pNext</name></member>
+            <member optional="true"><type>VkMetalSurfaceCreateFlagsEXT</type>   <name>flags</name></member>
+            <member noautovalidity="true">const <type>CAMetalLayer</type>*      <name>pLayer</name></member>
+        </type>
+        <type category="struct" name="VkViewportWScalingNV">
+            <member><type>float</type>          <name>xcoeff</name></member>
+            <member><type>float</type>          <name>ycoeff</name></member>
+        </type>
+        <type category="struct" name="VkPipelineViewportWScalingStateCreateInfoNV" structextends="VkPipelineViewportStateCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkBool32</type>               <name>viewportWScalingEnable</name></member>
+            <member><type>uint32_t</type>               <name>viewportCount</name></member>
+            <member noautovalidity="true" optional="true" len="viewportCount">const <type>VkViewportWScalingNV</type>*      <name>pViewportWScalings</name></member>
+        </type>
+        <type category="struct" name="VkViewportSwizzleNV">
+            <member><type>VkViewportCoordinateSwizzleNV</type>          <name>x</name></member>
+            <member><type>VkViewportCoordinateSwizzleNV</type>          <name>y</name></member>
+            <member><type>VkViewportCoordinateSwizzleNV</type>          <name>z</name></member>
+            <member><type>VkViewportCoordinateSwizzleNV</type>          <name>w</name></member>
+        </type>
+        <type category="struct" name="VkPipelineViewportSwizzleStateCreateInfoNV" structextends="VkPipelineViewportStateCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkPipelineViewportSwizzleStateCreateFlagsNV</type>    <name>flags</name></member>
+            <member><type>uint32_t</type>               <name>viewportCount</name></member>
+            <member len="viewportCount">const <type>VkViewportSwizzleNV</type>*      <name>pViewportSwizzles</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceDiscardRectanglePropertiesEXT" returnedonly="true" structextends="VkPhysicalDeviceProperties2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                  <name>pNext</name></member>
+            <member><type>uint32_t</type>               <name>maxDiscardRectangles</name><comment>max number of active discard rectangles</comment></member>
+        </type>
+        <type category="struct" name="VkPipelineDiscardRectangleStateCreateInfoEXT" structextends="VkGraphicsPipelineCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                                                                      <name>pNext</name></member>
+            <member optional="true"><type>VkPipelineDiscardRectangleStateCreateFlagsEXT</type>                    <name>flags</name></member>
+            <member><type>VkDiscardRectangleModeEXT</type>                                                        <name>discardRectangleMode</name></member>
+            <member optional="true"><type>uint32_t</type>                                                         <name>discardRectangleCount</name></member>
+            <member noautovalidity="true" optional="true" len="discardRectangleCount">const <type>VkRect2D</type>* <name>pDiscardRectangles</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX" returnedonly="true" structextends="VkPhysicalDeviceProperties2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkBool32</type>                         <name>perViewPositionAllComponents</name></member>
+        </type>
+        <type category="struct" name="VkInputAttachmentAspectReference">
+            <member><type>uint32_t</type>                        <name>subpass</name></member>
+            <member><type>uint32_t</type>                        <name>inputAttachmentIndex</name></member>
+            <member><type>VkImageAspectFlags</type>              <name>aspectMask</name></member>
+        </type>
+        <type category="struct" name="VkInputAttachmentAspectReferenceKHR"                     alias="VkInputAttachmentAspectReference"/>
+        <type category="struct" name="VkRenderPassInputAttachmentAspectCreateInfo" structextends="VkRenderPassCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                     <name>pNext</name></member>
+            <member><type>uint32_t</type>                        <name>aspectReferenceCount</name></member>
+            <member len="aspectReferenceCount">const <type>VkInputAttachmentAspectReference</type>* <name>pAspectReferences</name></member>
+        </type>
+        <type category="struct" name="VkRenderPassInputAttachmentAspectCreateInfoKHR"          alias="VkRenderPassInputAttachmentAspectCreateInfo"/>
+        <type category="struct" name="VkPhysicalDeviceSurfaceInfo2KHR">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>* <name>pNext</name></member>
+            <member><type>VkSurfaceKHR</type> <name>surface</name></member>
+        </type>
+        <type category="struct" name="VkSurfaceCapabilities2KHR" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*   <name>pNext</name></member>
+            <member><type>VkSurfaceCapabilitiesKHR</type> <name>surfaceCapabilities</name></member>
+        </type>
+        <type category="struct" name="VkSurfaceFormat2KHR" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>* <name>pNext</name></member>
+            <member><type>VkSurfaceFormatKHR</type> <name>surfaceFormat</name></member>
+        </type>
+        <type category="struct" name="VkDisplayProperties2KHR" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>* <name>pNext</name></member>
+            <member><type>VkDisplayPropertiesKHR</type> <name>displayProperties</name></member>
+        </type>
+        <type category="struct" name="VkDisplayPlaneProperties2KHR" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>* <name>pNext</name></member>
+            <member><type>VkDisplayPlanePropertiesKHR</type> <name>displayPlaneProperties</name></member>
+        </type>
+        <type category="struct" name="VkDisplayModeProperties2KHR" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>* <name>pNext</name></member>
+            <member><type>VkDisplayModePropertiesKHR</type> <name>displayModeProperties</name></member>
+        </type>
+        <type category="struct" name="VkDisplayPlaneInfo2KHR">
+            <member values="VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>* <name>pNext</name></member>
+            <member externsync="true"><type>VkDisplayModeKHR</type> <name>mode</name></member>
+            <member><type>uint32_t</type> <name>planeIndex</name></member>
+        </type>
+        <type category="struct" name="VkDisplayPlaneCapabilities2KHR" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>* <name>pNext</name></member>
+            <member><type>VkDisplayPlaneCapabilitiesKHR</type> <name>capabilities</name></member>
+        </type>
+        <type category="struct" name="VkSharedPresentSurfaceCapabilitiesKHR" returnedonly="true" structextends="VkSurfaceCapabilities2KHR">
+            <member values="VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*                            <name>pNext</name></member>
+            <member optional="true"><type>VkImageUsageFlags</type> <name>sharedPresentSupportedUsageFlags</name><comment>Supported image usage flags if swapchain created using a shared present mode</comment></member>
+        </type>
+        <type category="struct" name="VkPhysicalDevice16BitStorageFeatures" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*      <name>pNext</name></member>
+            <member><type>VkBool32</type>                         <name>storageBuffer16BitAccess</name><comment>16-bit integer/floating-point variables supported in BufferBlock</comment></member>
+            <member><type>VkBool32</type>                         <name>uniformAndStorageBuffer16BitAccess</name><comment>16-bit integer/floating-point variables supported in BufferBlock and Block</comment></member>
+            <member><type>VkBool32</type>                         <name>storagePushConstant16</name><comment>16-bit integer/floating-point variables supported in PushConstant</comment></member>
+            <member><type>VkBool32</type>                         <name>storageInputOutput16</name><comment>16-bit integer/floating-point variables supported in shader inputs and outputs</comment></member>
+        </type>
+        <type category="struct" name="VkPhysicalDevice16BitStorageFeaturesKHR"                 alias="VkPhysicalDevice16BitStorageFeatures"/>
+        <type category="struct" name="VkPhysicalDeviceSubgroupProperties" returnedonly="true" structextends="VkPhysicalDeviceProperties2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                   <name>pNext</name></member>
+            <member noautovalidity="true"><type>uint32_t</type>                      <name>subgroupSize</name><comment>The size of a subgroup for this queue.</comment></member>
+            <member noautovalidity="true"><type>VkShaderStageFlags</type>            <name>supportedStages</name><comment>Bitfield of what shader stages support subgroup operations</comment></member>
+            <member noautovalidity="true"><type>VkSubgroupFeatureFlags</type>        <name>supportedOperations</name><comment>Bitfield of what subgroup operations are supported.</comment></member>
+            <member noautovalidity="true"><type>VkBool32</type> <name>quadOperationsInAllStages</name><comment>Flag to specify whether quad operations are available in all stages.</comment></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+             <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR"><type>VkStructureType</type> <name>sType</name></member>
+             <member><type>void</type>*                          <name>pNext</name></member>
+             <member noautovalidity="true"><type>VkBool32</type> <name>shaderSubgroupExtendedTypes</name><comment>Flag to specify whether subgroup operations with extended types are supported</comment></member>
+        </type>
+        <type category="struct" name="VkBufferMemoryRequirementsInfo2">
+            <member values="VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                                                          <name>pNext</name></member>
+            <member><type>VkBuffer</type>                                                             <name>buffer</name></member>
+        </type>
+        <type category="struct" name="VkBufferMemoryRequirementsInfo2KHR"                      alias="VkBufferMemoryRequirementsInfo2"/>
+        <type category="struct" name="VkImageMemoryRequirementsInfo2">
+            <member values="VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                                                          <name>pNext</name></member>
+            <member><type>VkImage</type>                                                              <name>image</name></member>
+        </type>
+        <type category="struct" name="VkImageMemoryRequirementsInfo2KHR"                       alias="VkImageMemoryRequirementsInfo2"/>
+        <type category="struct" name="VkImageSparseMemoryRequirementsInfo2">
+            <member values="VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                                                          <name>pNext</name></member>
+            <member><type>VkImage</type>                                                              <name>image</name></member>
+        </type>
+        <type category="struct" name="VkImageSparseMemoryRequirementsInfo2KHR"                 alias="VkImageSparseMemoryRequirementsInfo2"/>
+        <type category="struct" name="VkMemoryRequirements2" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>* <name>pNext</name></member>
+            <member><type>VkMemoryRequirements</type>                                                 <name>memoryRequirements</name></member>
+        </type>
+        <type category="struct" name="VkMemoryRequirements2KHR"                                alias="VkMemoryRequirements2"/>
+        <type category="struct" name="VkSparseImageMemoryRequirements2" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                                       <name>pNext</name></member>
+            <member><type>VkSparseImageMemoryRequirements</type>                                      <name>memoryRequirements</name></member>
+        </type>
+        <type category="struct" name="VkSparseImageMemoryRequirements2KHR"                     alias="VkSparseImageMemoryRequirements2"/>
+        <type category="struct" name="VkPhysicalDevicePointClippingProperties" returnedonly="true" structextends="VkPhysicalDeviceProperties2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkPointClippingBehavior</type>      <name>pointClippingBehavior</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDevicePointClippingPropertiesKHR"              alias="VkPhysicalDevicePointClippingProperties"/>
+        <type category="struct" name="VkMemoryDedicatedRequirements" returnedonly="true" structextends="VkMemoryRequirements2">
+            <member values="VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkBool32</type>                         <name>prefersDedicatedAllocation</name></member>
+            <member><type>VkBool32</type>                         <name>requiresDedicatedAllocation</name></member>
+        </type>
+        <type category="struct" name="VkMemoryDedicatedRequirementsKHR"                        alias="VkMemoryDedicatedRequirements"/>
+        <type category="struct" name="VkMemoryDedicatedAllocateInfo" structextends="VkMemoryAllocateInfo">
+            <member values="VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>VkImage</type>          <name>image</name><comment>Image that this allocation will be bound to</comment></member>
+            <member optional="true"><type>VkBuffer</type>         <name>buffer</name><comment>Buffer that this allocation will be bound to</comment></member>
+        </type>
+        <type category="struct" name="VkMemoryDedicatedAllocateInfoKHR"                        alias="VkMemoryDedicatedAllocateInfo"/>
+        <type category="struct" name="VkImageViewUsageCreateInfo" structextends="VkImageViewCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>* <name>pNext</name></member>
+            <member><type>VkImageUsageFlags</type> <name>usage</name></member>
+        </type>
+        <type category="struct" name="VkImageViewUsageCreateInfoKHR"                           alias="VkImageViewUsageCreateInfo"/>
+        <type category="struct" name="VkPipelineTessellationDomainOriginStateCreateInfo" structextends="VkPipelineTessellationStateCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkTessellationDomainOrigin</type>    <name>domainOrigin</name></member>
+        </type>
+        <type category="struct" name="VkPipelineTessellationDomainOriginStateCreateInfoKHR"    alias="VkPipelineTessellationDomainOriginStateCreateInfo"/>
+        <type category="struct" name="VkSamplerYcbcrConversionInfo" structextends="VkSamplerCreateInfo,VkImageViewCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkSamplerYcbcrConversion</type>      <name>conversion</name></member>
+        </type>
+        <type category="struct" name="VkSamplerYcbcrConversionInfoKHR"                         alias="VkSamplerYcbcrConversionInfo"/>
+        <type category="struct" name="VkSamplerYcbcrConversionCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkFormat</type>                         <name>format</name></member>
+            <member><type>VkSamplerYcbcrModelConversion</type> <name>ycbcrModel</name></member>
+            <member><type>VkSamplerYcbcrRange</type>           <name>ycbcrRange</name></member>
+            <member><type>VkComponentMapping</type>               <name>components</name></member>
+            <member><type>VkChromaLocation</type>              <name>xChromaOffset</name></member>
+            <member><type>VkChromaLocation</type>              <name>yChromaOffset</name></member>
+            <member><type>VkFilter</type>                         <name>chromaFilter</name></member>
+            <member><type>VkBool32</type>                         <name>forceExplicitReconstruction</name></member>
+        </type>
+        <type category="struct" name="VkSamplerYcbcrConversionCreateInfoKHR"                   alias="VkSamplerYcbcrConversionCreateInfo"/>
+        <type category="struct" name="VkBindImagePlaneMemoryInfo" structextends="VkBindImageMemoryInfo">
+            <member values="VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkImageAspectFlagBits</type>            <name>planeAspect</name></member>
+        </type>
+        <type category="struct" name="VkBindImagePlaneMemoryInfoKHR"                           alias="VkBindImagePlaneMemoryInfo"/>
+        <type category="struct" name="VkImagePlaneMemoryRequirementsInfo" structextends="VkImageMemoryRequirementsInfo2">
+            <member values="VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkImageAspectFlagBits</type>            <name>planeAspect</name></member>
+        </type>
+        <type category="struct" name="VkImagePlaneMemoryRequirementsInfoKHR"                   alias="VkImagePlaneMemoryRequirementsInfo"/>
+        <type category="struct" name="VkPhysicalDeviceSamplerYcbcrConversionFeatures" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*      <name>pNext</name></member>
+            <member><type>VkBool32</type>                         <name>samplerYcbcrConversion</name><comment>Sampler color conversion supported</comment></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR"       alias="VkPhysicalDeviceSamplerYcbcrConversionFeatures"/>
+        <type category="struct" name="VkSamplerYcbcrConversionImageFormatProperties" returnedonly="true" structextends="VkImageFormatProperties2">
+            <member values="VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*      <name>pNext</name></member>
+            <member><type>uint32_t</type>                         <name>combinedImageSamplerDescriptorCount</name></member>
+        </type>
+        <type category="struct" name="VkSamplerYcbcrConversionImageFormatPropertiesKHR"        alias="VkSamplerYcbcrConversionImageFormatProperties"/>
+        <type category="struct" name="VkTextureLODGatherFormatPropertiesAMD" returnedonly="true" structextends="VkImageFormatProperties2">
+            <member values="VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkBool32</type>                         <name>supportsTextureGatherLODBiasAMD</name></member>
+        </type>
+        <type category="struct" name="VkConditionalRenderingBeginInfoEXT">
+            <member values="VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkBuffer</type>                         <name>buffer</name></member>
+            <member><type>VkDeviceSize</type>                     <name>offset</name></member>
+            <member optional="true"><type>VkConditionalRenderingFlagsEXT</type>    <name>flags</name></member>
+        </type>
+        <type category="struct" name="VkProtectedSubmitInfo" structextends="VkSubmitInfo">
+            <member values="VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                     <name>pNext</name></member>
+            <member><type>VkBool32</type>                        <name>protectedSubmit</name><comment>Submit protected command buffers</comment></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceProtectedMemoryFeatures" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                               <name>pNext</name></member>
+            <member><type>VkBool32</type>                            <name>protectedMemory</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceProtectedMemoryProperties" returnedonly="true" structextends="VkPhysicalDeviceProperties2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                               <name>pNext</name></member>
+            <member><type>VkBool32</type>                            <name>protectedNoFault</name></member>
+        </type>
+        <type category="struct" name="VkDeviceQueueInfo2">
+            <member values="VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                         <name>pNext</name></member>
+            <member optional="true"><type>VkDeviceQueueCreateFlags</type>            <name>flags</name></member>
+            <member><type>uint32_t</type>                            <name>queueFamilyIndex</name></member>
+            <member><type>uint32_t</type>                            <name>queueIndex</name></member>
+        </type>
+        <type category="struct" name="VkPipelineCoverageToColorStateCreateInfoNV" structextends="VkPipelineMultisampleStateCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                                                                      <name>pNext</name></member>
+            <member optional="true"><type>VkPipelineCoverageToColorStateCreateFlagsNV</type>                    <name>flags</name></member>
+            <member><type>VkBool32</type>                         <name>coverageToColorEnable</name></member>
+            <member optional="true"><type>uint32_t</type>         <name>coverageToColorLocation</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT" returnedonly="true" structextends="VkPhysicalDeviceProperties2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                  <name>pNext</name></member>
+            <member><type>VkBool32</type>               <name>filterMinmaxSingleComponentFormats</name></member>
+            <member><type>VkBool32</type>               <name>filterMinmaxImageComponentMapping</name></member>
+        </type>
+        <type category="struct" name="VkSampleLocationEXT">
+            <member><type>float</type>                            <name>x</name></member>
+            <member><type>float</type>                            <name>y</name></member>
+        </type>
+        <type category="struct" name="VkSampleLocationsInfoEXT" structextends="VkImageMemoryBarrier">
+            <member values="VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                            <name>pNext</name></member>
+            <member optional="true"><type>VkSampleCountFlagBits</type>  <name>sampleLocationsPerPixel</name></member>
+            <member><type>VkExtent2D</type>                             <name>sampleLocationGridSize</name></member>
+            <member optional="true"><type>uint32_t</type>               <name>sampleLocationsCount</name></member>
+            <member len="sampleLocationsCount">const <type>VkSampleLocationEXT</type>* <name>pSampleLocations</name></member>
+        </type>
+        <type category="struct" name="VkAttachmentSampleLocationsEXT">
+            <member><type>uint32_t</type>                         <name>attachmentIndex</name></member>
+            <member><type>VkSampleLocationsInfoEXT</type>         <name>sampleLocationsInfo</name></member>
+        </type>
+        <type category="struct" name="VkSubpassSampleLocationsEXT">
+            <member><type>uint32_t</type>                         <name>subpassIndex</name></member>
+            <member><type>VkSampleLocationsInfoEXT</type>         <name>sampleLocationsInfo</name></member>
+        </type>
+        <type category="struct" name="VkRenderPassSampleLocationsBeginInfoEXT" structextends="VkRenderPassBeginInfo">
+            <member values="VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>uint32_t</type>         <name>attachmentInitialSampleLocationsCount</name></member>
+            <member len="attachmentInitialSampleLocationsCount">const <type>VkAttachmentSampleLocationsEXT</type>* <name>pAttachmentInitialSampleLocations</name></member>
+            <member optional="true"><type>uint32_t</type>         <name>postSubpassSampleLocationsCount</name></member>
+            <member len="postSubpassSampleLocationsCount">const <type>VkSubpassSampleLocationsEXT</type>* <name>pPostSubpassSampleLocations</name></member>
+        </type>
+        <type category="struct" name="VkPipelineSampleLocationsStateCreateInfoEXT" structextends="VkPipelineMultisampleStateCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkBool32</type>                         <name>sampleLocationsEnable</name></member>
+            <member><type>VkSampleLocationsInfoEXT</type>         <name>sampleLocationsInfo</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceSampleLocationsPropertiesEXT" returnedonly="true" structextends="VkPhysicalDeviceProperties2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkSampleCountFlags</type>               <name>sampleLocationSampleCounts</name></member>
+            <member><type>VkExtent2D</type>                       <name>maxSampleLocationGridSize</name></member>
+            <member><type>float</type>                            <name>sampleLocationCoordinateRange</name>[2]</member>
+            <member><type>uint32_t</type>                         <name>sampleLocationSubPixelBits</name></member>
+            <member><type>VkBool32</type>                         <name>variableSampleLocations</name></member>
+        </type>
+        <type category="struct" name="VkMultisamplePropertiesEXT" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkExtent2D</type>                       <name>maxSampleLocationGridSize</name></member>
+        </type>
+        <type category="struct" name="VkSamplerReductionModeCreateInfoEXT" structextends="VkSamplerCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member><type>VkSamplerReductionModeEXT</type> <name>reductionMode</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkBool32</type>                         <name>advancedBlendCoherentOperations</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT" returnedonly="true" structextends="VkPhysicalDeviceProperties2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*                            <name>pNext</name></member>
+            <member><type>uint32_t</type>                         <name>advancedBlendMaxColorAttachments</name></member>
+            <member><type>VkBool32</type>                         <name>advancedBlendIndependentBlend</name></member>
+            <member><type>VkBool32</type>                         <name>advancedBlendNonPremultipliedSrcColor</name></member>
+            <member><type>VkBool32</type>                         <name>advancedBlendNonPremultipliedDstColor</name></member>
+            <member><type>VkBool32</type>                         <name>advancedBlendCorrelatedOverlap</name></member>
+            <member><type>VkBool32</type>                         <name>advancedBlendAllOperations</name></member>
+        </type>
+        <type category="struct" name="VkPipelineColorBlendAdvancedStateCreateInfoEXT" structextends="VkPipelineColorBlendStateCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member><type>VkBool32</type>               <name>srcPremultiplied</name></member>
+            <member><type>VkBool32</type>               <name>dstPremultiplied</name></member>
+            <member><type>VkBlendOverlapEXT</type>      <name>blendOverlap</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceInlineUniformBlockFeaturesEXT" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                  <name>pNext</name></member>
+            <member><type>VkBool32</type>               <name>inlineUniformBlock</name></member>
+            <member><type>VkBool32</type>               <name>descriptorBindingInlineUniformBlockUpdateAfterBind</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceInlineUniformBlockPropertiesEXT" returnedonly="true" structextends="VkPhysicalDeviceProperties2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                  <name>pNext</name></member>
+            <member><type>uint32_t</type>               <name>maxInlineUniformBlockSize</name></member>
+            <member><type>uint32_t</type>               <name>maxPerStageDescriptorInlineUniformBlocks</name></member>
+            <member><type>uint32_t</type>               <name>maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks</name></member>
+            <member><type>uint32_t</type>               <name>maxDescriptorSetInlineUniformBlocks</name></member>
+            <member><type>uint32_t</type>               <name>maxDescriptorSetUpdateAfterBindInlineUniformBlocks</name></member>
+        </type>
+        <type category="struct" name="VkWriteDescriptorSetInlineUniformBlockEXT" structextends="VkWriteDescriptorSet">
+            <member values="VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member><type>uint32_t</type>               <name>dataSize</name></member>
+            <member len="dataSize">const <type>void</type>* <name>pData</name></member>
+        </type>
+        <type category="struct" name="VkDescriptorPoolInlineUniformBlockCreateInfoEXT" structextends="VkDescriptorPoolCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member><type>uint32_t</type>               <name>maxInlineUniformBlockBindings</name></member>
+        </type>
+        <type category="struct" name="VkPipelineCoverageModulationStateCreateInfoNV" structextends="VkPipelineMultisampleStateCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                                                                      <name>pNext</name></member>
+            <member optional="true"><type>VkPipelineCoverageModulationStateCreateFlagsNV</type>                   <name>flags</name></member>
+            <member><type>VkCoverageModulationModeNV</type>                                                       <name>coverageModulationMode</name></member>
+            <member><type>VkBool32</type>                                                                         <name>coverageModulationTableEnable</name></member>
+            <member optional="true"><type>uint32_t</type>                                                         <name>coverageModulationTableCount</name></member>
+            <member noautovalidity="true" optional="true" len="coverageModulationTableCount">const <type>float</type>* <name>pCoverageModulationTable</name></member>
+        </type>
+        <type category="struct" name="VkImageFormatListCreateInfoKHR" structextends="VkImageCreateInfo,VkSwapchainCreateInfoKHR,VkPhysicalDeviceImageFormatInfo2">
+            <member values="VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>uint32_t</type>               <name>viewFormatCount</name></member>
+            <member len="viewFormatCount">const <type>VkFormat</type>*      <name>pViewFormats</name></member>
+        </type>
+        <type category="struct" name="VkValidationCacheCreateInfoEXT">
+            <member values="VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkValidationCacheCreateFlagsEXT</type>    <name>flags</name></member>
+            <member optional="true"><type>size_t</type>                 <name>initialDataSize</name></member>
+            <member len="initialDataSize">const <type>void</type>*            <name>pInitialData</name></member>
+        </type>
+        <type category="struct" name="VkShaderModuleValidationCacheCreateInfoEXT" structextends="VkShaderModuleCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member><type>VkValidationCacheEXT</type>    <name>validationCache</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceMaintenance3Properties" returnedonly="true" structextends="VkPhysicalDeviceProperties2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>uint32_t</type>                         <name>maxPerSetDescriptors</name></member>
+            <member><type>VkDeviceSize</type>                     <name>maxMemoryAllocationSize</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceMaintenance3PropertiesKHR"               alias="VkPhysicalDeviceMaintenance3Properties"/>
+        <type category="struct" name="VkDescriptorSetLayoutSupport" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*            <name>pNext</name></member>
+            <member><type>VkBool32</type>         <name>supported</name></member>
+        </type>
+        <type category="struct" name="VkDescriptorSetLayoutSupportKHR"                         alias="VkDescriptorSetLayoutSupport"/>
+        <type category="struct" name="VkPhysicalDeviceShaderDrawParametersFeatures" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkBool32</type>                         <name>shaderDrawParameters</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceShaderDrawParameterFeatures"             alias="VkPhysicalDeviceShaderDrawParametersFeatures"/>
+        <type category="struct" name="VkPhysicalDeviceShaderFloat16Int8FeaturesKHR" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*      <name>pNext</name></member>                            <!-- Pointer to next structure -->
+            <member><type>VkBool32</type>                         <name>shaderFloat16</name></member>                 <!-- 16-bit floats (halfs) in shaders -->
+            <member><type>VkBool32</type>                         <name>shaderInt8</name></member>                    <!-- 8-bit integers in shaders -->
+        </type>
+        <type category="struct" name="VkPhysicalDeviceFloat16Int8FeaturesKHR" alias="VkPhysicalDeviceShaderFloat16Int8FeaturesKHR"/>
+        <type category="struct" name="VkPhysicalDeviceFloatControlsPropertiesKHR" returnedonly="true" structextends="VkPhysicalDeviceProperties2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkShaderFloatControlsIndependenceKHR</type> <name>denormBehaviorIndependence</name></member>
+            <member><type>VkShaderFloatControlsIndependenceKHR</type> <name>roundingModeIndependence</name></member>
+            <member><type>VkBool32</type>                         <name>shaderSignedZeroInfNanPreserveFloat16</name></member>  <!-- An implementation can preserve signed zero, nan, inf -->
+            <member><type>VkBool32</type>                         <name>shaderSignedZeroInfNanPreserveFloat32</name></member>  <!-- An implementation can preserve signed zero, nan, inf -->
+            <member><type>VkBool32</type>                         <name>shaderSignedZeroInfNanPreserveFloat64</name></member>  <!-- An implementation can preserve signed zero, nan, inf -->
+            <member><type>VkBool32</type>                         <name>shaderDenormPreserveFloat16</name></member>            <!-- An implementation can preserve  denormals -->
+            <member><type>VkBool32</type>                         <name>shaderDenormPreserveFloat32</name></member>            <!-- An implementation can preserve  denormals -->
+            <member><type>VkBool32</type>                         <name>shaderDenormPreserveFloat64</name></member>            <!-- An implementation can preserve  denormals -->
+            <member><type>VkBool32</type>                         <name>shaderDenormFlushToZeroFloat16</name></member>         <!-- An implementation can flush to zero  denormals -->
+            <member><type>VkBool32</type>                         <name>shaderDenormFlushToZeroFloat32</name></member>         <!-- An implementation can flush to zero  denormals -->
+            <member><type>VkBool32</type>                         <name>shaderDenormFlushToZeroFloat64</name></member>         <!-- An implementation can flush to zero  denormals -->
+            <member><type>VkBool32</type>                         <name>shaderRoundingModeRTEFloat16</name></member>           <!-- An implementation can support RTE -->
+            <member><type>VkBool32</type>                         <name>shaderRoundingModeRTEFloat32</name></member>           <!-- An implementation can support RTE -->
+            <member><type>VkBool32</type>                         <name>shaderRoundingModeRTEFloat64</name></member>           <!-- An implementation can support RTE -->
+            <member><type>VkBool32</type>                         <name>shaderRoundingModeRTZFloat16</name></member>           <!-- An implementation can support RTZ -->
+            <member><type>VkBool32</type>                         <name>shaderRoundingModeRTZFloat32</name></member>           <!-- An implementation can support RTZ -->
+            <member><type>VkBool32</type>                         <name>shaderRoundingModeRTZFloat64</name></member>           <!-- An implementation can support RTZ -->
+        </type>
+        <type category="struct" name="VkPhysicalDeviceHostQueryResetFeaturesEXT" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*        <name>pNext</name></member>
+            <member><type>VkBool32</type>                           <name>hostQueryReset</name></member>
+        </type>
+        <type category="struct" name="VkNativeBufferUsage2ANDROID">
+            <member><type>uint64_t</type> <name>consumer</name></member>
+            <member><type>uint64_t</type> <name>producer</name></member>
+        </type>
+        <type category="struct" name="VkNativeBufferANDROID">
+            <member values="VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>* <name>pNext</name></member>
+            <member>const <type>void</type>* <name>handle</name></member>
+            <member><type>int</type> <name>stride</name></member>
+            <member><type>int</type> <name>format</name></member>
+            <member><type>int</type> <name>usage</name></member>
+            <member><type>VkNativeBufferUsage2ANDROID</type> <name>usage2</name></member>
+        </type>
+        <type category="struct" name="VkSwapchainImageCreateInfoANDROID">
+            <member values="VK_STRUCTURE_TYPE_SWAPCHAIN_IMAGE_CREATE_INFO_ANDROID"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>* <name>pNext</name></member>
+            <member><type>VkSwapchainImageUsageFlagsANDROID</type> <name>usage</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDevicePresentationPropertiesANDROID">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENTATION_PROPERTIES_ANDROID"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>* <name>pNext</name></member>
+            <member><type>VkBool32</type> <name>sharedImage</name></member>
+        </type>
+        <type category="struct" name="VkShaderResourceUsageAMD" returnedonly="true">
+            <member><type>uint32_t</type> <name>numUsedVgprs</name></member>
+            <member><type>uint32_t</type> <name>numUsedSgprs</name></member>
+            <member><type>uint32_t</type> <name>ldsSizePerLocalWorkGroup</name></member>
+            <member><type>size_t</type> <name>ldsUsageSizeInBytes</name></member>
+            <member><type>size_t</type> <name>scratchMemUsageInBytes</name></member>
+        </type>
+        <type category="struct" name="VkShaderStatisticsInfoAMD" returnedonly="true">
+            <member><type>VkShaderStageFlags</type> <name>shaderStageMask</name></member>
+            <member><type>VkShaderResourceUsageAMD</type> <name>resourceUsage</name></member>
+            <member><type>uint32_t</type> <name>numPhysicalVgprs</name></member>
+            <member><type>uint32_t</type> <name>numPhysicalSgprs</name></member>
+            <member><type>uint32_t</type> <name>numAvailableVgprs</name></member>
+            <member><type>uint32_t</type> <name>numAvailableSgprs</name></member>
+            <member><type>uint32_t</type> <name>computeWorkGroupSize</name>[3]</member>
+        </type>
+        <type category="struct" name="VkDeviceQueueGlobalPriorityCreateInfoEXT" structextends="VkDeviceQueueCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                    <name>pNext</name></member>
+            <member><type>VkQueueGlobalPriorityEXT</type>       <name>globalPriority</name></member>
+        </type>
+        <type category="struct" name="VkDebugUtilsObjectNameInfoEXT">
+            <member values="VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                                            <name>pNext</name></member>
+            <member><type>VkObjectType</type>                                           <name>objectType</name></member>
+            <member><type>uint64_t</type>                                               <name>objectHandle</name></member>
+            <member optional="true" len="null-terminated">const <type>char</type>*      <name>pObjectName</name></member>
+        </type>
+        <type category="struct" name="VkDebugUtilsObjectTagInfoEXT">
+            <member values="VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkObjectType</type>                           <name>objectType</name></member>
+            <member><type>uint64_t</type>                               <name>objectHandle</name></member>
+            <member><type>uint64_t</type>                               <name>tagName</name></member>
+            <member><type>size_t</type>                                 <name>tagSize</name></member>
+            <member len="tagSize">const <type>void</type>*              <name>pTag</name></member>
+        </type>
+        <type category="struct" name="VkDebugUtilsLabelEXT">
+            <member values="VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                            <name>pNext</name></member>
+            <member len="null-terminated">const <type>char</type>*      <name>pLabelName</name></member>
+            <member optional="true"><type>float</type>                  <name>color</name>[4]</member>
+        </type>
+        <type category="struct" name="VkDebugUtilsMessengerCreateInfoEXT" structextends="VkInstanceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                                          <name>pNext</name></member>
+            <member optional="true"><type>VkDebugUtilsMessengerCreateFlagsEXT</type>  <name>flags</name></member>
+            <member><type>VkDebugUtilsMessageSeverityFlagsEXT</type>                  <name>messageSeverity</name></member>
+            <member><type>VkDebugUtilsMessageTypeFlagsEXT</type>                      <name>messageType</name></member>
+            <member><type>PFN_vkDebugUtilsMessengerCallbackEXT</type>                 <name>pfnUserCallback</name></member>
+            <member optional="true"><type>void</type>*                                <name>pUserData</name></member>
+        </type>
+        <type category="struct" name="VkDebugUtilsMessengerCallbackDataEXT">
+            <member values="VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member optional="true">const <type>void</type>*                                                        <name>pNext</name></member>
+            <member optional="true"><type>VkDebugUtilsMessengerCallbackDataFlagsEXT</type>                          <name>flags</name></member>
+            <member optional="true" len="null-terminated">const <type>char</type>*                                  <name>pMessageIdName</name></member>
+            <member optional="true"><type>int32_t</type>                                                            <name>messageIdNumber</name></member>
+            <member len="null-terminated">const <type>char</type>*                                                  <name>pMessage</name></member>
+            <member optional="true"><type>uint32_t</type>                                                           <name>queueLabelCount</name></member>
+            <member len="queueLabelCount">const <type>VkDebugUtilsLabelEXT</type>*                  <name>pQueueLabels</name></member>
+            <member optional="true"><type>uint32_t</type>                                                           <name>cmdBufLabelCount</name></member>
+            <member len="cmdBufLabelCount">const <type>VkDebugUtilsLabelEXT</type>*                 <name>pCmdBufLabels</name></member>
+            <member optional="true"><type>uint32_t</type>                                                           <name>objectCount</name></member>
+            <member len="objectCount">const <type>VkDebugUtilsObjectNameInfoEXT</type>*             <name>pObjects</name></member>
+        </type>
+        <type category="struct" name="VkImportMemoryHostPointerInfoEXT" structextends="VkMemoryAllocateInfo">
+            <member values="VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>* <name>pNext</name></member>
+            <member><type>VkExternalMemoryHandleTypeFlagBits</type> <name>handleType</name></member>
+            <member optional="false"><type>void</type>* <name>pHostPointer</name></member>
+        </type>
+        <type category="struct" name="VkMemoryHostPointerPropertiesEXT" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>* <name>pNext</name></member>
+            <member><type>uint32_t</type> <name>memoryTypeBits</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceExternalMemoryHostPropertiesEXT" returnedonly="true" structextends="VkPhysicalDeviceProperties2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>* <name>pNext</name></member>
+            <member><type>VkDeviceSize</type> <name>minImportedHostPointerAlignment</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceConservativeRasterizationPropertiesEXT" returnedonly="true" structextends="VkPhysicalDeviceProperties2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                  <name>pNext</name><comment>Pointer to next structure</comment></member>
+            <member><type>float</type>                  <name>primitiveOverestimationSize</name><comment>The size in pixels the primitive is enlarged at each edge during conservative rasterization</comment></member>
+            <member><type>float</type>                  <name>maxExtraPrimitiveOverestimationSize</name><comment>The maximum additional overestimation the client can specify in the pipeline state</comment></member>
+            <member><type>float</type>                  <name>extraPrimitiveOverestimationSizeGranularity</name><comment>The granularity of extra overestimation sizes the implementations supports between 0 and maxExtraOverestimationSize</comment></member>
+            <member><type>VkBool32</type>               <name>primitiveUnderestimation</name><comment>true if the implementation supports conservative rasterization underestimation mode</comment></member>
+            <member><type>VkBool32</type>               <name>conservativePointAndLineRasterization</name><comment>true if conservative rasterization also applies to points and lines</comment></member>
+            <member><type>VkBool32</type>               <name>degenerateTrianglesRasterized</name><comment>true if degenerate triangles (those with zero area after snap) are rasterized</comment></member>
+            <member><type>VkBool32</type>               <name>degenerateLinesRasterized</name><comment>true if degenerate lines (those with zero length after snap) are rasterized</comment></member>
+            <member><type>VkBool32</type>               <name>fullyCoveredFragmentShaderInputVariable</name><comment>true if the implementation supports the FullyCoveredEXT SPIR-V builtin fragment shader input variable</comment></member>
+            <member><type>VkBool32</type>               <name>conservativeRasterizationPostDepthCoverage</name><comment>true if the implementation supports both conservative rasterization and post depth coverage sample coverage mask</comment></member>
+        </type>
+        <type category="struct" name="VkCalibratedTimestampInfoEXT">
+            <member values="VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member><type>VkTimeDomainEXT</type>        <name>timeDomain</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceShaderCorePropertiesAMD" returnedonly="true" structextends="VkPhysicalDeviceProperties2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*    <name>pNext</name><comment>Pointer to next structure</comment></member>
+            <member><type>uint32_t</type> <name>shaderEngineCount</name><comment>number of shader engines</comment></member>
+            <member><type>uint32_t</type> <name>shaderArraysPerEngineCount</name><comment>number of shader arrays</comment></member>
+            <member><type>uint32_t</type> <name>computeUnitsPerShaderArray</name><comment>number of physical CUs per shader array</comment></member>
+            <member><type>uint32_t</type> <name>simdPerComputeUnit</name><comment>number of SIMDs per compute unit</comment></member>
+            <member><type>uint32_t</type> <name>wavefrontsPerSimd</name><comment>number of wavefront slots in each SIMD</comment></member>
+            <member><type>uint32_t</type> <name>wavefrontSize</name><comment>maximum number of threads per wavefront</comment></member>
+            <member><type>uint32_t</type> <name>sgprsPerSimd</name><comment>number of physical SGPRs per SIMD</comment></member>
+            <member><type>uint32_t</type> <name>minSgprAllocation</name><comment>minimum number of SGPRs that can be allocated by a wave</comment></member>
+            <member><type>uint32_t</type> <name>maxSgprAllocation</name><comment>number of available SGPRs</comment></member>
+            <member><type>uint32_t</type> <name>sgprAllocationGranularity</name><comment>SGPRs are allocated in groups of this size</comment></member>
+            <member><type>uint32_t</type> <name>vgprsPerSimd</name><comment>number of physical VGPRs per SIMD</comment></member>
+            <member><type>uint32_t</type> <name>minVgprAllocation</name><comment>minimum number of VGPRs that can be allocated by a wave</comment></member>
+            <member><type>uint32_t</type> <name>maxVgprAllocation</name><comment>number of available VGPRs</comment></member>
+            <member><type>uint32_t</type> <name>vgprAllocationGranularity</name><comment>VGPRs are allocated in groups of this size</comment></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceShaderCoreProperties2AMD" returnedonly="true" structextends="VkPhysicalDeviceProperties2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*    <name>pNext</name><comment>Pointer to next structure</comment></member>
+            <member><type>VkShaderCorePropertiesFlagsAMD</type> <name>shaderCoreFeatures</name><comment>features supported by the shader core</comment></member>
+            <member><type>uint32_t</type> <name>activeComputeUnitCount</name><comment>number of active compute units across all shader engines/arrays</comment></member>
+        </type>
+        <type category="struct" name="VkPipelineRasterizationConservativeStateCreateInfoEXT" structextends="VkPipelineRasterizationStateCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                                                                      <name>pNext</name></member>                 <!-- Pointer to next structure -->
+            <member optional="true"><type>VkPipelineRasterizationConservativeStateCreateFlagsEXT</type>           <name>flags</name></member>                 <!-- Reserved -->
+            <member><type>VkConservativeRasterizationModeEXT</type>                                               <name>conservativeRasterizationMode</name></member>      <!-- Conservative rasterization mode -->
+            <member><type>float</type>                                                                            <name>extraPrimitiveOverestimationSize</name></member>   <!-- Extra overestimation to add to the primitive -->
+        </type>
+        <type category="struct" name="VkPhysicalDeviceDescriptorIndexingFeaturesEXT" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkBool32</type>               <name>shaderInputAttachmentArrayDynamicIndexing</name></member>
+            <member><type>VkBool32</type>               <name>shaderUniformTexelBufferArrayDynamicIndexing</name></member>
+            <member><type>VkBool32</type>               <name>shaderStorageTexelBufferArrayDynamicIndexing</name></member>
+            <member><type>VkBool32</type>               <name>shaderUniformBufferArrayNonUniformIndexing</name></member>
+            <member><type>VkBool32</type>               <name>shaderSampledImageArrayNonUniformIndexing</name></member>
+            <member><type>VkBool32</type>               <name>shaderStorageBufferArrayNonUniformIndexing</name></member>
+            <member><type>VkBool32</type>               <name>shaderStorageImageArrayNonUniformIndexing</name></member>
+            <member><type>VkBool32</type>               <name>shaderInputAttachmentArrayNonUniformIndexing</name></member>
+            <member><type>VkBool32</type>               <name>shaderUniformTexelBufferArrayNonUniformIndexing</name></member>
+            <member><type>VkBool32</type>               <name>shaderStorageTexelBufferArrayNonUniformIndexing</name></member>
+            <member><type>VkBool32</type>               <name>descriptorBindingUniformBufferUpdateAfterBind</name></member>
+            <member><type>VkBool32</type>               <name>descriptorBindingSampledImageUpdateAfterBind</name></member>
+            <member><type>VkBool32</type>               <name>descriptorBindingStorageImageUpdateAfterBind</name></member>
+            <member><type>VkBool32</type>               <name>descriptorBindingStorageBufferUpdateAfterBind</name></member>
+            <member><type>VkBool32</type>               <name>descriptorBindingUniformTexelBufferUpdateAfterBind</name></member>
+            <member><type>VkBool32</type>               <name>descriptorBindingStorageTexelBufferUpdateAfterBind</name></member>
+            <member><type>VkBool32</type>               <name>descriptorBindingUpdateUnusedWhilePending</name></member>
+            <member><type>VkBool32</type>               <name>descriptorBindingPartiallyBound</name></member>
+            <member><type>VkBool32</type>               <name>descriptorBindingVariableDescriptorCount</name></member>
+            <member><type>VkBool32</type>               <name>runtimeDescriptorArray</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceDescriptorIndexingPropertiesEXT" returnedonly="true" structextends="VkPhysicalDeviceProperties2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*                            <name>pNext</name></member>
+            <member><type>uint32_t</type>               <name>maxUpdateAfterBindDescriptorsInAllPools</name></member>
+            <member><type>VkBool32</type>               <name>shaderUniformBufferArrayNonUniformIndexingNative</name></member>
+            <member><type>VkBool32</type>               <name>shaderSampledImageArrayNonUniformIndexingNative</name></member>
+            <member><type>VkBool32</type>               <name>shaderStorageBufferArrayNonUniformIndexingNative</name></member>
+            <member><type>VkBool32</type>               <name>shaderStorageImageArrayNonUniformIndexingNative</name></member>
+            <member><type>VkBool32</type>               <name>shaderInputAttachmentArrayNonUniformIndexingNative</name></member>
+            <member><type>VkBool32</type>               <name>robustBufferAccessUpdateAfterBind</name></member>
+            <member><type>VkBool32</type>               <name>quadDivergentImplicitLod</name></member>
+            <member><type>uint32_t</type>               <name>maxPerStageDescriptorUpdateAfterBindSamplers</name></member>
+            <member><type>uint32_t</type>               <name>maxPerStageDescriptorUpdateAfterBindUniformBuffers</name></member>
+            <member><type>uint32_t</type>               <name>maxPerStageDescriptorUpdateAfterBindStorageBuffers</name></member>
+            <member><type>uint32_t</type>               <name>maxPerStageDescriptorUpdateAfterBindSampledImages</name></member>
+            <member><type>uint32_t</type>               <name>maxPerStageDescriptorUpdateAfterBindStorageImages</name></member>
+            <member><type>uint32_t</type>               <name>maxPerStageDescriptorUpdateAfterBindInputAttachments</name></member>
+            <member><type>uint32_t</type>               <name>maxPerStageUpdateAfterBindResources</name></member>
+            <member><type>uint32_t</type>               <name>maxDescriptorSetUpdateAfterBindSamplers</name></member>
+            <member><type>uint32_t</type>               <name>maxDescriptorSetUpdateAfterBindUniformBuffers</name></member>
+            <member><type>uint32_t</type>               <name>maxDescriptorSetUpdateAfterBindUniformBuffersDynamic</name></member>
+            <member><type>uint32_t</type>               <name>maxDescriptorSetUpdateAfterBindStorageBuffers</name></member>
+            <member><type>uint32_t</type>               <name>maxDescriptorSetUpdateAfterBindStorageBuffersDynamic</name></member>
+            <member><type>uint32_t</type>               <name>maxDescriptorSetUpdateAfterBindSampledImages</name></member>
+            <member><type>uint32_t</type>               <name>maxDescriptorSetUpdateAfterBindStorageImages</name></member>
+            <member><type>uint32_t</type>               <name>maxDescriptorSetUpdateAfterBindInputAttachments</name></member>
+        </type>
+        <type category="struct" name="VkDescriptorSetLayoutBindingFlagsCreateInfoEXT" structextends="VkDescriptorSetLayoutCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>uint32_t</type>               <name>bindingCount</name></member>
+            <member len="bindingCount" optional="true">const <type>VkDescriptorBindingFlagsEXT</type>* <name>pBindingFlags</name></member>
+        </type>
+        <type category="struct" name="VkDescriptorSetVariableDescriptorCountAllocateInfoEXT" structextends="VkDescriptorSetAllocateInfo">
+            <member values="VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>uint32_t</type>               <name>descriptorSetCount</name></member>
+            <member len="descriptorSetCount">const <type>uint32_t</type>* <name>pDescriptorCounts</name></member>
+        </type>
+        <type category="struct" name="VkDescriptorSetVariableDescriptorCountLayoutSupportEXT" structextends="VkDescriptorSetLayoutSupport" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*            <name>pNext</name></member>
+            <member><type>uint32_t</type>         <name>maxVariableDescriptorCount</name></member>
+        </type>
+        <type category="struct" name="VkAttachmentDescription2KHR">
+            <member values="VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true">const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkAttachmentDescriptionFlags</type> <name>flags</name></member>
+            <member><type>VkFormat</type>                                     <name>format</name></member>
+            <member><type>VkSampleCountFlagBits</type>                        <name>samples</name></member>
+            <member><type>VkAttachmentLoadOp</type>                           <name>loadOp</name><comment>Load operation for color or depth data</comment></member>
+            <member><type>VkAttachmentStoreOp</type>                          <name>storeOp</name><comment>Store operation for color or depth data</comment></member>
+            <member><type>VkAttachmentLoadOp</type>                           <name>stencilLoadOp</name><comment>Load operation for stencil data</comment></member>
+            <member><type>VkAttachmentStoreOp</type>                          <name>stencilStoreOp</name><comment>Store operation for stencil data</comment></member>
+            <member><type>VkImageLayout</type>                                <name>initialLayout</name></member>
+            <member><type>VkImageLayout</type>                                <name>finalLayout</name></member>
+        </type>
+        <type category="struct" name="VkAttachmentReference2KHR">
+            <member values="VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true">const <type>void</type>* <name>pNext</name></member>
+            <member><type>uint32_t</type>                          <name>attachment</name></member>
+            <member><type>VkImageLayout</type>                     <name>layout</name></member>
+            <member noautovalidity="true"><type>VkImageAspectFlags</type> <name>aspectMask</name></member>
+        </type>
+        <type category="struct" name="VkSubpassDescription2KHR">
+            <member values="VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true">const <type>void</type>*                           <name>pNext</name></member>
+            <member optional="true"><type>VkSubpassDescriptionFlags</type>                   <name>flags</name></member>
+            <member><type>VkPipelineBindPoint</type>                                         <name>pipelineBindPoint</name></member>
+            <member><type>uint32_t</type>                                                    <name>viewMask</name></member>
+            <member optional="true"><type>uint32_t</type>                                    <name>inputAttachmentCount</name></member>
+            <member len="inputAttachmentCount">const <type>VkAttachmentReference2KHR</type>* <name>pInputAttachments</name></member>
+            <member optional="true"><type>uint32_t</type>                                    <name>colorAttachmentCount</name></member>
+            <member len="colorAttachmentCount">const <type>VkAttachmentReference2KHR</type>* <name>pColorAttachments</name></member>
+            <member optional="true" len="colorAttachmentCount">const <type>VkAttachmentReference2KHR</type>* <name>pResolveAttachments</name></member>
+            <member optional="true">const <type>VkAttachmentReference2KHR</type>*            <name>pDepthStencilAttachment</name></member>
+            <member optional="true"><type>uint32_t</type>                                    <name>preserveAttachmentCount</name></member>
+            <member len="preserveAttachmentCount">const <type>uint32_t</type>*               <name>pPreserveAttachments</name></member>
+        </type>
+        <type category="struct" name="VkSubpassDependency2KHR">
+            <member values="VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true">const <type>void</type>* <name>pNext</name></member>
+            <member><type>uint32_t</type>                          <name>srcSubpass</name></member>
+            <member><type>uint32_t</type>                          <name>dstSubpass</name></member>
+            <member><type>VkPipelineStageFlags</type>              <name>srcStageMask</name></member>
+            <member><type>VkPipelineStageFlags</type>              <name>dstStageMask</name></member>
+            <member optional="true"><type>VkAccessFlags</type>     <name>srcAccessMask</name></member>
+            <member optional="true"><type>VkAccessFlags</type>     <name>dstAccessMask</name></member>
+            <member optional="true"><type>VkDependencyFlags</type> <name>dependencyFlags</name></member>
+            <member optional="true"><type>int32_t</type>           <name>viewOffset</name></member>
+        </type>
+        <type category="struct" name="VkRenderPassCreateInfo2KHR">
+            <member values="VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                                              <name>pNext</name></member>
+            <member optional="true"><type>VkRenderPassCreateFlags</type>                  <name>flags</name></member>
+            <member optional="true"><type>uint32_t</type>                                 <name>attachmentCount</name></member>
+            <member len="attachmentCount">const <type>VkAttachmentDescription2KHR</type>* <name>pAttachments</name></member>
+            <member><type>uint32_t</type>                                                 <name>subpassCount</name></member>
+            <member len="subpassCount">const <type>VkSubpassDescription2KHR</type>*       <name>pSubpasses</name></member>
+            <member optional="true"><type>uint32_t</type>                                 <name>dependencyCount</name></member>
+            <member len="dependencyCount">const <type>VkSubpassDependency2KHR</type>*     <name>pDependencies</name></member>
+            <member optional="true"><type>uint32_t</type>                                 <name>correlatedViewMaskCount</name></member>
+            <member len="correlatedViewMaskCount">const <type>uint32_t</type>*            <name>pCorrelatedViewMasks</name></member>
+        </type>
+        <type category="struct" name="VkSubpassBeginInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member><type>VkSubpassContents</type>      <name>contents</name></member>
+        </type>
+        <type category="struct" name="VkSubpassEndInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceTimelineSemaphoreFeaturesKHR" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                  <name>pNext</name></member>
+            <member><type>VkBool32</type>               <name>timelineSemaphore</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceTimelineSemaphorePropertiesKHR" structextends="VkPhysicalDeviceProperties2" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                  <name>pNext</name></member>
+            <member><type>uint64_t</type>               <name>maxTimelineSemaphoreValueDifference</name></member>
+        </type>
+        <type category="struct" name="VkSemaphoreTypeCreateInfoKHR" structextends="VkSemaphoreCreateInfo,VkPhysicalDeviceExternalSemaphoreInfo">
+            <member values="VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member><type>VkSemaphoreTypeKHR</type>     <name>semaphoreType</name></member>
+            <member><type>uint64_t</type>               <name>initialValue</name></member>
+        </type>
+        <type category="struct" name="VkTimelineSemaphoreSubmitInfoKHR" structextends="VkSubmitInfo,VkBindSparseInfo">
+            <member values="VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member optional="true"><type>uint32_t</type>         <name>waitSemaphoreValueCount</name></member>
+            <member optional="true" len="waitSemaphoreValueCount">const <type>uint64_t</type>* <name>pWaitSemaphoreValues</name></member>
+            <member optional="true"><type>uint32_t</type>         <name>signalSemaphoreValueCount</name></member>
+            <member optional="true" len="signalSemaphoreValueCount">const <type>uint64_t</type>* <name>pSignalSemaphoreValues</name></member>
+        </type>
+        <type category="struct" name="VkSemaphoreWaitInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkSemaphoreWaitFlagsKHR</type> <name>flags</name></member>
+            <member><type>uint32_t</type>               <name>semaphoreCount</name></member>
+            <member len="semaphoreCount">const <type>VkSemaphore</type>* <name>pSemaphores</name></member>
+            <member len="semaphoreCount">const <type>uint64_t</type>*    <name>pValues</name></member>
+        </type>
+        <type category="struct" name="VkSemaphoreSignalInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member><type>VkSemaphore</type>            <name>semaphore</name></member>
+            <member><type>uint64_t</type>               <name>value</name></member>
+        </type>
+        <type category="struct" name="VkVertexInputBindingDivisorDescriptionEXT">
+            <member><type>uint32_t</type>          <name>binding</name></member>
+            <member><type>uint32_t</type>          <name>divisor</name></member>
+        </type>
+        <type category="struct" name="VkPipelineVertexInputDivisorStateCreateInfoEXT" structextends="VkPipelineVertexInputStateCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                         <name>pNext</name></member>
+            <member><type>uint32_t</type>                            <name>vertexBindingDivisorCount</name></member>
+            <member len="vertexBindingDivisorCount">const <type>VkVertexInputBindingDivisorDescriptionEXT</type>*      <name>pVertexBindingDivisors</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT" returnedonly="true" structextends="VkPhysicalDeviceProperties2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                  <name>pNext</name></member>
+            <member><type>uint32_t</type>               <name>maxVertexAttribDivisor</name><comment>max value of vertex attribute divisor</comment></member>
+        </type>
+        <type category="struct" name="VkPhysicalDevicePCIBusInfoPropertiesEXT" structextends="VkPhysicalDeviceProperties2" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                  <name>pNext</name></member>
+            <member><type>uint32_t</type>               <name>pciDomain</name></member>
+            <member><type>uint32_t</type>               <name>pciBus</name></member>
+            <member><type>uint32_t</type>               <name>pciDevice</name></member>
+            <member><type>uint32_t</type>               <name>pciFunction</name></member>
+        </type>
+        <type category="struct" name="VkImportAndroidHardwareBufferInfoANDROID" structextends="VkMemoryAllocateInfo">
+            <member values="VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                        <name>pNext</name></member>
+            <member>struct <type>AHardwareBuffer</type>*            <name>buffer</name></member>
+        </type>
+        <type category="struct" name="VkAndroidHardwareBufferUsageANDROID" structextends="VkImageFormatProperties2" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                              <name>pNext</name></member>
+            <member><type>uint64_t</type>                           <name>androidHardwareBufferUsage</name></member>
+        </type>
+        <type category="struct" name="VkAndroidHardwareBufferPropertiesANDROID" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                              <name>pNext</name></member>
+            <member><type>VkDeviceSize</type>                       <name>allocationSize</name></member>
+            <member><type>uint32_t</type>                           <name>memoryTypeBits</name></member>
+        </type>
+        <type category="struct" name="VkMemoryGetAndroidHardwareBufferInfoANDROID">
+            <member values="VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                        <name>pNext</name></member>
+            <member><type>VkDeviceMemory</type>                     <name>memory</name></member>
+        </type>
+        <type category="struct" name="VkAndroidHardwareBufferFormatPropertiesANDROID" structextends="VkAndroidHardwareBufferPropertiesANDROID" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                              <name>pNext</name></member>
+            <member><type>VkFormat</type>                           <name>format</name></member>
+            <member><type>uint64_t</type>                           <name>externalFormat</name></member>
+            <member><type>VkFormatFeatureFlags</type>               <name>formatFeatures</name></member>
+            <member><type>VkComponentMapping</type>                 <name>samplerYcbcrConversionComponents</name></member>
+            <member><type>VkSamplerYcbcrModelConversion</type>      <name>suggestedYcbcrModel</name></member>
+            <member><type>VkSamplerYcbcrRange</type>                <name>suggestedYcbcrRange</name></member>
+            <member><type>VkChromaLocation</type>                   <name>suggestedXChromaOffset</name></member>
+            <member><type>VkChromaLocation</type>                   <name>suggestedYChromaOffset</name></member>
+        </type>
+        <type category="struct" name="VkCommandBufferInheritanceConditionalRenderingInfoEXT" structextends="VkCommandBufferInheritanceInfo">
+            <member values="VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                         <name>pNext</name></member>
+            <member><type>VkBool32</type>                            <name>conditionalRenderingEnable</name><comment>Whether this secondary command buffer may be executed during an active conditional rendering</comment></member>
+        </type>
+        <type category="struct" name="VkExternalFormatANDROID" structextends="VkImageCreateInfo,VkSamplerYcbcrConversionCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                              <name>pNext</name></member>
+            <member><type>uint64_t</type>                           <name>externalFormat</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDevice8BitStorageFeaturesKHR" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*      <name>pNext</name></member>
+            <member><type>VkBool32</type>                         <name>storageBuffer8BitAccess</name><comment>8-bit integer variables supported in StorageBuffer</comment></member>
+            <member><type>VkBool32</type>                         <name>uniformAndStorageBuffer8BitAccess</name><comment>8-bit integer variables supported in StorageBuffer and Uniform</comment></member>
+            <member><type>VkBool32</type>                         <name>storagePushConstant8</name><comment>8-bit integer variables supported in PushConstant</comment></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceConditionalRenderingFeaturesEXT" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*        <name>pNext</name></member>
+            <member><type>VkBool32</type>                           <name>conditionalRendering</name></member>
+            <member><type>VkBool32</type>                           <name>inheritedConditionalRendering</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceVulkanMemoryModelFeaturesKHR" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*      <name>pNext</name></member>
+            <member><type>VkBool32</type>                         <name>vulkanMemoryModel</name></member>
+            <member><type>VkBool32</type>                         <name>vulkanMemoryModelDeviceScope</name></member>
+            <member><type>VkBool32</type>                         <name>vulkanMemoryModelAvailabilityVisibilityChains</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceShaderAtomicInt64FeaturesKHR" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                               <name>pNext</name></member>
+            <member><type>VkBool32</type>                            <name>shaderBufferInt64Atomics</name></member>
+            <member><type>VkBool32</type>                            <name>shaderSharedInt64Atomics</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*        <name>pNext</name></member>
+            <member><type>VkBool32</type>                           <name>vertexAttributeInstanceRateDivisor</name></member>
+            <member><type>VkBool32</type>                           <name>vertexAttributeInstanceRateZeroDivisor</name></member>
+        </type>
+        <type category="struct" name="VkQueueFamilyCheckpointPropertiesNV" structextends="VkQueueFamilyProperties2" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*           <name>pNext</name></member>
+            <member><type>VkPipelineStageFlags</type> <name>checkpointExecutionStageMask</name></member>
+        </type>
+        <type category="struct" name="VkCheckpointDataNV" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                  <name>pNext</name></member>
+            <member><type>VkPipelineStageFlagBits</type>   <name>stage</name></member>
+            <member noautovalidity="true"><type>void</type>* <name>pCheckpointMarker</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceDepthStencilResolvePropertiesKHR" structextends="VkPhysicalDeviceProperties2" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                                <name>pNext</name></member>
+            <member><type>VkResolveModeFlagsKHR</type>                <name>supportedDepthResolveModes</name><comment>supported depth resolve modes</comment></member>
+            <member><type>VkResolveModeFlagsKHR</type>                <name>supportedStencilResolveModes</name><comment>supported stencil resolve modes</comment></member>
+            <member><type>VkBool32</type>                             <name>independentResolveNone</name><comment>depth and stencil resolve modes can be set independently if one of them is none</comment></member>
+            <member><type>VkBool32</type>                             <name>independentResolve</name><comment>depth and stencil resolve modes can be set independently</comment></member>
+        </type>
+        <type category="struct" name="VkSubpassDescriptionDepthStencilResolveKHR" structextends="VkSubpassDescription2KHR">
+            <member values="VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                                              <name>pNext</name></member>
+            <member><type>VkResolveModeFlagBitsKHR</type>                                 <name>depthResolveMode</name><comment>depth resolve mode</comment></member>
+            <member><type>VkResolveModeFlagBitsKHR</type>                                 <name>stencilResolveMode</name><comment>stencil resolve mode</comment></member>
+            <member optional="true">const <type>VkAttachmentReference2KHR</type>*         <name>pDepthStencilResolveAttachment</name><comment>depth/stencil resolve attachment</comment></member>
+        </type>
+        <type category="struct" name="VkImageViewASTCDecodeModeEXT" structextends="VkImageViewCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkFormat</type>                         <name>decodeMode</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceASTCDecodeFeaturesEXT" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*      <name>pNext</name></member>
+            <member><type>VkBool32</type>                         <name>decodeModeSharedExponent</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceTransformFeedbackFeaturesEXT" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                  <name>pNext</name></member>
+            <member><type>VkBool32</type>               <name>transformFeedback</name></member>
+            <member><type>VkBool32</type>               <name>geometryStreams</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceTransformFeedbackPropertiesEXT" structextends="VkPhysicalDeviceProperties2" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                  <name>pNext</name></member>
+            <member><type>uint32_t</type>               <name>maxTransformFeedbackStreams</name></member>
+            <member><type>uint32_t</type>               <name>maxTransformFeedbackBuffers</name></member>
+            <member><type>VkDeviceSize</type>           <name>maxTransformFeedbackBufferSize</name></member>
+            <member><type>uint32_t</type>               <name>maxTransformFeedbackStreamDataSize</name></member>
+            <member><type>uint32_t</type>               <name>maxTransformFeedbackBufferDataSize</name></member>
+            <member><type>uint32_t</type>               <name>maxTransformFeedbackBufferDataStride</name></member>
+            <member><type>VkBool32</type>               <name>transformFeedbackQueries</name></member>
+            <member><type>VkBool32</type>               <name>transformFeedbackStreamsLinesTriangles</name></member>
+            <member><type>VkBool32</type>               <name>transformFeedbackRasterizationStreamSelect</name></member>
+            <member><type>VkBool32</type>               <name>transformFeedbackDraw</name></member>
+        </type>
+        <type category="struct" name="VkPipelineRasterizationStateStreamCreateInfoEXT" structextends="VkPipelineRasterizationStateCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                                                                      <name>pNext</name></member>
+            <member optional="true"><type>VkPipelineRasterizationStateStreamCreateFlagsEXT</type>                 <name>flags</name></member>
+            <member><type>uint32_t</type>                                                                         <name>rasterizationStream</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV"><type>VkStructureType</type><name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*    <name>pNext</name></member>
+            <member><type>VkBool32</type>                       <name>representativeFragmentTest</name></member>
+        </type>
+        <type category="struct" name="VkPipelineRepresentativeFragmentTestStateCreateInfoNV" structextends="VkGraphicsPipelineCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*    <name>pNext</name></member>
+            <member><type>VkBool32</type>       <name>representativeFragmentTestEnable</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceExclusiveScissorFeaturesNV" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkBool32</type>                         <name>exclusiveScissor</name></member>
+        </type>
+        <type category="struct" name="VkPipelineViewportExclusiveScissorStateCreateInfoNV" structextends="VkPipelineViewportStateCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                                                    <name>pNext</name></member>
+            <member optional="true"><type>uint32_t</type>                                       <name>exclusiveScissorCount</name></member>
+            <member len="exclusiveScissorCount" optional="true">const <type>VkRect2D</type>*    <name>pExclusiveScissors</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceCornerSampledImageFeaturesNV" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                              <name>pNext</name></member>
+            <member><type>VkBool32</type>                           <name>cornerSampledImage</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceComputeShaderDerivativesFeaturesNV" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkBool32</type>                         <name>computeDerivativeGroupQuads</name></member>
+            <member><type>VkBool32</type>                         <name>computeDerivativeGroupLinear</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkBool32</type>                         <name>fragmentShaderBarycentric</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceShaderImageFootprintFeaturesNV" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                              <name>pNext</name></member>
+            <member><type>VkBool32</type>                           <name>imageFootprint</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkBool32</type>                         <name>dedicatedAllocationImageAliasing</name></member>
+        </type>
+        <type category="struct" name="VkShadingRatePaletteNV">
+            <member><type>uint32_t</type>                                                               <name>shadingRatePaletteEntryCount</name></member>
+            <member len="shadingRatePaletteEntryCount">const <type>VkShadingRatePaletteEntryNV</type>*  <name>pShadingRatePaletteEntries</name></member>
+        </type>
+        <type category="struct" name="VkPipelineViewportShadingRateImageStateCreateInfoNV" structextends="VkPipelineViewportStateCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                                                            <name>pNext</name></member>
+            <member><type>VkBool32</type>                                                               <name>shadingRateImageEnable</name></member>
+            <member optional="true"><type>uint32_t</type>                                                               <name>viewportCount</name></member>
+            <member len="viewportCount" optional="true">const <type>VkShadingRatePaletteNV</type>*      <name>pShadingRatePalettes</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceShadingRateImageFeaturesNV" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                               <name>pNext</name></member>
+            <member><type>VkBool32</type>                            <name>shadingRateImage</name></member>
+            <member><type>VkBool32</type>                            <name>shadingRateCoarseSampleOrder</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceShadingRateImagePropertiesNV" structextends="VkPhysicalDeviceProperties2" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                               <name>pNext</name></member>
+            <member><type>VkExtent2D</type>                          <name>shadingRateTexelSize</name></member>
+            <member><type>uint32_t</type>                            <name>shadingRatePaletteSize</name></member>
+            <member><type>uint32_t</type>                            <name>shadingRateMaxCoarseSamples</name></member>
+        </type>
+        <type category="struct" name="VkCoarseSampleLocationNV">
+            <member><type>uint32_t</type>                            <name>pixelX</name></member>
+            <member><type>uint32_t</type>                            <name>pixelY</name></member>
+            <member><type>uint32_t</type>                            <name>sample</name></member>
+        </type>
+        <type category="struct" name="VkCoarseSampleOrderCustomNV">
+            <member><type>VkShadingRatePaletteEntryNV</type>         <name>shadingRate</name></member>
+            <member><type>uint32_t</type>                            <name>sampleCount</name></member>
+            <member><type>uint32_t</type>                            <name>sampleLocationCount</name></member>
+            <member len="sampleLocationCount">const <type>VkCoarseSampleLocationNV</type>* <name>pSampleLocations</name></member>
+        </type>
+        <type category="struct" name="VkPipelineViewportCoarseSampleOrderStateCreateInfoNV" structextends="VkPipelineViewportStateCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                                                            <name>pNext</name></member>
+            <member><type>VkCoarseSampleOrderTypeNV</type>                                              <name>sampleOrderType</name></member>
+            <member optional="true"><type>uint32_t</type>                                               <name>customSampleOrderCount</name></member>
+            <member len="customSampleOrderCount">const <type>VkCoarseSampleOrderCustomNV</type>*        <name>pCustomSampleOrders</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceMeshShaderFeaturesNV" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                               <name>pNext</name></member>
+            <member><type>VkBool32</type>                            <name>taskShader</name></member>
+            <member><type>VkBool32</type>                            <name>meshShader</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceMeshShaderPropertiesNV" returnedonly="true" structextends="VkPhysicalDeviceProperties2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                               <name>pNext</name></member>
+            <member><type>uint32_t</type>                            <name>maxDrawMeshTasksCount</name></member>
+            <member><type>uint32_t</type>                            <name>maxTaskWorkGroupInvocations</name></member>
+            <member><type>uint32_t</type>                            <name>maxTaskWorkGroupSize</name>[3]</member>
+            <member><type>uint32_t</type>                            <name>maxTaskTotalMemorySize</name></member>
+            <member><type>uint32_t</type>                            <name>maxTaskOutputCount</name></member>
+            <member><type>uint32_t</type>                            <name>maxMeshWorkGroupInvocations</name></member>
+            <member><type>uint32_t</type>                            <name>maxMeshWorkGroupSize</name>[3]</member>
+            <member><type>uint32_t</type>                            <name>maxMeshTotalMemorySize</name></member>
+            <member><type>uint32_t</type>                            <name>maxMeshOutputVertices</name></member>
+            <member><type>uint32_t</type>                            <name>maxMeshOutputPrimitives</name></member>
+            <member><type>uint32_t</type>                            <name>maxMeshMultiviewViewCount</name></member>
+            <member><type>uint32_t</type>                            <name>meshOutputPerVertexGranularity</name></member>
+            <member><type>uint32_t</type>                            <name>meshOutputPerPrimitiveGranularity</name></member>
+        </type>
+        <type category="struct" name="VkDrawMeshTasksIndirectCommandNV">
+            <member><type>uint32_t</type>               <name>taskCount</name></member>
+            <member><type>uint32_t</type>               <name>firstTask</name></member>
+        </type>
+        <type category="struct" name="VkRayTracingShaderGroupCreateInfoNV">
+            <member values="VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member><type>VkRayTracingShaderGroupTypeNV</type> <name>type</name></member>
+            <member><type>uint32_t</type>               <name>generalShader</name></member>
+            <member><type>uint32_t</type>               <name>closestHitShader</name></member>
+            <member><type>uint32_t</type>               <name>anyHitShader</name></member>
+            <member><type>uint32_t</type>               <name>intersectionShader</name></member>
+        </type>
+        <type category="struct" name="VkRayTracingPipelineCreateInfoNV">
+            <member values="VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkPipelineCreateFlags</type>  <name>flags</name><comment>Pipeline creation flags</comment></member>
+            <member><type>uint32_t</type>               <name>stageCount</name></member>
+            <member len="stageCount">const <type>VkPipelineShaderStageCreateInfo</type>* <name>pStages</name><comment>One entry for each active shader stage</comment></member>
+            <member><type>uint32_t</type>               <name>groupCount</name></member>
+            <member len="groupCount">const <type>VkRayTracingShaderGroupCreateInfoNV</type>* <name>pGroups</name></member>
+            <member><type>uint32_t</type>               <name>maxRecursionDepth</name></member>
+            <member><type>VkPipelineLayout</type>       <name>layout</name><comment>Interface layout of the pipeline</comment></member>
+            <member noautovalidity="true" optional="true"><type>VkPipeline</type>      <name>basePipelineHandle</name><comment>If VK_PIPELINE_CREATE_DERIVATIVE_BIT is set and this value is nonzero, it specifies the handle of the base pipeline this is a derivative of</comment></member>
+            <member><type>int32_t</type>                <name>basePipelineIndex</name><comment>If VK_PIPELINE_CREATE_DERIVATIVE_BIT is set and this value is not -1, it specifies an index into pCreateInfos of the base pipeline this is a derivative of</comment></member>
+        </type>
+        <type category="struct" name="VkGeometryTrianglesNV">
+            <member values="VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                <name>pNext</name></member>
+            <member optional="true"><type>VkBuffer</type>   <name>vertexData</name></member>
+            <member><type>VkDeviceSize</type>               <name>vertexOffset</name></member>
+            <member><type>uint32_t</type>                   <name>vertexCount</name></member>
+            <member><type>VkDeviceSize</type>               <name>vertexStride</name></member>
+            <member><type>VkFormat</type>                   <name>vertexFormat</name></member>
+            <member optional="true"><type>VkBuffer</type>   <name>indexData</name></member>
+            <member><type>VkDeviceSize</type>               <name>indexOffset</name></member>
+            <member><type>uint32_t</type>                   <name>indexCount</name></member>
+            <member><type>VkIndexType</type>                <name>indexType</name></member>
+            <member optional="true"><type>VkBuffer</type>   <name>transformData</name><comment>Optional reference to array of floats representing a 3x4 row major affine transformation matrix.</comment></member>
+            <member><type>VkDeviceSize</type>               <name>transformOffset</name></member>
+        </type>
+        <type category="struct" name="VkGeometryAABBNV">
+            <member values="VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                <name>pNext</name></member>
+            <member optional="true"><type>VkBuffer</type>   <name>aabbData</name></member>
+            <member><type>uint32_t</type>                   <name>numAABBs</name></member>
+            <member><type>uint32_t</type>                   <name>stride</name><comment>Stride in bytes between AABBs</comment></member>
+            <member><type>VkDeviceSize</type>               <name>offset</name><comment>Offset in bytes of the first AABB in aabbData</comment></member>
+        </type>
+        <type category="struct" name="VkGeometryDataNV">
+            <member><type>VkGeometryTrianglesNV</type>                  <name>triangles</name></member>
+            <member><type>VkGeometryAABBNV</type>                       <name>aabbs</name></member>
+        </type>
+        <type category="struct" name="VkGeometryNV">
+            <member values="VK_STRUCTURE_TYPE_GEOMETRY_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                        <name>pNext</name></member>
+            <member><type>VkGeometryTypeNV</type>                  <name>geometryType</name></member>
+            <member><type>VkGeometryDataNV</type>                  <name>geometry</name></member>
+            <member optional="true"><type>VkGeometryFlagsNV</type> <name>flags</name></member>
+        </type>
+        <type category="struct" name="VkAccelerationStructureInfoNV">
+            <member values="VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkAccelerationStructureTypeNV</type>         <name>type</name></member>
+            <member optional="true"><type>VkBuildAccelerationStructureFlagsNV</type><name>flags</name></member>
+            <member optional="true"><type>uint32_t</type>               <name>instanceCount</name></member>
+            <member optional="true"><type>uint32_t</type>               <name>geometryCount</name></member>
+            <member len="geometryCount">const <type>VkGeometryNV</type>* <name>pGeometries</name></member>
+        </type>
+        <type category="struct" name="VkAccelerationStructureCreateInfoNV">
+            <member values="VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkDeviceSize</type>                           <name>compactedSize</name></member>
+            <member><type>VkAccelerationStructureInfoNV</type>          <name>info</name></member>
+        </type>
+        <type category="struct" name="VkBindAccelerationStructureMemoryInfoNV">
+            <member values="VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkAccelerationStructureNV</type>        <name>accelerationStructure</name></member>
+            <member><type>VkDeviceMemory</type>                   <name>memory</name></member>
+            <member><type>VkDeviceSize</type>                     <name>memoryOffset</name></member>
+            <member optional="true"><type>uint32_t</type>         <name>deviceIndexCount</name></member>
+            <member len="deviceIndexCount">const <type>uint32_t</type>*  <name>pDeviceIndices</name></member>
+        </type>
+        <type category="struct" name="VkWriteDescriptorSetAccelerationStructureNV" structextends="VkWriteDescriptorSet">
+            <member values="VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>uint32_t</type>                         <name>accelerationStructureCount</name></member>
+            <member len="accelerationStructureCount">const <type>VkAccelerationStructureNV</type>* <name>pAccelerationStructures</name></member>
+        </type>
+        <type category="struct" name="VkAccelerationStructureMemoryRequirementsInfoNV">
+            <member values="VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                                                          <name>pNext</name></member>
+            <member><type>VkAccelerationStructureMemoryRequirementsTypeNV</type>                      <name>type</name></member>
+            <member><type>VkAccelerationStructureNV</type>                                            <name>accelerationStructure</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceRayTracingPropertiesNV" returnedonly="true" structextends="VkPhysicalDeviceProperties2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>uint32_t</type>                         <name>shaderGroupHandleSize</name></member>
+            <member><type>uint32_t</type>                         <name>maxRecursionDepth</name></member>
+            <member><type>uint32_t</type>                         <name>maxShaderGroupStride</name></member>
+            <member><type>uint32_t</type>                         <name>shaderGroupBaseAlignment</name></member>
+            <member><type>uint64_t</type>                         <name>maxGeometryCount</name></member>
+            <member><type>uint64_t</type>                         <name>maxInstanceCount</name></member>
+            <member><type>uint64_t</type>                         <name>maxTriangleCount</name></member>
+            <member><type>uint32_t</type>                         <name>maxDescriptorSetAccelerationStructures</name></member>
+        </type>
+        <type category="struct" name="VkDrmFormatModifierPropertiesListEXT" returnedonly="true" structextends="VkFormatProperties2">
+            <member values="VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>* <name>pNext</name></member>
+            <member optional="true"><type>uint32_t</type> <name>drmFormatModifierCount</name></member>
+            <member optional="true,false" len="drmFormatModifierCount"><type>VkDrmFormatModifierPropertiesEXT</type>* <name>pDrmFormatModifierProperties</name></member>
+        </type>
+        <type category="struct" name="VkDrmFormatModifierPropertiesEXT" returnedonly="true">
+            <member><type>uint64_t</type> <name>drmFormatModifier</name></member>
+            <member><type>uint32_t</type> <name>drmFormatModifierPlaneCount</name></member>
+            <member><type>VkFormatFeatureFlags</type> <name>drmFormatModifierTilingFeatures</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceImageDrmFormatModifierInfoEXT" structextends="VkPhysicalDeviceImageFormatInfo2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>* <name>pNext</name></member>
+            <member><type>uint64_t</type> <name>drmFormatModifier</name></member>
+            <member><type>VkSharingMode</type> <name>sharingMode</name></member>
+            <member optional="true"><type>uint32_t</type> <name>queueFamilyIndexCount</name></member>
+            <member noautovalidity="true" len="queueFamilyIndexCount">const <type>uint32_t</type>* <name>pQueueFamilyIndices</name></member>
+        </type>
+        <type category="struct" name="VkImageDrmFormatModifierListCreateInfoEXT" structextends="VkImageCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>* <name>pNext</name></member>
+            <member><type>uint32_t</type> <name>drmFormatModifierCount</name></member>
+            <member len="drmFormatModifierCount">const <type>uint64_t</type>* <name>pDrmFormatModifiers</name></member>
+        </type>
+        <type category="struct" name="VkImageDrmFormatModifierExplicitCreateInfoEXT" structextends="VkImageCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>* <name>pNext</name></member>
+            <member><type>uint64_t</type> <name>drmFormatModifier</name></member>
+            <member optional="false"><type>uint32_t</type> <name>drmFormatModifierPlaneCount</name></member>
+            <member len="drmFormatModifierPlaneCount">const <type>VkSubresourceLayout</type>* <name>pPlaneLayouts</name></member>
+        </type>
+        <type category="struct" name="VkImageDrmFormatModifierPropertiesEXT" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>* <name>pNext</name></member>
+            <member><type>uint64_t</type> <name>drmFormatModifier</name></member>
+        </type>
+        <type category="struct" name="VkImageStencilUsageCreateInfoEXT" structextends="VkImageCreateInfo,VkPhysicalDeviceImageFormatInfo2">
+            <member values="VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>* <name>pNext</name></member>
+            <member><type>VkImageUsageFlags</type> <name>stencilUsage</name></member>
+        </type>
+        <type category="struct" name="VkDeviceMemoryOverallocationCreateInfoAMD"  structextends="VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkMemoryOverallocationBehaviorAMD</type> <name>overallocationBehavior</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceFragmentDensityMapFeaturesEXT" returnedonly="true" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkBool32</type>                         <name>fragmentDensityMap</name></member>
+            <member><type>VkBool32</type>                         <name>fragmentDensityMapDynamic</name></member>
+            <member><type>VkBool32</type>                         <name>fragmentDensityMapNonSubsampledImages</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceFragmentDensityMapPropertiesEXT" returnedonly="true" structextends="VkPhysicalDeviceProperties2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkExtent2D</type>                       <name>minFragmentDensityTexelSize</name></member>
+            <member><type>VkExtent2D</type>                       <name>maxFragmentDensityTexelSize</name></member>
+            <member><type>VkBool32</type>                         <name>fragmentDensityInvocations</name></member>
+        </type>
+        <type category="struct" name="VkRenderPassFragmentDensityMapCreateInfoEXT" structextends="VkRenderPassCreateInfo,VkRenderPassCreateInfo2KHR">
+            <member values="VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkAttachmentReference</type>            <name>fragmentDensityMapAttachment</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceScalarBlockLayoutFeaturesEXT" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                               <name>pNext</name></member>
+            <member><type>VkBool32</type>                            <name>scalarBlockLayout</name></member>
+        </type>
+        <type category="struct" name="VkSurfaceProtectedCapabilitiesKHR" structextends="VkSurfaceCapabilities2KHR">
+            <member values="VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>* <name>pNext</name></member>
+            <member><type>VkBool32</type> <name>supportsProtected</name><comment>Represents if surface can be protected</comment></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                               <name>pNext</name></member>
+            <member><type>VkBool32</type>                            <name>uniformBufferStandardLayout</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceDepthClipEnableFeaturesEXT" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                  <name>pNext</name><comment>Pointer to next structure</comment></member>
+            <member><type>VkBool32</type>               <name>depthClipEnable</name></member>
+        </type>
+        <type category="struct" name="VkPipelineRasterizationDepthClipStateCreateInfoEXT" structextends="VkPipelineRasterizationStateCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                                                                 <name>pNext</name></member>                 <!-- Pointer to next structure -->
+            <member optional="true"><type>VkPipelineRasterizationDepthClipStateCreateFlagsEXT</type>         <name>flags</name></member>                 <!-- Reserved -->
+            <member><type>VkBool32</type>                                                                    <name>depthClipEnable</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceMemoryBudgetPropertiesEXT" structextends="VkPhysicalDeviceMemoryProperties2" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*        <name>pNext</name></member>
+            <member><type>VkDeviceSize</type>                       <name>heapBudget</name>[<enum>VK_MAX_MEMORY_HEAPS</enum>]</member>
+            <member><type>VkDeviceSize</type>                       <name>heapUsage</name>[<enum>VK_MAX_MEMORY_HEAPS</enum>]</member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceMemoryPriorityFeaturesEXT" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*        <name>pNext</name></member>
+            <member><type>VkBool32</type>                           <name>memoryPriority</name></member>
+        </type>
+        <type category="struct" name="VkMemoryPriorityAllocateInfoEXT" structextends="VkMemoryAllocateInfo">
+            <member values="VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                        <name>pNext</name></member>
+            <member><type>float</type>                              <name>priority</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceBufferDeviceAddressFeaturesKHR" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*        <name>pNext</name></member>
+            <member><type>VkBool32</type>                           <name>bufferDeviceAddress</name></member>
+            <member><type>VkBool32</type>                           <name>bufferDeviceAddressCaptureReplay</name></member>
+            <member><type>VkBool32</type>                           <name>bufferDeviceAddressMultiDevice</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceBufferDeviceAddressFeaturesEXT" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*        <name>pNext</name></member>
+            <member><type>VkBool32</type>                           <name>bufferDeviceAddress</name></member>
+            <member><type>VkBool32</type>                           <name>bufferDeviceAddressCaptureReplay</name></member>
+            <member><type>VkBool32</type>                           <name>bufferDeviceAddressMultiDevice</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceBufferAddressFeaturesEXT"       alias="VkPhysicalDeviceBufferDeviceAddressFeaturesEXT"/>
+        <type category="struct" name="VkBufferDeviceAddressInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                                            <name>pNext</name></member>
+            <member><type>VkBuffer</type>                                               <name>buffer</name></member>
+        </type>
+        <type category="struct" name="VkBufferDeviceAddressInfoEXT" alias="VkBufferDeviceAddressInfoKHR"/>
+        <type category="struct" name="VkBufferOpaqueCaptureAddressCreateInfoKHR" structextends="VkBufferCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>uint64_t</type>                         <name>opaqueCaptureAddress</name></member>
+        </type>
+        <type category="struct" name="VkBufferDeviceAddressCreateInfoEXT" structextends="VkBufferCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkDeviceAddress</type>                  <name>deviceAddress</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceImageViewImageFormatInfoEXT" structextends="VkPhysicalDeviceImageFormatInfo2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkImageViewType</type>                  <name>imageViewType</name></member>
+        </type>
+        <type category="struct" name="VkFilterCubicImageViewImageFormatPropertiesEXT" returnedonly="true" structextends="VkImageFormatProperties2">
+            <member values="VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkBool32</type>                         <name>filterCubic</name></member> <!-- The combinations of format, image type (and image view type if provided) can be filtered with VK_FILTER_CUBIC_EXT -->
+            <member><type>VkBool32</type>                         <name>filterCubicMinmax</name> </member> <!-- The combination of format, image type (and image view type if provided) can be filtered with VK_FILTER_CUBIC_EXT and ReductionMode of Min or Max -->
+        </type>
+        <type category="struct" name="VkPhysicalDeviceImagelessFramebufferFeaturesKHR" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                                    <name>pNext</name></member>
+            <member><type>VkBool32</type>                                 <name>imagelessFramebuffer</name></member>
+        </type>
+        <type category="struct" name="VkFramebufferAttachmentsCreateInfoKHR" structextends="VkFramebufferCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                              <name>pNext</name></member>
+            <member optional="true"><type>uint32_t</type>                 <name>attachmentImageInfoCount</name></member>
+            <member len="attachmentImageInfoCount">const <type>VkFramebufferAttachmentImageInfoKHR</type>* <name>pAttachmentImageInfos</name></member>
+        </type>
+        <type category="struct" name="VkFramebufferAttachmentImageInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                              <name>pNext</name></member>
+            <member optional="true"><type>VkImageCreateFlags</type>       <name>flags</name><comment>Image creation flags</comment></member>
+            <member><type>VkImageUsageFlags</type>                        <name>usage</name><comment>Image usage flags</comment></member>
+            <member><type>uint32_t</type>                                 <name>width</name></member>
+            <member><type>uint32_t</type>                                 <name>height</name></member>
+            <member><type>uint32_t</type>                                 <name>layerCount</name></member>
+            <member optional="true"><type>uint32_t</type>                 <name>viewFormatCount</name></member>
+            <member len="viewFormatCount">const <type>VkFormat</type>*    <name>pViewFormats</name></member>
+        </type>
+        <type category="struct" name="VkRenderPassAttachmentBeginInfoKHR" structextends="VkRenderPassBeginInfo">
+            <member values="VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                              <name>pNext</name></member>             <!-- Pointer to next structure -->
+            <member optional="true"><type>uint32_t</type>                 <name>attachmentCount</name></member>
+            <member len="attachmentCount">const <type>VkImageView</type>* <name>pAttachments</name></member>
+        </type>
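
The three structures above (VkFramebufferAttachmentsCreateInfoKHR, VkFramebufferAttachmentImageInfoKHR, VkRenderPassAttachmentBeginInfoKHR) together implement imageless framebuffers: attachment properties are declared when the framebuffer is created, and the concrete image views are bound when the render pass begins. A C sketch assuming the imagelessFramebuffer feature of VK_KHR_imageless_framebuffer is enabled and a compatible render pass already exists (helper names illustrative):

    #include <vulkan/vulkan.h>

    /* Create a framebuffer whose attachments are described only by their
     * properties; no VkImageView is supplied yet. */
    static VkResult create_imageless_framebuffer(VkDevice device,
                                                 VkRenderPass render_pass,
                                                 VkFormat color_format,
                                                 uint32_t width, uint32_t height,
                                                 VkFramebuffer* out) {
        VkFramebufferAttachmentImageInfoKHR image_info = {
            .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR,
            .usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
            .width = width,
            .height = height,
            .layerCount = 1,
            .viewFormatCount = 1,
            .pViewFormats = &color_format,
        };
        VkFramebufferAttachmentsCreateInfoKHR attachments = {
            .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR,
            .attachmentImageInfoCount = 1,
            .pAttachmentImageInfos = &image_info,
        };
        VkFramebufferCreateInfo fb = {
            .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
            .pNext = &attachments,
            .flags = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR,
            .renderPass = render_pass,
            .attachmentCount = 1,        /* pAttachments is ignored for imageless framebuffers */
            .width = width,
            .height = height,
            .layers = 1,
        };
        return vkCreateFramebuffer(device, &fb, NULL, out);
    }

    /* At begin time, the actual view is bound by chaining
     * VkRenderPassAttachmentBeginInfoKHR into VkRenderPassBeginInfo. */
    static void begin_pass(VkCommandBuffer cmd, VkRenderPass render_pass,
                           VkFramebuffer framebuffer, VkImageView view,
                           VkExtent2D extent) {
        VkRenderPassAttachmentBeginInfoKHR attach = {
            .sType = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR,
            .attachmentCount = 1,
            .pAttachments = &view,
        };
        VkRenderPassBeginInfo begin = {
            .sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
            .pNext = &attach,
            .renderPass = render_pass,
            .framebuffer = framebuffer,
            .renderArea = { .offset = {0, 0}, .extent = extent },
        };
        vkCmdBeginRenderPass(cmd, &begin, VK_SUBPASS_CONTENTS_INLINE);
    }
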
+        <type category="struct" name="VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                  <name>pNext</name></member>
+            <member><type>VkBool32</type>               <name>textureCompressionASTC_HDR</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceCooperativeMatrixFeaturesNV" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                               <name>pNext</name></member>
+            <member><type>VkBool32</type>                            <name>cooperativeMatrix</name></member>
+            <member><type>VkBool32</type>                            <name>cooperativeMatrixRobustBufferAccess</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceCooperativeMatrixPropertiesNV" returnedonly="true" structextends="VkPhysicalDeviceProperties2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                               <name>pNext</name></member>
+            <member><type>VkShaderStageFlags</type>                  <name>cooperativeMatrixSupportedStages</name></member>
+        </type>
+        <type category="struct" name="VkCooperativeMatrixPropertiesNV">
+            <member values="VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                               <name>pNext</name></member>
+            <member><type>uint32_t</type>                            <name>MSize</name></member>
+            <member><type>uint32_t</type>                            <name>NSize</name></member>
+            <member><type>uint32_t</type>                            <name>KSize</name></member>
+            <member><type>VkComponentTypeNV</type>                   <name>AType</name></member>
+            <member><type>VkComponentTypeNV</type>                   <name>BType</name></member>
+            <member><type>VkComponentTypeNV</type>                   <name>CType</name></member>
+            <member><type>VkComponentTypeNV</type>                   <name>DType</name></member>
+            <member><type>VkScopeNV</type>                           <name>scope</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceYcbcrImageArraysFeaturesEXT" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*        <name>pNext</name></member>
+            <member><type>VkBool32</type>                           <name>ycbcrImageArrays</name></member>
+        </type>
+        <type category="struct" name="VkImageViewHandleInfoNVX">
+            <member values="VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member><type>VkImageView</type>                         <name>imageView</name></member>
+            <member><type>VkDescriptorType</type>                    <name>descriptorType</name></member>
+            <member optional="true"><type>VkSampler</type>           <name>sampler</name></member>
+        </type>
+        <type category="struct" name="VkPresentFrameTokenGGP" structextends="VkPresentInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>GgpFrameToken</type>                    <name>frameToken</name></member>
+        </type>
+        <type category="struct" name="VkPipelineCreationFeedbackEXT" returnedonly="true">
+            <member><type>VkPipelineCreationFeedbackFlagsEXT</type>  <name>flags</name></member>
+            <member><type>uint64_t</type>                            <name>duration</name></member>
+        </type>
+        <type category="struct" name="VkPipelineCreationFeedbackCreateInfoEXT" structextends="VkGraphicsPipelineCreateInfo,VkComputePipelineCreateInfo,VkRayTracingPipelineCreateInfoNV">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                         <name>pNext</name></member>
+            <member><type>VkPipelineCreationFeedbackEXT</type>*      <name>pPipelineCreationFeedback</name><comment>Output pipeline creation feedback.</comment></member>
+            <member><type>uint32_t</type>                            <name>pipelineStageCreationFeedbackCount</name></member>
+            <member len="pipelineStageCreationFeedbackCount"><type>VkPipelineCreationFeedbackEXT</type>* <name>pPipelineStageCreationFeedbacks</name><comment>One entry for each shader stage specified in the parent Vk*PipelineCreateInfo struct</comment></member>
+        </type>
+        <type category="struct" name="VkSurfaceFullScreenExclusiveInfoEXT" structextends="VkPhysicalDeviceSurfaceInfo2KHR,VkSwapchainCreateInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkFullScreenExclusiveEXT</type>         <name>fullScreenExclusive</name></member>
+        </type>
+        <type category="struct" name="VkSurfaceFullScreenExclusiveWin32InfoEXT" structextends="VkPhysicalDeviceSurfaceInfo2KHR,VkSwapchainCreateInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*      <name>pNext</name></member>
+            <member><type>HMONITOR</type>         <name>hmonitor</name></member>
+        </type>
+        <type category="struct" name="VkSurfaceCapabilitiesFullScreenExclusiveEXT" structextends="VkSurfaceCapabilities2KHR">
+            <member values="VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*            <name>pNext</name></member>
+            <member><type>VkBool32</type>         <name>fullScreenExclusiveSupported</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDevicePerformanceQueryFeaturesKHR" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*      <name>pNext</name></member>
+            <member><type>VkBool32</type>                         <name>performanceCounterQueryPools</name><comment>performance counters supported in query pools</comment></member>
+            <member><type>VkBool32</type>                         <name>performanceCounterMultipleQueryPools</name><comment>performance counters from multiple query pools can be accessed in the same primary command buffer</comment></member>
+        </type>
+        <type category="struct" name="VkPhysicalDevicePerformanceQueryPropertiesKHR" returnedonly="true" structextends="VkPhysicalDeviceProperties2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>* <name>pNext</name></member>
+            <member noautovalidity="true"><type>VkBool32</type> <name>allowCommandBufferQueryCopies</name><comment>Flag to specify whether performance queries are allowed to be used in vkCmdCopyQueryPoolResults</comment></member>
+        </type>
+        <type category="struct" name="VkPerformanceCounterKHR" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                             <name>pNext</name></member> <!-- Pointer to next structure -->
+            <member><type>VkPerformanceCounterUnitKHR</type>        <name>unit</name></member>
+            <member><type>VkPerformanceCounterScopeKHR</type>       <name>scope</name></member>
+            <member><type>VkPerformanceCounterStorageKHR</type>     <name>storage</name></member>
+            <member><type>uint8_t</type> <name>uuid</name>[<enum>VK_UUID_SIZE</enum>]</member>
+        </type>
+        <type category="struct" name="VkPerformanceCounterDescriptionKHR" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                             <name>pNext</name></member> <!-- Pointer to next structure -->
+            <member optional="true"><type>VkPerformanceCounterDescriptionFlagsKHR</type> <name>flags</name></member>
+            <member><type>char</type>                                    <name>name</name>[<enum>VK_MAX_DESCRIPTION_SIZE</enum>]</member>
+            <member><type>char</type>                                    <name>category</name>[<enum>VK_MAX_DESCRIPTION_SIZE</enum>]</member>
+            <member><type>char</type>                                    <name>description</name>[<enum>VK_MAX_DESCRIPTION_SIZE</enum>]</member>
+        </type>
+        <type category="struct" name="VkQueryPoolPerformanceCreateInfoKHR" structextends="VkQueryPoolCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                             <name>pNext</name></member> <!-- Pointer to next structure -->
+            <member><type>uint32_t</type>                                <name>queueFamilyIndex</name></member>
+            <member><type>uint32_t</type>                                <name>counterIndexCount</name></member>
+            <member len="counterIndexCount">const <type>uint32_t</type>* <name>pCounterIndices</name></member>
+        </type>
+        <type category="union" name="VkPerformanceCounterResultKHR" comment="// Union of all the possible return types a counter result could return">
+            <member><type>int32_t</type>  <name>int32</name></member>
+            <member><type>int64_t</type>  <name>int64</name></member>
+            <member><type>uint32_t</type> <name>uint32</name></member>
+            <member><type>uint64_t</type> <name>uint64</name></member>
+            <member><type>float</type>    <name>float32</name></member>
+            <member><type>double</type>   <name>float64</name></member>
+        </type>
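
The counter and description structures above are filled in by vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR, following the usual two-call enumeration pattern. A C sketch assuming VK_KHR_performance_query is supported; the command is an extension entry point resolved via vkGetInstanceProcAddr (helper name illustrative, error handling omitted):

    #include <stdio.h>
    #include <stdlib.h>
    #include <vulkan/vulkan.h>

    /* List the performance counters exposed by one queue family. */
    static void list_performance_counters(VkInstance instance,
                                          VkPhysicalDevice gpu,
                                          uint32_t queue_family) {
        PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR enumerate =
            (PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR)
                vkGetInstanceProcAddr(
                    instance,
                    "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR");

        uint32_t count = 0;
        enumerate(gpu, queue_family, &count, NULL, NULL);

        VkPerformanceCounterKHR* counters = calloc(count, sizeof(*counters));
        VkPerformanceCounterDescriptionKHR* descs = calloc(count, sizeof(*descs));
        for (uint32_t i = 0; i < count; ++i) {
            counters[i].sType = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR;
            descs[i].sType = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR;
        }
        enumerate(gpu, queue_family, &count, counters, descs);

        for (uint32_t i = 0; i < count; ++i) {
            printf("%s: %s\n", descs[i].name, descs[i].description);
        }
        free(counters);
        free(descs);
    }
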
+        <type category="struct" name="VkAcquireProfilingLockInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkAcquireProfilingLockFlagsKHR</type> <name>flags</name><comment>Acquire profiling lock flags</comment></member>
+            <member><type>uint64_t</type> <name>timeout</name></member>
+        </type>
+        <type category="struct" name="VkPerformanceQuerySubmitInfoKHR" structextends="VkSubmitInfo">
+            <member values="VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*         <name>pNext</name></member>
+            <member><type>uint32_t</type>            <name>counterPassIndex</name><comment>Index for which counter pass to submit</comment></member>
+        </type>
+        <type category="struct" name="VkHeadlessSurfaceCreateInfoEXT">
+            <member values="VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*            <name>pNext</name></member>
+            <member optional="true"><type>VkHeadlessSurfaceCreateFlagsEXT</type>   <name>flags</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceCoverageReductionModeFeaturesNV" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV"><type>VkStructureType</type><name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*    <name>pNext</name></member>
+            <member><type>VkBool32</type>                       <name>coverageReductionMode</name></member>
+        </type>
+        <type category="struct" name="VkPipelineCoverageReductionStateCreateInfoNV" structextends="VkPipelineMultisampleStateCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                                                        <name>pNext</name></member>
+            <member optional="true"><type>VkPipelineCoverageReductionStateCreateFlagsNV</type>      <name>flags</name></member>
+            <member><type>VkCoverageReductionModeNV</type>                                          <name>coverageReductionMode</name></member>
+        </type>
+        <type category="struct" name="VkFramebufferMixedSamplesCombinationNV" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkCoverageReductionModeNV</type>  <name>coverageReductionMode</name></member>
+            <member><type>VkSampleCountFlagBits</type>      <name>rasterizationSamples</name></member>
+            <member><type>VkSampleCountFlags</type>         <name>depthStencilSamples</name></member>
+            <member><type>VkSampleCountFlags</type>         <name>colorSamples</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                            <name>pNext</name></member>
+            <member><type>VkBool32</type>                         <name>shaderIntegerFunctions2</name></member>
+        </type>
+        <type category="union" name="VkPerformanceValueDataINTEL">
+            <member><type>uint32_t</type>                           <name>value32</name></member>
+            <member><type>uint64_t</type>                           <name>value64</name></member>
+            <member><type>float</type>                              <name>valueFloat</name></member>
+            <member><type>VkBool32</type>                           <name>valueBool</name></member>
+            <member>const <type>char</type>*                        <name>valueString</name></member>
+        </type>
+        <type category="struct" name="VkPerformanceValueINTEL">
+            <member><type>VkPerformanceValueTypeINTEL</type>        <name>type</name></member>
+            <member><type>VkPerformanceValueDataINTEL</type>        <name>data</name></member>
+        </type>
+        <type category="struct" name="VkInitializePerformanceApiInfoINTEL" >
+            <member values="VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                         <name>pNext</name></member>
+            <member optional="false"><type>void</type>*             <name>pUserData</name></member>
+        </type>
+        <type category="struct" name="VkQueryPoolCreateInfoINTEL">
+            <member values="VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                         <name>pNext</name></member>
+            <member><type>VkQueryPoolSamplingModeINTEL</type>        <name>performanceCountersSampling</name></member>
+        </type>
+        <type category="struct" name="VkPerformanceMarkerInfoINTEL">
+            <member values="VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                         <name>pNext</name></member>
+            <member><type>uint64_t</type>                            <name>marker</name></member>
+        </type>
+        <type category="struct" name="VkPerformanceStreamMarkerInfoINTEL">
+            <member values="VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                         <name>pNext</name></member>
+            <member><type>uint32_t</type>                            <name>marker</name></member>
+        </type>
+        <type category="struct" name="VkPerformanceOverrideInfoINTEL">
+            <member values="VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                         <name>pNext</name></member>
+            <member><type>VkPerformanceOverrideTypeINTEL</type>      <name>type</name></member>
+            <member><type>VkBool32</type>                            <name>enable</name></member>
+            <member><type>uint64_t</type>                            <name>parameter</name></member>
+        </type>
+        <type category="struct" name="VkPerformanceConfigurationAcquireInfoINTEL">
+            <member values="VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                         <name>pNext</name></member>
+            <member><type>VkPerformanceConfigurationTypeINTEL</type> <name>type</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceShaderClockFeaturesKHR" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                               <name>pNext</name></member>
+            <member><type>VkBool32</type>                            <name>shaderSubgroupClock</name></member>
+            <member><type>VkBool32</type>                            <name>shaderDeviceClock</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceIndexTypeUint8FeaturesEXT" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*        <name>pNext</name></member>
+            <member><type>VkBool32</type>                           <name>indexTypeUint8</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceShaderSMBuiltinsPropertiesNV" returnedonly="true" structextends="VkPhysicalDeviceProperties2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                          <name>pNext</name></member>
+            <member><type>uint32_t</type>                       <name>shaderSMCount</name></member>
+            <member><type>uint32_t</type>                       <name>shaderWarpsPerSM</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceShaderSMBuiltinsFeaturesNV" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV"><type>VkStructureType</type><name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*    <name>pNext</name></member>
+            <member><type>VkBool32</type>                       <name>shaderSMBuiltins</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                  <name>pNext</name><comment>Pointer to next structure</comment></member>
+            <member><type>VkBool32</type>               <name>fragmentShaderSampleInterlock</name></member>
+            <member><type>VkBool32</type>               <name>fragmentShaderPixelInterlock</name></member>
+            <member><type>VkBool32</type>               <name>fragmentShaderShadingRateInterlock</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR"><type>VkStructureType</type><name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*    <name>pNext</name></member>
+            <member><type>VkBool32</type>                       <name>separateDepthStencilLayouts</name></member>
+        </type>
+        <type category="struct" name="VkAttachmentReferenceStencilLayoutKHR" structextends="VkAttachmentReference2KHR">
+            <member values="VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR"><type>VkStructureType</type><name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*    <name>pNext</name></member>
+            <member><type>VkImageLayout</type>                  <name>stencilLayout</name></member>
+        </type>
+        <type category="struct" name="VkAttachmentDescriptionStencilLayoutKHR" structextends="VkAttachmentDescription2KHR">
+            <member values="VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR"><type>VkStructureType</type><name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*    <name>pNext</name></member>
+            <member><type>VkImageLayout</type>                  <name>stencilInitialLayout</name></member>
+            <member><type>VkImageLayout</type>                  <name>stencilFinalLayout</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*              <name>pNext</name></member>
+            <member><type>VkBool32</type>           <name>pipelineExecutableInfo</name></member>
+        </type>
+        <type category="struct" name="VkPipelineInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*        <name>pNext</name></member>
+            <member><type>VkPipeline</type>         <name>pipeline</name></member>
+        </type>
+        <type category="struct" name="VkPipelineExecutablePropertiesKHR" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*              <name>pNext</name></member>
+            <member><type>VkShaderStageFlags</type> <name>stages</name></member>
+            <member><type>char</type>               <name>name</name>[<enum>VK_MAX_DESCRIPTION_SIZE</enum>]</member>
+            <member><type>char</type>               <name>description</name>[<enum>VK_MAX_DESCRIPTION_SIZE</enum>]</member>
+            <member><type>uint32_t</type>           <name>subgroupSize</name></member>
+        </type>
+        <type category="struct" name="VkPipelineExecutableInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*        <name>pNext</name></member>
+            <member><type>VkPipeline</type>         <name>pipeline</name></member>
+            <member><type>uint32_t</type>           <name>executableIndex</name></member>
+        </type>
+        <type category="union" name="VkPipelineExecutableStatisticValueKHR" returnedonly="true">
+            <member><type>VkBool32</type>           <name>b32</name></member>
+            <member><type>int64_t</type>            <name>i64</name></member>
+            <member><type>uint64_t</type>           <name>u64</name></member>
+            <member><type>double</type>             <name>f64</name></member>
+        </type>
+        <type category="struct" name="VkPipelineExecutableStatisticKHR" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*              <name>pNext</name></member>
+            <member><type>char</type>               <name>name</name>[<enum>VK_MAX_DESCRIPTION_SIZE</enum>]</member>
+            <member><type>char</type>               <name>description</name>[<enum>VK_MAX_DESCRIPTION_SIZE</enum>]</member>
+            <member><type>VkPipelineExecutableStatisticFormatKHR</type> <name>format</name></member>
+            <member><type>VkPipelineExecutableStatisticValueKHR</type>  <name>value</name></member>
+        </type>
+        <type category="struct" name="VkPipelineExecutableInternalRepresentationKHR" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*              <name>pNext</name></member>
+            <member><type>char</type>               <name>name</name>[<enum>VK_MAX_DESCRIPTION_SIZE</enum>]</member>
+            <member><type>char</type>               <name>description</name>[<enum>VK_MAX_DESCRIPTION_SIZE</enum>]</member>
+            <member><type>VkBool32</type>           <name>isText</name></member>
+            <member optional="true"><type>size_t</type>               <name>dataSize</name></member>
+            <member optional="true" len="dataSize"><type>void</type>* <name>pData</name></member>
+        </type>
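
These pipeline-executable structures are returned by vkGetPipelineExecutablePropertiesKHR, again via the two-call enumeration pattern. A C sketch assuming the pipelineExecutableInfo feature of VK_KHR_pipeline_executable_properties is enabled (helper name illustrative, error handling omitted):

    #include <stdio.h>
    #include <string.h>
    #include <vulkan/vulkan.h>

    /* Print the executables (e.g. compiled shader stages) behind a pipeline. */
    static void print_pipeline_executables(VkDevice device, VkPipeline pipeline) {
        PFN_vkGetPipelineExecutablePropertiesKHR get_props =
            (PFN_vkGetPipelineExecutablePropertiesKHR)vkGetDeviceProcAddr(
                device, "vkGetPipelineExecutablePropertiesKHR");

        VkPipelineInfoKHR info = {
            .sType = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR,
            .pipeline = pipeline,
        };
        uint32_t count = 0;
        get_props(device, &info, &count, NULL);

        VkPipelineExecutablePropertiesKHR props[16];
        memset(props, 0, sizeof(props));
        if (count > 16) {
            count = 16;  /* clamp for this illustration */
        }
        for (uint32_t i = 0; i < count; ++i) {
            props[i].sType = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR;
        }
        get_props(device, &info, &count, props);

        for (uint32_t i = 0; i < count; ++i) {
            printf("%s (subgroup size %u): %s\n",
                   props[i].name, props[i].subgroupSize, props[i].description);
        }
    }
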
+        <type category="struct" name="VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*        <name>pNext</name></member>
+            <member><type>VkBool32</type>                           <name>shaderDemoteToHelperInvocation</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*        <name>pNext</name></member>
+            <member><type>VkBool32</type>                           <name>texelBufferAlignment</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT" structextends="VkPhysicalDeviceProperties2" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*        <name>pNext</name></member>
+            <member><type>VkDeviceSize</type>                       <name>storageTexelBufferOffsetAlignmentBytes</name></member>
+            <member><type>VkBool32</type>                           <name>storageTexelBufferOffsetSingleTexelAlignment</name></member>
+            <member><type>VkDeviceSize</type>                       <name>uniformTexelBufferOffsetAlignmentBytes</name></member>
+            <member><type>VkBool32</type>                           <name>uniformTexelBufferOffsetSingleTexelAlignment</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceSubgroupSizeControlFeaturesEXT" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+             <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+             <member><type>void</type>*                          <name>pNext</name></member>
+             <member><type>VkBool32</type> <name>subgroupSizeControl</name></member>
+             <member><type>VkBool32</type> <name>computeFullSubgroups</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceSubgroupSizeControlPropertiesEXT" returnedonly="true" structextends="VkPhysicalDeviceProperties2">
+             <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+             <member><type>void</type>*                          <name>pNext</name></member>
+             <member noautovalidity="true"><type>uint32_t</type> <name>minSubgroupSize</name><comment>The minimum subgroup size supported by this device</comment></member>
+             <member noautovalidity="true"><type>uint32_t</type> <name>maxSubgroupSize</name><comment>The maximum subgroup size supported by this device</comment></member>
+             <member noautovalidity="true"><type>uint32_t</type> <name>maxComputeWorkgroupSubgroups</name><comment>The maximum number of subgroups supported in a workgroup</comment></member>
+             <member><type>VkShaderStageFlags</type>             <name>requiredSubgroupSizeStages</name><comment>The shader stages that support specifying a subgroup size</comment></member>
+        </type>
+        <type category="struct" name="VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT" returnedonly="true" structextends="VkPipelineShaderStageCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                  <name>pNext</name></member>
+            <member><type>uint32_t</type>               <name>requiredSubgroupSize</name></member>
+        </type>
+        <type category="struct" name="VkMemoryOpaqueCaptureAddressAllocateInfoKHR" structextends="VkMemoryAllocateInfo">
+            <member values="VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                   <name>pNext</name></member>
+            <member><type>uint64_t</type>                      <name>opaqueCaptureAddress</name></member>
+        </type>
+        <type category="struct" name="VkDeviceMemoryOpaqueCaptureAddressInfoKHR">
+            <member values="VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                      <name>pNext</name></member>
+            <member><type>VkDeviceMemory</type>                   <name>memory</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceLineRasterizationFeaturesEXT" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*        <name>pNext</name></member>
+            <member><type>VkBool32</type>                           <name>rectangularLines</name></member>
+            <member><type>VkBool32</type>                           <name>bresenhamLines</name></member>
+            <member><type>VkBool32</type>                           <name>smoothLines</name></member>
+            <member><type>VkBool32</type>                           <name>stippledRectangularLines</name></member>
+            <member><type>VkBool32</type>                           <name>stippledBresenhamLines</name></member>
+            <member><type>VkBool32</type>                           <name>stippledSmoothLines</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceLineRasterizationPropertiesEXT" returnedonly="true" structextends="VkPhysicalDeviceProperties2">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>*                               <name>pNext</name></member>
+            <member><type>uint32_t</type>                            <name>lineSubPixelPrecisionBits</name></member>
+        </type>
+        <type category="struct" name="VkPipelineRasterizationLineStateCreateInfoEXT" structextends="VkPipelineRasterizationStateCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member>const <type>void</type>*                                                      <name>pNext</name></member>
+            <member><type>VkLineRasterizationModeEXT</type>                                       <name>lineRasterizationMode</name></member>
+            <member><type>VkBool32</type>                                                         <name>stippledLineEnable</name></member>
+            <member optional="true"><type>uint32_t</type>                                         <name>lineStippleFactor</name></member>
+            <member optional="true"><type>uint16_t</type>                                         <name>lineStipplePattern</name></member>
+        </type>
+        <type category="struct" name="VkPipelineCompilerControlCreateInfoAMD" structextends="VkGraphicsPipelineCreateInfo,VkComputePipelineCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD"><type>VkStructureType</type>   <name>sType</name></member>
+            <member>const <type>void</type>*                                                                            <name>pNext</name></member>
+            <member optional="true"><type>VkPipelineCompilerControlFlagsAMD</type>                                      <name>compilerControlFlags</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceCoherentMemoryFeaturesAMD" structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD"><type>VkStructureType</type> <name>sType</name></member>
+            <member noautovalidity="true"><type>void</type>*        <name>pNext</name></member>
+            <member><type>VkBool32</type>                           <name>deviceCoherentMemory</name></member>
+        </type>
+        <type category="struct" name="VkPhysicalDeviceToolPropertiesEXT" returnedonly="true">
+            <member values="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT"><type>VkStructureType</type> <name>sType</name></member>
+            <member><type>void</type>* <name>pNext</name></member>
+            <member><type>char</type>            <name>name</name>[<enum>VK_MAX_EXTENSION_NAME_SIZE</enum>]</member>
+            <member><type>char</type>            <name>version</name>[<enum>VK_MAX_EXTENSION_NAME_SIZE</enum>]</member>
+            <member><type>VkToolPurposeFlagsEXT</type> <name>purposes</name></member>
+            <member><type>char</type>            <name>description</name>[<enum>VK_MAX_DESCRIPTION_SIZE</enum>]</member>
+            <member><type>char</type>            <name>layer</name>[<enum>VK_MAX_EXTENSION_NAME_SIZE</enum>]</member>
+        </type>
+    </types>
+
+    <comment>Vulkan enumerant (token) definitions</comment>
+
+    <enums name="API Constants" comment="Vulkan hardcoded constants - not an enumerated type, part of the header boilerplate">
+        <enum value="256"   name="VK_MAX_PHYSICAL_DEVICE_NAME_SIZE"/>
+        <enum value="16"    name="VK_UUID_SIZE"/>
+        <enum value="8"     name="VK_LUID_SIZE"/>
+        <enum               name="VK_LUID_SIZE_KHR" alias="VK_LUID_SIZE"/>
+        <enum value="256"   name="VK_MAX_EXTENSION_NAME_SIZE"/>
+        <enum value="256"   name="VK_MAX_DESCRIPTION_SIZE"/>
+        <enum value="32"    name="VK_MAX_MEMORY_TYPES"/>
+        <enum value="16"    name="VK_MAX_MEMORY_HEAPS" comment="The maximum number of unique memory heaps, each of which supporting 1 or more memory types"/>
+        <enum value="1000.0f" name="VK_LOD_CLAMP_NONE"/>
+        <enum value="(~0U)" name="VK_REMAINING_MIP_LEVELS"/>
+        <enum value="(~0U)" name="VK_REMAINING_ARRAY_LAYERS"/>
+        <enum value="(~0ULL)" name="VK_WHOLE_SIZE"/>
+        <enum value="(~0U)" name="VK_ATTACHMENT_UNUSED"/>
+        <enum value="1"     name="VK_TRUE"/>
+        <enum value="0"     name="VK_FALSE"/>
+        <enum value="(~0U)" name="VK_QUEUE_FAMILY_IGNORED"/>
+        <enum value="(~0U-1)" name="VK_QUEUE_FAMILY_EXTERNAL"/>
+        <enum               name="VK_QUEUE_FAMILY_EXTERNAL_KHR" alias="VK_QUEUE_FAMILY_EXTERNAL"/>
+        <enum value="(~0U-2)" name="VK_QUEUE_FAMILY_FOREIGN_EXT"/>
+        <enum value="(~0U)" name="VK_SUBPASS_EXTERNAL"/>
+        <enum value="32"    name="VK_MAX_DEVICE_GROUP_SIZE"/>
+        <enum               name="VK_MAX_DEVICE_GROUP_SIZE_KHR" alias="VK_MAX_DEVICE_GROUP_SIZE"/>
+        <enum value="256"   name="VK_MAX_DRIVER_NAME_SIZE_KHR"/>
+        <enum value="256"   name="VK_MAX_DRIVER_INFO_SIZE_KHR"/>
+        <enum value="(~0U)" name="VK_SHADER_UNUSED_NV"/>
+    </enums>
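
Several of the (~0U) constants above act as "all remaining" or "ignored" sentinels. For example, a subresource range covering every mip level and array layer of a color image can be written with them, as in this small C illustration:

    #include <vulkan/vulkan.h>

    /* A subresource range spanning all mips and layers of a color image,
     * using the VK_REMAINING_* sentinel constants. */
    static VkImageSubresourceRange whole_color_image_range(void) {
        VkImageSubresourceRange range = {
            .aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT,
            .baseMipLevel   = 0,
            .levelCount     = VK_REMAINING_MIP_LEVELS,
            .baseArrayLayer = 0,
            .layerCount     = VK_REMAINING_ARRAY_LAYERS,
        };
        return range;
    }
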
+
+    <comment>
+        Unlike OpenGL, most tokens in Vulkan are actual typed enumerants in
+        their own numeric namespaces. The "name" attribute is the C enum
+        type name, and is pulled in from a type tag definition above
+        (slightly clunky, but retains the type / enum distinction). "type"
+        attributes of "enum" or "bitmask" indicate that these values should
+        be generated inside an appropriate definition.
+    </comment>
+
+    <enums name="VkImageLayout" type="enum">
+        <enum value="0"     name="VK_IMAGE_LAYOUT_UNDEFINED"                         comment="Implicit layout an image is when its contents are undefined due to various reasons (e.g. right after creation)"/>
+        <enum value="1"     name="VK_IMAGE_LAYOUT_GENERAL"                           comment="General layout when image can be used for any kind of access"/>
+        <enum value="2"     name="VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL"          comment="Optimal layout when image is only used for color attachment read/write"/>
+        <enum value="3"     name="VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL"  comment="Optimal layout when image is only used for depth/stencil attachment read/write"/>
+        <enum value="4"     name="VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL"   comment="Optimal layout when image is used for read only depth/stencil attachment and shader access"/>
+        <enum value="5"     name="VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL"          comment="Optimal layout when image is used for read only shader access"/>
+        <enum value="6"     name="VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL"              comment="Optimal layout when image is used only as source of transfer operations"/>
+        <enum value="7"     name="VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL"              comment="Optimal layout when image is used only as destination of transfer operations"/>
+        <enum value="8"     name="VK_IMAGE_LAYOUT_PREINITIALIZED"                    comment="Initial layout used when the data is populated by the CPU"/>
+    </enums>
+    <enums name="VkAttachmentLoadOp" type="enum">
+        <enum value="0"     name="VK_ATTACHMENT_LOAD_OP_LOAD"/>
+        <enum value="1"     name="VK_ATTACHMENT_LOAD_OP_CLEAR"/>
+        <enum value="2"     name="VK_ATTACHMENT_LOAD_OP_DONT_CARE"/>
+    </enums>
+    <enums name="VkAttachmentStoreOp" type="enum">
+        <enum value="0"     name="VK_ATTACHMENT_STORE_OP_STORE"/>
+        <enum value="1"     name="VK_ATTACHMENT_STORE_OP_DONT_CARE"/>
+    </enums>
+    <enums name="VkImageType" type="enum">
+        <enum value="0"     name="VK_IMAGE_TYPE_1D"/>
+        <enum value="1"     name="VK_IMAGE_TYPE_2D"/>
+        <enum value="2"     name="VK_IMAGE_TYPE_3D"/>
+    </enums>
+    <enums name="VkImageTiling" type="enum">
+        <enum value="0"     name="VK_IMAGE_TILING_OPTIMAL"/>
+        <enum value="1"     name="VK_IMAGE_TILING_LINEAR"/>
+    </enums>
+    <enums name="VkImageViewType" type="enum">
+        <enum value="0"     name="VK_IMAGE_VIEW_TYPE_1D"/>
+        <enum value="1"     name="VK_IMAGE_VIEW_TYPE_2D"/>
+        <enum value="2"     name="VK_IMAGE_VIEW_TYPE_3D"/>
+        <enum value="3"     name="VK_IMAGE_VIEW_TYPE_CUBE"/>
+        <enum value="4"     name="VK_IMAGE_VIEW_TYPE_1D_ARRAY"/>
+        <enum value="5"     name="VK_IMAGE_VIEW_TYPE_2D_ARRAY"/>
+        <enum value="6"     name="VK_IMAGE_VIEW_TYPE_CUBE_ARRAY"/>
+    </enums>
+    <enums name="VkCommandBufferLevel" type="enum">
+        <enum value="0"     name="VK_COMMAND_BUFFER_LEVEL_PRIMARY"/>
+        <enum value="1"     name="VK_COMMAND_BUFFER_LEVEL_SECONDARY"/>
+    </enums>
+    <enums name="VkComponentSwizzle" type="enum">
+        <enum value="0"     name="VK_COMPONENT_SWIZZLE_IDENTITY"/>
+        <enum value="1"     name="VK_COMPONENT_SWIZZLE_ZERO"/>
+        <enum value="2"     name="VK_COMPONENT_SWIZZLE_ONE"/>
+        <enum value="3"     name="VK_COMPONENT_SWIZZLE_R"/>
+        <enum value="4"     name="VK_COMPONENT_SWIZZLE_G"/>
+        <enum value="5"     name="VK_COMPONENT_SWIZZLE_B"/>
+        <enum value="6"     name="VK_COMPONENT_SWIZZLE_A"/>
+    </enums>
+    <enums name="VkDescriptorType" type="enum">
+        <enum value="0"     name="VK_DESCRIPTOR_TYPE_SAMPLER"/>
+        <enum value="1"     name="VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER"/>
+        <enum value="2"     name="VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE"/>
+        <enum value="3"     name="VK_DESCRIPTOR_TYPE_STORAGE_IMAGE"/>
+        <enum value="4"     name="VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER"/>
+        <enum value="5"     name="VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER"/>
+        <enum value="6"     name="VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER"/>
+        <enum value="7"     name="VK_DESCRIPTOR_TYPE_STORAGE_BUFFER"/>
+        <enum value="8"     name="VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC"/>
+        <enum value="9"     name="VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC"/>
+        <enum value="10"    name="VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT"/>
+    </enums>
+    <enums name="VkQueryType" type="enum">
+        <enum value="0"     name="VK_QUERY_TYPE_OCCLUSION"/>
+        <enum value="1"     name="VK_QUERY_TYPE_PIPELINE_STATISTICS"                 comment="Optional"/>
+        <enum value="2"     name="VK_QUERY_TYPE_TIMESTAMP"/>
+    </enums>
+    <enums name="VkBorderColor" type="enum">
+        <enum value="0"     name="VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK"/>
+        <enum value="1"     name="VK_BORDER_COLOR_INT_TRANSPARENT_BLACK"/>
+        <enum value="2"     name="VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK"/>
+        <enum value="3"     name="VK_BORDER_COLOR_INT_OPAQUE_BLACK"/>
+        <enum value="4"     name="VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE"/>
+        <enum value="5"     name="VK_BORDER_COLOR_INT_OPAQUE_WHITE"/>
+    </enums>
+    <enums name="VkPipelineBindPoint" type="enum">
+        <enum value="0"     name="VK_PIPELINE_BIND_POINT_GRAPHICS"/>
+        <enum value="1"     name="VK_PIPELINE_BIND_POINT_COMPUTE"/>
+    </enums>
+    <enums name="VkPipelineCacheHeaderVersion" type="enum">
+        <enum value="1"     name="VK_PIPELINE_CACHE_HEADER_VERSION_ONE"/>
+    </enums>
+    <enums name="VkPrimitiveTopology" type="enum">
+        <enum value="0"     name="VK_PRIMITIVE_TOPOLOGY_POINT_LIST"/>
+        <enum value="1"     name="VK_PRIMITIVE_TOPOLOGY_LINE_LIST"/>
+        <enum value="2"     name="VK_PRIMITIVE_TOPOLOGY_LINE_STRIP"/>
+        <enum value="3"     name="VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST"/>
+        <enum value="4"     name="VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP"/>
+        <enum value="5"     name="VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN"/>
+        <enum value="6"     name="VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY"/>
+        <enum value="7"     name="VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY"/>
+        <enum value="8"     name="VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY"/>
+        <enum value="9"     name="VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY"/>
+        <enum value="10"    name="VK_PRIMITIVE_TOPOLOGY_PATCH_LIST"/>
+    </enums>
+    <enums name="VkSharingMode" type="enum">
+        <enum value="0"     name="VK_SHARING_MODE_EXCLUSIVE"/>
+        <enum value="1"     name="VK_SHARING_MODE_CONCURRENT"/>
+    </enums>
+    <enums name="VkIndexType" type="enum">
+        <enum value="0"     name="VK_INDEX_TYPE_UINT16"/>
+        <enum value="1"     name="VK_INDEX_TYPE_UINT32"/>
+    </enums>
+    <enums name="VkFilter" type="enum">
+        <enum value="0"     name="VK_FILTER_NEAREST"/>
+        <enum value="1"     name="VK_FILTER_LINEAR"/>
+    </enums>
+    <enums name="VkSamplerMipmapMode" type="enum">
+        <enum value="0"     name="VK_SAMPLER_MIPMAP_MODE_NEAREST"                        comment="Choose nearest mip level"/>
+        <enum value="1"     name="VK_SAMPLER_MIPMAP_MODE_LINEAR"                         comment="Linear filter between mip levels"/>
+    </enums>
+    <enums name="VkSamplerAddressMode" type="enum">
+        <enum value="0"     name="VK_SAMPLER_ADDRESS_MODE_REPEAT"/>
+        <enum value="1"     name="VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT"/>
+        <enum value="2"     name="VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE"/>
+        <enum value="3"     name="VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER"/>
+            <comment>
+                value="4" reserved for VK_KHR_sampler_mirror_clamp_to_edge
+                enum VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE; do not
+                alias!
+            </comment>
+    </enums>
+    <enums name="VkCompareOp" type="enum">
+        <enum value="0"     name="VK_COMPARE_OP_NEVER"/>
+        <enum value="1"     name="VK_COMPARE_OP_LESS"/>
+        <enum value="2"     name="VK_COMPARE_OP_EQUAL"/>
+        <enum value="3"     name="VK_COMPARE_OP_LESS_OR_EQUAL"/>
+        <enum value="4"     name="VK_COMPARE_OP_GREATER"/>
+        <enum value="5"     name="VK_COMPARE_OP_NOT_EQUAL"/>
+        <enum value="6"     name="VK_COMPARE_OP_GREATER_OR_EQUAL"/>
+        <enum value="7"     name="VK_COMPARE_OP_ALWAYS"/>
+    </enums>
+    <enums name="VkPolygonMode" type="enum">
+        <enum value="0"     name="VK_POLYGON_MODE_FILL"/>
+        <enum value="1"     name="VK_POLYGON_MODE_LINE"/>
+        <enum value="2"     name="VK_POLYGON_MODE_POINT"/>
+    </enums>
+    <enums name="VkCullModeFlagBits" type="bitmask">
+        <enum value="0"     name="VK_CULL_MODE_NONE"/>
+        <enum bitpos="0"    name="VK_CULL_MODE_FRONT_BIT"/>
+        <enum bitpos="1"    name="VK_CULL_MODE_BACK_BIT"/>
+        <enum value="0x00000003" name="VK_CULL_MODE_FRONT_AND_BACK"/>
+    </enums>
+    <enums name="VkFrontFace" type="enum">
+        <enum value="0"     name="VK_FRONT_FACE_COUNTER_CLOCKWISE"/>
+        <enum value="1"     name="VK_FRONT_FACE_CLOCKWISE"/>
+    </enums>
+    <enums name="VkBlendFactor" type="enum">
+        <enum value="0"     name="VK_BLEND_FACTOR_ZERO"/>
+        <enum value="1"     name="VK_BLEND_FACTOR_ONE"/>
+        <enum value="2"     name="VK_BLEND_FACTOR_SRC_COLOR"/>
+        <enum value="3"     name="VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR"/>
+        <enum value="4"     name="VK_BLEND_FACTOR_DST_COLOR"/>
+        <enum value="5"     name="VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR"/>
+        <enum value="6"     name="VK_BLEND_FACTOR_SRC_ALPHA"/>
+        <enum value="7"     name="VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA"/>
+        <enum value="8"     name="VK_BLEND_FACTOR_DST_ALPHA"/>
+        <enum value="9"     name="VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA"/>
+        <enum value="10"    name="VK_BLEND_FACTOR_CONSTANT_COLOR"/>
+        <enum value="11"    name="VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR"/>
+        <enum value="12"    name="VK_BLEND_FACTOR_CONSTANT_ALPHA"/>
+        <enum value="13"    name="VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA"/>
+        <enum value="14"    name="VK_BLEND_FACTOR_SRC_ALPHA_SATURATE"/>
+        <enum value="15"    name="VK_BLEND_FACTOR_SRC1_COLOR"/>
+        <enum value="16"    name="VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR"/>
+        <enum value="17"    name="VK_BLEND_FACTOR_SRC1_ALPHA"/>
+        <enum value="18"    name="VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA"/>
+    </enums>
+    <enums name="VkBlendOp" type="enum">
+        <enum value="0"     name="VK_BLEND_OP_ADD"/>
+        <enum value="1"     name="VK_BLEND_OP_SUBTRACT"/>
+        <enum value="2"     name="VK_BLEND_OP_REVERSE_SUBTRACT"/>
+        <enum value="3"     name="VK_BLEND_OP_MIN"/>
+        <enum value="4"     name="VK_BLEND_OP_MAX"/>
+    </enums>
+    <enums name="VkStencilOp" type="enum">
+        <enum value="0"     name="VK_STENCIL_OP_KEEP"/>
+        <enum value="1"     name="VK_STENCIL_OP_ZERO"/>
+        <enum value="2"     name="VK_STENCIL_OP_REPLACE"/>
+        <enum value="3"     name="VK_STENCIL_OP_INCREMENT_AND_CLAMP"/>
+        <enum value="4"     name="VK_STENCIL_OP_DECREMENT_AND_CLAMP"/>
+        <enum value="5"     name="VK_STENCIL_OP_INVERT"/>
+        <enum value="6"     name="VK_STENCIL_OP_INCREMENT_AND_WRAP"/>
+        <enum value="7"     name="VK_STENCIL_OP_DECREMENT_AND_WRAP"/>
+    </enums>
+    <enums name="VkLogicOp" type="enum">
+        <enum value="0"     name="VK_LOGIC_OP_CLEAR"/>
+        <enum value="1"     name="VK_LOGIC_OP_AND"/>
+        <enum value="2"     name="VK_LOGIC_OP_AND_REVERSE"/>
+        <enum value="3"     name="VK_LOGIC_OP_COPY"/>
+        <enum value="4"     name="VK_LOGIC_OP_AND_INVERTED"/>
+        <enum value="5"     name="VK_LOGIC_OP_NO_OP"/>
+        <enum value="6"     name="VK_LOGIC_OP_XOR"/>
+        <enum value="7"     name="VK_LOGIC_OP_OR"/>
+        <enum value="8"     name="VK_LOGIC_OP_NOR"/>
+        <enum value="9"     name="VK_LOGIC_OP_EQUIVALENT"/>
+        <enum value="10"    name="VK_LOGIC_OP_INVERT"/>
+        <enum value="11"    name="VK_LOGIC_OP_OR_REVERSE"/>
+        <enum value="12"    name="VK_LOGIC_OP_COPY_INVERTED"/>
+        <enum value="13"    name="VK_LOGIC_OP_OR_INVERTED"/>
+        <enum value="14"    name="VK_LOGIC_OP_NAND"/>
+        <enum value="15"    name="VK_LOGIC_OP_SET"/>
+    </enums>
+    <enums name="VkInternalAllocationType" type="enum">
+        <enum value="0"     name="VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE"/>
+    </enums>
+    <enums name="VkSystemAllocationScope" type="enum">
+        <enum value="0"     name="VK_SYSTEM_ALLOCATION_SCOPE_COMMAND"/>
+        <enum value="1"     name="VK_SYSTEM_ALLOCATION_SCOPE_OBJECT"/>
+        <enum value="2"     name="VK_SYSTEM_ALLOCATION_SCOPE_CACHE"/>
+        <enum value="3"     name="VK_SYSTEM_ALLOCATION_SCOPE_DEVICE"/>
+        <enum value="4"     name="VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE"/>
+    </enums>
+    <enums name="VkPhysicalDeviceType" type="enum">
+        <enum value="0"     name="VK_PHYSICAL_DEVICE_TYPE_OTHER"/>
+        <enum value="1"     name="VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU"/>
+        <enum value="2"     name="VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU"/>
+        <enum value="3"     name="VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU"/>
+        <enum value="4"     name="VK_PHYSICAL_DEVICE_TYPE_CPU"/>
+    </enums>
+    <enums name="VkVertexInputRate" type="enum">
+        <enum value="0"     name="VK_VERTEX_INPUT_RATE_VERTEX"/>
+        <enum value="1"     name="VK_VERTEX_INPUT_RATE_INSTANCE"/>
+    </enums>
+    <enums name="VkFormat" type="enum" comment="Vulkan format definitions">
+        <enum value="0"     name="VK_FORMAT_UNDEFINED"/>
+        <enum value="1"     name="VK_FORMAT_R4G4_UNORM_PACK8"/>
+        <enum value="2"     name="VK_FORMAT_R4G4B4A4_UNORM_PACK16"/>
+        <enum value="3"     name="VK_FORMAT_B4G4R4A4_UNORM_PACK16"/>
+        <enum value="4"     name="VK_FORMAT_R5G6B5_UNORM_PACK16"/>
+        <enum value="5"     name="VK_FORMAT_B5G6R5_UNORM_PACK16"/>
+        <enum value="6"     name="VK_FORMAT_R5G5B5A1_UNORM_PACK16"/>
+        <enum value="7"     name="VK_FORMAT_B5G5R5A1_UNORM_PACK16"/>
+        <enum value="8"     name="VK_FORMAT_A1R5G5B5_UNORM_PACK16"/>
+        <enum value="9"     name="VK_FORMAT_R8_UNORM"/>
+        <enum value="10"    name="VK_FORMAT_R8_SNORM"/>
+        <enum value="11"    name="VK_FORMAT_R8_USCALED"/>
+        <enum value="12"    name="VK_FORMAT_R8_SSCALED"/>
+        <enum value="13"    name="VK_FORMAT_R8_UINT"/>
+        <enum value="14"    name="VK_FORMAT_R8_SINT"/>
+        <enum value="15"    name="VK_FORMAT_R8_SRGB"/>
+        <enum value="16"    name="VK_FORMAT_R8G8_UNORM"/>
+        <enum value="17"    name="VK_FORMAT_R8G8_SNORM"/>
+        <enum value="18"    name="VK_FORMAT_R8G8_USCALED"/>
+        <enum value="19"    name="VK_FORMAT_R8G8_SSCALED"/>
+        <enum value="20"    name="VK_FORMAT_R8G8_UINT"/>
+        <enum value="21"    name="VK_FORMAT_R8G8_SINT"/>
+        <enum value="22"    name="VK_FORMAT_R8G8_SRGB"/>
+        <enum value="23"    name="VK_FORMAT_R8G8B8_UNORM"/>
+        <enum value="24"    name="VK_FORMAT_R8G8B8_SNORM"/>
+        <enum value="25"    name="VK_FORMAT_R8G8B8_USCALED"/>
+        <enum value="26"    name="VK_FORMAT_R8G8B8_SSCALED"/>
+        <enum value="27"    name="VK_FORMAT_R8G8B8_UINT"/>
+        <enum value="28"    name="VK_FORMAT_R8G8B8_SINT"/>
+        <enum value="29"    name="VK_FORMAT_R8G8B8_SRGB"/>
+        <enum value="30"    name="VK_FORMAT_B8G8R8_UNORM"/>
+        <enum value="31"    name="VK_FORMAT_B8G8R8_SNORM"/>
+        <enum value="32"    name="VK_FORMAT_B8G8R8_USCALED"/>
+        <enum value="33"    name="VK_FORMAT_B8G8R8_SSCALED"/>
+        <enum value="34"    name="VK_FORMAT_B8G8R8_UINT"/>
+        <enum value="35"    name="VK_FORMAT_B8G8R8_SINT"/>
+        <enum value="36"    name="VK_FORMAT_B8G8R8_SRGB"/>
+        <enum value="37"    name="VK_FORMAT_R8G8B8A8_UNORM"/>
+        <enum value="38"    name="VK_FORMAT_R8G8B8A8_SNORM"/>
+        <enum value="39"    name="VK_FORMAT_R8G8B8A8_USCALED"/>
+        <enum value="40"    name="VK_FORMAT_R8G8B8A8_SSCALED"/>
+        <enum value="41"    name="VK_FORMAT_R8G8B8A8_UINT"/>
+        <enum value="42"    name="VK_FORMAT_R8G8B8A8_SINT"/>
+        <enum value="43"    name="VK_FORMAT_R8G8B8A8_SRGB"/>
+        <enum value="44"    name="VK_FORMAT_B8G8R8A8_UNORM"/>
+        <enum value="45"    name="VK_FORMAT_B8G8R8A8_SNORM"/>
+        <enum value="46"    name="VK_FORMAT_B8G8R8A8_USCALED"/>
+        <enum value="47"    name="VK_FORMAT_B8G8R8A8_SSCALED"/>
+        <enum value="48"    name="VK_FORMAT_B8G8R8A8_UINT"/>
+        <enum value="49"    name="VK_FORMAT_B8G8R8A8_SINT"/>
+        <enum value="50"    name="VK_FORMAT_B8G8R8A8_SRGB"/>
+        <enum value="51"    name="VK_FORMAT_A8B8G8R8_UNORM_PACK32"/>
+        <enum value="52"    name="VK_FORMAT_A8B8G8R8_SNORM_PACK32"/>
+        <enum value="53"    name="VK_FORMAT_A8B8G8R8_USCALED_PACK32"/>
+        <enum value="54"    name="VK_FORMAT_A8B8G8R8_SSCALED_PACK32"/>
+        <enum value="55"    name="VK_FORMAT_A8B8G8R8_UINT_PACK32"/>
+        <enum value="56"    name="VK_FORMAT_A8B8G8R8_SINT_PACK32"/>
+        <enum value="57"    name="VK_FORMAT_A8B8G8R8_SRGB_PACK32"/>
+        <enum value="58"    name="VK_FORMAT_A2R10G10B10_UNORM_PACK32"/>
+        <enum value="59"    name="VK_FORMAT_A2R10G10B10_SNORM_PACK32"/>
+        <enum value="60"    name="VK_FORMAT_A2R10G10B10_USCALED_PACK32"/>
+        <enum value="61"    name="VK_FORMAT_A2R10G10B10_SSCALED_PACK32"/>
+        <enum value="62"    name="VK_FORMAT_A2R10G10B10_UINT_PACK32"/>
+        <enum value="63"    name="VK_FORMAT_A2R10G10B10_SINT_PACK32"/>
+        <enum value="64"    name="VK_FORMAT_A2B10G10R10_UNORM_PACK32"/>
+        <enum value="65"    name="VK_FORMAT_A2B10G10R10_SNORM_PACK32"/>
+        <enum value="66"    name="VK_FORMAT_A2B10G10R10_USCALED_PACK32"/>
+        <enum value="67"    name="VK_FORMAT_A2B10G10R10_SSCALED_PACK32"/>
+        <enum value="68"    name="VK_FORMAT_A2B10G10R10_UINT_PACK32"/>
+        <enum value="69"    name="VK_FORMAT_A2B10G10R10_SINT_PACK32"/>
+        <enum value="70"    name="VK_FORMAT_R16_UNORM"/>
+        <enum value="71"    name="VK_FORMAT_R16_SNORM"/>
+        <enum value="72"    name="VK_FORMAT_R16_USCALED"/>
+        <enum value="73"    name="VK_FORMAT_R16_SSCALED"/>
+        <enum value="74"    name="VK_FORMAT_R16_UINT"/>
+        <enum value="75"    name="VK_FORMAT_R16_SINT"/>
+        <enum value="76"    name="VK_FORMAT_R16_SFLOAT"/>
+        <enum value="77"    name="VK_FORMAT_R16G16_UNORM"/>
+        <enum value="78"    name="VK_FORMAT_R16G16_SNORM"/>
+        <enum value="79"    name="VK_FORMAT_R16G16_USCALED"/>
+        <enum value="80"    name="VK_FORMAT_R16G16_SSCALED"/>
+        <enum value="81"    name="VK_FORMAT_R16G16_UINT"/>
+        <enum value="82"    name="VK_FORMAT_R16G16_SINT"/>
+        <enum value="83"    name="VK_FORMAT_R16G16_SFLOAT"/>
+        <enum value="84"    name="VK_FORMAT_R16G16B16_UNORM"/>
+        <enum value="85"    name="VK_FORMAT_R16G16B16_SNORM"/>
+        <enum value="86"    name="VK_FORMAT_R16G16B16_USCALED"/>
+        <enum value="87"    name="VK_FORMAT_R16G16B16_SSCALED"/>
+        <enum value="88"    name="VK_FORMAT_R16G16B16_UINT"/>
+        <enum value="89"    name="VK_FORMAT_R16G16B16_SINT"/>
+        <enum value="90"    name="VK_FORMAT_R16G16B16_SFLOAT"/>
+        <enum value="91"    name="VK_FORMAT_R16G16B16A16_UNORM"/>
+        <enum value="92"    name="VK_FORMAT_R16G16B16A16_SNORM"/>
+        <enum value="93"    name="VK_FORMAT_R16G16B16A16_USCALED"/>
+        <enum value="94"    name="VK_FORMAT_R16G16B16A16_SSCALED"/>
+        <enum value="95"    name="VK_FORMAT_R16G16B16A16_UINT"/>
+        <enum value="96"    name="VK_FORMAT_R16G16B16A16_SINT"/>
+        <enum value="97"    name="VK_FORMAT_R16G16B16A16_SFLOAT"/>
+        <enum value="98"    name="VK_FORMAT_R32_UINT"/>
+        <enum value="99"    name="VK_FORMAT_R32_SINT"/>
+        <enum value="100"   name="VK_FORMAT_R32_SFLOAT"/>
+        <enum value="101"   name="VK_FORMAT_R32G32_UINT"/>
+        <enum value="102"   name="VK_FORMAT_R32G32_SINT"/>
+        <enum value="103"   name="VK_FORMAT_R32G32_SFLOAT"/>
+        <enum value="104"   name="VK_FORMAT_R32G32B32_UINT"/>
+        <enum value="105"   name="VK_FORMAT_R32G32B32_SINT"/>
+        <enum value="106"   name="VK_FORMAT_R32G32B32_SFLOAT"/>
+        <enum value="107"   name="VK_FORMAT_R32G32B32A32_UINT"/>
+        <enum value="108"   name="VK_FORMAT_R32G32B32A32_SINT"/>
+        <enum value="109"   name="VK_FORMAT_R32G32B32A32_SFLOAT"/>
+        <enum value="110"   name="VK_FORMAT_R64_UINT"/>
+        <enum value="111"   name="VK_FORMAT_R64_SINT"/>
+        <enum value="112"   name="VK_FORMAT_R64_SFLOAT"/>
+        <enum value="113"   name="VK_FORMAT_R64G64_UINT"/>
+        <enum value="114"   name="VK_FORMAT_R64G64_SINT"/>
+        <enum value="115"   name="VK_FORMAT_R64G64_SFLOAT"/>
+        <enum value="116"   name="VK_FORMAT_R64G64B64_UINT"/>
+        <enum value="117"   name="VK_FORMAT_R64G64B64_SINT"/>
+        <enum value="118"   name="VK_FORMAT_R64G64B64_SFLOAT"/>
+        <enum value="119"   name="VK_FORMAT_R64G64B64A64_UINT"/>
+        <enum value="120"   name="VK_FORMAT_R64G64B64A64_SINT"/>
+        <enum value="121"   name="VK_FORMAT_R64G64B64A64_SFLOAT"/>
+        <enum value="122"   name="VK_FORMAT_B10G11R11_UFLOAT_PACK32"/>
+        <enum value="123"   name="VK_FORMAT_E5B9G9R9_UFLOAT_PACK32"/>
+        <enum value="124"   name="VK_FORMAT_D16_UNORM"/>
+        <enum value="125"   name="VK_FORMAT_X8_D24_UNORM_PACK32"/>
+        <enum value="126"   name="VK_FORMAT_D32_SFLOAT"/>
+        <enum value="127"   name="VK_FORMAT_S8_UINT"/>
+        <enum value="128"   name="VK_FORMAT_D16_UNORM_S8_UINT"/>
+        <enum value="129"   name="VK_FORMAT_D24_UNORM_S8_UINT"/>
+        <enum value="130"   name="VK_FORMAT_D32_SFLOAT_S8_UINT"/>
+        <enum value="131"   name="VK_FORMAT_BC1_RGB_UNORM_BLOCK"/>
+        <enum value="132"   name="VK_FORMAT_BC1_RGB_SRGB_BLOCK"/>
+        <enum value="133"   name="VK_FORMAT_BC1_RGBA_UNORM_BLOCK"/>
+        <enum value="134"   name="VK_FORMAT_BC1_RGBA_SRGB_BLOCK"/>
+        <enum value="135"   name="VK_FORMAT_BC2_UNORM_BLOCK"/>
+        <enum value="136"   name="VK_FORMAT_BC2_SRGB_BLOCK"/>
+        <enum value="137"   name="VK_FORMAT_BC3_UNORM_BLOCK"/>
+        <enum value="138"   name="VK_FORMAT_BC3_SRGB_BLOCK"/>
+        <enum value="139"   name="VK_FORMAT_BC4_UNORM_BLOCK"/>
+        <enum value="140"   name="VK_FORMAT_BC4_SNORM_BLOCK"/>
+        <enum value="141"   name="VK_FORMAT_BC5_UNORM_BLOCK"/>
+        <enum value="142"   name="VK_FORMAT_BC5_SNORM_BLOCK"/>
+        <enum value="143"   name="VK_FORMAT_BC6H_UFLOAT_BLOCK"/>
+        <enum value="144"   name="VK_FORMAT_BC6H_SFLOAT_BLOCK"/>
+        <enum value="145"   name="VK_FORMAT_BC7_UNORM_BLOCK"/>
+        <enum value="146"   name="VK_FORMAT_BC7_SRGB_BLOCK"/>
+        <enum value="147"   name="VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK"/>
+        <enum value="148"   name="VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK"/>
+        <enum value="149"   name="VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK"/>
+        <enum value="150"   name="VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK"/>
+        <enum value="151"   name="VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK"/>
+        <enum value="152"   name="VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK"/>
+        <enum value="153"   name="VK_FORMAT_EAC_R11_UNORM_BLOCK"/>
+        <enum value="154"   name="VK_FORMAT_EAC_R11_SNORM_BLOCK"/>
+        <enum value="155"   name="VK_FORMAT_EAC_R11G11_UNORM_BLOCK"/>
+        <enum value="156"   name="VK_FORMAT_EAC_R11G11_SNORM_BLOCK"/>
+        <enum value="157"   name="VK_FORMAT_ASTC_4x4_UNORM_BLOCK"/>
+        <enum value="158"   name="VK_FORMAT_ASTC_4x4_SRGB_BLOCK"/>
+        <enum value="159"   name="VK_FORMAT_ASTC_5x4_UNORM_BLOCK"/>
+        <enum value="160"   name="VK_FORMAT_ASTC_5x4_SRGB_BLOCK"/>
+        <enum value="161"   name="VK_FORMAT_ASTC_5x5_UNORM_BLOCK"/>
+        <enum value="162"   name="VK_FORMAT_ASTC_5x5_SRGB_BLOCK"/>
+        <enum value="163"   name="VK_FORMAT_ASTC_6x5_UNORM_BLOCK"/>
+        <enum value="164"   name="VK_FORMAT_ASTC_6x5_SRGB_BLOCK"/>
+        <enum value="165"   name="VK_FORMAT_ASTC_6x6_UNORM_BLOCK"/>
+        <enum value="166"   name="VK_FORMAT_ASTC_6x6_SRGB_BLOCK"/>
+        <enum value="167"   name="VK_FORMAT_ASTC_8x5_UNORM_BLOCK"/>
+        <enum value="168"   name="VK_FORMAT_ASTC_8x5_SRGB_BLOCK"/>
+        <enum value="169"   name="VK_FORMAT_ASTC_8x6_UNORM_BLOCK"/>
+        <enum value="170"   name="VK_FORMAT_ASTC_8x6_SRGB_BLOCK"/>
+        <enum value="171"   name="VK_FORMAT_ASTC_8x8_UNORM_BLOCK"/>
+        <enum value="172"   name="VK_FORMAT_ASTC_8x8_SRGB_BLOCK"/>
+        <enum value="173"   name="VK_FORMAT_ASTC_10x5_UNORM_BLOCK"/>
+        <enum value="174"   name="VK_FORMAT_ASTC_10x5_SRGB_BLOCK"/>
+        <enum value="175"   name="VK_FORMAT_ASTC_10x6_UNORM_BLOCK"/>
+        <enum value="176"   name="VK_FORMAT_ASTC_10x6_SRGB_BLOCK"/>
+        <enum value="177"   name="VK_FORMAT_ASTC_10x8_UNORM_BLOCK"/>
+        <enum value="178"   name="VK_FORMAT_ASTC_10x8_SRGB_BLOCK"/>
+        <enum value="179"   name="VK_FORMAT_ASTC_10x10_UNORM_BLOCK"/>
+        <enum value="180"   name="VK_FORMAT_ASTC_10x10_SRGB_BLOCK"/>
+        <enum value="181"   name="VK_FORMAT_ASTC_12x10_UNORM_BLOCK"/>
+        <enum value="182"   name="VK_FORMAT_ASTC_12x10_SRGB_BLOCK"/>
+        <enum value="183"   name="VK_FORMAT_ASTC_12x12_UNORM_BLOCK"/>
+        <enum value="184"   name="VK_FORMAT_ASTC_12x12_SRGB_BLOCK"/>
+    </enums>
+    <enums name="VkStructureType" type="enum" comment="Structure type enumerant">
+        <enum value="0"     name="VK_STRUCTURE_TYPE_APPLICATION_INFO"/>
+        <enum value="1"     name="VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO"/>
+        <enum value="2"     name="VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO"/>
+        <enum value="3"     name="VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO"/>
+        <enum value="4"     name="VK_STRUCTURE_TYPE_SUBMIT_INFO"/>
+        <enum value="5"     name="VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO"/>
+        <enum value="6"     name="VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE"/>
+        <enum value="7"     name="VK_STRUCTURE_TYPE_BIND_SPARSE_INFO"/>
+        <enum value="8"     name="VK_STRUCTURE_TYPE_FENCE_CREATE_INFO"/>
+        <enum value="9"     name="VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO"/>
+        <enum value="10"    name="VK_STRUCTURE_TYPE_EVENT_CREATE_INFO"/>
+        <enum value="11"    name="VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO"/>
+        <enum value="12"    name="VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO"/>
+        <enum value="13"    name="VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO"/>
+        <enum value="14"    name="VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO"/>
+        <enum value="15"    name="VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO"/>
+        <enum value="16"    name="VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO"/>
+        <enum value="17"    name="VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO"/>
+        <enum value="18"    name="VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO"/>
+        <enum value="19"    name="VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO"/>
+        <enum value="20"    name="VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO"/>
+        <enum value="21"    name="VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO"/>
+        <enum value="22"    name="VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO"/>
+        <enum value="23"    name="VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO"/>
+        <enum value="24"    name="VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO"/>
+        <enum value="25"    name="VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO"/>
+        <enum value="26"    name="VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO"/>
+        <enum value="27"    name="VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO"/>
+        <enum value="28"    name="VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO"/>
+        <enum value="29"    name="VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO"/>
+        <enum value="30"    name="VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO"/>
+        <enum value="31"    name="VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO"/>
+        <enum value="32"    name="VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO"/>
+        <enum value="33"    name="VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO"/>
+        <enum value="34"    name="VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO"/>
+        <enum value="35"    name="VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET"/>
+        <enum value="36"    name="VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET"/>
+        <enum value="37"    name="VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO"/>
+        <enum value="38"    name="VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO"/>
+        <enum value="39"    name="VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO"/>
+        <enum value="40"    name="VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO"/>
+        <enum value="41"    name="VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO"/>
+        <enum value="42"    name="VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO"/>
+        <enum value="43"    name="VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO"/>
+        <enum value="44"    name="VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER"/>
+        <enum value="45"    name="VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER"/>
+        <enum value="46"    name="VK_STRUCTURE_TYPE_MEMORY_BARRIER"/>
+        <enum value="47"    name="VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO" comment="Reserved for internal use by the loader, layers, and ICDs"/>
+        <enum value="48"    name="VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO" comment="Reserved for internal use by the loader, layers, and ICDs"/>
+    </enums>
+    <enums name="VkSubpassContents" type="enum">
+        <enum value="0"     name="VK_SUBPASS_CONTENTS_INLINE"/>
+        <enum value="1"     name="VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS"/>
+    </enums>
+    <enums name="VkResult" type="enum" comment="API result codes">
+            <comment>Return codes (positive values)</comment>
+        <enum value="0"     name="VK_SUCCESS" comment="Command completed successfully"/>
+        <enum value="1"     name="VK_NOT_READY" comment="A fence or query has not yet completed"/>
+        <enum value="2"     name="VK_TIMEOUT" comment="A wait operation has not completed in the specified time"/>
+        <enum value="3"     name="VK_EVENT_SET" comment="An event is signaled"/>
+        <enum value="4"     name="VK_EVENT_RESET" comment="An event is unsignaled"/>
+        <enum value="5"     name="VK_INCOMPLETE" comment="A return array was too small for the result"/>
+            <comment>Error codes (negative values)</comment>
+        <enum value="-1"    name="VK_ERROR_OUT_OF_HOST_MEMORY" comment="A host memory allocation has failed"/>
+        <enum value="-2"    name="VK_ERROR_OUT_OF_DEVICE_MEMORY" comment="A device memory allocation has failed"/>
+        <enum value="-3"    name="VK_ERROR_INITIALIZATION_FAILED" comment="Initialization of a object has failed"/>
+        <enum value="-4"    name="VK_ERROR_DEVICE_LOST" comment="The logical device has been lost. See &lt;&lt;devsandqueues-lost-device&gt;&gt;"/>
+        <enum value="-5"    name="VK_ERROR_MEMORY_MAP_FAILED" comment="Mapping of a memory object has failed"/>
+        <enum value="-6"    name="VK_ERROR_LAYER_NOT_PRESENT" comment="Layer specified does not exist"/>
+        <enum value="-7"    name="VK_ERROR_EXTENSION_NOT_PRESENT" comment="Extension specified does not exist"/>
+        <enum value="-8"    name="VK_ERROR_FEATURE_NOT_PRESENT" comment="Requested feature is not available on this device"/>
+        <enum value="-9"    name="VK_ERROR_INCOMPATIBLE_DRIVER" comment="Unable to find a Vulkan driver"/>
+        <enum value="-10"   name="VK_ERROR_TOO_MANY_OBJECTS" comment="Too many objects of the type have already been created"/>
+        <enum value="-11"   name="VK_ERROR_FORMAT_NOT_SUPPORTED" comment="Requested format is not supported on this device"/>
+        <enum value="-12"   name="VK_ERROR_FRAGMENTED_POOL" comment="A requested pool allocation has failed due to fragmentation of the pool's memory"/>
+            <unused start="-13" comment="This is the next unused available error code (negative value)"/>
+    </enums>
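For context only (not part of the imported registry): the two blocks above are the enumerants an application touches most directly — every Vulkan create-info structure is tagged with its VkStructureType in sType, and every fallible command reports one of the VkResult codes, with negative values being errors. A minimal, illustrative C sketch of that pattern, using only core Vulkan 1.0 calls:

    #include <stdio.h>
    #include <vulkan/vulkan.h>

    int main(void) {
        /* Each *CreateInfo struct carries its VkStructureType in sType. */
        VkApplicationInfo app_info = {
            .sType = VK_STRUCTURE_TYPE_APPLICATION_INFO,
            .pApplicationName = "enum-usage-sketch",
            .apiVersion = VK_API_VERSION_1_0,
        };
        VkInstanceCreateInfo create_info = {
            .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
            .pApplicationInfo = &app_info,
        };

        VkInstance instance = VK_NULL_HANDLE;
        /* Commands that can fail return a VkResult; negative codes are errors. */
        VkResult result = vkCreateInstance(&create_info, NULL, &instance);
        if (result == VK_SUCCESS) {
            vkDestroyInstance(instance, NULL);
        } else if (result == VK_ERROR_INCOMPATIBLE_DRIVER) {
            fprintf(stderr, "no compatible Vulkan driver found\n");
        } else {
            fprintf(stderr, "vkCreateInstance failed: %d\n", (int)result);
        }
        return 0;
    }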
+    <enums name="VkDynamicState" type="enum">
+        <enum value="0"     name="VK_DYNAMIC_STATE_VIEWPORT"/>
+        <enum value="1"     name="VK_DYNAMIC_STATE_SCISSOR"/>
+        <enum value="2"     name="VK_DYNAMIC_STATE_LINE_WIDTH"/>
+        <enum value="3"     name="VK_DYNAMIC_STATE_DEPTH_BIAS"/>
+        <enum value="4"     name="VK_DYNAMIC_STATE_BLEND_CONSTANTS"/>
+        <enum value="5"     name="VK_DYNAMIC_STATE_DEPTH_BOUNDS"/>
+        <enum value="6"     name="VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK"/>
+        <enum value="7"     name="VK_DYNAMIC_STATE_STENCIL_WRITE_MASK"/>
+        <enum value="8"     name="VK_DYNAMIC_STATE_STENCIL_REFERENCE"/>
+    </enums>
+    <enums name="VkDescriptorUpdateTemplateType" type="enum">
+        <enum value="0"     name="VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET"   comment="Create descriptor update template for descriptor set updates"/>
+    </enums>
+    <enums name="VkObjectType" type="enum" comment="Enums to track objects of various types">
+        <enum value="0"     name="VK_OBJECT_TYPE_UNKNOWN"/>
+        <enum value="1"     name="VK_OBJECT_TYPE_INSTANCE"                           comment="VkInstance"/>
+        <enum value="2"     name="VK_OBJECT_TYPE_PHYSICAL_DEVICE"                    comment="VkPhysicalDevice"/>
+        <enum value="3"     name="VK_OBJECT_TYPE_DEVICE"                             comment="VkDevice"/>
+        <enum value="4"     name="VK_OBJECT_TYPE_QUEUE"                              comment="VkQueue"/>
+        <enum value="5"     name="VK_OBJECT_TYPE_SEMAPHORE"                          comment="VkSemaphore"/>
+        <enum value="6"     name="VK_OBJECT_TYPE_COMMAND_BUFFER"                     comment="VkCommandBuffer"/>
+        <enum value="7"     name="VK_OBJECT_TYPE_FENCE"                              comment="VkFence"/>
+        <enum value="8"     name="VK_OBJECT_TYPE_DEVICE_MEMORY"                      comment="VkDeviceMemory"/>
+        <enum value="9"     name="VK_OBJECT_TYPE_BUFFER"                             comment="VkBuffer"/>
+        <enum value="10"    name="VK_OBJECT_TYPE_IMAGE"                              comment="VkImage"/>
+        <enum value="11"    name="VK_OBJECT_TYPE_EVENT"                              comment="VkEvent"/>
+        <enum value="12"    name="VK_OBJECT_TYPE_QUERY_POOL"                         comment="VkQueryPool"/>
+        <enum value="13"    name="VK_OBJECT_TYPE_BUFFER_VIEW"                        comment="VkBufferView"/>
+        <enum value="14"    name="VK_OBJECT_TYPE_IMAGE_VIEW"                         comment="VkImageView"/>
+        <enum value="15"    name="VK_OBJECT_TYPE_SHADER_MODULE"                      comment="VkShaderModule"/>
+        <enum value="16"    name="VK_OBJECT_TYPE_PIPELINE_CACHE"                     comment="VkPipelineCache"/>
+        <enum value="17"    name="VK_OBJECT_TYPE_PIPELINE_LAYOUT"                    comment="VkPipelineLayout"/>
+        <enum value="18"    name="VK_OBJECT_TYPE_RENDER_PASS"                        comment="VkRenderPass"/>
+        <enum value="19"    name="VK_OBJECT_TYPE_PIPELINE"                           comment="VkPipeline"/>
+        <enum value="20"    name="VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT"              comment="VkDescriptorSetLayout"/>
+        <enum value="21"    name="VK_OBJECT_TYPE_SAMPLER"                            comment="VkSampler"/>
+        <enum value="22"    name="VK_OBJECT_TYPE_DESCRIPTOR_POOL"                    comment="VkDescriptorPool"/>
+        <enum value="23"    name="VK_OBJECT_TYPE_DESCRIPTOR_SET"                     comment="VkDescriptorSet"/>
+        <enum value="24"    name="VK_OBJECT_TYPE_FRAMEBUFFER"                        comment="VkFramebuffer"/>
+        <enum value="25"    name="VK_OBJECT_TYPE_COMMAND_POOL"                       comment="VkCommandPool"/>
+    </enums>
+
+        <comment>Flags</comment>
+    <enums name="VkQueueFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_QUEUE_GRAPHICS_BIT"                             comment="Queue supports graphics operations"/>
+        <enum bitpos="1"    name="VK_QUEUE_COMPUTE_BIT"                              comment="Queue supports compute operations"/>
+        <enum bitpos="2"    name="VK_QUEUE_TRANSFER_BIT"                             comment="Queue supports transfer operations"/>
+        <enum bitpos="3"    name="VK_QUEUE_SPARSE_BINDING_BIT"                       comment="Queue supports sparse resource memory management operations"/>
+    </enums>
+    <enums name="VkRenderPassCreateFlagBits" type="bitmask"></enums>
+    <enums name="VkDeviceQueueCreateFlagBits" type="bitmask"></enums>
+    <enums name="VkMemoryPropertyFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT"               comment="If otherwise stated, then allocate memory on device"/>
+        <enum bitpos="1"    name="VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT"               comment="Memory is mappable by host"/>
+        <enum bitpos="2"    name="VK_MEMORY_PROPERTY_HOST_COHERENT_BIT"              comment="Memory will have i/o coherency. If not set, application may need to use vkFlushMappedMemoryRanges and vkInvalidateMappedMemoryRanges to flush/invalidate host cache"/>
+        <enum bitpos="3"    name="VK_MEMORY_PROPERTY_HOST_CACHED_BIT"                comment="Memory will be cached by the host"/>
+        <enum bitpos="4"    name="VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT"           comment="Memory may be allocated by the driver when it is required"/>
+    </enums>
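For context only (not part of the imported registry): as the VK_MEMORY_PROPERTY_HOST_COHERENT_BIT comment notes, host writes to mapped memory that lacks that property are not automatically visible to the device and must be flushed with vkFlushMappedMemoryRanges. A minimal sketch, assuming device, memory, and size come from an earlier vkAllocateMemory of HOST_VISIBLE (but not HOST_COHERENT) memory:

    #include <string.h>
    #include <vulkan/vulkan.h>

    /* Copies `size` bytes from `src` into a mapped, non-coherent allocation
     * and flushes the write so the device can see it. */
    void write_and_flush(VkDevice device, VkDeviceMemory memory,
                         VkDeviceSize size, const void *src) {
        void *mapped = NULL;
        if (vkMapMemory(device, memory, 0, size, 0, &mapped) != VK_SUCCESS) {
            return;
        }
        memcpy(mapped, src, (size_t)size);

        /* Without HOST_COHERENT, host writes must be flushed explicitly. */
        VkMappedMemoryRange range = {
            .sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
            .memory = memory,
            .offset = 0,
            .size = VK_WHOLE_SIZE,
        };
        vkFlushMappedMemoryRanges(device, 1, &range);
        vkUnmapMemory(device, memory);
    }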
+    <enums name="VkMemoryHeapFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_MEMORY_HEAP_DEVICE_LOCAL_BIT"                   comment="If set, heap represents device memory"/>
+    </enums>
+    <enums name="VkAccessFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_ACCESS_INDIRECT_COMMAND_READ_BIT"               comment="Controls coherency of indirect command reads"/>
+        <enum bitpos="1"    name="VK_ACCESS_INDEX_READ_BIT"                          comment="Controls coherency of index reads"/>
+        <enum bitpos="2"    name="VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT"               comment="Controls coherency of vertex attribute reads"/>
+        <enum bitpos="3"    name="VK_ACCESS_UNIFORM_READ_BIT"                        comment="Controls coherency of uniform buffer reads"/>
+        <enum bitpos="4"    name="VK_ACCESS_INPUT_ATTACHMENT_READ_BIT"               comment="Controls coherency of input attachment reads"/>
+        <enum bitpos="5"    name="VK_ACCESS_SHADER_READ_BIT"                         comment="Controls coherency of shader reads"/>
+        <enum bitpos="6"    name="VK_ACCESS_SHADER_WRITE_BIT"                        comment="Controls coherency of shader writes"/>
+        <enum bitpos="7"    name="VK_ACCESS_COLOR_ATTACHMENT_READ_BIT"               comment="Controls coherency of color attachment reads"/>
+        <enum bitpos="8"    name="VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT"              comment="Controls coherency of color attachment writes"/>
+        <enum bitpos="9"    name="VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT"       comment="Controls coherency of depth/stencil attachment reads"/>
+        <enum bitpos="10"   name="VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT"      comment="Controls coherency of depth/stencil attachment writes"/>
+        <enum bitpos="11"   name="VK_ACCESS_TRANSFER_READ_BIT"                       comment="Controls coherency of transfer reads"/>
+        <enum bitpos="12"   name="VK_ACCESS_TRANSFER_WRITE_BIT"                      comment="Controls coherency of transfer writes"/>
+        <enum bitpos="13"   name="VK_ACCESS_HOST_READ_BIT"                           comment="Controls coherency of host reads"/>
+        <enum bitpos="14"   name="VK_ACCESS_HOST_WRITE_BIT"                          comment="Controls coherency of host writes"/>
+        <enum bitpos="15"   name="VK_ACCESS_MEMORY_READ_BIT"                         comment="Controls coherency of memory reads"/>
+        <enum bitpos="16"   name="VK_ACCESS_MEMORY_WRITE_BIT"                        comment="Controls coherency of memory writes"/>
+    </enums>
+    <enums name="VkBufferUsageFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_BUFFER_USAGE_TRANSFER_SRC_BIT"                  comment="Can be used as a source of transfer operations"/>
+        <enum bitpos="1"    name="VK_BUFFER_USAGE_TRANSFER_DST_BIT"                  comment="Can be used as a destination of transfer operations"/>
+        <enum bitpos="2"    name="VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT"          comment="Can be used as TBO"/>
+        <enum bitpos="3"    name="VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT"          comment="Can be used as IBO"/>
+        <enum bitpos="4"    name="VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT"                comment="Can be used as UBO"/>
+        <enum bitpos="5"    name="VK_BUFFER_USAGE_STORAGE_BUFFER_BIT"                comment="Can be used as SSBO"/>
+        <enum bitpos="6"    name="VK_BUFFER_USAGE_INDEX_BUFFER_BIT"                  comment="Can be used as source of fixed-function index fetch (index buffer)"/>
+        <enum bitpos="7"    name="VK_BUFFER_USAGE_VERTEX_BUFFER_BIT"                 comment="Can be used as source of fixed-function vertex fetch (VBO)"/>
+        <enum bitpos="8"    name="VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT"               comment="Can be the source of indirect parameters (e.g. indirect buffer, parameter buffer)"/>
+    </enums>
+    <enums name="VkBufferCreateFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_BUFFER_CREATE_SPARSE_BINDING_BIT"               comment="Buffer should support sparse backing"/>
+        <enum bitpos="1"    name="VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT"             comment="Buffer should support sparse backing with partial residency"/>
+        <enum bitpos="2"    name="VK_BUFFER_CREATE_SPARSE_ALIASED_BIT"               comment="Buffer should support constent data access to physical memory ranges mapped into multiple locations of sparse buffers"/>
+    </enums>
+    <enums name="VkShaderStageFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_SHADER_STAGE_VERTEX_BIT"/>
+        <enum bitpos="1"    name="VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT"/>
+        <enum bitpos="2"    name="VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT"/>
+        <enum bitpos="3"    name="VK_SHADER_STAGE_GEOMETRY_BIT"/>
+        <enum bitpos="4"    name="VK_SHADER_STAGE_FRAGMENT_BIT"/>
+        <enum bitpos="5"    name="VK_SHADER_STAGE_COMPUTE_BIT"/>
+        <enum value="0x0000001F" name="VK_SHADER_STAGE_ALL_GRAPHICS"/>
+        <enum value="0x7FFFFFFF" name="VK_SHADER_STAGE_ALL"/>
+    </enums>
+    <enums name="VkImageUsageFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_IMAGE_USAGE_TRANSFER_SRC_BIT"                   comment="Can be used as a source of transfer operations"/>
+        <enum bitpos="1"    name="VK_IMAGE_USAGE_TRANSFER_DST_BIT"                   comment="Can be used as a destination of transfer operations"/>
+        <enum bitpos="2"    name="VK_IMAGE_USAGE_SAMPLED_BIT"                        comment="Can be sampled from (SAMPLED_IMAGE and COMBINED_IMAGE_SAMPLER descriptor types)"/>
+        <enum bitpos="3"    name="VK_IMAGE_USAGE_STORAGE_BIT"                        comment="Can be used as storage image (STORAGE_IMAGE descriptor type)"/>
+        <enum bitpos="4"    name="VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT"               comment="Can be used as framebuffer color attachment"/>
+        <enum bitpos="5"    name="VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT"       comment="Can be used as framebuffer depth/stencil attachment"/>
+        <enum bitpos="6"    name="VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT"           comment="Image data not needed outside of rendering"/>
+        <enum bitpos="7"    name="VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT"               comment="Can be used as framebuffer input attachment"/>
+    </enums>
+    <enums name="VkImageCreateFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_IMAGE_CREATE_SPARSE_BINDING_BIT"                comment="Image should support sparse backing"/>
+        <enum bitpos="1"    name="VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT"              comment="Image should support sparse backing with partial residency"/>
+        <enum bitpos="2"    name="VK_IMAGE_CREATE_SPARSE_ALIASED_BIT"                comment="Image should support constent data access to physical memory ranges mapped into multiple locations of sparse images"/>
+        <enum bitpos="3"    name="VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT"                comment="Allows image views to have different format than the base image"/>
+        <enum bitpos="4"    name="VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT"               comment="Allows creating image views with cube type from the created image"/>
+    </enums>
+    <enums name="VkImageViewCreateFlagBits" type="bitmask">
+    </enums>
+    <enums name="VkSamplerCreateFlagBits" type="bitmask">
+    </enums>
+    <enums name="VkPipelineCreateFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT"/>
+        <enum bitpos="1"    name="VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT"/>
+        <enum bitpos="2"    name="VK_PIPELINE_CREATE_DERIVATIVE_BIT"/>
+    </enums>
+    <enums name="VkPipelineShaderStageCreateFlagBits" type="bitmask">
+    </enums>
+    <enums name="VkColorComponentFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_COLOR_COMPONENT_R_BIT"/>
+        <enum bitpos="1"    name="VK_COLOR_COMPONENT_G_BIT"/>
+        <enum bitpos="2"    name="VK_COLOR_COMPONENT_B_BIT"/>
+        <enum bitpos="3"    name="VK_COLOR_COMPONENT_A_BIT"/>
+    </enums>
+    <enums name="VkFenceCreateFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_FENCE_CREATE_SIGNALED_BIT"/>
+    </enums>
+    <enums name="VkSemaphoreCreateFlagBits" type="bitmask">
+    </enums>
+    <enums name="VkFormatFeatureFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT"               comment="Format can be used for sampled images (SAMPLED_IMAGE and COMBINED_IMAGE_SAMPLER descriptor types)"/>
+        <enum bitpos="1"    name="VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT"               comment="Format can be used for storage images (STORAGE_IMAGE descriptor type)"/>
+        <enum bitpos="2"    name="VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT"        comment="Format supports atomic operations in case it is used for storage images"/>
+        <enum bitpos="3"    name="VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT"        comment="Format can be used for uniform texel buffers (TBOs)"/>
+        <enum bitpos="4"    name="VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT"        comment="Format can be used for storage texel buffers (IBOs)"/>
+        <enum bitpos="5"    name="VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT" comment="Format supports atomic operations in case it is used for storage texel buffers"/>
+        <enum bitpos="6"    name="VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT"               comment="Format can be used for vertex buffers (VBOs)"/>
+        <enum bitpos="7"    name="VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT"            comment="Format can be used for color attachment images"/>
+        <enum bitpos="8"    name="VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT"      comment="Format supports blending in case it is used for color attachment images"/>
+        <enum bitpos="9"    name="VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT"    comment="Format can be used for depth/stencil attachment images"/>
+        <enum bitpos="10"   name="VK_FORMAT_FEATURE_BLIT_SRC_BIT"                    comment="Format can be used as the source image of blits with vkCmdBlitImage"/>
+        <enum bitpos="11"   name="VK_FORMAT_FEATURE_BLIT_DST_BIT"                    comment="Format can be used as the destination image of blits with vkCmdBlitImage"/>
+        <enum bitpos="12"   name="VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" comment="Format can be filtered with VK_FILTER_LINEAR when being sampled"/>
+    </enums>
+    <enums name="VkQueryControlFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_QUERY_CONTROL_PRECISE_BIT"                      comment="Require precise results to be collected by the query"/>
+    </enums>
+    <enums name="VkQueryResultFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_QUERY_RESULT_64_BIT"                            comment="Results of the queries are written to the destination buffer as 64-bit values"/>
+        <enum bitpos="1"    name="VK_QUERY_RESULT_WAIT_BIT"                          comment="Results of the queries are waited on before proceeding with the result copy"/>
+        <enum bitpos="2"    name="VK_QUERY_RESULT_WITH_AVAILABILITY_BIT"             comment="Besides the results of the query, the availability of the results is also written"/>
+        <enum bitpos="3"    name="VK_QUERY_RESULT_PARTIAL_BIT"                       comment="Copy the partial results of the query even if the final results are not available"/>
+    </enums>
+    <enums name="VkCommandBufferUsageFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT"/>
+        <enum bitpos="1"    name="VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT"/>
+        <enum bitpos="2"    name="VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT"      comment="Command buffer may be submitted/executed more than once simultaneously"/>
+    </enums>
+    <enums name="VkQueryPipelineStatisticFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT"                    comment="Optional"/>
+        <enum bitpos="1"    name="VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT"                  comment="Optional"/>
+        <enum bitpos="2"    name="VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT"                  comment="Optional"/>
+        <enum bitpos="3"    name="VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT"                comment="Optional"/>
+        <enum bitpos="4"    name="VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT"                 comment="Optional"/>
+        <enum bitpos="5"    name="VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT"                       comment="Optional"/>
+        <enum bitpos="6"    name="VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT"                        comment="Optional"/>
+        <enum bitpos="7"    name="VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT"                comment="Optional"/>
+        <enum bitpos="8"    name="VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT"        comment="Optional"/>
+        <enum bitpos="9"    name="VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT" comment="Optional"/>
+        <enum bitpos="10"   name="VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT"                 comment="Optional"/>
+    </enums>
+    <enums name="VkImageAspectFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_IMAGE_ASPECT_COLOR_BIT"/>
+        <enum bitpos="1"    name="VK_IMAGE_ASPECT_DEPTH_BIT"/>
+        <enum bitpos="2"    name="VK_IMAGE_ASPECT_STENCIL_BIT"/>
+        <enum bitpos="3"    name="VK_IMAGE_ASPECT_METADATA_BIT"/>
+    </enums>
+    <enums name="VkSparseImageFormatFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT"         comment="Image uses a single mip tail region for all array layers"/>
+        <enum bitpos="1"    name="VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT"       comment="Image requires mip level dimensions to be an integer multiple of the sparse image block dimensions for non-tail mip levels."/>
+        <enum bitpos="2"    name="VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT" comment="Image uses a non-standard sparse image block dimensions"/>
+    </enums>
+    <enums name="VkSparseMemoryBindFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_SPARSE_MEMORY_BIND_METADATA_BIT"                comment="Operation binds resource metadata to memory"/>
+    </enums>
+    <enums name="VkPipelineStageFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT"                 comment="Before subsequent commands are processed"/>
+        <enum bitpos="1"    name="VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT"               comment="Draw/DispatchIndirect command fetch"/>
+        <enum bitpos="2"    name="VK_PIPELINE_STAGE_VERTEX_INPUT_BIT"                comment="Vertex/index fetch"/>
+        <enum bitpos="3"    name="VK_PIPELINE_STAGE_VERTEX_SHADER_BIT"               comment="Vertex shading"/>
+        <enum bitpos="4"    name="VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT" comment="Tessellation control shading"/>
+        <enum bitpos="5"    name="VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT" comment="Tessellation evaluation shading"/>
+        <enum bitpos="6"    name="VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT"             comment="Geometry shading"/>
+        <enum bitpos="7"    name="VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT"             comment="Fragment shading"/>
+        <enum bitpos="8"    name="VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT"        comment="Early fragment (depth and stencil) tests"/>
+        <enum bitpos="9"    name="VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT"         comment="Late fragment (depth and stencil) tests"/>
+        <enum bitpos="10"   name="VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT"     comment="Color attachment writes"/>
+        <enum bitpos="11"   name="VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT"              comment="Compute shading"/>
+        <enum bitpos="12"   name="VK_PIPELINE_STAGE_TRANSFER_BIT"                    comment="Transfer/copy operations"/>
+        <enum bitpos="13"   name="VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT"              comment="After previous commands have completed"/>
+        <enum bitpos="14"   name="VK_PIPELINE_STAGE_HOST_BIT"                        comment="Indicates host (CPU) is a source/sink of the dependency"/>
+        <enum bitpos="15"   name="VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT"                comment="All stages of the graphics pipeline"/>
+        <enum bitpos="16"   name="VK_PIPELINE_STAGE_ALL_COMMANDS_BIT"                comment="All stages supported on the queue"/>
+    </enums>
+    <enums name="VkCommandPoolCreateFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_COMMAND_POOL_CREATE_TRANSIENT_BIT"              comment="Command buffers have a short lifetime"/>
+        <enum bitpos="1"    name="VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT"   comment="Command buffers may release their memory individually"/>
+    </enums>
+    <enums name="VkCommandPoolResetFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT"       comment="Release resources owned by the pool"/>
+    </enums>
+    <enums name="VkCommandBufferResetFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT"     comment="Release resources owned by the buffer"/>
+    </enums>
+    <enums name="VkSampleCountFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_SAMPLE_COUNT_1_BIT"                             comment="Sample count 1 supported"/>
+        <enum bitpos="1"    name="VK_SAMPLE_COUNT_2_BIT"                             comment="Sample count 2 supported"/>
+        <enum bitpos="2"    name="VK_SAMPLE_COUNT_4_BIT"                             comment="Sample count 4 supported"/>
+        <enum bitpos="3"    name="VK_SAMPLE_COUNT_8_BIT"                             comment="Sample count 8 supported"/>
+        <enum bitpos="4"    name="VK_SAMPLE_COUNT_16_BIT"                            comment="Sample count 16 supported"/>
+        <enum bitpos="5"    name="VK_SAMPLE_COUNT_32_BIT"                            comment="Sample count 32 supported"/>
+        <enum bitpos="6"    name="VK_SAMPLE_COUNT_64_BIT"                            comment="Sample count 64 supported"/>
+    </enums>
+    <enums name="VkAttachmentDescriptionFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT"           comment="The attachment may alias physical memory of another attachment in the same render pass"/>
+    </enums>
+    <enums name="VkStencilFaceFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_STENCIL_FACE_FRONT_BIT"                         comment="Front face"/>
+        <enum bitpos="1"    name="VK_STENCIL_FACE_BACK_BIT"                          comment="Back face"/>
+        <enum value="0x00000003" name="VK_STENCIL_FACE_FRONT_AND_BACK"               comment="Front and back faces"/>
+        <enum                    name="VK_STENCIL_FRONT_AND_BACK" alias="VK_STENCIL_FACE_FRONT_AND_BACK" comment="Alias for backwards compatibility"/>
+    </enums>
+    <enums name="VkDescriptorPoolCreateFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT" comment="Descriptor sets may be freed individually"/>
+    </enums>
+    <enums name="VkDependencyFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_DEPENDENCY_BY_REGION_BIT"                       comment="Dependency is per pixel region "/>
+    </enums>
+    <enums name="VkSemaphoreTypeKHR" type="enum">
+        <enum value="0"     name="VK_SEMAPHORE_TYPE_BINARY_KHR"/>
+        <enum value="1"     name="VK_SEMAPHORE_TYPE_TIMELINE_KHR"/>
+    </enums>
+    <enums name="VkSemaphoreWaitFlagBitsKHR" type="bitmask">
+        <enum bitpos="0"    name="VK_SEMAPHORE_WAIT_ANY_BIT_KHR"/>
+    </enums>
+
+        <comment>WSI Extensions</comment>
+    <enums name="VkPresentModeKHR" type="enum">
+        <enum value="0"     name="VK_PRESENT_MODE_IMMEDIATE_KHR"/>
+        <enum value="1"     name="VK_PRESENT_MODE_MAILBOX_KHR"/>
+        <enum value="2"     name="VK_PRESENT_MODE_FIFO_KHR"/>
+        <enum value="3"     name="VK_PRESENT_MODE_FIFO_RELAXED_KHR"/>
+    </enums>
+    <enums name="VkColorSpaceKHR" type="enum">
+        <enum value="0"     name="VK_COLOR_SPACE_SRGB_NONLINEAR_KHR"/>
+        <enum               name="VK_COLORSPACE_SRGB_NONLINEAR_KHR" alias="VK_COLOR_SPACE_SRGB_NONLINEAR_KHR" comment="Backwards-compatible alias containing a typo"/>
+    </enums>
+    <enums name="VkDisplayPlaneAlphaFlagBitsKHR" type="bitmask">
+        <enum bitpos="0"    name="VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR"/>
+        <enum bitpos="1"    name="VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR"/>
+        <enum bitpos="2"    name="VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR"/>
+        <enum bitpos="3"    name="VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR"/>
+    </enums>
+    <enums name="VkCompositeAlphaFlagBitsKHR" type="bitmask">
+        <enum bitpos="0"    name="VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR"/>
+        <enum bitpos="1"    name="VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR"/>
+        <enum bitpos="2"    name="VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR"/>
+        <enum bitpos="3"    name="VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR"/>
+    </enums>
+    <enums name="VkSurfaceTransformFlagBitsKHR" type="bitmask">
+        <enum bitpos="0"    name="VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR"/>
+        <enum bitpos="1"    name="VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR"/>
+        <enum bitpos="2"    name="VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR"/>
+        <enum bitpos="3"    name="VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR"/>
+        <enum bitpos="4"    name="VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR"/>
+        <enum bitpos="5"    name="VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR"/>
+        <enum bitpos="6"    name="VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR"/>
+        <enum bitpos="7"    name="VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR"/>
+        <enum bitpos="8"    name="VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR"/>
+    </enums>
+    <enums name="VkSwapchainImageUsageFlagBitsANDROID" type="bitmask">
+      <enum bitpos="0"      name="VK_SWAPCHAIN_IMAGE_USAGE_SHARED_BIT_ANDROID"/>
+    </enums>
+    <enums name="VkTimeDomainEXT" type="enum">
+        <enum value="0"     name="VK_TIME_DOMAIN_DEVICE_EXT"/>
+        <enum value="1"     name="VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT"/>
+        <enum value="2"     name="VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT"/>
+        <enum value="3"     name="VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT"/>
+    </enums>
+    <enums name="VkDebugReportFlagBitsEXT" type="bitmask">
+        <enum bitpos="0"    name="VK_DEBUG_REPORT_INFORMATION_BIT_EXT"/>
+        <enum bitpos="1"    name="VK_DEBUG_REPORT_WARNING_BIT_EXT"/>
+        <enum bitpos="2"    name="VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT"/>
+        <enum bitpos="3"    name="VK_DEBUG_REPORT_ERROR_BIT_EXT"/>
+        <enum bitpos="4"    name="VK_DEBUG_REPORT_DEBUG_BIT_EXT"/>
+    </enums>
+    <enums name="VkDebugReportObjectTypeEXT" type="enum">
+        <enum value="0"     name="VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT"/>
+        <enum value="1"     name="VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT"/>
+        <enum value="2"     name="VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT"/>
+        <enum value="3"     name="VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT"/>
+        <enum value="4"     name="VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT"/>
+        <enum value="5"     name="VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT"/>
+        <enum value="6"     name="VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT"/>
+        <enum value="7"     name="VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT"/>
+        <enum value="8"     name="VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT"/>
+        <enum value="9"     name="VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT"/>
+        <enum value="10"    name="VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT"/>
+        <enum value="11"    name="VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT"/>
+        <enum value="12"    name="VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT"/>
+        <enum value="13"    name="VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT"/>
+        <enum value="14"    name="VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT"/>
+        <enum value="15"    name="VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT"/>
+        <enum value="16"    name="VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT"/>
+        <enum value="17"    name="VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT"/>
+        <enum value="18"    name="VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT"/>
+        <enum value="19"    name="VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT"/>
+        <enum value="20"    name="VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT"/>
+        <enum value="21"    name="VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT"/>
+        <enum value="22"    name="VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT"/>
+        <enum value="23"    name="VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT"/>
+        <enum value="24"    name="VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT"/>
+        <enum value="25"    name="VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT"/>
+        <enum value="26"    name="VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT"/>
+        <enum value="27"    name="VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT"/>
+        <enum value="28"    name="VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT"/>
+        <enum               name="VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT" alias="VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT" comment="Backwards-compatible alias containing a typo"/>
+        <enum value="29"    name="VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT"/>
+        <enum value="30"    name="VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT"/>
+        <enum value="31"    name="VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT"/>
+        <enum value="32"    name="VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT"/>
+        <enum value="33"    name="VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT"/>
+        <enum               name="VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT" alias="VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT" comment="Backwards-compatible alias containing a typo"/>
+    </enums>
+    <enums name="VkRasterizationOrderAMD" type="enum">
+        <enum value="0"     name="VK_RASTERIZATION_ORDER_STRICT_AMD"/>
+        <enum value="1"     name="VK_RASTERIZATION_ORDER_RELAXED_AMD"/>
+    </enums>
+    <enums name="VkExternalMemoryHandleTypeFlagBitsNV" type="bitmask">
+        <enum bitpos="0"    name="VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV"/>
+        <enum bitpos="1"    name="VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV"/>
+        <enum bitpos="2"    name="VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV"/>
+        <enum bitpos="3"    name="VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV"/>
+    </enums>
+    <enums name="VkExternalMemoryFeatureFlagBitsNV" type="bitmask">
+        <enum bitpos="0"    name="VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV"/>
+        <enum bitpos="1"    name="VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV"/>
+        <enum bitpos="2"    name="VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV"/>
+    </enums>
+    <enums name="VkValidationCheckEXT" type="enum">
+        <enum value="0"     name="VK_VALIDATION_CHECK_ALL_EXT"/>
+        <enum value="1"     name="VK_VALIDATION_CHECK_SHADERS_EXT"/>
+    </enums>
+    <enums name="VkValidationFeatureEnableEXT" type="enum">
+        <enum value="0"     name="VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT"/>
+        <enum value="1"     name="VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT"/>
+        <enum value="2"     name="VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT"/>
+    </enums>
+    <enums name="VkValidationFeatureDisableEXT" type="enum">
+        <enum value="0"     name="VK_VALIDATION_FEATURE_DISABLE_ALL_EXT"/>
+        <enum value="1"     name="VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT"/>
+        <enum value="2"     name="VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT"/>
+        <enum value="3"     name="VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT"/>
+        <enum value="4"     name="VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT"/>
+        <enum value="5"     name="VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT"/>
+        <enum value="6"     name="VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT"/>
+    </enums>
+    <enums name="VkSubgroupFeatureFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_SUBGROUP_FEATURE_BASIC_BIT"              comment="Basic subgroup operations"/>
+        <enum bitpos="1"    name="VK_SUBGROUP_FEATURE_VOTE_BIT"               comment="Vote subgroup operations"/>
+        <enum bitpos="2"    name="VK_SUBGROUP_FEATURE_ARITHMETIC_BIT"         comment="Arithmetic subgroup operations"/>
+        <enum bitpos="3"    name="VK_SUBGROUP_FEATURE_BALLOT_BIT"             comment="Ballot subgroup operations"/>
+        <enum bitpos="4"    name="VK_SUBGROUP_FEATURE_SHUFFLE_BIT"            comment="Shuffle subgroup operations"/>
+        <enum bitpos="5"    name="VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT"   comment="Shuffle relative subgroup operations"/>
+        <enum bitpos="6"    name="VK_SUBGROUP_FEATURE_CLUSTERED_BIT"          comment="Clustered subgroup operations"/>
+        <enum bitpos="7"    name="VK_SUBGROUP_FEATURE_QUAD_BIT"               comment="Quad subgroup operations"/>
+    </enums>
+    <enums name="VkIndirectCommandsLayoutUsageFlagBitsNVX" type="bitmask">
+        <enum bitpos="0"    name="VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX"/>
+        <enum bitpos="1"    name="VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX"/>
+        <enum bitpos="2"    name="VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX"/>
+        <enum bitpos="3"    name="VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX"/>
+    </enums>
+    <enums name="VkObjectEntryUsageFlagBitsNVX" type="bitmask">
+        <enum bitpos="0"    name="VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX"/>
+        <enum bitpos="1"    name="VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX"/>
+    </enums>
+    <enums name="VkIndirectCommandsTokenTypeNVX" type="enum">
+        <enum value="0"     name="VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX"/>
+        <enum value="1"     name="VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX"/>
+        <enum value="2"     name="VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX"/>
+        <enum value="3"     name="VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX"/>
+        <enum value="4"     name="VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX"/>
+        <enum value="5"     name="VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX"/>
+        <enum value="6"     name="VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX"/>
+        <enum value="7"     name="VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX"/>
+    </enums>
+    <enums name="VkObjectEntryTypeNVX" type="enum">
+        <enum value="0"     name="VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX"/>
+        <enum value="1"     name="VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX"/>
+        <enum value="2"     name="VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX"/>
+        <enum value="3"     name="VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX"/>
+        <enum value="4"     name="VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX"/>
+    </enums>
+    <enums name="VkDescriptorSetLayoutCreateFlagBits" type="bitmask">
+    </enums>
+    <enums name="VkExternalMemoryHandleTypeFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT"/>
+        <enum bitpos="1"    name="VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT"/>
+        <enum bitpos="2"    name="VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT"/>
+        <enum bitpos="3"    name="VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT"/>
+        <enum bitpos="4"    name="VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT"/>
+        <enum bitpos="5"    name="VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT"/>
+        <enum bitpos="6"    name="VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT"/>
+    </enums>
+    <enums name="VkExternalMemoryFeatureFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT"/>
+        <enum bitpos="1"    name="VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT"/>
+        <enum bitpos="2"    name="VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT"/>
+    </enums>
+    <enums name="VkExternalSemaphoreHandleTypeFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT"/>
+        <enum bitpos="1"    name="VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT"/>
+        <enum bitpos="2"    name="VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT"/>
+        <enum bitpos="3"    name="VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT"/>
+        <enum bitpos="4"    name="VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT"/>
+    </enums>
+    <enums name="VkExternalSemaphoreFeatureFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT"/>
+        <enum bitpos="1"    name="VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT"/>
+    </enums>
+    <enums name="VkSemaphoreImportFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_SEMAPHORE_IMPORT_TEMPORARY_BIT"/>
+    </enums>
+    <enums name="VkExternalFenceHandleTypeFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT"/>
+        <enum bitpos="1"    name="VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT"/>
+        <enum bitpos="2"    name="VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT"/>
+        <enum bitpos="3"    name="VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT"/>
+    </enums>
+    <enums name="VkExternalFenceFeatureFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT"/>
+        <enum bitpos="1"    name="VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT"/>
+    </enums>
+    <enums name="VkFenceImportFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_FENCE_IMPORT_TEMPORARY_BIT"/>
+    </enums>
+    <enums name="VkSurfaceCounterFlagBitsEXT" type="bitmask">
+        <enum bitpos="0"    name="VK_SURFACE_COUNTER_VBLANK_EXT"/>
+    </enums>
+    <enums name="VkDisplayPowerStateEXT" type="enum">
+        <enum value="0"     name="VK_DISPLAY_POWER_STATE_OFF_EXT"/>
+        <enum value="1"     name="VK_DISPLAY_POWER_STATE_SUSPEND_EXT"/>
+        <enum value="2"     name="VK_DISPLAY_POWER_STATE_ON_EXT"/>
+    </enums>
+    <enums name="VkDeviceEventTypeEXT" type="enum">
+        <enum value="0"     name="VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT"/>
+    </enums>
+    <enums name="VkDisplayEventTypeEXT" type="enum">
+        <enum value="0"     name="VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT"/>
+    </enums>
+    <enums name="VkPeerMemoryFeatureFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT"           comment="Can read with vkCmdCopy commands"/>
+        <enum bitpos="1"    name="VK_PEER_MEMORY_FEATURE_COPY_DST_BIT"           comment="Can write with vkCmdCopy commands"/>
+        <enum bitpos="2"    name="VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT"        comment="Can read with any access type/command"/>
+        <enum bitpos="3"    name="VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT"        comment="Can write with and access type/command"/>
+    </enums>
+    <enums name="VkMemoryAllocateFlagBits" type="bitmask">
+        <enum bitpos="0"    name="VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT"            comment="Force allocation on specific devices"/>
+    </enums>
+    <enums name="VkDeviceGroupPresentModeFlagBitsKHR" type="bitmask">
+        <enum bitpos="0"    name="VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR"        comment="Present from local memory"/>
+        <enum bitpos="1"    name="VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR"       comment="Present from remote memory"/>
+        <enum bitpos="2"    name="VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR"          comment="Present sum of local and/or remote memory"/>
+        <enum bitpos="3"    name="VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR" comment="Each physical device presents from local memory"/>
+    </enums>
+    <enums name="VkSwapchainCreateFlagBitsKHR" type="bitmask">
+    </enums>
+    <enums name="VkViewportCoordinateSwizzleNV" type="enum">
+        <enum value="0"     name="VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV"/>
+        <enum value="1"     name="VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV"/>
+        <enum value="2"     name="VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV"/>
+        <enum value="3"     name="VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV"/>
+        <enum value="4"     name="VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV"/>
+        <enum value="5"     name="VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV"/>
+        <enum value="6"     name="VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV"/>
+        <enum value="7"     name="VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV"/>
+    </enums>
+    <enums name="VkDiscardRectangleModeEXT" type="enum">
+        <enum value="0"     name="VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT"/>
+        <enum value="1"     name="VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT"/>
+    </enums>
+    <enums name="VkSubpassDescriptionFlagBits" type="bitmask">
+    </enums>
+    <enums name="VkPointClippingBehavior" type="enum">
+        <enum value="0"     name="VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES"/>
+        <enum value="1"     name="VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY"/>
+    </enums>
+    <enums name="VkSamplerReductionModeEXT" type="enum">
+        <enum value="0"     name="VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT"/>
+        <enum value="1"     name="VK_SAMPLER_REDUCTION_MODE_MIN_EXT"/>
+        <enum value="2"     name="VK_SAMPLER_REDUCTION_MODE_MAX_EXT"/>
+    </enums>
+    <enums name="VkTessellationDomainOrigin" type="enum">
+        <enum value="0"     name="VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT"/>
+        <enum value="1"     name="VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT"/>
+    </enums>
+    <enums name="VkSamplerYcbcrModelConversion" type="enum">
+        <enum value="0"     name="VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY"/>
+        <enum value="1"     name="VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY" comment="just range expansion"/>
+        <enum value="2"     name="VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709"      comment="aka HD YUV"/>
+        <enum value="3"     name="VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601"      comment="aka SD YUV"/>
+        <enum value="4"     name="VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020"     comment="aka UHD YUV"/>
+    </enums>
+    <enums name="VkSamplerYcbcrRange" type="enum">
+        <enum value="0"     name="VK_SAMPLER_YCBCR_RANGE_ITU_FULL"    comment="Luma 0..1 maps to 0..255, chroma -0.5..0.5 to 1..255 (clamped)"/>
+        <enum value="1"     name="VK_SAMPLER_YCBCR_RANGE_ITU_NARROW"  comment="Luma 0..1 maps to 16..235, chroma -0.5..0.5 to 16..240"/>
+    </enums>
+    <enums name="VkChromaLocation" type="enum">
+        <enum value="0"     name="VK_CHROMA_LOCATION_COSITED_EVEN"/>
+        <enum value="1"     name="VK_CHROMA_LOCATION_MIDPOINT"/>
+    </enums>
+    <enums name="VkBlendOverlapEXT" type="enum">
+        <enum value="0"     name="VK_BLEND_OVERLAP_UNCORRELATED_EXT"/>
+        <enum value="1"     name="VK_BLEND_OVERLAP_DISJOINT_EXT"/>
+        <enum value="2"     name="VK_BLEND_OVERLAP_CONJOINT_EXT"/>
+    </enums>
+    <enums name="VkCoverageModulationModeNV" type="enum">
+        <enum value="0"     name="VK_COVERAGE_MODULATION_MODE_NONE_NV"/>
+        <enum value="1"     name="VK_COVERAGE_MODULATION_MODE_RGB_NV"/>
+        <enum value="2"     name="VK_COVERAGE_MODULATION_MODE_ALPHA_NV"/>
+        <enum value="3"     name="VK_COVERAGE_MODULATION_MODE_RGBA_NV"/>
+    </enums>
+    <enums name="VkCoverageReductionModeNV" type="enum">
+        <enum value="0"     name="VK_COVERAGE_REDUCTION_MODE_MERGE_NV"/>
+        <enum value="1"     name="VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV"/>
+    </enums>
+    <enums name="VkValidationCacheHeaderVersionEXT" type="enum">
+        <enum value="1"     name="VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT"/>
+    </enums>
+    <enums name="VkShaderInfoTypeAMD" type="enum">
+        <enum value="0"     name="VK_SHADER_INFO_TYPE_STATISTICS_AMD"/>
+        <enum value="1"     name="VK_SHADER_INFO_TYPE_BINARY_AMD"/>
+        <enum value="2"     name="VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD"/>
+    </enums>
+    <enums name="VkQueueGlobalPriorityEXT" type="enum">
+        <enum value="128"   name="VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT"/>
+        <enum value="256"   name="VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT"/>
+        <enum value="512"   name="VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT"/>
+        <enum value="1024"  name="VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT"/>
+    </enums>
+    <enums name="VkDebugUtilsMessageSeverityFlagBitsEXT" type="bitmask">
+        <enum bitpos="0"    name="VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT"/>
+        <enum bitpos="4"    name="VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT"/>
+        <enum bitpos="8"    name="VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT"/>
+        <enum bitpos="12"   name="VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT"/>
+    </enums>
+    <enums name="VkDebugUtilsMessageTypeFlagBitsEXT" type="bitmask">
+        <enum bitpos="0"    name="VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT"/>
+        <enum bitpos="1"    name="VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT"/>
+        <enum bitpos="2"    name="VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT"/>
+    </enums>
+    <enums name="VkConservativeRasterizationModeEXT" type="enum">
+        <enum value="0"     name="VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT"/>
+        <enum value="1"     name="VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT"/>
+        <enum value="2"     name="VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT"/>
+    </enums>
+    <enums name="VkDescriptorBindingFlagBitsEXT" type="bitmask">
+        <enum bitpos="0" name="VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT"/>
+        <enum bitpos="1" name="VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT"/>
+        <enum bitpos="2" name="VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT"/>
+        <enum bitpos="3" name="VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT"/>
+    </enums>
+    <enums name="VkVendorId" type="enum">
+        <comment>Vendor IDs are now represented as enums instead of the old
+                 &lt;vendorids&gt; tag, allowing them to be included in the
+                 API headers.</comment>
+        <enum value="0x10001" name="VK_VENDOR_ID_VIV"   comment="Vivante vendor ID"/>
+        <enum value="0x10002" name="VK_VENDOR_ID_VSI"   comment="VeriSilicon vendor ID"/>
+        <enum value="0x10003" name="VK_VENDOR_ID_KAZAN" comment="Kazan Software Renderer"/>
+            <unused start="0x10004" comment="This is the next unused available Khronos vendor ID"/>
+    </enums>
+    <enums name="VkDriverIdKHR" type="enum">
+        <comment>Driver IDs are now represented as enums instead of the old
+                 &lt;driverids&gt; tag, allowing them to be included in the
+                 API headers.</comment>
+        <enum value="1"       name="VK_DRIVER_ID_AMD_PROPRIETARY_KHR"           comment="Advanced Micro Devices, Inc."/>
+        <enum value="2"       name="VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR"           comment="Advanced Micro Devices, Inc."/>
+        <enum value="3"       name="VK_DRIVER_ID_MESA_RADV_KHR"                 comment="Mesa open source project"/>
+        <enum value="4"       name="VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR"        comment="NVIDIA Corporation"/>
+        <enum value="5"       name="VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR" comment="Intel Corporation"/>
+        <enum value="6"       name="VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR"    comment="Intel Corporation"/>
+        <enum value="7"       name="VK_DRIVER_ID_IMAGINATION_PROPRIETARY_KHR"   comment="Imagination Technologies"/>
+        <enum value="8"       name="VK_DRIVER_ID_QUALCOMM_PROPRIETARY_KHR"      comment="Qualcomm Technologies, Inc."/>
+        <enum value="9"       name="VK_DRIVER_ID_ARM_PROPRIETARY_KHR"           comment="Arm Limited"/>
+        <enum value="10"      name="VK_DRIVER_ID_GOOGLE_SWIFTSHADER_KHR"        comment="Google LLC"/>
+        <enum value="11"      name="VK_DRIVER_ID_GGP_PROPRIETARY_KHR"           comment="Google LLC"/>
+        <enum value="12"      name="VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR"      comment="Broadcom Inc."/>
+    </enums>
+    <enums name="VkConditionalRenderingFlagBitsEXT" type="bitmask">
+        <enum bitpos="0"    name="VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT"/>
+    </enums>
+    <enums name="VkResolveModeFlagBitsKHR" type="bitmask">
+        <enum value="0" name="VK_RESOLVE_MODE_NONE_KHR"/>
+        <enum bitpos="0" name="VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR"/>
+        <enum bitpos="1" name="VK_RESOLVE_MODE_AVERAGE_BIT_KHR"/>
+        <enum bitpos="2" name="VK_RESOLVE_MODE_MIN_BIT_KHR"/>
+        <enum bitpos="3" name="VK_RESOLVE_MODE_MAX_BIT_KHR"/>
+    </enums>
+    <enums name="VkShadingRatePaletteEntryNV" type="enum">
+        <enum value="0" name="VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV"/>
+        <enum value="1" name="VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV"/>
+        <enum value="2" name="VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV"/>
+        <enum value="3" name="VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV"/>
+        <enum value="4" name="VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV"/>
+        <enum value="5" name="VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV"/>
+        <enum value="6" name="VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV"/>
+        <enum value="7" name="VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV"/>
+        <enum value="8" name="VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV"/>
+        <enum value="9" name="VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV"/>
+        <enum value="10" name="VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV"/>
+        <enum value="11" name="VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV"/>
+    </enums>
+    <enums name="VkCoarseSampleOrderTypeNV" type="enum">
+        <enum value="0" name="VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV"/>
+        <enum value="1" name="VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV"/>
+        <enum value="2" name="VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV"/>
+        <enum value="3" name="VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV"/>
+    </enums>
+    <enums name="VkGeometryInstanceFlagBitsNV" type="bitmask">
+        <enum bitpos="0" name="VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV"/>
+        <enum bitpos="1" name="VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV"/>
+        <enum bitpos="2" name="VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV"/>
+        <enum bitpos="3" name="VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV"/>
+    </enums>
+    <enums name="VkGeometryFlagBitsNV" type="bitmask">
+        <enum bitpos="0" name="VK_GEOMETRY_OPAQUE_BIT_NV"/>
+        <enum bitpos="1" name="VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV"/>
+    </enums>
+    <enums name="VkBuildAccelerationStructureFlagBitsNV" type="bitmask">
+        <enum bitpos="0" name="VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV"/>
+        <enum bitpos="1" name="VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV"/>
+        <enum bitpos="2" name="VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV"/>
+        <enum bitpos="3" name="VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV"/>
+        <enum bitpos="4" name="VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV"/>
+    </enums>
+    <enums name="VkCopyAccelerationStructureModeNV" type="enum">
+        <enum value="0" name="VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV"/>
+        <enum value="1" name="VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV"/>
+    </enums>
+    <enums name="VkAccelerationStructureTypeNV" type="enum">
+        <enum value="0" name="VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV"/>
+        <enum value="1" name="VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV"/>
+    </enums>
+    <enums name="VkGeometryTypeNV" type="enum">
+        <enum value="0" name="VK_GEOMETRY_TYPE_TRIANGLES_NV"/>
+        <enum value="1" name="VK_GEOMETRY_TYPE_AABBS_NV"/>
+    </enums>
+    <enums name="VkAccelerationStructureMemoryRequirementsTypeNV" type="enum">
+        <enum value="0" name="VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV"/>
+        <enum value="1" name="VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV"/>
+        <enum value="2" name="VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV"/>
+    </enums>
+    <enums name="VkRayTracingShaderGroupTypeNV" type="enum">
+        <enum value="0" name="VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV"/>
+        <enum value="1" name="VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV"/>
+        <enum value="2" name="VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV"/>
+    </enums>
+    <enums name="VkMemoryOverallocationBehaviorAMD" type="enum">
+        <enum value="0"     name="VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD"/>
+        <enum value="1"     name="VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD"/>
+        <enum value="2"     name="VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD"/>
+    </enums>
+    <enums name="VkFramebufferCreateFlagBits" type="bitmask">
+    </enums>
+    <enums name="VkScopeNV" type="enum">
+        <enum value="1"     name="VK_SCOPE_DEVICE_NV"/>
+        <enum value="2"     name="VK_SCOPE_WORKGROUP_NV"/>
+        <enum value="3"     name="VK_SCOPE_SUBGROUP_NV"/>
+        <enum value="5"     name="VK_SCOPE_QUEUE_FAMILY_NV"/>
+    </enums>
+    <enums name="VkComponentTypeNV" type="enum">
+        <enum value="0"     name="VK_COMPONENT_TYPE_FLOAT16_NV"/>
+        <enum value="1"     name="VK_COMPONENT_TYPE_FLOAT32_NV"/>
+        <enum value="2"     name="VK_COMPONENT_TYPE_FLOAT64_NV"/>
+        <enum value="3"     name="VK_COMPONENT_TYPE_SINT8_NV"/>
+        <enum value="4"     name="VK_COMPONENT_TYPE_SINT16_NV"/>
+        <enum value="5"     name="VK_COMPONENT_TYPE_SINT32_NV"/>
+        <enum value="6"     name="VK_COMPONENT_TYPE_SINT64_NV"/>
+        <enum value="7"     name="VK_COMPONENT_TYPE_UINT8_NV"/>
+        <enum value="8"     name="VK_COMPONENT_TYPE_UINT16_NV"/>
+        <enum value="9"     name="VK_COMPONENT_TYPE_UINT32_NV"/>
+        <enum value="10"    name="VK_COMPONENT_TYPE_UINT64_NV"/>
+    </enums>
+    <enums name="VkPipelineCreationFeedbackFlagBitsEXT" type="bitmask">
+        <enum bitpos="0"    name="VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT"/>
+        <enum bitpos="1"    name="VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT"/>
+        <enum bitpos="2"    name="VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT"/>
+    </enums>
+    <enums name="VkFullScreenExclusiveEXT" type="enum">
+        <enum value="0"     name="VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT"/>
+        <enum value="1"     name="VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT"/>
+        <enum value="2"     name="VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT"/>
+        <enum value="3"     name="VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT"/>
+    </enums>
+    <enums name="VkPerformanceCounterScopeKHR" type="enum">
+        <enum value="0"     name="VK_QUERY_SCOPE_COMMAND_BUFFER_KHR"/>
+        <enum value="1"     name="VK_QUERY_SCOPE_RENDER_PASS_KHR"/>
+        <enum value="2"     name="VK_QUERY_SCOPE_COMMAND_KHR"/>
+    </enums>
+    <enums name="VkPerformanceCounterUnitKHR" type="enum">
+        <enum value="0"     name="VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR"/>
+        <enum value="1"     name="VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR"/>
+        <enum value="2"     name="VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR"/>
+        <enum value="3"     name="VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR"/>
+        <enum value="4"     name="VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR"/>
+        <enum value="5"     name="VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR"/>
+        <enum value="6"     name="VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR"/>
+        <enum value="7"     name="VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR"/>
+        <enum value="8"     name="VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR"/>
+        <enum value="9"     name="VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR"/>
+        <enum value="10"    name="VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR"/>
+    </enums>
+    <enums name="VkPerformanceCounterStorageKHR" type="enum">
+        <enum value="0"     name="VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR"/>
+        <enum value="1"     name="VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR"/>
+        <enum value="2"     name="VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR"/>
+        <enum value="3"     name="VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR"/>
+        <enum value="4"     name="VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR"/>
+        <enum value="5"     name="VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR"/>
+    </enums>
+    <enums name="VkPerformanceCounterDescriptionFlagBitsKHR" type="bitmask">
+        <enum bitpos="0"     name="VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR"/>
+        <enum bitpos="1"     name="VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR"/>
+    </enums>
+    <enums name="VkAcquireProfilingLockFlagBitsKHR" type="bitmask">
+    </enums>
+    <enums name="VkShaderCorePropertiesFlagBitsAMD" type="bitmask">
+    </enums>
+    <enums name="VkPerformanceConfigurationTypeINTEL" type="enum">
+        <enum value="0"     name="VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL"/>
+    </enums>
+    <enums name="VkQueryPoolSamplingModeINTEL" type="enum">
+        <enum value="0"     name="VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL"/>
+    </enums>
+    <enums name="VkPerformanceOverrideTypeINTEL" type="enum">
+        <enum value="0"     name="VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL"/>
+        <enum value="1"     name="VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL"/>
+    </enums>
+    <enums name="VkPerformanceParameterTypeINTEL" type="enum">
+        <enum value="0"     name="VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL"/>
+        <enum value="1"     name="VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL"/>
+    </enums>
+    <enums name="VkPerformanceValueTypeINTEL" type="enum">
+        <enum value="0"     name="VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL"/>
+        <enum value="1"     name="VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL"/>
+        <enum value="2"     name="VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL"/>
+        <enum value="3"     name="VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL"/>
+        <enum value="4"     name="VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL"/>
+    </enums>
+    <enums name="VkPipelineExecutableStatisticFormatKHR" type="enum">
+        <enum value="0" name="VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR"/>
+        <enum value="1" name="VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR"/>
+        <enum value="2" name="VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR"/>
+        <enum value="3" name="VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR"/>
+    </enums>
+    <enums name="VkShaderFloatControlsIndependenceKHR" type="enum">
+        <enum value="0"     name="VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR"/>
+        <enum value="1"     name="VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR"/>
+        <enum value="2"     name="VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR"/>
+    </enums>
+    <enums name="VkLineRasterizationModeEXT" type="enum">
+        <enum value="0"     name="VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT"/>
+        <enum value="1"     name="VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT"/>
+        <enum value="2"     name="VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT"/>
+        <enum value="3"     name="VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT"/>
+    </enums>
+    <enums name="VkShaderModuleCreateFlagBits" type="bitmask">
+    </enums>
+    <enums name="VkPipelineCompilerControlFlagBitsAMD" type="bitmask">
+    </enums>
+    <enums name="VkToolPurposeFlagBitsEXT" type="bitmask">
+        <enum bitpos="0"     name="VK_TOOL_PURPOSE_VALIDATION_BIT_EXT"/>
+        <enum bitpos="1"     name="VK_TOOL_PURPOSE_PROFILING_BIT_EXT"/>
+        <enum bitpos="2"     name="VK_TOOL_PURPOSE_TRACING_BIT_EXT"/>
+        <enum bitpos="3"     name="VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT_EXT"/>
+        <enum bitpos="4"     name="VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT_EXT"/>
+    </enums>
+
+    <commands comment="Vulkan command definitions">
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_INITIALIZATION_FAILED,VK_ERROR_LAYER_NOT_PRESENT,VK_ERROR_EXTENSION_NOT_PRESENT,VK_ERROR_INCOMPATIBLE_DRIVER">
+            <proto><type>VkResult</type> <name>vkCreateInstance</name></proto>
+            <param>const <type>VkInstanceCreateInfo</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkInstance</type>* <name>pInstance</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroyInstance</name></proto>
+            <param optional="true" externsync="true"><type>VkInstance</type> <name>instance</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_INITIALIZATION_FAILED">
+            <proto><type>VkResult</type> <name>vkEnumeratePhysicalDevices</name></proto>
+            <param><type>VkInstance</type> <name>instance</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pPhysicalDeviceCount</name></param>
+            <param optional="true" len="pPhysicalDeviceCount"><type>VkPhysicalDevice</type>* <name>pPhysicalDevices</name></param>
+        </command>
+        <command>
+            <proto><type>PFN_vkVoidFunction</type> <name>vkGetDeviceProcAddr</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param len="null-terminated">const <type>char</type>* <name>pName</name></param>
+        </command>
+        <command>
+            <proto><type>PFN_vkVoidFunction</type> <name>vkGetInstanceProcAddr</name></proto>
+            <param optional="true"><type>VkInstance</type> <name>instance</name></param>
+            <param len="null-terminated">const <type>char</type>* <name>pName</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkGetPhysicalDeviceProperties</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>VkPhysicalDeviceProperties</type>* <name>pProperties</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkGetPhysicalDeviceQueueFamilyProperties</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pQueueFamilyPropertyCount</name></param>
+            <param optional="true" len="pQueueFamilyPropertyCount"><type>VkQueueFamilyProperties</type>* <name>pQueueFamilyProperties</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkGetPhysicalDeviceMemoryProperties</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>VkPhysicalDeviceMemoryProperties</type>* <name>pMemoryProperties</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkGetPhysicalDeviceFeatures</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>VkPhysicalDeviceFeatures</type>* <name>pFeatures</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkGetPhysicalDeviceFormatProperties</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>VkFormat</type> <name>format</name></param>
+            <param><type>VkFormatProperties</type>* <name>pFormatProperties</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_FORMAT_NOT_SUPPORTED">
+            <proto><type>VkResult</type> <name>vkGetPhysicalDeviceImageFormatProperties</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>VkFormat</type> <name>format</name></param>
+            <param><type>VkImageType</type> <name>type</name></param>
+            <param><type>VkImageTiling</type> <name>tiling</name></param>
+            <param><type>VkImageUsageFlags</type> <name>usage</name></param>
+            <param optional="true"><type>VkImageCreateFlags</type> <name>flags</name></param>
+            <param><type>VkImageFormatProperties</type>* <name>pImageFormatProperties</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_INITIALIZATION_FAILED,VK_ERROR_EXTENSION_NOT_PRESENT,VK_ERROR_FEATURE_NOT_PRESENT,VK_ERROR_TOO_MANY_OBJECTS,VK_ERROR_DEVICE_LOST">
+            <proto><type>VkResult</type> <name>vkCreateDevice</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param>const <type>VkDeviceCreateInfo</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkDevice</type>* <name>pDevice</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroyDevice</name></proto>
+            <param optional="true" externsync="true"><type>VkDevice</type> <name>device</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS">
+            <proto><type>VkResult</type> <name>vkEnumerateInstanceVersion</name></proto>
+            <param><type>uint32_t</type>* <name>pApiVersion</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkEnumerateInstanceLayerProperties</name></proto>
+            <param optional="false,true"><type>uint32_t</type>* <name>pPropertyCount</name></param>
+            <param optional="true" len="pPropertyCount"><type>VkLayerProperties</type>* <name>pProperties</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_LAYER_NOT_PRESENT">
+            <proto><type>VkResult</type> <name>vkEnumerateInstanceExtensionProperties</name></proto>
+            <param optional="true" len="null-terminated">const <type>char</type>* <name>pLayerName</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pPropertyCount</name></param>
+            <param optional="true" len="pPropertyCount"><type>VkExtensionProperties</type>* <name>pProperties</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkEnumerateDeviceLayerProperties</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pPropertyCount</name></param>
+            <param optional="true" len="pPropertyCount"><type>VkLayerProperties</type>* <name>pProperties</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_LAYER_NOT_PRESENT">
+            <proto><type>VkResult</type> <name>vkEnumerateDeviceExtensionProperties</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param optional="true" len="null-terminated">const <type>char</type>* <name>pLayerName</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pPropertyCount</name></param>
+            <param optional="true" len="pPropertyCount"><type>VkExtensionProperties</type>* <name>pProperties</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkGetDeviceQueue</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>uint32_t</type> <name>queueFamilyIndex</name></param>
+            <param><type>uint32_t</type> <name>queueIndex</name></param>
+            <param><type>VkQueue</type>* <name>pQueue</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_DEVICE_LOST">
+            <proto><type>VkResult</type> <name>vkQueueSubmit</name></proto>
+            <param externsync="true"><type>VkQueue</type> <name>queue</name></param>
+            <param optional="true"><type>uint32_t</type> <name>submitCount</name></param>
+            <param len="submitCount">const <type>VkSubmitInfo</type>* <name>pSubmits</name></param>
+            <param optional="true" externsync="true"><type>VkFence</type> <name>fence</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_DEVICE_LOST">
+            <proto><type>VkResult</type> <name>vkQueueWaitIdle</name></proto>
+            <param externsync="true"><type>VkQueue</type> <name>queue</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_DEVICE_LOST">
+            <proto><type>VkResult</type> <name>vkDeviceWaitIdle</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <implicitexternsyncparams>
+                <param>all sname:VkQueue objects created from pname:device</param>
+            </implicitexternsyncparams>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_TOO_MANY_OBJECTS,VK_ERROR_INVALID_EXTERNAL_HANDLE,VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR">
+            <proto><type>VkResult</type> <name>vkAllocateMemory</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkMemoryAllocateInfo</type>* <name>pAllocateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkDeviceMemory</type>* <name>pMemory</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkFreeMemory</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param optional="true" externsync="true"><type>VkDeviceMemory</type> <name>memory</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_MEMORY_MAP_FAILED">
+            <proto><type>VkResult</type> <name>vkMapMemory</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="true"><type>VkDeviceMemory</type> <name>memory</name></param>
+            <param><type>VkDeviceSize</type> <name>offset</name></param>
+            <param><type>VkDeviceSize</type> <name>size</name></param>
+            <param optional="true"><type>VkMemoryMapFlags</type> <name>flags</name></param>
+            <param optional="false,true"><type>void</type>** <name>ppData</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkUnmapMemory</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="true"><type>VkDeviceMemory</type> <name>memory</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkFlushMappedMemoryRanges</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>uint32_t</type> <name>memoryRangeCount</name></param>
+            <param len="memoryRangeCount">const <type>VkMappedMemoryRange</type>* <name>pMemoryRanges</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkInvalidateMappedMemoryRanges</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>uint32_t</type> <name>memoryRangeCount</name></param>
+            <param len="memoryRangeCount">const <type>VkMappedMemoryRange</type>* <name>pMemoryRanges</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkGetDeviceMemoryCommitment</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkDeviceMemory</type> <name>memory</name></param>
+            <param><type>VkDeviceSize</type>* <name>pCommittedMemoryInBytes</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkGetBufferMemoryRequirements</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkBuffer</type> <name>buffer</name></param>
+            <param><type>VkMemoryRequirements</type>* <name>pMemoryRequirements</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR">
+            <proto><type>VkResult</type> <name>vkBindBufferMemory</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="true"><type>VkBuffer</type> <name>buffer</name></param>
+            <param><type>VkDeviceMemory</type> <name>memory</name></param>
+            <param><type>VkDeviceSize</type> <name>memoryOffset</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkGetImageMemoryRequirements</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkImage</type> <name>image</name></param>
+            <param><type>VkMemoryRequirements</type>* <name>pMemoryRequirements</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkBindImageMemory</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="true"><type>VkImage</type> <name>image</name></param>
+            <param><type>VkDeviceMemory</type> <name>memory</name></param>
+            <param><type>VkDeviceSize</type> <name>memoryOffset</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkGetImageSparseMemoryRequirements</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkImage</type> <name>image</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pSparseMemoryRequirementCount</name></param>
+            <param optional="true" len="pSparseMemoryRequirementCount"><type>VkSparseImageMemoryRequirements</type>* <name>pSparseMemoryRequirements</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkGetPhysicalDeviceSparseImageFormatProperties</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>VkFormat</type> <name>format</name></param>
+            <param><type>VkImageType</type> <name>type</name></param>
+            <param><type>VkSampleCountFlagBits</type> <name>samples</name></param>
+            <param><type>VkImageUsageFlags</type> <name>usage</name></param>
+            <param><type>VkImageTiling</type> <name>tiling</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pPropertyCount</name></param>
+            <param optional="true" len="pPropertyCount"><type>VkSparseImageFormatProperties</type>* <name>pProperties</name></param>
+        </command>
+        <command queues="sparse_binding" successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_DEVICE_LOST">
+            <proto><type>VkResult</type> <name>vkQueueBindSparse</name></proto>
+            <param externsync="true"><type>VkQueue</type> <name>queue</name></param>
+            <param optional="true"><type>uint32_t</type> <name>bindInfoCount</name></param>
+            <param len="bindInfoCount" externsync="pBindInfo[].pBufferBinds[].buffer,pBindInfo[].pImageOpaqueBinds[].image,pBindInfo[].pImageBinds[].image">const <type>VkBindSparseInfo</type>* <name>pBindInfo</name></param>
+            <param optional="true" externsync="true"><type>VkFence</type> <name>fence</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreateFence</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkFenceCreateInfo</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkFence</type>* <name>pFence</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroyFence</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param optional="true" externsync="true"><type>VkFence</type> <name>fence</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkResetFences</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>uint32_t</type> <name>fenceCount</name></param>
+            <param len="fenceCount" externsync="true">const <type>VkFence</type>* <name>pFences</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_NOT_READY" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_DEVICE_LOST">
+            <proto><type>VkResult</type> <name>vkGetFenceStatus</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkFence</type> <name>fence</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_TIMEOUT" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_DEVICE_LOST">
+            <proto><type>VkResult</type> <name>vkWaitForFences</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>uint32_t</type> <name>fenceCount</name></param>
+            <param len="fenceCount">const <type>VkFence</type>* <name>pFences</name></param>
+            <param><type>VkBool32</type> <name>waitAll</name></param>
+            <param><type>uint64_t</type> <name>timeout</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreateSemaphore</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkSemaphoreCreateInfo</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkSemaphore</type>* <name>pSemaphore</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroySemaphore</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param optional="true" externsync="true"><type>VkSemaphore</type> <name>semaphore</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreateEvent</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkEventCreateInfo</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkEvent</type>* <name>pEvent</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroyEvent</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param optional="true" externsync="true"><type>VkEvent</type> <name>event</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command successcodes="VK_EVENT_SET,VK_EVENT_RESET" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_DEVICE_LOST">
+            <proto><type>VkResult</type> <name>vkGetEventStatus</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkEvent</type> <name>event</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkSetEvent</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="true"><type>VkEvent</type> <name>event</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkResetEvent</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="true"><type>VkEvent</type> <name>event</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreateQueryPool</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkQueryPoolCreateInfo</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkQueryPool</type>* <name>pQueryPool</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroyQueryPool</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param optional="true" externsync="true"><type>VkQueryPool</type> <name>queryPool</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_NOT_READY" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_DEVICE_LOST">
+            <proto><type>VkResult</type> <name>vkGetQueryPoolResults</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkQueryPool</type> <name>queryPool</name></param>
+            <param><type>uint32_t</type> <name>firstQuery</name></param>
+            <param><type>uint32_t</type> <name>queryCount</name></param>
+            <param><type>size_t</type> <name>dataSize</name></param>
+            <param len="dataSize"><type>void</type>* <name>pData</name></param>
+            <param><type>VkDeviceSize</type> <name>stride</name></param>
+            <param optional="true"><type>VkQueryResultFlags</type> <name>flags</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkResetQueryPoolEXT</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkQueryPool</type> <name>queryPool</name></param>
+            <param><type>uint32_t</type> <name>firstQuery</name></param>
+            <param><type>uint32_t</type> <name>queryCount</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR">
+            <proto><type>VkResult</type> <name>vkCreateBuffer</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkBufferCreateInfo</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkBuffer</type>* <name>pBuffer</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroyBuffer</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param optional="true" externsync="true"><type>VkBuffer</type> <name>buffer</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreateBufferView</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkBufferViewCreateInfo</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkBufferView</type>* <name>pView</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroyBufferView</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param optional="true" externsync="true"><type>VkBufferView</type> <name>bufferView</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreateImage</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkImageCreateInfo</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkImage</type>* <name>pImage</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroyImage</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param optional="true" externsync="true"><type>VkImage</type> <name>image</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkGetImageSubresourceLayout</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkImage</type> <name>image</name></param>
+            <param>const <type>VkImageSubresource</type>* <name>pSubresource</name></param>
+            <param><type>VkSubresourceLayout</type>* <name>pLayout</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreateImageView</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkImageViewCreateInfo</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkImageView</type>* <name>pView</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroyImageView</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param optional="true" externsync="true"><type>VkImageView</type> <name>imageView</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_INVALID_SHADER_NV">
+            <proto><type>VkResult</type> <name>vkCreateShaderModule</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkShaderModuleCreateInfo</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkShaderModule</type>* <name>pShaderModule</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroyShaderModule</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param optional="true" externsync="true"><type>VkShaderModule</type> <name>shaderModule</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreatePipelineCache</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkPipelineCacheCreateInfo</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkPipelineCache</type>* <name>pPipelineCache</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroyPipelineCache</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param optional="true" externsync="true"><type>VkPipelineCache</type> <name>pipelineCache</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetPipelineCacheData</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkPipelineCache</type> <name>pipelineCache</name></param>
+            <param optional="false,true"><type>size_t</type>* <name>pDataSize</name></param>
+            <param optional="true" len="pDataSize"><type>void</type>* <name>pData</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkMergePipelineCaches</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="true"><type>VkPipelineCache</type> <name>dstCache</name></param>
+            <param><type>uint32_t</type> <name>srcCacheCount</name></param>
+            <param len="srcCacheCount">const <type>VkPipelineCache</type>* <name>pSrcCaches</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_INVALID_SHADER_NV">
+            <proto><type>VkResult</type> <name>vkCreateGraphicsPipelines</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param optional="true"><type>VkPipelineCache</type> <name>pipelineCache</name></param>
+            <param><type>uint32_t</type> <name>createInfoCount</name></param>
+            <param len="createInfoCount">const <type>VkGraphicsPipelineCreateInfo</type>* <name>pCreateInfos</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param len="createInfoCount"><type>VkPipeline</type>* <name>pPipelines</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_INVALID_SHADER_NV">
+            <proto><type>VkResult</type> <name>vkCreateComputePipelines</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param optional="true"><type>VkPipelineCache</type> <name>pipelineCache</name></param>
+            <param><type>uint32_t</type> <name>createInfoCount</name></param>
+            <param len="createInfoCount">const <type>VkComputePipelineCreateInfo</type>* <name>pCreateInfos</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param len="createInfoCount"><type>VkPipeline</type>* <name>pPipelines</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroyPipeline</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param optional="true" externsync="true"><type>VkPipeline</type> <name>pipeline</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreatePipelineLayout</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkPipelineLayoutCreateInfo</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkPipelineLayout</type>* <name>pPipelineLayout</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroyPipelineLayout</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param optional="true" externsync="true"><type>VkPipelineLayout</type> <name>pipelineLayout</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_TOO_MANY_OBJECTS">
+            <proto><type>VkResult</type> <name>vkCreateSampler</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkSamplerCreateInfo</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkSampler</type>* <name>pSampler</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroySampler</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param optional="true" externsync="true"><type>VkSampler</type> <name>sampler</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreateDescriptorSetLayout</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkDescriptorSetLayoutCreateInfo</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkDescriptorSetLayout</type>* <name>pSetLayout</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroyDescriptorSetLayout</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param optional="true" externsync="true"><type>VkDescriptorSetLayout</type> <name>descriptorSetLayout</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_FRAGMENTATION_EXT">
+            <proto><type>VkResult</type> <name>vkCreateDescriptorPool</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkDescriptorPoolCreateInfo</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkDescriptorPool</type>* <name>pDescriptorPool</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroyDescriptorPool</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param optional="true" externsync="true"><type>VkDescriptorPool</type> <name>descriptorPool</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS">
+            <proto><type>VkResult</type> <name>vkResetDescriptorPool</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="true"><type>VkDescriptorPool</type> <name>descriptorPool</name></param>
+            <param optional="true"><type>VkDescriptorPoolResetFlags</type> <name>flags</name></param>
+            <implicitexternsyncparams>
+                <param>any sname:VkDescriptorSet objects allocated from pname:descriptorPool</param>
+            </implicitexternsyncparams>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_FRAGMENTED_POOL,VK_ERROR_OUT_OF_POOL_MEMORY">
+            <proto><type>VkResult</type> <name>vkAllocateDescriptorSets</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="pAllocateInfo::descriptorPool">const <type>VkDescriptorSetAllocateInfo</type>* <name>pAllocateInfo</name></param>
+            <param len="pAllocateInfo::descriptorSetCount"><type>VkDescriptorSet</type>* <name>pDescriptorSets</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS">
+            <proto><type>VkResult</type> <name>vkFreeDescriptorSets</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="true"><type>VkDescriptorPool</type> <name>descriptorPool</name></param>
+            <param><type>uint32_t</type> <name>descriptorSetCount</name></param>
+            <param noautovalidity="true" externsync="true" len="descriptorSetCount">const <type>VkDescriptorSet</type>* <name>pDescriptorSets</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkUpdateDescriptorSets</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param optional="true"><type>uint32_t</type> <name>descriptorWriteCount</name></param>
+            <param len="descriptorWriteCount" externsync="pDescriptorWrites[].dstSet">const <type>VkWriteDescriptorSet</type>* <name>pDescriptorWrites</name></param>
+            <param optional="true"><type>uint32_t</type> <name>descriptorCopyCount</name></param>
+            <param len="descriptorCopyCount" externsync="pDescriptorCopies[].dstSet">const <type>VkCopyDescriptorSet</type>* <name>pDescriptorCopies</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreateFramebuffer</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkFramebufferCreateInfo</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkFramebuffer</type>* <name>pFramebuffer</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroyFramebuffer</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param optional="true" externsync="true"><type>VkFramebuffer</type> <name>framebuffer</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreateRenderPass</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkRenderPassCreateInfo</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkRenderPass</type>* <name>pRenderPass</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroyRenderPass</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param optional="true" externsync="true"><type>VkRenderPass</type> <name>renderPass</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkGetRenderAreaGranularity</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkRenderPass</type> <name>renderPass</name></param>
+            <param><type>VkExtent2D</type>* <name>pGranularity</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreateCommandPool</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkCommandPoolCreateInfo</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkCommandPool</type>* <name>pCommandPool</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroyCommandPool</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param optional="true" externsync="true"><type>VkCommandPool</type> <name>commandPool</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkResetCommandPool</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="true"><type>VkCommandPool</type> <name>commandPool</name></param>
+            <param optional="true"><type>VkCommandPoolResetFlags</type> <name>flags</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkAllocateCommandBuffers</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="pAllocateInfo::commandPool">const <type>VkCommandBufferAllocateInfo</type>* <name>pAllocateInfo</name></param>
+            <param len="pAllocateInfo::commandBufferCount"><type>VkCommandBuffer</type>* <name>pCommandBuffers</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkFreeCommandBuffers</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="true"><type>VkCommandPool</type> <name>commandPool</name></param>
+            <param><type>uint32_t</type> <name>commandBufferCount</name></param>
+            <param noautovalidity="true" externsync="true" len="commandBufferCount">const <type>VkCommandBuffer</type>* <name>pCommandBuffers</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkBeginCommandBuffer</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param>const <type>VkCommandBufferBeginInfo</type>* <name>pBeginInfo</name></param>
+            <implicitexternsyncparams>
+                <param>the sname:VkCommandPool that pname:commandBuffer was allocated from</param>
+            </implicitexternsyncparams>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkEndCommandBuffer</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <implicitexternsyncparams>
+                <param>the sname:VkCommandPool that pname:commandBuffer was allocated from</param>
+            </implicitexternsyncparams>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkResetCommandBuffer</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param optional="true"><type>VkCommandBufferResetFlags</type> <name>flags</name></param>
+        </command>
+        <command queues="graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdBindPipeline</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkPipelineBindPoint</type> <name>pipelineBindPoint</name></param>
+            <param><type>VkPipeline</type> <name>pipeline</name></param>
+        </command>
+        <command queues="graphics" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdSetViewport</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>uint32_t</type> <name>firstViewport</name></param>
+            <param><type>uint32_t</type> <name>viewportCount</name></param>
+            <param len="viewportCount">const <type>VkViewport</type>* <name>pViewports</name></param>
+        </command>
+        <command queues="graphics" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdSetScissor</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>uint32_t</type> <name>firstScissor</name></param>
+            <param><type>uint32_t</type> <name>scissorCount</name></param>
+            <param len="scissorCount">const <type>VkRect2D</type>* <name>pScissors</name></param>
+        </command>
+        <command queues="graphics" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdSetLineWidth</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>float</type> <name>lineWidth</name></param>
+        </command>
+        <command queues="graphics" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdSetDepthBias</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>float</type> <name>depthBiasConstantFactor</name></param>
+            <param><type>float</type> <name>depthBiasClamp</name></param>
+            <param><type>float</type> <name>depthBiasSlopeFactor</name></param>
+        </command>
+        <command queues="graphics" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdSetBlendConstants</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param>const <type>float</type> <name>blendConstants</name>[4]</param>
+        </command>
+        <command queues="graphics" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdSetDepthBounds</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>float</type> <name>minDepthBounds</name></param>
+            <param><type>float</type> <name>maxDepthBounds</name></param>
+        </command>
+        <command queues="graphics" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdSetStencilCompareMask</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkStencilFaceFlags</type> <name>faceMask</name></param>
+            <param><type>uint32_t</type> <name>compareMask</name></param>
+        </command>
+        <command queues="graphics" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdSetStencilWriteMask</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkStencilFaceFlags</type> <name>faceMask</name></param>
+            <param><type>uint32_t</type> <name>writeMask</name></param>
+        </command>
+        <command queues="graphics" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdSetStencilReference</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkStencilFaceFlags</type> <name>faceMask</name></param>
+            <param><type>uint32_t</type> <name>reference</name></param>
+        </command>
+        <command queues="graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdBindDescriptorSets</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkPipelineBindPoint</type> <name>pipelineBindPoint</name></param>
+            <param><type>VkPipelineLayout</type> <name>layout</name></param>
+            <param><type>uint32_t</type> <name>firstSet</name></param>
+            <param><type>uint32_t</type> <name>descriptorSetCount</name></param>
+            <param len="descriptorSetCount">const <type>VkDescriptorSet</type>* <name>pDescriptorSets</name></param>
+            <param optional="true"><type>uint32_t</type> <name>dynamicOffsetCount</name></param>
+            <param len="dynamicOffsetCount">const <type>uint32_t</type>* <name>pDynamicOffsets</name></param>
+        </command>
+        <command queues="graphics" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdBindIndexBuffer</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkBuffer</type> <name>buffer</name></param>
+            <param><type>VkDeviceSize</type> <name>offset</name></param>
+            <param><type>VkIndexType</type> <name>indexType</name></param>
+        </command>
+        <command queues="graphics" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdBindVertexBuffers</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>uint32_t</type> <name>firstBinding</name></param>
+            <param><type>uint32_t</type> <name>bindingCount</name></param>
+            <param len="bindingCount">const <type>VkBuffer</type>* <name>pBuffers</name></param>
+            <param len="bindingCount">const <type>VkDeviceSize</type>* <name>pOffsets</name></param>
+        </command>
+        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary" pipeline="graphics">
+            <proto><type>void</type> <name>vkCmdDraw</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>uint32_t</type> <name>vertexCount</name></param>
+            <param><type>uint32_t</type> <name>instanceCount</name></param>
+            <param><type>uint32_t</type> <name>firstVertex</name></param>
+            <param><type>uint32_t</type> <name>firstInstance</name></param>
+        </command>
+        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary" pipeline="graphics">
+            <proto><type>void</type> <name>vkCmdDrawIndexed</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>uint32_t</type> <name>indexCount</name></param>
+            <param><type>uint32_t</type> <name>instanceCount</name></param>
+            <param><type>uint32_t</type> <name>firstIndex</name></param>
+            <param><type>int32_t</type> <name>vertexOffset</name></param>
+            <param><type>uint32_t</type> <name>firstInstance</name></param>
+        </command>
+        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary" pipeline="graphics">
+            <proto><type>void</type> <name>vkCmdDrawIndirect</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkBuffer</type> <name>buffer</name></param>
+            <param><type>VkDeviceSize</type> <name>offset</name></param>
+            <param><type>uint32_t</type> <name>drawCount</name></param>
+            <param><type>uint32_t</type> <name>stride</name></param>
+        </command>
+        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary" pipeline="graphics">
+            <proto><type>void</type> <name>vkCmdDrawIndexedIndirect</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkBuffer</type> <name>buffer</name></param>
+            <param><type>VkDeviceSize</type> <name>offset</name></param>
+            <param><type>uint32_t</type> <name>drawCount</name></param>
+            <param><type>uint32_t</type> <name>stride</name></param>
+        </command>
+        <command queues="compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="compute">
+            <proto><type>void</type> <name>vkCmdDispatch</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>uint32_t</type> <name>groupCountX</name></param>
+            <param><type>uint32_t</type> <name>groupCountY</name></param>
+            <param><type>uint32_t</type> <name>groupCountZ</name></param>
+        </command>
+        <command queues="compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="compute">
+            <proto><type>void</type> <name>vkCmdDispatchIndirect</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkBuffer</type> <name>buffer</name></param>
+            <param><type>VkDeviceSize</type> <name>offset</name></param>
+        </command>
+        <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
+            <proto><type>void</type> <name>vkCmdCopyBuffer</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkBuffer</type> <name>srcBuffer</name></param>
+            <param><type>VkBuffer</type> <name>dstBuffer</name></param>
+            <param><type>uint32_t</type> <name>regionCount</name></param>
+            <param len="regionCount">const <type>VkBufferCopy</type>* <name>pRegions</name></param>
+        </command>
+        <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
+            <proto><type>void</type> <name>vkCmdCopyImage</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkImage</type> <name>srcImage</name></param>
+            <param><type>VkImageLayout</type> <name>srcImageLayout</name></param>
+            <param><type>VkImage</type> <name>dstImage</name></param>
+            <param><type>VkImageLayout</type> <name>dstImageLayout</name></param>
+            <param><type>uint32_t</type> <name>regionCount</name></param>
+            <param len="regionCount">const <type>VkImageCopy</type>* <name>pRegions</name></param>
+        </command>
+        <command queues="graphics" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
+            <proto><type>void</type> <name>vkCmdBlitImage</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkImage</type> <name>srcImage</name></param>
+            <param><type>VkImageLayout</type> <name>srcImageLayout</name></param>
+            <param><type>VkImage</type> <name>dstImage</name></param>
+            <param><type>VkImageLayout</type> <name>dstImageLayout</name></param>
+            <param><type>uint32_t</type> <name>regionCount</name></param>
+            <param len="regionCount">const <type>VkImageBlit</type>* <name>pRegions</name></param>
+            <param><type>VkFilter</type> <name>filter</name></param>
+        </command>
+        <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
+            <proto><type>void</type> <name>vkCmdCopyBufferToImage</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkBuffer</type> <name>srcBuffer</name></param>
+            <param><type>VkImage</type> <name>dstImage</name></param>
+            <param><type>VkImageLayout</type> <name>dstImageLayout</name></param>
+            <param><type>uint32_t</type> <name>regionCount</name></param>
+            <param len="regionCount">const <type>VkBufferImageCopy</type>* <name>pRegions</name></param>
+        </command>
+        <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
+            <proto><type>void</type> <name>vkCmdCopyImageToBuffer</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkImage</type> <name>srcImage</name></param>
+            <param><type>VkImageLayout</type> <name>srcImageLayout</name></param>
+            <param><type>VkBuffer</type> <name>dstBuffer</name></param>
+            <param><type>uint32_t</type> <name>regionCount</name></param>
+            <param len="regionCount">const <type>VkBufferImageCopy</type>* <name>pRegions</name></param>
+        </command>
+        <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
+            <proto><type>void</type> <name>vkCmdUpdateBuffer</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkBuffer</type> <name>dstBuffer</name></param>
+            <param><type>VkDeviceSize</type> <name>dstOffset</name></param>
+            <param><type>VkDeviceSize</type> <name>dataSize</name></param>
+            <param len="dataSize">const <type>void</type>* <name>pData</name></param>
+        </command>
+        <command queues="transfer,graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer" comment="transfer support is only available when VK_KHR_maintenance1 is enabled, as documented in valid usage language in the specification">
+            <proto><type>void</type> <name>vkCmdFillBuffer</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkBuffer</type> <name>dstBuffer</name></param>
+            <param><type>VkDeviceSize</type> <name>dstOffset</name></param>
+            <param><type>VkDeviceSize</type> <name>size</name></param>
+            <param><type>uint32_t</type> <name>data</name></param>
+        </command>
+        <command queues="graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
+            <proto><type>void</type> <name>vkCmdClearColorImage</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkImage</type> <name>image</name></param>
+            <param><type>VkImageLayout</type> <name>imageLayout</name></param>
+            <param>const <type>VkClearColorValue</type>* <name>pColor</name></param>
+            <param><type>uint32_t</type> <name>rangeCount</name></param>
+            <param len="rangeCount">const <type>VkImageSubresourceRange</type>* <name>pRanges</name></param>
+        </command>
+        <command queues="graphics" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
+            <proto><type>void</type> <name>vkCmdClearDepthStencilImage</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkImage</type> <name>image</name></param>
+            <param><type>VkImageLayout</type> <name>imageLayout</name></param>
+            <param>const <type>VkClearDepthStencilValue</type>* <name>pDepthStencil</name></param>
+            <param><type>uint32_t</type> <name>rangeCount</name></param>
+            <param len="rangeCount">const <type>VkImageSubresourceRange</type>* <name>pRanges</name></param>
+        </command>
+        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary" pipeline="graphics">
+            <proto><type>void</type> <name>vkCmdClearAttachments</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>uint32_t</type> <name>attachmentCount</name></param>
+            <param len="attachmentCount">const <type>VkClearAttachment</type>* <name>pAttachments</name></param>
+            <param><type>uint32_t</type> <name>rectCount</name></param>
+            <param len="rectCount">const <type>VkClearRect</type>* <name>pRects</name></param>
+        </command>
+        <command queues="graphics" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
+            <proto><type>void</type> <name>vkCmdResolveImage</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkImage</type> <name>srcImage</name></param>
+            <param><type>VkImageLayout</type> <name>srcImageLayout</name></param>
+            <param><type>VkImage</type> <name>dstImage</name></param>
+            <param><type>VkImageLayout</type> <name>dstImageLayout</name></param>
+            <param><type>uint32_t</type> <name>regionCount</name></param>
+            <param len="regionCount">const <type>VkImageResolve</type>* <name>pRegions</name></param>
+        </command>
+        <command queues="graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdSetEvent</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkEvent</type> <name>event</name></param>
+            <param><type>VkPipelineStageFlags</type> <name>stageMask</name></param>
+        </command>
+        <command queues="graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdResetEvent</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkEvent</type> <name>event</name></param>
+            <param><type>VkPipelineStageFlags</type> <name>stageMask</name></param>
+        </command>
+        <command queues="graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdWaitEvents</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>uint32_t</type> <name>eventCount</name></param>
+            <param len="eventCount">const <type>VkEvent</type>* <name>pEvents</name></param>
+            <param><type>VkPipelineStageFlags</type> <name>srcStageMask</name></param>
+            <param><type>VkPipelineStageFlags</type> <name>dstStageMask</name></param>
+            <param optional="true"><type>uint32_t</type> <name>memoryBarrierCount</name></param>
+            <param len="memoryBarrierCount">const <type>VkMemoryBarrier</type>* <name>pMemoryBarriers</name></param>
+            <param optional="true"><type>uint32_t</type> <name>bufferMemoryBarrierCount</name></param>
+            <param len="bufferMemoryBarrierCount">const <type>VkBufferMemoryBarrier</type>* <name>pBufferMemoryBarriers</name></param>
+            <param optional="true"><type>uint32_t</type> <name>imageMemoryBarrierCount</name></param>
+            <param len="imageMemoryBarrierCount">const <type>VkImageMemoryBarrier</type>* <name>pImageMemoryBarriers</name></param>
+        </command>
+        <command queues="transfer,graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdPipelineBarrier</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkPipelineStageFlags</type> <name>srcStageMask</name></param>
+            <param><type>VkPipelineStageFlags</type> <name>dstStageMask</name></param>
+            <param optional="true"><type>VkDependencyFlags</type> <name>dependencyFlags</name></param>
+            <param optional="true"><type>uint32_t</type> <name>memoryBarrierCount</name></param>
+            <param len="memoryBarrierCount">const <type>VkMemoryBarrier</type>* <name>pMemoryBarriers</name></param>
+            <param optional="true"><type>uint32_t</type> <name>bufferMemoryBarrierCount</name></param>
+            <param len="bufferMemoryBarrierCount">const <type>VkBufferMemoryBarrier</type>* <name>pBufferMemoryBarriers</name></param>
+            <param optional="true"><type>uint32_t</type> <name>imageMemoryBarrierCount</name></param>
+            <param len="imageMemoryBarrierCount">const <type>VkImageMemoryBarrier</type>* <name>pImageMemoryBarriers</name></param>
+        </command>
+        <command queues="graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdBeginQuery</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkQueryPool</type> <name>queryPool</name></param>
+            <param><type>uint32_t</type> <name>query</name></param>
+            <param optional="true"><type>VkQueryControlFlags</type> <name>flags</name></param>
+        </command>
+        <command queues="graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdEndQuery</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkQueryPool</type> <name>queryPool</name></param>
+            <param><type>uint32_t</type> <name>query</name></param>
+        </command>
+        <command queues="graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdBeginConditionalRenderingEXT</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param>const <type>VkConditionalRenderingBeginInfoEXT</type>* <name>pConditionalRenderingBegin</name></param>
+        </command>
+        <command queues="graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdEndConditionalRenderingEXT</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+        </command>
+        <command queues="graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdResetQueryPool</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkQueryPool</type> <name>queryPool</name></param>
+            <param><type>uint32_t</type> <name>firstQuery</name></param>
+            <param><type>uint32_t</type> <name>queryCount</name></param>
+        </command>
+        <command queues="transfer,graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary" pipeline="transfer">
+            <proto><type>void</type> <name>vkCmdWriteTimestamp</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkPipelineStageFlagBits</type> <name>pipelineStage</name></param>
+            <param><type>VkQueryPool</type> <name>queryPool</name></param>
+            <param><type>uint32_t</type> <name>query</name></param>
+        </command>
+        <command queues="graphics,compute" renderpass="outside" cmdbufferlevel="primary,secondary" pipeline="transfer">
+            <proto><type>void</type> <name>vkCmdCopyQueryPoolResults</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkQueryPool</type> <name>queryPool</name></param>
+            <param><type>uint32_t</type> <name>firstQuery</name></param>
+            <param><type>uint32_t</type> <name>queryCount</name></param>
+            <param><type>VkBuffer</type> <name>dstBuffer</name></param>
+            <param><type>VkDeviceSize</type> <name>dstOffset</name></param>
+            <param><type>VkDeviceSize</type> <name>stride</name></param>
+            <param optional="true"><type>VkQueryResultFlags</type> <name>flags</name></param>
+        </command>
+        <command queues="graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdPushConstants</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkPipelineLayout</type> <name>layout</name></param>
+            <param><type>VkShaderStageFlags</type> <name>stageFlags</name></param>
+            <param><type>uint32_t</type> <name>offset</name></param>
+            <param><type>uint32_t</type> <name>size</name></param>
+            <param len="size">const <type>void</type>* <name>pValues</name></param>
+        </command>
+        <command queues="graphics" renderpass="outside" cmdbufferlevel="primary" pipeline="graphics">
+            <proto><type>void</type> <name>vkCmdBeginRenderPass</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param>const <type>VkRenderPassBeginInfo</type>* <name>pRenderPassBegin</name></param>
+            <param><type>VkSubpassContents</type> <name>contents</name></param>
+        </command>
+        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary" pipeline="graphics">
+            <proto><type>void</type> <name>vkCmdNextSubpass</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkSubpassContents</type> <name>contents</name></param>
+        </command>
+        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary" pipeline="graphics">
+            <proto><type>void</type> <name>vkCmdEndRenderPass</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+        </command>
+        <command queues="transfer,graphics,compute" renderpass="both" cmdbufferlevel="primary">
+            <proto><type>void</type> <name>vkCmdExecuteCommands</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>uint32_t</type> <name>commandBufferCount</name></param>
+            <param len="commandBufferCount">const <type>VkCommandBuffer</type>* <name>pCommandBuffers</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_NATIVE_WINDOW_IN_USE_KHR">
+            <proto><type>VkResult</type> <name>vkCreateAndroidSurfaceKHR</name></proto>
+            <param><type>VkInstance</type> <name>instance</name></param>
+            <param>const <type>VkAndroidSurfaceCreateInfoKHR</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkSurfaceKHR</type>* <name>pSurface</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetPhysicalDeviceDisplayPropertiesKHR</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pPropertyCount</name></param>
+            <param optional="true" len="pPropertyCount"><type>VkDisplayPropertiesKHR</type>* <name>pProperties</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetPhysicalDeviceDisplayPlanePropertiesKHR</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pPropertyCount</name></param>
+            <param optional="true" len="pPropertyCount"><type>VkDisplayPlanePropertiesKHR</type>* <name>pProperties</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetDisplayPlaneSupportedDisplaysKHR</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>uint32_t</type> <name>planeIndex</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pDisplayCount</name></param>
+            <param optional="true" len="pDisplayCount"><type>VkDisplayKHR</type>* <name>pDisplays</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetDisplayModePropertiesKHR</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>VkDisplayKHR</type> <name>display</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pPropertyCount</name></param>
+            <param optional="true" len="pPropertyCount"><type>VkDisplayModePropertiesKHR</type>* <name>pProperties</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_INITIALIZATION_FAILED">
+            <proto><type>VkResult</type> <name>vkCreateDisplayModeKHR</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param externsync="true"><type>VkDisplayKHR</type> <name>display</name></param>
+            <param>const <type>VkDisplayModeCreateInfoKHR</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkDisplayModeKHR</type>* <name>pMode</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetDisplayPlaneCapabilitiesKHR</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param externsync="true"><type>VkDisplayModeKHR</type> <name>mode</name></param>
+            <param><type>uint32_t</type> <name>planeIndex</name></param>
+            <param><type>VkDisplayPlaneCapabilitiesKHR</type>* <name>pCapabilities</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreateDisplayPlaneSurfaceKHR</name></proto>
+            <param><type>VkInstance</type> <name>instance</name></param>
+            <param>const <type>VkDisplaySurfaceCreateInfoKHR</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkSurfaceKHR</type>* <name>pSurface</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_INCOMPATIBLE_DISPLAY_KHR,VK_ERROR_DEVICE_LOST,VK_ERROR_SURFACE_LOST_KHR">
+            <proto><type>VkResult</type> <name>vkCreateSharedSwapchainsKHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>uint32_t</type> <name>swapchainCount</name></param>
+            <param len="swapchainCount" externsync="pCreateInfos[].surface,pCreateInfos[].oldSwapchain">const <type>VkSwapchainCreateInfoKHR</type>* <name>pCreateInfos</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param len="swapchainCount"><type>VkSwapchainKHR</type>* <name>pSwapchains</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroySurfaceKHR</name></proto>
+            <param><type>VkInstance</type> <name>instance</name></param>
+            <param optional="true" externsync="true"><type>VkSurfaceKHR</type> <name>surface</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_SURFACE_LOST_KHR">
+            <proto><type>VkResult</type> <name>vkGetPhysicalDeviceSurfaceSupportKHR</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>uint32_t</type> <name>queueFamilyIndex</name></param>
+            <param><type>VkSurfaceKHR</type> <name>surface</name></param>
+            <param><type>VkBool32</type>* <name>pSupported</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_SURFACE_LOST_KHR">
+            <proto><type>VkResult</type> <name>vkGetPhysicalDeviceSurfaceCapabilitiesKHR</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>VkSurfaceKHR</type> <name>surface</name></param>
+            <param><type>VkSurfaceCapabilitiesKHR</type>* <name>pSurfaceCapabilities</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_SURFACE_LOST_KHR">
+            <proto><type>VkResult</type> <name>vkGetPhysicalDeviceSurfaceFormatsKHR</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>VkSurfaceKHR</type> <name>surface</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pSurfaceFormatCount</name></param>
+            <param optional="true" len="pSurfaceFormatCount"><type>VkSurfaceFormatKHR</type>* <name>pSurfaceFormats</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_SURFACE_LOST_KHR">
+            <proto><type>VkResult</type> <name>vkGetPhysicalDeviceSurfacePresentModesKHR</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>VkSurfaceKHR</type> <name>surface</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pPresentModeCount</name></param>
+            <param optional="true" len="pPresentModeCount"><type>VkPresentModeKHR</type>* <name>pPresentModes</name></param>
+        </command>
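+        <!--
+            Note on the count/array parameter pairs above: optional="false,true"
+            on a count parameter means the pointer itself is required while the
+            value it points to is optional, and len ties the output array's length
+            to that count. Commands that can return VK_INCOMPLETE follow the usual
+            two-call sizing idiom. A minimal C sketch, assuming <vulkan/vulkan.h>
+            and <stdlib.h> are included and that physicalDevice and surface are
+            already-created handles (names here are placeholders):
+
+                uint32_t formatCount = 0;
+                vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface,
+                                                     &formatCount, NULL);
+                VkSurfaceFormatKHR *formats =
+                    malloc(formatCount * sizeof *formats);
+                vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface,
+                                                     &formatCount, formats);
+        -->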
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_DEVICE_LOST,VK_ERROR_SURFACE_LOST_KHR,VK_ERROR_NATIVE_WINDOW_IN_USE_KHR,VK_ERROR_INITIALIZATION_FAILED">
+            <proto><type>VkResult</type> <name>vkCreateSwapchainKHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="pCreateInfo.surface,pCreateInfo.oldSwapchain">const <type>VkSwapchainCreateInfoKHR</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkSwapchainKHR</type>* <name>pSwapchain</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroySwapchainKHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param optional="true" externsync="true"><type>VkSwapchainKHR</type> <name>swapchain</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetSwapchainImagesKHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkSwapchainKHR</type> <name>swapchain</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pSwapchainImageCount</name></param>
+            <param optional="true" len="pSwapchainImageCount"><type>VkImage</type>* <name>pSwapchainImages</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_TIMEOUT,VK_NOT_READY,VK_SUBOPTIMAL_KHR" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_DEVICE_LOST,VK_ERROR_OUT_OF_DATE_KHR,VK_ERROR_SURFACE_LOST_KHR,VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT">
+            <proto><type>VkResult</type> <name>vkAcquireNextImageKHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="true"><type>VkSwapchainKHR</type> <name>swapchain</name></param>
+            <param><type>uint64_t</type> <name>timeout</name></param>
+            <param optional="true" externsync="true"><type>VkSemaphore</type> <name>semaphore</name></param>
+            <param optional="true" externsync="true"><type>VkFence</type> <name>fence</name></param>
+            <param><type>uint32_t</type>* <name>pImageIndex</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_SUBOPTIMAL_KHR" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_DEVICE_LOST,VK_ERROR_OUT_OF_DATE_KHR,VK_ERROR_SURFACE_LOST_KHR,VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT">
+            <proto><type>VkResult</type> <name>vkQueuePresentKHR</name></proto>
+            <param externsync="true"><type>VkQueue</type> <name>queue</name></param>
+            <param externsync="pPresentInfo.pWaitSemaphores[],pPresentInfo.pSwapchains[]">const <type>VkPresentInfoKHR</type>* <name>pPresentInfo</name></param>
+        </command>
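+        <!--
+            The swapchain commands above are normally driven in an
+            acquire/submit/present loop. A minimal C sketch, assuming
+            <vulkan/vulkan.h> and already-created device, swapchain, queue and the
+            two semaphores named below (all identifiers are placeholders, not part
+            of the registry):
+
+                uint32_t imageIndex;
+                vkAcquireNextImageKHR(device, swapchain, UINT64_MAX,
+                                      imageAvailableSemaphore, VK_NULL_HANDLE,
+                                      &imageIndex);
+                /* record and submit work that waits on imageAvailableSemaphore
+                   and signals renderFinishedSemaphore */
+                VkPresentInfoKHR presentInfo = {
+                    .sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
+                    .waitSemaphoreCount = 1,
+                    .pWaitSemaphores = &renderFinishedSemaphore,
+                    .swapchainCount = 1,
+                    .pSwapchains = &swapchain,
+                    .pImageIndices = &imageIndex,
+                };
+                vkQueuePresentKHR(queue, &presentInfo);
+        -->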
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_NATIVE_WINDOW_IN_USE_KHR">
+            <proto><type>VkResult</type> <name>vkCreateViSurfaceNN</name></proto>
+            <param><type>VkInstance</type> <name>instance</name></param>
+            <param>const <type>VkViSurfaceCreateInfoNN</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkSurfaceKHR</type>* <name>pSurface</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreateWaylandSurfaceKHR</name></proto>
+            <param><type>VkInstance</type> <name>instance</name></param>
+            <param>const <type>VkWaylandSurfaceCreateInfoKHR</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkSurfaceKHR</type>* <name>pSurface</name></param>
+        </command>
+        <command>
+            <proto><type>VkBool32</type> <name>vkGetPhysicalDeviceWaylandPresentationSupportKHR</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>uint32_t</type> <name>queueFamilyIndex</name></param>
+            <param>struct <type>wl_display</type>* <name>display</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreateWin32SurfaceKHR</name></proto>
+            <param><type>VkInstance</type> <name>instance</name></param>
+            <param>const <type>VkWin32SurfaceCreateInfoKHR</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkSurfaceKHR</type>* <name>pSurface</name></param>
+        </command>
+        <command>
+            <proto><type>VkBool32</type> <name>vkGetPhysicalDeviceWin32PresentationSupportKHR</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>uint32_t</type> <name>queueFamilyIndex</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreateXlibSurfaceKHR</name></proto>
+            <param><type>VkInstance</type> <name>instance</name></param>
+            <param>const <type>VkXlibSurfaceCreateInfoKHR</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkSurfaceKHR</type>* <name>pSurface</name></param>
+        </command>
+        <command>
+            <proto><type>VkBool32</type> <name>vkGetPhysicalDeviceXlibPresentationSupportKHR</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>uint32_t</type> <name>queueFamilyIndex</name></param>
+            <param><type>Display</type>* <name>dpy</name></param>
+            <param><type>VisualID</type> <name>visualID</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreateXcbSurfaceKHR</name></proto>
+            <param><type>VkInstance</type> <name>instance</name></param>
+            <param>const <type>VkXcbSurfaceCreateInfoKHR</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkSurfaceKHR</type>* <name>pSurface</name></param>
+        </command>
+        <command>
+            <proto><type>VkBool32</type> <name>vkGetPhysicalDeviceXcbPresentationSupportKHR</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>uint32_t</type> <name>queueFamilyIndex</name></param>
+            <param><type>xcb_connection_t</type>* <name>connection</name></param>
+            <param><type>xcb_visualid_t</type> <name>visual_id</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreateImagePipeSurfaceFUCHSIA</name></proto>
+            <param><type>VkInstance</type> <name>instance</name></param>
+            <param>const <type>VkImagePipeSurfaceCreateInfoFUCHSIA</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkSurfaceKHR</type>* <name>pSurface</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_NATIVE_WINDOW_IN_USE_KHR">
+            <proto><type>VkResult</type> <name>vkCreateStreamDescriptorSurfaceGGP</name></proto>
+            <param><type>VkInstance</type> <name>instance</name></param>
+            <param>const <type>VkStreamDescriptorSurfaceCreateInfoGGP</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkSurfaceKHR</type>* <name>pSurface</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreateDebugReportCallbackEXT</name></proto>
+            <param><type>VkInstance</type> <name>instance</name></param>
+            <param>const <type>VkDebugReportCallbackCreateInfoEXT</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkDebugReportCallbackEXT</type>* <name>pCallback</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroyDebugReportCallbackEXT</name></proto>
+            <param><type>VkInstance</type> <name>instance</name></param>
+            <param externsync="true"><type>VkDebugReportCallbackEXT</type> <name>callback</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDebugReportMessageEXT</name></proto>
+            <param><type>VkInstance</type> <name>instance</name></param>
+            <param><type>VkDebugReportFlagsEXT</type> <name>flags</name></param>
+            <param><type>VkDebugReportObjectTypeEXT</type> <name>objectType</name></param>
+            <param><type>uint64_t</type> <name>object</name></param>
+            <param><type>size_t</type> <name>location</name></param>
+            <param><type>int32_t</type> <name>messageCode</name></param>
+            <param len="null-terminated">const <type>char</type>* <name>pLayerPrefix</name></param>
+            <param len="null-terminated">const <type>char</type>* <name>pMessage</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkDebugMarkerSetObjectNameEXT</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="pNameInfo.object">const <type>VkDebugMarkerObjectNameInfoEXT</type>* <name>pNameInfo</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkDebugMarkerSetObjectTagEXT</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="pTagInfo.object">const <type>VkDebugMarkerObjectTagInfoEXT</type>* <name>pTagInfo</name></param>
+        </command>
+        <command queues="graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdDebugMarkerBeginEXT</name></proto>
+            <param><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param>const <type>VkDebugMarkerMarkerInfoEXT</type>* <name>pMarkerInfo</name></param>
+        </command>
+        <command queues="graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdDebugMarkerEndEXT</name></proto>
+            <param><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+        </command>
+        <command queues="graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdDebugMarkerInsertEXT</name></proto>
+            <param><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param>const <type>VkDebugMarkerMarkerInfoEXT</type>* <name>pMarkerInfo</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_FORMAT_NOT_SUPPORTED">
+            <proto><type>VkResult</type> <name>vkGetPhysicalDeviceExternalImageFormatPropertiesNV</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>VkFormat</type> <name>format</name></param>
+            <param><type>VkImageType</type> <name>type</name></param>
+            <param><type>VkImageTiling</type> <name>tiling</name></param>
+            <param><type>VkImageUsageFlags</type> <name>usage</name></param>
+            <param optional="true"><type>VkImageCreateFlags</type> <name>flags</name></param>
+            <param optional="true"><type>VkExternalMemoryHandleTypeFlagsNV</type> <name>externalHandleType</name></param>
+            <param><type>VkExternalImageFormatPropertiesNV</type>* <name>pExternalImageFormatProperties</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_TOO_MANY_OBJECTS,VK_ERROR_OUT_OF_HOST_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetMemoryWin32HandleNV</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkDeviceMemory</type> <name>memory</name></param>
+            <param><type>VkExternalMemoryHandleTypeFlagsNV</type> <name>handleType</name></param>
+            <param><type>HANDLE</type>* <name>pHandle</name></param>
+        </command>
+        <command queues="graphics,compute" renderpass="inside" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdProcessCommandsNVX</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param>const <type>VkCmdProcessCommandsInfoNVX</type>* <name>pProcessCommandsInfo</name></param>
+        </command>
+        <command queues="graphics,compute" renderpass="inside" cmdbufferlevel="secondary">
+            <proto><type>void</type> <name>vkCmdReserveSpaceForCommandsNVX</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param>const <type>VkCmdReserveSpaceForCommandsInfoNVX</type>* <name>pReserveSpaceInfo</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreateIndirectCommandsLayoutNVX</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkIndirectCommandsLayoutCreateInfoNVX</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkIndirectCommandsLayoutNVX</type>* <name>pIndirectCommandsLayout</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroyIndirectCommandsLayoutNVX</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkIndirectCommandsLayoutNVX</type> <name>indirectCommandsLayout</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreateObjectTableNVX</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkObjectTableCreateInfoNVX</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkObjectTableNVX</type>* <name>pObjectTable</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroyObjectTableNVX</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="true"><type>VkObjectTableNVX</type> <name>objectTable</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkRegisterObjectsNVX</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="true"><type>VkObjectTableNVX</type> <name>objectTable</name></param>
+            <param><type>uint32_t</type> <name>objectCount</name></param>
+            <param len="objectCount">const <type>VkObjectTableEntryNVX</type>* const*    <name>ppObjectTableEntries</name></param>
+            <param len="objectCount">const <type>uint32_t</type>* <name>pObjectIndices</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkUnregisterObjectsNVX</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="true"><type>VkObjectTableNVX</type> <name>objectTable</name></param>
+            <param><type>uint32_t</type> <name>objectCount</name></param>
+            <param len="objectCount">const <type>VkObjectEntryTypeNVX</type>* <name>pObjectEntryTypes</name></param>
+            <param len="objectCount">const <type>uint32_t</type>* <name>pObjectIndices</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>VkDeviceGeneratedCommandsFeaturesNVX</type>* <name>pFeatures</name></param>
+            <param><type>VkDeviceGeneratedCommandsLimitsNVX</type>* <name>pLimits</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkGetPhysicalDeviceFeatures2</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>VkPhysicalDeviceFeatures2</type>* <name>pFeatures</name></param>
+        </command>
+        <command name="vkGetPhysicalDeviceFeatures2KHR"                        alias="vkGetPhysicalDeviceFeatures2"/>
+        <command>
+            <proto><type>void</type> <name>vkGetPhysicalDeviceProperties2</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>VkPhysicalDeviceProperties2</type>* <name>pProperties</name></param>
+        </command>
+        <command name="vkGetPhysicalDeviceProperties2KHR"                      alias="vkGetPhysicalDeviceProperties2"/>
+        <command>
+            <proto><type>void</type> <name>vkGetPhysicalDeviceFormatProperties2</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>VkFormat</type> <name>format</name></param>
+            <param><type>VkFormatProperties2</type>* <name>pFormatProperties</name></param>
+        </command>
+        <command name="vkGetPhysicalDeviceFormatProperties2KHR"                alias="vkGetPhysicalDeviceFormatProperties2"/>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_FORMAT_NOT_SUPPORTED">
+            <proto><type>VkResult</type> <name>vkGetPhysicalDeviceImageFormatProperties2</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param>const <type>VkPhysicalDeviceImageFormatInfo2</type>* <name>pImageFormatInfo</name></param>
+            <param><type>VkImageFormatProperties2</type>* <name>pImageFormatProperties</name></param>
+        </command>
+        <command name="vkGetPhysicalDeviceImageFormatProperties2KHR"           alias="vkGetPhysicalDeviceImageFormatProperties2"/>
+        <command>
+            <proto><type>void</type> <name>vkGetPhysicalDeviceQueueFamilyProperties2</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pQueueFamilyPropertyCount</name></param>
+            <param optional="true" len="pQueueFamilyPropertyCount"><type>VkQueueFamilyProperties2</type>* <name>pQueueFamilyProperties</name></param>
+        </command>
+        <command name="vkGetPhysicalDeviceQueueFamilyProperties2KHR"           alias="vkGetPhysicalDeviceQueueFamilyProperties2"/>
+        <command>
+            <proto><type>void</type> <name>vkGetPhysicalDeviceMemoryProperties2</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>VkPhysicalDeviceMemoryProperties2</type>* <name>pMemoryProperties</name></param>
+        </command>
+        <command name="vkGetPhysicalDeviceMemoryProperties2KHR"                alias="vkGetPhysicalDeviceMemoryProperties2"/>
+        <command>
+            <proto><type>void</type> <name>vkGetPhysicalDeviceSparseImageFormatProperties2</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param>const <type>VkPhysicalDeviceSparseImageFormatInfo2</type>* <name>pFormatInfo</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pPropertyCount</name></param>
+            <param optional="true" len="pPropertyCount"><type>VkSparseImageFormatProperties2</type>* <name>pProperties</name></param>
+        </command>
+        <command name="vkGetPhysicalDeviceSparseImageFormatProperties2KHR"     alias="vkGetPhysicalDeviceSparseImageFormatProperties2"/>
+        <command queues="graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdPushDescriptorSetKHR</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkPipelineBindPoint</type> <name>pipelineBindPoint</name></param>
+            <param><type>VkPipelineLayout</type> <name>layout</name></param>
+            <param><type>uint32_t</type> <name>set</name></param>
+            <param><type>uint32_t</type> <name>descriptorWriteCount</name></param>
+            <param len="descriptorWriteCount">const <type>VkWriteDescriptorSet</type>* <name>pDescriptorWrites</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkTrimCommandPool</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="true"><type>VkCommandPool</type> <name>commandPool</name></param>
+            <param optional="true"><type>VkCommandPoolTrimFlags</type> <name>flags</name></param>
+        </command>
+        <command name="vkTrimCommandPoolKHR"                                   alias="vkTrimCommandPool"/>
+        <command>
+            <proto><type>void</type> <name>vkGetPhysicalDeviceExternalBufferProperties</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param>const <type>VkPhysicalDeviceExternalBufferInfo</type>* <name>pExternalBufferInfo</name></param>
+            <param><type>VkExternalBufferProperties</type>* <name>pExternalBufferProperties</name></param>
+        </command>
+        <command name="vkGetPhysicalDeviceExternalBufferPropertiesKHR"         alias="vkGetPhysicalDeviceExternalBufferProperties"/>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_TOO_MANY_OBJECTS,VK_ERROR_OUT_OF_HOST_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetMemoryWin32HandleKHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkMemoryGetWin32HandleInfoKHR</type>* <name>pGetWin32HandleInfo</name></param>
+            <param><type>HANDLE</type>* <name>pHandle</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_INVALID_EXTERNAL_HANDLE">
+            <proto><type>VkResult</type> <name>vkGetMemoryWin32HandlePropertiesKHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkExternalMemoryHandleTypeFlagBits</type> <name>handleType</name></param>
+            <param><type>HANDLE</type> <name>handle</name></param>
+            <param><type>VkMemoryWin32HandlePropertiesKHR</type>* <name>pMemoryWin32HandleProperties</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_TOO_MANY_OBJECTS,VK_ERROR_OUT_OF_HOST_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetMemoryFdKHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkMemoryGetFdInfoKHR</type>* <name>pGetFdInfo</name></param>
+            <param><type>int</type>* <name>pFd</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_INVALID_EXTERNAL_HANDLE">
+            <proto><type>VkResult</type> <name>vkGetMemoryFdPropertiesKHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkExternalMemoryHandleTypeFlagBits</type> <name>handleType</name></param>
+            <param><type>int</type> <name>fd</name></param>
+            <param><type>VkMemoryFdPropertiesKHR</type>* <name>pMemoryFdProperties</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkGetPhysicalDeviceExternalSemaphoreProperties</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param>const <type>VkPhysicalDeviceExternalSemaphoreInfo</type>* <name>pExternalSemaphoreInfo</name></param>
+            <param><type>VkExternalSemaphoreProperties</type>* <name>pExternalSemaphoreProperties</name></param>
+        </command>
+        <command name="vkGetPhysicalDeviceExternalSemaphorePropertiesKHR"           alias="vkGetPhysicalDeviceExternalSemaphoreProperties"/>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_TOO_MANY_OBJECTS,VK_ERROR_OUT_OF_HOST_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetSemaphoreWin32HandleKHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkSemaphoreGetWin32HandleInfoKHR</type>* <name>pGetWin32HandleInfo</name></param>
+            <param><type>HANDLE</type>* <name>pHandle</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_INVALID_EXTERNAL_HANDLE">
+            <proto><type>VkResult</type> <name>vkImportSemaphoreWin32HandleKHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkImportSemaphoreWin32HandleInfoKHR</type>* <name>pImportSemaphoreWin32HandleInfo</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_TOO_MANY_OBJECTS,VK_ERROR_OUT_OF_HOST_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetSemaphoreFdKHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkSemaphoreGetFdInfoKHR</type>* <name>pGetFdInfo</name></param>
+            <param><type>int</type>* <name>pFd</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_INVALID_EXTERNAL_HANDLE">
+            <proto><type>VkResult</type> <name>vkImportSemaphoreFdKHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkImportSemaphoreFdInfoKHR</type>* <name>pImportSemaphoreFdInfo</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkGetPhysicalDeviceExternalFenceProperties</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param>const <type>VkPhysicalDeviceExternalFenceInfo</type>* <name>pExternalFenceInfo</name></param>
+            <param><type>VkExternalFenceProperties</type>* <name>pExternalFenceProperties</name></param>
+        </command>
+        <command name="vkGetPhysicalDeviceExternalFencePropertiesKHR"           alias="vkGetPhysicalDeviceExternalFenceProperties"/>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_TOO_MANY_OBJECTS,VK_ERROR_OUT_OF_HOST_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetFenceWin32HandleKHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkFenceGetWin32HandleInfoKHR</type>* <name>pGetWin32HandleInfo</name></param>
+            <param><type>HANDLE</type>* <name>pHandle</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_INVALID_EXTERNAL_HANDLE">
+            <proto><type>VkResult</type> <name>vkImportFenceWin32HandleKHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkImportFenceWin32HandleInfoKHR</type>* <name>pImportFenceWin32HandleInfo</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_TOO_MANY_OBJECTS,VK_ERROR_OUT_OF_HOST_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetFenceFdKHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkFenceGetFdInfoKHR</type>* <name>pGetFdInfo</name></param>
+            <param><type>int</type>* <name>pFd</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_INVALID_EXTERNAL_HANDLE">
+            <proto><type>VkResult</type> <name>vkImportFenceFdKHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkImportFenceFdInfoKHR</type>* <name>pImportFenceFdInfo</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS">
+            <proto><type>VkResult</type> <name>vkReleaseDisplayEXT</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>VkDisplayKHR</type> <name>display</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_INITIALIZATION_FAILED">
+            <proto><type>VkResult</type> <name>vkAcquireXlibDisplayEXT</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>Display</type>* <name>dpy</name></param>
+            <param><type>VkDisplayKHR</type> <name>display</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS">
+            <proto><type>VkResult</type> <name>vkGetRandROutputDisplayEXT</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>Display</type>* <name>dpy</name></param>
+            <param><type>RROutput</type> <name>rrOutput</name></param>
+            <param><type>VkDisplayKHR</type>* <name>pDisplay</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS">
+            <proto><type>VkResult</type> <name>vkDisplayPowerControlEXT</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkDisplayKHR</type> <name>display</name></param>
+            <param>const <type>VkDisplayPowerInfoEXT</type>* <name>pDisplayPowerInfo</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS">
+            <proto><type>VkResult</type> <name>vkRegisterDeviceEventEXT</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkDeviceEventInfoEXT</type>* <name>pDeviceEventInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkFence</type>* <name>pFence</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS">
+            <proto><type>VkResult</type> <name>vkRegisterDisplayEventEXT</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkDisplayKHR</type> <name>display</name></param>
+            <param>const <type>VkDisplayEventInfoEXT</type>* <name>pDisplayEventInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkFence</type>* <name>pFence</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_DEVICE_LOST,VK_ERROR_OUT_OF_DATE_KHR">
+            <proto><type>VkResult</type> <name>vkGetSwapchainCounterEXT</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkSwapchainKHR</type> <name>swapchain</name></param>
+            <param><type>VkSurfaceCounterFlagBitsEXT</type> <name>counter</name></param>
+            <param><type>uint64_t</type>* <name>pCounterValue</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_SURFACE_LOST_KHR">
+            <proto><type>VkResult</type> <name>vkGetPhysicalDeviceSurfaceCapabilities2EXT</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>VkSurfaceKHR</type> <name>surface</name></param>
+            <param><type>VkSurfaceCapabilities2EXT</type>* <name>pSurfaceCapabilities</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_INITIALIZATION_FAILED">
+            <proto><type>VkResult</type> <name>vkEnumeratePhysicalDeviceGroups</name></proto>
+            <param><type>VkInstance</type> <name>instance</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pPhysicalDeviceGroupCount</name></param>
+            <param optional="true" len="pPhysicalDeviceGroupCount"><type>VkPhysicalDeviceGroupProperties</type>* <name>pPhysicalDeviceGroupProperties</name></param>
+        </command>
+        <command name="vkEnumeratePhysicalDeviceGroupsKHR"                     alias="vkEnumeratePhysicalDeviceGroups"/>
+        <command>
+            <proto><type>void</type> <name>vkGetDeviceGroupPeerMemoryFeatures</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>uint32_t</type> <name>heapIndex</name></param>
+            <param><type>uint32_t</type> <name>localDeviceIndex</name></param>
+            <param><type>uint32_t</type> <name>remoteDeviceIndex</name></param>
+            <param><type>VkPeerMemoryFeatureFlags</type>* <name>pPeerMemoryFeatures</name></param>
+        </command>
+        <command name="vkGetDeviceGroupPeerMemoryFeaturesKHR"                  alias="vkGetDeviceGroupPeerMemoryFeatures"/>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR">
+            <proto><type>VkResult</type> <name>vkBindBufferMemory2</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>uint32_t</type> <name>bindInfoCount</name></param>
+            <param len="bindInfoCount">const <type>VkBindBufferMemoryInfo</type>* <name>pBindInfos</name></param>
+        </command>
+        <command name="vkBindBufferMemory2KHR"                                 alias="vkBindBufferMemory2"/>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkBindImageMemory2</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>uint32_t</type> <name>bindInfoCount</name></param>
+            <param len="bindInfoCount">const <type>VkBindImageMemoryInfo</type>* <name>pBindInfos</name></param>
+        </command>
+        <command name="vkBindImageMemory2KHR"                                  alias="vkBindImageMemory2"/>
+        <command queues="graphics,compute,transfer" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdSetDeviceMask</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>uint32_t</type> <name>deviceMask</name></param>
+        </command>
+        <command name="vkCmdSetDeviceMaskKHR"                                  alias="vkCmdSetDeviceMask"/>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetDeviceGroupPresentCapabilitiesKHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkDeviceGroupPresentCapabilitiesKHR</type>* <name>pDeviceGroupPresentCapabilities</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_SURFACE_LOST_KHR">
+            <proto><type>VkResult</type> <name>vkGetDeviceGroupSurfacePresentModesKHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="true"><type>VkSurfaceKHR</type> <name>surface</name></param>
+            <param optional="false,true"><type>VkDeviceGroupPresentModeFlagsKHR</type>* <name>pModes</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_TIMEOUT,VK_NOT_READY,VK_SUBOPTIMAL_KHR" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_DEVICE_LOST,VK_ERROR_OUT_OF_DATE_KHR,VK_ERROR_SURFACE_LOST_KHR,VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT">
+            <proto><type>VkResult</type> <name>vkAcquireNextImage2KHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkAcquireNextImageInfoKHR</type>* <name>pAcquireInfo</name></param>
+            <param><type>uint32_t</type>* <name>pImageIndex</name></param>
+        </command>
+        <command queues="compute" renderpass="outside" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdDispatchBase</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>uint32_t</type> <name>baseGroupX</name></param>
+            <param><type>uint32_t</type> <name>baseGroupY</name></param>
+            <param><type>uint32_t</type> <name>baseGroupZ</name></param>
+            <param><type>uint32_t</type> <name>groupCountX</name></param>
+            <param><type>uint32_t</type> <name>groupCountY</name></param>
+            <param><type>uint32_t</type> <name>groupCountZ</name></param>
+        </command>
+        <command name="vkCmdDispatchBaseKHR"                                   alias="vkCmdDispatchBase"/>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetPhysicalDevicePresentRectanglesKHR</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param externsync="true"><type>VkSurfaceKHR</type> <name>surface</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pRectCount</name></param>
+            <param optional="true" len="pRectCount"><type>VkRect2D</type>* <name>pRects</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreateDescriptorUpdateTemplate</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkDescriptorUpdateTemplateCreateInfo</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkDescriptorUpdateTemplate</type>* <name>pDescriptorUpdateTemplate</name></param>
+        </command>
+        <command name="vkCreateDescriptorUpdateTemplateKHR"                    alias="vkCreateDescriptorUpdateTemplate"/>
+        <command>
+            <proto><type>void</type> <name>vkDestroyDescriptorUpdateTemplate</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param optional="true" externsync="true"><type>VkDescriptorUpdateTemplate</type> <name>descriptorUpdateTemplate</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command name="vkDestroyDescriptorUpdateTemplateKHR"                   alias="vkDestroyDescriptorUpdateTemplate"/>
+        <command>
+            <proto><type>void</type> <name>vkUpdateDescriptorSetWithTemplate</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="true"><type>VkDescriptorSet</type> <name>descriptorSet</name></param>
+            <param><type>VkDescriptorUpdateTemplate</type> <name>descriptorUpdateTemplate</name></param>
+            <param noautovalidity="true">const <type>void</type>* <name>pData</name></param>
+        </command>
+        <command name="vkUpdateDescriptorSetWithTemplateKHR"                   alias="vkUpdateDescriptorSetWithTemplate"/>
+        <command queues="graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdPushDescriptorSetWithTemplateKHR</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkDescriptorUpdateTemplate</type> <name>descriptorUpdateTemplate</name></param>
+            <param><type>VkPipelineLayout</type> <name>layout</name></param>
+            <param><type>uint32_t</type> <name>set</name></param>
+            <param noautovalidity="true">const <type>void</type>* <name>pData</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkSetHdrMetadataEXT</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>uint32_t</type> <name>swapchainCount</name></param>
+            <param len="swapchainCount">const <type>VkSwapchainKHR</type>* <name>pSwapchains</name></param>
+            <param len="swapchainCount">const <type>VkHdrMetadataEXT</type>* <name>pMetadata</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_SUBOPTIMAL_KHR" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_DEVICE_LOST,VK_ERROR_OUT_OF_DATE_KHR,VK_ERROR_SURFACE_LOST_KHR,VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT">
+            <proto><type>VkResult</type> <name>vkGetSwapchainStatusKHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="true"><type>VkSwapchainKHR</type> <name>swapchain</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_DEVICE_LOST,VK_ERROR_SURFACE_LOST_KHR">
+            <proto><type>VkResult</type> <name>vkGetRefreshCycleDurationGOOGLE</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="true"><type>VkSwapchainKHR</type> <name>swapchain</name></param>
+            <param><type>VkRefreshCycleDurationGOOGLE</type>* <name>pDisplayTimingProperties</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_DEVICE_LOST,VK_ERROR_OUT_OF_DATE_KHR,VK_ERROR_SURFACE_LOST_KHR">
+            <proto><type>VkResult</type> <name>vkGetPastPresentationTimingGOOGLE</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="true"><type>VkSwapchainKHR</type> <name>swapchain</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pPresentationTimingCount</name></param>
+            <param optional="true" len="pPresentationTimingCount"><type>VkPastPresentationTimingGOOGLE</type>* <name>pPresentationTimings</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_NATIVE_WINDOW_IN_USE_KHR">
+            <proto><type>VkResult</type> <name>vkCreateIOSSurfaceMVK</name></proto>
+            <param><type>VkInstance</type> <name>instance</name></param>
+            <param>const <type>VkIOSSurfaceCreateInfoMVK</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkSurfaceKHR</type>* <name>pSurface</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_NATIVE_WINDOW_IN_USE_KHR">
+            <proto><type>VkResult</type> <name>vkCreateMacOSSurfaceMVK</name></proto>
+            <param><type>VkInstance</type> <name>instance</name></param>
+            <param>const <type>VkMacOSSurfaceCreateInfoMVK</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkSurfaceKHR</type>* <name>pSurface</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_NATIVE_WINDOW_IN_USE_KHR">
+            <proto><type>VkResult</type> <name>vkCreateMetalSurfaceEXT</name></proto>
+            <param><type>VkInstance</type> <name>instance</name></param>
+            <param>const <type>VkMetalSurfaceCreateInfoEXT</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkSurfaceKHR</type>* <name>pSurface</name></param>
+        </command>
+        <command queues="graphics" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdSetViewportWScalingNV</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>uint32_t</type> <name>firstViewport</name></param>
+            <param><type>uint32_t</type> <name>viewportCount</name></param>
+            <param len="viewportCount">const <type>VkViewportWScalingNV</type>* <name>pViewportWScalings</name></param>
+        </command>
+        <command queues="graphics" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdSetDiscardRectangleEXT</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>uint32_t</type> <name>firstDiscardRectangle</name></param>
+            <param><type>uint32_t</type> <name>discardRectangleCount</name></param>
+            <param len="discardRectangleCount">const <type>VkRect2D</type>* <name>pDiscardRectangles</name></param>
+        </command>
+        <command queues="graphics" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdSetSampleLocationsEXT</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param>const <type>VkSampleLocationsInfoEXT</type>* <name>pSampleLocationsInfo</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkGetPhysicalDeviceMultisamplePropertiesEXT</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>VkSampleCountFlagBits</type> <name>samples</name></param>
+            <param><type>VkMultisamplePropertiesEXT</type>* <name>pMultisampleProperties</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_SURFACE_LOST_KHR">
+            <proto><type>VkResult</type> <name>vkGetPhysicalDeviceSurfaceCapabilities2KHR</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param>const <type>VkPhysicalDeviceSurfaceInfo2KHR</type>* <name>pSurfaceInfo</name></param>
+            <param><type>VkSurfaceCapabilities2KHR</type>* <name>pSurfaceCapabilities</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_SURFACE_LOST_KHR">
+            <proto><type>VkResult</type> <name>vkGetPhysicalDeviceSurfaceFormats2KHR</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param>const <type>VkPhysicalDeviceSurfaceInfo2KHR</type>* <name>pSurfaceInfo</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pSurfaceFormatCount</name></param>
+            <param optional="true" len="pSurfaceFormatCount"><type>VkSurfaceFormat2KHR</type>* <name>pSurfaceFormats</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetPhysicalDeviceDisplayProperties2KHR</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pPropertyCount</name></param>
+            <param optional="true" len="pPropertyCount"><type>VkDisplayProperties2KHR</type>* <name>pProperties</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetPhysicalDeviceDisplayPlaneProperties2KHR</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pPropertyCount</name></param>
+            <param optional="true" len="pPropertyCount"><type>VkDisplayPlaneProperties2KHR</type>* <name>pProperties</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetDisplayModeProperties2KHR</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>VkDisplayKHR</type> <name>display</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pPropertyCount</name></param>
+            <param optional="true" len="pPropertyCount"><type>VkDisplayModeProperties2KHR</type>* <name>pProperties</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetDisplayPlaneCapabilities2KHR</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param>const <type>VkDisplayPlaneInfo2KHR</type>* <name>pDisplayPlaneInfo</name></param>
+            <param><type>VkDisplayPlaneCapabilities2KHR</type>* <name>pCapabilities</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkGetBufferMemoryRequirements2</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkBufferMemoryRequirementsInfo2</type>* <name>pInfo</name></param>
+            <param><type>VkMemoryRequirements2</type>* <name>pMemoryRequirements</name></param>
+        </command>
+        <command name="vkGetBufferMemoryRequirements2KHR"                      alias="vkGetBufferMemoryRequirements2"/>
+        <command>
+            <proto><type>void</type> <name>vkGetImageMemoryRequirements2</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkImageMemoryRequirementsInfo2</type>* <name>pInfo</name></param>
+            <param><type>VkMemoryRequirements2</type>* <name>pMemoryRequirements</name></param>
+        </command>
+        <command name="vkGetImageMemoryRequirements2KHR"                       alias="vkGetImageMemoryRequirements2"/>
+        <command>
+            <proto><type>void</type> <name>vkGetImageSparseMemoryRequirements2</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkImageSparseMemoryRequirementsInfo2</type>* <name>pInfo</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pSparseMemoryRequirementCount</name></param>
+            <param optional="true" len="pSparseMemoryRequirementCount"><type>VkSparseImageMemoryRequirements2</type>* <name>pSparseMemoryRequirements</name></param>
+        </command>
+        <command name="vkGetImageSparseMemoryRequirements2KHR"                 alias="vkGetImageSparseMemoryRequirements2"/>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreateSamplerYcbcrConversion</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkSamplerYcbcrConversionCreateInfo</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkSamplerYcbcrConversion</type>* <name>pYcbcrConversion</name></param>
+        </command>
+        <command name="vkCreateSamplerYcbcrConversionKHR"                      alias="vkCreateSamplerYcbcrConversion"/>
+        <command>
+            <proto><type>void</type> <name>vkDestroySamplerYcbcrConversion</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param optional="true" externsync="true"><type>VkSamplerYcbcrConversion</type> <name>ycbcrConversion</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command name="vkDestroySamplerYcbcrConversionKHR"                     alias="vkDestroySamplerYcbcrConversion"/>
+        <command>
+            <proto><type>void</type> <name>vkGetDeviceQueue2</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkDeviceQueueInfo2</type>* <name>pQueueInfo</name></param>
+            <param><type>VkQueue</type>* <name>pQueue</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreateValidationCacheEXT</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkValidationCacheCreateInfoEXT</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkValidationCacheEXT</type>* <name>pValidationCache</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroyValidationCacheEXT</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param optional="true" externsync="true"><type>VkValidationCacheEXT</type> <name>validationCache</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetValidationCacheDataEXT</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkValidationCacheEXT</type> <name>validationCache</name></param>
+            <param optional="false,true"><type>size_t</type>* <name>pDataSize</name></param>
+            <param optional="true" len="pDataSize"><type>void</type>* <name>pData</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkMergeValidationCachesEXT</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="true"><type>VkValidationCacheEXT</type> <name>dstCache</name></param>
+            <param><type>uint32_t</type> <name>srcCacheCount</name></param>
+            <param len="srcCacheCount">const <type>VkValidationCacheEXT</type>* <name>pSrcCaches</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkGetDescriptorSetLayoutSupport</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkDescriptorSetLayoutCreateInfo</type>* <name>pCreateInfo</name></param>
+            <param><type>VkDescriptorSetLayoutSupport</type>* <name>pSupport</name></param>
+        </command>
+        <command name="vkGetDescriptorSetLayoutSupportKHR"                     alias="vkGetDescriptorSetLayoutSupport"/>
+        <command>
+            <proto><type>VkResult</type> <name>vkGetSwapchainGrallocUsageANDROID</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkFormat</type> <name>format</name></param>
+            <param><type>VkImageUsageFlags</type> <name>imageUsage</name></param>
+            <param><type>int</type>* <name>grallocUsage</name></param>
+        </command>
+        <command>
+            <proto><type>VkResult</type> <name>vkGetSwapchainGrallocUsage2ANDROID</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkFormat</type> <name>format</name></param>
+            <param><type>VkImageUsageFlags</type> <name>imageUsage</name></param>
+            <param><type>VkSwapchainImageUsageFlagsANDROID</type> <name>swapchainImageUsage</name></param>
+            <param><type>uint64_t</type>* <name>grallocConsumerUsage</name></param>
+            <param><type>uint64_t</type>* <name>grallocProducerUsage</name></param>
+        </command>
+        <command>
+            <proto><type>VkResult</type> <name>vkAcquireImageANDROID</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkImage</type> <name>image</name></param>
+            <param><type>int</type> <name>nativeFenceFd</name></param>
+            <param><type>VkSemaphore</type> <name>semaphore</name></param>
+            <param><type>VkFence</type> <name>fence</name></param>
+        </command>
+        <command>
+            <proto><type>VkResult</type> <name>vkQueueSignalReleaseImageANDROID</name></proto>
+            <param><type>VkQueue</type> <name>queue</name></param>
+            <param><type>uint32_t</type> <name>waitSemaphoreCount</name></param>
+            <param>const <type>VkSemaphore</type>* <name>pWaitSemaphores</name></param>
+            <param><type>VkImage</type> <name>image</name></param>
+            <param><type>int</type>* <name>pNativeFenceFd</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_FEATURE_NOT_PRESENT,VK_ERROR_OUT_OF_HOST_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetShaderInfoAMD</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkPipeline</type> <name>pipeline</name></param>
+            <param><type>VkShaderStageFlagBits</type> <name>shaderStage</name></param>
+            <param><type>VkShaderInfoTypeAMD</type> <name>infoType</name></param>
+            <param optional="false,true"><type>size_t</type>* <name>pInfoSize</name></param>
+            <param optional="true" len="pInfoSize"><type>void</type>* <name>pInfo</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkSetLocalDimmingAMD</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkSwapchainKHR</type> <name>swapChain</name></param>
+            <param><type>VkBool32</type> <name>localDimmingEnable</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetPhysicalDeviceCalibrateableTimeDomainsEXT</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pTimeDomainCount</name></param>
+            <param optional="true" len="pTimeDomainCount"><type>VkTimeDomainEXT</type>* <name>pTimeDomains</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetCalibratedTimestampsEXT</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>uint32_t</type> <name>timestampCount</name></param>
+            <param len="timestampCount">const <type>VkCalibratedTimestampInfoEXT</type>* <name>pTimestampInfos</name></param>
+            <param len="timestampCount"><type>uint64_t</type>* <name>pTimestamps</name></param>
+            <param><type>uint64_t</type>* <name>pMaxDeviation</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkSetDebugUtilsObjectNameEXT</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="pNameInfo.objectHandle">const <type>VkDebugUtilsObjectNameInfoEXT</type>* <name>pNameInfo</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkSetDebugUtilsObjectTagEXT</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param externsync="pTagInfo.objectHandle">const <type>VkDebugUtilsObjectTagInfoEXT</type>* <name>pTagInfo</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkQueueBeginDebugUtilsLabelEXT</name></proto>
+            <param><type>VkQueue</type> <name>queue</name></param>
+            <param>const <type>VkDebugUtilsLabelEXT</type>* <name>pLabelInfo</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkQueueEndDebugUtilsLabelEXT</name></proto>
+            <param><type>VkQueue</type> <name>queue</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkQueueInsertDebugUtilsLabelEXT</name></proto>
+            <param><type>VkQueue</type> <name>queue</name></param>
+            <param>const <type>VkDebugUtilsLabelEXT</type>* <name>pLabelInfo</name></param>
+        </command>
+        <command queues="graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdBeginDebugUtilsLabelEXT</name></proto>
+            <param><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param>const <type>VkDebugUtilsLabelEXT</type>* <name>pLabelInfo</name></param>
+        </command>
+        <command queues="graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdEndDebugUtilsLabelEXT</name></proto>
+            <param><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+        </command>
+        <command queues="graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdInsertDebugUtilsLabelEXT</name></proto>
+            <param><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param>const <type>VkDebugUtilsLabelEXT</type>* <name>pLabelInfo</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreateDebugUtilsMessengerEXT</name></proto>
+            <param><type>VkInstance</type> <name>instance</name></param>
+            <param>const <type>VkDebugUtilsMessengerCreateInfoEXT</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkDebugUtilsMessengerEXT</type>* <name>pMessenger</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroyDebugUtilsMessengerEXT</name></proto>
+            <param><type>VkInstance</type> <name>instance</name></param>
+            <param externsync="true"><type>VkDebugUtilsMessengerEXT</type> <name>messenger</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkSubmitDebugUtilsMessageEXT</name></proto>
+            <param><type>VkInstance</type> <name>instance</name></param>
+            <param><type>VkDebugUtilsMessageSeverityFlagBitsEXT</type> <name>messageSeverity</name></param>
+            <param><type>VkDebugUtilsMessageTypeFlagsEXT</type> <name>messageTypes</name></param>
+            <param>const <type>VkDebugUtilsMessengerCallbackDataEXT</type>* <name>pCallbackData</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_INVALID_EXTERNAL_HANDLE">
+            <proto><type>VkResult</type> <name>vkGetMemoryHostPointerPropertiesEXT</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkExternalMemoryHandleTypeFlagBits</type> <name>handleType</name></param>
+            <param optional="false">const <type>void</type>* <name>pHostPointer</name></param>
+            <param><type>VkMemoryHostPointerPropertiesEXT</type>* <name>pMemoryHostPointerProperties</name></param>
+        </command>
+        <command queues="transfer,graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary" pipeline="transfer">
+            <proto><type>void</type> <name>vkCmdWriteBufferMarkerAMD</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkPipelineStageFlagBits</type> <name>pipelineStage</name></param>
+            <param><type>VkBuffer</type> <name>dstBuffer</name></param>
+            <param><type>VkDeviceSize</type> <name>dstOffset</name></param>
+            <param><type>uint32_t</type> <name>marker</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreateRenderPass2KHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkRenderPassCreateInfo2KHR</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkRenderPass</type>* <name>pRenderPass</name></param>
+        </command>
+        <command queues="graphics" renderpass="outside" cmdbufferlevel="primary" pipeline="graphics">
+            <proto><type>void</type> <name>vkCmdBeginRenderPass2KHR</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param>const <type>VkRenderPassBeginInfo</type>*      <name>pRenderPassBegin</name></param>
+            <param>const <type>VkSubpassBeginInfoKHR</type>*      <name>pSubpassBeginInfo</name></param>
+        </command>
+        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary" pipeline="graphics">
+            <proto><type>void</type> <name>vkCmdNextSubpass2KHR</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param>const <type>VkSubpassBeginInfoKHR</type>*      <name>pSubpassBeginInfo</name></param>
+            <param>const <type>VkSubpassEndInfoKHR</type>*        <name>pSubpassEndInfo</name></param>
+        </command>
+        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary" pipeline="graphics">
+            <proto><type>void</type> <name>vkCmdEndRenderPass2KHR</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param>const <type>VkSubpassEndInfoKHR</type>*        <name>pSubpassEndInfo</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_DEVICE_LOST">
+            <proto><type>VkResult</type> <name>vkGetSemaphoreCounterValueKHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkSemaphore</type> <name>semaphore</name></param>
+            <param><type>uint64_t</type>* <name>pValue</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_TIMEOUT" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_DEVICE_LOST">
+            <proto><type>VkResult</type> <name>vkWaitSemaphoresKHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkSemaphoreWaitInfoKHR</type>* <name>pWaitInfo</name></param>
+            <param><type>uint64_t</type> <name>timeout</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkSignalSemaphoreKHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkSemaphoreSignalInfoKHR</type>* <name>pSignalInfo</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR">
+            <proto><type>VkResult</type> <name>vkGetAndroidHardwareBufferPropertiesANDROID</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const struct <type>AHardwareBuffer</type>* <name>buffer</name></param>
+            <param><type>VkAndroidHardwareBufferPropertiesANDROID</type>* <name>pProperties</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_TOO_MANY_OBJECTS,VK_ERROR_OUT_OF_HOST_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetMemoryAndroidHardwareBufferANDROID</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkMemoryGetAndroidHardwareBufferInfoANDROID</type>* <name>pInfo</name></param>
+            <param>struct <type>AHardwareBuffer</type>** <name>pBuffer</name></param>
+        </command>
+        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary" pipeline="graphics">
+            <proto><type>void</type> <name>vkCmdDrawIndirectCountKHR</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkBuffer</type> <name>buffer</name></param>
+            <param><type>VkDeviceSize</type> <name>offset</name></param>
+            <param><type>VkBuffer</type> <name>countBuffer</name></param>
+            <param><type>VkDeviceSize</type> <name>countBufferOffset</name></param>
+            <param><type>uint32_t</type> <name>maxDrawCount</name></param>
+            <param><type>uint32_t</type> <name>stride</name></param>
+        </command>
+        <command name="vkCmdDrawIndirectCountAMD"                                  alias="vkCmdDrawIndirectCountKHR"/>
+        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary" pipeline="graphics">
+            <proto><type>void</type> <name>vkCmdDrawIndexedIndirectCountKHR</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkBuffer</type> <name>buffer</name></param>
+            <param><type>VkDeviceSize</type> <name>offset</name></param>
+            <param><type>VkBuffer</type> <name>countBuffer</name></param>
+            <param><type>VkDeviceSize</type> <name>countBufferOffset</name></param>
+            <param><type>uint32_t</type> <name>maxDrawCount</name></param>
+            <param><type>uint32_t</type> <name>stride</name></param>
+        </command>
+        <command name="vkCmdDrawIndexedIndirectCountAMD"                           alias="vkCmdDrawIndexedIndirectCountKHR"/>
+        <command queues="graphics,compute,transfer" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdSetCheckpointNV</name></proto>
+            <param><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param noautovalidity="true">const <type>void</type>* <name>pCheckpointMarker</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkGetQueueCheckpointDataNV</name></proto>
+            <param><type>VkQueue</type> <name>queue</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pCheckpointDataCount</name></param>
+            <param optional="true" len="pCheckpointDataCount"><type>VkCheckpointDataNV</type>* <name>pCheckpointData</name></param>
+        </command>
+        <command queues="graphics" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdBindTransformFeedbackBuffersEXT</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>uint32_t</type> <name>firstBinding</name></param>
+            <param><type>uint32_t</type> <name>bindingCount</name></param>
+            <param len="bindingCount">const <type>VkBuffer</type>* <name>pBuffers</name></param>
+            <param len="bindingCount">const <type>VkDeviceSize</type>* <name>pOffsets</name></param>
+            <param optional="true" len="bindingCount">const <type>VkDeviceSize</type>* <name>pSizes</name></param>
+        </command>
+        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdBeginTransformFeedbackEXT</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>uint32_t</type> <name>firstCounterBuffer</name></param>
+            <param optional="true"><type>uint32_t</type> <name>counterBufferCount</name></param>
+            <param noautovalidity="true" len="counterBufferCount">const <type>VkBuffer</type>* <name>pCounterBuffers</name></param>
+            <param optional="true" len="counterBufferCount">const <type>VkDeviceSize</type>* <name>pCounterBufferOffsets</name></param>
+        </command>
+        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdEndTransformFeedbackEXT</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>uint32_t</type> <name>firstCounterBuffer</name></param>
+            <param optional="true"><type>uint32_t</type> <name>counterBufferCount</name></param>
+            <param noautovalidity="true" len="counterBufferCount">const <type>VkBuffer</type>* <name>pCounterBuffers</name></param>
+            <param optional="true" len="counterBufferCount">const <type>VkDeviceSize</type>* <name>pCounterBufferOffsets</name></param>
+        </command>
+        <command queues="graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdBeginQueryIndexedEXT</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkQueryPool</type> <name>queryPool</name></param>
+            <param><type>uint32_t</type> <name>query</name></param>
+            <param optional="true"><type>VkQueryControlFlags</type> <name>flags</name></param>
+            <param><type>uint32_t</type> <name>index</name></param>
+        </command>
+        <command queues="graphics,compute" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdEndQueryIndexedEXT</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkQueryPool</type> <name>queryPool</name></param>
+            <param><type>uint32_t</type> <name>query</name></param>
+            <param><type>uint32_t</type> <name>index</name></param>
+        </command>
+        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary" pipeline="graphics">
+            <proto><type>void</type> <name>vkCmdDrawIndirectByteCountEXT</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>uint32_t</type> <name>instanceCount</name></param>
+            <param><type>uint32_t</type> <name>firstInstance</name></param>
+            <param><type>VkBuffer</type> <name>counterBuffer</name></param>
+            <param><type>VkDeviceSize</type> <name>counterBufferOffset</name></param>
+            <param><type>uint32_t</type> <name>counterOffset</name></param>
+            <param><type>uint32_t</type> <name>vertexStride</name></param>
+        </command>
+        <command queues="graphics" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdSetExclusiveScissorNV</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>uint32_t</type> <name>firstExclusiveScissor</name></param>
+            <param><type>uint32_t</type> <name>exclusiveScissorCount</name></param>
+            <param len="exclusiveScissorCount">const <type>VkRect2D</type>* <name>pExclusiveScissors</name></param>
+        </command>
+        <command queues="graphics" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdBindShadingRateImageNV</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param optional="true"><type>VkImageView</type> <name>imageView</name></param>
+            <param><type>VkImageLayout</type> <name>imageLayout</name></param>
+        </command>
+        <command queues="graphics" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdSetViewportShadingRatePaletteNV</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>uint32_t</type> <name>firstViewport</name></param>
+            <param><type>uint32_t</type> <name>viewportCount</name></param>
+            <param len="viewportCount">const <type>VkShadingRatePaletteNV</type>* <name>pShadingRatePalettes</name></param>
+        </command>
+        <command queues="graphics" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdSetCoarseSampleOrderNV</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkCoarseSampleOrderTypeNV</type> <name>sampleOrderType</name></param>
+            <param optional="true"><type>uint32_t</type> <name>customSampleOrderCount</name></param>
+            <param len="customSampleOrderCount">const <type>VkCoarseSampleOrderCustomNV</type>* <name>pCustomSampleOrders</name></param>
+        </command>
+        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary" pipeline="graphics">
+            <proto><type>void</type> <name>vkCmdDrawMeshTasksNV</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>uint32_t</type> <name>taskCount</name></param>
+            <param><type>uint32_t</type> <name>firstTask</name></param>
+        </command>
+        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary" pipeline="graphics">
+            <proto><type>void</type> <name>vkCmdDrawMeshTasksIndirectNV</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkBuffer</type> <name>buffer</name></param>
+            <param><type>VkDeviceSize</type> <name>offset</name></param>
+            <param><type>uint32_t</type> <name>drawCount</name></param>
+            <param><type>uint32_t</type> <name>stride</name></param>
+        </command>
+        <command queues="graphics" renderpass="inside" cmdbufferlevel="primary,secondary" pipeline="graphics">
+            <proto><type>void</type> <name>vkCmdDrawMeshTasksIndirectCountNV</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkBuffer</type> <name>buffer</name></param>
+            <param><type>VkDeviceSize</type> <name>offset</name></param>
+            <param><type>VkBuffer</type> <name>countBuffer</name></param>
+            <param><type>VkDeviceSize</type> <name>countBufferOffset</name></param>
+            <param><type>uint32_t</type> <name>maxDrawCount</name></param>
+            <param><type>uint32_t</type> <name>stride</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkCompileDeferredNV</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkPipeline</type> <name>pipeline</name></param>
+            <param><type>uint32_t</type> <name>shader</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreateAccelerationStructureNV</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkAccelerationStructureCreateInfoNV</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkAccelerationStructureNV</type>* <name>pAccelerationStructure</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkDestroyAccelerationStructureNV</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkAccelerationStructureNV</type> <name>accelerationStructure</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkGetAccelerationStructureMemoryRequirementsNV</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkAccelerationStructureMemoryRequirementsInfoNV</type>* <name>pInfo</name></param>
+            <param><type>VkMemoryRequirements2KHR</type>* <name>pMemoryRequirements</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkBindAccelerationStructureMemoryNV</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>uint32_t</type> <name>bindInfoCount</name></param>
+            <param len="bindInfoCount">const <type>VkBindAccelerationStructureMemoryInfoNV</type>* <name>pBindInfos</name></param>
+        </command>
+        <command queues="compute" renderpass="outside" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdCopyAccelerationStructureNV</name></proto>
+            <param><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkAccelerationStructureNV</type> <name>dst</name></param>
+            <param><type>VkAccelerationStructureNV</type> <name>src</name></param>
+            <param><type>VkCopyAccelerationStructureModeNV</type> <name>mode</name></param>
+        </command>
+        <command queues="compute" renderpass="outside" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdWriteAccelerationStructuresPropertiesNV</name></proto>
+            <param><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>uint32_t</type> <name>accelerationStructureCount</name></param>
+            <param len="accelerationStructureCount">const <type>VkAccelerationStructureNV</type>* <name>pAccelerationStructures</name></param>
+            <param><type>VkQueryType</type> <name>queryType</name></param>
+            <param><type>VkQueryPool</type> <name>queryPool</name></param>
+            <param><type>uint32_t</type> <name>firstQuery</name></param>
+        </command>
+        <command queues="compute" renderpass="outside" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdBuildAccelerationStructureNV</name></proto>
+            <param><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param>const <type>VkAccelerationStructureInfoNV</type>* <name>pInfo</name></param>
+            <param optional="true"><type>VkBuffer</type> <name>instanceData</name></param>
+            <param><type>VkDeviceSize</type> <name>instanceOffset</name></param>
+            <param><type>VkBool32</type> <name>update</name></param>
+            <param><type>VkAccelerationStructureNV</type> <name>dst</name></param>
+            <param optional="true"><type>VkAccelerationStructureNV</type> <name>src</name></param>
+            <param><type>VkBuffer</type> <name>scratch</name></param>
+            <param><type>VkDeviceSize</type> <name>scratchOffset</name></param>
+        </command>
+        <command queues="compute" renderpass="outside" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdTraceRaysNV</name></proto>
+            <param><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>VkBuffer</type> <name>raygenShaderBindingTableBuffer</name></param>
+            <param><type>VkDeviceSize</type> <name>raygenShaderBindingOffset</name></param>
+            <param optional="true"><type>VkBuffer</type> <name>missShaderBindingTableBuffer</name></param>
+            <param><type>VkDeviceSize</type> <name>missShaderBindingOffset</name></param>
+            <param><type>VkDeviceSize</type> <name>missShaderBindingStride</name></param>
+            <param optional="true"><type>VkBuffer</type> <name>hitShaderBindingTableBuffer</name></param>
+            <param><type>VkDeviceSize</type> <name>hitShaderBindingOffset</name></param>
+            <param><type>VkDeviceSize</type> <name>hitShaderBindingStride</name></param>
+            <param optional="true"><type>VkBuffer</type> <name>callableShaderBindingTableBuffer</name></param>
+            <param><type>VkDeviceSize</type> <name>callableShaderBindingOffset</name></param>
+            <param><type>VkDeviceSize</type> <name>callableShaderBindingStride</name></param>
+            <param><type>uint32_t</type> <name>width</name></param>
+            <param><type>uint32_t</type> <name>height</name></param>
+            <param><type>uint32_t</type> <name>depth</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetRayTracingShaderGroupHandlesNV</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkPipeline</type> <name>pipeline</name></param>
+            <param><type>uint32_t</type> <name>firstGroup</name></param>
+            <param><type>uint32_t</type> <name>groupCount</name></param>
+            <param><type>size_t</type> <name>dataSize</name></param>
+            <param len="dataSize"><type>void</type>* <name>pData</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetAccelerationStructureHandleNV</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkAccelerationStructureNV</type> <name>accelerationStructure</name></param>
+            <param><type>size_t</type> <name>dataSize</name></param>
+            <param len="dataSize"><type>void</type>* <name>pData</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_INVALID_SHADER_NV">
+            <proto><type>VkResult</type> <name>vkCreateRayTracingPipelinesNV</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param optional="true"><type>VkPipelineCache</type> <name>pipelineCache</name></param>
+            <param><type>uint32_t</type> <name>createInfoCount</name></param>
+            <param len="createInfoCount">const <type>VkRayTracingPipelineCreateInfoNV</type>* <name>pCreateInfos</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param len="createInfoCount"><type>VkPipeline</type>* <name>pPipelines</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetPhysicalDeviceCooperativeMatrixPropertiesNV</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pPropertyCount</name></param>
+            <param optional="true" len="pPropertyCount"><type>VkCooperativeMatrixPropertiesNV</type>* <name>pProperties</name></param>
+        </command>
+        <command>
+            <proto><type>uint32_t</type> <name>vkGetImageViewHandleNVX</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkImageViewHandleInfoNVX</type>* <name>pInfo</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_SURFACE_LOST_KHR">
+            <proto><type>VkResult</type> <name>vkGetPhysicalDeviceSurfacePresentModes2EXT</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param>const <type>VkPhysicalDeviceSurfaceInfo2KHR</type>* <name>pSurfaceInfo</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pPresentModeCount</name></param>
+            <param optional="true" len="pPresentModeCount"><type>VkPresentModeKHR</type>* <name>pPresentModes</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_SURFACE_LOST_KHR">
+            <proto><type>VkResult</type> <name>vkGetDeviceGroupSurfacePresentModes2EXT</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkPhysicalDeviceSurfaceInfo2KHR</type>* <name>pSurfaceInfo</name></param>
+            <param optional="false,true"><type>VkDeviceGroupPresentModeFlagsKHR</type>* <name>pModes</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_INITIALIZATION_FAILED,VK_ERROR_SURFACE_LOST_KHR">
+            <proto><type>VkResult</type> <name>vkAcquireFullScreenExclusiveModeEXT</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkSwapchainKHR</type> <name>swapchain</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_SURFACE_LOST_KHR">
+            <proto><type>VkResult</type> <name>vkReleaseFullScreenExclusiveModeEXT</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkSwapchainKHR</type> <name>swapchain</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY,VK_ERROR_INITIALIZATION_FAILED">
+            <proto><type>VkResult</type> <name>vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>uint32_t</type> <name>queueFamilyIndex</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pCounterCount</name></param>
+            <param optional="true" len="pCounterCount"><type>VkPerformanceCounterKHR</type>* <name>pCounters</name></param>
+            <param optional="true" len="pCounterCount"><type>VkPerformanceCounterDescriptionKHR</type>* <name>pCounterDescriptions</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param>const <type>VkQueryPoolPerformanceCreateInfoKHR</type>* <name>pPerformanceQueryCreateInfo</name></param>
+            <param><type>uint32_t</type>* <name>pNumPasses</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_TIMEOUT">
+            <proto><type>VkResult</type> <name>vkAcquireProfilingLockKHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkAcquireProfilingLockInfoKHR</type>* <name>pInfo</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkReleaseProfilingLockKHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS">
+            <proto><type>VkResult</type> <name>vkGetImageDrmFormatModifierPropertiesEXT</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkImage</type> <name>image</name></param>
+            <param><type>VkImageDrmFormatModifierPropertiesEXT</type>* <name>pProperties</name></param>
+        </command>
+        <command>
+            <proto><type>uint64_t</type> <name>vkGetBufferOpaqueCaptureAddressKHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkBufferDeviceAddressInfoKHR</type>* <name>pInfo</name></param>
+        </command>
+        <command>
+            <proto><type>VkDeviceAddress</type> <name>vkGetBufferDeviceAddressKHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkBufferDeviceAddressInfoKHR</type>* <name>pInfo</name></param>
+        </command>
+        <command name="vkGetBufferDeviceAddressEXT"        alias="vkGetBufferDeviceAddressKHR"/>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkCreateHeadlessSurfaceEXT</name></proto>
+            <param><type>VkInstance</type> <name>instance</name></param>
+            <param>const <type>VkHeadlessSurfaceCreateInfoEXT</type>* <name>pCreateInfo</name></param>
+            <param optional="true">const <type>VkAllocationCallbacks</type>* <name>pAllocator</name></param>
+            <param><type>VkSurfaceKHR</type>* <name>pSurface</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pCombinationCount</name></param>
+            <param optional="true" len="pCombinationCount"><type>VkFramebufferMixedSamplesCombinationNV</type>* <name>pCombinations</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_TOO_MANY_OBJECTS,VK_ERROR_OUT_OF_HOST_MEMORY">
+            <proto><type>VkResult</type> <name>vkInitializePerformanceApiINTEL</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkInitializePerformanceApiInfoINTEL</type>* <name>pInitializeInfo</name></param>
+        </command>
+        <command>
+            <proto><type>void</type> <name>vkUninitializePerformanceApiINTEL</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+        </command>
+        <command queues="graphics,compute,transfer" renderpass="both" cmdbufferlevel="primary,secondary" successcodes="VK_SUCCESS" errorcodes="VK_ERROR_TOO_MANY_OBJECTS,VK_ERROR_OUT_OF_HOST_MEMORY">
+            <proto><type>VkResult</type> <name>vkCmdSetPerformanceMarkerINTEL</name></proto>
+            <param><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param>const <type>VkPerformanceMarkerInfoINTEL</type>* <name>pMarkerInfo</name></param>
+        </command>
+        <command queues="graphics,compute,transfer" renderpass="both" cmdbufferlevel="primary,secondary" successcodes="VK_SUCCESS" errorcodes="VK_ERROR_TOO_MANY_OBJECTS,VK_ERROR_OUT_OF_HOST_MEMORY">
+            <proto><type>VkResult</type> <name>vkCmdSetPerformanceStreamMarkerINTEL</name></proto>
+            <param><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param>const <type>VkPerformanceStreamMarkerInfoINTEL</type>* <name>pMarkerInfo</name></param>
+        </command>
+        <command queues="graphics,compute,transfer" renderpass="both" cmdbufferlevel="primary,secondary" successcodes="VK_SUCCESS" errorcodes="VK_ERROR_TOO_MANY_OBJECTS,VK_ERROR_OUT_OF_HOST_MEMORY">
+            <proto><type>VkResult</type> <name>vkCmdSetPerformanceOverrideINTEL</name></proto>
+            <param><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param>const <type>VkPerformanceOverrideInfoINTEL</type>* <name>pOverrideInfo</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_TOO_MANY_OBJECTS,VK_ERROR_OUT_OF_HOST_MEMORY">
+            <proto><type>VkResult</type> <name>vkAcquirePerformanceConfigurationINTEL</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkPerformanceConfigurationAcquireInfoINTEL</type>* <name>pAcquireInfo</name></param>
+            <param><type>VkPerformanceConfigurationINTEL</type>* <name>pConfiguration</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_TOO_MANY_OBJECTS,VK_ERROR_OUT_OF_HOST_MEMORY">
+            <proto><type>VkResult</type> <name>vkReleasePerformanceConfigurationINTEL</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkPerformanceConfigurationINTEL</type> <name>configuration</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_TOO_MANY_OBJECTS,VK_ERROR_OUT_OF_HOST_MEMORY">
+            <proto><type>VkResult</type> <name>vkQueueSetPerformanceConfigurationINTEL</name></proto>
+            <param><type>VkQueue</type> <name>queue</name></param>
+            <param><type>VkPerformanceConfigurationINTEL</type> <name>configuration</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS" errorcodes="VK_ERROR_TOO_MANY_OBJECTS,VK_ERROR_OUT_OF_HOST_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetPerformanceParameterINTEL</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param><type>VkPerformanceParameterTypeINTEL</type> <name>parameter</name></param>
+            <param><type>VkPerformanceValueINTEL</type>* <name>pValue</name></param>
+        </command>
+        <command>
+            <proto><type>uint64_t</type> <name>vkGetDeviceMemoryOpaqueCaptureAddressKHR</name></proto>
+            <param><type>VkDevice</type> <name>device</name></param>
+            <param>const <type>VkDeviceMemoryOpaqueCaptureAddressInfoKHR</type>* <name>pInfo</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetPipelineExecutablePropertiesKHR</name></proto>
+            <param><type>VkDevice</type>                        <name>device</name></param>
+            <param>const <type>VkPipelineInfoKHR</type>*        <name>pPipelineInfo</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pExecutableCount</name></param>
+            <param optional="true" len="pExecutableCount"><type>VkPipelineExecutablePropertiesKHR</type>* <name>pProperties</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetPipelineExecutableStatisticsKHR</name></proto>
+            <param><type>VkDevice</type>                        <name>device</name></param>
+            <param>const <type>VkPipelineExecutableInfoKHR</type>*  <name>pExecutableInfo</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pStatisticCount</name></param>
+            <param optional="true" len="pStatisticCount"><type>VkPipelineExecutableStatisticKHR</type>* <name>pStatistics</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE" errorcodes="VK_ERROR_OUT_OF_HOST_MEMORY,VK_ERROR_OUT_OF_DEVICE_MEMORY">
+            <proto><type>VkResult</type> <name>vkGetPipelineExecutableInternalRepresentationsKHR</name></proto>
+            <param><type>VkDevice</type>                        <name>device</name></param>
+            <param>const <type>VkPipelineExecutableInfoKHR</type>*  <name>pExecutableInfo</name></param>
+            <param optional="false,true"><type>uint32_t</type>* <name>pInternalRepresentationCount</name></param>
+            <param optional="true" len="pInternalRepresentationCount"><type>VkPipelineExecutableInternalRepresentationKHR</type>* <name>pInternalRepresentations</name></param>
+        </command>
+        <command queues="graphics" renderpass="both" cmdbufferlevel="primary,secondary">
+            <proto><type>void</type> <name>vkCmdSetLineStippleEXT</name></proto>
+            <param externsync="true"><type>VkCommandBuffer</type> <name>commandBuffer</name></param>
+            <param><type>uint32_t</type> <name>lineStippleFactor</name></param>
+            <param><type>uint16_t</type> <name>lineStipplePattern</name></param>
+        </command>
+        <command successcodes="VK_SUCCESS,VK_INCOMPLETE">
+            <proto><type>VkResult</type> <name>vkGetPhysicalDeviceToolPropertiesEXT</name></proto>
+            <param><type>VkPhysicalDevice</type> <name>physicalDevice</name></param>
+            <param><type>uint32_t</type>* <name>pToolCount</name></param>
+            <param optional="true" len="pToolCount"><type>VkPhysicalDeviceToolPropertiesEXT</type>* <name>pToolProperties</name></param>
+        </command>
+    </commands>
+
+    <feature api="vulkan" name="VK_VERSION_1_0" number="1.0" comment="Vulkan core API interface definitions">
+        <require comment="Header boilerplate">
+            <type name="vk_platform"/>
+        </require>
+        <require comment="API version">
+            <type name="VK_API_VERSION"/>
+            <type name="VK_API_VERSION_1_0"/>
+            <type name="VK_VERSION_MAJOR"/>
+            <type name="VK_VERSION_MINOR"/>
+            <type name="VK_VERSION_PATCH"/>
+            <type name="VK_HEADER_VERSION"/>
+        </require>
+        <require comment="API constants">
+            <enum name="VK_LOD_CLAMP_NONE"/>
+            <enum name="VK_REMAINING_MIP_LEVELS"/>
+            <enum name="VK_REMAINING_ARRAY_LAYERS"/>
+            <enum name="VK_WHOLE_SIZE"/>
+            <enum name="VK_ATTACHMENT_UNUSED"/>
+            <enum name="VK_TRUE"/>
+            <enum name="VK_FALSE"/>
+            <type name="VK_NULL_HANDLE"/>
+            <enum name="VK_QUEUE_FAMILY_IGNORED"/>
+            <enum name="VK_SUBPASS_EXTERNAL"/>
+            <type name="VkPipelineCacheHeaderVersion"/>
+        </require>
+        <require comment="Device initialization">
+            <command name="vkCreateInstance"/>
+            <command name="vkDestroyInstance"/>
+            <command name="vkEnumeratePhysicalDevices"/>
+            <command name="vkGetPhysicalDeviceFeatures"/>
+            <command name="vkGetPhysicalDeviceFormatProperties"/>
+            <command name="vkGetPhysicalDeviceImageFormatProperties"/>
+            <command name="vkGetPhysicalDeviceProperties"/>
+            <command name="vkGetPhysicalDeviceQueueFamilyProperties"/>
+            <command name="vkGetPhysicalDeviceMemoryProperties"/>
+            <command name="vkGetInstanceProcAddr"/>
+            <command name="vkGetDeviceProcAddr"/>
+        </require>
+        <require comment="Device commands">
+            <command name="vkCreateDevice"/>
+            <command name="vkDestroyDevice"/>
+        </require>
+        <require comment="Extension discovery commands">
+            <command name="vkEnumerateInstanceExtensionProperties"/>
+            <command name="vkEnumerateDeviceExtensionProperties"/>
+        </require>
+        <require comment="Layer discovery commands">
+            <command name="vkEnumerateInstanceLayerProperties"/>
+            <command name="vkEnumerateDeviceLayerProperties"/>
+        </require>
+        <require comment="queue commands">
+            <command name="vkGetDeviceQueue"/>
+            <command name="vkQueueSubmit"/>
+            <command name="vkQueueWaitIdle"/>
+            <command name="vkDeviceWaitIdle"/>
+        </require>
+        <require comment="Memory commands">
+            <command name="vkAllocateMemory"/>
+            <command name="vkFreeMemory"/>
+            <command name="vkMapMemory"/>
+            <command name="vkUnmapMemory"/>
+            <command name="vkFlushMappedMemoryRanges"/>
+            <command name="vkInvalidateMappedMemoryRanges"/>
+            <command name="vkGetDeviceMemoryCommitment"/>
+        </require>
+        <require comment="Memory management API commands">
+            <command name="vkBindBufferMemory"/>
+            <command name="vkBindImageMemory"/>
+            <command name="vkGetBufferMemoryRequirements"/>
+            <command name="vkGetImageMemoryRequirements"/>
+        </require>
+        <require comment="Sparse resource memory management API commands">
+            <command name="vkGetImageSparseMemoryRequirements"/>
+            <command name="vkGetPhysicalDeviceSparseImageFormatProperties"/>
+            <command name="vkQueueBindSparse"/>
+        </require>
+        <require comment="Fence commands">
+            <command name="vkCreateFence"/>
+            <command name="vkDestroyFence"/>
+            <command name="vkResetFences"/>
+            <command name="vkGetFenceStatus"/>
+            <command name="vkWaitForFences"/>
+        </require>
+        <require comment="Queue semaphore commands">
+            <command name="vkCreateSemaphore"/>
+            <command name="vkDestroySemaphore"/>
+        </require>
+        <require comment="Event commands">
+            <command name="vkCreateEvent"/>
+            <command name="vkDestroyEvent"/>
+            <command name="vkGetEventStatus"/>
+            <command name="vkSetEvent"/>
+            <command name="vkResetEvent"/>
+        </require>
+        <require comment="Query commands">
+            <command name="vkCreateQueryPool"/>
+            <command name="vkDestroyQueryPool"/>
+            <command name="vkGetQueryPoolResults"/>
+        </require>
+        <require comment="Buffer commands">
+            <command name="vkCreateBuffer"/>
+            <command name="vkDestroyBuffer"/>
+        </require>
+        <require comment="Buffer view commands">
+            <command name="vkCreateBufferView"/>
+            <command name="vkDestroyBufferView"/>
+        </require>
+        <require comment="Image commands">
+            <command name="vkCreateImage"/>
+            <command name="vkDestroyImage"/>
+            <command name="vkGetImageSubresourceLayout"/>
+        </require>
+        <require comment="Image view commands">
+            <command name="vkCreateImageView"/>
+            <command name="vkDestroyImageView"/>
+        </require>
+        <require comment="Shader commands">
+            <command name="vkCreateShaderModule"/>
+            <command name="vkDestroyShaderModule"/>
+        </require>
+        <require comment="Pipeline Cache commands">
+            <command name="vkCreatePipelineCache"/>
+            <command name="vkDestroyPipelineCache"/>
+            <command name="vkGetPipelineCacheData"/>
+            <command name="vkMergePipelineCaches"/>
+        </require>
+        <require comment="Pipeline commands">
+            <command name="vkCreateGraphicsPipelines"/>
+            <command name="vkCreateComputePipelines"/>
+            <command name="vkDestroyPipeline"/>
+        </require>
+        <require comment="Pipeline layout commands">
+            <command name="vkCreatePipelineLayout"/>
+            <command name="vkDestroyPipelineLayout"/>
+        </require>
+        <require comment="Sampler commands">
+            <command name="vkCreateSampler"/>
+            <command name="vkDestroySampler"/>
+        </require>
+        <require comment="Descriptor set commands">
+            <command name="vkCreateDescriptorSetLayout"/>
+            <command name="vkDestroyDescriptorSetLayout"/>
+            <command name="vkCreateDescriptorPool"/>
+            <command name="vkDestroyDescriptorPool"/>
+            <command name="vkResetDescriptorPool"/>
+            <command name="vkAllocateDescriptorSets"/>
+            <command name="vkFreeDescriptorSets"/>
+            <command name="vkUpdateDescriptorSets"/>
+        </require>
+        <require comment="Pass commands">
+            <command name="vkCreateFramebuffer"/>
+            <command name="vkDestroyFramebuffer"/>
+            <command name="vkCreateRenderPass"/>
+            <command name="vkDestroyRenderPass"/>
+            <command name="vkGetRenderAreaGranularity"/>
+        </require>
+        <require comment="Command pool commands">
+            <command name="vkCreateCommandPool"/>
+            <command name="vkDestroyCommandPool"/>
+            <command name="vkResetCommandPool"/>
+        </require>
+        <require comment="Command buffer commands">
+            <command name="vkAllocateCommandBuffers"/>
+            <command name="vkFreeCommandBuffers"/>
+            <command name="vkBeginCommandBuffer"/>
+            <command name="vkEndCommandBuffer"/>
+            <command name="vkResetCommandBuffer"/>
+        </require>
+        <require comment="Command buffer building commands">
+            <command name="vkCmdBindPipeline"/>
+            <command name="vkCmdSetViewport"/>
+            <command name="vkCmdSetScissor"/>
+            <command name="vkCmdSetLineWidth"/>
+            <command name="vkCmdSetDepthBias"/>
+            <command name="vkCmdSetBlendConstants"/>
+            <command name="vkCmdSetDepthBounds"/>
+            <command name="vkCmdSetStencilCompareMask"/>
+            <command name="vkCmdSetStencilWriteMask"/>
+            <command name="vkCmdSetStencilReference"/>
+            <command name="vkCmdBindDescriptorSets"/>
+            <command name="vkCmdBindIndexBuffer"/>
+            <command name="vkCmdBindVertexBuffers"/>
+            <command name="vkCmdDraw"/>
+            <command name="vkCmdDrawIndexed"/>
+            <command name="vkCmdDrawIndirect"/>
+            <command name="vkCmdDrawIndexedIndirect"/>
+            <command name="vkCmdDispatch"/>
+            <command name="vkCmdDispatchIndirect"/>
+            <command name="vkCmdCopyBuffer"/>
+            <command name="vkCmdCopyImage"/>
+            <command name="vkCmdBlitImage"/>
+            <command name="vkCmdCopyBufferToImage"/>
+            <command name="vkCmdCopyImageToBuffer"/>
+            <command name="vkCmdUpdateBuffer"/>
+            <command name="vkCmdFillBuffer"/>
+            <command name="vkCmdClearColorImage"/>
+            <command name="vkCmdClearDepthStencilImage"/>
+            <command name="vkCmdClearAttachments"/>
+            <command name="vkCmdResolveImage"/>
+            <command name="vkCmdSetEvent"/>
+            <command name="vkCmdResetEvent"/>
+            <command name="vkCmdWaitEvents"/>
+            <command name="vkCmdPipelineBarrier"/>
+            <command name="vkCmdBeginQuery"/>
+            <command name="vkCmdEndQuery"/>
+            <command name="vkCmdResetQueryPool"/>
+            <command name="vkCmdWriteTimestamp"/>
+            <command name="vkCmdCopyQueryPoolResults"/>
+            <command name="vkCmdPushConstants"/>
+            <command name="vkCmdBeginRenderPass"/>
+            <command name="vkCmdNextSubpass"/>
+            <command name="vkCmdEndRenderPass"/>
+            <command name="vkCmdExecuteCommands"/>
+        </require>
+        <require comment="These types are part of the API and should always be defined, even when no enabled features require them.">
+            <type name="VkBufferMemoryBarrier"/>
+            <type name="VkDispatchIndirectCommand"/>
+            <type name="VkDrawIndexedIndirectCommand"/>
+            <type name="VkDrawIndirectCommand"/>
+            <type name="VkImageMemoryBarrier"/>
+            <type name="VkMemoryBarrier"/>
+            <type name="VkObjectType"/>
+            <type name="VkBaseOutStructure"/>
+            <type name="VkBaseInStructure"/>
+            <type name="VkVendorId"/>
+        </require>
+    </feature>
+    <feature api="vulkan" name="VK_VERSION_1_1" number="1.1" comment="Vulkan 1.1 core API interface definitions.">
+        <require>
+            <type name="VK_API_VERSION_1_1"/>
+        </require>
+        <require comment="Device Initialization">
+            <command name="vkEnumerateInstanceVersion"/>
+        </require>
+        <require comment="Promoted from VK_KHR_relaxed_block_layout, which has no API"/>
+        <require comment="Promoted from VK_KHR_storage_buffer_storage_class, which has no API"/>
+        <require comment="Originally based on VK_KHR_subgroup (extension 94), but the actual enum block used was, incorrectly, that of extension 95">
+            <enum extends="VkStructureType" extnumber="95"  offset="0"          name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES"/>
+            <type                                       name="VkPhysicalDeviceSubgroupProperties"/>
+            <type                                       name="VkSubgroupFeatureFlags"/>
+            <type                                       name="VkSubgroupFeatureFlagBits"/>
+        </require>
+        <require comment="Promoted from VK_KHR_bind_memory2">
+            <command name="vkBindBufferMemory2"/>
+            <command name="vkBindImageMemory2"/>
+            <enum extends="VkStructureType" extnumber="158" offset="0"          name="VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO"/>
+            <enum extends="VkStructureType" extnumber="158" offset="1"          name="VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO"/>
+            <enum bitpos="10" extends="VkImageCreateFlagBits"                   name="VK_IMAGE_CREATE_ALIAS_BIT"/>
+            <type name="VkBindBufferMemoryInfo"/>
+            <type name="VkBindImageMemoryInfo"/>
+        </require>
+        <require comment="Promoted from VK_KHR_16bit_storage">
+            <enum extends="VkStructureType" extnumber="84"  offset="0"          name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES"/>
+            <type name="VkPhysicalDevice16BitStorageFeatures"/>
+        </require>
+        <require comment="Promoted from VK_KHR_dedicated_allocation">
+            <enum extends="VkStructureType" extnumber="128" offset="0"          name="VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS"/>
+            <enum extends="VkStructureType" extnumber="128" offset="1"          name="VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO"/>
+            <type name="VkMemoryDedicatedRequirements"/>
+            <type name="VkMemoryDedicatedAllocateInfo"/>
+        </require>
+        <require comment="Promoted from VK_KHR_device_group">
+            <enum extends="VkStructureType" extnumber="61"  offset="0"          name="VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO"/>
+            <comment>offset 1 reserved for the old VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHX enum</comment>
+            <comment>offset 2 reserved for the old VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHX enum</comment>
+            <enum extends="VkStructureType" extnumber="61"  offset="3"          name="VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO"/>
+            <enum extends="VkStructureType" extnumber="61"  offset="4"          name="VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO"/>
+            <enum extends="VkStructureType" extnumber="61"  offset="5"          name="VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO"/>
+            <enum extends="VkStructureType" extnumber="61"  offset="6"          name="VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO"/>
+            <type name="VkPeerMemoryFeatureFlags"/>
+            <type name="VkPeerMemoryFeatureFlagBits"/>
+            <type name="VkMemoryAllocateFlags"/>
+            <type name="VkMemoryAllocateFlagBits"/>
+            <type name="VkMemoryAllocateFlagsInfo"/>
+            <type name="VkDeviceGroupRenderPassBeginInfo"/>
+            <type name="VkDeviceGroupCommandBufferBeginInfo"/>
+            <type name="VkDeviceGroupSubmitInfo"/>
+            <type name="VkDeviceGroupBindSparseInfo"/>
+            <command name="vkGetDeviceGroupPeerMemoryFeatures"/>
+            <command name="vkCmdSetDeviceMask"/>
+            <command name="vkCmdDispatchBase"/>
+            <enum bitpos="3"  extends="VkPipelineCreateFlagBits"                name="VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT"/>
+            <enum bitpos="4"  extends="VkPipelineCreateFlagBits"                name="VK_PIPELINE_CREATE_DISPATCH_BASE_BIT"/>
+            <enum extends="VkPipelineCreateFlagBits"                            name="VK_PIPELINE_CREATE_DISPATCH_BASE" alias="VK_PIPELINE_CREATE_DISPATCH_BASE_BIT"/>
+            <enum bitpos="2"  extends="VkDependencyFlagBits"                    name="VK_DEPENDENCY_DEVICE_GROUP_BIT" comment="Dependency is across devices"/>
+        </require>
+        <require comment="Promoted from VK_KHR_device_group + VK_KHR_bind_memory2">
+            <enum extends="VkStructureType" extnumber="61"  offset="13"         name="VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO"/>
+            <enum extends="VkStructureType" extnumber="61"  offset="14"         name="VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO"/>
+            <type name="VkBindBufferMemoryDeviceGroupInfo"/>
+            <type name="VkBindImageMemoryDeviceGroupInfo"/>
+            <enum bitpos="6"  extends="VkImageCreateFlagBits"                   name="VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT" comment="Allows using VkBindImageMemoryDeviceGroupInfo::pSplitInstanceBindRegions when binding memory to the image"/>
+        </require>
+        <require comment="Promoted from VK_KHR_device_group_creation">
+            <enum extends="VkStructureType" extnumber="71"  offset="0"          name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES"/>
+            <enum extends="VkStructureType" extnumber="71"  offset="1"          name="VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO"/>
+            <enum name="VK_MAX_DEVICE_GROUP_SIZE"/>
+            <type name="VkPhysicalDeviceGroupProperties"/>
+            <type name="VkDeviceGroupDeviceCreateInfo"/>
+            <command name="vkEnumeratePhysicalDeviceGroups"/>
+            <enum bitpos="1"  extends="VkMemoryHeapFlagBits"                    name="VK_MEMORY_HEAP_MULTI_INSTANCE_BIT" comment="If set, heap allocations allocate multiple instances by default"/>
+        </require>
+        <require comment="Promoted from VK_KHR_get_memory_requirements2">
+            <enum extends="VkStructureType" extnumber="147" offset="0"          name="VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2"/>
+            <enum extends="VkStructureType" extnumber="147" offset="1"          name="VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2"/>
+            <enum extends="VkStructureType" extnumber="147" offset="2"          name="VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2"/>
+            <enum extends="VkStructureType" extnumber="147" offset="3"          name="VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2"/>
+            <enum extends="VkStructureType" extnumber="147" offset="4"          name="VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2"/>
+            <type name="VkBufferMemoryRequirementsInfo2"/>
+            <type name="VkImageMemoryRequirementsInfo2"/>
+            <type name="VkImageSparseMemoryRequirementsInfo2"/>
+            <type name="VkMemoryRequirements2KHR"/>
+            <type name="VkMemoryRequirements2"/>
+            <type name="VkSparseImageMemoryRequirements2"/>
+            <command name="vkGetImageMemoryRequirements2"/>
+            <command name="vkGetBufferMemoryRequirements2"/>
+            <command name="vkGetImageSparseMemoryRequirements2"/>
+        </require>
+        <require comment="Promoted from VK_KHR_get_physical_device_properties2">
+            <enum extends="VkStructureType" extnumber="60"  offset="0"          name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2"/>
+            <enum extends="VkStructureType" extnumber="60"  offset="1"          name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2"/>
+            <enum extends="VkStructureType" extnumber="60"  offset="2"          name="VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2"/>
+            <enum extends="VkStructureType" extnumber="60"  offset="3"          name="VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2"/>
+            <enum extends="VkStructureType" extnumber="60"  offset="4"          name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2"/>
+            <enum extends="VkStructureType" extnumber="60"  offset="5"          name="VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2"/>
+            <enum extends="VkStructureType" extnumber="60"  offset="6"          name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2"/>
+            <enum extends="VkStructureType" extnumber="60"  offset="7"          name="VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2"/>
+            <enum extends="VkStructureType" extnumber="60"  offset="8"          name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2"/>
+            <type name="VkPhysicalDeviceFeatures2"/>
+            <type name="VkPhysicalDeviceProperties2"/>
+            <type name="VkFormatProperties2"/>
+            <type name="VkImageFormatProperties2"/>
+            <type name="VkPhysicalDeviceImageFormatInfo2"/>
+            <type name="VkQueueFamilyProperties2"/>
+            <type name="VkPhysicalDeviceMemoryProperties2"/>
+            <type name="VkSparseImageFormatProperties2"/>
+            <type name="VkPhysicalDeviceSparseImageFormatInfo2"/>
+            <command name="vkGetPhysicalDeviceFeatures2"/>
+            <command name="vkGetPhysicalDeviceProperties2"/>
+            <command name="vkGetPhysicalDeviceFormatProperties2"/>
+            <command name="vkGetPhysicalDeviceImageFormatProperties2"/>
+            <command name="vkGetPhysicalDeviceQueueFamilyProperties2"/>
+            <command name="vkGetPhysicalDeviceMemoryProperties2"/>
+            <command name="vkGetPhysicalDeviceSparseImageFormatProperties2"/>
+        </require>
+        <require comment="Promoted from VK_KHR_maintenance1">
+            <enum extends="VkResult"        extnumber="70"  offset="0"  dir="-" name="VK_ERROR_OUT_OF_POOL_MEMORY"/>
+            <enum bitpos="14" extends="VkFormatFeatureFlagBits"                 name="VK_FORMAT_FEATURE_TRANSFER_SRC_BIT" comment="Format can be used as the source image of image transfer commands"/>
+            <enum bitpos="15" extends="VkFormatFeatureFlagBits"                 name="VK_FORMAT_FEATURE_TRANSFER_DST_BIT" comment="Format can be used as the destination image of image transfer commands"/>
+            <enum bitpos="5"  extends="VkImageCreateFlagBits"                   name="VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT" comment="The 3D image can be viewed as a 2D or 2D array image"/>
+            <command name="vkTrimCommandPool"/>
+            <comment>Additional dependent types / tokens extending enumerants, not explicitly mentioned</comment>
+            <type name="VkCommandPoolTrimFlags"/>
+        </require>
+        <require comment="Promoted from VK_KHR_maintenance2">
+            <enum bitpos="7"  extends="VkImageCreateFlagBits"                   name="VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT"/>
+            <enum bitpos="8"  extends="VkImageCreateFlagBits"                   name="VK_IMAGE_CREATE_EXTENDED_USAGE_BIT"/>
+            <enum extends="VkStructureType" extnumber="118" offset="0"          name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES"/>
+            <enum extends="VkStructureType" extnumber="118" offset="1"          name="VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO"/>
+            <enum extends="VkStructureType" extnumber="118" offset="2"          name="VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO"/>
+            <enum extends="VkStructureType" extnumber="118" offset="3"          name="VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO"/>
+            <enum extends="VkImageLayout"   extnumber="118" offset="0"          name="VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL"/>
+            <enum extends="VkImageLayout"   extnumber="118" offset="1"          name="VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL"/>
+            <type name="VkPhysicalDevicePointClippingProperties"/>
+            <type name="VkPointClippingBehavior"/>
+            <type name="VkRenderPassInputAttachmentAspectCreateInfo"/>
+            <type name="VkInputAttachmentAspectReference"/>
+            <type name="VkImageViewUsageCreateInfo"/>
+            <type name="VkTessellationDomainOrigin"/>
+            <type name="VkPipelineTessellationDomainOriginStateCreateInfo"/>
+        </require>
+        <require comment="Promoted from VK_KHR_multiview">
+            <enum extends="VkStructureType" extnumber="54"  offset="0"          name="VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO"/>
+            <enum extends="VkStructureType" extnumber="54"  offset="1"          name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES"/>
+            <enum extends="VkStructureType" extnumber="54"  offset="2"          name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES"/>
+            <enum bitpos="1"  extends="VkDependencyFlagBits"                    name="VK_DEPENDENCY_VIEW_LOCAL_BIT"/>
+            <type name="VkRenderPassMultiviewCreateInfo"/>
+            <type name="VkPhysicalDeviceMultiviewFeatures"/>
+            <type name="VkPhysicalDeviceMultiviewProperties"/>
+        </require>
+        <require comment="Promoted from VK_KHR_variable_pointers">
+            <enum extends="VkStructureType" extnumber="121" offset="0"          name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES"/>
+            <enum extends="VkStructureType"                                     name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES" alias="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES"/>
+            <type name="VkPhysicalDeviceVariablePointerFeatures"/>
+            <type name="VkPhysicalDeviceVariablePointersFeatures"/>
+        </require>
+        <require comment="Originally based on VK_KHR_protected_memory (extension 146), which was never published; thus the mystifying large value= numbers below. These are not aliased since they weren't actually promoted from an extension.">
+            <enum extends="VkStructureType" extnumber="146" offset="0"          name="VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO"/>
+            <enum extends="VkStructureType" extnumber="146" offset="1"          name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES"/>
+            <enum extends="VkStructureType" extnumber="146" offset="2"          name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES"/>
+            <enum extends="VkStructureType" extnumber="146" offset="3"          name="VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2"/>
+            <enum bitpos="4"  extends="VkQueueFlagBits"                         name="VK_QUEUE_PROTECTED_BIT" comment="Queues may support protected operations"/>
+            <enum bitpos="0"  extends="VkDeviceQueueCreateFlagBits"             name="VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT" comment="Queue is a protected-capable device queue"/>
+            <enum bitpos="5"  extends="VkMemoryPropertyFlagBits"                name="VK_MEMORY_PROPERTY_PROTECTED_BIT" comment="Memory is protected"/>
+            <enum bitpos="3"  extends="VkBufferCreateFlagBits"                  name="VK_BUFFER_CREATE_PROTECTED_BIT" comment="Buffer requires protected memory"/>
+            <enum bitpos="11" extends="VkImageCreateFlagBits"                   name="VK_IMAGE_CREATE_PROTECTED_BIT" comment="Image requires protected memory"/>
+            <enum bitpos="2"  extends="VkCommandPoolCreateFlagBits"             name="VK_COMMAND_POOL_CREATE_PROTECTED_BIT" comment="Command buffers allocated from pool are protected command buffers"/>
+            <type name="VkPhysicalDeviceProtectedMemoryFeatures"/>
+            <type name="VkPhysicalDeviceProtectedMemoryProperties"/>
+            <type name="VkDeviceQueueInfo2"/>
+            <type name="VkProtectedSubmitInfo"/>
+            <command name="vkGetDeviceQueue2"/>
+        </require>
+        <require comment="Promoted from VK_KHR_sampler_ycbcr_conversion">
+            <enum extends="VkStructureType" extnumber="157" offset="0"          name="VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO"/>
+            <enum extends="VkStructureType" extnumber="157" offset="1"          name="VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO"/>
+            <enum extends="VkStructureType" extnumber="157" offset="2"          name="VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO"/>
+            <enum extends="VkStructureType" extnumber="157" offset="3"          name="VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO"/>
+            <enum extends="VkStructureType" extnumber="157" offset="4"          name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES"/>
+            <enum extends="VkStructureType" extnumber="157" offset="5"          name="VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES"/>
+            <enum extends="VkObjectType"    extnumber="157" offset="0"          name="VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION"/>
+            <enum extends="VkFormat"        extnumber="157" offset="0"          name="VK_FORMAT_G8B8G8R8_422_UNORM"/>
+            <enum extends="VkFormat"        extnumber="157" offset="1"          name="VK_FORMAT_B8G8R8G8_422_UNORM"/>
+            <enum extends="VkFormat"        extnumber="157" offset="2"          name="VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM"/>
+            <enum extends="VkFormat"        extnumber="157" offset="3"          name="VK_FORMAT_G8_B8R8_2PLANE_420_UNORM"/>
+            <enum extends="VkFormat"        extnumber="157" offset="4"          name="VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM"/>
+            <enum extends="VkFormat"        extnumber="157" offset="5"          name="VK_FORMAT_G8_B8R8_2PLANE_422_UNORM"/>
+            <enum extends="VkFormat"        extnumber="157" offset="6"          name="VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM"/>
+            <enum extends="VkFormat"        extnumber="157" offset="7"          name="VK_FORMAT_R10X6_UNORM_PACK16"/>
+            <enum extends="VkFormat"        extnumber="157" offset="8"          name="VK_FORMAT_R10X6G10X6_UNORM_2PACK16"/>
+            <enum extends="VkFormat"        extnumber="157" offset="9"          name="VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16"/>
+            <enum extends="VkFormat"        extnumber="157" offset="10"         name="VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16"/>
+            <enum extends="VkFormat"        extnumber="157" offset="11"         name="VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16"/>
+            <enum extends="VkFormat"        extnumber="157" offset="12"         name="VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16"/>
+            <enum extends="VkFormat"        extnumber="157" offset="13"         name="VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16"/>
+            <enum extends="VkFormat"        extnumber="157" offset="14"         name="VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16"/>
+            <enum extends="VkFormat"        extnumber="157" offset="15"         name="VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16"/>
+            <enum extends="VkFormat"        extnumber="157" offset="16"         name="VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16"/>
+            <enum extends="VkFormat"        extnumber="157" offset="17"         name="VK_FORMAT_R12X4_UNORM_PACK16"/>
+            <enum extends="VkFormat"        extnumber="157" offset="18"         name="VK_FORMAT_R12X4G12X4_UNORM_2PACK16"/>
+            <enum extends="VkFormat"        extnumber="157" offset="19"         name="VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16"/>
+            <enum extends="VkFormat"        extnumber="157" offset="20"         name="VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16"/>
+            <enum extends="VkFormat"        extnumber="157" offset="21"         name="VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16"/>
+            <enum extends="VkFormat"        extnumber="157" offset="22"         name="VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16"/>
+            <enum extends="VkFormat"        extnumber="157" offset="23"         name="VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16"/>
+            <enum extends="VkFormat"        extnumber="157" offset="24"         name="VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16"/>
+            <enum extends="VkFormat"        extnumber="157" offset="25"         name="VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16"/>
+            <enum extends="VkFormat"        extnumber="157" offset="26"         name="VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16"/>
+            <enum extends="VkFormat"        extnumber="157" offset="27"         name="VK_FORMAT_G16B16G16R16_422_UNORM"/>
+            <enum extends="VkFormat"        extnumber="157" offset="28"         name="VK_FORMAT_B16G16R16G16_422_UNORM"/>
+            <enum extends="VkFormat"        extnumber="157" offset="29"         name="VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM"/>
+            <enum extends="VkFormat"        extnumber="157" offset="30"         name="VK_FORMAT_G16_B16R16_2PLANE_420_UNORM"/>
+            <enum extends="VkFormat"        extnumber="157" offset="31"         name="VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM"/>
+            <enum extends="VkFormat"        extnumber="157" offset="32"         name="VK_FORMAT_G16_B16R16_2PLANE_422_UNORM"/>
+            <enum extends="VkFormat"        extnumber="157" offset="33"         name="VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM"/>
+            <enum bitpos="4"  extends="VkImageAspectFlagBits"                   name="VK_IMAGE_ASPECT_PLANE_0_BIT"/>
+            <enum bitpos="5"  extends="VkImageAspectFlagBits"                   name="VK_IMAGE_ASPECT_PLANE_1_BIT"/>
+            <enum bitpos="6"  extends="VkImageAspectFlagBits"                   name="VK_IMAGE_ASPECT_PLANE_2_BIT"/>
+            <enum bitpos="9"  extends="VkImageCreateFlagBits"                   name="VK_IMAGE_CREATE_DISJOINT_BIT"/>
+            <enum bitpos="17" extends="VkFormatFeatureFlagBits"                 name="VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT" comment="Format can have midpoint rather than cosited chroma samples"/>
+            <enum bitpos="18" extends="VkFormatFeatureFlagBits"                 name="VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT" comment="Format can be used with linear filtering whilst color conversion is enabled"/>
+            <enum bitpos="19" extends="VkFormatFeatureFlagBits"                 name="VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT" comment="Format can have different chroma, min and mag filters"/>
+            <enum bitpos="20" extends="VkFormatFeatureFlagBits"                 name="VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT"/>
+            <enum bitpos="21" extends="VkFormatFeatureFlagBits"                 name="VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT"/>
+            <enum bitpos="22" extends="VkFormatFeatureFlagBits"                 name="VK_FORMAT_FEATURE_DISJOINT_BIT" comment="Format supports disjoint planes"/>
+            <enum bitpos="23" extends="VkFormatFeatureFlagBits"                 name="VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT" comment="Format can have cosited rather than midpoint chroma samples"/>
+            <type name="VkSamplerYcbcrConversionCreateInfo"/>
+            <type name="VkSamplerYcbcrConversionInfo"/>
+            <type name="VkBindImagePlaneMemoryInfo"/>
+            <type name="VkImagePlaneMemoryRequirementsInfo"/>
+            <type name="VkPhysicalDeviceSamplerYcbcrConversionFeatures"/>
+            <type name="VkSamplerYcbcrConversionImageFormatProperties"/>
+            <command name="vkCreateSamplerYcbcrConversion"/>
+            <command name="vkDestroySamplerYcbcrConversion"/>
+            <comment>Additional dependent types / tokens extending enumerants, not explicitly mentioned</comment>
+            <type name="VkSamplerYcbcrConversion"/>
+            <type name="VkSamplerYcbcrModelConversion"/>
+            <type name="VkSamplerYcbcrRange"/>
+            <type name="VkChromaLocation"/>
+        </require>
+        <require comment="Promoted from VK_KHR_descriptor_update_template">
+            <enum extends="VkStructureType" extnumber="86"  offset="0"          name="VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO"/>
+            <enum extends="VkObjectType"    extnumber="86"  offset="0"          name="VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE"/>
+            <command name="vkCreateDescriptorUpdateTemplate"/>
+            <command name="vkDestroyDescriptorUpdateTemplate"/>
+            <command name="vkUpdateDescriptorSetWithTemplate"/>
+            <type name="VkDescriptorUpdateTemplate"/>
+            <type name="VkDescriptorUpdateTemplateCreateFlags"/>
+            <type name="VkDescriptorUpdateTemplateType"/>
+            <type name="VkDescriptorUpdateTemplateEntry"/>
+            <type name="VkDescriptorUpdateTemplateCreateInfo"/>
+        </require>
+        <require comment="Promoted from VK_KHR_external_memory_capabilities">
+            <enum extends="VkStructureType" extnumber="72"  offset="0"          name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO"/>
+            <enum extends="VkStructureType" extnumber="72"  offset="1"          name="VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES"/>
+            <enum extends="VkStructureType" extnumber="72"  offset="2"          name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO"/>
+            <enum extends="VkStructureType" extnumber="72"  offset="3"          name="VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES"/>
+            <enum extends="VkStructureType" extnumber="72"  offset="4"          name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES"/>
+            <enum name="VK_LUID_SIZE"/>
+            <type name="VkExternalMemoryHandleTypeFlags"/>
+            <type name="VkExternalMemoryHandleTypeFlagBits"/>
+            <type name="VkExternalMemoryFeatureFlags"/>
+            <type name="VkExternalMemoryFeatureFlagBits"/>
+            <type name="VkExternalMemoryProperties"/>
+            <type name="VkPhysicalDeviceExternalImageFormatInfo"/>
+            <type name="VkExternalImageFormatProperties"/>
+            <type name="VkPhysicalDeviceExternalBufferInfo"/>
+            <type name="VkExternalBufferProperties"/>
+            <type name="VkPhysicalDeviceIDProperties"/>
+            <command name="vkGetPhysicalDeviceExternalBufferProperties"/>
+        </require>
+        <require comment="Promoted from VK_KHR_external_memory">
+            <enum extends="VkStructureType" extnumber="73"  offset="0"          name="VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO"/>
+            <enum extends="VkStructureType" extnumber="73"  offset="1"          name="VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO"/>
+            <enum extends="VkStructureType" extnumber="73"  offset="2"          name="VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO"/>
+            <enum extends="VkResult"        extnumber="73"  offset="3"  dir="-" name="VK_ERROR_INVALID_EXTERNAL_HANDLE"/>
+            <enum name="VK_QUEUE_FAMILY_EXTERNAL"/>
+            <type name="VkExternalMemoryImageCreateInfo"/>
+            <type name="VkExternalMemoryBufferCreateInfo"/>
+            <type name="VkExportMemoryAllocateInfo"/>
+        </require>
+        <require comment="Promoted from VK_KHR_external_fence_capabilities">
+            <enum extends="VkStructureType" extnumber="113" offset="0"          name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO"/>
+            <enum extends="VkStructureType" extnumber="113" offset="1"          name="VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES"/>
+            <type name="VkExternalFenceHandleTypeFlags"/>
+            <type name="VkExternalFenceHandleTypeFlagBits"/>
+            <type name="VkExternalFenceFeatureFlags"/>
+            <type name="VkExternalFenceFeatureFlagBits"/>
+            <type name="VkPhysicalDeviceExternalFenceInfo"/>
+            <type name="VkExternalFenceProperties"/>
+            <command name="vkGetPhysicalDeviceExternalFenceProperties"/>
+        </require>
+        <require comment="Promoted from VK_KHR_external_fence">
+            <enum extends="VkStructureType" extnumber="114" offset="0"          name="VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO"/>
+            <type name="VkFenceImportFlags"/>
+            <type name="VkFenceImportFlagBits"/>
+            <type name="VkExportFenceCreateInfo"/>
+        </require>
+        <require comment="Promoted from VK_KHR_external_semaphore">
+            <enum extends="VkStructureType" extnumber="78"  offset="0"          name="VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO"/>
+            <type name="VkSemaphoreImportFlags"/>
+            <type name="VkSemaphoreImportFlagBits"/>
+            <type name="VkExportSemaphoreCreateInfo"/>
+        </require>
+        <require comment="Promoted from VK_KHR_external_semaphore_capabilities">
+            <enum extends="VkStructureType" extnumber="77"  offset="0"          name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO"/>
+            <enum extends="VkStructureType" extnumber="77"  offset="1"          name="VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES"/>
+            <type name="VkExternalSemaphoreHandleTypeFlags"/>
+            <type name="VkExternalSemaphoreHandleTypeFlagBits"/>
+            <type name="VkExternalSemaphoreFeatureFlags"/>
+            <type name="VkExternalSemaphoreFeatureFlagBits"/>
+            <type name="VkPhysicalDeviceExternalSemaphoreInfo"/>
+            <type name="VkExternalSemaphoreProperties"/>
+            <command name="vkGetPhysicalDeviceExternalSemaphoreProperties"/>
+        </require>
+        <require comment="Promoted from VK_KHR_maintenance3">
+            <enum extends="VkStructureType" extnumber="169" offset="0"          name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES"/>
+            <enum extends="VkStructureType" extnumber="169" offset="1"          name="VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT"/>
+            <type name="VkPhysicalDeviceMaintenance3Properties"/>
+            <type name="VkDescriptorSetLayoutSupport"/>
+            <command name="vkGetDescriptorSetLayoutSupport"/>
+        </require>
+        <require comment="Promoted from VK_KHR_shader_draw_parameters, with a feature support query added">
+            <enum extends="VkStructureType" extnumber="64"  offset="0"          name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES"/>
+            <enum extends="VkStructureType"                                     name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES" alias="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES"/>
+            <type name="VkPhysicalDeviceShaderDrawParameterFeatures"/>
+            <type name="VkPhysicalDeviceShaderDrawParametersFeatures"/>
+        </require>
+    </feature>
+
+
+    <extensions comment="Vulkan extension interface definitions">
+        <extension name="VK_KHR_surface" number="1" type="instance" author="KHR" contact="James Jones @cubanismo,Ian Elliott @ianelliottus" supported="vulkan">
+            <require>
+                <enum value="25"                                                name="VK_KHR_SURFACE_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_surface&quot;"                        name="VK_KHR_SURFACE_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkResult" dir="-"                     name="VK_ERROR_SURFACE_LOST_KHR"/>
+                <enum offset="1" extends="VkResult" dir="-"                     name="VK_ERROR_NATIVE_WINDOW_IN_USE_KHR"/>
+                <enum offset="0" extends="VkObjectType"                         name="VK_OBJECT_TYPE_SURFACE_KHR"                  comment="VkSurfaceKHR"/>
+                <command name="vkDestroySurfaceKHR"/>
+                <command name="vkGetPhysicalDeviceSurfaceSupportKHR"/>
+                <command name="vkGetPhysicalDeviceSurfaceCapabilitiesKHR"/>
+                <command name="vkGetPhysicalDeviceSurfaceFormatsKHR"/>
+                <command name="vkGetPhysicalDeviceSurfacePresentModesKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_swapchain" number="2" type="device" requires="VK_KHR_surface" author="KHR" contact="James Jones @cubanismo,Ian Elliott @ianelliottus" supported="vulkan">
+            <require>
+                <enum value="70"                                                name="VK_KHR_SWAPCHAIN_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_swapchain&quot;"                      name="VK_KHR_SWAPCHAIN_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR"/>
+                <enum offset="1" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_PRESENT_INFO_KHR"/>
+                <enum offset="2" extends="VkImageLayout"                        name="VK_IMAGE_LAYOUT_PRESENT_SRC_KHR"/>
+                <enum offset="3" extends="VkResult"                             name="VK_SUBOPTIMAL_KHR"/>
+                <enum offset="4" extends="VkResult" dir="-"                     name="VK_ERROR_OUT_OF_DATE_KHR"/>
+                <enum offset="0" extends="VkObjectType"                         name="VK_OBJECT_TYPE_SWAPCHAIN_KHR"              comment="VkSwapchainKHR"/>
+                <command name="vkCreateSwapchainKHR"/>
+                <command name="vkDestroySwapchainKHR"/>
+                <command name="vkGetSwapchainImagesKHR"/>
+                <command name="vkAcquireNextImageKHR"/>
+                <command name="vkQueuePresentKHR"/>
+            </require>
+            <require feature="VK_VERSION_1_1">
+                <comment>This duplicates definitions in VK_KHR_device_group below</comment>
+                <enum extends="VkStructureType" extnumber="61"  offset="7"      name="VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR"/>
+                <enum extends="VkStructureType" extnumber="61"  offset="8"      name="VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR"/>
+                <enum extends="VkStructureType" extnumber="61"  offset="9"      name="VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR"/>
+                <enum extends="VkStructureType" extnumber="61"  offset="10"     name="VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR"/>
+                <enum extends="VkStructureType" extnumber="61"  offset="11"     name="VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR"/>
+                <enum extends="VkStructureType" extnumber="61"  offset="12"     name="VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR"/>
+                <enum bitpos="0" extends="VkSwapchainCreateFlagBitsKHR"         name="VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR" comment="Allow images with VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT"/>
+                <type name="VkImageSwapchainCreateInfoKHR"/>
+                <type name="VkBindImageMemorySwapchainInfoKHR"/>
+                <type name="VkAcquireNextImageInfoKHR"/>
+                <type name="VkDeviceGroupPresentModeFlagBitsKHR"/>
+                <type name="VkDeviceGroupPresentModeFlagsKHR"/>
+                <type name="VkDeviceGroupPresentCapabilitiesKHR"/>
+                <type name="VkDeviceGroupPresentInfoKHR"/>
+                <type name="VkDeviceGroupSwapchainCreateInfoKHR"/>
+                <command name="vkGetDeviceGroupPresentCapabilitiesKHR"/>
+                <command name="vkGetDeviceGroupSurfacePresentModesKHR"/>
+                <command name="vkGetPhysicalDevicePresentRectanglesKHR"/>
+                <command name="vkAcquireNextImage2KHR"/>
+                <enum bitpos="1" extends="VkSwapchainCreateFlagBitsKHR"         name="VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR"     comment="Swapchain is protected"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_display" number="3" type="instance" requires="VK_KHR_surface" author="KHR" contact="James Jones @cubanismo,Norbert Nopper @FslNopper" supported="vulkan">
+            <require>
+                <enum value="23"                                                name="VK_KHR_DISPLAY_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_display&quot;"                        name="VK_KHR_DISPLAY_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR"/>
+                <enum offset="1" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR"/>
+                <enum offset="0" extends="VkObjectType"                         name="VK_OBJECT_TYPE_DISPLAY_KHR"               comment="VkDisplayKHR"/>
+                <enum offset="1" extends="VkObjectType"                         name="VK_OBJECT_TYPE_DISPLAY_MODE_KHR"          comment="VkDisplayModeKHR"/>
+                <type name="VkDisplayPlaneAlphaFlagsKHR"/>
+                <type name="VkDisplayPlaneAlphaFlagBitsKHR"/>
+                <type name="VkDisplayPropertiesKHR"/>
+                <type name="VkDisplayModeParametersKHR"/>
+                <type name="VkDisplayModePropertiesKHR"/>
+                <type name="VkDisplayModeCreateInfoKHR"/>
+                <type name="VkDisplayPlaneCapabilitiesKHR"/>
+                <type name="VkDisplayPlanePropertiesKHR"/>
+                <type name="VkDisplaySurfaceCreateInfoKHR"/>
+                <command name="vkGetPhysicalDeviceDisplayPropertiesKHR"/>
+                <command name="vkGetPhysicalDeviceDisplayPlanePropertiesKHR"/>
+                <command name="vkGetDisplayPlaneSupportedDisplaysKHR"/>
+                <command name="vkGetDisplayModePropertiesKHR"/>
+                <command name="vkCreateDisplayModeKHR"/>
+                <command name="vkGetDisplayPlaneCapabilitiesKHR"/>
+                <command name="vkCreateDisplayPlaneSurfaceKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_display_swapchain" number="4" type="device" requires="VK_KHR_swapchain,VK_KHR_display" author="KHR" contact="James Jones @cubanismo" supported="vulkan">
+            <require>
+                <enum value="10"                                                name="VK_KHR_DISPLAY_SWAPCHAIN_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_display_swapchain&quot;"              name="VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR"/>
+                <enum offset="1" extends="VkResult" dir="-"                     name="VK_ERROR_INCOMPATIBLE_DISPLAY_KHR"/>
+                <type name="VkDisplayPresentInfoKHR"/>
+                <command name="vkCreateSharedSwapchainsKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_xlib_surface" number="5" type="instance" requires="VK_KHR_surface" platform="xlib" author="KHR" contact="Jesse Hall @critsec,Ian Elliott @ianelliottus" supported="vulkan">
+            <require>
+                <enum value="6"                                                 name="VK_KHR_XLIB_SURFACE_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_xlib_surface&quot;"                   name="VK_KHR_XLIB_SURFACE_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR"/>
+                <type name="VkXlibSurfaceCreateFlagsKHR"/>
+                <type name="VkXlibSurfaceCreateInfoKHR"/>
+                <command name="vkCreateXlibSurfaceKHR"/>
+                <command name="vkGetPhysicalDeviceXlibPresentationSupportKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_xcb_surface" number="6" type="instance" requires="VK_KHR_surface" platform="xcb" author="KHR" contact="Jesse Hall @critsec,Ian Elliott @ianelliottus" supported="vulkan">
+            <require>
+                <enum value="6"                                                 name="VK_KHR_XCB_SURFACE_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_xcb_surface&quot;"                    name="VK_KHR_XCB_SURFACE_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR"/>
+                <type name="VkXcbSurfaceCreateFlagsKHR"/>
+                <type name="VkXcbSurfaceCreateInfoKHR"/>
+                <command name="vkCreateXcbSurfaceKHR"/>
+                <command name="vkGetPhysicalDeviceXcbPresentationSupportKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_wayland_surface" number="7" type="instance" requires="VK_KHR_surface" platform="wayland" author="KHR" contact="Jesse Hall @critsec,Ian Elliott @ianelliottus" supported="vulkan">
+            <require>
+                <enum value="6"                                                 name="VK_KHR_WAYLAND_SURFACE_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_wayland_surface&quot;"                name="VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR"/>
+                <type name="VkWaylandSurfaceCreateFlagsKHR"/>
+                <type name="VkWaylandSurfaceCreateInfoKHR"/>
+                <command name="vkCreateWaylandSurfaceKHR"/>
+                <command name="vkGetPhysicalDeviceWaylandPresentationSupportKHR"/>
+            </require>
+        </extension>
+        <!-- Extension permanently disabled.  Extension number should not be re-used -->
+        <extension name="VK_KHR_mir_surface" number="8" type="instance" requires="VK_KHR_surface" author="KHR" supported="disabled">
+            <require>
+                <enum value="4"                                                 name="VK_KHR_MIR_SURFACE_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_mir_surface&quot;"                    name="VK_KHR_MIR_SURFACE_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_android_surface" number="9" type="instance" requires="VK_KHR_surface" platform="android" author="KHR" contact="Jesse Hall @critsec" supported="vulkan">
+            <require>
+                <enum value="6"                                                 name="VK_KHR_ANDROID_SURFACE_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_android_surface&quot;"                name="VK_KHR_ANDROID_SURFACE_EXTENSION_NAME"/>
+                <type name="ANativeWindow"/>
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR"/>
+                <type name="VkAndroidSurfaceCreateFlagsKHR"/>
+                <type name="VkAndroidSurfaceCreateInfoKHR"/>
+                <command name="vkCreateAndroidSurfaceKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_win32_surface" number="10" type="instance" requires="VK_KHR_surface" platform="win32" author="KHR" contact="Jesse Hall @critsec,Ian Elliott @ianelliottus" supported="vulkan">
+            <require>
+                <enum value="6"                                                 name="VK_KHR_WIN32_SURFACE_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_win32_surface&quot;"                  name="VK_KHR_WIN32_SURFACE_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR"/>
+                <type name="VkWin32SurfaceCreateFlagsKHR"/>
+                <type name="VkWin32SurfaceCreateInfoKHR"/>
+                <command name="vkCreateWin32SurfaceKHR"/>
+                <command name="vkGetPhysicalDeviceWin32PresentationSupportKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_ANDROID_native_buffer" number="11" type="device" author="ANDROID" platform="android" contact="Jesse Hall @critsec" supported="disabled">
+            <require>
+                <comment>VK_ANDROID_native_buffer is used between the Android Vulkan loader and drivers to implement the WSI extensions. It isn't exposed to applications and uses types that aren't part of Android's stable public API, so it is left disabled to keep it out of the standard Vulkan headers.</comment>
+                <enum value="8"                                                 name="VK_ANDROID_NATIVE_BUFFER_SPEC_VERSION"/>
+                <enum value="11"                                                name="VK_ANDROID_NATIVE_BUFFER_NUMBER"/>
+                <enum value="&quot;VK_ANDROID_native_buffer&quot;"              name="VK_ANDROID_NATIVE_BUFFER_NAME"/>
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID"/>
+                <enum offset="1" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_SWAPCHAIN_IMAGE_CREATE_INFO_ANDROID"/>
+                <enum offset="2" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENTATION_PROPERTIES_ANDROID"/>
+                <type name="VkNativeBufferANDROID"/>
+                <type name="VkSwapchainImageCreateInfoANDROID"/>
+                <type name="VkPhysicalDevicePresentationPropertiesANDROID"/>
+                <type name="VkNativeBufferUsage2ANDROID"/>
+                <type name="VkSwapchainImageUsageFlagBitsANDROID"/>
+                <type name="VkSwapchainImageUsageFlagsANDROID"/>
+                <command name="vkGetSwapchainGrallocUsageANDROID"/>
+                <command name="vkAcquireImageANDROID"/>
+                <command name="vkQueueSignalReleaseImageANDROID"/>
+                <command name="vkGetSwapchainGrallocUsage2ANDROID"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_debug_report" number="12" type="instance" author="GOOGLE" contact="Courtney Goeltzenleuchter @courtney-g" supported="vulkan" deprecatedby="VK_EXT_debug_utils">
+            <require>
+                <enum value="9"                                                 name="VK_EXT_DEBUG_REPORT_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_debug_report&quot;"                   name="VK_EXT_DEBUG_REPORT_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT"/>
+                <enum alias="VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT" extends="VkStructureType" name="VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT" comment="Backwards-compatible alias containing a typo"/>
+                <enum offset="1" extends="VkResult" dir="-"                     name="VK_ERROR_VALIDATION_FAILED_EXT"/>
+                <enum offset="0" extends="VkObjectType"                         name="VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT"          comment="VkDebugReportCallbackEXT"/>
+                <type name="VkDebugReportObjectTypeEXT"/>
+                <type name="VkDebugReportCallbackCreateInfoEXT"/>
+                <command name="vkCreateDebugReportCallbackEXT"/>
+                <command name="vkDestroyDebugReportCallbackEXT"/>
+                <command name="vkDebugReportMessageEXT"/>
+            </require>
+            <require feature="VK_VERSION_1_1">
+                <comment>This duplicates definitions in other extensions, below</comment>
+                <enum extends="VkDebugReportObjectTypeEXT" extnumber="157" offset="0"  name="VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT"/>
+                <enum extends="VkDebugReportObjectTypeEXT" extnumber="86"  offset="0"  name="VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_glsl_shader" number="13" type="device" author="NV" contact="Piers Daniell @pdaniell-nv" supported="vulkan" deprecatedby="">
+            <require>
+                <enum value="1"                                                 name="VK_NV_GLSL_SHADER_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_glsl_shader&quot;"                     name="VK_NV_GLSL_SHADER_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkResult" dir="-"                     name="VK_ERROR_INVALID_SHADER_NV"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_depth_range_unrestricted" type="device" number="14" author="NV" contact="Piers Daniell @pdaniell-nv" supported="vulkan">
+            <require>
+                <enum value="1"                                                 name="VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_depth_range_unrestricted&quot;"       name="VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_sampler_mirror_clamp_to_edge" type="device" number="15" author="KHR" contact="Tobias Hector @tobski" supported="vulkan">
+            <require>
+                <enum value="3"                                                 name="VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_sampler_mirror_clamp_to_edge&quot;"   name="VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME"/>
+                <enum value="4" extends="VkSamplerAddressMode"                  name="VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE" comment="Note that this defines what was previously a core enum, and so uses the 'value' attribute rather than 'offset', and does not have a suffix. This is a special case, and should not be repeated"/>
+                <enum           extends="VkSamplerAddressMode"                  name="VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR" alias="VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE" comment="Alias introduced for consistency with extension suffixing rules"/>
+            </require>
+        </extension>
+        <extension name="VK_IMG_filter_cubic" number="16" type="device" author="IMG" contact="Tobias Hector @tobski" supported="vulkan">
+            <require>
+                <enum value="1"                                                 name="VK_IMG_FILTER_CUBIC_SPEC_VERSION"/>
+                <enum value="&quot;VK_IMG_filter_cubic&quot;"                   name="VK_IMG_FILTER_CUBIC_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkFilter"                             name="VK_FILTER_CUBIC_IMG"/>
+                <enum bitpos="13" extends="VkFormatFeatureFlagBits"             name="VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG" comment="Format can be filtered with VK_FILTER_CUBIC_IMG when being sampled"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_17" number="17" author="AMD" contact="Daniel Rakos @drakos-amd" supported="disabled">
+            <require>
+                <enum value="0"                                                 name="VK_AMD_EXTENSION_17_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_17&quot;"                   name="VK_AMD_EXTENSION_17_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_18" number="18" author="AMD" contact="Daniel Rakos @drakos-amd" supported="disabled">
+            <require>
+                <enum value="0"                                                 name="VK_AMD_EXTENSION_18_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_18&quot;"                   name="VK_AMD_EXTENSION_18_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_rasterization_order" number="19" type="device" author="AMD" contact="Daniel Rakos @drakos-amd" supported="vulkan">
+            <require>
+                <enum value="1"                                                 name="VK_AMD_RASTERIZATION_ORDER_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_rasterization_order&quot;"            name="VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD"/>
+                <type name="VkRasterizationOrderAMD"/>
+                <type name="VkPipelineRasterizationStateRasterizationOrderAMD"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_20" number="20" author="AMD" contact="Daniel Rakos @drakos-amd" supported="disabled">
+            <require>
+                <enum value="0"                                                 name="VK_AMD_EXTENSION_20_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_20&quot;"                   name="VK_AMD_EXTENSION_20_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_shader_trinary_minmax" number="21" type="device" author="AMD" contact="Qun Lin @linqun" supported="vulkan">
+            <require>
+                <enum value="1"                                                 name="VK_AMD_SHADER_TRINARY_MINMAX_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_shader_trinary_minmax&quot;"          name="VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_shader_explicit_vertex_parameter" number="22" type="device" author="AMD" contact="Qun Lin @linqun" supported="vulkan">
+            <require>
+                <enum value="1"                                                 name="VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_shader_explicit_vertex_parameter&quot;" name="VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_debug_marker" number="23" type="device" requires="VK_EXT_debug_report" author="Baldur Karlsson" contact="Baldur Karlsson @baldurk" supported="vulkan" promotedto="VK_EXT_debug_utils">
+            <require>
+                <enum value="4"                                                 name="VK_EXT_DEBUG_MARKER_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_debug_marker&quot;"                   name="VK_EXT_DEBUG_MARKER_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT"/>
+                <enum offset="1" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT"/>
+                <enum offset="2" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT"/>
+                <type name="VkDebugReportObjectTypeEXT"/>
+                <type name="VkDebugMarkerObjectNameInfoEXT"/>
+                <type name="VkDebugMarkerObjectTagInfoEXT"/>
+                <type name="VkDebugMarkerMarkerInfoEXT"/>
+                <command name="vkDebugMarkerSetObjectTagEXT"/>
+                <command name="vkDebugMarkerSetObjectNameEXT"/>
+                <command name="vkCmdDebugMarkerBeginEXT"/>
+                <command name="vkCmdDebugMarkerEndEXT"/>
+                <command name="vkCmdDebugMarkerInsertEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_24" number="24" author="AMD" contact="Daniel Rakos @drakos-amd" supported="disabled">
+            <require>
+                <enum value="0"                                         name="VK_AMD_EXTENSION_24_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_24&quot;"           name="VK_AMD_EXTENSION_24_EXTENSION_NAME"/>
+                <enum bitpos="6" extends="VkQueueFlagBits"              name="VK_QUEUE_RESERVED_6_BIT_KHR"/>
+                <enum bitpos="27" extends="VkPipelineStageFlagBits"     name="VK_PIPELINE_STAGE_RESERVED_27_BIT_KHR"/>
+                <enum bitpos="30" extends="VkAccessFlagBits"            name="VK_ACCESS_RESERVED_30_BIT_KHR"/>
+                <enum bitpos="31" extends="VkAccessFlagBits"            name="VK_ACCESS_RESERVED_31_BIT_KHR"/>
+                <enum bitpos="15" extends="VkBufferUsageFlagBits"       name="VK_BUFFER_USAGE_RESERVED_15_BIT_KHR"/>
+                <enum bitpos="16" extends="VkBufferUsageFlagBits"       name="VK_BUFFER_USAGE_RESERVED_16_BIT_KHR"/>
+                <enum bitpos="13" extends="VkImageUsageFlagBits"        name="VK_IMAGE_USAGE_RESERVED_13_BIT_KHR"/>
+                <enum bitpos="14" extends="VkImageUsageFlagBits"        name="VK_IMAGE_USAGE_RESERVED_14_BIT_KHR"/>
+                <enum bitpos="15" extends="VkImageUsageFlagBits"        name="VK_IMAGE_USAGE_RESERVED_15_BIT_KHR"/>
+                <enum bitpos="27" extends="VkFormatFeatureFlagBits"     name="VK_FORMAT_FEATURE_RESERVED_27_BIT_KHR"/>
+                <enum bitpos="28" extends="VkFormatFeatureFlagBits"     name="VK_FORMAT_FEATURE_RESERVED_28_BIT_KHR"/>
+                <enum offset="8" extends="VkQueryType"                  name="VK_QUERY_TYPE_RESERVED_8"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_25" number="25" author="AMD" contact="Daniel Rakos @drakos-amd" supported="disabled">
+            <require>
+                <enum value="0"                                         name="VK_AMD_EXTENSION_25_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_25&quot;"           name="VK_AMD_EXTENSION_25_EXTENSION_NAME"/>
+                <enum bitpos="5" extends="VkQueueFlagBits"              name="VK_QUEUE_RESERVED_5_BIT_KHR"/>
+                <enum bitpos="26" extends="VkPipelineStageFlagBits"     name="VK_PIPELINE_STAGE_RESERVED_26_BIT_KHR"/>
+                <enum bitpos="28" extends="VkAccessFlagBits"            name="VK_ACCESS_RESERVED_28_BIT_KHR"/>
+                <enum bitpos="29" extends="VkAccessFlagBits"            name="VK_ACCESS_RESERVED_29_BIT_KHR"/>
+                <enum bitpos="13" extends="VkBufferUsageFlagBits"       name="VK_BUFFER_USAGE_RESERVED_13_BIT_KHR"/>
+                <enum bitpos="14" extends="VkBufferUsageFlagBits"       name="VK_BUFFER_USAGE_RESERVED_14_BIT_KHR"/>
+                <enum bitpos="10" extends="VkImageUsageFlagBits"        name="VK_IMAGE_USAGE_RESERVED_10_BIT_KHR"/>
+                <enum bitpos="11" extends="VkImageUsageFlagBits"        name="VK_IMAGE_USAGE_RESERVED_11_BIT_KHR"/>
+                <enum bitpos="12" extends="VkImageUsageFlagBits"        name="VK_IMAGE_USAGE_RESERVED_12_BIT_KHR"/>
+                <enum bitpos="25" extends="VkFormatFeatureFlagBits"     name="VK_FORMAT_FEATURE_RESERVED_25_BIT_KHR"/>
+                <enum bitpos="26" extends="VkFormatFeatureFlagBits"     name="VK_FORMAT_FEATURE_RESERVED_26_BIT_KHR"/>
+                <enum offset="4"  extends="VkQueryType"                 name="VK_QUERY_TYPE_RESERVED_4"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_gcn_shader" number="26" type="device" author="AMD" contact="Dominik Witczak @dominikwitczakamd" supported="vulkan">
+            <require>
+                <enum value="1"                                                 name="VK_AMD_GCN_SHADER_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_gcn_shader&quot;"                     name="VK_AMD_GCN_SHADER_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_dedicated_allocation" number="27" type="device" author="NV" contact="Jeff Bolz @jeffbolznv" supported="vulkan" deprecatedby="VK_KHR_dedicated_allocation">
+            <require>
+                <enum value="1"                                                 name="VK_NV_DEDICATED_ALLOCATION_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_dedicated_allocation&quot;"            name="VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV"/>
+                <enum offset="1" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV"/>
+                <enum offset="2" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV"/>
+                <type name="VkDedicatedAllocationImageCreateInfoNV"/>
+                <type name="VkDedicatedAllocationBufferCreateInfoNV"/>
+                <type name="VkDedicatedAllocationMemoryAllocateInfoNV"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_extension_28" number="28" author="NV" contact="Piers Daniell @pdaniell-nv" supported="disabled">
+            <require>
+                <enum value="0"                                                 name="VK_EXT_EXTENSION_28_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_extension_28&quot;"                    name="VK_EXT_EXTENSION_28_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_transform_feedback" number="29" type="device" author="NV" contact="Piers Daniell @pdaniell-nv" supported="vulkan" requires="VK_KHR_get_physical_device_properties2">
+            <require>
+                <enum value="1"                                                 name="VK_EXT_TRANSFORM_FEEDBACK_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_transform_feedback&quot;"             name="VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME"/>
+                <command name="vkCmdBindTransformFeedbackBuffersEXT"/>
+                <command name="vkCmdBeginTransformFeedbackEXT"/>
+                <command name="vkCmdEndTransformFeedbackEXT"/>
+                <command name="vkCmdBeginQueryIndexedEXT"/>
+                <command name="vkCmdEndQueryIndexedEXT"/>
+                <command name="vkCmdDrawIndirectByteCountEXT"/>
+
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT"/>
+                <enum offset="1" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT"/>
+                <enum offset="2" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT"/>
+
+                <enum offset="4" extends="VkQueryType"                          name="VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT"/>
+
+                <enum bitpos="11" extends="VkBufferUsageFlagBits"                name="VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT"/>
+                <enum bitpos="12" extends="VkBufferUsageFlagBits"                name="VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT"/>
+
+                <enum bitpos="25" extends="VkAccessFlagBits"                    name="VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT"/>
+                <enum bitpos="26" extends="VkAccessFlagBits"                    name="VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT"/>
+                <enum bitpos="27" extends="VkAccessFlagBits"                    name="VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT"/>
+
+                <enum bitpos="24" extends="VkPipelineStageFlagBits"             name="VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT"/>
+
+                <type name="VkPhysicalDeviceTransformFeedbackFeaturesEXT"/>
+                <type name="VkPhysicalDeviceTransformFeedbackPropertiesEXT"/>
+                <type name="VkPipelineRasterizationStateStreamCreateInfoEXT"/>
+
+                <type name="VkPipelineRasterizationStateStreamCreateFlagsEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_NVX_extension_30" number="30" author="NVX" contact="Jeff Juliano @jjulianoatnv" supported="disabled">
+            <require>
+                <enum value="0"                                                 name="VK_NVX_EXTENSION_30_SPEC_VERSION"/>
+                <enum value="&quot;VK_NVX_extension_30&quot;"                   name="VK_NVX_EXTENSION_30_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_NVX_image_view_handle" number="31" type="device" author="NVX" contact="Eric Werness @ewerness" supported="vulkan">
+            <require>
+                <enum value="1"                                                 name="VK_NVX_IMAGE_VIEW_HANDLE_SPEC_VERSION"/>
+                <enum value="&quot;VK_NVX_image_view_handle&quot;"              name="VK_NVX_IMAGE_VIEW_HANDLE_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX"/>
+                <type name="VkImageViewHandleInfoNVX"/>
+                <command name="vkGetImageViewHandleNVX"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_32" number="32" author="AMD" contact="Daniel Rakos @drakos-amd" supported="disabled">
+            <require>
+                <enum value="0"                                                 name="VK_AMD_EXTENSION_32_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_32&quot;"                   name="VK_AMD_EXTENSION_32_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_33" number="33" author="AMD" contact="Daniel Rakos @drakos-amd" supported="disabled">
+            <require>
+                <enum value="0"                                                 name="VK_AMD_EXTENSION_33_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_33&quot;"                   name="VK_AMD_EXTENSION_33_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_draw_indirect_count" number="34" type="device" author="AMD" contact="Daniel Rakos @drakos-amd" supported="vulkan" promotedto="VK_KHR_draw_indirect_count">
+            <require>
+                <enum value="2"                                                 name="VK_AMD_DRAW_INDIRECT_COUNT_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_draw_indirect_count&quot;"            name="VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME"/>
+                <command name="vkCmdDrawIndirectCountAMD"/>
+                <command name="vkCmdDrawIndexedIndirectCountAMD"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_35" number="35" author="AMD" contact="Daniel Rakos @drakos-amd" supported="disabled">
+            <require>
+                <enum value="0"                                                 name="VK_AMD_EXTENSION_35_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_35&quot;"                   name="VK_AMD_EXTENSION_35_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_negative_viewport_height" number="36" type="device" author="AMD" contact="Matthaeus G. Chajdas @anteru" supported="vulkan" obsoletedby="VK_KHR_maintenance1">
+            <require>
+                <enum value="1"                                                 name="VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_negative_viewport_height&quot;"       name="VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_gpu_shader_half_float" number="37" type="device" author="AMD" contact="Dominik Witczak @dominikwitczakamd" supported="vulkan" deprecatedby="VK_KHR_shader_float16_int8">
+            <require>
+                <enum value="2"                                                 name="VK_AMD_GPU_SHADER_HALF_FLOAT_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_gpu_shader_half_float&quot;"          name="VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_shader_ballot" number="38" type="device" author="AMD" contact="Dominik Witczak @dominikwitczakamd" supported="vulkan">
+            <require>
+                <enum value="1"                                                 name="VK_AMD_SHADER_BALLOT_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_shader_ballot&quot;"                  name="VK_AMD_SHADER_BALLOT_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_39" number="39" author="AMD" contact="Daniel Rakos @drakos-amd" supported="disabled">
+            <require>
+                <enum value="0"                                                 name="VK_AMD_EXTENSION_39_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_39&quot;"                   name="VK_AMD_EXTENSION_39_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_40" number="40" author="AMD" contact="Daniel Rakos @drakos-amd" supported="disabled">
+            <require>
+                <enum value="0"                                                 name="VK_AMD_EXTENSION_40_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_40&quot;"                   name="VK_AMD_EXTENSION_40_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_41" number="41" author="AMD" contact="Daniel Rakos @drakos-amd" supported="disabled">
+            <require>
+                <enum value="0"                                                 name="VK_AMD_EXTENSION_41_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_41&quot;"                   name="VK_AMD_EXTENSION_41_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_texture_gather_bias_lod" number="42" author="AMD" contact="Rex Xu @amdrexu" supported="vulkan" type="device" requires="VK_KHR_get_physical_device_properties2">
+            <require>
+                <enum value="1"                                                 name="VK_AMD_TEXTURE_GATHER_BIAS_LOD_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_texture_gather_bias_lod&quot;"        name="VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD"/>
+                <type name="VkTextureLODGatherFormatPropertiesAMD"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_shader_info" number="43" author="AMD" contact="Jaakko Konttinen @jaakkoamd" supported="vulkan" type="device">
+            <require>
+                <enum value="1"                                                 name="VK_AMD_SHADER_INFO_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_shader_info&quot;"                    name="VK_AMD_SHADER_INFO_EXTENSION_NAME"/>
+                <type name="VkShaderInfoTypeAMD"/>
+                <type name="VkShaderResourceUsageAMD"/>
+                <type name="VkShaderStatisticsInfoAMD"/>
+                <command name="vkGetShaderInfoAMD"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_44" number="44" author="AMD" contact="Daniel Rakos @drakos-amd" supported="disabled">
+            <require>
+                <enum value="0"                                                 name="VK_AMD_EXTENSION_44_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_44&quot;"                   name="VK_AMD_EXTENSION_44_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_45" number="45" author="AMD" contact="Daniel Rakos @drakos-amd" supported="disabled">
+            <require>
+                <enum value="0"                                                 name="VK_AMD_EXTENSION_45_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_45&quot;"                   name="VK_AMD_EXTENSION_45_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_46" number="46" author="AMD" contact="Daniel Rakos @drakos-amd" supported="disabled">
+            <require>
+                <enum value="0"                                                 name="VK_AMD_EXTENSION_46_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_46&quot;"                   name="VK_AMD_EXTENSION_46_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_shader_image_load_store_lod" number="47" author="AMD" contact="Dominik Witczak @dominikwitczakamd" supported="vulkan" type="device">
+            <require>
+                <enum value="1"                                                 name="VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_shader_image_load_store_lod&quot;"    name="VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_NVX_extension_48" number="48" author="NVX" contact="James Jones @cubanismo" supported="disabled">
+            <require>
+                <enum value="0"                                                 name="VK_NVX_EXTENSION_48_SPEC_VERSION"/>
+                <enum value="&quot;VK_NVX_extension_48&quot;"                   name="VK_NVX_EXTENSION_48_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_GOOGLE_extension_49" number="49" author="GOOGLE" contact="Jean-Francois Roy @jfroy" supported="disabled">
+            <require>
+                <enum value="0"                                                 name="VK_GOOGLE_EXTENSION_49_SPEC_VERSION"/>
+                <enum value="&quot;VK_GOOGLE_extension_49&quot;"                name="VK_GOOGLE_EXTENSION_49_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_GGP_stream_descriptor_surface" number="50" type="instance" requires="VK_KHR_surface" platform="ggp" author="GGP" contact="Jean-Francois Roy @jfroy" supported="vulkan">
+            <require>
+                <enum value="1"                                                 name="VK_GGP_STREAM_DESCRIPTOR_SURFACE_SPEC_VERSION"/>
+                <enum value="&quot;VK_GGP_stream_descriptor_surface&quot;"      name="VK_GGP_STREAM_DESCRIPTOR_SURFACE_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP"/>
+                <type name="VkStreamDescriptorSurfaceCreateFlagsGGP"/>
+                <type name="VkStreamDescriptorSurfaceCreateInfoGGP"/>
+                <command name="vkCreateStreamDescriptorSurfaceGGP"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_corner_sampled_image" number="51" author="NV" type="device" requires="VK_KHR_get_physical_device_properties2" contact="Daniel Koch @dgkoch" supported="vulkan">
+            <require>
+                <enum value="2"                                                 name="VK_NV_CORNER_SAMPLED_IMAGE_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_corner_sampled_image&quot;"            name="VK_NV_CORNER_SAMPLED_IMAGE_EXTENSION_NAME"/>
+                <enum bitpos="13" extends="VkImageCreateFlagBits"               name="VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV"/>
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV"/>
+                <type name="VkPhysicalDeviceCornerSampledImageFeaturesNV"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_extension_52" number="52" author="NV" contact="Daniel Koch @dgkoch" supported="disabled">
+            <require>
+                <enum value="0"                                                 name="VK_NV_EXTENSION_52_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_extension_52&quot;"                    name="VK_NV_EXTENSION_52_EXTENSION_NAME"/>
+                <enum bitpos="0" extends="VkShaderModuleCreateFlagBits"         name="VK_SHADER_MODULE_CREATE_RESERVED_0_BIT_NV"/>
+                <enum bitpos="2" extends="VkPipelineShaderStageCreateFlagBits"  name="VK_PIPELINE_SHADER_STAGE_CREATE_RESERVED_2_BIT_NV"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_extension_53" number="53" author="NV" contact="Jeff Bolz @jeffbolznv" supported="disabled">
+            <require>
+                <enum value="0"                                                 name="VK_NV_EXTENSION_53_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_extension_53&quot;"                    name="VK_NV_EXTENSION_53_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_multiview" number="54" type="device" author="KHR" requires="VK_KHR_get_physical_device_properties2" contact="Jeff Bolz @jeffbolznv" supported="vulkan" promotedto="VK_VERSION_1_1">
+            <require>
+                <enum value="1"                                                 name="VK_KHR_MULTIVIEW_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_multiview&quot;"                      name="VK_KHR_MULTIVIEW_EXTENSION_NAME"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR" alias="VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR" alias="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR" alias="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES"/>
+                <enum extends="VkDependencyFlagBits"                            name="VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR" alias="VK_DEPENDENCY_VIEW_LOCAL_BIT"/>
+                <type name="VkRenderPassMultiviewCreateInfoKHR"/>
+                <type name="VkPhysicalDeviceMultiviewFeaturesKHR"/>
+                <type name="VkPhysicalDeviceMultiviewPropertiesKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_IMG_format_pvrtc" number="55" type="device" author="IMG" contact="Stuart Smith" supported="vulkan">
+            <require>
+                <enum value="1"                                                 name="VK_IMG_FORMAT_PVRTC_SPEC_VERSION"/>
+                <enum value="&quot;VK_IMG_format_pvrtc&quot;"                   name="VK_IMG_FORMAT_PVRTC_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkFormat"                             name="VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG"/>
+                <enum offset="1" extends="VkFormat"                             name="VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG"/>
+                <enum offset="2" extends="VkFormat"                             name="VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG"/>
+                <enum offset="3" extends="VkFormat"                             name="VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG"/>
+                <enum offset="4" extends="VkFormat"                             name="VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG"/>
+                <enum offset="5" extends="VkFormat"                             name="VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG"/>
+                <enum offset="6" extends="VkFormat"                             name="VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG"/>
+                <enum offset="7" extends="VkFormat"                             name="VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_external_memory_capabilities" number="56" type="instance" author="NV" contact="James Jones @cubanismo" supported="vulkan" deprecatedby="VK_KHR_external_memory_capabilities">
+            <require>
+                <enum value="1"                                                 name="VK_NV_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_external_memory_capabilities&quot;"    name="VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME"/>
+                <type name="VkExternalMemoryHandleTypeFlagsNV"/>
+                <type name="VkExternalMemoryHandleTypeFlagBitsNV"/>
+                <type name="VkExternalMemoryFeatureFlagsNV"/>
+                <type name="VkExternalMemoryFeatureFlagBitsNV"/>
+                <type name="VkExternalImageFormatPropertiesNV"/>
+                <command name="vkGetPhysicalDeviceExternalImageFormatPropertiesNV"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_external_memory" number="57" type="device" requires="VK_NV_external_memory_capabilities" author="NV" contact="James Jones @cubanismo" supported="vulkan" deprecatedby="VK_KHR_external_memory">
+            <require>
+                <enum value="1"                                                 name="VK_NV_EXTERNAL_MEMORY_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_external_memory&quot;"                 name="VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV"/>
+                <enum offset="1" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV"/>
+                <type name="VkExternalMemoryImageCreateInfoNV"/>
+                <type name="VkExportMemoryAllocateInfoNV"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_external_memory_win32" number="58" type="device" requires="VK_NV_external_memory" author="NV" contact="James Jones @cubanismo" platform="win32" supported="vulkan" deprecatedby="VK_KHR_external_memory_win32">
+            <require>
+                <enum value="1"                                                 name="VK_NV_EXTERNAL_MEMORY_WIN32_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_external_memory_win32&quot;"           name="VK_NV_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV"/>
+                <enum offset="1" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV"/>
+                <type name="VkImportMemoryWin32HandleInfoNV"/>
+                <type name="VkExportMemoryWin32HandleInfoNV"/>
+                <command name="vkGetMemoryWin32HandleNV"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_win32_keyed_mutex" number="59" type="device" requires="VK_NV_external_memory_win32" author="NV" contact="Carsten Rohde @crohde" platform="win32" supported="vulkan" promotedto="VK_KHR_win32_keyed_mutex">
+            <require>
+                <enum value="2"                                                 name="VK_NV_WIN32_KEYED_MUTEX_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_win32_keyed_mutex&quot;"               name="VK_NV_WIN32_KEYED_MUTEX_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV"/>
+                <type name="VkWin32KeyedMutexAcquireReleaseInfoNV"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_get_physical_device_properties2" number="60" type="instance" author="KHR" contact="Jeff Bolz @jeffbolznv" supported="vulkan" promotedto="VK_VERSION_1_1">
+            <require>
+                <enum value="2"                                                 name="VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_get_physical_device_properties2&quot;" name="VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR" alias="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR" alias="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR" alias="VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR" alias="VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR" alias="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR" alias="VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR" alias="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR" alias="VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR" alias="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2"/>
+                <type name="VkPhysicalDeviceFeatures2KHR"/>
+                <type name="VkPhysicalDeviceProperties2KHR"/>
+                <type name="VkFormatProperties2KHR"/>
+                <type name="VkImageFormatProperties2KHR"/>
+                <type name="VkPhysicalDeviceImageFormatInfo2KHR"/>
+                <type name="VkQueueFamilyProperties2KHR"/>
+                <type name="VkPhysicalDeviceMemoryProperties2KHR"/>
+                <type name="VkSparseImageFormatProperties2KHR"/>
+                <type name="VkPhysicalDeviceSparseImageFormatInfo2KHR"/>
+                <command name="vkGetPhysicalDeviceFeatures2KHR"/>
+                <command name="vkGetPhysicalDeviceProperties2KHR"/>
+                <command name="vkGetPhysicalDeviceFormatProperties2KHR"/>
+                <command name="vkGetPhysicalDeviceImageFormatProperties2KHR"/>
+                <command name="vkGetPhysicalDeviceQueueFamilyProperties2KHR"/>
+                <command name="vkGetPhysicalDeviceMemoryProperties2KHR"/>
+                <command name="vkGetPhysicalDeviceSparseImageFormatProperties2KHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_device_group" number="61" type="device" author="KHR" requires="VK_KHR_device_group_creation" contact="Jeff Bolz @jeffbolznv" supported="vulkan" promotedto="VK_VERSION_1_1">
+            <require>
+                <enum value="4"                                                 name="VK_KHR_DEVICE_GROUP_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_device_group&quot;"                   name="VK_KHR_DEVICE_GROUP_EXTENSION_NAME"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR" alias="VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR" alias="VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR" alias="VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR" alias="VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR" alias="VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO"/>
+                <type name="VkPeerMemoryFeatureFlagsKHR"/>
+                <type name="VkPeerMemoryFeatureFlagBitsKHR"/>
+                <enum extends="VkPeerMemoryFeatureFlagBits"                     name="VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT_KHR" alias="VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT"/>
+                <enum extends="VkPeerMemoryFeatureFlagBits"                     name="VK_PEER_MEMORY_FEATURE_COPY_DST_BIT_KHR" alias="VK_PEER_MEMORY_FEATURE_COPY_DST_BIT"/>
+                <enum extends="VkPeerMemoryFeatureFlagBits"                     name="VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT_KHR" alias="VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT"/>
+                <enum extends="VkPeerMemoryFeatureFlagBits"                     name="VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT_KHR" alias="VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT"/>
+                <type name="VkMemoryAllocateFlagsKHR"/>
+                <type name="VkMemoryAllocateFlagBitsKHR"/>
+                <enum extends="VkMemoryAllocateFlagBits"                        name="VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT_KHR" alias="VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT"/>
+                <type name="VkMemoryAllocateFlagsInfoKHR"/>
+                <type name="VkDeviceGroupRenderPassBeginInfoKHR"/>
+                <type name="VkDeviceGroupCommandBufferBeginInfoKHR"/>
+                <type name="VkDeviceGroupSubmitInfoKHR"/>
+                <type name="VkDeviceGroupBindSparseInfoKHR"/>
+                <command name="vkGetDeviceGroupPeerMemoryFeaturesKHR"/>
+                <command name="vkCmdSetDeviceMaskKHR"/>
+                <command name="vkCmdDispatchBaseKHR"/>
+                <enum extends="VkPipelineCreateFlagBits"                        name="VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR" alias="VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT"/>
+                <enum extends="VkPipelineCreateFlagBits"                        name="VK_PIPELINE_CREATE_DISPATCH_BASE_KHR" alias="VK_PIPELINE_CREATE_DISPATCH_BASE"/>
+                <enum extends="VkDependencyFlagBits"                            name="VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR" alias="VK_DEPENDENCY_DEVICE_GROUP_BIT"/>
+            </require>
+            <require extension="VK_KHR_bind_memory2">
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR" alias="VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR" alias="VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO"/>
+                <type name="VkBindBufferMemoryDeviceGroupInfoKHR"/>
+                <type name="VkBindImageMemoryDeviceGroupInfoKHR"/>
+                <enum extends="VkImageCreateFlagBits"                           name="VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR" alias="VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT"/>
+            </require>
+            <require extension="VK_KHR_surface">
+                <enum offset="7" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR"/>
+                <type name="VkDeviceGroupPresentModeFlagBitsKHR"/>
+                <type name="VkDeviceGroupPresentModeFlagsKHR"/>
+                <type name="VkDeviceGroupPresentCapabilitiesKHR"/>
+                <command name="vkGetDeviceGroupPresentCapabilitiesKHR"/>
+                <command name="vkGetDeviceGroupSurfacePresentModesKHR"/>
+                <command name="vkGetPhysicalDevicePresentRectanglesKHR"/>
+            </require>
+            <require extension="VK_KHR_swapchain">
+                <enum offset="8" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR"/>
+                <enum offset="9" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR"/>
+                <enum offset="10" extends="VkStructureType"                     name="VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR"/>
+                <enum offset="11" extends="VkStructureType"                     name="VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR"/>
+                <enum offset="12" extends="VkStructureType"                     name="VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR"/>
+                <enum bitpos="0" extends="VkSwapchainCreateFlagBitsKHR"         name="VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR" comment="Allow images with VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT"/>
+                <type name="VkImageSwapchainCreateInfoKHR"/>
+                <type name="VkBindImageMemorySwapchainInfoKHR"/>
+                <type name="VkAcquireNextImageInfoKHR"/>
+                <type name="VkDeviceGroupPresentInfoKHR"/>
+                <type name="VkDeviceGroupSwapchainCreateInfoKHR"/>
+                <command name="vkAcquireNextImage2KHR"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_validation_flags" number="62" type="instance" author="GOOGLE" contact="Tobin Ehlis @tobine" supported="vulkan" deprecatedby="VK_EXT_validation_features">
+            <require>
+                <enum value="2"                                                 name="VK_EXT_VALIDATION_FLAGS_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_validation_flags&quot;"               name="VK_EXT_VALIDATION_FLAGS_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT"/>
+                <type name="VkValidationFlagsEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_NN_vi_surface" number="63" type="instance" author="NN" contact="Mathias Heyer gitlab:@mheyer" requires="VK_KHR_surface" platform="vi" supported="vulkan">
+            <require>
+                <enum value="1"                                                 name="VK_NN_VI_SURFACE_SPEC_VERSION"/>
+                <enum value="&quot;VK_NN_vi_surface&quot;"                      name="VK_NN_VI_SURFACE_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN"/>
+                <type name="VkViSurfaceCreateFlagsNN"/>
+                <type name="VkViSurfaceCreateInfoNN"/>
+                <command name="vkCreateViSurfaceNN"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_shader_draw_parameters" number="64" type="device" author="KHR" contact="Daniel Koch @dgkoch" supported="vulkan" promotedto="VK_VERSION_1_1">
+            <require>
+                <enum value="1"                                                 name="VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_shader_draw_parameters&quot;"         name="VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_shader_subgroup_ballot" number="65" type="device" author="NV" contact="Daniel Koch @dgkoch" supported="vulkan">
+            <require>
+                <enum value="1"                                                 name="VK_EXT_SHADER_SUBGROUP_BALLOT_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_shader_subgroup_ballot&quot;"         name="VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_shader_subgroup_vote" number="66" type="device" author="NV" contact="Daniel Koch @dgkoch" supported="vulkan">
+            <require>
+                <enum value="1"                                                 name="VK_EXT_SHADER_SUBGROUP_VOTE_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_shader_subgroup_vote&quot;"           name="VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_texture_compression_astc_hdr" number="67" type="device" author="ARM" contact="Jan-Harald Fredriksen @janharaldfredriksen-arm" requires="VK_KHR_get_physical_device_properties2" supported="vulkan">
+            <require>
+                <enum value="1"                                               name="VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_texture_compression_astc_hdr&quot;" name="VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                    name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT"/>
+                <type name="VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT"/>
+                <enum extends="VkFormat" extnumber="67" offset="0" name="VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT"/>
+                <enum extends="VkFormat" extnumber="67" offset="1" name="VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT"/>
+                <enum extends="VkFormat" extnumber="67" offset="2" name="VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT"/>
+                <enum extends="VkFormat" extnumber="67" offset="3" name="VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT"/>
+                <enum extends="VkFormat" extnumber="67" offset="4" name="VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT"/>
+                <enum extends="VkFormat" extnumber="67" offset="5" name="VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT"/>
+                <enum extends="VkFormat" extnumber="67" offset="6" name="VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT"/>
+                <enum extends="VkFormat" extnumber="67" offset="7" name="VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT"/>
+                <enum extends="VkFormat" extnumber="67" offset="8" name="VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT"/>
+                <enum extends="VkFormat" extnumber="67" offset="9" name="VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT"/>
+                <enum extends="VkFormat" extnumber="67" offset="10" name="VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT"/>
+                <enum extends="VkFormat" extnumber="67" offset="11" name="VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT"/>
+                <enum extends="VkFormat" extnumber="67" offset="12" name="VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT"/>
+                <enum extends="VkFormat" extnumber="67" offset="13" name="VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_astc_decode_mode" number="68" type="device" author="ARM" contact="Jan-Harald Fredriksen @janharaldfredriksen-arm" requires="VK_KHR_get_physical_device_properties2" supported="vulkan">
+            <require>
+                <enum value="1"                                         name="VK_EXT_ASTC_DECODE_MODE_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_astc_decode_mode&quot;"       name="VK_EXT_ASTC_DECODE_MODE_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT"/>
+                <enum offset="1" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT"/>
+                <type name="VkImageViewASTCDecodeModeEXT"/>
+                <type name="VkPhysicalDeviceASTCDecodeFeaturesEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_IMG_extension_69" number="69" type="device" author="IMG" contact="Tobias Hector @tobski" supported="disabled">
+            <require>
+                <enum value="0"                                                 name="VK_IMG_EXTENSION_69_SPEC_VERSION"/>
+                <enum value="&quot;VK_IMG_extension_69&quot;"                   name="VK_IMG_EXTENSION_69_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_maintenance1" number="70" type="device" author="KHR" contact="Piers Daniell @pdaniell-nv" supported="vulkan" promotedto="VK_VERSION_1_1">
+            <require>
+                <enum value="2"                                                 name="VK_KHR_MAINTENANCE1_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_maintenance1&quot;"                   name="VK_KHR_MAINTENANCE1_EXTENSION_NAME"/>
+                <enum extends="VkResult"                                        name="VK_ERROR_OUT_OF_POOL_MEMORY_KHR" alias="VK_ERROR_OUT_OF_POOL_MEMORY"/>
+                <enum extends="VkFormatFeatureFlagBits"                         name="VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR" alias="VK_FORMAT_FEATURE_TRANSFER_SRC_BIT"/>
+                <enum extends="VkFormatFeatureFlagBits"                         name="VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR" alias="VK_FORMAT_FEATURE_TRANSFER_DST_BIT"/>
+                <enum extends="VkImageCreateFlagBits"                           name="VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR" alias="VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT"/>
+                <type name="VkCommandPoolTrimFlagsKHR"/>
+                <command name="vkTrimCommandPoolKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_device_group_creation" number="71" type="instance" author="KHR" contact="Jeff Bolz @jeffbolznv" supported="vulkan" promotedto="VK_VERSION_1_1">
+            <require>
+                <enum value="1"                                                 name="VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_device_group_creation&quot;"          name="VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR" alias="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR" alias="VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO"/>
+                <enum name="VK_MAX_DEVICE_GROUP_SIZE_KHR"/>
+                <type name="VkPhysicalDeviceGroupPropertiesKHR"/>
+                <type name="VkDeviceGroupDeviceCreateInfoKHR"/>
+                <command name="vkEnumeratePhysicalDeviceGroupsKHR"/>
+                <enum extends="VkMemoryHeapFlagBits"                            name="VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR" alias="VK_MEMORY_HEAP_MULTI_INSTANCE_BIT"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_external_memory_capabilities" number="72" type="instance" author="KHR" requires="VK_KHR_get_physical_device_properties2" contact="James Jones @cubanismo" supported="vulkan" promotedto="VK_VERSION_1_1">
+            <require>
+                <enum value="1"                                                 name="VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_external_memory_capabilities&quot;"   name="VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR" alias="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR" alias="VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR" alias="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR" alias="VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR" alias="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES"/>
+                <enum name="VK_LUID_SIZE_KHR"/>
+                <type name="VkExternalMemoryHandleTypeFlagsKHR"/>
+                <type name="VkExternalMemoryHandleTypeFlagBitsKHR"/>
+                <enum extends="VkExternalMemoryHandleTypeFlagBits"              name="VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR" alias="VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT"/>
+                <enum extends="VkExternalMemoryHandleTypeFlagBits"              name="VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR" alias="VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT"/>
+                <enum extends="VkExternalMemoryHandleTypeFlagBits"              name="VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR" alias="VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT"/>
+                <enum extends="VkExternalMemoryHandleTypeFlagBits"              name="VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT_KHR" alias="VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT"/>
+                <enum extends="VkExternalMemoryHandleTypeFlagBits"              name="VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT_KHR" alias="VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT"/>
+                <enum extends="VkExternalMemoryHandleTypeFlagBits"              name="VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT_KHR" alias="VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT"/>
+                <enum extends="VkExternalMemoryHandleTypeFlagBits"              name="VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT_KHR" alias="VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT"/>
+                <type name="VkExternalMemoryFeatureFlagsKHR"/>
+                <type name="VkExternalMemoryFeatureFlagBitsKHR"/>
+                <enum extends="VkExternalMemoryFeatureFlagBits"                 name="VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_KHR" alias="VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT"/>
+                <enum extends="VkExternalMemoryFeatureFlagBits"                 name="VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR" alias="VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT"/>
+                <enum extends="VkExternalMemoryFeatureFlagBits"                 name="VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR" alias="VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT"/>
+                <type name="VkExternalMemoryPropertiesKHR"/>
+                <type name="VkPhysicalDeviceExternalImageFormatInfoKHR"/>
+                <type name="VkExternalImageFormatPropertiesKHR"/>
+                <type name="VkPhysicalDeviceExternalBufferInfoKHR"/>
+                <type name="VkExternalBufferPropertiesKHR"/>
+                <type name="VkPhysicalDeviceIDPropertiesKHR"/>
+                <command name="vkGetPhysicalDeviceExternalBufferPropertiesKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_external_memory" number="73" type="device" requires="VK_KHR_external_memory_capabilities" author="KHR" contact="James Jones @cubanismo" supported="vulkan" promotedto="VK_VERSION_1_1">
+            <require>
+                <enum value="1"                                                 name="VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_external_memory&quot;"                name="VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR" alias="VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR" alias="VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR" alias="VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO"/>
+                <enum extends="VkResult"                                        name="VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR" alias="VK_ERROR_INVALID_EXTERNAL_HANDLE"/>
+                <enum name="VK_QUEUE_FAMILY_EXTERNAL_KHR"/>
+                <type name="VkExternalMemoryImageCreateInfoKHR"/>
+                <type name="VkExternalMemoryBufferCreateInfoKHR"/>
+                <type name="VkExportMemoryAllocateInfoKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_external_memory_win32" number="74" type="device" requires="VK_KHR_external_memory" author="KHR" contact="James Jones @cubanismo" platform="win32" supported="vulkan">
+            <require>
+                <enum value="1"                                                 name="VK_KHR_EXTERNAL_MEMORY_WIN32_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_external_memory_win32&quot;"          name="VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR"/>
+                <enum offset="1" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR"/>
+                <enum offset="2" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR"/>
+                <enum offset="3" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR"/>
+                <type name="VkImportMemoryWin32HandleInfoKHR"/>
+                <type name="VkExportMemoryWin32HandleInfoKHR"/>
+                <type name="VkMemoryWin32HandlePropertiesKHR"/>
+                <type name="VkMemoryGetWin32HandleInfoKHR"/>
+                <command name="vkGetMemoryWin32HandleKHR"/>
+                <command name="vkGetMemoryWin32HandlePropertiesKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_external_memory_fd" number="75" type="device" requires="VK_KHR_external_memory" author="KHR" contact="James Jones @cubanismo" supported="vulkan">
+            <require>
+                <enum value="1"                                                 name="VK_KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_external_memory_fd&quot;"             name="VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR"/>
+                <enum offset="1" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR"/>
+                <enum offset="2" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR"/>
+                <type name="VkImportMemoryFdInfoKHR"/>
+                <type name="VkMemoryFdPropertiesKHR"/>
+                <type name="VkMemoryGetFdInfoKHR"/>
+                <command name="vkGetMemoryFdKHR"/>
+                <command name="vkGetMemoryFdPropertiesKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_win32_keyed_mutex" number="76" type="device" requires="VK_KHR_external_memory_win32" author="KHR" contact="Carsten Rohde @crohde" platform="win32" supported="vulkan">
+            <require>
+                <enum value="1"                                                 name="VK_KHR_WIN32_KEYED_MUTEX_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_win32_keyed_mutex&quot;"              name="VK_KHR_WIN32_KEYED_MUTEX_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR"/>
+                <type name="VkWin32KeyedMutexAcquireReleaseInfoKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_external_semaphore_capabilities" number="77" type="instance" author="KHR" requires="VK_KHR_get_physical_device_properties2" contact="James Jones @cubanismo" supported="vulkan" promotedto="VK_VERSION_1_1">
+            <require>
+                <enum value="1"                                                 name="VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_external_semaphore_capabilities&quot;" name="VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR" alias="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO"/>
+                <enum extends="VkStructureType"                                 name="VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR" alias="VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES"/>
+                <enum name="VK_LUID_SIZE_KHR"/>
+                <type name="VkExternalSemaphoreHandleTypeFlagsKHR"/>
+                <type name="VkExternalSemaphoreHandleTypeFlagBitsKHR"/>
+                <enum extends="VkExternalSemaphoreHandleTypeFlagBits"       name="VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR" alias="VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT"/>
+                <enum extends="VkExternalSemaphoreHandleTypeFlagBits"       name="VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR" alias="VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT"/>
+                <enum extends="VkExternalSemaphoreHandleTypeFlagBits"       name="VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR" alias="VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT"/>
+                <enum extends="VkExternalSemaphoreHandleTypeFlagBits"       name="VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT_KHR" alias="VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT"/>
+                <enum extends="VkExternalSemaphoreHandleTypeFlagBits"       name="VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR" alias="VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT"/>
+                <type name="VkExternalSemaphoreFeatureFlagsKHR"/>
+                <type name="VkExternalSemaphoreFeatureFlagBitsKHR"/>
+                <enum extends="VkExternalSemaphoreFeatureFlagBits"          name="VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR" alias="VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT"/>
+                <enum extends="VkExternalSemaphoreFeatureFlagBits"          name="VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR" alias="VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT"/>
+                <type name="VkPhysicalDeviceExternalSemaphoreInfoKHR"/>
+                <type name="VkExternalSemaphorePropertiesKHR"/>
+                <type name="VkPhysicalDeviceIDPropertiesKHR"/>
+                <command name="vkGetPhysicalDeviceExternalSemaphorePropertiesKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_external_semaphore" number="78" type="device" requires="VK_KHR_external_semaphore_capabilities" author="KHR" contact="James Jones @cubanismo" supported="vulkan" promotedto="VK_VERSION_1_1">
+            <require>
+                <enum value="1"                                             name="VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_external_semaphore&quot;"         name="VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR" alias="VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO"/>
+                <type name="VkSemaphoreImportFlagsKHR"/>
+                <type name="VkSemaphoreImportFlagBitsKHR"/>
+                <enum extends="VkSemaphoreImportFlagBits"                   name="VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR" alias="VK_SEMAPHORE_IMPORT_TEMPORARY_BIT"/>
+                <type name="VkExportSemaphoreCreateInfoKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_external_semaphore_win32" number="79" type="device" requires="VK_KHR_external_semaphore" author="KHR" contact="James Jones @cubanismo" platform="win32" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_KHR_EXTERNAL_SEMAPHORE_WIN32_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_external_semaphore_win32&quot;"   name="VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR"/>
+                <enum offset="1" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR"/>
+                <enum offset="2" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR"/>
+                <enum offset="3" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR"/>
+                <type name="VkImportSemaphoreWin32HandleInfoKHR"/>
+                <type name="VkExportSemaphoreWin32HandleInfoKHR"/>
+                <type name="VkD3D12FenceSubmitInfoKHR"/>
+                <type name="VkSemaphoreGetWin32HandleInfoKHR"/>
+                <command name="vkImportSemaphoreWin32HandleKHR"/>
+                <command name="vkGetSemaphoreWin32HandleKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_external_semaphore_fd" number="80" type="device" requires="VK_KHR_external_semaphore" author="KHR" contact="James Jones @cubanismo" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_external_semaphore_fd&quot;"      name="VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR"/>
+                <enum offset="1" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR"/>
+                <type name="VkImportSemaphoreFdInfoKHR"/>
+                <type name="VkSemaphoreGetFdInfoKHR"/>
+                <command name="vkImportSemaphoreFdKHR"/>
+                <command name="vkGetSemaphoreFdKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_push_descriptor" number="81" type="device" author="KHR" requires="VK_KHR_get_physical_device_properties2" contact="Jeff Bolz @jeffbolznv" supported="vulkan">
+            <require>
+                <enum value="2"                                             name="VK_KHR_PUSH_DESCRIPTOR_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_push_descriptor&quot;"            name="VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR"/>
+                <enum bitpos="0" extends="VkDescriptorSetLayoutCreateFlagBits"   name="VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR"  comment="Descriptors are pushed via flink:vkCmdPushDescriptorSetKHR"/>
+                <command name="vkCmdPushDescriptorSetKHR"/>
+                <type name="VkPhysicalDevicePushDescriptorPropertiesKHR"/>
+            </require>
+            <require feature="VK_VERSION_1_1">
+                <command name="vkCmdPushDescriptorSetWithTemplateKHR"/>
+                <enum value="1" extends="VkDescriptorUpdateTemplateType"    name="VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR" comment="Create descriptor update template for pushed descriptor updates"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_conditional_rendering" number="82" type="device" author="NV" contact="Vikram Kushwaha @vkushwaha" supported="vulkan">
+            <require>
+                <enum value="2"                                             name="VK_EXT_CONDITIONAL_RENDERING_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_conditional_rendering&quot;"      name="VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT"/>
+                <enum offset="1" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT"/>
+                <enum offset="2" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT"/>
+                <type name="VkConditionalRenderingFlagsEXT"/>
+                <type name="VkConditionalRenderingFlagBitsEXT"/>
+                <enum bitpos="20" extends="VkAccessFlagBits"                name="VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT"    comment="read access flag for reading conditional rendering predicate"/>
+                <enum bitpos="9"  extends="VkBufferUsageFlagBits"           name="VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT"   comment="Specifies the buffer can be used as predicate in conditional rendering"/>
+                <enum bitpos="18" extends="VkPipelineStageFlagBits"         name="VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT" comment="A pipeline stage for conditional rendering predicate fetch"/>
+                <command name="vkCmdBeginConditionalRenderingEXT"/>
+                <command name="vkCmdEndConditionalRenderingEXT"/>
+                <type name="VkConditionalRenderingBeginInfoEXT"/>
+                <type name="VkPhysicalDeviceConditionalRenderingFeaturesEXT"/>
+                <type name="VkCommandBufferInheritanceConditionalRenderingInfoEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_shader_float16_int8" number="83" type="device" requires="VK_KHR_get_physical_device_properties2" author="KHR" contact="Alexander Galazin @alegal-arm" supported="vulkan">
+            <require>
+                <enum value="1"                                           name="VK_KHR_SHADER_FLOAT16_INT8_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_shader_float16_int8&quot;"      name="VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR"/>
+                <enum            extends="VkStructureType"                name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR" alias="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR"/>
+                <type name="VkPhysicalDeviceShaderFloat16Int8FeaturesKHR"/>
+                <type name="VkPhysicalDeviceFloat16Int8FeaturesKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_16bit_storage" number="84" type="device" requires="VK_KHR_get_physical_device_properties2,VK_KHR_storage_buffer_storage_class" author="KHR" contact="Jan-Harald Fredriksen @janharaldfredriksen-arm" supported="vulkan" promotedto="VK_VERSION_1_1">
+            <require>
+                <enum value="1"                                             name="VK_KHR_16BIT_STORAGE_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_16bit_storage&quot;"              name="VK_KHR_16BIT_STORAGE_EXTENSION_NAME"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR" alias="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES"/>
+                <type name="VkPhysicalDevice16BitStorageFeaturesKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_incremental_present" number="85" type="device" author="KHR" requires="VK_KHR_swapchain" contact="Ian Elliott @ianelliottus" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_KHR_INCREMENTAL_PRESENT_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_incremental_present&quot;"        name="VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR"/>
+                <type name="VkPresentRegionsKHR"/>
+                <type name="VkPresentRegionKHR"/>
+                <type name="VkRectLayerKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_descriptor_update_template" number="86" type="device" author="KHR" contact="Markus Tavenrath @mtavenrath" supported="vulkan" promotedto="VK_VERSION_1_1">
+            <require>
+                <enum value="1"                                             name="VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_descriptor_update_template&quot;" name="VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR" alias="VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO"/>
+                <enum extends="VkObjectType"                                name="VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR" alias="VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE"/>
+                <command name="vkCreateDescriptorUpdateTemplateKHR"/>
+                <command name="vkDestroyDescriptorUpdateTemplateKHR"/>
+                <command name="vkUpdateDescriptorSetWithTemplateKHR"/>
+                <type name="VkDescriptorUpdateTemplateKHR"/>
+                <type name="VkDescriptorUpdateTemplateCreateFlagsKHR"/>
+                <type name="VkDescriptorUpdateTemplateTypeKHR"/>
+                <type name="VkDescriptorUpdateTemplateEntryKHR"/>
+                <type name="VkDescriptorUpdateTemplateCreateInfoKHR"/>
+                <enum extends="VkDescriptorUpdateTemplateType"              name="VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR" alias="VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET"/>
+            </require>
+            <require extension="VK_KHR_push_descriptor">
+                <command name="vkCmdPushDescriptorSetWithTemplateKHR"/>
+                <enum value="1" extends="VkDescriptorUpdateTemplateType"    name="VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR" comment="Create descriptor update template for pushed descriptor updates"/>
+            </require>
+            <require extension="VK_EXT_debug_report">
+                <enum extends="VkDebugReportObjectTypeEXT"                  name="VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT" alias="VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT"/>
+            </require>
+        </extension>
+        <extension name="VK_NVX_device_generated_commands" number="87" type="device" author="NVX" contact="Christoph Kubisch @pixeljetstream" supported="vulkan">
+            <require>
+                <enum value="3"                                             name="VK_NVX_DEVICE_GENERATED_COMMANDS_SPEC_VERSION"/>
+                <enum value="&quot;VK_NVX_device_generated_commands&quot;"  name="VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX"/>
+                <enum offset="1" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX"/>
+                <enum offset="2" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX"/>
+                <enum offset="3" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX"/>
+                <enum offset="4" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX"/>
+                <enum offset="5" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX"/>
+                <enum bitpos="17" extends="VkPipelineStageFlagBits"         name="VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX"/>
+                <enum bitpos="17" extends="VkAccessFlagBits"                name="VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX"/>
+                <enum bitpos="18" extends="VkAccessFlagBits"                name="VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX"/>
+                <enum offset="0" extends="VkObjectType"                     name="VK_OBJECT_TYPE_OBJECT_TABLE_NVX"                     comment="VkobjectTableNVX"/>
+                <enum offset="1" extends="VkObjectType"                     name="VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX"         comment="VkIndirectCommandsLayoutNVX"/>
+                <type name="VkObjectTableNVX"/>
+                <type name="VkIndirectCommandsLayoutNVX"/>
+                <type name="VkIndirectCommandsLayoutUsageFlagsNVX"/>
+                <type name="VkObjectEntryUsageFlagsNVX"/>
+                <type name="VkIndirectCommandsLayoutUsageFlagBitsNVX"/>
+                <type name="VkIndirectCommandsTokenTypeNVX"/>
+                <type name="VkObjectEntryUsageFlagBitsNVX"/>
+                <type name="VkObjectEntryTypeNVX"/>
+                <type name="VkDeviceGeneratedCommandsFeaturesNVX"/>
+                <type name="VkDeviceGeneratedCommandsLimitsNVX"/>
+                <type name="VkIndirectCommandsTokenNVX"/>
+                <type name="VkIndirectCommandsLayoutTokenNVX"/>
+                <type name="VkIndirectCommandsLayoutCreateInfoNVX"/>
+                <type name="VkCmdProcessCommandsInfoNVX"/>
+                <type name="VkCmdReserveSpaceForCommandsInfoNVX"/>
+                <type name="VkObjectTableCreateInfoNVX"/>
+                <type name="VkObjectTableEntryNVX"/>
+                <type name="VkObjectTablePipelineEntryNVX"/>
+                <type name="VkObjectTableDescriptorSetEntryNVX"/>
+                <type name="VkObjectTableVertexBufferEntryNVX"/>
+                <type name="VkObjectTableIndexBufferEntryNVX"/>
+                <type name="VkObjectTablePushConstantEntryNVX"/>
+                <command name="vkCmdProcessCommandsNVX"/>
+                <command name="vkCmdReserveSpaceForCommandsNVX"/>
+                <command name="vkCreateIndirectCommandsLayoutNVX"/>
+                <command name="vkDestroyIndirectCommandsLayoutNVX"/>
+                <command name="vkCreateObjectTableNVX"/>
+                <command name="vkDestroyObjectTableNVX"/>
+                <command name="vkRegisterObjectsNVX"/>
+                <command name="vkUnregisterObjectsNVX"/>
+                <command name="vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_clip_space_w_scaling" number="88" type="device" author="NV" contact="Eric Werness @ewerness-nv" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_NV_CLIP_SPACE_W_SCALING_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_clip_space_w_scaling&quot;"        name="VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV"/>
+                <enum offset="0" extends="VkDynamicState"                   name="VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV"/>
+                <type name="VkViewportWScalingNV"/>
+                <type name="VkPipelineViewportWScalingStateCreateInfoNV"/>
+                <command name="vkCmdSetViewportWScalingNV"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_direct_mode_display" number="89" type="instance" requires="VK_KHR_display" author="NV" contact="James Jones @cubanismo" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_EXT_DIRECT_MODE_DISPLAY_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_direct_mode_display&quot;"        name="VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME"/>
+                <command name="vkReleaseDisplayEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_acquire_xlib_display" number="90" type="instance" requires="VK_EXT_direct_mode_display" author="NV" contact="James Jones @cubanismo" platform="xlib_xrandr" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_EXT_ACQUIRE_XLIB_DISPLAY_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_acquire_xlib_display&quot;"       name="VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME"/>
+                <command name="vkAcquireXlibDisplayEXT"/>
+                <command name="vkGetRandROutputDisplayEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_display_surface_counter" number="91" type="instance" requires="VK_KHR_display" author="NV" contact="James Jones @cubanismo" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_display_surface_counter&quot;"    name="VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME"/>
+                <enum offset="0"                                           extends="VkStructureType" name="VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT"/>
+                <enum alias="VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT" extends="VkStructureType" name="VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT" comment="Backwards-compatible alias containing a typo"/>
+                <type name="VkSurfaceCounterFlagsEXT"/>
+                <type name="VkSurfaceCounterFlagBitsEXT"/>
+                <type name="VkSurfaceCapabilities2EXT"/>
+                <command name="vkGetPhysicalDeviceSurfaceCapabilities2EXT"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_display_control" number="92" type="device" requires="VK_EXT_display_surface_counter,VK_KHR_swapchain" author="NV" contact="James Jones @cubanismo" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_EXT_DISPLAY_CONTROL_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_display_control&quot;"            name="VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT"/>
+                <enum offset="1" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT"/>
+                <enum offset="2" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT"/>
+                <enum offset="3" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT"/>
+                <type name="VkDisplayPowerStateEXT"/>
+                <type name="VkDeviceEventTypeEXT"/>
+                <type name="VkDisplayEventTypeEXT"/>
+                <type name="VkDisplayPowerInfoEXT"/>
+                <type name="VkDeviceEventInfoEXT"/>
+                <type name="VkDisplayEventInfoEXT"/>
+                <type name="VkSwapchainCounterCreateInfoEXT"/>
+                <command name="vkDisplayPowerControlEXT"/>
+                <command name="vkRegisterDeviceEventEXT"/>
+                <command name="vkRegisterDisplayEventEXT"/>
+                <command name="vkGetSwapchainCounterEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_GOOGLE_display_timing" number="93" type="device" author="GOOGLE" requires="VK_KHR_swapchain" contact="Ian Elliott @ianelliottus" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_GOOGLE_DISPLAY_TIMING_SPEC_VERSION"/>
+                <enum value="&quot;VK_GOOGLE_display_timing&quot;"          name="VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE"/>
+                <type name="VkRefreshCycleDurationGOOGLE"/>
+                <type name="VkPastPresentationTimingGOOGLE"/>
+                <type name="VkPresentTimesInfoGOOGLE"/>
+                <type name="VkPresentTimeGOOGLE"/>
+                <command name="vkGetRefreshCycleDurationGOOGLE"/>
+                <command name="vkGetPastPresentationTimingGOOGLE"/>
+            </require>
+        </extension>
+        <extension name="RESERVED_DO_NOT_USE_94" number="94" supported="disabled" comment="Used for functionality subsumed into Vulkan 1.1 and not published as an extension">
+        </extension>
+        <extension name="VK_NV_sample_mask_override_coverage" number="95" type="device" author="NV" contact="Piers Daniell @pdaniell-nv" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_sample_mask_override_coverage&quot;" name="VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME"/>
+                <comment>
+                    enum offset=0 was mistakenly used for the 1.1 core enum
+                    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES
+                    (value=1000094000). Fortunately, no conflict resulted.
+                </comment>
+            </require>
+        </extension>
+        <extension name="VK_NV_geometry_shader_passthrough" number="96" type="device" author="NV" contact="Daniel Koch @dgkoch" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_NV_GEOMETRY_SHADER_PASSTHROUGH_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_geometry_shader_passthrough&quot;" name="VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_viewport_array2" number="97" type="device" author="NV" contact="Daniel Koch @dgkoch" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_NV_VIEWPORT_ARRAY2_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_viewport_array2&quot;"             name="VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_NVX_multiview_per_view_attributes" number="98" type="device" requires="VK_KHR_multiview" author="NVX" contact="Jeff Bolz @jeffbolznv" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION"/>
+                <enum value="&quot;VK_NVX_multiview_per_view_attributes&quot;" name="VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX"/>
+                <enum bitpos="0" extends="VkSubpassDescriptionFlagBits"     name="VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX"/>
+                <enum bitpos="1" extends="VkSubpassDescriptionFlagBits"     name="VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX"/>
+                <type name="VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_viewport_swizzle" number="99" type="device" author="NV" contact="Piers Daniell @pdaniell-nv" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_NV_VIEWPORT_SWIZZLE_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_viewport_swizzle&quot;"            name="VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV"/>
+                <type name="VkViewportSwizzleNV"/>
+                <type name="VkViewportCoordinateSwizzleNV"/>
+                <type name="VkPipelineViewportSwizzleStateCreateInfoNV"/>
+                <type name="VkPipelineViewportSwizzleStateCreateFlagsNV"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_discard_rectangles" number="100" type="device" requires="VK_KHR_get_physical_device_properties2" author="NV" contact="Piers Daniell @pdaniell-nv" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_discard_rectangles&quot;"         name="VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT"/>
+                <enum offset="1" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT"/>
+                <enum offset="0" extends="VkDynamicState"                   name="VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT"/>
+                <type name="VkPhysicalDeviceDiscardRectanglePropertiesEXT"/>
+                <type name="VkPipelineDiscardRectangleStateCreateInfoEXT"/>
+                <type name="VkPipelineDiscardRectangleStateCreateFlagsEXT"/>
+                <type name="VkDiscardRectangleModeEXT"/>
+                <command name="vkCmdSetDiscardRectangleEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_extension_101" number="101" author="NV" contact="Daniel Koch @dgkoch" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_NV_EXTENSION_101_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_extension_101&quot;"               name="VK_NV_EXTENSION_101_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_conservative_rasterization" number="102" type="device" requires="VK_KHR_get_physical_device_properties2" author="NV" contact="Piers Daniell @pdaniell-nv" supported="vulkan">
+            <require>
+                <enum value="1"                                         name="VK_EXT_CONSERVATIVE_RASTERIZATION_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_conservative_rasterization&quot;"    name="VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT"/>
+                <enum offset="1" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT"/>
+                <type name="VkPhysicalDeviceConservativeRasterizationPropertiesEXT"/>
+                <type name="VkPipelineRasterizationConservativeStateCreateInfoEXT"/>
+                <type name="VkPipelineRasterizationConservativeStateCreateFlagsEXT"/>
+                <type name="VkConservativeRasterizationModeEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_depth_clip_enable" number="103" type="device" author="EXT" contact="Piers Daniell @pdaniell-nv" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_EXT_DEPTH_CLIP_ENABLE_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_depth_clip_enable&quot;"          name="VK_EXT_DEPTH_CLIP_ENABLE_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT"/>
+                <enum offset="1" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT"/>
+                <type name="VkPhysicalDeviceDepthClipEnableFeaturesEXT"/>
+                <type name="VkPipelineRasterizationDepthClipStateCreateInfoEXT"/>
+                <type name="VkPipelineRasterizationDepthClipStateCreateFlagsEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_extension_104" number="104" author="NV" contact="Mathias Schott gitlab:@mschott" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_NV_EXTENSION_104_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_extension_104&quot;"               name="VK_NV_EXTENSION_104_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_swapchain_colorspace" number="105" type="instance" author="GOOGLE" contact="Courtney Goeltzenleuchter @courtney-g" requires="VK_KHR_surface" supported="vulkan">
+            <require>
+                <enum value="4"                                             name="VK_EXT_SWAPCHAIN_COLOR_SPACE_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_swapchain_colorspace&quot;"       name="VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME"/>
+                <enum offset="1" extends="VkColorSpaceKHR"                  name="VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT"/>
+                <enum offset="2" extends="VkColorSpaceKHR"                  name="VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT"/>
+                <enum offset="3" extends="VkColorSpaceKHR"                  name="VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT"/>
+                <enum offset="4" extends="VkColorSpaceKHR"                  name="VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT"/>
+                <enum offset="5" extends="VkColorSpaceKHR"                  name="VK_COLOR_SPACE_BT709_LINEAR_EXT"/>
+                <enum offset="6" extends="VkColorSpaceKHR"                  name="VK_COLOR_SPACE_BT709_NONLINEAR_EXT"/>
+                <enum offset="7" extends="VkColorSpaceKHR"                  name="VK_COLOR_SPACE_BT2020_LINEAR_EXT"/>
+                <enum offset="8" extends="VkColorSpaceKHR"                  name="VK_COLOR_SPACE_HDR10_ST2084_EXT"/>
+                <enum offset="9" extends="VkColorSpaceKHR"                  name="VK_COLOR_SPACE_DOLBYVISION_EXT"/>
+                <enum offset="10" extends="VkColorSpaceKHR"                 name="VK_COLOR_SPACE_HDR10_HLG_EXT"/>
+                <enum offset="11" extends="VkColorSpaceKHR"                 name="VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT"/>
+                <enum offset="12" extends="VkColorSpaceKHR"                 name="VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT"/>
+                <enum offset="13" extends="VkColorSpaceKHR"                 name="VK_COLOR_SPACE_PASS_THROUGH_EXT"/>
+                <enum offset="14" extends="VkColorSpaceKHR"                 name="VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT"/>
+                <enum extends="VkColorSpaceKHR" name="VK_COLOR_SPACE_DCI_P3_LINEAR_EXT" alias="VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT" comment="Deprecated name for backwards compatibility"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_hdr_metadata" number="106" type="device" requires="VK_KHR_swapchain" author="GOOGLE" contact="Courtney Goeltzenleuchter @courtney-g" supported="vulkan">
+            <require>
+                <enum value="2"                                             name="VK_EXT_HDR_METADATA_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_hdr_metadata&quot;"               name="VK_EXT_HDR_METADATA_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_HDR_METADATA_EXT"/>
+                <type name="VkHdrMetadataEXT"/>
+                <type name="VkXYColorEXT"/>
+                <command name="vkSetHdrMetadataEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_IMG_extension_107" number="107" author="IMG" contact="Michael Worcester @michaelworcester" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_IMG_EXTENSION_107_SPEC_VERSION"/>
+                <enum value="&quot;VK_IMG_extension_107&quot;"              name="VK_IMG_EXTENSION_107_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_IMG_extension_108" number="108" author="IMG" contact="Michael Worcester @michaelworcester" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_IMG_EXTENSION_108_SPEC_VERSION"/>
+                <enum value="&quot;VK_IMG_extension_108&quot;"              name="VK_IMG_EXTENSION_108_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_imageless_framebuffer" requires="VK_KHR_maintenance2,VK_KHR_image_format_list" number="109" author="KHR" contact="Tobias Hector @tobias" type="device" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_KHR_IMAGELESS_FRAMEBUFFER_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_imageless_framebuffer&quot;"      name="VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME"/>
+                <type name="VkPhysicalDeviceImagelessFramebufferFeaturesKHR"/>
+                <type name="VkFramebufferAttachmentsCreateInfoKHR"/>
+                <type name="VkRenderPassAttachmentBeginInfoKHR"/>
+                <enum offset="0" extends="VkStructureType" name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR"/>
+                <enum offset="1" extends="VkStructureType" name="VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR"/>
+                <enum offset="2" extends="VkStructureType" name="VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR"/>
+                <enum offset="3" extends="VkStructureType" name="VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR"/>
+                <enum bitpos="0" extends="VkFramebufferCreateFlagBits" name="VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_create_renderpass2" requires="VK_KHR_multiview,VK_KHR_maintenance2" number="110" contact="Tobias Hector @tobias" type="device" supported="vulkan">
+            <require>
+                <enum value="1"                                         name="VK_KHR_CREATE_RENDERPASS_2_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_create_renderpass2&quot;"     name="VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR"/>
+                <enum offset="1" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR"/>
+                <enum offset="2" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR"/>
+                <enum offset="3" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR"/>
+                <enum offset="4" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR"/>
+                <enum offset="5" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR"/>
+                <enum offset="6" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR"/>
+                <command name="vkCreateRenderPass2KHR"/>
+                <command name="vkCmdBeginRenderPass2KHR"/>
+                <command name="vkCmdNextSubpass2KHR"/>
+                <command name="vkCmdEndRenderPass2KHR"/>
+            </require>
+        </extension>
+        <extension name="VK_IMG_extension_111" number="111" author="IMG" contact="Michael Worcester @michaelworcester" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_IMG_EXTENSION_111_SPEC_VERSION"/>
+                <enum value="&quot;VK_IMG_extension_111&quot;"              name="VK_IMG_EXTENSION_111_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_shared_presentable_image" number="112" type="device" requires="VK_KHR_swapchain,VK_KHR_get_physical_device_properties2,VK_KHR_get_surface_capabilities2" author="KHR" contact="Alon Or-bach @alonorbach" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_KHR_SHARED_PRESENTABLE_IMAGE_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_shared_presentable_image&quot;"   name="VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR"/>
+                <enum offset="0" extends="VkPresentModeKHR"                 name="VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR"/>
+                <enum offset="1" extends="VkPresentModeKHR"                 name="VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR"/>
+                <enum offset="0" extends="VkImageLayout"                    name="VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR"/>
+                <type name="VkSharedPresentSurfaceCapabilitiesKHR"/>
+                <command name="vkGetSwapchainStatusKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_external_fence_capabilities" number="113" type="instance" author="KHR" requires="VK_KHR_get_physical_device_properties2" contact="Jesse Hall @critsec" supported="vulkan" promotedto="VK_VERSION_1_1">
+            <require>
+                <enum value="1"                                             name="VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_external_fence_capabilities&quot;" name="VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR" alias="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR" alias="VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES"/>
+                <enum name="VK_LUID_SIZE_KHR"/>
+                <type name="VkExternalFenceHandleTypeFlagsKHR"/>
+                <type name="VkExternalFenceHandleTypeFlagBitsKHR"/>
+                <enum extends="VkExternalFenceHandleTypeFlagBits"           name="VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR" alias="VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT"/>
+                <enum extends="VkExternalFenceHandleTypeFlagBits"           name="VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR" alias="VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT"/>
+                <enum extends="VkExternalFenceHandleTypeFlagBits"           name="VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR" alias="VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT"/>
+                <enum extends="VkExternalFenceHandleTypeFlagBits"           name="VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR" alias="VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT"/>
+                <type name="VkExternalFenceFeatureFlagsKHR"/>
+                <type name="VkExternalFenceFeatureFlagBitsKHR"/>
+                <enum extends="VkExternalFenceFeatureFlagBits"              name="VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR" alias="VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT"/>
+                <enum extends="VkExternalFenceFeatureFlagBits"              name="VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR" alias="VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT"/>
+                <type name="VkPhysicalDeviceExternalFenceInfoKHR"/>
+                <type name="VkExternalFencePropertiesKHR"/>
+                <type name="VkPhysicalDeviceIDPropertiesKHR"/>
+                <command name="vkGetPhysicalDeviceExternalFencePropertiesKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_external_fence" number="114" type="device" requires="VK_KHR_external_fence_capabilities" author="KHR" contact="Jesse Hall @critsec" supported="vulkan" promotedto="VK_VERSION_1_1">
+            <require>
+                <enum value="1"                                             name="VK_KHR_EXTERNAL_FENCE_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_external_fence&quot;"             name="VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR" alias="VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO"/>
+                <type name="VkFenceImportFlagsKHR"/>
+                <type name="VkFenceImportFlagBitsKHR"/>
+                <enum extends="VkFenceImportFlagBits"                       name="VK_FENCE_IMPORT_TEMPORARY_BIT_KHR" alias="VK_FENCE_IMPORT_TEMPORARY_BIT"/>
+                <type name="VkExportFenceCreateInfoKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_external_fence_win32" number="115" type="device" requires="VK_KHR_external_fence" author="KHR" contact="Jesse Hall @critsec" platform="win32" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_KHR_EXTERNAL_FENCE_WIN32_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_external_fence_win32&quot;"       name="VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR"/>
+                <enum offset="1" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR"/>
+                <enum offset="2" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR"/>
+                <type name="VkImportFenceWin32HandleInfoKHR"/>
+                <type name="VkExportFenceWin32HandleInfoKHR"/>
+                <type name="VkFenceGetWin32HandleInfoKHR"/>
+                <command name="vkImportFenceWin32HandleKHR"/>
+                <command name="vkGetFenceWin32HandleKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_external_fence_fd" number="116" type="device" requires="VK_KHR_external_fence" author="KHR" contact="Jesse Hall @critsec" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_KHR_EXTERNAL_FENCE_FD_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_external_fence_fd&quot;"          name="VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR"/>
+                <enum offset="1" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR"/>
+                <type name="VkImportFenceFdInfoKHR"/>
+                <type name="VkFenceGetFdInfoKHR"/>
+                <command name="vkImportFenceFdKHR"/>
+                <command name="vkGetFenceFdKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_performance_query" number="117" type="device" requires="VK_KHR_get_physical_device_properties2" author="KHR" contact="Alon Or-bach @alonorbach" supported="vulkan">
+            <require>
+                <enum value="1"                                    name="VK_KHR_PERFORMANCE_QUERY_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_performance_query&quot;" name="VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkQueryType"             name="VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR"/>
+                <enum offset="0" extends="VkStructureType"         name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR"/>
+                <enum offset="1" extends="VkStructureType"         name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR"/>
+                <enum offset="2" extends="VkStructureType"         name="VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR"/>
+                <enum offset="3" extends="VkStructureType"         name="VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR"/>
+                <enum offset="4" extends="VkStructureType"         name="VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR"/>
+                <enum offset="5" extends="VkStructureType"         name="VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR"/>
+                <enum offset="6" extends="VkStructureType"         name="VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR"/>
+                <type name="VkPhysicalDevicePerformanceQueryFeaturesKHR"/>
+                <type name="VkPhysicalDevicePerformanceQueryPropertiesKHR"/>
+                <type name="VkPerformanceCounterKHR"/>
+                <type name="VkPerformanceCounterDescriptionKHR"/>
+                <type name="VkPerformanceCounterDescriptionFlagsKHR"/>
+                <type name="VkPerformanceCounterDescriptionFlagBitsKHR"/>
+                <type name="VkQueryPoolPerformanceCreateInfoKHR"/>
+                <type name="VkPerformanceCounterScopeKHR"/>
+                <type name="VkPerformanceCounterStorageKHR"/>
+                <type name="VkPerformanceCounterUnitKHR"/>
+                <type name="VkPerformanceCounterResultKHR"/>
+                <type name="VkAcquireProfilingLockInfoKHR"/>
+                <type name="VkAcquireProfilingLockFlagsKHR"/>
+                <type name="VkAcquireProfilingLockFlagBitsKHR"/>
+                <type name="VkPerformanceQuerySubmitInfoKHR"/>
+                <command name="vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR"/>
+                <command name="vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR"/>
+                <command name="vkAcquireProfilingLockKHR"/>
+                <command name="vkReleaseProfilingLockKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_maintenance2" number="118" type="device" author="KHR" contact="Michael Worcester @michaelworcester" supported="vulkan" promotedto="VK_VERSION_1_1">
+            <require>
+                <enum value="1"                                             name="VK_KHR_MAINTENANCE2_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_maintenance2&quot;"               name="VK_KHR_MAINTENANCE2_EXTENSION_NAME"/>
+                <enum extends="VkImageCreateFlagBits"                       name="VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR" alias="VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT"/>
+                <enum extends="VkImageCreateFlagBits"                       name="VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR" alias="VK_IMAGE_CREATE_EXTENDED_USAGE_BIT"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR" alias="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR" alias="VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR" alias="VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR" alias="VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO"/>
+                <enum extends="VkImageLayout"                               name="VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR" alias="VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL"/>
+                <enum extends="VkImageLayout"                               name="VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR" alias="VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL"/>
+                <type name="VkPhysicalDevicePointClippingPropertiesKHR"/>
+                <type name="VkPointClippingBehaviorKHR"/>
+                <enum extends="VkPointClippingBehavior"                     name="VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES_KHR" alias="VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES"/>
+                <enum extends="VkPointClippingBehavior"                     name="VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY_KHR" alias="VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY"/>
+                <type name="VkRenderPassInputAttachmentAspectCreateInfoKHR"/>
+                <type name="VkInputAttachmentAspectReferenceKHR"/>
+                <type name="VkImageViewUsageCreateInfoKHR"/>
+                <type name="VkTessellationDomainOriginKHR"/>
+                <enum extends="VkTessellationDomainOrigin"                  name="VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT_KHR" alias="VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT"/>
+                <enum extends="VkTessellationDomainOrigin"                  name="VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT_KHR" alias="VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT"/>
+                <type name="VkPipelineTessellationDomainOriginStateCreateInfoKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_extension_119" number="119" author="KHR" contact="Michael Worcester @michaelworcester" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_KHR_EXTENSION_119_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_extension_119&quot;"              name="VK_KHR_EXTENSION_119_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_get_surface_capabilities2" number="120" type="instance" requires="VK_KHR_surface" author="KHR" contact="James Jones @cubanismo" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_KHR_GET_SURFACE_CAPABILITIES_2_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_get_surface_capabilities2&quot;"  name="VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR"/>
+                <enum offset="1" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR"/>
+                <enum offset="2" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR"/>
+                <type name="VkPhysicalDeviceSurfaceInfo2KHR"/>
+                <type name="VkSurfaceCapabilities2KHR"/>
+                <type name="VkSurfaceFormat2KHR"/>
+                <command name="vkGetPhysicalDeviceSurfaceCapabilities2KHR"/>
+                <command name="vkGetPhysicalDeviceSurfaceFormats2KHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_variable_pointers" number="121" type="device" author="KHR" contact="Jesse Hall @critsec" requires="VK_KHR_get_physical_device_properties2,VK_KHR_storage_buffer_storage_class" supported="vulkan" promotedto="VK_VERSION_1_1">
+            <require>
+                <enum value="1"                                             name="VK_KHR_VARIABLE_POINTERS_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_variable_pointers&quot;"          name="VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR" alias="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR" alias="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES"/>
+                <type name="VkPhysicalDeviceVariablePointerFeaturesKHR"/>
+                <type name="VkPhysicalDeviceVariablePointersFeaturesKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_get_display_properties2" number="122" type="instance" requires="VK_KHR_display" author="KHR" contact="James Jones @cubanismo" supported="vulkan">
+            <require>
+                <enum value="1"                                         name="VK_KHR_GET_DISPLAY_PROPERTIES_2_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_get_display_properties2&quot;" name="VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR"/>
+                <enum offset="1" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR"/>
+                <enum offset="2" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR"/>
+                <enum offset="3" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR"/>
+                <enum offset="4" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR"/>
+                <type name="VkDisplayProperties2KHR"/>
+                <type name="VkDisplayPlaneProperties2KHR"/>
+                <type name="VkDisplayModeProperties2KHR"/>
+                <type name="VkDisplayPlaneInfo2KHR"/>
+                <type name="VkDisplayPlaneCapabilities2KHR"/>
+                <command name="vkGetPhysicalDeviceDisplayProperties2KHR"/>
+                <command name="vkGetPhysicalDeviceDisplayPlaneProperties2KHR"/>
+                <command name="vkGetDisplayModeProperties2KHR"/>
+                <command name="vkGetDisplayPlaneCapabilities2KHR"/>
+            </require>
+        </extension>
+        <extension name="VK_MVK_ios_surface" number="123" type="instance" requires="VK_KHR_surface" platform="ios" supported="vulkan" author="MVK" contact="Bill Hollings @billhollings">
+            <require>
+                <enum value="2"                                             name="VK_MVK_IOS_SURFACE_SPEC_VERSION"/>
+                <enum value="&quot;VK_MVK_ios_surface&quot;"                name="VK_MVK_IOS_SURFACE_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK"/>
+                <type name="VkIOSSurfaceCreateFlagsMVK"/>
+                <type name="VkIOSSurfaceCreateInfoMVK"/>
+                <command name="vkCreateIOSSurfaceMVK"/>
+            </require>
+        </extension>
+        <extension name="VK_MVK_macos_surface" number="124" type="instance" requires="VK_KHR_surface" platform="macos" supported="vulkan" author="MVK" contact="Bill Hollings @billhollings">
+            <require>
+                <enum value="2"                                             name="VK_MVK_MACOS_SURFACE_SPEC_VERSION"/>
+                <enum value="&quot;VK_MVK_macos_surface&quot;"              name="VK_MVK_MACOS_SURFACE_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK"/>
+                <type name="VkMacOSSurfaceCreateFlagsMVK"/>
+                <type name="VkMacOSSurfaceCreateInfoMVK"/>
+                <command name="vkCreateMacOSSurfaceMVK"/>
+            </require>
+        </extension>
+        <extension name="VK_MVK_moltenvk" number="125" type="instance" author="MVK" contact="Bill Hollings @billhollings" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_MVK_MOLTENVK_SPEC_VERSION"/>
+                <enum value="&quot;VK_MVK_moltenvk&quot;"                   name="VK_MVK_MOLTENVK_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_external_memory_dma_buf" number="126" type="device" requires="VK_KHR_external_memory_fd" author="EXT" contact="Chad Versace @chadversary" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_EXT_EXTERNAL_MEMORY_DMA_BUF_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_external_memory_dma_buf&quot;"    name="VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME"/>
+                <enum bitpos="9" extends="VkExternalMemoryHandleTypeFlagBits" name="VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_queue_family_foreign" number="127" type="device" author="EXT" requires="VK_KHR_external_memory" contact="Chad Versace @chadversary" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_queue_family_foreign&quot;"       name="VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME"/>
+                <enum                                                       name="VK_QUEUE_FAMILY_FOREIGN_EXT"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_dedicated_allocation" number="128" type="device" author="KHR" requires="VK_KHR_get_memory_requirements2" contact="James Jones @cubanismo" supported="vulkan" promotedto="VK_VERSION_1_1">
+            <require>
+                <enum value="3"                                             name="VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_dedicated_allocation&quot;"       name="VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR" alias="VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR" alias="VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO"/>
+                <type name="VkMemoryDedicatedRequirementsKHR"/>
+                <type name="VkMemoryDedicatedAllocateInfoKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_debug_utils" number="129" type="instance" author="EXT" contact="Mark Young @marky-lunarg" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_EXT_DEBUG_UTILS_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_debug_utils&quot;"                name="VK_EXT_DEBUG_UTILS_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT"/>
+                <enum offset="1" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT"/>
+                <enum offset="2" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT"/>
+                <enum offset="3" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT"/>
+                <enum offset="4" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT"/>
+                <enum offset="0" extends="VkObjectType"                     name="VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT"          comment="VkDebugUtilsMessengerEXT"/>
+                <type name="VkDebugUtilsObjectNameInfoEXT"/>
+                <type name="VkDebugUtilsObjectTagInfoEXT"/>
+                <type name="VkDebugUtilsLabelEXT"/>
+                <type name="VkDebugUtilsMessengerCallbackDataEXT"/>
+                <type name="VkDebugUtilsMessengerCreateInfoEXT"/>
+                <command name="vkSetDebugUtilsObjectNameEXT"/>
+                <command name="vkSetDebugUtilsObjectTagEXT"/>
+                <command name="vkQueueBeginDebugUtilsLabelEXT"/>
+                <command name="vkQueueEndDebugUtilsLabelEXT"/>
+                <command name="vkQueueInsertDebugUtilsLabelEXT"/>
+                <command name="vkCmdBeginDebugUtilsLabelEXT"/>
+                <command name="vkCmdEndDebugUtilsLabelEXT"/>
+                <command name="vkCmdInsertDebugUtilsLabelEXT"/>
+                <command name="vkCreateDebugUtilsMessengerEXT"/>
+                <command name="vkDestroyDebugUtilsMessengerEXT"/>
+                <command name="vkSubmitDebugUtilsMessageEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_ANDROID_external_memory_android_hardware_buffer" number="130" type="device" author="ANDROID" requires="VK_KHR_sampler_ycbcr_conversion,VK_KHR_external_memory,VK_EXT_queue_family_foreign" platform="android" contact="Jesse Hall @critsec" supported="vulkan">
+            <require>
+                <enum value="3"                                             name="VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION"/>
+                <enum value="&quot;VK_ANDROID_external_memory_android_hardware_buffer&quot;" name="VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME"/>
+                <enum bitpos="10" extends="VkExternalMemoryHandleTypeFlagBits" name="VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID"/>
+                <enum offset="1" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID"/>
+                <enum offset="2" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID"/>
+                <enum offset="3" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID"/>
+                <enum offset="4" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID"/>
+                <enum offset="5" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID"/>
+                <type name="VkAndroidHardwareBufferUsageANDROID"/>
+                <type name="VkAndroidHardwareBufferPropertiesANDROID"/>
+                <type name="VkAndroidHardwareBufferFormatPropertiesANDROID"/>
+                <type name="VkImportAndroidHardwareBufferInfoANDROID"/>
+                <type name="VkMemoryGetAndroidHardwareBufferInfoANDROID"/>
+                <type name="VkExternalFormatANDROID"/>
+                <command name="vkGetAndroidHardwareBufferPropertiesANDROID"/>
+                <command name="vkGetMemoryAndroidHardwareBufferANDROID"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_sampler_filter_minmax" number="131" type="device" author="NV" requires="VK_KHR_get_physical_device_properties2" contact="Jeff Bolz @jeffbolznv" supported="vulkan">
+            <require>
+                <enum value="2"                                             name="VK_EXT_SAMPLER_FILTER_MINMAX_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_sampler_filter_minmax&quot;"      name="VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT"/>
+                <enum offset="1" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT"/>
+                <enum bitpos="16" extends="VkFormatFeatureFlagBits"         name="VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT" comment="Format can be used with min/max reduction filtering"/>
+                <type name="VkSamplerReductionModeEXT"/>
+                <type name="VkSamplerReductionModeCreateInfoEXT"/>
+                <type name="VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_storage_buffer_storage_class" number="132" type="device" author="KHR" contact="Alexander Galazin @alegal-arm" supported="vulkan" promotedto="VK_VERSION_1_1">
+            <require>
+                <enum value="1"                                             name="VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_storage_buffer_storage_class&quot;" name="VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_gpu_shader_int16" number="133" type="device" author="AMD" contact="Qun Lin @linqun" supported="vulkan" deprecatedby="VK_KHR_shader_float16_int8">
+            <require>
+                <enum value="2"                                             name="VK_AMD_GPU_SHADER_INT16_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_gpu_shader_int16&quot;"           name="VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_134" number="134" author="AMD" contact="Mais Alnasser @malnasse" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_AMD_EXTENSION_134_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_134&quot;"              name="VK_AMD_EXTENSION_134_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_135" number="135" author="AMD" contact="Mais Alnasser @malnasse" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_AMD_EXTENSION_135_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_135&quot;"              name="VK_AMD_EXTENSION_135_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_136" number="136" author="AMD" contact="Mais Alnasser @malnasse" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_AMD_EXTENSION_136_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_136&quot;"              name="VK_AMD_EXTENSION_136_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_mixed_attachment_samples" number="137" type="device" author="AMD" contact="Matthaeus G. Chajdas @anteru" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_mixed_attachment_samples&quot;"   name="VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_shader_fragment_mask" number="138" author="AMD" contact="Aaron Hagan @AaronHaganAMD" supported="vulkan" type="device">
+            <require>
+                <enum value="1"                                             name="VK_AMD_SHADER_FRAGMENT_MASK_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_shader_fragment_mask&quot;"       name="VK_AMD_SHADER_FRAGMENT_MASK_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_inline_uniform_block" number="139" type="device" author="EXT" requires="VK_KHR_get_physical_device_properties2,VK_KHR_maintenance1" contact="Daniel Rakos @aqnuep" supported="vulkan">
+            <require>
+                <enum value="1"                                          name="VK_EXT_INLINE_UNIFORM_BLOCK_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_inline_uniform_block&quot;"    name="VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkDescriptorType"              name="VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT"/>
+                <enum offset="0" extends="VkStructureType"               name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT"/>
+                <enum offset="1" extends="VkStructureType"               name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT"/>
+                <enum offset="2" extends="VkStructureType"               name="VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT"/>
+                <enum offset="3" extends="VkStructureType"               name="VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT"/>
+                <type name="VkPhysicalDeviceInlineUniformBlockFeaturesEXT"/>
+                <type name="VkPhysicalDeviceInlineUniformBlockPropertiesEXT"/>
+                <type name="VkWriteDescriptorSetInlineUniformBlockEXT"/>
+                <type name="VkDescriptorPoolInlineUniformBlockCreateInfoEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_140" number="140" author="AMD" contact="Mais Alnasser @malnasse" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_AMD_EXTENSION_140_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_140&quot;"              name="VK_AMD_EXTENSION_140_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_shader_stencil_export" number="141" type="device" author="EXT" contact="Dominik Witczak @dominikwitczakamd" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_shader_stencil_export&quot;"      name="VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_142" number="142" author="AMD" contact="Mais Alnasser @malnasse" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_AMD_EXTENSION_142_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_142&quot;"              name="VK_AMD_EXTENSION_142_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_143" number="143" author="AMD" contact="Mais Alnasser @malnasse" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_AMD_EXTENSION_143_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_143&quot;"              name="VK_AMD_EXTENSION_143_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_sample_locations" number="144" type="device" author="AMD" contact="Daniel Rakos @drakos-amd" supported="vulkan" requires="VK_KHR_get_physical_device_properties2">
+            <require>
+                <enum value="1"                                             name="VK_EXT_SAMPLE_LOCATIONS_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_sample_locations&quot;"           name="VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME"/>
+                <enum bitpos="12" extends="VkImageCreateFlagBits"           name="VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT"/>
+                <enum offset="1" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT"/>
+                <enum offset="2" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT"/>
+                <enum offset="3" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT"/>
+                <enum offset="4" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT"/>
+                <enum offset="0" extends="VkDynamicState"                   name="VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT"/>
+                <type name="VkSampleLocationEXT"/>
+                <type name="VkSampleLocationsInfoEXT"/>
+                <type name="VkAttachmentSampleLocationsEXT"/>
+                <type name="VkSubpassSampleLocationsEXT"/>
+                <type name="VkRenderPassSampleLocationsBeginInfoEXT"/>
+                <type name="VkPipelineSampleLocationsStateCreateInfoEXT"/>
+                <type name="VkPhysicalDeviceSampleLocationsPropertiesEXT"/>
+                <type name="VkMultisamplePropertiesEXT"/>
+                <command name="vkCmdSetSampleLocationsEXT"/>
+                <command name="vkGetPhysicalDeviceMultisamplePropertiesEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_relaxed_block_layout" number="145" type="device" author="KHR" contact="John Kessenich @johnkslang" supported="vulkan" promotedto="VK_VERSION_1_1">
+            <require>
+                <enum value="1"                                             name="VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_relaxed_block_layout&quot;"       name="VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="RESERVED_DO_NOT_USE_146" number="146" supported="disabled" comment="Used for functionality subsumed into Vulkan 1.1 and not published as an extension">
+        </extension>
+        <extension name="VK_KHR_get_memory_requirements2" number="147" type="device" author="KHR" contact="Jason Ekstrand @jekstrand" supported="vulkan" promotedto="VK_VERSION_1_1">
+            <require>
+                <enum value="1" name="VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_get_memory_requirements2&quot;"   name="VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR" alias="VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR" alias="VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR" alias="VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR" alias="VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR" alias="VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2"/>
+                <type name="VkBufferMemoryRequirementsInfo2KHR"/>
+                <type name="VkImageMemoryRequirementsInfo2KHR"/>
+                <type name="VkImageSparseMemoryRequirementsInfo2KHR"/>
+                <type name="VkMemoryRequirements2KHR"/>
+                <type name="VkSparseImageMemoryRequirements2KHR"/>
+                <command name="vkGetImageMemoryRequirements2KHR"/>
+                <command name="vkGetBufferMemoryRequirements2KHR"/>
+                <command name="vkGetImageSparseMemoryRequirements2KHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_image_format_list" number="148" type="device" author="KHR" contact="Jason Ekstrand @jekstrand" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_KHR_IMAGE_FORMAT_LIST_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_image_format_list&quot;"          name="VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR"/>
+                <type name="VkImageFormatListCreateInfoKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_blend_operation_advanced" number="149" type="device" author="NV" contact="Jeff Bolz @jeffbolznv" supported="vulkan">
+            <require>
+                <enum value="2"                                             name="VK_EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_blend_operation_advanced&quot;"   name="VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT"/>
+                <enum offset="1" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT"/>
+                <enum offset="2" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT"/>
+                <type name="VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT"/>
+                <type name="VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT"/>
+                <type name="VkPipelineColorBlendAdvancedStateCreateInfoEXT"/>
+                <type name="VkBlendOverlapEXT"/>
+                <enum offset="0" extends="VkBlendOp"                        name="VK_BLEND_OP_ZERO_EXT"/>
+                <enum offset="1" extends="VkBlendOp"                        name="VK_BLEND_OP_SRC_EXT"/>
+                <enum offset="2" extends="VkBlendOp"                        name="VK_BLEND_OP_DST_EXT"/>
+                <enum offset="3" extends="VkBlendOp"                        name="VK_BLEND_OP_SRC_OVER_EXT"/>
+                <enum offset="4" extends="VkBlendOp"                        name="VK_BLEND_OP_DST_OVER_EXT"/>
+                <enum offset="5" extends="VkBlendOp"                        name="VK_BLEND_OP_SRC_IN_EXT"/>
+                <enum offset="6" extends="VkBlendOp"                        name="VK_BLEND_OP_DST_IN_EXT"/>
+                <enum offset="7" extends="VkBlendOp"                        name="VK_BLEND_OP_SRC_OUT_EXT"/>
+                <enum offset="8" extends="VkBlendOp"                        name="VK_BLEND_OP_DST_OUT_EXT"/>
+                <enum offset="9" extends="VkBlendOp"                        name="VK_BLEND_OP_SRC_ATOP_EXT"/>
+                <enum offset="10" extends="VkBlendOp"                       name="VK_BLEND_OP_DST_ATOP_EXT"/>
+                <enum offset="11" extends="VkBlendOp"                       name="VK_BLEND_OP_XOR_EXT"/>
+                <enum offset="12" extends="VkBlendOp"                       name="VK_BLEND_OP_MULTIPLY_EXT"/>
+                <enum offset="13" extends="VkBlendOp"                       name="VK_BLEND_OP_SCREEN_EXT"/>
+                <enum offset="14" extends="VkBlendOp"                       name="VK_BLEND_OP_OVERLAY_EXT"/>
+                <enum offset="15" extends="VkBlendOp"                       name="VK_BLEND_OP_DARKEN_EXT"/>
+                <enum offset="16" extends="VkBlendOp"                       name="VK_BLEND_OP_LIGHTEN_EXT"/>
+                <enum offset="17" extends="VkBlendOp"                       name="VK_BLEND_OP_COLORDODGE_EXT"/>
+                <enum offset="18" extends="VkBlendOp"                       name="VK_BLEND_OP_COLORBURN_EXT"/>
+                <enum offset="19" extends="VkBlendOp"                       name="VK_BLEND_OP_HARDLIGHT_EXT"/>
+                <enum offset="20" extends="VkBlendOp"                       name="VK_BLEND_OP_SOFTLIGHT_EXT"/>
+                <enum offset="21" extends="VkBlendOp"                       name="VK_BLEND_OP_DIFFERENCE_EXT"/>
+                <enum offset="22" extends="VkBlendOp"                       name="VK_BLEND_OP_EXCLUSION_EXT"/>
+                <enum offset="23" extends="VkBlendOp"                       name="VK_BLEND_OP_INVERT_EXT"/>
+                <enum offset="24" extends="VkBlendOp"                       name="VK_BLEND_OP_INVERT_RGB_EXT"/>
+                <enum offset="25" extends="VkBlendOp"                       name="VK_BLEND_OP_LINEARDODGE_EXT"/>
+                <enum offset="26" extends="VkBlendOp"                       name="VK_BLEND_OP_LINEARBURN_EXT"/>
+                <enum offset="27" extends="VkBlendOp"                       name="VK_BLEND_OP_VIVIDLIGHT_EXT"/>
+                <enum offset="28" extends="VkBlendOp"                       name="VK_BLEND_OP_LINEARLIGHT_EXT"/>
+                <enum offset="29" extends="VkBlendOp"                       name="VK_BLEND_OP_PINLIGHT_EXT"/>
+                <enum offset="30" extends="VkBlendOp"                       name="VK_BLEND_OP_HARDMIX_EXT"/>
+                <enum offset="31" extends="VkBlendOp"                       name="VK_BLEND_OP_HSL_HUE_EXT"/>
+                <enum offset="32" extends="VkBlendOp"                       name="VK_BLEND_OP_HSL_SATURATION_EXT"/>
+                <enum offset="33" extends="VkBlendOp"                       name="VK_BLEND_OP_HSL_COLOR_EXT"/>
+                <enum offset="34" extends="VkBlendOp"                       name="VK_BLEND_OP_HSL_LUMINOSITY_EXT"/>
+                <enum offset="35" extends="VkBlendOp"                       name="VK_BLEND_OP_PLUS_EXT"/>
+                <enum offset="36" extends="VkBlendOp"                       name="VK_BLEND_OP_PLUS_CLAMPED_EXT"/>
+                <enum offset="37" extends="VkBlendOp"                       name="VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT"/>
+                <enum offset="38" extends="VkBlendOp"                       name="VK_BLEND_OP_PLUS_DARKER_EXT"/>
+                <enum offset="39" extends="VkBlendOp"                       name="VK_BLEND_OP_MINUS_EXT"/>
+                <enum offset="40" extends="VkBlendOp"                       name="VK_BLEND_OP_MINUS_CLAMPED_EXT"/>
+                <enum offset="41" extends="VkBlendOp"                       name="VK_BLEND_OP_CONTRAST_EXT"/>
+                <enum offset="42" extends="VkBlendOp"                       name="VK_BLEND_OP_INVERT_OVG_EXT"/>
+                <enum offset="43" extends="VkBlendOp"                       name="VK_BLEND_OP_RED_EXT"/>
+                <enum offset="44" extends="VkBlendOp"                       name="VK_BLEND_OP_GREEN_EXT"/>
+                <enum offset="45" extends="VkBlendOp"                       name="VK_BLEND_OP_BLUE_EXT"/>
+                <enum bitpos="19" extends="VkAccessFlagBits"                name="VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_fragment_coverage_to_color" number="150" type="device" author="NV" contact="Jeff Bolz @jeffbolznv" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_NV_FRAGMENT_COVERAGE_TO_COLOR_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_fragment_coverage_to_color&quot;"  name="VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV"/>
+                <type name="VkPipelineCoverageToColorStateCreateFlagsNV"/>
+                <type name="VkPipelineCoverageToColorStateCreateInfoNV"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_extension_151" number="151" author="NV" contact="Jeff Bolz @jeffbolznv" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_NV_EXTENSION_151_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_extension_151&quot;"               name="VK_NV_EXTENSION_151_EXTENSION_NAME"/>
+                <enum bitpos="11" extends="VkPipelineCreateFlagBits"        name="VK_PIPELINE_CREATE_EXTENSION_151_BIT0_NV"/>
+                <enum bitpos="29" extends="VkFormatFeatureFlagBits"         name="VK_FORMAT_FEATURE_RESERVED_29_BIT_NV"/>
+                <enum bitpos="14" extends="VkPipelineCreateFlagBits"        name="VK_PIPELINE_CREATE_EXTENSION_151_BIT1_NV"/>
+                <enum bitpos="15" extends="VkPipelineCreateFlagBits"        name="VK_PIPELINE_CREATE_EXTENSION_151_BIT2_NV"/>
+                <enum bitpos="16" extends="VkPipelineCreateFlagBits"        name="VK_PIPELINE_CREATE_EXTENSION_151_BIT3_NV"/>
+                <enum bitpos="17" extends="VkPipelineCreateFlagBits"        name="VK_PIPELINE_CREATE_EXTENSION_151_BIT4_NV"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_extension_152" number="152" author="NV" contact="Jeff Bolz @jeffbolznv" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_NV_EXTENSION_152_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_extension_152&quot;"               name="VK_NV_EXTENSION_152_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_framebuffer_mixed_samples" number="153" type="device" author="NV" contact="Jeff Bolz @jeffbolznv" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_framebuffer_mixed_samples&quot;"   name="VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV"/>
+                <type name="VkPipelineCoverageModulationStateCreateInfoNV"/>
+                <type name="VkPipelineCoverageModulationStateCreateFlagsNV"/>
+                <type name="VkCoverageModulationModeNV"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_fill_rectangle" number="154" type="device" author="NV" contact="Jeff Bolz @jeffbolznv" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_NV_FILL_RECTANGLE_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_fill_rectangle&quot;"              name="VK_NV_FILL_RECTANGLE_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkPolygonMode"                    name="VK_POLYGON_MODE_FILL_RECTANGLE_NV"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_shader_sm_builtins" number="155" type="device" requiresCore="1.1" author="NV" contact="Daniel Koch @dgkoch" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_NV_SHADER_SM_BUILTINS_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_shader_sm_builtins&quot;"          name="VK_NV_SHADER_SM_BUILTINS_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV"/>
+                <enum offset="1" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV"/>
+                <type name="VkPhysicalDeviceShaderSMBuiltinsPropertiesNV"/>
+                <type name="VkPhysicalDeviceShaderSMBuiltinsFeaturesNV"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_post_depth_coverage" number="156" type="device" author="NV" contact="Daniel Koch @dgkoch" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_EXT_POST_DEPTH_COVERAGE_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_post_depth_coverage&quot;"        name="VK_EXT_POST_DEPTH_COVERAGE_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_sampler_ycbcr_conversion" number="157" type="device" requires="VK_KHR_maintenance1,VK_KHR_bind_memory2,VK_KHR_get_memory_requirements2,VK_KHR_get_physical_device_properties2" author="KHR" contact="Andrew Garrard @fluppeteer" supported="vulkan" promotedto="VK_VERSION_1_1">
+            <require>
+                <enum value="14"                                            name="VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_sampler_ycbcr_conversion&quot;"   name="VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR" alias="VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR" alias="VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR" alias="VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR" alias="VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR" alias="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR" alias="VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES"/>
+                <enum extends="VkDebugReportObjectTypeEXT"                  name="VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT" alias="VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT"/>
+                <enum extends="VkObjectType"                                name="VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR" alias="VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_G8B8G8R8_422_UNORM_KHR" alias="VK_FORMAT_G8B8G8R8_422_UNORM"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_B8G8R8G8_422_UNORM_KHR" alias="VK_FORMAT_B8G8R8G8_422_UNORM"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR" alias="VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR" alias="VK_FORMAT_G8_B8R8_2PLANE_420_UNORM"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR" alias="VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR" alias="VK_FORMAT_G8_B8R8_2PLANE_422_UNORM"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR" alias="VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_R10X6_UNORM_PACK16_KHR" alias="VK_FORMAT_R10X6_UNORM_PACK16"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR" alias="VK_FORMAT_R10X6G10X6_UNORM_2PACK16"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR" alias="VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR" alias="VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR" alias="VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR" alias="VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR" alias="VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR" alias="VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR" alias="VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR" alias="VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_R12X4_UNORM_PACK16_KHR" alias="VK_FORMAT_R12X4_UNORM_PACK16"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR" alias="VK_FORMAT_R12X4G12X4_UNORM_2PACK16"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR" alias="VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR" alias="VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR" alias="VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR" alias="VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR" alias="VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR" alias="VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR" alias="VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR" alias="VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_G16B16G16R16_422_UNORM_KHR" alias="VK_FORMAT_G16B16G16R16_422_UNORM"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_B16G16R16G16_422_UNORM_KHR" alias="VK_FORMAT_B16G16R16G16_422_UNORM"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR" alias="VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR" alias="VK_FORMAT_G16_B16R16_2PLANE_420_UNORM"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR" alias="VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR" alias="VK_FORMAT_G16_B16R16_2PLANE_422_UNORM"/>
+                <enum extends="VkFormat"                                    name="VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR" alias="VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM"/>
+                <enum extends="VkImageAspectFlagBits"                       name="VK_IMAGE_ASPECT_PLANE_0_BIT_KHR" alias="VK_IMAGE_ASPECT_PLANE_0_BIT"/>
+                <enum extends="VkImageAspectFlagBits"                       name="VK_IMAGE_ASPECT_PLANE_1_BIT_KHR" alias="VK_IMAGE_ASPECT_PLANE_1_BIT"/>
+                <enum extends="VkImageAspectFlagBits"                       name="VK_IMAGE_ASPECT_PLANE_2_BIT_KHR" alias="VK_IMAGE_ASPECT_PLANE_2_BIT"/>
+                <enum extends="VkImageCreateFlagBits"                       name="VK_IMAGE_CREATE_DISJOINT_BIT_KHR" alias="VK_IMAGE_CREATE_DISJOINT_BIT"/>
+                <enum extends="VkFormatFeatureFlagBits"                     name="VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR" alias="VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT"/>
+                <enum extends="VkFormatFeatureFlagBits"                     name="VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR" alias="VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT"/>
+                <enum extends="VkFormatFeatureFlagBits"                     name="VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR" alias="VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT"/>
+                <enum extends="VkFormatFeatureFlagBits"                     name="VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR" alias="VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT"/>
+                <enum extends="VkFormatFeatureFlagBits"                     name="VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR" alias="VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT"/>
+                <enum extends="VkFormatFeatureFlagBits"                     name="VK_FORMAT_FEATURE_DISJOINT_BIT_KHR" alias="VK_FORMAT_FEATURE_DISJOINT_BIT"/>
+                <enum extends="VkFormatFeatureFlagBits"                     name="VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR" alias="VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT"/>
+                <type name="VkSamplerYcbcrConversionCreateInfoKHR"/>
+                <type name="VkSamplerYcbcrConversionInfoKHR"/>
+                <type name="VkBindImagePlaneMemoryInfoKHR"/>
+                <type name="VkImagePlaneMemoryRequirementsInfoKHR"/>
+                <type name="VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR"/>
+                <type name="VkSamplerYcbcrConversionImageFormatPropertiesKHR"/>
+                <command name="vkCreateSamplerYcbcrConversionKHR"/>
+                <command name="vkDestroySamplerYcbcrConversionKHR"/>
+                <type name="VkSamplerYcbcrConversionKHR"/>
+                <type name="VkSamplerYcbcrModelConversionKHR"/>
+                <enum extends="VkSamplerYcbcrModelConversion"               name="VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY_KHR" alias="VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY"/>
+                <enum extends="VkSamplerYcbcrModelConversion"               name="VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY_KHR" alias="VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY"/>
+                <enum extends="VkSamplerYcbcrModelConversion"               name="VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709_KHR" alias="VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709"/>
+                <enum extends="VkSamplerYcbcrModelConversion"               name="VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601_KHR" alias="VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601"/>
+                <enum extends="VkSamplerYcbcrModelConversion"               name="VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020_KHR" alias="VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020"/>
+                <type name="VkSamplerYcbcrRangeKHR"/>
+                <enum extends="VkSamplerYcbcrRange"                         name="VK_SAMPLER_YCBCR_RANGE_ITU_FULL_KHR" alias="VK_SAMPLER_YCBCR_RANGE_ITU_FULL"/>
+                <enum extends="VkSamplerYcbcrRange"                         name="VK_SAMPLER_YCBCR_RANGE_ITU_NARROW_KHR" alias="VK_SAMPLER_YCBCR_RANGE_ITU_NARROW"/>
+                <type name="VkChromaLocationKHR"/>
+                <enum extends="VkChromaLocation"                            name="VK_CHROMA_LOCATION_COSITED_EVEN_KHR" alias="VK_CHROMA_LOCATION_COSITED_EVEN"/>
+                <enum extends="VkChromaLocation"                            name="VK_CHROMA_LOCATION_MIDPOINT_KHR" alias="VK_CHROMA_LOCATION_MIDPOINT"/>
+            </require>
+            <require extension="VK_EXT_debug_report">
+                <enum extends="VkDebugReportObjectTypeEXT" offset="0"       name="VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_bind_memory2" number="158" type="device" author="KHR" contact="Tobias Hector @tobski" supported="vulkan" promotedto="VK_VERSION_1_1">
+            <require>
+                <enum value="1"                                             name="VK_KHR_BIND_MEMORY_2_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_bind_memory2&quot;"               name="VK_KHR_BIND_MEMORY_2_EXTENSION_NAME"/>
+                <command name="vkBindBufferMemory2KHR"/>
+                <command name="vkBindImageMemory2KHR"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR" alias="VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR" alias="VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO"/>
+                <enum extends="VkImageCreateFlagBits"                       name="VK_IMAGE_CREATE_ALIAS_BIT_KHR" alias="VK_IMAGE_CREATE_ALIAS_BIT"/>
+                <type name="VkBindBufferMemoryInfoKHR"/>
+                <type name="VkBindImageMemoryInfoKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_image_drm_format_modifier" number="159" type="device" requires="VK_KHR_bind_memory2,VK_KHR_get_physical_device_properties2,VK_KHR_image_format_list,VK_KHR_sampler_ycbcr_conversion" author="EXT" contact="Chad Versace @chadversary" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_image_drm_format_modifier&quot;"  name="VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME"/>
+
+                <enum offset="0" dir="-" extends="VkResult" name="VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT"/>
+
+                <enum offset="0" extends="VkStructureType" name="VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT"/>
+                <enum offset="1" extends="VkStructureType" name="VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT"/>
+                <enum offset="2" extends="VkStructureType" name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT"/>
+                <enum offset="3" extends="VkStructureType" name="VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT"/>
+                <enum offset="4" extends="VkStructureType" name="VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT"/>
+                <enum offset="5" extends="VkStructureType" name="VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT"/>
+
+                <enum offset="0" extends="VkImageTiling" name="VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT"/>
+
+                <enum bitpos="7"  extends="VkImageAspectFlagBits" name="VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT"/>
+                <enum bitpos="8"  extends="VkImageAspectFlagBits" name="VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT"/>
+                <enum bitpos="9"  extends="VkImageAspectFlagBits" name="VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT"/>
+                <enum bitpos="10" extends="VkImageAspectFlagBits" name="VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT"/>
+
+                <type name="VkDrmFormatModifierPropertiesListEXT"/>
+                <type name="VkDrmFormatModifierPropertiesEXT"/>
+                <type name="VkPhysicalDeviceImageDrmFormatModifierInfoEXT"/>
+                <type name="VkImageDrmFormatModifierListCreateInfoEXT"/>
+                <type name="VkImageDrmFormatModifierExplicitCreateInfoEXT"/>
+                <type name="VkImageDrmFormatModifierPropertiesEXT"/>
+
+                <command name="vkGetImageDrmFormatModifierPropertiesEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_extension_160" number="160" author="EXT" contact="Mark Young @marky-lunarg" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_EXT_EXTENSION_160_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_extension_160&quot;"              name="VK_EXT_EXTENSION_160_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_validation_cache" number="161" type="device" author="GOOGLE" contact="Cort Stratton @cdwfs" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_EXT_VALIDATION_CACHE_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_validation_cache&quot;"           name="VK_EXT_VALIDATION_CACHE_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT"/>
+                <enum offset="1" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT"/>
+                <enum offset="0" extends="VkObjectType"                     name="VK_OBJECT_TYPE_VALIDATION_CACHE_EXT" comment="VkValidationCacheEXT"/>
+                <type name="VkValidationCacheEXT"/>
+                <type name="VkValidationCacheCreateInfoEXT"/>
+                <type name="VkShaderModuleValidationCacheCreateInfoEXT"/>
+                <type name="VkValidationCacheHeaderVersionEXT"/>
+                <type name="VkValidationCacheCreateFlagsEXT"/>
+                <command name="vkCreateValidationCacheEXT"/>
+                <command name="vkDestroyValidationCacheEXT"/>
+                <command name="vkMergeValidationCachesEXT"/>
+                <command name="vkGetValidationCacheDataEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_descriptor_indexing" number="162" type="device" requires="VK_KHR_get_physical_device_properties2,VK_KHR_maintenance3" author="NV" contact="Jeff Bolz @jeffbolznv" supported="vulkan">
+            <require>
+                <enum value="2"                                          name="VK_EXT_DESCRIPTOR_INDEXING_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_descriptor_indexing&quot;"     name="VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"               name="VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT"/>
+                <enum offset="1" extends="VkStructureType"               name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT"/>
+                <enum offset="2" extends="VkStructureType"               name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT"/>
+                <enum offset="3" extends="VkStructureType"               name="VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT"/>
+                <enum offset="4" extends="VkStructureType"               name="VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT"/>
+                <enum bitpos="1" extends="VkDescriptorPoolCreateFlagBits" name="VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT"/>
+                <enum bitpos="1" extends="VkDescriptorSetLayoutCreateFlagBits" name="VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT"/>
+                <enum offset="0" dir="-" extends="VkResult"              name="VK_ERROR_FRAGMENTATION_EXT"/>
+                <type name="VkDescriptorSetLayoutBindingFlagsCreateInfoEXT"/>
+                <type name="VkPhysicalDeviceDescriptorIndexingFeaturesEXT"/>
+                <type name="VkPhysicalDeviceDescriptorIndexingPropertiesEXT"/>
+                <type name="VkDescriptorSetVariableDescriptorCountAllocateInfoEXT"/>
+                <type name="VkDescriptorSetVariableDescriptorCountLayoutSupportEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_shader_viewport_index_layer" number="163" type="device" author="NV" contact="Daniel Koch @dgkoch" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_shader_viewport_index_layer&quot;" name="VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_extension_164" number="164" author="NV" contact="Daniel Koch @dgkoch" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_EXT_EXTENSION_164_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_extension_164&quot;"               name="VK_EXT_EXTENSION_164_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_shading_rate_image" number="165" type="device" requires="VK_KHR_get_physical_device_properties2" author="NV" contact="Pat Brown @nvpbrown" supported="vulkan">
+            <require>
+                <enum value="3"                                             name="VK_NV_SHADING_RATE_IMAGE_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_shading_rate_image&quot;"          name="VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV"/>
+                <enum offset="1" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV"/>
+                <enum offset="2" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV"/>
+                <enum offset="3" extends="VkImageLayout"                    name="VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV"/>
+                <enum offset="4" extends="VkDynamicState"                   name="VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV"/>
+                <enum bitpos="23" extends="VkAccessFlagBits"                name="VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV"/>
+                <enum bitpos="8" extends="VkImageUsageFlagBits"             name="VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV"/>
+                <enum bitpos="22" extends="VkPipelineStageFlagBits"         name="VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV"/>
+                <enum offset="5" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV"/>
+                <enum offset="6" extends="VkDynamicState"                   name="VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV"/>
+                <type name="VkShadingRatePaletteEntryNV"/>
+                <type name="VkShadingRatePaletteNV"/>
+                <type name="VkPipelineViewportShadingRateImageStateCreateInfoNV"/>
+                <type name="VkPhysicalDeviceShadingRateImageFeaturesNV"/>
+                <type name="VkPhysicalDeviceShadingRateImagePropertiesNV"/>
+                <type name="VkCoarseSampleLocationNV"/>
+                <type name="VkCoarseSampleOrderCustomNV"/>
+                <type name="VkPipelineViewportCoarseSampleOrderStateCreateInfoNV"/>
+                <type name="VkCoarseSampleOrderTypeNV"/>
+                <command name="vkCmdBindShadingRateImageNV"/>
+                <command name="vkCmdSetViewportShadingRatePaletteNV"/>
+                <command name="vkCmdSetCoarseSampleOrderNV"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_ray_tracing" number="166" type="device" requires="VK_KHR_get_physical_device_properties2,VK_KHR_get_memory_requirements2" author="NV" contact="Eric Werness @ewerness" supported="vulkan">
+            <require>
+                <enum value="3"                                          name="VK_NV_RAY_TRACING_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_ray_tracing&quot;"              name="VK_NV_RAY_TRACING_EXTENSION_NAME"/>
+                <enum                                                    name="VK_SHADER_UNUSED_NV"/>
+                <enum offset="0" extends="VkStructureType"               name="VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV"/>
+                <enum offset="1" extends="VkStructureType"               name="VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV"/>
+                <enum offset="3" extends="VkStructureType"               name="VK_STRUCTURE_TYPE_GEOMETRY_NV"/>
+                <enum offset="4" extends="VkStructureType"               name="VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV"/>
+                <enum offset="5" extends="VkStructureType"               name="VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV"/>
+                <enum offset="6" extends="VkStructureType"               name="VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV"/>
+                <enum offset="7" extends="VkStructureType"               name="VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV"/>
+                <enum offset="8" extends="VkStructureType"               name="VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV"/>
+                <enum offset="9" extends="VkStructureType"               name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV"/>
+                <enum offset="11" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV"/>
+                <enum offset="12" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV"/>
+                <enum bitpos="8" extends="VkShaderStageFlagBits"         name="VK_SHADER_STAGE_RAYGEN_BIT_NV"/>
+                <enum bitpos="9" extends="VkShaderStageFlagBits"         name="VK_SHADER_STAGE_ANY_HIT_BIT_NV"/>
+                <enum bitpos="10" extends="VkShaderStageFlagBits"        name="VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV"/>
+                <enum bitpos="11" extends="VkShaderStageFlagBits"        name="VK_SHADER_STAGE_MISS_BIT_NV"/>
+                <enum bitpos="12" extends="VkShaderStageFlagBits"        name="VK_SHADER_STAGE_INTERSECTION_BIT_NV"/>
+                <enum bitpos="13" extends="VkShaderStageFlagBits"        name="VK_SHADER_STAGE_CALLABLE_BIT_NV"/>
+                <enum bitpos="21" extends="VkPipelineStageFlagBits"      name="VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV"/>
+                <enum bitpos="25" extends="VkPipelineStageFlagBits"      name="VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV"/>
+                <enum bitpos="10" extends="VkBufferUsageFlagBits"        name="VK_BUFFER_USAGE_RAY_TRACING_BIT_NV"/>
+                <enum offset="0" extends="VkPipelineBindPoint"           name="VK_PIPELINE_BIND_POINT_RAY_TRACING_NV"/>
+                <enum offset="0" extends="VkDescriptorType"              name="VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV"/>
+                <enum bitpos="21" extends="VkAccessFlagBits"             name="VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV"/>
+                <enum bitpos="22" extends="VkAccessFlagBits"             name="VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV"/>
+                <enum offset="0" extends="VkQueryType"                   name="VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV"/>
+                <enum bitpos="5" extends="VkPipelineCreateFlagBits"      name="VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV"/>
+                <enum offset="0" extends="VkObjectType"                  name="VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV"/>
+                <enum offset="0" extends="VkDebugReportObjectTypeEXT"    name="VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT"/>
+                <enum offset="0" extends="VkIndexType"                   name="VK_INDEX_TYPE_NONE_NV"/>
+                <type name="VkAccelerationStructureTypeNV"/>
+                <type name="VkRayTracingShaderGroupCreateInfoNV"/>
+                <type name="VkRayTracingShaderGroupTypeNV"/>
+                <type name="VkRayTracingPipelineCreateInfoNV"/>
+                <type name="VkGeometryTrianglesNV"/>
+                <type name="VkGeometryAABBNV"/>
+                <type name="VkGeometryDataNV"/>
+                <type name="VkGeometryNV"/>
+                <type name="VkGeometryFlagsNV"/>
+                <type name="VkGeometryInstanceFlagsNV"/>
+                <type name="VkGeometryFlagBitsNV"/>
+                <type name="VkGeometryInstanceFlagBitsNV"/>
+                <type name="VkAccelerationStructureInfoNV"/>
+                <type name="VkAccelerationStructureCreateInfoNV"/>
+                <type name="VkAccelerationStructureNV"/>
+                <type name="VkBuildAccelerationStructureFlagBitsNV"/>
+                <type name="VkBuildAccelerationStructureFlagsNV"/>
+                <type name="VkCopyAccelerationStructureModeNV"/>
+                <type name="VkGeometryTypeNV"/>
+                <type name="VkBindAccelerationStructureMemoryInfoNV"/>
+                <type name="VkWriteDescriptorSetAccelerationStructureNV"/>
+                <type name="VkAccelerationStructureMemoryRequirementsInfoNV"/>
+                <type name="VkPhysicalDeviceRayTracingPropertiesNV"/>
+                <type name="VkMemoryRequirements2KHR"/>
+                <type name="VkAccelerationStructureMemoryRequirementsTypeNV"/>
+                <command name="vkCreateAccelerationStructureNV"/>
+                <command name="vkDestroyAccelerationStructureNV"/>
+                <command name="vkGetAccelerationStructureMemoryRequirementsNV"/>
+                <command name="vkBindAccelerationStructureMemoryNV"/>
+                <command name="vkCmdBuildAccelerationStructureNV"/>
+                <command name="vkCmdCopyAccelerationStructureNV"/>
+                <command name="vkCmdTraceRaysNV"/>
+                <command name="vkCreateRayTracingPipelinesNV"/>
+                <command name="vkGetRayTracingShaderGroupHandlesNV"/>
+                <command name="vkGetAccelerationStructureHandleNV"/>
+                <command name="vkCmdWriteAccelerationStructuresPropertiesNV"/>
+                <command name="vkCompileDeferredNV"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_representative_fragment_test" number="167" type="device" author="NV" contact="Kedarnath Thangudu @kthangudu" supported="vulkan">
+            <require>
+                <enum value="2"                                             name="VK_NV_REPRESENTATIVE_FRAGMENT_TEST_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_representative_fragment_test&quot;" name="VK_NV_REPRESENTATIVE_FRAGMENT_TEST_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV"/>
+                <enum offset="1" extends="VkStructureType"  name="VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV"/>
+                <type name="VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV"/>
+                <type name="VkPipelineRepresentativeFragmentTestStateCreateInfoNV"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_extension_168" number="168" author="NV" contact="Daniel Koch @dgkoch" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_EXT_EXTENSION_168_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_extension_168&quot;"               name="VK_EXT_EXTENSION_168_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_maintenance3" number="169" type="device" requires="VK_KHR_get_physical_device_properties2" author="KHR" contact="Jeff Bolz @jeffbolznv" supported="vulkan" promotedto="VK_VERSION_1_1">
+            <require>
+                <enum value="1"                                             name="VK_KHR_MAINTENANCE3_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_maintenance3&quot;"               name="VK_KHR_MAINTENANCE3_EXTENSION_NAME"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR" alias="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR" alias="VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT"/>
+                <type name="VkPhysicalDeviceMaintenance3PropertiesKHR"/>
+                <type name="VkDescriptorSetLayoutSupportKHR"/>
+                <command name="vkGetDescriptorSetLayoutSupportKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_draw_indirect_count" number="170" type="device" author="KHR" contact="Piers Daniell @pdaniell-nv" supported="vulkan">
+            <require>
+                <enum value="1"                                          name="VK_KHR_DRAW_INDIRECT_COUNT_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_draw_indirect_count&quot;"     name="VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME"/>
+                <command name="vkCmdDrawIndirectCountKHR"/>
+                <command name="vkCmdDrawIndexedIndirectCountKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_filter_cubic" number="171" type="device" requires="VK_IMG_filter_cubic" author="QCOM" contact="Bill Licea-Kane @wwlk" supported="vulkan">
+            <require>
+                <enum value="2"                                             name="VK_EXT_FILTER_CUBIC_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_filter_cubic&quot;"               name="VK_EXT_FILTER_CUBIC_EXTENSION_NAME"/>
+                <enum extends="VkFilter"                                    name="VK_FILTER_CUBIC_EXT" alias="VK_FILTER_CUBIC_IMG"/>
+                <enum extends="VkFormatFeatureFlagBits"                     name="VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" alias="VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT"/>
+                <enum offset="1" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT"/>
+                <type name="VkPhysicalDeviceImageViewImageFormatInfoEXT"/>
+                <type name="VkFilterCubicImageViewImageFormatPropertiesEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_QCOM_extension_172" number="172" author="QCOM" contact="Bill Licea-Kane @wwlk" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_QCOM_extension_172_SPEC_VERSION"/>
+                <enum value="&quot;VK_QCOM_extension_172&quot;"             name="VK_QCOM_extension_172_EXTENSION_NAME"/>
+                <enum bitpos="2" extends="VkSubpassDescriptionFlagBits"     name="VK_SUBPASS_DESCRIPTION_RESERVED_2_BIT_QCOM"/>
+                <enum bitpos="3" extends="VkSubpassDescriptionFlagBits"     name="VK_SUBPASS_DESCRIPTION_RESERVED_3_BIT_QCOM"/>
+            </require>
+        </extension>
+        <extension name="VK_QCOM_extension_173" number="173" author="QCOM" contact="Bill Licea-Kane @wwlk" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_QCOM_extension_173_SPEC_VERSION"/>
+                <enum value="&quot;VK_QCOM_extension_173&quot;"             name="VK_QCOM_extension_173_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_QCOM_extension_174" number="174" author="QCOM" contact="Bill Licea-Kane @wwlk" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_QCOM_extension_174_SPEC_VERSION"/>
+                <enum value="&quot;VK_QCOM_extension_174&quot;"             name="VK_QCOM_extension_174_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_global_priority" number="175" type="device" author="EXT" contact="Andres Rodriguez @lostgoat" supported="vulkan">
+            <require>
+                <enum value="2"                                             name="VK_EXT_GLOBAL_PRIORITY_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_global_priority&quot;"            name="VK_EXT_GLOBAL_PRIORITY_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT"/>
+                <enum offset="1" dir="-" extends="VkResult"                 name="VK_ERROR_NOT_PERMITTED_EXT"/>
+                <type name="VkDeviceQueueGlobalPriorityCreateInfoEXT"/>
+                <type name="VkQueueGlobalPriorityEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_shader_subgroup_extended_types" number="176" type="device" requiresCore="1.1" author="KHR" contact="Neil Henning @sheredom" supported="vulkan">
+            <require>
+                <enum value="1"                                                 name="VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_shader_subgroup_extended_types&quot;" name="VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR"/>
+                <type name="VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_extension_177" number="177" author="EXT" contact="Neil Henning @sheredom" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_KHR_EXTENSION_177_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_extension_177&quot;"              name="VK_KHR_EXTENSION_177_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_8bit_storage" number="178" type="device" requires="VK_KHR_get_physical_device_properties2,VK_KHR_storage_buffer_storage_class" author="KHR" contact="Alexander Galazin @alegal-arm" supported="vulkan">
+            <require>
+                <enum value="1"                                          name="VK_KHR_8BIT_STORAGE_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_8bit_storage&quot;"            name="VK_KHR_8BIT_STORAGE_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"               name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR"/>
+                <type name="VkPhysicalDevice8BitStorageFeaturesKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_external_memory_host" number="179" type="device" author="EXT" requires="VK_KHR_external_memory" contact="Daniel Rakos @drakos-amd" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_EXT_EXTERNAL_MEMORY_HOST_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_external_memory_host&quot;"       name="VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT"/>
+                <enum offset="1" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT"/>
+                <enum offset="2" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT"/>
+                <enum bitpos="7" extends="VkExternalMemoryHandleTypeFlagBits" name="VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT"/>
+                <enum bitpos="8" extends="VkExternalMemoryHandleTypeFlagBits" name="VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT"/>
+                <type name="VkExternalMemoryHandleTypeFlagsKHR"/>
+                <type name="VkExternalMemoryHandleTypeFlagBitsKHR"/>
+                <type name="VkImportMemoryHostPointerInfoEXT"/>
+                <type name="VkMemoryHostPointerPropertiesEXT"/>
+                <type name="VkPhysicalDeviceExternalMemoryHostPropertiesEXT"/>
+                <command name="vkGetMemoryHostPointerPropertiesEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_buffer_marker" number="180" type="device" author="AMD" contact="Daniel Rakos @drakos-amd" supported="vulkan">
+            <require>
+                <enum value="1"                                          name="VK_AMD_BUFFER_MARKER_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_buffer_marker&quot;"           name="VK_AMD_BUFFER_MARKER_EXTENSION_NAME"/>
+                <command name="vkCmdWriteBufferMarkerAMD"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_shader_atomic_int64" number="181" type="device" author="KHR" requires="VK_KHR_get_physical_device_properties2" contact="Aaron Hagan @ahagan" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_KHR_SHADER_ATOMIC_INT64_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_shader_atomic_int64&quot;"        name="VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR"/>
+                <type name="VkPhysicalDeviceShaderAtomicInt64FeaturesKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_shader_clock" number="182" type="device" author="KHR" requires="VK_KHR_get_physical_device_properties2" contact="Aaron Hagan @ahagan" supported="vulkan">
+            <require>
+                <enum value="1"                                          name="VK_KHR_SHADER_CLOCK_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_shader_clock&quot;"            name="VK_KHR_SHADER_CLOCK_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"               name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR"/>
+                <type name="VkPhysicalDeviceShaderClockFeaturesKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_183" number="183" author="AMD" contact="Daniel Rakos @drakos-amd" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_KHR_EXTENSION_183_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_183&quot;"              name="VK_KHR_EXTENSION_183_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_pipeline_compiler_control" number="184" type="device" author="AMD" contact="Matthaeus G. Chajdas @anteru" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_AMD_PIPELINE_COMPILER_CONTROL_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_pipeline_compiler_control&quot;"  name="VK_AMD_PIPELINE_COMPILER_CONTROL_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD"/>
+                <type name="VkPipelineCompilerControlFlagBitsAMD"/>
+                <type name="VkPipelineCompilerControlFlagsAMD"/>
+                <type name="VkPipelineCompilerControlCreateInfoAMD"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_calibrated_timestamps" number="185" type="device" author="EXT" contact="Daniel Rakos @drakos-amd" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_EXT_CALIBRATED_TIMESTAMPS_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_calibrated_timestamps&quot;"      name="VK_EXT_CALIBRATED_TIMESTAMPS_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT"/>
+                <type name="VkTimeDomainEXT"/>
+                <type name="VkCalibratedTimestampInfoEXT"/>
+                <command name="vkGetPhysicalDeviceCalibrateableTimeDomainsEXT"/>
+                <command name="vkGetCalibratedTimestampsEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_shader_core_properties" number="186" type="device" author="AMD" requires="VK_KHR_get_physical_device_properties2" contact="Martin Dinkov @mdinkov" supported="vulkan">
+            <require>
+                <enum value="2"                                          name="VK_AMD_SHADER_CORE_PROPERTIES_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_shader_core_properties&quot;"  name="VK_AMD_SHADER_CORE_PROPERTIES_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"               name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD"/>
+                <type name="VkPhysicalDeviceShaderCorePropertiesAMD"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_187" number="187" author="AMD" contact="Daniel Rakos @drakos-amd" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_KHR_EXTENSION_187_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_187&quot;"              name="VK_KHR_EXTENSION_187_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_188" number="188" author="AMD" contact="Daniel Rakos @drakos-amd" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_KHR_EXTENSION_188_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_188&quot;"              name="VK_KHR_EXTENSION_188_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_189" number="189" author="AMD" contact="Daniel Rakos @drakos-amd" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_KHR_EXTENSION_189_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_189&quot;"              name="VK_KHR_EXTENSION_189_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_memory_overallocation_behavior" number="190" type="device" author="AMD" contact="Martin Dinkov @mdinkov" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_memory_overallocation_behavior&quot;"    name="VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD"/>
+                <type name="VkMemoryOverallocationBehaviorAMD"/>
+                <type name="VkDeviceMemoryOverallocationCreateInfoAMD"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_vertex_attribute_divisor" number="191" type="device" requires="VK_KHR_get_physical_device_properties2" author="NV" contact="Vikram Kushwaha @vkushwaha" supported="vulkan">
+            <require>
+                <enum value="3"                                         name="VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_vertex_attribute_divisor&quot;"   name="VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT"/>
+                <enum offset="1" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT"/>
+                <enum offset="2" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT"/>
+                <type name="VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT"/>
+                <type name="VkVertexInputBindingDivisorDescriptionEXT"/>
+                <type name="VkPipelineVertexInputDivisorStateCreateInfoEXT"/>
+                <type name="VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_GGP_frame_token" number="192" type="device" requires="VK_KHR_swapchain,VK_GGP_stream_descriptor_surface" platform="ggp" author="GGP" contact="Jean-Francois Roy @jfroy" supported="vulkan">
+            <require>
+                <enum value="1"                                                 name="VK_GGP_FRAME_TOKEN_SPEC_VERSION"/>
+                <enum value="&quot;VK_GGP_frame_token&quot;"                    name="VK_GGP_FRAME_TOKEN_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP"/>
+                <type name="VkPresentFrameTokenGGP"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_pipeline_creation_feedback" number="193" type="device" author="GOOGLE" contact="Jean-Francois Roy @jfroy" supported="vulkan">
+            <require>
+                <enum value="1"                                         name="VK_EXT_PIPELINE_CREATION_FEEDBACK_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_pipeline_creation_feedback&quot;" name="VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT"/>
+                <type name="VkPipelineCreationFeedbackFlagBitsEXT"/>
+                <type name="VkPipelineCreationFeedbackFlagsEXT"/>
+                <type name="VkPipelineCreationFeedbackCreateInfoEXT"/>
+                <type name="VkPipelineCreationFeedbackEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_GOOGLE_extension_194" number="194" author="GOOGLE" contact="Jean-Francois Roy @jfroy" supported="disabled">
+            <require>
+                <enum value="0"                                         name="VK_GOOGLE_EXTENSION_194_SPEC_VERSION"/>
+                <enum value="&quot;VK_GOOGLE_extension_194&quot;"       name="VK_GOOGLE_EXTENSION_194_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_GOOGLE_extension_195" number="195" author="GOOGLE" contact="Jean-Francois Roy @jfroy" supported="disabled">
+            <require>
+                <enum value="0"                                         name="VK_GOOGLE_EXTENSION_195_SPEC_VERSION"/>
+                <enum value="&quot;VK_GOOGLE_extension_195&quot;"       name="VK_GOOGLE_EXTENSION_195_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_GOOGLE_extension_196" number="196" author="GOOGLE" contact="Jean-Francois Roy @jfroy" supported="disabled">
+            <require>
+                <enum value="0"                                         name="VK_GOOGLE_EXTENSION_196_SPEC_VERSION"/>
+                <enum value="&quot;VK_GOOGLE_extension_196&quot;"       name="VK_GOOGLE_EXTENSION_196_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_driver_properties" number="197" type="device" requires="VK_KHR_get_physical_device_properties2" author="KHR" contact="Daniel Rakos @drakos-amd" supported="vulkan">
+            <require>
+                <enum value="1"                                         name="VK_KHR_DRIVER_PROPERTIES_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_driver_properties&quot;"      name="VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR"/>
+                <enum name="VK_MAX_DRIVER_NAME_SIZE_KHR"/>
+                <enum name="VK_MAX_DRIVER_INFO_SIZE_KHR"/>
+                <type name="VkDriverIdKHR"/>
+                <type name="VkConformanceVersionKHR"/>
+                <type name="VkPhysicalDeviceDriverPropertiesKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_shader_float_controls" number="198" type="device" requires="VK_KHR_get_physical_device_properties2" author="KHR" contact="Alexander Galazin @alegal-arm" supported="vulkan">
+            <require>
+                <enum value="4"                                           name="VK_KHR_SHADER_FLOAT_CONTROLS_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_shader_float_controls&quot;"    name="VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR"/>
+                <type name="VkPhysicalDeviceFloatControlsPropertiesKHR"/>
+                <type name="VkShaderFloatControlsIndependenceKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_shader_subgroup_partitioned" number="199" type="device" requiresCore="1.1" author="NV" contact="Jeff Bolz @jeffbolznv" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_shader_subgroup_partitioned&quot;" name="VK_NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME"/>
+                <enum bitpos="8" extends="VkSubgroupFeatureFlagBits"        name="VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_depth_stencil_resolve" number="200" type="device" requires="VK_KHR_create_renderpass2" author="KHR" contact="Jan-Harald Fredriksen @janharald" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_KHR_DEPTH_STENCIL_RESOLVE_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_depth_stencil_resolve&quot;"      name="VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR"/>
+                <enum offset="1" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR"/>
+                <type name="VkSubpassDescriptionDepthStencilResolveKHR"/>
+                <type name="VkPhysicalDeviceDepthStencilResolvePropertiesKHR"/>
+                <type name="VkResolveModeFlagBitsKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_swapchain_mutable_format" number="201" type="device" author="KHR" requires="VK_KHR_swapchain,VK_KHR_maintenance2,VK_KHR_image_format_list" contact="Daniel Rakos @drakos-arm" supported="vulkan">
+            <require>
+                <enum value="1"                                         name="VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_swapchain_mutable_format&quot;" name="VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME"/>
+                <enum bitpos="2" extends="VkSwapchainCreateFlagBitsKHR" name="VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_compute_shader_derivatives" number="202" type="device" requires="VK_KHR_get_physical_device_properties2" author="NV" contact="Pat Brown @nvpbrown" supported="vulkan">
+            <require>
+                <enum value="1"                                         name="VK_NV_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_compute_shader_derivatives&quot;" name="VK_NV_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType" name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV"/>
+                <type name="VkPhysicalDeviceComputeShaderDerivativesFeaturesNV"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_mesh_shader" number="203" type="device" requires="VK_KHR_get_physical_device_properties2" author="NV" contact="Christoph Kubisch @pixeljetstream" supported="vulkan">
+            <require>
+                <enum value="1"                                         name="VK_NV_MESH_SHADER_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_mesh_shader&quot;"             name="VK_NV_MESH_SHADER_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV"/>
+                <enum offset="1" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV"/>
+                <enum bitpos="6" extends="VkShaderStageFlagBits"        name="VK_SHADER_STAGE_TASK_BIT_NV"/>
+                <enum bitpos="7" extends="VkShaderStageFlagBits"        name="VK_SHADER_STAGE_MESH_BIT_NV"/>
+                <enum bitpos="19" extends="VkPipelineStageFlagBits"     name="VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV"/>
+                <enum bitpos="20" extends="VkPipelineStageFlagBits"     name="VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV"/>
+                <command name="vkCmdDrawMeshTasksNV"/>
+                <command name="vkCmdDrawMeshTasksIndirectNV"/>
+                <command name="vkCmdDrawMeshTasksIndirectCountNV"/>
+                <type name="VkPhysicalDeviceMeshShaderFeaturesNV"/>
+                <type name="VkPhysicalDeviceMeshShaderPropertiesNV"/>
+                <type name="VkDrawMeshTasksIndirectCommandNV"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_fragment_shader_barycentric" number="204" type="device" requires="VK_KHR_get_physical_device_properties2" author="NV" contact="Pat Brown @nvpbrown" supported="vulkan">
+            <require>
+                <enum value="1"                                         name="VK_NV_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_fragment_shader_barycentric&quot;" name="VK_NV_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType" name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV"/>
+                <type name="VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_shader_image_footprint" number="205" type="device" requires="VK_KHR_get_physical_device_properties2" author="NV" contact="Pat Brown @nvpbrown" supported="vulkan">
+            <require>
+                <enum value="2"                                         name="VK_NV_SHADER_IMAGE_FOOTPRINT_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_shader_image_footprint&quot;"  name="VK_NV_SHADER_IMAGE_FOOTPRINT_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV"/>
+                <type name="VkPhysicalDeviceShaderImageFootprintFeaturesNV"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_scissor_exclusive" number="206" type="device" requires="VK_KHR_get_physical_device_properties2" author="NV" contact="Pat Brown @nvpbrown" supported="vulkan">
+            <require>
+                <enum value="1"                                         name="VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_scissor_exclusive&quot;"       name="VK_NV_SCISSOR_EXCLUSIVE_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType" name="VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV"/>
+                <enum offset="1" extends="VkDynamicState" name="VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV"/>
+                <enum offset="2" extends="VkStructureType" name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV"/>
+                <type name="VkPipelineViewportExclusiveScissorStateCreateInfoNV"/>
+                <type name="VkPhysicalDeviceExclusiveScissorFeaturesNV"/>
+                <command name="vkCmdSetExclusiveScissorNV"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_device_diagnostic_checkpoints" type="device" number="207" requires="VK_KHR_get_physical_device_properties2" author="NVIDIA" contact="Nuno Subtil @nsubtil" supported="vulkan">
+            <require>
+                <enum value="2"                                         name="VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_device_diagnostic_checkpoints&quot;" name="VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV"/>
+                <enum offset="1" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV"/>
+                <type name="VkQueueFamilyCheckpointPropertiesNV"/>
+                <type name="VkCheckpointDataNV"/>
+                <command name="vkCmdSetCheckpointNV"/>
+                <command name="vkGetQueueCheckpointDataNV"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_timeline_semaphore" number="208" type="device" author="KHR" requires="VK_KHR_get_physical_device_properties2" contact="Jason Ekstrand @jekstrand" supported="vulkan">
+            <require>
+                <enum value="2"                                         name="VK_KHR_TIMELINE_SEMAPHORE_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_timeline_semaphore&quot;"     name="VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR"/>
+                <enum offset="1" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR"/>
+                <enum offset="2" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR"/>
+                <enum offset="3" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR"/>
+                <enum offset="4" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR"/>
+                <enum offset="5" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR"/>
+                <type name="VkSemaphoreTypeKHR"/>
+                <type name="VkPhysicalDeviceTimelineSemaphoreFeaturesKHR"/>
+                <type name="VkPhysicalDeviceTimelineSemaphorePropertiesKHR"/>
+                <type name="VkSemaphoreTypeCreateInfoKHR"/>
+                <type name="VkTimelineSemaphoreSubmitInfoKHR"/>
+                <type name="VkSemaphoreWaitFlagBitsKHR"/>
+                <type name="VkSemaphoreWaitFlagsKHR"/>
+                <type name="VkSemaphoreWaitInfoKHR"/>
+                <type name="VkSemaphoreSignalInfoKHR"/>
+                <command name="vkGetSemaphoreCounterValueKHR"/>
+                <command name="vkWaitSemaphoresKHR"/>
+                <command name="vkSignalSemaphoreKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_extension_209" number="209" type="device" author="KHR" contact="Ian Elliott @ianelliott" supported="disabled">
+            <require>
+                <enum value="0"                                         name="VK_KHR_EXTENSION_209_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_extension_209&quot;"          name="VK_KHR_EXTENSION_209_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_INTEL_shader_integer_functions2" number="210" type="device" requires="VK_KHR_get_physical_device_properties2" author="INTEL" contact="Ian Romanick @ianromanick" supported="vulkan">
+            <require>
+                <enum value="1"                                         name="VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_SPEC_VERSION"/>
+                <enum value="&quot;VK_INTEL_shader_integer_functions2&quot;" name="VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL"/>
+                <type name="VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL"/>
+            </require>
+        </extension>
+        <extension name="VK_INTEL_performance_query" number="211" type="device" author="INTEL" contact="Lionel Landwerlin @llandwerlin" supported="vulkan">
+            <require>
+                <enum value="1"                                         name="VK_INTEL_PERFORMANCE_QUERY_SPEC_VERSION"/>
+                <enum value="&quot;VK_INTEL_performance_query&quot;"    name="VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL"/>
+                <enum offset="1" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL"/>
+                <enum offset="2" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL"/>
+                <enum offset="3" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL"/>
+                <enum offset="4" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL"/>
+                <enum offset="5" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL"/>
+                <enum offset="0" extends="VkQueryType"                  name="VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL"/>
+                <enum offset="0" extends="VkObjectType"                 name="VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL"/>
+                <type name="VkPerformanceConfigurationTypeINTEL"/>
+                <type name="VkQueryPoolSamplingModeINTEL"/>
+                <type name="VkPerformanceOverrideTypeINTEL"/>
+                <type name="VkPerformanceParameterTypeINTEL"/>
+                <type name="VkPerformanceValueTypeINTEL"/>
+                <type name="VkPerformanceValueDataINTEL"/>
+                <type name="VkPerformanceValueINTEL"/>
+                <type name="VkInitializePerformanceApiInfoINTEL"/>
+                <type name="VkQueryPoolCreateInfoINTEL"/>
+                <type name="VkPerformanceMarkerInfoINTEL"/>
+                <type name="VkPerformanceStreamMarkerInfoINTEL"/>
+                <type name="VkPerformanceOverrideInfoINTEL"/>
+                <type name="VkPerformanceConfigurationAcquireInfoINTEL"/>
+                <type name="VkPerformanceConfigurationINTEL"/>
+                <command name="vkInitializePerformanceApiINTEL"/>
+                <command name="vkUninitializePerformanceApiINTEL"/>
+                <command name="vkCmdSetPerformanceMarkerINTEL"/>
+                <command name="vkCmdSetPerformanceStreamMarkerINTEL"/>
+                <command name="vkCmdSetPerformanceOverrideINTEL"/>
+                <command name="vkAcquirePerformanceConfigurationINTEL"/>
+                <command name="vkReleasePerformanceConfigurationINTEL"/>
+                <command name="vkQueueSetPerformanceConfigurationINTEL"/>
+                <command name="vkGetPerformanceParameterINTEL"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_vulkan_memory_model" number="212" type="device" author="KHR" contact="Jeff Bolz @jeffbolznv" supported="vulkan">
+            <require>
+                <enum value="3"                                         name="VK_KHR_VULKAN_MEMORY_MODEL_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_vulkan_memory_model&quot;"    name="VK_KHR_VULKAN_MEMORY_MODEL_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR"/>
+                <type name="VkPhysicalDeviceVulkanMemoryModelFeaturesKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_pci_bus_info" number="213" type="device" author="EXT" requires="VK_KHR_get_physical_device_properties2" contact="Matthaeus G. Chajdas @anteru" supported="vulkan">
+            <require>
+                <enum value="2"                                         name="VK_EXT_PCI_BUS_INFO_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_pci_bus_info&quot;"           name="VK_EXT_PCI_BUS_INFO_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT"/>
+                <type name="VkPhysicalDevicePCIBusInfoPropertiesEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_display_native_hdr" number="214" type="device" author="AMD" requires="VK_KHR_get_physical_device_properties2,VK_KHR_get_surface_capabilities2,VK_KHR_swapchain" contact="Matthaeus G. Chajdas @anteru" supported="vulkan">
+            <require>
+                <enum value="1"                                         name="VK_AMD_DISPLAY_NATIVE_HDR_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_display_native_hdr&quot;"     name="VK_AMD_DISPLAY_NATIVE_HDR_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD"/>
+                <enum offset="1" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD"/>
+                <enum offset="0" extends="VkColorSpaceKHR"              name="VK_COLOR_SPACE_DISPLAY_NATIVE_AMD"/>
+                <type name="VkDisplayNativeHdrSurfaceCapabilitiesAMD"/>
+                <type name="VkSwapchainDisplayNativeHdrCreateInfoAMD"/>
+                <command name="vkSetLocalDimmingAMD"/>
+            </require>
+        </extension>
+        <extension name="VK_FUCHSIA_imagepipe_surface" number="215" type="instance" author="FUCHSIA" requires="VK_KHR_surface" platform="fuchsia" contact="Craig Stout @cdotstout" supported="vulkan">
+            <require>
+                <enum value="1"                                         name="VK_FUCHSIA_IMAGEPIPE_SURFACE_SPEC_VERSION"/>
+                <enum value="&quot;VK_FUCHSIA_imagepipe_surface&quot;"  name="VK_FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA"/>
+                <type name="VkImagePipeSurfaceCreateFlagsFUCHSIA"/>
+                <type name="VkImagePipeSurfaceCreateInfoFUCHSIA"/>
+                <command name="vkCreateImagePipeSurfaceFUCHSIA"/>
+            </require>
+        </extension>
+        <extension name="VK_GOOGLE_extension_216" number="216" author="GOOGLE" contact="Jesse Hall @critsec" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_KHR_EXTENSION_216_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_extension_216&quot;"              name="VK_KHR_EXTENSION_216_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_GOOGLE_extension_217" number="217" author="GOOGLE" contact="Jesse Hall @critsec" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_KHR_EXTENSION_217_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_extension_217&quot;"              name="VK_KHR_EXTENSION_217_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_metal_surface" number="218" type="instance" requires="VK_KHR_surface" platform="metal" supported="vulkan" author="EXT" contact="Dzmitry Malyshau @kvark">
+            <require>
+                <enum value="1"                                             name="VK_EXT_METAL_SURFACE_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_metal_surface&quot;"              name="VK_EXT_METAL_SURFACE_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT"/>
+                <type name="VkMetalSurfaceCreateFlagsEXT"/>
+                <type name="VkMetalSurfaceCreateInfoEXT"/>
+                <command name="vkCreateMetalSurfaceEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_fragment_density_map" number="219" type="device" requires="VK_KHR_get_physical_device_properties2" author="EXT" contact="Matthew Netsch @mnetsch" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_EXT_FRAGMENT_DENSITY_MAP_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_fragment_density_map&quot;"       name="VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME"/>
+                <enum offset="0"  extends="VkStructureType"                 name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT"/>
+                <enum offset="1"  extends="VkStructureType"                 name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT"/>
+                <enum offset="2"  extends="VkStructureType"                 name="VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT"/>
+                <enum bitpos="14" extends="VkImageCreateFlagBits"           name="VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT"/>
+                <enum offset="0"  extends="VkImageLayout"                   name="VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT"/>
+                <enum bitpos="24" extends="VkAccessFlagBits"                name="VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT"/>
+                <enum bitpos="24" extends="VkFormatFeatureFlagBits"         name="VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT"/>
+                <enum bitpos="9"  extends="VkImageUsageFlagBits"            name="VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT"/>
+                <enum bitpos="0"  extends="VkImageViewCreateFlagBits"       name="VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT"/>
+                <enum bitpos="23" extends="VkPipelineStageFlagBits"         name="VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT"/>
+                <enum bitpos="0"  extends="VkSamplerCreateFlagBits"         name="VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT"/>
+                <enum bitpos="1"  extends="VkSamplerCreateFlagBits"         name="VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT"/>
+                <type name="VkPhysicalDeviceFragmentDensityMapFeaturesEXT"/>
+                <type name="VkPhysicalDeviceFragmentDensityMapPropertiesEXT"/>
+                <type name="VkRenderPassFragmentDensityMapCreateInfoEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_extension_220" number="220" author="EXT" contact="Dzmitry Malyshau @kvark" supported="disabled">
+            <require>
+                <enum value="0"                                              name="VK_EXT_EXTENSION_220_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_extension_220&quot;"               name="VK_EXT_EXTENSION_220_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_extension_221" number="221" author="KHR" contact="Tobias Hector @tobski" supported="disabled">
+            <require>
+                <enum value="0"                                              name="VK_KHR_EXTENSION_221_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_extension_221&quot;"               name="VK_KHR_EXTENSION_221_EXTENSION_NAME"/>
+                <enum bitpos="0" extends="VkRenderPassCreateFlagBits"        name="VK_RENDER_PASS_CREATE_RESERVED_0_BIT_KHR"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_scalar_block_layout" number="222" requires="VK_KHR_get_physical_device_properties2" type="device" author="EXT" contact="Tobias Hector @tobski" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_EXT_SCALAR_BLOCK_LAYOUT_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_scalar_block_layout&quot;"        name="VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME"/>
+                <type                                                       name="VkPhysicalDeviceScalarBlockLayoutFeaturesEXT"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_extension_223" number="223" author="EXT" contact="Tobias Hector @tobski" supported="disabled">
+            <require>
+                <enum value="0"                                              name="VK_EXT_EXTENSION_223_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_extension_223&quot;"               name="VK_EXT_EXTENSION_223_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_GOOGLE_hlsl_functionality1" number="224" type="device" author="GOOGLE" contact="Hai Nguyen @chaoticbob" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_GOOGLE_HLSL_FUNCTIONALITY1_SPEC_VERSION"/>
+                <enum value="&quot;VK_GOOGLE_hlsl_functionality1&quot;"     name="VK_GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_GOOGLE_decorate_string" number="225" type="device" author="GOOGLE" contact="Hai Nguyen @chaoticbob" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_GOOGLE_DECORATE_STRING_SPEC_VERSION"/>
+                <enum value="&quot;VK_GOOGLE_decorate_string&quot;"         name="VK_GOOGLE_DECORATE_STRING_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_subgroup_size_control" number="226" type="device" requiresCore="1.1" author="EXT" contact="Neil Henning @sheredom" supported="vulkan">
+            <require>
+                <enum value="2"                                                name="VK_EXT_SUBGROUP_SIZE_CONTROL_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_subgroup_size_control&quot;"         name="VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME"/>
+                <type                                                          name="VkPhysicalDeviceSubgroupSizeControlFeaturesEXT"/>
+                <type                                                          name="VkPhysicalDeviceSubgroupSizeControlPropertiesEXT"/>
+                <type                                                          name="VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT"/>
+                <enum offset="0" extends="VkStructureType"                     name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT"/>
+                <enum offset="1" extends="VkStructureType"                     name="VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT"/>
+                <enum offset="2" extends="VkStructureType"                     name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT"/>
+                <enum bitpos="0" extends="VkPipelineShaderStageCreateFlagBits" name="VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT"/>
+                <enum bitpos="1" extends="VkPipelineShaderStageCreateFlagBits" name="VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_227" number="227" author="AMD" contact="Martin Dinkov @mdinkov" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_AMD_EXTENSION_227_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_227&quot;"              name="VK_AMD_EXTENSION_227_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_shader_core_properties2" number="228" type="device" author="AMD" contact="Matthaeus G. Chajdas @anteru" supported="vulkan" requires="VK_AMD_shader_core_properties">
+            <require>
+                <enum value="1"                                             name="VK_AMD_SHADER_CORE_PROPERTIES_2_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_shader_core_properties2&quot;"    name="VK_AMD_SHADER_CORE_PROPERTIES_2_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD"/>
+                <type                                                       name="VkPhysicalDeviceShaderCoreProperties2AMD"/>
+                <type                                                       name="VkShaderCorePropertiesFlagBitsAMD"/>
+                <type                                                       name="VkShaderCorePropertiesFlagsAMD"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_229" number="229" author="AMD" contact="Martin Dinkov @mdinkov" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_AMD_EXTENSION_229_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_229&quot;"              name="VK_AMD_EXTENSION_229_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_device_coherent_memory" number="230" type="device" author="AMD" contact="Tobias Hector @tobski" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_AMD_DEVICE_COHERENT_MEMORY_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_device_coherent_memory&quot;"     name="VK_AMD_DEVICE_COHERENT_MEMORY_EXTENSION_NAME"/>
+                <enum bitpos="6" extends="VkMemoryPropertyFlagBits"         name="VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD"/>
+                <enum bitpos="7" extends="VkMemoryPropertyFlagBits"         name="VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD"/>
+                <type                                                       name="VkPhysicalDeviceCoherentMemoryFeaturesAMD"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_231" number="231" author="AMD" contact="Martin Dinkov @mdinkov" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_AMD_EXTENSION_231_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_231&quot;"              name="VK_AMD_EXTENSION_231_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_232" number="232" author="AMD" contact="Martin Dinkov @mdinkov" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_AMD_EXTENSION_232_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_232&quot;"              name="VK_AMD_EXTENSION_232_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_233" number="233" author="AMD" contact="Martin Dinkov @mdinkov" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_AMD_EXTENSION_233_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_233&quot;"              name="VK_AMD_EXTENSION_233_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_234" number="234" author="AMD" contact="Martin Dinkov @mdinkov" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_AMD_EXTENSION_234_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_234&quot;"              name="VK_AMD_EXTENSION_234_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_235" number="235" author="AMD" contact="Martin Dinkov @mdinkov" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_AMD_EXTENSION_235_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_235&quot;"              name="VK_AMD_EXTENSION_235_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_AMD_extension_236" number="236" author="AMD" contact="Martin Dinkov @mdinkov" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_AMD_EXTENSION_236_SPEC_VERSION"/>
+                <enum value="&quot;VK_AMD_extension_236&quot;"              name="VK_AMD_EXTENSION_236_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_spirv_1_4" number="237" type="device" requiresCore="1.1" requires="VK_KHR_shader_float_controls" author="KHR" contact="Jesse Hall @critsec" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_KHR_SPIRV_1_4_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_spirv_1_4&quot;"                  name="VK_KHR_SPIRV_1_4_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_memory_budget" number="238" type="device" requires="VK_KHR_get_physical_device_properties2" author="EXT" contact="Jeff Bolz @jeffbolznv" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_EXT_MEMORY_BUDGET_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_memory_budget&quot;"              name="VK_EXT_MEMORY_BUDGET_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT"/>
+                <type name="VkPhysicalDeviceMemoryBudgetPropertiesEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_memory_priority" number="239" type="device" requires="VK_KHR_get_physical_device_properties2"  author="EXT" contact="Jeff Bolz @jeffbolznv" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_EXT_MEMORY_PRIORITY_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_memory_priority&quot;"            name="VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT"/>
+                <enum offset="1" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT"/>
+                <type name="VkPhysicalDeviceMemoryPriorityFeaturesEXT"/>
+                <type name="VkMemoryPriorityAllocateInfoEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_surface_protected_capabilities" number="240" type="instance" requiresCore="1.1" requires="VK_KHR_get_surface_capabilities2" author="KHR" contact="Sandeep Shinde @sashinde" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_KHR_SURFACE_PROTECTED_CAPABILITIES_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_surface_protected_capabilities&quot;"   name="VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR"/>
+                <type name="VkSurfaceProtectedCapabilitiesKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_dedicated_allocation_image_aliasing" number="241" type="device" requires="VK_KHR_dedicated_allocation" author="NVIDIA" contact="Nuno Subtil @nsubtil" supported="vulkan">
+            <require>
+                <enum value="1"                                                         name="VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_dedicated_allocation_image_aliasing&quot;"     name="VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                              name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV"/>
+                <type name="VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_separate_depth_stencil_layouts" number="242" type="device" requires="VK_KHR_get_physical_device_properties2,VK_KHR_create_renderpass2" author="KHR" contact="Piers Daniell @pdaniell-nv" supported="vulkan">
+            <require>
+                <enum value="1"                                                   name="VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_separate_depth_stencil_layouts&quot;"   name="VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                        name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR"/>
+                <enum offset="1" extends="VkStructureType"                        name="VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR"/>
+                <enum offset="2" extends="VkStructureType"                        name="VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR"/>
+                <enum offset="0" extends="VkImageLayout"                          name="VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR"/>
+                <enum offset="1" extends="VkImageLayout"                          name="VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR"/>
+                <enum offset="2" extends="VkImageLayout"                          name="VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR"/>
+                <enum offset="3" extends="VkImageLayout"                          name="VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR"/>
+                <type name="VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR"/>
+                <type name="VkAttachmentReferenceStencilLayoutKHR"/>
+                <type name="VkAttachmentDescriptionStencilLayoutKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_INTEL_extension_243" number="243" author="INTEL" contact="Slawek Grajewski @sgrajewski" supported="disabled">
+            <require>
+                <enum value="0"                                              name="VK_INTEL_EXTENSION_243_SPEC_VERSION"/>
+                <enum value="&quot;VK_INTEL_extension_243&quot;"             name="VK_INTEL_EXTENSION_243_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_MESA_extension_244" number="244" author="MESA" contact="Andres Rodriguez @lostgoat" supported="disabled">
+            <require>
+                <enum value="0"                                              name="VK_MESA_EXTENSION_244_SPEC_VERSION"/>
+                <enum value="&quot;VK_MESA_extension_244&quot;"              name="VK_MESA_EXTENSION_244_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_buffer_device_address" number="245" type="device" requires="VK_KHR_get_physical_device_properties2" author="NV" contact="Jeff Bolz @jeffbolznv"  deprecatedby="VK_KHR_buffer_device_address" supported="vulkan">
+            <require>
+                <enum value="2"                                             name="VK_EXT_BUFFER_DEVICE_ADDRESS_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_buffer_device_address&quot;"      name="VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT" alias="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT"/>
+                <enum extends="VkStructureType"                             name="VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT" alias="VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR"/>
+                <enum offset="2" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT"/>
+                <enum extends="VkBufferUsageFlagBits"                       name="VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT" alias="VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR"/>
+                <enum extends="VkBufferCreateFlagBits"                      name="VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT" alias="VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR"/>
+                <enum extends="VkResult"                                    name="VK_ERROR_INVALID_DEVICE_ADDRESS_EXT" alias="VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR"/>
+                <type name="VkPhysicalDeviceBufferAddressFeaturesEXT"/>
+                <type name="VkPhysicalDeviceBufferDeviceAddressFeaturesEXT"/>
+                <type name="VkBufferDeviceAddressInfoEXT"/>
+                <type name="VkBufferDeviceAddressCreateInfoEXT"/>
+                <command name="vkGetBufferDeviceAddressEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_tooling_info" number="246" type="device" author="EXT" contact="Tobias Hector @tobski" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_EXT_TOOLING_INFO_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_tooling_info&quot;"               name="VK_EXT_TOOLING_INFO_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT"/>
+                <type                                                       name="VkToolPurposeFlagBitsEXT"/>
+                <type                                                       name="VkToolPurposeFlagsEXT"/>
+                <type                                                       name="VkPhysicalDeviceToolPropertiesEXT"/>
+                <command                                                    name="vkGetPhysicalDeviceToolPropertiesEXT"/>
+            </require>
+            <require extension="VK_EXT_debug_report">
+                <enum bitpos="5" extends="VkToolPurposeFlagBitsEXT"         name="VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT"/>
+            </require>
+            <require extension="VK_EXT_debug_marker">
+                <enum bitpos="6" extends="VkToolPurposeFlagBitsEXT"         name="VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT"/>
+            </require>
+            <require extension="VK_EXT_debug_utils">
+                <enum bitpos="5" extends="VkToolPurposeFlagBitsEXT"         name="VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT"/>
+                <enum bitpos="6" extends="VkToolPurposeFlagBitsEXT"         name="VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_separate_stencil_usage" number="247" type="device" author="EXT" contact="Daniel Rakos @drakos-amd" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_EXT_SEPARATE_STENCIL_USAGE_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_separate_stencil_usage&quot;"     name="VK_EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT"/>
+                <type name="VkImageStencilUsageCreateInfoEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_validation_features" number="248" type="instance" author="LUNARG" contact="Karl Schultz @karl-lunarg" supported="vulkan">
+            <require>
+                <enum value="2"                                             name="VK_EXT_VALIDATION_FEATURES_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_validation_features&quot;"        name="VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT"/>
+                <type name="VkValidationFeaturesEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_extension_249" number="249" author="KHR" contact="Keith Packard @keithp" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_KHR_EXTENSION_249_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_extension_249&quot;"              name="VK_KHR_EXTENSION_249_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_cooperative_matrix" number="250" type="device" requires="VK_KHR_get_physical_device_properties2" author="NV" contact="Jeff Bolz @jeffbolznv" supported="vulkan">
+            <require>
+                <enum value="1"                                              name="VK_NV_COOPERATIVE_MATRIX_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_cooperative_matrix&quot;"           name="VK_NV_COOPERATIVE_MATRIX_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                   name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV"/>
+                <enum offset="1" extends="VkStructureType"                   name="VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV"/>
+                <enum offset="2" extends="VkStructureType"                   name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV"/>
+                <type name="VkCooperativeMatrixPropertiesNV"/>
+                <type name="VkScopeNV"/>
+                <type name="VkComponentTypeNV"/>
+                <type name="VkPhysicalDeviceCooperativeMatrixFeaturesNV"/>
+                <type name="VkPhysicalDeviceCooperativeMatrixPropertiesNV"/>
+                <command name="vkGetPhysicalDeviceCooperativeMatrixPropertiesNV"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_coverage_reduction_mode" number="251" requires="VK_NV_framebuffer_mixed_samples" type="device" author="NV" contact="Kedarnath Thangudu @kthangudu" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_NV_COVERAGE_REDUCTION_MODE_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_coverage_reduction_mode&quot;"     name="VK_NV_COVERAGE_REDUCTION_MODE_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV"/>
+                <enum offset="1" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV"/>
+                <enum offset="2" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV"/>
+                <type name="VkPhysicalDeviceCoverageReductionModeFeaturesNV"/>
+                <type name="VkPipelineCoverageReductionStateCreateInfoNV"/>
+                <type name="VkPipelineCoverageReductionStateCreateFlagsNV"/>
+                <type name="VkCoverageReductionModeNV"/>
+                <type name="VkFramebufferMixedSamplesCombinationNV"/>
+                <command name="vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_fragment_shader_interlock" number="252" author="EXT" type="device" requires="VK_KHR_get_physical_device_properties2" contact="Piers Daniell @pdaniell-nv" supported="vulkan">
+            <require>
+                <enum value="1"                                                 name="VK_EXT_FRAGMENT_SHADER_INTERLOCK_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_fragment_shader_interlock&quot;"      name="VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT"/>
+                <type name="VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_ycbcr_image_arrays" number="253" type="device" requires="VK_KHR_sampler_ycbcr_conversion" author="EXT" contact="Piers Daniell @pdaniell-nv" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_EXT_YCBCR_IMAGE_ARRAYS_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_ycbcr_image_arrays&quot;"         name="VK_EXT_YCBCR_IMAGE_ARRAYS_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT"/>
+                <type name="VkPhysicalDeviceYcbcrImageArraysFeaturesEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_uniform_buffer_standard_layout" number="254" requires="VK_KHR_get_physical_device_properties2" type="device" author="KHR" contact="Graeme Leese @gnl21" supported="vulkan">
+            <require>
+                <enum value="1"                                                 name="VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_uniform_buffer_standard_layout&quot;" name="VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME"/>
+                <type                                                           name="VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR"/>
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_extension_255" number="255" author="EXT" contact="Jesse Hall @jessehall" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_EXT_EXTENSION_255_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_extension_255&quot;"              name="VK_EXT_EXTENSION_255_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_full_screen_exclusive" number="256" type="device" author="EXT" requires="VK_KHR_get_physical_device_properties2,VK_KHR_surface,VK_KHR_get_surface_capabilities2,VK_KHR_swapchain" platform="win32" contact="James Jones @cubanismo" supported="vulkan">
+            <require>
+                <enum value="4"                                             name="VK_EXT_FULL_SCREEN_EXCLUSIVE_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_full_screen_exclusive&quot;"      name="VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT"/>
+                <enum offset="2" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT"/>
+                <enum offset="0" extends="VkResult" dir="-"                 name="VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT"/>
+                <type name="VkFullScreenExclusiveEXT"/>
+                <type name="VkSurfaceFullScreenExclusiveInfoEXT"/>
+                <type name="VkSurfaceCapabilitiesFullScreenExclusiveEXT"/>
+                <command name="vkGetPhysicalDeviceSurfacePresentModes2EXT"/>
+                <command name="vkAcquireFullScreenExclusiveModeEXT"/>
+                <command name="vkReleaseFullScreenExclusiveModeEXT"/>
+            </require>
+            <require extension="VK_KHR_win32_surface">
+                <enum offset="1" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT"/>
+                <type name="VkSurfaceFullScreenExclusiveWin32InfoEXT"/>
+            </require>
+            <require extension="VK_KHR_device_group">
+                <command name="vkGetDeviceGroupSurfacePresentModes2EXT"/>
+            </require>
+            <require feature="VK_VERSION_1_1">
+                <command name="vkGetDeviceGroupSurfacePresentModes2EXT"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_headless_surface" number="257" type="instance" requires="VK_KHR_surface" author="EXT" contact="Lisa Wu @chengtianww" supported="vulkan">
+            <require>
+                <enum value="1"                                                 name="VK_EXT_HEADLESS_SURFACE_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_headless_surface&quot;"               name="VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                      name="VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT"/>
+                <type name="VkHeadlessSurfaceCreateFlagsEXT"/>
+                <type name="VkHeadlessSurfaceCreateInfoEXT"/>
+                <command name="vkCreateHeadlessSurfaceEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_buffer_device_address" number="258" type="device" requires="VK_KHR_get_physical_device_properties2" author="KHR" contact="Jeff Bolz @jeffbolznv" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_KHR_BUFFER_DEVICE_ADDRESS_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_buffer_device_address&quot;"      name="VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR"/>
+                <enum offset="1" extends="VkStructureType" extnumber="245"  name="VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR"/>
+                <enum offset="2" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR"/>
+                <enum offset="3" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR"/>
+                <enum offset="4" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR"/>
+                <enum bitpos="17" extends="VkBufferUsageFlagBits"           name="VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR"/>
+                <enum bitpos="4"  extends="VkBufferCreateFlagBits"          name="VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR"/>
+                <enum bitpos="1" extends="VkMemoryAllocateFlagBits"         name="VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR"/>
+                <enum bitpos="2" extends="VkMemoryAllocateFlagBits"         name="VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR"/>
+                <enum offset="0" dir="-" extends="VkResult" extnumber="245" name="VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR"/>
+                <type name="VkPhysicalDeviceBufferDeviceAddressFeaturesKHR"/>
+                <type name="VkBufferDeviceAddressInfoKHR"/>
+                <type name="VkBufferOpaqueCaptureAddressCreateInfoKHR"/>
+                <type name="VkMemoryOpaqueCaptureAddressAllocateInfoKHR"/>
+                <type name="VkDeviceMemoryOpaqueCaptureAddressInfoKHR"/>
+                <command name="vkGetBufferDeviceAddressKHR"/>
+                <command name="vkGetBufferOpaqueCaptureAddressKHR"/>
+                <command name="vkGetDeviceMemoryOpaqueCaptureAddressKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_extension_259" number="259" author="EXT" contact="Jeff Leger @jackohound" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_EXT_EXTENSION_259_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_extension_259&quot;"              name="VK_EXT_EXTENSION_259_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_line_rasterization" number="260" type="device" requires="VK_KHR_get_physical_device_properties2" author="EXT" contact="Jeff Bolz @jeffbolznv" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_EXT_LINE_RASTERIZATION_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_line_rasterization&quot;"         name="VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT"/>
+                <enum offset="1" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT"/>
+                <enum offset="2" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT"/>
+                <enum offset="0" extends="VkDynamicState"                   name="VK_DYNAMIC_STATE_LINE_STIPPLE_EXT"/>
+                <type name="VkPhysicalDeviceLineRasterizationFeaturesEXT"/>
+                <type name="VkPhysicalDeviceLineRasterizationPropertiesEXT"/>
+                <type name="VkPipelineRasterizationLineStateCreateInfoEXT"/>
+                <type name="VkLineRasterizationModeEXT"/>
+                <command name="vkCmdSetLineStippleEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_extension_261" number="261" author="NV" contact="Kedarnath Thangudu @kthangudu" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_NV_EXTENSION_261_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_extension_261&quot;"               name="VK_NV_EXTENSION_261_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_host_query_reset" number="262" author="EXT" contact="Bas Nieuwenhuizen @BNieuwenhuizen" supported="vulkan" type="device" requires="VK_KHR_get_physical_device_properties2">
+            <require>
+                <enum value="1"                                             name="VK_EXT_HOST_QUERY_RESET_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_host_query_reset&quot;"           name="VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT"/>
+                <type name="VkPhysicalDeviceHostQueryResetFeaturesEXT"/>
+                <command name="vkResetQueryPoolEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_GGP_extension_263" number="263" author="GGP" contact="Jean-Francois Roy @jfroy" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_GOOGLE_EXTENSION_263_SPEC_VERSION"/>
+                <enum value="&quot;VK_GGP_extension_263&quot;"              name="VK_GOOGLE_EXTENSION_263_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_BRCM_extension_264" number="264" author="BRCM" contact="Graeme Leese @gnl21" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_BRCM_EXTENSION_264_SPEC_VERSION"/>
+                <enum value="&quot;VK_BRCM_extension_264&quot;"             name="VK_BRCM_EXTENSION_264_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_BRCM_extension_265" number="265" author="BRCM" contact="Graeme Leese @gnl21" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_BRCM_EXTENSION_265_SPEC_VERSION"/>
+                <enum value="&quot;VK_BRCM_extension_265&quot;"             name="VK_BRCM_EXTENSION_265_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_index_type_uint8" number="266" type="device" author="EXT" contact="Piers Daniell @pdaniell-nv" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_EXT_INDEX_TYPE_UINT8_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_index_type_uint8&quot;"           name="VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT"/>
+                <enum offset="0" extends="VkIndexType"                      name="VK_INDEX_TYPE_UINT8_EXT"/>
+                <type name="VkPhysicalDeviceIndexTypeUint8FeaturesEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_extension_267" number="267" type="device" author="EXT" contact="Piers Daniell @pdaniell-nv" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_EXT_EXTENSION_267_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_extension_267&quot;"              name="VK_EXT_extension_267"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_extension_268" number="268" type="device" author="KHR" contact="Piers Daniell @pdaniell-nv" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_EXT_EXTENSION_268_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_extension_268&quot;"              name="VK_EXT_extension_268"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_extension_269" number="269" type="device" author="KHR" contact="Josh Barczak @jbarczak" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_KHR_EXTENSION_269_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_extension_269&quot;"              name="VK_KHR_extension_269"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_pipeline_executable_properties" number="270" type="device" author="KHR" contact="Jason Ekstrand @jekstrand" supported="vulkan">
+            <require>
+                <enum value="1"                                         name="VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_pipeline_executable_properties&quot;"   name="VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR"/>
+                <enum offset="1" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR"/>
+                <enum offset="2" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR"/>
+                <enum offset="3" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR"/>
+                <enum offset="4" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR"/>
+                <enum offset="5" extends="VkStructureType"              name="VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR"/>
+                <enum bitpos="6" extends="VkPipelineCreateFlagBits"     name="VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR"/>
+                <enum bitpos="7" extends="VkPipelineCreateFlagBits"     name="VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR"/>
+                <type name="VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR"/>
+                <type name="VkPipelineInfoKHR"/>
+                <type name="VkPipelineExecutablePropertiesKHR"/>
+                <type name="VkPipelineExecutableInfoKHR"/>
+                <type name="VkPipelineExecutableStatisticFormatKHR"/>
+                <type name="VkPipelineExecutableStatisticValueKHR"/>
+                <type name="VkPipelineExecutableStatisticKHR"/>
+                <type name="VkPipelineExecutableInternalRepresentationKHR"/>
+                <command name="vkGetPipelineExecutablePropertiesKHR"/>
+                <command name="vkGetPipelineExecutableStatisticsKHR"/>
+                <command name="vkGetPipelineExecutableInternalRepresentationsKHR"/>
+            </require>
+        </extension>
+        <extension name="VK_INTEL_extension_271" number="271" type="device" author="INTEL" contact="Jason Ekstrand @jekstrand" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_INTEL_EXTENSION_271_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_extension_271&quot;"              name="VK_INTEL_extension_271"/>
+            </require>
+        </extension>
+        <extension name="VK_INTEL_extension_272" number="272" type="device" author="INTEL" contact="Jason Ekstrand @jekstrand" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_INTEL_EXTENSION_272_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_extension_272&quot;"              name="VK_INTEL_extension_272"/>
+            </require>
+        </extension>
+        <extension name="VK_INTEL_extension_273" number="273" type="device" author="INTEL" contact="Jason Ekstrand @jekstrand" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_INTEL_EXTENSION_273_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_extension_273&quot;"              name="VK_INTEL_extension_273"/>
+            </require>
+        </extension>
+        <extension name="VK_INTEL_extension_274" number="274" type="device" author="INTEL" contact="Jason Ekstrand @jekstrand" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_INTEL_EXTENSION_274_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_extension_274&quot;"              name="VK_INTEL_extension_274"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_extension_275" number="275" type="instance" author="KHR" contact="Lionel Landwerlin @llandwerlin" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_KHR_EXTENSION_275_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_extension_275&quot;"              name="VK_KHR_extension_275"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_extension_276" number="276" type="device" author="KHR" contact="James Jones @cubanismo" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_KHR_EXTENSION_276_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_extension_276&quot;"              name="VK_KHR_extension_276"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_shader_demote_to_helper_invocation" number="277" type="device" requires="VK_KHR_get_physical_device_properties2" author="EXT" contact="Jeff Bolz @jeffbolznv" supported="vulkan">
+            <require>
+                <enum value="1"                                                     name="VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_shader_demote_to_helper_invocation&quot;" name="VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                          name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT"/>
+                <type name="VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_extension_278" number="278" type="device" author="NV" contact="Christoph Kubisch @pixeljetstream" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_NV_EXTENSION_278_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_extension_278&quot;"               name="VK_NV_extension_278"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_extension_279" number="279" type="device" author="NV" contact="Christoph Kubisch @pixeljetstream" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_NV_EXTENSION_279_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_extension_279&quot;"               name="VK_NV_extension_279"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_extension_280" number="280" type="device" author="KHR" contact="Kevin Petit @kevinpetit" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_KHR_EXTENSION_280_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_extension_280&quot;"              name="VK_KHR_extension_280"/>
+            </require>
+        </extension>
+        <extension name="VK_ARM_extension_281" number="281" type="device" author="ARM" contact="Kevin Petit @kevinpetit" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_ARM_EXTENSION_281_SPEC_VERSION"/>
+                <enum value="&quot;VK_ARM_extension_281&quot;"              name="VK_ARM_extension_281"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_texel_buffer_alignment" number="282" type="device" requires="VK_KHR_get_physical_device_properties2" author="EXT" contact="Jeff Bolz @jeffbolznv" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_texel_buffer_alignment&quot;"     name="VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME"/>
+                <enum offset="0" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT"/>
+                <enum offset="1" extends="VkStructureType"                  name="VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT"/>
+                <type name="VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT"/>
+                <type name="VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT"/>
+            </require>
+        </extension>
+        <extension name="VK_QCOM_extension_283" number="283" type="device" author="QCOM" contact="Jeff Leger @jackohound" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_QCOM_EXTENSION_283_SPEC_VERSION"/>
+                <enum value="&quot;VK_QCOM_extension_283&quot;"             name="VK_QCOM_extension_283"/>
+                <enum bitpos="1" extends="VkRenderPassCreateFlagBits"       name="VK_RENDER_PASS_RESERVED_BIT_1_QCOM"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_extension_284" number="284" type="device" author="EXT" contact="Samuel Pitoiset @hakzsam" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_EXT_EXTENSION_284_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_extension_284&quot;"              name="VK_EXT_extension_284"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_extension_285" number="285" type="device" author="EXT" contact="Yiwei Zhang @zzyiwei" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_EXT_EXTENSION_285_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_extension_285&quot;"              name="VK_EXT_extension_285"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_extension_286" number="286" type="instance" author="EXT" contact="Drew DeVault sir@cmpwn.com" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_EXT_EXTENSION_286_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_extension_286&quot;"              name="VK_EXT_extension_286"/>
+            </require>
+        </extension>
+        <extension name="VK_NVX_extension_287" number="287" author="NVX" contact="Liam Middlebrook @liam-middlebrook" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_NVX_EXTENSION_287_SPEC_VERSION"/>
+                <enum value="&quot;VK_NVX_extension_287&quot;"              name="VK_NVX_EXTENSION_287_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_NVX_extension_288" number="288" author="NVX" contact="Liam Middlebrook @liam-middlebrook" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_NVX_EXTENSION_288_SPEC_VERSION"/>
+                <enum value="&quot;VK_NVX_extension_288&quot;"              name="VK_NVX_EXTENSION_288_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_extension_289" number="289" author="EXT" contact="Jan-Harald Fredriksen @janharaldfredriksen-arm" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_EXT_EXTENSION_289_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_extension_289&quot;"              name="VK_EXT_EXTENSION_289_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_GOOGLE_user_type" number="290" type="device" author="GOOGLE" contact="Kaye Mason @chaleur" supported="vulkan">
+            <require>
+                <enum value="1"                                             name="VK_GOOGLE_USER_TYPE_SPEC_VERSION"/>
+                <enum value="&quot;VK_GOOGLE_user_type&quot;"               name="VK_GOOGLE_USER_TYPE_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_extension_291" number="291" author="NV" contact="Daniel Koch @dgkoch" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_NV_EXTENSION_291_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_extension_291&quot;"               name="VK_NV_EXTENSION_291_EXTENSION_NAME"/>
+                <enum bitpos="12"  extends="VkPipelineCreateFlagBits"       name="VK_PIPELINE_CREATE_EXTENSION_291_BIT0_NV"/>
+                <enum bitpos="13"  extends="VkPipelineCreateFlagBits"       name="VK_PIPELINE_CREATE_EXTENSION_291_BIT1_NV"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_extension_292" number="292" author="NV" contact="Daniel Koch @dgkoch" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_NV_EXTENSION_292_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_extension_292&quot;"               name="VK_NV_EXTENSION_292_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_extension_293" number="293" author="NV" contact="Daniel Koch @dgkoch" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_NV_EXTENSION_293_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_extension_293&quot;"               name="VK_NV_EXTENSION_293_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_extension_294" number="294" author="KHR" contact="Baldur Karlsson @baldurk" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_KHR_EXTENSION_294_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_extension_294&quot;"               name="VK_KHR_EXTENSION_294_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_extension_295" number="295" author="KHR" contact="Keith Packard @keithp" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_KHR_EXTENSION_295_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_extension_295&quot;"              name="VK_KHR_EXTENSION_295_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_extension_296" number="296" author="NV" contact="Matthew Rusch gitlab:@mrusch" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_NV_EXTENSION_296_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_extension_296&quot;"               name="VK_NV_EXTENSION_296_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_extension_297" number="297" author="KHR" contact="Corentin Wallez @Kangz" supported="disabled">
+            <require>
+                <enum value="0"                                             name="VK_KHR_EXTENSION_297_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_extension_297&quot;"              name="VK_KHR_EXTENSION_297_EXTENSION_NAME"/>
+                <enum bitpos="3" extends="VkPipelineShaderStageCreateFlagBits"  name="VK_PIPELINE_SHADER_STAGE_CREATE_RESERVED_3_BIT_KHR"/>
+            </require>
+        </extension>
+        <extension name="VK_EXT_extension_298" number="298" author="AMD" contact="Gregory Grebe @grgrebe-amd" supported="disabled">
+            <require>
+                <enum value="0"                                         name="VK_EXT_EXTENSION_298_SPEC_VERSION"/>
+                <enum value="&quot;VK_EXT_extension_298&quot;"          name="VK_EXT_EXTENSION_298_EXTENSION_NAME"/>
+                <enum bitpos="8"  extends="VkPipelineCreateFlagBits"    name="VK_PIPELINE_CREATE_RESERVED_8_BIT_EXT"/>
+                <enum bitpos="9"  extends="VkPipelineCreateFlagBits"    name="VK_PIPELINE_CREATE_RESERVED_9_BIT_EXT"/>
+                <enum bitpos="10" extends="VkPipelineCreateFlagBits"    name="VK_PIPELINE_CREATE_RESERVED_10_BIT_EXT"/>
+                <enum extends="VkResult" offset="0"                     name="VK_RESULT_EXT_298_RESERVED_VALUE_0_EXT"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_extension_299" number="299" author="KHR" contact="Mark Bellamy @mark.bellamy_arm" supported="disabled">
+            <require>
+                <enum value="0"                                         name="VK_KHR_EXTENSION_299_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_extension_299&quot;"          name="VK_KHR_EXTENSION_299_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_KHR_extension_300" number="300" author="KHR" contact="Aidan Fabius @afabius" supported="disabled">
+            <require>
+                <enum value="0"                                         name="VK_KHR_EXTENSION_300_SPEC_VERSION"/>
+                <enum value="&quot;VK_KHR_extension_300&quot;"          name="VK_KHR_EXTENSION_300_EXTENSION_NAME"/>
+            </require>
+        </extension>
+        <extension name="VK_NV_extension_301" number="301" author="NV" contact="Kedarnath Thangudu @kthangudu" supported="disabled">
+            <require>
+                <enum value="0"                                         name="VK_NV_EXTENSION_301_SPEC_VERSION"/>
+                <enum value="&quot;VK_NV_extension_301&quot;"           name="VK_NV_EXTENSION_301_EXTENSION_NAME"/>
+            </require>
+        </extension>
+    </extensions>
+</registry>
diff --git a/src/third_party/vulkan-headers/src/registry/vkconventions.py b/src/third_party/vulkan-headers/src/registry/vkconventions.py
new file mode 100644
index 0000000..3d0e32a
--- /dev/null
+++ b/src/third_party/vulkan-headers/src/registry/vkconventions.py
@@ -0,0 +1,248 @@
+#!/usr/bin/python3 -i
+#
+# Copyright (c) 2013-2019 The Khronos Group Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Working-group-specific style conventions,
+# used in generation.
+
+import re
+
+from conventions import ConventionsBase
+
+
+# Modified from default implementation - see category_requires_validation() below
+CATEGORIES_REQUIRING_VALIDATION = set(('handle', 'enum', 'bitmask'))
+
+# Tokenize into "words" for structure types, approximately per spec "Implicit Valid Usage" section 2.7.2
+# This first set is for things we recognize explicitly as words,
+# as exceptions to the general regex.
+# Ideally these would be listed in the spec as exceptions, as OpenXR does.
+SPECIAL_WORDS = set((
+    '16Bit',  # VkPhysicalDevice16BitStorageFeatures
+    '8Bit',  # VkPhysicalDevice8BitStorageFeaturesKHR
+    'AABB',  # VkGeometryAABBNV
+    'ASTC',  # VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT
+    'D3D12',  # VkD3D12FenceSubmitInfoKHR
+    'Float16',  # VkPhysicalDeviceShaderFloat16Int8FeaturesKHR
+    'ImagePipe',  # VkImagePipeSurfaceCreateInfoFUCHSIA
+    'Int64',  # VkPhysicalDeviceShaderAtomicInt64FeaturesKHR
+    'Int8',  # VkPhysicalDeviceShaderFloat16Int8FeaturesKHR
+    'MacOS',  # VkMacOSSurfaceCreateInfoMVK
+    'Uint8',  # VkPhysicalDeviceIndexTypeUint8FeaturesEXT
+    'Win32',  # VkWin32SurfaceCreateInfoKHR
+))
+# A regex to match any of the SPECIAL_WORDS
+EXCEPTION_PATTERN = r'(?P<exception>{})'.format(
+    '|'.join('(%s)' % re.escape(w) for w in SPECIAL_WORDS))
+MAIN_RE = re.compile(
+    # the negative lookahead is to prevent the all-caps pattern from being too greedy.
+    r'({}|([0-9]+)|([A-Z][a-z]+)|([A-Z][A-Z]*(?![a-z])))'.format(EXCEPTION_PATTERN))
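+# Illustrative only (not part of the upstream script): given the definitions
+# above, MAIN_RE splits a mixed-case API name into words, with SPECIAL_WORDS
+# matched first so they are not broken up by the digit/all-caps rules, e.g.
+#   [m[0] for m in MAIN_RE.findall('VkD3D12FenceSubmitInfoKHR')]
+# is expected to yield ['Vk', 'D3D12', 'Fence', 'Submit', 'Info', 'KHR'].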
+
+
+class VulkanConventions(ConventionsBase):
+    def formatExtension(self, name):
+        """Mark up a name as an extension for the spec."""
+        return '`<<{}>>`'.format(name)
+
+    @property
+    def null(self):
+        """Preferred spelling of NULL."""
+        return '`NULL`'
+
+    @property
+    def struct_macro(self):
+        """Get the appropriate format macro for a structure.
+
+        Primarily affects generated valid usage statements.
+        """
+
+        return 'slink:'
+
+    @property
+    def constFlagBits(self):
+        """Returns True if static const flag bits should be generated, False if an enumerated type should be generated."""
+        return False
+
+    @property
+    def structtype_member_name(self):
+        """Return name of the structure type member"""
+        return 'sType'
+
+    @property
+    def nextpointer_member_name(self):
+        """Return name of the structure pointer chain member"""
+        return 'pNext'
+
+    @property
+    def valid_pointer_prefix(self):
+        """Return prefix to pointers which must themselves be valid"""
+        return 'valid'
+
+    def is_structure_type_member(self, paramtype, paramname):
+        """Determine if member type and name match the structure type member."""
+        return paramtype == 'VkStructureType' and paramname == self.structtype_member_name
+
+    def is_nextpointer_member(self, paramtype, paramname):
+        """Determine if member type and name match the next pointer chain member."""
+        return paramtype == 'void' and paramname == self.nextpointer_member_name
+
+    def generate_structure_type_from_name(self, structname):
+        """Generate a structure type name, like VK_STRUCTURE_TYPE_CREATE_INSTANCE_INFO"""
+        structure_type_parts = []
+        # Tokenize into "words"
+        for elem in MAIN_RE.findall(structname):
+            word = elem[0]
+            if word == 'Vk':
+                structure_type_parts.append('VK_STRUCTURE_TYPE')
+            else:
+                structure_type_parts.append(word.upper())
+        return '_'.join(structure_type_parts)
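+    # Illustrative only (not part of the upstream script): for example,
+    # generate_structure_type_from_name('VkPhysicalDevice16BitStorageFeatures')
+    # is expected to return 'VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES',
+    # with the special word '16Bit' kept as a single token.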
+
+    @property
+    def warning_comment(self):
+        """Return warning comment to be placed in header of generated Asciidoctor files"""
+        return '// WARNING: DO NOT MODIFY! This file is automatically generated from the vk.xml registry'
+
+    @property
+    def file_suffix(self):
+        """Return suffix of generated Asciidoctor files"""
+        return '.txt'
+
+    def api_name(self, spectype='api'):
+        """Return API or specification name for citations in ref pages.ref
+           pages should link to for
+
+           spectype is the spec this refpage is for: 'api' is the Vulkan API
+           Specification. Defaults to 'api'. If an unrecognized spectype is
+           given, returns None.
+        """
+        if spectype == 'api' or spectype is None:
+            return 'Vulkan'
+        else:
+            return None
+
+    @property
+    def xml_supported_name_of_api(self):
+        """Return the supported= attribute used in API XML"""
+        return 'vulkan'
+
+    @property
+    def api_prefix(self):
+        """Return API token prefix"""
+        return 'VK_'
+
+    @property
+    def write_contacts(self):
+        """Return whether contact list should be written to extension appendices"""
+        return True
+
+    @property
+    def write_refpage_include(self):
+        """Return whether refpage include should be written to extension appendices"""
+        return True
+
+    @property
+    def member_used_for_unique_vuid(self):
+        """Return the member name used in the VUID-...-...-unique ID."""
+        return self.structtype_member_name
+
+    def is_externsync_command(self, protoname):
+        """Returns True if the protoname element is an API command requiring
+           external synchronization
+        """
+        return protoname is not None and 'vkCmd' in protoname
+
+    def is_api_name(self, name):
+        """Returns True if name is in the reserved API namespace.
+        For Vulkan, these are names with a case-insensitive 'vk' prefix, or
+        a 'PFN_vk' function pointer type prefix.
+        """
+        return name[0:2].lower() == 'vk' or name[0:6] == 'PFN_vk'
+
+    def specURL(self, spectype='api'):
+        """Return public registry URL which ref pages should link to for the
+           current all-extensions HTML specification, so xrefs in the
+           asciidoc source that aren't to ref pages can link into it
+           instead. N.b. this may need to change on a per-refpage basis if
+           there are multiple documents involved.
+        """
+        return 'https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html'
+
+    @property
+    def xml_api_name(self):
+        """Return the name used in the default API XML registry for the default API"""
+        return 'vulkan'
+
+    @property
+    def registry_path(self):
+        """Return relpath to the default API XML registry in this project."""
+        return 'xml/vk.xml'
+
+    @property
+    def specification_path(self):
+        """Return relpath to the Asciidoctor specification sources in this project."""
+        return '{generated}/meta'
+
+    @property
+    def extra_refpage_headers(self):
+        """Return any extra text to add to refpage headers."""
+        return 'include::../config/attribs.txt[]'
+
+    @property
+    def extension_index_prefixes(self):
+        """Return a list of extension prefixes used to group extension refpages."""
+        return ['VK_KHR', 'VK_EXT', 'VK']
+
+    @property
+    def unified_flag_refpages(self):
+        """Return True if Flags/FlagBits refpages are unified, False if
+           they're separate.
+        """
+        return False
+
+    @property
+    def spec_reflow_path(self):
+        """Return the relative path to the spec source folder to reflow"""
+        return '.'
+
+    @property
+    def spec_no_reflow_dirs(self):
+        """Return a set of directories not to automatically descend into
+           when reflowing spec text
+        """
+        return ('scripts', 'style')
+
+    @property
+    def zero(self):
+        return '`0`'
+
+    def category_requires_validation(self, category):
+        """Return True if the given type 'category' always requires validation.
+
+        Overridden because Vulkan doesn't require "valid" text for basetype in the spec right now."""
+        return category in CATEGORIES_REQUIRING_VALIDATION
+
+    @property
+    def should_skip_checking_codes(self):
+        """Return True if more than the basic validation of return codes should
+        be skipped for a command.
+
+        Vulkan mostly relies on the validation layers rather than API
+        builtin error checking, so these checks are not appropriate.
+
+        For example, passing in a VkFormat parameter will not potentially
+        generate a VK_ERROR_FORMAT_NOT_SUPPORTED code."""
+
+        return True
diff --git a/src/third_party/vulkan-loader/src/.appveyor.yml b/src/third_party/vulkan-loader/src/.appveyor.yml
new file mode 100644
index 0000000..5146891
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/.appveyor.yml
@@ -0,0 +1,77 @@
+# Windows Build Configuration for AppVeyor
+# http://www.appveyor.com/docs/appveyor-yml
+#
+
+# This version starts a separate job for each platform config
+# in order to get around the AppVeyor limit of 60 mins per job.
+
+# build version format
+version: "{build}"
+
+# Free accounts have a max of 1, but ask anyway.
+max_jobs: 4
+
+os:
+  - Visual Studio 2015
+
+init:
+  - git config --global core.autocrlf true
+
+environment:
+  PYTHON_PATH: "C:/Python35"
+  PYTHON_PACKAGE_PATH: "C:/Python35/Scripts"
+  CMAKE_URL: "http://cmake.org/files/v3.10/cmake-3.10.2-win64-x64.zip"
+
+branches:
+  only:
+    - master
+
+install:
+  - appveyor DownloadFile %CMAKE_URL% -FileName cmake.zip
+  - 7z x cmake.zip -oC:\cmake > nul
+  - set path=C:\cmake\bin;%path%
+  - cmake --version
+
+before_build:
+  - "SET PATH=C:\\Python35;C:\\Python35\\Scripts;%PATH%"
+  - echo.
+  - echo Starting build for %APPVEYOR_REPO_NAME%
+  # Install dependencies
+  - python scripts/update_deps.py --dir=external
+  # Get Google Test
+  - git clone https://github.com/google/googletest.git external/googletest
+  - cd external/googletest
+  - git checkout tags/release-1.8.1
+  - cd %APPVEYOR_BUILD_FOLDER%
+  # Verify consistency between source file generators and output
+  - echo Verifying consistency between source file generators and output
+  - python scripts/generate_source.py --verify external/Vulkan-Headers/registry
+  # Generate build files using CMake for the build step.
+  - echo Generating CMake files for %PLATFORM%
+  - mkdir build
+  - cd build
+  - cmake -A %PLATFORM% -C../external/helper.cmake ..
+  - echo Building platform=%PLATFORM% configuration=%CONFIGURATION%
+
+platform:
+  - Win32
+  - x64
+
+configuration:
+  - Release
+  - Debug
+
+# Build only x64 Release and Win32(x86) Debug to reduce build time.
+# This should still provide adequate 32-bit vs 64-bit and
+# Release vs Debug coverage.
+matrix:
+  exclude:
+    - configuration: Release
+      platform: Win32
+    - configuration: Debug
+      platform: x64
+
+build:
+  parallel: true                   # enable MSBuild parallel builds
+  project: build/Vulkan-Loader.sln # path to Visual Studio solution or project
+  verbosity: quiet                 # quiet|minimal|normal|detailed
diff --git a/src/third_party/vulkan-loader/src/.clang-format b/src/third_party/vulkan-loader/src/.clang-format
new file mode 100644
index 0000000..0af4d40
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/.clang-format
@@ -0,0 +1,7 @@
+---
+# Use defaults from the Google style with the following exceptions:
+BasedOnStyle: Google
+IndentWidth: 4
+ColumnLimit: 132
+SortIncludes: false
+...
diff --git a/src/third_party/vulkan-loader/src/.cmake-format.py b/src/third_party/vulkan-loader/src/.cmake-format.py
new file mode 100644
index 0000000..07d2f99
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/.cmake-format.py
@@ -0,0 +1,34 @@
+# Configuration for cmake-format (v0.4.1, circa Jul 2018)
+# https://github.com/cheshirekow/cmake_format
+
+# How wide to allow formatted cmake files
+line_width = 132
+
+# How many spaces to tab for indent
+tab_size = 4
+
+# If arglists are longer than this, break them always
+max_subargs_per_line = 3
+
+# If true, separate flow control names from their parentheses with a space
+separate_ctrl_name_with_space = False
+
+# If true, separate function names from parentheses with a space
+separate_fn_name_with_space = False
+
+# If a statement is wrapped to more than one line, then dangle the closing
+# parenthesis on its own line
+dangle_parens = False
+
+# What character to use for bulleted lists
+bullet_char = u'*'
+
+# What character to use as punctuation after numerals in an enumerated list
+enum_char = u'.'
+
+# What style line endings to use in the output.
+line_ending = u'unix'
+
+# Format command names consistently as 'lower' or 'upper' case
+command_case = u'lower'
+
diff --git a/src/third_party/vulkan-loader/src/.gn b/src/third_party/vulkan-loader/src/.gn
new file mode 100644
index 0000000..e190259
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/.gn
@@ -0,0 +1,22 @@
+# Copyright (C) 2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+buildconfig = "//build/config/BUILDCONFIG.gn"
+secondary_source = "//build-gn/secondary/"
+
+default_args = {
+    clang_use_chrome_plugins = false
+    use_custom_libcxx = false
+}
+
diff --git a/src/third_party/vulkan-loader/src/.travis.yml b/src/third_party/vulkan-loader/src/.travis.yml
new file mode 100644
index 0000000..2a302cf
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/.travis.yml
@@ -0,0 +1,139 @@
+# Build Configuration for Travis CI
+# https://travis-ci.org
+
+dist: xenial
+sudo: required
+language: cpp
+
+matrix:
+  # Show final status immediately if a test fails.
+  fast_finish: true
+  allow_failures:
+    - env: CHECK_COMMIT_FORMAT=ON
+    - env: VULKAN_BUILD_TARGET=GN
+  include:
+    # Linux GCC debug build.
+    - os: linux
+      compiler: gcc
+      env: VULKAN_BUILD_TARGET=LINUX
+    # Linux clang debug build.
+    - os: linux
+      compiler: clang
+      env: VULKAN_BUILD_TARGET=LINUX
+    # Linux GN debug build
+    - os: linux
+      env: VULKAN_BUILD_TARGET=GN
+    # MacOS clang debug build.
+    - os: osx
+      compiler: clang
+      env: VULKAN_BUILD_TARGET=MACOS
+    # Check for proper clang formatting in the pull request.
+    - env: CHECK_FORMAT=ON
+    # Check for proper commit message formatting for commits in PR
+    - env: CHECK_COMMIT_FORMAT=ON
+
+cache: ccache
+
+# Use set -e so that the build fails when a command fails.
+# Note that set +e must be called at the end or else failures may occur within Travis
+# The default action for Travis-CI is to continue running even if a command fails.
+# See https://github.com/travis-ci/travis-ci/issues/1066.
+# Use the YAML block scalar header (|) to allow easier multiline script coding.
+
+before_install:
+  - set -e
+  - CMAKE_VERSION=3.10.2
+  - |
+    if [[ "${TRAVIS_OS_NAME}" == "linux" ]]; then
+      # Upgrade to the desired version of CMake
+      CMAKE_URL="https://cmake.org/files/v${CMAKE_VERSION%.*}/cmake-${CMAKE_VERSION}-Linux-x86_64.tar.gz"
+      echo CMAKE_URL=${CMAKE_URL}
+      mkdir cmake-${CMAKE_VERSION} && travis_retry wget --no-check-certificate -O - ${CMAKE_URL} | tar --strip-components=1 -xz -C cmake-${CMAKE_VERSION}
+      export PATH=${PWD}/cmake-${CMAKE_VERSION}/bin:${PATH}
+    else
+      brew install cmake || brew upgrade cmake
+    fi
+    cmake --version
+  - unset -f cd pushd popd
+  - |
+    if [[ "$TRAVIS_EVENT_TYPE" == "cron" ]]; then
+        # Add an option to update dependencies from master
+        UPDATE_DEPS_EXTRA_OPTIONS="--ref=master"
+    fi
+  - |
+    if [[ "$VULKAN_BUILD_TARGET" == "LINUX" ]] || [[ "$VULKAN_BUILD_TARGET" == "GN" ]]; then
+      # Install the appropriate Linux packages.
+      sudo apt-get -qq update
+      sudo apt-get -y install libxkbcommon-dev libwayland-dev libxrandr-dev libx11-xcb-dev \
+                              python-pathlib
+    fi
+  - |
+    if [[ "$VULKAN_BUILD_TARGET" == "LINUX" ]] || [[ "$VULKAN_BUILD_TARGET" == "MACOS" ]]; then
+      # Install dependencies
+      python scripts/update_deps.py --dir=external $UPDATE_DEPS_EXTRA_OPTIONS
+      # Get Google Test
+      git clone https://github.com/google/googletest.git external/googletest
+      pushd ${TRAVIS_BUILD_DIR}/external/googletest
+      git checkout tags/release-1.8.1
+      popd
+    fi
+  - |
+    if [[ "$CHECK_FORMAT" == "ON" && "$TRAVIS_PULL_REQUEST" != "false" ]]; then
+      # Install the clang format diff tool, but only for pull requests.
+      curl -L http://llvm.org/svn/llvm-project/cfe/trunk/tools/clang-format/clang-format-diff.py -o scripts/clang-format-diff.py;
+    fi
+  # Misc setup
+  - export core_count=$(nproc || echo 4) && echo core_count = $core_count
+  - set +e
+
+# It is important to use `unset -f cd` on MacOS because RVM overrides it, which causes conflicts with `set -e`
+
+script:
+  - set -e
+  - |
+    if [[ "$VULKAN_BUILD_TARGET" == "LINUX" ]] || [[ "$VULKAN_BUILD_TARGET" == "MACOS" ]]; then
+      # Verify consistency between source file generators and output
+      echo Verifying consistency between source file generators and output
+      python3 scripts/generate_source.py --verify external/Vulkan-Headers/registry
+      # Build Vulkan-Loader
+      mkdir build
+      cd build
+      cmake -DCMAKE_BUILD_TYPE=Debug -C../external/helper.cmake ..
+      make -j $core_count
+      cd ..
+    fi
+  - |
+    if [[ "$VULKAN_BUILD_TARGET" == "GN" ]]; then
+      git clone https://chromium.googlesource.com/chromium/tools/depot_tools.git depot_tools
+      export PATH=$PATH:$PWD/depot_tools
+      ./build-gn/update_deps.sh
+      gn gen out/Debug
+      ninja -C out/Debug
+    fi
+  - |
+    if [[ "$CHECK_FORMAT" == "ON" ]]; then
+      if [[ "$TRAVIS_PULL_REQUEST" != "false" ]]; then
+        # Run the clang format check only for pull request builds because the
+        # master branch is needed to do the git diff.
+        echo "Checking clang-format between TRAVIS_BRANCH=$TRAVIS_BRANCH and TRAVIS_PULL_REQUEST_BRANCH=$TRAVIS_PULL_REQUEST_BRANCH"
+        ./scripts/check_code_format.sh
+      else
+        echo "Skipping clang-format check since this is not a pull request."
+      fi
+    fi
+  - |
+    if [[ "$CHECK_COMMIT_FORMAT" == "ON" ]]; then
+      if [[ "$TRAVIS_PULL_REQUEST" != "false" ]]; then
+        echo "Checking commit message formats:  See CONTRIBUTING.md"
+        ./scripts/check_commit_message_format.sh
+      fi
+    fi
+  - set +e
+
+notifications:
+  email:
+    recipients:
+      - lenny@lunarg.com
+    on_success: change
+    on_failure: always
diff --git a/src/third_party/vulkan-loader/src/BUILD.gn b/src/third_party/vulkan-loader/src/BUILD.gn
new file mode 100644
index 0000000..14a55ad
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/BUILD.gn
@@ -0,0 +1,136 @@
+# Copyright (C) 2018-2019 The ANGLE Project Authors.
+# Copyright (C) 2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build_overrides/vulkan_loader.gni")
+
+# Fuchsia has non-upstream changes to the vulkan loader, so we don't want
+# to build it from upstream sources.
+assert(!is_fuchsia)
+
+if (!is_android) {
+  vulkan_undefine_configs = []
+}
+if (is_win) {
+  vulkan_undefine_configs += [
+    "//build/config/win:nominmax",
+    "//build/config/win:unicode",
+  ]
+}
+
+config("vulkan_internal_config") {
+  defines = [ "VULKAN_NON_CMAKE_BUILD" ]
+  if (is_clang || !is_win) {
+    cflags = [ "-Wno-unused-function" ]
+  }
+  if (is_linux || is_mac) {
+    defines += [
+      "SYSCONFDIR=\"/etc\"",
+      "FALLBACK_CONFIG_DIRS=\"/etc/xdg\"",
+      "FALLBACK_DATA_DIRS=\"/usr/local/share:/usr/share\"",
+    ]
+  }
+}
+
+# Vulkan loader
+# -------------
+
+config("vulkan_loader_config") {
+  include_dirs = [
+    "loader/generated",
+    "loader",
+  ]
+  defines = [
+    "API_NAME=\"Vulkan\"",
+    "USE_UNSAFE_FILE_SEARCH=1"
+  ]
+
+  if (is_win) {
+    cflags = [ "/wd4201" ]
+  }
+  if (is_linux) {
+    # assume secure_getenv() is available
+    defines += [ "HAVE_SECURE_GETENV" ]
+  }
+}
+
+if (!is_android) {
+  if (vulkan_loader_shared) {
+    library_type = "shared_library"
+  } else {
+    library_type = "static_library"
+  }
+
+  target(library_type, "libvulkan") {
+    sources = [
+      "loader/asm_offset.c",
+      "loader/cJSON.c",
+      "loader/cJSON.h",
+      "loader/debug_utils.c",
+      "loader/debug_utils.h",
+      "loader/dev_ext_trampoline.c",
+      "loader/extension_manual.c",
+      "loader/extension_manual.h",
+      "loader/gpa_helper.h",
+      "loader/loader.c",
+      "loader/loader.h",
+      "loader/murmurhash.c",
+      "loader/murmurhash.h",
+      "loader/phys_dev_ext.c",
+      "loader/trampoline.c",
+
+      # TODO(jmadill): Use assembler where available.
+      "loader/unknown_ext_chain.c",
+      "loader/vk_loader_platform.h",
+      "loader/wsi.c",
+      "loader/wsi.h",
+    ]
+    if (is_win) {
+      sources += [
+        "loader/dirent_on_windows.c",
+        "loader/dirent_on_windows.h",
+        "loader/dxgi_loader.c",
+        "loader/dxgi_loader.h",
+      ]
+      if (!is_clang) {
+        cflags = [
+          "/wd4054",  # Type cast from function pointer
+          "/wd4055",  # Type cast from data pointer
+          "/wd4100",  # Unreferenced formal parameter
+          "/wd4152",  # Nonstandard extension used (pointer conversion)
+          "/wd4201",  # Nonstandard extension used: nameless struct/union
+          "/wd4214",  # Nonstandard extension used: bit field types other than int
+          "/wd4232",  # Nonstandard extension used: address of dllimport is not static
+          "/wd4305",  # Type cast truncation
+          "/wd4706",  # Assignment within conditional expression
+          "/wd4996",  # Unsafe stdlib function
+        ]
+      }
+      if (is_clang) {
+          cflags = [ "-Wno-incompatible-pointer-types" ]
+      }
+    }
+    if (is_mac) {
+      libs = [ "CoreFoundation.framework" ]
+    }
+    public_deps = [
+      "$vulkan_headers_dir:vulkan_headers",
+    ]
+    configs -= [ "//build/config/compiler:chromium_code" ]
+    configs += [ "//build/config/compiler:no_chromium_code" ]
+    configs += [ ":vulkan_internal_config" ]
+    public_configs = [ ":vulkan_loader_config" ]
+    configs -= vulkan_undefine_configs
+  }
+}
diff --git a/src/third_party/vulkan-loader/src/BUILD.md b/src/third_party/vulkan-loader/src/BUILD.md
new file mode 100644
index 0000000..5fc7075
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/BUILD.md
@@ -0,0 +1,693 @@
+# Build Instructions
+
+Instructions for building this repository on Linux, Windows, and MacOS.
+
+## Index
+
+1. [Contributing](#contributing-to-the-repository)
+1. [Repository Content](#repository-content)
+1. [Repository Set-Up](#repository-set-up)
+1. [Windows Build](#building-on-windows)
+1. [Linux Build](#building-on-linux)
+1. [MacOS build](#building-on-macos)
+
+## Contributing to the Repository
+
+If you intend to contribute, the preferred work flow is for you to develop
+your contribution in a fork of this repository in your GitHub account and then
+submit a pull request. Please see the [CONTRIBUTING.md](CONTRIBUTING.md) file
+in this repository for more details.
+
+## Repository Content
+
+This repository contains the source code necessary to build the desktop Vulkan
+loader and its tests.
+
+### Installed Files
+
+The `install` target installs the following files under the directory
+indicated by *install_dir*:
+
+- *install_dir*`/lib` : The Vulkan loader library
+- *install_dir*`/bin` : The Vulkan loader library DLL (Windows)
+
+The `uninstall` target can be used to remove the above files from the install
+directory.
+
+## Repository Set-Up
+
+### Display Drivers
+
+This repository does not contain a Vulkan-capable driver. You will need to
+obtain and install a Vulkan driver from your graphics hardware vendor or from
+some other suitable source if you intend to run Vulkan applications.
+
+### Download the Repository
+
+To create your local git repository:
+
+    git clone https://github.com/KhronosGroup/Vulkan-Loader.git
+
+### Repository Dependencies
+
+This repository attempts to resolve some of its dependencies by using
+components found from the following places, in this order:
+
+1. CMake or Environment variable overrides (e.g., -DVULKAN_HEADERS_INSTALL_DIR)
+1. LunarG Vulkan SDK, located by the `VULKAN_SDK` environment variable
+1. System-installed packages, mostly applicable on Linux
+
+Dependencies that cannot be resolved by the SDK or installed packages must be
+resolved with the "install directory" override and are listed below. The
+"install directory" override can also be used to force the use of a specific
+version of that dependency.
+
+#### Vulkan-Headers
+
+This repository has a required dependency on the [Vulkan Headers repository](https://github.com/KhronosGroup/Vulkan-Headers).
+You must clone the headers repository and build its `install` target before
+building this repository. The Vulkan-Headers repository is required because it
+contains the Vulkan API definition files (registry) that are required to build
+the loader. You must also take note of the headers install directory and pass
+it on the CMake command line for building this repository, as described below.
+
+#### Windows Driver Kit (WDK)
+
+On Windows builds, the loader needs to have a WDK installed. Microsoft provides
+[WDK releases](https://docs.microsoft.com/en-us/windows-hardware/drivers/download-the-wdk),
+including several old releases. The installed WDK must be at least version 1709.
+Take note of the fact that the latest WDK release generally requires the latest
+version of Visual Studio. It may be necessary to use an older WDK with an older
+Visual Studio.
+
+#### Google Test
+
+The loader tests depend on the [Google Test](https://github.com/google/googletest)
+framework and do not build unless this framework is downloaded into the
+repository's `external` directory.
+
+To obtain the framework, change your current directory to the top of your
+Vulkan-Loader repository and run:
+
+    git clone https://github.com/google/googletest.git external/googletest
+    cd external/googletest
+    git checkout tags/release-1.8.1
+
+before configuring your build with CMake.
+
+If you do not need the loader tests, there is no need to download this
+framework.
+
+### Build and Install Directories
+
+A common convention is to place the `build` directory in the top directory of
+the repository and place the `install` directory as a child of the `build`
+directory. The remainder of these instructions follow this convention,
+although you can place these directories in any location.
+
+### Building Dependent Repositories with Known-Good Revisions
+
+There is a Python utility script, `scripts/update_deps.py`, that you can use
+to gather and build the dependent repositories mentioned above. This program
+also uses information stored in the `scripts/known-good.json` file to checkout
+dependent repository revisions that are known to be compatible with the
+revision of this repository that you currently have checked out.
+
+Here is a usage example for this repository:
+
+    git clone git@github.com:KhronosGroup/Vulkan-Loader.git
+    cd Vulkan-Loader
+    mkdir build
+    cd build
+    ../scripts/update_deps.py
+    cmake -C helper.cmake ..
+    cmake --build .
+
+#### Notes
+
+- You may need to adjust some of the CMake options based on your platform. See
+  the platform-specific sections later in this document.
+- The `update_deps.py` script fetches and builds the dependent repositories in
+  the current directory when it is invoked. In this case, they are built in
+  the `build` directory.
+- The `build` directory is also being used to build this
+  (Vulkan-Loader) repository. But there shouldn't be any conflicts
+  inside the `build` directory between the dependent repositories and the
+  build files for this repository.
+- The `--dir` option for `update_deps.py` can be used to relocate the
+  dependent repositories to another arbitrary directory using an absolute or
+  relative path.
+- The `update_deps.py` script generates a file named `helper.cmake` and places
+  it in the same directory as the dependent repositories (`build` in this
+  case). This file contains CMake commands to set the CMake `*_INSTALL_DIR`
+  variables that are used to point to the install artifacts of the dependent
+  repositories. You can use this file with the `cmake -C` option to set these
+  variables when you generate your build files with CMake. This lets you avoid
+  entering several `*_INSTALL_DIR` variable settings on the CMake command line.
+- If using "MINGW" (Git For Windows), you may wish to run
+  `winpty update_deps.py` in order to avoid buffering all of the script's
+  "print" output until the end and to retain the ability to interrupt script
+  execution.
+- Please use `update_deps.py --help` to list additional options and read the
+  internal documentation in `update_deps.py` for further information.
+
+### Generated source code
+
+This repository contains generated source code in the `loader/generated`
+directory which is not intended to be modified directly. Instead, changes should be
+made to the corresponding generator in the `scripts` directory. The source files can
+then be regenerated using `scripts/generate_source.py`:
+
+    python3 scripts/generate_source.py PATH_TO_VULKAN_HEADERS_REGISTRY_DIR
+
+A helper CMake target `VulkanLoader_generated_source` is also provided to simplify
+the invocation of `scripts/generate_source.py` from the build directory:
+
+    cmake --build . --target VulkanLoader_generated_source
+
+### Build Options
+
+When generating native platform build files through CMake, several options can
+be specified to customize the build. Some of the options are binary on/off
+options, while others take a string as input. The following is a table of all
+on/off options currently supported by this repository:
+
+| Option | Platform | Default | Description |
+| ------ | -------- | ------- | ----------- |
+| BUILD_LOADER | All | `ON` | Controls whether or not the loader is built. Setting this to `OFF` will allow building the tests against a loader that is installed to the system. |
+| BUILD_TESTS | All | `???` | Controls whether or not the loader tests are built. The default is `ON` when the Google Test repository is cloned into the `external` directory.  Otherwise, the default is `OFF`. |
+| BUILD_WSI_XCB_SUPPORT | Linux | `ON` | Build the loader with the XCB entry points enabled. Without this, the XCB headers should not be needed, but the extension `VK_KHR_xcb_surface` won't be available. |
+| BUILD_WSI_XLIB_SUPPORT | Linux | `ON` | Build the loader with the Xlib entry points enabled. Without this, the X11 headers should not be needed, but the extension `VK_KHR_xlib_surface` won't be available. |
+| BUILD_WSI_WAYLAND_SUPPORT | Linux | `ON` | Build the loader with the Wayland entry points enabled. Without this, the Wayland headers should not be needed, but the extension `VK_KHR_wayland_surface` won't be available. |
+| ENABLE_STATIC_LOADER | Windows | `OFF` | By default, the loader is built as a dynamic library. This allows it to be built as a static library, instead. |
+| ENABLE_WIN10_ONECORE | Windows | `OFF` | Link the loader to the [OneCore](https://msdn.microsoft.com/en-us/library/windows/desktop/mt654039.aspx) umbrella library, instead of the standard Win32 ones. |
+| USE_CCACHE | Linux | `OFF` | Enable caching with the CCache program. |
+
+The following is a table of all string options currently supported by this repository:
+
+| Option | Platform | Default | Description |
+| ------ | -------- | ------- | ----------- |
+| CMAKE_OSX_DEPLOYMENT_TARGET | MacOS | `10.12` | The minimum version of MacOS for loader deployment. |
+| FALLBACK_CONFIG_DIRS | Linux/MacOS | `/etc/xdg` | Configuration path(s) to use instead of `XDG_CONFIG_DIRS` if that environment variable is unavailable. The default setting is freedesktop compliant. |
+| FALLBACK_DATA_DIRS | Linux/MacOS | `/usr/local/share:/usr/share` | Configuration path(s) to use instead of `XDG_DATA_DIRS` if that environment variable is unavailable. The default setting is freedesktop compliant. |
+
+These variables should be set using the `-D` option when invoking
+CMake to generate the native platform files.
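+
+For example, a single (illustrative) invocation can combine on/off and string
+options; adjust the values for your own environment:
+
+    cmake -DVULKAN_HEADERS_INSTALL_DIR=absolute_path_to_install_dir \
+          -DBUILD_TESTS=OFF \
+          -DFALLBACK_CONFIG_DIRS=/etc/xdg ..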
+
+## Building On Windows
+
+### Windows Development Environment Requirements
+
+- Windows
+  - Any Personal Computer version supported by Microsoft
+- Microsoft [Visual Studio](https://www.visualstudio.com/)
+  - Versions
+    - [2015](https://www.visualstudio.com/vs/older-downloads/)
+    - [2017](https://www.visualstudio.com/vs/older-downloads/)
+    - [2019](https://www.visualstudio.com/vs/downloads/)
+  - The Community Edition of each of the above versions is sufficient, as
+    well as any more capable edition.
+- [Windows Driver Kit](https://docs.microsoft.com/en-us/windows-hardware/drivers/download-the-wdk) 1803 or later
+- [CMake 3.10.2](https://cmake.org/files/v3.10/cmake-3.10.2-win64-x64.zip) is recommended.
+  - Use the installer option to add CMake to the system PATH
+- Git Client Support
+  - [Git for Windows](http://git-scm.com/download/win) is a popular solution
+    for Windows
+  - Some IDEs (e.g., [Visual Studio](https://www.visualstudio.com/),
+    [GitHub Desktop](https://desktop.github.com/)) have integrated
+    Git client support
+
+### Windows Build - Microsoft Visual Studio
+
+The general approach is to run CMake to generate the Visual Studio project
+files. Then either run CMake with the `--build` option to build from the
+command line or use the Visual Studio IDE to open the generated solution and
+work with the solution interactively.
+
+#### Windows Quick Start
+
+Open a developer command prompt and enter:
+
+    cd Vulkan-Loader
+    mkdir build
+    cd build
+    cmake -A x64 -DVULKAN_HEADERS_INSTALL_DIR=absolute_path_to_install_dir ..
+    cmake --build .
+
+The above commands instruct CMake to find and use the default Visual Studio
+installation to generate a Visual Studio solution and projects for the x64
+architecture. The second CMake command builds the Debug (default)
+configuration of the solution.
+
+Note that if you do not wish to use a developer command prompt, you may
+run either `vcvars64.bat` or `vcvars32.bat` to set the required environment
+variables. You may also define a `WDK_FULL_PATH` variable when first invoking CMake
+like:
+
+    cmake -A x64 -DVULKAN_HEADERS_INSTALL_DIR=absolute_path_to_install_dir -DWDK_BASE="C:/Program Files (x86)/Windows Kits/10/Include/10.0.17763.0" ..
+
+See below for the details.
+
+#### Use `CMake` to Create the Visual Studio Project Files
+
+Change your current directory to the top of the cloned repository directory,
+create a build directory and generate the Visual Studio project files:
+
+    cd Vulkan-Loader
+    mkdir build
+    cd build
+    cmake -A x64 -DVULKAN_HEADERS_INSTALL_DIR=absolute_path_to_install_dir ..
+
+> Note: The `..` parameter tells `cmake` the location of the top of the
+> repository. If you place your build directory someplace else, you'll need to
+> specify the location of the repository top differently.
+
+The `-A` option is used to select either the "Win32" or "x64" architecture.
+
+If a generator for a specific version of Visual Studio is required, you can
+specify it for Visual Studio 2015, for example, with:
+
+    64-bit: -G "Visual Studio 14 2015 Win64"
+    32-bit: -G "Visual Studio 14 2015"
+
+See this [list](#cmake-visual-studio-generators) of other possible generators
+for Visual Studio.
+
+When generating the project files, the absolute path to a Vulkan-Headers
+install directory must be provided. This can be done by setting the
+`VULKAN_HEADERS_INSTALL_DIR` environment variable or by setting the
+`VULKAN_HEADERS_INSTALL_DIR` CMake variable with the `-D` CMake option. In
+either case, the variable should point to the installation directory of a
+Vulkan-Headers repository built with the install target.
+
+The above steps create a Windows solution file named `Vulkan-Loader.sln` in
+the build directory.
+
+At this point, you can build the solution from the command line or open the
+generated solution with Visual Studio.
+
+#### Build the Solution From the Command Line
+
+While still in the build directory:
+
+    cmake --build .
+
+to build the Debug configuration (the default), or:
+
+    cmake --build . --config Release
+
+to make a Release build.
+
+#### Build the Solution With Visual Studio
+
+Launch Visual Studio and open the "Vulkan-Loader.sln" solution file in the
+build folder. You may select "Debug" or "Release" from the Solution
+Configurations drop-down list. Start a build by selecting the Build->Build
+Solution menu item.
+
+#### Windows Install Target
+
+The CMake project also generates an "install" target that you can use to copy
+the primary build artifacts to a specific location using a "bin, include, lib"
+style directory structure. This may be useful for collecting the artifacts and
+providing them to another project that is dependent on them.
+
+The default location is `$CMAKE_BINARY_DIR\install`, but can be changed with
+the `CMAKE_INSTALL_PREFIX` variable when first generating the project build
+files with CMake.
+
+You can build the install target from the command line with:
+
+    cmake --build . --config Release --target install
+
+or build the `INSTALL` target from the Visual Studio solution explorer.
+
+### Windows Tests
+
+The Vulkan-Loader repository contains some simple unit tests for the loader
+but no other test clients.
+
+To run the loader test script, open a Powershell Console, change to the
+`build\tests` directory, and run:
+
+For Release builds:
+
+    .\run_all_tests.ps1
+
+For Debug builds:
+
+    .\run_all_tests.ps1 -Debug
+
+This script will run the following tests:
+
+- `vk_loader_validation_tests`:
+  Vulkan loader handle wrapping, allocation callback, and loader/layer interface tests
+
+You can also change to either `build\tests\Debug` or `build\tests\Release`
+(depending on which one you built) and run the executable tests (`*.exe`)
+files from there.
+
+### Windows Notes
+
+#### CMake Visual Studio Generators
+
+The chosen generator should match one of the Visual Studio versions that you
+have installed. Generator strings that correspond to versions of Visual Studio
+include:
+
+| Build Platform               | 64-bit Generator              | 32-bit Generator        |
+|------------------------------|-------------------------------|-------------------------|
+| Microsoft Visual Studio 2015 | "Visual Studio 14 2015 Win64" | "Visual Studio 14 2015" |
+| Microsoft Visual Studio 2017 | "Visual Studio 15 2017 Win64" | "Visual Studio 15 2017" |
+| Microsoft Visual Studio 2019 | "Visual Studio 16 2019"       | "Visual Studio 16 2019" |
+
+Note that with Visual Studio 2019, the architecture will need to be specified with the `-A`
+flag for 64-bit builds.
+
+#### Using The Vulkan Loader Library in this Repository on Windows
+
+Vulkan programs must be able to find and use the Vulkan loader
+(`vulkan-1.dll`) library as well as any other libraries the program requires.
+One convenient way to do this is to copy the required libraries into the same
+directory as the program. The projects in this solution copy the Vulkan loader
+library and the "googletest" libraries to the `build\tests\Debug` or the
+`build\tests\Release` directory, which is where the
+`vk_loader_validation_test.exe` executable is found, depending on what
+configuration you built. (The loader validation tests use the "googletest"
+testing framework.)
+
+Other techniques include placing the library in a system folder
+(C:\Windows\System32) or in a directory that appears in the `PATH` environment
+variable.
+
+See the `LoaderAndLayerInterface` document in the `loader` folder in this
+repository for more information on how the loader finds driver libraries and
+layer libraries. The document also describes both how ICDs and layers should
+be packaged, and how developers can point to ICDs and layers within their
+builds.
+
+## Building On Linux
+
+### Linux Development Environment Requirements
+
+This repository has been built and tested on the two most recent Ubuntu LTS
+versions. Currently, the oldest supported version is Ubuntu 16.04, meaning
+that the minimum officially supported C++11 compiler version is GCC 5.4.0,
+although earlier versions may work. It should be straightforward to adapt this
+repository to other Linux distributions.
+
+[CMake 3.10.2](https://cmake.org/files/v3.10/cmake-3.10.2-Linux-x86_64.tar.gz) is recommended.
+
+#### Required Package List
+
+    sudo apt-get install git build-essential libx11-xcb-dev \
+        libxkbcommon-dev libwayland-dev libxrandr-dev
+
+### Linux Build
+
+The general approach is to run CMake to generate make files. Then either run
+CMake with the `--build` option or `make` to build from the command line.
+
+#### Linux Quick Start
+
+    cd Vulkan-Loader
+    mkdir build
+    cd build
+    cmake -DVULKAN_HEADERS_INSTALL_DIR=absolute_path_to_install_dir ..
+    make
+
+See below for the details.
+
+#### Use CMake to Create the Make Files
+
+Change your current directory to the top of the cloned repository directory,
+create a build directory and generate the make files.
+
+    cd Vulkan-Loader
+    mkdir build
+    cd build
+    cmake -DCMAKE_BUILD_TYPE=Debug \
+          -DVULKAN_HEADERS_INSTALL_DIR=absolute_path_to_install_dir \
+          -DCMAKE_INSTALL_PREFIX=install ..
+
+> Note: The `..` parameter tells `cmake` the location of the top of the
+> repository. If you place your `build` directory someplace else, you'll need
+> to specify the location of the repository top differently.
+
+Use `-DCMAKE_BUILD_TYPE` to specify a Debug or Release build.
+
+When generating the project files, the absolute path to a Vulkan-Headers
+install directory must be provided. This can be done by setting the
+`VULKAN_HEADERS_INSTALL_DIR` environment variable or by setting the
+`VULKAN_HEADERS_INSTALL_DIR` CMake variable with the `-D` CMake option. In
+either case, the variable should point to the installation directory of a
+Vulkan-Headers repository built with the install target.
+
+> Note: For Linux, the default value for `CMAKE_INSTALL_PREFIX` is
+> `/usr/local`, which would be used if you do not specify
+> `CMAKE_INSTALL_PREFIX`. In this case, you may need to use `sudo` to install
+> to system directories later when you run `make install`.
+
+#### Build the Project
+
+You can just run `make` to begin the build.
+
+To speed up the build on a multi-core machine, use the `-j` option for `make`
+to specify the number of cores to use for the build. For example:
+
+    make -j4
+
+You can also use
+
+    cmake --build .
+
+If your build system supports ccache, you can enable that via CMake option
+`-DUSE_CCACHE=On`
+
+### Linux Notes
+
+#### Using The Vulkan Loader Library in this Repository on Linux
+
+The `vk_loader_validation_tests` executable is linked with an RPATH setting to
+allow it to find the Vulkan loader library in the repository's build
+directory. This allows the test executable to run and find this Vulkan loader
+library without installing the loader library to a directory searched by the
+system loader or in the `LD_LIBRARY_PATH`.
+
+If you want to test a Vulkan application that is not built within this
+repository with the loader you just built from this repository, you can direct
+the application to load it from your build directory:
+
+    export LD_LIBRARY_PATH=<path to your repository root>/build/loader
+
+#### WSI Support Build Options
+
+By default, the Vulkan Loader is built with support for the Vulkan-defined WSI
+display servers: Xcb, Xlib, and Wayland. It is recommended to build the
+repository components with support for these display servers to maximize their
+usability across Linux platforms. If it is necessary to build these modules
+without support for one of the display servers, the appropriate CMake option
+of the form `BUILD_WSI_xxx_SUPPORT` can be set to `OFF`.
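+
+For example, to build without Wayland support (an illustrative invocation; keep
+any other options you normally pass):
+
+    cmake -DVULKAN_HEADERS_INSTALL_DIR=absolute_path_to_install_dir \
+          -DBUILD_WSI_WAYLAND_SUPPORT=OFF ..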
+
+#### Linux Install to System Directories
+
+Installing the files resulting from your build to the systems directories is
+optional since environment variables can usually be used instead to locate the
+binaries. There are also risks with interfering with binaries installed by
+packages. If you are certain that you would like to install your binaries to
+system directories, you can proceed with these instructions.
+
+Assuming that you've built the code as described above and the current
+directory is still `build`, you can execute:
+
+    sudo make install
+
+This command installs files to `/usr/local` if no `CMAKE_INSTALL_PREFIX` is
+specified when creating the build files with CMake:
+
+- `/usr/local/lib`:  Vulkan loader library and package config files
+
+You may need to run `ldconfig` in order to refresh the system loader search
+cache on some Linux systems.
+
+You can further customize the installation location by setting additional
+CMake variables to override their defaults. For example, if you would like to
+install to `/tmp/build` instead of `/usr/local`, on your CMake command line
+specify:
+
+    -DCMAKE_INSTALL_PREFIX=/tmp/build
+
+Then run `make install` as before. The install step places the files in
+`/tmp/build`. This may be useful for collecting the artifacts and providing
+them to another project that is dependent on them.
+
+Using the `CMAKE_INSTALL_PREFIX` to customize the install location also
+modifies the loader search paths to include searching for layers in the
+specified install location. In this example, setting `CMAKE_INSTALL_PREFIX` to
+`/tmp/build` causes the loader to search
+`/tmp/build/etc/vulkan/explicit_layer.d` and
+`/tmp/build/share/vulkan/explicit_layer.d` for the layer JSON files. The
+loader also searches the "standard" system locations of
+`/etc/vulkan/explicit_layer.d` and `/usr/share/vulkan/explicit_layer.d` after
+searching the two locations under `/tmp/build`.
+
+You can further customize the installation directories by using the CMake
+variables `CMAKE_INSTALL_SYSCONFDIR` to rename the `etc` directory and
+`CMAKE_INSTALL_DATADIR` to rename the `share` directory.
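+
+For instance (hypothetical directory names, shown only to illustrate these
+variables):
+
+    cmake -DCMAKE_INSTALL_PREFIX=/tmp/build \
+          -DCMAKE_INSTALL_SYSCONFDIR=config \
+          -DCMAKE_INSTALL_DATADIR=data ..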
+
+See the CMake documentation for more details on using these variables to
+further customize your installation.
+
+Also see the `LoaderAndLayerInterface` document in the `loader` folder in this
+repository for more information about loader operation.
+
+Note that some executables in this repository (e.g.,
+`vk_loader_validation_tests`) use the RPATH linker directive to load the
+Vulkan loader from the build directory, `build` in this example. This means
+that even after installing the loader to the system directories, these
+executables still use the loader from the build directory.
+
+#### Linux Uninstall
+
+To uninstall the files from the system directories, you can execute:
+
+    sudo make uninstall
+
+#### Linux Tests
+
+The Vulkan-Loader repository contains some simple unit tests for the loader
+but no other test clients.
+
+To run the loader test script, change to the `build/tests` directory, and run:
+
+    ./run_all_tests.sh
+
+This script will run the following tests:
+
+- `vk_loader_validation_tests`: Vulkan loader handle wrapping, allocation
+  callback, and loader/layer interface tests
+
+#### Linux 32-bit support
+
+Usage of this repository's contents in 32-bit Linux environments is not
+officially supported. However, since this repository is supported on 32-bit
+Windows, these modules should generally work on 32-bit Linux.
+
+Here are some notes for building 32-bit targets on a 64-bit Ubuntu "reference"
+platform:
+
+If not already installed, install the following 32-bit development libraries:
+
+`gcc-multilib g++-multilib libx11-dev:i386`
+
+This list may vary depending on your distribution and which windowing systems
+you are building for.
+
+Set up your environment for building 32-bit targets:
+
+    export ASFLAGS=--32
+    export CFLAGS=-m32
+    export CXXFLAGS=-m32
+    export PKG_CONFIG_LIBDIR=/usr/lib/i386-linux-gnu
+
+Again, your PKG_CONFIG configuration may be different, depending on your
+distribution.
+
+Finally, rebuild the repository using `cmake` and `make`, as explained above.
+
+## Building on MacOS
+
+### MacOS Development Environment Requirements
+
+Tested on OSX version 10.12.6
+
+Setup Homebrew and components
+
+- Follow instructions on [brew.sh](http://brew.sh) to get Homebrew installed.
+
+      /usr/bin/ruby -e "$(curl -fsSL \
+          https://raw.githubusercontent.com/Homebrew/install/master/install)"
+
+- Ensure Homebrew is at the beginning of your PATH:
+
+      export PATH=/usr/local/bin:$PATH
+
+- Add packages with the following (may need refinement)
+
+      brew install python python3 git
+
+### Clone the Repository
+
+Clone the Vulkan-Loader repository:
+
+    git clone https://github.com/KhronosGroup/Vulkan-Loader.git
+
+### MacOS build
+
+[CMake 3.10.2](https://cmake.org/files/v3.10/cmake-3.10.2-Darwin-x86_64.tar.gz) is recommended.
+
+#### CMake Generators
+
+This repository uses CMake to generate build or project files that are then
+used to build the repository. The CMake generators explicitly supported in
+this repository are:
+
+- Unix Makefiles
+- Xcode
+
+#### Building with the Unix Makefiles Generator
+
+This generator is the default generator.
+
+When generating the project files, the absolute path to a Vulkan-Headers
+install directory must be provided. This can be done by setting the
+`VULKAN_HEADERS_INSTALL_DIR` environment variable or by setting the
+`VULKAN_HEADERS_INSTALL_DIR` CMake variable with the `-D` CMake option. In
+either case, the variable should point to the installation directory of a
+Vulkan-Headers repository built with the install target.
+
+    mkdir build
+    cd build
+    cmake -DVULKAN_HEADERS_INSTALL_DIR=absolute_path_to_install_dir -DCMAKE_BUILD_TYPE=Debug ..
+    make
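+
+Alternatively, if you prefer the environment-variable form described above, the
+following should be equivalent (same placeholder install path):
+
+    export VULKAN_HEADERS_INSTALL_DIR=absolute_path_to_install_dir
+    mkdir build
+    cd build
+    cmake -DCMAKE_BUILD_TYPE=Debug ..
+    make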
+
+To speed up the build on a multi-core machine, use the `-j` option for `make`
+to specify the number of cores to use for the build. For example:
+
+    make -j4
+
+#### Building with the Xcode Generator
+
+To create and open an Xcode project:
+
+    mkdir build-xcode
+    cd build-xcode
+    cmake -GXcode ..
+    open Vulkan-Loader.xcodeproj
+
+Within Xcode, you can select Debug or Release builds in the project's Build
+Settings.
+
+### Using the new macOS loader
+
+If you want to test a Vulkan application with the loader you just built, you
+can direct the application to load it from your build directory:
+
+    export DYLD_LIBRARY_PATH=<path to your repository>/build/loader
+
+### MacOS Tests
+
+The Vulkan-Loader repository contains some simple unit tests for the loader
+but no other test clients.
+
+Before you run these tests, you will need to clone and build the
+[MoltenVK](https://github.com/KhronosGroup/MoltenVK) repository.
+
+You will also need to direct your new loader to the MoltenVK ICD:
+
+    export VK_ICD_FILENAMES=<path to MoltenVK repository>/Package/Latest/MoltenVK/macOS/MoltenVK_icd.json
+
+To run the loader test script, change to the `build/tests` directory in your
+Vulkan-Loader repository, and run:
+
+    ./vk_loader_validation_tests
diff --git a/src/third_party/vulkan-loader/src/CMakeLists.txt b/src/third_party/vulkan-loader/src/CMakeLists.txt
new file mode 100644
index 0000000..d659ef8
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/CMakeLists.txt
@@ -0,0 +1,221 @@
+# ~~~
+# Copyright (c) 2014-2019 Valve Corporation
+# Copyright (c) 2014-2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ~~~
+
+cmake_minimum_required(VERSION 3.10.2)
+
+# Apple: Must be set before enable_language() or project() as it may influence configuration of the toolchain and flags.
+set(CMAKE_OSX_DEPLOYMENT_TARGET "10.12" CACHE STRING "Minimum OS X deployment version")
+
+project(Vulkan-Loader)
+
+enable_testing()
+
+set_property(DIRECTORY APPEND PROPERTY COMPILE_DEFINITIONS API_NAME="Vulkan")
+
+set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_SOURCE_DIR}/cmake")
+find_package(PythonInterp 3 QUIET)
+
+if (TARGET Vulkan::Headers)
+    message(STATUS "Using Vulkan headers from Vulkan::Headers target")
+    get_target_property(VulkanHeaders_INCLUDE_DIRS Vulkan::Headers INTERFACE_INCLUDE_DIRECTORIES)
+    get_target_property(VulkanRegistry_DIR Vulkan::Registry INTERFACE_INCLUDE_DIRECTORIES)
+else()
+    find_package(VulkanHeaders)
+    if(NOT ${VulkanHeaders_FOUND})
+        message(FATAL_ERROR "Could not find Vulkan headers path. This can be fixed by setting VULKAN_HEADERS_INSTALL_DIR to an "
+                            "installation of the Vulkan-Headers repository.")
+    endif()
+    if(NOT ${VulkanRegistry_FOUND})
+        message(FATAL_ERROR "Could not find Vulkan registry path. This can be fixed by setting VULKAN_HEADERS_INSTALL_DIR to an "
+                            "installation of the Vulkan-Headers repository.")
+    endif()
+
+    # set up the Vulkan::Headers target for consistency
+    add_library(vulkan-headers INTERFACE)
+    target_include_directories(vulkan-headers INTERFACE "${VulkanHeaders_INCLUDE_DIRS}")
+    add_library(Vulkan::Headers ALIAS vulkan-headers)
+endif()
+
+option(USE_CCACHE "Use ccache" OFF)
+if(USE_CCACHE)
+    find_program(CCACHE_FOUND ccache)
+    if(CCACHE_FOUND)
+        set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ccache)
+    endif()
+endif()
+
+include(GNUInstallDirs)
+
+if(UNIX AND NOT APPLE) # i.e.: Linux
+    include(FindPkgConfig)
+endif()
+
+if(APPLE)
+    # CMake versions 3 or later need CMAKE_MACOSX_RPATH defined. This avoids the CMP0042 policy message.
+    set(CMAKE_MACOSX_RPATH 1)
+endif()
+
+if(WIN32 AND CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT)
+    # Windows: if install locations not set by user, set install prefix to "<build_dir>\install".
+    set(CMAKE_INSTALL_PREFIX "${CMAKE_BINARY_DIR}/install" CACHE PATH "default install path" FORCE)
+endif()
+
+# Enable IDE GUI folders.  "Helper targets" that don't have interesting source code should set their FOLDER property to this
+set_property(GLOBAL PROPERTY USE_FOLDERS ON)
+set(LOADER_HELPER_FOLDER "Helper Targets")
+
+if(UNIX)
+    set(
+        FALLBACK_CONFIG_DIRS "/etc/xdg"
+        CACHE
+            STRING
+            "Search path to use when XDG_CONFIG_DIRS is unset or empty or the current process is SUID/SGID. Default is freedesktop compliant."
+        )
+    set(
+        FALLBACK_DATA_DIRS "/usr/local/share:/usr/share"
+        CACHE
+            STRING
+            "Search path to use when XDG_DATA_DIRS is unset or empty or the current process is SUID/SGID. Default is freedesktop compliant."
+        )
+    set(
+        SYSCONFDIR ""
+        CACHE
+            STRING
+            "System-wide search directory. If not set or empty, CMAKE_INSTALL_FULL_SYSCONFDIR and /etc are used."
+        )
+endif()
+
+if(UNIX AND NOT APPLE) # i.e.: Linux
+    option(BUILD_WSI_XCB_SUPPORT "Build XCB WSI support" ON)
+    option(BUILD_WSI_XLIB_SUPPORT "Build Xlib WSI support" ON)
+    option(BUILD_WSI_WAYLAND_SUPPORT "Build Wayland WSI support" ON)
+
+    if(BUILD_WSI_XCB_SUPPORT)
+        find_package(XCB REQUIRED)
+    endif()
+
+    if(BUILD_WSI_XLIB_SUPPORT)
+        find_package(X11 REQUIRED)
+    endif()
+
+    if(BUILD_WSI_WAYLAND_SUPPORT)
+        find_package(Wayland REQUIRED)
+        include_directories(${WAYLAND_CLIENT_INCLUDE_DIR})
+    endif()
+endif()
+
+if(WIN32)
+    option(ENABLE_WIN10_ONECORE "Link the loader with OneCore umbrella libraries" OFF)
+    option(ENABLE_STATIC_LOADER "Build the loader as a static library" OFF)
+endif()
+
+option(BUILD_LOADER "Build loader" ON)
+
+if(IS_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/external/googletest)
+    option(BUILD_TESTS "Build Tests" ON)
+else()
+    option(BUILD_TESTS "Build Tests" OFF)
+endif()
+
+if(CMAKE_COMPILER_IS_GNUCC OR CMAKE_C_COMPILER_ID MATCHES "Clang")
+    set(COMMON_COMPILE_FLAGS "-Wall -Wextra -Wno-unused-parameter -Wno-missing-field-initializers")
+    set(COMMON_COMPILE_FLAGS "${COMMON_COMPILE_FLAGS} -fno-strict-aliasing -fno-builtin-memcmp")
+
+    # For GCC version 7.1 or greater, we need to disable the implicit fallthrough warning since there's no consistent way to satisfy
+    # all compilers until they all accept the C++17 standard
+    if(CMAKE_COMPILER_IS_GNUCC)
+        set(COMMON_COMPILE_FLAGS "${COMMON_COMPILE_FLAGS} -Wno-stringop-truncation -Wno-stringop-overflow")
+        if(CMAKE_CXX_COMPILER_VERSION GREATER_EQUAL 7.1)
+            set(COMMON_COMPILE_FLAGS "${COMMON_COMPILE_FLAGS} -Wimplicit-fallthrough=0")
+        endif()
+    endif()
+
+    if(APPLE)
+        set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${COMMON_COMPILE_FLAGS}")
+    # clang-cl on Windows
+    elseif((CMAKE_C_COMPILER_ID MATCHES "Clang") AND (CMAKE_CXX_SIMULATE_ID MATCHES "MSVC"))
+        set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Xclang -std=c99 ${COMMON_COMPILE_FLAGS}")
+        set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Xclang -std=c++11 -fno-rtti")
+    # clang (not clang-cl) or gcc
+    else()
+        set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -std=c99 ${COMMON_COMPILE_FLAGS}")
+        set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -fno-rtti")
+    endif()
+
+    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COMMON_COMPILE_FLAGS}")
+
+    if(UNIX)
+        set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fvisibility=hidden")
+        set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fvisibility=hidden")
+    endif()
+endif()
+
+if(MSVC)
+    # /WX: Treat warnings as errors
+    # /GR-: Disable RTTI
+    # /w34456: Warn about nested declarations
+    # /w34701, /w34703: Warn about potentially uninitialized variables
+    # /w34057: Warn about different indirection types.
+    # /w34245: Warn about signed/unsigned mismatch.
+    set(MSVC_LOADER_COMPILE_OPTIONS /WX /GR- /w34456 /w34701 /w34703 /w34057 /w34245)
+endif()
+
+# Optional codegen target
+if(PYTHONINTERP_FOUND)
+    add_custom_target(VulkanLoader_generated_source
+                      COMMAND ${PYTHON_EXECUTABLE} ${PROJECT_SOURCE_DIR}/scripts/generate_source.py
+                              ${VulkanRegistry_DIR} --incremental
+                      )
+else()
+    message("WARNING: VulkanLoader_generated_source target requires python 3")
+endif()
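+
+# Usage note (an assumption, not required by the target above): when a Python 3
+# interpreter is found, the generated sources can typically be refreshed from the
+# build directory with a generator-agnostic command such as:
+#   cmake --build . --target VulkanLoader_generated_source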
+
+
+if(UNIX)
+    set_property(DIRECTORY APPEND PROPERTY COMPILE_DEFINITIONS FALLBACK_CONFIG_DIRS="${FALLBACK_CONFIG_DIRS}" FALLBACK_DATA_DIRS="${FALLBACK_DATA_DIRS}")
+
+    if(NOT (SYSCONFDIR STREQUAL ""))
+        # SYSCONFDIR is specified, use it and do not force /etc.
+        set_property(DIRECTORY APPEND PROPERTY COMPILE_DEFINITIONS SYSCONFDIR="${SYSCONFDIR}")
+    else()
+        set_property(DIRECTORY APPEND PROPERTY COMPILE_DEFINITIONS SYSCONFDIR="${CMAKE_INSTALL_FULL_SYSCONFDIR}")
+
+        # Make sure /etc is searched by the loader
+        if(NOT (CMAKE_INSTALL_FULL_SYSCONFDIR STREQUAL "/etc"))
+            set_property(DIRECTORY APPEND PROPERTY COMPILE_DEFINITIONS EXTRASYSCONFDIR="/etc")
+        endif()
+    endif()
+endif()
+
+# uninstall target
+if(NOT TARGET uninstall)
+    configure_file("${CMAKE_CURRENT_SOURCE_DIR}/cmake/cmake_uninstall.cmake.in"
+                   "${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake"
+                   IMMEDIATE
+                   @ONLY)
+    add_custom_target(uninstall COMMAND ${CMAKE_COMMAND} -P ${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake)
+    set_target_properties(uninstall PROPERTIES FOLDER ${LOADER_HELPER_FOLDER})
+endif()
+
+if(BUILD_LOADER)
+    add_subdirectory(loader)
+endif()
+
+add_subdirectory(external)
+if(BUILD_TESTS)
+    add_subdirectory(tests)
+endif()
diff --git a/src/third_party/vulkan-loader/src/CODE_OF_CONDUCT.md b/src/third_party/vulkan-loader/src/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000..a11610b
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/CODE_OF_CONDUCT.md
@@ -0,0 +1 @@
+A reminder that this issue tracker is managed by the Khronos Group. Interactions here should follow the Khronos Code of Conduct (https://www.khronos.org/developers/code-of-conduct), which prohibits aggressive or derogatory language. Please keep the discussion friendly and civil.
diff --git a/src/third_party/vulkan-loader/src/CONTRIBUTING.md b/src/third_party/vulkan-loader/src/CONTRIBUTING.md
new file mode 100644
index 0000000..ce68790
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/CONTRIBUTING.md
@@ -0,0 +1,169 @@
+# How to Contribute to Vulkan Source Repositories
+
+## The Repository
+
+The source code for the Vulkan Loader component is sponsored
+by Khronos and LunarG.
+
+* [KhronosGroup/Vulkan-Loader](https://github.com/KhronosGroup/Vulkan-Loader)
+
+### The Vulkan Ecosystem Needs Your Help
+
+There are a couple of methods to identify areas of need:
+
+* Examine the [issues list](https://github.com/KhronosGroup/Vulkan-Loader/issues)
+  in this repository and look for issues that are of interest
+* If you have your own work in mind, please open an issue to describe
+  it and assign it to yourself.
+
+Please feel free to contact any of the developers who are actively
+contributing should you wish to coordinate further.
+
+Repository Issue labels:
+
+* _Bug_:          These issues refer to invalid or broken functionality and are
+  the highest priority.
+* _Enhancement_:  These issues refer to ideas for extending or improving the
+  loader.
+
+It is the maintainers' goal for all issues to be assigned within one business day
+of their submission.
+If you choose to work on an issue that is assigned, simply coordinate with the
+current assignee.
+
+### How to Submit Fixes
+
+* **Ensure that the bug was not already reported or fixed** by searching on
+  GitHub under Issues and Pull Requests.
+* Use the existing GitHub forking and pull request process.
+  This will involve
+  [forking the repository](https://help.github.com/articles/fork-a-repo/),
+  creating a branch with your commits, and then
+  [submitting a pull request](https://help.github.com/articles/using-pull-requests/).
+* Please read and adhere to the style and process
+  [guidelines](#coding-conventions-and-formatting) enumerated below.
+* Please base your fixes on the master branch.
+  SDK branches are generally not updated except for critical fixes needed to
+  repair an SDK release.
+* The resulting Pull Request will be assigned to a repository maintainer.
+  It is the maintainer's responsibility to ensure the Pull Request
+  passes the Google/LunarG internal CI processes.
+  Once the Pull Request has been approved and is passing internal CI,
+  a repository maintainer will merge the PR.
+
+#### Coding Conventions and Formatting
+
+* Use the
+ **[Google style guide](https://google.github.io/styleguide/cppguide.html)**
+ for source code with the following exceptions:
+  * The column limit is 132 (as opposed to the default value 80).
+    The clang-format tool will handle this. See below.
+  * The indent is 4 spaces instead of the default 2 spaces.
+    Again, the clang-format tool will handle this.
+  * If you can justify a reason for violating a rule in the guidelines,
+    then you are free to do so. Be prepared to defend your
+    decision during code review. This should be used responsibly.
+    An example of a bad reason is "I don't like that rule."
+    An example of a good reason is "This violates the style guide,
+    but it improves type safety."
+
+* Run **clang-format** on your changes to maintain consistent formatting
+  * There are `.clang-format` files present in the repository to define
+    clang-format settings which are found and used automatically by clang-format.
+  * **clang-format** binaries are available from the LLVM organization, here:
+    [LLVM](https://clang.llvm.org/).
+    Our CI system (Travis-CI) currently uses clang-format version 5.0.0 to
+    check that the lines of code you have changed are formatted properly.
+    It is recommended that you use the same version to format your code prior
+    to submission.
+  * A sample git workflow may look like:
+
+>        # Make changes to the source.
+>        $ git add -u .
+>        $ git clang-format --style=file
+>        # Check to see if clang-format made any changes and if they are OK.
+>        $ git add -u .
+>        $ git commit
+
+* **Commit Messages**
+  * Limit the subject line to 50 characters --
+    this allows the information to display correctly in git/Github logs
+  * Begin subject line with a one-word component description followed
+    by a colon (e.g. loader, layers, tests, etc.)
+  * Separate subject from body with a blank line
+  * Wrap the body at 72 characters
+  * Capitalize the subject line
+  * Do not end the subject line with a period
+  * Use the body to explain what and why vs. how
+  * Use the imperative mood in the subject line.
+    This just means to write it as a command (e.g. Fix the sprocket)
+
+Strive for commits that each implement a single piece of functionality or a closely
+related set of changes, using as many commits as necessary (smaller, focused commits are better).
+
+Please ensure that the repository compiles and passes tests without
+error for each commit in your pull request.
+Note that to be accepted into the repository, the pull request must
+pass all tests on all supported platforms.
+The automatic GitHub Travis and AppVeyor continuous integration features
+will assist in enforcing this requirement.
+
+#### Generated Source Code
+
+The `loader/generated` directory contains source code that is created by several
+generator scripts in the `scripts` directory. All changes to these scripts _must_ be submitted with the
+corresponding generated output to keep the repository self-consistent. This requirement is enforced by both
+Travis CI and AppVeyor test configurations. Regenerate source files after modifying any of the generator
+scripts and before building and testing your changes. More details can be found in
+[BUILD.md](https://github.com/KhronosGroup/Vulkan-Loader/blob/master/BUILD.md#generated-source-code).
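+
+As a rough sketch, regenerating by hand mirrors the `VulkanLoader_generated_source`
+custom target defined in this repository's top-level `CMakeLists.txt` (the registry
+path is a placeholder you must supply):
+
+    python3 scripts/generate_source.py <path to Vulkan-Headers registry> --incremental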
+
+#### Testing Your Changes
+
+* Run the existing tests in the `tests` directory of the repository
+  before and after each of your commits to check for any regressions.
+  * Linux: `run_all_tests.sh`
+  * Windows: `run_all_tests.ps1`
+
+* Run tests that explicitly exercise your changes.
+* Feel free to subject your code changes to other tests as well!
+
+#### Coding Conventions for [CMake](http://cmake.org) files
+
+* When editing configuration files for CMake, follow the style conventions of the surrounding code.
+  * The column limit is 132.
+  * The indent is 4 spaces.
+  * CMake functions are lower-case.
+  * Variable and keyword names are upper-case.
+* The format is defined by
+  [cmake-format](https://github.com/cheshirekow/cmake_format)
+  using the `.cmake-format.py` file in the repository to define the settings.
+  See the cmake-format page for information about its simple markup for comments.
+* Disable reformatting of a block of comment lines by inserting
+  a `# ~~~` comment line before and after that block.
+* Disable any formatting of a block of lines by surrounding that block with
+  `# cmake-format: off` and `# cmake-format: on` comment lines.
+* To install: `sudo pip install cmake_format`
+* To run: `cmake-format --in-place $FILENAME`
+* **IMPORTANT (June 2018)** cmake-format v0.3.6 has a
+  [bug]( https://github.com/cheshirekow/cmake_format/issues/50)
+  that can corrupt the formatting of comment lines in CMake files.
+  A workaround is to use the following command _before_ running cmake-format:
+  `sed --in-place='' 's/^  *#/#/' $FILENAME`
+
+### Contributor License Agreement (CLA)
+
+You will be prompted with a one-time "click-through" CLA dialog as part of
+submitting your pull request or other contribution to GitHub.
+
+### License and Copyrights
+
+All contributions made to the Vulkan-Loader repository are Khronos branded
+and as such, any new files need to have the Khronos license
+(Apache 2.0 style) and copyright included.
+Please see an existing file in this repository for an example.
+
+All contributions made to the LunarG repositories are to be made under
+the Apache 2.0 license and any new files need to include this license
+and any applicable copyrights.
+
+You can include your individual copyright after any existing copyrights.
diff --git a/src/third_party/vulkan-loader/src/GOVERNANCE.md b/src/third_party/vulkan-loader/src/GOVERNANCE.md
new file mode 100644
index 0000000..c86bfdf
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/GOVERNANCE.md
@@ -0,0 +1,48 @@
+# Vulkan Loader Repository Management
+
+## **Open Source Project Objectives**
+
+- Alignment with the Vulkan Specification and the Loader Specification
+  - [Vulkan Specification](https://github.com/KhronosGroup/Vulkan-Docs)
+  - [Loader Specification](loader/LoaderAndLayerInterface.md)
+- IHV/ISV Enablement
+  - Updates of loader functionality should be available in a timely fashion
+  - Every effort will be made to be responsive to IHV/ISV issues with the loader
+- Cross Platform Compatibility
+  - LunarG: Monitor for desktop (Windows and Linux)
+  - Continuous Integration: HW test farms operated by LunarG monitor various hardware/software platforms
+- Repo Quality
+  - Repo remains in healthy state with all tests passing and good-quality, consistent codebase
+  - Continuous Integration: Along with Github, HW test farms operated by LunarG perform pre-commit cloud testing on pull-requests
+
+## **Roles and Definitions**
+
+- Contributor, Commenter, User
+  - Submitting contributions, creating issues, or using the contents of the repository
+- Approver
+  - Experienced project members who have made significant technical contributions
+  - Write control: Approve pull/merge requests (verify submissions vs. acceptance criteria)
+- Technical Project Leads
+  - Lead the project in terms of versioning, quality assurance, and overarching objectives
+  - Monitor github issues and drive timely resolution
+  - Designate new approvers
+  - Ensure project information such as the Readme, Contributing docs, wiki, etc., is kept up-to-date
+  - Act as a facilitator in resolving technical conflicts
+  - Serve as a point-of-contact for project-related questions
+
+The technical project leads for this repository are:
+
+- **Lenny Komow** [lenny@lunarg.com](mailto:lenny@lunarg.com)
+
+## **Acceptance Criteria and Process**
+
+- All source code to include Khronos copyright and license (Apache 2.0).
+  - Additional copyrights of contributors appended
+- Contributions are via pull requests
+  - Project leads will assign approvers to contributor pull requests
+  - Approvers can self-assign their reviewers
+  - For complex or invasive contributions, Project Leads may request approval from specific reviewers
+  - At least one review approval is required to complete a pull request
+  - The goal is to be responsive to contributors and to facilitate their submissions while ensuring the acceptance criteria are met
+  - Approval is dependent upon adherence to the guidelines in [CONTRIBUTING.md](CONTRIBUTING.md), and alignment with repository goals of maintainability, completeness, and quality
+  - Conflicts or questions will ultimately be resolved by the project leads
diff --git a/src/third_party/vulkan-loader/src/LICENSE.txt b/src/third_party/vulkan-loader/src/LICENSE.txt
new file mode 100644
index 0000000..6599e31
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/LICENSE.txt
@@ -0,0 +1,207 @@
+The majority of files in this project use the Apache 2.0 License.
+There are a few exceptions and their license can be found in the source.
+Any license deviations from Apache 2.0 are "more permissive" licenses.
+
+===========================================================================================
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/src/third_party/vulkan-loader/src/README.md b/src/third_party/vulkan-loader/src/README.md
new file mode 100644
index 0000000..8771daa
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/README.md
@@ -0,0 +1,67 @@
+# Vulkan Ecosystem Components
+
+This project provides the Khronos official Vulkan ICD desktop loader for Windows, Linux, and MacOS.
+
+## CI Build Status
+
+| Platform | Build Status |
+|:--------:|:------------:|
+| Linux/MacOS | [![Build Status](https://travis-ci.org/KhronosGroup/Vulkan-Loader.svg?branch=master)](https://travis-ci.org/KhronosGroup/Vulkan-Loader) |
+| Windows |[![Build status](https://ci.appveyor.com/api/projects/status/l93pu0w90tui708m?svg=true)](https://ci.appveyor.com/project/Khronoswebmaster/vulkan-loader/branch/master) |
+
+## Introduction
+
+Vulkan is an explicit API, enabling direct control over how GPUs actually work.
+As such, Vulkan supports systems that have multiple GPUs, each running with a different driver, or ICD (Installable Client Driver).
+Vulkan also supports multiple global contexts (instances, in Vulkan terminology).
+The ICD loader is a library that is placed between a Vulkan application and any number of Vulkan drivers, in order to support multiple drivers and the instance-level functionality that works across these drivers.
+Additionally, the loader manages inserting Vulkan layer libraries, such as validation layers, between an application and the drivers.
+
+This repository contains the Vulkan loader that is used for Linux, Windows, MacOS, and iOS.
+There is also a separate loader, maintained by Google, which is used on Android.
+
+The following components are available in this repository:
+
+- [Vulkan header files (Vulkan-Headers submodule)](https://github.com/KhronosGroup/Vulkan-Headers)
+- [ICD Loader](loader/)
+- [Loader Documentation](loader/LoaderAndLayerInterface.md)
+- [Tests](tests/)
+
+## Contact Information
+
+- [Lenny Komow](mailto:lenny@lunarg.com)
+
+## Information for Developing or Contributing
+
+Please see the [CONTRIBUTING.md](CONTRIBUTING.md) file in this repository for more details.
+Please see the [GOVERNANCE.md](GOVERNANCE.md) file in this repository for repository
+management details.
+
+## How to Build and Run
+
+[BUILD.md](BUILD.md)
+Includes directions for building all components.
+
+Architecture and interface information for the loader is in
+[loader/LoaderAndLayerInterface.md](loader/LoaderAndLayerInterface.md).
+
+## Version Tagging Scheme
+
+Updates to the `Vulkan-Loader` repository which correspond to a new Vulkan specification release are tagged using the following format: `v<`_`version`_`>` (e.g., `v1.1.96`).
+
+**Note**: Tagged version releases have undergone thorough testing but do not imply the same quality level as SDK tags. SDK tags follow the `sdk-<`_`version`_`>.<`_`patch`_`>` format (e.g., `sdk-1.1.92.0`).
+
+This scheme was adopted following the 1.1.96 Vulkan specification release.
+
+## License
+
+This work is released as open source under an Apache-style license from Khronos
+including a Khronos copyright.
+
+See COPYRIGHT.txt for a full list of licenses used in this repository.
+
+## Acknowledgements
+
+While this project has been developed primarily by LunarG, Inc., there are many other
+companies and individuals making this possible: Valve Corporation, which funds
+project development, and Khronos, which provides oversight and hosting for the project.
diff --git a/src/third_party/vulkan-loader/src/build-gn/secondary/build_overrides/build.gni b/src/third_party/vulkan-loader/src/build-gn/secondary/build_overrides/build.gni
new file mode 100644
index 0000000..c6c11fa
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/build-gn/secondary/build_overrides/build.gni
@@ -0,0 +1,18 @@
+# Copyright (c) 2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+build_with_chromium = false
+ignore_elf32_limitations = true
+linux_use_bundled_binutils_override = false
+use_system_xcode = true
diff --git a/src/third_party/vulkan-loader/src/build-gn/secondary/build_overrides/vulkan_loader.gni b/src/third_party/vulkan-loader/src/build-gn/secondary/build_overrides/vulkan_loader.gni
new file mode 100644
index 0000000..c826fc1
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/build-gn/secondary/build_overrides/vulkan_loader.gni
@@ -0,0 +1,23 @@
+# Copyright (c) 2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Paths to loader dependencies
+vulkan_headers_dir = "//external/Vulkan-Headers"
+
+# Subdirectories for generated files
+vulkan_gen_subdir = "" 
+
+# Vulkan loader build options
+vulkan_loader_shared = true
+
diff --git a/src/third_party/vulkan-loader/src/build-gn/update_deps.sh b/src/third_party/vulkan-loader/src/build-gn/update_deps.sh
new file mode 100755
index 0000000..06a194a
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/build-gn/update_deps.sh
@@ -0,0 +1,37 @@
+#!/bin/sh
+
+# Copyright (c) 2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Execute at repo root
+cd "$(dirname $0)/.."
+
+# Use update_deps.py to update source dependencies from /scripts/known_good.json
+scripts/update_deps.py --dir="external" --no-build
+
+# Use gclient to update toolchain dependencies from /build-gn/DEPS (from chromium)
+cat << EOF >> .gclient
+solutions = [
+  { "name"        : ".",
+    "url"         : "https://github.com/KhronosGroup/Vulkan-Loader",
+    "deps_file"   : "build-gn/DEPS",
+    "managed"     : False,
+    "custom_deps" : {
+    },
+    "custom_vars": {},
+  },
+]
+EOF
+gclient sync
+
diff --git a/src/third_party/vulkan-loader/src/cmake/Copyright_cmake.txt b/src/third_party/vulkan-loader/src/cmake/Copyright_cmake.txt
new file mode 100644
index 0000000..743c634
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/cmake/Copyright_cmake.txt
@@ -0,0 +1,126 @@
+CMake - Cross Platform Makefile Generator
+Copyright 2000-2018 Kitware, Inc. and Contributors
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+* Redistributions of source code must retain the above copyright
+  notice, this list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright
+  notice, this list of conditions and the following disclaimer in the
+  documentation and/or other materials provided with the distribution.
+
+* Neither the name of Kitware, Inc. nor the names of Contributors
+  may be used to endorse or promote products derived from this
+  software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+------------------------------------------------------------------------------
+
+The following individuals and institutions are among the Contributors:
+
+* Aaron C. Meadows <cmake@shadowguarddev.com>
+* Adriaan de Groot <groot@kde.org>
+* Aleksey Avdeev <solo@altlinux.ru>
+* Alexander Neundorf <neundorf@kde.org>
+* Alexander Smorkalov <alexander.smorkalov@itseez.com>
+* Alexey Sokolov <sokolov@google.com>
+* Alex Turbov <i.zaufi@gmail.com>
+* Andreas Pakulat <apaku@gmx.de>
+* Andreas Schneider <asn@cryptomilk.org>
+* André Rigland Brodtkorb <Andre.Brodtkorb@ifi.uio.no>
+* Axel Huebl, Helmholtz-Zentrum Dresden - Rossendorf
+* Benjamin Eikel
+* Bjoern Ricks <bjoern.ricks@gmail.com>
+* Brad Hards <bradh@kde.org>
+* Christopher Harvey
+* Christoph Grüninger <foss@grueninger.de>
+* Clement Creusot <creusot@cs.york.ac.uk>
+* Daniel Blezek <blezek@gmail.com>
+* Daniel Pfeifer <daniel@pfeifer-mail.de>
+* Enrico Scholz <enrico.scholz@informatik.tu-chemnitz.de>
+* Eran Ifrah <eran.ifrah@gmail.com>
+* Esben Mose Hansen, Ange Optimization ApS
+* Geoffrey Viola <geoffrey.viola@asirobots.com>
+* Google Inc
+* Gregor Jasny
+* Helio Chissini de Castro <helio@kde.org>
+* Ilya Lavrenov <ilya.lavrenov@itseez.com>
+* Insight Software Consortium <insightsoftwareconsortium.org>
+* Jan Woetzel
+* Kelly Thompson <kgt@lanl.gov>
+* Konstantin Podsvirov <konstantin@podsvirov.pro>
+* Mario Bensi <mbensi@ipsquad.net>
+* Mathieu Malaterre <mathieu.malaterre@gmail.com>
+* Matthaeus G. Chajdas
+* Matthias Kretz <kretz@kde.org>
+* Matthias Maennich <matthias@maennich.net>
+* Michael Stürmer
+* Miguel A. Figueroa-Villanueva
+* Mike Jackson
+* Mike McQuaid <mike@mikemcquaid.com>
+* Nicolas Bock <nicolasbock@gmail.com>
+* Nicolas Despres <nicolas.despres@gmail.com>
+* Nikita Krupen'ko <krnekit@gmail.com>
+* NVIDIA Corporation <www.nvidia.com>
+* OpenGamma Ltd. <opengamma.com>
+* Patrick Stotko <stotko@cs.uni-bonn.de>
+* Per Øyvind Karlsen <peroyvind@mandriva.org>
+* Peter Collingbourne <peter@pcc.me.uk>
+* Petr Gotthard <gotthard@honeywell.com>
+* Philip Lowman <philip@yhbt.com>
+* Philippe Proulx <pproulx@efficios.com>
+* Raffi Enficiaud, Max Planck Society
+* Raumfeld <raumfeld.com>
+* Roger Leigh <rleigh@codelibre.net>
+* Rolf Eike Beer <eike@sf-mail.de>
+* Roman Donchenko <roman.donchenko@itseez.com>
+* Roman Kharitonov <roman.kharitonov@itseez.com>
+* Ruslan Baratov
+* Sebastian Holtermann <sebholt@xwmw.org>
+* Stephen Kelly <steveire@gmail.com>
+* Sylvain Joubert <joubert.sy@gmail.com>
+* Thomas Sondergaard <ts@medical-insight.com>
+* Tobias Hunger <tobias.hunger@qt.io>
+* Todd Gamblin <tgamblin@llnl.gov>
+* Tristan Carel
+* University of Dundee
+* Vadim Zhukov
+* Will Dicharry <wdicharry@stellarscience.com>
+
+See version control history for details of individual contributions.
+
+The above copyright and license notice applies to distributions of
+CMake in source and binary form.  Third-party software packages supplied
+with CMake under compatible licenses provide their own copyright notices
+documented in corresponding subdirectories or source files.
+
+------------------------------------------------------------------------------
+
+CMake was initially developed by Kitware with the following sponsorship:
+
+ * National Library of Medicine at the National Institutes of Health
+   as part of the Insight Segmentation and Registration Toolkit (ITK).
+
+ * US National Labs (Los Alamos, Livermore, Sandia) ASC Parallel
+   Visualization Initiative.
+
+ * National Alliance for Medical Image Computing (NAMIC) is funded by the
+   National Institutes of Health through the NIH Roadmap for Medical Research,
+   Grant U54 EB005149.
+
+ * Kitware, Inc.
diff --git a/src/third_party/vulkan-loader/src/cmake/FindPCIAccess.cmake b/src/third_party/vulkan-loader/src/cmake/FindPCIAccess.cmake
new file mode 100644
index 0000000..65f7d5c
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/cmake/FindPCIAccess.cmake
@@ -0,0 +1,28 @@
+# - FindPCIAccess
+#
+# Copyright 2015 Valve Corporation
+
+find_package(PkgConfig)
+
+pkg_check_modules(PC_PCIACCESS QUIET pciaccess)
+
+find_path(PCIACCESS_INCLUDE_DIR NAMES pciaccess.h
+    HINTS
+    ${PC_PCIACCESS_INCLUDEDIR}
+    ${PC_PCIACCESS_INCLUDE_DIRS}
+    )
+
+find_library(PCIACCESS_LIBRARY NAMES pciaccess
+    HINTS
+    ${PC_PCIACCESS_LIBDIR}
+    ${PC_PCIACCESS_LIBRARY_DIRS}
+    )
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(PCIAccess DEFAULT_MSG
+    PCIACCESS_INCLUDE_DIR PCIACCESS_LIBRARY)
+
+mark_as_advanced(PCIACCESS_INCLUDE_DIR PCIACCESS_LIBRARY)
+
+set(PCIACCESS_INCLUDE_DIRS ${PCIACCESS_INCLUDE_DIR})
+set(PCIACCESS_LIBRARIES ${PCIACCESS_LIBRARY})
diff --git a/src/third_party/vulkan-loader/src/cmake/FindPthreadStubs.cmake b/src/third_party/vulkan-loader/src/cmake/FindPthreadStubs.cmake
new file mode 100644
index 0000000..063bbe5
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/cmake/FindPthreadStubs.cmake
@@ -0,0 +1,14 @@
+# - FindPthreadStubs
+#
+# Copyright (C) 2015 Valve Corporation
+
+find_package(PkgConfig)
+
+pkg_check_modules(PC_PTHREADSTUBS QUIET pthread-stubs)
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(PthreadStubs DEFAULT_MSG
+    PC_PTHREADSTUBS_FOUND)
+
+set(PTHREADSTUBS_INCLUDE_DIRS "")
+set(PTHREADSTUBS_LIBRARIES "")
diff --git a/src/third_party/vulkan-loader/src/cmake/FindUDev.cmake b/src/third_party/vulkan-loader/src/cmake/FindUDev.cmake
new file mode 100644
index 0000000..e3d1699
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/cmake/FindUDev.cmake
@@ -0,0 +1,28 @@
+# - FindUDev
+#
+# Copyright (C) 2015 Valve Corporation
+
+find_package(PkgConfig)
+
+pkg_check_modules(PC_LIBUDEV QUIET libudev)
+
+find_path(UDEV_INCLUDE_DIR NAMES libudev.h
+    HINTS
+    ${PC_LIBUDEV_INCLUDEDIR}
+    ${PC_LIBUDEV_INCLUDE_DIRS}
+    )
+
+find_library(UDEV_LIBRARY NAMES udev
+    HINTS
+    ${PC_LIBUDEV_LIBDIR}
+    ${PC_LIBUDEV_LIBRARY_DIRS}
+    )
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(UDev DEFAULT_MSG
+    UDEV_INCLUDE_DIR UDEV_LIBRARY)
+
+mark_as_advanced(UDEV_INCLUDE_DIR UDEV_LIBRARY)
+
+set(UDEV_INCLUDE_DIRS ${UDEV_INCLUDE_DIR})
+set(UDEV_LIBRARIES ${UDEV_LIBRARY})
diff --git a/src/third_party/vulkan-loader/src/cmake/FindValgrind.cmake b/src/third_party/vulkan-loader/src/cmake/FindValgrind.cmake
new file mode 100644
index 0000000..5c1fb56
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/cmake/FindValgrind.cmake
@@ -0,0 +1,22 @@
+# - FindValgrind
+#
+# Copyright (C) 2015 Valve Corporation
+
+find_package(PkgConfig)
+
+pkg_check_modules(PC_VALGRIND QUIET valgrind)
+
+find_path(VALGRIND_INCLUDE_DIR NAMES valgrind.h memcheck.h
+    HINTS
+    ${PC_VALGRIND_INCLUDEDIR}
+    ${PC_VALGRIND_INCLUDE_DIRS}
+    )
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(Valgrind DEFAULT_MSG
+    VALGRIND_INCLUDE_DIR)
+
+mark_as_advanced(VALGRIND_INCLUDE_DIR)
+
+set(VALGRIND_INCLUDE_DIRS ${VALGRIND_INCLUDE_DIR})
+set(VALGRIND_LIBRARIES "")
diff --git a/src/third_party/vulkan-loader/src/cmake/FindVulkanHeaders.cmake b/src/third_party/vulkan-loader/src/cmake/FindVulkanHeaders.cmake
new file mode 100644
index 0000000..41afa9b
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/cmake/FindVulkanHeaders.cmake
@@ -0,0 +1,134 @@
+#.rst:
+# FindVulkanHeaders
+# -----------------
+#
+# Try to find Vulkan Headers and Registry.
+#
+# This module is intended to be used by projects that build Vulkan
+# "system" components such as the loader and layers.
+# Vulkan applications should instead use the FindVulkan (or similar)
+# find module that locates the headers and the loader library.
+#
+# When using this find module to locate the headers and registry
+# in a Vulkan-Headers repository, the Vulkan-Headers repository
+# should be built with 'install' target and the following environment
+# or CMake variable set to the location of the install directory.
+#
+#    VULKAN_HEADERS_INSTALL_DIR
+#
+# IMPORTED Targets
+# ^^^^^^^^^^^^^^^^
+#
+# This module defines no IMPORTED targets
+#
+# Result Variables
+# ^^^^^^^^^^^^^^^^
+#
+# This module defines the following variables::
+#
+#   VulkanHeaders_FOUND          - True if VulkanHeaders was found
+#   VulkanHeaders_INCLUDE_DIRS   - include directories for VulkanHeaders
+#
+#   VulkanRegistry_FOUND         - True if VulkanRegistry was found
+#   VulkanRegistry_DIRS          - directories for VulkanRegistry
+#
+#   VulkanHeaders_VERSION_MAJOR  - The Major API version of the latest version
+#                                  contained in the Vulkan header
+#   VulkanHeaders_VERSION_MINOR  - The Minor API version of the latest version
+#                                  contained in the Vulkan header
+#   VulkanHeaders_VERSION_PATCH  - The Patch API version of the latest version
+#                                  contained in the Vulkan header
+#
+# The module will also define two cache variables::
+#
+#   VulkanHeaders_INCLUDE_DIR    - the VulkanHeaders include directory
+#   VulkanRegistry_DIR           - the VulkanRegistry directory
+#
+
+# Use HINTS instead of PATH to search these locations before
+# searching system environment variables like $PATH that may
+# contain SDK directories.
+find_path(VulkanHeaders_INCLUDE_DIR
+    NAMES vulkan/vulkan.h
+    HINTS
+        ${VULKAN_HEADERS_INSTALL_DIR}/include
+        "$ENV{VULKAN_HEADERS_INSTALL_DIR}/include"
+        "$ENV{VULKAN_SDK}/include")
+
+if(VulkanHeaders_INCLUDE_DIR)
+   get_filename_component(VULKAN_REGISTRY_PATH_HINT ${VulkanHeaders_INCLUDE_DIR} DIRECTORY)
+   find_path(VulkanRegistry_DIR
+       NAMES vk.xml
+       HINTS "${VULKAN_REGISTRY_PATH_HINT}/share/vulkan/registry")
+endif()
+
+set(VulkanHeaders_INCLUDE_DIRS ${VulkanHeaders_INCLUDE_DIR})
+set(VulkanRegistry_DIRS ${VulkanRegistry_DIR})
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(VulkanHeaders
+    DEFAULT_MSG
+    VulkanHeaders_INCLUDE_DIR)
+find_package_handle_standard_args(VulkanRegistry
+    DEFAULT_MSG
+    VulkanRegistry_DIR)
+
+mark_as_advanced(VulkanHeaders_INCLUDE_DIR VulkanRegistry_DIR)
+
+# Determine the major/minor/patch version from the vulkan header
+set(VulkanHeaders_VERSION_MAJOR "0")
+set(VulkanHeaders_VERSION_MINOR "0")
+set(VulkanHeaders_VERSION_PATCH "0")
+
+# First, determine which header we need to grab the version information from.
+# Starting with Vulkan 1.1, we should use vulkan_core.h, but prior to that,
+# the information was in vulkan.h.
+if (EXISTS "${VulkanHeaders_INCLUDE_DIR}/vulkan/vulkan_core.h")
+    set(VulkanHeaders_main_header ${VulkanHeaders_INCLUDE_DIR}/vulkan/vulkan_core.h)
+else()
+    set(VulkanHeaders_main_header ${VulkanHeaders_INCLUDE_DIR}/vulkan/vulkan.h)
+endif()
+
+# Find all lines in the header file that contain any version we may be interested in
+#  NOTE: They start with #define and then have other keywords
+file(STRINGS
+        ${VulkanHeaders_main_header}
+        VulkanHeaders_lines
+        REGEX "^#define (VK_API_VERSION.*VK_MAKE_VERSION|VK_HEADER_VERSION)")
+
+foreach(VulkanHeaders_line ${VulkanHeaders_lines})
+
+    # First, handle the case where we have a major/minor version
+    #   Format is:
+    #        #define VK_API_VERSION_X_Y VK_MAKE_VERSION(X, Y, 0)
+    #   We grab the major version (X) and minor version (Y) out of the parentheses
+    string(REGEX MATCH "VK_MAKE_VERSION\\(.*\\)" VulkanHeaders_out ${VulkanHeaders_line})
+    string(REGEX MATCHALL "[0-9]+" VulkanHeaders_MAJOR_MINOR "${VulkanHeaders_out}")
+    if (VulkanHeaders_MAJOR_MINOR)
+        list (GET VulkanHeaders_MAJOR_MINOR 0 VulkanHeaders_cur_major)
+        list (GET VulkanHeaders_MAJOR_MINOR 1 VulkanHeaders_cur_minor)
+        if (${VulkanHeaders_cur_major} GREATER ${VulkanHeaders_VERSION_MAJOR})
+            set(VulkanHeaders_VERSION_MAJOR ${VulkanHeaders_cur_major})
+            set(VulkanHeaders_VERSION_MINOR ${VulkanHeaders_cur_minor})
+        endif()
+        if (${VulkanHeaders_cur_major} EQUAL ${VulkanHeaders_VERSION_MAJOR} AND
+            ${VulkanHeaders_cur_minor} GREATER ${VulkanHeaders_VERSION_MINOR})
+            set(VulkanHeaders_VERSION_MINOR ${VulkanHeaders_cur_minor})
+        endif()
+    endif()
+
+    # Second, handle the case where we have the patch version
+    #   Format is:
+    #      #define VK_HEADER_VERSION Z
+    #   Where Z is the patch version which we just grab off the end
+    string(REGEX MATCH "define.*VK_HEADER_VERSION.*[0-9]+" VulkanHeaders_out ${VulkanHeaders_line})
+    list(LENGTH VulkanHeaders_out VulkanHeaders_len)
+    if (VulkanHeaders_len)
+        string(REGEX MATCH "[0-9]+" VulkanHeaders_VERSION_PATCH "${VulkanHeaders_out}")
+    endif()
+
+endforeach()
+MESSAGE(STATUS
+        "Detected Vulkan Version ${VulkanHeaders_VERSION_MAJOR}."
+        "${VulkanHeaders_VERSION_MINOR}."
+        "${VulkanHeaders_VERSION_PATCH}")
diff --git a/src/third_party/vulkan-loader/src/cmake/FindWayland.cmake b/src/third_party/vulkan-loader/src/cmake/FindWayland.cmake
new file mode 100644
index 0000000..f93218b
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/cmake/FindWayland.cmake
@@ -0,0 +1,66 @@
+# Try to find Wayland on a Unix system
+#
+# This will define:
+#
+#   WAYLAND_FOUND       - True if Wayland is found
+#   WAYLAND_LIBRARIES   - Link these to use Wayland
+#   WAYLAND_INCLUDE_DIR - Include directory for Wayland
+#   WAYLAND_DEFINITIONS - Compiler flags for using Wayland
+#
+# In addition the following more fine grained variables will be defined:
+#
+#   WAYLAND_CLIENT_FOUND  WAYLAND_CLIENT_INCLUDE_DIR  WAYLAND_CLIENT_LIBRARIES
+#   WAYLAND_SERVER_FOUND  WAYLAND_SERVER_INCLUDE_DIR  WAYLAND_SERVER_LIBRARIES
+#   WAYLAND_EGL_FOUND     WAYLAND_EGL_INCLUDE_DIR     WAYLAND_EGL_LIBRARIES
+#
+# Copyright (c) 2013 Martin Gräßlin <mgraesslin@kde.org>
+#
+# Redistribution and use is allowed according to the terms of the BSD license.
+# For details see the accompanying COPYING-CMAKE-SCRIPTS file.
+
+IF (NOT WIN32)
+  IF (WAYLAND_INCLUDE_DIR AND WAYLAND_LIBRARIES)
+    # In the cache already
+    SET(WAYLAND_FIND_QUIETLY TRUE)
+  ENDIF ()
+
+  # Use pkg-config to get the directories and then use these values
+  # in the FIND_PATH() and FIND_LIBRARY() calls
+  FIND_PACKAGE(PkgConfig)
+  PKG_CHECK_MODULES(PKG_WAYLAND QUIET wayland-client wayland-server wayland-egl wayland-cursor)
+
+  SET(WAYLAND_DEFINITIONS ${PKG_WAYLAND_CFLAGS})
+
+  FIND_PATH(WAYLAND_CLIENT_INCLUDE_DIR  NAMES wayland-client.h HINTS ${PKG_WAYLAND_INCLUDE_DIRS})
+  FIND_PATH(WAYLAND_SERVER_INCLUDE_DIR  NAMES wayland-server.h HINTS ${PKG_WAYLAND_INCLUDE_DIRS})
+  FIND_PATH(WAYLAND_EGL_INCLUDE_DIR     NAMES wayland-egl.h    HINTS ${PKG_WAYLAND_INCLUDE_DIRS})
+  FIND_PATH(WAYLAND_CURSOR_INCLUDE_DIR  NAMES wayland-cursor.h HINTS ${PKG_WAYLAND_INCLUDE_DIRS})
+
+  FIND_LIBRARY(WAYLAND_CLIENT_LIBRARIES NAMES wayland-client   HINTS ${PKG_WAYLAND_LIBRARY_DIRS})
+  FIND_LIBRARY(WAYLAND_SERVER_LIBRARIES NAMES wayland-server   HINTS ${PKG_WAYLAND_LIBRARY_DIRS})
+  FIND_LIBRARY(WAYLAND_EGL_LIBRARIES    NAMES wayland-egl      HINTS ${PKG_WAYLAND_LIBRARY_DIRS})
+  FIND_LIBRARY(WAYLAND_CURSOR_LIBRARIES NAMES wayland-cursor   HINTS ${PKG_WAYLAND_LIBRARY_DIRS})
+
+  set(WAYLAND_INCLUDE_DIR ${WAYLAND_CLIENT_INCLUDE_DIR} ${WAYLAND_SERVER_INCLUDE_DIR} ${WAYLAND_EGL_INCLUDE_DIR} ${WAYLAND_CURSOR_INCLUDE_DIR})
+
+  set(WAYLAND_LIBRARIES ${WAYLAND_CLIENT_LIBRARIES} ${WAYLAND_SERVER_LIBRARIES} ${WAYLAND_EGL_LIBRARIES} ${WAYLAND_CURSOR_LIBRARIES})
+
+  list(REMOVE_DUPLICATES WAYLAND_INCLUDE_DIR)
+
+  include(FindPackageHandleStandardArgs)
+
+  FIND_PACKAGE_HANDLE_STANDARD_ARGS(WAYLAND_CLIENT  DEFAULT_MSG  WAYLAND_CLIENT_LIBRARIES  WAYLAND_CLIENT_INCLUDE_DIR)
+  FIND_PACKAGE_HANDLE_STANDARD_ARGS(WAYLAND_SERVER  DEFAULT_MSG  WAYLAND_SERVER_LIBRARIES  WAYLAND_SERVER_INCLUDE_DIR)
+  FIND_PACKAGE_HANDLE_STANDARD_ARGS(WAYLAND_EGL     DEFAULT_MSG  WAYLAND_EGL_LIBRARIES     WAYLAND_EGL_INCLUDE_DIR)
+  FIND_PACKAGE_HANDLE_STANDARD_ARGS(WAYLAND_CURSOR  DEFAULT_MSG  WAYLAND_CURSOR_LIBRARIES  WAYLAND_CURSOR_INCLUDE_DIR)
+  FIND_PACKAGE_HANDLE_STANDARD_ARGS(WAYLAND         DEFAULT_MSG  WAYLAND_LIBRARIES         WAYLAND_INCLUDE_DIR)
+
+  MARK_AS_ADVANCED(
+        WAYLAND_INCLUDE_DIR         WAYLAND_LIBRARIES
+        WAYLAND_CLIENT_INCLUDE_DIR  WAYLAND_CLIENT_LIBRARIES
+        WAYLAND_SERVER_INCLUDE_DIR  WAYLAND_SERVER_LIBRARIES
+        WAYLAND_EGL_INCLUDE_DIR     WAYLAND_EGL_LIBRARIES
+        WAYLAND_CURSOR_INCLUDE_DIR  WAYLAND_CURSOR_LIBRARIES
+  )
+
+ENDIF ()
diff --git a/src/third_party/vulkan-loader/src/cmake/FindX11_XCB.cmake b/src/third_party/vulkan-loader/src/cmake/FindX11_XCB.cmake
new file mode 100644
index 0000000..956bf89
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/cmake/FindX11_XCB.cmake
@@ -0,0 +1,32 @@
+# - Try to find libX11-xcb
+# Once done this will define
+#
+# X11_XCB_FOUND - system has libX11-xcb
+# X11_XCB_LIBRARIES - Link these to use libX11-xcb
+# X11_XCB_INCLUDE_DIR - the libX11-xcb include dir
+# X11_XCB_DEFINITIONS - compiler switches required for using libX11-xcb
+
+# Copyright (c) 2011 Fredrik Höglund <fredrik@kde.org>
+# Copyright (c) 2008 Helio Chissini de Castro, <helio@kde.org>
+# Copyright (c) 2007 Matthias Kretz, <kretz@kde.org>
+#
+# Redistribution and use is allowed according to the terms of the BSD license.
+# For details see the accompanying COPYING-CMAKE-SCRIPTS file.
+
+IF (NOT WIN32)
+  # use pkg-config to get the directories and then use these values
+  # in the FIND_PATH() and FIND_LIBRARY() calls
+  FIND_PACKAGE(PkgConfig)
+  PKG_CHECK_MODULES(PKG_X11_XCB QUIET x11-xcb)
+
+  SET(X11_XCB_DEFINITIONS ${PKG_X11_XCB_CFLAGS})
+
+  FIND_PATH(X11_XCB_INCLUDE_DIR NAMES X11/Xlib-xcb.h HINTS ${PKG_X11_XCB_INCLUDE_DIRS})
+  FIND_LIBRARY(X11_XCB_LIBRARIES NAMES X11-xcb HINTS ${PKG_X11_XCB_LIBRARY_DIRS})
+
+  include(FindPackageHandleStandardArgs)
+  FIND_PACKAGE_HANDLE_STANDARD_ARGS(X11_XCB DEFAULT_MSG X11_XCB_LIBRARIES X11_XCB_INCLUDE_DIR)
+
+  MARK_AS_ADVANCED(X11_XCB_INCLUDE_DIR X11_XCB_LIBRARIES)
+ENDIF (NOT WIN32)
+
diff --git a/src/third_party/vulkan-loader/src/cmake/FindXCB.cmake b/src/third_party/vulkan-loader/src/cmake/FindXCB.cmake
new file mode 100644
index 0000000..2311591
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/cmake/FindXCB.cmake
@@ -0,0 +1,51 @@
+# - FindXCB
+#
+# Copyright (C) 2015 Valve Corporation
+
+find_package(PkgConfig)
+
+if(NOT XCB_FIND_COMPONENTS)
+    set(XCB_FIND_COMPONENTS xcb)
+endif()
+
+include(FindPackageHandleStandardArgs)
+set(XCB_FOUND true)
+set(XCB_INCLUDE_DIRS "")
+set(XCB_LIBRARIES "")
+foreach(comp ${XCB_FIND_COMPONENTS})
+    # component name
+    string(TOUPPER ${comp} compname)
+    string(REPLACE "-" "_" compname ${compname})
+    # header name
+    string(REPLACE "xcb-" "" headername xcb/${comp}.h)
+    # library name
+    set(libname ${comp})
+
+    pkg_check_modules(PC_${comp} QUIET ${comp})
+
+    find_path(${compname}_INCLUDE_DIR NAMES ${headername}
+        HINTS
+        ${PC_${comp}_INCLUDEDIR}
+        ${PC_${comp}_INCLUDE_DIRS}
+        )
+
+    find_library(${compname}_LIBRARY NAMES ${libname}
+        HINTS
+        ${PC_${comp}_LIBDIR}
+        ${PC_${comp}_LIBRARY_DIRS}
+        )
+
+    find_package_handle_standard_args(${comp}
+        FOUND_VAR ${comp}_FOUND
+        REQUIRED_VARS ${compname}_INCLUDE_DIR ${compname}_LIBRARY)
+    mark_as_advanced(${compname}_INCLUDE_DIR ${compname}_LIBRARY)
+
+    list(APPEND XCB_INCLUDE_DIRS ${${compname}_INCLUDE_DIR})
+    list(APPEND XCB_LIBRARIES ${${compname}_LIBRARY})
+
+    if(NOT ${comp}_FOUND)
+        set(XCB_FOUND false)
+    endif()
+endforeach()
+
+list(REMOVE_DUPLICATES XCB_INCLUDE_DIRS)
diff --git a/src/third_party/vulkan-loader/src/cmake/cmake_uninstall.cmake.in b/src/third_party/vulkan-loader/src/cmake/cmake_uninstall.cmake.in
new file mode 100644
index 0000000..2037e36
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/cmake/cmake_uninstall.cmake.in
@@ -0,0 +1,21 @@
+if(NOT EXISTS "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt")
+  message(FATAL_ERROR "Cannot find install manifest: @CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt")
+endif(NOT EXISTS "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt")
+
+file(READ "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt" files)
+string(REGEX REPLACE "\n" ";" files "${files}")
+foreach(file ${files})
+  message(STATUS "Uninstalling $ENV{DESTDIR}${file}")
+  if(IS_SYMLINK "$ENV{DESTDIR}${file}" OR EXISTS "$ENV{DESTDIR}${file}")
+    exec_program(
+      "@CMAKE_COMMAND@" ARGS "-E remove \"$ENV{DESTDIR}${file}\""
+      OUTPUT_VARIABLE rm_out
+      RETURN_VALUE rm_retval
+      )
+    if(NOT "${rm_retval}" STREQUAL 0)
+      message(FATAL_ERROR "Problem when removing $ENV{DESTDIR}${file}")
+    endif(NOT "${rm_retval}" STREQUAL 0)
+  else(IS_SYMLINK "$ENV{DESTDIR}${file}" OR EXISTS "$ENV{DESTDIR}${file}")
+    message(STATUS "File $ENV{DESTDIR}${file} does not exist.")
+  endif(IS_SYMLINK "$ENV{DESTDIR}${file}" OR EXISTS "$ENV{DESTDIR}${file}")
+endforeach(file)
diff --git a/src/third_party/vulkan-loader/src/external/CMakeLists.txt b/src/third_party/vulkan-loader/src/external/CMakeLists.txt
new file mode 100644
index 0000000..964d098
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/external/CMakeLists.txt
@@ -0,0 +1,39 @@
+# ~~~
+# Copyright (c) 2018 Valve Corporation
+# Copyright (c) 2018 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ~~~
+
+# Add your optional dependencies in this "external" directory.
+
+# googletest is an optional external dependency for this repo.
+if(BUILD_TESTS)
+    # Attempt to enable if it is available.
+    if(TARGET gtest_main)
+        # Already enabled as a target (perhaps by a project enclosing this one)
+        message(STATUS "Vulkan-Loader/external: " "googletest already configured - using it")
+    elseif(IS_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/googletest")
+        # The googletest directory exists, so enable it as a target.
+        message(STATUS "Vulkan-Loader/external: " "googletest found - configuring it for tests")
+        set(BUILD_GTEST ON CACHE BOOL "Builds the googletest subproject")
+        set(BUILD_GMOCK OFF CACHE BOOL "Builds the googlemock subproject")
+        set(gtest_force_shared_crt ON CACHE BOOL "Link gtest runtimes dynamically")
+        set(BUILD_SHARED_LIBS ON CACHE BOOL "Build shared libraries")
+        # EXCLUDE_FROM_ALL keeps the install target from installing GTEST files.
+        add_subdirectory("${CMAKE_CURRENT_SOURCE_DIR}/googletest" EXCLUDE_FROM_ALL)
+    else()
+        message(SEND_ERROR "Vulkan-Loader/external: " "Google Test was not found.  "
+                           "Provide Google Test in external/googletest or set BUILD_TESTS=OFF")
+    endif()
+endif()
diff --git a/src/third_party/vulkan-loader/src/external/README.md b/src/third_party/vulkan-loader/src/external/README.md
new file mode 100644
index 0000000..5b502ee
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/external/README.md
@@ -0,0 +1,9 @@
+# External dependencies
+
+This directory provides a location where external projects used by the loader can be cloned.
+Currently, the only such project is Google Test.
+It can be enabled by cloning it into this directory:
+
+```
+git clone https://github.com/google/googletest.git
+```
diff --git a/src/third_party/vulkan-loader/src/loader/CMakeLists.txt b/src/third_party/vulkan-loader/src/loader/CMakeLists.txt
new file mode 100644
index 0000000..e129dc1
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/CMakeLists.txt
@@ -0,0 +1,318 @@
+# ~~~
+# Copyright (c) 2014-2019 Valve Corporation
+# Copyright (c) 2014-2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ~~~
+
+include_directories(${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/generated ${CMAKE_CURRENT_BINARY_DIR})
+
+# Check for the existence of the secure_getenv or __secure_getenv functions
+include(CheckFunctionExists)
+include(CheckIncludeFile)
+
+check_function_exists(secure_getenv HAVE_SECURE_GETENV)
+check_function_exists(__secure_getenv HAVE___SECURE_GETENV)
+if(NOT (HAVE_SECURE_GETENV OR HAVE___SECURE_GETENV))
+    message(WARNING "Using non-secure environment variable lookups. This loader will not properly disable environment variables when run with elevated permissions.")
+endif()
+configure_file(${CMAKE_CURRENT_SOURCE_DIR}/loader_cmake_config.h.in ${CMAKE_CURRENT_BINARY_DIR}/loader_cmake_config.h)
+
+if(WIN32)
+    set_property(DIRECTORY APPEND PROPERTY COMPILE_DEFINITIONS VK_USE_PLATFORM_WIN32_KHR WIN32_LEAN_AND_MEAN)
+    if(MSVC AND NOT MSVC_VERSION LESS 1900)
+        # Enable control flow guard
+        message(STATUS "Building loader with control flow guard")
+        set(MSVC_LOADER_COMPILE_OPTIONS ${MSVC_LOADER_COMPILE_OPTIONS} /guard:cf)
+        set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} /guard:cf")
+        set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} /guard:cf")
+    endif()
+elseif(ANDROID)
+    set_property(DIRECTORY APPEND PROPERTY COMPILE_DEFINITIONS VK_USE_PLATFORM_ANDROID_KHR)
+elseif(APPLE)
+    set_property(DIRECTORY APPEND PROPERTY COMPILE_DEFINITIONS VK_USE_PLATFORM_MACOS_MVK VK_USE_PLATFORM_METAL_EXT)
+elseif(UNIX AND NOT APPLE) # i.e.: Linux
+    if(BUILD_WSI_XCB_SUPPORT)
+        set_property(DIRECTORY APPEND PROPERTY COMPILE_DEFINITIONS VK_USE_PLATFORM_XCB_KHR)
+    endif()
+
+    if(BUILD_WSI_XLIB_SUPPORT)
+        set_property(DIRECTORY APPEND PROPERTY COMPILE_DEFINITIONS VK_USE_PLATFORM_XLIB_KHR VK_USE_PLATFORM_XLIB_XRANDR_EXT)
+    endif()
+
+    if(BUILD_WSI_WAYLAND_SUPPORT)
+        set_property(DIRECTORY APPEND PROPERTY COMPILE_DEFINITIONS VK_USE_PLATFORM_WAYLAND_KHR)
+    endif()
+else()
+    message(FATAL_ERROR "Unsupported Platform!")
+endif()
+
+# DEBUG enables runtime loader ICD verification
+set(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -DDEBUG")
+set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -DDEBUG")
+
+if(WIN32)
+    # Use static MSVCRT libraries
+    foreach(configuration
+            in
+            CMAKE_C_FLAGS_DEBUG
+            CMAKE_C_FLAGS_MINSIZEREL
+            CMAKE_C_FLAGS_RELEASE
+            CMAKE_C_FLAGS_RELWITHDEBINFO
+            CMAKE_CXX_FLAGS_DEBUG
+            CMAKE_CXX_FLAGS_MINSIZEREL
+            CMAKE_CXX_FLAGS_RELEASE
+            CMAKE_CXX_FLAGS_RELWITHDEBINFO)
+        if(${configuration} MATCHES "/MD")
+            string(REGEX
+                   REPLACE "/MD"
+                           "/MT"
+                           ${configuration}
+                           "${${configuration}}")
+        endif()
+    endforeach()
+
+    if(ENABLE_WIN10_ONECORE)
+        # Note: When linking your app or driver to OneCore.lib, be sure to remove any links to non-umbrella libs (such as
+        # kernel32.lib).
+        set(CMAKE_CXX_STANDARD_LIBRARIES " ") # space is intentional
+        set(CMAKE_C_STANDARD_LIBRARIES ${CMAKE_CXX_STANDARD_LIBRARIES})
+    endif()
+
+    set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -D_CRT_SECURE_NO_WARNINGS")
+    # ~~~
+    # Build dev_ext_trampoline.c with -O2 to allow tail-call optimization.
+    # Build other C files with normal options.
+    # Setup two CMake targets (loader-norm and loader-opt) for the different compilation flags.
+    # ~~~
+    separate_arguments(LOCAL_C_FLAGS_DBG WINDOWS_COMMAND ${CMAKE_C_FLAGS_DEBUG})
+    set(CMAKE_C_FLAGS_DEBUG " ")
+    separate_arguments(LOCAL_C_FLAGS_REL WINDOWS_COMMAND ${CMAKE_C_FLAGS_RELEASE})
+endif()
+
+set(NORMAL_LOADER_SRCS
+    extension_manual.c
+    loader.c
+    loader.h
+    vk_loader_platform.h
+    vk_loader_layer.h
+    trampoline.c
+    wsi.c
+    wsi.h
+    debug_utils.c
+    debug_utils.h
+    gpa_helper.h
+    cJSON.c
+    cJSON.h
+    murmurhash.c
+    murmurhash.h)
+
+if(WIN32)
+    set(NORMAL_LOADER_SRCS ${NORMAL_LOADER_SRCS} adapters.h)
+endif()
+
+set(OPT_LOADER_SRCS dev_ext_trampoline.c phys_dev_ext.c)
+
+# Check for assembler support
+set(ASM_FAILURE_MSG "The build will fall back on building with C code\n")
+set(ASM_FAILURE_MSG "${ASM_FAILURE_MSG}Note that this may be unsafe, as the C code requires tail-call optimizations to remove")
+set(ASM_FAILURE_MSG "${ASM_FAILURE_MSG} the stack frame for certain calls. If the compiler does not do this, then unknown device")
+set(ASM_FAILURE_MSG "${ASM_FAILURE_MSG} extensions will suffer from a corrupted stack.")
+if(WIN32)
+    enable_language(ASM_MASM)
+    if(CMAKE_ASM_MASM_COMPILER_WORKS)
+        if(NOT CMAKE_CL_64)
+            set(CMAKE_ASM_MASM_FLAGS ${CMAKE_ASM_MASM_FLAGS} /safeseh)
+        endif()
+
+        add_executable(asm_offset asm_offset.c)
+        target_link_libraries(asm_offset Vulkan::Headers)
+        add_custom_command(OUTPUT gen_defines.asm DEPENDS asm_offset COMMAND asm_offset MASM)
+        add_custom_target(loader_asm_gen_files DEPENDS gen_defines.asm)
+        set_target_properties(loader_asm_gen_files PROPERTIES FOLDER ${LOADER_HELPER_FOLDER})
+        add_library(loader-unknown-chain OBJECT unknown_ext_chain_masm.asm)
+        add_dependencies(loader-unknown-chain loader_asm_gen_files)
+    else()
+        message(WARNING "Could not find working MASM assembler\n${ASM_FAILURE_MSG}")
+        add_custom_target(loader_asm_gen_files)
+        add_library(loader-unknown-chain OBJECT unknown_ext_chain.c)
+        target_compile_options(loader-unknown-chain PUBLIC "$<$<CONFIG:DEBUG>:${LOCAL_C_FLAGS_REL}>")
+        target_compile_options(loader-unknown-chain PUBLIC ${MSVC_LOADER_COMPILE_OPTIONS})
+    endif()
+elseif(APPLE)
+    # For MacOS, use the C code and force the compiler's tail-call optimization instead of using assembly code.
+    set(OPT_LOADER_SRCS ${OPT_LOADER_SRCS} unknown_ext_chain.c)
+    set_source_files_properties(${OPT_LOADER_SRCS} PROPERTIES COMPILE_FLAGS -O)
+    add_custom_target(loader_asm_gen_files) # This causes no assembly files to be generated.
+else(UNIX AND NOT APPLE) # i.e.: Linux
+    enable_language(ASM)
+    set(CMAKE_ASM_FLAGS "${CMAKE_C_FLAGS}")
+
+    check_include_file("cet.h" HAVE_CET_H)
+    if(HAVE_CET_H)
+        set_property(DIRECTORY APPEND PROPERTY COMPILE_DEFINITIONS HAVE_CET_H)
+    endif()
+    set(CMAKE_TRY_COMPILE_TARGET_TYPE STATIC_LIBRARY)
+    try_compile(ASSEMBLER_WORKS ${CMAKE_CURRENT_BINARY_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/asm_test.S)
+    if(ASSEMBLER_WORKS)
+        set(OPT_LOADER_SRCS ${OPT_LOADER_SRCS} unknown_ext_chain_gas.S)
+        add_executable(asm_offset asm_offset.c)
+        target_link_libraries(asm_offset Vulkan::Headers)
+        add_custom_command(OUTPUT gen_defines.asm DEPENDS asm_offset COMMAND asm_offset GAS)
+        add_custom_target(loader_asm_gen_files DEPENDS gen_defines.asm)
+    else()
+        message(WARNING "Could not find working x86 GAS assembler\n${ASM_FAILURE_MSG}")
+        set(OPT_LOADER_SRCS ${OPT_LOADER_SRCS} unknown_ext_chain.c)
+        add_custom_target(loader_asm_gen_files)
+    endif()
+endif()
+
+if(WIN32)
+    add_library(loader-norm OBJECT ${NORMAL_LOADER_SRCS} dirent_on_windows.c dxgi_loader.c)
+    target_compile_options(loader-norm PUBLIC "$<$<CONFIG:DEBUG>:${LOCAL_C_FLAGS_DBG}>")
+    target_compile_options(loader-norm PUBLIC ${MSVC_LOADER_COMPILE_OPTIONS})
+    target_include_directories(loader-norm PRIVATE "$<TARGET_PROPERTY:Vulkan::Headers,INTERFACE_INCLUDE_DIRECTORIES>")
+
+    add_library(loader-opt OBJECT ${OPT_LOADER_SRCS})
+    add_dependencies(loader-opt loader_asm_gen_files)
+    target_compile_options(loader-opt PUBLIC "$<$<CONFIG:DEBUG>:${LOCAL_C_FLAGS_REL}>")
+    target_compile_options(loader-opt PUBLIC ${MSVC_LOADER_COMPILE_OPTIONS})
+    target_include_directories(loader-opt PRIVATE "$<TARGET_PROPERTY:Vulkan::Headers,INTERFACE_INCLUDE_DIRECTORIES>")
+
+    if(NOT ENABLE_STATIC_LOADER)
+        target_compile_definitions(loader-norm PUBLIC LOADER_DYNAMIC_LIB)
+        target_compile_definitions(loader-opt PUBLIC LOADER_DYNAMIC_LIB)
+
+        add_library(vulkan
+                    SHARED
+                    $<TARGET_OBJECTS:loader-opt>
+                    $<TARGET_OBJECTS:loader-norm>
+                    $<TARGET_OBJECTS:loader-unknown-chain>
+                    ${CMAKE_CURRENT_SOURCE_DIR}/vulkan-1.def
+                    ${CMAKE_CURRENT_SOURCE_DIR}/loader.rc)
+        set_target_properties(vulkan
+                              PROPERTIES LINK_FLAGS_DEBUG
+                                         "/ignore:4098"
+                                         OUTPUT_NAME
+                                         vulkan-1)
+        target_link_libraries(vulkan Vulkan::Headers)
+    else()
+        add_library(vulkan
+                    STATIC
+                    $<TARGET_OBJECTS:loader-opt>
+                    $<TARGET_OBJECTS:loader-norm>
+                    $<TARGET_OBJECTS:loader-unknown-chain>)
+        set_target_properties(vulkan PROPERTIES OUTPUT_NAME VKstatic.1)
+    endif()
+
+    if(ENABLE_WIN10_ONECORE)
+        target_link_libraries(vulkan OneCoreUAP.lib LIBCMT.LIB LIBCMTD.LIB LIBVCRUNTIME.LIB LIBUCRT.LIB)
+        set_target_properties(vulkan PROPERTIES LINK_FLAGS "/NODEFAULTLIB")
+    else()
+        target_link_libraries(vulkan Cfgmgr32)
+    endif()
+
+    add_dependencies(vulkan loader_asm_gen_files)
+
+else()
+    # Linux and MacOS
+    set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wpointer-arith")
+
+    # Clang (and not gcc) warns about redefining a typedef with the same types, so disable that warning. Note that it will still
+    # throw an error if a typedef is redefined with a different type.
+    if(CMAKE_C_COMPILER_ID MATCHES "Clang")
+        set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-typedef-redefinition")
+    endif()
+
+    add_library(vulkan SHARED ${NORMAL_LOADER_SRCS} ${OPT_LOADER_SRCS})
+    add_dependencies(vulkan loader_asm_gen_files)
+    target_compile_definitions(vulkan PUBLIC LOADER_DYNAMIC_LIB)
+    set_target_properties(vulkan
+                          PROPERTIES SOVERSION
+                                     "1"
+                                     VERSION
+                                     "${VulkanHeaders_VERSION_MAJOR}.${VulkanHeaders_VERSION_MINOR}.${VulkanHeaders_VERSION_PATCH}")
+    target_link_libraries(vulkan ${CMAKE_DL_LIBS} pthread m)
+    target_link_libraries(vulkan Vulkan::Headers)
+
+    if(APPLE)
+        find_library(COREFOUNDATION_LIBRARY NAMES CoreFoundation)
+        target_link_libraries(vulkan "-framework CoreFoundation")
+
+        # Build vulkan.framework
+        set(FRAMEWORK_HEADERS
+            ${VulkanHeaders_INCLUDE_DIRS}/vulkan/vk_icd.h
+            ${VulkanHeaders_INCLUDE_DIRS}/vulkan/vk_layer.h
+            ${VulkanHeaders_INCLUDE_DIRS}/vulkan/vk_platform.h
+            ${VulkanHeaders_INCLUDE_DIRS}/vulkan/vk_sdk_platform.h
+            ${VulkanHeaders_INCLUDE_DIRS}/vulkan/vulkan_android.h
+            ${VulkanHeaders_INCLUDE_DIRS}/vulkan/vulkan_core.h
+            ${VulkanHeaders_INCLUDE_DIRS}/vulkan/vulkan_ios.h
+            ${VulkanHeaders_INCLUDE_DIRS}/vulkan/vulkan_macos.h
+            ${VulkanHeaders_INCLUDE_DIRS}/vulkan/vulkan_vi.h
+            ${VulkanHeaders_INCLUDE_DIRS}/vulkan/vulkan_wayland.h
+            ${VulkanHeaders_INCLUDE_DIRS}/vulkan/vulkan_win32.h
+            ${VulkanHeaders_INCLUDE_DIRS}/vulkan/vulkan_xcb.h
+            ${VulkanHeaders_INCLUDE_DIRS}/vulkan/vulkan_xlib.h
+            ${VulkanHeaders_INCLUDE_DIRS}/vulkan/vulkan_xlib_xrandr.h
+            ${VulkanHeaders_INCLUDE_DIRS}/vulkan/vulkan.h
+            ${VulkanHeaders_INCLUDE_DIRS}/vulkan/vulkan.hpp)
+        add_library(vulkan-framework SHARED ${NORMAL_LOADER_SRCS} ${OPT_LOADER_SRCS} ${FRAMEWORK_HEADERS})
+        add_dependencies(vulkan-framework loader_asm_gen_files)
+        target_compile_definitions(vulkan-framework PUBLIC LOADER_DYNAMIC_LIB)
+        target_link_libraries(vulkan-framework -ldl -lpthread -lm "-framework CoreFoundation")
+        target_link_libraries(vulkan-framework Vulkan::Headers)
+
+        # The FRAMEWORK_VERSION needs to be "A" here so that Xcode code-signing works when a user adds their framework to an Xcode
+        # project and does "Sign on Copy". It would have been nicer to use "1" to denote Vulkan 1. Although Apple docs say that a
+        # framework version does not have to be "A", this part of the Apple toolchain expects it.
+        # https://forums.developer.apple.com/thread/65963
+
+# cmake-format: off
+        set_target_properties(vulkan-framework PROPERTIES
+            OUTPUT_NAME vulkan
+            FRAMEWORK TRUE
+            FRAMEWORK_VERSION A
+            VERSION "${VulkanHeaders_VERSION_MAJOR}.${VulkanHeaders_VERSION_MINOR}.${VulkanHeaders_VERSION_PATCH}" # "current version"
+            SOVERSION "1.0.0"                        # "compatibility version"
+            MACOSX_FRAMEWORK_IDENTIFIER com.lunarg.vulkanFramework
+            PUBLIC_HEADER "${FRAMEWORK_HEADERS}"
+        )
+        install(TARGETS vulkan-framework
+            PUBLIC_HEADER DESTINATION vulkan
+            FRAMEWORK DESTINATION loader
+        )
+# cmake-format: on
+    endif()
+
+    if(NOT APPLE)
+        # Generate pkg-config file.
+        include(FindPkgConfig QUIET)
+        if(PKG_CONFIG_FOUND)
+            set(VK_API_VERSION "${VulkanHeaders_VERSION_MAJOR}.${VulkanHeaders_VERSION_MINOR}.${VulkanHeaders_VERSION_PATCH}")
+            foreach(LIB ${CMAKE_CXX_IMPLICIT_LINK_LIBRARIES} ${PLATFORM_LIBS})
+                set(PRIVATE_LIBS "${PRIVATE_LIBS} -l${LIB}")
+            endforeach()
+            configure_file("vulkan.pc.in" "vulkan.pc" @ONLY)
+            install(FILES "${CMAKE_CURRENT_BINARY_DIR}/vulkan.pc" DESTINATION "${CMAKE_INSTALL_LIBDIR}/pkgconfig")
+        endif()
+    endif()
+endif()
+
+target_link_libraries(vulkan Vulkan::Headers)
+add_library(Vulkan::Vulkan ALIAS vulkan)
+
+install(TARGETS vulkan
+        LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
+        ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}
+        RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
diff --git a/src/third_party/vulkan-loader/src/loader/LoaderAndLayerInterface.md b/src/third_party/vulkan-loader/src/loader/LoaderAndLayerInterface.md
new file mode 100644
index 0000000..52091f0
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/LoaderAndLayerInterface.md
@@ -0,0 +1,2773 @@
+<!-- markdownlint-disable MD041 -->
+[![Khronos Vulkan][1]][2]
+
+[1]: https://vulkan.lunarg.com/img/Vulkan_100px_Dec16.png "https://www.khronos.org/vulkan/"
+[2]: https://www.khronos.org/vulkan/
+
+# Architecture of the Vulkan Loader Interfaces
+[![Creative Commons][3]][4]
+
+<!-- Copyright &copy; 2015-2019 LunarG, Inc. -->
+
+
+[3]: https://i.creativecommons.org/l/by-nd/4.0/88x31.png "Creative Commons License"
+[4]: https://creativecommons.org/licenses/by-nd/4.0/
+## Table of Contents
+  * [Overview](#overview)
+    * [Who Should Read This Document](#who-should-read-this-document)
+    * [The Loader](#the-loader)
+    * [Layers](#layers)
+    * [Installable Client Drivers](#installable-client-drivers)
+    * [Instance Versus Device](#instance-versus-device)
+    * [Dispatch Tables and Call Chains](#dispatch-tables-and-call-chains)
+
+  * [Application Interface to the Loader](#application-interface-to-the-loader)
+    * [Interfacing with Vulkan Functions](#interfacing-with-vulkan-functions)
+    * [Application Layer Usage](#application-layer-usage)
+    * [Application Usage of Extensions](#application-usage-of-extensions)
+
+  * [Loader and Layer Interface](#loader-and-layer-interface)
+    * [Layer Discovery](#layer-discovery)
+    * [Layer Version Negotiation](#layer-version-negotiation)
+    * [Layer Call Chains and Distributed Dispatch](#layer-call-chains-and-distributed-dispatch)
+    * [Layer Unknown Physical Device Extensions](#layer-unknown-physical-device-extensions)
+    * [Layer Intercept Requirements](#layer-intercept-requirements)
+    * [Distributed Dispatching Requirements](#distributed-dispatching-requirements)
+    * [Layer Conventions and Rules](#layer-conventions-and-rules)
+    * [Layer Dispatch Initialization](#layer-dispatch-initialization)
+    * [Example Code for CreateInstance](#example-code-for-createinstance)
+    * [Example Code for CreateDevice](#example-code-for-createdevice)
+    * [Meta-layers](#meta-layers)
+    * [Pre-Instance Functions](#pre-instance-functions)
+    * [Special Considerations](#special-considerations)
+    * [Layer Manifest File Format](#layer-manifest-file-format)
+    * [Layer Library Versions](#layer-library-versions)
+
+  * [Vulkan Installable Client Driver interface with the loader](#vulkan-installable-client-driver-interface-with-the-loader)
+    * [ICD Discovery](#icd-discovery)
+    * [ICD Manifest File Format](#icd-manifest-file-format)
+    * [ICD Vulkan Entry Point Discovery](#icd-vulkan-entry-point-discovery)
+    * [ICD API Version](#icd-api-version)
+    * [ICD Unknown Physical Device Extensions](#icd-unknown-physical-device-extensions)
+    * [ICD Dispatchable Object Creation](#icd-dispatchable-object-creation)
+    * [Handling KHR Surface Objects in WSI Extensions](#handling-khr-surface-objects-in-wsi-extensions)
+    * [Loader and ICD Interface Negotiation](#loader-and-icd-interface-negotiation)
+
+  * [Table of Debug Environment Variables](#table-of-debug-environment-variables)
+  * [Glossary of Terms](#glossary-of-terms)
+ 
+## Overview
+
+Vulkan is a layered architecture, made up of the following elements:
+  * The Vulkan Application
+  * [The Vulkan Loader](#the-loader)
+  * [Vulkan Layers](#layers)
+  * [Installable Client Drivers (ICDs)](#installable-client-drivers)
+
+![High Level View of Loader](./images/high_level_loader.png)
+
+The general concepts in this document are applicable to the loaders available
+for Windows, Linux, Android, and macOS systems.
+
+
+#### Who Should Read This Document
+
+While this document is primarily targeted at developers of Vulkan applications,
+drivers and layers, the information contained in it could be useful to anyone
+wanting a better understanding of the Vulkan runtime.
+
+
+#### The Loader
+
+The application sits on one end of, and interfaces directly with, the
+loader.  On the other end of the loader from the application are the ICDs, which
+control the Vulkan-capable hardware.  An important point to remember is that
+Vulkan-capable hardware can be graphics-based, compute-based, or both. Between
+the application and the ICDs the loader can inject a number of optional
+[layers](#layers) that provide special functionality.
+
+The loader is responsible for working with the various layers as well as
+supporting multiple GPUs and their drivers.  Any Vulkan function may
+wind up calling into a diverse set of modules: loader, layers, and ICDs.
+The loader is critical to managing the proper dispatching of Vulkan
+functions to the appropriate set of layers and ICDs. The Vulkan object
+model allows the loader to insert layers into a call chain so that the layers
+can process Vulkan functions prior to the ICD being called.
+
+This document is intended to provide an overview of the necessary interfaces
+between each of these.
+
+
+##### Goals of the Loader
+
+The loader was designed with the following goals in mind.
+ 1. Support one or more Vulkan-capable ICDs on a user's computer system without
+them interfering with one another.
+ 2. Support Vulkan Layers which are optional modules that can be enabled by an
+application, developer, or standard system settings.
+ 3. Impact the overall performance of a Vulkan application as little as
+possible.
+
+
+#### Layers
+
+Layers are optional components that augment the Vulkan system.  They can
+intercept, evaluate, and modify existing Vulkan functions on their way from the
+application down to the hardware.  Layers are implemented as libraries that can
+be enabled in different ways (including by application request) and are loaded
+during CreateInstance.  Each layer can choose to hook (intercept) any Vulkan
+functions, which it can in turn ignore or augment.  A layer does not need to
+intercept all Vulkan functions.  It may choose to intercept all known functions,
+or it may choose to intercept only one.
+
+Some examples of features that layers may expose include:
+ * Validating API usage
+ * Adding the ability to perform Vulkan API tracing and debugging
+ * Overlaying additional content on the application's surfaces
+
+Because layers are optional, you may choose to enable layers for debugging
+your application, but then disable any layer usage when you release your
+product.
+
+
+#### Installable Client Drivers
+
+Vulkan allows multiple Installable Client Drivers (ICDs) each supporting one
+or more devices (represented by a Vulkan `VkPhysicalDevice` object) to be used
+collectively. The loader is responsible for discovering available Vulkan ICDs on
+the system. Given a list of available ICDs, the loader can enumerate all the
+physical devices available  for an application and return this information to
+the application.
+
+
+#### Instance Versus Device
+
+There is an important concept which you will see brought up repeatedly
+throughout this document.  Many functions, extensions, and other things in
+Vulkan are separated into two main groups:
+ * Instance-related Objects
+ * Device-related Objects
+
+
+##### Instance-related Objects
+
+A Vulkan Instance is a high-level construct used to provide Vulkan system-level
+information, or functionality.  Vulkan objects associated directly with an
+instance are:
+ * `VkInstance`
+ * `VkPhysicalDevice`
+
+An Instance function is any Vulkan function which takes as its first parameter
+either an object from the Instance list, or nothing at all.  Some Vulkan
+Instance functions are:
+ * `vkEnumerateInstanceExtensionProperties`
+ * `vkEnumeratePhysicalDevices`
+ * `vkCreateInstance`
+ * `vkDestroyInstance`
+
+You query Vulkan Instance functions using `vkGetInstanceProcAddr`.
+`vkGetInstanceProcAddr` can be used to query either device or instance entry
+points in addition to all core entry points.  The returned function pointer is
+valid for this Instance and any object created under this Instance (including
+all `VkDevice` objects).  
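+
+As a minimal sketch (illustrative code only, not part of the loader; the
+function name is a placeholder), an application might fetch an Instance
+function pointer like this, assuming a valid `VkInstance` already exists:
+
+```
+#include <vulkan/vulkan.h>
+
+/* Fetch an Instance function once and reuse it.  The returned pointer is
+ * valid for `instance` and for any VkDevice created from it. */
+static PFN_vkEnumeratePhysicalDevices
+load_enumerate_physical_devices(VkInstance instance)
+{
+    return (PFN_vkEnumeratePhysicalDevices)vkGetInstanceProcAddr(
+        instance, "vkEnumeratePhysicalDevices");
+}
+```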
+
+Similarly, an Instance extension is a set of Vulkan Instance functions extending
+the Vulkan language.  These will be discussed in more detail later.
+
+
+##### Device-related Objects
+
+A Vulkan Device, on the other hand, is a logical identifier used to associate
+functions with a particular physical device on a user's system.  Vulkan
+constructs associated directly with a device include:
+ * `VkDevice`
+ * `VkQueue`
+ * `VkCommandBuffer`
+ * Any dispatchable object that is a child of one of the above
+
+A Device function is any Vulkan function which takes any Device Object as its
+first parameter.  Some Vulkan Device functions are:
+ * `vkQueueSubmit`
+ * `vkBeginCommandBuffer`
+ * `vkCreateEvent`
+
+You can query Vulkan Device functions using either `vkGetInstanceProcAddr` or 
+`vkGetDeviceProcAddr`.  If you choose to use `vkGetInstanceProcAddr`, it will
+have an additional level built into the call chain, which will reduce
+performance slightly.  However, the function pointer returned can be used for
+any device created later, as long as it is associated with the same Vulkan
+Instance. If, instead, you use `vkGetDeviceProcAddr`, the call chain will be more
+optimized to the specific device, but it will **only** work for the device used
+to query the function pointer.  Also, unlike `vkGetInstanceProcAddr`,
+`vkGetDeviceProcAddr` can only be used on core Vulkan Device functions, or
+Device extension functions.
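+
+The following sketch (illustrative only; the function name and handles are
+placeholders) queries the same Device function both ways to make the
+trade-off concrete:
+
+```
+#include <vulkan/vulkan.h>
+
+void query_queue_submit(VkInstance instance, VkDevice device)
+{
+    /* Queried via the instance: usable with any VkDevice created from
+     * `instance`, but every call passes through an extra loader trampoline. */
+    PFN_vkQueueSubmit submit_any_device =
+        (PFN_vkQueueSubmit)vkGetInstanceProcAddr(instance, "vkQueueSubmit");
+
+    /* Queried via the device: valid only for `device` and its child objects,
+     * but the call chain is as direct as possible. */
+    PFN_vkQueueSubmit submit_this_device =
+        (PFN_vkQueueSubmit)vkGetDeviceProcAddr(device, "vkQueueSubmit");
+
+    (void)submit_any_device;
+    (void)submit_this_device;
+}
+```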
+
+The best solution is to query Instance extension functions using
+`vkGetInstanceProcAddr`, and to query Device extension functions using
+`vkGetDeviceProcAddr`.  See
+[Best Application Performance Setup](#best-application-performance-setup) for
+more information on this.
+
+As with Instance extensions, a Device extension is a set of Vulkan Device
+functions extending the Vulkan language. You can read more about these later in
+the document.
+
+
+#### Dispatch Tables and Call Chains
+
+Vulkan uses an object model to control the scope of a particular action or
+operation.  The object to be acted on is always the first parameter of a Vulkan
+call and is a dispatchable object (see Vulkan specification section 2.3 Object
+Model).  Under the covers, the dispatchable object handle is a pointer to a
+structure, which in turn, contains a pointer to a dispatch table maintained by
+the loader.  This dispatch table contains pointers to the Vulkan functions
+appropriate to that object.
+
+There are two types of dispatch tables the loader maintains:
+ - Instance Dispatch Table
+   - Created in the loader during the call to `vkCreateInstance`
+ - Device Dispatch Table
+   - Created in the loader during the call to `vkCreateDevice`
+
+At that time the application and the system can each specify optional layers to be
+included.  The loader will initialize the specified layers to create a call
+chain for each Vulkan function and each entry of the dispatch table will point
+to the first element of that chain. Thus, the loader builds an instance call
+chain for each `VkInstance` that is created and a device call chain for each
+`VkDevice` that is created.
+
+When an application calls a Vulkan function, this typically will first hit a
+*trampoline* function in the loader.  These *trampoline* functions are small,
+simple functions that jump to the appropriate dispatch table entry for the
+object they are given.  Additionally, for functions in the instance call chain,
+the loader supplies a final function, called a *terminator*, which is called
+after all enabled layers to marshal the appropriate information to all
+available ICDs.
+
+
+##### Instance Call Chain Example
+
+For example, the diagram below represents what happens in the call chain for
+`vkCreateInstance`. After initializing the chain, the loader will call into the
+first layer's `vkCreateInstance`, which will call the next layer's, and so on,
+finally terminating back in the loader, where the *terminator* calls every
+ICD's `vkCreateInstance` and
+saves the results. This allows every enabled layer for this chain to set up
+what it needs based on the `VkInstanceCreateInfo` structure from the
+application.
+
+![Instance Call Chain](./images/loader_instance_chain.png)
+
+This also highlights some of the complexity the loader must manage when using
+instance call chains. As shown here, the loader's *terminator* must aggregate
+information to and from multiple ICDs when they are present. This implies that
+the loader has to be aware of any instance-level extensions which work on a
+`VkInstance` to aggregate them correctly.
+
+
+##### Device Call Chain Example
+
+Device call chains are created at `vkCreateDevice` and are generally simpler
+because they deal with only a single device and the ICD can always be the
+*terminator* of the chain. 
+
+![Loader Device Call Chain](./images/loader_device_chain_loader.png)
+
+
+<br/>
+<br/>
+
+## Application Interface to the Loader
+
+In this section we'll discuss how an application interacts with the loader,
+including:
+  * [Interfacing with Vulkan Functions](#interfacing-with-vulkan-functions)
+    * [Vulkan Direct Exports](#vulkan-direct-exports)
+    * [Directly Linking to the Loader](#directly-linking-to-the-loader)
+      * [Dynamic Linking](#dynamic-linking)
+      * [Static Linking](#static-linking)
+    * [Indirectly Linking to the Loader](#indirectly-linking-to-the-loader)
+    * [Best Application Performance Setup](#best-application-performance-setup)
+    * [ABI Versioning](#abi-versioning)
+  * [Application Layer Usage](#application-layer-usage)
+    * [Implicit vs Explicit Layers](#implicit-vs-explicit-layers)
+    * [Forcing Layer Source Folders](#forcing-layer-source-folders)
+    * [Forcing Layers to be Enabled](#forcing-layers-to-be-enabled)
+    * [Overall Layer Ordering](#overall-layer-ordering)
+  * [Application Usage of Extensions](#application-usage-of-extensions)
+    * [Instance and Device Extensions](#instance-and-device-extensions)
+    * [WSI Extensions](#wsi-extensions)
+    * [Unknown Extensions](#unknown-extensions)
+
+  
+#### Interfacing with Vulkan Functions
+There are several ways you can interface with Vulkan functions through the
+loader.
+
+
+##### Vulkan Direct Exports
+The loader library on Windows, Linux, Android, and macOS will export all core Vulkan
+and all appropriate Window System Interface (WSI) extensions. This is done to
+make it simpler to get started with Vulkan development. When an application
+links directly to the loader library in this way, the Vulkan calls are simple
+*trampoline* functions that jump to the appropriate dispatch table entry for the
+object they are given.
+
+
+##### Directly Linking to the Loader
+
+###### Dynamic Linking
+The loader is ordinarily distributed as a dynamic library (.dll on Windows or
+.so on Linux or .dylib on macOS) which gets installed to the system path
+for dynamic libraries.
+Linking to the dynamic library is generally the preferred method of linking to
+the loader, as doing so allows the loader to be updated for bug fixes and
+improvements. Furthermore, the dynamic library is generally installed to Windows
+systems as part of driver installation and is generally provided on Linux
+through the system package manager. This means that applications can usually
+expect a copy of the loader to be present on a system. If applications want to
+be completely sure that a loader is present, they can include a loader or
+runtime installer with their application.
+
+###### Static Linking
+The loader can also be used as a static library (this is shipped in the
+Windows SDK as `VKstatic.1.lib`). Linking to the static loader means that the
+user does not need to have a Vulkan runtime installed, and it also guarantees
+that your application will use a specific version of the loader. However, there
+are several downsides to this approach:
+
+  - The static library can never be updated without re-linking the application
+  - This opens up the possibility that two included libraries could contain
+  different versions of the loader
+    - This could potentially cause conflicts between the different loader versions
+
+As a result, it is recommended that users prefer linking to the dynamic
+versions of the loader.
+
+
+##### Indirectly Linking to the Loader
+Applications are not required to link directly to the loader library, instead
+they can use the appropriate platform-specific dynamic symbol lookup on the
+loader library to initialize the application's own dispatch table. This allows
+an application to fail gracefully if the loader cannot be found.  It also
+provides the fastest mechanism for the application to call Vulkan functions. An
+application will only need to query (via system calls such as `dlsym`) the
+address of `vkGetInstanceProcAddr` from the loader library. Using
+`vkGetInstanceProcAddr` the application can then discover the address of all
+functions and extensions available, such as `vkCreateInstance`,
+`vkEnumerateInstanceExtensionProperties` and
+`vkEnumerateInstanceLayerProperties` in a platform-independent way.
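+
+A minimal sketch of this approach on Linux might look like the following
+(error handling, cleanup, and the library name are simplified; other
+platforms would use their own dynamic-loading calls, for example
+`LoadLibrary`/`GetProcAddress` on Windows):
+
+```
+#include <dlfcn.h>
+#include <stdio.h>
+
+#define VK_NO_PROTOTYPES
+#include <vulkan/vulkan.h>
+
+int main(void)
+{
+    /* Fail gracefully if no loader is installed on this system. */
+    void *loader = dlopen("libvulkan.so.1", RTLD_NOW | RTLD_LOCAL);
+    if (loader == NULL) {
+        fprintf(stderr, "Vulkan loader not found: %s\n", dlerror());
+        return 1;
+    }
+
+    /* vkGetInstanceProcAddr is the only symbol looked up directly; all
+     * other entry points are discovered through it. */
+    PFN_vkGetInstanceProcAddr gipa =
+        (PFN_vkGetInstanceProcAddr)dlsym(loader, "vkGetInstanceProcAddr");
+    PFN_vkCreateInstance create_instance =
+        (PFN_vkCreateInstance)gipa(NULL, "vkCreateInstance");
+
+    VkInstanceCreateInfo info = {
+        .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
+    };
+    VkInstance instance = VK_NULL_HANDLE;
+    VkResult err = create_instance(&info, NULL, &instance);
+    printf("vkCreateInstance returned %d\n", (int)err);
+
+    dlclose(loader);
+    return 0;
+}
+```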
+
+
+##### Best Application Performance Setup
+
+If you desire the best performance possible, you should set up your own
+dispatch table so that all your Instance functions are queried using
+`vkGetInstanceProcAddr` and all your Device functions are queried using
+`vkGetDeviceProcAddr`.
+
+*Why should you do this?*
+
+The answer lies in how the call chain of an Instance function is implemented
+versus the call chain of a Device function.  Remember, a [Vulkan Instance is a
+high-level construct used to provide Vulkan system-level 
+information](#instance-related-objects).
+Because of this, Instance functions need to be broadcast to
+every available ICD on the system.  The following diagram shows an approximate
+view of an Instance call chain with three enabled layers:
+
+![Instance Call Chain](./images/loader_instance_chain.png)
+
+This is also how a Vulkan Device function call chain looks if you query it
+using `vkGetInstanceProcAddr`.  On the other hand, a Device
+function doesn't need to worry about the broadcast because it knows specifically
+which associated ICD and which associated Physical Device the call should
+terminate at.  Because of this, the loader doesn't need to get involved between
+any enabled layers and the ICD.  Thus, if you used a loader-exported Vulkan
+Device function, the call chain in the same scenario as above would look like:
+
+![Loader Device Call Chain](./images/loader_device_chain_loader.png)
+
+An even better solution would be for an application to perform a
+`vkGetDeviceProcAddr` call on all Device functions.  This further optimizes the
+call chain by removing the loader altogether under most scenarios:
+
+![Application Device Call Chain](./images/loader_device_chain_app.png)
+
+Also, notice that if no layers are enabled, your application's function pointer
+would point **directly to the ICD**.  If called often enough, those saved
+indirections can add up to real performance savings.
+
+**NOTE:** There are some Device functions which still require the loader to
+intercept them with a *trampoline* and *terminator*. There are very few of
+these, but they are typically functions which the loader wraps with its own
+data.  In those cases, even the Device call chain will continue to look like the
+Instance call chain.  One example of a Device function requiring a *terminator*
+is `vkCreateSwapchainKHR`.  For that function, the loader needs to potentially
+convert the KHR_surface object into an ICD-specific KHR_surface object prior to
+passing down the rest of the function's information to the ICD.
+
+Remember:
+ * `vkGetInstanceProcAddr` can be used to query
+either device or instance entry points in addition to all core entry points.
+ * `vkGetDeviceProcAddr` can only be used to query for device
+extension or core device entry points.
+
+
+##### ABI Versioning
+
+The Vulkan loader library will be distributed in various ways including Vulkan
+SDKs, OS package distributions and Independent Hardware Vendor (IHV) driver
+packages. These details are beyond the scope of this document. However, the name
+and versioning of the Vulkan loader library are specified so an app can link to
+the correct Vulkan ABI library version. Vulkan versioning is such that ABI
+backward compatibility is guaranteed for all versions with the same major
+number (e.g. 1.0 and 1.1). On Windows, the loader library encodes the ABI
+version in its name such that multiple ABI incompatible versions of the loader
+can peacefully coexist on a given system. The Vulkan loader library file name is
+`vulkan-<ABI version>.dll`. For example, for Vulkan version 1.X on Windows the
+library filename is `vulkan-1.dll`. And this library file can typically be found
+in the `windows\system32` directory (on 64-bit Windows installs, the 32-bit
+version of the loader with the same name can be found in the `windows\sysWOW64`
+directory).
+
+For Linux and macOS, shared libraries are versioned based on a suffix. Thus, the ABI
+number is not encoded in the base of the library filename as on Windows. On
+Linux an application wanting to link to the latest Vulkan ABI version would
+just link to the name vulkan (`libvulkan.so`).  A specific Vulkan ABI version can
+also be linked to by applications (e.g. `libvulkan.so.1`).
+On macOS, the libraries are `libvulkan.dylib` and `libvulkan.1.dylib`.
+
+
+#### Application Layer Usage
+
+Applications desiring Vulkan functionality beyond what the core API offers may
+use various layers or extensions. A layer cannot introduce new Vulkan core API
+entry points to an application that are not exposed in Vulkan.h.  However,
+layers may offer extensions that introduce new Vulkan commands that can be
+queried through the extension interface.
+
+A common use of layers is for API validation which can be enabled by
+loading the layer during application development, but not loading the layer
+for application release. This eliminates the overhead of validating the
+application's usage of the API, something that wasn't available on some previous
+graphics APIs.
+
+To find out what layers are available to your application, use
+`vkEnumerateInstanceLayerProperties`.  This will report all layers
+that have been discovered by the loader.  The loader looks in various locations
+to find layers on the system.  For more information see the
+[Layer discovery](#layer-discovery) section below.
+
+To enable a layer, or layers, simply pass the name of the layers you wish to
+enable in the `ppEnabledLayerNames` field of the `VkInstanceCreateInfo` during
+a call to `vkCreateInstance`.  Once done, the layers you have enabled will be
+active for all Vulkan functions using the created `VkInstance`, and any of
+its child objects.
+
+**NOTE:** Layer ordering is important in several cases since some layers
+interact with each other.  Be careful when enabling layers as this may be
+the case.  See the [Overall Layer Ordering](#overall-layer-ordering) section
+for more information.
+
+The following code section shows how you would go about enabling the
+VK_LAYER_LUNARG_standard_validation layer.
+
+```
+   char *instance_validation_layers[] = {
+        "VK_LAYER_LUNARG_standard_validation"
+    };
+    const VkApplicationInfo app = {
+        .sType = VK_STRUCTURE_TYPE_APPLICATION_INFO,
+        .pNext = NULL,
+        .pApplicationName = "TEST_APP",
+        .applicationVersion = 0,
+        .pEngineName = "TEST_ENGINE",
+        .engineVersion = 0,
+        .apiVersion = VK_API_VERSION_1_0,
+    };
+    VkInstanceCreateInfo inst_info = {
+        .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
+        .pNext = NULL,
+        .pApplicationInfo = &app,
+        .enabledLayerCount = 1,
+        .ppEnabledLayerNames = (const char *const *)instance_validation_layers,
+        .enabledExtensionCount = 0,
+        .ppEnabledExtensionNames = NULL,
+    };
+    err = vkCreateInstance(&inst_info, NULL, &demo->inst);
+```
+
+At `vkCreateInstance` and `vkCreateDevice`, the loader constructs call chains
+that include the application specified (enabled) layers.  Order is important in
+the `ppEnabledLayerNames` array; array element 0 is the topmost (closest to the
+application) layer inserted in the chain and the last array element is closest
+to the driver.  See the [Overall Layer Ordering](#overall-layer-ordering)
+section for more information on layer ordering.
+
+**NOTE:** *Device Layers Are Now Deprecated*
+> `vkCreateDevice` originally was able to select layers in a similar manner to
+`vkCreateInstance`.  This led to the concept of "instance
+> layers" and "device layers".  It was decided by Khronos to deprecate the
+> "device layer" functionality and only consider "instance layers".
+> Therefore, `vkCreateDevice` will use the layers specified at
+`vkCreateInstance`.
+> Because of this, the following items have been deprecated:
+> * `VkDeviceCreateInfo` fields:
+>  * `ppEnabledLayerNames`
+>  * `enabledLayerCount`
+> * The `vkEnumerateDeviceLayerProperties` function
+
+
+##### Implicit vs Explicit Layers
+
+Explicit layers are layers which are enabled by an application (e.g. with the
+vkCreateInstance function), or by an environment variable (as mentioned
+previously).
+
+Implicit layers are those which are enabled by their existence. For example,
+certain application environments (e.g. Steam or an automotive infotainment
+system) may have layers which they always want enabled for all applications
+that they start. Other implicit layers may be for all applications started on a
+given system (e.g. layers that overlay frames-per-second). Implicit layers are
+enabled automatically, whereas explicit layers must be enabled explicitly.
+
+Implicit layers have an additional requirement over explicit layers: they must
+be able to be disabled through an environment variable.  This is because they
+are not visible to the application and could otherwise cause issues.
+A good principle to keep in mind would be to define both an enable and disable
+environment variable so the users can deterministically enable the functionality.
+On Desktop platforms (Windows, Linux, and macOS), these enable/disable settings are
+defined in the layer's JSON file.
+
+Discovery of system-installed implicit and explicit layers is described later in
+the [Layer Discovery Section](#layer-discovery).  For now, simply know that what
+distinguishes a layer as implicit or explicit is dependent on the operating
+system, as shown in the table below.
+
+| Operating System | Implicit Layer Identification |
+|----------------|--------------------|
+| Windows  | Implicit layers are located in a different Windows registry location than explicit layers. |
+| Linux | Implicit layers are located in a different directory location than explicit layers. |
+| Android | There is **No Support For Implicit Layers** on Android. |
+| macOS | Implicit layers are located in a different directory location than explicit layers. |
+
+
+##### Forcing Layer Source Folders
+
+Developers may need to use special, pre-production layers, without modifying the
+system-installed layers. You can direct the loader to look for layers in a
+specific folder by defining the "VK\_LAYER\_PATH" environment variable.  This
+will override the mechanism used for finding system-installed layers. Because
+layers of interest may exist in several distinct folders on a system, this
+environment variable can contains several paths separated by the operating
+specific path separator.  On Windows, each separate folder should be separated
+in the list using a semi-colon.  On Linux and macOS, each folder name should be separated
+using a colon.
+
+If "VK\_LAYER\_PATH" exists, **only** the folders listed in it will be scanned
+for layers.  Each directory listed should be the full pathname of a folder
+containing layer manifest files.
+
+
+##### Forcing Layers to be Enabled on Windows, Linux and macOS
+
+Developers may want to enable layers that are not enabled by the given
+application they are using. On desktop systems, the environment variable
+"VK\_INSTANCE\_LAYERS" can be used to enable additional layers which are
+not specified (enabled) by the application at `vkCreateInstance`.
+"VK\_INSTANCE\_LAYERS" is a colon (Linux and macOS)/semi-colon (Windows) separated
+list of layer names to enable. Order is relevant with the first layer in the
+list being the top-most layer (closest to the application) and the last
+layer in the list being the bottom-most layer (closest to the driver).
+See the [Overall Layer Ordering](#overall-layer-ordering) section
+for more information.
+
+Application specified layers and user specified layers (via environment
+variables) are aggregated and duplicates removed by the loader when enabling
+layers. Layers specified via environment variable are top-most (closest to the
+application) while layers specified by the application are bottom-most.
+
+An example of using these environment variables to activate the validation
+layer `VK_LAYER_LUNARG_parameter_validation` on Linux or macOS is as follows:
+
+```
+> $ export VK_INSTANCE_LAYERS=VK_LAYER_LUNARG_parameter_validation
+```
+
+
+##### Overall Layer Ordering
+
+The overall ordering of all layers by the loader based on the above looks
+as follows:
+
+![Loader Layer Ordering](./images/loader_layer_order.png)
+
+Ordering may also be important internal to the list of explicit layers.
+Some layers may be dependent on other behavior being implemented before
+or after the loader calls it.  For example: the VK_LAYER_LUNARG_core_validation
+layer expects the VK_LAYER_LUNARG_parameter_validation layer to be called first.
+This is because the VK_LAYER_LUNARG_parameter_validation layer will filter out any
+invalid `NULL` pointer calls prior to the rest of the validation checking
+done by the VK_LAYER_LUNARG_core_validation layer.  If not done properly, you may see
+crashes in the VK_LAYER_LUNARG_core_validation layer that would otherwise be
+avoided.
+
+
+#### Application Usage of Extensions
+
+Extensions are optional functionality provided by a layer, the loader or an
+ICD. Extensions can modify the behavior of the Vulkan API and need to be
+specified and registered with Khronos.  These extensions can be created
+by an Independent Hardware Vendor (IHV) to expose new hardware functionality,
+or by a layer writer to expose some internal feature, or by the loader to
+improve functional behavior.  Information about various extensions can be
+found in the Vulkan Specification and the vulkan.h header file.
+
+
+##### Instance and Device Extensions
+
+As hinted at in the [Instance Versus Device](#instance-versus-device) section,
+there are really two types of extensions:
+ * Instance Extensions
+ * Device Extensions
+
+An Instance extension is an extension which modifies existing behavior or
+implements new behavior on instance-level objects, like a `VkInstance` or
+a `VkPhysicalDevice`.  A Device extension is an extension which does the same,
+but for any `VkDevice` object, or any dispatchable object that is a child of a
+`VkDevice` (`VkQueue` and `VkCommandBuffer` are examples of these).
+
+It is **very** important to know what type of extension you wish to enable,
+because Instance extensions are enabled during `vkCreateInstance` and
+Device extensions during `vkCreateDevice`.
+
+The loader discovers and aggregates all
+extensions from layers (both explicit and implicit), ICDs and the loader before
+reporting them to the application in `vkEnumerateXXXExtensionProperties`
+(where XXX is either "Instance" or "Device").
+ - Instance extensions are discovered via
+`vkEnumerateInstanceExtensionProperties`
+ - Device extensions are discovered via
+`vkEnumerateDeviceExtensionProperties`
+
+Looking at `vulkan.h`, you'll notice that they are both similar.  For example,
+the `vkEnumerateInstanceExtensionProperties` prototype looks as follows:
+
+```
+   VkResult
+   vkEnumerateInstanceExtensionProperties(const char *pLayerName,
+                                          uint32_t *pPropertyCount,
+                                          VkExtensionProperties *pProperties);
+```
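+
+For example, an application typically calls this function using the standard
+two-call idiom: first to query the count, then to retrieve the properties.  The
+following is a minimal sketch (error checking omitted):
+
+```cpp
+#include <cstdio>
+#include <vector>
+#include <vulkan/vulkan.h>
+
+void PrintInstanceExtensions()
+{
+    // First call: query only the number of available instance extensions.
+    uint32_t count = 0;
+    vkEnumerateInstanceExtensionProperties(NULL, &count, NULL);
+
+    // Second call: retrieve the extension properties themselves.
+    std::vector<VkExtensionProperties> properties(count);
+    vkEnumerateInstanceExtensionProperties(NULL, &count, properties.data());
+
+    for (const VkExtensionProperties &prop : properties) {
+        printf("%s (spec version %u)\n", prop.extensionName, prop.specVersion);
+    }
+}
+```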
+
+The "pLayerName" parameter in these functions is used to select either a single
+layer or the Vulkan platform implementation. If "pLayerName" is NULL, extensions
+from Vulkan implementation components (including loader, implicit layers, and
+ICDs) are enumerated. If "pLayerName" is equal to a discovered layer module name
+then only extensions from that layer (which may be implicit or explicit) are
+enumerated. Duplicate extensions (e.g. an implicit layer and ICD might report
+support for the same extension) are eliminated by the loader. For duplicates,
+the ICD version is reported and the layer version is culled.
+
+Also, Extensions *must be enabled* (in `vkCreateInstance` or `vkCreateDevice`)
+before the functions associated with the extensions can be used.  If you get an
+Extension function using either `vkGetInstanceProcAddr` or
+`vkGetDeviceProcAddr`, but fail to enable it, you could experience undefined
+behavior.  This should actually be flagged if you run with Validation layers
+enabled.
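+
+As a minimal sketch of this (error checking omitted), an application might
+enable `VK_EXT_debug_report` at instance creation and only then query one of
+its entry points:
+
+```cpp
+#include <vulkan/vulkan.h>
+
+VkInstance CreateInstanceWithDebugReport()
+{
+    // Request the instance extension during vkCreateInstance...
+    const char *enabled_extensions[] = { VK_EXT_DEBUG_REPORT_EXTENSION_NAME };
+
+    VkInstanceCreateInfo create_info = {};
+    create_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
+    create_info.enabledExtensionCount = 1;
+    create_info.ppEnabledExtensionNames = enabled_extensions;
+
+    VkInstance instance = VK_NULL_HANDLE;
+    vkCreateInstance(&create_info, NULL, &instance);
+
+    // ...and only then query the extension's function pointer.
+    PFN_vkCreateDebugReportCallbackEXT pfnCreateDebugReportCallbackEXT =
+        (PFN_vkCreateDebugReportCallbackEXT)vkGetInstanceProcAddr(
+            instance, "vkCreateDebugReportCallbackEXT");
+    (void)pfnCreateDebugReportCallbackEXT;
+
+    return instance;
+}
+```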
+
+
+##### WSI Extensions
+
+Khronos-approved WSI extensions are available and provide Window System
+Integration support for various execution environments. It is important to
+understand that some WSI extensions are valid for all targets, but others are
+particular to a given execution environment (and loader). This desktop loader
+(currently targeting Windows, Linux, and macOS) only enables and directly exports those
+WSI extensions that are appropriate to the current environment. For the most
+part, the selection is done in the loader using compile-time preprocessor flags.
+All versions of the desktop loader currently expose at least the following WSI
+extension support:
+- VK_KHR_surface
+- VK_KHR_swapchain
+- VK_KHR_display
+
+In addition, each of the following OS targets for the loader support 
+target-specific extensions:
+
+| Windowing System | Extensions available |
+|----------------|--------------------|
+| Windows  | VK_KHR_win32_surface |
+| Linux (Wayland) | VK_KHR_wayland_surface |
+| Linux (X11) |  VK_KHR_xcb_surface and VK_KHR_xlib_surface |
+| macOS (MoltenVK) | VK_MVK_macos_surface |
+
+It is important to understand that while the loader may support the various
+entry points for these extensions, there is a handshake required to actually
+use them:
+* At least one physical device must support the extension(s)
+* The application must select such a physical device
+* The application must request the extension(s) be enabled while creating the
+instance or logical device (this depends on whether or not the given extension
+works with an instance or a device)
+* The instance and/or logical device creation must succeed
+
+Only then can you expect to properly use a WSI extension in your Vulkan program.
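+
+As an illustration of the last two bullets above, the following minimal sketch
+requests `VK_KHR_swapchain` while creating the logical device; the
+`physical_device` handle and queue setup are assumed to exist, and error
+checking is omitted:
+
+```cpp
+#include <vulkan/vulkan.h>
+
+VkDevice CreateDeviceWithSwapchain(VkPhysicalDevice physical_device,
+                                   const VkDeviceQueueCreateInfo *queue_info)
+{
+    // Request the WSI device extension while creating the logical device.
+    const char *device_extensions[] = { VK_KHR_SWAPCHAIN_EXTENSION_NAME };
+
+    VkDeviceCreateInfo device_info = {};
+    device_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
+    device_info.queueCreateInfoCount = 1;
+    device_info.pQueueCreateInfos = queue_info;
+    device_info.enabledExtensionCount = 1;
+    device_info.ppEnabledExtensionNames = device_extensions;
+
+    VkDevice device = VK_NULL_HANDLE;
+    VkResult result = vkCreateDevice(physical_device, &device_info, NULL, &device);
+
+    // Only if result == VK_SUCCESS can the swapchain entry points be used.
+    return (result == VK_SUCCESS) ? device : VK_NULL_HANDLE;
+}
+```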
+
+
+##### Unknown Extensions
+
+With the ability to expand Vulkan so easily, extensions will be created that the
+loader knows nothing about.  If the extension is a device extension, the loader
+will pass the unknown entry point down the device call chain ending with the
+appropriate ICD entry points.  The same thing will happen if the extension is
+an instance extension which takes a physical device parameter as its first
+component.  However, for all other instance extensions the loader will fail to
+load it.
+
+*But why doesn't the loader support unknown instance extensions?*
+<br/>
+Let's look again at the Instance call chain:
+
+![Instance call chain](./images/loader_instance_chain.png)
+
+Notice that for a normal instance function call, the loader has to handle
+passing along the function call to the available ICDs.  If the loader has no
+idea of the parameters or return value of the instance call, it can't properly
+pass information along to the ICDs.  There may be ways to do this, which will be
+explored in the future.  However, for now, this loader does not support
+instance extensions which don't take a physical device as their first parameter.
+
+Because the device call-chain does not normally pass through the loader
+*terminator*, this is not a problem for device extensions.  Additionally,
+since a physical device is associated with one ICD, we can use a generic
+*terminator* pointing to one ICD.  This is because these extension functions
+terminate directly in the ICD they are associated with.
+
+*Is this a big problem?*
+<br/>
+No!  Most extension functionality only affects either a physical or logical
+device and not an instance.  Thus, the overwhelming majority of extensions
+should be supported with direct loader support.
+
+##### Filtering Out Unknown Instance Extension Names
+In some cases, an ICD may support instance extensions that the loader does not.
+For the above reasons, the loader will filter out the names of these unknown instance
+extensions when an application calls `vkEnumerateInstanceExtensionProperties`.
+Additionally, this behavior will cause the loader to throw an error during
+`vkCreateInstance` if you still attempt to use one of these extensions.  The intent is
+to protect applications so that they don't inadvertently use functionality
+which could lead to a crash.  
+
+On the other hand, if you know you can safely use the extension, you may disable
+the filtering by defining the environment variable `VK_LOADER_DISABLE_INST_EXT_FILTER`
+and setting the value to a non-zero number.  This will effectively disable the
+loader's filtering of instance extension names.
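+
+For example, on Linux or macOS:
+
+```
+> $ export VK_LOADER_DISABLE_INST_EXT_FILTER=1
+```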
+
+<br/>
+<br/>
+
+## Loader and Layer Interface
+
+In this section we'll discuss how the loader interacts with layers, including:
+  * [Layer Discovery](#layer-discovery)
+    * [Layer Manifest File Usage](#layer-manifest-file-usage)
+    * [Android Layer Discovery](#android-layer-discovery)
+    * [Windows Layer Discovery](#windows-layer-discovery)
+    * [Linux Layer Discovery](#linux-layer-discovery)
+    * [macOS Layer Discovery](#macos-layer-discovery)
+  * [Layer Version Negotiation](#layer-version-negotiation)
+  * [Layer Call Chains and Distributed Dispatch](#layer-call-chains-and-distributed-dispatch)
+  * [Layer Unknown Physical Device Extensions](#layer-unknown-physical-device-extensions)
+  * [Layer Intercept Requirements](#layer-intercept-requirements)
+  * [Distributed Dispatching Requirements](#distributed-dispatching-requirements)
+  * [Layer Conventions and Rules](#layer-conventions-and-rules)
+  * [Layer Dispatch Initialization](#layer-dispatch-initialization)
+  * [Example Code for CreateInstance](#example-code-for-createinstance)
+  * [Example Code for CreateDevice](#example-code-for-createdevice)
+  * [Meta-layers](#meta-layers)
+  * [Pre-Instance Functions](#pre-instance-functions)
+  * [Special Considerations](#special-considerations)
+    * [Associating Private Data with Vulkan Objects Within a Layer](#associating-private-data-with-vulkan-objects-within-a-layer)
+      * [Wrapping](#wrapping)
+      * [Hash Maps](#hash-maps)
+    * [Creating New Dispatchable Objects](#creating-new-dispatchable-objects)
+  * [Layer Manifest File Format](#layer-manifest-file-format)
+    * [Layer Manifest File Version History](#layer-manifest-file-version-history)
+  * [Layer Library Versions](#layer-library-versions)
+    * [Layer Library API Version 2](#layer-library-api-version-2)
+    * [Layer Library API Version 1](#layer-library-api-version-1)
+    * [Layer Library API Version 0](#layer-library-api-version-0)
+  
+
+ 
+#### Layer Discovery
+
+As mentioned in the
+[Application Interface section](#implicit-vs-explicit-layers),
+layers can be categorized into two categories:
+ * Implicit Layers
+ * Explicit Layers
+
+The main difference between the two is that implicit layers are automatically
+enabled, unless overridden, and explicit layers must be enabled.  Remember,
+implicit layers are not present on all Operating Systems (like Android).
+
+On any system, the loader looks in specific areas for information on the
+layers that it can load at a user's request.  The process of finding the
+available layers on a system is known as Layer Discovery.  During discovery,
+the loader determines what layers are available, the layer name, the layer
+version, and any extensions supported by the layer.  This information is
+provided back to an application through `vkEnumerateInstanceLayerProperties`.
+
+The group of layers available to the loader is known as a layer library.  This
+section defines an extensible interface to discover what layers are contained in
+the layer library.
+
+This section also specifies the minimal conventions and rules a layer must
+follow, especially with regards to interacting with the loader and other layers.
+
+##### Layer Manifest File Usage
+
+On Windows, Linux, and macOS systems, JSON-formatted manifest files are used to store
+layer information.  In order to find system-installed layers, the Vulkan loader
+will read the JSON files to identify the names and attributes of layers and
+their extensions. The use of manifest files allows the loader to avoid loading
+any shared library files when the application does not query nor request any
+extensions.  The format of [Layer Manifest File](#layer-manifest-file-format)
+is detailed below.
+
+The Android loader does not use manifest files.  Instead, the loader queries the
+layer properties using special functions known as "introspection" functions.
+The intent of these functions is to determine the same required information
+gathered from reading the manifest files.  These introspection functions are
+not used by the desktop loader but should be present in layers to maintain
+consistency.  The specific "introspection" functions are called out in
+the [Layer Manifest File Format](#layer-manifest-file-format) table.
+
+
+##### Android Layer Discovery
+
+On Android, the loader looks for layers to enumerate in the
+`/data/local/debug/vulkan` folder.  An application enabled for debug has the
+ability to enumerate and enable any layers in that location.
+
+
+##### Windows Layer Discovery
+
+In order to find system-installed layers, the Vulkan loader will scan the
+values in the following Windows registry keys:
+
+```
+   HKEY_LOCAL_MACHINE\SOFTWARE\Khronos\Vulkan\ExplicitLayers
+   HKEY_CURRENT_USER\SOFTWARE\Khronos\Vulkan\ExplicitLayers
+   HKEY_LOCAL_MACHINE\SOFTWARE\Khronos\Vulkan\ImplicitLayers
+   HKEY_CURRENT_USER\SOFTWARE\Khronos\Vulkan\ImplicitLayers
+```
+
+For each value in these keys which has DWORD data set to 0, the loader opens
+the JSON manifest file specified by the name of the value. Each name must be a
+full pathname to the manifest file. Additionally, the `HKEY_CURRENT_USER` locations
+will only be searched if an application does not have administrative privileges.
+This is done to ensure that an application with administrative privileges does not
+run layers that did not need administrator access to install.
+
+Additionally, the loader will scan through registry keys specific to Display
+Adapters and all Software Components associated with these adapters for the
+locations of JSON manifest files. These keys are located in device keys
+created during driver installation and contain configuration information
+for base settings, including Vulkan, OpenGL, and Direct3D ICD location.
+
+The Device Adapter and Software Component key paths should be obtained through the PnP
+Configuration Manager API. The `000X` key will be a numbered key, where each
+device is assigned a different number.
+
+```
+   HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Class\{Adapter GUID}\000X\VulkanExplicitLayers
+   HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Class\{Adapter GUID}\000X\VulkanImplicitLayers
+   HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Class\{Software Component GUID}\000X\VulkanExplicitLayers
+   HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Class\{Software Component GUID}\000X\VulkanImplicitLayers
+```
+
+In addition, on 64-bit systems there may be another set of registry values, listed
+below. These values record the locations of 32-bit layers on 64-bit operating systems,
+in the same way as the Windows-on-Windows functionality.
+
+```
+   HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Class\{Adapter GUID}\000X\VulkanExplicitLayersWow
+   HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Class\{Adapter GUID}\000X\VulkanImplicitLayersWow
+   HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Class\{Software Component GUID}\000X\VulkanExplicitLayersWow
+   HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Class\{Software Component GUID}\000X\VulkanImplicitLayersWow
+```
+
+If any of the above values exists and is of type `REG_SZ`, the loader will open the JSON
+manifest file specified by the key value. Each value must be a full absolute
+path to a JSON manifest file. A key value may also be of type `REG_MULTI_SZ`, in
+which case the value will be interpreted as a list of paths to JSON manifest files.
+
+In general, applications should install layers into the `SOFTWARE\Khronos\Vulkan`
+paths. The PnP registry locations are intended specifically for layers that are
+distributed as part of a driver installation. An application installer should not
+modify the device-specific registries, while a device driver should not modify
+the system registries.
+
+The Vulkan loader will open each manifest file that is given
+to obtain information about the layer, including the name or pathname of a
+shared library (".dll") file.  However, if VK\_LAYER\_PATH is defined, then the
+loader will instead look at the paths defined by that variable instead of using
+the information provided by these registry keys.  See
+[Forcing Layer Source Folders](#forcing-layer-source-folders) for more
+information on this.
+
+
+##### Linux Layer Discovery
+
+On Linux, the Vulkan loader will scan the files in the following Linux
+directories:
+
+    /usr/local/etc/vulkan/explicit_layer.d
+    /usr/local/etc/vulkan/implicit_layer.d
+    /usr/local/share/vulkan/explicit_layer.d
+    /usr/local/share/vulkan/implicit_layer.d
+    /etc/vulkan/explicit_layer.d
+    /etc/vulkan/implicit_layer.d
+    /usr/share/vulkan/explicit_layer.d
+    /usr/share/vulkan/implicit_layer.d
+    $HOME/.local/share/vulkan/explicit_layer.d
+    $HOME/.local/share/vulkan/implicit_layer.d
+
+Of course, there are some things you have to know about the above folders:
+ 1. The "/usr/local/*" directories can be configured to be other directories at
+build time.
+ 2. $HOME is the current home directory of the application's user id; this path
+will be ignored for suid programs.
+ 3. The "/usr/local/etc/vulkan/\*\_layer.d" and
+"/usr/local/share/vulkan/\*\_layer.d" directories are for layers that are
+installed from locally-built sources.
+ 4. The "/usr/share/vulkan/\*\_layer.d" directories are for layers that are
+installed from Linux-distribution-provided packages.
+ 5. The locations in `$HOME` will only be searched if an application does not have
+root access. This is done to ensure that an application with root access does not
+run layers that did not need root access to install.
+
+As on Windows, if VK\_LAYER\_PATH is defined, then the
+loader will instead look at the paths defined by that variable instead of using
+the information provided by these default paths.  However, these
+environment variables are only used for non-suid programs.  See
+[Forcing Layer Source Folders](#forcing-layer-source-folders) for more
+information on this.
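+
+For example, to point the loader at a folder of layer manifest files (the path
+shown is only a placeholder):
+
+```
+> $ export VK_LAYER_PATH=/path/to/layer/manifests
+```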
+
+##### macOS Layer Discovery
+
+On macOS, the Vulkan loader will scan the files in the following directories:
+
+    <bundle>/Contents/Resources/vulkan/explicit_layer.d
+    <bundle>/Contents/Resources/vulkan/implicit_layer.d
+    /etc/vulkan/explicit_layer.d
+    /etc/vulkan/implicit_layer.d
+    /usr/local/share/vulkan/explicit_layer.d
+    /usr/local/share/vulkan/implicit_layer.d
+    /usr/share/vulkan/explicit_layer.d
+    /usr/share/vulkan/implicit_layer.d
+    $HOME/.local/share/vulkan/explicit_layer.d
+    $HOME/.local/share/vulkan/implicit_layer.d
+
+1. &lt;bundle&gt; is the directory containing a bundled application.  It is scanned first.
+2. The "/usr/local/\*" directories can be configured to be other directories at
+build time.
+3. $HOME is the current home directory of the application's user id; this path
+will be ignored for suid programs.
+4. The locations in `$HOME` will only be searched if an application does not have
+root access. This is done to ensure that an application with root access does not
+run layers that did not need root access to install.
+
+As on Windows, if VK\_LAYER\_PATH is defined, then the
+loader will instead look at the paths defined by that variable instead of using
+the information provided by these default paths.  However, these
+environment variables are only used for non-suid programs.  See
+[Forcing Layer Source Folders](#forcing-layer-source-folders) for more
+information on this.
+
+#### Layer Version Negotiation
+
+Now that a layer has been discovered, an application can choose to load it (or
+it is loaded by default if it is an Implicit layer).  When the loader attempts
+to load the layer, the first thing it does is attempt to negotiate the version
+of the loader to layer interface.  In order to negotiate the loader/layer
+interface version, the layer must implement the
+`vkNegotiateLoaderLayerInterfaceVersion` function.  The following information is
+provided for this interface in include/vulkan/vk_layer.h:
+
+```cpp
+  typedef enum VkNegotiateLayerStructType {
+      LAYER_NEGOTIATE_INTERFACE_STRUCT = 1,
+  } VkNegotiateLayerStructType;
+
+  typedef struct VkNegotiateLayerInterface {
+      VkNegotiateLayerStructType sType;
+      void *pNext;
+      uint32_t loaderLayerInterfaceVersion;
+      PFN_vkGetInstanceProcAddr pfnGetInstanceProcAddr;
+      PFN_vkGetDeviceProcAddr pfnGetDeviceProcAddr;
+      PFN_GetPhysicalDeviceProcAddr pfnGetPhysicalDeviceProcAddr;
+  } VkNegotiateLayerInterface;
+
+  VkResult vkNegotiateLoaderLayerInterfaceVersion(
+                   VkNegotiateLayerInterface *pVersionStruct);
+```
+
+You'll notice the `VkNegotiateLayerInterface` structure is similar to other
+Vulkan structures.  The "sType" field, in this case takes a new enum defined
+just for internal loader/layer interfacing use.  The valid values for "sType"
+could grow in the future, but right now only has the one value
+"LAYER_NEGOTIATE_INTERFACE_STRUCT".
+
+This function (`vkNegotiateLoaderLayerInterfaceVersion`) should be exported by
+the layer so that "GetProcAddress" on Windows, or "dlsym" on Linux or macOS,
+returns a valid function pointer to it.  Once the loader has grabbed a valid
+address to the layer's function, the loader will create a variable of type
+`VkNegotiateLayerInterface` and initialize it in the following ways:
+ 1. Set the structure "sType" to "LAYER_NEGOTIATE_INTERFACE_STRUCT"
+ 2. Set pNext to NULL.
+     - This is for future growth
+ 3. Set "loaderLayerInterfaceVersion" to the current version the loader desires
+to set the interface to.
+      - The minimum value sent by the loader will be 2 since it is the first
+version supporting this function.
+
+The loader will then individually call each layer’s
+`vkNegotiateLoaderLayerInterfaceVersion` function with the filled out
+“VkNegotiateLayerInterface”. The layer will either accept the loader's version
+set in "loaderLayerInterfaceVersion", or modify it to the closest value version
+of the interface that the layer can support.  The value should not be higher
+than the version requested by the loader.  If the layer can't support at a
+minimum the version requested, then the layer should return an error like
+"VK_ERROR_INITIALIZATION_FAILED".  If a layer can support some version, then
+the layer should do the following:
+ 1. Adjust the version to the layer's desired version.
+ 2. The layer should fill in the function pointer values to its internal
+functions:
+    - "pfnGetInstanceProcAddr" should be set to the layer’s internal
+`GetInstanceProcAddr` function.
+    - "pfnGetDeviceProcAddr" should be set to the layer’s internal
+`GetDeviceProcAddr` function.
+    - "pfnGetPhysicalDeviceProcAddr" should be set to the layer’s internal
+`GetPhysicalDeviceProcAddr` function.
+      - If the layer supports no physical device extensions, it may set the
+value to NULL.
+      - More on this function later
+ 3. The layer should return "VK_SUCCESS"
+
+This function **SHOULD NOT CALL DOWN** the layer chain to the next layer.
+The loader will work with each layer individually.
+
+If the layer supports the new interface and reports version 2 or greater, then
+the loader will use the “pfnGetInstanceProcAddr” and “pfnGetDeviceProcAddr”
+functions from the “VkNegotiateLayerInterface” structure.  Prior to these
+changes, the loader would query each of those functions using "GetProcAddress"
+on Windows or "dlsym" on Linux or macOS.
+
+
+#### Layer Call Chains and Distributed Dispatch
+
+There are two key architectural features that drive the loader to layer library
+interface:
+ 1. Separate and distinct instance and device call chains
+ 2. Distributed dispatch.
+
+You can read an overview of dispatch tables and call chains above in the
+[Dispatch Tables and Call Chains](#dispatch-tables-and-call-chains) section.
+
+What's important to note here is that a layer can intercept Vulkan
+instance functions, device functions or both. For a layer to intercept instance
+functions, it must participate in the instance call chain.  For a layer to
+intercept device functions, it must participate in the device call chain.
+
+Remember, a layer does not need to intercept all instance or device functions,
+instead, it can choose to intercept only a subset of those functions.
+
+Normally, when a layer intercepts a given Vulkan function, it will call down the
+instance or device call chain as needed. The loader and all layer libraries that
+participate in a call chain cooperate to ensure the correct sequencing of calls
+from one entity to the next. This group effort for call chain sequencing is
+hereinafter referred to as **distributed dispatch**.
+
+In distributed dispatch each layer is responsible for properly calling the next
+entity in the call chain. This means that a dispatch mechanism is required for
+all Vulkan functions that a layer intercepts. If a Vulkan function is not
+intercepted by a layer, or if a layer chooses to terminate the function by not
+calling down the chain, then no dispatch is needed for that particular function.
+
+For example, if the enabled layers intercepted only certain instance functions,
+the call chain would look as follows:
+![Instance Function Chain](./images/function_instance_chain.png)
+
+Likewise, if the enabled layers intercepted only a few of the device functions,
+the call chain could look this way:
+![Device Function Chain](./images/function_device_chain.png)
+
+The loader is responsible for dispatching all core and instance extension Vulkan
+functions to the first entity in the call chain.
+
+
+#### Layer Unknown Physical Device Extensions
+
+Originally, if the loader was called with `vkGetInstanceProcAddr`, it would
+result in the following behavior:
+ 1. The loader would check if the function was a core function:
+    - If it was, it would return the function pointer
+ 2. The loader would check if the function was a known extension function:
+    - If it was, it would return the function pointer
+ 3. If the loader knew nothing about it, it would call down using
+`GetInstanceProcAddr`
+    - If it returned non-NULL, treat it as an unknown logical device command.
+    - This meant setting up a generic trampoline function that takes in a
+VkDevice as the first parameter and adjusting the dispatch table to call the
+ICD/Layers function after getting the dispatch table from the VkDevice.
+ 4. If all the above failed, the loader would return NULL to the application.
+
+This caused problems when a layer attempted to expose new physical device
+extensions the loader knew nothing about, but an application did.  Because the
+loader knew nothing about it, the loader would get to step 3 in the above
+process and would treat the function as an unknown logical device command.  The
+problem is, this would create a generic VkDevice trampoline function which, on
+the first call, would attempt to dereference the VkPhysicalDevice as a VkDevice.
+This would lead to a crash or corruption.
+
+In order to identify the extension entry points specific to physical device
+extensions, the following function can be added to a layer:
+
+```cpp
+PFN_vkVoidFunction vk_layerGetPhysicalDeviceProcAddr(VkInstance instance,
+                                                     const char* pName);
+```
+
+This function behaves similar to `vkGetInstanceProcAddr` and
+`vkGetDeviceProcAddr` except it should only return values for physical device
+extension entry points.  In this way, it compares "pName" to every physical
+device function supported in the layer.
+
+The following rules apply:
+  * If it is the name of a physical device function supported by the layer, the
+pointer to the layer's corresponding function should be returned.
+  * If it is the name of a valid function which is **not** a physical device
+function (i.e. an Instance, Device, or other function implemented by the layer),
+then the value of NULL should be returned.
+    * We don’t call down since we know the command is not a physical device
+extension.
+  * If the layer has no idea what this function is, it should call down the layer
+chain to the next `vk_layerGetPhysicalDeviceProcAddr` call.
+    * This can be retrieved in one of two ways:
+      * During `vkCreateInstance`, it is passed to a layer in the
+chain information passed to a layer in the `VkLayerInstanceCreateInfo`
+structure.
+        * Use `get_chain_info()` to get the pointer to the
+`VkLayerInstanceCreateInfo` structure.  Let's call it chain_info.
+        * The address is then under
+chain_info->u.pLayerInfo->pfnNextGetPhysicalDeviceProcAddr
+        * See
+[Example Code for CreateInstance](#example-code-for-createinstance)
+      * Using the next layer’s `GetInstanceProcAddr` function to query for
+`vk_layerGetPhysicalDeviceProcAddr`.
+
+This support is optional and should not be considered a requirement.  This is
+only required if a layer intends to support some functionality not directly
+supported by loaders released in the public.  If a layer does implement this
+support, it should return the address of its `vk_layerGetPhysicalDeviceProcAddr`
+function in the "pfnGetPhysicalDeviceProcAddr" member of the
+`VkNegotiateLayerInterface` structure during
+[Layer Version Negotiation](#layer-version-negotiation).  Additionally, the
+layer should also make sure `vkGetInstanceProcAddr` returns a valid function
+pointer to a query of `vk_layerGetPhysicalDeviceProcAddr`.
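+
+A minimal sketch of such a function follows.  The intercepted
+`MyLayer_GetPhysicalDeviceFeatures2KHR` function and the saved `g_next_gpdpa`
+pointer (captured during `vkCreateInstance` as described above) are
+hypothetical:
+
+```cpp
+#include <cstring>
+#include <vulkan/vulkan.h>
+#include <vulkan/vk_layer.h>
+
+// Hypothetical intercept of a physical device extension function.
+VKAPI_ATTR void VKAPI_CALL MyLayer_GetPhysicalDeviceFeatures2KHR(
+    VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2KHR *pFeatures);
+
+// Saved during vkCreateInstance from
+// chain_info->u.pLayerInfo->pfnNextGetPhysicalDeviceProcAddr.
+static PFN_GetPhysicalDeviceProcAddr g_next_gpdpa = NULL;
+
+VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_layerGetPhysicalDeviceProcAddr(
+        VkInstance instance, const char *pName)
+{
+    // A physical device function intercepted by this layer.
+    if (!strcmp(pName, "vkGetPhysicalDeviceFeatures2KHR"))
+        return (PFN_vkVoidFunction)MyLayer_GetPhysicalDeviceFeatures2KHR;
+
+    // A function this layer knows is not a physical device function.
+    if (!strcmp(pName, "vkCreateDevice"))
+        return NULL;
+
+    // Unknown to this layer: call down the chain.
+    return (g_next_gpdpa != NULL) ? g_next_gpdpa(instance, pName) : NULL;
+}
+```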
+
+The new behavior of the loader's `vkGetInstanceProcAddr` with support for the
+`vk_layerGetPhysicalDeviceProcAddr` function is as follows:
+ 1. Check if core function:
+    - If it is, return the function pointer
+ 2. Check if known instance or device extension function:
+    - If it is, return the function pointer
+ 3. Call the layer/ICD `GetPhysicalDeviceProcAddr`
+    - If it returns non-NULL, return a trampoline to a generic physical device
+function, and set up a generic terminator which will pass it to the proper ICD.
+ 4. Call down using `GetInstanceProcAddr`
+    - If it returns non-NULL, treat it as an unknown logical device command.
+This means setting up a generic trampoline function that takes in a VkDevice as
+the first parameter and adjusting the dispatch table to call the ICD/Layers
+function after getting the dispatch table from the VkDevice. Then, return the
+pointer to corresponding trampoline function.
+ 5. Return NULL
+
+You can see now, that, if the command gets promoted to core later, it will no
+longer be set up using `vk_layerGetPhysicalDeviceProcAddr`.  Additionally, if the
+loader adds direct support for the extension, it will no longer get to step 3,
+because step 2 will return a valid function pointer.  However, the layer should
+continue to support the command query via `vk_layerGetPhysicalDeviceProcAddr`,
+until at least a Vulkan version bump, because an older loader may still be
+attempting to use the commands.
+
+
+#### Layer Intercept Requirements
+
+  * Layers intercept a Vulkan function by defining a C/C++ function with
+signature **identical** to the Vulkan API for that function.
+  * A layer **must intercept at least** `vkGetInstanceProcAddr` and
+`vkCreateInstance` to participate in the instance call chain.
+  * A layer **may also intercept** `vkGetDeviceProcAddr` and `vkCreateDevice`
+to participate in the device call chain.
+  * For any Vulkan function a layer intercepts which has a non-void return value,
+**an appropriate value must be returned** by the layer intercept function.
+  * Most functions a layer intercepts **should call down the chain** to the
+corresponding Vulkan function in the next entity.
+    * The common behavior for a layer is to intercept a call, perform some
+behavior, then pass it down to the next entity.
+      * If you don't pass the information down, undefined behavior may occur.
+      * This is because the function will not be received by layers further down
+the chain, or any ICDs.
+    * One function that **must never call down the chain** is:
+      * `vkNegotiateLoaderLayerInterfaceVersion`
+    * Three common functions that **may not call down the chain** are:
+      * `vkGetInstanceProcAddr`
+      * `vkGetDeviceProcAddr`
+      * `vk_layerGetPhysicalDeviceProcAddr`
+      * These functions only call down the chain for Vulkan functions that they
+do not intercept.
+  * Layer intercept functions **may insert extra calls** to Vulkan functions in
+addition to the intercept.
+    * For example, a layer intercepting `vkQueueSubmit` may want to add a call to
+`vkQueueWaitIdle` after calling down the chain for `vkQueueSubmit` (a sketch of
+this appears after this list).
+    * This would result in two calls down the chain: First a call down the
+`vkQueueSubmit` chain, followed by a call down the `vkQueueWaitIdle` chain.
+    * Any additional calls inserted by a layer must be on the same chain
+      * If the function is a device function, only other device functions should
+be added.
+      * Likewise, if the function is an instance function, only other instance
+functions should be added.
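+
+A minimal sketch of the `vkQueueSubmit`/`vkQueueWaitIdle` example from the list
+above might look like this; `GetQueueDispatch()` is a hypothetical helper that
+returns the layer's dispatch table for the `VkDevice` owning the queue:
+
+```cpp
+#include <vulkan/vulkan.h>
+#include <vulkan/vk_layer.h>
+
+// Hypothetical helper implemented elsewhere in the layer.
+VkLayerDispatchTable *GetQueueDispatch(VkQueue queue);
+
+VKAPI_ATTR VkResult VKAPI_CALL MyLayer_QueueSubmit(
+        VkQueue queue, uint32_t submitCount,
+        const VkSubmitInfo *pSubmits, VkFence fence)
+{
+    VkLayerDispatchTable *disp = GetQueueDispatch(queue);
+
+    // First, call down the vkQueueSubmit chain as usual...
+    VkResult result = disp->QueueSubmit(queue, submitCount, pSubmits, fence);
+
+    // ...then insert an extra device-level call down the vkQueueWaitIdle chain.
+    disp->QueueWaitIdle(queue);
+
+    return result;
+}
+```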
+
+
+#### Distributed Dispatching Requirements
+
+- For each entry point a layer intercepts, it must keep track of the entry
+point residing in the next entity in the chain it will call down into.
+  * In other words, the layer must have a list of pointers to functions of the
+appropriate type to call into the next entity.
+  * This can be implemented in various ways but
+for clarity, will be referred to as a dispatch table.
+- A layer can use the `VkLayerDispatchTable` structure as a device dispatch
+table (see include/vulkan/vk_dispatch_table_helper.h).
+- A layer can use the `VkLayerInstanceDispatchTable` structure as a instance
+dispatch table (see include/vulkan/vk_dispatch_table_helper.h).
+- A Layer's `vkGetInstanceProcAddr` function uses the next entity's
+`vkGetInstanceProcAddr` to call down the chain for unknown (i.e.
+non-intercepted) functions.
+- A Layer's `vkGetDeviceProcAddr` function uses the next entity's
+`vkGetDeviceProcAddr` to call down the chain for unknown (i.e. non-intercepted)
+functions.
+- A Layer's `vk_layerGetPhysicalDeviceProcAddr` function uses the next entity's
+`vk_layerGetPhysicalDeviceProcAddr` to call down the chain for unknown (i.e.
+non-intercepted) functions.
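+
+For example, a layer's `vkGetDeviceProcAddr` following these requirements might
+be sketched as below; `MyLayer_QueueSubmit` and `GetDeviceDispatch()` are
+hypothetical layer internals:
+
+```cpp
+#include <cstring>
+#include <vulkan/vulkan.h>
+#include <vulkan/vk_layer.h>
+
+// Hypothetical layer internals.
+VKAPI_ATTR VkResult VKAPI_CALL MyLayer_QueueSubmit(VkQueue, uint32_t,
+                                                   const VkSubmitInfo *, VkFence);
+VkLayerDispatchTable *GetDeviceDispatch(VkDevice device);
+
+VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL MyLayer_GetDeviceProcAddr(
+        VkDevice device, const char *pName)
+{
+    // Functions intercepted by this layer (only two shown).
+    if (!strcmp(pName, "vkGetDeviceProcAddr"))
+        return (PFN_vkVoidFunction)MyLayer_GetDeviceProcAddr;
+    if (!strcmp(pName, "vkQueueSubmit"))
+        return (PFN_vkVoidFunction)MyLayer_QueueSubmit;
+
+    // Non-intercepted functions: use the next entity's vkGetDeviceProcAddr,
+    // stored in this layer's dispatch table, to call down the chain.
+    return GetDeviceDispatch(device)->GetDeviceProcAddr(device, pName);
+}
+```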
+
+
+#### Layer Conventions and Rules
+
+A layer, when inserted into an otherwise compliant Vulkan implementation, must
+still result in a compliant Vulkan implementation.  The intention is for layers
+to have a well-defined baseline behavior.  Therefore, it must follow some
+conventions and rules defined below.
+
+A layer is always chained with other layers.  It must not make invalid calls
+to, or rely on undefined behaviors of, its lower layers.  When it changes the
+behavior of a function, it must make sure its upper layers do not make invalid
+calls to or rely on undefined behaviors of its lower layers because of the
+changed behavior.  For example, when a layer intercepts an object creation
+function to wrap the objects created by its lower layers, it must make sure its
+lower layers never see the wrapping objects, directly from itself or
+indirectly from its upper layers.
+
+When a layer requires host memory, it may ignore the provided allocators.
+However, it should use the provided allocators if the layer is intended to run
+in a production environment; this usually applies to implicit layers that are
+always enabled.  Doing so allows applications to account for the layer's
+memory usage.
+
+Additional rules include:
+  - `vkEnumerateInstanceLayerProperties` **must** enumerate and **only**
+enumerate the layer itself.
+  - `vkEnumerateInstanceExtensionProperties` **must** handle the case where
+`pLayerName` is itself.
+    - It **must** return `VK_ERROR_LAYER_NOT_PRESENT` otherwise, including when
+`pLayerName` is `NULL`.
+  - `vkEnumerateDeviceLayerProperties` **is deprecated and may be omitted**.
+    - Using this will result in undefined behavior.
+  - `vkEnumerateDeviceExtensionProperties` **must** handle the case where
+`pLayerName` is itself.
+    - In other cases, it should normally chain to other layers.
+  - `vkCreateInstance` **must not** generate an error for unrecognized layer
+names and extension names.
+    - It may assume the layer names and extension names have been validated.
+  - `vkGetInstanceProcAddr` intercepts a Vulkan function by returning a local
+entry point
+    - Otherwise it returns the value obtained by calling down the instance call
+chain.
+  - `vkGetDeviceProcAddr` intercepts a Vulkan function by returning a local
+entry point
+    - Otherwise it returns the value obtained by calling down the device call
+chain.
+    - These additional functions must be intercepted if the layer implements
+device-level call chaining:
+      - `vkGetDeviceProcAddr`
+      - `vkCreateDevice` (only required for any device-level chaining)
+         - **NOTE:** older layer libraries may expect that `vkGetInstanceProcAddr`
+ignore `instance` when `pName` is `vkCreateDevice`.
+  - The specification **requires** `NULL` to be returned from
+`vkGetInstanceProcAddr` and `vkGetDeviceProcAddr` for disabled functions.
+    - A layer may return `NULL` itself or rely on the following layers to do so.
+
+
+#### Layer Dispatch Initialization
+
+- A layer initializes its instance dispatch table within its `vkCreateInstance`
+function.
+- A layer initializes its device dispatch table within its `vkCreateDevice`
+function.
+- The loader passes a linked list of initialization structures to layers via
+the "pNext" field in the `VkInstanceCreateInfo` and `VkDeviceCreateInfo`
+structures for `vkCreateInstance` and `vkCreateDevice` respectively.
+- The head node in this linked list is of type `VkLayerInstanceCreateInfo` for
+instance and VkLayerDeviceCreateInfo for device. See file
+`include/vulkan/vk_layer.h` for details.
+- A VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO is used by the loader for the
+"sType" field in `VkLayerInstanceCreateInfo`.
+- A VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO is used by the loader for the
+"sType" field in `VkLayerDeviceCreateInfo`.
+- The "function" field indicates how the union field "u" should be interpreted
+within `VkLayer*CreateInfo`. The loader will set the "function" field to
+VK_LAYER_LINK_INFO. This indicates "u" field should be `VkLayerInstanceLink` or
+`VkLayerDeviceLink`.
+- The `VkLayerInstanceLink` and `VkLayerDeviceLink` structures are the list
+nodes.
+- The `VkLayerInstanceLink` contains the next entity's `vkGetInstanceProcAddr`
+used by a layer.
+- The `VkLayerDeviceLink` contains the next entity's `vkGetInstanceProcAddr` and
+`vkGetDeviceProcAddr` used by a layer.
+- Given the above structures set up by the loader, a layer must initialize its
+dispatch table as follows:
+  - Find the `VkLayerInstanceCreateInfo`/`VkLayerDeviceCreateInfo` structure in
+the `VkInstanceCreateInfo`/`VkDeviceCreateInfo` structure.
+  - Get the next entity's vkGet*ProcAddr from the "pLayerInfo" field.
+  - For CreateInstance get the next entity's `vkCreateInstance` by calling the
+"pfnNextGetInstanceProcAddr":
+     pfnNextGetInstanceProcAddr(NULL, "vkCreateInstance").
+  - For CreateDevice get the next entity's `vkCreateDevice` by calling the
+"pfnNextGetInstanceProcAddr":
+     pfnNextGetInstanceProcAddr(NULL, "vkCreateDevice").
+  - Advance the linked list to the next node: pLayerInfo = pLayerInfo->pNext.
+  - Call down the chain either `vkCreateDevice` or `vkCreateInstance`
+  - Initialize your layer dispatch table by calling the next entity's
+Get*ProcAddr function once for each Vulkan function needed in your dispatch
+table
+
+#### Example Code for CreateInstance
+
+```cpp
+VkResult vkCreateInstance(
+        const VkInstanceCreateInfo *pCreateInfo,
+        const VkAllocationCallbacks *pAllocator,
+        VkInstance *pInstance)
+{
+    VkLayerInstanceCreateInfo *chain_info =
+        get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
+
+    assert(chain_info->u.pLayerInfo);
+    PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr =
+        chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
+    PFN_vkCreateInstance fpCreateInstance =
+        (PFN_vkCreateInstance)fpGetInstanceProcAddr(NULL, "vkCreateInstance");
+    if (fpCreateInstance == NULL) {
+        return VK_ERROR_INITIALIZATION_FAILED;
+    }
+
+    // Advance the link info for the next element of the chain
+    chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
+
+    // Continue call down the chain
+    VkResult result = fpCreateInstance(pCreateInfo, pAllocator, pInstance);
+    if (result != VK_SUCCESS)
+        return result;
+
+    // Init layer's dispatch table using GetInstanceProcAddr of
+    // next layer in the chain.
+    instance_dispatch_table = new VkLayerInstanceDispatchTable;
+    layer_init_instance_dispatch_table(
+        *pInstance, instance_dispatch_table, fpGetInstanceProcAddr);
+
+    // Other layer initialization
+    ...
+
+    return VK_SUCCESS;
+}
+```
+
+#### Example Code for CreateDevice
+
+```cpp
+VkResult 
+vkCreateDevice(
+        VkPhysicalDevice gpu,
+        const VkDeviceCreateInfo *pCreateInfo,
+        const VkAllocationCallbacks *pAllocator,
+        VkDevice *pDevice)
+{
+    VkLayerDeviceCreateInfo *chain_info =
+        get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
+
+    PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr =
+        chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
+    PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr =
+        chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
+    PFN_vkCreateDevice fpCreateDevice =
+        (PFN_vkCreateDevice)fpGetInstanceProcAddr(NULL, "vkCreateDevice");
+    if (fpCreateDevice == NULL) {
+        return VK_ERROR_INITIALIZATION_FAILED;
+    }
+
+    // Advance the link info for the next element on the chain
+    chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
+
+    VkResult result = fpCreateDevice(gpu, pCreateInfo, pAllocator, pDevice);
+    if (result != VK_SUCCESS) {
+        return result;
+    }
+
+    // initialize layer's dispatch table
+    device_dispatch_table = new VkLayerDispatchTable;
+    layer_init_device_dispatch_table(
+        *pDevice, device_dispatch_table, fpGetDeviceProcAddr);
+
+    // Other layer initialization
+    ...
+
+    return VK_SUCCESS;
+}
+```
+
+
+#### Meta-layers
+
+Meta-layers are a special kind of layer which is only available through the
+desktop loader.  While normal layers are associated with one particular library,
+a meta-layer is actually a collection containing an ordered list of other
+layers (called component layers).
+
+The most common example of a meta-layer is the
+`VK_LAYER_LUNARG_standard_validation` layer which groups all the most common
+individual validation layers into a single layer for ease-of-use.
+
+The benefits of a meta-layer are:
+ 1. You can activate more than one layer using a single layer name by simply
+grouping multiple layers in a meta-layer.
+ 2. You can define the order the loader will activate individual layers within
+the meta-layer.
+ 3. You can easily share your special layer configuration with others.
+ 4. The loader will automatically collate all instance and device extensions in
+a meta-layer's component layers, and report them as the meta-layer's properties
+to the application when queried.
+ 
+Restrictions to defining and using a meta-layer are:
+ 1. A Meta-layer Manifest file **must** be a properly formatted JSON file that
+contains one or more component layers.
+ 2. All component layers **must be** present on a system for the meta-layer to
+be used.
+ 3. All component layers **must be** at the same Vulkan API major and minor
+version for the meta-layer to be used.
+ 
+The ordering of a meta-layer's component layers in the instance or device
+call-chain is simple:
+  * The first layer listed will be the layer closest to the application.
+  * The last layer listed will be the layer closest to the drivers.
+
+Inside the meta-layer Manifest file, each component layer is listed by its
+layer name.  This is the "name" tag's value associated with each component layer's
+Manifest file under the "layer" or "layers" tag.  This is also the name that
+would normally be used when activating a layer during `vkCreateInstance`.
+
+Any duplicate layer names in either the component layer list, or globally among
+all enabled layers, will simply be ignored.  Only the first instance of any
+layer name will be used.
+
+For example, if you have a layer enabled using the environment variable
+`VK_INSTANCE_LAYERS` and have that same layer listed in a meta-layer, then the
+environment-variable-enabled layer will be used and the component layer will
+be dropped.  Likewise, if a person were to enable a meta-layer and then
+separately enable one of the component layers afterwards, the second
+instantiation of the layer name would be ignored.
+
+The
+Manifest file formatting necessary to define a meta-layer can be found in the
+[Layer Manifest File Format](#layer-manifest-file-format) section.
+
+#### Pre-Instance Functions
+
+Vulkan includes a small number of functions which are called without any dispatchable object.
+Most layers do not intercept these functions, as layers are enabled when an instance is created.
+However, under certain conditions it is possible for a layer to intercept these functions.
+
+In order to intercept the pre-instance functions, several conditions must be met:
+* The layer must be implicit
+* The layer manifest file format version must be 1.1.2 or later
+* The layer must export the entry point symbols for each intercepted function
+* The layer manifest must specify the name of each intercepted function in a `pre_instance_functions` JSON object
+
+The functions that may be intercepted in this way are:
+* `vkEnumerateInstanceExtensionProperties`
+* `vkEnumerateInstanceLayerProperties`
+
+Pre-instance functions work differently from all other layer intercept functions.
+Other intercept functions have a function prototype identical to that of the function they are intercepting.
+They then rely on data that was passed to the layer at instance or device creation so that layers can call down the chain.
+Because there is no need to create an instance before calling the pre-instance functions, these functions must use a separate mechanism for constructing the call chain.
+This mechanism consists of an extra parameter that will be passed to the layer intercept function when it is called.
+This parameter will be a pointer to a struct, defined as follows:
+
+```
+typedef struct Vk...Chain
+{
+    struct {
+        VkChainType type;
+        uint32_t version;
+        uint32_t size;
+    } header;
+    PFN_vkVoidFunction pfnNextLayer;
+    const struct Vk...Chain* pNextLink;
+} Vk...Chain;
+```
+
+These structs are defined in the `vk_layer.h` file so that it is not necessary to redefine the chain structs in any external code.
+The name of each struct is the same as the name of the function it corresponds to, except that the leading "v" is capitalized and the word "Chain" is added to the end.
+For example, the struct for `vkEnumerateInstanceExtensionProperties` is called `VkEnumerateInstanceExtensionPropertiesChain`.
+Furthermore, the `pfnNextLayer` struct member is not actually a void function pointer &mdash; its type will be the actual type of each function in the call chain.
+
+Each layer intercept function must have a prototype that is the same as the prototype of the function being intercepted, except that the first parameter must be that function's chain struct (passed as a const pointer).
+For example, a function that wishes to intercept `vkEnumerateInstanceExtensionProperties` would have the prototype:
+
+```
+VkResult InterceptFunctionName(const VkEnumerateInstanceExtensionPropertiesChain* pChain,
+    const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties);
+```
+
+The name of the function is arbitrary; it can be anything provided that it is given in the layer manifest file (see [Layer Manifest File Format](#layer-manifest-file-format)).
+The implementation of each intercept function is responsible for calling the next item in the call chain, using the chain parameter.
+This is done by calling the `pfnNextLayer` member of the chain struct, passing `pNextLink` as the first argument, and passing the remaining function arguments after that.
+For example, a simple implementation for `vkEnumerateInstanceExtensionProperties` that does nothing but call down the chain would look like:
+
+```
+VkResult InterceptFunctionName(const VkEnumerateInstanceExtensionPropertiesChain* pChain,
+    const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties)
+{
+    return pChain->pfnNextLayer(pChain->pNextLink, pLayerName, pPropertyCount, pProperties);
+}
+```
+
+When using a C++ compiler, each chain type also defines a function named `CallDown` which can be used to automatically handle the first argument.
+Implementing the above function using this method would look like:
+
+```
+VkResult InterceptFunctionName(const VkEnumerateInstanceExtensionPropertiesChain* pChain,
+    const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties)
+{
+    return pChain->CallDown(pLayerName, pPropertyCount, pProperties);
+}
+```
+
+Unlike with other functions in layers, the layer may not save any global data between these function calls.
+Because Vulkan does not store any state until an instance has been created, all layer libraries are released at the end of each pre-instance call.
+This means that implicit layers can use pre-instance intercepts to modify data that is returned by the functions, but they cannot be used to record that data.
+
+#### Special Considerations
+
+
+##### Associating Private Data with Vulkan Objects Within a Layer
+
+A layer may want to associate its own private data with one or more Vulkan
+objects.  Two common methods to do this are hash maps and object wrapping. 
+
+
+###### Wrapping
+
+The loader supports layers wrapping any Vulkan object, including dispatchable
+objects.  For functions that return object handles, each layer does not touch
+the value passed down the call chain.  This is because lower items may need to
+use the original value.  However, when the value is returned from a
+lower-level layer (possibly the ICD), the layer saves the handle  and returns
+its own handle to the layer above it (possibly the application).  When a layer
+receives a Vulkan function using something that it previously returned a handle
+for, the layer is required to unwrap the handle and pass along the saved handle
+to the layer below it.  This means that the layer **must intercept every Vulkan
+function which uses the object in question**, and wrap or unwrap the object, as
+appropriate.  This includes adding support for all extensions with functions
+using any object the layer wraps.
+
+Layers above the object wrapping layer will see the wrapped object. Layers
+which wrap dispatchable objects must ensure that the first field in the wrapping
+structure is a pointer to a dispatch table as defined in `vk_layer.h`.
+Specifically, an instance wrapped dispatchable object could be as follows:
+```
+struct my_wrapped_instance_obj_ {
+    VkLayerInstanceDispatchTable *disp;
+    // whatever data layer wants to add to this object
+};
+```
+A device wrapped dispatchable object could be as follows:
+```
+struct my_wrapped_device_obj_ {
+    VkLayerDispatchTable *disp;
+    // whatever data layer wants to add to this object
+};
+```
+
+Layers that wrap dispatchable objects must follow the guidelines for creating
+new dispatchable objects (below).
+
+###### Cautions About Wrapping
+
+Layers are generally discouraged from wrapping objects, because of the
+potential for incompatibilities with new extensions.  For example, let's say
+that a layer wraps `VkImage` objects, and properly wraps and unwraps `VkImage`
+object handles for all core functions.  If a new extension is created which has
+functions that take `VkImage` objects as parameters, and if the layer does not
+support those new functions, an application that uses both the layer and the new
+extension will have undefined behavior when those new functions are called (e.g.
+the application may crash).  This is because the lower-level layers and ICD
+won't receive the handle that they generated.  Instead, they will receive a
+handle that is only known by the layer that is wrapping the object.
+
+Because of the potential for incompatibilities with unsupported extensions,
+layers that wrap objects must check which extensions are being used by the
+application, and take appropriate action if the layer is used with unsupported
+extensions (e.g. disable layer functionality, stop wrapping objects, issue a
+message to the user).
+
+The reason that the validation layers wrap objects is to track the proper use
+and destruction of each object.  They issue a validation error if used with
+unsupported extensions, alerting the user to the potential for undefined
+behavior.
+
+
+###### Hash Maps
+
+Alternatively, a layer may want to use a hash map to associate data with a
+given object. The key to the map could be the object. Alternatively, for
+dispatchable objects at a given level (e.g. device or instance), the layer may
+want data associated with the `VkDevice` or `VkInstance` objects. Since
+there are multiple dispatchable objects for a given `VkInstance` or `VkDevice`,
+the `VkDevice` or `VkInstance` object is not a great map key. Instead the layer
+should use the dispatch table pointer within the `VkDevice` or `VkInstance`
+since that will be unique for a given `VkInstance` or `VkDevice`.
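+
+A minimal C++ sketch of this pattern, assuming a hypothetical per-device
+`DeviceData` structure maintained by the layer, could look like:
+
+```cpp
+#include <mutex>
+#include <unordered_map>
+#include <vulkan/vulkan.h>
+#include <vulkan/vk_layer.h>
+
+struct DeviceData {
+    VkLayerDispatchTable dispatch_table;
+    // ...whatever other per-device state the layer wants to track.
+};
+
+// Every dispatchable object begins with the loader/ICD dispatch pointer, so
+// the same key is obtained from a VkDevice and from any of its child
+// dispatchable objects (VkQueue, VkCommandBuffer).
+static inline void *GetDispatchKey(void *dispatchable_object) {
+    return *(void **)dispatchable_object;
+}
+
+static std::mutex g_map_mutex;
+static std::unordered_map<void *, DeviceData> g_device_data;
+
+static DeviceData &GetDeviceData(VkDevice device) {
+    std::lock_guard<std::mutex> lock(g_map_mutex);
+    return g_device_data[GetDispatchKey(device)];
+}
+```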
+
+
+##### Creating New Dispatchable Objects
+
+Layers which create dispatchable objects must take special care. Remember that
+loader *trampoline* code normally fills in the dispatch table pointer in the
+newly created object. Thus, the layer must fill in the dispatch table pointer if
+the loader *trampoline* will not do so.  Common cases where a layer (or ICD) may
+create a dispatchable object without loader *trampoline* code are as follows:
+- layers that wrap dispatchable objects
+- layers which add extensions that create dispatchable objects
+- layers which insert extra Vulkan functions in the stream of functions they
+intercept from the application
+- ICDs which add extensions that create dispatchable objects
+
+The desktop loader provides a callback that can be used for initializing
+a dispatchable object.  The callback is passed as an extension structure via the
+pNext field in the create info structure when creating an instance
+(`VkInstanceCreateInfo`) or device (`VkDeviceCreateInfo`).  The callback
+prototype is defined as follows for instance and device callbacks respectively
+(see `vk_layer.h`):
+
+```cpp
+VKAPI_ATTR VkResult VKAPI_CALL vkSetInstanceLoaderData(VkInstance instance,
+                                                       void *object);
+VKAPI_ATTR VkResult VKAPI_CALL vkSetDeviceLoaderData(VkDevice device,
+                                                     void *object);
+```
+
+To obtain these callbacks the layer must search through the list of structures
+pointed to by the "pNext" field in the `VkInstanceCreateInfo` and
+`VkDeviceCreateInfo` parameters to find any callback structures inserted by the
+loader. The salient details are as follows:
+- For `VkInstanceCreateInfo` the callback structure pointed to by "pNext" is
+`VkLayerInstanceCreateInfo` as defined in `include/vulkan/vk_layer.h`.
+- A "sType" field in of VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO within
+`VkInstanceCreateInfo` parameter indicates a loader structure.
+- Within `VkLayerInstanceCreateInfo`, the "function" field indicates how the
+union field "u" should be interpreted.
+- A "function" equal to VK_LOADER_DATA_CALLBACK indicates the "u" field will
+contain the callback in "pfnSetInstanceLoaderData".
+- For `VkDeviceCreateInfo` the callback structure pointed to by "pNext" is
+`VkLayerDeviceCreateInfo` as defined in `include/vulkan/vk_layer.h`.
+- A "sType" field in of VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO within
+`VkDeviceCreateInfo` parameter indicates a loader structure.
+- Within `VkLayerDeviceCreateInfo`, the "function" field indicates how the union
+field "u" should be interpreted.
+- A "function" equal to VK_LOADER_DATA_CALLBACK indicates the "u" field will
+contain the callback in "pfnSetDeviceLoaderData".
+
+Alternatively, if an older loader is being used that doesn't provide these
+callbacks, the layer may manually initialize the newly created dispatchable
+object.  To fill in the dispatch table pointer in newly created dispatchable
+object, the layer should copy the dispatch pointer, which is always the first
+entry in the structure, from an existing parent object of the same level
+(instance versus device).
+
+For example, if there is a newly created `VkCommandBuffer` object, then the
+dispatch pointer from the `VkDevice` object, which is the parent of the
+`VkCommandBuffer` object, should be copied into the newly created object.
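+
+A hedged sketch combining both approaches for the `VkCommandBuffer` case above
+follows; the `g_pfnSetDeviceLoaderData` pointer is assumed to have been saved
+(if the loader provided the callback) while processing `vkCreateDevice`:
+
+```cpp
+#include <vulkan/vulkan.h>
+#include <vulkan/vk_layer.h>
+
+// Saved from the VkLayerDeviceCreateInfo callback structure during
+// vkCreateDevice, if the loader provided one.
+static PFN_vkSetDeviceLoaderData g_pfnSetDeviceLoaderData = NULL;
+
+static void InitNewDispatchableObject(VkDevice device, VkCommandBuffer cmd_buffer)
+{
+    if (g_pfnSetDeviceLoaderData != NULL) {
+        // Preferred: let the loader initialize the new dispatchable object.
+        g_pfnSetDeviceLoaderData(device, cmd_buffer);
+    } else {
+        // Older loaders: copy the dispatch pointer, which is always the first
+        // entry in a dispatchable object, from the parent VkDevice.
+        *(void **)cmd_buffer = *(void **)device;
+    }
+}
+```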
+
+
+#### Layer Manifest File Format
+
+On Windows, Linux and macOS (desktop), the loader uses manifest files to discover
+layer libraries and layers.  The desktop loader doesn't directly query the
+layer library except during chaining.  This is to reduce the likelihood of
+loading a malicious layer into memory.  Instead, details are read from the
+Manifest file, which are then provided for applications to determine what
+layers should actually be loaded.
+
+The following section discusses the details of the Layer Manifest JSON file
+format.  The JSON file itself does not have any requirements for naming.  The
+only requirement is that the extension suffix of the file is ".json".
+
+Here is an example layer JSON Manifest file with a single layer:
+
+```
+{
+   "file_format_version" : "1.0.0",
+   "layer": {
+       "name": "VK_LAYER_LUNARG_overlay",
+       "type": "INSTANCE",
+       "library_path": "vkOverlayLayer.dll",
+       "api_version" : "1.0.5",
+       "implementation_version" : "2",
+       "description" : "LunarG HUD layer",
+       "functions": {
+           "vkNegotiateLoaderLayerInterfaceVersion":
+               "OverlayLayer_NegotiateLoaderLayerInterfaceVersion"
+       },
+       "instance_extensions": [
+           {
+               "name": "VK_EXT_debug_report",
+               "spec_version": "1"
+           },
+           {
+               "name": "VK_VENDOR_ext_x",
+               "spec_version": "3"
+            }
+       ],
+       "device_extensions": [
+           {
+               "name": "VK_EXT_debug_marker",
+               "spec_version": "1",
+               "entrypoints": ["vkCmdDbgMarkerBegin", "vkCmdDbgMarkerEnd"]
+           }
+       ],
+       "enable_environment": {
+           "ENABLE_LAYER_OVERLAY_1": "1"
+       },
+       "disable_environment": {
+           "DISABLE_LAYER_OVERLAY_1": ""
+       }
+   }
+}
+```
+
+Here's a snippet with the changes required to support multiple layers per
+manifest file:
+```
+{
+   "file_format_version" : "1.0.1",
+   "layers": [
+      {
+           "name": "VK_LAYER_layer_name1",
+           "type": "INSTANCE",
+           ...
+      },
+      {
+           "name": "VK_LAYER_layer_name2",
+           "type": "INSTANCE",
+           ...
+      }
+   ]
+}
+```
+
+Here's an example of a meta-layer manifest file:
+```
+{
+   "file_format_version" : "1.1.1",
+   "layer": {
+       "name": "VK_LAYER_LUNARG_standard_validation",
+       "type": "GLOBAL",
+       "api_version" : "1.0.40",
+       "implementation_version" : "1",
+       "description" : "LunarG Standard Validation Meta-layer",
+       "component_layers": [
+           "VK_LAYER_GOOGLE_threading",
+           "VK_LAYER_LUNARG_parameter_validation",
+           "VK_LAYER_LUNARG_object_tracker",
+           "VK_LAYER_LUNARG_core_validation",
+           "VK_LAYER_GOOGLE_unique_objects"
+       ]
+   }
+}
+```
+| JSON Node | Description and Notes | Introspection Query |
+|:----------------:|--------------------|:----------------:
+| "file\_format\_version" | Manifest format major.minor.patch version number. | N/A |
+| | Supported versions are: 1.0.0, 1.0.1, 1.1.0, 1.1.1, and 1.1.2. | |
+| "layer" | The identifier used to group a single layer's information together. | vkEnumerateInstanceLayerProperties |
+| "layers" | The identifier used to group multiple layers' information together.  This requires a minimum Manifest file format version of 1.0.1.| vkEnumerateInstanceLayerProperties |
+| "name" | The string used to uniquely identify this layer to applications. | vkEnumerateInstanceLayerProperties |
+| "type" | This field indicates the type of layer.  The values can be: GLOBAL, or INSTANCE. | vkEnumerate*LayerProperties |
+|  | **NOTES:** Prior to deprecation, the "type" node was used to indicate which layer chain(s) to activate the layer upon: instance, device, or both. Distinct instance and device layers are deprecated; there are now just layers. Allowable values for type (both before and after deprecation) are "INSTANCE", "GLOBAL", and "DEVICE". "DEVICE" layers are skipped over by the loader as if they were not found. |  |
+| "library\_path" | The "library\_path" specifies either a filename, a relative pathname, or a full pathname to a layer shared library file.  If "library\_path" specifies a relative pathname, it is relative to the path of the JSON manifest file (e.g. for cases when an application provides a layer that is in the same folder hierarchy as the rest of the application files).  If "library\_path" specifies a filename, the library must live in the system's shared object search path. There are no rules about the name of the layer shared library files other than it should end with the appropriate suffix (".DLL" on Windows, ".so" on Linux, and ".dylib" on macOS).  **This field must not be present if "component_layers" is defined**.  | N/A |
+| "api\_version" | The major.minor.patch version number of the Vulkan API that the shared library file for the library was built against. For example: 1.0.33. | vkEnumerateInstanceLayerProperties |
+| "implementation_version" | The version of the layer implemented.  If the layer itself has any major changes, this number should change so the loader and/or application can identify it properly. | vkEnumerateInstanceLayerProperties |
+| "description" | A high-level description of the layer and its intended use. | vkEnumerateInstanceLayerProperties |
+| "functions" | **OPTIONAL:** This section can be used to identify a different function name for the loader to use in place of standard layer interface functions. The "functions" node is required if the layer is using an alternative name for `vkNegotiateLoaderLayerInterfaceVersion`. | vkGet*ProcAddr |
+| "instance\_extensions" | **OPTIONAL:** Contains the list of instance extension names supported by this layer. One "instance\_extensions" node with an array of one or more elements is required if any instance extensions are supported by a layer; otherwise the node is optional. Each element of the array must have the nodes "name" and "spec_version" which correspond to `VkExtensionProperties` "extensionName" and "specVersion" respectively. | vkEnumerateInstanceExtensionProperties |
+| "device\_extensions" | **OPTIONAL:** Contains the list of device extension names supported by this layer. One "device\_extensions" node with an array of one or more elements is required if any device extensions are supported by a layer; otherwise the node is optional. Each element of the array must have the nodes "name" and "spec_version" which correspond to `VkExtensionProperties` "extensionName" and "specVersion" respectively. Additionally, each element of the array of device extensions must have the node "entrypoints" if the device extension adds Vulkan API functions; otherwise this node is not required. The "entrypoint" node is an array of the names of all entrypoints added by the supported extension. | vkEnumerateDeviceExtensionProperties |
+| "enable\_environment" | **Implicit Layers Only** - **OPTIONAL:** Indicates an environment variable used to enable the Implicit Layer (if set to a value of 1).  This environment variable (which should vary with each "version" of the layer) must be set to the given value or else the implicit layer is not loaded. This is for application environments (e.g. Steam) which want to enable a layer(s) only for applications that they launch, and allows for applications run outside of an application environment to not get that implicit layer(s).| N/A |
+| "disable\_environment" | **Implicit Layers Only** - **REQUIRED:**Indicates an environment variable used to disable the Implicit Layer (w/ value of 1). In rare cases of an application not working with an implicit layer, the application can set this environment variable (before calling Vulkan functions) in order to "blacklist" the layer. This environment variable (which should vary with each "version" of the layer) must be set (not particularly to any value). If both the "enable_environment" and "disable_environment" variables are set, the implicit layer is disabled. | N/A |
+| "component_layers" | **Meta-layers Only** - Indicates the component layer names that are part of a meta-layer.  The names listed must be the "name" identified in each of the component layer's Mainfest file "name" tag (this is the same as the name of the layer that is passed to the `vkCreateInstance` command).  All component layers must be present on the system and found by the loader in order for this meta-layer to be available and activated. **This field must not be present if "library\_path" is defined**. | N/A |
+| "pre_instance_functions" | **Implicit Layers Only** - **OPTIONAL:** Indicates which functions the layer wishes to intercept, that do not require that an instance has been created. This should be an object where each function to be intercepted is defined as a string entry where the key is the Vulkan function name and the value is the name of the intercept function in the layer's dynamic library. Available in layer manifest versions 1.1.2 and up. See [Pre-Instance Functions](#pre-instance-functions) for more information. | vkEnumerateInstance*Properties |
+
+##### Layer Manifest File Version History
+
+The highest Layer Manifest file format version currently supported is 1.1.2.
+Information about each version is detailed in the following sub-sections:
+
+###### Layer Manifest File Version 1.1.2
+
+Version 1.1.2 introduced the ability of layers to intercept function calls that do not have an instance.
+
+###### Layer Manifest File Version 1.1.1
+
+The ability to define custom metalayers was added.
+To support metalayers, the "component_layers" section was added, and the requirement for a "library_path" section to be present was removed when the "component_layers" section is present.
+
+###### Layer Manifest File Version 1.1.0
+
+Layer Manifest File Version 1.1.0 is tied to changes exposed by the Loader/Layer
+interface version 2.
+  1. Renaming "vkGetInstanceProcAddr" in the "functions" section is
+     deprecated since the loader no longer needs to query the layer about
+     "vkGetInstanceProcAddr" directly.  It is now returned during the layer
+     negotiation, so this field will be ignored.
+  2. Renaming "vkGetDeviceProcAddr" in the "functions" section is
+     deprecated since the loader no longer needs to query the layer about
+     "vkGetDeviceProcAddr" directly.  It too is now returned during the layer
+     negotiation, so this field will be ignored.
+  3. The ability to rename the "vkNegotiateLoaderLayerInterfaceVersion" function
+     was added to the "functions" section, since this is now the only
+     function the loader needs to query using OS-specific calls.
+      - NOTE: This field is optional and, like the two previous fields, is only
+needed if the layer requires changing the name of the function for some reason.
+
+You do not need to update your layer manifest file if you don't change the
+names of any of the listed functions.
+
+###### Layer Manifest File Version 1.0.1
+
+The ability to define multiple layers using the "layers" array was added.  This
+JSON array field can be used when defining a single layer or multiple layers.
+The "layer" field is still present and valid for a single layer definition.
+
+###### Layer Manifest File Version 1.0.0
+
+The initial version of the layer manifest file specified the basic format and
+fields of a layer JSON file.  The fields of the 1.0.0 file format include:
+ * "file\_format\_version"
+ * "layer"
+ * "name"
+ * "type"
+ * "library\_path"
+ * "api\_version"
+ * "implementation\_version"
+ * "description"
+ * "functions"
+ * "instance\_extensions"
+ * "device\_extensions"
+ * "enable\_environment"
+ * "disable\_environment"
+
+It was also during this time that the value of "DEVICE" was deprecated from
+the "type" field.
+
+
+#### Layer Library Versions
+
+The current Layer Library interface is at version 2.  The following sections
+detail the differences between the various versions.
+
+##### Layer Library API Version 2
+
+Introduced the concept of
+[loader and layer interface](#layer-version-negotiation) using the new
+`vkNegotiateLoaderLayerInterfaceVersion` function. Additionally, it introduced
+the concept of
+[Layer Unknown Physical Device Extensions](#layer-unknown-physical-device-extensions)
+and the associated `vk_layerGetPhysicalDeviceProcAddr` function.  Finally, it
+changed the manifest file definition to 1.1.0.
+
+##### Layer Library API Version 1
+
+A layer library supporting interface version 1 had the following behavior:
+ 1. `GetInstanceProcAddr` and `GetDeviceProcAddr` were directly exported
+ 2. The layer manifest file was able to override the names of the
+`GetInstanceProcAddr` and `GetDeviceProcAddr` functions.
+
+##### Layer Library API Version 0
+
+A layer library supporting interface version 0 must define and export these
+introspection functions, unrelated to any Vulkan function despite the names,
+signatures, and other similarities:
+
+- `vkEnumerateInstanceLayerProperties` enumerates all layers in a layer
+library.
+  - This function never fails.
+  - When a layer library contains only one layer, this function may be an alias
+   to the layer's `vkEnumerateInstanceLayerProperties`.
+- `vkEnumerateInstanceExtensionProperties` enumerates instance extensions of
+   layers in a layer library.
+  - "pLayerName" is always a valid layer name.
+  - This function never fails.
+  - When a layer library contains only one layer, this function may be an alias
+   to the layer's `vkEnumerateInstanceExtensionProperties`.
+- `vkEnumerateDeviceLayerProperties` enumerates a subset (can be full,
+   proper, or empty subset) of layers in a layer library.
+  - "physicalDevice" is always `VK_NULL_HANDLE`.
+  - This function never fails.
+  - If a layer is not enumerated by this function, it will not participate in
+   device function interception.
+- `vkEnumerateDeviceExtensionProperties` enumerates device extensions of
+   layers in a layer library.
+  - "physicalDevice" is always `VK_NULL_HANDLE`.
+  - "pLayerName" is always a valid layer name.
+  - This function never fails.
+
+It must also define and export these functions once for each layer in the
+library:
+
+- `<layerName>GetInstanceProcAddr(instance, pName)` behaves identically to a
+layer's vkGetInstanceProcAddr except it is exported.
+
+   When a layer library contains only one layer, this function may
+   alternatively be named `vkGetInstanceProcAddr`.
+
+- `<layerName>GetDeviceProcAddr`  behaves identically to a layer's
+vkGetDeviceProcAddr except it is exported.
+
+   When a layer library contains only one layer, this function may
+   alternatively be named `vkGetDeviceProcAddr`.
+
+All layers contained within a library must support `vk_layer.h`.  They do not
+need to implement functions that they do not intercept.  They are recommended
+not to export any functions.
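+
+As an illustration of the version 0 introspection exports above, here is a
+minimal sketch for a library containing a single hypothetical layer named
+"VK_LAYER_sample_basic"; the layer name, versions, and description are
+placeholder values for this example only.
+
+```cpp
+#include <cstring>
+#include <vulkan/vk_layer.h>
+
+extern "C" VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(
+        uint32_t *pPropertyCount, VkLayerProperties *pProperties) {
+    if (pProperties == nullptr) {
+        // First call: report how many layers this library contains.
+        *pPropertyCount = 1;
+        return VK_SUCCESS;
+    }
+    if (*pPropertyCount < 1) return VK_INCOMPLETE;
+    *pPropertyCount = 1;
+    strcpy(pProperties[0].layerName, "VK_LAYER_sample_basic");
+    strcpy(pProperties[0].description, "Placeholder layer used only for illustration");
+    pProperties[0].specVersion = VK_MAKE_VERSION(1, 0, 0);
+    pProperties[0].implementationVersion = 1;
+    return VK_SUCCESS;
+}
+```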
+
+
+<br/>
+<br/>
+
+## Vulkan Installable Client Driver Interface With the Loader
+
+This section discusses the various requirements for the loader and a Vulkan
+ICD to properly handshake.
+
+  * [ICD Discovery](#icd-discovery)
+    * [Overriding the Default ICD Usage](#overriding-the-default-icd-usage)
+    * [ICD Manifest File Usage](#icd-manifest-file-usage)
+    * [ICD Discovery on Windows](#icd-discovery-on-windows)
+    * [ICD Discovery on Linux](#icd-discovery-on-linux)
+    * [ICD Discovery on macOS](#icd-discovery-on-macos)
+    * [Using Pre-Production ICDs on Windows, Linux and macOS](#using-pre-production-icds-on-windows-linux-and-macos)
+    * [ICD Discovery on Android](#icd-discovery-on-android)
+  * [ICD Manifest File Format](#icd-manifest-file-format)
+    * [ICD Manifest File Versions](#icd-manifest-file-versions)
+      * [ICD Manifest File Version 1.0.0](#icd-manifest-file-version-1.0.0)
+  * [ICD Vulkan Entry Point Discovery](#icd-vulkan-entry-point-discovery)
+  * [ICD API Version](#icd-api-version)
+  * [ICD Unknown Physical Device Extensions](#icd-unknown-physical-device-extensions)
+  * [ICD Dispatchable Object Creation](#icd-dispatchable-object-creation)
+  * [Handling KHR Surface Objects in WSI Extensions](#handling-khr-surface-objects-in-wsi-extensions)
+  * [Loader and ICD Interface Negotiation](#loader-and-icd-interface-negotiation)
+    * [Windows, Linux, and macOS ICD Negotiation](#windows-linux-and-macos-icd-negotiation)
+      * [Version Negotiation Between Loader and ICDs](#version-negotiation-between-loader-and-icds)
+        * [Interfacing With Legacy ICDs or Loader](#interfacing-with-legacy-icds-or-loader)
+      * [Loader Version 5 Interface Requirements](#loader-version-5-interface-requirements)
+      * [Loader Version 4 Interface Requirements](#loader-version-4-interface-requirements)
+      * [Loader Version 3 Interface Requirements](#loader-version-3-interface-requirements)
+      * [Loader Version 2 Interface Requirements](#loader-version-2-interface-requirements)
+      * [Loader Versions 0 and 1 Interface Requirements](#loader-versions-0-and-1-interface-requirements)
+    * [Android ICD Negotiation](#android-icd-negotiation)
+
+
+### ICD Discovery
+
+Vulkan allows multiple drivers each with one or more devices (represented by a
+Vulkan `VkPhysicalDevice` object) to be used collectively. The loader is
+responsible for discovering available Vulkan ICDs on the system. Given a list
+of available ICDs, the loader can enumerate all the physical devices available
+for an application and return this information to the application. The process
+in which the loader discovers the available Installable Client Drivers (ICDs)
+on a system is platform-dependent. Windows, Linux, Android, and macOS ICD discovery
+details are listed below.
+
+#### Overriding the Default ICD Usage
+
+There may be times that a developer wishes to force the loader to use a specific ICD.
+This could be for many reasons, including using a beta driver or forcing the loader
+to skip a problematic ICD.  In order to support this, the loader can be forced to
+look at specific ICDs with the `VK_ICD_FILENAMES` environment variable.  To use the
+setting, simply set it to a properly delimited list of ICD Manifest files that you
+wish to use.  In this case, please provide the global (absolute) path to these
+files to reduce issues.
+
+For example:
+
+##### On Windows
+
+```
+set VK_ICD_FILENAMES=\windows\system32\nv-vk64.json
+```
+
+This is an example which is using the `VK_ICD_FILENAMES` override on Windows to point
+to the Nvidia Vulkan driver's ICD Manifest file.
+
+##### On Linux
+
+```
+export VK_ICD_FILENAMES=/home/user/dev/mesa/share/vulkan/icd.d/intel_icd.x86_64.json
+```
+
+This is an example which is using the `VK_ICD_FILENAMES` override on Linux to point
+to the Intel Mesa driver's ICD Manifest file.
+
+##### On macOS
+
+```
+export VK_ICD_FILENAMES=/home/user/MoltenVK/Package/Latest/MoltenVK/macOS/MoltenVK_icd.json
+```
+
+This is an example which is using the `VK_ICD_FILENAMES` override on macOS to point
+to an installation and build of the MoltenVK GitHub repository that contains the MoltenVK ICD.
+
+#### ICD Manifest File Usage
+
+As with layers, on Windows, Linux and macOS systems, JSON-formatted manifest files are
+used to store ICD information.  In order to find system-installed drivers, the
+Vulkan loader will read the JSON files to identify the names and attributes of
+each driver.  One thing you will notice is that ICD Manifest files are much
+simpler than the corresponding layer Manifest files.
+
+See the [Current ICD Manifest File Format](#icd-manifest-file-format) section
+for more details.
+
+
+#### ICD Discovery on Windows
+
+In order to find installed ICDs, the loader scans through registry keys specific to Display
+Adapters and all Software Components associated with these adapters for the
+locations of JSON manifest files. These keys are located in device keys
+created during driver installation and contain configuration information
+for base settings, including OpenGL and Direct3D ICD location.
+
+The Device Adapter and Software Component key paths should be obtained through the PnP
+Configuration Manager API. The `000X` key will be a numbered key, where each
+device is assigned a different number.
+
+```
+   HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Class\{Adapter GUID}\000X\VulkanDriverName
+   HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Class\{SoftwareComponent GUID}\000X\VulkanDriverName
+```
+
+In addition, on 64-bit systems there may be another set of registry values, listed
+below. These values record the locations of 32-bit layers on 64-bit operating systems,
+in the same way as the Windows-on-Windows functionality.
+
+```
+   HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Class\{Adapter GUID}\000X\VulkanDriverNameWow
+   HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Class\{SoftwareComponent GUID}\000X\VulkanDriverNameWow
+```
+
+If any of the above values exist and is of type `REG_SZ`, the loader will open the JSON
+manifest file specified by the key value. Each value must be a full absolute
+path to a JSON manifest file. The values may also be of type `REG_MULTI_SZ`, in
+which case the value will be interpreted as a list of paths to JSON manifest files.
+
+Additionally, the Vulkan loader will scan the values in the following Windows registry key:
+
+```
+   HKEY_LOCAL_MACHINE\SOFTWARE\Khronos\Vulkan\Drivers
+```
+
+For 32-bit applications on 64-bit Windows, the loader scans the 32-bit
+registry location:
+
+```
+   HKEY_LOCAL_MACHINE\SOFTWARE\WOW6432Node\Khronos\Vulkan\Drivers
+```
+
+Every ICD in these locations should be given as a DWORD, with value 0, where
+the name of the value is the full path to a JSON manifest file. The Vulkan loader
+will attempt to open each manifest file to obtain the information about an ICD's
+shared library (".dll") file.
+
+For example, let us assume the registry contains the following data:
+
+```
+[HKEY_LOCAL_MACHINE\SOFTWARE\Khronos\Vulkan\Drivers\]
+
+"C:\vendor a\vk_vendora.json"=dword:00000000
+"C:\windows\system32\vendorb_vk.json"=dword:00000001
+"C:\windows\system32\vendorc_icd.json"=dword:00000000
+```
+
+In this case, the loader will step through each entry and check its value.  If
+the value is 0, then the loader will attempt to load the file.  Here, the
+loader will open the first and last listings, but not the middle, because the
+value of 1 for vendorb_vk.json disables that driver.
+
+The Vulkan loader will open each enabled manifest file found to obtain the name
+or pathname of an ICD shared library (".DLL") file.
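+
+For illustration only, a minimal sketch of that scan is shown below; this is not
+actual loader source, and the helper name, buffer sizes, and error handling are
+simplifications made for this example.
+
+```cpp
+#include <windows.h>
+#include <string>
+#include <vector>
+
+// Hypothetical helper: collect the names (full JSON paths) of all values under
+// the Drivers key whose DWORD data is 0, i.e. the "enabled" ICD manifests.
+static std::vector<std::string> EnabledIcdManifests() {
+    std::vector<std::string> manifests;
+    HKEY key = NULL;
+    if (RegOpenKeyExA(HKEY_LOCAL_MACHINE, "SOFTWARE\\Khronos\\Vulkan\\Drivers", 0, KEY_READ, &key) != ERROR_SUCCESS)
+        return manifests;
+    for (DWORD i = 0;; ++i) {
+        char name[MAX_PATH];
+        DWORD name_len = sizeof(name), type = 0, value = 0, value_len = sizeof(value);
+        LSTATUS status = RegEnumValueA(key, i, name, &name_len, NULL, &type, (LPBYTE)&value, &value_len);
+        if (status == ERROR_NO_MORE_ITEMS) break;
+        if (status != ERROR_SUCCESS) continue;  // skip entries whose data doesn't fit a DWORD
+        if (type == REG_DWORD && value == 0) manifests.push_back(name);  // 0 means enabled
+    }
+    RegCloseKey(key);
+    return manifests;
+}
+```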
+
+ICDs should use the registry locations from the PnP Configuration Manager wherever
+practical. That location clearly ties the ICD to a given device. The
+`SOFTWARE\Khronos\Vulkan\Drivers` location is the older method for locating ICDs,
+and is retained for backward compatibility.
+
+See the [ICD Manifest File Format](#icd-manifest-file-format) section for more
+details.
+
+
+#### ICD Discovery on Linux
+
+In order to find installed ICDs, the Vulkan loader will scan the files
+in the following Linux directories:
+
+```
+    /usr/local/etc/vulkan/icd.d
+    /usr/local/share/vulkan/icd.d
+    /etc/vulkan/icd.d
+    /usr/share/vulkan/icd.d
+    $HOME/.local/share/vulkan/icd.d
+```
+
+The "/usr/local/*" directories can be configured to be other directories at
+build time.
+
+The typical usage of the directories is indicated in the table below.
+
+| Location  |  Details |
+|-------------------|------------------------|
+| $HOME/.local/share/vulkan/icd.d | $HOME is the current home directory of the application's user id; this path will be ignored for suid programs |
+| "/usr/local/etc/vulkan/icd.d" | Directory for locally built ICDs |
+| "/usr/local/share/vulkan/icd.d" | Directory for locally built ICDs |
+| "/etc/vulkan/icd.d" | Location of ICDs installed from non-Linux-distribution-provided packages |
+| "/usr/share/vulkan/icd.d" | Location of ICDs installed from Linux-distribution-provided packages |
+
+The Vulkan loader will open each manifest file found to obtain the name or
+pathname of an ICD shared library (".so") file.
+
+See the [ICD Manifest File Format](#icd-manifest-file-format) section for more
+details.
+
+#### ICD Discovery on macOS
+
+In order to find installed ICDs, the Vulkan loader will scan the files
+in the following directories:
+
+```
+    <bundle>/Contents/Resources/vulkan/icd.d
+    /etc/vulkan/icd.d
+    /usr/local/share/vulkan/icd.d
+    /usr/share/vulkan/icd.d
+    $HOME/.local/share/vulkan/icd.d
+```
+
+The "/usr/local/*" directories can be configured to be other directories at
+build time.
+
+The typical usage of the directories is indicated in the table below.
+
+| Location  |  Details |
+|-------------------|------------------------|
+| &lt;bundle&gt;/Contents/Resources/vulkan/icd.d | Directory for ICDs that are bundled with the application (searched first) |
+| "/etc/vulkan/icd.d" | Location of ICDs installed manually |
+| "/usr/local/share/vulkan/icd.d" | Directory for locally built ICDs |
+| "/usr/share/vulkan/icd.d" | Location of ICDs installed from packages |
+| $HOME/.local/share/vulkan/icd.d | $HOME is the current home directory of the application's user id; this path will be ignored for suid programs |
+
+The Vulkan loader will open each manifest file found to obtain the name or
+pathname of an ICD shared library (".dylib") file.
+
+See the [ICD Manifest File Format](#icd-manifest-file-format) section for more
+details.
+
+##### Additional Settings For ICD Debugging
+
+If you are seeing issues which may be related to the ICD, a possible option to debug is to enable the
+`LD_BIND_NOW` environment variable.  This forces every dynamic library's symbols to be fully resolved on load.  If
+there is a problem with an ICD missing symbols on your system, this will expose it and cause the Vulkan loader
+to fail on loading the ICD.  It is recommended that you enable `LD_BIND_NOW` along with `VK_LOADER_DEBUG=warn`
+to expose any issues.
+
+#### Using Pre-Production ICDs on Windows, Linux and macOS
+
+Independent Hardware Vendor (IHV) developers may need to use pre-production
+ICDs. In some cases, a pre-production ICD may be in an installable package. In
+other cases, a pre-production ICD may simply be a shared library in the
+developer's build tree. In this latter case, we want to allow developers to
+point to such an ICD without modifying the system-installed ICD(s) on their
+system.
+
+This need is met with the use of the "VK\_ICD\_FILENAMES" environment variable,
+which will override the mechanism used for finding system-installed ICDs. In
+other words, only the ICDs listed in "VK\_ICD\_FILENAMES" will be used.
+
+The "VK\_ICD\_FILENAMES" environment variable is a list of ICD
+manifest files, containing the full path to the ICD JSON Manifest file.  This
+list is colon-separated on Linux and macOS, and semi-colon-separated on Windows.
+
+Typically, "VK\_ICD\_FILENAMES" will only contain a full pathname to one info
+file for a developer-built ICD. A separator (colon or semi-colon) is only used
+if more than one ICD is listed.
+
+**NOTE:** On Linux and macOS, this environment variable will be ignored for suid programs.
+
+
+#### ICD Discovery on Android
+
+The Android loader lives in the system library folder. The location cannot be
+changed. The loader will load the driver/ICD via `hw_get_module` with the ID
+of "vulkan". **Due to security policies in Android, none of this can be modified
+under normal use.**
+
+
+### ICD Manifest File Format
+
+The following section discusses the details of the ICD Manifest JSON file
+format.  The JSON file itself does not have any requirements for naming.  The
+only requirement is that the extension suffix of the file is ".json".
+
+Here is an example ICD JSON Manifest file:
+
+```
+{
+   "file_format_version": "1.0.0",
+   "ICD": {
+      "library_path": "path to ICD library",
+      "api_version": "1.0.5"
+   }
+}
+```
+
+| Field Name | Field Value |
+|----------------|--------------------|
+| "file\_format\_version" | The JSON format major.minor.patch version number of this file.  Currently supported version is 1.0.0. |
+| "ICD" | The identifier used to group all ICD information together. |
+| "library_path" | The "library\_path" specifies either a filename, a relative pathname, or a full pathname to an ICD shared library file.  If "library\_path" specifies a relative pathname, it is relative to the path of the JSON manifest file.  If "library\_path" specifies a filename, the library must live in the system's shared object search path. There are no rules about the name of the ICD shared library files other than it should end with the appropriate suffix (".DLL" on Windows, ".so" on Linux and ".dylib" on macOS). | N/A |
+| "api_version" | The major.minor.patch version number of the Vulkan API that the shared library files for the ICD was built against. For example: 1.0.33. |
+
+**NOTE:** If the same ICD shared library supports multiple, incompatible
+manifest file format versions, it must have a separate JSON file for each
+(all of which may point to the same shared library).
+
+##### ICD Manifest File Versions
+
+There has only been one version of the ICD manifest files supported.  This is
+version 1.0.0.
+
+###### ICD Manifest File Version 1.0.0
+
+The initial version of the ICD Manifest file specified the basic format and
+fields of an ICD JSON file.  The fields of the 1.0.0 file format include:
+ * "file\_format\_version"
+ * "ICD"
+ * "library\_path"
+ * "api\_version"
+
+ 
+###  ICD Vulkan Entry Point Discovery
+
+There are several reasons why the Vulkan symbols exported by an ICD must not
+clash with the loader's exported Vulkan symbols.  Because of this, all ICDs
+must export the following function that is used for discovery of ICD Vulkan
+entry points.  This entry point is not a part of the Vulkan API itself, only a
+private interface between the loader and ICDs for version 1 and higher
+interfaces.
+
+```cpp
+VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(
+                                               VkInstance instance,
+                                               const char* pName);
+```
+
+This function has very similar semantics to `vkGetInstanceProcAddr`.
+`vk_icdGetInstanceProcAddr` returns valid function pointers for all the 
+global-level and instance-level Vulkan functions, and also for `vkGetDeviceProcAddr`.
+Global-level functions are those which contain no dispatchable object as the
+first parameter, such as `vkCreateInstance` and
+`vkEnumerateInstanceExtensionProperties`. The ICD must support querying 
+global-level entry points by calling `vk_icdGetInstanceProcAddr` with a NULL
+`VkInstance` parameter. Instance-level functions are those that have either
+`VkInstance`, or `VkPhysicalDevice` as the first parameter dispatchable object.
+Both core entry points and any instance extension entry points the ICD supports
+should be available via `vk_icdGetInstanceProcAddr`. Future Vulkan instance
+extensions may define and use new instance-level dispatchable objects other
+than `VkInstance` and `VkPhysicalDevice`, in which case extension entry points
+using these newly defined dispatchable objects must be queryable via
+`vk_icdGetInstanceProcAddr`.
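+
+A minimal sketch of such an export is shown below; the `my_icd_*` functions are
+hypothetical placeholders for the driver's real entry points, and a real ICD
+would of course resolve every global-level, instance-level, and supported
+extension name rather than the two shown here.
+
+```cpp
+#include <cstring>
+#include <vulkan/vk_icd.h>
+
+// Placeholder ICD-internal implementations; a real driver supplies these.
+static VKAPI_ATTR VkResult VKAPI_CALL my_icd_CreateInstance(const VkInstanceCreateInfo *, const VkAllocationCallbacks *,
+                                                            VkInstance *pInstance) {
+    *pInstance = VK_NULL_HANDLE;
+    return VK_ERROR_INITIALIZATION_FAILED;  // placeholder only
+}
+static VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL my_icd_GetDeviceProcAddr(VkDevice, const char *) { return nullptr; }
+
+extern "C" VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance, const char *pName) {
+    // Global-level names must resolve even when the VkInstance parameter is NULL.
+    if (strcmp(pName, "vkCreateInstance") == 0) return (PFN_vkVoidFunction)my_icd_CreateInstance;
+    // vkGetDeviceProcAddr must also be returned from this query.
+    if (strcmp(pName, "vkGetDeviceProcAddr") == 0) return (PFN_vkVoidFunction)my_icd_GetDeviceProcAddr;
+    return nullptr;  // remaining instance-level and extension entry points omitted in this sketch
+}
+```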
+
+All other Vulkan entry points must either:
+ * NOT be exported directly from the ICD library
+ * or NOT use the official Vulkan function names if they are exported
+ 
+This requirement is for ICD libraries that include other
+functionality (such as OpenGL) and thus could be loaded by the
+application prior to when the Vulkan loader library is loaded by the
+application.
+
+Beware of interposing by dynamic OS library loaders if the official Vulkan
+names are used. On Linux, if official names are used, the ICD library must be
+linked with `-Bsymbolic`.
+
+
+### ICD API Version
+When an application calls `vkCreateInstance`, it can optionally include a
+`VkApplicationInfo` struct, which includes an `apiVersion` field. A Vulkan 1.0
+ICD was required to return `VK_ERROR_INCOMPATIBLE_DRIVER` if it did not
+support the API version that the user passed. Beginning with Vulkan 1.1, ICDs
+are not allowed to return this error for any value of `apiVersion`. This
+creates a problem when working with multiple ICDs, where one is a 1.0 ICD and
+another is newer.
+
+A loader that is newer than 1.0 will always give the version it supports when
+the application calls `vkEnumerateInstanceVersion`, regardless of the API
+version supported by the ICDs on the system. This means that when the
+application calls `vkCreateInstance`, the loader will be forced to pass a copy
+of the `VkApplicationInfo` struct where `apiVersion` is 1.0 to any 1.0 drivers
+in order to prevent an error. To determine if this must be done, the loader
+will perform the following steps:
+
+1. Check the ICD's JSON manifest file for the "api_version" field.
+2. If the JSON version is greater than or equal to 1.1, load the ICD's dynamic library
+3. Call the ICD's `vkGetInstanceProcAddr` command to get a pointer to
+`vkEnumerateInstanceVersion`
+4. If the pointer to `vkEnumerateInstanceVersion` is not `NULL`, it will be
+called to get the ICD's supported API version
+
+The ICD will be treated as a 1.0 ICD if any of the following conditions are met:
+
+- The JSON manifest's "api_version" field is less than version 1.1
+- The function pointer to `vkEnumerateInstanceVersion` is `NULL`
+- The version returned by `vkEnumerateInstanceVersion` is less than 1.1
+- `vkEnumerateInstanceVersion` returns anything other than `VK_SUCCESS`
+
+If the ICD only supports Vulkan 1.0, the loader will ensure that any
+`VkApplicationInfo` struct that is passed to the ICD will have an `apiVersion`
+field set to Vulkan 1.0. Otherwise, the loader will pass the struct to the ICD
+without any changes.
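+
+The check described above might look roughly like the following loader-side
+sketch; this is not actual loader source, and the parameter names are
+assumptions ("manifest_api_version" comes from the JSON "api_version" field and
+"icd_gipa" is the ICD's `vkGetInstanceProcAddr`).
+
+```cpp
+#include <vulkan/vulkan.h>
+
+// Returns true if the ICD may be given an apiVersion newer than Vulkan 1.0.
+static bool IcdSupports1_1OrNewer(uint32_t manifest_api_version, PFN_vkGetInstanceProcAddr icd_gipa) {
+    if (manifest_api_version < VK_MAKE_VERSION(1, 1, 0)) return false;
+    PFN_vkEnumerateInstanceVersion enumerate_version =
+        (PFN_vkEnumerateInstanceVersion)icd_gipa(VK_NULL_HANDLE, "vkEnumerateInstanceVersion");
+    if (enumerate_version == nullptr) return false;
+    uint32_t icd_version = VK_API_VERSION_1_0;
+    if (enumerate_version(&icd_version) != VK_SUCCESS) return false;
+    return icd_version >= VK_MAKE_VERSION(1, 1, 0);
+}
+```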
+
+
+### ICD Unknown Physical Device Extensions
+
+Originally, if the loader was called with `vkGetInstanceProcAddr`, it would
+result in the following behavior:
+ 1. The loader would check if the name was a core function:
+    - If it was, it would return the function pointer
+ 2. The loader would check if the name was a known extension function:
+    - If it was, it would return the function pointer
+ 3. If the loader knew nothing about it, it would call down using
+`GetInstanceProcAddr`
+    - If it returned non-NULL, treat it as an unknown logical device command.
+    - This meant setting up a generic trampoline function that takes in a
+VkDevice as the first parameter and adjusting the dispatch table to call the
+ICD/Layers function after getting the dispatch table from the VkDevice.
+ 4. If all the above failed, the loader would return NULL to the application.
+
+This caused problems when an ICD attempted to expose new physical device
+extensions the loader knew nothing about, but an application did.  Because the
+loader knew nothing about it, the loader would get to step 3 in the above
+process and would treat the function as an unknown logical device command.  The
+problem is, this would create a generic VkDevice trampoline function which, on
+the first call, would attempt to dereference the VkPhysicalDevice as a VkDevice.
+This would lead to a crash or corruption.
+
+In order to identify the extension entry points specific to physical device
+extensions, the following function can be added to an ICD:
+
+```cpp
+PFN_vkVoidFunction vk_icdGetPhysicalDeviceProcAddr(VkInstance instance,
+                                                   const char* pName);
+```
+
+This function behaves similarly to `vkGetInstanceProcAddr` and
+`vkGetDeviceProcAddr` except it should only return values for physical device
+extension entry points.  In this way, it compares "pName" to every physical
+device function supported in the ICD.
+
+The following rules apply:
+* If it is the name of a physical device function supported by the ICD, the
+pointer to the ICD's corresponding function should be returned.
+* If it is the name of a valid function which is **not** a physical device
+function (i.e. an Instance, Device, or other function implemented by the ICD),
+then the value of NULL should be returned.
+* If the ICD has no idea what this function is, it should return NULL.
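+
+Here is a minimal sketch of such a function; the single extension entry point
+and its `my_icd_` implementation are hypothetical, and a real ICD would check
+every physical device function it supports.
+
+```cpp
+#include <cstring>
+#include <vulkan/vk_icd.h>
+
+// Placeholder implementation of a physical-device extension entry point.
+static VKAPI_ATTR void VKAPI_CALL my_icd_GetPhysicalDeviceFeatures2KHR(VkPhysicalDevice, VkPhysicalDeviceFeatures2 *) {}
+
+extern "C" VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetPhysicalDeviceProcAddr(VkInstance, const char *pName) {
+    // Only physical-device extension entry points return a non-NULL pointer here.
+    if (strcmp(pName, "vkGetPhysicalDeviceFeatures2KHR") == 0)
+        return (PFN_vkVoidFunction)my_icd_GetPhysicalDeviceFeatures2KHR;
+    // Instance functions, device functions, and unknown names all return NULL.
+    return nullptr;
+}
+```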
+
+This support is optional and should not be considered a requirement.  This is
+only required if an ICD intends to support some functionality not directly
+supported by a significant population of loaders in the public.  If an ICD
+does implement this support, it should return the address of its
+`vk_icdGetPhysicalDeviceProcAddr` function through the `vkGetInstanceProcAddr`
+function.
+
+The new behavior of the loader's vkGetInstanceProcAddr with support for the
+`vk_icdGetPhysicalDeviceProcAddr` function is as follows:
+ 1. Check if the name is a core function:
+    - If it is, return the function pointer
+ 2. Check if the name is a known instance or device extension function:
+    - If it is, return the function pointer
+ 3. Call the layer/ICD `GetPhysicalDeviceProcAddr`
+    - If it returns non-NULL, return a trampoline to a generic physical device
+function, and set up a generic terminator which will pass it to the proper ICD.
+ 4. Call down using `GetInstanceProcAddr`
+    - If it returns non-NULL, treat it as an unknown logical device command.
+This means setting up a generic trampoline function that takes in a VkDevice as
+the first parameter and adjusting the dispatch table to call the ICD/Layers
+function after getting the dispatch table from the VkDevice. Then, return the
+pointer to corresponding trampoline function.
+ 5. Return NULL
+
+You can see now that, if the command gets promoted to core later, it will no
+longer be set up using `vk_icdGetPhysicalDeviceProcAddr`.  Additionally, if the
+loader adds direct support for the extension, it will no longer get to step 3,
+because step 2 will return a valid function pointer.  However, the ICD should
+continue to support the command query via `vk_icdGetPhysicalDeviceProcAddr`,
+until at least a Vulkan version bump, because an older loader may still be
+attempting to use the commands.
+
+
+### ICD Dispatchable Object Creation
+
+As previously covered, the loader requires dispatch tables to be accessible
+within Vulkan dispatchable objects, such as: `VkInstance`, `VkPhysicalDevice`,
+`VkDevice`, `VkQueue`, and `VkCommandBuffer`. The specific requirements on all
+dispatchable objects created by ICDs are as follows:
+
+- All dispatchable objects created by an ICD can be cast to void \*\*
+- The loader will replace the first entry with a pointer to the dispatch table
+  which is owned by the loader. This implies three things for ICD drivers
+  1. The ICD must return a pointer for the opaque dispatchable object handle
+  2. This pointer points to a regular C structure with the first entry being a
+   pointer.
+   * **NOTE:** For any C\++ ICDs that implement VK objects directly as C\++
+classes:
+     * The C\++ compiler may put a vtable at offset zero if your class is 
+non-POD due to the use of a virtual function.
+     * In this case use a regular C structure (see below).
+  3. The loader checks for a magic value (ICD\_LOADER\_MAGIC) in all the created
+   dispatchable objects, as follows (see `include/vulkan/vk_icd.h`):
+
+```cpp
+#include "vk_icd.h"
+
+typedef union _VK_LOADER_DATA {
+    uintptr_t loaderMagic;
+    void *loaderData;
+} VK_LOADER_DATA;
+
+vkObj alloc_icd_obj()
+{
+    vkObj *newObj = alloc_obj();
+    ...
+    // Initialize pointer to loader's dispatch table with ICD_LOADER_MAGIC
+
+    set_loader_magic_value(newObj);
+    ...
+    return newObj;
+}
+```
+ 
+
+### Handling KHR Surface Objects in WSI Extensions
+
+Normally, ICDs handle object creation and destruction for various Vulkan
+objects. The WSI surface extensions for Linux, Windows, and macOS
+("VK\_KHR\_win32\_surface", "VK\_KHR\_xcb\_surface", "VK\_KHR\_xlib\_surface",
+"VK\_KHR\_wayland\_surface", "VK\_MVK\_macos\_surface"
+and "VK\_KHR\_surface")
+are handled differently.  For these extensions, the `VkSurfaceKHR` object
+creation and destruction may be handled by either the loader  or an ICD.
+
+If the loader handles the management of the `VkSurfaceKHR` objects:
+ 1. The loader will handle the calls to `vkCreateXXXSurfaceKHR` and
+`vkDestroySurfaceKHR`
+    functions without involving the ICDs.
+    * Where XXX stands for the Windowing System name:
+      * Wayland
+      * XCB
+      * Xlib
+      * Windows
+      * Android
+      * MacOS (`vkCreateMacOSSurfaceMVK`)
+ 2. The loader creates a `VkIcdSurfaceXXX` object for the corresponding
+`vkCreateXXXSurfaceKHR` call.
+    * The `VkIcdSurfaceXXX` structures are defined in `include/vulkan/vk_icd.h`.
+ 3. ICDs can cast any `VkSurfaceKHR` object to a pointer to the appropriate
+    `VkIcdSurfaceXXX` structure.
+ 4. The first field of all the `VkIcdSurfaceXXX` structures is a
+`VkIcdSurfaceBase` enumerant that indicates whether the
+    surface object is Win32, XCB, Xlib, or Wayland.
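+
+For example, item 3 above might look like the following minimal sketch inside an
+ICD; the helper name is hypothetical, and the XCB platform check is just one of
+the possible `VkIcdWsiPlatform` values.
+
+```cpp
+#include <vulkan/vk_icd.h>
+
+// Hypothetical ICD helper: determine whether a loader-created surface is an
+// XCB surface by inspecting the VkIcdSurfaceBase header that begins every
+// VkIcdSurfaceXXX structure.
+static bool SurfaceIsXcb(VkSurfaceKHR surface) {
+    const VkIcdSurfaceBase *base = (const VkIcdSurfaceBase *)(uintptr_t)surface;
+    return base->platform == VK_ICD_WSI_PLATFORM_XCB;
+}
+```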
+
+The ICD may choose to handle `VkSurfaceKHR` object creation instead.  If an ICD
+desires to handle creating and destroying these objects, it must do the following:
+ 1. Support version 3 or newer of the loader/ICD interface.
+ 2. Export and handle all functions that take in a `VkSurfaceKHR` object,
+including:
+     * `vkCreateXXXSurfaceKHR`
+     * `vkGetPhysicalDeviceSurfaceSupportKHR`
+     * `vkGetPhysicalDeviceSurfaceCapabilitiesKHR`
+     * `vkGetPhysicalDeviceSurfaceFormatsKHR`
+     * `vkGetPhysicalDeviceSurfacePresentModesKHR`
+     * `vkCreateSwapchainKHR`
+     * `vkDestroySurfaceKHR`
+
+Because the `VkSurfaceKHR` object is an instance-level object, one object can be
+associated with multiple ICDs.  Therefore, when the loader receives the
+`vkCreateXXXSurfaceKHR` call, it still creates an internal `VkIcdSurfaceXXX`
+object.  This object acts as a container for each ICD's version of the
+`VkSurfaceKHR` object.  If an ICD does not support the creation of its own
+`VkSurfaceKHR` object, the loader's container stores a NULL for that ICD.  On
+the other hand, if the ICD does support `VkSurfaceKHR` creation, the loader will
+make the appropriate `vkCreateXXXSurfaceKHR` call to the ICD, and store the
+returned pointer in its container object.  The loader then returns the
+`VkIcdSurfaceXXX` as a `VkSurfaceKHR` object back up the call chain.  Finally,
+when the loader receives the `vkDestroySurfaceKHR` call, it subsequently calls
+`vkDestroySurfaceKHR` for each ICD whose internal `VkSurfaceKHR` object is not
+NULL.  Then the loader destroys the container object before returning.
+
+
+### Loader and ICD Interface Negotiation
+
+Generally, for functions issued by an application, the loader can be
+viewed as a pass-through. That is, the loader generally doesn't modify the
+functions or their parameters, but simply calls the ICD's entry point for that
+function. There are specific additional interface requirements an ICD needs to
+comply with that are not part of any requirements from the Vulkan specification.
+These additional requirements are versioned to allow flexibility in the future.
+
+
+#### Windows, Linux and macOS ICD Negotiation
+
+
+##### Version Negotiation Between Loader and ICDs
+
+All ICDs (supporting interface version 2 or higher) must export the following
+function that is used for determination of the interface version that will be
+used.  This entry point is not a part of the Vulkan API itself, only a private
+interface between the loader and ICDs.
+
+```cpp
+   VKAPI_ATTR VkResult VKAPI_CALL
+       vk_icdNegotiateLoaderICDInterfaceVersion(
+           uint32_t* pSupportedVersion);
+```
+
+This function allows the loader and ICD to agree on an interface version to use.
+The "pSupportedVersion" parameter is both an input and output parameter.
+"pSupportedVersion" is filled in by the loader with the desired latest interface
+version supported by the loader (typically the latest). The ICD receives this
+and returns back the version it desires in the same field.  Because it is
+setting up the interface version between the loader and ICD, this should be
+the first call made by a loader to the ICD (even prior to any calls to
+`vk_icdGetInstanceProcAddr`).
+
+If the ICD receiving the call no longer supports the interface version provided
+by the loader (due to deprecation), then it should report a
+VK_ERROR_INCOMPATIBLE_DRIVER error.  Otherwise it sets the value pointed to by
+"pSupportedVersion" to the latest interface version supported by both the ICD
+and the loader and returns VK_SUCCESS.
+
+The ICD should report VK_SUCCESS in case the loader-provided interface version
+is newer than that supported by the ICD, as it's the loader's responsibility to
+determine whether it can support the older interface version supported by the
+ICD.  The ICD should also report VK_SUCCESS in the case its interface version
+is greater than the loader's, but return the loader's version. Thus, upon
+return of VK_SUCCESS the "pSupportedVersion" will contain the desired interface
+version to be used by the ICD.
+
+If the loader receives an interface version from the ICD that the loader no
+longer supports (due to deprecation), or it receives a
+VK_ERROR_INCOMPATIBLE_DRIVER error instead of VK_SUCCESS, then the loader will
+treat the ICD as incompatible and will not load it for use.  In this case, the
+application will not see the ICD's `VkPhysicalDevice` objects during enumeration.
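+
+A minimal sketch of the ICD side of this negotiation is shown below; the
+supported range (2 through 5) is an assumption for this example, not a
+requirement of the interface.
+
+```cpp
+#include <vulkan/vk_icd.h>
+
+static const uint32_t kMinInterfaceVersion = 2;  // hypothetical lower bound for this ICD
+static const uint32_t kMaxInterfaceVersion = 5;  // hypothetical upper bound for this ICD
+
+extern "C" VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t *pSupportedVersion) {
+    // The loader passes in the newest interface version it supports.
+    if (*pSupportedVersion < kMinInterfaceVersion) {
+        // The loader is older than anything this ICD still supports.
+        return VK_ERROR_INCOMPATIBLE_DRIVER;
+    }
+    if (*pSupportedVersion > kMaxInterfaceVersion) {
+        // Offer the newest version this ICD supports; the loader decides
+        // whether it can work with the older interface.
+        *pSupportedVersion = kMaxInterfaceVersion;
+    }
+    return VK_SUCCESS;
+}
+```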
+
+###### Interfacing With Legacy ICDs or Loader
+
+If a loader sees that an ICD does not export the
+`vk_icdNegotiateLoaderICDInterfaceVersion` function, then the loader assumes the
+corresponding ICD only supports either interface version 0 or 1.
+
+From the other side of the interface, if an ICD sees a call to
+`vk_icdGetInstanceProcAddr` before a call to
+`vk_icdNegotiateLoaderICDInterfaceVersion`, then it knows that the loader making the calls
+is a legacy loader supporting version 0 or 1.  If the loader calls
+`vk_icdGetInstanceProcAddr` first, it supports at least version 1.  Otherwise,
+the loader only supports version 0.
+
+
+##### Loader Version 5 Interface Requirements
+
+Version 5 of the loader/ICD interface has no changes to the actual interface.
+If the loader requests interface version 5 or greater, it is simply
+an indication to ICDs that the loader is now evaluating whether the API Version info
+passed into vkCreateInstance is a valid version for the loader.  If it is not,
+the loader will catch this during vkCreateInstance and fail with a
+VK_ERROR_INCOMPATIBLE_DRIVER error.
+
+On the other hand, if version 5 or newer is not requested by the loader, then it
+indicates to the ICD that the loader is ignorant of the API version being
+requested.  Because of this, it falls on the ICD to validate that the API
+Version is not greater than major = 1 and minor = 0.  If it is, then the ICD
+should automatically fail with a VK_ERROR_INCOMPATIBLE_DRIVER error since the
+loader is a 1.0 loader, and is unaware of the version.
+
+Here is a table of the expected behaviors:
+
+| Loader Supports I/f Version  |  ICD Supports I/f Version  |    Result        |
+| :---: |:---:|------------------------|
+|           <= 4               |           <= 4             | ICD must fail with `VK_ERROR_INCOMPATIBLE_DRIVER` for all vkCreateInstance calls with apiVersion set to > Vulkan 1.0 because both the loader and ICD support interface version <= 4. Otherwise, the ICD should behave as normal. |
+|           <= 4               |           >= 5             | ICD must fail with `VK_ERROR_INCOMPATIBLE_DRIVER` for all vkCreateInstance calls with apiVersion set to > Vulkan 1.0 because the loader is still at interface version <= 4. Otherwise, the ICD should behave as normal.  |
+|           >= 5               |           <= 4             | Loader will fail with `VK_ERROR_INCOMPATIBLE_DRIVER` if it can't handle the apiVersion.  ICD may pass for all apiVersions, but since its interface is <= 4, it is best if it assumes it needs to do the work of rejecting anything > Vulkan 1.0 and fail with `VK_ERROR_INCOMPATIBLE_DRIVER`. Otherwise, the ICD should behave as normal.  |
+|           >= 5               |           >= 5             | Loader will fail with `VK_ERROR_INCOMPATIBLE_DRIVER` if it can't handle the apiVersion, and ICDs should fail with `VK_ERROR_INCOMPATIBLE_DRIVER` **only if** they can not support the specified apiVersion. Otherwise, the ICD should behave as normal.  |
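+
+As an illustration of the first two rows above, a pre-version-5 ICD's rejection
+of anything newer than Vulkan 1.0 might look like this sketch; the function name
+and the placeholder return value are assumptions, and real instance creation is
+omitted.
+
+```cpp
+#include <vulkan/vulkan.h>
+
+static VKAPI_ATTR VkResult VKAPI_CALL my_icd_CreateInstance(const VkInstanceCreateInfo *pCreateInfo,
+                                                            const VkAllocationCallbacks *, VkInstance *) {
+    uint32_t api_version = VK_API_VERSION_1_0;
+    if (pCreateInfo->pApplicationInfo != nullptr && pCreateInfo->pApplicationInfo->apiVersion != 0) {
+        api_version = pCreateInfo->pApplicationInfo->apiVersion;
+    }
+    // With an interface version of 4 or less, the ICD must reject apiVersion > Vulkan 1.0.
+    if (VK_VERSION_MAJOR(api_version) > 1 || VK_VERSION_MINOR(api_version) > 0) {
+        return VK_ERROR_INCOMPATIBLE_DRIVER;
+    }
+    // ... real instance creation would follow here ...
+    return VK_ERROR_INITIALIZATION_FAILED;  // placeholder in this sketch
+}
+```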
+
+##### Loader Version 4 Interface Requirements
+
+The major change to version 4 of the loader/ICD interface is the support of
+[Unknown Physical Device Extensions](#icd-unknown-physical-device-extensions)
+using the `vk_icdGetPhysicalDeviceProcAddr` function.  This
+function is purely optional.  However, if an ICD supports a Physical Device
+extension, it must provide a `vk_icdGetPhysicalDeviceProcAddr` function.
+Otherwise, the loader will continue to treat any unknown functions as VkDevice
+functions and cause invalid behavior.
+
+
+##### Loader Version 3 Interface Requirements
+
+The primary change that occurred in version 3 of the loader/ICD interface was to
+allow an ICD to handle creation/destruction of their own KHR_surfaces.  Up until
+this point, the loader created a surface object that was used by all ICDs.
+However, some ICDs may want to provide their own surface handles.  If an ICD
+chooses to enable this support, it must export support for version 3 of the
+loader/ICD interface, as well as any Vulkan function that uses a KHR_surface
+handle, such as:
+- `vkCreateXXXSurfaceKHR` (where XXX is the platform-specific identifier [i.e.
+`vkCreateWin32SurfaceKHR` for Windows])
+- `vkDestroySurfaceKHR`
+- `vkCreateSwapchainKHR`
+- `vkGetPhysicalDeviceSurfaceSupportKHR`
+- `vkGetPhysicalDeviceSurfaceCapabilitiesKHR`
+- `vkGetPhysicalDeviceSurfaceFormatsKHR`
+- `vkGetPhysicalDeviceSurfacePresentModesKHR`
+
+An ICD can still choose to not take advantage of this functionality by simply
+not exposing the above `vkCreateXXXSurfaceKHR` and `vkDestroySurfaceKHR`
+functions.
+
+
+##### Loader Version 2 Interface Requirements
+
+Version 2 interface has requirements in three areas:
+ 1. ICD Vulkan entry point discovery,
+ 2. `KHR_surface` related requirements in the WSI extensions,
+ 3. Vulkan dispatchable object creation requirements.
+
+##### Loader Versions 0 and 1 Interface Requirements
+
+Version 0 and 1 interfaces do not support version negotiation via
+`vk_icdNegotiateLoaderICDInterfaceVersion`.  ICDs can distinguish version 0 and
+version 1 interfaces as follows: if the loader calls `vk_icdGetInstanceProcAddr`
+first it supports version 1; otherwise the loader only supports version 0.
+
+Version 0 interface does not support `vk_icdGetInstanceProcAddr`.  Version 0
+interface requirements for obtaining ICD Vulkan entry points are as follows:
+
+- The function `vkGetInstanceProcAddr` **must be exported** in the ICD library
+and returns valid function pointers for all the Vulkan API entry points.
+- `vkCreateInstance` **must be exported** by the ICD library.
+- `vkEnumerateInstanceExtensionProperties` **must be exported** by the ICD
+library.
+
+Additional Notes:
+
+- The loader will filter out extensions requested in `vkCreateInstance` and
+`vkCreateDevice` before calling into the ICD; it filters out extensions
+advertised by entities (e.g. layers) other than the ICD in question.
+- The loader will not call the ICD for `vkEnumerate*LayerProperties` as layer
+properties are obtained from the layer libraries and layer JSON files.
+- If an ICD library author wants to implement a layer, it can do so by having
+the appropriate layer JSON manifest file refer to the ICD library file.
+- The loader will not call the ICD for
+  `vkEnumerate*ExtensionProperties` if "pLayerName" is not equal to `NULL`.
+- ICDs creating new dispatchable objects via device extensions need to
+initialize the created dispatchable object.  The loader has generic *trampoline*
+code for unknown device extensions.  This generic *trampoline* code doesn't
+initialize the dispatch table within the newly created object.  See the
+[Creating New Dispatchable Objects](#creating-new-dispatchable-objects) section
+for more information on how to initialize created dispatchable objects for
+extensions not known by the loader.
+
+
+#### Android ICD Negotiation
+
+The Android loader uses the same protocol for initializing the dispatch
+table as described above. The only difference is that the Android
+loader queries layer and extension information directly from the
+respective libraries and does not use the JSON manifest files used
+by the Windows, Linux and macOS loaders.
+
+## Table of Debug Environment Variables
+
+The following are all the Debug Environment Variables available for use with the
+Loader.  These are referenced throughout the text, but collected here for ease
+of discovery.
+
+| Environment Variable              | Behavior |  Example Format  |
+|:---:|---------------------|----------------------|
+| VK_ICD_FILENAMES                  | Force the loader to use the specific ICD JSON files.  The value should contain a list of delimited full path listings to ICD JSON Manifest files.  **NOTE:** If you fail to use the global path to a JSON file, you may encounter issues.  |  `export VK_ICD_FILENAMES=<folder_a>/intel.json:<folder_b>/amd.json`<br/><br/>`set VK_ICD_FILENAMES=<folder_a>\nvidia.json;<folder_b>\mesa.json` |
+| VK_INSTANCE_LAYERS                | Force the loader to add the given layers to the list of Enabled layers normally passed into `vkCreateInstance`.  These layers are added first, and the loader will remove any duplicate layers that appear in both this list as well as that passed into `ppEnabledLayerNames`. | `export VK_INSTANCE_LAYERS=<layer_a>:<layer_b>`<br/><br/>`set VK_INSTANCE_LAYERS=<layer_a>;<layer_b>` |
+| VK_LAYER_PATH                     | Override the loader's standard Layer library search folders and use the provided delimited folders to search for layer Manifest files. | `export VK_LAYER_PATH=<path_a>:<path_b>`<br/><br/>`set VK_LAYER_PATH=<path_a>;<path_b>` |
+| VK_LOADER_DISABLE_INST_EXT_FILTER | Disable the filtering out of instance extensions that the loader doesn't know about.  This will allow applications to enable instance extensions exposed by ICDs but that the loader has no support for.  **NOTE:** This may cause the loader or application to crash. |  `export VK_LOADER_DISABLE_INST_EXT_FILTER=1`<br/><br/>`set VK_LOADER_DISABLE_INST_EXT_FILTER=1` |
+| VK_LOADER_DEBUG                   | Enable loader debug messages.  Options are:<br/>- error (only errors)<br/>- warn (warnings and errors)<br/>- info (info, warning, and errors)<br/>- debug (debug + all before)<br/>- all (report out all messages) | `export VK_LOADER_DEBUG=all`<br/><br/>`set VK_LOADER_DEBUG=warn` |
+ 
+## Glossary of Terms
+
+| Term | Definition |
+|:---:|--------------------|
+| Android Loader | The loader designed to work primarily for the Android OS.  This is generated from a different code base than the desktop loader.  But, in all important aspects, it should be functionally equivalent. |
+| Desktop Loader | The loader designed to work on Windows, Linux and macOS.  This is generated from a different [code base](https://github.com/KhronosGroup/Vulkan-Loader) than the Android loader.  But in all important aspects, it should be functionally equivalent. |
+| Core Function | A function that is already part of the Vulkan core specification and not an extension.  For example, vkCreateDevice(). |
+| Device Call Chain | The call chain of functions followed for device functions.  This call chain for a device function is usually as follows: first the application calls into a loader trampoline, then the loader trampoline calls enabled layers, and the final layer calls into the ICD specific to the device.  See the [Dispatch Tables and Call Chains](#dispatch-tables-and-call-chains) section for more information |
+| Device Function | A Device function is any Vulkan function which takes a `VkDevice`, `VkQueue`, `VkCommandBuffer`, or any child of these, as its first parameter.  Some Vulkan Device functions are: `vkQueueSubmit`, `vkBeginCommandBuffer`, `vkCreateEvent`.  See the [Instance Versus Device](#instance-versus-device) section for more information. |
+| Discovery | The process of the loader searching for ICD and Layer files to set up the internal list of Vulkan objects available.  On Windows/Linux/macOS, the discovery process typically focuses on searching for Manifest files.  On Android, the process focuses on searching for library files. |
+| Dispatch Table | An array of function pointers (including core and possibly extension functions) used to step to the next entity in a call chain.  The entity could be the loader, a layer or an ICD.  See [Dispatch Tables and Call Chains](#dispatch-tables-and-call-chains) for more information.  |
+| Extension | A concept of Vulkan used to expand the core Vulkan functionality.  Extensions may be IHV-specific, platform-specific, or more broadly available.  You should always query if an extension exists, and enable it during `vkCreateInstance` (if it is an instance extension) or during `vkCreateDevice` (if it is a device extension). |
+| ICD | Acronym for Installable Client Driver.  These are drivers that are provided by IHVs to interact with the hardware they provide.  See [Installable Client Drivers](#installable-client-drivers) section for more information.
+| IHV | Acronym for an Independent Hardware Vendor.  Typically the company that built the underlying hardware technology you are trying to use.  Typical examples of Graphics IHVs are: AMD, ARM, Imagination, Intel, Nvidia, Qualcomm, etc. |
+| Instance Call Chain | The call chain of functions followed for instance functions.  This call chain for an instance function is usually as follows: first the application calls into a loader trampoline, then the loader trampoline calls enabled layers, the final layer calls a loader terminator, and the loader terminator calls all available ICDs.  See the [Dispatch Tables and Call Chains](#dispatch-tables-and-call-chains) section for more information |
+| Instance Function | An Instance function is any Vulkan function which takes as its first parameter either a `VkInstance` or a `VkPhysicalDevice` or nothing at all.  Some Vulkan Instance functions are: `vkEnumerateInstanceExtensionProperties`, `vkEnumeratePhysicalDevices`, `vkCreateInstance`, `vkDestroyInstance`.  See the [Instance Versus Device](#instance-versus-device) section for more information. |
+| Layer | Layers are optional components that augment the Vulkan system.  They can intercept, evaluate, and modify existing Vulkan functions on their way from the application down to the hardware.  See the [Layers](#layers) section for more information. |
+| Loader | The middleware program which acts as the mediator between Vulkan applications, Vulkan layers and Vulkan drivers.  See [The Loader](#the-loader) section for more information. |
+| Manifest Files | Data files in JSON format used by the desktop loader.  These files contain specific information for either a [Layer](#layer-manifest-file-format) or an [ICD](#icd-manifest-file-format).
+| Terminator Function | The last function in the instance call chain above the ICDs and owned by the loader.  This function is required in the instance call chain because all instance functionality must be communicated to all ICDs capable of receiving the call.  See [Dispatch Tables and Call Chains](#dispatch-tables-and-call-chains) for more information. |
+| Trampoline Function | The first function in an instance or device call chain owned by the loader which handles the set up and proper call chain walk using the appropriate dispatch table.  On device functions (in the device call chain) this function can actually be skipped.  See [Dispatch Tables and Call Chains](#dispatch-tables-and-call-chains) for more information. |
+| WSI Extension | Acronym for Windowing System Integration.  A Vulkan extension targeting a particular Windowing system and designed to interface between the Windowing system and Vulkan. See [WSI Extensions](#wsi-extensions) for more information. |
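The Extension entry above describes the query-then-enable pattern. As a purely illustrative sketch (not part of the imported patch), the following C snippet checks whether the `VK_EXT_debug_utils` instance extension is available and, if so, enables it at `vkCreateInstance`; error handling is trimmed for brevity.

```c
#include <stdlib.h>
#include <string.h>
#include <vulkan/vulkan.h>

// Query the available instance extensions, then enable VK_EXT_debug_utils at
// instance creation only if the loader/ICDs actually advertise it.
static VkInstance create_instance_with_debug_utils(void) {
    uint32_t count = 0;
    vkEnumerateInstanceExtensionProperties(NULL, &count, NULL);

    VkExtensionProperties *props = malloc(count * sizeof(VkExtensionProperties));
    if (props == NULL) return VK_NULL_HANDLE;
    vkEnumerateInstanceExtensionProperties(NULL, &count, props);

    int found = 0;
    for (uint32_t i = 0; i < count; ++i) {
        if (strcmp(props[i].extensionName, VK_EXT_DEBUG_UTILS_EXTENSION_NAME) == 0) {
            found = 1;
            break;
        }
    }
    free(props);

    const char *enabled[] = {VK_EXT_DEBUG_UTILS_EXTENSION_NAME};
    VkInstanceCreateInfo info = {
        .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
        .enabledExtensionCount = found ? 1u : 0u,
        .ppEnabledExtensionNames = found ? enabled : NULL,
    };

    VkInstance instance = VK_NULL_HANDLE;
    if (vkCreateInstance(&info, NULL, &instance) != VK_SUCCESS) {
        return VK_NULL_HANDLE;
    }
    return instance;
}
```

A device extension follows the same pattern, except the availability query is `vkEnumerateDeviceExtensionProperties` against a `VkPhysicalDevice` and the name is passed through `VkDeviceCreateInfo` at `vkCreateDevice`.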
diff --git a/src/third_party/vulkan-loader/src/loader/README.md b/src/third_party/vulkan-loader/src/loader/README.md
new file mode 100644
index 0000000..cef13b8
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/README.md
@@ -0,0 +1,5 @@
+# Loader Specification and Interfaces
+See LoaderAndLayerInterface.md for detailed documentation.
+
+# Building
+Builds for Linux, Windows, and macOS are supported via CMake. See the top-level BUILD.md file.
diff --git a/src/third_party/vulkan-loader/src/loader/adapters.h b/src/third_party/vulkan-loader/src/loader/adapters.h
new file mode 100644
index 0000000..ef97d66
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/adapters.h
@@ -0,0 +1,80 @@
+/*
+* Copyright (c) 2019 The Khronos Group Inc.
+* Copyright (c) 2019 Valve Corporation
+* Copyright (c) 2019 LunarG, Inc.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*
+* Author: Lenny Komow <lenny@lunarg.com>
+*/
+
+typedef struct LoaderEnumAdapters2 {
+    ULONG adapter_count;
+    struct {
+        UINT handle;
+        LUID luid;
+        ULONG source_count;
+        BOOL present_move_regions_preferred;
+    } * adapters;
+} LoaderEnumAdapters2;
+
+typedef _Check_return_ NTSTATUS(APIENTRY *PFN_LoaderEnumAdapters2)(const LoaderEnumAdapters2 *);
+
+typedef enum AdapterInfoType {
+    LOADER_QUERY_TYPE_REGISTRY = 48,
+} AdapterInfoType;
+
+typedef struct LoaderQueryAdapterInfo {
+    UINT handle;
+    AdapterInfoType type;
+    VOID *private_data;
+    UINT private_data_size;
+} LoaderQueryAdapterInfo;
+
+typedef _Check_return_ NTSTATUS(APIENTRY *PFN_LoaderQueryAdapterInfo)(const LoaderQueryAdapterInfo *);
+
+typedef enum LoaderQueryRegistryType {
+    LOADER_QUERY_REGISTRY_ADAPTER_KEY = 1,
+} LoaderQueryRegistryType;
+
+typedef enum LoaderQueryRegistryStatus {
+    LOADER_QUERY_REGISTRY_STATUS_SUCCESS = 0,
+    LOADER_QUERY_REGISTRY_STATUS_BUFFER_OVERFLOW = 1,
+} LoaderQueryRegistryStatus;
+
+typedef struct LoaderQueryRegistryFlags {
+    union {
+        struct {
+            UINT translate_path : 1;
+            UINT mutable_value : 1;
+            UINT reserved : 30;
+        };
+        UINT value;
+    };
+} LoaderQueryRegistryFlags;
+
+typedef struct LoaderQueryRegistryInfo {
+    LoaderQueryRegistryType query_type;
+    LoaderQueryRegistryFlags query_flags;
+    WCHAR value_name[MAX_PATH];
+    ULONG value_type;
+    ULONG physical_adapter_index;
+    ULONG output_value_size;
+    LoaderQueryRegistryStatus status;
+    union {
+        DWORD output_dword;
+        UINT64 output_qword;
+        WCHAR output_string[1];
+        BYTE output_binary[1];
+    };
+} LoaderQueryRegistryInfo;
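adapters.h only declares the adapter-query structures and function-pointer types; it does not show how they are driven. Purely as an illustration (not part of the patch), and assuming a `PFN_LoaderQueryAdapterInfo` pointer has already been obtained from the OS and that the NT types the header relies on are in scope, a first-pass query for an adapter's registry key could be shaped like this:

```c
#include <windows.h>
#include "adapters.h"  // Assumes NTSTATUS etc. are already defined, as where the loader includes it.

// Illustrative only: 'query_adapter_info' stands in for a real
// PFN_LoaderQueryAdapterInfo obtained elsewhere; this header does not say
// where that pointer comes from.
static int adapter_registry_key_size(PFN_LoaderQueryAdapterInfo query_adapter_info,
                                     UINT adapter_handle, ULONG *needed_size) {
    LoaderQueryRegistryInfo registry_info = {0};
    registry_info.query_type = LOADER_QUERY_REGISTRY_ADAPTER_KEY;
    registry_info.query_flags.translate_path = 1;

    LoaderQueryAdapterInfo query = {0};
    query.handle = adapter_handle;
    query.type = LOADER_QUERY_TYPE_REGISTRY;
    query.private_data = &registry_info;
    query.private_data_size = sizeof(registry_info);

    (void)query_adapter_info(&query);

    // A fixed-size LoaderQueryRegistryInfo has no room for a long string
    // payload, so a BUFFER_OVERFLOW status reports how much space is needed,
    // suggesting a second query with a larger buffer.
    if (registry_info.status == LOADER_QUERY_REGISTRY_STATUS_BUFFER_OVERFLOW) {
        *needed_size = registry_info.output_value_size;
        return 1;
    }
    return registry_info.status == LOADER_QUERY_REGISTRY_STATUS_SUCCESS;
}
```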
diff --git a/src/third_party/vulkan-loader/src/loader/asm_offset.c b/src/third_party/vulkan-loader/src/loader/asm_offset.c
new file mode 100644
index 0000000..97832af
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/asm_offset.c
@@ -0,0 +1,94 @@
+/*
+ * Copyright (c) 2017-2018 The Khronos Group Inc.
+ * Copyright (c) 2017-2018 Valve Corporation
+ * Copyright (c) 2017-2018 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Lenny Komow <lenny@lunarg.com>
+ */
+
+// This code generates an assembly file which provides the offsets needed to access struct members from assembly code.
+
+#include <stdio.h>
+#include "loader.h"
+
+#if !defined(_MSC_VER) || (_MSC_VER >= 1900)
+#define SIZE_T_FMT "%-8zu"
+#else
+#define SIZE_T_FMT "%-8lu"
+#endif
+
+struct ValueInfo
+{
+    const char *name;
+    size_t value;
+    const char *comment;
+};
+
+int main(int argc, char **argv) {
+    const char *assembler = NULL;
+    for (int i = 0; i < argc; ++i) {
+        if (!strcmp(argv[i], "MASM")) {
+            assembler = "MASM";
+        } else if (!strcmp(argv[i], "GAS")) {
+            assembler = "GAS";
+        }
+    }
+    if (assembler == NULL) {
+        return 1;
+    }
+
+    struct ValueInfo values[] = {
+        { .name = "VK_DEBUG_REPORT_ERROR_BIT_EXT", .value = (size_t) VK_DEBUG_REPORT_ERROR_BIT_EXT,
+            .comment = "The numerical value of the enum value 'VK_DEBUG_REPORT_ERROR_BIT_EXT'" },
+        { .name = "PTR_SIZE", .value = sizeof(void*),
+            .comment = "The size of a pointer" },
+        { .name = "HASH_SIZE", .value = sizeof(struct loader_dispatch_hash_entry),
+            .comment = "The size of a 'loader_dispatch_hash_entry' struct" },
+        { .name = "HASH_OFFSET_INSTANCE", .value = offsetof(struct loader_instance, phys_dev_ext_disp_hash),
+            .comment = "The offset of 'phys_dev_ext_disp_hash' within a 'loader_instance' struct" },
+        { .name = "PHYS_DEV_OFFSET_INST_DISPATCH", .value = offsetof(struct loader_instance_dispatch_table, phys_dev_ext),
+            .comment = "The offset of 'phys_dev_ext' within in 'loader_instance_dispatch_table' struct" },
+        { .name = "PHYS_DEV_OFFSET_PHYS_DEV_TRAMP", .value = offsetof(struct loader_physical_device_tramp, phys_dev),
+            .comment = "The offset of 'phys_dev' within a 'loader_physical_device_tramp' struct" },
+        { .name = "ICD_TERM_OFFSET_PHYS_DEV_TERM", .value = offsetof(struct loader_physical_device_term, this_icd_term),
+            .comment = "The offset of 'this_icd_term' within a 'loader_physical_device_term' struct" },
+        { .name = "PHYS_DEV_OFFSET_PHYS_DEV_TERM", .value = offsetof(struct loader_physical_device_term, phys_dev),
+            .comment = "The offset of 'phys_dev' within a 'loader_physical_device_term' struct" },
+        { .name = "INSTANCE_OFFSET_ICD_TERM", .value = offsetof(struct loader_icd_term, this_instance),
+            .comment = "The offset of 'this_instance' within a 'loader_icd_term' struct" },
+        { .name = "DISPATCH_OFFSET_ICD_TERM", .value = offsetof(struct loader_icd_term, phys_dev_ext),
+            .comment = "The offset of 'phys_dev_ext' within a 'loader_icd_term' struct" },
+        { .name = "FUNC_NAME_OFFSET_HASH", .value = offsetof(struct loader_dispatch_hash_entry, func_name),
+            .comment = "The offset of 'func_name' within a 'loader_dispatch_hash_entry' struct" },
+        { .name = "EXT_OFFSET_DEVICE_DISPATCH", .value = offsetof(struct loader_dev_dispatch_table, ext_dispatch),
+            .comment = "The offset of 'ext_dispatch' within a 'loader_dev_dispatch_table' struct" },
+    };
+
+    FILE *file = fopen("gen_defines.asm", "w");
+    fprintf(file, "\n");
+    if (!strcmp(assembler, "MASM")) {
+        for (size_t i = 0; i < sizeof(values)/sizeof(values[0]); ++i) {
+            fprintf(file, "%-32s equ " SIZE_T_FMT "; %s\n", values[i].name, values[i].value, values[i].comment);
+        }
+    } else if (!strcmp(assembler, "GAS")) {
+#ifdef __x86_64__
+        fprintf(file, ".set X86_64, 1\n");
+#endif // __x86_64__
+        for (size_t i = 0; i < sizeof(values)/sizeof(values[0]); ++i) {
+            fprintf(file, ".set %-32s, " SIZE_T_FMT "# %s\n", values[i].name, values[i].value, values[i].comment);
+        }
+    }
+    return fclose(file);
+}
diff --git a/src/third_party/vulkan-loader/src/loader/asm_test.S b/src/third_party/vulkan-loader/src/loader/asm_test.S
new file mode 100644
index 0000000..5d974df
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/asm_test.S
@@ -0,0 +1,24 @@
+#
+# Copyright (c) 2019 The Khronos Group Inc.
+# Copyright (c) 2019 Valve Corporation
+# Copyright (c) 2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+.intel_syntax noprefix
+.text
+.global sample
+.set PHYS_DEV_OFFSET_INST_DISPATCH, 10
+.set PTR_SIZE, 4
+sample:
+  mov ecx, [eax + (PHYS_DEV_OFFSET_INST_DISPATCH + (PTR_SIZE * 4))]
diff --git a/src/third_party/vulkan-loader/src/loader/cJSON.c b/src/third_party/vulkan-loader/src/loader/cJSON.c
new file mode 100644
index 0000000..8da6d83
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/cJSON.c
@@ -0,0 +1,1215 @@
+/*
+  Copyright (c) 2009 Dave Gamble
+  Copyright (c) 2015-2016 The Khronos Group Inc.
+  Copyright (c) 2015-2016 Valve Corporation
+  Copyright (c) 2015-2016 LunarG, Inc.
+
+  Permission is hereby granted, free of charge, to any person obtaining a copy
+  of this software and associated documentation files (the "Software"), to deal
+  in the Software without restriction, including without limitation the rights
+  to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+  copies of the Software, and to permit persons to whom the Software is
+  furnished to do so, subject to the following conditions:
+
+  The above copyright notice and this permission notice shall be included in
+  all copies or substantial portions of the Software.
+
+  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+  THE SOFTWARE.
+*/
+
+/* cJSON */
+/* JSON parser in C. */
+
+#include <string.h>
+#include <stdio.h>
+#include <math.h>
+#include <stdlib.h>
+#include <float.h>
+#include <limits.h>
+#include <ctype.h>
+#include "cJSON.h"
+
+static const char *ep;
+
+const char *cJSON_GetErrorPtr(void) { return ep; }
+
+static void *(*cJSON_malloc)(size_t sz) = malloc;
+static void (*cJSON_free)(void *ptr) = free;
+
+static char *cJSON_strdup(const char *str) {
+    size_t len;
+    char *copy;
+
+    len = strlen(str) + 1;
+    if (!(copy = (char *)cJSON_malloc(len))) return 0;
+    memcpy(copy, str, len);
+    return copy;
+}
+
+void cJSON_InitHooks(cJSON_Hooks *hooks) {
+    if (!hooks) { /* Reset hooks */
+        cJSON_malloc = malloc;
+        cJSON_free = free;
+        return;
+    }
+
+    cJSON_malloc = (hooks->malloc_fn) ? hooks->malloc_fn : malloc;
+    cJSON_free = (hooks->free_fn) ? hooks->free_fn : free;
+}
+
+/* Internal constructor. */
+static cJSON *cJSON_New_Item(void) {
+    cJSON *node = (cJSON *)cJSON_malloc(sizeof(cJSON));
+    if (node) memset(node, 0, sizeof(cJSON));
+    return node;
+}
+
+/* Delete a cJSON structure. */
+void cJSON_Delete(cJSON *c) {
+    cJSON *next;
+    while (c) {
+        next = c->next;
+        if (!(c->type & cJSON_IsReference) && c->child) cJSON_Delete(c->child);
+        if (!(c->type & cJSON_IsReference) && c->valuestring) cJSON_free(c->valuestring);
+        if (!(c->type & cJSON_StringIsConst) && c->string) cJSON_free(c->string);
+        cJSON_free(c);
+        c = next;
+    }
+}
+
+void cJSON_Free(void *p) { cJSON_free(p); }
+
+/* Parse the input text to generate a number, and populate the result into item.
+ */
+static const char *parse_number(cJSON *item, const char *num) {
+    double n = 0, sign = 1, scale = 0;
+    int subscale = 0, signsubscale = 1;
+
+    if (*num == '-') sign = -1, num++; /* Has sign? */
+    if (*num == '0') num++;            /* is zero */
+    if (*num >= '1' && *num <= '9') do
+            n = (n * 10.0) + (*num++ - '0');
+        while (*num >= '0' && *num <= '9'); /* Number? */
+    if (*num == '.' && num[1] >= '0' && num[1] <= '9') {
+        num++;
+        do
+            n = (n * 10.0) + (*num++ - '0'), scale--;
+        while (*num >= '0' && *num <= '9');
+    }                               /* Fractional part? */
+    if (*num == 'e' || *num == 'E') /* Exponent? */
+    {
+        num++;
+        if (*num == '+')
+            num++;
+        else if (*num == '-')
+            signsubscale = -1, num++;                                                   /* With sign? */
+        while (*num >= '0' && *num <= '9') subscale = (subscale * 10) + (*num++ - '0'); /* Number? */
+    }
+
+    n = sign * n * pow(10.0, (scale + subscale * signsubscale)); /* number = +/-
+                                                                    number.fraction *
+                                                                    10^+/- exponent */
+
+    item->valuedouble = n;
+    item->valueint = (int)n;
+    item->type = cJSON_Number;
+    return num;
+}
+
+static size_t pow2gt(size_t x) {
+    --x;
+    x |= x >> 1;
+    x |= x >> 2;
+    x |= x >> 4;
+    x |= x >> 8;
+    x |= x >> 16;
+    return x + 1;
+}
+
+typedef struct {
+    char *buffer;
+    size_t length;
+    size_t offset;
+} printbuffer;
+
+static char *ensure(printbuffer *p, size_t needed) {
+    char *newbuffer;
+    size_t newsize;
+    if (!p || !p->buffer) return 0;
+    needed += p->offset;
+    if (needed <= p->length) return p->buffer + p->offset;
+
+    newsize = pow2gt(needed);
+    newbuffer = (char *)cJSON_malloc(newsize);
+    if (!newbuffer) {
+        cJSON_free(p->buffer);
+        p->length = 0, p->buffer = 0;
+        return 0;
+    }
+    if (newbuffer) memcpy(newbuffer, p->buffer, p->length);
+    cJSON_free(p->buffer);
+    p->length = newsize;
+    p->buffer = newbuffer;
+    return newbuffer + p->offset;
+}
+
+static size_t update(printbuffer *p) {
+    char *str;
+    if (!p || !p->buffer) return 0;
+    str = p->buffer + p->offset;
+    return p->offset + strlen(str);
+}
+
+/* Render the number nicely from the given item into a string. */
+static char *print_number(cJSON *item, printbuffer *p) {
+    char *str = 0;
+    double d = item->valuedouble;
+    if (d == 0) {
+        if (p)
+            str = ensure(p, 2);
+        else
+            str = (char *)cJSON_malloc(2); /* special case for 0. */
+        if (str) strcpy(str, "0");
+    } else if (fabs(((double)item->valueint) - d) <= DBL_EPSILON && d <= INT_MAX && d >= INT_MIN) {
+        if (p)
+            str = ensure(p, 21);
+        else
+            str = (char *)cJSON_malloc(21); /* 2^64+1 can be represented in 21 chars. */
+        if (str) sprintf(str, "%d", item->valueint);
+    } else {
+        if (p)
+            str = ensure(p, 64);
+        else
+            str = (char *)cJSON_malloc(64); /* This is a nice tradeoff. */
+        if (str) {
+            if (fabs(floor(d) - d) <= DBL_EPSILON && fabs(d) < 1.0e60)
+                sprintf(str, "%.0f", d);
+            else if (fabs(d) < 1.0e-6 || fabs(d) > 1.0e9)
+                sprintf(str, "%e", d);
+            else
+                sprintf(str, "%f", d);
+        }
+    }
+    return str;
+}
+
+static unsigned parse_hex4(const char *str) {
+    unsigned h = 0;
+    if (*str >= '0' && *str <= '9')
+        h += (*str) - '0';
+    else if (*str >= 'A' && *str <= 'F')
+        h += 10 + (*str) - 'A';
+    else if (*str >= 'a' && *str <= 'f')
+        h += 10 + (*str) - 'a';
+    else
+        return 0;
+    h = h << 4;
+    str++;
+    if (*str >= '0' && *str <= '9')
+        h += (*str) - '0';
+    else if (*str >= 'A' && *str <= 'F')
+        h += 10 + (*str) - 'A';
+    else if (*str >= 'a' && *str <= 'f')
+        h += 10 + (*str) - 'a';
+    else
+        return 0;
+    h = h << 4;
+    str++;
+    if (*str >= '0' && *str <= '9')
+        h += (*str) - '0';
+    else if (*str >= 'A' && *str <= 'F')
+        h += 10 + (*str) - 'A';
+    else if (*str >= 'a' && *str <= 'f')
+        h += 10 + (*str) - 'a';
+    else
+        return 0;
+    h = h << 4;
+    str++;
+    if (*str >= '0' && *str <= '9')
+        h += (*str) - '0';
+    else if (*str >= 'A' && *str <= 'F')
+        h += 10 + (*str) - 'A';
+    else if (*str >= 'a' && *str <= 'f')
+        h += 10 + (*str) - 'a';
+    else
+        return 0;
+    return h;
+}
+
+/* Parse the input text into an unescaped cstring, and populate item. */
+static const unsigned char firstByteMark[7] = {0x00, 0x00, 0xC0, 0xE0, 0xF0, 0xF8, 0xFC};
+static const char *parse_string(cJSON *item, const char *str) {
+    const char *ptr = str + 1;
+    char *ptr2;
+    char *out;
+    int len = 0;
+    unsigned uc, uc2;
+    if (*str != '\"') {
+        ep = str;
+        return 0;
+    } /* not a string! */
+
+    while (*ptr != '\"' && *ptr && ++len)
+        if (*ptr++ == '\\') ptr++; /* Skip escaped quotes. */
+
+    out = (char *)cJSON_malloc(len + 1); /* This is how long we need for the string, roughly. */
+    if (!out) return 0;
+
+    ptr = str + 1;
+    ptr2 = out;
+    while (*ptr != '\"' && *ptr) {
+        if (*ptr != '\\')
+            *ptr2++ = *ptr++;
+        else {
+            ptr++;
+            switch (*ptr) {
+                case 'b':
+                    *ptr2++ = '\b';
+                    break;
+                case 'f':
+                    *ptr2++ = '\f';
+                    break;
+                case 'n':
+                    *ptr2++ = '\n';
+                    break;
+                case 'r':
+                    *ptr2++ = '\r';
+                    break;
+                case 't':
+                    *ptr2++ = '\t';
+                    break;
+                case 'u': /* transcode utf16 to utf8. */
+                    uc = parse_hex4(ptr + 1);
+                    ptr += 4; /* get the unicode char. */
+
+                    if ((uc >= 0xDC00 && uc <= 0xDFFF) || uc == 0) break; /* check for invalid.	*/
+
+                    if (uc >= 0xD800 && uc <= 0xDBFF) /* UTF16 surrogate pairs.	*/
+                    {
+                        if (ptr[1] != '\\' || ptr[2] != 'u') break; /* missing second-half of surrogate.	*/
+                        uc2 = parse_hex4(ptr + 3);
+                        ptr += 6;
+                        if (uc2 < 0xDC00 || uc2 > 0xDFFF) break; /* invalid second-half of surrogate.	*/
+                        uc = 0x10000 + (((uc & 0x3FF) << 10) | (uc2 & 0x3FF));
+                    }
+
+                    len = 4;
+                    if (uc < 0x80)
+                        len = 1;
+                    else if (uc < 0x800)
+                        len = 2;
+                    else if (uc < 0x10000)
+                        len = 3;
+                    ptr2 += len;
+
+                    switch (len) {
+                        case 4:
+                            *--ptr2 = ((uc | 0x80) & 0xBF);
+                            uc >>= 6;
+                            // fall through
+                        case 3:
+                            *--ptr2 = ((uc | 0x80) & 0xBF);
+                            uc >>= 6;
+                            // fall through
+                        case 2:
+                            *--ptr2 = ((uc | 0x80) & 0xBF);
+                            uc >>= 6;
+                            // fall through
+                        case 1:
+                            *--ptr2 = ((unsigned char)uc | firstByteMark[len]);
+                    }
+                    ptr2 += len;
+                    break;
+                default:
+                    *ptr2++ = *ptr;
+                    break;
+            }
+            ptr++;
+        }
+    }
+    *ptr2 = 0;
+    if (*ptr == '\"') ptr++;
+    item->valuestring = out;
+    item->type = cJSON_String;
+    return ptr;
+}
+
+/* Render the cstring provided to an escaped version that can be printed. */
+static char *print_string_ptr(const char *str, printbuffer *p) {
+    const char *ptr;
+    char *ptr2;
+    char *out;
+    size_t len = 0, flag = 0;
+    unsigned char token;
+
+    for (ptr = str; *ptr; ptr++) flag |= ((*ptr > 0 && *ptr < 32) || (*ptr == '\"') || (*ptr == '\\')) ? 1 : 0;
+    if (!flag) {
+        len = ptr - str;
+        if (p)
+            out = ensure(p, len + 3);
+        else
+            out = (char *)cJSON_malloc(len + 3);
+        if (!out) return 0;
+        ptr2 = out;
+        *ptr2++ = '\"';
+        strcpy(ptr2, str);
+        ptr2[len] = '\"';
+        ptr2[len + 1] = 0;
+        return out;
+    }
+
+    if (!str) {
+        if (p)
+            out = ensure(p, 3);
+        else
+            out = (char *)cJSON_malloc(3);
+        if (!out) return 0;
+        strcpy(out, "\"\"");
+        return out;
+    }
+    ptr = str;
+    while ((token = *ptr) && ++len) {
+        if (strchr("\"\\\b\f\n\r\t", token))
+            len++;
+        else if (token < 32)
+            len += 5;
+        ptr++;
+    }
+
+    if (p)
+        out = ensure(p, len + 3);
+    else
+        out = (char *)cJSON_malloc(len + 3);
+    if (!out) return 0;
+
+    ptr2 = out;
+    ptr = str;
+    *ptr2++ = '\"';
+    while (*ptr) {
+        if ((unsigned char)*ptr > 31 && *ptr != '\"' && *ptr != '\\')
+            *ptr2++ = *ptr++;
+        else {
+            switch (token = *ptr++) {
+                case '\\':
+                    *ptr2++ = '\\';
+                    break;
+                case '\"':
+                    *ptr2++ = '\"';
+                    break;
+                case '\b':
+                    *ptr2++ = '\b';
+                    break;
+                case '\f':
+                    *ptr2++ = '\f';
+                    break;
+                case '\n':
+                    *ptr2++ = '\n';
+                    break;
+                case '\r':
+                    *ptr2++ = '\r';
+                    break;
+                case '\t':
+                    *ptr2++ = '\t';
+                    break;
+                default:
+                    sprintf(ptr2, "u%04x", token);
+                    ptr2 += 5;
+                    break; /* escape and print */
+            }
+        }
+    }
+    *ptr2++ = '\"';
+    *ptr2++ = 0;
+    return out;
+}
+/* Invoke print_string_ptr (which is useful) on an item. */
+static char *print_string(cJSON *item, printbuffer *p) { return print_string_ptr(item->valuestring, p); }
+
+/* Predeclare these prototypes. */
+static const char *parse_value(cJSON *item, const char *value);
+static char *print_value(cJSON *item, int depth, int fmt, printbuffer *p);
+static const char *parse_array(cJSON *item, const char *value);
+static char *print_array(cJSON *item, int depth, int fmt, printbuffer *p);
+static const char *parse_object(cJSON *item, const char *value);
+static char *print_object(cJSON *item, int depth, int fmt, printbuffer *p);
+
+/* Utility to jump whitespace and cr/lf */
+static const char *skip(const char *in) {
+    while (in && *in && (unsigned char)*in <= 32) in++;
+    return in;
+}
+
+/* Parse an object - create a new root, and populate. */
+cJSON *cJSON_ParseWithOpts(const char *value, const char **return_parse_end, int require_null_terminated) {
+    const char *end = 0;
+    cJSON *c = cJSON_New_Item();
+    ep = 0;
+    if (!c) return 0; /* memory fail */
+
+    end = parse_value(c, skip(value));
+    if (!end) {
+        cJSON_Delete(c);
+        return 0;
+    } /* parse failure. ep is set. */
+
+    /* if we require null-terminated JSON without appended garbage, skip and
+     * then check for a null terminator */
+    if (require_null_terminated) {
+        end = skip(end);
+        if (*end) {
+            cJSON_Delete(c);
+            ep = end;
+            return 0;
+        }
+    }
+    if (return_parse_end) *return_parse_end = end;
+    return c;
+}
+/* Default options for cJSON_Parse */
+cJSON *cJSON_Parse(const char *value) { return cJSON_ParseWithOpts(value, 0, 0); }
+
+/* Render a cJSON item/entity/structure to text. */
+char *cJSON_Print(cJSON *item) { return print_value(item, 0, 1, 0); }
+char *cJSON_PrintUnformatted(cJSON *item) { return print_value(item, 0, 0, 0); }
+
+char *cJSON_PrintBuffered(cJSON *item, int prebuffer, int fmt) {
+    printbuffer p;
+    p.buffer = (char *)cJSON_malloc(prebuffer);
+    p.length = prebuffer;
+    p.offset = 0;
+    return print_value(item, 0, fmt, &p);
+}
+
+/* Parser core - when encountering text, process appropriately. */
+static const char *parse_value(cJSON *item, const char *value) {
+    if (!value) return 0; /* Fail on null. */
+    if (!strncmp(value, "null", 4)) {
+        item->type = cJSON_NULL;
+        return value + 4;
+    }
+    if (!strncmp(value, "false", 5)) {
+        item->type = cJSON_False;
+        return value + 5;
+    }
+    if (!strncmp(value, "true", 4)) {
+        item->type = cJSON_True;
+        item->valueint = 1;
+        return value + 4;
+    }
+    if (*value == '\"') {
+        return parse_string(item, value);
+    }
+    if (*value == '-' || (*value >= '0' && *value <= '9')) {
+        return parse_number(item, value);
+    }
+    if (*value == '[') {
+        return parse_array(item, value);
+    }
+    if (*value == '{') {
+        return parse_object(item, value);
+    }
+
+    ep = value;
+    return 0; /* failure. */
+}
+
+/* Render a value to text. */
+static char *print_value(cJSON *item, int depth, int fmt, printbuffer *p) {
+    char *out = 0;
+    if (!item) return 0;
+    if (p) {
+        switch ((item->type) & 255) {
+            case cJSON_NULL: {
+                out = ensure(p, 5);
+                if (out) strcpy(out, "null");
+                break;
+            }
+            case cJSON_False: {
+                out = ensure(p, 6);
+                if (out) strcpy(out, "false");
+                break;
+            }
+            case cJSON_True: {
+                out = ensure(p, 5);
+                if (out) strcpy(out, "true");
+                break;
+            }
+            case cJSON_Number:
+                out = print_number(item, p);
+                break;
+            case cJSON_String:
+                out = print_string(item, p);
+                break;
+            case cJSON_Array:
+                out = print_array(item, depth, fmt, p);
+                break;
+            case cJSON_Object:
+                out = print_object(item, depth, fmt, p);
+                break;
+        }
+    } else {
+        switch ((item->type) & 255) {
+            case cJSON_NULL:
+                out = cJSON_strdup("null");
+                break;
+            case cJSON_False:
+                out = cJSON_strdup("false");
+                break;
+            case cJSON_True:
+                out = cJSON_strdup("true");
+                break;
+            case cJSON_Number:
+                out = print_number(item, 0);
+                break;
+            case cJSON_String:
+                out = print_string(item, 0);
+                break;
+            case cJSON_Array:
+                out = print_array(item, depth, fmt, 0);
+                break;
+            case cJSON_Object:
+                out = print_object(item, depth, fmt, 0);
+                break;
+        }
+    }
+    return out;
+}
+
+/* Build an array from input text. */
+static const char *parse_array(cJSON *item, const char *value) {
+    cJSON *child;
+    if (*value != '[') {
+        ep = value;
+        return 0;
+    } /* not an array! */
+
+    item->type = cJSON_Array;
+    value = skip(value + 1);
+    if (*value == ']') return value + 1; /* empty array. */
+
+    item->child = child = cJSON_New_Item();
+    if (!item->child) return 0;                    /* memory fail */
+    value = skip(parse_value(child, skip(value))); /* skip any spacing, get the value. */
+    if (!value) return 0;
+
+    while (*value == ',') {
+        cJSON *new_item;
+        if (!(new_item = cJSON_New_Item())) return 0; /* memory fail */
+        child->next = new_item;
+        new_item->prev = child;
+        child = new_item;
+        value = skip(parse_value(child, skip(value + 1)));
+        if (!value) return 0; /* memory fail */
+    }
+
+    if (*value == ']') return value + 1; /* end of array */
+    ep = value;
+    return 0; /* malformed. */
+}
+
+/* Render an array to text */
+static char *print_array(cJSON *item, int depth, int fmt, printbuffer *p) {
+    char **entries;
+    char *out = 0, *ptr, *ret;
+    size_t len = 5;
+    cJSON *child = item->child;
+    int numentries = 0, fail = 0, j = 0;
+    size_t tmplen = 0, i = 0;
+
+    /* How many entries in the array? */
+    while (child) numentries++, child = child->next;
+    /* Explicitly handle numentries==0 */
+    if (!numentries) {
+        if (p)
+            out = ensure(p, 3);
+        else
+            out = (char *)cJSON_malloc(3);
+        if (out) strcpy(out, "[]");
+        return out;
+    }
+
+    if (p) {
+        /* Compose the output array. */
+        i = p->offset;
+        ptr = ensure(p, 1);
+        if (!ptr) return 0;
+        *ptr = '[';
+        p->offset++;
+        child = item->child;
+        while (child && !fail) {
+            print_value(child, depth + 1, fmt, p);
+            p->offset = update(p);
+            if (child->next) {
+                len = fmt ? 2 : 1;
+                ptr = ensure(p, len + 1);
+                if (!ptr) return 0;
+                *ptr++ = ',';
+                if (fmt) *ptr++ = ' ';
+                *ptr = 0;
+                p->offset += len;
+            }
+            child = child->next;
+        }
+        ptr = ensure(p, 2);
+        if (!ptr) return 0;
+        *ptr++ = ']';
+        *ptr = 0;
+        out = (p->buffer) + i;
+    } else {
+        /* Allocate an array to hold the values for each */
+        entries = (char **)cJSON_malloc(numentries * sizeof(char *));
+        if (!entries) return 0;
+        memset(entries, 0, numentries * sizeof(char *));
+        /* Retrieve all the results: */
+        child = item->child;
+        while (child && !fail) {
+            ret = print_value(child, depth + 1, fmt, 0);
+            entries[i++] = ret;
+            if (ret)
+                len += strlen(ret) + 2 + (fmt ? 1 : 0);
+            else
+                fail = 1;
+            child = child->next;
+        }
+
+        /* If we didn't fail, try to malloc the output string */
+        if (!fail) out = (char *)cJSON_malloc(len);
+        /* If that fails, we fail. */
+        if (!out) fail = 1;
+
+        /* Handle failure. */
+        if (fail) {
+            for (j = 0; j < numentries; j++)
+                if (entries[j]) cJSON_free(entries[j]);
+            cJSON_free(entries);
+            return 0;
+        }
+
+        /* Compose the output array. */
+        *out = '[';
+        ptr = out + 1;
+        *ptr = 0;
+        for (j = 0; j < numentries; j++) {
+            tmplen = strlen(entries[j]);
+            memcpy(ptr, entries[j], tmplen);
+            ptr += tmplen;
+            if (j != numentries - 1) {
+                *ptr++ = ',';
+                if (fmt) *ptr++ = ' ';
+                *ptr = 0;
+            }
+            cJSON_free(entries[j]);
+        }
+        cJSON_free(entries);
+        *ptr++ = ']';
+        *ptr++ = 0;
+    }
+    return out;
+}
+
+/* Build an object from the text. */
+static const char *parse_object(cJSON *item, const char *value) {
+    cJSON *child;
+    if (*value != '{') {
+        ep = value;
+        return 0;
+    } /* not an object! */
+
+    item->type = cJSON_Object;
+    value = skip(value + 1);
+    if (*value == '}') return value + 1; /* empty object. */
+
+    item->child = child = cJSON_New_Item();
+    if (!item->child) return 0;
+    value = skip(parse_string(child, skip(value)));
+    if (!value) return 0;
+    child->string = child->valuestring;
+    child->valuestring = 0;
+    if (*value != ':') {
+        ep = value;
+        return 0;
+    }                                                  /* fail! */
+    value = skip(parse_value(child, skip(value + 1))); /* skip any spacing, get the value. */
+    if (!value) return 0;
+
+    while (*value == ',') {
+        cJSON *new_item;
+        if (!(new_item = cJSON_New_Item())) return 0; /* memory fail */
+        child->next = new_item;
+        new_item->prev = child;
+        child = new_item;
+        value = skip(parse_string(child, skip(value + 1)));
+        if (!value) return 0;
+        child->string = child->valuestring;
+        child->valuestring = 0;
+        if (*value != ':') {
+            ep = value;
+            return 0;
+        }                                                  /* fail! */
+        value = skip(parse_value(child, skip(value + 1))); /* skip any spacing, get the value. */
+        if (!value) return 0;
+    }
+
+    if (*value == '}') return value + 1; /* end of object */
+    ep = value;
+    return 0; /* malformed. */
+}
+
+/* Render an object to text. */
+static char *print_object(cJSON *item, int depth, int fmt, printbuffer *p) {
+    char **entries = 0, **names = 0;
+    char *out = 0, *ptr, *ret, *str;
+    int j;
+    cJSON *child = item->child;
+    int numentries = 0, fail = 0, k;
+    size_t tmplen = 0, i = 0, len = 7;
+    /* Count the number of entries. */
+    while (child) numentries++, child = child->next;
+    /* Explicitly handle empty object case */
+    if (!numentries) {
+        if (p)
+            out = ensure(p, fmt ? depth + 4 : 3);
+        else
+            out = (char *)cJSON_malloc(fmt ? depth + 4 : 3);
+        if (!out) return 0;
+        ptr = out;
+        *ptr++ = '{';
+        if (fmt) {
+            *ptr++ = '\n';
+            for (j = 0; j < depth - 1; j++) *ptr++ = '\t';
+        }
+        *ptr++ = '}';
+        *ptr++ = 0;
+        return out;
+    }
+    if (p) {
+        /* Compose the output: */
+        i = p->offset;
+        len = fmt ? 2 : 1;
+        ptr = ensure(p, len + 1);
+        if (!ptr) return 0;
+        *ptr++ = '{';
+        if (fmt) *ptr++ = '\n';
+        *ptr = 0;
+        p->offset += len;
+        child = item->child;
+        depth++;
+        while (child) {
+            if (fmt) {
+                ptr = ensure(p, depth);
+                if (!ptr) return 0;
+                for (j = 0; j < depth; j++) *ptr++ = '\t';
+                p->offset += depth;
+            }
+            print_string_ptr(child->string, p);
+            p->offset = update(p);
+
+            len = fmt ? 2 : 1;
+            ptr = ensure(p, len);
+            if (!ptr) return 0;
+            *ptr++ = ':';
+            if (fmt) *ptr++ = '\t';
+            p->offset += len;
+
+            print_value(child, depth, fmt, p);
+            p->offset = update(p);
+
+            len = (fmt ? 1 : 0) + (child->next ? 1 : 0);
+            ptr = ensure(p, len + 1);
+            if (!ptr) return 0;
+            if (child->next) *ptr++ = ',';
+            if (fmt) *ptr++ = '\n';
+            *ptr = 0;
+            p->offset += len;
+            child = child->next;
+        }
+        ptr = ensure(p, fmt ? (depth + 1) : 2);
+        if (!ptr) return 0;
+        if (fmt)
+            for (j = 0; j < depth - 1; j++) *ptr++ = '\t';
+        *ptr++ = '}';
+        *ptr = 0;
+        out = (p->buffer) + i;
+    } else {
+        /* Allocate space for the names and the objects */
+        entries = (char **)cJSON_malloc(numentries * sizeof(char *));
+        if (!entries) return 0;
+        names = (char **)cJSON_malloc(numentries * sizeof(char *));
+        if (!names) {
+            cJSON_free(entries);
+            return 0;
+        }
+        memset(entries, 0, sizeof(char *) * numentries);
+        memset(names, 0, sizeof(char *) * numentries);
+
+        /* Collect all the results into our arrays: */
+        child = item->child;
+        depth++;
+        if (fmt) len += depth;
+        while (child) {
+            names[i] = str = print_string_ptr(child->string, 0);
+            entries[i++] = ret = print_value(child, depth, fmt, 0);
+            if (str && ret)
+                len += strlen(ret) + strlen(str) + 2 + (fmt ? 2 + depth : 0);
+            else
+                fail = 1;
+            child = child->next;
+        }
+
+        /* Try to allocate the output string */
+        if (!fail) out = (char *)cJSON_malloc(len);
+        if (!out) fail = 1;
+
+        /* Handle failure */
+        if (fail) {
+            for (j = 0; j < numentries; j++) {
+                if (names[j]) cJSON_free(names[j]);
+                if (entries[j]) cJSON_free(entries[j]);
+            }
+            cJSON_free(names);
+            cJSON_free(entries);
+            return 0;
+        }
+
+        /* Compose the output: */
+        *out = '{';
+        ptr = out + 1;
+        if (fmt) *ptr++ = '\n';
+        *ptr = 0;
+        for (j = 0; j < numentries; j++) {
+            if (fmt)
+                for (k = 0; k < depth; k++) *ptr++ = '\t';
+            tmplen = strlen(names[j]);
+            memcpy(ptr, names[j], tmplen);
+            ptr += tmplen;
+            *ptr++ = ':';
+            if (fmt) *ptr++ = '\t';
+            strcpy(ptr, entries[j]);
+            ptr += strlen(entries[j]);
+            if (j != numentries - 1) *ptr++ = ',';
+            if (fmt) *ptr++ = '\n';
+            *ptr = 0;
+            cJSON_free(names[j]);
+            cJSON_free(entries[j]);
+        }
+
+        cJSON_free(names);
+        cJSON_free(entries);
+        if (fmt)
+            for (j = 0; j < depth - 1; j++) *ptr++ = '\t';
+        *ptr++ = '}';
+        *ptr++ = 0;
+    }
+    return out;
+}
+
+/* Get Array size/item / object item. */
+int cJSON_GetArraySize(cJSON *array) {
+    cJSON *c = array->child;
+    int i = 0;
+    while (c) i++, c = c->next;
+    return i;
+}
+cJSON *cJSON_GetArrayItem(cJSON *array, int item) {
+    cJSON *c = array->child;
+    while (c && item > 0) item--, c = c->next;
+    return c;
+}
+cJSON *cJSON_GetObjectItem(cJSON *object, const char *string) {
+    cJSON *c = object->child;
+    while (c && strcmp(c->string, string)) c = c->next;
+    return c;
+}
+
+/* Utility for array list handling. */
+static void suffix_object(cJSON *prev, cJSON *item) {
+    prev->next = item;
+    item->prev = prev;
+}
+/* Utility for handling references. */
+static cJSON *create_reference(cJSON *item) {
+    cJSON *ref = cJSON_New_Item();
+    if (!ref) return 0;
+    memcpy(ref, item, sizeof(cJSON));
+    ref->string = 0;
+    ref->type |= cJSON_IsReference;
+    ref->next = ref->prev = 0;
+    return ref;
+}
+
+/* Add item to array/object. */
+void cJSON_AddItemToArray(cJSON *array, cJSON *item) {
+    cJSON *c = array->child;
+    if (!item) return;
+    if (!c) {
+        array->child = item;
+    } else {
+        while (c && c->next) c = c->next;
+        suffix_object(c, item);
+    }
+}
+void cJSON_AddItemToObject(cJSON *object, const char *string, cJSON *item) {
+    if (!item) return;
+    if (item->string) cJSON_free(item->string);
+    item->string = cJSON_strdup(string);
+    cJSON_AddItemToArray(object, item);
+}
+void cJSON_AddItemToObjectCS(cJSON *object, const char *string, cJSON *item) {
+    if (!item) return;
+    if (!(item->type & cJSON_StringIsConst) && item->string) cJSON_free(item->string);
+    item->string = (char *)string;
+    item->type |= cJSON_StringIsConst;
+    cJSON_AddItemToArray(object, item);
+}
+void cJSON_AddItemReferenceToArray(cJSON *array, cJSON *item) { cJSON_AddItemToArray(array, create_reference(item)); }
+void cJSON_AddItemReferenceToObject(cJSON *object, const char *string, cJSON *item) {
+    cJSON_AddItemToObject(object, string, create_reference(item));
+}
+
+cJSON *cJSON_DetachItemFromArray(cJSON *array, int which) {
+    cJSON *c = array->child;
+    while (c && which > 0) c = c->next, which--;
+    if (!c) return 0;
+    if (c->prev) c->prev->next = c->next;
+    if (c->next) c->next->prev = c->prev;
+    if (c == array->child) array->child = c->next;
+    c->prev = c->next = 0;
+    return c;
+}
+void cJSON_DeleteItemFromArray(cJSON *array, int which) { cJSON_Delete(cJSON_DetachItemFromArray(array, which)); }
+cJSON *cJSON_DetachItemFromObject(cJSON *object, const char *string) {
+    int i = 0;
+    cJSON *c = object->child;
+    while (c && strcmp(c->string, string)) i++, c = c->next;
+    if (c) return cJSON_DetachItemFromArray(object, i);
+    return 0;
+}
+void cJSON_DeleteItemFromObject(cJSON *object, const char *string) { cJSON_Delete(cJSON_DetachItemFromObject(object, string)); }
+
+/* Replace array/object items with new ones. */
+void cJSON_InsertItemInArray(cJSON *array, int which, cJSON *newitem) {
+    cJSON *c = array->child;
+    while (c && which > 0) c = c->next, which--;
+    if (!c) {
+        cJSON_AddItemToArray(array, newitem);
+        return;
+    }
+    newitem->next = c;
+    newitem->prev = c->prev;
+    c->prev = newitem;
+    if (c == array->child)
+        array->child = newitem;
+    else
+        newitem->prev->next = newitem;
+}
+void cJSON_ReplaceItemInArray(cJSON *array, int which, cJSON *newitem) {
+    cJSON *c = array->child;
+    while (c && which > 0) c = c->next, which--;
+    if (!c) return;
+    newitem->next = c->next;
+    newitem->prev = c->prev;
+    if (newitem->next) newitem->next->prev = newitem;
+    if (c == array->child)
+        array->child = newitem;
+    else
+        newitem->prev->next = newitem;
+    c->next = c->prev = 0;
+    cJSON_Delete(c);
+}
+void cJSON_ReplaceItemInObject(cJSON *object, const char *string, cJSON *newitem) {
+    int i = 0;
+    cJSON *c = object->child;
+    while (c && strcmp(c->string, string)) i++, c = c->next;
+    if (c) {
+        newitem->string = cJSON_strdup(string);
+        cJSON_ReplaceItemInArray(object, i, newitem);
+    }
+}
+
+/* Create basic types: */
+cJSON *cJSON_CreateNull(void) {
+    cJSON *item = cJSON_New_Item();
+    if (item) item->type = cJSON_NULL;
+    return item;
+}
+cJSON *cJSON_CreateTrue(void) {
+    cJSON *item = cJSON_New_Item();
+    if (item) item->type = cJSON_True;
+    return item;
+}
+cJSON *cJSON_CreateFalse(void) {
+    cJSON *item = cJSON_New_Item();
+    if (item) item->type = cJSON_False;
+    return item;
+}
+cJSON *cJSON_CreateBool(int b) {
+    cJSON *item = cJSON_New_Item();
+    if (item) item->type = b ? cJSON_True : cJSON_False;
+    return item;
+}
+cJSON *cJSON_CreateNumber(double num) {
+    cJSON *item = cJSON_New_Item();
+    if (item) {
+        item->type = cJSON_Number;
+        item->valuedouble = num;
+        item->valueint = (int)num;
+    }
+    return item;
+}
+cJSON *cJSON_CreateString(const char *string) {
+    cJSON *item = cJSON_New_Item();
+    if (item) {
+        item->type = cJSON_String;
+        item->valuestring = cJSON_strdup(string);
+    }
+    return item;
+}
+cJSON *cJSON_CreateArray(void) {
+    cJSON *item = cJSON_New_Item();
+    if (item) item->type = cJSON_Array;
+    return item;
+}
+cJSON *cJSON_CreateObject(void) {
+    cJSON *item = cJSON_New_Item();
+    if (item) item->type = cJSON_Object;
+    return item;
+}
+
+/* Create Arrays: */
+cJSON *cJSON_CreateIntArray(const int *numbers, int count) {
+    int i;
+    cJSON *n = 0, *p = 0, *a = cJSON_CreateArray();
+    for (i = 0; a && i < count; i++) {
+        n = cJSON_CreateNumber(numbers[i]);
+        if (!i)
+            a->child = n;
+        else
+            suffix_object(p, n);
+        p = n;
+    }
+    return a;
+}
+cJSON *cJSON_CreateFloatArray(const float *numbers, int count) {
+    int i;
+    cJSON *n = 0, *p = 0, *a = cJSON_CreateArray();
+    for (i = 0; a && i < count; i++) {
+        n = cJSON_CreateNumber(numbers[i]);
+        if (!i)
+            a->child = n;
+        else
+            suffix_object(p, n);
+        p = n;
+    }
+    return a;
+}
+cJSON *cJSON_CreateDoubleArray(const double *numbers, int count) {
+    int i;
+    cJSON *n = 0, *p = 0, *a = cJSON_CreateArray();
+    for (i = 0; a && i < count; i++) {
+        n = cJSON_CreateNumber(numbers[i]);
+        if (!i)
+            a->child = n;
+        else
+            suffix_object(p, n);
+        p = n;
+    }
+    return a;
+}
+cJSON *cJSON_CreateStringArray(const char **strings, int count) {
+    int i;
+    cJSON *n = 0, *p = 0, *a = cJSON_CreateArray();
+    for (i = 0; a && i < count; i++) {
+        n = cJSON_CreateString(strings[i]);
+        if (!i)
+            a->child = n;
+        else
+            suffix_object(p, n);
+        p = n;
+    }
+    return a;
+}
+
+/* Duplication */
+cJSON *cJSON_Duplicate(cJSON *item, int recurse) {
+    cJSON *newitem, *cptr, *nptr = 0, *newchild;
+    /* Bail on bad ptr */
+    if (!item) return 0;
+    /* Create new item */
+    newitem = cJSON_New_Item();
+    if (!newitem) return 0;
+    /* Copy over all vars */
+    newitem->type = item->type & (~cJSON_IsReference), newitem->valueint = item->valueint, newitem->valuedouble = item->valuedouble;
+    if (item->valuestring) {
+        newitem->valuestring = cJSON_strdup(item->valuestring);
+        if (!newitem->valuestring) {
+            cJSON_Delete(newitem);
+            return 0;
+        }
+    }
+    if (item->string) {
+        newitem->string = cJSON_strdup(item->string);
+        if (!newitem->string) {
+            cJSON_Delete(newitem);
+            return 0;
+        }
+    }
+    /* If non-recursive, then we're done! */
+    if (!recurse) return newitem;
+    /* Walk the ->next chain for the child. */
+    cptr = item->child;
+    while (cptr) {
+        newchild = cJSON_Duplicate(cptr, 1); /* Duplicate (with recurse) each item in the ->next chain */
+        if (!newchild) {
+            cJSON_Delete(newitem);
+            return 0;
+        }
+        if (nptr) {
+            nptr->next = newchild, newchild->prev = nptr;
+            nptr = newchild;
+        } /* If newitem->child already set, then crosswire ->prev and ->next and
+             move on */
+        else {
+            newitem->child = newchild;
+            nptr = newchild;
+        } /* Set newitem->child and move to it */
+        cptr = cptr->next;
+    }
+    return newitem;
+}
+
+void cJSON_Minify(char *json) {
+    char *into = json;
+    while (*json) {
+        if (*json == ' ')
+            json++;
+        else if (*json == '\t')
+            json++; /* Whitespace characters. */
+        else if (*json == '\r')
+            json++;
+        else if (*json == '\n')
+            json++;
+        else if (*json == '/' && json[1] == '/')
+            while (*json && *json != '\n') json++; /* double-slash comments, to end of line. */
+        else if (*json == '/' && json[1] == '*') {
+            while (*json && !(*json == '*' && json[1] == '/')) json++;
+            json += 2;
+        } /* multiline comments. */
+        else if (*json == '\"') {
+            *into++ = *json++;
+            while (*json && *json != '\"') {
+                if (*json == '\\') *into++ = *json++;
+                *into++ = *json++;
+            }
+            *into++ = *json++;
+        } /* string literals, which are \" sensitive. */
+        else
+            *into++ = *json++; /* All other characters. */
+    }
+    *into = 0; /* and null-terminate. */
+}
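All allocations in the parser above flow through the function pointers installed by `cJSON_InitHooks` (defaulting to `malloc`/`free`), so an embedder can route cJSON's memory through its own allocator. A minimal, hypothetical sketch of installing such hooks, purely for illustration:

```c
#include <stdlib.h>
#include "cJSON.h"

// Keeps a rough balance of cJSON allocations versus frees; illustrative only.
static size_t g_outstanding_allocs = 0;

static void *counting_malloc(size_t sz) {
    void *p = malloc(sz);
    if (p != NULL) ++g_outstanding_allocs;
    return p;
}

static void counting_free(void *ptr) {
    if (ptr != NULL) --g_outstanding_allocs;
    free(ptr);
}

static void install_counting_hooks(void) {
    cJSON_Hooks hooks = {counting_malloc, counting_free};
    cJSON_InitHooks(&hooks);
}

// Passing NULL restores the default malloc/free pair.
static void remove_counting_hooks(void) { cJSON_InitHooks(NULL); }
```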
diff --git a/src/third_party/vulkan-loader/src/loader/cJSON.h b/src/third_party/vulkan-loader/src/loader/cJSON.h
new file mode 100644
index 0000000..f0059ab
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/cJSON.h
@@ -0,0 +1,174 @@
+/*
+  Copyright (c) 2009 Dave Gamble
+  Copyright (c) 2015-2016 The Khronos Group Inc.
+  Copyright (c) 2015-2016 Valve Corporation
+  Copyright (c) 2015-2016 LunarG, Inc.
+
+  Permission is hereby granted, free of charge, to any person obtaining a copy
+  of this software and associated documentation files (the "Software"), to deal
+  in the Software without restriction, including without limitation the rights
+  to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+  copies of the Software, and to permit persons to whom the Software is
+  furnished to do so, subject to the following conditions:
+
+  The above copyright notice and this permission notice shall be included in
+  all copies or substantial portions of the Software.
+
+  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+  THE SOFTWARE.
+*/
+
+#ifndef cJSON__h
+#define cJSON__h
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* cJSON Types: */
+#define cJSON_False 0
+#define cJSON_True 1
+#define cJSON_NULL 2
+#define cJSON_Number 3
+#define cJSON_String 4
+#define cJSON_Array 5
+#define cJSON_Object 6
+
+#define cJSON_IsReference 256
+#define cJSON_StringIsConst 512
+
+/* The cJSON structure: */
+typedef struct cJSON {
+    struct cJSON *next, *prev; /* next/prev allow you to walk array/object
+                                  chains. Alternatively, use
+                                  GetArraySize/GetArrayItem/GetObjectItem */
+    struct cJSON *child;       /* An array or object item will have a child pointer
+                                  pointing to a chain of the items in the
+                                  array/object. */
+
+    int type; /* The type of the item, as above. */
+
+    char *valuestring;  /* The item's string, if type==cJSON_String */
+    int valueint;       /* The item's number, if type==cJSON_Number */
+    double valuedouble; /* The item's number, if type==cJSON_Number */
+
+    char *string; /* The item's name string, if this item is the child of, or is
+                     in the list of subitems of an object. */
+} cJSON;
+
+typedef struct cJSON_Hooks {
+    void *(*malloc_fn)(size_t sz);
+    void (*free_fn)(void *ptr);
+} cJSON_Hooks;
+
+/* Supply malloc, realloc and free functions to cJSON */
+extern void cJSON_InitHooks(cJSON_Hooks *hooks);
+
+/* Supply a block of JSON, and this returns a cJSON object you can interrogate.
+ * Call cJSON_Delete when finished. */
+extern cJSON *cJSON_Parse(const char *value);
+/* Render a cJSON entity to text for transfer/storage. Free the char* when
+ * finished. */
+extern char *cJSON_Print(cJSON *item);
+/* Render a cJSON entity to text for transfer/storage without any formatting.
+ * Free the char* when finished. */
+extern char *cJSON_PrintUnformatted(cJSON *item);
+/* Render a cJSON entity to text using a buffered strategy. prebuffer is a guess
+ * at the final size. guessing well reduces reallocation. fmt=0 gives
+ * unformatted, =1 gives formatted */
+extern char *cJSON_PrintBuffered(cJSON *item, int prebuffer, int fmt);
+/* Delete a cJSON entity and all subentities. */
+extern void cJSON_Delete(cJSON *c);
+/* Delete an item allocated inside the JSON parser*/
+extern void cJSON_Free(void *p);
+
+/* Returns the number of items in an array (or object). */
+extern int cJSON_GetArraySize(cJSON *array);
+/* Retrieve item number "item" from array "array". Returns NULL if unsuccessful.
+ */
+extern cJSON *cJSON_GetArrayItem(cJSON *array, int item);
+/* Get item "string" from object. Case insensitive. */
+extern cJSON *cJSON_GetObjectItem(cJSON *object, const char *string);
+
+/* For analysing failed parses. This returns a pointer to the parse error.
+ * You'll probably need to look a few chars back to make sense of it. Defined
+ * when cJSON_Parse() returns 0. 0 when cJSON_Parse() succeeds. */
+extern const char *cJSON_GetErrorPtr(void);
+
+/* These calls create a cJSON item of the appropriate type. */
+extern cJSON *cJSON_CreateNull(void);
+extern cJSON *cJSON_CreateTrue(void);
+extern cJSON *cJSON_CreateFalse(void);
+extern cJSON *cJSON_CreateBool(int b);
+extern cJSON *cJSON_CreateNumber(double num);
+extern cJSON *cJSON_CreateString(const char *string);
+extern cJSON *cJSON_CreateArray(void);
+extern cJSON *cJSON_CreateObject(void);
+
+/* These utilities create an Array of count items. */
+extern cJSON *cJSON_CreateIntArray(const int *numbers, int count);
+extern cJSON *cJSON_CreateFloatArray(const float *numbers, int count);
+extern cJSON *cJSON_CreateDoubleArray(const double *numbers, int count);
+extern cJSON *cJSON_CreateStringArray(const char **strings, int count);
+
+/* Append item to the specified array/object. */
+extern void cJSON_AddItemToArray(cJSON *array, cJSON *item);
+extern void cJSON_AddItemToObject(cJSON *object, const char *string, cJSON *item);
+extern void cJSON_AddItemToObjectCS(cJSON *object, const char *string,
+                                    cJSON *item); /* Use this when string is definitely const (i.e. a literal,
+                                                     or as good as), and will definitely survive the cJSON
+                                                     object */
+/* Append reference to item to the specified array/object. Use this when you
+ * want to add an existing cJSON to a new cJSON, but don't want to corrupt your
+ * existing cJSON. */
+extern void cJSON_AddItemReferenceToArray(cJSON *array, cJSON *item);
+extern void cJSON_AddItemReferenceToObject(cJSON *object, const char *string, cJSON *item);
+
+/* Remove/Detach items from Arrays/Objects. */
+extern cJSON *cJSON_DetachItemFromArray(cJSON *array, int which);
+extern void cJSON_DeleteItemFromArray(cJSON *array, int which);
+extern cJSON *cJSON_DetachItemFromObject(cJSON *object, const char *string);
+extern void cJSON_DeleteItemFromObject(cJSON *object, const char *string);
+
+/* Update array items. */
+extern void cJSON_InsertItemInArray(cJSON *array, int which, cJSON *newitem); /* Shifts pre-existing items to the right. */
+extern void cJSON_ReplaceItemInArray(cJSON *array, int which, cJSON *newitem);
+extern void cJSON_ReplaceItemInObject(cJSON *object, const char *string, cJSON *newitem);
+
+/* Duplicate a cJSON item */
+extern cJSON *cJSON_Duplicate(cJSON *item, int recurse);
+/* Duplicate will create a new, identical cJSON item to the one you pass, in new
+memory that will
+need to be released. With recurse!=0, it will duplicate any children connected
+to the item.
+The item->next and ->prev pointers are always zero on return from Duplicate. */
+
+/* ParseWithOpts allows you to require (and check) that the JSON is null
+ * terminated, and to retrieve the pointer to the final byte parsed. */
+extern cJSON *cJSON_ParseWithOpts(const char *value, const char **return_parse_end, int require_null_terminated);
+
+extern void cJSON_Minify(char *json);
+
+/* Macros for creating things quickly. */
+#define cJSON_AddNullToObject(object, name) cJSON_AddItemToObject(object, name, cJSON_CreateNull())
+#define cJSON_AddTrueToObject(object, name) cJSON_AddItemToObject(object, name, cJSON_CreateTrue())
+#define cJSON_AddFalseToObject(object, name) cJSON_AddItemToObject(object, name, cJSON_CreateFalse())
+#define cJSON_AddBoolToObject(object, name, b) cJSON_AddItemToObject(object, name, cJSON_CreateBool(b))
+#define cJSON_AddNumberToObject(object, name, n) cJSON_AddItemToObject(object, name, cJSON_CreateNumber(n))
+#define cJSON_AddStringToObject(object, name, s) cJSON_AddItemToObject(object, name, cJSON_CreateString(s))
+
+/* When assigning an integer value, it needs to be propagated to valuedouble
+ * too. */
+#define cJSON_SetIntValue(object, val) ((object) ? (object)->valueint = (object)->valuedouble = (val) : (val))
+#define cJSON_SetNumberValue(object, val) ((object) ? (object)->valueint = (object)->valuedouble = (val) : (val))
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
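
A minimal usage sketch for the cJSON creation and append helpers declared above (illustration only, not part of the imported header; it assumes cJSON_Print and cJSON_Delete from earlier in this header and the default malloc/free hooks):

#include <stdlib.h>
#include "cJSON.h"

/* Builds {"name":"cobalt","version":20,"tags":["tv","web"]} with the helpers above. */
static void build_sample_object(void) {
    cJSON *root = cJSON_CreateObject();
    cJSON_AddStringToObject(root, "name", "cobalt");
    cJSON_AddNumberToObject(root, "version", 20);

    const char *tags[] = {"tv", "web"};
    cJSON_AddItemToObject(root, "tags", cJSON_CreateStringArray(tags, 2)); /* root now owns the array */

    char *text = cJSON_Print(root); /* heap-allocated string; release it with free() */
    free(text);
    cJSON_Delete(root);             /* frees root and every attached child */
}
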
diff --git a/src/third_party/vulkan-loader/src/loader/debug_utils.c b/src/third_party/vulkan-loader/src/loader/debug_utils.c
new file mode 100644
index 0000000..10701e7
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/debug_utils.c
@@ -0,0 +1,996 @@
+/*
+ * Copyright (c) 2015-2017 The Khronos Group Inc.
+ * Copyright (c) 2015-2017 Valve Corporation
+ * Copyright (c) 2015-2017 LunarG, Inc.
+ * Copyright (C) 2015-2016 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Jon Ashburn <jon@LunarG.com>
+ * Author: Mark Young <marky@lunarg.com>
+ *
+ */
+
+#ifndef _GNU_SOURCE
+#define _GNU_SOURCE
+#endif
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+#include <inttypes.h>
+#ifndef WIN32
+#include <signal.h>
+#else
+#endif
+#include "vk_loader_platform.h"
+#include "debug_utils.h"
+#include "vulkan/vk_layer.h"
+#include "vk_object_types.h"
+
+// VK_EXT_debug_utils related items
+
+VkResult util_CreateDebugUtilsMessenger(struct loader_instance *inst, const VkDebugUtilsMessengerCreateInfoEXT *pCreateInfo,
+                                        const VkAllocationCallbacks *pAllocator, VkDebugUtilsMessengerEXT messenger) {
+    VkLayerDbgFunctionNode *pNewDbgFuncNode = NULL;
+
+#if (DEBUG_DISABLE_APP_ALLOCATORS == 1)
+    {
+#else
+    if (pAllocator != NULL) {
+        pNewDbgFuncNode = (VkLayerDbgFunctionNode *)pAllocator->pfnAllocation(pAllocator->pUserData, sizeof(VkLayerDbgFunctionNode),
+                                                                              sizeof(int *), VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+    } else {
+#endif
+        pNewDbgFuncNode = (VkLayerDbgFunctionNode *)loader_instance_heap_alloc(inst, sizeof(VkLayerDbgFunctionNode),
+                                                                               VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+    }
+    if (!pNewDbgFuncNode) {
+        return VK_ERROR_OUT_OF_HOST_MEMORY;
+    }
+    memset(pNewDbgFuncNode, 0, sizeof(VkLayerDbgFunctionNode));
+
+    pNewDbgFuncNode->is_messenger = true;
+    pNewDbgFuncNode->messenger.messenger = messenger;
+    pNewDbgFuncNode->messenger.pfnUserCallback = pCreateInfo->pfnUserCallback;
+    pNewDbgFuncNode->messenger.messageSeverity = pCreateInfo->messageSeverity;
+    pNewDbgFuncNode->messenger.messageType = pCreateInfo->messageType;
+    pNewDbgFuncNode->pUserData = pCreateInfo->pUserData;
+    pNewDbgFuncNode->pNext = inst->DbgFunctionHead;
+    inst->DbgFunctionHead = pNewDbgFuncNode;
+
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL
+debug_utils_CreateDebugUtilsMessengerEXT(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT *pCreateInfo,
+                                         const VkAllocationCallbacks *pAllocator, VkDebugUtilsMessengerEXT *pMessenger) {
+    struct loader_instance *inst = loader_get_instance(instance);
+    loader_platform_thread_lock_mutex(&loader_lock);
+    VkResult result = inst->disp->layer_inst_disp.CreateDebugUtilsMessengerEXT(instance, pCreateInfo, pAllocator, pMessenger);
+    loader_platform_thread_unlock_mutex(&loader_lock);
+    return result;
+}
+
+VkBool32 util_SubmitDebugUtilsMessageEXT(const struct loader_instance *inst, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
+                                         VkDebugUtilsMessageTypeFlagsEXT messageTypes,
+                                         const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData) {
+    VkBool32 bail = false;
+
+    if (NULL != pCallbackData) {
+        VkLayerDbgFunctionNode *pTrav = inst->DbgFunctionHead;
+        VkDebugReportObjectTypeEXT object_type = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT;
+        VkDebugReportFlagsEXT object_flags = 0;
+        uint64_t object_handle = 0;
+
+        debug_utils_AnnotFlagsToReportFlags(messageSeverity, messageTypes, &object_flags);
+        if (0 < pCallbackData->objectCount) {
+            debug_utils_AnnotObjectToDebugReportObject(pCallbackData->pObjects, &object_type, &object_handle);
+        }
+
+        while (pTrav) {
+            if (pTrav->is_messenger && (pTrav->messenger.messageSeverity & messageSeverity) &&
+                (pTrav->messenger.messageType & messageTypes)) {
+                if (pTrav->messenger.pfnUserCallback(messageSeverity, messageTypes, pCallbackData, pTrav->pUserData)) {
+                    bail = true;
+                }
+            }
+            if (!pTrav->is_messenger && pTrav->report.msgFlags & object_flags) {
+                if (pTrav->report.pfnMsgCallback(object_flags, object_type, object_handle, 0, pCallbackData->messageIdNumber,
+                                                 pCallbackData->pMessageIdName, pCallbackData->pMessage, pTrav->pUserData)) {
+                    bail = true;
+                }
+            }
+
+            pTrav = pTrav->pNext;
+        }
+    }
+
+    return bail;
+}
+
+void util_DestroyDebugUtilsMessenger(struct loader_instance *inst, VkDebugUtilsMessengerEXT messenger,
+                                     const VkAllocationCallbacks *pAllocator) {
+    VkLayerDbgFunctionNode *pTrav = inst->DbgFunctionHead;
+    VkLayerDbgFunctionNode *pPrev = pTrav;
+
+    while (pTrav) {
+        if (pTrav->is_messenger && pTrav->messenger.messenger == messenger) {
+            pPrev->pNext = pTrav->pNext;
+            if (inst->DbgFunctionHead == pTrav) inst->DbgFunctionHead = pTrav->pNext;
+#if (DEBUG_DISABLE_APP_ALLOCATORS == 1)
+            {
+#else
+            if (pAllocator != NULL) {
+                pAllocator->pfnFree(pAllocator->pUserData, pTrav);
+            } else {
+#endif
+                loader_instance_heap_free(inst, pTrav);
+            }
+            break;
+        }
+        pPrev = pTrav;
+        pTrav = pTrav->pNext;
+    }
+}
+
+// This utility (used when processing a VkInstanceCreateInfo pNext chain) counts
+// any VkDebugUtilsMessengerCreateInfoEXT structs that it finds.  It then
+// allocates an array that can hold that many structs, as well as that many
+// VkDebugUtilsMessengerEXT handles.  Finally, it copies each
+// VkDebugUtilsMessengerCreateInfoEXT and initializes each handle.
+VkResult util_CopyDebugUtilsMessengerCreateInfos(const void *pChain, const VkAllocationCallbacks *pAllocator,
+                                                 uint32_t *num_messengers, VkDebugUtilsMessengerCreateInfoEXT **infos,
+                                                 VkDebugUtilsMessengerEXT **messengers) {
+    uint32_t n = *num_messengers = 0;
+    VkDebugUtilsMessengerCreateInfoEXT *pInfos = NULL;
+    VkDebugUtilsMessengerEXT *pMessengers = NULL;
+
+    const void *pNext = pChain;
+    while (pNext) {
+        // 1st, count the number of VkDebugUtilsMessengerCreateInfoEXT structs:
+        if (((VkDebugUtilsMessengerCreateInfoEXT *)pNext)->sType == VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT) {
+            n++;
+        }
+        pNext = (void *)((VkDebugUtilsMessengerCreateInfoEXT *)pNext)->pNext;
+    }
+    if (n == 0) {
+        return VK_SUCCESS;
+    }
+
+// 2nd, allocate memory for each VkDebugUtilsMessengerCreateInfoEXT:
+#if (DEBUG_DISABLE_APP_ALLOCATORS == 1)
+    {
+#else
+    if (pAllocator != NULL) {
+        pInfos = *infos = ((VkDebugUtilsMessengerCreateInfoEXT *)pAllocator->pfnAllocation(
+            pAllocator->pUserData, n * sizeof(VkDebugUtilsMessengerCreateInfoEXT), sizeof(void *),
+            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
+    } else {
+#endif
+        pInfos = *infos = ((VkDebugUtilsMessengerCreateInfoEXT *)malloc(n * sizeof(VkDebugUtilsMessengerCreateInfoEXT)));
+    }
+    if (!pInfos) {
+        return VK_ERROR_OUT_OF_HOST_MEMORY;
+    }
+// 3rd, allocate memory for a unique handle for each callback:
+#if (DEBUG_DISABLE_APP_ALLOCATORS == 1)
+    {
+#else
+    if (pAllocator != NULL) {
+        pMessengers = *messengers = ((VkDebugUtilsMessengerEXT *)pAllocator->pfnAllocation(
+            pAllocator->pUserData, n * sizeof(VkDebugUtilsMessengerEXT), sizeof(void *), VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
+        if (NULL == pMessengers) {
+            pAllocator->pfnFree(pAllocator->pUserData, pInfos);
+            return VK_ERROR_OUT_OF_HOST_MEMORY;
+        }
+    } else {
+#endif
+        pMessengers = *messengers = ((VkDebugUtilsMessengerEXT *)malloc(n * sizeof(VkDebugUtilsMessengerEXT)));
+        if (NULL == pMessengers) {
+            free(pInfos);
+            return VK_ERROR_OUT_OF_HOST_MEMORY;
+        }
+    }
+    // 4th, copy each VkDebugUtilsMessengerCreateInfoEXT for use by
+    // vkDestroyInstance, and assign a unique handle to each messenger (just
+    // use the address of the copied VkDebugUtilsMessengerCreateInfoEXT):
+    pNext = pChain;
+    while (pNext) {
+        if (((VkInstanceCreateInfo *)pNext)->sType == VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT) {
+            memcpy(pInfos, pNext, sizeof(VkDebugUtilsMessengerCreateInfoEXT));
+            *pMessengers++ = (VkDebugUtilsMessengerEXT)(uintptr_t)pInfos++;
+        }
+        pNext = (void *)((VkInstanceCreateInfo *)pNext)->pNext;
+    }
+
+    *num_messengers = n;
+    return VK_SUCCESS;
+}
+
+void util_FreeDebugUtilsMessengerCreateInfos(const VkAllocationCallbacks *pAllocator, VkDebugUtilsMessengerCreateInfoEXT *infos,
+                                             VkDebugUtilsMessengerEXT *messengers) {
+#if (DEBUG_DISABLE_APP_ALLOCATORS == 1)
+    {
+#else
+    if (pAllocator != NULL) {
+        pAllocator->pfnFree(pAllocator->pUserData, infos);
+        pAllocator->pfnFree(pAllocator->pUserData, messengers);
+    } else {
+#endif
+        free(infos);
+        free(messengers);
+    }
+}
+
+VkResult util_CreateDebugUtilsMessengers(struct loader_instance *inst, const VkAllocationCallbacks *pAllocator,
+                                         uint32_t num_messengers, VkDebugUtilsMessengerCreateInfoEXT *infos,
+                                         VkDebugUtilsMessengerEXT *messengers) {
+    VkResult rtn = VK_SUCCESS;
+    for (uint32_t i = 0; i < num_messengers; i++) {
+        rtn = util_CreateDebugUtilsMessenger(inst, &infos[i], pAllocator, messengers[i]);
+        if (rtn != VK_SUCCESS) {
+            for (uint32_t j = 0; j < i; j++) {
+                util_DestroyDebugUtilsMessenger(inst, messengers[j], pAllocator);
+            }
+            return rtn;
+        }
+    }
+    return rtn;
+}
+
+void util_DestroyDebugUtilsMessengers(struct loader_instance *inst, const VkAllocationCallbacks *pAllocator,
+                                      uint32_t num_messengers, VkDebugUtilsMessengerEXT *messengers) {
+    for (uint32_t i = 0; i < num_messengers; i++) {
+        util_DestroyDebugUtilsMessenger(inst, messengers[i], pAllocator);
+    }
+}
+
+static VKAPI_ATTR void VKAPI_CALL debug_utils_SubmitDebugUtilsMessageEXT(
+    VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes,
+    const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData) {
+    struct loader_instance *inst = loader_get_instance(instance);
+
+    inst->disp->layer_inst_disp.SubmitDebugUtilsMessageEXT(instance, messageSeverity, messageTypes, pCallbackData);
+}
+
+static VKAPI_ATTR void VKAPI_CALL debug_utils_DestroyDebugUtilsMessengerEXT(VkInstance instance, VkDebugUtilsMessengerEXT messenger,
+                                                                            const VkAllocationCallbacks *pAllocator) {
+    struct loader_instance *inst = loader_get_instance(instance);
+    loader_platform_thread_lock_mutex(&loader_lock);
+
+    inst->disp->layer_inst_disp.DestroyDebugUtilsMessengerEXT(instance, messenger, pAllocator);
+
+    loader_platform_thread_unlock_mutex(&loader_lock);
+}
+
+// This is the instance chain terminator function for CreateDebugUtilsMessenger
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateDebugUtilsMessengerEXT(VkInstance instance,
+                                                                       const VkDebugUtilsMessengerCreateInfoEXT *pCreateInfo,
+                                                                       const VkAllocationCallbacks *pAllocator,
+                                                                       VkDebugUtilsMessengerEXT *pMessenger) {
+    VkDebugUtilsMessengerEXT *icd_info = NULL;
+    const struct loader_icd_term *icd_term;
+    struct loader_instance *inst = (struct loader_instance *)instance;
+    VkResult res = VK_SUCCESS;
+    uint32_t storage_idx;
+    VkLayerDbgFunctionNode *pNewDbgFuncNode = NULL;
+
+#if (DEBUG_DISABLE_APP_ALLOCATORS == 1)
+    {
+#else
+    if (pAllocator != NULL) {
+        icd_info = ((VkDebugUtilsMessengerEXT *)pAllocator->pfnAllocation(pAllocator->pUserData,
+                                                                          inst->total_icd_count * sizeof(VkDebugUtilsMessengerEXT),
+                                                                          sizeof(void *), VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
+        if (icd_info) {
+            memset(icd_info, 0, inst->total_icd_count * sizeof(VkDebugUtilsMessengerEXT));
+        }
+    } else {
+#endif
+        icd_info = calloc(sizeof(VkDebugUtilsMessengerEXT), inst->total_icd_count);
+    }
+    if (!icd_info) {
+        res = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+
+    storage_idx = 0;
+    for (icd_term = inst->icd_terms; icd_term; icd_term = icd_term->next) {
+        if (!icd_term->dispatch.CreateDebugUtilsMessengerEXT) {
+            continue;
+        }
+
+        res = icd_term->dispatch.CreateDebugUtilsMessengerEXT(icd_term->instance, pCreateInfo, pAllocator, &icd_info[storage_idx]);
+
+        if (res != VK_SUCCESS) {
+            goto out;
+        }
+        storage_idx++;
+    }
+
+// Set up the debug utils messenger in the terminator since a layer may want
+// to grab the information itself (e.g. RenderDoc) and then return only a
+// subset of the messages back to the user callback.
+#if (DEBUG_DISABLE_APP_ALLOCATORS == 0)
+    if (pAllocator != NULL) {
+        pNewDbgFuncNode = (VkLayerDbgFunctionNode *)pAllocator->pfnAllocation(pAllocator->pUserData, sizeof(VkLayerDbgFunctionNode),
+                                                                              sizeof(int *), VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+    } else {
+#else
+    {
+#endif
+        pNewDbgFuncNode = (VkLayerDbgFunctionNode *)loader_instance_heap_alloc(inst, sizeof(VkLayerDbgFunctionNode),
+                                                                               VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+    }
+    if (!pNewDbgFuncNode) {
+        res = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+    memset(pNewDbgFuncNode, 0, sizeof(VkLayerDbgFunctionNode));
+
+    pNewDbgFuncNode->is_messenger = true;
+    pNewDbgFuncNode->messenger.pfnUserCallback = pCreateInfo->pfnUserCallback;
+    pNewDbgFuncNode->messenger.messageSeverity = pCreateInfo->messageSeverity;
+    pNewDbgFuncNode->messenger.messageType = pCreateInfo->messageType;
+    pNewDbgFuncNode->pUserData = pCreateInfo->pUserData;
+    pNewDbgFuncNode->pNext = inst->DbgFunctionHead;
+    inst->DbgFunctionHead = pNewDbgFuncNode;
+
+    *(VkDebugUtilsMessengerEXT **)pMessenger = icd_info;
+    pNewDbgFuncNode->messenger.messenger = *pMessenger;
+
+out:
+
+    // Roll back on errors
+    if (VK_SUCCESS != res) {
+        storage_idx = 0;
+        for (icd_term = inst->icd_terms; icd_term; icd_term = icd_term->next) {
+            if (NULL == icd_term->dispatch.DestroyDebugUtilsMessengerEXT) {
+                continue;
+            }
+
+            if (icd_info && icd_info[storage_idx]) {
+                icd_term->dispatch.DestroyDebugUtilsMessengerEXT(icd_term->instance, icd_info[storage_idx], pAllocator);
+            }
+            storage_idx++;
+        }
+
+#if (DEBUG_DISABLE_APP_ALLOCATORS == 1)
+        {
+#else
+        if (pAllocator != NULL) {
+            if (NULL != pNewDbgFuncNode) {
+                pAllocator->pfnFree(pAllocator->pUserData, pNewDbgFuncNode);
+            }
+            if (NULL != icd_info) {
+                pAllocator->pfnFree(pAllocator->pUserData, icd_info);
+            }
+        } else {
+#endif
+            if (NULL != pNewDbgFuncNode) {
+                free(pNewDbgFuncNode);
+            }
+            if (NULL != icd_info) {
+                free(icd_info);
+            }
+        }
+    }
+
+    return res;
+}
+
+// This is the instance chain terminator function for DestroyDebugUtilsMessenger
+VKAPI_ATTR void VKAPI_CALL terminator_DestroyDebugUtilsMessengerEXT(VkInstance instance, VkDebugUtilsMessengerEXT messenger,
+                                                                    const VkAllocationCallbacks *pAllocator) {
+    uint32_t storage_idx;
+    VkDebugUtilsMessengerEXT *icd_info;
+    const struct loader_icd_term *icd_term;
+
+    struct loader_instance *inst = (struct loader_instance *)instance;
+    icd_info = *(VkDebugUtilsMessengerEXT **)&messenger;
+    storage_idx = 0;
+    for (icd_term = inst->icd_terms; icd_term; icd_term = icd_term->next) {
+        if (NULL == icd_term->dispatch.DestroyDebugUtilsMessengerEXT) {
+            continue;
+        }
+
+        if (icd_info[storage_idx]) {
+            icd_term->dispatch.DestroyDebugUtilsMessengerEXT(icd_term->instance, icd_info[storage_idx], pAllocator);
+        }
+        storage_idx++;
+    }
+
+    util_DestroyDebugUtilsMessenger(inst, messenger, pAllocator);
+
+#if (DEBUG_DISABLE_APP_ALLOCATORS == 1)
+    {
+#else
+    if (pAllocator != NULL) {
+        pAllocator->pfnFree(pAllocator->pUserData, icd_info);
+    } else {
+#endif
+        free(icd_info);
+    }
+}
+
+// This is the instance chain terminator function for SubmitDebugUtilsMessageEXT
+VKAPI_ATTR void VKAPI_CALL terminator_SubmitDebugUtilsMessageEXT(VkInstance instance,
+                                                                 VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
+                                                                 VkDebugUtilsMessageTypeFlagsEXT messageTypes,
+                                                                 const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData) {
+    loader_platform_thread_lock_mutex(&loader_lock);
+    // NOTE: Just make the callback ourselves because there could be one or more ICDs that support this extension
+    //       and each one will trigger the callback to the user.  This would result in multiple callback triggers
+    //       per message.  Instead, if a message makes it up to here, then just trigger the message ourselves and
+    //       return.  This would still allow the ICDs to trigger their own messages, but won't get any external ones.
+    struct loader_instance *inst = (struct loader_instance *)instance;
+    util_SubmitDebugUtilsMessageEXT(inst, messageSeverity, messageTypes, pCallbackData);
+    loader_platform_thread_unlock_mutex(&loader_lock);
+}
+
+// VK_EXT_debug_report related items
+
+VkResult util_CreateDebugReportCallback(struct loader_instance *inst, VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
+                                        const VkAllocationCallbacks *pAllocator, VkDebugReportCallbackEXT callback) {
+    VkLayerDbgFunctionNode *pNewDbgFuncNode = NULL;
+
+#if (DEBUG_DISABLE_APP_ALLOCATORS == 1)
+    {
+#else
+    if (pAllocator != NULL) {
+        pNewDbgFuncNode = (VkLayerDbgFunctionNode *)pAllocator->pfnAllocation(pAllocator->pUserData, sizeof(VkLayerDbgFunctionNode),
+                                                                              sizeof(int *), VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+    } else {
+#endif
+        pNewDbgFuncNode = (VkLayerDbgFunctionNode *)loader_instance_heap_alloc(inst, sizeof(VkLayerDbgFunctionNode),
+                                                                               VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+    }
+    if (!pNewDbgFuncNode) {
+        return VK_ERROR_OUT_OF_HOST_MEMORY;
+    }
+    memset(pNewDbgFuncNode, 0, sizeof(VkLayerDbgFunctionNode));
+
+    pNewDbgFuncNode->is_messenger = false;
+    pNewDbgFuncNode->report.msgCallback = callback;
+    pNewDbgFuncNode->report.pfnMsgCallback = pCreateInfo->pfnCallback;
+    pNewDbgFuncNode->report.msgFlags = pCreateInfo->flags;
+    pNewDbgFuncNode->pUserData = pCreateInfo->pUserData;
+    pNewDbgFuncNode->pNext = inst->DbgFunctionHead;
+    inst->DbgFunctionHead = pNewDbgFuncNode;
+
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL
+debug_utils_CreateDebugReportCallbackEXT(VkInstance instance, const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
+                                         const VkAllocationCallbacks *pAllocator, VkDebugReportCallbackEXT *pCallback) {
+    struct loader_instance *inst = loader_get_instance(instance);
+    loader_platform_thread_lock_mutex(&loader_lock);
+    VkResult result = inst->disp->layer_inst_disp.CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pCallback);
+    loader_platform_thread_unlock_mutex(&loader_lock);
+    return result;
+}
+
+// Utility function to handle reporting
+VkBool32 util_DebugReportMessage(const struct loader_instance *inst, VkFlags msgFlags, VkDebugReportObjectTypeEXT objectType,
+                                 uint64_t srcObject, size_t location, int32_t msgCode, const char *pLayerPrefix, const char *pMsg) {
+    VkBool32 bail = false;
+    VkLayerDbgFunctionNode *pTrav = inst->DbgFunctionHead;
+    VkDebugUtilsMessageSeverityFlagBitsEXT severity;
+    VkDebugUtilsMessageTypeFlagsEXT types;
+    VkDebugUtilsMessengerCallbackDataEXT callback_data;
+    VkDebugUtilsObjectNameInfoEXT object_name;
+
+    debug_utils_ReportFlagsToAnnotFlags(msgFlags, false, &severity, &types);
+    debug_utils_ReportObjectToAnnotObject(objectType, srcObject, &object_name);
+
+    callback_data.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT;
+    callback_data.pNext = NULL;
+    callback_data.flags = 0;
+    callback_data.pMessageIdName = pLayerPrefix;
+    callback_data.messageIdNumber = msgCode;
+    callback_data.pMessage = pMsg;
+    callback_data.cmdBufLabelCount = 0;
+    callback_data.pCmdBufLabels = NULL;
+    callback_data.queueLabelCount = 0;
+    callback_data.pQueueLabels = NULL;
+    callback_data.objectCount = 1;
+    callback_data.pObjects = &object_name;
+
+    while (pTrav) {
+        if (!pTrav->is_messenger && pTrav->report.msgFlags & msgFlags) {
+            if (pTrav->report.pfnMsgCallback(msgFlags, objectType, srcObject, location, msgCode, pLayerPrefix, pMsg,
+                                             pTrav->pUserData)) {
+                bail = true;
+            }
+        }
+        if (pTrav->is_messenger && (pTrav->messenger.messageSeverity & severity) && (pTrav->messenger.messageType & types)) {
+            if (pTrav->messenger.pfnUserCallback(severity, types, &callback_data, pTrav->pUserData)) {
+                bail = true;
+            }
+        }
+
+        pTrav = pTrav->pNext;
+    }
+
+    return bail;
+}
+
+void util_DestroyDebugReportCallback(struct loader_instance *inst, VkDebugReportCallbackEXT callback,
+                                     const VkAllocationCallbacks *pAllocator) {
+    VkLayerDbgFunctionNode *pTrav = inst->DbgFunctionHead;
+    VkLayerDbgFunctionNode *pPrev = pTrav;
+
+    while (pTrav) {
+        if (!pTrav->is_messenger && pTrav->report.msgCallback == callback) {
+            pPrev->pNext = pTrav->pNext;
+            if (inst->DbgFunctionHead == pTrav) inst->DbgFunctionHead = pTrav->pNext;
+#if (DEBUG_DISABLE_APP_ALLOCATORS == 1)
+            {
+#else
+            if (pAllocator != NULL) {
+                pAllocator->pfnFree(pAllocator->pUserData, pTrav);
+            } else {
+#endif
+                loader_instance_heap_free(inst, pTrav);
+            }
+            break;
+        }
+        pPrev = pTrav;
+        pTrav = pTrav->pNext;
+    }
+}
+
+// This utility (used when processing a VkInstanceCreateInfo pNext chain) counts
+// any VkDebugReportCallbackCreateInfoEXT structs that it finds.  It then
+// allocates an array that can hold that many structs, as well as that many
+// VkDebugReportCallbackEXT handles.  Finally, it copies each
+// VkDebugReportCallbackCreateInfoEXT and initializes each handle.
+VkResult util_CopyDebugReportCreateInfos(const void *pChain, const VkAllocationCallbacks *pAllocator, uint32_t *num_callbacks,
+                                         VkDebugReportCallbackCreateInfoEXT **infos, VkDebugReportCallbackEXT **callbacks) {
+    uint32_t n = *num_callbacks = 0;
+    VkDebugReportCallbackCreateInfoEXT *pInfos = NULL;
+    VkDebugReportCallbackEXT *pCallbacks = NULL;
+
+    const void *pNext = pChain;
+    while (pNext) {
+        // 1st, count the number of VkDebugReportCallbackCreateInfoEXT structs:
+        if (((VkDebugReportCallbackCreateInfoEXT *)pNext)->sType == VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT) {
+            n++;
+        }
+        pNext = (void *)((VkDebugReportCallbackCreateInfoEXT *)pNext)->pNext;
+    }
+    if (n == 0) {
+        return VK_SUCCESS;
+    }
+
+// 2nd, allocate memory for each VkDebugReportCallbackCreateInfoEXT:
+#if (DEBUG_DISABLE_APP_ALLOCATORS == 1)
+    {
+#else
+    if (pAllocator != NULL) {
+        pInfos = *infos = ((VkDebugReportCallbackCreateInfoEXT *)pAllocator->pfnAllocation(
+            pAllocator->pUserData, n * sizeof(VkDebugReportCallbackCreateInfoEXT), sizeof(void *),
+            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
+    } else {
+#endif
+        pInfos = *infos = ((VkDebugReportCallbackCreateInfoEXT *)malloc(n * sizeof(VkDebugReportCallbackCreateInfoEXT)));
+    }
+    if (!pInfos) {
+        return VK_ERROR_OUT_OF_HOST_MEMORY;
+    }
+// 3rd, allocate memory for a unique handle for each callback:
+#if (DEBUG_DISABLE_APP_ALLOCATORS == 1)
+    {
+#else
+    if (pAllocator != NULL) {
+        pCallbacks = *callbacks = ((VkDebugReportCallbackEXT *)pAllocator->pfnAllocation(
+            pAllocator->pUserData, n * sizeof(VkDebugReportCallbackEXT), sizeof(void *), VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
+        if (!pCallbacks) {
+            pAllocator->pfnFree(pAllocator->pUserData, pInfos);
+            return VK_ERROR_OUT_OF_HOST_MEMORY;
+        }
+    } else {
+#endif
+        pCallbacks = *callbacks = ((VkDebugReportCallbackEXT *)malloc(n * sizeof(VkDebugReportCallbackEXT)));
+        if (!pCallbacks) {
+            free(pInfos);
+            return VK_ERROR_OUT_OF_HOST_MEMORY;
+        }
+    }
+    // 4th, copy each VkDebugReportCallbackCreateInfoEXT for use by
+    // vkDestroyInstance, and assign a unique handle to each callback (just
+    // use the address of the copied VkDebugReportCallbackCreateInfoEXT):
+    pNext = pChain;
+    while (pNext) {
+        if (((VkInstanceCreateInfo *)pNext)->sType == VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT) {
+            memcpy(pInfos, pNext, sizeof(VkDebugReportCallbackCreateInfoEXT));
+            *pCallbacks++ = (VkDebugReportCallbackEXT)(uintptr_t)pInfos++;
+        }
+        pNext = (void *)((VkInstanceCreateInfo *)pNext)->pNext;
+    }
+
+    *num_callbacks = n;
+    return VK_SUCCESS;
+}
+
+void util_FreeDebugReportCreateInfos(const VkAllocationCallbacks *pAllocator, VkDebugReportCallbackCreateInfoEXT *infos,
+                                     VkDebugReportCallbackEXT *callbacks) {
+#if (DEBUG_DISABLE_APP_ALLOCATORS == 1)
+    {
+#else
+    if (pAllocator != NULL) {
+        pAllocator->pfnFree(pAllocator->pUserData, infos);
+        pAllocator->pfnFree(pAllocator->pUserData, callbacks);
+    } else {
+#endif
+        free(infos);
+        free(callbacks);
+    }
+}
+
+VkResult util_CreateDebugReportCallbacks(struct loader_instance *inst, const VkAllocationCallbacks *pAllocator,
+                                         uint32_t num_callbacks, VkDebugReportCallbackCreateInfoEXT *infos,
+                                         VkDebugReportCallbackEXT *callbacks) {
+    VkResult rtn = VK_SUCCESS;
+    for (uint32_t i = 0; i < num_callbacks; i++) {
+        rtn = util_CreateDebugReportCallback(inst, &infos[i], pAllocator, callbacks[i]);
+        if (rtn != VK_SUCCESS) {
+            for (uint32_t j = 0; j < i; j++) {
+                util_DestroyDebugReportCallback(inst, callbacks[j], pAllocator);
+            }
+            return rtn;
+        }
+    }
+    return rtn;
+}
+
+void util_DestroyDebugReportCallbacks(struct loader_instance *inst, const VkAllocationCallbacks *pAllocator, uint32_t num_callbacks,
+                                      VkDebugReportCallbackEXT *callbacks) {
+    for (uint32_t i = 0; i < num_callbacks; i++) {
+        util_DestroyDebugReportCallback(inst, callbacks[i], pAllocator);
+    }
+}
+
+static VKAPI_ATTR void VKAPI_CALL debug_utils_DestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT callback,
+                                                                            const VkAllocationCallbacks *pAllocator) {
+    struct loader_instance *inst = loader_get_instance(instance);
+    loader_platform_thread_lock_mutex(&loader_lock);
+
+    inst->disp->layer_inst_disp.DestroyDebugReportCallbackEXT(instance, callback, pAllocator);
+
+    loader_platform_thread_unlock_mutex(&loader_lock);
+}
+
+static VKAPI_ATTR void VKAPI_CALL debug_utils_DebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags,
+                                                                    VkDebugReportObjectTypeEXT objType, uint64_t object,
+                                                                    size_t location, int32_t msgCode, const char *pLayerPrefix,
+                                                                    const char *pMsg) {
+    struct loader_instance *inst = loader_get_instance(instance);
+
+    inst->disp->layer_inst_disp.DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix, pMsg);
+}
+
+// This is the instance chain terminator function
+// for CreateDebugReportCallback
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateDebugReportCallbackEXT(VkInstance instance,
+                                                                       const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
+                                                                       const VkAllocationCallbacks *pAllocator,
+                                                                       VkDebugReportCallbackEXT *pCallback) {
+    VkDebugReportCallbackEXT *icd_info = NULL;
+    const struct loader_icd_term *icd_term;
+    struct loader_instance *inst = (struct loader_instance *)instance;
+    VkResult res = VK_SUCCESS;
+    uint32_t storage_idx;
+    VkLayerDbgFunctionNode *pNewDbgFuncNode = NULL;
+
+#if (DEBUG_DISABLE_APP_ALLOCATORS == 1)
+    {
+#else
+    if (pAllocator != NULL) {
+        icd_info = ((VkDebugReportCallbackEXT *)pAllocator->pfnAllocation(pAllocator->pUserData,
+                                                                          inst->total_icd_count * sizeof(VkDebugReportCallbackEXT),
+                                                                          sizeof(void *), VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
+        if (icd_info) {
+            memset(icd_info, 0, inst->total_icd_count * sizeof(VkDebugReportCallbackEXT));
+        }
+    } else {
+#endif
+        icd_info = calloc(sizeof(VkDebugReportCallbackEXT), inst->total_icd_count);
+    }
+    if (!icd_info) {
+        res = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+
+    storage_idx = 0;
+    for (icd_term = inst->icd_terms; icd_term; icd_term = icd_term->next) {
+        if (!icd_term->dispatch.CreateDebugReportCallbackEXT) {
+            continue;
+        }
+
+        res = icd_term->dispatch.CreateDebugReportCallbackEXT(icd_term->instance, pCreateInfo, pAllocator, &icd_info[storage_idx]);
+
+        if (res != VK_SUCCESS) {
+            goto out;
+        }
+        storage_idx++;
+    }
+
+// Set up the debug report callback in the terminator since a layer may want
+// to grab the information itself (e.g. RenderDoc) and then return only a
+// subset of the messages back to the user callback.
+#if (DEBUG_DISABLE_APP_ALLOCATORS == 0)
+    if (pAllocator != NULL) {
+        pNewDbgFuncNode = (VkLayerDbgFunctionNode *)pAllocator->pfnAllocation(pAllocator->pUserData, sizeof(VkLayerDbgFunctionNode),
+                                                                              sizeof(int *), VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+    } else {
+#else
+    {
+#endif
+        pNewDbgFuncNode = (VkLayerDbgFunctionNode *)loader_instance_heap_alloc(inst, sizeof(VkLayerDbgFunctionNode),
+                                                                               VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+    }
+    if (!pNewDbgFuncNode) {
+        res = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+    memset(pNewDbgFuncNode, 0, sizeof(VkLayerDbgFunctionNode));
+
+    pNewDbgFuncNode->is_messenger = false;
+    pNewDbgFuncNode->report.pfnMsgCallback = pCreateInfo->pfnCallback;
+    pNewDbgFuncNode->report.msgFlags = pCreateInfo->flags;
+    pNewDbgFuncNode->pUserData = pCreateInfo->pUserData;
+    pNewDbgFuncNode->pNext = inst->DbgFunctionHead;
+    inst->DbgFunctionHead = pNewDbgFuncNode;
+
+    *(VkDebugReportCallbackEXT **)pCallback = icd_info;
+    pNewDbgFuncNode->report.msgCallback = *pCallback;
+
+out:
+
+    // Roll back on errors
+    if (VK_SUCCESS != res) {
+        storage_idx = 0;
+        for (icd_term = inst->icd_terms; icd_term; icd_term = icd_term->next) {
+            if (NULL == icd_term->dispatch.DestroyDebugReportCallbackEXT) {
+                continue;
+            }
+
+            if (icd_info && icd_info[storage_idx]) {
+                icd_term->dispatch.DestroyDebugReportCallbackEXT(icd_term->instance, icd_info[storage_idx], pAllocator);
+            }
+            storage_idx++;
+        }
+
+#if (DEBUG_DISABLE_APP_ALLOCATORS == 1)
+        {
+#else
+        if (pAllocator != NULL) {
+            if (NULL != pNewDbgFuncNode) {
+                pAllocator->pfnFree(pAllocator->pUserData, pNewDbgFuncNode);
+            }
+            if (NULL != icd_info) {
+                pAllocator->pfnFree(pAllocator->pUserData, icd_info);
+            }
+        } else {
+#endif
+            if (NULL != pNewDbgFuncNode) {
+                free(pNewDbgFuncNode);
+            }
+            if (NULL != icd_info) {
+                free(icd_info);
+            }
+        }
+    }
+
+    return res;
+}
+
+// This is the instance chain terminator function for DestroyDebugReportCallback
+VKAPI_ATTR void VKAPI_CALL terminator_DestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT callback,
+                                                                    const VkAllocationCallbacks *pAllocator) {
+    uint32_t storage_idx;
+    VkDebugReportCallbackEXT *icd_info;
+    const struct loader_icd_term *icd_term;
+
+    struct loader_instance *inst = (struct loader_instance *)instance;
+    icd_info = *(VkDebugReportCallbackEXT **)&callback;
+    storage_idx = 0;
+    for (icd_term = inst->icd_terms; icd_term; icd_term = icd_term->next) {
+        if (NULL == icd_term->dispatch.DestroyDebugReportCallbackEXT) {
+            continue;
+        }
+
+        if (icd_info[storage_idx]) {
+            icd_term->dispatch.DestroyDebugReportCallbackEXT(icd_term->instance, icd_info[storage_idx], pAllocator);
+        }
+        storage_idx++;
+    }
+
+    util_DestroyDebugReportCallback(inst, callback, pAllocator);
+
+#if (DEBUG_DISABLE_APP_ALLOCATORS == 1)
+    {
+#else
+    if (pAllocator != NULL) {
+        pAllocator->pfnFree(pAllocator->pUserData, icd_info);
+    } else {
+#endif
+        free(icd_info);
+    }
+}
+
+// This is the instance chain terminator function for DebugReportMessage
+VKAPI_ATTR void VKAPI_CALL terminator_DebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags,
+                                                            VkDebugReportObjectTypeEXT objType, uint64_t object, size_t location,
+                                                            int32_t msgCode, const char *pLayerPrefix, const char *pMsg) {
+    const struct loader_icd_term *icd_term;
+
+    struct loader_instance *inst = (struct loader_instance *)instance;
+
+    loader_platform_thread_lock_mutex(&loader_lock);
+    for (icd_term = inst->icd_terms; icd_term; icd_term = icd_term->next) {
+        if (icd_term->dispatch.DebugReportMessageEXT != NULL) {
+            icd_term->dispatch.DebugReportMessageEXT(icd_term->instance, flags, objType, object, location, msgCode, pLayerPrefix,
+                                                     pMsg);
+        }
+    }
+
+    // Now that all ICDs have seen the message, call the necessary callbacks.  Ignoring "bail" return value
+    // as there is nothing to bail from at this point.
+
+    util_DebugReportMessage(inst, flags, objType, object, location, msgCode, pLayerPrefix, pMsg);
+
+    loader_platform_thread_unlock_mutex(&loader_lock);
+}
+
+// General utilities
+
+static const VkExtensionProperties debug_utils_extension_info[] = {
+    {VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION},
+    {VK_EXT_DEBUG_UTILS_EXTENSION_NAME, VK_EXT_DEBUG_UTILS_SPEC_VERSION},
+};
+
+void debug_utils_AddInstanceExtensions(const struct loader_instance *inst, struct loader_extension_list *ext_list) {
+    loader_add_to_ext_list(inst, ext_list, sizeof(debug_utils_extension_info) / sizeof(VkExtensionProperties),
+                           debug_utils_extension_info);
+}
+
+void debug_utils_CreateInstance(struct loader_instance *ptr_instance, const VkInstanceCreateInfo *pCreateInfo) {
+    ptr_instance->enabled_known_extensions.ext_debug_report = 0;
+    ptr_instance->enabled_known_extensions.ext_debug_utils = 0;
+
+    for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
+        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_EXT_DEBUG_REPORT_EXTENSION_NAME) == 0) {
+            ptr_instance->enabled_known_extensions.ext_debug_report = 1;
+        } else if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_EXT_DEBUG_UTILS_EXTENSION_NAME) == 0) {
+            ptr_instance->enabled_known_extensions.ext_debug_utils = 1;
+        }
+    }
+}
+
+bool debug_utils_InstanceGpa(struct loader_instance *ptr_instance, const char *name, void **addr) {
+    bool ret_type = false;
+
+    *addr = NULL;
+
+    if (!strcmp("vkCreateDebugReportCallbackEXT", name)) {
+        *addr = ptr_instance->enabled_known_extensions.ext_debug_report == 1 ? (void *)debug_utils_CreateDebugReportCallbackEXT : NULL;
+        ret_type = true;
+    } else if (!strcmp("vkDestroyDebugReportCallbackEXT", name)) {
+        *addr = ptr_instance->enabled_known_extensions.ext_debug_report == 1 ? (void *)debug_utils_DestroyDebugReportCallbackEXT : NULL;
+        ret_type = true;
+    } else if (!strcmp("vkDebugReportMessageEXT", name)) {
+        *addr = ptr_instance->enabled_known_extensions.ext_debug_report == 1 ? (void *)debug_utils_DebugReportMessageEXT : NULL;
+        return true;
+    }
+    if (!strcmp("vkCreateDebugUtilsMessengerEXT", name)) {
+        *addr = ptr_instance->enabled_known_extensions.ext_debug_utils == 1 ? (void *)debug_utils_CreateDebugUtilsMessengerEXT : NULL;
+        ret_type = true;
+    } else if (!strcmp("vkDestroyDebugUtilsMessengerEXT", name)) {
+        *addr = ptr_instance->enabled_known_extensions.ext_debug_utils == 1 ? (void *)debug_utils_DestroyDebugUtilsMessengerEXT : NULL;
+        ret_type = true;
+    } else if (!strcmp("vkSubmitDebugUtilsMessageEXT", name)) {
+        *addr = ptr_instance->enabled_known_extensions.ext_debug_utils == 1 ? (void *)debug_utils_SubmitDebugUtilsMessageEXT : NULL;
+        ret_type = true;
+    }
+
+    return ret_type;
+}
+
+bool debug_utils_ReportFlagsToAnnotFlags(VkDebugReportFlagsEXT dr_flags, bool default_flag_is_spec,
+                                         VkDebugUtilsMessageSeverityFlagBitsEXT *da_severity,
+                                         VkDebugUtilsMessageTypeFlagsEXT *da_type) {
+    bool type_set = false;
+    if (NULL == da_severity || NULL == da_type) {
+        return false;
+    }
+    *da_type = 0;
+    *da_severity = 0;
+
+    if ((dr_flags & VK_DEBUG_REPORT_INFORMATION_BIT_EXT) != 0) {
+        *da_severity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT;
+        *da_type |= VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT;
+        type_set = true;
+    } else if ((dr_flags & (VK_DEBUG_REPORT_WARNING_BIT_EXT | VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT)) != 0) {
+        *da_severity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT;
+    } else if ((dr_flags & VK_DEBUG_REPORT_ERROR_BIT_EXT) != 0) {
+        *da_severity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
+    } else if ((dr_flags & VK_DEBUG_REPORT_DEBUG_BIT_EXT) != 0) {
+        *da_severity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT;
+        *da_type |= VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT;
+        type_set = true;
+    }
+
+    if ((dr_flags & VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT) != 0) {
+        *da_type |= VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;
+    } else if (!type_set) {
+        if (default_flag_is_spec) {
+            *da_type |= VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT;
+        } else {
+            *da_type |= VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT;
+        }
+    }
+
+    return true;
+}
+
+bool debug_utils_AnnotFlagsToReportFlags(VkDebugUtilsMessageSeverityFlagBitsEXT da_severity,
+                                         VkDebugUtilsMessageTypeFlagsEXT da_type, VkDebugReportFlagsEXT *dr_flags) {
+    if (NULL == dr_flags) {
+        return false;
+    }
+
+    *dr_flags = 0;
+
+    if ((da_severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) != 0) {
+        *dr_flags |= VK_DEBUG_REPORT_ERROR_BIT_EXT;
+    } else if ((da_severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT) != 0) {
+        if ((da_type & VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT) != 0) {
+            *dr_flags |= VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT;
+        } else {
+            *dr_flags |= VK_DEBUG_REPORT_WARNING_BIT_EXT;
+        }
+    } else if ((da_severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT) != 0) {
+        *dr_flags |= VK_DEBUG_REPORT_INFORMATION_BIT_EXT;
+    } else if ((da_severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT) != 0) {
+        *dr_flags |= VK_DEBUG_REPORT_DEBUG_BIT_EXT;
+    }
+
+    return true;
+}
+
+bool debug_utils_ReportObjectToAnnotObject(VkDebugReportObjectTypeEXT dr_object_type, uint64_t object_handle,
+                                           VkDebugUtilsObjectNameInfoEXT *da_object_name_info) {
+    if (NULL == da_object_name_info) {
+        return false;
+    }
+    da_object_name_info->sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
+    da_object_name_info->pNext = NULL;
+    da_object_name_info->objectHandle = (uint64_t)(uintptr_t)object_handle;
+    da_object_name_info->pObjectName = NULL;
+    da_object_name_info->objectType = convertDebugReportObjectToCoreObject(dr_object_type);
+    return true;
+}
+
+bool debug_utils_AnnotObjectToDebugReportObject(const VkDebugUtilsObjectNameInfoEXT *da_object_name_info,
+                                                VkDebugReportObjectTypeEXT *dr_object_type, uint64_t *dr_object_handle) {
+    if (NULL == da_object_name_info || NULL == dr_object_type || NULL == dr_object_handle) {
+        return false;
+    }
+    *dr_object_type = convertCoreObjectToDebugReportObject(da_object_name_info->objectType);
+    *dr_object_handle = da_object_name_info->objectHandle;
+    return true;
+}
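
The pNext walk in util_CopyDebugUtilsMessengerCreateInfos above corresponds to the application-side pattern of chaining a VkDebugUtilsMessengerCreateInfoEXT onto VkInstanceCreateInfo so that messages emitted during vkCreateInstance reach the user callback. A sketch of that usage against the public Vulkan API (my_debug_callback and create_instance_with_messenger are illustrative names, not part of this patch):

#include <stdio.h>
#include <vulkan/vulkan.h>

static VKAPI_ATTR VkBool32 VKAPI_CALL my_debug_callback(
    VkDebugUtilsMessageSeverityFlagBitsEXT severity, VkDebugUtilsMessageTypeFlagsEXT types,
    const VkDebugUtilsMessengerCallbackDataEXT *data, void *user_data) {
    (void)severity; (void)types; (void)user_data;
    printf("[vulkan] %s\n", data->pMessage);
    return VK_FALSE;  /* VK_FALSE tells the loader not to "bail" out of the triggering call */
}

static VkResult create_instance_with_messenger(VkInstance *out_instance) {
    VkDebugUtilsMessengerCreateInfoEXT messenger_info = {0};
    messenger_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
    messenger_info.messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT |
                                     VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
    messenger_info.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT |
                                 VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT;
    messenger_info.pfnUserCallback = my_debug_callback;

    const char *extensions[] = {VK_EXT_DEBUG_UTILS_EXTENSION_NAME};

    VkInstanceCreateInfo instance_info = {0};
    instance_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
    instance_info.pNext = &messenger_info;  /* picked up by the loader's pNext walk above */
    instance_info.enabledExtensionCount = 1;
    instance_info.ppEnabledExtensionNames = extensions;

    return vkCreateInstance(&instance_info, NULL, out_instance);
}
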
diff --git a/src/third_party/vulkan-loader/src/loader/debug_utils.h b/src/third_party/vulkan-loader/src/loader/debug_utils.h
new file mode 100644
index 0000000..c33a6fc
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/debug_utils.h
@@ -0,0 +1,101 @@
+/*
+ * Copyright (c) 2015-2017 The Khronos Group Inc.
+ * Copyright (c) 2015-2017 Valve Corporation
+ * Copyright (c) 2015-2017 LunarG, Inc.
+ * Copyright (C) 2015-2016 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Jon Ashburn <jon@lunarg.com>
+ * Author: Mark Young <markyk@lunarg.com>
+ *
+ */
+
+#include "vk_loader_platform.h"
+#include "loader.h"
+
+// General utilities
+
+void debug_utils_AddInstanceExtensions(const struct loader_instance *inst, struct loader_extension_list *ext_list);
+void debug_utils_CreateInstance(struct loader_instance *ptr_instance, const VkInstanceCreateInfo *pCreateInfo);
+bool debug_utils_InstanceGpa(struct loader_instance *ptr_instance, const char *name, void **addr);
+bool debug_utils_ReportFlagsToAnnotFlags(VkDebugReportFlagsEXT dr_flags, bool default_flag_is_spec,
+                                         VkDebugUtilsMessageSeverityFlagBitsEXT *da_severity,
+                                         VkDebugUtilsMessageTypeFlagsEXT *da_type);
+bool debug_utils_AnnotFlagsToReportFlags(VkDebugUtilsMessageSeverityFlagBitsEXT da_severity,
+                                         VkDebugUtilsMessageTypeFlagsEXT da_type, VkDebugReportFlagsEXT *dr_flags);
+bool debug_utils_ReportObjectToAnnotObject(VkDebugReportObjectTypeEXT dr_object_type, uint64_t object_handle,
+                                           VkDebugUtilsObjectNameInfoEXT *da_object_name_info);
+bool debug_utils_AnnotObjectToDebugReportObject(const VkDebugUtilsObjectNameInfoEXT *da_object_name_info,
+                                                VkDebugReportObjectTypeEXT *dr_object_type, uint64_t *dr_object_handle);
+
+// VK_EXT_debug_utils related items
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateDebugUtilsMessengerEXT(VkInstance instance,
+                                                                       const VkDebugUtilsMessengerCreateInfoEXT *pCreateInfo,
+                                                                       const VkAllocationCallbacks *pAllocator,
+                                                                       VkDebugUtilsMessengerEXT *pMessenger);
+VKAPI_ATTR void VKAPI_CALL terminator_DestroyDebugUtilsMessengerEXT(VkInstance instance, VkDebugUtilsMessengerEXT messenger,
+                                                                    const VkAllocationCallbacks *pAllocator);
+VKAPI_ATTR void VKAPI_CALL terminator_SubmitDebugUtilsMessageEXT(VkInstance instance,
+                                                                 VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
+                                                                 VkDebugUtilsMessageTypeFlagsEXT messageTypes,
+                                                                 const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData);
+VkResult util_CreateDebugUtilsMessenger(struct loader_instance *inst, const VkDebugUtilsMessengerCreateInfoEXT *pCreateInfo,
+                                        const VkAllocationCallbacks *pAllocator, VkDebugUtilsMessengerEXT messenger);
+VkResult util_CreateDebugUtilsMessengers(struct loader_instance *inst, const VkAllocationCallbacks *pAllocator,
+                                         uint32_t num_messengers, VkDebugUtilsMessengerCreateInfoEXT *infos,
+                                         VkDebugUtilsMessengerEXT *messengers);
+VkBool32 util_SubmitDebugUtilsMessageEXT(const struct loader_instance *inst, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
+                                         VkDebugUtilsMessageTypeFlagsEXT messageTypes,
+                                         const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData);
+VkResult util_CopyDebugUtilsMessengerCreateInfos(const void *pChain, const VkAllocationCallbacks *pAllocator,
+                                                 uint32_t *num_messengers, VkDebugUtilsMessengerCreateInfoEXT **infos,
+                                                 VkDebugUtilsMessengerEXT **messengers);
+void util_DestroyDebugUtilsMessenger(struct loader_instance *inst, VkDebugUtilsMessengerEXT messenger,
+                                     const VkAllocationCallbacks *pAllocator);
+void util_DestroyDebugUtilsMessengers(struct loader_instance *inst, const VkAllocationCallbacks *pAllocator,
+                                      uint32_t num_messengers, VkDebugUtilsMessengerEXT *messengers);
+void util_FreeDebugUtilsMessengerCreateInfos(const VkAllocationCallbacks *pAllocator, VkDebugUtilsMessengerCreateInfoEXT *infos,
+                                             VkDebugUtilsMessengerEXT *messengers);
+
+// VK_EXT_debug_report related items
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateDebugReportCallbackEXT(VkInstance instance,
+                                                                       const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
+                                                                       const VkAllocationCallbacks *pAllocator,
+                                                                       VkDebugReportCallbackEXT *pCallback);
+
+VKAPI_ATTR void VKAPI_CALL terminator_DestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT callback,
+                                                                    const VkAllocationCallbacks *pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL terminator_DebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags,
+                                                            VkDebugReportObjectTypeEXT objType, uint64_t object, size_t location,
+                                                            int32_t msgCode, const char *pLayerPrefix, const char *pMsg);
+
+VkResult util_CreateDebugReportCallback(struct loader_instance *inst, VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
+                                        const VkAllocationCallbacks *pAllocator, VkDebugReportCallbackEXT callback);
+VkResult util_CreateDebugReportCallbacks(struct loader_instance *inst, const VkAllocationCallbacks *pAllocator,
+                                         uint32_t num_callbacks, VkDebugReportCallbackCreateInfoEXT *infos,
+                                         VkDebugReportCallbackEXT *callbacks);
+VkBool32 util_DebugReportMessage(const struct loader_instance *inst, VkFlags msgFlags, VkDebugReportObjectTypeEXT objectType,
+                                 uint64_t srcObject, size_t location, int32_t msgCode, const char *pLayerPrefix, const char *pMsg);
+VkResult util_CopyDebugReportCreateInfos(const void *pChain, const VkAllocationCallbacks *pAllocator, uint32_t *num_callbacks,
+                                         VkDebugReportCallbackCreateInfoEXT **infos, VkDebugReportCallbackEXT **callbacks);
+void util_DestroyDebugReportCallback(struct loader_instance *inst, VkDebugReportCallbackEXT callback,
+                                     const VkAllocationCallbacks *pAllocator);
+void util_DestroyDebugReportCallbacks(struct loader_instance *inst, const VkAllocationCallbacks *pAllocator, uint32_t num_callbacks,
+                                      VkDebugReportCallbackEXT *callbacks);
+void util_FreeDebugReportCreateInfos(const VkAllocationCallbacks *pAllocator, VkDebugReportCallbackCreateInfoEXT *infos,
+                                     VkDebugReportCallbackEXT *callbacks);
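
As a quick illustration of the severity/type translation helpers declared here (a sketch against the loader-internal API, compiled within the loader tree; expected values follow the mapping logic in debug_utils.c above):

#include <stdbool.h>
#include "debug_utils.h"

static void translate_warning_example(void) {
    VkDebugUtilsMessageSeverityFlagBitsEXT severity;
    VkDebugUtilsMessageTypeFlagsEXT types;
    VkDebugReportFlagsEXT report_flags;

    /* debug_report -> debug_utils: a plain WARNING becomes severity WARNING and,
       because default_flag_is_spec is true, type VALIDATION. */
    debug_utils_ReportFlagsToAnnotFlags(VK_DEBUG_REPORT_WARNING_BIT_EXT, true, &severity, &types);

    /* debug_utils -> debug_report: WARNING severity without the PERFORMANCE type
       maps straight back to VK_DEBUG_REPORT_WARNING_BIT_EXT. */
    debug_utils_AnnotFlagsToReportFlags(severity, types, &report_flags);
}
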
diff --git a/src/third_party/vulkan-loader/src/loader/dev_ext_trampoline.c b/src/third_party/vulkan-loader/src/loader/dev_ext_trampoline.c
new file mode 100644
index 0000000..55eee0c
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/dev_ext_trampoline.c
@@ -0,0 +1,538 @@
+/*
+ * Copyright (c) 2015-2016 The Khronos Group Inc.
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Jon Ashburn <jon@lunarg.com>
+ * Author: Lenny Komow <lenny@lunarg.com>
+ */
+
+#include "vk_loader_platform.h"
+#include "loader.h"
+#if defined(__GNUC__) && !defined(__clang__)
+#pragma GCC optimize(3)  // force gcc to use tail-calls
+#endif
+
+// Clang-format does not understand macros.
+// clang-format off
+
+VKAPI_ATTR void VKAPI_CALL vkdev_ext0(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext1(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext2(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext3(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext4(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext5(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext6(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext7(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext8(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext9(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext10(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext11(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext12(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext13(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext14(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext15(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext16(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext17(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext18(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext19(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext20(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext21(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext22(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext23(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext24(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext25(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext26(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext27(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext28(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext29(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext30(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext31(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext32(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext33(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext34(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext35(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext36(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext37(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext38(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext39(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext40(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext41(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext42(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext43(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext44(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext45(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext46(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext47(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext48(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext49(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext50(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext51(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext52(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext53(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext54(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext55(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext56(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext57(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext58(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext59(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext60(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext61(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext62(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext63(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext64(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext65(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext66(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext67(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext68(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext69(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext70(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext71(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext72(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext73(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext74(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext75(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext76(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext77(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext78(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext79(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext80(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext81(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext82(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext83(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext84(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext85(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext86(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext87(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext88(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext89(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext90(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext91(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext92(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext93(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext94(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext95(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext96(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext97(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext98(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext99(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext100(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext101(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext102(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext103(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext104(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext105(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext106(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext107(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext108(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext109(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext110(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext111(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext112(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext113(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext114(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext115(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext116(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext117(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext118(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext119(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext120(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext121(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext122(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext123(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext124(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext125(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext126(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext127(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext128(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext129(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext130(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext131(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext132(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext133(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext134(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext135(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext136(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext137(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext138(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext139(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext140(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext141(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext142(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext143(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext144(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext145(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext146(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext147(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext148(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext149(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext150(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext151(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext152(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext153(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext154(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext155(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext156(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext157(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext158(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext159(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext160(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext161(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext162(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext163(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext164(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext165(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext166(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext167(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext168(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext169(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext170(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext171(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext172(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext173(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext174(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext175(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext176(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext177(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext178(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext179(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext180(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext181(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext182(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext183(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext184(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext185(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext186(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext187(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext188(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext189(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext190(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext191(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext192(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext193(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext194(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext195(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext196(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext197(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext198(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext199(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext200(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext201(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext202(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext203(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext204(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext205(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext206(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext207(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext208(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext209(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext210(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext211(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext212(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext213(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext214(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext215(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext216(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext217(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext218(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext219(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext220(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext221(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext222(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext223(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext224(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext225(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext226(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext227(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext228(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext229(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext230(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext231(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext232(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext233(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext234(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext235(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext236(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext237(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext238(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext239(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext240(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext241(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext242(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext243(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext244(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext245(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext246(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext247(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext248(VkDevice device);
+VKAPI_ATTR void VKAPI_CALL vkdev_ext249(VkDevice device);
+
+void *loader_get_dev_ext_trampoline(uint32_t index) {
+    switch (index) {
+#define CASE_HANDLE(num) case num: return vkdev_ext##num
+        CASE_HANDLE(0);
+        CASE_HANDLE(1);
+        CASE_HANDLE(2);
+        CASE_HANDLE(3);
+        CASE_HANDLE(4);
+        CASE_HANDLE(5);
+        CASE_HANDLE(6);
+        CASE_HANDLE(7);
+        CASE_HANDLE(8);
+        CASE_HANDLE(9);
+        CASE_HANDLE(10);
+        CASE_HANDLE(11);
+        CASE_HANDLE(12);
+        CASE_HANDLE(13);
+        CASE_HANDLE(14);
+        CASE_HANDLE(15);
+        CASE_HANDLE(16);
+        CASE_HANDLE(17);
+        CASE_HANDLE(18);
+        CASE_HANDLE(19);
+        CASE_HANDLE(20);
+        CASE_HANDLE(21);
+        CASE_HANDLE(22);
+        CASE_HANDLE(23);
+        CASE_HANDLE(24);
+        CASE_HANDLE(25);
+        CASE_HANDLE(26);
+        CASE_HANDLE(27);
+        CASE_HANDLE(28);
+        CASE_HANDLE(29);
+        CASE_HANDLE(30);
+        CASE_HANDLE(31);
+        CASE_HANDLE(32);
+        CASE_HANDLE(33);
+        CASE_HANDLE(34);
+        CASE_HANDLE(35);
+        CASE_HANDLE(36);
+        CASE_HANDLE(37);
+        CASE_HANDLE(38);
+        CASE_HANDLE(39);
+        CASE_HANDLE(40);
+        CASE_HANDLE(41);
+        CASE_HANDLE(42);
+        CASE_HANDLE(43);
+        CASE_HANDLE(44);
+        CASE_HANDLE(45);
+        CASE_HANDLE(46);
+        CASE_HANDLE(47);
+        CASE_HANDLE(48);
+        CASE_HANDLE(49);
+        CASE_HANDLE(50);
+        CASE_HANDLE(51);
+        CASE_HANDLE(52);
+        CASE_HANDLE(53);
+        CASE_HANDLE(54);
+        CASE_HANDLE(55);
+        CASE_HANDLE(56);
+        CASE_HANDLE(57);
+        CASE_HANDLE(58);
+        CASE_HANDLE(59);
+        CASE_HANDLE(60);
+        CASE_HANDLE(61);
+        CASE_HANDLE(62);
+        CASE_HANDLE(63);
+        CASE_HANDLE(64);
+        CASE_HANDLE(65);
+        CASE_HANDLE(66);
+        CASE_HANDLE(67);
+        CASE_HANDLE(68);
+        CASE_HANDLE(69);
+        CASE_HANDLE(70);
+        CASE_HANDLE(71);
+        CASE_HANDLE(72);
+        CASE_HANDLE(73);
+        CASE_HANDLE(74);
+        CASE_HANDLE(75);
+        CASE_HANDLE(76);
+        CASE_HANDLE(77);
+        CASE_HANDLE(78);
+        CASE_HANDLE(79);
+        CASE_HANDLE(80);
+        CASE_HANDLE(81);
+        CASE_HANDLE(82);
+        CASE_HANDLE(83);
+        CASE_HANDLE(84);
+        CASE_HANDLE(85);
+        CASE_HANDLE(86);
+        CASE_HANDLE(87);
+        CASE_HANDLE(88);
+        CASE_HANDLE(89);
+        CASE_HANDLE(90);
+        CASE_HANDLE(91);
+        CASE_HANDLE(92);
+        CASE_HANDLE(93);
+        CASE_HANDLE(94);
+        CASE_HANDLE(95);
+        CASE_HANDLE(96);
+        CASE_HANDLE(97);
+        CASE_HANDLE(98);
+        CASE_HANDLE(99);
+        CASE_HANDLE(100);
+        CASE_HANDLE(101);
+        CASE_HANDLE(102);
+        CASE_HANDLE(103);
+        CASE_HANDLE(104);
+        CASE_HANDLE(105);
+        CASE_HANDLE(106);
+        CASE_HANDLE(107);
+        CASE_HANDLE(108);
+        CASE_HANDLE(109);
+        CASE_HANDLE(110);
+        CASE_HANDLE(111);
+        CASE_HANDLE(112);
+        CASE_HANDLE(113);
+        CASE_HANDLE(114);
+        CASE_HANDLE(115);
+        CASE_HANDLE(116);
+        CASE_HANDLE(117);
+        CASE_HANDLE(118);
+        CASE_HANDLE(119);
+        CASE_HANDLE(120);
+        CASE_HANDLE(121);
+        CASE_HANDLE(122);
+        CASE_HANDLE(123);
+        CASE_HANDLE(124);
+        CASE_HANDLE(125);
+        CASE_HANDLE(126);
+        CASE_HANDLE(127);
+        CASE_HANDLE(128);
+        CASE_HANDLE(129);
+        CASE_HANDLE(130);
+        CASE_HANDLE(131);
+        CASE_HANDLE(132);
+        CASE_HANDLE(133);
+        CASE_HANDLE(134);
+        CASE_HANDLE(135);
+        CASE_HANDLE(136);
+        CASE_HANDLE(137);
+        CASE_HANDLE(138);
+        CASE_HANDLE(139);
+        CASE_HANDLE(140);
+        CASE_HANDLE(141);
+        CASE_HANDLE(142);
+        CASE_HANDLE(143);
+        CASE_HANDLE(144);
+        CASE_HANDLE(145);
+        CASE_HANDLE(146);
+        CASE_HANDLE(147);
+        CASE_HANDLE(148);
+        CASE_HANDLE(149);
+        CASE_HANDLE(150);
+        CASE_HANDLE(151);
+        CASE_HANDLE(152);
+        CASE_HANDLE(153);
+        CASE_HANDLE(154);
+        CASE_HANDLE(155);
+        CASE_HANDLE(156);
+        CASE_HANDLE(157);
+        CASE_HANDLE(158);
+        CASE_HANDLE(159);
+        CASE_HANDLE(160);
+        CASE_HANDLE(161);
+        CASE_HANDLE(162);
+        CASE_HANDLE(163);
+        CASE_HANDLE(164);
+        CASE_HANDLE(165);
+        CASE_HANDLE(166);
+        CASE_HANDLE(167);
+        CASE_HANDLE(168);
+        CASE_HANDLE(169);
+        CASE_HANDLE(170);
+        CASE_HANDLE(171);
+        CASE_HANDLE(172);
+        CASE_HANDLE(173);
+        CASE_HANDLE(174);
+        CASE_HANDLE(175);
+        CASE_HANDLE(176);
+        CASE_HANDLE(177);
+        CASE_HANDLE(178);
+        CASE_HANDLE(179);
+        CASE_HANDLE(180);
+        CASE_HANDLE(181);
+        CASE_HANDLE(182);
+        CASE_HANDLE(183);
+        CASE_HANDLE(184);
+        CASE_HANDLE(185);
+        CASE_HANDLE(186);
+        CASE_HANDLE(187);
+        CASE_HANDLE(188);
+        CASE_HANDLE(189);
+        CASE_HANDLE(190);
+        CASE_HANDLE(191);
+        CASE_HANDLE(192);
+        CASE_HANDLE(193);
+        CASE_HANDLE(194);
+        CASE_HANDLE(195);
+        CASE_HANDLE(196);
+        CASE_HANDLE(197);
+        CASE_HANDLE(198);
+        CASE_HANDLE(199);
+        CASE_HANDLE(200);
+        CASE_HANDLE(201);
+        CASE_HANDLE(202);
+        CASE_HANDLE(203);
+        CASE_HANDLE(204);
+        CASE_HANDLE(205);
+        CASE_HANDLE(206);
+        CASE_HANDLE(207);
+        CASE_HANDLE(208);
+        CASE_HANDLE(209);
+        CASE_HANDLE(210);
+        CASE_HANDLE(211);
+        CASE_HANDLE(212);
+        CASE_HANDLE(213);
+        CASE_HANDLE(214);
+        CASE_HANDLE(215);
+        CASE_HANDLE(216);
+        CASE_HANDLE(217);
+        CASE_HANDLE(218);
+        CASE_HANDLE(219);
+        CASE_HANDLE(220);
+        CASE_HANDLE(221);
+        CASE_HANDLE(222);
+        CASE_HANDLE(223);
+        CASE_HANDLE(224);
+        CASE_HANDLE(225);
+        CASE_HANDLE(226);
+        CASE_HANDLE(227);
+        CASE_HANDLE(228);
+        CASE_HANDLE(229);
+        CASE_HANDLE(230);
+        CASE_HANDLE(231);
+        CASE_HANDLE(232);
+        CASE_HANDLE(233);
+        CASE_HANDLE(234);
+        CASE_HANDLE(235);
+        CASE_HANDLE(236);
+        CASE_HANDLE(237);
+        CASE_HANDLE(238);
+        CASE_HANDLE(239);
+        CASE_HANDLE(240);
+        CASE_HANDLE(241);
+        CASE_HANDLE(242);
+        CASE_HANDLE(243);
+        CASE_HANDLE(244);
+        CASE_HANDLE(245);
+        CASE_HANDLE(246);
+        CASE_HANDLE(247);
+        CASE_HANDLE(248);
+        CASE_HANDLE(249);
+    }
+
+    return NULL;
+}
diff --git a/src/third_party/vulkan-loader/src/loader/dirent_on_windows.c b/src/third_party/vulkan-loader/src/loader/dirent_on_windows.c
new file mode 100644
index 0000000..16318cc
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/dirent_on_windows.c
@@ -0,0 +1,128 @@
+/*
+
+    Implementation of POSIX directory browsing functions and types for Win32.
+
+    Author:  Kevlin Henney (kevlin@acm.org, kevlin@curbralan.com)
+    History: Created March 1997. Updated June 2003 and July 2012.
+    Rights:  See end of file.
+
+*/
+#include "dirent_on_windows.h"
+#include <errno.h>
+#include <io.h> /* _findfirst and _findnext set errno iff they return -1 */
+#include <stdlib.h>
+#include <string.h>
+#include "vk_loader_platform.h"
+#include "loader.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef ptrdiff_t handle_type; /* C99's intptr_t not sufficiently portable */
+
+struct DIR {
+    handle_type handle; /* -1 for failed rewind */
+    struct _finddata_t info;
+    struct dirent result; /* d_name null iff first time */
+    char *name;           /* null-terminated char string */
+};
+
+DIR *opendir(const char *name) {
+    DIR *dir = 0;
+
+    if (name && name[0]) {
+        size_t base_length = strlen(name);
+        const char *all = /* search pattern must end with suitable wildcard */
+            strchr("/\\", name[base_length - 1]) ? "*" : "/*";
+
+        if ((dir = (DIR *)loader_instance_tls_heap_alloc(sizeof *dir)) != 0 &&
+            (dir->name = (char *)loader_instance_tls_heap_alloc(base_length + strlen(all) + 1)) != 0) {
+            strcat(strcpy(dir->name, name), all);
+
+            if ((dir->handle = (handle_type)_findfirst(dir->name, &dir->info)) != -1) {
+                dir->result.d_name = 0;
+            } else /* rollback */
+            {
+                loader_instance_tls_heap_free(dir->name);
+                loader_instance_tls_heap_free(dir);
+                dir = 0;
+            }
+        } else /* rollback */
+        {
+            loader_instance_tls_heap_free(dir);
+            dir = 0;
+            errno = ENOMEM;
+        }
+    } else {
+        errno = EINVAL;
+    }
+
+    return dir;
+}
+
+int closedir(DIR *dir) {
+    int result = -1;
+
+    if (dir) {
+        if (dir->handle != -1) {
+            result = _findclose(dir->handle);
+        }
+
+        loader_instance_tls_heap_free(dir->name);
+        loader_instance_tls_heap_free(dir);
+    }
+
+    if (result == -1) /* map all errors to EBADF */
+    {
+        errno = EBADF;
+    }
+
+    return result;
+}
+
+struct dirent *readdir(DIR *dir) {
+    struct dirent *result = 0;
+
+    if (dir && dir->handle != -1) {
+        if (!dir->result.d_name || _findnext(dir->handle, &dir->info) != -1) {
+            result = &dir->result;
+            result->d_name = dir->info.name;
+        }
+    } else {
+        errno = EBADF;
+    }
+
+    return result;
+}
+
+void rewinddir(DIR *dir) {
+    if (dir && dir->handle != -1) {
+        _findclose(dir->handle);
+        dir->handle = (handle_type)_findfirst(dir->name, &dir->info);
+        dir->result.d_name = 0;
+    } else {
+        errno = EBADF;
+    }
+}
+
+#ifdef __cplusplus
+}
+#endif
+
+/*
+
+    Copyright Kevlin Henney, 1997, 2003, 2012. All rights reserved.
+    Copyright (c) 2015 The Khronos Group Inc.
+    Copyright (c) 2015 Valve Corporation
+    Copyright (c) 2015 LunarG, Inc.
+    Permission to use, copy, modify, and distribute this software and its
+    documentation for any purpose is hereby granted without fee, provided
+    that this copyright and permissions notice appear in all copies and
+    derivatives.
+
+    This software is supplied "as is" without express or implied warranty.
+
+    But that said, if there are any problems please get in touch.
+
+*/
diff --git a/src/third_party/vulkan-loader/src/loader/dirent_on_windows.h b/src/third_party/vulkan-loader/src/loader/dirent_on_windows.h
new file mode 100644
index 0000000..8600f8e
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/dirent_on_windows.h
@@ -0,0 +1,51 @@
+#ifndef DIRENT_INCLUDED
+#define DIRENT_INCLUDED
+
+/*
+
+    Declaration of POSIX directory browsing functions and types for Win32.
+
+    Author:  Kevlin Henney (kevlin@acm.org, kevlin@curbralan.com)
+    History: Created March 1997. Updated June 2003.
+    Rights:  See end of file.
+
+*/
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef struct DIR DIR;
+
+struct dirent {
+    char *d_name;
+};
+
+DIR *opendir(const char *);
+int closedir(DIR *);
+struct dirent *readdir(DIR *);
+void rewinddir(DIR *);
+
+/*
+
+    Copyright Kevlin Henney, 1997, 2003. All rights reserved.
+    Copyright (c) 2015 The Khronos Group Inc.
+    Copyright (c) 2015 Valve Corporation
+    Copyright (c) 2015 LunarG, Inc.
+
+    Permission to use, copy, modify, and distribute this software and its
+    documentation for any purpose is hereby granted without fee, provided
+    that this copyright and permissions notice appear in all copies and
+    derivatives.
+
+    This software is supplied "as is" without express or implied warranty.
+
+    But that said, if there are any problems please get in touch.
+
+*/
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/src/third_party/vulkan-loader/src/loader/dxgi_loader.c b/src/third_party/vulkan-loader/src/loader/dxgi_loader.c
new file mode 100644
index 0000000..c2a3fa5
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/dxgi_loader.c
@@ -0,0 +1,23 @@
+#include "dxgi_loader.h"
+
+#include <strsafe.h>
+
+static HMODULE load_dxgi_module() {
+    TCHAR systemPath[MAX_PATH] = "";
+    GetSystemDirectory(systemPath, MAX_PATH);
+    StringCchCat(systemPath, MAX_PATH, TEXT("\\dxgi.dll"));
+
+    return LoadLibrary(systemPath);
+}
+
+typedef HRESULT (APIENTRY *PFN_CreateDXGIFactory1)(REFIID riid, void **ppFactory);
+
+HRESULT dyn_CreateDXGIFactory1(REFIID riid, void **ppFactory) {
+    PFN_CreateDXGIFactory1 fpCreateDXGIFactory1 =
+        (PFN_CreateDXGIFactory1)GetProcAddress(load_dxgi_module(), "CreateDXGIFactory1");
+
+    if (fpCreateDXGIFactory1 != NULL)
+        return fpCreateDXGIFactory1(riid, ppFactory);
+
+    return DXGI_ERROR_NOT_FOUND;
+}
\ No newline at end of file
diff --git a/src/third_party/vulkan-loader/src/loader/dxgi_loader.h b/src/third_party/vulkan-loader/src/loader/dxgi_loader.h
new file mode 100644
index 0000000..00daf08
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/dxgi_loader.h
@@ -0,0 +1,8 @@
+#ifndef DXGI_LOADER_H
+#define DXGI_LOADER_H
+
+#include <dxgi1_2.h>
+
+HRESULT dyn_CreateDXGIFactory1(REFIID riid, void **ppFactory);
+
+#endif
\ No newline at end of file
diff --git a/src/third_party/vulkan-loader/src/loader/extension_manual.c b/src/third_party/vulkan-loader/src/loader/extension_manual.c
new file mode 100644
index 0000000..490496d
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/extension_manual.c
@@ -0,0 +1,443 @@
+/*
+ * Copyright (c) 2015-2017 The Khronos Group Inc.
+ * Copyright (c) 2015-2017 Valve Corporation
+ * Copyright (c) 2015-2017 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Young <marky@lunarg.com>
+ * Author: Lenny Komow <lenny@lunarg.com>
+ */
+
+#ifndef _GNU_SOURCE
+#define _GNU_SOURCE
+#endif
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include "vk_loader_platform.h"
+#include "loader.h"
+#include "vk_loader_extensions.h"
+#include <vulkan/vk_icd.h>
+#include "wsi.h"
+#include "debug_utils.h"
+
+// ---- Manually added trampoline/terminator functions
+
+// These functions, for whatever reason, require more complex changes than
+// can easily be automatically generated.
+
+// ---- VK_KHR_device_group extension trampoline/terminators
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilities2KHR(VkPhysicalDevice physicalDevice,
+                                                                        const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
+                                                                        VkSurfaceCapabilities2KHR *pSurfaceCapabilities) {
+    const VkLayerInstanceDispatchTable *disp;
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    return disp->GetPhysicalDeviceSurfaceCapabilities2KHR(unwrapped_phys_dev, pSurfaceInfo, pSurfaceCapabilities);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceSurfaceCapabilities2KHR(
+    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
+    VkSurfaceCapabilities2KHR *pSurfaceCapabilities) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+
+    VkIcdSurface *icd_surface = (VkIcdSurface *)(pSurfaceInfo->surface);
+    uint8_t icd_index = phys_dev_term->icd_index;
+
+    if (icd_term->dispatch.GetPhysicalDeviceSurfaceCapabilities2KHR != NULL) {
+        VkBaseOutStructure *pNext = (VkBaseOutStructure *)pSurfaceCapabilities->pNext;
+        while (pNext != NULL) {
+            if ((int)pNext->sType == VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR) {
+                // Not all ICDs may be supporting VK_KHR_surface_protected_capabilities
+                // Initialize VkSurfaceProtectedCapabilitiesKHR.supportsProtected to false and
+                // if an ICD supports protected surfaces, it will reset it to true accordingly.
+                ((VkSurfaceProtectedCapabilitiesKHR *)pNext)->supportsProtected = VK_FALSE;
+            }
+            pNext = (VkBaseOutStructure *)pNext->pNext;
+        }
+
+        // Pass the call to the driver, possibly unwrapping the ICD surface
+        if (icd_surface->real_icd_surfaces != NULL && (void *)icd_surface->real_icd_surfaces[icd_index] != NULL) {
+            VkPhysicalDeviceSurfaceInfo2KHR info_copy = *pSurfaceInfo;
+            info_copy.surface = icd_surface->real_icd_surfaces[icd_index];
+            return icd_term->dispatch.GetPhysicalDeviceSurfaceCapabilities2KHR(phys_dev_term->phys_dev, &info_copy,
+                                                                               pSurfaceCapabilities);
+        } else {
+            return icd_term->dispatch.GetPhysicalDeviceSurfaceCapabilities2KHR(phys_dev_term->phys_dev, pSurfaceInfo,
+                                                                               pSurfaceCapabilities);
+        }
+    } else {
+        // Emulate the call
+        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                   "vkGetPhysicalDeviceSurfaceCapabilities2KHR: Emulating call in ICD \"%s\" using "
+                   "vkGetPhysicalDeviceSurfaceCapabilitiesKHR",
+                   icd_term->scanned_icd->lib_name);
+
+        if (pSurfaceInfo->pNext != NULL) {
+            loader_log(icd_term->this_instance, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "vkGetPhysicalDeviceSurfaceCapabilities2KHR: Emulation found unrecognized structure type in "
+                       "pSurfaceInfo->pNext - this struct will be ignored");
+        }
+
+        // Write to the VkSurfaceCapabilities2KHR struct
+        VkSurfaceKHR surface = pSurfaceInfo->surface;
+        if (icd_surface->real_icd_surfaces != NULL && (void *)icd_surface->real_icd_surfaces[icd_index] != NULL) {
+            surface = icd_surface->real_icd_surfaces[icd_index];
+        }
+        VkResult res = icd_term->dispatch.GetPhysicalDeviceSurfaceCapabilitiesKHR(phys_dev_term->phys_dev, surface,
+                                                                                  &pSurfaceCapabilities->surfaceCapabilities);
+
+        if (pSurfaceCapabilities->pNext != NULL) {
+            loader_log(icd_term->this_instance, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "vkGetPhysicalDeviceSurfaceCapabilities2KHR: Emulation found unrecognized structure type in "
+                       "pSurfaceCapabilities->pNext - this struct will be ignored");
+        }
+        return res;
+    }
+}
+
+// ---- VK_NV_external_memory_capabilities extension trampoline/terminators
+
+VKAPI_ATTR VkResult VKAPI_CALL
+GetPhysicalDeviceExternalImageFormatPropertiesNV(
+    VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type,
+    VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags,
+    VkExternalMemoryHandleTypeFlagsNV externalHandleType,
+    VkExternalImageFormatPropertiesNV *pExternalImageFormatProperties) {
+    const VkLayerInstanceDispatchTable *disp;
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+
+    return disp->GetPhysicalDeviceExternalImageFormatPropertiesNV(
+        unwrapped_phys_dev, format, type, tiling, usage, flags,
+        externalHandleType, pExternalImageFormatProperties);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL
+terminator_GetPhysicalDeviceExternalImageFormatPropertiesNV(
+    VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type,
+    VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags,
+    VkExternalMemoryHandleTypeFlagsNV externalHandleType,
+    VkExternalImageFormatPropertiesNV *pExternalImageFormatProperties) {
+    struct loader_physical_device_term *phys_dev_term =
+        (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+
+    if (!icd_term->dispatch.GetPhysicalDeviceExternalImageFormatPropertiesNV) {
+        if (externalHandleType) {
+            return VK_ERROR_FORMAT_NOT_SUPPORTED;
+        }
+
+        if (!icd_term->dispatch.GetPhysicalDeviceImageFormatProperties) {
+            return VK_ERROR_INITIALIZATION_FAILED;
+        }
+
+        pExternalImageFormatProperties->externalMemoryFeatures = 0;
+        pExternalImageFormatProperties->exportFromImportedHandleTypes = 0;
+        pExternalImageFormatProperties->compatibleHandleTypes = 0;
+
+        return icd_term->dispatch.GetPhysicalDeviceImageFormatProperties(
+            phys_dev_term->phys_dev, format, type, tiling, usage, flags,
+            &pExternalImageFormatProperties->imageFormatProperties);
+    }
+
+    return icd_term->dispatch.GetPhysicalDeviceExternalImageFormatPropertiesNV(
+        phys_dev_term->phys_dev, format, type, tiling, usage, flags,
+        externalHandleType, pExternalImageFormatProperties);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
+                                                                   const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
+                                                                   uint32_t *pSurfaceFormatCount,
+                                                                   VkSurfaceFormat2KHR *pSurfaceFormats) {
+    const VkLayerInstanceDispatchTable *disp;
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    return disp->GetPhysicalDeviceSurfaceFormats2KHR(unwrapped_phys_dev, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
+                                                                              const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
+                                                                              uint32_t *pSurfaceFormatCount,
+                                                                              VkSurfaceFormat2KHR *pSurfaceFormats) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+
+    VkIcdSurface *icd_surface = (VkIcdSurface *)(pSurfaceInfo->surface);
+    uint8_t icd_index = phys_dev_term->icd_index;
+
+    if (icd_term->dispatch.GetPhysicalDeviceSurfaceFormats2KHR != NULL) {
+        // Pass the call to the driver, possibly unwrapping the ICD surface
+        if (icd_surface->real_icd_surfaces != NULL && (void *)icd_surface->real_icd_surfaces[icd_index] != NULL) {
+            VkPhysicalDeviceSurfaceInfo2KHR info_copy = *pSurfaceInfo;
+            info_copy.surface = icd_surface->real_icd_surfaces[icd_index];
+            return icd_term->dispatch.GetPhysicalDeviceSurfaceFormats2KHR(phys_dev_term->phys_dev, &info_copy, pSurfaceFormatCount,
+                                                                          pSurfaceFormats);
+        } else {
+            return icd_term->dispatch.GetPhysicalDeviceSurfaceFormats2KHR(phys_dev_term->phys_dev, pSurfaceInfo,
+                                                                          pSurfaceFormatCount, pSurfaceFormats);
+        }
+    } else {
+        // Emulate the call
+        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                   "vkGetPhysicalDeviceSurfaceFormats2KHR: Emulating call in ICD \"%s\" using vkGetPhysicalDeviceSurfaceFormatsKHR",
+                   icd_term->scanned_icd->lib_name);
+
+        if (pSurfaceInfo->pNext != NULL) {
+            loader_log(icd_term->this_instance, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "vkGetPhysicalDeviceSurfaceFormats2KHR: Emulation found unrecognized structure type in pSurfaceInfo->pNext "
+                       "- this struct will be ignored");
+        }
+
+        VkSurfaceKHR surface = pSurfaceInfo->surface;
+        if (icd_surface->real_icd_surfaces != NULL && (void *)icd_surface->real_icd_surfaces[icd_index] != NULL) {
+            surface = icd_surface->real_icd_surfaces[icd_index];
+        }
+
+        if (*pSurfaceFormatCount == 0 || pSurfaceFormats == NULL) {
+            // Write to pSurfaceFormatCount
+            return icd_term->dispatch.GetPhysicalDeviceSurfaceFormatsKHR(phys_dev_term->phys_dev, surface, pSurfaceFormatCount,
+                                                                         NULL);
+        } else {
+            // Allocate a temporary array for the output of the old function
+            VkSurfaceFormatKHR *formats = loader_stack_alloc(*pSurfaceFormatCount * sizeof(VkSurfaceFormatKHR));
+            if (formats == NULL) {
+                return VK_ERROR_OUT_OF_HOST_MEMORY;
+            }
+
+            VkResult res = icd_term->dispatch.GetPhysicalDeviceSurfaceFormatsKHR(phys_dev_term->phys_dev, surface,
+                                                                                 pSurfaceFormatCount, formats);
+            for (uint32_t i = 0; i < *pSurfaceFormatCount; ++i) {
+                pSurfaceFormats[i].surfaceFormat = formats[i];
+                if (pSurfaceFormats[i].pNext != NULL) {
+                    loader_log(icd_term->this_instance, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                               "vkGetPhysicalDeviceSurfaceFormats2KHR: Emulation found unrecognized structure type in "
+                               "pSurfaceFormats[%d].pNext - this struct will be ignored",
+                               i);
+                }
+            }
+            return res;
+        }
+    }
+}
+
+// ---- VK_EXT_display_surface_counter extension trampoline/terminators
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
+                                                                        VkSurfaceCapabilities2EXT *pSurfaceCapabilities) {
+    const VkLayerInstanceDispatchTable *disp;
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    return disp->GetPhysicalDeviceSurfaceCapabilities2EXT(unwrapped_phys_dev, surface, pSurfaceCapabilities);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceSurfaceCapabilities2EXT(
+    VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT *pSurfaceCapabilities) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+
+    VkIcdSurface *icd_surface = (VkIcdSurface *)(surface);
+    uint8_t icd_index = phys_dev_term->icd_index;
+
+    // Unwrap the surface if needed
+    VkSurfaceKHR unwrapped_surface = surface;
+    if (icd_surface->real_icd_surfaces != NULL && (void *)icd_surface->real_icd_surfaces[icd_index] != NULL) {
+        unwrapped_surface = icd_surface->real_icd_surfaces[icd_index];
+    }
+
+    if (icd_term->dispatch.GetPhysicalDeviceSurfaceCapabilities2EXT != NULL) {
+        // Pass the call to the driver
+        return icd_term->dispatch.GetPhysicalDeviceSurfaceCapabilities2EXT(phys_dev_term->phys_dev, unwrapped_surface,
+                                                                           pSurfaceCapabilities);
+    } else {
+        // Emulate the call
+        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                   "vkGetPhysicalDeviceSurfaceCapabilities2EXT: Emulating call in ICD \"%s\" using "
+                   "vkGetPhysicalDeviceSurfaceCapabilitiesKHR",
+                   icd_term->scanned_icd->lib_name);
+
+        VkSurfaceCapabilitiesKHR surface_caps;
+        VkResult res =
+            icd_term->dispatch.GetPhysicalDeviceSurfaceCapabilitiesKHR(phys_dev_term->phys_dev, unwrapped_surface, &surface_caps);
+        pSurfaceCapabilities->minImageCount = surface_caps.minImageCount;
+        pSurfaceCapabilities->maxImageCount = surface_caps.maxImageCount;
+        pSurfaceCapabilities->currentExtent = surface_caps.currentExtent;
+        pSurfaceCapabilities->minImageExtent = surface_caps.minImageExtent;
+        pSurfaceCapabilities->maxImageExtent = surface_caps.maxImageExtent;
+        pSurfaceCapabilities->maxImageArrayLayers = surface_caps.maxImageArrayLayers;
+        pSurfaceCapabilities->supportedTransforms = surface_caps.supportedTransforms;
+        pSurfaceCapabilities->currentTransform = surface_caps.currentTransform;
+        pSurfaceCapabilities->supportedCompositeAlpha = surface_caps.supportedCompositeAlpha;
+        pSurfaceCapabilities->supportedUsageFlags = surface_caps.supportedUsageFlags;
+        pSurfaceCapabilities->supportedSurfaceCounters = 0;
+
+        if (pSurfaceCapabilities->pNext != NULL) {
+            loader_log(icd_term->this_instance, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "vkGetPhysicalDeviceSurfaceCapabilities2EXT: Emulation found unrecognized structure type in "
+                       "pSurfaceCapabilities->pNext - this struct will be ignored");
+        }
+
+        return res;
+    }
+}
+
+// ---- VK_EXT_direct_mode_display extension trampoline/terminators
+
+VKAPI_ATTR VkResult VKAPI_CALL ReleaseDisplayEXT(VkPhysicalDevice physicalDevice, VkDisplayKHR display) {
+    const VkLayerInstanceDispatchTable *disp;
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    return disp->ReleaseDisplayEXT(unwrapped_phys_dev, display);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_ReleaseDisplayEXT(VkPhysicalDevice physicalDevice, VkDisplayKHR display) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+
+    if (icd_term->dispatch.ReleaseDisplayEXT == NULL) {
+        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "ICD \"%s\" associated with VkPhysicalDevice does not support vkReleaseDisplayEXT - Consequently, the call is "
+                   "invalid because it should not be possible to acquire a display on this device",
+                   icd_term->scanned_icd->lib_name);
+    }
+    return icd_term->dispatch.ReleaseDisplayEXT(phys_dev_term->phys_dev, display);
+}
+
+// ---- VK_EXT_acquire_xlib_display extension trampoline/terminators
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+VKAPI_ATTR VkResult VKAPI_CALL AcquireXlibDisplayEXT(VkPhysicalDevice physicalDevice, Display *dpy, VkDisplayKHR display) {
+    const VkLayerInstanceDispatchTable *disp;
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    return disp->AcquireXlibDisplayEXT(unwrapped_phys_dev, dpy, display);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_AcquireXlibDisplayEXT(VkPhysicalDevice physicalDevice, Display *dpy,
+                                                                VkDisplayKHR display) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+
+    if (icd_term->dispatch.AcquireXlibDisplayEXT != NULL) {
+        // Pass the call to the driver
+        return icd_term->dispatch.AcquireXlibDisplayEXT(phys_dev_term->phys_dev, dpy, display);
+    } else {
+        // Emulate the call
+        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                   "vkAcquireXLibDisplayEXT: Emulating call in ICD \"%s\" by returning error", icd_term->scanned_icd->lib_name);
+
+        // Fail for the unsupported command
+        return VK_ERROR_INITIALIZATION_FAILED;
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetRandROutputDisplayEXT(VkPhysicalDevice physicalDevice, Display *dpy, RROutput rrOutput,
+                                                        VkDisplayKHR *pDisplay) {
+    const VkLayerInstanceDispatchTable *disp;
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    return disp->GetRandROutputDisplayEXT(unwrapped_phys_dev, dpy, rrOutput, pDisplay);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetRandROutputDisplayEXT(VkPhysicalDevice physicalDevice, Display *dpy, RROutput rrOutput,
+                                                                   VkDisplayKHR *pDisplay) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+
+    if (icd_term->dispatch.GetRandROutputDisplayEXT != NULL) {
+        // Pass the call to the driver
+        return icd_term->dispatch.GetRandROutputDisplayEXT(phys_dev_term->phys_dev, dpy, rrOutput, pDisplay);
+    } else {
+        // Emulate the call
+        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                   "vkGetRandROutputDisplayEXT: Emulating call in ICD \"%s\" by returning null display",
+                   icd_term->scanned_icd->lib_name);
+
+        // Return a null handle to indicate this can't be done
+        *pDisplay = VK_NULL_HANDLE;
+        return VK_SUCCESS;
+    }
+}
+
+#endif  // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfacePresentModes2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes) {
+    const VkLayerInstanceDispatchTable *disp;
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    return disp->GetPhysicalDeviceSurfacePresentModes2EXT(unwrapped_phys_dev, pSurfaceInfo, pPresentModeCount, pPresentModes);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceSurfacePresentModes2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    if (NULL == icd_term->dispatch.GetPhysicalDeviceSurfacePresentModes2EXT) {
+        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "ICD associated with VkPhysicalDevice does not support GetPhysicalDeviceSurfacePresentModes2EXT");
+    }
+    VkIcdSurface *icd_surface = (VkIcdSurface *)(pSurfaceInfo->surface);
+    uint8_t icd_index = phys_dev_term->icd_index;
+    if (NULL != icd_surface->real_icd_surfaces && NULL != (void *)icd_surface->real_icd_surfaces[icd_index]) {
+        const VkPhysicalDeviceSurfaceInfo2KHR surface_info_copy = {
+            .sType = pSurfaceInfo->sType,
+            .pNext = pSurfaceInfo->pNext,
+            .surface = icd_surface->real_icd_surfaces[icd_index],
+        };
+        return icd_term->dispatch.GetPhysicalDeviceSurfacePresentModes2EXT(phys_dev_term->phys_dev, &surface_info_copy, pPresentModeCount, pPresentModes);
+    }
+    return icd_term->dispatch.GetPhysicalDeviceSurfacePresentModes2EXT(phys_dev_term->phys_dev, pSurfaceInfo, pPresentModeCount, pPresentModes);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetDeviceGroupSurfacePresentModes2EXT(
+    VkDevice                                    device,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetDeviceGroupSurfacePresentModes2EXT(device, pSurfaceInfo, pModes);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetDeviceGroupSurfacePresentModes2EXT(
+    VkDevice                                    device,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes) {
+    uint32_t icd_index = 0;
+    struct loader_device *dev;
+    struct loader_icd_term *icd_term = loader_get_icd_and_device(device, &dev, &icd_index);
+    if (NULL != icd_term && NULL != icd_term->dispatch.GetDeviceGroupSurfacePresentModes2EXT) {
+        VkIcdSurface *icd_surface = (VkIcdSurface *)(uintptr_t)pSurfaceInfo->surface;
+        if (NULL != icd_surface->real_icd_surfaces && (VkSurfaceKHR)NULL != icd_surface->real_icd_surfaces[icd_index]) {
+            const VkPhysicalDeviceSurfaceInfo2KHR surface_info_copy = {
+                .sType = pSurfaceInfo->sType,
+                .pNext = pSurfaceInfo->pNext,
+                .surface = icd_surface->real_icd_surfaces[icd_index],
+            };
+            return icd_term->dispatch.GetDeviceGroupSurfacePresentModes2EXT(device, &surface_info_copy, pModes);
+        }
+        return icd_term->dispatch.GetDeviceGroupSurfacePresentModes2EXT(device, pSurfaceInfo, pModes);
+    }
+    return VK_SUCCESS;
+}
+
+#endif  // VK_USE_PLATFORM_WIN32_KHR
diff --git a/src/third_party/vulkan-loader/src/loader/extension_manual.h b/src/third_party/vulkan-loader/src/loader/extension_manual.h
new file mode 100644
index 0000000..e07b910
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/extension_manual.h
@@ -0,0 +1,106 @@
+/*
+ * Copyright (c) 2015-2017 The Khronos Group Inc.
+ * Copyright (c) 2015-2017 Valve Corporation
+ * Copyright (c) 2015-2017 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Young <marky@lunarg.com>
+ */
+
+#pragma once
+
+// ---- Manually added trampoline/terminator functions
+
+// These functions, for whatever reason, require more complex changes than
+// can easily be automatically generated.
+
+VKAPI_ATTR VkResult VKAPI_CALL
+GetPhysicalDeviceExternalImageFormatPropertiesNV(
+    VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type,
+    VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags,
+    VkExternalMemoryHandleTypeFlagsNV externalHandleType,
+    VkExternalImageFormatPropertiesNV *pExternalImageFormatProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL
+terminator_GetPhysicalDeviceExternalImageFormatPropertiesNV(
+    VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type,
+    VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags,
+    VkExternalMemoryHandleTypeFlagsNV externalHandleType,
+    VkExternalImageFormatPropertiesNV *pExternalImageFormatProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilities2KHR(VkPhysicalDevice physicalDevice,
+                                                                        const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
+                                                                        VkSurfaceCapabilities2KHR* pSurfaceCapabilities);
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceSurfaceCapabilities2KHR(
+    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
+    VkSurfaceCapabilities2KHR* pSurfaceCapabilities);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
+                                                                   const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
+                                                                   uint32_t* pSurfaceFormatCount,
+                                                                   VkSurfaceFormat2KHR* pSurfaceFormats);
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
+                                                                              const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
+                                                                              uint32_t* pSurfaceFormatCount,
+                                                                              VkSurfaceFormat2KHR* pSurfaceFormats);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
+                                                                        VkSurfaceCapabilities2EXT* pSurfaceCapabilities);
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
+                                                                                   VkSurfaceKHR surface,
+                                                                                   VkSurfaceCapabilities2EXT* pSurfaceCapabilities);
+
+VKAPI_ATTR VkResult VKAPI_CALL ReleaseDisplayEXT(VkPhysicalDevice physicalDevice, VkDisplayKHR display);
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_ReleaseDisplayEXT(VkPhysicalDevice physicalDevice, VkDisplayKHR display);
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+VKAPI_ATTR VkResult VKAPI_CALL AcquireXlibDisplayEXT(VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display);
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_AcquireXlibDisplayEXT(VkPhysicalDevice physicalDevice, Display* dpy,
+                                                                VkDisplayKHR display);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetRandROutputDisplayEXT(VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput,
+                                                        VkDisplayKHR* pDisplay);
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetRandROutputDisplayEXT(VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput,
+                                                                   VkDisplayKHR* pDisplay);
+#endif  // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfacePresentModes2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes);
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceSurfacePresentModes2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+VKAPI_ATTR VkResult VKAPI_CALL GetDeviceGroupSurfacePresentModes2EXT(
+    VkDevice                                    device,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes);
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetDeviceGroupSurfacePresentModes2EXT(
+    VkDevice                                    device,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes);
diff --git a/src/third_party/vulkan-loader/src/loader/generated/.clang-format b/src/third_party/vulkan-loader/src/loader/generated/.clang-format
new file mode 100644
index 0000000..3bb983a
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/generated/.clang-format
@@ -0,0 +1,5 @@
+---
+# Disable clang-format for generated code
+DisableFormat: true
+SortIncludes: false
+...
diff --git a/src/third_party/vulkan-loader/src/loader/generated/vk_dispatch_table_helper.h b/src/third_party/vulkan-loader/src/loader/generated/vk_dispatch_table_helper.h
new file mode 100644
index 0000000..74a9bd7
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/generated/vk_dispatch_table_helper.h
@@ -0,0 +1,769 @@
+#pragma once
+// *** THIS FILE IS GENERATED - DO NOT EDIT ***
+// See dispatch_helper_generator.py for modifications
+
+/*
+ * Copyright (c) 2015-2017 The Khronos Group Inc.
+ * Copyright (c) 2015-2017 Valve Corporation
+ * Copyright (c) 2015-2017 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Jon Ashburn <jon@lunarg.com>
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ */
+
+#include <vulkan/vulkan.h>
+#include <vulkan/vk_layer.h>
+#include <string.h>
+#include "vk_layer_dispatch_table.h"
+
+static VKAPI_ATTR VkResult VKAPI_CALL StubCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR* pPresentInfo) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetDeviceGroupPresentCapabilitiesKHR(VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetDeviceGroupSurfacePresentModesKHR(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubGetDeviceGroupPeerMemoryFeaturesKHR(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdSetDeviceMaskKHR(VkCommandBuffer commandBuffer, uint32_t deviceMask) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdDispatchBaseKHR(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) {  };
+static VKAPI_ATTR void VKAPI_CALL StubTrimCommandPoolKHR(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags) {  };
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetMemoryWin32HandleKHR(VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle) { return VK_SUCCESS; };
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetMemoryWin32HandlePropertiesKHR(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties) { return VK_SUCCESS; };
+#endif // VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetMemoryFdKHR(VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetMemoryFdPropertiesKHR(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties) { return VK_SUCCESS; };
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubImportSemaphoreWin32HandleKHR(VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo) { return VK_SUCCESS; };
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetSemaphoreWin32HandleKHR(VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle) { return VK_SUCCESS; };
+#endif // VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubImportSemaphoreFdKHR(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCreateDescriptorUpdateTemplateKHR(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubDestroyDescriptorUpdateTemplateKHR(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) {  };
+static VKAPI_ATTR void VKAPI_CALL StubUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo*      pRenderPassBegin, const VkSubpassBeginInfoKHR*      pSubpassBeginInfo) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR*      pSubpassBeginInfo, const VkSubpassEndInfoKHR*        pSubpassEndInfo) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR*        pSubpassEndInfo) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetSwapchainStatusKHR(VkDevice device, VkSwapchainKHR swapchain) { return VK_SUCCESS; };
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubImportFenceWin32HandleKHR(VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo) { return VK_SUCCESS; };
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetFenceWin32HandleKHR(VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle) { return VK_SUCCESS; };
+#endif // VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubReleaseProfilingLockKHR(VkDevice device) {  };
+static VKAPI_ATTR void VKAPI_CALL StubGetImageMemoryRequirements2KHR(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) {  };
+static VKAPI_ATTR void VKAPI_CALL StubGetBufferMemoryRequirements2KHR(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) {  };
+static VKAPI_ATTR void VKAPI_CALL StubGetImageSparseMemoryRequirements2KHR(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCreateSamplerYcbcrConversionKHR(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubDestroySamplerYcbcrConversionKHR(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubGetDescriptorSetLayoutSupportKHR(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetSemaphoreCounterValueKHR(VkDevice device, VkSemaphore semaphore, uint64_t* pValue) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfoKHR* pWaitInfo, uint64_t timeout) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfoKHR* pSignalInfo) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetPipelineExecutablePropertiesKHR(VkDevice                        device, const VkPipelineInfoKHR*        pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetPipelineExecutableStatisticsKHR(VkDevice                        device, const VkPipelineExecutableInfoKHR*  pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetPipelineExecutableInternalRepresentationsKHR(VkDevice                        device, const VkPipelineExecutableInfoKHR*  pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubDebugMarkerSetObjectTagEXT(VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubDebugMarkerSetObjectNameEXT(VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubCmdDebugMarkerBeginEXT(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdDebugMarkerEndEXT(VkCommandBuffer commandBuffer) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdDebugMarkerInsertEXT(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdBindTransformFeedbackBuffersEXT(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdDrawIndirectByteCountEXT(VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride) {  };
+static VKAPI_ATTR void VKAPI_CALL StubGetImageViewHandleNVX(VkDevice device, const VkImageViewHandleInfoNVX* pInfo) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetShaderInfoAMD(VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo) { return VK_SUCCESS; };
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetMemoryWin32HandleNV(VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle) { return VK_SUCCESS; };
+#endif // VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR void VKAPI_CALL StubCmdBeginConditionalRenderingEXT(VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdEndConditionalRenderingEXT(VkCommandBuffer commandBuffer) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdProcessCommandsNVX(VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdReserveSpaceForCommandsNVX(VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCreateIndirectCommandsLayoutNVX(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubDestroyIndirectCommandsLayoutNVX(VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCreateObjectTableNVX(VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubDestroyObjectTableNVX(VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubRegisterObjectsNVX(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const*    ppObjectTableEntries, const uint32_t* pObjectIndices) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubUnregisterObjectsNVX(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubDisplayPowerControlEXT(VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubRegisterDeviceEventEXT(VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubRegisterDisplayEventEXT(VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetSwapchainCounterEXT(VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetRefreshCycleDurationGOOGLE(VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetPastPresentationTimingGOOGLE(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles) {  };
+static VKAPI_ATTR void VKAPI_CALL StubSetHdrMetadataEXT(VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata) {  };
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetAndroidHardwareBufferPropertiesANDROID(VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties) { return VK_SUCCESS; };
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetMemoryAndroidHardwareBufferANDROID(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer) { return VK_SUCCESS; };
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+static VKAPI_ATTR void VKAPI_CALL StubCmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetImageDrmFormatModifierPropertiesEXT(VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCreateValidationCacheEXT(VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubDestroyValidationCacheEXT(VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubMergeValidationCachesEXT(VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetValidationCacheDataEXT(VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdSetCoarseSampleOrderNV(VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCreateAccelerationStructureNV(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubDestroyAccelerationStructureNV(VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator) {  };
+static VKAPI_ATTR void VKAPI_CALL StubGetAccelerationStructureMemoryRequirementsNV(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubBindAccelerationStructureMemoryNV(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubCmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetRayTracingShaderGroupHandlesNV(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetAccelerationStructureHandleNV(VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubCmdWriteAccelerationStructuresPropertiesNV(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCompileDeferredNV(VkDevice device, VkPipeline pipeline, uint32_t shader) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetMemoryHostPointerPropertiesEXT(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetCalibratedTimestampsEXT(VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdSetCheckpointNV(VkCommandBuffer commandBuffer, const void* pCheckpointMarker) {  };
+static VKAPI_ATTR void VKAPI_CALL StubGetQueueCheckpointDataNV(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubInitializePerformanceApiINTEL(VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubUninitializePerformanceApiINTEL(VkDevice device) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCmdSetPerformanceMarkerINTEL(VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCmdSetPerformanceStreamMarkerINTEL(VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCmdSetPerformanceOverrideINTEL(VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubAcquirePerformanceConfigurationINTEL(VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubReleasePerformanceConfigurationINTEL(VkDevice device, VkPerformanceConfigurationINTEL configuration) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubQueueSetPerformanceConfigurationINTEL(VkQueue queue, VkPerformanceConfigurationINTEL configuration) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetPerformanceParameterINTEL(VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubSetLocalDimmingAMD(VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable) {  };
+static VKAPI_ATTR void VKAPI_CALL StubGetBufferDeviceAddressEXT(VkDevice device, const VkBufferDeviceAddressInfoEXT* pInfo) {  };
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubAcquireFullScreenExclusiveModeEXT(VkDevice device, VkSwapchainKHR swapchain) { return VK_SUCCESS; };
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubReleaseFullScreenExclusiveModeEXT(VkDevice device, VkSwapchainKHR swapchain) { return VK_SUCCESS; };
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetDeviceGroupSurfacePresentModes2EXT(VkDevice device, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkDeviceGroupPresentModeFlagsKHR* pModes) { return VK_SUCCESS; };
+#endif // VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR void VKAPI_CALL StubCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern) {  };
+static VKAPI_ATTR void VKAPI_CALL StubResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) {  };
+
+
+
+static inline void layer_init_device_dispatch_table(VkDevice device, VkLayerDispatchTable *table, PFN_vkGetDeviceProcAddr gpa) {
+    memset(table, 0, sizeof(*table));
+    // Device function pointers
+    table->GetDeviceProcAddr = gpa;
+    table->DestroyDevice = (PFN_vkDestroyDevice) gpa(device, "vkDestroyDevice");
+    table->GetDeviceQueue = (PFN_vkGetDeviceQueue) gpa(device, "vkGetDeviceQueue");
+    table->QueueSubmit = (PFN_vkQueueSubmit) gpa(device, "vkQueueSubmit");
+    table->QueueWaitIdle = (PFN_vkQueueWaitIdle) gpa(device, "vkQueueWaitIdle");
+    table->DeviceWaitIdle = (PFN_vkDeviceWaitIdle) gpa(device, "vkDeviceWaitIdle");
+    table->AllocateMemory = (PFN_vkAllocateMemory) gpa(device, "vkAllocateMemory");
+    table->FreeMemory = (PFN_vkFreeMemory) gpa(device, "vkFreeMemory");
+    table->MapMemory = (PFN_vkMapMemory) gpa(device, "vkMapMemory");
+    table->UnmapMemory = (PFN_vkUnmapMemory) gpa(device, "vkUnmapMemory");
+    table->FlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges) gpa(device, "vkFlushMappedMemoryRanges");
+    table->InvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges) gpa(device, "vkInvalidateMappedMemoryRanges");
+    table->GetDeviceMemoryCommitment = (PFN_vkGetDeviceMemoryCommitment) gpa(device, "vkGetDeviceMemoryCommitment");
+    table->BindBufferMemory = (PFN_vkBindBufferMemory) gpa(device, "vkBindBufferMemory");
+    table->BindImageMemory = (PFN_vkBindImageMemory) gpa(device, "vkBindImageMemory");
+    table->GetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements) gpa(device, "vkGetBufferMemoryRequirements");
+    table->GetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements) gpa(device, "vkGetImageMemoryRequirements");
+    table->GetImageSparseMemoryRequirements = (PFN_vkGetImageSparseMemoryRequirements) gpa(device, "vkGetImageSparseMemoryRequirements");
+    table->QueueBindSparse = (PFN_vkQueueBindSparse) gpa(device, "vkQueueBindSparse");
+    table->CreateFence = (PFN_vkCreateFence) gpa(device, "vkCreateFence");
+    table->DestroyFence = (PFN_vkDestroyFence) gpa(device, "vkDestroyFence");
+    table->ResetFences = (PFN_vkResetFences) gpa(device, "vkResetFences");
+    table->GetFenceStatus = (PFN_vkGetFenceStatus) gpa(device, "vkGetFenceStatus");
+    table->WaitForFences = (PFN_vkWaitForFences) gpa(device, "vkWaitForFences");
+    table->CreateSemaphore = (PFN_vkCreateSemaphore) gpa(device, "vkCreateSemaphore");
+    table->DestroySemaphore = (PFN_vkDestroySemaphore) gpa(device, "vkDestroySemaphore");
+    table->CreateEvent = (PFN_vkCreateEvent) gpa(device, "vkCreateEvent");
+    table->DestroyEvent = (PFN_vkDestroyEvent) gpa(device, "vkDestroyEvent");
+    table->GetEventStatus = (PFN_vkGetEventStatus) gpa(device, "vkGetEventStatus");
+    table->SetEvent = (PFN_vkSetEvent) gpa(device, "vkSetEvent");
+    table->ResetEvent = (PFN_vkResetEvent) gpa(device, "vkResetEvent");
+    table->CreateQueryPool = (PFN_vkCreateQueryPool) gpa(device, "vkCreateQueryPool");
+    table->DestroyQueryPool = (PFN_vkDestroyQueryPool) gpa(device, "vkDestroyQueryPool");
+    table->GetQueryPoolResults = (PFN_vkGetQueryPoolResults) gpa(device, "vkGetQueryPoolResults");
+    table->CreateBuffer = (PFN_vkCreateBuffer) gpa(device, "vkCreateBuffer");
+    table->DestroyBuffer = (PFN_vkDestroyBuffer) gpa(device, "vkDestroyBuffer");
+    table->CreateBufferView = (PFN_vkCreateBufferView) gpa(device, "vkCreateBufferView");
+    table->DestroyBufferView = (PFN_vkDestroyBufferView) gpa(device, "vkDestroyBufferView");
+    table->CreateImage = (PFN_vkCreateImage) gpa(device, "vkCreateImage");
+    table->DestroyImage = (PFN_vkDestroyImage) gpa(device, "vkDestroyImage");
+    table->GetImageSubresourceLayout = (PFN_vkGetImageSubresourceLayout) gpa(device, "vkGetImageSubresourceLayout");
+    table->CreateImageView = (PFN_vkCreateImageView) gpa(device, "vkCreateImageView");
+    table->DestroyImageView = (PFN_vkDestroyImageView) gpa(device, "vkDestroyImageView");
+    table->CreateShaderModule = (PFN_vkCreateShaderModule) gpa(device, "vkCreateShaderModule");
+    table->DestroyShaderModule = (PFN_vkDestroyShaderModule) gpa(device, "vkDestroyShaderModule");
+    table->CreatePipelineCache = (PFN_vkCreatePipelineCache) gpa(device, "vkCreatePipelineCache");
+    table->DestroyPipelineCache = (PFN_vkDestroyPipelineCache) gpa(device, "vkDestroyPipelineCache");
+    table->GetPipelineCacheData = (PFN_vkGetPipelineCacheData) gpa(device, "vkGetPipelineCacheData");
+    table->MergePipelineCaches = (PFN_vkMergePipelineCaches) gpa(device, "vkMergePipelineCaches");
+    table->CreateGraphicsPipelines = (PFN_vkCreateGraphicsPipelines) gpa(device, "vkCreateGraphicsPipelines");
+    table->CreateComputePipelines = (PFN_vkCreateComputePipelines) gpa(device, "vkCreateComputePipelines");
+    table->DestroyPipeline = (PFN_vkDestroyPipeline) gpa(device, "vkDestroyPipeline");
+    table->CreatePipelineLayout = (PFN_vkCreatePipelineLayout) gpa(device, "vkCreatePipelineLayout");
+    table->DestroyPipelineLayout = (PFN_vkDestroyPipelineLayout) gpa(device, "vkDestroyPipelineLayout");
+    table->CreateSampler = (PFN_vkCreateSampler) gpa(device, "vkCreateSampler");
+    table->DestroySampler = (PFN_vkDestroySampler) gpa(device, "vkDestroySampler");
+    table->CreateDescriptorSetLayout = (PFN_vkCreateDescriptorSetLayout) gpa(device, "vkCreateDescriptorSetLayout");
+    table->DestroyDescriptorSetLayout = (PFN_vkDestroyDescriptorSetLayout) gpa(device, "vkDestroyDescriptorSetLayout");
+    table->CreateDescriptorPool = (PFN_vkCreateDescriptorPool) gpa(device, "vkCreateDescriptorPool");
+    table->DestroyDescriptorPool = (PFN_vkDestroyDescriptorPool) gpa(device, "vkDestroyDescriptorPool");
+    table->ResetDescriptorPool = (PFN_vkResetDescriptorPool) gpa(device, "vkResetDescriptorPool");
+    table->AllocateDescriptorSets = (PFN_vkAllocateDescriptorSets) gpa(device, "vkAllocateDescriptorSets");
+    table->FreeDescriptorSets = (PFN_vkFreeDescriptorSets) gpa(device, "vkFreeDescriptorSets");
+    table->UpdateDescriptorSets = (PFN_vkUpdateDescriptorSets) gpa(device, "vkUpdateDescriptorSets");
+    table->CreateFramebuffer = (PFN_vkCreateFramebuffer) gpa(device, "vkCreateFramebuffer");
+    table->DestroyFramebuffer = (PFN_vkDestroyFramebuffer) gpa(device, "vkDestroyFramebuffer");
+    table->CreateRenderPass = (PFN_vkCreateRenderPass) gpa(device, "vkCreateRenderPass");
+    table->DestroyRenderPass = (PFN_vkDestroyRenderPass) gpa(device, "vkDestroyRenderPass");
+    table->GetRenderAreaGranularity = (PFN_vkGetRenderAreaGranularity) gpa(device, "vkGetRenderAreaGranularity");
+    table->CreateCommandPool = (PFN_vkCreateCommandPool) gpa(device, "vkCreateCommandPool");
+    table->DestroyCommandPool = (PFN_vkDestroyCommandPool) gpa(device, "vkDestroyCommandPool");
+    table->ResetCommandPool = (PFN_vkResetCommandPool) gpa(device, "vkResetCommandPool");
+    table->AllocateCommandBuffers = (PFN_vkAllocateCommandBuffers) gpa(device, "vkAllocateCommandBuffers");
+    table->FreeCommandBuffers = (PFN_vkFreeCommandBuffers) gpa(device, "vkFreeCommandBuffers");
+    table->BeginCommandBuffer = (PFN_vkBeginCommandBuffer) gpa(device, "vkBeginCommandBuffer");
+    table->EndCommandBuffer = (PFN_vkEndCommandBuffer) gpa(device, "vkEndCommandBuffer");
+    table->ResetCommandBuffer = (PFN_vkResetCommandBuffer) gpa(device, "vkResetCommandBuffer");
+    table->CmdBindPipeline = (PFN_vkCmdBindPipeline) gpa(device, "vkCmdBindPipeline");
+    table->CmdSetViewport = (PFN_vkCmdSetViewport) gpa(device, "vkCmdSetViewport");
+    table->CmdSetScissor = (PFN_vkCmdSetScissor) gpa(device, "vkCmdSetScissor");
+    table->CmdSetLineWidth = (PFN_vkCmdSetLineWidth) gpa(device, "vkCmdSetLineWidth");
+    table->CmdSetDepthBias = (PFN_vkCmdSetDepthBias) gpa(device, "vkCmdSetDepthBias");
+    table->CmdSetBlendConstants = (PFN_vkCmdSetBlendConstants) gpa(device, "vkCmdSetBlendConstants");
+    table->CmdSetDepthBounds = (PFN_vkCmdSetDepthBounds) gpa(device, "vkCmdSetDepthBounds");
+    table->CmdSetStencilCompareMask = (PFN_vkCmdSetStencilCompareMask) gpa(device, "vkCmdSetStencilCompareMask");
+    table->CmdSetStencilWriteMask = (PFN_vkCmdSetStencilWriteMask) gpa(device, "vkCmdSetStencilWriteMask");
+    table->CmdSetStencilReference = (PFN_vkCmdSetStencilReference) gpa(device, "vkCmdSetStencilReference");
+    table->CmdBindDescriptorSets = (PFN_vkCmdBindDescriptorSets) gpa(device, "vkCmdBindDescriptorSets");
+    table->CmdBindIndexBuffer = (PFN_vkCmdBindIndexBuffer) gpa(device, "vkCmdBindIndexBuffer");
+    table->CmdBindVertexBuffers = (PFN_vkCmdBindVertexBuffers) gpa(device, "vkCmdBindVertexBuffers");
+    table->CmdDraw = (PFN_vkCmdDraw) gpa(device, "vkCmdDraw");
+    table->CmdDrawIndexed = (PFN_vkCmdDrawIndexed) gpa(device, "vkCmdDrawIndexed");
+    table->CmdDrawIndirect = (PFN_vkCmdDrawIndirect) gpa(device, "vkCmdDrawIndirect");
+    table->CmdDrawIndexedIndirect = (PFN_vkCmdDrawIndexedIndirect) gpa(device, "vkCmdDrawIndexedIndirect");
+    table->CmdDispatch = (PFN_vkCmdDispatch) gpa(device, "vkCmdDispatch");
+    table->CmdDispatchIndirect = (PFN_vkCmdDispatchIndirect) gpa(device, "vkCmdDispatchIndirect");
+    table->CmdCopyBuffer = (PFN_vkCmdCopyBuffer) gpa(device, "vkCmdCopyBuffer");
+    table->CmdCopyImage = (PFN_vkCmdCopyImage) gpa(device, "vkCmdCopyImage");
+    table->CmdBlitImage = (PFN_vkCmdBlitImage) gpa(device, "vkCmdBlitImage");
+    table->CmdCopyBufferToImage = (PFN_vkCmdCopyBufferToImage) gpa(device, "vkCmdCopyBufferToImage");
+    table->CmdCopyImageToBuffer = (PFN_vkCmdCopyImageToBuffer) gpa(device, "vkCmdCopyImageToBuffer");
+    table->CmdUpdateBuffer = (PFN_vkCmdUpdateBuffer) gpa(device, "vkCmdUpdateBuffer");
+    table->CmdFillBuffer = (PFN_vkCmdFillBuffer) gpa(device, "vkCmdFillBuffer");
+    table->CmdClearColorImage = (PFN_vkCmdClearColorImage) gpa(device, "vkCmdClearColorImage");
+    table->CmdClearDepthStencilImage = (PFN_vkCmdClearDepthStencilImage) gpa(device, "vkCmdClearDepthStencilImage");
+    table->CmdClearAttachments = (PFN_vkCmdClearAttachments) gpa(device, "vkCmdClearAttachments");
+    table->CmdResolveImage = (PFN_vkCmdResolveImage) gpa(device, "vkCmdResolveImage");
+    table->CmdSetEvent = (PFN_vkCmdSetEvent) gpa(device, "vkCmdSetEvent");
+    table->CmdResetEvent = (PFN_vkCmdResetEvent) gpa(device, "vkCmdResetEvent");
+    table->CmdWaitEvents = (PFN_vkCmdWaitEvents) gpa(device, "vkCmdWaitEvents");
+    table->CmdPipelineBarrier = (PFN_vkCmdPipelineBarrier) gpa(device, "vkCmdPipelineBarrier");
+    table->CmdBeginQuery = (PFN_vkCmdBeginQuery) gpa(device, "vkCmdBeginQuery");
+    table->CmdEndQuery = (PFN_vkCmdEndQuery) gpa(device, "vkCmdEndQuery");
+    table->CmdResetQueryPool = (PFN_vkCmdResetQueryPool) gpa(device, "vkCmdResetQueryPool");
+    table->CmdWriteTimestamp = (PFN_vkCmdWriteTimestamp) gpa(device, "vkCmdWriteTimestamp");
+    table->CmdCopyQueryPoolResults = (PFN_vkCmdCopyQueryPoolResults) gpa(device, "vkCmdCopyQueryPoolResults");
+    table->CmdPushConstants = (PFN_vkCmdPushConstants) gpa(device, "vkCmdPushConstants");
+    table->CmdBeginRenderPass = (PFN_vkCmdBeginRenderPass) gpa(device, "vkCmdBeginRenderPass");
+    table->CmdNextSubpass = (PFN_vkCmdNextSubpass) gpa(device, "vkCmdNextSubpass");
+    table->CmdEndRenderPass = (PFN_vkCmdEndRenderPass) gpa(device, "vkCmdEndRenderPass");
+    table->CmdExecuteCommands = (PFN_vkCmdExecuteCommands) gpa(device, "vkCmdExecuteCommands");
+    table->BindBufferMemory2 = (PFN_vkBindBufferMemory2) gpa(device, "vkBindBufferMemory2");
+    table->BindImageMemory2 = (PFN_vkBindImageMemory2) gpa(device, "vkBindImageMemory2");
+    table->GetDeviceGroupPeerMemoryFeatures = (PFN_vkGetDeviceGroupPeerMemoryFeatures) gpa(device, "vkGetDeviceGroupPeerMemoryFeatures");
+    table->CmdSetDeviceMask = (PFN_vkCmdSetDeviceMask) gpa(device, "vkCmdSetDeviceMask");
+    table->CmdDispatchBase = (PFN_vkCmdDispatchBase) gpa(device, "vkCmdDispatchBase");
+    table->GetImageMemoryRequirements2 = (PFN_vkGetImageMemoryRequirements2) gpa(device, "vkGetImageMemoryRequirements2");
+    table->GetBufferMemoryRequirements2 = (PFN_vkGetBufferMemoryRequirements2) gpa(device, "vkGetBufferMemoryRequirements2");
+    table->GetImageSparseMemoryRequirements2 = (PFN_vkGetImageSparseMemoryRequirements2) gpa(device, "vkGetImageSparseMemoryRequirements2");
+    table->TrimCommandPool = (PFN_vkTrimCommandPool) gpa(device, "vkTrimCommandPool");
+    table->GetDeviceQueue2 = (PFN_vkGetDeviceQueue2) gpa(device, "vkGetDeviceQueue2");
+    table->CreateSamplerYcbcrConversion = (PFN_vkCreateSamplerYcbcrConversion) gpa(device, "vkCreateSamplerYcbcrConversion");
+    table->DestroySamplerYcbcrConversion = (PFN_vkDestroySamplerYcbcrConversion) gpa(device, "vkDestroySamplerYcbcrConversion");
+    table->CreateDescriptorUpdateTemplate = (PFN_vkCreateDescriptorUpdateTemplate) gpa(device, "vkCreateDescriptorUpdateTemplate");
+    table->DestroyDescriptorUpdateTemplate = (PFN_vkDestroyDescriptorUpdateTemplate) gpa(device, "vkDestroyDescriptorUpdateTemplate");
+    table->UpdateDescriptorSetWithTemplate = (PFN_vkUpdateDescriptorSetWithTemplate) gpa(device, "vkUpdateDescriptorSetWithTemplate");
+    table->GetDescriptorSetLayoutSupport = (PFN_vkGetDescriptorSetLayoutSupport) gpa(device, "vkGetDescriptorSetLayoutSupport");
+    table->CreateSwapchainKHR = (PFN_vkCreateSwapchainKHR) gpa(device, "vkCreateSwapchainKHR");
+    if (table->CreateSwapchainKHR == nullptr) { table->CreateSwapchainKHR = (PFN_vkCreateSwapchainKHR)StubCreateSwapchainKHR; }
+    table->DestroySwapchainKHR = (PFN_vkDestroySwapchainKHR) gpa(device, "vkDestroySwapchainKHR");
+    if (table->DestroySwapchainKHR == nullptr) { table->DestroySwapchainKHR = (PFN_vkDestroySwapchainKHR)StubDestroySwapchainKHR; }
+    table->GetSwapchainImagesKHR = (PFN_vkGetSwapchainImagesKHR) gpa(device, "vkGetSwapchainImagesKHR");
+    if (table->GetSwapchainImagesKHR == nullptr) { table->GetSwapchainImagesKHR = (PFN_vkGetSwapchainImagesKHR)StubGetSwapchainImagesKHR; }
+    table->AcquireNextImageKHR = (PFN_vkAcquireNextImageKHR) gpa(device, "vkAcquireNextImageKHR");
+    if (table->AcquireNextImageKHR == nullptr) { table->AcquireNextImageKHR = (PFN_vkAcquireNextImageKHR)StubAcquireNextImageKHR; }
+    table->QueuePresentKHR = (PFN_vkQueuePresentKHR) gpa(device, "vkQueuePresentKHR");
+    if (table->QueuePresentKHR == nullptr) { table->QueuePresentKHR = (PFN_vkQueuePresentKHR)StubQueuePresentKHR; }
+    table->GetDeviceGroupPresentCapabilitiesKHR = (PFN_vkGetDeviceGroupPresentCapabilitiesKHR) gpa(device, "vkGetDeviceGroupPresentCapabilitiesKHR");
+    if (table->GetDeviceGroupPresentCapabilitiesKHR == nullptr) { table->GetDeviceGroupPresentCapabilitiesKHR = (PFN_vkGetDeviceGroupPresentCapabilitiesKHR)StubGetDeviceGroupPresentCapabilitiesKHR; }
+    table->GetDeviceGroupSurfacePresentModesKHR = (PFN_vkGetDeviceGroupSurfacePresentModesKHR) gpa(device, "vkGetDeviceGroupSurfacePresentModesKHR");
+    if (table->GetDeviceGroupSurfacePresentModesKHR == nullptr) { table->GetDeviceGroupSurfacePresentModesKHR = (PFN_vkGetDeviceGroupSurfacePresentModesKHR)StubGetDeviceGroupSurfacePresentModesKHR; }
+    table->AcquireNextImage2KHR = (PFN_vkAcquireNextImage2KHR) gpa(device, "vkAcquireNextImage2KHR");
+    if (table->AcquireNextImage2KHR == nullptr) { table->AcquireNextImage2KHR = (PFN_vkAcquireNextImage2KHR)StubAcquireNextImage2KHR; }
+    table->CreateSharedSwapchainsKHR = (PFN_vkCreateSharedSwapchainsKHR) gpa(device, "vkCreateSharedSwapchainsKHR");
+    if (table->CreateSharedSwapchainsKHR == nullptr) { table->CreateSharedSwapchainsKHR = (PFN_vkCreateSharedSwapchainsKHR)StubCreateSharedSwapchainsKHR; }
+    table->GetDeviceGroupPeerMemoryFeaturesKHR = (PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR) gpa(device, "vkGetDeviceGroupPeerMemoryFeaturesKHR");
+    if (table->GetDeviceGroupPeerMemoryFeaturesKHR == nullptr) { table->GetDeviceGroupPeerMemoryFeaturesKHR = (PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR)StubGetDeviceGroupPeerMemoryFeaturesKHR; }
+    table->CmdSetDeviceMaskKHR = (PFN_vkCmdSetDeviceMaskKHR) gpa(device, "vkCmdSetDeviceMaskKHR");
+    if (table->CmdSetDeviceMaskKHR == nullptr) { table->CmdSetDeviceMaskKHR = (PFN_vkCmdSetDeviceMaskKHR)StubCmdSetDeviceMaskKHR; }
+    table->CmdDispatchBaseKHR = (PFN_vkCmdDispatchBaseKHR) gpa(device, "vkCmdDispatchBaseKHR");
+    if (table->CmdDispatchBaseKHR == nullptr) { table->CmdDispatchBaseKHR = (PFN_vkCmdDispatchBaseKHR)StubCmdDispatchBaseKHR; }
+    table->TrimCommandPoolKHR = (PFN_vkTrimCommandPoolKHR) gpa(device, "vkTrimCommandPoolKHR");
+    if (table->TrimCommandPoolKHR == nullptr) { table->TrimCommandPoolKHR = (PFN_vkTrimCommandPoolKHR)StubTrimCommandPoolKHR; }
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->GetMemoryWin32HandleKHR = (PFN_vkGetMemoryWin32HandleKHR) gpa(device, "vkGetMemoryWin32HandleKHR");
+    if (table->GetMemoryWin32HandleKHR == nullptr) { table->GetMemoryWin32HandleKHR = (PFN_vkGetMemoryWin32HandleKHR)StubGetMemoryWin32HandleKHR; }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->GetMemoryWin32HandlePropertiesKHR = (PFN_vkGetMemoryWin32HandlePropertiesKHR) gpa(device, "vkGetMemoryWin32HandlePropertiesKHR");
+    if (table->GetMemoryWin32HandlePropertiesKHR == nullptr) { table->GetMemoryWin32HandlePropertiesKHR = (PFN_vkGetMemoryWin32HandlePropertiesKHR)StubGetMemoryWin32HandlePropertiesKHR; }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+    table->GetMemoryFdKHR = (PFN_vkGetMemoryFdKHR) gpa(device, "vkGetMemoryFdKHR");
+    if (table->GetMemoryFdKHR == nullptr) { table->GetMemoryFdKHR = (PFN_vkGetMemoryFdKHR)StubGetMemoryFdKHR; }
+    table->GetMemoryFdPropertiesKHR = (PFN_vkGetMemoryFdPropertiesKHR) gpa(device, "vkGetMemoryFdPropertiesKHR");
+    if (table->GetMemoryFdPropertiesKHR == nullptr) { table->GetMemoryFdPropertiesKHR = (PFN_vkGetMemoryFdPropertiesKHR)StubGetMemoryFdPropertiesKHR; }
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->ImportSemaphoreWin32HandleKHR = (PFN_vkImportSemaphoreWin32HandleKHR) gpa(device, "vkImportSemaphoreWin32HandleKHR");
+    if (table->ImportSemaphoreWin32HandleKHR == nullptr) { table->ImportSemaphoreWin32HandleKHR = (PFN_vkImportSemaphoreWin32HandleKHR)StubImportSemaphoreWin32HandleKHR; }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->GetSemaphoreWin32HandleKHR = (PFN_vkGetSemaphoreWin32HandleKHR) gpa(device, "vkGetSemaphoreWin32HandleKHR");
+    if (table->GetSemaphoreWin32HandleKHR == nullptr) { table->GetSemaphoreWin32HandleKHR = (PFN_vkGetSemaphoreWin32HandleKHR)StubGetSemaphoreWin32HandleKHR; }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+    table->ImportSemaphoreFdKHR = (PFN_vkImportSemaphoreFdKHR) gpa(device, "vkImportSemaphoreFdKHR");
+    if (table->ImportSemaphoreFdKHR == nullptr) { table->ImportSemaphoreFdKHR = (PFN_vkImportSemaphoreFdKHR)StubImportSemaphoreFdKHR; }
+    table->GetSemaphoreFdKHR = (PFN_vkGetSemaphoreFdKHR) gpa(device, "vkGetSemaphoreFdKHR");
+    if (table->GetSemaphoreFdKHR == nullptr) { table->GetSemaphoreFdKHR = (PFN_vkGetSemaphoreFdKHR)StubGetSemaphoreFdKHR; }
+    table->CmdPushDescriptorSetKHR = (PFN_vkCmdPushDescriptorSetKHR) gpa(device, "vkCmdPushDescriptorSetKHR");
+    if (table->CmdPushDescriptorSetKHR == nullptr) { table->CmdPushDescriptorSetKHR = (PFN_vkCmdPushDescriptorSetKHR)StubCmdPushDescriptorSetKHR; }
+    table->CmdPushDescriptorSetWithTemplateKHR = (PFN_vkCmdPushDescriptorSetWithTemplateKHR) gpa(device, "vkCmdPushDescriptorSetWithTemplateKHR");
+    if (table->CmdPushDescriptorSetWithTemplateKHR == nullptr) { table->CmdPushDescriptorSetWithTemplateKHR = (PFN_vkCmdPushDescriptorSetWithTemplateKHR)StubCmdPushDescriptorSetWithTemplateKHR; }
+    table->CreateDescriptorUpdateTemplateKHR = (PFN_vkCreateDescriptorUpdateTemplateKHR) gpa(device, "vkCreateDescriptorUpdateTemplateKHR");
+    if (table->CreateDescriptorUpdateTemplateKHR == nullptr) { table->CreateDescriptorUpdateTemplateKHR = (PFN_vkCreateDescriptorUpdateTemplateKHR)StubCreateDescriptorUpdateTemplateKHR; }
+    table->DestroyDescriptorUpdateTemplateKHR = (PFN_vkDestroyDescriptorUpdateTemplateKHR) gpa(device, "vkDestroyDescriptorUpdateTemplateKHR");
+    if (table->DestroyDescriptorUpdateTemplateKHR == nullptr) { table->DestroyDescriptorUpdateTemplateKHR = (PFN_vkDestroyDescriptorUpdateTemplateKHR)StubDestroyDescriptorUpdateTemplateKHR; }
+    table->UpdateDescriptorSetWithTemplateKHR = (PFN_vkUpdateDescriptorSetWithTemplateKHR) gpa(device, "vkUpdateDescriptorSetWithTemplateKHR");
+    if (table->UpdateDescriptorSetWithTemplateKHR == nullptr) { table->UpdateDescriptorSetWithTemplateKHR = (PFN_vkUpdateDescriptorSetWithTemplateKHR)StubUpdateDescriptorSetWithTemplateKHR; }
+    table->CreateRenderPass2KHR = (PFN_vkCreateRenderPass2KHR) gpa(device, "vkCreateRenderPass2KHR");
+    if (table->CreateRenderPass2KHR == nullptr) { table->CreateRenderPass2KHR = (PFN_vkCreateRenderPass2KHR)StubCreateRenderPass2KHR; }
+    table->CmdBeginRenderPass2KHR = (PFN_vkCmdBeginRenderPass2KHR) gpa(device, "vkCmdBeginRenderPass2KHR");
+    if (table->CmdBeginRenderPass2KHR == nullptr) { table->CmdBeginRenderPass2KHR = (PFN_vkCmdBeginRenderPass2KHR)StubCmdBeginRenderPass2KHR; }
+    table->CmdNextSubpass2KHR = (PFN_vkCmdNextSubpass2KHR) gpa(device, "vkCmdNextSubpass2KHR");
+    if (table->CmdNextSubpass2KHR == nullptr) { table->CmdNextSubpass2KHR = (PFN_vkCmdNextSubpass2KHR)StubCmdNextSubpass2KHR; }
+    table->CmdEndRenderPass2KHR = (PFN_vkCmdEndRenderPass2KHR) gpa(device, "vkCmdEndRenderPass2KHR");
+    if (table->CmdEndRenderPass2KHR == nullptr) { table->CmdEndRenderPass2KHR = (PFN_vkCmdEndRenderPass2KHR)StubCmdEndRenderPass2KHR; }
+    table->GetSwapchainStatusKHR = (PFN_vkGetSwapchainStatusKHR) gpa(device, "vkGetSwapchainStatusKHR");
+    if (table->GetSwapchainStatusKHR == nullptr) { table->GetSwapchainStatusKHR = (PFN_vkGetSwapchainStatusKHR)StubGetSwapchainStatusKHR; }
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->ImportFenceWin32HandleKHR = (PFN_vkImportFenceWin32HandleKHR) gpa(device, "vkImportFenceWin32HandleKHR");
+    if (table->ImportFenceWin32HandleKHR == nullptr) { table->ImportFenceWin32HandleKHR = (PFN_vkImportFenceWin32HandleKHR)StubImportFenceWin32HandleKHR; }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->GetFenceWin32HandleKHR = (PFN_vkGetFenceWin32HandleKHR) gpa(device, "vkGetFenceWin32HandleKHR");
+    if (table->GetFenceWin32HandleKHR == nullptr) { table->GetFenceWin32HandleKHR = (PFN_vkGetFenceWin32HandleKHR)StubGetFenceWin32HandleKHR; }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+    table->ImportFenceFdKHR = (PFN_vkImportFenceFdKHR) gpa(device, "vkImportFenceFdKHR");
+    if (table->ImportFenceFdKHR == nullptr) { table->ImportFenceFdKHR = (PFN_vkImportFenceFdKHR)StubImportFenceFdKHR; }
+    table->GetFenceFdKHR = (PFN_vkGetFenceFdKHR) gpa(device, "vkGetFenceFdKHR");
+    if (table->GetFenceFdKHR == nullptr) { table->GetFenceFdKHR = (PFN_vkGetFenceFdKHR)StubGetFenceFdKHR; }
+    table->AcquireProfilingLockKHR = (PFN_vkAcquireProfilingLockKHR) gpa(device, "vkAcquireProfilingLockKHR");
+    if (table->AcquireProfilingLockKHR == nullptr) { table->AcquireProfilingLockKHR = (PFN_vkAcquireProfilingLockKHR)StubAcquireProfilingLockKHR; }
+    table->ReleaseProfilingLockKHR = (PFN_vkReleaseProfilingLockKHR) gpa(device, "vkReleaseProfilingLockKHR");
+    if (table->ReleaseProfilingLockKHR == nullptr) { table->ReleaseProfilingLockKHR = (PFN_vkReleaseProfilingLockKHR)StubReleaseProfilingLockKHR; }
+    table->GetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2KHR) gpa(device, "vkGetImageMemoryRequirements2KHR");
+    if (table->GetImageMemoryRequirements2KHR == nullptr) { table->GetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2KHR)StubGetImageMemoryRequirements2KHR; }
+    table->GetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2KHR) gpa(device, "vkGetBufferMemoryRequirements2KHR");
+    if (table->GetBufferMemoryRequirements2KHR == nullptr) { table->GetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2KHR)StubGetBufferMemoryRequirements2KHR; }
+    table->GetImageSparseMemoryRequirements2KHR = (PFN_vkGetImageSparseMemoryRequirements2KHR) gpa(device, "vkGetImageSparseMemoryRequirements2KHR");
+    if (table->GetImageSparseMemoryRequirements2KHR == nullptr) { table->GetImageSparseMemoryRequirements2KHR = (PFN_vkGetImageSparseMemoryRequirements2KHR)StubGetImageSparseMemoryRequirements2KHR; }
+    table->CreateSamplerYcbcrConversionKHR = (PFN_vkCreateSamplerYcbcrConversionKHR) gpa(device, "vkCreateSamplerYcbcrConversionKHR");
+    if (table->CreateSamplerYcbcrConversionKHR == nullptr) { table->CreateSamplerYcbcrConversionKHR = (PFN_vkCreateSamplerYcbcrConversionKHR)StubCreateSamplerYcbcrConversionKHR; }
+    table->DestroySamplerYcbcrConversionKHR = (PFN_vkDestroySamplerYcbcrConversionKHR) gpa(device, "vkDestroySamplerYcbcrConversionKHR");
+    if (table->DestroySamplerYcbcrConversionKHR == nullptr) { table->DestroySamplerYcbcrConversionKHR = (PFN_vkDestroySamplerYcbcrConversionKHR)StubDestroySamplerYcbcrConversionKHR; }
+    table->BindBufferMemory2KHR = (PFN_vkBindBufferMemory2KHR) gpa(device, "vkBindBufferMemory2KHR");
+    if (table->BindBufferMemory2KHR == nullptr) { table->BindBufferMemory2KHR = (PFN_vkBindBufferMemory2KHR)StubBindBufferMemory2KHR; }
+    table->BindImageMemory2KHR = (PFN_vkBindImageMemory2KHR) gpa(device, "vkBindImageMemory2KHR");
+    if (table->BindImageMemory2KHR == nullptr) { table->BindImageMemory2KHR = (PFN_vkBindImageMemory2KHR)StubBindImageMemory2KHR; }
+    table->GetDescriptorSetLayoutSupportKHR = (PFN_vkGetDescriptorSetLayoutSupportKHR) gpa(device, "vkGetDescriptorSetLayoutSupportKHR");
+    if (table->GetDescriptorSetLayoutSupportKHR == nullptr) { table->GetDescriptorSetLayoutSupportKHR = (PFN_vkGetDescriptorSetLayoutSupportKHR)StubGetDescriptorSetLayoutSupportKHR; }
+    table->CmdDrawIndirectCountKHR = (PFN_vkCmdDrawIndirectCountKHR) gpa(device, "vkCmdDrawIndirectCountKHR");
+    if (table->CmdDrawIndirectCountKHR == nullptr) { table->CmdDrawIndirectCountKHR = (PFN_vkCmdDrawIndirectCountKHR)StubCmdDrawIndirectCountKHR; }
+    table->CmdDrawIndexedIndirectCountKHR = (PFN_vkCmdDrawIndexedIndirectCountKHR) gpa(device, "vkCmdDrawIndexedIndirectCountKHR");
+    if (table->CmdDrawIndexedIndirectCountKHR == nullptr) { table->CmdDrawIndexedIndirectCountKHR = (PFN_vkCmdDrawIndexedIndirectCountKHR)StubCmdDrawIndexedIndirectCountKHR; }
+    table->GetSemaphoreCounterValueKHR = (PFN_vkGetSemaphoreCounterValueKHR) gpa(device, "vkGetSemaphoreCounterValueKHR");
+    if (table->GetSemaphoreCounterValueKHR == nullptr) { table->GetSemaphoreCounterValueKHR = (PFN_vkGetSemaphoreCounterValueKHR)StubGetSemaphoreCounterValueKHR; }
+    table->WaitSemaphoresKHR = (PFN_vkWaitSemaphoresKHR) gpa(device, "vkWaitSemaphoresKHR");
+    if (table->WaitSemaphoresKHR == nullptr) { table->WaitSemaphoresKHR = (PFN_vkWaitSemaphoresKHR)StubWaitSemaphoresKHR; }
+    table->SignalSemaphoreKHR = (PFN_vkSignalSemaphoreKHR) gpa(device, "vkSignalSemaphoreKHR");
+    if (table->SignalSemaphoreKHR == nullptr) { table->SignalSemaphoreKHR = (PFN_vkSignalSemaphoreKHR)StubSignalSemaphoreKHR; }
+    table->GetPipelineExecutablePropertiesKHR = (PFN_vkGetPipelineExecutablePropertiesKHR) gpa(device, "vkGetPipelineExecutablePropertiesKHR");
+    if (table->GetPipelineExecutablePropertiesKHR == nullptr) { table->GetPipelineExecutablePropertiesKHR = (PFN_vkGetPipelineExecutablePropertiesKHR)StubGetPipelineExecutablePropertiesKHR; }
+    table->GetPipelineExecutableStatisticsKHR = (PFN_vkGetPipelineExecutableStatisticsKHR) gpa(device, "vkGetPipelineExecutableStatisticsKHR");
+    if (table->GetPipelineExecutableStatisticsKHR == nullptr) { table->GetPipelineExecutableStatisticsKHR = (PFN_vkGetPipelineExecutableStatisticsKHR)StubGetPipelineExecutableStatisticsKHR; }
+    table->GetPipelineExecutableInternalRepresentationsKHR = (PFN_vkGetPipelineExecutableInternalRepresentationsKHR) gpa(device, "vkGetPipelineExecutableInternalRepresentationsKHR");
+    if (table->GetPipelineExecutableInternalRepresentationsKHR == nullptr) { table->GetPipelineExecutableInternalRepresentationsKHR = (PFN_vkGetPipelineExecutableInternalRepresentationsKHR)StubGetPipelineExecutableInternalRepresentationsKHR; }
+    table->DebugMarkerSetObjectTagEXT = (PFN_vkDebugMarkerSetObjectTagEXT) gpa(device, "vkDebugMarkerSetObjectTagEXT");
+    if (table->DebugMarkerSetObjectTagEXT == nullptr) { table->DebugMarkerSetObjectTagEXT = (PFN_vkDebugMarkerSetObjectTagEXT)StubDebugMarkerSetObjectTagEXT; }
+    table->DebugMarkerSetObjectNameEXT = (PFN_vkDebugMarkerSetObjectNameEXT) gpa(device, "vkDebugMarkerSetObjectNameEXT");
+    if (table->DebugMarkerSetObjectNameEXT == nullptr) { table->DebugMarkerSetObjectNameEXT = (PFN_vkDebugMarkerSetObjectNameEXT)StubDebugMarkerSetObjectNameEXT; }
+    table->CmdDebugMarkerBeginEXT = (PFN_vkCmdDebugMarkerBeginEXT) gpa(device, "vkCmdDebugMarkerBeginEXT");
+    if (table->CmdDebugMarkerBeginEXT == nullptr) { table->CmdDebugMarkerBeginEXT = (PFN_vkCmdDebugMarkerBeginEXT)StubCmdDebugMarkerBeginEXT; }
+    table->CmdDebugMarkerEndEXT = (PFN_vkCmdDebugMarkerEndEXT) gpa(device, "vkCmdDebugMarkerEndEXT");
+    if (table->CmdDebugMarkerEndEXT == nullptr) { table->CmdDebugMarkerEndEXT = (PFN_vkCmdDebugMarkerEndEXT)StubCmdDebugMarkerEndEXT; }
+    table->CmdDebugMarkerInsertEXT = (PFN_vkCmdDebugMarkerInsertEXT) gpa(device, "vkCmdDebugMarkerInsertEXT");
+    if (table->CmdDebugMarkerInsertEXT == nullptr) { table->CmdDebugMarkerInsertEXT = (PFN_vkCmdDebugMarkerInsertEXT)StubCmdDebugMarkerInsertEXT; }
+    table->CmdBindTransformFeedbackBuffersEXT = (PFN_vkCmdBindTransformFeedbackBuffersEXT) gpa(device, "vkCmdBindTransformFeedbackBuffersEXT");
+    if (table->CmdBindTransformFeedbackBuffersEXT == nullptr) { table->CmdBindTransformFeedbackBuffersEXT = (PFN_vkCmdBindTransformFeedbackBuffersEXT)StubCmdBindTransformFeedbackBuffersEXT; }
+    table->CmdBeginTransformFeedbackEXT = (PFN_vkCmdBeginTransformFeedbackEXT) gpa(device, "vkCmdBeginTransformFeedbackEXT");
+    if (table->CmdBeginTransformFeedbackEXT == nullptr) { table->CmdBeginTransformFeedbackEXT = (PFN_vkCmdBeginTransformFeedbackEXT)StubCmdBeginTransformFeedbackEXT; }
+    table->CmdEndTransformFeedbackEXT = (PFN_vkCmdEndTransformFeedbackEXT) gpa(device, "vkCmdEndTransformFeedbackEXT");
+    if (table->CmdEndTransformFeedbackEXT == nullptr) { table->CmdEndTransformFeedbackEXT = (PFN_vkCmdEndTransformFeedbackEXT)StubCmdEndTransformFeedbackEXT; }
+    table->CmdBeginQueryIndexedEXT = (PFN_vkCmdBeginQueryIndexedEXT) gpa(device, "vkCmdBeginQueryIndexedEXT");
+    if (table->CmdBeginQueryIndexedEXT == nullptr) { table->CmdBeginQueryIndexedEXT = (PFN_vkCmdBeginQueryIndexedEXT)StubCmdBeginQueryIndexedEXT; }
+    table->CmdEndQueryIndexedEXT = (PFN_vkCmdEndQueryIndexedEXT) gpa(device, "vkCmdEndQueryIndexedEXT");
+    if (table->CmdEndQueryIndexedEXT == nullptr) { table->CmdEndQueryIndexedEXT = (PFN_vkCmdEndQueryIndexedEXT)StubCmdEndQueryIndexedEXT; }
+    table->CmdDrawIndirectByteCountEXT = (PFN_vkCmdDrawIndirectByteCountEXT) gpa(device, "vkCmdDrawIndirectByteCountEXT");
+    if (table->CmdDrawIndirectByteCountEXT == nullptr) { table->CmdDrawIndirectByteCountEXT = (PFN_vkCmdDrawIndirectByteCountEXT)StubCmdDrawIndirectByteCountEXT; }
+    table->GetImageViewHandleNVX = (PFN_vkGetImageViewHandleNVX) gpa(device, "vkGetImageViewHandleNVX");
+    if (table->GetImageViewHandleNVX == nullptr) { table->GetImageViewHandleNVX = (PFN_vkGetImageViewHandleNVX)StubGetImageViewHandleNVX; }
+    table->CmdDrawIndirectCountAMD = (PFN_vkCmdDrawIndirectCountAMD) gpa(device, "vkCmdDrawIndirectCountAMD");
+    if (table->CmdDrawIndirectCountAMD == nullptr) { table->CmdDrawIndirectCountAMD = (PFN_vkCmdDrawIndirectCountAMD)StubCmdDrawIndirectCountAMD; }
+    table->CmdDrawIndexedIndirectCountAMD = (PFN_vkCmdDrawIndexedIndirectCountAMD) gpa(device, "vkCmdDrawIndexedIndirectCountAMD");
+    if (table->CmdDrawIndexedIndirectCountAMD == nullptr) { table->CmdDrawIndexedIndirectCountAMD = (PFN_vkCmdDrawIndexedIndirectCountAMD)StubCmdDrawIndexedIndirectCountAMD; }
+    table->GetShaderInfoAMD = (PFN_vkGetShaderInfoAMD) gpa(device, "vkGetShaderInfoAMD");
+    if (table->GetShaderInfoAMD == nullptr) { table->GetShaderInfoAMD = (PFN_vkGetShaderInfoAMD)StubGetShaderInfoAMD; }
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->GetMemoryWin32HandleNV = (PFN_vkGetMemoryWin32HandleNV) gpa(device, "vkGetMemoryWin32HandleNV");
+    if (table->GetMemoryWin32HandleNV == nullptr) { table->GetMemoryWin32HandleNV = (PFN_vkGetMemoryWin32HandleNV)StubGetMemoryWin32HandleNV; }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+    table->CmdBeginConditionalRenderingEXT = (PFN_vkCmdBeginConditionalRenderingEXT) gpa(device, "vkCmdBeginConditionalRenderingEXT");
+    if (table->CmdBeginConditionalRenderingEXT == nullptr) { table->CmdBeginConditionalRenderingEXT = (PFN_vkCmdBeginConditionalRenderingEXT)StubCmdBeginConditionalRenderingEXT; }
+    table->CmdEndConditionalRenderingEXT = (PFN_vkCmdEndConditionalRenderingEXT) gpa(device, "vkCmdEndConditionalRenderingEXT");
+    if (table->CmdEndConditionalRenderingEXT == nullptr) { table->CmdEndConditionalRenderingEXT = (PFN_vkCmdEndConditionalRenderingEXT)StubCmdEndConditionalRenderingEXT; }
+    table->CmdProcessCommandsNVX = (PFN_vkCmdProcessCommandsNVX) gpa(device, "vkCmdProcessCommandsNVX");
+    if (table->CmdProcessCommandsNVX == nullptr) { table->CmdProcessCommandsNVX = (PFN_vkCmdProcessCommandsNVX)StubCmdProcessCommandsNVX; }
+    table->CmdReserveSpaceForCommandsNVX = (PFN_vkCmdReserveSpaceForCommandsNVX) gpa(device, "vkCmdReserveSpaceForCommandsNVX");
+    if (table->CmdReserveSpaceForCommandsNVX == nullptr) { table->CmdReserveSpaceForCommandsNVX = (PFN_vkCmdReserveSpaceForCommandsNVX)StubCmdReserveSpaceForCommandsNVX; }
+    table->CreateIndirectCommandsLayoutNVX = (PFN_vkCreateIndirectCommandsLayoutNVX) gpa(device, "vkCreateIndirectCommandsLayoutNVX");
+    if (table->CreateIndirectCommandsLayoutNVX == nullptr) { table->CreateIndirectCommandsLayoutNVX = (PFN_vkCreateIndirectCommandsLayoutNVX)StubCreateIndirectCommandsLayoutNVX; }
+    table->DestroyIndirectCommandsLayoutNVX = (PFN_vkDestroyIndirectCommandsLayoutNVX) gpa(device, "vkDestroyIndirectCommandsLayoutNVX");
+    if (table->DestroyIndirectCommandsLayoutNVX == nullptr) { table->DestroyIndirectCommandsLayoutNVX = (PFN_vkDestroyIndirectCommandsLayoutNVX)StubDestroyIndirectCommandsLayoutNVX; }
+    table->CreateObjectTableNVX = (PFN_vkCreateObjectTableNVX) gpa(device, "vkCreateObjectTableNVX");
+    if (table->CreateObjectTableNVX == nullptr) { table->CreateObjectTableNVX = (PFN_vkCreateObjectTableNVX)StubCreateObjectTableNVX; }
+    table->DestroyObjectTableNVX = (PFN_vkDestroyObjectTableNVX) gpa(device, "vkDestroyObjectTableNVX");
+    if (table->DestroyObjectTableNVX == nullptr) { table->DestroyObjectTableNVX = (PFN_vkDestroyObjectTableNVX)StubDestroyObjectTableNVX; }
+    table->RegisterObjectsNVX = (PFN_vkRegisterObjectsNVX) gpa(device, "vkRegisterObjectsNVX");
+    if (table->RegisterObjectsNVX == nullptr) { table->RegisterObjectsNVX = (PFN_vkRegisterObjectsNVX)StubRegisterObjectsNVX; }
+    table->UnregisterObjectsNVX = (PFN_vkUnregisterObjectsNVX) gpa(device, "vkUnregisterObjectsNVX");
+    if (table->UnregisterObjectsNVX == nullptr) { table->UnregisterObjectsNVX = (PFN_vkUnregisterObjectsNVX)StubUnregisterObjectsNVX; }
+    table->CmdSetViewportWScalingNV = (PFN_vkCmdSetViewportWScalingNV) gpa(device, "vkCmdSetViewportWScalingNV");
+    if (table->CmdSetViewportWScalingNV == nullptr) { table->CmdSetViewportWScalingNV = (PFN_vkCmdSetViewportWScalingNV)StubCmdSetViewportWScalingNV; }
+    table->DisplayPowerControlEXT = (PFN_vkDisplayPowerControlEXT) gpa(device, "vkDisplayPowerControlEXT");
+    if (table->DisplayPowerControlEXT == nullptr) { table->DisplayPowerControlEXT = (PFN_vkDisplayPowerControlEXT)StubDisplayPowerControlEXT; }
+    table->RegisterDeviceEventEXT = (PFN_vkRegisterDeviceEventEXT) gpa(device, "vkRegisterDeviceEventEXT");
+    if (table->RegisterDeviceEventEXT == nullptr) { table->RegisterDeviceEventEXT = (PFN_vkRegisterDeviceEventEXT)StubRegisterDeviceEventEXT; }
+    table->RegisterDisplayEventEXT = (PFN_vkRegisterDisplayEventEXT) gpa(device, "vkRegisterDisplayEventEXT");
+    if (table->RegisterDisplayEventEXT == nullptr) { table->RegisterDisplayEventEXT = (PFN_vkRegisterDisplayEventEXT)StubRegisterDisplayEventEXT; }
+    table->GetSwapchainCounterEXT = (PFN_vkGetSwapchainCounterEXT) gpa(device, "vkGetSwapchainCounterEXT");
+    if (table->GetSwapchainCounterEXT == nullptr) { table->GetSwapchainCounterEXT = (PFN_vkGetSwapchainCounterEXT)StubGetSwapchainCounterEXT; }
+    table->GetRefreshCycleDurationGOOGLE = (PFN_vkGetRefreshCycleDurationGOOGLE) gpa(device, "vkGetRefreshCycleDurationGOOGLE");
+    if (table->GetRefreshCycleDurationGOOGLE == nullptr) { table->GetRefreshCycleDurationGOOGLE = (PFN_vkGetRefreshCycleDurationGOOGLE)StubGetRefreshCycleDurationGOOGLE; }
+    table->GetPastPresentationTimingGOOGLE = (PFN_vkGetPastPresentationTimingGOOGLE) gpa(device, "vkGetPastPresentationTimingGOOGLE");
+    if (table->GetPastPresentationTimingGOOGLE == nullptr) { table->GetPastPresentationTimingGOOGLE = (PFN_vkGetPastPresentationTimingGOOGLE)StubGetPastPresentationTimingGOOGLE; }
+    table->CmdSetDiscardRectangleEXT = (PFN_vkCmdSetDiscardRectangleEXT) gpa(device, "vkCmdSetDiscardRectangleEXT");
+    if (table->CmdSetDiscardRectangleEXT == nullptr) { table->CmdSetDiscardRectangleEXT = (PFN_vkCmdSetDiscardRectangleEXT)StubCmdSetDiscardRectangleEXT; }
+    table->SetHdrMetadataEXT = (PFN_vkSetHdrMetadataEXT) gpa(device, "vkSetHdrMetadataEXT");
+    if (table->SetHdrMetadataEXT == nullptr) { table->SetHdrMetadataEXT = (PFN_vkSetHdrMetadataEXT)StubSetHdrMetadataEXT; }
+    table->SetDebugUtilsObjectNameEXT = (PFN_vkSetDebugUtilsObjectNameEXT) gpa(device, "vkSetDebugUtilsObjectNameEXT");
+    table->SetDebugUtilsObjectTagEXT = (PFN_vkSetDebugUtilsObjectTagEXT) gpa(device, "vkSetDebugUtilsObjectTagEXT");
+    table->QueueBeginDebugUtilsLabelEXT = (PFN_vkQueueBeginDebugUtilsLabelEXT) gpa(device, "vkQueueBeginDebugUtilsLabelEXT");
+    table->QueueEndDebugUtilsLabelEXT = (PFN_vkQueueEndDebugUtilsLabelEXT) gpa(device, "vkQueueEndDebugUtilsLabelEXT");
+    table->QueueInsertDebugUtilsLabelEXT = (PFN_vkQueueInsertDebugUtilsLabelEXT) gpa(device, "vkQueueInsertDebugUtilsLabelEXT");
+    table->CmdBeginDebugUtilsLabelEXT = (PFN_vkCmdBeginDebugUtilsLabelEXT) gpa(device, "vkCmdBeginDebugUtilsLabelEXT");
+    table->CmdEndDebugUtilsLabelEXT = (PFN_vkCmdEndDebugUtilsLabelEXT) gpa(device, "vkCmdEndDebugUtilsLabelEXT");
+    table->CmdInsertDebugUtilsLabelEXT = (PFN_vkCmdInsertDebugUtilsLabelEXT) gpa(device, "vkCmdInsertDebugUtilsLabelEXT");
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    table->GetAndroidHardwareBufferPropertiesANDROID = (PFN_vkGetAndroidHardwareBufferPropertiesANDROID) gpa(device, "vkGetAndroidHardwareBufferPropertiesANDROID");
+    if (table->GetAndroidHardwareBufferPropertiesANDROID == nullptr) { table->GetAndroidHardwareBufferPropertiesANDROID = (PFN_vkGetAndroidHardwareBufferPropertiesANDROID)StubGetAndroidHardwareBufferPropertiesANDROID; }
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    table->GetMemoryAndroidHardwareBufferANDROID = (PFN_vkGetMemoryAndroidHardwareBufferANDROID) gpa(device, "vkGetMemoryAndroidHardwareBufferANDROID");
+    if (table->GetMemoryAndroidHardwareBufferANDROID == nullptr) { table->GetMemoryAndroidHardwareBufferANDROID = (PFN_vkGetMemoryAndroidHardwareBufferANDROID)StubGetMemoryAndroidHardwareBufferANDROID; }
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+    table->CmdSetSampleLocationsEXT = (PFN_vkCmdSetSampleLocationsEXT) gpa(device, "vkCmdSetSampleLocationsEXT");
+    if (table->CmdSetSampleLocationsEXT == nullptr) { table->CmdSetSampleLocationsEXT = (PFN_vkCmdSetSampleLocationsEXT)StubCmdSetSampleLocationsEXT; }
+    table->GetImageDrmFormatModifierPropertiesEXT = (PFN_vkGetImageDrmFormatModifierPropertiesEXT) gpa(device, "vkGetImageDrmFormatModifierPropertiesEXT");
+    if (table->GetImageDrmFormatModifierPropertiesEXT == nullptr) { table->GetImageDrmFormatModifierPropertiesEXT = (PFN_vkGetImageDrmFormatModifierPropertiesEXT)StubGetImageDrmFormatModifierPropertiesEXT; }
+    table->CreateValidationCacheEXT = (PFN_vkCreateValidationCacheEXT) gpa(device, "vkCreateValidationCacheEXT");
+    if (table->CreateValidationCacheEXT == nullptr) { table->CreateValidationCacheEXT = (PFN_vkCreateValidationCacheEXT)StubCreateValidationCacheEXT; }
+    table->DestroyValidationCacheEXT = (PFN_vkDestroyValidationCacheEXT) gpa(device, "vkDestroyValidationCacheEXT");
+    if (table->DestroyValidationCacheEXT == nullptr) { table->DestroyValidationCacheEXT = (PFN_vkDestroyValidationCacheEXT)StubDestroyValidationCacheEXT; }
+    table->MergeValidationCachesEXT = (PFN_vkMergeValidationCachesEXT) gpa(device, "vkMergeValidationCachesEXT");
+    if (table->MergeValidationCachesEXT == nullptr) { table->MergeValidationCachesEXT = (PFN_vkMergeValidationCachesEXT)StubMergeValidationCachesEXT; }
+    table->GetValidationCacheDataEXT = (PFN_vkGetValidationCacheDataEXT) gpa(device, "vkGetValidationCacheDataEXT");
+    if (table->GetValidationCacheDataEXT == nullptr) { table->GetValidationCacheDataEXT = (PFN_vkGetValidationCacheDataEXT)StubGetValidationCacheDataEXT; }
+    table->CmdBindShadingRateImageNV = (PFN_vkCmdBindShadingRateImageNV) gpa(device, "vkCmdBindShadingRateImageNV");
+    if (table->CmdBindShadingRateImageNV == nullptr) { table->CmdBindShadingRateImageNV = (PFN_vkCmdBindShadingRateImageNV)StubCmdBindShadingRateImageNV; }
+    table->CmdSetViewportShadingRatePaletteNV = (PFN_vkCmdSetViewportShadingRatePaletteNV) gpa(device, "vkCmdSetViewportShadingRatePaletteNV");
+    if (table->CmdSetViewportShadingRatePaletteNV == nullptr) { table->CmdSetViewportShadingRatePaletteNV = (PFN_vkCmdSetViewportShadingRatePaletteNV)StubCmdSetViewportShadingRatePaletteNV; }
+    table->CmdSetCoarseSampleOrderNV = (PFN_vkCmdSetCoarseSampleOrderNV) gpa(device, "vkCmdSetCoarseSampleOrderNV");
+    if (table->CmdSetCoarseSampleOrderNV == nullptr) { table->CmdSetCoarseSampleOrderNV = (PFN_vkCmdSetCoarseSampleOrderNV)StubCmdSetCoarseSampleOrderNV; }
+    table->CreateAccelerationStructureNV = (PFN_vkCreateAccelerationStructureNV) gpa(device, "vkCreateAccelerationStructureNV");
+    if (table->CreateAccelerationStructureNV == nullptr) { table->CreateAccelerationStructureNV = (PFN_vkCreateAccelerationStructureNV)StubCreateAccelerationStructureNV; }
+    table->DestroyAccelerationStructureNV = (PFN_vkDestroyAccelerationStructureNV) gpa(device, "vkDestroyAccelerationStructureNV");
+    if (table->DestroyAccelerationStructureNV == nullptr) { table->DestroyAccelerationStructureNV = (PFN_vkDestroyAccelerationStructureNV)StubDestroyAccelerationStructureNV; }
+    table->GetAccelerationStructureMemoryRequirementsNV = (PFN_vkGetAccelerationStructureMemoryRequirementsNV) gpa(device, "vkGetAccelerationStructureMemoryRequirementsNV");
+    if (table->GetAccelerationStructureMemoryRequirementsNV == nullptr) { table->GetAccelerationStructureMemoryRequirementsNV = (PFN_vkGetAccelerationStructureMemoryRequirementsNV)StubGetAccelerationStructureMemoryRequirementsNV; }
+    table->BindAccelerationStructureMemoryNV = (PFN_vkBindAccelerationStructureMemoryNV) gpa(device, "vkBindAccelerationStructureMemoryNV");
+    if (table->BindAccelerationStructureMemoryNV == nullptr) { table->BindAccelerationStructureMemoryNV = (PFN_vkBindAccelerationStructureMemoryNV)StubBindAccelerationStructureMemoryNV; }
+    table->CmdBuildAccelerationStructureNV = (PFN_vkCmdBuildAccelerationStructureNV) gpa(device, "vkCmdBuildAccelerationStructureNV");
+    if (table->CmdBuildAccelerationStructureNV == nullptr) { table->CmdBuildAccelerationStructureNV = (PFN_vkCmdBuildAccelerationStructureNV)StubCmdBuildAccelerationStructureNV; }
+    table->CmdCopyAccelerationStructureNV = (PFN_vkCmdCopyAccelerationStructureNV) gpa(device, "vkCmdCopyAccelerationStructureNV");
+    if (table->CmdCopyAccelerationStructureNV == nullptr) { table->CmdCopyAccelerationStructureNV = (PFN_vkCmdCopyAccelerationStructureNV)StubCmdCopyAccelerationStructureNV; }
+    table->CmdTraceRaysNV = (PFN_vkCmdTraceRaysNV) gpa(device, "vkCmdTraceRaysNV");
+    if (table->CmdTraceRaysNV == nullptr) { table->CmdTraceRaysNV = (PFN_vkCmdTraceRaysNV)StubCmdTraceRaysNV; }
+    table->CreateRayTracingPipelinesNV = (PFN_vkCreateRayTracingPipelinesNV) gpa(device, "vkCreateRayTracingPipelinesNV");
+    if (table->CreateRayTracingPipelinesNV == nullptr) { table->CreateRayTracingPipelinesNV = (PFN_vkCreateRayTracingPipelinesNV)StubCreateRayTracingPipelinesNV; }
+    table->GetRayTracingShaderGroupHandlesNV = (PFN_vkGetRayTracingShaderGroupHandlesNV) gpa(device, "vkGetRayTracingShaderGroupHandlesNV");
+    if (table->GetRayTracingShaderGroupHandlesNV == nullptr) { table->GetRayTracingShaderGroupHandlesNV = (PFN_vkGetRayTracingShaderGroupHandlesNV)StubGetRayTracingShaderGroupHandlesNV; }
+    table->GetAccelerationStructureHandleNV = (PFN_vkGetAccelerationStructureHandleNV) gpa(device, "vkGetAccelerationStructureHandleNV");
+    if (table->GetAccelerationStructureHandleNV == nullptr) { table->GetAccelerationStructureHandleNV = (PFN_vkGetAccelerationStructureHandleNV)StubGetAccelerationStructureHandleNV; }
+    table->CmdWriteAccelerationStructuresPropertiesNV = (PFN_vkCmdWriteAccelerationStructuresPropertiesNV) gpa(device, "vkCmdWriteAccelerationStructuresPropertiesNV");
+    if (table->CmdWriteAccelerationStructuresPropertiesNV == nullptr) { table->CmdWriteAccelerationStructuresPropertiesNV = (PFN_vkCmdWriteAccelerationStructuresPropertiesNV)StubCmdWriteAccelerationStructuresPropertiesNV; }
+    table->CompileDeferredNV = (PFN_vkCompileDeferredNV) gpa(device, "vkCompileDeferredNV");
+    if (table->CompileDeferredNV == nullptr) { table->CompileDeferredNV = (PFN_vkCompileDeferredNV)StubCompileDeferredNV; }
+    table->GetMemoryHostPointerPropertiesEXT = (PFN_vkGetMemoryHostPointerPropertiesEXT) gpa(device, "vkGetMemoryHostPointerPropertiesEXT");
+    if (table->GetMemoryHostPointerPropertiesEXT == nullptr) { table->GetMemoryHostPointerPropertiesEXT = (PFN_vkGetMemoryHostPointerPropertiesEXT)StubGetMemoryHostPointerPropertiesEXT; }
+    table->CmdWriteBufferMarkerAMD = (PFN_vkCmdWriteBufferMarkerAMD) gpa(device, "vkCmdWriteBufferMarkerAMD");
+    if (table->CmdWriteBufferMarkerAMD == nullptr) { table->CmdWriteBufferMarkerAMD = (PFN_vkCmdWriteBufferMarkerAMD)StubCmdWriteBufferMarkerAMD; }
+    table->GetCalibratedTimestampsEXT = (PFN_vkGetCalibratedTimestampsEXT) gpa(device, "vkGetCalibratedTimestampsEXT");
+    if (table->GetCalibratedTimestampsEXT == nullptr) { table->GetCalibratedTimestampsEXT = (PFN_vkGetCalibratedTimestampsEXT)StubGetCalibratedTimestampsEXT; }
+    table->CmdDrawMeshTasksNV = (PFN_vkCmdDrawMeshTasksNV) gpa(device, "vkCmdDrawMeshTasksNV");
+    if (table->CmdDrawMeshTasksNV == nullptr) { table->CmdDrawMeshTasksNV = (PFN_vkCmdDrawMeshTasksNV)StubCmdDrawMeshTasksNV; }
+    table->CmdDrawMeshTasksIndirectNV = (PFN_vkCmdDrawMeshTasksIndirectNV) gpa(device, "vkCmdDrawMeshTasksIndirectNV");
+    if (table->CmdDrawMeshTasksIndirectNV == nullptr) { table->CmdDrawMeshTasksIndirectNV = (PFN_vkCmdDrawMeshTasksIndirectNV)StubCmdDrawMeshTasksIndirectNV; }
+    table->CmdDrawMeshTasksIndirectCountNV = (PFN_vkCmdDrawMeshTasksIndirectCountNV) gpa(device, "vkCmdDrawMeshTasksIndirectCountNV");
+    if (table->CmdDrawMeshTasksIndirectCountNV == nullptr) { table->CmdDrawMeshTasksIndirectCountNV = (PFN_vkCmdDrawMeshTasksIndirectCountNV)StubCmdDrawMeshTasksIndirectCountNV; }
+    table->CmdSetExclusiveScissorNV = (PFN_vkCmdSetExclusiveScissorNV) gpa(device, "vkCmdSetExclusiveScissorNV");
+    if (table->CmdSetExclusiveScissorNV == nullptr) { table->CmdSetExclusiveScissorNV = (PFN_vkCmdSetExclusiveScissorNV)StubCmdSetExclusiveScissorNV; }
+    table->CmdSetCheckpointNV = (PFN_vkCmdSetCheckpointNV) gpa(device, "vkCmdSetCheckpointNV");
+    if (table->CmdSetCheckpointNV == nullptr) { table->CmdSetCheckpointNV = (PFN_vkCmdSetCheckpointNV)StubCmdSetCheckpointNV; }
+    table->GetQueueCheckpointDataNV = (PFN_vkGetQueueCheckpointDataNV) gpa(device, "vkGetQueueCheckpointDataNV");
+    if (table->GetQueueCheckpointDataNV == nullptr) { table->GetQueueCheckpointDataNV = (PFN_vkGetQueueCheckpointDataNV)StubGetQueueCheckpointDataNV; }
+    table->InitializePerformanceApiINTEL = (PFN_vkInitializePerformanceApiINTEL) gpa(device, "vkInitializePerformanceApiINTEL");
+    if (table->InitializePerformanceApiINTEL == nullptr) { table->InitializePerformanceApiINTEL = (PFN_vkInitializePerformanceApiINTEL)StubInitializePerformanceApiINTEL; }
+    table->UninitializePerformanceApiINTEL = (PFN_vkUninitializePerformanceApiINTEL) gpa(device, "vkUninitializePerformanceApiINTEL");
+    if (table->UninitializePerformanceApiINTEL == nullptr) { table->UninitializePerformanceApiINTEL = (PFN_vkUninitializePerformanceApiINTEL)StubUninitializePerformanceApiINTEL; }
+    table->CmdSetPerformanceMarkerINTEL = (PFN_vkCmdSetPerformanceMarkerINTEL) gpa(device, "vkCmdSetPerformanceMarkerINTEL");
+    if (table->CmdSetPerformanceMarkerINTEL == nullptr) { table->CmdSetPerformanceMarkerINTEL = (PFN_vkCmdSetPerformanceMarkerINTEL)StubCmdSetPerformanceMarkerINTEL; }
+    table->CmdSetPerformanceStreamMarkerINTEL = (PFN_vkCmdSetPerformanceStreamMarkerINTEL) gpa(device, "vkCmdSetPerformanceStreamMarkerINTEL");
+    if (table->CmdSetPerformanceStreamMarkerINTEL == nullptr) { table->CmdSetPerformanceStreamMarkerINTEL = (PFN_vkCmdSetPerformanceStreamMarkerINTEL)StubCmdSetPerformanceStreamMarkerINTEL; }
+    table->CmdSetPerformanceOverrideINTEL = (PFN_vkCmdSetPerformanceOverrideINTEL) gpa(device, "vkCmdSetPerformanceOverrideINTEL");
+    if (table->CmdSetPerformanceOverrideINTEL == nullptr) { table->CmdSetPerformanceOverrideINTEL = (PFN_vkCmdSetPerformanceOverrideINTEL)StubCmdSetPerformanceOverrideINTEL; }
+    table->AcquirePerformanceConfigurationINTEL = (PFN_vkAcquirePerformanceConfigurationINTEL) gpa(device, "vkAcquirePerformanceConfigurationINTEL");
+    if (table->AcquirePerformanceConfigurationINTEL == nullptr) { table->AcquirePerformanceConfigurationINTEL = (PFN_vkAcquirePerformanceConfigurationINTEL)StubAcquirePerformanceConfigurationINTEL; }
+    table->ReleasePerformanceConfigurationINTEL = (PFN_vkReleasePerformanceConfigurationINTEL) gpa(device, "vkReleasePerformanceConfigurationINTEL");
+    if (table->ReleasePerformanceConfigurationINTEL == nullptr) { table->ReleasePerformanceConfigurationINTEL = (PFN_vkReleasePerformanceConfigurationINTEL)StubReleasePerformanceConfigurationINTEL; }
+    table->QueueSetPerformanceConfigurationINTEL = (PFN_vkQueueSetPerformanceConfigurationINTEL) gpa(device, "vkQueueSetPerformanceConfigurationINTEL");
+    if (table->QueueSetPerformanceConfigurationINTEL == nullptr) { table->QueueSetPerformanceConfigurationINTEL = (PFN_vkQueueSetPerformanceConfigurationINTEL)StubQueueSetPerformanceConfigurationINTEL; }
+    table->GetPerformanceParameterINTEL = (PFN_vkGetPerformanceParameterINTEL) gpa(device, "vkGetPerformanceParameterINTEL");
+    if (table->GetPerformanceParameterINTEL == nullptr) { table->GetPerformanceParameterINTEL = (PFN_vkGetPerformanceParameterINTEL)StubGetPerformanceParameterINTEL; }
+    table->SetLocalDimmingAMD = (PFN_vkSetLocalDimmingAMD) gpa(device, "vkSetLocalDimmingAMD");
+    if (table->SetLocalDimmingAMD == nullptr) { table->SetLocalDimmingAMD = (PFN_vkSetLocalDimmingAMD)StubSetLocalDimmingAMD; }
+    table->GetBufferDeviceAddressEXT = (PFN_vkGetBufferDeviceAddressEXT) gpa(device, "vkGetBufferDeviceAddressEXT");
+    if (table->GetBufferDeviceAddressEXT == nullptr) { table->GetBufferDeviceAddressEXT = (PFN_vkGetBufferDeviceAddressEXT)StubGetBufferDeviceAddressEXT; }
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->AcquireFullScreenExclusiveModeEXT = (PFN_vkAcquireFullScreenExclusiveModeEXT) gpa(device, "vkAcquireFullScreenExclusiveModeEXT");
+    if (table->AcquireFullScreenExclusiveModeEXT == nullptr) { table->AcquireFullScreenExclusiveModeEXT = (PFN_vkAcquireFullScreenExclusiveModeEXT)StubAcquireFullScreenExclusiveModeEXT; }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->ReleaseFullScreenExclusiveModeEXT = (PFN_vkReleaseFullScreenExclusiveModeEXT) gpa(device, "vkReleaseFullScreenExclusiveModeEXT");
+    if (table->ReleaseFullScreenExclusiveModeEXT == nullptr) { table->ReleaseFullScreenExclusiveModeEXT = (PFN_vkReleaseFullScreenExclusiveModeEXT)StubReleaseFullScreenExclusiveModeEXT; }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->GetDeviceGroupSurfacePresentModes2EXT = (PFN_vkGetDeviceGroupSurfacePresentModes2EXT) gpa(device, "vkGetDeviceGroupSurfacePresentModes2EXT");
+    if (table->GetDeviceGroupSurfacePresentModes2EXT == nullptr) { table->GetDeviceGroupSurfacePresentModes2EXT = (PFN_vkGetDeviceGroupSurfacePresentModes2EXT)StubGetDeviceGroupSurfacePresentModes2EXT; }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+    table->CmdSetLineStippleEXT = (PFN_vkCmdSetLineStippleEXT) gpa(device, "vkCmdSetLineStippleEXT");
+    if (table->CmdSetLineStippleEXT == nullptr) { table->CmdSetLineStippleEXT = (PFN_vkCmdSetLineStippleEXT)StubCmdSetLineStippleEXT; }
+    table->ResetQueryPoolEXT = (PFN_vkResetQueryPoolEXT) gpa(device, "vkResetQueryPoolEXT");
+    if (table->ResetQueryPoolEXT == nullptr) { table->ResetQueryPoolEXT = (PFN_vkResetQueryPoolEXT)StubResetQueryPoolEXT; }
+}
+
+
+static inline void layer_init_instance_dispatch_table(VkInstance instance, VkLayerInstanceDispatchTable *table, PFN_vkGetInstanceProcAddr gpa) {
+    memset(table, 0, sizeof(*table));
+    // Instance function pointers
+    table->DestroyInstance = (PFN_vkDestroyInstance) gpa(instance, "vkDestroyInstance");
+    table->EnumeratePhysicalDevices = (PFN_vkEnumeratePhysicalDevices) gpa(instance, "vkEnumeratePhysicalDevices");
+    table->GetPhysicalDeviceFeatures = (PFN_vkGetPhysicalDeviceFeatures) gpa(instance, "vkGetPhysicalDeviceFeatures");
+    table->GetPhysicalDeviceFormatProperties = (PFN_vkGetPhysicalDeviceFormatProperties) gpa(instance, "vkGetPhysicalDeviceFormatProperties");
+    table->GetPhysicalDeviceImageFormatProperties = (PFN_vkGetPhysicalDeviceImageFormatProperties) gpa(instance, "vkGetPhysicalDeviceImageFormatProperties");
+    table->GetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties) gpa(instance, "vkGetPhysicalDeviceProperties");
+    table->GetPhysicalDeviceQueueFamilyProperties = (PFN_vkGetPhysicalDeviceQueueFamilyProperties) gpa(instance, "vkGetPhysicalDeviceQueueFamilyProperties");
+    table->GetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties) gpa(instance, "vkGetPhysicalDeviceMemoryProperties");
+    table->GetInstanceProcAddr = gpa;
+    table->EnumerateDeviceExtensionProperties = (PFN_vkEnumerateDeviceExtensionProperties) gpa(instance, "vkEnumerateDeviceExtensionProperties");
+    table->EnumerateDeviceLayerProperties = (PFN_vkEnumerateDeviceLayerProperties) gpa(instance, "vkEnumerateDeviceLayerProperties");
+    table->GetPhysicalDeviceSparseImageFormatProperties = (PFN_vkGetPhysicalDeviceSparseImageFormatProperties) gpa(instance, "vkGetPhysicalDeviceSparseImageFormatProperties");
+    table->EnumeratePhysicalDeviceGroups = (PFN_vkEnumeratePhysicalDeviceGroups) gpa(instance, "vkEnumeratePhysicalDeviceGroups");
+    table->GetPhysicalDeviceFeatures2 = (PFN_vkGetPhysicalDeviceFeatures2) gpa(instance, "vkGetPhysicalDeviceFeatures2");
+    table->GetPhysicalDeviceProperties2 = (PFN_vkGetPhysicalDeviceProperties2) gpa(instance, "vkGetPhysicalDeviceProperties2");
+    table->GetPhysicalDeviceFormatProperties2 = (PFN_vkGetPhysicalDeviceFormatProperties2) gpa(instance, "vkGetPhysicalDeviceFormatProperties2");
+    table->GetPhysicalDeviceImageFormatProperties2 = (PFN_vkGetPhysicalDeviceImageFormatProperties2) gpa(instance, "vkGetPhysicalDeviceImageFormatProperties2");
+    table->GetPhysicalDeviceQueueFamilyProperties2 = (PFN_vkGetPhysicalDeviceQueueFamilyProperties2) gpa(instance, "vkGetPhysicalDeviceQueueFamilyProperties2");
+    table->GetPhysicalDeviceMemoryProperties2 = (PFN_vkGetPhysicalDeviceMemoryProperties2) gpa(instance, "vkGetPhysicalDeviceMemoryProperties2");
+    table->GetPhysicalDeviceSparseImageFormatProperties2 = (PFN_vkGetPhysicalDeviceSparseImageFormatProperties2) gpa(instance, "vkGetPhysicalDeviceSparseImageFormatProperties2");
+    table->GetPhysicalDeviceExternalBufferProperties = (PFN_vkGetPhysicalDeviceExternalBufferProperties) gpa(instance, "vkGetPhysicalDeviceExternalBufferProperties");
+    table->GetPhysicalDeviceExternalFenceProperties = (PFN_vkGetPhysicalDeviceExternalFenceProperties) gpa(instance, "vkGetPhysicalDeviceExternalFenceProperties");
+    table->GetPhysicalDeviceExternalSemaphoreProperties = (PFN_vkGetPhysicalDeviceExternalSemaphoreProperties) gpa(instance, "vkGetPhysicalDeviceExternalSemaphoreProperties");
+    table->DestroySurfaceKHR = (PFN_vkDestroySurfaceKHR) gpa(instance, "vkDestroySurfaceKHR");
+    table->GetPhysicalDeviceSurfaceSupportKHR = (PFN_vkGetPhysicalDeviceSurfaceSupportKHR) gpa(instance, "vkGetPhysicalDeviceSurfaceSupportKHR");
+    table->GetPhysicalDeviceSurfaceCapabilitiesKHR = (PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR) gpa(instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR");
+    table->GetPhysicalDeviceSurfaceFormatsKHR = (PFN_vkGetPhysicalDeviceSurfaceFormatsKHR) gpa(instance, "vkGetPhysicalDeviceSurfaceFormatsKHR");
+    table->GetPhysicalDeviceSurfacePresentModesKHR = (PFN_vkGetPhysicalDeviceSurfacePresentModesKHR) gpa(instance, "vkGetPhysicalDeviceSurfacePresentModesKHR");
+    table->GetPhysicalDevicePresentRectanglesKHR = (PFN_vkGetPhysicalDevicePresentRectanglesKHR) gpa(instance, "vkGetPhysicalDevicePresentRectanglesKHR");
+    table->GetPhysicalDeviceDisplayPropertiesKHR = (PFN_vkGetPhysicalDeviceDisplayPropertiesKHR) gpa(instance, "vkGetPhysicalDeviceDisplayPropertiesKHR");
+    table->GetPhysicalDeviceDisplayPlanePropertiesKHR = (PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR) gpa(instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR");
+    table->GetDisplayPlaneSupportedDisplaysKHR = (PFN_vkGetDisplayPlaneSupportedDisplaysKHR) gpa(instance, "vkGetDisplayPlaneSupportedDisplaysKHR");
+    table->GetDisplayModePropertiesKHR = (PFN_vkGetDisplayModePropertiesKHR) gpa(instance, "vkGetDisplayModePropertiesKHR");
+    table->CreateDisplayModeKHR = (PFN_vkCreateDisplayModeKHR) gpa(instance, "vkCreateDisplayModeKHR");
+    table->GetDisplayPlaneCapabilitiesKHR = (PFN_vkGetDisplayPlaneCapabilitiesKHR) gpa(instance, "vkGetDisplayPlaneCapabilitiesKHR");
+    table->CreateDisplayPlaneSurfaceKHR = (PFN_vkCreateDisplayPlaneSurfaceKHR) gpa(instance, "vkCreateDisplayPlaneSurfaceKHR");
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    table->CreateXlibSurfaceKHR = (PFN_vkCreateXlibSurfaceKHR) gpa(instance, "vkCreateXlibSurfaceKHR");
+#endif // VK_USE_PLATFORM_XLIB_KHR
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    table->GetPhysicalDeviceXlibPresentationSupportKHR = (PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR");
+#endif // VK_USE_PLATFORM_XLIB_KHR
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    table->CreateXcbSurfaceKHR = (PFN_vkCreateXcbSurfaceKHR) gpa(instance, "vkCreateXcbSurfaceKHR");
+#endif // VK_USE_PLATFORM_XCB_KHR
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    table->GetPhysicalDeviceXcbPresentationSupportKHR = (PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR");
+#endif // VK_USE_PLATFORM_XCB_KHR
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    table->CreateWaylandSurfaceKHR = (PFN_vkCreateWaylandSurfaceKHR) gpa(instance, "vkCreateWaylandSurfaceKHR");
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    table->GetPhysicalDeviceWaylandPresentationSupportKHR = (PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR");
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    table->CreateAndroidSurfaceKHR = (PFN_vkCreateAndroidSurfaceKHR) gpa(instance, "vkCreateAndroidSurfaceKHR");
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->CreateWin32SurfaceKHR = (PFN_vkCreateWin32SurfaceKHR) gpa(instance, "vkCreateWin32SurfaceKHR");
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->GetPhysicalDeviceWin32PresentationSupportKHR = (PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR");
+#endif // VK_USE_PLATFORM_WIN32_KHR
+    table->GetPhysicalDeviceFeatures2KHR = (PFN_vkGetPhysicalDeviceFeatures2KHR) gpa(instance, "vkGetPhysicalDeviceFeatures2KHR");
+    table->GetPhysicalDeviceProperties2KHR = (PFN_vkGetPhysicalDeviceProperties2KHR) gpa(instance, "vkGetPhysicalDeviceProperties2KHR");
+    table->GetPhysicalDeviceFormatProperties2KHR = (PFN_vkGetPhysicalDeviceFormatProperties2KHR) gpa(instance, "vkGetPhysicalDeviceFormatProperties2KHR");
+    table->GetPhysicalDeviceImageFormatProperties2KHR = (PFN_vkGetPhysicalDeviceImageFormatProperties2KHR) gpa(instance, "vkGetPhysicalDeviceImageFormatProperties2KHR");
+    table->GetPhysicalDeviceQueueFamilyProperties2KHR = (PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR) gpa(instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR");
+    table->GetPhysicalDeviceMemoryProperties2KHR = (PFN_vkGetPhysicalDeviceMemoryProperties2KHR) gpa(instance, "vkGetPhysicalDeviceMemoryProperties2KHR");
+    table->GetPhysicalDeviceSparseImageFormatProperties2KHR = (PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR) gpa(instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR");
+    table->EnumeratePhysicalDeviceGroupsKHR = (PFN_vkEnumeratePhysicalDeviceGroupsKHR) gpa(instance, "vkEnumeratePhysicalDeviceGroupsKHR");
+    table->GetPhysicalDeviceExternalBufferPropertiesKHR = (PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR) gpa(instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR");
+    table->GetPhysicalDeviceExternalSemaphorePropertiesKHR = (PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR) gpa(instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR");
+    table->GetPhysicalDeviceExternalFencePropertiesKHR = (PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR) gpa(instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR");
+    table->EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = (PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR) gpa(instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR");
+    table->GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = (PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR) gpa(instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR");
+    table->GetPhysicalDeviceSurfaceCapabilities2KHR = (PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR) gpa(instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR");
+    table->GetPhysicalDeviceSurfaceFormats2KHR = (PFN_vkGetPhysicalDeviceSurfaceFormats2KHR) gpa(instance, "vkGetPhysicalDeviceSurfaceFormats2KHR");
+    table->GetPhysicalDeviceDisplayProperties2KHR = (PFN_vkGetPhysicalDeviceDisplayProperties2KHR) gpa(instance, "vkGetPhysicalDeviceDisplayProperties2KHR");
+    table->GetPhysicalDeviceDisplayPlaneProperties2KHR = (PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR) gpa(instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR");
+    table->GetDisplayModeProperties2KHR = (PFN_vkGetDisplayModeProperties2KHR) gpa(instance, "vkGetDisplayModeProperties2KHR");
+    table->GetDisplayPlaneCapabilities2KHR = (PFN_vkGetDisplayPlaneCapabilities2KHR) gpa(instance, "vkGetDisplayPlaneCapabilities2KHR");
+    table->CreateDebugReportCallbackEXT = (PFN_vkCreateDebugReportCallbackEXT) gpa(instance, "vkCreateDebugReportCallbackEXT");
+    table->DestroyDebugReportCallbackEXT = (PFN_vkDestroyDebugReportCallbackEXT) gpa(instance, "vkDestroyDebugReportCallbackEXT");
+    table->DebugReportMessageEXT = (PFN_vkDebugReportMessageEXT) gpa(instance, "vkDebugReportMessageEXT");
+#ifdef VK_USE_PLATFORM_GGP
+    table->CreateStreamDescriptorSurfaceGGP = (PFN_vkCreateStreamDescriptorSurfaceGGP) gpa(instance, "vkCreateStreamDescriptorSurfaceGGP");
+#endif // VK_USE_PLATFORM_GGP
+    table->GetPhysicalDeviceExternalImageFormatPropertiesNV = (PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV) gpa(instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV");
+#ifdef VK_USE_PLATFORM_VI_NN
+    table->CreateViSurfaceNN = (PFN_vkCreateViSurfaceNN) gpa(instance, "vkCreateViSurfaceNN");
+#endif // VK_USE_PLATFORM_VI_NN
+    table->GetPhysicalDeviceGeneratedCommandsPropertiesNVX = (PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX) gpa(instance, "vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX");
+    table->ReleaseDisplayEXT = (PFN_vkReleaseDisplayEXT) gpa(instance, "vkReleaseDisplayEXT");
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    table->AcquireXlibDisplayEXT = (PFN_vkAcquireXlibDisplayEXT) gpa(instance, "vkAcquireXlibDisplayEXT");
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    table->GetRandROutputDisplayEXT = (PFN_vkGetRandROutputDisplayEXT) gpa(instance, "vkGetRandROutputDisplayEXT");
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    table->GetPhysicalDeviceSurfaceCapabilities2EXT = (PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT) gpa(instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT");
+#ifdef VK_USE_PLATFORM_IOS_MVK
+    table->CreateIOSSurfaceMVK = (PFN_vkCreateIOSSurfaceMVK) gpa(instance, "vkCreateIOSSurfaceMVK");
+#endif // VK_USE_PLATFORM_IOS_MVK
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+    table->CreateMacOSSurfaceMVK = (PFN_vkCreateMacOSSurfaceMVK) gpa(instance, "vkCreateMacOSSurfaceMVK");
+#endif // VK_USE_PLATFORM_MACOS_MVK
+    table->CreateDebugUtilsMessengerEXT = (PFN_vkCreateDebugUtilsMessengerEXT) gpa(instance, "vkCreateDebugUtilsMessengerEXT");
+    table->DestroyDebugUtilsMessengerEXT = (PFN_vkDestroyDebugUtilsMessengerEXT) gpa(instance, "vkDestroyDebugUtilsMessengerEXT");
+    table->SubmitDebugUtilsMessageEXT = (PFN_vkSubmitDebugUtilsMessageEXT) gpa(instance, "vkSubmitDebugUtilsMessageEXT");
+    table->GetPhysicalDeviceMultisamplePropertiesEXT = (PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT) gpa(instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT");
+    table->GetPhysicalDeviceCalibrateableTimeDomainsEXT = (PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT) gpa(instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT");
+#ifdef VK_USE_PLATFORM_FUCHSIA
+    table->CreateImagePipeSurfaceFUCHSIA = (PFN_vkCreateImagePipeSurfaceFUCHSIA) gpa(instance, "vkCreateImagePipeSurfaceFUCHSIA");
+#endif // VK_USE_PLATFORM_FUCHSIA
+#ifdef VK_USE_PLATFORM_METAL_EXT
+    table->CreateMetalSurfaceEXT = (PFN_vkCreateMetalSurfaceEXT) gpa(instance, "vkCreateMetalSurfaceEXT");
+#endif // VK_USE_PLATFORM_METAL_EXT
+    table->GetPhysicalDeviceCooperativeMatrixPropertiesNV = (PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV) gpa(instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV");
+    table->GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = (PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV) gpa(instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV");
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->GetPhysicalDeviceSurfacePresentModes2EXT = (PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT) gpa(instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT");
+#endif // VK_USE_PLATFORM_WIN32_KHR
+    table->CreateHeadlessSurfaceEXT = (PFN_vkCreateHeadlessSurfaceEXT) gpa(instance, "vkCreateHeadlessSurfaceEXT");
+}
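
For context, the generated helpers above are what a layer calls once per VkDevice or VkInstance to populate its dispatch table, and the Stub* fallbacks keep every device-table slot non-null so a layer can dispatch through any entry without first checking whether the down-chain driver exposes the corresponding extension. The sketch below is not part of the imported patch; the function name, the `device_table` parameter, and the assumption that the table was already filled by layer_init_device_dispatch_table with the next layer's vkGetDeviceProcAddr are all illustrative.

    #include <vulkan/vulkan.h>
    #include "vk_layer_dispatch_table.h"  // generated header added by this patch

    // Illustrative only: `device_table` is assumed to have been populated via
    // layer_init_device_dispatch_table(device, device_table, next_gdpa).
    static VkResult wait_on_timeline_semaphore(VkDevice device,
                                               const VkLayerDispatchTable *device_table,
                                               const VkSemaphoreWaitInfoKHR *wait_info) {
        // Dispatch through the table rather than the exported symbol. If the
        // down-chain vkGetDeviceProcAddr returned NULL for this entry point,
        // the init helper above installed StubWaitSemaphoresKHR in the slot,
        // so the call is still safe to make (it simply does nothing useful).
        return device_table->WaitSemaphoresKHR(device, wait_info, UINT64_MAX);
    }
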
diff --git a/src/third_party/vulkan-loader/src/loader/generated/vk_layer_dispatch_table.h b/src/third_party/vulkan-loader/src/loader/generated/vk_layer_dispatch_table.h
new file mode 100644
index 0000000..f822acb
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/generated/vk_layer_dispatch_table.h
@@ -0,0 +1,659 @@
+// *** THIS FILE IS GENERATED - DO NOT EDIT ***
+// See loader_extension_generator.py for modifications
+
+/*
+ * Copyright (c) 2015-2017 The Khronos Group Inc.
+ * Copyright (c) 2015-2017 Valve Corporation
+ * Copyright (c) 2015-2017 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Mark Young <marky@lunarg.com>
+ */
+
+#pragma once
+
+typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_GetPhysicalDeviceProcAddr)(VkInstance instance, const char* pName);
+
+// Instance function pointer dispatch table
+typedef struct VkLayerInstanceDispatchTable_ {
+    // Manually add in GetPhysicalDeviceProcAddr entry
+    PFN_GetPhysicalDeviceProcAddr GetPhysicalDeviceProcAddr;
+
+    // ---- Core 1_0 commands
+    PFN_vkCreateInstance CreateInstance;
+    PFN_vkDestroyInstance DestroyInstance;
+    PFN_vkEnumeratePhysicalDevices EnumeratePhysicalDevices;
+    PFN_vkGetPhysicalDeviceFeatures GetPhysicalDeviceFeatures;
+    PFN_vkGetPhysicalDeviceFormatProperties GetPhysicalDeviceFormatProperties;
+    PFN_vkGetPhysicalDeviceImageFormatProperties GetPhysicalDeviceImageFormatProperties;
+    PFN_vkGetPhysicalDeviceProperties GetPhysicalDeviceProperties;
+    PFN_vkGetPhysicalDeviceQueueFamilyProperties GetPhysicalDeviceQueueFamilyProperties;
+    PFN_vkGetPhysicalDeviceMemoryProperties GetPhysicalDeviceMemoryProperties;
+    PFN_vkGetInstanceProcAddr GetInstanceProcAddr;
+    PFN_vkCreateDevice CreateDevice;
+    PFN_vkEnumerateInstanceExtensionProperties EnumerateInstanceExtensionProperties;
+    PFN_vkEnumerateDeviceExtensionProperties EnumerateDeviceExtensionProperties;
+    PFN_vkEnumerateInstanceLayerProperties EnumerateInstanceLayerProperties;
+    PFN_vkEnumerateDeviceLayerProperties EnumerateDeviceLayerProperties;
+    PFN_vkGetPhysicalDeviceSparseImageFormatProperties GetPhysicalDeviceSparseImageFormatProperties;
+
+    // ---- Core 1_1 commands
+    PFN_vkEnumerateInstanceVersion EnumerateInstanceVersion;
+    PFN_vkEnumeratePhysicalDeviceGroups EnumeratePhysicalDeviceGroups;
+    PFN_vkGetPhysicalDeviceFeatures2 GetPhysicalDeviceFeatures2;
+    PFN_vkGetPhysicalDeviceProperties2 GetPhysicalDeviceProperties2;
+    PFN_vkGetPhysicalDeviceFormatProperties2 GetPhysicalDeviceFormatProperties2;
+    PFN_vkGetPhysicalDeviceImageFormatProperties2 GetPhysicalDeviceImageFormatProperties2;
+    PFN_vkGetPhysicalDeviceQueueFamilyProperties2 GetPhysicalDeviceQueueFamilyProperties2;
+    PFN_vkGetPhysicalDeviceMemoryProperties2 GetPhysicalDeviceMemoryProperties2;
+    PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 GetPhysicalDeviceSparseImageFormatProperties2;
+    PFN_vkGetPhysicalDeviceExternalBufferProperties GetPhysicalDeviceExternalBufferProperties;
+    PFN_vkGetPhysicalDeviceExternalFenceProperties GetPhysicalDeviceExternalFenceProperties;
+    PFN_vkGetPhysicalDeviceExternalSemaphoreProperties GetPhysicalDeviceExternalSemaphoreProperties;
+
+    // ---- VK_KHR_surface extension commands
+    PFN_vkDestroySurfaceKHR DestroySurfaceKHR;
+    PFN_vkGetPhysicalDeviceSurfaceSupportKHR GetPhysicalDeviceSurfaceSupportKHR;
+    PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR GetPhysicalDeviceSurfaceCapabilitiesKHR;
+    PFN_vkGetPhysicalDeviceSurfaceFormatsKHR GetPhysicalDeviceSurfaceFormatsKHR;
+    PFN_vkGetPhysicalDeviceSurfacePresentModesKHR GetPhysicalDeviceSurfacePresentModesKHR;
+
+    // ---- VK_KHR_swapchain extension commands
+    PFN_vkGetPhysicalDevicePresentRectanglesKHR GetPhysicalDevicePresentRectanglesKHR;
+
+    // ---- VK_KHR_display extension commands
+    PFN_vkGetPhysicalDeviceDisplayPropertiesKHR GetPhysicalDeviceDisplayPropertiesKHR;
+    PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR GetPhysicalDeviceDisplayPlanePropertiesKHR;
+    PFN_vkGetDisplayPlaneSupportedDisplaysKHR GetDisplayPlaneSupportedDisplaysKHR;
+    PFN_vkGetDisplayModePropertiesKHR GetDisplayModePropertiesKHR;
+    PFN_vkCreateDisplayModeKHR CreateDisplayModeKHR;
+    PFN_vkGetDisplayPlaneCapabilitiesKHR GetDisplayPlaneCapabilitiesKHR;
+    PFN_vkCreateDisplayPlaneSurfaceKHR CreateDisplayPlaneSurfaceKHR;
+
+    // ---- VK_KHR_xlib_surface extension commands
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    PFN_vkCreateXlibSurfaceKHR CreateXlibSurfaceKHR;
+#endif // VK_USE_PLATFORM_XLIB_KHR
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR GetPhysicalDeviceXlibPresentationSupportKHR;
+#endif // VK_USE_PLATFORM_XLIB_KHR
+
+    // ---- VK_KHR_xcb_surface extension commands
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    PFN_vkCreateXcbSurfaceKHR CreateXcbSurfaceKHR;
+#endif // VK_USE_PLATFORM_XCB_KHR
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR GetPhysicalDeviceXcbPresentationSupportKHR;
+#endif // VK_USE_PLATFORM_XCB_KHR
+
+    // ---- VK_KHR_wayland_surface extension commands
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    PFN_vkCreateWaylandSurfaceKHR CreateWaylandSurfaceKHR;
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR GetPhysicalDeviceWaylandPresentationSupportKHR;
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+
+    // ---- VK_KHR_android_surface extension commands
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    PFN_vkCreateAndroidSurfaceKHR CreateAndroidSurfaceKHR;
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+    // ---- VK_KHR_win32_surface extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkCreateWin32SurfaceKHR CreateWin32SurfaceKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR GetPhysicalDeviceWin32PresentationSupportKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_KHR_get_physical_device_properties2 extension commands
+    PFN_vkGetPhysicalDeviceFeatures2KHR GetPhysicalDeviceFeatures2KHR;
+    PFN_vkGetPhysicalDeviceProperties2KHR GetPhysicalDeviceProperties2KHR;
+    PFN_vkGetPhysicalDeviceFormatProperties2KHR GetPhysicalDeviceFormatProperties2KHR;
+    PFN_vkGetPhysicalDeviceImageFormatProperties2KHR GetPhysicalDeviceImageFormatProperties2KHR;
+    PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR GetPhysicalDeviceQueueFamilyProperties2KHR;
+    PFN_vkGetPhysicalDeviceMemoryProperties2KHR GetPhysicalDeviceMemoryProperties2KHR;
+    PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR GetPhysicalDeviceSparseImageFormatProperties2KHR;
+
+    // ---- VK_KHR_device_group_creation extension commands
+    PFN_vkEnumeratePhysicalDeviceGroupsKHR EnumeratePhysicalDeviceGroupsKHR;
+
+    // ---- VK_KHR_external_memory_capabilities extension commands
+    PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR GetPhysicalDeviceExternalBufferPropertiesKHR;
+
+    // ---- VK_KHR_external_semaphore_capabilities extension commands
+    PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR GetPhysicalDeviceExternalSemaphorePropertiesKHR;
+
+    // ---- VK_KHR_external_fence_capabilities extension commands
+    PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR GetPhysicalDeviceExternalFencePropertiesKHR;
+
+    // ---- VK_KHR_performance_query extension commands
+    PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR;
+    PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR;
+
+    // ---- VK_KHR_get_surface_capabilities2 extension commands
+    PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR GetPhysicalDeviceSurfaceCapabilities2KHR;
+    PFN_vkGetPhysicalDeviceSurfaceFormats2KHR GetPhysicalDeviceSurfaceFormats2KHR;
+
+    // ---- VK_KHR_get_display_properties2 extension commands
+    PFN_vkGetPhysicalDeviceDisplayProperties2KHR GetPhysicalDeviceDisplayProperties2KHR;
+    PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR GetPhysicalDeviceDisplayPlaneProperties2KHR;
+    PFN_vkGetDisplayModeProperties2KHR GetDisplayModeProperties2KHR;
+    PFN_vkGetDisplayPlaneCapabilities2KHR GetDisplayPlaneCapabilities2KHR;
+
+    // ---- VK_EXT_debug_report extension commands
+    PFN_vkCreateDebugReportCallbackEXT CreateDebugReportCallbackEXT;
+    PFN_vkDestroyDebugReportCallbackEXT DestroyDebugReportCallbackEXT;
+    PFN_vkDebugReportMessageEXT DebugReportMessageEXT;
+
+    // ---- VK_GGP_stream_descriptor_surface extension commands
+#ifdef VK_USE_PLATFORM_GGP
+    PFN_vkCreateStreamDescriptorSurfaceGGP CreateStreamDescriptorSurfaceGGP;
+#endif // VK_USE_PLATFORM_GGP
+
+    // ---- VK_NV_external_memory_capabilities extension commands
+    PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV GetPhysicalDeviceExternalImageFormatPropertiesNV;
+
+    // ---- VK_NN_vi_surface extension commands
+#ifdef VK_USE_PLATFORM_VI_NN
+    PFN_vkCreateViSurfaceNN CreateViSurfaceNN;
+#endif // VK_USE_PLATFORM_VI_NN
+
+    // ---- VK_NVX_device_generated_commands extension commands
+    PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX GetPhysicalDeviceGeneratedCommandsPropertiesNVX;
+
+    // ---- VK_EXT_direct_mode_display extension commands
+    PFN_vkReleaseDisplayEXT ReleaseDisplayEXT;
+
+    // ---- VK_EXT_acquire_xlib_display extension commands
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    PFN_vkAcquireXlibDisplayEXT AcquireXlibDisplayEXT;
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    PFN_vkGetRandROutputDisplayEXT GetRandROutputDisplayEXT;
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+    // ---- VK_EXT_display_surface_counter extension commands
+    PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT GetPhysicalDeviceSurfaceCapabilities2EXT;
+
+    // ---- VK_MVK_ios_surface extension commands
+#ifdef VK_USE_PLATFORM_IOS_MVK
+    PFN_vkCreateIOSSurfaceMVK CreateIOSSurfaceMVK;
+#endif // VK_USE_PLATFORM_IOS_MVK
+
+    // ---- VK_MVK_macos_surface extension commands
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+    PFN_vkCreateMacOSSurfaceMVK CreateMacOSSurfaceMVK;
+#endif // VK_USE_PLATFORM_MACOS_MVK
+
+    // ---- VK_EXT_debug_utils extension commands
+    PFN_vkCreateDebugUtilsMessengerEXT CreateDebugUtilsMessengerEXT;
+    PFN_vkDestroyDebugUtilsMessengerEXT DestroyDebugUtilsMessengerEXT;
+    PFN_vkSubmitDebugUtilsMessageEXT SubmitDebugUtilsMessageEXT;
+
+    // ---- VK_EXT_sample_locations extension commands
+    PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT GetPhysicalDeviceMultisamplePropertiesEXT;
+
+    // ---- VK_EXT_calibrated_timestamps extension commands
+    PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT GetPhysicalDeviceCalibrateableTimeDomainsEXT;
+
+    // ---- VK_FUCHSIA_imagepipe_surface extension commands
+#ifdef VK_USE_PLATFORM_FUCHSIA
+    PFN_vkCreateImagePipeSurfaceFUCHSIA CreateImagePipeSurfaceFUCHSIA;
+#endif // VK_USE_PLATFORM_FUCHSIA
+
+    // ---- VK_EXT_metal_surface extension commands
+#ifdef VK_USE_PLATFORM_METAL_EXT
+    PFN_vkCreateMetalSurfaceEXT CreateMetalSurfaceEXT;
+#endif // VK_USE_PLATFORM_METAL_EXT
+
+    // ---- VK_NV_cooperative_matrix extension commands
+    PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV GetPhysicalDeviceCooperativeMatrixPropertiesNV;
+
+    // ---- VK_NV_coverage_reduction_mode extension commands
+    PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV;
+
+    // ---- VK_EXT_full_screen_exclusive extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT GetPhysicalDeviceSurfacePresentModes2EXT;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_EXT_headless_surface extension commands
+    PFN_vkCreateHeadlessSurfaceEXT CreateHeadlessSurfaceEXT;
+} VkLayerInstanceDispatchTable;
+
+// Device function pointer dispatch table
+typedef struct VkLayerDispatchTable_ {
+
+    // ---- Core 1_0 commands
+    PFN_vkGetDeviceProcAddr GetDeviceProcAddr;
+    PFN_vkDestroyDevice DestroyDevice;
+    PFN_vkGetDeviceQueue GetDeviceQueue;
+    PFN_vkQueueSubmit QueueSubmit;
+    PFN_vkQueueWaitIdle QueueWaitIdle;
+    PFN_vkDeviceWaitIdle DeviceWaitIdle;
+    PFN_vkAllocateMemory AllocateMemory;
+    PFN_vkFreeMemory FreeMemory;
+    PFN_vkMapMemory MapMemory;
+    PFN_vkUnmapMemory UnmapMemory;
+    PFN_vkFlushMappedMemoryRanges FlushMappedMemoryRanges;
+    PFN_vkInvalidateMappedMemoryRanges InvalidateMappedMemoryRanges;
+    PFN_vkGetDeviceMemoryCommitment GetDeviceMemoryCommitment;
+    PFN_vkBindBufferMemory BindBufferMemory;
+    PFN_vkBindImageMemory BindImageMemory;
+    PFN_vkGetBufferMemoryRequirements GetBufferMemoryRequirements;
+    PFN_vkGetImageMemoryRequirements GetImageMemoryRequirements;
+    PFN_vkGetImageSparseMemoryRequirements GetImageSparseMemoryRequirements;
+    PFN_vkQueueBindSparse QueueBindSparse;
+    PFN_vkCreateFence CreateFence;
+    PFN_vkDestroyFence DestroyFence;
+    PFN_vkResetFences ResetFences;
+    PFN_vkGetFenceStatus GetFenceStatus;
+    PFN_vkWaitForFences WaitForFences;
+    PFN_vkCreateSemaphore CreateSemaphore;
+    PFN_vkDestroySemaphore DestroySemaphore;
+    PFN_vkCreateEvent CreateEvent;
+    PFN_vkDestroyEvent DestroyEvent;
+    PFN_vkGetEventStatus GetEventStatus;
+    PFN_vkSetEvent SetEvent;
+    PFN_vkResetEvent ResetEvent;
+    PFN_vkCreateQueryPool CreateQueryPool;
+    PFN_vkDestroyQueryPool DestroyQueryPool;
+    PFN_vkGetQueryPoolResults GetQueryPoolResults;
+    PFN_vkCreateBuffer CreateBuffer;
+    PFN_vkDestroyBuffer DestroyBuffer;
+    PFN_vkCreateBufferView CreateBufferView;
+    PFN_vkDestroyBufferView DestroyBufferView;
+    PFN_vkCreateImage CreateImage;
+    PFN_vkDestroyImage DestroyImage;
+    PFN_vkGetImageSubresourceLayout GetImageSubresourceLayout;
+    PFN_vkCreateImageView CreateImageView;
+    PFN_vkDestroyImageView DestroyImageView;
+    PFN_vkCreateShaderModule CreateShaderModule;
+    PFN_vkDestroyShaderModule DestroyShaderModule;
+    PFN_vkCreatePipelineCache CreatePipelineCache;
+    PFN_vkDestroyPipelineCache DestroyPipelineCache;
+    PFN_vkGetPipelineCacheData GetPipelineCacheData;
+    PFN_vkMergePipelineCaches MergePipelineCaches;
+    PFN_vkCreateGraphicsPipelines CreateGraphicsPipelines;
+    PFN_vkCreateComputePipelines CreateComputePipelines;
+    PFN_vkDestroyPipeline DestroyPipeline;
+    PFN_vkCreatePipelineLayout CreatePipelineLayout;
+    PFN_vkDestroyPipelineLayout DestroyPipelineLayout;
+    PFN_vkCreateSampler CreateSampler;
+    PFN_vkDestroySampler DestroySampler;
+    PFN_vkCreateDescriptorSetLayout CreateDescriptorSetLayout;
+    PFN_vkDestroyDescriptorSetLayout DestroyDescriptorSetLayout;
+    PFN_vkCreateDescriptorPool CreateDescriptorPool;
+    PFN_vkDestroyDescriptorPool DestroyDescriptorPool;
+    PFN_vkResetDescriptorPool ResetDescriptorPool;
+    PFN_vkAllocateDescriptorSets AllocateDescriptorSets;
+    PFN_vkFreeDescriptorSets FreeDescriptorSets;
+    PFN_vkUpdateDescriptorSets UpdateDescriptorSets;
+    PFN_vkCreateFramebuffer CreateFramebuffer;
+    PFN_vkDestroyFramebuffer DestroyFramebuffer;
+    PFN_vkCreateRenderPass CreateRenderPass;
+    PFN_vkDestroyRenderPass DestroyRenderPass;
+    PFN_vkGetRenderAreaGranularity GetRenderAreaGranularity;
+    PFN_vkCreateCommandPool CreateCommandPool;
+    PFN_vkDestroyCommandPool DestroyCommandPool;
+    PFN_vkResetCommandPool ResetCommandPool;
+    PFN_vkAllocateCommandBuffers AllocateCommandBuffers;
+    PFN_vkFreeCommandBuffers FreeCommandBuffers;
+    PFN_vkBeginCommandBuffer BeginCommandBuffer;
+    PFN_vkEndCommandBuffer EndCommandBuffer;
+    PFN_vkResetCommandBuffer ResetCommandBuffer;
+    PFN_vkCmdBindPipeline CmdBindPipeline;
+    PFN_vkCmdSetViewport CmdSetViewport;
+    PFN_vkCmdSetScissor CmdSetScissor;
+    PFN_vkCmdSetLineWidth CmdSetLineWidth;
+    PFN_vkCmdSetDepthBias CmdSetDepthBias;
+    PFN_vkCmdSetBlendConstants CmdSetBlendConstants;
+    PFN_vkCmdSetDepthBounds CmdSetDepthBounds;
+    PFN_vkCmdSetStencilCompareMask CmdSetStencilCompareMask;
+    PFN_vkCmdSetStencilWriteMask CmdSetStencilWriteMask;
+    PFN_vkCmdSetStencilReference CmdSetStencilReference;
+    PFN_vkCmdBindDescriptorSets CmdBindDescriptorSets;
+    PFN_vkCmdBindIndexBuffer CmdBindIndexBuffer;
+    PFN_vkCmdBindVertexBuffers CmdBindVertexBuffers;
+    PFN_vkCmdDraw CmdDraw;
+    PFN_vkCmdDrawIndexed CmdDrawIndexed;
+    PFN_vkCmdDrawIndirect CmdDrawIndirect;
+    PFN_vkCmdDrawIndexedIndirect CmdDrawIndexedIndirect;
+    PFN_vkCmdDispatch CmdDispatch;
+    PFN_vkCmdDispatchIndirect CmdDispatchIndirect;
+    PFN_vkCmdCopyBuffer CmdCopyBuffer;
+    PFN_vkCmdCopyImage CmdCopyImage;
+    PFN_vkCmdBlitImage CmdBlitImage;
+    PFN_vkCmdCopyBufferToImage CmdCopyBufferToImage;
+    PFN_vkCmdCopyImageToBuffer CmdCopyImageToBuffer;
+    PFN_vkCmdUpdateBuffer CmdUpdateBuffer;
+    PFN_vkCmdFillBuffer CmdFillBuffer;
+    PFN_vkCmdClearColorImage CmdClearColorImage;
+    PFN_vkCmdClearDepthStencilImage CmdClearDepthStencilImage;
+    PFN_vkCmdClearAttachments CmdClearAttachments;
+    PFN_vkCmdResolveImage CmdResolveImage;
+    PFN_vkCmdSetEvent CmdSetEvent;
+    PFN_vkCmdResetEvent CmdResetEvent;
+    PFN_vkCmdWaitEvents CmdWaitEvents;
+    PFN_vkCmdPipelineBarrier CmdPipelineBarrier;
+    PFN_vkCmdBeginQuery CmdBeginQuery;
+    PFN_vkCmdEndQuery CmdEndQuery;
+    PFN_vkCmdResetQueryPool CmdResetQueryPool;
+    PFN_vkCmdWriteTimestamp CmdWriteTimestamp;
+    PFN_vkCmdCopyQueryPoolResults CmdCopyQueryPoolResults;
+    PFN_vkCmdPushConstants CmdPushConstants;
+    PFN_vkCmdBeginRenderPass CmdBeginRenderPass;
+    PFN_vkCmdNextSubpass CmdNextSubpass;
+    PFN_vkCmdEndRenderPass CmdEndRenderPass;
+    PFN_vkCmdExecuteCommands CmdExecuteCommands;
+
+    // ---- Core 1_1 commands
+    PFN_vkBindBufferMemory2 BindBufferMemory2;
+    PFN_vkBindImageMemory2 BindImageMemory2;
+    PFN_vkGetDeviceGroupPeerMemoryFeatures GetDeviceGroupPeerMemoryFeatures;
+    PFN_vkCmdSetDeviceMask CmdSetDeviceMask;
+    PFN_vkCmdDispatchBase CmdDispatchBase;
+    PFN_vkGetImageMemoryRequirements2 GetImageMemoryRequirements2;
+    PFN_vkGetBufferMemoryRequirements2 GetBufferMemoryRequirements2;
+    PFN_vkGetImageSparseMemoryRequirements2 GetImageSparseMemoryRequirements2;
+    PFN_vkTrimCommandPool TrimCommandPool;
+    PFN_vkGetDeviceQueue2 GetDeviceQueue2;
+    PFN_vkCreateSamplerYcbcrConversion CreateSamplerYcbcrConversion;
+    PFN_vkDestroySamplerYcbcrConversion DestroySamplerYcbcrConversion;
+    PFN_vkCreateDescriptorUpdateTemplate CreateDescriptorUpdateTemplate;
+    PFN_vkDestroyDescriptorUpdateTemplate DestroyDescriptorUpdateTemplate;
+    PFN_vkUpdateDescriptorSetWithTemplate UpdateDescriptorSetWithTemplate;
+    PFN_vkGetDescriptorSetLayoutSupport GetDescriptorSetLayoutSupport;
+
+    // ---- VK_KHR_swapchain extension commands
+    PFN_vkCreateSwapchainKHR CreateSwapchainKHR;
+    PFN_vkDestroySwapchainKHR DestroySwapchainKHR;
+    PFN_vkGetSwapchainImagesKHR GetSwapchainImagesKHR;
+    PFN_vkAcquireNextImageKHR AcquireNextImageKHR;
+    PFN_vkQueuePresentKHR QueuePresentKHR;
+    PFN_vkGetDeviceGroupPresentCapabilitiesKHR GetDeviceGroupPresentCapabilitiesKHR;
+    PFN_vkGetDeviceGroupSurfacePresentModesKHR GetDeviceGroupSurfacePresentModesKHR;
+    PFN_vkAcquireNextImage2KHR AcquireNextImage2KHR;
+
+    // ---- VK_KHR_display_swapchain extension commands
+    PFN_vkCreateSharedSwapchainsKHR CreateSharedSwapchainsKHR;
+
+    // ---- VK_KHR_device_group extension commands
+    PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR GetDeviceGroupPeerMemoryFeaturesKHR;
+    PFN_vkCmdSetDeviceMaskKHR CmdSetDeviceMaskKHR;
+    PFN_vkCmdDispatchBaseKHR CmdDispatchBaseKHR;
+
+    // ---- VK_KHR_maintenance1 extension commands
+    PFN_vkTrimCommandPoolKHR TrimCommandPoolKHR;
+
+    // ---- VK_KHR_external_memory_win32 extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetMemoryWin32HandleKHR GetMemoryWin32HandleKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetMemoryWin32HandlePropertiesKHR GetMemoryWin32HandlePropertiesKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_KHR_external_memory_fd extension commands
+    PFN_vkGetMemoryFdKHR GetMemoryFdKHR;
+    PFN_vkGetMemoryFdPropertiesKHR GetMemoryFdPropertiesKHR;
+
+    // ---- VK_KHR_external_semaphore_win32 extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkImportSemaphoreWin32HandleKHR ImportSemaphoreWin32HandleKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetSemaphoreWin32HandleKHR GetSemaphoreWin32HandleKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_KHR_external_semaphore_fd extension commands
+    PFN_vkImportSemaphoreFdKHR ImportSemaphoreFdKHR;
+    PFN_vkGetSemaphoreFdKHR GetSemaphoreFdKHR;
+
+    // ---- VK_KHR_push_descriptor extension commands
+    PFN_vkCmdPushDescriptorSetKHR CmdPushDescriptorSetKHR;
+    PFN_vkCmdPushDescriptorSetWithTemplateKHR CmdPushDescriptorSetWithTemplateKHR;
+
+    // ---- VK_KHR_descriptor_update_template extension commands
+    PFN_vkCreateDescriptorUpdateTemplateKHR CreateDescriptorUpdateTemplateKHR;
+    PFN_vkDestroyDescriptorUpdateTemplateKHR DestroyDescriptorUpdateTemplateKHR;
+    PFN_vkUpdateDescriptorSetWithTemplateKHR UpdateDescriptorSetWithTemplateKHR;
+
+    // ---- VK_KHR_create_renderpass2 extension commands
+    PFN_vkCreateRenderPass2KHR CreateRenderPass2KHR;
+    PFN_vkCmdBeginRenderPass2KHR CmdBeginRenderPass2KHR;
+    PFN_vkCmdNextSubpass2KHR CmdNextSubpass2KHR;
+    PFN_vkCmdEndRenderPass2KHR CmdEndRenderPass2KHR;
+
+    // ---- VK_KHR_shared_presentable_image extension commands
+    PFN_vkGetSwapchainStatusKHR GetSwapchainStatusKHR;
+
+    // ---- VK_KHR_external_fence_win32 extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkImportFenceWin32HandleKHR ImportFenceWin32HandleKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetFenceWin32HandleKHR GetFenceWin32HandleKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_KHR_external_fence_fd extension commands
+    PFN_vkImportFenceFdKHR ImportFenceFdKHR;
+    PFN_vkGetFenceFdKHR GetFenceFdKHR;
+
+    // ---- VK_KHR_performance_query extension commands
+    PFN_vkAcquireProfilingLockKHR AcquireProfilingLockKHR;
+    PFN_vkReleaseProfilingLockKHR ReleaseProfilingLockKHR;
+
+    // ---- VK_KHR_get_memory_requirements2 extension commands
+    PFN_vkGetImageMemoryRequirements2KHR GetImageMemoryRequirements2KHR;
+    PFN_vkGetBufferMemoryRequirements2KHR GetBufferMemoryRequirements2KHR;
+    PFN_vkGetImageSparseMemoryRequirements2KHR GetImageSparseMemoryRequirements2KHR;
+
+    // ---- VK_KHR_sampler_ycbcr_conversion extension commands
+    PFN_vkCreateSamplerYcbcrConversionKHR CreateSamplerYcbcrConversionKHR;
+    PFN_vkDestroySamplerYcbcrConversionKHR DestroySamplerYcbcrConversionKHR;
+
+    // ---- VK_KHR_bind_memory2 extension commands
+    PFN_vkBindBufferMemory2KHR BindBufferMemory2KHR;
+    PFN_vkBindImageMemory2KHR BindImageMemory2KHR;
+
+    // ---- VK_KHR_maintenance3 extension commands
+    PFN_vkGetDescriptorSetLayoutSupportKHR GetDescriptorSetLayoutSupportKHR;
+
+    // ---- VK_KHR_draw_indirect_count extension commands
+    PFN_vkCmdDrawIndirectCountKHR CmdDrawIndirectCountKHR;
+    PFN_vkCmdDrawIndexedIndirectCountKHR CmdDrawIndexedIndirectCountKHR;
+
+    // ---- VK_KHR_timeline_semaphore extension commands
+    PFN_vkGetSemaphoreCounterValueKHR GetSemaphoreCounterValueKHR;
+    PFN_vkWaitSemaphoresKHR WaitSemaphoresKHR;
+    PFN_vkSignalSemaphoreKHR SignalSemaphoreKHR;
+
+    // ---- VK_KHR_pipeline_executable_properties extension commands
+    PFN_vkGetPipelineExecutablePropertiesKHR GetPipelineExecutablePropertiesKHR;
+    PFN_vkGetPipelineExecutableStatisticsKHR GetPipelineExecutableStatisticsKHR;
+    PFN_vkGetPipelineExecutableInternalRepresentationsKHR GetPipelineExecutableInternalRepresentationsKHR;
+
+    // ---- VK_EXT_debug_marker extension commands
+    PFN_vkDebugMarkerSetObjectTagEXT DebugMarkerSetObjectTagEXT;
+    PFN_vkDebugMarkerSetObjectNameEXT DebugMarkerSetObjectNameEXT;
+    PFN_vkCmdDebugMarkerBeginEXT CmdDebugMarkerBeginEXT;
+    PFN_vkCmdDebugMarkerEndEXT CmdDebugMarkerEndEXT;
+    PFN_vkCmdDebugMarkerInsertEXT CmdDebugMarkerInsertEXT;
+
+    // ---- VK_EXT_transform_feedback extension commands
+    PFN_vkCmdBindTransformFeedbackBuffersEXT CmdBindTransformFeedbackBuffersEXT;
+    PFN_vkCmdBeginTransformFeedbackEXT CmdBeginTransformFeedbackEXT;
+    PFN_vkCmdEndTransformFeedbackEXT CmdEndTransformFeedbackEXT;
+    PFN_vkCmdBeginQueryIndexedEXT CmdBeginQueryIndexedEXT;
+    PFN_vkCmdEndQueryIndexedEXT CmdEndQueryIndexedEXT;
+    PFN_vkCmdDrawIndirectByteCountEXT CmdDrawIndirectByteCountEXT;
+
+    // ---- VK_NVX_image_view_handle extension commands
+    PFN_vkGetImageViewHandleNVX GetImageViewHandleNVX;
+
+    // ---- VK_AMD_draw_indirect_count extension commands
+    PFN_vkCmdDrawIndirectCountAMD CmdDrawIndirectCountAMD;
+    PFN_vkCmdDrawIndexedIndirectCountAMD CmdDrawIndexedIndirectCountAMD;
+
+    // ---- VK_AMD_shader_info extension commands
+    PFN_vkGetShaderInfoAMD GetShaderInfoAMD;
+
+    // ---- VK_NV_external_memory_win32 extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetMemoryWin32HandleNV GetMemoryWin32HandleNV;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_EXT_conditional_rendering extension commands
+    PFN_vkCmdBeginConditionalRenderingEXT CmdBeginConditionalRenderingEXT;
+    PFN_vkCmdEndConditionalRenderingEXT CmdEndConditionalRenderingEXT;
+
+    // ---- VK_NVX_device_generated_commands extension commands
+    PFN_vkCmdProcessCommandsNVX CmdProcessCommandsNVX;
+    PFN_vkCmdReserveSpaceForCommandsNVX CmdReserveSpaceForCommandsNVX;
+    PFN_vkCreateIndirectCommandsLayoutNVX CreateIndirectCommandsLayoutNVX;
+    PFN_vkDestroyIndirectCommandsLayoutNVX DestroyIndirectCommandsLayoutNVX;
+    PFN_vkCreateObjectTableNVX CreateObjectTableNVX;
+    PFN_vkDestroyObjectTableNVX DestroyObjectTableNVX;
+    PFN_vkRegisterObjectsNVX RegisterObjectsNVX;
+    PFN_vkUnregisterObjectsNVX UnregisterObjectsNVX;
+
+    // ---- VK_NV_clip_space_w_scaling extension commands
+    PFN_vkCmdSetViewportWScalingNV CmdSetViewportWScalingNV;
+
+    // ---- VK_EXT_display_control extension commands
+    PFN_vkDisplayPowerControlEXT DisplayPowerControlEXT;
+    PFN_vkRegisterDeviceEventEXT RegisterDeviceEventEXT;
+    PFN_vkRegisterDisplayEventEXT RegisterDisplayEventEXT;
+    PFN_vkGetSwapchainCounterEXT GetSwapchainCounterEXT;
+
+    // ---- VK_GOOGLE_display_timing extension commands
+    PFN_vkGetRefreshCycleDurationGOOGLE GetRefreshCycleDurationGOOGLE;
+    PFN_vkGetPastPresentationTimingGOOGLE GetPastPresentationTimingGOOGLE;
+
+    // ---- VK_EXT_discard_rectangles extension commands
+    PFN_vkCmdSetDiscardRectangleEXT CmdSetDiscardRectangleEXT;
+
+    // ---- VK_EXT_hdr_metadata extension commands
+    PFN_vkSetHdrMetadataEXT SetHdrMetadataEXT;
+
+    // ---- VK_EXT_debug_utils extension commands
+    PFN_vkSetDebugUtilsObjectNameEXT SetDebugUtilsObjectNameEXT;
+    PFN_vkSetDebugUtilsObjectTagEXT SetDebugUtilsObjectTagEXT;
+    PFN_vkQueueBeginDebugUtilsLabelEXT QueueBeginDebugUtilsLabelEXT;
+    PFN_vkQueueEndDebugUtilsLabelEXT QueueEndDebugUtilsLabelEXT;
+    PFN_vkQueueInsertDebugUtilsLabelEXT QueueInsertDebugUtilsLabelEXT;
+    PFN_vkCmdBeginDebugUtilsLabelEXT CmdBeginDebugUtilsLabelEXT;
+    PFN_vkCmdEndDebugUtilsLabelEXT CmdEndDebugUtilsLabelEXT;
+    PFN_vkCmdInsertDebugUtilsLabelEXT CmdInsertDebugUtilsLabelEXT;
+
+    // ---- VK_ANDROID_external_memory_android_hardware_buffer extension commands
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    PFN_vkGetAndroidHardwareBufferPropertiesANDROID GetAndroidHardwareBufferPropertiesANDROID;
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    PFN_vkGetMemoryAndroidHardwareBufferANDROID GetMemoryAndroidHardwareBufferANDROID;
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+    // ---- VK_EXT_sample_locations extension commands
+    PFN_vkCmdSetSampleLocationsEXT CmdSetSampleLocationsEXT;
+
+    // ---- VK_EXT_image_drm_format_modifier extension commands
+    PFN_vkGetImageDrmFormatModifierPropertiesEXT GetImageDrmFormatModifierPropertiesEXT;
+
+    // ---- VK_EXT_validation_cache extension commands
+    PFN_vkCreateValidationCacheEXT CreateValidationCacheEXT;
+    PFN_vkDestroyValidationCacheEXT DestroyValidationCacheEXT;
+    PFN_vkMergeValidationCachesEXT MergeValidationCachesEXT;
+    PFN_vkGetValidationCacheDataEXT GetValidationCacheDataEXT;
+
+    // ---- VK_NV_shading_rate_image extension commands
+    PFN_vkCmdBindShadingRateImageNV CmdBindShadingRateImageNV;
+    PFN_vkCmdSetViewportShadingRatePaletteNV CmdSetViewportShadingRatePaletteNV;
+    PFN_vkCmdSetCoarseSampleOrderNV CmdSetCoarseSampleOrderNV;
+
+    // ---- VK_NV_ray_tracing extension commands
+    PFN_vkCreateAccelerationStructureNV CreateAccelerationStructureNV;
+    PFN_vkDestroyAccelerationStructureNV DestroyAccelerationStructureNV;
+    PFN_vkGetAccelerationStructureMemoryRequirementsNV GetAccelerationStructureMemoryRequirementsNV;
+    PFN_vkBindAccelerationStructureMemoryNV BindAccelerationStructureMemoryNV;
+    PFN_vkCmdBuildAccelerationStructureNV CmdBuildAccelerationStructureNV;
+    PFN_vkCmdCopyAccelerationStructureNV CmdCopyAccelerationStructureNV;
+    PFN_vkCmdTraceRaysNV CmdTraceRaysNV;
+    PFN_vkCreateRayTracingPipelinesNV CreateRayTracingPipelinesNV;
+    PFN_vkGetRayTracingShaderGroupHandlesNV GetRayTracingShaderGroupHandlesNV;
+    PFN_vkGetAccelerationStructureHandleNV GetAccelerationStructureHandleNV;
+    PFN_vkCmdWriteAccelerationStructuresPropertiesNV CmdWriteAccelerationStructuresPropertiesNV;
+    PFN_vkCompileDeferredNV CompileDeferredNV;
+
+    // ---- VK_EXT_external_memory_host extension commands
+    PFN_vkGetMemoryHostPointerPropertiesEXT GetMemoryHostPointerPropertiesEXT;
+
+    // ---- VK_AMD_buffer_marker extension commands
+    PFN_vkCmdWriteBufferMarkerAMD CmdWriteBufferMarkerAMD;
+
+    // ---- VK_EXT_calibrated_timestamps extension commands
+    PFN_vkGetCalibratedTimestampsEXT GetCalibratedTimestampsEXT;
+
+    // ---- VK_NV_mesh_shader extension commands
+    PFN_vkCmdDrawMeshTasksNV CmdDrawMeshTasksNV;
+    PFN_vkCmdDrawMeshTasksIndirectNV CmdDrawMeshTasksIndirectNV;
+    PFN_vkCmdDrawMeshTasksIndirectCountNV CmdDrawMeshTasksIndirectCountNV;
+
+    // ---- VK_NV_scissor_exclusive extension commands
+    PFN_vkCmdSetExclusiveScissorNV CmdSetExclusiveScissorNV;
+
+    // ---- VK_NV_device_diagnostic_checkpoints extension commands
+    PFN_vkCmdSetCheckpointNV CmdSetCheckpointNV;
+    PFN_vkGetQueueCheckpointDataNV GetQueueCheckpointDataNV;
+
+    // ---- VK_INTEL_performance_query extension commands
+    PFN_vkInitializePerformanceApiINTEL InitializePerformanceApiINTEL;
+    PFN_vkUninitializePerformanceApiINTEL UninitializePerformanceApiINTEL;
+    PFN_vkCmdSetPerformanceMarkerINTEL CmdSetPerformanceMarkerINTEL;
+    PFN_vkCmdSetPerformanceStreamMarkerINTEL CmdSetPerformanceStreamMarkerINTEL;
+    PFN_vkCmdSetPerformanceOverrideINTEL CmdSetPerformanceOverrideINTEL;
+    PFN_vkAcquirePerformanceConfigurationINTEL AcquirePerformanceConfigurationINTEL;
+    PFN_vkReleasePerformanceConfigurationINTEL ReleasePerformanceConfigurationINTEL;
+    PFN_vkQueueSetPerformanceConfigurationINTEL QueueSetPerformanceConfigurationINTEL;
+    PFN_vkGetPerformanceParameterINTEL GetPerformanceParameterINTEL;
+
+    // ---- VK_AMD_display_native_hdr extension commands
+    PFN_vkSetLocalDimmingAMD SetLocalDimmingAMD;
+
+    // ---- VK_EXT_buffer_device_address extension commands
+    PFN_vkGetBufferDeviceAddressEXT GetBufferDeviceAddressEXT;
+
+    // ---- VK_EXT_full_screen_exclusive extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkAcquireFullScreenExclusiveModeEXT AcquireFullScreenExclusiveModeEXT;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkReleaseFullScreenExclusiveModeEXT ReleaseFullScreenExclusiveModeEXT;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetDeviceGroupSurfacePresentModes2EXT GetDeviceGroupSurfacePresentModes2EXT;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_EXT_line_rasterization extension commands
+    PFN_vkCmdSetLineStippleEXT CmdSetLineStippleEXT;
+
+    // ---- VK_EXT_host_query_reset extension commands
+    PFN_vkResetQueryPoolEXT ResetQueryPoolEXT;
+} VkLayerDispatchTable;
+
+
diff --git a/src/third_party/vulkan-loader/src/loader/generated/vk_loader_extensions.c b/src/third_party/vulkan-loader/src/loader/generated/vk_loader_extensions.c
new file mode 100644
index 0000000..a6e01e6
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/generated/vk_loader_extensions.c
@@ -0,0 +1,4556 @@
+// *** THIS FILE IS GENERATED - DO NOT EDIT ***
+// See loader_extension_generator.py for modifications
+
+/*
+ * Copyright (c) 2015-2017 The Khronos Group Inc.
+ * Copyright (c) 2015-2017 Valve Corporation
+ * Copyright (c) 2015-2017 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Mark Young <marky@lunarg.com>
+ */
+
+#ifndef _GNU_SOURCE
+#define _GNU_SOURCE
+#endif
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include "vk_loader_platform.h"
+#include "loader.h"
+#include "vk_loader_extensions.h"
+#include <vulkan/vk_icd.h>
+#include "wsi.h"
+#include "debug_utils.h"
+#include "extension_manual.h"
+
+// Device extension error function
+VKAPI_ATTR VkResult VKAPI_CALL vkDevExtError(VkDevice dev) {
+    struct loader_device *found_dev;
+    // The device going in is a trampoline device
+    struct loader_icd_term *icd_term = loader_get_icd_and_device(dev, &found_dev, NULL);
+
+    if (icd_term)
+        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "Bad destination in loader trampoline dispatch,"
+                   "Are layers and extensions that you are calling enabled?");
+    return VK_ERROR_EXTENSION_NOT_PRESENT;
+}
+
+VKAPI_ATTR bool VKAPI_CALL loader_icd_init_entries(struct loader_icd_term *icd_term, VkInstance inst,
+                                                   const PFN_vkGetInstanceProcAddr fp_gipa) {
+
+#define LOOKUP_GIPA(func, required)                                                        \
+    do {                                                                                   \
+        icd_term->dispatch.func = (PFN_vk##func)fp_gipa(inst, "vk" #func);                 \
+        if (!icd_term->dispatch.func && required) {                                        \
+            loader_log((struct loader_instance *)inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, \
+                       loader_platform_get_proc_address_error("vk" #func));                \
+            return false;                                                                  \
+        }                                                                                  \
+    } while (0)
+
+
+    // ---- Core 1_0
+    LOOKUP_GIPA(DestroyInstance, true);
+    LOOKUP_GIPA(EnumeratePhysicalDevices, true);
+    LOOKUP_GIPA(GetPhysicalDeviceFeatures, true);
+    LOOKUP_GIPA(GetPhysicalDeviceFormatProperties, true);
+    LOOKUP_GIPA(GetPhysicalDeviceImageFormatProperties, true);
+    LOOKUP_GIPA(GetPhysicalDeviceProperties, true);
+    LOOKUP_GIPA(GetPhysicalDeviceQueueFamilyProperties, true);
+    LOOKUP_GIPA(GetPhysicalDeviceMemoryProperties, true);
+    LOOKUP_GIPA(GetDeviceProcAddr, true);
+    LOOKUP_GIPA(CreateDevice, true);
+    LOOKUP_GIPA(EnumerateDeviceExtensionProperties, true);
+    LOOKUP_GIPA(GetPhysicalDeviceSparseImageFormatProperties, true);
+
+    // ---- Core 1_1
+    LOOKUP_GIPA(EnumeratePhysicalDeviceGroups, false);
+    LOOKUP_GIPA(GetPhysicalDeviceFeatures2, false);
+    LOOKUP_GIPA(GetPhysicalDeviceProperties2, false);
+    LOOKUP_GIPA(GetPhysicalDeviceFormatProperties2, false);
+    LOOKUP_GIPA(GetPhysicalDeviceImageFormatProperties2, false);
+    LOOKUP_GIPA(GetPhysicalDeviceQueueFamilyProperties2, false);
+    LOOKUP_GIPA(GetPhysicalDeviceMemoryProperties2, false);
+    LOOKUP_GIPA(GetPhysicalDeviceSparseImageFormatProperties2, false);
+    LOOKUP_GIPA(GetPhysicalDeviceExternalBufferProperties, false);
+    LOOKUP_GIPA(GetPhysicalDeviceExternalFenceProperties, false);
+    LOOKUP_GIPA(GetPhysicalDeviceExternalSemaphoreProperties, false);
+
+    // ---- VK_KHR_surface extension commands
+    LOOKUP_GIPA(DestroySurfaceKHR, false);
+    LOOKUP_GIPA(GetPhysicalDeviceSurfaceSupportKHR, false);
+    LOOKUP_GIPA(GetPhysicalDeviceSurfaceCapabilitiesKHR, false);
+    LOOKUP_GIPA(GetPhysicalDeviceSurfaceFormatsKHR, false);
+    LOOKUP_GIPA(GetPhysicalDeviceSurfacePresentModesKHR, false);
+
+    // ---- VK_KHR_swapchain extension commands
+    LOOKUP_GIPA(CreateSwapchainKHR, false);
+    LOOKUP_GIPA(GetDeviceGroupSurfacePresentModesKHR, false);
+    LOOKUP_GIPA(GetPhysicalDevicePresentRectanglesKHR, false);
+
+    // ---- VK_KHR_display extension commands
+    LOOKUP_GIPA(GetPhysicalDeviceDisplayPropertiesKHR, false);
+    LOOKUP_GIPA(GetPhysicalDeviceDisplayPlanePropertiesKHR, false);
+    LOOKUP_GIPA(GetDisplayPlaneSupportedDisplaysKHR, false);
+    LOOKUP_GIPA(GetDisplayModePropertiesKHR, false);
+    LOOKUP_GIPA(CreateDisplayModeKHR, false);
+    LOOKUP_GIPA(GetDisplayPlaneCapabilitiesKHR, false);
+    LOOKUP_GIPA(CreateDisplayPlaneSurfaceKHR, false);
+
+    // ---- VK_KHR_display_swapchain extension commands
+    LOOKUP_GIPA(CreateSharedSwapchainsKHR, false);
+
+    // ---- VK_KHR_xlib_surface extension commands
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    LOOKUP_GIPA(CreateXlibSurfaceKHR, false);
+#endif // VK_USE_PLATFORM_XLIB_KHR
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    LOOKUP_GIPA(GetPhysicalDeviceXlibPresentationSupportKHR, false);
+#endif // VK_USE_PLATFORM_XLIB_KHR
+
+    // ---- VK_KHR_xcb_surface extension commands
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    LOOKUP_GIPA(CreateXcbSurfaceKHR, false);
+#endif // VK_USE_PLATFORM_XCB_KHR
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    LOOKUP_GIPA(GetPhysicalDeviceXcbPresentationSupportKHR, false);
+#endif // VK_USE_PLATFORM_XCB_KHR
+
+    // ---- VK_KHR_wayland_surface extension commands
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    LOOKUP_GIPA(CreateWaylandSurfaceKHR, false);
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    LOOKUP_GIPA(GetPhysicalDeviceWaylandPresentationSupportKHR, false);
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+
+    // ---- VK_KHR_android_surface extension commands
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    LOOKUP_GIPA(CreateAndroidSurfaceKHR, false);
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+    // ---- VK_KHR_win32_surface extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    LOOKUP_GIPA(CreateWin32SurfaceKHR, false);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    LOOKUP_GIPA(GetPhysicalDeviceWin32PresentationSupportKHR, false);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_KHR_get_physical_device_properties2 extension commands
+    LOOKUP_GIPA(GetPhysicalDeviceFeatures2KHR, false);
+    LOOKUP_GIPA(GetPhysicalDeviceProperties2KHR, false);
+    LOOKUP_GIPA(GetPhysicalDeviceFormatProperties2KHR, false);
+    LOOKUP_GIPA(GetPhysicalDeviceImageFormatProperties2KHR, false);
+    LOOKUP_GIPA(GetPhysicalDeviceQueueFamilyProperties2KHR, false);
+    LOOKUP_GIPA(GetPhysicalDeviceMemoryProperties2KHR, false);
+    LOOKUP_GIPA(GetPhysicalDeviceSparseImageFormatProperties2KHR, false);
+
+    // ---- VK_KHR_device_group_creation extension commands
+    LOOKUP_GIPA(EnumeratePhysicalDeviceGroupsKHR, false);
+
+    // ---- VK_KHR_external_memory_capabilities extension commands
+    LOOKUP_GIPA(GetPhysicalDeviceExternalBufferPropertiesKHR, false);
+
+    // ---- VK_KHR_external_semaphore_capabilities extension commands
+    LOOKUP_GIPA(GetPhysicalDeviceExternalSemaphorePropertiesKHR, false);
+
+    // ---- VK_KHR_external_fence_capabilities extension commands
+    LOOKUP_GIPA(GetPhysicalDeviceExternalFencePropertiesKHR, false);
+
+    // ---- VK_KHR_performance_query extension commands
+    LOOKUP_GIPA(EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR, false);
+    LOOKUP_GIPA(GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR, false);
+
+    // ---- VK_KHR_get_surface_capabilities2 extension commands
+    LOOKUP_GIPA(GetPhysicalDeviceSurfaceCapabilities2KHR, false);
+    LOOKUP_GIPA(GetPhysicalDeviceSurfaceFormats2KHR, false);
+
+    // ---- VK_KHR_get_display_properties2 extension commands
+    LOOKUP_GIPA(GetPhysicalDeviceDisplayProperties2KHR, false);
+    LOOKUP_GIPA(GetPhysicalDeviceDisplayPlaneProperties2KHR, false);
+    LOOKUP_GIPA(GetDisplayModeProperties2KHR, false);
+    LOOKUP_GIPA(GetDisplayPlaneCapabilities2KHR, false);
+
+    // ---- VK_EXT_debug_report extension commands
+    LOOKUP_GIPA(CreateDebugReportCallbackEXT, false);
+    LOOKUP_GIPA(DestroyDebugReportCallbackEXT, false);
+    LOOKUP_GIPA(DebugReportMessageEXT, false);
+
+    // ---- VK_EXT_debug_marker extension commands
+    LOOKUP_GIPA(DebugMarkerSetObjectTagEXT, false);
+    LOOKUP_GIPA(DebugMarkerSetObjectNameEXT, false);
+
+    // ---- VK_GGP_stream_descriptor_surface extension commands
+#ifdef VK_USE_PLATFORM_GGP
+    LOOKUP_GIPA(CreateStreamDescriptorSurfaceGGP, false);
+#endif // VK_USE_PLATFORM_GGP
+
+    // ---- VK_NV_external_memory_capabilities extension commands
+    LOOKUP_GIPA(GetPhysicalDeviceExternalImageFormatPropertiesNV, false);
+
+    // ---- VK_NN_vi_surface extension commands
+#ifdef VK_USE_PLATFORM_VI_NN
+    LOOKUP_GIPA(CreateViSurfaceNN, false);
+#endif // VK_USE_PLATFORM_VI_NN
+
+    // ---- VK_NVX_device_generated_commands extension commands
+    LOOKUP_GIPA(GetPhysicalDeviceGeneratedCommandsPropertiesNVX, false);
+
+    // ---- VK_EXT_direct_mode_display extension commands
+    LOOKUP_GIPA(ReleaseDisplayEXT, false);
+
+    // ---- VK_EXT_acquire_xlib_display extension commands
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    LOOKUP_GIPA(AcquireXlibDisplayEXT, false);
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    LOOKUP_GIPA(GetRandROutputDisplayEXT, false);
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+    // ---- VK_EXT_display_surface_counter extension commands
+    LOOKUP_GIPA(GetPhysicalDeviceSurfaceCapabilities2EXT, false);
+
+    // ---- VK_MVK_ios_surface extension commands
+#ifdef VK_USE_PLATFORM_IOS_MVK
+    LOOKUP_GIPA(CreateIOSSurfaceMVK, false);
+#endif // VK_USE_PLATFORM_IOS_MVK
+
+    // ---- VK_MVK_macos_surface extension commands
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+    LOOKUP_GIPA(CreateMacOSSurfaceMVK, false);
+#endif // VK_USE_PLATFORM_MACOS_MVK
+
+    // ---- VK_EXT_debug_utils extension commands
+    LOOKUP_GIPA(SetDebugUtilsObjectNameEXT, false);
+    LOOKUP_GIPA(SetDebugUtilsObjectTagEXT, false);
+    LOOKUP_GIPA(CreateDebugUtilsMessengerEXT, false);
+    LOOKUP_GIPA(DestroyDebugUtilsMessengerEXT, false);
+    LOOKUP_GIPA(SubmitDebugUtilsMessageEXT, false);
+
+    // ---- VK_EXT_sample_locations extension commands
+    LOOKUP_GIPA(GetPhysicalDeviceMultisamplePropertiesEXT, false);
+
+    // ---- VK_EXT_calibrated_timestamps extension commands
+    LOOKUP_GIPA(GetPhysicalDeviceCalibrateableTimeDomainsEXT, false);
+
+    // ---- VK_FUCHSIA_imagepipe_surface extension commands
+#ifdef VK_USE_PLATFORM_FUCHSIA
+    LOOKUP_GIPA(CreateImagePipeSurfaceFUCHSIA, false);
+#endif // VK_USE_PLATFORM_FUCHSIA
+
+    // ---- VK_EXT_metal_surface extension commands
+#ifdef VK_USE_PLATFORM_METAL_EXT
+    LOOKUP_GIPA(CreateMetalSurfaceEXT, false);
+#endif // VK_USE_PLATFORM_METAL_EXT
+
+    // ---- VK_NV_cooperative_matrix extension commands
+    LOOKUP_GIPA(GetPhysicalDeviceCooperativeMatrixPropertiesNV, false);
+
+    // ---- VK_NV_coverage_reduction_mode extension commands
+    LOOKUP_GIPA(GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV, false);
+
+    // ---- VK_EXT_full_screen_exclusive extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    LOOKUP_GIPA(GetPhysicalDeviceSurfacePresentModes2EXT, false);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    LOOKUP_GIPA(GetDeviceGroupSurfacePresentModes2EXT, false);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_EXT_headless_surface extension commands
+    LOOKUP_GIPA(CreateHeadlessSurfaceEXT, false);
+
+#undef LOOKUP_GIPA
+
+    return true;
+}
+
+// Init Device function pointer dispatch table with core commands
+VKAPI_ATTR void VKAPI_CALL loader_init_device_dispatch_table(struct loader_dev_dispatch_table *dev_table, PFN_vkGetDeviceProcAddr gpa,
+                                                             VkDevice dev) {
+    VkLayerDispatchTable *table = &dev_table->core_dispatch;
+    for (uint32_t i = 0; i < MAX_NUM_UNKNOWN_EXTS; i++) dev_table->ext_dispatch.dev_ext[i] = (PFN_vkDevExt)vkDevExtError;
+
+    // ---- Core 1_0 commands
+    table->GetDeviceProcAddr = gpa;
+    table->DestroyDevice = (PFN_vkDestroyDevice)gpa(dev, "vkDestroyDevice");
+    table->GetDeviceQueue = (PFN_vkGetDeviceQueue)gpa(dev, "vkGetDeviceQueue");
+    table->QueueSubmit = (PFN_vkQueueSubmit)gpa(dev, "vkQueueSubmit");
+    table->QueueWaitIdle = (PFN_vkQueueWaitIdle)gpa(dev, "vkQueueWaitIdle");
+    table->DeviceWaitIdle = (PFN_vkDeviceWaitIdle)gpa(dev, "vkDeviceWaitIdle");
+    table->AllocateMemory = (PFN_vkAllocateMemory)gpa(dev, "vkAllocateMemory");
+    table->FreeMemory = (PFN_vkFreeMemory)gpa(dev, "vkFreeMemory");
+    table->MapMemory = (PFN_vkMapMemory)gpa(dev, "vkMapMemory");
+    table->UnmapMemory = (PFN_vkUnmapMemory)gpa(dev, "vkUnmapMemory");
+    table->FlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)gpa(dev, "vkFlushMappedMemoryRanges");
+    table->InvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)gpa(dev, "vkInvalidateMappedMemoryRanges");
+    table->GetDeviceMemoryCommitment = (PFN_vkGetDeviceMemoryCommitment)gpa(dev, "vkGetDeviceMemoryCommitment");
+    table->BindBufferMemory = (PFN_vkBindBufferMemory)gpa(dev, "vkBindBufferMemory");
+    table->BindImageMemory = (PFN_vkBindImageMemory)gpa(dev, "vkBindImageMemory");
+    table->GetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)gpa(dev, "vkGetBufferMemoryRequirements");
+    table->GetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)gpa(dev, "vkGetImageMemoryRequirements");
+    table->GetImageSparseMemoryRequirements = (PFN_vkGetImageSparseMemoryRequirements)gpa(dev, "vkGetImageSparseMemoryRequirements");
+    table->QueueBindSparse = (PFN_vkQueueBindSparse)gpa(dev, "vkQueueBindSparse");
+    table->CreateFence = (PFN_vkCreateFence)gpa(dev, "vkCreateFence");
+    table->DestroyFence = (PFN_vkDestroyFence)gpa(dev, "vkDestroyFence");
+    table->ResetFences = (PFN_vkResetFences)gpa(dev, "vkResetFences");
+    table->GetFenceStatus = (PFN_vkGetFenceStatus)gpa(dev, "vkGetFenceStatus");
+    table->WaitForFences = (PFN_vkWaitForFences)gpa(dev, "vkWaitForFences");
+    table->CreateSemaphore = (PFN_vkCreateSemaphore)gpa(dev, "vkCreateSemaphore");
+    table->DestroySemaphore = (PFN_vkDestroySemaphore)gpa(dev, "vkDestroySemaphore");
+    table->CreateEvent = (PFN_vkCreateEvent)gpa(dev, "vkCreateEvent");
+    table->DestroyEvent = (PFN_vkDestroyEvent)gpa(dev, "vkDestroyEvent");
+    table->GetEventStatus = (PFN_vkGetEventStatus)gpa(dev, "vkGetEventStatus");
+    table->SetEvent = (PFN_vkSetEvent)gpa(dev, "vkSetEvent");
+    table->ResetEvent = (PFN_vkResetEvent)gpa(dev, "vkResetEvent");
+    table->CreateQueryPool = (PFN_vkCreateQueryPool)gpa(dev, "vkCreateQueryPool");
+    table->DestroyQueryPool = (PFN_vkDestroyQueryPool)gpa(dev, "vkDestroyQueryPool");
+    table->GetQueryPoolResults = (PFN_vkGetQueryPoolResults)gpa(dev, "vkGetQueryPoolResults");
+    table->CreateBuffer = (PFN_vkCreateBuffer)gpa(dev, "vkCreateBuffer");
+    table->DestroyBuffer = (PFN_vkDestroyBuffer)gpa(dev, "vkDestroyBuffer");
+    table->CreateBufferView = (PFN_vkCreateBufferView)gpa(dev, "vkCreateBufferView");
+    table->DestroyBufferView = (PFN_vkDestroyBufferView)gpa(dev, "vkDestroyBufferView");
+    table->CreateImage = (PFN_vkCreateImage)gpa(dev, "vkCreateImage");
+    table->DestroyImage = (PFN_vkDestroyImage)gpa(dev, "vkDestroyImage");
+    table->GetImageSubresourceLayout = (PFN_vkGetImageSubresourceLayout)gpa(dev, "vkGetImageSubresourceLayout");
+    table->CreateImageView = (PFN_vkCreateImageView)gpa(dev, "vkCreateImageView");
+    table->DestroyImageView = (PFN_vkDestroyImageView)gpa(dev, "vkDestroyImageView");
+    table->CreateShaderModule = (PFN_vkCreateShaderModule)gpa(dev, "vkCreateShaderModule");
+    table->DestroyShaderModule = (PFN_vkDestroyShaderModule)gpa(dev, "vkDestroyShaderModule");
+    table->CreatePipelineCache = (PFN_vkCreatePipelineCache)gpa(dev, "vkCreatePipelineCache");
+    table->DestroyPipelineCache = (PFN_vkDestroyPipelineCache)gpa(dev, "vkDestroyPipelineCache");
+    table->GetPipelineCacheData = (PFN_vkGetPipelineCacheData)gpa(dev, "vkGetPipelineCacheData");
+    table->MergePipelineCaches = (PFN_vkMergePipelineCaches)gpa(dev, "vkMergePipelineCaches");
+    table->CreateGraphicsPipelines = (PFN_vkCreateGraphicsPipelines)gpa(dev, "vkCreateGraphicsPipelines");
+    table->CreateComputePipelines = (PFN_vkCreateComputePipelines)gpa(dev, "vkCreateComputePipelines");
+    table->DestroyPipeline = (PFN_vkDestroyPipeline)gpa(dev, "vkDestroyPipeline");
+    table->CreatePipelineLayout = (PFN_vkCreatePipelineLayout)gpa(dev, "vkCreatePipelineLayout");
+    table->DestroyPipelineLayout = (PFN_vkDestroyPipelineLayout)gpa(dev, "vkDestroyPipelineLayout");
+    table->CreateSampler = (PFN_vkCreateSampler)gpa(dev, "vkCreateSampler");
+    table->DestroySampler = (PFN_vkDestroySampler)gpa(dev, "vkDestroySampler");
+    table->CreateDescriptorSetLayout = (PFN_vkCreateDescriptorSetLayout)gpa(dev, "vkCreateDescriptorSetLayout");
+    table->DestroyDescriptorSetLayout = (PFN_vkDestroyDescriptorSetLayout)gpa(dev, "vkDestroyDescriptorSetLayout");
+    table->CreateDescriptorPool = (PFN_vkCreateDescriptorPool)gpa(dev, "vkCreateDescriptorPool");
+    table->DestroyDescriptorPool = (PFN_vkDestroyDescriptorPool)gpa(dev, "vkDestroyDescriptorPool");
+    table->ResetDescriptorPool = (PFN_vkResetDescriptorPool)gpa(dev, "vkResetDescriptorPool");
+    table->AllocateDescriptorSets = (PFN_vkAllocateDescriptorSets)gpa(dev, "vkAllocateDescriptorSets");
+    table->FreeDescriptorSets = (PFN_vkFreeDescriptorSets)gpa(dev, "vkFreeDescriptorSets");
+    table->UpdateDescriptorSets = (PFN_vkUpdateDescriptorSets)gpa(dev, "vkUpdateDescriptorSets");
+    table->CreateFramebuffer = (PFN_vkCreateFramebuffer)gpa(dev, "vkCreateFramebuffer");
+    table->DestroyFramebuffer = (PFN_vkDestroyFramebuffer)gpa(dev, "vkDestroyFramebuffer");
+    table->CreateRenderPass = (PFN_vkCreateRenderPass)gpa(dev, "vkCreateRenderPass");
+    table->DestroyRenderPass = (PFN_vkDestroyRenderPass)gpa(dev, "vkDestroyRenderPass");
+    table->GetRenderAreaGranularity = (PFN_vkGetRenderAreaGranularity)gpa(dev, "vkGetRenderAreaGranularity");
+    table->CreateCommandPool = (PFN_vkCreateCommandPool)gpa(dev, "vkCreateCommandPool");
+    table->DestroyCommandPool = (PFN_vkDestroyCommandPool)gpa(dev, "vkDestroyCommandPool");
+    table->ResetCommandPool = (PFN_vkResetCommandPool)gpa(dev, "vkResetCommandPool");
+    table->AllocateCommandBuffers = (PFN_vkAllocateCommandBuffers)gpa(dev, "vkAllocateCommandBuffers");
+    table->FreeCommandBuffers = (PFN_vkFreeCommandBuffers)gpa(dev, "vkFreeCommandBuffers");
+    table->BeginCommandBuffer = (PFN_vkBeginCommandBuffer)gpa(dev, "vkBeginCommandBuffer");
+    table->EndCommandBuffer = (PFN_vkEndCommandBuffer)gpa(dev, "vkEndCommandBuffer");
+    table->ResetCommandBuffer = (PFN_vkResetCommandBuffer)gpa(dev, "vkResetCommandBuffer");
+    table->CmdBindPipeline = (PFN_vkCmdBindPipeline)gpa(dev, "vkCmdBindPipeline");
+    table->CmdSetViewport = (PFN_vkCmdSetViewport)gpa(dev, "vkCmdSetViewport");
+    table->CmdSetScissor = (PFN_vkCmdSetScissor)gpa(dev, "vkCmdSetScissor");
+    table->CmdSetLineWidth = (PFN_vkCmdSetLineWidth)gpa(dev, "vkCmdSetLineWidth");
+    table->CmdSetDepthBias = (PFN_vkCmdSetDepthBias)gpa(dev, "vkCmdSetDepthBias");
+    table->CmdSetBlendConstants = (PFN_vkCmdSetBlendConstants)gpa(dev, "vkCmdSetBlendConstants");
+    table->CmdSetDepthBounds = (PFN_vkCmdSetDepthBounds)gpa(dev, "vkCmdSetDepthBounds");
+    table->CmdSetStencilCompareMask = (PFN_vkCmdSetStencilCompareMask)gpa(dev, "vkCmdSetStencilCompareMask");
+    table->CmdSetStencilWriteMask = (PFN_vkCmdSetStencilWriteMask)gpa(dev, "vkCmdSetStencilWriteMask");
+    table->CmdSetStencilReference = (PFN_vkCmdSetStencilReference)gpa(dev, "vkCmdSetStencilReference");
+    table->CmdBindDescriptorSets = (PFN_vkCmdBindDescriptorSets)gpa(dev, "vkCmdBindDescriptorSets");
+    table->CmdBindIndexBuffer = (PFN_vkCmdBindIndexBuffer)gpa(dev, "vkCmdBindIndexBuffer");
+    table->CmdBindVertexBuffers = (PFN_vkCmdBindVertexBuffers)gpa(dev, "vkCmdBindVertexBuffers");
+    table->CmdDraw = (PFN_vkCmdDraw)gpa(dev, "vkCmdDraw");
+    table->CmdDrawIndexed = (PFN_vkCmdDrawIndexed)gpa(dev, "vkCmdDrawIndexed");
+    table->CmdDrawIndirect = (PFN_vkCmdDrawIndirect)gpa(dev, "vkCmdDrawIndirect");
+    table->CmdDrawIndexedIndirect = (PFN_vkCmdDrawIndexedIndirect)gpa(dev, "vkCmdDrawIndexedIndirect");
+    table->CmdDispatch = (PFN_vkCmdDispatch)gpa(dev, "vkCmdDispatch");
+    table->CmdDispatchIndirect = (PFN_vkCmdDispatchIndirect)gpa(dev, "vkCmdDispatchIndirect");
+    table->CmdCopyBuffer = (PFN_vkCmdCopyBuffer)gpa(dev, "vkCmdCopyBuffer");
+    table->CmdCopyImage = (PFN_vkCmdCopyImage)gpa(dev, "vkCmdCopyImage");
+    table->CmdBlitImage = (PFN_vkCmdBlitImage)gpa(dev, "vkCmdBlitImage");
+    table->CmdCopyBufferToImage = (PFN_vkCmdCopyBufferToImage)gpa(dev, "vkCmdCopyBufferToImage");
+    table->CmdCopyImageToBuffer = (PFN_vkCmdCopyImageToBuffer)gpa(dev, "vkCmdCopyImageToBuffer");
+    table->CmdUpdateBuffer = (PFN_vkCmdUpdateBuffer)gpa(dev, "vkCmdUpdateBuffer");
+    table->CmdFillBuffer = (PFN_vkCmdFillBuffer)gpa(dev, "vkCmdFillBuffer");
+    table->CmdClearColorImage = (PFN_vkCmdClearColorImage)gpa(dev, "vkCmdClearColorImage");
+    table->CmdClearDepthStencilImage = (PFN_vkCmdClearDepthStencilImage)gpa(dev, "vkCmdClearDepthStencilImage");
+    table->CmdClearAttachments = (PFN_vkCmdClearAttachments)gpa(dev, "vkCmdClearAttachments");
+    table->CmdResolveImage = (PFN_vkCmdResolveImage)gpa(dev, "vkCmdResolveImage");
+    table->CmdSetEvent = (PFN_vkCmdSetEvent)gpa(dev, "vkCmdSetEvent");
+    table->CmdResetEvent = (PFN_vkCmdResetEvent)gpa(dev, "vkCmdResetEvent");
+    table->CmdWaitEvents = (PFN_vkCmdWaitEvents)gpa(dev, "vkCmdWaitEvents");
+    table->CmdPipelineBarrier = (PFN_vkCmdPipelineBarrier)gpa(dev, "vkCmdPipelineBarrier");
+    table->CmdBeginQuery = (PFN_vkCmdBeginQuery)gpa(dev, "vkCmdBeginQuery");
+    table->CmdEndQuery = (PFN_vkCmdEndQuery)gpa(dev, "vkCmdEndQuery");
+    table->CmdResetQueryPool = (PFN_vkCmdResetQueryPool)gpa(dev, "vkCmdResetQueryPool");
+    table->CmdWriteTimestamp = (PFN_vkCmdWriteTimestamp)gpa(dev, "vkCmdWriteTimestamp");
+    table->CmdCopyQueryPoolResults = (PFN_vkCmdCopyQueryPoolResults)gpa(dev, "vkCmdCopyQueryPoolResults");
+    table->CmdPushConstants = (PFN_vkCmdPushConstants)gpa(dev, "vkCmdPushConstants");
+    table->CmdBeginRenderPass = (PFN_vkCmdBeginRenderPass)gpa(dev, "vkCmdBeginRenderPass");
+    table->CmdNextSubpass = (PFN_vkCmdNextSubpass)gpa(dev, "vkCmdNextSubpass");
+    table->CmdEndRenderPass = (PFN_vkCmdEndRenderPass)gpa(dev, "vkCmdEndRenderPass");
+    table->CmdExecuteCommands = (PFN_vkCmdExecuteCommands)gpa(dev, "vkCmdExecuteCommands");
+
+    // ---- Core 1_1 commands
+    table->BindBufferMemory2 = (PFN_vkBindBufferMemory2)gpa(dev, "vkBindBufferMemory2");
+    table->BindImageMemory2 = (PFN_vkBindImageMemory2)gpa(dev, "vkBindImageMemory2");
+    table->GetDeviceGroupPeerMemoryFeatures = (PFN_vkGetDeviceGroupPeerMemoryFeatures)gpa(dev, "vkGetDeviceGroupPeerMemoryFeatures");
+    table->CmdSetDeviceMask = (PFN_vkCmdSetDeviceMask)gpa(dev, "vkCmdSetDeviceMask");
+    table->CmdDispatchBase = (PFN_vkCmdDispatchBase)gpa(dev, "vkCmdDispatchBase");
+    table->GetImageMemoryRequirements2 = (PFN_vkGetImageMemoryRequirements2)gpa(dev, "vkGetImageMemoryRequirements2");
+    table->GetBufferMemoryRequirements2 = (PFN_vkGetBufferMemoryRequirements2)gpa(dev, "vkGetBufferMemoryRequirements2");
+    table->GetImageSparseMemoryRequirements2 = (PFN_vkGetImageSparseMemoryRequirements2)gpa(dev, "vkGetImageSparseMemoryRequirements2");
+    table->TrimCommandPool = (PFN_vkTrimCommandPool)gpa(dev, "vkTrimCommandPool");
+    table->GetDeviceQueue2 = (PFN_vkGetDeviceQueue2)gpa(dev, "vkGetDeviceQueue2");
+    table->CreateSamplerYcbcrConversion = (PFN_vkCreateSamplerYcbcrConversion)gpa(dev, "vkCreateSamplerYcbcrConversion");
+    table->DestroySamplerYcbcrConversion = (PFN_vkDestroySamplerYcbcrConversion)gpa(dev, "vkDestroySamplerYcbcrConversion");
+    table->CreateDescriptorUpdateTemplate = (PFN_vkCreateDescriptorUpdateTemplate)gpa(dev, "vkCreateDescriptorUpdateTemplate");
+    table->DestroyDescriptorUpdateTemplate = (PFN_vkDestroyDescriptorUpdateTemplate)gpa(dev, "vkDestroyDescriptorUpdateTemplate");
+    table->UpdateDescriptorSetWithTemplate = (PFN_vkUpdateDescriptorSetWithTemplate)gpa(dev, "vkUpdateDescriptorSetWithTemplate");
+    table->GetDescriptorSetLayoutSupport = (PFN_vkGetDescriptorSetLayoutSupport)gpa(dev, "vkGetDescriptorSetLayoutSupport");
+}
+
+// Init Device function pointer dispatch table with extension commands
+VKAPI_ATTR void VKAPI_CALL loader_init_device_extension_dispatch_table(struct loader_dev_dispatch_table *dev_table,
+                                                                       PFN_vkGetInstanceProcAddr gipa,
+                                                                       PFN_vkGetDeviceProcAddr gdpa,
+                                                                       VkInstance inst,
+                                                                       VkDevice dev) {
+    VkLayerDispatchTable *table = &dev_table->core_dispatch;
+
+    // ---- VK_KHR_swapchain extension commands
+    table->CreateSwapchainKHR = (PFN_vkCreateSwapchainKHR)gdpa(dev, "vkCreateSwapchainKHR");
+    table->DestroySwapchainKHR = (PFN_vkDestroySwapchainKHR)gdpa(dev, "vkDestroySwapchainKHR");
+    table->GetSwapchainImagesKHR = (PFN_vkGetSwapchainImagesKHR)gdpa(dev, "vkGetSwapchainImagesKHR");
+    table->AcquireNextImageKHR = (PFN_vkAcquireNextImageKHR)gdpa(dev, "vkAcquireNextImageKHR");
+    table->QueuePresentKHR = (PFN_vkQueuePresentKHR)gdpa(dev, "vkQueuePresentKHR");
+    table->GetDeviceGroupPresentCapabilitiesKHR = (PFN_vkGetDeviceGroupPresentCapabilitiesKHR)gdpa(dev, "vkGetDeviceGroupPresentCapabilitiesKHR");
+    table->GetDeviceGroupSurfacePresentModesKHR = (PFN_vkGetDeviceGroupSurfacePresentModesKHR)gdpa(dev, "vkGetDeviceGroupSurfacePresentModesKHR");
+    table->AcquireNextImage2KHR = (PFN_vkAcquireNextImage2KHR)gdpa(dev, "vkAcquireNextImage2KHR");
+
+    // ---- VK_KHR_display_swapchain extension commands
+    table->CreateSharedSwapchainsKHR = (PFN_vkCreateSharedSwapchainsKHR)gdpa(dev, "vkCreateSharedSwapchainsKHR");
+
+    // ---- VK_KHR_device_group extension commands
+    table->GetDeviceGroupPeerMemoryFeaturesKHR = (PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR)gdpa(dev, "vkGetDeviceGroupPeerMemoryFeaturesKHR");
+    table->CmdSetDeviceMaskKHR = (PFN_vkCmdSetDeviceMaskKHR)gdpa(dev, "vkCmdSetDeviceMaskKHR");
+    table->CmdDispatchBaseKHR = (PFN_vkCmdDispatchBaseKHR)gdpa(dev, "vkCmdDispatchBaseKHR");
+
+    // ---- VK_KHR_maintenance1 extension commands
+    table->TrimCommandPoolKHR = (PFN_vkTrimCommandPoolKHR)gdpa(dev, "vkTrimCommandPoolKHR");
+
+    // ---- VK_KHR_external_memory_win32 extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->GetMemoryWin32HandleKHR = (PFN_vkGetMemoryWin32HandleKHR)gdpa(dev, "vkGetMemoryWin32HandleKHR");
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->GetMemoryWin32HandlePropertiesKHR = (PFN_vkGetMemoryWin32HandlePropertiesKHR)gdpa(dev, "vkGetMemoryWin32HandlePropertiesKHR");
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_KHR_external_memory_fd extension commands
+    table->GetMemoryFdKHR = (PFN_vkGetMemoryFdKHR)gdpa(dev, "vkGetMemoryFdKHR");
+    table->GetMemoryFdPropertiesKHR = (PFN_vkGetMemoryFdPropertiesKHR)gdpa(dev, "vkGetMemoryFdPropertiesKHR");
+
+    // ---- VK_KHR_external_semaphore_win32 extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->ImportSemaphoreWin32HandleKHR = (PFN_vkImportSemaphoreWin32HandleKHR)gdpa(dev, "vkImportSemaphoreWin32HandleKHR");
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->GetSemaphoreWin32HandleKHR = (PFN_vkGetSemaphoreWin32HandleKHR)gdpa(dev, "vkGetSemaphoreWin32HandleKHR");
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_KHR_external_semaphore_fd extension commands
+    table->ImportSemaphoreFdKHR = (PFN_vkImportSemaphoreFdKHR)gdpa(dev, "vkImportSemaphoreFdKHR");
+    table->GetSemaphoreFdKHR = (PFN_vkGetSemaphoreFdKHR)gdpa(dev, "vkGetSemaphoreFdKHR");
+
+    // ---- VK_KHR_push_descriptor extension commands
+    table->CmdPushDescriptorSetKHR = (PFN_vkCmdPushDescriptorSetKHR)gdpa(dev, "vkCmdPushDescriptorSetKHR");
+    table->CmdPushDescriptorSetWithTemplateKHR = (PFN_vkCmdPushDescriptorSetWithTemplateKHR)gdpa(dev, "vkCmdPushDescriptorSetWithTemplateKHR");
+
+    // ---- VK_KHR_descriptor_update_template extension commands
+    table->CreateDescriptorUpdateTemplateKHR = (PFN_vkCreateDescriptorUpdateTemplateKHR)gdpa(dev, "vkCreateDescriptorUpdateTemplateKHR");
+    table->DestroyDescriptorUpdateTemplateKHR = (PFN_vkDestroyDescriptorUpdateTemplateKHR)gdpa(dev, "vkDestroyDescriptorUpdateTemplateKHR");
+    table->UpdateDescriptorSetWithTemplateKHR = (PFN_vkUpdateDescriptorSetWithTemplateKHR)gdpa(dev, "vkUpdateDescriptorSetWithTemplateKHR");
+
+    // ---- VK_KHR_create_renderpass2 extension commands
+    table->CreateRenderPass2KHR = (PFN_vkCreateRenderPass2KHR)gdpa(dev, "vkCreateRenderPass2KHR");
+    table->CmdBeginRenderPass2KHR = (PFN_vkCmdBeginRenderPass2KHR)gdpa(dev, "vkCmdBeginRenderPass2KHR");
+    table->CmdNextSubpass2KHR = (PFN_vkCmdNextSubpass2KHR)gdpa(dev, "vkCmdNextSubpass2KHR");
+    table->CmdEndRenderPass2KHR = (PFN_vkCmdEndRenderPass2KHR)gdpa(dev, "vkCmdEndRenderPass2KHR");
+
+    // ---- VK_KHR_shared_presentable_image extension commands
+    table->GetSwapchainStatusKHR = (PFN_vkGetSwapchainStatusKHR)gdpa(dev, "vkGetSwapchainStatusKHR");
+
+    // ---- VK_KHR_external_fence_win32 extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->ImportFenceWin32HandleKHR = (PFN_vkImportFenceWin32HandleKHR)gdpa(dev, "vkImportFenceWin32HandleKHR");
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->GetFenceWin32HandleKHR = (PFN_vkGetFenceWin32HandleKHR)gdpa(dev, "vkGetFenceWin32HandleKHR");
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_KHR_external_fence_fd extension commands
+    table->ImportFenceFdKHR = (PFN_vkImportFenceFdKHR)gdpa(dev, "vkImportFenceFdKHR");
+    table->GetFenceFdKHR = (PFN_vkGetFenceFdKHR)gdpa(dev, "vkGetFenceFdKHR");
+
+    // ---- VK_KHR_performance_query extension commands
+    table->AcquireProfilingLockKHR = (PFN_vkAcquireProfilingLockKHR)gdpa(dev, "vkAcquireProfilingLockKHR");
+    table->ReleaseProfilingLockKHR = (PFN_vkReleaseProfilingLockKHR)gdpa(dev, "vkReleaseProfilingLockKHR");
+
+    // ---- VK_KHR_get_memory_requirements2 extension commands
+    table->GetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2KHR)gdpa(dev, "vkGetImageMemoryRequirements2KHR");
+    table->GetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2KHR)gdpa(dev, "vkGetBufferMemoryRequirements2KHR");
+    table->GetImageSparseMemoryRequirements2KHR = (PFN_vkGetImageSparseMemoryRequirements2KHR)gdpa(dev, "vkGetImageSparseMemoryRequirements2KHR");
+
+    // ---- VK_KHR_sampler_ycbcr_conversion extension commands
+    table->CreateSamplerYcbcrConversionKHR = (PFN_vkCreateSamplerYcbcrConversionKHR)gdpa(dev, "vkCreateSamplerYcbcrConversionKHR");
+    table->DestroySamplerYcbcrConversionKHR = (PFN_vkDestroySamplerYcbcrConversionKHR)gdpa(dev, "vkDestroySamplerYcbcrConversionKHR");
+
+    // ---- VK_KHR_bind_memory2 extension commands
+    table->BindBufferMemory2KHR = (PFN_vkBindBufferMemory2KHR)gdpa(dev, "vkBindBufferMemory2KHR");
+    table->BindImageMemory2KHR = (PFN_vkBindImageMemory2KHR)gdpa(dev, "vkBindImageMemory2KHR");
+
+    // ---- VK_KHR_maintenance3 extension commands
+    table->GetDescriptorSetLayoutSupportKHR = (PFN_vkGetDescriptorSetLayoutSupportKHR)gdpa(dev, "vkGetDescriptorSetLayoutSupportKHR");
+
+    // ---- VK_KHR_draw_indirect_count extension commands
+    table->CmdDrawIndirectCountKHR = (PFN_vkCmdDrawIndirectCountKHR)gdpa(dev, "vkCmdDrawIndirectCountKHR");
+    table->CmdDrawIndexedIndirectCountKHR = (PFN_vkCmdDrawIndexedIndirectCountKHR)gdpa(dev, "vkCmdDrawIndexedIndirectCountKHR");
+
+    // ---- VK_KHR_timeline_semaphore extension commands
+    table->GetSemaphoreCounterValueKHR = (PFN_vkGetSemaphoreCounterValueKHR)gdpa(dev, "vkGetSemaphoreCounterValueKHR");
+    table->WaitSemaphoresKHR = (PFN_vkWaitSemaphoresKHR)gdpa(dev, "vkWaitSemaphoresKHR");
+    table->SignalSemaphoreKHR = (PFN_vkSignalSemaphoreKHR)gdpa(dev, "vkSignalSemaphoreKHR");
+
+    // ---- VK_KHR_pipeline_executable_properties extension commands
+    table->GetPipelineExecutablePropertiesKHR = (PFN_vkGetPipelineExecutablePropertiesKHR)gdpa(dev, "vkGetPipelineExecutablePropertiesKHR");
+    table->GetPipelineExecutableStatisticsKHR = (PFN_vkGetPipelineExecutableStatisticsKHR)gdpa(dev, "vkGetPipelineExecutableStatisticsKHR");
+    table->GetPipelineExecutableInternalRepresentationsKHR = (PFN_vkGetPipelineExecutableInternalRepresentationsKHR)gdpa(dev, "vkGetPipelineExecutableInternalRepresentationsKHR");
+
+    // ---- VK_EXT_debug_marker extension commands
+    table->DebugMarkerSetObjectTagEXT = (PFN_vkDebugMarkerSetObjectTagEXT)gdpa(dev, "vkDebugMarkerSetObjectTagEXT");
+    table->DebugMarkerSetObjectNameEXT = (PFN_vkDebugMarkerSetObjectNameEXT)gdpa(dev, "vkDebugMarkerSetObjectNameEXT");
+    table->CmdDebugMarkerBeginEXT = (PFN_vkCmdDebugMarkerBeginEXT)gdpa(dev, "vkCmdDebugMarkerBeginEXT");
+    table->CmdDebugMarkerEndEXT = (PFN_vkCmdDebugMarkerEndEXT)gdpa(dev, "vkCmdDebugMarkerEndEXT");
+    table->CmdDebugMarkerInsertEXT = (PFN_vkCmdDebugMarkerInsertEXT)gdpa(dev, "vkCmdDebugMarkerInsertEXT");
+
+    // ---- VK_EXT_transform_feedback extension commands
+    table->CmdBindTransformFeedbackBuffersEXT = (PFN_vkCmdBindTransformFeedbackBuffersEXT)gdpa(dev, "vkCmdBindTransformFeedbackBuffersEXT");
+    table->CmdBeginTransformFeedbackEXT = (PFN_vkCmdBeginTransformFeedbackEXT)gdpa(dev, "vkCmdBeginTransformFeedbackEXT");
+    table->CmdEndTransformFeedbackEXT = (PFN_vkCmdEndTransformFeedbackEXT)gdpa(dev, "vkCmdEndTransformFeedbackEXT");
+    table->CmdBeginQueryIndexedEXT = (PFN_vkCmdBeginQueryIndexedEXT)gdpa(dev, "vkCmdBeginQueryIndexedEXT");
+    table->CmdEndQueryIndexedEXT = (PFN_vkCmdEndQueryIndexedEXT)gdpa(dev, "vkCmdEndQueryIndexedEXT");
+    table->CmdDrawIndirectByteCountEXT = (PFN_vkCmdDrawIndirectByteCountEXT)gdpa(dev, "vkCmdDrawIndirectByteCountEXT");
+
+    // ---- VK_NVX_image_view_handle extension commands
+    table->GetImageViewHandleNVX = (PFN_vkGetImageViewHandleNVX)gdpa(dev, "vkGetImageViewHandleNVX");
+
+    // ---- VK_AMD_draw_indirect_count extension commands
+    table->CmdDrawIndirectCountAMD = (PFN_vkCmdDrawIndirectCountAMD)gdpa(dev, "vkCmdDrawIndirectCountAMD");
+    table->CmdDrawIndexedIndirectCountAMD = (PFN_vkCmdDrawIndexedIndirectCountAMD)gdpa(dev, "vkCmdDrawIndexedIndirectCountAMD");
+
+    // ---- VK_AMD_shader_info extension commands
+    table->GetShaderInfoAMD = (PFN_vkGetShaderInfoAMD)gdpa(dev, "vkGetShaderInfoAMD");
+
+    // ---- VK_NV_external_memory_win32 extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->GetMemoryWin32HandleNV = (PFN_vkGetMemoryWin32HandleNV)gdpa(dev, "vkGetMemoryWin32HandleNV");
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_EXT_conditional_rendering extension commands
+    table->CmdBeginConditionalRenderingEXT = (PFN_vkCmdBeginConditionalRenderingEXT)gdpa(dev, "vkCmdBeginConditionalRenderingEXT");
+    table->CmdEndConditionalRenderingEXT = (PFN_vkCmdEndConditionalRenderingEXT)gdpa(dev, "vkCmdEndConditionalRenderingEXT");
+
+    // ---- VK_NVX_device_generated_commands extension commands
+    table->CmdProcessCommandsNVX = (PFN_vkCmdProcessCommandsNVX)gdpa(dev, "vkCmdProcessCommandsNVX");
+    table->CmdReserveSpaceForCommandsNVX = (PFN_vkCmdReserveSpaceForCommandsNVX)gdpa(dev, "vkCmdReserveSpaceForCommandsNVX");
+    table->CreateIndirectCommandsLayoutNVX = (PFN_vkCreateIndirectCommandsLayoutNVX)gdpa(dev, "vkCreateIndirectCommandsLayoutNVX");
+    table->DestroyIndirectCommandsLayoutNVX = (PFN_vkDestroyIndirectCommandsLayoutNVX)gdpa(dev, "vkDestroyIndirectCommandsLayoutNVX");
+    table->CreateObjectTableNVX = (PFN_vkCreateObjectTableNVX)gdpa(dev, "vkCreateObjectTableNVX");
+    table->DestroyObjectTableNVX = (PFN_vkDestroyObjectTableNVX)gdpa(dev, "vkDestroyObjectTableNVX");
+    table->RegisterObjectsNVX = (PFN_vkRegisterObjectsNVX)gdpa(dev, "vkRegisterObjectsNVX");
+    table->UnregisterObjectsNVX = (PFN_vkUnregisterObjectsNVX)gdpa(dev, "vkUnregisterObjectsNVX");
+
+    // ---- VK_NV_clip_space_w_scaling extension commands
+    table->CmdSetViewportWScalingNV = (PFN_vkCmdSetViewportWScalingNV)gdpa(dev, "vkCmdSetViewportWScalingNV");
+
+    // ---- VK_EXT_display_control extension commands
+    table->DisplayPowerControlEXT = (PFN_vkDisplayPowerControlEXT)gdpa(dev, "vkDisplayPowerControlEXT");
+    table->RegisterDeviceEventEXT = (PFN_vkRegisterDeviceEventEXT)gdpa(dev, "vkRegisterDeviceEventEXT");
+    table->RegisterDisplayEventEXT = (PFN_vkRegisterDisplayEventEXT)gdpa(dev, "vkRegisterDisplayEventEXT");
+    table->GetSwapchainCounterEXT = (PFN_vkGetSwapchainCounterEXT)gdpa(dev, "vkGetSwapchainCounterEXT");
+
+    // ---- VK_GOOGLE_display_timing extension commands
+    table->GetRefreshCycleDurationGOOGLE = (PFN_vkGetRefreshCycleDurationGOOGLE)gdpa(dev, "vkGetRefreshCycleDurationGOOGLE");
+    table->GetPastPresentationTimingGOOGLE = (PFN_vkGetPastPresentationTimingGOOGLE)gdpa(dev, "vkGetPastPresentationTimingGOOGLE");
+
+    // ---- VK_EXT_discard_rectangles extension commands
+    table->CmdSetDiscardRectangleEXT = (PFN_vkCmdSetDiscardRectangleEXT)gdpa(dev, "vkCmdSetDiscardRectangleEXT");
+
+    // ---- VK_EXT_hdr_metadata extension commands
+    table->SetHdrMetadataEXT = (PFN_vkSetHdrMetadataEXT)gdpa(dev, "vkSetHdrMetadataEXT");
+
+    // ---- VK_EXT_debug_utils extension commands
+    table->SetDebugUtilsObjectNameEXT = (PFN_vkSetDebugUtilsObjectNameEXT)gipa(inst, "vkSetDebugUtilsObjectNameEXT");
+    table->SetDebugUtilsObjectTagEXT = (PFN_vkSetDebugUtilsObjectTagEXT)gipa(inst, "vkSetDebugUtilsObjectTagEXT");
+    table->QueueBeginDebugUtilsLabelEXT = (PFN_vkQueueBeginDebugUtilsLabelEXT)gipa(inst, "vkQueueBeginDebugUtilsLabelEXT");
+    table->QueueEndDebugUtilsLabelEXT = (PFN_vkQueueEndDebugUtilsLabelEXT)gipa(inst, "vkQueueEndDebugUtilsLabelEXT");
+    table->QueueInsertDebugUtilsLabelEXT = (PFN_vkQueueInsertDebugUtilsLabelEXT)gipa(inst, "vkQueueInsertDebugUtilsLabelEXT");
+    table->CmdBeginDebugUtilsLabelEXT = (PFN_vkCmdBeginDebugUtilsLabelEXT)gipa(inst, "vkCmdBeginDebugUtilsLabelEXT");
+    table->CmdEndDebugUtilsLabelEXT = (PFN_vkCmdEndDebugUtilsLabelEXT)gipa(inst, "vkCmdEndDebugUtilsLabelEXT");
+    table->CmdInsertDebugUtilsLabelEXT = (PFN_vkCmdInsertDebugUtilsLabelEXT)gipa(inst, "vkCmdInsertDebugUtilsLabelEXT");
+
+    // ---- VK_ANDROID_external_memory_android_hardware_buffer extension commands
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    table->GetAndroidHardwareBufferPropertiesANDROID = (PFN_vkGetAndroidHardwareBufferPropertiesANDROID)gdpa(dev, "vkGetAndroidHardwareBufferPropertiesANDROID");
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    table->GetMemoryAndroidHardwareBufferANDROID = (PFN_vkGetMemoryAndroidHardwareBufferANDROID)gdpa(dev, "vkGetMemoryAndroidHardwareBufferANDROID");
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+    // ---- VK_EXT_sample_locations extension commands
+    table->CmdSetSampleLocationsEXT = (PFN_vkCmdSetSampleLocationsEXT)gdpa(dev, "vkCmdSetSampleLocationsEXT");
+
+    // ---- VK_EXT_image_drm_format_modifier extension commands
+    table->GetImageDrmFormatModifierPropertiesEXT = (PFN_vkGetImageDrmFormatModifierPropertiesEXT)gdpa(dev, "vkGetImageDrmFormatModifierPropertiesEXT");
+
+    // ---- VK_EXT_validation_cache extension commands
+    table->CreateValidationCacheEXT = (PFN_vkCreateValidationCacheEXT)gdpa(dev, "vkCreateValidationCacheEXT");
+    table->DestroyValidationCacheEXT = (PFN_vkDestroyValidationCacheEXT)gdpa(dev, "vkDestroyValidationCacheEXT");
+    table->MergeValidationCachesEXT = (PFN_vkMergeValidationCachesEXT)gdpa(dev, "vkMergeValidationCachesEXT");
+    table->GetValidationCacheDataEXT = (PFN_vkGetValidationCacheDataEXT)gdpa(dev, "vkGetValidationCacheDataEXT");
+
+    // ---- VK_NV_shading_rate_image extension commands
+    table->CmdBindShadingRateImageNV = (PFN_vkCmdBindShadingRateImageNV)gdpa(dev, "vkCmdBindShadingRateImageNV");
+    table->CmdSetViewportShadingRatePaletteNV = (PFN_vkCmdSetViewportShadingRatePaletteNV)gdpa(dev, "vkCmdSetViewportShadingRatePaletteNV");
+    table->CmdSetCoarseSampleOrderNV = (PFN_vkCmdSetCoarseSampleOrderNV)gdpa(dev, "vkCmdSetCoarseSampleOrderNV");
+
+    // ---- VK_NV_ray_tracing extension commands
+    table->CreateAccelerationStructureNV = (PFN_vkCreateAccelerationStructureNV)gdpa(dev, "vkCreateAccelerationStructureNV");
+    table->DestroyAccelerationStructureNV = (PFN_vkDestroyAccelerationStructureNV)gdpa(dev, "vkDestroyAccelerationStructureNV");
+    table->GetAccelerationStructureMemoryRequirementsNV = (PFN_vkGetAccelerationStructureMemoryRequirementsNV)gdpa(dev, "vkGetAccelerationStructureMemoryRequirementsNV");
+    table->BindAccelerationStructureMemoryNV = (PFN_vkBindAccelerationStructureMemoryNV)gdpa(dev, "vkBindAccelerationStructureMemoryNV");
+    table->CmdBuildAccelerationStructureNV = (PFN_vkCmdBuildAccelerationStructureNV)gdpa(dev, "vkCmdBuildAccelerationStructureNV");
+    table->CmdCopyAccelerationStructureNV = (PFN_vkCmdCopyAccelerationStructureNV)gdpa(dev, "vkCmdCopyAccelerationStructureNV");
+    table->CmdTraceRaysNV = (PFN_vkCmdTraceRaysNV)gdpa(dev, "vkCmdTraceRaysNV");
+    table->CreateRayTracingPipelinesNV = (PFN_vkCreateRayTracingPipelinesNV)gdpa(dev, "vkCreateRayTracingPipelinesNV");
+    table->GetRayTracingShaderGroupHandlesNV = (PFN_vkGetRayTracingShaderGroupHandlesNV)gdpa(dev, "vkGetRayTracingShaderGroupHandlesNV");
+    table->GetAccelerationStructureHandleNV = (PFN_vkGetAccelerationStructureHandleNV)gdpa(dev, "vkGetAccelerationStructureHandleNV");
+    table->CmdWriteAccelerationStructuresPropertiesNV = (PFN_vkCmdWriteAccelerationStructuresPropertiesNV)gdpa(dev, "vkCmdWriteAccelerationStructuresPropertiesNV");
+    table->CompileDeferredNV = (PFN_vkCompileDeferredNV)gdpa(dev, "vkCompileDeferredNV");
+
+    // ---- VK_EXT_external_memory_host extension commands
+    table->GetMemoryHostPointerPropertiesEXT = (PFN_vkGetMemoryHostPointerPropertiesEXT)gdpa(dev, "vkGetMemoryHostPointerPropertiesEXT");
+
+    // ---- VK_AMD_buffer_marker extension commands
+    table->CmdWriteBufferMarkerAMD = (PFN_vkCmdWriteBufferMarkerAMD)gdpa(dev, "vkCmdWriteBufferMarkerAMD");
+
+    // ---- VK_EXT_calibrated_timestamps extension commands
+    table->GetCalibratedTimestampsEXT = (PFN_vkGetCalibratedTimestampsEXT)gdpa(dev, "vkGetCalibratedTimestampsEXT");
+
+    // ---- VK_NV_mesh_shader extension commands
+    table->CmdDrawMeshTasksNV = (PFN_vkCmdDrawMeshTasksNV)gdpa(dev, "vkCmdDrawMeshTasksNV");
+    table->CmdDrawMeshTasksIndirectNV = (PFN_vkCmdDrawMeshTasksIndirectNV)gdpa(dev, "vkCmdDrawMeshTasksIndirectNV");
+    table->CmdDrawMeshTasksIndirectCountNV = (PFN_vkCmdDrawMeshTasksIndirectCountNV)gdpa(dev, "vkCmdDrawMeshTasksIndirectCountNV");
+
+    // ---- VK_NV_scissor_exclusive extension commands
+    table->CmdSetExclusiveScissorNV = (PFN_vkCmdSetExclusiveScissorNV)gdpa(dev, "vkCmdSetExclusiveScissorNV");
+
+    // ---- VK_NV_device_diagnostic_checkpoints extension commands
+    table->CmdSetCheckpointNV = (PFN_vkCmdSetCheckpointNV)gdpa(dev, "vkCmdSetCheckpointNV");
+    table->GetQueueCheckpointDataNV = (PFN_vkGetQueueCheckpointDataNV)gdpa(dev, "vkGetQueueCheckpointDataNV");
+
+    // ---- VK_INTEL_performance_query extension commands
+    table->InitializePerformanceApiINTEL = (PFN_vkInitializePerformanceApiINTEL)gdpa(dev, "vkInitializePerformanceApiINTEL");
+    table->UninitializePerformanceApiINTEL = (PFN_vkUninitializePerformanceApiINTEL)gdpa(dev, "vkUninitializePerformanceApiINTEL");
+    table->CmdSetPerformanceMarkerINTEL = (PFN_vkCmdSetPerformanceMarkerINTEL)gdpa(dev, "vkCmdSetPerformanceMarkerINTEL");
+    table->CmdSetPerformanceStreamMarkerINTEL = (PFN_vkCmdSetPerformanceStreamMarkerINTEL)gdpa(dev, "vkCmdSetPerformanceStreamMarkerINTEL");
+    table->CmdSetPerformanceOverrideINTEL = (PFN_vkCmdSetPerformanceOverrideINTEL)gdpa(dev, "vkCmdSetPerformanceOverrideINTEL");
+    table->AcquirePerformanceConfigurationINTEL = (PFN_vkAcquirePerformanceConfigurationINTEL)gdpa(dev, "vkAcquirePerformanceConfigurationINTEL");
+    table->ReleasePerformanceConfigurationINTEL = (PFN_vkReleasePerformanceConfigurationINTEL)gdpa(dev, "vkReleasePerformanceConfigurationINTEL");
+    table->QueueSetPerformanceConfigurationINTEL = (PFN_vkQueueSetPerformanceConfigurationINTEL)gdpa(dev, "vkQueueSetPerformanceConfigurationINTEL");
+    table->GetPerformanceParameterINTEL = (PFN_vkGetPerformanceParameterINTEL)gdpa(dev, "vkGetPerformanceParameterINTEL");
+
+    // ---- VK_AMD_display_native_hdr extension commands
+    table->SetLocalDimmingAMD = (PFN_vkSetLocalDimmingAMD)gdpa(dev, "vkSetLocalDimmingAMD");
+
+    // ---- VK_EXT_buffer_device_address extension commands
+    table->GetBufferDeviceAddressEXT = (PFN_vkGetBufferDeviceAddressEXT)gdpa(dev, "vkGetBufferDeviceAddressEXT");
+
+    // ---- VK_EXT_full_screen_exclusive extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->AcquireFullScreenExclusiveModeEXT = (PFN_vkAcquireFullScreenExclusiveModeEXT)gdpa(dev, "vkAcquireFullScreenExclusiveModeEXT");
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->ReleaseFullScreenExclusiveModeEXT = (PFN_vkReleaseFullScreenExclusiveModeEXT)gdpa(dev, "vkReleaseFullScreenExclusiveModeEXT");
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->GetDeviceGroupSurfacePresentModes2EXT = (PFN_vkGetDeviceGroupSurfacePresentModes2EXT)gdpa(dev, "vkGetDeviceGroupSurfacePresentModes2EXT");
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_EXT_line_rasterization extension commands
+    table->CmdSetLineStippleEXT = (PFN_vkCmdSetLineStippleEXT)gdpa(dev, "vkCmdSetLineStippleEXT");
+
+    // ---- VK_EXT_host_query_reset extension commands
+    table->ResetQueryPoolEXT = (PFN_vkResetQueryPoolEXT)gdpa(dev, "vkResetQueryPoolEXT");
+}
+
+// Init Instance function pointer dispatch table with core commands
+VKAPI_ATTR void VKAPI_CALL loader_init_instance_core_dispatch_table(VkLayerInstanceDispatchTable *table, PFN_vkGetInstanceProcAddr gpa,
+                                                                    VkInstance inst) {
+
+    // ---- Core 1_0 commands
+    table->DestroyInstance = (PFN_vkDestroyInstance)gpa(inst, "vkDestroyInstance");
+    table->EnumeratePhysicalDevices = (PFN_vkEnumeratePhysicalDevices)gpa(inst, "vkEnumeratePhysicalDevices");
+    table->GetPhysicalDeviceFeatures = (PFN_vkGetPhysicalDeviceFeatures)gpa(inst, "vkGetPhysicalDeviceFeatures");
+    table->GetPhysicalDeviceFormatProperties = (PFN_vkGetPhysicalDeviceFormatProperties)gpa(inst, "vkGetPhysicalDeviceFormatProperties");
+    table->GetPhysicalDeviceImageFormatProperties = (PFN_vkGetPhysicalDeviceImageFormatProperties)gpa(inst, "vkGetPhysicalDeviceImageFormatProperties");
+    table->GetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)gpa(inst, "vkGetPhysicalDeviceProperties");
+    table->GetPhysicalDeviceQueueFamilyProperties = (PFN_vkGetPhysicalDeviceQueueFamilyProperties)gpa(inst, "vkGetPhysicalDeviceQueueFamilyProperties");
+    table->GetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)gpa(inst, "vkGetPhysicalDeviceMemoryProperties");
+    table->GetInstanceProcAddr = gpa;
+    table->EnumerateDeviceExtensionProperties = (PFN_vkEnumerateDeviceExtensionProperties)gpa(inst, "vkEnumerateDeviceExtensionProperties");
+    table->EnumerateDeviceLayerProperties = (PFN_vkEnumerateDeviceLayerProperties)gpa(inst, "vkEnumerateDeviceLayerProperties");
+    table->GetPhysicalDeviceSparseImageFormatProperties = (PFN_vkGetPhysicalDeviceSparseImageFormatProperties)gpa(inst, "vkGetPhysicalDeviceSparseImageFormatProperties");
+
+    // ---- Core 1_1 commands
+    table->EnumeratePhysicalDeviceGroups = (PFN_vkEnumeratePhysicalDeviceGroups)gpa(inst, "vkEnumeratePhysicalDeviceGroups");
+    table->GetPhysicalDeviceFeatures2 = (PFN_vkGetPhysicalDeviceFeatures2)gpa(inst, "vkGetPhysicalDeviceFeatures2");
+    table->GetPhysicalDeviceProperties2 = (PFN_vkGetPhysicalDeviceProperties2)gpa(inst, "vkGetPhysicalDeviceProperties2");
+    table->GetPhysicalDeviceFormatProperties2 = (PFN_vkGetPhysicalDeviceFormatProperties2)gpa(inst, "vkGetPhysicalDeviceFormatProperties2");
+    table->GetPhysicalDeviceImageFormatProperties2 = (PFN_vkGetPhysicalDeviceImageFormatProperties2)gpa(inst, "vkGetPhysicalDeviceImageFormatProperties2");
+    table->GetPhysicalDeviceQueueFamilyProperties2 = (PFN_vkGetPhysicalDeviceQueueFamilyProperties2)gpa(inst, "vkGetPhysicalDeviceQueueFamilyProperties2");
+    table->GetPhysicalDeviceMemoryProperties2 = (PFN_vkGetPhysicalDeviceMemoryProperties2)gpa(inst, "vkGetPhysicalDeviceMemoryProperties2");
+    table->GetPhysicalDeviceSparseImageFormatProperties2 = (PFN_vkGetPhysicalDeviceSparseImageFormatProperties2)gpa(inst, "vkGetPhysicalDeviceSparseImageFormatProperties2");
+    table->GetPhysicalDeviceExternalBufferProperties = (PFN_vkGetPhysicalDeviceExternalBufferProperties)gpa(inst, "vkGetPhysicalDeviceExternalBufferProperties");
+    table->GetPhysicalDeviceExternalFenceProperties = (PFN_vkGetPhysicalDeviceExternalFenceProperties)gpa(inst, "vkGetPhysicalDeviceExternalFenceProperties");
+    table->GetPhysicalDeviceExternalSemaphoreProperties = (PFN_vkGetPhysicalDeviceExternalSemaphoreProperties)gpa(inst, "vkGetPhysicalDeviceExternalSemaphoreProperties");
+}
+
+// Init Instance function pointer dispatch table with extension commands
+VKAPI_ATTR void VKAPI_CALL loader_init_instance_extension_dispatch_table(VkLayerInstanceDispatchTable *table, PFN_vkGetInstanceProcAddr gpa,
+                                                                        VkInstance inst) {
+
+    // ---- VK_KHR_surface extension commands
+    table->DestroySurfaceKHR = (PFN_vkDestroySurfaceKHR)gpa(inst, "vkDestroySurfaceKHR");
+    table->GetPhysicalDeviceSurfaceSupportKHR = (PFN_vkGetPhysicalDeviceSurfaceSupportKHR)gpa(inst, "vkGetPhysicalDeviceSurfaceSupportKHR");
+    table->GetPhysicalDeviceSurfaceCapabilitiesKHR = (PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR)gpa(inst, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR");
+    table->GetPhysicalDeviceSurfaceFormatsKHR = (PFN_vkGetPhysicalDeviceSurfaceFormatsKHR)gpa(inst, "vkGetPhysicalDeviceSurfaceFormatsKHR");
+    table->GetPhysicalDeviceSurfacePresentModesKHR = (PFN_vkGetPhysicalDeviceSurfacePresentModesKHR)gpa(inst, "vkGetPhysicalDeviceSurfacePresentModesKHR");
+
+    // ---- VK_KHR_swapchain extension commands
+    table->GetPhysicalDevicePresentRectanglesKHR = (PFN_vkGetPhysicalDevicePresentRectanglesKHR)gpa(inst, "vkGetPhysicalDevicePresentRectanglesKHR");
+
+    // ---- VK_KHR_display extension commands
+    table->GetPhysicalDeviceDisplayPropertiesKHR = (PFN_vkGetPhysicalDeviceDisplayPropertiesKHR)gpa(inst, "vkGetPhysicalDeviceDisplayPropertiesKHR");
+    table->GetPhysicalDeviceDisplayPlanePropertiesKHR = (PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR)gpa(inst, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR");
+    table->GetDisplayPlaneSupportedDisplaysKHR = (PFN_vkGetDisplayPlaneSupportedDisplaysKHR)gpa(inst, "vkGetDisplayPlaneSupportedDisplaysKHR");
+    table->GetDisplayModePropertiesKHR = (PFN_vkGetDisplayModePropertiesKHR)gpa(inst, "vkGetDisplayModePropertiesKHR");
+    table->CreateDisplayModeKHR = (PFN_vkCreateDisplayModeKHR)gpa(inst, "vkCreateDisplayModeKHR");
+    table->GetDisplayPlaneCapabilitiesKHR = (PFN_vkGetDisplayPlaneCapabilitiesKHR)gpa(inst, "vkGetDisplayPlaneCapabilitiesKHR");
+    table->CreateDisplayPlaneSurfaceKHR = (PFN_vkCreateDisplayPlaneSurfaceKHR)gpa(inst, "vkCreateDisplayPlaneSurfaceKHR");
+
+    // ---- VK_KHR_xlib_surface extension commands
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    table->CreateXlibSurfaceKHR = (PFN_vkCreateXlibSurfaceKHR)gpa(inst, "vkCreateXlibSurfaceKHR");
+#endif // VK_USE_PLATFORM_XLIB_KHR
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    table->GetPhysicalDeviceXlibPresentationSupportKHR = (PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR)gpa(inst, "vkGetPhysicalDeviceXlibPresentationSupportKHR");
+#endif // VK_USE_PLATFORM_XLIB_KHR
+
+    // ---- VK_KHR_xcb_surface extension commands
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    table->CreateXcbSurfaceKHR = (PFN_vkCreateXcbSurfaceKHR)gpa(inst, "vkCreateXcbSurfaceKHR");
+#endif // VK_USE_PLATFORM_XCB_KHR
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    table->GetPhysicalDeviceXcbPresentationSupportKHR = (PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR)gpa(inst, "vkGetPhysicalDeviceXcbPresentationSupportKHR");
+#endif // VK_USE_PLATFORM_XCB_KHR
+
+    // ---- VK_KHR_wayland_surface extension commands
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    table->CreateWaylandSurfaceKHR = (PFN_vkCreateWaylandSurfaceKHR)gpa(inst, "vkCreateWaylandSurfaceKHR");
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    table->GetPhysicalDeviceWaylandPresentationSupportKHR = (PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR)gpa(inst, "vkGetPhysicalDeviceWaylandPresentationSupportKHR");
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+
+    // ---- VK_KHR_android_surface extension commands
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    table->CreateAndroidSurfaceKHR = (PFN_vkCreateAndroidSurfaceKHR)gpa(inst, "vkCreateAndroidSurfaceKHR");
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+    // ---- VK_KHR_win32_surface extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->CreateWin32SurfaceKHR = (PFN_vkCreateWin32SurfaceKHR)gpa(inst, "vkCreateWin32SurfaceKHR");
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->GetPhysicalDeviceWin32PresentationSupportKHR = (PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR)gpa(inst, "vkGetPhysicalDeviceWin32PresentationSupportKHR");
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_KHR_get_physical_device_properties2 extension commands
+    table->GetPhysicalDeviceFeatures2KHR = (PFN_vkGetPhysicalDeviceFeatures2KHR)gpa(inst, "vkGetPhysicalDeviceFeatures2KHR");
+    table->GetPhysicalDeviceProperties2KHR = (PFN_vkGetPhysicalDeviceProperties2KHR)gpa(inst, "vkGetPhysicalDeviceProperties2KHR");
+    table->GetPhysicalDeviceFormatProperties2KHR = (PFN_vkGetPhysicalDeviceFormatProperties2KHR)gpa(inst, "vkGetPhysicalDeviceFormatProperties2KHR");
+    table->GetPhysicalDeviceImageFormatProperties2KHR = (PFN_vkGetPhysicalDeviceImageFormatProperties2KHR)gpa(inst, "vkGetPhysicalDeviceImageFormatProperties2KHR");
+    table->GetPhysicalDeviceQueueFamilyProperties2KHR = (PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR)gpa(inst, "vkGetPhysicalDeviceQueueFamilyProperties2KHR");
+    table->GetPhysicalDeviceMemoryProperties2KHR = (PFN_vkGetPhysicalDeviceMemoryProperties2KHR)gpa(inst, "vkGetPhysicalDeviceMemoryProperties2KHR");
+    table->GetPhysicalDeviceSparseImageFormatProperties2KHR = (PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR)gpa(inst, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR");
+
+    // ---- VK_KHR_device_group_creation extension commands
+    table->EnumeratePhysicalDeviceGroupsKHR = (PFN_vkEnumeratePhysicalDeviceGroupsKHR)gpa(inst, "vkEnumeratePhysicalDeviceGroupsKHR");
+
+    // ---- VK_KHR_external_memory_capabilities extension commands
+    table->GetPhysicalDeviceExternalBufferPropertiesKHR = (PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR)gpa(inst, "vkGetPhysicalDeviceExternalBufferPropertiesKHR");
+
+    // ---- VK_KHR_external_semaphore_capabilities extension commands
+    table->GetPhysicalDeviceExternalSemaphorePropertiesKHR = (PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR)gpa(inst, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR");
+
+    // ---- VK_KHR_external_fence_capabilities extension commands
+    table->GetPhysicalDeviceExternalFencePropertiesKHR = (PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR)gpa(inst, "vkGetPhysicalDeviceExternalFencePropertiesKHR");
+
+    // ---- VK_KHR_performance_query extension commands
+    table->EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = (PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR)gpa(inst, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR");
+    table->GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = (PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR)gpa(inst, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR");
+
+    // ---- VK_KHR_get_surface_capabilities2 extension commands
+    table->GetPhysicalDeviceSurfaceCapabilities2KHR = (PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR)gpa(inst, "vkGetPhysicalDeviceSurfaceCapabilities2KHR");
+    table->GetPhysicalDeviceSurfaceFormats2KHR = (PFN_vkGetPhysicalDeviceSurfaceFormats2KHR)gpa(inst, "vkGetPhysicalDeviceSurfaceFormats2KHR");
+
+    // ---- VK_KHR_get_display_properties2 extension commands
+    table->GetPhysicalDeviceDisplayProperties2KHR = (PFN_vkGetPhysicalDeviceDisplayProperties2KHR)gpa(inst, "vkGetPhysicalDeviceDisplayProperties2KHR");
+    table->GetPhysicalDeviceDisplayPlaneProperties2KHR = (PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR)gpa(inst, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR");
+    table->GetDisplayModeProperties2KHR = (PFN_vkGetDisplayModeProperties2KHR)gpa(inst, "vkGetDisplayModeProperties2KHR");
+    table->GetDisplayPlaneCapabilities2KHR = (PFN_vkGetDisplayPlaneCapabilities2KHR)gpa(inst, "vkGetDisplayPlaneCapabilities2KHR");
+
+    // ---- VK_EXT_debug_report extension commands
+    table->CreateDebugReportCallbackEXT = (PFN_vkCreateDebugReportCallbackEXT)gpa(inst, "vkCreateDebugReportCallbackEXT");
+    table->DestroyDebugReportCallbackEXT = (PFN_vkDestroyDebugReportCallbackEXT)gpa(inst, "vkDestroyDebugReportCallbackEXT");
+    table->DebugReportMessageEXT = (PFN_vkDebugReportMessageEXT)gpa(inst, "vkDebugReportMessageEXT");
+
+    // ---- VK_GGP_stream_descriptor_surface extension commands
+#ifdef VK_USE_PLATFORM_GGP
+    table->CreateStreamDescriptorSurfaceGGP = (PFN_vkCreateStreamDescriptorSurfaceGGP)gpa(inst, "vkCreateStreamDescriptorSurfaceGGP");
+#endif // VK_USE_PLATFORM_GGP
+
+    // ---- VK_NV_external_memory_capabilities extension commands
+    table->GetPhysicalDeviceExternalImageFormatPropertiesNV = (PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV)gpa(inst, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV");
+
+    // ---- VK_NN_vi_surface extension commands
+#ifdef VK_USE_PLATFORM_VI_NN
+    table->CreateViSurfaceNN = (PFN_vkCreateViSurfaceNN)gpa(inst, "vkCreateViSurfaceNN");
+#endif // VK_USE_PLATFORM_VI_NN
+
+    // ---- VK_NVX_device_generated_commands extension commands
+    table->GetPhysicalDeviceGeneratedCommandsPropertiesNVX = (PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX)gpa(inst, "vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX");
+
+    // ---- VK_EXT_direct_mode_display extension commands
+    table->ReleaseDisplayEXT = (PFN_vkReleaseDisplayEXT)gpa(inst, "vkReleaseDisplayEXT");
+
+    // ---- VK_EXT_acquire_xlib_display extension commands
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    table->AcquireXlibDisplayEXT = (PFN_vkAcquireXlibDisplayEXT)gpa(inst, "vkAcquireXlibDisplayEXT");
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    table->GetRandROutputDisplayEXT = (PFN_vkGetRandROutputDisplayEXT)gpa(inst, "vkGetRandROutputDisplayEXT");
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+    // ---- VK_EXT_display_surface_counter extension commands
+    table->GetPhysicalDeviceSurfaceCapabilities2EXT = (PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT)gpa(inst, "vkGetPhysicalDeviceSurfaceCapabilities2EXT");
+
+    // ---- VK_MVK_ios_surface extension commands
+#ifdef VK_USE_PLATFORM_IOS_MVK
+    table->CreateIOSSurfaceMVK = (PFN_vkCreateIOSSurfaceMVK)gpa(inst, "vkCreateIOSSurfaceMVK");
+#endif // VK_USE_PLATFORM_IOS_MVK
+
+    // ---- VK_MVK_macos_surface extension commands
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+    table->CreateMacOSSurfaceMVK = (PFN_vkCreateMacOSSurfaceMVK)gpa(inst, "vkCreateMacOSSurfaceMVK");
+#endif // VK_USE_PLATFORM_MACOS_MVK
+
+    // ---- VK_EXT_debug_utils extension commands
+    table->CreateDebugUtilsMessengerEXT = (PFN_vkCreateDebugUtilsMessengerEXT)gpa(inst, "vkCreateDebugUtilsMessengerEXT");
+    table->DestroyDebugUtilsMessengerEXT = (PFN_vkDestroyDebugUtilsMessengerEXT)gpa(inst, "vkDestroyDebugUtilsMessengerEXT");
+    table->SubmitDebugUtilsMessageEXT = (PFN_vkSubmitDebugUtilsMessageEXT)gpa(inst, "vkSubmitDebugUtilsMessageEXT");
+
+    // ---- VK_EXT_sample_locations extension commands
+    table->GetPhysicalDeviceMultisamplePropertiesEXT = (PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT)gpa(inst, "vkGetPhysicalDeviceMultisamplePropertiesEXT");
+
+    // ---- VK_EXT_calibrated_timestamps extension commands
+    table->GetPhysicalDeviceCalibrateableTimeDomainsEXT = (PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT)gpa(inst, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT");
+
+    // ---- VK_FUCHSIA_imagepipe_surface extension commands
+#ifdef VK_USE_PLATFORM_FUCHSIA
+    table->CreateImagePipeSurfaceFUCHSIA = (PFN_vkCreateImagePipeSurfaceFUCHSIA)gpa(inst, "vkCreateImagePipeSurfaceFUCHSIA");
+#endif // VK_USE_PLATFORM_FUCHSIA
+
+    // ---- VK_EXT_metal_surface extension commands
+#ifdef VK_USE_PLATFORM_METAL_EXT
+    table->CreateMetalSurfaceEXT = (PFN_vkCreateMetalSurfaceEXT)gpa(inst, "vkCreateMetalSurfaceEXT");
+#endif // VK_USE_PLATFORM_METAL_EXT
+
+    // ---- VK_NV_cooperative_matrix extension commands
+    table->GetPhysicalDeviceCooperativeMatrixPropertiesNV = (PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV)gpa(inst, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV");
+
+    // ---- VK_NV_coverage_reduction_mode extension commands
+    table->GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = (PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV)gpa(inst, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV");
+
+    // ---- VK_EXT_full_screen_exclusive extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->GetPhysicalDeviceSurfacePresentModes2EXT = (PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT)gpa(inst, "vkGetPhysicalDeviceSurfacePresentModes2EXT");
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_EXT_headless_surface extension commands
+    table->CreateHeadlessSurfaceEXT = (PFN_vkCreateHeadlessSurfaceEXT)gpa(inst, "vkCreateHeadlessSurfaceEXT");
+}
+
+// Device command lookup function
+VKAPI_ATTR void* VKAPI_CALL loader_lookup_device_dispatch_table(const VkLayerDispatchTable *table, const char *name) {
+    if (!name || name[0] != 'v' || name[1] != 'k') return NULL;
+
+    name += 2;
+
+    // ---- Core 1_0 commands
+    if (!strcmp(name, "GetDeviceProcAddr")) return (void *)table->GetDeviceProcAddr;
+    if (!strcmp(name, "DestroyDevice")) return (void *)table->DestroyDevice;
+    if (!strcmp(name, "GetDeviceQueue")) return (void *)table->GetDeviceQueue;
+    if (!strcmp(name, "QueueSubmit")) return (void *)table->QueueSubmit;
+    if (!strcmp(name, "QueueWaitIdle")) return (void *)table->QueueWaitIdle;
+    if (!strcmp(name, "DeviceWaitIdle")) return (void *)table->DeviceWaitIdle;
+    if (!strcmp(name, "AllocateMemory")) return (void *)table->AllocateMemory;
+    if (!strcmp(name, "FreeMemory")) return (void *)table->FreeMemory;
+    if (!strcmp(name, "MapMemory")) return (void *)table->MapMemory;
+    if (!strcmp(name, "UnmapMemory")) return (void *)table->UnmapMemory;
+    if (!strcmp(name, "FlushMappedMemoryRanges")) return (void *)table->FlushMappedMemoryRanges;
+    if (!strcmp(name, "InvalidateMappedMemoryRanges")) return (void *)table->InvalidateMappedMemoryRanges;
+    if (!strcmp(name, "GetDeviceMemoryCommitment")) return (void *)table->GetDeviceMemoryCommitment;
+    if (!strcmp(name, "BindBufferMemory")) return (void *)table->BindBufferMemory;
+    if (!strcmp(name, "BindImageMemory")) return (void *)table->BindImageMemory;
+    if (!strcmp(name, "GetBufferMemoryRequirements")) return (void *)table->GetBufferMemoryRequirements;
+    if (!strcmp(name, "GetImageMemoryRequirements")) return (void *)table->GetImageMemoryRequirements;
+    if (!strcmp(name, "GetImageSparseMemoryRequirements")) return (void *)table->GetImageSparseMemoryRequirements;
+    if (!strcmp(name, "QueueBindSparse")) return (void *)table->QueueBindSparse;
+    if (!strcmp(name, "CreateFence")) return (void *)table->CreateFence;
+    if (!strcmp(name, "DestroyFence")) return (void *)table->DestroyFence;
+    if (!strcmp(name, "ResetFences")) return (void *)table->ResetFences;
+    if (!strcmp(name, "GetFenceStatus")) return (void *)table->GetFenceStatus;
+    if (!strcmp(name, "WaitForFences")) return (void *)table->WaitForFences;
+    if (!strcmp(name, "CreateSemaphore")) return (void *)table->CreateSemaphore;
+    if (!strcmp(name, "DestroySemaphore")) return (void *)table->DestroySemaphore;
+    if (!strcmp(name, "CreateEvent")) return (void *)table->CreateEvent;
+    if (!strcmp(name, "DestroyEvent")) return (void *)table->DestroyEvent;
+    if (!strcmp(name, "GetEventStatus")) return (void *)table->GetEventStatus;
+    if (!strcmp(name, "SetEvent")) return (void *)table->SetEvent;
+    if (!strcmp(name, "ResetEvent")) return (void *)table->ResetEvent;
+    if (!strcmp(name, "CreateQueryPool")) return (void *)table->CreateQueryPool;
+    if (!strcmp(name, "DestroyQueryPool")) return (void *)table->DestroyQueryPool;
+    if (!strcmp(name, "GetQueryPoolResults")) return (void *)table->GetQueryPoolResults;
+    if (!strcmp(name, "CreateBuffer")) return (void *)table->CreateBuffer;
+    if (!strcmp(name, "DestroyBuffer")) return (void *)table->DestroyBuffer;
+    if (!strcmp(name, "CreateBufferView")) return (void *)table->CreateBufferView;
+    if (!strcmp(name, "DestroyBufferView")) return (void *)table->DestroyBufferView;
+    if (!strcmp(name, "CreateImage")) return (void *)table->CreateImage;
+    if (!strcmp(name, "DestroyImage")) return (void *)table->DestroyImage;
+    if (!strcmp(name, "GetImageSubresourceLayout")) return (void *)table->GetImageSubresourceLayout;
+    if (!strcmp(name, "CreateImageView")) return (void *)table->CreateImageView;
+    if (!strcmp(name, "DestroyImageView")) return (void *)table->DestroyImageView;
+    if (!strcmp(name, "CreateShaderModule")) return (void *)table->CreateShaderModule;
+    if (!strcmp(name, "DestroyShaderModule")) return (void *)table->DestroyShaderModule;
+    if (!strcmp(name, "CreatePipelineCache")) return (void *)table->CreatePipelineCache;
+    if (!strcmp(name, "DestroyPipelineCache")) return (void *)table->DestroyPipelineCache;
+    if (!strcmp(name, "GetPipelineCacheData")) return (void *)table->GetPipelineCacheData;
+    if (!strcmp(name, "MergePipelineCaches")) return (void *)table->MergePipelineCaches;
+    if (!strcmp(name, "CreateGraphicsPipelines")) return (void *)table->CreateGraphicsPipelines;
+    if (!strcmp(name, "CreateComputePipelines")) return (void *)table->CreateComputePipelines;
+    if (!strcmp(name, "DestroyPipeline")) return (void *)table->DestroyPipeline;
+    if (!strcmp(name, "CreatePipelineLayout")) return (void *)table->CreatePipelineLayout;
+    if (!strcmp(name, "DestroyPipelineLayout")) return (void *)table->DestroyPipelineLayout;
+    if (!strcmp(name, "CreateSampler")) return (void *)table->CreateSampler;
+    if (!strcmp(name, "DestroySampler")) return (void *)table->DestroySampler;
+    if (!strcmp(name, "CreateDescriptorSetLayout")) return (void *)table->CreateDescriptorSetLayout;
+    if (!strcmp(name, "DestroyDescriptorSetLayout")) return (void *)table->DestroyDescriptorSetLayout;
+    if (!strcmp(name, "CreateDescriptorPool")) return (void *)table->CreateDescriptorPool;
+    if (!strcmp(name, "DestroyDescriptorPool")) return (void *)table->DestroyDescriptorPool;
+    if (!strcmp(name, "ResetDescriptorPool")) return (void *)table->ResetDescriptorPool;
+    if (!strcmp(name, "AllocateDescriptorSets")) return (void *)table->AllocateDescriptorSets;
+    if (!strcmp(name, "FreeDescriptorSets")) return (void *)table->FreeDescriptorSets;
+    if (!strcmp(name, "UpdateDescriptorSets")) return (void *)table->UpdateDescriptorSets;
+    if (!strcmp(name, "CreateFramebuffer")) return (void *)table->CreateFramebuffer;
+    if (!strcmp(name, "DestroyFramebuffer")) return (void *)table->DestroyFramebuffer;
+    if (!strcmp(name, "CreateRenderPass")) return (void *)table->CreateRenderPass;
+    if (!strcmp(name, "DestroyRenderPass")) return (void *)table->DestroyRenderPass;
+    if (!strcmp(name, "GetRenderAreaGranularity")) return (void *)table->GetRenderAreaGranularity;
+    if (!strcmp(name, "CreateCommandPool")) return (void *)table->CreateCommandPool;
+    if (!strcmp(name, "DestroyCommandPool")) return (void *)table->DestroyCommandPool;
+    if (!strcmp(name, "ResetCommandPool")) return (void *)table->ResetCommandPool;
+    if (!strcmp(name, "AllocateCommandBuffers")) return (void *)table->AllocateCommandBuffers;
+    if (!strcmp(name, "FreeCommandBuffers")) return (void *)table->FreeCommandBuffers;
+    if (!strcmp(name, "BeginCommandBuffer")) return (void *)table->BeginCommandBuffer;
+    if (!strcmp(name, "EndCommandBuffer")) return (void *)table->EndCommandBuffer;
+    if (!strcmp(name, "ResetCommandBuffer")) return (void *)table->ResetCommandBuffer;
+    if (!strcmp(name, "CmdBindPipeline")) return (void *)table->CmdBindPipeline;
+    if (!strcmp(name, "CmdSetViewport")) return (void *)table->CmdSetViewport;
+    if (!strcmp(name, "CmdSetScissor")) return (void *)table->CmdSetScissor;
+    if (!strcmp(name, "CmdSetLineWidth")) return (void *)table->CmdSetLineWidth;
+    if (!strcmp(name, "CmdSetDepthBias")) return (void *)table->CmdSetDepthBias;
+    if (!strcmp(name, "CmdSetBlendConstants")) return (void *)table->CmdSetBlendConstants;
+    if (!strcmp(name, "CmdSetDepthBounds")) return (void *)table->CmdSetDepthBounds;
+    if (!strcmp(name, "CmdSetStencilCompareMask")) return (void *)table->CmdSetStencilCompareMask;
+    if (!strcmp(name, "CmdSetStencilWriteMask")) return (void *)table->CmdSetStencilWriteMask;
+    if (!strcmp(name, "CmdSetStencilReference")) return (void *)table->CmdSetStencilReference;
+    if (!strcmp(name, "CmdBindDescriptorSets")) return (void *)table->CmdBindDescriptorSets;
+    if (!strcmp(name, "CmdBindIndexBuffer")) return (void *)table->CmdBindIndexBuffer;
+    if (!strcmp(name, "CmdBindVertexBuffers")) return (void *)table->CmdBindVertexBuffers;
+    if (!strcmp(name, "CmdDraw")) return (void *)table->CmdDraw;
+    if (!strcmp(name, "CmdDrawIndexed")) return (void *)table->CmdDrawIndexed;
+    if (!strcmp(name, "CmdDrawIndirect")) return (void *)table->CmdDrawIndirect;
+    if (!strcmp(name, "CmdDrawIndexedIndirect")) return (void *)table->CmdDrawIndexedIndirect;
+    if (!strcmp(name, "CmdDispatch")) return (void *)table->CmdDispatch;
+    if (!strcmp(name, "CmdDispatchIndirect")) return (void *)table->CmdDispatchIndirect;
+    if (!strcmp(name, "CmdCopyBuffer")) return (void *)table->CmdCopyBuffer;
+    if (!strcmp(name, "CmdCopyImage")) return (void *)table->CmdCopyImage;
+    if (!strcmp(name, "CmdBlitImage")) return (void *)table->CmdBlitImage;
+    if (!strcmp(name, "CmdCopyBufferToImage")) return (void *)table->CmdCopyBufferToImage;
+    if (!strcmp(name, "CmdCopyImageToBuffer")) return (void *)table->CmdCopyImageToBuffer;
+    if (!strcmp(name, "CmdUpdateBuffer")) return (void *)table->CmdUpdateBuffer;
+    if (!strcmp(name, "CmdFillBuffer")) return (void *)table->CmdFillBuffer;
+    if (!strcmp(name, "CmdClearColorImage")) return (void *)table->CmdClearColorImage;
+    if (!strcmp(name, "CmdClearDepthStencilImage")) return (void *)table->CmdClearDepthStencilImage;
+    if (!strcmp(name, "CmdClearAttachments")) return (void *)table->CmdClearAttachments;
+    if (!strcmp(name, "CmdResolveImage")) return (void *)table->CmdResolveImage;
+    if (!strcmp(name, "CmdSetEvent")) return (void *)table->CmdSetEvent;
+    if (!strcmp(name, "CmdResetEvent")) return (void *)table->CmdResetEvent;
+    if (!strcmp(name, "CmdWaitEvents")) return (void *)table->CmdWaitEvents;
+    if (!strcmp(name, "CmdPipelineBarrier")) return (void *)table->CmdPipelineBarrier;
+    if (!strcmp(name, "CmdBeginQuery")) return (void *)table->CmdBeginQuery;
+    if (!strcmp(name, "CmdEndQuery")) return (void *)table->CmdEndQuery;
+    if (!strcmp(name, "CmdResetQueryPool")) return (void *)table->CmdResetQueryPool;
+    if (!strcmp(name, "CmdWriteTimestamp")) return (void *)table->CmdWriteTimestamp;
+    if (!strcmp(name, "CmdCopyQueryPoolResults")) return (void *)table->CmdCopyQueryPoolResults;
+    if (!strcmp(name, "CmdPushConstants")) return (void *)table->CmdPushConstants;
+    if (!strcmp(name, "CmdBeginRenderPass")) return (void *)table->CmdBeginRenderPass;
+    if (!strcmp(name, "CmdNextSubpass")) return (void *)table->CmdNextSubpass;
+    if (!strcmp(name, "CmdEndRenderPass")) return (void *)table->CmdEndRenderPass;
+    if (!strcmp(name, "CmdExecuteCommands")) return (void *)table->CmdExecuteCommands;
+
+    // ---- Core 1_1 commands
+    if (!strcmp(name, "BindBufferMemory2")) return (void *)table->BindBufferMemory2;
+    if (!strcmp(name, "BindImageMemory2")) return (void *)table->BindImageMemory2;
+    if (!strcmp(name, "GetDeviceGroupPeerMemoryFeatures")) return (void *)table->GetDeviceGroupPeerMemoryFeatures;
+    if (!strcmp(name, "CmdSetDeviceMask")) return (void *)table->CmdSetDeviceMask;
+    if (!strcmp(name, "CmdDispatchBase")) return (void *)table->CmdDispatchBase;
+    if (!strcmp(name, "GetImageMemoryRequirements2")) return (void *)table->GetImageMemoryRequirements2;
+    if (!strcmp(name, "GetBufferMemoryRequirements2")) return (void *)table->GetBufferMemoryRequirements2;
+    if (!strcmp(name, "GetImageSparseMemoryRequirements2")) return (void *)table->GetImageSparseMemoryRequirements2;
+    if (!strcmp(name, "TrimCommandPool")) return (void *)table->TrimCommandPool;
+    if (!strcmp(name, "GetDeviceQueue2")) return (void *)table->GetDeviceQueue2;
+    if (!strcmp(name, "CreateSamplerYcbcrConversion")) return (void *)table->CreateSamplerYcbcrConversion;
+    if (!strcmp(name, "DestroySamplerYcbcrConversion")) return (void *)table->DestroySamplerYcbcrConversion;
+    if (!strcmp(name, "CreateDescriptorUpdateTemplate")) return (void *)table->CreateDescriptorUpdateTemplate;
+    if (!strcmp(name, "DestroyDescriptorUpdateTemplate")) return (void *)table->DestroyDescriptorUpdateTemplate;
+    if (!strcmp(name, "UpdateDescriptorSetWithTemplate")) return (void *)table->UpdateDescriptorSetWithTemplate;
+    if (!strcmp(name, "GetDescriptorSetLayoutSupport")) return (void *)table->GetDescriptorSetLayoutSupport;
+
+    // ---- VK_KHR_swapchain extension commands
+    if (!strcmp(name, "CreateSwapchainKHR")) return (void *)table->CreateSwapchainKHR;
+    if (!strcmp(name, "DestroySwapchainKHR")) return (void *)table->DestroySwapchainKHR;
+    if (!strcmp(name, "GetSwapchainImagesKHR")) return (void *)table->GetSwapchainImagesKHR;
+    if (!strcmp(name, "AcquireNextImageKHR")) return (void *)table->AcquireNextImageKHR;
+    if (!strcmp(name, "QueuePresentKHR")) return (void *)table->QueuePresentKHR;
+    if (!strcmp(name, "GetDeviceGroupPresentCapabilitiesKHR")) return (void *)table->GetDeviceGroupPresentCapabilitiesKHR;
+    if (!strcmp(name, "GetDeviceGroupSurfacePresentModesKHR")) return (void *)table->GetDeviceGroupSurfacePresentModesKHR;
+    if (!strcmp(name, "AcquireNextImage2KHR")) return (void *)table->AcquireNextImage2KHR;
+
+    // ---- VK_KHR_display_swapchain extension commands
+    if (!strcmp(name, "CreateSharedSwapchainsKHR")) return (void *)table->CreateSharedSwapchainsKHR;
+
+    // ---- VK_KHR_device_group extension commands
+    if (!strcmp(name, "GetDeviceGroupPeerMemoryFeaturesKHR")) return (void *)table->GetDeviceGroupPeerMemoryFeaturesKHR;
+    if (!strcmp(name, "CmdSetDeviceMaskKHR")) return (void *)table->CmdSetDeviceMaskKHR;
+    if (!strcmp(name, "CmdDispatchBaseKHR")) return (void *)table->CmdDispatchBaseKHR;
+
+    // ---- VK_KHR_maintenance1 extension commands
+    if (!strcmp(name, "TrimCommandPoolKHR")) return (void *)table->TrimCommandPoolKHR;
+
+    // ---- VK_KHR_external_memory_win32 extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    if (!strcmp(name, "GetMemoryWin32HandleKHR")) return (void *)table->GetMemoryWin32HandleKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    if (!strcmp(name, "GetMemoryWin32HandlePropertiesKHR")) return (void *)table->GetMemoryWin32HandlePropertiesKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_KHR_external_memory_fd extension commands
+    if (!strcmp(name, "GetMemoryFdKHR")) return (void *)table->GetMemoryFdKHR;
+    if (!strcmp(name, "GetMemoryFdPropertiesKHR")) return (void *)table->GetMemoryFdPropertiesKHR;
+
+    // ---- VK_KHR_external_semaphore_win32 extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    if (!strcmp(name, "ImportSemaphoreWin32HandleKHR")) return (void *)table->ImportSemaphoreWin32HandleKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    if (!strcmp(name, "GetSemaphoreWin32HandleKHR")) return (void *)table->GetSemaphoreWin32HandleKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_KHR_external_semaphore_fd extension commands
+    if (!strcmp(name, "ImportSemaphoreFdKHR")) return (void *)table->ImportSemaphoreFdKHR;
+    if (!strcmp(name, "GetSemaphoreFdKHR")) return (void *)table->GetSemaphoreFdKHR;
+
+    // ---- VK_KHR_push_descriptor extension commands
+    if (!strcmp(name, "CmdPushDescriptorSetKHR")) return (void *)table->CmdPushDescriptorSetKHR;
+    if (!strcmp(name, "CmdPushDescriptorSetWithTemplateKHR")) return (void *)table->CmdPushDescriptorSetWithTemplateKHR;
+
+    // ---- VK_KHR_descriptor_update_template extension commands
+    if (!strcmp(name, "CreateDescriptorUpdateTemplateKHR")) return (void *)table->CreateDescriptorUpdateTemplateKHR;
+    if (!strcmp(name, "DestroyDescriptorUpdateTemplateKHR")) return (void *)table->DestroyDescriptorUpdateTemplateKHR;
+    if (!strcmp(name, "UpdateDescriptorSetWithTemplateKHR")) return (void *)table->UpdateDescriptorSetWithTemplateKHR;
+
+    // ---- VK_KHR_create_renderpass2 extension commands
+    if (!strcmp(name, "CreateRenderPass2KHR")) return (void *)table->CreateRenderPass2KHR;
+    if (!strcmp(name, "CmdBeginRenderPass2KHR")) return (void *)table->CmdBeginRenderPass2KHR;
+    if (!strcmp(name, "CmdNextSubpass2KHR")) return (void *)table->CmdNextSubpass2KHR;
+    if (!strcmp(name, "CmdEndRenderPass2KHR")) return (void *)table->CmdEndRenderPass2KHR;
+
+    // ---- VK_KHR_shared_presentable_image extension commands
+    if (!strcmp(name, "GetSwapchainStatusKHR")) return (void *)table->GetSwapchainStatusKHR;
+
+    // ---- VK_KHR_external_fence_win32 extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    if (!strcmp(name, "ImportFenceWin32HandleKHR")) return (void *)table->ImportFenceWin32HandleKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    if (!strcmp(name, "GetFenceWin32HandleKHR")) return (void *)table->GetFenceWin32HandleKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_KHR_external_fence_fd extension commands
+    if (!strcmp(name, "ImportFenceFdKHR")) return (void *)table->ImportFenceFdKHR;
+    if (!strcmp(name, "GetFenceFdKHR")) return (void *)table->GetFenceFdKHR;
+
+    // ---- VK_KHR_performance_query extension commands
+    if (!strcmp(name, "AcquireProfilingLockKHR")) return (void *)table->AcquireProfilingLockKHR;
+    if (!strcmp(name, "ReleaseProfilingLockKHR")) return (void *)table->ReleaseProfilingLockKHR;
+
+    // ---- VK_KHR_get_memory_requirements2 extension commands
+    if (!strcmp(name, "GetImageMemoryRequirements2KHR")) return (void *)table->GetImageMemoryRequirements2KHR;
+    if (!strcmp(name, "GetBufferMemoryRequirements2KHR")) return (void *)table->GetBufferMemoryRequirements2KHR;
+    if (!strcmp(name, "GetImageSparseMemoryRequirements2KHR")) return (void *)table->GetImageSparseMemoryRequirements2KHR;
+
+    // ---- VK_KHR_sampler_ycbcr_conversion extension commands
+    if (!strcmp(name, "CreateSamplerYcbcrConversionKHR")) return (void *)table->CreateSamplerYcbcrConversionKHR;
+    if (!strcmp(name, "DestroySamplerYcbcrConversionKHR")) return (void *)table->DestroySamplerYcbcrConversionKHR;
+
+    // ---- VK_KHR_bind_memory2 extension commands
+    if (!strcmp(name, "BindBufferMemory2KHR")) return (void *)table->BindBufferMemory2KHR;
+    if (!strcmp(name, "BindImageMemory2KHR")) return (void *)table->BindImageMemory2KHR;
+
+    // ---- VK_KHR_maintenance3 extension commands
+    if (!strcmp(name, "GetDescriptorSetLayoutSupportKHR")) return (void *)table->GetDescriptorSetLayoutSupportKHR;
+
+    // ---- VK_KHR_draw_indirect_count extension commands
+    if (!strcmp(name, "CmdDrawIndirectCountKHR")) return (void *)table->CmdDrawIndirectCountKHR;
+    if (!strcmp(name, "CmdDrawIndexedIndirectCountKHR")) return (void *)table->CmdDrawIndexedIndirectCountKHR;
+
+    // ---- VK_KHR_timeline_semaphore extension commands
+    if (!strcmp(name, "GetSemaphoreCounterValueKHR")) return (void *)table->GetSemaphoreCounterValueKHR;
+    if (!strcmp(name, "WaitSemaphoresKHR")) return (void *)table->WaitSemaphoresKHR;
+    if (!strcmp(name, "SignalSemaphoreKHR")) return (void *)table->SignalSemaphoreKHR;
+
+    // ---- VK_KHR_pipeline_executable_properties extension commands
+    if (!strcmp(name, "GetPipelineExecutablePropertiesKHR")) return (void *)table->GetPipelineExecutablePropertiesKHR;
+    if (!strcmp(name, "GetPipelineExecutableStatisticsKHR")) return (void *)table->GetPipelineExecutableStatisticsKHR;
+    if (!strcmp(name, "GetPipelineExecutableInternalRepresentationsKHR")) return (void *)table->GetPipelineExecutableInternalRepresentationsKHR;
+
+    // ---- VK_EXT_debug_marker extension commands
+    if (!strcmp(name, "DebugMarkerSetObjectTagEXT")) return (void *)table->DebugMarkerSetObjectTagEXT;
+    if (!strcmp(name, "DebugMarkerSetObjectNameEXT")) return (void *)table->DebugMarkerSetObjectNameEXT;
+    if (!strcmp(name, "CmdDebugMarkerBeginEXT")) return (void *)table->CmdDebugMarkerBeginEXT;
+    if (!strcmp(name, "CmdDebugMarkerEndEXT")) return (void *)table->CmdDebugMarkerEndEXT;
+    if (!strcmp(name, "CmdDebugMarkerInsertEXT")) return (void *)table->CmdDebugMarkerInsertEXT;
+
+    // ---- VK_EXT_transform_feedback extension commands
+    if (!strcmp(name, "CmdBindTransformFeedbackBuffersEXT")) return (void *)table->CmdBindTransformFeedbackBuffersEXT;
+    if (!strcmp(name, "CmdBeginTransformFeedbackEXT")) return (void *)table->CmdBeginTransformFeedbackEXT;
+    if (!strcmp(name, "CmdEndTransformFeedbackEXT")) return (void *)table->CmdEndTransformFeedbackEXT;
+    if (!strcmp(name, "CmdBeginQueryIndexedEXT")) return (void *)table->CmdBeginQueryIndexedEXT;
+    if (!strcmp(name, "CmdEndQueryIndexedEXT")) return (void *)table->CmdEndQueryIndexedEXT;
+    if (!strcmp(name, "CmdDrawIndirectByteCountEXT")) return (void *)table->CmdDrawIndirectByteCountEXT;
+
+    // ---- VK_NVX_image_view_handle extension commands
+    if (!strcmp(name, "GetImageViewHandleNVX")) return (void *)table->GetImageViewHandleNVX;
+
+    // ---- VK_AMD_draw_indirect_count extension commands
+    if (!strcmp(name, "CmdDrawIndirectCountAMD")) return (void *)table->CmdDrawIndirectCountAMD;
+    if (!strcmp(name, "CmdDrawIndexedIndirectCountAMD")) return (void *)table->CmdDrawIndexedIndirectCountAMD;
+
+    // ---- VK_AMD_shader_info extension commands
+    if (!strcmp(name, "GetShaderInfoAMD")) return (void *)table->GetShaderInfoAMD;
+
+    // ---- VK_NV_external_memory_win32 extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    if (!strcmp(name, "GetMemoryWin32HandleNV")) return (void *)table->GetMemoryWin32HandleNV;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_EXT_conditional_rendering extension commands
+    if (!strcmp(name, "CmdBeginConditionalRenderingEXT")) return (void *)table->CmdBeginConditionalRenderingEXT;
+    if (!strcmp(name, "CmdEndConditionalRenderingEXT")) return (void *)table->CmdEndConditionalRenderingEXT;
+
+    // ---- VK_NVX_device_generated_commands extension commands
+    if (!strcmp(name, "CmdProcessCommandsNVX")) return (void *)table->CmdProcessCommandsNVX;
+    if (!strcmp(name, "CmdReserveSpaceForCommandsNVX")) return (void *)table->CmdReserveSpaceForCommandsNVX;
+    if (!strcmp(name, "CreateIndirectCommandsLayoutNVX")) return (void *)table->CreateIndirectCommandsLayoutNVX;
+    if (!strcmp(name, "DestroyIndirectCommandsLayoutNVX")) return (void *)table->DestroyIndirectCommandsLayoutNVX;
+    if (!strcmp(name, "CreateObjectTableNVX")) return (void *)table->CreateObjectTableNVX;
+    if (!strcmp(name, "DestroyObjectTableNVX")) return (void *)table->DestroyObjectTableNVX;
+    if (!strcmp(name, "RegisterObjectsNVX")) return (void *)table->RegisterObjectsNVX;
+    if (!strcmp(name, "UnregisterObjectsNVX")) return (void *)table->UnregisterObjectsNVX;
+
+    // ---- VK_NV_clip_space_w_scaling extension commands
+    if (!strcmp(name, "CmdSetViewportWScalingNV")) return (void *)table->CmdSetViewportWScalingNV;
+
+    // ---- VK_EXT_display_control extension commands
+    if (!strcmp(name, "DisplayPowerControlEXT")) return (void *)table->DisplayPowerControlEXT;
+    if (!strcmp(name, "RegisterDeviceEventEXT")) return (void *)table->RegisterDeviceEventEXT;
+    if (!strcmp(name, "RegisterDisplayEventEXT")) return (void *)table->RegisterDisplayEventEXT;
+    if (!strcmp(name, "GetSwapchainCounterEXT")) return (void *)table->GetSwapchainCounterEXT;
+
+    // ---- VK_GOOGLE_display_timing extension commands
+    if (!strcmp(name, "GetRefreshCycleDurationGOOGLE")) return (void *)table->GetRefreshCycleDurationGOOGLE;
+    if (!strcmp(name, "GetPastPresentationTimingGOOGLE")) return (void *)table->GetPastPresentationTimingGOOGLE;
+
+    // ---- VK_EXT_discard_rectangles extension commands
+    if (!strcmp(name, "CmdSetDiscardRectangleEXT")) return (void *)table->CmdSetDiscardRectangleEXT;
+
+    // ---- VK_EXT_hdr_metadata extension commands
+    if (!strcmp(name, "SetHdrMetadataEXT")) return (void *)table->SetHdrMetadataEXT;
+
+    // ---- VK_EXT_debug_utils extension commands
+    if (!strcmp(name, "SetDebugUtilsObjectNameEXT")) return (void *)table->SetDebugUtilsObjectNameEXT;
+    if (!strcmp(name, "SetDebugUtilsObjectTagEXT")) return (void *)table->SetDebugUtilsObjectTagEXT;
+    if (!strcmp(name, "QueueBeginDebugUtilsLabelEXT")) return (void *)table->QueueBeginDebugUtilsLabelEXT;
+    if (!strcmp(name, "QueueEndDebugUtilsLabelEXT")) return (void *)table->QueueEndDebugUtilsLabelEXT;
+    if (!strcmp(name, "QueueInsertDebugUtilsLabelEXT")) return (void *)table->QueueInsertDebugUtilsLabelEXT;
+    if (!strcmp(name, "CmdBeginDebugUtilsLabelEXT")) return (void *)table->CmdBeginDebugUtilsLabelEXT;
+    if (!strcmp(name, "CmdEndDebugUtilsLabelEXT")) return (void *)table->CmdEndDebugUtilsLabelEXT;
+    if (!strcmp(name, "CmdInsertDebugUtilsLabelEXT")) return (void *)table->CmdInsertDebugUtilsLabelEXT;
+
+    // ---- VK_ANDROID_external_memory_android_hardware_buffer extension commands
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    if (!strcmp(name, "GetAndroidHardwareBufferPropertiesANDROID")) return (void *)table->GetAndroidHardwareBufferPropertiesANDROID;
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    if (!strcmp(name, "GetMemoryAndroidHardwareBufferANDROID")) return (void *)table->GetMemoryAndroidHardwareBufferANDROID;
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+    // ---- VK_EXT_sample_locations extension commands
+    if (!strcmp(name, "CmdSetSampleLocationsEXT")) return (void *)table->CmdSetSampleLocationsEXT;
+
+    // ---- VK_EXT_image_drm_format_modifier extension commands
+    if (!strcmp(name, "GetImageDrmFormatModifierPropertiesEXT")) return (void *)table->GetImageDrmFormatModifierPropertiesEXT;
+
+    // ---- VK_EXT_validation_cache extension commands
+    if (!strcmp(name, "CreateValidationCacheEXT")) return (void *)table->CreateValidationCacheEXT;
+    if (!strcmp(name, "DestroyValidationCacheEXT")) return (void *)table->DestroyValidationCacheEXT;
+    if (!strcmp(name, "MergeValidationCachesEXT")) return (void *)table->MergeValidationCachesEXT;
+    if (!strcmp(name, "GetValidationCacheDataEXT")) return (void *)table->GetValidationCacheDataEXT;
+
+    // ---- VK_NV_shading_rate_image extension commands
+    if (!strcmp(name, "CmdBindShadingRateImageNV")) return (void *)table->CmdBindShadingRateImageNV;
+    if (!strcmp(name, "CmdSetViewportShadingRatePaletteNV")) return (void *)table->CmdSetViewportShadingRatePaletteNV;
+    if (!strcmp(name, "CmdSetCoarseSampleOrderNV")) return (void *)table->CmdSetCoarseSampleOrderNV;
+
+    // ---- VK_NV_ray_tracing extension commands
+    if (!strcmp(name, "CreateAccelerationStructureNV")) return (void *)table->CreateAccelerationStructureNV;
+    if (!strcmp(name, "DestroyAccelerationStructureNV")) return (void *)table->DestroyAccelerationStructureNV;
+    if (!strcmp(name, "GetAccelerationStructureMemoryRequirementsNV")) return (void *)table->GetAccelerationStructureMemoryRequirementsNV;
+    if (!strcmp(name, "BindAccelerationStructureMemoryNV")) return (void *)table->BindAccelerationStructureMemoryNV;
+    if (!strcmp(name, "CmdBuildAccelerationStructureNV")) return (void *)table->CmdBuildAccelerationStructureNV;
+    if (!strcmp(name, "CmdCopyAccelerationStructureNV")) return (void *)table->CmdCopyAccelerationStructureNV;
+    if (!strcmp(name, "CmdTraceRaysNV")) return (void *)table->CmdTraceRaysNV;
+    if (!strcmp(name, "CreateRayTracingPipelinesNV")) return (void *)table->CreateRayTracingPipelinesNV;
+    if (!strcmp(name, "GetRayTracingShaderGroupHandlesNV")) return (void *)table->GetRayTracingShaderGroupHandlesNV;
+    if (!strcmp(name, "GetAccelerationStructureHandleNV")) return (void *)table->GetAccelerationStructureHandleNV;
+    if (!strcmp(name, "CmdWriteAccelerationStructuresPropertiesNV")) return (void *)table->CmdWriteAccelerationStructuresPropertiesNV;
+    if (!strcmp(name, "CompileDeferredNV")) return (void *)table->CompileDeferredNV;
+
+    // ---- VK_EXT_external_memory_host extension commands
+    if (!strcmp(name, "GetMemoryHostPointerPropertiesEXT")) return (void *)table->GetMemoryHostPointerPropertiesEXT;
+
+    // ---- VK_AMD_buffer_marker extension commands
+    if (!strcmp(name, "CmdWriteBufferMarkerAMD")) return (void *)table->CmdWriteBufferMarkerAMD;
+
+    // ---- VK_EXT_calibrated_timestamps extension commands
+    if (!strcmp(name, "GetCalibratedTimestampsEXT")) return (void *)table->GetCalibratedTimestampsEXT;
+
+    // ---- VK_NV_mesh_shader extension commands
+    if (!strcmp(name, "CmdDrawMeshTasksNV")) return (void *)table->CmdDrawMeshTasksNV;
+    if (!strcmp(name, "CmdDrawMeshTasksIndirectNV")) return (void *)table->CmdDrawMeshTasksIndirectNV;
+    if (!strcmp(name, "CmdDrawMeshTasksIndirectCountNV")) return (void *)table->CmdDrawMeshTasksIndirectCountNV;
+
+    // ---- VK_NV_scissor_exclusive extension commands
+    if (!strcmp(name, "CmdSetExclusiveScissorNV")) return (void *)table->CmdSetExclusiveScissorNV;
+
+    // ---- VK_NV_device_diagnostic_checkpoints extension commands
+    if (!strcmp(name, "CmdSetCheckpointNV")) return (void *)table->CmdSetCheckpointNV;
+    if (!strcmp(name, "GetQueueCheckpointDataNV")) return (void *)table->GetQueueCheckpointDataNV;
+
+    // ---- VK_INTEL_performance_query extension commands
+    if (!strcmp(name, "InitializePerformanceApiINTEL")) return (void *)table->InitializePerformanceApiINTEL;
+    if (!strcmp(name, "UninitializePerformanceApiINTEL")) return (void *)table->UninitializePerformanceApiINTEL;
+    if (!strcmp(name, "CmdSetPerformanceMarkerINTEL")) return (void *)table->CmdSetPerformanceMarkerINTEL;
+    if (!strcmp(name, "CmdSetPerformanceStreamMarkerINTEL")) return (void *)table->CmdSetPerformanceStreamMarkerINTEL;
+    if (!strcmp(name, "CmdSetPerformanceOverrideINTEL")) return (void *)table->CmdSetPerformanceOverrideINTEL;
+    if (!strcmp(name, "AcquirePerformanceConfigurationINTEL")) return (void *)table->AcquirePerformanceConfigurationINTEL;
+    if (!strcmp(name, "ReleasePerformanceConfigurationINTEL")) return (void *)table->ReleasePerformanceConfigurationINTEL;
+    if (!strcmp(name, "QueueSetPerformanceConfigurationINTEL")) return (void *)table->QueueSetPerformanceConfigurationINTEL;
+    if (!strcmp(name, "GetPerformanceParameterINTEL")) return (void *)table->GetPerformanceParameterINTEL;
+
+    // ---- VK_AMD_display_native_hdr extension commands
+    if (!strcmp(name, "SetLocalDimmingAMD")) return (void *)table->SetLocalDimmingAMD;
+
+    // ---- VK_EXT_buffer_device_address extension commands
+    if (!strcmp(name, "GetBufferDeviceAddressEXT")) return (void *)table->GetBufferDeviceAddressEXT;
+
+    // ---- VK_EXT_full_screen_exclusive extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    if (!strcmp(name, "AcquireFullScreenExclusiveModeEXT")) return (void *)table->AcquireFullScreenExclusiveModeEXT;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    if (!strcmp(name, "ReleaseFullScreenExclusiveModeEXT")) return (void *)table->ReleaseFullScreenExclusiveModeEXT;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    if (!strcmp(name, "GetDeviceGroupSurfacePresentModes2EXT")) return (void *)table->GetDeviceGroupSurfacePresentModes2EXT;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_EXT_line_rasterization extension commands
+    if (!strcmp(name, "CmdSetLineStippleEXT")) return (void *)table->CmdSetLineStippleEXT;
+
+    // ---- VK_EXT_host_query_reset extension commands
+    if (!strcmp(name, "ResetQueryPoolEXT")) return (void *)table->ResetQueryPoolEXT;
+
+    return NULL;
+}
+
+// Instance command lookup function
+VKAPI_ATTR void* VKAPI_CALL loader_lookup_instance_dispatch_table(const VkLayerInstanceDispatchTable *table, const char *name,
+                                                                 bool *found_name) {
+    if (!name || name[0] != 'v' || name[1] != 'k') {
+        *found_name = false;
+        return NULL;
+    }
+
+    *found_name = true;
+    name += 2;
+
+    // ---- Core 1_0 commands
+    if (!strcmp(name, "DestroyInstance")) return (void *)table->DestroyInstance;
+    if (!strcmp(name, "EnumeratePhysicalDevices")) return (void *)table->EnumeratePhysicalDevices;
+    if (!strcmp(name, "GetPhysicalDeviceFeatures")) return (void *)table->GetPhysicalDeviceFeatures;
+    if (!strcmp(name, "GetPhysicalDeviceFormatProperties")) return (void *)table->GetPhysicalDeviceFormatProperties;
+    if (!strcmp(name, "GetPhysicalDeviceImageFormatProperties")) return (void *)table->GetPhysicalDeviceImageFormatProperties;
+    if (!strcmp(name, "GetPhysicalDeviceProperties")) return (void *)table->GetPhysicalDeviceProperties;
+    if (!strcmp(name, "GetPhysicalDeviceQueueFamilyProperties")) return (void *)table->GetPhysicalDeviceQueueFamilyProperties;
+    if (!strcmp(name, "GetPhysicalDeviceMemoryProperties")) return (void *)table->GetPhysicalDeviceMemoryProperties;
+    if (!strcmp(name, "GetInstanceProcAddr")) return (void *)table->GetInstanceProcAddr;
+    if (!strcmp(name, "EnumerateDeviceExtensionProperties")) return (void *)table->EnumerateDeviceExtensionProperties;
+    if (!strcmp(name, "EnumerateDeviceLayerProperties")) return (void *)table->EnumerateDeviceLayerProperties;
+    if (!strcmp(name, "GetPhysicalDeviceSparseImageFormatProperties")) return (void *)table->GetPhysicalDeviceSparseImageFormatProperties;
+
+    // ---- Core 1_1 commands
+    if (!strcmp(name, "EnumeratePhysicalDeviceGroups")) return (void *)table->EnumeratePhysicalDeviceGroups;
+    if (!strcmp(name, "GetPhysicalDeviceFeatures2")) return (void *)table->GetPhysicalDeviceFeatures2;
+    if (!strcmp(name, "GetPhysicalDeviceProperties2")) return (void *)table->GetPhysicalDeviceProperties2;
+    if (!strcmp(name, "GetPhysicalDeviceFormatProperties2")) return (void *)table->GetPhysicalDeviceFormatProperties2;
+    if (!strcmp(name, "GetPhysicalDeviceImageFormatProperties2")) return (void *)table->GetPhysicalDeviceImageFormatProperties2;
+    if (!strcmp(name, "GetPhysicalDeviceQueueFamilyProperties2")) return (void *)table->GetPhysicalDeviceQueueFamilyProperties2;
+    if (!strcmp(name, "GetPhysicalDeviceMemoryProperties2")) return (void *)table->GetPhysicalDeviceMemoryProperties2;
+    if (!strcmp(name, "GetPhysicalDeviceSparseImageFormatProperties2")) return (void *)table->GetPhysicalDeviceSparseImageFormatProperties2;
+    if (!strcmp(name, "GetPhysicalDeviceExternalBufferProperties")) return (void *)table->GetPhysicalDeviceExternalBufferProperties;
+    if (!strcmp(name, "GetPhysicalDeviceExternalFenceProperties")) return (void *)table->GetPhysicalDeviceExternalFenceProperties;
+    if (!strcmp(name, "GetPhysicalDeviceExternalSemaphoreProperties")) return (void *)table->GetPhysicalDeviceExternalSemaphoreProperties;
+
+    // ---- VK_KHR_surface extension commands
+    if (!strcmp(name, "DestroySurfaceKHR")) return (void *)table->DestroySurfaceKHR;
+    if (!strcmp(name, "GetPhysicalDeviceSurfaceSupportKHR")) return (void *)table->GetPhysicalDeviceSurfaceSupportKHR;
+    if (!strcmp(name, "GetPhysicalDeviceSurfaceCapabilitiesKHR")) return (void *)table->GetPhysicalDeviceSurfaceCapabilitiesKHR;
+    if (!strcmp(name, "GetPhysicalDeviceSurfaceFormatsKHR")) return (void *)table->GetPhysicalDeviceSurfaceFormatsKHR;
+    if (!strcmp(name, "GetPhysicalDeviceSurfacePresentModesKHR")) return (void *)table->GetPhysicalDeviceSurfacePresentModesKHR;
+
+    // ---- VK_KHR_swapchain extension commands
+    if (!strcmp(name, "GetPhysicalDevicePresentRectanglesKHR")) return (void *)table->GetPhysicalDevicePresentRectanglesKHR;
+
+    // ---- VK_KHR_display extension commands
+    if (!strcmp(name, "GetPhysicalDeviceDisplayPropertiesKHR")) return (void *)table->GetPhysicalDeviceDisplayPropertiesKHR;
+    if (!strcmp(name, "GetPhysicalDeviceDisplayPlanePropertiesKHR")) return (void *)table->GetPhysicalDeviceDisplayPlanePropertiesKHR;
+    if (!strcmp(name, "GetDisplayPlaneSupportedDisplaysKHR")) return (void *)table->GetDisplayPlaneSupportedDisplaysKHR;
+    if (!strcmp(name, "GetDisplayModePropertiesKHR")) return (void *)table->GetDisplayModePropertiesKHR;
+    if (!strcmp(name, "CreateDisplayModeKHR")) return (void *)table->CreateDisplayModeKHR;
+    if (!strcmp(name, "GetDisplayPlaneCapabilitiesKHR")) return (void *)table->GetDisplayPlaneCapabilitiesKHR;
+    if (!strcmp(name, "CreateDisplayPlaneSurfaceKHR")) return (void *)table->CreateDisplayPlaneSurfaceKHR;
+
+    // ---- VK_KHR_xlib_surface extension commands
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    if (!strcmp(name, "CreateXlibSurfaceKHR")) return (void *)table->CreateXlibSurfaceKHR;
+#endif // VK_USE_PLATFORM_XLIB_KHR
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    if (!strcmp(name, "GetPhysicalDeviceXlibPresentationSupportKHR")) return (void *)table->GetPhysicalDeviceXlibPresentationSupportKHR;
+#endif // VK_USE_PLATFORM_XLIB_KHR
+
+    // ---- VK_KHR_xcb_surface extension commands
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    if (!strcmp(name, "CreateXcbSurfaceKHR")) return (void *)table->CreateXcbSurfaceKHR;
+#endif // VK_USE_PLATFORM_XCB_KHR
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    if (!strcmp(name, "GetPhysicalDeviceXcbPresentationSupportKHR")) return (void *)table->GetPhysicalDeviceXcbPresentationSupportKHR;
+#endif // VK_USE_PLATFORM_XCB_KHR
+
+    // ---- VK_KHR_wayland_surface extension commands
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    if (!strcmp(name, "CreateWaylandSurfaceKHR")) return (void *)table->CreateWaylandSurfaceKHR;
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    if (!strcmp(name, "GetPhysicalDeviceWaylandPresentationSupportKHR")) return (void *)table->GetPhysicalDeviceWaylandPresentationSupportKHR;
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+
+    // ---- VK_KHR_android_surface extension commands
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    if (!strcmp(name, "CreateAndroidSurfaceKHR")) return (void *)table->CreateAndroidSurfaceKHR;
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+    // ---- VK_KHR_win32_surface extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    if (!strcmp(name, "CreateWin32SurfaceKHR")) return (void *)table->CreateWin32SurfaceKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    if (!strcmp(name, "GetPhysicalDeviceWin32PresentationSupportKHR")) return (void *)table->GetPhysicalDeviceWin32PresentationSupportKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_KHR_get_physical_device_properties2 extension commands
+    if (!strcmp(name, "GetPhysicalDeviceFeatures2KHR")) return (void *)table->GetPhysicalDeviceFeatures2KHR;
+    if (!strcmp(name, "GetPhysicalDeviceProperties2KHR")) return (void *)table->GetPhysicalDeviceProperties2KHR;
+    if (!strcmp(name, "GetPhysicalDeviceFormatProperties2KHR")) return (void *)table->GetPhysicalDeviceFormatProperties2KHR;
+    if (!strcmp(name, "GetPhysicalDeviceImageFormatProperties2KHR")) return (void *)table->GetPhysicalDeviceImageFormatProperties2KHR;
+    if (!strcmp(name, "GetPhysicalDeviceQueueFamilyProperties2KHR")) return (void *)table->GetPhysicalDeviceQueueFamilyProperties2KHR;
+    if (!strcmp(name, "GetPhysicalDeviceMemoryProperties2KHR")) return (void *)table->GetPhysicalDeviceMemoryProperties2KHR;
+    if (!strcmp(name, "GetPhysicalDeviceSparseImageFormatProperties2KHR")) return (void *)table->GetPhysicalDeviceSparseImageFormatProperties2KHR;
+
+    // ---- VK_KHR_device_group_creation extension commands
+    if (!strcmp(name, "EnumeratePhysicalDeviceGroupsKHR")) return (void *)table->EnumeratePhysicalDeviceGroupsKHR;
+
+    // ---- VK_KHR_external_memory_capabilities extension commands
+    if (!strcmp(name, "GetPhysicalDeviceExternalBufferPropertiesKHR")) return (void *)table->GetPhysicalDeviceExternalBufferPropertiesKHR;
+
+    // ---- VK_KHR_external_semaphore_capabilities extension commands
+    if (!strcmp(name, "GetPhysicalDeviceExternalSemaphorePropertiesKHR")) return (void *)table->GetPhysicalDeviceExternalSemaphorePropertiesKHR;
+
+    // ---- VK_KHR_external_fence_capabilities extension commands
+    if (!strcmp(name, "GetPhysicalDeviceExternalFencePropertiesKHR")) return (void *)table->GetPhysicalDeviceExternalFencePropertiesKHR;
+
+    // ---- VK_KHR_performance_query extension commands
+    if (!strcmp(name, "EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR")) return (void *)table->EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR;
+    if (!strcmp(name, "GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR")) return (void *)table->GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR;
+
+    // ---- VK_KHR_get_surface_capabilities2 extension commands
+    if (!strcmp(name, "GetPhysicalDeviceSurfaceCapabilities2KHR")) return (void *)table->GetPhysicalDeviceSurfaceCapabilities2KHR;
+    if (!strcmp(name, "GetPhysicalDeviceSurfaceFormats2KHR")) return (void *)table->GetPhysicalDeviceSurfaceFormats2KHR;
+
+    // ---- VK_KHR_get_display_properties2 extension commands
+    if (!strcmp(name, "GetPhysicalDeviceDisplayProperties2KHR")) return (void *)table->GetPhysicalDeviceDisplayProperties2KHR;
+    if (!strcmp(name, "GetPhysicalDeviceDisplayPlaneProperties2KHR")) return (void *)table->GetPhysicalDeviceDisplayPlaneProperties2KHR;
+    if (!strcmp(name, "GetDisplayModeProperties2KHR")) return (void *)table->GetDisplayModeProperties2KHR;
+    if (!strcmp(name, "GetDisplayPlaneCapabilities2KHR")) return (void *)table->GetDisplayPlaneCapabilities2KHR;
+
+    // ---- VK_EXT_debug_report extension commands
+    if (!strcmp(name, "CreateDebugReportCallbackEXT")) return (void *)table->CreateDebugReportCallbackEXT;
+    if (!strcmp(name, "DestroyDebugReportCallbackEXT")) return (void *)table->DestroyDebugReportCallbackEXT;
+    if (!strcmp(name, "DebugReportMessageEXT")) return (void *)table->DebugReportMessageEXT;
+
+    // ---- VK_GGP_stream_descriptor_surface extension commands
+#ifdef VK_USE_PLATFORM_GGP
+    if (!strcmp(name, "CreateStreamDescriptorSurfaceGGP")) return (void *)table->CreateStreamDescriptorSurfaceGGP;
+#endif // VK_USE_PLATFORM_GGP
+
+    // ---- VK_NV_external_memory_capabilities extension commands
+    if (!strcmp(name, "GetPhysicalDeviceExternalImageFormatPropertiesNV")) return (void *)table->GetPhysicalDeviceExternalImageFormatPropertiesNV;
+
+    // ---- VK_NN_vi_surface extension commands
+#ifdef VK_USE_PLATFORM_VI_NN
+    if (!strcmp(name, "CreateViSurfaceNN")) return (void *)table->CreateViSurfaceNN;
+#endif // VK_USE_PLATFORM_VI_NN
+
+    // ---- VK_NVX_device_generated_commands extension commands
+    if (!strcmp(name, "GetPhysicalDeviceGeneratedCommandsPropertiesNVX")) return (void *)table->GetPhysicalDeviceGeneratedCommandsPropertiesNVX;
+
+    // ---- VK_EXT_direct_mode_display extension commands
+    if (!strcmp(name, "ReleaseDisplayEXT")) return (void *)table->ReleaseDisplayEXT;
+
+    // ---- VK_EXT_acquire_xlib_display extension commands
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    if (!strcmp(name, "AcquireXlibDisplayEXT")) return (void *)table->AcquireXlibDisplayEXT;
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    if (!strcmp(name, "GetRandROutputDisplayEXT")) return (void *)table->GetRandROutputDisplayEXT;
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+    // ---- VK_EXT_display_surface_counter extension commands
+    if (!strcmp(name, "GetPhysicalDeviceSurfaceCapabilities2EXT")) return (void *)table->GetPhysicalDeviceSurfaceCapabilities2EXT;
+
+    // ---- VK_MVK_ios_surface extension commands
+#ifdef VK_USE_PLATFORM_IOS_MVK
+    if (!strcmp(name, "CreateIOSSurfaceMVK")) return (void *)table->CreateIOSSurfaceMVK;
+#endif // VK_USE_PLATFORM_IOS_MVK
+
+    // ---- VK_MVK_macos_surface extension commands
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+    if (!strcmp(name, "CreateMacOSSurfaceMVK")) return (void *)table->CreateMacOSSurfaceMVK;
+#endif // VK_USE_PLATFORM_MACOS_MVK
+
+    // ---- VK_EXT_debug_utils extension commands
+    if (!strcmp(name, "CreateDebugUtilsMessengerEXT")) return (void *)table->CreateDebugUtilsMessengerEXT;
+    if (!strcmp(name, "DestroyDebugUtilsMessengerEXT")) return (void *)table->DestroyDebugUtilsMessengerEXT;
+    if (!strcmp(name, "SubmitDebugUtilsMessageEXT")) return (void *)table->SubmitDebugUtilsMessageEXT;
+
+    // ---- VK_EXT_sample_locations extension commands
+    if (!strcmp(name, "GetPhysicalDeviceMultisamplePropertiesEXT")) return (void *)table->GetPhysicalDeviceMultisamplePropertiesEXT;
+
+    // ---- VK_EXT_calibrated_timestamps extension commands
+    if (!strcmp(name, "GetPhysicalDeviceCalibrateableTimeDomainsEXT")) return (void *)table->GetPhysicalDeviceCalibrateableTimeDomainsEXT;
+
+    // ---- VK_FUCHSIA_imagepipe_surface extension commands
+#ifdef VK_USE_PLATFORM_FUCHSIA
+    if (!strcmp(name, "CreateImagePipeSurfaceFUCHSIA")) return (void *)table->CreateImagePipeSurfaceFUCHSIA;
+#endif // VK_USE_PLATFORM_FUCHSIA
+
+    // ---- VK_EXT_metal_surface extension commands
+#ifdef VK_USE_PLATFORM_METAL_EXT
+    if (!strcmp(name, "CreateMetalSurfaceEXT")) return (void *)table->CreateMetalSurfaceEXT;
+#endif // VK_USE_PLATFORM_METAL_EXT
+
+    // ---- VK_NV_cooperative_matrix extension commands
+    if (!strcmp(name, "GetPhysicalDeviceCooperativeMatrixPropertiesNV")) return (void *)table->GetPhysicalDeviceCooperativeMatrixPropertiesNV;
+
+    // ---- VK_NV_coverage_reduction_mode extension commands
+    if (!strcmp(name, "GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV")) return (void *)table->GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV;
+
+    // ---- VK_EXT_full_screen_exclusive extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    if (!strcmp(name, "GetPhysicalDeviceSurfacePresentModes2EXT")) return (void *)table->GetPhysicalDeviceSurfacePresentModes2EXT;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_EXT_headless_surface extension commands
+    if (!strcmp(name, "CreateHeadlessSurfaceEXT")) return (void *)table->CreateHeadlessSurfaceEXT;
+
+    *found_name = false;
+    return NULL;
+}
+
+
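+// The sections below are generated trampoline/terminator pairs. Trampolines
+// look up the layer dispatch table for the dispatchable handle (via
+// loader_get_dispatch or loader_get_instance_layer_dispatch) and forward the
+// call down the chain; terminator_* functions sit at the bottom of the chain,
+// unwrap loader-owned objects, and call directly into the ICD's dispatch table.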
+// ---- VK_KHR_device_group extension trampoline/terminators
+
+VKAPI_ATTR void VKAPI_CALL GetDeviceGroupPeerMemoryFeaturesKHR(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    disp->GetDeviceGroupPeerMemoryFeaturesKHR(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdSetDeviceMaskKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdSetDeviceMaskKHR(commandBuffer, deviceMask);
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdDispatchBaseKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdDispatchBaseKHR(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
+}
+
+
+// ---- VK_KHR_maintenance1 extension trampoline/terminators
+
+VKAPI_ATTR void VKAPI_CALL TrimCommandPoolKHR(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    disp->TrimCommandPoolKHR(device, commandPool, flags);
+}
+
+
+// ---- VK_KHR_external_memory_win32 extension trampoline/terminators
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+VKAPI_ATTR VkResult VKAPI_CALL GetMemoryWin32HandleKHR(
+    VkDevice                                    device,
+    const VkMemoryGetWin32HandleInfoKHR*        pGetWin32HandleInfo,
+    HANDLE*                                     pHandle) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetMemoryWin32HandleKHR(device, pGetWin32HandleInfo, pHandle);
+}
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+VKAPI_ATTR VkResult VKAPI_CALL GetMemoryWin32HandlePropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    HANDLE                                      handle,
+    VkMemoryWin32HandlePropertiesKHR*           pMemoryWin32HandleProperties) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetMemoryWin32HandlePropertiesKHR(device, handleType, handle, pMemoryWin32HandleProperties);
+}
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+// ---- VK_KHR_external_memory_fd extension trampoline/terminators
+
+VKAPI_ATTR VkResult VKAPI_CALL GetMemoryFdKHR(
+    VkDevice                                    device,
+    const VkMemoryGetFdInfoKHR*                 pGetFdInfo,
+    int*                                        pFd) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetMemoryFdKHR(device, pGetFdInfo, pFd);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetMemoryFdPropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    int                                         fd,
+    VkMemoryFdPropertiesKHR*                    pMemoryFdProperties) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetMemoryFdPropertiesKHR(device, handleType, fd, pMemoryFdProperties);
+}
+
+
+// ---- VK_KHR_external_semaphore_win32 extension trampoline/terminators
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+VKAPI_ATTR VkResult VKAPI_CALL ImportSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreWin32HandleInfoKHR*  pImportSemaphoreWin32HandleInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->ImportSemaphoreWin32HandleKHR(device, pImportSemaphoreWin32HandleInfo);
+}
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+VKAPI_ATTR VkResult VKAPI_CALL GetSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetWin32HandleInfoKHR*     pGetWin32HandleInfo,
+    HANDLE*                                     pHandle) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetSemaphoreWin32HandleKHR(device, pGetWin32HandleInfo, pHandle);
+}
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+// ---- VK_KHR_external_semaphore_fd extension trampoline/terminators
+
+VKAPI_ATTR VkResult VKAPI_CALL ImportSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreFdInfoKHR*           pImportSemaphoreFdInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->ImportSemaphoreFdKHR(device, pImportSemaphoreFdInfo);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetFdInfoKHR*              pGetFdInfo,
+    int*                                        pFd) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetSemaphoreFdKHR(device, pGetFdInfo, pFd);
+}
+
+
+// ---- VK_KHR_push_descriptor extension trampoline/terminators
+
+VKAPI_ATTR void VKAPI_CALL CmdPushDescriptorSetKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdPushDescriptorSetKHR(commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdPushDescriptorSetWithTemplateKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    const void*                                 pData) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdPushDescriptorSetWithTemplateKHR(commandBuffer, descriptorUpdateTemplate, layout, set, pData);
+}
+
+
+// ---- VK_KHR_descriptor_update_template extension trampoline/terminators
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->CreateDescriptorUpdateTemplateKHR(device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    disp->DestroyDescriptorUpdateTemplateKHR(device, descriptorUpdateTemplate, pAllocator);
+}
+
+VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSetWithTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    disp->UpdateDescriptorSetWithTemplateKHR(device, descriptorSet, descriptorUpdateTemplate, pData);
+}
+
+
+// ---- VK_KHR_create_renderpass2 extension trampoline/terminators
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateRenderPass2KHR(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo2KHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->CreateRenderPass2KHR(device, pCreateInfo, pAllocator, pRenderPass);
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdBeginRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdBeginRenderPass2KHR(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdNextSubpass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdNextSubpass2KHR(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdEndRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdEndRenderPass2KHR(commandBuffer, pSubpassEndInfo);
+}
+
+
+// ---- VK_KHR_shared_presentable_image extension trampoline/terminators
+
+VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainStatusKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetSwapchainStatusKHR(device, swapchain);
+}
+
+
+// ---- VK_KHR_external_fence_win32 extension trampoline/terminators
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+VKAPI_ATTR VkResult VKAPI_CALL ImportFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportFenceWin32HandleInfoKHR*      pImportFenceWin32HandleInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->ImportFenceWin32HandleKHR(device, pImportFenceWin32HandleInfo);
+}
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+VKAPI_ATTR VkResult VKAPI_CALL GetFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkFenceGetWin32HandleInfoKHR*         pGetWin32HandleInfo,
+    HANDLE*                                     pHandle) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetFenceWin32HandleKHR(device, pGetWin32HandleInfo, pHandle);
+}
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+// ---- VK_KHR_external_fence_fd extension trampoline/terminators
+
+VKAPI_ATTR VkResult VKAPI_CALL ImportFenceFdKHR(
+    VkDevice                                    device,
+    const VkImportFenceFdInfoKHR*               pImportFenceFdInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->ImportFenceFdKHR(device, pImportFenceFdInfo);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetFenceFdKHR(
+    VkDevice                                    device,
+    const VkFenceGetFdInfoKHR*                  pGetFdInfo,
+    int*                                        pFd) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetFenceFdKHR(device, pGetFdInfo, pFd);
+}
+
+
+// ---- VK_KHR_performance_query extension trampoline/terminators
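+// Physical-device-level entry points here unwrap the trampoline physical device
+// with loader_unwrap_physical_device and dispatch through the instance layer
+// table, while the matching terminator_* functions cast to the loader's
+// physical-device terminator structure and call into the ICD, logging an error
+// if the ICD does not expose the entry point.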
+
+VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t*                                   pCounterCount,
+    VkPerformanceCounterKHR*                    pCounters,
+    VkPerformanceCounterDescriptionKHR*         pCounterDescriptions) {
+    const VkLayerInstanceDispatchTable *disp;
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    return disp->EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(unwrapped_phys_dev, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t*                                   pCounterCount,
+    VkPerformanceCounterKHR*                    pCounters,
+    VkPerformanceCounterDescriptionKHR*         pCounterDescriptions) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    if (NULL == icd_term->dispatch.EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR) {
+        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "ICD associated with VkPhysicalDevice does not support EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR");
+    }
+    return icd_term->dispatch.EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(phys_dev_term->phys_dev, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions);
+}
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkQueryPoolPerformanceCreateInfoKHR*  pPerformanceQueryCreateInfo,
+    uint32_t*                                   pNumPasses) {
+    const VkLayerInstanceDispatchTable *disp;
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    disp->GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(unwrapped_phys_dev, pPerformanceQueryCreateInfo, pNumPasses);
+}
+
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkQueryPoolPerformanceCreateInfoKHR*  pPerformanceQueryCreateInfo,
+    uint32_t*                                   pNumPasses) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    if (NULL == icd_term->dispatch.GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR) {
+        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "ICD associated with VkPhysicalDevice does not support GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR");
+    }
+    icd_term->dispatch.GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(phys_dev_term->phys_dev, pPerformanceQueryCreateInfo, pNumPasses);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL AcquireProfilingLockKHR(
+    VkDevice                                    device,
+    const VkAcquireProfilingLockInfoKHR*        pInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->AcquireProfilingLockKHR(device, pInfo);
+}
+
+VKAPI_ATTR void VKAPI_CALL ReleaseProfilingLockKHR(
+    VkDevice                                    device) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    disp->ReleaseProfilingLockKHR(device);
+}
+
+
+// ---- VK_KHR_get_memory_requirements2 extension trampoline/terminators
+
+VKAPI_ATTR void VKAPI_CALL GetImageMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    disp->GetImageMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
+}
+
+VKAPI_ATTR void VKAPI_CALL GetBufferMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    disp->GetBufferMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
+}
+
+VKAPI_ATTR void VKAPI_CALL GetImageSparseMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    disp->GetImageSparseMemoryRequirements2KHR(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
+}
+
+
+// ---- VK_KHR_sampler_ycbcr_conversion extension trampoline/terminators
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateSamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->CreateSamplerYcbcrConversionKHR(device, pCreateInfo, pAllocator, pYcbcrConversion);
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroySamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    disp->DestroySamplerYcbcrConversionKHR(device, ycbcrConversion, pAllocator);
+}
+
+
+// ---- VK_KHR_bind_memory2 extension trampoline/terminators
+
+VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->BindBufferMemory2KHR(device, bindInfoCount, pBindInfos);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->BindImageMemory2KHR(device, bindInfoCount, pBindInfos);
+}
+
+
+// ---- VK_KHR_maintenance3 extension trampoline/terminators
+
+VKAPI_ATTR void VKAPI_CALL GetDescriptorSetLayoutSupportKHR(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    disp->GetDescriptorSetLayoutSupportKHR(device, pCreateInfo, pSupport);
+}
+
+
+// ---- VK_KHR_draw_indirect_count extension trampoline/terminators
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdDrawIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdDrawIndexedIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+}
+
+
+// ---- VK_KHR_timeline_semaphore extension trampoline/terminators
+
+VKAPI_ATTR VkResult VKAPI_CALL GetSemaphoreCounterValueKHR(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    uint64_t*                                   pValue) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetSemaphoreCounterValueKHR(device, semaphore, pValue);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL WaitSemaphoresKHR(
+    VkDevice                                    device,
+    const VkSemaphoreWaitInfoKHR*               pWaitInfo,
+    uint64_t                                    timeout) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->WaitSemaphoresKHR(device, pWaitInfo, timeout);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL SignalSemaphoreKHR(
+    VkDevice                                    device,
+    const VkSemaphoreSignalInfoKHR*             pSignalInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->SignalSemaphoreKHR(device, pSignalInfo);
+}
+
+
+// ---- VK_KHR_pipeline_executable_properties extension trampoline/terminators
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPipelineExecutablePropertiesKHR(
+    VkDevice                                    device,
+    const VkPipelineInfoKHR*                    pPipelineInfo,
+    uint32_t*                                   pExecutableCount,
+    VkPipelineExecutablePropertiesKHR*          pProperties) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetPipelineExecutablePropertiesKHR(device, pPipelineInfo, pExecutableCount, pProperties);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPipelineExecutableStatisticsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pStatisticCount,
+    VkPipelineExecutableStatisticKHR*           pStatistics) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetPipelineExecutableStatisticsKHR(device, pExecutableInfo, pStatisticCount, pStatistics);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPipelineExecutableInternalRepresentationsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pInternalRepresentationCount,
+    VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetPipelineExecutableInternalRepresentationsKHR(device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations);
+}
+
+
+// ---- VK_EXT_debug_marker extension trampoline/terminators
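+// The debug-marker entry points copy the caller's info structure and, when the
+// tagged or named object is a loader-wrapped handle (a physical device, or a
+// surface for which the ICD created its own VkSurfaceKHR), substitute the
+// underlying ICD handle before forwarding the call.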
+
+VKAPI_ATTR VkResult VKAPI_CALL DebugMarkerSetObjectTagEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectTagInfoEXT*        pTagInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    VkDebugMarkerObjectTagInfoEXT local_tag_info;
+    memcpy(&local_tag_info, pTagInfo, sizeof(VkDebugMarkerObjectTagInfoEXT));
+    // If this is a physical device, we have to replace it with the proper one for the next call.
+    if (pTagInfo->objectType == VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT) {
+        struct loader_physical_device_tramp *phys_dev_tramp = (struct loader_physical_device_tramp *)(uintptr_t)pTagInfo->object;
+        local_tag_info.object = (uint64_t)(uintptr_t)phys_dev_tramp->phys_dev;
+    }
+    return disp->DebugMarkerSetObjectTagEXT(device, &local_tag_info);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_DebugMarkerSetObjectTagEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectTagInfoEXT*        pTagInfo) {
+    uint32_t icd_index = 0;
+    struct loader_device *dev;
+    struct loader_icd_term *icd_term = loader_get_icd_and_device(device, &dev, &icd_index);
+    if (NULL != icd_term && NULL != icd_term->dispatch.DebugMarkerSetObjectTagEXT) {
+        VkDebugMarkerObjectTagInfoEXT local_tag_info;
+        memcpy(&local_tag_info, pTagInfo, sizeof(VkDebugMarkerObjectTagInfoEXT));
+        // If this is a physical device, we have to replace it with the proper one for the next call.
+        if (pTagInfo->objectType == VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT) {
+            struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)(uintptr_t)pTagInfo->object;
+            local_tag_info.object = (uint64_t)(uintptr_t)phys_dev_term->phys_dev;
+        // If this is a KHR_surface, and the ICD has created its own, we have to replace it with the proper one for the next call.
+        } else if (pTagInfo->objectType == VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT) {
+            if (NULL != icd_term && NULL != icd_term->dispatch.CreateSwapchainKHR) {
+                VkIcdSurface *icd_surface = (VkIcdSurface *)(uintptr_t)pTagInfo->object;
+                if (NULL != icd_surface->real_icd_surfaces) {
+                    local_tag_info.object = (uint64_t)icd_surface->real_icd_surfaces[icd_index];
+                }
+            }
+        }
+        return icd_term->dispatch.DebugMarkerSetObjectTagEXT(device, &local_tag_info);
+    } else {
+        return VK_SUCCESS;
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL DebugMarkerSetObjectNameEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectNameInfoEXT*       pNameInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    VkDebugMarkerObjectNameInfoEXT local_name_info;
+    memcpy(&local_name_info, pNameInfo, sizeof(VkDebugMarkerObjectNameInfoEXT));
+    // If this is a physical device, we have to replace it with the proper one for the next call.
+    if (pNameInfo->objectType == VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT) {
+        struct loader_physical_device_tramp *phys_dev_tramp = (struct loader_physical_device_tramp *)(uintptr_t)pNameInfo->object;
+        local_name_info.object = (uint64_t)(uintptr_t)phys_dev_tramp->phys_dev;
+    }
+    return disp->DebugMarkerSetObjectNameEXT(device, &local_name_info);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_DebugMarkerSetObjectNameEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectNameInfoEXT*       pNameInfo) {
+    uint32_t icd_index = 0;
+    struct loader_device *dev;
+    struct loader_icd_term *icd_term = loader_get_icd_and_device(device, &dev, &icd_index);
+    if (NULL != icd_term && NULL != icd_term->dispatch.DebugMarkerSetObjectNameEXT) {
+        VkDebugMarkerObjectNameInfoEXT local_name_info;
+        memcpy(&local_name_info, pNameInfo, sizeof(VkDebugMarkerObjectNameInfoEXT));
+        // If this is a physical device, we have to replace it with the proper one for the next call.
+        if (pNameInfo->objectType == VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT) {
+            struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)(uintptr_t)pNameInfo->object;
+            local_name_info.object = (uint64_t)(uintptr_t)phys_dev_term->phys_dev;
+        // If this is a KHR_surface, and the ICD has created its own, we have to replace it with the proper one for the next call.
+        } else if (pNameInfo->objectType == VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT) {
+            if (NULL != icd_term && NULL != icd_term->dispatch.CreateSwapchainKHR) {
+                VkIcdSurface *icd_surface = (VkIcdSurface *)(uintptr_t)pNameInfo->object;
+                if (NULL != icd_surface->real_icd_surfaces) {
+                    local_name_info.object = (uint64_t)icd_surface->real_icd_surfaces[icd_index];
+                }
+            }
+        }
+        return icd_term->dispatch.DebugMarkerSetObjectNameEXT(device, &local_name_info);
+    } else {
+        return VK_SUCCESS;
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerBeginEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdDebugMarkerBeginEXT(commandBuffer, pMarkerInfo);
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerEndEXT(
+    VkCommandBuffer                             commandBuffer) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdDebugMarkerEndEXT(commandBuffer);
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerInsertEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdDebugMarkerInsertEXT(commandBuffer, pMarkerInfo);
+}
+
+
+// ---- VK_EXT_transform_feedback extension trampoline/terminators
+
+VKAPI_ATTR void VKAPI_CALL CmdBindTransformFeedbackBuffersEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets,
+    const VkDeviceSize*                         pSizes) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdBindTransformFeedbackBuffersEXT(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes);
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdBeginTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdBeginTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets);
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdEndTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdEndTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets);
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdBeginQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags,
+    uint32_t                                    index) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdBeginQueryIndexedEXT(commandBuffer, queryPool, query, flags, index);
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdEndQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    uint32_t                                    index) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdEndQueryIndexedEXT(commandBuffer, queryPool, query, index);
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawIndirectByteCountEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstInstance,
+    VkBuffer                                    counterBuffer,
+    VkDeviceSize                                counterBufferOffset,
+    uint32_t                                    counterOffset,
+    uint32_t                                    vertexStride) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdDrawIndirectByteCountEXT(commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride);
+}
+
+
+// ---- VK_NVX_image_view_handle extension trampoline/terminators
+
+VKAPI_ATTR uint32_t VKAPI_CALL GetImageViewHandleNVX(
+    VkDevice                                    device,
+    const VkImageViewHandleInfoNVX*             pInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetImageViewHandleNVX(device, pInfo);
+}
+
+
+// ---- VK_AMD_draw_indirect_count extension trampoline/terminators
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdDrawIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdDrawIndexedIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+}
+
+
+// ---- VK_AMD_shader_info extension trampoline/terminators
+
+VKAPI_ATTR VkResult VKAPI_CALL GetShaderInfoAMD(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    VkShaderStageFlagBits                       shaderStage,
+    VkShaderInfoTypeAMD                         infoType,
+    size_t*                                     pInfoSize,
+    void*                                       pInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetShaderInfoAMD(device, pipeline, shaderStage, infoType, pInfoSize, pInfo);
+}
+
+
+// ---- VK_GGP_stream_descriptor_surface extension trampoline/terminators
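+// The generator does not produce working surface-creation bodies for this
+// platform (or for VK_NN_vi_surface below); the functions are emitted as
+// #error placeholders and must be hand-written before building with the
+// corresponding platform define enabled.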
+
+#ifdef VK_USE_PLATFORM_GGP
+VKAPI_ATTR VkResult VKAPI_CALL CreateStreamDescriptorSurfaceGGP(
+    VkInstance                                  instance,
+    const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+#error("Not implemented. Likely needs to be manually generated!");
+    return disp->CreateStreamDescriptorSurfaceGGP(instance, pCreateInfo, pAllocator, pSurface);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateStreamDescriptorSurfaceGGP(
+    VkInstance                                  instance,
+    const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+#error("Not implemented. Likely needs to be manually generated!");
+}
+
+#endif // VK_USE_PLATFORM_GGP
+
+// ---- VK_NV_external_memory_win32 extension trampoline/terminators
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+VKAPI_ATTR VkResult VKAPI_CALL GetMemoryWin32HandleNV(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkExternalMemoryHandleTypeFlagsNV           handleType,
+    HANDLE*                                     pHandle) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetMemoryWin32HandleNV(device, memory, handleType, pHandle);
+}
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+// ---- VK_NN_vi_surface extension trampoline/terminators
+
+#ifdef VK_USE_PLATFORM_VI_NN
+VKAPI_ATTR VkResult VKAPI_CALL CreateViSurfaceNN(
+    VkInstance                                  instance,
+    const VkViSurfaceCreateInfoNN*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+#error("Not implemented. Likely needs to be manually generated!");
+    return disp->CreateViSurfaceNN(instance, pCreateInfo, pAllocator, pSurface);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateViSurfaceNN(
+    VkInstance                                  instance,
+    const VkViSurfaceCreateInfoNN*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+#error("Not implemented. Likely needs to be manually generated!");
+}
+
+#endif // VK_USE_PLATFORM_VI_NN
+
+// ---- VK_EXT_conditional_rendering extension trampoline/terminators
+
+VKAPI_ATTR void VKAPI_CALL CmdBeginConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkConditionalRenderingBeginInfoEXT*   pConditionalRenderingBegin) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdBeginConditionalRenderingEXT(commandBuffer, pConditionalRenderingBegin);
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdEndConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdEndConditionalRenderingEXT(commandBuffer);
+}
+
+
+// ---- VK_NVX_device_generated_commands extension trampoline/terminators
+
+VKAPI_ATTR void VKAPI_CALL CmdProcessCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdProcessCommandsInfoNVX*          pProcessCommandsInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdProcessCommandsNVX(commandBuffer, pProcessCommandsInfo);
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdReserveSpaceForCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdReserveSpaceForCommandsInfoNVX*  pReserveSpaceInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdReserveSpaceForCommandsNVX(commandBuffer, pReserveSpaceInfo);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkIndirectCommandsLayoutNVX*                pIndirectCommandsLayout) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->CreateIndirectCommandsLayoutNVX(device, pCreateInfo, pAllocator, pIndirectCommandsLayout);
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    VkIndirectCommandsLayoutNVX                 indirectCommandsLayout,
+    const VkAllocationCallbacks*                pAllocator) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    disp->DestroyIndirectCommandsLayoutNVX(device, indirectCommandsLayout, pAllocator);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateObjectTableNVX(
+    VkDevice                                    device,
+    const VkObjectTableCreateInfoNVX*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkObjectTableNVX*                           pObjectTable) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->CreateObjectTableNVX(device, pCreateInfo, pAllocator, pObjectTable);
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyObjectTableNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    const VkAllocationCallbacks*                pAllocator) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    disp->DestroyObjectTableNVX(device, objectTable, pAllocator);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL RegisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectTableEntryNVX* const*         ppObjectTableEntries,
+    const uint32_t*                             pObjectIndices) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->RegisterObjectsNVX(device, objectTable, objectCount, ppObjectTableEntries, pObjectIndices);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL UnregisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectEntryTypeNVX*                 pObjectEntryTypes,
+    const uint32_t*                             pObjectIndices) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->UnregisterObjectsNVX(device, objectTable, objectCount, pObjectEntryTypes, pObjectIndices);
+}
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceGeneratedCommandsPropertiesNVX(
+    VkPhysicalDevice                            physicalDevice,
+    VkDeviceGeneratedCommandsFeaturesNVX*       pFeatures,
+    VkDeviceGeneratedCommandsLimitsNVX*         pLimits) {
+    const VkLayerInstanceDispatchTable *disp;
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    disp->GetPhysicalDeviceGeneratedCommandsPropertiesNVX(unwrapped_phys_dev, pFeatures, pLimits);
+}
+
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceGeneratedCommandsPropertiesNVX(
+    VkPhysicalDevice                            physicalDevice,
+    VkDeviceGeneratedCommandsFeaturesNVX*       pFeatures,
+    VkDeviceGeneratedCommandsLimitsNVX*         pLimits) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    if (NULL == icd_term->dispatch.GetPhysicalDeviceGeneratedCommandsPropertiesNVX) {
+        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "ICD associated with VkPhysicalDevice does not support GetPhysicalDeviceGeneratedCommandsPropertiesNVX");
+    }
+    icd_term->dispatch.GetPhysicalDeviceGeneratedCommandsPropertiesNVX(phys_dev_term->phys_dev, pFeatures, pLimits);
+}
+
+
+// ---- VK_NV_clip_space_w_scaling extension trampoline/terminators
+
+VKAPI_ATTR void VKAPI_CALL CmdSetViewportWScalingNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewportWScalingNV*                 pViewportWScalings) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdSetViewportWScalingNV(commandBuffer, firstViewport, viewportCount, pViewportWScalings);
+}
+
+
+// ---- VK_EXT_display_control extension trampoline/terminators
+
+VKAPI_ATTR VkResult VKAPI_CALL DisplayPowerControlEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayPowerInfoEXT*                pDisplayPowerInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->DisplayPowerControlEXT(device, display, pDisplayPowerInfo);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL RegisterDeviceEventEXT(
+    VkDevice                                    device,
+    const VkDeviceEventInfoEXT*                 pDeviceEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->RegisterDeviceEventEXT(device, pDeviceEventInfo, pAllocator, pFence);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL RegisterDisplayEventEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayEventInfoEXT*                pDisplayEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->RegisterDisplayEventEXT(device, display, pDisplayEventInfo, pAllocator, pFence);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainCounterEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkSurfaceCounterFlagBitsEXT                 counter,
+    uint64_t*                                   pCounterValue) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetSwapchainCounterEXT(device, swapchain, counter, pCounterValue);
+}
+
+
+// ---- VK_GOOGLE_display_timing extension trampoline/terminators
+
+VKAPI_ATTR VkResult VKAPI_CALL GetRefreshCycleDurationGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkRefreshCycleDurationGOOGLE*               pDisplayTimingProperties) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetRefreshCycleDurationGOOGLE(device, swapchain, pDisplayTimingProperties);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPastPresentationTimingGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pPresentationTimingCount,
+    VkPastPresentationTimingGOOGLE*             pPresentationTimings) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetPastPresentationTimingGOOGLE(device, swapchain, pPresentationTimingCount, pPresentationTimings);
+}
+
+
+// ---- VK_EXT_discard_rectangles extension trampoline/terminators
+
+VKAPI_ATTR void VKAPI_CALL CmdSetDiscardRectangleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstDiscardRectangle,
+    uint32_t                                    discardRectangleCount,
+    const VkRect2D*                             pDiscardRectangles) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdSetDiscardRectangleEXT(commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles);
+}
+
+
+// ---- VK_EXT_hdr_metadata extension trampoline/terminators
+
+VKAPI_ATTR void VKAPI_CALL SetHdrMetadataEXT(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainKHR*                       pSwapchains,
+    const VkHdrMetadataEXT*                     pMetadata) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    disp->SetHdrMetadataEXT(device, swapchainCount, pSwapchains, pMetadata);
+}
+
+
+// ---- VK_EXT_debug_utils extension trampoline/terminators
+
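+// The debug-utils trampolines and terminators below copy the caller's info structure
+// and, when the named object is a VkPhysicalDevice (or, in the terminators, an
+// ICD-created VkSurfaceKHR), substitute the underlying handle before dispatching
+// the call down the chain.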
+VKAPI_ATTR VkResult VKAPI_CALL SetDebugUtilsObjectNameEXT(
+    VkDevice                                    device,
+    const VkDebugUtilsObjectNameInfoEXT*        pNameInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    VkDebugUtilsObjectNameInfoEXT local_name_info;
+    memcpy(&local_name_info, pNameInfo, sizeof(VkDebugUtilsObjectNameInfoEXT));
+    // If this is a physical device, we have to replace it with the proper one for the next call.
+    if (pNameInfo->objectType == VK_OBJECT_TYPE_PHYSICAL_DEVICE) {
+        struct loader_physical_device_tramp *phys_dev_tramp = (struct loader_physical_device_tramp *)(uintptr_t)pNameInfo->objectHandle;
+        local_name_info.objectHandle = (uint64_t)(uintptr_t)phys_dev_tramp->phys_dev;
+    }
+    if (disp->SetDebugUtilsObjectNameEXT != NULL) {
+        return disp->SetDebugUtilsObjectNameEXT(device, &local_name_info);
+    } else {
+        return VK_SUCCESS;
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_SetDebugUtilsObjectNameEXT(
+    VkDevice                                    device,
+    const VkDebugUtilsObjectNameInfoEXT*        pNameInfo) {
+    uint32_t icd_index = 0;
+    struct loader_device *dev;
+    struct loader_icd_term *icd_term = loader_get_icd_and_device(device, &dev, &icd_index);
+    if (NULL != icd_term && NULL != icd_term->dispatch.SetDebugUtilsObjectNameEXT) {
+        VkDebugUtilsObjectNameInfoEXT local_name_info;
+        memcpy(&local_name_info, pNameInfo, sizeof(VkDebugUtilsObjectNameInfoEXT));
+        // If this is a physical device, we have to replace it with the proper one for the next call.
+        if (pNameInfo->objectType == VK_OBJECT_TYPE_PHYSICAL_DEVICE) {
+            struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)(uintptr_t)pNameInfo->objectHandle;
+            local_name_info.objectHandle = (uint64_t)(uintptr_t)phys_dev_term->phys_dev;
+        // If this is a KHR_surface, and the ICD has created its own, we have to replace it with the proper one for the next call.
+        } else if (pNameInfo->objectType == VK_OBJECT_TYPE_SURFACE_KHR) {
+            if (NULL != icd_term && NULL != icd_term->dispatch.CreateSwapchainKHR) {
+                VkIcdSurface *icd_surface = (VkIcdSurface *)(uintptr_t)pNameInfo->objectHandle;
+                if (NULL != icd_surface->real_icd_surfaces) {
+                    local_name_info.objectHandle = (uint64_t)icd_surface->real_icd_surfaces[icd_index];
+                }
+            }
+        }
+        return icd_term->dispatch.SetDebugUtilsObjectNameEXT(device, &local_name_info);
+    } else {
+        return VK_SUCCESS;
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL SetDebugUtilsObjectTagEXT(
+    VkDevice                                    device,
+    const VkDebugUtilsObjectTagInfoEXT*         pTagInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    VkDebugUtilsObjectTagInfoEXT local_tag_info;
+    memcpy(&local_tag_info, pTagInfo, sizeof(VkDebugUtilsObjectTagInfoEXT));
+    // If this is a physical device, we have to replace it with the proper one for the next call.
+    if (pTagInfo->objectType == VK_OBJECT_TYPE_PHYSICAL_DEVICE) {
+        struct loader_physical_device_tramp *phys_dev_tramp = (struct loader_physical_device_tramp *)(uintptr_t)pTagInfo->objectHandle;
+        local_tag_info.objectHandle = (uint64_t)(uintptr_t)phys_dev_tramp->phys_dev;
+    }
+    if (disp->SetDebugUtilsObjectTagEXT != NULL) {
+        return disp->SetDebugUtilsObjectTagEXT(device, &local_tag_info);
+    } else {
+        return VK_SUCCESS;
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_SetDebugUtilsObjectTagEXT(
+    VkDevice                                    device,
+    const VkDebugUtilsObjectTagInfoEXT*         pTagInfo) {
+    uint32_t icd_index = 0;
+    struct loader_device *dev;
+    struct loader_icd_term *icd_term = loader_get_icd_and_device(device, &dev, &icd_index);
+    if (NULL != icd_term && NULL != icd_term->dispatch.SetDebugUtilsObjectTagEXT) {
+        VkDebugUtilsObjectTagInfoEXT local_tag_info;
+        memcpy(&local_tag_info, pTagInfo, sizeof(VkDebugUtilsObjectTagInfoEXT));
+        // If this is a physical device, we have to replace it with the proper one for the next call.
+        if (pTagInfo->objectType == VK_OBJECT_TYPE_PHYSICAL_DEVICE) {
+            struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)(uintptr_t)pTagInfo->objectHandle;
+            local_tag_info.objectHandle = (uint64_t)(uintptr_t)phys_dev_term->phys_dev;
+        // If this is a KHR_surface, and the ICD has created its own, we have to replace it with the proper one for the next call.
+        } else if (pTagInfo->objectType == VK_OBJECT_TYPE_SURFACE_KHR) {
+            if (NULL != icd_term && NULL != icd_term->dispatch.CreateSwapchainKHR) {
+                VkIcdSurface *icd_surface = (VkIcdSurface *)(uintptr_t)pTagInfo->objectHandle;
+                if (NULL != icd_surface->real_icd_surfaces) {
+                    local_tag_info.objectHandle = (uint64_t)icd_surface->real_icd_surfaces[icd_index];
+                }
+            }
+        }
+        return icd_term->dispatch.SetDebugUtilsObjectTagEXT(device, &local_tag_info);
+    } else {
+        return VK_SUCCESS;
+    }
+}
+
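+// The queue and command-buffer label helpers are optional in the device dispatch
+// table, so each call is guarded by a NULL check and silently skipped when the
+// entry point was not populated.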
+VKAPI_ATTR void VKAPI_CALL QueueBeginDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(queue);
+    if (disp->QueueBeginDebugUtilsLabelEXT != NULL) {
+        disp->QueueBeginDebugUtilsLabelEXT(queue, pLabelInfo);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL QueueEndDebugUtilsLabelEXT(
+    VkQueue                                     queue) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(queue);
+    if (disp->QueueEndDebugUtilsLabelEXT != NULL) {
+        disp->QueueEndDebugUtilsLabelEXT(queue);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL QueueInsertDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(queue);
+    if (disp->QueueInsertDebugUtilsLabelEXT != NULL) {
+        disp->QueueInsertDebugUtilsLabelEXT(queue, pLabelInfo);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdBeginDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    if (disp->CmdBeginDebugUtilsLabelEXT != NULL) {
+        disp->CmdBeginDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdEndDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    if (disp->CmdEndDebugUtilsLabelEXT != NULL) {
+        disp->CmdEndDebugUtilsLabelEXT(commandBuffer);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdInsertDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    if (disp->CmdInsertDebugUtilsLabelEXT != NULL) {
+        disp->CmdInsertDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
+    }
+}
+
+
+// ---- VK_ANDROID_external_memory_android_hardware_buffer extension trampoline/terminators
+
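+// Platform-specific entry points are wrapped in the same VK_USE_PLATFORM_* guards
+// used by the Vulkan headers, so they are only compiled into loaders built for
+// that platform.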
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+VKAPI_ATTR VkResult VKAPI_CALL GetAndroidHardwareBufferPropertiesANDROID(
+    VkDevice                                    device,
+    const struct AHardwareBuffer*               buffer,
+    VkAndroidHardwareBufferPropertiesANDROID*   pProperties) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetAndroidHardwareBufferPropertiesANDROID(device, buffer, pProperties);
+}
+
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+VKAPI_ATTR VkResult VKAPI_CALL GetMemoryAndroidHardwareBufferANDROID(
+    VkDevice                                    device,
+    const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
+    struct AHardwareBuffer**                    pBuffer) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetMemoryAndroidHardwareBufferANDROID(device, pInfo, pBuffer);
+}
+
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+// ---- VK_EXT_sample_locations extension trampoline/terminators
+
+VKAPI_ATTR void VKAPI_CALL CmdSetSampleLocationsEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkSampleLocationsInfoEXT*             pSampleLocationsInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdSetSampleLocationsEXT(commandBuffer, pSampleLocationsInfo);
+}
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMultisamplePropertiesEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSampleCountFlagBits                       samples,
+    VkMultisamplePropertiesEXT*                 pMultisampleProperties) {
+    const VkLayerInstanceDispatchTable *disp;
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    disp->GetPhysicalDeviceMultisamplePropertiesEXT(unwrapped_phys_dev, samples, pMultisampleProperties);
+}
+
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceMultisamplePropertiesEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSampleCountFlagBits                       samples,
+    VkMultisamplePropertiesEXT*                 pMultisampleProperties) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    if (NULL == icd_term->dispatch.GetPhysicalDeviceMultisamplePropertiesEXT) {
+        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "ICD associated with VkPhysicalDevice does not support GetPhysicalDeviceMultisamplePropertiesEXT");
+    }
+    icd_term->dispatch.GetPhysicalDeviceMultisamplePropertiesEXT(phys_dev_term->phys_dev, samples, pMultisampleProperties);
+}
+
+
+// ---- VK_EXT_image_drm_format_modifier extension trampoline/terminators
+
+VKAPI_ATTR VkResult VKAPI_CALL GetImageDrmFormatModifierPropertiesEXT(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkImageDrmFormatModifierPropertiesEXT*      pProperties) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetImageDrmFormatModifierPropertiesEXT(device, image, pProperties);
+}
+
+
+// ---- VK_EXT_validation_cache extension trampoline/terminators
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateValidationCacheEXT(
+    VkDevice                                    device,
+    const VkValidationCacheCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkValidationCacheEXT*                       pValidationCache) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->CreateValidationCacheEXT(device, pCreateInfo, pAllocator, pValidationCache);
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyValidationCacheEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    const VkAllocationCallbacks*                pAllocator) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    disp->DestroyValidationCacheEXT(device, validationCache, pAllocator);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL MergeValidationCachesEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkValidationCacheEXT*                 pSrcCaches) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->MergeValidationCachesEXT(device, dstCache, srcCacheCount, pSrcCaches);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetValidationCacheDataEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    size_t*                                     pDataSize,
+    void*                                       pData) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetValidationCacheDataEXT(device, validationCache, pDataSize, pData);
+}
+
+
+// ---- VK_NV_shading_rate_image extension trampoline/terminators
+
+VKAPI_ATTR void VKAPI_CALL CmdBindShadingRateImageNV(
+    VkCommandBuffer                             commandBuffer,
+    VkImageView                                 imageView,
+    VkImageLayout                               imageLayout) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdBindShadingRateImageNV(commandBuffer, imageView, imageLayout);
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdSetViewportShadingRatePaletteNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkShadingRatePaletteNV*               pShadingRatePalettes) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdSetViewportShadingRatePaletteNV(commandBuffer, firstViewport, viewportCount, pShadingRatePalettes);
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdSetCoarseSampleOrderNV(
+    VkCommandBuffer                             commandBuffer,
+    VkCoarseSampleOrderTypeNV                   sampleOrderType,
+    uint32_t                                    customSampleOrderCount,
+    const VkCoarseSampleOrderCustomNV*          pCustomSampleOrders) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdSetCoarseSampleOrderNV(commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders);
+}
+
+
+// ---- VK_NV_ray_tracing extension trampoline/terminators
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateAccelerationStructureNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureCreateInfoNV*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkAccelerationStructureNV*                  pAccelerationStructure) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->CreateAccelerationStructureNV(device, pCreateInfo, pAllocator, pAccelerationStructure);
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyAccelerationStructureNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    const VkAllocationCallbacks*                pAllocator) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    disp->DestroyAccelerationStructureNV(device, accelerationStructure, pAllocator);
+}
+
+VKAPI_ATTR void VKAPI_CALL GetAccelerationStructureMemoryRequirementsNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
+    VkMemoryRequirements2KHR*                   pMemoryRequirements) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    disp->GetAccelerationStructureMemoryRequirementsNV(device, pInfo, pMemoryRequirements);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL BindAccelerationStructureMemoryNV(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindAccelerationStructureMemoryInfoNV* pBindInfos) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->BindAccelerationStructureMemoryNV(device, bindInfoCount, pBindInfos);
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdBuildAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    const VkAccelerationStructureInfoNV*        pInfo,
+    VkBuffer                                    instanceData,
+    VkDeviceSize                                instanceOffset,
+    VkBool32                                    update,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkBuffer                                    scratch,
+    VkDeviceSize                                scratchOffset) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdBuildAccelerationStructureNV(commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset);
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdCopyAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkCopyAccelerationStructureModeNV           mode) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdCopyAccelerationStructureNV(commandBuffer, dst, src, mode);
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdTraceRaysNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    raygenShaderBindingTableBuffer,
+    VkDeviceSize                                raygenShaderBindingOffset,
+    VkBuffer                                    missShaderBindingTableBuffer,
+    VkDeviceSize                                missShaderBindingOffset,
+    VkDeviceSize                                missShaderBindingStride,
+    VkBuffer                                    hitShaderBindingTableBuffer,
+    VkDeviceSize                                hitShaderBindingOffset,
+    VkDeviceSize                                hitShaderBindingStride,
+    VkBuffer                                    callableShaderBindingTableBuffer,
+    VkDeviceSize                                callableShaderBindingOffset,
+    VkDeviceSize                                callableShaderBindingStride,
+    uint32_t                                    width,
+    uint32_t                                    height,
+    uint32_t                                    depth) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdTraceRaysNV(commandBuffer, raygenShaderBindingTableBuffer, raygenShaderBindingOffset, missShaderBindingTableBuffer, missShaderBindingOffset, missShaderBindingStride, hitShaderBindingTableBuffer, hitShaderBindingOffset, hitShaderBindingStride, callableShaderBindingTableBuffer, callableShaderBindingOffset, callableShaderBindingStride, width, height, depth);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateRayTracingPipelinesNV(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkRayTracingPipelineCreateInfoNV*     pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->CreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetRayTracingShaderGroupHandlesNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    firstGroup,
+    uint32_t                                    groupCount,
+    size_t                                      dataSize,
+    void*                                       pData) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetRayTracingShaderGroupHandlesNV(device, pipeline, firstGroup, groupCount, dataSize, pData);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetAccelerationStructureHandleNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    size_t                                      dataSize,
+    void*                                       pData) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetAccelerationStructureHandleNV(device, accelerationStructure, dataSize, pData);
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdWriteAccelerationStructuresPropertiesNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    accelerationStructureCount,
+    const VkAccelerationStructureNV*            pAccelerationStructures,
+    VkQueryType                                 queryType,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdWriteAccelerationStructuresPropertiesNV(commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CompileDeferredNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    shader) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->CompileDeferredNV(device, pipeline, shader);
+}
+
+
+// ---- VK_EXT_external_memory_host extension trampoline/terminators
+
+VKAPI_ATTR VkResult VKAPI_CALL GetMemoryHostPointerPropertiesEXT(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    const void*                                 pHostPointer,
+    VkMemoryHostPointerPropertiesEXT*           pMemoryHostPointerProperties) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetMemoryHostPointerPropertiesEXT(device, handleType, pHostPointer, pMemoryHostPointerProperties);
+}
+
+
+// ---- VK_AMD_buffer_marker extension trampoline/terminators
+
+VKAPI_ATTR void VKAPI_CALL CmdWriteBufferMarkerAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    uint32_t                                    marker) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdWriteBufferMarkerAMD(commandBuffer, pipelineStage, dstBuffer, dstOffset, marker);
+}
+
+
+// ---- VK_EXT_calibrated_timestamps extension trampoline/terminators
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceCalibrateableTimeDomainsEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pTimeDomainCount,
+    VkTimeDomainEXT*                            pTimeDomains) {
+    const VkLayerInstanceDispatchTable *disp;
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    return disp->GetPhysicalDeviceCalibrateableTimeDomainsEXT(unwrapped_phys_dev, pTimeDomainCount, pTimeDomains);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceCalibrateableTimeDomainsEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pTimeDomainCount,
+    VkTimeDomainEXT*                            pTimeDomains) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    if (NULL == icd_term->dispatch.GetPhysicalDeviceCalibrateableTimeDomainsEXT) {
+        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "ICD associated with VkPhysicalDevice does not support GetPhysicalDeviceCalibrateableTimeDomainsEXT");
+    }
+    return icd_term->dispatch.GetPhysicalDeviceCalibrateableTimeDomainsEXT(phys_dev_term->phys_dev, pTimeDomainCount, pTimeDomains);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetCalibratedTimestampsEXT(
+    VkDevice                                    device,
+    uint32_t                                    timestampCount,
+    const VkCalibratedTimestampInfoEXT*         pTimestampInfos,
+    uint64_t*                                   pTimestamps,
+    uint64_t*                                   pMaxDeviation) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetCalibratedTimestampsEXT(device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation);
+}
+
+
+// ---- VK_NV_mesh_shader extension trampoline/terminators
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawMeshTasksNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    taskCount,
+    uint32_t                                    firstTask) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdDrawMeshTasksNV(commandBuffer, taskCount, firstTask);
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawMeshTasksIndirectNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdDrawMeshTasksIndirectNV(commandBuffer, buffer, offset, drawCount, stride);
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawMeshTasksIndirectCountNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdDrawMeshTasksIndirectCountNV(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+}
+
+
+// ---- VK_NV_scissor_exclusive extension trampoline/terminators
+
+VKAPI_ATTR void VKAPI_CALL CmdSetExclusiveScissorNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstExclusiveScissor,
+    uint32_t                                    exclusiveScissorCount,
+    const VkRect2D*                             pExclusiveScissors) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdSetExclusiveScissorNV(commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors);
+}
+
+
+// ---- VK_NV_device_diagnostic_checkpoints extension trampoline/terminators
+
+VKAPI_ATTR void VKAPI_CALL CmdSetCheckpointNV(
+    VkCommandBuffer                             commandBuffer,
+    const void*                                 pCheckpointMarker) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdSetCheckpointNV(commandBuffer, pCheckpointMarker);
+}
+
+VKAPI_ATTR void VKAPI_CALL GetQueueCheckpointDataNV(
+    VkQueue                                     queue,
+    uint32_t*                                   pCheckpointDataCount,
+    VkCheckpointDataNV*                         pCheckpointData) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(queue);
+    disp->GetQueueCheckpointDataNV(queue, pCheckpointDataCount, pCheckpointData);
+}
+
+
+// ---- VK_INTEL_performance_query extension trampoline/terminators
+
+VKAPI_ATTR VkResult VKAPI_CALL InitializePerformanceApiINTEL(
+    VkDevice                                    device,
+    const VkInitializePerformanceApiInfoINTEL*  pInitializeInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->InitializePerformanceApiINTEL(device, pInitializeInfo);
+}
+
+VKAPI_ATTR void VKAPI_CALL UninitializePerformanceApiINTEL(
+    VkDevice                                    device) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    disp->UninitializePerformanceApiINTEL(device);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CmdSetPerformanceMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceMarkerInfoINTEL*         pMarkerInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    return disp->CmdSetPerformanceMarkerINTEL(commandBuffer, pMarkerInfo);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CmdSetPerformanceStreamMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceStreamMarkerInfoINTEL*   pMarkerInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    return disp->CmdSetPerformanceStreamMarkerINTEL(commandBuffer, pMarkerInfo);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CmdSetPerformanceOverrideINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceOverrideInfoINTEL*       pOverrideInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    return disp->CmdSetPerformanceOverrideINTEL(commandBuffer, pOverrideInfo);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL AcquirePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo,
+    VkPerformanceConfigurationINTEL*            pConfiguration) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->AcquirePerformanceConfigurationINTEL(device, pAcquireInfo, pConfiguration);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL ReleasePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    VkPerformanceConfigurationINTEL             configuration) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->ReleasePerformanceConfigurationINTEL(device, configuration);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL QueueSetPerformanceConfigurationINTEL(
+    VkQueue                                     queue,
+    VkPerformanceConfigurationINTEL             configuration) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(queue);
+    return disp->QueueSetPerformanceConfigurationINTEL(queue, configuration);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPerformanceParameterINTEL(
+    VkDevice                                    device,
+    VkPerformanceParameterTypeINTEL             parameter,
+    VkPerformanceValueINTEL*                    pValue) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetPerformanceParameterINTEL(device, parameter, pValue);
+}
+
+
+// ---- VK_AMD_display_native_hdr extension trampoline/terminators
+
+VKAPI_ATTR void VKAPI_CALL SetLocalDimmingAMD(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapChain,
+    VkBool32                                    localDimmingEnable) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    disp->SetLocalDimmingAMD(device, swapChain, localDimmingEnable);
+}
+
+
+// ---- VK_FUCHSIA_imagepipe_surface extension trampoline/terminators
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+VKAPI_ATTR VkResult VKAPI_CALL CreateImagePipeSurfaceFUCHSIA(
+    VkInstance                                  instance,
+    const VkImagePipeSurfaceCreateInfoFUCHSIA*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+#error("Not implemented. Likely needs to be manually generated!");
+    return disp->CreateImagePipeSurfaceFUCHSIA(instance, pCreateInfo, pAllocator, pSurface);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateImagePipeSurfaceFUCHSIA(
+    VkInstance                                  instance,
+    const VkImagePipeSurfaceCreateInfoFUCHSIA*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+#error("Not implemented. Likely needs to be manually generated!");
+}
+
+#endif // VK_USE_PLATFORM_FUCHSIA
+
+// ---- VK_EXT_buffer_device_address extension trampoline/terminators
+
+VKAPI_ATTR VkDeviceAddress VKAPI_CALL GetBufferDeviceAddressEXT(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoEXT*         pInfo) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetBufferDeviceAddressEXT(device, pInfo);
+}
+
+
+// ---- VK_NV_cooperative_matrix extension trampoline/terminators
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceCooperativeMatrixPropertiesNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkCooperativeMatrixPropertiesNV*            pProperties) {
+    const VkLayerInstanceDispatchTable *disp;
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    return disp->GetPhysicalDeviceCooperativeMatrixPropertiesNV(unwrapped_phys_dev, pPropertyCount, pProperties);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceCooperativeMatrixPropertiesNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkCooperativeMatrixPropertiesNV*            pProperties) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    if (NULL == icd_term->dispatch.GetPhysicalDeviceCooperativeMatrixPropertiesNV) {
+        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "ICD associated with VkPhysicalDevice does not support GetPhysicalDeviceCooperativeMatrixPropertiesNV");
+    }
+    return icd_term->dispatch.GetPhysicalDeviceCooperativeMatrixPropertiesNV(phys_dev_term->phys_dev, pPropertyCount, pProperties);
+}
+
+
+// ---- VK_NV_coverage_reduction_mode extension trampoline/terminators
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pCombinationCount,
+    VkFramebufferMixedSamplesCombinationNV*     pCombinations) {
+    const VkLayerInstanceDispatchTable *disp;
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    return disp->GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(unwrapped_phys_dev, pCombinationCount, pCombinations);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pCombinationCount,
+    VkFramebufferMixedSamplesCombinationNV*     pCombinations) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    if (NULL == icd_term->dispatch.GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV) {
+        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "ICD associated with VkPhysicalDevice does not support GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV");
+    }
+    return icd_term->dispatch.GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(phys_dev_term->phys_dev, pCombinationCount, pCombinations);
+}
+
+
+// ---- VK_EXT_full_screen_exclusive extension trampoline/terminators
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+VKAPI_ATTR VkResult VKAPI_CALL AcquireFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->AcquireFullScreenExclusiveModeEXT(device, swapchain);
+}
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+VKAPI_ATTR VkResult VKAPI_CALL ReleaseFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->ReleaseFullScreenExclusiveModeEXT(device, swapchain);
+}
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+// ---- VK_EXT_line_rasterization extension trampoline/terminators
+
+VKAPI_ATTR void VKAPI_CALL CmdSetLineStippleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    lineStippleFactor,
+    uint16_t                                    lineStipplePattern) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdSetLineStippleEXT(commandBuffer, lineStippleFactor, lineStipplePattern);
+}
+
+
+// ---- VK_EXT_host_query_reset extension trampoline/terminators
+
+VKAPI_ATTR void VKAPI_CALL ResetQueryPoolEXT(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    disp->ResetQueryPoolEXT(device, queryPool, firstQuery, queryCount);
+}
+
+// GPA helpers for extensions
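+// Resolves instance-level extension commands by name: on a match, *addr is set to
+// the trampoline (or to NULL when the owning instance extension was not enabled)
+// and true is returned to indicate the name was recognized.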
+bool extension_instance_gpa(struct loader_instance *ptr_instance, const char *name, void **addr) {
+    *addr = NULL;
+
+
+    // ---- VK_KHR_get_physical_device_properties2 extension commands
+    if (!strcmp("vkGetPhysicalDeviceFeatures2KHR", name)) {
+        *addr = (ptr_instance->enabled_known_extensions.khr_get_physical_device_properties2 == 1)
+                     ? (void *)vkGetPhysicalDeviceFeatures2
+                     : NULL;
+        return true;
+    }
+    if (!strcmp("vkGetPhysicalDeviceProperties2KHR", name)) {
+        *addr = (ptr_instance->enabled_known_extensions.khr_get_physical_device_properties2 == 1)
+                     ? (void *)vkGetPhysicalDeviceProperties2
+                     : NULL;
+        return true;
+    }
+    if (!strcmp("vkGetPhysicalDeviceFormatProperties2KHR", name)) {
+        *addr = (ptr_instance->enabled_known_extensions.khr_get_physical_device_properties2 == 1)
+                     ? (void *)vkGetPhysicalDeviceFormatProperties2
+                     : NULL;
+        return true;
+    }
+    if (!strcmp("vkGetPhysicalDeviceImageFormatProperties2KHR", name)) {
+        *addr = (ptr_instance->enabled_known_extensions.khr_get_physical_device_properties2 == 1)
+                     ? (void *)vkGetPhysicalDeviceImageFormatProperties2
+                     : NULL;
+        return true;
+    }
+    if (!strcmp("vkGetPhysicalDeviceQueueFamilyProperties2KHR", name)) {
+        *addr = (ptr_instance->enabled_known_extensions.khr_get_physical_device_properties2 == 1)
+                     ? (void *)vkGetPhysicalDeviceQueueFamilyProperties2
+                     : NULL;
+        return true;
+    }
+    if (!strcmp("vkGetPhysicalDeviceMemoryProperties2KHR", name)) {
+        *addr = (ptr_instance->enabled_known_extensions.khr_get_physical_device_properties2 == 1)
+                     ? (void *)vkGetPhysicalDeviceMemoryProperties2
+                     : NULL;
+        return true;
+    }
+    if (!strcmp("vkGetPhysicalDeviceSparseImageFormatProperties2KHR", name)) {
+        *addr = (ptr_instance->enabled_known_extensions.khr_get_physical_device_properties2 == 1)
+                     ? (void *)vkGetPhysicalDeviceSparseImageFormatProperties2
+                     : NULL;
+        return true;
+    }
+
+    // ---- VK_KHR_device_group extension commands
+    if (!strcmp("vkGetDeviceGroupPeerMemoryFeaturesKHR", name)) {
+        *addr = (void *)GetDeviceGroupPeerMemoryFeaturesKHR;
+        return true;
+    }
+    if (!strcmp("vkCmdSetDeviceMaskKHR", name)) {
+        *addr = (void *)CmdSetDeviceMaskKHR;
+        return true;
+    }
+    if (!strcmp("vkCmdDispatchBaseKHR", name)) {
+        *addr = (void *)CmdDispatchBaseKHR;
+        return true;
+    }
+
+    // ---- VK_KHR_maintenance1 extension commands
+    if (!strcmp("vkTrimCommandPoolKHR", name)) {
+        *addr = (void *)TrimCommandPoolKHR;
+        return true;
+    }
+
+    // ---- VK_KHR_device_group_creation extension commands
+    if (!strcmp("vkEnumeratePhysicalDeviceGroupsKHR", name)) {
+        *addr = (ptr_instance->enabled_known_extensions.khr_device_group_creation == 1)
+                     ? (void *)vkEnumeratePhysicalDeviceGroups
+                     : NULL;
+        return true;
+    }
+
+    // ---- VK_KHR_external_memory_capabilities extension commands
+    if (!strcmp("vkGetPhysicalDeviceExternalBufferPropertiesKHR", name)) {
+        *addr = (ptr_instance->enabled_known_extensions.khr_external_memory_capabilities == 1)
+                     ? (void *)vkGetPhysicalDeviceExternalBufferProperties
+                     : NULL;
+        return true;
+    }
+
+    // ---- VK_KHR_external_memory_win32 extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    if (!strcmp("vkGetMemoryWin32HandleKHR", name)) {
+        *addr = (void *)GetMemoryWin32HandleKHR;
+        return true;
+    }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    if (!strcmp("vkGetMemoryWin32HandlePropertiesKHR", name)) {
+        *addr = (void *)GetMemoryWin32HandlePropertiesKHR;
+        return true;
+    }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_KHR_external_memory_fd extension commands
+    if (!strcmp("vkGetMemoryFdKHR", name)) {
+        *addr = (void *)GetMemoryFdKHR;
+        return true;
+    }
+    if (!strcmp("vkGetMemoryFdPropertiesKHR", name)) {
+        *addr = (void *)GetMemoryFdPropertiesKHR;
+        return true;
+    }
+
+    // ---- VK_KHR_external_semaphore_capabilities extension commands
+    if (!strcmp("vkGetPhysicalDeviceExternalSemaphorePropertiesKHR", name)) {
+        *addr = (ptr_instance->enabled_known_extensions.khr_external_semaphore_capabilities == 1)
+                     ? (void *)vkGetPhysicalDeviceExternalSemaphoreProperties
+                     : NULL;
+        return true;
+    }
+
+    // ---- VK_KHR_external_semaphore_win32 extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    if (!strcmp("vkImportSemaphoreWin32HandleKHR", name)) {
+        *addr = (void *)ImportSemaphoreWin32HandleKHR;
+        return true;
+    }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    if (!strcmp("vkGetSemaphoreWin32HandleKHR", name)) {
+        *addr = (void *)GetSemaphoreWin32HandleKHR;
+        return true;
+    }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_KHR_external_semaphore_fd extension commands
+    if (!strcmp("vkImportSemaphoreFdKHR", name)) {
+        *addr = (void *)ImportSemaphoreFdKHR;
+        return true;
+    }
+    if (!strcmp("vkGetSemaphoreFdKHR", name)) {
+        *addr = (void *)GetSemaphoreFdKHR;
+        return true;
+    }
+
+    // ---- VK_KHR_push_descriptor extension commands
+    if (!strcmp("vkCmdPushDescriptorSetKHR", name)) {
+        *addr = (void *)CmdPushDescriptorSetKHR;
+        return true;
+    }
+    if (!strcmp("vkCmdPushDescriptorSetWithTemplateKHR", name)) {
+        *addr = (void *)CmdPushDescriptorSetWithTemplateKHR;
+        return true;
+    }
+
+    // ---- VK_KHR_descriptor_update_template extension commands
+    if (!strcmp("vkCreateDescriptorUpdateTemplateKHR", name)) {
+        *addr = (void *)CreateDescriptorUpdateTemplateKHR;
+        return true;
+    }
+    if (!strcmp("vkDestroyDescriptorUpdateTemplateKHR", name)) {
+        *addr = (void *)DestroyDescriptorUpdateTemplateKHR;
+        return true;
+    }
+    if (!strcmp("vkUpdateDescriptorSetWithTemplateKHR", name)) {
+        *addr = (void *)UpdateDescriptorSetWithTemplateKHR;
+        return true;
+    }
+
+    // ---- VK_KHR_create_renderpass2 extension commands
+    if (!strcmp("vkCreateRenderPass2KHR", name)) {
+        *addr = (void *)CreateRenderPass2KHR;
+        return true;
+    }
+    if (!strcmp("vkCmdBeginRenderPass2KHR", name)) {
+        *addr = (void *)CmdBeginRenderPass2KHR;
+        return true;
+    }
+    if (!strcmp("vkCmdNextSubpass2KHR", name)) {
+        *addr = (void *)CmdNextSubpass2KHR;
+        return true;
+    }
+    if (!strcmp("vkCmdEndRenderPass2KHR", name)) {
+        *addr = (void *)CmdEndRenderPass2KHR;
+        return true;
+    }
+
+    // ---- VK_KHR_shared_presentable_image extension commands
+    if (!strcmp("vkGetSwapchainStatusKHR", name)) {
+        *addr = (void *)GetSwapchainStatusKHR;
+        return true;
+    }
+
+    // ---- VK_KHR_external_fence_capabilities extension commands
+    if (!strcmp("vkGetPhysicalDeviceExternalFencePropertiesKHR", name)) {
+        *addr = (ptr_instance->enabled_known_extensions.khr_external_fence_capabilities == 1)
+                     ? (void *)vkGetPhysicalDeviceExternalFenceProperties
+                     : NULL;
+        return true;
+    }
+
+    // ---- VK_KHR_external_fence_win32 extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    if (!strcmp("vkImportFenceWin32HandleKHR", name)) {
+        *addr = (void *)ImportFenceWin32HandleKHR;
+        return true;
+    }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    if (!strcmp("vkGetFenceWin32HandleKHR", name)) {
+        *addr = (void *)GetFenceWin32HandleKHR;
+        return true;
+    }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_KHR_external_fence_fd extension commands
+    if (!strcmp("vkImportFenceFdKHR", name)) {
+        *addr = (void *)ImportFenceFdKHR;
+        return true;
+    }
+    if (!strcmp("vkGetFenceFdKHR", name)) {
+        *addr = (void *)GetFenceFdKHR;
+        return true;
+    }
+
+    // ---- VK_KHR_performance_query extension commands
+    if (!strcmp("vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR", name)) {
+        *addr = (void *)EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR;
+        return true;
+    }
+    if (!strcmp("vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR", name)) {
+        *addr = (void *)GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR;
+        return true;
+    }
+    if (!strcmp("vkAcquireProfilingLockKHR", name)) {
+        *addr = (void *)AcquireProfilingLockKHR;
+        return true;
+    }
+    if (!strcmp("vkReleaseProfilingLockKHR", name)) {
+        *addr = (void *)ReleaseProfilingLockKHR;
+        return true;
+    }
+
+    // ---- VK_KHR_get_surface_capabilities2 extension commands
+    if (!strcmp("vkGetPhysicalDeviceSurfaceCapabilities2KHR", name)) {
+        *addr = (ptr_instance->enabled_known_extensions.khr_get_surface_capabilities2 == 1)
+                     ? (void *)GetPhysicalDeviceSurfaceCapabilities2KHR
+                     : NULL;
+        return true;
+    }
+    if (!strcmp("vkGetPhysicalDeviceSurfaceFormats2KHR", name)) {
+        *addr = (ptr_instance->enabled_known_extensions.khr_get_surface_capabilities2 == 1)
+                     ? (void *)GetPhysicalDeviceSurfaceFormats2KHR
+                     : NULL;
+        return true;
+    }
+
+    // ---- VK_KHR_get_memory_requirements2 extension commands
+    if (!strcmp("vkGetImageMemoryRequirements2KHR", name)) {
+        *addr = (void *)GetImageMemoryRequirements2KHR;
+        return true;
+    }
+    if (!strcmp("vkGetBufferMemoryRequirements2KHR", name)) {
+        *addr = (void *)GetBufferMemoryRequirements2KHR;
+        return true;
+    }
+    if (!strcmp("vkGetImageSparseMemoryRequirements2KHR", name)) {
+        *addr = (void *)GetImageSparseMemoryRequirements2KHR;
+        return true;
+    }
+
+    // ---- VK_KHR_sampler_ycbcr_conversion extension commands
+    if (!strcmp("vkCreateSamplerYcbcrConversionKHR", name)) {
+        *addr = (void *)CreateSamplerYcbcrConversionKHR;
+        return true;
+    }
+    if (!strcmp("vkDestroySamplerYcbcrConversionKHR", name)) {
+        *addr = (void *)DestroySamplerYcbcrConversionKHR;
+        return true;
+    }
+
+    // ---- VK_KHR_bind_memory2 extension commands
+    if (!strcmp("vkBindBufferMemory2KHR", name)) {
+        *addr = (void *)BindBufferMemory2KHR;
+        return true;
+    }
+    if (!strcmp("vkBindImageMemory2KHR", name)) {
+        *addr = (void *)BindImageMemory2KHR;
+        return true;
+    }
+
+    // ---- VK_KHR_maintenance3 extension commands
+    if (!strcmp("vkGetDescriptorSetLayoutSupportKHR", name)) {
+        *addr = (void *)GetDescriptorSetLayoutSupportKHR;
+        return true;
+    }
+
+    // ---- VK_KHR_draw_indirect_count extension commands
+    if (!strcmp("vkCmdDrawIndirectCountKHR", name)) {
+        *addr = (void *)CmdDrawIndirectCountKHR;
+        return true;
+    }
+    if (!strcmp("vkCmdDrawIndexedIndirectCountKHR", name)) {
+        *addr = (void *)CmdDrawIndexedIndirectCountKHR;
+        return true;
+    }
+
+    // ---- VK_KHR_timeline_semaphore extension commands
+    if (!strcmp("vkGetSemaphoreCounterValueKHR", name)) {
+        *addr = (void *)GetSemaphoreCounterValueKHR;
+        return true;
+    }
+    if (!strcmp("vkWaitSemaphoresKHR", name)) {
+        *addr = (void *)WaitSemaphoresKHR;
+        return true;
+    }
+    if (!strcmp("vkSignalSemaphoreKHR", name)) {
+        *addr = (void *)SignalSemaphoreKHR;
+        return true;
+    }
+
+    // ---- VK_KHR_pipeline_executable_properties extension commands
+    if (!strcmp("vkGetPipelineExecutablePropertiesKHR", name)) {
+        *addr = (void *)GetPipelineExecutablePropertiesKHR;
+        return true;
+    }
+    if (!strcmp("vkGetPipelineExecutableStatisticsKHR", name)) {
+        *addr = (void *)GetPipelineExecutableStatisticsKHR;
+        return true;
+    }
+    if (!strcmp("vkGetPipelineExecutableInternalRepresentationsKHR", name)) {
+        *addr = (void *)GetPipelineExecutableInternalRepresentationsKHR;
+        return true;
+    }
+
+    // ---- VK_EXT_debug_marker extension commands
+    if (!strcmp("vkDebugMarkerSetObjectTagEXT", name)) {
+        *addr = (void *)DebugMarkerSetObjectTagEXT;
+        return true;
+    }
+    if (!strcmp("vkDebugMarkerSetObjectNameEXT", name)) {
+        *addr = (void *)DebugMarkerSetObjectNameEXT;
+        return true;
+    }
+    if (!strcmp("vkCmdDebugMarkerBeginEXT", name)) {
+        *addr = (void *)CmdDebugMarkerBeginEXT;
+        return true;
+    }
+    if (!strcmp("vkCmdDebugMarkerEndEXT", name)) {
+        *addr = (void *)CmdDebugMarkerEndEXT;
+        return true;
+    }
+    if (!strcmp("vkCmdDebugMarkerInsertEXT", name)) {
+        *addr = (void *)CmdDebugMarkerInsertEXT;
+        return true;
+    }
+
+    // ---- VK_EXT_transform_feedback extension commands
+    if (!strcmp("vkCmdBindTransformFeedbackBuffersEXT", name)) {
+        *addr = (void *)CmdBindTransformFeedbackBuffersEXT;
+        return true;
+    }
+    if (!strcmp("vkCmdBeginTransformFeedbackEXT", name)) {
+        *addr = (void *)CmdBeginTransformFeedbackEXT;
+        return true;
+    }
+    if (!strcmp("vkCmdEndTransformFeedbackEXT", name)) {
+        *addr = (void *)CmdEndTransformFeedbackEXT;
+        return true;
+    }
+    if (!strcmp("vkCmdBeginQueryIndexedEXT", name)) {
+        *addr = (void *)CmdBeginQueryIndexedEXT;
+        return true;
+    }
+    if (!strcmp("vkCmdEndQueryIndexedEXT", name)) {
+        *addr = (void *)CmdEndQueryIndexedEXT;
+        return true;
+    }
+    if (!strcmp("vkCmdDrawIndirectByteCountEXT", name)) {
+        *addr = (void *)CmdDrawIndirectByteCountEXT;
+        return true;
+    }
+
+    // ---- VK_NVX_image_view_handle extension commands
+    if (!strcmp("vkGetImageViewHandleNVX", name)) {
+        *addr = (void *)GetImageViewHandleNVX;
+        return true;
+    }
+
+    // ---- VK_AMD_draw_indirect_count extension commands
+    if (!strcmp("vkCmdDrawIndirectCountAMD", name)) {
+        *addr = (void *)CmdDrawIndirectCountAMD;
+        return true;
+    }
+    if (!strcmp("vkCmdDrawIndexedIndirectCountAMD", name)) {
+        *addr = (void *)CmdDrawIndexedIndirectCountAMD;
+        return true;
+    }
+
+    // ---- VK_AMD_shader_info extension commands
+    if (!strcmp("vkGetShaderInfoAMD", name)) {
+        *addr = (void *)GetShaderInfoAMD;
+        return true;
+    }
+
+    // ---- VK_GGP_stream_descriptor_surface extension commands
+#ifdef VK_USE_PLATFORM_GGP
+    if (!strcmp("vkCreateStreamDescriptorSurfaceGGP", name)) {
+        *addr = (ptr_instance->enabled_known_extensions.ggp_stream_descriptor_surface == 1)
+                     ? (void *)CreateStreamDescriptorSurfaceGGP
+                     : NULL;
+        return true;
+    }
+#endif // VK_USE_PLATFORM_GGP
+
+    // ---- VK_NV_external_memory_capabilities extension commands
+    if (!strcmp("vkGetPhysicalDeviceExternalImageFormatPropertiesNV", name)) {
+        *addr = (ptr_instance->enabled_known_extensions.nv_external_memory_capabilities == 1)
+                     ? (void *)GetPhysicalDeviceExternalImageFormatPropertiesNV
+                     : NULL;
+        return true;
+    }
+
+    // ---- VK_NV_external_memory_win32 extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    if (!strcmp("vkGetMemoryWin32HandleNV", name)) {
+        *addr = (void *)GetMemoryWin32HandleNV;
+        return true;
+    }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_NN_vi_surface extension commands
+#ifdef VK_USE_PLATFORM_VI_NN
+    if (!strcmp("vkCreateViSurfaceNN", name)) {
+        *addr = (ptr_instance->enabled_known_extensions.nn_vi_surface == 1)
+                     ? (void *)CreateViSurfaceNN
+                     : NULL;
+        return true;
+    }
+#endif // VK_USE_PLATFORM_VI_NN
+
+    // ---- VK_EXT_conditional_rendering extension commands
+    if (!strcmp("vkCmdBeginConditionalRenderingEXT", name)) {
+        *addr = (void *)CmdBeginConditionalRenderingEXT;
+        return true;
+    }
+    if (!strcmp("vkCmdEndConditionalRenderingEXT", name)) {
+        *addr = (void *)CmdEndConditionalRenderingEXT;
+        return true;
+    }
+
+    // ---- VK_NVX_device_generated_commands extension commands
+    if (!strcmp("vkCmdProcessCommandsNVX", name)) {
+        *addr = (void *)CmdProcessCommandsNVX;
+        return true;
+    }
+    if (!strcmp("vkCmdReserveSpaceForCommandsNVX", name)) {
+        *addr = (void *)CmdReserveSpaceForCommandsNVX;
+        return true;
+    }
+    if (!strcmp("vkCreateIndirectCommandsLayoutNVX", name)) {
+        *addr = (void *)CreateIndirectCommandsLayoutNVX;
+        return true;
+    }
+    if (!strcmp("vkDestroyIndirectCommandsLayoutNVX", name)) {
+        *addr = (void *)DestroyIndirectCommandsLayoutNVX;
+        return true;
+    }
+    if (!strcmp("vkCreateObjectTableNVX", name)) {
+        *addr = (void *)CreateObjectTableNVX;
+        return true;
+    }
+    if (!strcmp("vkDestroyObjectTableNVX", name)) {
+        *addr = (void *)DestroyObjectTableNVX;
+        return true;
+    }
+    if (!strcmp("vkRegisterObjectsNVX", name)) {
+        *addr = (void *)RegisterObjectsNVX;
+        return true;
+    }
+    if (!strcmp("vkUnregisterObjectsNVX", name)) {
+        *addr = (void *)UnregisterObjectsNVX;
+        return true;
+    }
+    if (!strcmp("vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX", name)) {
+        *addr = (void *)GetPhysicalDeviceGeneratedCommandsPropertiesNVX;
+        return true;
+    }
+
+    // ---- VK_NV_clip_space_w_scaling extension commands
+    if (!strcmp("vkCmdSetViewportWScalingNV", name)) {
+        *addr = (void *)CmdSetViewportWScalingNV;
+        return true;
+    }
+
+    // ---- VK_EXT_direct_mode_display extension commands
+    if (!strcmp("vkReleaseDisplayEXT", name)) {
+        *addr = (ptr_instance->enabled_known_extensions.ext_direct_mode_display == 1)
+                     ? (void *)ReleaseDisplayEXT
+                     : NULL;
+        return true;
+    }
+
+    // ---- VK_EXT_acquire_xlib_display extension commands
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    if (!strcmp("vkAcquireXlibDisplayEXT", name)) {
+        *addr = (ptr_instance->enabled_known_extensions.ext_acquire_xlib_display == 1)
+                     ? (void *)AcquireXlibDisplayEXT
+                     : NULL;
+        return true;
+    }
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    if (!strcmp("vkGetRandROutputDisplayEXT", name)) {
+        *addr = (ptr_instance->enabled_known_extensions.ext_acquire_xlib_display == 1)
+                     ? (void *)GetRandROutputDisplayEXT
+                     : NULL;
+        return true;
+    }
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+    // ---- VK_EXT_display_surface_counter extension commands
+    if (!strcmp("vkGetPhysicalDeviceSurfaceCapabilities2EXT", name)) {
+        *addr = (ptr_instance->enabled_known_extensions.ext_display_surface_counter == 1)
+                     ? (void *)GetPhysicalDeviceSurfaceCapabilities2EXT
+                     : NULL;
+        return true;
+    }
+
+    // ---- VK_EXT_display_control extension commands
+    if (!strcmp("vkDisplayPowerControlEXT", name)) {
+        *addr = (void *)DisplayPowerControlEXT;
+        return true;
+    }
+    if (!strcmp("vkRegisterDeviceEventEXT", name)) {
+        *addr = (void *)RegisterDeviceEventEXT;
+        return true;
+    }
+    if (!strcmp("vkRegisterDisplayEventEXT", name)) {
+        *addr = (void *)RegisterDisplayEventEXT;
+        return true;
+    }
+    if (!strcmp("vkGetSwapchainCounterEXT", name)) {
+        *addr = (void *)GetSwapchainCounterEXT;
+        return true;
+    }
+
+    // ---- VK_GOOGLE_display_timing extension commands
+    if (!strcmp("vkGetRefreshCycleDurationGOOGLE", name)) {
+        *addr = (void *)GetRefreshCycleDurationGOOGLE;
+        return true;
+    }
+    if (!strcmp("vkGetPastPresentationTimingGOOGLE", name)) {
+        *addr = (void *)GetPastPresentationTimingGOOGLE;
+        return true;
+    }
+
+    // ---- VK_EXT_discard_rectangles extension commands
+    if (!strcmp("vkCmdSetDiscardRectangleEXT", name)) {
+        *addr = (void *)CmdSetDiscardRectangleEXT;
+        return true;
+    }
+
+    // ---- VK_EXT_hdr_metadata extension commands
+    if (!strcmp("vkSetHdrMetadataEXT", name)) {
+        *addr = (void *)SetHdrMetadataEXT;
+        return true;
+    }
+
+    // ---- VK_EXT_debug_utils extension commands
+    if (!strcmp("vkSetDebugUtilsObjectNameEXT", name)) {
+        *addr = (ptr_instance->enabled_known_extensions.ext_debug_utils == 1)
+                     ? (void *)SetDebugUtilsObjectNameEXT
+                     : NULL;
+        return true;
+    }
+    if (!strcmp("vkSetDebugUtilsObjectTagEXT", name)) {
+        *addr = (ptr_instance->enabled_known_extensions.ext_debug_utils == 1)
+                     ? (void *)SetDebugUtilsObjectTagEXT
+                     : NULL;
+        return true;
+    }
+    if (!strcmp("vkQueueBeginDebugUtilsLabelEXT", name)) {
+        *addr = (ptr_instance->enabled_known_extensions.ext_debug_utils == 1)
+                     ? (void *)QueueBeginDebugUtilsLabelEXT
+                     : NULL;
+        return true;
+    }
+    if (!strcmp("vkQueueEndDebugUtilsLabelEXT", name)) {
+        *addr = (ptr_instance->enabled_known_extensions.ext_debug_utils == 1)
+                     ? (void *)QueueEndDebugUtilsLabelEXT
+                     : NULL;
+        return true;
+    }
+    if (!strcmp("vkQueueInsertDebugUtilsLabelEXT", name)) {
+        *addr = (ptr_instance->enabled_known_extensions.ext_debug_utils == 1)
+                     ? (void *)QueueInsertDebugUtilsLabelEXT
+                     : NULL;
+        return true;
+    }
+    if (!strcmp("vkCmdBeginDebugUtilsLabelEXT", name)) {
+        *addr = (ptr_instance->enabled_known_extensions.ext_debug_utils == 1)
+                     ? (void *)CmdBeginDebugUtilsLabelEXT
+                     : NULL;
+        return true;
+    }
+    if (!strcmp("vkCmdEndDebugUtilsLabelEXT", name)) {
+        *addr = (ptr_instance->enabled_known_extensions.ext_debug_utils == 1)
+                     ? (void *)CmdEndDebugUtilsLabelEXT
+                     : NULL;
+        return true;
+    }
+    if (!strcmp("vkCmdInsertDebugUtilsLabelEXT", name)) {
+        *addr = (ptr_instance->enabled_known_extensions.ext_debug_utils == 1)
+                     ? (void *)CmdInsertDebugUtilsLabelEXT
+                     : NULL;
+        return true;
+    }
+
+    // ---- VK_ANDROID_external_memory_android_hardware_buffer extension commands
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    if (!strcmp("vkGetAndroidHardwareBufferPropertiesANDROID", name)) {
+        *addr = (void *)GetAndroidHardwareBufferPropertiesANDROID;
+        return true;
+    }
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    if (!strcmp("vkGetMemoryAndroidHardwareBufferANDROID", name)) {
+        *addr = (void *)GetMemoryAndroidHardwareBufferANDROID;
+        return true;
+    }
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+    // ---- VK_EXT_sample_locations extension commands
+    if (!strcmp("vkCmdSetSampleLocationsEXT", name)) {
+        *addr = (void *)CmdSetSampleLocationsEXT;
+        return true;
+    }
+    if (!strcmp("vkGetPhysicalDeviceMultisamplePropertiesEXT", name)) {
+        *addr = (void *)GetPhysicalDeviceMultisamplePropertiesEXT;
+        return true;
+    }
+
+    // ---- VK_EXT_image_drm_format_modifier extension commands
+    if (!strcmp("vkGetImageDrmFormatModifierPropertiesEXT", name)) {
+        *addr = (void *)GetImageDrmFormatModifierPropertiesEXT;
+        return true;
+    }
+
+    // ---- VK_EXT_validation_cache extension commands
+    if (!strcmp("vkCreateValidationCacheEXT", name)) {
+        *addr = (void *)CreateValidationCacheEXT;
+        return true;
+    }
+    if (!strcmp("vkDestroyValidationCacheEXT", name)) {
+        *addr = (void *)DestroyValidationCacheEXT;
+        return true;
+    }
+    if (!strcmp("vkMergeValidationCachesEXT", name)) {
+        *addr = (void *)MergeValidationCachesEXT;
+        return true;
+    }
+    if (!strcmp("vkGetValidationCacheDataEXT", name)) {
+        *addr = (void *)GetValidationCacheDataEXT;
+        return true;
+    }
+
+    // ---- VK_NV_shading_rate_image extension commands
+    if (!strcmp("vkCmdBindShadingRateImageNV", name)) {
+        *addr = (void *)CmdBindShadingRateImageNV;
+        return true;
+    }
+    if (!strcmp("vkCmdSetViewportShadingRatePaletteNV", name)) {
+        *addr = (void *)CmdSetViewportShadingRatePaletteNV;
+        return true;
+    }
+    if (!strcmp("vkCmdSetCoarseSampleOrderNV", name)) {
+        *addr = (void *)CmdSetCoarseSampleOrderNV;
+        return true;
+    }
+
+    // ---- VK_NV_ray_tracing extension commands
+    if (!strcmp("vkCreateAccelerationStructureNV", name)) {
+        *addr = (void *)CreateAccelerationStructureNV;
+        return true;
+    }
+    if (!strcmp("vkDestroyAccelerationStructureNV", name)) {
+        *addr = (void *)DestroyAccelerationStructureNV;
+        return true;
+    }
+    if (!strcmp("vkGetAccelerationStructureMemoryRequirementsNV", name)) {
+        *addr = (void *)GetAccelerationStructureMemoryRequirementsNV;
+        return true;
+    }
+    if (!strcmp("vkBindAccelerationStructureMemoryNV", name)) {
+        *addr = (void *)BindAccelerationStructureMemoryNV;
+        return true;
+    }
+    if (!strcmp("vkCmdBuildAccelerationStructureNV", name)) {
+        *addr = (void *)CmdBuildAccelerationStructureNV;
+        return true;
+    }
+    if (!strcmp("vkCmdCopyAccelerationStructureNV", name)) {
+        *addr = (void *)CmdCopyAccelerationStructureNV;
+        return true;
+    }
+    if (!strcmp("vkCmdTraceRaysNV", name)) {
+        *addr = (void *)CmdTraceRaysNV;
+        return true;
+    }
+    if (!strcmp("vkCreateRayTracingPipelinesNV", name)) {
+        *addr = (void *)CreateRayTracingPipelinesNV;
+        return true;
+    }
+    if (!strcmp("vkGetRayTracingShaderGroupHandlesNV", name)) {
+        *addr = (void *)GetRayTracingShaderGroupHandlesNV;
+        return true;
+    }
+    if (!strcmp("vkGetAccelerationStructureHandleNV", name)) {
+        *addr = (void *)GetAccelerationStructureHandleNV;
+        return true;
+    }
+    if (!strcmp("vkCmdWriteAccelerationStructuresPropertiesNV", name)) {
+        *addr = (void *)CmdWriteAccelerationStructuresPropertiesNV;
+        return true;
+    }
+    if (!strcmp("vkCompileDeferredNV", name)) {
+        *addr = (void *)CompileDeferredNV;
+        return true;
+    }
+
+    // ---- VK_EXT_external_memory_host extension commands
+    if (!strcmp("vkGetMemoryHostPointerPropertiesEXT", name)) {
+        *addr = (void *)GetMemoryHostPointerPropertiesEXT;
+        return true;
+    }
+
+    // ---- VK_AMD_buffer_marker extension commands
+    if (!strcmp("vkCmdWriteBufferMarkerAMD", name)) {
+        *addr = (void *)CmdWriteBufferMarkerAMD;
+        return true;
+    }
+
+    // ---- VK_EXT_calibrated_timestamps extension commands
+    if (!strcmp("vkGetPhysicalDeviceCalibrateableTimeDomainsEXT", name)) {
+        *addr = (void *)GetPhysicalDeviceCalibrateableTimeDomainsEXT;
+        return true;
+    }
+    if (!strcmp("vkGetCalibratedTimestampsEXT", name)) {
+        *addr = (void *)GetCalibratedTimestampsEXT;
+        return true;
+    }
+
+    // ---- VK_NV_mesh_shader extension commands
+    if (!strcmp("vkCmdDrawMeshTasksNV", name)) {
+        *addr = (void *)CmdDrawMeshTasksNV;
+        return true;
+    }
+    if (!strcmp("vkCmdDrawMeshTasksIndirectNV", name)) {
+        *addr = (void *)CmdDrawMeshTasksIndirectNV;
+        return true;
+    }
+    if (!strcmp("vkCmdDrawMeshTasksIndirectCountNV", name)) {
+        *addr = (void *)CmdDrawMeshTasksIndirectCountNV;
+        return true;
+    }
+
+    // ---- VK_NV_scissor_exclusive extension commands
+    if (!strcmp("vkCmdSetExclusiveScissorNV", name)) {
+        *addr = (void *)CmdSetExclusiveScissorNV;
+        return true;
+    }
+
+    // ---- VK_NV_device_diagnostic_checkpoints extension commands
+    if (!strcmp("vkCmdSetCheckpointNV", name)) {
+        *addr = (void *)CmdSetCheckpointNV;
+        return true;
+    }
+    if (!strcmp("vkGetQueueCheckpointDataNV", name)) {
+        *addr = (void *)GetQueueCheckpointDataNV;
+        return true;
+    }
+
+    // ---- VK_INTEL_performance_query extension commands
+    if (!strcmp("vkInitializePerformanceApiINTEL", name)) {
+        *addr = (void *)InitializePerformanceApiINTEL;
+        return true;
+    }
+    if (!strcmp("vkUninitializePerformanceApiINTEL", name)) {
+        *addr = (void *)UninitializePerformanceApiINTEL;
+        return true;
+    }
+    if (!strcmp("vkCmdSetPerformanceMarkerINTEL", name)) {
+        *addr = (void *)CmdSetPerformanceMarkerINTEL;
+        return true;
+    }
+    if (!strcmp("vkCmdSetPerformanceStreamMarkerINTEL", name)) {
+        *addr = (void *)CmdSetPerformanceStreamMarkerINTEL;
+        return true;
+    }
+    if (!strcmp("vkCmdSetPerformanceOverrideINTEL", name)) {
+        *addr = (void *)CmdSetPerformanceOverrideINTEL;
+        return true;
+    }
+    if (!strcmp("vkAcquirePerformanceConfigurationINTEL", name)) {
+        *addr = (void *)AcquirePerformanceConfigurationINTEL;
+        return true;
+    }
+    if (!strcmp("vkReleasePerformanceConfigurationINTEL", name)) {
+        *addr = (void *)ReleasePerformanceConfigurationINTEL;
+        return true;
+    }
+    if (!strcmp("vkQueueSetPerformanceConfigurationINTEL", name)) {
+        *addr = (void *)QueueSetPerformanceConfigurationINTEL;
+        return true;
+    }
+    if (!strcmp("vkGetPerformanceParameterINTEL", name)) {
+        *addr = (void *)GetPerformanceParameterINTEL;
+        return true;
+    }
+
+    // ---- VK_AMD_display_native_hdr extension commands
+    if (!strcmp("vkSetLocalDimmingAMD", name)) {
+        *addr = (void *)SetLocalDimmingAMD;
+        return true;
+    }
+
+    // ---- VK_FUCHSIA_imagepipe_surface extension commands
+#ifdef VK_USE_PLATFORM_FUCHSIA
+    if (!strcmp("vkCreateImagePipeSurfaceFUCHSIA", name)) {
+        *addr = (ptr_instance->enabled_known_extensions.fuchsia_imagepipe_surface == 1)
+                     ? (void *)CreateImagePipeSurfaceFUCHSIA
+                     : NULL;
+        return true;
+    }
+#endif // VK_USE_PLATFORM_FUCHSIA
+
+    // ---- VK_EXT_buffer_device_address extension commands
+    if (!strcmp("vkGetBufferDeviceAddressEXT", name)) {
+        *addr = (void *)GetBufferDeviceAddressEXT;
+        return true;
+    }
+
+    // ---- VK_NV_cooperative_matrix extension commands
+    if (!strcmp("vkGetPhysicalDeviceCooperativeMatrixPropertiesNV", name)) {
+        *addr = (void *)GetPhysicalDeviceCooperativeMatrixPropertiesNV;
+        return true;
+    }
+
+    // ---- VK_NV_coverage_reduction_mode extension commands
+    if (!strcmp("vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV", name)) {
+        *addr = (void *)GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV;
+        return true;
+    }
+
+    // ---- VK_EXT_full_screen_exclusive extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    if (!strcmp("vkGetPhysicalDeviceSurfacePresentModes2EXT", name)) {
+        *addr = (void *)GetPhysicalDeviceSurfacePresentModes2EXT;
+        return true;
+    }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    if (!strcmp("vkAcquireFullScreenExclusiveModeEXT", name)) {
+        *addr = (void *)AcquireFullScreenExclusiveModeEXT;
+        return true;
+    }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    if (!strcmp("vkReleaseFullScreenExclusiveModeEXT", name)) {
+        *addr = (void *)ReleaseFullScreenExclusiveModeEXT;
+        return true;
+    }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    if (!strcmp("vkGetDeviceGroupSurfacePresentModes2EXT", name)) {
+        *addr = (void *)GetDeviceGroupSurfacePresentModes2EXT;
+        return true;
+    }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_EXT_line_rasterization extension commands
+    if (!strcmp("vkCmdSetLineStippleEXT", name)) {
+        *addr = (void *)CmdSetLineStippleEXT;
+        return true;
+    }
+
+    // ---- VK_EXT_host_query_reset extension commands
+    if (!strcmp("vkResetQueryPoolEXT", name)) {
+        *addr = (void *)ResetQueryPoolEXT;
+        return true;
+    }
+    return false;
+}
+
+// Queries the extensions enabled during a vkCreateInstance call and records the ones the loader knows about
+void extensions_create_instance(struct loader_instance *ptr_instance, const VkInstanceCreateInfo *pCreateInfo) {
+    for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
+
+    // ---- VK_KHR_get_physical_device_properties2 extension commands
+        if (0 == strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+            ptr_instance->enabled_known_extensions.khr_get_physical_device_properties2 = 1;
+
+    // ---- VK_KHR_device_group_creation extension commands
+        } else if (0 == strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME)) {
+            ptr_instance->enabled_known_extensions.khr_device_group_creation = 1;
+
+    // ---- VK_KHR_external_memory_capabilities extension commands
+        } else if (0 == strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME)) {
+            ptr_instance->enabled_known_extensions.khr_external_memory_capabilities = 1;
+
+    // ---- VK_KHR_external_semaphore_capabilities extension commands
+        } else if (0 == strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME)) {
+            ptr_instance->enabled_known_extensions.khr_external_semaphore_capabilities = 1;
+
+    // ---- VK_KHR_external_fence_capabilities extension commands
+        } else if (0 == strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME)) {
+            ptr_instance->enabled_known_extensions.khr_external_fence_capabilities = 1;
+
+    // ---- VK_KHR_get_surface_capabilities2 extension commands
+        } else if (0 == strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME)) {
+            ptr_instance->enabled_known_extensions.khr_get_surface_capabilities2 = 1;
+
+    // ---- VK_GGP_stream_descriptor_surface extension commands
+#ifdef VK_USE_PLATFORM_GGP
+        } else if (0 == strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_GGP_STREAM_DESCRIPTOR_SURFACE_EXTENSION_NAME)) {
+            ptr_instance->enabled_known_extensions.ggp_stream_descriptor_surface = 1;
+#endif // VK_USE_PLATFORM_GGP
+
+    // ---- VK_NV_external_memory_capabilities extension commands
+        } else if (0 == strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME)) {
+            ptr_instance->enabled_known_extensions.nv_external_memory_capabilities = 1;
+
+    // ---- VK_NN_vi_surface extension commands
+#ifdef VK_USE_PLATFORM_VI_NN
+        } else if (0 == strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_NN_VI_SURFACE_EXTENSION_NAME)) {
+            ptr_instance->enabled_known_extensions.nn_vi_surface = 1;
+#endif // VK_USE_PLATFORM_VI_NN
+
+    // ---- VK_EXT_direct_mode_display extension commands
+        } else if (0 == strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME)) {
+            ptr_instance->enabled_known_extensions.ext_direct_mode_display = 1;
+
+    // ---- VK_EXT_acquire_xlib_display extension commands
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+        } else if (0 == strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME)) {
+            ptr_instance->enabled_known_extensions.ext_acquire_xlib_display = 1;
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+    // ---- VK_EXT_display_surface_counter extension commands
+        } else if (0 == strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME)) {
+            ptr_instance->enabled_known_extensions.ext_display_surface_counter = 1;
+
+    // ---- VK_EXT_debug_utils extension commands
+        } else if (0 == strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_EXT_DEBUG_UTILS_EXTENSION_NAME)) {
+            ptr_instance->enabled_known_extensions.ext_debug_utils = 1;
+
+    // ---- VK_FUCHSIA_imagepipe_surface extension commands
+#ifdef VK_USE_PLATFORM_FUCHSIA
+        } else if (0 == strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME)) {
+            ptr_instance->enabled_known_extensions.fuchsia_imagepipe_surface = 1;
+#endif // VK_USE_PLATFORM_FUCHSIA
+        }
+    }
+}
+
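+// Illustrative sketch (hypothetical caller, not generated code): the bits recorded above are what
+// extension_instance_gpa() consults when deciding whether to return a command pointer or NULL.
+// Assuming a create path that has the loader_instance and the app's VkInstanceCreateInfo in hand:
+//
+//     void example_record_enabled_extensions(struct loader_instance *inst,
+//                                            const VkInstanceCreateInfo *pCreateInfo) {
+//         extensions_create_instance(inst, pCreateInfo);
+//         void *addr = NULL;
+//         // Stores a real pointer only if khr_get_physical_device_properties2 was set above.
+//         extension_instance_gpa(inst, "vkGetPhysicalDeviceFeatures2KHR", &addr);
+//     }
+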
+// Some device commands still need a terminator because the loader must unwrap one of their parameters
+// before handing the call to the ICD. In many cases the item needing unwrapping is a VkPhysicalDevice or
+// VkSurfaceKHR object, but there may be other items in the future.
+PFN_vkVoidFunction get_extension_device_proc_terminator(struct loader_device *dev, const char *pName) {
+    PFN_vkVoidFunction addr = NULL;
+
+    // ---- VK_KHR_swapchain extension commands
+    if (dev->extensions.khr_swapchain_enabled) {
+        if (!strcmp(pName, "vkCreateSwapchainKHR")) {
+            addr = (PFN_vkVoidFunction)terminator_CreateSwapchainKHR;
+        } else if (!strcmp(pName, "vkGetDeviceGroupSurfacePresentModesKHR")) {
+            addr = (PFN_vkVoidFunction)terminator_GetDeviceGroupSurfacePresentModesKHR;
+        }
+    }
+
+    // ---- VK_KHR_display_swapchain extension commands
+    if (dev->extensions.khr_display_swapchain_enabled) {
+        if (!strcmp(pName, "vkCreateSharedSwapchainsKHR")) {
+            addr = (PFN_vkVoidFunction)terminator_CreateSharedSwapchainsKHR;
+        }
+    }
+
+    // ---- VK_EXT_debug_marker extension commands
+    if (dev->extensions.ext_debug_marker_enabled) {
+        if (!strcmp(pName, "vkDebugMarkerSetObjectTagEXT")) {
+            addr = (PFN_vkVoidFunction)terminator_DebugMarkerSetObjectTagEXT;
+        } else if (!strcmp(pName, "vkDebugMarkerSetObjectNameEXT")) {
+            addr = (PFN_vkVoidFunction)terminator_DebugMarkerSetObjectNameEXT;
+        }
+    }
+
+    // ---- VK_EXT_debug_utils extension commands
+    if (dev->extensions.ext_debug_utils_enabled) {
+        if (!strcmp(pName, "vkSetDebugUtilsObjectNameEXT")) {
+            addr = (PFN_vkVoidFunction)terminator_SetDebugUtilsObjectNameEXT;
+        } else if (!strcmp(pName, "vkSetDebugUtilsObjectTagEXT")) {
+            addr = (PFN_vkVoidFunction)terminator_SetDebugUtilsObjectTagEXT;
+        }
+    }
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_EXT_full_screen_exclusive extension commands
+    if (dev->extensions.ext_full_screen_exclusive_enabled && dev->extensions.khr_device_group_enabled) {
+        if (!strcmp(pName, "vkGetDeviceGroupSurfacePresentModes2EXT")) {
+            addr = (PFN_vkVoidFunction)terminator_GetDeviceGroupSurfacePresentModes2EXT;
+        }
+    }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+    return addr;
+}
+
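+// Illustrative sketch (hypothetical caller, not generated code): a device-command resolver would
+// typically try this terminator lookup first and fall back to the ICD's own dispatch when the
+// command does not need loader-side unwrapping:
+//
+//     PFN_vkVoidFunction term = get_extension_device_proc_terminator(dev, pName);
+//     if (term == NULL) {
+//         // not one of the few commands needing a terminator; use the normal dispatch path
+//     }
+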
+// The loader's default instance dispatch table, used when no instance layers are
+// activated.  It points at the loader's "terminator" functions.
+const VkLayerInstanceDispatchTable instance_disp = {
+
+    // ---- Core 1_0 commands
+    .DestroyInstance = terminator_DestroyInstance,
+    .EnumeratePhysicalDevices = terminator_EnumeratePhysicalDevices,
+    .GetPhysicalDeviceFeatures = terminator_GetPhysicalDeviceFeatures,
+    .GetPhysicalDeviceFormatProperties = terminator_GetPhysicalDeviceFormatProperties,
+    .GetPhysicalDeviceImageFormatProperties = terminator_GetPhysicalDeviceImageFormatProperties,
+    .GetPhysicalDeviceProperties = terminator_GetPhysicalDeviceProperties,
+    .GetPhysicalDeviceQueueFamilyProperties = terminator_GetPhysicalDeviceQueueFamilyProperties,
+    .GetPhysicalDeviceMemoryProperties = terminator_GetPhysicalDeviceMemoryProperties,
+    .GetInstanceProcAddr = vkGetInstanceProcAddr,
+    .EnumerateDeviceExtensionProperties = terminator_EnumerateDeviceExtensionProperties,
+    .EnumerateDeviceLayerProperties = terminator_EnumerateDeviceLayerProperties,
+    .GetPhysicalDeviceSparseImageFormatProperties = terminator_GetPhysicalDeviceSparseImageFormatProperties,
+
+    // ---- Core 1_1 commands
+    .EnumeratePhysicalDeviceGroups = terminator_EnumeratePhysicalDeviceGroups,
+    .GetPhysicalDeviceFeatures2 = terminator_GetPhysicalDeviceFeatures2,
+    .GetPhysicalDeviceProperties2 = terminator_GetPhysicalDeviceProperties2,
+    .GetPhysicalDeviceFormatProperties2 = terminator_GetPhysicalDeviceFormatProperties2,
+    .GetPhysicalDeviceImageFormatProperties2 = terminator_GetPhysicalDeviceImageFormatProperties2,
+    .GetPhysicalDeviceQueueFamilyProperties2 = terminator_GetPhysicalDeviceQueueFamilyProperties2,
+    .GetPhysicalDeviceMemoryProperties2 = terminator_GetPhysicalDeviceMemoryProperties2,
+    .GetPhysicalDeviceSparseImageFormatProperties2 = terminator_GetPhysicalDeviceSparseImageFormatProperties2,
+    .GetPhysicalDeviceExternalBufferProperties = terminator_GetPhysicalDeviceExternalBufferProperties,
+    .GetPhysicalDeviceExternalFenceProperties = terminator_GetPhysicalDeviceExternalFenceProperties,
+    .GetPhysicalDeviceExternalSemaphoreProperties = terminator_GetPhysicalDeviceExternalSemaphoreProperties,
+
+    // ---- VK_KHR_surface extension commands
+    .DestroySurfaceKHR = terminator_DestroySurfaceKHR,
+    .GetPhysicalDeviceSurfaceSupportKHR = terminator_GetPhysicalDeviceSurfaceSupportKHR,
+    .GetPhysicalDeviceSurfaceCapabilitiesKHR = terminator_GetPhysicalDeviceSurfaceCapabilitiesKHR,
+    .GetPhysicalDeviceSurfaceFormatsKHR = terminator_GetPhysicalDeviceSurfaceFormatsKHR,
+    .GetPhysicalDeviceSurfacePresentModesKHR = terminator_GetPhysicalDeviceSurfacePresentModesKHR,
+
+    // ---- VK_KHR_swapchain extension commands
+    .GetPhysicalDevicePresentRectanglesKHR = terminator_GetPhysicalDevicePresentRectanglesKHR,
+
+    // ---- VK_KHR_display extension commands
+    .GetPhysicalDeviceDisplayPropertiesKHR = terminator_GetPhysicalDeviceDisplayPropertiesKHR,
+    .GetPhysicalDeviceDisplayPlanePropertiesKHR = terminator_GetPhysicalDeviceDisplayPlanePropertiesKHR,
+    .GetDisplayPlaneSupportedDisplaysKHR = terminator_GetDisplayPlaneSupportedDisplaysKHR,
+    .GetDisplayModePropertiesKHR = terminator_GetDisplayModePropertiesKHR,
+    .CreateDisplayModeKHR = terminator_CreateDisplayModeKHR,
+    .GetDisplayPlaneCapabilitiesKHR = terminator_GetDisplayPlaneCapabilitiesKHR,
+    .CreateDisplayPlaneSurfaceKHR = terminator_CreateDisplayPlaneSurfaceKHR,
+
+    // ---- VK_KHR_xlib_surface extension commands
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    .CreateXlibSurfaceKHR = terminator_CreateXlibSurfaceKHR,
+#endif // VK_USE_PLATFORM_XLIB_KHR
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    .GetPhysicalDeviceXlibPresentationSupportKHR = terminator_GetPhysicalDeviceXlibPresentationSupportKHR,
+#endif // VK_USE_PLATFORM_XLIB_KHR
+
+    // ---- VK_KHR_xcb_surface extension commands
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    .CreateXcbSurfaceKHR = terminator_CreateXcbSurfaceKHR,
+#endif // VK_USE_PLATFORM_XCB_KHR
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    .GetPhysicalDeviceXcbPresentationSupportKHR = terminator_GetPhysicalDeviceXcbPresentationSupportKHR,
+#endif // VK_USE_PLATFORM_XCB_KHR
+
+    // ---- VK_KHR_wayland_surface extension commands
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    .CreateWaylandSurfaceKHR = terminator_CreateWaylandSurfaceKHR,
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    .GetPhysicalDeviceWaylandPresentationSupportKHR = terminator_GetPhysicalDeviceWaylandPresentationSupportKHR,
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+
+    // ---- VK_KHR_android_surface extension commands
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    .CreateAndroidSurfaceKHR = terminator_CreateAndroidSurfaceKHR,
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+    // ---- VK_KHR_win32_surface extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    .CreateWin32SurfaceKHR = terminator_CreateWin32SurfaceKHR,
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    .GetPhysicalDeviceWin32PresentationSupportKHR = terminator_GetPhysicalDeviceWin32PresentationSupportKHR,
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_KHR_get_physical_device_properties2 extension commands
+    .GetPhysicalDeviceFeatures2KHR = terminator_GetPhysicalDeviceFeatures2,
+    .GetPhysicalDeviceProperties2KHR = terminator_GetPhysicalDeviceProperties2,
+    .GetPhysicalDeviceFormatProperties2KHR = terminator_GetPhysicalDeviceFormatProperties2,
+    .GetPhysicalDeviceImageFormatProperties2KHR = terminator_GetPhysicalDeviceImageFormatProperties2,
+    .GetPhysicalDeviceQueueFamilyProperties2KHR = terminator_GetPhysicalDeviceQueueFamilyProperties2,
+    .GetPhysicalDeviceMemoryProperties2KHR = terminator_GetPhysicalDeviceMemoryProperties2,
+    .GetPhysicalDeviceSparseImageFormatProperties2KHR = terminator_GetPhysicalDeviceSparseImageFormatProperties2,
+
+    // ---- VK_KHR_device_group_creation extension commands
+    .EnumeratePhysicalDeviceGroupsKHR = terminator_EnumeratePhysicalDeviceGroups,
+
+    // ---- VK_KHR_external_memory_capabilities extension commands
+    .GetPhysicalDeviceExternalBufferPropertiesKHR = terminator_GetPhysicalDeviceExternalBufferProperties,
+
+    // ---- VK_KHR_external_semaphore_capabilities extension commands
+    .GetPhysicalDeviceExternalSemaphorePropertiesKHR = terminator_GetPhysicalDeviceExternalSemaphoreProperties,
+
+    // ---- VK_KHR_external_fence_capabilities extension commands
+    .GetPhysicalDeviceExternalFencePropertiesKHR = terminator_GetPhysicalDeviceExternalFenceProperties,
+
+    // ---- VK_KHR_performance_query extension commands
+    .EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = terminator_EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR,
+    .GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = terminator_GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR,
+
+    // ---- VK_KHR_get_surface_capabilities2 extension commands
+    .GetPhysicalDeviceSurfaceCapabilities2KHR = terminator_GetPhysicalDeviceSurfaceCapabilities2KHR,
+    .GetPhysicalDeviceSurfaceFormats2KHR = terminator_GetPhysicalDeviceSurfaceFormats2KHR,
+
+    // ---- VK_KHR_get_display_properties2 extension commands
+    .GetPhysicalDeviceDisplayProperties2KHR = terminator_GetPhysicalDeviceDisplayProperties2KHR,
+    .GetPhysicalDeviceDisplayPlaneProperties2KHR = terminator_GetPhysicalDeviceDisplayPlaneProperties2KHR,
+    .GetDisplayModeProperties2KHR = terminator_GetDisplayModeProperties2KHR,
+    .GetDisplayPlaneCapabilities2KHR = terminator_GetDisplayPlaneCapabilities2KHR,
+
+    // ---- VK_EXT_debug_report extension commands
+    .CreateDebugReportCallbackEXT = terminator_CreateDebugReportCallbackEXT,
+    .DestroyDebugReportCallbackEXT = terminator_DestroyDebugReportCallbackEXT,
+    .DebugReportMessageEXT = terminator_DebugReportMessageEXT,
+
+    // ---- VK_GGP_stream_descriptor_surface extension commands
+#ifdef VK_USE_PLATFORM_GGP
+    .CreateStreamDescriptorSurfaceGGP = terminator_CreateStreamDescriptorSurfaceGGP,
+#endif // VK_USE_PLATFORM_GGP
+
+    // ---- VK_NV_external_memory_capabilities extension commands
+    .GetPhysicalDeviceExternalImageFormatPropertiesNV = terminator_GetPhysicalDeviceExternalImageFormatPropertiesNV,
+
+    // ---- VK_NN_vi_surface extension commands
+#ifdef VK_USE_PLATFORM_VI_NN
+    .CreateViSurfaceNN = terminator_CreateViSurfaceNN,
+#endif // VK_USE_PLATFORM_VI_NN
+
+    // ---- VK_NVX_device_generated_commands extension commands
+    .GetPhysicalDeviceGeneratedCommandsPropertiesNVX = terminator_GetPhysicalDeviceGeneratedCommandsPropertiesNVX,
+
+    // ---- VK_EXT_direct_mode_display extension commands
+    .ReleaseDisplayEXT = terminator_ReleaseDisplayEXT,
+
+    // ---- VK_EXT_acquire_xlib_display extension commands
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    .AcquireXlibDisplayEXT = terminator_AcquireXlibDisplayEXT,
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    .GetRandROutputDisplayEXT = terminator_GetRandROutputDisplayEXT,
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+    // ---- VK_EXT_display_surface_counter extension commands
+    .GetPhysicalDeviceSurfaceCapabilities2EXT = terminator_GetPhysicalDeviceSurfaceCapabilities2EXT,
+
+    // ---- VK_MVK_ios_surface extension commands
+#ifdef VK_USE_PLATFORM_IOS_MVK
+    .CreateIOSSurfaceMVK = terminator_CreateIOSSurfaceMVK,
+#endif // VK_USE_PLATFORM_IOS_MVK
+
+    // ---- VK_MVK_macos_surface extension commands
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+    .CreateMacOSSurfaceMVK = terminator_CreateMacOSSurfaceMVK,
+#endif // VK_USE_PLATFORM_MACOS_MVK
+
+    // ---- VK_EXT_debug_utils extension commands
+    .CreateDebugUtilsMessengerEXT = terminator_CreateDebugUtilsMessengerEXT,
+    .DestroyDebugUtilsMessengerEXT = terminator_DestroyDebugUtilsMessengerEXT,
+    .SubmitDebugUtilsMessageEXT = terminator_SubmitDebugUtilsMessageEXT,
+
+    // ---- VK_EXT_sample_locations extension commands
+    .GetPhysicalDeviceMultisamplePropertiesEXT = terminator_GetPhysicalDeviceMultisamplePropertiesEXT,
+
+    // ---- VK_EXT_calibrated_timestamps extension commands
+    .GetPhysicalDeviceCalibrateableTimeDomainsEXT = terminator_GetPhysicalDeviceCalibrateableTimeDomainsEXT,
+
+    // ---- VK_FUCHSIA_imagepipe_surface extension commands
+#ifdef VK_USE_PLATFORM_FUCHSIA
+    .CreateImagePipeSurfaceFUCHSIA = terminator_CreateImagePipeSurfaceFUCHSIA,
+#endif // VK_USE_PLATFORM_FUCHSIA
+
+    // ---- VK_EXT_metal_surface extension commands
+#ifdef VK_USE_PLATFORM_METAL_EXT
+    .CreateMetalSurfaceEXT = terminator_CreateMetalSurfaceEXT,
+#endif // VK_USE_PLATFORM_METAL_EXT
+
+    // ---- VK_NV_cooperative_matrix extension commands
+    .GetPhysicalDeviceCooperativeMatrixPropertiesNV = terminator_GetPhysicalDeviceCooperativeMatrixPropertiesNV,
+
+    // ---- VK_NV_coverage_reduction_mode extension commands
+    .GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = terminator_GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV,
+
+    // ---- VK_EXT_full_screen_exclusive extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    .GetPhysicalDeviceSurfacePresentModes2EXT = terminator_GetPhysicalDeviceSurfacePresentModes2EXT,
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_EXT_headless_surface extension commands
+    .CreateHeadlessSurfaceEXT = terminator_CreateHeadlessSurfaceEXT,
+};
+
+// A null-terminated list of all the instance extensions supported by the loader.
+// If an instance extension is exported by one or more of the ICDs detected by the loader
+// but its name is not in this list, that name is filtered out before the list of extensions
+// is passed to the application.
+const char *const LOADER_INSTANCE_EXTENSIONS[] = {
+                                                  VK_KHR_SURFACE_EXTENSION_NAME,
+                                                  VK_KHR_DISPLAY_EXTENSION_NAME,
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+                                                  VK_KHR_XLIB_SURFACE_EXTENSION_NAME,
+#endif // VK_USE_PLATFORM_XLIB_KHR
+#ifdef VK_USE_PLATFORM_XCB_KHR
+                                                  VK_KHR_XCB_SURFACE_EXTENSION_NAME,
+#endif // VK_USE_PLATFORM_XCB_KHR
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+                                                  VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME,
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+                                                  VK_KHR_WIN32_SURFACE_EXTENSION_NAME,
+#endif // VK_USE_PLATFORM_WIN32_KHR
+                                                  VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
+                                                  VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME,
+                                                  VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME,
+                                                  VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME,
+                                                  VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME,
+                                                  VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME,
+                                                  VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME,
+                                                  VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME,
+                                                  VK_EXT_DEBUG_REPORT_EXTENSION_NAME,
+#ifdef VK_USE_PLATFORM_GGP
+                                                  VK_GGP_STREAM_DESCRIPTOR_SURFACE_EXTENSION_NAME,
+#endif // VK_USE_PLATFORM_GGP
+                                                  VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME,
+                                                  VK_EXT_VALIDATION_FLAGS_EXTENSION_NAME,
+#ifdef VK_USE_PLATFORM_VI_NN
+                                                  VK_NN_VI_SURFACE_EXTENSION_NAME,
+#endif // VK_USE_PLATFORM_VI_NN
+                                                  VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME,
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+                                                  VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME,
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+                                                  VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME,
+                                                  VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME,
+#ifdef VK_USE_PLATFORM_IOS_MVK
+                                                  VK_MVK_IOS_SURFACE_EXTENSION_NAME,
+#endif // VK_USE_PLATFORM_IOS_MVK
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+                                                  VK_MVK_MACOS_SURFACE_EXTENSION_NAME,
+#endif // VK_USE_PLATFORM_MACOS_MVK
+                                                  VK_EXT_DEBUG_UTILS_EXTENSION_NAME,
+#ifdef VK_USE_PLATFORM_FUCHSIA
+                                                  VK_FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME,
+#endif // VK_USE_PLATFORM_FUCHSIA
+#ifdef VK_USE_PLATFORM_METAL_EXT
+                                                  VK_EXT_METAL_SURFACE_EXTENSION_NAME,
+#endif // VK_USE_PLATFORM_METAL_EXT
+                                                  VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME,
+                                                  VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME,
+                                                  NULL };
+
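The generated table and helpers above gate each extension entry point on what the application actually enabled at instance creation. A minimal application-side sketch of that behavior (illustrative only; the helper name and flow below are not part of the patch):

#include <vulkan/vulkan.h>

// Hypothetical helper, for illustration only: returns a debug-utils entry point.
static PFN_vkSetDebugUtilsObjectNameEXT load_set_object_name(void) {
    const char *exts[] = { VK_EXT_DEBUG_UTILS_EXTENSION_NAME };
    VkInstanceCreateInfo ci = {
        .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
        .enabledExtensionCount = 1,
        .ppEnabledExtensionNames = exts,
    };
    VkInstance inst = VK_NULL_HANDLE;
    if (vkCreateInstance(&ci, NULL, &inst) != VK_SUCCESS) {
        return NULL;
    }
    // Non-NULL only because VK_EXT_debug_utils was enabled above; otherwise the
    // loader's extension_instance_gpa() hands back NULL for this name.
    return (PFN_vkSetDebugUtilsObjectNameEXT)
        vkGetInstanceProcAddr(inst, "vkSetDebugUtilsObjectNameEXT");
}
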
diff --git a/src/third_party/vulkan-loader/src/loader/generated/vk_loader_extensions.h b/src/third_party/vulkan-loader/src/loader/generated/vk_loader_extensions.h
new file mode 100644
index 0000000..784455f
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/generated/vk_loader_extensions.h
@@ -0,0 +1,448 @@
+// *** THIS FILE IS GENERATED - DO NOT EDIT ***
+// See loader_extension_generator.py for modifications
+
+/*
+ * Copyright (c) 2015-2017 The Khronos Group Inc.
+ * Copyright (c) 2015-2017 Valve Corporation
+ * Copyright (c) 2015-2017 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Mark Young <marky@lunarg.com>
+ */
+
+#pragma once
+
+// Structures defined externally, but used here
+struct loader_instance;
+struct loader_device;
+struct loader_icd_term;
+struct loader_dev_dispatch_table;
+
+// Device extension error function
+VKAPI_ATTR VkResult VKAPI_CALL vkDevExtError(VkDevice dev);
+
+// Extension interception for vkGetInstanceProcAddr function, so we can return
+// the appropriate information for any instance extensions we know about.
+bool extension_instance_gpa(struct loader_instance *ptr_instance, const char *name, void **addr);
+
+// Extension interception for vkCreateInstance function, so we can properly
+// detect and enable any instance extension information for extensions we know
+// about.
+void extensions_create_instance(struct loader_instance *ptr_instance, const VkInstanceCreateInfo *pCreateInfo);
+
+// Extension interception for vkGetDeviceProcAddr function, so we can return
+// an appropriate terminator if this is one of those few device commands requiring
+// a terminator.
+PFN_vkVoidFunction get_extension_device_proc_terminator(struct loader_device *dev, const char *pName);
+
+// Dispatch table properly filled in with appropriate terminators for the
+// supported extensions.
+extern const VkLayerInstanceDispatchTable instance_disp;
+
+// Array of extension strings for instance extensions we support.
+extern const char *const LOADER_INSTANCE_EXTENSIONS[];
+
+VKAPI_ATTR bool VKAPI_CALL loader_icd_init_entries(struct loader_icd_term *icd_term, VkInstance inst,
+                                                   const PFN_vkGetInstanceProcAddr fp_gipa);
+
+// Init Device function pointer dispatch table with core commands
+VKAPI_ATTR void VKAPI_CALL loader_init_device_dispatch_table(struct loader_dev_dispatch_table *dev_table, PFN_vkGetDeviceProcAddr gpa,
+                                                             VkDevice dev);
+
+// Init Device function pointer dispatch table with extension commands
+VKAPI_ATTR void VKAPI_CALL loader_init_device_extension_dispatch_table(struct loader_dev_dispatch_table *dev_table,
+                                                                       PFN_vkGetInstanceProcAddr gipa,
+                                                                       PFN_vkGetDeviceProcAddr gdpa,
+                                                                       VkInstance inst,
+                                                                       VkDevice dev);
+
+// Init Instance function pointer dispatch table with core commands
+VKAPI_ATTR void VKAPI_CALL loader_init_instance_core_dispatch_table(VkLayerInstanceDispatchTable *table, PFN_vkGetInstanceProcAddr gpa,
+                                                                    VkInstance inst);
+
+// Init Instance function pointer dispatch table with extension commands
+VKAPI_ATTR void VKAPI_CALL loader_init_instance_extension_dispatch_table(VkLayerInstanceDispatchTable *table, PFN_vkGetInstanceProcAddr gpa,
+                                                                         VkInstance inst);
+
+// Device command lookup function
+VKAPI_ATTR void* VKAPI_CALL loader_lookup_device_dispatch_table(const VkLayerDispatchTable *table, const char *name);
+
+// Instance command lookup function
+VKAPI_ATTR void* VKAPI_CALL loader_lookup_instance_dispatch_table(const VkLayerInstanceDispatchTable *table, const char *name,
+                                                                  bool *found_name);
+
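+// Illustrative use (hypothetical caller, not generated code): found_name lets a resolver tell an
+// unknown name apart from a known instance command that has no pointer available:
+//
+//     bool found = false;
+//     void *proc = loader_lookup_instance_dispatch_table(&instance_disp, name, &found);
+//     if (!found)      { /* not an instance command the loader knows about */ }
+//     else if (!proc)  { /* known command, but unavailable (e.g. platform-gated) */ }
+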
+// Loader core instance terminators
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateInstance(
+    const VkInstanceCreateInfo*                 pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkInstance*                                 pInstance);
+VKAPI_ATTR void VKAPI_CALL terminator_DestroyInstance(
+    VkInstance                                  instance,
+    const VkAllocationCallbacks*                pAllocator);
+VKAPI_ATTR VkResult VKAPI_CALL terminator_EnumeratePhysicalDevices(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceCount,
+    VkPhysicalDevice*                           pPhysicalDevices);
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceFeatures(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures*                   pFeatures);
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties*                         pFormatProperties);
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageCreateFlags                          flags,
+    VkImageFormatProperties*                    pImageFormatProperties);
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties*                 pProperties);
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceQueueFamilyProperties(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties*                    pQueueFamilyProperties);
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceMemoryProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties*           pMemoryProperties);
+VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL terminator_GetInstanceProcAddr(
+    VkInstance                                  instance,
+    const char*                                 pName);
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateDevice(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDeviceCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDevice*                                   pDevice);
+VKAPI_ATTR VkResult VKAPI_CALL terminator_EnumerateInstanceExtensionProperties(
+    const VkEnumerateInstanceExtensionPropertiesChain* chain,
+    const char*                                 pLayerName,
+    uint32_t*                                   pPropertyCount,
+    VkExtensionProperties*                      pProperties);
+VKAPI_ATTR VkResult VKAPI_CALL terminator_EnumerateDeviceExtensionProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const char*                                 pLayerName,
+    uint32_t*                                   pPropertyCount,
+    VkExtensionProperties*                      pProperties);
+VKAPI_ATTR VkResult VKAPI_CALL terminator_EnumerateInstanceLayerProperties(
+    const VkEnumerateInstanceLayerPropertiesChain* chain,
+    uint32_t*                                   pPropertyCount,
+    VkLayerProperties*                          pProperties);
+VKAPI_ATTR VkResult VKAPI_CALL terminator_EnumerateDeviceLayerProperties(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkLayerProperties*                          pProperties);
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceSparseImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkSampleCountFlagBits                       samples,
+    VkImageUsageFlags                           usage,
+    VkImageTiling                               tiling,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties*              pProperties);
+VKAPI_ATTR VkResult VKAPI_CALL terminator_EnumerateInstanceVersion(
+    const VkEnumerateInstanceVersionChain* chain,
+    uint32_t*                                   pApiVersion);
+VKAPI_ATTR VkResult VKAPI_CALL terminator_EnumeratePhysicalDeviceGroups(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties);
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceFeatures2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures2*                  pFeatures);
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties2*                pProperties);
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties2*                        pFormatProperties);
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceImageFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceImageFormatInfo2*     pImageFormatInfo,
+    VkImageFormatProperties2*                   pImageFormatProperties);
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceQueueFamilyProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties2*                   pQueueFamilyProperties);
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceMemoryProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties2*          pMemoryProperties);
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceSparseImageFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties2*             pProperties);
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceExternalBufferProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalBufferInfo*   pExternalBufferInfo,
+    VkExternalBufferProperties*                 pExternalBufferProperties);
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceExternalFenceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalFenceInfo*    pExternalFenceInfo,
+    VkExternalFenceProperties*                  pExternalFenceProperties);
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceExternalSemaphoreProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
+    VkExternalSemaphoreProperties*              pExternalSemaphoreProperties);
+
+// ICD function pointer dispatch table
+struct loader_icd_term_dispatch {
+
+    // ---- Core 1_0 commands
+    PFN_vkCreateInstance CreateInstance;
+    PFN_vkDestroyInstance DestroyInstance;
+    PFN_vkEnumeratePhysicalDevices EnumeratePhysicalDevices;
+    PFN_vkGetPhysicalDeviceFeatures GetPhysicalDeviceFeatures;
+    PFN_vkGetPhysicalDeviceFormatProperties GetPhysicalDeviceFormatProperties;
+    PFN_vkGetPhysicalDeviceImageFormatProperties GetPhysicalDeviceImageFormatProperties;
+    PFN_vkGetPhysicalDeviceProperties GetPhysicalDeviceProperties;
+    PFN_vkGetPhysicalDeviceQueueFamilyProperties GetPhysicalDeviceQueueFamilyProperties;
+    PFN_vkGetPhysicalDeviceMemoryProperties GetPhysicalDeviceMemoryProperties;
+    PFN_vkGetDeviceProcAddr GetDeviceProcAddr;
+    PFN_vkCreateDevice CreateDevice;
+    PFN_vkEnumerateInstanceExtensionProperties EnumerateInstanceExtensionProperties;
+    PFN_vkEnumerateDeviceExtensionProperties EnumerateDeviceExtensionProperties;
+    PFN_vkEnumerateInstanceLayerProperties EnumerateInstanceLayerProperties;
+    PFN_vkGetPhysicalDeviceSparseImageFormatProperties GetPhysicalDeviceSparseImageFormatProperties;
+
+    // ---- Core 1_1 commands
+    PFN_vkEnumerateInstanceVersion EnumerateInstanceVersion;
+    PFN_vkEnumeratePhysicalDeviceGroups EnumeratePhysicalDeviceGroups;
+    PFN_vkGetPhysicalDeviceFeatures2 GetPhysicalDeviceFeatures2;
+    PFN_vkGetPhysicalDeviceProperties2 GetPhysicalDeviceProperties2;
+    PFN_vkGetPhysicalDeviceFormatProperties2 GetPhysicalDeviceFormatProperties2;
+    PFN_vkGetPhysicalDeviceImageFormatProperties2 GetPhysicalDeviceImageFormatProperties2;
+    PFN_vkGetPhysicalDeviceQueueFamilyProperties2 GetPhysicalDeviceQueueFamilyProperties2;
+    PFN_vkGetPhysicalDeviceMemoryProperties2 GetPhysicalDeviceMemoryProperties2;
+    PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 GetPhysicalDeviceSparseImageFormatProperties2;
+    PFN_vkGetPhysicalDeviceExternalBufferProperties GetPhysicalDeviceExternalBufferProperties;
+    PFN_vkGetPhysicalDeviceExternalFenceProperties GetPhysicalDeviceExternalFenceProperties;
+    PFN_vkGetPhysicalDeviceExternalSemaphoreProperties GetPhysicalDeviceExternalSemaphoreProperties;
+
+    // ---- VK_KHR_surface extension commands
+    PFN_vkDestroySurfaceKHR DestroySurfaceKHR;
+    PFN_vkGetPhysicalDeviceSurfaceSupportKHR GetPhysicalDeviceSurfaceSupportKHR;
+    PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR GetPhysicalDeviceSurfaceCapabilitiesKHR;
+    PFN_vkGetPhysicalDeviceSurfaceFormatsKHR GetPhysicalDeviceSurfaceFormatsKHR;
+    PFN_vkGetPhysicalDeviceSurfacePresentModesKHR GetPhysicalDeviceSurfacePresentModesKHR;
+
+    // ---- VK_KHR_swapchain extension commands
+    PFN_vkCreateSwapchainKHR CreateSwapchainKHR;
+    PFN_vkGetDeviceGroupSurfacePresentModesKHR GetDeviceGroupSurfacePresentModesKHR;
+    PFN_vkGetPhysicalDevicePresentRectanglesKHR GetPhysicalDevicePresentRectanglesKHR;
+
+    // ---- VK_KHR_display extension commands
+    PFN_vkGetPhysicalDeviceDisplayPropertiesKHR GetPhysicalDeviceDisplayPropertiesKHR;
+    PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR GetPhysicalDeviceDisplayPlanePropertiesKHR;
+    PFN_vkGetDisplayPlaneSupportedDisplaysKHR GetDisplayPlaneSupportedDisplaysKHR;
+    PFN_vkGetDisplayModePropertiesKHR GetDisplayModePropertiesKHR;
+    PFN_vkCreateDisplayModeKHR CreateDisplayModeKHR;
+    PFN_vkGetDisplayPlaneCapabilitiesKHR GetDisplayPlaneCapabilitiesKHR;
+    PFN_vkCreateDisplayPlaneSurfaceKHR CreateDisplayPlaneSurfaceKHR;
+
+    // ---- VK_KHR_display_swapchain extension commands
+    PFN_vkCreateSharedSwapchainsKHR CreateSharedSwapchainsKHR;
+
+    // ---- VK_KHR_xlib_surface extension commands
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    PFN_vkCreateXlibSurfaceKHR CreateXlibSurfaceKHR;
+#endif // VK_USE_PLATFORM_XLIB_KHR
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR GetPhysicalDeviceXlibPresentationSupportKHR;
+#endif // VK_USE_PLATFORM_XLIB_KHR
+
+    // ---- VK_KHR_xcb_surface extension commands
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    PFN_vkCreateXcbSurfaceKHR CreateXcbSurfaceKHR;
+#endif // VK_USE_PLATFORM_XCB_KHR
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR GetPhysicalDeviceXcbPresentationSupportKHR;
+#endif // VK_USE_PLATFORM_XCB_KHR
+
+    // ---- VK_KHR_wayland_surface extension commands
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    PFN_vkCreateWaylandSurfaceKHR CreateWaylandSurfaceKHR;
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR GetPhysicalDeviceWaylandPresentationSupportKHR;
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+
+    // ---- VK_KHR_android_surface extension commands
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    PFN_vkCreateAndroidSurfaceKHR CreateAndroidSurfaceKHR;
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+    // ---- VK_KHR_win32_surface extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkCreateWin32SurfaceKHR CreateWin32SurfaceKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR GetPhysicalDeviceWin32PresentationSupportKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_KHR_get_physical_device_properties2 extension commands
+    PFN_vkGetPhysicalDeviceFeatures2KHR GetPhysicalDeviceFeatures2KHR;
+    PFN_vkGetPhysicalDeviceProperties2KHR GetPhysicalDeviceProperties2KHR;
+    PFN_vkGetPhysicalDeviceFormatProperties2KHR GetPhysicalDeviceFormatProperties2KHR;
+    PFN_vkGetPhysicalDeviceImageFormatProperties2KHR GetPhysicalDeviceImageFormatProperties2KHR;
+    PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR GetPhysicalDeviceQueueFamilyProperties2KHR;
+    PFN_vkGetPhysicalDeviceMemoryProperties2KHR GetPhysicalDeviceMemoryProperties2KHR;
+    PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR GetPhysicalDeviceSparseImageFormatProperties2KHR;
+
+    // ---- VK_KHR_device_group_creation extension commands
+    PFN_vkEnumeratePhysicalDeviceGroupsKHR EnumeratePhysicalDeviceGroupsKHR;
+
+    // ---- VK_KHR_external_memory_capabilities extension commands
+    PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR GetPhysicalDeviceExternalBufferPropertiesKHR;
+
+    // ---- VK_KHR_external_semaphore_capabilities extension commands
+    PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR GetPhysicalDeviceExternalSemaphorePropertiesKHR;
+
+    // ---- VK_KHR_external_fence_capabilities extension commands
+    PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR GetPhysicalDeviceExternalFencePropertiesKHR;
+
+    // ---- VK_KHR_performance_query extension commands
+    PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR;
+    PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR;
+
+    // ---- VK_KHR_get_surface_capabilities2 extension commands
+    PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR GetPhysicalDeviceSurfaceCapabilities2KHR;
+    PFN_vkGetPhysicalDeviceSurfaceFormats2KHR GetPhysicalDeviceSurfaceFormats2KHR;
+
+    // ---- VK_KHR_get_display_properties2 extension commands
+    PFN_vkGetPhysicalDeviceDisplayProperties2KHR GetPhysicalDeviceDisplayProperties2KHR;
+    PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR GetPhysicalDeviceDisplayPlaneProperties2KHR;
+    PFN_vkGetDisplayModeProperties2KHR GetDisplayModeProperties2KHR;
+    PFN_vkGetDisplayPlaneCapabilities2KHR GetDisplayPlaneCapabilities2KHR;
+
+    // ---- VK_EXT_debug_report extension commands
+    PFN_vkCreateDebugReportCallbackEXT CreateDebugReportCallbackEXT;
+    PFN_vkDestroyDebugReportCallbackEXT DestroyDebugReportCallbackEXT;
+    PFN_vkDebugReportMessageEXT DebugReportMessageEXT;
+
+    // ---- VK_EXT_debug_marker extension commands
+    PFN_vkDebugMarkerSetObjectTagEXT DebugMarkerSetObjectTagEXT;
+    PFN_vkDebugMarkerSetObjectNameEXT DebugMarkerSetObjectNameEXT;
+
+    // ---- VK_GGP_stream_descriptor_surface extension commands
+#ifdef VK_USE_PLATFORM_GGP
+    PFN_vkCreateStreamDescriptorSurfaceGGP CreateStreamDescriptorSurfaceGGP;
+#endif // VK_USE_PLATFORM_GGP
+
+    // ---- VK_NV_external_memory_capabilities extension commands
+    PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV GetPhysicalDeviceExternalImageFormatPropertiesNV;
+
+    // ---- VK_NN_vi_surface extension commands
+#ifdef VK_USE_PLATFORM_VI_NN
+    PFN_vkCreateViSurfaceNN CreateViSurfaceNN;
+#endif // VK_USE_PLATFORM_VI_NN
+
+    // ---- VK_NVX_device_generated_commands extension commands
+    PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX GetPhysicalDeviceGeneratedCommandsPropertiesNVX;
+
+    // ---- VK_EXT_direct_mode_display extension commands
+    PFN_vkReleaseDisplayEXT ReleaseDisplayEXT;
+
+    // ---- VK_EXT_acquire_xlib_display extension commands
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    PFN_vkAcquireXlibDisplayEXT AcquireXlibDisplayEXT;
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    PFN_vkGetRandROutputDisplayEXT GetRandROutputDisplayEXT;
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+    // ---- VK_EXT_display_surface_counter extension commands
+    PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT GetPhysicalDeviceSurfaceCapabilities2EXT;
+
+    // ---- VK_MVK_ios_surface extension commands
+#ifdef VK_USE_PLATFORM_IOS_MVK
+    PFN_vkCreateIOSSurfaceMVK CreateIOSSurfaceMVK;
+#endif // VK_USE_PLATFORM_IOS_MVK
+
+    // ---- VK_MVK_macos_surface extension commands
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+    PFN_vkCreateMacOSSurfaceMVK CreateMacOSSurfaceMVK;
+#endif // VK_USE_PLATFORM_MACOS_MVK
+
+    // ---- VK_EXT_debug_utils extension commands
+    PFN_vkSetDebugUtilsObjectNameEXT SetDebugUtilsObjectNameEXT;
+    PFN_vkSetDebugUtilsObjectTagEXT SetDebugUtilsObjectTagEXT;
+    PFN_vkCreateDebugUtilsMessengerEXT CreateDebugUtilsMessengerEXT;
+    PFN_vkDestroyDebugUtilsMessengerEXT DestroyDebugUtilsMessengerEXT;
+    PFN_vkSubmitDebugUtilsMessageEXT SubmitDebugUtilsMessageEXT;
+
+    // ---- VK_EXT_sample_locations extension commands
+    PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT GetPhysicalDeviceMultisamplePropertiesEXT;
+
+    // ---- VK_EXT_calibrated_timestamps extension commands
+    PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT GetPhysicalDeviceCalibrateableTimeDomainsEXT;
+
+    // ---- VK_FUCHSIA_imagepipe_surface extension commands
+#ifdef VK_USE_PLATFORM_FUCHSIA
+    PFN_vkCreateImagePipeSurfaceFUCHSIA CreateImagePipeSurfaceFUCHSIA;
+#endif // VK_USE_PLATFORM_FUCHSIA
+
+    // ---- VK_EXT_metal_surface extension commands
+#ifdef VK_USE_PLATFORM_METAL_EXT
+    PFN_vkCreateMetalSurfaceEXT CreateMetalSurfaceEXT;
+#endif // VK_USE_PLATFORM_METAL_EXT
+
+    // ---- VK_NV_cooperative_matrix extension commands
+    PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV GetPhysicalDeviceCooperativeMatrixPropertiesNV;
+
+    // ---- VK_NV_coverage_reduction_mode extension commands
+    PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV;
+
+    // ---- VK_EXT_full_screen_exclusive extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT GetPhysicalDeviceSurfacePresentModes2EXT;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetDeviceGroupSurfacePresentModes2EXT GetDeviceGroupSurfacePresentModes2EXT;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_EXT_headless_surface extension commands
+    PFN_vkCreateHeadlessSurfaceEXT CreateHeadlessSurfaceEXT;
+};
+
+union loader_instance_extension_enables {
+    struct {
+        uint8_t khr_get_physical_device_properties2 : 1;
+        uint8_t khr_device_group_creation : 1;
+        uint8_t khr_external_memory_capabilities : 1;
+        uint8_t khr_external_semaphore_capabilities : 1;
+        uint8_t khr_external_fence_capabilities : 1;
+        uint8_t khr_get_surface_capabilities2 : 1;
+        uint8_t ext_debug_report : 1;
+        uint8_t ggp_stream_descriptor_surface : 1;
+        uint8_t nv_external_memory_capabilities : 1;
+        uint8_t nn_vi_surface : 1;
+        uint8_t ext_direct_mode_display : 1;
+        uint8_t ext_acquire_xlib_display : 1;
+        uint8_t ext_display_surface_counter : 1;
+        uint8_t ext_debug_utils : 1;
+        uint8_t fuchsia_imagepipe_surface : 1;
+    };
+    uint64_t padding[4];
+};
+
+
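Context note (not part of the imported patch): the dispatch table and extension-enable union above are the data the loader fills in per ICD. A minimal, hypothetical sketch of the usual fill pattern follows; the struct, function, and the icd_gpa parameter are illustrative names only, not loader internals, and only core Vulkan types are used.

    #include <vulkan/vulkan.h>
    #include <stddef.h>

    /* Illustrative only: resolve a few entry points from an ICD's
     * vkGetInstanceProcAddr; entries the ICD does not export stay NULL. */
    struct example_icd_dispatch {
        PFN_vkGetPhysicalDeviceProperties GetPhysicalDeviceProperties;
        PFN_vkGetPhysicalDeviceFeatures GetPhysicalDeviceFeatures;
        PFN_vkEnumerateDeviceExtensionProperties EnumerateDeviceExtensionProperties;
    };

    static void example_fill_dispatch(struct example_icd_dispatch *table,
                                      PFN_vkGetInstanceProcAddr icd_gpa,
                                      VkInstance instance) {
        table->GetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)
            icd_gpa(instance, "vkGetPhysicalDeviceProperties");
        table->GetPhysicalDeviceFeatures = (PFN_vkGetPhysicalDeviceFeatures)
            icd_gpa(instance, "vkGetPhysicalDeviceFeatures");
        table->EnumerateDeviceExtensionProperties = (PFN_vkEnumerateDeviceExtensionProperties)
            icd_gpa(instance, "vkEnumerateDeviceExtensionProperties");
    }
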
diff --git a/src/third_party/vulkan-loader/src/loader/generated/vk_object_types.h b/src/third_party/vulkan-loader/src/loader/generated/vk_object_types.h
new file mode 100644
index 0000000..7fdf77a
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/generated/vk_object_types.h
@@ -0,0 +1,383 @@
+// *** THIS FILE IS GENERATED - DO NOT EDIT ***
+// See helper_file_generator.py for modifications
+
+
+/***************************************************************************
+ *
+ * Copyright (c) 2015-2017 The Khronos Group Inc.
+ * Copyright (c) 2015-2017 Valve Corporation
+ * Copyright (c) 2015-2017 LunarG, Inc.
+ * Copyright (c) 2015-2017 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
+ * Author: Tobin Ehlis <tobine@google.com>
+ * Author: Chris Forbes <chrisforbes@google.com>
+ * Author: John Zulauf <jzulauf@lunarg.com>
+ *
+ ****************************************************************************/
+
+
+#pragma once
+
+#include <vulkan/vulkan.h>
+
+// Object Type enum for validation layer internal object handling
+typedef enum VulkanObjectType {
+    kVulkanObjectTypeUnknown = 0,
+    kVulkanObjectTypeInstance = 1,
+    kVulkanObjectTypePhysicalDevice = 2,
+    kVulkanObjectTypeDevice = 3,
+    kVulkanObjectTypeQueue = 4,
+    kVulkanObjectTypeSemaphore = 5,
+    kVulkanObjectTypeCommandBuffer = 6,
+    kVulkanObjectTypeFence = 7,
+    kVulkanObjectTypeDeviceMemory = 8,
+    kVulkanObjectTypeBuffer = 9,
+    kVulkanObjectTypeImage = 10,
+    kVulkanObjectTypeEvent = 11,
+    kVulkanObjectTypeQueryPool = 12,
+    kVulkanObjectTypeBufferView = 13,
+    kVulkanObjectTypeImageView = 14,
+    kVulkanObjectTypeShaderModule = 15,
+    kVulkanObjectTypePipelineCache = 16,
+    kVulkanObjectTypePipelineLayout = 17,
+    kVulkanObjectTypeRenderPass = 18,
+    kVulkanObjectTypePipeline = 19,
+    kVulkanObjectTypeDescriptorSetLayout = 20,
+    kVulkanObjectTypeSampler = 21,
+    kVulkanObjectTypeDescriptorPool = 22,
+    kVulkanObjectTypeDescriptorSet = 23,
+    kVulkanObjectTypeFramebuffer = 24,
+    kVulkanObjectTypeCommandPool = 25,
+    kVulkanObjectTypeSamplerYcbcrConversion = 26,
+    kVulkanObjectTypeDescriptorUpdateTemplate = 27,
+    kVulkanObjectTypeSurfaceKHR = 28,
+    kVulkanObjectTypeSwapchainKHR = 29,
+    kVulkanObjectTypeDisplayKHR = 30,
+    kVulkanObjectTypeDisplayModeKHR = 31,
+    kVulkanObjectTypeDebugReportCallbackEXT = 32,
+    kVulkanObjectTypeObjectTableNVX = 33,
+    kVulkanObjectTypeIndirectCommandsLayoutNVX = 34,
+    kVulkanObjectTypeDebugUtilsMessengerEXT = 35,
+    kVulkanObjectTypeValidationCacheEXT = 36,
+    kVulkanObjectTypeAccelerationStructureNV = 37,
+    kVulkanObjectTypePerformanceConfigurationINTEL = 38,
+    kVulkanObjectTypeMax = 39,
+    // Aliases for backwards compatibility of "promoted" types
+    kVulkanObjectTypeDescriptorUpdateTemplateKHR = kVulkanObjectTypeDescriptorUpdateTemplate,
+    kVulkanObjectTypeSamplerYcbcrConversionKHR = kVulkanObjectTypeSamplerYcbcrConversion,
+} VulkanObjectType;
+
+// Array of object name strings for OBJECT_TYPE enum conversion
+static const char * const object_string[kVulkanObjectTypeMax] = {
+    "Unknown",
+    "Instance",
+    "PhysicalDevice",
+    "Device",
+    "Queue",
+    "Semaphore",
+    "CommandBuffer",
+    "Fence",
+    "DeviceMemory",
+    "Buffer",
+    "Image",
+    "Event",
+    "QueryPool",
+    "BufferView",
+    "ImageView",
+    "ShaderModule",
+    "PipelineCache",
+    "PipelineLayout",
+    "RenderPass",
+    "Pipeline",
+    "DescriptorSetLayout",
+    "Sampler",
+    "DescriptorPool",
+    "DescriptorSet",
+    "Framebuffer",
+    "CommandPool",
+    "SamplerYcbcrConversion",
+    "DescriptorUpdateTemplate",
+    "SurfaceKHR",
+    "SwapchainKHR",
+    "DisplayKHR",
+    "DisplayModeKHR",
+    "DebugReportCallbackEXT",
+    "ObjectTableNVX",
+    "IndirectCommandsLayoutNVX",
+    "DebugUtilsMessengerEXT",
+    "ValidationCacheEXT",
+    "AccelerationStructureNV",
+    "PerformanceConfigurationINTEL",
+};
+
+// Helper array to get Vulkan VK_EXT_debug_report object type enum from the internal layers version
+const VkDebugReportObjectTypeEXT get_debug_report_enum[] = {
+    VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, // kVulkanObjectTypeUnknown
+    VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,   // kVulkanObjectTypeInstance
+    VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,   // kVulkanObjectTypePhysicalDevice
+    VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,   // kVulkanObjectTypeDevice
+    VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,   // kVulkanObjectTypeQueue
+    VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,   // kVulkanObjectTypeSemaphore
+    VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,   // kVulkanObjectTypeCommandBuffer
+    VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,   // kVulkanObjectTypeFence
+    VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,   // kVulkanObjectTypeDeviceMemory
+    VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,   // kVulkanObjectTypeBuffer
+    VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,   // kVulkanObjectTypeImage
+    VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT,   // kVulkanObjectTypeEvent
+    VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT,   // kVulkanObjectTypeQueryPool
+    VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT,   // kVulkanObjectTypeBufferView
+    VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,   // kVulkanObjectTypeImageView
+    VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,   // kVulkanObjectTypeShaderModule
+    VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT,   // kVulkanObjectTypePipelineCache
+    VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,   // kVulkanObjectTypePipelineLayout
+    VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,   // kVulkanObjectTypeRenderPass
+    VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,   // kVulkanObjectTypePipeline
+    VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,   // kVulkanObjectTypeDescriptorSetLayout
+    VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT,   // kVulkanObjectTypeSampler
+    VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,   // kVulkanObjectTypeDescriptorPool
+    VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,   // kVulkanObjectTypeDescriptorSet
+    VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT,   // kVulkanObjectTypeFramebuffer
+    VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT,   // kVulkanObjectTypeCommandPool
+    VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT,   // kVulkanObjectTypeSamplerYcbcrConversion
+    VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT,   // kVulkanObjectTypeDescriptorUpdateTemplate
+    VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT,   // kVulkanObjectTypeSurfaceKHR
+    VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,   // kVulkanObjectTypeSwapchainKHR
+    VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT,   // kVulkanObjectTypeDisplayKHR
+    VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT,   // kVulkanObjectTypeDisplayModeKHR
+    VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT,   // kVulkanObjectTypeDebugReportCallbackEXT
+    VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT,   // kVulkanObjectTypeObjectTableNVX
+    VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT,   // kVulkanObjectTypeIndirectCommandsLayoutNVX
+    VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,   // kVulkanObjectTypeDebugUtilsMessengerEXT
+    VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT,   // kVulkanObjectTypeValidationCacheEXT
+    VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,   // kVulkanObjectTypeAccelerationStructureNV
+    VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,   // kVulkanObjectTypePerformanceConfigurationINTEL
+};
+
+// Helper array to get Official Vulkan VkObjectType enum from the internal layers version
+const VkObjectType get_object_type_enum[] = {
+    VK_OBJECT_TYPE_UNKNOWN, // kVulkanObjectTypeUnknown
+    VK_OBJECT_TYPE_INSTANCE,   // kVulkanObjectTypeInstance
+    VK_OBJECT_TYPE_PHYSICAL_DEVICE,   // kVulkanObjectTypePhysicalDevice
+    VK_OBJECT_TYPE_DEVICE,   // kVulkanObjectTypeDevice
+    VK_OBJECT_TYPE_QUEUE,   // kVulkanObjectTypeQueue
+    VK_OBJECT_TYPE_SEMAPHORE,   // kVulkanObjectTypeSemaphore
+    VK_OBJECT_TYPE_COMMAND_BUFFER,   // kVulkanObjectTypeCommandBuffer
+    VK_OBJECT_TYPE_FENCE,   // kVulkanObjectTypeFence
+    VK_OBJECT_TYPE_DEVICE_MEMORY,   // kVulkanObjectTypeDeviceMemory
+    VK_OBJECT_TYPE_BUFFER,   // kVulkanObjectTypeBuffer
+    VK_OBJECT_TYPE_IMAGE,   // kVulkanObjectTypeImage
+    VK_OBJECT_TYPE_EVENT,   // kVulkanObjectTypeEvent
+    VK_OBJECT_TYPE_QUERY_POOL,   // kVulkanObjectTypeQueryPool
+    VK_OBJECT_TYPE_BUFFER_VIEW,   // kVulkanObjectTypeBufferView
+    VK_OBJECT_TYPE_IMAGE_VIEW,   // kVulkanObjectTypeImageView
+    VK_OBJECT_TYPE_SHADER_MODULE,   // kVulkanObjectTypeShaderModule
+    VK_OBJECT_TYPE_PIPELINE_CACHE,   // kVulkanObjectTypePipelineCache
+    VK_OBJECT_TYPE_PIPELINE_LAYOUT,   // kVulkanObjectTypePipelineLayout
+    VK_OBJECT_TYPE_RENDER_PASS,   // kVulkanObjectTypeRenderPass
+    VK_OBJECT_TYPE_PIPELINE,   // kVulkanObjectTypePipeline
+    VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT,   // kVulkanObjectTypeDescriptorSetLayout
+    VK_OBJECT_TYPE_SAMPLER,   // kVulkanObjectTypeSampler
+    VK_OBJECT_TYPE_DESCRIPTOR_POOL,   // kVulkanObjectTypeDescriptorPool
+    VK_OBJECT_TYPE_DESCRIPTOR_SET,   // kVulkanObjectTypeDescriptorSet
+    VK_OBJECT_TYPE_FRAMEBUFFER,   // kVulkanObjectTypeFramebuffer
+    VK_OBJECT_TYPE_COMMAND_POOL,   // kVulkanObjectTypeCommandPool
+    VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,   // kVulkanObjectTypeSamplerYcbcrConversion
+    VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE,   // kVulkanObjectTypeDescriptorUpdateTemplate
+    VK_OBJECT_TYPE_SURFACE_KHR,   // kVulkanObjectTypeSurfaceKHR
+    VK_OBJECT_TYPE_SWAPCHAIN_KHR,   // kVulkanObjectTypeSwapchainKHR
+    VK_OBJECT_TYPE_DISPLAY_KHR,   // kVulkanObjectTypeDisplayKHR
+    VK_OBJECT_TYPE_DISPLAY_MODE_KHR,   // kVulkanObjectTypeDisplayModeKHR
+    VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT,   // kVulkanObjectTypeDebugReportCallbackEXT
+    VK_OBJECT_TYPE_OBJECT_TABLE_NVX,   // kVulkanObjectTypeObjectTableNVX
+    VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX,   // kVulkanObjectTypeIndirectCommandsLayoutNVX
+    VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT,   // kVulkanObjectTypeDebugUtilsMessengerEXT
+    VK_OBJECT_TYPE_VALIDATION_CACHE_EXT,   // kVulkanObjectTypeValidationCacheEXT
+    VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV,   // kVulkanObjectTypeAccelerationStructureNV
+    VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL,   // kVulkanObjectTypePerformanceConfigurationINTEL
+};
+
+// Helper function to convert from VkDebugReportObjectTypeEXT to VkObjectType
+static inline VkObjectType convertDebugReportObjectToCoreObject(VkDebugReportObjectTypeEXT debug_report_obj){
+    if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT) {
+        return VK_OBJECT_TYPE_UNKNOWN;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT) {
+        return VK_OBJECT_TYPE_UNKNOWN;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT) {
+        return VK_OBJECT_TYPE_INSTANCE;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT) {
+        return VK_OBJECT_TYPE_PHYSICAL_DEVICE;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT) {
+        return VK_OBJECT_TYPE_DEVICE;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT) {
+        return VK_OBJECT_TYPE_QUEUE;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT) {
+        return VK_OBJECT_TYPE_SEMAPHORE;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT) {
+        return VK_OBJECT_TYPE_COMMAND_BUFFER;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT) {
+        return VK_OBJECT_TYPE_FENCE;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT) {
+        return VK_OBJECT_TYPE_DEVICE_MEMORY;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT) {
+        return VK_OBJECT_TYPE_BUFFER;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT) {
+        return VK_OBJECT_TYPE_IMAGE;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT) {
+        return VK_OBJECT_TYPE_EVENT;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT) {
+        return VK_OBJECT_TYPE_QUERY_POOL;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT) {
+        return VK_OBJECT_TYPE_BUFFER_VIEW;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT) {
+        return VK_OBJECT_TYPE_IMAGE_VIEW;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT) {
+        return VK_OBJECT_TYPE_SHADER_MODULE;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT) {
+        return VK_OBJECT_TYPE_PIPELINE_CACHE;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT) {
+        return VK_OBJECT_TYPE_PIPELINE_LAYOUT;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT) {
+        return VK_OBJECT_TYPE_RENDER_PASS;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT) {
+        return VK_OBJECT_TYPE_PIPELINE;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT) {
+        return VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT) {
+        return VK_OBJECT_TYPE_SAMPLER;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT) {
+        return VK_OBJECT_TYPE_DESCRIPTOR_POOL;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT) {
+        return VK_OBJECT_TYPE_DESCRIPTOR_SET;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT) {
+        return VK_OBJECT_TYPE_FRAMEBUFFER;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT) {
+        return VK_OBJECT_TYPE_COMMAND_POOL;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT) {
+        return VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT) {
+        return VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT) {
+        return VK_OBJECT_TYPE_SURFACE_KHR;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT) {
+        return VK_OBJECT_TYPE_SWAPCHAIN_KHR;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT) {
+        return VK_OBJECT_TYPE_DISPLAY_KHR;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT) {
+        return VK_OBJECT_TYPE_DISPLAY_MODE_KHR;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT) {
+        return VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT) {
+        return VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT) {
+        return VK_OBJECT_TYPE_OBJECT_TABLE_NVX;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT) {
+        return VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT) {
+        return VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT) {
+        return VK_OBJECT_TYPE_VALIDATION_CACHE_EXT;
+    } else if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT) {
+        return VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV;
+    }
+    return VK_OBJECT_TYPE_UNKNOWN;
+}
+
+// Helper function to convert from VkObjectType to VkDebugReportObjectTypeEXT
+static inline VkDebugReportObjectTypeEXT convertCoreObjectToDebugReportObject(VkObjectType core_report_obj){
+    if (core_report_obj == VK_OBJECT_TYPE_UNKNOWN) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_UNKNOWN) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_INSTANCE) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_PHYSICAL_DEVICE) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_DEVICE) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_QUEUE) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_SEMAPHORE) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_COMMAND_BUFFER) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_FENCE) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_DEVICE_MEMORY) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_BUFFER) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_IMAGE) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_EVENT) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_QUERY_POOL) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_BUFFER_VIEW) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_IMAGE_VIEW) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_SHADER_MODULE) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_PIPELINE_CACHE) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_PIPELINE_LAYOUT) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_RENDER_PASS) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_PIPELINE) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_SAMPLER) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_DESCRIPTOR_POOL) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_DESCRIPTOR_SET) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_FRAMEBUFFER) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_COMMAND_POOL) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_SURFACE_KHR) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_SWAPCHAIN_KHR) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_DISPLAY_KHR) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_DISPLAY_MODE_KHR) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_OBJECT_TABLE_NVX) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_VALIDATION_CACHE_EXT) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT;
+    } else if (core_report_obj == VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV) {
+        return VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT;
+    }
+    return VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT;
+}
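Context note (not part of the imported patch): a short, hypothetical usage sketch of the lookup arrays defined in the generated header above. It assumes vk_object_types.h is included; example_print_object_type is an illustrative name.

    #include <vulkan/vulkan.h>
    #include <stdio.h>
    /* Assumes the generated vk_object_types.h above is included. */

    /* Map one internal object type to both public enums and print it. */
    static void example_print_object_type(VulkanObjectType internal_type) {
        VkObjectType core = get_object_type_enum[internal_type];
        VkDebugReportObjectTypeEXT dbg = get_debug_report_enum[internal_type];
        printf("%s -> VkObjectType %d, VkDebugReportObjectTypeEXT %d\n",
               object_string[internal_type], (int)core, (int)dbg);
    }

    /* Example: example_print_object_type(kVulkanObjectTypeBuffer);
     * prints "Buffer -> VkObjectType 9, VkDebugReportObjectTypeEXT 9". */
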
diff --git a/src/third_party/vulkan-loader/src/loader/gpa_helper.h b/src/third_party/vulkan-loader/src/loader/gpa_helper.h
new file mode 100644
index 0000000..e08898b
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/gpa_helper.h
@@ -0,0 +1,233 @@
+/*
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Valve Corporation
+ * Copyright (c) 2015 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Jon Ashburn <jon@lunarg.com>
+ */
+
+#include <string.h>
+#include "debug_utils.h"
+#include "wsi.h"
+
+static inline void *trampolineGetProcAddr(struct loader_instance *inst, const char *funcName) {
+    // Don't include or check global functions
+    if (!strcmp(funcName, "vkGetInstanceProcAddr")) return vkGetInstanceProcAddr;
+    if (!strcmp(funcName, "vkDestroyInstance")) return vkDestroyInstance;
+    if (!strcmp(funcName, "vkEnumeratePhysicalDevices")) return vkEnumeratePhysicalDevices;
+    if (!strcmp(funcName, "vkGetPhysicalDeviceFeatures")) return vkGetPhysicalDeviceFeatures;
+    if (!strcmp(funcName, "vkGetPhysicalDeviceFormatProperties")) return vkGetPhysicalDeviceFormatProperties;
+    if (!strcmp(funcName, "vkGetPhysicalDeviceImageFormatProperties")) return vkGetPhysicalDeviceImageFormatProperties;
+    if (!strcmp(funcName, "vkGetPhysicalDeviceSparseImageFormatProperties")) return vkGetPhysicalDeviceSparseImageFormatProperties;
+    if (!strcmp(funcName, "vkGetPhysicalDeviceProperties")) return vkGetPhysicalDeviceProperties;
+    if (!strcmp(funcName, "vkGetPhysicalDeviceQueueFamilyProperties")) return vkGetPhysicalDeviceQueueFamilyProperties;
+    if (!strcmp(funcName, "vkGetPhysicalDeviceMemoryProperties")) return vkGetPhysicalDeviceMemoryProperties;
+    if (!strcmp(funcName, "vkEnumerateDeviceLayerProperties")) return vkEnumerateDeviceLayerProperties;
+    if (!strcmp(funcName, "vkEnumerateDeviceExtensionProperties")) return vkEnumerateDeviceExtensionProperties;
+    if (!strcmp(funcName, "vkCreateDevice")) return vkCreateDevice;
+    if (!strcmp(funcName, "vkGetDeviceProcAddr")) return vkGetDeviceProcAddr;
+    if (!strcmp(funcName, "vkDestroyDevice")) return vkDestroyDevice;
+    if (!strcmp(funcName, "vkGetDeviceQueue")) return vkGetDeviceQueue;
+    if (!strcmp(funcName, "vkQueueSubmit")) return vkQueueSubmit;
+    if (!strcmp(funcName, "vkQueueWaitIdle")) return vkQueueWaitIdle;
+    if (!strcmp(funcName, "vkDeviceWaitIdle")) return vkDeviceWaitIdle;
+    if (!strcmp(funcName, "vkAllocateMemory")) return vkAllocateMemory;
+    if (!strcmp(funcName, "vkFreeMemory")) return vkFreeMemory;
+    if (!strcmp(funcName, "vkMapMemory")) return vkMapMemory;
+    if (!strcmp(funcName, "vkUnmapMemory")) return vkUnmapMemory;
+    if (!strcmp(funcName, "vkFlushMappedMemoryRanges")) return vkFlushMappedMemoryRanges;
+    if (!strcmp(funcName, "vkInvalidateMappedMemoryRanges")) return vkInvalidateMappedMemoryRanges;
+    if (!strcmp(funcName, "vkGetDeviceMemoryCommitment")) return vkGetDeviceMemoryCommitment;
+    if (!strcmp(funcName, "vkGetImageSparseMemoryRequirements")) return vkGetImageSparseMemoryRequirements;
+    if (!strcmp(funcName, "vkGetImageMemoryRequirements")) return vkGetImageMemoryRequirements;
+    if (!strcmp(funcName, "vkGetBufferMemoryRequirements")) return vkGetBufferMemoryRequirements;
+    if (!strcmp(funcName, "vkBindImageMemory")) return vkBindImageMemory;
+    if (!strcmp(funcName, "vkBindBufferMemory")) return vkBindBufferMemory;
+    if (!strcmp(funcName, "vkQueueBindSparse")) return vkQueueBindSparse;
+    if (!strcmp(funcName, "vkCreateFence")) return vkCreateFence;
+    if (!strcmp(funcName, "vkDestroyFence")) return vkDestroyFence;
+    if (!strcmp(funcName, "vkGetFenceStatus")) return vkGetFenceStatus;
+    if (!strcmp(funcName, "vkResetFences")) return vkResetFences;
+    if (!strcmp(funcName, "vkWaitForFences")) return vkWaitForFences;
+    if (!strcmp(funcName, "vkCreateSemaphore")) return vkCreateSemaphore;
+    if (!strcmp(funcName, "vkDestroySemaphore")) return vkDestroySemaphore;
+    if (!strcmp(funcName, "vkCreateEvent")) return vkCreateEvent;
+    if (!strcmp(funcName, "vkDestroyEvent")) return vkDestroyEvent;
+    if (!strcmp(funcName, "vkGetEventStatus")) return vkGetEventStatus;
+    if (!strcmp(funcName, "vkSetEvent")) return vkSetEvent;
+    if (!strcmp(funcName, "vkResetEvent")) return vkResetEvent;
+    if (!strcmp(funcName, "vkCreateQueryPool")) return vkCreateQueryPool;
+    if (!strcmp(funcName, "vkDestroyQueryPool")) return vkDestroyQueryPool;
+    if (!strcmp(funcName, "vkGetQueryPoolResults")) return vkGetQueryPoolResults;
+    if (!strcmp(funcName, "vkCreateBuffer")) return vkCreateBuffer;
+    if (!strcmp(funcName, "vkDestroyBuffer")) return vkDestroyBuffer;
+    if (!strcmp(funcName, "vkCreateBufferView")) return vkCreateBufferView;
+    if (!strcmp(funcName, "vkDestroyBufferView")) return vkDestroyBufferView;
+    if (!strcmp(funcName, "vkCreateImage")) return vkCreateImage;
+    if (!strcmp(funcName, "vkDestroyImage")) return vkDestroyImage;
+    if (!strcmp(funcName, "vkGetImageSubresourceLayout")) return vkGetImageSubresourceLayout;
+    if (!strcmp(funcName, "vkCreateImageView")) return vkCreateImageView;
+    if (!strcmp(funcName, "vkDestroyImageView")) return vkDestroyImageView;
+    if (!strcmp(funcName, "vkCreateShaderModule")) return vkCreateShaderModule;
+    if (!strcmp(funcName, "vkDestroyShaderModule")) return vkDestroyShaderModule;
+    if (!strcmp(funcName, "vkCreatePipelineCache")) return vkCreatePipelineCache;
+    if (!strcmp(funcName, "vkDestroyPipelineCache")) return vkDestroyPipelineCache;
+    if (!strcmp(funcName, "vkGetPipelineCacheData")) return vkGetPipelineCacheData;
+    if (!strcmp(funcName, "vkMergePipelineCaches")) return vkMergePipelineCaches;
+    if (!strcmp(funcName, "vkCreateGraphicsPipelines")) return vkCreateGraphicsPipelines;
+    if (!strcmp(funcName, "vkCreateComputePipelines")) return vkCreateComputePipelines;
+    if (!strcmp(funcName, "vkDestroyPipeline")) return vkDestroyPipeline;
+    if (!strcmp(funcName, "vkCreatePipelineLayout")) return vkCreatePipelineLayout;
+    if (!strcmp(funcName, "vkDestroyPipelineLayout")) return vkDestroyPipelineLayout;
+    if (!strcmp(funcName, "vkCreateSampler")) return vkCreateSampler;
+    if (!strcmp(funcName, "vkDestroySampler")) return vkDestroySampler;
+    if (!strcmp(funcName, "vkCreateDescriptorSetLayout")) return vkCreateDescriptorSetLayout;
+    if (!strcmp(funcName, "vkDestroyDescriptorSetLayout")) return vkDestroyDescriptorSetLayout;
+    if (!strcmp(funcName, "vkCreateDescriptorPool")) return vkCreateDescriptorPool;
+    if (!strcmp(funcName, "vkDestroyDescriptorPool")) return vkDestroyDescriptorPool;
+    if (!strcmp(funcName, "vkResetDescriptorPool")) return vkResetDescriptorPool;
+    if (!strcmp(funcName, "vkAllocateDescriptorSets")) return vkAllocateDescriptorSets;
+    if (!strcmp(funcName, "vkFreeDescriptorSets")) return vkFreeDescriptorSets;
+    if (!strcmp(funcName, "vkUpdateDescriptorSets")) return vkUpdateDescriptorSets;
+    if (!strcmp(funcName, "vkCreateFramebuffer")) return vkCreateFramebuffer;
+    if (!strcmp(funcName, "vkDestroyFramebuffer")) return vkDestroyFramebuffer;
+    if (!strcmp(funcName, "vkCreateRenderPass")) return vkCreateRenderPass;
+    if (!strcmp(funcName, "vkDestroyRenderPass")) return vkDestroyRenderPass;
+    if (!strcmp(funcName, "vkGetRenderAreaGranularity")) return vkGetRenderAreaGranularity;
+    if (!strcmp(funcName, "vkCreateCommandPool")) return vkCreateCommandPool;
+    if (!strcmp(funcName, "vkDestroyCommandPool")) return vkDestroyCommandPool;
+    if (!strcmp(funcName, "vkResetCommandPool")) return vkResetCommandPool;
+    if (!strcmp(funcName, "vkAllocateCommandBuffers")) return vkAllocateCommandBuffers;
+    if (!strcmp(funcName, "vkFreeCommandBuffers")) return vkFreeCommandBuffers;
+    if (!strcmp(funcName, "vkBeginCommandBuffer")) return vkBeginCommandBuffer;
+    if (!strcmp(funcName, "vkEndCommandBuffer")) return vkEndCommandBuffer;
+    if (!strcmp(funcName, "vkResetCommandBuffer")) return vkResetCommandBuffer;
+    if (!strcmp(funcName, "vkCmdBindPipeline")) return vkCmdBindPipeline;
+    if (!strcmp(funcName, "vkCmdBindDescriptorSets")) return vkCmdBindDescriptorSets;
+    if (!strcmp(funcName, "vkCmdBindVertexBuffers")) return vkCmdBindVertexBuffers;
+    if (!strcmp(funcName, "vkCmdBindIndexBuffer")) return vkCmdBindIndexBuffer;
+    if (!strcmp(funcName, "vkCmdSetViewport")) return vkCmdSetViewport;
+    if (!strcmp(funcName, "vkCmdSetScissor")) return vkCmdSetScissor;
+    if (!strcmp(funcName, "vkCmdSetLineWidth")) return vkCmdSetLineWidth;
+    if (!strcmp(funcName, "vkCmdSetDepthBias")) return vkCmdSetDepthBias;
+    if (!strcmp(funcName, "vkCmdSetBlendConstants")) return vkCmdSetBlendConstants;
+    if (!strcmp(funcName, "vkCmdSetDepthBounds")) return vkCmdSetDepthBounds;
+    if (!strcmp(funcName, "vkCmdSetStencilCompareMask")) return vkCmdSetStencilCompareMask;
+    if (!strcmp(funcName, "vkCmdSetStencilWriteMask")) return vkCmdSetStencilWriteMask;
+    if (!strcmp(funcName, "vkCmdSetStencilReference")) return vkCmdSetStencilReference;
+    if (!strcmp(funcName, "vkCmdDraw")) return vkCmdDraw;
+    if (!strcmp(funcName, "vkCmdDrawIndexed")) return vkCmdDrawIndexed;
+    if (!strcmp(funcName, "vkCmdDrawIndirect")) return vkCmdDrawIndirect;
+    if (!strcmp(funcName, "vkCmdDrawIndexedIndirect")) return vkCmdDrawIndexedIndirect;
+    if (!strcmp(funcName, "vkCmdDispatch")) return vkCmdDispatch;
+    if (!strcmp(funcName, "vkCmdDispatchIndirect")) return vkCmdDispatchIndirect;
+    if (!strcmp(funcName, "vkCmdCopyBuffer")) return vkCmdCopyBuffer;
+    if (!strcmp(funcName, "vkCmdCopyImage")) return vkCmdCopyImage;
+    if (!strcmp(funcName, "vkCmdBlitImage")) return vkCmdBlitImage;
+    if (!strcmp(funcName, "vkCmdCopyBufferToImage")) return vkCmdCopyBufferToImage;
+    if (!strcmp(funcName, "vkCmdCopyImageToBuffer")) return vkCmdCopyImageToBuffer;
+    if (!strcmp(funcName, "vkCmdUpdateBuffer")) return vkCmdUpdateBuffer;
+    if (!strcmp(funcName, "vkCmdFillBuffer")) return vkCmdFillBuffer;
+    if (!strcmp(funcName, "vkCmdClearColorImage")) return vkCmdClearColorImage;
+    if (!strcmp(funcName, "vkCmdClearDepthStencilImage")) return vkCmdClearDepthStencilImage;
+    if (!strcmp(funcName, "vkCmdClearAttachments")) return vkCmdClearAttachments;
+    if (!strcmp(funcName, "vkCmdResolveImage")) return vkCmdResolveImage;
+    if (!strcmp(funcName, "vkCmdSetEvent")) return vkCmdSetEvent;
+    if (!strcmp(funcName, "vkCmdResetEvent")) return vkCmdResetEvent;
+    if (!strcmp(funcName, "vkCmdWaitEvents")) return vkCmdWaitEvents;
+    if (!strcmp(funcName, "vkCmdPipelineBarrier")) return vkCmdPipelineBarrier;
+    if (!strcmp(funcName, "vkCmdBeginQuery")) return vkCmdBeginQuery;
+    if (!strcmp(funcName, "vkCmdEndQuery")) return vkCmdEndQuery;
+    if (!strcmp(funcName, "vkCmdResetQueryPool")) return vkCmdResetQueryPool;
+    if (!strcmp(funcName, "vkCmdWriteTimestamp")) return vkCmdWriteTimestamp;
+    if (!strcmp(funcName, "vkCmdCopyQueryPoolResults")) return vkCmdCopyQueryPoolResults;
+    if (!strcmp(funcName, "vkCmdPushConstants")) return vkCmdPushConstants;
+    if (!strcmp(funcName, "vkCmdBeginRenderPass")) return vkCmdBeginRenderPass;
+    if (!strcmp(funcName, "vkCmdNextSubpass")) return vkCmdNextSubpass;
+    if (!strcmp(funcName, "vkCmdEndRenderPass")) return vkCmdEndRenderPass;
+    if (!strcmp(funcName, "vkCmdExecuteCommands")) return vkCmdExecuteCommands;
+
+    // Core 1.1 functions
+    if (!strcmp(funcName, "vkEnumeratePhysicalDeviceGroups")) return vkEnumeratePhysicalDeviceGroups;
+    if (!strcmp(funcName, "vkGetPhysicalDeviceFeatures2")) return vkGetPhysicalDeviceFeatures2;
+    if (!strcmp(funcName, "vkGetPhysicalDeviceProperties2")) return vkGetPhysicalDeviceProperties2;
+    if (!strcmp(funcName, "vkGetPhysicalDeviceFormatProperties2")) return vkGetPhysicalDeviceFormatProperties2;
+    if (!strcmp(funcName, "vkGetPhysicalDeviceImageFormatProperties2")) return vkGetPhysicalDeviceImageFormatProperties2;
+    if (!strcmp(funcName, "vkGetPhysicalDeviceQueueFamilyProperties2")) return vkGetPhysicalDeviceQueueFamilyProperties2;
+    if (!strcmp(funcName, "vkGetPhysicalDeviceMemoryProperties2")) return vkGetPhysicalDeviceMemoryProperties2;
+    if (!strcmp(funcName, "vkGetPhysicalDeviceSparseImageFormatProperties2"))
+        return vkGetPhysicalDeviceSparseImageFormatProperties2;
+    if (!strcmp(funcName, "vkGetPhysicalDeviceExternalBufferProperties")) return vkGetPhysicalDeviceExternalBufferProperties;
+    if (!strcmp(funcName, "vkGetPhysicalDeviceExternalSemaphoreProperties")) return vkGetPhysicalDeviceExternalSemaphoreProperties;
+    if (!strcmp(funcName, "vkGetPhysicalDeviceExternalFenceProperties")) return vkGetPhysicalDeviceExternalFenceProperties;
+    if (!strcmp(funcName, "vkBindBufferMemory2")) return vkBindBufferMemory2;
+    if (!strcmp(funcName, "vkBindImageMemory2")) return vkBindImageMemory2;
+    if (!strcmp(funcName, "vkGetDeviceGroupPeerMemoryFeatures")) return vkGetDeviceGroupPeerMemoryFeatures;
+    if (!strcmp(funcName, "vkCmdSetDeviceMask")) return vkCmdSetDeviceMask;
+    if (!strcmp(funcName, "vkCmdDispatchBase")) return vkCmdDispatchBase;
+    if (!strcmp(funcName, "vkGetImageMemoryRequirements2")) return vkGetImageMemoryRequirements2;
+    if (!strcmp(funcName, "vkTrimCommandPool")) return vkTrimCommandPool;
+    if (!strcmp(funcName, "vkGetDeviceQueue2")) return vkGetDeviceQueue2;
+    if (!strcmp(funcName, "vkCreateSamplerYcbcrConversion")) return vkCreateSamplerYcbcrConversion;
+    if (!strcmp(funcName, "vkDestroySamplerYcbcrConversion")) return vkDestroySamplerYcbcrConversion;
+    if (!strcmp(funcName, "vkGetDescriptorSetLayoutSupport")) return vkGetDescriptorSetLayoutSupport;
+    if (!strcmp(funcName, "vkCreateDescriptorUpdateTemplate")) return vkCreateDescriptorUpdateTemplate;
+    if (!strcmp(funcName, "vkDestroyDescriptorUpdateTemplate")) return vkDestroyDescriptorUpdateTemplate;
+    if (!strcmp(funcName, "vkUpdateDescriptorSetWithTemplate")) return vkUpdateDescriptorSetWithTemplate;
+    if (!strcmp(funcName, "vkGetImageSparseMemoryRequirements2")) return vkGetImageSparseMemoryRequirements2;
+    if (!strcmp(funcName, "vkGetBufferMemoryRequirements2")) return vkGetBufferMemoryRequirements2;
+
+    // Instance extensions
+    void *addr;
+    if (debug_utils_InstanceGpa(inst, funcName, &addr)) return addr;
+
+    if (wsi_swapchain_instance_gpa(inst, funcName, &addr)) return addr;
+
+    if (extension_instance_gpa(inst, funcName, &addr)) return addr;
+
+    // Unknown physical device extensions
+    if (loader_phys_dev_ext_gpa(inst, funcName, true, &addr, NULL)) return addr;
+
+    // Unknown device extensions
+    addr = loader_dev_ext_gpa(inst, funcName);
+    return addr;
+}
+
+static inline void *globalGetProcAddr(const char *name) {
+    if (!name || name[0] != 'v' || name[1] != 'k') return NULL;
+
+    name += 2;
+    if (!strcmp(name, "CreateInstance")) return vkCreateInstance;
+    if (!strcmp(name, "EnumerateInstanceExtensionProperties")) return vkEnumerateInstanceExtensionProperties;
+    if (!strcmp(name, "EnumerateInstanceLayerProperties")) return vkEnumerateInstanceLayerProperties;
+    if (!strcmp(name, "EnumerateInstanceVersion")) return vkEnumerateInstanceVersion;
+
+    return NULL;
+}
+
+static inline void *loader_non_passthrough_gdpa(const char *name) {
+    if (!name || name[0] != 'v' || name[1] != 'k') return NULL;
+
+    name += 2;
+
+    if (!strcmp(name, "GetDeviceProcAddr")) return vkGetDeviceProcAddr;
+    if (!strcmp(name, "DestroyDevice")) return vkDestroyDevice;
+    if (!strcmp(name, "GetDeviceQueue")) return vkGetDeviceQueue;
+    if (!strcmp(name, "GetDeviceQueue2")) return vkGetDeviceQueue2;
+    if (!strcmp(name, "AllocateCommandBuffers")) return vkAllocateCommandBuffers;
+
+    return NULL;
+}
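Context note (not part of the imported patch): globalGetProcAddr above first rejects any name that does not start with "vk", then compares the remainder against the handful of global commands. A self-contained, hypothetical sketch of that prefix-strip pattern; example_is_global_command is an illustrative name.

    #include <string.h>
    #include <stdio.h>

    /* Return nonzero if name is one of the Vulkan global commands. */
    static int example_is_global_command(const char *name) {
        if (!name || name[0] != 'v' || name[1] != 'k') return 0;
        name += 2;  /* skip the "vk" prefix, as the helper above does */
        return !strcmp(name, "CreateInstance") ||
               !strcmp(name, "EnumerateInstanceExtensionProperties") ||
               !strcmp(name, "EnumerateInstanceLayerProperties") ||
               !strcmp(name, "EnumerateInstanceVersion");
    }

    int main(void) {
        printf("%d\n", example_is_global_command("vkCreateInstance"));  /* 1 */
        printf("%d\n", example_is_global_command("vkCreateDevice"));    /* 0 */
        return 0;
    }
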
diff --git a/src/third_party/vulkan-loader/src/loader/images/function_device_chain.png b/src/third_party/vulkan-loader/src/loader/images/function_device_chain.png
new file mode 100644
index 0000000..62ba7a3
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/images/function_device_chain.png
Binary files differ
diff --git a/src/third_party/vulkan-loader/src/loader/images/function_device_chain_white.png b/src/third_party/vulkan-loader/src/loader/images/function_device_chain_white.png
new file mode 100644
index 0000000..b6107a8
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/images/function_device_chain_white.png
Binary files differ
diff --git a/src/third_party/vulkan-loader/src/loader/images/function_instance_chain.png b/src/third_party/vulkan-loader/src/loader/images/function_instance_chain.png
new file mode 100644
index 0000000..b79fd96
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/images/function_instance_chain.png
Binary files differ
diff --git a/src/third_party/vulkan-loader/src/loader/images/function_instance_chain_white.png b/src/third_party/vulkan-loader/src/loader/images/function_instance_chain_white.png
new file mode 100644
index 0000000..48705a2
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/images/function_instance_chain_white.png
Binary files differ
diff --git a/src/third_party/vulkan-loader/src/loader/images/high_level_loader.png b/src/third_party/vulkan-loader/src/loader/images/high_level_loader.png
new file mode 100644
index 0000000..391abb7
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/images/high_level_loader.png
Binary files differ
diff --git a/src/third_party/vulkan-loader/src/loader/images/high_level_loader_white.png b/src/third_party/vulkan-loader/src/loader/images/high_level_loader_white.png
new file mode 100644
index 0000000..c83cade
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/images/high_level_loader_white.png
Binary files differ
diff --git a/src/third_party/vulkan-loader/src/loader/images/loader_device_chain_app.png b/src/third_party/vulkan-loader/src/loader/images/loader_device_chain_app.png
new file mode 100644
index 0000000..764fcd6
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/images/loader_device_chain_app.png
Binary files differ
diff --git a/src/third_party/vulkan-loader/src/loader/images/loader_device_chain_app_white.png b/src/third_party/vulkan-loader/src/loader/images/loader_device_chain_app_white.png
new file mode 100644
index 0000000..8bd28ee
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/images/loader_device_chain_app_white.png
Binary files differ
diff --git a/src/third_party/vulkan-loader/src/loader/images/loader_device_chain_loader.png b/src/third_party/vulkan-loader/src/loader/images/loader_device_chain_loader.png
new file mode 100644
index 0000000..d226a3e
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/images/loader_device_chain_loader.png
Binary files differ
diff --git a/src/third_party/vulkan-loader/src/loader/images/loader_device_chain_loader_white.png b/src/third_party/vulkan-loader/src/loader/images/loader_device_chain_loader_white.png
new file mode 100644
index 0000000..993a555
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/images/loader_device_chain_loader_white.png
Binary files differ
diff --git a/src/third_party/vulkan-loader/src/loader/images/loader_instance_chain.png b/src/third_party/vulkan-loader/src/loader/images/loader_instance_chain.png
new file mode 100644
index 0000000..8bc06d0
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/images/loader_instance_chain.png
Binary files differ
diff --git a/src/third_party/vulkan-loader/src/loader/images/loader_instance_chain_white.png b/src/third_party/vulkan-loader/src/loader/images/loader_instance_chain_white.png
new file mode 100644
index 0000000..4c3c066
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/images/loader_instance_chain_white.png
Binary files differ
diff --git a/src/third_party/vulkan-loader/src/loader/images/loader_layer_order.png b/src/third_party/vulkan-loader/src/loader/images/loader_layer_order.png
new file mode 100644
index 0000000..80b9ea4
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/images/loader_layer_order.png
Binary files differ
diff --git a/src/third_party/vulkan-loader/src/loader/images/loader_layer_order_white.png b/src/third_party/vulkan-loader/src/loader/images/loader_layer_order_white.png
new file mode 100644
index 0000000..ed25dc9
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/images/loader_layer_order_white.png
Binary files differ
diff --git a/src/third_party/vulkan-loader/src/loader/loader.aps b/src/third_party/vulkan-loader/src/loader/loader.aps
new file mode 100644
index 0000000..1ba7f21
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/loader.aps
Binary files differ
diff --git a/src/third_party/vulkan-loader/src/loader/loader.c b/src/third_party/vulkan-loader/src/loader/loader.c
new file mode 100644
index 0000000..1f084c8
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/loader.c
@@ -0,0 +1,8138 @@
+/*
+ *
+ * Copyright (c) 2014-2019 The Khronos Group Inc.
+ * Copyright (c) 2014-2019 Valve Corporation
+ * Copyright (c) 2014-2019 LunarG, Inc.
+ * Copyright (C) 2015 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+
+ *
+ * Author: Jon Ashburn <jon@lunarg.com>
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Mark Young <marky@lunarg.com>
+ * Author: Lenny Komow <lenny@lunarg.com>
+ *
+ */
+
+// This needs to be defined first, or else we'll get redefinitions on NTSTATUS values
+#ifdef _WIN32
+#define UMDF_USING_NTSTATUS
+#include <ntstatus.h>
+#endif
+
+#ifndef _GNU_SOURCE
+#define _GNU_SOURCE
+#endif
+#include <inttypes.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <stdarg.h>
+#include <stdbool.h>
+#include <string.h>
+#include <stddef.h>
+
+#if defined(__APPLE__)
+#include <CoreFoundation/CoreFoundation.h>
+#include <sys/param.h>
+#endif
+
+// Time related functions
+#include <time.h>
+
+#include <sys/types.h>
+#if defined(_WIN32)
+#include "dirent_on_windows.h"
+#else  // _WIN32
+#include <dirent.h>
+#endif  // _WIN32
+#include "vk_loader_platform.h"
+#include "loader.h"
+#include "gpa_helper.h"
+#include "debug_utils.h"
+#include "wsi.h"
+#include "vulkan/vk_icd.h"
+#include "cJSON.h"
+#include "murmurhash.h"
+
+#if defined(_WIN32)
+#include <cfgmgr32.h>
+#include <initguid.h>
+#include <devpkey.h>
+#include <winternl.h>
+#include "adapters.h"
+#include "dxgi_loader.h"
+#endif
+
+// This is a CMake generated file with #defines for any functions/includes
+// that it found present.  This is currently necessary to properly determine
+// if secure_getenv or __secure_getenv are present
+#if !defined(VULKAN_NON_CMAKE_BUILD)
+#include "loader_cmake_config.h"
+#endif  // !defined(VULKAN_NON_CMAKE_BUILD)
+
+// Generated file containing all the extension data
+#include "vk_loader_extensions.c"
+
+// Override layer information
+#define VK_OVERRIDE_LAYER_NAME "VK_LAYER_LUNARG_override"
+
+struct loader_struct loader = {0};
+// TLS for instance for alloc/free callbacks
+THREAD_LOCAL_DECL struct loader_instance *tls_instance;
+
+static size_t loader_platform_combine_path(char *dest, size_t len, ...);
+
+struct loader_phys_dev_per_icd {
+    uint32_t count;
+    VkPhysicalDevice *phys_devs;
+    struct loader_icd_term *this_icd_term;
+};
+
+enum loader_debug {
+    LOADER_INFO_BIT = 0x01,
+    LOADER_WARN_BIT = 0x02,
+    LOADER_PERF_BIT = 0x04,
+    LOADER_ERROR_BIT = 0x08,
+    LOADER_DEBUG_BIT = 0x10,
+};
+
+uint32_t g_loader_debug = 0;
+uint32_t g_loader_log_msgs = 0;
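+
+// Editorial note (not part of the upstream source): the LOADER_*_BIT values are meant to be OR'd
+// together into masks such as g_loader_debug / g_loader_log_msgs above. A hypothetical setting
+// that enables only warning and error output would be:
+//
+//     g_loader_log_msgs = LOADER_WARN_BIT | LOADER_ERROR_BIT;   // 0x02 | 0x08 == 0x0A
+//
+// loader_log() below tests "msg_type & g_loader_log_msgs" to decide whether a message is printed.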
+
+enum loader_data_files_type {
+    LOADER_DATA_FILE_MANIFEST_ICD = 0,
+    LOADER_DATA_FILE_MANIFEST_LAYER,
+    LOADER_DATA_FILE_NUM_TYPES  // Not a real field, used for possible loop terminator
+};
+
+// Thread-safety lock for accessing global data structures such as "loader".
+// All entrypoints on the instance chain need to be locked except GPA;
+// additionally, CreateDevice and DestroyDevice need to be locked.
+loader_platform_thread_mutex loader_lock;
+loader_platform_thread_mutex loader_json_lock;
+
+LOADER_PLATFORM_THREAD_ONCE_DECLARATION(once_init);
+
+// This loader supports Vulkan API version 1.1
+uint32_t loader_major_version = 1;
+uint32_t loader_minor_version = 1;
+
+void *loader_instance_heap_alloc(const struct loader_instance *instance, size_t size, VkSystemAllocationScope alloc_scope) {
+    void *pMemory = NULL;
+#if (DEBUG_DISABLE_APP_ALLOCATORS == 1)
+    {
+#else
+    if (instance && instance->alloc_callbacks.pfnAllocation) {
+        // These are internal structures, so it's best to align everything to
+        // the largest unit size which is the size of a uint64_t.
+        pMemory = instance->alloc_callbacks.pfnAllocation(instance->alloc_callbacks.pUserData, size, sizeof(uint64_t), alloc_scope);
+    } else {
+#endif
+        pMemory = malloc(size);
+    }
+
+    return pMemory;
+}
+
+void loader_instance_heap_free(const struct loader_instance *instance, void *pMemory) {
+    if (pMemory != NULL) {
+#if (DEBUG_DISABLE_APP_ALLOCATORS == 1)
+        {
+#else
+        if (instance && instance->alloc_callbacks.pfnFree) {
+            instance->alloc_callbacks.pfnFree(instance->alloc_callbacks.pUserData, pMemory);
+        } else {
+#endif
+            free(pMemory);
+        }
+    }
+}
+
+void *loader_instance_heap_realloc(const struct loader_instance *instance, void *pMemory, size_t orig_size, size_t size,
+                                   VkSystemAllocationScope alloc_scope) {
+    void *pNewMem = NULL;
+    if (pMemory == NULL || orig_size == 0) {
+        pNewMem = loader_instance_heap_alloc(instance, size, alloc_scope);
+    } else if (size == 0) {
+        loader_instance_heap_free(instance, pMemory);
+#if (DEBUG_DISABLE_APP_ALLOCATORS == 1)
+#else
+    } else if (instance && instance->alloc_callbacks.pfnReallocation) {
+        // These are internal structures, so it's best to align everything to
+        // the largest unit size which is the size of a uint64_t.
+        pNewMem = instance->alloc_callbacks.pfnReallocation(instance->alloc_callbacks.pUserData, pMemory, size, sizeof(uint64_t),
+                                                            alloc_scope);
+#endif
+    } else {
+        pNewMem = realloc(pMemory, size);
+    }
+    return pNewMem;
+}
+
+void *loader_instance_tls_heap_alloc(size_t size) {
+    return loader_instance_heap_alloc(tls_instance, size, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
+}
+
+void loader_instance_tls_heap_free(void *pMemory) { loader_instance_heap_free(tls_instance, pMemory); }
+
+void *loader_device_heap_alloc(const struct loader_device *device, size_t size, VkSystemAllocationScope alloc_scope) {
+    void *pMemory = NULL;
+#if (DEBUG_DISABLE_APP_ALLOCATORS == 1)
+    {
+#else
+    if (device && device->alloc_callbacks.pfnAllocation) {
+        // These are internal structures, so it's best to align everything to
+        // the largest unit size which is the size of a uint64_t.
+        pMemory = device->alloc_callbacks.pfnAllocation(device->alloc_callbacks.pUserData, size, sizeof(uint64_t), alloc_scope);
+    } else {
+#endif
+        pMemory = malloc(size);
+    }
+    return pMemory;
+}
+
+void loader_device_heap_free(const struct loader_device *device, void *pMemory) {
+    if (pMemory != NULL) {
+#if (DEBUG_DISABLE_APP_ALLOCATORS == 1)
+        {
+#else
+        if (device && device->alloc_callbacks.pfnFree) {
+            device->alloc_callbacks.pfnFree(device->alloc_callbacks.pUserData, pMemory);
+        } else {
+#endif
+            free(pMemory);
+        }
+    }
+}
+
+void *loader_device_heap_realloc(const struct loader_device *device, void *pMemory, size_t orig_size, size_t size,
+                                 VkSystemAllocationScope alloc_scope) {
+    void *pNewMem = NULL;
+    if (pMemory == NULL || orig_size == 0) {
+        pNewMem = loader_device_heap_alloc(device, size, alloc_scope);
+    } else if (size == 0) {
+        loader_device_heap_free(device, pMemory);
+#if (DEBUG_DISABLE_APP_ALLOCATORS == 1)
+#else
+    } else if (device && device->alloc_callbacks.pfnReallocation) {
+        // These are internal structures, so it's best to align everything to
+        // the largest unit size which is the size of a uint64_t.
+        pNewMem = device->alloc_callbacks.pfnReallocation(device->alloc_callbacks.pUserData, pMemory, size, sizeof(uint64_t),
+                                                          alloc_scope);
+#endif
+    } else {
+        pNewMem = realloc(pMemory, size);
+    }
+    return pNewMem;
+}
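+
+// Editorial illustration (not part of the upstream source): the heap helpers above route through
+// the application's VkAllocationCallbacks when one was supplied at instance/device creation, and
+// fall back to malloc/realloc/free otherwise. A minimal sketch of such a callback set (all names
+// hypothetical; the alignment argument is ignored for brevity, which is acceptable here since the
+// loader only requests uint64_t alignment):
+//
+//     static VKAPI_ATTR void *VKAPI_CALL my_alloc(void *ud, size_t size, size_t align,
+//                                                 VkSystemAllocationScope scope) {
+//         (void)ud; (void)align; (void)scope;
+//         return malloc(size);
+//     }
+//     static VKAPI_ATTR void *VKAPI_CALL my_realloc(void *ud, void *orig, size_t size, size_t align,
+//                                                   VkSystemAllocationScope scope) {
+//         (void)ud; (void)align; (void)scope;
+//         return realloc(orig, size);
+//     }
+//     static VKAPI_ATTR void VKAPI_CALL my_free(void *ud, void *mem) { (void)ud; free(mem); }
+//
+//     VkAllocationCallbacks cb = {NULL, my_alloc, my_realloc, my_free, NULL, NULL};
+//
+// Passing &cb to vkCreateInstance makes alloc_callbacks.pfnAllocation et al. non-NULL, so the
+// branches above use the application allocator instead of the CRT heap.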
+
+// Environment variables
+#if defined(__linux__) || defined(__APPLE__)
+
+static inline bool IsHighIntegrity() {
+    return geteuid() != getuid() || getegid() != getgid();
+}
+
+static inline char *loader_getenv(const char *name, const struct loader_instance *inst) {
+    // No allocation of memory necessary for Linux, but we should at least touch
+    // the inst pointer to get rid of compiler warnings.
+    (void)inst;
+    return getenv(name);
+}
+
+static inline char *loader_secure_getenv(const char *name, const struct loader_instance *inst) {
+#if defined(__APPLE__)
+    // Apple does not appear to have a secure getenv implementation.
+    // The main difference between secure getenv and getenv is that secure getenv
+    // returns NULL if the process is being run with elevated privileges by a normal user.
+    // The idea is to prevent the reading of malicious environment variables by a process
+    // that can do damage.
+    // This algorithm is derived from glibc code that sets an internal
+    // variable (__libc_enable_secure) if the process is running under setuid or setgid.
+    return IsHighIntegrity() ? NULL : loader_getenv(name, inst);
+#else
+// Linux
+#if defined(HAVE_SECURE_GETENV) && !defined(USE_UNSAFE_FILE_SEARCH)
+    (void)inst;
+    return secure_getenv(name);
+#elif defined(HAVE___SECURE_GETENV) && !defined(USE_UNSAFE_FILE_SEARCH)
+    (void)inst;
+    return __secure_getenv(name);
+#else
+    return loader_getenv(name, inst);
+#endif
+#endif
+}
+
+static inline void loader_free_getenv(char *val, const struct loader_instance *inst) {
+    // No freeing of memory necessary for Linux, but we should at least touch
+    // the val and inst pointers to get rid of compiler warnings.
+    (void)val;
+    (void)inst;
+}
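+
+// Editorial illustration (not part of the upstream source): a hypothetical call site showing the
+// intended difference between the two lookups above. For variables that could redirect which files
+// the loader opens, the secure variant is used so a setuid/setgid (elevated) process ignores them:
+//
+//     char *trusted = loader_secure_getenv("SOME_PATH_OVERRIDE_VAR", inst);  // NULL when elevated
+//     char *info    = loader_getenv("SOME_INFORMATIONAL_VAR", inst);         // always consulted
+//     loader_free_getenv(trusted, inst);
+//     loader_free_getenv(info, inst);
+//
+// The variable names are placeholders; the real variables the loader reads are referenced
+// elsewhere in the loader sources.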
+
+#elif defined(WIN32)
+
+static inline bool IsHighIntegrity() {
+    HANDLE process_token;
+    if (OpenProcessToken(GetCurrentProcess(), TOKEN_QUERY | TOKEN_QUERY_SOURCE, &process_token)) {
+        // Maximum possible size of SID_AND_ATTRIBUTES is maximum size of a SID + size of attributes DWORD.
+        uint8_t mandatory_label_buffer[SECURITY_MAX_SID_SIZE + sizeof(DWORD)];
+        DWORD buffer_size;
+        if (GetTokenInformation(process_token, TokenIntegrityLevel, mandatory_label_buffer, sizeof(mandatory_label_buffer),
+            &buffer_size) != 0) {
+            const TOKEN_MANDATORY_LABEL *mandatory_label = (const TOKEN_MANDATORY_LABEL *)mandatory_label_buffer;
+            const DWORD sub_authority_count = *GetSidSubAuthorityCount(mandatory_label->Label.Sid);
+            const DWORD integrity_level = *GetSidSubAuthority(mandatory_label->Label.Sid, sub_authority_count - 1);
+
+            CloseHandle(process_token);
+            return integrity_level > SECURITY_MANDATORY_MEDIUM_RID;
+        }
+
+        CloseHandle(process_token);
+    }
+
+    return false;
+}
+
+static inline char *loader_getenv(const char *name, const struct loader_instance *inst) {
+    char *retVal;
+    DWORD valSize;
+
+    valSize = GetEnvironmentVariableA(name, NULL, 0);
+
+    // valSize DOES include the null terminator, so for any set variable it
+    // will always be at least 1. If it's 0, the variable wasn't set.
+    if (valSize == 0) return NULL;
+
+    // Allocate the space necessary for the registry entry
+    if (NULL != inst && NULL != inst->alloc_callbacks.pfnAllocation) {
+        retVal = (char *)inst->alloc_callbacks.pfnAllocation(inst->alloc_callbacks.pUserData, valSize, sizeof(char *),
+                                                             VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
+    } else {
+        retVal = (char *)malloc(valSize);
+    }
+
+    if (NULL != retVal) {
+        GetEnvironmentVariableA(name, retVal, valSize);
+    }
+
+    return retVal;
+}
+
+static inline char *loader_secure_getenv(const char *name, const struct loader_instance *inst) {
+#if !defined(USE_UNSAFE_FILE_SEARCH)
+    if (IsHighIntegrity()) {
+        return NULL;
+    }
+#endif
+
+    return loader_getenv(name, inst);
+}
+
+static inline void loader_free_getenv(char *val, const struct loader_instance *inst) {
+    if (NULL != inst && NULL != inst->alloc_callbacks.pfnFree) {
+        inst->alloc_callbacks.pfnFree(inst->alloc_callbacks.pUserData, val);
+    } else {
+        free((void *)val);
+    }
+}
+
+#else
+
+static inline char *loader_getenv(const char *name, const struct loader_instance *inst) {
+    // stub func
+    (void)inst;
+    (void)name;
+    return NULL;
+}
+static inline void loader_free_getenv(char *val, const struct loader_instance *inst) {
+    // stub func
+    (void)val;
+    (void)inst;
+}
+
+#endif
+
+void loader_log(const struct loader_instance *inst, VkFlags msg_type, int32_t msg_code, const char *format, ...) {
+    char msg[512];
+    char cmd_line_msg[512];
+    size_t cmd_line_size = sizeof(cmd_line_msg);
+    va_list ap;
+    int ret;
+
+    va_start(ap, format);
+    ret = vsnprintf(msg, sizeof(msg), format, ap);
+    if ((ret >= (int)sizeof(msg)) || ret < 0) {
+        msg[sizeof(msg) - 1] = '\0';
+    }
+    va_end(ap);
+
+    if (inst) {
+        VkDebugUtilsMessageSeverityFlagBitsEXT severity = 0;
+        VkDebugUtilsMessageTypeFlagsEXT type;
+        VkDebugUtilsMessengerCallbackDataEXT callback_data;
+        VkDebugUtilsObjectNameInfoEXT object_name;
+
+        if ((msg_type & LOADER_INFO_BIT) != 0) {
+            severity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT;
+        } else if ((msg_type & LOADER_WARN_BIT) != 0) {
+            severity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT;
+        } else if ((msg_type & LOADER_ERROR_BIT) != 0) {
+            severity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
+        } else if ((msg_type & LOADER_DEBUG_BIT) != 0) {
+            severity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT;
+        }
+
+        if ((msg_type & LOADER_PERF_BIT) != 0) {
+            type = VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;
+        } else {
+            type = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT;
+        }
+
+        callback_data.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT;
+        callback_data.pNext = NULL;
+        callback_data.flags = 0;
+        callback_data.pMessageIdName = "Loader Message";
+        callback_data.messageIdNumber = 0;
+        callback_data.pMessage = msg;
+        callback_data.queueLabelCount = 0;
+        callback_data.pQueueLabels = NULL;
+        callback_data.cmdBufLabelCount = 0;
+        callback_data.pCmdBufLabels = NULL;
+        callback_data.objectCount = 1;
+        callback_data.pObjects = &object_name;
+        object_name.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
+        object_name.pNext = NULL;
+        object_name.objectType = VK_OBJECT_TYPE_INSTANCE;
+        object_name.objectHandle = (uint64_t)(uintptr_t)inst;
+        object_name.pObjectName = NULL;
+
+        util_SubmitDebugUtilsMessageEXT(inst, severity, type, &callback_data);
+    }
+
+    if (!(msg_type & g_loader_log_msgs)) {
+        return;
+    }
+
+    cmd_line_msg[0] = '\0';
+    cmd_line_size -= 1;
+    size_t original_size = cmd_line_size;
+
+    if ((msg_type & LOADER_INFO_BIT) != 0) {
+        strncat(cmd_line_msg, "INFO", cmd_line_size);
+        cmd_line_size -= 4;
+    }
+    if ((msg_type & LOADER_WARN_BIT) != 0) {
+        if (cmd_line_size != original_size) {
+            strncat(cmd_line_msg, " | ", cmd_line_size);
+            cmd_line_size -= 3;
+        }
+        strncat(cmd_line_msg, "WARNING", cmd_line_size);
+        cmd_line_size -= 7;
+    }
+    if ((msg_type & LOADER_PERF_BIT) != 0) {
+        if (cmd_line_size != original_size) {
+            strncat(cmd_line_msg, " | ", cmd_line_size);
+            cmd_line_size -= 3;
+        }
+        strncat(cmd_line_msg, "PERF", cmd_line_size);
+        cmd_line_size -= 4;
+    }
+    if ((msg_type & LOADER_ERROR_BIT) != 0) {
+        if (cmd_line_size != original_size) {
+            strncat(cmd_line_msg, " | ", cmd_line_size);
+            cmd_line_size -= 3;
+        }
+        strncat(cmd_line_msg, "ERROR", cmd_line_size);
+        cmd_line_size -= 5;
+    }
+    if ((msg_type & LOADER_DEBUG_BIT) != 0) {
+        if (cmd_line_size != original_size) {
+            strncat(cmd_line_msg, " | ", cmd_line_size);
+            cmd_line_size -= 3;
+        }
+        strncat(cmd_line_msg, "DEBUG", cmd_line_size);
+        cmd_line_size -= 5;
+    }
+    if (cmd_line_size != original_size) {
+        strncat(cmd_line_msg, ": ", cmd_line_size);
+        cmd_line_size -= 2;
+    }
+
+    if (0 < cmd_line_size) {
+        // If the message is too long, trim it down
+        if (strlen(msg) > cmd_line_size) {
+            msg[cmd_line_size - 1] = '\0';
+        }
+        strncat(cmd_line_msg, msg, cmd_line_size);
+    } else {
+        // Shouldn't get here, but check to make sure we haven't already
+        // overrun the string boundary
+        assert(false);
+    }
+
+#if defined(WIN32)
+    OutputDebugString(cmd_line_msg);
+    OutputDebugString("\n");
+#endif
+
+    fputs(cmd_line_msg, stderr);
+    fputc('\n', stderr);
+}
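+
+// Editorial illustration (not part of the upstream source): loader_log() is typically invoked with
+// one of the VK_DEBUG_REPORT_*_BIT_EXT flags (whose numeric values line up with the LOADER_*_BIT
+// values declared near the top of this file) and a printf-style format, e.g.:
+//
+//     loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+//                "some_function: unexpected count %u for %s", count, name);
+//
+// The message is forwarded to any debug-utils messengers on the instance and, when the matching
+// bit is set in g_loader_log_msgs, echoed to stderr (plus OutputDebugString on Windows).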
+
+VKAPI_ATTR VkResult VKAPI_CALL vkSetInstanceDispatch(VkInstance instance, void *object) {
+    struct loader_instance *inst = loader_get_instance(instance);
+    if (!inst) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "vkSetInstanceDispatch: Can not retrieve Instance "
+                   "dispatch table.");
+        return VK_ERROR_INITIALIZATION_FAILED;
+    }
+    loader_set_dispatch(object, inst->disp);
+    return VK_SUCCESS;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL vkSetDeviceDispatch(VkDevice device, void *object) {
+    struct loader_device *dev;
+    struct loader_icd_term *icd_term = loader_get_icd_and_device(device, &dev, NULL);
+
+    if (NULL == icd_term) {
+        return VK_ERROR_INITIALIZATION_FAILED;
+    }
+    loader_set_dispatch(object, &dev->loader_dispatch);
+    return VK_SUCCESS;
+}
+
+#if defined(_WIN32)
+
+// Append the JSON path data to the list, allocating/growing the list if it's not large enough.
+// Returns true if the filename was appended to the reg_data list.
+// The caller should free *reg_data.
+static bool loaderAddJsonEntry(const struct loader_instance *inst,
+                               char **reg_data,    // list of JSON files
+                               PDWORD total_size,  // size of reg_data
+                               LPCSTR key_name,    // key name - used for debug prints - i.e. VulkanDriverName
+                               DWORD key_type,     // key data type
+                               LPSTR json_path,    // JSON string to add to the list reg_data
+                               DWORD json_size,    // size in bytes of json_path
+                               VkResult *result) {
+    // Check for and ignore duplicates.
+    if (*reg_data && strstr(*reg_data, json_path)) {
+        // Success. The json_path is already in the list.
+        return true;
+    }
+
+    if (NULL == *reg_data) {
+        *reg_data = loader_instance_heap_alloc(inst, *total_size, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+        if (NULL == *reg_data) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "loaderAddJsonEntry: Failed to allocate space for registry data for key %s", json_path);
+            *result = VK_ERROR_OUT_OF_HOST_MEMORY;
+            return false;
+        }
+        *reg_data[0] = '\0';
+    } else if (strlen(*reg_data) + json_size + 1 > *total_size) {
+        void *new_ptr =
+            loader_instance_heap_realloc(inst, *reg_data, *total_size, *total_size * 2, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+        if (NULL == new_ptr) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "loaderAddJsonEntry: Failed to reallocate space for registry value of size %d for key %s", *total_size * 2,
+                       json_path);
+            *result = VK_ERROR_OUT_OF_HOST_MEMORY;
+            return false;
+        }
+        *reg_data = new_ptr;
+        *total_size *= 2;
+    }
+
+    for (char *curr_filename = json_path; curr_filename[0] != '\0'; curr_filename += strlen(curr_filename) + 1) {
+        if (strlen(*reg_data) == 0) {
+            (void)snprintf(*reg_data, json_size + 1, "%s", curr_filename);
+        } else {
+            (void)snprintf(*reg_data + strlen(*reg_data), json_size + 2, "%c%s", PATH_SEPARATOR, curr_filename);
+        }
+        loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0, "%s: Located json file \"%s\" from PnP registry: %s", __FUNCTION__,
+                   curr_filename, key_name);
+
+        if (key_type == REG_SZ) {
+            break;
+        }
+    }
+    return true;
+}
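+
+// Editorial illustration (not part of the upstream source): after two successful loaderAddJsonEntry
+// calls, reg_data holds a single PATH_SEPARATOR-joined string (the paths below are made up):
+//
+//     "C:\\Drivers\\foo\\vk_foo.json;C:\\Drivers\\bar\\vk_bar.json"
+//
+// total_size starts at the caller's initial allocation and doubles whenever the next entry would
+// not fit, so the buffer only ever grows.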
+
+// Find the registry value (named VulkanDriverName/VulkanDriverNameWow) in the device's HKR key.
+//
+// This function looks for the manifest filename under the given device handle; the filename is
+// then appended to the reg_data list. The function returns true if a filename was appended to
+// reg_data. If an error occurs, *result is updated with the failure reason.
+bool loaderGetDeviceRegistryEntry(const struct loader_instance *inst, char **reg_data, PDWORD total_size, DEVINST dev_id,
+                                  LPCSTR value_name, VkResult *result) {
+    HKEY hkrKey = INVALID_HANDLE_VALUE;
+    DWORD requiredSize, data_type;
+    char *manifest_path = NULL;
+    bool found = false;
+
+    if (NULL == total_size || NULL == reg_data) {
+        *result = VK_ERROR_INITIALIZATION_FAILED;
+        return false;
+    }
+
+    CONFIGRET status = CM_Open_DevNode_Key(dev_id, KEY_QUERY_VALUE, 0, RegDisposition_OpenExisting, &hkrKey, CM_REGISTRY_SOFTWARE);
+    if (status != CR_SUCCESS) {
+        loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+            "loaderGetDeviceRegistryEntry: Failed to open registry key for DeviceID(%d)", dev_id);
+        *result = VK_ERROR_INITIALIZATION_FAILED;
+        return false;
+    }
+
+    // query value
+    LSTATUS ret = RegQueryValueEx(
+        hkrKey,
+        value_name,
+        NULL,
+        NULL,
+        NULL,
+        &requiredSize);
+
+    if (ret != ERROR_SUCCESS) {
+        if (ret == ERROR_FILE_NOT_FOUND) {
+            loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                "loaderGetDeviceRegistryEntry: Device ID(%d) Does not contain a value for \"%s\"", dev_id, value_name);
+        } else {
+            loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                "loaderGetDeviceRegistryEntry: DeviceID(%d) Failed to obtain %s size", dev_id, value_name);
+        }
+        goto out;
+    }
+
+    manifest_path = loader_instance_heap_alloc(inst, requiredSize, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+    if (manifest_path == NULL) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "loaderGetDeviceRegistryEntry: Failed to allocate space for DriverName.");
+        *result = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+
+    ret = RegQueryValueEx(
+        hkrKey,
+        value_name,
+        NULL,
+        &data_type,
+        (BYTE *)manifest_path,
+        &requiredSize
+    );
+
+    if (ret != ERROR_SUCCESS) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+            "loaderGetDeviceRegistryEntry: DeviceID(%d) Failed to obtain %s", dev_id, value_name);
+
+        *result = VK_ERROR_INITIALIZATION_FAILED;
+        goto out;
+    }
+
+    if (data_type != REG_SZ && data_type != REG_MULTI_SZ) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+            "loaderGetDeviceRegistryEntry: Invalid %s data type. Expected REG_SZ or REG_MULTI_SZ.", value_name);
+        *result = VK_ERROR_INITIALIZATION_FAILED;
+        goto out;
+    }
+
+    found = loaderAddJsonEntry(inst, reg_data, total_size, value_name, data_type, manifest_path, requiredSize, result);
+
+out:
+    if (manifest_path != NULL) {
+        loader_instance_heap_free(inst, manifest_path);
+    }
+    RegCloseKey(hkrKey);
+    return found;
+}
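+
+// Editorial note (not part of the upstream source): the function above uses the standard two-call
+// RegQueryValueEx idiom -- first with a NULL data buffer to learn the required size, then again
+// with a buffer of that size to fetch the value. A minimal standalone sketch of the same idiom,
+// given an already-opened HKEY named key:
+//
+//     DWORD size = 0, type = 0;
+//     if (RegQueryValueExA(key, "VulkanDriverName", NULL, NULL, NULL, &size) == ERROR_SUCCESS) {
+//         char *buf = (char *)malloc(size);
+//         if (buf != NULL &&
+//             RegQueryValueExA(key, "VulkanDriverName", NULL, &type, (BYTE *)buf, &size) == ERROR_SUCCESS) {
+//             /* buf now holds the REG_SZ or REG_MULTI_SZ payload */
+//         }
+//         free(buf);
+//     }
+//
+// The loader version additionally routes the allocation through loader_instance_heap_alloc so that
+// the application's allocator callbacks are honored.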
+
+// Find the list of registry files (named VulkanDriverName/VulkanDriverNameWow) in HKR.
+//
+// This function walks the display devices and their child software components,
+// collecting manifest filenames into *reg_data as a ';'-separated string
+// (the VkResult return value reports success or the failure reason).
+// If no valid name/value pairs are found, *reg_data is left untouched and an
+// error is returned.
+//
+// When done using the returned string list, the caller should free *reg_data.
+VkResult loaderGetDeviceRegistryFiles(const struct loader_instance *inst, char **reg_data, PDWORD reg_data_size,
+                                      LPCSTR value_name) {
+    static const wchar_t *softwareComponentGUID = L"{5c4c3332-344d-483c-8739-259e934c9cc8}";
+    static const wchar_t *displayGUID = L"{4d36e968-e325-11ce-bfc1-08002be10318}";
+    const ULONG flags = CM_GETIDLIST_FILTER_CLASS | CM_GETIDLIST_FILTER_PRESENT;
+
+    wchar_t childGuid[MAX_GUID_STRING_LEN + 2];  // +2 for brackets {}
+    ULONG childGuidSize = sizeof(childGuid);
+
+    DEVINST devID = 0, childID = 0;
+    wchar_t *pDeviceNames = NULL;
+    ULONG deviceNamesSize = 0;
+    VkResult result = VK_SUCCESS;
+    bool found = false;
+
+    if (NULL == reg_data) {
+        result = VK_ERROR_INITIALIZATION_FAILED;
+        return result;
+    }
+
+    // If a new device is added after obtaining deviceNamesSize, start over.
+    do {
+        CM_Get_Device_ID_List_SizeW(&deviceNamesSize, displayGUID, flags);
+
+        if (pDeviceNames != NULL) {
+            loader_instance_heap_free(inst, pDeviceNames);
+        }
+
+        pDeviceNames = loader_instance_heap_alloc(inst, deviceNamesSize * sizeof(wchar_t), VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+        if (pDeviceNames == NULL) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "loaderGetDeviceRegistryFiles: Failed to allocate space for display device names.");
+            result = VK_ERROR_OUT_OF_HOST_MEMORY;
+            return result;
+        }
+    } while (CM_Get_Device_ID_ListW(displayGUID, pDeviceNames, deviceNamesSize, flags) == CR_BUFFER_SMALL);
+
+    if (pDeviceNames) {
+        for (wchar_t *deviceName = pDeviceNames; *deviceName; deviceName += wcslen(deviceName) + 1) {
+            CONFIGRET status = CM_Locate_DevNodeW(&devID, deviceName, CM_LOCATE_DEVNODE_NORMAL);
+            if (CR_SUCCESS != status) {
+                loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, "loaderGetDeviceRegistryFiles: failed to open DevNode %ls",
+                           deviceName);
+                continue;
+            }
+            ULONG ulStatus, ulProblem;
+            status = CM_Get_DevNode_Status(&ulStatus, &ulProblem, devID, 0);
+
+            if (CR_SUCCESS != status)
+            {
+                loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, "loaderGetDeviceRegistryFiles: failed to probe device status %ls",
+                           deviceName);
+                continue;
+            }
+            if ((ulStatus & DN_HAS_PROBLEM) && (ulProblem == CM_PROB_NEED_RESTART || ulProblem == DN_NEED_RESTART)) {
+                loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                           "loaderGetDeviceRegistryFiles: device %ls is pending reboot, skipping ...", deviceName);
+                continue;
+            }
+
+            loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0, "loaderGetDeviceRegistryFiles: opening device %ls", deviceName);
+
+            if (loaderGetDeviceRegistryEntry(inst, reg_data, reg_data_size, devID, value_name, &result)) {
+                found = true;
+                continue;
+            }
+            else if (result == VK_ERROR_OUT_OF_HOST_MEMORY) {
+                break;
+            }
+
+            status = CM_Get_Child(&childID, devID, 0);
+            if (status != CR_SUCCESS) {
+                loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                           "loaderGetDeviceRegistryFiles: unable to open child-device error:%d", status);
+                continue;
+            }
+
+            do {
+                wchar_t buffer[MAX_DEVICE_ID_LEN];
+                CM_Get_Device_IDW(childID, buffer, MAX_DEVICE_ID_LEN, 0);
+
+                loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                           "loaderGetDeviceRegistryFiles: Opening child device %d - %ls", childID, buffer);
+
+                status = CM_Get_DevNode_Registry_PropertyW(childID, CM_DRP_CLASSGUID, NULL, &childGuid, &childGuidSize, 0);
+                if (status != CR_SUCCESS) {
+                    loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                               "loaderGetDeviceRegistryFiles: unable to obtain GUID for:%d error:%d", childID, status);
+
+                    result = VK_ERROR_INITIALIZATION_FAILED;
+                    continue;
+                }
+
+                if (wcscmp(childGuid, softwareComponentGUID) != 0) {
+                    loader_log(inst, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0,
+                               "loaderGetDeviceRegistryFiles: GUID for %d is not SoftwareComponent skipping", childID);
+                    continue;
+                }
+
+                if (loaderGetDeviceRegistryEntry(inst, reg_data, reg_data_size, childID, value_name, &result)) {
+                    found = true;
+                    break;  // check next-display-device
+                }
+
+            } while (CM_Get_Sibling(&childID, childID, 0) == CR_SUCCESS);
+        }
+
+        loader_instance_heap_free(inst, pDeviceNames);
+    }
+
+    if (!found && result != VK_ERROR_OUT_OF_HOST_MEMORY) {
+        result = VK_ERROR_INITIALIZATION_FAILED;
+    }
+
+    return result;
+}
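+
+// Editorial note (not part of the upstream source): in outline, the function above enumerates the
+// present devices of the display class GUID, locates each devnode, skips devices that are pending
+// a reboot, and reads the requested value (e.g. VulkanDriverName) from the device's registry key;
+// if the value is absent there, it walks the device's child devnodes and accepts those whose class
+// GUID matches the software-component GUID, reading the value from them instead.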
+
+static char *loader_get_next_path(char *path);
+
+// Find the list of registry files (names within a key) in key "location".
+//
+// This function looks in the registry (hive = DEFAULT_VK_REGISTRY_HIVE) key as
+// given in "location" for a list of name/value pairs, which are added to
+// *reg_data as a ';'-separated string of filenames (the VkResult return value
+// reports success or the failure reason).
+// The DWORD values within the key must be 0 or they are skipped.
+// If no valid name/value pairs are found in the key, or the key is not found,
+// an error is returned.
+//
+// When done using the returned string list, the caller should free *reg_data.
+VkResult loaderGetRegistryFiles(const struct loader_instance *inst, char *location, bool use_secondary_hive, char **reg_data,
+                                PDWORD reg_data_size) {
+    // This list contains all of the allowed ICDs. This allows us to verify that a device is actually present from the vendor
+    // specified. This does disallow other vendors, but any new driver should use the device-specific registries anyway.
+    static const struct {
+        const char *filename;
+        int vendor_id;
+    } known_drivers[] = {
+#if defined(_WIN64)
+        {
+            .filename = "igvk64.json",
+            .vendor_id = 0x8086,
+        },
+        {
+            .filename = "nv-vk64.json",
+            .vendor_id = 0x10de,
+        },
+        {
+            .filename = "amd-vulkan64.json",
+            .vendor_id = 0x1002,
+        },
+        {
+            .filename = "amdvlk64.json",
+            .vendor_id = 0x1002,
+        },
+#else
+        {
+            .filename = "igvk32.json",
+            .vendor_id = 0x8086,
+        },
+        {
+            .filename = "nv-vk32.json",
+            .vendor_id = 0x10de,
+        },
+        {
+            .filename = "amd-vulkan32.json",
+            .vendor_id = 0x1002,
+        },
+        {
+            .filename = "amdvlk32.json",
+            .vendor_id = 0x1002,
+        },
+#endif
+    };
+
+    LONG rtn_value;
+    HKEY hive = DEFAULT_VK_REGISTRY_HIVE, key;
+    DWORD access_flags;
+    char name[2048];
+    char *loc = location;
+    char *next;
+    DWORD idx;
+    DWORD name_size = sizeof(name);
+    DWORD value;
+    DWORD value_size = sizeof(value);
+    VkResult result = VK_SUCCESS;
+    bool found = false;
+    IDXGIFactory1 *dxgi_factory = NULL;
+    bool is_driver = !strcmp(location, VK_DRIVERS_INFO_REGISTRY_LOC);
+
+    if (NULL == reg_data) {
+        result = VK_ERROR_INITIALIZATION_FAILED;
+        goto out;
+    }
+
+    if (is_driver) {
+        HRESULT hres = dyn_CreateDXGIFactory1(&IID_IDXGIFactory1, &dxgi_factory);
+        if (hres != S_OK) {
+            loader_log(
+                inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                "loaderGetRegistryFiles: Failed to create dxgi factory for ICD registry verification. No ICDs will be added from "
+                "legacy registry locations");
+            goto out;
+        }
+    }
+
+    while (*loc) {
+        next = loader_get_next_path(loc);
+        access_flags = KEY_QUERY_VALUE;
+        rtn_value = RegOpenKeyEx(hive, loc, 0, access_flags, &key);
+        if (ERROR_SUCCESS == rtn_value) {
+            idx = 0;
+            while ((rtn_value = RegEnumValue(key, idx++, name, &name_size, NULL, NULL, (LPBYTE)&value, &value_size)) ==
+                   ERROR_SUCCESS) {
+                if (value_size == sizeof(value) && value == 0) {
+                    if (NULL == *reg_data) {
+                        *reg_data = loader_instance_heap_alloc(inst, *reg_data_size, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+                        if (NULL == *reg_data) {
+                            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                                       "loaderGetRegistryFiles: Failed to allocate space for registry data for key %s", name);
+                            RegCloseKey(key);
+                            result = VK_ERROR_OUT_OF_HOST_MEMORY;
+                            goto out;
+                        }
+                        *reg_data[0] = '\0';
+                    } else if (strlen(*reg_data) + name_size + 1 > *reg_data_size) {
+                        void *new_ptr = loader_instance_heap_realloc(inst, *reg_data, *reg_data_size, *reg_data_size * 2,
+                                                                     VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+                        if (NULL == new_ptr) {
+                            loader_log(
+                                inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                                "loaderGetRegistryFiles: Failed to reallocate space for registry value of size %d for key %s",
+                                *reg_data_size * 2, name);
+                            RegCloseKey(key);
+                            result = VK_ERROR_OUT_OF_HOST_MEMORY;
+                            goto out;
+                        }
+                        *reg_data = new_ptr;
+                        *reg_data_size *= 2;
+                    }
+
+                    // We've now found a json file. If this is an ICD, we still need to check if there is actually a device
+                    // that matches this ICD
+                    loader_log(
+                        inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0, "Located json file \"%s\" from registry \"%s\\%s\"", name,
+                        hive == DEFAULT_VK_REGISTRY_HIVE ? DEFAULT_VK_REGISTRY_HIVE_STR : SECONDARY_VK_REGISTRY_HIVE_STR, location);
+                    if (is_driver) {
+                        int i;
+                        for (i = 0; i < sizeof(known_drivers) / sizeof(known_drivers[0]); ++i) {
+                            if (!strcmp(name + strlen(name) - strlen(known_drivers[i].filename), known_drivers[i].filename)) {
+                                break;
+                            }
+                        }
+                        if (i == sizeof(known_drivers) / sizeof(known_drivers[0])) {
+                            loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                                       "Driver %s is not recognized as a known driver. It will be assumed to be active", name);
+                        } else {
+                            bool found_gpu = false;
+                            for (int j = 0;; ++j) {
+                                IDXGIAdapter1 *adapter;
+                                HRESULT hres = dxgi_factory->lpVtbl->EnumAdapters1(dxgi_factory, j, &adapter);
+                                if (hres == DXGI_ERROR_NOT_FOUND) {
+                                    break;
+                                } else if (hres != S_OK) {
+                                    loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                                               "Failed to enumerate DXGI adapters at index %d. As a result, drivers may be skipped", j);
+                                    continue;
+                                }
+
+                                DXGI_ADAPTER_DESC1 description;
+                                hres = adapter->lpVtbl->GetDesc1(adapter, &description);
+                                if (hres != S_OK) {
+                                    loader_log(
+                                        inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                                        "Failed to get DXGI adapter information at index %d. As a result, drivers may be skipped", j);
+                                    continue;
+                                }
+
+                                if (description.VendorId == known_drivers[i].vendor_id) {
+                                    found_gpu = true;
+                                    break;
+                                }
+                            }
+
+                            if (!found_gpu) {
+                                loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                                           "Dropping driver %s as no corresponding DXGI adapter was found", name);
+                                continue;
+                            }
+                        }
+                    }
+
+                    if (strlen(*reg_data) == 0) {
+                        // The list is empty. Add the first entry.
+                        (void)snprintf(*reg_data, name_size + 1, "%s", name);
+                        found = true;
+                    } else {
+                        // At this point the reg_data variable contains other JSON paths, likely from the PNP/device section
+                        // of the registry that we want to have precedence over this non-device specific section of the registry.
+                        // To make sure we avoid enumerating old JSON files/drivers that might be present in the non-device specific
+                        // area of the registry when a newer device specific JSON file is present, do a check before adding.
+                        // Find the file name, without path, of the JSON file found in the non-device specific registry location.
+                        // If the same JSON file name is already found in the list, don't add it again.
+                        bool foundDuplicate = false;
+                        char *pLastSlashName = strrchr(name, '\\');
+                        if (pLastSlashName != NULL) {
+                            char *foundMatch = strstr(*reg_data, pLastSlashName + 1);
+                            if (foundMatch != NULL) {
+                                foundDuplicate = true;
+                            }
+                        }
+
+                        if (foundDuplicate == false) {
+                            // Add the new entry to the list.
+                            (void)snprintf(*reg_data + strlen(*reg_data), name_size + 2, "%c%s", PATH_SEPARATOR, name);
+                            found = true;
+                        } else {
+                            loader_log(
+                                inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                                "Skipping adding of json file \"%s\" from registry \"%s\\%s\" to the list due to duplication", name,
+                                hive == DEFAULT_VK_REGISTRY_HIVE ? DEFAULT_VK_REGISTRY_HIVE_STR : SECONDARY_VK_REGISTRY_HIVE_STR,
+                                location);
+                        }
+                    }
+                }
+                name_size = sizeof(name);
+                value_size = sizeof(value);
+            }
+            RegCloseKey(key);
+        }
+
+        // Advance the location - if the next location is in the secondary hive, then reset the locations and advance the hive
+        if (use_secondary_hive && (hive == DEFAULT_VK_REGISTRY_HIVE) && (*next == '\0')) {
+            loc = location;
+            hive = SECONDARY_VK_REGISTRY_HIVE;
+        } else {
+            loc = next;
+        }
+    }
+
+    if (!found && result != VK_ERROR_OUT_OF_HOST_MEMORY) {
+        result = VK_ERROR_INITIALIZATION_FAILED;
+    }
+
+out:
+    if (is_driver && dxgi_factory != NULL) {
+        dxgi_factory->lpVtbl->Release(dxgi_factory);
+    }
+
+    return result;
+}
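+
+// Editorial note (not part of the upstream source): the known_drivers table above pairs each legacy
+// manifest filename with its PCI vendor ID (0x8086 = Intel, 0x10de = NVIDIA, 0x1002 = AMD). A JSON
+// name found in the legacy registry location is kept only if EnumAdapters1 reports a DXGI adapter
+// with the matching VendorId; filenames not in the table are kept unconditionally.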
+
+#endif  // WIN32
+
+// Combine path elements, separating each element with the platform-specific
+// directory separator, and save the combined string to a destination buffer,
+// not exceeding the given length. Path elements are given as variable args,
+// with a NULL element terminating the list.
+//
+// \returns the total length of the combined string, not including an ASCII
+// NUL termination character. This length may exceed the available storage:
+// in this case, the written string will be truncated to avoid a buffer
+// overrun, and the return value will be greater than or equal to the storage
+// size. A NULL argument may be provided as the destination buffer in order
+// to determine the required string length without actually writing a string.
+static size_t loader_platform_combine_path(char *dest, size_t len, ...) {
+    size_t required_len = 0;
+    va_list ap;
+    const char *component;
+
+    va_start(ap, len);
+
+    while ((component = va_arg(ap, const char *))) {
+        if (required_len > 0) {
+            // This path element is not the first non-empty element; prepend
+            // a directory separator if space allows
+            if (dest && required_len + 1 < len) {
+                (void)snprintf(dest + required_len, len - required_len, "%c", DIRECTORY_SYMBOL);
+            }
+            required_len++;
+        }
+
+        if (dest && required_len < len) {
+            strncpy(dest + required_len, component, len - required_len);
+        }
+        required_len += strlen(component);
+    }
+
+    va_end(ap);
+
+    // strncpy(3) won't add a NUL terminating byte in the event of truncation.
+    if (dest && required_len >= len) {
+        dest[len - 1] = '\0';
+    }
+
+    return required_len;
+}
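+
+// Editorial illustration (not part of the upstream source): typical usage of the helper above,
+// including the "measure first" mode where dest is NULL (the path components are made up):
+//
+//     size_t needed = loader_platform_combine_path(NULL, 0, "/usr/share", "vulkan", "icd.d", NULL) + 1;
+//     char *path = loader_instance_heap_alloc(inst, needed, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
+//     if (path != NULL) {
+//         loader_platform_combine_path(path, needed, "/usr/share", "vulkan", "icd.d", NULL);
+//         // path is now "/usr/share/vulkan/icd.d", joined with the platform's DIRECTORY_SYMBOL
+//     }
+//
+// The return value counts the characters that would have been written (excluding the terminating
+// NUL), so callers add one before allocating.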
+
+// Given a string of the three-part form "maj.min.pat", convert it to a Vulkan version number.
+static uint32_t loader_make_version(char *vers_str) {
+    uint32_t vers = 0, major = 0, minor = 0, patch = 0;
+    char *vers_tok;
+
+    if (!vers_str) {
+        return vers;
+    }
+
+    vers_tok = strtok(vers_str, ".\"\n\r");
+    if (NULL != vers_tok) {
+        major = (uint16_t)atoi(vers_tok);
+        vers_tok = strtok(NULL, ".\"\n\r");
+        if (NULL != vers_tok) {
+            minor = (uint16_t)atoi(vers_tok);
+            vers_tok = strtok(NULL, ".\"\n\r");
+            if (NULL != vers_tok) {
+                patch = (uint16_t)atoi(vers_tok);
+            }
+        }
+    }
+
+    return VK_MAKE_VERSION(major, minor, patch);
+}
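+
+// Editorial illustration (not part of the upstream source): given a version string taken from a
+// JSON manifest, the helper above yields the packed VK_MAKE_VERSION value, e.g.:
+//
+//     char vers[] = "1.1.106";                      // strtok modifies its input, so no string literals
+//     uint32_t packed = loader_make_version(vers);  // equivalent to VK_MAKE_VERSION(1, 1, 106)
+//
+// Missing components default to 0, and a NULL input yields version 0.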
+
+bool compare_vk_extension_properties(const VkExtensionProperties *op1, const VkExtensionProperties *op2) {
+    return strcmp(op1->extensionName, op2->extensionName) == 0 ? true : false;
+}
+
+// Search the given ext_array for an extension matching the given vk_ext_prop
+bool has_vk_extension_property_array(const VkExtensionProperties *vk_ext_prop, const uint32_t count,
+                                     const VkExtensionProperties *ext_array) {
+    for (uint32_t i = 0; i < count; i++) {
+        if (compare_vk_extension_properties(vk_ext_prop, &ext_array[i])) return true;
+    }
+    return false;
+}
+
+// Search the given ext_list for an extension matching the given vk_ext_prop
+bool has_vk_extension_property(const VkExtensionProperties *vk_ext_prop, const struct loader_extension_list *ext_list) {
+    for (uint32_t i = 0; i < ext_list->count; i++) {
+        if (compare_vk_extension_properties(&ext_list->list[i], vk_ext_prop)) return true;
+    }
+    return false;
+}
+
+// Search the given ext_list for a device extension matching the given ext_prop
+bool has_vk_dev_ext_property(const VkExtensionProperties *ext_prop, const struct loader_device_extension_list *ext_list) {
+    for (uint32_t i = 0; i < ext_list->count; i++) {
+        if (compare_vk_extension_properties(&ext_list->list[i].props, ext_prop)) return true;
+    }
+    return false;
+}
+
+// Get the next unused layer property in the list. Init the property to zero.
+static struct loader_layer_properties *loaderGetNextLayerPropertySlot(const struct loader_instance *inst,
+                                                                      struct loader_layer_list *layer_list) {
+    if (layer_list->capacity == 0) {
+        layer_list->list =
+            loader_instance_heap_alloc(inst, sizeof(struct loader_layer_properties) * 64, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+        if (layer_list->list == NULL) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "loaderGetNextLayerPropertySlot: Out of memory; cannot "
+                       "add any layer properties to the list");
+            return NULL;
+        }
+        memset(layer_list->list, 0, sizeof(struct loader_layer_properties) * 64);
+        layer_list->capacity = sizeof(struct loader_layer_properties) * 64;
+    }
+
+    // Ensure enough room to add an entry
+    if ((layer_list->count + 1) * sizeof(struct loader_layer_properties) > layer_list->capacity) {
+        void *new_ptr = loader_instance_heap_realloc(inst, layer_list->list, layer_list->capacity, layer_list->capacity * 2,
+                                                     VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+        if (NULL == new_ptr) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, "loaderGetNextLayerPropertySlot: realloc failed for layer list");
+            return NULL;
+        }
+        layer_list->list = new_ptr;
+        memset((uint8_t *)layer_list->list + layer_list->capacity, 0, layer_list->capacity);
+        layer_list->capacity *= 2;
+    }
+
+    layer_list->count++;
+    return &(layer_list->list[layer_list->count - 1]);
+}
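+
+// Editorial note (not part of the upstream source): in this list, "count" is the number of used
+// loader_layer_properties entries while "capacity" is tracked in bytes (initially
+// 64 * sizeof(struct loader_layer_properties), doubling on demand), which is why the check above
+// multiplies (count + 1) by the element size before comparing against capacity.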
+
+// Search the given layer list for a layer property matching the given layer name
+static struct loader_layer_properties *loaderFindLayerProperty(const char *name, const struct loader_layer_list *layer_list) {
+    for (uint32_t i = 0; i < layer_list->count; i++) {
+        const VkLayerProperties *item = &layer_list->list[i].info;
+        if (strcmp(name, item->layerName) == 0) return &layer_list->list[i];
+    }
+    return NULL;
+}
+
+// Search the given layer list for a layer matching the given layer name
+static bool loaderFindLayerNameInList(const char *name, const struct loader_layer_list *layer_list) {
+    if (NULL == layer_list) {
+        return false;
+    }
+    if (NULL != loaderFindLayerProperty(name, layer_list)) {
+        return true;
+    }
+    return false;
+}
+
+// Search the given meta-layer's component list for a layer matching the given layer name
+static bool loaderFindLayerNameInMetaLayer(const struct loader_instance *inst, const char *layer_name,
+                                           struct loader_layer_list *layer_list, struct loader_layer_properties *meta_layer_props) {
+    for (uint32_t comp_layer = 0; comp_layer < meta_layer_props->num_component_layers; comp_layer++) {
+        if (!strcmp(meta_layer_props->component_layer_names[comp_layer], layer_name)) {
+            return true;
+        }
+        struct loader_layer_properties *comp_layer_props =
+            loaderFindLayerProperty(meta_layer_props->component_layer_names[comp_layer], layer_list);
+        if (comp_layer_props->type_flags & VK_LAYER_TYPE_FLAG_META_LAYER) {
+            return loaderFindLayerNameInMetaLayer(inst, layer_name, layer_list, comp_layer_props);
+        }
+    }
+    return false;
+}
+
+// Search the override layer's blacklist for a layer matching the given layer name
+static bool loaderFindLayerNameInBlacklist(const struct loader_instance *inst, const char *layer_name,
+                                           struct loader_layer_list *layer_list, struct loader_layer_properties *meta_layer_props) {
+    for (uint32_t black_layer = 0; black_layer < meta_layer_props->num_blacklist_layers; ++black_layer) {
+        if (!strcmp(meta_layer_props->blacklist_layer_names[black_layer], layer_name)) {
+            return true;
+        }
+    }
+    return false;
+}
+
+// Remove all layer properties entries from the list
+void loaderDeleteLayerListAndProperties(const struct loader_instance *inst, struct loader_layer_list *layer_list) {
+    uint32_t i, j, k;
+    struct loader_device_extension_list *dev_ext_list;
+    struct loader_dev_ext_props *ext_props;
+    if (!layer_list) return;
+
+    for (i = 0; i < layer_list->count; i++) {
+        if (NULL != layer_list->list[i].blacklist_layer_names) {
+            loader_instance_heap_free(inst, layer_list->list[i].blacklist_layer_names);
+            layer_list->list[i].blacklist_layer_names = NULL;
+        }
+        if (NULL != layer_list->list[i].component_layer_names) {
+            loader_instance_heap_free(inst, layer_list->list[i].component_layer_names);
+            layer_list->list[i].component_layer_names = NULL;
+        }
+        if (NULL != layer_list->list[i].override_paths) {
+            loader_instance_heap_free(inst, layer_list->list[i].override_paths);
+            layer_list->list[i].override_paths = NULL;
+        }
+        loader_destroy_generic_list(inst, (struct loader_generic_list *)&layer_list->list[i].instance_extension_list);
+        dev_ext_list = &layer_list->list[i].device_extension_list;
+        if (dev_ext_list->capacity > 0 && NULL != dev_ext_list->list) {
+            for (j = 0; j < dev_ext_list->count; j++) {
+                ext_props = &dev_ext_list->list[j];
+                if (ext_props->entrypoint_count > 0) {
+                    for (k = 0; k < ext_props->entrypoint_count; k++) {
+                        loader_instance_heap_free(inst, ext_props->entrypoints[k]);
+                    }
+                    loader_instance_heap_free(inst, ext_props->entrypoints);
+                }
+            }
+        }
+        loader_destroy_generic_list(inst, (struct loader_generic_list *)dev_ext_list);
+    }
+    layer_list->count = 0;
+
+    if (layer_list->capacity > 0) {
+        layer_list->capacity = 0;
+        loader_instance_heap_free(inst, layer_list->list);
+    }
+}
+
+// Remove all layers in the layer list that are blacklisted by the override layer.
+// NOTE: This should only be called if an override layer is found and not expired.
+void loaderRemoveLayersInBlacklist(const struct loader_instance *inst, struct loader_layer_list *layer_list) {
+    struct loader_layer_properties *override_prop = loaderFindLayerProperty(VK_OVERRIDE_LAYER_NAME, layer_list);
+    if (NULL == override_prop) {
+        return;
+    }
+
+    for (int32_t j = 0; j < (int32_t)(layer_list->count); j++) {
+        struct loader_layer_properties cur_layer_prop = layer_list->list[j];
+        const char *cur_layer_name = &cur_layer_prop.info.layerName[0];
+
+        // Skip the override layer itself.
+        if (!strcmp(VK_OVERRIDE_LAYER_NAME, cur_layer_name)) {
+            continue;
+        }
+
+        // If found in the override layer's blacklist, remove it
+        if (loaderFindLayerNameInBlacklist(inst, cur_layer_name, layer_list, override_prop)) {
+            loader_log(inst, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0,
+                       "loaderRemoveLayersInBlacklist: Override layer is active and layer %s is in the blacklist"
+                       " inside of it. Removing that layer from current layer list.",
+                       cur_layer_name);
+
+            if (cur_layer_prop.type_flags & VK_LAYER_TYPE_FLAG_META_LAYER) {
+                // Delete the component layers
+                loader_instance_heap_free(inst, cur_layer_prop.component_layer_names);
+                loader_instance_heap_free(inst, cur_layer_prop.override_paths);
+                // Never need to free the blacklist, since it can only exist in the override layer
+            }
+
+            // Remove the current invalid meta-layer from the layer list.  Use memmove since we are
+            // overlapping the source and destination addresses.
+            memmove(&layer_list->list[j], &layer_list->list[j + 1],
+                    sizeof(struct loader_layer_properties) * (layer_list->count - 1 - j));
+
+            // Decrement the count (because we now have one less) and decrement the loop index since we need to
+            // re-check this index.
+            layer_list->count--;
+            j--;
+
+            // Re-do the query for the override layer
+            override_prop = loaderFindLayerProperty(VK_OVERRIDE_LAYER_NAME, layer_list);
+        }
+    }
+}
+
+// Remove all layers in the layer list that are not found inside any implicit meta-layers.
+void loaderRemoveLayersNotInImplicitMetaLayers(const struct loader_instance *inst, struct loader_layer_list *layer_list) {
+    int32_t i;
+    int32_t j;
+    int32_t layer_count = (int32_t)(layer_list->count);
+
+    for (i = 0; i < layer_count; i++) {
+        layer_list->list[i].keep = false;
+    }
+
+    for (i = 0; i < layer_count; i++) {
+        struct loader_layer_properties cur_layer_prop = layer_list->list[i];
+
+        if (0 == (cur_layer_prop.type_flags & VK_LAYER_TYPE_FLAG_EXPLICIT_LAYER)) {
+            layer_list->list[i].keep = true;
+        } else {
+            continue;
+        }
+
+        if (cur_layer_prop.type_flags & VK_LAYER_TYPE_FLAG_META_LAYER) {
+            for (j = 0; j < layer_count; j++) {
+                struct loader_layer_properties layer_to_check = layer_list->list[j];
+
+                if (i == j) {
+                    continue;
+                }
+
+                // For all layers found in this meta layer, we want to keep them as well.
+                if (loaderFindLayerNameInMetaLayer(inst, layer_to_check.info.layerName, layer_list, &cur_layer_prop)) {
+                    layer_list->list[j].keep = true;
+                }
+            }
+        }
+    }
+
+    // Remove any layers we don't want to keep (Don't use layer_count here as we need it to be
+    // dynamically updated if we delete a layer property in the list).
+    for (i = 0; i < (int32_t)(layer_list->count); i++) {
+        struct loader_layer_properties cur_layer_prop = layer_list->list[i];
+        if (!cur_layer_prop.keep) {
+            loader_log(inst, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0,
+                       "loaderRemoveLayersNotInImplicitMetaLayers: Implicit meta-layers are active, and layer %s is not listed"
+                       " inside of any of them. Removing that layer from the current layer list.",
+                       cur_layer_prop.info.layerName);
+
+            if (cur_layer_prop.type_flags & VK_LAYER_TYPE_FLAG_META_LAYER) {
+                // Delete the component layers
+                loader_instance_heap_free(inst, cur_layer_prop.component_layer_names);
+                loader_instance_heap_free(inst, cur_layer_prop.override_paths);
+            }
+
+            // Remove the current invalid meta-layer from the layer list.  Use memmove since we are
+            // overlapping the source and destination addresses.
+            memmove(&layer_list->list[i], &layer_list->list[i + 1],
+                    sizeof(struct loader_layer_properties) * (layer_list->count - 1 - i));
+
+            // Decrement the count (because we now have one less) and decrement the loop index since we need to
+            // re-check this index.
+            layer_list->count--;
+            i--;
+        }
+    }
+}
+
+static VkResult loader_add_instance_extensions(const struct loader_instance *inst,
+                                               const PFN_vkEnumerateInstanceExtensionProperties fp_get_props, const char *lib_name,
+                                               struct loader_extension_list *ext_list) {
+    uint32_t i, count = 0;
+    VkExtensionProperties *ext_props;
+    VkResult res = VK_SUCCESS;
+
+    if (!fp_get_props) {
+        // No EnumerateInstanceExtensionProperties defined
+        goto out;
+    }
+
+    res = fp_get_props(NULL, &count, NULL);
+    if (res != VK_SUCCESS) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "loader_add_instance_extensions: Error getting Instance "
+                   "extension count from %s",
+                   lib_name);
+        goto out;
+    }
+
+    if (count == 0) {
+        // No ExtensionProperties to report
+        goto out;
+    }
+
+    ext_props = loader_stack_alloc(count * sizeof(VkExtensionProperties));
+    if (NULL == ext_props) {
+        res = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+
+    res = fp_get_props(NULL, &count, ext_props);
+    if (res != VK_SUCCESS) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "loader_add_instance_extensions: Error getting Instance "
+                   "extensions from %s",
+                   lib_name);
+        goto out;
+    }
+
+    for (i = 0; i < count; i++) {
+        char spec_version[64];
+
+        bool ext_unsupported = wsi_unsupported_instance_extension(&ext_props[i]);
+        if (!ext_unsupported) {
+            (void)snprintf(spec_version, sizeof(spec_version), "%d.%d.%d", VK_VERSION_MAJOR(ext_props[i].specVersion),
+                           VK_VERSION_MINOR(ext_props[i].specVersion), VK_VERSION_PATCH(ext_props[i].specVersion));
+            loader_log(inst, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0, "Instance Extension: %s (%s) version %s", ext_props[i].extensionName,
+                       lib_name, spec_version);
+
+            res = loader_add_to_ext_list(inst, ext_list, 1, &ext_props[i]);
+            if (res != VK_SUCCESS) {
+                loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                           "loader_add_instance_extensions: Failed to add %s "
+                           "to Instance extension list",
+                           lib_name);
+                goto out;
+            }
+        }
+    }
+
+out:
+    return res;
+}
+
+// Initialize ext_list with the physical device extensions.
+// The extension properties are passed as inputs in count and ext_props.
+static VkResult loader_init_device_extensions(const struct loader_instance *inst, struct loader_physical_device_term *phys_dev_term,
+                                              uint32_t count, VkExtensionProperties *ext_props,
+                                              struct loader_extension_list *ext_list) {
+    VkResult res;
+    uint32_t i;
+
+    res = loader_init_generic_list(inst, (struct loader_generic_list *)ext_list, sizeof(VkExtensionProperties));
+    if (VK_SUCCESS != res) {
+        return res;
+    }
+
+    for (i = 0; i < count; i++) {
+        char spec_version[64];
+        (void)snprintf(spec_version, sizeof(spec_version), "%d.%d.%d", VK_VERSION_MAJOR(ext_props[i].specVersion),
+                       VK_VERSION_MINOR(ext_props[i].specVersion), VK_VERSION_PATCH(ext_props[i].specVersion));
+        loader_log(inst, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0, "Device Extension: %s (%s) version %s", ext_props[i].extensionName,
+                   phys_dev_term->this_icd_term->scanned_icd->lib_name, spec_version);
+        res = loader_add_to_ext_list(inst, ext_list, 1, &ext_props[i]);
+        if (res != VK_SUCCESS) return res;
+    }
+
+    return VK_SUCCESS;
+}
+
+VkResult loader_add_device_extensions(const struct loader_instance *inst,
+                                      PFN_vkEnumerateDeviceExtensionProperties fpEnumerateDeviceExtensionProperties,
+                                      VkPhysicalDevice physical_device, const char *lib_name,
+                                      struct loader_extension_list *ext_list) {
+    uint32_t i, count;
+    VkResult res;
+    VkExtensionProperties *ext_props;
+
+    res = fpEnumerateDeviceExtensionProperties(physical_device, NULL, &count, NULL);
+    if (res == VK_SUCCESS && count > 0) {
+        ext_props = loader_stack_alloc(count * sizeof(VkExtensionProperties));
+        if (!ext_props) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "loader_add_device_extensions: Failed to allocate space"
+                       " for device extension properties.");
+            return VK_ERROR_OUT_OF_HOST_MEMORY;
+        }
+        res = fpEnumerateDeviceExtensionProperties(physical_device, NULL, &count, ext_props);
+        if (res != VK_SUCCESS) {
+            return res;
+        }
+        for (i = 0; i < count; i++) {
+            char spec_version[64];
+            (void)snprintf(spec_version, sizeof(spec_version), "%d.%d.%d", VK_VERSION_MAJOR(ext_props[i].specVersion),
+                           VK_VERSION_MINOR(ext_props[i].specVersion), VK_VERSION_PATCH(ext_props[i].specVersion));
+            loader_log(inst, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0, "Device Extension: %s (%s) version %s", ext_props[i].extensionName,
+                       lib_name, spec_version);
+            res = loader_add_to_ext_list(inst, ext_list, 1, &ext_props[i]);
+            if (res != VK_SUCCESS) {
+                return res;
+            }
+        }
+    } else {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "loader_add_device_extensions: Error getting physical "
+                   "device extension info count from library %s",
+                   lib_name);
+        return res;
+    }
+
+    return VK_SUCCESS;
+}
+
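+// Initialize a generic list with zeroed storage for 32 elements of element_size bytes.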
+VkResult loader_init_generic_list(const struct loader_instance *inst, struct loader_generic_list *list_info, size_t element_size) {
+    size_t capacity = 32 * element_size;
+    list_info->count = 0;
+    list_info->capacity = 0;
+    list_info->list = loader_instance_heap_alloc(inst, capacity, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+    if (list_info->list == NULL) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "loader_init_generic_list: Failed to allocate space "
+                   "for generic list");
+        return VK_ERROR_OUT_OF_HOST_MEMORY;
+    }
+    memset(list_info->list, 0, capacity);
+    list_info->capacity = capacity;
+    return VK_SUCCESS;
+}
+
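+// Free a generic list's backing storage and reset its count and capacity.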
+void loader_destroy_generic_list(const struct loader_instance *inst, struct loader_generic_list *list) {
+    loader_instance_heap_free(inst, list->list);
+    list->count = 0;
+    list->capacity = 0;
+}
+
+// Append non-duplicate extension properties defined in props to the given ext_list.
+// Return - VK_SUCCESS on success
+VkResult loader_add_to_ext_list(const struct loader_instance *inst, struct loader_extension_list *ext_list,
+                                uint32_t prop_list_count, const VkExtensionProperties *props) {
+    uint32_t i;
+    const VkExtensionProperties *cur_ext;
+
+    if (ext_list->list == NULL || ext_list->capacity == 0) {
+        VkResult res = loader_init_generic_list(inst, (struct loader_generic_list *)ext_list, sizeof(VkExtensionProperties));
+        if (VK_SUCCESS != res) {
+            return res;
+        }
+    }
+
+    for (i = 0; i < prop_list_count; i++) {
+        cur_ext = &props[i];
+
+        // look for duplicates
+        if (has_vk_extension_property(cur_ext, ext_list)) {
+            continue;
+        }
+
+        // add to list at end
+        // check for enough capacity
+        if (ext_list->count * sizeof(VkExtensionProperties) >= ext_list->capacity) {
+            void *new_ptr = loader_instance_heap_realloc(inst, ext_list->list, ext_list->capacity, ext_list->capacity * 2,
+                                                         VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+            if (new_ptr == NULL) {
+                loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                           "loader_add_to_ext_list: Failed to reallocate "
+                           "space for extension list");
+                return VK_ERROR_OUT_OF_HOST_MEMORY;
+            }
+            ext_list->list = new_ptr;
+
+            // double capacity
+            ext_list->capacity *= 2;
+        }
+
+        memcpy(&ext_list->list[ext_list->count], cur_ext, sizeof(VkExtensionProperties));
+        ext_list->count++;
+    }
+    return VK_SUCCESS;
+}
+
+// Append one extension property defined in props with entrypoints defined in entries to the given
+// ext_list. Do not append if a duplicate.
+// Return - VK_SUCCESS on success
+VkResult loader_add_to_dev_ext_list(const struct loader_instance *inst, struct loader_device_extension_list *ext_list,
+                                    const VkExtensionProperties *props, uint32_t entry_count, char **entrys) {
+    uint32_t idx;
+    if (ext_list->list == NULL || ext_list->capacity == 0) {
+        VkResult res = loader_init_generic_list(inst, (struct loader_generic_list *)ext_list, sizeof(struct loader_dev_ext_props));
+        if (VK_SUCCESS != res) {
+            return res;
+        }
+    }
+
+    // look for duplicates
+    if (has_vk_dev_ext_property(props, ext_list)) {
+        return VK_SUCCESS;
+    }
+
+    idx = ext_list->count;
+    // add to list at end
+    // check for enough capacity
+    if (idx * sizeof(struct loader_dev_ext_props) >= ext_list->capacity) {
+        void *new_ptr = loader_instance_heap_realloc(inst, ext_list->list, ext_list->capacity, ext_list->capacity * 2,
+                                                     VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+
+        if (NULL == new_ptr) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "loader_add_to_dev_ext_list: Failed to reallocate space for device extension list");
+            return VK_ERROR_OUT_OF_HOST_MEMORY;
+        }
+        ext_list->list = new_ptr;
+
+        // double capacity
+        ext_list->capacity *= 2;
+    }
+
+    memcpy(&ext_list->list[idx].props, props, sizeof(*props));
+    ext_list->list[idx].entrypoint_count = entry_count;
+    if (entry_count == 0) {
+        ext_list->list[idx].entrypoints = NULL;
+    } else {
+        ext_list->list[idx].entrypoints =
+            loader_instance_heap_alloc(inst, sizeof(char *) * entry_count, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+        if (ext_list->list[idx].entrypoints == NULL) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "loader_add_to_dev_ext_list: Failed to allocate space "
+                       "for device extension entrypoint list in list %d",
+                       idx);
+            ext_list->list[idx].entrypoint_count = 0;
+            return VK_ERROR_OUT_OF_HOST_MEMORY;
+        }
+        for (uint32_t i = 0; i < entry_count; i++) {
+            ext_list->list[idx].entrypoints[i] =
+                loader_instance_heap_alloc(inst, strlen(entrys[i]) + 1, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+            if (ext_list->list[idx].entrypoints[i] == NULL) {
+                for (uint32_t j = 0; j < i; j++) {
+                    loader_instance_heap_free(inst, ext_list->list[idx].entrypoints[j]);
+                }
+                loader_instance_heap_free(inst, ext_list->list[idx].entrypoints);
+                ext_list->list[idx].entrypoint_count = 0;
+                ext_list->list[idx].entrypoints = NULL;
+                loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                           "loader_add_to_dev_ext_list: Failed to allocate space "
+                           "for device extension entrypoint %d name",
+                           i);
+                return VK_ERROR_OUT_OF_HOST_MEMORY;
+            }
+            strcpy(ext_list->list[idx].entrypoints[i], entrys[i]);
+        }
+    }
+    ext_list->count++;
+
+    return VK_SUCCESS;
+}
+
+// Forward declaration: loaderAddMetaLayer is used by the helpers below before it is defined.
+bool loaderAddMetaLayer(const struct loader_instance *inst, const struct loader_layer_properties *prop,
+                        struct loader_layer_list *target_list, struct loader_layer_list *expanded_target_list,
+                        const struct loader_layer_list *source_list);
+
+// Manage lists of VkLayerProperties
+static bool loaderInitLayerList(const struct loader_instance *inst, struct loader_layer_list *list) {
+    list->capacity = 32 * sizeof(struct loader_layer_properties);
+    list->list = loader_instance_heap_alloc(inst, list->capacity, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+    if (list->list == NULL) {
+        return false;
+    }
+    memset(list->list, 0, list->capacity);
+    list->count = 0;
+    return true;
+}
+
+// Search the given layer list for a list matching the given VkLayerProperties
+bool loaderListHasLayerProperty(const VkLayerProperties *vk_layer_prop, const struct loader_layer_list *list) {
+    for (uint32_t i = 0; i < list->count; i++) {
+        if (strcmp(vk_layer_prop->layerName, list->list[i].info.layerName) == 0) return true;
+    }
+    return false;
+}
+
+void loaderDestroyLayerList(const struct loader_instance *inst, struct loader_device *device,
+                            struct loader_layer_list *layer_list) {
+    if (device) {
+        loader_device_heap_free(device, layer_list->list);
+    } else {
+        loader_instance_heap_free(inst, layer_list->list);
+    }
+    layer_list->count = 0;
+    layer_list->capacity = 0;
+}
+
+// Append non-duplicate layer properties defined in prop_list to the given layer_info list
+VkResult loaderAddLayerPropertiesToList(const struct loader_instance *inst, struct loader_layer_list *list,
+                                        uint32_t prop_list_count, const struct loader_layer_properties *props) {
+    uint32_t i;
+    struct loader_layer_properties *layer;
+
+    if (list->list == NULL || list->capacity == 0) {
+        if (!loaderInitLayerList(inst, list)) {
+            return VK_ERROR_OUT_OF_HOST_MEMORY;
+        }
+    }
+
+    if (list->list == NULL) return VK_SUCCESS;
+
+    for (i = 0; i < prop_list_count; i++) {
+        layer = (struct loader_layer_properties *)&props[i];
+
+        // Look for duplicates, and skip
+        if (loaderListHasLayerProperty(&layer->info, list)) {
+            continue;
+        }
+
+        // Check for enough capacity
+        if (((list->count + 1) * sizeof(struct loader_layer_properties)) >= list->capacity) {
+            size_t new_capacity = list->capacity * 2;
+            void *new_ptr =
+                loader_instance_heap_realloc(inst, list->list, list->capacity, new_capacity, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+            if (NULL == new_ptr) {
+                loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                           "loaderAddLayerPropertiesToList: Realloc failed when attempting to add new layer");
+                return VK_ERROR_OUT_OF_HOST_MEMORY;
+            }
+            list->list = new_ptr;
+            list->capacity = new_capacity;
+        }
+
+        memcpy(&list->list[list->count], layer, sizeof(struct loader_layer_properties));
+        list->count++;
+    }
+
+    return VK_SUCCESS;
+}
+
+// Search the given search_list for any layers in the props list.  Add these to the
+// output layer_list.  Don't add duplicates to the output layer_list.
+static VkResult loaderAddLayerNamesToList(const struct loader_instance *inst, struct loader_layer_list *output_list,
+                                          struct loader_layer_list *expanded_output_list, uint32_t name_count,
+                                          const char *const *names, const struct loader_layer_list *source_list) {
+    struct loader_layer_properties *layer_prop;
+    VkResult err = VK_SUCCESS;
+
+    for (uint32_t i = 0; i < name_count; i++) {
+        const char *source_name = names[i];
+        layer_prop = loaderFindLayerProperty(source_name, source_list);
+        if (NULL == layer_prop) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, "loaderAddLayerNamesToList: Unable to find layer %s", source_name);
+            err = VK_ERROR_LAYER_NOT_PRESENT;
+            continue;
+        }
+
+        // If not a meta-layer, simply add it.
+        if (0 == (layer_prop->type_flags & VK_LAYER_TYPE_FLAG_META_LAYER)) {
+            if (!loaderListHasLayerProperty(&layer_prop->info, output_list)) {
+                loaderAddLayerPropertiesToList(inst, output_list, 1, layer_prop);
+            }
+            if (!loaderListHasLayerProperty(&layer_prop->info, expanded_output_list)) {
+                loaderAddLayerPropertiesToList(inst, expanded_output_list, 1, layer_prop);
+            }
+        } else {
+            if (!loaderListHasLayerProperty(&layer_prop->info, output_list) ||
+                !loaderListHasLayerProperty(&layer_prop->info, expanded_output_list)) {
+                loaderAddMetaLayer(inst, layer_prop, output_list, expanded_output_list, source_list);
+            }
+        }
+    }
+
+    return err;
+}
+
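+// Return true while the layer's expiration date/time (interpreted in local time) has not yet passed.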
+static bool checkExpiration(const struct loader_instance *inst, const struct loader_layer_properties *prop) {
+    time_t current = time(NULL);
+    struct tm tm_current = *localtime(&current);
+
+    struct tm tm_expiration = {
+        .tm_sec = 0,
+        .tm_min = prop->expiration.minute,
+        .tm_hour = prop->expiration.hour,
+        .tm_mday = prop->expiration.day,
+        .tm_mon = prop->expiration.month - 1,
+        .tm_year = prop->expiration.year - 1900,
+        .tm_isdst = tm_current.tm_isdst,
+        // wday and yday are ignored by mktime
+    };
+    time_t expiration = mktime(&tm_expiration);
+
+    return current < expiration;
+}
+
+// Determine if the provided implicit layer should be enabled by querying the appropriate environment variables.
+// For an implicit layer, at least a disable environment variable is required.
+bool loaderImplicitLayerIsEnabled(const struct loader_instance *inst, const struct loader_layer_properties *prop) {
+    bool enable = false;
+    char *env_value = NULL;
+
+    // If no enable_environment variable is specified, this implicit layer is always enabled by default.
+    if (prop->enable_env_var.name[0] == 0) {
+        enable = true;
+    } else {
+        // Otherwise, only enable this layer if the enable environment variable is defined
+        env_value = loader_getenv(prop->enable_env_var.name, inst);
+        if (env_value && !strcmp(prop->enable_env_var.value, env_value)) {
+            enable = true;
+        }
+        loader_free_getenv(env_value, inst);
+    }
+
+    // The disable_environment has priority over everything else.  If it is defined, the layer is always
+    // disabled.
+    env_value = loader_getenv(prop->disable_env_var.name, inst);
+    if (env_value) {
+        enable = false;
+    }
+    loader_free_getenv(env_value, inst);
+
+    // If this layer has an expiration, check it to determine if this layer has expired.
+    if (prop->has_expiration) {
+        enable = checkExpiration(inst, prop);
+    }
+
+    // Enable this layer if it is included in the override layer
+    if (inst != NULL && inst->override_layer_present) {
+        struct loader_layer_properties *override = NULL;
+        for (uint32_t i = 0; i < inst->instance_layer_list.count; ++i) {
+            if (strcmp(inst->instance_layer_list.list[i].info.layerName, VK_OVERRIDE_LAYER_NAME) == 0) {
+                override = &inst->instance_layer_list.list[i];
+                break;
+            }
+        }
+        if (override != NULL) {
+            for (uint32_t i = 0; i < override->num_component_layers; ++i) {
+                if (strcmp(override->component_layer_names[i], prop->info.layerName) == 0) {
+                    enable = true;
+                    break;
+                }
+            }
+        }
+    }
+
+    return enable;
+}
+
+// Check the individual implicit layer for the enable/disable environment variable settings.  Only add it after
+// every check has passed indicating it should be used.
+static void loaderAddImplicitLayer(const struct loader_instance *inst, const struct loader_layer_properties *prop,
+                                   struct loader_layer_list *target_list, struct loader_layer_list *expanded_target_list,
+                                   const struct loader_layer_list *source_list) {
+    bool enable = loaderImplicitLayerIsEnabled(inst, prop);
+
+    // If the implicit layer is supposed to be enabled, make sure the layer supports at least the same API version
+    // that the application is requesting (i.e. layer's API >= app's API).  If it's not, disable this layer.
+    if (enable) {
+        uint16_t layer_api_major_version = VK_VERSION_MAJOR(prop->info.specVersion);
+        uint16_t layer_api_minor_version = VK_VERSION_MINOR(prop->info.specVersion);
+        if (inst->app_api_major_version > layer_api_major_version ||
+            (inst->app_api_major_version == layer_api_major_version && inst->app_api_minor_version > layer_api_minor_version)) {
+            loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                       "loaderAddImplicitLayer: Disabling implicit layer %s because it uses an older API version %d.%d than "
+                       "the application requested %d.%d",
+                       prop->info.layerName, layer_api_major_version, layer_api_minor_version, inst->app_api_major_version,
+                       inst->app_api_minor_version);
+            enable = false;
+        }
+    }
+
+    if (enable) {
+        if (0 == (prop->type_flags & VK_LAYER_TYPE_FLAG_META_LAYER)) {
+            if (!loaderListHasLayerProperty(&prop->info, target_list)) {
+                loaderAddLayerPropertiesToList(inst, target_list, 1, prop);
+            }
+            if (NULL != expanded_target_list && !loaderListHasLayerProperty(&prop->info, expanded_target_list)) {
+                loaderAddLayerPropertiesToList(inst, expanded_target_list, 1, prop);
+            }
+        } else {
+            if (!loaderListHasLayerProperty(&prop->info, target_list) ||
+                (NULL != expanded_target_list && !loaderListHasLayerProperty(&prop->info, expanded_target_list))) {
+                loaderAddMetaLayer(inst, prop, target_list, expanded_target_list, source_list);
+            }
+        }
+    }
+}
+
+// Add the component layers of a meta-layer to the active list of layers
+bool loaderAddMetaLayer(const struct loader_instance *inst, const struct loader_layer_properties *prop,
+                        struct loader_layer_list *target_list, struct loader_layer_list *expanded_target_list,
+                        const struct loader_layer_list *source_list) {
+    bool found = true;
+
+    // If the meta-layer isn't present in the unexpanded list, add it.
+    if (!loaderListHasLayerProperty(&prop->info, target_list)) {
+        loaderAddLayerPropertiesToList(inst, target_list, 1, prop);
+    }
+
+    // We need to add all the individual component layers
+    for (uint32_t comp_layer = 0; comp_layer < prop->num_component_layers; comp_layer++) {
+        bool found_comp = false;
+        const struct loader_layer_properties *search_prop =
+            loaderFindLayerProperty(prop->component_layer_names[comp_layer], source_list);
+        if (search_prop != NULL) {
+            found_comp = true;
+
+            // If the component layer is itself an implicit layer, we need to do the implicit layer enable
+            // checks
+            if (0 == (search_prop->type_flags & VK_LAYER_TYPE_FLAG_EXPLICIT_LAYER)) {
+                loaderAddImplicitLayer(inst, search_prop, target_list, expanded_target_list, source_list);
+            } else {
+                if (0 != (search_prop->type_flags & VK_LAYER_TYPE_FLAG_META_LAYER)) {
+                    found = loaderAddMetaLayer(inst, search_prop, target_list, expanded_target_list, source_list);
+                } else {
+                    // Otherwise, just make sure it hasn't already been added to either list before we add it
+                    if (!loaderListHasLayerProperty(&search_prop->info, target_list)) {
+                        loaderAddLayerPropertiesToList(inst, target_list, 1, search_prop);
+                    }
+                    if (NULL != expanded_target_list && !loaderListHasLayerProperty(&search_prop->info, expanded_target_list)) {
+                        loaderAddLayerPropertiesToList(inst, expanded_target_list, 1, search_prop);
+                    }
+                }
+            }
+        }
+        if (!found_comp) {
+            loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "loaderAddMetaLayer: Failed to find layer name %s component layer "
+                       "%s to activate",
+                       prop->info.layerName, prop->component_layer_names[comp_layer]);
+            found = false;
+        }
+    }
+
+    // Add this layer to the overall target list (not the expanded one)
+    if (found && !loaderListHasLayerProperty(&prop->info, target_list)) {
+        loaderAddLayerPropertiesToList(inst, target_list, 1, prop);
+    }
+
+    return found;
+}
+
+// Search the source_list for any layer with a name that matches the given name and a type
+// that matches the given type.  Add all matching layers to the target_list.
+// Do not add if the found loader_layer_properties is already on the target_list.
+void loaderAddLayerNameToList(const struct loader_instance *inst, const char *name, const enum layer_type_flags type_flags,
+                              const struct loader_layer_list *source_list, struct loader_layer_list *target_list,
+                              struct loader_layer_list *expanded_target_list) {
+    bool found = false;
+    for (uint32_t i = 0; i < source_list->count; i++) {
+        struct loader_layer_properties *source_prop = &source_list->list[i];
+        if (0 == strcmp(source_prop->info.layerName, name) && (source_prop->type_flags & type_flags) == type_flags) {
+            // If not a meta-layer, simply add it.
+            if (0 == (source_prop->type_flags & VK_LAYER_TYPE_FLAG_META_LAYER)) {
+                if (!loaderListHasLayerProperty(&source_prop->info, target_list) &&
+                    VK_SUCCESS == loaderAddLayerPropertiesToList(inst, target_list, 1, source_prop)) {
+                    found = true;
+                }
+                if (!loaderListHasLayerProperty(&source_prop->info, expanded_target_list) &&
+                    VK_SUCCESS == loaderAddLayerPropertiesToList(inst, expanded_target_list, 1, source_prop)) {
+                    found = true;
+                }
+            } else {
+                found = loaderAddMetaLayer(inst, source_prop, target_list, expanded_target_list, source_list);
+            }
+        }
+    }
+    if (!found) {
+        loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, "loaderAddLayerNameToList: Failed to find layer name %s to activate",
+                   name);
+    }
+}
+
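+// Linearly search the extension list for an entry with the given name; returns it or NULL.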
+static VkExtensionProperties *get_extension_property(const char *name, const struct loader_extension_list *list) {
+    for (uint32_t i = 0; i < list->count; i++) {
+        if (strcmp(name, list->list[i].extensionName) == 0) return &list->list[i];
+    }
+    return NULL;
+}
+
+static VkExtensionProperties *get_dev_extension_property(const char *name, const struct loader_device_extension_list *list) {
+    for (uint32_t i = 0; i < list->count; i++) {
+        if (strcmp(name, list->list[i].props.extensionName) == 0) return &list->list[i].props;
+    }
+    return NULL;
+}
+
+// For Instance extensions implemented within the loader (e.g. DEBUG_REPORT),
+// the extension must provide two entry points for the loader to use:
+// - "trampoline" entry point - this is the address returned by GetProcAddr
+//                              and will always do what's necessary to support a
+//                              global call.
+// - "terminator" function    - this function will be put at the end of the
+//                              instance chain and will contain the necessary logic
+//                              to call / process the extension for the appropriate
+//                              ICDs that are available.
+// There is no generic mechanism for including these functions; the references
+// must be placed into the appropriate loader entry points.
+// GetInstanceProcAddr: call extension GetInstanceProcAddr to check for GetProcAddr
+// requests
+// loader_coalesce_extensions(void) - add extension records to the list of global
+//                                    extensions available to the app.
+// instance_disp                    - add function pointer for terminator function
+//                                    to this array.
+// The extension itself should be in a separate file that will be linked directly
+// with the loader.
+VkResult loader_get_icd_loader_instance_extensions(const struct loader_instance *inst, struct loader_icd_tramp_list *icd_tramp_list,
+                                                   struct loader_extension_list *inst_exts) {
+    struct loader_extension_list icd_exts;
+    VkResult res = VK_SUCCESS;
+    char *env_value;
+    bool filter_extensions = true;
+
+    loader_log(inst, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0, "Build ICD instance extension list");
+
+    // Check if a user wants to disable the instance extension filtering behavior
+    env_value = loader_getenv("VK_LOADER_DISABLE_INST_EXT_FILTER", inst);
+    if (NULL != env_value && atoi(env_value) != 0) {
+        filter_extensions = false;
+    }
+    loader_free_getenv(env_value, inst);
+
+    // traverse scanned icd list adding non-duplicate extensions to the list
+    for (uint32_t i = 0; i < icd_tramp_list->count; i++) {
+        res = loader_init_generic_list(inst, (struct loader_generic_list *)&icd_exts, sizeof(VkExtensionProperties));
+        if (VK_SUCCESS != res) {
+            goto out;
+        }
+        res = loader_add_instance_extensions(inst, icd_tramp_list->scanned_list[i].EnumerateInstanceExtensionProperties,
+                                             icd_tramp_list->scanned_list[i].lib_name, &icd_exts);
+        if (VK_SUCCESS == res) {
+            if (filter_extensions) {
+                // Remove any extensions not recognized by the loader
+                for (int32_t j = 0; j < (int32_t)icd_exts.count; j++) {
+                    // See if the extension is in the list of supported extensions
+                    bool found = false;
+                    for (uint32_t k = 0; LOADER_INSTANCE_EXTENSIONS[k] != NULL; k++) {
+                        if (strcmp(icd_exts.list[j].extensionName, LOADER_INSTANCE_EXTENSIONS[k]) == 0) {
+                            found = true;
+                            break;
+                        }
+                    }
+
+                    // If it isn't in the list, remove it
+                    if (!found) {
+                        for (uint32_t k = j + 1; k < icd_exts.count; k++) {
+                            icd_exts.list[k - 1] = icd_exts.list[k];
+                        }
+                        --icd_exts.count;
+                        --j;
+                    }
+                }
+            }
+
+            res = loader_add_to_ext_list(inst, inst_exts, icd_exts.count, icd_exts.list);
+        }
+        loader_destroy_generic_list(inst, (struct loader_generic_list *)&icd_exts);
+        if (VK_SUCCESS != res) {
+            goto out;
+        }
+    }
+
+    // Traverse loader's extensions, adding non-duplicate extensions to the list
+    debug_utils_AddInstanceExtensions(inst, inst_exts);
+
+out:
+    return res;
+}
+
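+// Find the ICD terminator that owns the given VkDevice by comparing dispatch tables across all
+// instances and their logical devices; on a match, *found_dev and (optionally) *icd_index are set.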
+struct loader_icd_term *loader_get_icd_and_device(const VkDevice device, struct loader_device **found_dev, uint32_t *icd_index) {
+    *found_dev = NULL;
+    for (struct loader_instance *inst = loader.instances; inst; inst = inst->next) {
+        uint32_t index = 0;
+        for (struct loader_icd_term *icd_term = inst->icd_terms; icd_term; icd_term = icd_term->next) {
+            for (struct loader_device *dev = icd_term->logical_device_list; dev; dev = dev->next)
+                // Value comparison of device prevents object wrapping by layers
+                if (loader_get_dispatch(dev->icd_device) == loader_get_dispatch(device) ||
+                    (dev->chain_device != VK_NULL_HANDLE &&
+                     loader_get_dispatch(dev->chain_device) == loader_get_dispatch(device))) {
+                    *found_dev = dev;
+                    if (NULL != icd_index) {
+                        *icd_index = index;
+                    }
+                    return icd_term;
+                }
+            index++;
+        }
+    }
+    return NULL;
+}
+
+void loader_destroy_logical_device(const struct loader_instance *inst, struct loader_device *dev,
+                                   const VkAllocationCallbacks *pAllocator) {
+    if (pAllocator) {
+        dev->alloc_callbacks = *pAllocator;
+    }
+    if (NULL != dev->expanded_activated_layer_list.list) {
+        loaderDeactivateLayers(inst, dev, &dev->expanded_activated_layer_list);
+    }
+    if (NULL != dev->app_activated_layer_list.list) {
+        loaderDestroyLayerList(inst, dev, &dev->app_activated_layer_list);
+    }
+    loader_device_heap_free(dev, dev);
+}
+
+struct loader_device *loader_create_logical_device(const struct loader_instance *inst, const VkAllocationCallbacks *pAllocator) {
+    struct loader_device *new_dev;
+#if (DEBUG_DISABLE_APP_ALLOCATORS == 1)
+    {
+#else
+    if (pAllocator) {
+        new_dev = (struct loader_device *)pAllocator->pfnAllocation(pAllocator->pUserData, sizeof(struct loader_device),
+                                                                    sizeof(int *), VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
+    } else {
+#endif
+        new_dev = (struct loader_device *)malloc(sizeof(struct loader_device));
+    }
+
+    if (!new_dev) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "loader_create_logical_device: Failed to alloc struct "
+                   "loader_device");
+        return NULL;
+    }
+
+    memset(new_dev, 0, sizeof(struct loader_device));
+    if (pAllocator) {
+        new_dev->alloc_callbacks = *pAllocator;
+    }
+
+    return new_dev;
+}
+
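+// Prepend the new logical device to the ICD terminator's logical_device_list.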
+void loader_add_logical_device(const struct loader_instance *inst, struct loader_icd_term *icd_term, struct loader_device *dev) {
+    dev->next = icd_term->logical_device_list;
+    icd_term->logical_device_list = dev;
+}
+
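+// Unlink found_dev from the ICD terminator's logical device list and destroy it.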
+void loader_remove_logical_device(const struct loader_instance *inst, struct loader_icd_term *icd_term,
+                                  struct loader_device *found_dev, const VkAllocationCallbacks *pAllocator) {
+    struct loader_device *dev, *prev_dev;
+
+    if (!icd_term || !found_dev) return;
+
+    prev_dev = NULL;
+    dev = icd_term->logical_device_list;
+    while (dev && dev != found_dev) {
+        prev_dev = dev;
+        dev = dev->next;
+    }
+
+    if (prev_dev)
+        prev_dev->next = found_dev->next;
+    else
+        icd_term->logical_device_list = found_dev->next;
+    loader_destroy_logical_device(inst, found_dev, pAllocator);
+}
+
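+// Destroy every logical device owned by this ICD terminator, decrement the instance's total
+// ICD count, and free the terminator itself.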
+static void loader_icd_destroy(struct loader_instance *ptr_inst, struct loader_icd_term *icd_term,
+                               const VkAllocationCallbacks *pAllocator) {
+    ptr_inst->total_icd_count--;
+    for (struct loader_device *dev = icd_term->logical_device_list; dev;) {
+        struct loader_device *next_dev = dev->next;
+        loader_destroy_logical_device(ptr_inst, dev, pAllocator);
+        dev = next_dev;
+    }
+
+    loader_instance_heap_free(ptr_inst, icd_term);
+}
+
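+// Allocate and zero a new ICD terminator structure.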
+static struct loader_icd_term *loader_icd_create(const struct loader_instance *inst) {
+    struct loader_icd_term *icd_term;
+
+    icd_term = loader_instance_heap_alloc(inst, sizeof(struct loader_icd_term), VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+    if (!icd_term) {
+        return NULL;
+    }
+
+    memset(icd_term, 0, sizeof(struct loader_icd_term));
+
+    return icd_term;
+}
+
+static struct loader_icd_term *loader_icd_add(struct loader_instance *ptr_inst, const struct loader_scanned_icd *scanned_icd) {
+    struct loader_icd_term *icd_term;
+
+    icd_term = loader_icd_create(ptr_inst);
+    if (!icd_term) {
+        return NULL;
+    }
+
+    icd_term->scanned_icd = scanned_icd;
+    icd_term->this_instance = ptr_inst;
+
+    // Prepend to the list
+    icd_term->next = ptr_inst->icd_terms;
+    ptr_inst->icd_terms = icd_term;
+    ptr_inst->total_icd_count++;
+
+    return icd_term;
+}
+
+// Determine the ICD interface version to use.
+//     @param fp_negotiate_icd_version  The ICD's vk_icdNegotiateLoaderICDInterfaceVersion entry point, or NULL
+//     @param pVersion Output parameter indicating which version to use or 0 if
+//            the negotiation API is not supported by the ICD
+//     @return  bool indicating true if the selected interface version is supported
+//            by the loader, false indicates the version is not supported
+bool loader_get_icd_interface_version(PFN_vkNegotiateLoaderICDInterfaceVersion fp_negotiate_icd_version, uint32_t *pVersion) {
+    if (fp_negotiate_icd_version == NULL) {
+        // ICD does not support the negotiation API; it supports version 0 or 1.
+        // Calling code must determine whether it is version 0 or 1.
+        *pVersion = 0;
+    } else {
+        // ICD supports the negotiation API, so call it with the loader's
+        // latest version supported
+        *pVersion = CURRENT_LOADER_ICD_INTERFACE_VERSION;
+        VkResult result = fp_negotiate_icd_version(pVersion);
+
+        if (result == VK_ERROR_INCOMPATIBLE_DRIVER) {
+            // ICD no longer supports the loader's latest interface version so
+            // fail loading the ICD
+            return false;
+        }
+    }
+
+#if MIN_SUPPORTED_LOADER_ICD_INTERFACE_VERSION > 0
+    if (*pVersion < MIN_SUPPORTED_LOADER_ICD_INTERFACE_VERSION) {
+        // Loader no longer supports the ICD's latest interface version so fail
+        // loading the ICD
+        return false;
+    }
+#endif
+    return true;
+}
+
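+// Close each scanned ICD's library handle, free its lib_name, and release the scanned list itself.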
+void loader_scanned_icd_clear(const struct loader_instance *inst, struct loader_icd_tramp_list *icd_tramp_list) {
+    if (0 != icd_tramp_list->capacity) {
+        for (uint32_t i = 0; i < icd_tramp_list->count; i++) {
+            loader_platform_close_library(icd_tramp_list->scanned_list[i].handle);
+            loader_instance_heap_free(inst, icd_tramp_list->scanned_list[i].lib_name);
+        }
+        loader_instance_heap_free(inst, icd_tramp_list->scanned_list);
+        icd_tramp_list->capacity = 0;
+        icd_tramp_list->count = 0;
+        icd_tramp_list->scanned_list = NULL;
+    }
+}
+
+static VkResult loader_scanned_icd_init(const struct loader_instance *inst, struct loader_icd_tramp_list *icd_tramp_list) {
+    VkResult err = VK_SUCCESS;
+    loader_scanned_icd_clear(inst, icd_tramp_list);
+    icd_tramp_list->capacity = 8 * sizeof(struct loader_scanned_icd);
+    icd_tramp_list->scanned_list = loader_instance_heap_alloc(inst, icd_tramp_list->capacity, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+    if (NULL == icd_tramp_list->scanned_list) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "loader_scanned_icd_init: Failed to allocate space "
+                   "for the scanned ICD list");
+        err = VK_ERROR_OUT_OF_HOST_MEMORY;
+    }
+    return err;
+}
+
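+// Open the ICD library given by filename, negotiate an interface version, resolve its required
+// entry points, and append a record for it to the trampoline list.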
+static VkResult loader_scanned_icd_add(const struct loader_instance *inst, struct loader_icd_tramp_list *icd_tramp_list,
+                                       const char *filename, uint32_t api_version) {
+    loader_platform_dl_handle handle;
+    PFN_vkCreateInstance fp_create_inst;
+    PFN_vkEnumerateInstanceExtensionProperties fp_get_inst_ext_props;
+    PFN_vkGetInstanceProcAddr fp_get_proc_addr;
+    PFN_GetPhysicalDeviceProcAddr fp_get_phys_dev_proc_addr = NULL;
+    PFN_vkNegotiateLoaderICDInterfaceVersion fp_negotiate_icd_version;
+    struct loader_scanned_icd *new_scanned_icd;
+    uint32_t interface_vers;
+    VkResult res = VK_SUCCESS;
+
+    // TODO implement smarter opening/closing of libraries. For now this
+    // function leaves libraries open and loader_scanned_icd_clear closes them.
+    handle = loader_platform_open_library(filename);
+    if (NULL == handle) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, loader_platform_open_library_error(filename));
+        goto out;
+    }
+
+    // Get and settle on an ICD interface version
+    fp_negotiate_icd_version = loader_platform_get_proc_address(handle, "vk_icdNegotiateLoaderICDInterfaceVersion");
+
+    if (!loader_get_icd_interface_version(fp_negotiate_icd_version, &interface_vers)) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "loader_scanned_icd_add: ICD %s doesn't support interface"
+                   " version compatible with loader, skip this ICD.",
+                   filename);
+        goto out;
+    }
+
+    fp_get_proc_addr = loader_platform_get_proc_address(handle, "vk_icdGetInstanceProcAddr");
+    if (NULL == fp_get_proc_addr) {
+        assert(interface_vers == 0);
+        // Use deprecated interface from version 0
+        fp_get_proc_addr = loader_platform_get_proc_address(handle, "vkGetInstanceProcAddr");
+        if (NULL == fp_get_proc_addr) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "loader_scanned_icd_add: Attempt to retrieve either "
+                       "\'vkGetInstanceProcAddr\' or "
+                       "\'vk_icdGetInstanceProcAddr\' from ICD %s failed.",
+                       filename);
+            goto out;
+        } else {
+            loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "loader_scanned_icd_add: Using deprecated ICD "
+                       "interface of \'vkGetInstanceProcAddr\' instead of "
+                       "\'vk_icdGetInstanceProcAddr\' for ICD %s",
+                       filename);
+        }
+        fp_create_inst = loader_platform_get_proc_address(handle, "vkCreateInstance");
+        if (NULL == fp_create_inst) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "loader_scanned_icd_add:  Failed querying "
+                       "\'vkCreateInstance\' via dlsym/loadlibrary for "
+                       "ICD %s",
+                       filename);
+            goto out;
+        }
+        fp_get_inst_ext_props = loader_platform_get_proc_address(handle, "vkEnumerateInstanceExtensionProperties");
+        if (NULL == fp_get_inst_ext_props) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "loader_scanned_icd_add: Could not get \'vkEnumerate"
+                       "InstanceExtensionProperties\' via dlsym/loadlibrary "
+                       "for ICD %s",
+                       filename);
+            goto out;
+        }
+    } else {
+        // Use newer interface version 1 or later
+        if (interface_vers == 0) {
+            interface_vers = 1;
+        }
+
+        fp_create_inst = (PFN_vkCreateInstance)fp_get_proc_addr(NULL, "vkCreateInstance");
+        if (NULL == fp_create_inst) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "loader_scanned_icd_add: Could not get "
+                       "\'vkCreateInstance\' via \'vk_icdGetInstanceProcAddr\'"
+                       " for ICD %s",
+                       filename);
+            goto out;
+        }
+        fp_get_inst_ext_props =
+            (PFN_vkEnumerateInstanceExtensionProperties)fp_get_proc_addr(NULL, "vkEnumerateInstanceExtensionProperties");
+        if (NULL == fp_get_inst_ext_props) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "loader_scanned_icd_add: Could not get \'vkEnumerate"
+                       "InstanceExtensionProperties\' via "
+                       "\'vk_icdGetInstanceProcAddr\' for ICD %s",
+                       filename);
+            goto out;
+        }
+        fp_get_phys_dev_proc_addr = loader_platform_get_proc_address(handle, "vk_icdGetPhysicalDeviceProcAddr");
+    }
+
+    // check for enough capacity
+    if ((icd_tramp_list->count * sizeof(struct loader_scanned_icd)) >= icd_tramp_list->capacity) {
+        void *new_ptr = loader_instance_heap_realloc(inst, icd_tramp_list->scanned_list, icd_tramp_list->capacity,
+                                                     icd_tramp_list->capacity * 2, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+        if (NULL == new_ptr) {
+            res = VK_ERROR_OUT_OF_HOST_MEMORY;
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "loader_scanned_icd_add: Realloc failed on icd library list for ICD %s", filename);
+            goto out;
+        }
+        icd_tramp_list->scanned_list = new_ptr;
+
+        // double capacity
+        icd_tramp_list->capacity *= 2;
+    }
+
+    new_scanned_icd = &(icd_tramp_list->scanned_list[icd_tramp_list->count]);
+    new_scanned_icd->handle = handle;
+    new_scanned_icd->api_version = api_version;
+    new_scanned_icd->GetInstanceProcAddr = fp_get_proc_addr;
+    new_scanned_icd->GetPhysicalDeviceProcAddr = fp_get_phys_dev_proc_addr;
+    new_scanned_icd->EnumerateInstanceExtensionProperties = fp_get_inst_ext_props;
+    new_scanned_icd->CreateInstance = fp_create_inst;
+    new_scanned_icd->interface_version = interface_vers;
+
+    new_scanned_icd->lib_name = (char *)loader_instance_heap_alloc(inst, strlen(filename) + 1, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+    if (NULL == new_scanned_icd->lib_name) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, "loader_scanned_icd_add: Out of memory can't add ICD %s", filename);
+        res = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+    strcpy(new_scanned_icd->lib_name, filename);
+    icd_tramp_list->count++;
+
+out:
+
+    return res;
+}
+
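+// Parse the comma-separated VK_LOADER_DEBUG environment variable (e.g. "error,warn,info")
+// into the global g_loader_debug and g_loader_log_msgs masks.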
+static void loader_debug_init(void) {
+    char *env, *orig;
+
+    if (g_loader_debug > 0) return;
+
+    g_loader_debug = 0;
+
+    // Parse comma-separated debug options
+    orig = env = loader_getenv("VK_LOADER_DEBUG", NULL);
+    while (env) {
+        char *p = strchr(env, ',');
+        size_t len;
+
+        if (p)
+            len = p - env;
+        else
+            len = strlen(env);
+
+        if (len > 0) {
+            if (strncmp(env, "all", len) == 0) {
+                g_loader_debug = ~0u;
+                g_loader_log_msgs = ~0u;
+            } else if (strncmp(env, "warn", len) == 0) {
+                g_loader_debug |= LOADER_WARN_BIT;
+                g_loader_log_msgs |= VK_DEBUG_REPORT_WARNING_BIT_EXT;
+            } else if (strncmp(env, "info", len) == 0) {
+                g_loader_debug |= LOADER_INFO_BIT;
+                g_loader_log_msgs |= VK_DEBUG_REPORT_INFORMATION_BIT_EXT;
+            } else if (strncmp(env, "perf", len) == 0) {
+                g_loader_debug |= LOADER_PERF_BIT;
+                g_loader_log_msgs |= VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT;
+            } else if (strncmp(env, "error", len) == 0) {
+                g_loader_debug |= LOADER_ERROR_BIT;
+                g_loader_log_msgs |= VK_DEBUG_REPORT_ERROR_BIT_EXT;
+            } else if (strncmp(env, "debug", len) == 0) {
+                g_loader_debug |= LOADER_DEBUG_BIT;
+                g_loader_log_msgs |= VK_DEBUG_REPORT_DEBUG_BIT_EXT;
+            }
+        }
+
+        if (!p) break;
+
+        env = p + 1;
+    }
+
+    loader_free_getenv(orig, NULL);
+}
+
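+// One-time loader initialization: create the global mutexes, initialize logging, and hook
+// cJSON up to the loader's allocation callbacks.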
+void loader_initialize(void) {
+    // initialize mutexes
+    loader_platform_thread_create_mutex(&loader_lock);
+    loader_platform_thread_create_mutex(&loader_json_lock);
+
+    // initialize logging
+    loader_debug_init();
+
+    // initialize cJSON to use the loader's allocation callbacks
+    cJSON_Hooks alloc_fns = {
+        .malloc_fn = loader_instance_tls_heap_alloc, .free_fn = loader_instance_tls_heap_free,
+    };
+    cJSON_InitHooks(&alloc_fns);
+
+#if defined(_WIN32)
+    // This is needed to ensure that newer APIs are available right away
+    // and not after the first call that has been statically linked
+    LoadLibrary("gdi32.dll");
+#endif
+}
+
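+// Growable list of data file names: count entries in use, alloc_count entries allocated.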
+struct loader_data_files {
+    uint32_t count;
+    uint32_t alloc_count;
+    char **filename_list;
+};
+
+void loader_release() {
+    // release mutexes
+    loader_platform_thread_delete_mutex(&loader_lock);
+    loader_platform_thread_delete_mutex(&loader_json_lock);
+}
+
+// Get the next file or dirname from a PATH_SEPARATOR-delimited string list or registry key path.
+//
+// \returns
+// A pointer to the first char of the next path in the list, or a pointer to the
+// terminating NUL if there is no separator left (NULL only when the input path is NULL).
+// Note: the input string is modified in some cases. PASS IN A COPY!
+static char *loader_get_next_path(char *path) {
+    uint32_t len;
+    char *next;
+
+    if (path == NULL) return NULL;
+    next = strchr(path, PATH_SEPARATOR);
+    if (next == NULL) {
+        len = (uint32_t)strlen(path);
+        next = path + len;
+    } else {
+        *next = '\0';
+        next++;
+    }
+
+    return next;
+}
+
+// Given a path which is absolute or relative, expand the path if relative or
+// leave the path unmodified if absolute. The base path to prepend to relative
+// paths is given in rel_base.
+//
+// @return - A string in out_fullpath of the full absolute path
+static void loader_expand_path(const char *path, const char *rel_base, size_t out_size, char *out_fullpath) {
+    if (loader_platform_is_path_absolute(path)) {
+        // do not prepend a base to an absolute path
+        rel_base = "";
+    }
+
+    loader_platform_combine_path(out_fullpath, out_size, rel_base, path, NULL);
+}
+
+// Given a filename (file) and a list of paths (dirs), try to find an existing
+// file in the paths.  If the filename is already a path, the given paths are not searched.
+//
+// @return - A string in out_fullpath of either the full path or file.
+static void loader_get_fullpath(const char *file, const char *dirs, size_t out_size, char *out_fullpath) {
+    if (!loader_platform_is_path(file) && *dirs) {
+        char *dirs_copy, *dir, *next_dir;
+
+        dirs_copy = loader_stack_alloc(strlen(dirs) + 1);
+        strcpy(dirs_copy, dirs);
+
+        // find if file exists after prepending paths in given list
+        for (dir = dirs_copy; *dir && (next_dir = loader_get_next_path(dir)); dir = next_dir) {
+            loader_platform_combine_path(out_fullpath, out_size, dir, file, NULL);
+            if (loader_platform_file_exists(out_fullpath)) {
+                return;
+            }
+        }
+    }
+
+    (void)snprintf(out_fullpath, out_size, "%s", file);
+}
+
+// Read a JSON file into a buffer and parse it.
+//
+// @return -  A VkResult; on VK_SUCCESS, *json points to a cJSON object representing the
+//            JSON parse tree, which should be freed by the caller.
+static VkResult loader_get_json(const struct loader_instance *inst, const char *filename, cJSON **json) {
+    FILE *file = NULL;
+    char *json_buf;
+    size_t len;
+    VkResult res = VK_SUCCESS;
+
+    if (NULL == json) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, "loader_get_json: Received invalid JSON file");
+        res = VK_ERROR_INITIALIZATION_FAILED;
+        goto out;
+    }
+
+    *json = NULL;
+
+    file = fopen(filename, "rb");
+    if (!file) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, "loader_get_json: Failed to open JSON file %s", filename);
+        res = VK_ERROR_INITIALIZATION_FAILED;
+        goto out;
+    }
+    fseek(file, 0, SEEK_END);
+    len = ftell(file);
+    fseek(file, 0, SEEK_SET);
+    json_buf = (char *)loader_stack_alloc(len + 1);
+    if (json_buf == NULL) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "loader_get_json: Failed to allocate space for "
+                   "JSON file %s buffer of length %zu",
+                   filename, len);
+        res = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+    if (fread(json_buf, sizeof(char), len, file) != len) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, "loader_get_json: Failed to read JSON file %s.", filename);
+        res = VK_ERROR_INITIALIZATION_FAILED;
+        goto out;
+    }
+    json_buf[len] = '\0';
+
+    // Parse text from file
+    *json = cJSON_Parse(json_buf);
+    if (*json == NULL) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "loader_get_json: Failed to parse JSON file %s, "
+                   "this is usually because something ran out of "
+                   "memory.",
+                   filename);
+        res = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+
+out:
+    if (NULL != file) {
+        fclose(file);
+    }
+
+    return res;
+}
+
+const char *std_validation_str = "VK_LAYER_LUNARG_standard_validation";
+
+// Adds the legacy VK_LAYER_LUNARG_standard_validation as a meta-layer if it is
+// not already found in the list.  This is usually an indication that a
+// newer loader is being used with an older layer set.
+static bool loaderAddLegacyStandardValidationLayer(const struct loader_instance *inst,
+                                                   struct loader_layer_list *layer_instance_list) {
+    uint32_t i;
+    bool success = true;
+    struct loader_layer_properties *props = loaderGetNextLayerPropertySlot(inst, layer_instance_list);
+    const char std_validation_names[5][VK_MAX_EXTENSION_NAME_SIZE] = {
+        "VK_LAYER_GOOGLE_threading", "VK_LAYER_LUNARG_parameter_validation", "VK_LAYER_LUNARG_object_tracker",
+        "VK_LAYER_LUNARG_core_validation", "VK_LAYER_GOOGLE_unique_objects"};
+    uint32_t layer_count = sizeof(std_validation_names) / sizeof(std_validation_names[0]);
+
+    loader_log(inst, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0,
+               "Adding VK_LAYER_LUNARG_standard_validation using the loader legacy path.  This is"
+               " not an error.");
+
+    if (NULL == props) {
+        goto out;
+    }
+
+    memset(props, 0, sizeof(struct loader_layer_properties));
+    props->type_flags = VK_LAYER_TYPE_FLAG_INSTANCE_LAYER | VK_LAYER_TYPE_FLAG_EXPLICIT_LAYER | VK_LAYER_TYPE_FLAG_META_LAYER;
+    strncpy(props->info.description, "LunarG Standard Validation Layer", sizeof(props->info.description));
+    props->info.implementationVersion = 1;
+    strncpy(props->info.layerName, std_validation_str, sizeof(props->info.layerName));
+    props->info.specVersion = VK_MAKE_VERSION(1, 0, VK_HEADER_VERSION);
+
+    props->component_layer_names =
+        loader_instance_heap_alloc(inst, sizeof(char[MAX_STRING_SIZE]) * layer_count, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+    if (NULL == props->component_layer_names) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "Failed to allocate space for legacy VK_LAYER_LUNARG_standard_validation"
+                   " meta-layer component_layers information.");
+        success = false;
+        goto out;
+    }
+    for (i = 0; i < layer_count; i++) {
+        strncpy(props->component_layer_names[i], std_validation_names[i], MAX_STRING_SIZE - 1);
+        props->component_layer_names[i][MAX_STRING_SIZE - 1] = '\0';
+    }
+    // Record how many component layers were filled in above so the meta-layer expands correctly.
+    props->num_component_layers = layer_count;
+
+out:
+
+    if (!success && NULL != props && NULL != props->component_layer_names) {
+        loader_instance_heap_free(inst, props->component_layer_names);
+        props->component_layer_names = NULL;
+    }
+
+    return success;
+}
+
+// Verify that all component layers in a meta-layer are valid.
+static bool verifyMetaLayerComponentLayers(const struct loader_instance *inst, struct loader_layer_properties *prop,
+                                           struct loader_layer_list *instance_layers) {
+    bool success = true;
+    const uint32_t expected_major = VK_VERSION_MAJOR(prop->info.specVersion);
+    const uint32_t expected_minor = VK_VERSION_MINOR(prop->info.specVersion);
+
+    for (uint32_t comp_layer = 0; comp_layer < prop->num_component_layers; comp_layer++) {
+        if (!loaderFindLayerNameInList(prop->component_layer_names[comp_layer], instance_layers)) {
+            if (NULL != inst) {
+                loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                           "verifyMetaLayerComponentLayers: Meta-layer %s can't find component layer %s at index %d."
+                           "  Skipping this layer.",
+                           prop->info.layerName, prop->component_layer_names[comp_layer], comp_layer);
+            }
+            success = false;
+            break;
+        } else {
+            struct loader_layer_properties *comp_prop =
+                loaderFindLayerProperty(prop->component_layer_names[comp_layer], instance_layers);
+            if (comp_prop == NULL) {
+                if (NULL != inst) {
+                    loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                               "verifyMetaLayerComponentLayers: Meta-layer %s can't find property for component layer "
+                               "%s at index %d.  Skipping this layer.",
+                               prop->info.layerName, prop->component_layer_names[comp_layer], comp_layer);
+                }
+                success = false;
+                break;
+            }
+
+            // Check the version of each layer, they need to at least match MAJOR and MINOR
+            uint32_t cur_major = VK_VERSION_MAJOR(comp_prop->info.specVersion);
+            uint32_t cur_minor = VK_VERSION_MINOR(comp_prop->info.specVersion);
+            if (cur_major != expected_major || cur_minor != expected_minor) {
+                if (NULL != inst) {
+                    loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                               "verifyMetaLayerComponentLayers: Meta-layer uses API version %d.%d, but component "
+                               "layer %d uses API version %d.%d.  Skipping this layer.",
+                               expected_major, expected_minor, comp_layer, cur_major, cur_minor);
+                }
+                success = false;
+                break;
+            }
+
+            // Make sure the layer isn't using its own name
+            if (!strcmp(prop->info.layerName, prop->component_layer_names[comp_layer])) {
+                if (NULL != inst) {
+                    loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                               "verifyMetaLayerComponentLayers: Meta-layer %s lists itself in its component layer "
+                               "list at index %d.  Skipping this layer.",
+                               prop->info.layerName, comp_layer);
+                }
+                success = false;
+                break;
+            }
+            if (comp_prop->type_flags & VK_LAYER_TYPE_FLAG_META_LAYER) {
+                if (NULL != inst) {
+                    loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                               "verifyMetaLayerComponentLayers: Adding meta-layer %s which also contains meta-layer %s",
+                               prop->info.layerName, comp_prop->info.layerName);
+                }
+
+                // Make sure if the layer is using a meta-layer in its component list that we also verify that.
+                if (!verifyMetaLayerComponentLayers(inst, comp_prop, instance_layers)) {
+                    if (NULL != inst) {
+                        loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                                   "Meta-layer %s component layer %s cannot find all component layers."
+                                   "  Skipping this layer.",
+                                   prop->info.layerName, prop->component_layer_names[comp_layer]);
+                    }
+                    success = false;
+                    break;
+                }
+            }
+
+            // Add any instance and device extensions from component layers to this layer
+            // list, so that anyone querying extensions will only need to look at the meta-layer
+            for (uint32_t ext = 0; ext < comp_prop->instance_extension_list.count; ext++) {
+                if (NULL != inst) {
+                    loader_log(inst, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0,
+                               "Meta-layer %s component layer %s adding instance extension %s", prop->info.layerName,
+                               prop->component_layer_names[comp_layer], comp_prop->instance_extension_list.list[ext].extensionName);
+                }
+                if (!has_vk_extension_property(&comp_prop->instance_extension_list.list[ext], &prop->instance_extension_list)) {
+                    loader_add_to_ext_list(inst, &prop->instance_extension_list, 1, &comp_prop->instance_extension_list.list[ext]);
+                }
+            }
+
+            for (uint32_t ext = 0; ext < comp_prop->device_extension_list.count; ext++) {
+                if (NULL != inst) {
+                    loader_log(inst, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0,
+                               "Meta-layer %s component layer %s adding device extension %s", prop->info.layerName,
+                               prop->component_layer_names[comp_layer],
+                               comp_prop->device_extension_list.list[ext].props.extensionName);
+                }
+                if (!has_vk_dev_ext_property(&comp_prop->device_extension_list.list[ext].props, &prop->device_extension_list)) {
+                    loader_add_to_dev_ext_list(inst, &prop->device_extension_list,
+                                               &comp_prop->device_extension_list.list[ext].props, 0, NULL);
+                }
+            }
+        }
+    }
+    if (success) {
+        loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0, "Meta-layer %s all %d component layers appear to be valid.",
+                   prop->info.layerName, prop->num_component_layers);
+    }
+    return success;
+}
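+
+// Illustrative note, not part of the loader code: verifyMetaLayerComponentLayers
+// above validates the "component_layers" array of a meta-layer manifest.  A
+// hypothetical manifest fragment that would pass these checks, assuming both
+// component layers are present in instance_layers and share the meta-layer's
+// major.minor API version, could look like:
+//
+//   "layer": {
+//       "name": "VK_LAYER_EXAMPLE_meta",
+//       "api_version": "1.1.101",
+//       "component_layers": [
+//           "VK_LAYER_EXAMPLE_first",
+//           "VK_LAYER_EXAMPLE_second"
+//       ]
+//   }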
+
+// Verify that all meta-layers in a layer list are valid.
+static void VerifyAllMetaLayers(struct loader_instance *inst, struct loader_layer_list *instance_layers,
+                                bool *override_layer_present) {
+    *override_layer_present = false;
+    for (int32_t i = 0; i < (int32_t)instance_layers->count; i++) {
+        struct loader_layer_properties *prop = &instance_layers->list[i];
+
+        // If this is a meta-layer, make sure it is valid
+        if ((prop->type_flags & VK_LAYER_TYPE_FLAG_META_LAYER) && !verifyMetaLayerComponentLayers(inst, prop, instance_layers)) {
+            if (NULL != inst) {
+                loader_log(inst, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0,
+                           "Removing meta-layer %s from instance layer list since it appears invalid.", prop->info.layerName);
+            }
+
+            // Delete the component layers
+            loader_instance_heap_free(inst, prop->component_layer_names);
+            if (prop->blacklist_layer_names != NULL) {
+                loader_instance_heap_free(inst, prop->blacklist_layer_names);
+            }
+
+            // Remove the current invalid meta-layer from the layer list.  Use memmove since we are
+            // overlapping the source and destination addresses.
+            memmove(&instance_layers->list[i], &instance_layers->list[i + 1],
+                    sizeof(struct loader_layer_properties) * (instance_layers->count - 1 - i));
+
+            // Decrement the count (because we now have one less) and decrement the loop index since we need to
+            // re-check this index.
+            instance_layers->count--;
+            i--;
+        } else if (prop->is_override && loaderImplicitLayerIsEnabled(inst, prop)) {
+            *override_layer_present = true;
+        }
+    }
+}
+
+// This structure is used to store the json file version
+// in a more manageable way.
+typedef struct {
+    uint16_t major;
+    uint16_t minor;
+    uint16_t patch;
+} layer_json_version;
+
+static inline bool layer_json_supports_pre_instance_tag(const layer_json_version *layer_json) {
+    // Supported versions started in 1.1.2, so anything newer
+    return layer_json->major > 1 || layer_json->minor > 1 || (layer_json->minor == 1 && layer_json->patch > 1);
+}
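+
+// Illustrative note, not part of the loader code: for the manifest versions this
+// file otherwise accepts, the predicate above is false for 1.0.0, 1.0.1, 1.1.0,
+// and 1.1.1, and true for 1.1.2.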
+
+static VkResult loaderReadLayerJson(const struct loader_instance *inst, struct loader_layer_list *layer_instance_list,
+                                    cJSON *layer_node, layer_json_version version, cJSON *item, cJSON *disable_environment,
+                                    bool is_implicit, char *filename) {
+    char *temp;
+    char *name, *type, *library_path_str, *api_version;
+    char *implementation_version, *description;
+    cJSON *ext_item;
+    cJSON *library_path;
+    cJSON *component_layers;
+    cJSON *override_paths;
+    cJSON *blacklisted_layers;
+    VkExtensionProperties ext_prop;
+    VkResult result = VK_ERROR_INITIALIZATION_FAILED;
+    struct loader_layer_properties *props = NULL;
+    int i, j;
+
+// The following are required in the "layer" object:
+// (required) "name"
+// (required) "type"
+// (required) "library_path"
+// (required) "api_version"
+// (required) "implementation_version"
+// (required) "description"
+// (required for implicit layers) "disable_environment"
+#define GET_JSON_OBJECT(node, var)                                         \
+    {                                                                      \
+        var = cJSON_GetObjectItem(node, #var);                             \
+        if (var == NULL) {                                                 \
+            layer_node = layer_node->next;                                 \
+            loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,           \
+                       "Didn't find required layer object %s in manifest " \
+                       "JSON file, skipping this layer",                   \
+                       #var);                                              \
+            goto out;                                                      \
+        }                                                                  \
+    }
+#define GET_JSON_ITEM(node, var)                                               \
+    {                                                                          \
+        item = cJSON_GetObjectItem(node, #var);                                \
+        if (item == NULL) {                                                    \
+            layer_node = layer_node->next;                                     \
+            loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,               \
+                       "Didn't find required layer value %s in manifest JSON " \
+                       "file, skipping this layer",                            \
+                       #var);                                                  \
+            goto out;                                                          \
+        }                                                                      \
+        temp = cJSON_Print(item);                                              \
+        if (temp == NULL) {                                                    \
+            layer_node = layer_node->next;                                     \
+            loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,               \
+                       "Problem accessing layer value %s in manifest JSON "    \
+                       "file, skipping this layer",                            \
+                       #var);                                                  \
+            result = VK_ERROR_OUT_OF_HOST_MEMORY;                              \
+            goto out;                                                          \
+        }                                                                      \
+        temp[strlen(temp) - 1] = '\0';                                         \
+        var = loader_stack_alloc(strlen(temp) + 1);                            \
+        strcpy(var, &temp[1]);                                                 \
+        cJSON_Free(temp);                                                      \
+    }
+    GET_JSON_ITEM(layer_node, name)
+    GET_JSON_ITEM(layer_node, type)
+    GET_JSON_ITEM(layer_node, api_version)
+    GET_JSON_ITEM(layer_node, implementation_version)
+    GET_JSON_ITEM(layer_node, description)
+
+    // Add list entry
+    if (!strcmp(type, "DEVICE")) {
+        loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, "Device layers are deprecated skipping this layer");
+        layer_node = layer_node->next;
+        goto out;
+    }
+
+    // Allow either GLOBAL or INSTANCE type interchangeably to handle
+    // layers that must work with older loaders
+    if (!strcmp(type, "INSTANCE") || !strcmp(type, "GLOBAL")) {
+        if (layer_instance_list == NULL) {
+            layer_node = layer_node->next;
+            goto out;
+        }
+        props = loaderGetNextLayerPropertySlot(inst, layer_instance_list);
+        if (NULL == props) {
+            // Error already triggered in loaderGetNextLayerPropertySlot.
+            result = VK_ERROR_OUT_OF_HOST_MEMORY;
+            goto out;
+        }
+        props->type_flags = VK_LAYER_TYPE_FLAG_INSTANCE_LAYER;
+        if (!is_implicit) {
+            props->type_flags |= VK_LAYER_TYPE_FLAG_EXPLICIT_LAYER;
+        }
+    } else {
+        layer_node = layer_node->next;
+        goto out;
+    }
+
+    // Expiration date for override layer.  This field started with JSON file version
+    // 1.1.2 and is completely optional, so no hard check is put in place.
+    if (!strcmp(name, VK_OVERRIDE_LAYER_NAME)) {
+        cJSON *expiration;
+
+        if (version.major < 1 || (version.major == 1 && (version.minor < 1 || (version.minor == 1 && version.patch < 2)))) {
+            loader_log(
+                inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                "Override layer expiration date not added until version 1.1.2.  Please update JSON file version appropriately.");
+        }
+
+        props->is_override = true;
+        expiration = cJSON_GetObjectItem(layer_node, "expiration_date");
+        if (NULL != expiration) {
+            char date_copy[32];
+            uint8_t cur_item = 0;
+
+            // Get the string for the current item
+            temp = cJSON_Print(expiration);
+            if (temp == NULL) {
+                loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                           "Problem accessing layer value 'expiration_date' in manifest JSON file, skipping this layer");
+                result = VK_ERROR_OUT_OF_HOST_MEMORY;
+                goto out;
+            }
+            temp[strlen(temp) - 1] = '\0';
+            strncpy(date_copy, &temp[1], sizeof(date_copy) - 1);
+            date_copy[sizeof(date_copy) - 1] = '\0';
+            cJSON_Free(temp);
+
+            if (strlen(date_copy) == 16) {
+                char *cur_start = &date_copy[0];
+                char *next_dash = strchr(date_copy, '-');
+                if (NULL != next_dash) {
+                    while (cur_item < 5 && strlen(cur_start)) {
+                        if (next_dash != NULL) {
+                            *next_dash = '\0';
+                        }
+                        switch (cur_item) {
+                            case 0:  // Year
+                                props->expiration.year = atoi(cur_start);
+                                break;
+                            case 1:  // Month
+                                props->expiration.month = atoi(cur_start);
+                                break;
+                            case 2:  // Day
+                                props->expiration.day = atoi(cur_start);
+                                break;
+                            case 3:  // Hour
+                                props->expiration.hour = atoi(cur_start);
+                                break;
+                            case 4:  // Minute
+                                props->expiration.minute = atoi(cur_start);
+                                props->has_expiration = true;
+                                break;
+                            default:  // Ignore
+                                break;
+                        }
+                        if (next_dash != NULL) {
+                            cur_start = next_dash + 1;
+                            next_dash = strchr(cur_start, '-');
+                        }
+                        cur_item++;
+                    }
+                }
+            }
+        }
+    }
+
+    // Library path no longer required unless component_layers is also not defined
+    library_path = cJSON_GetObjectItem(layer_node, "library_path");
+    component_layers = cJSON_GetObjectItem(layer_node, "component_layers");
+    if (NULL != library_path) {
+        if (NULL != component_layers) {
+            loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "Indicating meta-layer-specific component_layers, but also "
+                       "defining layer library path.  Both are not compatible, so "
+                       "skipping this layer");
+            goto out;
+        }
+        props->num_component_layers = 0;
+        props->component_layer_names = NULL;
+
+        temp = cJSON_Print(library_path);
+        if (NULL == temp) {
+            layer_node = layer_node->next;
+            loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "Problem accessing layer value library_path in manifest JSON "
+                       "file, skipping this layer");
+            result = VK_ERROR_OUT_OF_HOST_MEMORY;
+            goto out;
+        }
+        temp[strlen(temp) - 1] = '\0';
+        library_path_str = loader_stack_alloc(strlen(temp) + 1);
+        strcpy(library_path_str, &temp[1]);
+        cJSON_Free(temp);
+
+        char *fullpath = props->lib_name;
+        char *rel_base;
+        if (NULL != library_path_str) {
+            if (loader_platform_is_path(library_path_str)) {
+                // A relative or absolute path
+                char *name_copy = loader_stack_alloc(strlen(filename) + 1);
+                strcpy(name_copy, filename);
+                rel_base = loader_platform_dirname(name_copy);
+                loader_expand_path(library_path_str, rel_base, MAX_STRING_SIZE, fullpath);
+            } else {
+// A filename which is assumed to be in a system directory
+#if defined(DEFAULT_VK_LAYERS_PATH)
+                loader_get_fullpath(library_path_str, DEFAULT_VK_LAYERS_PATH, MAX_STRING_SIZE, fullpath);
+#else
+                loader_get_fullpath(library_path_str, "", MAX_STRING_SIZE, fullpath);
+#endif
+            }
+        }
+    } else if (NULL != component_layers) {
+        if (version.major == 1 && (version.minor < 1 || version.patch < 1)) {
+            loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "Indicating meta-layer-specific component_layers, but using older "
+                       "JSON file version.");
+        }
+        int count = cJSON_GetArraySize(component_layers);
+        props->num_component_layers = count;
+
+        // Allocate buffer for layer names
+        props->component_layer_names =
+            loader_instance_heap_alloc(inst, sizeof(char[MAX_STRING_SIZE]) * count, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+        if (NULL == props->component_layer_names) {
+            result = VK_ERROR_OUT_OF_HOST_MEMORY;
+            goto out;
+        }
+
+        // Copy the component layers into the array
+        for (i = 0; i < count; i++) {
+            cJSON *comp_layer = cJSON_GetArrayItem(component_layers, i);
+            if (NULL != comp_layer) {
+                temp = cJSON_Print(comp_layer);
+                if (NULL == temp) {
+                    result = VK_ERROR_OUT_OF_HOST_MEMORY;
+                    goto out;
+                }
+                temp[strlen(temp) - 1] = '\0';
+                strncpy(props->component_layer_names[i], temp + 1, MAX_STRING_SIZE - 1);
+                props->component_layer_names[i][MAX_STRING_SIZE - 1] = '\0';
+                cJSON_Free(temp);
+            }
+        }
+
+        // This is now, officially, a meta-layer
+        props->type_flags |= VK_LAYER_TYPE_FLAG_META_LAYER;
+        loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0, "Encountered meta-layer %s", name);
+
+        // Make sure we set up other things so we head down the correct branches below
+        library_path_str = NULL;
+    } else {
+        loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                   "Layer missing both library_path and component_layers fields.  One or the "
+                   "other MUST be defined.  Skipping this layer");
+        goto out;
+    }
+
+    props->num_blacklist_layers = 0;
+    props->blacklist_layer_names = NULL;
+    blacklisted_layers = cJSON_GetObjectItem(layer_node, "blacklisted_layers");
+    if (blacklisted_layers != NULL) {
+        if (strcmp(name, VK_OVERRIDE_LAYER_NAME)) {
+            loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "Layer %s contains a blacklist, but a blacklist can only be provided by the override metalayer. "
+                       "This blacklist will be ignored.",
+                       name);
+        } else {
+            props->num_blacklist_layers = cJSON_GetArraySize(blacklisted_layers);
+
+            // Allocate the blacklist array
+            props->blacklist_layer_names = loader_instance_heap_alloc(
+                inst, sizeof(char[MAX_STRING_SIZE]) * props->num_blacklist_layers, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+            if (props->blacklist_layer_names == NULL) {
+                result = VK_ERROR_OUT_OF_HOST_MEMORY;
+                goto out;
+            }
+
+            // Copy the blacklisted layers into the array
+            for (i = 0; i < (int)props->num_blacklist_layers; ++i) {
+                cJSON *black_layer = cJSON_GetArrayItem(blacklisted_layers, i);
+                if (black_layer == NULL) {
+                    continue;
+                }
+                temp = cJSON_Print(black_layer);
+                if (temp == NULL) {
+                    result = VK_ERROR_OUT_OF_HOST_MEMORY;
+                    goto out;
+                }
+                temp[strlen(temp) - 1] = '\0';
+                strncpy(props->blacklist_layer_names[i], temp + 1, MAX_STRING_SIZE - 1);
+                props->blacklist_layer_names[i][MAX_STRING_SIZE - 1] = '\0';
+                cJSON_Free(temp);
+            }
+        }
+    }
+
+    override_paths = cJSON_GetObjectItem(layer_node, "override_paths");
+    if (NULL != override_paths) {
+        if (version.major == 1 && (version.minor < 1 || version.patch < 1)) {
+            loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "Indicating meta-layer-specific override paths, but using older "
+                       "JSON file version.");
+        }
+        int count = cJSON_GetArraySize(override_paths);
+        props->num_override_paths = count;
+
+        // Allocate buffer for override paths
+        props->override_paths =
+            loader_instance_heap_alloc(inst, sizeof(char[MAX_STRING_SIZE]) * count, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+        if (NULL == props->override_paths) {
+            result = VK_ERROR_OUT_OF_HOST_MEMORY;
+            goto out;
+        }
+
+        // Copy the override paths into the array
+        for (i = 0; i < count; i++) {
+            cJSON *override_path = cJSON_GetArrayItem(override_paths, i);
+            if (NULL != override_path) {
+                temp = cJSON_Print(override_path);
+                if (NULL == temp) {
+                    result = VK_ERROR_OUT_OF_HOST_MEMORY;
+                    goto out;
+                }
+                temp[strlen(temp) - 1] = '\0';
+                strncpy(props->override_paths[i], temp + 1, MAX_STRING_SIZE - 1);
+                props->override_paths[i][MAX_STRING_SIZE - 1] = '\0';
+                cJSON_Free(temp);
+            }
+        }
+    }
+
+    if (is_implicit) {
+        GET_JSON_OBJECT(layer_node, disable_environment)
+    }
+#undef GET_JSON_ITEM
+#undef GET_JSON_OBJECT
+
+    strncpy(props->info.layerName, name, sizeof(props->info.layerName));
+    props->info.layerName[sizeof(props->info.layerName) - 1] = '\0';
+    props->info.specVersion = loader_make_version(api_version);
+    props->info.implementationVersion = atoi(implementation_version);
+    strncpy((char *)props->info.description, description, sizeof(props->info.description));
+    props->info.description[sizeof(props->info.description) - 1] = '\0';
+    if (is_implicit) {
+        if (!disable_environment || !disable_environment->child) {
+            loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "Didn't find required layer child value disable_environment"
+                       "in manifest JSON file, skipping this layer");
+            layer_node = layer_node->next;
+            goto out;
+        }
+        strncpy(props->disable_env_var.name, disable_environment->child->string, sizeof(props->disable_env_var.name));
+        props->disable_env_var.name[sizeof(props->disable_env_var.name) - 1] = '\0';
+        strncpy(props->disable_env_var.value, disable_environment->child->valuestring, sizeof(props->disable_env_var.value));
+        props->disable_env_var.value[sizeof(props->disable_env_var.value) - 1] = '\0';
+    }
+
+// Now get all optional items and objects and put in list:
+// functions
+// instance_extensions
+// device_extensions
+// enable_environment (implicit layers only)
+#define GET_JSON_OBJECT(node, var) \
+    { var = cJSON_GetObjectItem(node, #var); }
+#define GET_JSON_ITEM(node, var)                            \
+    {                                                       \
+        item = cJSON_GetObjectItem(node, #var);             \
+        if (item != NULL) {                                 \
+            temp = cJSON_Print(item);                       \
+            if (temp != NULL) {                             \
+                temp[strlen(temp) - 1] = '\0';              \
+                var = loader_stack_alloc(strlen(temp) + 1); \
+                strcpy(var, &temp[1]);                      \
+                cJSON_Free(temp);                           \
+            } else {                                        \
+                result = VK_ERROR_OUT_OF_HOST_MEMORY;       \
+                goto out;                                   \
+            }                                               \
+        }                                                   \
+    }
+
+    cJSON *instance_extensions, *device_extensions, *functions, *enable_environment;
+    cJSON *entrypoints = NULL;
+    char *vkGetInstanceProcAddr = NULL;
+    char *vkGetDeviceProcAddr = NULL;
+    char *vkNegotiateLoaderLayerInterfaceVersion = NULL;
+    char *spec_version = NULL;
+    char **entry_array = NULL;
+
+    // Layer interface functions
+    //    vkGetInstanceProcAddr
+    //    vkGetDeviceProcAddr
+    //    vkNegotiateLoaderLayerInterfaceVersion (starting with JSON file 1.1.0)
+    GET_JSON_OBJECT(layer_node, functions)
+    if (functions != NULL) {
+        if (version.major > 1 || version.minor >= 1) {
+            GET_JSON_ITEM(functions, vkNegotiateLoaderLayerInterfaceVersion)
+            if (vkNegotiateLoaderLayerInterfaceVersion != NULL)
+                strncpy(props->functions.str_negotiate_interface, vkNegotiateLoaderLayerInterfaceVersion,
+                        sizeof(props->functions.str_negotiate_interface));
+            props->functions.str_negotiate_interface[sizeof(props->functions.str_negotiate_interface) - 1] = '\0';
+        } else {
+            props->functions.str_negotiate_interface[0] = '\0';
+        }
+        GET_JSON_ITEM(functions, vkGetInstanceProcAddr)
+        GET_JSON_ITEM(functions, vkGetDeviceProcAddr)
+        if (vkGetInstanceProcAddr != NULL) {
+            strncpy(props->functions.str_gipa, vkGetInstanceProcAddr, sizeof(props->functions.str_gipa));
+            if (version.major > 1 || version.minor >= 1) {
+                loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                           "Layer \"%s\" using deprecated \'vkGetInstanceProcAddr\' tag which was deprecated starting with JSON "
+                           "file version 1.1.0. The new vkNegotiateLayerInterfaceVersion function is preferred, though for "
+                           "compatibility reasons it may be desirable to continue using the deprecated tag.",
+                           name);
+            }
+        }
+        props->functions.str_gipa[sizeof(props->functions.str_gipa) - 1] = '\0';
+        if (vkGetDeviceProcAddr != NULL) {
+            strncpy(props->functions.str_gdpa, vkGetDeviceProcAddr, sizeof(props->functions.str_gdpa));
+            if (version.major > 1 || version.minor >= 1) {
+                loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                           "Layer \"%s\" using deprecated \'vkGetDeviceProcAddr\' tag which was deprecated starting with JSON "
+                           "file version 1.1.0. The new vkNegotiateLayerInterfaceVersion function is preferred, though for "
+                           "compatibility reasons it may be desirable to continue using the deprecated tag.",
+                           name);
+            }
+        }
+        props->functions.str_gdpa[sizeof(props->functions.str_gdpa) - 1] = '\0';
+    }
+
+    // instance_extensions
+    //   array of {
+    //     name
+    //     spec_version
+    //   }
+    GET_JSON_OBJECT(layer_node, instance_extensions)
+    if (instance_extensions != NULL) {
+        int count = cJSON_GetArraySize(instance_extensions);
+        for (i = 0; i < count; i++) {
+            ext_item = cJSON_GetArrayItem(instance_extensions, i);
+            GET_JSON_ITEM(ext_item, name)
+            if (name != NULL) {
+                strncpy(ext_prop.extensionName, name, sizeof(ext_prop.extensionName));
+                ext_prop.extensionName[sizeof(ext_prop.extensionName) - 1] = '\0';
+            }
+            GET_JSON_ITEM(ext_item, spec_version)
+            if (NULL != spec_version) {
+                ext_prop.specVersion = atoi(spec_version);
+            } else {
+                ext_prop.specVersion = 0;
+            }
+            bool ext_unsupported = wsi_unsupported_instance_extension(&ext_prop);
+            if (!ext_unsupported) {
+                loader_add_to_ext_list(inst, &props->instance_extension_list, 1, &ext_prop);
+            }
+        }
+    }
+
+    // device_extensions
+    //   array of {
+    //     name
+    //     spec_version
+    //     entrypoints
+    //   }
+    GET_JSON_OBJECT(layer_node, device_extensions)
+    if (device_extensions != NULL) {
+        int count = cJSON_GetArraySize(device_extensions);
+        for (i = 0; i < count; i++) {
+            ext_item = cJSON_GetArrayItem(device_extensions, i);
+            GET_JSON_ITEM(ext_item, name)
+            GET_JSON_ITEM(ext_item, spec_version)
+            if (name != NULL) {
+                strncpy(ext_prop.extensionName, name, sizeof(ext_prop.extensionName));
+                ext_prop.extensionName[sizeof(ext_prop.extensionName) - 1] = '\0';
+            }
+            if (NULL != spec_version) {
+                ext_prop.specVersion = atoi(spec_version);
+            } else {
+                ext_prop.specVersion = 0;
+            }
+            // entrypoints = cJSON_GetObjectItem(ext_item, "entrypoints");
+            GET_JSON_OBJECT(ext_item, entrypoints)
+            int entry_count;
+            if (entrypoints == NULL) {
+                loader_add_to_dev_ext_list(inst, &props->device_extension_list, &ext_prop, 0, NULL);
+                continue;
+            }
+            entry_count = cJSON_GetArraySize(entrypoints);
+            if (entry_count) {
+                entry_array = (char **)loader_stack_alloc(sizeof(char *) * entry_count);
+            }
+            for (j = 0; j < entry_count; j++) {
+                ext_item = cJSON_GetArrayItem(entrypoints, j);
+                if (ext_item != NULL) {
+                    temp = cJSON_Print(ext_item);
+                    if (NULL == temp) {
+                        entry_array[j] = NULL;
+                        result = VK_ERROR_OUT_OF_HOST_MEMORY;
+                        goto out;
+                    }
+                    temp[strlen(temp) - 1] = '\0';
+                    entry_array[j] = loader_stack_alloc(strlen(temp) + 1);
+                    strcpy(entry_array[j], &temp[1]);
+                    cJSON_Free(temp);
+                }
+            }
+            loader_add_to_dev_ext_list(inst, &props->device_extension_list, &ext_prop, entry_count, entry_array);
+        }
+    }
+    if (is_implicit) {
+        GET_JSON_OBJECT(layer_node, enable_environment)
+
+        // enable_environment is optional
+        if (enable_environment) {
+            strncpy(props->enable_env_var.name, enable_environment->child->string, sizeof(props->enable_env_var.name));
+            props->enable_env_var.name[sizeof(props->enable_env_var.name) - 1] = '\0';
+            strncpy(props->enable_env_var.value, enable_environment->child->valuestring, sizeof(props->enable_env_var.value));
+            props->enable_env_var.value[sizeof(props->enable_env_var.value) - 1] = '\0';
+        }
+    }
+
+    // Read in the pre-instance stuff
+    cJSON *pre_instance = cJSON_GetObjectItem(layer_node, "pre_instance_functions");
+    if (pre_instance) {
+        if (!layer_json_supports_pre_instance_tag(&version)) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "Found pre_instance_functions section in layer from \"%s\". "
+                       "This section is only valid in manifest version 1.1.2 or later. The section will be ignored",
+                       filename);
+        } else if (!is_implicit) {
+            loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "Found pre_instance_functions section in explicit layer from "
+                       "\"%s\". This section is only valid in implicit layers. The section will be ignored",
+                       filename);
+        } else {
+            cJSON *inst_ext_json = cJSON_GetObjectItem(pre_instance, "vkEnumerateInstanceExtensionProperties");
+            if (inst_ext_json) {
+                char *inst_ext_name = cJSON_Print(inst_ext_json);
+                if (inst_ext_name == NULL) {
+                    result = VK_ERROR_OUT_OF_HOST_MEMORY;
+                    goto out;
+                }
+                size_t len = strlen(inst_ext_name) >= MAX_STRING_SIZE ? MAX_STRING_SIZE - 3 : strlen(inst_ext_name) - 2;
+                strncpy(props->pre_instance_functions.enumerate_instance_extension_properties, inst_ext_name + 1, len);
+                props->pre_instance_functions.enumerate_instance_extension_properties[len] = '\0';
+                cJSON_Free(inst_ext_name);
+            }
+
+            cJSON *inst_layer_json = cJSON_GetObjectItem(pre_instance, "vkEnumerateInstanceLayerProperties");
+            if (inst_layer_json) {
+                char *inst_layer_name = cJSON_Print(inst_layer_json);
+                if (inst_layer_name == NULL) {
+                    result = VK_ERROR_OUT_OF_HOST_MEMORY;
+                    goto out;
+                }
+                size_t len = strlen(inst_layer_name) >= MAX_STRING_SIZE ? MAX_STRING_SIZE - 3 : strlen(inst_layer_name) - 2;
+                strncpy(props->pre_instance_functions.enumerate_instance_layer_properties, inst_layer_name + 1, len);
+                props->pre_instance_functions.enumerate_instance_layer_properties[len] = '\0';
+                cJSON_Free(inst_layer_name);
+            }
+
+            cJSON *inst_version_json = cJSON_GetObjectItem(pre_instance, "vkEnumerateInstanceVersion");
+            if (inst_version_json) {
+                char *inst_version_name = cJSON_Print(inst_version_json);
+                if (inst_version_name == NULL) {
+                    result = VK_ERROR_OUT_OF_HOST_MEMORY;
+                    goto out;
+                }
+                size_t len = strlen(inst_version_name) >= MAX_STRING_SIZE ? MAX_STRING_SIZE - 3 : strlen(inst_version_name) - 2;
+                strncpy(props->pre_instance_functions.enumerate_instance_version, inst_version_name + 1, len);
+                props->pre_instance_functions.enumerate_instance_version[len] = '\0';
+                cJSON_Free(inst_version_name);
+            }
+        }
+    }
+
+    result = VK_SUCCESS;
+
+out:
+#undef GET_JSON_ITEM
+#undef GET_JSON_OBJECT
+
+    if (VK_SUCCESS != result && NULL != props) {
+        if (NULL != props->blacklist_layer_names) {
+            loader_instance_heap_free(inst, props->blacklist_layer_names);
+        }
+        if (NULL != props->component_layer_names) {
+            loader_instance_heap_free(inst, props->component_layer_names);
+        }
+        if (NULL != props->override_paths) {
+            loader_instance_heap_free(inst, props->override_paths);
+        }
+        props->num_blacklist_layers = 0;
+        props->blacklist_layer_names = NULL;
+        props->num_component_layers = 0;
+        props->component_layer_names = NULL;
+        props->num_override_paths = 0;
+        props->override_paths = NULL;
+    }
+
+    return result;
+}
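+
+// Illustrative note, not part of the loader code: the required fields read by
+// loaderReadLayerJson above correspond to a minimal explicit-layer manifest along
+// these lines (all names and paths below are hypothetical placeholders):
+//
+//   {
+//       "file_format_version": "1.1.2",
+//       "layer": {
+//           "name": "VK_LAYER_EXAMPLE_basic",
+//           "type": "INSTANCE",
+//           "library_path": "./libVkLayer_example.so",
+//           "api_version": "1.1.101",
+//           "implementation_version": "1",
+//           "description": "Example layer"
+//       }
+//   }
+//
+// An implicit layer would also need the "disable_environment" object handled above,
+// and may optionally provide "enable_environment".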
+
+static inline bool isValidLayerJsonVersion(const layer_json_version *layer_json) {
+    // Supported versions are: 1.0.0, 1.0.1, and 1.1.0 - 1.1.2.
+    if ((layer_json->major == 1 && layer_json->minor == 1 && layer_json->patch < 3) ||
+        (layer_json->major == 1 && layer_json->minor == 0 && layer_json->patch < 2)) {
+        return true;
+    }
+    return false;
+}
+
+static inline bool layerJsonSupportsMultipleLayers(const layer_json_version *layer_json) {
+    // Supported versions started in 1.0.1, so anything newer
+    if ((layer_json->major > 1 || layer_json->minor > 0 || layer_json->patch > 1)) {
+        return true;
+    }
+    return false;
+}
+
+// Given a cJSON struct (json) of the top level JSON object from a layer manifest
+// file, add an entry to layer_list. Fill out the layer_properties in this list
+// entry from the input cJSON object.
+//
+// \returns
+// A VkResult indicating whether the manifest was processed successfully.
+// On success, layer_list has a new entry that is initialized accordingly.
+// If the json input object does not have all the required fields, no entry
+// is added to the list.
+static VkResult loaderAddLayerProperties(const struct loader_instance *inst, struct loader_layer_list *layer_instance_list,
+                                         cJSON *json, bool is_implicit, char *filename) {
+    // The following fields in the layer manifest file are required:
+    //   - "file_format_version"
+    //   - If more than one "layer" object is used, then the "layers" array is
+    //     required
+    VkResult result = VK_ERROR_INITIALIZATION_FAILED;
+    cJSON *item, *layers_node, *layer_node;
+    layer_json_version json_version = {0, 0, 0};
+    char *vers_tok;
+    cJSON *disable_environment = NULL;
+    item = cJSON_GetObjectItem(json, "file_format_version");
+    if (item == NULL) {
+        goto out;
+    }
+    char *file_vers = cJSON_PrintUnformatted(item);
+    if (NULL == file_vers) {
+        goto out;
+    }
+    loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0, "Found manifest file %s, version %s", filename, file_vers);
+    // Get the major, minor, and patch as integers for easier comparison
+    vers_tok = strtok(file_vers, ".\"\n\r");
+    if (NULL != vers_tok) {
+        json_version.major = (uint16_t)atoi(vers_tok);
+        vers_tok = strtok(NULL, ".\"\n\r");
+        if (NULL != vers_tok) {
+            json_version.minor = (uint16_t)atoi(vers_tok);
+            vers_tok = strtok(NULL, ".\"\n\r");
+            if (NULL != vers_tok) {
+                json_version.patch = (uint16_t)atoi(vers_tok);
+            }
+        }
+    }
+
+    if (!isValidLayerJsonVersion(&json_version)) {
+        loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                   "loaderAddLayerProperties: %s invalid layer manifest file version %d.%d.%d.  May cause errors.", filename,
+                   json_version.major, json_version.minor, json_version.patch);
+    }
+    cJSON_Free(file_vers);
+
+    // If "layers" is present, read in the array of layer objects
+    layers_node = cJSON_GetObjectItem(json, "layers");
+    if (layers_node != NULL) {
+        int numItems = cJSON_GetArraySize(layers_node);
+        if (!layerJsonSupportsMultipleLayers(&json_version)) {
+            loader_log(
+                inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                "loaderAddLayerProperties: \'layers\' tag not supported until file version 1.0.1, but %s is reporting version "
+                "%d.%d.%d",
+                filename, json_version.major, json_version.minor, json_version.patch);
+        }
+        for (int curLayer = 0; curLayer < numItems; curLayer++) {
+            layer_node = cJSON_GetArrayItem(layers_node, curLayer);
+            if (layer_node == NULL) {
+                loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                           "loaderAddLayerProperties: Can not find 'layers' array element %d object in manifest JSON file %s.  "
+                           "Skipping this file",
+                           curLayer, filename);
+                goto out;
+            }
+            result = loaderReadLayerJson(inst, layer_instance_list, layer_node, json_version, item, disable_environment,
+                                         is_implicit, filename);
+        }
+    } else {
+        // Otherwise, try to read in individual layers
+        layer_node = cJSON_GetObjectItem(json, "layer");
+        if (layer_node == NULL) {
+            loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "loaderAddLayerProperties: Can not find 'layer' object in manifest JSON file %s.  Skipping this file.",
+                       filename);
+            goto out;
+        }
+        // Loop through all "layer" objects in the file to get a count of them
+        // first.
+        uint16_t layer_count = 0;
+        cJSON *tempNode = layer_node;
+        do {
+            tempNode = tempNode->next;
+            layer_count++;
+        } while (tempNode != NULL);
+
+        // Throw a warning if we encounter multiple "layer" objects in file
+        // versions newer than 1.0.0.  Having multiple objects with the same
+        // name at the same level is actually a JSON standard violation.
+        if (layer_count > 1 && layerJsonSupportsMultipleLayers(&json_version)) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "loaderAddLayerProperties: Multiple 'layer' nodes are deprecated starting in file version \"1.0.1\".  "
+                       "Please use 'layers' : [] array instead in %s.",
+                       filename);
+        } else {
+            do {
+                result = loaderReadLayerJson(inst, layer_instance_list, layer_node, json_version, item, disable_environment,
+                                             is_implicit, filename);
+                layer_node = layer_node->next;
+            } while (layer_node != NULL);
+        }
+    }
+
+out:
+
+    return result;
+}
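+
+// Illustrative note, not part of the loader code: with file_format_version 1.0.1 or
+// newer, loaderAddLayerProperties above also accepts a "layers" array in place of a
+// single "layer" object, e.g. (hypothetical names, other required fields omitted):
+//
+//   {
+//       "file_format_version": "1.1.2",
+//       "layers": [
+//           { "name": "VK_LAYER_EXAMPLE_first", ... },
+//           { "name": "VK_LAYER_EXAMPLE_second", ... }
+//       ]
+//   }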
+
+static inline size_t DetermineDataFilePathSize(const char *cur_path, size_t relative_path_size) {
+    size_t path_size = 0;
+
+    if (NULL != cur_path) {
+        // For each folder in cur_path (detected by finding additional
+        // path separators in the string), we need to add the relative path on
+        // the end.  Plus, leave an additional two slots on the end to add an
+        // extra directory slash and path separator if needed.
+        path_size += strlen(cur_path) + relative_path_size + 2;
+        for (const char *x = cur_path; *x; ++x) {
+            if (*x == PATH_SEPARATOR) {
+                path_size += relative_path_size + 2;
+            }
+        }
+    }
+
+    return path_size;
+}
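+
+// Worked example, illustrative only and assuming a Unix-style ':' PATH_SEPARATOR:
+// for cur_path = "/etc/xdg:/etc" and a relative path of 23 characters
+// ("vulkan/explicit_layer.d"), the base term is 13 + 23 + 2 = 38 bytes and the one
+// separator found in the string adds another 23 + 2 = 25, giving a conservative
+// estimate of 63 bytes.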
+
+static inline void CopyDataFilePath(const char *cur_path, const char *relative_path, size_t relative_path_size,
+                                    char **output_path) {
+    if (NULL != cur_path) {
+        uint32_t start = 0;
+        uint32_t stop = 0;
+        char *cur_write = *output_path;
+
+        while (cur_path[start] != '\0') {
+            while (cur_path[start] == PATH_SEPARATOR) {
+                start++;
+            }
+            stop = start;
+            while (cur_path[stop] != PATH_SEPARATOR && cur_path[stop] != '\0') {
+                stop++;
+            }
+            const size_t s = stop - start;
+            if (s) {
+                memcpy(cur_write, &cur_path[start], s);
+                cur_write += s;
+
+                // If last symbol written was not a directory symbol, add it.
+                if (*(cur_write - 1) != DIRECTORY_SYMBOL) {
+                    *cur_write++ = DIRECTORY_SYMBOL;
+                }
+
+                if (relative_path_size > 0) {
+                    memcpy(cur_write, relative_path, relative_path_size);
+                    cur_write += relative_path_size;
+                }
+                *cur_write++ = PATH_SEPARATOR;
+                start = stop;
+            }
+        }
+        *output_path = cur_write;
+    }
+}
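+
+// Worked example, illustrative only and assuming ':' as PATH_SEPARATOR and '/' as
+// DIRECTORY_SYMBOL: for cur_path = "/etc/xdg:/etc" and relative_path =
+// "vulkan/explicit_layer.d", the loop above writes
+// "/etc/xdg/vulkan/explicit_layer.d:/etc/vulkan/explicit_layer.d:" at *output_path
+// and leaves *output_path pointing just past the trailing separator.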
+
+// Check to see if there's enough space in the data file list.  If not, add some.
+static inline VkResult CheckAndAdjustDataFileList(const struct loader_instance *inst, struct loader_data_files *out_files) {
+    if (out_files->count == 0) {
+        out_files->filename_list = loader_instance_heap_alloc(inst, 64 * sizeof(char *), VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
+        if (NULL == out_files->filename_list) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "CheckAndAdjustDataFileList: Failed to allocate space for manifest file name list");
+            return VK_ERROR_OUT_OF_HOST_MEMORY;
+        }
+        out_files->alloc_count = 64;
+    } else if (out_files->count == out_files->alloc_count) {
+        size_t new_size = out_files->alloc_count * sizeof(char *) * 2;
+        void *new_ptr = loader_instance_heap_realloc(inst, out_files->filename_list, out_files->alloc_count * sizeof(char *),
+                                                     new_size, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
+        if (NULL == new_ptr) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "CheckAndAdjustDataFileList: Failed to reallocate space for manifest file name list");
+            return VK_ERROR_OUT_OF_HOST_MEMORY;
+        }
+        out_files->filename_list = new_ptr;
+        out_files->alloc_count *= 2;
+    }
+
+    return VK_SUCCESS;
+}
+
+// If the file found is a manifest file name, add it to the out_files manifest list.
+static VkResult AddIfManifestFile(const struct loader_instance *inst, const char *file_name, struct loader_data_files *out_files) {
+    VkResult vk_result = VK_SUCCESS;
+
+    if (NULL == file_name || NULL == out_files) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, "AddIfManfistFile: Received NULL pointer");
+        vk_result = VK_ERROR_INITIALIZATION_FAILED;
+        goto out;
+    }
+
+    // Look for files ending with ".json" suffix
+    size_t name_len = strlen(file_name);
+    if (name_len < 5 || 0 != strncmp(file_name + name_len - 5, ".json", 5)) {
+        // Use VK_INCOMPLETE to indicate an invalid name, but keep going.
+        vk_result = VK_INCOMPLETE;
+        goto out;
+    }
+
+    // Check and allocate space in the manifest list if necessary
+    vk_result = CheckAndAdjustDataFileList(inst, out_files);
+    if (VK_SUCCESS != vk_result) {
+        goto out;
+    }
+
+    out_files->filename_list[out_files->count] =
+        loader_instance_heap_alloc(inst, strlen(file_name) + 1, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
+    if (out_files->filename_list[out_files->count] == NULL) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, "AddIfManfistFile: Failed to allocate space for manifest file %d list",
+                   out_files->count);
+        vk_result = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+
+    strcpy(out_files->filename_list[out_files->count++], file_name);
+
+out:
+
+    return vk_result;
+}
+
+static VkResult AddDataFilesInPath(const struct loader_instance *inst, char *search_path, bool is_directory_list,
+                                   struct loader_data_files *out_files) {
+    VkResult vk_result = VK_SUCCESS;
+    DIR *dir_stream = NULL;
+    struct dirent *dir_entry;
+    char *cur_file;
+    char *next_file;
+    char *name;
+    char full_path[2048];
+#ifndef _WIN32
+    char temp_path[2048];
+#endif
+
+    // Now, parse the paths
+    next_file = search_path;
+    while (NULL != next_file && *next_file != '\0') {
+        name = NULL;
+        cur_file = next_file;
+        next_file = loader_get_next_path(cur_file);
+
+        // Get the next name in the list and verify it's valid
+        if (is_directory_list) {
+            dir_stream = opendir(cur_file);
+            if (NULL == dir_stream) {
+                continue;
+            }
+            while (1) {
+                dir_entry = readdir(dir_stream);
+                if (NULL == dir_entry) {
+                    break;
+                }
+
+                name = &(dir_entry->d_name[0]);
+                loader_get_fullpath(name, cur_file, sizeof(full_path), full_path);
+                name = full_path;
+
+                VkResult local_res;
+                local_res = AddIfManifestFile(inst, name, out_files);
+
+                // Incomplete means this was not a valid data file.
+                if (local_res == VK_INCOMPLETE) {
+                    continue;
+                } else if (local_res != VK_SUCCESS) {
+                    vk_result = local_res;
+                    break;
+                }
+            }
+            closedir(dir_stream);
+            if (vk_result != VK_SUCCESS) {
+                goto out;
+            }
+        } else {
+#ifdef _WIN32
+            name = cur_file;
+#else
+            // Only Linux has relative paths; make a copy of the location so it isn't modified
+            size_t str_len;
+            if (NULL != next_file) {
+                str_len = next_file - cur_file + 1;
+            } else {
+                str_len = strlen(cur_file) + 1;
+            }
+            if (str_len > sizeof(temp_path)) {
+                loader_log(inst, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0, "AddDataFilesInPath: Path to %s too long\n", cur_file);
+                continue;
+            }
+            strcpy(temp_path, cur_file);
+            name = temp_path;
+#endif
+            loader_get_fullpath(cur_file, name, sizeof(full_path), full_path);
+            name = full_path;
+
+            VkResult local_res;
+            local_res = AddIfManifestFile(inst, name, out_files);
+
+            // Incomplete means this was not a valid data file.
+            if (local_res == VK_INCOMPLETE) {
+                continue;
+            } else if (local_res != VK_SUCCESS) {
+                vk_result = local_res;
+                break;
+            }
+        }
+    }
+
+out:
+
+    return vk_result;
+}
+
+// Look for data files in the provided paths, but first check the environment override to determine if we should use that
+// instead.
+static VkResult ReadDataFilesInSearchPaths(const struct loader_instance *inst, enum loader_data_files_type data_file_type,
+                                           const char *env_override, const char *path_override, const char *relative_location,
+                                           bool *override_active, struct loader_data_files *out_files) {
+    VkResult vk_result = VK_SUCCESS;
+    bool is_directory_list = true;
+    bool is_icd = (data_file_type == LOADER_DATA_FILE_MANIFEST_ICD);
+    char *override_env = NULL;
+    const char *override_path = NULL;
+    size_t search_path_size = 0;
+    char *search_path = NULL;
+    char *cur_path_ptr = NULL;
+    size_t rel_size = 0;
+#ifndef _WIN32
+    bool xdgconfig_alloc = true;
+    bool xdgdata_alloc = true;
+#endif
+
+#ifndef _WIN32
+    // Determine how much space is needed to generate the full search path
+    // for the current manifest files.
+    char *xdgconfdirs = loader_secure_getenv("XDG_CONFIG_DIRS", inst);
+    char *xdgdatadirs = loader_secure_getenv("XDG_DATA_DIRS", inst);
+    char *xdgdatahome = loader_secure_getenv("XDG_DATA_HOME", inst);
+    char *home = NULL;
+    char* home_root = NULL;
+
+    if (xdgconfdirs == NULL) {
+        xdgconfig_alloc = false;
+    }
+    if (xdgdatadirs == NULL) {
+        xdgdata_alloc = false;
+    }
+    if (xdgconfdirs == NULL || xdgconfdirs[0] == '\0') {
+        xdgconfdirs = FALLBACK_CONFIG_DIRS;
+    }
+    if (xdgdatadirs == NULL || xdgdatadirs[0] == '\0') {
+        xdgdatadirs = FALLBACK_DATA_DIRS;
+    }
+
+    // Only use HOME if XDG_DATA_HOME is not present on the system
+    if (NULL == xdgdatahome) {
+        home = loader_secure_getenv("HOME", inst);
+        if (home != NULL) {
+            home_root = loader_instance_heap_alloc(inst, strlen(home) + 14, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
+            if (home_root == NULL) {
+                vk_result = VK_ERROR_OUT_OF_HOST_MEMORY;
+                goto out;
+            }
+            strcpy(home_root, home);
+            strcat(home_root, "/.local/share");
+        }
+    }
+#endif
+
+    if (path_override != NULL) {
+        override_path = path_override;
+    } else if (env_override != NULL) {
+#ifndef _WIN32
+        if (geteuid() != getuid() || getegid() != getgid()) {
+            // Don't allow setuid apps to use the env var:
+            env_override = NULL;
+        } else
+#endif
+        {
+            override_env = loader_secure_getenv(env_override, inst);
+
+            // The ICD override is actually a specific list of filenames, not directories
+            if (is_icd && NULL != override_env) {
+                is_directory_list = false;
+            }
+            override_path = override_env;
+        }
+    }
+
+    // Add two by default for NULL terminator and one path separator on end (just in case)
+    search_path_size = 2;
+
+    // If there's an override, use that (and the local folder if required) and nothing else
+    if (NULL != override_path) {
+        // Local folder and null terminator
+        search_path_size += strlen(override_path) + 1;
+    } else if (NULL == relative_location) {
+        // If there's no override, and no relative location, bail out.  This is usually
+        // the case when we're on Windows and the default path is to use the registry.
+        goto out;
+    } else {
+        // Add the general search folders (with the appropriate relative folder added)
+        rel_size = strlen(relative_location);
+        if (rel_size == 0) {
+            goto out;
+        } else {
+#if defined(__APPLE__)
+            search_path_size += MAXPATHLEN;
+#endif
+#ifndef _WIN32
+            search_path_size += DetermineDataFilePathSize(xdgconfdirs, rel_size);
+            search_path_size += DetermineDataFilePathSize(xdgdatadirs, rel_size);
+            search_path_size += DetermineDataFilePathSize(SYSCONFDIR, rel_size);
+#if defined(EXTRASYSCONFDIR)
+            search_path_size += DetermineDataFilePathSize(EXTRASYSCONFDIR, rel_size);
+#endif
+            if (is_directory_list) {
+                if (!IsHighIntegrity()) {
+                    search_path_size += DetermineDataFilePathSize(xdgdatahome, rel_size);
+                    search_path_size += DetermineDataFilePathSize(home_root, rel_size);
+                }
+            }
+#endif
+        }
+    }
+
+    // Allocate the required space
+    search_path = loader_instance_heap_alloc(inst, search_path_size, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
+    if (NULL == search_path) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "ReadDataFilesInSearchPaths: Failed to allocate space for search path of length %d", (uint32_t)search_path_size);
+        vk_result = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+
+    cur_path_ptr = search_path;
+
+    // Add the remaining paths to the list
+    if (NULL != override_path) {
+        strcpy(cur_path_ptr, override_path);
+    } else {
+#ifndef _WIN32
+        if (rel_size > 0) {
+#if defined(__APPLE__)
+            // Add the bundle's Resources dir to the beginning of the search path.
+            // Looks for manifests in the bundle first, before any system directories.
+            CFBundleRef main_bundle = CFBundleGetMainBundle();
+            if (NULL != main_bundle) {
+                CFURLRef ref = CFBundleCopyResourcesDirectoryURL(main_bundle);
+                if (NULL != ref) {
+                    if (CFURLGetFileSystemRepresentation(ref, TRUE, (UInt8 *)cur_path_ptr, search_path_size)) {
+                        cur_path_ptr += strlen(cur_path_ptr);
+                        *cur_path_ptr++ = DIRECTORY_SYMBOL;
+                        memcpy(cur_path_ptr, relative_location, rel_size);
+                        cur_path_ptr += rel_size;
+                        *cur_path_ptr++ = PATH_SEPARATOR;
+                    }
+                    CFRelease(ref);
+                }
+            }
+#endif
+            CopyDataFilePath(xdgconfdirs, relative_location, rel_size, &cur_path_ptr);
+            CopyDataFilePath(SYSCONFDIR, relative_location, rel_size, &cur_path_ptr);
+#if defined(EXTRASYSCONFDIR)
+            CopyDataFilePath(EXTRASYSCONFDIR, relative_location, rel_size, &cur_path_ptr);
+#endif
+            CopyDataFilePath(xdgdatadirs, relative_location, rel_size, &cur_path_ptr);
+            if (is_directory_list) {
+                CopyDataFilePath(xdgdatahome, relative_location, rel_size, &cur_path_ptr);
+                CopyDataFilePath(home_root, relative_location, rel_size, &cur_path_ptr);
+            }
+        }
+
+        // Remove the last path separator
+        --cur_path_ptr;
+
+        assert(cur_path_ptr - search_path < (ptrdiff_t)search_path_size);
+        *cur_path_ptr = '\0';
+#endif
+    }
+
+    // Print out the paths being searched if debugging is enabled
+    if (search_path_size > 0) {
+        loader_log(inst, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0,
+                   "ReadDataFilesInSearchPaths: Searching the following paths for manifest files: %s\n", search_path);
+    }
+
+    // Now, parse the paths and add any manifest files found in them.
+    vk_result = AddDataFilesInPath(inst, search_path, is_directory_list, out_files);
+
+    if (NULL != override_path) {
+        *override_active = true;
+    } else {
+        *override_active = false;
+    }
+
+out:
+
+    if (NULL != override_env) {
+        loader_free_getenv(override_env, inst);
+    }
+#ifndef _WIN32
+    if (xdgconfig_alloc) {
+        loader_free_getenv(xdgconfdirs, inst);
+    }
+    if (xdgdata_alloc) {
+        loader_free_getenv(xdgdatadirs, inst);
+    }
+    if (NULL != xdgdatahome) {
+        loader_free_getenv(xdgdatahome, inst);
+    }
+    if (NULL != home) {
+        loader_free_getenv(home, inst);
+    }
+    if (NULL != home_root) {
+        loader_instance_heap_free(inst, home_root);
+    }
+#endif
+
+    if (NULL != search_path) {
+        loader_instance_heap_free(inst, search_path);
+    }
+
+    return vk_result;
+}
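+
+// Illustrative note, not part of the loader code: on a typical Linux system with no
+// override active and default build paths, the explicit-layer search_path assembled
+// above resembles the following single ':'-separated string (wrapped here for
+// readability; the home directory is a hypothetical example):
+//
+//   /etc/xdg/vulkan/explicit_layer.d:/etc/vulkan/explicit_layer.d:
+//   /usr/local/share/vulkan/explicit_layer.d:/usr/share/vulkan/explicit_layer.d:
+//   /home/user/.local/share/vulkan/explicit_layer.d
+//
+// AddDataFilesInPath then walks each of these directories looking for ".json" files.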
+
+#ifdef _WIN32
+// Read manifest JSON files using the Windows driver interface
+static VkResult ReadManifestsFromD3DAdapters(const struct loader_instance *inst, char **reg_data, PDWORD reg_data_size,
+                                             const wchar_t *value_name) {
+    VkResult result = VK_INCOMPLETE;
+    LoaderEnumAdapters2 adapters = {.adapter_count = 0, .adapters = NULL};
+    LoaderQueryRegistryInfo *full_info = NULL;
+    size_t full_info_size = 0;
+    char *json_path = NULL;
+    size_t json_path_size = 0;
+
+    PFN_LoaderEnumAdapters2 fpLoaderEnumAdapters2 =
+        (PFN_LoaderEnumAdapters2)GetProcAddress(GetModuleHandle("gdi32.dll"), "D3DKMTEnumAdapters2");
+    PFN_LoaderQueryAdapterInfo fpLoaderQueryAdapterInfo =
+        (PFN_LoaderQueryAdapterInfo)GetProcAddress(GetModuleHandle("gdi32.dll"), "D3DKMTQueryAdapterInfo");
+    if (fpLoaderEnumAdapters2 == NULL || fpLoaderQueryAdapterInfo == NULL) {
+        result = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+
+    // Get all of the adapters
+    NTSTATUS status = fpLoaderEnumAdapters2(&adapters);
+    if (status == STATUS_SUCCESS && adapters.adapter_count > 0) {
+        adapters.adapters = loader_instance_heap_alloc(inst, sizeof(*adapters.adapters) * adapters.adapter_count,
+                                                       VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
+        if (adapters.adapters == NULL) {
+            goto out;
+        }
+        status = fpLoaderEnumAdapters2(&adapters);
+    }
+    if (status != STATUS_SUCCESS) {
+        goto out;
+    }
+
+    // If that worked, we need to get the manifest file(s) for each adapter
+    for (ULONG i = 0; i < adapters.adapter_count; ++i) {
+        // The first query should just check if the field exists and how big it is
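+        // (a LOADER_QUERY_REGISTRY_STATUS_BUFFER_OVERFLOW result is expected at
+        // this point and reports the size needed for the full query below)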
+        LoaderQueryRegistryInfo filename_info = {
+            .query_type = LOADER_QUERY_REGISTRY_ADAPTER_KEY,
+            .query_flags =
+                {
+                    .translate_path = true,
+                },
+            .value_type = REG_MULTI_SZ,
+            .physical_adapter_index = 0,
+        };
+        wcsncpy(filename_info.value_name, value_name, sizeof(filename_info.value_name) / sizeof(DWORD));
+        LoaderQueryAdapterInfo query_info = {
+            .handle = adapters.adapters[i].handle,
+            .type = LOADER_QUERY_TYPE_REGISTRY,
+            .private_data = &filename_info,
+            .private_data_size = sizeof(filename_info),
+        };
+        status = fpLoaderQueryAdapterInfo(&query_info);
+
+        // This error indicates that the type didn't match, so we'll try a REG_SZ
+        if (status != STATUS_SUCCESS) {
+            filename_info.value_type = REG_SZ;
+            status = fpLoaderQueryAdapterInfo(&query_info);
+        }
+
+        if (status != STATUS_SUCCESS || filename_info.status != LOADER_QUERY_REGISTRY_STATUS_BUFFER_OVERFLOW) {
+            continue;
+        }
+
+        while (status == STATUS_SUCCESS &&
+               ((LoaderQueryRegistryInfo *)query_info.private_data)->status == LOADER_QUERY_REGISTRY_STATUS_BUFFER_OVERFLOW) {
+            bool needs_copy = (full_info == NULL);
+            size_t full_size = sizeof(LoaderQueryRegistryInfo) + filename_info.output_value_size;
+            void *buffer =
+                loader_instance_heap_realloc(inst, full_info, full_info_size, full_size, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
+            if (buffer == NULL) {
+                result = VK_ERROR_OUT_OF_HOST_MEMORY;
+                goto out;
+            }
+            full_info = buffer;
+            full_info_size = full_size;
+
+            if (needs_copy) {
+                memcpy(full_info, &filename_info, sizeof(LoaderQueryRegistryInfo));
+            }
+            query_info.private_data = full_info;
+            query_info.private_data_size = (UINT)full_info_size;
+            status = fpLoaderQueryAdapterInfo(&query_info);
+        }
+
+        if (status != STATUS_SUCCESS || full_info->status != LOADER_QUERY_REGISTRY_STATUS_SUCCESS) {
+            goto out;
+        }
+
+        // Convert the wide string to a narrow string
+        void *buffer = loader_instance_heap_realloc(inst, json_path, json_path_size, full_info->output_value_size,
+                                                    VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
+        if (buffer == NULL) {
+            result = VK_ERROR_OUT_OF_HOST_MEMORY;
+            goto out;
+        }
+        json_path = buffer;
+        json_path_size = full_info->output_value_size;
+
+        // Iterate over each component string
+        for (const wchar_t *curr_path = full_info->output_string; curr_path[0] != '\0'; curr_path += wcslen(curr_path) + 1) {
+            WideCharToMultiByte(CP_UTF8, 0, curr_path, -1, json_path, (int)json_path_size, NULL, NULL);
+
+            // Add the string to the output list
+            result = VK_SUCCESS;
+            loaderAddJsonEntry(inst, reg_data, reg_data_size, (LPCTSTR)L"EnumAdapters", REG_SZ, json_path,
+                               (DWORD)strlen(json_path) + 1, &result);
+            if (result != VK_SUCCESS) {
+                goto out;
+            }
+
+            // If this is a string and not a multi-string, we don't want to go through the loop more than once
+            if (full_info->value_type == REG_SZ) {
+                break;
+            }
+        }
+    }
+
+out:
+    if (json_path != NULL) {
+        loader_instance_heap_free(inst, json_path);
+    }
+    if (full_info != NULL) {
+        loader_instance_heap_free(inst, full_info);
+    }
+    if (adapters.adapters != NULL) {
+        loader_instance_heap_free(inst, adapters.adapters);
+    }
+
+    return result;
+}
+
+// Look for data files in the registry.
+static VkResult ReadDataFilesInRegistry(const struct loader_instance *inst, enum loader_data_files_type data_file_type,
+                                        bool warn_if_not_present, char *registry_location, struct loader_data_files *out_files) {
+    VkResult vk_result = VK_SUCCESS;
+    bool is_icd = (data_file_type == LOADER_DATA_FILE_MANIFEST_ICD);
+    char *search_path = NULL;
+
+    // These calls look at the PNP/Device section of the registry.
+    VkResult regHKR_result = VK_SUCCESS;
+    DWORD reg_size = 4096;
+    if (!strncmp(registry_location, VK_DRIVERS_INFO_REGISTRY_LOC, sizeof(VK_DRIVERS_INFO_REGISTRY_LOC))) {
+        // If we're looking for drivers we need to try enumerating adapters
+        regHKR_result = ReadManifestsFromD3DAdapters(inst, &search_path, &reg_size, LoaderPnpDriverRegistryWide());
+        if (regHKR_result == VK_INCOMPLETE) {
+            regHKR_result = loaderGetDeviceRegistryFiles(inst, &search_path, &reg_size, LoaderPnpDriverRegistry());
+        }
+    } else if (!strncmp(registry_location, VK_ELAYERS_INFO_REGISTRY_LOC, sizeof(VK_ELAYERS_INFO_REGISTRY_LOC))) {
+        regHKR_result = ReadManifestsFromD3DAdapters(inst, &search_path, &reg_size, LoaderPnpELayerRegistryWide());
+        if (regHKR_result == VK_INCOMPLETE) {
+            regHKR_result = loaderGetDeviceRegistryFiles(inst, &search_path, &reg_size, LoaderPnpELayerRegistry());
+        }
+    } else if (!strncmp(registry_location, VK_ILAYERS_INFO_REGISTRY_LOC, sizeof(VK_ILAYERS_INFO_REGISTRY_LOC))) {
+        regHKR_result = ReadManifestsFromD3DAdapters(inst, &search_path, &reg_size, LoaderPnpILayerRegistryWide());
+        if (regHKR_result == VK_INCOMPLETE) {
+            regHKR_result = loaderGetDeviceRegistryFiles(inst, &search_path, &reg_size, LoaderPnpILayerRegistry());
+        }
+    }
+
+    // This call looks into the Khronos non-device specific section of the registry.
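+    // The secondary (HKEY_CURRENT_USER) hive is only consulted for layer
+    // manifests, and only when the process is not running at high integrity,
+    // so elevated processes do not pick up user-writable registry entries.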
+    bool use_secondary_hive = (data_file_type == LOADER_DATA_FILE_MANIFEST_LAYER) && (!IsHighIntegrity());
+    VkResult reg_result = loaderGetRegistryFiles(inst, registry_location, use_secondary_hive, &search_path, &reg_size);
+
+    if ((VK_SUCCESS != reg_result && VK_SUCCESS != regHKR_result) || NULL == search_path) {
+        if (data_file_type == LOADER_DATA_FILE_MANIFEST_ICD) {
+            loader_log(
+                inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                "ReadDataFilesInRegistry: Registry lookup failed to get ICD manifest files.  Possibly missing Vulkan driver?");
+            if (VK_SUCCESS == reg_result || VK_ERROR_OUT_OF_HOST_MEMORY == reg_result) {
+                vk_result = reg_result;
+            } else {
+                vk_result = regHKR_result;
+            }
+        } else {
+            if (warn_if_not_present) {
+                if (data_file_type == LOADER_DATA_FILE_MANIFEST_LAYER) {
+                    // This is only a warning for layers
+                    loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                               "ReadDataFilesInRegistry: Registry lookup failed to get layer manifest files.");
+                } else {
+                    // This is only a warning for general data files
+                    loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                               "ReadDataFilesInRegistry: Registry lookup failed to get data files.");
+                }
+            }
+            if (reg_result == VK_ERROR_OUT_OF_HOST_MEMORY) {
+                vk_result = reg_result;
+            } else {
+                // Return success for now since it's not critical for layers
+                vk_result = VK_SUCCESS;
+            }
+        }
+        goto out;
+    }
+
+    // Now, parse the paths and add any manifest files found in them.
+    vk_result = AddDataFilesInPath(inst, search_path, false, out_files);
+
+out:
+
+    if (NULL != search_path) {
+        loader_instance_heap_free(inst, search_path);
+    }
+
+    return vk_result;
+}
+#endif  // _WIN32
+
+// Find the Vulkan library manifest files.
+//
+// This function scans the "location" or "env_override" directories/files
+// for a list of JSON manifest files.  If env_override is non-NULL and has a
+// valid value, then the location is ignored; otherwise, location is used to
+// look for manifest files.  The location is interpreted as a Registry path on
+// Windows and as directory path(s) on Linux.  "home_location" is an additional
+// directory in the user's home directory to look at.  It is expanded into the
+// dir path $XDG_DATA_HOME/home_location or $HOME/.local/share/home_location
+// depending on environment variables.  This "home_location" is only used on
+// Linux.
+//
+// \returns
+// VkResult
+// A string list of manifest files to be opened in the out_files param.
+// The list has a pointer to a string for each manifest filename.
+// When done using the list in out_files, the pointers should be freed.
+// Location or override string lists can be either files or directories as
+// follows:
+//            | location | override
+// --------------------------------
+// Win ICD    | files    | files
+// Win Layer  | files    | dirs
+// Linux ICD  | dirs     | files
+// Linux Layer| dirs     | dirs
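+//
+// Illustrative example (exact values depend on the build configuration): an
+// ICD scan on Linux walks directory lists such as /etc/vulkan/icd.d and
+// $XDG_DATA_HOME/vulkan/icd.d for *.json manifests, while on Windows the
+// registry_location names a registry key whose values are full paths to the
+// manifest JSON files.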
+static VkResult loaderGetDataFiles(const struct loader_instance *inst, enum loader_data_files_type data_file_type,
+                                   bool warn_if_not_present, const char *env_override, const char *path_override,
+                                   char *registry_location, const char *relative_location, struct loader_data_files *out_files) {
+    VkResult res = VK_SUCCESS;
+    bool override_active = false;
+
+    // Free and init the out_files information so there's no false data left from uninitialized variables.
+    if (out_files->filename_list != NULL) {
+        for (uint32_t i = 0; i < out_files->count; i++) {
+            if (NULL != out_files->filename_list[i]) {
+                loader_instance_heap_free(inst, out_files->filename_list[i]);
+                out_files->filename_list[i] = NULL;
+            }
+        }
+        loader_instance_heap_free(inst, out_files->filename_list);
+    }
+    out_files->count = 0;
+    out_files->alloc_count = 0;
+    out_files->filename_list = NULL;
+
+    res = ReadDataFilesInSearchPaths(inst, data_file_type, env_override, path_override, relative_location, &override_active,
+                                     out_files);
+    if (VK_SUCCESS != res) {
+        goto out;
+    }
+
+#ifdef _WIN32
+    // Read the registry if the override wasn't active.
+    if (!override_active) {
+        res = ReadDataFilesInRegistry(inst, data_file_type, warn_if_not_present, registry_location, out_files);
+        if (VK_SUCCESS != res) {
+            goto out;
+        }
+    }
+#endif
+
+out:
+
+    if (VK_SUCCESS != res && NULL != out_files->filename_list) {
+        for (uint32_t remove = 0; remove < out_files->count; remove++) {
+            loader_instance_heap_free(inst, out_files->filename_list[remove]);
+        }
+        loader_instance_heap_free(inst, out_files->filename_list);
+        out_files->count = 0;
+        out_files->alloc_count = 0;
+        out_files->filename_list = NULL;
+    }
+
+    return res;
+}
+
+void loader_init_icd_lib_list() {}
+
+void loader_destroy_icd_lib_list() {}
+
+// Try to find the Vulkan ICD driver(s).
+//
+// This function scans the default system loader path(s) or path
+// specified by the \c VK_ICD_FILENAMES environment variable in
+// order to find loadable VK ICD manifest files.  From these
+// manifest files it finds the ICD libraries.
+//
+// \returns
+// Vulkan result
+// (on result == VK_SUCCESS) a list of ICDs that were discovered
+VkResult loader_icd_scan(const struct loader_instance *inst, struct loader_icd_tramp_list *icd_tramp_list) {
+    char *file_str;
+    uint16_t file_major_vers = 0;
+    uint16_t file_minor_vers = 0;
+    uint16_t file_patch_vers = 0;
+    char *vers_tok;
+    struct loader_data_files manifest_files;
+    VkResult res = VK_SUCCESS;
+    bool lockedMutex = false;
+    cJSON *json = NULL;
+    uint32_t num_good_icds = 0;
+
+    memset(&manifest_files, 0, sizeof(struct loader_data_files));
+
+    res = loader_scanned_icd_init(inst, icd_tramp_list);
+    if (VK_SUCCESS != res) {
+        goto out;
+    }
+
+    // Get a list of manifest files for ICDs
+    res = loaderGetDataFiles(inst, LOADER_DATA_FILE_MANIFEST_ICD, true, "VK_ICD_FILENAMES", NULL, VK_DRIVERS_INFO_REGISTRY_LOC,
+                             VK_DRIVERS_INFO_RELATIVE_DIR, &manifest_files);
+    if (VK_SUCCESS != res || manifest_files.count == 0) {
+        goto out;
+    }
+
+    loader_platform_thread_lock_mutex(&loader_json_lock);
+    lockedMutex = true;
+    for (uint32_t i = 0; i < manifest_files.count; i++) {
+        file_str = manifest_files.filename_list[i];
+        if (file_str == NULL) {
+            continue;
+        }
+
+        VkResult temp_res = loader_get_json(inst, file_str, &json);
+        if (NULL == json || temp_res != VK_SUCCESS) {
+            if (NULL != json) {
+                cJSON_Delete(json);
+                json = NULL;
+            }
+            // If we haven't already found an ICD, copy this result to
+            // the returned result.
+            if (num_good_icds == 0) {
+                res = temp_res;
+            }
+            if (temp_res == VK_ERROR_OUT_OF_HOST_MEMORY) {
+                break;
+            } else {
+                continue;
+            }
+        }
+        res = temp_res;
+
+        cJSON *item, *itemICD;
+        item = cJSON_GetObjectItem(json, "file_format_version");
+        if (item == NULL) {
+            if (num_good_icds == 0) {
+                res = VK_ERROR_INITIALIZATION_FAILED;
+            }
+            loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "loader_icd_scan: ICD JSON %s does not have a"
+                       " \'file_format_version\' field. Skipping ICD JSON.",
+                       file_str);
+            cJSON_Delete(json);
+            json = NULL;
+            continue;
+        }
+
+        char *file_vers = cJSON_Print(item);
+        if (NULL == file_vers) {
+            // Only reason the print can fail is if there was an allocation issue
+            if (num_good_icds == 0) {
+                res = VK_ERROR_OUT_OF_HOST_MEMORY;
+            }
+            loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "loader_icd_scan: Failed retrieving ICD JSON %s"
+                       " \'file_format_version\' field.  Skipping ICD JSON",
+                       file_str);
+            cJSON_Delete(json);
+            json = NULL;
+            continue;
+        }
+        loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0, "Found ICD manifest file %s, version %s", file_str, file_vers);
+
+        // Get the major/minor/patch as integers for easier comparison
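+        // e.g. a "file_format_version" value of "1.0.1" parses to
+        // file_major_vers = 1, file_minor_vers = 0, file_patch_vers = 1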
+        vers_tok = strtok(file_vers, ".\"\n\r");
+        if (NULL != vers_tok) {
+            file_major_vers = (uint16_t)atoi(vers_tok);
+            vers_tok = strtok(NULL, ".\"\n\r");
+            if (NULL != vers_tok) {
+                file_minor_vers = (uint16_t)atoi(vers_tok);
+                vers_tok = strtok(NULL, ".\"\n\r");
+                if (NULL != vers_tok) {
+                    file_patch_vers = (uint16_t)atoi(vers_tok);
+                }
+            }
+        }
+
+        if (file_major_vers != 1 || file_minor_vers != 0 || file_patch_vers > 1) {
+            loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "loader_icd_scan: Unexpected manifest file version "
+                       "(expected 1.0.0 or 1.0.1), may cause errors");
+        }
+        cJSON_Free(file_vers);
+
+        itemICD = cJSON_GetObjectItem(json, "ICD");
+        if (itemICD != NULL) {
+            item = cJSON_GetObjectItem(itemICD, "library_path");
+            if (item != NULL) {
+                char *temp = cJSON_Print(item);
+                if (!temp || strlen(temp) == 0) {
+                    if (num_good_icds == 0) {
+                        res = VK_ERROR_OUT_OF_HOST_MEMORY;
+                    }
+                    loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                               "loader_icd_scan: Failed retrieving ICD JSON %s"
+                               " \'library_path\' field.  Skipping ICD JSON.",
+                               file_str);
+                    cJSON_Free(temp);
+                    cJSON_Delete(json);
+                    json = NULL;
+                    continue;
+                }
+                // strip out extra quotes
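+                // (cJSON_Print returns the JSON string value with its
+                // surrounding quotes, so drop the trailing quote here and skip
+                // the leading one when copying below)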
+                temp[strlen(temp) - 1] = '\0';
+                char *library_path = loader_stack_alloc(strlen(temp) + 1);
+                if (NULL == library_path) {
+                    loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                               "loader_icd_scan: Failed to allocate space for "
+                               "ICD JSON %s \'library_path\' value.  Skipping "
+                               "ICD JSON.",
+                               file_str);
+                    res = VK_ERROR_OUT_OF_HOST_MEMORY;
+                    cJSON_Free(temp);
+                    cJSON_Delete(json);
+                    json = NULL;
+                    goto out;
+                }
+                strcpy(library_path, &temp[1]);
+                cJSON_Free(temp);
+                if (strlen(library_path) == 0) {
+                    loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                               "loader_icd_scan: ICD JSON %s \'library_path\'"
+                               " field is empty.  Skipping ICD JSON.",
+                               file_str);
+                    cJSON_Delete(json);
+                    json = NULL;
+                    continue;
+                }
+                char fullpath[MAX_STRING_SIZE];
+                // Print out the paths being searched if debugging is enabled
+                loader_log(inst, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0, "Searching for ICD drivers named %s", library_path);
+                if (loader_platform_is_path(library_path)) {
+                    // a relative or absolute path
+                    char *name_copy = loader_stack_alloc(strlen(file_str) + 1);
+                    char *rel_base;
+                    strcpy(name_copy, file_str);
+                    rel_base = loader_platform_dirname(name_copy);
+                    loader_expand_path(library_path, rel_base, sizeof(fullpath), fullpath);
+                } else {
+// a filename which is assumed to be in a system directory
+#if defined(DEFAULT_VK_DRIVERS_PATH)
+                    loader_get_fullpath(library_path, DEFAULT_VK_DRIVERS_PATH, sizeof(fullpath), fullpath);
+#else
+                    loader_get_fullpath(library_path, "", sizeof(fullpath), fullpath);
+#endif
+                }
+
+                uint32_t vers = 0;
+                item = cJSON_GetObjectItem(itemICD, "api_version");
+                if (item != NULL) {
+                    temp = cJSON_Print(item);
+                    if (NULL == temp) {
+                        loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                                   "loader_icd_scan: Failed retrieving ICD JSON %s"
+                                   " \'api_version\' field.  Skipping ICD JSON.",
+                                   file_str);
+
+                        // Only reason the print can fail is if there was an
+                        // allocation issue
+                        if (num_good_icds == 0) {
+                            res = VK_ERROR_OUT_OF_HOST_MEMORY;
+                        }
+
+                        cJSON_Free(temp);
+                        cJSON_Delete(json);
+                        json = NULL;
+                        continue;
+                    }
+                    vers = loader_make_version(temp);
+                    cJSON_Free(temp);
+                } else {
+                    loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                               "loader_icd_scan: ICD JSON %s does not have an"
+                               " \'api_version\' field.",
+                               file_str);
+                }
+
+                res = loader_scanned_icd_add(inst, icd_tramp_list, fullpath, vers);
+                if (VK_SUCCESS != res) {
+                    loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                               "loader_icd_scan: Failed to add ICD JSON %s. "
+                               " Skipping ICD JSON.",
+                               fullpath);
+                    cJSON_Delete(json);
+                    json = NULL;
+                    continue;
+                }
+                num_good_icds++;
+            } else {
+                loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                           "loader_icd_scan: Failed to find \'library_path\' "
+                           "object in ICD JSON file %s.  Skipping ICD JSON.",
+                           file_str);
+            }
+        } else {
+            loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "loader_icd_scan: Can not find \'ICD\' object in ICD JSON "
+                       "file %s.  Skipping ICD JSON",
+                       file_str);
+        }
+
+        cJSON_Delete(json);
+        json = NULL;
+    }
+
+out:
+
+    if (NULL != json) {
+        cJSON_Delete(json);
+    }
+
+    if (NULL != manifest_files.filename_list) {
+        for (uint32_t i = 0; i < manifest_files.count; i++) {
+            if (NULL != manifest_files.filename_list[i]) {
+                loader_instance_heap_free(inst, manifest_files.filename_list[i]);
+            }
+        }
+        loader_instance_heap_free(inst, manifest_files.filename_list);
+    }
+    if (lockedMutex) {
+        loader_platform_thread_unlock_mutex(&loader_json_lock);
+    }
+
+    return res;
+}
+
+void loaderScanForLayers(struct loader_instance *inst, struct loader_layer_list *instance_layers) {
+    char *file_str;
+    struct loader_data_files manifest_files;
+    cJSON *json;
+    bool override_layer_valid = false;
+    char *override_paths = NULL;
+    uint32_t total_count = 0;
+
+    memset(&manifest_files, 0, sizeof(struct loader_data_files));
+
+    // Cleanup any previously scanned libraries
+    loaderDeleteLayerListAndProperties(inst, instance_layers);
+
+    loader_platform_thread_lock_mutex(&loader_json_lock);
+
+    // Get a list of manifest files for any implicit layers
+    // Pass NULL for environment variable override - implicit layers are not overridden by LAYERS_PATH_ENV
+    if (VK_SUCCESS != loaderGetDataFiles(inst, LOADER_DATA_FILE_MANIFEST_LAYER, false, NULL, NULL, VK_ILAYERS_INFO_REGISTRY_LOC,
+                                         VK_ILAYERS_INFO_RELATIVE_DIR, &manifest_files)) {
+        goto out;
+    }
+
+    if (manifest_files.count != 0) {
+        total_count += manifest_files.count;
+        for (uint32_t i = 0; i < manifest_files.count; i++) {
+            file_str = manifest_files.filename_list[i];
+            if (file_str == NULL) {
+                continue;
+            }
+
+            // Parse file into JSON struct
+            VkResult res = loader_get_json(inst, file_str, &json);
+            if (VK_ERROR_OUT_OF_HOST_MEMORY == res) {
+                goto out;
+            } else if (VK_SUCCESS != res || NULL == json) {
+                continue;
+            }
+
+            VkResult local_res = loaderAddLayerProperties(inst, instance_layers, json, true, file_str);
+            cJSON_Delete(json);
+
+            if (VK_SUCCESS != local_res) {
+                goto out;
+            }
+        }
+    }
+
+    // Check to see if the override layer is present, and use its override paths.
+    for (int32_t i = 0; i < (int32_t)instance_layers->count; i++) {
+        struct loader_layer_properties *prop = &instance_layers->list[i];
+        if (prop->is_override && loaderImplicitLayerIsEnabled(inst, prop) && prop->num_override_paths > 0) {
+            char *cur_write_ptr = NULL;
+            size_t override_path_size = 0;
+            for (uint32_t j = 0; j < prop->num_override_paths; j++) {
+                override_path_size += DetermineDataFilePathSize(prop->override_paths[j], 0);
+            }
+            override_paths = loader_instance_heap_alloc(inst, override_path_size, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
+            if (override_paths == NULL) {
+                goto out;
+            }
+            cur_write_ptr = &override_paths[0];
+            for (uint32_t j = 0; j < prop->num_override_paths; j++) {
+                CopyDataFilePath(prop->override_paths[j], NULL, 0, &cur_write_ptr);
+            }
+            // Remove the last path separator
+            --cur_write_ptr;
+            assert(cur_write_ptr - override_paths < (ptrdiff_t)override_path_size);
+            *cur_write_ptr = '\0';
+            loader_log(NULL, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, "loaderScanForLayers: Override layer has override paths set to %s",
+                       override_paths);
+        }
+    }
+
+    // Get a list of manifest files for explicit layers
+    if (VK_SUCCESS != loaderGetDataFiles(inst, LOADER_DATA_FILE_MANIFEST_LAYER, true, "VK_LAYER_PATH", override_paths,
+                                         VK_ELAYERS_INFO_REGISTRY_LOC, VK_ELAYERS_INFO_RELATIVE_DIR, &manifest_files)) {
+        goto out;
+    }
+
+    // Make sure we have at least one layer; if not, go ahead and return
+    if (manifest_files.count == 0 && total_count == 0) {
+        goto out;
+    } else {
+        total_count += manifest_files.count;
+        for (uint32_t i = 0; i < manifest_files.count; i++) {
+            file_str = manifest_files.filename_list[i];
+            if (file_str == NULL) {
+                continue;
+            }
+
+            // Parse file into JSON struct
+            VkResult res = loader_get_json(inst, file_str, &json);
+            if (VK_ERROR_OUT_OF_HOST_MEMORY == res) {
+                goto out;
+            } else if (VK_SUCCESS != res || NULL == json) {
+                continue;
+            }
+
+            VkResult local_res = loaderAddLayerProperties(inst, instance_layers, json, false, file_str);
+            cJSON_Delete(json);
+
+            // If the error is anything other than out of memory we still want to try to load the other layers
+            if (VK_ERROR_OUT_OF_HOST_MEMORY == local_res) {
+                goto out;
+            }
+        }
+    }
+
+    // See if "VK_LAYER_LUNARG_standard_validation" already in list.
+    bool found_std_val = false;
+    for (uint32_t i = 0; i < instance_layers->count; i++) {
+        struct loader_layer_properties *props = &instance_layers->list[i];
+        if (strcmp(props->info.layerName, std_validation_str) == 0) {
+            found_std_val = true;
+            break;
+        }
+    }
+
+    // If we didn't find the VK_LAYER_LUNARG_standard_validation meta-layer in
+    // the list, then we need to add it manually.  This is likely because we're
+    // dealing with a new loader, but an old layer folder.
+    if (!found_std_val && !loaderAddLegacyStandardValidationLayer(inst, instance_layers)) {
+        goto out;
+    }
+
+    // Verify any meta-layers in the list are valid and all the component layers are
+    // actually present in the available layer list
+    VerifyAllMetaLayers(inst, instance_layers, &override_layer_valid);
+
+    if (override_layer_valid) {
+        loaderRemoveLayersInBlacklist(inst, instance_layers);
+        if (NULL != inst) {
+            inst->override_layer_present = true;
+        }
+    }
+
+out:
+
+    if (NULL != override_paths) {
+        loader_instance_heap_free(inst, override_paths);
+    }
+    if (NULL != manifest_files.filename_list) {
+        for (uint32_t i = 0; i < manifest_files.count; i++) {
+            if (NULL != manifest_files.filename_list[i]) {
+                loader_instance_heap_free(inst, manifest_files.filename_list[i]);
+            }
+        }
+        loader_instance_heap_free(inst, manifest_files.filename_list);
+    }
+    loader_platform_thread_unlock_mutex(&loader_json_lock);
+}
+
+void loaderScanForImplicitLayers(struct loader_instance *inst, struct loader_layer_list *instance_layers) {
+    char *file_str;
+    struct loader_data_files manifest_files;
+    cJSON *json;
+    bool override_layer_valid = false;
+    char *override_paths = NULL;
+    bool implicit_metalayer_present = false;
+    bool have_json_lock = false;
+
+    // Before we begin anything, init manifest_files to avoid a delete of garbage memory if
+    // a failure occurs before allocating the manifest filename_list.
+    memset(&manifest_files, 0, sizeof(struct loader_data_files));
+
+    // Pass NULL for environment variable override - implicit layers are not overridden by LAYERS_PATH_ENV
+    VkResult res = loaderGetDataFiles(inst, LOADER_DATA_FILE_MANIFEST_LAYER, false, NULL, NULL, VK_ILAYERS_INFO_REGISTRY_LOC,
+                                      VK_ILAYERS_INFO_RELATIVE_DIR, &manifest_files);
+    if (VK_SUCCESS != res || manifest_files.count == 0) {
+        goto out;
+    }
+
+    // Cleanup any previously scanned libraries
+    loaderDeleteLayerListAndProperties(inst, instance_layers);
+
+    loader_platform_thread_lock_mutex(&loader_json_lock);
+    have_json_lock = true;
+
+    for (uint32_t i = 0; i < manifest_files.count; i++) {
+        file_str = manifest_files.filename_list[i];
+        if (file_str == NULL) {
+            continue;
+        }
+
+        // parse file into JSON struct
+        res = loader_get_json(inst, file_str, &json);
+        if (VK_ERROR_OUT_OF_HOST_MEMORY == res) {
+            goto out;
+        } else if (VK_SUCCESS != res || NULL == json) {
+            continue;
+        }
+
+        res = loaderAddLayerProperties(inst, instance_layers, json, true, file_str);
+
+        loader_instance_heap_free(inst, file_str);
+        manifest_files.filename_list[i] = NULL;
+        cJSON_Delete(json);
+
+        if (VK_ERROR_OUT_OF_HOST_MEMORY == res) {
+            goto out;
+        }
+    }
+
+    // Check to see if either the override layer or another implicit meta-layer is present.
+    // Each of these may require explicit layers to be enabled at this time.
+    for (int32_t i = 0; i < (int32_t)instance_layers->count; i++) {
+        struct loader_layer_properties *prop = &instance_layers->list[i];
+        if (prop->is_override && loaderImplicitLayerIsEnabled(inst, prop)) {
+            override_layer_valid = true;
+            if (prop->num_override_paths > 0) {
+                char *cur_write_ptr = NULL;
+                size_t override_path_size = 0;
+                for (uint32_t j = 0; j < prop->num_override_paths; j++) {
+                    override_path_size += DetermineDataFilePathSize(prop->override_paths[j], 0);
+                }
+                override_paths = loader_instance_heap_alloc(inst, override_path_size, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
+                if (override_paths == NULL) {
+                    goto out;
+                }
+                cur_write_ptr = &override_paths[0];
+                for (uint32_t j = 0; j < prop->num_override_paths; j++) {
+                    CopyDataFilePath(prop->override_paths[j], NULL, 0, &cur_write_ptr);
+                }
+                // Remove the last path separator
+                --cur_write_ptr;
+                assert(cur_write_ptr - override_paths < (ptrdiff_t)override_path_size);
+                *cur_write_ptr = '\0';
+                loader_log(NULL, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                           "loaderScanForImplicitLayers: Override layer has override paths set to %s", override_paths);
+            }
+        } else if (!prop->is_override && prop->type_flags & VK_LAYER_TYPE_FLAG_META_LAYER) {
+            implicit_metalayer_present = true;
+        }
+    }
+
+    // If either the override layer or an implicit meta-layer are present, we need to add
+    // explicit layer info as well.  Not to worry, though, all explicit layers not included
+    // in the override layer will be removed below in loaderRemoveLayersInBlacklist().
+    if (override_layer_valid || implicit_metalayer_present) {
+        if (VK_SUCCESS != loaderGetDataFiles(inst, LOADER_DATA_FILE_MANIFEST_LAYER, true, "VK_LAYER_PATH", override_paths,
+                                             VK_ELAYERS_INFO_REGISTRY_LOC, VK_ELAYERS_INFO_RELATIVE_DIR, &manifest_files)) {
+            goto out;
+        }
+
+        for (uint32_t i = 0; i < manifest_files.count; i++) {
+            file_str = manifest_files.filename_list[i];
+            if (file_str == NULL) {
+                continue;
+            }
+
+            // parse file into JSON struct
+            res = loader_get_json(inst, file_str, &json);
+            if (VK_ERROR_OUT_OF_HOST_MEMORY == res) {
+                goto out;
+            } else if (VK_SUCCESS != res || NULL == json) {
+                continue;
+            }
+
+            res = loaderAddLayerProperties(inst, instance_layers, json, true, file_str);
+
+            loader_instance_heap_free(inst, file_str);
+            cJSON_Delete(json);
+
+            if (VK_ERROR_OUT_OF_HOST_MEMORY == res) {
+                goto out;
+            }
+        }
+    }
+
+    // Verify any meta-layers in the list are valid and all the component layers are
+    // actually present in the available layer list
+    VerifyAllMetaLayers(inst, instance_layers, &override_layer_valid);
+
+    if (override_layer_valid || implicit_metalayer_present) {
+        loaderRemoveLayersNotInImplicitMetaLayers(inst, instance_layers);
+        if (override_layer_valid && inst != NULL) {
+            inst->override_layer_present = true;
+        }
+    }
+
+out:
+
+    if (NULL != override_paths) {
+        loader_instance_heap_free(inst, override_paths);
+    }
+    if (NULL != manifest_files.filename_list) {
+        loader_instance_heap_free(inst, manifest_files.filename_list);
+    }
+
+    if (have_json_lock) {
+        loader_platform_thread_unlock_mutex(&loader_json_lock);
+    }
+}
+
+static VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL loader_gpdpa_instance_internal(VkInstance inst, const char *pName) {
+    // inst is not wrapped
+    if (inst == VK_NULL_HANDLE) {
+        return NULL;
+    }
+    VkLayerInstanceDispatchTable *disp_table = *(VkLayerInstanceDispatchTable **)inst;
+    void *addr;
+
+    if (disp_table == NULL) return NULL;
+
+    bool found_name;
+    addr = loader_lookup_instance_dispatch_table(disp_table, pName, &found_name);
+    if (found_name) {
+        return addr;
+    }
+
+    if (loader_phys_dev_ext_gpa(loader_get_instance(inst), pName, true, NULL, &addr)) return addr;
+
+    // Don't call down the chain; this would be an infinite loop
+    loader_log(NULL, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, "loader_gpdpa_instance_internal() unrecognized name %s", pName);
+    return NULL;
+}
+
+static VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL loader_gpdpa_instance_terminator(VkInstance inst, const char *pName) {
+    // inst is not wrapped
+    if (inst == VK_NULL_HANDLE) {
+        return NULL;
+    }
+    VkLayerInstanceDispatchTable *disp_table = *(VkLayerInstanceDispatchTable **)inst;
+    void *addr;
+
+    if (disp_table == NULL) return NULL;
+
+    bool found_name;
+    addr = loader_lookup_instance_dispatch_table(disp_table, pName, &found_name);
+    if (found_name) {
+        return addr;
+    }
+
+    // Get the terminator, but don't perform checking since it should already
+    // have been setup if we get here.
+    if (loader_phys_dev_ext_gpa(loader_get_instance(inst), pName, false, NULL, &addr)) {
+        return addr;
+    }
+
+    // Don't call down the chain; this would be an infinite loop
+    loader_log(NULL, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, "loader_gpdpa_instance_terminator() unrecognized name %s", pName);
+    return NULL;
+}
+
+static VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL loader_gpa_instance_internal(VkInstance inst, const char *pName) {
+    if (!strcmp(pName, "vkGetInstanceProcAddr")) {
+        return (PFN_vkVoidFunction)loader_gpa_instance_internal;
+    }
+    if (!strcmp(pName, "vk_layerGetPhysicalDeviceProcAddr")) {
+        return (PFN_vkVoidFunction)loader_gpdpa_instance_terminator;
+    }
+    if (!strcmp(pName, "vkCreateInstance")) {
+        return (PFN_vkVoidFunction)terminator_CreateInstance;
+    }
+    if (!strcmp(pName, "vkCreateDevice")) {
+        return (PFN_vkVoidFunction)terminator_CreateDevice;
+    }
+
+    // inst is not wrapped
+    if (inst == VK_NULL_HANDLE) {
+        return NULL;
+    }
+    VkLayerInstanceDispatchTable *disp_table = *(VkLayerInstanceDispatchTable **)inst;
+    void *addr;
+
+    if (disp_table == NULL) return NULL;
+
+    bool found_name;
+    addr = loader_lookup_instance_dispatch_table(disp_table, pName, &found_name);
+    if (found_name) {
+        return addr;
+    }
+
+    // Don't call down the chain; this would be an infinite loop
+    loader_log(NULL, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, "loader_gpa_instance_internal() unrecognized name %s", pName);
+    return NULL;
+}
+
+VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL loader_gpa_device_internal(VkDevice device, const char *pName) {
+    struct loader_device *dev;
+    struct loader_icd_term *icd_term = loader_get_icd_and_device(device, &dev, NULL);
+
+    // Return this function if a layer above here is asking for the vkGetDeviceProcAddr.
+    // This is so we can properly intercept any device commands needing a terminator.
+    if (!strcmp(pName, "vkGetDeviceProcAddr")) {
+        return (PFN_vkVoidFunction)loader_gpa_device_internal;
+    }
+
+    // NOTE: Device Funcs needing Trampoline/Terminator.
+    // Overrides for device functions needing a trampoline and
+    // a terminator because certain device entry-points still need to go
+    // through a terminator before hitting the ICD.  This could be for
+    // several reasons, but the main one is currently unwrapping an
+    // object before passing the appropriate info along to the ICD.
+    // This is why we also have to override the direct ICD call to
+    // vkGetDeviceProcAddr to intercept those calls.
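+    // For example, a device entry point that takes a loader-wrapped object
+    // (such as a swapchain-creation call that receives a VkSurfaceKHR) must be
+    // routed through a terminator so the object can be unwrapped before the
+    // ICD sees it.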
+    PFN_vkVoidFunction addr = get_extension_device_proc_terminator(dev, pName);
+    if (NULL != addr) {
+        return addr;
+    }
+
+    return icd_term->dispatch.GetDeviceProcAddr(device, pName);
+}
+
+// Initialize device_ext dispatch table entry as follows:
+// If dev == NULL find all logical devices created within this instance and
+//  init the entry (given by idx) in the ext dispatch table.
+// If dev != NULL only initialize the entry in the given dev's dispatch table.
+// The initialization value is obtained by calling down the device chain with
+// GDPA (GetDeviceProcAddr).
+// If GDPA returns NULL then don't initialize the dispatch table entry.
+static void loader_init_dispatch_dev_ext_entry(struct loader_instance *inst, struct loader_device *dev, uint32_t idx,
+                                               const char *funcName) {
+    void *gdpa_value;
+    if (dev != NULL) {
+        gdpa_value = dev->loader_dispatch.core_dispatch.GetDeviceProcAddr(dev->chain_device, funcName);
+        if (gdpa_value != NULL) dev->loader_dispatch.ext_dispatch.dev_ext[idx] = (PFN_vkDevExt)gdpa_value;
+    } else {
+        for (struct loader_icd_term *icd_term = inst->icd_terms; icd_term != NULL; icd_term = icd_term->next) {
+            struct loader_device *ldev = icd_term->logical_device_list;
+            while (ldev) {
+                gdpa_value = ldev->loader_dispatch.core_dispatch.GetDeviceProcAddr(ldev->chain_device, funcName);
+                if (gdpa_value != NULL) ldev->loader_dispatch.ext_dispatch.dev_ext[idx] = (PFN_vkDevExt)gdpa_value;
+                ldev = ldev->next;
+            }
+        }
+    }
+}
+
+// Find all device extensions in the hash table and initialize the dispatch
+// table for dev for each of those extension entrypoints found in the hash table.
+void loader_init_dispatch_dev_ext(struct loader_instance *inst, struct loader_device *dev) {
+    for (uint32_t i = 0; i < MAX_NUM_UNKNOWN_EXTS; i++) {
+        if (inst->dev_ext_disp_hash[i].func_name != NULL)
+            loader_init_dispatch_dev_ext_entry(inst, dev, i, inst->dev_ext_disp_hash[i].func_name);
+    }
+}
+
+static bool loader_check_icds_for_dev_ext_address(struct loader_instance *inst, const char *funcName) {
+    struct loader_icd_term *icd_term;
+    icd_term = inst->icd_terms;
+    while (NULL != icd_term) {
+        if (icd_term->scanned_icd->GetInstanceProcAddr(icd_term->instance, funcName))
+            // this icd supports funcName
+            return true;
+        icd_term = icd_term->next;
+    }
+
+    return false;
+}
+
+static bool loader_check_layer_list_for_dev_ext_address(const struct loader_layer_list *const layers, const char *funcName) {
+    // Iterate over the layers.
+    for (uint32_t layer = 0; layer < layers->count; ++layer) {
+        // Iterate over the extensions.
+        const struct loader_device_extension_list *const extensions = &(layers->list[layer].device_extension_list);
+        for (uint32_t extension = 0; extension < extensions->count; ++extension) {
+            // Iterate over the entry points.
+            const struct loader_dev_ext_props *const property = &(extensions->list[extension]);
+            for (uint32_t entry = 0; entry < property->entrypoint_count; ++entry) {
+                if (strcmp(property->entrypoints[entry], funcName) == 0) {
+                    return true;
+                }
+            }
+        }
+    }
+
+    return false;
+}
+
+static void loader_free_dev_ext_table(struct loader_instance *inst) {
+    for (uint32_t i = 0; i < MAX_NUM_UNKNOWN_EXTS; i++) {
+        loader_instance_heap_free(inst, inst->dev_ext_disp_hash[i].func_name);
+        loader_instance_heap_free(inst, inst->dev_ext_disp_hash[i].list.index);
+    }
+    memset(inst->dev_ext_disp_hash, 0, sizeof(inst->dev_ext_disp_hash));
+}
+
+static bool loader_add_dev_ext_table(struct loader_instance *inst, uint32_t *ptr_idx, const char *funcName) {
+    uint32_t i;
+    uint32_t idx = *ptr_idx;
+    struct loader_dispatch_hash_list *list = &inst->dev_ext_disp_hash[idx].list;
+
+    if (!inst->dev_ext_disp_hash[idx].func_name) {
+        // no entry here at this idx, so use it
+        assert(list->capacity == 0);
+        inst->dev_ext_disp_hash[idx].func_name =
+            (char *)loader_instance_heap_alloc(inst, strlen(funcName) + 1, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+        if (inst->dev_ext_disp_hash[idx].func_name == NULL) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "loader_add_dev_ext_table: Failed to allocate memory "
+                       "for func_name %s",
+                       funcName);
+            return false;
+        }
+        strncpy(inst->dev_ext_disp_hash[idx].func_name, funcName, strlen(funcName) + 1);
+        return true;
+    }
+
+    // check for enough capacity
+    if (list->capacity == 0) {
+        list->index = loader_instance_heap_alloc(inst, 8 * sizeof(*(list->index)), VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+        if (list->index == NULL) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "loader_add_dev_ext_table: Failed to allocate memory for list index of function %s", funcName);
+            return false;
+        }
+        list->capacity = 8 * sizeof(*(list->index));
+    } else if (list->capacity < (list->count + 1) * sizeof(*(list->index))) {
+        void *new_ptr = loader_instance_heap_realloc(inst, list->index, list->capacity, list->capacity * 2,
+                                                     VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+        if (NULL == new_ptr) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "loader_add_dev_ext_table: Failed to reallocate memory for list index of function %s", funcName);
+            return false;
+        }
+        list->index = new_ptr;
+        list->capacity *= 2;
+    }
+
+    // find an unused index in the hash table and use it
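+    // (linear probe, wrapping at MAX_NUM_UNKNOWN_EXTS; if we come all the way
+    // back to the original index the table is full and the add fails)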
+    i = (idx + 1) % MAX_NUM_UNKNOWN_EXTS;
+    do {
+        if (!inst->dev_ext_disp_hash[i].func_name) {
+            assert(inst->dev_ext_disp_hash[i].list.capacity == 0);
+            inst->dev_ext_disp_hash[i].func_name =
+                (char *)loader_instance_heap_alloc(inst, strlen(funcName) + 1, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+            if (inst->dev_ext_disp_hash[i].func_name == NULL) {
+                loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                           "loader_add_dev_ext_table: Failed to allocate memory "
+                           "for func_name %s",
+                           funcName);
+                return false;
+            }
+            strncpy(inst->dev_ext_disp_hash[i].func_name, funcName, strlen(funcName) + 1);
+            list->index[list->count] = i;
+            list->count++;
+            *ptr_idx = i;
+            return true;
+        }
+        i = (i + 1) % MAX_NUM_UNKNOWN_EXTS;
+    } while (i != idx);
+
+    loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+               "loader_add_dev_ext_table:  Could not insert into hash table; is "
+               "it full?");
+
+    return false;
+}
+
+static bool loader_name_in_dev_ext_table(struct loader_instance *inst, uint32_t *idx, const char *funcName) {
+    uint32_t alt_idx;
+    if (inst->dev_ext_disp_hash[*idx].func_name && !strcmp(inst->dev_ext_disp_hash[*idx].func_name, funcName)) return true;
+
+    // funcName wasn't at the primary spot in the hash table
+    // search the list of secondary locations (shallow search, not deep search)
+    for (uint32_t i = 0; i < inst->dev_ext_disp_hash[*idx].list.count; i++) {
+        alt_idx = inst->dev_ext_disp_hash[*idx].list.index[i];
+        if (!strcmp(inst->dev_ext_disp_hash[alt_idx].func_name, funcName)) {
+            *idx = alt_idx;
+            return true;
+        }
+    }
+
+    return false;
+}
+
+// This function returns generic trampoline code address for unknown entry
+// points.
+// Presumably, these unknown entry points (as given by funcName) are device
+// extension entrypoints.  A hash table is used to keep a list of unknown entry
+// points and their mapping to the device extension dispatch table
+// (struct loader_dev_ext_dispatch_table).
+// \returns
+// For a given entry point string (funcName), if an existing mapping is found,
+// the trampoline address for that mapping is returned.  Otherwise, this
+// unknown entry point has not been seen yet; check if a layer or ICD supports
+// it, and if so a new entry in the hash table is initialized and the
+// trampoline address for the new entry is returned.  NULL is returned if the
+// hash table is full or if no discovered layer or ICD returns a non-NULL
+// GetProcAddr for it.
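+//
+// Illustrative flow (names are hypothetical): an unknown name such as
+// "vkFooEXT" is hashed with murmurhash into one of MAX_NUM_UNKNOWN_EXTS slots;
+// on a collision the add routine linearly probes for a free slot and records
+// the alternate index in the original slot's secondary list so later lookups
+// can still find it.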
+void *loader_dev_ext_gpa(struct loader_instance *inst, const char *funcName) {
+    uint32_t idx;
+    uint32_t seed = 0;
+
+    idx = murmurhash(funcName, strlen(funcName), seed) % MAX_NUM_UNKNOWN_EXTS;
+
+    if (loader_name_in_dev_ext_table(inst, &idx, funcName))
+        // found funcName already in hash
+        return loader_get_dev_ext_trampoline(idx);
+
+    // Check if funcName is supported in either ICDs or a layer library
+    if (!loader_check_icds_for_dev_ext_address(inst, funcName) &&
+        !loader_check_layer_list_for_dev_ext_address(&inst->app_activated_layer_list, funcName)) {
+        // not supported by any ICD or layer library, so nothing to add
+        return NULL;
+    }
+
+    if (loader_add_dev_ext_table(inst, &idx, funcName)) {
+        // successfully added new table entry
+        // init any dev dispatch table entries as needed
+        loader_init_dispatch_dev_ext_entry(inst, NULL, idx, funcName);
+        return loader_get_dev_ext_trampoline(idx);
+    }
+
+    return NULL;
+}
+
+static bool loader_check_icds_for_phys_dev_ext_address(struct loader_instance *inst, const char *funcName) {
+    struct loader_icd_term *icd_term;
+    icd_term = inst->icd_terms;
+    while (NULL != icd_term) {
+        if (icd_term->scanned_icd->interface_version >= MIN_PHYS_DEV_EXTENSION_ICD_INTERFACE_VERSION &&
+            icd_term->scanned_icd->GetPhysicalDeviceProcAddr(icd_term->instance, funcName))
+            // this icd supports funcName
+            return true;
+        icd_term = icd_term->next;
+    }
+
+    return false;
+}
+
+static bool loader_check_layer_list_for_phys_dev_ext_address(struct loader_instance *inst, const char *funcName) {
+    struct loader_layer_properties *layer_prop_list = inst->expanded_activated_layer_list.list;
+    for (uint32_t layer = 0; layer < inst->expanded_activated_layer_list.count; ++layer) {
+        // If this layer supports the vk_layerGetPhysicalDeviceProcAddr, then call
+        // it and see if it returns a valid pointer for this function name.
+        if (layer_prop_list[layer].interface_version > 1) {
+            const struct loader_layer_functions *const functions = &(layer_prop_list[layer].functions);
+            if (NULL != functions->get_physical_device_proc_addr &&
+                NULL != functions->get_physical_device_proc_addr((VkInstance)inst->instance, funcName)) {
+                return true;
+            }
+        }
+    }
+
+    return false;
+}
+
+static void loader_free_phys_dev_ext_table(struct loader_instance *inst) {
+    for (uint32_t i = 0; i < MAX_NUM_UNKNOWN_EXTS; i++) {
+        loader_instance_heap_free(inst, inst->phys_dev_ext_disp_hash[i].func_name);
+        loader_instance_heap_free(inst, inst->phys_dev_ext_disp_hash[i].list.index);
+    }
+    memset(inst->phys_dev_ext_disp_hash, 0, sizeof(inst->phys_dev_ext_disp_hash));
+}
+
+static bool loader_add_phys_dev_ext_table(struct loader_instance *inst, uint32_t *ptr_idx, const char *funcName) {
+    uint32_t i;
+    uint32_t idx = *ptr_idx;
+    struct loader_dispatch_hash_list *list = &inst->phys_dev_ext_disp_hash[idx].list;
+
+    if (!inst->phys_dev_ext_disp_hash[idx].func_name) {
+        // no entry here at this idx, so use it
+        assert(list->capacity == 0);
+        inst->phys_dev_ext_disp_hash[idx].func_name =
+            (char *)loader_instance_heap_alloc(inst, strlen(funcName) + 1, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+        if (inst->phys_dev_ext_disp_hash[idx].func_name == NULL) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "loader_add_phys_dev_ext_table() can't allocate memory for "
+                       "func_name");
+            return false;
+        }
+        strncpy(inst->phys_dev_ext_disp_hash[idx].func_name, funcName, strlen(funcName) + 1);
+        return true;
+    }
+
+    // check for enough capacity
+    if (list->capacity == 0) {
+        list->index = loader_instance_heap_alloc(inst, 8 * sizeof(*(list->index)), VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+        if (list->index == NULL) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, "loader_add_phys_dev_ext_table() can't allocate list memory");
+            return false;
+        }
+        list->capacity = 8 * sizeof(*(list->index));
+    } else if (list->capacity < (list->count + 1) * sizeof(*(list->index))) {
+        void *new_ptr = loader_instance_heap_realloc(inst, list->index, list->capacity, list->capacity * 2,
+                                                     VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+        if (NULL == new_ptr) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, "loader_add_phys_dev_ext_table() can't reallocate list memory");
+            return false;
+        }
+        list->index = new_ptr;
+        list->capacity *= 2;
+    }
+
+    // find an unused index in the hash table and use it
+    i = (idx + 1) % MAX_NUM_UNKNOWN_EXTS;
+    do {
+        if (!inst->phys_dev_ext_disp_hash[i].func_name) {
+            assert(inst->phys_dev_ext_disp_hash[i].list.capacity == 0);
+            inst->phys_dev_ext_disp_hash[i].func_name =
+                (char *)loader_instance_heap_alloc(inst, strlen(funcName) + 1, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+            if (inst->phys_dev_ext_disp_hash[i].func_name == NULL) {
+                loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                           "loader_add_dev_ext_table() can't reallocate "
+                           "func_name memory");
+                return false;
+            }
+            strncpy(inst->phys_dev_ext_disp_hash[i].func_name, funcName, strlen(funcName) + 1);
+            list->index[list->count] = i;
+            list->count++;
+            *ptr_idx = i;
+            return true;
+        }
+        i = (i + 1) % MAX_NUM_UNKNOWN_EXTS;
+    } while (i != idx);
+
+    loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+               "loader_add_phys_dev_ext_table() couldn't insert into hash table; is "
+               "it full?");
+    return false;
+}
+
+static bool loader_name_in_phys_dev_ext_table(struct loader_instance *inst, uint32_t *idx, const char *funcName) {
+    uint32_t alt_idx;
+    if (inst->phys_dev_ext_disp_hash[*idx].func_name && !strcmp(inst->phys_dev_ext_disp_hash[*idx].func_name, funcName))
+        return true;
+
+    // funcName wasn't at the primary spot in the hash table
+    // search the list of secondary locations (shallow search, not deep search)
+    for (uint32_t i = 0; i < inst->phys_dev_ext_disp_hash[*idx].list.count; i++) {
+        alt_idx = inst->phys_dev_ext_disp_hash[*idx].list.index[i];
+        if (!strcmp(inst->phys_dev_ext_disp_hash[alt_idx].func_name, funcName)) {
+            *idx = alt_idx;
+            return true;
+        }
+    }
+
+    return false;
+}
+
+// This function returns a generic trampoline and/or terminator function
+// address for any unknown physical device extension commands.  A hash
+// table is used to keep a list of unknown entry points and their
+// mapping to the physical device extension dispatch table (struct
+// loader_phys_dev_ext_dispatch_table).
+// For a given entry point string (funcName), if an existing mapping is
+// found, then the trampoline address for that mapping is returned in
+// tramp_addr (if it is not NULL) and the terminator address for that
+// mapping is returned in term_addr (if it is not NULL). Otherwise,
+// this unknown entry point has not been seen yet.
+// If it has not been seen before, and perform_checking is 'true',
+// check if a layer or an ICD supports it.  If so then a new entry in
+// the hash table is initialized and the trampoline and/or terminator
+// addresses are returned.
+// Null is returned if the hash table is full or if no discovered layer or
+// ICD returns a non-NULL GetProcAddr for it.
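+//
+// (In loader terms, the trampoline is the application-facing entry point that
+// dispatches into the layer chain, while the terminator sits at the bottom of
+// the chain and calls into the ICD; both are generated per hash-table index.)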
+bool loader_phys_dev_ext_gpa(struct loader_instance *inst, const char *funcName, bool perform_checking, void **tramp_addr,
+                             void **term_addr) {
+    uint32_t idx;
+    uint32_t seed = 0;
+    bool success = false;
+
+    if (inst == NULL) {
+        goto out;
+    }
+
+    if (NULL != tramp_addr) {
+        *tramp_addr = NULL;
+    }
+    if (NULL != term_addr) {
+        *term_addr = NULL;
+    }
+
+    // We should always check to see if any ICD supports it.
+    if (!loader_check_icds_for_phys_dev_ext_address(inst, funcName)) {
+        // If we're not checking layers, or we are and it's not in a layer, just
+        // return
+        if (!perform_checking || !loader_check_layer_list_for_phys_dev_ext_address(inst, funcName)) {
+            goto out;
+        }
+    }
+
+    idx = murmurhash(funcName, strlen(funcName), seed) % MAX_NUM_UNKNOWN_EXTS;
+    if (perform_checking && !loader_name_in_phys_dev_ext_table(inst, &idx, funcName)) {
+        uint32_t i;
+        bool added = false;
+
+        // Only need to add the first one to get its index in the instance.
+        // Others will use the same index.
+        if (!added && loader_add_phys_dev_ext_table(inst, &idx, funcName)) {
+            added = true;
+        }
+
+        // Setup the ICD function pointers
+        struct loader_icd_term *icd_term = inst->icd_terms;
+        while (NULL != icd_term) {
+            if (MIN_PHYS_DEV_EXTENSION_ICD_INTERFACE_VERSION <= icd_term->scanned_icd->interface_version &&
+                NULL != icd_term->scanned_icd->GetPhysicalDeviceProcAddr) {
+                icd_term->phys_dev_ext[idx] =
+                    (PFN_PhysDevExt)icd_term->scanned_icd->GetPhysicalDeviceProcAddr(icd_term->instance, funcName);
+
+                // Make sure we set the instance dispatch to point to the
+                // loader's terminator now since we can at least handle it
+                // in one ICD.
+                inst->disp->phys_dev_ext[idx] = loader_get_phys_dev_ext_termin(idx);
+            } else {
+                icd_term->phys_dev_ext[idx] = NULL;
+            }
+
+            icd_term = icd_term->next;
+        }
+
+        // Now, search for the first layer attached and query using it to get
+        // the first entry point.
+        for (i = 0; i < inst->expanded_activated_layer_list.count; i++) {
+            struct loader_layer_properties *layer_prop = &inst->expanded_activated_layer_list.list[i];
+            if (layer_prop->interface_version > 1 && NULL != layer_prop->functions.get_physical_device_proc_addr) {
+                inst->disp->phys_dev_ext[idx] =
+                    (PFN_PhysDevExt)layer_prop->functions.get_physical_device_proc_addr((VkInstance)inst->instance, funcName);
+                if (NULL != inst->disp->phys_dev_ext[idx]) {
+                    break;
+                }
+            }
+        }
+    }
+
+    if (NULL != tramp_addr) {
+        *tramp_addr = loader_get_phys_dev_ext_tramp(idx);
+    }
+
+    if (NULL != term_addr) {
+        *term_addr = loader_get_phys_dev_ext_termin(idx);
+    }
+
+    success = true;
+
+out:
+    return success;
+}
+
+struct loader_instance *loader_get_instance(const VkInstance instance) {
+    // look up the loader_instance in our list by comparing dispatch tables, as
+    // there is no guarantee the instance is still a loader_instance* after any
+    // layers which wrap the instance object.
+    const VkLayerInstanceDispatchTable *disp;
+    struct loader_instance *ptr_instance = NULL;
+    disp = loader_get_instance_layer_dispatch(instance);
+    for (struct loader_instance *inst = loader.instances; inst; inst = inst->next) {
+        if (&inst->disp->layer_inst_disp == disp) {
+            ptr_instance = inst;
+            break;
+        }
+    }
+    return ptr_instance;
+}
+
+static loader_platform_dl_handle loaderOpenLayerFile(const struct loader_instance *inst, const char *chain_type,
+                                                     struct loader_layer_properties *prop) {
+    if ((prop->lib_handle = loader_platform_open_library(prop->lib_name)) == NULL) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, loader_platform_open_library_error(prop->lib_name));
+    } else {
+        loader_log(inst, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0, "Loading layer library %s", prop->lib_name);
+    }
+
+    return prop->lib_handle;
+}
+
+static void loaderCloseLayerFile(const struct loader_instance *inst, struct loader_layer_properties *prop) {
+    if (prop->lib_handle) {
+        loader_platform_close_library(prop->lib_handle);
+        loader_log(inst, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0, "Unloading layer library %s", prop->lib_name);
+        prop->lib_handle = NULL;
+    }
+}
+
+void loaderDeactivateLayers(const struct loader_instance *instance, struct loader_device *device, struct loader_layer_list *list) {
+    // Delete instance list of enabled layers and close any layer libraries
+    for (uint32_t i = 0; i < list->count; i++) {
+        struct loader_layer_properties *layer_prop = &list->list[i];
+
+        loaderCloseLayerFile(instance, layer_prop);
+    }
+    loaderDestroyLayerList(instance, device, list);
+}
+
+// Go through the search_list and find any layers which match type. If a layer
+// type match is found, then add it to the target list.
+static void loaderAddImplicitLayers(const struct loader_instance *inst, struct loader_layer_list *target_list,
+                                    struct loader_layer_list *expanded_target_list, const struct loader_layer_list *source_list) {
+    for (uint32_t src_layer = 0; src_layer < source_list->count; src_layer++) {
+        const struct loader_layer_properties *prop = &source_list->list[src_layer];
+        if (0 == (prop->type_flags & VK_LAYER_TYPE_FLAG_EXPLICIT_LAYER)) {
+            loaderAddImplicitLayer(inst, prop, target_list, expanded_target_list, source_list);
+        }
+    }
+}
+
+// Get the layer name(s) from the env_name environment variable. If a layer is found in
+// search_list, then add it to layer_list, but only if its type_flags matches.
+static void loaderAddEnvironmentLayers(struct loader_instance *inst, const enum layer_type_flags type_flags, const char *env_name,
+                                       struct loader_layer_list *target_list, struct loader_layer_list *expanded_target_list,
+                                       const struct loader_layer_list *source_list) {
+    char *next, *name;
+    char *layer_env = loader_getenv(env_name, inst);
+    if (layer_env == NULL) {
+        goto out;
+    }
+    name = loader_stack_alloc(strlen(layer_env) + 1);
+    if (name == NULL) {
+        goto out;
+    }
+    strcpy(name, layer_env);
+
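+    // Walk the separator-delimited list of layer names from the environment variable and
+    // add each one that matches the requested layer type.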
+    while (name && *name) {
+        next = loader_get_next_path(name);
+        loaderAddLayerNameToList(inst, name, type_flags, source_list, target_list, expanded_target_list);
+        name = next;
+    }
+
+out:
+
+    if (layer_env != NULL) {
+        loader_free_getenv(layer_env, inst);
+    }
+
+    return;
+}
+
+VkResult loaderEnableInstanceLayers(struct loader_instance *inst, const VkInstanceCreateInfo *pCreateInfo,
+                                    const struct loader_layer_list *instance_layers) {
+    VkResult err = VK_SUCCESS;
+    uint16_t layer_api_major_version;
+    uint16_t layer_api_minor_version;
+    uint32_t i;
+    struct loader_layer_properties *prop;
+
+    assert(inst && "Cannot have null instance");
+
+    if (!loaderInitLayerList(inst, &inst->app_activated_layer_list)) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "loaderEnableInstanceLayers: Failed to initialize application version of the layer list");
+        return VK_ERROR_OUT_OF_HOST_MEMORY;
+    }
+
+    if (!loaderInitLayerList(inst, &inst->expanded_activated_layer_list)) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "loaderEnableInstanceLayers: Failed to initialize expanded version of the layer list");
+        return VK_ERROR_OUT_OF_HOST_MEMORY;
+    }
+
+    // Add any implicit layers first
+    loaderAddImplicitLayers(inst, &inst->app_activated_layer_list, &inst->expanded_activated_layer_list, instance_layers);
+
+    // Add any layers specified via environment variable next
+    loaderAddEnvironmentLayers(inst, VK_LAYER_TYPE_FLAG_EXPLICIT_LAYER, "VK_INSTANCE_LAYERS", &inst->app_activated_layer_list,
+                               &inst->expanded_activated_layer_list, instance_layers);
+
+    // Add layers specified by the application
+    err = loaderAddLayerNamesToList(inst, &inst->app_activated_layer_list, &inst->expanded_activated_layer_list,
+                                    pCreateInfo->enabledLayerCount, pCreateInfo->ppEnabledLayerNames, instance_layers);
+
+    for (i = 0; i < inst->expanded_activated_layer_list.count; i++) {
+        // Verify that the layer api version is at least that of the application's request, if not, throw a warning since
+        // undefined behavior could occur.
+        prop = inst->expanded_activated_layer_list.list + i;
+        layer_api_major_version = VK_VERSION_MAJOR(prop->info.specVersion);
+        layer_api_minor_version = VK_VERSION_MINOR(prop->info.specVersion);
+        if (inst->app_api_major_version > layer_api_major_version ||
+            (inst->app_api_major_version == layer_api_major_version && inst->app_api_minor_version > layer_api_minor_version)) {
+            loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "loaderEnableInstanceLayers: Explicit layer %s is using an old API version %" PRIu16 ".%" PRIu16
+                       " versus application requested %" PRIu16 ".%" PRIu16,
+                       prop->info.layerName, layer_api_major_version, layer_api_minor_version, inst->app_api_major_version,
+                       inst->app_api_minor_version);
+        }
+    }
+
+    return err;
+}
+
+// Determine the layer interface version to use.
+bool loaderGetLayerInterfaceVersion(PFN_vkNegotiateLoaderLayerInterfaceVersion fp_negotiate_layer_version,
+                                    VkNegotiateLayerInterface *interface_struct) {
+    memset(interface_struct, 0, sizeof(VkNegotiateLayerInterface));
+    interface_struct->sType = LAYER_NEGOTIATE_INTERFACE_STRUCT;
+    interface_struct->loaderLayerInterfaceVersion = 1;
+    interface_struct->pNext = NULL;
+
+    if (fp_negotiate_layer_version != NULL) {
+        // Layer supports the negotiation API, so call it with the loader's
+        // latest version supported
+        interface_struct->loaderLayerInterfaceVersion = CURRENT_LOADER_LAYER_INTERFACE_VERSION;
+        VkResult result = fp_negotiate_layer_version(interface_struct);
+
+        if (result != VK_SUCCESS) {
+            // Layer no longer supports the loader's latest interface version so
+            // fail loading the Layer
+            return false;
+        }
+    }
+
+    if (interface_struct->loaderLayerInterfaceVersion < MIN_SUPPORTED_LOADER_LAYER_INTERFACE_VERSION) {
+        // Loader no longer supports the layer's latest interface version so
+        // fail loading the layer
+        return false;
+    }
+
+    return true;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL loader_layer_create_device(VkInstance instance, VkPhysicalDevice physicalDevice,
+                                                          const VkDeviceCreateInfo *pCreateInfo,
+                                                          const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
+                                                          PFN_vkGetInstanceProcAddr layerGIPA, PFN_vkGetDeviceProcAddr *nextGDPA) {
+    VkResult res;
+    VkPhysicalDevice internal_device = VK_NULL_HANDLE;
+    struct loader_device *dev = NULL;
+    struct loader_instance *inst = NULL;
+
+    assert(pCreateInfo->queueCreateInfoCount >= 1);
+
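+    // If an instance handle was provided, physicalDevice is used as-is; otherwise it is a
+    // loader trampoline object that must be unwrapped to the underlying physical device.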
+    if (instance != NULL) {
+        inst = loader_get_instance(instance);
+        internal_device = physicalDevice;
+    } else {
+        struct loader_physical_device_tramp *phys_dev = (struct loader_physical_device_tramp *)physicalDevice;
+        internal_device = phys_dev->phys_dev;
+        inst = (struct loader_instance *)phys_dev->this_instance;
+    }
+
+    // Get the physical device (ICD) extensions
+    struct loader_extension_list icd_exts;
+    icd_exts.list = NULL;
+    res = loader_init_generic_list(inst, (struct loader_generic_list *)&icd_exts, sizeof(VkExtensionProperties));
+    if (VK_SUCCESS != res) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, "vkCreateDevice:  Failed to create ICD extension list");
+        goto out;
+    }
+
+    PFN_vkEnumerateDeviceExtensionProperties enumDeviceExtensionProperties = NULL;
+    if (layerGIPA != NULL) {
+        enumDeviceExtensionProperties =
+            (PFN_vkEnumerateDeviceExtensionProperties)layerGIPA(instance, "vkEnumerateDeviceExtensionProperties");
+    } else {
+        enumDeviceExtensionProperties = inst->disp->layer_inst_disp.EnumerateDeviceExtensionProperties;
+    }
+    res = loader_add_device_extensions(inst, enumDeviceExtensionProperties, internal_device, "Unknown", &icd_exts);
+    if (res != VK_SUCCESS) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, "vkCreateDevice:  Failed to add extensions to list");
+        goto out;
+    }
+
+    // Make sure requested extensions to be enabled are supported
+    res = loader_validate_device_extensions(inst, &inst->expanded_activated_layer_list, &icd_exts, pCreateInfo);
+    if (res != VK_SUCCESS) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, "vkCreateDevice:  Failed to validate extensions in list");
+        goto out;
+    }
+
+    dev = loader_create_logical_device(inst, pAllocator);
+    if (dev == NULL) {
+        res = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+
+    // Copy the application enabled instance layer list into the device
+    if (NULL != inst->app_activated_layer_list.list) {
+        dev->app_activated_layer_list.capacity = inst->app_activated_layer_list.capacity;
+        dev->app_activated_layer_list.count = inst->app_activated_layer_list.count;
+        dev->app_activated_layer_list.list =
+            loader_device_heap_alloc(dev, inst->app_activated_layer_list.capacity, VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
+        if (dev->app_activated_layer_list.list == NULL) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "vkCreateDevice:  Failed to allocate application activated layer list of size %d.",
+                       inst->app_activated_layer_list.capacity);
+            res = VK_ERROR_OUT_OF_HOST_MEMORY;
+            goto out;
+        }
+        memcpy(dev->app_activated_layer_list.list, inst->app_activated_layer_list.list,
+               sizeof(*dev->app_activated_layer_list.list) * dev->app_activated_layer_list.count);
+    } else {
+        dev->app_activated_layer_list.capacity = 0;
+        dev->app_activated_layer_list.count = 0;
+        dev->app_activated_layer_list.list = NULL;
+    }
+
+    // Copy the expanded enabled instance layer list into the device
+    if (NULL != inst->expanded_activated_layer_list.list) {
+        dev->expanded_activated_layer_list.capacity = inst->expanded_activated_layer_list.capacity;
+        dev->expanded_activated_layer_list.count = inst->expanded_activated_layer_list.count;
+        dev->expanded_activated_layer_list.list =
+            loader_device_heap_alloc(dev, inst->expanded_activated_layer_list.capacity, VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
+        if (dev->expanded_activated_layer_list.list == NULL) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "vkCreateDevice:  Failed to allocate expanded activated layer list of size %d.",
+                       inst->expanded_activated_layer_list.capacity);
+            res = VK_ERROR_OUT_OF_HOST_MEMORY;
+            goto out;
+        }
+        memcpy(dev->expanded_activated_layer_list.list, inst->expanded_activated_layer_list.list,
+               sizeof(*dev->expanded_activated_layer_list.list) * dev->expanded_activated_layer_list.count);
+    } else {
+        dev->expanded_activated_layer_list.capacity = 0;
+        dev->expanded_activated_layer_list.count = 0;
+        dev->expanded_activated_layer_list.list = NULL;
+    }
+
+    res = loader_create_device_chain(internal_device, pCreateInfo, pAllocator, inst, dev, layerGIPA, nextGDPA);
+    if (res != VK_SUCCESS) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, "vkCreateDevice:  Failed to create device chain.");
+        goto out;
+    }
+
+    *pDevice = dev->chain_device;
+
+    // Initialize any device extension dispatch entries from the instance list
+    loader_init_dispatch_dev_ext(inst, dev);
+
+    // Initialize WSI device extensions as part of core dispatch since loader
+    // has dedicated trampoline code for these
+    loader_init_device_extension_dispatch_table(&dev->loader_dispatch, inst->disp->layer_inst_disp.GetInstanceProcAddr,
+                                                dev->loader_dispatch.core_dispatch.GetDeviceProcAddr, inst->instance, *pDevice);
+
+out:
+
+    // Failure cleanup
+    if (VK_SUCCESS != res) {
+        if (NULL != dev) {
+            loader_destroy_logical_device(inst, dev, pAllocator);
+        }
+    }
+
+    if (NULL != icd_exts.list) {
+        loader_destroy_generic_list(inst, (struct loader_generic_list *)&icd_exts);
+    }
+    return res;
+}
+
+VKAPI_ATTR void VKAPI_CALL loader_layer_destroy_device(VkDevice device, const VkAllocationCallbacks *pAllocator,
+                                                       PFN_vkDestroyDevice destroyFunction) {
+    struct loader_device *dev;
+
+    if (device == VK_NULL_HANDLE) {
+        return;
+    }
+
+    struct loader_icd_term *icd_term = loader_get_icd_and_device(device, &dev, NULL);
+    const struct loader_instance *inst = icd_term->this_instance;
+
+    destroyFunction(device, pAllocator);
+    dev->chain_device = NULL;
+    dev->icd_device = NULL;
+    loader_remove_logical_device(inst, icd_term, dev, pAllocator);
+}
+
+// Given the list of layers to activate in the loader_instance
+// structure, this function will add a VkLayerInstanceCreateInfo
+// structure to the VkInstanceCreateInfo.pNext pointer.
+// Each activated layer will have its own VkLayerInstanceLink
+// structure that tells the layer what Get*ProcAddr to call to
+// get function pointers to the next layer down.
+// Once the chain info has been created, this function will
+// execute the CreateInstance call chain. Each layer will
+// then have an opportunity in its CreateInstance function
+// to set up its dispatch table when the lower layer returns
+// successfully.
+// Each layer can wrap the returned VkInstance object, or not,
+// as it sees fit.
+// The instance chain is terminated by a loader function
+// that will call CreateInstance on all available ICDs and
+// cache those VkInstance objects for future use.
+VkResult loader_create_instance_chain(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
+                                      struct loader_instance *inst, VkInstance *created_instance) {
+    uint32_t activated_layers = 0;
+    VkLayerInstanceCreateInfo chain_info;
+    VkLayerInstanceLink *layer_instance_link_info = NULL;
+    VkInstanceCreateInfo loader_create_info;
+    VkResult res;
+
+    PFN_vkGetInstanceProcAddr next_gipa = loader_gpa_instance_internal;
+    PFN_vkGetInstanceProcAddr cur_gipa = loader_gpa_instance_internal;
+    PFN_vkGetDeviceProcAddr cur_gdpa = loader_gpa_device_internal;
+    PFN_GetPhysicalDeviceProcAddr next_gpdpa = loader_gpdpa_instance_internal;
+    PFN_GetPhysicalDeviceProcAddr cur_gpdpa = loader_gpdpa_instance_internal;
+
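+    // Work on a local copy of the create info so the layer chain structures can be prepended
+    // to its pNext chain without modifying the application's structure.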
+    memcpy(&loader_create_info, pCreateInfo, sizeof(VkInstanceCreateInfo));
+
+    if (inst->expanded_activated_layer_list.count > 0) {
+        chain_info.u.pLayerInfo = NULL;
+        chain_info.pNext = pCreateInfo->pNext;
+        chain_info.sType = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO;
+        chain_info.function = VK_LAYER_LINK_INFO;
+        loader_create_info.pNext = &chain_info;
+
+        layer_instance_link_info = loader_stack_alloc(sizeof(VkLayerInstanceLink) * inst->expanded_activated_layer_list.count);
+        if (!layer_instance_link_info) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "loader_create_instance_chain: Failed to alloc Instance"
+                       " objects for layer");
+            return VK_ERROR_OUT_OF_HOST_MEMORY;
+        }
+
+        // Create instance chain of enabled layers
+        for (int32_t i = inst->expanded_activated_layer_list.count - 1; i >= 0; i--) {
+            struct loader_layer_properties *layer_prop = &inst->expanded_activated_layer_list.list[i];
+            loader_platform_dl_handle lib_handle;
+
+            lib_handle = loaderOpenLayerFile(inst, "instance", layer_prop);
+            if (!lib_handle) {
+                continue;
+            }
+
+            if (NULL == layer_prop->functions.negotiate_layer_interface) {
+                PFN_vkNegotiateLoaderLayerInterfaceVersion negotiate_interface = NULL;
+                bool functions_in_interface = false;
+                if (strlen(layer_prop->functions.str_negotiate_interface) == 0) {
+                    negotiate_interface = (PFN_vkNegotiateLoaderLayerInterfaceVersion)loader_platform_get_proc_address(
+                        lib_handle, "vkNegotiateLoaderLayerInterfaceVersion");
+                } else {
+                    negotiate_interface = (PFN_vkNegotiateLoaderLayerInterfaceVersion)loader_platform_get_proc_address(
+                        lib_handle, layer_prop->functions.str_negotiate_interface);
+                }
+
+                // If we can negotiate an interface version, then we can also
+                // get everything we need from the one function call, so try
+                // that first, and see if we can get all the function pointers
+                // necessary from that one call.
+                if (NULL != negotiate_interface) {
+                    layer_prop->functions.negotiate_layer_interface = negotiate_interface;
+
+                    VkNegotiateLayerInterface interface_struct;
+
+                    if (loaderGetLayerInterfaceVersion(negotiate_interface, &interface_struct)) {
+                        // Go ahead and set the properties version to the
+                        // correct value.
+                        layer_prop->interface_version = interface_struct.loaderLayerInterfaceVersion;
+
+                        // If the interface is 2 or newer, we have access to the
+                        // new GetPhysicalDeviceProcAddr function, so grab it,
+                        // and the other necessary functions, from the
+                        // structure.
+                        if (interface_struct.loaderLayerInterfaceVersion > 1) {
+                            cur_gipa = interface_struct.pfnGetInstanceProcAddr;
+                            cur_gdpa = interface_struct.pfnGetDeviceProcAddr;
+                            cur_gpdpa = interface_struct.pfnGetPhysicalDeviceProcAddr;
+                            if (cur_gipa != NULL) {
+                                // We've set the functions, so make sure we
+                                // don't do the unnecessary calls later.
+                                functions_in_interface = true;
+                            }
+                        }
+                    }
+                }
+
+                if (!functions_in_interface) {
+                    if ((cur_gipa = layer_prop->functions.get_instance_proc_addr) == NULL) {
+                        if (strlen(layer_prop->functions.str_gipa) == 0) {
+                            cur_gipa =
+                                (PFN_vkGetInstanceProcAddr)loader_platform_get_proc_address(lib_handle, "vkGetInstanceProcAddr");
+                            layer_prop->functions.get_instance_proc_addr = cur_gipa;
+                        } else {
+                            cur_gipa = (PFN_vkGetInstanceProcAddr)loader_platform_get_proc_address(lib_handle,
+                                                                                                   layer_prop->functions.str_gipa);
+                        }
+
+                        if (NULL == cur_gipa) {
+                            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                                       "loader_create_instance_chain: Failed to"
+                                       " find \'vkGetInstanceProcAddr\' in "
+                                       "layer %s",
+                                       layer_prop->lib_name);
+                            continue;
+                        }
+                    }
+                }
+            }
+
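+            // Link this layer into the chain, handing it the previously collected
+            // GetInstanceProcAddr/GetPhysicalDeviceProcAddr as its "next" entry points.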
+            layer_instance_link_info[activated_layers].pNext = chain_info.u.pLayerInfo;
+            layer_instance_link_info[activated_layers].pfnNextGetInstanceProcAddr = next_gipa;
+            layer_instance_link_info[activated_layers].pfnNextGetPhysicalDeviceProcAddr = next_gpdpa;
+            next_gipa = cur_gipa;
+            if (layer_prop->interface_version > 1 && cur_gpdpa != NULL) {
+                layer_prop->functions.get_physical_device_proc_addr = cur_gpdpa;
+                next_gpdpa = cur_gpdpa;
+            }
+            if (layer_prop->interface_version > 1 && cur_gipa != NULL) {
+                layer_prop->functions.get_instance_proc_addr = cur_gipa;
+            }
+            if (layer_prop->interface_version > 1 && cur_gdpa != NULL) {
+                layer_prop->functions.get_device_proc_addr = cur_gdpa;
+            }
+
+            chain_info.u.pLayerInfo = &layer_instance_link_info[activated_layers];
+
+            loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0, "Insert instance layer %s (%s)", layer_prop->info.layerName,
+                       layer_prop->lib_name);
+
+            activated_layers++;
+        }
+    }
+
+    PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance)next_gipa(*created_instance, "vkCreateInstance");
+    if (fpCreateInstance) {
+        VkLayerInstanceCreateInfo create_info_disp;
+
+        create_info_disp.sType = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO;
+        create_info_disp.function = VK_LOADER_DATA_CALLBACK;
+
+        create_info_disp.u.pfnSetInstanceLoaderData = vkSetInstanceDispatch;
+
+        create_info_disp.pNext = loader_create_info.pNext;
+        loader_create_info.pNext = &create_info_disp;
+
+        VkLayerInstanceCreateInfo create_info_disp2;
+
+        create_info_disp2.sType = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO;
+        create_info_disp2.function = VK_LOADER_LAYER_CREATE_DEVICE_CALLBACK;
+
+        create_info_disp2.u.layerDevice.pfnLayerCreateDevice = loader_layer_create_device;
+        create_info_disp2.u.layerDevice.pfnLayerDestroyDevice = loader_layer_destroy_device;
+
+        create_info_disp2.pNext = loader_create_info.pNext;
+        loader_create_info.pNext = &create_info_disp2;
+
+        res = fpCreateInstance(&loader_create_info, pAllocator, created_instance);
+    } else {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "loader_create_instance_chain: Failed to find "
+                   "\'vkCreateInstance\'");
+        // Couldn't find CreateInstance function!
+        res = VK_ERROR_INITIALIZATION_FAILED;
+    }
+
+    if (res == VK_SUCCESS) {
+        loader_init_instance_core_dispatch_table(&inst->disp->layer_inst_disp, next_gipa, *created_instance);
+        inst->instance = *created_instance;
+    }
+
+    return res;
+}
+
+void loaderActivateInstanceLayerExtensions(struct loader_instance *inst, VkInstance created_inst) {
+    loader_init_instance_extension_dispatch_table(&inst->disp->layer_inst_disp, inst->disp->layer_inst_disp.GetInstanceProcAddr,
+                                                  created_inst);
+}
+
+VkResult loader_create_device_chain(const VkPhysicalDevice pd, const VkDeviceCreateInfo *pCreateInfo,
+                                    const VkAllocationCallbacks *pAllocator, const struct loader_instance *inst,
+                                    struct loader_device *dev, PFN_vkGetInstanceProcAddr callingLayer,
+                                    PFN_vkGetDeviceProcAddr *layerNextGDPA) {
+    uint32_t activated_layers = 0;
+    VkLayerDeviceLink *layer_device_link_info;
+    VkLayerDeviceCreateInfo chain_info;
+    VkDeviceCreateInfo loader_create_info;
+    VkResult res;
+
+    PFN_vkGetDeviceProcAddr fpGDPA = NULL, nextGDPA = loader_gpa_device_internal;
+    PFN_vkGetInstanceProcAddr fpGIPA = NULL, nextGIPA = loader_gpa_instance_internal;
+
+    memcpy(&loader_create_info, pCreateInfo, sizeof(VkDeviceCreateInfo));
+
+    // Before we continue, we need to find out if the KHR_device_group extension is in the enabled list.  If it is, we then
+    // need to look for the corresponding VkDeviceGroupDeviceCreateInfoKHR struct in the device list.  This is because we
+    // need to replace all the incoming physical device values (which are really loader trampoline physical device values)
+    // with the layer/ICD version.
+    {
+        VkBaseOutStructure *pNext = (VkBaseOutStructure *)loader_create_info.pNext;
+        VkBaseOutStructure *pPrev = (VkBaseOutStructure *)&loader_create_info;
+        while (NULL != pNext) {
+            if (VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO == pNext->sType) {
+                VkDeviceGroupDeviceCreateInfoKHR *cur_struct = (VkDeviceGroupDeviceCreateInfoKHR *)pNext;
+                if (0 < cur_struct->physicalDeviceCount && NULL != cur_struct->pPhysicalDevices) {
+                    VkDeviceGroupDeviceCreateInfoKHR *temp_struct = loader_stack_alloc(sizeof(VkDeviceGroupDeviceCreateInfoKHR));
+                    VkPhysicalDevice *phys_dev_array = NULL;
+                    if (NULL == temp_struct) {
+                        return VK_ERROR_OUT_OF_HOST_MEMORY;
+                    }
+                    memcpy(temp_struct, cur_struct, sizeof(VkDeviceGroupDeviceCreateInfoKHR));
+                    phys_dev_array = loader_stack_alloc(sizeof(VkPhysicalDevice) * cur_struct->physicalDeviceCount);
+                    if (NULL == phys_dev_array) {
+                        return VK_ERROR_OUT_OF_HOST_MEMORY;
+                    }
+
+                    // Before calling down, replace the incoming physical device values (which are really loader trampoline
+                    // physical devices) with the next layer (or possibly even the terminator) physical device values.
+                    struct loader_physical_device_tramp *cur_tramp;
+                    for (uint32_t phys_dev = 0; phys_dev < cur_struct->physicalDeviceCount; phys_dev++) {
+                        cur_tramp = (struct loader_physical_device_tramp *)cur_struct->pPhysicalDevices[phys_dev];
+                        phys_dev_array[phys_dev] = cur_tramp->phys_dev;
+                    }
+                    temp_struct->pPhysicalDevices = phys_dev_array;
+
+                    // Replace the old struct in the pNext chain with this one.
+                    pPrev->pNext = (VkBaseOutStructure *)temp_struct;
+                    pNext = (VkBaseOutStructure *)temp_struct;
+                }
+                break;
+            }
+
+            pPrev = pNext;
+            pNext = pNext->pNext;
+        }
+    }
+
+    layer_device_link_info = loader_stack_alloc(sizeof(VkLayerDeviceLink) * dev->expanded_activated_layer_list.count);
+    if (!layer_device_link_info) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "loader_create_device_chain: Failed to alloc Device objects"
+                   " for layer.  Skipping Layer.");
+        return VK_ERROR_OUT_OF_HOST_MEMORY;
+    }
+
+    if (dev->expanded_activated_layer_list.count > 0) {
+        chain_info.sType = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO;
+        chain_info.function = VK_LAYER_LINK_INFO;
+        chain_info.u.pLayerInfo = NULL;
+        chain_info.pNext = loader_create_info.pNext;
+        loader_create_info.pNext = &chain_info;
+
+        bool done = false;
+
+        // Create device chain of enabled layers
+        for (int32_t i = dev->expanded_activated_layer_list.count - 1; i >= 0; i--) {
+            struct loader_layer_properties *layer_prop = &dev->expanded_activated_layer_list.list[i];
+            loader_platform_dl_handle lib_handle;
+
+            lib_handle = loaderOpenLayerFile(inst, "device", layer_prop);
+            if (!lib_handle || done) {
+                continue;
+            }
+
+            // The Get*ProcAddr pointers will already be filled in if they were received from either the json file or the
+            // version negotiation
+            if ((fpGIPA = layer_prop->functions.get_instance_proc_addr) == NULL) {
+                if (strlen(layer_prop->functions.str_gipa) == 0) {
+                    fpGIPA = (PFN_vkGetInstanceProcAddr)loader_platform_get_proc_address(lib_handle, "vkGetInstanceProcAddr");
+                    layer_prop->functions.get_instance_proc_addr = fpGIPA;
+                } else
+                    fpGIPA =
+                        (PFN_vkGetInstanceProcAddr)loader_platform_get_proc_address(lib_handle, layer_prop->functions.str_gipa);
+                if (!fpGIPA) {
+                    loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                               "loader_create_device_chain: Failed to find "
+                               "\'vkGetInstanceProcAddr\' in layer %s.  Skipping"
+                               " layer.",
+                               layer_prop->lib_name);
+                    continue;
+                }
+            }
+
+            if (fpGIPA == callingLayer) {
+                if (layerNextGDPA != NULL) {
+                    *layerNextGDPA = nextGDPA;
+                }
+                done = true;
+                continue;
+            }
+
+            if ((fpGDPA = layer_prop->functions.get_device_proc_addr) == NULL) {
+                if (strlen(layer_prop->functions.str_gdpa) == 0) {
+                    fpGDPA = (PFN_vkGetDeviceProcAddr)loader_platform_get_proc_address(lib_handle, "vkGetDeviceProcAddr");
+                    layer_prop->functions.get_device_proc_addr = fpGDPA;
+                } else
+                    fpGDPA =
+                        (PFN_vkGetDeviceProcAddr)loader_platform_get_proc_address(lib_handle, layer_prop->functions.str_gdpa);
+                if (!fpGDPA) {
+                    loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0, "Failed to find vkGetDeviceProcAddr in layer %s",
+                               layer_prop->lib_name);
+                    continue;
+                }
+            }
+
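+            // Link this device layer into the chain, handing it the previously collected
+            // GetInstanceProcAddr/GetDeviceProcAddr as its "next" entry points.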
+            layer_device_link_info[activated_layers].pNext = chain_info.u.pLayerInfo;
+            layer_device_link_info[activated_layers].pfnNextGetInstanceProcAddr = nextGIPA;
+            layer_device_link_info[activated_layers].pfnNextGetDeviceProcAddr = nextGDPA;
+            chain_info.u.pLayerInfo = &layer_device_link_info[activated_layers];
+            nextGIPA = fpGIPA;
+            nextGDPA = fpGDPA;
+
+            loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0, "Inserted device layer %s (%s)", layer_prop->info.layerName,
+                       layer_prop->lib_name);
+
+            activated_layers++;
+        }
+    }
+
+    VkDevice created_device = (VkDevice)dev;
+    PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice)nextGIPA(inst->instance, "vkCreateDevice");
+    if (fpCreateDevice) {
+        VkLayerDeviceCreateInfo create_info_disp;
+
+        create_info_disp.sType = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO;
+        create_info_disp.function = VK_LOADER_DATA_CALLBACK;
+
+        create_info_disp.u.pfnSetDeviceLoaderData = vkSetDeviceDispatch;
+
+        create_info_disp.pNext = loader_create_info.pNext;
+        loader_create_info.pNext = &create_info_disp;
+        res = fpCreateDevice(pd, &loader_create_info, pAllocator, &created_device);
+        if (res != VK_SUCCESS) {
+            return res;
+        }
+        dev->chain_device = created_device;
+    } else {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "loader_create_device_chain: Failed to find \'vkCreateDevice\' "
+                   "in layers or ICD");
+        // Couldn't find CreateDevice function!
+        return VK_ERROR_INITIALIZATION_FAILED;
+    }
+
+    // Initialize device dispatch table
+    loader_init_device_dispatch_table(&dev->loader_dispatch, nextGDPA, dev->chain_device);
+
+    return res;
+}
+
+VkResult loaderValidateLayers(const struct loader_instance *inst, const uint32_t layer_count,
+                              const char *const *ppEnabledLayerNames, const struct loader_layer_list *list) {
+    struct loader_layer_properties *prop;
+
+    for (uint32_t i = 0; i < layer_count; i++) {
+        VkStringErrorFlags result = vk_string_validate(MaxLoaderStringLength, ppEnabledLayerNames[i]);
+        if (result != VK_STRING_ERROR_NONE) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "loaderValidateLayers: Device ppEnabledLayerNames "
+                       "contains string that is too long or is badly formed");
+            return VK_ERROR_LAYER_NOT_PRESENT;
+        }
+
+        prop = loaderFindLayerProperty(ppEnabledLayerNames[i], list);
+        if (NULL == prop) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "loaderValidateLayers: Layer %d does not exist in the list of available layers", i);
+            return VK_ERROR_LAYER_NOT_PRESENT;
+        }
+    }
+    return VK_SUCCESS;
+}
+
+VkResult loader_validate_instance_extensions(struct loader_instance *inst, const struct loader_extension_list *icd_exts,
+                                             const struct loader_layer_list *instance_layers,
+                                             const VkInstanceCreateInfo *pCreateInfo) {
+    VkExtensionProperties *extension_prop;
+    char *env_value;
+    bool check_if_known = true;
+    VkResult res = VK_SUCCESS;
+
+    struct loader_layer_list active_layers;
+    struct loader_layer_list expanded_layers;
+    memset(&active_layers, 0, sizeof(active_layers));
+    memset(&expanded_layers, 0, sizeof(expanded_layers));
+    if (!loaderInitLayerList(inst, &active_layers)) {
+        res = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+    if (!loaderInitLayerList(inst, &expanded_layers)) {
+        res = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+
+    // Build the lists of active layers (including metalayers) and expanded layers (with metalayers resolved to their components)
+    loaderAddImplicitLayers(inst, &active_layers, &expanded_layers, instance_layers);
+    loaderAddEnvironmentLayers(inst, VK_LAYER_TYPE_FLAG_EXPLICIT_LAYER, ENABLED_LAYERS_ENV, &active_layers, &expanded_layers,
+                               instance_layers);
+    res = loaderAddLayerNamesToList(inst, &active_layers, &expanded_layers, pCreateInfo->enabledLayerCount,
+                                    pCreateInfo->ppEnabledLayerNames, instance_layers);
+    if (VK_SUCCESS != res) {
+        goto out;
+    }
+
+    for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
+        VkStringErrorFlags result = vk_string_validate(MaxLoaderStringLength, pCreateInfo->ppEnabledExtensionNames[i]);
+        if (result != VK_STRING_ERROR_NONE) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "loader_validate_instance_extensions: Instance ppEnabledExtensionNames contains "
+                       "string that is too long or is badly formed");
+            res = VK_ERROR_EXTENSION_NOT_PRESENT;
+            goto out;
+        }
+
+        // Check if a user wants to disable the instance extension filtering behavior
+        env_value = loader_getenv("VK_LOADER_DISABLE_INST_EXT_FILTER", inst);
+        if (NULL != env_value && atoi(env_value) != 0) {
+            check_if_known = false;
+        }
+        loader_free_getenv(env_value, inst);
+
+        if (check_if_known) {
+            // See if the extension is in the list of supported extensions
+            bool found = false;
+            for (uint32_t j = 0; LOADER_INSTANCE_EXTENSIONS[j] != NULL; j++) {
+                if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], LOADER_INSTANCE_EXTENSIONS[j]) == 0) {
+                    found = true;
+                    break;
+                }
+            }
+
+            // If it isn't in the list, return an error
+            if (!found) {
+                loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                           "loader_validate_instance_extensions: Extension %s not found in list of known instance extensions.",
+                           pCreateInfo->ppEnabledExtensionNames[i]);
+                res = VK_ERROR_EXTENSION_NOT_PRESENT;
+                goto out;
+            }
+        }
+
+        extension_prop = get_extension_property(pCreateInfo->ppEnabledExtensionNames[i], icd_exts);
+
+        if (extension_prop) {
+            continue;
+        }
+
+        extension_prop = NULL;
+
+        // Not in global list, search layer extension lists
+        struct loader_layer_properties *layer_prop = NULL;
+        for (uint32_t j = 0; NULL == extension_prop && j < expanded_layers.count; ++j) {
+            extension_prop =
+                get_extension_property(pCreateInfo->ppEnabledExtensionNames[i], &expanded_layers.list[j].instance_extension_list);
+            if (extension_prop) {
+                // Found the extension in one of the layers enabled by the app.
+                break;
+            }
+
+            layer_prop = loaderFindLayerProperty(expanded_layers.list[j].info.layerName, instance_layers);
+            if (NULL == layer_prop) {
+                // Should NOT get here, loaderValidateLayers should have already filtered this case out.
+                continue;
+            }
+        }
+
+        if (!extension_prop) {
+            // Didn't find extension name in any of the global layers, error out
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "loader_validate_instance_extensions: Instance extension %s not supported by available ICDs or enabled "
+                       "layers.",
+                       pCreateInfo->ppEnabledExtensionNames[i]);
+            res = VK_ERROR_EXTENSION_NOT_PRESENT;
+            goto out;
+        }
+    }
+
+out:
+    loaderDestroyLayerList(inst, NULL, &active_layers);
+    loaderDestroyLayerList(inst, NULL, &expanded_layers);
+    return res;
+}
+
+VkResult loader_validate_device_extensions(struct loader_instance *this_instance,
+                                           const struct loader_layer_list *activated_device_layers,
+                                           const struct loader_extension_list *icd_exts, const VkDeviceCreateInfo *pCreateInfo) {
+    VkExtensionProperties *extension_prop;
+    struct loader_layer_properties *layer_prop;
+
+    for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
+        VkStringErrorFlags result = vk_string_validate(MaxLoaderStringLength, pCreateInfo->ppEnabledExtensionNames[i]);
+        if (result != VK_STRING_ERROR_NONE) {
+            loader_log(this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "loader_validate_device_extensions: Device ppEnabledExtensionNames contains "
+                       "string that is too long or is badly formed");
+            return VK_ERROR_EXTENSION_NOT_PRESENT;
+        }
+
+        const char *extension_name = pCreateInfo->ppEnabledExtensionNames[i];
+        extension_prop = get_extension_property(extension_name, icd_exts);
+
+        if (extension_prop) {
+            continue;
+        }
+
+        // Not in global list, search activated layer extension lists
+        for (uint32_t j = 0; j < activated_device_layers->count; j++) {
+            layer_prop = &activated_device_layers->list[j];
+
+            extension_prop = get_dev_extension_property(extension_name, &layer_prop->device_extension_list);
+            if (extension_prop) {
+                // Found the extension in one of the layers enabled by the app.
+                break;
+            }
+        }
+
+        if (!extension_prop) {
+            // Didn't find extension name in any of the device layers, error out
+            loader_log(this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "loader_validate_device_extensions: Device extension %s not supported by selected physical device "
+                       "or enabled layers.",
+                       pCreateInfo->ppEnabledExtensionNames[i]);
+            return VK_ERROR_EXTENSION_NOT_PRESENT;
+        }
+    }
+    return VK_SUCCESS;
+}
+
+// Terminator functions for the Instance chain
+// All named terminator_<Vulkan API name>
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateInstance(const VkInstanceCreateInfo *pCreateInfo,
+                                                         const VkAllocationCallbacks *pAllocator, VkInstance *pInstance) {
+    struct loader_icd_term *icd_term;
+    VkExtensionProperties *prop;
+    char **filtered_extension_names = NULL;
+    VkInstanceCreateInfo icd_create_info;
+    VkResult res = VK_SUCCESS;
+    bool one_icd_successful = false;
+
+    struct loader_instance *ptr_instance = (struct loader_instance *)*pInstance;
+    memcpy(&icd_create_info, pCreateInfo, sizeof(icd_create_info));
+
+    icd_create_info.enabledLayerCount = 0;
+    icd_create_info.ppEnabledLayerNames = NULL;
+
+    // NOTE: Need to filter the extensions to only those supported by the ICD.
+    //       No ICD will advertise support for layers. An ICD library could
+    //       support a layer, but it would be independent of the actual ICD,
+    //       just in the same library.
+    filtered_extension_names = loader_stack_alloc(pCreateInfo->enabledExtensionCount * sizeof(char *));
+    if (!filtered_extension_names) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "terminator_CreateInstance: Failed to create extension name array for %d extensions",
+                   pCreateInfo->enabledExtensionCount);
+        res = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+    icd_create_info.ppEnabledExtensionNames = (const char *const *)filtered_extension_names;
+
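+    // Attempt to create an instance in every scanned ICD. A failure in one ICD only skips that
+    // ICD; the loop bails out early only for VK_ERROR_OUT_OF_HOST_MEMORY.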
+    for (uint32_t i = 0; i < ptr_instance->icd_tramp_list.count; i++) {
+        icd_term = loader_icd_add(ptr_instance, &ptr_instance->icd_tramp_list.scanned_list[i]);
+        if (NULL == icd_term) {
+            loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "terminator_CreateInstance: Failed to add ICD %d to ICD trampoline list.", i);
+            res = VK_ERROR_OUT_OF_HOST_MEMORY;
+            goto out;
+        }
+
+        // If any error happens after here, we need to remove the ICD from the list,
+        // because we've already added it, but haven't validated it
+
+        // Make sure that we reset the pApplicationInfo so we don't get an old pointer
+        icd_create_info.pApplicationInfo = pCreateInfo->pApplicationInfo;
+        icd_create_info.enabledExtensionCount = 0;
+        struct loader_extension_list icd_exts;
+
+        loader_log(ptr_instance, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0, "Build ICD instance extension list");
+        // traverse scanned icd list adding non-duplicate extensions to the list
+        res = loader_init_generic_list(ptr_instance, (struct loader_generic_list *)&icd_exts, sizeof(VkExtensionProperties));
+        if (VK_ERROR_OUT_OF_HOST_MEMORY == res) {
+            // If out of memory, bail immediately.
+            goto out;
+        } else if (VK_SUCCESS != res) {
+            // Something bad happened with this ICD, so free it and try the
+            // next.
+            ptr_instance->icd_terms = icd_term->next;
+            icd_term->next = NULL;
+            loader_icd_destroy(ptr_instance, icd_term, pAllocator);
+            continue;
+        }
+
+        res = loader_add_instance_extensions(ptr_instance, icd_term->scanned_icd->EnumerateInstanceExtensionProperties,
+                                             icd_term->scanned_icd->lib_name, &icd_exts);
+        if (VK_SUCCESS != res) {
+            loader_destroy_generic_list(ptr_instance, (struct loader_generic_list *)&icd_exts);
+            if (VK_ERROR_OUT_OF_HOST_MEMORY == res) {
+                // If out of memory, bail immediately.
+                goto out;
+            } else {
+                // Something bad happened with this ICD, so free it and try the next.
+                ptr_instance->icd_terms = icd_term->next;
+                icd_term->next = NULL;
+                loader_icd_destroy(ptr_instance, icd_term, pAllocator);
+                continue;
+            }
+        }
+
+        for (uint32_t j = 0; j < pCreateInfo->enabledExtensionCount; j++) {
+            prop = get_extension_property(pCreateInfo->ppEnabledExtensionNames[j], &icd_exts);
+            if (prop) {
+                filtered_extension_names[icd_create_info.enabledExtensionCount] = (char *)pCreateInfo->ppEnabledExtensionNames[j];
+                icd_create_info.enabledExtensionCount++;
+            }
+        }
+
+        loader_destroy_generic_list(ptr_instance, (struct loader_generic_list *)&icd_exts);
+
+        // Get the driver version from vkEnumerateInstanceVersion
+        uint32_t icd_version = VK_API_VERSION_1_0;
+        VkResult icd_result = VK_SUCCESS;
+        if (icd_term->scanned_icd->api_version >= VK_API_VERSION_1_1) {
+            PFN_vkEnumerateInstanceVersion icd_enumerate_instance_version = (PFN_vkEnumerateInstanceVersion)
+                icd_term->scanned_icd->GetInstanceProcAddr(NULL, "vkEnumerateInstanceVersion");
+            if (icd_enumerate_instance_version != NULL) {
+                icd_result = icd_enumerate_instance_version(&icd_version);
+                if (icd_result != VK_SUCCESS) {
+                    icd_version = VK_API_VERSION_1_0;
+                    loader_log(ptr_instance, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0, "terminator_CreateInstance: ICD \"%s\" "
+                        "vkEnumerateInstanceVersion returned error. The ICD will be treated as a 1.0 ICD",
+                        icd_term->scanned_icd->lib_name);
+                }
+            }
+        }
+
+        // Create an instance, substituting the version to 1.0 if necessary
+        VkApplicationInfo icd_app_info;
+        uint32_t icd_version_nopatch = VK_MAKE_VERSION(VK_VERSION_MAJOR(icd_version), VK_VERSION_MINOR(icd_version), 0);
+        uint32_t requested_version = pCreateInfo == NULL || pCreateInfo->pApplicationInfo == NULL ? VK_API_VERSION_1_0 : pCreateInfo->pApplicationInfo->apiVersion;
+        if ((requested_version != 0) && (icd_version_nopatch == VK_API_VERSION_1_0)) {
+            if (icd_create_info.pApplicationInfo == NULL) {
+                memset(&icd_app_info, 0, sizeof(icd_app_info));
+            } else {
+                memcpy(&icd_app_info, icd_create_info.pApplicationInfo, sizeof(icd_app_info));
+            }
+            icd_app_info.apiVersion = icd_version;
+            icd_create_info.pApplicationInfo = &icd_app_info;
+        }
+        icd_result = ptr_instance->icd_tramp_list.scanned_list[i].CreateInstance(&icd_create_info, pAllocator, &(icd_term->instance));
+        if (VK_ERROR_OUT_OF_HOST_MEMORY == icd_result) {
+            // If out of memory, bail immediately.
+            res = VK_ERROR_OUT_OF_HOST_MEMORY;
+            goto out;
+        } else if (VK_SUCCESS != icd_result) {
+            loader_log(ptr_instance, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "terminator_CreateInstance: Failed to CreateInstance in "
+                       "ICD %d.  Skipping ICD.",
+                       i);
+            ptr_instance->icd_terms = icd_term->next;
+            icd_term->next = NULL;
+            loader_icd_destroy(ptr_instance, icd_term, pAllocator);
+            continue;
+        }
+
+        if (!loader_icd_init_entries(icd_term, icd_term->instance,
+                                     ptr_instance->icd_tramp_list.scanned_list[i].GetInstanceProcAddr)) {
+            loader_log(ptr_instance, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "terminator_CreateInstance: Failed to CreateInstance and find "
+                       "entrypoints with ICD.  Skipping ICD.");
+            ptr_instance->icd_terms = icd_term->next;
+            icd_term->next = NULL;
+            loader_icd_destroy(ptr_instance, icd_term, pAllocator);
+            continue;
+        }
+
+        // If we made it this far, at least one ICD was successful
+        one_icd_successful = true;
+    }
+
+    // If no ICDs were added to the instance list and res is unchanged from its initial value, the loader was unable to
+    // find a suitable ICD.
+    if (VK_SUCCESS == res && (ptr_instance->icd_terms == NULL || !one_icd_successful)) {
+        res = VK_ERROR_INCOMPATIBLE_DRIVER;
+    }
+
+out:
+
+    if (VK_SUCCESS != res) {
+        while (NULL != ptr_instance->icd_terms) {
+            icd_term = ptr_instance->icd_terms;
+            ptr_instance->icd_terms = icd_term->next;
+            if (NULL != icd_term->instance) {
+                icd_term->dispatch.DestroyInstance(icd_term->instance, pAllocator);
+            }
+            loader_icd_destroy(ptr_instance, icd_term, pAllocator);
+        }
+    }
+
+    return res;
+}
+
+VKAPI_ATTR void VKAPI_CALL terminator_DestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator) {
+    struct loader_instance *ptr_instance = loader_instance(instance);
+    if (NULL == ptr_instance) {
+        return;
+    }
+    struct loader_icd_term *icd_terms = ptr_instance->icd_terms;
+    struct loader_icd_term *next_icd_term;
+
+    // Remove this instance from the list of instances:
+    struct loader_instance *prev = NULL;
+    struct loader_instance *next = loader.instances;
+    while (next != NULL) {
+        if (next == ptr_instance) {
+            // Remove this instance from the list:
+            if (prev)
+                prev->next = next->next;
+            else
+                loader.instances = next->next;
+            break;
+        }
+        prev = next;
+        next = next->next;
+    }
+
+    while (NULL != icd_terms) {
+        if (icd_terms->instance) {
+            icd_terms->dispatch.DestroyInstance(icd_terms->instance, pAllocator);
+        }
+        next_icd_term = icd_terms->next;
+        icd_terms->instance = VK_NULL_HANDLE;
+        loader_icd_destroy(ptr_instance, icd_terms, pAllocator);
+
+        icd_terms = next_icd_term;
+    }
+
+    loaderDeleteLayerListAndProperties(ptr_instance, &ptr_instance->instance_layer_list);
+    loader_scanned_icd_clear(ptr_instance, &ptr_instance->icd_tramp_list);
+    loader_destroy_generic_list(ptr_instance, (struct loader_generic_list *)&ptr_instance->ext_list);
+    if (NULL != ptr_instance->phys_devs_term) {
+        for (uint32_t i = 0; i < ptr_instance->phys_dev_count_term; i++) {
+            loader_instance_heap_free(ptr_instance, ptr_instance->phys_devs_term[i]);
+        }
+        loader_instance_heap_free(ptr_instance, ptr_instance->phys_devs_term);
+    }
+    if (NULL != ptr_instance->phys_dev_groups_term) {
+        for (uint32_t i = 0; i < ptr_instance->phys_dev_group_count_term; i++) {
+            loader_instance_heap_free(ptr_instance, ptr_instance->phys_dev_groups_term[i]);
+        }
+        loader_instance_heap_free(ptr_instance, ptr_instance->phys_dev_groups_term);
+    }
+    loader_free_dev_ext_table(ptr_instance);
+    loader_free_phys_dev_ext_table(ptr_instance);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo,
+                                                       const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
+    VkResult res = VK_SUCCESS;
+    struct loader_physical_device_term *phys_dev_term;
+    phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+
+    struct loader_device *dev = (struct loader_device *)*pDevice;
+    PFN_vkCreateDevice fpCreateDevice = icd_term->dispatch.CreateDevice;
+    struct loader_extension_list icd_exts;
+
+    VkBaseOutStructure *caller_dgci_container = NULL;
+    VkDeviceGroupDeviceCreateInfoKHR *caller_dgci = NULL;
+
+    dev->phys_dev_term = phys_dev_term;
+
+    icd_exts.list = NULL;
+
+    if (fpCreateDevice == NULL) {
+        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "terminator_CreateDevice: No vkCreateDevice command exposed "
+                   "by ICD %s",
+                   icd_term->scanned_icd->lib_name);
+        res = VK_ERROR_INITIALIZATION_FAILED;
+        goto out;
+    }
+
+    VkDeviceCreateInfo localCreateInfo;
+    memcpy(&localCreateInfo, pCreateInfo, sizeof(localCreateInfo));
+
+    // NOTE: Need to filter the extensions to only those supported by the ICD.
+    //       No ICD will advertise support for layers. An ICD library could support a layer,
+    //       but it would be independent of the actual ICD, just in the same library.
+    char **filtered_extension_names = NULL;
+    if (0 < pCreateInfo->enabledExtensionCount) {
+        filtered_extension_names = loader_stack_alloc(pCreateInfo->enabledExtensionCount * sizeof(char *));
+        if (NULL == filtered_extension_names) {
+            loader_log(icd_term->this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "terminator_CreateDevice: Failed to create extension name "
+                       "storage for %d extensions",
+                       pCreateInfo->enabledExtensionCount);
+            return VK_ERROR_OUT_OF_HOST_MEMORY;
+        }
+    }
+
+    localCreateInfo.enabledLayerCount = 0;
+    localCreateInfo.ppEnabledLayerNames = NULL;
+
+    localCreateInfo.enabledExtensionCount = 0;
+    localCreateInfo.ppEnabledExtensionNames = (const char *const *)filtered_extension_names;
+
+    // Get the physical device (ICD) extensions
+    res = loader_init_generic_list(icd_term->this_instance, (struct loader_generic_list *)&icd_exts, sizeof(VkExtensionProperties));
+    if (VK_SUCCESS != res) {
+        goto out;
+    }
+
+    res = loader_add_device_extensions(icd_term->this_instance, icd_term->dispatch.EnumerateDeviceExtensionProperties,
+                                       phys_dev_term->phys_dev, icd_term->scanned_icd->lib_name, &icd_exts);
+    if (res != VK_SUCCESS) {
+        goto out;
+    }
+
+    for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
+        const char *extension_name = pCreateInfo->ppEnabledExtensionNames[i];
+        VkExtensionProperties *prop = get_extension_property(extension_name, &icd_exts);
+        if (prop) {
+            filtered_extension_names[localCreateInfo.enabledExtensionCount] = (char *)extension_name;
+            localCreateInfo.enabledExtensionCount++;
+        } else {
+            loader_log(icd_term->this_instance, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0,
+                       "vkCreateDevice extension %s not available for "
+                       "devices associated with ICD %s",
+                       extension_name, icd_term->scanned_icd->lib_name);
+        }
+    }
+
+    // Before we continue, if VK_KHR_device_group is in the list of enabled and viable extensions, we need to look for the
+    // corresponding VkDeviceGroupDeviceCreateInfo struct in the create info's pNext chain and replace all the physical device
+    // values (which are really loader physical device terminator values) with the ICD versions.
+    //if (icd_term->this_instance->enabled_known_extensions.khr_device_group_creation == 1) {
+    {
+        VkBaseOutStructure *pNext = (VkBaseOutStructure *)localCreateInfo.pNext;
+        VkBaseOutStructure *pPrev = (VkBaseOutStructure *)&localCreateInfo;
+        while (NULL != pNext) {
+            if (VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO == pNext->sType) {
+                VkDeviceGroupDeviceCreateInfo *cur_struct = (VkDeviceGroupDeviceCreateInfo *)pNext;
+                if (0 < cur_struct->physicalDeviceCount && NULL != cur_struct->pPhysicalDevices) {
+                    VkDeviceGroupDeviceCreateInfo *temp_struct = loader_stack_alloc(sizeof(VkDeviceGroupDeviceCreateInfo));
+                    VkPhysicalDevice *phys_dev_array = NULL;
+                    if (NULL == temp_struct) {
+                        res = VK_ERROR_OUT_OF_HOST_MEMORY;
+                        goto out;
+                    }
+                    memcpy(temp_struct, cur_struct, sizeof(VkDeviceGroupDeviceCreateInfo));
+                    phys_dev_array = loader_stack_alloc(sizeof(VkPhysicalDevice) * cur_struct->physicalDeviceCount);
+                    if (NULL == phys_dev_array) {
+                        res = VK_ERROR_OUT_OF_HOST_MEMORY;
+                        goto out;
+                    }
+
+                    // Before calling down, replace the incoming physical device values (which are really loader terminator
+                    // physical devices) with the ICDs physical device values.
+                    struct loader_physical_device_term *cur_term;
+                    for (uint32_t phys_dev = 0; phys_dev < cur_struct->physicalDeviceCount; phys_dev++) {
+                        cur_term = (struct loader_physical_device_term *)cur_struct->pPhysicalDevices[phys_dev];
+                        phys_dev_array[phys_dev] = cur_term->phys_dev;
+                    }
+                    temp_struct->pPhysicalDevices = phys_dev_array;
+
+                    // Keep track of pointers to restore pNext chain before returning
+                    caller_dgci_container = pPrev;
+                    caller_dgci = cur_struct;
+
+                    // Replace the old struct in the pNext chain with this one.
+                    pPrev->pNext = (VkBaseOutStructure *)temp_struct;
+                    pNext = (VkBaseOutStructure *)temp_struct;
+                }
+                break;
+            }
+
+            pPrev = pNext;
+            pNext = pNext->pNext;
+        }
+    }
+
+    // Handle loader emulation for structs that are not supported by the ICD:
+    // Presently, the emulation leaves the pNext chain alone. This means that the ICD will receive items in the chain which
+    // are not recognized by the ICD. If this causes the ICD to fail, then the items would have to be removed here. The current
+    // implementation does not remove them because copying the pNext chain would be impossible if the loader does not recognize
+    // any of the struct types, as the loader would not know the size to allocate and copy.
+    //if (icd_term->dispatch.GetPhysicalDeviceFeatures2 == NULL && icd_term->dispatch.GetPhysicalDeviceFeatures2KHR == NULL) {
+    {
+        const void *pNext = localCreateInfo.pNext;
+        while (pNext != NULL) {
+            switch (*(VkStructureType *)pNext) {
+                case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2: {
+                    const VkPhysicalDeviceFeatures2KHR *features = pNext;
+
+                    if (icd_term->dispatch.GetPhysicalDeviceFeatures2 == NULL && icd_term->dispatch.GetPhysicalDeviceFeatures2KHR == NULL) {
+                        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                            "vkCreateDevice: Emulating handling of VkPhysicalDeviceFeatures2 in pNext chain for ICD \"%s\"",
+                            icd_term->scanned_icd->lib_name);
+
+                        // Verify that VK_KHR_get_physical_device_properties2 is enabled
+                        if (icd_term->this_instance->enabled_known_extensions.khr_get_physical_device_properties2) {
+                            localCreateInfo.pEnabledFeatures = &features->features;
+                        }
+                    }
+
+                    // Leave this item in the pNext chain for now
+
+                    pNext = features->pNext;
+                    break;
+                }
+
+                case VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO: {
+                    const VkDeviceGroupDeviceCreateInfoKHR *group_info = pNext;
+
+                    if (icd_term->dispatch.EnumeratePhysicalDeviceGroups == NULL && icd_term->dispatch.EnumeratePhysicalDeviceGroupsKHR == NULL) {
+                        loader_log(
+                            icd_term->this_instance, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                            "vkCreateDevice: Emulating handling of VkPhysicalDeviceGroupProperties in pNext chain for ICD \"%s\"",
+                            icd_term->scanned_icd->lib_name);
+
+                        // The group must contain only this one device, since physical device groups aren't actually supported
+                        if (group_info->physicalDeviceCount != 1) {
+                            loader_log(icd_term->this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                                   "vkCreateDevice: Emulation failed to create device from device group info");
+                            res = VK_ERROR_INITIALIZATION_FAILED;
+                            goto out;
+                        }
+                    }
+
+                    // Nothing needs to be done here because we're leaving the item in the pNext chain and because the spec states
+                    // that the physicalDevice argument must be included in the device group, and we've already checked that it is
+
+                    pNext = group_info->pNext;
+                    break;
+                }
+
+                // Multiview properties are also allowed, but since VK_KHR_multiview is a device extension, we'll just let the ICD
+                // report that error if the user enables the extension here
+                default: {
+                    const VkBaseInStructure *header = pNext;
+                    pNext = header->pNext;
+                    break;
+                }
+            }
+        }
+    }
+
+    // Every extension that has a loader-defined terminator needs to be marked as enabled or disabled so that we know whether or
+    // not to return that terminator when vkGetDeviceProcAddr is called
+    for (uint32_t i = 0; i < localCreateInfo.enabledExtensionCount; ++i) {
+        if (!strcmp(localCreateInfo.ppEnabledExtensionNames[i], VK_KHR_SWAPCHAIN_EXTENSION_NAME)) {
+            dev->extensions.khr_swapchain_enabled = true;
+        } else if (!strcmp(localCreateInfo.ppEnabledExtensionNames[i], VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME)) {
+            dev->extensions.khr_display_swapchain_enabled = true;
+        } else if (!strcmp(localCreateInfo.ppEnabledExtensionNames[i], VK_KHR_DEVICE_GROUP_EXTENSION_NAME)) {
+            dev->extensions.khr_device_group_enabled = true;
+        } else if (!strcmp(localCreateInfo.ppEnabledExtensionNames[i], VK_EXT_DEBUG_MARKER_EXTENSION_NAME)) {
+            dev->extensions.ext_debug_marker_enabled = true;
+        } else if (!strcmp(localCreateInfo.ppEnabledExtensionNames[i], "VK_EXT_full_screen_exclusive")) {
+            dev->extensions.ext_full_screen_exclusive_enabled = true;
+        }
+    }
+    dev->extensions.ext_debug_utils_enabled = icd_term->this_instance->enabled_known_extensions.ext_debug_utils;
+
+    if (!dev->extensions.khr_device_group_enabled) {
+        VkPhysicalDeviceProperties properties;
+        icd_term->dispatch.GetPhysicalDeviceProperties(phys_dev_term->phys_dev, &properties);
+        if (properties.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) {
+            dev->extensions.khr_device_group_enabled = true;
+        }
+    }
+
+    res = fpCreateDevice(phys_dev_term->phys_dev, &localCreateInfo, pAllocator, &dev->icd_device);
+    if (res != VK_SUCCESS) {
+        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "terminator_CreateDevice: Failed in ICD %s vkCreateDevice "
+                   "call",
+                   icd_term->scanned_icd->lib_name);
+        goto out;
+    }
+
+    *pDevice = dev->icd_device;
+    loader_add_logical_device(icd_term->this_instance, icd_term, dev);
+
+    // Init dispatch pointer in new device object
+    loader_init_dispatch(*pDevice, &dev->loader_dispatch);
+
+out:
+    if (NULL != icd_exts.list) {
+        loader_destroy_generic_list(icd_term->this_instance, (struct loader_generic_list *)&icd_exts);
+    }
+
+    // Restore the pNext pointer to the caller's original VkDeviceGroupDeviceCreateInfoKHR
+    // in the chain to maintain consistency for the caller.
+    if (caller_dgci_container != NULL) {
+        caller_dgci_container->pNext = (VkBaseOutStructure *)caller_dgci;
+    }
+
+    return res;
+}
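+
+// Illustrative sketch (not loader code): a minimal application-side vkCreateDevice
+// call whose enabledExtensionCount / ppEnabledExtensionNames are what
+// terminator_CreateDevice above filters against the ICD's extension list before
+// calling down. The identifiers `phys_dev` and `queue_family` are assumed to exist;
+// error handling is omitted.
+//
+//   const char *exts[] = { VK_KHR_SWAPCHAIN_EXTENSION_NAME };
+//   const float prio = 1.0f;
+//   VkDeviceQueueCreateInfo qci = {
+//       .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
+//       .queueFamilyIndex = queue_family,
+//       .queueCount = 1,
+//       .pQueuePriorities = &prio,
+//   };
+//   VkDeviceCreateInfo dci = {
+//       .sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
+//       .queueCreateInfoCount = 1,
+//       .pQueueCreateInfos = &qci,
+//       .enabledExtensionCount = 1,
+//       .ppEnabledExtensionNames = exts,
+//   };
+//   VkDevice device = VK_NULL_HANDLE;
+//   VkResult r = vkCreateDevice(phys_dev, &dci, NULL, &device);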
+
+VkResult setupLoaderTrampPhysDevs(VkInstance instance) {
+    VkResult res = VK_SUCCESS;
+    VkPhysicalDevice *local_phys_devs = NULL;
+    struct loader_instance *inst;
+    uint32_t total_count = 0;
+    struct loader_physical_device_tramp **new_phys_devs = NULL;
+
+    inst = loader_get_instance(instance);
+    if (NULL == inst) {
+        res = VK_ERROR_INITIALIZATION_FAILED;
+        goto out;
+    }
+
+    // Query how many GPUs there are.
+    res = inst->disp->layer_inst_disp.EnumeratePhysicalDevices(instance, &total_count, NULL);
+    if (res != VK_SUCCESS) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "setupLoaderTrampPhysDevs:  Failed during dispatch call "
+                   "of \'vkEnumeratePhysicalDevices\' to lower layers or "
+                   "loader to get count.");
+        goto out;
+    }
+
+    // Use the instance's total GPU count instead, since Optimus and other layers may
+    // alter the count reported above.
+    total_count = inst->total_gpu_count;
+
+    // Create an array for the new physical devices, which will be stored
+    // in the instance for the trampoline code.
+    new_phys_devs = (struct loader_physical_device_tramp **)loader_instance_heap_alloc(
+        inst, total_count * sizeof(struct loader_physical_device_tramp *), VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+    if (NULL == new_phys_devs) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "setupLoaderTrampPhysDevs:  Failed to allocate new physical device"
+                   " array of size %d",
+                   total_count);
+        res = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+    memset(new_phys_devs, 0, total_count * sizeof(struct loader_physical_device_tramp *));
+
+    // Create a temporary array (on the stack) to keep track of the
+    // returned VkPhysicalDevice values.
+    local_phys_devs = loader_stack_alloc(sizeof(VkPhysicalDevice) * total_count);
+    if (NULL == local_phys_devs) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "setupLoaderTrampPhysDevs:  Failed to allocate local "
+                   "physical device array of size %d",
+                   total_count);
+        res = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+    memset(local_phys_devs, 0, sizeof(VkPhysicalDevice) * total_count);
+
+    res = inst->disp->layer_inst_disp.EnumeratePhysicalDevices(instance, &total_count, local_phys_devs);
+    if (VK_SUCCESS != res) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "setupLoaderTrampPhysDevs:  Failed during dispatch call "
+                   "of \'vkEnumeratePhysicalDevices\' to lower layers or "
+                   "loader to get content.");
+        goto out;
+    }
+
+    // Copy or create everything to fill the new array of physical devices
+    for (uint32_t new_idx = 0; new_idx < total_count; new_idx++) {
+        // Check if this physical device is already in the old buffer
+        for (uint32_t old_idx = 0; old_idx < inst->phys_dev_count_tramp; old_idx++) {
+            if (local_phys_devs[new_idx] == inst->phys_devs_tramp[old_idx]->phys_dev) {
+                new_phys_devs[new_idx] = inst->phys_devs_tramp[old_idx];
+                break;
+            }
+        }
+
+        // If this physical device isn't in the old buffer, create it
+        if (NULL == new_phys_devs[new_idx]) {
+            new_phys_devs[new_idx] = (struct loader_physical_device_tramp *)loader_instance_heap_alloc(
+                inst, sizeof(struct loader_physical_device_tramp), VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+            if (NULL == new_phys_devs[new_idx]) {
+                loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                           "setupLoaderTrampPhysDevs:  Failed to allocate "
+                           "physical device trampoline object %d",
+                           new_idx);
+                total_count = new_idx;
+                res = VK_ERROR_OUT_OF_HOST_MEMORY;
+                goto out;
+            }
+
+            // Initialize the new physicalDevice object
+            loader_set_dispatch((void *)new_phys_devs[new_idx], inst->disp);
+            new_phys_devs[new_idx]->this_instance = inst;
+            new_phys_devs[new_idx]->phys_dev = local_phys_devs[new_idx];
+        }
+    }
+
+out:
+
+    if (VK_SUCCESS != res) {
+        if (NULL != new_phys_devs) {
+            for (uint32_t i = 0; i < total_count; i++) {
+                loader_instance_heap_free(inst, new_phys_devs[i]);
+            }
+            loader_instance_heap_free(inst, new_phys_devs);
+        }
+        total_count = 0;
+    } else {
+        // Free everything that didn't carry over to the new array of
+        // physical devices
+        if (NULL != inst->phys_devs_tramp) {
+            for (uint32_t i = 0; i < inst->phys_dev_count_tramp; i++) {
+                bool found = false;
+                for (uint32_t j = 0; j < total_count; j++) {
+                    if (inst->phys_devs_tramp[i] == new_phys_devs[j]) {
+                        found = true;
+                        break;
+                    }
+                }
+                if (!found) {
+                    loader_instance_heap_free(inst, inst->phys_devs_tramp[i]);
+                }
+            }
+            loader_instance_heap_free(inst, inst->phys_devs_tramp);
+        }
+
+        // Swap in the new physical device list
+        inst->phys_dev_count_tramp = total_count;
+        inst->phys_devs_tramp = new_phys_devs;
+    }
+
+    return res;
+}
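+
+// Illustrative sketch (not loader code): the standard two-call enumeration idiom
+// that drives setupLoaderTrampPhysDevs above. `inst_handle` is assumed to be a
+// valid VkInstance; error handling is omitted.
+//
+//   uint32_t count = 0;
+//   vkEnumeratePhysicalDevices(inst_handle, &count, NULL);             // query count
+//   VkPhysicalDevice *devs = malloc(count * sizeof(VkPhysicalDevice));
+//   vkEnumeratePhysicalDevices(inst_handle, &count, devs);             // fetch handles
+//   free(devs);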
+
+VkResult setupLoaderTermPhysDevs(struct loader_instance *inst) {
+    VkResult res = VK_SUCCESS;
+    struct loader_icd_term *icd_term;
+    struct loader_phys_dev_per_icd *icd_phys_dev_array = NULL;
+    struct loader_physical_device_term **new_phys_devs = NULL;
+
+    inst->total_gpu_count = 0;
+
+    // Allocate something to store the physical device characteristics
+    // that we read from each ICD.
+    icd_phys_dev_array =
+        (struct loader_phys_dev_per_icd *)loader_stack_alloc(sizeof(struct loader_phys_dev_per_icd) * inst->total_icd_count);
+    if (NULL == icd_phys_dev_array) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "setupLoaderTermPhysDevs:  Failed to allocate temporary "
+                   "ICD Physical device info array of size %d",
+                   inst->total_icd_count);
+        res = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+    memset(icd_phys_dev_array, 0, sizeof(struct loader_phys_dev_per_icd) * inst->total_icd_count);
+    icd_term = inst->icd_terms;
+
+    // For each ICD, query the number of physical devices, and then get an
+    // internal value for those physical devices.
+    for (uint32_t icd_idx = 0; NULL != icd_term; icd_term = icd_term->next, icd_idx++) {
+        res = icd_term->dispatch.EnumeratePhysicalDevices(icd_term->instance, &icd_phys_dev_array[icd_idx].count, NULL);
+        if (VK_SUCCESS != res) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "setupLoaderTermPhysDevs:  Call to "
+                       "ICD %d's \'vkEnumeratePhysicalDevices\' failed with"
+                       " error 0x%08x",
+                       icd_idx, res);
+            goto out;
+        }
+
+        icd_phys_dev_array[icd_idx].phys_devs =
+            (VkPhysicalDevice *)loader_stack_alloc(icd_phys_dev_array[icd_idx].count * sizeof(VkPhysicalDevice));
+        if (NULL == icd_phys_dev_array[icd_idx].phys_devs) {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "setupLoaderTermPhysDevs:  Failed to allocate temporary "
+                       "ICD Physical device array for ICD %d of size %d",
+                       icd_idx, icd_phys_dev_array[icd_idx].count);
+            res = VK_ERROR_OUT_OF_HOST_MEMORY;
+            goto out;
+        }
+
+        res = icd_term->dispatch.EnumeratePhysicalDevices(icd_term->instance, &(icd_phys_dev_array[icd_idx].count),
+                                                          icd_phys_dev_array[icd_idx].phys_devs);
+        if (VK_SUCCESS != res) {
+            goto out;
+        }
+        inst->total_gpu_count += icd_phys_dev_array[icd_idx].count;
+        icd_phys_dev_array[icd_idx].this_icd_term = icd_term;
+    }
+
+    if (0 == inst->total_gpu_count) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "setupLoaderTermPhysDevs:  Failed to detect any valid"
+                   " GPUs in the current config");
+        res = VK_ERROR_INITIALIZATION_FAILED;
+        goto out;
+    }
+
+    new_phys_devs = loader_instance_heap_alloc(inst, sizeof(struct loader_physical_device_term *) * inst->total_gpu_count,
+                                               VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+    if (NULL == new_phys_devs) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "setupLoaderTermPhysDevs:  Failed to allocate new physical"
+                   " device array of size %d",
+                   inst->total_gpu_count);
+        res = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+    memset(new_phys_devs, 0, sizeof(struct loader_physical_device_term *) * inst->total_gpu_count);
+
+    // Copy or create everything to fill the new array of physical devices
+    uint32_t idx = 0;
+    for (uint32_t icd_idx = 0; icd_idx < inst->total_icd_count; icd_idx++) {
+        for (uint32_t pd_idx = 0; pd_idx < icd_phys_dev_array[icd_idx].count; pd_idx++) {
+            // Check if this physical device is already in the old buffer
+            if (NULL != inst->phys_devs_term) {
+                for (uint32_t old_idx = 0; old_idx < inst->phys_dev_count_term; old_idx++) {
+                    if (icd_phys_dev_array[icd_idx].phys_devs[pd_idx] == inst->phys_devs_term[old_idx]->phys_dev) {
+                        new_phys_devs[idx] = inst->phys_devs_term[old_idx];
+                        break;
+                    }
+                }
+            }
+            // If this physical device isn't in the old buffer, then we
+            // need to create it.
+            if (NULL == new_phys_devs[idx]) {
+                new_phys_devs[idx] = loader_instance_heap_alloc(inst, sizeof(struct loader_physical_device_term),
+                                                                VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+                if (NULL == new_phys_devs[idx]) {
+                    loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                               "setupLoaderTermPhysDevs:  Failed to allocate "
+                               "physical device terminator object %d",
+                               idx);
+                    inst->total_gpu_count = idx;
+                    res = VK_ERROR_OUT_OF_HOST_MEMORY;
+                    goto out;
+                }
+
+                loader_set_dispatch((void *)new_phys_devs[idx], inst->disp);
+                new_phys_devs[idx]->this_icd_term = icd_phys_dev_array[icd_idx].this_icd_term;
+                new_phys_devs[idx]->icd_index = (uint8_t)(icd_idx);
+                new_phys_devs[idx]->phys_dev = icd_phys_dev_array[icd_idx].phys_devs[pd_idx];
+            }
+            idx++;
+        }
+    }
+
+out:
+
+    if (VK_SUCCESS != res) {
+        if (NULL != new_phys_devs) {
+            // We've encountered an error, so we should free the new buffers.
+            for (uint32_t i = 0; i < inst->total_gpu_count; i++) {
+                loader_instance_heap_free(inst, new_phys_devs[i]);
+            }
+            loader_instance_heap_free(inst, new_phys_devs);
+        }
+        inst->total_gpu_count = 0;
+    } else {
+        // Free everything that didn't carry over to the new array of
+        // physical devices.  Everything else will have been copied over
+        // to the new array.
+        if (NULL != inst->phys_devs_term) {
+            for (uint32_t cur_pd = 0; cur_pd < inst->phys_dev_count_term; cur_pd++) {
+                bool found = false;
+                for (uint32_t new_pd_idx = 0; new_pd_idx < inst->total_gpu_count; new_pd_idx++) {
+                    if (inst->phys_devs_term[cur_pd] == new_phys_devs[new_pd_idx]) {
+                        found = true;
+                        break;
+                    }
+                }
+                if (!found) {
+                    loader_instance_heap_free(inst, inst->phys_devs_term[cur_pd]);
+                }
+            }
+            loader_instance_heap_free(inst, inst->phys_devs_term);
+        }
+
+        // Swap out old and new devices list
+        inst->phys_dev_count_term = inst->total_gpu_count;
+        inst->phys_devs_term = new_phys_devs;
+    }
+
+    return res;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_EnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
+                                                                   VkPhysicalDevice *pPhysicalDevices) {
+    struct loader_instance *inst = (struct loader_instance *)instance;
+    VkResult res = VK_SUCCESS;
+
+    // Always call the setup loader terminator physical devices because they may
+    // have changed at any point.
+    res = setupLoaderTermPhysDevs(inst);
+    if (VK_SUCCESS != res) {
+        goto out;
+    }
+
+    uint32_t copy_count = inst->total_gpu_count;
+    if (NULL != pPhysicalDevices) {
+        if (copy_count > *pPhysicalDeviceCount) {
+            copy_count = *pPhysicalDeviceCount;
+            res = VK_INCOMPLETE;
+        }
+
+        for (uint32_t i = 0; i < copy_count; i++) {
+            pPhysicalDevices[i] = (VkPhysicalDevice)inst->phys_devs_term[i];
+        }
+    }
+
+    *pPhysicalDeviceCount = copy_count;
+
+out:
+
+    return res;
+}
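+
+// Illustrative sketch (not loader code): the count/copy contract implemented above.
+// When the caller's buffer is smaller than the number of available devices, only
+// *pPhysicalDeviceCount entries are written and VK_INCOMPLETE is returned.
+// `inst_handle` is assumed to be a valid VkInstance.
+//
+//   VkPhysicalDevice two[2];
+//   uint32_t n = 2;
+//   VkResult r = vkEnumeratePhysicalDevices(inst_handle, &n, two);
+//   // On a system exposing three devices: n == 2 and r == VK_INCOMPLETE.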
+
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,
+                                                                  VkPhysicalDeviceProperties *pProperties) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    if (NULL != icd_term->dispatch.GetPhysicalDeviceProperties) {
+        icd_term->dispatch.GetPhysicalDeviceProperties(phys_dev_term->phys_dev, pProperties);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
+                                                                             uint32_t *pQueueFamilyPropertyCount,
+                                                                             VkQueueFamilyProperties *pProperties) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    if (NULL != icd_term->dispatch.GetPhysicalDeviceQueueFamilyProperties) {
+        icd_term->dispatch.GetPhysicalDeviceQueueFamilyProperties(phys_dev_term->phys_dev, pQueueFamilyPropertyCount, pProperties);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice,
+                                                                        VkPhysicalDeviceMemoryProperties *pProperties) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    if (NULL != icd_term->dispatch.GetPhysicalDeviceMemoryProperties) {
+        icd_term->dispatch.GetPhysicalDeviceMemoryProperties(phys_dev_term->phys_dev, pProperties);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
+                                                                VkPhysicalDeviceFeatures *pFeatures) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    if (NULL != icd_term->dispatch.GetPhysicalDeviceFeatures) {
+        icd_term->dispatch.GetPhysicalDeviceFeatures(phys_dev_term->phys_dev, pFeatures);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format,
+                                                                        VkFormatProperties *pFormatInfo) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    if (NULL != icd_term->dispatch.GetPhysicalDeviceFormatProperties) {
+        icd_term->dispatch.GetPhysicalDeviceFormatProperties(phys_dev_term->phys_dev, format, pFormatInfo);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format,
+                                                                                 VkImageType type, VkImageTiling tiling,
+                                                                                 VkImageUsageFlags usage, VkImageCreateFlags flags,
+                                                                                 VkImageFormatProperties *pImageFormatProperties) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    if (NULL == icd_term->dispatch.GetPhysicalDeviceImageFormatProperties) {
+        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "terminator_GetPhysicalDeviceImageFormatProperties: The ICD does not "
+                   "expose vkGetPhysicalDeviceImageFormatProperties.");
+        return VK_ERROR_INITIALIZATION_FAILED;
+    }
+    return icd_term->dispatch.GetPhysicalDeviceImageFormatProperties(phys_dev_term->phys_dev, format, type, tiling, usage, flags,
+                                                                     pImageFormatProperties);
+}
+
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format,
+                                                                                   VkImageType type, VkSampleCountFlagBits samples,
+                                                                                   VkImageUsageFlags usage, VkImageTiling tiling,
+                                                                                   uint32_t *pNumProperties,
+                                                                                   VkSparseImageFormatProperties *pProperties) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    if (NULL != icd_term->dispatch.GetPhysicalDeviceSparseImageFormatProperties) {
+        icd_term->dispatch.GetPhysicalDeviceSparseImageFormatProperties(phys_dev_term->phys_dev, format, type, samples, usage,
+                                                                        tiling, pNumProperties, pProperties);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
+                                                                             const char *pLayerName, uint32_t *pPropertyCount,
+                                                                             VkExtensionProperties *pProperties) {
+    struct loader_physical_device_term *phys_dev_term;
+
+    struct loader_layer_list implicit_layer_list = {0};
+    struct loader_extension_list all_exts = {0};
+    struct loader_extension_list icd_exts = {0};
+
+    // Any layer or trampoline wrapping should have been removed at this point, so we can just cast to the expected
+    // type for VkPhysicalDevice.
+    phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+
+    // If we got here with a non-empty pLayerName, look up the extensions
+    // from the layer's JSON manifest
+    if (pLayerName != NULL && strlen(pLayerName) > 0) {
+        uint32_t count;
+        uint32_t copy_size;
+        const struct loader_instance *inst = phys_dev_term->this_icd_term->this_instance;
+        struct loader_device_extension_list *dev_ext_list = NULL;
+        struct loader_device_extension_list local_ext_list;
+        memset(&local_ext_list, 0, sizeof(local_ext_list));
+        if (vk_string_validate(MaxLoaderStringLength, pLayerName) == VK_STRING_ERROR_NONE) {
+            for (uint32_t i = 0; i < inst->instance_layer_list.count; i++) {
+                struct loader_layer_properties *props = &inst->instance_layer_list.list[i];
+                if (strcmp(props->info.layerName, pLayerName) == 0) {
+                    dev_ext_list = &props->device_extension_list;
+                }
+            }
+
+            count = (dev_ext_list == NULL) ? 0 : dev_ext_list->count;
+            if (pProperties == NULL) {
+                *pPropertyCount = count;
+                loader_destroy_generic_list(inst, (struct loader_generic_list *)&local_ext_list);
+                loader_platform_thread_unlock_mutex(&loader_lock);
+                return VK_SUCCESS;
+            }
+
+            copy_size = *pPropertyCount < count ? *pPropertyCount : count;
+            for (uint32_t i = 0; i < copy_size; i++) {
+                memcpy(&pProperties[i], &dev_ext_list->list[i].props, sizeof(VkExtensionProperties));
+            }
+            *pPropertyCount = copy_size;
+
+            loader_destroy_generic_list(inst, (struct loader_generic_list *)&local_ext_list);
+            if (copy_size < count) {
+                loader_platform_thread_unlock_mutex(&loader_lock);
+                return VK_INCOMPLETE;
+            }
+        } else {
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                       "vkEnumerateDeviceExtensionProperties:  pLayerName "
+                       "is too long or is badly formed");
+            loader_platform_thread_unlock_mutex(&loader_lock);
+            return VK_ERROR_EXTENSION_NOT_PRESENT;
+        }
+
+        return VK_SUCCESS;
+    }
+
+    // This case is during the call down the instance chain with pLayerName == NULL
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    uint32_t icd_ext_count = *pPropertyCount;
+    VkResult res;
+
+    // Get the available device extensions
+    res = icd_term->dispatch.EnumerateDeviceExtensionProperties(phys_dev_term->phys_dev, NULL, &icd_ext_count, pProperties);
+    if (res != VK_SUCCESS) {
+        goto out;
+    }
+
+    if (!loaderInitLayerList(icd_term->this_instance, &implicit_layer_list)) {
+        res = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+
+    loaderAddImplicitLayers(icd_term->this_instance, &implicit_layer_list, NULL, &icd_term->this_instance->instance_layer_list);
+    // We need to determine which implicit layers are active, and then add their extensions. This can't be cached as
+    // it depends on results of environment variables (which can change).
+    if (pProperties != NULL) {
+        // Initialize dev_extension list within the physicalDevice object
+        res = loader_init_device_extensions(icd_term->this_instance, phys_dev_term, icd_ext_count, pProperties, &icd_exts);
+        if (res != VK_SUCCESS) {
+            goto out;
+        }
+
+        // Add the ICD's own device extensions to the combined extension list first.
+        res = loader_add_to_ext_list(icd_term->this_instance, &all_exts, icd_exts.count, icd_exts.list);
+        if (res != VK_SUCCESS) {
+            goto out;
+        }
+
+        loaderAddImplicitLayers(icd_term->this_instance, &implicit_layer_list, NULL, &icd_term->this_instance->instance_layer_list);
+
+        for (uint32_t i = 0; i < implicit_layer_list.count; i++) {
+            for (uint32_t j = 0; j < implicit_layer_list.list[i].device_extension_list.count; j++) {
+                res = loader_add_to_ext_list(icd_term->this_instance, &all_exts, 1,
+                                             &implicit_layer_list.list[i].device_extension_list.list[j].props);
+                if (res != VK_SUCCESS) {
+                    goto out;
+                }
+            }
+        }
+        uint32_t capacity = *pPropertyCount;
+        VkExtensionProperties *props = pProperties;
+
+        for (uint32_t i = 0; i < all_exts.count && i < capacity; i++) {
+            props[i] = all_exts.list[i];
+        }
+
+        // Wasn't enough space for the extensions, we did partial copy now return VK_INCOMPLETE
+        if (capacity < all_exts.count) {
+            res = VK_INCOMPLETE;
+        } else {
+            *pPropertyCount = all_exts.count;
+        }
+    } else {
+        // Just return the count; we need to add in the count of implicit layer extensions.
+        // Don't worry about duplicates being added to the count.
+        *pPropertyCount = icd_ext_count;
+
+        for (uint32_t i = 0; i < implicit_layer_list.count; i++) {
+            *pPropertyCount += implicit_layer_list.list[i].device_extension_list.count;
+        }
+        res = VK_SUCCESS;
+    }
+
+out:
+
+    if (NULL != implicit_layer_list.list) {
+        loader_destroy_generic_list(icd_term->this_instance, (struct loader_generic_list *)&implicit_layer_list);
+    }
+    if (NULL != all_exts.list) {
+        loader_destroy_generic_list(icd_term->this_instance, (struct loader_generic_list *)&all_exts);
+    }
+    if (NULL != icd_exts.list) {
+        loader_destroy_generic_list(icd_term->this_instance, (struct loader_generic_list *)&icd_exts);
+    }
+
+    return res;
+}
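+
+// Illustrative sketch (not loader code): querying device extensions with the same
+// two-call idiom this terminator implements. `phys_dev` is assumed to be a valid
+// VkPhysicalDevice; error handling is omitted.
+//
+//   uint32_t n = 0;
+//   vkEnumerateDeviceExtensionProperties(phys_dev, NULL, &n, NULL);
+//   VkExtensionProperties *props = malloc(n * sizeof(*props));
+//   vkEnumerateDeviceExtensionProperties(phys_dev, NULL, &n, props);
+//   for (uint32_t i = 0; i < n; i++)
+//       printf("%s (spec %u)\n", props[i].extensionName, props[i].specVersion);
+//   free(props);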
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
+                                                                         VkLayerProperties *pProperties) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    loader_log(icd_term->this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+               "Encountered the vkEnumerateDeviceLayerProperties "
+               "terminator.  This means a layer improperly continued.");
+    // Should never get here; this call isn't dispatched down the chain.
+    return VK_ERROR_INITIALIZATION_FAILED;
+}
+
+VkStringErrorFlags vk_string_validate(const int max_length, const char *utf8) {
+    VkStringErrorFlags result = VK_STRING_ERROR_NONE;
+    int num_char_bytes = 0;
+    int i, j;
+
+    for (i = 0; i <= max_length; i++) {
+        if (utf8[i] == 0) {
+            break;
+        } else if (i == max_length) {
+            result |= VK_STRING_ERROR_LENGTH;
+            break;
+        } else if ((utf8[i] >= 0x20) && (utf8[i] < 0x7f)) {
+            num_char_bytes = 0;
+        } else if ((utf8[i] & UTF8_ONE_BYTE_MASK) == UTF8_ONE_BYTE_CODE) {
+            num_char_bytes = 1;
+        } else if ((utf8[i] & UTF8_TWO_BYTE_MASK) == UTF8_TWO_BYTE_CODE) {
+            num_char_bytes = 2;
+        } else if ((utf8[i] & UTF8_THREE_BYTE_MASK) == UTF8_THREE_BYTE_CODE) {
+            num_char_bytes = 3;
+        } else {
+            result = VK_STRING_ERROR_BAD_DATA;
+        }
+
+        // Validate the following num_char_bytes of data
+        for (j = 0; (j < num_char_bytes) && (i < max_length); j++) {
+            if (++i == max_length) {
+                result |= VK_STRING_ERROR_LENGTH;
+                break;
+            }
+            if ((utf8[i] & UTF8_DATA_BYTE_MASK) != UTF8_DATA_BYTE_CODE) {
+                result |= VK_STRING_ERROR_BAD_DATA;
+            }
+        }
+    }
+    return result;
+}
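+
+// Illustrative sketch (not loader code): how vk_string_validate above is typically
+// used to reject malformed layer names. The layer name here is only an example value.
+//
+//   const char *name = "VK_LAYER_KHRONOS_validation";
+//   if (vk_string_validate(MaxLoaderStringLength, name) != VK_STRING_ERROR_NONE) {
+//       // Reject: the string is too long or is not well-formed UTF-8.
+//   }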
+
+VKAPI_ATTR VkResult VKAPI_CALL
+terminator_EnumerateInstanceVersion(const VkEnumerateInstanceVersionChain *chain, uint32_t* pApiVersion) {
+    // NOTE: The Vulkan WG doesn't want us checking pApiVersion for NULL, but instead
+    // prefers us crashing.
+    *pApiVersion = VK_MAKE_VERSION(loader_major_version, loader_minor_version, VK_HEADER_VERSION);
+    return VK_SUCCESS;
+}
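+
+// Illustrative sketch (not loader code): unpacking the packed version written above
+// with the standard VK_VERSION_* macros.
+//
+//   uint32_t v = 0;
+//   vkEnumerateInstanceVersion(&v);
+//   printf("Vulkan %u.%u.%u\n",
+//          VK_VERSION_MAJOR(v), VK_VERSION_MINOR(v), VK_VERSION_PATCH(v));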
+
+VKAPI_ATTR VkResult VKAPI_CALL
+terminator_EnumerateInstanceExtensionProperties(const VkEnumerateInstanceExtensionPropertiesChain *chain, const char *pLayerName,
+                                                uint32_t *pPropertyCount, VkExtensionProperties *pProperties) {
+    struct loader_extension_list *global_ext_list = NULL;
+    struct loader_layer_list instance_layers;
+    struct loader_extension_list local_ext_list;
+    struct loader_icd_tramp_list icd_tramp_list;
+    uint32_t copy_size;
+    VkResult res = VK_SUCCESS;
+
+    // tls_instance = NULL;
+    memset(&local_ext_list, 0, sizeof(local_ext_list));
+    memset(&instance_layers, 0, sizeof(instance_layers));
+
+    // Get layer libraries if needed
+    if (pLayerName && strlen(pLayerName) != 0) {
+        if (vk_string_validate(MaxLoaderStringLength, pLayerName) != VK_STRING_ERROR_NONE) {
+            assert(VK_FALSE &&
+                   "vkEnumerateInstanceExtensionProperties:  "
+                   "pLayerName is too long or is badly formed");
+            res = VK_ERROR_EXTENSION_NOT_PRESENT;
+            goto out;
+        }
+
+        loaderScanForLayers(NULL, &instance_layers);
+        for (uint32_t i = 0; i < instance_layers.count; i++) {
+            struct loader_layer_properties *props = &instance_layers.list[i];
+            if (strcmp(props->info.layerName, pLayerName) == 0) {
+                global_ext_list = &props->instance_extension_list;
+                break;
+            }
+        }
+    } else {
+        // Scan/discover all ICD libraries
+        memset(&icd_tramp_list, 0, sizeof(icd_tramp_list));
+        res = loader_icd_scan(NULL, &icd_tramp_list);
+        if (VK_SUCCESS != res) {
+            goto out;
+        }
+        // Get extensions from all ICDs and merge them so there are no duplicates.
+        res = loader_get_icd_loader_instance_extensions(NULL, &icd_tramp_list, &local_ext_list);
+        if (VK_SUCCESS != res) {
+            goto out;
+        }
+        loader_scanned_icd_clear(NULL, &icd_tramp_list);
+
+        // Append enabled implicit layers.
+        loaderScanForImplicitLayers(NULL, &instance_layers);
+        for (uint32_t i = 0; i < instance_layers.count; i++) {
+            if (!loaderImplicitLayerIsEnabled(NULL, &instance_layers.list[i])) {
+                continue;
+            }
+            struct loader_extension_list *ext_list = &instance_layers.list[i].instance_extension_list;
+            loader_add_to_ext_list(NULL, &local_ext_list, ext_list->count, ext_list->list);
+        }
+
+        global_ext_list = &local_ext_list;
+    }
+
+    if (global_ext_list == NULL) {
+        res = VK_ERROR_LAYER_NOT_PRESENT;
+        goto out;
+    }
+
+    if (pProperties == NULL) {
+        *pPropertyCount = global_ext_list->count;
+        goto out;
+    }
+
+    copy_size = *pPropertyCount < global_ext_list->count ? *pPropertyCount : global_ext_list->count;
+    for (uint32_t i = 0; i < copy_size; i++) {
+        memcpy(&pProperties[i], &global_ext_list->list[i], sizeof(VkExtensionProperties));
+    }
+    *pPropertyCount = copy_size;
+
+    if (copy_size < global_ext_list->count) {
+        res = VK_INCOMPLETE;
+        goto out;
+    }
+
+out:
+
+    loader_destroy_generic_list(NULL, (struct loader_generic_list *)&local_ext_list);
+    loaderDeleteLayerListAndProperties(NULL, &instance_layers);
+    return res;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_EnumerateInstanceLayerProperties(const VkEnumerateInstanceLayerPropertiesChain *chain,
+                                                                           uint32_t *pPropertyCount,
+                                                                           VkLayerProperties *pProperties) {
+    VkResult result = VK_SUCCESS;
+    struct loader_layer_list instance_layer_list;
+    tls_instance = NULL;
+
+    LOADER_PLATFORM_THREAD_ONCE(&once_init, loader_initialize);
+
+    uint32_t copy_size;
+
+    // Get layer libraries
+    memset(&instance_layer_list, 0, sizeof(instance_layer_list));
+    loaderScanForLayers(NULL, &instance_layer_list);
+
+    if (pProperties == NULL) {
+        *pPropertyCount = instance_layer_list.count;
+        goto out;
+    }
+
+    copy_size = (*pPropertyCount < instance_layer_list.count) ? *pPropertyCount : instance_layer_list.count;
+    for (uint32_t i = 0; i < copy_size; i++) {
+        memcpy(&pProperties[i], &instance_layer_list.list[i].info, sizeof(VkLayerProperties));
+    }
+
+    *pPropertyCount = copy_size;
+
+    if (copy_size < instance_layer_list.count) {
+        result = VK_INCOMPLETE;
+        goto out;
+    }
+
+out:
+
+    loaderDeleteLayerListAndProperties(NULL, &instance_layer_list);
+    return result;
+}
+
+#if defined(_WIN32) && defined(LOADER_DYNAMIC_LIB)
+BOOL WINAPI DllMain(HINSTANCE hinst, DWORD reason, LPVOID reserved) {
+    switch (reason) {
+        case DLL_PROCESS_ATTACH:
+            loader_initialize();
+            break;
+        case DLL_PROCESS_DETACH:
+            if (NULL == reserved) {
+                loader_release();
+            }
+            break;
+        default:
+            // Do nothing
+            break;
+    }
+    return TRUE;
+}
+#elif !defined(_WIN32)
+__attribute__((constructor)) void loader_init_library() { loader_initialize(); }
+
+__attribute__((destructor)) void loader_free_library() { loader_release(); }
+#endif
+
+// ---- Vulkan Core 1.1 terminators
+
+VkResult setupLoaderTermPhysDevGroups(struct loader_instance *inst) {
+    VkResult res = VK_SUCCESS;
+    struct loader_icd_term *icd_term;
+    uint32_t total_count = 0;
+    uint32_t cur_icd_group_count = 0;
+    VkPhysicalDeviceGroupPropertiesKHR **new_phys_dev_groups = NULL;
+    VkPhysicalDeviceGroupPropertiesKHR *local_phys_dev_groups = NULL;
+    PFN_vkEnumeratePhysicalDeviceGroups fpEnumeratePhysicalDeviceGroups = NULL;
+
+    if (0 == inst->phys_dev_count_term) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+            "setupLoaderTermPhysDevGroups:  Loader failed to setup physical "
+            "device terminator info before calling \'EnumeratePhysicalDeviceGroups\'.");
+        assert(false);
+        res = VK_ERROR_INITIALIZATION_FAILED;
+        goto out;
+    }
+
+    // For each ICD, query the number of physical device groups, and then get an
+    // internal value for those physical devices.
+    icd_term = inst->icd_terms;
+    for (uint32_t icd_idx = 0; NULL != icd_term; icd_term = icd_term->next, icd_idx++) {
+        // Get the function pointer to use to call into the ICD. This could be the core or KHR version
+        if (inst->enabled_known_extensions.khr_device_group_creation) {
+            fpEnumeratePhysicalDeviceGroups = icd_term->dispatch.EnumeratePhysicalDeviceGroupsKHR;
+        } else {
+            fpEnumeratePhysicalDeviceGroups = icd_term->dispatch.EnumeratePhysicalDeviceGroups;
+        }
+
+        cur_icd_group_count = 0;
+        if (NULL == fpEnumeratePhysicalDeviceGroups) {
+            // Treat each of the ICD's GPUs as its own group if the extension isn't supported
+            res = icd_term->dispatch.EnumeratePhysicalDevices(icd_term->instance, &cur_icd_group_count, NULL);
+            if (res != VK_SUCCESS) {
+                loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                    "setupLoaderTermPhysDevGroups:  Failed during dispatch call of "
+                    "\'EnumeratePhysicalDevices\' to ICD %d to get plain phys dev count.",
+                    icd_idx);
+                goto out;
+            }
+        } else {
+            // Query the actual group info
+            res = fpEnumeratePhysicalDeviceGroups(icd_term->instance, &cur_icd_group_count, NULL);
+            if (res != VK_SUCCESS) {
+                loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                    "setupLoaderTermPhysDevGroups:  Failed during dispatch call of "
+                    "\'EnumeratePhysicalDeviceGroups\' to ICD %d to get count.",
+                    icd_idx);
+                goto out;
+            }
+        }
+        total_count += cur_icd_group_count;
+    }
+
+    // Create an array for the new physical device groups, which will be stored
+    // in the instance for the Terminator code.
+    new_phys_dev_groups = (VkPhysicalDeviceGroupProperties **)loader_instance_heap_alloc(
+        inst, total_count * sizeof(VkPhysicalDeviceGroupProperties *), VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+    if (NULL == new_phys_dev_groups) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+            "setupLoaderTermPhysDevGroups:  Failed to allocate new physical device"
+            " group array of size %d",
+            total_count);
+        res = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+    memset(new_phys_dev_groups, 0, total_count * sizeof(VkPhysicalDeviceGroupProperties *));
+
+    // Create a temporary array (on the stack) to keep track of the
+    // returned VkPhysicalDevice values.
+    local_phys_dev_groups = loader_stack_alloc(sizeof(VkPhysicalDeviceGroupProperties) * total_count);
+    if (NULL == local_phys_dev_groups) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+            "setupLoaderTermPhysDevGroups:  Failed to allocate local "
+            "physical device group array of size %d",
+            total_count);
+        res = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+    // Initialize the memory to something valid
+    memset(local_phys_dev_groups, 0, sizeof(VkPhysicalDeviceGroupProperties) * total_count);
+    for (uint32_t group = 0; group < total_count; group++) {
+        local_phys_dev_groups[group].sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR;
+        local_phys_dev_groups[group].pNext = NULL;
+        local_phys_dev_groups[group].subsetAllocation = false;
+    }
+
+    cur_icd_group_count = 0;
+    icd_term = inst->icd_terms;
+    for (uint32_t icd_idx = 0; NULL != icd_term; icd_term = icd_term->next, icd_idx++) {
+        uint32_t count_this_time = total_count - cur_icd_group_count;
+
+        // Get the function pointer to use to call into the ICD. This could be the core or KHR version
+        if (inst->enabled_known_extensions.khr_device_group_creation) {
+            fpEnumeratePhysicalDeviceGroups = icd_term->dispatch.EnumeratePhysicalDeviceGroupsKHR;
+        } else {
+            fpEnumeratePhysicalDeviceGroups = icd_term->dispatch.EnumeratePhysicalDeviceGroups;
+        }
+
+        if (NULL == fpEnumeratePhysicalDeviceGroups) {
+            VkPhysicalDevice* phys_dev_array = loader_stack_alloc(sizeof(VkPhysicalDevice) * count_this_time);
+            if (NULL == phys_dev_array) {
+                loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                    "setupLoaderTermPhysDevGroups:  Failed to allocate local "
+                    "physical device array of size %d",
+                    count_this_time);
+                res = VK_ERROR_OUT_OF_HOST_MEMORY;
+                goto out;
+            }
+
+            res = icd_term->dispatch.EnumeratePhysicalDevices(icd_term->instance, &count_this_time, phys_dev_array);
+            if (res != VK_SUCCESS) {
+                loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                    "setupLoaderTermPhysDevGroups:  Failed during dispatch call of "
+                    "\'EnumeratePhysicalDevices\' to ICD %d to get plain phys dev count.",
+                    icd_idx);
+                goto out;
+            }
+
+            // Add each GPU as its own group
+            for (uint32_t indiv_gpu = 0; indiv_gpu < count_this_time; indiv_gpu++) {
+                local_phys_dev_groups[indiv_gpu + cur_icd_group_count].physicalDeviceCount = 1;
+                local_phys_dev_groups[indiv_gpu + cur_icd_group_count].physicalDevices[0] = phys_dev_array[indiv_gpu];
+            }
+
+        } else {
+            res = fpEnumeratePhysicalDeviceGroups(icd_term->instance, &count_this_time, &local_phys_dev_groups[cur_icd_group_count]);
+            if (VK_SUCCESS != res) {
+                loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                    "setupLoaderTermPhysDevGroups:  Failed during dispatch call of "
+                    "\'EnumeratePhysicalDeviceGroups\' to ICD %d to get content.",
+                    icd_idx);
+                goto out;
+            }
+        }
+
+        cur_icd_group_count += count_this_time;
+    }
+
+    // Replace all the physical device IDs with the proper loader values
+    for (uint32_t group = 0; group < total_count; group++) {
+        for (uint32_t group_gpu = 0; group_gpu < local_phys_dev_groups[group].physicalDeviceCount; group_gpu++) {
+            bool found = false;
+            for (uint32_t term_gpu = 0; term_gpu < inst->phys_dev_count_term; term_gpu++) {
+                if (local_phys_dev_groups[group].physicalDevices[group_gpu] == inst->phys_devs_term[term_gpu]->phys_dev) {
+                    local_phys_dev_groups[group].physicalDevices[group_gpu] = (VkPhysicalDevice)inst->phys_devs_term[term_gpu];
+                    found = true;
+                    break;
+                }
+            }
+            if (!found) {
+                loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                    "setupLoaderTermPhysDevGroups:  Failed to find GPU %d in group %d"
+                    " returned by \'EnumeratePhysicalDeviceGroups\' in list returned"
+                    " by \'EnumeratePhysicalDevices\'", group_gpu, group);
+                res = VK_ERROR_INITIALIZATION_FAILED;
+                goto out;
+            }
+        }
+    }
+
+    // Copy or create everything to fill the new array of physical device groups
+    for (uint32_t new_idx = 0; new_idx < total_count; new_idx++) {
+        // Check if this physical device group with the same contents is already in the old buffer
+        for (uint32_t old_idx = 0; old_idx < inst->phys_dev_group_count_term; old_idx++) {
+            if (local_phys_dev_groups[new_idx].physicalDeviceCount == inst->phys_dev_groups_term[old_idx]->physicalDeviceCount) {
+                bool found_all_gpus = true;
+                for (uint32_t old_gpu = 0; old_gpu < inst->phys_dev_groups_term[old_idx]->physicalDeviceCount; old_gpu++) {
+                    bool found_gpu = false;
+                    for (uint32_t new_gpu = 0; new_gpu < local_phys_dev_groups[new_idx].physicalDeviceCount; new_gpu++) {
+                        if (local_phys_dev_groups[new_idx].physicalDevices[new_gpu] == inst->phys_dev_groups_term[old_idx]->physicalDevices[old_gpu]) {
+                            found_gpu = true;
+                            break;
+                        }
+                    }
+
+                    if (!found_gpu) {
+                        found_all_gpus = false;
+                        break;
+                    }
+                }
+                if (!found_all_gpus) {
+                    continue;
+                } else {
+                    new_phys_dev_groups[new_idx] = inst->phys_dev_groups_term[old_idx];
+                    break;
+                }
+            }
+        }
+
+        // If this physical device group isn't in the old buffer, create it
+        if (NULL == new_phys_dev_groups[new_idx]) {
+            new_phys_dev_groups[new_idx] = (VkPhysicalDeviceGroupPropertiesKHR *)loader_instance_heap_alloc(
+                inst, sizeof(VkPhysicalDeviceGroupPropertiesKHR), VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+            if (NULL == new_phys_dev_groups[new_idx]) {
+                loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                    "setupLoaderTermPhysDevGroups:  Failed to allocate "
+                    "physical device group Terminator object %d",
+                    new_idx);
+                total_count = new_idx;
+                res = VK_ERROR_OUT_OF_HOST_MEMORY;
+                goto out;
+            }
+            memcpy(new_phys_dev_groups[new_idx], &local_phys_dev_groups[new_idx],
+                sizeof(VkPhysicalDeviceGroupPropertiesKHR));
+        }
+    }
+
+out:
+
+    if (VK_SUCCESS != res) {
+        if (NULL != new_phys_dev_groups) {
+            for (uint32_t i = 0; i < total_count; i++) {
+                loader_instance_heap_free(inst, new_phys_dev_groups[i]);
+            }
+            loader_instance_heap_free(inst, new_phys_dev_groups);
+        }
+        total_count = 0;
+    } else {
+        // Free everything that didn't carry over to the new array of
+        // physical device groups
+        if (NULL != inst->phys_dev_groups_term) {
+            for (uint32_t i = 0; i < inst->phys_dev_group_count_term; i++) {
+                bool found = false;
+                for (uint32_t j = 0; j < total_count; j++) {
+                    if (inst->phys_dev_groups_term[i] == new_phys_dev_groups[j]) {
+                        found = true;
+                        break;
+                    }
+                }
+                if (!found) {
+                    loader_instance_heap_free(inst, inst->phys_dev_groups_term[i]);
+                }
+            }
+            loader_instance_heap_free(inst, inst->phys_dev_groups_term);
+        }
+
+        // Swap in the new physical device group list
+        inst->phys_dev_group_count_term = total_count;
+        inst->phys_dev_groups_term = new_phys_dev_groups;
+    }
+
+    return res;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_EnumeratePhysicalDeviceGroups(
+    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties) {
+    struct loader_instance *inst = (struct loader_instance *)instance;
+    VkResult res = VK_SUCCESS;
+
+    // Always call setupLoaderTermPhysDevGroups here because the set of physical
+    // device groups may have changed at any point.
+    res = setupLoaderTermPhysDevGroups(inst);
+    if (VK_SUCCESS != res) {
+        goto out;
+    }
+
+    uint32_t copy_count = inst->phys_dev_group_count_term;
+    if (NULL != pPhysicalDeviceGroupProperties) {
+        if (copy_count > *pPhysicalDeviceGroupCount) {
+            copy_count = *pPhysicalDeviceGroupCount;
+            res = VK_INCOMPLETE;
+        }
+
+        for (uint32_t i = 0; i < copy_count; i++) {
+            memcpy(&pPhysicalDeviceGroupProperties[i], inst->phys_dev_groups_term[i],
+                   sizeof(VkPhysicalDeviceGroupPropertiesKHR));
+        }
+    }
+
+    *pPhysicalDeviceGroupCount = copy_count;
+
+out:
+
+    return res;
+}
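
For reference, a minimal caller-side sketch of the two-call idiom that ultimately lands in this terminator, assuming only a valid VkInstance and <vulkan/vulkan.h>; VK_INCOMPLETE signals that the caller's count was smaller than the number of groups the loader tracks:

#include <stdlib.h>
#include <vulkan/vulkan.h>

static VkResult enumerate_device_groups(VkInstance instance) {
    // First call: query how many physical device groups exist.
    uint32_t count = 0;
    VkResult res = vkEnumeratePhysicalDeviceGroups(instance, &count, NULL);
    if (res != VK_SUCCESS || count == 0) return res;

    VkPhysicalDeviceGroupProperties *groups = calloc(count, sizeof(*groups));
    if (groups == NULL) return VK_ERROR_OUT_OF_HOST_MEMORY;
    for (uint32_t i = 0; i < count; ++i) {
        groups[i].sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES;
    }

    // Second call: fill the array; VK_INCOMPLETE means count was too small.
    res = vkEnumeratePhysicalDeviceGroups(instance, &count, groups);
    free(groups);
    return res;
}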
+
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
+                                                                    VkPhysicalDeviceFeatures2 *pFeatures) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    const struct loader_instance *inst = icd_term->this_instance;
+
+    // Get the function pointer to use to call into the ICD. This could be the core or KHR version
+    PFN_vkGetPhysicalDeviceFeatures2 fpGetPhysicalDeviceFeatures2 = NULL;
+    if (inst != NULL && inst->enabled_known_extensions.khr_get_physical_device_properties2) {
+        fpGetPhysicalDeviceFeatures2 = icd_term->dispatch.GetPhysicalDeviceFeatures2KHR;
+    } else {
+        fpGetPhysicalDeviceFeatures2 = icd_term->dispatch.GetPhysicalDeviceFeatures2;
+    }
+
+    if (fpGetPhysicalDeviceFeatures2 != NULL || !inst->enabled_known_extensions.khr_get_physical_device_properties2) {
+        // Pass the call to the driver
+        fpGetPhysicalDeviceFeatures2(phys_dev_term->phys_dev, pFeatures);
+    } else {
+        // Emulate the call
+        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                   "vkGetPhysicalDeviceFeatures2: Emulating call in ICD \"%s\" using vkGetPhysicalDeviceFeatures",
+                   icd_term->scanned_icd->lib_name);
+
+        // Write to the VkPhysicalDeviceFeatures2 struct
+        icd_term->dispatch.GetPhysicalDeviceFeatures(phys_dev_term->phys_dev, &pFeatures->features);
+
+        const VkBaseInStructure *pNext = pFeatures->pNext;
+        while (pNext != NULL) {
+            switch (pNext->sType) {
+                case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES: {
+                    // Skip the check if VK_KHR_multiview is enabled because it's a device extension
+                    // Write to the VkPhysicalDeviceMultiviewFeaturesKHR struct
+                    VkPhysicalDeviceMultiviewFeaturesKHR *multiview_features = (VkPhysicalDeviceMultiviewFeaturesKHR *)pNext;
+                    multiview_features->multiview = VK_FALSE;
+                    multiview_features->multiviewGeometryShader = VK_FALSE;
+                    multiview_features->multiviewTessellationShader = VK_FALSE;
+
+                    pNext = multiview_features->pNext;
+                    break;
+                }
+                default: {
+                    loader_log(icd_term->this_instance, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                               "vkGetPhysicalDeviceFeatures2: Emulation found unrecognized structure type in pFeatures->pNext - "
+                               "this struct will be ignored");
+
+                    pNext = pNext->pNext;
+                    break;
+                }
+            }
+        }
+    }
+}
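
As a sketch of what the emulation path above handles, a caller might chain VkPhysicalDeviceMultiviewFeatures through pNext; the physical-device handle and header are the only assumptions here. When the call is emulated, the three multiview booleans come back VK_FALSE:

#include <vulkan/vulkan.h>

static void query_multiview_features(VkPhysicalDevice phys_dev) {
    VkPhysicalDeviceMultiviewFeatures multiview = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES,
    };
    VkPhysicalDeviceFeatures2 features2 = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
        .pNext = &multiview,
    };
    // Forwarded to the ICD when it exposes the 2/2KHR entry point,
    // otherwise emulated via vkGetPhysicalDeviceFeatures.
    vkGetPhysicalDeviceFeatures2(phys_dev, &features2);
}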
+
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice,
+                                                                      VkPhysicalDeviceProperties2 *pProperties) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    const struct loader_instance *inst = icd_term->this_instance;
+
+    // Get the function pointer to use to call into the ICD. This could be the core or KHR version
+    PFN_vkGetPhysicalDeviceProperties2 fpGetPhysicalDeviceProperties2 = NULL;
+    if (inst != NULL && inst->enabled_known_extensions.khr_get_physical_device_properties2) {
+        fpGetPhysicalDeviceProperties2 = icd_term->dispatch.GetPhysicalDeviceProperties2KHR;
+    } else {
+        fpGetPhysicalDeviceProperties2 = icd_term->dispatch.GetPhysicalDeviceProperties2;
+    }
+
+    if (fpGetPhysicalDeviceProperties2 != NULL || !inst->enabled_known_extensions.khr_get_physical_device_properties2) {
+        // Pass the call to the driver
+        fpGetPhysicalDeviceProperties2(phys_dev_term->phys_dev, pProperties);
+    } else {
+        // Emulate the call
+        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                   "vkGetPhysicalDeviceProperties2: Emulating call in ICD \"%s\" using vkGetPhysicalDeviceProperties",
+                   icd_term->scanned_icd->lib_name);
+
+        // Write to the VkPhysicalDeviceProperties2 struct
+        icd_term->dispatch.GetPhysicalDeviceProperties(phys_dev_term->phys_dev, &pProperties->properties);
+
+        const VkBaseInStructure *pNext = pProperties->pNext;
+        while (pNext != NULL) {
+            switch (pNext->sType) {
+                case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES: {
+                    VkPhysicalDeviceIDPropertiesKHR *id_properties = (VkPhysicalDeviceIDPropertiesKHR *)pNext;
+
+                    // Verify that "VK_KHR_external_memory_capabilities" is enabled
+                    if (icd_term->this_instance->enabled_known_extensions.khr_external_memory_capabilities) {
+                        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                                   "vkGetPhysicalDeviceProperties2: Emulation cannot generate unique IDs for struct "
+                                   "VkPhysicalDeviceIDProperties - setting IDs to zero instead");
+
+                        // Write to the VkPhysicalDeviceIDPropertiesKHR struct
+                        memset(id_properties->deviceUUID, 0, VK_UUID_SIZE);
+                        memset(id_properties->driverUUID, 0, VK_UUID_SIZE);
+                        id_properties->deviceLUIDValid = VK_FALSE;
+                    }
+
+                    pNext = id_properties->pNext;
+                    break;
+                }
+                default: {
+                    loader_log(icd_term->this_instance, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                               "vkGetPhysicalDeviceProperties2KHR: Emulation found unrecognized structure type in "
+                               "pProperties->pNext - this struct will be ignored");
+
+                    pNext = pNext->pNext;
+                    break;
+                }
+            }
+        }
+    }
+}
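
Similarly, a hedged caller-side sketch for the properties path, chaining VkPhysicalDeviceIDProperties; under emulation the UUIDs are zeroed and deviceLUIDValid is VK_FALSE, as the warning above notes:

#include <vulkan/vulkan.h>

static void query_device_ids(VkPhysicalDevice phys_dev) {
    VkPhysicalDeviceIDProperties id_props = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES,
    };
    VkPhysicalDeviceProperties2 props2 = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
        .pNext = &id_props,
    };
    vkGetPhysicalDeviceProperties2(phys_dev, &props2);
    // id_props.deviceUUID / driverUUID come back all zero when the loader emulated the call.
}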
+
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceFormatProperties2(VkPhysicalDevice physicalDevice, VkFormat format,
+                                                                            VkFormatProperties2 *pFormatProperties) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    const struct loader_instance *inst = icd_term->this_instance;
+
+    // Get the function pointer to use to call into the ICD. This could be the core or KHR version
+    PFN_vkGetPhysicalDeviceFormatProperties2 fpGetPhysicalDeviceFormatProperties2 = NULL;
+    if (inst != NULL && inst->enabled_known_extensions.khr_get_physical_device_properties2) {
+        fpGetPhysicalDeviceFormatProperties2 = icd_term->dispatch.GetPhysicalDeviceFormatProperties2KHR;
+    } else {
+        fpGetPhysicalDeviceFormatProperties2 = icd_term->dispatch.GetPhysicalDeviceFormatProperties2;
+    }
+
+    if (fpGetPhysicalDeviceFormatProperties2 != NULL || !inst->enabled_known_extensions.khr_get_physical_device_properties2) {
+        // Pass the call to the driver
+        fpGetPhysicalDeviceFormatProperties2(phys_dev_term->phys_dev, format, pFormatProperties);
+    } else {
+        // Emulate the call
+        loader_log(
+            icd_term->this_instance, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+            "vkGetPhysicalDeviceFormatProperties2: Emulating call in ICD \"%s\" using vkGetPhysicalDeviceFormatProperties",
+            icd_term->scanned_icd->lib_name);
+
+        // Write to the VkFormatProperties2 struct
+        icd_term->dispatch.GetPhysicalDeviceFormatProperties(phys_dev_term->phys_dev, format, &pFormatProperties->formatProperties);
+
+        if (pFormatProperties->pNext != NULL) {
+            loader_log(icd_term->this_instance, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "vkGetPhysicalDeviceFormatProperties2: Emulation found unrecognized structure type in "
+                       "pFormatProperties->pNext - this struct will be ignored");
+        }
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceImageFormatProperties2(
+    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2KHR *pImageFormatInfo,
+    VkImageFormatProperties2KHR *pImageFormatProperties) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    const struct loader_instance *inst = icd_term->this_instance;
+
+    // Get the function pointer to use to call into the ICD. This could be the core or KHR version
+    PFN_vkGetPhysicalDeviceImageFormatProperties2 fpGetPhysicalDeviceImageFormatProperties2 = NULL;
+    if (inst != NULL && inst->enabled_known_extensions.khr_get_physical_device_properties2) {
+        fpGetPhysicalDeviceImageFormatProperties2 = icd_term->dispatch.GetPhysicalDeviceImageFormatProperties2KHR;
+    } else {
+        fpGetPhysicalDeviceImageFormatProperties2 = icd_term->dispatch.GetPhysicalDeviceImageFormatProperties2;
+    }
+
+    if (fpGetPhysicalDeviceImageFormatProperties2 != NULL || !inst->enabled_known_extensions.khr_get_physical_device_properties2) {
+        // Pass the call to the driver
+        return fpGetPhysicalDeviceImageFormatProperties2(phys_dev_term->phys_dev, pImageFormatInfo, pImageFormatProperties);
+    } else {
+        // Emulate the call
+        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                   "vkGetPhysicalDeviceImageFormatProperties2: Emulating call in ICD \"%s\" using "
+                   "vkGetPhysicalDeviceImageFormatProperties",
+                   icd_term->scanned_icd->lib_name);
+
+        // If either pNext chain carries additional structures, the emulation cannot support the query
+        if (pImageFormatInfo->pNext != NULL || pImageFormatProperties->pNext != NULL) {
+            return VK_ERROR_FORMAT_NOT_SUPPORTED;
+        }
+
+        // Write to the VkImageFormatProperties2KHR struct
+        return icd_term->dispatch.GetPhysicalDeviceImageFormatProperties(
+            phys_dev_term->phys_dev, pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling,
+            pImageFormatInfo->usage, pImageFormatInfo->flags, &pImageFormatProperties->imageFormatProperties);
+    }
+}
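
A minimal sketch of the corresponding image-format query, with the format and usage values chosen purely for illustration; any pNext structure on either side forces the emulation path to return VK_ERROR_FORMAT_NOT_SUPPORTED:

#include <vulkan/vulkan.h>

static VkResult query_image_format(VkPhysicalDevice phys_dev) {
    VkPhysicalDeviceImageFormatInfo2 info = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
        .format = VK_FORMAT_R8G8B8A8_UNORM,
        .type = VK_IMAGE_TYPE_2D,
        .tiling = VK_IMAGE_TILING_OPTIMAL,
        .usage = VK_IMAGE_USAGE_SAMPLED_BIT,
    };
    VkImageFormatProperties2 props = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
    };
    return vkGetPhysicalDeviceImageFormatProperties2(phys_dev, &info, &props);
}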
+
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceQueueFamilyProperties2(
+    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    const struct loader_instance *inst = icd_term->this_instance;
+
+    // Get the function pointer to use to call into the ICD. This could be the core or KHR version
+    PFN_vkGetPhysicalDeviceQueueFamilyProperties2 fpGetPhysicalDeviceQueueFamilyProperties2 = NULL;
+    if (inst != NULL && inst->enabled_known_extensions.khr_get_physical_device_properties2) {
+        fpGetPhysicalDeviceQueueFamilyProperties2 = icd_term->dispatch.GetPhysicalDeviceQueueFamilyProperties2KHR;
+    } else {
+        fpGetPhysicalDeviceQueueFamilyProperties2 = icd_term->dispatch.GetPhysicalDeviceQueueFamilyProperties2;
+    }
+
+    if (fpGetPhysicalDeviceQueueFamilyProperties2 != NULL || !inst->enabled_known_extensions.khr_get_physical_device_properties2) {
+        // Pass the call to the driver
+        fpGetPhysicalDeviceQueueFamilyProperties2(phys_dev_term->phys_dev, pQueueFamilyPropertyCount, pQueueFamilyProperties);
+    } else {
+        // Emulate the call
+        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                   "vkGetPhysicalDeviceQueueFamilyProperties2: Emulating call in ICD \"%s\" using "
+                   "vkGetPhysicalDeviceQueueFamilyProperties",
+                   icd_term->scanned_icd->lib_name);
+
+        if (pQueueFamilyProperties == NULL || *pQueueFamilyPropertyCount == 0) {
+            // Write to pQueueFamilyPropertyCount
+            icd_term->dispatch.GetPhysicalDeviceQueueFamilyProperties(phys_dev_term->phys_dev, pQueueFamilyPropertyCount, NULL);
+        } else {
+            // Allocate a temporary array for the output of the old function
+            VkQueueFamilyProperties *properties = loader_stack_alloc(*pQueueFamilyPropertyCount * sizeof(VkQueueFamilyProperties));
+            if (properties == NULL) {
+                *pQueueFamilyPropertyCount = 0;
+                loader_log(
+                    icd_term->this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                    "vkGetPhysicalDeviceQueueFamilyProperties2: Out of memory - Failed to allocate array for loader emulation.");
+                return;
+            }
+
+            icd_term->dispatch.GetPhysicalDeviceQueueFamilyProperties(phys_dev_term->phys_dev, pQueueFamilyPropertyCount,
+                                                                      properties);
+            for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
+                // Write to the VkQueueFamilyProperties2KHR struct
+                memcpy(&pQueueFamilyProperties[i].queueFamilyProperties, &properties[i], sizeof(VkQueueFamilyProperties));
+
+                if (pQueueFamilyProperties[i].pNext != NULL) {
+                    loader_log(icd_term->this_instance, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                               "vkGetPhysicalDeviceQueueFamilyProperties2: Emulation found unrecognized structure type in "
+                               "pQueueFamilyProperties[%d].pNext - this struct will be ignored",
+                               i);
+                }
+            }
+        }
+    }
+}
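
The queue-family terminator serves the usual two-call idiom; a caller-side sketch, assuming only a valid physical device, looks like this (each output element needs its sType set before the second call):

#include <stdlib.h>
#include <vulkan/vulkan.h>

static void query_queue_families(VkPhysicalDevice phys_dev) {
    uint32_t count = 0;
    vkGetPhysicalDeviceQueueFamilyProperties2(phys_dev, &count, NULL);

    VkQueueFamilyProperties2 *families = calloc(count, sizeof(*families));
    if (families == NULL || count == 0) {
        free(families);
        return;
    }
    for (uint32_t i = 0; i < count; ++i) {
        families[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2;
    }
    vkGetPhysicalDeviceQueueFamilyProperties2(phys_dev, &count, families);
    free(families);
}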
+
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceMemoryProperties2(
+    VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2 *pMemoryProperties) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    const struct loader_instance *inst = icd_term->this_instance;
+
+    // Get the function pointer to use to call into the ICD. This could be the core or KHR version
+    PFN_vkGetPhysicalDeviceMemoryProperties2 fpGetPhysicalDeviceMemoryProperties2 = NULL;
+    if (inst != NULL && inst->enabled_known_extensions.khr_get_physical_device_properties2) {
+        fpGetPhysicalDeviceMemoryProperties2 = icd_term->dispatch.GetPhysicalDeviceMemoryProperties2KHR;
+    } else {
+        fpGetPhysicalDeviceMemoryProperties2 = icd_term->dispatch.GetPhysicalDeviceMemoryProperties2;
+    }
+
+    if (fpGetPhysicalDeviceMemoryProperties2 != NULL || !inst->enabled_known_extensions.khr_get_physical_device_properties2) {
+        // Pass the call to the driver
+        fpGetPhysicalDeviceMemoryProperties2(phys_dev_term->phys_dev, pMemoryProperties);
+    } else {
+        // Emulate the call
+        loader_log(
+            icd_term->this_instance, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+            "vkGetPhysicalDeviceMemoryProperties2: Emulating call in ICD \"%s\" using vkGetPhysicalDeviceMemoryProperties",
+            icd_term->scanned_icd->lib_name);
+
+        // Write to the VkPhysicalDeviceMemoryProperties2 struct
+        icd_term->dispatch.GetPhysicalDeviceMemoryProperties(phys_dev_term->phys_dev, &pMemoryProperties->memoryProperties);
+
+        if (pMemoryProperties->pNext != NULL) {
+            loader_log(icd_term->this_instance, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "vkGetPhysicalDeviceMemoryProperties2: Emulation found unrecognized structure type in "
+                       "pMemoryProperties->pNext - this struct will be ignored");
+        }
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceSparseImageFormatProperties2(
+    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2KHR *pFormatInfo, uint32_t *pPropertyCount,
+    VkSparseImageFormatProperties2KHR *pProperties) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    const struct loader_instance *inst = icd_term->this_instance;
+
+    // Get the function pointer to use to call into the ICD. This could be the core or KHR version
+    PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 fpGetPhysicalDeviceSparseImageFormatProperties2 = NULL;
+    if (inst != NULL && inst->enabled_known_extensions.khr_get_physical_device_properties2) {
+        fpGetPhysicalDeviceSparseImageFormatProperties2 = icd_term->dispatch.GetPhysicalDeviceSparseImageFormatProperties2KHR;
+    } else {
+        fpGetPhysicalDeviceSparseImageFormatProperties2 = icd_term->dispatch.GetPhysicalDeviceSparseImageFormatProperties2;
+    }
+
+    if (fpGetPhysicalDeviceSparseImageFormatProperties2 != NULL || !inst->enabled_known_extensions.khr_get_physical_device_properties2) {
+        // Pass the call to the driver
+        fpGetPhysicalDeviceSparseImageFormatProperties2(phys_dev_term->phys_dev, pFormatInfo, pPropertyCount, pProperties);
+    } else {
+        // Emulate the call
+        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                   "vkGetPhysicalDeviceSparseImageFormatProperties2: Emulating call in ICD \"%s\" using "
+                   "vkGetPhysicalDeviceSparseImageFormatProperties",
+                   icd_term->scanned_icd->lib_name);
+
+        if (pFormatInfo->pNext != NULL) {
+            loader_log(icd_term->this_instance, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "vkGetPhysicalDeviceSparseImageFormatProperties2: Emulation found unrecognized structure type in "
+                       "pFormatInfo->pNext - this struct will be ignored");
+        }
+
+        if (pProperties == NULL || *pPropertyCount == 0) {
+            // Write to pPropertyCount
+            icd_term->dispatch.GetPhysicalDeviceSparseImageFormatProperties(
+                phys_dev_term->phys_dev, pFormatInfo->format, pFormatInfo->type, pFormatInfo->samples, pFormatInfo->usage,
+                pFormatInfo->tiling, pPropertyCount, NULL);
+        } else {
+            // Allocate a temporary array for the output of the old function
+            VkSparseImageFormatProperties *properties =
+                loader_stack_alloc(*pPropertyCount * sizeof(VkSparseImageFormatProperties));
+            if (properties == NULL) {
+                *pPropertyCount = 0;
+                loader_log(icd_term->this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                           "vkGetPhysicalDeviceSparseImageFormatProperties2: Out of memory - Failed to allocate array for "
+                           "loader emulation.");
+                return;
+            }
+
+            icd_term->dispatch.GetPhysicalDeviceSparseImageFormatProperties(
+                phys_dev_term->phys_dev, pFormatInfo->format, pFormatInfo->type, pFormatInfo->samples, pFormatInfo->usage,
+                pFormatInfo->tiling, pPropertyCount, properties);
+            for (uint32_t i = 0; i < *pPropertyCount; ++i) {
+                // Write to the VkSparseImageFormatProperties2KHR struct
+                memcpy(&pProperties[i].properties, &properties[i], sizeof(VkSparseImageFormatProperties));
+
+                if (pProperties[i].pNext != NULL) {
+                    loader_log(icd_term->this_instance, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                               "vkGetPhysicalDeviceSparseImageFormatProperties2: Emulation found unrecognized structure type in "
+                               "pProperties[%d].pNext - this struct will be ignored",
+                               i);
+                }
+            }
+        }
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceExternalBufferProperties(
+    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo *pExternalBufferInfo,
+    VkExternalBufferProperties *pExternalBufferProperties) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    const struct loader_instance *inst = icd_term->this_instance;
+
+    // Get the function pointer to use to call into the ICD. This could be the core or KHR version
+    PFN_vkGetPhysicalDeviceExternalBufferProperties fpGetPhysicalDeviceExternalBufferProperties = NULL;
+    if (inst != NULL && inst->enabled_known_extensions.khr_external_memory_capabilities) {
+        fpGetPhysicalDeviceExternalBufferProperties = icd_term->dispatch.GetPhysicalDeviceExternalBufferPropertiesKHR;
+    } else {
+        fpGetPhysicalDeviceExternalBufferProperties = icd_term->dispatch.GetPhysicalDeviceExternalBufferProperties;
+    }
+
+    if (fpGetPhysicalDeviceExternalBufferProperties != NULL || !inst->enabled_known_extensions.khr_external_memory_capabilities) {
+        // Pass the call to the driver
+        fpGetPhysicalDeviceExternalBufferProperties(phys_dev_term->phys_dev, pExternalBufferInfo, pExternalBufferProperties);
+    } else {
+        // Emulate the call
+        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                   "vkGetPhysicalDeviceExternalBufferProperties: Emulating call in ICD \"%s\"", icd_term->scanned_icd->lib_name);
+
+        if (pExternalBufferInfo->pNext != NULL) {
+            loader_log(icd_term->this_instance, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "vkGetPhysicalDeviceExternalBufferProperties: Emulation found unrecognized structure type in "
+                       "pExternalBufferInfo->pNext - this struct will be ignored");
+        }
+
+        // Report everything as unsupported
+        memset(&pExternalBufferProperties->externalMemoryProperties, 0, sizeof(VkExternalMemoryPropertiesKHR));
+
+        if (pExternalBufferProperties->pNext != NULL) {
+            loader_log(icd_term->this_instance, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "vkGetPhysicalDeviceExternalBufferProperties: Emulation found unrecognized structure type in "
+                       "pExternalBufferProperties->pNext - this struct will be ignored");
+        }
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceExternalSemaphoreProperties(
+    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo *pExternalSemaphoreInfo,
+    VkExternalSemaphoreProperties *pExternalSemaphoreProperties) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    const struct loader_instance *inst = icd_term->this_instance;
+
+    // Get the function pointer to use to call into the ICD. This could be the core or KHR version
+    PFN_vkGetPhysicalDeviceExternalSemaphoreProperties fpGetPhysicalDeviceExternalSemaphoreProperties = NULL;
+    if (inst != NULL && inst->enabled_known_extensions.khr_external_semaphore_capabilities) {
+        fpGetPhysicalDeviceExternalSemaphoreProperties = icd_term->dispatch.GetPhysicalDeviceExternalSemaphorePropertiesKHR;
+    } else {
+        fpGetPhysicalDeviceExternalSemaphoreProperties = icd_term->dispatch.GetPhysicalDeviceExternalSemaphoreProperties;
+    }
+
+    if (fpGetPhysicalDeviceExternalSemaphoreProperties != NULL || !inst->enabled_known_extensions.khr_external_semaphore_capabilities) {
+        // Pass the call to the driver
+        fpGetPhysicalDeviceExternalSemaphoreProperties(phys_dev_term->phys_dev, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
+    } else {
+        // Emulate the call
+        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                   "vkGetPhysicalDeviceExternalSemaphoreProperties: Emulating call in ICD \"%s\"",
+                   icd_term->scanned_icd->lib_name);
+
+        if (pExternalSemaphoreInfo->pNext != NULL) {
+            loader_log(icd_term->this_instance, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "vkGetPhysicalDeviceExternalSemaphoreProperties: Emulation found unrecognized structure type in "
+                       "pExternalSemaphoreInfo->pNext - this struct will be ignored");
+        }
+
+        // Report everything as unsupported
+        pExternalSemaphoreProperties->exportFromImportedHandleTypes = 0;
+        pExternalSemaphoreProperties->compatibleHandleTypes = 0;
+        pExternalSemaphoreProperties->externalSemaphoreFeatures = 0;
+
+        if (pExternalSemaphoreProperties->pNext != NULL) {
+            loader_log(icd_term->this_instance, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "vkGetPhysicalDeviceExternalSemaphoreProperties: Emulation found unrecognized structure type in "
+                       "pExternalSemaphoreProperties->pNext - this struct will be ignored");
+        }
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceExternalFenceProperties(
+    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo *pExternalFenceInfo,
+    VkExternalFenceProperties *pExternalFenceProperties) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    const struct loader_instance *inst = icd_term->this_instance;
+
+    // Get the function pointer to use to call into the ICD. This could be the core or KHR version
+    PFN_vkGetPhysicalDeviceExternalFenceProperties fpGetPhysicalDeviceExternalFenceProperties = NULL;
+    if (inst != NULL && inst->enabled_known_extensions.khr_external_fence_capabilities) {
+        fpGetPhysicalDeviceExternalFenceProperties = icd_term->dispatch.GetPhysicalDeviceExternalFencePropertiesKHR;
+    } else {
+        fpGetPhysicalDeviceExternalFenceProperties = icd_term->dispatch.GetPhysicalDeviceExternalFenceProperties;
+    }
+
+    if (fpGetPhysicalDeviceExternalFenceProperties != NULL || !inst->enabled_known_extensions.khr_external_fence_capabilities) {
+        // Pass the call to the driver
+        fpGetPhysicalDeviceExternalFenceProperties(phys_dev_term->phys_dev, pExternalFenceInfo, pExternalFenceProperties);
+    } else {
+        // Emulate the call
+        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                   "vkGetPhysicalDeviceExternalFenceProperties: Emulating call in ICD \"%s\"", icd_term->scanned_icd->lib_name);
+
+        if (pExternalFenceInfo->pNext != NULL) {
+            loader_log(icd_term->this_instance, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "vkGetPhysicalDeviceExternalFenceProperties: Emulation found unrecognized structure type in "
+                       "pExternalFenceInfo->pNext - this struct will be ignored");
+        }
+
+        // Report everything as unsupported
+        pExternalFenceProperties->exportFromImportedHandleTypes = 0;
+        pExternalFenceProperties->compatibleHandleTypes = 0;
+        pExternalFenceProperties->externalFenceFeatures = 0;
+
+        if (pExternalFenceProperties->pNext != NULL) {
+            loader_log(icd_term->this_instance, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "vkGetPhysicalDeviceExternalFenceProperties: Emulation found unrecognized structure type in "
+                       "pExternalFenceProperties->pNext - this struct will be ignored");
+        }
+    }
+}
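
For completeness, a sketch of the external-fence query the last terminator serves; the opaque-FD handle type is an arbitrary choice for illustration. When the loader emulates, all three output masks are zero, i.e. the handle type is reported as unsupported:

#include <vulkan/vulkan.h>

static void query_external_fence(VkPhysicalDevice phys_dev) {
    VkPhysicalDeviceExternalFenceInfo info = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO,
        .handleType = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT,
    };
    VkExternalFenceProperties props = {
        .sType = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES,
    };
    vkGetPhysicalDeviceExternalFenceProperties(phys_dev, &info, &props);
    // props.externalFenceFeatures == 0 means export/import is not available.
}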
diff --git a/src/third_party/vulkan-loader/src/loader/loader.h b/src/third_party/vulkan-loader/src/loader/loader.h
new file mode 100644
index 0000000..8d6b4c4
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/loader.h
@@ -0,0 +1,532 @@
+/*
+ *
+ * Copyright (c) 2014-2019 The Khronos Group Inc.
+ * Copyright (c) 2014-2019 Valve Corporation
+ * Copyright (c) 2014-2019 LunarG, Inc.
+ * Copyright (C) 2015 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Jon Ashburn <jon@lunarg.com>
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Chia-I Wu <olvaffe@gmail.com>
+ * Author: Chia-I Wu <olv@lunarg.com>
+ * Author: Mark Lobodzinski <mark@LunarG.com>
+ * Author: Lenny Komow <lenny@lunarg.com>
+ *
+ */
+
+#ifndef LOADER_H
+#define LOADER_H
+
+#include <vulkan/vulkan.h>
+#include "vk_loader_platform.h"
+#include "vk_loader_layer.h"
+#include <vulkan/vk_layer.h>
+#include <vulkan/vk_icd.h>
+#include <assert.h>
+#include "vk_layer_dispatch_table.h"
+#include "vk_loader_extensions.h"
+
+#if defined(__GNUC__) && __GNUC__ >= 4
+#define LOADER_EXPORT __attribute__((visibility("default")))
+#elif defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590)
+#define LOADER_EXPORT __attribute__((visibility("default")))
+#else
+#define LOADER_EXPORT
+#endif
+
+// A debug option to disable allocators at compile time to investigate future issues.
+#define DEBUG_DISABLE_APP_ALLOCATORS 0
+
+#define MAX_STRING_SIZE 1024
+
+// This is defined in vk_layer.h, but if there are problems we need to create the
+// define here.
+#ifndef MAX_NUM_UNKNOWN_EXTS
+#define MAX_NUM_UNKNOWN_EXTS 250
+#endif
+
+enum layer_type_flags {
+    VK_LAYER_TYPE_FLAG_INSTANCE_LAYER = 0x1,  // If not set, indicates Device layer
+    VK_LAYER_TYPE_FLAG_EXPLICIT_LAYER = 0x2,  // If not set, indicates Implicit layer
+    VK_LAYER_TYPE_FLAG_META_LAYER = 0x4,      // If not set, indicates standard layer
+};
+
+typedef enum VkStringErrorFlagBits {
+    VK_STRING_ERROR_NONE = 0x00000000,
+    VK_STRING_ERROR_LENGTH = 0x00000001,
+    VK_STRING_ERROR_BAD_DATA = 0x00000002,
+} VkStringErrorFlagBits;
+typedef VkFlags VkStringErrorFlags;
+
+static const int MaxLoaderStringLength = 256;
+static const char UTF8_ONE_BYTE_CODE = 0xC0;
+static const char UTF8_ONE_BYTE_MASK = 0xE0;
+static const char UTF8_TWO_BYTE_CODE = 0xE0;
+static const char UTF8_TWO_BYTE_MASK = 0xF0;
+static const char UTF8_THREE_BYTE_CODE = 0xF0;
+static const char UTF8_THREE_BYTE_MASK = 0xF8;
+static const char UTF8_DATA_BYTE_CODE = 0x80;
+static const char UTF8_DATA_BYTE_MASK = 0xC0;
+
+// form of all dynamic lists/arrays
+// only the list element should be changed
+struct loader_generic_list {
+    size_t capacity;
+    uint32_t count;
+    void *list;
+};
+
+struct loader_extension_list {
+    size_t capacity;
+    uint32_t count;
+    VkExtensionProperties *list;
+};
+
+struct loader_dev_ext_props {
+    VkExtensionProperties props;
+    uint32_t entrypoint_count;
+    char **entrypoints;
+};
+
+struct loader_device_extension_list {
+    size_t capacity;
+    uint32_t count;
+    struct loader_dev_ext_props *list;
+};
+
+struct loader_name_value {
+    char name[MAX_STRING_SIZE];
+    char value[MAX_STRING_SIZE];
+};
+
+struct loader_layer_functions {
+    char str_gipa[MAX_STRING_SIZE];
+    char str_gdpa[MAX_STRING_SIZE];
+    char str_negotiate_interface[MAX_STRING_SIZE];
+    PFN_vkNegotiateLoaderLayerInterfaceVersion negotiate_layer_interface;
+    PFN_vkGetInstanceProcAddr get_instance_proc_addr;
+    PFN_vkGetDeviceProcAddr get_device_proc_addr;
+    PFN_GetPhysicalDeviceProcAddr get_physical_device_proc_addr;
+};
+
+struct loader_override_expiration {
+    uint16_t year;
+    uint8_t month;
+    uint8_t day;
+    uint8_t hour;
+    uint8_t minute;
+};
+
+struct loader_layer_properties {
+    VkLayerProperties info;
+    enum layer_type_flags type_flags;
+    uint32_t interface_version;  // PFN_vkNegotiateLoaderLayerInterfaceVersion
+    char lib_name[MAX_STRING_SIZE];
+    loader_platform_dl_handle lib_handle;
+    struct loader_layer_functions functions;
+    struct loader_extension_list instance_extension_list;
+    struct loader_device_extension_list device_extension_list;
+    struct loader_name_value disable_env_var;
+    struct loader_name_value enable_env_var;
+    uint32_t num_component_layers;
+    char (*component_layer_names)[MAX_STRING_SIZE];
+    struct {
+        char enumerate_instance_extension_properties[MAX_STRING_SIZE];
+        char enumerate_instance_layer_properties[MAX_STRING_SIZE];
+        char enumerate_instance_version[MAX_STRING_SIZE];
+    } pre_instance_functions;
+    uint32_t num_override_paths;
+    char (*override_paths)[MAX_STRING_SIZE];
+    bool is_override;
+    bool has_expiration;
+    struct loader_override_expiration expiration;
+    bool keep;
+    uint32_t num_blacklist_layers;
+    char (*blacklist_layer_names)[MAX_STRING_SIZE];
+};
+
+struct loader_layer_list {
+    size_t capacity;
+    uint32_t count;
+    struct loader_layer_properties *list;
+};
+
+struct loader_dispatch_hash_list {
+    size_t capacity;
+    uint32_t count;
+    uint32_t *index;  // index into the dev_ext dispatch table
+};
+
+// loader_dispatch_hash_entry and loader_dev_ext_dispatch_table.dev_ext have
+// one to one correspondence; one loader_dispatch_hash_entry for one dev_ext
+// dispatch entry.
+// Also have a one to one correspondence with functions in dev_ext_trampoline.c
+struct loader_dispatch_hash_entry {
+    char *func_name;
+    struct loader_dispatch_hash_list list;  // to handle hashing collisions
+};
+
+typedef VkResult(VKAPI_PTR *PFN_vkDevExt)(VkDevice device);
+struct loader_dev_ext_dispatch_table {
+    PFN_vkDevExt dev_ext[MAX_NUM_UNKNOWN_EXTS];
+};
+
+struct loader_dev_dispatch_table {
+    VkLayerDispatchTable core_dispatch;
+    struct loader_dev_ext_dispatch_table ext_dispatch;
+};
+
+// per CreateDevice structure
+struct loader_device {
+    struct loader_dev_dispatch_table loader_dispatch;
+    VkDevice chain_device;  // device object from the dispatch chain
+    VkDevice icd_device;    // device object from the icd
+    struct loader_physical_device_term *phys_dev_term;
+
+    // List of activated layers.
+    //  app_      is the version based on exactly what the application asked for.
+    //            This is what must be returned to the application on Enumerate calls.
+    //  expanded_ is the version based on expanding meta-layers into their
+    //            individual component layers.  This is what is used internally.
+    struct loader_layer_list app_activated_layer_list;
+    struct loader_layer_list expanded_activated_layer_list;
+
+    VkAllocationCallbacks alloc_callbacks;
+
+    // List of activated device extensions that have terminators implemented in the loader
+    struct {
+        bool khr_swapchain_enabled;
+        bool khr_display_swapchain_enabled;
+        bool khr_device_group_enabled;
+        bool ext_debug_marker_enabled;
+        bool ext_debug_utils_enabled;
+        bool ext_full_screen_exclusive_enabled;
+    } extensions;
+
+    struct loader_device *next;
+};
+
+// Per ICD information
+
+// Per ICD structure
+struct loader_icd_term {
+    // pointers to find other structs
+    const struct loader_scanned_icd *scanned_icd;
+    const struct loader_instance *this_instance;
+    struct loader_device *logical_device_list;
+    VkInstance instance;  // instance object from the icd
+    struct loader_icd_term_dispatch dispatch;
+
+    struct loader_icd_term *next;
+
+    PFN_PhysDevExt phys_dev_ext[MAX_NUM_UNKNOWN_EXTS];
+};
+
+// Per ICD library structure
+struct loader_icd_tramp_list {
+    size_t capacity;
+    uint32_t count;
+    struct loader_scanned_icd *scanned_list;
+};
+
+struct loader_instance_dispatch_table {
+    VkLayerInstanceDispatchTable layer_inst_disp;  // must be first entry in structure
+
+    // Physical device functions unknown to the loader
+    PFN_PhysDevExt phys_dev_ext[MAX_NUM_UNKNOWN_EXTS];
+};
+
+// Per instance structure
+struct loader_instance {
+    struct loader_instance_dispatch_table *disp;  // must be first entry in structure
+
+    // Vulkan API version the app is intending to use.
+    uint16_t app_api_major_version;
+    uint16_t app_api_minor_version;
+
+    // We need to manually track physical devices over time.  If the user
+    // re-queries the information, we don't want to delete old data or
+    // create new data unless necessary.
+    uint32_t total_gpu_count;
+    uint32_t phys_dev_count_term;
+    struct loader_physical_device_term **phys_devs_term;
+    uint32_t phys_dev_count_tramp;
+    struct loader_physical_device_tramp **phys_devs_tramp;
+
+    // We also need to manually track physical device groups, but we don't need
+    // loader specific structures since we have that content in the physical
+    // device stored internal to the public structures.
+    uint32_t phys_dev_group_count_term;
+    struct VkPhysicalDeviceGroupProperties **phys_dev_groups_term;
+    uint32_t phys_dev_group_count_tramp;
+    struct VkPhysicalDeviceGroupProperties **phys_dev_groups_tramp;
+
+    struct loader_instance *next;
+
+    uint32_t total_icd_count;
+    struct loader_icd_term *icd_terms;
+    struct loader_icd_tramp_list icd_tramp_list;
+
+    struct loader_dispatch_hash_entry dev_ext_disp_hash[MAX_NUM_UNKNOWN_EXTS];
+    struct loader_dispatch_hash_entry phys_dev_ext_disp_hash[MAX_NUM_UNKNOWN_EXTS];
+
+    struct loader_msg_callback_map_entry *icd_msg_callback_map;
+
+    struct loader_layer_list instance_layer_list;
+    bool override_layer_present;
+
+    // List of activated layers.
+    //  app_      is the version based on exactly what the application asked for.
+    //            This is what must be returned to the application on Enumerate calls.
+    //  expanded_ is the version based on expanding meta-layers into their
+    //            individual component layers.  This is what is used internally.
+    struct loader_layer_list app_activated_layer_list;
+    struct loader_layer_list expanded_activated_layer_list;
+
+    VkInstance instance;  // layers/ICD instance returned to trampoline
+
+    struct loader_extension_list ext_list;  // icds and loaders extensions
+    union loader_instance_extension_enables enabled_known_extensions;
+
+    VkLayerDbgFunctionNode *DbgFunctionHead;
+    uint32_t num_tmp_report_callbacks;
+    VkDebugReportCallbackCreateInfoEXT *tmp_report_create_infos;
+    VkDebugReportCallbackEXT *tmp_report_callbacks;
+    uint32_t num_tmp_messengers;
+    VkDebugUtilsMessengerCreateInfoEXT *tmp_messenger_create_infos;
+    VkDebugUtilsMessengerEXT *tmp_messengers;
+
+    VkAllocationCallbacks alloc_callbacks;
+
+    bool wsi_surface_enabled;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    bool wsi_win32_surface_enabled;
+#endif
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    bool wsi_wayland_surface_enabled;
+#endif
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    bool wsi_xcb_surface_enabled;
+#endif
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    bool wsi_xlib_surface_enabled;
+#endif
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    bool wsi_android_surface_enabled;
+#endif
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+    bool wsi_macos_surface_enabled;
+#endif
+#ifdef VK_USE_PLATFORM_IOS_MVK
+    bool wsi_ios_surface_enabled;
+#endif
+    bool wsi_headless_surface_enabled;
+#if defined(VK_USE_PLATFORM_METAL_EXT)
+    bool wsi_metal_surface_enabled;
+#endif
+    bool wsi_display_enabled;
+    bool wsi_display_props2_enabled;
+};
+
+// VkPhysicalDevice requires special treatment by the loader.  Firstly, terminator
+// code must be able to get at the struct loader_icd_term in order to call into the
+// proper driver (multiple ICD/GPU case).  This is accomplished by wrapping the
+// VkPhysicalDevice created in terminator_EnumeratePhysicalDevices().
+// Secondly, trampoline code must be able to handle a VkPhysicalDevice that has been
+// wrapped by a layer, so the loader trampoline code wraps the VkPhysicalDevice object
+// as well.  Thus, the loader wraps each created VkPhysicalDevice twice: trampoline
+// code cannot rely on the terminator wrapping, since a layer may also wrap the handle.
+// Because trampoline code wraps the VkPhysicalDevice, all loader trampoline code that
+// passes a VkPhysicalDevice down the chain must unwrap it first.
+
+// Per enumerated PhysicalDevice structure, used to wrap in trampoline code and
+// also same structure used to wrap in terminator code
+struct loader_physical_device_tramp {
+    struct loader_instance_dispatch_table *disp;  // must be first entry in structure
+    struct loader_instance *this_instance;
+    VkPhysicalDevice phys_dev;  // object from layers/loader terminator
+};
+
+// Per enumerated PhysicalDevice structure, used to wrap in terminator code
+struct loader_physical_device_term {
+    struct loader_instance_dispatch_table *disp;  // must be first entry in structure
+    struct loader_icd_term *this_icd_term;
+    uint8_t icd_index;
+    VkPhysicalDevice phys_dev;  // object from ICD
+};
+
+struct loader_struct {
+    struct loader_instance *instances;
+};
+
+struct loader_scanned_icd {
+    char *lib_name;
+    loader_platform_dl_handle handle;
+    uint32_t api_version;
+    uint32_t interface_version;
+    PFN_vkGetInstanceProcAddr GetInstanceProcAddr;
+    PFN_GetPhysicalDeviceProcAddr GetPhysicalDeviceProcAddr;
+    PFN_vkCreateInstance CreateInstance;
+    PFN_vkEnumerateInstanceExtensionProperties EnumerateInstanceExtensionProperties;
+};
+
+static inline struct loader_instance *loader_instance(VkInstance instance) { return (struct loader_instance *)instance; }
+
+static inline VkPhysicalDevice loader_unwrap_physical_device(VkPhysicalDevice physicalDevice) {
+    struct loader_physical_device_tramp *phys_dev = (struct loader_physical_device_tramp *)physicalDevice;
+    return phys_dev->phys_dev;
+}
+
+static inline void loader_set_dispatch(void *obj, const void *data) { *((const void **)obj) = data; }
+
+static inline VkLayerDispatchTable *loader_get_dispatch(const void *obj) { return *((VkLayerDispatchTable **)obj); }
+
+static inline struct loader_dev_dispatch_table *loader_get_dev_dispatch(const void *obj) {
+    return *((struct loader_dev_dispatch_table **)obj);
+}
+
+static inline VkLayerInstanceDispatchTable *loader_get_instance_layer_dispatch(const void *obj) {
+    return *((VkLayerInstanceDispatchTable **)obj);
+}
+
+static inline struct loader_instance_dispatch_table *loader_get_instance_dispatch(const void *obj) {
+    return *((struct loader_instance_dispatch_table **)obj);
+}
+
+static inline void loader_init_dispatch(void *obj, const void *data) {
+#ifdef DEBUG
+    assert(valid_loader_magic_value(obj) &&
+           "Incompatible ICD, first dword must be initialized to "
+           "ICD_LOADER_MAGIC. See loader/README.md for details.");
+#endif
+
+    loader_set_dispatch(obj, data);
+}
+
+// Global variables used across files
+extern struct loader_struct loader;
+extern THREAD_LOCAL_DECL struct loader_instance *tls_instance;
+#if defined(_WIN32) && !defined(LOADER_DYNAMIC_LIB)
+extern LOADER_PLATFORM_THREAD_ONCE_DEFINITION(once_init);
+#endif
+extern loader_platform_thread_mutex loader_lock;
+extern loader_platform_thread_mutex loader_json_lock;
+
+struct loader_msg_callback_map_entry {
+    VkDebugReportCallbackEXT icd_obj;
+    VkDebugReportCallbackEXT loader_obj;
+};
+
+// Helper function definitions
+void *loader_instance_heap_alloc(const struct loader_instance *instance, size_t size, VkSystemAllocationScope allocationScope);
+void loader_instance_heap_free(const struct loader_instance *instance, void *pMemory);
+void *loader_instance_heap_realloc(const struct loader_instance *instance, void *pMemory, size_t orig_size, size_t size,
+                                   VkSystemAllocationScope alloc_scope);
+void *loader_instance_tls_heap_alloc(size_t size);
+void loader_instance_tls_heap_free(void *pMemory);
+void *loader_device_heap_alloc(const struct loader_device *device, size_t size, VkSystemAllocationScope allocationScope);
+void loader_device_heap_free(const struct loader_device *device, void *pMemory);
+void *loader_device_heap_realloc(const struct loader_device *device, void *pMemory, size_t orig_size, size_t size,
+                                 VkSystemAllocationScope alloc_scope);
+
+void loader_log(const struct loader_instance *inst, VkFlags msg_type, int32_t msg_code, const char *format, ...);
+
+bool compare_vk_extension_properties(const VkExtensionProperties *op1, const VkExtensionProperties *op2);
+
+VkResult loaderValidateLayers(const struct loader_instance *inst, const uint32_t layer_count,
+                              const char *const *ppEnabledLayerNames, const struct loader_layer_list *list);
+
+VkResult loader_validate_instance_extensions(struct loader_instance *inst, const struct loader_extension_list *icd_exts,
+                                             const struct loader_layer_list *instance_layer,
+                                             const VkInstanceCreateInfo *pCreateInfo);
+
+void loader_initialize(void);
+bool has_vk_extension_property_array(const VkExtensionProperties *vk_ext_prop, const uint32_t count,
+                                     const VkExtensionProperties *ext_array);
+bool has_vk_extension_property(const VkExtensionProperties *vk_ext_prop, const struct loader_extension_list *ext_list);
+
+VkResult loader_add_to_ext_list(const struct loader_instance *inst, struct loader_extension_list *ext_list,
+                                uint32_t prop_list_count, const VkExtensionProperties *props);
+VkResult loader_add_to_dev_ext_list(const struct loader_instance *inst, struct loader_device_extension_list *ext_list,
+                                    const VkExtensionProperties *props, uint32_t entry_count, char **entrys);
+VkResult loader_add_device_extensions(const struct loader_instance *inst,
+                                      PFN_vkEnumerateDeviceExtensionProperties fpEnumerateDeviceExtensionProperties,
+                                      VkPhysicalDevice physical_device, const char *lib_name,
+                                      struct loader_extension_list *ext_list);
+VkResult loader_init_generic_list(const struct loader_instance *inst, struct loader_generic_list *list_info, size_t element_size);
+void loader_destroy_generic_list(const struct loader_instance *inst, struct loader_generic_list *list);
+void loaderDestroyLayerList(const struct loader_instance *inst, struct loader_device *device, struct loader_layer_list *layer_list);
+void loaderDeleteLayerListAndProperties(const struct loader_instance *inst, struct loader_layer_list *layer_list);
+void loaderAddLayerNameToList(const struct loader_instance *inst, const char *name, const enum layer_type_flags type_flags,
+                              const struct loader_layer_list *source_list, struct loader_layer_list *target_list,
+                              struct loader_layer_list *expanded_target_list);
+void loader_scanned_icd_clear(const struct loader_instance *inst, struct loader_icd_tramp_list *icd_tramp_list);
+VkResult loader_icd_scan(const struct loader_instance *inst, struct loader_icd_tramp_list *icd_tramp_list);
+void loaderScanForLayers(struct loader_instance *inst, struct loader_layer_list *instance_layers);
+void loaderScanForImplicitLayers(struct loader_instance *inst, struct loader_layer_list *instance_layers);
+bool loaderImplicitLayerIsEnabled(const struct loader_instance *inst, const struct loader_layer_properties *prop);
+VkResult loader_get_icd_loader_instance_extensions(const struct loader_instance *inst, struct loader_icd_tramp_list *icd_tramp_list,
+                                                   struct loader_extension_list *inst_exts);
+struct loader_icd_term *loader_get_icd_and_device(const VkDevice device, struct loader_device **found_dev, uint32_t *icd_index);
+void loader_init_dispatch_dev_ext(struct loader_instance *inst, struct loader_device *dev);
+void *loader_dev_ext_gpa(struct loader_instance *inst, const char *funcName);
+void *loader_get_dev_ext_trampoline(uint32_t index);
+bool loader_phys_dev_ext_gpa(struct loader_instance *inst, const char *funcName, bool perform_checking, void **tramp_addr,
+                             void **term_addr);
+void *loader_get_phys_dev_ext_tramp(uint32_t index);
+void *loader_get_phys_dev_ext_termin(uint32_t index);
+struct loader_instance *loader_get_instance(const VkInstance instance);
+void loaderDeactivateLayers(const struct loader_instance *instance, struct loader_device *device, struct loader_layer_list *list);
+struct loader_device *loader_create_logical_device(const struct loader_instance *inst, const VkAllocationCallbacks *pAllocator);
+void loader_add_logical_device(const struct loader_instance *inst, struct loader_icd_term *icd_term,
+                               struct loader_device *found_dev);
+void loader_remove_logical_device(const struct loader_instance *inst, struct loader_icd_term *icd_term,
+                                  struct loader_device *found_dev, const VkAllocationCallbacks *pAllocator);
+// NOTE: Outside of loader, this entry-point is only provided for error
+// cleanup.
+void loader_destroy_logical_device(const struct loader_instance *inst, struct loader_device *dev,
+                                   const VkAllocationCallbacks *pAllocator);
+
+VkResult loaderEnableInstanceLayers(struct loader_instance *inst, const VkInstanceCreateInfo *pCreateInfo,
+                                    const struct loader_layer_list *instance_layers);
+
+VkResult loader_create_instance_chain(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
+                                      struct loader_instance *inst, VkInstance *created_instance);
+
+void loaderActivateInstanceLayerExtensions(struct loader_instance *inst, VkInstance created_inst);
+
+VKAPI_ATTR VkResult VKAPI_CALL loader_layer_create_device(VkInstance instance, VkPhysicalDevice physicalDevice,
+                                                          const VkDeviceCreateInfo *pCreateInfo,
+                                                          const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
+                                                          PFN_vkGetInstanceProcAddr layerGIPA, PFN_vkGetDeviceProcAddr *nextGDPA);
+VKAPI_ATTR void VKAPI_CALL loader_layer_destroy_device(VkDevice device, const VkAllocationCallbacks *pAllocator,
+                                                       PFN_vkDestroyDevice destroyFunction);
+
+VkResult loader_create_device_chain(const VkPhysicalDevice pd, const VkDeviceCreateInfo *pCreateInfo,
+                                    const VkAllocationCallbacks *pAllocator, const struct loader_instance *inst,
+                                    struct loader_device *dev, PFN_vkGetInstanceProcAddr callingLayer,
+                                    PFN_vkGetDeviceProcAddr *layerNextGDPA);
+
+VkResult loader_validate_device_extensions(struct loader_instance *this_instance,
+                                           const struct loader_layer_list *activated_device_layers,
+                                           const struct loader_extension_list *icd_exts, const VkDeviceCreateInfo *pCreateInfo);
+
+VkResult setupLoaderTrampPhysDevs(VkInstance instance);
+VkResult setupLoaderTermPhysDevs(struct loader_instance *inst);
+
+VkStringErrorFlags vk_string_validate(const int max_length, const char *char_array);
+
+#endif  // LOADER_H
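
As a sketch of how the allocation helpers declared above are used inside the loader (mirroring the pattern in setupLoaderTermPhysDevGroups earlier in this patch); example_alloc and its body are illustrative only:

static VkResult example_alloc(const struct loader_instance *inst) {
    // Route the allocation through the application's allocation callbacks, if any.
    VkPhysicalDeviceGroupProperties *group =
        (VkPhysicalDeviceGroupProperties *)loader_instance_heap_alloc(
            inst, sizeof(VkPhysicalDeviceGroupProperties), VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
    if (NULL == group) {
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    }
    // ... fill in the group ...
    loader_instance_heap_free(inst, group);
    return VK_SUCCESS;
}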
diff --git a/src/third_party/vulkan-loader/src/loader/loader.rc b/src/third_party/vulkan-loader/src/loader/loader.rc
new file mode 100755
index 0000000..f705126
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/loader.rc
@@ -0,0 +1,98 @@
+//
+// Copyright (c) 2014-2019 The Khronos Group Inc.
+// Copyright (c) 2014-2019 Valve Corporation
+// Copyright (c) 2014-2019 LunarG, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// Author: David Pinedo <david@lunarg.com>
+//
+
+///////////////////////////////////////////////////////////////////////////////
+///////////////////////////////////////////////////////////////////////////////
+// Start customize section
+// Edit this section for your build
+///////////////////////////////////////////////////////////////////////////////
+///////////////////////////////////////////////////////////////////////////////
+
+#define VERSION_MAJOR               1
+#define VERSION_MINOR               0
+#define VERSION_PATCH               1111
+#define VERSION_BUILDNO             2222
+
+#define VERSION_BUILD_DESCRIPTION   "Dev Build"
+
+// All builds except release builds should set this to 0.
+// Release builds should set this to 1.
+#define VERSION_IS_RELEASEBUILD        0
+
+
+///////////////////////////////////////////////////////////////////////////////
+///////////////////////////////////////////////////////////////////////////////
+// End of customize section
+///////////////////////////////////////////////////////////////////////////////
+///////////////////////////////////////////////////////////////////////////////
+
+#include "winres.h"
+
+#define VER_FILE_VERSION            VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH, VERSION_BUILDNO
+
+#define STRINGIZE2(s) #s
+#define STRINGIZE(s) STRINGIZE2(s)
+
+#if VERSION_IS_RELEASEBUILD==1
+   #define VER_FILE_DESCRIPTION_STR    "Vulkan Loader"
+   #define VER_FILE_VERSION_STR        STRINGIZE(VERSION_MAJOR)        \
+                                       "." STRINGIZE(VERSION_MINOR)    \
+                                       "." STRINGIZE(VERSION_PATCH)    \
+                                       "." STRINGIZE(VERSION_BUILDNO)
+#else
+   #define VER_FILE_DESCRIPTION_STR    "Vulkan Loader - " VERSION_BUILD_DESCRIPTION
+   #define VER_FILE_VERSION_STR        STRINGIZE(VERSION_MAJOR)        \
+                                    "." STRINGIZE(VERSION_MINOR)    \
+                                    "." STRINGIZE(VERSION_PATCH)    \
+                                    "." STRINGIZE(VERSION_BUILDNO) \
+                                    "." VERSION_BUILD_DESCRIPTION
+#endif
+
+
+VS_VERSION_INFO VERSIONINFO
+ FILEVERSION VER_FILE_VERSION
+ PRODUCTVERSION VER_FILE_VERSION
+ FILEFLAGSMASK 0x3fL
+#ifdef _DEBUG
+ FILEFLAGS VS_FF_DEBUG
+#else
+ FILEFLAGS 0x0L
+#endif
+
+ FILEOS 0x00000L
+ FILETYPE VFT_DLL
+ FILESUBTYPE 0x0L
+BEGIN
+    BLOCK "StringFileInfo"
+    BEGIN
+        BLOCK "04090000"
+        BEGIN
+            VALUE "FileDescription", VER_FILE_DESCRIPTION_STR
+            VALUE "FileVersion", VER_FILE_VERSION_STR
+            VALUE "LegalCopyright", "Copyright (C) 2015-2019"
+            VALUE "ProductName", "Vulkan Runtime"
+            VALUE "ProductVersion", VER_FILE_VERSION_STR
+        END
+    END
+    BLOCK "VarFileInfo"
+    BEGIN
+        VALUE "Translation", 0x409, 0000
+    END
+END
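
The two-level STRINGIZE indirection in loader.rc is what turns the numeric VERSION_* macros into the dotted VER_FILE_VERSION_STR: the extra macro layer forces the preprocessor to expand the argument before stringizing it. A minimal standalone C sketch of the same pattern (the MAJOR/MINOR names below are illustrative, not part of the loader):

    #include <stdio.h>

    #define STRINGIZE2(s) #s
    #define STRINGIZE(s) STRINGIZE2(s)

    #define MAJOR 1
    #define MINOR 0

    int main(void) {
        // With the indirection, STRINGIZE(MAJOR) yields "1";
        // a single-level #MAJOR would yield the literal text "MAJOR".
        const char *version = STRINGIZE(MAJOR) "." STRINGIZE(MINOR);
        printf("%s\n", version);  // prints 1.0
        return 0;
    }
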
diff --git a/src/third_party/vulkan-loader/src/loader/loader_cmake_config.h.in b/src/third_party/vulkan-loader/src/loader/loader_cmake_config.h.in
new file mode 100644
index 0000000..3bbc461
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/loader_cmake_config.h.in
@@ -0,0 +1,2 @@
+#cmakedefine HAVE_SECURE_GETENV
+#cmakedefine HAVE___SECURE_GETENV
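
At configure time CMake rewrites each #cmakedefine line to either a real #define (when the corresponding CMake variable is set) or a commented-out "/* #undef ... */" placeholder, so the generated loader_cmake_config.h records whether secure_getenv or __secure_getenv was found. A hedged sketch of the usual consumer pattern, assuming nothing beyond the two macros above (the helper name is illustrative, not the loader's actual function):

    #ifndef _GNU_SOURCE
    #define _GNU_SOURCE  // needed for secure_getenv to be declared on glibc
    #endif
    #include <stdlib.h>

    // Prefer the setuid/setgid-safe getenv variant when the configure
    // step detected one, and fall back to plain getenv otherwise.
    static const char *getenv_sketch(const char *name) {
    #if defined(HAVE_SECURE_GETENV)
        return secure_getenv(name);
    #elif defined(HAVE___SECURE_GETENV)
        return __secure_getenv(name);
    #else
        return getenv(name);
    #endif
    }
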
diff --git a/src/third_party/vulkan-loader/src/loader/murmurhash.c b/src/third_party/vulkan-loader/src/loader/murmurhash.c
new file mode 100644
index 0000000..40f0d5e
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/murmurhash.c
@@ -0,0 +1,98 @@
+
+/**
+ * `murmurhash.h' - murmurhash
+ *
+ * copyright (c) 2014 joseph werle <joseph.werle@gmail.com>
+ * Copyright (c) 2015-2016 The Khronos Group Inc.
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and/or associated documentation files (the "Materials"), to
+ * deal in the Materials without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Materials, and to permit persons to whom the Materials are
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included in
+ * all copies or substantial portions of the Materials.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ *
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+ * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+ * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE
+ * USE OR OTHER DEALINGS IN THE MATERIALS.
+ */
+
+#include <stdlib.h>
+#include <stdio.h>
+#include <stdint.h>
+#include "murmurhash.h"
+
+uint32_t murmurhash(const char *key, size_t len, uint32_t seed) {
+    uint32_t c1 = 0xcc9e2d51;
+    uint32_t c2 = 0x1b873593;
+    uint32_t r1 = 15;
+    uint32_t r2 = 13;
+    uint32_t m = 5;
+    uint32_t n = 0xe6546b64;
+    uint32_t h = 0;
+    uint32_t k = 0;
+    const uint8_t *d = (const uint8_t *)key;  // byte view of `key'
+    const uint32_t *chunks = NULL;
+    const uint8_t *tail = NULL;  // tail - the 0-3 bytes left after the full chunks
+    int i = 0;
+    int l = (int)len / 4;  // number of full 4-byte chunks
+
+    h = seed;
+
+    chunks = (const uint32_t *)(d + l * 4);  // end of the full chunks; read backwards via negative indices below
+    tail = (const uint8_t *)(d + l * 4);     // start of the 0-3 byte tail of `key'
+
+    // for each 4 byte chunk of `key'
+    for (i = -l; i != 0; ++i) {
+        // next 4 byte chunk of `key'
+        k = chunks[i];
+
+        // encode next 4 byte chunk of `key'
+        k *= c1;
+        k = (k << r1) | (k >> (32 - r1));
+        k *= c2;
+
+        // append to hash
+        h ^= k;
+        h = (h << r2) | (h >> (32 - r2));
+        h = h * m + n;
+    }
+
+    k = 0;
+
+    // remainder
+    switch (len & 3) {  // `len % 4'
+        case 3:
+            k ^= (tail[2] << 16);
+            // fall through
+        case 2:
+            k ^= (tail[1] << 8);
+            // fall through
+        case 1:
+            k ^= tail[0];
+            k *= c1;
+            k = (k << r1) | (k >> (32 - r1));
+            k *= c2;
+            h ^= k;
+    }
+
+    h ^= len;
+
+    h ^= (h >> 16);
+    h *= 0x85ebca6b;
+    h ^= (h >> 13);
+    h *= 0xc2b2ae35;
+    h ^= (h >> 16);
+
+    return h;
+}
diff --git a/src/third_party/vulkan-loader/src/loader/murmurhash.h b/src/third_party/vulkan-loader/src/loader/murmurhash.h
new file mode 100644
index 0000000..775532e
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/murmurhash.h
@@ -0,0 +1,52 @@
+
+/**
+ * `murmurhash.h' - murmurhash
+ *
+ * copyright (c) 2014 joseph werle <joseph.werle@gmail.com>
+ * Copyright (c) 2015-2016 The Khronos Group Inc.
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and/or associated documentation files (the "Materials"), to
+ * deal in the Materials without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Materials, and to permit persons to whom the Materials are
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included in
+ * all copies or substantial portions of the Materials.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ *
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+ * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+ * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE
+ * USE OR OTHER DEALINGS IN THE MATERIALS.
+ */
+
+#ifndef MURMURHASH_H
+#define MURMURHASH_H 1
+
+#include <stdint.h>
+
+#define MURMURHASH_VERSION "0.0.3"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/**
+ * Returns a murmur hash of `key' based on `seed'
+ * using the MurmurHash3 algorithm
+ */
+
+uint32_t murmurhash(const char *key, size_t len, uint32_t seed);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
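
murmurhash() is a plain seeded 32-bit string hash; the surrounding loader code uses it to bucket extension entrypoint names, and the same seed and key always produce the same value. A minimal usage sketch (the key string and seed below are arbitrary examples):

    #include <stdio.h>
    #include <string.h>
    #include "murmurhash.h"

    int main(void) {
        const char *key = "vkSomeUnknownExtensionEntrypoint";  // arbitrary example key
        uint32_t seed = 0;  // any fixed seed; reuse the same seed to get matching hashes
        uint32_t h = murmurhash(key, strlen(key), seed);
        printf("hash = 0x%08x\n", (unsigned)h);  // value depends on key and seed
        return 0;
    }
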
diff --git a/src/third_party/vulkan-loader/src/loader/phys_dev_ext.c b/src/third_party/vulkan-loader/src/loader/phys_dev_ext.c
new file mode 100644
index 0000000..91e0ef8
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/phys_dev_ext.c
@@ -0,0 +1,1056 @@
+/*
+ *
+ * Copyright (c) 2016-17 The Khronos Group Inc.
+ * Copyright (c) 2016-17 Valve Corporation
+ * Copyright (c) 2016-17 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Young <marky@lunarg.com>
+ * Author: Lenny Komow <lenny@lunarg.com>
+ *
+ */
+
+// This code is used to enable generic instance extensions which use a physical device
+// as the first parameter.  If the extension is already known by the loader, it will
+// not use this code, but instead use the more direct route.  However, if it is
+// unknown to the loader, it will use this code.  Technically, this is not trampoline
+// code since we don't want to optimize it out.
+
+#include "vk_loader_platform.h"
+#include "loader.h"
+
+#if defined(__GNUC__) && !defined(__clang__)
+#pragma GCC optimize(3)  // force gcc to use tail-calls
+#endif
+
+// Declarations for the trampoline
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp0(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp1(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp2(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp3(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp4(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp5(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp6(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp7(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp8(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp9(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp10(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp11(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp12(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp13(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp14(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp15(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp16(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp17(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp18(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp19(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp20(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp21(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp22(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp23(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp24(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp25(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp26(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp27(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp28(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp29(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp30(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp31(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp32(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp33(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp34(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp35(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp36(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp37(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp38(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp39(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp40(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp41(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp42(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp43(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp44(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp45(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp46(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp47(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp48(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp49(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp50(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp51(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp52(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp53(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp54(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp55(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp56(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp57(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp58(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp59(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp60(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp61(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp62(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp63(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp64(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp65(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp66(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp67(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp68(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp69(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp70(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp71(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp72(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp73(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp74(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp75(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp76(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp77(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp78(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp79(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp80(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp81(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp82(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp83(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp84(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp85(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp86(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp87(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp88(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp89(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp90(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp91(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp92(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp93(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp94(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp95(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp96(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp97(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp98(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp99(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp100(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp101(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp102(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp103(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp104(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp105(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp106(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp107(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp108(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp109(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp110(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp111(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp112(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp113(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp114(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp115(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp116(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp117(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp118(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp119(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp120(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp121(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp122(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp123(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp124(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp125(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp126(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp127(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp128(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp129(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp130(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp131(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp132(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp133(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp134(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp135(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp136(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp137(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp138(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp139(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp140(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp141(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp142(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp143(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp144(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp145(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp146(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp147(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp148(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp149(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp150(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp151(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp152(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp153(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp154(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp155(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp156(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp157(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp158(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp159(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp160(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp161(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp162(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp163(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp164(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp165(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp166(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp167(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp168(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp169(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp170(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp171(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp172(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp173(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp174(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp175(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp176(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp177(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp178(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp179(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp180(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp181(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp182(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp183(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp184(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp185(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp186(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp187(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp188(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp189(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp190(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp191(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp192(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp193(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp194(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp195(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp196(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp197(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp198(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp199(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp200(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp201(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp202(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp203(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp204(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp205(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp206(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp207(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp208(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp209(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp210(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp211(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp212(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp213(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp214(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp215(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp216(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp217(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp218(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp219(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp220(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp221(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp222(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp223(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp224(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp225(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp226(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp227(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp228(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp229(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp230(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp231(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp232(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp233(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp234(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp235(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp236(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp237(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp238(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp239(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp240(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp241(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp242(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp243(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp244(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp245(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp246(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp247(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp248(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp249(VkPhysicalDevice);
+
+// Disable clang-format for lists of macros
+// clang-format off
+
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin0(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin1(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin2(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin3(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin4(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin5(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin6(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin7(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin8(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin9(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin10(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin11(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin12(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin13(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin14(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin15(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin16(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin17(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin18(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin19(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin20(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin21(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin22(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin23(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin24(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin25(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin26(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin27(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin28(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin29(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin30(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin31(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin32(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin33(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin34(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin35(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin36(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin37(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin38(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin39(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin40(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin41(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin42(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin43(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin44(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin45(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin46(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin47(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin48(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin49(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin50(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin51(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin52(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin53(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin54(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin55(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin56(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin57(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin58(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin59(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin60(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin61(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin62(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin63(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin64(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin65(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin66(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin67(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin68(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin69(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin70(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin71(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin72(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin73(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin74(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin75(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin76(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin77(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin78(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin79(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin80(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin81(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin82(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin83(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin84(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin85(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin86(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin87(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin88(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin89(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin90(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin91(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin92(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin93(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin94(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin95(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin96(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin97(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin98(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin99(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin100(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin101(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin102(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin103(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin104(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin105(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin106(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin107(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin108(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin109(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin110(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin111(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin112(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin113(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin114(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin115(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin116(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin117(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin118(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin119(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin120(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin121(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin122(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin123(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin124(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin125(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin126(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin127(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin128(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin129(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin130(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin131(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin132(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin133(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin134(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin135(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin136(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin137(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin138(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin139(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin140(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin141(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin142(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin143(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin144(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin145(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin146(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin147(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin148(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin149(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin150(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin151(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin152(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin153(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin154(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin155(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin156(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin157(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin158(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin159(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin160(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin161(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin162(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin163(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin164(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin165(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin166(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin167(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin168(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin169(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin170(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin171(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin172(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin173(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin174(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin175(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin176(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin177(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin178(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin179(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin180(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin181(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin182(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin183(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin184(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin185(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin186(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin187(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin188(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin189(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin190(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin191(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin192(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin193(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin194(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin195(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin196(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin197(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin198(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin199(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin200(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin201(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin202(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin203(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin204(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin205(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin206(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin207(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin208(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin209(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin210(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin211(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin212(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin213(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin214(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin215(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin216(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin217(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin218(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin219(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin220(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin221(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin222(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin223(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin224(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin225(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin226(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin227(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin228(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin229(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin230(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin231(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin232(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin233(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin234(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin235(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin236(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin237(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin238(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin239(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin240(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin241(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin242(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin243(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin244(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin245(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin246(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin247(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin248(VkPhysicalDevice);
+VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin249(VkPhysicalDevice);
+
+
+void *loader_get_phys_dev_ext_tramp(uint32_t index) {
+    switch (index) {
+#define TRAMP_CASE_HANDLE(num) case num: return vkPhysDevExtTramp##num
+        TRAMP_CASE_HANDLE(0);
+        TRAMP_CASE_HANDLE(1);
+        TRAMP_CASE_HANDLE(2);
+        TRAMP_CASE_HANDLE(3);
+        TRAMP_CASE_HANDLE(4);
+        TRAMP_CASE_HANDLE(5);
+        TRAMP_CASE_HANDLE(6);
+        TRAMP_CASE_HANDLE(7);
+        TRAMP_CASE_HANDLE(8);
+        TRAMP_CASE_HANDLE(9);
+        TRAMP_CASE_HANDLE(10);
+        TRAMP_CASE_HANDLE(11);
+        TRAMP_CASE_HANDLE(12);
+        TRAMP_CASE_HANDLE(13);
+        TRAMP_CASE_HANDLE(14);
+        TRAMP_CASE_HANDLE(15);
+        TRAMP_CASE_HANDLE(16);
+        TRAMP_CASE_HANDLE(17);
+        TRAMP_CASE_HANDLE(18);
+        TRAMP_CASE_HANDLE(19);
+        TRAMP_CASE_HANDLE(20);
+        TRAMP_CASE_HANDLE(21);
+        TRAMP_CASE_HANDLE(22);
+        TRAMP_CASE_HANDLE(23);
+        TRAMP_CASE_HANDLE(24);
+        TRAMP_CASE_HANDLE(25);
+        TRAMP_CASE_HANDLE(26);
+        TRAMP_CASE_HANDLE(27);
+        TRAMP_CASE_HANDLE(28);
+        TRAMP_CASE_HANDLE(29);
+        TRAMP_CASE_HANDLE(30);
+        TRAMP_CASE_HANDLE(31);
+        TRAMP_CASE_HANDLE(32);
+        TRAMP_CASE_HANDLE(33);
+        TRAMP_CASE_HANDLE(34);
+        TRAMP_CASE_HANDLE(35);
+        TRAMP_CASE_HANDLE(36);
+        TRAMP_CASE_HANDLE(37);
+        TRAMP_CASE_HANDLE(38);
+        TRAMP_CASE_HANDLE(39);
+        TRAMP_CASE_HANDLE(40);
+        TRAMP_CASE_HANDLE(41);
+        TRAMP_CASE_HANDLE(42);
+        TRAMP_CASE_HANDLE(43);
+        TRAMP_CASE_HANDLE(44);
+        TRAMP_CASE_HANDLE(45);
+        TRAMP_CASE_HANDLE(46);
+        TRAMP_CASE_HANDLE(47);
+        TRAMP_CASE_HANDLE(48);
+        TRAMP_CASE_HANDLE(49);
+        TRAMP_CASE_HANDLE(50);
+        TRAMP_CASE_HANDLE(51);
+        TRAMP_CASE_HANDLE(52);
+        TRAMP_CASE_HANDLE(53);
+        TRAMP_CASE_HANDLE(54);
+        TRAMP_CASE_HANDLE(55);
+        TRAMP_CASE_HANDLE(56);
+        TRAMP_CASE_HANDLE(57);
+        TRAMP_CASE_HANDLE(58);
+        TRAMP_CASE_HANDLE(59);
+        TRAMP_CASE_HANDLE(60);
+        TRAMP_CASE_HANDLE(61);
+        TRAMP_CASE_HANDLE(62);
+        TRAMP_CASE_HANDLE(63);
+        TRAMP_CASE_HANDLE(64);
+        TRAMP_CASE_HANDLE(65);
+        TRAMP_CASE_HANDLE(66);
+        TRAMP_CASE_HANDLE(67);
+        TRAMP_CASE_HANDLE(68);
+        TRAMP_CASE_HANDLE(69);
+        TRAMP_CASE_HANDLE(70);
+        TRAMP_CASE_HANDLE(71);
+        TRAMP_CASE_HANDLE(72);
+        TRAMP_CASE_HANDLE(73);
+        TRAMP_CASE_HANDLE(74);
+        TRAMP_CASE_HANDLE(75);
+        TRAMP_CASE_HANDLE(76);
+        TRAMP_CASE_HANDLE(77);
+        TRAMP_CASE_HANDLE(78);
+        TRAMP_CASE_HANDLE(79);
+        TRAMP_CASE_HANDLE(80);
+        TRAMP_CASE_HANDLE(81);
+        TRAMP_CASE_HANDLE(82);
+        TRAMP_CASE_HANDLE(83);
+        TRAMP_CASE_HANDLE(84);
+        TRAMP_CASE_HANDLE(85);
+        TRAMP_CASE_HANDLE(86);
+        TRAMP_CASE_HANDLE(87);
+        TRAMP_CASE_HANDLE(88);
+        TRAMP_CASE_HANDLE(89);
+        TRAMP_CASE_HANDLE(90);
+        TRAMP_CASE_HANDLE(91);
+        TRAMP_CASE_HANDLE(92);
+        TRAMP_CASE_HANDLE(93);
+        TRAMP_CASE_HANDLE(94);
+        TRAMP_CASE_HANDLE(95);
+        TRAMP_CASE_HANDLE(96);
+        TRAMP_CASE_HANDLE(97);
+        TRAMP_CASE_HANDLE(98);
+        TRAMP_CASE_HANDLE(99);
+        TRAMP_CASE_HANDLE(100);
+        TRAMP_CASE_HANDLE(101);
+        TRAMP_CASE_HANDLE(102);
+        TRAMP_CASE_HANDLE(103);
+        TRAMP_CASE_HANDLE(104);
+        TRAMP_CASE_HANDLE(105);
+        TRAMP_CASE_HANDLE(106);
+        TRAMP_CASE_HANDLE(107);
+        TRAMP_CASE_HANDLE(108);
+        TRAMP_CASE_HANDLE(109);
+        TRAMP_CASE_HANDLE(110);
+        TRAMP_CASE_HANDLE(111);
+        TRAMP_CASE_HANDLE(112);
+        TRAMP_CASE_HANDLE(113);
+        TRAMP_CASE_HANDLE(114);
+        TRAMP_CASE_HANDLE(115);
+        TRAMP_CASE_HANDLE(116);
+        TRAMP_CASE_HANDLE(117);
+        TRAMP_CASE_HANDLE(118);
+        TRAMP_CASE_HANDLE(119);
+        TRAMP_CASE_HANDLE(120);
+        TRAMP_CASE_HANDLE(121);
+        TRAMP_CASE_HANDLE(122);
+        TRAMP_CASE_HANDLE(123);
+        TRAMP_CASE_HANDLE(124);
+        TRAMP_CASE_HANDLE(125);
+        TRAMP_CASE_HANDLE(126);
+        TRAMP_CASE_HANDLE(127);
+        TRAMP_CASE_HANDLE(128);
+        TRAMP_CASE_HANDLE(129);
+        TRAMP_CASE_HANDLE(130);
+        TRAMP_CASE_HANDLE(131);
+        TRAMP_CASE_HANDLE(132);
+        TRAMP_CASE_HANDLE(133);
+        TRAMP_CASE_HANDLE(134);
+        TRAMP_CASE_HANDLE(135);
+        TRAMP_CASE_HANDLE(136);
+        TRAMP_CASE_HANDLE(137);
+        TRAMP_CASE_HANDLE(138);
+        TRAMP_CASE_HANDLE(139);
+        TRAMP_CASE_HANDLE(140);
+        TRAMP_CASE_HANDLE(141);
+        TRAMP_CASE_HANDLE(142);
+        TRAMP_CASE_HANDLE(143);
+        TRAMP_CASE_HANDLE(144);
+        TRAMP_CASE_HANDLE(145);
+        TRAMP_CASE_HANDLE(146);
+        TRAMP_CASE_HANDLE(147);
+        TRAMP_CASE_HANDLE(148);
+        TRAMP_CASE_HANDLE(149);
+        TRAMP_CASE_HANDLE(150);
+        TRAMP_CASE_HANDLE(151);
+        TRAMP_CASE_HANDLE(152);
+        TRAMP_CASE_HANDLE(153);
+        TRAMP_CASE_HANDLE(154);
+        TRAMP_CASE_HANDLE(155);
+        TRAMP_CASE_HANDLE(156);
+        TRAMP_CASE_HANDLE(157);
+        TRAMP_CASE_HANDLE(158);
+        TRAMP_CASE_HANDLE(159);
+        TRAMP_CASE_HANDLE(160);
+        TRAMP_CASE_HANDLE(161);
+        TRAMP_CASE_HANDLE(162);
+        TRAMP_CASE_HANDLE(163);
+        TRAMP_CASE_HANDLE(164);
+        TRAMP_CASE_HANDLE(165);
+        TRAMP_CASE_HANDLE(166);
+        TRAMP_CASE_HANDLE(167);
+        TRAMP_CASE_HANDLE(168);
+        TRAMP_CASE_HANDLE(169);
+        TRAMP_CASE_HANDLE(170);
+        TRAMP_CASE_HANDLE(171);
+        TRAMP_CASE_HANDLE(172);
+        TRAMP_CASE_HANDLE(173);
+        TRAMP_CASE_HANDLE(174);
+        TRAMP_CASE_HANDLE(175);
+        TRAMP_CASE_HANDLE(176);
+        TRAMP_CASE_HANDLE(177);
+        TRAMP_CASE_HANDLE(178);
+        TRAMP_CASE_HANDLE(179);
+        TRAMP_CASE_HANDLE(180);
+        TRAMP_CASE_HANDLE(181);
+        TRAMP_CASE_HANDLE(182);
+        TRAMP_CASE_HANDLE(183);
+        TRAMP_CASE_HANDLE(184);
+        TRAMP_CASE_HANDLE(185);
+        TRAMP_CASE_HANDLE(186);
+        TRAMP_CASE_HANDLE(187);
+        TRAMP_CASE_HANDLE(188);
+        TRAMP_CASE_HANDLE(189);
+        TRAMP_CASE_HANDLE(190);
+        TRAMP_CASE_HANDLE(191);
+        TRAMP_CASE_HANDLE(192);
+        TRAMP_CASE_HANDLE(193);
+        TRAMP_CASE_HANDLE(194);
+        TRAMP_CASE_HANDLE(195);
+        TRAMP_CASE_HANDLE(196);
+        TRAMP_CASE_HANDLE(197);
+        TRAMP_CASE_HANDLE(198);
+        TRAMP_CASE_HANDLE(199);
+        TRAMP_CASE_HANDLE(200);
+        TRAMP_CASE_HANDLE(201);
+        TRAMP_CASE_HANDLE(202);
+        TRAMP_CASE_HANDLE(203);
+        TRAMP_CASE_HANDLE(204);
+        TRAMP_CASE_HANDLE(205);
+        TRAMP_CASE_HANDLE(206);
+        TRAMP_CASE_HANDLE(207);
+        TRAMP_CASE_HANDLE(208);
+        TRAMP_CASE_HANDLE(209);
+        TRAMP_CASE_HANDLE(210);
+        TRAMP_CASE_HANDLE(211);
+        TRAMP_CASE_HANDLE(212);
+        TRAMP_CASE_HANDLE(213);
+        TRAMP_CASE_HANDLE(214);
+        TRAMP_CASE_HANDLE(215);
+        TRAMP_CASE_HANDLE(216);
+        TRAMP_CASE_HANDLE(217);
+        TRAMP_CASE_HANDLE(218);
+        TRAMP_CASE_HANDLE(219);
+        TRAMP_CASE_HANDLE(220);
+        TRAMP_CASE_HANDLE(221);
+        TRAMP_CASE_HANDLE(222);
+        TRAMP_CASE_HANDLE(223);
+        TRAMP_CASE_HANDLE(224);
+        TRAMP_CASE_HANDLE(225);
+        TRAMP_CASE_HANDLE(226);
+        TRAMP_CASE_HANDLE(227);
+        TRAMP_CASE_HANDLE(228);
+        TRAMP_CASE_HANDLE(229);
+        TRAMP_CASE_HANDLE(230);
+        TRAMP_CASE_HANDLE(231);
+        TRAMP_CASE_HANDLE(232);
+        TRAMP_CASE_HANDLE(233);
+        TRAMP_CASE_HANDLE(234);
+        TRAMP_CASE_HANDLE(235);
+        TRAMP_CASE_HANDLE(236);
+        TRAMP_CASE_HANDLE(237);
+        TRAMP_CASE_HANDLE(238);
+        TRAMP_CASE_HANDLE(239);
+        TRAMP_CASE_HANDLE(240);
+        TRAMP_CASE_HANDLE(241);
+        TRAMP_CASE_HANDLE(242);
+        TRAMP_CASE_HANDLE(243);
+        TRAMP_CASE_HANDLE(244);
+        TRAMP_CASE_HANDLE(245);
+        TRAMP_CASE_HANDLE(246);
+        TRAMP_CASE_HANDLE(247);
+        TRAMP_CASE_HANDLE(248);
+        TRAMP_CASE_HANDLE(249);
+    }
+    return NULL;
+}
+
+void *loader_get_phys_dev_ext_termin(uint32_t index) {
+    switch (index) {
+#define TERM_CASE_HANDLE(num) case num: return vkPhysDevExtTermin##num
+        TERM_CASE_HANDLE(0);
+        TERM_CASE_HANDLE(1);
+        TERM_CASE_HANDLE(2);
+        TERM_CASE_HANDLE(3);
+        TERM_CASE_HANDLE(4);
+        TERM_CASE_HANDLE(5);
+        TERM_CASE_HANDLE(6);
+        TERM_CASE_HANDLE(7);
+        TERM_CASE_HANDLE(8);
+        TERM_CASE_HANDLE(9);
+        TERM_CASE_HANDLE(10);
+        TERM_CASE_HANDLE(11);
+        TERM_CASE_HANDLE(12);
+        TERM_CASE_HANDLE(13);
+        TERM_CASE_HANDLE(14);
+        TERM_CASE_HANDLE(15);
+        TERM_CASE_HANDLE(16);
+        TERM_CASE_HANDLE(17);
+        TERM_CASE_HANDLE(18);
+        TERM_CASE_HANDLE(19);
+        TERM_CASE_HANDLE(20);
+        TERM_CASE_HANDLE(21);
+        TERM_CASE_HANDLE(22);
+        TERM_CASE_HANDLE(23);
+        TERM_CASE_HANDLE(24);
+        TERM_CASE_HANDLE(25);
+        TERM_CASE_HANDLE(26);
+        TERM_CASE_HANDLE(27);
+        TERM_CASE_HANDLE(28);
+        TERM_CASE_HANDLE(29);
+        TERM_CASE_HANDLE(30);
+        TERM_CASE_HANDLE(31);
+        TERM_CASE_HANDLE(32);
+        TERM_CASE_HANDLE(33);
+        TERM_CASE_HANDLE(34);
+        TERM_CASE_HANDLE(35);
+        TERM_CASE_HANDLE(36);
+        TERM_CASE_HANDLE(37);
+        TERM_CASE_HANDLE(38);
+        TERM_CASE_HANDLE(39);
+        TERM_CASE_HANDLE(40);
+        TERM_CASE_HANDLE(41);
+        TERM_CASE_HANDLE(42);
+        TERM_CASE_HANDLE(43);
+        TERM_CASE_HANDLE(44);
+        TERM_CASE_HANDLE(45);
+        TERM_CASE_HANDLE(46);
+        TERM_CASE_HANDLE(47);
+        TERM_CASE_HANDLE(48);
+        TERM_CASE_HANDLE(49);
+        TERM_CASE_HANDLE(50);
+        TERM_CASE_HANDLE(51);
+        TERM_CASE_HANDLE(52);
+        TERM_CASE_HANDLE(53);
+        TERM_CASE_HANDLE(54);
+        TERM_CASE_HANDLE(55);
+        TERM_CASE_HANDLE(56);
+        TERM_CASE_HANDLE(57);
+        TERM_CASE_HANDLE(58);
+        TERM_CASE_HANDLE(59);
+        TERM_CASE_HANDLE(60);
+        TERM_CASE_HANDLE(61);
+        TERM_CASE_HANDLE(62);
+        TERM_CASE_HANDLE(63);
+        TERM_CASE_HANDLE(64);
+        TERM_CASE_HANDLE(65);
+        TERM_CASE_HANDLE(66);
+        TERM_CASE_HANDLE(67);
+        TERM_CASE_HANDLE(68);
+        TERM_CASE_HANDLE(69);
+        TERM_CASE_HANDLE(70);
+        TERM_CASE_HANDLE(71);
+        TERM_CASE_HANDLE(72);
+        TERM_CASE_HANDLE(73);
+        TERM_CASE_HANDLE(74);
+        TERM_CASE_HANDLE(75);
+        TERM_CASE_HANDLE(76);
+        TERM_CASE_HANDLE(77);
+        TERM_CASE_HANDLE(78);
+        TERM_CASE_HANDLE(79);
+        TERM_CASE_HANDLE(80);
+        TERM_CASE_HANDLE(81);
+        TERM_CASE_HANDLE(82);
+        TERM_CASE_HANDLE(83);
+        TERM_CASE_HANDLE(84);
+        TERM_CASE_HANDLE(85);
+        TERM_CASE_HANDLE(86);
+        TERM_CASE_HANDLE(87);
+        TERM_CASE_HANDLE(88);
+        TERM_CASE_HANDLE(89);
+        TERM_CASE_HANDLE(90);
+        TERM_CASE_HANDLE(91);
+        TERM_CASE_HANDLE(92);
+        TERM_CASE_HANDLE(93);
+        TERM_CASE_HANDLE(94);
+        TERM_CASE_HANDLE(95);
+        TERM_CASE_HANDLE(96);
+        TERM_CASE_HANDLE(97);
+        TERM_CASE_HANDLE(98);
+        TERM_CASE_HANDLE(99);
+        TERM_CASE_HANDLE(100);
+        TERM_CASE_HANDLE(101);
+        TERM_CASE_HANDLE(102);
+        TERM_CASE_HANDLE(103);
+        TERM_CASE_HANDLE(104);
+        TERM_CASE_HANDLE(105);
+        TERM_CASE_HANDLE(106);
+        TERM_CASE_HANDLE(107);
+        TERM_CASE_HANDLE(108);
+        TERM_CASE_HANDLE(109);
+        TERM_CASE_HANDLE(110);
+        TERM_CASE_HANDLE(111);
+        TERM_CASE_HANDLE(112);
+        TERM_CASE_HANDLE(113);
+        TERM_CASE_HANDLE(114);
+        TERM_CASE_HANDLE(115);
+        TERM_CASE_HANDLE(116);
+        TERM_CASE_HANDLE(117);
+        TERM_CASE_HANDLE(118);
+        TERM_CASE_HANDLE(119);
+        TERM_CASE_HANDLE(120);
+        TERM_CASE_HANDLE(121);
+        TERM_CASE_HANDLE(122);
+        TERM_CASE_HANDLE(123);
+        TERM_CASE_HANDLE(124);
+        TERM_CASE_HANDLE(125);
+        TERM_CASE_HANDLE(126);
+        TERM_CASE_HANDLE(127);
+        TERM_CASE_HANDLE(128);
+        TERM_CASE_HANDLE(129);
+        TERM_CASE_HANDLE(130);
+        TERM_CASE_HANDLE(131);
+        TERM_CASE_HANDLE(132);
+        TERM_CASE_HANDLE(133);
+        TERM_CASE_HANDLE(134);
+        TERM_CASE_HANDLE(135);
+        TERM_CASE_HANDLE(136);
+        TERM_CASE_HANDLE(137);
+        TERM_CASE_HANDLE(138);
+        TERM_CASE_HANDLE(139);
+        TERM_CASE_HANDLE(140);
+        TERM_CASE_HANDLE(141);
+        TERM_CASE_HANDLE(142);
+        TERM_CASE_HANDLE(143);
+        TERM_CASE_HANDLE(144);
+        TERM_CASE_HANDLE(145);
+        TERM_CASE_HANDLE(146);
+        TERM_CASE_HANDLE(147);
+        TERM_CASE_HANDLE(148);
+        TERM_CASE_HANDLE(149);
+        TERM_CASE_HANDLE(150);
+        TERM_CASE_HANDLE(151);
+        TERM_CASE_HANDLE(152);
+        TERM_CASE_HANDLE(153);
+        TERM_CASE_HANDLE(154);
+        TERM_CASE_HANDLE(155);
+        TERM_CASE_HANDLE(156);
+        TERM_CASE_HANDLE(157);
+        TERM_CASE_HANDLE(158);
+        TERM_CASE_HANDLE(159);
+        TERM_CASE_HANDLE(160);
+        TERM_CASE_HANDLE(161);
+        TERM_CASE_HANDLE(162);
+        TERM_CASE_HANDLE(163);
+        TERM_CASE_HANDLE(164);
+        TERM_CASE_HANDLE(165);
+        TERM_CASE_HANDLE(166);
+        TERM_CASE_HANDLE(167);
+        TERM_CASE_HANDLE(168);
+        TERM_CASE_HANDLE(169);
+        TERM_CASE_HANDLE(170);
+        TERM_CASE_HANDLE(171);
+        TERM_CASE_HANDLE(172);
+        TERM_CASE_HANDLE(173);
+        TERM_CASE_HANDLE(174);
+        TERM_CASE_HANDLE(175);
+        TERM_CASE_HANDLE(176);
+        TERM_CASE_HANDLE(177);
+        TERM_CASE_HANDLE(178);
+        TERM_CASE_HANDLE(179);
+        TERM_CASE_HANDLE(180);
+        TERM_CASE_HANDLE(181);
+        TERM_CASE_HANDLE(182);
+        TERM_CASE_HANDLE(183);
+        TERM_CASE_HANDLE(184);
+        TERM_CASE_HANDLE(185);
+        TERM_CASE_HANDLE(186);
+        TERM_CASE_HANDLE(187);
+        TERM_CASE_HANDLE(188);
+        TERM_CASE_HANDLE(189);
+        TERM_CASE_HANDLE(190);
+        TERM_CASE_HANDLE(191);
+        TERM_CASE_HANDLE(192);
+        TERM_CASE_HANDLE(193);
+        TERM_CASE_HANDLE(194);
+        TERM_CASE_HANDLE(195);
+        TERM_CASE_HANDLE(196);
+        TERM_CASE_HANDLE(197);
+        TERM_CASE_HANDLE(198);
+        TERM_CASE_HANDLE(199);
+        TERM_CASE_HANDLE(200);
+        TERM_CASE_HANDLE(201);
+        TERM_CASE_HANDLE(202);
+        TERM_CASE_HANDLE(203);
+        TERM_CASE_HANDLE(204);
+        TERM_CASE_HANDLE(205);
+        TERM_CASE_HANDLE(206);
+        TERM_CASE_HANDLE(207);
+        TERM_CASE_HANDLE(208);
+        TERM_CASE_HANDLE(209);
+        TERM_CASE_HANDLE(210);
+        TERM_CASE_HANDLE(211);
+        TERM_CASE_HANDLE(212);
+        TERM_CASE_HANDLE(213);
+        TERM_CASE_HANDLE(214);
+        TERM_CASE_HANDLE(215);
+        TERM_CASE_HANDLE(216);
+        TERM_CASE_HANDLE(217);
+        TERM_CASE_HANDLE(218);
+        TERM_CASE_HANDLE(219);
+        TERM_CASE_HANDLE(220);
+        TERM_CASE_HANDLE(221);
+        TERM_CASE_HANDLE(222);
+        TERM_CASE_HANDLE(223);
+        TERM_CASE_HANDLE(224);
+        TERM_CASE_HANDLE(225);
+        TERM_CASE_HANDLE(226);
+        TERM_CASE_HANDLE(227);
+        TERM_CASE_HANDLE(228);
+        TERM_CASE_HANDLE(229);
+        TERM_CASE_HANDLE(230);
+        TERM_CASE_HANDLE(231);
+        TERM_CASE_HANDLE(232);
+        TERM_CASE_HANDLE(233);
+        TERM_CASE_HANDLE(234);
+        TERM_CASE_HANDLE(235);
+        TERM_CASE_HANDLE(236);
+        TERM_CASE_HANDLE(237);
+        TERM_CASE_HANDLE(238);
+        TERM_CASE_HANDLE(239);
+        TERM_CASE_HANDLE(240);
+        TERM_CASE_HANDLE(241);
+        TERM_CASE_HANDLE(242);
+        TERM_CASE_HANDLE(243);
+        TERM_CASE_HANDLE(244);
+        TERM_CASE_HANDLE(245);
+        TERM_CASE_HANDLE(246);
+        TERM_CASE_HANDLE(247);
+        TERM_CASE_HANDLE(248);
+        TERM_CASE_HANDLE(249);
+    }
+    return NULL;
+}
diff --git a/src/third_party/vulkan-loader/src/loader/trampoline.c b/src/third_party/vulkan-loader/src/loader/trampoline.c
new file mode 100644
index 0000000..52eea96
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/trampoline.c
@@ -0,0 +1,2480 @@
+/*
+ *
+ * Copyright (c) 2015-2016 The Khronos Group Inc.
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
+ * Copyright (C) 2015 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Courtney Goeltzenleuchter <courtney@lunarg.com>
+ * Author: Jon Ashburn <jon@lunarg.com>
+ * Author: Tony Barbour <tony@LunarG.com>
+ * Author: Chia-I Wu <olv@lunarg.com>
+ */
+
+#ifndef _GNU_SOURCE
+#define _GNU_SOURCE
+#endif
+#include <stdlib.h>
+#include <string.h>
+
+#include "vk_loader_platform.h"
+#include "loader.h"
+#include "debug_utils.h"
+#include "wsi.h"
+#include "vk_loader_extensions.h"
+#include "gpa_helper.h"
+
+// Trampoline entrypoints are in this file for core Vulkan commands
+
+// Get an instance level or global level entry point address.
+// @param instance
+// @param pName
+// @return
+//    If instance == NULL, returns global level functions only.
+//    If instance is valid, returns a trampoline entry point for all dispatchable Vulkan
+//    functions, both core and extensions.
+LOADER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char *pName) {
+    void *addr;
+
+    addr = globalGetProcAddr(pName);
+    if (instance == VK_NULL_HANDLE || addr != NULL) {
+        return addr;
+    }
+
+    struct loader_instance *ptr_instance = loader_get_instance(instance);
+    if (ptr_instance == NULL) return NULL;
+    // Return trampoline code for non-global entrypoints including any extensions.
+    // Device extensions are returned if a layer or ICD supports the extension.
+    // Instance extensions are returned if the extension is enabled and the
+    // loader or someone else supports the extension
+    return trampolineGetProcAddr(ptr_instance, pName);
+}
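+// Illustrative usage sketch (not part of the loader itself): an application
+// typically resolves extension entrypoints through this trampoline; the
+// VK_EXT_debug_utils entrypoint below is only an example and assumes that
+// extension was enabled on the instance.
+//
+//     PFN_vkCreateDebugUtilsMessengerEXT pfnCreateMessenger =
+//         (PFN_vkCreateDebugUtilsMessengerEXT)vkGetInstanceProcAddr(instance, "vkCreateDebugUtilsMessengerEXT");
+//     if (pfnCreateMessenger != NULL) {
+//         pfnCreateMessenger(instance, &messenger_create_info, NULL, &messenger);
+//     }
+//
+// Passing instance == VK_NULL_HANDLE only yields global commands such as
+// vkCreateInstance or vkEnumerateInstanceExtensionProperties.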
+
+// Get a device level or global level entry point address.
+// @param device
+// @param pName
+// @return
+//    If device is valid, returns a device relative entry point for device level
+//    entry points both core and extensions.
+//    Device relative means call down the device chain.
+LOADER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice device, const char *pName) {
+    void *addr;
+
+    // For entrypoints that the loader must handle (i.e. non-dispatchable or create object),
+    // make sure the loader entrypoint is returned.
+    addr = loader_non_passthrough_gdpa(pName);
+    if (addr) {
+        return addr;
+    }
+
+    // Although CreateDevice is on the device chain, its dispatchable object isn't
+    // a VkDevice or child of VkDevice, so return NULL.
+    if (!strcmp(pName, "CreateDevice")) return NULL;
+
+    // Return the dispatch table entrypoint for the fastest case
+    const VkLayerDispatchTable *disp_table = *(VkLayerDispatchTable **)device;
+    if (disp_table == NULL) return NULL;
+
+    addr = loader_lookup_device_dispatch_table(disp_table, pName);
+    if (addr) return addr;
+
+    if (disp_table->GetDeviceProcAddr == NULL) return NULL;
+    return disp_table->GetDeviceProcAddr(device, pName);
+}
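+// Illustrative usage sketch (not part of the loader itself): resolving a
+// device-level entrypoint this way skips the loader trampoline on every call,
+// so hot-path commands are often fetched once and cached; pfnQueueSubmit and
+// the submit parameters below are assumptions for the example.
+//
+//     PFN_vkQueueSubmit pfnQueueSubmit =
+//         (PFN_vkQueueSubmit)vkGetDeviceProcAddr(device, "vkQueueSubmit");
+//     if (pfnQueueSubmit != NULL) {
+//         pfnQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
+//     }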
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(const char *pLayerName,
+                                                                                    uint32_t *pPropertyCount,
+                                                                                    VkExtensionProperties *pProperties) {
+    tls_instance = NULL;
+    LOADER_PLATFORM_THREAD_ONCE(&once_init, loader_initialize);
+
+    // We know we need to call at least the terminator
+    VkResult res = VK_SUCCESS;
+    VkEnumerateInstanceExtensionPropertiesChain chain_tail = {
+        .header =
+            {
+                .type = VK_CHAIN_TYPE_ENUMERATE_INSTANCE_EXTENSION_PROPERTIES,
+                .version = VK_CURRENT_CHAIN_VERSION,
+                .size = sizeof(chain_tail),
+            },
+        .pfnNextLayer = &terminator_EnumerateInstanceExtensionProperties,
+        .pNextLink = NULL,
+    };
+    VkEnumerateInstanceExtensionPropertiesChain *chain_head = &chain_tail;
+
+    // Get the implicit layers
+    struct loader_layer_list layers;
+    memset(&layers, 0, sizeof(layers));
+    loaderScanForImplicitLayers(NULL, &layers);
+
+    // We'll need to save the dl handles so we can close them later
+    loader_platform_dl_handle *libs = malloc(sizeof(loader_platform_dl_handle) * layers.count);
+    if (libs == NULL) {
+        return VK_ERROR_OUT_OF_HOST_MEMORY;
+    }
+    size_t lib_count = 0;
+
+    // Prepend layers onto the chain if they implement this entry point
+    for (uint32_t i = 0; i < layers.count; ++i) {
+        if (!loaderImplicitLayerIsEnabled(NULL, layers.list + i) ||
+            layers.list[i].pre_instance_functions.enumerate_instance_extension_properties[0] == '\0') {
+            continue;
+        }
+
+        loader_platform_dl_handle layer_lib = loader_platform_open_library(layers.list[i].lib_name);
+        libs[lib_count++] = layer_lib;
+        void *pfn = loader_platform_get_proc_address(layer_lib,
+                                                     layers.list[i].pre_instance_functions.enumerate_instance_extension_properties);
+        if (pfn == NULL) {
+            loader_log(NULL, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "%s: Unable to resolve symbol \"%s\" in implicit layer library \"%s\"", __FUNCTION__,
+                       layers.list[i].pre_instance_functions.enumerate_instance_extension_properties, layers.list[i].lib_name);
+            continue;
+        }
+
+        VkEnumerateInstanceExtensionPropertiesChain *chain_link = malloc(sizeof(VkEnumerateInstanceExtensionPropertiesChain));
+        if (chain_link == NULL) {
+            res = VK_ERROR_OUT_OF_HOST_MEMORY;
+            break;
+        }
+
+        chain_link->header.type = VK_CHAIN_TYPE_ENUMERATE_INSTANCE_EXTENSION_PROPERTIES;
+        chain_link->header.version = VK_CURRENT_CHAIN_VERSION;
+        chain_link->header.size = sizeof(*chain_link);
+        chain_link->pfnNextLayer = pfn;
+        chain_link->pNextLink = chain_head;
+
+        chain_head = chain_link;
+    }
+
+    // Call down the chain
+    if (res == VK_SUCCESS) {
+        res = chain_head->pfnNextLayer(chain_head->pNextLink, pLayerName, pPropertyCount, pProperties);
+    }
+
+    // Free up the layers
+    loaderDeleteLayerListAndProperties(NULL, &layers);
+
+    // Tear down the chain
+    while (chain_head != &chain_tail) {
+        VkEnumerateInstanceExtensionPropertiesChain *holder = chain_head;
+        chain_head = (VkEnumerateInstanceExtensionPropertiesChain *)chain_head->pNextLink;
+        free(holder);
+    }
+
+    // Close the dl handles
+    for (size_t i = 0; i < lib_count; ++i) {
+        loader_platform_close_library(libs[i]);
+    }
+    free(libs);
+
+    return res;
+}
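+// Illustrative caller-side sketch (not part of the loader itself): the usual
+// two-call idiom for this entrypoint, first querying the count and then
+// filling the array.
+//
+//     uint32_t count = 0;
+//     vkEnumerateInstanceExtensionProperties(NULL, &count, NULL);
+//     VkExtensionProperties *props = malloc(count * sizeof(*props));
+//     if (props != NULL &&
+//         vkEnumerateInstanceExtensionProperties(NULL, &count, props) == VK_SUCCESS) {
+//         // inspect props[0 .. count)
+//     }
+//     free(props);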
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(uint32_t *pPropertyCount,
+                                                                                VkLayerProperties *pProperties) {
+    tls_instance = NULL;
+    LOADER_PLATFORM_THREAD_ONCE(&once_init, loader_initialize);
+
+    // We know we need to call at least the terminator
+    VkResult res = VK_SUCCESS;
+    VkEnumerateInstanceLayerPropertiesChain chain_tail = {
+        .header =
+            {
+                .type = VK_CHAIN_TYPE_ENUMERATE_INSTANCE_LAYER_PROPERTIES,
+                .version = VK_CURRENT_CHAIN_VERSION,
+                .size = sizeof(chain_tail),
+            },
+        .pfnNextLayer = &terminator_EnumerateInstanceLayerProperties,
+        .pNextLink = NULL,
+    };
+    VkEnumerateInstanceLayerPropertiesChain *chain_head = &chain_tail;
+
+    // Get the implicit layers
+    struct loader_layer_list layers;
+    memset(&layers, 0, sizeof(layers));
+    loaderScanForImplicitLayers(NULL, &layers);
+
+    // We'll need to save the dl handles so we can close them later
+    loader_platform_dl_handle *libs = malloc(sizeof(loader_platform_dl_handle) * layers.count);
+    if (libs == NULL) {
+        return VK_ERROR_OUT_OF_HOST_MEMORY;
+    }
+    size_t lib_count = 0;
+
+    // Prepend layers onto the chain if they implement this entry point
+    for (uint32_t i = 0; i < layers.count; ++i) {
+        if (!loaderImplicitLayerIsEnabled(NULL, layers.list + i) ||
+            layers.list[i].pre_instance_functions.enumerate_instance_layer_properties[0] == '\0') {
+            continue;
+        }
+
+        loader_platform_dl_handle layer_lib = loader_platform_open_library(layers.list[i].lib_name);
+        libs[lib_count++] = layer_lib;
+        void *pfn =
+            loader_platform_get_proc_address(layer_lib, layers.list[i].pre_instance_functions.enumerate_instance_layer_properties);
+        if (pfn == NULL) {
+            loader_log(NULL, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "%s: Unable to resolve symbol \"%s\" in implicit layer library \"%s\"", __FUNCTION__,
+                       layers.list[i].pre_instance_functions.enumerate_instance_layer_properties, layers.list[i].lib_name);
+            continue;
+        }
+
+        VkEnumerateInstanceLayerPropertiesChain *chain_link = malloc(sizeof(VkEnumerateInstanceLayerPropertiesChain));
+        if (chain_link == NULL) {
+            res = VK_ERROR_OUT_OF_HOST_MEMORY;
+            break;
+        }
+
+        chain_link->header.type = VK_CHAIN_TYPE_ENUMERATE_INSTANCE_LAYER_PROPERTIES;
+        chain_link->header.version = VK_CURRENT_CHAIN_VERSION;
+        chain_link->header.size = sizeof(*chain_link);
+        chain_link->pfnNextLayer = pfn;
+        chain_link->pNextLink = chain_head;
+
+        chain_head = chain_link;
+    }
+
+    // Call down the chain
+    if (res == VK_SUCCESS) {
+        res = chain_head->pfnNextLayer(chain_head->pNextLink, pPropertyCount, pProperties);
+    }
+
+    // Free up the layers
+    loaderDeleteLayerListAndProperties(NULL, &layers);
+
+    // Tear down the chain
+    while (chain_head != &chain_tail) {
+        VkEnumerateInstanceLayerPropertiesChain *holder = chain_head;
+        chain_head = (VkEnumerateInstanceLayerPropertiesChain *)chain_head->pNextLink;
+        free(holder);
+    }
+
+    // Close the dl handles
+    for (size_t i = 0; i < lib_count; ++i) {
+        loader_platform_close_library(libs[i]);
+    }
+    free(libs);
+
+    return res;
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceVersion(uint32_t *pApiVersion) {
+    tls_instance = NULL;
+    LOADER_PLATFORM_THREAD_ONCE(&once_init, loader_initialize);
+
+    // We know we need to call at least the terminator
+    VkResult res = VK_SUCCESS;
+    VkEnumerateInstanceVersionChain chain_tail = {
+        .header =
+            {
+                .type = VK_CHAIN_TYPE_ENUMERATE_INSTANCE_VERSION,
+                .version = VK_CURRENT_CHAIN_VERSION,
+                .size = sizeof(chain_tail),
+            },
+        .pfnNextLayer = &terminator_EnumerateInstanceVersion,
+        .pNextLink = NULL,
+    };
+    VkEnumerateInstanceVersionChain *chain_head = &chain_tail;
+
+    // Get the implicit layers
+    struct loader_layer_list layers;
+    memset(&layers, 0, sizeof(layers));
+    loaderScanForImplicitLayers(NULL, &layers);
+
+    // We'll need to save the dl handles so we can close them later
+    loader_platform_dl_handle *libs = malloc(sizeof(loader_platform_dl_handle) * layers.count);
+    if (libs == NULL) {
+        return VK_ERROR_OUT_OF_HOST_MEMORY;
+    }
+    size_t lib_count = 0;
+
+    // Prepend layers onto the chain if they implement this entry point
+    for (uint32_t i = 0; i < layers.count; ++i) {
+        if (!loaderImplicitLayerIsEnabled(NULL, layers.list + i) ||
+            layers.list[i].pre_instance_functions.enumerate_instance_version[0] == '\0') {
+            continue;
+        }
+
+        loader_platform_dl_handle layer_lib = loader_platform_open_library(layers.list[i].lib_name);
+        libs[lib_count++] = layer_lib;
+        void *pfn = loader_platform_get_proc_address(layer_lib,
+                                                     layers.list[i].pre_instance_functions.enumerate_instance_version);
+        if (pfn == NULL) {
+            loader_log(NULL, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
+                       "%s: Unable to resolve symbol \"%s\" in implicit layer library \"%s\"", __FUNCTION__,
+                       layers.list[i].pre_instance_functions.enumerate_instance_version, layers.list[i].lib_name);
+            continue;
+        }
+
+        VkEnumerateInstanceVersionChain *chain_link = malloc(sizeof(VkEnumerateInstanceVersionChain));
+        if (chain_link == NULL) {
+            res = VK_ERROR_OUT_OF_HOST_MEMORY;
+            break;
+        }
+
+        chain_link->header.type = VK_CHAIN_TYPE_ENUMERATE_INSTANCE_VERSION;
+        chain_link->header.version = VK_CURRENT_CHAIN_VERSION;
+        chain_link->header.size = sizeof(*chain_link);
+        chain_link->pfnNextLayer = pfn;
+        chain_link->pNextLink = chain_head;
+
+        chain_head = chain_link;
+    }
+
+    // Call down the chain
+    if (res == VK_SUCCESS) {
+        res = chain_head->pfnNextLayer(chain_head->pNextLink, pApiVersion);
+    }
+
+    // Free up the layers
+    loaderDeleteLayerListAndProperties(NULL, &layers);
+
+    // Tear down the chain
+    while (chain_head != &chain_tail) {
+        VkEnumerateInstanceVersionChain *holder = chain_head;
+        chain_head = (VkEnumerateInstanceVersionChain *)chain_head->pNextLink;
+        free(holder);
+    }
+
+    // Close the dl handles
+    for (size_t i = 0; i < lib_count; ++i) {
+        loader_platform_close_library(libs[i]);
+    }
+    free(libs);
+
+    return res;
+}
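+// Illustrative caller-side sketch (not part of the loader itself): querying
+// the loader-supported instance version before creating an instance.
+//
+//     uint32_t api_version = 0;
+//     if (vkEnumerateInstanceVersion(&api_version) == VK_SUCCESS) {
+//         // e.g. VK_VERSION_MAJOR(api_version) and VK_VERSION_MINOR(api_version)
+//     }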
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance(const VkInstanceCreateInfo *pCreateInfo,
+                                                              const VkAllocationCallbacks *pAllocator, VkInstance *pInstance) {
+    struct loader_instance *ptr_instance = NULL;
+    VkInstance created_instance = VK_NULL_HANDLE;
+    bool loaderLocked = false;
+    VkResult res = VK_ERROR_INITIALIZATION_FAILED;
+
+    LOADER_PLATFORM_THREAD_ONCE(&once_init, loader_initialize);
+
+#if (DEBUG_DISABLE_APP_ALLOCATORS == 1)
+    {
+#else
+    if (pAllocator) {
+        ptr_instance = (struct loader_instance *)pAllocator->pfnAllocation(pAllocator->pUserData, sizeof(struct loader_instance),
+                                                                           sizeof(int *), VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+    } else {
+#endif
+        ptr_instance = (struct loader_instance *)malloc(sizeof(struct loader_instance));
+    }
+
+    VkInstanceCreateInfo ici = *pCreateInfo;
+
+    if (ptr_instance == NULL) {
+        res = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+
+    tls_instance = ptr_instance;
+    loader_platform_thread_lock_mutex(&loader_lock);
+    loaderLocked = true;
+    memset(ptr_instance, 0, sizeof(struct loader_instance));
+    if (pAllocator) {
+        ptr_instance->alloc_callbacks = *pAllocator;
+    }
+
+    // Save the application version
+    if (NULL == pCreateInfo || NULL == pCreateInfo->pApplicationInfo || 0 == pCreateInfo->pApplicationInfo->apiVersion) {
+        ptr_instance->app_api_major_version = 1;
+        ptr_instance->app_api_minor_version = 0;
+    } else {
+        ptr_instance->app_api_major_version = VK_VERSION_MAJOR(pCreateInfo->pApplicationInfo->apiVersion);
+        ptr_instance->app_api_minor_version = VK_VERSION_MINOR(pCreateInfo->pApplicationInfo->apiVersion);
+    }
+
+    // Look for one or more VK_EXT_debug_report or VK_EXT_debug_utils create info structures
+    // and set up a callback for each one found.
+    ptr_instance->num_tmp_report_callbacks = 0;
+    ptr_instance->tmp_report_create_infos = NULL;
+    ptr_instance->tmp_report_callbacks = NULL;
+    ptr_instance->num_tmp_messengers = 0;
+    ptr_instance->tmp_messenger_create_infos = NULL;
+    ptr_instance->tmp_messengers = NULL;
+
+    // Handle cases of VK_EXT_debug_utils
+    if (util_CopyDebugUtilsMessengerCreateInfos(pCreateInfo->pNext, pAllocator, &ptr_instance->num_tmp_messengers,
+                                                &ptr_instance->tmp_messenger_create_infos, &ptr_instance->tmp_messengers)) {
+        // One or more were found, but allocation failed.  Therefore, clean up and fail this function:
+        res = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    } else if (ptr_instance->num_tmp_messengers > 0) {
+        // Setup the temporary messenger(s) here to catch early issues:
+        if (util_CreateDebugUtilsMessengers(ptr_instance, pAllocator, ptr_instance->num_tmp_messengers,
+                                            ptr_instance->tmp_messenger_create_infos, ptr_instance->tmp_messengers)) {
+            // Failed to set up one or more of the messengers.  Therefore, clean up and fail this function:
+            res = VK_ERROR_OUT_OF_HOST_MEMORY;
+            goto out;
+        }
+    }
+
+    // Handle cases of VK_EXT_debug_report
+    if (util_CopyDebugReportCreateInfos(pCreateInfo->pNext, pAllocator, &ptr_instance->num_tmp_report_callbacks,
+                                        &ptr_instance->tmp_report_create_infos, &ptr_instance->tmp_report_callbacks)) {
+        // One or more were found, but allocation failed.  Therefore, clean up and fail this function:
+        res = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    } else if (ptr_instance->num_tmp_report_callbacks > 0) {
+        // Setup the temporary callback(s) here to catch early issues:
+        if (util_CreateDebugReportCallbacks(ptr_instance, pAllocator, ptr_instance->num_tmp_report_callbacks,
+                                            ptr_instance->tmp_report_create_infos, ptr_instance->tmp_report_callbacks)) {
+            // Failed to set up one or more of the callbacks.  Therefore, clean up and fail this function:
+            res = VK_ERROR_OUT_OF_HOST_MEMORY;
+            goto out;
+        }
+    }
+
+    // Because of implicit layers, the layer list is needed even if
+    // enabledLayerCount == 0 and VK_INSTANCE_LAYERS is unset. For now always
+    // get the layer list via loaderScanForLayers().
+    memset(&ptr_instance->instance_layer_list, 0, sizeof(ptr_instance->instance_layer_list));
+    loaderScanForLayers(ptr_instance, &ptr_instance->instance_layer_list);
+
+    // Validate the app requested layers to be enabled
+    if (pCreateInfo->enabledLayerCount > 0) {
+        res = loaderValidateLayers(ptr_instance, pCreateInfo->enabledLayerCount, pCreateInfo->ppEnabledLayerNames,
+                                   &ptr_instance->instance_layer_list);
+        if (res != VK_SUCCESS) {
+            goto out;
+        }
+    }
+
+    // Scan/discover all ICD libraries
+    memset(&ptr_instance->icd_tramp_list, 0, sizeof(ptr_instance->icd_tramp_list));
+    res = loader_icd_scan(ptr_instance, &ptr_instance->icd_tramp_list);
+    if (res != VK_SUCCESS) {
+        goto out;
+    }
+
+    // Get extensions from all ICDs, merge so there are no duplicates, then validate
+    res = loader_get_icd_loader_instance_extensions(ptr_instance, &ptr_instance->icd_tramp_list, &ptr_instance->ext_list);
+    if (res != VK_SUCCESS) {
+        goto out;
+    }
+    res = loader_validate_instance_extensions(ptr_instance, &ptr_instance->ext_list, &ptr_instance->instance_layer_list, &ici);
+    if (res != VK_SUCCESS) {
+        goto out;
+    }
+
+    ptr_instance->disp = loader_instance_heap_alloc(ptr_instance, sizeof(struct loader_instance_dispatch_table),
+                                                    VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+    if (ptr_instance->disp == NULL) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "vkCreateInstance:  Failed to allocate Loader's full Instance dispatch table.");
+        res = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+    memcpy(&ptr_instance->disp->layer_inst_disp, &instance_disp, sizeof(instance_disp));
+
+    ptr_instance->next = loader.instances;
+    loader.instances = ptr_instance;
+
+    // Activate any layers on instance chain
+    res = loaderEnableInstanceLayers(ptr_instance, &ici, &ptr_instance->instance_layer_list);
+    if (res != VK_SUCCESS) {
+        goto out;
+    }
+
+    created_instance = (VkInstance)ptr_instance;
+    res = loader_create_instance_chain(&ici, pAllocator, ptr_instance, &created_instance);
+
+    if (res == VK_SUCCESS) {
+        memset(ptr_instance->enabled_known_extensions.padding, 0, sizeof(uint64_t) * 4);
+
+        wsi_create_instance(ptr_instance, &ici);
+        debug_utils_CreateInstance(ptr_instance, &ici);
+        extensions_create_instance(ptr_instance, &ici);
+
+        *pInstance = created_instance;
+
+        // Finally have the layers in place and everyone has seen
+        // the CreateInstance command go by. This allows the layer's
+        // GetInstanceProcAddr functions to return valid extension functions
+        // if enabled.
+        loaderActivateInstanceLayerExtensions(ptr_instance, *pInstance);
+    }
+
+out:
+
+    if (NULL != ptr_instance) {
+        if (res != VK_SUCCESS) {
+            if (NULL != ptr_instance->next) {
+                loader.instances = ptr_instance->next;
+            }
+            if (NULL != ptr_instance->disp) {
+                loader_instance_heap_free(ptr_instance, ptr_instance->disp);
+            }
+            if (ptr_instance->num_tmp_report_callbacks > 0) {
+                // Remove temporary VK_EXT_debug_report items
+                util_DestroyDebugReportCallbacks(ptr_instance, pAllocator, ptr_instance->num_tmp_report_callbacks,
+                                                 ptr_instance->tmp_report_callbacks);
+                util_FreeDebugReportCreateInfos(pAllocator, ptr_instance->tmp_report_create_infos,
+                                                ptr_instance->tmp_report_callbacks);
+            }
+            if (ptr_instance->num_tmp_messengers > 0) {
+                // Remove temporary VK_EXT_debug_utils items
+                util_DestroyDebugUtilsMessengers(ptr_instance, pAllocator, ptr_instance->num_tmp_messengers,
+                                                 ptr_instance->tmp_messengers);
+                util_FreeDebugUtilsMessengerCreateInfos(pAllocator, ptr_instance->tmp_messenger_create_infos,
+                                                        ptr_instance->tmp_messengers);
+            }
+
+            if (NULL != ptr_instance->expanded_activated_layer_list.list) {
+                loaderDeactivateLayers(ptr_instance, NULL, &ptr_instance->expanded_activated_layer_list);
+            }
+            if (NULL != ptr_instance->app_activated_layer_list.list) {
+                loaderDestroyLayerList(ptr_instance, NULL, &ptr_instance->app_activated_layer_list);
+            }
+
+            loaderDeleteLayerListAndProperties(ptr_instance, &ptr_instance->instance_layer_list);
+            loader_scanned_icd_clear(ptr_instance, &ptr_instance->icd_tramp_list);
+            loader_destroy_generic_list(ptr_instance, (struct loader_generic_list *)&ptr_instance->ext_list);
+
+            loader_instance_heap_free(ptr_instance, ptr_instance);
+        } else {
+            // Remove temporary VK_EXT_debug_report or VK_EXT_debug_utils items
+            util_DestroyDebugUtilsMessengers(ptr_instance, pAllocator, ptr_instance->num_tmp_messengers,
+                                             ptr_instance->tmp_messengers);
+            util_DestroyDebugReportCallbacks(ptr_instance, pAllocator, ptr_instance->num_tmp_report_callbacks,
+                                             ptr_instance->tmp_report_callbacks);
+        }
+
+        if (loaderLocked) {
+            loader_platform_thread_unlock_mutex(&loader_lock);
+        }
+    }
+
+    return res;
+}
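+// Illustrative caller-side sketch (not part of the loader itself): a minimal
+// vkCreateInstance call that exercises the path above; the application name
+// and requested API version are arbitrary examples.
+//
+//     VkApplicationInfo app_info = {
+//         .sType = VK_STRUCTURE_TYPE_APPLICATION_INFO,
+//         .pApplicationName = "example",
+//         .apiVersion = VK_API_VERSION_1_1,
+//     };
+//     VkInstanceCreateInfo create_info = {
+//         .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
+//         .pApplicationInfo = &app_info,
+//     };
+//     VkInstance inst = VK_NULL_HANDLE;
+//     VkResult result = vkCreateInstance(&create_info, NULL, &inst);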
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator) {
+    const VkLayerInstanceDispatchTable *disp;
+    struct loader_instance *ptr_instance = NULL;
+    bool callback_setup = false;
+    bool messenger_setup = false;
+
+    if (instance == VK_NULL_HANDLE) {
+        return;
+    }
+
+    disp = loader_get_instance_layer_dispatch(instance);
+
+    loader_platform_thread_lock_mutex(&loader_lock);
+
+    ptr_instance = loader_get_instance(instance);
+
+    if (pAllocator) {
+        ptr_instance->alloc_callbacks = *pAllocator;
+    }
+
+    if (ptr_instance->num_tmp_messengers > 0) {
+        // Setup the temporary VK_EXT_debug_utils messenger(s) here to catch cleanup issues:
+        if (!util_CreateDebugUtilsMessengers(ptr_instance, pAllocator, ptr_instance->num_tmp_messengers,
+                                             ptr_instance->tmp_messenger_create_infos, ptr_instance->tmp_messengers)) {
+            messenger_setup = true;
+        }
+    }
+
+    if (ptr_instance->num_tmp_report_callbacks > 0) {
+        // Setup the temporary VK_EXT_debug_report callback(s) here to catch cleanup issues:
+        if (!util_CreateDebugReportCallbacks(ptr_instance, pAllocator, ptr_instance->num_tmp_report_callbacks,
+                                             ptr_instance->tmp_report_create_infos, ptr_instance->tmp_report_callbacks)) {
+            callback_setup = true;
+        }
+    }
+
+    disp->DestroyInstance(instance, pAllocator);
+
+    if (NULL != ptr_instance->expanded_activated_layer_list.list) {
+        loaderDeactivateLayers(ptr_instance, NULL, &ptr_instance->expanded_activated_layer_list);
+    }
+    if (NULL != ptr_instance->app_activated_layer_list.list) {
+        loaderDestroyLayerList(ptr_instance, NULL, &ptr_instance->app_activated_layer_list);
+    }
+
+    if (ptr_instance->phys_devs_tramp) {
+        for (uint32_t i = 0; i < ptr_instance->phys_dev_count_tramp; i++) {
+            loader_instance_heap_free(ptr_instance, ptr_instance->phys_devs_tramp[i]);
+        }
+        loader_instance_heap_free(ptr_instance, ptr_instance->phys_devs_tramp);
+    }
+
+    if (ptr_instance->phys_dev_groups_tramp) {
+        for (uint32_t i = 0; i < ptr_instance->phys_dev_group_count_tramp; i++) {
+            loader_instance_heap_free(ptr_instance, ptr_instance->phys_dev_groups_tramp[i]);
+        }
+        loader_instance_heap_free(ptr_instance, ptr_instance->phys_dev_groups_tramp);
+    }
+
+    if (messenger_setup) {
+        util_DestroyDebugUtilsMessengers(ptr_instance, pAllocator, ptr_instance->num_tmp_messengers, ptr_instance->tmp_messengers);
+        util_FreeDebugUtilsMessengerCreateInfos(pAllocator, ptr_instance->tmp_messenger_create_infos, ptr_instance->tmp_messengers);
+    }
+
+    if (callback_setup) {
+        util_DestroyDebugReportCallbacks(ptr_instance, pAllocator, ptr_instance->num_tmp_report_callbacks,
+                                         ptr_instance->tmp_report_callbacks);
+        util_FreeDebugReportCreateInfos(pAllocator, ptr_instance->tmp_report_create_infos, ptr_instance->tmp_report_callbacks);
+    }
+    loader_instance_heap_free(ptr_instance, ptr_instance->disp);
+    loader_instance_heap_free(ptr_instance, ptr_instance);
+    loader_platform_thread_unlock_mutex(&loader_lock);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
+                                                                        VkPhysicalDevice *pPhysicalDevices) {
+    VkResult res = VK_SUCCESS;
+    uint32_t count;
+    uint32_t i;
+    struct loader_instance *inst;
+
+    loader_platform_thread_lock_mutex(&loader_lock);
+
+    inst = loader_get_instance(instance);
+    if (NULL == inst) {
+        res = VK_ERROR_INITIALIZATION_FAILED;
+        goto out;
+    }
+
+    if (NULL == pPhysicalDeviceCount) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "vkEnumeratePhysicalDevices: Received NULL pointer for physical device count return value.");
+        res = VK_ERROR_INITIALIZATION_FAILED;
+        goto out;
+    }
+
+    // Set up the trampoline loader physical devices.  This will actually
+    // call down and set up the terminator loader physical devices during the
+    // process.
+    VkResult setup_res = setupLoaderTrampPhysDevs(instance);
+    if (setup_res != VK_SUCCESS && setup_res != VK_INCOMPLETE) {
+        res = setup_res;
+        goto out;
+    }
+
+    count = inst->phys_dev_count_tramp;
+
+    // Wrap the PhysDev object for loader usage, return wrapped objects
+    if (NULL != pPhysicalDevices) {
+        if (inst->phys_dev_count_tramp > *pPhysicalDeviceCount) {
+            loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                       "vkEnumeratePhysicalDevices: Trimming device count down"
+                       " by application request from %d to %d physical devices",
+                       inst->phys_dev_count_tramp, *pPhysicalDeviceCount);
+            count = *pPhysicalDeviceCount;
+            res = VK_INCOMPLETE;
+        }
+        for (i = 0; i < count; i++) {
+            pPhysicalDevices[i] = (VkPhysicalDevice)inst->phys_devs_tramp[i];
+        }
+    }
+
+    *pPhysicalDeviceCount = count;
+
+out:
+
+    loader_platform_thread_unlock_mutex(&loader_lock);
+    return res;
+}
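+// Illustrative caller-side sketch (not part of the loader itself): the
+// standard count/fill pattern for enumerating the trampoline physical
+// devices; the fixed-size array is an assumption for brevity.
+//
+//     uint32_t gpu_count = 0;
+//     vkEnumeratePhysicalDevices(instance, &gpu_count, NULL);
+//     VkPhysicalDevice gpus[8];
+//     if (gpu_count > 8) gpu_count = 8;  // VK_INCOMPLETE is returned when trimmed
+//     vkEnumeratePhysicalDevices(instance, &gpu_count, gpus);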
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
+                                                                     VkPhysicalDeviceFeatures *pFeatures) {
+    const VkLayerInstanceDispatchTable *disp;
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    disp->GetPhysicalDeviceFeatures(unwrapped_phys_dev, pFeatures);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format,
+                                                                             VkFormatProperties *pFormatInfo) {
+    const VkLayerInstanceDispatchTable *disp;
+    VkPhysicalDevice unwrapped_pd = loader_unwrap_physical_device(physicalDevice);
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    disp->GetPhysicalDeviceFormatProperties(unwrapped_pd, format, pFormatInfo);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties(
+    VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage,
+    VkImageCreateFlags flags, VkImageFormatProperties *pImageFormatProperties) {
+    const VkLayerInstanceDispatchTable *disp;
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    return disp->GetPhysicalDeviceImageFormatProperties(unwrapped_phys_dev, format, type, tiling, usage, flags,
+                                                        pImageFormatProperties);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,
+                                                                       VkPhysicalDeviceProperties *pProperties) {
+    const VkLayerInstanceDispatchTable *disp;
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    disp->GetPhysicalDeviceProperties(unwrapped_phys_dev, pProperties);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
+                                                                                  uint32_t *pQueueFamilyPropertyCount,
+                                                                                  VkQueueFamilyProperties *pQueueProperties) {
+    const VkLayerInstanceDispatchTable *disp;
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    disp->GetPhysicalDeviceQueueFamilyProperties(unwrapped_phys_dev, pQueueFamilyPropertyCount, pQueueProperties);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice,
+                                                                             VkPhysicalDeviceMemoryProperties *pMemoryProperties) {
+    const VkLayerInstanceDispatchTable *disp;
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    disp->GetPhysicalDeviceMemoryProperties(unwrapped_phys_dev, pMemoryProperties);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo,
+                                                            const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
+    loader_platform_thread_lock_mutex(&loader_lock);
+    VkResult res = loader_layer_create_device(NULL, physicalDevice, pCreateInfo, pAllocator, pDevice, NULL, NULL);
+    loader_platform_thread_unlock_mutex(&loader_lock);
+    return res;
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
+    const VkLayerDispatchTable *disp;
+
+    if (device == VK_NULL_HANDLE) {
+        return;
+    }
+    disp = loader_get_dispatch(device);
+
+    loader_platform_thread_lock_mutex(&loader_lock);
+
+    loader_layer_destroy_device(device, pAllocator, disp->DestroyDevice);
+
+    loader_platform_thread_unlock_mutex(&loader_lock);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
+                                                                                  const char *pLayerName, uint32_t *pPropertyCount,
+                                                                                  VkExtensionProperties *pProperties) {
+    VkResult res = VK_SUCCESS;
+    struct loader_physical_device_tramp *phys_dev;
+    const VkLayerInstanceDispatchTable *disp;
+    phys_dev = (struct loader_physical_device_tramp *)physicalDevice;
+
+    loader_platform_thread_lock_mutex(&loader_lock);
+
+    // always pass this call down the instance chain which will terminate
+    // in the ICD. This allows layers to filter the extensions coming back
+    // up the chain. In the terminator we look up layer extensions from the
+    // manifest file if it wasn't provided by the layer itself.
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    res = disp->EnumerateDeviceExtensionProperties(phys_dev->phys_dev, pLayerName, pPropertyCount, pProperties);
+
+    loader_platform_thread_unlock_mutex(&loader_lock);
+    return res;
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice,
+                                                                              uint32_t *pPropertyCount,
+                                                                              VkLayerProperties *pProperties) {
+    uint32_t copy_size;
+    struct loader_physical_device_tramp *phys_dev;
+    struct loader_layer_list *enabled_layers, layers_list;
+    memset(&layers_list, 0, sizeof(layers_list));
+    loader_platform_thread_lock_mutex(&loader_lock);
+
+    // Don't dispatch this call down the instance chain; we want all device layers
+    // enumerated, and the instance chain may not contain all device layers.
+    // TODO: re-evaluate the above statement; we may be able to start calling
+    // down the chain.
+
+    phys_dev = (struct loader_physical_device_tramp *)physicalDevice;
+    const struct loader_instance *inst = phys_dev->this_instance;
+
+    uint32_t count = inst->app_activated_layer_list.count;
+    if (count == 0 || pProperties == NULL) {
+        *pPropertyCount = count;
+        loader_platform_thread_unlock_mutex(&loader_lock);
+        return VK_SUCCESS;
+    }
+    enabled_layers = (struct loader_layer_list *)&inst->app_activated_layer_list;
+
+    copy_size = (*pPropertyCount < count) ? *pPropertyCount : count;
+    for (uint32_t i = 0; i < copy_size; i++) {
+        memcpy(&pProperties[i], &(enabled_layers->list[i].info), sizeof(VkLayerProperties));
+    }
+    *pPropertyCount = copy_size;
+
+    if (copy_size < count) {
+        loader_platform_thread_unlock_mutex(&loader_lock);
+        return VK_INCOMPLETE;
+    }
+
+    loader_platform_thread_unlock_mutex(&loader_lock);
+    return VK_SUCCESS;
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue(VkDevice device, uint32_t queueNodeIndex, uint32_t queueIndex,
+                                                          VkQueue *pQueue) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    disp->GetDeviceQueue(device, queueNodeIndex, queueIndex, pQueue);
+    loader_set_dispatch(*pQueue, disp);
+}
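+// Illustrative caller-side sketch (not part of the loader itself): the queue
+// handle returned above already has the loader dispatch pointer installed via
+// loader_set_dispatch, so it can be passed straight to the queue trampolines
+// below; the queue family index and submit info are assumptions.
+//
+//     VkQueue queue = VK_NULL_HANDLE;
+//     vkGetDeviceQueue(device, graphics_family_index, 0, &queue);
+//     vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
+//     vkQueueWaitIdle(queue);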
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
+                                                           VkFence fence) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(queue);
+
+    return disp->QueueSubmit(queue, submitCount, pSubmits, fence);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle(VkQueue queue) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(queue);
+
+    return disp->QueueWaitIdle(queue);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle(VkDevice device) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->DeviceWaitIdle(device);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
+                                                              const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->AllocateMemory(device, pAllocateInfo, pAllocator, pMemory);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkFreeMemory(VkDevice device, VkDeviceMemory mem,
+                                                      const VkAllocationCallbacks *pAllocator) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    disp->FreeMemory(device, mem, pAllocator);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset,
+                                                         VkDeviceSize size, VkFlags flags, void **ppData) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->MapMemory(device, mem, offset, size, flags, ppData);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkUnmapMemory(VkDevice device, VkDeviceMemory mem) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    disp->UnmapMemory(device, mem);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkFlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount,
+                                                                       const VkMappedMemoryRange *pMemoryRanges) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->FlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkInvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount,
+                                                                            const VkMappedMemoryRange *pMemoryRanges) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->InvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory,
+                                                                     VkDeviceSize *pCommittedMemoryInBytes) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    disp->GetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
+                                                                VkDeviceSize offset) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->BindBufferMemory(device, buffer, mem, offset);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
+                                                               VkDeviceSize offset) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->BindImageMemory(device, image, mem, offset);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
+                                                                       VkMemoryRequirements *pMemoryRequirements) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    disp->GetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements(VkDevice device, VkImage image,
+                                                                      VkMemoryRequirements *pMemoryRequirements) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    disp->GetImageMemoryRequirements(device, image, pMemoryRequirements);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
+vkGetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
+                                   VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    disp->GetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties(
+    VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage,
+    VkImageTiling tiling, uint32_t *pPropertyCount, VkSparseImageFormatProperties *pProperties) {
+    const VkLayerInstanceDispatchTable *disp;
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+
+    disp->GetPhysicalDeviceSparseImageFormatProperties(unwrapped_phys_dev, format, type, samples, usage, tiling, pPropertyCount,
+                                                       pProperties);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkQueueBindSparse(VkQueue queue, uint32_t bindInfoCount,
+                                                               const VkBindSparseInfo *pBindInfo, VkFence fence) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(queue);
+
+    return disp->QueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
+                                                           const VkAllocationCallbacks *pAllocator, VkFence *pFence) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->CreateFence(device, pCreateInfo, pAllocator, pFence);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    disp->DestroyFence(device, fence, pAllocator);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->ResetFences(device, fenceCount, pFences);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceStatus(VkDevice device, VkFence fence) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->GetFenceStatus(device, fence);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
+                                                             VkBool32 waitAll, uint64_t timeout) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->WaitForFences(device, fenceCount, pFences, waitAll, timeout);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
+                                                               const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->CreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroySemaphore(VkDevice device, VkSemaphore semaphore,
+                                                            const VkAllocationCallbacks *pAllocator) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    disp->DestroySemaphore(device, semaphore, pAllocator);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
+                                                           const VkAllocationCallbacks *pAllocator, VkEvent *pEvent) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->CreateEvent(device, pCreateInfo, pAllocator, pEvent);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    disp->DestroyEvent(device, event, pAllocator);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetEventStatus(VkDevice device, VkEvent event) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->GetEventStatus(device, event);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkSetEvent(VkDevice device, VkEvent event) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->SetEvent(device, event);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkResetEvent(VkDevice device, VkEvent event) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->ResetEvent(device, event);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
+                                                               const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->CreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
+                                                            const VkAllocationCallbacks *pAllocator) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    disp->DestroyQueryPool(device, queryPool, pAllocator);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
+                                                                   uint32_t queryCount, size_t dataSize, void *pData,
+                                                                   VkDeviceSize stride, VkQueryResultFlags flags) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->GetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
+                                                            const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->CreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyBuffer(VkDevice device, VkBuffer buffer,
+                                                         const VkAllocationCallbacks *pAllocator) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    disp->DestroyBuffer(device, buffer, pAllocator);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
+                                                                const VkAllocationCallbacks *pAllocator, VkBufferView *pView) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->CreateBufferView(device, pCreateInfo, pAllocator, pView);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyBufferView(VkDevice device, VkBufferView bufferView,
+                                                             const VkAllocationCallbacks *pAllocator) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    disp->DestroyBufferView(device, bufferView, pAllocator);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
+                                                           const VkAllocationCallbacks *pAllocator, VkImage *pImage) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->CreateImage(device, pCreateInfo, pAllocator, pImage);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    disp->DestroyImage(device, image, pAllocator);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout(VkDevice device, VkImage image,
+                                                                     const VkImageSubresource *pSubresource,
+                                                                     VkSubresourceLayout *pLayout) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    disp->GetImageSubresourceLayout(device, image, pSubresource, pLayout);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
+                                                               const VkAllocationCallbacks *pAllocator, VkImageView *pView) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->CreateImageView(device, pCreateInfo, pAllocator, pView);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyImageView(VkDevice device, VkImageView imageView,
+                                                            const VkAllocationCallbacks *pAllocator) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    disp->DestroyImageView(device, imageView, pAllocator);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
+                                                                  const VkAllocationCallbacks *pAllocator,
+                                                                  VkShaderModule *pShader) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->CreateShaderModule(device, pCreateInfo, pAllocator, pShader);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
+                                                               const VkAllocationCallbacks *pAllocator) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    disp->DestroyShaderModule(device, shaderModule, pAllocator);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineCache(VkDevice device, const VkPipelineCacheCreateInfo *pCreateInfo,
+                                                                   const VkAllocationCallbacks *pAllocator,
+                                                                   VkPipelineCache *pPipelineCache) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->CreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache,
+                                                                const VkAllocationCallbacks *pAllocator) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    disp->DestroyPipelineCache(device, pipelineCache, pAllocator);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache,
+                                                                    size_t *pDataSize, void *pData) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->GetPipelineCacheData(device, pipelineCache, pDataSize, pData);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkMergePipelineCaches(VkDevice device, VkPipelineCache dstCache,
+                                                                   uint32_t srcCacheCount, const VkPipelineCache *pSrcCaches) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->MergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache,
+                                                                       uint32_t createInfoCount,
+                                                                       const VkGraphicsPipelineCreateInfo *pCreateInfos,
+                                                                       const VkAllocationCallbacks *pAllocator,
+                                                                       VkPipeline *pPipelines) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->CreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache,
+                                                                      uint32_t createInfoCount,
+                                                                      const VkComputePipelineCreateInfo *pCreateInfos,
+                                                                      const VkAllocationCallbacks *pAllocator,
+                                                                      VkPipeline *pPipelines) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->CreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyPipeline(VkDevice device, VkPipeline pipeline,
+                                                           const VkAllocationCallbacks *pAllocator) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    disp->DestroyPipeline(device, pipeline, pAllocator);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
+                                                                    const VkAllocationCallbacks *pAllocator,
+                                                                    VkPipelineLayout *pPipelineLayout) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->CreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
+                                                                 const VkAllocationCallbacks *pAllocator) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    disp->DestroyPipelineLayout(device, pipelineLayout, pAllocator);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
+                                                             const VkAllocationCallbacks *pAllocator, VkSampler *pSampler) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->CreateSampler(device, pCreateInfo, pAllocator, pSampler);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroySampler(VkDevice device, VkSampler sampler,
+                                                          const VkAllocationCallbacks *pAllocator) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    disp->DestroySampler(device, sampler, pAllocator);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorSetLayout(VkDevice device,
+                                                                         const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
+                                                                         const VkAllocationCallbacks *pAllocator,
+                                                                         VkDescriptorSetLayout *pSetLayout) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->CreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
+                                                                      const VkAllocationCallbacks *pAllocator) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    disp->DestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
+                                                                    const VkAllocationCallbacks *pAllocator,
+                                                                    VkDescriptorPool *pDescriptorPool) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->CreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
+                                                                 const VkAllocationCallbacks *pAllocator) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    disp->DestroyDescriptorPool(device, descriptorPool, pAllocator);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
+                                                                   VkDescriptorPoolResetFlags flags) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->ResetDescriptorPool(device, descriptorPool, flags);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkAllocateDescriptorSets(VkDevice device,
+                                                                      const VkDescriptorSetAllocateInfo *pAllocateInfo,
+                                                                      VkDescriptorSet *pDescriptorSets) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->AllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool,
+                                                                  uint32_t descriptorSetCount,
+                                                                  const VkDescriptorSet *pDescriptorSets) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->FreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
+                                                                const VkWriteDescriptorSet *pDescriptorWrites,
+                                                                uint32_t descriptorCopyCount,
+                                                                const VkCopyDescriptorSet *pDescriptorCopies) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    disp->UpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
+                                                                 const VkAllocationCallbacks *pAllocator,
+                                                                 VkFramebuffer *pFramebuffer) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->CreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
+                                                              const VkAllocationCallbacks *pAllocator) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    disp->DestroyFramebuffer(device, framebuffer, pAllocator);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
+                                                                const VkAllocationCallbacks *pAllocator,
+                                                                VkRenderPass *pRenderPass) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->CreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
+                                                             const VkAllocationCallbacks *pAllocator) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    disp->DestroyRenderPass(device, renderPass, pAllocator);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass,
+                                                                    VkExtent2D *pGranularity) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    disp->GetRenderAreaGranularity(device, renderPass, pGranularity);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
+                                                                 const VkAllocationCallbacks *pAllocator,
+                                                                 VkCommandPool *pCommandPool) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->CreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
+                                                              const VkAllocationCallbacks *pAllocator) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    disp->DestroyCommandPool(device, commandPool, pAllocator);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandPool(VkDevice device, VkCommandPool commandPool,
+                                                                VkCommandPoolResetFlags flags) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    return disp->ResetCommandPool(device, commandPool, flags);
+}
+
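+// Command buffers are dispatchable handles: after the layers/ICD allocate them,
+// the loader writes its dispatch pointer into each new handle (loader_init_dispatch)
+// so that later vkBegin/End/ResetCommandBuffer and vkCmd* calls can be dispatched.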
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkAllocateCommandBuffers(VkDevice device,
+                                                                      const VkCommandBufferAllocateInfo *pAllocateInfo,
+                                                                      VkCommandBuffer *pCommandBuffers) {
+    const VkLayerDispatchTable *disp;
+    VkResult res;
+
+    disp = loader_get_dispatch(device);
+
+    res = disp->AllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers);
+    if (res == VK_SUCCESS) {
+        for (uint32_t i = 0; i < pAllocateInfo->commandBufferCount; i++) {
+            if (pCommandBuffers[i]) {
+                loader_init_dispatch(pCommandBuffers[i], disp);
+            }
+        }
+    }
+
+    return res;
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
+                                                              uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(device);
+
+    disp->FreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkBeginCommandBuffer(VkCommandBuffer commandBuffer,
+                                                                  const VkCommandBufferBeginInfo *pBeginInfo) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    return disp->BeginCommandBuffer(commandBuffer, pBeginInfo);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEndCommandBuffer(VkCommandBuffer commandBuffer) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    return disp->EndCommandBuffer(commandBuffer);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    return disp->ResetCommandBuffer(commandBuffer, flags);
+}
+
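+// The vkCmd* trampolines below follow a uniform pattern: look up the dispatch table
+// stored in the command buffer handle and forward the call unchanged down the
+// layer/ICD chain.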
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
+                                                           VkPipeline pipeline) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
+                                                          uint32_t viewportCount, const VkViewport *pViewports) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor,
+                                                         uint32_t scissorCount, const VkRect2D *pScissors) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdSetLineWidth(commandBuffer, lineWidth);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
+                                                           float depthBiasClamp, float depthBiasSlopeFactor) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdSetDepthBias(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdSetBlendConstants(commandBuffer, blendConstants);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
+                                                             float maxDepthBounds) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
+                                                                    uint32_t compareMask) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdSetStencilCompareMask(commandBuffer, faceMask, compareMask);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
+                                                                  uint32_t writeMask) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
+                                                                  uint32_t reference) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdSetStencilReference(commandBuffer, faceMask, reference);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
+                                                                 VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
+                                                                 uint32_t firstSet, uint32_t descriptorSetCount,
+                                                                 const VkDescriptorSet *pDescriptorSets,
+                                                                 uint32_t dynamicOffsetCount, const uint32_t *pDynamicOffsets) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets,
+                                dynamicOffsetCount, pDynamicOffsets);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+                                                              VkIndexType indexType) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
+                                                                uint32_t bindingCount, const VkBuffer *pBuffers,
+                                                                const VkDeviceSize *pOffsets) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
+                                                   uint32_t firstVertex, uint32_t firstInstance) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
+                                                          uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
+                                                          uint32_t firstInstance) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+                                                           uint32_t drawCount, uint32_t stride) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
+                                                                  VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdDispatch(commandBuffer, x, y, z);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
+                                                               VkDeviceSize offset) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdDispatchIndirect(commandBuffer, buffer, offset);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
+                                                         uint32_t regionCount, const VkBufferCopy *pRegions) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
+                                                        VkImageLayout srcImageLayout, VkImage dstImage,
+                                                        VkImageLayout dstImageLayout, uint32_t regionCount,
+                                                        const VkImageCopy *pRegions) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
+                                                        VkImageLayout srcImageLayout, VkImage dstImage,
+                                                        VkImageLayout dstImageLayout, uint32_t regionCount,
+                                                        const VkImageBlit *pRegions, VkFilter filter) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
+                                                                VkImageLayout dstImageLayout, uint32_t regionCount,
+                                                                const VkBufferImageCopy *pRegions) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
+                                                                VkImageLayout srcImageLayout, VkBuffer dstBuffer,
+                                                                uint32_t regionCount, const VkBufferImageCopy *pRegions) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
+                                                           VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
+                                                         VkDeviceSize size, uint32_t data) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
+                                                              VkImageLayout imageLayout, const VkClearColorValue *pColor,
+                                                              uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
+                                                                     VkImageLayout imageLayout,
+                                                                     const VkClearDepthStencilValue *pDepthStencil,
+                                                                     uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount,
+                                                               const VkClearAttachment *pAttachments, uint32_t rectCount,
+                                                               const VkClearRect *pRects) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
+                                                           VkImageLayout srcImageLayout, VkImage dstImage,
+                                                           VkImageLayout dstImageLayout, uint32_t regionCount,
+                                                           const VkImageResolve *pRegions) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
+                                                       VkPipelineStageFlags stageMask) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdSetEvent(commandBuffer, event, stageMask);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
+                                                         VkPipelineStageFlags stageMask) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdResetEvent(commandBuffer, event, stageMask);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
+                                                         VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
+                                                         uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
+                                                         uint32_t bufferMemoryBarrierCount,
+                                                         const VkBufferMemoryBarrier *pBufferMemoryBarriers,
+                                                         uint32_t imageMemoryBarrierCount,
+                                                         const VkImageMemoryBarrier *pImageMemoryBarriers) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdWaitEvents(commandBuffer, eventCount, pEvents, sourceStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers,
+                        bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
+                                                              VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
+                                                              uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
+                                                              uint32_t bufferMemoryBarrierCount,
+                                                              const VkBufferMemoryBarrier *pBufferMemoryBarriers,
+                                                              uint32_t imageMemoryBarrierCount,
+                                                              const VkImageMemoryBarrier *pImageMemoryBarriers) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers,
+                             bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
+                                                         VkFlags flags) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdBeginQuery(commandBuffer, queryPool, slot, flags);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdEndQuery(commandBuffer, queryPool, slot);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
+                                                             uint32_t firstQuery, uint32_t queryCount) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
+                                                             VkQueryPool queryPool, uint32_t slot) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, slot);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
+                                                                   uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
+                                                                   VkDeviceSize dstOffset, VkDeviceSize stride, VkFlags flags) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
+                                                            VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
+                                                            const void *pValues) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass(VkCommandBuffer commandBuffer,
+                                                              const VkRenderPassBeginInfo *pRenderPassBegin,
+                                                              VkSubpassContents contents) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdNextSubpass(commandBuffer, contents);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass(VkCommandBuffer commandBuffer) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdEndRenderPass(commandBuffer);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
+                                                              const VkCommandBuffer *pCommandBuffers) {
+    const VkLayerDispatchTable *disp;
+
+    disp = loader_get_dispatch(commandBuffer);
+
+    disp->CmdExecuteCommands(commandBuffer, commandBuffersCount, pCommandBuffers);
+}
+
+// ---- Vulkan core 1.1 trampolines
+
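+// Enumerate the physical device groups reported by the layers/ICDs, replace each raw
+// VkPhysicalDevice handle with the loader's trampoline wrapper, and cache the resulting
+// group list in the instance for later vkEnumeratePhysicalDeviceGroups calls.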
+VkResult setupLoaderTrampPhysDevGroups(VkInstance instance) {
+    VkResult res = VK_SUCCESS;
+    struct loader_instance *inst;
+    uint32_t total_count = 0;
+    VkPhysicalDeviceGroupPropertiesKHR **new_phys_dev_groups = NULL;
+    VkPhysicalDeviceGroupPropertiesKHR *local_phys_dev_groups = NULL;
+    PFN_vkEnumeratePhysicalDeviceGroups fpEnumeratePhysicalDeviceGroups = NULL;
+
+    inst = loader_get_instance(instance);
+    if (NULL == inst) {
+        res = VK_ERROR_INITIALIZATION_FAILED;
+        goto out;
+    }
+
+    // Get the function pointer to use to call into the ICD. This could be the core or KHR version
+    if (inst->enabled_known_extensions.khr_device_group_creation) {
+        fpEnumeratePhysicalDeviceGroups = inst->disp->layer_inst_disp.EnumeratePhysicalDeviceGroupsKHR;
+    } else {
+        fpEnumeratePhysicalDeviceGroups = inst->disp->layer_inst_disp.EnumeratePhysicalDeviceGroups;
+    }
+
+    // Setup the trampoline loader physical devices.  This will actually
+    // call down and setup the terminator loader physical devices during the
+    // process.
+    VkResult setup_res = setupLoaderTrampPhysDevs(instance);
+    if (setup_res != VK_SUCCESS && setup_res != VK_INCOMPLETE) {
+        res = setup_res;
+        goto out;
+    }
+
+    // Query how many physical device groups there are
+    res = fpEnumeratePhysicalDeviceGroups(instance, &total_count, NULL);
+    if (res != VK_SUCCESS) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+            "setupLoaderTrampPhysDevGroups:  Failed during dispatch call of "
+            "\'EnumeratePhysicalDeviceGroupsKHR\' to lower layers or "
+            "loader to get count.");
+        goto out;
+    }
+
+    // Create an array for the new physical device groups, which will be stored
+    // in the instance for the trampoline code.
+    new_phys_dev_groups = (VkPhysicalDeviceGroupPropertiesKHR **)loader_instance_heap_alloc(
+        inst, total_count * sizeof(VkPhysicalDeviceGroupPropertiesKHR *), VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+    if (NULL == new_phys_dev_groups) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+            "setupLoaderTrampPhysDevGroups:  Failed to allocate new physical device"
+            " group array of size %d",
+            total_count);
+        res = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+    memset(new_phys_dev_groups, 0, total_count * sizeof(VkPhysicalDeviceGroupPropertiesKHR *));
+
+    // Create a temporary array (on the stack) to receive the
+    // VkPhysicalDeviceGroupProperties values returned by the enumeration call below.
+    local_phys_dev_groups = loader_stack_alloc(sizeof(VkPhysicalDeviceGroupPropertiesKHR) * total_count);
+    if (NULL == local_phys_dev_groups) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+            "setupLoaderTrampPhysDevGroups:  Failed to allocate local "
+            "physical device group array of size %d",
+            total_count);
+        res = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+    // Initialize the memory to something valid
+    memset(local_phys_dev_groups, 0, sizeof(VkPhysicalDeviceGroupPropertiesKHR) * total_count);
+    for (uint32_t group = 0; group < total_count; group++) {
+        local_phys_dev_groups[group].sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR;
+        local_phys_dev_groups[group].pNext = NULL;
+        local_phys_dev_groups[group].subsetAllocation = false;
+    }
+
+    // Call down and get the content
+    res = fpEnumeratePhysicalDeviceGroups(instance, &total_count, local_phys_dev_groups);
+    if (VK_SUCCESS != res) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+            "setupLoaderTrampPhysDevGroups:  Failed during dispatch call of "
+            "\'EnumeratePhysicalDeviceGroupsKHR\' to lower layers or "
+            "loader to get content.");
+        goto out;
+    }
+
+    // Replace all the physical device IDs with the proper loader values
+    for (uint32_t group = 0; group < total_count; group++) {
+        for (uint32_t group_gpu = 0; group_gpu < local_phys_dev_groups[group].physicalDeviceCount; group_gpu++) {
+            bool found = false;
+            for (uint32_t tramp_gpu = 0; tramp_gpu < inst->phys_dev_count_tramp; tramp_gpu++) {
+                if (local_phys_dev_groups[group].physicalDevices[group_gpu] == inst->phys_devs_tramp[tramp_gpu]->phys_dev) {
+                    local_phys_dev_groups[group].physicalDevices[group_gpu] = (VkPhysicalDevice)inst->phys_devs_tramp[tramp_gpu];
+                    found = true;
+                    break;
+                }
+            }
+            if (!found) {
+                loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                    "setupLoaderTrampPhysDevGroups:  Failed to find GPU %d in group %d"
+                    " returned by \'EnumeratePhysicalDeviceGroupsKHR\' in list returned"
+                    " by \'EnumeratePhysicalDevices\'", group_gpu, group);
+                res = VK_ERROR_INITIALIZATION_FAILED;
+                goto out;
+            }
+        }
+    }
+
+    // Copy or create everything to fill the new array of physical device groups
+    for (uint32_t new_idx = 0; new_idx < total_count; new_idx++) {
+        // Check whether a physical device group with the same contents is already in the old buffer
+        for (uint32_t old_idx = 0; old_idx < inst->phys_dev_group_count_tramp; old_idx++) {
+            if (local_phys_dev_groups[new_idx].physicalDeviceCount == inst->phys_dev_groups_tramp[old_idx]->physicalDeviceCount) {
+                bool found_all_gpus = true;
+                for (uint32_t old_gpu = 0; old_gpu < inst->phys_dev_groups_tramp[old_idx]->physicalDeviceCount; old_gpu++) {
+                    bool found_gpu = false;
+                    for (uint32_t new_gpu = 0; new_gpu < local_phys_dev_groups[new_idx].physicalDeviceCount; new_gpu++) {
+                        if (local_phys_dev_groups[new_idx].physicalDevices[new_gpu] == inst->phys_dev_groups_tramp[old_idx]->physicalDevices[old_gpu]) {
+                            found_gpu = true;
+                            break;
+                        }
+                    }
+
+                    if (!found_gpu) {
+                        found_all_gpus = false;
+                        break;
+                    }
+                }
+                if (!found_all_gpus) {
+                    continue;
+                } else {
+                    new_phys_dev_groups[new_idx] = inst->phys_dev_groups_tramp[old_idx];
+                    break;
+                }
+            }
+        }
+
+        // If this physical device group isn't in the old buffer, create it
+        if (NULL == new_phys_dev_groups[new_idx]) {
+            new_phys_dev_groups[new_idx] = (VkPhysicalDeviceGroupPropertiesKHR *)loader_instance_heap_alloc(
+                inst, sizeof(VkPhysicalDeviceGroupPropertiesKHR), VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
+            if (NULL == new_phys_dev_groups[new_idx]) {
+                loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                    "setupLoaderTrampPhysDevGroups:  Failed to allocate "
+                    "physical device group trampoline object %d",
+                    new_idx);
+                total_count = new_idx;
+                res = VK_ERROR_OUT_OF_HOST_MEMORY;
+                goto out;
+            }
+            memcpy(new_phys_dev_groups[new_idx], &local_phys_dev_groups[new_idx],
+                sizeof(VkPhysicalDeviceGroupPropertiesKHR));
+        }
+    }
+
+out:
+
+    if (VK_SUCCESS != res) {
+        if (NULL != new_phys_dev_groups) {
+            for (uint32_t i = 0; i < total_count; i++) {
+                loader_instance_heap_free(inst, new_phys_dev_groups[i]);
+            }
+            loader_instance_heap_free(inst, new_phys_dev_groups);
+        }
+        total_count = 0;
+    } else {
+        // Free everything that didn't carry over to the new array of
+        // physical device groups
+        if (NULL != inst->phys_dev_groups_tramp) {
+            for (uint32_t i = 0; i < inst->phys_dev_group_count_tramp; i++) {
+                bool found = false;
+                for (uint32_t j = 0; j < total_count; j++) {
+                    if (inst->phys_dev_groups_tramp[i] == new_phys_dev_groups[j]) {
+                        found = true;
+                        break;
+                    }
+                }
+                if (!found) {
+                    loader_instance_heap_free(inst, inst->phys_dev_groups_tramp[i]);
+                }
+            }
+            loader_instance_heap_free(inst, inst->phys_dev_groups_tramp);
+        }
+
+        // Swap in the new physical device group list
+        inst->phys_dev_group_count_tramp = total_count;
+        inst->phys_dev_groups_tramp = new_phys_dev_groups;
+    }
+
+    return res;
+}
+
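+// Standard two-call enumeration: with a NULL properties pointer only the count is returned;
+// otherwise up to *pPhysicalDeviceGroupCount entries are copied from the cached trampoline
+// group list, and VK_INCOMPLETE is returned if the list had to be trimmed.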
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroups(
+    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties) {
+    VkResult res = VK_SUCCESS;
+    uint32_t count;
+    uint32_t i;
+    struct loader_instance *inst = NULL;
+
+    loader_platform_thread_lock_mutex(&loader_lock);
+
+    inst = loader_get_instance(instance);
+    if (NULL == inst) {
+        res = VK_ERROR_INITIALIZATION_FAILED;
+        goto out;
+    }
+
+    if (NULL == pPhysicalDeviceGroupCount) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "vkEnumeratePhysicalDeviceGroupsKHR: Received NULL pointer for physical "
+                   "device group count return value.");
+        res = VK_ERROR_INITIALIZATION_FAILED;
+        goto out;
+    }
+
+    VkResult setup_res = setupLoaderTrampPhysDevGroups(instance);
+    if (VK_SUCCESS != setup_res) {
+        res = setup_res;
+        goto out;
+    }
+
+    count = inst->phys_dev_group_count_tramp;
+
+    // Return the cached device groups (their handles are already the loader's wrapped
+    // trampoline objects), trimming to the application's count if necessary
+    if (NULL != pPhysicalDeviceGroupProperties) {
+        if (inst->phys_dev_group_count_tramp > *pPhysicalDeviceGroupCount) {
+            loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+                       "vkEnumeratePhysicalDeviceGroupsKHR: Trimming device group count down"
+                       " by application request from %d to %d physical device groups",
+                       inst->phys_dev_group_count_tramp, *pPhysicalDeviceGroupCount);
+            count = *pPhysicalDeviceGroupCount;
+            res = VK_INCOMPLETE;
+        }
+        for (i = 0; i < count; i++) {
+            memcpy(&pPhysicalDeviceGroupProperties[i], inst->phys_dev_groups_tramp[i],
+                   sizeof(VkPhysicalDeviceGroupPropertiesKHR));
+        }
+    }
+
+    *pPhysicalDeviceGroupCount = count;
+
+out:
+
+    loader_platform_thread_unlock_mutex(&loader_lock);
+    return res;
+}
+
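+// The physical-device "2" queries below prefer the VK_KHR_get_physical_device_properties2
+// entry points when the application enabled that extension, and otherwise fall back to the
+// core Vulkan 1.1 entry points in the instance dispatch table.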
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 *pFeatures) {
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    const VkLayerInstanceDispatchTable *disp = loader_get_instance_layer_dispatch(physicalDevice);
+    const struct loader_instance *inst = ((struct loader_physical_device_tramp*) physicalDevice)->this_instance;
+
+    if (inst != NULL && inst->enabled_known_extensions.khr_get_physical_device_properties2) {
+        disp->GetPhysicalDeviceFeatures2KHR(unwrapped_phys_dev, pFeatures);
+    } else {
+        disp->GetPhysicalDeviceFeatures2(unwrapped_phys_dev, pFeatures);
+    }
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice,
+                                                           VkPhysicalDeviceProperties2 *pProperties) {
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    const VkLayerInstanceDispatchTable *disp = loader_get_instance_layer_dispatch(physicalDevice);
+    const struct loader_instance *inst = ((struct loader_physical_device_tramp*) physicalDevice)->this_instance;
+
+    if (inst != NULL && inst->enabled_known_extensions.khr_get_physical_device_properties2) {
+        disp->GetPhysicalDeviceProperties2KHR(unwrapped_phys_dev, pProperties);
+    } else {
+        disp->GetPhysicalDeviceProperties2(unwrapped_phys_dev, pProperties);
+    }
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2(VkPhysicalDevice physicalDevice, VkFormat format,
+                                                                 VkFormatProperties2 *pFormatProperties) {
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    const VkLayerInstanceDispatchTable *disp = loader_get_instance_layer_dispatch(physicalDevice);
+    const struct loader_instance *inst = ((struct loader_physical_device_tramp*) physicalDevice)->this_instance;
+
+    if (inst != NULL && inst->enabled_known_extensions.khr_get_physical_device_properties2) {
+        disp->GetPhysicalDeviceFormatProperties2KHR(unwrapped_phys_dev, format, pFormatProperties);
+    } else {
+        disp->GetPhysicalDeviceFormatProperties2(unwrapped_phys_dev, format, pFormatProperties);
+    }
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2(
+    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2 *pImageFormatInfo,
+    VkImageFormatProperties2 *pImageFormatProperties) {
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    const VkLayerInstanceDispatchTable *disp = loader_get_instance_layer_dispatch(physicalDevice);
+    const struct loader_instance *inst = ((struct loader_physical_device_tramp*) physicalDevice)->this_instance;
+
+    if (inst != NULL && inst->enabled_known_extensions.khr_get_physical_device_properties2) {
+        return disp->GetPhysicalDeviceImageFormatProperties2KHR(unwrapped_phys_dev, pImageFormatInfo, pImageFormatProperties);
+    } else {
+        return disp->GetPhysicalDeviceImageFormatProperties2(unwrapped_phys_dev, pImageFormatInfo, pImageFormatProperties);
+    }
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice,
+                                                                      uint32_t *pQueueFamilyPropertyCount,
+                                                                      VkQueueFamilyProperties2 *pQueueFamilyProperties) {
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    const VkLayerInstanceDispatchTable *disp = loader_get_instance_layer_dispatch(physicalDevice);
+    const struct loader_instance *inst = ((struct loader_physical_device_tramp*) physicalDevice)->this_instance;
+
+    if (inst != NULL && inst->enabled_known_extensions.khr_get_physical_device_properties2) {
+        disp->GetPhysicalDeviceQueueFamilyProperties2KHR(unwrapped_phys_dev, pQueueFamilyPropertyCount, pQueueFamilyProperties);
+    } else {
+        disp->GetPhysicalDeviceQueueFamilyProperties2(unwrapped_phys_dev, pQueueFamilyPropertyCount, pQueueFamilyProperties);
+    }
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice,
+                                                                 VkPhysicalDeviceMemoryProperties2 *pMemoryProperties) {
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    const VkLayerInstanceDispatchTable *disp = loader_get_instance_layer_dispatch(physicalDevice);
+    const struct loader_instance *inst = ((struct loader_physical_device_tramp*) physicalDevice)->this_instance;
+
+    if (inst != NULL && inst->enabled_known_extensions.khr_get_physical_device_properties2) {
+        disp->GetPhysicalDeviceMemoryProperties2KHR(unwrapped_phys_dev, pMemoryProperties);
+    } else {
+        disp->GetPhysicalDeviceMemoryProperties2(unwrapped_phys_dev, pMemoryProperties);
+    }
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2(
+    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2 *pFormatInfo, uint32_t *pPropertyCount,
+    VkSparseImageFormatProperties2 *pProperties) {
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    const VkLayerInstanceDispatchTable *disp = loader_get_instance_layer_dispatch(physicalDevice);
+    const struct loader_instance *inst = ((struct loader_physical_device_tramp*) physicalDevice)->this_instance;
+
+    if (inst != NULL && inst->enabled_known_extensions.khr_get_physical_device_properties2) {
+        disp->GetPhysicalDeviceSparseImageFormatProperties2KHR(unwrapped_phys_dev, pFormatInfo, pPropertyCount, pProperties);
+    } else {
+        disp->GetPhysicalDeviceSparseImageFormatProperties2(unwrapped_phys_dev, pFormatInfo, pPropertyCount, pProperties);
+    }
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferProperties(
+    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo *pExternalBufferInfo,
+    VkExternalBufferProperties *pExternalBufferProperties) {
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    const VkLayerInstanceDispatchTable *disp = loader_get_instance_layer_dispatch(physicalDevice);
+    const struct loader_instance *inst = ((struct loader_physical_device_tramp*) physicalDevice)->this_instance;
+
+    if (inst != NULL && inst->enabled_known_extensions.khr_external_memory_capabilities) {
+        disp->GetPhysicalDeviceExternalBufferPropertiesKHR(unwrapped_phys_dev, pExternalBufferInfo, pExternalBufferProperties);
+    } else {
+        disp->GetPhysicalDeviceExternalBufferProperties(unwrapped_phys_dev, pExternalBufferInfo, pExternalBufferProperties);
+    }
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphoreProperties(
+    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfoKHR *pExternalSemaphoreInfo,
+    VkExternalSemaphoreProperties *pExternalSemaphoreProperties) {
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    const VkLayerInstanceDispatchTable *disp = loader_get_instance_layer_dispatch(physicalDevice);
+    const struct loader_instance *inst = ((struct loader_physical_device_tramp*) physicalDevice)->this_instance;
+
+    if (inst != NULL && inst->enabled_known_extensions.khr_external_semaphore_capabilities) {
+        disp->GetPhysicalDeviceExternalSemaphorePropertiesKHR(unwrapped_phys_dev, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
+    } else {
+        disp->GetPhysicalDeviceExternalSemaphoreProperties(unwrapped_phys_dev, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
+    }
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFenceProperties(
+    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo *pExternalFenceInfo,
+    VkExternalFenceProperties *pExternalFenceProperties) {
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    const VkLayerInstanceDispatchTable *disp = loader_get_instance_layer_dispatch(physicalDevice);
+    const struct loader_instance *inst = ((struct loader_physical_device_tramp*) physicalDevice)->this_instance;
+
+    if (inst != NULL && inst->enabled_known_extensions.khr_external_fence_capabilities) {
+        disp->GetPhysicalDeviceExternalFencePropertiesKHR(unwrapped_phys_dev, pExternalFenceInfo, pExternalFenceProperties);
+    } else {
+        disp->GetPhysicalDeviceExternalFenceProperties(unwrapped_phys_dev, pExternalFenceInfo, pExternalFenceProperties);
+    }
+}
+
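+// Device-level core 1.1 trampolines: these dispatch directly through the device dispatch
+// table that the loader populated at device-creation time, with no KHR fallback needed.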
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->BindBufferMemory2(device, bindInfoCount, pBindInfos);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->BindImageMemory2(device, bindInfoCount, pBindInfos);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeatures(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    disp->GetDeviceGroupPeerMemoryFeatures(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMask(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdSetDeviceMask(commandBuffer, deviceMask);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBase(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+    disp->CmdDispatchBase(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    disp->GetImageMemoryRequirements2(device, pInfo, pMemoryRequirements);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    disp->GetBufferMemoryRequirements2(device, pInfo, pMemoryRequirements);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    disp->GetImageSparseMemoryRequirements2(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkTrimCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    disp->TrimCommandPool(device, commandPool, flags);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    disp->GetDeviceQueue2(device, pQueueInfo, pQueue);
+    if (*pQueue != VK_NULL_HANDLE)
+    {
+        loader_set_dispatch(*pQueue, disp);
+    }
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversion(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->CreateSamplerYcbcrConversion(device, pCreateInfo, pAllocator, pYcbcrConversion);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversion(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    disp->DestroySamplerYcbcrConversion(device, ycbcrConversion, pAllocator);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupport(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    disp->GetDescriptorSetLayoutSupport(device, pCreateInfo, pSupport);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+vkCreateDescriptorUpdateTemplate(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
+                                 const VkAllocationCallbacks *pAllocator, VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->CreateDescriptorUpdateTemplate(device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplate(VkDevice device,
+                                                                           VkDescriptorUpdateTemplate descriptorUpdateTemplate,
+                                                                           const VkAllocationCallbacks *pAllocator) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    disp->DestroyDescriptorUpdateTemplate(device, descriptorUpdateTemplate, pAllocator);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
+                                                                           VkDescriptorUpdateTemplate descriptorUpdateTemplate,
+                                                                           const void *pData) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    disp->UpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate, pData);
+}
diff --git a/src/third_party/vulkan-loader/src/loader/unknown_ext_chain.c b/src/third_party/vulkan-loader/src/loader/unknown_ext_chain.c
new file mode 100644
index 0000000..1c8560d
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/unknown_ext_chain.c
@@ -0,0 +1,819 @@
+/*
+ * Copyright (c) 2017 The Khronos Group Inc.
+ * Copyright (c) 2017 Valve Corporation
+ * Copyright (c) 2017 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Jon Ashburn <jon@lunarg.com>
+ * Author: Lenny Komow <lenny@lunarg.com>
+ */
+
+// This code is used to pass on physical device extensions through the call chain. It must do this without creating a stack frame,
+// because the actual parameters of the call are not known. Since the first parameter is known to be a VkPhysicalDevice, it can
+// unwrap the physical device, overwriting the wrapped device, and then jump to the next function in the call chain. This code
+// attempts to accomplish this by relying on tail-call optimizations, but there is no guarantee that this will work. As a result,
+// this code is only compiled on systems where an assembly alternative has not been written.
+
+#include "vk_loader_platform.h"
+#include "loader.h"
+
+#if defined(__GNUC__) && !defined(__clang__)
+#pragma GCC optimize(3)  // force gcc to use tail-calls
+#endif
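
The comment above describes the strategy of this C fallback: forward a call whose full argument list is unknown by looking up a slot in the handle's dispatch table and relying on the compiler to emit a tail call, so the untouched arguments flow through unchanged. As an editorial illustration only (not part of the patch, and using hypothetical names rather than the loader's real types), a stripped-down model of that pattern looks like this:

    /* Minimal sketch of the trampoline pattern, assuming a dispatchable handle
     * whose first field points at a table of function pointers. The real code
     * in this file additionally rewrites the first argument to the unwrapped
     * VkPhysicalDevice before forwarding. */
    #include <stdio.h>

    typedef void (*ext_fn)(void *handle);

    struct dispatch_table {          /* hypothetical stand-in for the loader's table */
        ext_fn ext[2];
    };

    struct handle {                  /* dispatchable object: first field is the table */
        const struct dispatch_table *table;
    };

    static void real_ext0(void *h) { (void)h; printf("extension slot 0 called\n"); }

    /* The trampoline keeps no local state; with optimization the forwarded call
     * can be emitted as a jmp (a tail call), leaving any extra args untouched. */
    static void tramp0(void *h) {
        const struct handle *obj = (const struct handle *)h;
        obj->table->ext[0](h);
    }

    int main(void) {
        struct dispatch_table t = { { real_ext0, NULL } };
        struct handle obj = { &t };
        tramp0(&obj);                /* prints "extension slot 0 called" */
        return 0;
    }

In the file below, this shape is stamped out 250 times per macro family, one function per extension slot.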
+
+// Trampoline function macro for unknown physical device extension command.
+#define PhysDevExtTramp(num)                                                               \
+    VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTramp##num(VkPhysicalDevice physical_device) {  \
+        const struct loader_instance_dispatch_table *disp;                                 \
+        disp = loader_get_instance_dispatch(physical_device);                              \
+        disp->phys_dev_ext[num](loader_unwrap_physical_device(physical_device));           \
+    }
+
+// Terminator function macro for unknown physical device extension command.
+#define PhysDevExtTermin(num)                                                                                         \
+    VKAPI_ATTR void VKAPI_CALL vkPhysDevExtTermin##num(VkPhysicalDevice physical_device) {                            \
+        struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physical_device;    \
+        struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;                                              \
+        struct loader_instance *inst = (struct loader_instance *)icd_term->this_instance;                             \
+        if (NULL == icd_term->phys_dev_ext[num]) {                                                                     \
+            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, "Extension %s not supported for this physical device",  \
+                       inst->phys_dev_ext_disp_hash[num].func_name);                                                   \
+        }                                                                                                              \
+        icd_term->phys_dev_ext[num](phys_dev_term->phys_dev);                                                          \
+    }
+
+// Trampoline function macro for unknown device extension command.
+#define DevExtTramp(num)                                                          \
+    VKAPI_ATTR void VKAPI_CALL vkdev_ext##num(VkDevice device) {                  \
+        const struct loader_dev_dispatch_table *disp;                             \
+        disp = loader_get_dev_dispatch(device);                                   \
+        disp->ext_dispatch.dev_ext[num](device);                                  \
+    }
+
+
+// Instantiations of the trampoline
+PhysDevExtTramp(0)
+PhysDevExtTramp(1)
+PhysDevExtTramp(2)
+PhysDevExtTramp(3)
+PhysDevExtTramp(4)
+PhysDevExtTramp(5)
+PhysDevExtTramp(6)
+PhysDevExtTramp(7)
+PhysDevExtTramp(8)
+PhysDevExtTramp(9)
+PhysDevExtTramp(10)
+PhysDevExtTramp(11)
+PhysDevExtTramp(12)
+PhysDevExtTramp(13)
+PhysDevExtTramp(14)
+PhysDevExtTramp(15)
+PhysDevExtTramp(16)
+PhysDevExtTramp(17)
+PhysDevExtTramp(18)
+PhysDevExtTramp(19)
+PhysDevExtTramp(20)
+PhysDevExtTramp(21)
+PhysDevExtTramp(22)
+PhysDevExtTramp(23)
+PhysDevExtTramp(24)
+PhysDevExtTramp(25)
+PhysDevExtTramp(26)
+PhysDevExtTramp(27)
+PhysDevExtTramp(28)
+PhysDevExtTramp(29)
+PhysDevExtTramp(30)
+PhysDevExtTramp(31)
+PhysDevExtTramp(32)
+PhysDevExtTramp(33)
+PhysDevExtTramp(34)
+PhysDevExtTramp(35)
+PhysDevExtTramp(36)
+PhysDevExtTramp(37)
+PhysDevExtTramp(38)
+PhysDevExtTramp(39)
+PhysDevExtTramp(40)
+PhysDevExtTramp(41)
+PhysDevExtTramp(42)
+PhysDevExtTramp(43)
+PhysDevExtTramp(44)
+PhysDevExtTramp(45)
+PhysDevExtTramp(46)
+PhysDevExtTramp(47)
+PhysDevExtTramp(48)
+PhysDevExtTramp(49)
+PhysDevExtTramp(50)
+PhysDevExtTramp(51)
+PhysDevExtTramp(52)
+PhysDevExtTramp(53)
+PhysDevExtTramp(54)
+PhysDevExtTramp(55)
+PhysDevExtTramp(56)
+PhysDevExtTramp(57)
+PhysDevExtTramp(58)
+PhysDevExtTramp(59)
+PhysDevExtTramp(60)
+PhysDevExtTramp(61)
+PhysDevExtTramp(62)
+PhysDevExtTramp(63)
+PhysDevExtTramp(64)
+PhysDevExtTramp(65)
+PhysDevExtTramp(66)
+PhysDevExtTramp(67)
+PhysDevExtTramp(68)
+PhysDevExtTramp(69)
+PhysDevExtTramp(70)
+PhysDevExtTramp(71)
+PhysDevExtTramp(72)
+PhysDevExtTramp(73)
+PhysDevExtTramp(74)
+PhysDevExtTramp(75)
+PhysDevExtTramp(76)
+PhysDevExtTramp(77)
+PhysDevExtTramp(78)
+PhysDevExtTramp(79)
+PhysDevExtTramp(80)
+PhysDevExtTramp(81)
+PhysDevExtTramp(82)
+PhysDevExtTramp(83)
+PhysDevExtTramp(84)
+PhysDevExtTramp(85)
+PhysDevExtTramp(86)
+PhysDevExtTramp(87)
+PhysDevExtTramp(88)
+PhysDevExtTramp(89)
+PhysDevExtTramp(90)
+PhysDevExtTramp(91)
+PhysDevExtTramp(92)
+PhysDevExtTramp(93)
+PhysDevExtTramp(94)
+PhysDevExtTramp(95)
+PhysDevExtTramp(96)
+PhysDevExtTramp(97)
+PhysDevExtTramp(98)
+PhysDevExtTramp(99)
+PhysDevExtTramp(100)
+PhysDevExtTramp(101)
+PhysDevExtTramp(102)
+PhysDevExtTramp(103)
+PhysDevExtTramp(104)
+PhysDevExtTramp(105)
+PhysDevExtTramp(106)
+PhysDevExtTramp(107)
+PhysDevExtTramp(108)
+PhysDevExtTramp(109)
+PhysDevExtTramp(110)
+PhysDevExtTramp(111)
+PhysDevExtTramp(112)
+PhysDevExtTramp(113)
+PhysDevExtTramp(114)
+PhysDevExtTramp(115)
+PhysDevExtTramp(116)
+PhysDevExtTramp(117)
+PhysDevExtTramp(118)
+PhysDevExtTramp(119)
+PhysDevExtTramp(120)
+PhysDevExtTramp(121)
+PhysDevExtTramp(122)
+PhysDevExtTramp(123)
+PhysDevExtTramp(124)
+PhysDevExtTramp(125)
+PhysDevExtTramp(126)
+PhysDevExtTramp(127)
+PhysDevExtTramp(128)
+PhysDevExtTramp(129)
+PhysDevExtTramp(130)
+PhysDevExtTramp(131)
+PhysDevExtTramp(132)
+PhysDevExtTramp(133)
+PhysDevExtTramp(134)
+PhysDevExtTramp(135)
+PhysDevExtTramp(136)
+PhysDevExtTramp(137)
+PhysDevExtTramp(138)
+PhysDevExtTramp(139)
+PhysDevExtTramp(140)
+PhysDevExtTramp(141)
+PhysDevExtTramp(142)
+PhysDevExtTramp(143)
+PhysDevExtTramp(144)
+PhysDevExtTramp(145)
+PhysDevExtTramp(146)
+PhysDevExtTramp(147)
+PhysDevExtTramp(148)
+PhysDevExtTramp(149)
+PhysDevExtTramp(150)
+PhysDevExtTramp(151)
+PhysDevExtTramp(152)
+PhysDevExtTramp(153)
+PhysDevExtTramp(154)
+PhysDevExtTramp(155)
+PhysDevExtTramp(156)
+PhysDevExtTramp(157)
+PhysDevExtTramp(158)
+PhysDevExtTramp(159)
+PhysDevExtTramp(160)
+PhysDevExtTramp(161)
+PhysDevExtTramp(162)
+PhysDevExtTramp(163)
+PhysDevExtTramp(164)
+PhysDevExtTramp(165)
+PhysDevExtTramp(166)
+PhysDevExtTramp(167)
+PhysDevExtTramp(168)
+PhysDevExtTramp(169)
+PhysDevExtTramp(170)
+PhysDevExtTramp(171)
+PhysDevExtTramp(172)
+PhysDevExtTramp(173)
+PhysDevExtTramp(174)
+PhysDevExtTramp(175)
+PhysDevExtTramp(176)
+PhysDevExtTramp(177)
+PhysDevExtTramp(178)
+PhysDevExtTramp(179)
+PhysDevExtTramp(180)
+PhysDevExtTramp(181)
+PhysDevExtTramp(182)
+PhysDevExtTramp(183)
+PhysDevExtTramp(184)
+PhysDevExtTramp(185)
+PhysDevExtTramp(186)
+PhysDevExtTramp(187)
+PhysDevExtTramp(188)
+PhysDevExtTramp(189)
+PhysDevExtTramp(190)
+PhysDevExtTramp(191)
+PhysDevExtTramp(192)
+PhysDevExtTramp(193)
+PhysDevExtTramp(194)
+PhysDevExtTramp(195)
+PhysDevExtTramp(196)
+PhysDevExtTramp(197)
+PhysDevExtTramp(198)
+PhysDevExtTramp(199)
+PhysDevExtTramp(200)
+PhysDevExtTramp(201)
+PhysDevExtTramp(202)
+PhysDevExtTramp(203)
+PhysDevExtTramp(204)
+PhysDevExtTramp(205)
+PhysDevExtTramp(206)
+PhysDevExtTramp(207)
+PhysDevExtTramp(208)
+PhysDevExtTramp(209)
+PhysDevExtTramp(210)
+PhysDevExtTramp(211)
+PhysDevExtTramp(212)
+PhysDevExtTramp(213)
+PhysDevExtTramp(214)
+PhysDevExtTramp(215)
+PhysDevExtTramp(216)
+PhysDevExtTramp(217)
+PhysDevExtTramp(218)
+PhysDevExtTramp(219)
+PhysDevExtTramp(220)
+PhysDevExtTramp(221)
+PhysDevExtTramp(222)
+PhysDevExtTramp(223)
+PhysDevExtTramp(224)
+PhysDevExtTramp(225)
+PhysDevExtTramp(226)
+PhysDevExtTramp(227)
+PhysDevExtTramp(228)
+PhysDevExtTramp(229)
+PhysDevExtTramp(230)
+PhysDevExtTramp(231)
+PhysDevExtTramp(232)
+PhysDevExtTramp(233)
+PhysDevExtTramp(234)
+PhysDevExtTramp(235)
+PhysDevExtTramp(236)
+PhysDevExtTramp(237)
+PhysDevExtTramp(238)
+PhysDevExtTramp(239)
+PhysDevExtTramp(240)
+PhysDevExtTramp(241)
+PhysDevExtTramp(242)
+PhysDevExtTramp(243)
+PhysDevExtTramp(244)
+PhysDevExtTramp(245)
+PhysDevExtTramp(246)
+PhysDevExtTramp(247)
+PhysDevExtTramp(248)
+PhysDevExtTramp(249)
+
+// Instantiations of the terminator
+PhysDevExtTermin(0)
+PhysDevExtTermin(1)
+PhysDevExtTermin(2)
+PhysDevExtTermin(3)
+PhysDevExtTermin(4)
+PhysDevExtTermin(5)
+PhysDevExtTermin(6)
+PhysDevExtTermin(7)
+PhysDevExtTermin(8)
+PhysDevExtTermin(9)
+PhysDevExtTermin(10)
+PhysDevExtTermin(11)
+PhysDevExtTermin(12)
+PhysDevExtTermin(13)
+PhysDevExtTermin(14)
+PhysDevExtTermin(15)
+PhysDevExtTermin(16)
+PhysDevExtTermin(17)
+PhysDevExtTermin(18)
+PhysDevExtTermin(19)
+PhysDevExtTermin(20)
+PhysDevExtTermin(21)
+PhysDevExtTermin(22)
+PhysDevExtTermin(23)
+PhysDevExtTermin(24)
+PhysDevExtTermin(25)
+PhysDevExtTermin(26)
+PhysDevExtTermin(27)
+PhysDevExtTermin(28)
+PhysDevExtTermin(29)
+PhysDevExtTermin(30)
+PhysDevExtTermin(31)
+PhysDevExtTermin(32)
+PhysDevExtTermin(33)
+PhysDevExtTermin(34)
+PhysDevExtTermin(35)
+PhysDevExtTermin(36)
+PhysDevExtTermin(37)
+PhysDevExtTermin(38)
+PhysDevExtTermin(39)
+PhysDevExtTermin(40)
+PhysDevExtTermin(41)
+PhysDevExtTermin(42)
+PhysDevExtTermin(43)
+PhysDevExtTermin(44)
+PhysDevExtTermin(45)
+PhysDevExtTermin(46)
+PhysDevExtTermin(47)
+PhysDevExtTermin(48)
+PhysDevExtTermin(49)
+PhysDevExtTermin(50)
+PhysDevExtTermin(51)
+PhysDevExtTermin(52)
+PhysDevExtTermin(53)
+PhysDevExtTermin(54)
+PhysDevExtTermin(55)
+PhysDevExtTermin(56)
+PhysDevExtTermin(57)
+PhysDevExtTermin(58)
+PhysDevExtTermin(59)
+PhysDevExtTermin(60)
+PhysDevExtTermin(61)
+PhysDevExtTermin(62)
+PhysDevExtTermin(63)
+PhysDevExtTermin(64)
+PhysDevExtTermin(65)
+PhysDevExtTermin(66)
+PhysDevExtTermin(67)
+PhysDevExtTermin(68)
+PhysDevExtTermin(69)
+PhysDevExtTermin(70)
+PhysDevExtTermin(71)
+PhysDevExtTermin(72)
+PhysDevExtTermin(73)
+PhysDevExtTermin(74)
+PhysDevExtTermin(75)
+PhysDevExtTermin(76)
+PhysDevExtTermin(77)
+PhysDevExtTermin(78)
+PhysDevExtTermin(79)
+PhysDevExtTermin(80)
+PhysDevExtTermin(81)
+PhysDevExtTermin(82)
+PhysDevExtTermin(83)
+PhysDevExtTermin(84)
+PhysDevExtTermin(85)
+PhysDevExtTermin(86)
+PhysDevExtTermin(87)
+PhysDevExtTermin(88)
+PhysDevExtTermin(89)
+PhysDevExtTermin(90)
+PhysDevExtTermin(91)
+PhysDevExtTermin(92)
+PhysDevExtTermin(93)
+PhysDevExtTermin(94)
+PhysDevExtTermin(95)
+PhysDevExtTermin(96)
+PhysDevExtTermin(97)
+PhysDevExtTermin(98)
+PhysDevExtTermin(99)
+PhysDevExtTermin(100)
+PhysDevExtTermin(101)
+PhysDevExtTermin(102)
+PhysDevExtTermin(103)
+PhysDevExtTermin(104)
+PhysDevExtTermin(105)
+PhysDevExtTermin(106)
+PhysDevExtTermin(107)
+PhysDevExtTermin(108)
+PhysDevExtTermin(109)
+PhysDevExtTermin(110)
+PhysDevExtTermin(111)
+PhysDevExtTermin(112)
+PhysDevExtTermin(113)
+PhysDevExtTermin(114)
+PhysDevExtTermin(115)
+PhysDevExtTermin(116)
+PhysDevExtTermin(117)
+PhysDevExtTermin(118)
+PhysDevExtTermin(119)
+PhysDevExtTermin(120)
+PhysDevExtTermin(121)
+PhysDevExtTermin(122)
+PhysDevExtTermin(123)
+PhysDevExtTermin(124)
+PhysDevExtTermin(125)
+PhysDevExtTermin(126)
+PhysDevExtTermin(127)
+PhysDevExtTermin(128)
+PhysDevExtTermin(129)
+PhysDevExtTermin(130)
+PhysDevExtTermin(131)
+PhysDevExtTermin(132)
+PhysDevExtTermin(133)
+PhysDevExtTermin(134)
+PhysDevExtTermin(135)
+PhysDevExtTermin(136)
+PhysDevExtTermin(137)
+PhysDevExtTermin(138)
+PhysDevExtTermin(139)
+PhysDevExtTermin(140)
+PhysDevExtTermin(141)
+PhysDevExtTermin(142)
+PhysDevExtTermin(143)
+PhysDevExtTermin(144)
+PhysDevExtTermin(145)
+PhysDevExtTermin(146)
+PhysDevExtTermin(147)
+PhysDevExtTermin(148)
+PhysDevExtTermin(149)
+PhysDevExtTermin(150)
+PhysDevExtTermin(151)
+PhysDevExtTermin(152)
+PhysDevExtTermin(153)
+PhysDevExtTermin(154)
+PhysDevExtTermin(155)
+PhysDevExtTermin(156)
+PhysDevExtTermin(157)
+PhysDevExtTermin(158)
+PhysDevExtTermin(159)
+PhysDevExtTermin(160)
+PhysDevExtTermin(161)
+PhysDevExtTermin(162)
+PhysDevExtTermin(163)
+PhysDevExtTermin(164)
+PhysDevExtTermin(165)
+PhysDevExtTermin(166)
+PhysDevExtTermin(167)
+PhysDevExtTermin(168)
+PhysDevExtTermin(169)
+PhysDevExtTermin(170)
+PhysDevExtTermin(171)
+PhysDevExtTermin(172)
+PhysDevExtTermin(173)
+PhysDevExtTermin(174)
+PhysDevExtTermin(175)
+PhysDevExtTermin(176)
+PhysDevExtTermin(177)
+PhysDevExtTermin(178)
+PhysDevExtTermin(179)
+PhysDevExtTermin(180)
+PhysDevExtTermin(181)
+PhysDevExtTermin(182)
+PhysDevExtTermin(183)
+PhysDevExtTermin(184)
+PhysDevExtTermin(185)
+PhysDevExtTermin(186)
+PhysDevExtTermin(187)
+PhysDevExtTermin(188)
+PhysDevExtTermin(189)
+PhysDevExtTermin(190)
+PhysDevExtTermin(191)
+PhysDevExtTermin(192)
+PhysDevExtTermin(193)
+PhysDevExtTermin(194)
+PhysDevExtTermin(195)
+PhysDevExtTermin(196)
+PhysDevExtTermin(197)
+PhysDevExtTermin(198)
+PhysDevExtTermin(199)
+PhysDevExtTermin(200)
+PhysDevExtTermin(201)
+PhysDevExtTermin(202)
+PhysDevExtTermin(203)
+PhysDevExtTermin(204)
+PhysDevExtTermin(205)
+PhysDevExtTermin(206)
+PhysDevExtTermin(207)
+PhysDevExtTermin(208)
+PhysDevExtTermin(209)
+PhysDevExtTermin(210)
+PhysDevExtTermin(211)
+PhysDevExtTermin(212)
+PhysDevExtTermin(213)
+PhysDevExtTermin(214)
+PhysDevExtTermin(215)
+PhysDevExtTermin(216)
+PhysDevExtTermin(217)
+PhysDevExtTermin(218)
+PhysDevExtTermin(219)
+PhysDevExtTermin(220)
+PhysDevExtTermin(221)
+PhysDevExtTermin(222)
+PhysDevExtTermin(223)
+PhysDevExtTermin(224)
+PhysDevExtTermin(225)
+PhysDevExtTermin(226)
+PhysDevExtTermin(227)
+PhysDevExtTermin(228)
+PhysDevExtTermin(229)
+PhysDevExtTermin(230)
+PhysDevExtTermin(231)
+PhysDevExtTermin(232)
+PhysDevExtTermin(233)
+PhysDevExtTermin(234)
+PhysDevExtTermin(235)
+PhysDevExtTermin(236)
+PhysDevExtTermin(237)
+PhysDevExtTermin(238)
+PhysDevExtTermin(239)
+PhysDevExtTermin(240)
+PhysDevExtTermin(241)
+PhysDevExtTermin(242)
+PhysDevExtTermin(243)
+PhysDevExtTermin(244)
+PhysDevExtTermin(245)
+PhysDevExtTermin(246)
+PhysDevExtTermin(247)
+PhysDevExtTermin(248)
+PhysDevExtTermin(249)
+
+// Instantiations of the device trampoline
+DevExtTramp(0)
+DevExtTramp(1)
+DevExtTramp(2)
+DevExtTramp(3)
+DevExtTramp(4)
+DevExtTramp(5)
+DevExtTramp(6)
+DevExtTramp(7)
+DevExtTramp(8)
+DevExtTramp(9)
+DevExtTramp(10)
+DevExtTramp(11)
+DevExtTramp(12)
+DevExtTramp(13)
+DevExtTramp(14)
+DevExtTramp(15)
+DevExtTramp(16)
+DevExtTramp(17)
+DevExtTramp(18)
+DevExtTramp(19)
+DevExtTramp(20)
+DevExtTramp(21)
+DevExtTramp(22)
+DevExtTramp(23)
+DevExtTramp(24)
+DevExtTramp(25)
+DevExtTramp(26)
+DevExtTramp(27)
+DevExtTramp(28)
+DevExtTramp(29)
+DevExtTramp(30)
+DevExtTramp(31)
+DevExtTramp(32)
+DevExtTramp(33)
+DevExtTramp(34)
+DevExtTramp(35)
+DevExtTramp(36)
+DevExtTramp(37)
+DevExtTramp(38)
+DevExtTramp(39)
+DevExtTramp(40)
+DevExtTramp(41)
+DevExtTramp(42)
+DevExtTramp(43)
+DevExtTramp(44)
+DevExtTramp(45)
+DevExtTramp(46)
+DevExtTramp(47)
+DevExtTramp(48)
+DevExtTramp(49)
+DevExtTramp(50)
+DevExtTramp(51)
+DevExtTramp(52)
+DevExtTramp(53)
+DevExtTramp(54)
+DevExtTramp(55)
+DevExtTramp(56)
+DevExtTramp(57)
+DevExtTramp(58)
+DevExtTramp(59)
+DevExtTramp(60)
+DevExtTramp(61)
+DevExtTramp(62)
+DevExtTramp(63)
+DevExtTramp(64)
+DevExtTramp(65)
+DevExtTramp(66)
+DevExtTramp(67)
+DevExtTramp(68)
+DevExtTramp(69)
+DevExtTramp(70)
+DevExtTramp(71)
+DevExtTramp(72)
+DevExtTramp(73)
+DevExtTramp(74)
+DevExtTramp(75)
+DevExtTramp(76)
+DevExtTramp(77)
+DevExtTramp(78)
+DevExtTramp(79)
+DevExtTramp(80)
+DevExtTramp(81)
+DevExtTramp(82)
+DevExtTramp(83)
+DevExtTramp(84)
+DevExtTramp(85)
+DevExtTramp(86)
+DevExtTramp(87)
+DevExtTramp(88)
+DevExtTramp(89)
+DevExtTramp(90)
+DevExtTramp(91)
+DevExtTramp(92)
+DevExtTramp(93)
+DevExtTramp(94)
+DevExtTramp(95)
+DevExtTramp(96)
+DevExtTramp(97)
+DevExtTramp(98)
+DevExtTramp(99)
+DevExtTramp(100)
+DevExtTramp(101)
+DevExtTramp(102)
+DevExtTramp(103)
+DevExtTramp(104)
+DevExtTramp(105)
+DevExtTramp(106)
+DevExtTramp(107)
+DevExtTramp(108)
+DevExtTramp(109)
+DevExtTramp(110)
+DevExtTramp(111)
+DevExtTramp(112)
+DevExtTramp(113)
+DevExtTramp(114)
+DevExtTramp(115)
+DevExtTramp(116)
+DevExtTramp(117)
+DevExtTramp(118)
+DevExtTramp(119)
+DevExtTramp(120)
+DevExtTramp(121)
+DevExtTramp(122)
+DevExtTramp(123)
+DevExtTramp(124)
+DevExtTramp(125)
+DevExtTramp(126)
+DevExtTramp(127)
+DevExtTramp(128)
+DevExtTramp(129)
+DevExtTramp(130)
+DevExtTramp(131)
+DevExtTramp(132)
+DevExtTramp(133)
+DevExtTramp(134)
+DevExtTramp(135)
+DevExtTramp(136)
+DevExtTramp(137)
+DevExtTramp(138)
+DevExtTramp(139)
+DevExtTramp(140)
+DevExtTramp(141)
+DevExtTramp(142)
+DevExtTramp(143)
+DevExtTramp(144)
+DevExtTramp(145)
+DevExtTramp(146)
+DevExtTramp(147)
+DevExtTramp(148)
+DevExtTramp(149)
+DevExtTramp(150)
+DevExtTramp(151)
+DevExtTramp(152)
+DevExtTramp(153)
+DevExtTramp(154)
+DevExtTramp(155)
+DevExtTramp(156)
+DevExtTramp(157)
+DevExtTramp(158)
+DevExtTramp(159)
+DevExtTramp(160)
+DevExtTramp(161)
+DevExtTramp(162)
+DevExtTramp(163)
+DevExtTramp(164)
+DevExtTramp(165)
+DevExtTramp(166)
+DevExtTramp(167)
+DevExtTramp(168)
+DevExtTramp(169)
+DevExtTramp(170)
+DevExtTramp(171)
+DevExtTramp(172)
+DevExtTramp(173)
+DevExtTramp(174)
+DevExtTramp(175)
+DevExtTramp(176)
+DevExtTramp(177)
+DevExtTramp(178)
+DevExtTramp(179)
+DevExtTramp(180)
+DevExtTramp(181)
+DevExtTramp(182)
+DevExtTramp(183)
+DevExtTramp(184)
+DevExtTramp(185)
+DevExtTramp(186)
+DevExtTramp(187)
+DevExtTramp(188)
+DevExtTramp(189)
+DevExtTramp(190)
+DevExtTramp(191)
+DevExtTramp(192)
+DevExtTramp(193)
+DevExtTramp(194)
+DevExtTramp(195)
+DevExtTramp(196)
+DevExtTramp(197)
+DevExtTramp(198)
+DevExtTramp(199)
+DevExtTramp(200)
+DevExtTramp(201)
+DevExtTramp(202)
+DevExtTramp(203)
+DevExtTramp(204)
+DevExtTramp(205)
+DevExtTramp(206)
+DevExtTramp(207)
+DevExtTramp(208)
+DevExtTramp(209)
+DevExtTramp(210)
+DevExtTramp(211)
+DevExtTramp(212)
+DevExtTramp(213)
+DevExtTramp(214)
+DevExtTramp(215)
+DevExtTramp(216)
+DevExtTramp(217)
+DevExtTramp(218)
+DevExtTramp(219)
+DevExtTramp(220)
+DevExtTramp(221)
+DevExtTramp(222)
+DevExtTramp(223)
+DevExtTramp(224)
+DevExtTramp(225)
+DevExtTramp(226)
+DevExtTramp(227)
+DevExtTramp(228)
+DevExtTramp(229)
+DevExtTramp(230)
+DevExtTramp(231)
+DevExtTramp(232)
+DevExtTramp(233)
+DevExtTramp(234)
+DevExtTramp(235)
+DevExtTramp(236)
+DevExtTramp(237)
+DevExtTramp(238)
+DevExtTramp(239)
+DevExtTramp(240)
+DevExtTramp(241)
+DevExtTramp(242)
+DevExtTramp(243)
+DevExtTramp(244)
+DevExtTramp(245)
+DevExtTramp(246)
+DevExtTramp(247)
+DevExtTramp(248)
+DevExtTramp(249)
diff --git a/src/third_party/vulkan-loader/src/loader/unknown_ext_chain_gas.S b/src/third_party/vulkan-loader/src/loader/unknown_ext_chain_gas.S
new file mode 100644
index 0000000..f847e14
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/unknown_ext_chain_gas.S
@@ -0,0 +1,885 @@
+#
+# Copyright (c) 2017 The Khronos Group Inc.
+# Copyright (c) 2017 Valve Corporation
+# Copyright (c) 2017 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Lenny Komow <lenny@lunarg.com>
+#
+
+# This code is used to pass on device (including physical device) extensions through the call chain. It must do this without
+# creating a stack frame, because the actual parameters of the call are not known. Since the first parameter is known to be a
+# VkPhysicalDevice or a dispatchable object it can unwrap the object, possibly overwriting the wrapped physical device, and then
+# jump to the next function in the call chain
+
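
As a worked example of the dispatch arithmetic used by the macros below (an editorial note, not part of the patch): assuming PTR_SIZE is 8 on a 64-bit build, the entry for extension slot 3 lives PHYS_DEV_OFFSET_INST_DISPATCH + 8 * 3 = PHYS_DEV_OFFSET_INST_DISPATCH + 24 bytes into the instance dispatch table, which is exactly the address computed by jmp [rax + (PHYS_DEV_OFFSET_INST_DISPATCH + (PTR_SIZE * \num))] when \num expands to 3.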
+#ifdef HAVE_CET_H
+#include <cet.h>
+#else
+#define _CET_ENDBR
+#endif
+
+.intel_syntax noprefix
+.include "gen_defines.asm"
+
+.ifdef X86_64
+
+.macro PhysDevExtTramp num
+.global vkPhysDevExtTramp\num
+vkPhysDevExtTramp\num:
+    _CET_ENDBR
+    mov     rax, [rdi]                                                  # Dereference the wrapped VkPhysicalDevice to get the dispatch table in rax
+    mov     rdi, [rdi + PHYS_DEV_OFFSET_PHYS_DEV_TRAMP]                 # Load the unwrapped VkPhysicalDevice into rdi
+    jmp     [rax + (PHYS_DEV_OFFSET_INST_DISPATCH + (PTR_SIZE * \num))] # Jump to the next function in the chain, preserving the args in other registers
+.endm
+
+.macro PhysDevExtTermin num
+.global vkPhysDevExtTermin\num
+vkPhysDevExtTermin\num:
+    _CET_ENDBR
+    mov     rax, [rdi + ICD_TERM_OFFSET_PHYS_DEV_TERM]                          # Store the loader_icd_term* in rax
+    cmp     qword ptr [rax + (DISPATCH_OFFSET_ICD_TERM + (PTR_SIZE * \num))], 0 # Check if the next function in the chain is NULL
+    je      terminError\num                                                     # Go to the error section if it is NULL
+    mov     rdi, [rdi + PHYS_DEV_OFFSET_PHYS_DEV_TERM]                          # Load the unwrapped VkPhysicalDevice into the first arg
+    jmp     [rax + (DISPATCH_OFFSET_ICD_TERM + (PTR_SIZE * \num))]              # Jump to the next function in the chain
+terminError\num:
+    sub     rsp, 56                                                             # Create the stack frame
+    mov     rdi, [rax + INSTANCE_OFFSET_ICD_TERM]                               # Load the loader_instance into rdi (first arg)
+    mov     r8, [rdi + (HASH_OFFSET_INSTANCE + (HASH_SIZE * \num) + FUNC_NAME_OFFSET_HASH)] # Load the func name into r8 (fifth arg)
+    lea     rcx, termin_error_string@GOTPCREL                                   # Load the error string into rcx (fourth arg)
+    xor     edx, edx                                                            # Set rdx to zero (third arg)
+    lea     esi, [rdx + VK_DEBUG_REPORT_ERROR_BIT_EXT]                          # Write the error logging bit to rsi (second arg)
+    call    loader_log                                                          # Log the error message before we crash
+    add     rsp, 56                                                             # Clean up the stack frame
+    mov     rax, 0
+    jmp     rax                                                                 # Crash intentionally by jumping to address zero
+.endm
+
+.macro DevExtTramp num
+.global vkdev_ext\num
+vkdev_ext\num:
+    _CET_ENDBR
+    mov     rax, [rdi]                                                          # Dereference the handle to get the dispatch table
+    jmp     [rax + (EXT_OFFSET_DEVICE_DISPATCH + (PTR_SIZE * \num))]            # Jump to the appropriate call chain
+.endm
+
+.else
+
+.macro PhysDevExtTramp num
+.global vkPhysDevExtTramp\num
+vkPhysDevExtTramp\num:
+    _CET_ENDBR
+    mov     eax, [esp + 4]                              # Load the wrapped VkPhysicalDevice into eax
+    mov     ecx, [eax + PHYS_DEV_OFFSET_PHYS_DEV_TRAMP] # Load the unwrapped VkPhysicalDevice into ecx
+    mov     [esp + 4], ecx                              # Overwrite the wrapped VkPhysicalDevice with the unwrapped one (on the stack)
+    mov     eax, [eax]                                  # Dereference the wrapped VkPhysicalDevice to get the dispatch table in eax
+    jmp     [eax + (PHYS_DEV_OFFSET_INST_DISPATCH + (PTR_SIZE * \num))] # Jump to the next function in the chain, preserving the args on the stack
+.endm
+
+.macro PhysDevExtTermin num
+.global vkPhysDevExtTermin\num
+vkPhysDevExtTermin\num:
+    _CET_ENDBR
+    mov     ecx, [esp + 4]                                                      # Move the wrapped VkPhysicalDevice into ecx
+    mov     eax, [ecx + ICD_TERM_OFFSET_PHYS_DEV_TERM]                          # Store the loader_icd_term* in eax
+    cmp     dword ptr [eax + (DISPATCH_OFFSET_ICD_TERM + (PTR_SIZE * \num))], 0 # Check if the next function in the chain is NULL
+    je      terminError\num                                                     # Go to the error section if it is NULL
+    mov     ecx, [ecx + PHYS_DEV_OFFSET_PHYS_DEV_TERM]                          # Unwrap the VkPhysicalDevice in ecx
+    mov     [esp + 4], ecx                                                      # Copy the unwrapped VkPhysicalDevice into the first arg
+    jmp     [eax + (DISPATCH_OFFSET_ICD_TERM + (PTR_SIZE * \num))]              # Jump to the next function in the chain
+terminError\num:
+    mov     eax, [eax + INSTANCE_OFFSET_ICD_TERM]                               # Load the loader_instance into eax
+    push    [eax + (HASH_OFFSET_INSTANCE + (HASH_SIZE * \num) + FUNC_NAME_OFFSET_HASH)] # Push the func name (fifth arg)
+    push    offset termin_error_string@GOT                                      # Push the error string (fourth arg)
+    push    0                                                                   # Push zero (third arg)
+    push    VK_DEBUG_REPORT_ERROR_BIT_EXT                                       # Push the error logging bit (second arg)
+    push    eax                                                                 # Push the loader_instance (first arg)
+    call    loader_log                                                          # Log the error message before we crash
+    add     esp, 20                                                             # Clean up the args
+    mov     eax, 0
+    jmp     eax                                                                 # Crash intentionally by jumping to address zero
+.endm
+
+.macro DevExtTramp num
+.global vkdev_ext\num
+vkdev_ext\num:
+    _CET_ENDBR
+    mov     eax, [esp + 4]                                                      # Dereference the handle to get the dispatch table
+    jmp     [eax + (EXT_OFFSET_DEVICE_DISPATCH + (PTR_SIZE * \num))]            # Jump to the appropriate call chain
+.endm
+
+.endif
+
+#if defined(__ELF__)
+.section .note.GNU-stack,"",%progbits
+#endif
+
+.data
+
+termin_error_string:
+.string "Extension %s not supported for this physical device"
+
+.text
+
+    PhysDevExtTramp 0
+    PhysDevExtTramp 1
+    PhysDevExtTramp 2
+    PhysDevExtTramp 3
+    PhysDevExtTramp 4
+    PhysDevExtTramp 5
+    PhysDevExtTramp 6
+    PhysDevExtTramp 7
+    PhysDevExtTramp 8
+    PhysDevExtTramp 9
+    PhysDevExtTramp 10
+    PhysDevExtTramp 11
+    PhysDevExtTramp 12
+    PhysDevExtTramp 13
+    PhysDevExtTramp 14
+    PhysDevExtTramp 15
+    PhysDevExtTramp 16
+    PhysDevExtTramp 17
+    PhysDevExtTramp 18
+    PhysDevExtTramp 19
+    PhysDevExtTramp 20
+    PhysDevExtTramp 21
+    PhysDevExtTramp 22
+    PhysDevExtTramp 23
+    PhysDevExtTramp 24
+    PhysDevExtTramp 25
+    PhysDevExtTramp 26
+    PhysDevExtTramp 27
+    PhysDevExtTramp 28
+    PhysDevExtTramp 29
+    PhysDevExtTramp 30
+    PhysDevExtTramp 31
+    PhysDevExtTramp 32
+    PhysDevExtTramp 33
+    PhysDevExtTramp 34
+    PhysDevExtTramp 35
+    PhysDevExtTramp 36
+    PhysDevExtTramp 37
+    PhysDevExtTramp 38
+    PhysDevExtTramp 39
+    PhysDevExtTramp 40
+    PhysDevExtTramp 41
+    PhysDevExtTramp 42
+    PhysDevExtTramp 43
+    PhysDevExtTramp 44
+    PhysDevExtTramp 45
+    PhysDevExtTramp 46
+    PhysDevExtTramp 47
+    PhysDevExtTramp 48
+    PhysDevExtTramp 49
+    PhysDevExtTramp 50
+    PhysDevExtTramp 51
+    PhysDevExtTramp 52
+    PhysDevExtTramp 53
+    PhysDevExtTramp 54
+    PhysDevExtTramp 55
+    PhysDevExtTramp 56
+    PhysDevExtTramp 57
+    PhysDevExtTramp 58
+    PhysDevExtTramp 59
+    PhysDevExtTramp 60
+    PhysDevExtTramp 61
+    PhysDevExtTramp 62
+    PhysDevExtTramp 63
+    PhysDevExtTramp 64
+    PhysDevExtTramp 65
+    PhysDevExtTramp 66
+    PhysDevExtTramp 67
+    PhysDevExtTramp 68
+    PhysDevExtTramp 69
+    PhysDevExtTramp 70
+    PhysDevExtTramp 71
+    PhysDevExtTramp 72
+    PhysDevExtTramp 73
+    PhysDevExtTramp 74
+    PhysDevExtTramp 75
+    PhysDevExtTramp 76
+    PhysDevExtTramp 77
+    PhysDevExtTramp 78
+    PhysDevExtTramp 79
+    PhysDevExtTramp 80
+    PhysDevExtTramp 81
+    PhysDevExtTramp 82
+    PhysDevExtTramp 83
+    PhysDevExtTramp 84
+    PhysDevExtTramp 85
+    PhysDevExtTramp 86
+    PhysDevExtTramp 87
+    PhysDevExtTramp 88
+    PhysDevExtTramp 89
+    PhysDevExtTramp 90
+    PhysDevExtTramp 91
+    PhysDevExtTramp 92
+    PhysDevExtTramp 93
+    PhysDevExtTramp 94
+    PhysDevExtTramp 95
+    PhysDevExtTramp 96
+    PhysDevExtTramp 97
+    PhysDevExtTramp 98
+    PhysDevExtTramp 99
+    PhysDevExtTramp 100
+    PhysDevExtTramp 101
+    PhysDevExtTramp 102
+    PhysDevExtTramp 103
+    PhysDevExtTramp 104
+    PhysDevExtTramp 105
+    PhysDevExtTramp 106
+    PhysDevExtTramp 107
+    PhysDevExtTramp 108
+    PhysDevExtTramp 109
+    PhysDevExtTramp 110
+    PhysDevExtTramp 111
+    PhysDevExtTramp 112
+    PhysDevExtTramp 113
+    PhysDevExtTramp 114
+    PhysDevExtTramp 115
+    PhysDevExtTramp 116
+    PhysDevExtTramp 117
+    PhysDevExtTramp 118
+    PhysDevExtTramp 119
+    PhysDevExtTramp 120
+    PhysDevExtTramp 121
+    PhysDevExtTramp 122
+    PhysDevExtTramp 123
+    PhysDevExtTramp 124
+    PhysDevExtTramp 125
+    PhysDevExtTramp 126
+    PhysDevExtTramp 127
+    PhysDevExtTramp 128
+    PhysDevExtTramp 129
+    PhysDevExtTramp 130
+    PhysDevExtTramp 131
+    PhysDevExtTramp 132
+    PhysDevExtTramp 133
+    PhysDevExtTramp 134
+    PhysDevExtTramp 135
+    PhysDevExtTramp 136
+    PhysDevExtTramp 137
+    PhysDevExtTramp 138
+    PhysDevExtTramp 139
+    PhysDevExtTramp 140
+    PhysDevExtTramp 141
+    PhysDevExtTramp 142
+    PhysDevExtTramp 143
+    PhysDevExtTramp 144
+    PhysDevExtTramp 145
+    PhysDevExtTramp 146
+    PhysDevExtTramp 147
+    PhysDevExtTramp 148
+    PhysDevExtTramp 149
+    PhysDevExtTramp 150
+    PhysDevExtTramp 151
+    PhysDevExtTramp 152
+    PhysDevExtTramp 153
+    PhysDevExtTramp 154
+    PhysDevExtTramp 155
+    PhysDevExtTramp 156
+    PhysDevExtTramp 157
+    PhysDevExtTramp 158
+    PhysDevExtTramp 159
+    PhysDevExtTramp 160
+    PhysDevExtTramp 161
+    PhysDevExtTramp 162
+    PhysDevExtTramp 163
+    PhysDevExtTramp 164
+    PhysDevExtTramp 165
+    PhysDevExtTramp 166
+    PhysDevExtTramp 167
+    PhysDevExtTramp 168
+    PhysDevExtTramp 169
+    PhysDevExtTramp 170
+    PhysDevExtTramp 171
+    PhysDevExtTramp 172
+    PhysDevExtTramp 173
+    PhysDevExtTramp 174
+    PhysDevExtTramp 175
+    PhysDevExtTramp 176
+    PhysDevExtTramp 177
+    PhysDevExtTramp 178
+    PhysDevExtTramp 179
+    PhysDevExtTramp 180
+    PhysDevExtTramp 181
+    PhysDevExtTramp 182
+    PhysDevExtTramp 183
+    PhysDevExtTramp 184
+    PhysDevExtTramp 185
+    PhysDevExtTramp 186
+    PhysDevExtTramp 187
+    PhysDevExtTramp 188
+    PhysDevExtTramp 189
+    PhysDevExtTramp 190
+    PhysDevExtTramp 191
+    PhysDevExtTramp 192
+    PhysDevExtTramp 193
+    PhysDevExtTramp 194
+    PhysDevExtTramp 195
+    PhysDevExtTramp 196
+    PhysDevExtTramp 197
+    PhysDevExtTramp 198
+    PhysDevExtTramp 199
+    PhysDevExtTramp 200
+    PhysDevExtTramp 201
+    PhysDevExtTramp 202
+    PhysDevExtTramp 203
+    PhysDevExtTramp 204
+    PhysDevExtTramp 205
+    PhysDevExtTramp 206
+    PhysDevExtTramp 207
+    PhysDevExtTramp 208
+    PhysDevExtTramp 209
+    PhysDevExtTramp 210
+    PhysDevExtTramp 211
+    PhysDevExtTramp 212
+    PhysDevExtTramp 213
+    PhysDevExtTramp 214
+    PhysDevExtTramp 215
+    PhysDevExtTramp 216
+    PhysDevExtTramp 217
+    PhysDevExtTramp 218
+    PhysDevExtTramp 219
+    PhysDevExtTramp 220
+    PhysDevExtTramp 221
+    PhysDevExtTramp 222
+    PhysDevExtTramp 223
+    PhysDevExtTramp 224
+    PhysDevExtTramp 225
+    PhysDevExtTramp 226
+    PhysDevExtTramp 227
+    PhysDevExtTramp 228
+    PhysDevExtTramp 229
+    PhysDevExtTramp 230
+    PhysDevExtTramp 231
+    PhysDevExtTramp 232
+    PhysDevExtTramp 233
+    PhysDevExtTramp 234
+    PhysDevExtTramp 235
+    PhysDevExtTramp 236
+    PhysDevExtTramp 237
+    PhysDevExtTramp 238
+    PhysDevExtTramp 239
+    PhysDevExtTramp 240
+    PhysDevExtTramp 241
+    PhysDevExtTramp 242
+    PhysDevExtTramp 243
+    PhysDevExtTramp 244
+    PhysDevExtTramp 245
+    PhysDevExtTramp 246
+    PhysDevExtTramp 247
+    PhysDevExtTramp 248
+    PhysDevExtTramp 249
+
+    PhysDevExtTermin 0
+    PhysDevExtTermin 1
+    PhysDevExtTermin 2
+    PhysDevExtTermin 3
+    PhysDevExtTermin 4
+    PhysDevExtTermin 5
+    PhysDevExtTermin 6
+    PhysDevExtTermin 7
+    PhysDevExtTermin 8
+    PhysDevExtTermin 9
+    PhysDevExtTermin 10
+    PhysDevExtTermin 11
+    PhysDevExtTermin 12
+    PhysDevExtTermin 13
+    PhysDevExtTermin 14
+    PhysDevExtTermin 15
+    PhysDevExtTermin 16
+    PhysDevExtTermin 17
+    PhysDevExtTermin 18
+    PhysDevExtTermin 19
+    PhysDevExtTermin 20
+    PhysDevExtTermin 21
+    PhysDevExtTermin 22
+    PhysDevExtTermin 23
+    PhysDevExtTermin 24
+    PhysDevExtTermin 25
+    PhysDevExtTermin 26
+    PhysDevExtTermin 27
+    PhysDevExtTermin 28
+    PhysDevExtTermin 29
+    PhysDevExtTermin 30
+    PhysDevExtTermin 31
+    PhysDevExtTermin 32
+    PhysDevExtTermin 33
+    PhysDevExtTermin 34
+    PhysDevExtTermin 35
+    PhysDevExtTermin 36
+    PhysDevExtTermin 37
+    PhysDevExtTermin 38
+    PhysDevExtTermin 39
+    PhysDevExtTermin 40
+    PhysDevExtTermin 41
+    PhysDevExtTermin 42
+    PhysDevExtTermin 43
+    PhysDevExtTermin 44
+    PhysDevExtTermin 45
+    PhysDevExtTermin 46
+    PhysDevExtTermin 47
+    PhysDevExtTermin 48
+    PhysDevExtTermin 49
+    PhysDevExtTermin 50
+    PhysDevExtTermin 51
+    PhysDevExtTermin 52
+    PhysDevExtTermin 53
+    PhysDevExtTermin 54
+    PhysDevExtTermin 55
+    PhysDevExtTermin 56
+    PhysDevExtTermin 57
+    PhysDevExtTermin 58
+    PhysDevExtTermin 59
+    PhysDevExtTermin 60
+    PhysDevExtTermin 61
+    PhysDevExtTermin 62
+    PhysDevExtTermin 63
+    PhysDevExtTermin 64
+    PhysDevExtTermin 65
+    PhysDevExtTermin 66
+    PhysDevExtTermin 67
+    PhysDevExtTermin 68
+    PhysDevExtTermin 69
+    PhysDevExtTermin 70
+    PhysDevExtTermin 71
+    PhysDevExtTermin 72
+    PhysDevExtTermin 73
+    PhysDevExtTermin 74
+    PhysDevExtTermin 75
+    PhysDevExtTermin 76
+    PhysDevExtTermin 77
+    PhysDevExtTermin 78
+    PhysDevExtTermin 79
+    PhysDevExtTermin 80
+    PhysDevExtTermin 81
+    PhysDevExtTermin 82
+    PhysDevExtTermin 83
+    PhysDevExtTermin 84
+    PhysDevExtTermin 85
+    PhysDevExtTermin 86
+    PhysDevExtTermin 87
+    PhysDevExtTermin 88
+    PhysDevExtTermin 89
+    PhysDevExtTermin 90
+    PhysDevExtTermin 91
+    PhysDevExtTermin 92
+    PhysDevExtTermin 93
+    PhysDevExtTermin 94
+    PhysDevExtTermin 95
+    PhysDevExtTermin 96
+    PhysDevExtTermin 97
+    PhysDevExtTermin 98
+    PhysDevExtTermin 99
+    PhysDevExtTermin 100
+    PhysDevExtTermin 101
+    PhysDevExtTermin 102
+    PhysDevExtTermin 103
+    PhysDevExtTermin 104
+    PhysDevExtTermin 105
+    PhysDevExtTermin 106
+    PhysDevExtTermin 107
+    PhysDevExtTermin 108
+    PhysDevExtTermin 109
+    PhysDevExtTermin 110
+    PhysDevExtTermin 111
+    PhysDevExtTermin 112
+    PhysDevExtTermin 113
+    PhysDevExtTermin 114
+    PhysDevExtTermin 115
+    PhysDevExtTermin 116
+    PhysDevExtTermin 117
+    PhysDevExtTermin 118
+    PhysDevExtTermin 119
+    PhysDevExtTermin 120
+    PhysDevExtTermin 121
+    PhysDevExtTermin 122
+    PhysDevExtTermin 123
+    PhysDevExtTermin 124
+    PhysDevExtTermin 125
+    PhysDevExtTermin 126
+    PhysDevExtTermin 127
+    PhysDevExtTermin 128
+    PhysDevExtTermin 129
+    PhysDevExtTermin 130
+    PhysDevExtTermin 131
+    PhysDevExtTermin 132
+    PhysDevExtTermin 133
+    PhysDevExtTermin 134
+    PhysDevExtTermin 135
+    PhysDevExtTermin 136
+    PhysDevExtTermin 137
+    PhysDevExtTermin 138
+    PhysDevExtTermin 139
+    PhysDevExtTermin 140
+    PhysDevExtTermin 141
+    PhysDevExtTermin 142
+    PhysDevExtTermin 143
+    PhysDevExtTermin 144
+    PhysDevExtTermin 145
+    PhysDevExtTermin 146
+    PhysDevExtTermin 147
+    PhysDevExtTermin 148
+    PhysDevExtTermin 149
+    PhysDevExtTermin 150
+    PhysDevExtTermin 151
+    PhysDevExtTermin 152
+    PhysDevExtTermin 153
+    PhysDevExtTermin 154
+    PhysDevExtTermin 155
+    PhysDevExtTermin 156
+    PhysDevExtTermin 157
+    PhysDevExtTermin 158
+    PhysDevExtTermin 159
+    PhysDevExtTermin 160
+    PhysDevExtTermin 161
+    PhysDevExtTermin 162
+    PhysDevExtTermin 163
+    PhysDevExtTermin 164
+    PhysDevExtTermin 165
+    PhysDevExtTermin 166
+    PhysDevExtTermin 167
+    PhysDevExtTermin 168
+    PhysDevExtTermin 169
+    PhysDevExtTermin 170
+    PhysDevExtTermin 171
+    PhysDevExtTermin 172
+    PhysDevExtTermin 173
+    PhysDevExtTermin 174
+    PhysDevExtTermin 175
+    PhysDevExtTermin 176
+    PhysDevExtTermin 177
+    PhysDevExtTermin 178
+    PhysDevExtTermin 179
+    PhysDevExtTermin 180
+    PhysDevExtTermin 181
+    PhysDevExtTermin 182
+    PhysDevExtTermin 183
+    PhysDevExtTermin 184
+    PhysDevExtTermin 185
+    PhysDevExtTermin 186
+    PhysDevExtTermin 187
+    PhysDevExtTermin 188
+    PhysDevExtTermin 189
+    PhysDevExtTermin 190
+    PhysDevExtTermin 191
+    PhysDevExtTermin 192
+    PhysDevExtTermin 193
+    PhysDevExtTermin 194
+    PhysDevExtTermin 195
+    PhysDevExtTermin 196
+    PhysDevExtTermin 197
+    PhysDevExtTermin 198
+    PhysDevExtTermin 199
+    PhysDevExtTermin 200
+    PhysDevExtTermin 201
+    PhysDevExtTermin 202
+    PhysDevExtTermin 203
+    PhysDevExtTermin 204
+    PhysDevExtTermin 205
+    PhysDevExtTermin 206
+    PhysDevExtTermin 207
+    PhysDevExtTermin 208
+    PhysDevExtTermin 209
+    PhysDevExtTermin 210
+    PhysDevExtTermin 211
+    PhysDevExtTermin 212
+    PhysDevExtTermin 213
+    PhysDevExtTermin 214
+    PhysDevExtTermin 215
+    PhysDevExtTermin 216
+    PhysDevExtTermin 217
+    PhysDevExtTermin 218
+    PhysDevExtTermin 219
+    PhysDevExtTermin 220
+    PhysDevExtTermin 221
+    PhysDevExtTermin 222
+    PhysDevExtTermin 223
+    PhysDevExtTermin 224
+    PhysDevExtTermin 225
+    PhysDevExtTermin 226
+    PhysDevExtTermin 227
+    PhysDevExtTermin 228
+    PhysDevExtTermin 229
+    PhysDevExtTermin 230
+    PhysDevExtTermin 231
+    PhysDevExtTermin 232
+    PhysDevExtTermin 233
+    PhysDevExtTermin 234
+    PhysDevExtTermin 235
+    PhysDevExtTermin 236
+    PhysDevExtTermin 237
+    PhysDevExtTermin 238
+    PhysDevExtTermin 239
+    PhysDevExtTermin 240
+    PhysDevExtTermin 241
+    PhysDevExtTermin 242
+    PhysDevExtTermin 243
+    PhysDevExtTermin 244
+    PhysDevExtTermin 245
+    PhysDevExtTermin 246
+    PhysDevExtTermin 247
+    PhysDevExtTermin 248
+    PhysDevExtTermin 249
+
+    DevExtTramp 0
+    DevExtTramp 1
+    DevExtTramp 2
+    DevExtTramp 3
+    DevExtTramp 4
+    DevExtTramp 5
+    DevExtTramp 6
+    DevExtTramp 7
+    DevExtTramp 8
+    DevExtTramp 9
+    DevExtTramp 10
+    DevExtTramp 11
+    DevExtTramp 12
+    DevExtTramp 13
+    DevExtTramp 14
+    DevExtTramp 15
+    DevExtTramp 16
+    DevExtTramp 17
+    DevExtTramp 18
+    DevExtTramp 19
+    DevExtTramp 20
+    DevExtTramp 21
+    DevExtTramp 22
+    DevExtTramp 23
+    DevExtTramp 24
+    DevExtTramp 25
+    DevExtTramp 26
+    DevExtTramp 27
+    DevExtTramp 28
+    DevExtTramp 29
+    DevExtTramp 30
+    DevExtTramp 31
+    DevExtTramp 32
+    DevExtTramp 33
+    DevExtTramp 34
+    DevExtTramp 35
+    DevExtTramp 36
+    DevExtTramp 37
+    DevExtTramp 38
+    DevExtTramp 39
+    DevExtTramp 40
+    DevExtTramp 41
+    DevExtTramp 42
+    DevExtTramp 43
+    DevExtTramp 44
+    DevExtTramp 45
+    DevExtTramp 46
+    DevExtTramp 47
+    DevExtTramp 48
+    DevExtTramp 49
+    DevExtTramp 50
+    DevExtTramp 51
+    DevExtTramp 52
+    DevExtTramp 53
+    DevExtTramp 54
+    DevExtTramp 55
+    DevExtTramp 56
+    DevExtTramp 57
+    DevExtTramp 58
+    DevExtTramp 59
+    DevExtTramp 60
+    DevExtTramp 61
+    DevExtTramp 62
+    DevExtTramp 63
+    DevExtTramp 64
+    DevExtTramp 65
+    DevExtTramp 66
+    DevExtTramp 67
+    DevExtTramp 68
+    DevExtTramp 69
+    DevExtTramp 70
+    DevExtTramp 71
+    DevExtTramp 72
+    DevExtTramp 73
+    DevExtTramp 74
+    DevExtTramp 75
+    DevExtTramp 76
+    DevExtTramp 77
+    DevExtTramp 78
+    DevExtTramp 79
+    DevExtTramp 80
+    DevExtTramp 81
+    DevExtTramp 82
+    DevExtTramp 83
+    DevExtTramp 84
+    DevExtTramp 85
+    DevExtTramp 86
+    DevExtTramp 87
+    DevExtTramp 88
+    DevExtTramp 89
+    DevExtTramp 90
+    DevExtTramp 91
+    DevExtTramp 92
+    DevExtTramp 93
+    DevExtTramp 94
+    DevExtTramp 95
+    DevExtTramp 96
+    DevExtTramp 97
+    DevExtTramp 98
+    DevExtTramp 99
+    DevExtTramp 100
+    DevExtTramp 101
+    DevExtTramp 102
+    DevExtTramp 103
+    DevExtTramp 104
+    DevExtTramp 105
+    DevExtTramp 106
+    DevExtTramp 107
+    DevExtTramp 108
+    DevExtTramp 109
+    DevExtTramp 110
+    DevExtTramp 111
+    DevExtTramp 112
+    DevExtTramp 113
+    DevExtTramp 114
+    DevExtTramp 115
+    DevExtTramp 116
+    DevExtTramp 117
+    DevExtTramp 118
+    DevExtTramp 119
+    DevExtTramp 120
+    DevExtTramp 121
+    DevExtTramp 122
+    DevExtTramp 123
+    DevExtTramp 124
+    DevExtTramp 125
+    DevExtTramp 126
+    DevExtTramp 127
+    DevExtTramp 128
+    DevExtTramp 129
+    DevExtTramp 130
+    DevExtTramp 131
+    DevExtTramp 132
+    DevExtTramp 133
+    DevExtTramp 134
+    DevExtTramp 135
+    DevExtTramp 136
+    DevExtTramp 137
+    DevExtTramp 138
+    DevExtTramp 139
+    DevExtTramp 140
+    DevExtTramp 141
+    DevExtTramp 142
+    DevExtTramp 143
+    DevExtTramp 144
+    DevExtTramp 145
+    DevExtTramp 146
+    DevExtTramp 147
+    DevExtTramp 148
+    DevExtTramp 149
+    DevExtTramp 150
+    DevExtTramp 151
+    DevExtTramp 152
+    DevExtTramp 153
+    DevExtTramp 154
+    DevExtTramp 155
+    DevExtTramp 156
+    DevExtTramp 157
+    DevExtTramp 158
+    DevExtTramp 159
+    DevExtTramp 160
+    DevExtTramp 161
+    DevExtTramp 162
+    DevExtTramp 163
+    DevExtTramp 164
+    DevExtTramp 165
+    DevExtTramp 166
+    DevExtTramp 167
+    DevExtTramp 168
+    DevExtTramp 169
+    DevExtTramp 170
+    DevExtTramp 171
+    DevExtTramp 172
+    DevExtTramp 173
+    DevExtTramp 174
+    DevExtTramp 175
+    DevExtTramp 176
+    DevExtTramp 177
+    DevExtTramp 178
+    DevExtTramp 179
+    DevExtTramp 180
+    DevExtTramp 181
+    DevExtTramp 182
+    DevExtTramp 183
+    DevExtTramp 184
+    DevExtTramp 185
+    DevExtTramp 186
+    DevExtTramp 187
+    DevExtTramp 188
+    DevExtTramp 189
+    DevExtTramp 190
+    DevExtTramp 191
+    DevExtTramp 192
+    DevExtTramp 193
+    DevExtTramp 194
+    DevExtTramp 195
+    DevExtTramp 196
+    DevExtTramp 197
+    DevExtTramp 198
+    DevExtTramp 199
+    DevExtTramp 200
+    DevExtTramp 201
+    DevExtTramp 202
+    DevExtTramp 203
+    DevExtTramp 204
+    DevExtTramp 205
+    DevExtTramp 206
+    DevExtTramp 207
+    DevExtTramp 208
+    DevExtTramp 209
+    DevExtTramp 210
+    DevExtTramp 211
+    DevExtTramp 212
+    DevExtTramp 213
+    DevExtTramp 214
+    DevExtTramp 215
+    DevExtTramp 216
+    DevExtTramp 217
+    DevExtTramp 218
+    DevExtTramp 219
+    DevExtTramp 220
+    DevExtTramp 221
+    DevExtTramp 222
+    DevExtTramp 223
+    DevExtTramp 224
+    DevExtTramp 225
+    DevExtTramp 226
+    DevExtTramp 227
+    DevExtTramp 228
+    DevExtTramp 229
+    DevExtTramp 230
+    DevExtTramp 231
+    DevExtTramp 232
+    DevExtTramp 233
+    DevExtTramp 234
+    DevExtTramp 235
+    DevExtTramp 236
+    DevExtTramp 237
+    DevExtTramp 238
+    DevExtTramp 239
+    DevExtTramp 240
+    DevExtTramp 241
+    DevExtTramp 242
+    DevExtTramp 243
+    DevExtTramp 244
+    DevExtTramp 245
+    DevExtTramp 246
+    DevExtTramp 247
+    DevExtTramp 248
+    DevExtTramp 249
diff --git a/src/third_party/vulkan-loader/src/loader/unknown_ext_chain_masm.asm b/src/third_party/vulkan-loader/src/loader/unknown_ext_chain_masm.asm
new file mode 100644
index 0000000..34bc7c2
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/unknown_ext_chain_masm.asm
@@ -0,0 +1,883 @@
+;
+; Copyright (c) 2017 The Khronos Group Inc.
+; Copyright (c) 2017 Valve Corporation
+; Copyright (c) 2017 LunarG, Inc.
+;
+; Licensed under the Apache License, Version 2.0 (the "License");
+; you may not use this file except in compliance with the License.
+; You may obtain a copy of the License at
+;
+;     http://www.apache.org/licenses/LICENSE-2.0
+;
+; Unless required by applicable law or agreed to in writing, software
+; distributed under the License is distributed on an "AS IS" BASIS,
+; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+; See the License for the specific language governing permissions and
+; limitations under the License.
+;
+; Author: Lenny Komow <lenny@lunarg.com>
+;
+
+; This code is used to pass on device (including physical device) extensions through the call chain. It must do this without
+; creating a stack frame, because the actual parameters of the call are not known. Since the first parameter is known to be a
+; VkPhysicalDevice or a dispatchable object it can unwrap the object, possibly overwriting the wrapped physical device, and then
+; jump to the next function in the call chain
+
+; Codegen defines a number of values, chiefly offsets of members within structs and sizes of data types within gen_defines.asm.
+; Struct member offsets are defined in the format "XX_OFFSET_YY" where XX indicates the member within the struct and YY indicates
+; the struct type that it is a member of. Data type sizes are defined in the format "XX_SIZE" where XX indicates the data type.
+INCLUDE gen_defines.asm
+
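
The gen_defines.asm values referenced here are produced by the loader's code-generation step, which this patch does not show. As an editorial sketch only, one common way to produce such an include file is a tiny host program that prints offsetof/sizeof results in assembler syntax (the struct and constant names below are hypothetical, and real GAS output would use .set rather than MASM's equ):

    /* Hypothetical offset generator: run at build time and redirect stdout
     * into a gen_defines.asm-style include file. */
    #include <stddef.h>
    #include <stdio.h>

    struct example_phys_dev {        /* stand-in struct, not the loader's real layout */
        void *inst_dispatch;
        void *phys_dev;
    };

    int main(void) {
        printf("PHYS_DEV_OFFSET_INST_DISPATCH equ %zu\n",
               offsetof(struct example_phys_dev, inst_dispatch));
        printf("PHYS_DEV_OFFSET_PHYS_DEV_TRAMP equ %zu\n",
               offsetof(struct example_phys_dev, phys_dev));
        printf("PTR_SIZE equ %zu\n", sizeof(void *));
        return 0;
    }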
+; 64-bit values and macro
+IFDEF rax
+
+PhysDevExtTramp macro num:req
+public vkPhysDevExtTramp&num&
+vkPhysDevExtTramp&num&:
+    mov     rax, qword ptr [rcx]                            ; Dereference the wrapped VkPhysicalDevice to get the dispatch table in rax
+    mov     rcx, qword ptr [rcx + PHYS_DEV_OFFSET_PHYS_DEV_TRAMP]   ; Load the unwrapped VkPhysicalDevice into rcx
+    jmp     qword ptr [rax + (PHYS_DEV_OFFSET_INST_DISPATCH + (PTR_SIZE * num))] ; Jump to the next function in the chain, preserving the args in other registers
+endm
+
+PhysDevExtTermin macro num
+public vkPhysDevExtTermin&num&
+vkPhysDevExtTermin&num&:
+    mov     rax, qword ptr [rcx + ICD_TERM_OFFSET_PHYS_DEV_TERM]                ; Store the loader_icd_term* in rax
+    cmp     qword ptr [rax + (DISPATCH_OFFSET_ICD_TERM + (PTR_SIZE * num))], 0  ; Check if the next function in the chain is NULL
+    je      terminError&num&                                                    ; Go to the error section if it is NULL
+    mov     rcx, qword ptr [rcx + PHYS_DEV_OFFSET_PHYS_DEV_TERM]                ; Load the unwrapped VkPhysicalDevice into the first arg
+    jmp     qword ptr [rax + (DISPATCH_OFFSET_ICD_TERM + (PTR_SIZE * num))]     ; Jump to the next function in the chain
+terminError&num&:
+    sub     rsp, 56                                                             ; Create the stack frame
+    mov     rcx, qword ptr [rax + INSTANCE_OFFSET_ICD_TERM]                     ; Load the loader_instance into rcx (first arg)
+    mov     rax, qword ptr [rcx + (HASH_OFFSET_INSTANCE + (HASH_SIZE * num) + FUNC_NAME_OFFSET_HASH)] ; Load the func name into rax
+    lea     r9, termin_error_string                                             ; Load the error string into r9 (fourth arg)
+    xor     r8d, r8d                                                            ; Set r8 to zero (third arg)
+    mov     qword ptr [rsp + 32], rax                                           ; Move the func name onto the stack (fifth arg)
+    lea     edx, [r8 + VK_DEBUG_REPORT_ERROR_BIT_EXT]                           ; Write the error logging bit to rdx (second arg)
+    call    loader_log                                                          ; Log the error message before we crash
+    add     rsp, 56                                                             ; Clean up the stack frame
+    mov     rax, 0
+    jmp     rax                                                                 ; Crash intentionally by jumping to address zero
+endm
+
+DevExtTramp macro num
+public vkdev_ext&num&
+vkdev_ext&num&:
+    mov     rax, qword ptr [rcx]                                               ; Dereference the handle to get the dispatch table
+    jmp     qword ptr [rax + (EXT_OFFSET_DEVICE_DISPATCH + (PTR_SIZE * num))]  ; Jump to the appropriate call chain
+endm
+
+; 32-bit values and macro
+ELSE
+
+PhysDevExtTramp macro num
+public _vkPhysDevExtTramp&num&@4
+_vkPhysDevExtTramp&num&@4:
+    mov     eax, dword ptr [esp + 4]                        ; Load the wrapped VkPhysicalDevice into eax
+    mov     ecx, [eax + PHYS_DEV_OFFSET_PHYS_DEV_TRAMP]     ; Load the unwrapped VkPhysicalDevice into ecx
+    mov     [esp + 4], ecx                                  ; Overwrite the wrapped VkPhysicalDevice with the unwrapped one (on the stack)
+    mov     eax, [eax]                                      ; Dereference the wrapped VkPhysicalDevice to get the dispatch table in eax
+    jmp     dword ptr [eax + (PHYS_DEV_OFFSET_INST_DISPATCH + (PTR_SIZE * num))] ; Jump to the next function in the chain, preserving the args on the stack
+endm
+
+PhysDevExtTermin macro num
+public _vkPhysDevExtTermin&num&@4
+_vkPhysDevExtTermin&num&@4:
+    mov     ecx, dword ptr [esp + 4]                                            ; Move the wrapped VkPhysicalDevice into ecx
+    mov     eax, dword ptr [ecx + ICD_TERM_OFFSET_PHYS_DEV_TERM]                ; Store the loader_icd_term* in eax
+    cmp     dword ptr [eax + (DISPATCH_OFFSET_ICD_TERM + (PTR_SIZE * num))], 0  ; Check if the next function in the chain is NULL
+    je      terminError&num&                                                    ; Go to the error section if it is NULL
+    mov     ecx, dword ptr [ecx + PHYS_DEV_OFFSET_PHYS_DEV_TERM]                ; Unwrap the VkPhysicalDevice in ecx
+    mov     dword ptr [esp + 4], ecx                                            ; Copy the unwrapped VkPhysicalDevice into the first arg
+    jmp     dword ptr [eax + (DISPATCH_OFFSET_ICD_TERM + (PTR_SIZE * num))]     ; Jump to the next function in the chain
+terminError&num&:
+    mov     eax, dword ptr [eax + INSTANCE_OFFSET_ICD_TERM]                     ; Load the loader_instance into eax
+    push    dword ptr [eax + (HASH_OFFSET_INSTANCE + (HASH_SIZE * num) + FUNC_NAME_OFFSET_HASH)] ; Push the func name (fifth arg)
+    push    offset termin_error_string                                          ; Push the error string (fourth arg)
+    push    0                                                                   ; Push zero (third arg)
+    push    VK_DEBUG_REPORT_ERROR_BIT_EXT                                       ; Push the error logging bit (second arg)
+    push    eax                                                                 ; Push the loader_instance (first arg)
+    call    _loader_log                                                         ; Log the error message before we crash
+    add     esp, 20                                                             ; Clean up the args
+    mov     eax, 0
+    jmp     eax                                                                 ; Crash intentionally by jumping to address zero
+endm
+
+DevExtTramp macro num
+public _vkdev_ext&num&@4
+_vkdev_ext&num&@4:
+    mov     eax, dword ptr [esp + 4]                                           ; Dereference the handle to get the dispatch table
+    jmp     dword ptr [eax + (EXT_OFFSET_DEVICE_DISPATCH + (PTR_SIZE * num))]  ; Jump to the appropriate call chain
+endm
+
+; This is also needed for 32-bit only
+.model flat
+
+ENDIF
+
+.const
+    termin_error_string db 'Extension %s not supported for this physical device', 0
+
+.code
+
+IFDEF rax
+extrn loader_log:near
+ELSE
+extrn _loader_log:near
+ENDIF
+
+    PhysDevExtTramp 0
+    PhysDevExtTramp 1
+    PhysDevExtTramp 2
+    PhysDevExtTramp 3
+    PhysDevExtTramp 4
+    PhysDevExtTramp 5
+    PhysDevExtTramp 6
+    PhysDevExtTramp 7
+    PhysDevExtTramp 8
+    PhysDevExtTramp 9
+    PhysDevExtTramp 10
+    PhysDevExtTramp 11
+    PhysDevExtTramp 12
+    PhysDevExtTramp 13
+    PhysDevExtTramp 14
+    PhysDevExtTramp 15
+    PhysDevExtTramp 16
+    PhysDevExtTramp 17
+    PhysDevExtTramp 18
+    PhysDevExtTramp 19
+    PhysDevExtTramp 20
+    PhysDevExtTramp 21
+    PhysDevExtTramp 22
+    PhysDevExtTramp 23
+    PhysDevExtTramp 24
+    PhysDevExtTramp 25
+    PhysDevExtTramp 26
+    PhysDevExtTramp 27
+    PhysDevExtTramp 28
+    PhysDevExtTramp 29
+    PhysDevExtTramp 30
+    PhysDevExtTramp 31
+    PhysDevExtTramp 32
+    PhysDevExtTramp 33
+    PhysDevExtTramp 34
+    PhysDevExtTramp 35
+    PhysDevExtTramp 36
+    PhysDevExtTramp 37
+    PhysDevExtTramp 38
+    PhysDevExtTramp 39
+    PhysDevExtTramp 40
+    PhysDevExtTramp 41
+    PhysDevExtTramp 42
+    PhysDevExtTramp 43
+    PhysDevExtTramp 44
+    PhysDevExtTramp 45
+    PhysDevExtTramp 46
+    PhysDevExtTramp 47
+    PhysDevExtTramp 48
+    PhysDevExtTramp 49
+    PhysDevExtTramp 50
+    PhysDevExtTramp 51
+    PhysDevExtTramp 52
+    PhysDevExtTramp 53
+    PhysDevExtTramp 54
+    PhysDevExtTramp 55
+    PhysDevExtTramp 56
+    PhysDevExtTramp 57
+    PhysDevExtTramp 58
+    PhysDevExtTramp 59
+    PhysDevExtTramp 60
+    PhysDevExtTramp 61
+    PhysDevExtTramp 62
+    PhysDevExtTramp 63
+    PhysDevExtTramp 64
+    PhysDevExtTramp 65
+    PhysDevExtTramp 66
+    PhysDevExtTramp 67
+    PhysDevExtTramp 68
+    PhysDevExtTramp 69
+    PhysDevExtTramp 70
+    PhysDevExtTramp 71
+    PhysDevExtTramp 72
+    PhysDevExtTramp 73
+    PhysDevExtTramp 74
+    PhysDevExtTramp 75
+    PhysDevExtTramp 76
+    PhysDevExtTramp 77
+    PhysDevExtTramp 78
+    PhysDevExtTramp 79
+    PhysDevExtTramp 80
+    PhysDevExtTramp 81
+    PhysDevExtTramp 82
+    PhysDevExtTramp 83
+    PhysDevExtTramp 84
+    PhysDevExtTramp 85
+    PhysDevExtTramp 86
+    PhysDevExtTramp 87
+    PhysDevExtTramp 88
+    PhysDevExtTramp 89
+    PhysDevExtTramp 90
+    PhysDevExtTramp 91
+    PhysDevExtTramp 92
+    PhysDevExtTramp 93
+    PhysDevExtTramp 94
+    PhysDevExtTramp 95
+    PhysDevExtTramp 96
+    PhysDevExtTramp 97
+    PhysDevExtTramp 98
+    PhysDevExtTramp 99
+    PhysDevExtTramp 100
+    PhysDevExtTramp 101
+    PhysDevExtTramp 102
+    PhysDevExtTramp 103
+    PhysDevExtTramp 104
+    PhysDevExtTramp 105
+    PhysDevExtTramp 106
+    PhysDevExtTramp 107
+    PhysDevExtTramp 108
+    PhysDevExtTramp 109
+    PhysDevExtTramp 110
+    PhysDevExtTramp 111
+    PhysDevExtTramp 112
+    PhysDevExtTramp 113
+    PhysDevExtTramp 114
+    PhysDevExtTramp 115
+    PhysDevExtTramp 116
+    PhysDevExtTramp 117
+    PhysDevExtTramp 118
+    PhysDevExtTramp 119
+    PhysDevExtTramp 120
+    PhysDevExtTramp 121
+    PhysDevExtTramp 122
+    PhysDevExtTramp 123
+    PhysDevExtTramp 124
+    PhysDevExtTramp 125
+    PhysDevExtTramp 126
+    PhysDevExtTramp 127
+    PhysDevExtTramp 128
+    PhysDevExtTramp 129
+    PhysDevExtTramp 130
+    PhysDevExtTramp 131
+    PhysDevExtTramp 132
+    PhysDevExtTramp 133
+    PhysDevExtTramp 134
+    PhysDevExtTramp 135
+    PhysDevExtTramp 136
+    PhysDevExtTramp 137
+    PhysDevExtTramp 138
+    PhysDevExtTramp 139
+    PhysDevExtTramp 140
+    PhysDevExtTramp 141
+    PhysDevExtTramp 142
+    PhysDevExtTramp 143
+    PhysDevExtTramp 144
+    PhysDevExtTramp 145
+    PhysDevExtTramp 146
+    PhysDevExtTramp 147
+    PhysDevExtTramp 148
+    PhysDevExtTramp 149
+    PhysDevExtTramp 150
+    PhysDevExtTramp 151
+    PhysDevExtTramp 152
+    PhysDevExtTramp 153
+    PhysDevExtTramp 154
+    PhysDevExtTramp 155
+    PhysDevExtTramp 156
+    PhysDevExtTramp 157
+    PhysDevExtTramp 158
+    PhysDevExtTramp 159
+    PhysDevExtTramp 160
+    PhysDevExtTramp 161
+    PhysDevExtTramp 162
+    PhysDevExtTramp 163
+    PhysDevExtTramp 164
+    PhysDevExtTramp 165
+    PhysDevExtTramp 166
+    PhysDevExtTramp 167
+    PhysDevExtTramp 168
+    PhysDevExtTramp 169
+    PhysDevExtTramp 170
+    PhysDevExtTramp 171
+    PhysDevExtTramp 172
+    PhysDevExtTramp 173
+    PhysDevExtTramp 174
+    PhysDevExtTramp 175
+    PhysDevExtTramp 176
+    PhysDevExtTramp 177
+    PhysDevExtTramp 178
+    PhysDevExtTramp 179
+    PhysDevExtTramp 180
+    PhysDevExtTramp 181
+    PhysDevExtTramp 182
+    PhysDevExtTramp 183
+    PhysDevExtTramp 184
+    PhysDevExtTramp 185
+    PhysDevExtTramp 186
+    PhysDevExtTramp 187
+    PhysDevExtTramp 188
+    PhysDevExtTramp 189
+    PhysDevExtTramp 190
+    PhysDevExtTramp 191
+    PhysDevExtTramp 192
+    PhysDevExtTramp 193
+    PhysDevExtTramp 194
+    PhysDevExtTramp 195
+    PhysDevExtTramp 196
+    PhysDevExtTramp 197
+    PhysDevExtTramp 198
+    PhysDevExtTramp 199
+    PhysDevExtTramp 200
+    PhysDevExtTramp 201
+    PhysDevExtTramp 202
+    PhysDevExtTramp 203
+    PhysDevExtTramp 204
+    PhysDevExtTramp 205
+    PhysDevExtTramp 206
+    PhysDevExtTramp 207
+    PhysDevExtTramp 208
+    PhysDevExtTramp 209
+    PhysDevExtTramp 210
+    PhysDevExtTramp 211
+    PhysDevExtTramp 212
+    PhysDevExtTramp 213
+    PhysDevExtTramp 214
+    PhysDevExtTramp 215
+    PhysDevExtTramp 216
+    PhysDevExtTramp 217
+    PhysDevExtTramp 218
+    PhysDevExtTramp 219
+    PhysDevExtTramp 220
+    PhysDevExtTramp 221
+    PhysDevExtTramp 222
+    PhysDevExtTramp 223
+    PhysDevExtTramp 224
+    PhysDevExtTramp 225
+    PhysDevExtTramp 226
+    PhysDevExtTramp 227
+    PhysDevExtTramp 228
+    PhysDevExtTramp 229
+    PhysDevExtTramp 230
+    PhysDevExtTramp 231
+    PhysDevExtTramp 232
+    PhysDevExtTramp 233
+    PhysDevExtTramp 234
+    PhysDevExtTramp 235
+    PhysDevExtTramp 236
+    PhysDevExtTramp 237
+    PhysDevExtTramp 238
+    PhysDevExtTramp 239
+    PhysDevExtTramp 240
+    PhysDevExtTramp 241
+    PhysDevExtTramp 242
+    PhysDevExtTramp 243
+    PhysDevExtTramp 244
+    PhysDevExtTramp 245
+    PhysDevExtTramp 246
+    PhysDevExtTramp 247
+    PhysDevExtTramp 248
+    PhysDevExtTramp 249
+
+    PhysDevExtTermin 0
+    PhysDevExtTermin 1
+    PhysDevExtTermin 2
+    PhysDevExtTermin 3
+    PhysDevExtTermin 4
+    PhysDevExtTermin 5
+    PhysDevExtTermin 6
+    PhysDevExtTermin 7
+    PhysDevExtTermin 8
+    PhysDevExtTermin 9
+    PhysDevExtTermin 10
+    PhysDevExtTermin 11
+    PhysDevExtTermin 12
+    PhysDevExtTermin 13
+    PhysDevExtTermin 14
+    PhysDevExtTermin 15
+    PhysDevExtTermin 16
+    PhysDevExtTermin 17
+    PhysDevExtTermin 18
+    PhysDevExtTermin 19
+    PhysDevExtTermin 20
+    PhysDevExtTermin 21
+    PhysDevExtTermin 22
+    PhysDevExtTermin 23
+    PhysDevExtTermin 24
+    PhysDevExtTermin 25
+    PhysDevExtTermin 26
+    PhysDevExtTermin 27
+    PhysDevExtTermin 28
+    PhysDevExtTermin 29
+    PhysDevExtTermin 30
+    PhysDevExtTermin 31
+    PhysDevExtTermin 32
+    PhysDevExtTermin 33
+    PhysDevExtTermin 34
+    PhysDevExtTermin 35
+    PhysDevExtTermin 36
+    PhysDevExtTermin 37
+    PhysDevExtTermin 38
+    PhysDevExtTermin 39
+    PhysDevExtTermin 40
+    PhysDevExtTermin 41
+    PhysDevExtTermin 42
+    PhysDevExtTermin 43
+    PhysDevExtTermin 44
+    PhysDevExtTermin 45
+    PhysDevExtTermin 46
+    PhysDevExtTermin 47
+    PhysDevExtTermin 48
+    PhysDevExtTermin 49
+    PhysDevExtTermin 50
+    PhysDevExtTermin 51
+    PhysDevExtTermin 52
+    PhysDevExtTermin 53
+    PhysDevExtTermin 54
+    PhysDevExtTermin 55
+    PhysDevExtTermin 56
+    PhysDevExtTermin 57
+    PhysDevExtTermin 58
+    PhysDevExtTermin 59
+    PhysDevExtTermin 60
+    PhysDevExtTermin 61
+    PhysDevExtTermin 62
+    PhysDevExtTermin 63
+    PhysDevExtTermin 64
+    PhysDevExtTermin 65
+    PhysDevExtTermin 66
+    PhysDevExtTermin 67
+    PhysDevExtTermin 68
+    PhysDevExtTermin 69
+    PhysDevExtTermin 70
+    PhysDevExtTermin 71
+    PhysDevExtTermin 72
+    PhysDevExtTermin 73
+    PhysDevExtTermin 74
+    PhysDevExtTermin 75
+    PhysDevExtTermin 76
+    PhysDevExtTermin 77
+    PhysDevExtTermin 78
+    PhysDevExtTermin 79
+    PhysDevExtTermin 80
+    PhysDevExtTermin 81
+    PhysDevExtTermin 82
+    PhysDevExtTermin 83
+    PhysDevExtTermin 84
+    PhysDevExtTermin 85
+    PhysDevExtTermin 86
+    PhysDevExtTermin 87
+    PhysDevExtTermin 88
+    PhysDevExtTermin 89
+    PhysDevExtTermin 90
+    PhysDevExtTermin 91
+    PhysDevExtTermin 92
+    PhysDevExtTermin 93
+    PhysDevExtTermin 94
+    PhysDevExtTermin 95
+    PhysDevExtTermin 96
+    PhysDevExtTermin 97
+    PhysDevExtTermin 98
+    PhysDevExtTermin 99
+    PhysDevExtTermin 100
+    PhysDevExtTermin 101
+    PhysDevExtTermin 102
+    PhysDevExtTermin 103
+    PhysDevExtTermin 104
+    PhysDevExtTermin 105
+    PhysDevExtTermin 106
+    PhysDevExtTermin 107
+    PhysDevExtTermin 108
+    PhysDevExtTermin 109
+    PhysDevExtTermin 110
+    PhysDevExtTermin 111
+    PhysDevExtTermin 112
+    PhysDevExtTermin 113
+    PhysDevExtTermin 114
+    PhysDevExtTermin 115
+    PhysDevExtTermin 116
+    PhysDevExtTermin 117
+    PhysDevExtTermin 118
+    PhysDevExtTermin 119
+    PhysDevExtTermin 120
+    PhysDevExtTermin 121
+    PhysDevExtTermin 122
+    PhysDevExtTermin 123
+    PhysDevExtTermin 124
+    PhysDevExtTermin 125
+    PhysDevExtTermin 126
+    PhysDevExtTermin 127
+    PhysDevExtTermin 128
+    PhysDevExtTermin 129
+    PhysDevExtTermin 130
+    PhysDevExtTermin 131
+    PhysDevExtTermin 132
+    PhysDevExtTermin 133
+    PhysDevExtTermin 134
+    PhysDevExtTermin 135
+    PhysDevExtTermin 136
+    PhysDevExtTermin 137
+    PhysDevExtTermin 138
+    PhysDevExtTermin 139
+    PhysDevExtTermin 140
+    PhysDevExtTermin 141
+    PhysDevExtTermin 142
+    PhysDevExtTermin 143
+    PhysDevExtTermin 144
+    PhysDevExtTermin 145
+    PhysDevExtTermin 146
+    PhysDevExtTermin 147
+    PhysDevExtTermin 148
+    PhysDevExtTermin 149
+    PhysDevExtTermin 150
+    PhysDevExtTermin 151
+    PhysDevExtTermin 152
+    PhysDevExtTermin 153
+    PhysDevExtTermin 154
+    PhysDevExtTermin 155
+    PhysDevExtTermin 156
+    PhysDevExtTermin 157
+    PhysDevExtTermin 158
+    PhysDevExtTermin 159
+    PhysDevExtTermin 160
+    PhysDevExtTermin 161
+    PhysDevExtTermin 162
+    PhysDevExtTermin 163
+    PhysDevExtTermin 164
+    PhysDevExtTermin 165
+    PhysDevExtTermin 166
+    PhysDevExtTermin 167
+    PhysDevExtTermin 168
+    PhysDevExtTermin 169
+    PhysDevExtTermin 170
+    PhysDevExtTermin 171
+    PhysDevExtTermin 172
+    PhysDevExtTermin 173
+    PhysDevExtTermin 174
+    PhysDevExtTermin 175
+    PhysDevExtTermin 176
+    PhysDevExtTermin 177
+    PhysDevExtTermin 178
+    PhysDevExtTermin 179
+    PhysDevExtTermin 180
+    PhysDevExtTermin 181
+    PhysDevExtTermin 182
+    PhysDevExtTermin 183
+    PhysDevExtTermin 184
+    PhysDevExtTermin 185
+    PhysDevExtTermin 186
+    PhysDevExtTermin 187
+    PhysDevExtTermin 188
+    PhysDevExtTermin 189
+    PhysDevExtTermin 190
+    PhysDevExtTermin 191
+    PhysDevExtTermin 192
+    PhysDevExtTermin 193
+    PhysDevExtTermin 194
+    PhysDevExtTermin 195
+    PhysDevExtTermin 196
+    PhysDevExtTermin 197
+    PhysDevExtTermin 198
+    PhysDevExtTermin 199
+    PhysDevExtTermin 200
+    PhysDevExtTermin 201
+    PhysDevExtTermin 202
+    PhysDevExtTermin 203
+    PhysDevExtTermin 204
+    PhysDevExtTermin 205
+    PhysDevExtTermin 206
+    PhysDevExtTermin 207
+    PhysDevExtTermin 208
+    PhysDevExtTermin 209
+    PhysDevExtTermin 210
+    PhysDevExtTermin 211
+    PhysDevExtTermin 212
+    PhysDevExtTermin 213
+    PhysDevExtTermin 214
+    PhysDevExtTermin 215
+    PhysDevExtTermin 216
+    PhysDevExtTermin 217
+    PhysDevExtTermin 218
+    PhysDevExtTermin 219
+    PhysDevExtTermin 220
+    PhysDevExtTermin 221
+    PhysDevExtTermin 222
+    PhysDevExtTermin 223
+    PhysDevExtTermin 224
+    PhysDevExtTermin 225
+    PhysDevExtTermin 226
+    PhysDevExtTermin 227
+    PhysDevExtTermin 228
+    PhysDevExtTermin 229
+    PhysDevExtTermin 230
+    PhysDevExtTermin 231
+    PhysDevExtTermin 232
+    PhysDevExtTermin 233
+    PhysDevExtTermin 234
+    PhysDevExtTermin 235
+    PhysDevExtTermin 236
+    PhysDevExtTermin 237
+    PhysDevExtTermin 238
+    PhysDevExtTermin 239
+    PhysDevExtTermin 240
+    PhysDevExtTermin 241
+    PhysDevExtTermin 242
+    PhysDevExtTermin 243
+    PhysDevExtTermin 244
+    PhysDevExtTermin 245
+    PhysDevExtTermin 246
+    PhysDevExtTermin 247
+    PhysDevExtTermin 248
+    PhysDevExtTermin 249
+
+    DevExtTramp 0
+    DevExtTramp 1
+    DevExtTramp 2
+    DevExtTramp 3
+    DevExtTramp 4
+    DevExtTramp 5
+    DevExtTramp 6
+    DevExtTramp 7
+    DevExtTramp 8
+    DevExtTramp 9
+    DevExtTramp 10
+    DevExtTramp 11
+    DevExtTramp 12
+    DevExtTramp 13
+    DevExtTramp 14
+    DevExtTramp 15
+    DevExtTramp 16
+    DevExtTramp 17
+    DevExtTramp 18
+    DevExtTramp 19
+    DevExtTramp 20
+    DevExtTramp 21
+    DevExtTramp 22
+    DevExtTramp 23
+    DevExtTramp 24
+    DevExtTramp 25
+    DevExtTramp 26
+    DevExtTramp 27
+    DevExtTramp 28
+    DevExtTramp 29
+    DevExtTramp 30
+    DevExtTramp 31
+    DevExtTramp 32
+    DevExtTramp 33
+    DevExtTramp 34
+    DevExtTramp 35
+    DevExtTramp 36
+    DevExtTramp 37
+    DevExtTramp 38
+    DevExtTramp 39
+    DevExtTramp 40
+    DevExtTramp 41
+    DevExtTramp 42
+    DevExtTramp 43
+    DevExtTramp 44
+    DevExtTramp 45
+    DevExtTramp 46
+    DevExtTramp 47
+    DevExtTramp 48
+    DevExtTramp 49
+    DevExtTramp 50
+    DevExtTramp 51
+    DevExtTramp 52
+    DevExtTramp 53
+    DevExtTramp 54
+    DevExtTramp 55
+    DevExtTramp 56
+    DevExtTramp 57
+    DevExtTramp 58
+    DevExtTramp 59
+    DevExtTramp 60
+    DevExtTramp 61
+    DevExtTramp 62
+    DevExtTramp 63
+    DevExtTramp 64
+    DevExtTramp 65
+    DevExtTramp 66
+    DevExtTramp 67
+    DevExtTramp 68
+    DevExtTramp 69
+    DevExtTramp 70
+    DevExtTramp 71
+    DevExtTramp 72
+    DevExtTramp 73
+    DevExtTramp 74
+    DevExtTramp 75
+    DevExtTramp 76
+    DevExtTramp 77
+    DevExtTramp 78
+    DevExtTramp 79
+    DevExtTramp 80
+    DevExtTramp 81
+    DevExtTramp 82
+    DevExtTramp 83
+    DevExtTramp 84
+    DevExtTramp 85
+    DevExtTramp 86
+    DevExtTramp 87
+    DevExtTramp 88
+    DevExtTramp 89
+    DevExtTramp 90
+    DevExtTramp 91
+    DevExtTramp 92
+    DevExtTramp 93
+    DevExtTramp 94
+    DevExtTramp 95
+    DevExtTramp 96
+    DevExtTramp 97
+    DevExtTramp 98
+    DevExtTramp 99
+    DevExtTramp 100
+    DevExtTramp 101
+    DevExtTramp 102
+    DevExtTramp 103
+    DevExtTramp 104
+    DevExtTramp 105
+    DevExtTramp 106
+    DevExtTramp 107
+    DevExtTramp 108
+    DevExtTramp 109
+    DevExtTramp 110
+    DevExtTramp 111
+    DevExtTramp 112
+    DevExtTramp 113
+    DevExtTramp 114
+    DevExtTramp 115
+    DevExtTramp 116
+    DevExtTramp 117
+    DevExtTramp 118
+    DevExtTramp 119
+    DevExtTramp 120
+    DevExtTramp 121
+    DevExtTramp 122
+    DevExtTramp 123
+    DevExtTramp 124
+    DevExtTramp 125
+    DevExtTramp 126
+    DevExtTramp 127
+    DevExtTramp 128
+    DevExtTramp 129
+    DevExtTramp 130
+    DevExtTramp 131
+    DevExtTramp 132
+    DevExtTramp 133
+    DevExtTramp 134
+    DevExtTramp 135
+    DevExtTramp 136
+    DevExtTramp 137
+    DevExtTramp 138
+    DevExtTramp 139
+    DevExtTramp 140
+    DevExtTramp 141
+    DevExtTramp 142
+    DevExtTramp 143
+    DevExtTramp 144
+    DevExtTramp 145
+    DevExtTramp 146
+    DevExtTramp 147
+    DevExtTramp 148
+    DevExtTramp 149
+    DevExtTramp 150
+    DevExtTramp 151
+    DevExtTramp 152
+    DevExtTramp 153
+    DevExtTramp 154
+    DevExtTramp 155
+    DevExtTramp 156
+    DevExtTramp 157
+    DevExtTramp 158
+    DevExtTramp 159
+    DevExtTramp 160
+    DevExtTramp 161
+    DevExtTramp 162
+    DevExtTramp 163
+    DevExtTramp 164
+    DevExtTramp 165
+    DevExtTramp 166
+    DevExtTramp 167
+    DevExtTramp 168
+    DevExtTramp 169
+    DevExtTramp 170
+    DevExtTramp 171
+    DevExtTramp 172
+    DevExtTramp 173
+    DevExtTramp 174
+    DevExtTramp 175
+    DevExtTramp 176
+    DevExtTramp 177
+    DevExtTramp 178
+    DevExtTramp 179
+    DevExtTramp 180
+    DevExtTramp 181
+    DevExtTramp 182
+    DevExtTramp 183
+    DevExtTramp 184
+    DevExtTramp 185
+    DevExtTramp 186
+    DevExtTramp 187
+    DevExtTramp 188
+    DevExtTramp 189
+    DevExtTramp 190
+    DevExtTramp 191
+    DevExtTramp 192
+    DevExtTramp 193
+    DevExtTramp 194
+    DevExtTramp 195
+    DevExtTramp 196
+    DevExtTramp 197
+    DevExtTramp 198
+    DevExtTramp 199
+    DevExtTramp 200
+    DevExtTramp 201
+    DevExtTramp 202
+    DevExtTramp 203
+    DevExtTramp 204
+    DevExtTramp 205
+    DevExtTramp 206
+    DevExtTramp 207
+    DevExtTramp 208
+    DevExtTramp 209
+    DevExtTramp 210
+    DevExtTramp 211
+    DevExtTramp 212
+    DevExtTramp 213
+    DevExtTramp 214
+    DevExtTramp 215
+    DevExtTramp 216
+    DevExtTramp 217
+    DevExtTramp 218
+    DevExtTramp 219
+    DevExtTramp 220
+    DevExtTramp 221
+    DevExtTramp 222
+    DevExtTramp 223
+    DevExtTramp 224
+    DevExtTramp 225
+    DevExtTramp 226
+    DevExtTramp 227
+    DevExtTramp 228
+    DevExtTramp 229
+    DevExtTramp 230
+    DevExtTramp 231
+    DevExtTramp 232
+    DevExtTramp 233
+    DevExtTramp 234
+    DevExtTramp 235
+    DevExtTramp 236
+    DevExtTramp 237
+    DevExtTramp 238
+    DevExtTramp 239
+    DevExtTramp 240
+    DevExtTramp 241
+    DevExtTramp 242
+    DevExtTramp 243
+    DevExtTramp 244
+    DevExtTramp 245
+    DevExtTramp 246
+    DevExtTramp 247
+    DevExtTramp 248
+    DevExtTramp 249
+
+end
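
Note (illustrative, not part of the patch): each PhysDevExtTramp stub generated above unwraps the loader's wrapped VkPhysicalDevice, rewrites the first argument on the stack, and tail-jumps through the instance dispatch table at a fixed per-index slot. Below is a rough C sketch of that logic under assumed layouts; the struct and function names are hypothetical stand-ins for the loader's internal types, and the real offsets come from the PHYS_DEV_OFFSET_* and PTR_SIZE constants generated at build time.

    /* Hypothetical stand-ins for the loader's internal layout (sketch only). */
    typedef void (*PFN_phys_dev_ext)(void *physical_device /* plus forwarded args */);

    struct example_instance_dispatch {
        /* In the real table these slots sit at PHYS_DEV_OFFSET_INST_DISPATCH. */
        PFN_phys_dev_ext phys_dev_ext[250];   /* one slot per PhysDevExtTramp index */
    };

    struct example_wrapped_phys_dev {
        struct example_instance_dispatch *dispatch;  /* dereferenced first, as in the asm */
        void *unwrapped_phys_dev;                    /* at PHYS_DEV_OFFSET_PHYS_DEV_TRAMP */
    };

    /* Roughly what "PhysDevExtTramp 42" expands to; the asm additionally
     * preserves every other argument already on the stack. */
    static void example_phys_dev_ext_tramp_42(void *wrapped) {
        struct example_wrapped_phys_dev *pd = (struct example_wrapped_phys_dev *)wrapped;
        pd->dispatch->phys_dev_ext[42](pd->unwrapped_phys_dev);
    }
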
diff --git a/src/third_party/vulkan-loader/src/loader/vk_loader_layer.h b/src/third_party/vulkan-loader/src/loader/vk_loader_layer.h
new file mode 100644
index 0000000..dfcf5b2
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/vk_loader_layer.h
@@ -0,0 +1,46 @@
+/*
+*
+* Copyright (c) 2016 The Khronos Group Inc.
+* Copyright (c) 2016 Valve Corporation
+* Copyright (c) 2016 LunarG, Inc.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*
+* Author: Mark Lobodzinski <mark@lunarg.com>
+*
+*/
+#pragma once
+
+// Linked list node for tree of debug callbacks
+typedef struct VkDebugReportContent {
+    VkDebugReportCallbackEXT msgCallback;
+    PFN_vkDebugReportCallbackEXT pfnMsgCallback;
+    VkFlags msgFlags;
+} VkDebugReportContent;
+
+typedef struct VkDebugUtilsMessengerContent {
+    VkDebugUtilsMessengerEXT messenger;
+    VkDebugUtilsMessageSeverityFlagsEXT messageSeverity;
+    VkDebugUtilsMessageTypeFlagsEXT messageType;
+    PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback;
+} VkDebugUtilsMessengerContent;
+
+typedef struct VkLayerDbgFunctionNode_ {
+    bool is_messenger;
+    union {
+        VkDebugReportContent report;
+        VkDebugUtilsMessengerContent messenger;
+    };
+    void *pUserData;
+    struct VkLayerDbgFunctionNode_ *pNext;
+} VkLayerDbgFunctionNode;
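
Note (illustrative, not part of the patch): VkLayerDbgFunctionNode above is a singly linked list node holding either a legacy debug-report callback or a debug-utils messenger. A minimal hedged sketch of walking such a list to fire the debug-report callbacks follows; the function name is hypothetical and assumes <vulkan/vulkan.h> plus this header are in scope.

    /* Sketch only: invoke every matching legacy debug-report callback in the list. */
    static void example_dispatch_debug_report(const VkLayerDbgFunctionNode *head,
                                              VkFlags msg_flags, const char *msg) {
        for (const VkLayerDbgFunctionNode *node = head; node != NULL; node = node->pNext) {
            if (!node->is_messenger && (node->report.msgFlags & msg_flags)) {
                node->report.pfnMsgCallback(msg_flags,
                                            VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
                                            0 /* object */, 0 /* location */,
                                            0 /* messageCode */,
                                            "loader", msg, node->pUserData);
            }
        }
    }
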
diff --git a/src/third_party/vulkan-loader/src/loader/vk_loader_platform.h b/src/third_party/vulkan-loader/src/loader/vk_loader_platform.h
new file mode 100644
index 0000000..2ffda55
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/vk_loader_platform.h
@@ -0,0 +1,411 @@
+/*
+ *
+ * Copyright (c) 2015-2018 The Khronos Group Inc.
+ * Copyright (c) 2015-2018 Valve Corporation
+ * Copyright (c) 2015-2018 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Ian Elliot <ian@lunarg.com>
+ * Author: Jon Ashburn <jon@lunarg.com>
+ * Author: Lenny Komow <lenny@lunarg.com>
+ *
+ */
+#pragma once
+
+#if defined(_WIN32)
+// WinSock2.h must be included *BEFORE* windows.h
+#include <winsock2.h>
+#endif  // _WIN32
+
+#include "vulkan/vk_platform.h"
+#include "vulkan/vk_sdk_platform.h"
+
+#if defined(__linux__) || defined(__APPLE__)
+/* Linux-specific common code: */
+
+// Headers:
+//#ifndef _GNU_SOURCE
+//#define _GNU_SOURCE 1
+//#endif
+// TBD: Are the contents of the following file used?
+#include <unistd.h>
+// Note: The following file is for dynamic loading:
+#include <dlfcn.h>
+#include <pthread.h>
+#include <assert.h>
+#include <string.h>
+#include <stdbool.h>
+#include <stdlib.h>
+#include <libgen.h>
+
+// VK Library Filenames, Paths, etc.:
+#define PATH_SEPARATOR ':'
+#define DIRECTORY_SYMBOL '/'
+
+#define VULKAN_DIR "vulkan/"
+#define VULKAN_ICDCONF_DIR "icd.d"
+#define VULKAN_ICD_DIR "icd"
+#define VULKAN_SETTINGSCONF_DIR "settings.d"
+#define VULKAN_ELAYERCONF_DIR "explicit_layer.d"
+#define VULKAN_ILAYERCONF_DIR "implicit_layer.d"
+#define VULKAN_LAYER_DIR "layer"
+
+#define VK_DRIVERS_INFO_RELATIVE_DIR VULKAN_DIR VULKAN_ICDCONF_DIR
+#define VK_SETTINGS_INFO_RELATIVE_DIR VULKAN_DIR VULKAN_SETTINGSCONF_DIR
+#define VK_ELAYERS_INFO_RELATIVE_DIR VULKAN_DIR VULKAN_ELAYERCONF_DIR
+#define VK_ILAYERS_INFO_RELATIVE_DIR VULKAN_DIR VULKAN_ILAYERCONF_DIR
+
+#define VK_DRIVERS_INFO_REGISTRY_LOC ""
+#define VK_SETTINGS_INFO_REGISTRY_LOC ""
+#define VK_ELAYERS_INFO_REGISTRY_LOC ""
+#define VK_ILAYERS_INFO_REGISTRY_LOC ""
+
+#if !defined(DEFAULT_VK_LAYERS_PATH)
+#define DEFAULT_VK_LAYERS_PATH ""
+#endif
+#if !defined(LAYERS_SOURCE_PATH)
+#define LAYERS_SOURCE_PATH NULL
+#endif
+#define LAYERS_PATH_ENV "VK_LAYER_PATH"
+#define ENABLED_LAYERS_ENV "VK_INSTANCE_LAYERS"
+
+// C99:
+#define PRINTF_SIZE_T_SPECIFIER "%zu"
+
+// File IO
+static inline bool loader_platform_file_exists(const char *path) {
+    if (access(path, F_OK))
+        return false;
+    else
+        return true;
+}
+
+static inline bool loader_platform_is_path_absolute(const char *path) {
+    if (path[0] == '/')
+        return true;
+    else
+        return false;
+}
+
+static inline char *loader_platform_dirname(char *path) { return dirname(path); }
+
+// Dynamic Loading of libraries:
+typedef void *loader_platform_dl_handle;
+static inline loader_platform_dl_handle loader_platform_open_library(const char *libPath) {
+    // When loading the library, we use RTLD_LAZY so that not all symbols have to be
+    // resolved at this time (which improves performance). Note that if not all symbols
+    // can be resolved, this could cause crashes later. Use the LD_BIND_NOW environment
+    // variable to force all symbols to be resolved here.
+    return dlopen(libPath, RTLD_LAZY | RTLD_LOCAL);
+}
+static inline const char *loader_platform_open_library_error(const char *libPath) { return dlerror(); }
+static inline void loader_platform_close_library(loader_platform_dl_handle library) { dlclose(library); }
+static inline void *loader_platform_get_proc_address(loader_platform_dl_handle library, const char *name) {
+    assert(library);
+    assert(name);
+    return dlsym(library, name);
+}
+static inline const char *loader_platform_get_proc_address_error(const char *name) { return dlerror(); }
+
+// Threads:
+typedef pthread_t loader_platform_thread;
+#define THREAD_LOCAL_DECL __thread
+
+// The once init functionality is not used on Linux
+#define LOADER_PLATFORM_THREAD_ONCE_DECLARATION(var)
+#define LOADER_PLATFORM_THREAD_ONCE_DEFINITION(var)
+#define LOADER_PLATFORM_THREAD_ONCE(ctl, func)
+
+// Thread IDs:
+typedef pthread_t loader_platform_thread_id;
+static inline loader_platform_thread_id loader_platform_get_thread_id() { return pthread_self(); }
+
+// Thread mutex:
+typedef pthread_mutex_t loader_platform_thread_mutex;
+static inline void loader_platform_thread_create_mutex(loader_platform_thread_mutex *pMutex) { pthread_mutex_init(pMutex, NULL); }
+static inline void loader_platform_thread_lock_mutex(loader_platform_thread_mutex *pMutex) { pthread_mutex_lock(pMutex); }
+static inline void loader_platform_thread_unlock_mutex(loader_platform_thread_mutex *pMutex) { pthread_mutex_unlock(pMutex); }
+static inline void loader_platform_thread_delete_mutex(loader_platform_thread_mutex *pMutex) { pthread_mutex_destroy(pMutex); }
+typedef pthread_cond_t loader_platform_thread_cond;
+static inline void loader_platform_thread_init_cond(loader_platform_thread_cond *pCond) { pthread_cond_init(pCond, NULL); }
+static inline void loader_platform_thread_cond_wait(loader_platform_thread_cond *pCond, loader_platform_thread_mutex *pMutex) {
+    pthread_cond_wait(pCond, pMutex);
+}
+static inline void loader_platform_thread_cond_broadcast(loader_platform_thread_cond *pCond) { pthread_cond_broadcast(pCond); }
+
+#define loader_stack_alloc(size) alloca(size)
+
+#elif defined(_WIN32)  // defined(__linux__)
+/* Windows-specific common code: */
+// WinBase.h defines CreateSemaphore and synchapi.h defines CreateEvent
+//  undefine them to avoid conflicts with VkLayerDispatchTable struct members.
+#ifdef CreateSemaphore
+#undef CreateSemaphore
+#endif
+#ifdef CreateEvent
+#undef CreateEvent
+#endif
+#include <assert.h>
+#include <stdio.h>
+#include <string.h>
+#include <io.h>
+#include <stdbool.h>
+#include <shlwapi.h>
+#ifdef __cplusplus
+#include <iostream>
+#include <string>
+#endif  // __cplusplus
+
+// VK Library Filenames, Paths, etc.:
+#define PATH_SEPARATOR ';'
+#define DIRECTORY_SYMBOL '\\'
+#define DEFAULT_VK_REGISTRY_HIVE HKEY_LOCAL_MACHINE
+#define DEFAULT_VK_REGISTRY_HIVE_STR "HKEY_LOCAL_MACHINE"
+#define SECONDARY_VK_REGISTRY_HIVE HKEY_CURRENT_USER
+#define SECONDARY_VK_REGISTRY_HIVE_STR "HKEY_CURRENT_USER"
+
+#define VK_DRIVERS_INFO_RELATIVE_DIR ""
+#define VK_SETTINGS_INFO_RELATIVE_DIR ""
+#define VK_ELAYERS_INFO_RELATIVE_DIR ""
+#define VK_ILAYERS_INFO_RELATIVE_DIR ""
+
+#ifdef _WIN64
+#define HKR_VK_DRIVER_NAME API_NAME "DriverName"
+#else
+#define HKR_VK_DRIVER_NAME API_NAME "DriverNameWow"
+#endif
+#define VK_DRIVERS_INFO_REGISTRY_LOC "SOFTWARE\\Khronos\\" API_NAME "\\Drivers"
+#define VK_SETTINGS_INFO_REGISTRY_LOC "SOFTWARE\\Khronos\\" API_NAME "\\Settings"
+#define VK_ELAYERS_INFO_REGISTRY_LOC "SOFTWARE\\Khronos\\" API_NAME "\\ExplicitLayers"
+#define VK_ILAYERS_INFO_REGISTRY_LOC "SOFTWARE\\Khronos\\" API_NAME "\\ImplicitLayers"
+
+#if !defined(DEFAULT_VK_LAYERS_PATH)
+#define DEFAULT_VK_LAYERS_PATH ""
+#endif
+#if !defined(LAYERS_SOURCE_PATH)
+#define LAYERS_SOURCE_PATH NULL
+#endif
+#define LAYERS_PATH_ENV "VK_LAYER_PATH"
+#define ENABLED_LAYERS_ENV "VK_INSTANCE_LAYERS"
+
+#define PRINTF_SIZE_T_SPECIFIER "%Iu"
+
+#if defined(_WIN32)
+// Get the key for the plug n play driver registry
+// The string returned by this function should NOT be freed
+static inline const char *LoaderPnpDriverRegistry() {
+    BOOL is_wow;
+    IsWow64Process(GetCurrentProcess(), &is_wow);
+    return is_wow ? "VulkanDriverNameWow" : "VulkanDriverName";
+}
+static inline const wchar_t *LoaderPnpDriverRegistryWide() {
+    BOOL is_wow;
+    IsWow64Process(GetCurrentProcess(), &is_wow);
+    return is_wow ? L"VulkanDriverNameWow" : L"VulkanDriverName";
+}
+
+// Get the key for the plug 'n play explicit layer registry
+// The string returned by this function should NOT be freed
+static inline const char *LoaderPnpELayerRegistry() {
+    BOOL is_wow;
+    IsWow64Process(GetCurrentProcess(), &is_wow);
+    return is_wow ? "VulkanExplicitLayersWow" : "VulkanExplicitLayers";
+}
+static inline const wchar_t *LoaderPnpELayerRegistryWide() {
+    BOOL is_wow;
+    IsWow64Process(GetCurrentProcess(), &is_wow);
+    return is_wow ? L"VulkanExplicitLayersWow" : L"VulkanExplicitLayers";
+}
+
+// Get the key for the plug 'n play implicit layer registry
+// The string returned by this function should NOT be freed
+static inline const char *LoaderPnpILayerRegistry() {
+    BOOL is_wow;
+    IsWow64Process(GetCurrentProcess(), &is_wow);
+    return is_wow ? "VulkanImplicitLayersWow" : "VulkanImplicitLayers";
+}
+static inline const wchar_t *LoaderPnpILayerRegistryWide() {
+    BOOL is_wow;
+    IsWow64Process(GetCurrentProcess(), &is_wow);
+    return is_wow ? L"VulkanImplicitLayersWow" : L"VulkanImplicitLayers";
+}
+#endif
+
+// File IO
+static bool loader_platform_file_exists(const char *path) {
+    if ((_access(path, 0)) == -1)
+        return false;
+    else
+        return true;
+}
+
+static bool loader_platform_is_path_absolute(const char *path) {
+    if (!path || !*path) {
+        return false;
+    }
+    if (*path == DIRECTORY_SYMBOL || path[1] == ':') {
+        return true;
+    }
+    return false;
+}
+
+// WIN32 runtime doesn't have dirname().
+static inline char *loader_platform_dirname(char *path) {
+    char *current, *next;
+
+    // TODO/TBD: Do we need to deal with the Windows's ":" character?
+
+    for (current = path; *current != '\0'; current = next) {
+        next = strchr(current, DIRECTORY_SYMBOL);
+        if (next == NULL) {
+            if (current != path) *(current - 1) = '\0';
+            return path;
+        } else {
+            // Point one character past the DIRECTORY_SYMBOL:
+            next++;
+        }
+    }
+    return path;
+}
+
+// WIN32 runtime doesn't have basename().
+// Microsoft also doesn't have basename().  Paths are different on Windows, and
+// so this is just a temporary solution in order to get us compiling, so that we
+// can test some scenarios, and develop the correct solution for Windows.
+// TODO: Develop a better, permanent solution for Windows, to replace this
+// temporary code:
+static char *loader_platform_basename(char *pathname) {
+    char *current, *next;
+
+    // TODO/TBD: Do we need to deal with the Windows's ":" character?
+
+    for (current = pathname; *current != '\0'; current = next) {
+        next = strchr(current, DIRECTORY_SYMBOL);
+        if (next == NULL) {
+            // No more DIRECTORY_SYMBOLs, so return the current position:
+            return current;
+        } else {
+            // Point one character past the DIRECTORY_SYMBOL:
+            next++;
+        }
+    }
+    // We shouldn't get to here, but this makes the compiler happy:
+    return current;
+}
+
+// Dynamic Loading:
+typedef HMODULE loader_platform_dl_handle;
+static loader_platform_dl_handle loader_platform_open_library(const char *lib_path) {
+    // Try loading the library the original way first.
+    loader_platform_dl_handle lib_handle = LoadLibrary(lib_path);
+    if (lib_handle == NULL && GetLastError() == ERROR_MOD_NOT_FOUND) {
+        // If that failed, then try loading it with broader search folders.
+        lib_handle = LoadLibraryEx(lib_path, NULL, LOAD_LIBRARY_SEARCH_DEFAULT_DIRS | LOAD_LIBRARY_SEARCH_DLL_LOAD_DIR);
+    }
+    return lib_handle;
+}
+static char *loader_platform_open_library_error(const char *libPath) {
+    static char errorMsg[164];
+    (void)snprintf(errorMsg, 163, "Failed to open dynamic library \"%s\" with error %lu", libPath, GetLastError());
+    return errorMsg;
+}
+static void loader_platform_close_library(loader_platform_dl_handle library) { FreeLibrary(library); }
+static void *loader_platform_get_proc_address(loader_platform_dl_handle library, const char *name) {
+    assert(library);
+    assert(name);
+    return (void *)GetProcAddress(library, name);
+}
+static char *loader_platform_get_proc_address_error(const char *name) {
+    static char errorMsg[120];
+    (void)snprintf(errorMsg, 119, "Failed to find function \"%s\" in dynamic library", name);
+    return errorMsg;
+}
+
+// Threads:
+typedef HANDLE loader_platform_thread;
+
+// __declspec(thread) is not supported by the MinGW compiler (it is ignored with a warning or
+//                    causes an error depending on compiler switches)
+//
+// __thread should be used instead
+//
+// __MINGW32__ is defined for both 32- and 64-bit MinGW compilers, so it is enough to
+// detect any (32 or 64) flavor of MinGW compiler.
+//
+// @note __GNUC__ could be used as a more generic way to detect _any_
+//       GCC[-compatible] compiler on Windows, but this fix was tested
+//       only with MinGW, so keep it explicit at the moment.
+#if defined(__MINGW32__)
+#define THREAD_LOCAL_DECL __thread
+#else
+#define THREAD_LOCAL_DECL __declspec(thread)
+#endif
+
+// The once-init functionality is not used when building a DLL on Windows. This is because there is no way to clean up the
+// resources allocated by anything run through once init. This isn't a problem for static libraries, but it is for dynamic
+// ones. When building a DLL, we use DllMain() instead to allow properly cleaning up resources.
+#if defined(LOADER_DYNAMIC_LIB)
+#define LOADER_PLATFORM_THREAD_ONCE_DECLARATION(var)
+#define LOADER_PLATFORM_THREAD_ONCE_DEFINITION(var)
+#define LOADER_PLATFORM_THREAD_ONCE(ctl, func)
+#else
+#define LOADER_PLATFORM_THREAD_ONCE_DECLARATION(var) INIT_ONCE var = INIT_ONCE_STATIC_INIT;
+#define LOADER_PLATFORM_THREAD_ONCE_DEFINITION(var) INIT_ONCE var;
+#define LOADER_PLATFORM_THREAD_ONCE(ctl, func) loader_platform_thread_once_fn(ctl, func)
+static BOOL CALLBACK InitFuncWrapper(PINIT_ONCE InitOnce, PVOID Parameter, PVOID *Context) {
+    void (*func)(void) = (void (*)(void))Parameter;
+    func();
+    return TRUE;
+}
+static void loader_platform_thread_once_fn(void *ctl, void (*func)(void)) {
+    assert(func != NULL);
+    assert(ctl != NULL);
+    InitOnceExecuteOnce((PINIT_ONCE)ctl, InitFuncWrapper, (void *)func, NULL);
+}
+#endif
+
+// Thread IDs:
+typedef DWORD loader_platform_thread_id;
+static loader_platform_thread_id loader_platform_get_thread_id() { return GetCurrentThreadId(); }
+
+// Thread mutex:
+typedef CRITICAL_SECTION loader_platform_thread_mutex;
+static void loader_platform_thread_create_mutex(loader_platform_thread_mutex *pMutex) { InitializeCriticalSection(pMutex); }
+static void loader_platform_thread_lock_mutex(loader_platform_thread_mutex *pMutex) { EnterCriticalSection(pMutex); }
+static void loader_platform_thread_unlock_mutex(loader_platform_thread_mutex *pMutex) { LeaveCriticalSection(pMutex); }
+static void loader_platform_thread_delete_mutex(loader_platform_thread_mutex *pMutex) { DeleteCriticalSection(pMutex); }
+typedef CONDITION_VARIABLE loader_platform_thread_cond;
+static void loader_platform_thread_init_cond(loader_platform_thread_cond *pCond) { InitializeConditionVariable(pCond); }
+static void loader_platform_thread_cond_wait(loader_platform_thread_cond *pCond, loader_platform_thread_mutex *pMutex) {
+    SleepConditionVariableCS(pCond, pMutex, INFINITE);
+}
+static void loader_platform_thread_cond_broadcast(loader_platform_thread_cond *pCond) { WakeAllConditionVariable(pCond); }
+
+#define loader_stack_alloc(size) _alloca(size)
+#else  // defined(_WIN32)
+
+#error The "loader_platform.h" file must be modified for this OS.
+
+// NOTE: In order to support another OS, an #elif needs to be added (above the
+// "#else // defined(_WIN32)") for that OS, and OS-specific versions of the
+// contents of this file must be created.
+
+// NOTE: Other OS-specific changes are also needed for this OS.  Search for
+// files with "WIN32" in it, as a quick way to find files that must be changed.
+
+#endif  // defined(_WIN32)
+
+// returns true if the given string appears to be a relative or absolute
+// path, as opposed to a bare filename.
+static inline bool loader_platform_is_path(const char *path) { return strchr(path, DIRECTORY_SYMBOL) != NULL; }
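
Note (illustrative, not part of the patch): vk_loader_platform.h hides dlopen/LoadLibrary, dlsym/GetProcAddress, and the threading primitives behind a common loader_platform_* API. A short hedged usage sketch is below; the entrypoint name is the standard ICD interface symbol, the library path is only an example, and failure details would come from loader_platform_open_library_error() / loader_platform_get_proc_address_error().

    #include "vk_loader_platform.h"

    /* Sketch only: open an ICD library and resolve its GetInstanceProcAddr entrypoint. */
    static void *example_load_icd_gipa(const char *icd_path) {
        loader_platform_dl_handle lib = loader_platform_open_library(icd_path);
        if (lib == NULL) {
            /* loader_platform_open_library_error(icd_path) describes the failure. */
            return NULL;
        }
        void *gipa = loader_platform_get_proc_address(lib, "vk_icdGetInstanceProcAddr");
        if (gipa == NULL) {
            loader_platform_close_library(lib);
            return NULL;
        }
        return gipa;
    }
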
diff --git a/src/third_party/vulkan-loader/src/loader/vulkan-1.def b/src/third_party/vulkan-loader/src/loader/vulkan-1.def
new file mode 100644
index 0000000..1b6f519
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/vulkan-1.def
@@ -0,0 +1,221 @@
+
+;;;; Begin Copyright Notice ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;
+; Copyright (c) 2015-2017 The Khronos Group Inc.
+; Copyright (c) 2015-2017 Valve Corporation
+; Copyright (c) 2015-2017 LunarG, Inc.
+;
+; Licensed under the Apache License, Version 2.0 (the "License");
+; you may not use this file except in compliance with the License.
+; You may obtain a copy of the License at
+;
+;     http://www.apache.org/licenses/LICENSE-2.0
+;
+; Unless required by applicable law or agreed to in writing, software
+; distributed under the License is distributed on an "AS IS" BASIS,
+; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+; See the License for the specific language governing permissions and
+; limitations under the License.
+;
+;  Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+;
+;;;;  End Copyright Notice ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+LIBRARY vulkan-1.dll
+EXPORTS
+   vkCreateInstance
+   vkDestroyInstance
+   vkEnumeratePhysicalDevices
+   vkGetPhysicalDeviceFeatures
+   vkGetPhysicalDeviceFormatProperties
+   vkGetPhysicalDeviceImageFormatProperties
+   vkGetPhysicalDeviceProperties
+   vkGetPhysicalDeviceQueueFamilyProperties
+   vkGetPhysicalDeviceMemoryProperties
+   vkGetInstanceProcAddr
+   vkGetDeviceProcAddr
+   vkCreateDevice
+   vkDestroyDevice
+   vkEnumerateInstanceExtensionProperties
+   vkEnumerateDeviceExtensionProperties
+   vkEnumerateInstanceLayerProperties
+   vkEnumerateDeviceLayerProperties
+   vkGetDeviceQueue
+   vkQueueSubmit
+   vkQueueWaitIdle
+   vkDeviceWaitIdle
+   vkAllocateMemory
+   vkFreeMemory
+   vkMapMemory
+   vkUnmapMemory
+   vkFlushMappedMemoryRanges
+   vkInvalidateMappedMemoryRanges
+   vkGetDeviceMemoryCommitment
+   vkBindBufferMemory
+   vkBindImageMemory
+   vkGetBufferMemoryRequirements
+   vkGetImageMemoryRequirements
+   vkGetImageSparseMemoryRequirements
+   vkGetPhysicalDeviceSparseImageFormatProperties
+   vkQueueBindSparse
+   vkCreateFence
+   vkDestroyFence
+   vkResetFences
+   vkGetFenceStatus
+   vkWaitForFences
+   vkCreateSemaphore
+   vkDestroySemaphore
+   vkCreateEvent
+   vkDestroyEvent
+   vkGetEventStatus
+   vkSetEvent
+   vkResetEvent
+   vkCreateQueryPool
+   vkDestroyQueryPool
+   vkGetQueryPoolResults
+   vkCreateBuffer
+   vkDestroyBuffer
+   vkCreateBufferView
+   vkDestroyBufferView
+   vkCreateImage
+   vkDestroyImage
+   vkGetImageSubresourceLayout
+   vkCreateImageView
+   vkDestroyImageView
+   vkCreateShaderModule
+   vkDestroyShaderModule
+   vkCreatePipelineCache
+   vkDestroyPipelineCache
+   vkGetPipelineCacheData
+   vkMergePipelineCaches
+   vkCreateGraphicsPipelines
+   vkCreateComputePipelines
+   vkDestroyPipeline
+   vkCreatePipelineLayout
+   vkDestroyPipelineLayout
+   vkCreateSampler
+   vkDestroySampler
+   vkCreateDescriptorSetLayout
+   vkDestroyDescriptorSetLayout
+   vkCreateDescriptorPool
+   vkDestroyDescriptorPool
+   vkResetDescriptorPool
+   vkAllocateDescriptorSets
+   vkFreeDescriptorSets
+   vkUpdateDescriptorSets
+   vkCreateFramebuffer
+   vkDestroyFramebuffer
+   vkCreateRenderPass
+   vkDestroyRenderPass
+   vkGetRenderAreaGranularity
+   vkCreateCommandPool
+   vkDestroyCommandPool
+   vkResetCommandPool
+   vkAllocateCommandBuffers
+   vkFreeCommandBuffers
+   vkBeginCommandBuffer
+   vkEndCommandBuffer
+   vkResetCommandBuffer
+   vkCmdBindPipeline
+   vkCmdSetViewport
+   vkCmdSetScissor
+   vkCmdSetLineWidth
+   vkCmdSetDepthBias
+   vkCmdSetBlendConstants
+   vkCmdSetDepthBounds
+   vkCmdSetStencilCompareMask
+   vkCmdSetStencilWriteMask
+   vkCmdSetStencilReference
+   vkCmdBindDescriptorSets
+   vkCmdBindIndexBuffer
+   vkCmdBindVertexBuffers
+   vkCmdDraw
+   vkCmdDrawIndexed
+   vkCmdDrawIndirect
+   vkCmdDrawIndexedIndirect
+   vkCmdDispatch
+   vkCmdDispatchIndirect
+   vkCmdCopyBuffer
+   vkCmdCopyImage
+   vkCmdBlitImage
+   vkCmdCopyBufferToImage
+   vkCmdCopyImageToBuffer
+   vkCmdUpdateBuffer
+   vkCmdFillBuffer
+   vkCmdClearColorImage
+   vkCmdClearDepthStencilImage
+   vkCmdClearAttachments
+   vkCmdResolveImage
+   vkCmdSetEvent
+   vkCmdResetEvent
+   vkCmdWaitEvents
+   vkCmdPipelineBarrier
+   vkCmdBeginQuery
+   vkCmdEndQuery
+   vkCmdResetQueryPool
+   vkCmdWriteTimestamp
+   vkCmdCopyQueryPoolResults
+   vkCmdPushConstants
+   vkCmdBeginRenderPass
+   vkCmdNextSubpass
+   vkCmdEndRenderPass
+   vkCmdExecuteCommands
+   vkDestroySurfaceKHR
+   vkGetPhysicalDeviceSurfaceSupportKHR
+   vkGetPhysicalDeviceSurfaceCapabilitiesKHR
+   vkGetPhysicalDeviceSurfaceFormatsKHR
+   vkGetPhysicalDeviceSurfacePresentModesKHR
+   vkCreateSwapchainKHR
+   vkDestroySwapchainKHR
+   vkGetSwapchainImagesKHR
+   vkAcquireNextImageKHR
+   vkQueuePresentKHR
+   vkGetPhysicalDeviceDisplayPropertiesKHR
+   vkGetPhysicalDeviceDisplayPlanePropertiesKHR
+   vkGetDisplayPlaneSupportedDisplaysKHR
+   vkGetDisplayModePropertiesKHR
+   vkCreateDisplayModeKHR
+   vkGetDisplayPlaneCapabilitiesKHR
+   vkCreateDisplayPlaneSurfaceKHR
+   vkCreateSharedSwapchainsKHR
+   vkCreateWin32SurfaceKHR
+   vkGetPhysicalDeviceWin32PresentationSupportKHR
+
+   vkEnumerateInstanceVersion
+   vkEnumeratePhysicalDeviceGroups
+   vkGetPhysicalDeviceFeatures2
+   vkGetPhysicalDeviceProperties2
+   vkGetPhysicalDeviceFormatProperties2
+   vkGetPhysicalDeviceQueueFamilyProperties2
+   vkGetPhysicalDeviceMemoryProperties2
+   vkGetPhysicalDeviceSparseImageFormatProperties2
+   vkGetPhysicalDeviceExternalBufferProperties
+   vkGetPhysicalDeviceExternalSemaphoreProperties
+   vkGetPhysicalDeviceExternalFenceProperties
+   vkBindBufferMemory2
+   vkBindImageMemory2
+   vkGetDeviceGroupPeerMemoryFeatures
+   vkCmdSetDeviceMask
+   vkCmdDispatchBase
+   vkGetImageMemoryRequirements2
+   vkGetBufferMemoryRequirements2
+   vkTrimCommandPool
+   vkGetDeviceQueue2
+   vkCreateSamplerYcbcrConversion
+   vkDestroySamplerYcbcrConversion
+   vkGetDescriptorSetLayoutSupport
+   vkGetDeviceGroupPresentCapabilitiesKHR
+   vkGetDeviceGroupSurfacePresentModesKHR
+   vkGetPhysicalDevicePresentRectanglesKHR
+   vkAcquireNextImage2KHR
+   vkCreateDescriptorUpdateTemplate
+   vkDestroyDescriptorUpdateTemplate
+   vkUpdateDescriptorSetWithTemplate
+
+   vkGetPhysicalDeviceDisplayProperties2KHR
+   vkGetPhysicalDeviceDisplayPlaneProperties2KHR
+   vkGetDisplayModeProperties2KHR
+   vkGetDisplayPlaneCapabilities2KHR
+
+   vkGetImageSparseMemoryRequirements2
+   vkGetPhysicalDeviceImageFormatProperties2
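
Note (illustrative, not part of the patch): the module-definition file above enumerates the symbols exported from vulkan-1.dll. An application that links the matching import library can call those entry points directly, as in this hedged sketch (assumes Vulkan 1.1 SDK headers).

    #include <stdio.h>
    #include <vulkan/vulkan.h>

    int main(void) {
        /* vkEnumerateInstanceVersion and vkCreateInstance are both exported above. */
        uint32_t version = 0;
        if (vkEnumerateInstanceVersion(&version) == VK_SUCCESS) {
            printf("Loader reports Vulkan %u.%u\n",
                   VK_VERSION_MAJOR(version), VK_VERSION_MINOR(version));
        }

        VkApplicationInfo app = { .sType = VK_STRUCTURE_TYPE_APPLICATION_INFO,
                                  .apiVersion = VK_API_VERSION_1_1 };
        VkInstanceCreateInfo ci = { .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
                                    .pApplicationInfo = &app };
        VkInstance instance = VK_NULL_HANDLE;
        if (vkCreateInstance(&ci, NULL, &instance) == VK_SUCCESS) {
            vkDestroyInstance(instance, NULL);
        }
        return 0;
    }
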
diff --git a/src/third_party/vulkan-loader/src/loader/vulkan.pc.in b/src/third_party/vulkan-loader/src/loader/vulkan.pc.in
new file mode 100644
index 0000000..2ce5aea
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/vulkan.pc.in
@@ -0,0 +1,12 @@
+prefix=@CMAKE_INSTALL_PREFIX@
+exec_prefix=@CMAKE_INSTALL_PREFIX@
+libdir=${exec_prefix}/@CMAKE_INSTALL_LIBDIR@
+includedir=${prefix}/include
+
+Name: @CMAKE_PROJECT_NAME@
+Description: Vulkan Loader
+Version: @VK_API_VERSION@
+Libs: -L${libdir} -lvulkan
+Libs.private: @PRIVATE_LIBS@
+Cflags: -I${includedir}
+
diff --git a/src/third_party/vulkan-loader/src/loader/wsi.c b/src/third_party/vulkan-loader/src/loader/wsi.c
new file mode 100644
index 0000000..f8d8e53
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/wsi.c
@@ -0,0 +1,2023 @@
+/*
+ * Copyright (c) 2015-2016, 2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2016, 2019 Valve Corporation
+ * Copyright (c) 2015-2016, 2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Ian Elliott <ian@lunarg.com>
+ * Author: Jon Ashburn <jon@lunarg.com>
+ * Author: Ian Elliott <ianelliott@google.com>
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Lenny Komow <lenny@lunarg.com>
+ */
+
+#ifndef _GNU_SOURCE
+#define _GNU_SOURCE
+#endif
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include "vk_loader_platform.h"
+#include "loader.h"
+#include "wsi.h"
+#include <vulkan/vk_icd.h>
+
+// The first ICD/Loader interface version that supports querying the SurfaceKHR from
+// the ICDs.
+#define ICD_VER_SUPPORTS_ICD_SURFACE_KHR 3
+
+void wsi_create_instance(struct loader_instance *ptr_instance, const VkInstanceCreateInfo *pCreateInfo) {
+    ptr_instance->wsi_surface_enabled = false;
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    ptr_instance->wsi_win32_surface_enabled = false;
+#endif  // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    ptr_instance->wsi_wayland_surface_enabled = false;
+#endif  // VK_USE_PLATFORM_WAYLAND_KHR
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    ptr_instance->wsi_xcb_surface_enabled = false;
+#endif  // VK_USE_PLATFORM_XCB_KHR
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    ptr_instance->wsi_xlib_surface_enabled = false;
+#endif  // VK_USE_PLATFORM_XLIB_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    ptr_instance->wsi_android_surface_enabled = false;
+#endif  // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+    ptr_instance->wsi_macos_surface_enabled = false;
+#endif  // VK_USE_PLATFORM_MACOS_MVK
+#ifdef VK_USE_PLATFORM_IOS_MVK
+    ptr_instance->wsi_ios_surface_enabled = false;
+#endif  // VK_USE_PLATFORM_IOS_MVK
+    ptr_instance->wsi_display_enabled = false;
+    ptr_instance->wsi_display_props2_enabled = false;
+#ifdef VK_USE_PLATFORM_METAL_EXT
+    ptr_instance->wsi_metal_surface_enabled = false;
+#endif // VK_USE_PLATFORM_METAL_EXT
+
+    for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
+        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SURFACE_EXTENSION_NAME) == 0) {
+            ptr_instance->wsi_surface_enabled = true;
+            continue;
+        }
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WIN32_SURFACE_EXTENSION_NAME) == 0) {
+            ptr_instance->wsi_win32_surface_enabled = true;
+            continue;
+        }
+#endif  // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME) == 0) {
+            ptr_instance->wsi_wayland_surface_enabled = true;
+            continue;
+        }
+#endif  // VK_USE_PLATFORM_WAYLAND_KHR
+#ifdef VK_USE_PLATFORM_XCB_KHR
+        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XCB_SURFACE_EXTENSION_NAME) == 0) {
+            ptr_instance->wsi_xcb_surface_enabled = true;
+            continue;
+        }
+#endif  // VK_USE_PLATFORM_XCB_KHR
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XLIB_SURFACE_EXTENSION_NAME) == 0) {
+            ptr_instance->wsi_xlib_surface_enabled = true;
+            continue;
+        }
+#endif  // VK_USE_PLATFORM_XLIB_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_ANDROID_SURFACE_EXTENSION_NAME) == 0) {
+            ptr_instance->wsi_android_surface_enabled = true;
+            continue;
+        }
+#endif  // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_MVK_MACOS_SURFACE_EXTENSION_NAME) == 0) {
+            ptr_instance->wsi_macos_surface_enabled = true;
+            continue;
+        }
+#endif  // VK_USE_PLATFORM_MACOS_MVK
+#ifdef VK_USE_PLATFORM_IOS_MVK
+        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_MVK_IOS_SURFACE_EXTENSION_NAME) == 0) {
+            ptr_instance->wsi_ios_surface_enabled = true;
+            continue;
+        }
+#endif  // VK_USE_PLATFORM_IOS_MVK
+        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME) == 0) {
+            ptr_instance->wsi_headless_surface_enabled = true;
+            continue;
+        }
+#if defined(VK_USE_PLATFORM_METAL_EXT)
+        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_EXT_METAL_SURFACE_EXTENSION_NAME) == 0) {
+            ptr_instance->wsi_metal_surface_enabled = true;
+            continue;
+        }
+#endif
+        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_DISPLAY_EXTENSION_NAME) == 0) {
+            ptr_instance->wsi_display_enabled = true;
+            continue;
+        }
+        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME) == 0) {
+            ptr_instance->wsi_display_props2_enabled = true;
+            continue;
+        }
+    }
+}
+
+// Linux WSI surface extensions are not always compiled into the loader. (Assume
+// that on Windows the KHR_win32_surface extension is always compiled into the loader.) A given
+// Linux build environment might not have the headers required for building one
+// of the three extensions (Xlib, Xcb, Wayland). Thus, we need to check whether
+// the built loader actually supports the particular Linux surface extension.
+// If it is not supported by the built loader, it will not be included in the list of
+// enumerated instance extensions. This solves the issue where an ICD or layer
+// advertises support for a given Linux surface extension but the loader was not
+// built to support the extension.
+bool wsi_unsupported_instance_extension(const VkExtensionProperties *ext_prop) {
+#ifndef VK_USE_PLATFORM_WAYLAND_KHR
+    if (!strcmp(ext_prop->extensionName, "VK_KHR_wayland_surface")) return true;
+#endif  // VK_USE_PLATFORM_WAYLAND_KHR
+#ifndef VK_USE_PLATFORM_XCB_KHR
+    if (!strcmp(ext_prop->extensionName, "VK_KHR_xcb_surface")) return true;
+#endif  // VK_USE_PLATFORM_XCB_KHR
+#ifndef VK_USE_PLATFORM_XLIB_KHR
+    if (!strcmp(ext_prop->extensionName, "VK_KHR_xlib_surface")) return true;
+#endif  // VK_USE_PLATFORM_XLIB_KHR
+
+    return false;
+}
+
+// Functions for the VK_KHR_surface extension:
+
+// This is the trampoline entrypoint for DestroySurfaceKHR
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
+                                                             const VkAllocationCallbacks *pAllocator) {
+    const VkLayerInstanceDispatchTable *disp;
+    disp = loader_get_instance_layer_dispatch(instance);
+    disp->DestroySurfaceKHR(instance, surface, pAllocator);
+}
+
+// TODO probably need to lock around all the loader_get_instance() calls.
+
+// This is the instance chain terminator function for DestroySurfaceKHR
+VKAPI_ATTR void VKAPI_CALL terminator_DestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
+                                                        const VkAllocationCallbacks *pAllocator) {
+    struct loader_instance *ptr_instance = loader_get_instance(instance);
+
+    VkIcdSurface *icd_surface = (VkIcdSurface *)(uintptr_t)surface;
+    if (NULL != icd_surface) {
+        if (NULL != icd_surface->real_icd_surfaces) {
+            uint32_t i = 0;
+            for (struct loader_icd_term *icd_term = ptr_instance->icd_terms; icd_term != NULL; icd_term = icd_term->next, i++) {
+                if (icd_term->scanned_icd->interface_version >= ICD_VER_SUPPORTS_ICD_SURFACE_KHR) {
+                    if (NULL != icd_term->dispatch.DestroySurfaceKHR && (VkSurfaceKHR)NULL != icd_surface->real_icd_surfaces[i]) {
+                        icd_term->dispatch.DestroySurfaceKHR(icd_term->instance, icd_surface->real_icd_surfaces[i], pAllocator);
+                        icd_surface->real_icd_surfaces[i] = (VkSurfaceKHR)NULL;
+                    }
+                } else {
+                    // The real_icd_surface for any ICD not supporting the
+                    // proper interface version should be NULL.  If not, then
+                    // we have a problem.
+                    assert((VkSurfaceKHR)NULL == icd_surface->real_icd_surfaces[i]);
+                }
+            }
+            loader_instance_heap_free(ptr_instance, icd_surface->real_icd_surfaces);
+        }
+
+        loader_instance_heap_free(ptr_instance, (void *)(uintptr_t)surface);
+    }
+}
+
+// This is the trampoline entrypoint for GetPhysicalDeviceSurfaceSupportKHR
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
+                                                                                  uint32_t queueFamilyIndex, VkSurfaceKHR surface,
+                                                                                  VkBool32 *pSupported) {
+    const VkLayerInstanceDispatchTable *disp;
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    VkResult res = disp->GetPhysicalDeviceSurfaceSupportKHR(unwrapped_phys_dev, queueFamilyIndex, surface, pSupported);
+    return res;
+}
+
+// This is the instance chain terminator function for
+// GetPhysicalDeviceSurfaceSupportKHR
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
+                                                                             uint32_t queueFamilyIndex, VkSurfaceKHR surface,
+                                                                             VkBool32 *pSupported) {
+    // First, check to ensure the appropriate extension was enabled:
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    struct loader_instance *ptr_instance = (struct loader_instance *)icd_term->this_instance;
+    if (!ptr_instance->wsi_surface_enabled) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "VK_KHR_surface extension not enabled.  vkGetPhysicalDeviceSurfaceSupportKHR not executed!\n");
+        return VK_SUCCESS;
+    }
+
+    if (NULL == pSupported) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "NULL pointer passed into vkGetPhysicalDeviceSurfaceSupportKHR for pSupported!\n");
+        assert(false && "GetPhysicalDeviceSurfaceSupportKHR: Error, null pSupported");
+    }
+    *pSupported = false;
+
+    if (NULL == icd_term->dispatch.GetPhysicalDeviceSurfaceSupportKHR) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "ICD for selected physical device is not exporting vkGetPhysicalDeviceSurfaceSupportKHR!\n");
+        assert(false && "loader: null GetPhysicalDeviceSurfaceSupportKHR ICD pointer");
+    }
+
+    VkIcdSurface *icd_surface = (VkIcdSurface *)(uintptr_t)surface;
+    if (NULL != icd_surface->real_icd_surfaces && (VkSurfaceKHR)NULL != icd_surface->real_icd_surfaces[phys_dev_term->icd_index]) {
+        return icd_term->dispatch.GetPhysicalDeviceSurfaceSupportKHR(
+            phys_dev_term->phys_dev, queueFamilyIndex, icd_surface->real_icd_surfaces[phys_dev_term->icd_index], pSupported);
+    }
+
+    return icd_term->dispatch.GetPhysicalDeviceSurfaceSupportKHR(phys_dev_term->phys_dev, queueFamilyIndex, surface, pSupported);
+}
+
+// This is the trampoline entrypoint for GetPhysicalDeviceSurfaceCapabilitiesKHR
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
+    VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR *pSurfaceCapabilities) {
+    const VkLayerInstanceDispatchTable *disp;
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    VkResult res = disp->GetPhysicalDeviceSurfaceCapabilitiesKHR(unwrapped_phys_dev, surface, pSurfaceCapabilities);
+    return res;
+}
+
+// This is the instance chain terminator function for
+// GetPhysicalDeviceSurfaceCapabilitiesKHR
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
+                                                                                  VkSurfaceKHR surface,
+                                                                                  VkSurfaceCapabilitiesKHR *pSurfaceCapabilities) {
+    // First, check to ensure the appropriate extension was enabled:
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    struct loader_instance *ptr_instance = (struct loader_instance *)icd_term->this_instance;
+    if (!ptr_instance->wsi_surface_enabled) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "VK_KHR_surface extension not enabled.  vkGetPhysicalDeviceSurfaceCapabilitiesKHR not executed!\n");
+        return VK_SUCCESS;
+    }
+
+    if (NULL == pSurfaceCapabilities) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "NULL pointer passed into vkGetPhysicalDeviceSurfaceCapabilitiesKHR for pSurfaceCapabilities!\n");
+        assert(false && "GetPhysicalDeviceSurfaceCapabilitiesKHR: Error, null pSurfaceCapabilities");
+    }
+
+    if (NULL == icd_term->dispatch.GetPhysicalDeviceSurfaceCapabilitiesKHR) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "ICD for selected physical device is not exporting vkGetPhysicalDeviceSurfaceCapabilitiesKHR!\n");
+        assert(false && "loader: null GetPhysicalDeviceSurfaceCapabilitiesKHR ICD pointer");
+    }
+
+    VkIcdSurface *icd_surface = (VkIcdSurface *)(uintptr_t)surface;
+    if (NULL != icd_surface->real_icd_surfaces && (VkSurfaceKHR)NULL != icd_surface->real_icd_surfaces[phys_dev_term->icd_index]) {
+        return icd_term->dispatch.GetPhysicalDeviceSurfaceCapabilitiesKHR(
+            phys_dev_term->phys_dev, icd_surface->real_icd_surfaces[phys_dev_term->icd_index], pSurfaceCapabilities);
+    }
+
+    return icd_term->dispatch.GetPhysicalDeviceSurfaceCapabilitiesKHR(phys_dev_term->phys_dev, surface, pSurfaceCapabilities);
+}
+
+// This is the trampoline entrypoint for GetPhysicalDeviceSurfaceFormatsKHR
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice,
+                                                                                  VkSurfaceKHR surface,
+                                                                                  uint32_t *pSurfaceFormatCount,
+                                                                                  VkSurfaceFormatKHR *pSurfaceFormats) {
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    const VkLayerInstanceDispatchTable *disp;
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    VkResult res = disp->GetPhysicalDeviceSurfaceFormatsKHR(unwrapped_phys_dev, surface, pSurfaceFormatCount, pSurfaceFormats);
+    return res;
+}
+
+// This is the instance chain terminator function for
+// GetPhysicalDeviceSurfaceFormatsKHR
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
+                                                                             uint32_t *pSurfaceFormatCount,
+                                                                             VkSurfaceFormatKHR *pSurfaceFormats) {
+    // First, check to ensure the appropriate extension was enabled:
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    struct loader_instance *ptr_instance = (struct loader_instance *)icd_term->this_instance;
+    if (!ptr_instance->wsi_surface_enabled) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "VK_KHR_surface extension not enabled.  vkGetPhysicalDeviceSurfaceFormatsKHR not executed!\n");
+        return VK_SUCCESS;
+    }
+
+    if (NULL == pSurfaceFormatCount) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "NULL pointer passed into vkGetPhysicalDeviceSurfaceFormatsKHR for pSurfaceFormatCount!\n");
+        assert(false && "GetPhysicalDeviceSurfaceFormatsKHR(: Error, null pSurfaceFormatCount");
+    }
+
+    if (NULL == icd_term->dispatch.GetPhysicalDeviceSurfaceFormatsKHR) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "ICD for selected physical device is not exporting vkGetPhysicalDeviceSurfaceFormatsKHR!\n");
+        assert(false && "loader: null GetPhysicalDeviceSurfaceFormatsKHR ICD pointer");
+    }
+
+    VkIcdSurface *icd_surface = (VkIcdSurface *)(uintptr_t)surface;
+    if (NULL != icd_surface->real_icd_surfaces && (VkSurfaceKHR)NULL != icd_surface->real_icd_surfaces[phys_dev_term->icd_index]) {
+        return icd_term->dispatch.GetPhysicalDeviceSurfaceFormatsKHR(phys_dev_term->phys_dev,
+                                                                     icd_surface->real_icd_surfaces[phys_dev_term->icd_index],
+                                                                     pSurfaceFormatCount, pSurfaceFormats);
+    }
+
+    return icd_term->dispatch.GetPhysicalDeviceSurfaceFormatsKHR(phys_dev_term->phys_dev, surface, pSurfaceFormatCount,
+                                                                 pSurfaceFormats);
+}
+
+// This is the trampoline entrypoint for GetPhysicalDeviceSurfacePresentModesKHR
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
+                                                                                       VkSurfaceKHR surface,
+                                                                                       uint32_t *pPresentModeCount,
+                                                                                       VkPresentModeKHR *pPresentModes) {
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    const VkLayerInstanceDispatchTable *disp;
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    VkResult res = disp->GetPhysicalDeviceSurfacePresentModesKHR(unwrapped_phys_dev, surface, pPresentModeCount, pPresentModes);
+    return res;
+}
+
+// This is the instance chain terminator function for
+// GetPhysicalDeviceSurfacePresentModesKHR
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
+                                                                                  VkSurfaceKHR surface, uint32_t *pPresentModeCount,
+                                                                                  VkPresentModeKHR *pPresentModes) {
+    // First, check to ensure the appropriate extension was enabled:
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    struct loader_instance *ptr_instance = (struct loader_instance *)icd_term->this_instance;
+    if (!ptr_instance->wsi_surface_enabled) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "VK_KHR_surface extension not enabled.  vkGetPhysicalDeviceSurfacePresentModesKHR not executed!\n");
+        return VK_SUCCESS;
+    }
+
+    if (NULL == pPresentModeCount) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "NULL pointer passed into vkGetPhysicalDeviceSurfacePresentModesKHR for pPresentModeCount!\n");
+        assert(false && "GetPhysicalDeviceSurfacePresentModesKHR(: Error, null pPresentModeCount");
+    }
+
+    if (NULL == icd_term->dispatch.GetPhysicalDeviceSurfacePresentModesKHR) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "ICD for selected physical device is not exporting vkGetPhysicalDeviceSurfacePresentModesKHR!\n");
+        assert(false && "loader: null GetPhysicalDeviceSurfacePresentModesKHR ICD pointer");
+    }
+
+    VkIcdSurface *icd_surface = (VkIcdSurface *)(uintptr_t)surface;
+    if (NULL != icd_surface->real_icd_surfaces && (VkSurfaceKHR)NULL != icd_surface->real_icd_surfaces[phys_dev_term->icd_index]) {
+        return icd_term->dispatch.GetPhysicalDeviceSurfacePresentModesKHR(
+            phys_dev_term->phys_dev, icd_surface->real_icd_surfaces[phys_dev_term->icd_index], pPresentModeCount, pPresentModes);
+    }
+
+    return icd_term->dispatch.GetPhysicalDeviceSurfacePresentModesKHR(phys_dev_term->phys_dev, surface, pPresentModeCount,
+                                                                      pPresentModes);
+}
+
+// Functions for the VK_KHR_swapchain extension:
+
+// This is the trampoline entrypoint for CreateSwapchainKHR
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
+                                                                  const VkAllocationCallbacks *pAllocator,
+                                                                  VkSwapchainKHR *pSwapchain) {
+    const VkLayerDispatchTable *disp;
+    disp = loader_get_dispatch(device);
+    return disp->CreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
+                                                             const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain) {
+    uint32_t icd_index = 0;
+    struct loader_device *dev;
+    struct loader_icd_term *icd_term = loader_get_icd_and_device(device, &dev, &icd_index);
+    if (NULL != icd_term && NULL != icd_term->dispatch.CreateSwapchainKHR) {
+        VkIcdSurface *icd_surface = (VkIcdSurface *)(uintptr_t)pCreateInfo->surface;
+        if (NULL != icd_surface->real_icd_surfaces) {
+            if ((VkSurfaceKHR)NULL != icd_surface->real_icd_surfaces[icd_index]) {
+                // We found the ICD, and there is an ICD KHR surface
+                // associated with it, so copy the CreateInfo struct
+                // and point it at the ICD's surface.
+                VkSwapchainCreateInfoKHR *pCreateCopy = loader_stack_alloc(sizeof(VkSwapchainCreateInfoKHR));
+                if (NULL == pCreateCopy) {
+                    return VK_ERROR_OUT_OF_HOST_MEMORY;
+                }
+                memcpy(pCreateCopy, pCreateInfo, sizeof(VkSwapchainCreateInfoKHR));
+                pCreateCopy->surface = icd_surface->real_icd_surfaces[icd_index];
+                return icd_term->dispatch.CreateSwapchainKHR(device, pCreateCopy, pAllocator, pSwapchain);
+            }
+        }
+        return icd_term->dispatch.CreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain);
+    }
+    return VK_SUCCESS;
+}
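+
+// The comment above describes the only subtle step in the swapchain terminator: the
+// caller's create info must not be modified, so when the ICD owns a real surface the
+// loader makes a shallow stack copy and only repoints the surface member (the pNext
+// chain is shared, not deep-copied).  A minimal sketch of the same idea, assuming
+// loader_stack_alloc provides alloca-style storage as it is used above:
+//
+//     VkSwapchainCreateInfoKHR create_copy = *pCreateInfo;   // shallow copy
+//     create_copy.surface = icd_surface->real_icd_surfaces[icd_index];
+//     return icd_term->dispatch.CreateSwapchainKHR(device, &create_copy, pAllocator, pSwapchain);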
+
+// This is the trampoline entrypoint for DestroySwapchainKHR
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
+                                                               const VkAllocationCallbacks *pAllocator) {
+    const VkLayerDispatchTable *disp;
+    disp = loader_get_dispatch(device);
+    disp->DestroySwapchainKHR(device, swapchain, pAllocator);
+}
+
+// This is the trampoline entrypoint for GetSwapchainImagesKHR
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
+                                                                     uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages) {
+    const VkLayerDispatchTable *disp;
+    disp = loader_get_dispatch(device);
+    return disp->GetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
+}
+
+// This is the trampoline entrypoint for AcquireNextImageKHR
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
+                                                                   VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
+    const VkLayerDispatchTable *disp;
+    disp = loader_get_dispatch(device);
+    return disp->AcquireNextImageKHR(device, swapchain, timeout, semaphore, fence, pImageIndex);
+}
+
+// This is the trampoline entrypoint for QueuePresentKHR
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo) {
+    const VkLayerDispatchTable *disp;
+    disp = loader_get_dispatch(queue);
+    return disp->QueuePresentKHR(queue, pPresentInfo);
+}
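+
+// The swapchain trampolines above are the entrypoints an application ends up calling
+// every frame.  An illustrative application-side acquire/present loop (not loader
+// code; semaphore setup and VK_ERROR_OUT_OF_DATE_KHR handling are omitted):
+//
+//     uint32_t image_index = 0;
+//     vkAcquireNextImageKHR(device, swapchain, UINT64_MAX, acquire_sem, VK_NULL_HANDLE, &image_index);
+//     // ... submit rendering work that signals render_done_sem ...
+//     VkPresentInfoKHR present_info = {
+//         .sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
+//         .waitSemaphoreCount = 1,
+//         .pWaitSemaphores = &render_done_sem,
+//         .swapchainCount = 1,
+//         .pSwapchains = &swapchain,
+//         .pImageIndices = &image_index,
+//     };
+//     vkQueuePresentKHR(queue, &present_info);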
+
+static VkIcdSurface *AllocateIcdSurfaceStruct(struct loader_instance *instance, size_t base_size, size_t platform_size) {
+    // Allocate the loader's VkIcdSurface wrapper plus one ICD-owned surface slot per ICD:
+    VkIcdSurface *pIcdSurface = loader_instance_heap_alloc(instance, sizeof(VkIcdSurface), VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+    if (pIcdSurface != NULL) {
+        // Setup the new sizes and offsets so we can grow the structures in the
+        // future without having problems
+        pIcdSurface->base_size = (uint32_t)base_size;
+        pIcdSurface->platform_size = (uint32_t)platform_size;
+        pIcdSurface->non_platform_offset = (uint32_t)((uint8_t *)(&pIcdSurface->base_size) - (uint8_t *)pIcdSurface);
+        pIcdSurface->entire_size = sizeof(VkIcdSurface);
+
+        pIcdSurface->real_icd_surfaces = loader_instance_heap_alloc(instance, sizeof(VkSurfaceKHR) * instance->total_icd_count,
+                                                                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+        if (pIcdSurface->real_icd_surfaces == NULL) {
+            loader_instance_heap_free(instance, pIcdSurface);
+            pIcdSurface = NULL;
+        } else {
+            memset(pIcdSurface->real_icd_surfaces, 0, sizeof(VkSurfaceKHR) * instance->total_icd_count);
+        }
+    }
+    return pIcdSurface;
+}
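+
+// AllocateIcdSurfaceStruct records base_size, platform_size, non_platform_offset and
+// entire_size inside the surface so the structure can grow in later loader versions
+// without breaking existing consumers, and it allocates one VkSurfaceKHR slot per ICD
+// (real_icd_surfaces), indexed in the same order the icd_terms list is walked.  An
+// illustrative walk over those per-ICD slots, mirroring the cleanup loops used below:
+//
+//     uint32_t i = 0;
+//     for (struct loader_icd_term *term = instance->icd_terms; term != NULL; term = term->next, i++) {
+//         VkSurfaceKHR real = pIcdSurface->real_icd_surfaces[i];
+//         // real is (VkSurfaceKHR)NULL if this ICD did not create its own surface.
+//     }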
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+// Functions for the VK_KHR_win32_surface extension:
+
+// This is the trampoline entrypoint for CreateWin32SurfaceKHR
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR(VkInstance instance,
+                                                                     const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
+                                                                     const VkAllocationCallbacks *pAllocator,
+                                                                     VkSurfaceKHR *pSurface) {
+    const VkLayerInstanceDispatchTable *disp;
+    disp = loader_get_instance_layer_dispatch(instance);
+    VkResult res;
+
+    res = disp->CreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+    return res;
+}
+
+// This is the instance chain terminator function for CreateWin32SurfaceKHR
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
+                                                                const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
+    VkResult vkRes = VK_SUCCESS;
+    VkIcdSurface *pIcdSurface = NULL;
+    uint32_t i = 0;
+
+    // Initialize pSurface to NULL just to be safe.
+    *pSurface = VK_NULL_HANDLE;
+    // First, check to ensure the appropriate extension was enabled:
+    struct loader_instance *ptr_instance = loader_get_instance(instance);
+    if (!ptr_instance->wsi_win32_surface_enabled) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "VK_KHR_win32_surface extension not enabled.  vkCreateWin32SurfaceKHR not executed!\n");
+        vkRes = VK_ERROR_EXTENSION_NOT_PRESENT;
+        goto out;
+    }
+
+    // Next, if so, proceed with the implementation of this function:
+    pIcdSurface = AllocateIcdSurfaceStruct(ptr_instance, sizeof(pIcdSurface->win_surf.base), sizeof(pIcdSurface->win_surf));
+    if (pIcdSurface == NULL) {
+        vkRes = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+
+    pIcdSurface->win_surf.base.platform = VK_ICD_WSI_PLATFORM_WIN32;
+    pIcdSurface->win_surf.hinstance = pCreateInfo->hinstance;
+    pIcdSurface->win_surf.hwnd = pCreateInfo->hwnd;
+
+    // Loop through each ICD and determine if they need to create a surface
+    for (struct loader_icd_term *icd_term = ptr_instance->icd_terms; icd_term != NULL; icd_term = icd_term->next, i++) {
+        if (icd_term->scanned_icd->interface_version >= ICD_VER_SUPPORTS_ICD_SURFACE_KHR) {
+            if (NULL != icd_term->dispatch.CreateWin32SurfaceKHR) {
+                vkRes = icd_term->dispatch.CreateWin32SurfaceKHR(icd_term->instance, pCreateInfo, pAllocator,
+                                                                 &pIcdSurface->real_icd_surfaces[i]);
+                if (VK_SUCCESS != vkRes) {
+                    goto out;
+                }
+            }
+        }
+    }
+
+    *pSurface = (VkSurfaceKHR)(pIcdSurface);
+
+out:
+
+    if (VK_SUCCESS != vkRes && NULL != pIcdSurface) {
+        if (NULL != pIcdSurface->real_icd_surfaces) {
+            i = 0;
+            for (struct loader_icd_term *icd_term = ptr_instance->icd_terms; icd_term != NULL; icd_term = icd_term->next, i++) {
+                if ((VkSurfaceKHR)NULL != pIcdSurface->real_icd_surfaces[i] && NULL != icd_term->dispatch.DestroySurfaceKHR) {
+                    icd_term->dispatch.DestroySurfaceKHR(icd_term->instance, pIcdSurface->real_icd_surfaces[i], pAllocator);
+                }
+            }
+            loader_instance_heap_free(ptr_instance, pIcdSurface->real_icd_surfaces);
+        }
+        loader_instance_heap_free(ptr_instance, pIcdSurface);
+    }
+
+    return vkRes;
+}
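+
+// terminator_CreateWin32SurfaceKHR and the Wayland/Xcb/Xlib/macOS/Metal/headless
+// variants below all follow the same shape: allocate one VkIcdSurface, ask every ICD
+// with a new enough loader interface to create its own surface into
+// real_icd_surfaces[i], and on failure destroy whatever was created before freeing
+// the wrapper.  ICDs older than ICD_VER_SUPPORTS_ICD_SURFACE_KHR never get their own
+// surface; they receive the loader's VkIcdSurface pointer and read it through the
+// VkIcdSurfaceBase header.  An illustrative ICD-side decode (a sketch assuming the
+// VkIcdSurfaceWin32 layout from vk_icd.h, not loader code):
+//
+//     const VkIcdSurfaceBase *base = (const VkIcdSurfaceBase *)(uintptr_t)surface;
+//     if (base->platform == VK_ICD_WSI_PLATFORM_WIN32) {
+//         const VkIcdSurfaceWin32 *win = (const VkIcdSurfaceWin32 *)base;
+//         // win->hinstance and win->hwnd identify the native window.
+//     }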
+
+// This is the trampoline entrypoint for
+// GetPhysicalDeviceWin32PresentationSupportKHR
+LOADER_EXPORT VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice,
+                                                                                            uint32_t queueFamilyIndex) {
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    const VkLayerInstanceDispatchTable *disp;
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    VkBool32 res = disp->GetPhysicalDeviceWin32PresentationSupportKHR(unwrapped_phys_dev, queueFamilyIndex);
+    return res;
+}
+
+// This is the instance chain terminator function for
+// GetPhysicalDeviceWin32PresentationSupportKHR
+VKAPI_ATTR VkBool32 VKAPI_CALL terminator_GetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice,
+                                                                                       uint32_t queueFamilyIndex) {
+    // First, check to ensure the appropriate extension was enabled:
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    struct loader_instance *ptr_instance = (struct loader_instance *)icd_term->this_instance;
+    if (!ptr_instance->wsi_win32_surface_enabled) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "VK_KHR_win32_surface extension not enabled.  vkGetPhysicalDeviceWin32PresentationSupportKHR not executed!\n");
+        return VK_FALSE;
+    }
+
+    if (NULL == icd_term->dispatch.GetPhysicalDeviceWin32PresentationSupportKHR) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "ICD for selected physical device is not exporting vkGetPhysicalDeviceWin32PresentationSupportKHR!\n");
+        assert(false && "loader: null GetPhysicalDeviceWin32PresentationSupportKHR ICD pointer");
+    }
+
+    return icd_term->dispatch.GetPhysicalDeviceWin32PresentationSupportKHR(phys_dev_term->phys_dev, queueFamilyIndex);
+}
+#endif  // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+
+// This is the trampoline entrypoint for CreateWaylandSurfaceKHR
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateWaylandSurfaceKHR(VkInstance instance,
+                                                                       const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
+                                                                       const VkAllocationCallbacks *pAllocator,
+                                                                       VkSurfaceKHR *pSurface) {
+    const VkLayerInstanceDispatchTable *disp;
+    disp = loader_get_instance_layer_dispatch(instance);
+    VkResult res;
+
+    res = disp->CreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+    return res;
+}
+
+// This is the instance chain terminator function for CreateWaylandSurfaceKHR
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateWaylandSurfaceKHR(VkInstance instance,
+                                                                  const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
+                                                                  const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
+    VkResult vkRes = VK_SUCCESS;
+    VkIcdSurface *pIcdSurface = NULL;
+    uint32_t i = 0;
+
+    // First, check to ensure the appropriate extension was enabled:
+    struct loader_instance *ptr_instance = loader_get_instance(instance);
+    if (!ptr_instance->wsi_wayland_surface_enabled) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "VK_KHR_wayland_surface extension not enabled.  vkCreateWaylandSurfaceKHR not executed!\n");
+        vkRes = VK_ERROR_EXTENSION_NOT_PRESENT;
+        goto out;
+    }
+
+    // Next, if so, proceed with the implementation of this function:
+    pIcdSurface = AllocateIcdSurfaceStruct(ptr_instance, sizeof(pIcdSurface->wayland_surf.base), sizeof(pIcdSurface->wayland_surf));
+    if (pIcdSurface == NULL) {
+        vkRes = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+
+    pIcdSurface->wayland_surf.base.platform = VK_ICD_WSI_PLATFORM_WAYLAND;
+    pIcdSurface->wayland_surf.display = pCreateInfo->display;
+    pIcdSurface->wayland_surf.surface = pCreateInfo->surface;
+
+    // Loop through each ICD and determine if they need to create a surface
+    for (struct loader_icd_term *icd_term = ptr_instance->icd_terms; icd_term != NULL; icd_term = icd_term->next, i++) {
+        if (icd_term->scanned_icd->interface_version >= ICD_VER_SUPPORTS_ICD_SURFACE_KHR) {
+            if (NULL != icd_term->dispatch.CreateWaylandSurfaceKHR) {
+                vkRes = icd_term->dispatch.CreateWaylandSurfaceKHR(icd_term->instance, pCreateInfo, pAllocator,
+                                                                   &pIcdSurface->real_icd_surfaces[i]);
+                if (VK_SUCCESS != vkRes) {
+                    goto out;
+                }
+            }
+        }
+    }
+
+    *pSurface = (VkSurfaceKHR)pIcdSurface;
+
+out:
+
+    if (VK_SUCCESS != vkRes && NULL != pIcdSurface) {
+        if (NULL != pIcdSurface->real_icd_surfaces) {
+            i = 0;
+            for (struct loader_icd_term *icd_term = ptr_instance->icd_terms; icd_term != NULL; icd_term = icd_term->next, i++) {
+                if ((VkSurfaceKHR)NULL != pIcdSurface->real_icd_surfaces[i] && NULL != icd_term->dispatch.DestroySurfaceKHR) {
+                    icd_term->dispatch.DestroySurfaceKHR(icd_term->instance, pIcdSurface->real_icd_surfaces[i], pAllocator);
+                }
+            }
+            loader_instance_heap_free(ptr_instance, pIcdSurface->real_icd_surfaces);
+        }
+        loader_instance_heap_free(ptr_instance, pIcdSurface);
+    }
+
+    return vkRes;
+}
+
+// This is the trampoline entrypoint for
+// GetPhysicalDeviceWaylandPresentationSupportKHR
+LOADER_EXPORT VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice,
+                                                                                              uint32_t queueFamilyIndex,
+                                                                                              struct wl_display *display) {
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    const VkLayerInstanceDispatchTable *disp;
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    VkBool32 res = disp->GetPhysicalDeviceWaylandPresentationSupportKHR(unwrapped_phys_dev, queueFamilyIndex, display);
+    return res;
+}
+
+// This is the instance chain terminator function for
+// GetPhysicalDeviceWaylandPresentationSupportKHR
+VKAPI_ATTR VkBool32 VKAPI_CALL terminator_GetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice,
+                                                                                         uint32_t queueFamilyIndex,
+                                                                                         struct wl_display *display) {
+    // First, check to ensure the appropriate extension was enabled:
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    struct loader_instance *ptr_instance = (struct loader_instance *)icd_term->this_instance;
+    if (!ptr_instance->wsi_wayland_surface_enabled) {
+        loader_log(
+            ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+            "VK_KHR_wayland_surface extension not enabled.  vkGetPhysicalDeviceWaylandPresentationSupportKHR not executed!\n");
+        return VK_FALSE;
+    }
+
+    if (NULL == icd_term->dispatch.GetPhysicalDeviceWaylandPresentationSupportKHR) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "ICD for selected physical device is not exporting vkGetPhysicalDeviceWaylandPresentationSupportKHR!\n");
+        assert(false && "loader: null GetPhysicalDeviceWaylandPresentationSupportKHR ICD pointer");
+    }
+
+    return icd_term->dispatch.GetPhysicalDeviceWaylandPresentationSupportKHR(phys_dev_term->phys_dev, queueFamilyIndex, display);
+}
+#endif  // VK_USE_PLATFORM_WAYLAND_KHR
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+
+// Functions for the VK_KHR_xcb_surface extension:
+
+// This is the trampoline entrypoint for CreateXcbSurfaceKHR
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR(VkInstance instance,
+                                                                   const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
+                                                                   const VkAllocationCallbacks *pAllocator,
+                                                                   VkSurfaceKHR *pSurface) {
+    const VkLayerInstanceDispatchTable *disp;
+    disp = loader_get_instance_layer_dispatch(instance);
+    VkResult res;
+
+    res = disp->CreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+    return res;
+}
+
+// This is the instance chain terminator function for CreateXcbSurfaceKHR
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
+                                                              const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
+    VkResult vkRes = VK_SUCCESS;
+    VkIcdSurface *pIcdSurface = NULL;
+    uint32_t i = 0;
+
+    // First, check to ensure the appropriate extension was enabled:
+    struct loader_instance *ptr_instance = loader_get_instance(instance);
+    if (!ptr_instance->wsi_xcb_surface_enabled) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "VK_KHR_xcb_surface extension not enabled.  vkCreateXcbSurfaceKHR not executed!\n");
+        vkRes = VK_ERROR_EXTENSION_NOT_PRESENT;
+        goto out;
+    }
+
+    // Next, if so, proceed with the implementation of this function:
+    pIcdSurface = AllocateIcdSurfaceStruct(ptr_instance, sizeof(pIcdSurface->xcb_surf.base), sizeof(pIcdSurface->xcb_surf));
+    if (pIcdSurface == NULL) {
+        vkRes = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+
+    pIcdSurface->xcb_surf.base.platform = VK_ICD_WSI_PLATFORM_XCB;
+    pIcdSurface->xcb_surf.connection = pCreateInfo->connection;
+    pIcdSurface->xcb_surf.window = pCreateInfo->window;
+
+    // Loop through each ICD and determine if they need to create a surface
+    for (struct loader_icd_term *icd_term = ptr_instance->icd_terms; icd_term != NULL; icd_term = icd_term->next, i++) {
+        if (icd_term->scanned_icd->interface_version >= ICD_VER_SUPPORTS_ICD_SURFACE_KHR) {
+            if (NULL != icd_term->dispatch.CreateXcbSurfaceKHR) {
+                vkRes = icd_term->dispatch.CreateXcbSurfaceKHR(icd_term->instance, pCreateInfo, pAllocator,
+                                                               &pIcdSurface->real_icd_surfaces[i]);
+                if (VK_SUCCESS != vkRes) {
+                    goto out;
+                }
+            }
+        }
+    }
+
+    *pSurface = (VkSurfaceKHR)pIcdSurface;
+
+out:
+
+    if (VK_SUCCESS != vkRes && NULL != pIcdSurface) {
+        if (NULL != pIcdSurface->real_icd_surfaces) {
+            i = 0;
+            for (struct loader_icd_term *icd_term = ptr_instance->icd_terms; icd_term != NULL; icd_term = icd_term->next, i++) {
+                if ((VkSurfaceKHR)NULL != pIcdSurface->real_icd_surfaces[i] && NULL != icd_term->dispatch.DestroySurfaceKHR) {
+                    icd_term->dispatch.DestroySurfaceKHR(icd_term->instance, pIcdSurface->real_icd_surfaces[i], pAllocator);
+                }
+            }
+            loader_instance_heap_free(ptr_instance, pIcdSurface->real_icd_surfaces);
+        }
+        loader_instance_heap_free(ptr_instance, pIcdSurface);
+    }
+
+    return vkRes;
+}
+
+// This is the trampoline entrypoint for
+// GetPhysicalDeviceXcbPresentationSupportKHR
+LOADER_EXPORT VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice,
+                                                                                          uint32_t queueFamilyIndex,
+                                                                                          xcb_connection_t *connection,
+                                                                                          xcb_visualid_t visual_id) {
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    const VkLayerInstanceDispatchTable *disp;
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    VkBool32 res = disp->GetPhysicalDeviceXcbPresentationSupportKHR(unwrapped_phys_dev, queueFamilyIndex, connection, visual_id);
+    return res;
+}
+
+// This is the instance chain terminator function for
+// GetPhysicalDeviceXcbPresentationSupportKHR
+VKAPI_ATTR VkBool32 VKAPI_CALL terminator_GetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice,
+                                                                                     uint32_t queueFamilyIndex,
+                                                                                     xcb_connection_t *connection,
+                                                                                     xcb_visualid_t visual_id) {
+    // First, check to ensure the appropriate extension was enabled:
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    struct loader_instance *ptr_instance = (struct loader_instance *)icd_term->this_instance;
+    if (!ptr_instance->wsi_xcb_surface_enabled) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "VK_KHR_xcb_surface extension not enabled.  vkGetPhysicalDeviceXcbPresentationSupportKHR not executed!\n");
+        return VK_FALSE;
+    }
+
+    if (NULL == icd_term->dispatch.GetPhysicalDeviceXcbPresentationSupportKHR) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "ICD for selected physical device is not exporting vkGetPhysicalDeviceXcbPresentationSupportKHR!\n");
+        assert(false && "loader: null GetPhysicalDeviceXcbPresentationSupportKHR ICD pointer");
+    }
+
+    return icd_term->dispatch.GetPhysicalDeviceXcbPresentationSupportKHR(phys_dev_term->phys_dev, queueFamilyIndex, connection,
+                                                                         visual_id);
+}
+#endif  // VK_USE_PLATFORM_XCB_KHR
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+
+// Functions for the VK_KHR_xlib_surface extension:
+
+// This is the trampoline entrypoint for CreateXlibSurfaceKHR
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateXlibSurfaceKHR(VkInstance instance,
+                                                                    const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
+                                                                    const VkAllocationCallbacks *pAllocator,
+                                                                    VkSurfaceKHR *pSurface) {
+    const VkLayerInstanceDispatchTable *disp;
+    disp = loader_get_instance_layer_dispatch(instance);
+    VkResult res;
+
+    res = disp->CreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+    return res;
+}
+
+// This is the instance chain terminator function for CreateXlibSurfaceKHR
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
+                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
+    VkResult vkRes = VK_SUCCESS;
+    VkIcdSurface *pIcdSurface = NULL;
+    uint32_t i = 0;
+
+    // First, check to ensure the appropriate extension was enabled:
+    struct loader_instance *ptr_instance = loader_get_instance(instance);
+    if (!ptr_instance->wsi_xlib_surface_enabled) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "VK_KHR_xlib_surface extension not enabled.  vkCreateXlibSurfaceKHR not executed!\n");
+        vkRes = VK_ERROR_EXTENSION_NOT_PRESENT;
+        goto out;
+    }
+
+    // Next, if so, proceed with the implementation of this function:
+    pIcdSurface = AllocateIcdSurfaceStruct(ptr_instance, sizeof(pIcdSurface->xlib_surf.base), sizeof(pIcdSurface->xlib_surf));
+    if (pIcdSurface == NULL) {
+        vkRes = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+
+    pIcdSurface->xlib_surf.base.platform = VK_ICD_WSI_PLATFORM_XLIB;
+    pIcdSurface->xlib_surf.dpy = pCreateInfo->dpy;
+    pIcdSurface->xlib_surf.window = pCreateInfo->window;
+
+    // Loop through each ICD and determine if they need to create a surface
+    for (struct loader_icd_term *icd_term = ptr_instance->icd_terms; icd_term != NULL; icd_term = icd_term->next, i++) {
+        if (icd_term->scanned_icd->interface_version >= ICD_VER_SUPPORTS_ICD_SURFACE_KHR) {
+            if (NULL != icd_term->dispatch.CreateXlibSurfaceKHR) {
+                vkRes = icd_term->dispatch.CreateXlibSurfaceKHR(icd_term->instance, pCreateInfo, pAllocator,
+                                                                &pIcdSurface->real_icd_surfaces[i]);
+                if (VK_SUCCESS != vkRes) {
+                    goto out;
+                }
+            }
+        }
+    }
+
+    *pSurface = (VkSurfaceKHR)pIcdSurface;
+
+out:
+
+    if (VK_SUCCESS != vkRes && NULL != pIcdSurface) {
+        if (NULL != pIcdSurface->real_icd_surfaces) {
+            i = 0;
+            for (struct loader_icd_term *icd_term = ptr_instance->icd_terms; icd_term != NULL; icd_term = icd_term->next, i++) {
+                if ((VkSurfaceKHR)NULL != pIcdSurface->real_icd_surfaces[i] && NULL != icd_term->dispatch.DestroySurfaceKHR) {
+                    icd_term->dispatch.DestroySurfaceKHR(icd_term->instance, pIcdSurface->real_icd_surfaces[i], pAllocator);
+                }
+            }
+            loader_instance_heap_free(ptr_instance, pIcdSurface->real_icd_surfaces);
+        }
+        loader_instance_heap_free(ptr_instance, pIcdSurface);
+    }
+
+    return vkRes;
+}
+
+// This is the trampoline entrypoint for
+// GetPhysicalDeviceXlibPresentationSupportKHR
+LOADER_EXPORT VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice,
+                                                                                           uint32_t queueFamilyIndex, Display *dpy,
+                                                                                           VisualID visualID) {
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    const VkLayerInstanceDispatchTable *disp;
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    VkBool32 res = disp->GetPhysicalDeviceXlibPresentationSupportKHR(unwrapped_phys_dev, queueFamilyIndex, dpy, visualID);
+    return res;
+}
+
+// This is the instance chain terminator function for
+// GetPhysicalDeviceXlibPresentationSupportKHR
+VKAPI_ATTR VkBool32 VKAPI_CALL terminator_GetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice,
+                                                                                      uint32_t queueFamilyIndex, Display *dpy,
+                                                                                      VisualID visualID) {
+    // First, check to ensure the appropriate extension was enabled:
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    struct loader_instance *ptr_instance = (struct loader_instance *)icd_term->this_instance;
+    if (!ptr_instance->wsi_xlib_surface_enabled) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "VK_KHR_xlib_surface extension not enabled.  vkGetPhysicalDeviceXlibPresentationSupportKHR not executed!\n");
+        return VK_FALSE;
+    }
+
+    if (NULL == icd_term->dispatch.GetPhysicalDeviceXlibPresentationSupportKHR) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "ICD for selected physical device is not exporting vkGetPhysicalDeviceXlibPresentationSupportKHR!\n");
+        assert(false && "loader: null GetPhysicalDeviceXlibPresentationSupportKHR ICD pointer");
+    }
+
+    return icd_term->dispatch.GetPhysicalDeviceXlibPresentationSupportKHR(phys_dev_term->phys_dev, queueFamilyIndex, dpy, visualID);
+}
+#endif  // VK_USE_PLATFORM_XLIB_KHR
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+// Functions for the VK_KHR_android_surface extension:
+
+// This is the trampoline entrypoint for CreateAndroidSurfaceKHR
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateAndroidSurfaceKHR(VkInstance instance, ANativeWindow *window,
+                                                                       const VkAllocationCallbacks *pAllocator,
+                                                                       VkSurfaceKHR *pSurface) {
+    const VkLayerInstanceDispatchTable *disp;
+    disp = loader_get_instance_layer_dispatch(instance);
+    VkResult res;
+
+    res = disp->CreateAndroidSurfaceKHR(instance, window, pAllocator, pSurface);
+    return res;
+}
+
+// This is the instance chain terminator function for CreateAndroidSurfaceKHR
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateAndroidSurfaceKHR(VkInstance instance, ANativeWindow *window,
+                                                                  const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
+    // First, check to ensure the appropriate extension was enabled:
+    struct loader_instance *ptr_instance = loader_get_instance(instance);
+    if (!ptr_instance->wsi_display_enabled) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "VK_KHR_display extension not enabled.  vkCreateAndroidSurfaceKHR not executed!\n");
+        return VK_ERROR_EXTENSION_NOT_PRESENT;
+    }
+
+    // Next, if so, proceed with the implementation of this function:
+    VkIcdSurfaceAndroid *pIcdSurface =
+        loader_instance_heap_alloc(ptr_instance, sizeof(VkIcdSurfaceAndroid), VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+    if (pIcdSurface == NULL) {
+        return VK_ERROR_OUT_OF_HOST_MEMORY;
+    }
+
+    pIcdSurface->base.platform = VK_ICD_WSI_PLATFORM_ANDROID;
+    pIcdSurface->window = window;
+
+    *pSurface = (VkSurfaceKHR)pIcdSurface;
+
+    return VK_SUCCESS;
+}
+
+#endif  // VK_USE_PLATFORM_ANDROID_KHR
+
+// Functions for the VK_EXT_headless_surface extension:
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateHeadlessSurfaceEXT(VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT *pCreateInfo,
+                                                          const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
+    const VkLayerInstanceDispatchTable *disp;
+    disp = loader_get_instance_layer_dispatch(instance);
+    VkResult res;
+
+    res = disp->CreateHeadlessSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface);
+    return res;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateHeadlessSurfaceEXT(VkInstance instance,
+                                                                   const VkHeadlessSurfaceCreateInfoEXT *pCreateInfo,
+                                                                   const VkAllocationCallbacks *pAllocator,
+                                                                   VkSurfaceKHR *pSurface) {
+    struct loader_instance *inst = loader_get_instance(instance);
+    VkIcdSurface *pIcdSurface = NULL;
+    VkResult vkRes = VK_SUCCESS;
+    uint32_t i = 0;
+
+    if (!inst->wsi_headless_surface_enabled) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "VK_EXT_headless_surface extension not enabled.  "
+                   "vkCreateHeadlessSurfaceEXT not executed!\n");
+        return VK_SUCCESS;
+    }
+
+    // Next, if so, proceed with the implementation of this function:
+    pIcdSurface = AllocateIcdSurfaceStruct(inst, sizeof(pIcdSurface->headless_surf.base), sizeof(pIcdSurface->headless_surf));
+    if (pIcdSurface == NULL) {
+        vkRes = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+
+    pIcdSurface->headless_surf.base.platform = VK_ICD_WSI_PLATFORM_HEADLESS;
+    // Loop through each ICD and determine if they need to create a surface
+    for (struct loader_icd_term *icd_term = inst->icd_terms; icd_term != NULL; icd_term = icd_term->next, i++) {
+        if (icd_term->scanned_icd->interface_version >= ICD_VER_SUPPORTS_ICD_SURFACE_KHR) {
+            if (NULL != icd_term->dispatch.CreateHeadlessSurfaceEXT) {
+                vkRes = icd_term->dispatch.CreateHeadlessSurfaceEXT(icd_term->instance, pCreateInfo, pAllocator,
+                                                                    &pIcdSurface->real_icd_surfaces[i]);
+                if (VK_SUCCESS != vkRes) {
+                    goto out;
+                }
+            }
+        }
+    }
+
+    *pSurface = (VkSurfaceKHR)pIcdSurface;
+
+out:
+
+    if (VK_SUCCESS != vkRes && NULL != pIcdSurface) {
+        if (NULL != pIcdSurface->real_icd_surfaces) {
+            i = 0;
+            for (struct loader_icd_term *icd_term = inst->icd_terms; icd_term != NULL; icd_term = icd_term->next, i++) {
+                if ((VkSurfaceKHR)NULL != pIcdSurface->real_icd_surfaces[i] && NULL != icd_term->dispatch.DestroySurfaceKHR) {
+                    icd_term->dispatch.DestroySurfaceKHR(icd_term->instance, pIcdSurface->real_icd_surfaces[i], pAllocator);
+                }
+            }
+            loader_instance_heap_free(inst, pIcdSurface->real_icd_surfaces);
+        }
+        loader_instance_heap_free(inst, pIcdSurface);
+    }
+
+    return vkRes;
+}
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+
+// Functions for the VK_MVK_macos_surface extension:
+
+// This is the trampoline entrypoint for CreateMacOSSurfaceMVK
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateMacOSSurfaceMVK(VkInstance instance,
+                                                                     const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
+                                                                     const VkAllocationCallbacks *pAllocator,
+                                                                     VkSurfaceKHR *pSurface) {
+    const VkLayerInstanceDispatchTable *disp;
+    disp = loader_get_instance_layer_dispatch(instance);
+    VkResult res;
+
+    res = disp->CreateMacOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
+    return res;
+}
+
+// This is the instance chain terminator function for CreateMacOSSurfaceMVK
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateMacOSSurfaceMVK(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
+                                                                const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
+    VkResult vkRes = VK_SUCCESS;
+    VkIcdSurface *pIcdSurface = NULL;
+    uint32_t i = 0;
+
+    // First, check to ensure the appropriate extension was enabled:
+    struct loader_instance *ptr_instance = loader_get_instance(instance);
+    if (!ptr_instance->wsi_macos_surface_enabled) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "VK_MVK_macos_surface extension not enabled.  vkCreateMacOSSurfaceMVK not executed!\n");
+        vkRes = VK_ERROR_EXTENSION_NOT_PRESENT;
+        goto out;
+    }
+
+    // Next, if so, proceed with the implementation of this function:
+    pIcdSurface = AllocateIcdSurfaceStruct(ptr_instance, sizeof(pIcdSurface->macos_surf.base), sizeof(pIcdSurface->macos_surf));
+    if (pIcdSurface == NULL) {
+        vkRes = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+
+    pIcdSurface->macos_surf.base.platform = VK_ICD_WSI_PLATFORM_MACOS;
+    pIcdSurface->macos_surf.pView = pCreateInfo->pView;
+
+    // Loop through each ICD and determine if they need to create a surface
+    for (struct loader_icd_term *icd_term = ptr_instance->icd_terms; icd_term != NULL; icd_term = icd_term->next, i++) {
+        if (icd_term->scanned_icd->interface_version >= ICD_VER_SUPPORTS_ICD_SURFACE_KHR) {
+            if (NULL != icd_term->dispatch.CreateMacOSSurfaceMVK) {
+                vkRes = icd_term->dispatch.CreateMacOSSurfaceMVK(icd_term->instance, pCreateInfo, pAllocator,
+                                                                 &pIcdSurface->real_icd_surfaces[i]);
+                if (VK_SUCCESS != vkRes) {
+                    goto out;
+                }
+            }
+        }
+    }
+
+    *pSurface = (VkSurfaceKHR)pIcdSurface;
+
+out:
+
+    if (VK_SUCCESS != vkRes && NULL != pIcdSurface) {
+        if (NULL != pIcdSurface->real_icd_surfaces) {
+            i = 0;
+            for (struct loader_icd_term *icd_term = ptr_instance->icd_terms; icd_term != NULL; icd_term = icd_term->next, i++) {
+                if ((VkSurfaceKHR)NULL != pIcdSurface->real_icd_surfaces[i] && NULL != icd_term->dispatch.DestroySurfaceKHR) {
+                    icd_term->dispatch.DestroySurfaceKHR(icd_term->instance, pIcdSurface->real_icd_surfaces[i], pAllocator);
+                }
+            }
+            loader_instance_heap_free(ptr_instance, pIcdSurface->real_icd_surfaces);
+        }
+        loader_instance_heap_free(ptr_instance, pIcdSurface);
+    }
+
+    return vkRes;
+}
+
+#endif  // VK_USE_PLATFORM_MACOS_MVK
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+
+// Functions for the VK_MVK_ios_surface extension:
+
+// This is the trampoline entrypoint for CreateIOSSurfaceMVK
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateIOSSurfaceMVK(VkInstance instance,
+                                                                   const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
+                                                                   const VkAllocationCallbacks *pAllocator,
+                                                                   VkSurfaceKHR *pSurface) {
+    const VkLayerInstanceDispatchTable *disp;
+    disp = loader_get_instance_layer_dispatch(instance);
+    VkResult res;
+
+    res = disp->CreateIOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
+    return res;
+}
+
+// This is the instance chain terminator function for CreateIOSSurfaceMVK
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
+                                                              const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
+    // First, check to ensure the appropriate extension was enabled:
+    struct loader_instance *ptr_instance = loader_get_instance(instance);
+    if (!ptr_instance->wsi_ios_surface_enabled) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "VK_MVK_ios_surface extension not enabled.  vkCreateIOSSurfaceMVK not executed!\n");
+        return VK_ERROR_EXTENSION_NOT_PRESENT;
+    }
+
+    // Next, if so, proceed with the implementation of this function:
+    VkIcdSurfaceIOS *pIcdSurface =
+        loader_instance_heap_alloc(ptr_instance, sizeof(VkIcdSurfaceIOS), VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+    if (pIcdSurface == NULL) {
+        return VK_ERROR_OUT_OF_HOST_MEMORY;
+    }
+
+    pIcdSurface->base.platform = VK_ICD_WSI_PLATFORM_IOS;
+    pIcdSurface->pView = pCreateInfo->pView;
+
+    *pSurface = (VkSurfaceKHR)pIcdSurface;
+
+    return VK_SUCCESS;
+}
+
+#endif  // VK_USE_PLATFORM_IOS_MVK
+
+#if defined(VK_USE_PLATFORM_METAL_EXT)
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateMetalSurfaceEXT(VkInstance instance,
+                                                                     const VkMetalSurfaceCreateInfoEXT *pCreateInfo,
+                                                                     const VkAllocationCallbacks *pAllocator,
+                                                                     VkSurfaceKHR *pSurface) {
+    const VkLayerInstanceDispatchTable *disp = loader_get_instance_layer_dispatch(instance);
+    return disp->CreateMetalSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateMetalSurfaceEXT(VkInstance instance, const VkMetalSurfaceCreateInfoEXT *pCreateInfo,
+                                                                const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
+    VkResult result = VK_SUCCESS;
+    VkIcdSurface *icd_surface = NULL;
+    uint32_t i;
+
+    // First, check to ensure the appropriate extension was enabled:
+    struct loader_instance *ptr_instance = loader_get_instance(instance);
+    if (!ptr_instance->wsi_metal_surface_enabled) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "VK_EXT_metal_surface extension not enabled. vkCreateMetalSurfaceEXT will not be executed.\n");
+    }
+
+    // Next, if so, proceed with the implementation of this function:
+    icd_surface = AllocateIcdSurfaceStruct(ptr_instance, sizeof(icd_surface->metal_surf.base), sizeof(icd_surface->metal_surf));
+    if (icd_surface == NULL) {
+        result = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+
+    icd_surface->metal_surf.base.platform = VK_ICD_WSI_PLATFORM_METAL;
+    icd_surface->metal_surf.pLayer = pCreateInfo->pLayer;
+
+    // Loop through each ICD and determine if they need to create a surface
+    i = 0;
+    for (struct loader_icd_term *icd_term = ptr_instance->icd_terms; icd_term != NULL; icd_term = icd_term->next, ++i) {
+        if (icd_term->scanned_icd->interface_version >= ICD_VER_SUPPORTS_ICD_SURFACE_KHR) {
+            if (icd_term->dispatch.CreateMetalSurfaceEXT != NULL) {
+                result = icd_term->dispatch.CreateMetalSurfaceEXT(icd_term->instance, pCreateInfo, pAllocator,
+                                                                  &icd_surface->real_icd_surfaces[i]);
+                if (result != VK_SUCCESS) {
+                    goto out;
+                }
+            }
+        }
+    }
+    *pSurface = (VkSurfaceKHR)icd_surface;
+
+out:
+    if (result != VK_SUCCESS && icd_surface != NULL) {
+        if (icd_surface->real_icd_surfaces != NULL) {
+            i = 0;
+            for (struct loader_icd_term *icd_term = ptr_instance->icd_terms; icd_term != NULL; icd_term = icd_term->next, ++i) {
+                if (icd_surface->real_icd_surfaces[i] != VK_NULL_HANDLE && icd_term->dispatch.DestroySurfaceKHR != NULL) {
+                    icd_term->dispatch.DestroySurfaceKHR(icd_term->instance, icd_surface->real_icd_surfaces[i], pAllocator);
+                }
+            }
+            loader_instance_heap_free(ptr_instance, icd_surface->real_icd_surfaces);
+        }
+        loader_instance_heap_free(ptr_instance, icd_surface);
+    }
+    return result;
+}
+
+#endif
+
+// Functions for the VK_KHR_display instance extension:
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice,
+                                                                                     uint32_t *pPropertyCount,
+                                                                                     VkDisplayPropertiesKHR *pProperties) {
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    const VkLayerInstanceDispatchTable *disp;
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    VkResult res = disp->GetPhysicalDeviceDisplayPropertiesKHR(unwrapped_phys_dev, pPropertyCount, pProperties);
+    return res;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice,
+                                                                                uint32_t *pPropertyCount,
+                                                                                VkDisplayPropertiesKHR *pProperties) {
+    // First, check to ensure the appropriate extension was enabled:
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    struct loader_instance *ptr_instance = (struct loader_instance *)icd_term->this_instance;
+    if (!ptr_instance->wsi_display_enabled) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "VK_KHR_display extension not enabled.  vkGetPhysicalDeviceDisplayPropertiesKHR not executed!\n");
+        return VK_SUCCESS;
+    }
+
+    if (NULL == icd_term->dispatch.GetPhysicalDeviceDisplayPropertiesKHR) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "ICD for selected physical device is not exporting vkGetPhysicalDeviceDisplayPropertiesKHR!\n");
+        assert(false && "loader: null GetPhysicalDeviceDisplayPropertiesKHR ICD pointer");
+    }
+
+    return icd_term->dispatch.GetPhysicalDeviceDisplayPropertiesKHR(phys_dev_term->phys_dev, pPropertyCount, pProperties);
+}
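+
+// The VK_KHR_display queries in this section are typically used together by an
+// application to drive a display directly, without a windowing system.  An
+// illustrative query sequence using the standard two-call idiom (application-side
+// sketch, not loader code; allocation checks omitted):
+//
+//     uint32_t display_count = 0;
+//     vkGetPhysicalDeviceDisplayPropertiesKHR(phys_dev, &display_count, NULL);
+//     VkDisplayPropertiesKHR *displays = malloc(display_count * sizeof(*displays));
+//     vkGetPhysicalDeviceDisplayPropertiesKHR(phys_dev, &display_count, displays);
+//
+//     uint32_t mode_count = 0;
+//     vkGetDisplayModePropertiesKHR(phys_dev, displays[0].display, &mode_count, NULL);
+//     // ...then enumerate planes, pick a supported display per plane, and create a
+//     // display-backed surface for it.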
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
+    VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount, VkDisplayPlanePropertiesKHR *pProperties) {
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    const VkLayerInstanceDispatchTable *disp;
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    VkResult res = disp->GetPhysicalDeviceDisplayPlanePropertiesKHR(unwrapped_phys_dev, pPropertyCount, pProperties);
+    return res;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
+                                                                                     uint32_t *pPropertyCount,
+                                                                                     VkDisplayPlanePropertiesKHR *pProperties) {
+    // First, check to ensure the appropriate extension was enabled:
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    struct loader_instance *ptr_instance = (struct loader_instance *)icd_term->this_instance;
+    if (!ptr_instance->wsi_display_enabled) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "VK_KHR_display extension not enabled.  vkGetPhysicalDeviceDisplayPlanePropertiesKHR not executed!\n");
+        return VK_SUCCESS;
+    }
+
+    if (NULL == icd_term->dispatch.GetPhysicalDeviceDisplayPlanePropertiesKHR) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "ICD for selected physical device is not exporting vkGetPhysicalDeviceDisplayPlanePropertiesKHR!\n");
+        assert(false && "loader: null GetPhysicalDeviceDisplayPlanePropertiesKHR ICD pointer");
+    }
+
+    return icd_term->dispatch.GetPhysicalDeviceDisplayPlanePropertiesKHR(phys_dev_term->phys_dev, pPropertyCount, pProperties);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice,
+                                                                                   uint32_t planeIndex, uint32_t *pDisplayCount,
+                                                                                   VkDisplayKHR *pDisplays) {
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    const VkLayerInstanceDispatchTable *disp;
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    VkResult res = disp->GetDisplayPlaneSupportedDisplaysKHR(unwrapped_phys_dev, planeIndex, pDisplayCount, pDisplays);
+    return res;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex,
+                                                                              uint32_t *pDisplayCount, VkDisplayKHR *pDisplays) {
+    // First, check to ensure the appropriate extension was enabled:
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    struct loader_instance *ptr_instance = (struct loader_instance *)icd_term->this_instance;
+    if (!ptr_instance->wsi_display_enabled) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "VK_KHR_display extension not enabled.  vkGetDisplayPlaneSupportedDisplaysKHR not executed!\n");
+        return VK_SUCCESS;
+    }
+
+    if (NULL == icd_term->dispatch.GetDisplayPlaneSupportedDisplaysKHR) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "ICD for selected physical device is not exporting vkGetDisplayPlaneSupportedDisplaysKHR!\n");
+        assert(false && "loader: null GetDisplayPlaneSupportedDisplaysKHR ICD pointer");
+    }
+
+    return icd_term->dispatch.GetDisplayPlaneSupportedDisplaysKHR(phys_dev_term->phys_dev, planeIndex, pDisplayCount, pDisplays);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display,
+                                                                           uint32_t *pPropertyCount,
+                                                                           VkDisplayModePropertiesKHR *pProperties) {
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    const VkLayerInstanceDispatchTable *disp;
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    VkResult res = disp->GetDisplayModePropertiesKHR(unwrapped_phys_dev, display, pPropertyCount, pProperties);
+    return res;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display,
+                                                                      uint32_t *pPropertyCount,
+                                                                      VkDisplayModePropertiesKHR *pProperties) {
+    // First, check to ensure the appropriate extension was enabled:
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    struct loader_instance *ptr_instance = (struct loader_instance *)icd_term->this_instance;
+    if (!ptr_instance->wsi_display_enabled) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "VK_KHR_display extension not enabled.  vkGetDisplayModePropertiesKHR not executed!\n");
+        return VK_SUCCESS;
+    }
+
+    if (NULL == icd_term->dispatch.GetDisplayModePropertiesKHR) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "ICD for selected physical device is not exporting vkGetDisplayModePropertiesKHR!\n");
+        assert(false && "loader: null GetDisplayModePropertiesKHR ICD pointer");
+    }
+
+    return icd_term->dispatch.GetDisplayModePropertiesKHR(phys_dev_term->phys_dev, display, pPropertyCount, pProperties);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayModeKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display,
+                                                                    const VkDisplayModeCreateInfoKHR *pCreateInfo,
+                                                                    const VkAllocationCallbacks *pAllocator,
+                                                                    VkDisplayModeKHR *pMode) {
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    const VkLayerInstanceDispatchTable *disp;
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    VkResult res = disp->CreateDisplayModeKHR(unwrapped_phys_dev, display, pCreateInfo, pAllocator, pMode);
+    return res;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateDisplayModeKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display,
+                                                               const VkDisplayModeCreateInfoKHR *pCreateInfo,
+                                                               const VkAllocationCallbacks *pAllocator, VkDisplayModeKHR *pMode) {
+    // First, check to ensure the appropriate extension was enabled:
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    struct loader_instance *ptr_instance = (struct loader_instance *)icd_term->this_instance;
+    if (!ptr_instance->wsi_display_enabled) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "VK_KHR_display extension not enabled.  vkCreateDisplayModeKHR not executed!\n");
+        return VK_ERROR_EXTENSION_NOT_PRESENT;
+    }
+
+    if (NULL == icd_term->dispatch.CreateDisplayModeKHR) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "ICD for selected physical device is not exporting vkCreateDisplayModeKHR!\n");
+        assert(false && "loader: null CreateDisplayModeKHR ICD pointer");
+    }
+
+    return icd_term->dispatch.CreateDisplayModeKHR(phys_dev_term->phys_dev, display, pCreateInfo, pAllocator, pMode);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice,
+                                                                              VkDisplayModeKHR mode, uint32_t planeIndex,
+                                                                              VkDisplayPlaneCapabilitiesKHR *pCapabilities) {
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    const VkLayerInstanceDispatchTable *disp;
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    VkResult res = disp->GetDisplayPlaneCapabilitiesKHR(unwrapped_phys_dev, mode, planeIndex, pCapabilities);
+    return res;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode,
+                                                                         uint32_t planeIndex,
+                                                                         VkDisplayPlaneCapabilitiesKHR *pCapabilities) {
+    // First, check to ensure the appropriate extension was enabled:
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    struct loader_instance *ptr_instance = (struct loader_instance *)icd_term->this_instance;
+    if (!ptr_instance->wsi_display_enabled) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "VK_KHR_display extension not enabled.  vkGetDisplayPlaneCapabilitiesKHR not executed!\n");
+        return VK_SUCCESS;
+    }
+
+    if (NULL == icd_term->dispatch.GetDisplayPlaneCapabilitiesKHR) {
+        loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "ICD for selected physical device is not exporting vkGetDisplayPlaneCapabilitiesKHR!\n");
+        assert(false && "loader: null GetDisplayPlaneCapabilitiesKHR ICD pointer");
+    }
+
+    return icd_term->dispatch.GetDisplayPlaneCapabilitiesKHR(phys_dev_term->phys_dev, mode, planeIndex, pCapabilities);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR(VkInstance instance,
+                                                                            const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
+                                                                            const VkAllocationCallbacks *pAllocator,
+                                                                            VkSurfaceKHR *pSurface) {
+    const VkLayerInstanceDispatchTable *disp;
+    disp = loader_get_instance_layer_dispatch(instance);
+    VkResult res;
+
+    res = disp->CreateDisplayPlaneSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+    return res;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateDisplayPlaneSurfaceKHR(VkInstance instance,
+                                                                       const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
+                                                                       const VkAllocationCallbacks *pAllocator,
+                                                                       VkSurfaceKHR *pSurface) {
+    struct loader_instance *inst = loader_get_instance(instance);
+    VkIcdSurface *pIcdSurface = NULL;
+    VkResult vkRes = VK_SUCCESS;
+    uint32_t i = 0;
+
+    if (!inst->wsi_display_enabled) {
+        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "VK_KHR_display extension not enabled.  vkCreateDisplayPlaneSurfaceKHR not executed!\n");
+        vkRes = VK_ERROR_EXTENSION_NOT_PRESENT;
+        goto out;
+    }
+
+    // Next, if so, proceed with the implementation of this function:
+    pIcdSurface = AllocateIcdSurfaceStruct(inst, sizeof(pIcdSurface->display_surf.base), sizeof(pIcdSurface->display_surf));
+    if (pIcdSurface == NULL) {
+        vkRes = VK_ERROR_OUT_OF_HOST_MEMORY;
+        goto out;
+    }
+
+    pIcdSurface->display_surf.base.platform = VK_ICD_WSI_PLATFORM_DISPLAY;
+    pIcdSurface->display_surf.displayMode = pCreateInfo->displayMode;
+    pIcdSurface->display_surf.planeIndex = pCreateInfo->planeIndex;
+    pIcdSurface->display_surf.planeStackIndex = pCreateInfo->planeStackIndex;
+    pIcdSurface->display_surf.transform = pCreateInfo->transform;
+    pIcdSurface->display_surf.globalAlpha = pCreateInfo->globalAlpha;
+    pIcdSurface->display_surf.alphaMode = pCreateInfo->alphaMode;
+    pIcdSurface->display_surf.imageExtent = pCreateInfo->imageExtent;
+
+    // Loop through each ICD and determine whether it needs to create a surface
+    for (struct loader_icd_term *icd_term = inst->icd_terms; icd_term != NULL; icd_term = icd_term->next, i++) {
+        if (icd_term->scanned_icd->interface_version >= ICD_VER_SUPPORTS_ICD_SURFACE_KHR) {
+            if (NULL != icd_term->dispatch.CreateDisplayPlaneSurfaceKHR) {
+                vkRes = icd_term->dispatch.CreateDisplayPlaneSurfaceKHR(icd_term->instance, pCreateInfo, pAllocator,
+                                                                        &pIcdSurface->real_icd_surfaces[i]);
+                if (VK_SUCCESS != vkRes) {
+                    goto out;
+                }
+            }
+        }
+    }
+
+    *pSurface = (VkSurfaceKHR)pIcdSurface;
+
+out:
+
+    if (VK_SUCCESS != vkRes && NULL != pIcdSurface) {
+        if (NULL != pIcdSurface->real_icd_surfaces) {
+            i = 0;
+            for (struct loader_icd_term *icd_term = inst->icd_terms; icd_term != NULL; icd_term = icd_term->next, i++) {
+                if ((VkSurfaceKHR)NULL != pIcdSurface->real_icd_surfaces[i] && NULL != icd_term->dispatch.DestroySurfaceKHR) {
+                    icd_term->dispatch.DestroySurfaceKHR(icd_term->instance, pIcdSurface->real_icd_surfaces[i], pAllocator);
+                }
+            }
+            loader_instance_heap_free(inst, pIcdSurface->real_icd_surfaces);
+        }
+        loader_instance_heap_free(inst, pIcdSurface);
+    }
+
+    return vkRes;
+}
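+
+// Illustrative sketch (not loader code): the VK_KHR_display entry points above are
+// typically driven by an application in roughly this order, assuming a physical
+// device that reports at least one display and one plane (phys_dev and instance
+// are the application's own handles):
+//
+//     uint32_t count = 1;
+//     VkDisplayPropertiesKHR disp_props;
+//     vkGetPhysicalDeviceDisplayPropertiesKHR(phys_dev, &count, &disp_props);
+//
+//     count = 1;
+//     VkDisplayModePropertiesKHR mode_props;
+//     vkGetDisplayModePropertiesKHR(phys_dev, disp_props.display, &count, &mode_props);
+//
+//     VkDisplaySurfaceCreateInfoKHR ci = {
+//         .sType = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR,
+//         .displayMode = mode_props.displayMode,
+//         .planeIndex = 0,
+//         .planeStackIndex = 0,
+//         .transform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
+//         .globalAlpha = 1.0f,
+//         .alphaMode = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR,
+//         .imageExtent = mode_props.parameters.visibleRegion,
+//     };
+//     VkSurfaceKHR surface;
+//     vkCreateDisplayPlaneSurfaceKHR(instance, &ci, NULL, &surface);
+//
+// The trampoline above then builds the loader-owned VkIcdSurface and fans the
+// call out to every ICD that exports vkCreateDisplayPlaneSurfaceKHR.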
+
+// EXT_display_swapchain Extension command
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
+                                                                         const VkSwapchainCreateInfoKHR *pCreateInfos,
+                                                                         const VkAllocationCallbacks *pAllocator,
+                                                                         VkSwapchainKHR *pSwapchains) {
+    const VkLayerDispatchTable *disp;
+    disp = loader_get_dispatch(device);
+    return disp->CreateSharedSwapchainsKHR(device, swapchainCount, pCreateInfos, pAllocator, pSwapchains);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
+                                                                    const VkSwapchainCreateInfoKHR *pCreateInfos,
+                                                                    const VkAllocationCallbacks *pAllocator,
+                                                                    VkSwapchainKHR *pSwapchains) {
+    uint32_t icd_index = 0;
+    struct loader_device *dev;
+    struct loader_icd_term *icd_term = loader_get_icd_and_device(device, &dev, &icd_index);
+    if (NULL != icd_term && NULL != icd_term->dispatch.CreateSharedSwapchainsKHR) {
+        VkIcdSurface *icd_surface = (VkIcdSurface *)(uintptr_t)pCreateInfos->surface;
+        if (NULL != icd_surface->real_icd_surfaces) {
+            if ((VkSurfaceKHR)NULL != icd_surface->real_icd_surfaces[icd_index]) {
+                // We found the ICD, and there is an ICD KHR surface
+                // associated with it, so copy the CreateInfo struct
+                // and point it at the ICD's surface.
+                VkSwapchainCreateInfoKHR *pCreateCopy = loader_stack_alloc(sizeof(VkSwapchainCreateInfoKHR) * swapchainCount);
+                if (NULL == pCreateCopy) {
+                    return VK_ERROR_OUT_OF_HOST_MEMORY;
+                }
+                memcpy(pCreateCopy, pCreateInfos, sizeof(VkSwapchainCreateInfoKHR) * swapchainCount);
+                for (uint32_t sc = 0; sc < swapchainCount; sc++) {
+                    pCreateCopy[sc].surface = icd_surface->real_icd_surfaces[icd_index];
+                }
+                return icd_term->dispatch.CreateSharedSwapchainsKHR(device, swapchainCount, pCreateCopy, pAllocator, pSwapchains);
+            }
+        }
+        return icd_term->dispatch.CreateSharedSwapchainsKHR(device, swapchainCount, pCreateInfos, pAllocator, pSwapchains);
+    }
+    return VK_SUCCESS;
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupPresentCapabilitiesKHR(
+    VkDevice                                    device,
+    VkDeviceGroupPresentCapabilitiesKHR*        pDeviceGroupPresentCapabilities) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetDeviceGroupPresentCapabilitiesKHR(device, pDeviceGroupPresentCapabilities);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHR(
+    VkDevice                                    device,
+    VkSurfaceKHR                                surface,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->GetDeviceGroupSurfacePresentModesKHR(device, surface, pModes);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetDeviceGroupSurfacePresentModesKHR(
+    VkDevice                                    device,
+    VkSurfaceKHR                                surface,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes) {
+    uint32_t icd_index = 0;
+    struct loader_device *dev;
+    struct loader_icd_term *icd_term = loader_get_icd_and_device(device, &dev, &icd_index);
+    if (NULL != icd_term && NULL != icd_term->dispatch.GetDeviceGroupSurfacePresentModesKHR) {
+        VkIcdSurface *icd_surface = (VkIcdSurface *)(uintptr_t)surface;
+        if (NULL != icd_surface->real_icd_surfaces && (VkSurfaceKHR)NULL != icd_surface->real_icd_surfaces[icd_index]) {
+            return icd_term->dispatch.GetDeviceGroupSurfacePresentModesKHR(device, icd_surface->real_icd_surfaces[icd_index], pModes);
+        }
+        return icd_term->dispatch.GetDeviceGroupSurfacePresentModesKHR(device, surface, pModes);
+    }
+    return VK_SUCCESS;
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pRectCount,
+    VkRect2D*                                   pRects) {
+    const VkLayerInstanceDispatchTable *disp;
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    return disp->GetPhysicalDevicePresentRectanglesKHR(unwrapped_phys_dev, surface, pRectCount, pRects);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDevicePresentRectanglesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pRectCount,
+    VkRect2D*                                   pRects) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+    if (NULL == icd_term->dispatch.GetPhysicalDevicePresentRectanglesKHR) {
+        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+                   "ICD associated with VkPhysicalDevice does not support GetPhysicalDevicePresentRectanglesKHR");
+    }
+    VkIcdSurface *icd_surface = (VkIcdSurface *)(surface);
+    uint8_t icd_index = phys_dev_term->icd_index;
+    if (NULL != icd_surface->real_icd_surfaces && NULL != (void *)icd_surface->real_icd_surfaces[icd_index]) {
+        return icd_term->dispatch.GetPhysicalDevicePresentRectanglesKHR(phys_dev_term->phys_dev, icd_surface->real_icd_surfaces[icd_index], pRectCount, pRects);
+    }
+    return icd_term->dispatch.GetPhysicalDevicePresentRectanglesKHR(phys_dev_term->phys_dev, surface, pRectCount, pRects);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImage2KHR(
+    VkDevice                                    device,
+    const VkAcquireNextImageInfoKHR*            pAcquireInfo,
+    uint32_t*                                   pImageIndex) {
+    const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+    return disp->AcquireNextImage2KHR(device, pAcquireInfo, pImageIndex);
+}
+
+// ---- VK_KHR_get_display_properties2 extension trampoline/terminators
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayProperties2KHR(VkPhysicalDevice physicalDevice,
+                                                                                      uint32_t *pPropertyCount,
+                                                                                      VkDisplayProperties2KHR *pProperties) {
+    const VkLayerInstanceDispatchTable *disp;
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    return disp->GetPhysicalDeviceDisplayProperties2KHR(unwrapped_phys_dev, pPropertyCount, pProperties);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceDisplayProperties2KHR(VkPhysicalDevice physicalDevice,
+                                                                                 uint32_t *pPropertyCount,
+                                                                                 VkDisplayProperties2KHR *pProperties) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+
+    // If the function is available in the driver, just call into it
+    if (icd_term->dispatch.GetPhysicalDeviceDisplayProperties2KHR != NULL) {
+        return icd_term->dispatch.GetPhysicalDeviceDisplayProperties2KHR(phys_dev_term->phys_dev, pPropertyCount, pProperties);
+    }
+
+    // We have to emulate the function.
+    loader_log(icd_term->this_instance, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+               "vkGetPhysicalDeviceDisplayProperties2KHR: Emulating call in ICD \"%s\"", icd_term->scanned_icd->lib_name);
+
+    // If the icd doesn't support VK_KHR_display, then no properties are available
+    if (icd_term->dispatch.GetPhysicalDeviceDisplayPropertiesKHR == NULL) {
+        *pPropertyCount = 0;
+        return VK_SUCCESS;
+    }
+
+    // If we aren't writing to pProperties, then emulation is straightforward
+    if (pProperties == NULL || *pPropertyCount == 0) {
+        return icd_term->dispatch.GetPhysicalDeviceDisplayPropertiesKHR(phys_dev_term->phys_dev, pPropertyCount, NULL);
+    }
+
+    // If we do have to write to pProperties, then we need to write to a temporary array of VkDisplayPropertiesKHR and copy it
+    VkDisplayPropertiesKHR *properties = loader_stack_alloc(*pPropertyCount * sizeof(VkDisplayPropertiesKHR));
+    if (properties == NULL) {
+        return VK_ERROR_OUT_OF_HOST_MEMORY;
+    }
+    VkResult res = icd_term->dispatch.GetPhysicalDeviceDisplayPropertiesKHR(phys_dev_term->phys_dev, pPropertyCount, properties);
+    if (res < 0) {
+        return res;
+    }
+    for (uint32_t i = 0; i < *pPropertyCount; ++i) {
+        memcpy(&pProperties[i].displayProperties, &properties[i], sizeof(VkDisplayPropertiesKHR));
+    }
+    return res;
+}
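+
+// Note (illustrative): the fallback above only fills the embedded displayProperties
+// member of each element; the caller-provided sType and pNext are left untouched,
+// so the usual two-call enumeration idiom works the same whether the driver
+// implements the 2KHR entry point or the loader emulates it:
+//
+//     uint32_t count = 0;
+//     vkGetPhysicalDeviceDisplayProperties2KHR(phys_dev, &count, NULL);
+//     VkDisplayProperties2KHR *props = calloc(count, sizeof(*props));
+//     for (uint32_t i = 0; i < count; ++i) {
+//         props[i].sType = VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR;
+//     }
+//     vkGetPhysicalDeviceDisplayProperties2KHR(phys_dev, &count, props);
+//
+// The same pattern applies to the plane and mode property emulation below.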
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
+    VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount, VkDisplayPlaneProperties2KHR *pProperties) {
+    const VkLayerInstanceDispatchTable *disp;
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    return disp->GetPhysicalDeviceDisplayPlaneProperties2KHR(unwrapped_phys_dev, pPropertyCount, pProperties);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
+                                                                                      uint32_t *pPropertyCount,
+                                                                                      VkDisplayPlaneProperties2KHR *pProperties) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+
+    // If the function is available in the driver, just call into it
+    if (icd_term->dispatch.GetPhysicalDeviceDisplayPlaneProperties2KHR != NULL) {
+        return icd_term->dispatch.GetPhysicalDeviceDisplayPlaneProperties2KHR(phys_dev_term->phys_dev, pPropertyCount, pProperties);
+    }
+
+    // We have to emulate the function.
+    loader_log(icd_term->this_instance, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+               "vkGetPhysicalDeviceDisplayPlaneProperties2KHR: Emulating call in ICD \"%s\"", icd_term->scanned_icd->lib_name);
+
+    // If the icd doesn't support VK_KHR_display, then no properties are available
+    if (icd_term->dispatch.GetPhysicalDeviceDisplayPlanePropertiesKHR == NULL) {
+        *pPropertyCount = 0;
+        return VK_SUCCESS;
+    }
+
+    // If we aren't writing to pProperties, then emulation is straightforward
+    if (pProperties == NULL || *pPropertyCount == 0) {
+        return icd_term->dispatch.GetPhysicalDeviceDisplayPlanePropertiesKHR(phys_dev_term->phys_dev, pPropertyCount, NULL);
+    }
+
+    // If we do have to write to pProperties, then we need to write to a temporary array of VkDisplayPlanePropertiesKHR and copy it
+    VkDisplayPlanePropertiesKHR *properties = loader_stack_alloc(*pPropertyCount * sizeof(VkDisplayPlanePropertiesKHR));
+    if (properties == NULL) {
+        return VK_ERROR_OUT_OF_HOST_MEMORY;
+    }
+    VkResult res =
+        icd_term->dispatch.GetPhysicalDeviceDisplayPlanePropertiesKHR(phys_dev_term->phys_dev, pPropertyCount, properties);
+    if (res < 0) {
+        return res;
+    }
+    for (uint32_t i = 0; i < *pPropertyCount; ++i) {
+        memcpy(&pProperties[i].displayPlaneProperties, &properties[i], sizeof(VkDisplayPlanePropertiesKHR));
+    }
+    return res;
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModeProperties2KHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display,
+                                                                            uint32_t *pPropertyCount,
+                                                                            VkDisplayModeProperties2KHR *pProperties) {
+    const VkLayerInstanceDispatchTable *disp;
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    return disp->GetDisplayModeProperties2KHR(unwrapped_phys_dev, display, pPropertyCount, pProperties);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetDisplayModeProperties2KHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display,
+                                                                       uint32_t *pPropertyCount,
+                                                                       VkDisplayModeProperties2KHR *pProperties) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+
+    // If the function is available in the driver, just call into it
+    if (icd_term->dispatch.GetDisplayModeProperties2KHR != NULL) {
+        return icd_term->dispatch.GetDisplayModeProperties2KHR(phys_dev_term->phys_dev, display, pPropertyCount, pProperties);
+    }
+
+    // We have to emulate the function.
+    loader_log(icd_term->this_instance, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+               "vkGetDisplayModeProperties2KHR: Emulating call in ICD \"%s\"", icd_term->scanned_icd->lib_name);
+
+    // If the icd doesn't support VK_KHR_display, then no properties are available
+    if (icd_term->dispatch.GetDisplayModePropertiesKHR == NULL) {
+        *pPropertyCount = 0;
+        return VK_SUCCESS;
+    }
+
+    // If we aren't writing to pProperties, then emulation is straightforward
+    if (pProperties == NULL || *pPropertyCount == 0) {
+        return icd_term->dispatch.GetDisplayModePropertiesKHR(phys_dev_term->phys_dev, display, pPropertyCount, NULL);
+    }
+
+    // If we do have to write to pProperties, then we need to write to a temporary array of VkDisplayModePropertiesKHR and copy it
+    VkDisplayModePropertiesKHR *properties = loader_stack_alloc(*pPropertyCount * sizeof(VkDisplayModePropertiesKHR));
+    if (properties == NULL) {
+        return VK_ERROR_OUT_OF_HOST_MEMORY;
+    }
+    VkResult res = icd_term->dispatch.GetDisplayModePropertiesKHR(phys_dev_term->phys_dev, display, pPropertyCount, properties);
+    if (res < 0) {
+        return res;
+    }
+    for (uint32_t i = 0; i < *pPropertyCount; ++i) {
+        memcpy(&pProperties[i].displayModeProperties, &properties[i], sizeof(VkDisplayModePropertiesKHR));
+    }
+    return res;
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilities2KHR(VkPhysicalDevice physicalDevice,
+                                                                               const VkDisplayPlaneInfo2KHR *pDisplayPlaneInfo,
+                                                                               VkDisplayPlaneCapabilities2KHR *pCapabilities) {
+    const VkLayerInstanceDispatchTable *disp;
+    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+    disp = loader_get_instance_layer_dispatch(physicalDevice);
+    return disp->GetDisplayPlaneCapabilities2KHR(unwrapped_phys_dev, pDisplayPlaneInfo, pCapabilities);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetDisplayPlaneCapabilities2KHR(VkPhysicalDevice physicalDevice,
+                                                                          const VkDisplayPlaneInfo2KHR *pDisplayPlaneInfo,
+                                                                          VkDisplayPlaneCapabilities2KHR *pCapabilities) {
+    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+
+    // If the function is available in the driver, just call into it
+    if (icd_term->dispatch.GetDisplayPlaneCapabilities2KHR != NULL) {
+        return icd_term->dispatch.GetDisplayPlaneCapabilities2KHR(phys_dev_term->phys_dev, pDisplayPlaneInfo, pCapabilities);
+    }
+
+    // We have to emulate the function.
+    loader_log(icd_term->this_instance, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
+               "vkGetDisplayPlaneCapabilities2KHR: Emulating call in ICD \"%s\"", icd_term->scanned_icd->lib_name);
+
+    // Just call into the old version of the function.
+    // If the icd doesn't support VK_KHR_display, there are zero planes and this call is invalid (and will crash)
+    return icd_term->dispatch.GetDisplayPlaneCapabilitiesKHR(phys_dev_term->phys_dev, pDisplayPlaneInfo->mode,
+                                                             pDisplayPlaneInfo->planeIndex, &pCapabilities->capabilities);
+}
+
+bool wsi_swapchain_instance_gpa(struct loader_instance *ptr_instance, const char *name, void **addr) {
+    *addr = NULL;
+
+    // Functions for the VK_KHR_surface extension:
+    if (!strcmp("vkDestroySurfaceKHR", name)) {
+        *addr = ptr_instance->wsi_surface_enabled ? (void *)vkDestroySurfaceKHR : NULL;
+        return true;
+    }
+    if (!strcmp("vkGetPhysicalDeviceSurfaceSupportKHR", name)) {
+        *addr = ptr_instance->wsi_surface_enabled ? (void *)vkGetPhysicalDeviceSurfaceSupportKHR : NULL;
+        return true;
+    }
+    if (!strcmp("vkGetPhysicalDeviceSurfaceCapabilitiesKHR", name)) {
+        *addr = ptr_instance->wsi_surface_enabled ? (void *)vkGetPhysicalDeviceSurfaceCapabilitiesKHR : NULL;
+        return true;
+    }
+    if (!strcmp("vkGetPhysicalDeviceSurfaceFormatsKHR", name)) {
+        *addr = ptr_instance->wsi_surface_enabled ? (void *)vkGetPhysicalDeviceSurfaceFormatsKHR : NULL;
+        return true;
+    }
+    if (!strcmp("vkGetPhysicalDeviceSurfacePresentModesKHR", name)) {
+        *addr = ptr_instance->wsi_surface_enabled ? (void *)vkGetPhysicalDeviceSurfacePresentModesKHR : NULL;
+        return true;
+    }
+
+    if (!strcmp("vkGetDeviceGroupPresentCapabilitiesKHR", name)) {
+        *addr = ptr_instance->wsi_surface_enabled ? (void *)vkGetDeviceGroupPresentCapabilitiesKHR : NULL;
+        return true;
+    }
+
+    if (!strcmp("vkGetDeviceGroupSurfacePresentModesKHR", name)) {
+        *addr = ptr_instance->wsi_surface_enabled ? (void *)vkGetDeviceGroupSurfacePresentModesKHR : NULL;
+        return true;
+    }
+
+    if (!strcmp("vkGetPhysicalDevicePresentRectanglesKHR", name)) {
+        *addr = ptr_instance->wsi_surface_enabled ? (void *)vkGetPhysicalDevicePresentRectanglesKHR : NULL;
+        return true;
+    }
+
+    // Functions for the VK_KHR_swapchain extension:
+
+    // Note: This is a device extension, and its functions are statically
+    // exported from the loader.  Per Khronos decisions, the loader's GIPA
+    // function will return the trampoline function for such device-extension
+    // functions, regardless of whether the extension has been enabled.
+    if (!strcmp("vkCreateSwapchainKHR", name)) {
+        *addr = (void *)vkCreateSwapchainKHR;
+        return true;
+    }
+    if (!strcmp("vkDestroySwapchainKHR", name)) {
+        *addr = (void *)vkDestroySwapchainKHR;
+        return true;
+    }
+    if (!strcmp("vkGetSwapchainImagesKHR", name)) {
+        *addr = (void *)vkGetSwapchainImagesKHR;
+        return true;
+    }
+    if (!strcmp("vkAcquireNextImageKHR", name)) {
+        *addr = (void *)vkAcquireNextImageKHR;
+        return true;
+    }
+    if (!strcmp("vkQueuePresentKHR", name)) {
+        *addr = (void *)vkQueuePresentKHR;
+        return true;
+    }
+    if (!strcmp("vkAcquireNextImage2KHR", name)) {
+        *addr = (void *)vkAcquireNextImage2KHR;
+        return true;
+    }
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+    // Functions for the VK_KHR_win32_surface extension:
+    if (!strcmp("vkCreateWin32SurfaceKHR", name)) {
+        *addr = ptr_instance->wsi_win32_surface_enabled ? (void *)vkCreateWin32SurfaceKHR : NULL;
+        return true;
+    }
+    if (!strcmp("vkGetPhysicalDeviceWin32PresentationSupportKHR", name)) {
+        *addr = ptr_instance->wsi_win32_surface_enabled ? (void *)vkGetPhysicalDeviceWin32PresentationSupportKHR : NULL;
+        return true;
+    }
+#endif  // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+
+    // Functions for the VK_KHR_wayland_surface extension:
+    if (!strcmp("vkCreateWaylandSurfaceKHR", name)) {
+        *addr = ptr_instance->wsi_wayland_surface_enabled ? (void *)vkCreateWaylandSurfaceKHR : NULL;
+        return true;
+    }
+    if (!strcmp("vkGetPhysicalDeviceWaylandPresentationSupportKHR", name)) {
+        *addr = ptr_instance->wsi_wayland_surface_enabled ? (void *)vkGetPhysicalDeviceWaylandPresentationSupportKHR : NULL;
+        return true;
+    }
+#endif  // VK_USE_PLATFORM_WAYLAND_KHR
+#ifdef VK_USE_PLATFORM_XCB_KHR
+
+    // Functions for the VK_KHR_xcb_surface extension:
+    if (!strcmp("vkCreateXcbSurfaceKHR", name)) {
+        *addr = ptr_instance->wsi_xcb_surface_enabled ? (void *)vkCreateXcbSurfaceKHR : NULL;
+        return true;
+    }
+    if (!strcmp("vkGetPhysicalDeviceXcbPresentationSupportKHR", name)) {
+        *addr = ptr_instance->wsi_xcb_surface_enabled ? (void *)vkGetPhysicalDeviceXcbPresentationSupportKHR : NULL;
+        return true;
+    }
+#endif  // VK_USE_PLATFORM_XCB_KHR
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+
+    // Functions for the VK_KHR_xlib_surface extension:
+    if (!strcmp("vkCreateXlibSurfaceKHR", name)) {
+        *addr = ptr_instance->wsi_xlib_surface_enabled ? (void *)vkCreateXlibSurfaceKHR : NULL;
+        return true;
+    }
+    if (!strcmp("vkGetPhysicalDeviceXlibPresentationSupportKHR", name)) {
+        *addr = ptr_instance->wsi_xlib_surface_enabled ? (void *)vkGetPhysicalDeviceXlibPresentationSupportKHR : NULL;
+        return true;
+    }
+#endif  // VK_USE_PLATFORM_XLIB_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+    // Functions for the VK_KHR_android_surface extension:
+    if (!strcmp("vkCreateAndroidSurfaceKHR", name)) {
+        *addr = ptr_instance->wsi_android_surface_enabled ? (void *)vkCreateAndroidSurfaceKHR : NULL;
+        return true;
+    }
+#endif  // VK_USE_PLATFORM_ANDROID_KHR
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+
+    // Functions for the VK_MVK_macos_surface extension:
+    if (!strcmp("vkCreateMacOSSurfaceMVK", name)) {
+        *addr = ptr_instance->wsi_macos_surface_enabled ? (void *)vkCreateMacOSSurfaceMVK : NULL;
+        return true;
+    }
+#endif  // VK_USE_PLATFORM_MACOS_MVK
+#ifdef VK_USE_PLATFORM_IOS_MVK
+
+    // Functions for the VK_MVK_ios_surface extension:
+    if (!strcmp("vkCreateIOSSurfaceMVK", name)) {
+        *addr = ptr_instance->wsi_ios_surface_enabled ? (void *)vkCreateIOSSurfaceMVK : NULL;
+        return true;
+    }
+#endif  // VK_USE_PLATFORM_IOS_MVK
+
+    // Functions for the VK_EXT_headless_surface extension:
+    if (!strcmp("vkCreateHeadlessSurfaceEXT", name)) {
+        *addr = ptr_instance->wsi_headless_surface_enabled ? (void *)vkCreateHeadlessSurfaceEXT : NULL;
+        return true;
+    }
+
+#if defined(VK_USE_PLATFORM_METAL_EXT)
+    // Functions for the VK_EXT_metal_surface extension:
+    if (!strcmp("vkCreateMetalSurfaceEXT", name)) {
+        *addr = ptr_instance->wsi_metal_surface_enabled ? (void *)vkCreateMetalSurfaceEXT : NULL;
+        return true;
+    }
+#endif // VK_USE_PLATFORM_METAL_EXT
+
+    // Functions for VK_KHR_display extension:
+    if (!strcmp("vkGetPhysicalDeviceDisplayPropertiesKHR", name)) {
+        *addr = ptr_instance->wsi_display_enabled ? (void *)vkGetPhysicalDeviceDisplayPropertiesKHR : NULL;
+        return true;
+    }
+    if (!strcmp("vkGetPhysicalDeviceDisplayPlanePropertiesKHR", name)) {
+        *addr = ptr_instance->wsi_display_enabled ? (void *)vkGetPhysicalDeviceDisplayPlanePropertiesKHR : NULL;
+        return true;
+    }
+    if (!strcmp("vkGetDisplayPlaneSupportedDisplaysKHR", name)) {
+        *addr = ptr_instance->wsi_display_enabled ? (void *)vkGetDisplayPlaneSupportedDisplaysKHR : NULL;
+        return true;
+    }
+    if (!strcmp("vkGetDisplayModePropertiesKHR", name)) {
+        *addr = ptr_instance->wsi_display_enabled ? (void *)vkGetDisplayModePropertiesKHR : NULL;
+        return true;
+    }
+    if (!strcmp("vkCreateDisplayModeKHR", name)) {
+        *addr = ptr_instance->wsi_display_enabled ? (void *)vkCreateDisplayModeKHR : NULL;
+        return true;
+    }
+    if (!strcmp("vkGetDisplayPlaneCapabilitiesKHR", name)) {
+        *addr = ptr_instance->wsi_display_enabled ? (void *)vkGetDisplayPlaneCapabilitiesKHR : NULL;
+        return true;
+    }
+    if (!strcmp("vkCreateDisplayPlaneSurfaceKHR", name)) {
+        *addr = ptr_instance->wsi_display_enabled ? (void *)vkCreateDisplayPlaneSurfaceKHR : NULL;
+        return true;
+    }
+
+    // Functions for KHR_display_swapchain extension:
+    if (!strcmp("vkCreateSharedSwapchainsKHR", name)) {
+        *addr = (void *)vkCreateSharedSwapchainsKHR;
+        return true;
+    }
+
+    // Functions for KHR_get_display_properties2
+    if (!strcmp("vkGetPhysicalDeviceDisplayProperties2KHR", name)) {
+        *addr = ptr_instance->wsi_display_props2_enabled ? (void *)vkGetPhysicalDeviceDisplayProperties2KHR : NULL;
+        return true;
+    }
+    if (!strcmp("vkGetPhysicalDeviceDisplayPlaneProperties2KHR", name)) {
+        *addr = ptr_instance->wsi_display_props2_enabled ? (void *)vkGetPhysicalDeviceDisplayPlaneProperties2KHR : NULL;
+        return true;
+    }
+    if (!strcmp("vkGetDisplayModeProperties2KHR", name)) {
+        *addr = ptr_instance->wsi_display_props2_enabled ? (void *)vkGetDisplayModeProperties2KHR : NULL;
+        return true;
+    }
+    if (!strcmp("vkGetDisplayPlaneCapabilities2KHR", name)) {
+        *addr = ptr_instance->wsi_display_props2_enabled ? (void *)vkGetDisplayPlaneCapabilities2KHR : NULL;
+        return true;
+    }
+
+    return false;
+}
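+
+// Illustrative note (not loader code): this table backs the loader's
+// vkGetInstanceProcAddr handling for WSI names.  Instance-level entries are
+// gated on the matching wsi_*_enabled flag, so a lookup such as
+//
+//     PFN_vkCreateDisplayPlaneSurfaceKHR fp =
+//         (PFN_vkCreateDisplayPlaneSurfaceKHR)vkGetInstanceProcAddr(
+//             instance, "vkCreateDisplayPlaneSurfaceKHR");
+//
+// yields NULL unless VK_KHR_display was enabled at instance creation, while the
+// VK_KHR_swapchain device entry points are returned unconditionally, as noted in
+// the comment above.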
diff --git a/src/third_party/vulkan-loader/src/loader/wsi.h b/src/third_party/vulkan-loader/src/loader/wsi.h
new file mode 100644
index 0000000..3e44efa
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/loader/wsi.h
@@ -0,0 +1,184 @@
+/*
+ * Copyright (c) 2015-2016 The Khronos Group Inc.
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Ian Elliott <ian@lunarg.com>
+ *
+ */
+
+#ifndef WSI_H
+#define WSI_H
+
+#include "vk_loader_platform.h"
+#include "loader.h"
+
+typedef struct {
+    union {
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+        VkIcdSurfaceWayland wayland_surf;
+#endif  // VK_USE_PLATFORM_WAYLAND_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+        VkIcdSurfaceWin32 win_surf;
+#endif  // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_XCB_KHR
+        VkIcdSurfaceXcb xcb_surf;
+#endif  // VK_USE_PLATFORM_XCB_KHR
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+        VkIcdSurfaceXlib xlib_surf;
+#endif  // VK_USE_PLATFORM_XLIB_KHR
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+        VkIcdSurfaceMacOS macos_surf;
+#endif  // VK_USE_PLATFORM_MACOS_MVK
+#ifdef VK_USE_PLATFORM_METAL_EXT
+        VkIcdSurfaceMetal metal_surf;
+#endif // VK_USE_PLATFORM_METAL_EXT
+        VkIcdSurfaceDisplay display_surf;
+        VkIcdSurfaceHeadless headless_surf;
+    };
+    uint32_t base_size;            // Size of VkIcdSurfaceBase
+    uint32_t platform_size;        // Size of corresponding VkIcdSurfaceXXX
+    uint32_t non_platform_offset;  // Offset within this struct to the non-platform members (i.e., to base_size)
+    uint32_t entire_size;          // Size of entire VkIcdSurface
+    VkSurfaceKHR *real_icd_surfaces;
+} VkIcdSurface;
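+
+// Illustrative note: wsi.c sizes this struct through AllocateIcdSurfaceStruct,
+// passing the base and platform sizes of the union member being created, e.g.
+// for VK_KHR_display surfaces:
+//
+//     pIcdSurface = AllocateIcdSurfaceStruct(inst,
+//                                            sizeof(pIcdSurface->display_surf.base),
+//                                            sizeof(pIcdSurface->display_surf));
+//
+// real_icd_surfaces then holds one VkSurfaceKHR slot per ICD so the terminators
+// can substitute the driver-owned handle before dispatching.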
+
+bool wsi_swapchain_instance_gpa(struct loader_instance *ptr_instance, const char *name, void **addr);
+
+void wsi_create_instance(struct loader_instance *ptr_instance, const VkInstanceCreateInfo *pCreateInfo);
+bool wsi_unsupported_instance_extension(const VkExtensionProperties *ext_prop);
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateHeadlessSurfaceEXT(VkInstance instance,
+                                                                   const VkHeadlessSurfaceCreateInfoEXT *pCreateInfo,
+                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface);
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
+                                                             const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain);
+
+VKAPI_ATTR void VKAPI_CALL terminator_DestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
+                                                        const VkAllocationCallbacks *pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
+                                                                             uint32_t queueFamilyIndex, VkSurfaceKHR surface,
+                                                                             VkBool32 *pSupported);
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
+                                                                                  VkSurfaceKHR surface,
+                                                                                  VkSurfaceCapabilitiesKHR *pSurfaceCapabilities);
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
+                                                                             uint32_t *pSurfaceFormatCount,
+                                                                             VkSurfaceFormatKHR *pSurfaceFormats);
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
+                                                                                  VkSurfaceKHR surface, uint32_t *pPresentModeCount,
+                                                                                  VkPresentModeKHR *pPresentModes);
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetDeviceGroupSurfacePresentModesKHR(
+    VkDevice                                    device,
+    VkSurfaceKHR                                surface,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes);
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
+                                                                const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface);
+VKAPI_ATTR VkBool32 VKAPI_CALL terminator_GetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice,
+                                                                                       uint32_t queueFamilyIndex);
+#endif
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateWaylandSurfaceKHR(VkInstance instance,
+                                                                  const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
+                                                                  const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface);
+VKAPI_ATTR VkBool32 VKAPI_CALL terminator_GetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice,
+                                                                                         uint32_t queueFamilyIndex,
+                                                                                         struct wl_display *display);
+#endif
+#ifdef VK_USE_PLATFORM_XCB_KHR
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
+                                                              const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface);
+
+VKAPI_ATTR VkBool32 VKAPI_CALL terminator_GetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice,
+                                                                                     uint32_t queueFamilyIndex,
+                                                                                     xcb_connection_t *connection,
+                                                                                     xcb_visualid_t visual_id);
+#endif
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
+                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface);
+VKAPI_ATTR VkBool32 VKAPI_CALL terminator_GetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice,
+                                                                                      uint32_t queueFamilyIndex, Display *dpy,
+                                                                                      VisualID visualID);
+#endif
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateMacOSSurfaceMVK(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
+                                                                const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface);
+#endif
+#ifdef VK_USE_PLATFORM_IOS_MVK
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
+                                                              const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface);
+#endif
+#if defined(VK_USE_PLATFORM_METAL_EXT)
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateMetalSurfaceEXT(VkInstance instance, const VkMetalSurfaceCreateInfoEXT *pCreateInfo,
+                                                                const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface);
+#endif
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice,
+                                                                                uint32_t *pPropertyCount,
+                                                                                VkDisplayPropertiesKHR *pProperties);
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
+                                                                                     uint32_t *pPropertyCount,
+                                                                                     VkDisplayPlanePropertiesKHR *pProperties);
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex,
+                                                                              uint32_t *pDisplayCount, VkDisplayKHR *pDisplays);
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display,
+                                                                      uint32_t *pPropertyCount,
+                                                                      VkDisplayModePropertiesKHR *pProperties);
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateDisplayModeKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display,
+                                                               const VkDisplayModeCreateInfoKHR *pCreateInfo,
+                                                               const VkAllocationCallbacks *pAllocator, VkDisplayModeKHR *pMode);
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode,
+                                                                         uint32_t planeIndex,
+                                                                         VkDisplayPlaneCapabilitiesKHR *pCapabilities);
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateDisplayPlaneSurfaceKHR(VkInstance instance,
+                                                                       const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
+                                                                       const VkAllocationCallbacks *pAllocator,
+                                                                       VkSurfaceKHR *pSurface);
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
+                                                                    const VkSwapchainCreateInfoKHR *pCreateInfos,
+                                                                    const VkAllocationCallbacks *pAllocator,
+                                                                    VkSwapchainKHR *pSwapchains);
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDevicePresentRectanglesKHR(VkPhysicalDevice physicalDevice,
+                                                                                VkSurfaceKHR surface,
+                                                                                uint32_t* pRectCount,
+                                                                                VkRect2D* pRects);
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceDisplayProperties2KHR(VkPhysicalDevice physicalDevice,
+                                                                                 uint32_t *pPropertyCount,
+                                                                                 VkDisplayProperties2KHR *pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
+                                                                                      uint32_t *pPropertyCount,
+                                                                                      VkDisplayPlaneProperties2KHR *pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetDisplayModeProperties2KHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display,
+                                                                       uint32_t *pPropertyCount,
+                                                                       VkDisplayModeProperties2KHR *pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetDisplayPlaneCapabilities2KHR(VkPhysicalDevice physicalDevice,
+                                                                          const VkDisplayPlaneInfo2KHR *pDisplayPlaneInfo,
+                                                                          VkDisplayPlaneCapabilities2KHR *pCapabilities);
+
+#endif // WSI_H
diff --git a/src/third_party/vulkan-loader/src/scripts/check_code_format.sh b/src/third_party/vulkan-loader/src/scripts/check_code_format.sh
new file mode 100755
index 0000000..dde4379
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/scripts/check_code_format.sh
@@ -0,0 +1,41 @@
+#!/bin/bash
+# Copyright (c) 2017 Google Inc.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Script to determine if the source code in a pull request is properly formatted.
+# Exits with a non-zero exit code if formatting is needed.
+#
+# This script assumes it is invoked from the project root directory.
+
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+NC='\033[0m' # No Color
+
+FILES_TO_CHECK=$(git diff --name-only master | grep -v -E "^include/vulkan" | grep -E ".*\.(cpp|cc|c\+\+|cxx|c|h|hpp)$")
+
+if [ -z "${FILES_TO_CHECK}" ]; then
+  echo -e "${GREEN}No source code to check for formatting.${NC}"
+  exit 0
+fi
+
+FORMAT_DIFF=$(git diff -U0 master -- ${FILES_TO_CHECK} | python ./scripts/clang-format-diff.py -p1 -style=file)
+
+if [ -z "${FORMAT_DIFF}" ]; then
+  echo -e "${GREEN}All source code in PR properly formatted.${NC}"
+  exit 0
+else
+  echo -e "${RED}Found formatting errors!${NC}"
+  echo "${FORMAT_DIFF}"
+  exit 1
+fi
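+
+# Illustrative local usage (assumes clang-format is installed and
+# scripts/clang-format-diff.py is present, as the pipeline above requires):
+#
+#   ./scripts/check_code_format.sh
+#
+# run from the project root; a zero exit status means the diff against master
+# is clang-format clean.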
diff --git a/src/third_party/vulkan-loader/src/scripts/check_commit_message_format.sh b/src/third_party/vulkan-loader/src/scripts/check_commit_message_format.sh
new file mode 100755
index 0000000..2966635
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/scripts/check_commit_message_format.sh
@@ -0,0 +1,102 @@
+#!/bin/bash
+# Copyright (c) 2018 Valve Corporation
+# Copyright (c) 2018 LunarG, Inc.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Checks commit messages against the project standards in the CONTRIBUTING.md document.
+# Script to determine if the commit messages in a pull request are properly formatted.
+# Exits with a non-zero exit code if reformatting is needed.
+
+# Use lastpipe so the while-read loop below runs in the current shell and can update 'success'
+shopt -s lastpipe
+
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+NC='\033[0m' # No Color
+
+# TRAVIS_COMMIT_RANGE contains range of commits for this PR
+
+# Get user-supplied commit message text for applicable commits and insert
+# a unique separator string identifier. The git command returns ONLY the
+# subject line and body for each of the commits.
+COMMIT_TEXT=$(git log ${TRAVIS_COMMIT_RANGE} --pretty=format:"XXXNEWLINEXXX"%n%B)
+
+# Bail if there are none
+if [ -z "${COMMIT_TEXT}" ]; then
+  echo -e "${GREEN}No commit messgages to check for formatting.${NC}"
+  exit 0
+elif ! echo $TRAVIS_COMMIT_RANGE | grep -q "\.\.\."; then
+  echo -e "${GREEN}No commit messgages to check for formatting.${NC}"
+  exit 0
+fi
+
+# Process commit messages
+success=1
+current_line=0
+prevline=""
+
+# Process each line of the commit message output, resetting counter on separator
+printf %s "$COMMIT_TEXT" | while IFS='' read -r line; do
+  # echo "Count = $current_line <Line> = $line"
+  current_line=$((current_line+1))
+  if [ "$line" = "XXXNEWLINEXXX" ]; then
+    current_line=0
+  fi
+  chars=${#line}
+  if [ $current_line -eq 1 ]; then
+    # Subject line should be 50 chars or less (but give some slack here)
+    if [ $chars -gt 54 ]; then
+      echo "The following subject line exceeds 50 characters in length."
+      echo "     '$line'"
+      success=0
+    fi
+    i=$(($chars-1))
+    last_char=${line:$i:1}
+    # Output error if last char of subject line is not alpha-numeric
+    if [[ ! $last_char =~ [0-9a-zA-Z] ]]; then
+      echo "For the following commit, the last character of the subject line must not be non-alphanumeric."
+      echo "     '$line'"
+      success=0
+    fi
+    # Check that the subject line starts with a 'module: ' style prefix
+    prefix=$(echo $line | cut -f1 -d " ")
+    if [ "${prefix: -1}" != ":" ]; then
+      echo "The following subject line must start with a single word specifying the functional area of the change, followed by a colon and space. I.e., 'layers: Subject line here'"
+      echo "     '$line'"
+      success=0
+    fi
+  elif [ $current_line -eq 2 ]; then
+    # Commit message must have a blank line between subject and body
+    if [ $chars -ne 0 ]; then
+      echo "The following subject line must be followed by a blank line."
+      echo "     '$prevline'"
+      success=0
+    fi
+  else
+    # Lines in a commit message body must be less than 72 characters in length (but give some slack)
+    if [ $chars -gt 76 ]; then
+      echo "The following commit message body line exceeds the 72 character limit."
+      echo "'$line\'"
+      success=0
+    fi
+  fi
+  prevline=$line
+done
+
+if [ $success -eq 1 ]; then
+  echo -e "${GREEN}All commit messages in pull request are properly formatted.${NC}"
+  exit 0
+else
+  exit 1
+fi
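
The subject-line rules enforced above (roughly a 50-character limit with four characters of slack, an alphanumeric final character, and a leading 'module: ' tag) can be restated compactly. A minimal, hypothetical Python sketch, with the slack value taken from the script:

    # Stand-alone restatement of the subject-line rules checked by the script above;
    # the 54-character slack value comes directly from the script.
    import re

    def subject_line_problems(subject):
        problems = []
        if len(subject) > 54:
            problems.append("subject line exceeds 50 characters")
        if not re.search(r"[0-9a-zA-Z]$", subject):
            problems.append("subject line must end with an alphanumeric character")
        if not re.match(r"\S+: ", subject):
            problems.append("subject line must start with 'module: '")
        return problems

    assert subject_line_problems("loader: Fix dispatch table initialization") == []
    assert subject_line_problems("Fix stuff.") == [
        "subject line must end with an alphanumeric character",
        "subject line must start with 'module: '",
    ]
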
diff --git a/src/third_party/vulkan-loader/src/scripts/common_codegen.py b/src/third_party/vulkan-loader/src/scripts/common_codegen.py
new file mode 100644
index 0000000..d56ebac
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/scripts/common_codegen.py
@@ -0,0 +1,74 @@
+#!/usr/bin/python3 -i
+#
+# Copyright (c) 2015-2017, 2019 The Khronos Group Inc.
+# Copyright (c) 2015-2017, 2019 Valve Corporation
+# Copyright (c) 2015-2017, 2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Mark Lobodzinski <mark@lunarg.com>
+
+import os
+
+# Copyright text prefixing all headers (list of strings).
+prefixStrings = [
+    '/*',
+    '** Copyright (c) 2015-2017, 2019 The Khronos Group Inc.',
+    '** Copyright (c) 2015-2017, 2019 Valve Corporation',
+    '** Copyright (c) 2015-2017, 2019 LunarG, Inc.',
+    '** Copyright (c) 2015-2017, 2019 Google Inc.',
+    '**',
+    '** Licensed under the Apache License, Version 2.0 (the "License");',
+    '** you may not use this file except in compliance with the License.',
+    '** You may obtain a copy of the License at',
+    '**',
+    '**     http://www.apache.org/licenses/LICENSE-2.0',
+    '**',
+    '** Unless required by applicable law or agreed to in writing, software',
+    '** distributed under the License is distributed on an "AS IS" BASIS,',
+    '** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.',
+    '** See the License for the specific language governing permissions and',
+    '** limitations under the License.',
+    '*/',
+    ''
+]
+
+
+platform_dict = {
+    'android' : 'VK_USE_PLATFORM_ANDROID_KHR',
+    'fuchsia' : 'VK_USE_PLATFORM_FUCHSIA',
+    'ggp': 'VK_USE_PLATFORM_GGP',
+    'ios' : 'VK_USE_PLATFORM_IOS_MVK',
+    'macos' : 'VK_USE_PLATFORM_MACOS_MVK',
+    'metal' : 'VK_USE_PLATFORM_METAL_EXT',
+    'vi' : 'VK_USE_PLATFORM_VI_NN',
+    'wayland' : 'VK_USE_PLATFORM_WAYLAND_KHR',
+    'win32' : 'VK_USE_PLATFORM_WIN32_KHR',
+    'xcb' : 'VK_USE_PLATFORM_XCB_KHR',
+    'xlib' : 'VK_USE_PLATFORM_XLIB_KHR',
+    'xlib_xrandr' : 'VK_USE_PLATFORM_XLIB_XRANDR_EXT',
+}
+
+#
+# Return appropriate feature protect string from 'platform' tag on feature
+def GetFeatureProtect(interface):
+    """Get platform protection string"""
+    platform = interface.get('platform')
+    protect = None
+    if platform is not None:
+        protect = platform_dict[platform]
+    return protect
+
+# helper to define paths relative to the repo root
+def repo_relative(path):
+    return os.path.abspath(os.path.join(os.path.dirname(__file__), '..', path))
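
As a usage illustration, GetFeatureProtect simply maps a feature's 'platform' attribute through platform_dict. The elements below are hand-built stand-ins for registry elements, and the example assumes it runs with the scripts directory on the Python path:

    # Usage sketch for GetFeatureProtect(); the <extension> elements here are
    # fabricated stand-ins for the vk.xml registry elements normally passed in.
    import xml.etree.ElementTree as etree
    from common_codegen import GetFeatureProtect

    win32_ext = etree.fromstring('<extension name="VK_KHR_win32_surface" platform="win32"/>')
    print(GetFeatureProtect(win32_ext))      # VK_USE_PLATFORM_WIN32_KHR

    portable_ext = etree.fromstring('<extension name="VK_KHR_surface"/>')
    print(GetFeatureProtect(portable_ext))   # None (no platform guard needed)
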
diff --git a/src/third_party/vulkan-loader/src/scripts/dispatch_table_helper_generator.py b/src/third_party/vulkan-loader/src/scripts/dispatch_table_helper_generator.py
new file mode 100644
index 0000000..c8a6636
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/scripts/dispatch_table_helper_generator.py
@@ -0,0 +1,240 @@
+#!/usr/bin/python3 -i
+#
+# Copyright (c) 2015-2017 The Khronos Group Inc.
+# Copyright (c) 2015-2017 Valve Corporation
+# Copyright (c) 2015-2017 LunarG, Inc.
+# Copyright (c) 2015-2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Mark Lobodzinski <mark@lunarg.com>
+
+import os,re,sys
+import xml.etree.ElementTree as etree
+from generator import *
+from collections import namedtuple
+from common_codegen import *
+
+#
+# DispatchTableHelperOutputGeneratorOptions - subclass of GeneratorOptions.
+class DispatchTableHelperOutputGeneratorOptions(GeneratorOptions):
+    def __init__(self,
+                 conventions = None,
+                 filename = None,
+                 directory = '.',
+                 apiname = None,
+                 profile = None,
+                 versions = '.*',
+                 emitversions = '.*',
+                 defaultExtensions = None,
+                 addExtensions = None,
+                 removeExtensions = None,
+                 emitExtensions = None,
+                 sortProcedure = regSortFeatures,
+                 prefixText = "",
+                 genFuncPointers = True,
+                 apicall = '',
+                 apientry = '',
+                 apientryp = '',
+                 alignFuncParam = 0,
+                 expandEnumerants = True):
+        GeneratorOptions.__init__(self, conventions, filename, directory, apiname, profile,
+                                  versions, emitversions, defaultExtensions,
+                                  addExtensions, removeExtensions, emitExtensions, sortProcedure)
+        self.prefixText      = prefixText
+        self.genFuncPointers = genFuncPointers
+        self.prefixText      = None
+        self.apicall         = apicall
+        self.apientry        = apientry
+        self.apientryp       = apientryp
+        self.alignFuncParam  = alignFuncParam
+#
+# DispatchTableHelperOutputGenerator - subclass of OutputGenerator.
+# Generates dispatch table helper header files for LVL
+class DispatchTableHelperOutputGenerator(OutputGenerator):
+    """Generate dispatch table helper header based on XML element attributes"""
+    def __init__(self,
+                 errFile = sys.stderr,
+                 warnFile = sys.stderr,
+                 diagFile = sys.stdout):
+        OutputGenerator.__init__(self, errFile, warnFile, diagFile)
+        # Internal state - accumulators for different inner block text
+        self.instance_dispatch_list = []      # List of entries for instance dispatch list
+        self.device_dispatch_list = []        # List of entries for device dispatch list
+        self.dev_ext_stub_list = []           # List of stub functions for device extension functions
+        self.device_extension_list = []       # List of device extension functions
+        self.extension_type = ''
+    #
+    # Called once at the beginning of each run
+    def beginFile(self, genOpts):
+        OutputGenerator.beginFile(self, genOpts)
+        write("#pragma once", file=self.outFile)
+        # User-supplied prefix text, if any (list of strings)
+        if (genOpts.prefixText):
+            for s in genOpts.prefixText:
+                write(s, file=self.outFile)
+        # File Comment
+        file_comment = '// *** THIS FILE IS GENERATED - DO NOT EDIT ***\n'
+        file_comment += '// See dispatch_table_helper_generator.py for modifications\n'
+        write(file_comment, file=self.outFile)
+        # Copyright Notice
+        copyright =  '/*\n'
+        copyright += ' * Copyright (c) 2015-2017 The Khronos Group Inc.\n'
+        copyright += ' * Copyright (c) 2015-2017 Valve Corporation\n'
+        copyright += ' * Copyright (c) 2015-2017 LunarG, Inc.\n'
+        copyright += ' *\n'
+        copyright += ' * Licensed under the Apache License, Version 2.0 (the "License");\n'
+        copyright += ' * you may not use this file except in compliance with the License.\n'
+        copyright += ' * You may obtain a copy of the License at\n'
+        copyright += ' *\n'
+        copyright += ' *     http://www.apache.org/licenses/LICENSE-2.0\n'
+        copyright += ' *\n'
+        copyright += ' * Unless required by applicable law or agreed to in writing, software\n'
+        copyright += ' * distributed under the License is distributed on an "AS IS" BASIS,\n'
+        copyright += ' * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n'
+        copyright += ' * See the License for the specific language governing permissions and\n'
+        copyright += ' * limitations under the License.\n'
+        copyright += ' *\n'
+        copyright += ' * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>\n'
+        copyright += ' * Author: Jon Ashburn <jon@lunarg.com>\n'
+        copyright += ' * Author: Mark Lobodzinski <mark@lunarg.com>\n'
+        copyright += ' */\n'
+
+        preamble = ''
+        preamble += '#include <vulkan/vulkan.h>\n'
+        preamble += '#include <vulkan/vk_layer.h>\n'
+        preamble += '#include <string.h>\n'
+        preamble += '#include "vk_layer_dispatch_table.h"\n'
+
+        write(copyright, file=self.outFile)
+        write(preamble, file=self.outFile)
+    #
+    # Generate the dispatch tables and write them to the output file
+    def endFile(self):
+        device_table = ''
+        instance_table = ''
+
+        device_table += self.OutputDispatchTableHelper('device')
+        instance_table += self.OutputDispatchTableHelper('instance')
+
+        for stub in self.dev_ext_stub_list:
+            write(stub, file=self.outFile)
+        write("\n\n", file=self.outFile)
+        write(device_table, file=self.outFile);
+        write("\n", file=self.outFile)
+        write(instance_table, file=self.outFile);
+
+        # Finish processing in superclass
+        OutputGenerator.endFile(self)
+    #
+    # Processing at beginning of each feature or extension
+    def beginFeature(self, interface, emit):
+        OutputGenerator.beginFeature(self, interface, emit)
+        self.featureExtraProtect = GetFeatureProtect(interface)
+        self.extension_type = interface.get('type')
+
+    #
+    # Process commands, adding to appropriate dispatch tables
+    def genCmd(self, cmdinfo, name, alias):
+        OutputGenerator.genCmd(self, cmdinfo, name, alias)
+
+        avoid_entries = ['vkCreateInstance',
+                         'vkCreateDevice']
+        # Get first param type
+        params = cmdinfo.elem.findall('param')
+        info = self.getTypeNameTuple(params[0])
+
+        if name not in avoid_entries:
+            self.AddCommandToDispatchList(name, info[0], self.featureExtraProtect, cmdinfo)
+
+    #
+    # Determine if this API should be ignored or added to the instance or device dispatch table
+    def AddCommandToDispatchList(self, name, handle_type, protect, cmdinfo):
+        handle = self.registry.tree.find("types/type/[name='" + handle_type + "'][@category='handle']")
+        if handle is None:
+            return
+        if handle_type != 'VkInstance' and handle_type != 'VkPhysicalDevice' and name != 'vkGetInstanceProcAddr':
+            self.device_dispatch_list.append((name, self.featureExtraProtect))
+            if "VK_VERSION" not in self.featureName and self.extension_type == 'device':
+                self.device_extension_list.append(name)
+                # Build up stub function
+                return_type = ''
+                decl = self.makeCDecls(cmdinfo.elem)[1]
+                if 'typedef VkResult' in decl:
+                    return_type = 'return VK_SUCCESS;'
+                decl = decl.split('*PFN_vk')[1]
+                decl = decl.replace(')(', '(')
+                if return_type == '':
+                    decl = 'static VKAPI_ATTR void VKAPI_CALL Stub' + decl
+                else:
+                    decl = 'static VKAPI_ATTR VkResult VKAPI_CALL Stub' + decl
+                func_body = ' { ' + return_type + ' };'
+                decl = decl.replace (';', func_body)
+                if self.featureExtraProtect is not None:
+                    self.dev_ext_stub_list.append('#ifdef %s' % self.featureExtraProtect)
+                self.dev_ext_stub_list.append(decl)
+                if self.featureExtraProtect is not None:
+                    self.dev_ext_stub_list.append('#endif // %s' % self.featureExtraProtect)
+        else:
+            self.instance_dispatch_list.append((name, self.featureExtraProtect))
+        return
+    #
+    # Retrieve the type and name for a parameter
+    def getTypeNameTuple(self, param):
+        type = ''
+        name = ''
+        for elem in param:
+            if elem.tag == 'type':
+                type = noneStr(elem.text)
+            elif elem.tag == 'name':
+                name = noneStr(elem.text)
+        return (type, name)
+    #
+    # Create a dispatch table from the appropriate list and return it as a string
+    def OutputDispatchTableHelper(self, table_type):
+        entries = []
+        table = ''
+        if table_type == 'device':
+            entries = self.device_dispatch_list
+            table += 'static inline void layer_init_device_dispatch_table(VkDevice device, VkLayerDispatchTable *table, PFN_vkGetDeviceProcAddr gpa) {\n'
+            table += '    memset(table, 0, sizeof(*table));\n'
+            table += '    // Device function pointers\n'
+        else:
+            entries = self.instance_dispatch_list
+            table += 'static inline void layer_init_instance_dispatch_table(VkInstance instance, VkLayerInstanceDispatchTable *table, PFN_vkGetInstanceProcAddr gpa) {\n'
+            table += '    memset(table, 0, sizeof(*table));\n'
+            table += '    // Instance function pointers\n'
+
+        for item in entries:
+            # Remove 'vk' from proto name
+            base_name = item[0][2:]
+
+            if item[1] is not None:
+                table += '#ifdef %s\n' % item[1]
+
+            # If we're looking for the proc we are passing in, just point the table to it.  This fixes the issue where
+            # a layer overrides the function name for the loader.
+            if (table_type == 'device' and base_name == 'GetDeviceProcAddr'):
+                table += '    table->GetDeviceProcAddr = gpa;\n'
+            elif (table_type != 'device' and base_name == 'GetInstanceProcAddr'):
+                table += '    table->GetInstanceProcAddr = gpa;\n'
+            else:
+                table += '    table->%s = (PFN_%s) gpa(%s, "%s");\n' % (base_name, item[0], table_type, item[0])
+            if item[0] in self.device_extension_list:
+                stub_check = '    if (table->%s == nullptr) { table->%s = (PFN_%s)Stub%s; }\n' % (base_name, base_name, item[0], base_name)
+                table += stub_check
+            if item[1] is not None:
+                table += '#endif // %s\n' % item[1]
+
+        table += '}'
+        return table
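
The stub construction inside AddCommandToDispatchList is pure string surgery on the PFN typedef returned by makeCDecls. Below is a small stand-alone Python sketch of the same steps; the typedef text is a hand-written example rather than real generator output:

    # Mirrors the stub-building string operations above; the typedef is illustrative.
    decl = ('typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory2KHR)('
            'VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos);')
    return_type = 'return VK_SUCCESS;' if 'typedef VkResult' in decl else ''
    body = decl.split('*PFN_vk')[1].replace(')(', '(')
    prefix = ('static VKAPI_ATTR VkResult VKAPI_CALL Stub' if return_type
              else 'static VKAPI_ATTR void VKAPI_CALL Stub')
    stub = (prefix + body).replace(';', ' { ' + return_type + ' };')
    print(stub)
    # static VKAPI_ATTR VkResult VKAPI_CALL StubBindImageMemory2KHR(...) { return VK_SUCCESS; };
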
diff --git a/src/third_party/vulkan-loader/src/scripts/generate_source.py b/src/third_party/vulkan-loader/src/scripts/generate_source.py
new file mode 100755
index 0000000..712747b
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/scripts/generate_source.py
@@ -0,0 +1,113 @@
+#!/usr/bin/env python3
+# Copyright (c) 2019 The Khronos Group Inc.
+# Copyright (c) 2019 Valve Corporation
+# Copyright (c) 2019 LunarG, Inc.
+# Copyright (c) 2019 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Mike Schuchardt <mikes@lunarg.com>
+
+import argparse
+import common_codegen
+import filecmp
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+# files to exclude from --verify check
+verify_exclude = ['.clang-format']
+
+def main(argv):
+    parser = argparse.ArgumentParser(description='Generate source code for this repository')
+    parser.add_argument('registry', metavar='REGISTRY_PATH', help='path to the Vulkan-Headers registry directory')
+    group = parser.add_mutually_exclusive_group()
+    group.add_argument('-i', '--incremental', action='store_true', help='only update repo files that change')
+    group.add_argument('-v', '--verify', action='store_true', help='verify repo files match generator output')
+    args = parser.parse_args(argv)
+
+    gen_cmds = [[common_codegen.repo_relative('scripts/loader_genvk.py'),
+                 '-registry', os.path.abspath(os.path.join(args.registry,  'vk.xml')),
+                 '-quiet',
+                 filename] for filename in ['vk_dispatch_table_helper.h',
+                                            'vk_layer_dispatch_table.h',
+                                            'vk_loader_extensions.h',
+                                            'vk_loader_extensions.c',
+                                            'vk_object_types.h']]
+
+    repo_dir = common_codegen.repo_relative('loader/generated')
+
+    # get directory where generators will run
+    if args.verify or args.incremental:
+        # generate in temp directory so we can compare or copy later
+        temp_obj = tempfile.TemporaryDirectory(prefix='VulkanLoader_generated_source_')
+        temp_dir = temp_obj.name
+        gen_dir = temp_dir
+    else:
+        # generate directly in the repo
+        gen_dir = repo_dir
+
+    # run each code generator
+    for cmd in gen_cmds:
+        print(' '.join(cmd))
+        try:
+            subprocess.check_call([sys.executable] + cmd,
+                                  # ignore generator output, vk_validation_stats.py is especially noisy
+                                  stdout=subprocess.DEVNULL,
+                                  cwd=gen_dir)
+        except Exception as e:
+            print('ERROR:', str(e))
+            return 1
+
+    # optional post-generation steps
+    if args.verify:
+        # compare contents of temp dir and repo
+        temp_files = set(os.listdir(temp_dir))
+        repo_files = set(os.listdir(repo_dir))
+        files_match = True
+        for filename in sorted((temp_files | repo_files) - set(verify_exclude)):
+            if filename not in repo_files:
+                print('ERROR: Missing repo file', filename)
+                files_match = False
+            elif filename not in temp_files:
+                print('ERROR: Missing generator for', filename)
+                files_match = False
+            elif not filecmp.cmp(os.path.join(temp_dir, filename),
+                               os.path.join(repo_dir, filename),
+                               shallow=False):
+                print('ERROR: Repo files do not match generator output for', filename)
+                files_match = False
+
+        # return code for test scripts
+        if files_match:
+            print('SUCCESS: Repo files match generator output')
+            return 0
+        return 1
+
+    elif args.incremental:
+        # copy missing or differing files from temp directory to repo
+        for filename in os.listdir(temp_dir):
+            temp_filename = os.path.join(temp_dir, filename)
+            repo_filename = os.path.join(repo_dir, filename)
+            if not os.path.exists(repo_filename) or \
+               not filecmp.cmp(temp_filename, repo_filename, shallow=False):
+                print('update', repo_filename)
+                shutil.copyfile(temp_filename, repo_filename)
+
+    return 0
+
+if __name__ == '__main__':
+    sys.exit(main(sys.argv[1:]))
+
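
In practice the generator is driven against a Vulkan-Headers checkout. A hypothetical driver (the checkout path is an assumption, not part of the repository layout) that regenerates the sources in place and then verifies the tree is in sync:

    # Illustrative driver for scripts/generate_source.py; the registry path is a
    # placeholder for wherever Vulkan-Headers has been checked out.
    import subprocess
    import sys

    registry = "external/Vulkan-Headers/registry"   # hypothetical directory containing vk.xml
    script = "scripts/generate_source.py"
    subprocess.check_call([sys.executable, script, "--incremental", registry])
    subprocess.check_call([sys.executable, script, "--verify", registry])
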
diff --git a/src/third_party/vulkan-loader/src/scripts/helper_file_generator.py b/src/third_party/vulkan-loader/src/scripts/helper_file_generator.py
new file mode 100644
index 0000000..56045ee
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/scripts/helper_file_generator.py
@@ -0,0 +1,513 @@
+#!/usr/bin/python3 -i
+#
+# Copyright (c) 2015-2017 The Khronos Group Inc.
+# Copyright (c) 2015-2017 Valve Corporation
+# Copyright (c) 2015-2017 LunarG, Inc.
+# Copyright (c) 2015-2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Mark Lobodzinski <mark@lunarg.com>
+# Author: Tobin Ehlis <tobine@google.com>
+# Author: John Zulauf <jzulauf@lunarg.com>
+
+import os,re,sys
+import xml.etree.ElementTree as etree
+from generator import *
+from collections import namedtuple
+from common_codegen import *
+
+#
+# HelperFileOutputGeneratorOptions - subclass of GeneratorOptions.
+class HelperFileOutputGeneratorOptions(GeneratorOptions):
+    def __init__(self,
+                 conventions = None,
+                 filename = None,
+                 directory = '.',
+                 apiname = None,
+                 profile = None,
+                 versions = '.*',
+                 emitversions = '.*',
+                 defaultExtensions = None,
+                 addExtensions = None,
+                 removeExtensions = None,
+                 emitExtensions = None,
+                 sortProcedure = regSortFeatures,
+                 prefixText = "",
+                 genFuncPointers = True,
+                 protectFile = True,
+                 protectFeature = True,
+                 apicall = '',
+                 apientry = '',
+                 apientryp = '',
+                 alignFuncParam = 0,
+                 library_name = '',
+                 expandEnumerants = True,
+                 helper_file_type = ''):
+        GeneratorOptions.__init__(self, conventions, filename, directory, apiname, profile,
+                                  versions, emitversions, defaultExtensions,
+                                  addExtensions, removeExtensions, emitExtensions, sortProcedure)
+        self.prefixText       = prefixText
+        self.genFuncPointers  = genFuncPointers
+        self.protectFile      = protectFile
+        self.protectFeature   = protectFeature
+        self.apicall          = apicall
+        self.apientry         = apientry
+        self.apientryp        = apientryp
+        self.alignFuncParam   = alignFuncParam
+        self.library_name     = library_name
+        self.helper_file_type = helper_file_type
+#
+# HelperFileOutputGenerator - subclass of OutputGenerator. Outputs Vulkan helper files
+class HelperFileOutputGenerator(OutputGenerator):
+    """Generate helper file based on XML element attributes"""
+    def __init__(self,
+                 errFile = sys.stderr,
+                 warnFile = sys.stderr,
+                 diagFile = sys.stdout):
+        OutputGenerator.__init__(self, errFile, warnFile, diagFile)
+        # Internal state - accumulators for different inner block text
+        self.enum_output = ''                             # string built up of enum string routines
+        self.structNames = []                             # List of Vulkan struct typenames
+        self.structTypes = dict()                         # Map of Vulkan struct typename to required VkStructureType
+        self.structMembers = []                           # List of StructMemberData records for all Vulkan structs
+        self.object_types = []                            # List of all handle types
+        self.object_type_aliases = []                     # Aliases to handles types (for handles that were extensions)
+        self.debug_report_object_types = []               # Handy copy of debug_report_object_type enum data
+        self.core_object_types = []                       # Handy copy of core_object_type enum data
+        self.device_extension_info = dict()               # Dict of device extension name defines and ifdef values
+        self.instance_extension_info = dict()             # Dict of instance extension name defines and ifdef values
+
+        # Named tuples to store struct and command data
+        self.StructType = namedtuple('StructType', ['name', 'value'])
+        self.CommandParam = namedtuple('CommandParam', ['type', 'name', 'ispointer', 'isstaticarray', 'isconst', 'iscount', 'len', 'extstructs', 'cdecl'])
+        self.StructMemberData = namedtuple('StructMemberData', ['name', 'members', 'ifdef_protect'])
+
+        self.custom_construct_params = {
+            # safe_VkGraphicsPipelineCreateInfo needs to know if the subpass has color and/or depth/stencil attachments in order to use its pointers
+            'VkGraphicsPipelineCreateInfo' :
+                ', const bool uses_color_attachment, const bool uses_depthstencil_attachment',
+            # safe_VkPipelineViewportStateCreateInfo needs to know if the viewport and scissor are dynamic in order to use its pointers
+            'VkPipelineViewportStateCreateInfo' :
+                ', const bool is_dynamic_viewports, const bool is_dynamic_scissors',
+        }
+    #
+    # Called once at the beginning of each run
+    def beginFile(self, genOpts):
+        OutputGenerator.beginFile(self, genOpts)
+        # User-supplied prefix text, if any (list of strings)
+        self.helper_file_type = genOpts.helper_file_type
+        self.library_name = genOpts.library_name
+        # File Comment
+        file_comment = '// *** THIS FILE IS GENERATED - DO NOT EDIT ***\n'
+        file_comment += '// See helper_file_generator.py for modifications\n'
+        write(file_comment, file=self.outFile)
+        # Copyright Notice
+        copyright = ''
+        copyright += '\n'
+        copyright += '/***************************************************************************\n'
+        copyright += ' *\n'
+        copyright += ' * Copyright (c) 2015-2017 The Khronos Group Inc.\n'
+        copyright += ' * Copyright (c) 2015-2017 Valve Corporation\n'
+        copyright += ' * Copyright (c) 2015-2017 LunarG, Inc.\n'
+        copyright += ' * Copyright (c) 2015-2017 Google Inc.\n'
+        copyright += ' *\n'
+        copyright += ' * Licensed under the Apache License, Version 2.0 (the "License");\n'
+        copyright += ' * you may not use this file except in compliance with the License.\n'
+        copyright += ' * You may obtain a copy of the License at\n'
+        copyright += ' *\n'
+        copyright += ' *     http://www.apache.org/licenses/LICENSE-2.0\n'
+        copyright += ' *\n'
+        copyright += ' * Unless required by applicable law or agreed to in writing, software\n'
+        copyright += ' * distributed under the License is distributed on an "AS IS" BASIS,\n'
+        copyright += ' * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n'
+        copyright += ' * See the License for the specific language governing permissions and\n'
+        copyright += ' * limitations under the License.\n'
+        copyright += ' *\n'
+        copyright += ' * Author: Mark Lobodzinski <mark@lunarg.com>\n'
+        copyright += ' * Author: Courtney Goeltzenleuchter <courtneygo@google.com>\n'
+        copyright += ' * Author: Tobin Ehlis <tobine@google.com>\n'
+        copyright += ' * Author: Chris Forbes <chrisforbes@google.com>\n'
+        copyright += ' * Author: John Zulauf<jzulauf@lunarg.com>\n'
+        copyright += ' *\n'
+        copyright += ' ****************************************************************************/\n'
+        write(copyright, file=self.outFile)
+    #
+    # Write generated file content to output file
+    def endFile(self):
+        dest_file = ''
+        dest_file += self.OutputDestFile()
+        # Remove blank lines at EOF
+        if dest_file.endswith('\n'):
+            dest_file = dest_file[:-1]
+        write(dest_file, file=self.outFile);
+        # Finish processing in superclass
+        OutputGenerator.endFile(self)
+    #
+    # Override parent class to be notified of the beginning of an extension
+    def beginFeature(self, interface, emit):
+        # Start processing in superclass
+        OutputGenerator.beginFeature(self, interface, emit)
+        self.featureExtraProtect = GetFeatureProtect(interface)
+
+        if self.featureName == 'VK_VERSION_1_0' or self.featureName == 'VK_VERSION_1_1':
+            return
+        name = self.featureName
+        nameElem = interface[0][1]
+        name_define = nameElem.get('name')
+        if 'EXTENSION_NAME' not in name_define:
+            print("Error in vk.xml file -- extension name is not available")
+        requires = interface.get('requires')
+        if requires is not None:
+            required_extensions = requires.split(',')
+        else:
+            required_extensions = list()
+        info = { 'define': name_define, 'ifdef':self.featureExtraProtect, 'reqs':required_extensions }
+        if interface.get('type') == 'instance':
+            self.instance_extension_info[name] = info
+        else:
+            self.device_extension_info[name] = info
+
+    #
+    # Override parent class to be notified of the end of an extension
+    def endFeature(self):
+        # Finish processing in superclass
+        OutputGenerator.endFeature(self)
+    #
+    # Grab group (e.g. C "enum" type) info to output for enum-string conversion helper
+    def genGroup(self, groupinfo, groupName, alias):
+        OutputGenerator.genGroup(self, groupinfo, groupName, alias)
+        groupElem = groupinfo.elem
+        # For enum_string_header
+        if self.helper_file_type == 'enum_string_header':
+            value_set = set()
+            for elem in groupElem.findall('enum'):
+                if elem.get('supported') != 'disabled' and elem.get('alias') is None:
+                    value_set.add(elem.get('name'))
+            self.enum_output += self.GenerateEnumStringConversion(groupName, value_set)
+        elif self.helper_file_type == 'object_types_header':
+            if groupName == 'VkDebugReportObjectTypeEXT':
+                for elem in groupElem.findall('enum'):
+                    if elem.get('supported') != 'disabled':
+                        item_name = elem.get('name')
+                        self.debug_report_object_types.append(item_name)
+            elif groupName == 'VkObjectType':
+                for elem in groupElem.findall('enum'):
+                    if elem.get('supported') != 'disabled':
+                        item_name = elem.get('name')
+                        self.core_object_types.append(item_name)
+
+    #
+    # Called for each type -- if the type is a struct/union, grab the metadata
+    def genType(self, typeinfo, name, alias):
+        OutputGenerator.genType(self, typeinfo, name, alias)
+        typeElem = typeinfo.elem
+        # If the type is a struct type, traverse the embedded <member> tags to generate a structure.
+        # Otherwise, emit the tag text.
+        category = typeElem.get('category')
+        if category == 'handle':
+            if alias:
+                self.object_type_aliases.append((name,alias))
+            else:
+                self.object_types.append(name)
+        elif (category == 'struct' or category == 'union'):
+            self.structNames.append(name)
+            self.genStruct(typeinfo, name, alias)
+    #
+    # Generate a VkStructureType based on a structure typename
+    def genVkStructureType(self, typename):
+        # Add underscore between lowercase then uppercase
+        value = re.sub('([a-z0-9])([A-Z])', r'\1_\2', typename)
+        # Change to uppercase
+        value = value.upper()
+        # Add STRUCTURE_TYPE_
+        return re.sub('VK_', 'VK_STRUCTURE_TYPE_', value)
+    #
+    # Check if the parameter passed in is a pointer
+    def paramIsPointer(self, param):
+        ispointer = False
+        for elem in param:
+            if ((elem.tag != 'type') and (elem.tail is not None)) and '*' in elem.tail:
+                ispointer = True
+        return ispointer
+    #
+    # Check if the parameter passed in is a static array
+    def paramIsStaticArray(self, param):
+        isstaticarray = 0
+        paramname = param.find('name')
+        if (paramname.tail is not None) and ('[' in paramname.tail):
+            isstaticarray = paramname.tail.count('[')
+        return isstaticarray
+    #
+    # Retrieve the type and name for a parameter
+    def getTypeNameTuple(self, param):
+        type = ''
+        name = ''
+        for elem in param:
+            if elem.tag == 'type':
+                type = noneStr(elem.text)
+            elif elem.tag == 'name':
+                name = noneStr(elem.text)
+        return (type, name)
+    # Extract length values from latexmath.  Currently an inflexible solution that looks for specific
+    # patterns that are found in vk.xml.  Will need to be updated when new patterns are introduced.
+    def parseLateXMath(self, source):
+        name = 'ERROR'
+        decoratedName = 'ERROR'
+        if 'mathit' in source:
+            # Matches expressions similar to 'latexmath:[\lceil{\mathit{rasterizationSamples} \over 32}\rceil]'
+            match = re.match(r'latexmath\s*\:\s*\[\s*\\l(\w+)\s*\{\s*\\mathit\s*\{\s*(\w+)\s*\}\s*\\over\s*(\d+)\s*\}\s*\\r(\w+)\s*\]', source)
+            if not match or match.group(1) != match.group(4):
+                raise ValueError('Unrecognized latexmath expression')
+            name = match.group(2)
+            # Add 1 to account for the ceiling function; otherwise the allocated packet
+            # size can be smaller than needed during capture for titles that use this
+            # expression in VkPipelineMultisampleStateCreateInfo. By the definition of the
+            # ceiling function, ceil(a/b) is a/b + 1 when a % b != 0 and a/b otherwise, so
+            # its value is always <= a/b + 1.
+            if match.group(1) == 'ceil':
+                decoratedName = '{}/{} + 1'.format(*match.group(2, 3))
+            else:
+                decoratedName = '{}/{}'.format(*match.group(2, 3))
+        else:
+            # Matches expressions similar to 'latexmath : [dataSize \over 4]'
+            match = re.match(r'latexmath\s*\:\s*\[\s*(\\textrm\{)?(\w+)\}?\s*\\over\s*(\d+)\s*\]', source)
+            name = match.group(2)
+            decoratedName = '{}/{}'.format(*match.group(2, 3))
+        return name, decoratedName
+    #
+    # Retrieve the value of the len tag
+    def getLen(self, param):
+        result = None
+        len = param.attrib.get('len')
+        if len and len != 'null-terminated':
+            # For string arrays, 'len' can look like 'count,null-terminated', indicating that we
+            # have a null terminated array of strings.  We strip the null-terminated from the
+            # 'len' field and only return the parameter specifying the string count
+            if 'null-terminated' in len:
+                result = len.split(',')[0]
+            else:
+                result = len
+            if 'latexmath' in len:
+                param_type, param_name = self.getTypeNameTuple(param)
+                len_name, result = self.parseLateXMath(len)
+            # The spec now uses '::' notation for len attributes instead of a platform-specific pointer symbol
+            result = str(result).replace('::', '->')
+        return result
+    #
+    # Check if a structure is or contains a dispatchable (dispatchable = True) or
+    # non-dispatchable (dispatchable = False) handle
+    def TypeContainsObjectHandle(self, handle_type, dispatchable):
+        if dispatchable:
+            type_key = 'VK_DEFINE_HANDLE'
+        else:
+            type_key = 'VK_DEFINE_NON_DISPATCHABLE_HANDLE'
+        handle = self.registry.tree.find("types/type/[name='" + handle_type + "'][@category='handle']")
+        if handle is not None and handle.find('type').text == type_key:
+            return True
+        # if handle_type is a struct, search its members
+        if handle_type in self.structNames:
+            member_index = next((i for i, v in enumerate(self.structMembers) if v[0] == handle_type), None)
+            if member_index is not None:
+                for item in self.structMembers[member_index].members:
+                    handle = self.registry.tree.find("types/type/[name='" + item.type + "'][@category='handle']")
+                    if handle is not None and handle.find('type').text == type_key:
+                        return True
+        return False
+    #
+    # Generate local ready-access data describing Vulkan structures and unions from the XML metadata
+    def genStruct(self, typeinfo, typeName, alias):
+        OutputGenerator.genStruct(self, typeinfo, typeName, alias)
+        members = typeinfo.elem.findall('.//member')
+        # Iterate over members once to get length parameters for arrays
+        lens = set()
+        for member in members:
+            len = self.getLen(member)
+            if len:
+                lens.add(len)
+        # Generate member info
+        membersInfo = []
+        for member in members:
+            # Get the member's type and name
+            info = self.getTypeNameTuple(member)
+            type = info[0]
+            name = info[1]
+            cdecl = self.makeCParamDecl(member, 1)
+            # Process VkStructureType
+            if type == 'VkStructureType':
+                # Extract the required struct type value from the comments
+                # embedded in the original text defining the 'typeinfo' element
+                rawXml = etree.tostring(typeinfo.elem).decode('ascii')
+                result = re.search(r'VK_STRUCTURE_TYPE_\w+', rawXml)
+                if result:
+                    value = result.group(0)
+                else:
+                    value = self.genVkStructureType(typeName)
+                # Store the required type value
+                self.structTypes[typeName] = self.StructType(name=name, value=value)
+            # Store pointer/array/string info
+            isstaticarray = self.paramIsStaticArray(member)
+            membersInfo.append(self.CommandParam(type=type,
+                                                 name=name,
+                                                 ispointer=self.paramIsPointer(member),
+                                                 isstaticarray=isstaticarray,
+                                                 isconst=True if 'const' in cdecl else False,
+                                                 iscount=True if name in lens else False,
+                                                 len=self.getLen(member),
+                                                 extstructs=self.registry.validextensionstructs[typeName] if name == 'pNext' else None,
+                                                 cdecl=cdecl))
+        self.structMembers.append(self.StructMemberData(name=typeName, members=membersInfo, ifdef_protect=self.featureExtraProtect))
+    #
+    # Enum_string_header: Create a routine to convert an enumerated value into a string
+    def GenerateEnumStringConversion(self, groupName, value_list):
+        outstring = '\n'
+        outstring += 'static inline const char* string_%s(%s input_value)\n' % (groupName, groupName)
+        outstring += '{\n'
+        outstring += '    switch ((%s)input_value)\n' % groupName
+        outstring += '    {\n'
+        for item in value_list:
+            outstring += '        case %s:\n' % item
+            outstring += '            return "%s";\n' % item
+        outstring += '        default:\n'
+        outstring += '            return "Unhandled %s";\n' % groupName
+        outstring += '    }\n'
+        outstring += '}\n'
+        return outstring
+    #
+    # Combine object types helper header file preamble with body text and return
+    def GenerateObjectTypesHelperHeader(self):
+        object_types_helper_header = '\n'
+        object_types_helper_header += '#pragma once\n'
+        object_types_helper_header += '\n'
+        object_types_helper_header += '#include <vulkan/vulkan.h>\n\n'
+        object_types_helper_header += self.GenerateObjectTypesHeader()
+        return object_types_helper_header
+    #
+    # Object types header: create object enum type header file
+    def GenerateObjectTypesHeader(self):
+        object_types_header = ''
+        object_types_header += '// Object Type enum for validation layer internal object handling\n'
+        object_types_header += 'typedef enum VulkanObjectType {\n'
+        object_types_header += '    kVulkanObjectTypeUnknown = 0,\n'
+        enum_num = 1
+        type_list = [];
+        enum_entry_map = {}
+
+        # Output enum definition as each handle is processed, saving the names to use for the conversion routine
+        for item in self.object_types:
+            fixup_name = item[2:]
+            enum_entry = 'kVulkanObjectType%s' % fixup_name
+            enum_entry_map[item] = enum_entry
+            object_types_header += '    ' + enum_entry
+            object_types_header += ' = %d,\n' % enum_num
+            enum_num += 1
+            type_list.append(enum_entry)
+        object_types_header += '    kVulkanObjectTypeMax = %d,\n' % enum_num
+        object_types_header += '    // Aliases for backwards compatibility of "promoted" types\n'
+        for (name, alias) in self.object_type_aliases:
+            fixup_name = name[2:]
+            object_types_header += '    kVulkanObjectType{} = {},\n'.format(fixup_name, enum_entry_map[alias])
+        object_types_header += '} VulkanObjectType;\n\n'
+
+        # Output name string helper
+        object_types_header += '// Array of object name strings for OBJECT_TYPE enum conversion\n'
+        object_types_header += 'static const char * const object_string[kVulkanObjectTypeMax] = {\n'
+        object_types_header += '    "Unknown",\n'
+        for item in self.object_types:
+            fixup_name = item[2:]
+            object_types_header += '    "%s",\n' % fixup_name
+        object_types_header += '};\n'
+
+        # Key creation helper for map comprehensions that convert between k<Name> and VK<Name> symbols
+        def to_key(regex, raw_key): return re.search(regex, raw_key).group(1).lower().replace("_","")
+
+        # Output a conversion routine from the layer object definitions to the debug report definitions
+        # As the VK_DEBUG_REPORT types are not being updated, specify UNKNOWN for unmatched types
+        object_types_header += '\n'
+        object_types_header += '// Helper array to get Vulkan VK_EXT_debug_report object type enum from the internal layers version\n'
+        object_types_header += 'const VkDebugReportObjectTypeEXT get_debug_report_enum[] = {\n'
+        object_types_header += '    VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, // kVulkanObjectTypeUnknown\n'
+
+        dbg_re = '^VK_DEBUG_REPORT_OBJECT_TYPE_(.*)_EXT$'
+        dbg_map = {to_key(dbg_re, dbg) : dbg for dbg in self.debug_report_object_types}
+        dbg_default = 'VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT'
+        for object_type in type_list:
+            vk_object_type = dbg_map.get(object_type.replace("kVulkanObjectType", "").lower(), dbg_default)
+            object_types_header += '    %s,   // %s\n' % (vk_object_type, object_type)
+        object_types_header += '};\n'
+
+        # Output a conversion routine from the layer object definitions to the core object type definitions
+        # This will intentionally *fail* for unmatched types as the VK_OBJECT_TYPE list should match the kVulkanObjectType list
+        object_types_header += '\n'
+        object_types_header += '// Helper array to get Official Vulkan VkObjectType enum from the internal layers version\n'
+        object_types_header += 'const VkObjectType get_object_type_enum[] = {\n'
+        object_types_header += '    VK_OBJECT_TYPE_UNKNOWN, // kVulkanObjectTypeUnknown\n'
+
+        vko_re = '^VK_OBJECT_TYPE_(.*)'
+        vko_map = {to_key(vko_re, vko) : vko for vko in self.core_object_types}
+        for object_type in type_list:
+            vk_object_type = vko_map[object_type.replace("kVulkanObjectType", "").lower()]
+            object_types_header += '    %s,   // %s\n' % (vk_object_type, object_type)
+        object_types_header += '};\n'
+
+        # Create a function to convert from VkDebugReportObjectTypeEXT to VkObjectType
+        object_types_header += '\n'
+        object_types_header += '// Helper function to convert from VkDebugReportObjectTypeEXT to VkObjectType\n'
+        object_types_header += 'static inline VkObjectType convertDebugReportObjectToCoreObject(VkDebugReportObjectTypeEXT debug_report_obj){\n'
+        object_types_header += '    if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT) {\n'
+        object_types_header += '        return VK_OBJECT_TYPE_UNKNOWN;\n'
+        for core_object_type in self.core_object_types:
+            core_target_type = core_object_type.replace("VK_OBJECT_TYPE_", "").lower()
+            core_target_type = core_target_type.replace("_", "")
+            for dr_object_type in self.debug_report_object_types:
+                dr_target_type = dr_object_type.replace("VK_DEBUG_REPORT_OBJECT_TYPE_", "").lower()
+                dr_target_type = dr_target_type[:-4]
+                dr_target_type = dr_target_type.replace("_", "")
+                if core_target_type == dr_target_type:
+                    object_types_header += '    } else if (debug_report_obj == %s) {\n' % dr_object_type
+                    object_types_header += '        return %s;\n' % core_object_type
+                    break
+        object_types_header += '    }\n'
+        object_types_header += '    return VK_OBJECT_TYPE_UNKNOWN;\n'
+        object_types_header += '}\n'
+
+        # Create a function to convert from VkObjectType to VkDebugReportObjectTypeEXT
+        object_types_header += '\n'
+        object_types_header += '// Helper function to convert from VkObjectType to VkDebugReportObjectTypeEXT\n'
+        object_types_header += 'static inline VkDebugReportObjectTypeEXT convertCoreObjectToDebugReportObject(VkObjectType core_report_obj){\n'
+        object_types_header += '    if (core_report_obj == VK_OBJECT_TYPE_UNKNOWN) {\n'
+        object_types_header += '        return VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT;\n'
+        for core_object_type in self.core_object_types:
+            core_target_type = core_object_type.replace("VK_OBJECT_TYPE_", "").lower()
+            core_target_type = core_target_type.replace("_", "")
+            for dr_object_type in self.debug_report_object_types:
+                dr_target_type = dr_object_type.replace("VK_DEBUG_REPORT_OBJECT_TYPE_", "").lower()
+                dr_target_type = dr_target_type[:-4]
+                dr_target_type = dr_target_type.replace("_", "")
+                if core_target_type == dr_target_type:
+                    object_types_header += '    } else if (core_report_obj == %s) {\n' % core_object_type
+                    object_types_header += '        return %s;\n' % dr_object_type
+                    break
+        object_types_header += '    }\n'
+        object_types_header += '    return VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT;\n'
+        object_types_header += '}\n'
+        return object_types_header
+
+    #
+    # Create a helper file and return it as a string
+    def OutputDestFile(self):
+        if self.helper_file_type == 'object_types_header':
+            return self.GenerateObjectTypesHelperHeader()
+        else:
+            return 'Bad Helper File Generator Option %s' % self.helper_file_type
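
The genVkStructureType fallback above derives the expected VkStructureType token from a struct typename with two regular-expression passes. The same transformation in a self-contained sketch:

    # Stand-alone copy of the genVkStructureType() transformation defined above.
    import re

    def vk_structure_type(typename):
        value = re.sub('([a-z0-9])([A-Z])', r'\1_\2', typename)    # split camelCase on case changes
        return re.sub('VK_', 'VK_STRUCTURE_TYPE_', value.upper())  # uppercase and add the prefix

    print(vk_structure_type('VkImageCreateInfo'))      # VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO
    print(vk_structure_type('VkBufferMemoryBarrier'))  # VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER
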
diff --git a/src/third_party/vulkan-loader/src/scripts/known_good.json b/src/third_party/vulkan-loader/src/scripts/known_good.json
new file mode 100644
index 0000000..cca68b1
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/scripts/known_good.json
@@ -0,0 +1,15 @@
+{
+  "repos" : [
+    {
+      "name" : "Vulkan-Headers",
+      "url" : "https://github.com/KhronosGroup/Vulkan-Headers.git",
+      "sub_dir" : "Vulkan-Headers",
+      "build_dir" : "Vulkan-Headers/build",
+      "install_dir" : "Vulkan-Headers/build/install",
+      "commit" : "v1.1.128"
+    }
+  ],
+  "install_names" : {
+      "Vulkan-Headers" : "VULKAN_HEADERS_INSTALL_DIR"
+    }
+}
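
A minimal sketch of how a tool might consume this file (assuming it is run from the repository root) is:

    # Reads scripts/known_good.json and reports the pinned dependency revisions;
    # assumes the current working directory is the repository root.
    import json

    with open("scripts/known_good.json") as f:
        known_good = json.load(f)

    for repo in known_good["repos"]:
        print("{name}: {url} @ {commit}".format(**repo))
    # Vulkan-Headers: https://github.com/KhronosGroup/Vulkan-Headers.git @ v1.1.128
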
diff --git a/src/third_party/vulkan-loader/src/scripts/loader_extension_generator.py b/src/third_party/vulkan-loader/src/scripts/loader_extension_generator.py
new file mode 100644
index 0000000..90ca158
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/scripts/loader_extension_generator.py
@@ -0,0 +1,1572 @@
+#!/usr/bin/python3 -i
+#
+# Copyright (c) 2015-2017 The Khronos Group Inc.
+# Copyright (c) 2015-2017 Valve Corporation
+# Copyright (c) 2015-2017 LunarG, Inc.
+# Copyright (c) 2015-2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Mark Young <marky@lunarg.com>
+# Author: Mark Lobodzinski <mark@lunarg.com>
+
+import os,re,sys
+import xml.etree.ElementTree as etree
+from generator import *
+from collections import namedtuple
+from common_codegen import *
+
+
+WSI_EXT_NAMES = ['VK_KHR_surface',
+                 'VK_KHR_display',
+                 'VK_KHR_xlib_surface',
+                 'VK_KHR_xcb_surface',
+                 'VK_KHR_wayland_surface',
+                 'VK_KHR_win32_surface',
+                 'VK_KHR_android_surface',
+                 'VK_MVK_macos_surface',
+                 'VK_MVK_ios_surface',
+                 'VK_EXT_headless_surface',
+                 'VK_EXT_metal_surface',
+                 'VK_KHR_swapchain',
+                 'VK_KHR_display_swapchain',
+                 'VK_KHR_get_display_properties2']
+
+ADD_INST_CMDS = ['vkCreateInstance',
+                 'vkEnumerateInstanceExtensionProperties',
+                 'vkEnumerateInstanceLayerProperties',
+                 'vkEnumerateInstanceVersion']
+
+AVOID_EXT_NAMES = ['VK_EXT_debug_report']
+
+NULL_CHECK_EXT_NAMES= ['VK_EXT_debug_utils']
+
+AVOID_CMD_NAMES = ['vkCreateDebugUtilsMessengerEXT',
+                   'vkDestroyDebugUtilsMessengerEXT',
+                   'vkSubmitDebugUtilsMessageEXT']
+
+DEVICE_CMDS_NEED_TERM = ['vkGetDeviceProcAddr',
+                         'vkCreateSwapchainKHR',
+                         'vkCreateSharedSwapchainsKHR',
+                         'vkGetDeviceGroupSurfacePresentModesKHR',
+                         'vkDebugMarkerSetObjectTagEXT',
+                         'vkDebugMarkerSetObjectNameEXT',
+                         'vkSetDebugUtilsObjectNameEXT',
+                         'vkSetDebugUtilsObjectTagEXT',
+                         'vkGetDeviceGroupSurfacePresentModes2EXT']
+
+ALIASED_CMDS = {
+    'vkEnumeratePhysicalDeviceGroupsKHR':                   'vkEnumeratePhysicalDeviceGroups',
+    'vkGetPhysicalDeviceFeatures2KHR':                      'vkGetPhysicalDeviceFeatures2',
+    'vkGetPhysicalDeviceProperties2KHR':                    'vkGetPhysicalDeviceProperties2',
+    'vkGetPhysicalDeviceFormatProperties2KHR':              'vkGetPhysicalDeviceFormatProperties2',
+    'vkGetPhysicalDeviceImageFormatProperties2KHR':         'vkGetPhysicalDeviceImageFormatProperties2',
+    'vkGetPhysicalDeviceQueueFamilyProperties2KHR':         'vkGetPhysicalDeviceQueueFamilyProperties2',
+    'vkGetPhysicalDeviceMemoryProperties2KHR':              'vkGetPhysicalDeviceMemoryProperties2',
+    'vkGetPhysicalDeviceSparseImageFormatProperties2KHR':   'vkGetPhysicalDeviceSparseImageFormatProperties2',
+    'vkGetPhysicalDeviceExternalBufferPropertiesKHR':       'vkGetPhysicalDeviceExternalBufferProperties',
+    'vkGetPhysicalDeviceExternalSemaphorePropertiesKHR':    'vkGetPhysicalDeviceExternalSemaphoreProperties',
+    'vkGetPhysicalDeviceExternalFencePropertiesKHR':        'vkGetPhysicalDeviceExternalFenceProperties',
+}
+
+PRE_INSTANCE_FUNCTIONS = ['vkEnumerateInstanceExtensionProperties',
+                          'vkEnumerateInstanceLayerProperties',
+                          'vkEnumerateInstanceVersion']
+
+#
+# LoaderExtensionGeneratorOptions - subclass of GeneratorOptions.
+class LoaderExtensionGeneratorOptions(GeneratorOptions):
+    def __init__(self,
+                 conventions = None,
+                 filename = None,
+                 directory = '.',
+                 apiname = None,
+                 profile = None,
+                 versions = '.*',
+                 emitversions = '.*',
+                 defaultExtensions = None,
+                 addExtensions = None,
+                 removeExtensions = None,
+                 emitExtensions = None,
+                 sortProcedure = regSortFeatures,
+                 prefixText = "",
+                 genFuncPointers = True,
+                 protectFile = True,
+                 protectFeature = True,
+                 apicall = '',
+                 apientry = '',
+                 apientryp = '',
+                 indentFuncProto = True,
+                 indentFuncPointer = False,
+                 alignFuncParam = 0,
+                 expandEnumerants = True):
+        GeneratorOptions.__init__(self, conventions, filename, directory, apiname, profile,
+                                  versions, emitversions, defaultExtensions,
+                                  addExtensions, removeExtensions, emitExtensions, sortProcedure)
+        self.prefixText      = prefixText
+        self.prefixText      = None
+        self.apicall         = apicall
+        self.apientry        = apientry
+        self.apientryp       = apientryp
+        self.alignFuncParam  = alignFuncParam
+        self.expandEnumerants = expandEnumerants
+
+#
+# LoaderExtensionOutputGenerator - subclass of OutputGenerator.
+# Generates dispatch table helper header files for LVL
+class LoaderExtensionOutputGenerator(OutputGenerator):
+    """Generate dispatch table helper header based on XML element attributes"""
+    def __init__(self,
+                 errFile = sys.stderr,
+                 warnFile = sys.stderr,
+                 diagFile = sys.stdout):
+        OutputGenerator.__init__(self, errFile, warnFile, diagFile)
+
+        # Internal state - accumulators for different inner block text
+        self.ext_instance_dispatch_list = []  # List of extension entries for instance dispatch list
+        self.ext_device_dispatch_list = []    # List of extension entries for device dispatch list
+        self.core_commands = []               # List of CommandData records for core Vulkan commands
+        self.ext_commands = []                # List of CommandData records for extension Vulkan commands
+        self.CommandParam = namedtuple('CommandParam', ['type', 'name', 'cdecl'])
+        self.CommandData = namedtuple('CommandData', ['name', 'ext_name', 'ext_type', 'require', 'protect', 'return_type', 'handle_type', 'params', 'cdecl'])
+        self.instanceExtensions = []
+        self.ExtensionData = namedtuple('ExtensionData', ['name', 'type', 'protect', 'define', 'num_commands'])
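+        # ExtensionData records are accumulated per feature in endFeature() and
+        # consumed when emitting the instance extension enable union.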
+
+    #
+    # Called once at the beginning of each run
+    def beginFile(self, genOpts):
+        OutputGenerator.beginFile(self, genOpts)
+
+        # User-supplied prefix text, if any (list of strings)
+        if (genOpts.prefixText):
+            for s in genOpts.prefixText:
+                write(s, file=self.outFile)
+
+        # File Comment
+        file_comment = '// *** THIS FILE IS GENERATED - DO NOT EDIT ***\n'
+        file_comment += '// See loader_extension_generator.py for modifications\n'
+        write(file_comment, file=self.outFile)
+
+        # Copyright Notice
+        copyright =  '/*\n'
+        copyright += ' * Copyright (c) 2015-2017 The Khronos Group Inc.\n'
+        copyright += ' * Copyright (c) 2015-2017 Valve Corporation\n'
+        copyright += ' * Copyright (c) 2015-2017 LunarG, Inc.\n'
+        copyright += ' *\n'
+        copyright += ' * Licensed under the Apache License, Version 2.0 (the "License");\n'
+        copyright += ' * you may not use this file except in compliance with the License.\n'
+        copyright += ' * You may obtain a copy of the License at\n'
+        copyright += ' *\n'
+        copyright += ' *     http://www.apache.org/licenses/LICENSE-2.0\n'
+        copyright += ' *\n'
+        copyright += ' * Unless required by applicable law or agreed to in writing, software\n'
+        copyright += ' * distributed under the License is distributed on an "AS IS" BASIS,\n'
+        copyright += ' * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n'
+        copyright += ' * See the License for the specific language governing permissions and\n'
+        copyright += ' * limitations under the License.\n'
+        copyright += ' *\n'
+        copyright += ' * Author: Mark Lobodzinski <mark@lunarg.com>\n'
+        copyright += ' * Author: Mark Young <marky@lunarg.com>\n'
+        copyright += ' */\n'
+
+        preamble = ''
+
+        if self.genOpts.filename == 'vk_loader_extensions.h':
+            preamble += '#pragma once\n'
+
+        elif self.genOpts.filename == 'vk_loader_extensions.c':
+            preamble += '#ifndef _GNU_SOURCE\n'
+            preamble += '#define _GNU_SOURCE\n'
+            preamble += '#endif\n'
+            preamble += '#include <stdio.h>\n'
+            preamble += '#include <stdlib.h>\n'
+            preamble += '#include <string.h>\n'
+            preamble += '#include "vk_loader_platform.h"\n'
+            preamble += '#include "loader.h"\n'
+            preamble += '#include "vk_loader_extensions.h"\n'
+            preamble += '#include <vulkan/vk_icd.h>\n'
+            preamble += '#include "wsi.h"\n'
+            preamble += '#include "debug_utils.h"\n'
+            preamble += '#include "extension_manual.h"\n'
+
+        elif self.genOpts.filename == 'vk_layer_dispatch_table.h':
+            preamble += '#pragma once\n'
+            preamble += '\n'
+            preamble += 'typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_GetPhysicalDeviceProcAddr)(VkInstance instance, const char* pName);\n'
+
+        write(copyright, file=self.outFile)
+        write(preamble, file=self.outFile)
+
+    #
+    # Generate and write the dispatch tables to the output file
+    def endFile(self):
+        file_data = ''
+
+        if self.genOpts.filename == 'vk_loader_extensions.h':
+            file_data += self.OutputPrototypesInHeader()
+            file_data += self.OutputLoaderTerminators()
+            file_data += self.OutputIcdDispatchTable()
+            file_data += self.OutputIcdExtensionEnableUnion()
+
+        elif self.genOpts.filename == 'vk_loader_extensions.c':
+            file_data += self.OutputUtilitiesInSource()
+            file_data += self.OutputIcdDispatchTableInit()
+            file_data += self.OutputLoaderDispatchTables()
+            file_data += self.OutputLoaderLookupFunc()
+            file_data += self.CreateTrampTermFuncs()
+            file_data += self.InstExtensionGPA()
+            file_data += self.InstantExtensionCreate()
+            file_data += self.DeviceExtensionGetTerminator()
+            file_data += self.InitInstLoaderExtensionDispatchTable()
+            file_data += self.OutputInstantExtensionWhitelistArray()
+
+        elif self.genOpts.filename == 'vk_layer_dispatch_table.h':
+            file_data += self.OutputLayerInstanceDispatchTable()
+            file_data += self.OutputLayerDeviceDispatchTable()
+
+        write(file_data, file=self.outFile)
+
+        # Finish processing in superclass
+        OutputGenerator.endFile(self)
+
+    def beginFeature(self, interface, emit):
+        # Start processing in superclass
+        OutputGenerator.beginFeature(self, interface, emit)
+        self.featureExtraProtect = GetFeatureProtect(interface)
+
+        enums = interface[0].findall('enum')
+        self.currentExtension = ''
+        self.name_definition = ''
+
+        for item in enums:
+            name_definition = item.get('name')
+            if 'EXTENSION_NAME' in name_definition:
+                self.name_definition = name_definition
+
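+        # Record the feature's type ('instance' or 'device') and reset the
+        # per-feature command count; endFeature() folds these into an
+        # ExtensionData record.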
+        self.type = interface.get('type')
+        self.num_commands = 0
+        name = interface.get('name')
+        self.currentExtension = name
+
+    #
+    # Process commands, adding to appropriate dispatch tables
+    def genCmd(self, cmdinfo, name, alias):
+        OutputGenerator.genCmd(self, cmdinfo, name, alias)
+
+        # Get first param type
+        params = cmdinfo.elem.findall('param')
+        info = self.getTypeNameTuple(params[0])
+
+        self.num_commands += 1
+
+        if 'android' not in name:
+            self.AddCommandToDispatchList(self.currentExtension, self.type, name, cmdinfo, info[0])
+
+    def endFeature(self):
+
+        if 'android' not in self.currentExtension:
+            self.instanceExtensions.append(self.ExtensionData(name=self.currentExtension,
+                                                              type=self.type,
+                                                              protect=self.featureExtraProtect,
+                                                              define=self.name_definition,
+                                                              num_commands=self.num_commands))
+
+        # Finish processing in superclass
+        OutputGenerator.endFeature(self)
+
+    #
+    # Retrieve the value of the len tag
+    def getLen(self, param):
+        result = None
+        len = param.attrib.get('len')
+        if len and len != 'null-terminated':
+            # For string arrays, 'len' can look like 'count,null-terminated',
+            # indicating that we have a null terminated array of strings.  We
+            # strip the null-terminated from the 'len' field and only return
+            # the parameter specifying the string count
+            if 'null-terminated' in len:
+                result = len.split(',')[0]
+            else:
+                result = len
+            result = str(result).replace('::', '->')
+        return result
+
+    #
+    # Determine if this API should be ignored or added to the instance or device dispatch table
+    def AddCommandToDispatchList(self, extension_name, extension_type, name, cmdinfo, handle_type):
+        handle = self.registry.tree.find("types/type/[name='" + handle_type + "'][@category='handle']")
+
+        return_type = cmdinfo.elem.find('proto/type')
+        if return_type is not None and return_type.text == 'void':
+            return_type = None
+
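+        # vkGetDeviceGroupSurfacePresentModes2EXT sits in a <require> block that
+        # is gated on another extension; record that extension name so the
+        # generated code can account for it.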
+        require = None
+        if name == 'vkGetDeviceGroupSurfacePresentModes2EXT':
+            require_node = self.registry.tree.find("./extensions/extension[@name='{}']/require/command[@name='{}']/..".format(extension_name, name))
+            if 'extension' in require_node.attrib:
+                require = require_node.attrib['extension']
+
+        cmd_params = []
+
+        # Generate a list of commands for use in printing the necessary
+        # core instance terminator prototypes
+        params = cmdinfo.elem.findall('param')
+        lens = set()
+        for param in params:
+            len = self.getLen(param)
+            if len:
+                lens.add(len)
+        paramsInfo = []
+        for param in params:
+            paramInfo = self.getTypeNameTuple(param)
+            param_type = paramInfo[0]
+            param_name = paramInfo[1]
+            param_cdecl = self.makeCParamDecl(param, 0)
+            cmd_params.append(self.CommandParam(type=param_type, name=param_name,
+                                                cdecl=param_cdecl))
+
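+        # Commands whose first parameter is a dispatchable handle other than
+        # VkInstance/VkPhysicalDevice belong in the device dispatch lists;
+        # everything else is treated as instance-level.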
+        if handle is not None and handle_type != 'VkInstance' and handle_type != 'VkPhysicalDevice':
+            # The Core Vulkan code will be wrapped in a feature called VK_VERSION_#_#
+            # For example: VK_VERSION_1_0 wraps the core 1.0 Vulkan functionality
+            if 'VK_VERSION_' in extension_name:
+                self.core_commands.append(
+                    self.CommandData(name=name, ext_name=extension_name,
+                                     ext_type='device',
+                                     require=require,
+                                     protect=self.featureExtraProtect,
+                                     return_type = return_type,
+                                     handle_type = handle_type,
+                                     params = cmd_params,
+                                     cdecl=self.makeCDecls(cmdinfo.elem)[0]))
+            else:
+                self.ext_device_dispatch_list.append((name, self.featureExtraProtect))
+                self.ext_commands.append(
+                    self.CommandData(name=name, ext_name=extension_name,
+                                     ext_type=extension_type,
+                                     require=require,
+                                     protect=self.featureExtraProtect,
+                                     return_type = return_type,
+                                     handle_type = handle_type,
+                                     params = cmd_params,
+                                     cdecl=self.makeCDecls(cmdinfo.elem)[0]))
+        else:
+            # The Core Vulkan code will be wrapped in a feature called VK_VERSION_#_#
+            # For example: VK_VERSION_1_0 wraps the core 1.0 Vulkan functionality
+            if 'VK_VERSION_' in extension_name:
+                self.core_commands.append(
+                    self.CommandData(name=name, ext_name=extension_name,
+                                     ext_type='instance',
+                                     require=require,
+                                     protect=self.featureExtraProtect,
+                                     return_type = return_type,
+                                     handle_type = handle_type,
+                                     params = cmd_params,
+                                     cdecl=self.makeCDecls(cmdinfo.elem)[0]))
+
+            else:
+                self.ext_instance_dispatch_list.append((name, self.featureExtraProtect))
+                self.ext_commands.append(
+                    self.CommandData(name=name, ext_name=extension_name,
+                                     ext_type=extension_type,
+                                     require=require,
+                                     protect=self.featureExtraProtect,
+                                     return_type = return_type,
+                                     handle_type = handle_type,
+                                     params = cmd_params,
+                                     cdecl=self.makeCDecls(cmdinfo.elem)[0]))
+
+    #
+    # Retrieve the type and name for a parameter
+    def getTypeNameTuple(self, param):
+        type = ''
+        name = ''
+        for elem in param:
+            if elem.tag == 'type':
+                type = noneStr(elem.text)
+            elif elem.tag == 'name':
+                name = noneStr(elem.text)
+        return (type, name)
+
+    def OutputPrototypesInHeader(self):
+        protos = ''
+        protos += '// Structures defined externally, but used here\n'
+        protos += 'struct loader_instance;\n'
+        protos += 'struct loader_device;\n'
+        protos += 'struct loader_icd_term;\n'
+        protos += 'struct loader_dev_dispatch_table;\n'
+        protos += '\n'
+        protos += '// Device extension error function\n'
+        protos += 'VKAPI_ATTR VkResult VKAPI_CALL vkDevExtError(VkDevice dev);\n'
+        protos += '\n'
+        protos += '// Extension interception for vkGetInstanceProcAddr function, so we can return\n'
+        protos += '// the appropriate information for any instance extensions we know about.\n'
+        protos += 'bool extension_instance_gpa(struct loader_instance *ptr_instance, const char *name, void **addr);\n'
+        protos += '\n'
+        protos += '// Extension interception for vkCreateInstance function, so we can properly\n'
+        protos += '// detect and enable any instance extension information for extensions we know\n'
+        protos += '// about.\n'
+        protos += 'void extensions_create_instance(struct loader_instance *ptr_instance, const VkInstanceCreateInfo *pCreateInfo);\n'
+        protos += '\n'
+        protos += '// Extension interception for vkGetDeviceProcAddr function, so we can return\n'
+        protos += '// an appropriate terminator if this is one of those few device commands requiring\n'
+        protos += '// a terminator.\n'
+        protos += 'PFN_vkVoidFunction get_extension_device_proc_terminator(struct loader_device *dev, const char *pName);\n'
+        protos += '\n'
+        protos += '// Dispatch table properly filled in with appropriate terminators for the\n'
+        protos += '// supported extensions.\n'
+        protos += 'extern const VkLayerInstanceDispatchTable instance_disp;\n'
+        protos += '\n'
+        protos += '// Array of extension strings for instance extensions we support.\n'
+        protos += 'extern const char *const LOADER_INSTANCE_EXTENSIONS[];\n'
+        protos += '\n'
+        protos += 'VKAPI_ATTR bool VKAPI_CALL loader_icd_init_entries(struct loader_icd_term *icd_term, VkInstance inst,\n'
+        protos += '                                                   const PFN_vkGetInstanceProcAddr fp_gipa);\n'
+        protos += '\n'
+        protos += '// Init Device function pointer dispatch table with core commands\n'
+        protos += 'VKAPI_ATTR void VKAPI_CALL loader_init_device_dispatch_table(struct loader_dev_dispatch_table *dev_table, PFN_vkGetDeviceProcAddr gpa,\n'
+        protos += '                                                             VkDevice dev);\n'
+        protos += '\n'
+        protos += '// Init Device function pointer dispatch table with extension commands\n'
+        protos += 'VKAPI_ATTR void VKAPI_CALL loader_init_device_extension_dispatch_table(struct loader_dev_dispatch_table *dev_table,\n'
+        protos += '                                                                       PFN_vkGetInstanceProcAddr gipa,\n'
+        protos += '                                                                       PFN_vkGetDeviceProcAddr gdpa,\n'
+        protos += '                                                                       VkInstance inst,\n'
+        protos += '                                                                       VkDevice dev);\n'
+        protos += '\n'
+        protos += '// Init Instance function pointer dispatch table with core commands\n'
+        protos += 'VKAPI_ATTR void VKAPI_CALL loader_init_instance_core_dispatch_table(VkLayerInstanceDispatchTable *table, PFN_vkGetInstanceProcAddr gpa,\n'
+        protos += '                                                                    VkInstance inst);\n'
+        protos += '\n'
+        protos += '// Init Instance function pointer dispatch table with extension commands\n'
+        protos += 'VKAPI_ATTR void VKAPI_CALL loader_init_instance_extension_dispatch_table(VkLayerInstanceDispatchTable *table, PFN_vkGetInstanceProcAddr gpa,\n'
+        protos += '                                                                         VkInstance inst);\n'
+        protos += '\n'
+        protos += '// Device command lookup function\n'
+        protos += 'VKAPI_ATTR void* VKAPI_CALL loader_lookup_device_dispatch_table(const VkLayerDispatchTable *table, const char *name);\n'
+        protos += '\n'
+        protos += '// Instance command lookup function\n'
+        protos += 'VKAPI_ATTR void* VKAPI_CALL loader_lookup_instance_dispatch_table(const VkLayerInstanceDispatchTable *table, const char *name,\n'
+        protos += '                                                                  bool *found_name);\n'
+        protos += '\n'
+        protos += 'VKAPI_ATTR bool VKAPI_CALL loader_icd_init_entries(struct loader_icd_term *icd_term, VkInstance inst,\n'
+        protos += '                                                   const PFN_vkGetInstanceProcAddr fp_gipa);\n'
+        protos += '\n'
+        return protos
+
+    def OutputUtilitiesInSource(self):
+        protos = ''
+        protos += '// Device extension error function\n'
+        protos += 'VKAPI_ATTR VkResult VKAPI_CALL vkDevExtError(VkDevice dev) {\n'
+        protos += '    struct loader_device *found_dev;\n'
+        protos += '    // The device going in is a trampoline device\n'
+        protos += '    struct loader_icd_term *icd_term = loader_get_icd_and_device(dev, &found_dev, NULL);\n'
+        protos += '\n'
+        protos += '    if (icd_term)\n'
+        protos += '        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,\n'
+        protos += '                   "Bad destination in loader trampoline dispatch,"\n'
+        protos += '                   "Are layers and extensions that you are calling enabled?");\n'
+        protos += '    return VK_ERROR_EXTENSION_NOT_PRESENT;\n'
+        protos += '}\n\n'
+        return protos
+
+    #
+    # Create a layer instance dispatch table from the appropriate list and return it as a string
+    def OutputLayerInstanceDispatchTable(self):
+        commands = []
+        table = ''
+        cur_extension_name = ''
+
+        table += '// Instance function pointer dispatch table\n'
+        table += 'typedef struct VkLayerInstanceDispatchTable_ {\n'
+
+        # First add an entry for GetPhysicalDeviceProcAddr.  It never appears
+        # in the XML or header, so it has to be added manually.
+        table += '    // Manually add in GetPhysicalDeviceProcAddr entry\n'
+        table += '    PFN_GetPhysicalDeviceProcAddr GetPhysicalDeviceProcAddr;\n'
+
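+        # Two passes: core commands first, then extension commands.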
+        for x in range(0, 2):
+            if x == 0:
+                commands = self.core_commands
+            else:
+                commands = self.ext_commands
+
+            for cur_cmd in commands:
+                is_inst_handle_type = cur_cmd.name in ADD_INST_CMDS or cur_cmd.handle_type == 'VkInstance' or cur_cmd.handle_type == 'VkPhysicalDevice'
+                if is_inst_handle_type:
+
+                    if cur_cmd.ext_name != cur_extension_name:
+                        if 'VK_VERSION_' in cur_cmd.ext_name:
+                            table += '\n    // ---- Core %s commands\n' % cur_cmd.ext_name[11:]
+                        else:
+                            table += '\n    // ---- %s extension commands\n' % cur_cmd.ext_name
+                        cur_extension_name = cur_cmd.ext_name
+
+                    # Remove 'vk' from proto name
+                    base_name = cur_cmd.name[2:]
+
+                    if cur_cmd.protect is not None:
+                        table += '#ifdef %s\n' % cur_cmd.protect
+
+                    table += '    PFN_%s %s;\n' % (cur_cmd.name, base_name)
+
+                    if cur_cmd.protect is not None:
+                        table += '#endif // %s\n' % cur_cmd.protect
+
+        table += '} VkLayerInstanceDispatchTable;\n\n'
+        return table
+
+    #
+    # Create a layer device dispatch table from the appropriate list and return it as a string
+    def OutputLayerDeviceDispatchTable(self):
+        commands = []
+        table = ''
+        cur_extension_name = ''
+
+        table += '// Device function pointer dispatch table\n'
+        table += 'typedef struct VkLayerDispatchTable_ {\n'
+
+        for x in range(0, 2):
+            if x == 0:
+                commands = self.core_commands
+            else:
+                commands = self.ext_commands
+
+            for cur_cmd in commands:
+                is_inst_handle_type = cur_cmd.name in ADD_INST_CMDS or cur_cmd.handle_type == 'VkInstance' or cur_cmd.handle_type == 'VkPhysicalDevice'
+                if not is_inst_handle_type:
+
+                    if cur_cmd.ext_name != cur_extension_name:
+                        if 'VK_VERSION_' in cur_cmd.ext_name:
+                            table += '\n    // ---- Core %s commands\n' % cur_cmd.ext_name[11:]
+                        else:
+                            table += '\n    // ---- %s extension commands\n' % cur_cmd.ext_name
+                        cur_extension_name = cur_cmd.ext_name
+
+                    # Remove 'vk' from proto name
+                    base_name = cur_cmd.name[2:]
+
+                    if cur_cmd.protect is not None:
+                        table += '#ifdef %s\n' % cur_cmd.protect
+
+                    table += '    PFN_%s %s;\n' % (cur_cmd.name, base_name)
+
+                    if cur_cmd.protect is not None:
+                        table += '#endif // %s\n' % cur_cmd.protect
+
+        table += '} VkLayerDispatchTable;\n\n'
+        return table
+
+    #
+    # Create a dispatch table from the appropriate list and return it as a string
+    def OutputIcdDispatchTable(self):
+        commands = []
+        table = ''
+        cur_extension_name = ''
+
+        table += '// ICD function pointer dispatch table\n'
+        table += 'struct loader_icd_term_dispatch {\n'
+
+        for x in range(0, 2):
+            if x == 0:
+                commands = self.core_commands
+            else:
+                commands = self.ext_commands
+
+            for cur_cmd in commands:
+                is_inst_handle_type = cur_cmd.name in ADD_INST_CMDS or cur_cmd.handle_type == 'VkInstance' or cur_cmd.handle_type == 'VkPhysicalDevice'
+                if ((is_inst_handle_type or cur_cmd.name in DEVICE_CMDS_NEED_TERM) and
+                    (cur_cmd.name != 'vkGetInstanceProcAddr' and cur_cmd.name != 'vkEnumerateDeviceLayerProperties')):
+
+                    if cur_cmd.ext_name != cur_extension_name:
+                        if 'VK_VERSION_' in cur_cmd.ext_name:
+                            table += '\n    // ---- Core %s commands\n' % cur_cmd.ext_name[11:]
+                        else:
+                            table += '\n    // ---- %s extension commands\n' % cur_cmd.ext_name
+                        cur_extension_name = cur_cmd.ext_name
+
+                    # Remove 'vk' from proto name
+                    base_name = cur_cmd.name[2:]
+
+                    if cur_cmd.protect is not None:
+                        table += '#ifdef %s\n' % cur_cmd.protect
+
+                    table += '    PFN_%s %s;\n' % (cur_cmd.name, base_name)
+
+                    if cur_cmd.protect is not None:
+                        table += '#endif // %s\n' % cur_cmd.protect
+
+        table += '};\n\n'
+        return table
+
+    #
+    # Init a dispatch table from the appropriate list and return it as a string
+    def OutputIcdDispatchTableInit(self):
+        commands = []
+        cur_extension_name = ''
+
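+        # The generated LOOKUP_GIPA macro resolves each entry point through the
+        # ICD's vkGetInstanceProcAddr; only core VK_VERSION_1_0 commands are
+        # marked required, so a missing optional entry point does not fail
+        # loader_icd_init_entries().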
+        table = ''
+        table += 'VKAPI_ATTR bool VKAPI_CALL loader_icd_init_entries(struct loader_icd_term *icd_term, VkInstance inst,\n'
+        table += '                                                   const PFN_vkGetInstanceProcAddr fp_gipa) {\n'
+        table += '\n'
+        table += '#define LOOKUP_GIPA(func, required)                                                        \\\n'
+        table += '    do {                                                                                   \\\n'
+        table += '        icd_term->dispatch.func = (PFN_vk##func)fp_gipa(inst, "vk" #func);                 \\\n'
+        table += '        if (!icd_term->dispatch.func && required) {                                        \\\n'
+        table += '            loader_log((struct loader_instance *)inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, \\\n'
+        table += '                       loader_platform_get_proc_address_error("vk" #func));                \\\n'
+        table += '            return false;                                                                  \\\n'
+        table += '        }                                                                                  \\\n'
+        table += '    } while (0)\n'
+        table += '\n'
+
+        skip_gipa_commands = ['vkGetInstanceProcAddr',
+                              'vkEnumerateDeviceLayerProperties',
+                              'vkCreateInstance',
+                              'vkEnumerateInstanceExtensionProperties',
+                              'vkEnumerateInstanceLayerProperties',
+                              'vkEnumerateInstanceVersion',
+                             ]
+
+        for x in range(0, 2):
+            if x == 0:
+                commands = self.core_commands
+            else:
+                commands = self.ext_commands
+
+            required = False
+            for cur_cmd in commands:
+                is_inst_handle_type = cur_cmd.handle_type == 'VkInstance' or cur_cmd.handle_type == 'VkPhysicalDevice'
+                if ((is_inst_handle_type or cur_cmd.name in DEVICE_CMDS_NEED_TERM) and (cur_cmd.name not in skip_gipa_commands)):
+
+                    if cur_cmd.ext_name != cur_extension_name:
+                        if 'VK_VERSION_' in cur_cmd.ext_name:
+                            table += '\n    // ---- Core %s\n' % cur_cmd.ext_name[11:]
+                            required = cur_cmd.ext_name == 'VK_VERSION_1_0'
+                        else:
+                            table += '\n    // ---- %s extension commands\n' % cur_cmd.ext_name
+                            required = False
+                        cur_extension_name = cur_cmd.ext_name
+
+                    # Remove 'vk' from proto name
+                    base_name = cur_cmd.name[2:]
+
+                    if cur_cmd.protect is not None:
+                        table += '#ifdef %s\n' % cur_cmd.protect
+
+                    # The Core Vulkan code will be wrapped in a feature called VK_VERSION_#_#
+                    # For example: VK_VERSION_1_0 wraps the core 1.0 Vulkan functionality
+                    table += '    LOOKUP_GIPA(%s, %s);\n' % (base_name, 'true' if required else 'false')
+
+                    if cur_cmd.protect is not None:
+                        table += '#endif // %s\n' % cur_cmd.protect
+
+        table += '\n'
+        table += '#undef LOOKUP_GIPA\n'
+        table += '\n'
+        table += '    return true;\n'
+        table += '}\n\n'
+        return table
+
+    #
+    # Create the extension enable union
+    def OutputIcdExtensionEnableUnion(self):
+        extensions = self.instanceExtensions
+
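+        # One enable bit per known instance extension; WSI extensions, device
+        # extensions, and extensions with no commands are skipped.  The padding
+        # member fixes the union's size at 32 bytes.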
+        union = ''
+        union += 'union loader_instance_extension_enables {\n'
+        union += '    struct {\n'
+        for ext in extensions:
+            if ('VK_VERSION_' in ext.name or ext.name in WSI_EXT_NAMES or
+                ext.type == 'device' or ext.num_commands == 0):
+                continue
+
+            union += '        uint8_t %s : 1;\n' % ext.name[3:].lower()
+
+        union += '    };\n'
+        union += '    uint64_t padding[4];\n'
+        union += '};\n\n'
+        return union
+
+    #
+    # Creates the prototypes for the loader's core instance command terminators
+    def OutputLoaderTerminators(self):
+        terminators = ''
+        terminators += '// Loader core instance terminators\n'
+
+        for cur_cmd in self.core_commands:
+            is_inst_handle_type = cur_cmd.name in ADD_INST_CMDS or cur_cmd.handle_type == 'VkInstance' or cur_cmd.handle_type == 'VkPhysicalDevice'
+            if is_inst_handle_type:
+                mod_string = ''
+                new_terminator = cur_cmd.cdecl
+                mod_string = new_terminator.replace("VKAPI_CALL vk", "VKAPI_CALL terminator_")
+
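+                # Pre-instance functions get an extra chain parameter inserted
+                # into their terminator prototype.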
+                if cur_cmd.name in PRE_INSTANCE_FUNCTIONS:
+                    mod_string = mod_string.replace(cur_cmd.name[2:] + '(\n', cur_cmd.name[2:] + '(\n    const Vk' + cur_cmd.name[2:] + 'Chain* chain,\n')
+
+                if (cur_cmd.protect is not None):
+                    terminators += '#ifdef %s\n' % cur_cmd.protect
+
+                terminators += mod_string
+                terminators += '\n'
+
+                if (cur_cmd.protect is not None):
+                    terminators += '#endif // %s\n' % cur_cmd.protect
+
+        terminators += '\n'
+        return terminators
+
+    #
+    # Creates code to initialize the various dispatch tables
+    def OutputLoaderDispatchTables(self):
+        commands = []
+        tables = ''
+        gpa_param = ''
+        cur_type = ''
+        cur_extension_name = ''
+
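+        # Four passes: device core, device extension, instance core, and
+        # instance extension dispatch-table initializers.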
+        for x in range(0, 4):
+            if x == 0:
+                cur_type = 'device'
+                gpa_param = 'dev'
+                commands = self.core_commands
+
+                tables += '// Init Device function pointer dispatch table with core commands\n'
+                tables += 'VKAPI_ATTR void VKAPI_CALL loader_init_device_dispatch_table(struct loader_dev_dispatch_table *dev_table, PFN_vkGetDeviceProcAddr gpa,\n'
+                tables += '                                                             VkDevice dev) {\n'
+                tables += '    VkLayerDispatchTable *table = &dev_table->core_dispatch;\n'
+                tables += '    for (uint32_t i = 0; i < MAX_NUM_UNKNOWN_EXTS; i++) dev_table->ext_dispatch.dev_ext[i] = (PFN_vkDevExt)vkDevExtError;\n'
+
+            elif x == 1:
+                cur_type = 'device'
+                gpa_param = 'dev'
+                commands = self.ext_commands
+
+                tables += '// Init Device function pointer dispatch table with extension commands\n'
+                tables += 'VKAPI_ATTR void VKAPI_CALL loader_init_device_extension_dispatch_table(struct loader_dev_dispatch_table *dev_table,\n'
+                tables += '                                                                       PFN_vkGetInstanceProcAddr gipa,\n'
+                tables += '                                                                       PFN_vkGetDeviceProcAddr gdpa,\n'
+                tables += '                                                                       VkInstance inst,\n'
+                tables += '                                                                       VkDevice dev) {\n'
+                tables += '    VkLayerDispatchTable *table = &dev_table->core_dispatch;\n'
+
+            elif x == 2:
+                cur_type = 'instance'
+                gpa_param = 'inst'
+                commands = self.core_commands
+
+                tables += '// Init Instance function pointer dispatch table with core commands\n'
+                tables += 'VKAPI_ATTR void VKAPI_CALL loader_init_instance_core_dispatch_table(VkLayerInstanceDispatchTable *table, PFN_vkGetInstanceProcAddr gpa,\n'
+                tables += '                                                                    VkInstance inst) {\n'
+
+            else:
+                cur_type = 'instance'
+                gpa_param = 'inst'
+                commands = self.ext_commands
+
+                tables += '// Init Instance function pointer dispatch table with extension commands\n'
+                tables += 'VKAPI_ATTR void VKAPI_CALL loader_init_instance_extension_dispatch_table(VkLayerInstanceDispatchTable *table, PFN_vkGetInstanceProcAddr gpa,\n'
+                tables += '                                                                        VkInstance inst) {\n'
+
+            for cur_cmd in commands:
+                is_inst_handle_type = cur_cmd.handle_type == 'VkInstance' or cur_cmd.handle_type == 'VkPhysicalDevice'
+                if ((cur_type == 'instance' and is_inst_handle_type) or (cur_type == 'device' and not is_inst_handle_type)):
+                    if cur_cmd.ext_name != cur_extension_name:
+                        if 'VK_VERSION_' in cur_cmd.ext_name:
+                            tables += '\n    // ---- Core %s commands\n' % cur_cmd.ext_name[11:]
+                        else:
+                            tables += '\n    // ---- %s extension commands\n' % cur_cmd.ext_name
+                        cur_extension_name = cur_cmd.ext_name
+
+                    # Remove 'vk' from proto name
+                    base_name = cur_cmd.name[2:]
+
+                    # Names to skip
+                    if (base_name == 'CreateInstance' or base_name == 'CreateDevice' or
+                        base_name == 'EnumerateInstanceExtensionProperties' or
+                        base_name == 'EnumerateInstanceLayerProperties' or
+                        base_name == 'EnumerateInstanceVersion'):
+                        continue
+
+                    if cur_cmd.protect is not None:
+                        tables += '#ifdef %s\n' % cur_cmd.protect
+
+                    # If we're looking for the proc we are passing in, just point the table to it.  This fixes the issue where
+                    # a layer overrides the function name for the loader.
+                    if x == 1:
+                        if base_name == 'GetDeviceProcAddr':
+                            tables += '    table->GetDeviceProcAddr = gdpa;\n'
+                        elif cur_cmd.ext_type == 'instance':
+                            tables += '    table->%s = (PFN_%s)gipa(inst, "%s");\n' % (base_name, cur_cmd.name, cur_cmd.name)
+                        else:
+                            tables += '    table->%s = (PFN_%s)gdpa(dev, "%s");\n' % (base_name, cur_cmd.name, cur_cmd.name)
+                    elif (x < 1 and base_name == 'GetDeviceProcAddr'):
+                        tables += '    table->GetDeviceProcAddr = gpa;\n'
+                    elif (x > 1 and base_name == 'GetInstanceProcAddr'):
+                        tables += '    table->GetInstanceProcAddr = gpa;\n'
+                    else:
+                        tables += '    table->%s = (PFN_%s)gpa(%s, "%s");\n' % (base_name, cur_cmd.name, gpa_param, cur_cmd.name)
+
+                    if cur_cmd.protect is not None:
+                        tables += '#endif // %s\n' % cur_cmd.protect
+
+            tables += '}\n\n'
+        return tables
+
+    #
+    # Create a lookup table function from the appropriate list of entrypoints and
+    # return it as a string
+    def OutputLoaderLookupFunc(self):
+        commands = []
+        tables = ''
+        cur_type = ''
+        cur_extension_name = ''
+
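+        # Pass 0 emits the device lookup function, pass 1 the instance lookup;
+        # both strip the leading "vk" before comparing names.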
+        for x in range(0, 2):
+            if x == 0:
+                cur_type = 'device'
+
+                tables += '// Device command lookup function\n'
+                tables += 'VKAPI_ATTR void* VKAPI_CALL loader_lookup_device_dispatch_table(const VkLayerDispatchTable *table, const char *name) {\n'
+                tables += '    if (!name || name[0] != \'v\' || name[1] != \'k\') return NULL;\n'
+                tables += '\n'
+                tables += '    name += 2;\n'
+            else:
+                cur_type = 'instance'
+
+                tables += '// Instance command lookup function\n'
+                tables += 'VKAPI_ATTR void* VKAPI_CALL loader_lookup_instance_dispatch_table(const VkLayerInstanceDispatchTable *table, const char *name,\n'
+                tables += '                                                                 bool *found_name) {\n'
+                tables += '    if (!name || name[0] != \'v\' || name[1] != \'k\') {\n'
+                tables += '        *found_name = false;\n'
+                tables += '        return NULL;\n'
+                tables += '    }\n'
+                tables += '\n'
+                tables += '    *found_name = true;\n'
+                tables += '    name += 2;\n'
+
+            for y in range(0, 2):
+                if y == 0:
+                    commands = self.core_commands
+                else:
+                    commands = self.ext_commands
+
+                for cur_cmd in commands:
+                    is_inst_handle_type = cur_cmd.handle_type == 'VkInstance' or cur_cmd.handle_type == 'VkPhysicalDevice'
+                    if ((cur_type == 'instance' and is_inst_handle_type) or (cur_type == 'device' and not is_inst_handle_type)):
+
+                        if cur_cmd.ext_name != cur_extension_name:
+                            if 'VK_VERSION_' in cur_cmd.ext_name:
+                                tables += '\n    // ---- Core %s commands\n' % cur_cmd.ext_name[11:]
+                            else:
+                                tables += '\n    // ---- %s extension commands\n' % cur_cmd.ext_name
+                            cur_extension_name = cur_cmd.ext_name
+
+                        # Remove 'vk' from proto name
+                        base_name = cur_cmd.name[2:]
+
+                        if (base_name == 'CreateInstance' or base_name == 'CreateDevice' or
+                            base_name == 'EnumerateInstanceExtensionProperties' or
+                            base_name == 'EnumerateInstanceLayerProperties' or
+                            base_name == 'EnumerateInstanceVersion'):
+                            continue
+
+                        if cur_cmd.protect is not None:
+                            tables += '#ifdef %s\n' % cur_cmd.protect
+
+                        tables += '    if (!strcmp(name, "%s")) return (void *)table->%s;\n' % (base_name, base_name)
+
+                        if cur_cmd.protect is not None:
+                            tables += '#endif // %s\n' % cur_cmd.protect
+
+            tables += '\n'
+            if x == 1:
+                tables += '    *found_name = false;\n'
+            tables += '    return NULL;\n'
+            tables += '}\n\n'
+        return tables
+
+    #
+    # Create the appropriate trampoline (and possibly terminator) functions
+    def CreateTrampTermFuncs(self):
+        entries = []
+        funcs = ''
+        cur_extension_name = ''
+
+        # Some extensions have to be manually added.  Skip those in the automatic
+        # generation.  They will be manually added later.
+        manual_ext_commands = ['vkEnumeratePhysicalDeviceGroupsKHR',
+                               'vkGetPhysicalDeviceExternalImageFormatPropertiesNV',
+                               'vkGetPhysicalDeviceFeatures2KHR',
+                               'vkGetPhysicalDeviceProperties2KHR',
+                               'vkGetPhysicalDeviceFormatProperties2KHR',
+                               'vkGetPhysicalDeviceImageFormatProperties2KHR',
+                               'vkGetPhysicalDeviceQueueFamilyProperties2KHR',
+                               'vkGetPhysicalDeviceMemoryProperties2KHR',
+                               'vkGetPhysicalDeviceSparseImageFormatProperties2KHR',
+                               'vkGetPhysicalDeviceSurfaceCapabilities2KHR',
+                               'vkGetPhysicalDeviceSurfaceFormats2KHR',
+                               'vkGetPhysicalDeviceSurfaceCapabilities2EXT',
+                               'vkReleaseDisplayEXT',
+                               'vkAcquireXlibDisplayEXT',
+                               'vkGetRandROutputDisplayEXT',
+                               'vkGetPhysicalDeviceExternalBufferPropertiesKHR',
+                               'vkGetPhysicalDeviceExternalSemaphorePropertiesKHR',
+                               'vkGetPhysicalDeviceExternalFencePropertiesKHR',
+                               'vkGetPhysicalDeviceDisplayProperties2KHR',
+                               'vkGetPhysicalDeviceDisplayPlaneProperties2KHR',
+                               'vkGetDisplayModeProperties2KHR',
+                               'vkGetDisplayPlaneCapabilities2KHR',
+                               'vkGetPhysicalDeviceSurfacePresentModes2EXT',
+                               'vkGetDeviceGroupSurfacePresentModes2EXT']
+
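+        # For every remaining extension command, emit a trampoline that forwards
+        # through the layer dispatch table and, where required, a terminator that
+        # unwraps loader-owned objects (physical devices, surfaces) before
+        # calling into the ICD.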
+        for ext_cmd in self.ext_commands:
+            if (ext_cmd.ext_name in WSI_EXT_NAMES or
+                ext_cmd.ext_name in AVOID_EXT_NAMES or
+                ext_cmd.name in AVOID_CMD_NAMES or
+                ext_cmd.name in manual_ext_commands):
+                continue
+
+            if ext_cmd.ext_name != cur_extension_name:
+                if 'VK_VERSION_' in ext_cmd.ext_name:
+                    funcs += '\n// ---- Core %s trampoline/terminators\n\n' % ext_cmd.ext_name[11:]
+                else:
+                    funcs += '\n// ---- %s extension trampoline/terminators\n\n' % ext_cmd.ext_name
+                cur_extension_name = ext_cmd.ext_name
+
+            if ext_cmd.protect is not None:
+                funcs += '#ifdef %s\n' % ext_cmd.protect
+
+            func_header = ext_cmd.cdecl.replace(";", " {\n")
+            tramp_header = func_header.replace("VKAPI_CALL vk", "VKAPI_CALL ")
+            return_prefix = '    '
+            base_name = ext_cmd.name[2:]
+            has_surface = 0
+            update_structure_surface = 0
+            update_structure_string = ''
+            requires_terminator = 0
+            surface_var_name = ''
+            phys_dev_var_name = ''
+            has_return_type = False
+            always_use_param_name = True
+            surface_type_to_replace = ''
+            surface_name_replacement = ''
+            physdev_type_to_replace = ''
+            physdev_name_replacement = ''
+
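+            # Scan the parameters: a VkSurfaceKHR, VkPhysicalDeviceSurfaceInfo2KHR,
+            # or VkPhysicalDevice argument forces a terminator that substitutes the
+            # ICD's own handle for the loader's wrapper.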
+            for param in ext_cmd.params:
+                if param.type == 'VkSurfaceKHR':
+                    has_surface = 1
+                    surface_var_name = param.name
+                    requires_terminator = 1
+                    always_use_param_name = False
+                    surface_type_to_replace = 'VkSurfaceKHR'
+                    surface_name_replacement = 'icd_surface->real_icd_surfaces[icd_index]'
+                if param.type == 'VkPhysicalDeviceSurfaceInfo2KHR':
+                    has_surface = 1
+                    surface_var_name = param.name + '->surface'
+                    requires_terminator = 1
+                    update_structure_surface = 1
+                    update_structure_string = '        VkPhysicalDeviceSurfaceInfo2KHR info_copy = *pSurfaceInfo;\n'
+                    update_structure_string += '        info_copy.surface = icd_surface->real_icd_surfaces[icd_index];\n'
+                    always_use_param_name = False
+                    surface_type_to_replace = 'VkPhysicalDeviceSurfaceInfo2KHR'
+                    surface_name_replacement = '&info_copy'
+                if param.type == 'VkPhysicalDevice':
+                    requires_terminator = 1
+                    phys_dev_var_name = param.name
+                    always_use_param_name = False
+                    physdev_type_to_replace = 'VkPhysicalDevice'
+                    physdev_name_replacement = 'phys_dev_term->phys_dev'
+
+            if (ext_cmd.return_type is not None):
+                return_prefix += 'return '
+                has_return_type = True
+
+            if (ext_cmd.handle_type == 'VkInstance' or ext_cmd.handle_type == 'VkPhysicalDevice' or
+                'DebugMarkerSetObject' in ext_cmd.name or 'SetDebugUtilsObject' in ext_cmd.name or
+                ext_cmd.name in DEVICE_CMDS_NEED_TERM):
+                requires_terminator = 1
+
+            if requires_terminator == 1:
+                term_header = tramp_header.replace("VKAPI_CALL ", "VKAPI_CALL terminator_")
+
+                funcs += tramp_header
+
+                if ext_cmd.handle_type == 'VkPhysicalDevice':
+                    funcs += '    const VkLayerInstanceDispatchTable *disp;\n'
+                    funcs += '    VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(%s);\n' % (phys_dev_var_name)
+                    funcs += '    disp = loader_get_instance_layer_dispatch(%s);\n' % (phys_dev_var_name)
+                elif ext_cmd.handle_type == 'VkInstance':
+                    funcs += '#error("Not implemented. Likely needs to be manually generated!");\n'
+                else:
+                    funcs += '    const VkLayerDispatchTable *disp = loader_get_dispatch('
+                    funcs += ext_cmd.params[0].name
+                    funcs += ');\n'
+
+                if 'DebugMarkerSetObjectName' in ext_cmd.name:
+                    funcs += '    VkDebugMarkerObjectNameInfoEXT local_name_info;\n'
+                    funcs += '    memcpy(&local_name_info, pNameInfo, sizeof(VkDebugMarkerObjectNameInfoEXT));\n'
+                    funcs += '    // If this is a physical device, we have to replace it with the proper one for the next call.\n'
+                    funcs += '    if (pNameInfo->objectType == VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT) {\n'
+                    funcs += '        struct loader_physical_device_tramp *phys_dev_tramp = (struct loader_physical_device_tramp *)(uintptr_t)pNameInfo->object;\n'
+                    funcs += '        local_name_info.object = (uint64_t)(uintptr_t)phys_dev_tramp->phys_dev;\n'
+                    funcs += '    }\n'
+                elif 'DebugMarkerSetObjectTag' in ext_cmd.name:
+                    funcs += '    VkDebugMarkerObjectTagInfoEXT local_tag_info;\n'
+                    funcs += '    memcpy(&local_tag_info, pTagInfo, sizeof(VkDebugMarkerObjectTagInfoEXT));\n'
+                    funcs += '    // If this is a physical device, we have to replace it with the proper one for the next call.\n'
+                    funcs += '    if (pTagInfo->objectType == VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT) {\n'
+                    funcs += '        struct loader_physical_device_tramp *phys_dev_tramp = (struct loader_physical_device_tramp *)(uintptr_t)pTagInfo->object;\n'
+                    funcs += '        local_tag_info.object = (uint64_t)(uintptr_t)phys_dev_tramp->phys_dev;\n'
+                    funcs += '    }\n'
+                elif 'SetDebugUtilsObjectName' in ext_cmd.name:
+                    funcs += '    VkDebugUtilsObjectNameInfoEXT local_name_info;\n'
+                    funcs += '    memcpy(&local_name_info, pNameInfo, sizeof(VkDebugUtilsObjectNameInfoEXT));\n'
+                    funcs += '    // If this is a physical device, we have to replace it with the proper one for the next call.\n'
+                    funcs += '    if (pNameInfo->objectType == VK_OBJECT_TYPE_PHYSICAL_DEVICE) {\n'
+                    funcs += '        struct loader_physical_device_tramp *phys_dev_tramp = (struct loader_physical_device_tramp *)(uintptr_t)pNameInfo->objectHandle;\n'
+                    funcs += '        local_name_info.objectHandle = (uint64_t)(uintptr_t)phys_dev_tramp->phys_dev;\n'
+                    funcs += '    }\n'
+                elif 'SetDebugUtilsObjectTag' in ext_cmd.name:
+                    funcs += '    VkDebugUtilsObjectTagInfoEXT local_tag_info;\n'
+                    funcs += '    memcpy(&local_tag_info, pTagInfo, sizeof(VkDebugUtilsObjectTagInfoEXT));\n'
+                    funcs += '    // If this is a physical device, we have to replace it with the proper one for the next call.\n'
+                    funcs += '    if (pTagInfo->objectType == VK_OBJECT_TYPE_PHYSICAL_DEVICE) {\n'
+                    funcs += '        struct loader_physical_device_tramp *phys_dev_tramp = (struct loader_physical_device_tramp *)(uintptr_t)pTagInfo->objectHandle;\n'
+                    funcs += '        local_tag_info.objectHandle = (uint64_t)(uintptr_t)phys_dev_tramp->phys_dev;\n'
+                    funcs += '    }\n'
+
+                if ext_cmd.ext_name in NULL_CHECK_EXT_NAMES:
+                    funcs += '    if (disp->' + base_name + ' != NULL) {\n'
+                    funcs += '    '
+                funcs += return_prefix
+                funcs += 'disp->'
+                funcs += base_name
+                funcs += '('
+                count = 0
+                for param in ext_cmd.params:
+                    if count != 0:
+                        funcs += ', '
+
+                    if param.type == 'VkPhysicalDevice':
+                        funcs += 'unwrapped_phys_dev'
+                    elif ('DebugMarkerSetObject' in ext_cmd.name or 'SetDebugUtilsObject' in ext_cmd.name) and param.name == 'pNameInfo':
+                        funcs += '&local_name_info'
+                    elif ('DebugMarkerSetObject' in ext_cmd.name or 'SetDebugUtilsObject' in ext_cmd.name) and param.name == 'pTagInfo':
+                        funcs += '&local_tag_info'
+                    else:
+                        funcs += param.name
+
+                    count += 1
+                funcs += ');\n'
+                if ext_cmd.ext_name in NULL_CHECK_EXT_NAMES:
+                    if ext_cmd.return_type is not None:
+                        funcs += '    } else {\n'
+                        funcs += '        return VK_SUCCESS;\n'
+                    funcs += '    }\n'
+                funcs += '}\n\n'
+
+                funcs += term_header
+                if ext_cmd.handle_type == 'VkPhysicalDevice':
+                    funcs += '    struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)%s;\n' % (phys_dev_var_name)
+                    funcs += '    struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;\n'
+                    funcs += '    if (NULL == icd_term->dispatch.'
+                    funcs += base_name
+                    funcs += ') {\n'
+                    funcs += '        loader_log(icd_term->this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,\n'
+                    funcs += '                   "ICD associated with VkPhysicalDevice does not support '
+                    funcs += base_name
+                    funcs += '");\n'
+                    funcs += '    }\n'
+
+                    if has_surface == 1:
+                        funcs += '    VkIcdSurface *icd_surface = (VkIcdSurface *)(%s);\n' % (surface_var_name)
+                        funcs += '    uint8_t icd_index = phys_dev_term->icd_index;\n'
+                        funcs += '    if (NULL != icd_surface->real_icd_surfaces && NULL != (void *)icd_surface->real_icd_surfaces[icd_index]) {\n'
+
+                        # If there's a structure with a surface, we need to update its internals with the correct surface for the ICD
+                        if update_structure_surface == 1:
+                            funcs += update_structure_string
+
+                        funcs += '    ' + return_prefix + 'icd_term->dispatch.'
+                        funcs += base_name
+                        funcs += '('
+                        count = 0
+                        for param in ext_cmd.params:
+                            if count != 0:
+                                funcs += ', '
+
+                            if not always_use_param_name:
+                                if surface_type_to_replace and surface_type_to_replace == param.type:
+                                    funcs += surface_name_replacement
+                                elif physdev_type_to_replace and physdev_type_to_replace == param.type:
+                                    funcs += physdev_name_replacement
+                                else:
+                                    funcs += param.name
+                            else:
+                                funcs += param.name
+
+                            count += 1
+                        funcs += ');\n'
+                        if not has_return_type:
+                            funcs += '        return;\n'
+                        funcs += '    }\n'
+
+                    funcs += return_prefix
+                    funcs += 'icd_term->dispatch.'
+                    funcs += base_name
+                    funcs += '('
+                    count = 0
+                    for param in ext_cmd.params:
+                        if count != 0:
+                            funcs += ', '
+
+                        if param.type == 'VkPhysicalDevice':
+                            funcs += 'phys_dev_term->phys_dev'
+                        else:
+                            funcs += param.name
+
+                        count += 1
+                    funcs += ');\n'
+
+                elif has_surface == 1 and not (ext_cmd.handle_type == 'VkPhysicalDevice' or ext_cmd.handle_type == 'VkInstance'):
+                    funcs += '    uint32_t icd_index = 0;\n'
+                    funcs += '    struct loader_device *dev;\n'
+                    funcs += '    struct loader_icd_term *icd_term = loader_get_icd_and_device(device, &dev, &icd_index);\n'
+                    funcs += '    if (NULL != icd_term && NULL != icd_term->dispatch.%s) {\n' % base_name
+                    funcs += '        VkIcdSurface *icd_surface = (VkIcdSurface *)(uintptr_t)%s;\n' % (surface_var_name)
+                    funcs += '        if (NULL != icd_surface->real_icd_surfaces && (VkSurfaceKHR)NULL != icd_surface->real_icd_surfaces[icd_index]) {\n'
+                    funcs += '        %sicd_term->dispatch.%s(' % (return_prefix, base_name)
+                    count = 0
+                    for param in ext_cmd.params:
+                        if count != 0:
+                            funcs += ', '
+
+                        if param.type == 'VkSurfaceKHR':
+                            funcs += 'icd_surface->real_icd_surfaces[icd_index]'
+                        else:
+                            funcs += param.name
+
+                        count += 1
+                    funcs += ');\n'
+                    if not has_return_type:
+                        funcs += '                return;\n'
+                    funcs += '        }\n'
+                    funcs += '    %sicd_term->dispatch.%s(' % (return_prefix, base_name)
+                    count = 0
+                    for param in ext_cmd.params:
+                        if count != 0:
+                            funcs += ', '
+                        funcs += param.name
+                        count += 1
+                    funcs += ');\n'
+                    funcs += '    }\n'
+                    if has_return_type:
+                        funcs += '    return VK_SUCCESS;\n'
+
+                elif ext_cmd.handle_type == 'VkInstance':
+                    funcs += '#error("Not implemented. Likely needs to be manually generated!");\n'
+                elif 'DebugUtilsLabel' in ext_cmd.name:
+                    funcs += '    const VkLayerDispatchTable *disp = loader_get_dispatch('
+                    funcs += ext_cmd.params[0].name
+                    funcs += ');\n'
+                    if ext_cmd.ext_name in NULL_CHECK_EXT_NAMES:
+                        funcs += '    if (disp->' + base_name + ' != NULL) {\n'
+                        funcs += '    '
+                    funcs += '    '
+                    if has_return_type:
+                        funcs += 'return '
+                    funcs += 'disp->'
+                    funcs += base_name
+                    funcs += '('
+                    count = 0
+                    for param in ext_cmd.params:
+                        if count != 0:
+                            funcs += ', '
+                        funcs += param.name
+                        count += 1
+                    funcs += ');\n'
+                    if ext_cmd.ext_name in NULL_CHECK_EXT_NAMES:
+                        funcs += '    }\n'
+                elif 'DebugMarkerSetObject' in ext_cmd.name or 'SetDebugUtilsObject' in ext_cmd.name:
+                    funcs += '    uint32_t icd_index = 0;\n'
+                    funcs += '    struct loader_device *dev;\n'
+                    funcs += '    struct loader_icd_term *icd_term = loader_get_icd_and_device(%s, &dev, &icd_index);\n' % (ext_cmd.params[0].name)
+                    funcs += '    if (NULL != icd_term && NULL != icd_term->dispatch.'
+                    funcs += base_name
+                    funcs += ') {\n'
+                    if 'DebugMarkerSetObjectName' in ext_cmd.name:
+                        funcs += '        VkDebugMarkerObjectNameInfoEXT local_name_info;\n'
+                        funcs += '        memcpy(&local_name_info, pNameInfo, sizeof(VkDebugMarkerObjectNameInfoEXT));\n'
+                        funcs += '        // If this is a physical device, we have to replace it with the proper one for the next call.\n'
+                        funcs += '        if (pNameInfo->objectType == VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT) {\n'
+                        funcs += '            struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)(uintptr_t)pNameInfo->object;\n'
+                        funcs += '            local_name_info.object = (uint64_t)(uintptr_t)phys_dev_term->phys_dev;\n'
+                        funcs += '        // If this is a KHR_surface, and the ICD has created its own, we have to replace it with the proper one for the next call.\n'
+                        funcs += '        } else if (pNameInfo->objectType == VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT) {\n'
+                        funcs += '            if (NULL != icd_term && NULL != icd_term->dispatch.CreateSwapchainKHR) {\n'
+                        funcs += '                VkIcdSurface *icd_surface = (VkIcdSurface *)(uintptr_t)pNameInfo->object;\n'
+                        funcs += '                if (NULL != icd_surface->real_icd_surfaces) {\n'
+                        funcs += '                    local_name_info.object = (uint64_t)icd_surface->real_icd_surfaces[icd_index];\n'
+                        funcs += '                }\n'
+                        funcs += '            }\n'
+                        funcs += '        }\n'
+                    elif 'DebugMarkerSetObjectTag' in ext_cmd.name:
+                        funcs += '        VkDebugMarkerObjectTagInfoEXT local_tag_info;\n'
+                        funcs += '        memcpy(&local_tag_info, pTagInfo, sizeof(VkDebugMarkerObjectTagInfoEXT));\n'
+                        funcs += '        // If this is a physical device, we have to replace it with the proper one for the next call.\n'
+                        funcs += '        if (pTagInfo->objectType == VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT) {\n'
+                        funcs += '            struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)(uintptr_t)pTagInfo->object;\n'
+                        funcs += '            local_tag_info.object = (uint64_t)(uintptr_t)phys_dev_term->phys_dev;\n'
+                        funcs += '        // If this is a KHR_surface, and the ICD has created its own, we have to replace it with the proper one for the next call.\n'
+                        funcs += '        } else if (pTagInfo->objectType == VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT) {\n'
+                        funcs += '            if (NULL != icd_term && NULL != icd_term->dispatch.CreateSwapchainKHR) {\n'
+                        funcs += '                VkIcdSurface *icd_surface = (VkIcdSurface *)(uintptr_t)pTagInfo->object;\n'
+                        funcs += '                if (NULL != icd_surface->real_icd_surfaces) {\n'
+                        funcs += '                    local_tag_info.object = (uint64_t)icd_surface->real_icd_surfaces[icd_index];\n'
+                        funcs += '                }\n'
+                        funcs += '            }\n'
+                        funcs += '        }\n'
+                    elif 'SetDebugUtilsObjectName' in ext_cmd.name:
+                        funcs += '        VkDebugUtilsObjectNameInfoEXT local_name_info;\n'
+                        funcs += '        memcpy(&local_name_info, pNameInfo, sizeof(VkDebugUtilsObjectNameInfoEXT));\n'
+                        funcs += '        // If this is a physical device, we have to replace it with the proper one for the next call.\n'
+                        funcs += '        if (pNameInfo->objectType == VK_OBJECT_TYPE_PHYSICAL_DEVICE) {\n'
+                        funcs += '            struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)(uintptr_t)pNameInfo->objectHandle;\n'
+                        funcs += '            local_name_info.objectHandle = (uint64_t)(uintptr_t)phys_dev_term->phys_dev;\n'
+                        funcs += '        // If this is a KHR_surface, and the ICD has created its own, we have to replace it with the proper one for the next call.\n'
+                        funcs += '        } else if (pNameInfo->objectType == VK_OBJECT_TYPE_SURFACE_KHR) {\n'
+                        funcs += '            if (NULL != icd_term && NULL != icd_term->dispatch.CreateSwapchainKHR) {\n'
+                        funcs += '                VkIcdSurface *icd_surface = (VkIcdSurface *)(uintptr_t)pNameInfo->objectHandle;\n'
+                        funcs += '                if (NULL != icd_surface->real_icd_surfaces) {\n'
+                        funcs += '                    local_name_info.objectHandle = (uint64_t)icd_surface->real_icd_surfaces[icd_index];\n'
+                        funcs += '                }\n'
+                        funcs += '            }\n'
+                        funcs += '        }\n'
+                    elif 'SetDebugUtilsObjectTag' in ext_cmd.name:
+                        funcs += '        VkDebugUtilsObjectTagInfoEXT local_tag_info;\n'
+                        funcs += '        memcpy(&local_tag_info, pTagInfo, sizeof(VkDebugUtilsObjectTagInfoEXT));\n'
+                        funcs += '        // If this is a physical device, we have to replace it with the proper one for the next call.\n'
+                        funcs += '        if (pTagInfo->objectType == VK_OBJECT_TYPE_PHYSICAL_DEVICE) {\n'
+                        funcs += '            struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)(uintptr_t)pTagInfo->objectHandle;\n'
+                        funcs += '            local_tag_info.objectHandle = (uint64_t)(uintptr_t)phys_dev_term->phys_dev;\n'
+                        funcs += '        // If this is a KHR_surface, and the ICD has created its own, we have to replace it with the proper one for the next call.\n'
+                        funcs += '        } else if (pTagInfo->objectType == VK_OBJECT_TYPE_SURFACE_KHR) {\n'
+                        funcs += '            if (NULL != icd_term && NULL != icd_term->dispatch.CreateSwapchainKHR) {\n'
+                        funcs += '                VkIcdSurface *icd_surface = (VkIcdSurface *)(uintptr_t)pTagInfo->objectHandle;\n'
+                        funcs += '                if (NULL != icd_surface->real_icd_surfaces) {\n'
+                        funcs += '                    local_tag_info.objectHandle = (uint64_t)icd_surface->real_icd_surfaces[icd_index];\n'
+                        funcs += '                }\n'
+                        funcs += '            }\n'
+                        funcs += '        }\n'
+                    funcs += '        '
+                    if has_return_type:
+                        funcs += 'return '
+                    funcs += 'icd_term->dispatch.'
+                    funcs += base_name
+                    funcs += '('
+                    count = 0
+                    for param in ext_cmd.params:
+                        if count != 0:
+                            funcs += ', '
+
+                        if param.type == 'VkPhysicalDevice':
+                            funcs += 'phys_dev_term->phys_dev'
+                        elif param.type == 'VkSurfaceKHR':
+                            funcs += 'icd_surface->real_icd_surfaces[icd_index]'
+                        elif ('DebugMarkerSetObject' in ext_cmd.name or 'SetDebugUtilsObject' in ext_cmd.name) and param.name == 'pNameInfo':
+                            funcs += '&local_name_info'
+                        elif ('DebugMarkerSetObject' in ext_cmd.name or 'SetDebugUtilsObject' in ext_cmd.name) and param.name == 'pTagInfo':
+                            funcs += '&local_tag_info'
+                        else:
+                            funcs += param.name
+                        count += 1
+
+                    funcs += ');\n'
+                    if has_return_type:
+                        funcs += '    } else {\n'
+                        funcs += '        return VK_SUCCESS;\n'
+                    funcs += '    }\n'
+
+                else:
+                    funcs += '#error("Unknown error path!");\n'
+
+                funcs += '}\n\n'
+            else:
+                funcs += tramp_header
+
+                funcs += '    const VkLayerDispatchTable *disp = loader_get_dispatch('
+                funcs += ext_cmd.params[0].name
+                funcs += ');\n'
+
+                if ext_cmd.ext_name in NULL_CHECK_EXT_NAMES:
+                    funcs += '    if (disp->' + base_name + ' != NULL) {\n'
+                funcs += return_prefix
+                funcs += 'disp->'
+                funcs += base_name
+                funcs += '('
+                count = 0
+                for param in ext_cmd.params:
+                    if count != 0:
+                        funcs += ', '
+                    funcs += param.name
+                    count += 1
+                funcs += ');\n'
+                if ext_cmd.ext_name in NULL_CHECK_EXT_NAMES:
+                    if ext_cmd.return_type is not None:
+                        funcs += '    } else {\n'
+                        funcs += '        return VK_SUCCESS;\n'
+                    funcs += '    }\n'
+                funcs += '}\n\n'
+
+            if ext_cmd.protect is not None:
+                funcs += '#endif // %s\n' % ext_cmd.protect
+
+        return funcs
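
A minimal Python sketch, as a reading aid only, of the parameter substitution the loop above performs when it emits a terminator whose signature contains a VkSurfaceKHR; the parameter list here is an invented placeholder, not one taken from the registry:

    # Hypothetical parameter list; mirrors the VkSurfaceKHR unwrapping above.
    params = [('VkDevice', 'device'), ('VkSurfaceKHR', 'surface'), ('uint32_t *', 'pCount')]
    arg_list = ', '.join('icd_surface->real_icd_surfaces[icd_index]' if ptype == 'VkSurfaceKHR' else pname
                         for ptype, pname in params)
    # arg_list == 'device, icd_surface->real_icd_surfaces[icd_index], pCount'
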
+
+
+    #
+    # Create a function for the extension GPA call
+    def InstExtensionGPA(self):
+        entries = []
+        gpa_func = ''
+        cur_extension_name = ''
+
+        gpa_func += '// GPA helpers for extensions\n'
+        gpa_func += 'bool extension_instance_gpa(struct loader_instance *ptr_instance, const char *name, void **addr) {\n'
+        gpa_func += '    *addr = NULL;\n\n'
+
+        for cur_cmd in self.ext_commands:
+            if ('VK_VERSION_' in cur_cmd.ext_name or
+                cur_cmd.ext_name in WSI_EXT_NAMES or
+                cur_cmd.ext_name in AVOID_EXT_NAMES or
+                cur_cmd.name in AVOID_CMD_NAMES ):
+                continue
+
+            if cur_cmd.ext_name != cur_extension_name:
+                gpa_func += '\n    // ---- %s extension commands\n' % cur_cmd.ext_name
+                cur_extension_name = cur_cmd.ext_name
+
+            if cur_cmd.protect is not None:
+                gpa_func += '#ifdef %s\n' % cur_cmd.protect
+
+            #base_name = cur_cmd.name[2:]
+            base_name = ALIASED_CMDS[cur_cmd.name] if cur_cmd.name in ALIASED_CMDS else cur_cmd.name[2:]
+
+            if (cur_cmd.ext_type == 'instance'):
+                gpa_func += '    if (!strcmp("%s", name)) {\n' % (cur_cmd.name)
+                gpa_func += '        *addr = (ptr_instance->enabled_known_extensions.'
+                gpa_func += cur_cmd.ext_name[3:].lower()
+                gpa_func += ' == 1)\n'
+                gpa_func += '                     ? (void *)%s\n' % (base_name)
+                gpa_func += '                     : NULL;\n'
+                gpa_func += '        return true;\n'
+                gpa_func += '    }\n'
+            else:
+                gpa_func += '    if (!strcmp("%s", name)) {\n' % (cur_cmd.name)
+                gpa_func += '        *addr = (void *)%s;\n' % (base_name)
+                gpa_func += '        return true;\n'
+                gpa_func += '    }\n'
+
+            if cur_cmd.protect is not None:
+                gpa_func += '#endif // %s\n' % cur_cmd.protect
+
+        gpa_func += '    return false;\n'
+        gpa_func += '}\n\n'
+
+        return gpa_func
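
For orientation, a hedged sketch of the C branch the loop above assembles for a single instance-level command; the extension and command names (VK_EXT_example, vkExampleEXT) are invented placeholders used only to show the emitted shape:

    # Hypothetical names; shows one strcmp branch as built by InstExtensionGPA.
    cmd_name, ext_name = 'vkExampleEXT', 'VK_EXT_example'
    base_name = cmd_name[2:]                 # 'ExampleEXT'
    branch  = '    if (!strcmp("%s", name)) {\n' % cmd_name
    branch += '        *addr = (ptr_instance->enabled_known_extensions.%s == 1)\n' % ext_name[3:].lower()
    branch += '                     ? (void *)%s\n' % base_name
    branch += '                     : NULL;\n'
    branch += '        return true;\n'
    branch += '    }\n'
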
+
+    #
+    # Create the extension name init function
+    def InstantExtensionCreate(self):
+        entries = []
+        entries = self.instanceExtensions
+        count = 0
+        cur_extension_name = ''
+
+        create_func = ''
+        create_func += '// A function that can be used to query enabled extensions during a vkCreateInstance call\n'
+        create_func += 'void extensions_create_instance(struct loader_instance *ptr_instance, const VkInstanceCreateInfo *pCreateInfo) {\n'
+        create_func += '    for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {\n'
+        for ext in entries:
+            if ('VK_VERSION_' in ext.name or ext.name in WSI_EXT_NAMES or
+                ext.name in AVOID_EXT_NAMES or ext.name in AVOID_CMD_NAMES or
+                ext.type == 'device' or ext.num_commands == 0):
+                continue
+
+            if ext.name != cur_extension_name:
+                create_func += '\n    // ---- %s extension commands\n' % ext.name
+                cur_extension_name = ext.name
+
+            if ext.protect is not None:
+                create_func += '#ifdef %s\n' % ext.protect
+            if count == 0:
+                create_func += '        if (0 == strcmp(pCreateInfo->ppEnabledExtensionNames[i], '
+            else:
+                create_func += '        } else if (0 == strcmp(pCreateInfo->ppEnabledExtensionNames[i], '
+
+            create_func += ext.define + ')) {\n'
+            create_func += '            ptr_instance->enabled_known_extensions.'
+            create_func += ext.name[3:].lower()
+            create_func += ' = 1;\n'
+
+            if ext.protect is not None:
+                create_func += '#endif // %s\n' % ext.protect
+            count += 1
+
+        create_func += '        }\n'
+        create_func += '    }\n'
+        create_func += '}\n\n'
+        return create_func
+
+    #
+    # Create the lookup that returns terminators for device extension commands
+    # the loader must intercept, and return it as a string
+    def DeviceExtensionGetTerminator(self):
+        term_func = ''
+        cur_extension_name = ''
+
+        term_func += '// Some device commands still need a terminator because the loader needs to unwrap something about them.\n'
+        term_func += '// In many cases, the item needing unwrapping is a VkPhysicalDevice or VkSurfaceKHR object.  But there may be other items\n'
+        term_func += '// in the future.\n'
+        term_func += 'PFN_vkVoidFunction get_extension_device_proc_terminator(struct loader_device *dev, const char *pName) {\n'
+        term_func += '    PFN_vkVoidFunction addr = NULL;\n'
+
+        count = 0
+        is_extension = False
+        last_protect = None
+        for ext_cmd in self.ext_commands:
+            if ext_cmd.name in DEVICE_CMDS_NEED_TERM:
+                if ext_cmd.ext_name != cur_extension_name:
+                    if count > 0:
+                        count = 0
+                        term_func += '        }\n'
+                    if is_extension:
+                        term_func += '    }\n'
+                        is_extension = False
+
+                    if 'VK_VERSION_' in ext_cmd.ext_name:
+                        term_func += '\n    // ---- Core %s commands\n' % ext_cmd.ext_name[11:]
+                    else:
+                        last_protect = ext_cmd.protect
+                        if ext_cmd.protect is not None:
+                            term_func += '#ifdef %s\n' % ext_cmd.protect
+                        term_func += '\n    // ---- %s extension commands\n' % ext_cmd.ext_name
+                        if ext_cmd.require:
+                            term_func += '    if (dev->extensions.%s_enabled && dev->extensions.%s_enabled) {\n' % (ext_cmd.ext_name[3:].lower(), ext_cmd.require[3:].lower())
+                        else:
+                            term_func += '    if (dev->extensions.%s_enabled) {\n' % ext_cmd.ext_name[3:].lower()
+                        is_extension = True
+                    cur_extension_name = ext_cmd.ext_name
+
+                if count == 0:
+                    term_func += '        if'
+                else:
+                    term_func += '        } else if'
+
+                term_func += '(!strcmp(pName, "%s")) {\n' % (ext_cmd.name)
+                term_func += '            addr = (PFN_vkVoidFunction)terminator_%s;\n' % (ext_cmd.name[2:])
+
+
+                count += 1
+
+        if count > 0:
+            term_func += '        }\n'
+        if is_extension:
+            term_func += '    }\n'
+            if last_protect is not None:
+                term_func += '#endif // %s\n' % last_protect
+
+        term_func += '    return addr;\n'
+        term_func += '}\n\n'
+
+        return term_func
+
+    #
+    # Create code to initialize a dispatch table from the appropriate list of
+    # core and extension entrypoints and return it as a string
+    def InitInstLoaderExtensionDispatchTable(self):
+        commands = []
+        table = ''
+        cur_extension_name = ''
+
+        table += '// This table contains the loader\'s instance dispatch table, which contains\n'
+        table += '// default functions if no instance layers are activated.  This contains\n'
+        table += '// pointers to "terminator functions".\n'
+        table += 'const VkLayerInstanceDispatchTable instance_disp = {\n'
+
+        for x in range(0, 2):
+            if x == 0:
+                commands = self.core_commands
+            else:
+                commands = self.ext_commands
+
+            for cur_cmd in commands:
+
+                if cur_cmd.handle_type == 'VkInstance' or cur_cmd.handle_type == 'VkPhysicalDevice':
+                    if cur_cmd.ext_name != cur_extension_name:
+                        if 'VK_VERSION_' in cur_cmd.ext_name:
+                            table += '\n    // ---- Core %s commands\n' % cur_cmd.ext_name[11:]
+                        else:
+                            table += '\n    // ---- %s extension commands\n' % cur_cmd.ext_name
+                        cur_extension_name = cur_cmd.ext_name
+
+                    # Remove 'vk' from proto name
+                    base_name = cur_cmd.name[2:]
+                    aliased_name = ALIASED_CMDS[cur_cmd.name][2:] if cur_cmd.name in ALIASED_CMDS else base_name
+
+                    if (base_name == 'CreateInstance' or base_name == 'CreateDevice' or
+                        base_name == 'EnumerateInstanceExtensionProperties' or
+                        base_name == 'EnumerateInstanceLayerProperties' or
+                        base_name == 'EnumerateInstanceVersion'):
+                        continue
+
+                    if cur_cmd.protect is not None:
+                        table += '#ifdef %s\n' % cur_cmd.protect
+
+                    if base_name == 'GetInstanceProcAddr':
+                        table += '    .%s = %s,\n' % (base_name, cur_cmd.name)
+                    else:
+                        table += '    .%s = terminator_%s,\n' % (base_name, aliased_name)
+
+                    if cur_cmd.protect is not None:
+                        table += '#endif // %s\n' % cur_cmd.protect
+        table += '};\n\n'
+
+        return table
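
A small illustration of one designated-initializer line emitted above; the command name below is a placeholder rather than an entry from the registry:

    # Hypothetical command name, shown only for the formatting.
    base_name = aliased_name = 'ExamplePhysicalDeviceQuery'
    line = '    .%s = terminator_%s,\n' % (base_name, aliased_name)
    # line == '    .ExamplePhysicalDeviceQuery = terminator_ExamplePhysicalDeviceQuery,\n'
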
+
+    #
+    # Create the extension name whitelist array
+    def OutputInstantExtensionWhitelistArray(self):
+        extensions = self.instanceExtensions
+
+        table = ''
+        table += '// A null-terminated list of all of the instance extensions supported by the loader.\n'
+        table += '// If an instance extension name is not in this list, but it is exported by one or more of the\n'
+        table += '// ICDs detected by the loader, then the extension name not in the list will be filtered out\n'
+        table += '// before passing the list of extensions to the application.\n'
+        table += 'const char *const LOADER_INSTANCE_EXTENSIONS[] = {\n'
+        for ext in extensions:
+            if ext.type == 'device' or 'VK_VERSION_' in ext.name:
+                continue
+
+            if ext.protect is not None:
+                table += '#ifdef %s\n' % ext.protect
+            table += '                                                  '
+            table += ext.define + ',\n'
+
+            if ext.protect is not None:
+                table += '#endif // %s\n' % ext.protect
+        table += '                                                  NULL };\n'
+        return table
+
diff --git a/src/third_party/vulkan-loader/src/scripts/loader_genvk.py b/src/third_party/vulkan-loader/src/scripts/loader_genvk.py
new file mode 100644
index 0000000..50e7a2c
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/scripts/loader_genvk.py
@@ -0,0 +1,400 @@
+#!/usr/bin/python3
+#
+# Copyright (c) 2013-2019 The Khronos Group Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse, cProfile, pdb, string, sys, time, os
+
+# Simple timer functions
+startTime = None
+
+def startTimer(timeit):
+    global startTime
+    if timeit:
+        startTime = time.process_time()
+
+def endTimer(timeit, msg):
+    global startTime
+    if timeit:
+        endTime = time.process_time()
+        write(msg, endTime - startTime, file=sys.stderr)
+        startTime = None
+
+# Turn a list of strings into a regexp string matching exactly those strings
+def makeREstring(list, default = None):
+    if len(list) > 0 or default is None:
+        return '^(' + '|'.join(list) + ')$'
+    else:
+        return default
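
For example, with illustrative inputs, the exact-match pattern this helper builds from a list of names:

    # Illustrative: two extension names collapse into one anchored alternation.
    pattern = makeREstring(['VK_KHR_surface', 'VK_KHR_swapchain'])
    # pattern == '^(VK_KHR_surface|VK_KHR_swapchain)$'
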
+
+# Returns a dictionary of [ generator function, generator options ] indexed
+# by specified short names. The generator options incorporate the following
+# parameters:
+#
+# args is a parsed argument object; see below for the fields that are used.
+def makeGenOpts(args):
+    global genOpts
+    genOpts = {}
+
+    # Default class of extensions to include, or None
+    defaultExtensions = args.defaultExtensions
+
+    # Additional extensions to include (list of extensions)
+    extensions = args.extension
+
+    # Extensions to remove (list of extensions)
+    removeExtensions = args.removeExtensions
+
+    # Extensions to emit (list of extensions)
+    emitExtensions = args.emitExtensions
+
+    # Features to include (list of features)
+    features = args.feature
+
+    # Whether to disable inclusion protect in headers
+    protect = args.protect
+
+    # Output target directory
+    directory = args.directory
+
+    # Descriptive names for various regexp patterns used to select
+    # versions and extensions
+    allFeatures     = allExtensions = '.*'
+    noFeatures      = noExtensions = None
+
+    # Turn lists of names/patterns into matching regular expressions
+    addExtensionsPat     = makeREstring(extensions, None)
+    removeExtensionsPat  = makeREstring(removeExtensions, None)
+    emitExtensionsPat    = makeREstring(emitExtensions, allExtensions)
+    featuresPat          = makeREstring(features, allFeatures)
+
+    # Copyright text prefixing all headers (list of strings).
+    prefixStrings = [
+        '/*',
+        '** Copyright (c) 2015-2019 The Khronos Group Inc.',
+        '**',
+        '** Licensed under the Apache License, Version 2.0 (the "License");',
+        '** you may not use this file except in compliance with the License.',
+        '** You may obtain a copy of the License at',
+        '**',
+        '**     http://www.apache.org/licenses/LICENSE-2.0',
+        '**',
+        '** Unless required by applicable law or agreed to in writing, software',
+        '** distributed under the License is distributed on an "AS IS" BASIS,',
+        '** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.',
+        '** See the License for the specific language governing permissions and',
+        '** limitations under the License.',
+        '*/',
+        ''
+    ]
+
+    # Text specific to Vulkan headers
+    vkPrefixStrings = [
+        '/*',
+        '** This header is generated from the Khronos Vulkan XML API Registry.',
+        '**',
+        '*/',
+        ''
+    ]
+
+    # Defaults for generating re-inclusion protection wrappers (or not)
+    protectFeature = protect
+
+    # An API style conventions object
+    conventions = VulkanConventions()
+
+    # Loader Generators
+    # Options for dispatch table helper generator
+    genOpts['vk_dispatch_table_helper.h'] = [
+          DispatchTableHelperOutputGenerator,
+          DispatchTableHelperOutputGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'vk_dispatch_table_helper.h',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = 'vulkan',
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48,
+            expandEnumerants = False)
+        ]
+
+    # Options for Layer dispatch table generator
+    genOpts['vk_layer_dispatch_table.h'] = [
+          LoaderExtensionOutputGenerator,
+          LoaderExtensionGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'vk_layer_dispatch_table.h',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = 'vulkan',
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48,
+            expandEnumerants = False)
+        ]
+
+    # Options for loader extension source generator
+    genOpts['vk_loader_extensions.h'] = [
+          LoaderExtensionOutputGenerator,
+          LoaderExtensionGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'vk_loader_extensions.h',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = 'vulkan',
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48,
+            expandEnumerants = False)
+        ]
+
+    # Options for loader extension source generator
+    genOpts['vk_loader_extensions.c'] = [
+          LoaderExtensionOutputGenerator,
+          LoaderExtensionGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'vk_loader_extensions.c',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = 'vulkan',
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48,
+            expandEnumerants = False)
+        ]
+
+    # Helper file generator options for vk_object_types.h
+    genOpts['vk_object_types.h'] = [
+          HelperFileOutputGenerator,
+          HelperFileOutputGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'vk_object_types.h',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = 'vulkan',
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48,
+            expandEnumerants  = False,
+            helper_file_type  = 'object_types_header')
+        ]
+
+# Generate a target based on the options in the matching genOpts{} object.
+# This is encapsulated in a function so it can be profiled and/or timed.
+# The args parameter is a parsed argument object containing the following
+# fields that are used:
+#   target - target to generate
+#   directory - directory to generate it in
+#   protect - True if re-inclusion wrappers should be created
+#   extensions - list of additional extensions to include in generated
+#   interfaces
+def genTarget(args):
+    global genOpts
+
+    # Create generator options with specified parameters
+    makeGenOpts(args)
+
+    if (args.target in genOpts.keys()):
+        createGenerator = genOpts[args.target][0]
+        options = genOpts[args.target][1]
+
+        if not args.quiet:
+            write('* Building', options.filename, file=sys.stderr)
+            write('* options.versions          =', options.versions, file=sys.stderr)
+            write('* options.emitversions      =', options.emitversions, file=sys.stderr)
+            write('* options.defaultExtensions =', options.defaultExtensions, file=sys.stderr)
+            write('* options.addExtensions     =', options.addExtensions, file=sys.stderr)
+            write('* options.removeExtensions  =', options.removeExtensions, file=sys.stderr)
+            write('* options.emitExtensions    =', options.emitExtensions, file=sys.stderr)
+
+        startTimer(args.time)
+        gen = createGenerator(errFile=errWarn,
+                              warnFile=errWarn,
+                              diagFile=diag)
+        reg.setGenerator(gen)
+        reg.apiGen(options)
+
+        if not args.quiet:
+            write('* Generated', options.filename, file=sys.stderr)
+        endTimer(args.time, '* Time to generate ' + options.filename + ' =')
+    else:
+        write('No generator options for unknown target:',
+              args.target, file=sys.stderr)
+
+# -feature name
+# -extension name
+# For both, "name" may be a single name, or a space-separated list
+# of names, or a regular expression.
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+
+    parser.add_argument('-defaultExtensions', action='store',
+                        default='vulkan',
+                        help='Specify a single class of extensions to add to targets')
+    parser.add_argument('-extension', action='append',
+                        default=[],
+                        help='Specify an extension or extensions to add to targets')
+    parser.add_argument('-removeExtensions', action='append',
+                        default=[],
+                        help='Specify an extension or extensions to remove from targets')
+    parser.add_argument('-emitExtensions', action='append',
+                        default=[],
+                        help='Specify an extension or extensions to emit in targets')
+    parser.add_argument('-feature', action='append',
+                        default=[],
+                        help='Specify a core API feature name or names to add to targets')
+    parser.add_argument('-debug', action='store_true',
+                        help='Enable debugging')
+    parser.add_argument('-dump', action='store_true',
+                        help='Enable dump to stderr')
+    parser.add_argument('-diagfile', action='store',
+                        default=None,
+                        help='Write diagnostics to specified file')
+    parser.add_argument('-errfile', action='store',
+                        default=None,
+                        help='Write errors and warnings to specified file instead of stderr')
+    parser.add_argument('-noprotect', dest='protect', action='store_false',
+                        help='Disable inclusion protection in output headers')
+    parser.add_argument('-profile', action='store_true',
+                        help='Enable profiling')
+    parser.add_argument('-registry', action='store',
+                        default='vk.xml',
+                        help='Use specified registry file instead of vk.xml')
+    parser.add_argument('-time', action='store_true',
+                        help='Enable timing')
+    parser.add_argument('-validate', action='store_true',
+                        help='Enable group validation')
+    parser.add_argument('-o', action='store', dest='directory',
+                        default='.',
+                        help='Create target and related files in specified directory')
+    parser.add_argument('target', metavar='target', nargs='?',
+                        help='Specify target')
+    parser.add_argument('-quiet', action='store_true', default=True,
+                        help='Suppress script output during normal execution.')
+    parser.add_argument('-verbose', action='store_false', dest='quiet', default=True,
+                        help='Enable script output during normal execution.')
+
+    # This argument tells us where to load the script from the Vulkan-Headers registry
+    parser.add_argument('-scripts', action='store',
+                        help='Find additional scripts in this directory')
+
+    args = parser.parse_args()
+
+    # default scripts path to be same as registry
+    if not args.scripts:
+        args.scripts = os.path.dirname(args.registry)
+        print(args.scripts)
+
+    scripts_dir = os.path.dirname(os.path.abspath(__file__))
+    registry_dir = os.path.join(scripts_dir, args.scripts)
+    sys.path.insert(0, registry_dir)
+
+    # The imports need to be done here so that they can be picked up from Vulkan-Headers
+    from reg import *
+    from generator import write
+    from cgenerator import CGeneratorOptions, COutputGenerator
+
+    from dispatch_table_helper_generator import DispatchTableHelperOutputGenerator, DispatchTableHelperOutputGeneratorOptions
+    from helper_file_generator import HelperFileOutputGenerator, HelperFileOutputGeneratorOptions
+    from loader_extension_generator import LoaderExtensionOutputGenerator, LoaderExtensionGeneratorOptions
+    # Temporary workaround for vkconventions python2 compatibility
+    import abc; abc.ABC = abc.ABCMeta('ABC', (object,), {})
+    from vkconventions import VulkanConventions
+
+    # This splits arguments which are space-separated lists
+    args.feature = [name for arg in args.feature for name in arg.split()]
+    args.extension = [name for arg in args.extension for name in arg.split()]
+
+    # Load & parse registry
+    reg = Registry()
+
+    startTimer(args.time)
+    tree = etree.parse(args.registry)
+    endTimer(args.time, '* Time to make ElementTree =')
+
+    if args.debug:
+        pdb.run('reg.loadElementTree(tree)')
+    else:
+        startTimer(args.time)
+        reg.loadElementTree(tree)
+        endTimer(args.time, '* Time to parse ElementTree =')
+
+    if (args.validate):
+        reg.validateGroups()
+
+    if (args.dump):
+        write('* Dumping registry to regdump.txt', file=sys.stderr)
+        reg.dumpReg(filehandle = open('regdump.txt', 'w', encoding='utf-8'))
+
+    # create error/warning & diagnostic files
+    if (args.errfile):
+        errWarn = open(args.errfile, 'w', encoding='utf-8')
+    else:
+        errWarn = sys.stderr
+
+    if (args.diagfile):
+        diag = open(args.diagfile, 'w', encoding='utf-8')
+    else:
+        diag = None
+
+    if (args.debug):
+        pdb.run('genTarget(args)')
+    elif (args.profile):
+        import cProfile, pstats
+        cProfile.run('genTarget(args)', 'profile.txt')
+        p = pstats.Stats('profile.txt')
+        p.strip_dirs().sort_stats('time').print_stats(50)
+    else:
+        genTarget(args)
diff --git a/src/third_party/vulkan-loader/src/scripts/update_deps.py b/src/third_party/vulkan-loader/src/scripts/update_deps.py
new file mode 100755
index 0000000..f1fe36d
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/scripts/update_deps.py
@@ -0,0 +1,679 @@
+#!/usr/bin/env python
+
+# Copyright 2017 The Glslang Authors. All rights reserved.
+# Copyright (c) 2018 Valve Corporation
+# Copyright (c) 2018 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This script was heavily leveraged from KhronosGroup/glslang
+# update_glslang_sources.py.
+"""update_deps.py
+
+Get and build dependent repositories using known-good commits.
+
+Purpose
+-------
+
+This program is intended to assist a developer of this repository
+(the "home" repository) by gathering and building the repositories that
+this home repository depends on.  It also checks out each dependent
+repository at a "known-good" commit in order to provide stability in
+the dependent repositories.
+
+Python Compatibility
+--------------------
+
+This program can be used with Python 2.7 and Python 3.
+
+Known-Good JSON Database
+------------------------
+
+This program expects to find a file named "known-good.json" in the
+same directory as the program file.  This JSON file is tailored for
+the needs of the home repository by including its dependent repositories.
+
+Program Options
+---------------
+
+See the help text (update_deps.py --help) for a complete list of options.
+
+Program Operation
+-----------------
+
+The program uses the user's current directory at the time of program
+invocation as the location for fetching and building the dependent
+repositories.  The user can override this by using the "--dir" option.
+
+For example, a directory named "build" in the repository's root directory
+is a good place to put the dependent repositories because that directory
+is not tracked by Git. (See the .gitignore file.)  The "external" directory
+may also be a suitable location.
+A user can issue:
+
+$ cd My-Repo
+$ mkdir build
+$ cd build
+$ ../scripts/update_deps.py
+
+or, to do the same thing, but using the --dir option:
+
+$ cd My-Repo
+$ mkdir build
+$ scripts/update_deps.py --dir=build
+
+With these commands, the "build" directory is considered the "top"
+directory where the program clones the dependent repositories.  The
+JSON file configures the build and install working directories to be
+within this "top" directory.
+
+Note that the "dir" option can also specify an absolute path:
+
+$ cd My-Repo
+$ scripts/update_deps.py --dir=/tmp/deps
+
+The "top" dir is then /tmp/deps (Linux filesystem example) and is
+where this program will clone and build the dependent repositories.
+
+Helper CMake Config File
+------------------------
+
+When the program finishes building the dependencies, it writes a file
+named "helper.cmake" to the "top" directory that contains CMake commands
+for setting CMake variables for locating the dependent repositories.
+This helper file can be used to set up the CMake build files for this
+"home" repository.
+
+A complete sequence might look like:
+
+$ git clone git@github.com:My-Group/My-Repo.git
+$ cd My-Repo
+$ mkdir build
+$ cd build
+$ ../scripts/update_deps.py
+$ cmake -C helper.cmake ..
+$ cmake --build .
+
+JSON File Schema
+----------------
+
+There's no formal schema for the "known-good" JSON file, but here is
+a description of its elements.  All elements are required except those
+marked as optional.  Please see the "known_good.json" file for
+examples of all of these elements.
+
+- name
+
+The name of the dependent repository.  This field can be referenced
+by the "deps.repo_name" structure to record a dependency.
+
+- url
+
+Specifies the URL of the repository.
+Example: https://github.com/KhronosGroup/Vulkan-Loader.git
+
+- sub_dir
+
+The directory where the program clones the repository, relative to
+the "top" directory.
+
+- build_dir
+
+The directory used to build the repository, relative to the "top"
+directory.
+
+- install_dir
+
+The directory used to store the installed build artifacts, relative
+to the "top" directory.
+
+- commit
+
+The commit used to checkout the repository.  This can be a SHA-1
+object name or a refname used with the remote name "origin".
+For example, this field can be set to "origin/sdk-1.1.77" to
+select the end of the sdk-1.1.77 branch.
+
+- deps (optional)
+
+An array of pairs consisting of a CMake variable name and a
+repository name to specify a dependent repo and a "link" to
+that repo's install artifacts.  For example:
+
+"deps" : [
+    {
+        "var_name" : "VULKAN_HEADERS_INSTALL_DIR",
+        "repo_name" : "Vulkan-Headers"
+    }
+]
+
+which represents that this repository depends on the Vulkan-Headers
+repository and uses the VULKAN_HEADERS_INSTALL_DIR CMake variable to
+specify the location where it expects to find the Vulkan-Headers install
+directory.
+Note that the "repo_name" element must match the "name" element of some
+other repository in the JSON file.
+
+- prebuild (optional)
+- prebuild_linux (optional)  (For Linux and MacOS)
+- prebuild_windows (optional)
+
+A list of commands to execute before building a dependent repository.
+This is useful for repositories that require the execution of some
+sort of "update" script or need to clone an auxiliary repository like
+googletest.
+
+The commands listed in "prebuild" are executed first, and then the
+commands for the specific platform are executed.
+
+- custom_build (optional)
+
+A list of commands to execute as a custom build instead of using
+the built in CMake way of building. Requires "build_step" to be
+set to "custom"
+
+You can insert the following keywords into the commands listed in
+"custom_build" if they require runtime information (like whether the
+build config is "Debug" or "Release").
+
+Keywords:
+{0} reference to a dictionary of repos and their attributes
+{1} reference to the command line arguments set before start
+{2} reference to the CONFIG_MAP value of config.
+
+Example:
+{2} returns the CONFIG_MAP value of config e.g. debug -> Debug
+{1}.config returns the config variable set when you ran update_deps.py
+{0}[Vulkan-Headers][repo_root] returns the repo_root variable from
+                                   the Vulkan-Headers GoodRepo object.
+
+- cmake_options (optional)
+
+A list of options to pass to CMake during the generation phase.
+
+- ci_only (optional)
+
+A list of environment variables where one must be set to "true"
+(case-insensitive) in order for this repo to be fetched and built.
+This list can be used to specify repos that should be built only in CI.
+Typically, this list might contain "TRAVIS" and/or "APPVEYOR" because
+each of these CI systems sets an environment variable with its own
+name to "true".  Note that this could also be (ab)used to control
+the processing of the repo with any environment variable.  The default
+is an empty list, which means that the repo is always processed.
+
+- build_step (optional)
+
+Specifies if the dependent repository should be built or not. This can
+have a value of 'build', 'custom', or 'skip'. The dependent repositories are
+built by default.
+
+- build_platforms (optional)
+
+A list of platforms the repository will be built on.
+Legal options include:
+"windows"
+"linux"
+"darwin"
+
+Builds on all platforms by default.
+
+Note
+----
+
+The "sub_dir", "build_dir", and "install_dir" elements are all relative
+to the effective "top" directory.  Specifying absolute paths is not
+supported.  However, the "top" directory specified with the "--dir"
+option can be a relative or absolute path.
+
+"""
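
To make the schema described above concrete, a minimal hypothetical known_good.json entry, expressed as the Python structure the script would load; the name, URL, commit, and directories are placeholders, not values shipped with this repository:

    # Hypothetical known_good.json content after json.loads().
    example_known_good = {
        'repos': [
            {
                'name': 'Vulkan-Headers',
                'url': 'https://github.com/KhronosGroup/Vulkan-Headers.git',
                'sub_dir': 'Vulkan-Headers',
                'build_dir': 'Vulkan-Headers/build',
                'install_dir': 'Vulkan-Headers/build/install',
                'commit': 'origin/master'
            }
        ],
        'install_names': {'Vulkan-Headers': 'VULKAN_HEADERS_INSTALL_DIR'}
    }
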
+
+from __future__ import print_function
+
+import argparse
+import json
+import distutils.dir_util
+import os.path
+import subprocess
+import sys
+import platform
+import multiprocessing
+import shlex
+import shutil
+
+KNOWN_GOOD_FILE_NAME = 'known_good.json'
+
+CONFIG_MAP = {
+    'debug': 'Debug',
+    'release': 'Release',
+    'relwithdebinfo': 'RelWithDebInfo',
+    'minsizerel': 'MinSizeRel'
+}
+
+VERBOSE = False
+
+DEVNULL = open(os.devnull, 'wb')
+
+
+def command_output(cmd, directory, fail_ok=False):
+    """Runs a command in a directory and returns its standard output stream.
+
+    Captures the standard error stream and prints it if error.
+
+    Raises a RuntimeError if the command fails to launch or otherwise fails.
+    """
+    if VERBOSE:
+        print('In {d}: {cmd}'.format(d=directory, cmd=cmd))
+    p = subprocess.Popen(
+        cmd, cwd=directory, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    (stdout, stderr) = p.communicate()
+    if p.returncode != 0:
+        print('*** Error ***\nstderr contents:\n{}'.format(stderr))
+        if not fail_ok:
+            raise RuntimeError('Failed to run {} in {}'.format(cmd, directory))
+    if VERBOSE:
+        print(stdout)
+    return stdout
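
A hedged usage sketch; the repository path is a placeholder and assumes the repo has already been cloned:

    # Illustrative: capture the checked-out commit of an existing clone.
    head_sha = command_output(['git', 'rev-parse', 'HEAD'], '/tmp/deps/Vulkan-Headers')
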
+
+class GoodRepo(object):
+    """Represents a repository at a known-good commit."""
+
+    def __init__(self, json, args):
+        """Initializes this good repo object.
+
+        Args:
+        'json':  A fully populated JSON object describing the repo.
+        'args':  Results from ArgumentParser
+        """
+        self._json = json
+        self._args = args
+        # Required JSON elements
+        self.name = json['name']
+        self.url = json['url']
+        self.sub_dir = json['sub_dir']
+        self.commit = json['commit']
+        # Optional JSON elements
+        self.build_dir = None
+        self.install_dir = None
+        if json.get('build_dir'):
+            self.build_dir = os.path.normpath(json['build_dir'])
+        if json.get('install_dir'):
+            self.install_dir = os.path.normpath(json['install_dir'])
+        self.deps = json['deps'] if ('deps' in json) else []
+        self.prebuild = json['prebuild'] if ('prebuild' in json) else []
+        self.prebuild_linux = json['prebuild_linux'] if (
+            'prebuild_linux' in json) else []
+        self.prebuild_windows = json['prebuild_windows'] if (
+            'prebuild_windows' in json) else []
+        self.custom_build = json['custom_build'] if ('custom_build' in json) else []
+        self.cmake_options = json['cmake_options'] if (
+            'cmake_options' in json) else []
+        self.ci_only = json['ci_only'] if ('ci_only' in json) else []
+        self.build_step = json['build_step'] if ('build_step' in json) else 'build'
+        self.build_platforms = json['build_platforms'] if ('build_platforms' in json) else []
+        # Absolute paths for a repo's directories
+        dir_top = os.path.abspath(args.dir)
+        self.repo_dir = os.path.join(dir_top, self.sub_dir)
+        if self.build_dir:
+            self.build_dir = os.path.join(dir_top, self.build_dir)
+        if self.install_dir:
+            self.install_dir = os.path.join(dir_top, self.install_dir)
+        # Check if platform is one to build on
+        self.on_build_platform = False
+        if self.build_platforms == [] or platform.system().lower() in self.build_platforms:
+            self.on_build_platform = True
+
+    def Clone(self):
+        distutils.dir_util.mkpath(self.repo_dir)
+        command_output(['git', 'clone', self.url, '.'], self.repo_dir)
+
+    def Fetch(self):
+        command_output(['git', 'fetch', 'origin'], self.repo_dir)
+
+    def Checkout(self):
+        print('Checking out {n} in {d}'.format(n=self.name, d=self.repo_dir))
+        if self._args.do_clean_repo:
+            shutil.rmtree(self.repo_dir, ignore_errors=True)
+        if not os.path.exists(os.path.join(self.repo_dir, '.git')):
+            self.Clone()
+        self.Fetch()
+        if len(self._args.ref):
+            command_output(['git', 'checkout', self._args.ref], self.repo_dir)
+        else:
+            command_output(['git', 'checkout', self.commit], self.repo_dir)
+        print(command_output(['git', 'status'], self.repo_dir))
+
+    def CustomPreProcess(self, cmd_str, repo_dict):
+        return cmd_str.format(repo_dict, self._args, CONFIG_MAP[self._args.config])
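
A small illustration of the keyword substitution described in the module docstring; the command template below is a placeholder, not a command from known_good.json:

    # Illustrative: {2} expands to the mapped build configuration.
    cmd = 'cmake --build . --config {2}'.format({}, None, CONFIG_MAP['release'])
    # cmd == 'cmake --build . --config Release'
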
+
+    def PreBuild(self):
+        """Execute any prebuild steps from the repo root"""
+        for p in self.prebuild:
+            command_output(shlex.split(p), self.repo_dir)
+        if platform.system() == 'Linux' or platform.system() == 'Darwin':
+            for p in self.prebuild_linux:
+                command_output(shlex.split(p), self.repo_dir)
+        if platform.system() == 'Windows':
+            for p in self.prebuild_windows:
+                command_output(shlex.split(p), self.repo_dir)
+
+    def CustomBuild(self, repo_dict):
+        """Execute any custom_build steps from the repo root"""
+        for p in self.custom_build:
+            cmd = self.CustomPreProcess(p, repo_dict)
+            command_output(shlex.split(cmd), self.repo_dir)
+
+    def CMakeConfig(self, repos):
+        """Build CMake command for the configuration phase and execute it"""
+        if self._args.do_clean_build:
+            shutil.rmtree(self.build_dir)
+        if self._args.do_clean_install:
+            shutil.rmtree(self.install_dir)
+
+        # Create and change to build directory
+        distutils.dir_util.mkpath(self.build_dir)
+        os.chdir(self.build_dir)
+
+        cmake_cmd = [
+            'cmake', self.repo_dir,
+            '-DCMAKE_INSTALL_PREFIX=' + self.install_dir
+        ]
+
+        # For each repo this repo depends on, generate a CMake variable
+        # definitions for "...INSTALL_DIR" that points to that dependent
+        # repo's install dir.
+        for d in self.deps:
+            dep_commit = [r for r in repos if r.name == d['repo_name']]
+            if len(dep_commit):
+                cmake_cmd.append('-D{var_name}={install_dir}'.format(
+                    var_name=d['var_name'],
+                    install_dir=dep_commit[0].install_dir))
+
+        # Add any CMake options
+        for option in self.cmake_options:
+            cmake_cmd.append(option)
+
+        # Set build config for single-configuration generators
+        if platform.system() == 'Linux' or platform.system() == 'Darwin':
+            cmake_cmd.append('-DCMAKE_BUILD_TYPE={config}'.format(
+                config=CONFIG_MAP[self._args.config]))
+
+        # Use the CMake -A option to select the platform architecture
+        # without needing a Visual Studio generator.
+        if platform.system() == 'Windows':
+            if self._args.arch == '64' or self._args.arch == 'x64' or self._args.arch == 'win64':
+                cmake_cmd.append('-A')
+                cmake_cmd.append('x64')
+
+        # Apply a generator, if one is specified.  This can be used to supply
+        # a specific generator for the dependent repositories to match
+        # that of the main repository.
+        if self._args.generator is not None:
+            cmake_cmd.extend(['-G', self._args.generator])
+
+        if VERBOSE:
+            print("CMake command: " + " ".join(cmake_cmd))
+
+        ret_code = subprocess.call(cmake_cmd)
+        if ret_code != 0:
+            sys.exit(ret_code)
+
+    def CMakeBuild(self):
+        """Build CMake command for the build phase and execute it"""
+        cmake_cmd = ['cmake', '--build', self.build_dir, '--target', 'install']
+        if self._args.do_clean:
+            cmake_cmd.append('--clean-first')
+
+        if platform.system() == 'Windows':
+            cmake_cmd.append('--config')
+            cmake_cmd.append(CONFIG_MAP[self._args.config])
+
+        # Speed up the build.
+        if platform.system() == 'Linux' or platform.system() == 'Darwin':
+            cmake_cmd.append('--')
+            num_make_jobs = multiprocessing.cpu_count()
+            env_make_jobs = os.environ.get('MAKE_JOBS', None)
+            if env_make_jobs is not None:
+                try:
+                    num_make_jobs = min(num_make_jobs, int(env_make_jobs))
+                except ValueError:
+                    print('warning: environment variable MAKE_JOBS has non-numeric value "{}".  '
+                          'Using {} (CPU count) instead.'.format(env_make_jobs, num_make_jobs))
+            cmake_cmd.append('-j{}'.format(num_make_jobs))
+        if platform.system() == 'Windows':
+            cmake_cmd.append('--')
+            cmake_cmd.append('/maxcpucount')
+
+        if VERBOSE:
+            print("CMake command: " + " ".join(cmake_cmd))
+
+        ret_code = subprocess.call(cmake_cmd)
+        if ret_code != 0:
+            sys.exit(ret_code)
+
+    def Build(self, repos, repo_dict):
+        """Build the dependent repo"""
+        print('Building {n} in {d}'.format(n=self.name, d=self.repo_dir))
+        print('Build dir = {b}'.format(b=self.build_dir))
+        print('Install dir = {i}\n'.format(i=self.install_dir))
+
+        # Run any prebuild commands
+        self.PreBuild()
+
+        if self.build_step == 'custom':
+            self.CustomBuild(repo_dict)
+            return
+
+        # Build and execute CMake command for creating build files
+        self.CMakeConfig(repos)
+
+        # Build and execute CMake command for the build
+        self.CMakeBuild()
+
+
+def GetGoodRepos(args):
+    """Returns the latest list of GoodRepo objects.
+
+    The known-good file is expected to be in the same
+    directory as this script unless overridden by the 'known_good_dir'
+    parameter.
+    """
+    if args.known_good_dir:
+        known_good_file = os.path.join( os.path.abspath(args.known_good_dir),
+            KNOWN_GOOD_FILE_NAME)
+    else:
+        known_good_file = os.path.join(
+            os.path.dirname(os.path.abspath(__file__)), KNOWN_GOOD_FILE_NAME)
+    with open(known_good_file) as known_good:
+        return [
+            GoodRepo(repo, args)
+            for repo in json.loads(known_good.read())['repos']
+        ]
+
+
+def GetInstallNames(args):
+    """Returns the install names list.
+
+    The known-good file is expected to be in the same
+    directory as this script unless overridden by the 'known_good_dir'
+    parameter.
+    """
+    if args.known_good_dir:
+        known_good_file = os.path.join(os.path.abspath(args.known_good_dir),
+            KNOWN_GOOD_FILE_NAME)
+    else:
+        known_good_file = os.path.join(
+            os.path.dirname(os.path.abspath(__file__)), KNOWN_GOOD_FILE_NAME)
+    with open(known_good_file) as known_good:
+        install_info = json.loads(known_good.read())
+        if install_info.get('install_names'):
+            return install_info['install_names']
+        else:
+            return None
+
+
+def CreateHelper(args, repos, filename):
+    """Create a CMake config helper file.
+
+    The helper file is intended to be used with 'cmake -C <file>'
+    to build this home repo using the dependencies built by this script.
+
+    The install_names dictionary represents the CMake variables used by the
+    home repo to locate the install dirs of the dependent repos.
+    This information is baked into the CMake files of the home repo and so
+    this dictionary is kept with the repo via the json file.
+    """
+    def escape(path):
+        return path.replace('\\', '\\\\')
+    install_names = GetInstallNames(args)
+    with open(filename, 'w') as helper_file:
+        for repo in repos:
+            if install_names and repo.name in install_names and repo.on_build_platform:
+                helper_file.write('set({var} "{dir}" CACHE STRING "" FORCE)\n'
+                                  .format(
+                                      var=install_names[repo.name],
+                                      dir=escape(repo.install_dir)))
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        description='Get and build dependent repos at known-good commits')
+    parser.add_argument(
+        '--known_good_dir',
+        dest='known_good_dir',
+        help="Specify directory for known_good.json file.")
+    parser.add_argument(
+        '--dir',
+        dest='dir',
+        default='.',
+        help="Set target directory for repository roots. Default is \'.\'.")
+    parser.add_argument(
+        '--ref',
+        dest='ref',
+        default='',
+        help="Override 'commit' with git reference. E.g., 'origin/master'")
+    parser.add_argument(
+        '--no-build',
+        dest='do_build',
+        action='store_false',
+        help=
+        "Clone/update repositories and generate build files without performing compilation",
+        default=True)
+    parser.add_argument(
+        '--clean',
+        dest='do_clean',
+        action='store_true',
+        help="Clean files generated by compiler and linker before building",
+        default=False)
+    parser.add_argument(
+        '--clean-repo',
+        dest='do_clean_repo',
+        action='store_true',
+        help="Delete repository directory before building",
+        default=False)
+    parser.add_argument(
+        '--clean-build',
+        dest='do_clean_build',
+        action='store_true',
+        help="Delete build directory before building",
+        default=False)
+    parser.add_argument(
+        '--clean-install',
+        dest='do_clean_install',
+        action='store_true',
+        help="Delete install directory before building",
+        default=False)
+    parser.add_argument(
+        '--arch',
+        dest='arch',
+        choices=['32', '64', 'x86', 'x64', 'win32', 'win64'],
+        type=str.lower,
+        help="Set build files architecture (Windows)",
+        default='64')
+    parser.add_argument(
+        '--config',
+        dest='config',
+        choices=['debug', 'release', 'relwithdebinfo', 'minsizerel'],
+        type=str.lower,
+        help="Set build files configuration",
+        default='debug')
+    parser.add_argument(
+        '--generator',
+        dest='generator',
+        help="Set the CMake generator",
+        default=None)
+
+    args = parser.parse_args()
+    save_cwd = os.getcwd()
+
+    # Create working "top" directory if needed
+    distutils.dir_util.mkpath(args.dir)
+    abs_top_dir = os.path.abspath(args.dir)
+
+    repos = GetGoodRepos(args)
+    repo_dict = {}
+
+    print('Starting builds in {d}'.format(d=abs_top_dir))
+    for repo in repos:
+        # If the repo has a platform whitelist, skip the repo
+        # unless we are building on a whitelisted platform.
+        if not repo.on_build_platform:
+            continue
+
+        field_list = ('url',
+                      'sub_dir',
+                      'commit',
+                      'build_dir',
+                      'install_dir',
+                      'deps',
+                      'prebuild',
+                      'prebuild_linux',
+                      'prebuild_windows',
+                      'custom_build',
+                      'cmake_options',
+                      'ci_only',
+                      'build_step',
+                      'build_platforms',
+                      'repo_dir',
+                      'on_build_platform')
+        repo_dict[repo.name] = {field: getattr(repo, field) for field in field_list}
+
+        # If the repo has a CI whitelist, skip the repo unless
+        # one of its CI environment variables is set to true.
+        if len(repo.ci_only):
+            do_build = False
+            for env in repo.ci_only:
+                if env not in os.environ:
+                    continue
+                if os.environ[env].lower() == 'true':
+                    do_build = True
+                    break
+            if not do_build:
+                continue
+
+        # Clone/update the repository
+        repo.Checkout()
+
+        # Build the repository
+        if args.do_build and repo.build_step != 'skip':
+            repo.Build(repos, repo_dict)
+
+    # Need to restore original cwd in order for CreateHelper to find json file
+    os.chdir(save_cwd)
+    CreateHelper(args, repos, os.path.join(abs_top_dir, 'helper.cmake'))
+
+    sys.exit(0)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/src/third_party/vulkan-loader/src/tests/CMakeLists.txt b/src/third_party/vulkan-loader/src/tests/CMakeLists.txt
new file mode 100644
index 0000000..1d7bc17
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/CMakeLists.txt
@@ -0,0 +1,136 @@
+# ~~~
+# Copyright (c) 2014-2018 Valve Corporation
+# Copyright (c) 2014-2018 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ~~~
+
+add_executable(vk_loader_validation_tests loader_validation_tests.cpp)
+add_test(NAME vk_loader_validation_tests COMMAND vk_loader_validation_tests)
+
+set_target_properties(vk_loader_validation_tests PROPERTIES COMPILE_DEFINITIONS "GTEST_LINKED_AS_SHARED_LIBRARY=1")
+if(UNIX)
+    set_target_properties(vk_loader_validation_tests PROPERTIES COMPILE_FLAGS "-Wno-sign-compare")
+endif()
+
+if(WIN32)
+    target_compile_options(vk_loader_validation_tests PUBLIC ${MSVC_LOADER_COMPILE_OPTIONS})
+
+    add_definitions(-DVK_USE_PLATFORM_WIN32_KHR -DWIN32_LEAN_AND_MEAN)
+    # Workaround for TR1 deprecation in Visual Studio 15.5 until Google Test is updated
+    add_definitions(-D_SILENCE_TR1_NAMESPACE_DEPRECATION_WARNING)
+elseif(ANDROID)
+    add_definitions(-DVK_USE_PLATFORM_ANDROID_KHR)
+elseif(APPLE)
+    add_definitions(-DVK_USE_PLATFORM_MACOS_MVK)
+elseif(UNIX AND NOT APPLE) # i.e.: Linux
+    if(BUILD_WSI_XCB_SUPPORT)
+        add_definitions(-DVK_USE_PLATFORM_XCB_KHR)
+    endif()
+
+    if(BUILD_WSI_XLIB_SUPPORT)
+        add_definitions(-DVK_USE_PLATFORM_XLIB_KHR)
+    endif()
+
+    if(BUILD_WSI_WAYLAND_SUPPORT)
+        add_definitions(-DVK_USE_PLATFORM_WAYLAND_KHR)
+    endif()
+else()
+    message(FATAL_ERROR "Unsupported Platform!")
+endif()
+
+if(WIN32)
+    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -D_CRT_SECURE_NO_WARNINGS -D_USE_MATH_DEFINES")
+
+    # If MSVC, disable some signed/unsigned mismatch warnings.
+    if(MSVC)
+        set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4267")
+    endif()
+
+else()
+    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")
+endif()
+
+set(LIBGLM_INCLUDE_DIR ${PROJECT_SOURCE_DIR}/libs)
+
+include_directories(${PROJECT_SOURCE_DIR}/external
+                    ${GTEST_SOURCE_DIR}/googletest/include
+                    ${CMAKE_CURRENT_BINARY_DIR}
+                    ${CMAKE_BINARY_DIR}
+                    ${PROJECT_BINARY_DIR})
+
+if(UNIX)
+    # extra setup for out-of-tree builds
+    if(NOT (CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_CURRENT_BINARY_DIR))
+        add_custom_target(binary-dir-symlinks ALL
+                          COMMAND ${CMAKE_COMMAND} -E create_symlink ${CMAKE_CURRENT_SOURCE_DIR}/run_wrap_objects_tests.sh
+                                  run_wrap_objects_tests.sh
+                          COMMAND ${CMAKE_COMMAND} -E create_symlink ${CMAKE_CURRENT_SOURCE_DIR}/run_loader_tests.sh
+                                  run_loader_tests.sh
+                          COMMAND ${CMAKE_COMMAND} -E create_symlink ${CMAKE_CURRENT_SOURCE_DIR}/run_extra_loader_tests.sh
+                                  run_extra_loader_tests.sh
+                          COMMAND ${CMAKE_COMMAND} -E create_symlink ${CMAKE_CURRENT_SOURCE_DIR}/run_all_tests.sh run_all_tests.sh
+                          VERBATIM)
+    endif()
+else()
+    if(NOT (CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_CURRENT_BINARY_DIR))
+        file(TO_NATIVE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/_run_all_tests.ps1 RUN_ALL)
+        add_custom_target(binary-dir-symlinks ALL
+                          COMMAND ${CMAKE_COMMAND} -E copy_if_different ${RUN_ALL} run_all_tests.ps1
+                          VERBATIM)
+        set_target_properties(binary-dir-symlinks PROPERTIES FOLDER ${LOADER_HELPER_FOLDER})
+    endif()
+endif()
+
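+# Select the Vulkan loader library to link against: prefer the in-tree "vulkan"
+# target when it is part of this build; on Windows without that target, fall back
+# to the import library shipped in $ENV{VULKAN_SDK} (64-bit or 32-bit as appropriate).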
+if(TARGET vulkan)
+    set(LOADER_LIB vulkan)
+elseif(WIN32 AND NOT $ENV{VULKAN_SDK} EQUAL "")
+    if(CMAKE_SIZEOF_VOID_P EQUAL 8)
+        set(LOADER_LIB "$ENV{VULKAN_SDK}/Lib/vulkan-1.lib")
+    else()
+        set(LOADER_LIB "$ENV{VULKAN_SDK}/Lib32/vulkan-1.lib")
+    endif()
+else()
+    set(LOADER_LIB vulkan)
+endif()
+
+target_link_libraries(vk_loader_validation_tests "${LOADER_LIB}" gtest gtest_main)
+if(BUILD_LOADER AND ENABLE_STATIC_LOADER)
+    set_target_properties(vk_loader_validation_tests PROPERTIES LINK_FLAGS "/ignore:4098")
+endif()
+
+# Copy loader and googletest (gtest) libs to test dir so the test executable can find them.
+if(WIN32)
+    file(COPY vk_loader_validation_tests.vcxproj.user DESTINATION "${CMAKE_CURRENT_BINARY_DIR}")
+    if(CMAKE_GENERATOR MATCHES "^Visual Studio.*")
+        file(TO_NATIVE_PATH ${PROJECT_BINARY_DIR}/external/googletest/googletest/$<CONFIG>/gtest_main$<$<CONFIG:Debug>:d>.dll
+                            GTEST_COPY_SRC1)
+        file(TO_NATIVE_PATH ${PROJECT_BINARY_DIR}/external/googletest/googletest/$<CONFIG>/gtest$<$<CONFIG:Debug>:d>.dll
+                            GTEST_COPY_SRC2)
+        file(TO_NATIVE_PATH ${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG> GTEST_COPY_DEST)
+    else()
+        file(TO_NATIVE_PATH ${PROJECT_BINARY_DIR}/external/googletest/googletest/gtest_main.dll GTEST_COPY_SRC1)
+        file(TO_NATIVE_PATH ${PROJECT_BINARY_DIR}/external/googletest/googletest/gtest.dll GTEST_COPY_SRC2)
+        file(TO_NATIVE_PATH ${CMAKE_CURRENT_BINARY_DIR} GTEST_COPY_DEST)
+    endif()
+    add_custom_command(TARGET vk_loader_validation_tests POST_BUILD
+                       COMMAND xcopy /Y /I ${GTEST_COPY_SRC1} ${GTEST_COPY_DEST}
+                       COMMAND xcopy /Y /I ${GTEST_COPY_SRC2} ${GTEST_COPY_DEST})
+    # Copy the loader shared lib (if built) to the test application directory so the test app finds it.
+    if((NOT ENABLE_STATIC_LOADER) AND TARGET vulkan)
+        add_custom_command(TARGET vk_loader_validation_tests POST_BUILD
+                           COMMAND ${CMAKE_COMMAND} -E copy $<TARGET_FILE:vulkan> $<TARGET_FILE_DIR:vk_loader_validation_tests>)
+    endif()
+endif()
+
+add_subdirectory(layers)
diff --git a/src/third_party/vulkan-loader/src/tests/README.md b/src/third_party/vulkan-loader/src/tests/README.md
new file mode 100644
index 0000000..42fb061
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/README.md
@@ -0,0 +1,14 @@
+
+# Loader Tests
+
+This directory contains a test suite for the Vulkan loader.
+These tests are not exhaustive &mdash; they are expected to be supplemented with other tests, such as CTS.
+
+## Running Tests
+
+To run the tests, your environment needs to be configured so that the test layers will be found.
+This can be done by setting the `VK_LAYER_PATH` environment variable to point at the built layers.
+Depending on the platform build tool you use, this location will either be `${CMAKE_BINARY_DIR}/tests/layers` or `${CMAKE_BINARY_DIR}/tests/layers/${CONFIGURATION}`.
+When using Visual Studio, the generated project will already be set up to set the environment as needed.
+Running the tests through the `run_loader_tests.sh` script on Linux will also set up the environment properly.
+With any other toolchain, the user will have to set up the environment manually.
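+
+For example, with a Linux makefile build the environment can be set up by hand as follows (the paths are illustrative and depend on the generator and build directory used):
+
+```
+export VK_LAYER_PATH=<cmake-binary-dir>/tests/layers
+<cmake-binary-dir>/tests/vk_loader_validation_tests
+```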
diff --git a/src/third_party/vulkan-loader/src/tests/_run_all_tests.ps1 b/src/third_party/vulkan-loader/src/tests/_run_all_tests.ps1
new file mode 100644
index 0000000..a03bc9c
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/_run_all_tests.ps1
@@ -0,0 +1,30 @@
+# Be sure to run "Set-ExecutionPolicy RemoteSigned" before running powershell scripts
+
+# Use TestExceptions to filter out tests with known problems, separated by a colon,
+# e.g. run_all_tests.ps1 -TestExceptions VkLayerTest.RequiredParameter:VkLayerTest.UnrecognizedValue
+
+# To run the Debug configuration, pass the -Debug switch,
+# e.g. run_all_tests.ps1 -Debug
+
+Param(
+    [switch]$Debug,
+    [string]$LoaderTestExceptions,
+    [string]$TestExceptions
+)
+
+if ($Debug) {
+    $dPath = "Debug"
+} else {
+    $dPath = "Release"
+}
+
+$AboveDir = (Get-Item -Path ".." -Verbose).FullName
+Write-Host "Using Vulkan run-time=$AboveDir\loader\$dPath"
+Set-Item -path env:Path -value ("$AboveDir\loader\$dPath;" + $env:Path)
+
+& $dPath\vk_loader_validation_tests --gtest_filter=-$LoaderTestExceptions
+if ($lastexitcode -ne 0) {
+   exit 1
+}
+
+exit $lastexitcode
diff --git a/src/third_party/vulkan-loader/src/tests/layers/CMakeLists.txt b/src/third_party/vulkan-loader/src/tests/layers/CMakeLists.txt
new file mode 100644
index 0000000..9aa5e3b
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/layers/CMakeLists.txt
@@ -0,0 +1,96 @@
+# ~~~
+# Copyright (c) 2016-2018 Valve Corporation
+# Copyright (c) 2016-2018 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ~~~
+
+set(CMAKE_INSTALL_RPATH /usr/lib/x86_64-linux-gnu/vulkan/layer:/usr/lib/i386-linux-gnu/vulkan/layer)
+
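+# AddVkLayer(<name> <sources...>) defines a shared test layer target named
+# VkLayer_<name>, taking care of the platform-specific details: the module
+# definition (.def) file and MSVC options on Windows, a plain shared library
+# on macOS, and -Bsymbolic linking on Linux.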
+if(WIN32)
+    macro(AddVkLayer target)
+        file(TO_NATIVE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/VkLayer_${target}.def DEF_FILE)
+        add_custom_target(copy-${target}-def-file ALL
+                          COMMAND ${CMAKE_COMMAND} -E copy_if_different ${DEF_FILE} VkLayer_${target}.def
+                          VERBATIM)
+        set_target_properties(copy-${target}-def-file PROPERTIES FOLDER ${LOADER_HELPER_FOLDER})
+        add_library(VkLayer_${target} SHARED ${ARGN} VkLayer_${target}.def)
+        target_compile_options(VkLayer_${target} PUBLIC ${MSVC_LOADER_COMPILE_OPTIONS})
+        target_link_libraries(VkLayer_${target} Vulkan::Headers)
+    endmacro()
+elseif(APPLE)
+    macro(AddVkLayer target)
+        add_library(VkLayer_${target} SHARED ${ARGN})
+        set_target_properties(VkLayer_${target} PROPERTIES LINK_FLAGS "-Wl")
+        target_link_libraries(VkLayer_${target} Vulkan::Headers)
+    endmacro()
+else(UNIX AND NOT APPLE) # i.e.: Linux
+    macro(AddVkLayer target)
+        add_library(VkLayer_${target} SHARED ${ARGN})
+        set_target_properties(VkLayer_${target} PROPERTIES LINK_FLAGS "-Wl,-Bsymbolic")
+        target_link_libraries(VkLayer_${target} Vulkan::Headers)
+    endmacro()
+endif()
+
+include_directories(${CMAKE_CURRENT_SOURCE_DIR}
+                    ${CMAKE_CURRENT_SOURCE_DIR}/../../loader
+                    ${CMAKE_CURRENT_SOURCE_DIR}/../../loader/generated)
+
+if(WIN32)
+    set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -D_CRT_SECURE_NO_WARNINGS")
+    set(CMAKE_C_FLAGS_RELEASE "${CMAKE_C_FLAGS_RELEASE} -D_CRT_SECURE_NO_WARNINGS")
+    set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -D_CRT_SECURE_NO_WARNINGS /bigobj")
+    set(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -D_CRT_SECURE_NO_WARNINGS /bigobj")
+else()
+    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wpointer-arith -Wno-unused-function")
+    set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wpointer-arith -Wno-unused-function")
+endif()
+
+AddVkLayer(wrap_objects wrap_objects.cpp vk_layer_table.cpp vk_layer_extension_utils.cpp)
+
+AddVkLayer(test test.cpp vk_layer_table.cpp vk_layer_extension_utils.cpp)
+
+# --------------------------------------------------------------------------------------------------------------------------------
+
+# The output file needs Unix "/" separators or Windows "\" separators. On top of that, Windows separators actually need to be
+# doubled because the json format uses backslash escapes.
+file(TO_NATIVE_PATH "./" RELATIVE_PATH_PREFIX)
+string(REPLACE "\\"
+               "\\\\"
+               RELATIVE_PATH_PREFIX
+               "${RELATIVE_PATH_PREFIX}")
+
+# Run each .json.in file through the generator. We need to create the generator.cmake script so that the generator can be run at
+# compile time, instead of configure time. Running at compile time lets us use cmake generator expressions (TARGET_FILE_NAME and
+# TARGET_FILE_DIR, specifically).
+file(WRITE "${CMAKE_CURRENT_BINARY_DIR}/generator.cmake" "configure_file(\"\${INPUT_FILE}\" \"\${OUTPUT_FILE}\")")
+
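+# For a given layer, the custom target below ends up invoking something like
+# (illustrative; the actual paths and layer binary name are generator expressions
+# resolved at build time):
+#   cmake -DINPUT_FILE=.../json/VkLayer_test.json.in
+#         -DOUTPUT_FILE=<layer binary dir>/VkLayer_test.json
+#         -DVK_VERSION=<major>.<minor>.<patch>
+#         -DRELATIVE_LAYER_BINARY=./<layer binary name>
+#         -P generator.cmake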
+foreach(TARGET_NAME VkLayer_wrap_objects VkLayer_test VkLayer_meta VkLayer_meta_rev)
+    set(CONFIG_DEFINES -DINPUT_FILE="${CMAKE_CURRENT_SOURCE_DIR}/json/${TARGET_NAME}.json.in"
+        -DVK_VERSION="${VulkanHeaders_VERSION_MAJOR}.${VulkanHeaders_VERSION_MINOR}.${VulkanHeaders_VERSION_PATCH}")
+
+    # Append further config parameters, depending on which layer.
+    if(TARGET ${TARGET_NAME})
+        # This json file corresponds to an actual target (not a metalayer); query properties from that target.
+        set(CONFIG_DEFINES
+            ${CONFIG_DEFINES}
+            -DOUTPUT_FILE="$<TARGET_FILE_DIR:${TARGET_NAME}>/${TARGET_NAME}.json"
+            -DRELATIVE_LAYER_BINARY="${RELATIVE_PATH_PREFIX}$<TARGET_FILE_NAME:${TARGET_NAME}>")
+    else()
+        # This json file is a metalayer.  Query properties from the VkLayer_test layer; there is no layer binary file.
+        set(CONFIG_DEFINES ${CONFIG_DEFINES} -DOUTPUT_FILE="$<TARGET_FILE_DIR:VkLayer_test>/${TARGET_NAME}.json")
+    endif()
+
+    add_custom_target(${TARGET_NAME}-json ALL
+                      COMMAND ${CMAKE_COMMAND} ${CONFIG_DEFINES} -P "${CMAKE_CURRENT_BINARY_DIR}/generator.cmake")
+endforeach()
diff --git a/src/third_party/vulkan-loader/src/tests/layers/VkLayer_test.def b/src/third_party/vulkan-loader/src/tests/layers/VkLayer_test.def
new file mode 100644
index 0000000..afb401a
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/layers/VkLayer_test.def
@@ -0,0 +1,31 @@
+; THIS FILE IS GENERATED.  DO NOT EDIT.
+
+;;;; Begin Copyright Notice ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+; Vulkan
+;
+; Copyright (c) 2015-2016 The Khronos Group Inc.
+; Copyright (c) 2015-2016 Valve Corporation
+; Copyright (c) 2015-2016 LunarG, Inc.
+;
+; Licensed under the Apache License, Version 2.0 (the "License");
+; you may not use this file except in compliance with the License.
+; You may obtain a copy of the License at
+;
+;     http://www.apache.org/licenses/LICENSE-2.0
+;
+; Unless required by applicable law or agreed to in writing, software
+; distributed under the License is distributed on an "AS IS" BASIS,
+; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+; See the License for the specific language governing permissions and
+; limitations under the License.
+;
+;  Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+;;;;  End Copyright Notice ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+; The following is required on Windows, for exporting symbols from the DLL
+
+LIBRARY VkLayer_test
+EXPORTS
+vkGetInstanceProcAddr
+vkEnumerateInstanceLayerProperties
+vkEnumerateInstanceExtensionProperties
diff --git a/src/third_party/vulkan-loader/src/tests/layers/VkLayer_wrap_objects.def b/src/third_party/vulkan-loader/src/tests/layers/VkLayer_wrap_objects.def
new file mode 100644
index 0000000..fcd4fa0
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/layers/VkLayer_wrap_objects.def
@@ -0,0 +1,32 @@
+; THIS FILE IS GENERATED.  DO NOT EDIT.
+
+;;;; Begin Copyright Notice ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+; Vulkan
+;
+; Copyright (c) 2015-2016 The Khronos Group Inc.
+; Copyright (c) 2015-2016 Valve Corporation
+; Copyright (c) 2015-2016 LunarG, Inc.
+;
+; Licensed under the Apache License, Version 2.0 (the "License");
+; you may not use this file except in compliance with the License.
+; You may obtain a copy of the License at
+;
+;     http://www.apache.org/licenses/LICENSE-2.0
+;
+; Unless required by applicable law or agreed to in writing, software
+; distributed under the License is distributed on an "AS IS" BASIS,
+; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+; See the License for the specific language governing permissions and
+; limitations under the License.
+;
+;  Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+;;;;  End Copyright Notice ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+; The following is required on Windows, for exporting symbols from the DLL
+
+LIBRARY VkLayer_wrap_objects
+EXPORTS
+vkGetInstanceProcAddr
+vkGetDeviceProcAddr
+vkEnumerateInstanceLayerProperties
+vkEnumerateInstanceExtensionProperties
diff --git a/src/third_party/vulkan-loader/src/tests/layers/json/VkLayer_meta.json.in b/src/third_party/vulkan-loader/src/tests/layers/json/VkLayer_meta.json.in
new file mode 100644
index 0000000..7511bce
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/layers/json/VkLayer_meta.json.in
@@ -0,0 +1,14 @@
+{
+    "file_format_version" : "1.1.1",
+    "layer" : {
+        "name": "VK_LAYER_LUNARG_meta",
+        "type": "GLOBAL",
+        "api_version": "@VK_VERSION@",
+        "implementation_version": "1",
+        "description": "LunarG Test Metalayer",
+        "component_layers": [
+            "VK_LAYER_LUNARG_wrap_objects",
+            "VK_LAYER_LUNARG_test"
+        ]
+    }
+}
diff --git a/src/third_party/vulkan-loader/src/tests/layers/json/VkLayer_meta_rev.json.in b/src/third_party/vulkan-loader/src/tests/layers/json/VkLayer_meta_rev.json.in
new file mode 100644
index 0000000..ef95bfb
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/layers/json/VkLayer_meta_rev.json.in
@@ -0,0 +1,14 @@
+{
+    "file_format_version" : "1.1.1",
+    "layer" : {
+        "name": "VK_LAYER_LUNARG_meta_rev",
+        "type": "GLOBAL",
+        "api_version": "@VK_VERSION@",
+        "implementation_version": "1",
+        "description": "LunarG Test Metalayer (Reversed)",
+        "component_layers": [
+            "VK_LAYER_LUNARG_test",
+            "VK_LAYER_LUNARG_wrap_objects"
+        ]
+    }
+}
diff --git a/src/third_party/vulkan-loader/src/tests/layers/json/VkLayer_test.json.in b/src/third_party/vulkan-loader/src/tests/layers/json/VkLayer_test.json.in
new file mode 100644
index 0000000..054b547
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/layers/json/VkLayer_test.json.in
@@ -0,0 +1,17 @@
+{
+    "file_format_version" : "1.0.0",
+    "layer" : {
+        "name": "VK_LAYER_LUNARG_test",
+        "type": "GLOBAL",
+        "library_path": "@RELATIVE_LAYER_BINARY@",
+        "api_version": "@VK_VERSION@",
+        "implementation_version": "1",
+        "description": "LunarG Test Layer",
+        "enable_environment": {
+            "ENABLE_LAYER_TEST_1": "enable"
+        },
+        "disable_environment": {
+            "DISABLE_LAYER_TEST_1": ""
+        }
+    }
+}
diff --git a/src/third_party/vulkan-loader/src/tests/layers/json/VkLayer_wrap_objects.json.in b/src/third_party/vulkan-loader/src/tests/layers/json/VkLayer_wrap_objects.json.in
new file mode 100644
index 0000000..d33154c
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/layers/json/VkLayer_wrap_objects.json.in
@@ -0,0 +1,11 @@
+{
+    "file_format_version" : "1.0.0",
+    "layer" : {
+        "name": "VK_LAYER_LUNARG_wrap_objects",
+        "type": "GLOBAL",
+        "library_path": "@RELATIVE_LAYER_BINARY@",
+        "api_version": "@VK_VERSION@",
+        "implementation_version": "1",
+        "description": "LunarG Dispatchable Object Wrapping Layer"
+    }
+}
diff --git a/src/third_party/vulkan-loader/src/tests/layers/test.cpp b/src/third_party/vulkan-loader/src/tests/layers/test.cpp
new file mode 100644
index 0000000..d8147ef
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/layers/test.cpp
@@ -0,0 +1,213 @@
+/*
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Jeremy Hayes <jeremy@lunarg.com>
+ */
+
+#include <cassert>
+#include <iostream>
+#include <unordered_map>
+#include <vector>
+
+#include "vk_dispatch_table_helper.h"
+#include "vk_layer_data.h"
+#include "vk_layer_extension_utils.h"
+
+namespace test
+{
+
+struct layer_data {
+    VkInstance instance;
+    VkLayerInstanceDispatchTable *instance_dispatch_table;
+
+    layer_data() : instance(VK_NULL_HANDLE), instance_dispatch_table(nullptr) {};
+};
+
+static uint32_t loader_layer_if_version = CURRENT_LOADER_LAYER_INTERFACE_VERSION;
+
+static std::unordered_map<void *, layer_data *> layer_data_map;
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateInstance(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
+		VkInstance* pInstance)
+{
+    VkLayerInstanceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
+    assert(chain_info != nullptr);
+
+    assert(chain_info->u.pLayerInfo != nullptr);
+    PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
+    assert(fpGetInstanceProcAddr != nullptr);
+
+    PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance) fpGetInstanceProcAddr(NULL, "vkCreateInstance");
+    if (fpCreateInstance == nullptr)
+    {
+        return VK_ERROR_INITIALIZATION_FAILED;
+    }
+
+    chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
+    VkResult result = fpCreateInstance(pCreateInfo, pAllocator, pInstance);
+    if (result != VK_SUCCESS)
+    {
+        return result;
+    }
+
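+    // The remainder of this function exercises the loader's "layer-created device"
+    // callbacks (VK_LOADER_LAYER_CREATE_DEVICE_CALLBACK): it creates a throwaway
+    // VkDevice on the first enumerated physical device and immediately destroys it,
+    // so the test suite can verify that this path works from inside a layer.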
+    VkLayerInstanceCreateInfo *create_dev_info = get_chain_info(pCreateInfo, VK_LOADER_LAYER_CREATE_DEVICE_CALLBACK);
+    assert(create_dev_info != nullptr);
+    auto layer_create_device = create_dev_info->u.layerDevice.pfnLayerCreateDevice;
+    auto layer_destroy_device = create_dev_info->u.layerDevice.pfnLayerDestroyDevice;
+
+    layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(*pInstance), layer_data_map);
+    instance_data->instance = *pInstance;
+    instance_data->instance_dispatch_table = new VkLayerInstanceDispatchTable;
+    layer_init_instance_dispatch_table(*pInstance, instance_data->instance_dispatch_table, fpGetInstanceProcAddr);
+
+    uint32_t count = 0;
+    instance_data->instance_dispatch_table->EnumeratePhysicalDevices(*pInstance, &count, nullptr);
+    std::vector<VkPhysicalDevice> devices(count);
+    instance_data->instance_dispatch_table->EnumeratePhysicalDevices(*pInstance, &count, devices.data());
+    VkDevice device;
+    auto device_create_info = VkDeviceCreateInfo{
+        VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,  // sType
+        nullptr,                               // pNext
+        0,                                     // flags
+        0,                                     // queueCreateInfoCount
+        nullptr,                               // pQueueCreateInfos
+        0,                                     // enabledLayerCount
+        nullptr,                               // ppEnabledLayerNames
+        0,                                     // enabledExtensionCount
+        nullptr,                               // ppEnabledExtensionNames
+        nullptr                                // pEnabledFeatures
+    };
+    auto deviceQueue = VkDeviceQueueCreateInfo{};
+    deviceQueue.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
+    float prios = 1;
+    deviceQueue.queueFamilyIndex = 0;
+    deviceQueue.queueCount = 1;
+    deviceQueue.pQueuePriorities = &prios;
+    device_create_info.pQueueCreateInfos = &deviceQueue;
+    device_create_info.queueCreateInfoCount = 1;
+
+    PFN_vkGetDeviceProcAddr newGDPA = nullptr;
+    layer_create_device(*pInstance, devices[0], &device_create_info, nullptr, &device, vkGetInstanceProcAddr, &newGDPA);
+    assert(newGDPA != nullptr);
+    PFN_vkDestroyDevice destroy = (PFN_vkDestroyDevice)newGDPA(device, "vkDestroyDevice");
+    layer_destroy_device(device, nullptr, destroy);
+
+    std::cout << "VK_LAYER_LUNARG_test: device count " << count << '\n';
+
+    // Marker for testing.
+    std::cout << "VK_LAYER_LUNARG_test: CreateInstance" << '\n';
+
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator)
+{
+    dispatch_key key = get_dispatch_key(instance);
+    layer_data *instance_data = GetLayerDataPtr(key, layer_data_map);
+    instance_data->instance_dispatch_table->DestroyInstance(instance, pAllocator);
+
+    delete instance_data->instance_dispatch_table;
+    layer_data_map.erase(key);
+
+    // Marker for testing.
+    std::cout << "VK_LAYER_LUNARG_test: DestroyInstance" << '\n';
+}
+
+VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char* funcName)
+{
+    // Return the functions that are intercepted by this layer.
+    static const struct
+    {
+        const char *name;
+        PFN_vkVoidFunction proc;
+    } core_instance_commands[] =
+    {
+        { "vkGetInstanceProcAddr", reinterpret_cast<PFN_vkVoidFunction>(GetInstanceProcAddr) },
+        { "vkCreateInstance", reinterpret_cast<PFN_vkVoidFunction>(CreateInstance) },
+        { "vkDestroyInstance", reinterpret_cast<PFN_vkVoidFunction>(DestroyInstance) }
+    };
+
+    for (size_t i = 0; i < ARRAY_SIZE(core_instance_commands); i++)
+    {
+        if (!strcmp(core_instance_commands[i].name, funcName))
+        {
+            return core_instance_commands[i].proc;
+        }
+    }
+
+    // Only call down the chain for Vulkan commands that this layer does not intercept.
+    layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    VkLayerInstanceDispatchTable *pTable = instance_data->instance_dispatch_table;
+    if (pTable->GetInstanceProcAddr == nullptr)
+    {
+        return nullptr;
+    }
+
+    return pTable->GetInstanceProcAddr(instance, funcName);
+}
+
+VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetPhysicalDeviceProcAddr(VkInstance instance, const char *funcName) {
+    assert(instance);
+
+    layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    VkLayerInstanceDispatchTable *pTable = instance_data->instance_dispatch_table;
+    if (pTable->GetPhysicalDeviceProcAddr == nullptr)
+    {
+        return nullptr;
+    }
+
+    return pTable->GetPhysicalDeviceProcAddr(instance, funcName);
+}
+
+}  // namespace test
+
+VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char* funcName)
+{
+    return test::GetInstanceProcAddr(instance, funcName);
+}
+
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount, VkExtensionProperties *pProperties)
+{
+    return VK_ERROR_LAYER_NOT_PRESENT;
+}
+
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(uint32_t *pCount, VkLayerProperties *pProperties)
+{
+    return VK_ERROR_LAYER_NOT_PRESENT;
+}
+
+VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_layerGetPhysicalDeviceProcAddr(VkInstance instance, const char *funcName) {
+    return test::GetPhysicalDeviceProcAddr(instance, funcName);
+}
+
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkNegotiateLoaderLayerInterfaceVersion(VkNegotiateLayerInterface *pVersionStruct) {
+    assert(pVersionStruct != NULL);
+    assert(pVersionStruct->sType == LAYER_NEGOTIATE_INTERFACE_STRUCT);
+
+    // Fill in the function pointers if our version is at least capable of having the structure contain them.
+    if (pVersionStruct->loaderLayerInterfaceVersion >= 2) {
+        pVersionStruct->pfnGetInstanceProcAddr = vkGetInstanceProcAddr;
+        pVersionStruct->pfnGetPhysicalDeviceProcAddr = vk_layerGetPhysicalDeviceProcAddr;
+    }
+
+    if (pVersionStruct->loaderLayerInterfaceVersion < CURRENT_LOADER_LAYER_INTERFACE_VERSION) {
+        test::loader_layer_if_version = pVersionStruct->loaderLayerInterfaceVersion;
+    } else if (pVersionStruct->loaderLayerInterfaceVersion > CURRENT_LOADER_LAYER_INTERFACE_VERSION) {
+        pVersionStruct->loaderLayerInterfaceVersion = CURRENT_LOADER_LAYER_INTERFACE_VERSION;
+    }
+
+    return VK_SUCCESS;
+}
diff --git a/src/third_party/vulkan-loader/src/tests/layers/vk_format_utils.h b/src/third_party/vulkan-loader/src/tests/layers/vk_format_utils.h
new file mode 100644
index 0000000..e76f9ec
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/layers/vk_format_utils.h
@@ -0,0 +1,166 @@
+/* Copyright (c) 2015-2017 The Khronos Group Inc.
+ * Copyright (c) 2015-2017 Valve Corporation
+ * Copyright (c) 2015-2017 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ */
+
+#pragma once
+#include <stdbool.h>
+#include <vector>
+#include "vulkan/vulkan.h"
+
+#if !defined(VK_LAYER_EXPORT)
+#if defined(__GNUC__) && __GNUC__ >= 4
+#define VK_LAYER_EXPORT __attribute__((visibility("default")))
+#elif defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590)
+#define VK_LAYER_EXPORT __attribute__((visibility("default")))
+#else
+#define VK_LAYER_EXPORT
+#endif
+#endif
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#define VK_MULTIPLANE_FORMAT_MAX_PLANES 3
+
+typedef enum VkFormatCompatibilityClass {
+    VK_FORMAT_COMPATIBILITY_CLASS_NONE_BIT = 0,
+    VK_FORMAT_COMPATIBILITY_CLASS_8_BIT = 1,
+    VK_FORMAT_COMPATIBILITY_CLASS_16_BIT = 2,
+    VK_FORMAT_COMPATIBILITY_CLASS_24_BIT = 3,
+    VK_FORMAT_COMPATIBILITY_CLASS_32_BIT = 4,
+    VK_FORMAT_COMPATIBILITY_CLASS_48_BIT = 5,
+    VK_FORMAT_COMPATIBILITY_CLASS_64_BIT = 6,
+    VK_FORMAT_COMPATIBILITY_CLASS_96_BIT = 7,
+    VK_FORMAT_COMPATIBILITY_CLASS_128_BIT = 8,
+    VK_FORMAT_COMPATIBILITY_CLASS_192_BIT = 9,
+    VK_FORMAT_COMPATIBILITY_CLASS_256_BIT = 10,
+    VK_FORMAT_COMPATIBILITY_CLASS_BC1_RGB_BIT = 11,
+    VK_FORMAT_COMPATIBILITY_CLASS_BC1_RGBA_BIT = 12,
+    VK_FORMAT_COMPATIBILITY_CLASS_BC2_BIT = 13,
+    VK_FORMAT_COMPATIBILITY_CLASS_BC3_BIT = 14,
+    VK_FORMAT_COMPATIBILITY_CLASS_BC4_BIT = 15,
+    VK_FORMAT_COMPATIBILITY_CLASS_BC5_BIT = 16,
+    VK_FORMAT_COMPATIBILITY_CLASS_BC6H_BIT = 17,
+    VK_FORMAT_COMPATIBILITY_CLASS_BC7_BIT = 18,
+    VK_FORMAT_COMPATIBILITY_CLASS_ETC2_RGB_BIT = 19,
+    VK_FORMAT_COMPATIBILITY_CLASS_ETC2_RGBA_BIT = 20,
+    VK_FORMAT_COMPATIBILITY_CLASS_ETC2_EAC_RGBA_BIT = 21,
+    VK_FORMAT_COMPATIBILITY_CLASS_EAC_R_BIT = 22,
+    VK_FORMAT_COMPATIBILITY_CLASS_EAC_RG_BIT = 23,
+    VK_FORMAT_COMPATIBILITY_CLASS_ASTC_4X4_BIT = 24,
+    VK_FORMAT_COMPATIBILITY_CLASS_ASTC_5X4_BIT = 25,
+    VK_FORMAT_COMPATIBILITY_CLASS_ASTC_5X5_BIT = 26,
+    VK_FORMAT_COMPATIBILITY_CLASS_ASTC_6X5_BIT = 27,
+    VK_FORMAT_COMPATIBILITY_CLASS_ASTC_6X6_BIT = 28,
+    VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X5_BIT = 29,
+    VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X6_BIT = 30,
+    VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X8_BIT = 31,
+    VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X5_BIT = 32,
+    VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X6_BIT = 33,
+    VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X8_BIT = 34,
+    VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X10_BIT = 35,
+    VK_FORMAT_COMPATIBILITY_CLASS_ASTC_12X10_BIT = 36,
+    VK_FORMAT_COMPATIBILITY_CLASS_ASTC_12X12_BIT = 37,
+    VK_FORMAT_COMPATIBILITY_CLASS_D16_BIT = 38,
+    VK_FORMAT_COMPATIBILITY_CLASS_D24_BIT = 39,
+    VK_FORMAT_COMPATIBILITY_CLASS_D32_BIT = 40,
+    VK_FORMAT_COMPATIBILITY_CLASS_S8_BIT = 41,
+    VK_FORMAT_COMPATIBILITY_CLASS_D16S8_BIT = 42,
+    VK_FORMAT_COMPATIBILITY_CLASS_D24S8_BIT = 43,
+    VK_FORMAT_COMPATIBILITY_CLASS_D32S8_BIT = 44,
+    VK_FORMAT_COMPATIBILITY_CLASS_PVRTC1_2BPP_BIT = 45,
+    VK_FORMAT_COMPATIBILITY_CLASS_PVRTC1_4BPP_BIT = 46,
+    VK_FORMAT_COMPATIBILITY_CLASS_PVRTC2_2BPP_BIT = 47,
+    VK_FORMAT_COMPATIBILITY_CLASS_PVRTC2_4BPP_BIT = 48,
+    /* KHR_sampler_YCbCr_conversion */
+    VK_FORMAT_COMPATIBILITY_CLASS_32BIT_G8B8G8R8 = 49,
+    VK_FORMAT_COMPATIBILITY_CLASS_32BIT_B8G8R8G8 = 50,
+    VK_FORMAT_COMPATIBILITY_CLASS_64BIT_R10G10B10A10 = 51,
+    VK_FORMAT_COMPATIBILITY_CLASS_64BIT_G10B10G10R10 = 52,
+    VK_FORMAT_COMPATIBILITY_CLASS_64BIT_B10G10R10G10 = 53,
+    VK_FORMAT_COMPATIBILITY_CLASS_64BIT_R12G12B12A12 = 54,
+    VK_FORMAT_COMPATIBILITY_CLASS_64BIT_G12B12G12R12 = 55,
+    VK_FORMAT_COMPATIBILITY_CLASS_64BIT_B12G12R12G12 = 56,
+    VK_FORMAT_COMPATIBILITY_CLASS_64BIT_G16B16G16R16 = 57,
+    VK_FORMAT_COMPATIBILITY_CLASS_64BIT_B16G16R16G16 = 58,
+    VK_FORMAT_COMPATIBILITY_CLASS_8BIT_3PLANE_420 = 59,
+    VK_FORMAT_COMPATIBILITY_CLASS_8BIT_2PLANE_420 = 60,
+    VK_FORMAT_COMPATIBILITY_CLASS_8BIT_3PLANE_422 = 61,
+    VK_FORMAT_COMPATIBILITY_CLASS_8BIT_2PLANE_422 = 62,
+    VK_FORMAT_COMPATIBILITY_CLASS_8BIT_3PLANE_444 = 63,
+    VK_FORMAT_COMPATIBILITY_CLASS_10BIT_3PLANE_420 = 64,
+    VK_FORMAT_COMPATIBILITY_CLASS_10BIT_2PLANE_420 = 65,
+    VK_FORMAT_COMPATIBILITY_CLASS_10BIT_3PLANE_422 = 66,
+    VK_FORMAT_COMPATIBILITY_CLASS_10BIT_2PLANE_422 = 67,
+    VK_FORMAT_COMPATIBILITY_CLASS_10BIT_3PLANE_444 = 68,
+    VK_FORMAT_COMPATIBILITY_CLASS_12BIT_3PLANE_420 = 69,
+    VK_FORMAT_COMPATIBILITY_CLASS_12BIT_2PLANE_420 = 70,
+    VK_FORMAT_COMPATIBILITY_CLASS_12BIT_3PLANE_422 = 71,
+    VK_FORMAT_COMPATIBILITY_CLASS_12BIT_2PLANE_422 = 72,
+    VK_FORMAT_COMPATIBILITY_CLASS_12BIT_3PLANE_444 = 73,
+    VK_FORMAT_COMPATIBILITY_CLASS_16BIT_3PLANE_420 = 74,
+    VK_FORMAT_COMPATIBILITY_CLASS_16BIT_2PLANE_420 = 75,
+    VK_FORMAT_COMPATIBILITY_CLASS_16BIT_3PLANE_422 = 76,
+    VK_FORMAT_COMPATIBILITY_CLASS_16BIT_2PLANE_422 = 77,
+    VK_FORMAT_COMPATIBILITY_CLASS_16BIT_3PLANE_444 = 78,
+    VK_FORMAT_COMPATIBILITY_CLASS_MAX_ENUM = 79
+} VkFormatCompatibilityClass;
+
+VK_LAYER_EXPORT bool FormatIsDepthOrStencil(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsDepthAndStencil(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsDepthOnly(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsStencilOnly(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsCompressed_ETC2_EAC(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsCompressed_ASTC_LDR(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsCompressed_BC(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsCompressed_PVRTC(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsSinglePlane_422(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsNorm(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsUNorm(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsSNorm(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsInt(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsSInt(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsUInt(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsFloat(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsSRGB(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsUScaled(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsSScaled(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsCompressed(VkFormat format);
+
+VK_LAYER_EXPORT uint32_t FormatPlaneCount(VkFormat format);
+VK_LAYER_EXPORT uint32_t FormatChannelCount(VkFormat format);
+VK_LAYER_EXPORT VkExtent3D FormatCompressedTexelBlockExtent(VkFormat format);
+VK_LAYER_EXPORT size_t FormatSize(VkFormat format);
+VK_LAYER_EXPORT VkFormatCompatibilityClass FormatCompatibilityClass(VkFormat format);
+VK_LAYER_EXPORT VkDeviceSize SafeModulo(VkDeviceSize dividend, VkDeviceSize divisor);
+VK_LAYER_EXPORT VkFormat FindMultiplaneCompatibleFormat(VkFormat fmt, uint32_t plane);
+
+static inline bool FormatIsUndef(VkFormat format) { return (format == VK_FORMAT_UNDEFINED); }
+static inline bool FormatHasDepth(VkFormat format) { return (FormatIsDepthOnly(format) || FormatIsDepthAndStencil(format)); }
+static inline bool FormatHasStencil(VkFormat format) { return (FormatIsStencilOnly(format) || FormatIsDepthAndStencil(format)); }
+static inline bool FormatIsMultiplane(VkFormat format) { return ((FormatPlaneCount(format)) > 1u); }
+static inline bool FormatIsColor(VkFormat format) {
+    return !(FormatIsUndef(format) || FormatIsDepthOrStencil(format) || FormatIsMultiplane(format));
+}
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/src/third_party/vulkan-loader/src/tests/layers/vk_layer_config.h b/src/third_party/vulkan-loader/src/tests/layers/vk_layer_config.h
new file mode 100644
index 0000000..e0b9d32
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/layers/vk_layer_config.h
@@ -0,0 +1,72 @@
+/* Copyright (c) 2015-2016 The Khronos Group Inc.
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Jon Ashburn <jon@lunarg.com>
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ **************************************************************************/
+#pragma once
+#include "vulkan/vulkan.h"
+#include "vulkan/vk_layer.h"
+#include <string>
+#include <unordered_map>
+#include <stdbool.h>
+#include <stdio.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+// Definitions for Debug Actions
+typedef enum VkLayerDbgActionBits {
+    VK_DBG_LAYER_ACTION_IGNORE = 0x00000000,
+    VK_DBG_LAYER_ACTION_CALLBACK = 0x00000001,
+    VK_DBG_LAYER_ACTION_LOG_MSG = 0x00000002,
+    VK_DBG_LAYER_ACTION_BREAK = 0x00000004,
+    VK_DBG_LAYER_ACTION_DEBUG_OUTPUT = 0x00000008,
+    VK_DBG_LAYER_ACTION_DEFAULT = 0x40000000,
+} VkLayerDbgActionBits;
+typedef VkFlags VkLayerDbgActionFlags;
+
+const std::unordered_map<std::string, VkFlags> debug_actions_option_definitions = {
+    {std::string("VK_DBG_LAYER_ACTION_IGNORE"), VK_DBG_LAYER_ACTION_IGNORE},
+    {std::string("VK_DBG_LAYER_ACTION_CALLBACK"), VK_DBG_LAYER_ACTION_CALLBACK},
+    {std::string("VK_DBG_LAYER_ACTION_LOG_MSG"), VK_DBG_LAYER_ACTION_LOG_MSG},
+    {std::string("VK_DBG_LAYER_ACTION_BREAK"), VK_DBG_LAYER_ACTION_BREAK},
+#if defined(WIN32)
+    {std::string("VK_DBG_LAYER_ACTION_DEBUG_OUTPUT"), VK_DBG_LAYER_ACTION_DEBUG_OUTPUT},
+#endif
+    {std::string("VK_DBG_LAYER_ACTION_DEFAULT"), VK_DBG_LAYER_ACTION_DEFAULT}};
+
+const std::unordered_map<std::string, VkFlags> report_flags_option_definitions = {
+    {std::string("warn"), VK_DEBUG_REPORT_WARNING_BIT_EXT},
+    {std::string("info"), VK_DEBUG_REPORT_INFORMATION_BIT_EXT},
+    {std::string("perf"), VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT},
+    {std::string("error"), VK_DEBUG_REPORT_ERROR_BIT_EXT},
+    {std::string("debug"), VK_DEBUG_REPORT_DEBUG_BIT_EXT}};
+
+VK_LAYER_EXPORT const char *getLayerOption(const char *_option);
+VK_LAYER_EXPORT FILE *getLayerLogOutput(const char *_option, const char *layerName);
+VK_LAYER_EXPORT VkFlags GetLayerOptionFlags(std::string _option, std::unordered_map<std::string, VkFlags> const &enum_data,
+                                            uint32_t option_default);
+
+VK_LAYER_EXPORT void setLayerOption(const char *_option, const char *_val);
+VK_LAYER_EXPORT void PrintMessageFlags(VkFlags vk_flags, char *msg_flags);
+VK_LAYER_EXPORT void PrintMessageSeverity(VkFlags vk_flags, char *msg_flags);
+VK_LAYER_EXPORT void PrintMessageType(VkFlags vk_flags, char *msg_flags);
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/src/third_party/vulkan-loader/src/tests/layers/vk_layer_data.h b/src/third_party/vulkan-loader/src/tests/layers/vk_layer_data.h
new file mode 100644
index 0000000..046d2f4
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/layers/vk_layer_data.h
@@ -0,0 +1,55 @@
+/* Copyright (c) 2015-2017 The Khronos Group Inc.
+ * Copyright (c) 2015-2017 Valve Corporation
+ * Copyright (c) 2015-2017 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Tobin Ehlis <tobine@google.com>
+ */
+
+#ifndef LAYER_DATA_H
+#define LAYER_DATA_H
+
+#include <cassert>
+#include <unordered_map>
+#include "vk_layer_table.h"
+
+// For the given data key, look up the layer_data instance from given layer_data_map
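+// A new DATA_T is default-constructed and inserted on the first lookup for a key,
+// so callers can use the result unconditionally, e.g.:
+//   layer_data *data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);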
+template <typename DATA_T>
+DATA_T *GetLayerDataPtr(void *data_key, std::unordered_map<void *, DATA_T *> &layer_data_map) {
+    DATA_T *debug_data;
+    typename std::unordered_map<void *, DATA_T *>::const_iterator got;
+
+    /* TODO: We probably should lock here, or have caller lock */
+    got = layer_data_map.find(data_key);
+
+    if (got == layer_data_map.end()) {
+        debug_data = new DATA_T;
+        layer_data_map[(void *)data_key] = debug_data;
+    } else {
+        debug_data = got->second;
+    }
+
+    return debug_data;
+}
+
+template <typename DATA_T>
+void FreeLayerDataPtr(void *data_key, std::unordered_map<void *, DATA_T *> &layer_data_map) {
+    auto got = layer_data_map.find(data_key);
+    assert(got != layer_data_map.end());
+
+    delete got->second;
+    layer_data_map.erase(got);
+}
+
+#endif  // LAYER_DATA_H
diff --git a/src/third_party/vulkan-loader/src/tests/layers/vk_layer_extension_utils.cpp b/src/third_party/vulkan-loader/src/tests/layers/vk_layer_extension_utils.cpp
new file mode 100644
index 0000000..4c1e396
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/layers/vk_layer_extension_utils.cpp
@@ -0,0 +1,66 @@
+/* Copyright (c) 2015-2016 The Khronos Group Inc.
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ *
+ */
+
+#include "string.h"
+#include "vk_layer_extension_utils.h"
+
+#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))
+
+/*
+ * This file contains utility functions for layers
+ */
+
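+// Both helpers implement the standard Vulkan two-call enumeration idiom:
+// when pProperties is NULL, only the available count is written back;
+// otherwise up to *pCount entries are copied and VK_INCOMPLETE is returned
+// if the caller's array was too small to hold them all.
+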
+VK_LAYER_EXPORT VkResult util_GetExtensionProperties(const uint32_t count, const VkExtensionProperties *layer_extensions,
+                                                     uint32_t *pCount, VkExtensionProperties *pProperties) {
+    uint32_t copy_size;
+
+    if (pProperties == NULL || layer_extensions == NULL) {
+        *pCount = count;
+        return VK_SUCCESS;
+    }
+
+    copy_size = *pCount < count ? *pCount : count;
+    memcpy(pProperties, layer_extensions, copy_size * sizeof(VkExtensionProperties));
+    *pCount = copy_size;
+    if (copy_size < count) {
+        return VK_INCOMPLETE;
+    }
+
+    return VK_SUCCESS;
+}
+
+VK_LAYER_EXPORT VkResult util_GetLayerProperties(const uint32_t count, const VkLayerProperties *layer_properties, uint32_t *pCount,
+                                                 VkLayerProperties *pProperties) {
+    uint32_t copy_size;
+
+    if (pProperties == NULL || layer_properties == NULL) {
+        *pCount = count;
+        return VK_SUCCESS;
+    }
+
+    copy_size = *pCount < count ? *pCount : count;
+    memcpy(pProperties, layer_properties, copy_size * sizeof(VkLayerProperties));
+    *pCount = copy_size;
+    if (copy_size < count) {
+        return VK_INCOMPLETE;
+    }
+
+    return VK_SUCCESS;
+}
diff --git a/src/third_party/vulkan-loader/src/tests/layers/vk_layer_extension_utils.h b/src/third_party/vulkan-loader/src/tests/layers/vk_layer_extension_utils.h
new file mode 100644
index 0000000..4a51c16
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/layers/vk_layer_extension_utils.h
@@ -0,0 +1,40 @@
+/* Copyright (c) 2015-2016 The Khronos Group Inc.
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ *
+ */
+
+#include "vulkan/vk_layer.h"
+
+#ifndef LAYER_EXTENSION_UTILS_H
+#define LAYER_EXTENSION_UTILS_H
+
+#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))
+
+/*
+ * This file contains static functions for the generated layers
+ */
+extern "C" {
+
+VK_LAYER_EXPORT VkResult util_GetExtensionProperties(const uint32_t count, const VkExtensionProperties *layer_extensions,
+                                                     uint32_t *pCount, VkExtensionProperties *pProperties);
+
+VK_LAYER_EXPORT VkResult util_GetLayerProperties(const uint32_t count, const VkLayerProperties *layer_properties, uint32_t *pCount,
+                                                 VkLayerProperties *pProperties);
+
+}  // extern "C"
+#endif  // LAYER_EXTENSION_UTILS_H
diff --git a/src/third_party/vulkan-loader/src/tests/layers/vk_layer_logging.h b/src/third_party/vulkan-loader/src/tests/layers/vk_layer_logging.h
new file mode 100644
index 0000000..7bb64d6
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/layers/vk_layer_logging.h
@@ -0,0 +1,1120 @@
+/* Copyright (c) 2015-2017 The Khronos Group Inc.
+ * Copyright (c) 2015-2017 Valve Corporation
+ * Copyright (c) 2015-2017 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Tobin Ehlis <tobin@lunarg.com>
+ * Author: Mark Young <marky@lunarg.com>
+ *
+ */
+
+#ifndef LAYER_LOGGING_H
+#define LAYER_LOGGING_H
+
+#include "vk_loader_layer.h"
+#include "vk_layer_config.h"
+#include "vk_layer_data.h"
+#include "vk_layer_table.h"
+#include "vk_loader_platform.h"
+#include "vulkan/vk_layer.h"
+#include "vk_object_types.h"
+#include <signal.h>
+#include <cinttypes>
+#include <stdarg.h>
+#include <stdbool.h>
+#include <stdio.h>
+#include <unordered_map>
+#include <vector>
+#include <sstream>
+#include <string>
+
+// TODO: Could be autogenerated for the specific handles for extra type safety...
+template <typename HANDLE_T>
+static inline uint64_t HandleToUint64(HANDLE_T *h) {
+    return reinterpret_cast<uint64_t>(h);
+}
+
+static inline uint64_t HandleToUint64(uint64_t h) { return h; }
+
+// Data we store per label for logging
+typedef struct _LoggingLabelData {
+    std::string name;
+    float color[4];
+} LoggingLabelData;
+
+typedef struct _debug_report_data {
+    VkLayerDbgFunctionNode *debug_callback_list;
+    VkLayerDbgFunctionNode *default_debug_callback_list;
+    VkDebugUtilsMessageSeverityFlagsEXT active_severities;
+    VkDebugUtilsMessageTypeFlagsEXT active_types;
+    bool g_DEBUG_REPORT;
+    bool g_DEBUG_UTILS;
+    std::unordered_map<uint64_t, std::string> *debugObjectNameMap;
+    std::unordered_map<uint64_t, std::string> *debugUtilsObjectNameMap;
+    std::unordered_map<VkQueue, std::vector<LoggingLabelData>> *debugUtilsQueueLabels;
+    bool queueLabelHasInsert;
+    std::unordered_map<VkCommandBuffer, std::vector<LoggingLabelData>> *debugUtilsCmdBufLabels;
+    bool cmdBufLabelHasInsert;
+} debug_report_data;
+
+template debug_report_data *GetLayerDataPtr<debug_report_data>(void *data_key,
+                                                               std::unordered_map<void *, debug_report_data *> &data_map);
+
+static inline void DebugReportFlagsToAnnotFlags(VkDebugReportFlagsEXT dr_flags, bool default_flag_is_spec,
+                                                VkDebugUtilsMessageSeverityFlagsEXT *da_severity,
+                                                VkDebugUtilsMessageTypeFlagsEXT *da_type) {
+    // All layer warnings are spec warnings currently.  At least as far as anything not specifically
+    // called out.  In the future, we'll label things using the new split severity and type values.
+    *da_type = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT;
+    *da_severity = 0;
+    if ((dr_flags & VK_DEBUG_REPORT_DEBUG_BIT_EXT) != 0) {
+        *da_severity |= VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT;
+        *da_type |= VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT;
+    }
+    if ((dr_flags & VK_DEBUG_REPORT_INFORMATION_BIT_EXT) != 0) {
+        *da_severity |= VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT;
+        *da_type |= VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT;
+    }
+    if ((dr_flags & VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT) != 0) {
+        *da_severity |= VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT;
+        *da_type |= VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;
+    }
+    if ((dr_flags & VK_DEBUG_REPORT_WARNING_BIT_EXT) != 0) {
+        *da_severity |= VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT;
+    }
+    if ((dr_flags & VK_DEBUG_REPORT_ERROR_BIT_EXT) != 0) {
+        *da_severity |= VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
+    }
+}
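+
+// Example (illustrative): a VK_EXT_debug_report performance warning maps to a
+// debug-utils WARNING severity, with the PERFORMANCE type bit added to the
+// VALIDATION type bit that is always set:
+//
+//     VkDebugUtilsMessageSeverityFlagsEXT sev;
+//     VkDebugUtilsMessageTypeFlagsEXT type;
+//     DebugReportFlagsToAnnotFlags(VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, true, &sev, &type);
+//     // sev  == VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT
+//     // type == VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT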
+
+// Forward Declarations
+static inline bool debug_log_msg(const debug_report_data *debug_data, VkFlags msg_flags, VkDebugReportObjectTypeEXT object_type,
+                                 uint64_t src_object, size_t location, int32_t msg_code, const char *layer_prefix,
+                                 const char *message, const char *text_vuid = NULL);
+
+// Add a debug message callback node structure to the specified callback linked list
+static inline void AddDebugCallbackNode(debug_report_data *debug_data, VkLayerDbgFunctionNode **list_head,
+                                        VkLayerDbgFunctionNode *new_node) {
+    new_node->pNext = *list_head;
+    *list_head = new_node;
+}
+
+// Remove specified debug messenger node structure from the specified linked list
+static inline void RemoveDebugUtilsMessenger(debug_report_data *debug_data, VkLayerDbgFunctionNode **list_head,
+                                             VkDebugUtilsMessengerEXT messenger) {
+    VkLayerDbgFunctionNode *cur_callback = *list_head;
+    VkLayerDbgFunctionNode *prev_callback = cur_callback;
+    bool matched = false;
+    VkFlags local_severities = 0;
+    VkFlags local_types = 0;
+
+    while (cur_callback) {
+        if (cur_callback->is_messenger && cur_callback->messenger.messenger == messenger) {
+            matched = true;
+            prev_callback->pNext = cur_callback->pNext;
+            if (*list_head == cur_callback) {
+                *list_head = cur_callback->pNext;
+            }
+            debug_log_msg(debug_data, VK_DEBUG_REPORT_DEBUG_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT,
+                          reinterpret_cast<uint64_t &>(cur_callback->messenger.messenger), 0, 0, "DebugUtilsMessenger",
+                          "Destroyed messenger\n");
+        } else {
+            matched = false;
+            local_severities |= cur_callback->messenger.messageSeverity;
+            local_types |= cur_callback->messenger.messageType;
+        }
+        prev_callback = cur_callback;
+        cur_callback = cur_callback->pNext;
+        if (matched) {
+            free(prev_callback);
+        }
+    }
+    debug_data->active_severities = local_severities;
+    debug_data->active_types = local_types;
+}
+
+// Remove specified debug message callback node structure from the specified callback linked list
+static inline void RemoveDebugUtilsMessageCallback(debug_report_data *debug_data, VkLayerDbgFunctionNode **list_head,
+                                                   VkDebugReportCallbackEXT callback) {
+    VkLayerDbgFunctionNode *cur_callback = *list_head;
+    VkLayerDbgFunctionNode *prev_callback = cur_callback;
+    bool matched = false;
+    VkFlags local_severities = 0;
+    VkFlags local_types = 0;
+
+    while (cur_callback) {
+        if (!cur_callback->is_messenger && cur_callback->report.msgCallback == callback) {
+            matched = true;
+            prev_callback->pNext = cur_callback->pNext;
+            if (*list_head == cur_callback) {
+                *list_head = cur_callback->pNext;
+            }
+            debug_log_msg(debug_data, VK_DEBUG_REPORT_DEBUG_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT,
+                          reinterpret_cast<uint64_t &>(cur_callback->report.msgCallback), 0, 0, "DebugReport",
+                          "Destroyed callback\n");
+        } else {
+            matched = false;
+            VkFlags this_severities = 0;
+            VkFlags this_types = 0;
+            DebugReportFlagsToAnnotFlags(cur_callback->report.msgFlags, true, &this_severities, &this_types);
+            local_severities |= this_severities;
+            local_types |= this_types;
+        }
+        prev_callback = cur_callback;
+        cur_callback = cur_callback->pNext;
+        if (matched) {
+            free(prev_callback);
+        }
+    }
+    debug_data->active_severities = local_severities;
+    debug_data->active_types = local_types;
+}
+
+// Removes all debug callback function nodes from the specified callback linked lists and frees their resources
+static inline void RemoveAllMessageCallbacks(debug_report_data *debug_data, VkLayerDbgFunctionNode **list_head) {
+    VkLayerDbgFunctionNode *current_callback = *list_head;
+    VkLayerDbgFunctionNode *prev_callback = current_callback;
+
+    while (current_callback) {
+        prev_callback = current_callback->pNext;
+        if (!current_callback->is_messenger) {
+            debug_log_msg(debug_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT,
+                          (uint64_t)current_callback->report.msgCallback, 0, 0, "DebugReport",
+                          "Debug Report callbacks not removed before DestroyInstance");
+        } else {
+            debug_log_msg(debug_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT,
+                          (uint64_t)current_callback->messenger.messenger, 0, 0, "Messenger",
+                          "Debug messengers not removed before DestroyInstance");
+        }
+        free(current_callback);
+        current_callback = prev_callback;
+    }
+    *list_head = NULL;
+}
+
+// Note that text_vuid is a default parameter, and is optional.  See the above forward declaration
+static inline bool debug_log_msg(const debug_report_data *debug_data, VkFlags msg_flags, VkDebugReportObjectTypeEXT object_type,
+                                 uint64_t src_object, size_t location, int32_t msg_code, const char *layer_prefix,
+                                 const char *message, const char *text_vuid) {
+    bool bail = false;
+    VkLayerDbgFunctionNode *layer_dbg_node = NULL;
+
+    if (debug_data->debug_callback_list != NULL) {
+        layer_dbg_node = debug_data->debug_callback_list;
+    } else {
+        layer_dbg_node = debug_data->default_debug_callback_list;
+    }
+
+    VkDebugUtilsMessageSeverityFlagsEXT severity;
+    VkDebugUtilsMessageTypeFlagsEXT types;
+    VkDebugUtilsMessengerCallbackDataEXT callback_data;
+    VkDebugUtilsObjectNameInfoEXT object_name_info;
+
+    // Convert the info to the VK_EXT_debug_utils form in case we need it.
+    DebugReportFlagsToAnnotFlags(msg_flags, true, &severity, &types);
+    object_name_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
+    object_name_info.pNext = NULL;
+    object_name_info.objectType = convertDebugReportObjectToCoreObject(object_type);
+    object_name_info.objectHandle = (uint64_t)(uintptr_t)src_object;
+    object_name_info.pObjectName = NULL;
+
+    callback_data.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT;
+    callback_data.pNext = NULL;
+    callback_data.flags = 0;
+    callback_data.pMessageIdName = text_vuid;
+    callback_data.messageIdNumber = msg_code;
+    callback_data.pMessage = message;
+    callback_data.queueLabelCount = 0;
+    callback_data.pQueueLabels = NULL;
+    callback_data.cmdBufLabelCount = 0;
+    callback_data.pCmdBufLabels = NULL;
+    callback_data.objectCount = 1;
+    callback_data.pObjects = &object_name_info;
+
+    VkDebugUtilsLabelEXT *queue_labels = nullptr;
+    VkDebugUtilsLabelEXT *cmd_buf_labels = nullptr;
+    std::string new_debug_report_message = "";
+    std::ostringstream oss;
+
+    if (0 != src_object) {
+        oss << "Object: 0x" << std::hex << src_object;
+        // If this is a queue, add any queue labels to the callback data.
+        if (VK_OBJECT_TYPE_QUEUE == object_name_info.objectType) {
+            auto label_iter = debug_data->debugUtilsQueueLabels->find(reinterpret_cast<VkQueue>(src_object));
+            if (label_iter != debug_data->debugUtilsQueueLabels->end()) {
+                queue_labels = new VkDebugUtilsLabelEXT[label_iter->second.size()];
+                if (nullptr != queue_labels) {
+                    // Record the labels, but record them in reverse order since we want the
+                    // most recent at the top.
+                    uint32_t label_size = static_cast<uint32_t>(label_iter->second.size());
+                    uint32_t last_index = label_size - 1;
+                    for (uint32_t label = 0; label < label_size; ++label) {
+                        queue_labels[last_index - label].sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
+                        queue_labels[last_index - label].pNext = nullptr;
+                        queue_labels[last_index - label].pLabelName = label_iter->second[label].name.c_str();
+                        queue_labels[last_index - label].color[0] = label_iter->second[label].color[0];
+                        queue_labels[last_index - label].color[1] = label_iter->second[label].color[1];
+                        queue_labels[last_index - label].color[2] = label_iter->second[label].color[2];
+                        queue_labels[last_index - label].color[3] = label_iter->second[label].color[3];
+                    }
+                    callback_data.queueLabelCount = label_size;
+                    callback_data.pQueueLabels = queue_labels;
+                }
+            }
+            // If this is a command buffer, add any command buffer labels to the callback data.
+        } else if (VK_OBJECT_TYPE_COMMAND_BUFFER == object_name_info.objectType) {
+            auto label_iter = debug_data->debugUtilsCmdBufLabels->find(reinterpret_cast<VkCommandBuffer>(src_object));
+            if (label_iter != debug_data->debugUtilsCmdBufLabels->end()) {
+                cmd_buf_labels = new VkDebugUtilsLabelEXT[label_iter->second.size()];
+                if (nullptr != cmd_buf_labels) {
+                    // Record the labels, but record them in reverse order since we want the
+                    // most recent at the top.
+                    uint32_t label_size = static_cast<uint32_t>(label_iter->second.size());
+                    uint32_t last_index = label_size - 1;
+                    for (uint32_t label = 0; label < label_size; ++label) {
+                        cmd_buf_labels[last_index - label].sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
+                        cmd_buf_labels[last_index - label].pNext = nullptr;
+                        cmd_buf_labels[last_index - label].pLabelName = label_iter->second[label].name.c_str();
+                        cmd_buf_labels[last_index - label].color[0] = label_iter->second[label].color[0];
+                        cmd_buf_labels[last_index - label].color[1] = label_iter->second[label].color[1];
+                        cmd_buf_labels[last_index - label].color[2] = label_iter->second[label].color[2];
+                        cmd_buf_labels[last_index - label].color[3] = label_iter->second[label].color[3];
+                    }
+                    callback_data.cmdBufLabelCount = label_size;
+                    callback_data.pCmdBufLabels = cmd_buf_labels;
+                }
+            }
+        }
+        // Look for any debug utils or marker names to use for this object
+        object_name_info.pObjectName = NULL;
+        auto utils_name_iter = debug_data->debugUtilsObjectNameMap->find(src_object);
+        if (utils_name_iter != debug_data->debugUtilsObjectNameMap->end()) {
+            object_name_info.pObjectName = utils_name_iter->second.c_str();
+        } else {
+            auto marker_name_iter = debug_data->debugObjectNameMap->find(src_object);
+            if (marker_name_iter != debug_data->debugObjectNameMap->end()) {
+                object_name_info.pObjectName = marker_name_iter->second.c_str();
+            }
+        }
+        if (NULL != object_name_info.pObjectName) {
+            oss << " (Name = " << object_name_info.pObjectName << " : Type = ";
+        } else {
+            oss << " (Type = ";
+        }
+        oss << std::to_string(object_type) << ")";
+    } else {
+        oss << "Object: VK_NULL_HANDLE (Type = " << std::to_string(object_type) << ")";
+    }
+    new_debug_report_message += oss.str();
+    new_debug_report_message += " | ";
+    new_debug_report_message += message;
+
+    if (text_vuid != nullptr) {
+        // If a text vuid is supplied for the old debug report extension, prepend it to the message string.
+        // Do this once, outside the callback loop, so the prefix is not duplicated when more than one
+        // debug report callback is registered.
+        new_debug_report_message.insert(0, " ] ");
+        new_debug_report_message.insert(0, text_vuid);
+        new_debug_report_message.insert(0, " [ ");
+    }
+
+    while (layer_dbg_node) {
+        // If the app uses the VK_EXT_debug_report extension, call all of those registered callbacks.
+        if (!layer_dbg_node->is_messenger && (layer_dbg_node->report.msgFlags & msg_flags)) {
+
+            if (layer_dbg_node->report.pfnMsgCallback(msg_flags, object_type, src_object, location, msg_code, layer_prefix,
+                                                      new_debug_report_message.c_str(), layer_dbg_node->pUserData)) {
+                bail = true;
+            }
+            // If the app uses the VK_EXT_debug_utils extension, call all of those registered callbacks.
+        } else if (layer_dbg_node->is_messenger && (layer_dbg_node->messenger.messageSeverity & severity) &&
+                   (layer_dbg_node->messenger.messageType & types)) {
+            if (layer_dbg_node->messenger.pfnUserCallback(static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>(severity), types,
+                                                          &callback_data, layer_dbg_node->pUserData)) {
+                bail = true;
+            }
+        }
+        layer_dbg_node = layer_dbg_node->pNext;
+    }
+
+    if (nullptr != queue_labels) {
+        delete[] queue_labels;
+    }
+    if (nullptr != cmd_buf_labels) {
+        delete[] cmd_buf_labels;
+    }
+
+    return bail;
+}
+
+static inline void DebugAnnotFlagsToReportFlags(VkDebugUtilsMessageSeverityFlagBitsEXT da_severity,
+                                                VkDebugUtilsMessageTypeFlagsEXT da_type, VkDebugReportFlagsEXT *dr_flags) {
+    *dr_flags = 0;
+
+    if ((da_severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) != 0) {
+        *dr_flags |= VK_DEBUG_REPORT_ERROR_BIT_EXT;
+    } else if ((da_severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT) != 0) {
+        if ((da_type & VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT) != 0) {
+            *dr_flags |= VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT;
+        } else {
+            *dr_flags |= VK_DEBUG_REPORT_WARNING_BIT_EXT;
+        }
+    } else if ((da_severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT) != 0) {
+        *dr_flags |= VK_DEBUG_REPORT_INFORMATION_BIT_EXT;
+    } else if ((da_severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT) != 0) {
+        *dr_flags |= VK_DEBUG_REPORT_DEBUG_BIT_EXT;
+    }
+}
+
+static inline bool debug_messenger_log_msg(const debug_report_data *debug_data,
+                                           VkDebugUtilsMessageSeverityFlagBitsEXT message_severity,
+                                           VkDebugUtilsMessageTypeFlagsEXT message_type,
+                                           VkDebugUtilsMessengerCallbackDataEXT *callback_data,
+                                           const VkDebugUtilsMessengerEXT *messenger) {
+    bool bail = false;
+    VkLayerDbgFunctionNode *layer_dbg_node = NULL;
+
+    if (debug_data->debug_callback_list != NULL) {
+        layer_dbg_node = debug_data->debug_callback_list;
+    } else {
+        layer_dbg_node = debug_data->default_debug_callback_list;
+    }
+
+    VkDebugReportFlagsEXT object_flags = 0;
+
+    DebugAnnotFlagsToReportFlags(message_severity, message_type, &object_flags);
+
+    VkDebugUtilsObjectNameInfoEXT object_name_info;
+    object_name_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
+    object_name_info.pNext = NULL;
+    object_name_info.objectType = VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT;
+    object_name_info.objectHandle = HandleToUint64(*messenger);
+    object_name_info.pObjectName = NULL;
+    callback_data->pObjects = &object_name_info;
+    callback_data->objectCount = 1;
+
+    while (layer_dbg_node) {
+        if (layer_dbg_node->is_messenger && (layer_dbg_node->messenger.messageSeverity & message_severity) &&
+            (layer_dbg_node->messenger.messageType & message_type)) {
+            auto it = debug_data->debugUtilsObjectNameMap->find(object_name_info.objectHandle);
+            if (it != debug_data->debugUtilsObjectNameMap->end()) {
+                object_name_info.pObjectName = it->second.c_str();
+            }
+            if (layer_dbg_node->messenger.pfnUserCallback(message_severity, message_type, callback_data,
+                                                          layer_dbg_node->pUserData)) {
+                bail = true;
+            }
+        } else if (!layer_dbg_node->is_messenger && layer_dbg_node->report.msgFlags & object_flags) {
+            auto it = debug_data->debugObjectNameMap->find(callback_data->pObjects[0].objectHandle);
+            VkDebugReportObjectTypeEXT object_type = convertCoreObjectToDebugReportObject(callback_data->pObjects[0].objectType);
+            if (it == debug_data->debugObjectNameMap->end()) {
+                if (layer_dbg_node->report.pfnMsgCallback(object_flags, object_type, callback_data->pObjects[0].objectHandle, 0,
+                                                          callback_data->messageIdNumber, callback_data->pMessageIdName,
+                                                          callback_data->pMessage, layer_dbg_node->pUserData)) {
+                    bail = true;
+                }
+            } else {
+                std::string newMsg = "SrcObject name = ";
+                newMsg.append(it->second.c_str());
+                newMsg.append(" ");
+                newMsg.append(callback_data->pMessage);
+                if (layer_dbg_node->report.pfnMsgCallback(object_flags, object_type, callback_data->pObjects[0].objectHandle, 0,
+                                                          callback_data->messageIdNumber, callback_data->pMessageIdName,
+                                                          newMsg.c_str(), layer_dbg_node->pUserData)) {
+                    bail = true;
+                }
+            }
+        }
+        layer_dbg_node = layer_dbg_node->pNext;
+    }
+
+    return bail;
+}
+
+static inline debug_report_data *debug_utils_create_instance(
+    VkLayerInstanceDispatchTable *table, VkInstance inst, uint32_t extension_count,
+    const char *const *enabled_extensions)  // layer or extension name to be enabled
+{
+    debug_report_data *debug_data = (debug_report_data *)malloc(sizeof(debug_report_data));
+    if (!debug_data) return NULL;
+
+    memset(debug_data, 0, sizeof(debug_report_data));
+    for (uint32_t i = 0; i < extension_count; i++) {
+        // TODO: Check other property fields
+        if (strcmp(enabled_extensions[i], VK_EXT_DEBUG_REPORT_EXTENSION_NAME) == 0) {
+            debug_data->g_DEBUG_REPORT = true;
+        } else if (strcmp(enabled_extensions[i], VK_EXT_DEBUG_UTILS_EXTENSION_NAME) == 0) {
+            debug_data->g_DEBUG_UTILS = true;
+        }
+    }
+    debug_data->debugObjectNameMap = new std::unordered_map<uint64_t, std::string>;
+    debug_data->debugUtilsObjectNameMap = new std::unordered_map<uint64_t, std::string>;
+    debug_data->debugUtilsQueueLabels = new std::unordered_map<VkQueue, std::vector<LoggingLabelData>>;
+    debug_data->debugUtilsCmdBufLabels = new std::unordered_map<VkCommandBuffer, std::vector<LoggingLabelData>>;
+    debug_data->queueLabelHasInsert = false;
+    debug_data->cmdBufLabelHasInsert = false;
+    return debug_data;
+}
+
+static inline void layer_debug_utils_destroy_instance(debug_report_data *debug_data) {
+    if (debug_data) {
+        RemoveAllMessageCallbacks(debug_data, &debug_data->default_debug_callback_list);
+        RemoveAllMessageCallbacks(debug_data, &debug_data->debug_callback_list);
+        delete debug_data->debugObjectNameMap;
+        delete debug_data->debugUtilsObjectNameMap;
+        delete debug_data->debugUtilsQueueLabels;
+        delete debug_data->debugUtilsCmdBufLabels;
+        free(debug_data);
+    }
+}
+
+static inline debug_report_data *layer_debug_utils_create_device(debug_report_data *instance_debug_data, VkDevice device) {
+    // DEBUG_REPORT shares data between Instance and Device,
+    // so just return instance's data pointer
+    return instance_debug_data;
+}
+
+static inline void layer_debug_utils_destroy_device(VkDevice device) {
+    // Nothing to do since we're using instance data record
+}
+
+static inline void layer_destroy_messenger_callback(debug_report_data *debug_data, VkDebugUtilsMessengerEXT messenger,
+                                                    const VkAllocationCallbacks *allocator) {
+    RemoveDebugUtilsMessenger(debug_data, &debug_data->debug_callback_list, messenger);
+    RemoveDebugUtilsMessenger(debug_data, &debug_data->default_debug_callback_list, messenger);
+}
+
+static inline VkResult layer_create_messenger_callback(debug_report_data *debug_data, bool default_callback,
+                                                       const VkDebugUtilsMessengerCreateInfoEXT *create_info,
+                                                       const VkAllocationCallbacks *allocator,
+                                                       VkDebugUtilsMessengerEXT *messenger) {
+    VkLayerDbgFunctionNode *pNewDbgFuncNode = (VkLayerDbgFunctionNode *)malloc(sizeof(VkLayerDbgFunctionNode));
+    if (!pNewDbgFuncNode) return VK_ERROR_OUT_OF_HOST_MEMORY;
+    memset(pNewDbgFuncNode, 0, sizeof(VkLayerDbgFunctionNode));
+    pNewDbgFuncNode->is_messenger = true;
+
+    // Handle of 0 is logging_callback so use allocated Node address as unique handle
+    if (!(*messenger)) *messenger = (VkDebugUtilsMessengerEXT)pNewDbgFuncNode;
+    pNewDbgFuncNode->messenger.messenger = *messenger;
+    pNewDbgFuncNode->messenger.pfnUserCallback = create_info->pfnUserCallback;
+    pNewDbgFuncNode->messenger.messageSeverity = create_info->messageSeverity;
+    pNewDbgFuncNode->messenger.messageType = create_info->messageType;
+    pNewDbgFuncNode->pUserData = create_info->pUserData;
+
+    debug_data->active_severities |= create_info->messageSeverity;
+    debug_data->active_types |= create_info->messageType;
+    if (default_callback) {
+        AddDebugCallbackNode(debug_data, &debug_data->default_debug_callback_list, pNewDbgFuncNode);
+    } else {
+        AddDebugCallbackNode(debug_data, &debug_data->debug_callback_list, pNewDbgFuncNode);
+    }
+
+    VkDebugUtilsMessengerCallbackDataEXT callback_data = {};
+    callback_data.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT;
+    callback_data.pNext = NULL;
+    callback_data.flags = 0;
+    callback_data.pMessageIdName = "Layer Internal Message";
+    callback_data.messageIdNumber = 0;
+    callback_data.pMessage = "Added messenger";
+    callback_data.queueLabelCount = 0;
+    callback_data.pQueueLabels = NULL;
+    callback_data.cmdBufLabelCount = 0;
+    callback_data.pCmdBufLabels = NULL;
+    callback_data.objectCount = 0;
+    callback_data.pObjects = NULL;
+    debug_messenger_log_msg(debug_data, VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT,
+                            VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT, &callback_data, messenger);
+    return VK_SUCCESS;
+}
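+
+// Usage sketch (illustrative, not a complete entrypoint): a layer's
+// vkCreateDebugUtilsMessengerEXT would typically call down the chain first and, on
+// success, register the new messenger locally.  "my_data" is a hypothetical
+// per-instance record holding the layer's dispatch table and debug_report_data pointer:
+//
+//     VkResult res = my_data->dispatch_table->CreateDebugUtilsMessengerEXT(instance, pCreateInfo, pAllocator, pMessenger);
+//     if (VK_SUCCESS == res) {
+//         res = layer_create_messenger_callback(my_data->report_data, false, pCreateInfo, pAllocator, pMessenger);
+//     }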
+
+static inline void layer_destroy_report_callback(debug_report_data *debug_data, VkDebugReportCallbackEXT callback,
+                                                 const VkAllocationCallbacks *allocator) {
+    RemoveDebugUtilsMessageCallback(debug_data, &debug_data->debug_callback_list, callback);
+    RemoveDebugUtilsMessageCallback(debug_data, &debug_data->default_debug_callback_list, callback);
+}
+
+static inline VkResult layer_create_report_callback(debug_report_data *debug_data, bool default_callback,
+                                                    const VkDebugReportCallbackCreateInfoEXT *create_info,
+                                                    const VkAllocationCallbacks *allocator, VkDebugReportCallbackEXT *callback) {
+    VkLayerDbgFunctionNode *pNewDbgFuncNode = (VkLayerDbgFunctionNode *)malloc(sizeof(VkLayerDbgFunctionNode));
+    if (!pNewDbgFuncNode) {
+        return VK_ERROR_OUT_OF_HOST_MEMORY;
+    }
+    memset(pNewDbgFuncNode, 0, sizeof(VkLayerDbgFunctionNode));
+    pNewDbgFuncNode->is_messenger = false;
+
+    // Handle of 0 is logging_callback so use allocated Node address as unique handle
+    if (!(*callback)) *callback = (VkDebugReportCallbackEXT)pNewDbgFuncNode;
+    pNewDbgFuncNode->report.msgCallback = *callback;
+    pNewDbgFuncNode->report.pfnMsgCallback = create_info->pfnCallback;
+    pNewDbgFuncNode->report.msgFlags = create_info->flags;
+    pNewDbgFuncNode->pUserData = create_info->pUserData;
+
+    VkFlags local_severity = 0;
+    VkFlags local_type = 0;
+    DebugReportFlagsToAnnotFlags(create_info->flags, true, &local_severity, &local_type);
+    debug_data->active_severities |= local_severity;
+    debug_data->active_types |= local_type;
+    if (default_callback) {
+        AddDebugCallbackNode(debug_data, &debug_data->default_debug_callback_list, pNewDbgFuncNode);
+    } else {
+        AddDebugCallbackNode(debug_data, &debug_data->debug_callback_list, pNewDbgFuncNode);
+    }
+
+    debug_log_msg(debug_data, VK_DEBUG_REPORT_DEBUG_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT, (uint64_t)*callback, 0,
+                  0, "DebugReport", "Added callback");
+    return VK_SUCCESS;
+}
+
+static inline PFN_vkVoidFunction debug_utils_get_instance_proc_addr(debug_report_data *debug_data, const char *func_name) {
+    if (!debug_data) {
+        return NULL;
+    }
+    if (debug_data->g_DEBUG_REPORT) {
+        if (!strcmp(func_name, "vkCreateDebugReportCallbackEXT")) {
+            return (PFN_vkVoidFunction)vkCreateDebugReportCallbackEXT;
+        }
+        if (!strcmp(func_name, "vkDestroyDebugReportCallbackEXT")) {
+            return (PFN_vkVoidFunction)vkDestroyDebugReportCallbackEXT;
+        }
+        if (!strcmp(func_name, "vkDebugReportMessageEXT")) {
+            return (PFN_vkVoidFunction)vkDebugReportMessageEXT;
+        }
+    }
+    if (debug_data->g_DEBUG_UTILS) {
+        if (!strcmp(func_name, "vkCreateDebugUtilsMessengerEXT")) {
+            return (PFN_vkVoidFunction)vkCreateDebugUtilsMessengerEXT;
+        }
+        if (!strcmp(func_name, "vkDestroyDebugUtilsMessengerEXT")) {
+            return (PFN_vkVoidFunction)vkDestroyDebugUtilsMessengerEXT;
+        }
+        if (!strcmp(func_name, "vkSubmitDebugUtilsMessageEXT")) {
+            return (PFN_vkVoidFunction)vkSubmitDebugUtilsMessageEXT;
+        }
+    }
+    return NULL;
+}
+
+// This utility (called at vkCreateInstance() time), looks at a pNext chain.
+// It counts any VkDebugReportCallbackCreateInfoEXT structs that it finds.  It
+// then allocates an array that can hold that many structs, as well as that
+// many VkDebugReportCallbackEXT handles.  It then copies each
+// VkDebugReportCallbackCreateInfoEXT, and initializes each handle.
+static inline VkResult layer_copy_tmp_report_callbacks(const void *pChain, uint32_t *num_callbacks,
+                                                       VkDebugReportCallbackCreateInfoEXT **infos,
+                                                       VkDebugReportCallbackEXT **callbacks) {
+    uint32_t n = *num_callbacks = 0;
+
+    const void *pNext = pChain;
+    while (pNext) {
+        // 1st, count the number of VkDebugReportCallbackCreateInfoEXT structs:
+        if (((VkDebugReportCallbackCreateInfoEXT *)pNext)->sType == VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT) {
+            n++;
+        }
+        pNext = (void *)((VkDebugReportCallbackCreateInfoEXT *)pNext)->pNext;
+    }
+    if (n == 0) {
+        return VK_SUCCESS;
+    }
+
+    // 2nd, allocate memory for each VkDebugReportCallbackCreateInfoEXT:
+    VkDebugReportCallbackCreateInfoEXT *pInfos = *infos =
+        ((VkDebugReportCallbackCreateInfoEXT *)malloc(n * sizeof(VkDebugReportCallbackCreateInfoEXT)));
+    if (!pInfos) {
+        return VK_ERROR_OUT_OF_HOST_MEMORY;
+    }
+    // 3rd, allocate memory for a unique handle for each callback:
+    VkDebugReportCallbackEXT *pCallbacks = *callbacks = ((VkDebugReportCallbackEXT *)malloc(n * sizeof(VkDebugReportCallbackEXT)));
+    if (!pCallbacks) {
+        free(pInfos);
+        return VK_ERROR_OUT_OF_HOST_MEMORY;
+    }
+    // 4th, copy each VkDebugReportCallbackCreateInfoEXT for use by
+    // vkDestroyInstance, and assign a unique handle to each callback (just
+    // use the address of the copied VkDebugReportCallbackCreateInfoEXT):
+    pNext = pChain;
+    while (pNext) {
+        if (((VkInstanceCreateInfo *)pNext)->sType == VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT) {
+            memcpy(pInfos, pNext, sizeof(VkDebugReportCallbackCreateInfoEXT));
+            *pCallbacks++ = (VkDebugReportCallbackEXT)pInfos++;
+        }
+        pNext = (void *)((VkInstanceCreateInfo *)pNext)->pNext;
+    }
+
+    *num_callbacks = n;
+    return VK_SUCCESS;
+}
+
+// This utility frees the arrays allocated by layer_copy_tmp_report_callbacks()
+static inline void layer_free_tmp_report_callbacks(VkDebugReportCallbackCreateInfoEXT *infos, VkDebugReportCallbackEXT *callbacks) {
+    free(infos);
+    free(callbacks);
+}
+
+// This utility enables all of the VkDebugReportCallbackCreateInfoEXT structs
+// that were copied by layer_copy_tmp_report_callbacks()
+static inline VkResult layer_enable_tmp_report_callbacks(debug_report_data *debug_data, uint32_t num_callbacks,
+                                                         VkDebugReportCallbackCreateInfoEXT *infos,
+                                                         VkDebugReportCallbackEXT *callbacks) {
+    VkResult rtn = VK_SUCCESS;
+    for (uint32_t i = 0; i < num_callbacks; i++) {
+        rtn = layer_create_report_callback(debug_data, false, &infos[i], NULL, &callbacks[i]);
+        if (rtn != VK_SUCCESS) {
+            for (uint32_t j = 0; j < i; j++) {
+                layer_destroy_report_callback(debug_data, callbacks[j], NULL);
+            }
+            return rtn;
+        }
+    }
+    return rtn;
+}
+
+// This utility disables all of the VkDebugReportCallbackCreateInfoEXT structs
+// that were copied by layer_copy_tmp_report_callbacks()
+static inline void layer_disable_tmp_report_callbacks(debug_report_data *debug_data, uint32_t num_callbacks,
+                                                      VkDebugReportCallbackEXT *callbacks) {
+    for (uint32_t i = 0; i < num_callbacks; i++) {
+        layer_destroy_report_callback(debug_data, callbacks[i], NULL);
+    }
+}
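+
+// Usage sketch (illustrative) of the temporary-callback helpers inside a layer's
+// vkCreateInstance, so that messages produced before the persistent callbacks exist
+// are still delivered.  Error handling is omitted and "report_data" is hypothetical:
+//
+//     uint32_t num_tmp_callbacks = 0;
+//     VkDebugReportCallbackCreateInfoEXT *tmp_infos = NULL;
+//     VkDebugReportCallbackEXT *tmp_callbacks = NULL;
+//     layer_copy_tmp_report_callbacks(pCreateInfo->pNext, &num_tmp_callbacks, &tmp_infos, &tmp_callbacks);
+//     layer_enable_tmp_report_callbacks(report_data, num_tmp_callbacks, tmp_infos, tmp_callbacks);
+//     /* ... create the instance and emit any early messages ... */
+//     layer_disable_tmp_report_callbacks(report_data, num_tmp_callbacks, tmp_callbacks);
+//     layer_free_tmp_report_callbacks(tmp_infos, tmp_callbacks);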
+
+// This utility (called at vkCreateInstance() time), looks at a pNext chain.
+// It counts any VkDebugUtilsMessengerCreateInfoEXT structs that it finds.  It
+// then allocates an array that can hold that many structs, as well as that
+// many VkDebugUtilsMessengerEXT handles.  It then copies each
+// VkDebugUtilsMessengerCreateInfoEXT, and initializes each handle.
+static inline VkResult layer_copy_tmp_debug_messengers(const void *pChain, uint32_t *num_messengers,
+                                                       VkDebugUtilsMessengerCreateInfoEXT **infos,
+                                                       VkDebugUtilsMessengerEXT **messengers) {
+    uint32_t n = *num_messengers = 0;
+
+    const void *pNext = pChain;
+    while (pNext) {
+        // 1st, count the number of VkDebugUtilsMessengerCreateInfoEXT structs:
+        if (((VkDebugUtilsMessengerCreateInfoEXT *)pNext)->sType == VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT) {
+            n++;
+        }
+        pNext = (void *)((VkDebugUtilsMessengerCreateInfoEXT *)pNext)->pNext;
+    }
+    if (n == 0) {
+        return VK_SUCCESS;
+    }
+
+    // 2nd, allocate memory for each VkDebugUtilsMessengerCreateInfoEXT:
+    VkDebugUtilsMessengerCreateInfoEXT *pInfos = *infos =
+        ((VkDebugUtilsMessengerCreateInfoEXT *)malloc(n * sizeof(VkDebugUtilsMessengerCreateInfoEXT)));
+    if (!pInfos) {
+        return VK_ERROR_OUT_OF_HOST_MEMORY;
+    }
+    // 3rd, allocate memory for a unique handle for each messenger:
+    VkDebugUtilsMessengerEXT *pMessengers = *messengers =
+        ((VkDebugUtilsMessengerEXT *)malloc(n * sizeof(VkDebugUtilsMessengerEXT)));
+    if (!pMessengers) {
+        free(pInfos);
+        return VK_ERROR_OUT_OF_HOST_MEMORY;
+    }
+    // 4th, copy each VkDebugUtilsMessengerCreateInfoEXT for use by
+    // vkDestroyInstance, and assign a unique handle to each callback (just
+    // use the address of the copied VkDebugUtilsMessengerCreateInfoEXT):
+    pNext = pChain;
+    while (pNext) {
+        if (((VkInstanceCreateInfo *)pNext)->sType == VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT) {
+            memcpy(pInfos, pNext, sizeof(VkDebugUtilsMessengerCreateInfoEXT));
+            *pMessengers++ = (VkDebugUtilsMessengerEXT)pInfos++;
+        }
+        pNext = (void *)((VkInstanceCreateInfo *)pNext)->pNext;
+    }
+
+    *num_messengers = n;
+    return VK_SUCCESS;
+}
+
+// This utility frees the arrays allocated by layer_copy_tmp_debug_messengers()
+static inline void layer_free_tmp_debug_messengers(VkDebugUtilsMessengerCreateInfoEXT *infos,
+                                                   VkDebugUtilsMessengerEXT *messengers) {
+    free(infos);
+    free(messengers);
+}
+
+// This utility enables all of the VkDebugUtilsMessengerCreateInfoEXT structs
+// that were copied by layer_copy_tmp_debug_messengers()
+static inline VkResult layer_enable_tmp_debug_messengers(debug_report_data *debug_data, uint32_t num_messengers,
+                                                         VkDebugUtilsMessengerCreateInfoEXT *infos,
+                                                         VkDebugUtilsMessengerEXT *messengers) {
+    VkResult rtn = VK_SUCCESS;
+    for (uint32_t i = 0; i < num_messengers; i++) {
+        rtn = layer_create_messenger_callback(debug_data, false, &infos[i], NULL, &messengers[i]);
+        if (rtn != VK_SUCCESS) {
+            for (uint32_t j = 0; j < i; j++) {
+                layer_destroy_messenger_callback(debug_data, messengers[j], NULL);
+            }
+            return rtn;
+        }
+    }
+    return rtn;
+}
+
+// This utility disables all of the VkDebugUtilsMessengerCreateInfoEXT structs
+// that were copied by layer_copy_tmp_debug_messengers()
+static inline void layer_disable_tmp_debug_messengers(debug_report_data *debug_data, uint32_t num_messengers,
+                                                      VkDebugUtilsMessengerEXT *messengers) {
+    for (uint32_t i = 0; i < num_messengers; i++) {
+        layer_destroy_messenger_callback(debug_data, messengers[i], NULL);
+    }
+}
+
+// Checks if the message will get logged.
+// Allows the layer to defer collecting & formatting data if the
+// message will be discarded.
+static inline bool will_log_msg(debug_report_data *debug_data, VkFlags msg_flags) {
+    VkFlags local_severity = 0;
+    VkFlags local_type = 0;
+    DebugReportFlagsToAnnotFlags(msg_flags, true, &local_severity, &local_type);
+    if (!debug_data || !(debug_data->active_severities & local_severity) || !(debug_data->active_types & local_type)) {
+        // Message is not wanted
+        return false;
+    }
+
+    return true;
+}
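+
+// Usage sketch (illustrative): skip expensive string building when no callback is
+// listening for this flag.  "my_report_data", "device", and BuildExpensiveDescription()
+// are hypothetical names owned by the calling layer:
+//
+//     if (will_log_msg(my_report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT)) {
+//         std::string details = BuildExpensiveDescription();
+//         log_msg(my_report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+//                 HandleToUint64(device), 0, "%s", details.c_str());
+//     }
+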
+#ifndef WIN32
+static inline int string_sprintf(std::string *output, const char *fmt, ...) __attribute__((format(printf, 2, 3)));
+#endif
+static inline int string_sprintf(std::string *output, const char *fmt, ...) {
+    std::string &formatted = *output;
+    va_list argptr;
+    va_start(argptr, fmt);
+    int reserve = vsnprintf(nullptr, 0, fmt, argptr);
+    va_end(argptr);
+    // Resize (not just reserve) so the formatted text becomes part of the string's contents
+    // and is visible through size()/c_str() afterwards.
+    formatted.resize(reserve);
+    va_start(argptr, fmt);
+    int result = vsnprintf(&formatted[0], reserve + 1, fmt, argptr);
+    va_end(argptr);
+    assert(result == reserve);
+    return result;
+}
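+
+// Usage sketch: printf-style formatting into a std::string ("heap_index" and
+// "overage" are hypothetical values):
+//
+//     std::string msg;
+//     string_sprintf(&msg, "heap %u exceeds its budget by %zu bytes", heap_index, overage);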
+
+#ifdef WIN32
+static inline int vasprintf(char **strp, char const *fmt, va_list ap) {
+    *strp = nullptr;
+    int size = _vscprintf(fmt, ap);
+    if (size >= 0) {
+        *strp = (char *)malloc(size + 1);
+        if (!*strp) {
+            return -1;
+        }
+        _vsnprintf(*strp, size + 1, fmt, ap);
+    }
+    return size;
+}
+#endif
+
+// Output log message via DEBUG_REPORT. Takes format and variable arg list so that output string is only computed if a message
+// needs to be logged
+#ifndef WIN32
+static inline bool log_msg(const debug_report_data *debug_data, VkFlags msg_flags, VkDebugReportObjectTypeEXT object_type,
+                           uint64_t src_object, int32_t msg_code, const char *format, ...) __attribute__((format(printf, 6, 7)));
+static inline bool log_msg(const debug_report_data *debug_data, VkFlags msg_flags, VkDebugReportObjectTypeEXT object_type,
+                           uint64_t src_object, std::string vuid_text, const char *format, ...)
+    __attribute__((format(printf, 6, 7)));
+#endif
+static inline bool log_msg(const debug_report_data *debug_data, VkFlags msg_flags, VkDebugReportObjectTypeEXT object_type,
+                           uint64_t src_object, int32_t msg_code, const char *format, ...) {
+    VkFlags local_severity = 0;
+    VkFlags local_type = 0;
+    DebugReportFlagsToAnnotFlags(msg_flags, true, &local_severity, &local_type);
+    if (!debug_data || !(debug_data->active_severities & local_severity) || !(debug_data->active_types & local_type)) {
+        // Message is not wanted
+        return false;
+    }
+
+    va_list argptr;
+    va_start(argptr, format);
+    char *str;
+    if (-1 == vasprintf(&str, format, argptr)) {
+        // On failure, glibc vasprintf leaves str undefined
+        str = nullptr;
+    }
+    va_end(argptr);
+
+    // vasprintf may have failed above; fall back to a fixed message here rather than
+    // testing c_str(), which never returns null.
+    std::string str_plus_spec_text(str ? str : "Allocation failure");
+
+    bool result = debug_log_msg(debug_data, msg_flags, object_type, src_object, 0, msg_code, "Validation",
+                                str_plus_spec_text.c_str());
+    free(str);
+    return result;
+}
+
+// Overload of log_msg that takes a VUID string in place of a numerical VUID abstraction
+static inline bool log_msg(const debug_report_data *debug_data, VkFlags msg_flags, VkDebugReportObjectTypeEXT object_type,
+                           uint64_t src_object, std::string vuid_text, const char *format, ...) {
+    VkFlags local_severity = 0;
+    VkFlags local_type = 0;
+    DebugReportFlagsToAnnotFlags(msg_flags, true, &local_severity, &local_type);
+    if (!debug_data || !(debug_data->active_severities & local_severity) || !(debug_data->active_types & local_type)) {
+        // Message is not wanted
+        return false;
+    }
+
+    va_list argptr;
+    va_start(argptr, format);
+    char *str;
+    if (-1 == vasprintf(&str, format, argptr)) {
+        // On failure, glibc vasprintf leaves str undefined
+        str = nullptr;
+    }
+    va_end(argptr);
+
+    // As above, guard against a failed vasprintf instead of testing c_str().
+    std::string str_plus_spec_text(str ? str : "Allocation failure");
+
+    // Append layer prefix with VUID string, pass in UNDEFINED for numerical VUID
+    static const int UNDEFINED_VUID = -1;
+    bool result = debug_log_msg(debug_data, msg_flags, object_type, src_object, 0, UNDEFINED_VUID, "Validation",
+                                str_plus_spec_text.c_str(), vuid_text.c_str());
+
+    free(str);
+    return result;
+}
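+
+// Usage sketch (illustrative): the string overload lets a layer attach a spec VUID to
+// the message.  "report_data", "device", "value", and the VUID text are hypothetical:
+//
+//     log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+//             HandleToUint64(device), "VUID-vkExample-param-00000",
+//             "parameter value %u is out of range", value);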
+
+static inline VKAPI_ATTR VkBool32 VKAPI_CALL report_log_callback(VkFlags msg_flags, VkDebugReportObjectTypeEXT obj_type,
+                                                                 uint64_t src_object, size_t location, int32_t msg_code,
+                                                                 const char *layer_prefix, const char *message, void *user_data) {
+    char msg_flag_string[30];
+
+    PrintMessageFlags(msg_flags, msg_flag_string);
+
+    fprintf((FILE *)user_data, "%s(%s): msg_code: %d: %s\n", layer_prefix, msg_flag_string, msg_code, message);
+    fflush((FILE *)user_data);
+
+    return false;
+}
+
+static inline VKAPI_ATTR VkBool32 VKAPI_CALL report_win32_debug_output_msg(VkFlags msg_flags, VkDebugReportObjectTypeEXT obj_type,
+                                                                           uint64_t src_object, size_t location, int32_t msg_code,
+                                                                           const char *layer_prefix, const char *message,
+                                                                           void *user_data) {
+#ifdef WIN32
+    char msg_flag_string[30];
+    char buf[2048];
+
+    PrintMessageFlags(msg_flags, msg_flag_string);
+    _snprintf(buf, sizeof(buf) - 1, "%s (%s): msg_code: %d: %s\n", layer_prefix, msg_flag_string, msg_code, message);
+
+    OutputDebugString(buf);
+#endif
+
+    return false;
+}
+
+static inline VKAPI_ATTR VkBool32 VKAPI_CALL DebugBreakCallback(VkFlags msgFlags, VkDebugReportObjectTypeEXT obj_type,
+                                                                uint64_t src_object, size_t location, int32_t msg_code,
+                                                                const char *layer_prefix, const char *message, void *user_data) {
+#ifdef WIN32
+    DebugBreak();
+#else
+    raise(SIGTRAP);
+#endif
+
+    return false;
+}
+
+static inline VKAPI_ATTR VkBool32 VKAPI_CALL messenger_log_callback(VkDebugUtilsMessageSeverityFlagBitsEXT message_severity,
+                                                                    VkDebugUtilsMessageTypeFlagsEXT message_type,
+                                                                    const VkDebugUtilsMessengerCallbackDataEXT *callback_data,
+                                                                    void *user_data) {
+    char msg_severity[30];
+    char msg_type[30];
+
+    PrintMessageSeverity(message_severity, msg_severity);
+    PrintMessageType(message_type, msg_type);
+
+    fprintf((FILE *)user_data, "%s(%s / %s): msgNum: %d - %s\n", callback_data->pMessageIdName, msg_severity, msg_type,
+            callback_data->messageIdNumber, callback_data->pMessage);
+    fprintf((FILE *)user_data, "    Objects: %d\n", callback_data->objectCount);
+    for (uint32_t obj = 0; obj < callback_data->objectCount; ++obj) {
+        fprintf((FILE *)user_data, "       [%d] 0x%" PRIx64 ", type: %d, name: %s\n", obj,
+                callback_data->pObjects[obj].objectHandle, callback_data->pObjects[obj].objectType,
+                callback_data->pObjects[obj].pObjectName);
+    }
+    fflush((FILE *)user_data);
+
+    return false;
+}
+
+static inline VKAPI_ATTR VkBool32 VKAPI_CALL messenger_win32_debug_output_msg(
+    VkDebugUtilsMessageSeverityFlagBitsEXT message_severity, VkDebugUtilsMessageTypeFlagsEXT message_type,
+    const VkDebugUtilsMessengerCallbackDataEXT *callback_data, void *user_data) {
+#ifdef WIN32
+    char buf[2048];
+    char msg_severity[30];
+    char msg_type[30];
+
+    PrintMessageSeverity(message_severity, msg_severity);
+    PrintMessageType(message_type, msg_type);
+
+    size_t buffer_space = sizeof(buf) - 1;
+    size_t remaining_space = buffer_space;
+    _snprintf(buf, sizeof(buf) - 1, "%s(%s / %s): msgNum: %d - %s\n", callback_data->pMessageIdName, msg_severity, msg_type,
+              callback_data->messageIdNumber, callback_data->pMessage);
+    // Append to what has already been written rather than overwriting the start of buf.
+    remaining_space = buffer_space - strlen(buf);
+    _snprintf(buf + strlen(buf), remaining_space, "    Objects: %d\n", callback_data->objectCount);
+    for (uint32_t obj = 0; obj < callback_data->objectCount; ++obj) {
+        remaining_space = buffer_space - strlen(buf);
+        if (remaining_space > 0) {
+            _snprintf(buf + strlen(buf), remaining_space, "       [%d] 0x%" PRIx64 ", type: %d, name: %s\n", obj,
+                      callback_data->pObjects[obj].objectHandle, callback_data->pObjects[obj].objectType,
+                      callback_data->pObjects[obj].pObjectName);
+        }
+    }
+    OutputDebugString(buf);
+#endif
+
+    return false;
+}
+
+// This utility converts from the VkDebugUtilsLabelEXT structure into the logging version of the structure.
+// In the logging version, we only record what we absolutely need to convey back to the callbacks.
+static inline void InsertLabelIntoLog(const VkDebugUtilsLabelEXT *utils_label, std::vector<LoggingLabelData> &log_vector) {
+    LoggingLabelData log_label_data = {};
+    log_label_data.name = utils_label->pLabelName;
+    log_label_data.color[0] = utils_label->color[0];
+    log_label_data.color[1] = utils_label->color[1];
+    log_label_data.color[2] = utils_label->color[2];
+    log_label_data.color[3] = utils_label->color[3];
+    log_vector.push_back(log_label_data);
+}
+
+static inline void BeginQueueDebugUtilsLabel(debug_report_data *report_data, VkQueue queue,
+                                             const VkDebugUtilsLabelEXT *label_info) {
+    if (nullptr != label_info && nullptr != label_info->pLabelName) {
+        auto label_iter = report_data->debugUtilsQueueLabels->find(queue);
+        if (label_iter == report_data->debugUtilsQueueLabels->end()) {
+            std::vector<LoggingLabelData> new_queue_labels;
+            InsertLabelIntoLog(label_info, new_queue_labels);
+            report_data->debugUtilsQueueLabels->insert({queue, new_queue_labels});
+        } else {
+            // If the last thing was a label insert, we need to pop it off of the label vector before any
+            // changes. This is because a label added with "vkQueueInsertDebugUtilsLabelEXT" is only a
+            // temporary location that exists until the next operation occurs.  In this case, a new
+            // "vkQueueBeginDebugUtilsLabelEXT" has occurred erasing the previous inserted label.
+            if (report_data->queueLabelHasInsert) {
+                report_data->queueLabelHasInsert = false;
+                label_iter->second.pop_back();
+            }
+            InsertLabelIntoLog(label_info, label_iter->second);
+        }
+    }
+}
+
+static inline void EndQueueDebugUtilsLabel(debug_report_data *report_data, VkQueue queue) {
+    auto label_iter = report_data->debugUtilsQueueLabels->find(queue);
+    if (label_iter != report_data->debugUtilsQueueLabels->end()) {
+        // If the last thing was a label insert, we need to pop it off of the label vector before any
+        // changes. This is because a label added with "vkQueueInsertDebugUtilsLabelEXT" is only a
+        // temporary location that exists until the next operation occurs.  In this case, a
+        // "vkQueueEndDebugUtilsLabelEXT" has occurred erasing the inserted label.
+        if (report_data->queueLabelHasInsert) {
+            report_data->queueLabelHasInsert = false;
+            label_iter->second.pop_back();
+        }
+        // Now pop the normal item
+        label_iter->second.pop_back();
+    }
+}
+
+static inline void InsertQueueDebugUtilsLabel(debug_report_data *report_data, VkQueue queue,
+                                              const VkDebugUtilsLabelEXT *label_info) {
+    if (nullptr != label_info && nullptr != label_info->pLabelName) {
+        auto label_iter = report_data->debugUtilsQueueLabels->find(queue);
+        if (label_iter == report_data->debugUtilsQueueLabels->end()) {
+            std::vector<LoggingLabelData> new_queue_labels;
+            InsertLabelIntoLog(label_info, new_queue_labels);
+            report_data->debugUtilsQueueLabels->insert({queue, new_queue_labels});
+        } else {
+            // If the last thing was a label insert, we need to pop it off of the label vector before any
+            // changes. This is because a label added with "vkQueueInsertDebugUtilsLabelEXT" is only a
+            // temporary location that exists until the next operation occurs.  In this case, a new
+            // "vkQueueInsertDebugUtilsLabelEXT" has occurred erasing the previous inserted label.
+            if (report_data->queueLabelHasInsert) {
+                label_iter->second.pop_back();
+            }
+            // Insert this new label and mark it as one that has been "inserted" so we can remove it on
+            // the next queue label operation.
+            InsertLabelIntoLog(label_info, label_iter->second);
+            report_data->queueLabelHasInsert = true;
+        }
+    }
+}
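+
+// Example (illustrative) of the insert-replacement behaviour described above, for a
+// hypothetical queue "q" and VkDebugUtilsLabelEXT structs "frame", "marker_a", "marker_b":
+//
+//     BeginQueueDebugUtilsLabel(report_data, q, &frame);     // label stack: [frame]
+//     InsertQueueDebugUtilsLabel(report_data, q, &marker_a); // label stack: [frame, marker_a]
+//     InsertQueueDebugUtilsLabel(report_data, q, &marker_b); // marker_a is popped first: [frame, marker_b]
+//     EndQueueDebugUtilsLabel(report_data, q);               // pops marker_b, then frame: []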
+
+static inline void BeginCmdDebugUtilsLabel(debug_report_data *report_data, VkCommandBuffer command_buffer,
+                                           const VkDebugUtilsLabelEXT *label_info) {
+    if (nullptr != label_info && nullptr != label_info->pLabelName) {
+        auto label_iter = report_data->debugUtilsCmdBufLabels->find(command_buffer);
+        if (label_iter == report_data->debugUtilsCmdBufLabels->end()) {
+            std::vector<LoggingLabelData> new_cmdbuf_labels;
+            InsertLabelIntoLog(label_info, new_cmdbuf_labels);
+            report_data->debugUtilsCmdBufLabels->insert({command_buffer, new_cmdbuf_labels});
+        } else {
+            // If the last thing was a label insert, we need to pop it off of the label vector before any
+            // changes. This is because a label added with "vkCmdInsertDebugUtilsLabelEXT" is only a
+            // temporary location that exists until the next operation occurs.  In this case, a
+            // "vkCmdBeginDebugUtilsLabelEXT" has occurred erasing the inserted label.
+            if (report_data->cmdBufLabelHasInsert) {
+                report_data->cmdBufLabelHasInsert = false;
+                label_iter->second.pop_back();
+            }
+            InsertLabelIntoLog(label_info, label_iter->second);
+        }
+    }
+}
+
+static inline void EndCmdDebugUtilsLabel(debug_report_data *report_data, VkCommandBuffer command_buffer) {
+    auto label_iter = report_data->debugUtilsCmdBufLabels->find(command_buffer);
+    if (label_iter != report_data->debugUtilsCmdBufLabels->end()) {
+        // If the last thing was a label insert, we need to pop it off of the label vector before any
+        // changes. This is because a label added with "vkCmdInsertDebugUtilsLabelEXT" is only a
+        // temporary location that exists until the next operation occurs.  In this case, a
+        // "vkCmdEndDebugUtilsLabelEXT" has occurred erasing the inserted label.
+        if (report_data->cmdBufLabelHasInsert) {
+            report_data->cmdBufLabelHasInsert = false;
+            label_iter->second.pop_back();
+        }
+        // Now pop the normal item
+        label_iter->second.pop_back();
+    }
+}
+
+static inline void InsertCmdDebugUtilsLabel(debug_report_data *report_data, VkCommandBuffer command_buffer,
+                                            const VkDebugUtilsLabelEXT *label_info) {
+    if (nullptr != label_info && nullptr != label_info->pLabelName) {
+        auto label_iter = report_data->debugUtilsCmdBufLabels->find(command_buffer);
+        if (label_iter == report_data->debugUtilsCmdBufLabels->end()) {
+            std::vector<LoggingLabelData> new_cmdbuf_labels;
+            InsertLabelIntoLog(label_info, new_cmdbuf_labels);
+            report_data->debugUtilsCmdBufLabels->insert({command_buffer, new_cmdbuf_labels});
+        } else {
+            // If the last thing was a label insert, we need to pop it off of the label vector before any
+            // changes. This is because a label added with "vkCmdInsertDebugUtilsLabelEXT" is only a
+            // temporary location that exists until the next operation occurs.  In this case, a new
+            // "vkCmdInsertDebugUtilsLabelEXT" has occurred erasing the previous inserted label.
+            if (report_data->cmdBufLabelHasInsert) {
+                label_iter->second.pop_back();
+            }
+            // Insert this new label and mark it as one that has been "inserted" so we can remove it on
+            // the next command buffer label operation.
+            InsertLabelIntoLog(label_info, label_iter->second);
+            report_data->cmdBufLabelHasInsert = true;
+        }
+    }
+}
+
+#endif  // LAYER_LOGGING_H
diff --git a/src/third_party/vulkan-loader/src/tests/layers/vk_layer_table.cpp b/src/third_party/vulkan-loader/src/tests/layers/vk_layer_table.cpp
new file mode 100644
index 0000000..4a033b9
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/layers/vk_layer_table.cpp
@@ -0,0 +1,146 @@
+/* Copyright (c) 2015-2016 The Khronos Group Inc.
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
+ * Copyright (c) 2015-2016 Google, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Tobin Ehlis <tobin@lunarg.com>
+ */
+#include <assert.h>
+#include <unordered_map>
+#include "vk_dispatch_table_helper.h"
+#include "vulkan/vk_layer.h"
+#include "vk_layer_table.h"
+static device_table_map tableMap;
+static instance_table_map tableInstanceMap;
+
+// Map lookup must be thread safe
+VkLayerDispatchTable *device_dispatch_table(void *object) {
+    dispatch_key key = get_dispatch_key(object);
+    device_table_map::const_iterator it = tableMap.find((void *)key);
+    assert(it != tableMap.end() && "Not able to find device dispatch entry");
+    return it->second;
+}
+
+VkLayerInstanceDispatchTable *instance_dispatch_table(void *object) {
+    dispatch_key key = get_dispatch_key(object);
+    instance_table_map::const_iterator it = tableInstanceMap.find((void *)key);
+    assert(it != tableInstanceMap.end() && "Not able to find instance dispatch entry");
+    return it->second;
+}
+
+void destroy_dispatch_table(device_table_map &map, dispatch_key key) {
+    device_table_map::const_iterator it = map.find((void *)key);
+    if (it != map.end()) {
+        delete it->second;
+        map.erase(it);
+    }
+}
+
+void destroy_dispatch_table(instance_table_map &map, dispatch_key key) {
+    instance_table_map::const_iterator it = map.find((void *)key);
+    if (it != map.end()) {
+        delete it->second;
+        map.erase(it);
+    }
+}
+
+void destroy_device_dispatch_table(dispatch_key key) { destroy_dispatch_table(tableMap, key); }
+
+void destroy_instance_dispatch_table(dispatch_key key) { destroy_dispatch_table(tableInstanceMap, key); }
+
+VkLayerDispatchTable *get_dispatch_table(device_table_map &map, void *object) {
+    dispatch_key key = get_dispatch_key(object);
+    device_table_map::const_iterator it = map.find((void *)key);
+    assert(it != map.end() && "Not able to find device dispatch entry");
+    return it->second;
+}
+
+VkLayerInstanceDispatchTable *get_dispatch_table(instance_table_map &map, void *object) {
+    dispatch_key key = get_dispatch_key(object);
+    instance_table_map::const_iterator it = map.find((void *)key);
+    assert(it != map.end() && "Not able to find instance dispatch entry");
+    return it->second;
+}
+
+VkLayerInstanceCreateInfo *get_chain_info(const VkInstanceCreateInfo *pCreateInfo, VkLayerFunction func) {
+    VkLayerInstanceCreateInfo *chain_info = (VkLayerInstanceCreateInfo *)pCreateInfo->pNext;
+    while (chain_info && !(chain_info->sType == VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO && chain_info->function == func)) {
+        chain_info = (VkLayerInstanceCreateInfo *)chain_info->pNext;
+    }
+    assert(chain_info != NULL);
+    return chain_info;
+}
+
+VkLayerDeviceCreateInfo *get_chain_info(const VkDeviceCreateInfo *pCreateInfo, VkLayerFunction func) {
+    VkLayerDeviceCreateInfo *chain_info = (VkLayerDeviceCreateInfo *)pCreateInfo->pNext;
+    while (chain_info && !(chain_info->sType == VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO && chain_info->function == func)) {
+        chain_info = (VkLayerDeviceCreateInfo *)chain_info->pNext;
+    }
+    assert(chain_info != NULL);
+    return chain_info;
+}
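+
+// Usage sketch (illustrative): at the top of a layer's vkCreateInstance, fetch the link
+// info to obtain the next vkGetInstanceProcAddr and advance the chain for the layer
+// below.  Error handling is omitted:
+//
+//     VkLayerInstanceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
+//     PFN_vkGetInstanceProcAddr next_gipa = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
+//     chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;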
+
+/* Various dispatchable objects will use the same underlying dispatch table if they
+ * are created from that "parent" object. Thus use pointer to dispatch table
+ * as the key to these table maps.
+ *    Instance -> PhysicalDevice
+ *    Device -> CommandBuffer or Queue
+ * If the objects themselves were used as map keys, every Create entrypoint would have to be
+ * intercepted and a new key inserted into the map. */
+VkLayerInstanceDispatchTable *initInstanceTable(VkInstance instance, const PFN_vkGetInstanceProcAddr gpa, instance_table_map &map) {
+    VkLayerInstanceDispatchTable *pTable;
+    dispatch_key key = get_dispatch_key(instance);
+    instance_table_map::const_iterator it = map.find((void *)key);
+
+    if (it == map.end()) {
+        pTable = new VkLayerInstanceDispatchTable;
+        map[(void *)key] = pTable;
+    } else {
+        return it->second;
+    }
+
+    layer_init_instance_dispatch_table(instance, pTable, gpa);
+
+    // Set up function pointers that are required but not externally exposed. These
+    // won't be added to the instance dispatch table by default.
+    pTable->GetPhysicalDeviceProcAddr = (PFN_GetPhysicalDeviceProcAddr)gpa(instance, "vk_layerGetPhysicalDeviceProcAddr");
+
+    return pTable;
+}
+
+VkLayerInstanceDispatchTable *initInstanceTable(VkInstance instance, const PFN_vkGetInstanceProcAddr gpa) {
+    return initInstanceTable(instance, gpa, tableInstanceMap);
+}
+
+VkLayerDispatchTable *initDeviceTable(VkDevice device, const PFN_vkGetDeviceProcAddr gpa, device_table_map &map) {
+    VkLayerDispatchTable *pTable;
+    dispatch_key key = get_dispatch_key(device);
+    device_table_map::const_iterator it = map.find((void *)key);
+
+    if (it == map.end()) {
+        pTable = new VkLayerDispatchTable;
+        map[(void *)key] = pTable;
+    } else {
+        return it->second;
+    }
+
+    layer_init_device_dispatch_table(device, pTable, gpa);
+
+    return pTable;
+}
+
+VkLayerDispatchTable *initDeviceTable(VkDevice device, const PFN_vkGetDeviceProcAddr gpa) {
+    return initDeviceTable(device, gpa, tableMap);
+}
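+
+// Illustrative usage (not part of the original source): a layer typically pairs these
+// helpers in its vkCreateDevice/vkDestroyDevice hooks, as wrap_objects.cpp below does:
+//
+//   VkResult result = fpCreateDevice(vk_phys_dev, pCreateInfo, pAllocator, pDevice);
+//   if (result == VK_SUCCESS)
+//       initDeviceTable(*pDevice, fpGetDeviceProcAddr);   // build the table once per VkDevice
+//   ...
+//   dispatch_key key = get_dispatch_key(device);
+//   device_dispatch_table(device)->DestroyDevice(device, pAllocator);
+//   destroy_device_dispatch_table(key);                   // release it after the driver call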
diff --git a/src/third_party/vulkan-loader/src/tests/layers/vk_layer_table.h b/src/third_party/vulkan-loader/src/tests/layers/vk_layer_table.h
new file mode 100644
index 0000000..45b8f6e
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/layers/vk_layer_table.h
@@ -0,0 +1,51 @@
+/* Copyright (c) 2015-2016 The Khronos Group Inc.
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Tobin Ehlis <tobin@lunarg.com>
+ */
+
+#pragma once
+
+#include "vulkan/vk_layer.h"
+#include "vulkan/vulkan.h"
+#include <unordered_map>
+
+typedef std::unordered_map<void *, VkLayerDispatchTable *> device_table_map;
+typedef std::unordered_map<void *, VkLayerInstanceDispatchTable *> instance_table_map;
+VkLayerDispatchTable *initDeviceTable(VkDevice device, const PFN_vkGetDeviceProcAddr gpa, device_table_map &map);
+VkLayerDispatchTable *initDeviceTable(VkDevice device, const PFN_vkGetDeviceProcAddr gpa);
+VkLayerInstanceDispatchTable *initInstanceTable(VkInstance instance, const PFN_vkGetInstanceProcAddr gpa, instance_table_map &map);
+VkLayerInstanceDispatchTable *initInstanceTable(VkInstance instance, const PFN_vkGetInstanceProcAddr gpa);
+
+typedef void *dispatch_key;
+
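+// Every dispatchable Vulkan handle begins with a pointer to the loader's dispatch
+// table; dereferencing the handle once therefore yields a value shared by a parent
+// object and its children, which is what these maps use as the key.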
+static inline dispatch_key get_dispatch_key(const void *object) { return (dispatch_key) * (VkLayerDispatchTable **)object; }
+
+VkLayerDispatchTable *device_dispatch_table(void *object);
+
+VkLayerInstanceDispatchTable *instance_dispatch_table(void *object);
+
+VkLayerDispatchTable *get_dispatch_table(device_table_map &map, void *object);
+
+VkLayerInstanceDispatchTable *get_dispatch_table(instance_table_map &map, void *object);
+
+VkLayerInstanceCreateInfo *get_chain_info(const VkInstanceCreateInfo *pCreateInfo, VkLayerFunction func);
+VkLayerDeviceCreateInfo *get_chain_info(const VkDeviceCreateInfo *pCreateInfo, VkLayerFunction func);
+
+void destroy_device_dispatch_table(dispatch_key key);
+void destroy_instance_dispatch_table(dispatch_key key);
+void destroy_dispatch_table(device_table_map &map, dispatch_key key);
+void destroy_dispatch_table(instance_table_map &map, dispatch_key key);
diff --git a/src/third_party/vulkan-loader/src/tests/layers/vk_layer_utils.h b/src/third_party/vulkan-loader/src/tests/layers/vk_layer_utils.h
new file mode 100644
index 0000000..cfad81c
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/layers/vk_layer_utils.h
@@ -0,0 +1,125 @@
+/* Copyright (c) 2015-2017 The Khronos Group Inc.
+ * Copyright (c) 2015-2017 Valve Corporation
+ * Copyright (c) 2015-2017 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ */
+
+#pragma once
+#include <stdbool.h>
+#include <string>
+#include <vector>
+#include "vk_format_utils.h"
+#include "vk_layer_logging.h"
+
+#ifndef WIN32
+#include <strings.h>  // For ffs()
+#else
+#include <intrin.h>  // For _BitScanForward()
+#endif
+
+#ifdef __cplusplus
+// Traits objects to allow string_join to operate on collections of const char *
+template <typename String>
+struct StringJoinSizeTrait {
+    static size_t size(const String &str) { return str.size(); }
+};
+
+template <>
+struct StringJoinSizeTrait<const char *> {
+    static size_t size(const char *str) {
+        if (!str) return 0;
+        return strlen(str);
+    }
+};
+// Similar to perl/python join
+//    * String must support size, reserve, append, and be default constructible
+//    * StringCollection must support size, const forward iteration, and store
+//      strings compatible with String::append
+//    * Accessor trait can be set if default accessors (compatible with string
+//      and const char *) don't support size(StringCollection::value_type &)
+//
+// Return type based on sep type
+template <typename String = std::string, typename StringCollection = std::vector<String>,
+          typename Accessor = StringJoinSizeTrait<typename StringCollection::value_type>>
+static inline String string_join(const String &sep, const StringCollection &strings) {
+    String joined;
+    const size_t count = strings.size();
+    if (!count) return joined;
+
+    // Pre-reserve storage so that we execute in linear time (avoids reallocation copies)
+    size_t reserve = (count - 1) * sep.size();
+    for (const auto &str : strings) {
+        reserve += Accessor::size(str);  // abstracted to allow const char * type in StringCollection
+    }
+    joined.reserve(reserve + 1);
+
+    // Separators only occur *between* string entries, so the first one is special
+    auto current = strings.cbegin();
+    joined.append(*current);
+    ++current;
+    for (; current != strings.cend(); ++current) {
+        joined.append(sep);
+        joined.append(*current);
+    }
+    return joined;
+}
+
+// Requires StringCollection::value_type has a const char * constructor and is compatible with the string_join::String above
+template <typename StringCollection = std::vector<std::string>, typename SepString = std::string>
+static inline SepString string_join(const char *sep, const StringCollection &strings) {
+    return string_join<SepString, StringCollection>(SepString(sep), strings);
+}
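+
+// Example usage (illustrative only, not part of the original header):
+//   std::vector<std::string> parts = {"VK_KHR_surface", "VK_KHR_swapchain"};
+//   std::string csv = string_join(", ", parts);   // "VK_KHR_surface, VK_KHR_swapchain"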
+
+extern "C" {
+#endif
+
+#define VK_LAYER_API_VERSION VK_MAKE_VERSION(1, 0, VK_HEADER_VERSION)
+
+typedef enum VkStringErrorFlagBits {
+    VK_STRING_ERROR_NONE = 0x00000000,
+    VK_STRING_ERROR_LENGTH = 0x00000001,
+    VK_STRING_ERROR_BAD_DATA = 0x00000002,
+} VkStringErrorFlagBits;
+typedef VkFlags VkStringErrorFlags;
+
+VK_LAYER_EXPORT void layer_debug_report_actions(debug_report_data *report_data,
+                                                std::vector<VkDebugReportCallbackEXT> &logging_callback,
+                                                const VkAllocationCallbacks *pAllocator, const char *layer_identifier);
+
+VK_LAYER_EXPORT void layer_debug_messenger_actions(debug_report_data *report_data,
+                                                   std::vector<VkDebugUtilsMessengerEXT> &logging_messenger,
+                                                   const VkAllocationCallbacks *pAllocator, const char *layer_identifier);
+
+VK_LAYER_EXPORT VkStringErrorFlags vk_string_validate(const int max_length, const char *char_array);
+VK_LAYER_EXPORT bool white_list(const char *item, const char *whitelist);
+
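+// u_ffs returns the 1-based index of the lowest set bit, or 0 when val is 0;
+// for example, u_ffs(0x08) == 4 on both the Windows and POSIX paths below.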
+static inline int u_ffs(int val) {
+#ifdef WIN32
+    unsigned long bit_pos = 0;
+    if (_BitScanForward(&bit_pos, val) != 0) {
+        bit_pos += 1;
+    }
+    return bit_pos;
+#else
+    return ffs(val);
+#endif
+}
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/src/third_party/vulkan-loader/src/tests/layers/wrap_objects.cpp b/src/third_party/vulkan-loader/src/tests/layers/wrap_objects.cpp
new file mode 100644
index 0000000..4cd0a77
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/layers/wrap_objects.cpp
@@ -0,0 +1,1615 @@
+/*
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Jon Ashburn <jon@lunarg.com>
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <assert.h>
+#include "vk_loader_platform.h"
+#include "vulkan/vk_layer.h"
+#include "vk_dispatch_table_helper.h"
+#include "vk_layer_extension_utils.h"
+#include "vk_layer_utils.h"
+#include "wrap_objects.h"
+
+namespace wrap_objects {
+
+static const VkLayerProperties global_layer = {
+    "VK_LAYER_LUNARG_wrap_objects", VK_LAYER_API_VERSION, 1, "LunarG Test Layer",
+};
+
+static uint32_t loader_layer_if_version = CURRENT_LOADER_LAYER_INTERFACE_VERSION;
+
+// TODO: Add wrapping of VkDevice, VkQueue, VkCommandBuffer
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance)
+{
+    VkLayerInstanceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
+    PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
+    PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance) fpGetInstanceProcAddr(NULL, "vkCreateInstance");
+    if (fpCreateInstance == NULL) {
+        return VK_ERROR_INITIALIZATION_FAILED;
+    }
+    // Advance the link info for the next element on the chain
+    chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
+    VkResult result = fpCreateInstance(pCreateInfo, pAllocator, pInstance);
+    if (result != VK_SUCCESS) {
+        return result;
+    }
+    auto inst = new wrapped_inst_obj;
+    if (!inst)
+        return VK_ERROR_OUT_OF_HOST_MEMORY;
+    memset(inst, 0, sizeof(*inst));
+    inst->obj = (*pInstance);
+    *pInstance = reinterpret_cast<VkInstance> (inst);
+    // store the loader callback for initializing created dispatchable objects
+    chain_info = get_chain_info(pCreateInfo, VK_LOADER_DATA_CALLBACK);
+    if (chain_info) {
+        inst->pfn_inst_init = chain_info->u.pfnSetInstanceLoaderData;
+        result = inst->pfn_inst_init(inst->obj, reinterpret_cast<void *> (inst));
+        if (VK_SUCCESS != result)
+            return result;
+    } else {
+        inst->pfn_inst_init = NULL;
+        inst->loader_disp = *(reinterpret_cast<VkLayerInstanceDispatchTable **> (*pInstance));
+    }
+    layer_init_instance_dispatch_table(*pInstance, &inst->layer_disp, fpGetInstanceProcAddr);
+
+    return result;
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyInstance(VkInstance instance, const VkAllocationCallbacks* pAllocator)
+{
+    wrapped_inst_obj *inst;
+    auto vk_inst = unwrap_instance(instance, &inst);
+    VkLayerInstanceDispatchTable *pDisp  =  &inst->layer_disp;
+    pDisp->DestroyInstance(vk_inst, pAllocator);
+    if (inst->ptr_phys_devs)
+        delete[] inst->ptr_phys_devs;
+    delete inst;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDevices(VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices)
+{
+    wrapped_inst_obj *inst;
+    auto vk_inst = unwrap_instance(instance, &inst);
+    VkResult result = inst->layer_disp.EnumeratePhysicalDevices(vk_inst, pPhysicalDeviceCount, pPhysicalDevices);
+
+    if (VK_SUCCESS != result)
+        return result;
+
+    if (pPhysicalDevices != NULL) {
+        assert(pPhysicalDeviceCount);
+        auto phys_devs = new wrapped_phys_dev_obj[*pPhysicalDeviceCount];
+        if (!phys_devs)
+            return VK_ERROR_OUT_OF_HOST_MEMORY;
+        if (inst->ptr_phys_devs)
+            delete[] inst->ptr_phys_devs;
+        inst->ptr_phys_devs = phys_devs;
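+        // Initialize each wrapper so the loader can dispatch through it: either copy the
+        // loader's dispatch pointer directly or register it via the loader-data callback,
+        // then hand the wrapped handle back to the application in pPhysicalDevices.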
+        for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
+            if (inst->pfn_inst_init == NULL) {
+                phys_devs[i].loader_disp = *(reinterpret_cast<VkLayerInstanceDispatchTable **> (pPhysicalDevices[i]));
+            } else {
+                result = inst->pfn_inst_init(vk_inst, reinterpret_cast<void *> (&phys_devs[i]));
+                if (VK_SUCCESS != result)
+                    return result;
+
+            }
+            phys_devs[i].obj = reinterpret_cast<void *> (pPhysicalDevices[i]);
+            phys_devs[i].inst = inst;
+            pPhysicalDevices[i] = reinterpret_cast<VkPhysicalDevice> (&phys_devs[i]);
+        }
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures)
+{
+    wrapped_phys_dev_obj *phys_dev;
+    auto vk_phys_dev = unwrap_phys_dev(physicalDevice, &phys_dev);
+    phys_dev->inst->layer_disp.GetPhysicalDeviceFeatures(vk_phys_dev, pFeatures);
+}
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties)
+{
+    wrapped_phys_dev_obj *phys_dev;
+    auto vk_phys_dev = unwrap_phys_dev(physicalDevice, &phys_dev);
+    phys_dev->inst->layer_disp.GetPhysicalDeviceFormatProperties(vk_phys_dev, format, pFormatProperties);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties)
+{
+    wrapped_phys_dev_obj *phys_dev;
+    auto vk_phys_dev = unwrap_phys_dev(physicalDevice, &phys_dev);
+    VkResult result = phys_dev->inst->layer_disp.GetPhysicalDeviceImageFormatProperties(vk_phys_dev, format, type, tiling, usage, flags, pImageFormatProperties);
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties)
+{
+    wrapped_phys_dev_obj *phys_dev;
+    auto vk_phys_dev = unwrap_phys_dev(physicalDevice, &phys_dev);
+    phys_dev->inst->layer_disp.GetPhysicalDeviceProperties(vk_phys_dev, pProperties);
+}
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties)
+{
+    wrapped_phys_dev_obj *phys_dev;
+    auto vk_phys_dev = unwrap_phys_dev(physicalDevice, &phys_dev);
+    phys_dev->inst->layer_disp.GetPhysicalDeviceQueueFamilyProperties(vk_phys_dev, pQueueFamilyPropertyCount, pQueueFamilyProperties);
+}
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties)
+{
+    wrapped_phys_dev_obj *phys_dev;
+    auto vk_phys_dev = unwrap_phys_dev(physicalDevice, &phys_dev);
+    phys_dev->inst->layer_disp.GetPhysicalDeviceMemoryProperties(vk_phys_dev, pMemoryProperties);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice)
+{
+    wrapped_phys_dev_obj *phys_dev;
+    auto vk_phys_dev = unwrap_phys_dev(physicalDevice, &phys_dev);
+    VkLayerDeviceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
+    PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
+    PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
+    PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice)fpGetInstanceProcAddr(phys_dev->inst->obj, "vkCreateDevice");
+    if (fpCreateDevice == NULL) {
+        return VK_ERROR_INITIALIZATION_FAILED;
+    }
+    // Advance the link info for the next element on the chain
+    chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
+    VkResult result = fpCreateDevice(vk_phys_dev, pCreateInfo, pAllocator, pDevice);
+    if (result != VK_SUCCESS) {
+        return result;
+    }
+    initDeviceTable(*pDevice, fpGetDeviceProcAddr);
+
+#if 0 // TODO add once device is wrapped
+    // store the loader callback for initializing created dispatchable objects
+    chain_info = get_chain_info(pCreateInfo, VK_LOADER_DATA_CALLBACK);
+    if (chain_info) {
+        dev->pfn_dev_init = chain_info->u.pfnSetDeviceLoaderData;
+    } else {
+        dev->pfn_dev_init = NULL;
+    }
+#endif
+    return result;
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDevice(VkDevice device, const VkAllocationCallbacks* pAllocator)
+{
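+    // Capture the dispatch key before calling down the chain: after the driver destroys
+    // the VkDevice, the handle can no longer be dereferenced to recover the key.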
+    dispatch_key key = get_dispatch_key(device);
+    VkLayerDispatchTable *pDisp  =  device_dispatch_table(device);
+    pDisp->DestroyDevice(device, pAllocator);
+    destroy_device_dispatch_table(key);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties)
+{
+    wrapped_phys_dev_obj *phys_dev;
+    auto vk_phys_dev = unwrap_phys_dev(physicalDevice, &phys_dev);
+
+    if (pLayerName && !strcmp(pLayerName, global_layer.layerName))
+        return util_GetExtensionProperties(0, nullptr, pPropertyCount, pProperties);
+
+    return phys_dev->inst->layer_disp.EnumerateDeviceExtensionProperties(vk_phys_dev, pLayerName, pPropertyCount, pProperties);
+}
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue)
+{
+    device_dispatch_table(device)->GetDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence)
+{
+    VkResult result = device_dispatch_table(queue)->QueueSubmit(queue, submitCount, pSubmits, fence);
+    return result;
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle(VkQueue queue)
+{
+    VkResult result = device_dispatch_table(queue)->QueueWaitIdle(queue);
+    return result;
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle(VkDevice device)
+{
+    VkResult result = device_dispatch_table(device)->DeviceWaitIdle(device);
+    return result;
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAllocateMemory(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory)
+{
+    VkResult result = device_dispatch_table(device)->AllocateMemory(device, pAllocateInfo, pAllocator, pMemory);
+    return result;
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkFreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator)
+{
+    device_dispatch_table(device)->FreeMemory(device, memory, pAllocator);
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData)
+{
+    VkResult result = device_dispatch_table(device)->MapMemory(device, memory, offset, size, flags, ppData);
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL vkUnmapMemory(VkDevice device, VkDeviceMemory memory)
+{
+    device_dispatch_table(device)->UnmapMemory(device, memory);
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkFlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges)
+{
+    VkResult result = device_dispatch_table(device)->FlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
+    return result;
+}
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkInvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges)
+{
+    VkResult result = device_dispatch_table(device)->InvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
+    return result;
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes)
+{
+    device_dispatch_table(device)->GetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes);
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset)
+{
+    VkResult result = device_dispatch_table(device)->BindBufferMemory(device, buffer, memory, memoryOffset);
+    return result;
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset)
+{
+    VkResult result = device_dispatch_table(device)->BindImageMemory(device, image, memory, memoryOffset);
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements)
+{
+    device_dispatch_table(device)->GetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements(VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements)
+{
+    device_dispatch_table(device)->GetImageMemoryRequirements(device, image, pMemoryRequirements);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements)
+{
+    device_dispatch_table(device)->GetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties)
+{
+    wrapped_phys_dev_obj *phys_dev;
+    auto vk_phys_dev = unwrap_phys_dev(physicalDevice, &phys_dev);
+    phys_dev->inst->layer_disp.GetPhysicalDeviceSparseImageFormatProperties(vk_phys_dev, format, type, samples, usage, tiling, pPropertyCount, pProperties);
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence)
+{
+    VkResult result = device_dispatch_table(queue)->QueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
+    return result;
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateFence(VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence)
+{
+    VkResult result = device_dispatch_table(device)->CreateFence(device, pCreateInfo, pAllocator, pFence);
+    return result;
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator)
+{
+    device_dispatch_table(device)->DestroyFence(device, fence, pAllocator);
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkResetFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences)
+{
+    VkResult result = device_dispatch_table(device)->ResetFences(device, fenceCount, pFences);
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceStatus(VkDevice device, VkFence fence)
+{
+    VkResult result = device_dispatch_table(device)->GetFenceStatus(device, fence);
+    return result;
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout)
+{
+    VkResult result = device_dispatch_table(device)->WaitForFences(device, fenceCount, pFences, waitAll, timeout);
+    return result;
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore)
+{
+    VkResult result = device_dispatch_table(device)->CreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore);
+    return result;
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkDestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator)
+{
+    device_dispatch_table(device)->DestroySemaphore(device, semaphore, pAllocator);
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateEvent(VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent)
+{
+    VkResult result = device_dispatch_table(device)->CreateEvent(device, pCreateInfo, pAllocator, pEvent);
+    return result;
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator)
+{
+    device_dispatch_table(device)->DestroyEvent(device, event, pAllocator);
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetEventStatus(VkDevice device, VkEvent event)
+{
+    VkResult result = device_dispatch_table(device)->GetEventStatus(device, event);
+    return result;
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkSetEvent(VkDevice device, VkEvent event)
+{
+    VkResult result = device_dispatch_table(device)->SetEvent(device, event);
+    return result;
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkResetEvent(VkDevice device, VkEvent event)
+{
+    VkResult result = device_dispatch_table(device)->ResetEvent(device, event);
+    return result;
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool)
+{
+    VkResult result = device_dispatch_table(device)->CreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool);
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator)
+{
+    device_dispatch_table(device)->DestroyQueryPool(device, queryPool, pAllocator);
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags)
+{
+    VkResult result = device_dispatch_table(device)->GetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags);
+    return result;
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateBuffer(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer)
+{
+    VkResult result = device_dispatch_table(device)->CreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
+    return result;
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator)
+{
+    device_dispatch_table(device)->DestroyBuffer(device, buffer, pAllocator);
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferView(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView)
+{
+    VkResult result = device_dispatch_table(device)->CreateBufferView(device, pCreateInfo, pAllocator, pView);
+    return result;
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator)
+{
+    device_dispatch_table(device)->DestroyBufferView(device, bufferView, pAllocator);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateImage(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage)
+{
+    VkResult result = device_dispatch_table(device)->CreateImage(device, pCreateInfo, pAllocator, pImage);
+    return result;
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator)
+{
+    device_dispatch_table(device)->DestroyImage(device, image, pAllocator);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout)
+{
+    device_dispatch_table(device)->GetImageSubresourceLayout(device, image, pSubresource, pLayout);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateImageView(VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView)
+{
+    VkResult result = device_dispatch_table(device)->CreateImageView(device, pCreateInfo, pAllocator, pView);
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator)
+{
+    device_dispatch_table(device)->DestroyImageView(device, imageView, pAllocator);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule)
+{
+    VkResult result = device_dispatch_table(device)->CreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
+    return result;
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator)
+{
+    device_dispatch_table(device)->DestroyShaderModule(device, shaderModule, pAllocator);
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineCache(VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache)
+{
+    VkResult result = device_dispatch_table(device)->CreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache);
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator)
+{
+    device_dispatch_table(device)->DestroyPipelineCache(device, pipelineCache, pAllocator);
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData)
+{
+    VkResult result = device_dispatch_table(device)->GetPipelineCacheData(device, pipelineCache, pDataSize, pData);
+    return result;
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkMergePipelineCaches(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches)
+{
+    VkResult result = device_dispatch_table(device)->MergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches);
+    return result;
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
+{
+    VkResult result = device_dispatch_table(device)->CreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+    return result;
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
+{
+    VkResult result = device_dispatch_table(device)->CreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+    return result;
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator)
+{
+    device_dispatch_table(device)->DestroyPipeline(device, pipeline, pAllocator);
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout)
+{
+    VkResult result = device_dispatch_table(device)->CreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout);
+    return result;
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator)
+{
+    device_dispatch_table(device)->DestroyPipelineLayout(device, pipelineLayout, pAllocator);
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateSampler(VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler)
+{
+    VkResult result = device_dispatch_table(device)->CreateSampler(device, pCreateInfo, pAllocator, pSampler);
+    return result;
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkDestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator)
+{
+    device_dispatch_table(device)->DestroySampler(device, sampler, pAllocator);
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout)
+{
+    VkResult result = device_dispatch_table(device)->CreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);
+    return result;
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator)
+{
+    device_dispatch_table(device)->DestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool)
+{
+    VkResult result = device_dispatch_table(device)->CreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator)
+{
+    device_dispatch_table(device)->DestroyDescriptorPool(device, descriptorPool, pAllocator);
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags)
+{
+    VkResult result = device_dispatch_table(device)->ResetDescriptorPool(device, descriptorPool, flags);
+    return result;
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets)
+{
+    VkResult result = device_dispatch_table(device)->AllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
+    return result;
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets)
+{
+    VkResult result = device_dispatch_table(device)->FreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);
+    return result;
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies)
+{
+    device_dispatch_table(device)->UpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer)
+{
+    VkResult result = device_dispatch_table(device)->CreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
+    return result;
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator)
+{
+    device_dispatch_table(device)->DestroyFramebuffer(device, framebuffer, pAllocator);
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass)
+{
+    VkResult result = device_dispatch_table(device)->CreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator)
+{
+    device_dispatch_table(device)->DestroyRenderPass(device, renderPass, pAllocator);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkGetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity)
+{
+    device_dispatch_table(device)->GetRenderAreaGranularity(device, renderPass, pGranularity);
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool)
+{
+    VkResult result = device_dispatch_table(device)->CreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
+    return result;
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator)
+{
+    device_dispatch_table(device)->DestroyCommandPool(device, commandPool, pAllocator);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags)
+{
+    VkResult result = device_dispatch_table(device)->ResetCommandPool(device, commandPool, flags);
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers)
+{
+    VkResult result = device_dispatch_table(device)->AllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers);
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL vkFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers)
+{
+    device_dispatch_table(device)->FreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo)
+{
+    VkResult result = device_dispatch_table(commandBuffer)->BeginCommandBuffer(commandBuffer, pBeginInfo);
+    return result;
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEndCommandBuffer(VkCommandBuffer commandBuffer)
+{
+    VkResult result = device_dispatch_table(commandBuffer)->EndCommandBuffer(commandBuffer);
+    return result;
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags)
+{
+    VkResult result = device_dispatch_table(commandBuffer)->ResetCommandBuffer(commandBuffer, flags);
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline)
+{
+    device_dispatch_table(commandBuffer)->CmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports)
+{
+    device_dispatch_table(commandBuffer)->CmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors)
+{
+    device_dispatch_table(commandBuffer)->CmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth)
+{
+    device_dispatch_table(commandBuffer)->CmdSetLineWidth(commandBuffer, lineWidth);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor)
+{
+    device_dispatch_table(commandBuffer)->CmdSetDepthBias(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4])
+{
+    device_dispatch_table(commandBuffer)->CmdSetBlendConstants(commandBuffer, blendConstants);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds)
+{
+    device_dispatch_table(commandBuffer)->CmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask)
+{
+    device_dispatch_table(commandBuffer)->CmdSetStencilCompareMask(commandBuffer, faceMask, compareMask);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask)
+{
+    device_dispatch_table(commandBuffer)->CmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference)
+{
+    device_dispatch_table(commandBuffer)->CmdSetStencilReference(commandBuffer, faceMask, reference);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets)
+{
+    device_dispatch_table(commandBuffer)->CmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType)
+{
+    device_dispatch_table(commandBuffer)->CmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets)
+{
+    device_dispatch_table(commandBuffer)->CmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance)
+{
+    device_dispatch_table(commandBuffer)->CmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance)
+{
+    device_dispatch_table(commandBuffer)->CmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride)
+{
+    device_dispatch_table(commandBuffer)->CmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride)
+{
+    device_dispatch_table(commandBuffer)->CmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z)
+{
+    device_dispatch_table(commandBuffer)->CmdDispatch(commandBuffer, x, y, z);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset)
+{
+    device_dispatch_table(commandBuffer)->CmdDispatchIndirect(commandBuffer, buffer, offset);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions)
+{
+    device_dispatch_table(commandBuffer)->CmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions)
+{
+    device_dispatch_table(commandBuffer)->CmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter)
+{
+    device_dispatch_table(commandBuffer)->CmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions)
+{
+    device_dispatch_table(commandBuffer)->CmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
+}
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions)
+{
+    device_dispatch_table(commandBuffer)->CmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const uint32_t* pData)
+{
+    device_dispatch_table(commandBuffer)->CmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
+}
+
+VKAPI_ATTR void VKAPI_CALL vkCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data)
+{
+    device_dispatch_table(commandBuffer)->CmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges)
+{
+    device_dispatch_table(commandBuffer)->CmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
+}
+
+VKAPI_ATTR void VKAPI_CALL vkCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges)
+{
+    device_dispatch_table(commandBuffer)->CmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects)
+{
+    device_dispatch_table(commandBuffer)->CmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions)
+{
+    device_dispatch_table(commandBuffer)->CmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask)
+{
+    device_dispatch_table(commandBuffer)->CmdSetEvent(commandBuffer, event, stageMask);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask)
+{
+    device_dispatch_table(commandBuffer)->CmdResetEvent(commandBuffer, event, stageMask);
+}
+
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers)
+{
+    device_dispatch_table(commandBuffer)->CmdWaitEvents(commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+}
+
+VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers)
+{
+    device_dispatch_table(commandBuffer)->CmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+}
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags)
+{
+    device_dispatch_table(commandBuffer)->CmdBeginQuery(commandBuffer, queryPool, query, flags);
+}
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query)
+{
+    device_dispatch_table(commandBuffer)->CmdEndQuery(commandBuffer, queryPool, query);
+}
+
+VKAPI_ATTR void VKAPI_CALL vkCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount)
+{
+    device_dispatch_table(commandBuffer)->CmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
+}
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query)
+{
+    device_dispatch_table(commandBuffer)->CmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query);
+}
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags)
+{
+    device_dispatch_table(commandBuffer)->CmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);
+}
+
+VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues)
+{
+    device_dispatch_table(commandBuffer)->CmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues);
+}
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents)
+{
+    device_dispatch_table(commandBuffer)->CmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
+}
+
+VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents)
+{
+    device_dispatch_table(commandBuffer)->CmdNextSubpass(commandBuffer, contents);
+}
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass(VkCommandBuffer commandBuffer)
+{
+    device_dispatch_table(commandBuffer)->CmdEndRenderPass(commandBuffer);
+}
+
+VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers)
+{
+    device_dispatch_table(commandBuffer)->CmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers);
+}
+
+VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator)
+{
+    wrapped_inst_obj *inst;
+    auto vk_inst = unwrap_instance(instance, &inst);
+    inst->layer_disp.DestroySurfaceKHR(vk_inst, surface, pAllocator);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported)
+{
+    wrapped_phys_dev_obj *phys_dev;
+    auto vk_phys_dev = unwrap_phys_dev(physicalDevice, &phys_dev);
+    VkResult result = phys_dev->inst->layer_disp.GetPhysicalDeviceSurfaceSupportKHR(vk_phys_dev, queueFamilyIndex, surface, pSupported);
+    return result;
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities)
+{
+    wrapped_phys_dev_obj *phys_dev;
+    auto vk_phys_dev = unwrap_phys_dev(physicalDevice, &phys_dev);
+    VkResult result = phys_dev->inst->layer_disp.GetPhysicalDeviceSurfaceCapabilitiesKHR(vk_phys_dev, surface, pSurfaceCapabilities);
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats)
+{
+    wrapped_phys_dev_obj *phys_dev;
+    auto vk_phys_dev = unwrap_phys_dev(physicalDevice, &phys_dev);
+    VkResult result = phys_dev->inst->layer_disp.GetPhysicalDeviceSurfaceFormatsKHR(vk_phys_dev, surface, pSurfaceFormatCount, pSurfaceFormats);
+    return result;
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes)
+{
+    wrapped_phys_dev_obj *phys_dev;
+    auto vk_phys_dev = unwrap_phys_dev(physicalDevice, &phys_dev);
+    VkResult result = phys_dev->inst->layer_disp.GetPhysicalDeviceSurfacePresentModesKHR(vk_phys_dev, surface, pPresentModeCount, pPresentModes);
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain)
+{
+    VkResult result = device_dispatch_table(device)->CreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain);
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator)
+{
+    device_dispatch_table(device)->DestroySwapchainKHR(device, swapchain, pAllocator);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages)
+{
+    VkResult result = device_dispatch_table(device)->GetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex)
+{
+    VkResult result = device_dispatch_table(device)->AcquireNextImageKHR(device, swapchain, timeout, semaphore, fence, pImageIndex);
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR* pPresentInfo)
+{
+    VkResult result = device_dispatch_table(queue)->QueuePresentKHR(queue, pPresentInfo);
+    return result;
+}
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+VKAPI_ATTR VkResult VKAPI_CALL
+vkCreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
+                        const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
+    VkResult result;
+    wrapped_inst_obj *inst;
+    auto vk_inst = unwrap_instance(instance, &inst);
+    result = inst->layer_disp.CreateWin32SurfaceKHR(vk_inst, pCreateInfo, pAllocator, pSurface);
+    return result;
+}
+
+VKAPI_ATTR VkBool32 VKAPI_CALL
+vkGetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex) {
+    VkBool32 result;
+    wrapped_phys_dev_obj *phys_dev;
+    auto vk_phys_dev = unwrap_phys_dev(physicalDevice, &phys_dev);
+    result = phys_dev->inst->layer_disp.GetPhysicalDeviceWin32PresentationSupportKHR(vk_phys_dev, queueFamilyIndex);
+    return result;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface)
+{
+    wrapped_inst_obj *inst;
+    auto vk_inst = unwrap_instance(instance, &inst);
+    VkResult result = inst->layer_disp.CreateXcbSurfaceKHR(vk_inst, pCreateInfo, pAllocator, pSurface);
+    return result;
+}
+
+VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id)
+{
+    wrapped_phys_dev_obj *phys_dev;
+    auto vk_phys_dev = unwrap_phys_dev(physicalDevice, &phys_dev);
+    VkBool32 result = phys_dev->inst->layer_disp.GetPhysicalDeviceXcbPresentationSupportKHR(vk_phys_dev, queueFamilyIndex, connection, visual_id);
+    return result;
+}
+#endif  // VK_USE_PLATFORM_XCB_KHR
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface)
+{
+    wrapped_inst_obj *inst;
+    auto vk_inst = unwrap_instance(instance, &inst);
+    VkResult result = inst->layer_disp.CreateXlibSurfaceKHR(vk_inst, pCreateInfo, pAllocator, pSurface);
+    return result;
+}
+
+VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID)
+{
+    wrapped_phys_dev_obj *phys_dev;
+    auto vk_phys_dev = unwrap_phys_dev(physicalDevice, &phys_dev);
+    VkBool32 result = phys_dev->inst->layer_disp.GetPhysicalDeviceXlibPresentationSupportKHR(vk_phys_dev, queueFamilyIndex, dpy, visualID);
+    return result;
+}
+#endif  // VK_USE_PLATFORM_XLIB_KHR
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateWaylandSurfaceKHR(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface)
+{
+    wrapped_inst_obj *inst;
+    auto vk_inst = unwrap_instance(instance, &inst);
+    VkResult result = inst->layer_disp.CreateWaylandSurfaceKHR(vk_inst, pCreateInfo, pAllocator, pSurface);
+    return result;
+}
+
+VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display)
+{
+    wrapped_phys_dev_obj *phys_dev;
+    auto vk_phys_dev = unwrap_phys_dev(physicalDevice, &phys_dev);
+    VkBool32 result = phys_dev->inst->layer_disp.GetPhysicalDeviceWaylandPresentationSupportKHR(vk_phys_dev, queueFamilyIndex, display);
+    return result;
+}
+#endif  // VK_USE_PLATFORM_WAYLAND_KHR
+
+VKAPI_ATTR VkResult VKAPI_CALL
+vkCreateDebugReportCallbackEXT(VkInstance instance, const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
+                             const VkAllocationCallbacks *pAllocator, VkDebugReportCallbackEXT *pMsgCallback) {
+    wrapped_inst_obj *inst;
+    auto vk_inst = unwrap_instance(instance, &inst);
+
+    VkResult res = inst->layer_disp.CreateDebugReportCallbackEXT(vk_inst, pCreateInfo, pAllocator, pMsgCallback);
+    return res;
+}
+
+VKAPI_ATTR void VKAPI_CALL
+vkDestroyDebugReportCallbackEXT(VkInstance instance,
+                              VkDebugReportCallbackEXT msgCallback,
+                              const VkAllocationCallbacks *pAllocator) {
+    wrapped_inst_obj *inst;
+    auto vk_inst = unwrap_instance(instance, &inst);
+    inst->layer_disp.DestroyDebugReportCallbackEXT(vk_inst, msgCallback, pAllocator);
+}
+
+VKAPI_ATTR void VKAPI_CALL
+vkDebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objType, uint64_t object,
+                      size_t location, int32_t msgCode, const char *pLayerPrefix, const char *pMsg) {
+    wrapped_inst_obj *inst;
+    auto vk_inst = unwrap_instance(instance, &inst);
+    inst->layer_disp.DebugReportMessageEXT(vk_inst, flags, objType, object, location, msgCode, pLayerPrefix,
+                                                            pMsg);
+}
+
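+// Maps a core Vulkan entry point name (e.g. "vkCmdDraw") to this layer's wrapper.
+// Returns NULL for functions the layer does not intercept.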
+static inline PFN_vkVoidFunction layer_intercept_proc(const char *name)
+{
+    if (!name || name[0] != 'v' || name[1] != 'k')
+        return NULL;
+
+    name += 2;
+    if (!strcmp(name, "CreateInstance"))
+        return (PFN_vkVoidFunction) vkCreateInstance;
+    if (!strcmp(name, "DestroyInstance"))
+        return (PFN_vkVoidFunction) vkDestroyInstance;
+    if (!strcmp(name, "EnumeratePhysicalDevices"))
+        return (PFN_vkVoidFunction) vkEnumeratePhysicalDevices;
+    if (!strcmp(name, "GetPhysicalDeviceFeatures"))
+        return (PFN_vkVoidFunction) vkGetPhysicalDeviceFeatures;
+    if (!strcmp(name, "GetPhysicalDeviceFormatProperties"))
+        return (PFN_vkVoidFunction) vkGetPhysicalDeviceFormatProperties;
+    if (!strcmp(name, "GetPhysicalDeviceImageFormatProperties"))
+        return (PFN_vkVoidFunction) vkGetPhysicalDeviceImageFormatProperties;
+    if (!strcmp(name, "GetPhysicalDeviceProperties"))
+        return (PFN_vkVoidFunction) vkGetPhysicalDeviceProperties;
+    if (!strcmp(name, "GetPhysicalDeviceQueueFamilyProperties"))
+        return (PFN_vkVoidFunction) vkGetPhysicalDeviceQueueFamilyProperties;
+    if (!strcmp(name, "GetPhysicalDeviceMemoryProperties"))
+        return (PFN_vkVoidFunction) vkGetPhysicalDeviceMemoryProperties;
+    if (!strcmp(name, "CreateDevice"))
+        return (PFN_vkVoidFunction) vkCreateDevice;
+    if (!strcmp(name, "DestroyDevice"))
+        return (PFN_vkVoidFunction) vkDestroyDevice;
+    if (!strcmp(name, "GetDeviceQueue"))
+        return (PFN_vkVoidFunction) vkGetDeviceQueue;
+    if (!strcmp(name, "QueueSubmit"))
+        return (PFN_vkVoidFunction) vkQueueSubmit;
+    if (!strcmp(name, "QueueWaitIdle"))
+        return (PFN_vkVoidFunction) vkQueueWaitIdle;
+    if (!strcmp(name, "DeviceWaitIdle"))
+        return (PFN_vkVoidFunction) vkDeviceWaitIdle;
+    if (!strcmp(name, "AllocateMemory"))
+        return (PFN_vkVoidFunction) vkAllocateMemory;
+    if (!strcmp(name, "FreeMemory"))
+        return (PFN_vkVoidFunction) vkFreeMemory;
+    if (!strcmp(name, "MapMemory"))
+        return (PFN_vkVoidFunction) vkMapMemory;
+    if (!strcmp(name, "UnmapMemory"))
+        return (PFN_vkVoidFunction) vkUnmapMemory;
+    if (!strcmp(name, "FlushMappedMemoryRanges"))
+        return (PFN_vkVoidFunction) vkFlushMappedMemoryRanges;
+    if (!strcmp(name, "InvalidateMappedMemoryRanges"))
+        return (PFN_vkVoidFunction) vkInvalidateMappedMemoryRanges;
+    if (!strcmp(name, "GetDeviceMemoryCommitment"))
+        return (PFN_vkVoidFunction) vkGetDeviceMemoryCommitment;
+    if (!strcmp(name, "BindBufferMemory"))
+        return (PFN_vkVoidFunction) vkBindBufferMemory;
+    if (!strcmp(name, "BindImageMemory"))
+        return (PFN_vkVoidFunction) vkBindImageMemory;
+    if (!strcmp(name, "GetBufferMemoryRequirements"))
+        return (PFN_vkVoidFunction) vkGetBufferMemoryRequirements;
+    if (!strcmp(name, "GetImageMemoryRequirements"))
+        return (PFN_vkVoidFunction) vkGetImageMemoryRequirements;
+    if (!strcmp(name, "GetImageSparseMemoryRequirements"))
+        return (PFN_vkVoidFunction) vkGetImageSparseMemoryRequirements;
+    if (!strcmp(name, "GetPhysicalDeviceSparseImageFormatProperties"))
+        return (PFN_vkVoidFunction) vkGetPhysicalDeviceSparseImageFormatProperties;
+    if (!strcmp(name, "QueueBindSparse"))
+        return (PFN_vkVoidFunction) vkQueueBindSparse;
+    if (!strcmp(name, "CreateFence"))
+        return (PFN_vkVoidFunction) vkCreateFence;
+    if (!strcmp(name, "DestroyFence"))
+        return (PFN_vkVoidFunction) vkDestroyFence;
+    if (!strcmp(name, "ResetFences"))
+        return (PFN_vkVoidFunction) vkResetFences;
+    if (!strcmp(name, "GetFenceStatus"))
+        return (PFN_vkVoidFunction) vkGetFenceStatus;
+    if (!strcmp(name, "WaitForFences"))
+        return (PFN_vkVoidFunction) vkWaitForFences;
+    if (!strcmp(name, "CreateSemaphore"))
+        return (PFN_vkVoidFunction) vkCreateSemaphore;
+    if (!strcmp(name, "DestroySemaphore"))
+        return (PFN_vkVoidFunction) vkDestroySemaphore;
+    if (!strcmp(name, "CreateEvent"))
+        return (PFN_vkVoidFunction) vkCreateEvent;
+    if (!strcmp(name, "DestroyEvent"))
+        return (PFN_vkVoidFunction) vkDestroyEvent;
+    if (!strcmp(name, "GetEventStatus"))
+        return (PFN_vkVoidFunction) vkGetEventStatus;
+    if (!strcmp(name, "SetEvent"))
+        return (PFN_vkVoidFunction) vkSetEvent;
+    if (!strcmp(name, "ResetEvent"))
+        return (PFN_vkVoidFunction) vkResetEvent;
+    if (!strcmp(name, "CreateQueryPool"))
+        return (PFN_vkVoidFunction) vkCreateQueryPool;
+    if (!strcmp(name, "DestroyQueryPool"))
+        return (PFN_vkVoidFunction) vkDestroyQueryPool;
+    if (!strcmp(name, "GetQueryPoolResults"))
+        return (PFN_vkVoidFunction) vkGetQueryPoolResults;
+    if (!strcmp(name, "CreateBuffer"))
+        return (PFN_vkVoidFunction) vkCreateBuffer;
+    if (!strcmp(name, "DestroyBuffer"))
+        return (PFN_vkVoidFunction) vkDestroyBuffer;
+    if (!strcmp(name, "CreateBufferView"))
+        return (PFN_vkVoidFunction) vkCreateBufferView;
+    if (!strcmp(name, "DestroyBufferView"))
+        return (PFN_vkVoidFunction) vkDestroyBufferView;
+    if (!strcmp(name, "CreateImage"))
+        return (PFN_vkVoidFunction) vkCreateImage;
+    if (!strcmp(name, "DestroyImage"))
+        return (PFN_vkVoidFunction) vkDestroyImage;
+    if (!strcmp(name, "GetImageSubresourceLayout"))
+        return (PFN_vkVoidFunction) vkGetImageSubresourceLayout;
+    if (!strcmp(name, "CreateImageView"))
+        return (PFN_vkVoidFunction) vkCreateImageView;
+    if (!strcmp(name, "DestroyImageView"))
+        return (PFN_vkVoidFunction) vkDestroyImageView;
+    if (!strcmp(name, "CreateShaderModule"))
+        return (PFN_vkVoidFunction) vkCreateShaderModule;
+    if (!strcmp(name, "DestroyShaderModule"))
+        return (PFN_vkVoidFunction) vkDestroyShaderModule;
+    if (!strcmp(name, "CreatePipelineCache"))
+        return (PFN_vkVoidFunction) vkCreatePipelineCache;
+    if (!strcmp(name, "DestroyPipelineCache"))
+        return (PFN_vkVoidFunction) vkDestroyPipelineCache;
+    if (!strcmp(name, "GetPipelineCacheData"))
+        return (PFN_vkVoidFunction) vkGetPipelineCacheData;
+    if (!strcmp(name, "MergePipelineCaches"))
+        return (PFN_vkVoidFunction) vkMergePipelineCaches;
+    if (!strcmp(name, "CreateGraphicsPipelines"))
+        return (PFN_vkVoidFunction) vkCreateGraphicsPipelines;
+    if (!strcmp(name, "CreateComputePipelines"))
+        return (PFN_vkVoidFunction) vkCreateComputePipelines;
+    if (!strcmp(name, "DestroyPipeline"))
+        return (PFN_vkVoidFunction) vkDestroyPipeline;
+    if (!strcmp(name, "CreatePipelineLayout"))
+        return (PFN_vkVoidFunction) vkCreatePipelineLayout;
+    if (!strcmp(name, "DestroyPipelineLayout"))
+        return (PFN_vkVoidFunction) vkDestroyPipelineLayout;
+    if (!strcmp(name, "CreateSampler"))
+        return (PFN_vkVoidFunction) vkCreateSampler;
+    if (!strcmp(name, "DestroySampler"))
+        return (PFN_vkVoidFunction) vkDestroySampler;
+    if (!strcmp(name, "CreateDescriptorSetLayout"))
+        return (PFN_vkVoidFunction) vkCreateDescriptorSetLayout;
+    if (!strcmp(name, "DestroyDescriptorSetLayout"))
+        return (PFN_vkVoidFunction) vkDestroyDescriptorSetLayout;
+    if (!strcmp(name, "CreateDescriptorPool"))
+        return (PFN_vkVoidFunction) vkCreateDescriptorPool;
+    if (!strcmp(name, "DestroyDescriptorPool"))
+        return (PFN_vkVoidFunction) vkDestroyDescriptorPool;
+    if (!strcmp(name, "ResetDescriptorPool"))
+        return (PFN_vkVoidFunction) vkResetDescriptorPool;
+    if (!strcmp(name, "AllocateDescriptorSets"))
+        return (PFN_vkVoidFunction) vkAllocateDescriptorSets;
+    if (!strcmp(name, "FreeDescriptorSets"))
+        return (PFN_vkVoidFunction) vkFreeDescriptorSets;
+    if (!strcmp(name, "UpdateDescriptorSets"))
+        return (PFN_vkVoidFunction) vkUpdateDescriptorSets;
+    if (!strcmp(name, "CreateFramebuffer"))
+        return (PFN_vkVoidFunction) vkCreateFramebuffer;
+    if (!strcmp(name, "DestroyFramebuffer"))
+        return (PFN_vkVoidFunction) vkDestroyFramebuffer;
+    if (!strcmp(name, "CreateRenderPass"))
+        return (PFN_vkVoidFunction) vkCreateRenderPass;
+    if (!strcmp(name, "DestroyRenderPass"))
+        return (PFN_vkVoidFunction) vkDestroyRenderPass;
+    if (!strcmp(name, "GetRenderAreaGranularity"))
+        return (PFN_vkVoidFunction) vkGetRenderAreaGranularity;
+    if (!strcmp(name, "CreateCommandPool"))
+        return (PFN_vkVoidFunction) vkCreateCommandPool;
+    if (!strcmp(name, "DestroyCommandPool"))
+        return (PFN_vkVoidFunction) vkDestroyCommandPool;
+    if (!strcmp(name, "ResetCommandPool"))
+        return (PFN_vkVoidFunction) vkResetCommandPool;
+    if (!strcmp(name, "AllocateCommandBuffers"))
+        return (PFN_vkVoidFunction) vkAllocateCommandBuffers;
+    if (!strcmp(name, "FreeCommandBuffers"))
+        return (PFN_vkVoidFunction) vkFreeCommandBuffers;
+    if (!strcmp(name, "BeginCommandBuffer"))
+        return (PFN_vkVoidFunction) vkBeginCommandBuffer;
+    if (!strcmp(name, "EndCommandBuffer"))
+        return (PFN_vkVoidFunction) vkEndCommandBuffer;
+    if (!strcmp(name, "ResetCommandBuffer"))
+        return (PFN_vkVoidFunction) vkResetCommandBuffer;
+    if (!strcmp(name, "CmdBindPipeline"))
+        return (PFN_vkVoidFunction) vkCmdBindPipeline;
+    if (!strcmp(name, "CmdSetViewport"))
+        return (PFN_vkVoidFunction) vkCmdSetViewport;
+    if (!strcmp(name, "CmdSetScissor"))
+        return (PFN_vkVoidFunction) vkCmdSetScissor;
+    if (!strcmp(name, "CmdSetLineWidth"))
+        return (PFN_vkVoidFunction) vkCmdSetLineWidth;
+    if (!strcmp(name, "CmdSetDepthBias"))
+        return (PFN_vkVoidFunction) vkCmdSetDepthBias;
+    if (!strcmp(name, "CmdSetBlendConstants"))
+        return (PFN_vkVoidFunction) vkCmdSetBlendConstants;
+    if (!strcmp(name, "CmdSetDepthBounds"))
+        return (PFN_vkVoidFunction) vkCmdSetDepthBounds;
+    if (!strcmp(name, "CmdSetStencilCompareMask"))
+        return (PFN_vkVoidFunction) vkCmdSetStencilCompareMask;
+    if (!strcmp(name, "CmdSetStencilWriteMask"))
+        return (PFN_vkVoidFunction) vkCmdSetStencilWriteMask;
+    if (!strcmp(name, "CmdSetStencilReference"))
+        return (PFN_vkVoidFunction) vkCmdSetStencilReference;
+    if (!strcmp(name, "CmdBindDescriptorSets"))
+        return (PFN_vkVoidFunction) vkCmdBindDescriptorSets;
+    if (!strcmp(name, "CmdBindIndexBuffer"))
+        return (PFN_vkVoidFunction) vkCmdBindIndexBuffer;
+    if (!strcmp(name, "CmdBindVertexBuffers"))
+        return (PFN_vkVoidFunction) vkCmdBindVertexBuffers;
+    if (!strcmp(name, "CmdDraw"))
+        return (PFN_vkVoidFunction) vkCmdDraw;
+    if (!strcmp(name, "CmdDrawIndexed"))
+        return (PFN_vkVoidFunction) vkCmdDrawIndexed;
+    if (!strcmp(name, "CmdDrawIndirect"))
+        return (PFN_vkVoidFunction) vkCmdDrawIndirect;
+    if (!strcmp(name, "CmdDrawIndexedIndirect"))
+        return (PFN_vkVoidFunction) vkCmdDrawIndexedIndirect;
+    if (!strcmp(name, "CmdDispatch"))
+        return (PFN_vkVoidFunction) vkCmdDispatch;
+    if (!strcmp(name, "CmdDispatchIndirect"))
+        return (PFN_vkVoidFunction) vkCmdDispatchIndirect;
+    if (!strcmp(name, "CmdCopyBuffer"))
+        return (PFN_vkVoidFunction) vkCmdCopyBuffer;
+    if (!strcmp(name, "CmdCopyImage"))
+        return (PFN_vkVoidFunction) vkCmdCopyImage;
+    if (!strcmp(name, "CmdBlitImage"))
+        return (PFN_vkVoidFunction) vkCmdBlitImage;
+    if (!strcmp(name, "CmdCopyBufferToImage"))
+        return (PFN_vkVoidFunction) vkCmdCopyBufferToImage;
+    if (!strcmp(name, "CmdCopyImageToBuffer"))
+        return (PFN_vkVoidFunction) vkCmdCopyImageToBuffer;
+    if (!strcmp(name, "CmdUpdateBuffer"))
+        return (PFN_vkVoidFunction) vkCmdUpdateBuffer;
+    if (!strcmp(name, "CmdFillBuffer"))
+        return (PFN_vkVoidFunction) vkCmdFillBuffer;
+    if (!strcmp(name, "CmdClearColorImage"))
+        return (PFN_vkVoidFunction) vkCmdClearColorImage;
+    if (!strcmp(name, "CmdClearDepthStencilImage"))
+        return (PFN_vkVoidFunction) vkCmdClearDepthStencilImage;
+    if (!strcmp(name, "CmdClearAttachments"))
+        return (PFN_vkVoidFunction) vkCmdClearAttachments;
+    if (!strcmp(name, "CmdResolveImage"))
+        return (PFN_vkVoidFunction) vkCmdResolveImage;
+    if (!strcmp(name, "CmdSetEvent"))
+        return (PFN_vkVoidFunction) vkCmdSetEvent;
+    if (!strcmp(name, "CmdResetEvent"))
+        return (PFN_vkVoidFunction) vkCmdResetEvent;
+    if (!strcmp(name, "CmdWaitEvents"))
+        return (PFN_vkVoidFunction) vkCmdWaitEvents;
+    if (!strcmp(name, "CmdPipelineBarrier"))
+        return (PFN_vkVoidFunction) vkCmdPipelineBarrier;
+    if (!strcmp(name, "CmdBeginQuery"))
+        return (PFN_vkVoidFunction) vkCmdBeginQuery;
+    if (!strcmp(name, "CmdEndQuery"))
+        return (PFN_vkVoidFunction) vkCmdEndQuery;
+    if (!strcmp(name, "CmdResetQueryPool"))
+        return (PFN_vkVoidFunction) vkCmdResetQueryPool;
+    if (!strcmp(name, "CmdWriteTimestamp"))
+        return (PFN_vkVoidFunction) vkCmdWriteTimestamp;
+    if (!strcmp(name, "CmdCopyQueryPoolResults"))
+        return (PFN_vkVoidFunction) vkCmdCopyQueryPoolResults;
+    if (!strcmp(name, "CmdPushConstants"))
+        return (PFN_vkVoidFunction) vkCmdPushConstants;
+    if (!strcmp(name, "CmdBeginRenderPass"))
+        return (PFN_vkVoidFunction) vkCmdBeginRenderPass;
+    if (!strcmp(name, "CmdNextSubpass"))
+        return (PFN_vkVoidFunction) vkCmdNextSubpass;
+    if (!strcmp(name, "CmdEndRenderPass"))
+        return (PFN_vkVoidFunction) vkCmdEndRenderPass;
+    if (!strcmp(name, "CmdExecuteCommands"))
+        return (PFN_vkVoidFunction) vkCmdExecuteCommands;
+
+    return NULL;
+}
+
+static inline PFN_vkVoidFunction layer_intercept_instance_proc(const char *name)
+{
+    if (!name || name[0] != 'v' || name[1] != 'k')
+        return NULL;
+
+    name += 2;
+    if (!strcmp(name, "GetInstanceProcAddr"))
+        return (PFN_vkVoidFunction)vkGetInstanceProcAddr;
+    if (!strcmp(name, "DestroyInstance"))
+        return (PFN_vkVoidFunction) vkDestroyInstance;
+    if (!strcmp(name, "EnumeratePhysicalDevices"))
+        return (PFN_vkVoidFunction) vkEnumeratePhysicalDevices;
+    if (!strcmp(name, "GetPhysicalDeviceFeatures"))
+        return (PFN_vkVoidFunction) vkGetPhysicalDeviceFeatures;
+    if (!strcmp(name, "GetPhysicalDeviceFormatProperties"))
+        return (PFN_vkVoidFunction) vkGetPhysicalDeviceFormatProperties;
+    if (!strcmp(name, "GetPhysicalDeviceImageFormatProperties"))
+        return (PFN_vkVoidFunction) vkGetPhysicalDeviceImageFormatProperties;
+    if (!strcmp(name, "GetPhysicalDeviceProperties"))
+        return (PFN_vkVoidFunction) vkGetPhysicalDeviceProperties;
+    if (!strcmp(name, "GetPhysicalDeviceQueueFamilyProperties"))
+        return (PFN_vkVoidFunction) vkGetPhysicalDeviceQueueFamilyProperties;
+    if (!strcmp(name, "GetPhysicalDeviceMemoryProperties"))
+        return (PFN_vkVoidFunction) vkGetPhysicalDeviceMemoryProperties;
+    if (!strcmp(name, "GetPhysicalDeviceSparseImageFormatProperties"))
+        return (PFN_vkVoidFunction) vkGetPhysicalDeviceSparseImageFormatProperties;
+    if (!strcmp(name, "EnumerateDeviceExtensionProperties"))
+        return (PFN_vkVoidFunction)vkEnumerateDeviceExtensionProperties;
+    return NULL;
+}
+
+VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice device, const char* funcName)
+{
+    PFN_vkVoidFunction addr;
+
+    if (!strcmp("vkGetDeviceProcAddr", funcName)) {
+        return (PFN_vkVoidFunction) vkGetDeviceProcAddr;
+    }
+
+    addr = layer_intercept_proc(funcName);
+    if (addr)
+        return addr;
+    if (device == VK_NULL_HANDLE) {
+        return NULL;
+    }
+
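+    // VK_KHR_swapchain entry points wrapped by this layer; only resolved once a valid device handle is available.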
+    if (!strcmp("vkCreateSwapchainKHR", funcName))
+        return reinterpret_cast<PFN_vkVoidFunction>(vkCreateSwapchainKHR);
+    if (!strcmp("vkDestroySwapchainKHR", funcName))
+        return reinterpret_cast<PFN_vkVoidFunction>(vkDestroySwapchainKHR);
+    if (!strcmp("vkGetSwapchainImagesKHR", funcName))
+        return reinterpret_cast<PFN_vkVoidFunction>(vkGetSwapchainImagesKHR);
+    if (!strcmp("vkAcquireNextImageKHR", funcName))
+        return reinterpret_cast<PFN_vkVoidFunction>(vkAcquireNextImageKHR);
+    if (!strcmp("vkQueuePresentKHR", funcName))
+        return reinterpret_cast<PFN_vkVoidFunction>(vkQueuePresentKHR);
+
+    VkLayerDispatchTable *pDisp = device_dispatch_table(device);
+    if (pDisp->GetDeviceProcAddr == NULL)
+    {
+        return NULL;
+    }
+
+    return pDisp->GetDeviceProcAddr(device, funcName);
+}
+
+VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char* funcName)
+{
+    PFN_vkVoidFunction addr;
+
+    if (!strcmp(funcName, "vkCreateInstance"))
+        return (PFN_vkVoidFunction) vkCreateInstance;
+    if (!strcmp(funcName, "vkCreateDevice"))
+        return (PFN_vkVoidFunction) vkCreateDevice;
+
+    if (instance == VK_NULL_HANDLE) {
+        return NULL;
+    }
+
+    addr = layer_intercept_instance_proc(funcName);
+    if (addr)
+        return addr;
+
+    wrapped_inst_obj *inst;
+    (void)unwrap_instance(instance, &inst);
+    VkLayerInstanceDispatchTable* pTable = &inst->layer_disp;
+
+    // EXT_debug_report
+    if (!strcmp(funcName, "vkCreateDebugReportCallbackEXT"))
+        return (PFN_vkVoidFunction)vkCreateDebugReportCallbackEXT;
+    if (!strcmp(funcName, "vkDestroyDebugReportCallbackEXT"))
+        return (PFN_vkVoidFunction)vkDestroyDebugReportCallbackEXT;
+    if (!strcmp(funcName, "vkDebugReportMessageEXT"))
+        return (PFN_vkVoidFunction)vkDebugReportMessageEXT;
+
+    // KHR_surface
+    if (!strcmp("vkDestroySurfaceKHR", funcName))
+        return reinterpret_cast<PFN_vkVoidFunction>(vkDestroySurfaceKHR);
+    if (!strcmp("vkGetPhysicalDeviceSurfaceSupportKHR", funcName))
+        return reinterpret_cast<PFN_vkVoidFunction>(vkGetPhysicalDeviceSurfaceSupportKHR);
+    if (!strcmp("vkGetPhysicalDeviceSurfaceCapabilitiesKHR", funcName))
+        return reinterpret_cast<PFN_vkVoidFunction>(vkGetPhysicalDeviceSurfaceCapabilitiesKHR);
+    if (!strcmp("vkGetPhysicalDeviceSurfaceFormatsKHR", funcName))
+        return reinterpret_cast<PFN_vkVoidFunction>(vkGetPhysicalDeviceSurfaceFormatsKHR);
+    if (!strcmp("vkGetPhysicalDeviceSurfacePresentModesKHR", funcName))
+        return reinterpret_cast<PFN_vkVoidFunction>(vkGetPhysicalDeviceSurfacePresentModesKHR);
+
+    // KHR_XXX_surface
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    if (!strcmp("vkCreateXcbSurfaceKHR", funcName))
+        return reinterpret_cast<PFN_vkVoidFunction>(vkCreateXcbSurfaceKHR);
+    if (!strcmp("vkGetPhysicalDeviceXcbPresentationSupportKHR", funcName))
+        return reinterpret_cast<PFN_vkVoidFunction>(vkGetPhysicalDeviceXcbPresentationSupportKHR);
+#endif // VK_USE_PLATFORM_XCB_KHR
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    if (!strcmp("vkCreateXlibSurfaceKHR", funcName))
+        return reinterpret_cast<PFN_vkVoidFunction>(vkCreateXlibSurfaceKHR);
+    if (!strcmp("vkGetPhysicalDeviceXlibPresentationSupportKHR", funcName))
+        return reinterpret_cast<PFN_vkVoidFunction>(vkGetPhysicalDeviceXlibPresentationSupportKHR);
+#endif // VK_USE_PLATFORM_XLIB_KHR
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    if (!strcmp("vkCreateWaylandSurfaceKHR", funcName))
+        return reinterpret_cast<PFN_vkVoidFunction>(vkCreateWaylandSurfaceKHR);
+    if (!strcmp("vkGetPhysicalDeviceWaylandPresentationSupportKHR", funcName))
+        return reinterpret_cast<PFN_vkVoidFunction>(vkGetPhysicalDeviceWaylandPresentationSupportKHR);
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    if (!strcmp("vkCreateWin32SurfaceKHR", funcName))
+        return reinterpret_cast<PFN_vkVoidFunction>(vkCreateWin32SurfaceKHR);
+    if (!strcmp("vkGetPhysicalDeviceWin32PresentationSupportKHR", funcName))
+        return reinterpret_cast<PFN_vkVoidFunction>(vkGetPhysicalDeviceWin32PresentationSupportKHR);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    if (pTable->GetInstanceProcAddr == NULL)
+        return NULL;
+    return pTable->GetInstanceProcAddr(instance, funcName);
+}
+
+VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetPhysicalDeviceProcAddr(VkInstance instance, const char *funcName) {
+    assert(instance);
+
+    wrapped_inst_obj *inst;
+    (void)unwrap_instance(instance, &inst);
+    VkLayerInstanceDispatchTable* pTable = &inst->layer_disp;
+
+    if (pTable->GetPhysicalDeviceProcAddr == NULL)
+        return NULL;
+    return pTable->GetPhysicalDeviceProcAddr(instance, funcName);
+}
+
+} // namespace wrap_objects
+
+// loader-layer interface v0, just wrappers since there is only a layer
+VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char* funcName) {
+    return wrap_objects::vkGetInstanceProcAddr(instance, funcName);
+}
+
+VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice device, const char* funcName) {
+    return wrap_objects::vkGetDeviceProcAddr(device, funcName);
+}
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+vkEnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount, VkExtensionProperties *pProperties) {
+    assert(0); // TODO return wrap_objects::EnumerateInstanceExtensionProperties(pLayerName, pCount, pProperties);
+    return VK_SUCCESS;
+}
+
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+vkEnumerateInstanceLayerProperties(uint32_t *pCount, VkLayerProperties *pProperties) {
+    assert(0); // TODO return wrap_objects::EnumerateInstanceLayerProperties(pCount, pProperties);
+    return VK_SUCCESS;
+}
+
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount, VkLayerProperties *pProperties) {
+    // the layer command handles VK_NULL_HANDLE just fine internally
+    assert(physicalDevice == VK_NULL_HANDLE);
+    assert(0); // TODO return wrap_objects::EnumerateDeviceLayerProperties(VK_NULL_HANDLE, pCount, pProperties);
+    return VK_SUCCESS;
+}
+
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
+    const char *pLayerName, uint32_t *pCount,
+    VkExtensionProperties *pProperties) {
+    // the layer command handles VK_NULL_HANDLE just fine internally
+    assert(physicalDevice == VK_NULL_HANDLE);
+    return wrap_objects::vkEnumerateDeviceExtensionProperties(VK_NULL_HANDLE, pLayerName, pCount, pProperties);
+}
+
+VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_layerGetPhysicalDeviceProcAddr(VkInstance instance, const char *funcName) {
+    return wrap_objects::GetPhysicalDeviceProcAddr(instance, funcName);
+}
+
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkNegotiateLoaderLayerInterfaceVersion(VkNegotiateLayerInterface *pVersionStruct) {
+    assert(pVersionStruct != NULL);
+    assert(pVersionStruct->sType == LAYER_NEGOTIATE_INTERFACE_STRUCT);
+
+    // Fill in the function pointers if our version is at least capable of having the structure contain them.
+    if (pVersionStruct->loaderLayerInterfaceVersion >= 2) {
+        pVersionStruct->pfnGetInstanceProcAddr = vkGetInstanceProcAddr;
+        pVersionStruct->pfnGetDeviceProcAddr = vkGetDeviceProcAddr;
+        pVersionStruct->pfnGetPhysicalDeviceProcAddr = vk_layerGetPhysicalDeviceProcAddr;
+    }
+
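+    // Settle on the lower of the loader's and this layer's supported interface versions.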
+    if (pVersionStruct->loaderLayerInterfaceVersion < CURRENT_LOADER_LAYER_INTERFACE_VERSION) {
+        wrap_objects::loader_layer_if_version = pVersionStruct->loaderLayerInterfaceVersion;
+    } else if (pVersionStruct->loaderLayerInterfaceVersion > CURRENT_LOADER_LAYER_INTERFACE_VERSION) {
+        pVersionStruct->loaderLayerInterfaceVersion = CURRENT_LOADER_LAYER_INTERFACE_VERSION;
+    }
+
+    return VK_SUCCESS;
+}
diff --git a/src/third_party/vulkan-loader/src/tests/layers/wrap_objects.h b/src/third_party/vulkan-loader/src/tests/layers/wrap_objects.h
new file mode 100644
index 0000000..9e42226
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/layers/wrap_objects.h
@@ -0,0 +1,65 @@
+/*
+ *
+ * Copyright (C) 2015-2016 Valve Corporation
+ * Copyright (C) 2015-2016 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Jon Ashburn <jon@lunarg.com>
+ *
+ */
+
+#pragma once
+#include <unordered_map>
+#include "vulkan/vk_layer.h"
+
+struct wrapped_phys_dev_obj {
+    VkLayerInstanceDispatchTable *loader_disp;
+    struct wrapped_inst_obj *inst;  // parent instance object
+    void *obj;
+};
+
+struct wrapped_inst_obj {
+    VkLayerInstanceDispatchTable *loader_disp;
+    VkLayerInstanceDispatchTable layer_disp;    //this layer's dispatch table
+    PFN_vkSetInstanceLoaderData pfn_inst_init;
+    struct wrapped_phys_dev_obj *ptr_phys_devs; // any enumerated phys devs
+    VkInstance obj;
+};
+
+struct wrapped_dev_obj {
+    VkLayerDispatchTable *disp;
+    VkLayerInstanceDispatchTable *layer_disp;  // TODO use this
+    PFN_vkSetDeviceLoaderData pfn_dev_init;  //TODO use this
+    void *obj;
+};
+
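+// The layer hands wrapped handles up to the application; these helpers recover the wrapper
+// object and return the underlying handle that must be passed down the dispatch chain.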
+static inline VkInstance unwrap_instance(const VkInstance instance, wrapped_inst_obj **inst) {
+    *inst = reinterpret_cast<wrapped_inst_obj *>(instance);
+    return (*inst)->obj;
+}
+
+static inline VkPhysicalDevice unwrap_phys_dev(const VkPhysicalDevice physical_device, wrapped_phys_dev_obj **phys_dev) {
+    *phys_dev = reinterpret_cast<wrapped_phys_dev_obj *>(physical_device);
+    return reinterpret_cast<VkPhysicalDevice>((*phys_dev)->obj);
+}
+
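+// Resolve the VK_KHR_swapchain entry points from the next layer in the chain via GetDeviceProcAddr
+// and store them in this layer's device dispatch table.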
+static void create_device_register_extensions(const VkDeviceCreateInfo *pCreateInfo, VkDevice device) {
+    VkLayerDispatchTable *pDisp = device_dispatch_table(device);
+    PFN_vkGetDeviceProcAddr gpa = pDisp->GetDeviceProcAddr;
+    pDisp->CreateSwapchainKHR = (PFN_vkCreateSwapchainKHR)gpa(device, "vkCreateSwapchainKHR");
+    pDisp->DestroySwapchainKHR = (PFN_vkDestroySwapchainKHR)gpa(device, "vkDestroySwapchainKHR");
+    pDisp->GetSwapchainImagesKHR = (PFN_vkGetSwapchainImagesKHR)gpa(device, "vkGetSwapchainImagesKHR");
+    pDisp->AcquireNextImageKHR = (PFN_vkAcquireNextImageKHR)gpa(device, "vkAcquireNextImageKHR");
+    pDisp->QueuePresentKHR = (PFN_vkQueuePresentKHR)gpa(device, "vkQueuePresentKHR");
+}
diff --git a/src/third_party/vulkan-loader/src/tests/loader_validation_tests.cpp b/src/third_party/vulkan-loader/src/tests/loader_validation_tests.cpp
new file mode 100644
index 0000000..e54c488
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/loader_validation_tests.cpp
@@ -0,0 +1,1744 @@
+/*
+ * Copyright (c) 2015-2017 The Khronos Group Inc.
+ * Copyright (c) 2015-2017 Valve Corporation
+ * Copyright (c) 2015-2017 LunarG, Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and/or associated documentation files (the "Materials"), to
+ * deal in the Materials without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Materials, and to permit persons to whom the Materials are
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included in
+ * all copies or substantial portions of the Materials.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ *
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+ * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+ * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE
+ * USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ * Author: Jeremy Hayes <jeremy@lunarG.com>
+ * Author: Mark Young <marky@lunarG.com>
+ */
+
+// Following items are needed for C++ to work with PRIxLEAST64
+#define __STDC_FORMAT_MACROS
+#include <inttypes.h>
+
+#include <stdint.h>  // For UINT32_MAX
+
+#include <algorithm>
+#include <iostream>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "test_common.h"
+#include <vulkan/vulkan.h>
+
+namespace VK {
+
+struct InstanceCreateInfo {
+    InstanceCreateInfo()
+        : info  // MSVC can't handle list initialization, thus explicit construction herein.
+          (VkInstanceCreateInfo{
+              VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,  // sType
+              nullptr,                                 // pNext
+              0,                                       // flags
+              nullptr,                                 // pApplicationInfo
+              0,                                       // enabledLayerCount
+              nullptr,                                 // ppEnabledLayerNames
+              0,                                       // enabledExtensionCount
+              nullptr                                  // ppEnabledExtensionNames
+          }) {}
+
+    InstanceCreateInfo &sType(VkStructureType const &sType) {
+        info.sType = sType;
+
+        return *this;
+    }
+
+    InstanceCreateInfo &pNext(void const *const pNext) {
+        info.pNext = pNext;
+
+        return *this;
+    }
+
+    InstanceCreateInfo &flags(VkInstanceCreateFlags const &flags) {
+        info.flags = flags;
+
+        return *this;
+    }
+
+    InstanceCreateInfo &pApplicationInfo(VkApplicationInfo const *const pApplicationInfo) {
+        info.pApplicationInfo = pApplicationInfo;
+
+        return *this;
+    }
+
+    InstanceCreateInfo &enabledLayerCount(uint32_t const &enabledLayerCount) {
+        info.enabledLayerCount = enabledLayerCount;
+
+        return *this;
+    }
+
+    InstanceCreateInfo &ppEnabledLayerNames(char const *const *const ppEnabledLayerNames) {
+        info.ppEnabledLayerNames = ppEnabledLayerNames;
+
+        return *this;
+    }
+
+    InstanceCreateInfo &enabledExtensionCount(uint32_t const &enabledExtensionCount) {
+        info.enabledExtensionCount = enabledExtensionCount;
+
+        return *this;
+    }
+
+    InstanceCreateInfo &ppEnabledExtensionNames(char const *const *const ppEnabledExtensionNames) {
+        info.ppEnabledExtensionNames = ppEnabledExtensionNames;
+
+        return *this;
+    }
+
+    operator VkInstanceCreateInfo const *() const { return &info; }
+
+    operator VkInstanceCreateInfo *() { return &info; }
+
+    VkInstanceCreateInfo info;
+};
+
+struct DeviceQueueCreateInfo {
+    DeviceQueueCreateInfo()
+        : info  // MSVC can't handle list initialization, thus explicit construction herein.
+          (VkDeviceQueueCreateInfo{
+              VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,  // sType
+              nullptr,                                     // pNext
+              0,                                           // flags
+              0,                                           // queueFamilyIndex
+              0,                                           // queueCount
+              nullptr                                      // pQueuePriorities
+          }) {}
+
+    DeviceQueueCreateInfo &sType(VkStructureType const &sType) {
+        info.sType = sType;
+
+        return *this;
+    }
+
+    DeviceQueueCreateInfo &pNext(void const *const pNext) {
+        info.pNext = pNext;
+
+        return *this;
+    }
+
+    DeviceQueueCreateInfo &flags(VkDeviceQueueCreateFlags const &flags) {
+        info.flags = flags;
+
+        return *this;
+    }
+
+    DeviceQueueCreateInfo &queueFamilyIndex(uint32_t const &queueFamilyIndex) {
+        info.queueFamilyIndex = queueFamilyIndex;
+
+        return *this;
+    }
+
+    DeviceQueueCreateInfo &queueCount(uint32_t const &queueCount) {
+        info.queueCount = queueCount;
+
+        return *this;
+    }
+
+    DeviceQueueCreateInfo &pQueuePriorities(float const *const pQueuePriorities) {
+        info.pQueuePriorities = pQueuePriorities;
+
+        return *this;
+    }
+
+    operator VkDeviceQueueCreateInfo() { return info; }
+
+    VkDeviceQueueCreateInfo info;
+};
+
+struct DeviceCreateInfo {
+    DeviceCreateInfo()
+        : info  // MSVC can't handle list initialization, thus explicit construction herein.
+          (VkDeviceCreateInfo{
+              VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,  // sType
+              nullptr,                               // pNext
+              0,                                     // flags
+              0,                                     // queueCreateInfoCount
+              nullptr,                               // pQueueCreateInfos
+              0,                                     // enabledLayerCount
+              nullptr,                               // ppEnabledLayerNames
+              0,                                     // enabledExtensionCount
+              nullptr,                               // ppEnabledExtensionNames
+              nullptr                                // pEnabledFeatures
+          }) {}
+
+    DeviceCreateInfo &sType(VkStructureType const &sType) {
+        info.sType = sType;
+
+        return *this;
+    }
+
+    DeviceCreateInfo &pNext(void const *const pNext) {
+        info.pNext = pNext;
+
+        return *this;
+    }
+
+    DeviceCreateInfo &flags(VkDeviceQueueCreateFlags const &flags) {
+        info.flags = flags;
+
+        return *this;
+    }
+
+    DeviceCreateInfo &queueCreateInfoCount(uint32_t const &queueCreateInfoCount) {
+        info.queueCreateInfoCount = queueCreateInfoCount;
+
+        return *this;
+    }
+
+    DeviceCreateInfo &pQueueCreateInfos(VkDeviceQueueCreateInfo const *const pQueueCreateInfos) {
+        info.pQueueCreateInfos = pQueueCreateInfos;
+
+        return *this;
+    }
+
+    DeviceCreateInfo &enabledLayerCount(uint32_t const &enabledLayerCount) {
+        info.enabledLayerCount = enabledLayerCount;
+
+        return *this;
+    }
+
+    DeviceCreateInfo &ppEnabledLayerNames(char const *const *const ppEnabledLayerNames) {
+        info.ppEnabledLayerNames = ppEnabledLayerNames;
+
+        return *this;
+    }
+
+    DeviceCreateInfo &enabledExtensionCount(uint32_t const &enabledExtensionCount) {
+        info.enabledExtensionCount = enabledExtensionCount;
+
+        return *this;
+    }
+
+    DeviceCreateInfo &ppEnabledExtensionNames(char const *const *const ppEnabledExtensionNames) {
+        info.ppEnabledExtensionNames = ppEnabledExtensionNames;
+
+        return *this;
+    }
+
+    DeviceCreateInfo &pEnabledFeatures(VkPhysicalDeviceFeatures const *const pEnabledFeatures) {
+        info.pEnabledFeatures = pEnabledFeatures;
+
+        return *this;
+    }
+
+    operator VkDeviceCreateInfo const *() const { return &info; }
+
+    operator VkDeviceCreateInfo *() { return &info; }
+
+    VkDeviceCreateInfo info;
+};
+}  // namespace VK
+
+struct CommandLine : public ::testing::Test {
+    static void Initialize(int argc, char **argv) { arguments.assign(argv, argv + argc); };
+
+    static void SetUpTestCase(){};
+    static void TearDownTestCase(){};
+
+    static std::vector<std::string> arguments;
+};
+std::vector<std::string> CommandLine::arguments;
+
+struct EnumerateInstanceLayerProperties : public CommandLine {};
+struct EnumerateInstanceExtensionProperties : public CommandLine {};
+struct ImplicitLayer : public CommandLine {};
+
+// Allocation tracking utilities
+struct AllocTrack {
+    bool active;
+    bool was_allocated;
+    void *aligned_start_addr;
+    char *actual_start_addr;
+    size_t requested_size_bytes;
+    size_t actual_size_bytes;
+    VkSystemAllocationScope alloc_scope;
+    uint64_t user_data;
+
+    AllocTrack()
+        : active(false),
+          was_allocated(false),
+          aligned_start_addr(nullptr),
+          actual_start_addr(nullptr),
+          requested_size_bytes(0),
+          actual_size_bytes(0),
+          alloc_scope(VK_SYSTEM_ALLOCATION_SCOPE_COMMAND),
+          user_data(0) {}
+};
+
+// Global vector to track allocations.  This will be resized before each test and emptied after.
+// However, we have to globally define it so the allocation callback functions work properly.
+std::vector<AllocTrack> g_allocated_vector;
+bool g_intentional_fail_enabled = false;
+uint32_t g_intenional_fail_index = 0;
+uint32_t g_intenional_fail_count = 0;
+
+void FreeAllocTracker() { g_allocated_vector.clear(); }
+
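+// Resizes the tracker to 'size' slots. If intentional_fail_index is provided, AllocCallbackFunc starts
+// returning nullptr once that many allocations have been attempted, letting tests exercise out-of-memory paths.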
+void InitAllocTracker(size_t size, uint32_t intentional_fail_index = UINT32_MAX) {
+    if (g_allocated_vector.size() > 0) {
+        FreeAllocTracker();
+    }
+    g_allocated_vector.resize(size);
+    if (intentional_fail_index != UINT32_MAX) {
+        g_intentional_fail_enabled = true;
+        g_intenional_fail_index = intentional_fail_index;
+        g_intenional_fail_count = 0;
+    } else {
+        g_intentional_fail_enabled = false;
+        g_intenional_fail_index = 0;
+        g_intenional_fail_count = 0;
+    }
+}
+
+bool IsAllocTrackerEmpty() {
+    bool success = true;
+    bool was_allocated = false;
+    char print_command[1024];
+    sprintf(print_command, "\t%%04d\t%%p (%%p) : 0x%%%s (0x%%%s) : scope %%d : user_data 0x%%%s\n", PRIxLEAST64, PRIxLEAST64,
+            PRIxLEAST64);
+    for (uint32_t iii = 0; iii < g_allocated_vector.size(); iii++) {
+        if (g_allocated_vector[iii].active) {
+            if (success) {
+                printf("ERROR: Allocations still remain!\n");
+            }
+            printf(print_command, iii, g_allocated_vector[iii].aligned_start_addr, g_allocated_vector[iii].actual_start_addr,
+                   g_allocated_vector[iii].requested_size_bytes, g_allocated_vector[iii].actual_size_bytes,
+                   g_allocated_vector[iii].alloc_scope, g_allocated_vector[iii].user_data);
+            success = false;
+        } else if (!was_allocated && g_allocated_vector[iii].was_allocated) {
+            was_allocated = true;
+        }
+    }
+    if (!g_intentional_fail_enabled && !was_allocated) {
+        printf("No allocations ever generated!");
+        success = false;
+    }
+    return success;
+}
+
+VKAPI_ATTR void *VKAPI_CALL AllocCallbackFunc(void *pUserData, size_t size, size_t alignment,
+                                              VkSystemAllocationScope allocationScope) {
+    if (g_intentional_fail_enabled) {
+        if (++g_intenional_fail_count >= g_intenional_fail_index) {
+            return nullptr;
+        }
+    }
+    for (uint32_t iii = 0; iii < g_allocated_vector.size(); iii++) {
+        if (!g_allocated_vector[iii].active) {
+            g_allocated_vector[iii].requested_size_bytes = size;
+            g_allocated_vector[iii].actual_size_bytes = size + (alignment - 1);
+            g_allocated_vector[iii].aligned_start_addr = NULL;
+            g_allocated_vector[iii].actual_start_addr = new char[g_allocated_vector[iii].actual_size_bytes];
+            if (g_allocated_vector[iii].actual_start_addr != NULL) {
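+                // Over-allocate by (alignment - 1) bytes, then round the raw pointer up to the next
+                // multiple of 'alignment' (assumed to be a nonzero power of two).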
+                uint64_t addr = (uint64_t)g_allocated_vector[iii].actual_start_addr;
+                addr += (alignment - 1);
+                addr &= ~(alignment - 1);
+                g_allocated_vector[iii].aligned_start_addr = (void *)addr;
+                g_allocated_vector[iii].alloc_scope = allocationScope;
+                g_allocated_vector[iii].user_data = (uint64_t)pUserData;
+                g_allocated_vector[iii].active = true;
+                g_allocated_vector[iii].was_allocated = true;
+            }
+            return g_allocated_vector[iii].aligned_start_addr;
+        }
+    }
+    return nullptr;
+}
+
+VKAPI_ATTR void VKAPI_CALL FreeCallbackFunc(void *pUserData, void *pMemory) {
+    for (uint32_t iii = 0; iii < g_allocated_vector.size(); iii++) {
+        if (g_allocated_vector[iii].active && g_allocated_vector[iii].aligned_start_addr == pMemory) {
+            delete[] g_allocated_vector[iii].actual_start_addr;
+            g_allocated_vector[iii].active = false;
+            break;
+        }
+    }
+}
+
+VKAPI_ATTR void *VKAPI_CALL ReallocCallbackFunc(void *pUserData, void *pOriginal, size_t size, size_t alignment,
+                                                VkSystemAllocationScope allocationScope) {
+    if (pOriginal != NULL) {
+        for (uint32_t iii = 0; iii < g_allocated_vector.size(); iii++) {
+            if (g_allocated_vector[iii].active && g_allocated_vector[iii].aligned_start_addr == pOriginal) {
+                if (size == 0) {
+                    FreeCallbackFunc(pUserData, pOriginal);
+                    return nullptr;
+                } else if (size < g_allocated_vector[iii].requested_size_bytes) {
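+                    // Shrinking an allocation reuses the existing block in place.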
+                    return pOriginal;
+                } else {
+                    void *pNew = AllocCallbackFunc(pUserData, size, alignment, allocationScope);
+                    if (pNew != NULL) {
+                        size_t copy_size = size;
+                        if (g_allocated_vector[iii].requested_size_bytes < size) {
+                            copy_size = g_allocated_vector[iii].requested_size_bytes;
+                        }
+                        memcpy(pNew, pOriginal, copy_size);
+                        FreeCallbackFunc(pUserData, pOriginal);
+                    }
+                    return pNew;
+                }
+            }
+        }
+        return nullptr;
+    } else {
+        return AllocCallbackFunc(pUserData, size, alignment, allocationScope);
+    }
+}
+
+void test_create_device(VkPhysicalDevice physical) {
+    uint32_t familyCount = 0;
+    VkResult result;
+    vkGetPhysicalDeviceQueueFamilyProperties(physical, &familyCount, nullptr);
+    ASSERT_GT(familyCount, 0u);
+
+    std::unique_ptr<VkQueueFamilyProperties[]> family(new VkQueueFamilyProperties[familyCount]);
+    vkGetPhysicalDeviceQueueFamilyProperties(physical, &familyCount, family.get());
+    ASSERT_GT(familyCount, 0u);
+
+    for (uint32_t q = 0; q < familyCount; ++q) {
+        if (~family[q].queueFlags & VK_QUEUE_GRAPHICS_BIT) {
+            continue;
+        }
+
+        float const priorities[] = {0.0f};  // Temporary required due to MSVC bug.
+        VkDeviceQueueCreateInfo const queueInfo[1]{
+            VK::DeviceQueueCreateInfo().queueFamilyIndex(q).queueCount(1).pQueuePriorities(priorities)};
+
+        auto const deviceInfo = VK::DeviceCreateInfo().queueCreateInfoCount(1).pQueueCreateInfos(queueInfo);
+
+        VkDevice device;
+        result = vkCreateDevice(physical, deviceInfo, nullptr, &device);
+        ASSERT_EQ(result, VK_SUCCESS);
+
+        vkDestroyDevice(device, nullptr);
+    }
+}
+
+// Test groups:
+// LX = lunar exchange
+// LVLGH = loader and validation github
+// LVLGL = loader and validation gitlab
+
+TEST(LX435, InstanceCreateInfoConst) {
+    VkInstanceCreateInfo const info = {VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO, nullptr, 0, nullptr, 0, nullptr, 0, nullptr};
+
+    VkInstance instance = VK_NULL_HANDLE;
+    VkResult result = vkCreateInstance(&info, VK_NULL_HANDLE, &instance);
+    EXPECT_EQ(result, VK_SUCCESS);
+
+    vkDestroyInstance(instance, nullptr);
+}
+
+TEST(LX475, DestroyInstanceNullHandle) { vkDestroyInstance(VK_NULL_HANDLE, nullptr); }
+
+TEST(LX475, DestroyDeviceNullHandle) { vkDestroyDevice(VK_NULL_HANDLE, nullptr); }
+
+TEST(CreateInstance, ExtensionNotPresent) {
+    char const *const names[] = {"NotPresent"};  // Temporary required due to MSVC bug.
+    auto const info = VK::InstanceCreateInfo().enabledExtensionCount(1).ppEnabledExtensionNames(names);
+
+    VkInstance instance = VK_NULL_HANDLE;
+    VkResult result = vkCreateInstance(info, VK_NULL_HANDLE, &instance);
+    ASSERT_EQ(result, VK_ERROR_EXTENSION_NOT_PRESENT);
+
+    // It's not necessary to destroy the instance because it will not be created successfully.
+}
+
+TEST(CreateInstance, LayerNotPresent) {
+    char const *const names[] = {"NotPresent"};  // Temporary required due to MSVC bug.
+    auto const info = VK::InstanceCreateInfo().enabledLayerCount(1).ppEnabledLayerNames(names);
+
+    VkInstance instance = VK_NULL_HANDLE;
+    VkResult result = vkCreateInstance(info, VK_NULL_HANDLE, &instance);
+    ASSERT_EQ(result, VK_ERROR_LAYER_NOT_PRESENT);
+
+    // It's not necessary to destroy the instance because it will not be created successfully.
+}
+
+// Used by run_loader_tests.sh to test for layer insertion.
+TEST(CreateInstance, LayerPresent) {
+    char const *const names1[] = {"VK_LAYER_LUNARG_test"};  // Temporary required due to MSVC bug.
+    char const *const names2[] = {"VK_LAYER_LUNARG_meta"};  // Temporary required due to MSVC bug.
+    char const *const names3[] = {"VK_LAYER_LUNARG_meta_rev"};  // Temporary required due to MSVC bug.
+    auto const info1 = VK::InstanceCreateInfo().enabledLayerCount(1).ppEnabledLayerNames(names1);
+    VkInstance instance = VK_NULL_HANDLE;
+    VkResult result = vkCreateInstance(info1, VK_NULL_HANDLE, &instance);
+    ASSERT_EQ(result, VK_SUCCESS);
+    vkDestroyInstance(instance, nullptr);
+
+    for (auto names : {names2, names3}) {
+        auto const info2 = VK::InstanceCreateInfo().enabledLayerCount(1).ppEnabledLayerNames(names);
+        instance = VK_NULL_HANDLE;
+        result = vkCreateInstance(info2, VK_NULL_HANDLE, &instance);
+        ASSERT_EQ(result, VK_SUCCESS);
+
+        uint32_t deviceCount;
+        vkEnumeratePhysicalDevices(instance, &deviceCount, nullptr);
+        std::vector<VkPhysicalDevice> devs(deviceCount);
+        vkEnumeratePhysicalDevices(instance, &deviceCount, devs.data());
+        test_create_device(devs[0]);
+
+        vkDestroyInstance(instance, nullptr);
+    }
+}
+
+// Used by run_loader_tests.sh to test that calling vkEnumeratePhysicalDevices without first querying
+// the count, works.
+TEST(EnumeratePhysicalDevices, OneCall) {
+    VkInstance instance = VK_NULL_HANDLE;
+    VkResult result = vkCreateInstance(VK::InstanceCreateInfo(), VK_NULL_HANDLE, &instance);
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    uint32_t physicalCount = 500;
+    std::unique_ptr<VkPhysicalDevice[]> physical(new VkPhysicalDevice[physicalCount]);
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, physical.get());
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+
+    vkDestroyInstance(instance, nullptr);
+}
+
+// Used by run_loader_tests.sh to test for the expected usage of the vkEnumeratePhysicalDevices call.
+TEST(EnumeratePhysicalDevices, TwoCall) {
+    VkInstance instance = VK_NULL_HANDLE;
+    VkResult result = vkCreateInstance(VK::InstanceCreateInfo(), VK_NULL_HANDLE, &instance);
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    uint32_t physicalCount = 0;
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, nullptr);
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+
+    std::unique_ptr<VkPhysicalDevice[]> physical(new VkPhysicalDevice[physicalCount]);
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, physical.get());
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+
+    vkDestroyInstance(instance, nullptr);
+}
+
+// Used by run_loader_tests.sh to test that calling vkEnumeratePhysicalDevices without first querying
+// the count, matches the count from the standard call.
+TEST(EnumeratePhysicalDevices, MatchOneAndTwoCallNumbers) {
+    VkInstance instance_one = VK_NULL_HANDLE;
+    VkResult result = vkCreateInstance(VK::InstanceCreateInfo(), VK_NULL_HANDLE, &instance_one);
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    uint32_t physicalCount_one = 500;
+    std::unique_ptr<VkPhysicalDevice[]> physical_one(new VkPhysicalDevice[physicalCount_one]);
+    result = vkEnumeratePhysicalDevices(instance_one, &physicalCount_one, physical_one.get());
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount_one, 0u);
+
+    VkInstance instance_two = VK_NULL_HANDLE;
+    result = vkCreateInstance(VK::InstanceCreateInfo(), VK_NULL_HANDLE, &instance_two);
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    uint32_t physicalCount_two = 0;
+    result = vkEnumeratePhysicalDevices(instance_two, &physicalCount_two, nullptr);
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount_two, 0u);
+
+    std::unique_ptr<VkPhysicalDevice[]> physical_two(new VkPhysicalDevice[physicalCount_two]);
+    result = vkEnumeratePhysicalDevices(instance_two, &physicalCount_two, physical_two.get());
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount_two, 0u);
+
+    ASSERT_EQ(physicalCount_one, physicalCount_two);
+
+    vkDestroyInstance(instance_one, nullptr);
+    vkDestroyInstance(instance_two, nullptr);
+}
+
+// Used by run_loader_tests.sh to test for the expected usage of the vkEnumeratePhysicalDevices
+// call if not enough numbers are provided for the final list.
+TEST(EnumeratePhysicalDevices, TwoCallIncomplete) {
+    VkInstance instance = VK_NULL_HANDLE;
+    VkResult result = vkCreateInstance(VK::InstanceCreateInfo(), VK_NULL_HANDLE, &instance);
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    uint32_t physicalCount = 0;
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, nullptr);
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+
+    std::unique_ptr<VkPhysicalDevice[]> physical(new VkPhysicalDevice[physicalCount]);
+
+    // Remove one from the physical device count so we can get the VK_INCOMPLETE message
+    physicalCount -= 1;
+
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, physical.get());
+    ASSERT_EQ(result, VK_INCOMPLETE);
+
+    vkDestroyInstance(instance, nullptr);
+}
+
+// Test to make sure that layers enabled in the instance show up in the list of device layers.
+TEST(EnumerateDeviceLayers, LayersMatch) {
+    char const *const names1[] = {"VK_LAYER_LUNARG_meta"};
+    char const *const names2[2] = {"VK_LAYER_LUNARG_test", "VK_LAYER_LUNARG_wrap_objects"};
+    auto const info1 = VK::InstanceCreateInfo().enabledLayerCount(1).ppEnabledLayerNames(names1);
+    VkInstance instance = VK_NULL_HANDLE;
+    VkResult result = vkCreateInstance(info1, VK_NULL_HANDLE, &instance);
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    uint32_t physicalCount = 0;
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, nullptr);
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+
+    std::unique_ptr<VkPhysicalDevice[]> physical(new VkPhysicalDevice[physicalCount]);
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, physical.get());
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+    uint32_t count = 24;
+    VkLayerProperties layer_props[24];
+    vkEnumerateDeviceLayerProperties(physical[0], &count, layer_props);
+    ASSERT_GE(count, 1u);
+    bool found = false;
+    for (uint32_t iii = 0; iii < count; iii++) {
+        if (!strcmp(layer_props[iii].layerName, names1[0])) {
+            found = true;
+            break;
+        }
+    }
+    ASSERT_TRUE(found) << "Layer " << names1[0] << " not found in " << count << " device layers";
+
+    vkDestroyInstance(instance, nullptr);
+
+    auto const info2 = VK::InstanceCreateInfo().enabledLayerCount(2).ppEnabledLayerNames(names2);
+    instance = VK_NULL_HANDLE;
+    result = vkCreateInstance(info2, VK_NULL_HANDLE, &instance);
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    physicalCount = 0;
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, nullptr);
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+
+    std::unique_ptr<VkPhysicalDevice[]> physical2(new VkPhysicalDevice[physicalCount]);
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, physical2.get());
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+
+    count = 24;
+    vkEnumerateDeviceLayerProperties(physical2[0], &count, layer_props);
+    ASSERT_GE(count, 2u);
+    for (uint32_t jjj = 0; jjj < 2; jjj++) {
+        found = false;
+        for (uint32_t iii = 0; iii < count; iii++) {
+            if (!strcmp(layer_props[iii].layerName, names2[jjj])) {
+                found = true;
+                break;
+            }
+        }
+        ASSERT_TRUE(found) << "Layer " << names2[jjj] << " not found in " << count << " device layers";
+    }
+
+    vkDestroyInstance(instance, nullptr);
+}
+
+TEST(CreateDevice, ExtensionNotPresent) {
+    VkInstance instance = VK_NULL_HANDLE;
+    VkResult result = vkCreateInstance(VK::InstanceCreateInfo(), VK_NULL_HANDLE, &instance);
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    uint32_t physicalCount = 0;
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, nullptr);
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+
+    std::unique_ptr<VkPhysicalDevice[]> physical(new VkPhysicalDevice[physicalCount]);
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, physical.get());
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+
+    for (uint32_t p = 0; p < physicalCount; ++p) {
+        uint32_t familyCount = 0;
+        vkGetPhysicalDeviceQueueFamilyProperties(physical[p], &familyCount, nullptr);
+        ASSERT_GT(familyCount, 0u);
+
+        std::unique_ptr<VkQueueFamilyProperties[]> family(new VkQueueFamilyProperties[familyCount]);
+        vkGetPhysicalDeviceQueueFamilyProperties(physical[p], &familyCount, family.get());
+        ASSERT_GT(familyCount, 0u);
+
+        for (uint32_t q = 0; q < familyCount; ++q) {
+            if (~family[q].queueFlags & VK_QUEUE_GRAPHICS_BIT) {
+                continue;
+            }
+
+            float const priorities[] = {0.0f};  // Temporary required due to MSVC bug.
+            VkDeviceQueueCreateInfo const queueInfo[1]{
+                VK::DeviceQueueCreateInfo().queueFamilyIndex(q).queueCount(1).pQueuePriorities(priorities)};
+
+            char const *const names[] = {"NotPresent"};  // Temporary required due to MSVC bug.
+            auto const deviceInfo = VK::DeviceCreateInfo()
+                                        .queueCreateInfoCount(1)
+                                        .pQueueCreateInfos(queueInfo)
+                                        .enabledExtensionCount(1)
+                                        .ppEnabledExtensionNames(names);
+
+            VkDevice device;
+            result = vkCreateDevice(physical[p], deviceInfo, nullptr, &device);
+            ASSERT_EQ(result, VK_ERROR_EXTENSION_NOT_PRESENT);
+
+            // It's not necessary to destroy the device because it will not be created successfully.
+        }
+    }
+
+    vkDestroyInstance(instance, nullptr);
+}
+
+// LX535 / MI-76: Device layers are deprecated.
+// For backwards compatibility, they are allowed, but must be ignored.
+// Ensure that no errors occur if a bogus device layer list is passed to vkCreateDevice.
+TEST(CreateDevice, LayersNotPresent) {
+    VkInstance instance = VK_NULL_HANDLE;
+    VkResult result = vkCreateInstance(VK::InstanceCreateInfo(), VK_NULL_HANDLE, &instance);
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    uint32_t physicalCount = 0;
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, nullptr);
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+
+    std::unique_ptr<VkPhysicalDevice[]> physical(new VkPhysicalDevice[physicalCount]);
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, physical.get());
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+
+    for (uint32_t p = 0; p < physicalCount; ++p) {
+        uint32_t familyCount = 0;
+        vkGetPhysicalDeviceQueueFamilyProperties(physical[p], &familyCount, nullptr);
+        ASSERT_GT(familyCount, 0u);
+
+        std::unique_ptr<VkQueueFamilyProperties[]> family(new VkQueueFamilyProperties[familyCount]);
+        vkGetPhysicalDeviceQueueFamilyProperties(physical[p], &familyCount, family.get());
+        ASSERT_GT(familyCount, 0u);
+
+        for (uint32_t q = 0; q < familyCount; ++q) {
+            if (~family[q].queueFlags & VK_QUEUE_GRAPHICS_BIT) {
+                continue;
+            }
+
+            float const priorities[] = {0.0f};  // Temporary required due to MSVC bug.
+            VkDeviceQueueCreateInfo const queueInfo[1]{
+                VK::DeviceQueueCreateInfo().queueFamilyIndex(q).queueCount(1).pQueuePriorities(priorities)};
+
+            char const *const names[] = {"NotPresent"};  // Temporary required due to MSVC bug.
+            auto const deviceInfo = VK::DeviceCreateInfo()
+                                        .queueCreateInfoCount(1)
+                                        .pQueueCreateInfos(queueInfo)
+                                        .enabledLayerCount(1)
+                                        .ppEnabledLayerNames(names);
+
+            VkDevice device;
+            result = vkCreateDevice(physical[p], deviceInfo, nullptr, &device);
+            ASSERT_EQ(result, VK_SUCCESS);
+
+            vkDestroyDevice(device, nullptr);
+        }
+    }
+
+    vkDestroyInstance(instance, nullptr);
+}
+
+TEST_F(EnumerateInstanceLayerProperties, PropertyCountLessThanAvailable) {
+    uint32_t count = 0u;
+    VkResult result = vkEnumerateInstanceLayerProperties(&count, nullptr);
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    // We need at least two for the test to be relevant.
+    if (count < 2u) {
+        return;
+    }
+
+    std::unique_ptr<VkLayerProperties[]> properties(new VkLayerProperties[count]);
+    count = 1;
+    result = vkEnumerateInstanceLayerProperties(&count, properties.get());
+    ASSERT_EQ(result, VK_INCOMPLETE);
+}
+
+TEST(EnumerateDeviceLayerProperties, PropertyCountLessThanAvailable) {
+    VkInstance instance = VK_NULL_HANDLE;
+    VkResult result = vkCreateInstance(VK::InstanceCreateInfo(), VK_NULL_HANDLE, &instance);
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    uint32_t physicalCount = 0;
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, nullptr);
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+
+    std::unique_ptr<VkPhysicalDevice[]> physical(new VkPhysicalDevice[physicalCount]);
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, physical.get());
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+
+    for (uint32_t p = 0; p < physicalCount; ++p) {
+        uint32_t count = 0u;
+        result = vkEnumerateDeviceLayerProperties(physical[p], &count, nullptr);
+        ASSERT_EQ(result, VK_SUCCESS);
+
+        // We need at least two for the test to be relevant.
+        if (count < 2u) {
+            continue;
+        }
+
+        std::unique_ptr<VkLayerProperties[]> properties(new VkLayerProperties[count]);
+        count = 1;
+        result = vkEnumerateDeviceLayerProperties(physical[p], &count, properties.get());
+        ASSERT_EQ(result, VK_INCOMPLETE);
+    }
+
+    vkDestroyInstance(instance, nullptr);
+}
+
+TEST_F(EnumerateInstanceLayerProperties, Count) {
+    uint32_t count = 0u;
+    VkResult result = vkEnumerateInstanceLayerProperties(&count, nullptr);
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    if (std::find(arguments.begin(), arguments.end(), "count") != arguments.end()) {
+        std::cout << "count=" << count << '\n';
+    }
+}
+
+TEST_F(EnumerateInstanceLayerProperties, OnePass) {
+    // Count required for this test.
+    if (std::find(arguments.begin(), arguments.end(), "count") == arguments.end()) {
+        return;
+    }
+
+    uint32_t count = std::stoul(arguments[2]);
+
+    std::unique_ptr<VkLayerProperties[]> properties(new VkLayerProperties[count]);
+    VkResult result = vkEnumerateInstanceLayerProperties(&count, properties.get());
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    if (std::find(arguments.begin(), arguments.end(), "properties") != arguments.end()) {
+        for (uint32_t p = 0; p < count; ++p) {
+            std::cout << "properties[" << p << "] =" << ' ' << properties[p].layerName << ' ' << properties[p].specVersion << ' '
+                      << properties[p].implementationVersion << ' ' << properties[p].description << '\n';
+        }
+    }
+}
+
+TEST_F(EnumerateInstanceLayerProperties, TwoPass) {
+    uint32_t count = 0u;
+    VkResult result = vkEnumerateInstanceLayerProperties(&count, nullptr);
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    std::unique_ptr<VkLayerProperties[]> properties(new VkLayerProperties[count]);
+    result = vkEnumerateInstanceLayerProperties(&count, properties.get());
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    if (std::find(arguments.begin(), arguments.end(), "properties") != arguments.end()) {
+        for (uint32_t p = 0; p < count; ++p) {
+            std::cout << "properties[" << p << "] =" << ' ' << properties[p].layerName << ' ' << properties[p].specVersion << ' '
+                      << properties[p].implementationVersion << ' ' << properties[p].description << '\n';
+        }
+    }
+}
+
+TEST_F(EnumerateInstanceExtensionProperties, PropertyCountLessThanAvailable) {
+    uint32_t count = 0u;
+    VkResult result = vkEnumerateInstanceExtensionProperties(nullptr, &count, nullptr);
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    // We need at least two for the test to be relevant.
+    if (count < 2u) {
+        return;
+    }
+
+    std::unique_ptr<VkExtensionProperties[]> properties(new VkExtensionProperties[count]);
+    count = 1;
+    result = vkEnumerateInstanceExtensionProperties(nullptr, &count, properties.get());
+    ASSERT_EQ(result, VK_INCOMPLETE);
+}
+
+TEST(EnumerateDeviceExtensionProperties, PropertyCountLessThanAvailable) {
+    VkInstance instance = VK_NULL_HANDLE;
+    VkResult result = vkCreateInstance(VK::InstanceCreateInfo(), VK_NULL_HANDLE, &instance);
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    uint32_t physicalCount = 0;
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, nullptr);
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+
+    std::unique_ptr<VkPhysicalDevice[]> physical(new VkPhysicalDevice[physicalCount]);
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, physical.get());
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+
+    for (uint32_t p = 0; p < physicalCount; ++p) {
+        uint32_t count = 0u;
+        result = vkEnumerateDeviceExtensionProperties(physical[p], nullptr, &count, nullptr);
+        ASSERT_EQ(result, VK_SUCCESS);
+
+        // We need at least two for the test to be relevant.
+        if (count < 2u) {
+            continue;
+        }
+
+        std::unique_ptr<VkExtensionProperties[]> properties(new VkExtensionProperties[count]);
+        count = 1;
+        result = vkEnumerateDeviceExtensionProperties(physical[p], nullptr, &count, properties.get());
+        ASSERT_EQ(result, VK_INCOMPLETE);
+    }
+
+    vkDestroyInstance(instance, nullptr);
+}
+
+TEST_F(EnumerateInstanceExtensionProperties, Count) {
+    uint32_t count = 0u;
+    VkResult result = vkEnumerateInstanceExtensionProperties(nullptr, &count, nullptr);
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    if (std::find(arguments.begin(), arguments.end(), "count") != arguments.end()) {
+        std::cout << "count=" << count << '\n';
+    }
+}
+
+TEST_F(EnumerateInstanceExtensionProperties, OnePass) {
+    // Count required for this test.
+    if (std::find(arguments.begin(), arguments.end(), "count") == arguments.end()) {
+        return;
+    }
+
+    uint32_t count = std::stoul(arguments[2]);
+
+    std::unique_ptr<VkExtensionProperties[]> properties(new VkExtensionProperties[count]);
+    VkResult result = vkEnumerateInstanceExtensionProperties(nullptr, &count, properties.get());
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    if (std::find(arguments.begin(), arguments.end(), "properties") != arguments.end()) {
+        for (uint32_t p = 0; p < count; ++p) {
+            std::cout << "properties[" << p << "] =" << ' ' << properties[p].extensionName << ' ' << properties[p].specVersion
+                      << '\n';
+        }
+    }
+}
+
+TEST_F(EnumerateInstanceExtensionProperties, TwoPass) {
+    uint32_t count = 0u;
+    VkResult result = vkEnumerateInstanceExtensionProperties(nullptr, &count, nullptr);
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    std::unique_ptr<VkExtensionProperties[]> properties(new VkExtensionProperties[count]);
+    result = vkEnumerateInstanceExtensionProperties(nullptr, &count, properties.get());
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    if (std::find(arguments.begin(), arguments.end(), "properties") != arguments.end()) {
+        for (uint32_t p = 0; p < count; ++p) {
+            std::cout << "properties[" << p << "] =" << ' ' << properties[p].extensionName << ' ' << properties[p].specVersion
+                      << '\n';
+        }
+    }
+}
+
+TEST_F(EnumerateInstanceExtensionProperties, InstanceExtensionEnumerated) {
+    uint32_t count = 0u;
+    VkResult result = vkEnumerateInstanceExtensionProperties(nullptr, &count, nullptr);
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    std::unique_ptr<VkExtensionProperties[]> properties(new VkExtensionProperties[count]);
+    result = vkEnumerateInstanceExtensionProperties(nullptr, &count, properties.get());
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    ASSERT_NE(std::find_if(
+                  &properties[0], &properties[count],
+                  [](VkExtensionProperties const &properties) { return strcmp(properties.extensionName, "VK_KHR_surface") == 0; }),
+              &properties[count]);
+}
+
+TEST(EnumerateDeviceExtensionProperties, DeviceExtensionEnumerated) {
+    VkInstance instance = VK_NULL_HANDLE;
+    VkResult result = vkCreateInstance(VK::InstanceCreateInfo(), VK_NULL_HANDLE, &instance);
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    uint32_t physicalCount = 0;
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, nullptr);
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+
+    std::unique_ptr<VkPhysicalDevice[]> physical(new VkPhysicalDevice[physicalCount]);
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, physical.get());
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+
+    for (uint32_t p = 0; p < physicalCount; ++p) {
+        uint32_t count = 0u;
+        result = vkEnumerateDeviceExtensionProperties(physical[p], nullptr, &count, nullptr);
+        ASSERT_EQ(result, VK_SUCCESS);
+
+        std::unique_ptr<VkExtensionProperties[]> properties(new VkExtensionProperties[count]);
+        result = vkEnumerateDeviceExtensionProperties(physical[p], nullptr, &count, properties.get());
+        ASSERT_EQ(result, VK_SUCCESS);
+
+        ASSERT_NE(std::find_if(&properties[0], &properties[count],
+                               [](VkExtensionProperties const &properties) {
+                                   return strcmp(properties.extensionName, "VK_KHR_swapchain") == 0;
+                               }),
+                  &properties[count]);
+    }
+
+    vkDestroyInstance(instance, nullptr);
+}
+
+TEST_F(ImplicitLayer, Present) {
+    auto const info = VK::InstanceCreateInfo();
+    VkInstance instance = VK_NULL_HANDLE;
+    VkResult result = vkCreateInstance(info, VK_NULL_HANDLE, &instance);
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    vkDestroyInstance(instance, nullptr);
+}
+
+TEST(WrapObjects, Insert) {
+    VkInstance instance = VK_NULL_HANDLE;
+    VkResult result = vkCreateInstance(VK::InstanceCreateInfo(), VK_NULL_HANDLE, &instance);
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    uint32_t physicalCount = 0;
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, nullptr);
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+
+    std::unique_ptr<VkPhysicalDevice[]> physical(new VkPhysicalDevice[physicalCount]);
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, physical.get());
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+
+    for (uint32_t p = 0; p < physicalCount; ++p) {
+        test_create_device(physical[p]);
+    }
+
+    vkDestroyInstance(instance, nullptr);
+}
+
+// Test making sure the allocation functions are called to allocate and clean up everything during
+// a CreateInstance/DestroyInstance call pair.
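+// Note: the pUserData value is updated before each loader call so the allocation-tracking
+// callbacks can tell which phase of the test triggered a given allocation or free.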
+TEST(Allocation, Instance) {
+    auto const info = VK::InstanceCreateInfo();
+    VkInstance instance = VK_NULL_HANDLE;
+    VkAllocationCallbacks alloc_callbacks = {};
+    alloc_callbacks.pUserData = (void *)0x00000001;
+    alloc_callbacks.pfnAllocation = AllocCallbackFunc;
+    alloc_callbacks.pfnReallocation = ReallocCallbackFunc;
+    alloc_callbacks.pfnFree = FreeCallbackFunc;
+
+    InitAllocTracker(2048);
+
+    VkResult result = vkCreateInstance(info, &alloc_callbacks, &instance);
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    alloc_callbacks.pUserData = (void *)0x00000002;
+    vkDestroyInstance(instance, &alloc_callbacks);
+
+    // Make sure everything's been freed
+    ASSERT_EQ(true, IsAllocTrackerEmpty());
+    FreeAllocTracker();
+}
+
+// Test making sure the allocation functions are called to allocate and clean up everything during
+// a CreateInstance/DestroyInstance call pair with a call to GetInstanceProcAddr.
+TEST(Allocation, GetInstanceProcAddr) {
+    auto const info = VK::InstanceCreateInfo();
+    VkInstance instance = VK_NULL_HANDLE;
+    VkAllocationCallbacks alloc_callbacks = {};
+    alloc_callbacks.pUserData = (void *)0x00000010;
+    alloc_callbacks.pfnAllocation = AllocCallbackFunc;
+    alloc_callbacks.pfnReallocation = ReallocCallbackFunc;
+    alloc_callbacks.pfnFree = FreeCallbackFunc;
+
+    InitAllocTracker(2048);
+
+    VkResult result = vkCreateInstance(info, &alloc_callbacks, &instance);
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    void *pfnCreateDevice = (void *)vkGetInstanceProcAddr(instance, "vkCreateDevice");
+    void *pfnDestroyDevice = (void *)vkGetInstanceProcAddr(instance, "vkDestroyDevice");
+    ASSERT_TRUE(pfnCreateDevice != NULL && pfnDestroyDevice != NULL);
+
+    alloc_callbacks.pUserData = (void *)0x00000011;
+    vkDestroyInstance(instance, &alloc_callbacks);
+
+    // Make sure everything's been freed
+    ASSERT_EQ(true, IsAllocTrackerEmpty());
+    FreeAllocTracker();
+}
+
+// Test making sure the allocation functions are called to allocate and clean up everything during
+// a vkEnumeratePhysicalDevices call pair.
+TEST(Allocation, EnumeratePhysicalDevices) {
+    auto const info = VK::InstanceCreateInfo();
+    VkInstance instance = VK_NULL_HANDLE;
+    VkAllocationCallbacks alloc_callbacks = {};
+    alloc_callbacks.pUserData = (void *)0x00000021;
+    alloc_callbacks.pfnAllocation = AllocCallbackFunc;
+    alloc_callbacks.pfnReallocation = ReallocCallbackFunc;
+    alloc_callbacks.pfnFree = FreeCallbackFunc;
+
+    InitAllocTracker(2048);
+
+    VkResult result = vkCreateInstance(info, &alloc_callbacks, &instance);
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    uint32_t physicalCount = 0;
+    alloc_callbacks.pUserData = (void *)0x00000022;
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, nullptr);
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+
+    std::unique_ptr<VkPhysicalDevice[]> physical(new VkPhysicalDevice[physicalCount]);
+    alloc_callbacks.pUserData = (void *)0x00000023;
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, physical.get());
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+
+    alloc_callbacks.pUserData = (void *)0x00000024;
+    vkDestroyInstance(instance, &alloc_callbacks);
+
+    // Make sure everything's been freed
+    ASSERT_EQ(true, IsAllocTrackerEmpty());
+    FreeAllocTracker();
+}
+
+// Test making sure the allocation functions are called to allocate and clean up everything from
+// vkCreateInstance, to vkCreateDevice, and then through their destructors, with special
+// allocators used on both the instance and device.
+TEST(Allocation, InstanceAndDevice) {
+    auto const info = VK::InstanceCreateInfo();
+    VkInstance instance = VK_NULL_HANDLE;
+    VkAllocationCallbacks alloc_callbacks = {};
+    alloc_callbacks.pUserData = (void *)0x00000031;
+    alloc_callbacks.pfnAllocation = AllocCallbackFunc;
+    alloc_callbacks.pfnReallocation = ReallocCallbackFunc;
+    alloc_callbacks.pfnFree = FreeCallbackFunc;
+
+    InitAllocTracker(2048);
+
+    VkResult result = vkCreateInstance(info, &alloc_callbacks, &instance);
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    uint32_t physicalCount = 0;
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, nullptr);
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+
+    std::unique_ptr<VkPhysicalDevice[]> physical(new VkPhysicalDevice[physicalCount]);
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, physical.get());
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+
+    for (uint32_t p = 0; p < physicalCount; ++p) {
+        uint32_t familyCount = 0;
+        vkGetPhysicalDeviceQueueFamilyProperties(physical[p], &familyCount, nullptr);
+        ASSERT_GT(familyCount, 0u);
+
+        std::unique_ptr<VkQueueFamilyProperties[]> family(new VkQueueFamilyProperties[familyCount]);
+        vkGetPhysicalDeviceQueueFamilyProperties(physical[p], &familyCount, family.get());
+        ASSERT_GT(familyCount, 0u);
+
+        for (uint32_t q = 0; q < familyCount; ++q) {
+            if (~family[q].queueFlags & VK_QUEUE_GRAPHICS_BIT) {
+                continue;
+            }
+
+            float const priorities[] = {0.0f};  // Temporary required due to MSVC bug.
+            VkDeviceQueueCreateInfo const queueInfo[1]{
+                VK::DeviceQueueCreateInfo().queueFamilyIndex(q).queueCount(1).pQueuePriorities(priorities)};
+
+            auto const deviceInfo = VK::DeviceCreateInfo().queueCreateInfoCount(1).pQueueCreateInfos(queueInfo);
+
+            VkDevice device;
+            alloc_callbacks.pUserData = (void *)0x00000032;
+            result = vkCreateDevice(physical[p], deviceInfo, &alloc_callbacks, &device);
+            ASSERT_EQ(result, VK_SUCCESS);
+
+            alloc_callbacks.pUserData = (void *)0x00000033;
+            vkDestroyDevice(device, &alloc_callbacks);
+        }
+    }
+
+    alloc_callbacks.pUserData = (void *)0x00000034;
+    vkDestroyInstance(instance, &alloc_callbacks);
+
+    // Make sure everything's been freed
+    ASSERT_EQ(true, IsAllocTrackerEmpty());
+    FreeAllocTracker();
+}
+
+// Test making sure the allocation functions are called to allocate and clean up everything from
+// vkCreateInstance, to vkCreateDevice, and then through their destructors, with special
+// allocators used on only the instance and not the device.
+TEST(Allocation, InstanceButNotDevice) {
+    auto const info = VK::InstanceCreateInfo();
+    VkInstance instance = VK_NULL_HANDLE;
+    VkAllocationCallbacks alloc_callbacks = {};
+    alloc_callbacks.pUserData = (void *)0x00000041;
+    alloc_callbacks.pfnAllocation = AllocCallbackFunc;
+    alloc_callbacks.pfnReallocation = ReallocCallbackFunc;
+    alloc_callbacks.pfnFree = FreeCallbackFunc;
+
+    InitAllocTracker(2048);
+
+    VkResult result = vkCreateInstance(info, &alloc_callbacks, &instance);
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    uint32_t physicalCount = 0;
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, nullptr);
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+
+    std::unique_ptr<VkPhysicalDevice[]> physical(new VkPhysicalDevice[physicalCount]);
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, physical.get());
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+
+    for (uint32_t p = 0; p < physicalCount; ++p) {
+        uint32_t familyCount = 0;
+        vkGetPhysicalDeviceQueueFamilyProperties(physical[p], &familyCount, nullptr);
+        ASSERT_GT(familyCount, 0u);
+
+        std::unique_ptr<VkQueueFamilyProperties[]> family(new VkQueueFamilyProperties[familyCount]);
+        vkGetPhysicalDeviceQueueFamilyProperties(physical[p], &familyCount, family.get());
+        ASSERT_GT(familyCount, 0u);
+
+        for (uint32_t q = 0; q < familyCount; ++q) {
+            if (~family[q].queueFlags & VK_QUEUE_GRAPHICS_BIT) {
+                continue;
+            }
+
+            float const priorities[] = {0.0f};  // Temporary required due to MSVC bug.
+            VkDeviceQueueCreateInfo const queueInfo[1]{
+                VK::DeviceQueueCreateInfo().queueFamilyIndex(q).queueCount(1).pQueuePriorities(priorities)};
+
+            auto const deviceInfo = VK::DeviceCreateInfo().queueCreateInfoCount(1).pQueueCreateInfos(queueInfo);
+
+            VkDevice device;
+            result = vkCreateDevice(physical[p], deviceInfo, NULL, &device);
+            ASSERT_EQ(result, VK_SUCCESS);
+
+            vkDestroyDevice(device, NULL);
+        }
+    }
+
+    alloc_callbacks.pUserData = (void *)0x00000042;
+    vkDestroyInstance(instance, &alloc_callbacks);
+
+    // Make sure everything's been freed
+    ASSERT_EQ(true, IsAllocTrackerEmpty());
+    FreeAllocTracker();
+}
+
+// Test making sure the allocation functions are called to allocate and clean up everything from
+// vkCreateInstance, to vkCreateDevice, and then through their destructors, with special
+// allocators used on only the device and not the instance.
+TEST(Allocation, DeviceButNotInstance) {
+    auto const info = VK::InstanceCreateInfo();
+    VkInstance instance = VK_NULL_HANDLE;
+    VkAllocationCallbacks alloc_callbacks = {};
+    alloc_callbacks.pfnAllocation = AllocCallbackFunc;
+    alloc_callbacks.pfnReallocation = ReallocCallbackFunc;
+    alloc_callbacks.pfnFree = FreeCallbackFunc;
+
+    InitAllocTracker(2048);
+
+    VkResult result = vkCreateInstance(info, NULL, &instance);
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    uint32_t physicalCount = 0;
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, nullptr);
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+
+    std::unique_ptr<VkPhysicalDevice[]> physical(new VkPhysicalDevice[physicalCount]);
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, physical.get());
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+
+    for (uint32_t p = 0; p < physicalCount; ++p) {
+        uint32_t familyCount = 0;
+        vkGetPhysicalDeviceQueueFamilyProperties(physical[p], &familyCount, nullptr);
+        ASSERT_GT(familyCount, 0u);
+
+        std::unique_ptr<VkQueueFamilyProperties[]> family(new VkQueueFamilyProperties[familyCount]);
+        vkGetPhysicalDeviceQueueFamilyProperties(physical[p], &familyCount, family.get());
+        ASSERT_GT(familyCount, 0u);
+
+        for (uint32_t q = 0; q < familyCount; ++q) {
+            if (~family[q].queueFlags & VK_QUEUE_GRAPHICS_BIT) {
+                continue;
+            }
+
+            float const priorities[] = {0.0f};  // Temporary required due to MSVC bug.
+            VkDeviceQueueCreateInfo const queueInfo[1]{
+                VK::DeviceQueueCreateInfo().queueFamilyIndex(q).queueCount(1).pQueuePriorities(priorities)};
+
+            auto const deviceInfo = VK::DeviceCreateInfo().queueCreateInfoCount(1).pQueueCreateInfos(queueInfo);
+
+            VkDevice device;
+            alloc_callbacks.pUserData = (void *)0x00000051;
+            result = vkCreateDevice(physical[p], deviceInfo, &alloc_callbacks, &device);
+            ASSERT_EQ(result, VK_SUCCESS);
+
+            alloc_callbacks.pUserData = (void *)0x00000052;
+            vkDestroyDevice(device, &alloc_callbacks);
+        }
+    }
+
+    vkDestroyInstance(instance, NULL);
+
+    // Make sure everything's been freed
+    ASSERT_EQ(true, IsAllocTrackerEmpty());
+    FreeAllocTracker();
+}
+
+// Test failure during vkCreateInstance to make sure we don't leak memory if
+// one of the out-of-memory conditions triggers.
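+// The tracker is initialized with a fail_index so that one specific host allocation is forced to
+// fail; the loop below advances that index until creation finally succeeds, checking after each
+// induced failure that nothing was leaked.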
+TEST(Allocation, CreateInstanceIntentionalAllocFail) {
+    auto const info = VK::InstanceCreateInfo();
+    VkInstance instance = VK_NULL_HANDLE;
+    VkAllocationCallbacks alloc_callbacks = {};
+    alloc_callbacks.pfnAllocation = AllocCallbackFunc;
+    alloc_callbacks.pfnReallocation = ReallocCallbackFunc;
+    alloc_callbacks.pfnFree = FreeCallbackFunc;
+
+    VkResult result;
+    uint32_t fail_index = 1;
+    do {
+        InitAllocTracker(9999, fail_index);
+
+        result = vkCreateInstance(info, &alloc_callbacks, &instance);
+        if (result == VK_ERROR_OUT_OF_HOST_MEMORY) {
+            if (!IsAllocTrackerEmpty()) {
+                std::cout << "Failed on index " << fail_index << '\n';
+                ASSERT_EQ(true, IsAllocTrackerEmpty());
+            }
+        }
+        fail_index++;
+        // Make sure we don't run past the tracker size (i.e. loop forever)
+        ASSERT_LT(fail_index, 9999u);
+
+        FreeAllocTracker();
+    } while (result == VK_ERROR_OUT_OF_HOST_MEMORY);
+
+    vkDestroyInstance(instance, &alloc_callbacks);
+}
+
+// Test failure during vkCreateDevice to make sure we don't leak memory if
+// one of the out-of-memory conditions triggers.
+TEST(Allocation, CreateDeviceIntentionalAllocFail) {
+    auto const info = VK::InstanceCreateInfo();
+    VkInstance instance = VK_NULL_HANDLE;
+    VkDevice device = VK_NULL_HANDLE;
+    VkAllocationCallbacks alloc_callbacks = {};
+    alloc_callbacks.pfnAllocation = AllocCallbackFunc;
+    alloc_callbacks.pfnReallocation = ReallocCallbackFunc;
+    alloc_callbacks.pfnFree = FreeCallbackFunc;
+
+    VkResult result = vkCreateInstance(info, NULL, &instance);
+    ASSERT_EQ(result, VK_SUCCESS);
+
+    uint32_t physicalCount = 0;
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, nullptr);
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+
+    std::unique_ptr<VkPhysicalDevice[]> physical(new VkPhysicalDevice[physicalCount]);
+    result = vkEnumeratePhysicalDevices(instance, &physicalCount, physical.get());
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(physicalCount, 0u);
+
+    for (uint32_t p = 0; p < physicalCount; ++p) {
+        uint32_t familyCount = 0;
+        vkGetPhysicalDeviceQueueFamilyProperties(physical[p], &familyCount, nullptr);
+        ASSERT_GT(familyCount, 0u);
+
+        std::unique_ptr<VkQueueFamilyProperties[]> family(new VkQueueFamilyProperties[familyCount]);
+        vkGetPhysicalDeviceQueueFamilyProperties(physical[p], &familyCount, family.get());
+        ASSERT_GT(familyCount, 0u);
+
+        for (uint32_t q = 0; q < familyCount; ++q) {
+            if (~family[q].queueFlags & VK_QUEUE_GRAPHICS_BIT) {
+                continue;
+            }
+
+            float const priorities[] = {0.0f};  // Temporary required due to MSVC bug.
+            VkDeviceQueueCreateInfo const queueInfo[1]{
+                VK::DeviceQueueCreateInfo().queueFamilyIndex(q).queueCount(1).pQueuePriorities(priorities)};
+
+            auto const deviceInfo = VK::DeviceCreateInfo().queueCreateInfoCount(1).pQueueCreateInfos(queueInfo);
+
+            uint32_t fail_index = 1;
+            do {
+                InitAllocTracker(9999, fail_index);
+
+                result = vkCreateDevice(physical[p], deviceInfo, &alloc_callbacks, &device);
+                if (result == VK_ERROR_OUT_OF_HOST_MEMORY) {
+                    if (!IsAllocTrackerEmpty()) {
+                        std::cout << "Failed on index " << fail_index << '\n';
+                        ASSERT_EQ(true, IsAllocTrackerEmpty());
+                    }
+                }
+                fail_index++;
+                // Make sure we don't run past the tracker size (i.e. loop forever)
+                ASSERT_LT(fail_index, 9999u);
+
+                FreeAllocTracker();
+            } while (result == VK_ERROR_OUT_OF_HOST_MEMORY);
+            vkDestroyDevice(device, &alloc_callbacks);
+            break;
+        }
+    }
+
+    vkDestroyInstance(instance, NULL);
+}
+
+// Test failure during vkCreateInstance and vkCreateDevice to make sure we don't
+// leak memory if one of the out-of-memory conditions triggers.
+TEST(Allocation, CreateInstanceDeviceIntentionalAllocFail) {
+    auto const info = VK::InstanceCreateInfo();
+    VkInstance instance = VK_NULL_HANDLE;
+    VkDevice device = VK_NULL_HANDLE;
+    VkAllocationCallbacks alloc_callbacks = {};
+    alloc_callbacks.pfnAllocation = AllocCallbackFunc;
+    alloc_callbacks.pfnReallocation = ReallocCallbackFunc;
+    alloc_callbacks.pfnFree = FreeCallbackFunc;
+
+    VkResult result = VK_ERROR_OUT_OF_HOST_MEMORY;
+    uint32_t fail_index = 0;
+    uint32_t physicalCount = 0;
+    while (result == VK_ERROR_OUT_OF_HOST_MEMORY) {
+        InitAllocTracker(9999, ++fail_index);
+        ASSERT_LT(fail_index, 9999u);
+
+        result = vkCreateInstance(info, &alloc_callbacks, &instance);
+        if (result == VK_ERROR_OUT_OF_HOST_MEMORY) {
+            if (!IsAllocTrackerEmpty()) {
+                std::cout << "Failed on index " << fail_index << '\n';
+                ASSERT_EQ(true, IsAllocTrackerEmpty());
+            }
+            FreeAllocTracker();
+            continue;
+        }
+        ASSERT_EQ(result, VK_SUCCESS);
+
+        physicalCount = 0;
+        result = vkEnumeratePhysicalDevices(instance, &physicalCount, nullptr);
+        if (result == VK_ERROR_OUT_OF_HOST_MEMORY) {
+            vkDestroyInstance(instance, &alloc_callbacks);
+            if (!IsAllocTrackerEmpty()) {
+                std::cout << "Failed on index " << fail_index << '\n';
+                ASSERT_EQ(true, IsAllocTrackerEmpty());
+            }
+            FreeAllocTracker();
+            continue;
+        }
+        ASSERT_EQ(result, VK_SUCCESS);
+
+        std::unique_ptr<VkPhysicalDevice[]> physical(new VkPhysicalDevice[physicalCount]);
+        result = vkEnumeratePhysicalDevices(instance, &physicalCount, physical.get());
+        if (result == VK_ERROR_OUT_OF_HOST_MEMORY) {
+            vkDestroyInstance(instance, &alloc_callbacks);
+            if (!IsAllocTrackerEmpty()) {
+                std::cout << "Failed on index " << fail_index << '\n';
+                ASSERT_EQ(true, IsAllocTrackerEmpty());
+            }
+            FreeAllocTracker();
+            continue;
+        }
+        ASSERT_EQ(result, VK_SUCCESS);
+
+        uint32_t familyCount = 0;
+        vkGetPhysicalDeviceQueueFamilyProperties(physical[0], &familyCount, nullptr);
+        ASSERT_GT(familyCount, 0u);
+
+        std::unique_ptr<VkQueueFamilyProperties[]> family(new VkQueueFamilyProperties[familyCount]);
+        vkGetPhysicalDeviceQueueFamilyProperties(physical[0], &familyCount, family.get());
+        ASSERT_GT(familyCount, 0u);
+
+        uint32_t queue_index = 0;
+        for (uint32_t q = 0; q < familyCount; ++q) {
+            if (family[q].queueFlags & VK_QUEUE_GRAPHICS_BIT) {
+                queue_index = q;
+                break;
+            }
+        }
+
+        float const priorities[] = {0.0f};  // Temporary required due to MSVC bug.
+        VkDeviceQueueCreateInfo const queueInfo[1]{
+            VK::DeviceQueueCreateInfo().queueFamilyIndex(queue_index).queueCount(1).pQueuePriorities(priorities)};
+
+        auto const deviceInfo = VK::DeviceCreateInfo().queueCreateInfoCount(1).pQueueCreateInfos(queueInfo);
+
+        result = vkCreateDevice(physical[0], deviceInfo, &alloc_callbacks, &device);
+        if (result == VK_ERROR_OUT_OF_HOST_MEMORY) {
+            vkDestroyInstance(instance, &alloc_callbacks);
+            if (!IsAllocTrackerEmpty()) {
+                std::cout << "Failed on index " << fail_index << '\n';
+                ASSERT_EQ(true, IsAllocTrackerEmpty());
+            }
+            FreeAllocTracker();
+            continue;
+        }
+        vkDestroyDevice(device, &alloc_callbacks);
+        vkDestroyInstance(instance, &alloc_callbacks);
+        FreeAllocTracker();
+    }
+}
+
+// Used by run_loader_tests.sh to test that calling vkEnumeratePhysicalDeviceGroupsKHR without
+// first querying the count works, and that it returns only physical devices made available by
+// the standard enumeration call.
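+// The test cross-checks the two lists: every device reported through a group must also appear in
+// the vkEnumeratePhysicalDevices results, and vice versa.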
+TEST(EnumeratePhysicalDeviceGroupsKHR, OneCall) {
+    VkInstance instance = VK_NULL_HANDLE;
+    char const *const names[] = {VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME};
+    auto const info = VK::InstanceCreateInfo().enabledExtensionCount(1).ppEnabledExtensionNames(names);
+    uint32_t group;
+    uint32_t dev;
+    std::vector<std::pair<VkPhysicalDevice, bool>> phys_dev_normal_found;
+    std::vector<std::pair<VkPhysicalDevice, bool>> phys_dev_group_found;
+
+    VkResult result = vkCreateInstance(info, VK_NULL_HANDLE, &instance);
+    if (result == VK_ERROR_EXTENSION_NOT_PRESENT) {
+        // Extension isn't present, just skip this test
+        ASSERT_EQ(result, VK_ERROR_EXTENSION_NOT_PRESENT);
+        std::cout << "Skipping EnumeratePhysicalDeviceGroupsKHR : OneCall due to Instance lacking support"
+                  << " for " << VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME << " extension\n";
+        return;
+    }
+
+    uint32_t phys_dev_count = 500;
+    std::unique_ptr<VkPhysicalDevice[]> phys_devs(new VkPhysicalDevice[phys_dev_count]);
+    result = vkEnumeratePhysicalDevices(instance, &phys_dev_count, phys_devs.get());
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(phys_dev_count, 0u);
+
+    // Initialize the normal physical device boolean pair array
+    for (dev = 0; dev < phys_dev_count; dev++) {
+        phys_dev_normal_found.push_back(std::make_pair(phys_devs[dev], false));
+    }
+
+    // Get a pointer to the new vkEnumeratePhysicalDeviceGroupsKHR call
+    PFN_vkEnumeratePhysicalDeviceGroupsKHR p_vkEnumeratePhysicalDeviceGroupsKHR =
+        (PFN_vkEnumeratePhysicalDeviceGroupsKHR)vkGetInstanceProcAddr(instance, "vkEnumeratePhysicalDeviceGroupsKHR");
+
+    // Setup the group information in preparation for the call
+    uint32_t group_count = 30;
+    std::unique_ptr<VkPhysicalDeviceGroupPropertiesKHR[]> phys_dev_groups(new VkPhysicalDeviceGroupPropertiesKHR[group_count]);
+    for (group = 0; group < group_count; group++) {
+        phys_dev_groups[group].sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR;
+        phys_dev_groups[group].pNext = nullptr;
+        phys_dev_groups[group].physicalDeviceCount = 0;
+        memset(phys_dev_groups[group].physicalDevices, 0, sizeof(VkPhysicalDevice) * VK_MAX_DEVICE_GROUP_SIZE_KHR);
+        phys_dev_groups[group].subsetAllocation = VK_FALSE;
+    }
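+    // Note: the caller is expected to set sType (and a null pNext) on each output element before
+    // the call, which is why the array is initialized up front.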
+
+    result = p_vkEnumeratePhysicalDeviceGroupsKHR(instance, &group_count, phys_dev_groups.get());
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(group_count, 0u);
+
+    // Initialize the group physical device boolean pair array
+    for (group = 0; group < group_count; group++) {
+        for (dev = 0; dev < phys_dev_groups[group].physicalDeviceCount; dev++) {
+            phys_dev_group_found.push_back(std::make_pair(phys_dev_groups[group].physicalDevices[dev], false));
+        }
+    }
+
+    // Now, make sure we can find each normal and group item in the other list
+    for (dev = 0; dev < phys_dev_count; dev++) {
+        for (group = 0; group < phys_dev_group_found.size(); group++) {
+            if (phys_dev_normal_found[dev].first == phys_dev_group_found[group].first) {
+                phys_dev_normal_found[dev].second = true;
+                phys_dev_group_found[group].second = true;
+                break;
+            }
+        }
+    }
+
+    for (dev = 0; dev < phys_dev_count; dev++) {
+        ASSERT_EQ(phys_dev_normal_found[dev].second, true);
+    }
+    for (dev = 0; dev < phys_dev_group_found.size(); dev++) {
+        ASSERT_EQ(phys_dev_group_found[dev].second, true);
+    }
+
+    vkDestroyInstance(instance, nullptr);
+}
+
+// Used by run_loader_tests.sh to test for the expected usage of the
+// vkEnumeratePhysicalDeviceGroupsKHR call in a two-call fashion (once with a null pointer to
+// get the count, and then again with an array to fill).
+TEST(EnumeratePhysicalDeviceGroupsKHR, TwoCall) {
+    VkInstance instance = VK_NULL_HANDLE;
+    char const *const names[] = {VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME};
+    auto const info = VK::InstanceCreateInfo().enabledExtensionCount(1).ppEnabledExtensionNames(names);
+    uint32_t group;
+    uint32_t group_count;
+    uint32_t dev;
+    std::vector<std::pair<VkPhysicalDevice, bool>> phys_dev_normal_found;
+    std::vector<std::pair<VkPhysicalDevice, bool>> phys_dev_group_found;
+
+    VkResult result = vkCreateInstance(info, VK_NULL_HANDLE, &instance);
+    if (result == VK_ERROR_EXTENSION_NOT_PRESENT) {
+        // Extension isn't present, just skip this test
+        ASSERT_EQ(result, VK_ERROR_EXTENSION_NOT_PRESENT);
+        std::cout << "Skipping EnumeratePhysicalDeviceGroupsKHR : TwoCall due to Instance lacking support"
+                  << " for " << VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME << " extension\n";
+        return;
+    }
+
+    // Get a pointer to the new vkEnumeratePhysicalDeviceGroupsKHR call
+    PFN_vkEnumeratePhysicalDeviceGroupsKHR p_vkEnumeratePhysicalDeviceGroupsKHR =
+        (PFN_vkEnumeratePhysicalDeviceGroupsKHR)vkGetInstanceProcAddr(instance, "vkEnumeratePhysicalDeviceGroupsKHR");
+
+    // Setup the group information in preparation for the call
+    uint32_t array_group_count = 30;
+    std::unique_ptr<VkPhysicalDeviceGroupPropertiesKHR[]> phys_dev_groups(
+        new VkPhysicalDeviceGroupPropertiesKHR[array_group_count]);
+    for (group = 0; group < array_group_count; group++) {
+        phys_dev_groups[group].sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR;
+        phys_dev_groups[group].pNext = nullptr;
+        phys_dev_groups[group].physicalDeviceCount = 0;
+        memset(phys_dev_groups[group].physicalDevices, 0, sizeof(VkPhysicalDevice) * VK_MAX_DEVICE_GROUP_SIZE_KHR);
+        phys_dev_groups[group].subsetAllocation = VK_FALSE;
+    }
+
+    result = p_vkEnumeratePhysicalDeviceGroupsKHR(instance, &group_count, nullptr);
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(group_count, 0u);
+    ASSERT_LT(group_count, array_group_count);
+
+    result = p_vkEnumeratePhysicalDeviceGroupsKHR(instance, &group_count, phys_dev_groups.get());
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(group_count, 0u);
+    ASSERT_LT(group_count, array_group_count);
+
+    // Initialize the group physical device boolean pair array
+    for (group = 0; group < group_count; group++) {
+        for (dev = 0; dev < phys_dev_groups[group].physicalDeviceCount; dev++) {
+            phys_dev_group_found.push_back(std::make_pair(phys_dev_groups[group].physicalDevices[dev], false));
+        }
+    }
+
+    uint32_t phys_dev_count = 500;
+    std::unique_ptr<VkPhysicalDevice[]> phys_devs(new VkPhysicalDevice[phys_dev_count]);
+    result = vkEnumeratePhysicalDevices(instance, &phys_dev_count, phys_devs.get());
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(phys_dev_count, 0u);
+
+    // Initialize the normal physical device boolean pair array
+    for (dev = 0; dev < phys_dev_count; dev++) {
+        phys_dev_normal_found.push_back(std::make_pair(phys_devs[dev], false));
+    }
+
+    // Now, make sure we can find each normal and group item in the other list
+    for (dev = 0; dev < phys_dev_count; dev++) {
+        for (group = 0; group < phys_dev_group_found.size(); group++) {
+            if (phys_dev_normal_found[dev].first == phys_dev_group_found[group].first) {
+                phys_dev_normal_found[dev].second = true;
+                phys_dev_group_found[group].second = true;
+                break;
+            }
+        }
+    }
+
+    for (dev = 0; dev < phys_dev_count; dev++) {
+        ASSERT_EQ(phys_dev_normal_found[dev].second, true);
+    }
+    for (dev = 0; dev < phys_dev_group_found.size(); dev++) {
+        ASSERT_EQ(phys_dev_group_found[dev].second, true);
+    }
+
+    vkDestroyInstance(instance, nullptr);
+}
+
+// Used by run_loader_tests.sh to test the expected behavior of the EnumeratePhysicalDeviceGroupsKHR
+// call when the array provided is too small to hold the full list.
+TEST(EnumeratePhysicalDeviceGroupsKHR, TwoCallIncomplete) {
+    VkInstance instance = VK_NULL_HANDLE;
+    char const *const names[] = {VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME};
+    auto const info = VK::InstanceCreateInfo().enabledExtensionCount(1).ppEnabledExtensionNames(names);
+    uint32_t group;
+    uint32_t group_count;
+
+    VkResult result = vkCreateInstance(info, VK_NULL_HANDLE, &instance);
+    if (result == VK_ERROR_EXTENSION_NOT_PRESENT) {
+        // Extension isn't present, just skip this test
+        ASSERT_EQ(result, VK_ERROR_EXTENSION_NOT_PRESENT);
+        std::cout << "Skipping EnumeratePhysicalDeviceGroupsKHR : TwoCallIncomplete due to Instance lacking support"
+                  << " for " << VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME << " extension\n";
+        return;
+    }
+
+    // Get a pointer to the new vkEnumeratePhysicalDeviceGroupsKHR call
+    PFN_vkEnumeratePhysicalDeviceGroupsKHR p_vkEnumeratePhysicalDeviceGroupsKHR =
+        (PFN_vkEnumeratePhysicalDeviceGroupsKHR)vkGetInstanceProcAddr(instance, "vkEnumeratePhysicalDeviceGroupsKHR");
+
+    // Setup the group information in preparation for the call
+    uint32_t array_group_count = 30;
+    std::unique_ptr<VkPhysicalDeviceGroupPropertiesKHR[]> phys_dev_groups(
+        new VkPhysicalDeviceGroupPropertiesKHR[array_group_count]);
+    for (group = 0; group < array_group_count; group++) {
+        phys_dev_groups[group].sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR;
+        phys_dev_groups[group].pNext = nullptr;
+        phys_dev_groups[group].physicalDeviceCount = 0;
+        memset(phys_dev_groups[group].physicalDevices, 0, sizeof(VkPhysicalDevice) * VK_MAX_DEVICE_GROUP_SIZE_KHR);
+        phys_dev_groups[group].subsetAllocation = VK_FALSE;
+    }
+
+    result = p_vkEnumeratePhysicalDeviceGroupsKHR(instance, &group_count, nullptr);
+    ASSERT_EQ(result, VK_SUCCESS);
+    ASSERT_GT(group_count, 0u);
+    ASSERT_LT(group_count, array_group_count);
+
+    group_count -= 1;
+
+    result = p_vkEnumeratePhysicalDeviceGroupsKHR(instance, &group_count, phys_dev_groups.get());
+    ASSERT_EQ(result, VK_INCOMPLETE);
+
+    vkDestroyInstance(instance, nullptr);
+}
+
+int main(int argc, char **argv) {
+    int result;
+
+    ::testing::InitGoogleTest(&argc, argv);
+
+    if (argc > 0) {
+        CommandLine::Initialize(argc, argv);
+    }
+
+    result = RUN_ALL_TESTS();
+
+    return result;
+}
diff --git a/src/third_party/vulkan-loader/src/tests/run_all_tests.sh b/src/third_party/vulkan-loader/src/tests/run_all_tests.sh
new file mode 100755
index 0000000..c2bb740
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/run_all_tests.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+#
+# Run all the regression tests
+cd "$(dirname "$0")"
+
+# Halt on error
+set -e
+
+# Verify that the loader is working
+./run_loader_tests.sh
+./run_extra_loader_tests.sh
+
diff --git a/src/third_party/vulkan-loader/src/tests/run_extra_loader_tests.sh b/src/third_party/vulkan-loader/src/tests/run_extra_loader_tests.sh
new file mode 100755
index 0000000..86d1f59
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/run_extra_loader_tests.sh
@@ -0,0 +1,111 @@
+#!/bin/bash
+
+pushd "$(dirname "$0")" > /dev/null
+
+RunImplicitLayerTest()
+{
+    # Check for local implicit directory.
+    : "${HOME:?}"
+    local implicitDirectory="$HOME/.local/share/vulkan/implicit_layer.d"
+    if [ ! -d "$implicitDirectory" ]
+    then
+        mkdir -p "$implicitDirectory"
+    fi
+
+    # Check for the shared object.
+    local sharedObject="libVkLayer_test.so"
+    local layerDirectory="./layers"
+    if [ ! -f "$layerDirectory/$sharedObject" ]
+    then
+        echo "The file, $layerDirectory/$sharedObject, can not be found." >&2
+        return 1
+    fi
+
+    # Check for the json, which includes the optional enable environment variable.
+    local json="VkLayer_test.json"
+    if [ ! -f "$layerDirectory/$json" ]
+    then
+        echo "The file, $layerDirectory/$json, can not be found." >&2
+        return 1
+    fi
+
+    # Copy the test layer into the implicit directory.
+    if ! cp "$layerDirectory/$sharedObject" "$implicitDirectory/" || ! cp "$layerDirectory/$json" "$implicitDirectory/"
+    then
+        echo "unable to install test layer" >&2
+        return 1
+    fi
+
+    # Test without setting enable environment variable. The loader should not load the layer.
+    output=$(GTEST_FILTER=ImplicitLayer.Present \
+        ./vk_loader_validation_tests 2>&1)
+    if echo "$output" | grep -q "VK_LAYER_LUNARG_test: CreateInstance"
+    then
+       echo "test layer detected but enable environment variable was not set" >&2
+       return 1
+    fi
+
+    # Test enable environment variable with good value. The loader should load the layer.
+    output=$(ENABLE_LAYER_TEST_1=enable \
+        GTEST_FILTER=ImplicitLayer.Present \
+        ./vk_loader_validation_tests 2>&1)
+    if ! echo "$output" | grep -q "VK_LAYER_LUNARG_test: CreateInstance"
+    then
+       echo "test layer not detected" >&2
+       return 1
+    fi
+
+    # Test enable environment variable with bad value. The loader should not load the layer.
+    output=$(ENABLE_LAYER_TEST_1=wrong \
+        GTEST_FILTER=ImplicitLayer.Present \
+        ./vk_loader_validation_tests 2>&1)
+    if echo "$output" | grep -q "VK_LAYER_LUNARG_test: CreateInstance"
+    then
+       echo "test layer detected but enable environment variable was set to wrong value" >&2
+       return 1
+    fi
+
+    # Test disable environment variable. The loader should not load the layer.
+    output=$(DISABLE_LAYER_TEST_1=value \
+        GTEST_FILTER=ImplicitLayer.Present \
+        ./vk_loader_validation_tests 2>&1)
+    if echo "$output" | grep -q "VK_LAYER_LUNARG_test: CreateInstance"
+    then
+       echo "test layer detected but disable environment variable was set" >&2
+       return 1
+    fi
+
+    # Remove the enable environment variable.
+    if ! sed -i '/enable_environment\|ENABLE_LAYER_TEST_1\|},/d' "$implicitDirectory/$json"
+    then
+        echo "unable to remove enable environment variable" >&2
+        return 1
+    fi
+
+    # Test without setting enable environment variable. The loader should load the layer.
+    output=$(GTEST_FILTER=ImplicitLayer.Present \
+        ./vk_loader_validation_tests 2>&1)
+    if ! echo "$output" | grep -q "VK_LAYER_LUNARG_test: CreateInstance"
+    then
+       echo "test layer not detected" >&2
+       return 1
+    fi
+
+    # Remove the test layer.
+    if ! rm "$implicitDirectory/$sharedObject" || ! rm "$implicitDirectory/$json"
+    then
+        echo "unable to uninstall test layer" >&2
+        return 1
+    fi
+
+    echo "ImplicitLayer test PASSED"
+}
+
+# Prevent the implicit layer test from running concurrently with itself in another process.
+# i.e. flock the following command subshell with an automatic file descriptor.
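+# The {filedesc}> redirection opens the lock file and stores the resulting file descriptor number
+# in $filedesc, which flock then holds for the duration of the subshell.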
+filename=${0##*/}
+(
+    flock "$filedesc" && ! RunImplicitLayerTest && echo "ImplicitLayer test FAILED" >&2 && exit 1
+){filedesc}>"/tmp/$filename.lockfile"
+
+popd > /dev/null
diff --git a/src/third_party/vulkan-loader/src/tests/run_loader_tests.sh b/src/third_party/vulkan-loader/src/tests/run_loader_tests.sh
new file mode 100755
index 0000000..329d17b
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/run_loader_tests.sh
@@ -0,0 +1,146 @@
+#!/bin/bash
+
+pushd "$(dirname "$0")" > /dev/null
+
+RunEnvironmentVariablePathsTest()
+{
+    # Check for proper handling of paths specified via environment variables.
+
+    # Set up a layer path that includes default and user-specified locations,
+    # so that the test app can find them.  Include some badly specified elements as well.
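+    # The empty path elements and nonexistent directories added below are intentional; they
+    # exercise the loader's handling of badly formed search paths.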
+    vk_layer_path="$VK_LAYER_PATH"
+    vk_layer_path+=":/usr/local/etc/vulkan/implicit_layer.d:/usr/local/share/vulkan/implicit_layer.d"
+    vk_layer_path+=":/tmp/carol:::"
+    vk_layer_path+=":/etc/vulkan/implicit_layer.d:/usr/share/vulkan/implicit_layer.d:$HOME/.local/share/vulkan/implicit_layer.d"
+    vk_layer_path+=":::::/tandy:"
+
+    # Set vars to include some "challenging" paths and run the test.
+    output=$(VK_LOADER_DEBUG=all \
+       XDG_CONFIG_DIRS=":/tmp/goober:::::/tmp/goober2/:/tmp/goober3/with spaces:::" \
+       XDG_DATA_DIRS="::::/tmp/goober4:::::/tmp/goober5:/tmp/goober6/with spaces::::/tmp/goober7:" \
+       VK_LAYER_PATH=${vk_layer_path} \
+       GTEST_FILTER=CreateInstance.LayerPresent \
+       ./vk_loader_validation_tests 2>&1)
+
+    # Here is a path we expect to find.  The loader constructs these from the XDG* env vars.
+    right_path="/tmp/goober/vulkan/icd.d:/tmp/goober2/vulkan/icd.d:/tmp/goober3/with spaces/vulkan/icd.d"
+    # There are other paths that come from SYSCONFIG settings established at build time.
+    # So we can't really guess at what those are here.
+    right_path+=".*"
+    # Also expect to find these, since we added them.
+    right_path+="/tmp/goober4/vulkan/icd.d:/tmp/goober5/vulkan/icd.d:/tmp/goober6/with spaces/vulkan/icd.d:/tmp/goober7/vulkan/icd.d"
+    echo "$output" | grep -q "$right_path"
+    ec=$?
+    if [ $ec -eq 1 ]
+    then
+       echo "Environment Variable Path test FAILED - ICD path incorrect" >&2
+       exit 1
+    fi
+    # Change the string to implicit layers.
+    right_path=${right_path//icd.d/implicit_layer.d}
+    echo "$output" | grep -q "$right_path"
+    ec=$?
+    if [ $ec -eq 1 ]
+    then
+       echo "Environment Variable Path test FAILED - Implicit layer path incorrect" >&2
+       exit 1
+    fi
+    # Sadly, the loader does not clean up this path and just stumbles through it.
+    # So just make sure it is the same.
+    right_path="${vk_layer_path}"
+    echo "$output" | grep -q "$right_path"
+    ec=$?
+    if [ $ec -eq 1 ]
+    then
+       echo "Environment Variable Path test FAILED - VK_LAYER_PATH incorrect" >&2
+       exit 1
+    fi
+    echo "Environment Variable Path test PASSED"
+}
+
+RunCreateInstanceTest()
+{
+    # Check for layer insertion via CreateInstance.
+    output=$(VK_LOADER_DEBUG=all \
+       GTEST_FILTER=CreateInstance.LayerPresent \
+       ./vk_loader_validation_tests 2>&1)
+
+    echo "$output" | grep -q "Insert instance layer VK_LAYER_LUNARG_test"
+    ec=$?
+
+    if [ $ec -eq 1 ]
+    then
+       echo "CreateInstance insertion test FAILED - test layer not detected in instance layers" >&2
+       exit 1
+    fi
+    echo "CreateInstance Insertion test PASSED"
+}
+
+RunEnumerateInstanceLayerPropertiesTest()
+{
+    count=$(GTEST_FILTER=EnumerateInstanceLayerProperties.Count \
+        ./vk_loader_validation_tests count 2>&1 |
+        grep -o 'count=[0-9]\+' | sed 's/^.*=//')
+
+    if [ "$count" -gt 1 ]
+    then
+        diff \
+            <(GTEST_PRINT_TIME=0 \
+                GTEST_FILTER=EnumerateInstanceLayerProperties.OnePass \
+                ./vk_loader_validation_tests count "$count" properties 2>&1 |
+                grep 'properties') \
+            <(GTEST_PRINT_TIME=0 \
+                GTEST_FILTER=EnumerateInstanceLayerProperties.TwoPass \
+                ./vk_loader_validation_tests properties 2>&1 |
+                grep 'properties')
+    fi
+    ec=$?
+
+    if [ $ec -eq 1 ]
+    then
+        echo "EnumerateInstanceLayerProperties OnePass vs TwoPass test FAILED - properties do not match" >&2
+        exit 1
+    fi
+    echo "EnumerateInstanceLayerProperties OnePass vs TwoPass test PASSED"
+}
+
+RunEnumerateInstanceExtensionPropertiesTest()
+{
+    count=$(GTEST_FILTER=EnumerateInstanceExtensionProperties.Count \
+        ./vk_loader_validation_tests count 2>&1 |
+        grep -o 'count=[0-9]\+' | sed 's/^.*=//')
+
+    if [ "$count" -gt 1 ]
+    then
+        diff \
+            <(GTEST_PRINT_TIME=0 \
+                GTEST_FILTER=EnumerateInstanceExtensionProperties.OnePass \
+                ./vk_loader_validation_tests count "$count" properties 2>&1 |
+                grep 'properties') \
+            <(GTEST_PRINT_TIME=0 \
+                GTEST_FILTER=EnumerateInstanceExtensionProperties.TwoPass \
+                ./vk_loader_validation_tests properties 2>&1 |
+                grep 'properties')
+    fi
+    ec=$?
+
+    if [ $ec -eq 1 ]
+    then
+        echo "EnumerateInstanceExtensionProperties OnePass vs TwoPass test FAILED - properties do not match" >&2
+        exit 1
+    fi
+    echo "EnumerateInstanceExtensionProperties OnePass vs TwoPass test PASSED"
+}
+
+VK_LAYER_PATH="$PWD/layers"
+./vk_loader_validation_tests
+
+RunEnvironmentVariablePathsTest
+RunCreateInstanceTest
+RunEnumerateInstanceLayerPropertiesTest
+RunEnumerateInstanceExtensionPropertiesTest
+
+# Test the wrap objects layer.
+./run_wrap_objects_tests.sh || exit 1
+
+popd > /dev/null
diff --git a/src/third_party/vulkan-loader/src/tests/run_wrap_objects_tests.sh b/src/third_party/vulkan-loader/src/tests/run_wrap_objects_tests.sh
new file mode 100755
index 0000000..637876b
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/run_wrap_objects_tests.sh
@@ -0,0 +1,121 @@
+#!/bin/bash
+
+# Note that the Validation Layers must be installed in the default system
+# paths and/or VK_LAYER_PATH must point to Validation Layers to run this test.
+
+pushd $(dirname "$0") > /dev/null
+
+vk_layer_path=$VK_LAYER_PATH:`pwd`/layers
+ld_library_path=$LD_LIBRARY_PATH:`pwd`/layers
+
+# Check for insertion of wrap-objects layer.
+output=$(VK_LAYER_PATH=$vk_layer_path \
+   LD_LIBRARY_PATH=$ld_library_path \
+   VK_INSTANCE_LAYERS=VK_LAYER_LUNARG_wrap_objects \
+   VK_LOADER_DEBUG=all \
+   GTEST_FILTER=WrapObjects.Insert \
+   ./vk_loader_validation_tests 2>&1)
+
+echo "$output" | grep -q "Insert instance layer VK_LAYER_LUNARG_wrap_objects"
+ec=$?
+
+if [ $ec -eq 1 ]
+then
+   echo "Insertion test FAILED - wrap-objects not detected in instance layers" >&2
+   exit 1
+fi
+
+echo "$output" | grep -q "Inserted device layer VK_LAYER_LUNARG_wrap_objects"
+ec=$?
+
+if [ $ec -eq 1 ]
+then
+   echo "Insertion test FAILED - wrap-objects not detected in device layers" >&2
+   exit 1
+fi
+echo "Insertion test PASSED"
+
+# Check for insertion of wrap-objects layer in front.
+output=$(VK_LAYER_PATH=$vk_layer_path \
+   LD_LIBRARY_PATH=$ld_library_path \
+   VK_INSTANCE_LAYERS=VK_LAYER_LUNARG_parameter_validation:VK_LAYER_LUNARG_wrap_objects \
+   VK_LOADER_DEBUG=all \
+   GTEST_FILTER=WrapObjects.Insert \
+   ./vk_loader_validation_tests 2>&1)
+
+echo "$output" | grep -q "Insert instance layer VK_LAYER_LUNARG_wrap_objects"
+ec=$?
+
+if [ $ec -eq 1 ]
+then
+   echo "Front insertion test FAILED - wrap-objects not detected in instance layers" >&2
+   exit 1
+fi
+
+echo "$output" | grep -q "Inserted device layer VK_LAYER_LUNARG_wrap_objects"
+ec=$?
+
+if [ $ec -eq 1 ]
+then
+   echo "Front insertion test FAILED - wrap-objects not detected in device layers" >&2
+   exit 1
+fi
+echo "Front insertion test PASSED"
+
+# Check for insertion of wrap-objects layer in back.
+output=$(VK_LAYER_PATH=$vk_layer_path \
+   LD_LIBRARY_PATH=$ld_library_path \
+   VK_INSTANCE_LAYERS=VK_LAYER_LUNARG_wrap_objects:VK_LAYER_LUNARG_parameter_validation \
+   VK_LOADER_DEBUG=all \
+   GTEST_FILTER=WrapObjects.Insert \
+   ./vk_loader_validation_tests 2>&1)
+
+echo "$output" | grep -q "Insert instance layer VK_LAYER_LUNARG_wrap_objects"
+ec=$?
+
+if [ $ec -eq 1 ]
+then
+   echo "Back insertion test FAILED - wrap-objects not detected in instance layers" >&2
+   exit 1
+fi
+
+echo "$output" | grep -q "Inserted device layer VK_LAYER_LUNARG_wrap_objects"
+ec=$?
+
+if [ $ec -eq 1 ]
+then
+   echo "Back insertion test FAILED - wrap-objects not detected in device layers" >&2
+   exit 1
+fi
+echo "Back insertion test PASSED"
+
+# Check for insertion of wrap-objects layer in middle.
+output=$(VK_LAYER_PATH=$vk_layer_path \
+   LD_LIBRARY_PATH=$ld_library_path \
+   VK_INSTANCE_LAYERS=VK_LAYER_LUNARG_core_validation:VK_LAYER_LUNARG_wrap_objects:VK_LAYER_LUNARG_parameter_validation \
+   VK_LOADER_DEBUG=all \
+   GTEST_FILTER=WrapObjects.Insert \
+   ./vk_loader_validation_tests 2>&1)
+
+echo "$output" | grep -q "Insert instance layer VK_LAYER_LUNARG_wrap_objects"
+ec=$?
+
+if [ $ec -eq 1 ]
+then
+   echo "Middle insertion test FAILED - wrap-objects not detected in instance layers" >&2
+   exit 1
+fi
+
+echo "$output" | grep -q "Inserted device layer VK_LAYER_LUNARG_wrap_objects"
+ec=$?
+
+if [ $ec -eq 1 ]
+then
+   echo "Middle insertion test FAILED - wrap-objects not detected in device layers" >&2
+   exit 1
+fi
+echo "Middle insertion test PASSED"
+
+popd > /dev/null
+
+exit 0
diff --git a/src/third_party/vulkan-loader/src/tests/test_common.h b/src/third_party/vulkan-loader/src/tests/test_common.h
new file mode 100644
index 0000000..ac3f6a2
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/test_common.h
@@ -0,0 +1,186 @@
+/*
+ * Copyright (c) 2015-2016 The Khronos Group Inc.
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Chia-I Wu <olvaffe@gmail.com>
+ * Author: Chris Forbes <chrisf@ijw.co.nz>
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Mike Stroyan <mike@LunarG.com>
+ * Author: Tobin Ehlis <tobine@google.com>
+ * Author: Tony Barbour <tony@LunarG.com>
+ */
+
+#ifndef TEST_COMMON_H
+#define TEST_COMMON_H
+
+#include <assert.h>
+#include <stdbool.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#ifdef _WIN32
+#define NOMINMAX
+// WinSock2.h must be included *BEFORE* windows.h
+#include <winsock2.h>
+#endif
+
+#include <vulkan/vk_sdk_platform.h>
+#include <vulkan/vulkan.h>
+
+#ifdef _WIN32
+#pragma warning(push)
+/*
+    warnings 4251 and 4275 have to do with potential dll-interface mismatch
+    between library (gtest) and users. Since we build the gtest library
+    as part of the test build we know that the dll-interface will match and
+    can disable these warnings.
+ */
+#pragma warning(disable : 4251)
+#pragma warning(disable : 4275)
+#endif
+
+// GTest and Xlib collide due to redefinitions of "None" and "Bool"
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+#pragma push_macro("None")
+#pragma push_macro("Bool")
+#undef None
+#undef Bool
+#endif
+
+// Use the NDK's header on Android
+#include "gtest/gtest.h"
+
+// Redefine Xlib definitions
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+#pragma pop_macro("Bool")
+#pragma pop_macro("None")
+#endif
+
+#ifdef _WIN32
+#pragma warning(pop)
+#endif
+
+#define ASSERT_VK_SUCCESS(err) ASSERT_EQ(VK_SUCCESS, err) << vk_result_string(err)
+
+static inline const char *vk_result_string(VkResult err) {
+    switch (err) {
+#define STR(r) \
+    case r:    \
+        return #r
+        STR(VK_SUCCESS);
+        STR(VK_NOT_READY);
+        STR(VK_TIMEOUT);
+        STR(VK_EVENT_SET);
+        STR(VK_EVENT_RESET);
+        STR(VK_ERROR_INITIALIZATION_FAILED);
+        STR(VK_ERROR_OUT_OF_HOST_MEMORY);
+        STR(VK_ERROR_OUT_OF_DEVICE_MEMORY);
+        STR(VK_ERROR_DEVICE_LOST);
+        STR(VK_ERROR_EXTENSION_NOT_PRESENT);
+        STR(VK_ERROR_LAYER_NOT_PRESENT);
+        STR(VK_ERROR_MEMORY_MAP_FAILED);
+        STR(VK_ERROR_INCOMPATIBLE_DRIVER);
+#undef STR
+        default:
+            return "UNKNOWN_RESULT";
+    }
+}
+
+static inline void test_error_callback(const char *expr, const char *file, unsigned int line, const char *function) {
+    ADD_FAILURE_AT(file, line) << "Assertion: `" << expr << "'";
+}
+
+#if defined(__linux__) || defined(__APPLE__)
+    /* Linux/macOS common code: */
+
+#include <pthread.h>
+
+// Threads:
+typedef pthread_t test_platform_thread;
+
+static inline int test_platform_thread_create(test_platform_thread *thread, void *(*func)(void *), void *data) {
+    pthread_attr_t thread_attr;
+    pthread_attr_init(&thread_attr);
+    return pthread_create(thread, &thread_attr, func, data);
+}
+static inline int test_platform_thread_join(test_platform_thread thread, void **retval) { return pthread_join(thread, retval); }
+
+// Thread IDs:
+typedef pthread_t test_platform_thread_id;
+static inline test_platform_thread_id test_platform_get_thread_id() { return pthread_self(); }
+
+// Thread mutex:
+typedef pthread_mutex_t test_platform_thread_mutex;
+static inline void test_platform_thread_create_mutex(test_platform_thread_mutex *pMutex) { pthread_mutex_init(pMutex, NULL); }
+static inline void test_platform_thread_lock_mutex(test_platform_thread_mutex *pMutex) { pthread_mutex_lock(pMutex); }
+static inline void test_platform_thread_unlock_mutex(test_platform_thread_mutex *pMutex) { pthread_mutex_unlock(pMutex); }
+static inline void test_platform_thread_delete_mutex(test_platform_thread_mutex *pMutex) { pthread_mutex_destroy(pMutex); }
+typedef pthread_cond_t test_platform_thread_cond;
+static inline void test_platform_thread_init_cond(test_platform_thread_cond *pCond) { pthread_cond_init(pCond, NULL); }
+static inline void test_platform_thread_cond_wait(test_platform_thread_cond *pCond, test_platform_thread_mutex *pMutex) {
+    pthread_cond_wait(pCond, pMutex);
+}
+static inline void test_platform_thread_cond_broadcast(test_platform_thread_cond *pCond) { pthread_cond_broadcast(pCond); }
+
+#elif defined(_WIN32)  // defined(__linux__) || defined(__APPLE__)
+// Threads:
+typedef HANDLE test_platform_thread;
+static inline int test_platform_thread_create(test_platform_thread *thread, void *(*func)(void *), void *data) {
+    DWORD threadID;
+    *thread = CreateThread(NULL,  // default security attributes
+                           0,     // use default stack size
+                           (LPTHREAD_START_ROUTINE)func,
+                           data,        // thread function argument
+                           0,           // use default creation flags
+                           &threadID);  // returns thread identifier
+    return (*thread != NULL);
+}
+static inline int test_platform_thread_join(test_platform_thread thread, void **retval) {
+    return WaitForSingleObject(thread, INFINITE);
+}
+
+// Thread IDs:
+typedef DWORD test_platform_thread_id;
+static test_platform_thread_id test_platform_get_thread_id() { return GetCurrentThreadId(); }
+
+// Thread mutex:
+typedef CRITICAL_SECTION test_platform_thread_mutex;
+static void test_platform_thread_create_mutex(test_platform_thread_mutex *pMutex) { InitializeCriticalSection(pMutex); }
+static void test_platform_thread_lock_mutex(test_platform_thread_mutex *pMutex) { EnterCriticalSection(pMutex); }
+static void test_platform_thread_unlock_mutex(test_platform_thread_mutex *pMutex) { LeaveCriticalSection(pMutex); }
+static void test_platform_thread_delete_mutex(test_platform_thread_mutex *pMutex) { DeleteCriticalSection(pMutex); }
+typedef CONDITION_VARIABLE test_platform_thread_cond;
+static void test_platform_thread_init_cond(test_platform_thread_cond *pCond) { InitializeConditionVariable(pCond); }
+static void test_platform_thread_cond_wait(test_platform_thread_cond *pCond, test_platform_thread_mutex *pMutex) {
+    SleepConditionVariableCS(pCond, pMutex, INFINITE);
+}
+static void test_platform_thread_cond_broadcast(test_platform_thread_cond *pCond) { WakeAllConditionVariable(pCond); }
+#else                  // defined(_WIN32)
+
+#error The "test_common.h" file must be modified for this OS.
+
+    // NOTE: In order to support another OS, an #elif needs to be added (above the
+    // "#else // defined(_WIN32)") for that OS, and OS-specific versions of the
+    // contents of this file must be created.
+
+    // NOTE: Other OS-specific changes are also needed for this OS.  Search for
+    // files with "WIN32" in it, as a quick way to find files that must be changed.
+
+#endif  // defined(_WIN32)
+
+#endif  // TEST_COMMON_H
diff --git a/src/third_party/vulkan-loader/src/tests/vk_layer_settings.txt b/src/third_party/vulkan-loader/src/tests/vk_layer_settings.txt
new file mode 100644
index 0000000..3d8dae9
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/vk_layer_settings.txt
@@ -0,0 +1,8 @@
+lunarg_core_validation.report_flags = error
+lunarg_core_validation.debug_action = VK_DBG_LAYER_ACTION_LOG_MSG
+lunarg_object_tracker.report_flags = error
+lunarg_object_tracker.debug_action = VK_DBG_LAYER_ACTION_LOG_MSG
+lunarg_parameter_validation.report_flags = error
+lunarg_parameter_validation.debug_action = VK_DBG_LAYER_ACTION_LOG_MSG
+google_threading.report_flags = error
+google_threading.debug_action = VK_DBG_LAYER_ACTION_LOG_MSG
diff --git a/src/third_party/vulkan-loader/src/tests/vk_loader_validation_tests.vcxproj.user b/src/third_party/vulkan-loader/src/tests/vk_loader_validation_tests.vcxproj.user
new file mode 100644
index 0000000..fa9f9d4
--- /dev/null
+++ b/src/third_party/vulkan-loader/src/tests/vk_loader_validation_tests.vcxproj.user
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project ToolsVersion="12.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='RelWithDebInfo|x64'">
+    <LocalDebuggerEnvironment>VK_LAYER_PATH=$(ProjectDir)\layers\$(Configuration)</LocalDebuggerEnvironment>
+    <DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+    <LocalDebuggerEnvironment>VK_LAYER_PATH=$(ProjectDir)\layers\$(Configuration)</LocalDebuggerEnvironment>
+    <DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+    <LocalDebuggerEnvironment>VK_LAYER_PATH=$(ProjectDir)\layers\$(Configuration)</LocalDebuggerEnvironment>
+    <DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='MinSizeRel|x64'">
+    <LocalDebuggerEnvironment>VK_LAYER_PATH=$(ProjectDir)\layers\$(Configuration)</LocalDebuggerEnvironment>
+    <DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='RelWithDebInfo|Win32'">
+    <LocalDebuggerEnvironment>VK_LAYER_PATH=$(ProjectDir)\layers\$(Configuration)</LocalDebuggerEnvironment>
+    <DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+    <LocalDebuggerEnvironment>VK_LAYER_PATH=$(ProjectDir)\layers\$(Configuration)</LocalDebuggerEnvironment>
+    <DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='MinSizeRel|Win32'">
+    <LocalDebuggerEnvironment>VK_LAYER_PATH=$(ProjectDir)\layers\$(Configuration)</LocalDebuggerEnvironment>
+    <DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+    <LocalDebuggerEnvironment>VK_LAYER_PATH=$(ProjectDir)\layers\$(Configuration)</LocalDebuggerEnvironment>
+    <DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
+  </PropertyGroup>
+</Project>
diff --git a/src/third_party/vulkan-tools/src/.appveyor.yml b/src/third_party/vulkan-tools/src/.appveyor.yml
new file mode 100644
index 0000000..b88089c
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/.appveyor.yml
@@ -0,0 +1,73 @@
+# Windows Build Configuration for AppVeyor
+# http://www.appveyor.com/docs/appveyor-yml
+#
+
+# This version starts a separate job for each platform config
+# in order to get around the AppVeyor limit of 60 mins per job.
+
+# build version format
+version: "{build}"
+
+# Free accounts have a max of 1, but ask anyway.
+max_jobs: 4
+
+os:
+  - Visual Studio 2015
+
+init:
+  - git config --global core.autocrlf true
+
+environment:
+  PYTHON_PATH: "C:/Python35"
+  PYTHON_PACKAGE_PATH: "C:/Python35/Scripts"
+  CMAKE_URL: "http://cmake.org/files/v3.10/cmake-3.10.2-win64-x64.zip"
+
+branches:
+  only:
+    - master
+
+# Install desired CMake version 3.10.2 before any other building
+install:
+  - appveyor DownloadFile %CMAKE_URL% -FileName cmake.zip
+  - 7z x cmake.zip -oC:\cmake > nul
+  - set path=C:\cmake\bin;%path%
+  - cmake --version
+
+before_build:
+  - "SET PATH=C:\\Python35;C:\\Python35\\Scripts;%PATH%"
+  - echo Starting build for %APPVEYOR_REPO_NAME% in %APPVEYOR_BUILD_FOLDER%
+  # Build dependencies
+  - python %APPVEYOR_BUILD_FOLDER%/scripts/update_deps.py --arch=%PLATFORM% --config=%CONFIGURATION% --dir=%APPVEYOR_BUILD_FOLDER%/external
+  # Verify consistency between source file generators and output
+  - echo Verifying consistency between source file generators and output
+  - python %APPVEYOR_BUILD_FOLDER%/scripts/generate_source.py --verify %APPVEYOR_BUILD_FOLDER%/external/Vulkan-Headers/registry
+  # Generate build files using CMake for the build step.
+  - echo Generating Vulkan-Tools CMake files for %PLATFORM% %CONFIGURATION%
+  - cd %APPVEYOR_BUILD_FOLDER%
+  - mkdir build
+  - cd build
+  - cmake -A %PLATFORM% -C %APPVEYOR_BUILD_FOLDER%/external/helper.cmake ..
+  - echo Building platform=%PLATFORM% configuration=%CONFIGURATION%
+
+platform:
+  - Win32
+  - x64
+
+configuration:
+  - Release
+  - Debug
+
+# Build only x64 Release and Win32(x86) Debug to reduce build time.
+# This should still provide adequate 32-bit vs 64-bit and
+# Release vs Debug coverage.
+matrix:
+  exclude:
+    - configuration: Release
+      platform: Win32
+    - configuration: Debug
+      platform: x64
+
+build:
+  parallel: true                  # enable MSBuild parallel builds
+  project: build/Vulkan-Tools.sln # path to Visual Studio solution or project
+  verbosity: quiet                # quiet|minimal|normal|detailed
diff --git a/src/third_party/vulkan-tools/src/.clang-format b/src/third_party/vulkan-tools/src/.clang-format
new file mode 100644
index 0000000..0af4d40
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/.clang-format
@@ -0,0 +1,7 @@
+---
+# Use defaults from the Google style with the following exceptions:
+BasedOnStyle: Google
+IndentWidth: 4
+ColumnLimit: 132
+SortIncludes: false
+...
diff --git a/src/third_party/vulkan-tools/src/.cmake-format.py b/src/third_party/vulkan-tools/src/.cmake-format.py
new file mode 100644
index 0000000..07d2f99
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/.cmake-format.py
@@ -0,0 +1,34 @@
+# Configuration for cmake-format (v0.4.1, circa Jul 2018)
+# https://github.com/cheshirekow/cmake_format
+
+# How wide to allow formatted cmake files
+line_width = 132
+
+# How many spaces to tab for indent
+tab_size = 4
+
+# If arglists are longer than this, always break them
+max_subargs_per_line = 3
+
+# If true, separate flow control names from their parentheses with a space
+separate_ctrl_name_with_space = False
+
+# If true, separate function names from parentheses with a space
+separate_fn_name_with_space = False
+
+# If a statement is wrapped to more than one line, then dangle the closing
+# parenthesis on its own line
+dangle_parens = False
+
+# What character to use for bulleted lists
+bullet_char = u'*'
+
+# What character to use as punctuation after numerals in an enumerated list
+enum_char = u'.'
+
+# What style line endings to use in the output.
+line_ending = u'unix'
+
+# Format command names consistently as 'lower' or 'upper' case
+command_case = u'lower'
+
diff --git a/src/third_party/vulkan-tools/src/.gn b/src/third_party/vulkan-tools/src/.gn
new file mode 100644
index 0000000..e190259
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/.gn
@@ -0,0 +1,22 @@
+# Copyright (C) 2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+buildconfig = "//build/config/BUILDCONFIG.gn"
+secondary_source = "//build-gn/secondary/"
+
+default_args = {
+    clang_use_chrome_plugins = false
+    use_custom_libcxx = false
+}
+
diff --git a/src/third_party/vulkan-tools/src/.travis.yml b/src/third_party/vulkan-tools/src/.travis.yml
new file mode 100644
index 0000000..f924a54
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/.travis.yml
@@ -0,0 +1,156 @@
+# Build Configuration for Travis CI
+# https://travis-ci.org
+
+dist: xenial
+sudo: required
+language: cpp
+
+matrix:
+  # Show final status immediately if a test fails.
+  fast_finish: true
+  allow_failures:
+    - env: CHECK_COMMIT_FORMAT=ON
+    - env: VULKAN_BUILD_TARGET=GN
+  include:
+    # Android build.
+    - os: linux
+      compiler: gcc
+      env: VULKAN_BUILD_TARGET=ANDROID ANDROID_TARGET=android-23 ANDROID_ABI=armeabi-v7a
+    # Android 64-bit build.
+    - os: linux
+      compiler: gcc
+      env: VULKAN_BUILD_TARGET=ANDROID ANDROID_TARGET=android-23 ANDROID_ABI=arm64-v8a
+    # Linux GCC debug build.
+    - os: linux
+      compiler: gcc
+      env: VULKAN_BUILD_TARGET=LINUX
+    # Linux clang debug build.
+    - os: linux
+      compiler: clang
+      env: VULKAN_BUILD_TARGET=LINUX
+    # Linux GN debug build
+    - os: linux
+      env: VULKAN_BUILD_TARGET=GN
+    # Check for proper clang formatting in the pull request.
+    - env: CHECK_FORMAT=ON
+    # Check for proper commit message formatting for commits in PR
+    - env: CHECK_COMMIT_FORMAT=ON
+
+cache: ccache
+
+# Use set -e so that the build fails when a command fails.
+# The default action for Travis-CI is to continue running even if a command fails.
+# See https://github.com/travis-ci/travis-ci/issues/1066.
+# Use the YAML block scalar header (|) to allow easier multiline script coding.
+
+before_install:
+  - set -e
+  - CMAKE_VERSION=3.10.2
+  - |
+    if [[ "${TRAVIS_OS_NAME}" == "linux" ]]; then
+      CMAKE_URL="https://cmake.org/files/v${CMAKE_VERSION%.*}/cmake-${CMAKE_VERSION}-Linux-x86_64.tar.gz"
+      echo CMAKE_URL=${CMAKE_URL}
+      mkdir cmake-${CMAKE_VERSION} && travis_retry wget --no-check-certificate -O - ${CMAKE_URL} | tar --strip-components=1 -xz -C cmake-${CMAKE_VERSION}
+      export PATH=${PWD}/cmake-${CMAKE_VERSION}/bin:${PATH}
+    else
+      brew install cmake || brew upgrade cmake
+    fi
+    cmake --version
+  - |
+    if [[ "$VULKAN_BUILD_TARGET" == "LINUX" ]] || [[ "$VULKAN_BUILD_TARGET" == "GN" ]]; then
+      # Install the appropriate Linux packages.
+      sudo apt-get -qq update
+      sudo apt-get -y install libxkbcommon-dev libwayland-dev libmirclient-dev libxrandr-dev libx11-xcb-dev python-pathlib
+    fi
+  - |
+    if [[ "$VULKAN_BUILD_TARGET" == "ANDROID" ]]; then
+      # Install the Android NDK.
+      export ARCH=`uname -m`
+      wget http://dl.google.com/android/repository/android-ndk-r15c-linux-${ARCH}.zip
+      unzip -u -q android-ndk-r15c-linux-${ARCH}.zip
+      export ANDROID_NDK_HOME=`pwd`/android-ndk-r15c
+      export JAVA_HOME="/usr/lib/jvm/java-8-oracle"
+      export PATH="$ANDROID_NDK_HOME:$PATH"
+    fi
+  - |
+    if [[ "$CHECK_FORMAT" == "ON" && "$TRAVIS_PULL_REQUEST" != "false" ]]; then
+      # Install the clang format diff tool, but only for pull requests.
+      curl -L http://llvm.org/svn/llvm-project/cfe/trunk/tools/clang-format/clang-format-diff.py -o scripts/clang-format-diff.py;
+    fi
+  # Misc setup
+  - export core_count=$(nproc || echo 4) && echo core_count = $core_count
+  - set +e
+
+script:
+  - set -e
+  - |
+    if [[ "$VULKAN_BUILD_TARGET" == "LINUX" ]]; then
+      # Build Vulkan-Tools dependencies
+      cd ${TRAVIS_BUILD_DIR}
+      python ${TRAVIS_BUILD_DIR}/scripts/update_deps.py --config=Debug --dir=${TRAVIS_BUILD_DIR}/external
+      # Verify consistency between source file generators and output
+      echo Verifying consistency between source file generators and output
+      python3 ${TRAVIS_BUILD_DIR}/scripts/generate_source.py --verify ${TRAVIS_BUILD_DIR}/external/Vulkan-Headers/registry
+      # Build Vulkan-Tools
+      mkdir build
+      cd build
+      cmake -C${TRAVIS_BUILD_DIR}/external/helper.cmake -DCMAKE_BUILD_TYPE=Debug ..
+      make -j $core_count
+      cd ${TRAVIS_BUILD_DIR}
+    fi
+  - |
+    if [[ "$VULKAN_BUILD_TARGET" == "LINUX" ]]; then
+      # Run vulkaninfo as a test
+      cd ${TRAVIS_BUILD_DIR}
+      cd build/vulkaninfo
+      ldd vulkaninfo
+      VK_ICD_FILENAMES=../icd/VkICD_mock_icd.json ./vulkaninfo
+      cd ${TRAVIS_BUILD_DIR}
+    fi
+  - |
+    if [[ "$VULKAN_BUILD_TARGET" == "ANDROID" ]]; then
+      pushd ${TRAVIS_BUILD_DIR}/build-android
+      ./update_external_sources_android.sh  --abi $ANDROID_ABI --no-build
+      popd
+      pushd ${TRAVIS_BUILD_DIR}/cube/android
+      USE_CCACHE=1 NDK_CCACHE=ccache ndk-build APP_ABI=$ANDROID_ABI -j $core_count
+      popd
+    fi
+  - |
+    if [[ "$VULKAN_BUILD_TARGET" == "GN" ]]; then
+      git clone https://chromium.googlesource.com/chromium/tools/depot_tools.git depot_tools
+      export PATH=$PATH:$PWD/depot_tools
+      ./build-gn/update_deps.sh
+      gn gen out/Debug
+      ninja -C out/Debug
+    fi
+  - |
+    if [[ "$CHECK_FORMAT" == "ON" ]]; then
+      if [[ "$TRAVIS_PULL_REQUEST" != "false" ]]; then
+        # Run the clang format check only for pull request builds because the
+        # master branch is needed to do the git diff.
+        echo "Checking clang-format between TRAVIS_BRANCH=$TRAVIS_BRANCH and TRAVIS_PULL_REQUEST_BRANCH=$TRAVIS_PULL_REQUEST_BRANCH"
+        ./scripts/check_code_format.sh
+      else
+        echo "Skipping clang-format check since this is not a pull request."
+      fi
+    fi
+  - |
+    if [[ "$CHECK_COMMIT_FORMAT" == "ON" ]]; then
+      if [[ "$TRAVIS_PULL_REQUEST" != "false" ]]; then
+        echo "Checking commit message formats:  See CONTRIBUTING.md"
+        ./scripts/check_commit_message_format.sh
+      fi
+    fi
+  - set +e
+
+notifications:
+  email:
+    recipients:
+      - cnorthrop@google.com
+      - tobine@google.com
+      - chrisforbes@google.com
+      - shannon@lunarg.com
+    on_success: change
+    on_failure: always
diff --git a/src/third_party/vulkan-tools/src/BUILD.gn b/src/third_party/vulkan-tools/src/BUILD.gn
new file mode 100644
index 0000000..9612930
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/BUILD.gn
@@ -0,0 +1,97 @@
+# Copyright (C) 2018-2019 The ANGLE Project Authors.
+# Copyright (C) 2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build_overrides/vulkan_tools.gni")
+
+# Vulkan-tools isn't ported to Fuchsia yet.
+# TODO(spang): Port mock ICD to Fuchsia.
+assert(!is_fuchsia)
+
+vulkan_undefine_configs = []
+if (is_win) {
+  vulkan_undefine_configs += [
+    "//build/config/win:nominmax",
+    "//build/config/win:unicode",
+  ]
+}
+
+raw_vulkan_icd_dir = rebase_path("icd", root_build_dir)
+
+vulkan_data_dir = "$root_out_dir/$vulkan_data_subdir"
+raw_vulkan_data_dir = rebase_path(vulkan_data_dir, root_build_dir)
+
+config("vulkan_internal_config") {
+  defines = [ "VULKAN_NON_CMAKE_BUILD" ]
+  if (is_clang || !is_win) {
+    cflags = [ "-Wno-unused-function" ]
+  }
+  if (is_linux) {
+    defines += [
+      "SYSCONFDIR=\"/etc\"",
+      "FALLBACK_CONFIG_DIRS=\"/etc/xdg\"",
+      "FALLBACK_DATA_DIRS=\"/usr/local/share:/usr/share\"",
+    ]
+  }
+}
+
+if (!is_android) {
+  # Vulkan Mock ICD
+  # ---------------
+  shared_library("VkICD_mock_icd") {
+    configs -= vulkan_undefine_configs
+    public_deps = [
+      "$vulkan_headers_dir:vulkan_headers",
+    ]
+    data_deps = [
+      ":vulkan_gen_icd_json_file",
+    ]
+    sources = [
+      "icd/generated/mock_icd.cpp",
+      "icd/generated/mock_icd.h",
+    ]
+    if (is_win) {
+      sources += [ "icd/VkICD_mock_icd.def" ]
+    }
+    configs -= [ "//build/config/compiler:chromium_code" ]
+    configs += [ "//build/config/compiler:no_chromium_code" ]
+  }
+
+  action("vulkan_gen_icd_json_file") {
+    script = "build-gn/generate_vulkan_layers_json.py"
+    sources = [
+      "$vulkan_headers_dir/include/vulkan/vulkan_core.h",
+    ]
+    args = [ "--icd" ]
+    if (is_win) {
+      sources += [ "icd/windows/VkICD_mock_icd.json" ]
+      args += [ "$raw_vulkan_icd_dir/windows" ]
+    }
+    if (is_linux) {
+      sources += [ "icd/linux/VkICD_mock_icd.json" ]
+      args += [ "$raw_vulkan_icd_dir/linux" ]
+    }
+    if (is_mac) {
+      sources += [ "icd/macos/VkICD_mock_icd.json" ]
+      args += [ "$raw_vulkan_icd_dir/macos" ]
+    }
+
+    # The layer JSON files are part of the necessary data deps.
+    outputs = [
+      "$vulkan_data_dir/VkICD_mock_icd.json",
+    ]
+    data = outputs
+    args += [ raw_vulkan_data_dir ] + rebase_path(sources, root_build_dir)
+  }
+}
diff --git a/src/third_party/vulkan-tools/src/BUILD.md b/src/third_party/vulkan-tools/src/BUILD.md
new file mode 100644
index 0000000..a633c56
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/BUILD.md
@@ -0,0 +1,869 @@
+# Build Instructions
+
+Instructions for building this repository on Linux, Windows, Android, and MacOS.
+
+## Index
+
+1. [Contributing](#contributing-to-the-repository)
+1. [Repository Content](#repository-content)
+1. [Repository Set-Up](#repository-set-up)
+1. [Windows Build](#building-on-windows)
+1. [Linux Build](#building-on-linux)
+1. [Android Build](#building-on-android)
+1. [MacOS build](#building-on-macos)
+
+## Contributing to the Repository
+
+If you intend to contribute, the preferred work flow is for you to develop
+your contribution in a fork of this repository in your GitHub account and then
+submit a pull request. Please see the [CONTRIBUTING.md](CONTRIBUTING.md) file
+in this repository for more details.
+
+## Repository Content
+
+This repository contains the source code necessary to build the following components:
+
+- vulkaninfo
+- vkcube and vkcubepp demos
+- mock ICD
+
+### Installed Files
+
+The `install` target installs the following files under the directory
+indicated by *install_dir*:
+
+- *install_dir*`/bin` : The vulkaninfo, vkcube and vkcubepp executables
+- *install_dir*`/lib` : The mock ICD library and JSON (Windows) (If INSTALL_ICD=ON)
+- *install_dir*`/share/vulkan/icd.d` : mock ICD JSON (Linux/MacOS) (If INSTALL_ICD=ON)
+
+The `uninstall` target can be used to remove the above files from the install
+directory.
+
+## Repository Set-Up
+
+### Display Drivers
+
+This repository does not contain a Vulkan-capable driver. You will need to
+obtain and install a Vulkan driver from your graphics hardware vendor or from
+some other suitable source if you intend to run Vulkan applications.
+
+### Download the Repository
+
+To create your local git repository:
+
+    git clone https://github.com/KhronosGroup/Vulkan-Tools.git
+
+### Repository Dependencies
+
+This repository attempts to resolve some of its dependencies by using
+components found from the following places, in this order:
+
+1. CMake or Environment variable overrides (e.g., -DVULKAN_HEADERS_INSTALL_DIR)
+1. LunarG Vulkan SDK, located by the `VULKAN_SDK` environment variable
+1. System-installed packages, mostly applicable on Linux
+
+Dependencies that cannot be resolved by the SDK or installed packages must be
+resolved with the "install directory" override and are listed below. The
+"install directory" override can also be used to force the use of a specific
+version of that dependency.
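+
+As an illustration only, such an override is passed when generating the build
+files with CMake; the install path below is a placeholder:
+
+    cmake -DVULKAN_HEADERS_INSTALL_DIR=absolute_path_to_install_dir ..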
+
+#### Vulkan-Headers
+
+This repository has a required dependency on the
+[Vulkan Headers repository](https://github.com/KhronosGroup/Vulkan-Headers).
+You must clone the headers repository and build its `install` target before
+building this repository. The Vulkan-Headers repository is required because it
+contains the Vulkan API definition files (registry) that are required to build
+the mock ICD. You must also take note of the headers install directory and
+pass it on the CMake command line for building this repository, as described
+below.
+
+Note that this dependency can be ignored if not building the mock ICD
+(CMake option: `-DBUILD_ICD=OFF`).
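+
+A minimal sketch of building the headers install target, assuming default CMake
+settings and the `build`/`install` directory convention described later in this
+document (see the Vulkan-Headers repository for its authoritative instructions):
+
+    git clone https://github.com/KhronosGroup/Vulkan-Headers.git
+    cd Vulkan-Headers
+    mkdir build
+    cd build
+    cmake -DCMAKE_INSTALL_PREFIX=install ..
+    cmake --build . --target install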
+
+#### glslang
+
+This repository has a required dependency on the `glslangValidator` (shader
+compiler) for compiling the shader programs for the vkcube demos.
+
+The CMake code in this repository downloads release binaries of glslang if a
+built glslang repository is not provided. The glslangValidator is obtained
+from this set of release binaries.
+
+If you don't wish the CMake code to download these binaries, then you must
+clone the [glslang repository](https://github.com/KhronosGroup/glslang) and
+build its `install` target. Follow the build instructions in the glslang
+[README.md](https://github.com/KhronosGroup/glslang/blob/master/README.md)
+file. Ensure that the `update_glslang_sources.py` script has been run as part
+of building glslang. You must also take note of the glslang install directory
+and pass it on the CMake command line for building this repository, as
+described below.
+
+Note that this dependency can be ignored if not building the vkcube demo
+(CMake option: `-DBUILD_CUBE=OFF`).
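+
+A rough sketch of the same pattern for glslang (the glslang README.md remains
+the authoritative reference, and the exact script invocation may differ):
+
+    git clone https://github.com/KhronosGroup/glslang.git
+    cd glslang
+    ./update_glslang_sources.py
+    mkdir build
+    cd build
+    cmake -DCMAKE_INSTALL_PREFIX=install ..
+    cmake --build . --target install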
+
+### Build and Install Directories
+
+A common convention is to place the build directory in the top directory of
+the repository with a name of `build` and place the install directory as a
+child of the build directory with the name `install`. The remainder of these
+instructions follow this convention, although you can use any name for these
+directories and place them in any location.
+
+### Building Dependent Repositories with Known-Good Revisions
+
+There is a Python utility script, `scripts/update_deps.py`, that you can use to
+gather and build the dependent repositories mentioned above. This script uses
+information stored in the `scripts/known_good.json` file to check out dependent
+repository revisions that are known to be compatible with the revision of this
+repository that you currently have checked out. As such, this script is useful
+as a quick-start tool for common use cases and default configurations.
+
+For all platforms, start with:
+
+    git clone git@github.com:KhronosGroup/Vulkan-Tools.git
+    cd Vulkan-Tools
+    mkdir build
+    cd build
+
+For 64-bit Linux and MacOS, continue with:
+
+    ../scripts/update_deps.py
+    cmake -C helper.cmake ..
+    cmake --build .
+
+For 64-bit Windows, continue with:
+
+    ..\scripts\update_deps.py --arch x64
+    cmake -A x64 -C helper.cmake ..
+    cmake --build .
+
+For 32-bit Windows, continue with:
+
+    ..\scripts\update_deps.py --arch Win32
+    cmake -A Win32 -C helper.cmake ..
+    cmake --build .
+
+Please see the more detailed build information later in this file if you have
+specific requirements for configuring and building these components.
+
+#### Notes
+
+- You may need to adjust some of the CMake options based on your platform. See
+  the platform-specific sections later in this document.
+- The `update_deps.py` script fetches and builds the dependent repositories in
+  the current directory when it is invoked. In this case, they are built in
+  the `build` directory.
+- The `build` directory is also used to build this (Vulkan-Tools) repository,
+  but there shouldn't be any conflicts inside the `build` directory between the
+  dependent repositories and the build files for this repository.
+- The `--dir` option for `update_deps.py` can be used to relocate the
+  dependent repositories to another arbitrary directory using an absolute or
+  relative path (see the example after this list).
+- The `update_deps.py` script generates a file named `helper.cmake` and places
+  it in the same directory as the dependent repositories (`build` in this
+  case). This file contains CMake commands to set the CMake `*_INSTALL_DIR`
+  variables that are used to point to the install artifacts of the dependent
+  repositories. You can use this file with the `cmake -C` option to set these
+  variables when you generate your build files with CMake. This lets you avoid
+  entering several `*_INSTALL_DIR` variable settings on the CMake command line.
+- If using "MINGW" (Git For Windows), you may wish to run
+  `winpty update_deps.py` in order to avoid buffering all of the script's
+  "print" output until the end and to retain the ability to interrupt script
+  execution.
+- Please use `update_deps.py --help` to list additional options and read the
+  internal documentation in `update_deps.py` for further information.
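+
+For example (the paths are illustrative), relocating the dependent repositories
+and then pointing CMake at the generated `helper.cmake` might look like:
+
+    ../scripts/update_deps.py --dir ../external
+    cmake -C ../external/helper.cmake ..
+    cmake --build .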
+
+### Generated source code
+
+This repository contains generated source code in the `icd/generated`
+directory which is not intended to be modified directly. Instead, changes should be
+made to the corresponding generator in the `scripts` directory. The source files can
+then be regenerated using `scripts/generate_source.py`:
+
+    python3 scripts/generate_source.py PATH_TO_VULKAN_HEADERS_REGISTRY_DIR
+
+A helper CMake target `VulkanTools_generated_source` is also provided to simplify
+the invocation of `scripts/generate_source.py` from the build directory:
+
+    cmake --build . --target VulkanTools_generated_source
+
+### Build Options
+
+When generating native platform build files through CMake, several options can
+be specified to customize the build. Some of the options are binary on/off
+options, while others take a string as input. The following is a table of all
+on/off options currently supported by this repository:
+
+| Option | Platform | Default | Description |
+| ------ | -------- | ------- | ----------- |
+| BUILD_CUBE | All | `ON` | Controls whether or not the vkcube demo is built. |
+| BUILD_VULKANINFO | All | `ON` | Controls whether or not the vulkaninfo utility is built. |
+| BUILD_ICD | All | `ON` | Controls whether or not the mock ICD is built. |
+| INSTALL_ICD | All | `OFF` | Controls whether or not the mock ICD is installed as part of the install target. |
+| BUILD_WSI_XCB_SUPPORT | Linux | `ON` | Build the components with XCB support. |
+| BUILD_WSI_XLIB_SUPPORT | Linux | `ON` | Build the components with Xlib support. |
+| BUILD_WSI_WAYLAND_SUPPORT | Linux | `ON` | Build the components with Wayland support. |
+| USE_CCACHE | Linux | `OFF` | Enable caching with the CCache program. |
+
+The following is a table of all string options currently supported by this repository:
+
+| Option | Platform | Default | Description |
+| ------ | -------- | ------- | ----------- |
+| CMAKE_OSX_DEPLOYMENT_TARGET | MacOS | `10.12` | The minimum version of MacOS for loader deployment. |
+
+These variables should be set using the `-D` option when invoking CMake to
+generate the native platform files.
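+
+For example, a configuration that skips the vkcube demo but installs the mock
+ICD could be generated with (options taken from the tables above):
+
+    cmake -DBUILD_CUBE=OFF -DINSTALL_ICD=ON ..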
+
+## Building On Windows
+
+### Windows Development Environment Requirements
+
+- Windows
+  - Any Personal Computer version supported by Microsoft
+- Microsoft [Visual Studio](https://www.visualstudio.com/)
+  - Versions
+    - [2013 (update 4)](https://www.visualstudio.com/vs/older-downloads/)
+    - [2015](https://www.visualstudio.com/vs/older-downloads/)
+    - [2017](https://www.visualstudio.com/vs/downloads/)
+  - The Community Edition of each of the above versions is sufficient, as
+    well as any more capable edition.
+- [CMake 3.10.2](https://cmake.org/files/v3.10/cmake-3.10.2-win64-x64.zip) is recommended.
+  - Use the installer option to add CMake to the system PATH
+- Git Client Support
+  - [Git for Windows](http://git-scm.com/download/win) is a popular solution
+    for Windows
+  - Some IDEs (e.g., [Visual Studio](https://www.visualstudio.com/),
+    [GitHub Desktop](https://desktop.github.com/)) have integrated
+    Git client support
+
+### Windows Build - Microsoft Visual Studio
+
+The general approach is to run CMake to generate the Visual Studio project
+files. Then either run CMake with the `--build` option to build from the
+command line or use the Visual Studio IDE to open the generated solution and
+work with the solution interactively.
+
+#### Windows Quick Start
+
+    cd Vulkan-Tools
+    mkdir build
+    cd build
+    cmake -A x64 -DVULKAN_HEADERS_INSTALL_DIR=absolute_path_to_install_dir ..
+    cmake --build .
+
+The above commands instruct CMake to find and use the default Visual Studio
+installation to generate a Visual Studio solution and projects for the x64
+architecture. The second CMake command builds the Debug (default)
+configuration of the solution.
+
+See below for the details.
+
+#### Use `CMake` to Create the Visual Studio Project Files
+
+Change your current directory to the top of the cloned repository directory,
+create a build directory and generate the Visual Studio project files:
+
+    cd Vulkan-Tools
+    mkdir build
+    cd build
+    cmake -A x64 -DVULKAN_HEADERS_INSTALL_DIR=absolute_path_to_install_dir ..
+
+> Note: The `..` parameter tells `cmake` the location of the top of the
+> repository. If you place your build directory someplace else, you'll need to
+> specify the location of the repository top differently.
+
+The `-A` option is used to select either the "Win32" or "x64" architecture.
+
+If a generator for a specific version of Visual Studio is required, you can
+specify it for Visual Studio 2015, for example, with:
+
+    64-bit: -G "Visual Studio 14 2015 Win64"
+    32-bit: -G "Visual Studio 14 2015"
+
+See this [list](#cmake-visual-studio-generators) of other possible generators
+for Visual Studio.
+
+When generating the project files, the absolute path to a Vulkan-Headers
+install directory must be provided. This can be done by setting the
+`VULKAN_HEADERS_INSTALL_DIR` environment variable or by setting the
+`VULKAN_HEADERS_INSTALL_DIR` CMake variable with the `-D` CMake option. In
+either case, the variable should point to the installation directory of a
+Vulkan-Headers repository built with the install target.
+
+The above steps create a Windows solution file named
+`Vulkan-Tools.sln` in the build directory.
+
+At this point, you can build the solution from the command line or open the
+generated solution with Visual Studio.
+
+#### Build the Solution From the Command Line
+
+While still in the build directory:
+
+    cmake --build .
+
+to build the Debug configuration (the default), or:
+
+    cmake --build . --config Release
+
+to make a Release build.
+
+#### Build the Solution With Visual Studio
+
+Launch Visual Studio and open the "Vulkan-Tools.sln" solution file in the
+build folder. You may select "Debug" or "Release" from the Solution
+Configurations drop-down list. Start a build by selecting the Build->Build
+Solution menu item.
+
+#### Windows Install Target
+
+The CMake project also generates an "install" target that you can use to copy
+the primary build artifacts to a specific location using a "bin, include, lib"
+style directory structure. This may be useful for collecting the artifacts and
+providing them to another project that is dependent on them.
+
+The default location is `$CMAKE_BINARY_DIR\install`, but can be changed with
+the `CMAKE_INSTALL_PREFIX` variable when first generating the project build
+files with CMake.
+
+You can build the install target from the command line with:
+
+    cmake --build . --config Release --target install
+
+or build the `INSTALL` target from the Visual Studio solution explorer.
+
+#### Using a Loader Built from a Repository
+
+If you do need to build and use your own loader, build the Vulkan-Loader
+repository with the install target and modify your CMake invocation to add the
+location of the loader's install directory:
+
+    cmake -A x64 -DVULKAN_HEADERS_INSTALL_DIR=absolute_path_to_install_dir \
+                 -DVULKAN_LOADER_INSTALL_DIR=absolute_path_to_install_dir ..
+
+#### Using glslang Built from a Repository
+
+If you do need to build and use your own glslang, build the glslang repository
+with the install target and modify your CMake invocation to add the location
+of the glslang install directory:
+
+    cmake -A x64 -DVULKAN_HEADERS_INSTALL_DIR=absolute_path_to_install_dir \
+                 -DGLSLANG_INSTALL_DIR=absolute_path_to_install_dir ..
+
+### Windows Notes
+
+#### CMake Visual Studio Generators
+
+The chosen generator should match one of the Visual Studio versions that you
+have installed. Generator strings that correspond to versions of Visual Studio
+include:
+
+| Build Platform               | 64-bit Generator              | 32-bit Generator        |
+|------------------------------|-------------------------------|-------------------------|
+| Microsoft Visual Studio 2013 | "Visual Studio 12 2013 Win64" | "Visual Studio 12 2013" |
+| Microsoft Visual Studio 2015 | "Visual Studio 14 2015 Win64" | "Visual Studio 14 2015" |
+| Microsoft Visual Studio 2017 | "Visual Studio 15 2017 Win64" | "Visual Studio 15 2017" |
+
+## Building On Linux
+
+### Linux Build Requirements
+
+This repository has been built and tested on the two most recent Ubuntu LTS
+versions. Currently, the oldest supported version is Ubuntu 16.04, meaning
+that the minimum officially supported C++11 compiler version is GCC 5.4.0,
+although earlier versions may work. It should be straightforward to adapt this
+repository to other Linux distributions.
+
+[CMake 3.10.2](https://cmake.org/files/v3.10/cmake-3.10.2-Linux-x86_64.tar.gz) is recommended.
+
+#### Required Package List
+
+    sudo apt-get install git cmake build-essential libx11-xcb-dev \
+        libxkbcommon-dev libwayland-dev libxrandr-dev
+
+### Linux Build
+
+The general approach is to run CMake to generate make files. Then either run
+CMake with the `--build` option or `make` to build from the command line.
+
+#### Linux Quick Start
+
+    cd Vulkan-Tools
+    mkdir build
+    cd build
+    cmake -DVULKAN_HEADERS_INSTALL_DIR=absolute_path_to_install_dir ..
+    make
+
+See below for the details.
+
+#### Use CMake to Create the Make Files
+
+Change your current directory to the top of the cloned repository directory,
+create a build directory and generate the make files.
+
+    cd Vulkan-Tools
+    mkdir build
+    cd build
+    cmake -DCMAKE_BUILD_TYPE=Debug \
+          -DVULKAN_HEADERS_INSTALL_DIR=absolute_path_to_install_dir \
+          -DCMAKE_INSTALL_PREFIX=install ..
+
+> Note: The `..` parameter tells `cmake` the location of the top of the
+> repository. If you place your `build` directory someplace else, you'll need
+> to specify the location of the repository top differently.
+
+Use `-DCMAKE_BUILD_TYPE` to specify a Debug or Release build.
+
+When generating the project files, the absolute path to a Vulkan-Headers
+install directory must be provided. This can be done by setting the
+`VULKAN_HEADERS_INSTALL_DIR` environment variable or by setting the
+`VULKAN_HEADERS_INSTALL_DIR` CMake variable with the `-D` CMake option. In
+either case, the variable should point to the installation directory of a
+Vulkan-Headers repository built with the install target.
+
+> Note: For Linux, the default value for `CMAKE_INSTALL_PREFIX` is
+> `/usr/local`, which would be used if you do not specify
+> `CMAKE_INSTALL_PREFIX`. In this case, you may need to use `sudo` to install
+> to system directories later when you run `make install`.
+
+#### Build the Project
+
+You can just run `make` to begin the build.
+
+To speed up the build on a multi-core machine, use the `-j` option for `make`
+to specify the number of cores to use for the build. For example:
+
+    make -j4
+
+You can also use
+
+    cmake --build .
+
+If your build system supports ccache, you can enable it via the CMake option `-DUSE_CCACHE=On`.
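+
+A possible invocation, assuming ccache is installed and reusing the placeholder
+install path from above:
+
+    cmake -DUSE_CCACHE=On -DVULKAN_HEADERS_INSTALL_DIR=absolute_path_to_install_dir ..
+    make -j4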
+
+### Linux Notes
+
+#### WSI Support Build Options
+
+By default, the repository components are built with support for the
+Vulkan-defined WSI display servers: Xcb, Xlib, and Wayland. It is recommended
+to build the repository components with support for these display servers to
+maximize their usability across Linux platforms. If it is necessary to build
+these modules without support for one of the display servers, the appropriate
+CMake option of the form `BUILD_WSI_xxx_SUPPORT` can be set to `OFF`.
+
+Note vulkaninfo currently only supports Xcb and Xlib WSI display servers. See
+the CMakeLists.txt file in `Vulkan-Tools/vulkaninfo` for more info.
+
+You can select which WSI subsystem is used to execute the vkcube applications
+using a CMake option called DEMOS_WSI_SELECTION. Supported options are XCB
+(default), XLIB, and WAYLAND. Note that you must build with the corresponding
+BUILD_WSI_*_SUPPORT option enabled at the base repository level. For instance,
+to create a build that uses Xlib when running the vkcube demos, your CMake
+command line might look like:
+
+    cmake -DCMAKE_BUILD_TYPE=Debug -DDEMOS_WSI_SELECTION=XLIB ..
+
+#### Linux Install to System Directories
+
+Installing the files resulting from your build to the systems directories is
+optional since environment variables can usually be used instead to locate the
+binaries. There are also risks with interfering with binaries installed by
+packages. If you are certain that you would like to install your binaries to
+system directories, you can proceed with these instructions.
+
+Assuming that you've built the code as described above and the current
+directory is still `build`, you can execute:
+
+    sudo make install
+
+This command installs files to `/usr/local` if no `CMAKE_INSTALL_PREFIX` is
+specified when creating the build files with CMake.
+
+You may need to run `ldconfig` in order to refresh the system loader search
+cache on some Linux systems.
+
+You can further customize the installation location by setting additional
+CMake variables to override their defaults. For example, if you would like to
+install to `/tmp/build` instead of `/usr/local`, on your CMake command line
+specify:
+
+    -DCMAKE_INSTALL_PREFIX=/tmp/build
+
+Then run `make install` as before. The install step places the files in
+`/tmp/build`. This may be useful for collecting the artifacts and providing
+them to another project that is dependent on them.
+
+Note: The Mock ICD is not installed by default since it is a "null" driver
+that does not render anything and is used for testing purposes. Installing it
+to system directories may cause some applications to discover and use this
+driver instead of other full drivers installed on the system. If you really
+want to install this null driver, use:
+
+    -DINSTALL_ICD=ON
+
+See the CMake documentation for more details on using these variables to
+further customize your installation.
+
+Also see the `LoaderAndLayerInterface` document in the `loader` folder of the
+Vulkan-Loader repository for more information about loader and layer
+operation.
+
+#### Linux Uninstall
+
+To uninstall the files from the system directories, you can execute:
+
+    sudo make uninstall
+
+### Linux Tests
+
+After making any changes to the repository, you should perform some quick
+sanity tests, such as running the vkcube demo with validation enabled.
+
+To run the **vkcube application** with validation, in a terminal change to the
+`build/cube` directory and run:
+
+    VK_LAYER_PATH=../path/to/validation/layers ./vkcube --validate
+
+If you have an SDK installed and have run the setup script to set the
+`VULKAN_SDK` environment variable, it may be unnecessary to specify a
+`VK_LAYER_PATH`.
+
+#### Linux 32-bit support
+
+Usage of the contents of this repository in 32-bit Linux environments is not
+officially supported. However, since this repository is supported on 32-bit
+Windows, these modules should generally work on 32-bit Linux.
+
+Here are some notes for building 32-bit targets on a 64-bit Ubuntu "reference"
+platform:
+
+If not already installed, install the following 32-bit development libraries:
+
+`gcc-multilib g++-multilib libx11-dev:i386`
+
+This list may vary depending on your distribution and which windowing systems
+you are building for.
+
+Set up your environment for building 32-bit targets:
+
+    export ASFLAGS=--32
+    export CFLAGS=-m32
+    export CXXFLAGS=-m32
+    export PKG_CONFIG_LIBDIR=/usr/lib/i386-linux-gnu
+
+Again, your PKG_CONFIG configuration may be different, depending on your
+distribution.
+
+Finally, rebuild the repository using `cmake` and `make`, as explained above.
+
+## Building On Android
+
+Install the required tools for Linux and Windows covered above, then add the
+following.
+
+### Android Build Requirements
+
+- Install [Android Studio 2.3](https://developer.android.com/studio/index.html) or later.
+- From the "Welcome to Android Studio" splash screen, add the following components using
+  Configure > SDK Manager:
+  - SDK Platforms > Android 6.0 and newer
+  - SDK Tools > Android SDK Build-Tools
+  - SDK Tools > Android SDK Platform-Tools
+  - SDK Tools > Android SDK Tools
+  - SDK Tools > NDK
+
+#### Add Android specifics to environment
+
+For each of the below, you may need to specify a different build-tools
+version, as Android Studio will roll it forward fairly regularly.
+
+On Linux:
+
+    export ANDROID_SDK_HOME=$HOME/Android/sdk
+    export ANDROID_NDK_HOME=$HOME/Android/sdk/ndk-bundle
+    export PATH=$ANDROID_SDK_HOME:$PATH
+    export PATH=$ANDROID_NDK_HOME:$PATH
+    export PATH=$ANDROID_SDK_HOME/build-tools/23.0.3:$PATH
+
+On Windows:
+
+    set ANDROID_SDK_HOME=%LOCALAPPDATA%\Android\sdk
+    set ANDROID_NDK_HOME=%LOCALAPPDATA%\Android\sdk\ndk-bundle
+    set PATH=%LOCALAPPDATA%\Android\sdk\ndk-bundle;%PATH%
+
+On OSX:
+
+    export ANDROID_SDK_HOME=$HOME/Library/Android/sdk
+    export ANDROID_NDK_HOME=$HOME/Library/Android/sdk/ndk-bundle
+    export PATH=$ANDROID_NDK_HOME:$PATH
+    export PATH=$ANDROID_SDK_HOME/build-tools/23.0.3:$PATH
+
+Note: If `jarsigner` is missing from your platform, you can find it in the
+Android Studio install or in your Java installation. If you do not have Java,
+you can get it with something like the following:
+
+    sudo apt-get install openjdk-8-jdk
+
+#### Additional OSX System Requirements
+
+Tested on OSX version 10.13.3
+
+Setup Homebrew and components
+
+- Follow instructions on [brew.sh](http://brew.sh) to get Homebrew installed.
+
+      /usr/bin/ruby -e "$(curl -fsSL \
+          https://raw.githubusercontent.com/Homebrew/install/master/install)"
+
+- Ensure Homebrew is at the beginning of your PATH:
+
+      export PATH=/usr/local/bin:$PATH
+
+- Add packages with the following:
+
+      brew install python
+
+### Android Build
+
+There are two options for building the Android tools: use the SPIRV tools
+provided as part of the Android NDK, or use upstream sources. To build with the
+SPIRV tools from the NDK, remove the build-android/third_party directory
+created by running update_external_sources_android.sh (or avoid running
+update_external_sources_android.sh). The following script builds everything in
+the repository for Android, including validation layers, tests, demos, and APK
+packaging; it retrieves and uses the upstream SPIRV tools.
+
+    cd build-android
+    ./build_all.sh
+
+Test and application APKs can be installed on production devices with:
+
+    ./install_all.sh [-s <serial number>]
+
+Note that there are no equivalent scripts on Windows yet; that work needs to
+be completed. The following per-platform commands can be used for layer-only
+builds:
+
+#### Linux and OSX
+
+Follow the setup steps for Linux or OSX above, then from your terminal:
+
+    cd build-android
+    ./update_external_sources_android.sh --no-build
+    ./android-generate.sh
+    ndk-build -j4
+
+#### Windows
+
+Follow the setup steps for Windows above, then from Developer Command Prompt
+for VS2013:
+
+    cd build-android
+    update_external_sources_android.bat
+    android-generate.bat
+    ndk-build
+
+### Android Tests and Demos
+
+After making any changes to the repository, you should perform some quick
+sanity tests, including the layer validation tests and the vkcube demo with
+validation enabled.
+
+#### Run Layer Validation Tests
+
+Use the following steps to build, install, and run the layer validation tests
+for Android:
+
+    cd build-android
+    ./build_all.sh
+    adb install -r bin/VulkanLayerValidationTests.apk
+    adb shell am start com.example.VulkanLayerValidationTests/android.app.NativeActivity
+
+Alternatively, you can use the test_APK script to install and run the layer
+validation tests:
+
+    test_APK.sh -s <serial number> -p <platform name> -f <gtest_filter>
+
+#### Run vkcube with Validation
+
+TODO: This must be reworked to pull in layers from the ValidationLayers repo
+
+Use the following steps to build, install, and run vkcube for Android:
+
+    cd build-android
+    ./build_all.sh
+    adb install -r ../demos/android/cube/bin/vkcube.apk
+    adb shell am start com.example.Cube/android.app.NativeActivity
+
+To build, install, and run Cube with validation layers,
+first build layers using steps above, then run:
+
+    cd build-android
+    ./build_all.sh
+    adb install -r ../demos/android/cube-with-layers/bin/cube-with-layers.apk
+
+##### Run without validation enabled
+
+    adb shell am start com.example.CubeWithLayers/android.app.NativeActivity
+
+##### Run with validation enabled
+
+    adb shell am start -a android.intent.action.MAIN -c android.intent.category.LAUNCHER -n com.example.CubeWithLayers/android.app.NativeActivity --es args "--validate"
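+
+To confirm that validation output is actually being produced, one option (an
+illustrative convenience, not part of the original steps) is to watch the
+device log while the demo runs; the exact log tags vary by layer version, so
+this filter is intentionally broad:
+
+    adb logcat | grep -i -E "validation|vulkan"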
+
+## Building on MacOS
+
+### MacOS Build Requirements
+
+Tested on OSX version 10.12.6
+
+- [CMake 3.10.2](https://cmake.org/files/v3.10/cmake-3.10.2-Darwin-x86_64.tar.gz) is recommended.
+
+Set up Homebrew and components:
+
+- Follow instructions on [brew.sh](http://brew.sh) to get Homebrew installed.
+
+      /usr/bin/ruby -e "$(curl -fsSL \
+          https://raw.githubusercontent.com/Homebrew/install/master/install)"
+
+- Ensure Homebrew is at the beginning of your PATH:
+
+      export PATH=/usr/local/bin:$PATH
+
+- Add packages with the following (may need refinement):
+
+      brew install python python3 git
+
+### Clone the Repository
+
+Clone the Vulkan-Tools repository as defined above in the [Download the Repository](#download-the-repository)
+section.
+
+### Get the External Libraries
+
+[MoltenVK](https://github.com/KhronosGroup/MoltenVK) Library
+
+- Building the vkcube and vulkaninfo applications requires linking to the
+  MoltenVK Library (libMoltenVK.dylib)
+  - The following option should be used on the cmake command line to specify
+    the MoltenVK repository: MOLTENVK_REPO_ROOT=/absolute_path_to/MoltenVK,
+    making sure to specify an absolute path, like so:
+    cmake -DMOLTENVK_REPO_ROOT=/absolute_path_to/MoltenVK ....
+
+Vulkan Loader Library
+
+- Building the vkcube and vulkaninfo applications requires linking to the Vulkan
+  Loader Library (libvulkan.1.dylib)
+  - The following option should be used on the cmake command line to specify a
+    vulkan loader library:
+    VULKAN_LOADER_INSTALL_DIR=/absolute_path_to/Vulkan-Loader_install_dir
+    making sure to specify an absolute path.
+
+### MacOS build
+
+#### CMake Generators
+
+This repository uses CMake to generate build or project files that are then
+used to build the repository. The CMake generators explicitly supported in
+this repository are:
+
+- Unix Makefiles
+- Xcode
+
+#### Building with the Unix Makefiles Generator
+
+This generator is the default generator, so all that is needed for a debug
+build is:
+
+        mkdir build
+        cd build
+        cmake -DCMAKE_BUILD_TYPE=Debug \
+              -DVULKAN_LOADER_INSTALL_DIR=/absolute_path_to/Vulkan-Loader_install_dir \
+              -DMOLTENVK_REPO_ROOT=/absolute_path_to/MoltenVK \
+              -DCMAKE_INSTALL_PREFIX=install ..
+        make
+
+To speed up the build on a multi-core machine, use the `-j` option for `make`
+to specify the number of cores to use for the build. For example:
+
+    make -j4
+
+You can now run the demo applications from the command line:
+
+    open cube/vkcube.app
+    open cube/vkcubepp.app
+    open vulkaninfo/vulkaninfo.app
+
+Or you can locate them from `Finder` and launch them from there.
+
+##### The Install Target and RPATH
+
+The applications you just built are "bundled applications", but the
+executables are using the `RPATH` mechanism to locate runtime dependencies
+that are still in your build tree.
+
+To see this, run this command from your `build` directory:
+
+    otool -l cube/cube.app/Contents/MacOS/vkcube
+
+and note that the `vkcube` executable contains loader commands (a filtered view
+of these is sketched just after the list):
+
+- `LC_LOAD_DYLIB` to load `libvulkan.1.dylib` via an `@rpath`
+- `LC_RPATH` that contains an absolute path to the build location of the Vulkan loader
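+
+A quick way to pull just those entries out of the fairly long `otool` output is
+to filter it; this is only a convenience, not a required step:
+
+    otool -l cube/cube.app/Contents/MacOS/vkcube | grep -A 2 -E "LC_LOAD_DYLIB|LC_RPATH"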
+
+This makes the bundled application "non-transportable", meaning that it won't
+run unless the Vulkan loader is on that specific absolute path. This is useful
+for debugging the loader or other components built in this repository, but not
+if you want to move the application to another machine or remove your build
+tree.
+
+To address this problem, run:
+
+    make install
+
+This step copies the bundled applications to the location specified by
+CMAKE_INSTALL_PREFIX, "cleans up" the `RPATH` to remove any external
+references, and performs other bundle fix-ups. After running `make install`,
+run the `otool` command again from the `build/install` directory and note:
+
+- `LC_LOAD_DYLIB` is now `@executable_path/../MacOS/libvulkan.1.dylib`
+- `LC_RPATH` is no longer present
+
+The "bundle fix-up" operation also puts a copy of the Vulkan loader into the
+bundle, making the bundle completely self-contained and self-referencing.
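+
+To spot-check the result, you can repeat the filtered `otool` query from the
+install tree; the relative bundle path below simply mirrors the command above
+and may differ if you changed the install layout:
+
+    cd install
+    otool -l cube/cube.app/Contents/MacOS/vkcube | grep -A 2 -E "LC_LOAD_DYLIB|LC_RPATH"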
+
+##### The Non-bundled vulkaninfo Application
+
+There is also a non-bundled version of the `vulkaninfo` application that you
+can run from the command line:
+
+    vulkaninfo/vulkaninfo
+
+If you run this from the build directory, vulkaninfo's RPATH is already
+set to point to the Vulkan loader in the build tree, so it has no trouble
+finding it. But the loader will not find the MoltenVK driver and you'll see a
+message about an incompatible driver. To remedy this:
+
+    VK_ICD_FILENAMES=<path-to>/MoltenVK/Package/Latest/MoltenVK/macOS/MoltenVK_icd.json vulkaninfo/vulkaninfo
+
+If you run `vulkaninfo` from the install directory, the `RPATH` in the
+`vulkaninfo` application has been removed, so the OS needs extra help to locate
+the Vulkan loader:
+
+    DYLD_LIBRARY_PATH=<path-to>/Vulkan-Loader/loader VK_ICD_FILENAMES=<path-to>/MoltenVK/Package/Latest/MoltenVK/macOS/MoltenVK_icd.json vulkaninfo/vulkaninfo
+
+#### Building with the Xcode Generator
+
+To create and open an Xcode project:
+
+        mkdir build-xcode
+        cd build-xcode
+        cmake -DVULKAN_LOADER_INSTALL_DIR=/absolute_path_to/Vulkan-Loader_install_dir -DMOLTENVK_REPO_ROOT=/absolute_path_to/MoltenVK -GXcode ..
+        open VULKAN.xcodeproj
+
+Within Xcode, you can select Debug or Release builds in the project's Build
+Settings. You can also select individual schemes for working with specific
+applications like `vkcube`.
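+
+If you prefer to drive the generated project from the command line rather than
+the IDE, the generator-agnostic `cmake --build` invocation should also work
+from the `build-xcode` directory (a convenience note, not part of the original
+steps):
+
+        cmake --build . --config Debug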
diff --git a/src/third_party/vulkan-tools/src/CMakeLists.txt b/src/third_party/vulkan-tools/src/CMakeLists.txt
new file mode 100644
index 0000000..ec399d6
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/CMakeLists.txt
@@ -0,0 +1,171 @@
+# ~~~
+# Copyright (c) 2014-2018 Valve Corporation
+# Copyright (c) 2014-2018 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ~~~
+
+# CMake project initialization ---------------------------------------------------------------------------------------------------
+# This section contains pre-project() initialization, and ends with the project() command.
+
+cmake_minimum_required(VERSION 3.10.2)
+
+# Apple: Must be set before enable_language() or project() as it may influence configuration of the toolchain and flags.
+set(CMAKE_OSX_DEPLOYMENT_TARGET "10.12" CACHE STRING "Minimum OS X deployment version")
+
+project(Vulkan-Tools)
+
+# find_package(), include() and global project settings --------------------------------------------------------------------------
+
+find_package(PythonInterp 3 QUIET)
+
+# User-interface declarations ----------------------------------------------------------------------------------------------------
+# This section contains variables that affect development GUIs (e.g. CMake GUI and IDEs), such as option(), folders, and variables
+# with the CACHE property.
+
+set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_SOURCE_DIR}/cmake")
+
+option(BUILD_CUBE "Build cube" ON)
+option(BUILD_VULKANINFO "Build vulkaninfo" ON)
+option(BUILD_ICD "Build icd" ON)
+# Installing the Mock ICD to system directories is probably not desired since this ICD is not a very complete implementation.
+# Require the user to ask that it be installed if they really want it.
+option(INSTALL_ICD "Install icd" OFF)
+
+# Enable IDE GUI folders
+set_property(GLOBAL PROPERTY USE_FOLDERS ON)
+# "Helper" targets that don't have interesting source code should set their FOLDER property to this
+set(TOOLS_HELPER_FOLDER "Helper Targets")
+
+option(USE_CCACHE "Use ccache" OFF)
+if(USE_CCACHE)
+    find_program(CCACHE_FOUND ccache)
+    if(CCACHE_FOUND)
+        set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ccache)
+    endif()
+endif()
+
+# ~~~
+# Find Vulkan Headers and Loader
+# Search order:
+#  User-supplied CMAKE_PREFIX_PATH containing paths to the header and/or loader install dirs
+#  CMake options VULKAN_HEADERS_INSTALL_DIR and/or VULKAN_LOADER_INSTALL_DIR
+#  Env vars VULKAN_HEADERS_INSTALL_DIR and/or VULKAN_LOADER_INSTALL_DIR
+#  If on MacOS
+#   CMake option MOLTENVK_REPO_ROOT
+#   Env vars MOLTENVK_REPO_ROOT
+#  Fallback to FindVulkan operation using SDK install or system installed components.
+# ~~~
+set(VULKAN_HEADERS_INSTALL_DIR "HEADERS-NOTFOUND" CACHE PATH "Absolute path to a Vulkan-Headers install directory")
+set(VULKAN_LOADER_INSTALL_DIR "LOADER-NOTFOUND" CACHE PATH "Absolute path to a Vulkan-Loader install directory")
+if(WIN32 AND "${VULKAN_LOADER_INSTALL_DIR}" STREQUAL "LOADER-NOTFOUND")
+    if(CMAKE_CL_64)
+        set(VULKAN_LOADER_INSTALL_DIR "${CMAKE_CURRENT_SOURCE_DIR}/external/x64")
+    else()
+        set(VULKAN_LOADER_INSTALL_DIR "${CMAKE_CURRENT_SOURCE_DIR}/external/x86")
+    endif()
+endif()
+set(CMAKE_PREFIX_PATH ${CMAKE_PREFIX_PATH};${VULKAN_HEADERS_INSTALL_DIR};${VULKAN_LOADER_INSTALL_DIR};
+    $ENV{VULKAN_HEADERS_INSTALL_DIR};$ENV{VULKAN_LOADER_INSTALL_DIR})
+
+if(APPLE)
+    set(MOLTENVK_REPO_ROOT "MOLTENVK-NOTFOUND" CACHE PATH "Absolute path to a MoltenVK repo directory")
+    if(NOT MOLTENVK_REPO_ROOT AND NOT DEFINED ENV{MOLTENVK_REPO_ROOT})
+        message(FATAL_ERROR "Must define location of MoltenVK repo -- see BUILD.md")
+    endif()
+
+    if(NOT MOLTENVK_REPO_ROOT)
+        set(MOLTENVK_REPO_ROOT $ENV{MOLTENVK_REPO_ROOT})
+    endif()
+    message(STATUS "Using MoltenVK repo location at ${MOLTENVK_REPO_ROOT}")
+endif()
+message(STATUS "Using find_package to locate Vulkan")
+find_package(Vulkan)
+find_package(VulkanHeaders)
+get_filename_component(Vulkan_LIBRARY_DIR ${Vulkan_LIBRARY} DIRECTORY)
+message(STATUS "Vulkan FOUND = ${Vulkan_FOUND}")
+message(STATUS "Vulkan Lib Dir = ${Vulkan_LIBRARY_DIR}")
+message(STATUS "Vulkan Lib = ${Vulkan_LIBRARY}")
+message(STATUS "Vulkan Headers Include = ${VulkanHeaders_INCLUDE_DIR}")
+message(STATUS "Vulkan Headers Registry = ${VulkanRegistry_DIR}")
+
+include(GNUInstallDirs)
+
+if(WIN32 AND CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT)
+    # Windows: if install locations not set by user, set install prefix to "<build_dir>\install".
+    set(CMAKE_INSTALL_PREFIX "${CMAKE_BINARY_DIR}/install" CACHE PATH "default install path" FORCE)
+endif()
+
+# uninstall target
+if(NOT TARGET uninstall)
+    configure_file("${CMAKE_CURRENT_SOURCE_DIR}/cmake/cmake_uninstall.cmake.in"
+                   "${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake"
+                   IMMEDIATE
+                   @ONLY)
+
+    add_custom_target(uninstall COMMAND ${CMAKE_COMMAND} -P ${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake)
+    set_target_properties(uninstall PROPERTIES FOLDER ${TOOLS_HELPER_FOLDER})
+endif()
+
+if(APPLE)
+    # CMake versions 3 or later need CMAKE_MACOSX_RPATH defined. This avoids the CMP0042 policy message.
+    set(CMAKE_MACOSX_RPATH 1)
+endif()
+
+if(CMAKE_COMPILER_IS_GNUCC OR CMAKE_C_COMPILER_ID MATCHES "Clang")
+    set(COMMON_COMPILE_FLAGS "-Wall -Wextra -Wno-unused-parameter -Wno-missing-field-initializers")
+    set(COMMON_COMPILE_FLAGS "${COMMON_COMPILE_FLAGS} -fno-strict-aliasing -fno-builtin-memcmp")
+
+    # For GCC version 7.1 or greater, we need to disable the implicit fallthrough warning since there's no consistent way to satisfy
+    # all compilers until they all accept the C++17 standard
+    if(CMAKE_COMPILER_IS_GNUCC AND NOT (CMAKE_CXX_COMPILER_VERSION LESS 7.1))
+        set(COMMON_COMPILE_FLAGS "${COMMON_COMPILE_FLAGS} -Wimplicit-fallthrough=0")
+    endif()
+
+    if(APPLE)
+        set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${COMMON_COMPILE_FLAGS}")
+    else()
+        set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -std=c99 ${COMMON_COMPILE_FLAGS}")
+    endif()
+    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COMMON_COMPILE_FLAGS} -std=c++11 -fno-rtti")
+    if(UNIX)
+        set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fvisibility=hidden")
+        set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fvisibility=hidden")
+    endif()
+endif()
+
+# Optional codegen target
+if(PYTHONINTERP_FOUND)
+    add_custom_target(VulkanTools_generated_source
+                      COMMAND ${PYTHON_EXECUTABLE} ${PROJECT_SOURCE_DIR}/scripts/generate_source.py
+                              ${VulkanRegistry_DIR} --incremental
+                      )
+else()
+    message("WARNING: VulkanTools_generated_source target requires python 3")
+endif()
+
+if(APPLE)
+    include(mac_common.cmake)
+endif()
+
+if(BUILD_CUBE)
+    add_subdirectory(cube)
+endif()
+
+if(BUILD_VULKANINFO)
+    add_subdirectory(vulkaninfo)
+endif()
+
+if(BUILD_ICD)
+    add_subdirectory(icd)
+endif()
diff --git a/src/third_party/vulkan-tools/src/CODE_OF_CONDUCT.md b/src/third_party/vulkan-tools/src/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000..a11610b
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/CODE_OF_CONDUCT.md
@@ -0,0 +1 @@
+A reminder that this issue tracker is managed by the Khronos Group. Interactions here should follow the Khronos Code of Conduct (https://www.khronos.org/developers/code-of-conduct), which prohibits aggressive or derogatory language. Please keep the discussion friendly and civil.
diff --git a/src/third_party/vulkan-tools/src/CONTRIBUTING.md b/src/third_party/vulkan-tools/src/CONTRIBUTING.md
new file mode 100644
index 0000000..41e0269
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/CONTRIBUTING.md
@@ -0,0 +1,130 @@
+# How to Contribute to Vulkan Source Repositories
+
+## **The Repository**
+
+The source code for the Vulkan-Tools components is sponsored by Khronos and LunarG.
+* [Khronos Vulkan-Tools](https://github.com/KhronosGroup/Vulkan-Tools)
+
+
+Repository Issue labels:
+
+* _Bug_:          These issues refer to invalid or broken functionality and are the highest priority.
+* _Enhancement_:  These issues refer to ideas for extending or improving tools and utilities
+
+It is the maintainers' goal for all issues to be assigned within one business day of their submission. If you choose
+to work on an issue that is already assigned, simply coordinate with the current assignee.
+
+### **How to Submit Fixes**
+
+* **Ensure that the bug was not already reported or fixed** by searching on GitHub under Issues
+  and Pull Requests.
+* Use the existing GitHub forking and pull request process.
+  This will involve [forking the repository](https://help.github.com/articles/fork-a-repo/),
+  creating a branch with your commits, and then [submitting a pull request](https://help.github.com/articles/using-pull-requests/).
+* Please read and adhere to the style and process [guidelines](#coding-conventions-and-formatting) enumerated below.
+* Please base your fixes on the master branch.  SDK branches are generally not updated except for critical fixes needed to repair an SDK release.
+* The resulting Pull Request will be assigned to a repository maintainer. It is the maintainer's responsibility to ensure the Pull Request
+  passes the Google/LunarG internal CI processes. Once the Pull Request has been approved and is passing internal CI, a repository maintainer
+  will merge the PR.
+
+
+#### **Coding Conventions and Formatting**
+* Use the **[Google style guide](https://google.github.io/styleguide/cppguide.html)** for source code with the following exceptions:
+    * The column limit is 132 (as opposed to the default value 80). The clang-format tool will handle this. See below.
+    * The indent is 4 spaces instead of the default 2 spaces. Again, the clang-format tool will handle this.
+    * If you can justify a reason for violating a rule in the guidelines, then you are free to do so. Be prepared to defend your
+decision during code review. This should be used responsibly. An example of a bad reason is "I don't like that rule." An example of
+a good reason is "This violates the style guide, but it improves type safety."
+
+* Run **clang-format** on your changes to maintain consistent formatting
+    * There are `.clang-format` files present in the repository to define clang-format settings
+      which are found and used automatically by clang-format.
+    * **clang-format** binaries are available from the LLVM organization, here: [LLVM](https://clang.llvm.org/). Our CI system (Travis-CI)
+      currently uses clang-format version 5.0.0 to check that the lines of code you have changed are formatted properly. It is
+      recommended that you use the same version to format your code prior to submission.
+    * A sample git workflow may look like:
+
+>        # Make changes to the source.
+>        $ git add -u .
+>        $ git clang-format --style=file
+>        # Check to see if clang-format made any changes and if they are OK.
+>        $ git add -u .
+>        $ git commit
+
+* **Commit Messages** (a sample message appears after this list)
+    * Limit the subject line to 50 characters -- this allows the information to display correctly in git/Github logs
+    * Begin subject line with a one-word component description followed by a colon (e.g. loader, layers, tests, etc.)
+    * Separate subject from body with a blank line
+    * Wrap the body at 72 characters
+    * Capitalize the subject line
+    * Do not end the subject line with a period
+    * Use the body to explain what and why vs. how
+    * Use the imperative mode in the subject line. This just means to write it as a command (e.g. Fix the sprocket)
+
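+A hypothetical sample message that follows the rules above (the component and
+change described are invented purely for illustration):
+
+>        cube: Fix window resize handling on Wayland
+>
+>        Recreate the swapchain when the surface is resized instead of
+>        reusing stale extents, which avoids validation errors about
+>        out-of-date swapchain images.
+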
+Strive for commits that implement a single or related set of functionality, using as many commits as is necessary (more is better).
+That said, please ensure that the repository compiles and passes tests without error for each commit in your pull request.  Note
+that to be accepted into the repository, the pull request must [pass all tests](#testing-your-changes) on all supported platforms
+-- the automatic Github Travis and AppVeyor continuous integration features will assist in enforcing this requirement.
+
+#### Generated Source Code
+
+The `icd/generated` directory contains source code that is created by several
+generator scripts in the `scripts` directory. All changes to these scripts _must_ be submitted with the
+corresponding generated output to keep the repository self-consistent. This requirement is enforced by both
+Travis CI and AppVeyor test configurations. Regenerate source files after modifying any of the generator
+scripts and before building and testing your changes. More details can be found in
+[BUILD.md](https://github.com/KhronosGroup/Vulkan-Tools/blob/master/BUILD.md#generated-source-code).
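+
+A sketch of that regeneration step, mirroring the `VulkanTools_generated_source`
+target defined in the top-level CMakeLists.txt; the registry path below is an
+assumption and depends on where your Vulkan-Headers checkout lives:
+
+>        # Run from the repository root; requires Python 3.
+>        python3 scripts/generate_source.py /path/to/Vulkan-Headers/registry --incremental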
+
+#### **Testing Your Changes**
+* Run the repository components with the Vulkan Validation Layers before and after each of your commits to check for any regressions.
+
+  (These instructions are for Linux)
+* In the `cube` directory, run:
+>        vkcube
+>        vkcube --validate
+* In the `vulkaninfo` directory, run:
+>        vulkaninfo
+* If you are adding or changing JSON output, please read
+  [Validating vulkaninfo JSON output](https://github.com/KhronosGroup/Vulkan-Tools/blob/master/vulkaninfo/json_validation_process.md).
+  **Note:** When adding new output to vulkaninfo, do NOT add JSON output unless the formatting is defined by a schema.
+* Run tests that explicitly exercise your changes.
+* Feel free to subject your code changes to other tests as well!
+
+#### Coding Conventions for [CMake](http://cmake.org) files
+
+* When editing configuration files for CMake, follow the style conventions of the surrounding code.
+  * The column limit is 132.
+  * The indent is 4 spaces.
+  * CMake functions are lower-case.
+  * Variable and keyword names are upper-case.
+* The format is defined by
+  [cmake-format](https://github.com/cheshirekow/cmake_format)
+  using the `.cmake-format.py` file in the repository to define the settings.
+  See the cmake-format page for information about its simple markup for comments.
+* Disable reformatting of a block of comment lines by inserting
+  a `# ~~~` comment line before and after that block.
+* Disable any formatting of a block of lines by surrounding that block with
+  `# cmake-format: off` and `# cmake-format: on` comment lines.
+* To install: `sudo pip install cmake_format`
+* To run: `cmake-format --in-place $FILENAME`
+* **IMPORTANT (June 2018)** cmake-format v0.3.6 has a
+  [bug]( https://github.com/cheshirekow/cmake_format/issues/50)
+  that can corrupt the formatting of comment lines in CMake files.
+  A workaround is to use the following command _before_ running cmake-format:
+  `sed --in-place='' 's/^  *#/#/' $FILENAME`
+
+### **Contributor License Agreement (CLA)**
+
+You will be prompted with a one-time "click-through" CLA dialog as part of submitting your pull request
+or other contribution to GitHub.
+
+### **License and Copyrights**
+
+All contributions made to the Vulkan-Tools repository are Khronos branded and as such,
+any new files need to have the Khronos license (Apache 2.0 style) and copyright included.
+Please see an existing file in this repository for an example.
+
+All contributions made to the LunarG repositories are to be made under the Apache 2.0 license
+and any new files need to include this license and any applicable copyrights.
+
+You can include your individual copyright after any existing copyrights.
diff --git a/src/third_party/vulkan-tools/src/GOVERNANCE.md b/src/third_party/vulkan-tools/src/GOVERNANCE.md
new file mode 100644
index 0000000..60fc12c
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/GOVERNANCE.md
@@ -0,0 +1,51 @@
+## Khronos Group Vulkan-Tools Repository Management
+
+# **Open Source Project – Objectives**
+
+* Assist Vulkan Users
+  - The goal is for tool and utility behavior to assist in the development of Vulkan applications.
+    - [Core Specification](https://www.khronos.org/registry/vulkan/specs/1.0/html/vkspec.html)
+    - [Header Files](https://www.khronos.org/registry/vulkan/#headers)
+    - [SDK Resources](https://vulkan.lunarg.com)
+* ISV Enablement
+  - Updates of tools and utilities should be available in a timely fashion
+  - Every effort will be made to be responsive to ISV issues
+* Cross Platform Compatibility
+  - Google and LunarG collaboration:
+    - Google: Monitor for Android
+    - LunarG: Monitor for desktop (Windows, Linux, and MacOS)
+    - Continuous Integration: HW test farms operated by Google and LunarG monitor various hardware/software platforms
+* Repo Quality
+  - Repo remains in a healthy state with all tests passing and a good-quality, consistent codebase
+  - Continuous Integration: Along with Github, HW test farms operated by Google and LunarG perform pre-commit cloud testing
+on pull-requests
+
+# **Roles and Definitions**
+* Contributor, Commenter, User
+  - Submitting contributions, creating issues, or using the contents of the repository
+* Approver
+  - Experienced project members who have made significant technical contributions
+  - Write control: Approve pull/merge requests (verify submissions vs. acceptance criteria)
+* Technical Project Leads
+  - Lead the project in terms of versioning, quality assurance, and overarching objectives
+  - Monitor github issues and drive timely resolution
+  - Designate new approvers
+  - Ensure project information such as the Readme, Contributing docs, wiki, etc., is kept up-to-date
+  - Act as a facilitator in resolving technical conflicts
+  - Serve as a point-of-contact for project-related questions
+
+The technical project leads for this repository are:
+* **Jeremy Kniager** [jeremyk@lunarg.com](mailto:jeremyk@lunarg.com)
+
+# **Acceptance Criteria and Process**
+  - All source code to include Khronos copyright and license (Apache 2.0).
+    - Additional copyrights of contributors appended
+  - Contributions are via pull requests
+    - Project leads will assign approvers to contributor pull requests
+    - Approvers can self-assign their reviewers
+    - For complex or invasive contributions, Project Leads may request approval from specific reviewers
+    - At least one review approval is required to complete a pull request
+    - The goal is to be responsive to contributors while ensuring acceptance criteria are met and to facilitate their submissions
+    - Approval is dependent upon adherence to the guidelines in [CONTRIBUTING.md](CONTRIBUTING.md), and alignment with
+repository goals of maintainability, completeness, and quality
+    - Conflicts or questions will ultimately be resolved by the project leads
diff --git a/src/third_party/vulkan-tools/src/LICENSE.txt b/src/third_party/vulkan-tools/src/LICENSE.txt
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/LICENSE.txt
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/src/third_party/vulkan-tools/src/README.md b/src/third_party/vulkan-tools/src/README.md
new file mode 100644
index 0000000..3dd2858
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/README.md
@@ -0,0 +1,54 @@
+# Vulkan Ecosystem Components
+
+This project provides Khronos official Vulkan Tools and Utilities for Windows, Linux, Android, and MacOS.
+
+## CI Build Status
+| Platform | Build Status |
+|:--------:|:------------:|
+| Linux/Android | [![Build Status](https://travis-ci.org/KhronosGroup/Vulkan-Tools.svg?branch=master)](https://travis-ci.org/KhronosGroup/Vulkan-Tools) |
+| Windows |[![Build status](https://ci.appveyor.com/api/projects/status/hwc1hy417hf42xnf?svg=true)](https://ci.appveyor.com/project/Khronoswebmaster/vulkan-tools/branch/master) |
+
+
+## Introduction
+
+This project provides Vulkan tools and utilities that can assist development by enabling developers to
+verify their applications' correct use of the Vulkan API.
+
+The following components are available in this repository:
+- [*Mock ICD*](icd/)
+- [*Vkcube and Vkcube++ Demo*](cube/)
+- [*VulkanInfo*](vulkaninfo/)
+- [*Windows Runtime*](winrt/)
+
+## Contact Information
+* [Tobin Ehlis](mailto:tobine@google.com)
+* [Mark Lobodzinski](mailto:mark@lunarg.com)
+
+## Information for Developing or Contributing:
+
+Please see the [CONTRIBUTING.md](CONTRIBUTING.md) file in this repository for more details.
+Please see the [GOVERNANCE.md](GOVERNANCE.md) file in this repository for repository management details.
+
+## How to Build and Run
+
+[BUILD.md](BUILD.md)
+Includes directions for building all components as well as running the vkcube demo applications.
+
+## Version Tagging Scheme
+
+Updates to the `Vulkan-Tools` repository which correspond to a new Vulkan specification release are tagged using the following format: `v<`_`version`_`>` (e.g., `v1.1.96`).
+
+**Note**: Marked version releases have undergone thorough testing but do not imply the same quality level as SDK tags. SDK tags follow the `sdk-<`_`version`_`>.<`_`patch`_`>` format (e.g., `sdk-1.1.92.0`).
+
+This scheme was adopted following the 1.1.96 Vulkan specification release.
+
+## License
+This work is released as open source under an Apache-style license from Khronos including a Khronos copyright.
+
+See COPYRIGHT.txt for a full list of licenses used in this repository.
+
+## Acknowledgements
+While this project has been developed primarily by LunarG, Inc., there are many other
+companies and individuals making this possible: Valve Corporation, funding
+project development; Google providing significant contributions to the validation layers;
+Khronos providing oversight and hosting of the project.
diff --git a/src/third_party/vulkan-tools/src/build-android/AndroidManifest.xml b/src/third_party/vulkan-tools/src/build-android/AndroidManifest.xml
new file mode 100644
index 0000000..0f0a5ff
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-android/AndroidManifest.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android" package="com.example.VulkanLayerValidationTests" android:versionCode="1" android:versionName="1.0">
+
+    <!-- This is the platform API where NativeActivity was introduced. -->
+    <uses-sdk android:minSdkVersion="23" android:targetSdkVersion="23"/>
+
+    <!-- This .apk has no Java code itself, so set hasCode to false. -->
+    <application android:label="@string/app_name" android:hasCode="false" android:debuggable='false'>
+
+        <!-- This allows writing log files to sdcard -->
+        <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
+
+        <!-- Our activity is the built-in NativeActivity framework class.
+             This will take care of integrating with our NDK code. -->
+        <activity android:name="android.app.NativeActivity" android:label="@string/app_name" android:exported="true">
+            <!-- Tell NativeActivity the name of our .so -->
+            <meta-data android:name="android.app.lib_name" android:value="VulkanLayerValidationTests"/>
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN"/>
+                <category android:name="android.intent.category.LAUNCHER"/>
+            </intent-filter>
+        </activity>
+    </application>
+
+</manifest>
diff --git a/src/third_party/vulkan-tools/src/build-android/build_all.sh b/src/third_party/vulkan-tools/src/build-android/build_all.sh
new file mode 100755
index 0000000..ecc4911
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-android/build_all.sh
@@ -0,0 +1,73 @@
+#!/bin/bash
+
+# Copyright 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+if [ -z "${ANDROID_SDK_HOME}" ];
+then echo "Please set ANDROID_SDK_HOME, exiting"; exit 1;
+else echo "ANDROID_SDK_HOME is ${ANDROID_SDK_HOME}";
+fi
+
+if [ -z "${ANDROID_NDK_HOME}" ];
+then echo "Please set ANDROID_NDK_HOME, exiting"; exit 1;
+else echo "ANDROID_NDK_HOME is ${ANDROID_NDK_HOME}";
+fi
+
+if [[ $(uname) == "Linux" ]]; then
+    cores=$(nproc || echo 4)
+elif [[ $(uname) == "Darwin" ]]; then
+    cores=$(sysctl -n hw.ncpu || echo 4)
+fi
+
+function findtool() {
+    if [[ ! $(type -t $1) ]]; then
+        echo Command $1 not found, see ../BUILD.md;
+        exit 1;
+    fi
+}
+
+# Check for dependencies
+findtool aapt
+findtool zipalign
+findtool jarsigner
+
+set -ev
+
+DEMO_BUILD_DIR=$PWD/../cube/android
+echo DEMO_BUILD_DIR="${DEMO_BUILD_DIR}"
+
+function create_APK() {
+    aapt package -f -M AndroidManifest.xml -I "$ANDROID_SDK_HOME/platforms/android-23/android.jar" -S res -F bin/$1-unaligned.apk bin/libs
+    # update this logic to detect if key is already there.  If so, use it, otherwise create it.
+    jarsigner -verbose -keystore ~/.android/debug.keystore -storepass android -keypass android  bin/$1-unaligned.apk androiddebugkey
+    zipalign -f 4 bin/$1-unaligned.apk bin/$1.apk
+}
+
+./update_external_sources_android.sh --no-build
+
+#
+# build vkcube APK
+#
+(
+pushd $DEMO_BUILD_DIR
+ndk-build -j $cores
+mkdir -p $DEMO_BUILD_DIR/cube/bin/libs/lib
+cp -r $DEMO_BUILD_DIR/libs/* $DEMO_BUILD_DIR/cube/bin/libs/lib/
+cd $DEMO_BUILD_DIR/cube
+create_APK vkcube
+popd
+)
+
+echo Builds succeeded
+exit 0
diff --git a/src/third_party/vulkan-tools/src/build-android/cmake/README.md b/src/third_party/vulkan-tools/src/build-android/cmake/README.md
new file mode 100644
index 0000000..149966c
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-android/cmake/README.md
@@ -0,0 +1,43 @@
+Build Validation Layers with Android CMake Plugin
+=================================================
+The Gradle project in this directory builds the layers into an AAR.
+The project can be added directly into an application's Gradle project.
+[Android Studio 3.0.0+](https://developer.android.com/studio/index.html)
+is required: earlier versions only publish release libs by default.
+
+Pre-requirements
+----------------
+Build the ShaderC binary
+- Building from the GitHub repo source:
+1. cd build-android
+2. ./update_external_sources_android.sh
+3. ./android-generate.sh
+
+Extra Steps if building from NDK's source tree
+```
+   cd ${your ANDROID_NDK_ROOT}/sources/third_party/shaderc
+   ndk-build  APP_ABI=all APP_STL=c++_static NDK_TOOLCHAIN_VERSION=clang NDK_PROJECT_PATH=. APP_BUILD_SCRIPT=Android.mk libshaderc_combined
+```
+
+Adding layer module into Android Studio application project
+--------------------------------------------------------
+1. In the app's settings.gradle, add:
+```
+    include ':layerLib'
+    project(':layerLib').projectDir = new File('/path/to/cmake/layerlib')
+```
+2. In the app's build.gradle:
+```
+dependencies {
+    // Android Studio 3.0.0+ is required
+    implementation project(':layerLib')
+}
+```
+The BUILD_IN_NDK variable in layerlib/CMakeLists.txt detects whether the source
+comes from the NDK or from a GitHub repo clone, and configures the file paths accordingly.
+
+Tested
+------
+Built on Mac OS; tested on a Google Pixel XL with Android Oreo.
+Equivalent build scripts for Windows are in the same directory.
+
diff --git a/src/third_party/vulkan-tools/src/build-android/cmake/layerlib/CMakeLists.txt b/src/third_party/vulkan-tools/src/build-android/cmake/layerlib/CMakeLists.txt
new file mode 100644
index 0000000..3981e31
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-android/cmake/layerlib/CMakeLists.txt
@@ -0,0 +1,122 @@
+cmake_minimum_required(VERSION 3.4.1)
+
+# Validation layers could be built with code from
+#    github repo   OR
+#    ndk's snapshot
+# The file structure for glslang spir-V is different, adding BUILD_IN_NDK variable
+set(BUILD_IN_NDK OFF)
+if (CMAKE_CURRENT_SOURCE_DIR MATCHES "sources/third_party/vulkan/src")
+    set(BUILD_IN_NDK ON)
+endif()
+
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall -Werror \
+        -Wno-unused-function -Wno-unused-const-variable \
+        -Wno-c++98-compat-pedantic -DVK_PROTOTYPES \
+        -DVK_USE_PLATFORM_ANDROID_KHR")
+
+if (BUILD_IN_NDK)
+    # NDK directory structure:
+    #  $ANDROID_NDK/sources/third_party/
+    #        vulkan/src
+    #        shaderc/third_party (for glslang & spirv-tools )
+    # this build uses combined lib libshaderc.a for libSPIRV-Tools.a purpose
+    get_filename_component(SRC_DIR
+        "${ANDROID_NDK}/sources/third_party/vulkan/src" ABSOLUTE)
+    get_filename_component(EXTERNAL_DIR
+        "${SRC_DIR}/../../shaderc/third_party" ABSOLUTE)
+    get_filename_component(SPIRV_LIB
+        "${SRC_DIR}/../../shaderc/libs/${ANDROID_STL}/${ANDROID_ABI}/libshaderc.a"
+        ABSOLUTE)
+else ()
+    # github directory structure:
+    #   ${SRC_DIR}
+    #   ${SRC_DIR}/build-android/external (for glslang, spirv-tools & shaderc )
+    get_filename_component(SRC_DIR "${CMAKE_CURRENT_SOURCE_DIR}/../../.."  ABSOLUTE)
+    set(EXTERNAL_DIR "${SRC_DIR}/build-android/third_party/shaderc/third_party")
+    set(SPIRV_LIB
+        "${SRC_DIR}/build-android/third_party/shaderc/android_test/obj/local/${ANDROID_ABI}/libSPIRV-Tools.a")
+endif()
+set(COMMON_DIR "${SRC_DIR}/build-android/generated")
+
+set(CMAKE_CXX_clone "${CMAKE_CXX_FLAGS}")
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DVK_USE_PLATFORM_ANDROID_KHR \
+        -fvisibility=hidden")
+include_directories(${SRC_DIR}/include
+                    ${COMMON_DIR}/include
+                    ${SRC_DIR}/layers
+                    ${SRC_DIR}/loader)
+add_library(layer_utils STATIC
+        ${SRC_DIR}/layers/vk_layer_config.cpp
+        ${SRC_DIR}/layers/vk_layer_extension_utils.cpp
+        ${SRC_DIR}/layers/vk_layer_utils.cpp
+        ${SRC_DIR}/layers/vk_format_utils.cpp)
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_clone}")
+
+# assume shaderc already built externally
+add_library(SPIRV-Tools-prebuilt STATIC IMPORTED)
+set_target_properties(SPIRV-Tools-prebuilt PROPERTIES IMPORTED_LOCATION
+    ${SPIRV_LIB})
+
+# build core_validation layers which including shader validation
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DVK_USE_PLATFORM_ANDROID_KHR \
+                     -fvisibility=hidden")
+add_library(VkLayer_core_validation SHARED
+        ${SRC_DIR}/layers/core_validation.cpp
+        ${SRC_DIR}/layers/descriptor_sets.cpp
+        ${SRC_DIR}/layers/buffer_validation.cpp
+        ${SRC_DIR}/layers/shader_validation.cpp
+        ${SRC_DIR}/layers/vk_layer_table.cpp
+        ${SRC_DIR}/layers/xxhash.c)
+target_include_directories(VkLayer_core_validation PRIVATE
+        ${SRC_DIR}/include
+        ${SRC_DIR}/layers
+        ${COMMON_DIR}/include
+        ${SRC_DIR}/loader
+        ${EXTERNAL_DIR}/glslang
+        ${EXTERNAL_DIR}/spirv-tools/include)
+target_link_libraries(VkLayer_core_validation PRIVATE
+        log layer_utils SPIRV-Tools-prebuilt)
+
+add_library(VkLayer_parameter_validation SHARED
+        ${COMMON_DIR}/include/parameter_validation.cpp
+        ${SRC_DIR}/layers/parameter_validation_utils.cpp
+        ${SRC_DIR}/layers/vk_layer_table.cpp)
+target_include_directories(VkLayer_parameter_validation PRIVATE
+        ${SRC_DIR}/include
+        ${COMMON_DIR}/include
+        ${SRC_DIR}/layers
+        ${SRC_DIR}/loader)
+target_link_libraries(VkLayer_parameter_validation PRIVATE log layer_utils)
+
+add_library(VkLayer_object_tracker SHARED
+        ${COMMON_DIR}/include/object_tracker.cpp
+        ${SRC_DIR}/layers/object_tracker_utils.cpp
+        ${SRC_DIR}/layers/vk_layer_table.cpp)
+target_include_directories(VkLayer_object_tracker PRIVATE
+        ${SRC_DIR}/include
+        ${SRC_DIR}/layers
+        ${COMMON_DIR}/include
+        ${SRC_DIR}/loader)
+target_link_libraries(VkLayer_object_tracker PRIVATE log layer_utils)
+
+add_library(VkLayer_threading SHARED
+        ${SRC_DIR}/layers/threading.cpp
+        ${SRC_DIR}/layers/vk_layer_table.cpp)
+target_include_directories(VkLayer_threading PRIVATE
+        ${SRC_DIR}/include
+        ${SRC_DIR}/layers
+        ${COMMON_DIR}/include
+        ${SRC_DIR}/loader)
+target_link_libraries(VkLayer_threading PRIVATE log layer_utils)
+
+add_library(VkLayer_unique_objects SHARED
+        ${SRC_DIR}/layers/unique_objects.cpp
+        ${SRC_DIR}/layers/vk_layer_table.cpp)
+target_include_directories(VkLayer_unique_objects PRIVATE
+        ${SRC_DIR}/include
+        ${SRC_DIR}/layers
+        ${COMMON_DIR}/include
+        ${SRC_DIR}/loader)
+target_link_libraries(VkLayer_unique_objects PRIVATE log layer_utils)
+
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_clone}")
diff --git a/src/third_party/vulkan-tools/src/build-android/cmake/layerlib/build.gradle b/src/third_party/vulkan-tools/src/build-android/cmake/layerlib/build.gradle
new file mode 100644
index 0000000..889d232
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-android/cmake/layerlib/build.gradle
@@ -0,0 +1,32 @@
+apply plugin: 'com.android.library'
+
+android {
+    /*
+     * Required: Android Studio 3.0.0+!
+     */
+    compileSdkVersion 25
+
+    defaultConfig {
+        minSdkVersion 24
+        targetSdkVersion 24
+        versionCode 1
+        versionName "1.0"
+        ndk.abiFilters 'armeabi-v7a', 'arm64-v8a', 'x86', 'x86_64'
+        externalNativeBuild {
+            cmake.arguments '-DANDROID_TOOLCHAIN=clang',
+                            '-DANDROID_STL=c++_static',
+                            '-DANDROID_PLATFORM=android-24'
+        }
+    }
+    externalNativeBuild {
+        cmake.path 'CMakeLists.txt'
+    }
+    buildTypes {
+        release {
+            minifyEnabled false
+            proguardFiles getDefaultProguardFile('proguard-android.txt'),
+                    'proguard-rules.pro'
+        }
+    }
+}
+
diff --git a/src/third_party/vulkan-tools/src/build-android/cmake/layerlib/proguard-rules.pro b/src/third_party/vulkan-tools/src/build-android/cmake/layerlib/proguard-rules.pro
new file mode 100644
index 0000000..b41fe70
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-android/cmake/layerlib/proguard-rules.pro
@@ -0,0 +1,17 @@
+# Add project specific ProGuard rules here.
+# By default, the flags in this file are appended to flags specified
+# in ${ANDROID_SDK}/tools/proguard/proguard-android.txt
+# You can edit the include path and order by changing the proguardFiles
+# directive in build.gradle.
+#
+# For more details, see
+#   http://developer.android.com/guide/developing/tools/proguard.html
+
+# Add any project specific keep options here:
+
+# If your project uses WebView with JS, uncomment the following
+# and specify the fully qualified class name to the JavaScript interface
+# class:
+#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
+#   public *;
+#}
diff --git a/src/third_party/vulkan-tools/src/build-android/cmake/layerlib/src/main/AndroidManifest.xml b/src/third_party/vulkan-tools/src/build-android/cmake/layerlib/src/main/AndroidManifest.xml
new file mode 100644
index 0000000..78c3704
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-android/cmake/layerlib/src/main/AndroidManifest.xml
@@ -0,0 +1,9 @@
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="com.example.layerlib">
+
+    <application android:allowBackup="true" android:label="@string/app_name"
+        android:supportsRtl="true">
+
+    </application>
+
+</manifest>
diff --git a/src/third_party/vulkan-tools/src/build-android/cmake/layerlib/src/main/res/values/strings.xml b/src/third_party/vulkan-tools/src/build-android/cmake/layerlib/src/main/res/values/strings.xml
new file mode 100644
index 0000000..66f8389
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-android/cmake/layerlib/src/main/res/values/strings.xml
@@ -0,0 +1,3 @@
+<resources>
+    <string name="app_name">Layer Library</string>
+</resources>
diff --git a/src/third_party/vulkan-tools/src/build-android/glslang_revision_android b/src/third_party/vulkan-tools/src/build-android/glslang_revision_android
new file mode 100644
index 0000000..8843893
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-android/glslang_revision_android
@@ -0,0 +1 @@
+23ea3db3e5ea16b4964e9e3b5fee88bfc267fc57
diff --git a/src/third_party/vulkan-tools/src/build-android/glslang_url_android b/src/third_party/vulkan-tools/src/build-android/glslang_url_android
new file mode 100644
index 0000000..d661000
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-android/glslang_url_android
@@ -0,0 +1 @@
+https://github.com/KhronosGroup/glslang.git
diff --git a/src/third_party/vulkan-tools/src/build-android/install_all.sh b/src/third_party/vulkan-tools/src/build-android/install_all.sh
new file mode 100755
index 0000000..f2ecd90
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-android/install_all.sh
@@ -0,0 +1,76 @@
+#!/bin/bash
+
+# Copyright 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e
+
+#
+# Parse parameters
+#
+
+function printUsage {
+   echo "Supported parameters are:"
+   echo "    -s|--serial <target device serial number> (optional)"
+   echo
+   echo "i.e. ${0##*/} -s <serial number>"
+   exit 1
+}
+
+if [[ $(($# % 2)) -ne 0 ]]
+then
+    echo Parameters must be provided in pairs.
+    echo parameter count = $#
+    echo
+    printUsage
+    exit 1
+fi
+
+while [[ $# -gt 0 ]]
+do
+    case $1 in
+        -s|--serial)
+            # include the flag, because we need to leave it off if not provided
+            serial="$2"
+            shift 2
+            ;;
+        -*)
+            # unknown option
+            echo Unknown option: $1
+            echo
+            printUsage
+            exit 1
+            ;;
+    esac
+done
+
+if [[ $serial ]]; then
+    echo serial = "${serial}"
+    serialFlag="-s $serial"
+    if [[ $(adb devices) != *"$serial"* ]]
+    then
+        echo Device not found: "${serial}"
+        echo
+        printUsage
+        exit 1
+    fi
+else
+    echo Using device $(adb get-serialno)
+fi
+
+# Install everything built by build_all.sh
+echo "adb $serialFlag install -r ../cube/android/cube/bin/vkcube.apk"
+adb $serialFlag install -r ../cube/android/cube/bin/vkcube.apk
+
+exit $?
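A minimal Python sketch, not part of the patch, of what install_all.sh does: if a serial was requested, confirm it shows up in the output of adb devices, then install the vkcube APK with adb install -r. It assumes adb is on PATH and that the APK path is the one the script uses.

    #!/usr/bin/env python3
    """Sketch of install_all.sh: optionally target a device by serial, verify it
    is visible to adb, then install the vkcube APK. Assumes adb is on PATH."""
    import subprocess
    import sys

    def install(apk, serial=None):
        # Mirror the script: if a serial was given, make sure `adb devices` lists it.
        if serial:
            devices = subprocess.run(["adb", "devices"], check=True,
                                     capture_output=True, text=True).stdout
            if serial not in devices:
                sys.exit("Device not found: %s" % serial)
            cmd = ["adb", "-s", serial, "install", "-r", apk]
        else:
            cmd = ["adb", "install", "-r", apk]
        subprocess.run(cmd, check=True)

    if __name__ == "__main__":
        serial = sys.argv[1] if len(sys.argv) > 1 else None
        install("../cube/android/cube/bin/vkcube.apk", serial)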
diff --git a/src/third_party/vulkan-tools/src/build-android/jni/Android.mk b/src/third_party/vulkan-tools/src/build-android/jni/Android.mk
new file mode 100644
index 0000000..204f755
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-android/jni/Android.mk
@@ -0,0 +1,20 @@
+# Copyright 2015 The Android Open Source Project
+# Copyright (C) 2015 Valve Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#      http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+LOCAL_PATH := $(call my-dir)
+SRC_DIR := ../..
+
+$(call import-module,android/native_app_glue)
+$(call import-module,third_party/shaderc)
diff --git a/src/third_party/vulkan-tools/src/build-android/jni/Application.mk b/src/third_party/vulkan-tools/src/build-android/jni/Application.mk
new file mode 100644
index 0000000..a80a357
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-android/jni/Application.mk
@@ -0,0 +1,20 @@
+# Copyright 2015 The Android Open Source Project
+# Copyright (C) 2015 Valve Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#      http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+APP_ABI := armeabi-v7a arm64-v8a x86 x86_64
+APP_PLATFORM := android-22
+APP_STL := c++_static
+NDK_TOOLCHAIN_VERSION := clang
+NDK_MODULE_PATH := .
diff --git a/src/third_party/vulkan-tools/src/build-android/jni/shaderc/Application.mk b/src/third_party/vulkan-tools/src/build-android/jni/shaderc/Application.mk
new file mode 100644
index 0000000..5447415
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-android/jni/shaderc/Application.mk
@@ -0,0 +1,4 @@
+APP_ABI := all
+APP_BUILD_SCRIPT := Android.mk
+APP_STL := c++_static
+APP_PLATFORM := android-23
diff --git a/src/third_party/vulkan-tools/src/build-android/res/values/strings.xml b/src/third_party/vulkan-tools/src/build-android/res/values/strings.xml
new file mode 100644
index 0000000..8ff71b0
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-android/res/values/strings.xml
@@ -0,0 +1,24 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright 2016 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<!-- This file contains resource definitions for displayed strings, allowing
+     them to be changed based on the locale and options. -->
+
+<resources>
+    <!-- Simple strings. -->
+    <string name="app_name">VulkanLayerValidationTests</string>
+
+</resources>
diff --git a/src/third_party/vulkan-tools/src/build-android/shaderc_revision_android b/src/third_party/vulkan-tools/src/build-android/shaderc_revision_android
new file mode 100644
index 0000000..d65bc86
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-android/shaderc_revision_android
@@ -0,0 +1 @@
+563bc6e87a43c38b5495469307922c768edbd191
diff --git a/src/third_party/vulkan-tools/src/build-android/shaderc_url_android b/src/third_party/vulkan-tools/src/build-android/shaderc_url_android
new file mode 100644
index 0000000..488a1fe
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-android/shaderc_url_android
@@ -0,0 +1 @@
+https://github.com/google/shaderc.git
diff --git a/src/third_party/vulkan-tools/src/build-android/spirv-headers_revision_android b/src/third_party/vulkan-tools/src/build-android/spirv-headers_revision_android
new file mode 100644
index 0000000..f6cc8e5
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-android/spirv-headers_revision_android
@@ -0,0 +1 @@
+ce309203d7eceaf908bea8862c27f3e0749f7d00
diff --git a/src/third_party/vulkan-tools/src/build-android/spirv-headers_url_android b/src/third_party/vulkan-tools/src/build-android/spirv-headers_url_android
new file mode 100644
index 0000000..c37214f
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-android/spirv-headers_url_android
@@ -0,0 +1 @@
+https://github.com/KhronosGroup/SPIRV-Headers.git
diff --git a/src/third_party/vulkan-tools/src/build-android/spirv-tools_revision_android b/src/third_party/vulkan-tools/src/build-android/spirv-tools_revision_android
new file mode 100644
index 0000000..9f033b4
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-android/spirv-tools_revision_android
@@ -0,0 +1 @@
+5d442fad2f00fcca8354aaaa9bfd36be8c418a1b
diff --git a/src/third_party/vulkan-tools/src/build-android/spirv-tools_url_android b/src/third_party/vulkan-tools/src/build-android/spirv-tools_url_android
new file mode 100644
index 0000000..19ccae7
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-android/spirv-tools_url_android
@@ -0,0 +1 @@
+https://github.com/KhronosGroup/SPIRV-Tools.git
diff --git a/src/third_party/vulkan-tools/src/build-android/test_APK.sh b/src/third_party/vulkan-tools/src/build-android/test_APK.sh
new file mode 100755
index 0000000..801b16f
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-android/test_APK.sh
@@ -0,0 +1,239 @@
+#!/bin/bash
+
+# Copyright 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#
+# Parse parameters
+#
+
+function printUsage {
+   echo "Supported parameters are:"
+   echo "    -p|--platform <platform> (optional)"
+   echo "    -f|--filter <gtest filter list> (optional)"
+   echo "    -s|--serial <target device serial number> (optional)"
+   echo
+   echo "i.e. ${0##*/} -p <platform> -f <test filter> -s <serial number>"
+   exit 1
+}
+
+if [[ $(($# % 2)) -ne 0 ]]
+then
+    echo Parameters must be provided in pairs.
+    echo parameter count = $#
+    echo
+    printUsage
+    exit 1
+fi
+
+while [[ $# -gt 0 ]]
+do
+    case $1 in
+        -p|--platform)
+            platform="$2"
+            shift 2
+            ;;
+        -f|--filter)
+            filter="$2"
+            shift 2
+            ;;
+        -s|--serial)
+            serial="$2"
+            shift 2
+            ;;
+        -*)
+            # unknown option
+            echo Unknown option: $1
+            echo
+            printUsage
+            exit 1
+            ;;
+    esac
+done
+
+if [[ $serial ]]; then
+    serialFlag="-s $serial"
+    if [[ $(adb devices) != *"$serial"* ]]
+    then
+        echo Device not found: "${serial}"
+        echo
+        printUsage
+        exit 1
+    fi
+else
+    echo Using device $(adb get-serialno)
+fi
+
+if [[ -z $platform ]]
+then
+    echo No platform specified.
+    platform="UnspecifiedPlatform"
+fi
+
+if [[ -z $filter ]]
+then
+    echo No filter specified, running all tests.
+    filter="*"
+fi
+
+if [[ $platform ]]; then echo platform = "${platform}"; fi
+if [[ $filter ]]; then echo filter = "${filter}"; fi
+if [[ $serial ]]; then echo serial = "${serial}"; fi
+
+set -ev
+
+#
+# Start up
+#
+
+# Wake up the device
+adb $serialFlag shell input keyevent "KEYCODE_MENU"
+adb $serialFlag shell input keyevent "KEYCODE_HOME"
+
+# Grab our Android test mutex
+# Wait for any existing test runs on the devices
+
+# Blow away the lock if tests run too long, avoiding infinite loop
+lock_seconds=1200                                # Duration in seconds.
+lock_end_time=$(( $(date +%s) + lock_seconds ))  # Calculate end time.
+
+until mkdir /var/tmp/VkLayerValidationTests.$serial.lock
+do
+    sleep 5
+    echo "Waiting for existing Android test to complete on $serial"
+
+    if [ $(date +%s) -gt $lock_end_time ]
+    then
+        echo "Lock timeout reached: $lock_seconds seconds"
+        echo "Deleting /var/tmp/VkLayerValidationTests.$serial.lock"
+        rm -r /var/tmp/VkLayerValidationTests.$serial.lock
+    fi
+done
+
+# Clean up our lock on any exit condition
+function finish {
+   rm -r /var/tmp/VkLayerValidationTests.$serial.lock
+}
+trap finish EXIT
+
+# Clear the log
+adb $serialFlag logcat -c
+
+# Ensure any previous activity has stopped, otherwise it won't run tests
+adb $serialFlag shell am force-stop com.example.VulkanLayerValidationTests
+
+# Remove any existing APK that may have been installed from another host
+# Disable exit on error in case the APK is not present
+set +e
+adb $serialFlag shell pm list packages | grep com.example.VulkanLayerValidationTests
+if [ $? -eq 0 ]
+then
+    adb $serialFlag uninstall com.example.VulkanLayerValidationTests
+fi
+# Re-enable exit on error
+set -e
+
+# Install the current build
+adb $serialFlag install -r bin/VulkanLayerValidationTests.apk
+
+# Kick off the tests with the requested gtest filter
+adb $serialFlag shell am start -a android.intent.action.MAIN -c android-intent.category.LAUNCH -n com.example.VulkanLayerValidationTests/android.app.NativeActivity --es args --gtest_filter="${filter}"
+
+#
+# Scrape the log until we get pass/fail/crash
+#
+
+# The following loop will give tests 20 minutes to pass/fail/crash
+seconds=1200                          # Duration in seconds.
+endTime=$(( $(date +%s) + seconds ))  # Calculate end time.
+
+exitCode=-1;
+
+# Disable exit on error, we expect grep to fail multiple times in this loop
+set +e
+
+while [ $(date +%s) -lt $endTime ]; do  # Loop until interval has elapsed.
+
+    # The following line is printed from android_main on success
+    adb $serialFlag logcat -d | grep "==== Tests PASSED ===="
+    if [ $? -eq 0 ]
+    then
+        echo VulkanLayerValidationTests PASSED!
+        exitCode=0
+        break
+    fi
+
+    # The following line is printed from android_main on failure
+    adb $serialFlag logcat -d | grep "==== Tests FAILED ===="
+    if [ $? -eq 0 ]
+    then
+        echo VulkanLayerValidationTests FAILED!
+        exitCode=1
+        break
+    fi
+
+    # developer.android.com recommends searching for the following string to detect native crash
+    adb $serialFlag logcat -d | grep "\*\*\* \*\*\* \*\*\* \*\*\* \*\*\* \*\*\* \*\*\* \*\*\* \*\*\* \*\*\* \*\*\* \*\*\* \*\*\* \*\*\* \*\*\* \*\*\*"
+    if [ $? -eq 0 ]
+    then
+        exitCode=2
+        echo VulkanLayerValidationTests CRASHED!
+        break
+    fi
+
+    sleep 5
+
+done
+
+# Re-enable exit on error
+set -e
+
+if [ $exitCode -eq -1 ]
+then
+    echo "VulkanLayerValidationTests hasn't completed in $seconds seconds. Script exiting."
+fi
+
+#
+# Cleanup
+#
+
+# Return to home screen to clear any error pop-ups
+adb $serialFlag shell input keyevent "KEYCODE_HOME"
+
+# Stop the activity
+adb $serialFlag shell am force-stop com.example.VulkanLayerValidationTests
+
+today=$(date +%Y-%m-%d.%H:%M:%S)
+outFile="VulkanLayerValidationTests.$platform.$today.out.txt"
+errFile="VulkanLayerValidationTests.$platform.$today.err.txt"
+adb $serialFlag pull /sdcard/Android/data/com.example.VulkanLayerValidationTests/files/out.txt VulkanLayerValidationTests.$platform.$today.out.txt
+adb $serialFlag pull /sdcard/Android/data/com.example.VulkanLayerValidationTests/files/err.txt VulkanLayerValidationTests.$platform.$today.err.txt
+
+if [ -f $outFile ]; then
+    echo $outFile size $(wc -c < $outFile)
+fi
+
+if [ -f $errFile ]; then
+    echo $errFile size $(wc -c < $errFile)
+fi
+
+echo
+echo ===== Dumping logcat of VulkanLayerValidationTests =====
+echo If the test is crashing, be sure to inspect full log for complete stack trace.
+echo "adb $serialFlag logcat -d | grep VulkanLayerValidationTests"
+echo ========================================================
+echo
+adb $serialFlag logcat -d | grep VulkanLayerValidationTests
+
+exit $exitCode
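The heart of test_APK.sh is the polling loop that dumps logcat until a pass/fail marker or the native-crash banner appears, bounded by a timeout. A rough Python equivalent, not part of the patch; the marker strings are the ones the script greps for.

    #!/usr/bin/env python3
    """Sketch of the result-scraping loop in test_APK.sh: poll `adb logcat -d`
    until a PASSED/FAILED marker or the native-crash banner appears, or the
    timeout expires. Not part of the patch."""
    import subprocess
    import time

    # The banner Android logs on a native crash: 16 groups of "***".
    CRASH_BANNER = "*** " * 15 + "***"

    def wait_for_result(serial=None, timeout_s=1200, poll_s=5):
        """Return 0 on PASSED, 1 on FAILED, 2 on crash, -1 on timeout."""
        adb = ["adb"] + (["-s", serial] if serial else [])
        deadline = time.time() + timeout_s
        while time.time() < deadline:
            log = subprocess.run(adb + ["logcat", "-d"],
                                 capture_output=True, text=True).stdout
            if "==== Tests PASSED ====" in log:
                return 0
            if "==== Tests FAILED ====" in log:
                return 1
            if CRASH_BANNER in log:
                return 2
            time.sleep(poll_s)
        return -1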
diff --git a/src/third_party/vulkan-tools/src/build-android/update_external_sources_android.bat b/src/third_party/vulkan-tools/src/build-android/update_external_sources_android.bat
new file mode 100755
index 0000000..807021a
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-android/update_external_sources_android.bat
@@ -0,0 +1,335 @@
+@echo off
+REM Update source for glslang, spirv-tools, shaderc, vulkan-headers
+
+REM
+REM Copyright 2016 The Android Open Source Project
+REM Copyright (C) 2015 Valve Corporation
+REM
+REM Licensed under the Apache License, Version 2.0 (the "License");
+REM you may not use this file except in compliance with the License.
+REM You may obtain a copy of the License at
+REM
+REM      http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing, software
+REM distributed under the License is distributed on an "AS IS" BASIS,
+REM WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+REM See the License for the specific language governing permissions and
+REM limitations under the License.
+REM
+
+setlocal EnableDelayedExpansion
+set errorCode=0
+set ANDROID_BUILD_DIR=%~dp0
+set BUILD_DIR=%ANDROID_BUILD_DIR%
+set BASE_DIR=%BUILD_DIR%\third_party
+set GLSLANG_DIR=%BASE_DIR%\shaderc\third_party\glslang
+set SPIRV_TOOLS_DIR=%BASE_DIR%\shaderc\third_party\spirv-tools
+set SPIRV_HEADERS_DIR=%BASE_DIR%\shaderc\third_party\spirv-tools\external\spirv-headers
+set SHADERC_DIR=%BASE_DIR%\shaderc
+set VULKAN_HEADERS_DIR=%BASE_DIR%\Vulkan-Headers
+
+for %%X in (where.exe) do (set FOUND=%%~$PATH:X)
+if not defined FOUND (
+   echo Dependency check failed:
+   echo   where.exe not found
+   echo   This script requires Windows Vista or later, which includes where.exe.
+   set errorCode=1
+)
+
+where /q git.exe
+if %ERRORLEVEL% equ 1 (
+   echo Dependency check failed:
+   echo   git.exe not found
+   echo   Git for Windows can be downloaded here:  https://git-scm.com/download/win
+   echo   Install and ensure git.exe makes it into your PATH
+   set errorCode=1
+)
+
+where /q ndk-build.cmd
+if %ERRORLEVEL% equ 1 (
+   echo Dependency check failed:
+   echo   ndk-build.cmd not found
+   echo   Android NDK can be downloaded here:  http://developer.android.com/ndk/guides/setup.html
+   echo   Install and ensure ndk-build.cmd makes it into your PATH
+   set errorCode=1
+)
+
+REM ensure where is working with below false test
+REM where /q foo
+REM if %ERRORLEVEL% equ 1 (
+REM echo foo
+REM )
+
+:main
+
+if %errorCode% neq 0 (goto:error)
+
+REM Read the target versions from external file, which is shared with Linux script
+
+if not exist %ANDROID_BUILD_DIR%\glslang_revision_android (
+   echo.
+   echo Missing glslang_revision_android file. Place it in %ANDROID_BUILD_DIR%
+   goto:error
+)
+
+if not exist %ANDROID_BUILD_DIR%\spirv-tools_revision_android (
+   echo.
+   echo Missing spirv-tools_revision_android file. Place it in %ANDROID_BUILD_DIR%
+   set errorCode=1
+   goto:error
+)
+
+if not exist %ANDROID_BUILD_DIR%\spirv-headers_revision_android (
+   echo.
+   echo Missing spirv-headers_revision_android file. Place it in %ANDROID_BUILD_DIR%
+   set errorCode=1
+   goto:error
+)
+
+if not exist %ANDROID_BUILD_DIR%\shaderc_revision_android (
+   echo.
+   echo Missing shaderc_revision_android file. Place it in %ANDROID_BUILD_DIR%
+   set errorCode=1
+   goto:error
+)
+
+if not exist %ANDROID_BUILD_DIR%\vulkan-headers_revision_android (
+   echo.
+   echo Missing vulkan-headers_revision_android file. Place it in %ANDROID_BUILD_DIR%
+   set errorCode=1
+   goto:error
+)
+
+set /p GLSLANG_REVISION= < glslang_revision_android
+set /p SPIRV_TOOLS_REVISION= < spirv-tools_revision_android
+set /p SPIRV_HEADERS_REVISION= < spirv-headers_revision_android
+set /p SHADERC_REVISION= < shaderc_revision_android
+set /p VULKAN_HEADERS_REVISION= < vulkan-headers_revision_android
+echo GLSLANG_REVISION=%GLSLANG_REVISION%
+echo SPIRV_TOOLS_REVISION=%SPIRV_TOOLS_REVISION%
+echo SPIRV_HEADERS_REVISION=%SPIRV_HEADERS_REVISION%
+echo SHADERC_REVISION=%SHADERC_REVISION%
+echo VULKAN_HEADERS_REVISION=%VULKAN_HEADERS_REVISION%
+
+
+echo Creating and/or updating glslang, spirv-tools, spirv-headers, shaderc, vulkan-headers in %BASE_DIR%
+
+set sync-glslang=0
+set sync-spirv-tools=0
+set sync-spirv-headers=0
+set sync-shaderc=0
+set sync-vulkan-headers=1
+set build-shaderc=0
+
+if %sync-shaderc% equ 1 (
+   if not exist %SHADERC_DIR% (
+      call:create_shaderc
+   )
+   if %errorCode% neq 0 (goto:error)
+   call:update_shaderc
+   if %errorCode% neq 0 (goto:error)
+)
+
+if %sync-glslang% equ 1 (
+   if not exist %GLSLANG_DIR% (
+      call:create_glslang
+   )
+   if %errorCode% neq 0 (goto:error)
+   call:update_glslang
+   if %errorCode% neq 0 (goto:error)
+)
+
+if %sync-spirv-tools% equ 1 (
+   if %ERRORLEVEL% neq 0 (goto:error)
+   if not exist %SPIRV_TOOLS_DIR% (
+      call:create_spirv-tools
+   )
+   if %errorCode% neq 0 (goto:error)
+   call:update_spirv-tools
+   if %errorCode% neq 0 (goto:error)
+)
+
+if %sync-spirv-headers% equ 1 (
+   if %ERRORLEVEL% neq 0 (goto:error)
+   if not exist %SPIRV_HEADERS_DIR% (
+      call:create_spirv-headers
+   )
+   if %errorCode% neq 0 (goto:error)
+   call:update_spirv-headers
+   if %errorCode% neq 0 (goto:error)
+)
+
+if %sync-vulkan-headers% equ 1 (
+   if %ERRORLEVEL% neq 0 (goto:error)
+   if not exist %VULKAN_HEADERS_DIR% (
+      call:create_vulkan-headers
+   )
+   if %errorCode% neq 0 (goto:error)
+   call:update_vulkan-headers
+   if %errorCode% neq 0 (goto:error)
+)
+
+if %build-shaderc% equ 1 (
+   call:build_shaderc
+   if %errorCode% neq 0 (goto:error)
+)
+
+echo.
+echo Exiting
+goto:finish
+
+:error
+echo.
+echo Halting due to error
+goto:finish
+
+:finish
+if not "%cd%\" == "%BUILD_DIR%" ( cd %BUILD_DIR% )
+endlocal
+REM This needs a fix to return error, something like exit %errorCode%
+REM Right now it is returning 0
+goto:eof
+
+
+
+REM // ======== Functions ======== //
+
+:create_glslang
+   echo.
+   echo Creating local glslang repository %GLSLANG_DIR%
+   if not exist "%GLSLANG_DIR%\" mkdir %GLSLANG_DIR%
+   cd %GLSLANG_DIR%
+   git clone https://github.com/KhronosGroup/glslang.git .
+   git checkout %GLSLANG_REVISION%
+   if not exist %GLSLANG_DIR%\SPIRV (
+      echo glslang source download failed!
+      set errorCode=1
+   )
+goto:eof
+
+:update_glslang
+   echo.
+   echo Updating %GLSLANG_DIR%
+   cd %GLSLANG_DIR%
+   git fetch --all
+   git checkout %GLSLANG_REVISION%
+   if not exist %GLSLANG_DIR%\SPIRV (
+      echo glslang source update failed!
+      set errorCode=1
+   )
+goto:eof
+
+:create_spirv-tools
+   echo.
+   echo Creating local spirv-tools repository %SPIRV_TOOLS_DIR%
+   if not exist "%SPIRV_TOOLS_DIR%\" mkdir %SPIRV_TOOLS_DIR%
+   cd %SPIRV_TOOLS_DIR%
+   git clone https://github.com/KhronosGroup/SPIRV-Tools.git .
+   git checkout %SPIRV_TOOLS_REVISION%
+   if not exist %SPIRV_TOOLS_DIR%\source (
+      echo spirv-tools source download failed!
+      set errorCode=1
+   )
+goto:eof
+
+:update_spirv-tools
+   echo.
+   echo Updating %SPIRV_TOOLS_DIR%
+   cd %SPIRV_TOOLS_DIR%
+   git fetch --all
+   git checkout %SPIRV_TOOLS_REVISION%
+   if not exist %SPIRV_TOOLS_DIR%\source (
+      echo spirv-tools source update failed!
+      set errorCode=1
+   )
+goto:eof
+
+:create_spirv-headers
+   echo.
+   echo Creating local spirv-headers repository %SPIRV_HEADERS_DIR%
+   if not exist "%SPIRV_HEADERS_DIR%\" mkdir %SPIRV_HEADERS_DIR%
+   cd %SPIRV_HEADERS_DIR%
+   git clone https://github.com/KhronosGroup/SPIRV-Headers.git .
+   git checkout %SPIRV_HEADERS_REVISION%
+   if not exist %SPIRV_HEADERS_DIR%\include (
+      echo spirv-headers source download failed!
+      set errorCode=1
+   )
+goto:eof
+
+:update_spirv-headers
+   echo.
+   echo Updating %SPIRV_HEADERS_DIR%
+   cd %SPIRV_HEADERS_DIR%
+   git fetch --all
+   git checkout %SPIRV_HEADERS_REVISION%
+   if not exist %SPIRV_HEADERS_DIR%\include (
+      echo spirv-headers source update failed!
+      set errorCode=1
+   )
+goto:eof
+
+:create_shaderc
+   echo.
+   echo Creating local shaderc repository %SHADERC_DIR%
+   if not exist "%SHADERC_DIR%\" mkdir %SHADERC_DIR%
+   cd %SHADERC_DIR%
+   git clone https://github.com/google/shaderc.git .
+   git checkout %SHADERC_REVISION%
+   if not exist %SHADERC_DIR%\libshaderc (
+      echo shaderc source download failed!
+      set errorCode=1
+   )
+goto:eof
+
+:update_shaderc
+   echo.
+   echo Updating %SHADERC_DIR%
+   cd %SHADERC_DIR%
+   git fetch --all
+   git checkout %SHADERC_REVISION%
+   if not exist %SHADERC_DIR%\libshaderc (
+      echo shaderc source update failed!
+      set errorCode=1
+   )
+goto:eof
+
+:build_shaderc
+   echo.
+   echo Building %SHADERC_DIR%
+   cd %SHADERC_DIR%\android_test
+   echo Building shaderc with Android NDK
+   call ndk-build NDK_APPLICATION_MK=../../../jni/shaderc/Application.mk THIRD_PARTY_PATH=../third_party -j 4
+   REM Check for existence of one lib, even though we should check for all results
+   if not exist %SHADERC_DIR%\android_test\obj\local\x86\libshaderc.a (
+      echo.
+      echo shaderc build failed!
+      set errorCode=1
+   )
+goto:eof
+
+:create_vulkan-headers
+   echo.
+   echo Creating local vulkan-headers repository %VULKAN_HEADERS_DIR%
+   if not exist "%VULKAN_HEADERS_DIR%\" mkdir %VULKAN_HEADERS_DIR%
+   cd %VULKAN_HEADERS_DIR%
+   git clone https://github.com/KhronosGroup/Vulkan-Headers.git .
+   git checkout %VULKAN_HEADERS_REVISION%
+   if not exist %VULKAN_HEADERS_DIR%\registry (
+      echo vulkan-headers source download failed!
+      set errorCode=1
+   )
+goto:eof
+
+:update_vulkan-headers
+   echo.
+   echo Updating %VULKAN_HEADERS_DIR%
+   cd %VULKAN_HEADERS_DIR%
+   git fetch --all
+   git checkout %VULKAN_HEADERS_REVISION%
+   if not exist %VULKAN_HEADERS_DIR%\registry (
+      echo vulkan-headers source update failed!
+      set errorCode=1
+   )
+goto:eof
diff --git a/src/third_party/vulkan-tools/src/build-android/update_external_sources_android.sh b/src/third_party/vulkan-tools/src/build-android/update_external_sources_android.sh
new file mode 100755
index 0000000..dc20c10
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-android/update_external_sources_android.sh
@@ -0,0 +1,264 @@
+#!/bin/bash
+# Update source for glslang, spirv-tools, shaderc, vulkan-headers
+
+# Copyright 2016 The Android Open Source Project
+# Copyright (C) 2015 Valve Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e
+
+sync_glslang=0
+sync_spirv_tools=0
+sync_spirv_headers=0
+sync_shaderc=0
+sync_vulkan_headers=1
+
+ANDROIDBUILDDIR=$PWD
+BUILDDIR=$ANDROIDBUILDDIR
+BASEDIR=$BUILDDIR/third_party
+
+GLSLANG_REVISION=$(cat $ANDROIDBUILDDIR/glslang_revision_android)
+SPIRV_TOOLS_REVISION=$(cat $ANDROIDBUILDDIR/spirv-tools_revision_android)
+SPIRV_HEADERS_REVISION=$(cat $ANDROIDBUILDDIR/spirv-headers_revision_android)
+SHADERC_REVISION=$(cat $ANDROIDBUILDDIR/shaderc_revision_android)
+VULKAN_HEADERS_REVISION=$(cat $ANDROIDBUILDDIR/vulkan-headers_revision_android)
+
+echo "GLSLANG_REVISION=$GLSLANG_REVISION"
+echo "SPIRV_TOOLS_REVISION=$SPIRV_TOOLS_REVISION"
+echo "SPIRV_HEADERS_REVISION=$SPIRV_HEADERS_REVISION"
+echo "SHADERC_REVISION=$SHADERC_REVISION"
+echo "VULKAN_HEADERS_REVISION=$VULKAN_HEADERS_REVISION"
+
+GLSLANG_URL=$(cat $ANDROIDBUILDDIR/glslang_url_android)
+SPIRV_TOOLS_URL=$(cat $ANDROIDBUILDDIR/spirv-tools_url_android)
+SPIRV_HEADERS_URL=$(cat $ANDROIDBUILDDIR/spirv-headers_url_android)
+SHADERC_URL=$(cat $ANDROIDBUILDDIR/shaderc_url_android)
+VULKAN_HEADERS_URL=$(cat $ANDROIDBUILDDIR/vulkan-headers_url_android)
+
+echo "GLSLANG_URL=$GLSLANG_URL"
+echo "SPIRV_TOOLS_URL=$SPIRV_TOOLS_URL"
+echo "SPIRV_HEADERS_URL=$SPIRV_HEADERS_URL"
+echo "SHADERC_URL=$SHADERC_URL"
+echo "VULKAN_HEADERS_URL=$VULKAN_HEADERS_URL"
+
+if [[ $(uname) == "Linux" ]]; then
+    cores="$(nproc || echo 4)"
+elif [[ $(uname) == "Darwin" ]]; then
+    cores=$(sysctl -n hw.ncpu)
+fi
+
+#
+# Parse parameters
+#
+
+function printUsage {
+   echo "Supported parameters are:"
+   echo "    --abi <abi> (optional)"
+   echo "    --no-build (optional)"
+   echo
+   echo "i.e. ${0##*/} --abi arm64-v8a"
+   exit 1
+}
+
+while [[ $# -gt 0 ]]
+do
+    case $1 in
+        --abi)
+            abi="$2"
+            shift 2
+            ;;
+        --no-build)
+            nobuild=1
+            shift 1
+            ;;
+        *)
+            # unknown option
+            echo Unknown option: $1
+            echo
+            printUsage
+            exit 1
+            ;;
+    esac
+done
+
+echo abi=$abi
+if [[ -z $abi ]]
+then
+    echo No abi provided, so building for all supported abis.
+fi
+
+echo no-build=$nobuild
+if [[ $nobuild ]]
+then
+    echo Skipping build.
+fi
+
+function create_glslang () {
+   rm -rf $BASEDIR/shaderc/third_party/glslang
+   echo "Creating local glslang repository ($BASEDIR/shaderc/third_party/glslang)."
+   mkdir -p $BASEDIR/shaderc/third_party/glslang
+   cd $BASEDIR/shaderc/third_party/glslang
+   git clone $GLSLANG_URL .
+   git checkout $GLSLANG_REVISION
+}
+
+function update_glslang () {
+   echo "Updating $BASEDIR/shaderc/third_party/glslang"
+   cd $BASEDIR/shaderc/third_party/glslang
+   if [[ $(git config --get remote.origin.url) != $GLSLANG_URL ]]; then
+      echo "glslang URL mismatch, recreating local repo"
+      create_glslang
+      return
+   fi
+   git fetch --all
+   git checkout $GLSLANG_REVISION
+}
+
+function create_spirv-tools () {
+   rm -rf $BASEDIR/shaderc/third_party/spirv-tools
+   echo "Creating local spirv-tools repository ($BASEDIR/shaderc/third_party/spirv-tools)."
+   mkdir -p $BASEDIR/shaderc/third_party/spirv-tools
+   cd $BASEDIR/shaderc/third_party/spirv-tools
+   git clone $SPIRV_TOOLS_URL .
+   git checkout $SPIRV_TOOLS_REVISION
+}
+
+function update_spirv-tools () {
+   echo "Updating $BASEDIR/shaderc/third_party/spirv-tools"
+   cd $BASEDIR/shaderc/third_party/spirv-tools
+   if [[ $(git config --get remote.origin.url) != $SPIRV_TOOLS_URL ]]; then
+      echo "spirv-tools URL mismatch, recreating local repo"
+      create_spirv-tools
+      return
+   fi
+   git fetch --all
+   git checkout $SPIRV_TOOLS_REVISION
+}
+
+function create_spirv-headers () {
+   rm -rf $BASEDIR/shaderc/third_party/spirv-tools/external/spirv-headers
+   echo "Creating local spirv-headers repository ($BASEDIR/shaderc/third_party/spirv-tools/external/spirv-headers)."
+   mkdir -p $BASEDIR/shaderc/third_party/spirv-tools/external/spirv-headers
+   cd $BASEDIR/shaderc/third_party/spirv-tools/external/spirv-headers
+   git clone $SPIRV_HEADERS_URL .
+   git checkout $SPIRV_HEADERS_REVISION
+}
+
+function update_spirv-headers () {
+   echo "Updating $BASEDIR/shaderc/third_party/spirv-tools/external/spirv-headers"
+   cd $BASEDIR/shaderc/third_party/spirv-tools/external/spirv-headers
+   if [[ $(git config --get remote.origin.url) != $SPIRV_HEADERS_URL ]]; then
+      echo "spirv-headers URL mismatch, recreating local repo"
+      create_spirv-headers
+      return
+   fi
+   git fetch --all
+   git checkout $SPIRV_HEADERS_REVISION
+}
+
+function create_shaderc () {
+   rm -rf $BASEDIR/shaderc
+   echo "Creating local shaderc repository ($BASEDIR/shaderc)."
+   mkdir -p $BASEDIR/shaderc
+   cd $BASEDIR/shaderc
+   git clone $SHADERC_URL .
+   git checkout $SHADERC_REVISION
+}
+
+function update_shaderc () {
+   echo "Updating $BASEDIR/shaderc"
+   cd $BASEDIR/shaderc
+   if [[ $(git config --get remote.origin.url) != $SHADERC_URL ]]; then
+      echo "shaderc URL mismatch, recreating local repo"
+      create_shaderc
+      return
+   fi
+   git fetch --all
+   git checkout $SHADERC_REVISION
+}
+
+function build_shaderc () {
+   echo "Building $BASEDIR/shaderc"
+   cd $BASEDIR/shaderc/android_test
+   if [[ $abi ]]; then
+      ndk-build NDK_APPLICATION_MK=../../../jni/shaderc/Application.mk THIRD_PARTY_PATH=../third_party APP_ABI=$abi -j $cores;
+   else
+      ndk-build NDK_APPLICATION_MK=../../../jni/shaderc/Application.mk THIRD_PARTY_PATH=../third_party -j $cores;
+   fi
+}
+
+function create_vulkan-headers () {
+   rm -rf $BASEDIR/Vulkan-Headers
+   echo "Creating local Vulkan-Headers repository ($BASEDIR/Vulkan-Headers)."
+   mkdir -p $BASEDIR/Vulkan-Headers
+   cd $BASEDIR/Vulkan-Headers
+   git clone $VULKAN_HEADERS_URL .
+   git checkout $VULKAN_HEADERS_REVISION
+}
+
+function update_vulkan-headers () {
+   echo "Updating $BASEDIR/Vulkan-Headers"
+   cd $BASEDIR/Vulkan-Headers
+   if [[ $(git config --get remote.origin.url) != $VULKAN_HEADERS_URL ]]; then
+      echo "Vulkan-Headers URL mismatch, recreating local repo"
+      create_vulkan-headers
+      return
+   fi
+   git fetch --all
+   git checkout $VULKAN_HEADERS_REVISION
+}
+
+if [ $sync_shaderc -eq 1 ]; then
+    if [ ! -d "$BASEDIR/shaderc" -o ! -d "$BASEDIR/shaderc/.git" ]; then
+        create_shaderc
+    fi
+    update_shaderc
+fi
+
+if [ $sync_glslang -eq 1 ]; then
+    if [ ! -d "$BASEDIR/shaderc/third_party/glslang" -o ! -d "$BASEDIR/shaderc/third_party/glslang/.git" -o -d "$BASEDIR/shaderc/third_party/glslang/.svn" ]; then
+    create_glslang
+    fi
+    update_glslang
+fi
+
+if [ $sync_spirv_tools -eq 1 ]; then
+    if [ ! -d "$BASEDIR/shaderc/third_party/spirv-tools" -o ! -d "$BASEDIR/shaderc/third_party/spirv-tools/.git" ]; then
+    create_spirv-tools
+    fi
+    update_spirv-tools
+fi
+
+if [ $sync_spirv_headers -eq 1 ]; then
+    if [ ! -d "$BASEDIR/shaderc/third_party/spirv-tools/external/spirv-headers" -o ! -d "$BASEDIR/shaderc/third_party/spirv-tools/external/spirv-headers/.git" ]; then
+    create_spirv-headers
+    fi
+    update_spirv-headers
+fi
+
+if [ $sync_vulkan_headers -eq 1 ]; then
+    if [ ! -d "$BASEDIR/Vulkan-Headers" -o ! -d "$BASEDIR/Vulkan-Headers/.git" ]; then
+    create_vulkan-headers
+    fi
+    update_vulkan-headers
+fi
+
+if [[ -z $nobuild ]]
+then
+build_shaderc
+fi
+
+echo ""
+echo "${0##*/} finished."
+
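Each dependency above follows the same create/update pattern: clone into place if the checkout is missing, recreate it when origin no longer matches the pinned URL, otherwise fetch and check out the pinned revision. A condensed Python sketch of that pattern, not part of the patch, using plain git commands:

    #!/usr/bin/env python3
    """Sketch of the create_*/update_* pattern in the script above. Not part
    of the patch; assumes git is on PATH."""
    import os
    import shutil
    import subprocess

    def git(args, cwd):
        return subprocess.run(["git"] + args, cwd=cwd, check=True,
                              capture_output=True, text=True).stdout.strip()

    def sync_pinned_repo(path, url, revision):
        if os.path.isdir(os.path.join(path, ".git")):
            # URL mismatch: throw the checkout away and recreate, as the script does.
            if git(["config", "--get", "remote.origin.url"], path) != url:
                shutil.rmtree(path)
        if not os.path.isdir(os.path.join(path, ".git")):
            os.makedirs(path, exist_ok=True)
            git(["clone", url, "."], path)
        git(["fetch", "--all"], path)
        git(["checkout", revision], path)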
diff --git a/src/third_party/vulkan-tools/src/build-android/vulkan-headers_revision_android b/src/third_party/vulkan-tools/src/build-android/vulkan-headers_revision_android
new file mode 100644
index 0000000..23789c0
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-android/vulkan-headers_revision_android
@@ -0,0 +1 @@
+v1.1.130
diff --git a/src/third_party/vulkan-tools/src/build-android/vulkan-headers_url_android b/src/third_party/vulkan-tools/src/build-android/vulkan-headers_url_android
new file mode 100644
index 0000000..9ce9851
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-android/vulkan-headers_url_android
@@ -0,0 +1 @@
+https://github.com/KhronosGroup/Vulkan-Headers.git
diff --git a/src/third_party/vulkan-tools/src/build-gn/generate_vulkan_layers_json.py b/src/third_party/vulkan-tools/src/build-gn/generate_vulkan_layers_json.py
new file mode 100755
index 0000000..2999cd8
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-gn/generate_vulkan_layers_json.py
@@ -0,0 +1,126 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2016 The ANGLE Project Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Generate copies of the Vulkan layers JSON files, with no paths, forcing
+Vulkan to use the default search path to look for layers."""
+
+from __future__ import print_function
+
+import argparse
+import glob
+import json
+import os
+import platform
+import sys
+
+
+def glob_slash(dirname):
+    """Like regular glob but replaces \ with / in returned paths."""
+    return [s.replace('\\', '/') for s in glob.glob(dirname)]
+
+
+def main():
+    parser = argparse.ArgumentParser(description=__doc__)
+    parser.add_argument('--icd', action='store_true')
+    parser.add_argument('source_dir')
+    parser.add_argument('target_dir')
+    parser.add_argument('version_header', help='path to vulkan_core.h')
+    parser.add_argument('json_files', nargs='*')
+    args = parser.parse_args()
+
+    source_dir = args.source_dir
+    target_dir = args.target_dir
+
+    json_files = [j for j in args.json_files if j.endswith('.json')]
+    json_in_files = [j for j in args.json_files if j.endswith('.json.in')]
+
+    data_key = 'ICD' if args.icd else 'layer'
+
+    if not os.path.isdir(source_dir):
+        print(source_dir + ' is not a directory.', file=sys.stderr)
+        return 1
+
+    if not os.path.exists(target_dir):
+        os.makedirs(target_dir)
+
+    # Copy the *.json files from source dir to target dir
+    if (set(glob_slash(os.path.join(source_dir, '*.json'))) != set(json_files)):
+        print(glob.glob(os.path.join(source_dir, '*.json')))
+        print('.json list in gn file is out-of-date', file=sys.stderr)
+        return 1
+
+    for json_fname in json_files:
+        if not json_fname.endswith('.json'):
+            continue
+        with open(json_fname) as infile:
+            data = json.load(infile)
+
+        # Update the path.
+        if not data_key in data:
+            raise Exception(
+                "Could not find '%s' key in %s" % (data_key, json_fname))
+
+        # The standard validation layer has no library path.
+        if 'library_path' in data[data_key]:
+            prev_name = os.path.basename(data[data_key]['library_path'])
+            data[data_key]['library_path'] = prev_name
+
+        target_fname = os.path.join(target_dir, os.path.basename(json_fname))
+        with open(target_fname, 'wb') as outfile:
+            json.dump(data, outfile)
+
+    # Get the Vulkan version from the vulkan_core.h file
+    vk_header_filename = args.version_header
+    vk_version = None
+    with open(vk_header_filename) as vk_header_file:
+        for line in vk_header_file:
+            if line.startswith('#define VK_HEADER_VERSION'):
+                vk_version = line.split()[-1]
+                break
+    if not vk_version:
+        print('failed to extract vk_version', file=sys.stderr)
+        return 1
+
+    # Set json file prefix and suffix for generating files, default to Linux.
+    relative_path_prefix = '../lib'
+    file_type_suffix = '.so'
+    if platform.system() == 'Windows':
+        relative_path_prefix = r'..\\'  # json-escaped, hence two backslashes.
+        file_type_suffix = '.dll'
+
+    # For each *.json.in template files in source dir generate actual json file
+    # in target dir
+    if (set(glob_slash(os.path.join(source_dir, '*.json.in'))) !=
+            set(json_in_files)):
+        print('.json.in list in gn file is out-of-date', file=sys.stderr)
+        return 1
+    for json_in_name in json_in_files:
+        if not json_in_name.endswith('.json.in'):
+            continue
+        json_in_fname = os.path.basename(json_in_name)
+        layer_name = json_in_fname[:-len('.json.in')]
+        layer_lib_name = layer_name + file_type_suffix
+        json_out_fname = os.path.join(target_dir, json_in_fname[:-len('.in')])
+        with open(json_out_fname,'w') as json_out_file, \
+             open(json_in_name) as infile:
+            for line in infile:
+                line = line.replace('@RELATIVE_LAYER_BINARY@',
+                                    relative_path_prefix + layer_lib_name)
+                line = line.replace('@VK_VERSION@', '1.1.' + vk_version)
+                json_out_file.write(line)
+
+if __name__ == '__main__':
+    sys.exit(main())
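The essential transformation in generate_vulkan_layers_json.py is stripping library_path down to a bare file name so the Vulkan loader falls back to its default search path. A standalone sketch of just that step, not part of the patch; the input manifest below is a made-up example:

    #!/usr/bin/env python3
    """Reduce library_path in a layer manifest to its basename. Sketch only;
    the example manifest is invented for illustration."""
    import json
    import os

    def strip_library_path(manifest, key="layer"):
        """Return a copy of the manifest with library_path reduced to a basename."""
        out = json.loads(json.dumps(manifest))  # cheap deep copy
        section = out[key]
        if "library_path" in section:
            section["library_path"] = os.path.basename(section["library_path"])
        return out

    if __name__ == "__main__":
        example = {"file_format_version": "1.1.0",
                   "layer": {"name": "VK_LAYER_KHRONOS_example",
                             "library_path": "/build/lib/libVkLayer_example.so"}}
        print(json.dumps(strip_library_path(example), indent=2))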
diff --git a/src/third_party/vulkan-tools/src/build-gn/secondary/build_overrides/build.gni b/src/third_party/vulkan-tools/src/build-gn/secondary/build_overrides/build.gni
new file mode 100644
index 0000000..c6c11fa
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-gn/secondary/build_overrides/build.gni
@@ -0,0 +1,18 @@
+# Copyright (c) 2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+build_with_chromium = false
+ignore_elf32_limitations = true
+linux_use_bundled_binutils_override = false
+use_system_xcode = true
diff --git a/src/third_party/vulkan-tools/src/build-gn/secondary/build_overrides/vulkan_tools.gni b/src/third_party/vulkan-tools/src/build-gn/secondary/build_overrides/vulkan_tools.gni
new file mode 100644
index 0000000..9f80846
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-gn/secondary/build_overrides/vulkan_tools.gni
@@ -0,0 +1,21 @@
+# Copyright (c) 2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Paths to vulkan tools dependencies
+vulkan_headers_dir = "//external/Vulkan-Headers"
+
+# Subdirectories for generated files
+vulkan_data_subdir = ""
+vulkan_gen_subdir = ""
+
diff --git a/src/third_party/vulkan-tools/src/build-gn/update_deps.sh b/src/third_party/vulkan-tools/src/build-gn/update_deps.sh
new file mode 100755
index 0000000..41da2ab
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/build-gn/update_deps.sh
@@ -0,0 +1,37 @@
+#!/bin/sh
+
+# Copyright (c) 2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Execute at repo root
+cd "$(dirname $0)/.."
+
+# Use update_deps.py to update source dependencies from /scripts/known_good.json
+scripts/update_deps.py --dir="external" --no-build
+
+# Use gclient to update toolchain dependencies from /build-gn/DEPS (from chromium)
+cat << EOF >> .gclient
+solutions = [
+  { "name"        : ".",
+    "url"         : "https://github.com/KhronosGroup/Vulkan-Tools",
+    "deps_file"   : "build-gn/DEPS",
+    "managed"     : False,
+    "custom_deps" : {
+    },
+    "custom_vars": {},
+  },
+]
+EOF
+gclient sync
+
diff --git a/src/third_party/vulkan-tools/src/cmake/Copyright_cmake.txt b/src/third_party/vulkan-tools/src/cmake/Copyright_cmake.txt
new file mode 100644
index 0000000..743c634
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cmake/Copyright_cmake.txt
@@ -0,0 +1,126 @@
+CMake - Cross Platform Makefile Generator
+Copyright 2000-2018 Kitware, Inc. and Contributors
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+* Redistributions of source code must retain the above copyright
+  notice, this list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright
+  notice, this list of conditions and the following disclaimer in the
+  documentation and/or other materials provided with the distribution.
+
+* Neither the name of Kitware, Inc. nor the names of Contributors
+  may be used to endorse or promote products derived from this
+  software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+------------------------------------------------------------------------------
+
+The following individuals and institutions are among the Contributors:
+
+* Aaron C. Meadows <cmake@shadowguarddev.com>
+* Adriaan de Groot <groot@kde.org>
+* Aleksey Avdeev <solo@altlinux.ru>
+* Alexander Neundorf <neundorf@kde.org>
+* Alexander Smorkalov <alexander.smorkalov@itseez.com>
+* Alexey Sokolov <sokolov@google.com>
+* Alex Turbov <i.zaufi@gmail.com>
+* Andreas Pakulat <apaku@gmx.de>
+* Andreas Schneider <asn@cryptomilk.org>
+* André Rigland Brodtkorb <Andre.Brodtkorb@ifi.uio.no>
+* Axel Huebl, Helmholtz-Zentrum Dresden - Rossendorf
+* Benjamin Eikel
+* Bjoern Ricks <bjoern.ricks@gmail.com>
+* Brad Hards <bradh@kde.org>
+* Christopher Harvey
+* Christoph Grüninger <foss@grueninger.de>
+* Clement Creusot <creusot@cs.york.ac.uk>
+* Daniel Blezek <blezek@gmail.com>
+* Daniel Pfeifer <daniel@pfeifer-mail.de>
+* Enrico Scholz <enrico.scholz@informatik.tu-chemnitz.de>
+* Eran Ifrah <eran.ifrah@gmail.com>
+* Esben Mose Hansen, Ange Optimization ApS
+* Geoffrey Viola <geoffrey.viola@asirobots.com>
+* Google Inc
+* Gregor Jasny
+* Helio Chissini de Castro <helio@kde.org>
+* Ilya Lavrenov <ilya.lavrenov@itseez.com>
+* Insight Software Consortium <insightsoftwareconsortium.org>
+* Jan Woetzel
+* Kelly Thompson <kgt@lanl.gov>
+* Konstantin Podsvirov <konstantin@podsvirov.pro>
+* Mario Bensi <mbensi@ipsquad.net>
+* Mathieu Malaterre <mathieu.malaterre@gmail.com>
+* Matthaeus G. Chajdas
+* Matthias Kretz <kretz@kde.org>
+* Matthias Maennich <matthias@maennich.net>
+* Michael Stürmer
+* Miguel A. Figueroa-Villanueva
+* Mike Jackson
+* Mike McQuaid <mike@mikemcquaid.com>
+* Nicolas Bock <nicolasbock@gmail.com>
+* Nicolas Despres <nicolas.despres@gmail.com>
+* Nikita Krupen'ko <krnekit@gmail.com>
+* NVIDIA Corporation <www.nvidia.com>
+* OpenGamma Ltd. <opengamma.com>
+* Patrick Stotko <stotko@cs.uni-bonn.de>
+* Per Øyvind Karlsen <peroyvind@mandriva.org>
+* Peter Collingbourne <peter@pcc.me.uk>
+* Petr Gotthard <gotthard@honeywell.com>
+* Philip Lowman <philip@yhbt.com>
+* Philippe Proulx <pproulx@efficios.com>
+* Raffi Enficiaud, Max Planck Society
+* Raumfeld <raumfeld.com>
+* Roger Leigh <rleigh@codelibre.net>
+* Rolf Eike Beer <eike@sf-mail.de>
+* Roman Donchenko <roman.donchenko@itseez.com>
+* Roman Kharitonov <roman.kharitonov@itseez.com>
+* Ruslan Baratov
+* Sebastian Holtermann <sebholt@xwmw.org>
+* Stephen Kelly <steveire@gmail.com>
+* Sylvain Joubert <joubert.sy@gmail.com>
+* Thomas Sondergaard <ts@medical-insight.com>
+* Tobias Hunger <tobias.hunger@qt.io>
+* Todd Gamblin <tgamblin@llnl.gov>
+* Tristan Carel
+* University of Dundee
+* Vadim Zhukov
+* Will Dicharry <wdicharry@stellarscience.com>
+
+See version control history for details of individual contributions.
+
+The above copyright and license notice applies to distributions of
+CMake in source and binary form.  Third-party software packages supplied
+with CMake under compatible licenses provide their own copyright notices
+documented in corresponding subdirectories or source files.
+
+------------------------------------------------------------------------------
+
+CMake was initially developed by Kitware with the following sponsorship:
+
+ * National Library of Medicine at the National Institutes of Health
+   as part of the Insight Segmentation and Registration Toolkit (ITK).
+
+ * US National Labs (Los Alamos, Livermore, Sandia) ASC Parallel
+   Visualization Initiative.
+
+ * National Alliance for Medical Image Computing (NAMIC) is funded by the
+   National Institutes of Health through the NIH Roadmap for Medical Research,
+   Grant U54 EB005149.
+
+ * Kitware, Inc.
diff --git a/src/third_party/vulkan-tools/src/cmake/FindVulkan.cmake b/src/third_party/vulkan-tools/src/cmake/FindVulkan.cmake
new file mode 100644
index 0000000..1f4c8ad
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cmake/FindVulkan.cmake
@@ -0,0 +1,80 @@
+# Distributed under the OSI-approved BSD 3-Clause License.  See accompanying
+# file Copyright.txt or https://cmake.org/licensing for details.
+
+#.rst:
+# FindVulkan
+# ----------
+#
+# Try to find Vulkan
+#
+# IMPORTED Targets
+# ^^^^^^^^^^^^^^^^
+#
+# This module defines :prop_tgt:`IMPORTED` target ``Vulkan::Vulkan``, if
+# Vulkan has been found.
+#
+# Result Variables
+# ^^^^^^^^^^^^^^^^
+#
+# This module defines the following variables::
+#
+#   Vulkan_FOUND          - True if Vulkan was found
+#   Vulkan_INCLUDE_DIRS   - include directories for Vulkan
+#   Vulkan_LIBRARIES      - link against this library to use Vulkan
+#
+# The module will also define two cache variables::
+#
+#   Vulkan_INCLUDE_DIR    - the Vulkan include directory
+#   Vulkan_LIBRARY        - the path to the Vulkan library
+#
+
+if(WIN32)
+  find_path(Vulkan_INCLUDE_DIR
+    NAMES vulkan/vulkan.h
+    PATHS
+      "$ENV{VULKAN_SDK}/Include"
+    )
+
+  if(CMAKE_SIZEOF_VOID_P EQUAL 8)
+    find_library(Vulkan_LIBRARY
+      NAMES vulkan-1
+      PATHS
+        "$ENV{VULKAN_SDK}/Lib"
+        "$ENV{VULKAN_SDK}/Bin"
+        )
+  elseif(CMAKE_SIZEOF_VOID_P EQUAL 4)
+    find_library(Vulkan_LIBRARY
+      NAMES vulkan-1
+      PATHS
+        "$ENV{VULKAN_SDK}/Lib32"
+        "$ENV{VULKAN_SDK}/Bin32"
+        NO_SYSTEM_ENVIRONMENT_PATH
+        )
+  endif()
+else()
+    find_path(Vulkan_INCLUDE_DIR
+      NAMES vulkan/vulkan.h
+      PATHS
+        "$ENV{VULKAN_SDK}/include")
+    find_library(Vulkan_LIBRARY
+      NAMES vulkan
+      PATHS
+        "$ENV{VULKAN_SDK}/lib")
+endif()
+
+set(Vulkan_LIBRARIES ${Vulkan_LIBRARY})
+set(Vulkan_INCLUDE_DIRS ${Vulkan_INCLUDE_DIR})
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(Vulkan
+  DEFAULT_MSG
+  Vulkan_LIBRARY Vulkan_INCLUDE_DIR)
+
+mark_as_advanced(Vulkan_INCLUDE_DIR Vulkan_LIBRARY)
+
+if(Vulkan_FOUND AND NOT TARGET Vulkan::Vulkan)
+  add_library(Vulkan::Vulkan UNKNOWN IMPORTED)
+  set_target_properties(Vulkan::Vulkan PROPERTIES
+    IMPORTED_LOCATION "${Vulkan_LIBRARIES}"
+    INTERFACE_INCLUDE_DIRECTORIES "${Vulkan_INCLUDE_DIRS}")
+endif()
diff --git a/src/third_party/vulkan-tools/src/cmake/FindVulkanHeaders.cmake b/src/third_party/vulkan-tools/src/cmake/FindVulkanHeaders.cmake
new file mode 100644
index 0000000..a1458fd
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cmake/FindVulkanHeaders.cmake
@@ -0,0 +1,69 @@
+#.rst:
+# FindVulkanHeaders
+# -----------------
+#
+# Try to find Vulkan Headers and Registry.
+#
+# This module is intended to be used by projects that build Vulkan
+# "system" components such as the loader and layers.
+# Vulkan applications should instead use the FindVulkan (or similar)
+# find module that locates the headers and the loader library.
+#
+# When using this find module to locate the headers and registry
+# in a Vulkan-Headers repository, the Vulkan-Headers repository
+# should be built with 'install' target and the following environment
+# or CMake variable set to the location of the install directory.
+#
+#    VULKAN_HEADERS_INSTALL_DIR
+#
+# IMPORTED Targets
+# ^^^^^^^^^^^^^^^^
+#
+# This module defines no IMPORTED targets
+#
+# Result Variables
+# ^^^^^^^^^^^^^^^^
+#
+# This module defines the following variables::
+#
+#   VulkanHeaders_FOUND          - True if VulkanHeaders was found
+#   VulkanHeaders_INCLUDE_DIRS   - include directories for VulkanHeaders
+#
+#   VulkanRegistry_FOUND         - True if VulkanRegistry was found
+#   VulkanRegistry_DIRS          - directories for VulkanRegistry
+#
+# The module will also define two cache variables::
+#
+#   VulkanHeaders_INCLUDE_DIR    - the VulkanHeaders include directory
+#   VulkanRegistry_DIR           - the VulkanRegistry directory
+#
+
+# Use HINTS instead of PATH to search these locations before
+# searching system environment variables like $PATH that may
+# contain SDK directories.
+find_path(VulkanHeaders_INCLUDE_DIR
+    NAMES vulkan/vulkan.h
+    HINTS
+        ${VULKAN_HEADERS_INSTALL_DIR}/include
+        "$ENV{VULKAN_HEADERS_INSTALL_DIR}/include"
+        "$ENV{VULKAN_SDK}/include")
+
+if(VulkanHeaders_INCLUDE_DIR)
+   get_filename_component(VULKAN_REGISTRY_PATH_HINT ${VulkanHeaders_INCLUDE_DIR} DIRECTORY)
+   find_path(VulkanRegistry_DIR
+       NAMES vk.xml
+       HINTS "${VULKAN_REGISTRY_PATH_HINT}/share/vulkan/registry")
+endif()
+
+set(VulkanHeaders_INCLUDE_DIRS ${VulkanHeaders_INCLUDE_DIR})
+set(VulkanRegistry_DIRS ${VulkanRegistry_DIR})
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(VulkanHeaders
+    DEFAULT_MSG
+    VulkanHeaders_INCLUDE_DIR)
+find_package_handle_standard_args(VulkanRegistry
+    DEFAULT_MSG
+    VulkanRegistry_DIR)
+
+mark_as_advanced(VulkanHeaders_INCLUDE_DIR VulkanRegistry_DIR)
diff --git a/src/third_party/vulkan-tools/src/cmake/FindWayland.cmake b/src/third_party/vulkan-tools/src/cmake/FindWayland.cmake
new file mode 100644
index 0000000..f93218b
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cmake/FindWayland.cmake
@@ -0,0 +1,66 @@
+# Try to find Wayland on a Unix system
+#
+# This will define:
+#
+#   WAYLAND_FOUND       - True if Wayland is found
+#   WAYLAND_LIBRARIES   - Link these to use Wayland
+#   WAYLAND_INCLUDE_DIR - Include directory for Wayland
+#   WAYLAND_DEFINITIONS - Compiler flags for using Wayland
+#
+# In addition the following more fine grained variables will be defined:
+#
+#   WAYLAND_CLIENT_FOUND  WAYLAND_CLIENT_INCLUDE_DIR  WAYLAND_CLIENT_LIBRARIES
+#   WAYLAND_SERVER_FOUND  WAYLAND_SERVER_INCLUDE_DIR  WAYLAND_SERVER_LIBRARIES
+#   WAYLAND_EGL_FOUND     WAYLAND_EGL_INCLUDE_DIR     WAYLAND_EGL_LIBRARIES
+#
+# Copyright (c) 2013 Martin Gräßlin <mgraesslin@kde.org>
+#
+# Redistribution and use is allowed according to the terms of the BSD license.
+# For details see the accompanying COPYING-CMAKE-SCRIPTS file.
+
+IF (NOT WIN32)
+  IF (WAYLAND_INCLUDE_DIR AND WAYLAND_LIBRARIES)
+    # In the cache already
+    SET(WAYLAND_FIND_QUIETLY TRUE)
+  ENDIF ()
+
+  # Use pkg-config to get the directories and then use these values
+  # in the FIND_PATH() and FIND_LIBRARY() calls
+  FIND_PACKAGE(PkgConfig)
+  PKG_CHECK_MODULES(PKG_WAYLAND QUIET wayland-client wayland-server wayland-egl wayland-cursor)
+
+  SET(WAYLAND_DEFINITIONS ${PKG_WAYLAND_CFLAGS})
+
+  FIND_PATH(WAYLAND_CLIENT_INCLUDE_DIR  NAMES wayland-client.h HINTS ${PKG_WAYLAND_INCLUDE_DIRS})
+  FIND_PATH(WAYLAND_SERVER_INCLUDE_DIR  NAMES wayland-server.h HINTS ${PKG_WAYLAND_INCLUDE_DIRS})
+  FIND_PATH(WAYLAND_EGL_INCLUDE_DIR     NAMES wayland-egl.h    HINTS ${PKG_WAYLAND_INCLUDE_DIRS})
+  FIND_PATH(WAYLAND_CURSOR_INCLUDE_DIR  NAMES wayland-cursor.h HINTS ${PKG_WAYLAND_INCLUDE_DIRS})
+
+  FIND_LIBRARY(WAYLAND_CLIENT_LIBRARIES NAMES wayland-client   HINTS ${PKG_WAYLAND_LIBRARY_DIRS})
+  FIND_LIBRARY(WAYLAND_SERVER_LIBRARIES NAMES wayland-server   HINTS ${PKG_WAYLAND_LIBRARY_DIRS})
+  FIND_LIBRARY(WAYLAND_EGL_LIBRARIES    NAMES wayland-egl      HINTS ${PKG_WAYLAND_LIBRARY_DIRS})
+  FIND_LIBRARY(WAYLAND_CURSOR_LIBRARIES NAMES wayland-cursor   HINTS ${PKG_WAYLAND_LIBRARY_DIRS})
+
+  set(WAYLAND_INCLUDE_DIR ${WAYLAND_CLIENT_INCLUDE_DIR} ${WAYLAND_SERVER_INCLUDE_DIR} ${WAYLAND_EGL_INCLUDE_DIR} ${WAYLAND_CURSOR_INCLUDE_DIR})
+
+  set(WAYLAND_LIBRARIES ${WAYLAND_CLIENT_LIBRARIES} ${WAYLAND_SERVER_LIBRARIES} ${WAYLAND_EGL_LIBRARIES} ${WAYLAND_CURSOR_LIBRARIES})
+
+  list(REMOVE_DUPLICATES WAYLAND_INCLUDE_DIR)
+
+  include(FindPackageHandleStandardArgs)
+
+  FIND_PACKAGE_HANDLE_STANDARD_ARGS(WAYLAND_CLIENT  DEFAULT_MSG  WAYLAND_CLIENT_LIBRARIES  WAYLAND_CLIENT_INCLUDE_DIR)
+  FIND_PACKAGE_HANDLE_STANDARD_ARGS(WAYLAND_SERVER  DEFAULT_MSG  WAYLAND_SERVER_LIBRARIES  WAYLAND_SERVER_INCLUDE_DIR)
+  FIND_PACKAGE_HANDLE_STANDARD_ARGS(WAYLAND_EGL     DEFAULT_MSG  WAYLAND_EGL_LIBRARIES     WAYLAND_EGL_INCLUDE_DIR)
+  FIND_PACKAGE_HANDLE_STANDARD_ARGS(WAYLAND_CURSOR  DEFAULT_MSG  WAYLAND_CURSOR_LIBRARIES  WAYLAND_CURSOR_INCLUDE_DIR)
+  FIND_PACKAGE_HANDLE_STANDARD_ARGS(WAYLAND         DEFAULT_MSG  WAYLAND_LIBRARIES         WAYLAND_INCLUDE_DIR)
+
+  MARK_AS_ADVANCED(
+        WAYLAND_INCLUDE_DIR         WAYLAND_LIBRARIES
+        WAYLAND_CLIENT_INCLUDE_DIR  WAYLAND_CLIENT_LIBRARIES
+        WAYLAND_SERVER_INCLUDE_DIR  WAYLAND_SERVER_LIBRARIES
+        WAYLAND_EGL_INCLUDE_DIR     WAYLAND_EGL_LIBRARIES
+        WAYLAND_CURSOR_INCLUDE_DIR  WAYLAND_CURSOR_LIBRARIES
+  )
+
+ENDIF ()
diff --git a/src/third_party/vulkan-tools/src/cmake/FindX11_XCB.cmake b/src/third_party/vulkan-tools/src/cmake/FindX11_XCB.cmake
new file mode 100644
index 0000000..956bf89
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cmake/FindX11_XCB.cmake
@@ -0,0 +1,32 @@
+# - Try to find libX11-xcb
+# Once done this will define
+#
+# X11_XCB_FOUND - system has libX11-xcb
+# X11_XCB_LIBRARIES - Link these to use libX11-xcb
+# X11_XCB_INCLUDE_DIR - the libX11-xcb include dir
+# X11_XCB_DEFINITIONS - compiler switches required for using libX11-xcb
+
+# Copyright (c) 2011 Fredrik Höglund <fredrik@kde.org>
+# Copyright (c) 2008 Helio Chissini de Castro, <helio@kde.org>
+# Copyright (c) 2007 Matthias Kretz, <kretz@kde.org>
+#
+# Redistribution and use is allowed according to the terms of the BSD license.
+# For details see the accompanying COPYING-CMAKE-SCRIPTS file.
+
+IF (NOT WIN32)
+  # use pkg-config to get the directories and then use these values
+  # in the FIND_PATH() and FIND_LIBRARY() calls
+  FIND_PACKAGE(PkgConfig)
+  PKG_CHECK_MODULES(PKG_X11_XCB QUIET x11-xcb)
+
+  SET(X11_XCB_DEFINITIONS ${PKG_X11_XCB_CFLAGS})
+
+  FIND_PATH(X11_XCB_INCLUDE_DIR NAMES X11/Xlib-xcb.h HINTS ${PKG_X11_XCB_INCLUDE_DIRS})
+  FIND_LIBRARY(X11_XCB_LIBRARIES NAMES X11-xcb HINTS ${PKG_X11_XCB_LIBRARY_DIRS})
+
+  include(FindPackageHandleStandardArgs)
+  FIND_PACKAGE_HANDLE_STANDARD_ARGS(X11_XCB DEFAULT_MSG X11_XCB_LIBRARIES X11_XCB_INCLUDE_DIR)
+
+  MARK_AS_ADVANCED(X11_XCB_INCLUDE_DIR X11_XCB_LIBRARIES)
+ENDIF (NOT WIN32)
+
diff --git a/src/third_party/vulkan-tools/src/cmake/FindXCB.cmake b/src/third_party/vulkan-tools/src/cmake/FindXCB.cmake
new file mode 100644
index 0000000..2311591
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cmake/FindXCB.cmake
@@ -0,0 +1,51 @@
+# - FindXCB
+#
+# Copyright (C) 2015 Valve Corporation
+
+find_package(PkgConfig)
+
+if(NOT XCB_FIND_COMPONENTS)
+    set(XCB_FIND_COMPONENTS xcb)
+endif()
+
+include(FindPackageHandleStandardArgs)
+set(XCB_FOUND true)
+set(XCB_INCLUDE_DIRS "")
+set(XCB_LIBRARIES "")
+foreach(comp ${XCB_FIND_COMPONENTS})
+    # component name
+    string(TOUPPER ${comp} compname)
+    string(REPLACE "-" "_" compname ${compname})
+    # header name
+    string(REPLACE "xcb-" "" headername xcb/${comp}.h)
+    # library name
+    set(libname ${comp})
+
+    pkg_check_modules(PC_${comp} QUIET ${comp})
+
+    find_path(${compname}_INCLUDE_DIR NAMES ${headername}
+        HINTS
+        ${PC_${comp}_INCLUDEDIR}
+        ${PC_${comp}_INCLUDE_DIRS}
+        )
+
+    find_library(${compname}_LIBRARY NAMES ${libname}
+        HINTS
+        ${PC_${comp}_LIBDIR}
+        ${PC_${comp}_LIBRARY_DIRS}
+        )
+
+    find_package_handle_standard_args(${comp}
+        FOUND_VAR ${comp}_FOUND
+        REQUIRED_VARS ${compname}_INCLUDE_DIR ${compname}_LIBRARY)
+    mark_as_advanced(${compname}_INCLUDE_DIR ${compname}_LIBRARY)
+
+    list(APPEND XCB_INCLUDE_DIRS ${${compname}_INCLUDE_DIR})
+    list(APPEND XCB_LIBRARIES ${${compname}_LIBRARY})
+
+    if(NOT ${comp}_FOUND)
+        set(XCB_FOUND false)
+    endif()
+endforeach()
+
+list(REMOVE_DUPLICATES XCB_INCLUDE_DIRS)
diff --git a/src/third_party/vulkan-tools/src/cmake/cmake_uninstall.cmake.in b/src/third_party/vulkan-tools/src/cmake/cmake_uninstall.cmake.in
new file mode 100644
index 0000000..2037e36
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cmake/cmake_uninstall.cmake.in
@@ -0,0 +1,21 @@
+if(NOT EXISTS "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt")
+  message(FATAL_ERROR "Cannot find install manifest: @CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt")
+endif(NOT EXISTS "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt")
+
+file(READ "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt" files)
+string(REGEX REPLACE "\n" ";" files "${files}")
+foreach(file ${files})
+  message(STATUS "Uninstalling $ENV{DESTDIR}${file}")
+  if(IS_SYMLINK "$ENV{DESTDIR}${file}" OR EXISTS "$ENV{DESTDIR}${file}")
+    exec_program(
+      "@CMAKE_COMMAND@" ARGS "-E remove \"$ENV{DESTDIR}${file}\""
+      OUTPUT_VARIABLE rm_out
+      RETURN_VALUE rm_retval
+      )
+    if(NOT "${rm_retval}" STREQUAL 0)
+      message(FATAL_ERROR "Problem when removing $ENV{DESTDIR}${file}")
+    endif(NOT "${rm_retval}" STREQUAL 0)
+  else(IS_SYMLINK "$ENV{DESTDIR}${file}" OR EXISTS "$ENV{DESTDIR}${file}")
+    message(STATUS "File $ENV{DESTDIR}${file} does not exist.")
+  endif(IS_SYMLINK "$ENV{DESTDIR}${file}" OR EXISTS "$ENV{DESTDIR}${file}")
+endforeach(file)
diff --git a/src/third_party/vulkan-tools/src/common/README.md b/src/third_party/vulkan-tools/src/common/README.md
new file mode 100644
index 0000000..3e496be
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/common/README.md
@@ -0,0 +1,28 @@
+# Generating vulkan_wrapper
+
+The script for generating the vulkan_wrapper is based on the work done by @olvaffe for Hologram's dispatch table:
+https://github.com/LunarG/VulkanSamples/blob/master/Sample-Programs/Hologram/generate-dispatch-table
+
+The wrapper is used to support applications that don't want to link directly against libvulkan.so, which is not
+present on Android before 7.0 (Nougat).
+
+The wrapper only contains core functions and WSI extensions.
+
+To regenerate the wrappers:
+
+    cd scripts
+    pushd ../Vulkan-Headers/include/vulkan
+    ../../../scripts/generate_vulkan_wrapper.py parse vulkan.h > parsed_header
+
+Copy the contents of parsed_header into generate_vulkan_wrapper.py between the two marker lines that read:
+
+    # generated by "generate_vulkan_wrapper.py parse vulkan.h"
+    # end of generated code
+
+Then run the script:
+
+    popd
+    ./generate_vulkan_wrapper.py ../common/vulkan_wrapper.h
+    ./generate_vulkan_wrapper.py ../common/vulkan_wrapper.cpp
+    clang-format -i vulkan_wrapper.h
+    clang-format -i vulkan_wrapper.cpp
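
Since the wrapper replaces a direct link against libvulkan.so, an application is expected to call InitVulkan() (defined in common/vulkan_wrapper.cpp below) before touching any Vulkan entry point. A minimal sketch of that call pattern, assuming vulkan_wrapper.h pulls in the core Vulkan headers (as the upstream header does) and using a hypothetical initialize_renderer() helper:

    // Sketch only: gate all Vulkan usage on InitVulkan(), which dlopen()s
    // libvulkan.so and resolves the function pointers declared in vulkan_wrapper.h.
    #include "vulkan_wrapper.h"

    bool initialize_renderer() {
        if (!InitVulkan()) {
            // libvulkan.so is unavailable (e.g. Android before 7.0); fall back or abort.
            return false;
        }
        VkApplicationInfo app_info = {};
        app_info.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
        app_info.apiVersion = VK_API_VERSION_1_0;

        VkInstanceCreateInfo create_info = {};
        create_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
        create_info.pApplicationInfo = &app_info;

        VkInstance instance = VK_NULL_HANDLE;
        return vkCreateInstance(&create_info, nullptr, &instance) == VK_SUCCESS;
    }
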
diff --git a/src/third_party/vulkan-tools/src/common/android_util.cpp b/src/third_party/vulkan-tools/src/common/android_util.cpp
new file mode 100644
index 0000000..173e8ab
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/common/android_util.cpp
@@ -0,0 +1,84 @@
+/*
+ * Copyright (c) 2016 Google, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Relicensed from the WTFPL (http://www.wtfpl.net/faq/).
+ */
+
+#include "android_util.h"
+#include <android_native_app_glue.h>
+#include <cassert>
+#include <cstring>
+#include <vector>
+#include <string>
+#include <sstream>
+#include <stdlib.h>
+
+extern "C" {
+
+// Convert Intents to arg list, returning argc and argv
+// Note that this C routine mallocs memory that the caller must free
+char **get_args(struct android_app *app, const char *intent_extra_data_key, const char *appTag, int *count) {
+    std::vector<std::string> args;
+    JavaVM &vm = *app->activity->vm;
+    JNIEnv *p_env;
+    if (vm.AttachCurrentThread(&p_env, nullptr) != JNI_OK) return nullptr;
+
+    JNIEnv &env = *p_env;
+    jobject activity = app->activity->clazz;
+    jmethodID get_intent_method = env.GetMethodID(env.GetObjectClass(activity), "getIntent", "()Landroid/content/Intent;");
+    jobject intent = env.CallObjectMethod(activity, get_intent_method);
+    jmethodID get_string_extra_method =
+        env.GetMethodID(env.GetObjectClass(intent), "getStringExtra", "(Ljava/lang/String;)Ljava/lang/String;");
+    jvalue get_string_extra_args;
+    get_string_extra_args.l = env.NewStringUTF(intent_extra_data_key);
+    jstring extra_str = static_cast<jstring>(env.CallObjectMethodA(intent, get_string_extra_method, &get_string_extra_args));
+
+    std::string args_str;
+    if (extra_str) {
+        const char *extra_utf = env.GetStringUTFChars(extra_str, nullptr);
+        args_str = extra_utf;
+        env.ReleaseStringUTFChars(extra_str, extra_utf);
+        env.DeleteLocalRef(extra_str);
+    }
+
+    env.DeleteLocalRef(get_string_extra_args.l);
+    env.DeleteLocalRef(intent);
+    vm.DetachCurrentThread();
+
+    // split args_str
+    std::stringstream ss(args_str);
+    std::string arg;
+    while (std::getline(ss, arg, ' ')) {
+        if (!arg.empty()) args.push_back(arg);
+    }
+
+    // Convert our STL results to C friendly constructs
+    assert(count != nullptr);
+    *count = args.size() + 1;
+    char **vector = (char **)malloc(*count * sizeof(char *));
+    const char *appName = appTag ? appTag : (const char *)"appTag";
+
+    // Allocate strlen() + 1 bytes so strcpy() has room for the terminating NUL.
+    vector[0] = (char *)malloc((strlen(appName) + 1) * sizeof(char));
+    strcpy(vector[0], appName);
+
+    for (uint32_t i = 0; i < args.size(); i++) {
+        vector[i + 1] = (char *)malloc((strlen(args[i].c_str()) + 1) * sizeof(char));
+        strcpy(vector[i + 1], args[i].c_str());
+    }
+
+    return vector;
+}
+
+}  // extern "C"
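
As the comment above notes, get_args() returns heap-allocated strings that the caller must free. A hedged usage sketch from an android_native_app_glue entry point; the "args" intent-extra key and the "vkcube" tag are illustrative values, not part of this patch:

    #include <android_native_app_glue.h>
    #include <cstdlib>
    #include "android_util.h"

    void android_main(struct android_app *app) {
        int argc = 0;
        // argv[0] is the app tag; argv[1..argc-1] come from the intent extra.
        char **argv = get_args(app, "args", "vkcube", &argc);
        if (argv != nullptr) {
            // ... parse argv like a normal command line ...
            for (int i = 0; i < argc; ++i) free(argv[i]);
            free(argv);
        }
        // ... run the application ...
    }
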
diff --git a/src/third_party/vulkan-tools/src/common/android_util.h b/src/third_party/vulkan-tools/src/common/android_util.h
new file mode 100644
index 0000000..e896645
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/common/android_util.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (c) 2016 Google, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Relicensed from the WTFPL (http://www.wtfpl.net/faq/).
+ */
+
+#ifndef ANDROID_UTIL_H
+#define ANDROID_UTIL_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+char **get_args(struct android_app *app, const char *intent_extra_data_key, const char *appTag, int *count);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/src/third_party/vulkan-tools/src/common/vulkan_wrapper.cpp b/src/third_party/vulkan-tools/src/common/vulkan_wrapper.cpp
new file mode 100644
index 0000000..229f58c
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/common/vulkan_wrapper.cpp
@@ -0,0 +1,796 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// This file is generated.
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "vulkan_wrapper.h"
+#include <dlfcn.h>
+
+int InitVulkan(void) {
+    void* libvulkan = dlopen("libvulkan.so", RTLD_NOW | RTLD_LOCAL);
+    if (!libvulkan) return 0;
+
+    // Vulkan supported, set function addresses
+    vkCreateInstance = reinterpret_cast<PFN_vkCreateInstance>(dlsym(libvulkan, "vkCreateInstance"));
+    vkDestroyInstance = reinterpret_cast<PFN_vkDestroyInstance>(dlsym(libvulkan, "vkDestroyInstance"));
+    vkEnumeratePhysicalDevices = reinterpret_cast<PFN_vkEnumeratePhysicalDevices>(dlsym(libvulkan, "vkEnumeratePhysicalDevices"));
+    vkGetPhysicalDeviceFeatures =
+        reinterpret_cast<PFN_vkGetPhysicalDeviceFeatures>(dlsym(libvulkan, "vkGetPhysicalDeviceFeatures"));
+    vkGetPhysicalDeviceFormatProperties =
+        reinterpret_cast<PFN_vkGetPhysicalDeviceFormatProperties>(dlsym(libvulkan, "vkGetPhysicalDeviceFormatProperties"));
+    vkGetPhysicalDeviceImageFormatProperties = reinterpret_cast<PFN_vkGetPhysicalDeviceImageFormatProperties>(
+        dlsym(libvulkan, "vkGetPhysicalDeviceImageFormatProperties"));
+    vkGetPhysicalDeviceProperties =
+        reinterpret_cast<PFN_vkGetPhysicalDeviceProperties>(dlsym(libvulkan, "vkGetPhysicalDeviceProperties"));
+    vkGetPhysicalDeviceQueueFamilyProperties = reinterpret_cast<PFN_vkGetPhysicalDeviceQueueFamilyProperties>(
+        dlsym(libvulkan, "vkGetPhysicalDeviceQueueFamilyProperties"));
+    vkGetPhysicalDeviceMemoryProperties =
+        reinterpret_cast<PFN_vkGetPhysicalDeviceMemoryProperties>(dlsym(libvulkan, "vkGetPhysicalDeviceMemoryProperties"));
+    vkGetInstanceProcAddr = reinterpret_cast<PFN_vkGetInstanceProcAddr>(dlsym(libvulkan, "vkGetInstanceProcAddr"));
+    vkGetDeviceProcAddr = reinterpret_cast<PFN_vkGetDeviceProcAddr>(dlsym(libvulkan, "vkGetDeviceProcAddr"));
+    vkCreateDevice = reinterpret_cast<PFN_vkCreateDevice>(dlsym(libvulkan, "vkCreateDevice"));
+    vkDestroyDevice = reinterpret_cast<PFN_vkDestroyDevice>(dlsym(libvulkan, "vkDestroyDevice"));
+    vkEnumerateInstanceExtensionProperties =
+        reinterpret_cast<PFN_vkEnumerateInstanceExtensionProperties>(dlsym(libvulkan, "vkEnumerateInstanceExtensionProperties"));
+    vkEnumerateDeviceExtensionProperties =
+        reinterpret_cast<PFN_vkEnumerateDeviceExtensionProperties>(dlsym(libvulkan, "vkEnumerateDeviceExtensionProperties"));
+    vkEnumerateInstanceLayerProperties =
+        reinterpret_cast<PFN_vkEnumerateInstanceLayerProperties>(dlsym(libvulkan, "vkEnumerateInstanceLayerProperties"));
+    vkEnumerateDeviceLayerProperties =
+        reinterpret_cast<PFN_vkEnumerateDeviceLayerProperties>(dlsym(libvulkan, "vkEnumerateDeviceLayerProperties"));
+    vkGetDeviceQueue = reinterpret_cast<PFN_vkGetDeviceQueue>(dlsym(libvulkan, "vkGetDeviceQueue"));
+    vkQueueSubmit = reinterpret_cast<PFN_vkQueueSubmit>(dlsym(libvulkan, "vkQueueSubmit"));
+    vkQueueWaitIdle = reinterpret_cast<PFN_vkQueueWaitIdle>(dlsym(libvulkan, "vkQueueWaitIdle"));
+    vkDeviceWaitIdle = reinterpret_cast<PFN_vkDeviceWaitIdle>(dlsym(libvulkan, "vkDeviceWaitIdle"));
+    vkAllocateMemory = reinterpret_cast<PFN_vkAllocateMemory>(dlsym(libvulkan, "vkAllocateMemory"));
+    vkFreeMemory = reinterpret_cast<PFN_vkFreeMemory>(dlsym(libvulkan, "vkFreeMemory"));
+    vkMapMemory = reinterpret_cast<PFN_vkMapMemory>(dlsym(libvulkan, "vkMapMemory"));
+    vkUnmapMemory = reinterpret_cast<PFN_vkUnmapMemory>(dlsym(libvulkan, "vkUnmapMemory"));
+    vkFlushMappedMemoryRanges = reinterpret_cast<PFN_vkFlushMappedMemoryRanges>(dlsym(libvulkan, "vkFlushMappedMemoryRanges"));
+    vkInvalidateMappedMemoryRanges =
+        reinterpret_cast<PFN_vkInvalidateMappedMemoryRanges>(dlsym(libvulkan, "vkInvalidateMappedMemoryRanges"));
+    vkGetDeviceMemoryCommitment =
+        reinterpret_cast<PFN_vkGetDeviceMemoryCommitment>(dlsym(libvulkan, "vkGetDeviceMemoryCommitment"));
+    vkBindBufferMemory = reinterpret_cast<PFN_vkBindBufferMemory>(dlsym(libvulkan, "vkBindBufferMemory"));
+    vkBindImageMemory = reinterpret_cast<PFN_vkBindImageMemory>(dlsym(libvulkan, "vkBindImageMemory"));
+    vkGetBufferMemoryRequirements =
+        reinterpret_cast<PFN_vkGetBufferMemoryRequirements>(dlsym(libvulkan, "vkGetBufferMemoryRequirements"));
+    vkGetImageMemoryRequirements =
+        reinterpret_cast<PFN_vkGetImageMemoryRequirements>(dlsym(libvulkan, "vkGetImageMemoryRequirements"));
+    vkGetImageSparseMemoryRequirements =
+        reinterpret_cast<PFN_vkGetImageSparseMemoryRequirements>(dlsym(libvulkan, "vkGetImageSparseMemoryRequirements"));
+    vkGetPhysicalDeviceSparseImageFormatProperties = reinterpret_cast<PFN_vkGetPhysicalDeviceSparseImageFormatProperties>(
+        dlsym(libvulkan, "vkGetPhysicalDeviceSparseImageFormatProperties"));
+    vkQueueBindSparse = reinterpret_cast<PFN_vkQueueBindSparse>(dlsym(libvulkan, "vkQueueBindSparse"));
+    vkCreateFence = reinterpret_cast<PFN_vkCreateFence>(dlsym(libvulkan, "vkCreateFence"));
+    vkDestroyFence = reinterpret_cast<PFN_vkDestroyFence>(dlsym(libvulkan, "vkDestroyFence"));
+    vkResetFences = reinterpret_cast<PFN_vkResetFences>(dlsym(libvulkan, "vkResetFences"));
+    vkGetFenceStatus = reinterpret_cast<PFN_vkGetFenceStatus>(dlsym(libvulkan, "vkGetFenceStatus"));
+    vkWaitForFences = reinterpret_cast<PFN_vkWaitForFences>(dlsym(libvulkan, "vkWaitForFences"));
+    vkCreateSemaphore = reinterpret_cast<PFN_vkCreateSemaphore>(dlsym(libvulkan, "vkCreateSemaphore"));
+    vkDestroySemaphore = reinterpret_cast<PFN_vkDestroySemaphore>(dlsym(libvulkan, "vkDestroySemaphore"));
+    vkCreateEvent = reinterpret_cast<PFN_vkCreateEvent>(dlsym(libvulkan, "vkCreateEvent"));
+    vkDestroyEvent = reinterpret_cast<PFN_vkDestroyEvent>(dlsym(libvulkan, "vkDestroyEvent"));
+    vkGetEventStatus = reinterpret_cast<PFN_vkGetEventStatus>(dlsym(libvulkan, "vkGetEventStatus"));
+    vkSetEvent = reinterpret_cast<PFN_vkSetEvent>(dlsym(libvulkan, "vkSetEvent"));
+    vkResetEvent = reinterpret_cast<PFN_vkResetEvent>(dlsym(libvulkan, "vkResetEvent"));
+    vkCreateQueryPool = reinterpret_cast<PFN_vkCreateQueryPool>(dlsym(libvulkan, "vkCreateQueryPool"));
+    vkDestroyQueryPool = reinterpret_cast<PFN_vkDestroyQueryPool>(dlsym(libvulkan, "vkDestroyQueryPool"));
+    vkGetQueryPoolResults = reinterpret_cast<PFN_vkGetQueryPoolResults>(dlsym(libvulkan, "vkGetQueryPoolResults"));
+    vkCreateBuffer = reinterpret_cast<PFN_vkCreateBuffer>(dlsym(libvulkan, "vkCreateBuffer"));
+    vkDestroyBuffer = reinterpret_cast<PFN_vkDestroyBuffer>(dlsym(libvulkan, "vkDestroyBuffer"));
+    vkCreateBufferView = reinterpret_cast<PFN_vkCreateBufferView>(dlsym(libvulkan, "vkCreateBufferView"));
+    vkDestroyBufferView = reinterpret_cast<PFN_vkDestroyBufferView>(dlsym(libvulkan, "vkDestroyBufferView"));
+    vkCreateImage = reinterpret_cast<PFN_vkCreateImage>(dlsym(libvulkan, "vkCreateImage"));
+    vkDestroyImage = reinterpret_cast<PFN_vkDestroyImage>(dlsym(libvulkan, "vkDestroyImage"));
+    vkGetImageSubresourceLayout =
+        reinterpret_cast<PFN_vkGetImageSubresourceLayout>(dlsym(libvulkan, "vkGetImageSubresourceLayout"));
+    vkCreateImageView = reinterpret_cast<PFN_vkCreateImageView>(dlsym(libvulkan, "vkCreateImageView"));
+    vkDestroyImageView = reinterpret_cast<PFN_vkDestroyImageView>(dlsym(libvulkan, "vkDestroyImageView"));
+    vkCreateShaderModule = reinterpret_cast<PFN_vkCreateShaderModule>(dlsym(libvulkan, "vkCreateShaderModule"));
+    vkDestroyShaderModule = reinterpret_cast<PFN_vkDestroyShaderModule>(dlsym(libvulkan, "vkDestroyShaderModule"));
+    vkCreatePipelineCache = reinterpret_cast<PFN_vkCreatePipelineCache>(dlsym(libvulkan, "vkCreatePipelineCache"));
+    vkDestroyPipelineCache = reinterpret_cast<PFN_vkDestroyPipelineCache>(dlsym(libvulkan, "vkDestroyPipelineCache"));
+    vkGetPipelineCacheData = reinterpret_cast<PFN_vkGetPipelineCacheData>(dlsym(libvulkan, "vkGetPipelineCacheData"));
+    vkMergePipelineCaches = reinterpret_cast<PFN_vkMergePipelineCaches>(dlsym(libvulkan, "vkMergePipelineCaches"));
+    vkCreateGraphicsPipelines = reinterpret_cast<PFN_vkCreateGraphicsPipelines>(dlsym(libvulkan, "vkCreateGraphicsPipelines"));
+    vkCreateComputePipelines = reinterpret_cast<PFN_vkCreateComputePipelines>(dlsym(libvulkan, "vkCreateComputePipelines"));
+    vkDestroyPipeline = reinterpret_cast<PFN_vkDestroyPipeline>(dlsym(libvulkan, "vkDestroyPipeline"));
+    vkCreatePipelineLayout = reinterpret_cast<PFN_vkCreatePipelineLayout>(dlsym(libvulkan, "vkCreatePipelineLayout"));
+    vkDestroyPipelineLayout = reinterpret_cast<PFN_vkDestroyPipelineLayout>(dlsym(libvulkan, "vkDestroyPipelineLayout"));
+    vkCreateSampler = reinterpret_cast<PFN_vkCreateSampler>(dlsym(libvulkan, "vkCreateSampler"));
+    vkDestroySampler = reinterpret_cast<PFN_vkDestroySampler>(dlsym(libvulkan, "vkDestroySampler"));
+    vkCreateDescriptorSetLayout =
+        reinterpret_cast<PFN_vkCreateDescriptorSetLayout>(dlsym(libvulkan, "vkCreateDescriptorSetLayout"));
+    vkDestroyDescriptorSetLayout =
+        reinterpret_cast<PFN_vkDestroyDescriptorSetLayout>(dlsym(libvulkan, "vkDestroyDescriptorSetLayout"));
+    vkCreateDescriptorPool = reinterpret_cast<PFN_vkCreateDescriptorPool>(dlsym(libvulkan, "vkCreateDescriptorPool"));
+    vkDestroyDescriptorPool = reinterpret_cast<PFN_vkDestroyDescriptorPool>(dlsym(libvulkan, "vkDestroyDescriptorPool"));
+    vkResetDescriptorPool = reinterpret_cast<PFN_vkResetDescriptorPool>(dlsym(libvulkan, "vkResetDescriptorPool"));
+    vkAllocateDescriptorSets = reinterpret_cast<PFN_vkAllocateDescriptorSets>(dlsym(libvulkan, "vkAllocateDescriptorSets"));
+    vkFreeDescriptorSets = reinterpret_cast<PFN_vkFreeDescriptorSets>(dlsym(libvulkan, "vkFreeDescriptorSets"));
+    vkUpdateDescriptorSets = reinterpret_cast<PFN_vkUpdateDescriptorSets>(dlsym(libvulkan, "vkUpdateDescriptorSets"));
+    vkCreateFramebuffer = reinterpret_cast<PFN_vkCreateFramebuffer>(dlsym(libvulkan, "vkCreateFramebuffer"));
+    vkDestroyFramebuffer = reinterpret_cast<PFN_vkDestroyFramebuffer>(dlsym(libvulkan, "vkDestroyFramebuffer"));
+    vkCreateRenderPass = reinterpret_cast<PFN_vkCreateRenderPass>(dlsym(libvulkan, "vkCreateRenderPass"));
+    vkDestroyRenderPass = reinterpret_cast<PFN_vkDestroyRenderPass>(dlsym(libvulkan, "vkDestroyRenderPass"));
+    vkGetRenderAreaGranularity = reinterpret_cast<PFN_vkGetRenderAreaGranularity>(dlsym(libvulkan, "vkGetRenderAreaGranularity"));
+    vkCreateCommandPool = reinterpret_cast<PFN_vkCreateCommandPool>(dlsym(libvulkan, "vkCreateCommandPool"));
+    vkDestroyCommandPool = reinterpret_cast<PFN_vkDestroyCommandPool>(dlsym(libvulkan, "vkDestroyCommandPool"));
+    vkResetCommandPool = reinterpret_cast<PFN_vkResetCommandPool>(dlsym(libvulkan, "vkResetCommandPool"));
+    vkAllocateCommandBuffers = reinterpret_cast<PFN_vkAllocateCommandBuffers>(dlsym(libvulkan, "vkAllocateCommandBuffers"));
+    vkFreeCommandBuffers = reinterpret_cast<PFN_vkFreeCommandBuffers>(dlsym(libvulkan, "vkFreeCommandBuffers"));
+    vkBeginCommandBuffer = reinterpret_cast<PFN_vkBeginCommandBuffer>(dlsym(libvulkan, "vkBeginCommandBuffer"));
+    vkEndCommandBuffer = reinterpret_cast<PFN_vkEndCommandBuffer>(dlsym(libvulkan, "vkEndCommandBuffer"));
+    vkResetCommandBuffer = reinterpret_cast<PFN_vkResetCommandBuffer>(dlsym(libvulkan, "vkResetCommandBuffer"));
+    vkCmdBindPipeline = reinterpret_cast<PFN_vkCmdBindPipeline>(dlsym(libvulkan, "vkCmdBindPipeline"));
+    vkCmdSetViewport = reinterpret_cast<PFN_vkCmdSetViewport>(dlsym(libvulkan, "vkCmdSetViewport"));
+    vkCmdSetScissor = reinterpret_cast<PFN_vkCmdSetScissor>(dlsym(libvulkan, "vkCmdSetScissor"));
+    vkCmdSetLineWidth = reinterpret_cast<PFN_vkCmdSetLineWidth>(dlsym(libvulkan, "vkCmdSetLineWidth"));
+    vkCmdSetDepthBias = reinterpret_cast<PFN_vkCmdSetDepthBias>(dlsym(libvulkan, "vkCmdSetDepthBias"));
+    vkCmdSetBlendConstants = reinterpret_cast<PFN_vkCmdSetBlendConstants>(dlsym(libvulkan, "vkCmdSetBlendConstants"));
+    vkCmdSetDepthBounds = reinterpret_cast<PFN_vkCmdSetDepthBounds>(dlsym(libvulkan, "vkCmdSetDepthBounds"));
+    vkCmdSetStencilCompareMask = reinterpret_cast<PFN_vkCmdSetStencilCompareMask>(dlsym(libvulkan, "vkCmdSetStencilCompareMask"));
+    vkCmdSetStencilWriteMask = reinterpret_cast<PFN_vkCmdSetStencilWriteMask>(dlsym(libvulkan, "vkCmdSetStencilWriteMask"));
+    vkCmdSetStencilReference = reinterpret_cast<PFN_vkCmdSetStencilReference>(dlsym(libvulkan, "vkCmdSetStencilReference"));
+    vkCmdBindDescriptorSets = reinterpret_cast<PFN_vkCmdBindDescriptorSets>(dlsym(libvulkan, "vkCmdBindDescriptorSets"));
+    vkCmdBindIndexBuffer = reinterpret_cast<PFN_vkCmdBindIndexBuffer>(dlsym(libvulkan, "vkCmdBindIndexBuffer"));
+    vkCmdBindVertexBuffers = reinterpret_cast<PFN_vkCmdBindVertexBuffers>(dlsym(libvulkan, "vkCmdBindVertexBuffers"));
+    vkCmdDraw = reinterpret_cast<PFN_vkCmdDraw>(dlsym(libvulkan, "vkCmdDraw"));
+    vkCmdDrawIndexed = reinterpret_cast<PFN_vkCmdDrawIndexed>(dlsym(libvulkan, "vkCmdDrawIndexed"));
+    vkCmdDrawIndirect = reinterpret_cast<PFN_vkCmdDrawIndirect>(dlsym(libvulkan, "vkCmdDrawIndirect"));
+    vkCmdDrawIndexedIndirect = reinterpret_cast<PFN_vkCmdDrawIndexedIndirect>(dlsym(libvulkan, "vkCmdDrawIndexedIndirect"));
+    vkCmdDispatch = reinterpret_cast<PFN_vkCmdDispatch>(dlsym(libvulkan, "vkCmdDispatch"));
+    vkCmdDispatchIndirect = reinterpret_cast<PFN_vkCmdDispatchIndirect>(dlsym(libvulkan, "vkCmdDispatchIndirect"));
+    vkCmdCopyBuffer = reinterpret_cast<PFN_vkCmdCopyBuffer>(dlsym(libvulkan, "vkCmdCopyBuffer"));
+    vkCmdCopyImage = reinterpret_cast<PFN_vkCmdCopyImage>(dlsym(libvulkan, "vkCmdCopyImage"));
+    vkCmdBlitImage = reinterpret_cast<PFN_vkCmdBlitImage>(dlsym(libvulkan, "vkCmdBlitImage"));
+    vkCmdCopyBufferToImage = reinterpret_cast<PFN_vkCmdCopyBufferToImage>(dlsym(libvulkan, "vkCmdCopyBufferToImage"));
+    vkCmdCopyImageToBuffer = reinterpret_cast<PFN_vkCmdCopyImageToBuffer>(dlsym(libvulkan, "vkCmdCopyImageToBuffer"));
+    vkCmdUpdateBuffer = reinterpret_cast<PFN_vkCmdUpdateBuffer>(dlsym(libvulkan, "vkCmdUpdateBuffer"));
+    vkCmdFillBuffer = reinterpret_cast<PFN_vkCmdFillBuffer>(dlsym(libvulkan, "vkCmdFillBuffer"));
+    vkCmdClearColorImage = reinterpret_cast<PFN_vkCmdClearColorImage>(dlsym(libvulkan, "vkCmdClearColorImage"));
+    vkCmdClearDepthStencilImage =
+        reinterpret_cast<PFN_vkCmdClearDepthStencilImage>(dlsym(libvulkan, "vkCmdClearDepthStencilImage"));
+    vkCmdClearAttachments = reinterpret_cast<PFN_vkCmdClearAttachments>(dlsym(libvulkan, "vkCmdClearAttachments"));
+    vkCmdResolveImage = reinterpret_cast<PFN_vkCmdResolveImage>(dlsym(libvulkan, "vkCmdResolveImage"));
+    vkCmdSetEvent = reinterpret_cast<PFN_vkCmdSetEvent>(dlsym(libvulkan, "vkCmdSetEvent"));
+    vkCmdResetEvent = reinterpret_cast<PFN_vkCmdResetEvent>(dlsym(libvulkan, "vkCmdResetEvent"));
+    vkCmdWaitEvents = reinterpret_cast<PFN_vkCmdWaitEvents>(dlsym(libvulkan, "vkCmdWaitEvents"));
+    vkCmdPipelineBarrier = reinterpret_cast<PFN_vkCmdPipelineBarrier>(dlsym(libvulkan, "vkCmdPipelineBarrier"));
+    vkCmdBeginQuery = reinterpret_cast<PFN_vkCmdBeginQuery>(dlsym(libvulkan, "vkCmdBeginQuery"));
+    vkCmdEndQuery = reinterpret_cast<PFN_vkCmdEndQuery>(dlsym(libvulkan, "vkCmdEndQuery"));
+    vkCmdResetQueryPool = reinterpret_cast<PFN_vkCmdResetQueryPool>(dlsym(libvulkan, "vkCmdResetQueryPool"));
+    vkCmdWriteTimestamp = reinterpret_cast<PFN_vkCmdWriteTimestamp>(dlsym(libvulkan, "vkCmdWriteTimestamp"));
+    vkCmdCopyQueryPoolResults = reinterpret_cast<PFN_vkCmdCopyQueryPoolResults>(dlsym(libvulkan, "vkCmdCopyQueryPoolResults"));
+    vkCmdPushConstants = reinterpret_cast<PFN_vkCmdPushConstants>(dlsym(libvulkan, "vkCmdPushConstants"));
+    vkCmdBeginRenderPass = reinterpret_cast<PFN_vkCmdBeginRenderPass>(dlsym(libvulkan, "vkCmdBeginRenderPass"));
+    vkCmdNextSubpass = reinterpret_cast<PFN_vkCmdNextSubpass>(dlsym(libvulkan, "vkCmdNextSubpass"));
+    vkCmdEndRenderPass = reinterpret_cast<PFN_vkCmdEndRenderPass>(dlsym(libvulkan, "vkCmdEndRenderPass"));
+    vkCmdExecuteCommands = reinterpret_cast<PFN_vkCmdExecuteCommands>(dlsym(libvulkan, "vkCmdExecuteCommands"));
+    vkEnumerateInstanceVersion = reinterpret_cast<PFN_vkEnumerateInstanceVersion>(dlsym(libvulkan, "vkEnumerateInstanceVersion"));
+    vkBindBufferMemory2 = reinterpret_cast<PFN_vkBindBufferMemory2>(dlsym(libvulkan, "vkBindBufferMemory2"));
+    vkBindImageMemory2 = reinterpret_cast<PFN_vkBindImageMemory2>(dlsym(libvulkan, "vkBindImageMemory2"));
+    vkGetDeviceGroupPeerMemoryFeatures =
+        reinterpret_cast<PFN_vkGetDeviceGroupPeerMemoryFeatures>(dlsym(libvulkan, "vkGetDeviceGroupPeerMemoryFeatures"));
+    vkCmdSetDeviceMask = reinterpret_cast<PFN_vkCmdSetDeviceMask>(dlsym(libvulkan, "vkCmdSetDeviceMask"));
+    vkCmdDispatchBase = reinterpret_cast<PFN_vkCmdDispatchBase>(dlsym(libvulkan, "vkCmdDispatchBase"));
+    vkEnumeratePhysicalDeviceGroups =
+        reinterpret_cast<PFN_vkEnumeratePhysicalDeviceGroups>(dlsym(libvulkan, "vkEnumeratePhysicalDeviceGroups"));
+    vkGetImageMemoryRequirements2 =
+        reinterpret_cast<PFN_vkGetImageMemoryRequirements2>(dlsym(libvulkan, "vkGetImageMemoryRequirements2"));
+    vkGetBufferMemoryRequirements2 =
+        reinterpret_cast<PFN_vkGetBufferMemoryRequirements2>(dlsym(libvulkan, "vkGetBufferMemoryRequirements2"));
+    vkGetImageSparseMemoryRequirements2 =
+        reinterpret_cast<PFN_vkGetImageSparseMemoryRequirements2>(dlsym(libvulkan, "vkGetImageSparseMemoryRequirements2"));
+    vkGetPhysicalDeviceFeatures2 =
+        reinterpret_cast<PFN_vkGetPhysicalDeviceFeatures2>(dlsym(libvulkan, "vkGetPhysicalDeviceFeatures2"));
+    vkGetPhysicalDeviceProperties2 =
+        reinterpret_cast<PFN_vkGetPhysicalDeviceProperties2>(dlsym(libvulkan, "vkGetPhysicalDeviceProperties2"));
+    vkGetPhysicalDeviceFormatProperties2 =
+        reinterpret_cast<PFN_vkGetPhysicalDeviceFormatProperties2>(dlsym(libvulkan, "vkGetPhysicalDeviceFormatProperties2"));
+    vkGetPhysicalDeviceImageFormatProperties2 = reinterpret_cast<PFN_vkGetPhysicalDeviceImageFormatProperties2>(
+        dlsym(libvulkan, "vkGetPhysicalDeviceImageFormatProperties2"));
+    vkGetPhysicalDeviceQueueFamilyProperties2 = reinterpret_cast<PFN_vkGetPhysicalDeviceQueueFamilyProperties2>(
+        dlsym(libvulkan, "vkGetPhysicalDeviceQueueFamilyProperties2"));
+    vkGetPhysicalDeviceMemoryProperties2 =
+        reinterpret_cast<PFN_vkGetPhysicalDeviceMemoryProperties2>(dlsym(libvulkan, "vkGetPhysicalDeviceMemoryProperties2"));
+    vkGetPhysicalDeviceSparseImageFormatProperties2 = reinterpret_cast<PFN_vkGetPhysicalDeviceSparseImageFormatProperties2>(
+        dlsym(libvulkan, "vkGetPhysicalDeviceSparseImageFormatProperties2"));
+    vkTrimCommandPool = reinterpret_cast<PFN_vkTrimCommandPool>(dlsym(libvulkan, "vkTrimCommandPool"));
+    vkGetDeviceQueue2 = reinterpret_cast<PFN_vkGetDeviceQueue2>(dlsym(libvulkan, "vkGetDeviceQueue2"));
+    vkCreateSamplerYcbcrConversion =
+        reinterpret_cast<PFN_vkCreateSamplerYcbcrConversion>(dlsym(libvulkan, "vkCreateSamplerYcbcrConversion"));
+    vkDestroySamplerYcbcrConversion =
+        reinterpret_cast<PFN_vkDestroySamplerYcbcrConversion>(dlsym(libvulkan, "vkDestroySamplerYcbcrConversion"));
+    vkCreateDescriptorUpdateTemplate =
+        reinterpret_cast<PFN_vkCreateDescriptorUpdateTemplate>(dlsym(libvulkan, "vkCreateDescriptorUpdateTemplate"));
+    vkDestroyDescriptorUpdateTemplate =
+        reinterpret_cast<PFN_vkDestroyDescriptorUpdateTemplate>(dlsym(libvulkan, "vkDestroyDescriptorUpdateTemplate"));
+    vkUpdateDescriptorSetWithTemplate =
+        reinterpret_cast<PFN_vkUpdateDescriptorSetWithTemplate>(dlsym(libvulkan, "vkUpdateDescriptorSetWithTemplate"));
+    vkGetPhysicalDeviceExternalBufferProperties = reinterpret_cast<PFN_vkGetPhysicalDeviceExternalBufferProperties>(
+        dlsym(libvulkan, "vkGetPhysicalDeviceExternalBufferProperties"));
+    vkGetPhysicalDeviceExternalFenceProperties = reinterpret_cast<PFN_vkGetPhysicalDeviceExternalFenceProperties>(
+        dlsym(libvulkan, "vkGetPhysicalDeviceExternalFenceProperties"));
+    vkGetPhysicalDeviceExternalSemaphoreProperties = reinterpret_cast<PFN_vkGetPhysicalDeviceExternalSemaphoreProperties>(
+        dlsym(libvulkan, "vkGetPhysicalDeviceExternalSemaphoreProperties"));
+    vkGetDescriptorSetLayoutSupport =
+        reinterpret_cast<PFN_vkGetDescriptorSetLayoutSupport>(dlsym(libvulkan, "vkGetDescriptorSetLayoutSupport"));
+    vkDestroySurfaceKHR = reinterpret_cast<PFN_vkDestroySurfaceKHR>(dlsym(libvulkan, "vkDestroySurfaceKHR"));
+    vkGetPhysicalDeviceSurfaceSupportKHR =
+        reinterpret_cast<PFN_vkGetPhysicalDeviceSurfaceSupportKHR>(dlsym(libvulkan, "vkGetPhysicalDeviceSurfaceSupportKHR"));
+    vkGetPhysicalDeviceSurfaceCapabilitiesKHR = reinterpret_cast<PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR>(
+        dlsym(libvulkan, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR"));
+    vkGetPhysicalDeviceSurfaceFormatsKHR =
+        reinterpret_cast<PFN_vkGetPhysicalDeviceSurfaceFormatsKHR>(dlsym(libvulkan, "vkGetPhysicalDeviceSurfaceFormatsKHR"));
+    vkGetPhysicalDeviceSurfacePresentModesKHR = reinterpret_cast<PFN_vkGetPhysicalDeviceSurfacePresentModesKHR>(
+        dlsym(libvulkan, "vkGetPhysicalDeviceSurfacePresentModesKHR"));
+    vkCreateSwapchainKHR = reinterpret_cast<PFN_vkCreateSwapchainKHR>(dlsym(libvulkan, "vkCreateSwapchainKHR"));
+    vkDestroySwapchainKHR = reinterpret_cast<PFN_vkDestroySwapchainKHR>(dlsym(libvulkan, "vkDestroySwapchainKHR"));
+    vkGetSwapchainImagesKHR = reinterpret_cast<PFN_vkGetSwapchainImagesKHR>(dlsym(libvulkan, "vkGetSwapchainImagesKHR"));
+    vkAcquireNextImageKHR = reinterpret_cast<PFN_vkAcquireNextImageKHR>(dlsym(libvulkan, "vkAcquireNextImageKHR"));
+    vkQueuePresentKHR = reinterpret_cast<PFN_vkQueuePresentKHR>(dlsym(libvulkan, "vkQueuePresentKHR"));
+    vkGetDeviceGroupPresentCapabilitiesKHR =
+        reinterpret_cast<PFN_vkGetDeviceGroupPresentCapabilitiesKHR>(dlsym(libvulkan, "vkGetDeviceGroupPresentCapabilitiesKHR"));
+    vkGetDeviceGroupSurfacePresentModesKHR =
+        reinterpret_cast<PFN_vkGetDeviceGroupSurfacePresentModesKHR>(dlsym(libvulkan, "vkGetDeviceGroupSurfacePresentModesKHR"));
+    vkGetPhysicalDevicePresentRectanglesKHR =
+        reinterpret_cast<PFN_vkGetPhysicalDevicePresentRectanglesKHR>(dlsym(libvulkan, "vkGetPhysicalDevicePresentRectanglesKHR"));
+    vkAcquireNextImage2KHR = reinterpret_cast<PFN_vkAcquireNextImage2KHR>(dlsym(libvulkan, "vkAcquireNextImage2KHR"));
+    vkGetPhysicalDeviceDisplayPropertiesKHR =
+        reinterpret_cast<PFN_vkGetPhysicalDeviceDisplayPropertiesKHR>(dlsym(libvulkan, "vkGetPhysicalDeviceDisplayPropertiesKHR"));
+    vkGetPhysicalDeviceDisplayPlanePropertiesKHR = reinterpret_cast<PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR>(
+        dlsym(libvulkan, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR"));
+    vkGetDisplayPlaneSupportedDisplaysKHR =
+        reinterpret_cast<PFN_vkGetDisplayPlaneSupportedDisplaysKHR>(dlsym(libvulkan, "vkGetDisplayPlaneSupportedDisplaysKHR"));
+    vkGetDisplayModePropertiesKHR =
+        reinterpret_cast<PFN_vkGetDisplayModePropertiesKHR>(dlsym(libvulkan, "vkGetDisplayModePropertiesKHR"));
+    vkCreateDisplayModeKHR = reinterpret_cast<PFN_vkCreateDisplayModeKHR>(dlsym(libvulkan, "vkCreateDisplayModeKHR"));
+    vkGetDisplayPlaneCapabilitiesKHR =
+        reinterpret_cast<PFN_vkGetDisplayPlaneCapabilitiesKHR>(dlsym(libvulkan, "vkGetDisplayPlaneCapabilitiesKHR"));
+    vkCreateDisplayPlaneSurfaceKHR =
+        reinterpret_cast<PFN_vkCreateDisplayPlaneSurfaceKHR>(dlsym(libvulkan, "vkCreateDisplayPlaneSurfaceKHR"));
+    vkCreateSharedSwapchainsKHR =
+        reinterpret_cast<PFN_vkCreateSharedSwapchainsKHR>(dlsym(libvulkan, "vkCreateSharedSwapchainsKHR"));
+    vkGetPhysicalDeviceFeatures2KHR =
+        reinterpret_cast<PFN_vkGetPhysicalDeviceFeatures2KHR>(dlsym(libvulkan, "vkGetPhysicalDeviceFeatures2KHR"));
+    vkGetPhysicalDeviceProperties2KHR =
+        reinterpret_cast<PFN_vkGetPhysicalDeviceProperties2KHR>(dlsym(libvulkan, "vkGetPhysicalDeviceProperties2KHR"));
+    vkGetPhysicalDeviceFormatProperties2KHR =
+        reinterpret_cast<PFN_vkGetPhysicalDeviceFormatProperties2KHR>(dlsym(libvulkan, "vkGetPhysicalDeviceFormatProperties2KHR"));
+    vkGetPhysicalDeviceImageFormatProperties2KHR = reinterpret_cast<PFN_vkGetPhysicalDeviceImageFormatProperties2KHR>(
+        dlsym(libvulkan, "vkGetPhysicalDeviceImageFormatProperties2KHR"));
+    vkGetPhysicalDeviceQueueFamilyProperties2KHR = reinterpret_cast<PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR>(
+        dlsym(libvulkan, "vkGetPhysicalDeviceQueueFamilyProperties2KHR"));
+    vkGetPhysicalDeviceMemoryProperties2KHR =
+        reinterpret_cast<PFN_vkGetPhysicalDeviceMemoryProperties2KHR>(dlsym(libvulkan, "vkGetPhysicalDeviceMemoryProperties2KHR"));
+    vkGetPhysicalDeviceSparseImageFormatProperties2KHR = reinterpret_cast<PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR>(
+        dlsym(libvulkan, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR"));
+    vkGetDeviceGroupPeerMemoryFeaturesKHR =
+        reinterpret_cast<PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR>(dlsym(libvulkan, "vkGetDeviceGroupPeerMemoryFeaturesKHR"));
+    vkCmdSetDeviceMaskKHR = reinterpret_cast<PFN_vkCmdSetDeviceMaskKHR>(dlsym(libvulkan, "vkCmdSetDeviceMaskKHR"));
+    vkCmdDispatchBaseKHR = reinterpret_cast<PFN_vkCmdDispatchBaseKHR>(dlsym(libvulkan, "vkCmdDispatchBaseKHR"));
+    vkTrimCommandPoolKHR = reinterpret_cast<PFN_vkTrimCommandPoolKHR>(dlsym(libvulkan, "vkTrimCommandPoolKHR"));
+    vkEnumeratePhysicalDeviceGroupsKHR =
+        reinterpret_cast<PFN_vkEnumeratePhysicalDeviceGroupsKHR>(dlsym(libvulkan, "vkEnumeratePhysicalDeviceGroupsKHR"));
+    vkGetPhysicalDeviceExternalBufferPropertiesKHR = reinterpret_cast<PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR>(
+        dlsym(libvulkan, "vkGetPhysicalDeviceExternalBufferPropertiesKHR"));
+    vkGetMemoryFdKHR = reinterpret_cast<PFN_vkGetMemoryFdKHR>(dlsym(libvulkan, "vkGetMemoryFdKHR"));
+    vkGetMemoryFdPropertiesKHR = reinterpret_cast<PFN_vkGetMemoryFdPropertiesKHR>(dlsym(libvulkan, "vkGetMemoryFdPropertiesKHR"));
+    vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = reinterpret_cast<PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR>(
+        dlsym(libvulkan, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR"));
+    vkImportSemaphoreFdKHR = reinterpret_cast<PFN_vkImportSemaphoreFdKHR>(dlsym(libvulkan, "vkImportSemaphoreFdKHR"));
+    vkGetSemaphoreFdKHR = reinterpret_cast<PFN_vkGetSemaphoreFdKHR>(dlsym(libvulkan, "vkGetSemaphoreFdKHR"));
+    vkCmdPushDescriptorSetKHR = reinterpret_cast<PFN_vkCmdPushDescriptorSetKHR>(dlsym(libvulkan, "vkCmdPushDescriptorSetKHR"));
+    vkCmdPushDescriptorSetWithTemplateKHR =
+        reinterpret_cast<PFN_vkCmdPushDescriptorSetWithTemplateKHR>(dlsym(libvulkan, "vkCmdPushDescriptorSetWithTemplateKHR"));
+    vkCreateDescriptorUpdateTemplateKHR =
+        reinterpret_cast<PFN_vkCreateDescriptorUpdateTemplateKHR>(dlsym(libvulkan, "vkCreateDescriptorUpdateTemplateKHR"));
+    vkDestroyDescriptorUpdateTemplateKHR =
+        reinterpret_cast<PFN_vkDestroyDescriptorUpdateTemplateKHR>(dlsym(libvulkan, "vkDestroyDescriptorUpdateTemplateKHR"));
+    vkUpdateDescriptorSetWithTemplateKHR =
+        reinterpret_cast<PFN_vkUpdateDescriptorSetWithTemplateKHR>(dlsym(libvulkan, "vkUpdateDescriptorSetWithTemplateKHR"));
+    vkCreateRenderPass2KHR = reinterpret_cast<PFN_vkCreateRenderPass2KHR>(dlsym(libvulkan, "vkCreateRenderPass2KHR"));
+    vkCmdBeginRenderPass2KHR = reinterpret_cast<PFN_vkCmdBeginRenderPass2KHR>(dlsym(libvulkan, "vkCmdBeginRenderPass2KHR"));
+    vkCmdNextSubpass2KHR = reinterpret_cast<PFN_vkCmdNextSubpass2KHR>(dlsym(libvulkan, "vkCmdNextSubpass2KHR"));
+    vkCmdEndRenderPass2KHR = reinterpret_cast<PFN_vkCmdEndRenderPass2KHR>(dlsym(libvulkan, "vkCmdEndRenderPass2KHR"));
+    vkGetSwapchainStatusKHR = reinterpret_cast<PFN_vkGetSwapchainStatusKHR>(dlsym(libvulkan, "vkGetSwapchainStatusKHR"));
+    vkGetPhysicalDeviceExternalFencePropertiesKHR = reinterpret_cast<PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR>(
+        dlsym(libvulkan, "vkGetPhysicalDeviceExternalFencePropertiesKHR"));
+    vkImportFenceFdKHR = reinterpret_cast<PFN_vkImportFenceFdKHR>(dlsym(libvulkan, "vkImportFenceFdKHR"));
+    vkGetFenceFdKHR = reinterpret_cast<PFN_vkGetFenceFdKHR>(dlsym(libvulkan, "vkGetFenceFdKHR"));
+    vkGetPhysicalDeviceSurfaceCapabilities2KHR = reinterpret_cast<PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR>(
+        dlsym(libvulkan, "vkGetPhysicalDeviceSurfaceCapabilities2KHR"));
+    vkGetPhysicalDeviceSurfaceFormats2KHR =
+        reinterpret_cast<PFN_vkGetPhysicalDeviceSurfaceFormats2KHR>(dlsym(libvulkan, "vkGetPhysicalDeviceSurfaceFormats2KHR"));
+    vkGetPhysicalDeviceDisplayProperties2KHR = reinterpret_cast<PFN_vkGetPhysicalDeviceDisplayProperties2KHR>(
+        dlsym(libvulkan, "vkGetPhysicalDeviceDisplayProperties2KHR"));
+    vkGetPhysicalDeviceDisplayPlaneProperties2KHR = reinterpret_cast<PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR>(
+        dlsym(libvulkan, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR"));
+    vkGetDisplayModeProperties2KHR =
+        reinterpret_cast<PFN_vkGetDisplayModeProperties2KHR>(dlsym(libvulkan, "vkGetDisplayModeProperties2KHR"));
+    vkGetDisplayPlaneCapabilities2KHR =
+        reinterpret_cast<PFN_vkGetDisplayPlaneCapabilities2KHR>(dlsym(libvulkan, "vkGetDisplayPlaneCapabilities2KHR"));
+    vkGetImageMemoryRequirements2KHR =
+        reinterpret_cast<PFN_vkGetImageMemoryRequirements2KHR>(dlsym(libvulkan, "vkGetImageMemoryRequirements2KHR"));
+    vkGetBufferMemoryRequirements2KHR =
+        reinterpret_cast<PFN_vkGetBufferMemoryRequirements2KHR>(dlsym(libvulkan, "vkGetBufferMemoryRequirements2KHR"));
+    vkGetImageSparseMemoryRequirements2KHR =
+        reinterpret_cast<PFN_vkGetImageSparseMemoryRequirements2KHR>(dlsym(libvulkan, "vkGetImageSparseMemoryRequirements2KHR"));
+    vkCreateSamplerYcbcrConversionKHR =
+        reinterpret_cast<PFN_vkCreateSamplerYcbcrConversionKHR>(dlsym(libvulkan, "vkCreateSamplerYcbcrConversionKHR"));
+    vkDestroySamplerYcbcrConversionKHR =
+        reinterpret_cast<PFN_vkDestroySamplerYcbcrConversionKHR>(dlsym(libvulkan, "vkDestroySamplerYcbcrConversionKHR"));
+    vkBindBufferMemory2KHR = reinterpret_cast<PFN_vkBindBufferMemory2KHR>(dlsym(libvulkan, "vkBindBufferMemory2KHR"));
+    vkBindImageMemory2KHR = reinterpret_cast<PFN_vkBindImageMemory2KHR>(dlsym(libvulkan, "vkBindImageMemory2KHR"));
+    vkGetDescriptorSetLayoutSupportKHR =
+        reinterpret_cast<PFN_vkGetDescriptorSetLayoutSupportKHR>(dlsym(libvulkan, "vkGetDescriptorSetLayoutSupportKHR"));
+    vkCmdDrawIndirectCountKHR = reinterpret_cast<PFN_vkCmdDrawIndirectCountKHR>(dlsym(libvulkan, "vkCmdDrawIndirectCountKHR"));
+    vkCmdDrawIndexedIndirectCountKHR =
+        reinterpret_cast<PFN_vkCmdDrawIndexedIndirectCountKHR>(dlsym(libvulkan, "vkCmdDrawIndexedIndirectCountKHR"));
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    vkCreateAndroidSurfaceKHR = reinterpret_cast<PFN_vkCreateAndroidSurfaceKHR>(dlsym(libvulkan, "vkCreateAndroidSurfaceKHR"));
+#endif
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    vkCreateWaylandSurfaceKHR = reinterpret_cast<PFN_vkCreateWaylandSurfaceKHR>(dlsym(libvulkan, "vkCreateWaylandSurfaceKHR"));
+    vkGetPhysicalDeviceWaylandPresentationSupportKHR = reinterpret_cast<PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR>(
+        dlsym(libvulkan, "vkGetPhysicalDeviceWaylandPresentationSupportKHR"));
+#endif
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    vkCreateWin32SurfaceKHR = reinterpret_cast<PFN_vkCreateWin32SurfaceKHR>(dlsym(libvulkan, "vkCreateWin32SurfaceKHR"));
+    vkGetPhysicalDeviceWin32PresentationSupportKHR = reinterpret_cast<PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR>(
+        dlsym(libvulkan, "vkGetPhysicalDeviceWin32PresentationSupportKHR"));
+#endif
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    vkGetMemoryWin32HandleKHR = reinterpret_cast<PFN_vkGetMemoryWin32HandleKHR>(dlsym(libvulkan, "vkGetMemoryWin32HandleKHR"));
+    vkGetMemoryWin32HandlePropertiesKHR =
+        reinterpret_cast<PFN_vkGetMemoryWin32HandlePropertiesKHR>(dlsym(libvulkan, "vkGetMemoryWin32HandlePropertiesKHR"));
+#endif
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#endif
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    vkImportSemaphoreWin32HandleKHR =
+        reinterpret_cast<PFN_vkImportSemaphoreWin32HandleKHR>(dlsym(libvulkan, "vkImportSemaphoreWin32HandleKHR"));
+    vkGetSemaphoreWin32HandleKHR =
+        reinterpret_cast<PFN_vkGetSemaphoreWin32HandleKHR>(dlsym(libvulkan, "vkGetSemaphoreWin32HandleKHR"));
+#endif
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    vkImportFenceWin32HandleKHR =
+        reinterpret_cast<PFN_vkImportFenceWin32HandleKHR>(dlsym(libvulkan, "vkImportFenceWin32HandleKHR"));
+    vkGetFenceWin32HandleKHR = reinterpret_cast<PFN_vkGetFenceWin32HandleKHR>(dlsym(libvulkan, "vkGetFenceWin32HandleKHR"));
+#endif
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    vkCreateXcbSurfaceKHR = reinterpret_cast<PFN_vkCreateXcbSurfaceKHR>(dlsym(libvulkan, "vkCreateXcbSurfaceKHR"));
+    vkGetPhysicalDeviceXcbPresentationSupportKHR = reinterpret_cast<PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR>(
+        dlsym(libvulkan, "vkGetPhysicalDeviceXcbPresentationSupportKHR"));
+#endif
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    vkCreateXlibSurfaceKHR = reinterpret_cast<PFN_vkCreateXlibSurfaceKHR>(dlsym(libvulkan, "vkCreateXlibSurfaceKHR"));
+    vkGetPhysicalDeviceXlibPresentationSupportKHR = reinterpret_cast<PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR>(
+        dlsym(libvulkan, "vkGetPhysicalDeviceXlibPresentationSupportKHR"));
+#endif
+    return 1;
+}
+
+// Function pointer definitions; these remain null until InitVulkan() resolves them.
+PFN_vkCreateInstance vkCreateInstance;
+PFN_vkDestroyInstance vkDestroyInstance;
+PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices;
+PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures;
+PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties;
+PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties;
+PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
+PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties;
+PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
+PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
+PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
+PFN_vkCreateDevice vkCreateDevice;
+PFN_vkDestroyDevice vkDestroyDevice;
+PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties;
+PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties;
+PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties;
+PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties;
+PFN_vkGetDeviceQueue vkGetDeviceQueue;
+PFN_vkQueueSubmit vkQueueSubmit;
+PFN_vkQueueWaitIdle vkQueueWaitIdle;
+PFN_vkDeviceWaitIdle vkDeviceWaitIdle;
+PFN_vkAllocateMemory vkAllocateMemory;
+PFN_vkFreeMemory vkFreeMemory;
+PFN_vkMapMemory vkMapMemory;
+PFN_vkUnmapMemory vkUnmapMemory;
+PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
+PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges;
+PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment;
+PFN_vkBindBufferMemory vkBindBufferMemory;
+PFN_vkBindImageMemory vkBindImageMemory;
+PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
+PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
+PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements;
+PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties;
+PFN_vkQueueBindSparse vkQueueBindSparse;
+PFN_vkCreateFence vkCreateFence;
+PFN_vkDestroyFence vkDestroyFence;
+PFN_vkResetFences vkResetFences;
+PFN_vkGetFenceStatus vkGetFenceStatus;
+PFN_vkWaitForFences vkWaitForFences;
+PFN_vkCreateSemaphore vkCreateSemaphore;
+PFN_vkDestroySemaphore vkDestroySemaphore;
+PFN_vkCreateEvent vkCreateEvent;
+PFN_vkDestroyEvent vkDestroyEvent;
+PFN_vkGetEventStatus vkGetEventStatus;
+PFN_vkSetEvent vkSetEvent;
+PFN_vkResetEvent vkResetEvent;
+PFN_vkCreateQueryPool vkCreateQueryPool;
+PFN_vkDestroyQueryPool vkDestroyQueryPool;
+PFN_vkGetQueryPoolResults vkGetQueryPoolResults;
+PFN_vkCreateBuffer vkCreateBuffer;
+PFN_vkDestroyBuffer vkDestroyBuffer;
+PFN_vkCreateBufferView vkCreateBufferView;
+PFN_vkDestroyBufferView vkDestroyBufferView;
+PFN_vkCreateImage vkCreateImage;
+PFN_vkDestroyImage vkDestroyImage;
+PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout;
+PFN_vkCreateImageView vkCreateImageView;
+PFN_vkDestroyImageView vkDestroyImageView;
+PFN_vkCreateShaderModule vkCreateShaderModule;
+PFN_vkDestroyShaderModule vkDestroyShaderModule;
+PFN_vkCreatePipelineCache vkCreatePipelineCache;
+PFN_vkDestroyPipelineCache vkDestroyPipelineCache;
+PFN_vkGetPipelineCacheData vkGetPipelineCacheData;
+PFN_vkMergePipelineCaches vkMergePipelineCaches;
+PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines;
+PFN_vkCreateComputePipelines vkCreateComputePipelines;
+PFN_vkDestroyPipeline vkDestroyPipeline;
+PFN_vkCreatePipelineLayout vkCreatePipelineLayout;
+PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout;
+PFN_vkCreateSampler vkCreateSampler;
+PFN_vkDestroySampler vkDestroySampler;
+PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout;
+PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout;
+PFN_vkCreateDescriptorPool vkCreateDescriptorPool;
+PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool;
+PFN_vkResetDescriptorPool vkResetDescriptorPool;
+PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets;
+PFN_vkFreeDescriptorSets vkFreeDescriptorSets;
+PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets;
+PFN_vkCreateFramebuffer vkCreateFramebuffer;
+PFN_vkDestroyFramebuffer vkDestroyFramebuffer;
+PFN_vkCreateRenderPass vkCreateRenderPass;
+PFN_vkDestroyRenderPass vkDestroyRenderPass;
+PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity;
+PFN_vkCreateCommandPool vkCreateCommandPool;
+PFN_vkDestroyCommandPool vkDestroyCommandPool;
+PFN_vkResetCommandPool vkResetCommandPool;
+PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers;
+PFN_vkFreeCommandBuffers vkFreeCommandBuffers;
+PFN_vkBeginCommandBuffer vkBeginCommandBuffer;
+PFN_vkEndCommandBuffer vkEndCommandBuffer;
+PFN_vkResetCommandBuffer vkResetCommandBuffer;
+PFN_vkCmdBindPipeline vkCmdBindPipeline;
+PFN_vkCmdSetViewport vkCmdSetViewport;
+PFN_vkCmdSetScissor vkCmdSetScissor;
+PFN_vkCmdSetLineWidth vkCmdSetLineWidth;
+PFN_vkCmdSetDepthBias vkCmdSetDepthBias;
+PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants;
+PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds;
+PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask;
+PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask;
+PFN_vkCmdSetStencilReference vkCmdSetStencilReference;
+PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets;
+PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer;
+PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers;
+PFN_vkCmdDraw vkCmdDraw;
+PFN_vkCmdDrawIndexed vkCmdDrawIndexed;
+PFN_vkCmdDrawIndirect vkCmdDrawIndirect;
+PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect;
+PFN_vkCmdDispatch vkCmdDispatch;
+PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect;
+PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
+PFN_vkCmdCopyImage vkCmdCopyImage;
+PFN_vkCmdBlitImage vkCmdBlitImage;
+PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage;
+PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer;
+PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer;
+PFN_vkCmdFillBuffer vkCmdFillBuffer;
+PFN_vkCmdClearColorImage vkCmdClearColorImage;
+PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage;
+PFN_vkCmdClearAttachments vkCmdClearAttachments;
+PFN_vkCmdResolveImage vkCmdResolveImage;
+PFN_vkCmdSetEvent vkCmdSetEvent;
+PFN_vkCmdResetEvent vkCmdResetEvent;
+PFN_vkCmdWaitEvents vkCmdWaitEvents;
+PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier;
+PFN_vkCmdBeginQuery vkCmdBeginQuery;
+PFN_vkCmdEndQuery vkCmdEndQuery;
+PFN_vkCmdResetQueryPool vkCmdResetQueryPool;
+PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp;
+PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults;
+PFN_vkCmdPushConstants vkCmdPushConstants;
+PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass;
+PFN_vkCmdNextSubpass vkCmdNextSubpass;
+PFN_vkCmdEndRenderPass vkCmdEndRenderPass;
+PFN_vkCmdExecuteCommands vkCmdExecuteCommands;
+PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion;
+PFN_vkBindBufferMemory2 vkBindBufferMemory2;
+PFN_vkBindImageMemory2 vkBindImageMemory2;
+PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures;
+PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask;
+PFN_vkCmdDispatchBase vkCmdDispatchBase;
+PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups;
+PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2;
+PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2;
+PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2;
+PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2;
+PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2;
+PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2;
+PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2;
+PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2;
+PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2;
+PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2;
+PFN_vkTrimCommandPool vkTrimCommandPool;
+PFN_vkGetDeviceQueue2 vkGetDeviceQueue2;
+PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion;
+PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion;
+PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate;
+PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate;
+PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate;
+PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties;
+PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties;
+PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties;
+PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport;
+PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR;
+PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR;
+PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR;
+PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR;
+PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR;
+PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR;
+PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR;
+PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR;
+PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR;
+PFN_vkQueuePresentKHR vkQueuePresentKHR;
+PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR;
+PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR;
+PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR;
+PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR;
+PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR;
+PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR;
+PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR;
+PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR;
+PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR;
+PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR;
+PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR;
+PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR;
+PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR;
+PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR;
+PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR;
+PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR;
+PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR;
+PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR;
+PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR;
+PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR;
+PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR;
+PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR;
+PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR;
+PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR;
+PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR;
+PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR;
+PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR;
+PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR;
+PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR;
+PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR;
+PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR;
+PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR;
+PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR;
+PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR;
+PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR;
+PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR;
+PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR;
+PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR;
+PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR;
+PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR;
+PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR;
+PFN_vkImportFenceFdKHR vkImportFenceFdKHR;
+PFN_vkGetFenceFdKHR vkGetFenceFdKHR;
+PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR;
+PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR;
+PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR;
+PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR;
+PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR;
+PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR;
+PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
+PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
+PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR;
+PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR;
+PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR;
+PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR;
+PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR;
+PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR;
+PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR;
+PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR;
+PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT;
+PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT;
+PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT;
+PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT;
+PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT;
+PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT;
+PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT;
+PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT;
+PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT;
+PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT;
+PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT;
+PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT;
+PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT;
+PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT;
+PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD;
+PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD;
+PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD;
+PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV;
+PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT;
+PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT;
+PFN_vkCmdProcessCommandsNVX vkCmdProcessCommandsNVX;
+PFN_vkCmdReserveSpaceForCommandsNVX vkCmdReserveSpaceForCommandsNVX;
+PFN_vkCreateIndirectCommandsLayoutNVX vkCreateIndirectCommandsLayoutNVX;
+PFN_vkDestroyIndirectCommandsLayoutNVX vkDestroyIndirectCommandsLayoutNVX;
+PFN_vkCreateObjectTableNVX vkCreateObjectTableNVX;
+PFN_vkDestroyObjectTableNVX vkDestroyObjectTableNVX;
+PFN_vkRegisterObjectsNVX vkRegisterObjectsNVX;
+PFN_vkUnregisterObjectsNVX vkUnregisterObjectsNVX;
+PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX;
+PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV;
+PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT;
+PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT;
+PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT;
+PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT;
+PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT;
+PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT;
+PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE;
+PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE;
+PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT;
+PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT;
+PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT;
+PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT;
+PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT;
+PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT;
+PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT;
+PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT;
+PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT;
+PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT;
+PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT;
+PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT;
+PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT;
+PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT;
+PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT;
+PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT;
+PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT;
+PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT;
+PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT;
+PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT;
+PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV;
+PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV;
+PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV;
+PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV;
+PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV;
+PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV;
+PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV;
+PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV;
+PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV;
+PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV;
+PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV;
+PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV;
+PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV;
+PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV;
+PFN_vkCompileDeferredNV vkCompileDeferredNV;
+PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT;
+PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD;
+PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT;
+PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT;
+PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV;
+PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV;
+PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV;
+PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV;
+PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV;
+PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV;
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR;
+#endif
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID;
+PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID;
+#endif
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA;
+#endif
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK;
+#endif
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK;
+#endif
+
+#ifdef VK_USE_PLATFORM_VI_NN
+PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN;
+#endif
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR;
+PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR;
+#endif
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR;
+PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR;
+#endif
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR;
+PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR;
+#endif
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#endif
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR;
+PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR;
+#endif
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR;
+PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR;
+#endif
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV;
+#endif
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#endif
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR;
+PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR;
+#endif
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR;
+PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR;
+#endif
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT;
+PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT;
+#endif
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/src/third_party/vulkan-tools/src/common/vulkan_wrapper.h b/src/third_party/vulkan-tools/src/common/vulkan_wrapper.h
new file mode 100644
index 0000000..957e698
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/common/vulkan_wrapper.h
@@ -0,0 +1,411 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// This file is generated.
+#ifndef VULKAN_WRAPPER_H
+#define VULKAN_WRAPPER_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#define VK_NO_PROTOTYPES 1
+#include <vulkan/vulkan.h>
+
+/* Initialize the Vulkan function pointer variables declared in this header.
+ * Returns 0 if Vulkan is not available, non-zero if it is available.
+ */
+int InitVulkan(void);
+
+// VK_core_0
+extern PFN_vkCreateInstance vkCreateInstance;
+extern PFN_vkDestroyInstance vkDestroyInstance;
+extern PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices;
+extern PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures;
+extern PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties;
+extern PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties;
+extern PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
+extern PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties;
+extern PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
+extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
+extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
+extern PFN_vkCreateDevice vkCreateDevice;
+extern PFN_vkDestroyDevice vkDestroyDevice;
+extern PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties;
+extern PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties;
+extern PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties;
+extern PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties;
+extern PFN_vkGetDeviceQueue vkGetDeviceQueue;
+extern PFN_vkQueueSubmit vkQueueSubmit;
+extern PFN_vkQueueWaitIdle vkQueueWaitIdle;
+extern PFN_vkDeviceWaitIdle vkDeviceWaitIdle;
+extern PFN_vkAllocateMemory vkAllocateMemory;
+extern PFN_vkFreeMemory vkFreeMemory;
+extern PFN_vkMapMemory vkMapMemory;
+extern PFN_vkUnmapMemory vkUnmapMemory;
+extern PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
+extern PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges;
+extern PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment;
+extern PFN_vkBindBufferMemory vkBindBufferMemory;
+extern PFN_vkBindImageMemory vkBindImageMemory;
+extern PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
+extern PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
+extern PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements;
+extern PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties;
+extern PFN_vkQueueBindSparse vkQueueBindSparse;
+extern PFN_vkCreateFence vkCreateFence;
+extern PFN_vkDestroyFence vkDestroyFence;
+extern PFN_vkResetFences vkResetFences;
+extern PFN_vkGetFenceStatus vkGetFenceStatus;
+extern PFN_vkWaitForFences vkWaitForFences;
+extern PFN_vkCreateSemaphore vkCreateSemaphore;
+extern PFN_vkDestroySemaphore vkDestroySemaphore;
+extern PFN_vkCreateEvent vkCreateEvent;
+extern PFN_vkDestroyEvent vkDestroyEvent;
+extern PFN_vkGetEventStatus vkGetEventStatus;
+extern PFN_vkSetEvent vkSetEvent;
+extern PFN_vkResetEvent vkResetEvent;
+extern PFN_vkCreateQueryPool vkCreateQueryPool;
+extern PFN_vkDestroyQueryPool vkDestroyQueryPool;
+extern PFN_vkGetQueryPoolResults vkGetQueryPoolResults;
+extern PFN_vkCreateBuffer vkCreateBuffer;
+extern PFN_vkDestroyBuffer vkDestroyBuffer;
+extern PFN_vkCreateBufferView vkCreateBufferView;
+extern PFN_vkDestroyBufferView vkDestroyBufferView;
+extern PFN_vkCreateImage vkCreateImage;
+extern PFN_vkDestroyImage vkDestroyImage;
+extern PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout;
+extern PFN_vkCreateImageView vkCreateImageView;
+extern PFN_vkDestroyImageView vkDestroyImageView;
+extern PFN_vkCreateShaderModule vkCreateShaderModule;
+extern PFN_vkDestroyShaderModule vkDestroyShaderModule;
+extern PFN_vkCreatePipelineCache vkCreatePipelineCache;
+extern PFN_vkDestroyPipelineCache vkDestroyPipelineCache;
+extern PFN_vkGetPipelineCacheData vkGetPipelineCacheData;
+extern PFN_vkMergePipelineCaches vkMergePipelineCaches;
+extern PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines;
+extern PFN_vkCreateComputePipelines vkCreateComputePipelines;
+extern PFN_vkDestroyPipeline vkDestroyPipeline;
+extern PFN_vkCreatePipelineLayout vkCreatePipelineLayout;
+extern PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout;
+extern PFN_vkCreateSampler vkCreateSampler;
+extern PFN_vkDestroySampler vkDestroySampler;
+extern PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout;
+extern PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout;
+extern PFN_vkCreateDescriptorPool vkCreateDescriptorPool;
+extern PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool;
+extern PFN_vkResetDescriptorPool vkResetDescriptorPool;
+extern PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets;
+extern PFN_vkFreeDescriptorSets vkFreeDescriptorSets;
+extern PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets;
+extern PFN_vkCreateFramebuffer vkCreateFramebuffer;
+extern PFN_vkDestroyFramebuffer vkDestroyFramebuffer;
+extern PFN_vkCreateRenderPass vkCreateRenderPass;
+extern PFN_vkDestroyRenderPass vkDestroyRenderPass;
+extern PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity;
+extern PFN_vkCreateCommandPool vkCreateCommandPool;
+extern PFN_vkDestroyCommandPool vkDestroyCommandPool;
+extern PFN_vkResetCommandPool vkResetCommandPool;
+extern PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers;
+extern PFN_vkFreeCommandBuffers vkFreeCommandBuffers;
+extern PFN_vkBeginCommandBuffer vkBeginCommandBuffer;
+extern PFN_vkEndCommandBuffer vkEndCommandBuffer;
+extern PFN_vkResetCommandBuffer vkResetCommandBuffer;
+extern PFN_vkCmdBindPipeline vkCmdBindPipeline;
+extern PFN_vkCmdSetViewport vkCmdSetViewport;
+extern PFN_vkCmdSetScissor vkCmdSetScissor;
+extern PFN_vkCmdSetLineWidth vkCmdSetLineWidth;
+extern PFN_vkCmdSetDepthBias vkCmdSetDepthBias;
+extern PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants;
+extern PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds;
+extern PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask;
+extern PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask;
+extern PFN_vkCmdSetStencilReference vkCmdSetStencilReference;
+extern PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets;
+extern PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer;
+extern PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers;
+extern PFN_vkCmdDraw vkCmdDraw;
+extern PFN_vkCmdDrawIndexed vkCmdDrawIndexed;
+extern PFN_vkCmdDrawIndirect vkCmdDrawIndirect;
+extern PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect;
+extern PFN_vkCmdDispatch vkCmdDispatch;
+extern PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect;
+extern PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
+extern PFN_vkCmdCopyImage vkCmdCopyImage;
+extern PFN_vkCmdBlitImage vkCmdBlitImage;
+extern PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage;
+extern PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer;
+extern PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer;
+extern PFN_vkCmdFillBuffer vkCmdFillBuffer;
+extern PFN_vkCmdClearColorImage vkCmdClearColorImage;
+extern PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage;
+extern PFN_vkCmdClearAttachments vkCmdClearAttachments;
+extern PFN_vkCmdResolveImage vkCmdResolveImage;
+extern PFN_vkCmdSetEvent vkCmdSetEvent;
+extern PFN_vkCmdResetEvent vkCmdResetEvent;
+extern PFN_vkCmdWaitEvents vkCmdWaitEvents;
+extern PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier;
+extern PFN_vkCmdBeginQuery vkCmdBeginQuery;
+extern PFN_vkCmdEndQuery vkCmdEndQuery;
+extern PFN_vkCmdResetQueryPool vkCmdResetQueryPool;
+extern PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp;
+extern PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults;
+extern PFN_vkCmdPushConstants vkCmdPushConstants;
+extern PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass;
+extern PFN_vkCmdNextSubpass vkCmdNextSubpass;
+extern PFN_vkCmdEndRenderPass vkCmdEndRenderPass;
+extern PFN_vkCmdExecuteCommands vkCmdExecuteCommands;
+
+// VK_core_1
+extern PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion;
+extern PFN_vkBindBufferMemory2 vkBindBufferMemory2;
+extern PFN_vkBindImageMemory2 vkBindImageMemory2;
+extern PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures;
+extern PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask;
+extern PFN_vkCmdDispatchBase vkCmdDispatchBase;
+extern PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups;
+extern PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2;
+extern PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2;
+extern PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2;
+extern PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2;
+extern PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2;
+extern PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2;
+extern PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2;
+extern PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2;
+extern PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2;
+extern PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2;
+extern PFN_vkTrimCommandPool vkTrimCommandPool;
+extern PFN_vkGetDeviceQueue2 vkGetDeviceQueue2;
+extern PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion;
+extern PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion;
+extern PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate;
+extern PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate;
+extern PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate;
+extern PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties;
+extern PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties;
+extern PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties;
+extern PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport;
+
+// VK_KHR_surface
+extern PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR;
+extern PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR;
+extern PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR;
+extern PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR;
+extern PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR;
+
+// VK_KHR_swapchain
+extern PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR;
+extern PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR;
+extern PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR;
+extern PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR;
+extern PFN_vkQueuePresentKHR vkQueuePresentKHR;
+extern PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR;
+extern PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR;
+extern PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR;
+extern PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR;
+
+// VK_KHR_display
+extern PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR;
+extern PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR;
+extern PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR;
+extern PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR;
+extern PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR;
+extern PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR;
+extern PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR;
+
+// VK_KHR_display_swapchain
+extern PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR;
+
+// VK_KHR_sampler_mirror_clamp_to_edge
+
+// VK_KHR_multiview
+
+// VK_KHR_get_physical_device_properties2
+extern PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR;
+extern PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR;
+extern PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR;
+extern PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR;
+extern PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR;
+extern PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR;
+extern PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR;
+
+// VK_KHR_device_group
+extern PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR;
+extern PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR;
+extern PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR;
+
+// VK_KHR_shader_draw_parameters
+
+// VK_KHR_maintenance1
+extern PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR;
+
+// VK_KHR_device_group_creation
+extern PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR;
+
+// VK_KHR_external_memory_capabilities
+extern PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR;
+
+// VK_KHR_external_memory
+
+// VK_KHR_external_memory_fd
+extern PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR;
+extern PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR;
+
+// VK_KHR_external_semaphore_capabilities
+extern PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR;
+
+// VK_KHR_external_semaphore
+
+// VK_KHR_external_semaphore_fd
+extern PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR;
+extern PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR;
+
+// VK_KHR_push_descriptor
+extern PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR;
+extern PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR;
+
+// VK_KHR_16bit_storage
+
+// VK_KHR_incremental_present
+
+// VK_KHR_descriptor_update_template
+extern PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR;
+extern PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR;
+extern PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR;
+
+// VK_KHR_create_renderpass2
+extern PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR;
+extern PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR;
+extern PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR;
+extern PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR;
+
+// VK_KHR_shared_presentable_image
+extern PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR;
+
+// VK_KHR_external_fence_capabilities
+extern PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR;
+
+// VK_KHR_external_fence
+
+// VK_KHR_external_fence_fd
+extern PFN_vkImportFenceFdKHR vkImportFenceFdKHR;
+extern PFN_vkGetFenceFdKHR vkGetFenceFdKHR;
+
+// VK_KHR_maintenance2
+
+// VK_KHR_get_surface_capabilities2
+extern PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR;
+extern PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR;
+
+// VK_KHR_variable_pointers
+
+// VK_KHR_get_display_properties2
+extern PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR;
+extern PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR;
+extern PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR;
+extern PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR;
+
+// VK_KHR_dedicated_allocation
+
+// VK_KHR_storage_buffer_storage_class
+
+// VK_KHR_relaxed_block_layout
+
+// VK_KHR_get_memory_requirements2
+extern PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
+extern PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
+extern PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR;
+
+// VK_KHR_image_format_list
+
+// VK_KHR_sampler_ycbcr_conversion
+extern PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR;
+extern PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR;
+
+// VK_KHR_bind_memory2
+extern PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR;
+extern PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR;
+
+// VK_KHR_maintenance3
+extern PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR;
+
+// VK_KHR_draw_indirect_count
+extern PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR;
+extern PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR;
+
+// VK_KHR_8bit_storage
+
+// VK_KHR_shader_atomic_int64
+
+// VK_KHR_driver_properties
+
+// VK_KHR_vulkan_memory_model
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+// VK_KHR_android_surface
+extern PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR;
+#endif
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+// VK_KHR_wayland_surface
+extern PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR;
+extern PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR;
+#endif
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// VK_KHR_win32_surface
+extern PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR;
+extern PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR;
+#endif
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// VK_KHR_external_memory_win32
+extern PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR;
+extern PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR;
+#endif
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// VK_KHR_win32_keyed_mutex
+#endif
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// VK_KHR_external_semaphore_win32
+extern PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR;
+extern PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR;
+#endif
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// VK_KHR_external_fence_win32
+extern PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR;
+extern PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR;
+#endif
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+// VK_KHR_xcb_surface
+extern PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR;
+extern PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR;
+#endif
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+// VK_KHR_xlib_surface
+extern PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR;
+extern PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR;
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // VULKAN_WRAPPER_H
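
A minimal usage sketch for the wrapper header above (illustrative only, not part of the imported sources): it assumes nothing beyond what the header declares, namely that InitVulkan() returns zero when no Vulkan runtime is available and otherwise populates the core entry points declared here.

#include "vulkan_wrapper.h"
#include <stdio.h>

int main(void) {
    /* Resolve all vk* function pointers before calling any of them. */
    if (!InitVulkan()) {
        fprintf(stderr, "Vulkan is not available on this device\n");
        return 1;
    }

    /* After InitVulkan() succeeds, the pointers are used exactly like the
     * regular Vulkan API. */
    VkApplicationInfo app_info = {VK_STRUCTURE_TYPE_APPLICATION_INFO};
    app_info.apiVersion = VK_API_VERSION_1_0;

    VkInstanceCreateInfo create_info = {VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO};
    create_info.pApplicationInfo = &app_info;

    VkInstance instance = VK_NULL_HANDLE;
    if (vkCreateInstance(&create_info, NULL, &instance) != VK_SUCCESS) {
        return 1;
    }

    vkDestroyInstance(instance, NULL);
    return 0;
}
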
diff --git a/src/third_party/vulkan-tools/src/cube/CMakeLists.txt b/src/third_party/vulkan-tools/src/cube/CMakeLists.txt
new file mode 100644
index 0000000..78430df
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/CMakeLists.txt
@@ -0,0 +1,255 @@
+# ~~~
+# Copyright (c) 2018 Valve Corporation
+# Copyright (c) 2018 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ~~~
+
+set(CUBE_INCLUDE_DIRS ${CMAKE_CURRENT_BINARY_DIR} ${CMAKE_CURRENT_BINARY_DIR}/.. ${VulkanHeaders_INCLUDE_DIR})
+
+set(SCRIPTS_DIR "${PROJECT_SOURCE_DIR}/scripts")
+
+if (NOT GLSLANG_INSTALL_DIR AND DEFINED ENV{GLSLANG_INSTALL_DIR})
+    set(GLSLANG_INSTALL_DIR $ENV{GLSLANG_INSTALL_DIR})
+endif()
+
+if(GLSLANG_INSTALL_DIR)
+    message(STATUS "Using GLSLANG_INSTALL_DIR to look for glslangValidator")
+    find_program(GLSLANG_VALIDATOR NAMES glslangValidator HINTS "${GLSLANG_INSTALL_DIR}/bin")
+else()
+    set(GLSLANG_VALIDATOR_NAME "glslangValidator")
+    message(STATUS "Using cmake find_program to look for glslangValidator")
+    if(WIN32)
+        execute_process(
+            COMMAND ${PYTHON_EXECUTABLE} ${SCRIPTS_DIR}/fetch_glslangvalidator.py glslang-master-windows-x64-Release.zip)
+        set(GLSLANG_VALIDATOR_NAME "glslangValidator.exe")
+    elseif(APPLE)
+        execute_process(COMMAND ${PYTHON_EXECUTABLE} ${SCRIPTS_DIR}/fetch_glslangvalidator.py glslang-master-osx-Release.zip)
+    elseif(UNIX AND NOT APPLE) # i.e. Linux
+        execute_process(COMMAND ${PYTHON_EXECUTABLE} ${SCRIPTS_DIR}/fetch_glslangvalidator.py glslang-master-linux-Release.zip)
+    endif()
+    find_program(GLSLANG_VALIDATOR NAMES ${GLSLANG_VALIDATOR_NAME} HINTS "${PROJECT_SOURCE_DIR}/glslang/bin")
+endif()
+
+if(UNIX AND NOT APPLE) # i.e. Linux
+    include(FindPkgConfig)
+    option(BUILD_WSI_XCB_SUPPORT "Build XCB WSI support" ON)
+    option(BUILD_WSI_XLIB_SUPPORT "Build Xlib WSI support" ON)
+    option(BUILD_WSI_WAYLAND_SUPPORT "Build Wayland WSI support" ON)
+    set(CUBE_WSI_SELECTION "XCB" CACHE STRING "Select WSI target for vkcube (XCB, XLIB, WAYLAND, DISPLAY)")
+
+    if(BUILD_WSI_XCB_SUPPORT)
+        find_package(XCB REQUIRED)
+    endif()
+
+    if(BUILD_WSI_XLIB_SUPPORT)
+        find_package(X11 REQUIRED)
+    endif()
+
+    if(BUILD_WSI_WAYLAND_SUPPORT)
+        find_package(Wayland REQUIRED)
+        include_directories(${WAYLAND_CLIENT_INCLUDE_DIR})
+    endif()
+endif()
+
+if(WIN32)
+    add_definitions(-DVK_USE_PLATFORM_WIN32_KHR -DWIN32_LEAN_AND_MEAN)
+    if(NOT MSVC_VERSION LESS 1900)
+        # Enable control flow guard
+        message(STATUS "Building vkcube with control flow guard")
+        add_compile_options("$<$<CXX_COMPILER_ID:MSVC>:/guard:cf>")
+        set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} /guard:cf")
+        set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} /guard:cf")
+    endif()
+elseif(ANDROID)
+    add_definitions(-DVK_USE_PLATFORM_ANDROID_KHR)
+elseif(APPLE)
+    add_definitions(-DVK_USE_PLATFORM_METAL_EXT)
+elseif(UNIX AND NOT APPLE) # i.e. Linux
+    if(NOT CUBE_WSI_SELECTION)
+        set(CUBE_WSI_SELECTION "XCB")
+    endif()
+
+    if(CUBE_WSI_SELECTION STREQUAL "XCB")
+        if(NOT BUILD_WSI_XCB_SUPPORT)
+            message(FATAL_ERROR "Selected XCB for vkcube build but not building Xcb support")
+        endif()
+        set(CUBE_INCLUDE_DIRS ${XCB_INCLUDE_DIRS} ${CUBE_INCLUDE_DIRS})
+        link_libraries(${XCB_LIBRARIES})
+        add_definitions(-DVK_USE_PLATFORM_XCB_KHR)
+    elseif(CUBE_WSI_SELECTION STREQUAL "XLIB")
+        if(NOT BUILD_WSI_XLIB_SUPPORT)
+            message(FATAL_ERROR "Selected XLIB for vkcube build but not building Xlib support")
+        endif()
+        set(CUBE_INCLUDE_DIRS ${X11_INCLUDE_DIR} ${CUBE_INCLUDE_DIRS})
+        link_libraries(${X11_LIBRARIES})
+        add_definitions(-DVK_USE_PLATFORM_XLIB_KHR)
+    elseif(CUBE_WSI_SELECTION STREQUAL "WAYLAND")
+        if(NOT BUILD_WSI_WAYLAND_SUPPORT)
+            message(FATAL_ERROR "Selected Wayland for vkcube build but not building Wayland support")
+        endif()
+        set(CUBE_INCLUDE_DIRS ${WAYLAND_CLIENT_INCLUDE_DIR} ${CUBE_INCLUDE_DIRS})
+        link_libraries(${WAYLAND_CLIENT_LIBRARIES})
+        add_definitions(-DVK_USE_PLATFORM_WAYLAND_KHR)
+    elseif(CUBE_WSI_SELECTION STREQUAL "DISPLAY")
+        add_definitions(-DVK_USE_PLATFORM_DISPLAY_KHR)
+    else()
+        message(FATAL_ERROR "Unrecognized value for CUBE_WSI_SELECTION: ${CUBE_WSI_SELECTION}")
+    endif()
+
+    link_libraries(${API_LOWERCASE} m)
+else()
+    message(FATAL_ERROR "Unsupported Platform!")
+endif()
+
+set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS}")
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
+
+if(WIN32)
+    # Use static MSVCRT libraries
+    foreach(configuration
+            in
+            CMAKE_C_FLAGS_DEBUG
+            CMAKE_C_FLAGS_MINSIZEREL
+            CMAKE_C_FLAGS_RELEASE
+            CMAKE_C_FLAGS_RELWITHDEBINFO
+            CMAKE_CXX_FLAGS_DEBUG
+            CMAKE_CXX_FLAGS_MINSIZEREL
+            CMAKE_CXX_FLAGS_RELEASE
+            CMAKE_CXX_FLAGS_RELWITHDEBINFO)
+        if(${configuration} MATCHES "/MD")
+            string(REGEX
+                   REPLACE "/MD"
+                           "/MT"
+                           ${configuration}
+                           "${${configuration}}")
+        endif()
+    endforeach()
+
+    file(COPY cube.vcxproj.user DESTINATION ${CMAKE_BINARY_DIR}/cube)
+endif()
+
+add_custom_command(COMMENT "Compiling cube vertex shader"
+                   OUTPUT cube.vert.inc
+                   COMMAND ${GLSLANG_VALIDATOR} -V -x -o ${CMAKE_CURRENT_BINARY_DIR}/cube.vert.inc
+                           ${PROJECT_SOURCE_DIR}/cube/cube.vert
+                   MAIN_DEPENDENCY ${PROJECT_SOURCE_DIR}/cube/cube.vert
+                   DEPENDS ${PROJECT_SOURCE_DIR}/cube/cube.vert ${GLSLANG_VALIDATOR})
+add_custom_command(COMMENT "Compiling cube fragment shader"
+                   OUTPUT cube.frag.inc
+                   COMMAND ${GLSLANG_VALIDATOR} -V -x -o ${CMAKE_CURRENT_BINARY_DIR}/cube.frag.inc
+                           ${PROJECT_SOURCE_DIR}/cube/cube.frag
+                   MAIN_DEPENDENCY ${PROJECT_SOURCE_DIR}/cube/cube.frag
+                   DEPENDS ${PROJECT_SOURCE_DIR}/cube/cube.frag ${GLSLANG_VALIDATOR})
+include_directories(${CMAKE_CURRENT_BINARY_DIR} ${CMAKE_CURRENT_SOURCE_DIR})
+
+if(WIN32)
+    set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -D_CRT_SECURE_NO_WARNINGS -D_USE_MATH_DEFINES")
+    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -D_CRT_SECURE_NO_WARNINGS -D_USE_MATH_DEFINES")
+endif()
+
+include_directories(${CUBE_INCLUDE_DIRS})
+
+# ----------------------------------------------------------------------------
+# vkcube
+
+if(APPLE)
+    include(macOS/cube/cube.cmake)
+elseif(NOT WIN32)
+    if(${CMAKE_SYSTEM_PROCESSOR} STREQUAL ${CMAKE_HOST_SYSTEM_PROCESSOR})
+        add_executable(vkcube
+                       cube.c
+                       ${PROJECT_SOURCE_DIR}/cube/cube.vert
+                       ${PROJECT_SOURCE_DIR}/cube/cube.frag
+                       cube.vert.inc
+                       cube.frag.inc)
+        target_link_libraries(vkcube Vulkan::Vulkan)
+        CHECK_LIBRARY_EXISTS("rt" clock_gettime "" NEED_RT)
+        if (NEED_RT)
+            target_link_libraries(vkcube rt)
+        endif()
+    endif()
+else()
+    if(CMAKE_CL_64)
+        set(LIB_DIR "Win64")
+    else()
+        set(LIB_DIR "Win32")
+    endif()
+
+    add_executable(vkcube
+                   WIN32
+                   cube.c
+                   ${PROJECT_SOURCE_DIR}/cube/cube.vert
+                   ${PROJECT_SOURCE_DIR}/cube/cube.frag
+                   cube.vert.inc
+                   cube.frag.inc)
+    target_link_libraries(vkcube Vulkan::Vulkan)
+endif()
+
+if(APPLE)
+    # Keep RPATH so fixup_bundle can use it to find libraries
+    set_target_properties(vkcube PROPERTIES INSTALL_RPATH_USE_LINK_PATH TRUE)
+    install(TARGETS vkcube BUNDLE DESTINATION "cube")
+    # Fix up the library references to be self-contained within the bundle.
+    install(CODE "
+        include(BundleUtilities)
+        fixup_bundle(\${CMAKE_INSTALL_PREFIX}/cube/vkcube.app \"\" \"${Vulkan_LIBRARY_DIR}\")
+        ")
+else()
+    install(TARGETS vkcube RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
+endif()
+
+# ----------------------------------------------------------------------------
+# vkcubepp
+
+if(APPLE)
+    include(macOS/cubepp/cubepp.cmake)
+elseif(NOT WIN32)
+    if(${CMAKE_SYSTEM_PROCESSOR} STREQUAL ${CMAKE_HOST_SYSTEM_PROCESSOR})
+        add_executable(vkcubepp
+                       cube.cpp
+                       ${PROJECT_SOURCE_DIR}/cube/cube.vert
+                       ${PROJECT_SOURCE_DIR}/cube/cube.frag
+                       cube.vert.inc
+                       cube.frag.inc)
+        target_link_libraries(vkcubepp Vulkan::Vulkan)
+    endif()
+else()
+    if(CMAKE_CL_64)
+        set(LIB_DIR "Win64")
+    else()
+        set(LIB_DIR "Win32")
+    endif()
+
+    add_executable(vkcubepp
+                   WIN32
+                   cube.cpp
+                   ${PROJECT_SOURCE_DIR}/cube/cube.vert
+                   ${PROJECT_SOURCE_DIR}/cube/cube.frag
+                   cube.vert.inc
+                   cube.frag.inc)
+    target_link_libraries(vkcubepp Vulkan::Vulkan)
+endif()
+
+if(APPLE)
+    # Keep RPATH so fixup_bundle can use it to find libraries
+    set_target_properties(vkcubepp PROPERTIES INSTALL_RPATH_USE_LINK_PATH TRUE)
+    install(TARGETS vkcubepp BUNDLE DESTINATION "cube")
+    # Fix up the library references to be self-contained within the bundle.
+    install(CODE "
+        include(BundleUtilities)
+        fixup_bundle(\${CMAKE_INSTALL_PREFIX}/cube/vkcubepp.app \"\" \"${Vulkan_LIBRARY_DIR}\")
+        ")
+else()
+    install(TARGETS vkcubepp RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
+endif()
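
For reference, the WSI backend used by vkcube and vkcubepp on Linux is chosen at configure time through the CUBE_WSI_SELECTION cache variable defined above (XCB by default; XLIB, WAYLAND, or DISPLAY otherwise). The matching BUILD_WSI_*_SUPPORT option must remain enabled, or configuration aborts with the FATAL_ERROR messages above; for example, passing -DCUBE_WSI_SELECTION=WAYLAND on the cmake command line selects the Wayland path.
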
diff --git a/src/third_party/vulkan-tools/src/cube/android/cube-with-layers/AndroidManifest.xml b/src/third_party/vulkan-tools/src/cube/android/cube-with-layers/AndroidManifest.xml
new file mode 100644
index 0000000..50b216e
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/android/cube-with-layers/AndroidManifest.xml
@@ -0,0 +1,27 @@
+<?xml version="1.0"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android" package="com.example.VkCubeWithLayers" android:versionCode="1" android:versionName="1.0">
+
+    <!-- Allow this app to read and write files (for use by tracing libraries). -->
+    <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
+    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
+    <uses-permission android:name="android.permission.INTERNET"/>
+
+    <!-- Minimum and target platform API levels for this sample. -->
+    <uses-sdk android:minSdkVersion="23" android:targetSdkVersion="23"/>
+
+    <!-- This .apk has no Java code itself, so set hasCode to false. -->
+    <application android:label="@string/app_name" android:hasCode="false" android:debuggable='true'>
+
+        <!-- Our activity is the built-in NativeActivity framework class.
+             This will take care of integrating with our NDK code. -->
+        <activity android:name="android.app.NativeActivity" android:label="@string/app_name" android:exported="true">
+            <!-- Tell NativeActivity the name of our .so -->
+            <meta-data android:name="android.app.lib_name" android:value="VkCube"/>
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN"/>
+                <category android:name="android.intent.category.LAUNCHER"/>
+            </intent-filter>
+        </activity>
+    </application>
+
+</manifest>
diff --git a/src/third_party/vulkan-tools/src/cube/android/cube-with-layers/custom_rules.xml b/src/third_party/vulkan-tools/src/cube/android/cube-with-layers/custom_rules.xml
new file mode 100644
index 0000000..0ca5933
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/android/cube-with-layers/custom_rules.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project name="NativeActivity" default="help">
+
+<property name="cubeWithLayersDir" location="../libs-with-layers" />
+<property name="cubeDir" location="../libs" />
+<property name="layersDir" location="../../../build-android/libs" />
+
+<echo>VkCubeWithLayers: Creating libs-with-layers</echo>
+<mkdir dir="${cubeWithLayersDir}"/>
+
+<echo>VkCubeWithLayers: Copying libs from demos/android</echo>
+<copy todir="${cubeWithLayersDir}">
+<fileset dir="${cubeDir}"/>
+</copy>
+
+<echo>VkCubeWithLayers: Copying layers from build-android</echo>
+<copy todir="${cubeWithLayersDir}">
+<fileset dir="${layersDir}"/>
+</copy>
+
+<!-- Point ndk-build at the libs-with-layers common dir -->
+<echo>VkCubeWithLayers: Overriding native.libs.absolute.dir with ${cubeWithLayersDir}</echo>
+<property name="native.libs.absolute.dir" location="${cubeWithLayersDir}" />
+
+</project>
diff --git a/src/third_party/vulkan-tools/src/cube/android/cube-with-layers/res/values/strings.xml b/src/third_party/vulkan-tools/src/cube/android/cube-with-layers/res/values/strings.xml
new file mode 100644
index 0000000..ecd779e
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/android/cube-with-layers/res/values/strings.xml
@@ -0,0 +1,24 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright 2013 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<!-- This file contains resource definitions for displayed strings, allowing
+     them to be changed based on the locale and options. -->
+
+<resources>
+    <!-- Simple strings. -->
+    <string name="app_name">VkCubeWithLayers</string>
+
+</resources>
diff --git a/src/third_party/vulkan-tools/src/cube/android/cube/AndroidManifest.xml b/src/third_party/vulkan-tools/src/cube/android/cube/AndroidManifest.xml
new file mode 100644
index 0000000..1bf9080
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/android/cube/AndroidManifest.xml
@@ -0,0 +1,27 @@
+<?xml version="1.0"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android" package="com.example.VkCube" android:versionCode="1" android:versionName="1.0">
+
+    <!-- Allow this app to read and write files (for use by tracing libraries). -->
+    <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
+    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
+    <uses-permission android:name="android.permission.INTERNET"/>
+
+    <!-- Minimum and target platform API levels for this sample. -->
+    <uses-sdk android:minSdkVersion="23" android:targetSdkVersion="23"/>
+
+    <!-- This .apk has no Java code itself, so set hasCode to false. -->
+    <application android:label="@string/app_name" android:hasCode="false" android:debuggable='true'>
+
+        <!-- Our activity is the built-in NativeActivity framework class.
+             This will take care of integrating with our NDK code. -->
+        <activity android:name="android.app.NativeActivity" android:label="@string/app_name" android:exported="true">
+            <!-- Tell NativeActivity the name of our .so -->
+            <meta-data android:name="android.app.lib_name" android:value="VkCube"/>
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN"/>
+                <category android:name="android.intent.category.LAUNCHER"/>
+            </intent-filter>
+        </activity>
+    </application>
+
+</manifest>
diff --git a/src/third_party/vulkan-tools/src/cube/android/cube/custom_rules.xml b/src/third_party/vulkan-tools/src/cube/android/cube/custom_rules.xml
new file mode 100644
index 0000000..31eb31a
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/android/cube/custom_rules.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project name="NativeActivity" default="help">
+<!-- Point ndk-build at the libs created in common dir -->
+<echo>vkcube: Overriding native.libs.absolute.dir with ../libs</echo>
+<property name="native.libs.absolute.dir" location="../libs" />
+</project>
diff --git a/src/third_party/vulkan-tools/src/cube/android/cube/res/values/strings.xml b/src/third_party/vulkan-tools/src/cube/android/cube/res/values/strings.xml
new file mode 100644
index 0000000..8adead2
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/android/cube/res/values/strings.xml
@@ -0,0 +1,24 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright 2013 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<!-- This file contains resource definitions for displayed strings, allowing
+     them to be changed based on the locale and options. -->
+
+<resources>
+    <!-- Simple strings. -->
+    <string name="app_name">VkCube</string>
+
+</resources>
diff --git a/src/third_party/vulkan-tools/src/cube/android/include/cube.frag.h b/src/third_party/vulkan-tools/src/cube/android/include/cube.frag.h
new file mode 100644
index 0000000..1e6fd4a
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/android/include/cube.frag.h
@@ -0,0 +1,73 @@
+#include <stdint.h>
+
+#if 0
+../cube.frag
+Warning, version 400 is not yet complete; most version-specific features are present, but some are missing.
+
+
+Linked fragment stage:
+
+
+// Module Version 10000
+// Generated by (magic number): 80001
+// Id's are bound by 21
+
+                              Capability Shader
+               1:             ExtInstImport  "GLSL.std.450"
+                              MemoryModel Logical GLSL450
+                              EntryPoint Fragment 4  "main" 9 16
+                              ExecutionMode 4 OriginUpperLeft
+                              Source GLSL 400
+                              SourceExtension  "GL_ARB_separate_shader_objects"
+                              SourceExtension  "GL_ARB_shading_language_420pack"
+                              Name 4  "main"
+                              Name 9  "uFragColor"
+                              Name 13  "tex"
+                              Name 16  "texcoord"
+                              Decorate 9(uFragColor) Location 0
+                              Decorate 13(tex) DescriptorSet 0
+                              Decorate 13(tex) Binding 1
+                              Decorate 16(texcoord) Location 0
+               2:             TypeVoid
+               3:             TypeFunction 2
+               6:             TypeFloat 32
+               7:             TypeVector 6(float) 4
+               8:             TypePointer Output 7(fvec4)
+   9(uFragColor):      8(ptr) Variable Output
+              10:             TypeImage 6(float) 2D sampled format:Unknown
+              11:             TypeSampledImage 10
+              12:             TypePointer UniformConstant 11
+         13(tex):     12(ptr) Variable UniformConstant
+              15:             TypePointer Input 7(fvec4)
+    16(texcoord):     15(ptr) Variable Input
+              17:             TypeVector 6(float) 2
+         4(main):           2 Function None 3
+               5:             Label
+              14:          11 Load 13(tex)
+              18:    7(fvec4) Load 16(texcoord)
+              19:   17(fvec2) VectorShuffle 18 18 0 1
+              20:    7(fvec4) ImageSampleImplicitLod 14 19
+                              Store 9(uFragColor) 20
+                              Return
+                              FunctionEnd
+#endif
+
+static const uint32_t cube_frag[164] = {
+    0x07230203, 0x00010000, 0x00080001, 0x00000015, 0x00000000, 0x00020011, 0x00000001, 0x0006000b, 0x00000001, 0x4c534c47,
+    0x6474732e, 0x3035342e, 0x00000000, 0x0003000e, 0x00000000, 0x00000001, 0x0007000f, 0x00000004, 0x00000004, 0x6e69616d,
+    0x00000000, 0x00000009, 0x00000010, 0x00030010, 0x00000004, 0x00000007, 0x00030003, 0x00000002, 0x00000190, 0x00090004,
+    0x415f4c47, 0x735f4252, 0x72617065, 0x5f657461, 0x64616873, 0x6f5f7265, 0x63656a62, 0x00007374, 0x00090004, 0x415f4c47,
+    0x735f4252, 0x69646168, 0x6c5f676e, 0x75676e61, 0x5f656761, 0x70303234, 0x006b6361, 0x00040005, 0x00000004, 0x6e69616d,
+    0x00000000, 0x00050005, 0x00000009, 0x61724675, 0x6c6f4367, 0x0000726f, 0x00030005, 0x0000000d, 0x00786574, 0x00050005,
+    0x00000010, 0x63786574, 0x64726f6f, 0x00000000, 0x00040047, 0x00000009, 0x0000001e, 0x00000000, 0x00040047, 0x0000000d,
+    0x00000022, 0x00000000, 0x00040047, 0x0000000d, 0x00000021, 0x00000001, 0x00040047, 0x00000010, 0x0000001e, 0x00000000,
+    0x00020013, 0x00000002, 0x00030021, 0x00000003, 0x00000002, 0x00030016, 0x00000006, 0x00000020, 0x00040017, 0x00000007,
+    0x00000006, 0x00000004, 0x00040020, 0x00000008, 0x00000003, 0x00000007, 0x0004003b, 0x00000008, 0x00000009, 0x00000003,
+    0x00090019, 0x0000000a, 0x00000006, 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000000, 0x0003001b,
+    0x0000000b, 0x0000000a, 0x00040020, 0x0000000c, 0x00000000, 0x0000000b, 0x0004003b, 0x0000000c, 0x0000000d, 0x00000000,
+    0x00040020, 0x0000000f, 0x00000001, 0x00000007, 0x0004003b, 0x0000000f, 0x00000010, 0x00000001, 0x00040017, 0x00000011,
+    0x00000006, 0x00000002, 0x00050036, 0x00000002, 0x00000004, 0x00000000, 0x00000003, 0x000200f8, 0x00000005, 0x0004003d,
+    0x0000000b, 0x0000000e, 0x0000000d, 0x0004003d, 0x00000007, 0x00000012, 0x00000010, 0x0007004f, 0x00000011, 0x00000013,
+    0x00000012, 0x00000012, 0x00000000, 0x00000001, 0x00050057, 0x00000007, 0x00000014, 0x0000000e, 0x00000013, 0x0003003e,
+    0x00000009, 0x00000014, 0x000100fd, 0x00010038,
+};
diff --git a/src/third_party/vulkan-tools/src/cube/android/include/cube.frag.inc b/src/third_party/vulkan-tools/src/cube/android/include/cube.frag.inc
new file mode 100644
index 0000000..e80adbb
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/android/include/cube.frag.inc
@@ -0,0 +1,22 @@
+	// Overload400-PrecQual.2000 12-Apr-2017
+	0x07230203,0x00010000,0x00080002,0x00000015,0x00000000,0x00020011,0x00000001,0x0006000b,
+	0x00000001,0x4c534c47,0x6474732e,0x3035342e,0x00000000,0x0003000e,0x00000000,0x00000001,
+	0x0007000f,0x00000004,0x00000004,0x6e69616d,0x00000000,0x00000009,0x00000010,0x00030010,
+	0x00000004,0x00000007,0x00030003,0x00000002,0x00000190,0x00090004,0x415f4c47,0x735f4252,
+	0x72617065,0x5f657461,0x64616873,0x6f5f7265,0x63656a62,0x00007374,0x00090004,0x415f4c47,
+	0x735f4252,0x69646168,0x6c5f676e,0x75676e61,0x5f656761,0x70303234,0x006b6361,0x00040005,
+	0x00000004,0x6e69616d,0x00000000,0x00050005,0x00000009,0x61724675,0x6c6f4367,0x0000726f,
+	0x00030005,0x0000000d,0x00786574,0x00050005,0x00000010,0x63786574,0x64726f6f,0x00000000,
+	0x00040047,0x00000009,0x0000001e,0x00000000,0x00040047,0x0000000d,0x00000022,0x00000000,
+	0x00040047,0x0000000d,0x00000021,0x00000001,0x00040047,0x00000010,0x0000001e,0x00000000,
+	0x00020013,0x00000002,0x00030021,0x00000003,0x00000002,0x00030016,0x00000006,0x00000020,
+	0x00040017,0x00000007,0x00000006,0x00000004,0x00040020,0x00000008,0x00000003,0x00000007,
+	0x0004003b,0x00000008,0x00000009,0x00000003,0x00090019,0x0000000a,0x00000006,0x00000001,
+	0x00000000,0x00000000,0x00000000,0x00000001,0x00000000,0x0003001b,0x0000000b,0x0000000a,
+	0x00040020,0x0000000c,0x00000000,0x0000000b,0x0004003b,0x0000000c,0x0000000d,0x00000000,
+	0x00040020,0x0000000f,0x00000001,0x00000007,0x0004003b,0x0000000f,0x00000010,0x00000001,
+	0x00040017,0x00000011,0x00000006,0x00000002,0x00050036,0x00000002,0x00000004,0x00000000,
+	0x00000003,0x000200f8,0x00000005,0x0004003d,0x0000000b,0x0000000e,0x0000000d,0x0004003d,
+	0x00000007,0x00000012,0x00000010,0x0007004f,0x00000011,0x00000013,0x00000012,0x00000012,
+	0x00000000,0x00000001,0x00050057,0x00000007,0x00000014,0x0000000e,0x00000013,0x0003003e,
+	0x00000009,0x00000014,0x000100fd,0x00010038
diff --git a/src/third_party/vulkan-tools/src/cube/android/include/cube.vert.h b/src/third_party/vulkan-tools/src/cube/android/include/cube.vert.h
new file mode 100644
index 0000000..4e349e7
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/android/include/cube.vert.h
@@ -0,0 +1,149 @@
+#include <stdint.h>
+
+#if 0
+../cube.vert
+Warning, version 400 is not yet complete; most version-specific features are present, but some are missing.
+
+
+Linked vertex stage:
+
+
+// Module Version 10000
+// Generated by (magic number): 80001
+// Id's are bound by 55
+
+                              Capability Shader
+               1:             ExtInstImport  "GLSL.std.450"
+                              MemoryModel Logical GLSL450
+                              EntryPoint Vertex 4  "main" 9 21 28
+                              Source GLSL 400
+                              SourceExtension  "GL_ARB_separate_shader_objects"
+                              SourceExtension  "GL_ARB_shading_language_420pack"
+                              Name 4  "main"
+                              Name 9  "texcoord"
+                              Name 15  "buf"
+                              MemberName 15(buf) 0  "MVP"
+                              MemberName 15(buf) 1  "position"
+                              MemberName 15(buf) 2  "attr"
+                              Name 17  "ubuf"
+                              Name 21  "gl_VertexIndex"
+                              Name 26  "gl_PerVertex"
+                              MemberName 26(gl_PerVertex) 0  "gl_Position"
+                              Name 28  ""
+                              Decorate 9(texcoord) Location 0
+                              Decorate 13 ArrayStride 16
+                              Decorate 14 ArrayStride 16
+                              MemberDecorate 15(buf) 0 ColMajor
+                              MemberDecorate 15(buf) 0 Offset 0
+                              MemberDecorate 15(buf) 0 MatrixStride 16
+                              MemberDecorate 15(buf) 1 Offset 64
+                              MemberDecorate 15(buf) 2 Offset 640
+                              Decorate 15(buf) Block
+                              Decorate 17(ubuf) DescriptorSet 0
+                              Decorate 17(ubuf) Binding 0
+                              Decorate 21(gl_VertexIndex) BuiltIn VertexIndex
+                              MemberDecorate 26(gl_PerVertex) 0 BuiltIn Position
+                              Decorate 26(gl_PerVertex) Block
+               2:             TypeVoid
+               3:             TypeFunction 2
+               6:             TypeFloat 32
+               7:             TypeVector 6(float) 4
+               8:             TypePointer Output 7(fvec4)
+     9(texcoord):      8(ptr) Variable Output
+              10:             TypeMatrix 7(fvec4) 4
+              11:             TypeInt 32 0
+              12:     11(int) Constant 36
+              13:             TypeArray 7(fvec4) 12
+              14:             TypeArray 7(fvec4) 12
+         15(buf):             TypeStruct 10 13 14
+              16:             TypePointer Uniform 15(buf)
+        17(ubuf):     16(ptr) Variable Uniform
+              18:             TypeInt 32 1
+              19:     18(int) Constant 2
+              20:             TypePointer Input 18(int)
+21(gl_VertexIndex):     20(ptr) Variable Input
+              23:             TypePointer Uniform 7(fvec4)
+26(gl_PerVertex):             TypeStruct 7(fvec4)
+              27:             TypePointer Output 26(gl_PerVertex)
+              28:     27(ptr) Variable Output
+              29:     18(int) Constant 0
+              30:             TypePointer Uniform 10
+              33:     18(int) Constant 1
+              39:     11(int) Constant 1
+              40:             TypePointer Output 6(float)
+              45:     11(int) Constant 2
+              48:     11(int) Constant 3
+              52:    6(float) Constant 1073741824
+         4(main):           2 Function None 3
+               5:             Label
+              22:     18(int) Load 21(gl_VertexIndex)
+              24:     23(ptr) AccessChain 17(ubuf) 19 22
+              25:    7(fvec4) Load 24
+                              Store 9(texcoord) 25
+              31:     30(ptr) AccessChain 17(ubuf) 29
+              32:          10 Load 31
+              34:     18(int) Load 21(gl_VertexIndex)
+              35:     23(ptr) AccessChain 17(ubuf) 33 34
+              36:    7(fvec4) Load 35
+              37:    7(fvec4) MatrixTimesVector 32 36
+              38:      8(ptr) AccessChain 28 29
+                              Store 38 37
+              41:     40(ptr) AccessChain 28 29 39
+              42:    6(float) Load 41
+              43:    6(float) FNegate 42
+              44:     40(ptr) AccessChain 28 29 39
+                              Store 44 43
+              46:     40(ptr) AccessChain 28 29 45
+              47:    6(float) Load 46
+              49:     40(ptr) AccessChain 28 29 48
+              50:    6(float) Load 49
+              51:    6(float) FAdd 47 50
+              53:    6(float) FDiv 51 52
+              54:     40(ptr) AccessChain 28 29 45
+                              Store 54 53
+                              Return
+                              FunctionEnd
+#endif
+
+static const uint32_t cube_vert[396] = {
+    0x07230203, 0x00010000, 0x00080001, 0x00000037, 0x00000000, 0x00020011, 0x00000001, 0x0006000b, 0x00000001, 0x4c534c47,
+    0x6474732e, 0x3035342e, 0x00000000, 0x0003000e, 0x00000000, 0x00000001, 0x0008000f, 0x00000000, 0x00000004, 0x6e69616d,
+    0x00000000, 0x00000009, 0x00000015, 0x0000001c, 0x00030003, 0x00000002, 0x00000190, 0x00090004, 0x415f4c47, 0x735f4252,
+    0x72617065, 0x5f657461, 0x64616873, 0x6f5f7265, 0x63656a62, 0x00007374, 0x00090004, 0x415f4c47, 0x735f4252, 0x69646168,
+    0x6c5f676e, 0x75676e61, 0x5f656761, 0x70303234, 0x006b6361, 0x00040005, 0x00000004, 0x6e69616d, 0x00000000, 0x00050005,
+    0x00000009, 0x63786574, 0x64726f6f, 0x00000000, 0x00030005, 0x0000000f, 0x00667562, 0x00040006, 0x0000000f, 0x00000000,
+    0x0050564d, 0x00060006, 0x0000000f, 0x00000001, 0x69736f70, 0x6e6f6974, 0x00000000, 0x00050006, 0x0000000f, 0x00000002,
+    0x72747461, 0x00000000, 0x00040005, 0x00000011, 0x66756275, 0x00000000, 0x00060005, 0x00000015, 0x565f6c67, 0x65747265,
+    0x646e4978, 0x00007865, 0x00060005, 0x0000001a, 0x505f6c67, 0x65567265, 0x78657472, 0x00000000, 0x00060006, 0x0000001a,
+    0x00000000, 0x505f6c67, 0x7469736f, 0x006e6f69, 0x00030005, 0x0000001c, 0x00000000, 0x00040047, 0x00000009, 0x0000001e,
+    0x00000000, 0x00040047, 0x0000000d, 0x00000006, 0x00000010, 0x00040047, 0x0000000e, 0x00000006, 0x00000010, 0x00040048,
+    0x0000000f, 0x00000000, 0x00000005, 0x00050048, 0x0000000f, 0x00000000, 0x00000023, 0x00000000, 0x00050048, 0x0000000f,
+    0x00000000, 0x00000007, 0x00000010, 0x00050048, 0x0000000f, 0x00000001, 0x00000023, 0x00000040, 0x00050048, 0x0000000f,
+    0x00000002, 0x00000023, 0x00000280, 0x00030047, 0x0000000f, 0x00000002, 0x00040047, 0x00000011, 0x00000022, 0x00000000,
+    0x00040047, 0x00000011, 0x00000021, 0x00000000, 0x00040047, 0x00000015, 0x0000000b, 0x0000002a, 0x00050048, 0x0000001a,
+    0x00000000, 0x0000000b, 0x00000000, 0x00030047, 0x0000001a, 0x00000002, 0x00020013, 0x00000002, 0x00030021, 0x00000003,
+    0x00000002, 0x00030016, 0x00000006, 0x00000020, 0x00040017, 0x00000007, 0x00000006, 0x00000004, 0x00040020, 0x00000008,
+    0x00000003, 0x00000007, 0x0004003b, 0x00000008, 0x00000009, 0x00000003, 0x00040018, 0x0000000a, 0x00000007, 0x00000004,
+    0x00040015, 0x0000000b, 0x00000020, 0x00000000, 0x0004002b, 0x0000000b, 0x0000000c, 0x00000024, 0x0004001c, 0x0000000d,
+    0x00000007, 0x0000000c, 0x0004001c, 0x0000000e, 0x00000007, 0x0000000c, 0x0005001e, 0x0000000f, 0x0000000a, 0x0000000d,
+    0x0000000e, 0x00040020, 0x00000010, 0x00000002, 0x0000000f, 0x0004003b, 0x00000010, 0x00000011, 0x00000002, 0x00040015,
+    0x00000012, 0x00000020, 0x00000001, 0x0004002b, 0x00000012, 0x00000013, 0x00000002, 0x00040020, 0x00000014, 0x00000001,
+    0x00000012, 0x0004003b, 0x00000014, 0x00000015, 0x00000001, 0x00040020, 0x00000017, 0x00000002, 0x00000007, 0x0003001e,
+    0x0000001a, 0x00000007, 0x00040020, 0x0000001b, 0x00000003, 0x0000001a, 0x0004003b, 0x0000001b, 0x0000001c, 0x00000003,
+    0x0004002b, 0x00000012, 0x0000001d, 0x00000000, 0x00040020, 0x0000001e, 0x00000002, 0x0000000a, 0x0004002b, 0x00000012,
+    0x00000021, 0x00000001, 0x0004002b, 0x0000000b, 0x00000027, 0x00000001, 0x00040020, 0x00000028, 0x00000003, 0x00000006,
+    0x0004002b, 0x0000000b, 0x0000002d, 0x00000002, 0x0004002b, 0x0000000b, 0x00000030, 0x00000003, 0x0004002b, 0x00000006,
+    0x00000034, 0x40000000, 0x00050036, 0x00000002, 0x00000004, 0x00000000, 0x00000003, 0x000200f8, 0x00000005, 0x0004003d,
+    0x00000012, 0x00000016, 0x00000015, 0x00060041, 0x00000017, 0x00000018, 0x00000011, 0x00000013, 0x00000016, 0x0004003d,
+    0x00000007, 0x00000019, 0x00000018, 0x0003003e, 0x00000009, 0x00000019, 0x00050041, 0x0000001e, 0x0000001f, 0x00000011,
+    0x0000001d, 0x0004003d, 0x0000000a, 0x00000020, 0x0000001f, 0x0004003d, 0x00000012, 0x00000022, 0x00000015, 0x00060041,
+    0x00000017, 0x00000023, 0x00000011, 0x00000021, 0x00000022, 0x0004003d, 0x00000007, 0x00000024, 0x00000023, 0x00050091,
+    0x00000007, 0x00000025, 0x00000020, 0x00000024, 0x00050041, 0x00000008, 0x00000026, 0x0000001c, 0x0000001d, 0x0003003e,
+    0x00000026, 0x00000025, 0x00060041, 0x00000028, 0x00000029, 0x0000001c, 0x0000001d, 0x00000027, 0x0004003d, 0x00000006,
+    0x0000002a, 0x00000029, 0x0004007f, 0x00000006, 0x0000002b, 0x0000002a, 0x00060041, 0x00000028, 0x0000002c, 0x0000001c,
+    0x0000001d, 0x00000027, 0x0003003e, 0x0000002c, 0x0000002b, 0x00060041, 0x00000028, 0x0000002e, 0x0000001c, 0x0000001d,
+    0x0000002d, 0x0004003d, 0x00000006, 0x0000002f, 0x0000002e, 0x00060041, 0x00000028, 0x00000031, 0x0000001c, 0x0000001d,
+    0x00000030, 0x0004003d, 0x00000006, 0x00000032, 0x00000031, 0x00050081, 0x00000006, 0x00000033, 0x0000002f, 0x00000032,
+    0x00050088, 0x00000006, 0x00000035, 0x00000033, 0x00000034, 0x00060041, 0x00000028, 0x00000036, 0x0000001c, 0x0000001d,
+    0x0000002d, 0x0003003e, 0x00000036, 0x00000035, 0x000100fd, 0x00010038,
+};
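+
+// A minimal usage sketch (assumed; `device` stands for any valid VkDevice):
+//
+//     VkShaderModuleCreateInfo info = {
+//         .sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
+//         .codeSize = sizeof(cube_vert),  // size in bytes, not words
+//         .pCode = cube_vert,
+//     };
+//     VkShaderModule module;
+//     VkResult res = vkCreateShaderModule(device, &info, NULL, &module);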
diff --git a/src/third_party/vulkan-tools/src/cube/android/include/cube.vert.inc b/src/third_party/vulkan-tools/src/cube/android/include/cube.vert.inc
new file mode 100644
index 0000000..bbfc192
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/android/include/cube.vert.inc
@@ -0,0 +1,45 @@
+	// Overload400-PrecQual.2000 12-Apr-2017
+	0x07230203,0x00010000,0x00080002,0x00000029,0x00000000,0x00020011,0x00000001,0x0006000b,
+	0x00000001,0x4c534c47,0x6474732e,0x3035342e,0x00000000,0x0003000e,0x00000000,0x00000001,
+	0x0008000f,0x00000000,0x00000004,0x6e69616d,0x00000000,0x00000009,0x00000015,0x0000001e,
+	0x00030003,0x00000002,0x00000190,0x00090004,0x415f4c47,0x735f4252,0x72617065,0x5f657461,
+	0x64616873,0x6f5f7265,0x63656a62,0x00007374,0x00090004,0x415f4c47,0x735f4252,0x69646168,
+	0x6c5f676e,0x75676e61,0x5f656761,0x70303234,0x006b6361,0x00040005,0x00000004,0x6e69616d,
+	0x00000000,0x00050005,0x00000009,0x63786574,0x64726f6f,0x00000000,0x00030005,0x0000000f,
+	0x00667562,0x00040006,0x0000000f,0x00000000,0x0050564d,0x00060006,0x0000000f,0x00000001,
+	0x69736f70,0x6e6f6974,0x00000000,0x00050006,0x0000000f,0x00000002,0x72747461,0x00000000,
+	0x00040005,0x00000011,0x66756275,0x00000000,0x00060005,0x00000015,0x565f6c67,0x65747265,
+	0x646e4978,0x00007865,0x00060005,0x0000001c,0x505f6c67,0x65567265,0x78657472,0x00000000,
+	0x00060006,0x0000001c,0x00000000,0x505f6c67,0x7469736f,0x006e6f69,0x00070006,0x0000001c,
+	0x00000001,0x505f6c67,0x746e696f,0x657a6953,0x00000000,0x00070006,0x0000001c,0x00000002,
+	0x435f6c67,0x4470696c,0x61747369,0x0065636e,0x00030005,0x0000001e,0x00000000,0x00040047,
+	0x00000009,0x0000001e,0x00000000,0x00040047,0x0000000d,0x00000006,0x00000010,0x00040047,
+	0x0000000e,0x00000006,0x00000010,0x00040048,0x0000000f,0x00000000,0x00000005,0x00050048,
+	0x0000000f,0x00000000,0x00000023,0x00000000,0x00050048,0x0000000f,0x00000000,0x00000007,
+	0x00000010,0x00050048,0x0000000f,0x00000001,0x00000023,0x00000040,0x00050048,0x0000000f,
+	0x00000002,0x00000023,0x00000280,0x00030047,0x0000000f,0x00000002,0x00040047,0x00000011,
+	0x00000022,0x00000000,0x00040047,0x00000011,0x00000021,0x00000000,0x00040047,0x00000015,
+	0x0000000b,0x0000002a,0x00050048,0x0000001c,0x00000000,0x0000000b,0x00000000,0x00050048,
+	0x0000001c,0x00000001,0x0000000b,0x00000001,0x00050048,0x0000001c,0x00000002,0x0000000b,
+	0x00000003,0x00030047,0x0000001c,0x00000002,0x00020013,0x00000002,0x00030021,0x00000003,
+	0x00000002,0x00030016,0x00000006,0x00000020,0x00040017,0x00000007,0x00000006,0x00000004,
+	0x00040020,0x00000008,0x00000003,0x00000007,0x0004003b,0x00000008,0x00000009,0x00000003,
+	0x00040018,0x0000000a,0x00000007,0x00000004,0x00040015,0x0000000b,0x00000020,0x00000000,
+	0x0004002b,0x0000000b,0x0000000c,0x00000024,0x0004001c,0x0000000d,0x00000007,0x0000000c,
+	0x0004001c,0x0000000e,0x00000007,0x0000000c,0x0005001e,0x0000000f,0x0000000a,0x0000000d,
+	0x0000000e,0x00040020,0x00000010,0x00000002,0x0000000f,0x0004003b,0x00000010,0x00000011,
+	0x00000002,0x00040015,0x00000012,0x00000020,0x00000001,0x0004002b,0x00000012,0x00000013,
+	0x00000002,0x00040020,0x00000014,0x00000001,0x00000012,0x0004003b,0x00000014,0x00000015,
+	0x00000001,0x00040020,0x00000017,0x00000002,0x00000007,0x0004002b,0x0000000b,0x0000001a,
+	0x00000001,0x0004001c,0x0000001b,0x00000006,0x0000001a,0x0005001e,0x0000001c,0x00000007,
+	0x00000006,0x0000001b,0x00040020,0x0000001d,0x00000003,0x0000001c,0x0004003b,0x0000001d,
+	0x0000001e,0x00000003,0x0004002b,0x00000012,0x0000001f,0x00000000,0x00040020,0x00000020,
+	0x00000002,0x0000000a,0x0004002b,0x00000012,0x00000023,0x00000001,0x00050036,0x00000002,
+	0x00000004,0x00000000,0x00000003,0x000200f8,0x00000005,0x0004003d,0x00000012,0x00000016,
+	0x00000015,0x00060041,0x00000017,0x00000018,0x00000011,0x00000013,0x00000016,0x0004003d,
+	0x00000007,0x00000019,0x00000018,0x0003003e,0x00000009,0x00000019,0x00050041,0x00000020,
+	0x00000021,0x00000011,0x0000001f,0x0004003d,0x0000000a,0x00000022,0x00000021,0x0004003d,
+	0x00000012,0x00000024,0x00000015,0x00060041,0x00000017,0x00000025,0x00000011,0x00000023,
+	0x00000024,0x0004003d,0x00000007,0x00000026,0x00000025,0x00050091,0x00000007,0x00000027,
+	0x00000022,0x00000026,0x00050041,0x00000008,0x00000028,0x0000001e,0x0000001f,0x0003003e,
+	0x00000028,0x00000027,0x000100fd,0x00010038
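+	// (The word list above is intended to be textually #include'd inside a
+	// uint32_t array initializer by the consuming source file; that consumer
+	// is assumed and not part of this diff.)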
diff --git a/src/third_party/vulkan-tools/src/cube/android/jni/Android.mk b/src/third_party/vulkan-tools/src/cube/android/jni/Android.mk
new file mode 100644
index 0000000..cc4b93c
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/android/jni/Android.mk
@@ -0,0 +1,36 @@
+# Copyright 2015 The Android Open Source Project
+# Copyright (C) 2015 Valve Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#      http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+LOCAL_PATH := $(abspath $(call my-dir))
+SRC_DIR := $(LOCAL_PATH)/../../..
+DEMO_DIR := $(SRC_DIR)/cube
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := VkCube
+LOCAL_SRC_FILES += $(DEMO_DIR)/cube.c \
+                   $(SRC_DIR)/common/vulkan_wrapper.cpp \
+                   $(SRC_DIR)/common/android_util.cpp
+LOCAL_C_INCLUDES += $(SRC_DIR)/build-android/third_party/Vulkan-Headers/include \
+                    $(DEMO_DIR)/android/include \
+                    $(SRC_DIR)/libs \
+                    $(SRC_DIR)/common \
+                    $(SRC_DIR)/build-android/generated/include
+LOCAL_CFLAGS += -DVK_USE_PLATFORM_ANDROID_KHR --include=$(SRC_DIR)/common/vulkan_wrapper.h
+LOCAL_WHOLE_STATIC_LIBRARIES += android_native_app_glue
+LOCAL_LDLIBS    := -llog -landroid
+LOCAL_LDFLAGS   := -u ANativeActivity_onCreate
+include $(BUILD_SHARED_LIBRARY)
+
+$(call import-module,android/native_app_glue)
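+
+# Assumed build invocation (not spelled out in this file): running ndk-build from
+# the cube/android directory reads this Android.mk together with
+# jni/Application.mk and produces libVkCube.so for every ABI listed in APP_ABI.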
diff --git a/src/third_party/vulkan-tools/src/cube/android/jni/Application.mk b/src/third_party/vulkan-tools/src/cube/android/jni/Application.mk
new file mode 100644
index 0000000..a3d9754
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/android/jni/Application.mk
@@ -0,0 +1,22 @@
+# Copyright 2015 The Android Open Source Project
+# Copyright (C) 2015 Valve Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#      http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+APP_ABI := armeabi-v7a arm64-v8a x86 x86_64
+APP_PLATFORM := android-23
+APP_STL := c++_static
+APP_MODULES := VkCube
+APP_CPPFLAGS += -std=c++11 -fexceptions -Wall -Werror -Wextra -Wno-unused-parameter -DVK_NO_PROTOTYPES -DGLM_FORCE_RADIANS
+APP_CFLAGS += -Wall -Werror -Wextra -Wno-unused-parameter -DVK_NO_PROTOTYPES -DGLM_FORCE_RADIANS
+NDK_TOOLCHAIN_VERSION := clang
diff --git a/src/third_party/vulkan-tools/src/cube/cube.c b/src/third_party/vulkan-tools/src/cube/cube.c
new file mode 100644
index 0000000..a7b097b
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/cube.c
@@ -0,0 +1,4018 @@
+/*
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Chia-I Wu <olv@lunarg.com>
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Ian Elliott <ian@LunarG.com>
+ * Author: Ian Elliott <ianelliott@google.com>
+ * Author: Jon Ashburn <jon@lunarg.com>
+ * Author: Gwan-gyeong Mun <elongbug@gmail.com>
+ * Author: Tony Barbour <tony@LunarG.com>
+ * Author: Bill Hollings <bill.hollings@brenwill.com>
+ */
+
+#define _GNU_SOURCE
+#include <stdio.h>
+#include <stdarg.h>
+#include <stdlib.h>
+#include <string.h>
+#include <stdbool.h>
+#include <assert.h>
+#include <signal.h>
+#if defined(VK_USE_PLATFORM_XLIB_KHR) || defined(VK_USE_PLATFORM_XCB_KHR)
+#include <X11/Xutil.h>
+#elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
+#include <linux/input.h>
+#endif
+
+#ifdef _WIN32
+#pragma comment(linker, "/subsystem:windows")
+#define APP_NAME_STR_LEN 80
+#endif  // _WIN32
+
+#ifdef ANDROID
+#include "vulkan_wrapper.h"
+#else
+#include <vulkan/vulkan.h>
+#endif
+
+#include <vulkan/vk_sdk_platform.h>
+#include "linmath.h"
+#include "object_type_string_helper.h"
+
+#include "gettime.h"
+#include "inttypes.h"
+#define MILLION 1000000L
+#define BILLION 1000000000L
+
+#define DEMO_TEXTURE_COUNT 1
+#define APP_SHORT_NAME "vkcube"
+#define APP_LONG_NAME "Vulkan Cube"
+
+// Allow a maximum of two outstanding presentation operations.
+#define FRAME_LAG 2
+
+#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))
+
+#if defined(NDEBUG) && defined(__GNUC__)
+#define U_ASSERT_ONLY __attribute__((unused))
+#else
+#define U_ASSERT_ONLY
+#endif
+
+#if defined(__GNUC__)
+#define UNUSED __attribute__((unused))
+#else
+#define UNUSED
+#endif
+
+#ifdef _WIN32
+bool in_callback = false;
+#define ERR_EXIT(err_msg, err_class)                                             \
+    do {                                                                         \
+        if (!demo->suppress_popups) MessageBox(NULL, err_msg, err_class, MB_OK); \
+        exit(1);                                                                 \
+    } while (0)
+void DbgMsg(char *fmt, ...) {
+    va_list va;
+    va_start(va, fmt);
+    vprintf(fmt, va);
+    va_end(va);
+    fflush(stdout);
+}
+
+#elif defined __ANDROID__
+#include <android/log.h>
+#define ERR_EXIT(err_msg, err_class)                                           \
+    do {                                                                       \
+        ((void)__android_log_print(ANDROID_LOG_INFO, "Vulkan Cube", err_msg)); \
+        exit(1);                                                               \
+    } while (0)
+#ifdef VARARGS_WORKS_ON_ANDROID
+void DbgMsg(const char *fmt, ...) {
+    va_list va;
+    va_start(va, fmt);
+    __android_log_print(ANDROID_LOG_INFO, "Vulkan Cube", fmt, va);
+    va_end(va);
+}
+#else  // VARARGS_WORKS_ON_ANDROID
+#define DbgMsg(fmt, ...)                                                                  \
+    do {                                                                                  \
+        ((void)__android_log_print(ANDROID_LOG_INFO, "Vulkan Cube", fmt, ##__VA_ARGS__)); \
+    } while (0)
+#endif  // VARARGS_WORKS_ON_ANDROID
+#else
+#define ERR_EXIT(err_msg, err_class) \
+    do {                             \
+        printf("%s\n", err_msg);     \
+        fflush(stdout);              \
+        exit(1);                     \
+    } while (0)
+void DbgMsg(char *fmt, ...) {
+    va_list va;
+    va_start(va, fmt);
+    vprintf(fmt, va);
+    va_end(va);
+    fflush(stdout);
+}
+#endif
+
+#define GET_INSTANCE_PROC_ADDR(inst, entrypoint)                                                              \
+    {                                                                                                         \
+        demo->fp##entrypoint = (PFN_vk##entrypoint)vkGetInstanceProcAddr(inst, "vk" #entrypoint);             \
+        if (demo->fp##entrypoint == NULL) {                                                                   \
+            ERR_EXIT("vkGetInstanceProcAddr failed to find vk" #entrypoint, "vkGetInstanceProcAddr Failure"); \
+        }                                                                                                     \
+    }
+
+static PFN_vkGetDeviceProcAddr g_gdpa = NULL;
+
+#define GET_DEVICE_PROC_ADDR(dev, entrypoint)                                                                    \
+    {                                                                                                            \
+        if (!g_gdpa) g_gdpa = (PFN_vkGetDeviceProcAddr)vkGetInstanceProcAddr(demo->inst, "vkGetDeviceProcAddr"); \
+        demo->fp##entrypoint = (PFN_vk##entrypoint)g_gdpa(dev, "vk" #entrypoint);                                \
+        if (demo->fp##entrypoint == NULL) {                                                                      \
+            ERR_EXIT("vkGetDeviceProcAddr failed to find vk" #entrypoint, "vkGetDeviceProcAddr Failure");        \
+        }                                                                                                        \
+    }
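+
+// Usage sketch (assumed call sites): the macros above fill in the fp* members of
+// struct demo defined below, and ERR_EXIT if an entry point cannot be resolved:
+//
+//     GET_INSTANCE_PROC_ADDR(demo->inst, GetPhysicalDeviceSurfaceCapabilitiesKHR);
+//     GET_DEVICE_PROC_ADDR(demo->device, CreateSwapchainKHR);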
+
+/*
+ * structure to track all objects related to a texture.
+ */
+struct texture_object {
+    VkSampler sampler;
+
+    VkImage image;
+    VkBuffer buffer;
+    VkImageLayout imageLayout;
+
+    VkMemoryAllocateInfo mem_alloc;
+    VkDeviceMemory mem;
+    VkImageView view;
+    int32_t tex_width, tex_height;
+};
+
+static char *tex_files[] = {"lunarg.ppm"};
+
+static int validation_error = 0;
+
+struct vktexcube_vs_uniform {
+    // Must start with MVP
+    float mvp[4][4];
+    float position[12 * 3][4];
+    float attr[12 * 3][4];
+};
+
+//--------------------------------------------------------------------------------------
+// Mesh and VertexFormat Data
+//--------------------------------------------------------------------------------------
+// clang-format off
+static const float g_vertex_buffer_data[] = {
+    -1.0f,-1.0f,-1.0f,  // -X side
+    -1.0f,-1.0f, 1.0f,
+    -1.0f, 1.0f, 1.0f,
+    -1.0f, 1.0f, 1.0f,
+    -1.0f, 1.0f,-1.0f,
+    -1.0f,-1.0f,-1.0f,
+
+    -1.0f,-1.0f,-1.0f,  // -Z side
+     1.0f, 1.0f,-1.0f,
+     1.0f,-1.0f,-1.0f,
+    -1.0f,-1.0f,-1.0f,
+    -1.0f, 1.0f,-1.0f,
+     1.0f, 1.0f,-1.0f,
+
+    -1.0f,-1.0f,-1.0f,  // -Y side
+     1.0f,-1.0f,-1.0f,
+     1.0f,-1.0f, 1.0f,
+    -1.0f,-1.0f,-1.0f,
+     1.0f,-1.0f, 1.0f,
+    -1.0f,-1.0f, 1.0f,
+
+    -1.0f, 1.0f,-1.0f,  // +Y side
+    -1.0f, 1.0f, 1.0f,
+     1.0f, 1.0f, 1.0f,
+    -1.0f, 1.0f,-1.0f,
+     1.0f, 1.0f, 1.0f,
+     1.0f, 1.0f,-1.0f,
+
+     1.0f, 1.0f,-1.0f,  // +X side
+     1.0f, 1.0f, 1.0f,
+     1.0f,-1.0f, 1.0f,
+     1.0f,-1.0f, 1.0f,
+     1.0f,-1.0f,-1.0f,
+     1.0f, 1.0f,-1.0f,
+
+    -1.0f, 1.0f, 1.0f,  // +Z side
+    -1.0f,-1.0f, 1.0f,
+     1.0f, 1.0f, 1.0f,
+    -1.0f,-1.0f, 1.0f,
+     1.0f,-1.0f, 1.0f,
+     1.0f, 1.0f, 1.0f,
+};
+
+static const float g_uv_buffer_data[] = {
+    0.0f, 1.0f,  // -X side
+    1.0f, 1.0f,
+    1.0f, 0.0f,
+    1.0f, 0.0f,
+    0.0f, 0.0f,
+    0.0f, 1.0f,
+
+    1.0f, 1.0f,  // -Z side
+    0.0f, 0.0f,
+    0.0f, 1.0f,
+    1.0f, 1.0f,
+    1.0f, 0.0f,
+    0.0f, 0.0f,
+
+    1.0f, 0.0f,  // -Y side
+    1.0f, 1.0f,
+    0.0f, 1.0f,
+    1.0f, 0.0f,
+    0.0f, 1.0f,
+    0.0f, 0.0f,
+
+    1.0f, 0.0f,  // +Y side
+    0.0f, 0.0f,
+    0.0f, 1.0f,
+    1.0f, 0.0f,
+    0.0f, 1.0f,
+    1.0f, 1.0f,
+
+    1.0f, 0.0f,  // +X side
+    0.0f, 0.0f,
+    0.0f, 1.0f,
+    0.0f, 1.0f,
+    1.0f, 1.0f,
+    1.0f, 0.0f,
+
+    0.0f, 0.0f,  // +Z side
+    0.0f, 1.0f,
+    1.0f, 0.0f,
+    0.0f, 1.0f,
+    1.0f, 1.0f,
+    1.0f, 0.0f,
+};
+// clang-format on
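+
+// 6 faces x 2 triangles x 3 vertices = 36 vertices in each table above (three
+// floats per position, two per UV), matching the [12 * 3] arrays in
+// struct vktexcube_vs_uniform and the 12 * 3 vertex count drawn per frame.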
+
+void dumpMatrix(const char *note, mat4x4 MVP) {
+    int i;
+
+    printf("%s: \n", note);
+    for (i = 0; i < 4; i++) {
+        printf("%f, %f, %f, %f\n", MVP[i][0], MVP[i][1], MVP[i][2], MVP[i][3]);
+    }
+    printf("\n");
+    fflush(stdout);
+}
+
+void dumpVec4(const char *note, vec4 vector) {
+    printf("%s: \n", note);
+    printf("%f, %f, %f, %f\n", vector[0], vector[1], vector[2], vector[3]);
+    printf("\n");
+    fflush(stdout);
+}
+
+typedef struct {
+    VkImage image;
+    VkCommandBuffer cmd;
+    VkCommandBuffer graphics_to_present_cmd;
+    VkImageView view;
+    VkBuffer uniform_buffer;
+    VkDeviceMemory uniform_memory;
+    VkFramebuffer framebuffer;
+    VkDescriptorSet descriptor_set;
+} SwapchainImageResources;
+
+struct demo {
+#if defined(VK_USE_PLATFORM_WIN32_KHR)
+#define APP_NAME_STR_LEN 80
+    HINSTANCE connection;         // hInstance - Windows Instance
+    char name[APP_NAME_STR_LEN];  // Name to put on the window/icon
+    HWND window;                  // hWnd - window handle
+    POINT minsize;                // minimum window size
+#elif defined(VK_USE_PLATFORM_XLIB_KHR)
+    Display *display;
+    Window xlib_window;
+    Atom xlib_wm_delete_window;
+#elif defined(VK_USE_PLATFORM_XCB_KHR)
+    Display *display;
+    xcb_connection_t *connection;
+    xcb_screen_t *screen;
+    xcb_window_t xcb_window;
+    xcb_intern_atom_reply_t *atom_wm_delete_window;
+#elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
+    struct wl_display *display;
+    struct wl_registry *registry;
+    struct wl_compositor *compositor;
+    struct wl_surface *window;
+    struct wl_shell *shell;
+    struct wl_shell_surface *shell_surface;
+    struct wl_seat *seat;
+    struct wl_pointer *pointer;
+    struct wl_keyboard *keyboard;
+#elif defined(VK_USE_PLATFORM_ANDROID_KHR)
+    struct ANativeWindow *window;
+#elif defined(VK_USE_PLATFORM_METAL_EXT)
+    void *caMetalLayer;
+#endif
+    VkSurfaceKHR surface;
+    bool prepared;
+    bool use_staging_buffer;
+    bool separate_present_queue;
+    bool is_minimized;
+
+    bool VK_KHR_incremental_present_enabled;
+
+    bool VK_GOOGLE_display_timing_enabled;
+    bool syncd_with_actual_presents;
+    uint64_t refresh_duration;
+    uint64_t refresh_duration_multiplier;
+    uint64_t target_IPD;  // image present duration (inverse of frame rate)
+    uint64_t prev_desired_present_time;
+    uint32_t next_present_id;
+    uint32_t last_early_id;  // 0 if no early images
+    uint32_t last_late_id;   // 0 if no late images
+
+    VkInstance inst;
+    VkPhysicalDevice gpu;
+    VkDevice device;
+    VkQueue graphics_queue;
+    VkQueue present_queue;
+    uint32_t graphics_queue_family_index;
+    uint32_t present_queue_family_index;
+    VkSemaphore image_acquired_semaphores[FRAME_LAG];
+    VkSemaphore draw_complete_semaphores[FRAME_LAG];
+    VkSemaphore image_ownership_semaphores[FRAME_LAG];
+    VkPhysicalDeviceProperties gpu_props;
+    VkQueueFamilyProperties *queue_props;
+    VkPhysicalDeviceMemoryProperties memory_properties;
+
+    uint32_t enabled_extension_count;
+    uint32_t enabled_layer_count;
+    char *extension_names[64];
+    char *enabled_layers[64];
+
+    int width, height;
+    VkFormat format;
+    VkColorSpaceKHR color_space;
+
+    PFN_vkGetPhysicalDeviceSurfaceSupportKHR fpGetPhysicalDeviceSurfaceSupportKHR;
+    PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR fpGetPhysicalDeviceSurfaceCapabilitiesKHR;
+    PFN_vkGetPhysicalDeviceSurfaceFormatsKHR fpGetPhysicalDeviceSurfaceFormatsKHR;
+    PFN_vkGetPhysicalDeviceSurfacePresentModesKHR fpGetPhysicalDeviceSurfacePresentModesKHR;
+    PFN_vkCreateSwapchainKHR fpCreateSwapchainKHR;
+    PFN_vkDestroySwapchainKHR fpDestroySwapchainKHR;
+    PFN_vkGetSwapchainImagesKHR fpGetSwapchainImagesKHR;
+    PFN_vkAcquireNextImageKHR fpAcquireNextImageKHR;
+    PFN_vkQueuePresentKHR fpQueuePresentKHR;
+    PFN_vkGetRefreshCycleDurationGOOGLE fpGetRefreshCycleDurationGOOGLE;
+    PFN_vkGetPastPresentationTimingGOOGLE fpGetPastPresentationTimingGOOGLE;
+    uint32_t swapchainImageCount;
+    VkSwapchainKHR swapchain;
+    SwapchainImageResources *swapchain_image_resources;
+    VkPresentModeKHR presentMode;
+    VkFence fences[FRAME_LAG];
+    int frame_index;
+
+    VkCommandPool cmd_pool;
+    VkCommandPool present_cmd_pool;
+
+    struct {
+        VkFormat format;
+
+        VkImage image;
+        VkMemoryAllocateInfo mem_alloc;
+        VkDeviceMemory mem;
+        VkImageView view;
+    } depth;
+
+    struct texture_object textures[DEMO_TEXTURE_COUNT];
+    struct texture_object staging_texture;
+
+    VkCommandBuffer cmd;  // Buffer for initialization commands
+    VkPipelineLayout pipeline_layout;
+    VkDescriptorSetLayout desc_layout;
+    VkPipelineCache pipelineCache;
+    VkRenderPass render_pass;
+    VkPipeline pipeline;
+
+    mat4x4 projection_matrix;
+    mat4x4 view_matrix;
+    mat4x4 model_matrix;
+
+    float spin_angle;
+    float spin_increment;
+    bool pause;
+
+    VkShaderModule vert_shader_module;
+    VkShaderModule frag_shader_module;
+
+    VkDescriptorPool desc_pool;
+
+    bool quit;
+    int32_t curFrame;
+    int32_t frameCount;
+    bool validate;
+    bool validate_checks_disabled;
+    bool use_break;
+    bool suppress_popups;
+
+    PFN_vkCreateDebugUtilsMessengerEXT CreateDebugUtilsMessengerEXT;
+    PFN_vkDestroyDebugUtilsMessengerEXT DestroyDebugUtilsMessengerEXT;
+    PFN_vkSubmitDebugUtilsMessageEXT SubmitDebugUtilsMessageEXT;
+    PFN_vkCmdBeginDebugUtilsLabelEXT CmdBeginDebugUtilsLabelEXT;
+    PFN_vkCmdEndDebugUtilsLabelEXT CmdEndDebugUtilsLabelEXT;
+    PFN_vkCmdInsertDebugUtilsLabelEXT CmdInsertDebugUtilsLabelEXT;
+    PFN_vkSetDebugUtilsObjectNameEXT SetDebugUtilsObjectNameEXT;
+    VkDebugUtilsMessengerEXT dbg_messenger;
+
+    uint32_t current_buffer;
+    uint32_t queue_family_count;
+};
+
+VKAPI_ATTR VkBool32 VKAPI_CALL debug_messenger_callback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
+                                                        VkDebugUtilsMessageTypeFlagsEXT messageType,
+                                                        const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData,
+                                                        void *pUserData) {
+    char prefix[64] = "";
+    char *message = (char *)malloc(strlen(pCallbackData->pMessage) + 5000);
+    assert(message);
+    struct demo *demo = (struct demo *)pUserData;
+
+    if (demo->use_break) {
+#ifndef WIN32
+        raise(SIGTRAP);
+#else
+        DebugBreak();
+#endif
+    }
+
+    if (messageSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT) {
+        strcat(prefix, "VERBOSE : ");
+    } else if (messageSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT) {
+        strcat(prefix, "INFO : ");
+    } else if (messageSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT) {
+        strcat(prefix, "WARNING : ");
+    } else if (messageSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) {
+        strcat(prefix, "ERROR : ");
+    }
+
+    if (messageType & VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT) {
+        strcat(prefix, "GENERAL");
+    } else {
+        if (messageType & VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT) {
+            strcat(prefix, "VALIDATION");
+            validation_error = 1;
+        }
+        if (messageType & VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT) {
+            if (messageType & VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT) {
+                strcat(prefix, "|");
+            }
+            strcat(prefix, "PERFORMANCE");
+        }
+    }
+
+    sprintf(message, "%s - Message Id Number: %d | Message Id Name: %s\n\t%s\n", prefix, pCallbackData->messageIdNumber,
+            pCallbackData->pMessageIdName, pCallbackData->pMessage);
+    if (pCallbackData->objectCount > 0) {
+        char tmp_message[500];
+        sprintf(tmp_message, "\n\tObjects - %d\n", pCallbackData->objectCount);
+        strcat(message, tmp_message);
+        for (uint32_t object = 0; object < pCallbackData->objectCount; ++object) {
+            if (NULL != pCallbackData->pObjects[object].pObjectName && strlen(pCallbackData->pObjects[object].pObjectName) > 0) {
+                sprintf(tmp_message, "\t\tObject[%d] - %s, Handle %p, Name \"%s\"\n", object,
+                        string_VkObjectType(pCallbackData->pObjects[object].objectType),
+                        (void *)(pCallbackData->pObjects[object].objectHandle), pCallbackData->pObjects[object].pObjectName);
+            } else {
+                sprintf(tmp_message, "\t\tObject[%d] - %s, Handle %p\n", object,
+                        string_VkObjectType(pCallbackData->pObjects[object].objectType),
+                        (void *)(pCallbackData->pObjects[object].objectHandle));
+            }
+            strcat(message, tmp_message);
+        }
+    }
+    if (pCallbackData->cmdBufLabelCount > 0) {
+        char tmp_message[500];
+        sprintf(tmp_message, "\n\tCommand Buffer Labels - %d\n", pCallbackData->cmdBufLabelCount);
+        strcat(message, tmp_message);
+        for (uint32_t cmd_buf_label = 0; cmd_buf_label < pCallbackData->cmdBufLabelCount; ++cmd_buf_label) {
+            sprintf(tmp_message, "\t\tLabel[%d] - %s { %f, %f, %f, %f}\n", cmd_buf_label,
+                    pCallbackData->pCmdBufLabels[cmd_buf_label].pLabelName, pCallbackData->pCmdBufLabels[cmd_buf_label].color[0],
+                    pCallbackData->pCmdBufLabels[cmd_buf_label].color[1], pCallbackData->pCmdBufLabels[cmd_buf_label].color[2],
+                    pCallbackData->pCmdBufLabels[cmd_buf_label].color[3]);
+            strcat(message, tmp_message);
+        }
+    }
+
+#ifdef _WIN32
+
+    in_callback = true;
+    if (!demo->suppress_popups) MessageBox(NULL, message, "Alert", MB_OK);
+    in_callback = false;
+
+#elif defined(ANDROID)
+
+    if (messageSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT) {
+        __android_log_print(ANDROID_LOG_INFO, APP_SHORT_NAME, "%s", message);
+    } else if (messageSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT) {
+        __android_log_print(ANDROID_LOG_WARN, APP_SHORT_NAME, "%s", message);
+    } else if (messageSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) {
+        __android_log_print(ANDROID_LOG_ERROR, APP_SHORT_NAME, "%s", message);
+    } else if (messageSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT) {
+        __android_log_print(ANDROID_LOG_VERBOSE, APP_SHORT_NAME, "%s", message);
+    } else {
+        __android_log_print(ANDROID_LOG_INFO, APP_SHORT_NAME, "%s", message);
+    }
+
+#else
+
+    printf("%s\n", message);
+    fflush(stdout);
+
+#endif
+
+    free(message);
+
+    // Don't bail out, but keep going.
+    return false;
+}
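+
+// This callback is what gets plugged into
+// VkDebugUtilsMessengerCreateInfoEXT.pfnUserCallback when validation is enabled
+// (the vkCreateDebugUtilsMessengerEXT registration happens during instance
+// setup, outside this hunk).  Returning false asks the validation layers not to
+// abort the Vulkan call that triggered the message.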
+
+bool ActualTimeLate(uint64_t desired, uint64_t actual, uint64_t rdur) {
+    // The desired time was the earliest time that the present should have
+    // occurred.  In almost every case, the actual time should be later than the
+    // desired time.  We should only consider the actual time "late" if it is
+    // after "desired + rdur".
+    if (actual <= desired) {
+        // The actual time was before or equal to the desired time.  This will
+        // probably never happen, but in case it does, return false since the
+        // present was obviously NOT late.
+        return false;
+    }
+    uint64_t deadline = desired + rdur;
+    if (actual > deadline) {
+        return true;
+    } else {
+        return false;
+    }
+}
+bool CanPresentEarlier(uint64_t earliest, uint64_t actual, uint64_t margin, uint64_t rdur) {
+    if (earliest < actual) {
+        // Consider whether this present could have occurred earlier.  Make sure
+        // that the earliest time was at least 2 msec earlier than the actual time,
+        // and that the margin was at least 2 msec:
+        uint64_t diff = actual - earliest;
+        if ((diff >= (2 * MILLION)) && (margin >= (2 * MILLION))) {
+            // This present could have occurred earlier because both: 1) the
+            // earliest time was at least 2 msec before actual time, and 2) the
+            // margin was at least 2 msec.
+            return true;
+        }
+    }
+    return false;
+}
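+
+// Worked example (a sketch): with rdur = refresh_duration = 16,666,666 ns
+// (~60 Hz), ActualTimeLate() flags a present as late only when its
+// actualPresentTime exceeds desiredPresentTime + rdur, i.e. it slipped past a
+// whole refresh; CanPresentEarlier() answers true only when the image could
+// have been shown at least 2 * MILLION ns (2 ms) earlier and the reported
+// presentMargin was itself at least 2 ms.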
+
+// Forward declarations:
+static void demo_resize(struct demo *demo);
+static void demo_create_surface(struct demo *demo);
+
+static bool memory_type_from_properties(struct demo *demo, uint32_t typeBits, VkFlags requirements_mask, uint32_t *typeIndex) {
+    // Search memtypes to find first index with those properties
+    for (uint32_t i = 0; i < VK_MAX_MEMORY_TYPES; i++) {
+        if ((typeBits & 1) == 1) {
+            // Type is available, does it match user properties?
+            if ((demo->memory_properties.memoryTypes[i].propertyFlags & requirements_mask) == requirements_mask) {
+                *typeIndex = i;
+                return true;
+            }
+        }
+        typeBits >>= 1;
+    }
+    // No memory types matched, return failure
+    return false;
+}
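+
+// Usage sketch (an assumed allocation site; the property flags vary per
+// resource):
+//
+//     VkMemoryRequirements mem_reqs;
+//     vkGetBufferMemoryRequirements(demo->device, buffer, &mem_reqs);
+//     uint32_t type_index;
+//     bool U_ASSERT_ONLY found = memory_type_from_properties(
+//         demo, mem_reqs.memoryTypeBits,
+//         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
+//         &type_index);
+//     assert(found);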
+
+static void demo_flush_init_cmd(struct demo *demo) {
+    VkResult U_ASSERT_ONLY err;
+
+    // This function could get called twice if the texture uses a staging buffer
+    // In that case the second call should be ignored
+    if (demo->cmd == VK_NULL_HANDLE) return;
+
+    err = vkEndCommandBuffer(demo->cmd);
+    assert(!err);
+
+    VkFence fence;
+    VkFenceCreateInfo fence_ci = {.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, .pNext = NULL, .flags = 0};
+    err = vkCreateFence(demo->device, &fence_ci, NULL, &fence);
+    assert(!err);
+
+    const VkCommandBuffer cmd_bufs[] = {demo->cmd};
+    VkSubmitInfo submit_info = {.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
+                                .pNext = NULL,
+                                .waitSemaphoreCount = 0,
+                                .pWaitSemaphores = NULL,
+                                .pWaitDstStageMask = NULL,
+                                .commandBufferCount = 1,
+                                .pCommandBuffers = cmd_bufs,
+                                .signalSemaphoreCount = 0,
+                                .pSignalSemaphores = NULL};
+
+    err = vkQueueSubmit(demo->graphics_queue, 1, &submit_info, fence);
+    assert(!err);
+
+    err = vkWaitForFences(demo->device, 1, &fence, VK_TRUE, UINT64_MAX);
+    assert(!err);
+
+    vkFreeCommandBuffers(demo->device, demo->cmd_pool, 1, cmd_bufs);
+    vkDestroyFence(demo->device, fence, NULL);
+    demo->cmd = VK_NULL_HANDLE;
+}
+
+static void demo_set_image_layout(struct demo *demo, VkImage image, VkImageAspectFlags aspectMask, VkImageLayout old_image_layout,
+                                  VkImageLayout new_image_layout, VkAccessFlagBits srcAccessMask, VkPipelineStageFlags src_stages,
+                                  VkPipelineStageFlags dest_stages) {
+    assert(demo->cmd);
+
+    VkImageMemoryBarrier image_memory_barrier = {.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+                                                 .pNext = NULL,
+                                                 .srcAccessMask = srcAccessMask,
+                                                 .dstAccessMask = 0,
+                                                 .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
+                                                 .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
+                                                 .oldLayout = old_image_layout,
+                                                 .newLayout = new_image_layout,
+                                                 .image = image,
+                                                 .subresourceRange = {aspectMask, 0, 1, 0, 1}};
+
+    switch (new_image_layout) {
+        case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
+            /* Make sure anything that was copying from this image has completed */
+            image_memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
+            break;
+
+        case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
+            image_memory_barrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+            break;
+
+        case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
+            image_memory_barrier.dstAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
+            break;
+
+        case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
+            image_memory_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;
+            break;
+
+        case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
+            image_memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
+            break;
+
+        case VK_IMAGE_LAYOUT_PRESENT_SRC_KHR:
+            image_memory_barrier.dstAccessMask = VK_ACCESS_MEMORY_READ_BIT;
+            break;
+
+        default:
+            image_memory_barrier.dstAccessMask = 0;
+            break;
+    }
+
+    VkImageMemoryBarrier *pmemory_barrier = &image_memory_barrier;
+
+    vkCmdPipelineBarrier(demo->cmd, src_stages, dest_stages, 0, 0, NULL, 0, NULL, 1, pmemory_barrier);
+}
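+
+// Usage sketch (hypothetical arguments): transitioning a linearly-tiled texture
+// that the host just wrote so a fragment shader can sample it:
+//
+//     demo_set_image_layout(demo, tex->image, VK_IMAGE_ASPECT_COLOR_BIT,
+//                           VK_IMAGE_LAYOUT_PREINITIALIZED,
+//                           VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
+//                           VK_ACCESS_HOST_WRITE_BIT,
+//                           VK_PIPELINE_STAGE_HOST_BIT,
+//                           VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT);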
+
+static void demo_draw_build_cmd(struct demo *demo, VkCommandBuffer cmd_buf) {
+    VkDebugUtilsLabelEXT label;
+    memset(&label, 0, sizeof(label));
+    const VkCommandBufferBeginInfo cmd_buf_info = {
+        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+        .pNext = NULL,
+        .flags = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT,
+        .pInheritanceInfo = NULL,
+    };
+    const VkClearValue clear_values[2] = {
+        [0] = {.color.float32 = {0.2f, 0.2f, 0.2f, 0.2f}},
+        [1] = {.depthStencil = {1.0f, 0}},
+    };
+    const VkRenderPassBeginInfo rp_begin = {
+        .sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
+        .pNext = NULL,
+        .renderPass = demo->render_pass,
+        .framebuffer = demo->swapchain_image_resources[demo->current_buffer].framebuffer,
+        .renderArea.offset.x = 0,
+        .renderArea.offset.y = 0,
+        .renderArea.extent.width = demo->width,
+        .renderArea.extent.height = demo->height,
+        .clearValueCount = 2,
+        .pClearValues = clear_values,
+    };
+    VkResult U_ASSERT_ONLY err;
+
+    err = vkBeginCommandBuffer(cmd_buf, &cmd_buf_info);
+
+    if (demo->validate) {
+        // Set a name for the command buffer
+        VkDebugUtilsObjectNameInfoEXT cmd_buf_name = {
+            .sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT,
+            .pNext = NULL,
+            .objectType = VK_OBJECT_TYPE_COMMAND_BUFFER,
+            .objectHandle = (uint64_t)cmd_buf,
+            .pObjectName = "CubeDrawCommandBuf",
+        };
+        demo->SetDebugUtilsObjectNameEXT(demo->device, &cmd_buf_name);
+
+        label.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
+        label.pNext = NULL;
+        label.pLabelName = "DrawBegin";
+        label.color[0] = 0.4f;
+        label.color[1] = 0.3f;
+        label.color[2] = 0.2f;
+        label.color[3] = 0.1f;
+        demo->CmdBeginDebugUtilsLabelEXT(cmd_buf, &label);
+    }
+
+    assert(!err);
+    vkCmdBeginRenderPass(cmd_buf, &rp_begin, VK_SUBPASS_CONTENTS_INLINE);
+
+    if (demo->validate) {
+        label.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
+        label.pNext = NULL;
+        label.pLabelName = "InsideRenderPass";
+        label.color[0] = 8.4f;
+        label.color[1] = 7.3f;
+        label.color[2] = 6.2f;
+        label.color[3] = 7.1f;
+        demo->CmdBeginDebugUtilsLabelEXT(cmd_buf, &label);
+    }
+
+    vkCmdBindPipeline(cmd_buf, VK_PIPELINE_BIND_POINT_GRAPHICS, demo->pipeline);
+    vkCmdBindDescriptorSets(cmd_buf, VK_PIPELINE_BIND_POINT_GRAPHICS, demo->pipeline_layout, 0, 1,
+                            &demo->swapchain_image_resources[demo->current_buffer].descriptor_set, 0, NULL);
+    VkViewport viewport;
+    memset(&viewport, 0, sizeof(viewport));
+    float viewport_dimension;
+    if (demo->width < demo->height) {
+        viewport_dimension = (float)demo->width;
+        viewport.y = (demo->height - demo->width) / 2.0f;
+    } else {
+        viewport_dimension = (float)demo->height;
+        viewport.x = (demo->width - demo->height) / 2.0f;
+    }
+    viewport.height = viewport_dimension;
+    viewport.width = viewport_dimension;
+    viewport.minDepth = (float)0.0f;
+    viewport.maxDepth = (float)1.0f;
+    vkCmdSetViewport(cmd_buf, 0, 1, &viewport);
+
+    VkRect2D scissor;
+    memset(&scissor, 0, sizeof(scissor));
+    scissor.extent.width = demo->width;
+    scissor.extent.height = demo->height;
+    scissor.offset.x = 0;
+    scissor.offset.y = 0;
+    vkCmdSetScissor(cmd_buf, 0, 1, &scissor);
+
+    if (demo->validate) {
+        label.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
+        label.pNext = NULL;
+        label.pLabelName = "ActualDraw";
+        label.color[0] = -0.4f;
+        label.color[1] = -0.3f;
+        label.color[2] = -0.2f;
+        label.color[3] = -0.1f;
+        demo->CmdBeginDebugUtilsLabelEXT(cmd_buf, &label);
+    }
+
+    vkCmdDraw(cmd_buf, 12 * 3, 1, 0, 0);
+    if (demo->validate) {
+        demo->CmdEndDebugUtilsLabelEXT(cmd_buf);
+    }
+
+    // Note that ending the renderpass changes the image's layout from
+    // COLOR_ATTACHMENT_OPTIMAL to PRESENT_SRC_KHR
+    vkCmdEndRenderPass(cmd_buf);
+    if (demo->validate) {
+        demo->CmdEndDebugUtilsLabelEXT(cmd_buf);
+    }
+
+    if (demo->separate_present_queue) {
+        // We have to transfer ownership from the graphics queue family to the
+        // present queue family to be able to present.  Note that we don't have
+        // to transfer from present queue family back to graphics queue family at
+        // the start of the next frame because we don't care about the image's
+        // contents at that point.
+        VkImageMemoryBarrier image_ownership_barrier = {.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+                                                        .pNext = NULL,
+                                                        .srcAccessMask = 0,
+                                                        .dstAccessMask = 0,
+                                                        .oldLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
+                                                        .newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
+                                                        .srcQueueFamilyIndex = demo->graphics_queue_family_index,
+                                                        .dstQueueFamilyIndex = demo->present_queue_family_index,
+                                                        .image = demo->swapchain_image_resources[demo->current_buffer].image,
+                                                        .subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1}};
+
+        vkCmdPipelineBarrier(cmd_buf, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, NULL, 0,
+                             NULL, 1, &image_ownership_barrier);
+    }
+    if (demo->validate) {
+        demo->CmdEndDebugUtilsLabelEXT(cmd_buf);
+    }
+    err = vkEndCommandBuffer(cmd_buf);
+    assert(!err);
+}
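+
+// Note: the vkCmdSetViewport/vkCmdSetScissor calls above are only valid because
+// the bound pipeline is created with VK_DYNAMIC_STATE_VIEWPORT and
+// VK_DYNAMIC_STATE_SCISSOR enabled (the pipeline-creation code sits outside this
+// hunk).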
+
+void demo_build_image_ownership_cmd(struct demo *demo, int i) {
+    VkResult U_ASSERT_ONLY err;
+
+    const VkCommandBufferBeginInfo cmd_buf_info = {
+        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+        .pNext = NULL,
+        .flags = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT,
+        .pInheritanceInfo = NULL,
+    };
+    err = vkBeginCommandBuffer(demo->swapchain_image_resources[i].graphics_to_present_cmd, &cmd_buf_info);
+    assert(!err);
+
+    VkImageMemoryBarrier image_ownership_barrier = {.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+                                                    .pNext = NULL,
+                                                    .srcAccessMask = 0,
+                                                    .dstAccessMask = 0,
+                                                    .oldLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
+                                                    .newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
+                                                    .srcQueueFamilyIndex = demo->graphics_queue_family_index,
+                                                    .dstQueueFamilyIndex = demo->present_queue_family_index,
+                                                    .image = demo->swapchain_image_resources[i].image,
+                                                    .subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1}};
+
+    vkCmdPipelineBarrier(demo->swapchain_image_resources[i].graphics_to_present_cmd, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
+                         VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, NULL, 0, NULL, 1, &image_ownership_barrier);
+    err = vkEndCommandBuffer(demo->swapchain_image_resources[i].graphics_to_present_cmd);
+    assert(!err);
+}
+
+void demo_update_data_buffer(struct demo *demo) {
+    mat4x4 MVP, Model, VP;
+    int matrixSize = sizeof(MVP);
+    uint8_t *pData;
+    VkResult U_ASSERT_ONLY err;
+
+    mat4x4_mul(VP, demo->projection_matrix, demo->view_matrix);
+
+    // Rotate around the Y axis
+    mat4x4_dup(Model, demo->model_matrix);
+    mat4x4_rotate(demo->model_matrix, Model, 0.0f, 1.0f, 0.0f, (float)degreesToRadians(demo->spin_angle));
+    mat4x4_mul(MVP, VP, demo->model_matrix);
+
+    err = vkMapMemory(demo->device, demo->swapchain_image_resources[demo->current_buffer].uniform_memory, 0, VK_WHOLE_SIZE, 0,
+                      (void **)&pData);
+    assert(!err);
+
+    memcpy(pData, (const void *)&MVP[0][0], matrixSize);
+
+    vkUnmapMemory(demo->device, demo->swapchain_image_resources[demo->current_buffer].uniform_memory);
+}
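+
+// Note: no vkFlushMappedMemoryRanges call is needed between the memcpy and
+// vkUnmapMemory above provided the uniform memory comes from a type with
+// VK_MEMORY_PROPERTY_HOST_COHERENT_BIT set, which is what this demo is expected
+// to request when it allocates uniform_memory.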
+
+void DemoUpdateTargetIPD(struct demo *demo) {
+    // Look at what happened to previous presents, and make appropriate
+    // adjustments in timing:
+    VkResult U_ASSERT_ONLY err;
+    VkPastPresentationTimingGOOGLE *past = NULL;
+    uint32_t count = 0;
+
+    err = demo->fpGetPastPresentationTimingGOOGLE(demo->device, demo->swapchain, &count, NULL);
+    assert(!err);
+    if (count) {
+        past = (VkPastPresentationTimingGOOGLE *)malloc(sizeof(VkPastPresentationTimingGOOGLE) * count);
+        assert(past);
+        err = demo->fpGetPastPresentationTimingGOOGLE(demo->device, demo->swapchain, &count, past);
+        assert(!err);
+
+        bool early = false;
+        bool late = false;
+        bool calibrate_next = false;
+        for (uint32_t i = 0; i < count; i++) {
+            if (!demo->syncd_with_actual_presents) {
+                // This is the first time that we've received an
+                // actualPresentTime for this swapchain.  In order not to
+                // perceive these early frames as "late", we need to sync up
+                // our future desiredPresentTimes with the
+                // actualPresentTime(s) that we're receiving now.
+                calibrate_next = true;
+
+                // So that we don't suspect any pending presents as late,
+                // record them all as suspected-late presents:
+                demo->last_late_id = demo->next_present_id - 1;
+                demo->last_early_id = 0;
+                demo->syncd_with_actual_presents = true;
+                break;
+            } else if (CanPresentEarlier(past[i].earliestPresentTime, past[i].actualPresentTime, past[i].presentMargin,
+                                         demo->refresh_duration)) {
+                // This image could have been presented earlier.  We don't want
+                // to decrease the target_IPD until we've seen early presents
+                // for at least two seconds.
+                if (demo->last_early_id == past[i].presentID) {
+                    // We've now seen two seconds' worth of early presents.
+                    // Flag it as such, and reset the counter:
+                    early = true;
+                    demo->last_early_id = 0;
+                } else if (demo->last_early_id == 0) {
+                    // This is the first early present we've seen.
+                    // Calculate the presentID for two seconds from now.
+                    uint64_t lastEarlyTime = past[i].actualPresentTime + (2 * BILLION);
+                    uint32_t howManyPresents = (uint32_t)((lastEarlyTime - past[i].actualPresentTime) / demo->target_IPD);
+                    demo->last_early_id = past[i].presentID + howManyPresents;
+                } else {
+                    // We are in the midst of a set of early images,
+                    // and so we won't do anything.
+                }
+                late = false;
+                demo->last_late_id = 0;
+            } else if (ActualTimeLate(past[i].desiredPresentTime, past[i].actualPresentTime, demo->refresh_duration)) {
+                // This image was presented after its desired time.  Since
+                // there's a delay between calling vkQueuePresentKHR and when
+                // we get the timing data, several presents may have been late.
+                // Thus, we need to treat all of the outstanding presents as
+                // being likely late, so that we only increase the target_IPD
+                // once for all of those presents.
+                if ((demo->last_late_id == 0) || (demo->last_late_id < past[i].presentID)) {
+                    late = true;
+                    // Record the last suspected-late present:
+                    demo->last_late_id = demo->next_present_id - 1;
+                } else {
+                    // We are in the midst of a set of likely-late images,
+                    // and so we won't do anything.
+                }
+                early = false;
+                demo->last_early_id = 0;
+            } else {
+                // Since this image was not presented early or late, reset
+                // any sets of early or late presentIDs:
+                early = false;
+                late = false;
+                calibrate_next = true;
+                demo->last_early_id = 0;
+                demo->last_late_id = 0;
+            }
+        }
+
+        if (early) {
+            // Since we've seen at least two seconds' worth of presents that
+            // could have occurred earlier than desired, let's decrease the
+            // target_IPD (i.e. increase the frame rate):
+            //
+            // TODO(ianelliott): Try to calculate a better target_IPD based
+            // on the most recently-seen present (this is overly-simplistic).
+            demo->refresh_duration_multiplier--;
+            if (demo->refresh_duration_multiplier == 0) {
+                // This should never happen, but in case it does, don't
+                // try to go faster.
+                demo->refresh_duration_multiplier = 1;
+            }
+            demo->target_IPD = demo->refresh_duration * demo->refresh_duration_multiplier;
+        }
+        if (late) {
+            // Since we found a new instance of a late present, we want to
+            // increase the target_IPD (i.e. decrease the frame rate):
+            //
+            // TODO(ianelliott): Try to calculate a better target_IPD based
+            // on the most recently-seen present (this is overly-simplistic).
+            demo->refresh_duration_multiplier++;
+            demo->target_IPD = demo->refresh_duration * demo->refresh_duration_multiplier;
+        }
+
+        if (calibrate_next) {
+            int64_t multiple = demo->next_present_id - past[count - 1].presentID;
+            demo->prev_desired_present_time = (past[count - 1].actualPresentTime + (multiple * demo->target_IPD));
+        }
+        free(past);
+    }
+}
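+
+// In short: target_IPD is always refresh_duration * refresh_duration_multiplier.
+// The multiplier is decremented (never below 1) after roughly two seconds of
+// consistently early presents, and incremented whenever a new late present is
+// detected, so the demo's requested frame rate adapts to what the display
+// actually achieves.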
+
+static void demo_draw(struct demo *demo) {
+    VkResult U_ASSERT_ONLY err;
+
+    // Ensure no more than FRAME_LAG renderings are outstanding
+    vkWaitForFences(demo->device, 1, &demo->fences[demo->frame_index], VK_TRUE, UINT64_MAX);
+    vkResetFences(demo->device, 1, &demo->fences[demo->frame_index]);
+
+    do {
+        // Get the index of the next available swapchain image:
+        err =
+            demo->fpAcquireNextImageKHR(demo->device, demo->swapchain, UINT64_MAX,
+                                        demo->image_acquired_semaphores[demo->frame_index], VK_NULL_HANDLE, &demo->current_buffer);
+
+        if (err == VK_ERROR_OUT_OF_DATE_KHR) {
+            // demo->swapchain is out of date (e.g. the window was resized) and
+            // must be recreated:
+            demo_resize(demo);
+        } else if (err == VK_SUBOPTIMAL_KHR) {
+            // demo->swapchain is not as optimal as it could be, but the platform's
+            // presentation engine will still present the image correctly.
+            break;
+        } else if (err == VK_ERROR_SURFACE_LOST_KHR) {
+            vkDestroySurfaceKHR(demo->inst, demo->surface, NULL);
+            demo_create_surface(demo);
+            demo_resize(demo);
+        } else {
+            assert(!err);
+        }
+    } while (err != VK_SUCCESS);
+
+    demo_update_data_buffer(demo);
+
+    if (demo->VK_GOOGLE_display_timing_enabled) {
+        // Look at what happened to previous presents, and make appropriate
+        // adjustments in timing:
+        DemoUpdateTargetIPD(demo);
+
+        // Note: a real application would position its geometry so that it's in
+        // the correct location for when the next image is presented.  It might
+        // also wait, so that there's less latency between any input and when
+        // the next image is rendered/presented.  This demo program is so
+        // simple that it doesn't do either of those.
+    }
+
+    // Wait for the image acquired semaphore to be signaled to ensure
+    // that the image won't be rendered to until the presentation
+    // engine has fully released ownership to the application, and it is
+    // okay to render to the image.
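+    // Waiting at the color-attachment-output stage lets earlier pipeline work
+    // (e.g. vertex shading) start before the swapchain image is available.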
+    VkPipelineStageFlags pipe_stage_flags;
+    VkSubmitInfo submit_info;
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.pNext = NULL;
+    submit_info.pWaitDstStageMask = &pipe_stage_flags;
+    pipe_stage_flags = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
+    submit_info.waitSemaphoreCount = 1;
+    submit_info.pWaitSemaphores = &demo->image_acquired_semaphores[demo->frame_index];
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &demo->swapchain_image_resources[demo->current_buffer].cmd;
+    submit_info.signalSemaphoreCount = 1;
+    submit_info.pSignalSemaphores = &demo->draw_complete_semaphores[demo->frame_index];
+    err = vkQueueSubmit(demo->graphics_queue, 1, &submit_info, demo->fences[demo->frame_index]);
+    assert(!err);
+
+    if (demo->separate_present_queue) {
+        // If we are using separate queues, change image ownership to the
+        // present queue before presenting, waiting for the draw complete
+        // semaphore and signalling the ownership released semaphore when finished
+        VkFence nullFence = VK_NULL_HANDLE;
+        pipe_stage_flags = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
+        submit_info.waitSemaphoreCount = 1;
+        submit_info.pWaitSemaphores = &demo->draw_complete_semaphores[demo->frame_index];
+        submit_info.commandBufferCount = 1;
+        submit_info.pCommandBuffers = &demo->swapchain_image_resources[demo->current_buffer].graphics_to_present_cmd;
+        submit_info.signalSemaphoreCount = 1;
+        submit_info.pSignalSemaphores = &demo->image_ownership_semaphores[demo->frame_index];
+        err = vkQueueSubmit(demo->present_queue, 1, &submit_info, nullFence);
+        assert(!err);
+    }
+
+    // If we are using separate queues we have to wait for image ownership,
+    // otherwise wait for draw complete
+    VkPresentInfoKHR present = {
+        .sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
+        .pNext = NULL,
+        .waitSemaphoreCount = 1,
+        .pWaitSemaphores = (demo->separate_present_queue) ? &demo->image_ownership_semaphores[demo->frame_index]
+                                                          : &demo->draw_complete_semaphores[demo->frame_index],
+        .swapchainCount = 1,
+        .pSwapchains = &demo->swapchain,
+        .pImageIndices = &demo->current_buffer,
+    };
+
+    VkRectLayerKHR rect;
+    VkPresentRegionKHR region;
+    VkPresentRegionsKHR regions;
+    if (demo->VK_KHR_incremental_present_enabled) {
+        // If using VK_KHR_incremental_present, we provide a hint of the region
+        // that contains changed content relative to the previously-presented
+        // image.  The implementation can use this hint in order to save
+        // work/power (by only copying the region in the hint).  The
+        // implementation is free to ignore the hint though, and so we must
+        // ensure that the entire image has the correctly-drawn content.
+        uint32_t eighthOfWidth = demo->width / 8;
+        uint32_t eighthOfHeight = demo->height / 8;
+
+        rect.offset.x = eighthOfWidth;
+        rect.offset.y = eighthOfHeight;
+        rect.extent.width = eighthOfWidth * 6;
+        rect.extent.height = eighthOfHeight * 6;
+        rect.layer = 0;
+
+        region.rectangleCount = 1;
+        region.pRectangles = &rect;
+
+        regions.sType = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR;
+        regions.pNext = present.pNext;
+        regions.swapchainCount = present.swapchainCount;
+        regions.pRegions = &region;
+        present.pNext = &regions;
+    }
+
+    if (demo->VK_GOOGLE_display_timing_enabled) {
+        VkPresentTimeGOOGLE ptime;
+        if (demo->prev_desired_present_time == 0) {
+            // This must be the first present for this swapchain.
+            //
+            // We don't know where we are relative to the presentation engine's
+            // display's refresh cycle.  We also don't know how long rendering
+            // takes.  Let's make a grossly-simplified assumption that the
+            // desiredPresentTime should be half way between now and
+            // now+target_IPD.  We will adjust over time.
+            uint64_t curtime = getTimeInNanoseconds();
+            if (curtime == 0) {
+                // Since we didn't find out the current time, don't give a
+                // desiredPresentTime:
+                ptime.desiredPresentTime = 0;
+            } else {
+                ptime.desiredPresentTime = curtime + (demo->target_IPD >> 1);
+            }
+        } else {
+            ptime.desiredPresentTime = (demo->prev_desired_present_time + demo->target_IPD);
+        }
+        ptime.presentID = demo->next_present_id++;
+        demo->prev_desired_present_time = ptime.desiredPresentTime;
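+        // This presentID comes back in the VkPastPresentationTimingGOOGLE records
+        // that DemoUpdateTargetIPD() examines, tying actual present times to the
+        // times we requested.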
+
+        VkPresentTimesInfoGOOGLE present_time = {
+            .sType = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE,
+            .pNext = present.pNext,
+            .swapchainCount = present.swapchainCount,
+            .pTimes = &ptime,
+        };
+        present.pNext = &present_time;
+    }
+
+    err = demo->fpQueuePresentKHR(demo->present_queue, &present);
+    demo->frame_index += 1;
+    demo->frame_index %= FRAME_LAG;
+
+    if (err == VK_ERROR_OUT_OF_DATE_KHR) {
+        // demo->swapchain is out of date (e.g. the window was resized) and
+        // must be recreated:
+        demo_resize(demo);
+    } else if (err == VK_SUBOPTIMAL_KHR) {
+        // demo->swapchain is not as optimal as it could be, but the platform's
+        // presentation engine will still present the image correctly.
+    } else if (err == VK_ERROR_SURFACE_LOST_KHR) {
+        vkDestroySurfaceKHR(demo->inst, demo->surface, NULL);
+        demo_create_surface(demo);
+        demo_resize(demo);
+    } else {
+        assert(!err);
+    }
+}
+
+static void demo_prepare_buffers(struct demo *demo) {
+    VkResult U_ASSERT_ONLY err;
+    VkSwapchainKHR oldSwapchain = demo->swapchain;
+
+    // Check the surface capabilities and formats
+    VkSurfaceCapabilitiesKHR surfCapabilities;
+    err = demo->fpGetPhysicalDeviceSurfaceCapabilitiesKHR(demo->gpu, demo->surface, &surfCapabilities);
+    assert(!err);
+
+    uint32_t presentModeCount;
+    err = demo->fpGetPhysicalDeviceSurfacePresentModesKHR(demo->gpu, demo->surface, &presentModeCount, NULL);
+    assert(!err);
+    VkPresentModeKHR *presentModes = (VkPresentModeKHR *)malloc(presentModeCount * sizeof(VkPresentModeKHR));
+    assert(presentModes);
+    err = demo->fpGetPhysicalDeviceSurfacePresentModesKHR(demo->gpu, demo->surface, &presentModeCount, presentModes);
+    assert(!err);
+
+    VkExtent2D swapchainExtent;
+    // width and height are either both 0xFFFFFFFF, or both not 0xFFFFFFFF.
+    if (surfCapabilities.currentExtent.width == 0xFFFFFFFF) {
+        // If the surface size is undefined, the size is set to the size
+        // of the images requested, which must fit within the minimum and
+        // maximum values.
+        swapchainExtent.width = demo->width;
+        swapchainExtent.height = demo->height;
+
+        if (swapchainExtent.width < surfCapabilities.minImageExtent.width) {
+            swapchainExtent.width = surfCapabilities.minImageExtent.width;
+        } else if (swapchainExtent.width > surfCapabilities.maxImageExtent.width) {
+            swapchainExtent.width = surfCapabilities.maxImageExtent.width;
+        }
+
+        if (swapchainExtent.height < surfCapabilities.minImageExtent.height) {
+            swapchainExtent.height = surfCapabilities.minImageExtent.height;
+        } else if (swapchainExtent.height > surfCapabilities.maxImageExtent.height) {
+            swapchainExtent.height = surfCapabilities.maxImageExtent.height;
+        }
+    } else {
+        // If the surface size is defined, the swap chain size must match
+        swapchainExtent = surfCapabilities.currentExtent;
+        demo->width = surfCapabilities.currentExtent.width;
+        demo->height = surfCapabilities.currentExtent.height;
+    }
+
+    if (demo->width == 0 || demo->height == 0) {
+        demo->is_minimized = true;
+        free(presentModes);
+        return;
+    } else {
+        demo->is_minimized = false;
+    }
+
+    // The FIFO present mode is guaranteed by the spec to be supported
+    // and to have no tearing.  It's a great default present mode to use.
+    VkPresentModeKHR swapchainPresentMode = VK_PRESENT_MODE_FIFO_KHR;
+
+    //  There are times when you may wish to use another present mode.  The
+    //  following code shows how to select them, and the comments provide some
+    //  reasons you may wish to use them.
+    //
+    // It should be noted that Vulkan 1.0 doesn't provide a method for
+    // synchronizing rendering with the presentation engine's display.  There
+    // is a method provided for throttling rendering with the display, but
+    // there are some presentation engines for which this method will not work.
+    // If an application doesn't throttle its rendering, and if it renders much
+    // faster than the refresh rate of the display, this can waste power on
+    // mobile devices.  That is because power is being spent rendering images
+    // that may never be seen.
+
+    // VK_PRESENT_MODE_IMMEDIATE_KHR is for applications that don't care about
+    // tearing, or have some way of synchronizing their rendering with the
+    // display.
+    // VK_PRESENT_MODE_MAILBOX_KHR may be useful for applications that
+    // generally render a new presentable image every refresh cycle, but are
+    // occasionally early.  In this case, the application wants the new image
+    // to be displayed instead of the previously-queued-for-presentation image
+    // that has not yet been displayed.
+    // VK_PRESENT_MODE_FIFO_RELAXED_KHR is for applications that generally
+    // render a new presentable image every refresh cycle, but are occasionally
+    // late.  In this case (perhaps because of stuttering/latency concerns),
+    // the application wants the late image to be immediately displayed, even
+    // though that may mean some tearing.
+
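+    // If a present mode other than FIFO was requested, verify that the surface
+    // actually supports it; otherwise fail loudly rather than silently falling
+    // back to FIFO.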
+    if (demo->presentMode != swapchainPresentMode) {
+        for (size_t i = 0; i < presentModeCount; ++i) {
+            if (presentModes[i] == demo->presentMode) {
+                swapchainPresentMode = demo->presentMode;
+                break;
+            }
+        }
+    }
+    if (swapchainPresentMode != demo->presentMode) {
+        ERR_EXIT("Present mode specified is not supported\n", "Present mode unsupported");
+    }
+
+    // Determine the number of VkImages to use in the swap chain.
+    // Application desires to acquire 3 images at a time for triple
+    // buffering
+    uint32_t desiredNumOfSwapchainImages = 3;
+    if (desiredNumOfSwapchainImages < surfCapabilities.minImageCount) {
+        desiredNumOfSwapchainImages = surfCapabilities.minImageCount;
+    }
+    // If maxImageCount is 0, we can ask for as many images as we want;
+    // otherwise we're limited to maxImageCount
+    if ((surfCapabilities.maxImageCount > 0) && (desiredNumOfSwapchainImages > surfCapabilities.maxImageCount)) {
+        // Application must settle for fewer images than desired:
+        desiredNumOfSwapchainImages = surfCapabilities.maxImageCount;
+    }
+
+    VkSurfaceTransformFlagsKHR preTransform;
+    if (surfCapabilities.supportedTransforms & VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR) {
+        preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
+    } else {
+        preTransform = surfCapabilities.currentTransform;
+    }
+
+    // Find a supported composite alpha mode - one of these is guaranteed to be set
+    VkCompositeAlphaFlagBitsKHR compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
+    VkCompositeAlphaFlagBitsKHR compositeAlphaFlags[4] = {
+        VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
+        VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
+        VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
+        VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR,
+    };
+    for (uint32_t i = 0; i < ARRAY_SIZE(compositeAlphaFlags); i++) {
+        if (surfCapabilities.supportedCompositeAlpha & compositeAlphaFlags[i]) {
+            compositeAlpha = compositeAlphaFlags[i];
+            break;
+        }
+    }
+
+    VkSwapchainCreateInfoKHR swapchain_ci = {
+        .sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
+        .pNext = NULL,
+        .surface = demo->surface,
+        .minImageCount = desiredNumOfSwapchainImages,
+        .imageFormat = demo->format,
+        .imageColorSpace = demo->color_space,
+        .imageExtent =
+            {
+                .width = swapchainExtent.width,
+                .height = swapchainExtent.height,
+            },
+        .imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+        .preTransform = preTransform,
+        .compositeAlpha = compositeAlpha,
+        .imageArrayLayers = 1,
+        .imageSharingMode = VK_SHARING_MODE_EXCLUSIVE,
+        .queueFamilyIndexCount = 0,
+        .pQueueFamilyIndices = NULL,
+        .presentMode = swapchainPresentMode,
+        .oldSwapchain = oldSwapchain,
+        .clipped = true,
+    };
+    uint32_t i;
+    err = demo->fpCreateSwapchainKHR(demo->device, &swapchain_ci, NULL, &demo->swapchain);
+    assert(!err);
+
+    // If we just re-created an existing swapchain, we should destroy the old
+    // swapchain at this point.
+    // Note: destroying the swapchain also cleans up all its associated
+    // presentable images once the platform is done with them.
+    if (oldSwapchain != VK_NULL_HANDLE) {
+        demo->fpDestroySwapchainKHR(demo->device, oldSwapchain, NULL);
+    }
+
+    err = demo->fpGetSwapchainImagesKHR(demo->device, demo->swapchain, &demo->swapchainImageCount, NULL);
+    assert(!err);
+
+    VkImage *swapchainImages = (VkImage *)malloc(demo->swapchainImageCount * sizeof(VkImage));
+    assert(swapchainImages);
+    err = demo->fpGetSwapchainImagesKHR(demo->device, demo->swapchain, &demo->swapchainImageCount, swapchainImages);
+    assert(!err);
+
+    demo->swapchain_image_resources =
+        (SwapchainImageResources *)malloc(sizeof(SwapchainImageResources) * demo->swapchainImageCount);
+    assert(demo->swapchain_image_resources);
+
+    for (i = 0; i < demo->swapchainImageCount; i++) {
+        VkImageViewCreateInfo color_image_view = {
+            .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+            .pNext = NULL,
+            .format = demo->format,
+            .components =
+                {
+                    .r = VK_COMPONENT_SWIZZLE_R,
+                    .g = VK_COMPONENT_SWIZZLE_G,
+                    .b = VK_COMPONENT_SWIZZLE_B,
+                    .a = VK_COMPONENT_SWIZZLE_A,
+                },
+            .subresourceRange =
+                {.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT, .baseMipLevel = 0, .levelCount = 1, .baseArrayLayer = 0, .layerCount = 1},
+            .viewType = VK_IMAGE_VIEW_TYPE_2D,
+            .flags = 0,
+        };
+
+        demo->swapchain_image_resources[i].image = swapchainImages[i];
+
+        color_image_view.image = demo->swapchain_image_resources[i].image;
+
+        err = vkCreateImageView(demo->device, &color_image_view, NULL, &demo->swapchain_image_resources[i].view);
+        assert(!err);
+    }
+
+    if (demo->VK_GOOGLE_display_timing_enabled) {
+        VkRefreshCycleDurationGOOGLE rc_dur;
+        err = demo->fpGetRefreshCycleDurationGOOGLE(demo->device, demo->swapchain, &rc_dur);
+        assert(!err);
+        demo->refresh_duration = rc_dur.refreshDuration;
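+        // refreshDuration is the display's refresh period in nanoseconds.  The
+        // target IPD (image present duration) starts at one refresh and is
+        // adjusted over time by DemoUpdateTargetIPD().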
+
+        demo->syncd_with_actual_presents = false;
+        // Initially target 1X the refresh duration:
+        demo->target_IPD = demo->refresh_duration;
+        demo->refresh_duration_multiplier = 1;
+        demo->prev_desired_present_time = 0;
+        demo->next_present_id = 1;
+    }
+
+    if (NULL != swapchainImages) {
+        free(swapchainImages);
+    }
+
+    if (NULL != presentModes) {
+        free(presentModes);
+    }
+}
+
+static void demo_prepare_depth(struct demo *demo) {
+    const VkFormat depth_format = VK_FORMAT_D16_UNORM;
+    const VkImageCreateInfo image = {
+        .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+        .pNext = NULL,
+        .imageType = VK_IMAGE_TYPE_2D,
+        .format = depth_format,
+        .extent = {demo->width, demo->height, 1},
+        .mipLevels = 1,
+        .arrayLayers = 1,
+        .samples = VK_SAMPLE_COUNT_1_BIT,
+        .tiling = VK_IMAGE_TILING_OPTIMAL,
+        .usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
+        .flags = 0,
+    };
+
+    VkImageViewCreateInfo view = {
+        .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+        .pNext = NULL,
+        .image = VK_NULL_HANDLE,
+        .format = depth_format,
+        .subresourceRange =
+            {.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT, .baseMipLevel = 0, .levelCount = 1, .baseArrayLayer = 0, .layerCount = 1},
+        .flags = 0,
+        .viewType = VK_IMAGE_VIEW_TYPE_2D,
+    };
+
+    VkMemoryRequirements mem_reqs;
+    VkResult U_ASSERT_ONLY err;
+    bool U_ASSERT_ONLY pass;
+
+    demo->depth.format = depth_format;
+
+    /* create image */
+    err = vkCreateImage(demo->device, &image, NULL, &demo->depth.image);
+    assert(!err);
+
+    vkGetImageMemoryRequirements(demo->device, demo->depth.image, &mem_reqs);
+
+    demo->depth.mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    demo->depth.mem_alloc.pNext = NULL;
+    demo->depth.mem_alloc.allocationSize = mem_reqs.size;
+    demo->depth.mem_alloc.memoryTypeIndex = 0;
+
+    pass = memory_type_from_properties(demo, mem_reqs.memoryTypeBits, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
+                                       &demo->depth.mem_alloc.memoryTypeIndex);
+    assert(pass);
+
+    /* allocate memory */
+    err = vkAllocateMemory(demo->device, &demo->depth.mem_alloc, NULL, &demo->depth.mem);
+    assert(!err);
+
+    /* bind memory */
+    err = vkBindImageMemory(demo->device, demo->depth.image, demo->depth.mem, 0);
+    assert(!err);
+
+    /* create image view */
+    view.image = demo->depth.image;
+    err = vkCreateImageView(demo->device, &view, NULL, &demo->depth.view);
+    assert(!err);
+}
+
+/* Convert ppm image data from header file into RGBA texture image */
+#include "lunarg.ppm.h"
+bool loadTexture(const char *filename, uint8_t *rgba_data, VkSubresourceLayout *layout, int32_t *width, int32_t *height) {
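+    // lunarg_ppm holds a binary P6 PPM: "P6\n<width> <height>\n255\n" followed by
+    // packed RGB bytes.  Each RGB triple is expanded to RGBA with alpha set to 255.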
+    (void)filename;
+    char *cPtr;
+    cPtr = (char *)lunarg_ppm;
+    if ((unsigned char *)cPtr >= (lunarg_ppm + lunarg_ppm_len) || strncmp(cPtr, "P6\n", 3)) {
+        return false;
+    }
+    while (strncmp(cPtr++, "\n", 1))
+        ;
+    sscanf(cPtr, "%d %d", width, height);
+    if (rgba_data == NULL) {
+        return true;
+    }
+    while (strncmp(cPtr++, "\n", 1))
+        ;
+    if ((unsigned char *)cPtr >= (lunarg_ppm + lunarg_ppm_len) || strncmp(cPtr, "255\n", 4)) {
+        return false;
+    }
+    while (strncmp(cPtr++, "\n", 1))
+        ;
+    for (int y = 0; y < *height; y++) {
+        uint8_t *rowPtr = rgba_data;
+        for (int x = 0; x < *width; x++) {
+            memcpy(rowPtr, cPtr, 3);
+            rowPtr[3] = 255; /* Alpha of 1 */
+            rowPtr += 4;
+            cPtr += 3;
+        }
+        rgba_data += layout->rowPitch;
+    }
+    return true;
+}
+
+static void demo_prepare_texture_buffer(struct demo *demo, const char *filename, struct texture_object *tex_obj) {
+    int32_t tex_width;
+    int32_t tex_height;
+    VkResult U_ASSERT_ONLY err;
+    bool U_ASSERT_ONLY pass;
+
+    if (!loadTexture(filename, NULL, NULL, &tex_width, &tex_height)) {
+        ERR_EXIT("Failed to load textures", "Load Texture Failure");
+    }
+
+    tex_obj->tex_width = tex_width;
+    tex_obj->tex_height = tex_height;
+
+    const VkBufferCreateInfo buffer_create_info = {.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
+                                                   .pNext = NULL,
+                                                   .flags = 0,
+                                                   .size = tex_width * tex_height * 4,
+                                                   .usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
+                                                   .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
+                                                   .queueFamilyIndexCount = 0,
+                                                   .pQueueFamilyIndices = NULL};
+
+    err = vkCreateBuffer(demo->device, &buffer_create_info, NULL, &tex_obj->buffer);
+    assert(!err);
+
+    VkMemoryRequirements mem_reqs;
+    vkGetBufferMemoryRequirements(demo->device, tex_obj->buffer, &mem_reqs);
+
+    tex_obj->mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    tex_obj->mem_alloc.pNext = NULL;
+    tex_obj->mem_alloc.allocationSize = mem_reqs.size;
+    tex_obj->mem_alloc.memoryTypeIndex = 0;
+
+    VkFlags requirements = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+    pass = memory_type_from_properties(demo, mem_reqs.memoryTypeBits, requirements, &tex_obj->mem_alloc.memoryTypeIndex);
+    assert(pass);
+
+    err = vkAllocateMemory(demo->device, &tex_obj->mem_alloc, NULL, &(tex_obj->mem));
+    assert(!err);
+
+    /* bind memory */
+    err = vkBindBufferMemory(demo->device, tex_obj->buffer, tex_obj->mem, 0);
+    assert(!err);
+
+    VkSubresourceLayout layout;
+    memset(&layout, 0, sizeof(layout));
+    layout.rowPitch = tex_width * 4;
+
+    void *data;
+    err = vkMapMemory(demo->device, tex_obj->mem, 0, tex_obj->mem_alloc.allocationSize, 0, &data);
+    assert(!err);
+
+    if (!loadTexture(filename, data, &layout, &tex_width, &tex_height)) {
+        fprintf(stderr, "Error loading texture: %s\n", filename);
+    }
+
+    vkUnmapMemory(demo->device, tex_obj->mem);
+}
+
+static void demo_prepare_texture_image(struct demo *demo, const char *filename, struct texture_object *tex_obj,
+                                       VkImageTiling tiling, VkImageUsageFlags usage, VkFlags required_props) {
+    const VkFormat tex_format = VK_FORMAT_R8G8B8A8_UNORM;
+    int32_t tex_width;
+    int32_t tex_height;
+    VkResult U_ASSERT_ONLY err;
+    bool U_ASSERT_ONLY pass;
+
+    if (!loadTexture(filename, NULL, NULL, &tex_width, &tex_height)) {
+        ERR_EXIT("Failed to load textures", "Load Texture Failure");
+    }
+
+    tex_obj->tex_width = tex_width;
+    tex_obj->tex_height = tex_height;
+
+    const VkImageCreateInfo image_create_info = {
+        .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+        .pNext = NULL,
+        .imageType = VK_IMAGE_TYPE_2D,
+        .format = tex_format,
+        .extent = {tex_width, tex_height, 1},
+        .mipLevels = 1,
+        .arrayLayers = 1,
+        .samples = VK_SAMPLE_COUNT_1_BIT,
+        .tiling = tiling,
+        .usage = usage,
+        .flags = 0,
+        .initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED,
+    };
+
+    VkMemoryRequirements mem_reqs;
+
+    err = vkCreateImage(demo->device, &image_create_info, NULL, &tex_obj->image);
+    assert(!err);
+
+    vkGetImageMemoryRequirements(demo->device, tex_obj->image, &mem_reqs);
+
+    tex_obj->mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    tex_obj->mem_alloc.pNext = NULL;
+    tex_obj->mem_alloc.allocationSize = mem_reqs.size;
+    tex_obj->mem_alloc.memoryTypeIndex = 0;
+
+    pass = memory_type_from_properties(demo, mem_reqs.memoryTypeBits, required_props, &tex_obj->mem_alloc.memoryTypeIndex);
+    assert(pass);
+
+    /* allocate memory */
+    err = vkAllocateMemory(demo->device, &tex_obj->mem_alloc, NULL, &(tex_obj->mem));
+    assert(!err);
+
+    /* bind memory */
+    err = vkBindImageMemory(demo->device, tex_obj->image, tex_obj->mem, 0);
+    assert(!err);
+
+    if (required_props & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
+        const VkImageSubresource subres = {
+            .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
+            .mipLevel = 0,
+            .arrayLayer = 0,
+        };
+        VkSubresourceLayout layout;
+        void *data;
+
+        vkGetImageSubresourceLayout(demo->device, tex_obj->image, &subres, &layout);
+
+        err = vkMapMemory(demo->device, tex_obj->mem, 0, tex_obj->mem_alloc.allocationSize, 0, &data);
+        assert(!err);
+
+        if (!loadTexture(filename, data, &layout, &tex_width, &tex_height)) {
+            fprintf(stderr, "Error loading texture: %s\n", filename);
+        }
+
+        vkUnmapMemory(demo->device, tex_obj->mem);
+    }
+
+    tex_obj->imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+}
+
+static void demo_destroy_texture(struct demo *demo, struct texture_object *tex_objs) {
+    /* clean up staging resources */
+    vkFreeMemory(demo->device, tex_objs->mem, NULL);
+    if (tex_objs->image) vkDestroyImage(demo->device, tex_objs->image, NULL);
+    if (tex_objs->buffer) vkDestroyBuffer(demo->device, tex_objs->buffer, NULL);
+}
+
+static void demo_prepare_textures(struct demo *demo) {
+    const VkFormat tex_format = VK_FORMAT_R8G8B8A8_UNORM;
+    VkFormatProperties props;
+    uint32_t i;
+
+    vkGetPhysicalDeviceFormatProperties(demo->gpu, tex_format, &props);
+
+    for (i = 0; i < DEMO_TEXTURE_COUNT; i++) {
+        VkResult U_ASSERT_ONLY err;
+
+        if ((props.linearTilingFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) && !demo->use_staging_buffer) {
+            /* Device can texture using linear textures */
+            demo_prepare_texture_image(demo, tex_files[i], &demo->textures[i], VK_IMAGE_TILING_LINEAR, VK_IMAGE_USAGE_SAMPLED_BIT,
+                                       VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
+            // Nothing in the pipeline needs to be complete to start, and don't allow fragment
+            // shader to run until layout transition completes
+            demo_set_image_layout(demo, demo->textures[i].image, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_PREINITIALIZED,
+                                  demo->textures[i].imageLayout, 0, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
+                                  VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT);
+            demo->staging_texture.image = 0;
+        } else if (props.optimalTilingFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) {
+            /* Must use staging buffer to copy linear texture to optimized */
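+            /* (host-visible staging buffer -> vkCmdCopyBufferToImage -> device-local, optimally-tiled image) */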
+
+            memset(&demo->staging_texture, 0, sizeof(demo->staging_texture));
+            demo_prepare_texture_buffer(demo, tex_files[i], &demo->staging_texture);
+
+            demo_prepare_texture_image(demo, tex_files[i], &demo->textures[i], VK_IMAGE_TILING_OPTIMAL,
+                                       (VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT),
+                                       VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
+
+            demo_set_image_layout(demo, demo->textures[i].image, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_PREINITIALIZED,
+                                  VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 0, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
+                                  VK_PIPELINE_STAGE_TRANSFER_BIT);
+
+            VkBufferImageCopy copy_region = {
+                .bufferOffset = 0,
+                .bufferRowLength = demo->staging_texture.tex_width,
+                .bufferImageHeight = demo->staging_texture.tex_height,
+                .imageSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1},
+                .imageOffset = {0, 0, 0},
+                .imageExtent = {demo->staging_texture.tex_width, demo->staging_texture.tex_height, 1},
+            };
+
+            vkCmdCopyBufferToImage(demo->cmd, demo->staging_texture.buffer, demo->textures[i].image,
+                                   VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &copy_region);
+
+            demo_set_image_layout(demo, demo->textures[i].image, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+                                  demo->textures[i].imageLayout, VK_ACCESS_TRANSFER_WRITE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
+                                  VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT);
+
+        } else {
+            /* Can't support VK_FORMAT_R8G8B8A8_UNORM !? */
+            assert(!"No support for R8G8B8A8_UNORM as texture image format");
+        }
+
+        const VkSamplerCreateInfo sampler = {
+            .sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
+            .pNext = NULL,
+            .magFilter = VK_FILTER_NEAREST,
+            .minFilter = VK_FILTER_NEAREST,
+            .mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST,
+            .addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
+            .addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
+            .addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
+            .mipLodBias = 0.0f,
+            .anisotropyEnable = VK_FALSE,
+            .maxAnisotropy = 1,
+            .compareOp = VK_COMPARE_OP_NEVER,
+            .minLod = 0.0f,
+            .maxLod = 0.0f,
+            .borderColor = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
+            .unnormalizedCoordinates = VK_FALSE,
+        };
+
+        VkImageViewCreateInfo view = {
+            .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+            .pNext = NULL,
+            .image = VK_NULL_HANDLE,
+            .viewType = VK_IMAGE_VIEW_TYPE_2D,
+            .format = tex_format,
+            .components =
+                {
+                    VK_COMPONENT_SWIZZLE_R,
+                    VK_COMPONENT_SWIZZLE_G,
+                    VK_COMPONENT_SWIZZLE_B,
+                    VK_COMPONENT_SWIZZLE_A,
+                },
+            .subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1},
+            .flags = 0,
+        };
+
+        /* create sampler */
+        err = vkCreateSampler(demo->device, &sampler, NULL, &demo->textures[i].sampler);
+        assert(!err);
+
+        /* create image view */
+        view.image = demo->textures[i].image;
+        err = vkCreateImageView(demo->device, &view, NULL, &demo->textures[i].view);
+        assert(!err);
+    }
+}
+
+void demo_prepare_cube_data_buffers(struct demo *demo) {
+    VkBufferCreateInfo buf_info;
+    VkMemoryRequirements mem_reqs;
+    VkMemoryAllocateInfo mem_alloc;
+    uint8_t *pData;
+    mat4x4 MVP, VP;
+    VkResult U_ASSERT_ONLY err;
+    bool U_ASSERT_ONLY pass;
+    struct vktexcube_vs_uniform data;
+
+    mat4x4_mul(VP, demo->projection_matrix, demo->view_matrix);
+    mat4x4_mul(MVP, VP, demo->model_matrix);
+    memcpy(data.mvp, MVP, sizeof(MVP));
+    //    dumpMatrix("MVP", MVP);
+
+    for (unsigned int i = 0; i < 12 * 3; i++) {
+        data.position[i][0] = g_vertex_buffer_data[i * 3];
+        data.position[i][1] = g_vertex_buffer_data[i * 3 + 1];
+        data.position[i][2] = g_vertex_buffer_data[i * 3 + 2];
+        data.position[i][3] = 1.0f;
+        data.attr[i][0] = g_uv_buffer_data[2 * i];
+        data.attr[i][1] = g_uv_buffer_data[2 * i + 1];
+        data.attr[i][2] = 0;
+        data.attr[i][3] = 0;
+    }
+
+    memset(&buf_info, 0, sizeof(buf_info));
+    buf_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    buf_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+    buf_info.size = sizeof(data);
+
+    for (unsigned int i = 0; i < demo->swapchainImageCount; i++) {
+        err = vkCreateBuffer(demo->device, &buf_info, NULL, &demo->swapchain_image_resources[i].uniform_buffer);
+        assert(!err);
+
+        vkGetBufferMemoryRequirements(demo->device, demo->swapchain_image_resources[i].uniform_buffer, &mem_reqs);
+
+        mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+        mem_alloc.pNext = NULL;
+        mem_alloc.allocationSize = mem_reqs.size;
+        mem_alloc.memoryTypeIndex = 0;
+
+        pass = memory_type_from_properties(demo, mem_reqs.memoryTypeBits,
+                                           VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
+                                           &mem_alloc.memoryTypeIndex);
+        assert(pass);
+
+        err = vkAllocateMemory(demo->device, &mem_alloc, NULL, &demo->swapchain_image_resources[i].uniform_memory);
+        assert(!err);
+
+        err = vkMapMemory(demo->device, demo->swapchain_image_resources[i].uniform_memory, 0, VK_WHOLE_SIZE, 0, (void **)&pData);
+        assert(!err);
+
+        memcpy(pData, &data, sizeof data);
+
+        vkUnmapMemory(demo->device, demo->swapchain_image_resources[i].uniform_memory);
+
+        err = vkBindBufferMemory(demo->device, demo->swapchain_image_resources[i].uniform_buffer,
+                                 demo->swapchain_image_resources[i].uniform_memory, 0);
+        assert(!err);
+    }
+}
+
+static void demo_prepare_descriptor_layout(struct demo *demo) {
+    const VkDescriptorSetLayoutBinding layout_bindings[2] = {
+        [0] =
+            {
+                .binding = 0,
+                .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
+                .descriptorCount = 1,
+                .stageFlags = VK_SHADER_STAGE_VERTEX_BIT,
+                .pImmutableSamplers = NULL,
+            },
+        [1] =
+            {
+                .binding = 1,
+                .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
+                .descriptorCount = DEMO_TEXTURE_COUNT,
+                .stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT,
+                .pImmutableSamplers = NULL,
+            },
+    };
+    const VkDescriptorSetLayoutCreateInfo descriptor_layout = {
+        .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
+        .pNext = NULL,
+        .bindingCount = 2,
+        .pBindings = layout_bindings,
+    };
+    VkResult U_ASSERT_ONLY err;
+
+    err = vkCreateDescriptorSetLayout(demo->device, &descriptor_layout, NULL, &demo->desc_layout);
+    assert(!err);
+
+    const VkPipelineLayoutCreateInfo pPipelineLayoutCreateInfo = {
+        .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
+        .pNext = NULL,
+        .setLayoutCount = 1,
+        .pSetLayouts = &demo->desc_layout,
+    };
+
+    err = vkCreatePipelineLayout(demo->device, &pPipelineLayoutCreateInfo, NULL, &demo->pipeline_layout);
+    assert(!err);
+}
+
+static void demo_prepare_render_pass(struct demo *demo) {
+    // The initial layout for the color and depth attachments will be LAYOUT_UNDEFINED
+    // because at the start of the renderpass, we don't care about their contents.
+    // At the start of the subpass, the color attachment's layout will be transitioned
+    // to LAYOUT_COLOR_ATTACHMENT_OPTIMAL and the depth stencil attachment's layout
+    // will be transitioned to LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL.  At the end of
+    // the renderpass, the color attachment's layout will be transitioned to
+    // LAYOUT_PRESENT_SRC_KHR to be ready to present.  This is all done as part of
+    // the renderpass; no explicit barriers are necessary.
+    const VkAttachmentDescription attachments[2] = {
+        [0] =
+            {
+                .format = demo->format,
+                .flags = 0,
+                .samples = VK_SAMPLE_COUNT_1_BIT,
+                .loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR,
+                .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
+                .stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+                .stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE,
+                .initialLayout = VK_IMAGE_LAYOUT_UNDEFINED,
+                .finalLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
+            },
+        [1] =
+            {
+                .format = demo->depth.format,
+                .flags = 0,
+                .samples = VK_SAMPLE_COUNT_1_BIT,
+                .loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR,
+                .storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE,
+                .stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+                .stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE,
+                .initialLayout = VK_IMAGE_LAYOUT_UNDEFINED,
+                .finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
+            },
+    };
+    const VkAttachmentReference color_reference = {
+        .attachment = 0,
+        .layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+    };
+    const VkAttachmentReference depth_reference = {
+        .attachment = 1,
+        .layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
+    };
+    const VkSubpassDescription subpass = {
+        .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
+        .flags = 0,
+        .inputAttachmentCount = 0,
+        .pInputAttachments = NULL,
+        .colorAttachmentCount = 1,
+        .pColorAttachments = &color_reference,
+        .pResolveAttachments = NULL,
+        .pDepthStencilAttachment = &depth_reference,
+        .preserveAttachmentCount = 0,
+        .pPreserveAttachments = NULL,
+    };
+
+    VkSubpassDependency attachmentDependencies[2] = {
+        [0] =
+            {
+                // Depth buffer is shared between swapchain images
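+                // The previous frame's depth writes must complete before this
+                // frame's depth tests and writes begin.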
+                .srcSubpass = VK_SUBPASS_EXTERNAL,
+                .dstSubpass = 0,
+                .srcStageMask = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
+                .dstStageMask = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
+                .srcAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
+                .dstAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
+                .dependencyFlags = 0,
+            },
+        [1] =
+            {
+                // Image Layout Transition
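+                // Orders the implicit UNDEFINED -> COLOR_ATTACHMENT_OPTIMAL
+                // transition after the image-acquired semaphore wait, which also
+                // occurs at the color-attachment-output stage.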
+                .srcSubpass = VK_SUBPASS_EXTERNAL,
+                .dstSubpass = 0,
+                .srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+                .dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+                .srcAccessMask = 0,
+                .dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT,
+                .dependencyFlags = 0,
+            },
+    };
+
+    const VkRenderPassCreateInfo rp_info = {
+        .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
+        .pNext = NULL,
+        .flags = 0,
+        .attachmentCount = 2,
+        .pAttachments = attachments,
+        .subpassCount = 1,
+        .pSubpasses = &subpass,
+        .dependencyCount = 2,
+        .pDependencies = attachmentDependencies,
+    };
+    VkResult U_ASSERT_ONLY err;
+
+    err = vkCreateRenderPass(demo->device, &rp_info, NULL, &demo->render_pass);
+    assert(!err);
+}
+
+static VkShaderModule demo_prepare_shader_module(struct demo *demo, const uint32_t *code, size_t size) {
+    VkShaderModule module;
+    VkShaderModuleCreateInfo moduleCreateInfo;
+    VkResult U_ASSERT_ONLY err;
+
+    moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
+    moduleCreateInfo.pNext = NULL;
+    moduleCreateInfo.flags = 0;
+    moduleCreateInfo.codeSize = size;
+    moduleCreateInfo.pCode = code;
+
+    err = vkCreateShaderModule(demo->device, &moduleCreateInfo, NULL, &module);
+    assert(!err);
+
+    return module;
+}
+
+static void demo_prepare_vs(struct demo *demo) {
+    const uint32_t vs_code[] = {
+#include "cube.vert.inc"
+    };
+    demo->vert_shader_module = demo_prepare_shader_module(demo, vs_code, sizeof(vs_code));
+}
+
+static void demo_prepare_fs(struct demo *demo) {
+    const uint32_t fs_code[] = {
+#include "cube.frag.inc"
+    };
+    demo->frag_shader_module = demo_prepare_shader_module(demo, fs_code, sizeof(fs_code));
+}
+
+static void demo_prepare_pipeline(struct demo *demo) {
+    VkGraphicsPipelineCreateInfo pipeline;
+    VkPipelineCacheCreateInfo pipelineCache;
+    VkPipelineVertexInputStateCreateInfo vi;
+    VkPipelineInputAssemblyStateCreateInfo ia;
+    VkPipelineRasterizationStateCreateInfo rs;
+    VkPipelineColorBlendStateCreateInfo cb;
+    VkPipelineDepthStencilStateCreateInfo ds;
+    VkPipelineViewportStateCreateInfo vp;
+    VkPipelineMultisampleStateCreateInfo ms;
+    VkDynamicState dynamicStateEnables[VK_DYNAMIC_STATE_RANGE_SIZE];
+    VkPipelineDynamicStateCreateInfo dynamicState;
+    VkResult U_ASSERT_ONLY err;
+
+    memset(dynamicStateEnables, 0, sizeof dynamicStateEnables);
+    memset(&dynamicState, 0, sizeof dynamicState);
+    dynamicState.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
+    dynamicState.pDynamicStates = dynamicStateEnables;
+
+    memset(&pipeline, 0, sizeof(pipeline));
+    pipeline.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
+    pipeline.layout = demo->pipeline_layout;
+
+    memset(&vi, 0, sizeof(vi));
+    vi.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
+
+    memset(&ia, 0, sizeof(ia));
+    ia.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+    ia.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
+
+    memset(&rs, 0, sizeof(rs));
+    rs.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
+    rs.polygonMode = VK_POLYGON_MODE_FILL;
+    rs.cullMode = VK_CULL_MODE_BACK_BIT;
+    rs.frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE;
+    rs.depthClampEnable = VK_FALSE;
+    rs.rasterizerDiscardEnable = VK_FALSE;
+    rs.depthBiasEnable = VK_FALSE;
+    rs.lineWidth = 1.0f;
+
+    memset(&cb, 0, sizeof(cb));
+    cb.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
+    VkPipelineColorBlendAttachmentState att_state[1];
+    memset(att_state, 0, sizeof(att_state));
+    att_state[0].colorWriteMask = 0xf;
+    att_state[0].blendEnable = VK_FALSE;
+    cb.attachmentCount = 1;
+    cb.pAttachments = att_state;
+
+    memset(&vp, 0, sizeof(vp));
+    vp.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
+    vp.viewportCount = 1;
+    dynamicStateEnables[dynamicState.dynamicStateCount++] = VK_DYNAMIC_STATE_VIEWPORT;
+    vp.scissorCount = 1;
+    dynamicStateEnables[dynamicState.dynamicStateCount++] = VK_DYNAMIC_STATE_SCISSOR;
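+    // Viewport and scissor are dynamic state: they are recorded per command
+    // buffer with vkCmdSetViewport/vkCmdSetScissor rather than baked into the
+    // pipeline.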
+
+    memset(&ds, 0, sizeof(ds));
+    ds.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
+    ds.depthTestEnable = VK_TRUE;
+    ds.depthWriteEnable = VK_TRUE;
+    ds.depthCompareOp = VK_COMPARE_OP_LESS_OR_EQUAL;
+    ds.depthBoundsTestEnable = VK_FALSE;
+    ds.back.failOp = VK_STENCIL_OP_KEEP;
+    ds.back.passOp = VK_STENCIL_OP_KEEP;
+    ds.back.compareOp = VK_COMPARE_OP_ALWAYS;
+    ds.stencilTestEnable = VK_FALSE;
+    ds.front = ds.back;
+
+    memset(&ms, 0, sizeof(ms));
+    ms.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+    ms.pSampleMask = NULL;
+    ms.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
+
+    demo_prepare_vs(demo);
+    demo_prepare_fs(demo);
+
+    // Two stages: vs and fs
+    VkPipelineShaderStageCreateInfo shaderStages[2];
+    memset(&shaderStages, 0, 2 * sizeof(VkPipelineShaderStageCreateInfo));
+
+    shaderStages[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+    shaderStages[0].stage = VK_SHADER_STAGE_VERTEX_BIT;
+    shaderStages[0].module = demo->vert_shader_module;
+    shaderStages[0].pName = "main";
+
+    shaderStages[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+    shaderStages[1].stage = VK_SHADER_STAGE_FRAGMENT_BIT;
+    shaderStages[1].module = demo->frag_shader_module;
+    shaderStages[1].pName = "main";
+
+    memset(&pipelineCache, 0, sizeof(pipelineCache));
+    pipelineCache.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
+
+    err = vkCreatePipelineCache(demo->device, &pipelineCache, NULL, &demo->pipelineCache);
+    assert(!err);
+
+    pipeline.pVertexInputState = &vi;
+    pipeline.pInputAssemblyState = &ia;
+    pipeline.pRasterizationState = &rs;
+    pipeline.pColorBlendState = &cb;
+    pipeline.pMultisampleState = &ms;
+    pipeline.pViewportState = &vp;
+    pipeline.pDepthStencilState = &ds;
+    pipeline.stageCount = ARRAY_SIZE(shaderStages);
+    pipeline.pStages = shaderStages;
+    pipeline.renderPass = demo->render_pass;
+    pipeline.pDynamicState = &dynamicState;
+
+    err = vkCreateGraphicsPipelines(demo->device, demo->pipelineCache, 1, &pipeline, NULL, &demo->pipeline);
+    assert(!err);
+
+    vkDestroyShaderModule(demo->device, demo->frag_shader_module, NULL);
+    vkDestroyShaderModule(demo->device, demo->vert_shader_module, NULL);
+}
+
+static void demo_prepare_descriptor_pool(struct demo *demo) {
+    const VkDescriptorPoolSize type_counts[2] = {
+        [0] =
+            {
+                .type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
+                .descriptorCount = demo->swapchainImageCount,
+            },
+        [1] =
+            {
+                .type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
+                .descriptorCount = demo->swapchainImageCount * DEMO_TEXTURE_COUNT,
+            },
+    };
+    const VkDescriptorPoolCreateInfo descriptor_pool = {
+        .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
+        .pNext = NULL,
+        .maxSets = demo->swapchainImageCount,
+        .poolSizeCount = 2,
+        .pPoolSizes = type_counts,
+    };
+    VkResult U_ASSERT_ONLY err;
+
+    err = vkCreateDescriptorPool(demo->device, &descriptor_pool, NULL, &demo->desc_pool);
+    assert(!err);
+}
+
+static void demo_prepare_descriptor_set(struct demo *demo) {
+    VkDescriptorImageInfo tex_descs[DEMO_TEXTURE_COUNT];
+    VkWriteDescriptorSet writes[2];
+    VkResult U_ASSERT_ONLY err;
+
+    VkDescriptorSetAllocateInfo alloc_info = {.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
+                                              .pNext = NULL,
+                                              .descriptorPool = demo->desc_pool,
+                                              .descriptorSetCount = 1,
+                                              .pSetLayouts = &demo->desc_layout};
+
+    VkDescriptorBufferInfo buffer_info;
+    buffer_info.offset = 0;
+    buffer_info.range = sizeof(struct vktexcube_vs_uniform);
+
+    memset(&tex_descs, 0, sizeof(tex_descs));
+    for (unsigned int i = 0; i < DEMO_TEXTURE_COUNT; i++) {
+        tex_descs[i].sampler = demo->textures[i].sampler;
+        tex_descs[i].imageView = demo->textures[i].view;
+        tex_descs[i].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+    }
+
+    memset(&writes, 0, sizeof(writes));
+
+    writes[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    writes[0].descriptorCount = 1;
+    writes[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    writes[0].pBufferInfo = &buffer_info;
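+    // writes[0].dstBinding stays 0 (from the memset above), matching the
+    // uniform-buffer binding in the descriptor set layout.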
+
+    writes[1].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    writes[1].dstBinding = 1;
+    writes[1].descriptorCount = DEMO_TEXTURE_COUNT;
+    writes[1].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+    writes[1].pImageInfo = tex_descs;
+
+    for (unsigned int i = 0; i < demo->swapchainImageCount; i++) {
+        err = vkAllocateDescriptorSets(demo->device, &alloc_info, &demo->swapchain_image_resources[i].descriptor_set);
+        assert(!err);
+        buffer_info.buffer = demo->swapchain_image_resources[i].uniform_buffer;
+        writes[0].dstSet = demo->swapchain_image_resources[i].descriptor_set;
+        writes[1].dstSet = demo->swapchain_image_resources[i].descriptor_set;
+        vkUpdateDescriptorSets(demo->device, 2, writes, 0, NULL);
+    }
+}
+
+static void demo_prepare_framebuffers(struct demo *demo) {
+    VkImageView attachments[2];
+    attachments[1] = demo->depth.view;
+
+    const VkFramebufferCreateInfo fb_info = {
+        .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
+        .pNext = NULL,
+        .renderPass = demo->render_pass,
+        .attachmentCount = 2,
+        .pAttachments = attachments,
+        .width = demo->width,
+        .height = demo->height,
+        .layers = 1,
+    };
+    VkResult U_ASSERT_ONLY err;
+    uint32_t i;
+
+    for (i = 0; i < demo->swapchainImageCount; i++) {
+        attachments[0] = demo->swapchain_image_resources[i].view;
+        err = vkCreateFramebuffer(demo->device, &fb_info, NULL, &demo->swapchain_image_resources[i].framebuffer);
+        assert(!err);
+    }
+}
+
+static void demo_prepare(struct demo *demo) {
+    VkResult U_ASSERT_ONLY err;
+    if (demo->cmd_pool == VK_NULL_HANDLE) {
+        const VkCommandPoolCreateInfo cmd_pool_info = {
+            .sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
+            .pNext = NULL,
+            .queueFamilyIndex = demo->graphics_queue_family_index,
+            .flags = 0,
+        };
+        err = vkCreateCommandPool(demo->device, &cmd_pool_info, NULL, &demo->cmd_pool);
+        assert(!err);
+    }
+
+    const VkCommandBufferAllocateInfo cmd = {
+        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
+        .pNext = NULL,
+        .commandPool = demo->cmd_pool,
+        .level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
+        .commandBufferCount = 1,
+    };
+    err = vkAllocateCommandBuffers(demo->device, &cmd, &demo->cmd);
+    assert(!err);
+    VkCommandBufferBeginInfo cmd_buf_info = {
+        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+        .pNext = NULL,
+        .flags = 0,
+        .pInheritanceInfo = NULL,
+    };
+    err = vkBeginCommandBuffer(demo->cmd, &cmd_buf_info);
+    assert(!err);
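+    // demo->cmd is an initialization command buffer: the layout transitions and
+    // staging copies recorded by the prepare functions below are submitted once
+    // by demo_flush_init_cmd().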
+
+    demo_prepare_buffers(demo);
+
+    if (demo->is_minimized) {
+        demo->prepared = false;
+        return;
+    }
+
+    demo_prepare_depth(demo);
+    demo_prepare_textures(demo);
+    demo_prepare_cube_data_buffers(demo);
+
+    demo_prepare_descriptor_layout(demo);
+    demo_prepare_render_pass(demo);
+    demo_prepare_pipeline(demo);
+
+    for (uint32_t i = 0; i < demo->swapchainImageCount; i++) {
+        err = vkAllocateCommandBuffers(demo->device, &cmd, &demo->swapchain_image_resources[i].cmd);
+        assert(!err);
+    }
+
+    if (demo->separate_present_queue) {
+        const VkCommandPoolCreateInfo present_cmd_pool_info = {
+            .sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
+            .pNext = NULL,
+            .queueFamilyIndex = demo->present_queue_family_index,
+            .flags = 0,
+        };
+        err = vkCreateCommandPool(demo->device, &present_cmd_pool_info, NULL, &demo->present_cmd_pool);
+        assert(!err);
+        const VkCommandBufferAllocateInfo present_cmd_info = {
+            .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
+            .pNext = NULL,
+            .commandPool = demo->present_cmd_pool,
+            .level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
+            .commandBufferCount = 1,
+        };
+        for (uint32_t i = 0; i < demo->swapchainImageCount; i++) {
+            err = vkAllocateCommandBuffers(demo->device, &present_cmd_info,
+                                           &demo->swapchain_image_resources[i].graphics_to_present_cmd);
+            assert(!err);
+            demo_build_image_ownership_cmd(demo, i);
+        }
+    }
+
+    demo_prepare_descriptor_pool(demo);
+    demo_prepare_descriptor_set(demo);
+
+    demo_prepare_framebuffers(demo);
+
+    for (uint32_t i = 0; i < demo->swapchainImageCount; i++) {
+        demo->current_buffer = i;
+        demo_draw_build_cmd(demo, demo->swapchain_image_resources[i].cmd);
+    }
+
+    /*
+     * Prepare functions above may generate pipeline commands
+     * that need to be flushed before beginning the render loop.
+     */
+    demo_flush_init_cmd(demo);
+    if (demo->staging_texture.buffer) {
+        demo_destroy_texture(demo, &demo->staging_texture);
+    }
+
+    demo->current_buffer = 0;
+    demo->prepared = true;
+}
+
+static void demo_cleanup(struct demo *demo) {
+    uint32_t i;
+
+    demo->prepared = false;
+    vkDeviceWaitIdle(demo->device);
+
+    // Wait for fences from present operations
+    for (i = 0; i < FRAME_LAG; i++) {
+        vkWaitForFences(demo->device, 1, &demo->fences[i], VK_TRUE, UINT64_MAX);
+        vkDestroyFence(demo->device, demo->fences[i], NULL);
+        vkDestroySemaphore(demo->device, demo->image_acquired_semaphores[i], NULL);
+        vkDestroySemaphore(demo->device, demo->draw_complete_semaphores[i], NULL);
+        if (demo->separate_present_queue) {
+            vkDestroySemaphore(demo->device, demo->image_ownership_semaphores[i], NULL);
+        }
+    }
+
+    // If the window is currently minimized, demo_resize has already done some cleanup for us.
+    if (!demo->is_minimized) {
+        for (i = 0; i < demo->swapchainImageCount; i++) {
+            vkDestroyFramebuffer(demo->device, demo->swapchain_image_resources[i].framebuffer, NULL);
+        }
+        vkDestroyDescriptorPool(demo->device, demo->desc_pool, NULL);
+
+        vkDestroyPipeline(demo->device, demo->pipeline, NULL);
+        vkDestroyPipelineCache(demo->device, demo->pipelineCache, NULL);
+        vkDestroyRenderPass(demo->device, demo->render_pass, NULL);
+        vkDestroyPipelineLayout(demo->device, demo->pipeline_layout, NULL);
+        vkDestroyDescriptorSetLayout(demo->device, demo->desc_layout, NULL);
+
+        for (i = 0; i < DEMO_TEXTURE_COUNT; i++) {
+            vkDestroyImageView(demo->device, demo->textures[i].view, NULL);
+            vkDestroyImage(demo->device, demo->textures[i].image, NULL);
+            vkFreeMemory(demo->device, demo->textures[i].mem, NULL);
+            vkDestroySampler(demo->device, demo->textures[i].sampler, NULL);
+        }
+        demo->fpDestroySwapchainKHR(demo->device, demo->swapchain, NULL);
+
+        vkDestroyImageView(demo->device, demo->depth.view, NULL);
+        vkDestroyImage(demo->device, demo->depth.image, NULL);
+        vkFreeMemory(demo->device, demo->depth.mem, NULL);
+
+        for (i = 0; i < demo->swapchainImageCount; i++) {
+            vkDestroyImageView(demo->device, demo->swapchain_image_resources[i].view, NULL);
+            vkFreeCommandBuffers(demo->device, demo->cmd_pool, 1, &demo->swapchain_image_resources[i].cmd);
+            vkDestroyBuffer(demo->device, demo->swapchain_image_resources[i].uniform_buffer, NULL);
+            vkFreeMemory(demo->device, demo->swapchain_image_resources[i].uniform_memory, NULL);
+        }
+        free(demo->swapchain_image_resources);
+        free(demo->queue_props);
+        vkDestroyCommandPool(demo->device, demo->cmd_pool, NULL);
+
+        if (demo->separate_present_queue) {
+            vkDestroyCommandPool(demo->device, demo->present_cmd_pool, NULL);
+        }
+    }
+    vkDeviceWaitIdle(demo->device);
+    vkDestroyDevice(demo->device, NULL);
+    if (demo->validate) {
+        demo->DestroyDebugUtilsMessengerEXT(demo->inst, demo->dbg_messenger, NULL);
+    }
+    vkDestroySurfaceKHR(demo->inst, demo->surface, NULL);
+
+#if defined(VK_USE_PLATFORM_XLIB_KHR)
+    XDestroyWindow(demo->display, demo->xlib_window);
+    XCloseDisplay(demo->display);
+#elif defined(VK_USE_PLATFORM_XCB_KHR)
+    xcb_destroy_window(demo->connection, demo->xcb_window);
+    xcb_disconnect(demo->connection);
+    free(demo->atom_wm_delete_window);
+#elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
+    wl_keyboard_destroy(demo->keyboard);
+    wl_pointer_destroy(demo->pointer);
+    wl_seat_destroy(demo->seat);
+    wl_shell_surface_destroy(demo->shell_surface);
+    wl_surface_destroy(demo->window);
+    wl_shell_destroy(demo->shell);
+    wl_compositor_destroy(demo->compositor);
+    wl_registry_destroy(demo->registry);
+    wl_display_disconnect(demo->display);
+#endif
+
+    vkDestroyInstance(demo->inst, NULL);
+}
+
+static void demo_resize(struct demo *demo) {
+    uint32_t i;
+
+    // Don't react to resize until after first initialization.
+    if (!demo->prepared) {
+        if (demo->is_minimized) {
+            demo_prepare(demo);
+        }
+        return;
+    }
+    // In order to properly resize the window, we must re-create the swapchain
+    // AND redo the command buffers, etc.
+    //
+    // First, perform part of the demo_cleanup() function:
+    demo->prepared = false;
+    vkDeviceWaitIdle(demo->device);
+
+    for (i = 0; i < demo->swapchainImageCount; i++) {
+        vkDestroyFramebuffer(demo->device, demo->swapchain_image_resources[i].framebuffer, NULL);
+    }
+    vkDestroyDescriptorPool(demo->device, demo->desc_pool, NULL);
+
+    vkDestroyPipeline(demo->device, demo->pipeline, NULL);
+    vkDestroyPipelineCache(demo->device, demo->pipelineCache, NULL);
+    vkDestroyRenderPass(demo->device, demo->render_pass, NULL);
+    vkDestroyPipelineLayout(demo->device, demo->pipeline_layout, NULL);
+    vkDestroyDescriptorSetLayout(demo->device, demo->desc_layout, NULL);
+
+    for (i = 0; i < DEMO_TEXTURE_COUNT; i++) {
+        vkDestroyImageView(demo->device, demo->textures[i].view, NULL);
+        vkDestroyImage(demo->device, demo->textures[i].image, NULL);
+        vkFreeMemory(demo->device, demo->textures[i].mem, NULL);
+        vkDestroySampler(demo->device, demo->textures[i].sampler, NULL);
+    }
+
+    vkDestroyImageView(demo->device, demo->depth.view, NULL);
+    vkDestroyImage(demo->device, demo->depth.image, NULL);
+    vkFreeMemory(demo->device, demo->depth.mem, NULL);
+
+    for (i = 0; i < demo->swapchainImageCount; i++) {
+        vkDestroyImageView(demo->device, demo->swapchain_image_resources[i].view, NULL);
+        vkFreeCommandBuffers(demo->device, demo->cmd_pool, 1, &demo->swapchain_image_resources[i].cmd);
+        vkDestroyBuffer(demo->device, demo->swapchain_image_resources[i].uniform_buffer, NULL);
+        vkFreeMemory(demo->device, demo->swapchain_image_resources[i].uniform_memory, NULL);
+    }
+    vkDestroyCommandPool(demo->device, demo->cmd_pool, NULL);
+    demo->cmd_pool = VK_NULL_HANDLE;
+    if (demo->separate_present_queue) {
+        vkDestroyCommandPool(demo->device, demo->present_cmd_pool, NULL);
+    }
+    free(demo->swapchain_image_resources);
+
+    // Second, re-perform the demo_prepare() function, which will re-create the
+    // swapchain:
+    demo_prepare(demo);
+}
+
+// On MS-Windows, make this a global, so it's available to WndProc()
+struct demo demo;
+
+#if defined(VK_USE_PLATFORM_WIN32_KHR)
+static void demo_run(struct demo *demo) {
+    if (!demo->prepared) return;
+
+    demo_draw(demo);
+    demo->curFrame++;
+    if (demo->frameCount != INT32_MAX && demo->curFrame == demo->frameCount) {
+        PostQuitMessage(validation_error);
+    }
+}
+
+// MS-Windows event handling function:
+LRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam) {
+    switch (uMsg) {
+        case WM_CLOSE:
+            PostQuitMessage(validation_error);
+            break;
+        case WM_PAINT:
+            // The validation callback calls MessageBox which can generate paint
+            // events - don't make more Vulkan calls if we got here from the
+            // callback
+            if (!in_callback) {
+                demo_run(&demo);
+            }
+            break;
+        case WM_GETMINMAXINFO:  // set window's minimum size
+            ((MINMAXINFO *)lParam)->ptMinTrackSize = demo.minsize;
+            return 0;
+        case WM_ERASEBKGND:
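+            // Returning nonzero tells Windows the background is already handled, which avoids flicker.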
+            return 1;
+        case WM_SIZE:
+            // Resize the application to the new window size, except when
+            // it was minimized. Vulkan doesn't support images or swapchains
+            // with width=0 and height=0.
+            if (wParam != SIZE_MINIMIZED) {
+                demo.width = lParam & 0xffff;
+                demo.height = (lParam & 0xffff0000) >> 16;
+                demo_resize(&demo);
+            }
+            break;
+        case WM_KEYDOWN:
+            switch (wParam) {
+                case VK_ESCAPE:
+                    PostQuitMessage(validation_error);
+                    break;
+                case VK_LEFT:
+                    demo.spin_angle -= demo.spin_increment;
+                    break;
+                case VK_RIGHT:
+                    demo.spin_angle += demo.spin_increment;
+                    break;
+                case VK_SPACE:
+                    demo.pause = !demo.pause;
+                    break;
+            }
+            return 0;
+        default:
+            break;
+    }
+    return (DefWindowProc(hWnd, uMsg, wParam, lParam));
+}
+
+static void demo_create_window(struct demo *demo) {
+    WNDCLASSEX win_class;
+
+    // Initialize the window class structure:
+    win_class.cbSize = sizeof(WNDCLASSEX);
+    win_class.style = CS_HREDRAW | CS_VREDRAW;
+    win_class.lpfnWndProc = WndProc;
+    win_class.cbClsExtra = 0;
+    win_class.cbWndExtra = 0;
+    win_class.hInstance = demo->connection;  // hInstance
+    win_class.hIcon = LoadIcon(NULL, IDI_APPLICATION);
+    win_class.hCursor = LoadCursor(NULL, IDC_ARROW);
+    win_class.hbrBackground = (HBRUSH)GetStockObject(WHITE_BRUSH);
+    win_class.lpszMenuName = NULL;
+    win_class.lpszClassName = demo->name;
+    win_class.hIconSm = LoadIcon(NULL, IDI_WINLOGO);
+    // Register window class:
+    if (!RegisterClassEx(&win_class)) {
+        // It didn't work, so try to give a useful error:
+        printf("Unexpected error trying to start the application!\n");
+        fflush(stdout);
+        exit(1);
+    }
+    // Create window with the registered class:
+    RECT wr = {0, 0, demo->width, demo->height};
+    AdjustWindowRect(&wr, WS_OVERLAPPEDWINDOW, FALSE);
+    demo->window = CreateWindowEx(0,
+                                  demo->name,            // class name
+                                  demo->name,            // app name
+                                  WS_OVERLAPPEDWINDOW |  // window style
+                                      WS_VISIBLE | WS_SYSMENU,
+                                  100, 100,            // x/y coords
+                                  wr.right - wr.left,  // width
+                                  wr.bottom - wr.top,  // height
+                                  NULL,                // handle to parent
+                                  NULL,                // handle to menu
+                                  demo->connection,    // hInstance
+                                  NULL);               // no extra parameters
+    if (!demo->window) {
+        // It didn't work, so try to give a useful error:
+        printf("Cannot create a window in which to draw!\n");
+        fflush(stdout);
+        exit(1);
+    }
+    // Window client area size must be at least 1 pixel high, to prevent crash.
+    demo->minsize.x = GetSystemMetrics(SM_CXMINTRACK);
+    demo->minsize.y = GetSystemMetrics(SM_CYMINTRACK) + 1;
+}
+#elif defined(VK_USE_PLATFORM_XLIB_KHR)
+static void demo_create_xlib_window(struct demo *demo) {
+    const char *display_envar = getenv("DISPLAY");
+    if (display_envar == NULL || display_envar[0] == '\0') {
+        printf("Environment variable DISPLAY requires a valid value.\nExiting ...\n");
+        fflush(stdout);
+        exit(1);
+    }
+
+    XInitThreads();
+    demo->display = XOpenDisplay(NULL);
+    long visualMask = VisualScreenMask;
+    int numberOfVisuals;
+    XVisualInfo vInfoTemplate = {};
+    vInfoTemplate.screen = DefaultScreen(demo->display);
+    XVisualInfo *visualInfo = XGetVisualInfo(demo->display, visualMask, &vInfoTemplate, &numberOfVisuals);
+
+    Colormap colormap =
+        XCreateColormap(demo->display, RootWindow(demo->display, vInfoTemplate.screen), visualInfo->visual, AllocNone);
+
+    XSetWindowAttributes windowAttributes = {};
+    windowAttributes.colormap = colormap;
+    windowAttributes.background_pixel = 0xFFFFFFFF;
+    windowAttributes.border_pixel = 0;
+    windowAttributes.event_mask = KeyPressMask | KeyReleaseMask | StructureNotifyMask | ExposureMask;
+
+    demo->xlib_window = XCreateWindow(demo->display, RootWindow(demo->display, vInfoTemplate.screen), 0, 0, demo->width,
+                                      demo->height, 0, visualInfo->depth, InputOutput, visualInfo->visual,
+                                      CWBackPixel | CWBorderPixel | CWEventMask | CWColormap, &windowAttributes);
+
+    XSelectInput(demo->display, demo->xlib_window, ExposureMask | KeyPressMask);
+    XMapWindow(demo->display, demo->xlib_window);
+    XFlush(demo->display);
+    demo->xlib_wm_delete_window = XInternAtom(demo->display, "WM_DELETE_WINDOW", False);
+}
+static void demo_handle_xlib_event(struct demo *demo, const XEvent *event) {
+    switch (event->type) {
+        case ClientMessage:
+            if ((Atom)event->xclient.data.l[0] == demo->xlib_wm_delete_window) demo->quit = true;
+            break;
+        case KeyPress:
+            switch (event->xkey.keycode) {
+                case 0x9:  // Escape
+                    demo->quit = true;
+                    break;
+                case 0x71:  // left arrow key
+                    demo->spin_angle -= demo->spin_increment;
+                    break;
+                case 0x72:  // right arrow key
+                    demo->spin_angle += demo->spin_increment;
+                    break;
+                case 0x41:  // space bar
+                    demo->pause = !demo->pause;
+                    break;
+            }
+            break;
+        case ConfigureNotify:
+            if ((demo->width != event->xconfigure.width) || (demo->height != event->xconfigure.height)) {
+                demo->width = event->xconfigure.width;
+                demo->height = event->xconfigure.height;
+                demo_resize(demo);
+            }
+            break;
+        default:
+            break;
+    }
+}
+
+static void demo_run_xlib(struct demo *demo) {
+    while (!demo->quit) {
+        XEvent event;
+
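+        // When paused, block for the next event so the loop does not spin;
+        // any further pending events are drained below before drawing.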
+        if (demo->pause) {
+            XNextEvent(demo->display, &event);
+            demo_handle_xlib_event(demo, &event);
+        }
+        while (XPending(demo->display) > 0) {
+            XNextEvent(demo->display, &event);
+            demo_handle_xlib_event(demo, &event);
+        }
+
+        demo_draw(demo);
+        demo->curFrame++;
+        if (demo->frameCount != INT32_MAX && demo->curFrame == demo->frameCount) demo->quit = true;
+    }
+}
+#elif defined(VK_USE_PLATFORM_XCB_KHR)
+static void demo_handle_xcb_event(struct demo *demo, const xcb_generic_event_t *event) {
+    uint8_t event_code = event->response_type & 0x7f;
+    switch (event_code) {
+        case XCB_EXPOSE:
+            // TODO: Resize window
+            break;
+        case XCB_CLIENT_MESSAGE:
+            if ((*(xcb_client_message_event_t *)event).data.data32[0] == (*demo->atom_wm_delete_window).atom) {
+                demo->quit = true;
+            }
+            break;
+        case XCB_KEY_RELEASE: {
+            const xcb_key_release_event_t *key = (const xcb_key_release_event_t *)event;
+
+            switch (key->detail) {
+                case 0x9:  // Escape
+                    demo->quit = true;
+                    break;
+                case 0x71:  // left arrow key
+                    demo->spin_angle -= demo->spin_increment;
+                    break;
+                case 0x72:  // right arrow key
+                    demo->spin_angle += demo->spin_increment;
+                    break;
+                case 0x41:  // space bar
+                    demo->pause = !demo->pause;
+                    break;
+            }
+        } break;
+        case XCB_CONFIGURE_NOTIFY: {
+            const xcb_configure_notify_event_t *cfg = (const xcb_configure_notify_event_t *)event;
+            if ((demo->width != cfg->width) || (demo->height != cfg->height)) {
+                demo->width = cfg->width;
+                demo->height = cfg->height;
+                demo_resize(demo);
+            }
+        } break;
+        default:
+            break;
+    }
+}
+
+static void demo_run_xcb(struct demo *demo) {
+    xcb_flush(demo->connection);
+
+    while (!demo->quit) {
+        xcb_generic_event_t *event;
+
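+        // Block for an event while paused so the loop does not spin; otherwise just poll.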
+        if (demo->pause) {
+            event = xcb_wait_for_event(demo->connection);
+        } else {
+            event = xcb_poll_for_event(demo->connection);
+        }
+        while (event) {
+            demo_handle_xcb_event(demo, event);
+            free(event);
+            event = xcb_poll_for_event(demo->connection);
+        }
+
+        demo_draw(demo);
+        demo->curFrame++;
+        if (demo->frameCount != INT32_MAX && demo->curFrame == demo->frameCount) demo->quit = true;
+    }
+}
+
+static void demo_create_xcb_window(struct demo *demo) {
+    uint32_t value_mask, value_list[32];
+
+    demo->xcb_window = xcb_generate_id(demo->connection);
+
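+    // Black background, plus key-release, expose, and structure-notify events
+    // so the demo sees input and window size changes.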
+    value_mask = XCB_CW_BACK_PIXEL | XCB_CW_EVENT_MASK;
+    value_list[0] = demo->screen->black_pixel;
+    value_list[1] = XCB_EVENT_MASK_KEY_RELEASE | XCB_EVENT_MASK_EXPOSURE | XCB_EVENT_MASK_STRUCTURE_NOTIFY;
+
+    xcb_create_window(demo->connection, XCB_COPY_FROM_PARENT, demo->xcb_window, demo->screen->root, 0, 0, demo->width, demo->height,
+                      0, XCB_WINDOW_CLASS_INPUT_OUTPUT, demo->screen->root_visual, value_mask, value_list);
+
+    /* Magic code that will send notification when window is destroyed */
+    xcb_intern_atom_cookie_t cookie = xcb_intern_atom(demo->connection, 1, 12, "WM_PROTOCOLS");
+    xcb_intern_atom_reply_t *reply = xcb_intern_atom_reply(demo->connection, cookie, 0);
+
+    xcb_intern_atom_cookie_t cookie2 = xcb_intern_atom(demo->connection, 0, 16, "WM_DELETE_WINDOW");
+    demo->atom_wm_delete_window = xcb_intern_atom_reply(demo->connection, cookie2, 0);
+
+    xcb_change_property(demo->connection, XCB_PROP_MODE_REPLACE, demo->xcb_window, (*reply).atom, 4, 32, 1,
+                        &(*demo->atom_wm_delete_window).atom);
+    free(reply);
+
+    xcb_map_window(demo->connection, demo->xcb_window);
+
+    // Force the x/y coordinates to 100,100 results are identical in consecutive
+    // runs
+    const uint32_t coords[] = {100, 100};
+    xcb_configure_window(demo->connection, demo->xcb_window, XCB_CONFIG_WINDOW_X | XCB_CONFIG_WINDOW_Y, coords);
+}
+// VK_USE_PLATFORM_XCB_KHR
+#elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
+static void demo_run(struct demo *demo) {
+    while (!demo->quit) {
+        if (demo->pause) {
+            wl_display_dispatch(demo->display);  // block and wait for input
+        } else {
+            wl_display_dispatch_pending(demo->display);  // don't block
+            demo_draw(demo);
+            demo->curFrame++;
+            if (demo->frameCount != INT32_MAX && demo->curFrame == demo->frameCount) demo->quit = true;
+        }
+    }
+}
+
+static void handle_ping(void *data UNUSED, struct wl_shell_surface *shell_surface, uint32_t serial) {
+    wl_shell_surface_pong(shell_surface, serial);
+}
+
+static void handle_configure(void *data UNUSED, struct wl_shell_surface *shell_surface UNUSED, uint32_t edges UNUSED,
+                             int32_t width UNUSED, int32_t height UNUSED) {}
+
+static void handle_popup_done(void *data UNUSED, struct wl_shell_surface *shell_surface UNUSED) {}
+
+static const struct wl_shell_surface_listener shell_surface_listener = {handle_ping, handle_configure, handle_popup_done};
+
+static void demo_create_window(struct demo *demo) {
+    demo->window = wl_compositor_create_surface(demo->compositor);
+    if (!demo->window) {
+        printf("Can not create wayland_surface from compositor!\n");
+        fflush(stdout);
+        exit(1);
+    }
+
+    demo->shell_surface = wl_shell_get_shell_surface(demo->shell, demo->window);
+    if (!demo->shell_surface) {
+        printf("Can not get shell_surface from wayland_surface!\n");
+        fflush(stdout);
+        exit(1);
+    }
+    wl_shell_surface_add_listener(demo->shell_surface, &shell_surface_listener, demo);
+    wl_shell_surface_set_toplevel(demo->shell_surface);
+    wl_shell_surface_set_title(demo->shell_surface, APP_SHORT_NAME);
+}
+#elif defined(VK_USE_PLATFORM_ANDROID_KHR)
+static void demo_run(struct demo *demo) {
+    if (!demo->prepared) return;
+
+    demo_draw(demo);
+    demo->curFrame++;
+}
+#elif defined(VK_USE_PLATFORM_METAL_EXT)
+static void demo_run(struct demo *demo) {
+    demo_draw(demo);
+    demo->curFrame++;
+    if (demo->frameCount != INT32_MAX && demo->curFrame == demo->frameCount) {
+        demo->quit = TRUE;
+    }
+}
+#elif defined(VK_USE_PLATFORM_DISPLAY_KHR)
+static VkResult demo_create_display_surface(struct demo *demo) {
+    VkResult U_ASSERT_ONLY err;
+    uint32_t display_count;
+    uint32_t mode_count;
+    uint32_t plane_count;
+    VkDisplayPropertiesKHR display_props;
+    VkDisplayKHR display;
+    VkDisplayModePropertiesKHR mode_props;
+    VkDisplayPlanePropertiesKHR *plane_props;
+    VkBool32 found_plane = VK_FALSE;
+    uint32_t plane_index;
+    VkExtent2D image_extent;
+    VkDisplaySurfaceCreateInfoKHR create_info;
+
+    // Get the first display
+    err = vkGetPhysicalDeviceDisplayPropertiesKHR(demo->gpu, &display_count, NULL);
+    assert(!err);
+
+    if (display_count == 0) {
+        printf("Cannot find any display!\n");
+        fflush(stdout);
+        exit(1);
+    }
+
+    display_count = 1;
+    err = vkGetPhysicalDeviceDisplayPropertiesKHR(demo->gpu, &display_count, &display_props);
+    assert(!err || (err == VK_INCOMPLETE));
+
+    display = display_props.display;
+
+    // Get the first mode of the display
+    err = vkGetDisplayModePropertiesKHR(demo->gpu, display, &mode_count, NULL);
+    assert(!err);
+
+    if (mode_count == 0) {
+        printf("Cannot find any mode for the display!\n");
+        fflush(stdout);
+        exit(1);
+    }
+
+    mode_count = 1;
+    err = vkGetDisplayModePropertiesKHR(demo->gpu, display, &mode_count, &mode_props);
+    assert(!err || (err == VK_INCOMPLETE));
+
+    // Get the list of planes
+    err = vkGetPhysicalDeviceDisplayPlanePropertiesKHR(demo->gpu, &plane_count, NULL);
+    assert(!err);
+
+    if (plane_count == 0) {
+        printf("Cannot find any plane!\n");
+        fflush(stdout);
+        exit(1);
+    }
+
+    plane_props = malloc(sizeof(VkDisplayPlanePropertiesKHR) * plane_count);
+    assert(plane_props);
+
+    err = vkGetPhysicalDeviceDisplayPlanePropertiesKHR(demo->gpu, &plane_count, plane_props);
+    assert(!err);
+
+    // Find a plane compatible with the display
+    for (plane_index = 0; plane_index < plane_count; plane_index++) {
+        uint32_t supported_count;
+        VkDisplayKHR *supported_displays;
+
+        // Disqualify planes that are bound to a different display
+        if ((plane_props[plane_index].currentDisplay != VK_NULL_HANDLE) && (plane_props[plane_index].currentDisplay != display)) {
+            continue;
+        }
+
+        err = vkGetDisplayPlaneSupportedDisplaysKHR(demo->gpu, plane_index, &supported_count, NULL);
+        assert(!err);
+
+        if (supported_count == 0) {
+            continue;
+        }
+
+        supported_displays = malloc(sizeof(VkDisplayKHR) * supported_count);
+        assert(supported_displays);
+
+        err = vkGetDisplayPlaneSupportedDisplaysKHR(demo->gpu, plane_index, &supported_count, supported_displays);
+        assert(!err);
+
+        for (uint32_t i = 0; i < supported_count; i++) {
+            if (supported_displays[i] == display) {
+                found_plane = VK_TRUE;
+                break;
+            }
+        }
+
+        free(supported_displays);
+
+        if (found_plane) {
+            break;
+        }
+    }
+
+    if (!found_plane) {
+        printf("Cannot find a plane compatible with the display!\n");
+        fflush(stdout);
+        exit(1);
+    }
+
+    // Capture the plane's stack index before the plane properties are freed;
+    // it is needed for the surface create info below.
+    uint32_t plane_stack_index = plane_props[plane_index].currentStackIndex;
+    free(plane_props);
+
+    VkDisplayPlaneCapabilitiesKHR planeCaps;
+    vkGetDisplayPlaneCapabilitiesKHR(demo->gpu, mode_props.displayMode, plane_index, &planeCaps);
+    // Find a supported alpha mode
+    VkDisplayPlaneAlphaFlagBitsKHR alphaMode = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR;
+    VkDisplayPlaneAlphaFlagBitsKHR alphaModes[4] = {
+        VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR,
+        VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR,
+        VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR,
+        VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR,
+    };
+    // Iterate over the element count, not the byte size of the array.
+    for (uint32_t i = 0; i < sizeof(alphaModes) / sizeof(alphaModes[0]); i++) {
+        if (planeCaps.supportedAlpha & alphaModes[i]) {
+            alphaMode = alphaModes[i];
+            break;
+        }
+    }
+    image_extent.width = mode_props.parameters.visibleRegion.width;
+    image_extent.height = mode_props.parameters.visibleRegion.height;
+
+    create_info.sType = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR;
+    create_info.pNext = NULL;
+    create_info.flags = 0;
+    create_info.displayMode = mode_props.displayMode;
+    create_info.planeIndex = plane_index;
+    create_info.planeStackIndex = plane_stack_index;
+    create_info.transform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
+    create_info.alphaMode = alphaMode;
+    create_info.globalAlpha = 1.0f;
+    create_info.imageExtent = image_extent;
+
+    return vkCreateDisplayPlaneSurfaceKHR(demo->inst, &create_info, NULL, &demo->surface);
+}
+
+static void demo_run_display(struct demo *demo) {
+    while (!demo->quit) {
+        demo_draw(demo);
+        demo->curFrame++;
+
+        if (demo->frameCount != INT32_MAX && demo->curFrame == demo->frameCount) {
+            demo->quit = true;
+        }
+    }
+}
+#endif
+
+/*
+ * Return 1 (true) if all layer names specified in check_names
+ * can be found in given layer properties.
+ */
+static VkBool32 demo_check_layers(uint32_t check_count, char **check_names, uint32_t layer_count, VkLayerProperties *layers) {
+    for (uint32_t i = 0; i < check_count; i++) {
+        VkBool32 found = 0;
+        for (uint32_t j = 0; j < layer_count; j++) {
+            if (!strcmp(check_names[i], layers[j].layerName)) {
+                found = 1;
+                break;
+            }
+        }
+        if (!found) {
+            fprintf(stderr, "Cannot find layer: %s\n", check_names[i]);
+            return 0;
+        }
+    }
+    return 1;
+}
+
+static void demo_init_vk(struct demo *demo) {
+    VkResult err;
+    uint32_t instance_extension_count = 0;
+    uint32_t instance_layer_count = 0;
+    char *instance_validation_layers[] = {"VK_LAYER_KHRONOS_validation"};
+    demo->enabled_extension_count = 0;
+    demo->enabled_layer_count = 0;
+    demo->is_minimized = false;
+    demo->cmd_pool = VK_NULL_HANDLE;
+
+    // Look for validation layers
+    VkBool32 validation_found = 0;
+    if (demo->validate) {
+        err = vkEnumerateInstanceLayerProperties(&instance_layer_count, NULL);
+        assert(!err);
+
+        if (instance_layer_count > 0) {
+            VkLayerProperties *instance_layers = malloc(sizeof(VkLayerProperties) * instance_layer_count);
+            err = vkEnumerateInstanceLayerProperties(&instance_layer_count, instance_layers);
+            assert(!err);
+
+            validation_found = demo_check_layers(ARRAY_SIZE(instance_validation_layers), instance_validation_layers,
+                                                 instance_layer_count, instance_layers);
+            if (validation_found) {
+                demo->enabled_layer_count = ARRAY_SIZE(instance_validation_layers);
+                demo->enabled_layers[0] = "VK_LAYER_KHRONOS_validation";
+            }
+            free(instance_layers);
+        }
+
+        if (!validation_found) {
+            ERR_EXIT(
+                "vkEnumerateInstanceLayerProperties failed to find required validation layer.\n\n"
+                "Please look at the Getting Started guide for additional information.\n",
+                "vkCreateInstance Failure");
+        }
+    }
+
+    /* Look for instance extensions */
+    VkBool32 surfaceExtFound = 0;
+    VkBool32 platformSurfaceExtFound = 0;
+    memset(demo->extension_names, 0, sizeof(demo->extension_names));
+
+    err = vkEnumerateInstanceExtensionProperties(NULL, &instance_extension_count, NULL);
+    assert(!err);
+
+    if (instance_extension_count > 0) {
+        VkExtensionProperties *instance_extensions = malloc(sizeof(VkExtensionProperties) * instance_extension_count);
+        err = vkEnumerateInstanceExtensionProperties(NULL, &instance_extension_count, instance_extensions);
+        assert(!err);
+        for (uint32_t i = 0; i < instance_extension_count; i++) {
+            if (!strcmp(VK_KHR_SURFACE_EXTENSION_NAME, instance_extensions[i].extensionName)) {
+                surfaceExtFound = 1;
+                demo->extension_names[demo->enabled_extension_count++] = VK_KHR_SURFACE_EXTENSION_NAME;
+            }
+#if defined(VK_USE_PLATFORM_WIN32_KHR)
+            if (!strcmp(VK_KHR_WIN32_SURFACE_EXTENSION_NAME, instance_extensions[i].extensionName)) {
+                platformSurfaceExtFound = 1;
+                demo->extension_names[demo->enabled_extension_count++] = VK_KHR_WIN32_SURFACE_EXTENSION_NAME;
+            }
+#elif defined(VK_USE_PLATFORM_XLIB_KHR)
+            if (!strcmp(VK_KHR_XLIB_SURFACE_EXTENSION_NAME, instance_extensions[i].extensionName)) {
+                platformSurfaceExtFound = 1;
+                demo->extension_names[demo->enabled_extension_count++] = VK_KHR_XLIB_SURFACE_EXTENSION_NAME;
+            }
+#elif defined(VK_USE_PLATFORM_XCB_KHR)
+            if (!strcmp(VK_KHR_XCB_SURFACE_EXTENSION_NAME, instance_extensions[i].extensionName)) {
+                platformSurfaceExtFound = 1;
+                demo->extension_names[demo->enabled_extension_count++] = VK_KHR_XCB_SURFACE_EXTENSION_NAME;
+            }
+#elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
+            if (!strcmp(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME, instance_extensions[i].extensionName)) {
+                platformSurfaceExtFound = 1;
+                demo->extension_names[demo->enabled_extension_count++] = VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME;
+            }
+#elif defined(VK_USE_PLATFORM_DISPLAY_KHR)
+            if (!strcmp(VK_KHR_DISPLAY_EXTENSION_NAME, instance_extensions[i].extensionName)) {
+                platformSurfaceExtFound = 1;
+                demo->extension_names[demo->enabled_extension_count++] = VK_KHR_DISPLAY_EXTENSION_NAME;
+            }
+#elif defined(VK_USE_PLATFORM_ANDROID_KHR)
+            if (!strcmp(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME, instance_extensions[i].extensionName)) {
+                platformSurfaceExtFound = 1;
+                demo->extension_names[demo->enabled_extension_count++] = VK_KHR_ANDROID_SURFACE_EXTENSION_NAME;
+            }
+#elif defined(VK_USE_PLATFORM_METAL_EXT)
+            if (!strcmp(VK_EXT_METAL_SURFACE_EXTENSION_NAME, instance_extensions[i].extensionName)) {
+                platformSurfaceExtFound = 1;
+                demo->extension_names[demo->enabled_extension_count++] = VK_EXT_METAL_SURFACE_EXTENSION_NAME;
+            }
+#endif
+            if (!strcmp(VK_EXT_DEBUG_UTILS_EXTENSION_NAME, instance_extensions[i].extensionName)) {
+                if (demo->validate) {
+                    demo->extension_names[demo->enabled_extension_count++] = VK_EXT_DEBUG_UTILS_EXTENSION_NAME;
+                }
+            }
+            assert(demo->enabled_extension_count < 64);
+        }
+
+        free(instance_extensions);
+    }
+
+    if (!surfaceExtFound) {
+        ERR_EXIT("vkEnumerateInstanceExtensionProperties failed to find the " VK_KHR_SURFACE_EXTENSION_NAME
+                 " extension.\n\n"
+                 "Do you have a compatible Vulkan installable client driver (ICD) installed?\n"
+                 "Please look at the Getting Started guide for additional information.\n",
+                 "vkCreateInstance Failure");
+    }
+    if (!platformSurfaceExtFound) {
+#if defined(VK_USE_PLATFORM_WIN32_KHR)
+        ERR_EXIT("vkEnumerateInstanceExtensionProperties failed to find the " VK_KHR_WIN32_SURFACE_EXTENSION_NAME
+                 " extension.\n\n"
+                 "Do you have a compatible Vulkan installable client driver (ICD) installed?\n"
+                 "Please look at the Getting Started guide for additional information.\n",
+                 "vkCreateInstance Failure");
+#elif defined(VK_USE_PLATFORM_METAL_EXT)
+        ERR_EXIT("vkEnumerateInstanceExtensionProperties failed to find the " VK_EXT_METAL_SURFACE_EXTENSION_NAME
+                 " extension.\n\n"
+                 "Do you have a compatible Vulkan installable client driver (ICD) installed?\n"
+                 "Please look at the Getting Started guide for additional information.\n",
+                 "vkCreateInstance Failure");
+#elif defined(VK_USE_PLATFORM_XCB_KHR)
+        ERR_EXIT("vkEnumerateInstanceExtensionProperties failed to find the " VK_KHR_XCB_SURFACE_EXTENSION_NAME
+                 " extension.\n\n"
+                 "Do you have a compatible Vulkan installable client driver (ICD) installed?\n"
+                 "Please look at the Getting Started guide for additional information.\n",
+                 "vkCreateInstance Failure");
+#elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
+        ERR_EXIT("vkEnumerateInstanceExtensionProperties failed to find the " VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME
+                 " extension.\n\n"
+                 "Do you have a compatible Vulkan installable client driver (ICD) installed?\n"
+                 "Please look at the Getting Started guide for additional information.\n",
+                 "vkCreateInstance Failure");
+#elif defined(VK_USE_PLATFORM_DISPLAY_KHR)
+        ERR_EXIT("vkEnumerateInstanceExtensionProperties failed to find the " VK_KHR_DISPLAY_EXTENSION_NAME
+                 " extension.\n\n"
+                 "Do you have a compatible Vulkan installable client driver (ICD) installed?\n"
+                 "Please look at the Getting Started guide for additional information.\n",
+                 "vkCreateInstance Failure");
+#elif defined(VK_USE_PLATFORM_ANDROID_KHR)
+        ERR_EXIT("vkEnumerateInstanceExtensionProperties failed to find the " VK_KHR_ANDROID_SURFACE_EXTENSION_NAME
+                 " extension.\n\n"
+                 "Do you have a compatible Vulkan installable client driver (ICD) installed?\n"
+                 "Please look at the Getting Started guide for additional information.\n",
+                 "vkCreateInstance Failure");
+#elif defined(VK_USE_PLATFORM_XLIB_KHR)
+        ERR_EXIT("vkEnumerateInstanceExtensionProperties failed to find the " VK_KHR_XLIB_SURFACE_EXTENSION_NAME
+                 " extension.\n\n"
+                 "Do you have a compatible Vulkan installable client driver (ICD) installed?\n"
+                 "Please look at the Getting Started guide for additional information.\n",
+                 "vkCreateInstance Failure");
+#endif
+    }
+    const VkApplicationInfo app = {
+        .sType = VK_STRUCTURE_TYPE_APPLICATION_INFO,
+        .pNext = NULL,
+        .pApplicationName = APP_SHORT_NAME,
+        .applicationVersion = 0,
+        .pEngineName = APP_SHORT_NAME,
+        .engineVersion = 0,
+        .apiVersion = VK_API_VERSION_1_0,
+    };
+    VkInstanceCreateInfo inst_info = {
+        .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
+        .pNext = NULL,
+        .pApplicationInfo = &app,
+        .enabledLayerCount = demo->enabled_layer_count,
+        .ppEnabledLayerNames = (const char *const *)instance_validation_layers,
+        .enabledExtensionCount = demo->enabled_extension_count,
+        .ppEnabledExtensionNames = (const char *const *)demo->extension_names,
+    };
+
+    /*
+     * This is info for a temp callback to use during CreateInstance.
+     * After the instance is created, we use the instance-based
+     * function to register the final callback.
+     */
+    VkDebugUtilsMessengerCreateInfoEXT dbg_messenger_create_info;
+    if (demo->validate) {
+        // VK_EXT_debug_utils style
+        dbg_messenger_create_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
+        dbg_messenger_create_info.pNext = NULL;
+        dbg_messenger_create_info.flags = 0;
+        dbg_messenger_create_info.messageSeverity =
+            VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
+        dbg_messenger_create_info.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT |
+                                                VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT |
+                                                VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;
+        dbg_messenger_create_info.pfnUserCallback = debug_messenger_callback;
+        dbg_messenger_create_info.pUserData = demo;
+        inst_info.pNext = &dbg_messenger_create_info;
+    }
+
+    uint32_t gpu_count;
+
+    err = vkCreateInstance(&inst_info, NULL, &demo->inst);
+    if (err == VK_ERROR_INCOMPATIBLE_DRIVER) {
+        ERR_EXIT(
+            "Cannot find a compatible Vulkan installable client driver (ICD).\n\n"
+            "Please look at the Getting Started guide for additional information.\n",
+            "vkCreateInstance Failure");
+    } else if (err == VK_ERROR_EXTENSION_NOT_PRESENT) {
+        ERR_EXIT(
+            "Cannot find a specified extension library.\n"
+            "Make sure your layers path is set appropriately.\n",
+            "vkCreateInstance Failure");
+    } else if (err) {
+        ERR_EXIT(
+            "vkCreateInstance failed.\n\n"
+            "Do you have a compatible Vulkan installable client driver (ICD) installed?\n"
+            "Please look at the Getting Started guide for additional information.\n",
+            "vkCreateInstance Failure");
+    }
+
+    /* Make initial call to query gpu_count, then second call for gpu info*/
+    err = vkEnumeratePhysicalDevices(demo->inst, &gpu_count, NULL);
+    assert(!err);
+
+    if (gpu_count > 0) {
+        VkPhysicalDevice *physical_devices = malloc(sizeof(VkPhysicalDevice) * gpu_count);
+        err = vkEnumeratePhysicalDevices(demo->inst, &gpu_count, physical_devices);
+        assert(!err);
+        /* For cube demo we just grab the first physical device */
+        demo->gpu = physical_devices[0];
+        free(physical_devices);
+    } else {
+        ERR_EXIT(
+            "vkEnumeratePhysicalDevices reported zero accessible devices.\n\n"
+            "Do you have a compatible Vulkan installable client driver (ICD) installed?\n"
+            "Please look at the Getting Started guide for additional information.\n",
+            "vkEnumeratePhysicalDevices Failure");
+    }
+
+    /* Look for device extensions */
+    uint32_t device_extension_count = 0;
+    VkBool32 swapchainExtFound = 0;
+    demo->enabled_extension_count = 0;
+    memset(demo->extension_names, 0, sizeof(demo->extension_names));
+
+    err = vkEnumerateDeviceExtensionProperties(demo->gpu, NULL, &device_extension_count, NULL);
+    assert(!err);
+
+    if (device_extension_count > 0) {
+        VkExtensionProperties *device_extensions = malloc(sizeof(VkExtensionProperties) * device_extension_count);
+        err = vkEnumerateDeviceExtensionProperties(demo->gpu, NULL, &device_extension_count, device_extensions);
+        assert(!err);
+
+        for (uint32_t i = 0; i < device_extension_count; i++) {
+            if (!strcmp(VK_KHR_SWAPCHAIN_EXTENSION_NAME, device_extensions[i].extensionName)) {
+                swapchainExtFound = 1;
+                demo->extension_names[demo->enabled_extension_count++] = VK_KHR_SWAPCHAIN_EXTENSION_NAME;
+            }
+            assert(demo->enabled_extension_count < 64);
+        }
+
+        if (demo->VK_KHR_incremental_present_enabled) {
+            // Even though the user "enabled" the extension via the command
+            // line, we must make sure that it's enumerated for use with the
+            // device.  Therefore, disable it here, and re-enable it again if
+            // enumerated.
+            demo->VK_KHR_incremental_present_enabled = false;
+            for (uint32_t i = 0; i < device_extension_count; i++) {
+                if (!strcmp(VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME, device_extensions[i].extensionName)) {
+                    demo->extension_names[demo->enabled_extension_count++] = VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME;
+                    demo->VK_KHR_incremental_present_enabled = true;
+                    DbgMsg("VK_KHR_incremental_present extension enabled\n");
+                }
+                assert(demo->enabled_extension_count < 64);
+            }
+            if (!demo->VK_KHR_incremental_present_enabled) {
+                DbgMsg("VK_KHR_incremental_present extension NOT AVAILABLE\n");
+            }
+        }
+
+        if (demo->VK_GOOGLE_display_timing_enabled) {
+            // Even though the user "enabled" the extension via the command
+            // line, we must make sure that it's enumerated for use with the
+            // device.  Therefore, disable it here, and re-enable it again if
+            // enumerated.
+            demo->VK_GOOGLE_display_timing_enabled = false;
+            for (uint32_t i = 0; i < device_extension_count; i++) {
+                if (!strcmp(VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME, device_extensions[i].extensionName)) {
+                    demo->extension_names[demo->enabled_extension_count++] = VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME;
+                    demo->VK_GOOGLE_display_timing_enabled = true;
+                    DbgMsg("VK_GOOGLE_display_timing extension enabled\n");
+                }
+                assert(demo->enabled_extension_count < 64);
+            }
+            if (!demo->VK_GOOGLE_display_timing_enabled) {
+                DbgMsg("VK_GOOGLE_display_timing extension NOT AVAILABLE\n");
+            }
+        }
+
+        free(device_extensions);
+    }
+
+    if (!swapchainExtFound) {
+        ERR_EXIT("vkEnumerateDeviceExtensionProperties failed to find the " VK_KHR_SWAPCHAIN_EXTENSION_NAME
+                 " extension.\n\nDo you have a compatible Vulkan installable client driver (ICD) installed?\n"
+                 "Please look at the Getting Started guide for additional information.\n",
+                 "vkCreateInstance Failure");
+    }
+
+    if (demo->validate) {
+        // Setup VK_EXT_debug_utils function pointers always (we use them for
+        // debug labels and names).
+        demo->CreateDebugUtilsMessengerEXT =
+            (PFN_vkCreateDebugUtilsMessengerEXT)vkGetInstanceProcAddr(demo->inst, "vkCreateDebugUtilsMessengerEXT");
+        demo->DestroyDebugUtilsMessengerEXT =
+            (PFN_vkDestroyDebugUtilsMessengerEXT)vkGetInstanceProcAddr(demo->inst, "vkDestroyDebugUtilsMessengerEXT");
+        demo->SubmitDebugUtilsMessageEXT =
+            (PFN_vkSubmitDebugUtilsMessageEXT)vkGetInstanceProcAddr(demo->inst, "vkSubmitDebugUtilsMessageEXT");
+        demo->CmdBeginDebugUtilsLabelEXT =
+            (PFN_vkCmdBeginDebugUtilsLabelEXT)vkGetInstanceProcAddr(demo->inst, "vkCmdBeginDebugUtilsLabelEXT");
+        demo->CmdEndDebugUtilsLabelEXT =
+            (PFN_vkCmdEndDebugUtilsLabelEXT)vkGetInstanceProcAddr(demo->inst, "vkCmdEndDebugUtilsLabelEXT");
+        demo->CmdInsertDebugUtilsLabelEXT =
+            (PFN_vkCmdInsertDebugUtilsLabelEXT)vkGetInstanceProcAddr(demo->inst, "vkCmdInsertDebugUtilsLabelEXT");
+        demo->SetDebugUtilsObjectNameEXT =
+            (PFN_vkSetDebugUtilsObjectNameEXT)vkGetInstanceProcAddr(demo->inst, "vkSetDebugUtilsObjectNameEXT");
+        if (NULL == demo->CreateDebugUtilsMessengerEXT || NULL == demo->DestroyDebugUtilsMessengerEXT ||
+            NULL == demo->SubmitDebugUtilsMessageEXT || NULL == demo->CmdBeginDebugUtilsLabelEXT ||
+            NULL == demo->CmdEndDebugUtilsLabelEXT || NULL == demo->CmdInsertDebugUtilsLabelEXT ||
+            NULL == demo->SetDebugUtilsObjectNameEXT) {
+            ERR_EXIT("GetProcAddr: Failed to init VK_EXT_debug_utils\n", "GetProcAddr: Failure");
+        }
+
+        err = demo->CreateDebugUtilsMessengerEXT(demo->inst, &dbg_messenger_create_info, NULL, &demo->dbg_messenger);
+        switch (err) {
+            case VK_SUCCESS:
+                break;
+            case VK_ERROR_OUT_OF_HOST_MEMORY:
+                ERR_EXIT("CreateDebugUtilsMessengerEXT: out of host memory\n", "CreateDebugUtilsMessengerEXT Failure");
+                break;
+            default:
+                ERR_EXIT("CreateDebugUtilsMessengerEXT: unknown failure\n", "CreateDebugUtilsMessengerEXT Failure");
+                break;
+        }
+    }
+    vkGetPhysicalDeviceProperties(demo->gpu, &demo->gpu_props);
+
+    /* Call with NULL data to get count */
+    vkGetPhysicalDeviceQueueFamilyProperties(demo->gpu, &demo->queue_family_count, NULL);
+    assert(demo->queue_family_count >= 1);
+
+    demo->queue_props = (VkQueueFamilyProperties *)malloc(demo->queue_family_count * sizeof(VkQueueFamilyProperties));
+    vkGetPhysicalDeviceQueueFamilyProperties(demo->gpu, &demo->queue_family_count, demo->queue_props);
+
+    // Query fine-grained feature support for this device.
+    //  If app has specific feature requirements it should check supported
+    //  features based on this query
+    VkPhysicalDeviceFeatures physDevFeatures;
+    vkGetPhysicalDeviceFeatures(demo->gpu, &physDevFeatures);
+
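+    // Resolve the instance-level WSI entry points used during swapchain setup.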
+    GET_INSTANCE_PROC_ADDR(demo->inst, GetPhysicalDeviceSurfaceSupportKHR);
+    GET_INSTANCE_PROC_ADDR(demo->inst, GetPhysicalDeviceSurfaceCapabilitiesKHR);
+    GET_INSTANCE_PROC_ADDR(demo->inst, GetPhysicalDeviceSurfaceFormatsKHR);
+    GET_INSTANCE_PROC_ADDR(demo->inst, GetPhysicalDeviceSurfacePresentModesKHR);
+    GET_INSTANCE_PROC_ADDR(demo->inst, GetSwapchainImagesKHR);
+}
+
+static void demo_create_device(struct demo *demo) {
+    VkResult U_ASSERT_ONLY err;
+    float queue_priorities[1] = {0.0};
+    VkDeviceQueueCreateInfo queues[2];
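+    // Always request the graphics queue; queues[1] is filled in below when a
+    // separate present queue family is required.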
+    queues[0].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
+    queues[0].pNext = NULL;
+    queues[0].queueFamilyIndex = demo->graphics_queue_family_index;
+    queues[0].queueCount = 1;
+    queues[0].pQueuePriorities = queue_priorities;
+    queues[0].flags = 0;
+
+    VkDeviceCreateInfo device = {
+        .sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
+        .pNext = NULL,
+        .queueCreateInfoCount = 1,
+        .pQueueCreateInfos = queues,
+        .enabledLayerCount = 0,
+        .ppEnabledLayerNames = NULL,
+        .enabledExtensionCount = demo->enabled_extension_count,
+        .ppEnabledExtensionNames = (const char *const *)demo->extension_names,
+        .pEnabledFeatures = NULL,  // If specific features are required, pass them in here
+    };
+    if (demo->separate_present_queue) {
+        queues[1].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
+        queues[1].pNext = NULL;
+        queues[1].queueFamilyIndex = demo->present_queue_family_index;
+        queues[1].queueCount = 1;
+        queues[1].pQueuePriorities = queue_priorities;
+        queues[1].flags = 0;
+        device.queueCreateInfoCount = 2;
+    }
+    err = vkCreateDevice(demo->gpu, &device, NULL, &demo->device);
+    assert(!err);
+}
+
+static void demo_create_surface(struct demo *demo) {
+    VkResult U_ASSERT_ONLY err;
+
+// Create a WSI surface for the window:
+#if defined(VK_USE_PLATFORM_WIN32_KHR)
+    VkWin32SurfaceCreateInfoKHR createInfo;
+    createInfo.sType = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR;
+    createInfo.pNext = NULL;
+    createInfo.flags = 0;
+    createInfo.hinstance = demo->connection;
+    createInfo.hwnd = demo->window;
+
+    err = vkCreateWin32SurfaceKHR(demo->inst, &createInfo, NULL, &demo->surface);
+#elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
+    VkWaylandSurfaceCreateInfoKHR createInfo;
+    createInfo.sType = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR;
+    createInfo.pNext = NULL;
+    createInfo.flags = 0;
+    createInfo.display = demo->display;
+    createInfo.surface = demo->window;
+
+    err = vkCreateWaylandSurfaceKHR(demo->inst, &createInfo, NULL, &demo->surface);
+#elif defined(VK_USE_PLATFORM_ANDROID_KHR)
+    VkAndroidSurfaceCreateInfoKHR createInfo;
+    createInfo.sType = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR;
+    createInfo.pNext = NULL;
+    createInfo.flags = 0;
+    createInfo.window = (struct ANativeWindow *)(demo->window);
+
+    err = vkCreateAndroidSurfaceKHR(demo->inst, &createInfo, NULL, &demo->surface);
+#elif defined(VK_USE_PLATFORM_XLIB_KHR)
+    VkXlibSurfaceCreateInfoKHR createInfo;
+    createInfo.sType = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR;
+    createInfo.pNext = NULL;
+    createInfo.flags = 0;
+    createInfo.dpy = demo->display;
+    createInfo.window = demo->xlib_window;
+
+    err = vkCreateXlibSurfaceKHR(demo->inst, &createInfo, NULL, &demo->surface);
+#elif defined(VK_USE_PLATFORM_XCB_KHR)
+    VkXcbSurfaceCreateInfoKHR createInfo;
+    createInfo.sType = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR;
+    createInfo.pNext = NULL;
+    createInfo.flags = 0;
+    createInfo.connection = demo->connection;
+    createInfo.window = demo->xcb_window;
+
+    err = vkCreateXcbSurfaceKHR(demo->inst, &createInfo, NULL, &demo->surface);
+#elif defined(VK_USE_PLATFORM_DISPLAY_KHR)
+    err = demo_create_display_surface(demo);
+#elif defined(VK_USE_PLATFORM_METAL_EXT)
+    VkMetalSurfaceCreateInfoEXT surface;
+    surface.sType = VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT;
+    surface.pNext = NULL;
+    surface.flags = 0;
+    surface.pLayer = demo->caMetalLayer;
+
+    err = vkCreateMetalSurfaceEXT(demo->inst, &surface, NULL, &demo->surface);
+#endif
+    assert(!err);
+}
+
+static void demo_init_vk_swapchain(struct demo *demo) {
+    VkResult U_ASSERT_ONLY err;
+
+    demo_create_surface(demo);
+
+    // Iterate over each queue to learn whether it supports presenting:
+    VkBool32 *supportsPresent = (VkBool32 *)malloc(demo->queue_family_count * sizeof(VkBool32));
+    for (uint32_t i = 0; i < demo->queue_family_count; i++) {
+        demo->fpGetPhysicalDeviceSurfaceSupportKHR(demo->gpu, i, demo->surface, &supportsPresent[i]);
+    }
+
+    // Search for a graphics and a present queue in the array of queue
+    // families, try to find one that supports both
+    uint32_t graphicsQueueFamilyIndex = UINT32_MAX;
+    uint32_t presentQueueFamilyIndex = UINT32_MAX;
+    for (uint32_t i = 0; i < demo->queue_family_count; i++) {
+        if ((demo->queue_props[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0) {
+            if (graphicsQueueFamilyIndex == UINT32_MAX) {
+                graphicsQueueFamilyIndex = i;
+            }
+
+            if (supportsPresent[i] == VK_TRUE) {
+                graphicsQueueFamilyIndex = i;
+                presentQueueFamilyIndex = i;
+                break;
+            }
+        }
+    }
+
+    if (presentQueueFamilyIndex == UINT32_MAX) {
+        // If didn't find a queue that supports both graphics and present, then
+        // find a separate present queue.
+        for (uint32_t i = 0; i < demo->queue_family_count; ++i) {
+            if (supportsPresent[i] == VK_TRUE) {
+                presentQueueFamilyIndex = i;
+                break;
+            }
+        }
+    }
+
+    // Generate error if could not find both a graphics and a present queue
+    if (graphicsQueueFamilyIndex == UINT32_MAX || presentQueueFamilyIndex == UINT32_MAX) {
+        ERR_EXIT("Could not find both graphics and present queues\n", "Swapchain Initialization Failure");
+    }
+
+    demo->graphics_queue_family_index = graphicsQueueFamilyIndex;
+    demo->present_queue_family_index = presentQueueFamilyIndex;
+    demo->separate_present_queue = (demo->graphics_queue_family_index != demo->present_queue_family_index);
+    free(supportsPresent);
+
+    demo_create_device(demo);
+
+    GET_DEVICE_PROC_ADDR(demo->device, CreateSwapchainKHR);
+    GET_DEVICE_PROC_ADDR(demo->device, DestroySwapchainKHR);
+    GET_DEVICE_PROC_ADDR(demo->device, GetSwapchainImagesKHR);
+    GET_DEVICE_PROC_ADDR(demo->device, AcquireNextImageKHR);
+    GET_DEVICE_PROC_ADDR(demo->device, QueuePresentKHR);
+    if (demo->VK_GOOGLE_display_timing_enabled) {
+        GET_DEVICE_PROC_ADDR(demo->device, GetRefreshCycleDurationGOOGLE);
+        GET_DEVICE_PROC_ADDR(demo->device, GetPastPresentationTimingGOOGLE);
+    }
+
+    vkGetDeviceQueue(demo->device, demo->graphics_queue_family_index, 0, &demo->graphics_queue);
+
+    if (!demo->separate_present_queue) {
+        demo->present_queue = demo->graphics_queue;
+    } else {
+        vkGetDeviceQueue(demo->device, demo->present_queue_family_index, 0, &demo->present_queue);
+    }
+
+    // Get the list of VkFormat's that are supported:
+    uint32_t formatCount;
+    err = demo->fpGetPhysicalDeviceSurfaceFormatsKHR(demo->gpu, demo->surface, &formatCount, NULL);
+    assert(!err);
+    VkSurfaceFormatKHR *surfFormats = (VkSurfaceFormatKHR *)malloc(formatCount * sizeof(VkSurfaceFormatKHR));
+    err = demo->fpGetPhysicalDeviceSurfaceFormatsKHR(demo->gpu, demo->surface, &formatCount, surfFormats);
+    assert(!err);
+    // If the format list includes just one entry of VK_FORMAT_UNDEFINED,
+    // the surface has no preferred format.  Otherwise, at least one
+    // supported format will be returned.
+    if (formatCount == 1 && surfFormats[0].format == VK_FORMAT_UNDEFINED) {
+        demo->format = VK_FORMAT_B8G8R8A8_UNORM;
+    } else {
+        assert(formatCount >= 1);
+        demo->format = surfFormats[0].format;
+    }
+    demo->color_space = surfFormats[0].colorSpace;
+    free(surfFormats);
+
+    demo->quit = false;
+    demo->curFrame = 0;
+
+    // Create semaphores to synchronize acquiring presentable buffers before
+    // rendering and waiting for drawing to be complete before presenting
+    VkSemaphoreCreateInfo semaphoreCreateInfo = {
+        .sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
+        .pNext = NULL,
+        .flags = 0,
+    };
+
+    // Create fences that we can use to throttle if we get too far
+    // ahead of the image presents
+    VkFenceCreateInfo fence_ci = {
+        .sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, .pNext = NULL, .flags = VK_FENCE_CREATE_SIGNALED_BIT};
+    for (uint32_t i = 0; i < FRAME_LAG; i++) {
+        err = vkCreateFence(demo->device, &fence_ci, NULL, &demo->fences[i]);
+        assert(!err);
+
+        err = vkCreateSemaphore(demo->device, &semaphoreCreateInfo, NULL, &demo->image_acquired_semaphores[i]);
+        assert(!err);
+
+        err = vkCreateSemaphore(demo->device, &semaphoreCreateInfo, NULL, &demo->draw_complete_semaphores[i]);
+        assert(!err);
+
+        if (demo->separate_present_queue) {
+            err = vkCreateSemaphore(demo->device, &semaphoreCreateInfo, NULL, &demo->image_ownership_semaphores[i]);
+            assert(!err);
+        }
+    }
+    demo->frame_index = 0;
+
+    // Get Memory information and properties
+    vkGetPhysicalDeviceMemoryProperties(demo->gpu, &demo->memory_properties);
+}
+
+#if defined(VK_USE_PLATFORM_WAYLAND_KHR)
+static void pointer_handle_enter(void *data, struct wl_pointer *pointer, uint32_t serial, struct wl_surface *surface, wl_fixed_t sx,
+                                 wl_fixed_t sy) {}
+
+static void pointer_handle_leave(void *data, struct wl_pointer *pointer, uint32_t serial, struct wl_surface *surface) {}
+
+static void pointer_handle_motion(void *data, struct wl_pointer *pointer, uint32_t time, wl_fixed_t sx, wl_fixed_t sy) {}
+
+static void pointer_handle_button(void *data, struct wl_pointer *wl_pointer, uint32_t serial, uint32_t time, uint32_t button,
+                                  uint32_t state) {
+    struct demo *demo = data;
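+    // Dragging with the left mouse button moves the top-level surface.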
+    if (button == BTN_LEFT && state == WL_POINTER_BUTTON_STATE_PRESSED) {
+        wl_shell_surface_move(demo->shell_surface, demo->seat, serial);
+    }
+}
+
+static void pointer_handle_axis(void *data, struct wl_pointer *wl_pointer, uint32_t time, uint32_t axis, wl_fixed_t value) {}
+
+static const struct wl_pointer_listener pointer_listener = {
+    pointer_handle_enter, pointer_handle_leave, pointer_handle_motion, pointer_handle_button, pointer_handle_axis,
+};
+
+static void keyboard_handle_keymap(void *data, struct wl_keyboard *keyboard, uint32_t format, int fd, uint32_t size) {}
+
+static void keyboard_handle_enter(void *data, struct wl_keyboard *keyboard, uint32_t serial, struct wl_surface *surface,
+                                  struct wl_array *keys) {}
+
+static void keyboard_handle_leave(void *data, struct wl_keyboard *keyboard, uint32_t serial, struct wl_surface *surface) {}
+
+static void keyboard_handle_key(void *data, struct wl_keyboard *keyboard, uint32_t serial, uint32_t time, uint32_t key,
+                                uint32_t state) {
+    if (state != WL_KEYBOARD_KEY_STATE_RELEASED) return;
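+    // Released keys map to the same actions as the other platforms:
+    // Escape quits, the arrow keys change the spin, and space toggles pause.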
+    struct demo *demo = data;
+    switch (key) {
+        case KEY_ESC:  // Escape
+            demo->quit = true;
+            break;
+        case KEY_LEFT:  // left arrow key
+            demo->spin_angle -= demo->spin_increment;
+            break;
+        case KEY_RIGHT:  // right arrow key
+            demo->spin_angle += demo->spin_increment;
+            break;
+        case KEY_SPACE:  // space bar
+            demo->pause = !demo->pause;
+            break;
+    }
+}
+
+static void keyboard_handle_modifiers(void *data, struct wl_keyboard *keyboard, uint32_t serial, uint32_t mods_depressed,
+                                      uint32_t mods_latched, uint32_t mods_locked, uint32_t group) {}
+
+static const struct wl_keyboard_listener keyboard_listener = {
+    keyboard_handle_keymap, keyboard_handle_enter, keyboard_handle_leave, keyboard_handle_key, keyboard_handle_modifiers,
+};
+
+static void seat_handle_capabilities(void *data, struct wl_seat *seat, enum wl_seat_capability caps) {
+    // Subscribe to pointer events
+    struct demo *demo = data;
+    if ((caps & WL_SEAT_CAPABILITY_POINTER) && !demo->pointer) {
+        demo->pointer = wl_seat_get_pointer(seat);
+        wl_pointer_add_listener(demo->pointer, &pointer_listener, demo);
+    } else if (!(caps & WL_SEAT_CAPABILITY_POINTER) && demo->pointer) {
+        wl_pointer_destroy(demo->pointer);
+        demo->pointer = NULL;
+    }
+    // Subscribe to keyboard events
+    if (caps & WL_SEAT_CAPABILITY_KEYBOARD) {
+        demo->keyboard = wl_seat_get_keyboard(seat);
+        wl_keyboard_add_listener(demo->keyboard, &keyboard_listener, demo);
+    } else if (!(caps & WL_SEAT_CAPABILITY_KEYBOARD) && demo->keyboard) {
+        wl_keyboard_destroy(demo->keyboard);
+        demo->keyboard = NULL;
+    }
+}
+
+static const struct wl_seat_listener seat_listener = {
+    seat_handle_capabilities,
+};
+
+static void registry_handle_global(void *data, struct wl_registry *registry, uint32_t id, const char *interface, uint32_t version) {
+    struct demo *demo = data;
+    // pickup wayland objects when they appear
+    if (strcmp(interface, "wl_compositor") == 0) {
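+        // Bind at most version 4 of wl_compositor; incremental present relies on
+        // the damage_buffer request introduced in that version.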
+        uint32_t minVersion = version < 4 ? version : 4;
+        demo->compositor = wl_registry_bind(registry, id, &wl_compositor_interface, minVersion);
+        if (demo->VK_KHR_incremental_present_enabled && minVersion < 4) {
+            fprintf(stderr, "Wayland compositor doesn't support VK_KHR_incremental_present, disabling.\n");
+            demo->VK_KHR_incremental_present_enabled = false;
+        }
+    } else if (strcmp(interface, "wl_shell") == 0) {
+        demo->shell = wl_registry_bind(registry, id, &wl_shell_interface, 1);
+    } else if (strcmp(interface, "wl_seat") == 0) {
+        demo->seat = wl_registry_bind(registry, id, &wl_seat_interface, 1);
+        wl_seat_add_listener(demo->seat, &seat_listener, demo);
+    }
+}
+
+static void registry_handle_global_remove(void *data UNUSED, struct wl_registry *registry UNUSED, uint32_t name UNUSED) {}
+
+static const struct wl_registry_listener registry_listener = {registry_handle_global, registry_handle_global_remove};
+#endif
+
+static void demo_init_connection(struct demo *demo) {
+#if defined(VK_USE_PLATFORM_XCB_KHR)
+    const xcb_setup_t *setup;
+    xcb_screen_iterator_t iter;
+    int scr;
+
+    const char *display_envar = getenv("DISPLAY");
+    if (display_envar == NULL || display_envar[0] == '\0') {
+        printf("Environment variable DISPLAY requires a valid value.\nExiting ...\n");
+        fflush(stdout);
+        exit(1);
+    }
+
+    demo->connection = xcb_connect(NULL, &scr);
+    if (xcb_connection_has_error(demo->connection) > 0) {
+        printf("Cannot find a compatible Vulkan installable client driver (ICD).\nExiting ...\n");
+        fflush(stdout);
+        exit(1);
+    }
+
+    setup = xcb_get_setup(demo->connection);
+    iter = xcb_setup_roots_iterator(setup);
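+    // Step the iterator forward to the screen number reported by xcb_connect().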
+    while (scr-- > 0) xcb_screen_next(&iter);
+
+    demo->screen = iter.data;
+#elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
+    demo->display = wl_display_connect(NULL);
+
+    if (demo->display == NULL) {
+        printf("Cannot find a compatible Vulkan installable client driver (ICD).\nExiting ...\n");
+        fflush(stdout);
+        exit(1);
+    }
+
+    demo->registry = wl_display_get_registry(demo->display);
+    wl_registry_add_listener(demo->registry, &registry_listener, demo);
+    wl_display_dispatch(demo->display);
+#endif
+}
+
+static void demo_init(struct demo *demo, int argc, char **argv) {
+    vec3 eye = {0.0f, 3.0f, 5.0f};
+    vec3 origin = {0, 0, 0};
+    vec3 up = {0.0f, 1.0f, 0.0};
+
+    memset(demo, 0, sizeof(*demo));
+    demo->presentMode = VK_PRESENT_MODE_FIFO_KHR;
+    demo->frameCount = INT32_MAX;
+
+    for (int i = 1; i < argc; i++) {
+        if (strcmp(argv[i], "--use_staging") == 0) {
+            demo->use_staging_buffer = true;
+            continue;
+        }
+        if ((strcmp(argv[i], "--present_mode") == 0) && (i < argc - 1)) {
+            demo->presentMode = atoi(argv[i + 1]);
+            i++;
+            continue;
+        }
+        if (strcmp(argv[i], "--break") == 0) {
+            demo->use_break = true;
+            continue;
+        }
+        if (strcmp(argv[i], "--validate") == 0) {
+            demo->validate = true;
+            continue;
+        }
+        if (strcmp(argv[i], "--validate-checks-disabled") == 0) {
+            demo->validate = true;
+            demo->validate_checks_disabled = true;
+            continue;
+        }
+        if (strcmp(argv[i], "--xlib") == 0) {
+            fprintf(stderr, "--xlib is deprecated and no longer does anything");
+            continue;
+        }
+        if (strcmp(argv[i], "--c") == 0 && demo->frameCount == INT32_MAX && i < argc - 1 &&
+            sscanf(argv[i + 1], "%d", &demo->frameCount) == 1 && demo->frameCount >= 0) {
+            i++;
+            continue;
+        }
+        if (strcmp(argv[i], "--suppress_popups") == 0) {
+            demo->suppress_popups = true;
+            continue;
+        }
+        if (strcmp(argv[i], "--display_timing") == 0) {
+            demo->VK_GOOGLE_display_timing_enabled = true;
+            continue;
+        }
+        if (strcmp(argv[i], "--incremental_present") == 0) {
+            demo->VK_KHR_incremental_present_enabled = true;
+            continue;
+        }
+
+#if defined(ANDROID)
+        ERR_EXIT("Usage: vkcube [--validate]\n", "Usage");
+#else
+        char *message =
+            "Usage:\n  %s\t[--use_staging] [--validate] [--validate-checks-disabled]\n"
+            "\t[--break] [--c <framecount>] [--suppress_popups]\n"
+            "\t[--incremental_present] [--display_timing]\n"
+            "\t[--present_mode <present mode enum>]\n"
+            "\t<present_mode_enum>\n"
+            "\t\tVK_PRESENT_MODE_IMMEDIATE_KHR = %d\n"
+            "\t\tVK_PRESENT_MODE_MAILBOX_KHR = %d\n"
+            "\t\tVK_PRESENT_MODE_FIFO_KHR = %d\n"
+            "\t\tVK_PRESENT_MODE_FIFO_RELAXED_KHR = %d\n";
+        int length = snprintf(NULL, 0, message, APP_SHORT_NAME, VK_PRESENT_MODE_IMMEDIATE_KHR, VK_PRESENT_MODE_MAILBOX_KHR,
+                              VK_PRESENT_MODE_FIFO_KHR, VK_PRESENT_MODE_FIFO_RELAXED_KHR);
+        char *usage = (char *)malloc(length + 1);
+        if (!usage) {
+            exit(1);
+        }
+        snprintf(usage, length + 1, message, APP_SHORT_NAME, VK_PRESENT_MODE_IMMEDIATE_KHR, VK_PRESENT_MODE_MAILBOX_KHR,
+                 VK_PRESENT_MODE_FIFO_KHR, VK_PRESENT_MODE_FIFO_RELAXED_KHR);
+#if defined(_WIN32)
+        if (!demo->suppress_popups) MessageBox(NULL, usage, "Usage Error", MB_OK);
+#else
+        fprintf(stderr, "%s", usage);
+        fflush(stderr);
+#endif
+        free(usage);
+        exit(1);
+#endif
+    }
+
+    demo_init_connection(demo);
+
+    demo_init_vk(demo);
+
+    demo->width = 500;
+    demo->height = 500;
+
+    demo->spin_angle = 4.0f;
+    demo->spin_increment = 0.2f;
+    demo->pause = false;
+
+    mat4x4_perspective(demo->projection_matrix, (float)degreesToRadians(45.0f), 1.0f, 0.1f, 100.0f);
+    mat4x4_look_at(demo->view_matrix, eye, origin, up);
+    mat4x4_identity(demo->model_matrix);
+
+    demo->projection_matrix[1][1] *= -1;  // Flip projection matrix from GL to Vulkan orientation.
+}
+
+#if defined(VK_USE_PLATFORM_WIN32_KHR)
+// Include header required for parsing the command line options.
+#include <shellapi.h>
+
+int WINAPI WinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, LPSTR pCmdLine, int nCmdShow) {
+    MSG msg;    // message
+    bool done;  // flag saying when app is complete
+    int argc;
+    char **argv;
+
+    // Ensure wParam is initialized.
+    msg.wParam = 0;
+
+    // Use the CommandLine functions to get the command line arguments.
+    // Unfortunately, Microsoft outputs this information as wide (Unicode)
+    // characters, while we simply want the ASCII version to stay compatible
+    // with the non-Windows side, so we convert the strings to ASCII.
+    LPWSTR *commandLineArgs = CommandLineToArgvW(GetCommandLineW(), &argc);
+    if (NULL == commandLineArgs) {
+        argc = 0;
+    }
+
+    if (argc > 0) {
+        argv = (char **)malloc(sizeof(char *) * argc);
+        if (argv == NULL) {
+            argc = 0;
+        } else {
+            for (int iii = 0; iii < argc; iii++) {
+                size_t wideCharLen = wcslen(commandLineArgs[iii]);
+                size_t numConverted = 0;
+
+                argv[iii] = (char *)malloc(sizeof(char) * (wideCharLen + 1));
+                if (argv[iii] != NULL) {
+                    wcstombs_s(&numConverted, argv[iii], wideCharLen + 1, commandLineArgs[iii], wideCharLen + 1);
+                }
+            }
+        }
+    } else {
+        argv = NULL;
+    }
+
+    demo_init(&demo, argc, argv);
+
+    // Free up the items we had to allocate for the command line arguments.
+    if (argc > 0 && argv != NULL) {
+        for (int iii = 0; iii < argc; iii++) {
+            if (argv[iii] != NULL) {
+                free(argv[iii]);
+            }
+        }
+        free(argv);
+    }
+
+    demo.connection = hInstance;
+    strncpy(demo.name, "Vulkan Cube", APP_NAME_STR_LEN);
+    demo_create_window(&demo);
+    demo_init_vk_swapchain(&demo);
+
+    demo_prepare(&demo);
+
+    done = false;  // initialize loop condition variable
+
+    // main message loop
+    while (!done) {
+        if (demo.pause) {
+            const BOOL succ = WaitMessage();
+
+            if (!succ) {
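+                // Shadow the global 'demo' with a same-named local pointer,
+                // apparently so the Win32 ERR_EXIT macro (which expects a
+                // 'demo' pointer in scope) can be used from WinMain.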
+                struct demo *tmp = &demo;
+                struct demo *demo = tmp;
+                ERR_EXIT("WaitMessage() failed on paused demo", "event loop error");
+            }
+        }
+        PeekMessage(&msg, NULL, 0, 0, PM_REMOVE);
+        if (msg.message == WM_QUIT)  // check for a quit message
+        {
+            done = true;  // if found, quit app
+        } else {
+            /* Translate and dispatch to event queue*/
+            TranslateMessage(&msg);
+            DispatchMessage(&msg);
+        }
+        RedrawWindow(demo.window, NULL, NULL, RDW_INTERNALPAINT);
+    }
+
+    demo_cleanup(&demo);
+
+    return (int)msg.wParam;
+}
+
+#elif defined(VK_USE_PLATFORM_METAL_EXT)
+static void demo_main(struct demo *demo, void *caMetalLayer, int argc, const char *argv[]) {
+    demo_init(demo, argc, (char **)argv);
+    demo->caMetalLayer = caMetalLayer;
+    demo_init_vk_swapchain(demo);
+    demo_prepare(demo);
+    demo->spin_angle = 0.4f;
+}
+
+#elif defined(VK_USE_PLATFORM_ANDROID_KHR)
+#include <android/log.h>
+#include <android_native_app_glue.h>
+#include "android_util.h"
+
+static bool initialized = false;
+static bool active = false;
+struct demo demo;
+
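+// Input events are not consumed here; returning 0 reports them as unhandled.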
+static int32_t processInput(struct android_app *app, AInputEvent *event) { return 0; }
+
+static void processCommand(struct android_app *app, int32_t cmd) {
+    switch (cmd) {
+        case APP_CMD_INIT_WINDOW: {
+            if (app->window) {
+                // We're getting a new window.  If the app is starting up, we
+                // need to initialize.  If the app has already been initialized,
+                // we lost our previous window and have work to do: at a minimum,
+                // destroy the swapchain and surface associated with the old
+                // window, and create a new surface and swapchain.  However,
+                // since a lot of other objects and state are tied to the
+                // swapchain, it's easiest to simply clean up and start over
+                // (i.e. take the brute-force approach of restarting the app).
+                if (demo.prepared) {
+                    demo_cleanup(&demo);
+                }
+
+                // Parse Intents into argc, argv
+                // Use the following key to send arguments, e.g.
+                // --es args "--validate"
+                const char key[] = "args";
+                char *appTag = (char *)APP_SHORT_NAME;
+                int argc = 0;
+                char **argv = get_args(app, key, appTag, &argc);
+
+                __android_log_print(ANDROID_LOG_INFO, appTag, "argc = %i", argc);
+                for (int i = 0; i < argc; i++) __android_log_print(ANDROID_LOG_INFO, appTag, "argv[%i] = %s", i, argv[i]);
+
+                demo_init(&demo, argc, argv);
+
+                // Free the argv malloc'd by get_args
+                for (int i = 0; i < argc; i++) free(argv[i]);
+
+                demo.window = (void *)app->window;
+                demo_init_vk_swapchain(&demo);
+                demo_prepare(&demo);
+                initialized = true;
+            }
+            break;
+        }
+        case APP_CMD_GAINED_FOCUS: {
+            active = true;
+            break;
+        }
+        case APP_CMD_LOST_FOCUS: {
+            active = false;
+            break;
+        }
+    }
+}
+
+void android_main(struct android_app *app) {
+#ifdef ANDROID
+    int vulkanSupport = InitVulkan();
+    if (vulkanSupport == 0) return;
+#endif
+
+    demo.prepared = false;
+
+    app->onAppCmd = processCommand;
+    app->onInputEvent = processInput;
+
+    while (1) {
+        int events;
+        struct android_poll_source *source;
+        while (ALooper_pollAll(active ? 0 : -1, NULL, &events, (void **)&source) >= 0) {
+            if (source) {
+                source->process(app, source);
+            }
+
+            if (app->destroyRequested != 0) {
+                demo_cleanup(&demo);
+                return;
+            }
+        }
+        if (initialized && active) {
+            demo_run(&demo);
+        }
+    }
+}
+#else
+int main(int argc, char **argv) {
+    struct demo demo;
+
+    demo_init(&demo, argc, argv);
+#if defined(VK_USE_PLATFORM_XCB_KHR)
+    demo_create_xcb_window(&demo);
+#elif defined(VK_USE_PLATFORM_XLIB_KHR)
+    demo_create_xlib_window(&demo);
+#elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
+    demo_create_window(&demo);
+#endif
+
+    demo_init_vk_swapchain(&demo);
+
+    demo_prepare(&demo);
+
+#if defined(VK_USE_PLATFORM_XCB_KHR)
+    demo_run_xcb(&demo);
+#elif defined(VK_USE_PLATFORM_XLIB_KHR)
+    demo_run_xlib(&demo);
+#elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
+    demo_run(&demo);
+#elif defined(VK_USE_PLATFORM_DISPLAY_KHR)
+    demo_run_display(&demo);
+#endif
+
+    demo_cleanup(&demo);
+
+    return validation_error;
+}
+#endif
diff --git a/src/third_party/vulkan-tools/src/cube/cube.cpp b/src/third_party/vulkan-tools/src/cube/cube.cpp
new file mode 100644
index 0000000..ddaa5a5
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/cube.cpp
@@ -0,0 +1,3054 @@
+/*
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Jeremy Hayes <jeremy@lunarg.com>
+ */
+
+#if defined(VK_USE_PLATFORM_XLIB_KHR) || defined(VK_USE_PLATFORM_XCB_KHR)
+#include <X11/Xutil.h>
+#elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
+#include <linux/input.h>
+#endif
+
+#include <cassert>
+#include <cinttypes>
+#include <cstdio>
+#include <cstdlib>
+#include <cstring>
+#include <csignal>
+#include <iostream>
+#include <sstream>
+#include <memory>
+
+#define VULKAN_HPP_NO_SMART_HANDLE
+#define VULKAN_HPP_NO_EXCEPTIONS
+#define VULKAN_HPP_TYPESAFE_CONVERSION
+#include <vulkan/vulkan.hpp>
+#include <vulkan/vk_sdk_platform.h>
+
+#include "linmath.h"
+
+#ifndef NDEBUG
+#define VERIFY(x) assert(x)
+#else
+#define VERIFY(x) ((void)(x))
+#endif
+
+#define APP_SHORT_NAME "vkcube"
+#ifdef _WIN32
+#define APP_NAME_STR_LEN 80
+#endif
+
+// Allow a maximum of two outstanding presentation operations.
+#define FRAME_LAG 2
+
+#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))
+
+#ifdef _WIN32
+#define ERR_EXIT(err_msg, err_class)                                          \
+    do {                                                                      \
+        if (!suppress_popups) MessageBox(nullptr, err_msg, err_class, MB_OK); \
+        exit(1);                                                              \
+    } while (0)
+#else
+#define ERR_EXIT(err_msg, err_class) \
+    do {                             \
+        printf("%s\n", err_msg);     \
+        fflush(stdout);              \
+        exit(1);                     \
+    } while (0)
+#endif
+
+struct texture_object {
+    vk::Sampler sampler;
+
+    vk::Image image;
+    vk::Buffer buffer;
+    vk::ImageLayout imageLayout{vk::ImageLayout::eUndefined};
+
+    vk::MemoryAllocateInfo mem_alloc;
+    vk::DeviceMemory mem;
+    vk::ImageView view;
+
+    int32_t tex_width{0};
+    int32_t tex_height{0};
+};
+
+static char const *const tex_files[] = {"lunarg.ppm"};
+
+static int validation_error = 0;
+
+struct vkcube_vs_uniform {
+    // Must start with MVP
+    float mvp[4][4];
+    float position[12 * 3][4];
+    float color[12 * 3][4];
+};
+
+struct vktexcube_vs_uniform {
+    // Must start with MVP
+    float mvp[4][4];
+    float position[12 * 3][4];
+    float attr[12 * 3][4];
+};
+
+//--------------------------------------------------------------------------------------
+// Mesh and VertexFormat Data
+//--------------------------------------------------------------------------------------
+// clang-format off
+static const float g_vertex_buffer_data[] = {
+    -1.0f,-1.0f,-1.0f,  // -X side
+    -1.0f,-1.0f, 1.0f,
+    -1.0f, 1.0f, 1.0f,
+    -1.0f, 1.0f, 1.0f,
+    -1.0f, 1.0f,-1.0f,
+    -1.0f,-1.0f,-1.0f,
+
+    -1.0f,-1.0f,-1.0f,  // -Z side
+     1.0f, 1.0f,-1.0f,
+     1.0f,-1.0f,-1.0f,
+    -1.0f,-1.0f,-1.0f,
+    -1.0f, 1.0f,-1.0f,
+     1.0f, 1.0f,-1.0f,
+
+    -1.0f,-1.0f,-1.0f,  // -Y side
+     1.0f,-1.0f,-1.0f,
+     1.0f,-1.0f, 1.0f,
+    -1.0f,-1.0f,-1.0f,
+     1.0f,-1.0f, 1.0f,
+    -1.0f,-1.0f, 1.0f,
+
+    -1.0f, 1.0f,-1.0f,  // +Y side
+    -1.0f, 1.0f, 1.0f,
+     1.0f, 1.0f, 1.0f,
+    -1.0f, 1.0f,-1.0f,
+     1.0f, 1.0f, 1.0f,
+     1.0f, 1.0f,-1.0f,
+
+     1.0f, 1.0f,-1.0f,  // +X side
+     1.0f, 1.0f, 1.0f,
+     1.0f,-1.0f, 1.0f,
+     1.0f,-1.0f, 1.0f,
+     1.0f,-1.0f,-1.0f,
+     1.0f, 1.0f,-1.0f,
+
+    -1.0f, 1.0f, 1.0f,  // +Z side
+    -1.0f,-1.0f, 1.0f,
+     1.0f, 1.0f, 1.0f,
+    -1.0f,-1.0f, 1.0f,
+     1.0f,-1.0f, 1.0f,
+     1.0f, 1.0f, 1.0f,
+};
+
+static const float g_uv_buffer_data[] = {
+    0.0f, 1.0f,  // -X side
+    1.0f, 1.0f,
+    1.0f, 0.0f,
+    1.0f, 0.0f,
+    0.0f, 0.0f,
+    0.0f, 1.0f,
+
+    1.0f, 1.0f,  // -Z side
+    0.0f, 0.0f,
+    0.0f, 1.0f,
+    1.0f, 1.0f,
+    1.0f, 0.0f,
+    0.0f, 0.0f,
+
+    1.0f, 0.0f,  // -Y side
+    1.0f, 1.0f,
+    0.0f, 1.0f,
+    1.0f, 0.0f,
+    0.0f, 1.0f,
+    0.0f, 0.0f,
+
+    1.0f, 0.0f,  // +Y side
+    0.0f, 0.0f,
+    0.0f, 1.0f,
+    1.0f, 0.0f,
+    0.0f, 1.0f,
+    1.0f, 1.0f,
+
+    1.0f, 0.0f,  // +X side
+    0.0f, 0.0f,
+    0.0f, 1.0f,
+    0.0f, 1.0f,
+    1.0f, 1.0f,
+    1.0f, 0.0f,
+
+    0.0f, 0.0f,  // +Z side
+    0.0f, 1.0f,
+    1.0f, 0.0f,
+    0.0f, 1.0f,
+    1.0f, 1.0f,
+    1.0f, 0.0f,
+};
+// clang-format on
+
+typedef struct {
+    vk::Image image;
+    vk::CommandBuffer cmd;
+    vk::CommandBuffer graphics_to_present_cmd;
+    vk::ImageView view;
+    vk::Buffer uniform_buffer;
+    vk::DeviceMemory uniform_memory;
+    vk::Framebuffer framebuffer;
+    vk::DescriptorSet descriptor_set;
+} SwapchainImageResources;
+
+struct Demo {
+    Demo();
+    void build_image_ownership_cmd(uint32_t const &);
+    vk::Bool32 check_layers(uint32_t, const char *const *, uint32_t, vk::LayerProperties *);
+    void cleanup();
+    void create_device();
+    void destroy_texture(texture_object *);
+    void draw();
+    void draw_build_cmd(vk::CommandBuffer);
+    void flush_init_cmd();
+    void init(int, char **);
+    void init_connection();
+    void init_vk();
+    void init_vk_swapchain();
+    void prepare();
+    void prepare_buffers();
+    void prepare_cube_data_buffers();
+    void prepare_depth();
+    void prepare_descriptor_layout();
+    void prepare_descriptor_pool();
+    void prepare_descriptor_set();
+    void prepare_framebuffers();
+    vk::ShaderModule prepare_shader_module(const uint32_t *, size_t);
+    vk::ShaderModule prepare_vs();
+    vk::ShaderModule prepare_fs();
+    void prepare_pipeline();
+    void prepare_render_pass();
+    void prepare_texture_image(const char *, texture_object *, vk::ImageTiling, vk::ImageUsageFlags, vk::MemoryPropertyFlags);
+    void prepare_texture_buffer(const char *, texture_object *);
+    void prepare_textures();
+
+    void resize();
+    void create_surface();
+    void set_image_layout(vk::Image, vk::ImageAspectFlags, vk::ImageLayout, vk::ImageLayout, vk::AccessFlags,
+                          vk::PipelineStageFlags, vk::PipelineStageFlags);
+    void update_data_buffer();
+    bool loadTexture(const char *, uint8_t *, vk::SubresourceLayout *, int32_t *, int32_t *);
+    bool memory_type_from_properties(uint32_t, vk::MemoryPropertyFlags, uint32_t *);
+
+#if defined(VK_USE_PLATFORM_WIN32_KHR)
+    void run();
+    void create_window();
+#elif defined(VK_USE_PLATFORM_XLIB_KHR)
+    void create_xlib_window();
+    void handle_xlib_event(const XEvent *);
+    void run_xlib();
+#elif defined(VK_USE_PLATFORM_XCB_KHR)
+    void handle_xcb_event(const xcb_generic_event_t *);
+    void run_xcb();
+    void create_xcb_window();
+#elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
+    void run();
+    void create_window();
+#elif defined(VK_USE_PLATFORM_METAL_EXT)
+    void run();
+#elif defined(VK_USE_PLATFORM_DISPLAY_KHR)
+    vk::Result create_display_surface();
+    void run_display();
+#endif
+
+#if defined(VK_USE_PLATFORM_WIN32_KHR)
+    HINSTANCE connection;         // hInstance - Windows Instance
+    HWND window;                  // hWnd - window handle
+    POINT minsize;                // minimum window size
+    char name[APP_NAME_STR_LEN];  // Name to put on the window/icon
+#elif defined(VK_USE_PLATFORM_XLIB_KHR)
+    Window xlib_window;
+    Atom xlib_wm_delete_window;
+    Display *display;
+#elif defined(VK_USE_PLATFORM_XCB_KHR)
+    xcb_window_t xcb_window;
+    xcb_screen_t *screen;
+    xcb_connection_t *connection;
+    xcb_intern_atom_reply_t *atom_wm_delete_window;
+#elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
+    wl_display *display;
+    wl_registry *registry;
+    wl_compositor *compositor;
+    wl_surface *window;
+    wl_shell *shell;
+    wl_shell_surface *shell_surface;
+    wl_seat *seat;
+    wl_pointer *pointer;
+    wl_keyboard *keyboard;
+#elif defined(VK_USE_PLATFORM_METAL_EXT)
+    void *caMetalLayer;
+#endif
+
+    vk::SurfaceKHR surface;
+    bool prepared;
+    bool use_staging_buffer;
+    bool use_xlib;
+    bool separate_present_queue;
+
+    vk::Instance inst;
+    vk::PhysicalDevice gpu;
+    vk::Device device;
+    vk::Queue graphics_queue;
+    vk::Queue present_queue;
+    uint32_t graphics_queue_family_index;
+    uint32_t present_queue_family_index;
+    vk::Semaphore image_acquired_semaphores[FRAME_LAG];
+    vk::Semaphore draw_complete_semaphores[FRAME_LAG];
+    vk::Semaphore image_ownership_semaphores[FRAME_LAG];
+    vk::PhysicalDeviceProperties gpu_props;
+    std::unique_ptr<vk::QueueFamilyProperties[]> queue_props;
+    vk::PhysicalDeviceMemoryProperties memory_properties;
+
+    uint32_t enabled_extension_count;
+    uint32_t enabled_layer_count;
+    char const *extension_names[64];
+    char const *enabled_layers[64];
+
+    uint32_t width;
+    uint32_t height;
+    vk::Format format;
+    vk::ColorSpaceKHR color_space;
+
+    uint32_t swapchainImageCount;
+    vk::SwapchainKHR swapchain;
+    std::unique_ptr<SwapchainImageResources[]> swapchain_image_resources;
+    vk::PresentModeKHR presentMode;
+    vk::Fence fences[FRAME_LAG];
+    uint32_t frame_index;
+
+    vk::CommandPool cmd_pool;
+    vk::CommandPool present_cmd_pool;
+
+    struct {
+        vk::Format format;
+        vk::Image image;
+        vk::MemoryAllocateInfo mem_alloc;
+        vk::DeviceMemory mem;
+        vk::ImageView view;
+    } depth;
+
+    static int32_t const texture_count = 1;
+    texture_object textures[texture_count];
+    texture_object staging_texture;
+
+    struct {
+        vk::Buffer buf;
+        vk::MemoryAllocateInfo mem_alloc;
+        vk::DeviceMemory mem;
+        vk::DescriptorBufferInfo buffer_info;
+    } uniform_data;
+
+    vk::CommandBuffer cmd;  // Buffer for initialization commands
+    vk::PipelineLayout pipeline_layout;
+    vk::DescriptorSetLayout desc_layout;
+    vk::PipelineCache pipelineCache;
+    vk::RenderPass render_pass;
+    vk::Pipeline pipeline;
+
+    mat4x4 projection_matrix;
+    mat4x4 view_matrix;
+    mat4x4 model_matrix;
+
+    float spin_angle;
+    float spin_increment;
+    bool pause;
+
+    vk::ShaderModule vert_shader_module;
+    vk::ShaderModule frag_shader_module;
+
+    vk::DescriptorPool desc_pool;
+    vk::DescriptorSet desc_set;
+
+    std::unique_ptr<vk::Framebuffer[]> framebuffers;
+
+    bool quit;
+    uint32_t curFrame;
+    uint32_t frameCount;
+    bool validate;
+    bool use_break;
+    bool suppress_popups;
+
+    uint32_t current_buffer;
+    uint32_t queue_family_count;
+};
+
+#ifdef _WIN32
+// MS-Windows event handling function:
+LRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam);
+#endif
+
+#if defined(VK_USE_PLATFORM_WAYLAND_KHR)
+static void handle_ping(void *data, wl_shell_surface *shell_surface, uint32_t serial) {
+    wl_shell_surface_pong(shell_surface, serial);
+}
+
+static void handle_configure(void *data, wl_shell_surface *shell_surface, uint32_t edges, int32_t width, int32_t height) {}
+
+static void handle_popup_done(void *data, wl_shell_surface *shell_surface) {}
+
+static const wl_shell_surface_listener shell_surface_listener = {handle_ping, handle_configure, handle_popup_done};
+
+static void pointer_handle_enter(void *data, struct wl_pointer *pointer, uint32_t serial, struct wl_surface *surface, wl_fixed_t sx,
+                                 wl_fixed_t sy) {}
+
+static void pointer_handle_leave(void *data, struct wl_pointer *pointer, uint32_t serial, struct wl_surface *surface) {}
+
+static void pointer_handle_motion(void *data, struct wl_pointer *pointer, uint32_t time, wl_fixed_t sx, wl_fixed_t sy) {}
+
+static void pointer_handle_button(void *data, struct wl_pointer *wl_pointer, uint32_t serial, uint32_t time, uint32_t button,
+                                  uint32_t state) {
+    Demo *demo = (Demo *)data;
+    if (button == BTN_LEFT && state == WL_POINTER_BUTTON_STATE_PRESSED) {
+        wl_shell_surface_move(demo->shell_surface, demo->seat, serial);
+    }
+}
+
+static void pointer_handle_axis(void *data, struct wl_pointer *wl_pointer, uint32_t time, uint32_t axis, wl_fixed_t value) {}
+
+static const struct wl_pointer_listener pointer_listener = {
+    pointer_handle_enter, pointer_handle_leave, pointer_handle_motion, pointer_handle_button, pointer_handle_axis,
+};
+
+static void keyboard_handle_keymap(void *data, struct wl_keyboard *keyboard, uint32_t format, int fd, uint32_t size) {}
+
+static void keyboard_handle_enter(void *data, struct wl_keyboard *keyboard, uint32_t serial, struct wl_surface *surface,
+                                  struct wl_array *keys) {}
+
+static void keyboard_handle_leave(void *data, struct wl_keyboard *keyboard, uint32_t serial, struct wl_surface *surface) {}
+
+static void keyboard_handle_key(void *data, struct wl_keyboard *keyboard, uint32_t serial, uint32_t time, uint32_t key,
+                                uint32_t state) {
+    if (state != WL_KEYBOARD_KEY_STATE_RELEASED) return;
+    Demo *demo = (Demo *)data;
+    switch (key) {
+        case KEY_ESC:  // Escape
+            demo->quit = true;
+            break;
+        case KEY_LEFT:  // left arrow key
+            demo->spin_angle -= demo->spin_increment;
+            break;
+        case KEY_RIGHT:  // right arrow key
+            demo->spin_angle += demo->spin_increment;
+            break;
+        case KEY_SPACE:  // space bar
+            demo->pause = !demo->pause;
+            break;
+    }
+}
+
+static void keyboard_handle_modifiers(void *data, wl_keyboard *keyboard, uint32_t serial, uint32_t mods_depressed,
+                                      uint32_t mods_latched, uint32_t mods_locked, uint32_t group) {}
+
+static const struct wl_keyboard_listener keyboard_listener = {
+    keyboard_handle_keymap, keyboard_handle_enter, keyboard_handle_leave, keyboard_handle_key, keyboard_handle_modifiers,
+};
+
+static void seat_handle_capabilities(void *data, wl_seat *seat, uint32_t caps) {
+    // Subscribe to pointer events
+    Demo *demo = (Demo *)data;
+    if ((caps & WL_SEAT_CAPABILITY_POINTER) && !demo->pointer) {
+        demo->pointer = wl_seat_get_pointer(seat);
+        wl_pointer_add_listener(demo->pointer, &pointer_listener, demo);
+    } else if (!(caps & WL_SEAT_CAPABILITY_POINTER) && demo->pointer) {
+        wl_pointer_destroy(demo->pointer);
+        demo->pointer = NULL;
+    }
+    // Subscribe to keyboard events
+    if ((caps & WL_SEAT_CAPABILITY_KEYBOARD) && !demo->keyboard) {
+        demo->keyboard = wl_seat_get_keyboard(seat);
+        wl_keyboard_add_listener(demo->keyboard, &keyboard_listener, demo);
+    } else if (!(caps & WL_SEAT_CAPABILITY_KEYBOARD) && demo->keyboard) {
+        wl_keyboard_destroy(demo->keyboard);
+        demo->keyboard = NULL;
+    }
+}
+
+static const wl_seat_listener seat_listener = {
+    seat_handle_capabilities,
+};
+
+static void registry_handle_global(void *data, wl_registry *registry, uint32_t id, const char *interface, uint32_t version) {
+    Demo *demo = (Demo *)data;
+    // Pick up Wayland objects as they appear.
+    if (strcmp(interface, "wl_compositor") == 0) {
+        demo->compositor = (wl_compositor *)wl_registry_bind(registry, id, &wl_compositor_interface, 1);
+    } else if (strcmp(interface, "wl_shell") == 0) {
+        demo->shell = (wl_shell *)wl_registry_bind(registry, id, &wl_shell_interface, 1);
+    } else if (strcmp(interface, "wl_seat") == 0) {
+        demo->seat = (wl_seat *)wl_registry_bind(registry, id, &wl_seat_interface, 1);
+        wl_seat_add_listener(demo->seat, &seat_listener, demo);
+    }
+}
+
+static void registry_handle_global_remove(void *data, wl_registry *registry, uint32_t name) {}
+
+static const wl_registry_listener registry_listener = {registry_handle_global, registry_handle_global_remove};
+#endif
+
+Demo::Demo()
+    :
+#if defined(VK_USE_PLATFORM_WIN32_KHR)
+      connection{nullptr},
+      window{nullptr},
+      minsize(POINT{0, 0}),  // Use explicit construction to avoid MSVC error C2797.
+#endif
+
+#if defined(VK_USE_PLATFORM_XLIB_KHR)
+      xlib_window{0},
+      xlib_wm_delete_window{0},
+      display{nullptr},
+#elif defined(VK_USE_PLATFORM_XCB_KHR)
+      xcb_window{0},
+      screen{nullptr},
+      connection{nullptr},
+#elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
+      display{nullptr},
+      registry{nullptr},
+      compositor{nullptr},
+      window{nullptr},
+      shell{nullptr},
+      shell_surface{nullptr},
+      seat{nullptr},
+      pointer{nullptr},
+      keyboard{nullptr},
+#endif
+      prepared{false},
+      use_staging_buffer{false},
+      use_xlib{false},
+      graphics_queue_family_index{0},
+      present_queue_family_index{0},
+      enabled_extension_count{0},
+      enabled_layer_count{0},
+      width{0},
+      height{0},
+      swapchainImageCount{0},
+      presentMode{vk::PresentModeKHR::eFifo},
+      frame_index{0},
+      spin_angle{0.0f},
+      spin_increment{0.0f},
+      pause{false},
+      quit{false},
+      curFrame{0},
+      frameCount{0},
+      validate{false},
+      use_break{false},
+      suppress_popups{false},
+      current_buffer{0},
+      queue_family_count{0} {
+#if defined(VK_USE_PLATFORM_WIN32_KHR)
+    memset(name, '\0', APP_NAME_STR_LEN);
+#endif
+    memset(projection_matrix, 0, sizeof(projection_matrix));
+    memset(view_matrix, 0, sizeof(view_matrix));
+    memset(model_matrix, 0, sizeof(model_matrix));
+}
+
+void Demo::build_image_ownership_cmd(uint32_t const &i) {
+    auto const cmd_buf_info = vk::CommandBufferBeginInfo().setFlags(vk::CommandBufferUsageFlagBits::eSimultaneousUse);
+    auto result = swapchain_image_resources[i].graphics_to_present_cmd.begin(&cmd_buf_info);
+    VERIFY(result == vk::Result::eSuccess);
+
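+    // Queue family ownership transfer: the image keeps the PRESENT_SRC_KHR
+    // layout while being released by the graphics queue family and acquired
+    // by the present queue family.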
+    auto const image_ownership_barrier =
+        vk::ImageMemoryBarrier()
+            .setSrcAccessMask(vk::AccessFlags())
+            .setDstAccessMask(vk::AccessFlags())
+            .setOldLayout(vk::ImageLayout::ePresentSrcKHR)
+            .setNewLayout(vk::ImageLayout::ePresentSrcKHR)
+            .setSrcQueueFamilyIndex(graphics_queue_family_index)
+            .setDstQueueFamilyIndex(present_queue_family_index)
+            .setImage(swapchain_image_resources[i].image)
+            .setSubresourceRange(vk::ImageSubresourceRange(vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1));
+
+    swapchain_image_resources[i].graphics_to_present_cmd.pipelineBarrier(
+        vk::PipelineStageFlagBits::eBottomOfPipe, vk::PipelineStageFlagBits::eBottomOfPipe, vk::DependencyFlagBits(), 0, nullptr, 0,
+        nullptr, 1, &image_ownership_barrier);
+
+    result = swapchain_image_resources[i].graphics_to_present_cmd.end();
+    VERIFY(result == vk::Result::eSuccess);
+}
+
+vk::Bool32 Demo::check_layers(uint32_t check_count, char const *const *const check_names, uint32_t layer_count,
+                              vk::LayerProperties *layers) {
+    for (uint32_t i = 0; i < check_count; i++) {
+        vk::Bool32 found = VK_FALSE;
+        for (uint32_t j = 0; j < layer_count; j++) {
+            if (!strcmp(check_names[i], layers[j].layerName)) {
+                found = VK_TRUE;
+                break;
+            }
+        }
+        if (!found) {
+            fprintf(stderr, "Cannot find layer: %s\n", check_names[i]);
+            return 0;
+        }
+    }
+    return VK_TRUE;
+}
+
+void Demo::cleanup() {
+    prepared = false;
+    device.waitIdle();
+
+    // Wait for fences from present operations
+    for (uint32_t i = 0; i < FRAME_LAG; i++) {
+        device.waitForFences(1, &fences[i], VK_TRUE, UINT64_MAX);
+        device.destroyFence(fences[i], nullptr);
+        device.destroySemaphore(image_acquired_semaphores[i], nullptr);
+        device.destroySemaphore(draw_complete_semaphores[i], nullptr);
+        if (separate_present_queue) {
+            device.destroySemaphore(image_ownership_semaphores[i], nullptr);
+        }
+    }
+
+    for (uint32_t i = 0; i < swapchainImageCount; i++) {
+        device.destroyFramebuffer(swapchain_image_resources[i].framebuffer, nullptr);
+    }
+    device.destroyDescriptorPool(desc_pool, nullptr);
+
+    device.destroyPipeline(pipeline, nullptr);
+    device.destroyPipelineCache(pipelineCache, nullptr);
+    device.destroyRenderPass(render_pass, nullptr);
+    device.destroyPipelineLayout(pipeline_layout, nullptr);
+    device.destroyDescriptorSetLayout(desc_layout, nullptr);
+
+    for (uint32_t i = 0; i < texture_count; i++) {
+        device.destroyImageView(textures[i].view, nullptr);
+        device.destroyImage(textures[i].image, nullptr);
+        device.freeMemory(textures[i].mem, nullptr);
+        device.destroySampler(textures[i].sampler, nullptr);
+    }
+    device.destroySwapchainKHR(swapchain, nullptr);
+
+    device.destroyImageView(depth.view, nullptr);
+    device.destroyImage(depth.image, nullptr);
+    device.freeMemory(depth.mem, nullptr);
+
+    for (uint32_t i = 0; i < swapchainImageCount; i++) {
+        device.destroyImageView(swapchain_image_resources[i].view, nullptr);
+        device.freeCommandBuffers(cmd_pool, 1, &swapchain_image_resources[i].cmd);
+        device.destroyBuffer(swapchain_image_resources[i].uniform_buffer, nullptr);
+        device.freeMemory(swapchain_image_resources[i].uniform_memory, nullptr);
+    }
+
+    device.destroyCommandPool(cmd_pool, nullptr);
+
+    if (separate_present_queue) {
+        device.destroyCommandPool(present_cmd_pool, nullptr);
+    }
+    device.waitIdle();
+    device.destroy(nullptr);
+    inst.destroySurfaceKHR(surface, nullptr);
+
+#if defined(VK_USE_PLATFORM_XLIB_KHR)
+    XDestroyWindow(display, xlib_window);
+    XCloseDisplay(display);
+#elif defined(VK_USE_PLATFORM_XCB_KHR)
+    xcb_destroy_window(connection, xcb_window);
+    xcb_disconnect(connection);
+    free(atom_wm_delete_window);
+#elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
+    wl_keyboard_destroy(keyboard);
+    wl_pointer_destroy(pointer);
+    wl_seat_destroy(seat);
+    wl_shell_surface_destroy(shell_surface);
+    wl_surface_destroy(window);
+    wl_shell_destroy(shell);
+    wl_compositor_destroy(compositor);
+    wl_registry_destroy(registry);
+    wl_display_disconnect(display);
+#endif
+
+    inst.destroy(nullptr);
+}
+
+void Demo::create_device() {
+    float const priorities[1] = {0.0};
+
+    vk::DeviceQueueCreateInfo queues[2];
+    queues[0].setQueueFamilyIndex(graphics_queue_family_index);
+    queues[0].setQueueCount(1);
+    queues[0].setPQueuePriorities(priorities);
+
+    auto deviceInfo = vk::DeviceCreateInfo()
+                          .setQueueCreateInfoCount(1)
+                          .setPQueueCreateInfos(queues)
+                          .setEnabledLayerCount(0)
+                          .setPpEnabledLayerNames(nullptr)
+                          .setEnabledExtensionCount(enabled_extension_count)
+                          .setPpEnabledExtensionNames((const char *const *)extension_names)
+                          .setPEnabledFeatures(nullptr);
+
+    if (separate_present_queue) {
+        queues[1].setQueueFamilyIndex(present_queue_family_index);
+        queues[1].setQueueCount(1);
+        queues[1].setPQueuePriorities(priorities);
+        deviceInfo.setQueueCreateInfoCount(2);
+    }
+
+    auto result = gpu.createDevice(&deviceInfo, nullptr, &device);
+    VERIFY(result == vk::Result::eSuccess);
+}
+
+void Demo::destroy_texture(texture_object *tex_objs) {
+    // clean up staging resources
+    device.freeMemory(tex_objs->mem, nullptr);
+    if (tex_objs->image) device.destroyImage(tex_objs->image, nullptr);
+    if (tex_objs->buffer) device.destroyBuffer(tex_objs->buffer, nullptr);
+}
+
+void Demo::draw() {
+    // Ensure no more than FRAME_LAG renderings are outstanding
+    device.waitForFences(1, &fences[frame_index], VK_TRUE, UINT64_MAX);
+    device.resetFences(1, &fences[frame_index]);
+
+    vk::Result result;
+    do {
+        result =
+            device.acquireNextImageKHR(swapchain, UINT64_MAX, image_acquired_semaphores[frame_index], vk::Fence(), &current_buffer);
+        if (result == vk::Result::eErrorOutOfDateKHR) {
+            // demo->swapchain is out of date (e.g. the window was resized) and
+            // must be recreated:
+            resize();
+        } else if (result == vk::Result::eSuboptimalKHR) {
+            // swapchain is not as optimal as it could be, but the platform's
+            // presentation engine will still present the image correctly.
+            break;
+        } else if (result == vk::Result::eErrorSurfaceLostKHR) {
+            inst.destroySurfaceKHR(surface, nullptr);
+            create_surface();
+            resize();
+        } else {
+            VERIFY(result == vk::Result::eSuccess);
+        }
+    } while (result != vk::Result::eSuccess);
+
+    update_data_buffer();
+
+    // Wait for the image acquired semaphore to be signaled to ensure
+    // that the image won't be rendered to until the presentation
+    // engine has fully released ownership to the application, and it is
+    // okay to render to the image.
+    vk::PipelineStageFlags const pipe_stage_flags = vk::PipelineStageFlagBits::eColorAttachmentOutput;
+    auto const submit_info = vk::SubmitInfo()
+                                 .setPWaitDstStageMask(&pipe_stage_flags)
+                                 .setWaitSemaphoreCount(1)
+                                 .setPWaitSemaphores(&image_acquired_semaphores[frame_index])
+                                 .setCommandBufferCount(1)
+                                 .setPCommandBuffers(&swapchain_image_resources[current_buffer].cmd)
+                                 .setSignalSemaphoreCount(1)
+                                 .setPSignalSemaphores(&draw_complete_semaphores[frame_index]);
+
+    result = graphics_queue.submit(1, &submit_info, fences[frame_index]);
+    VERIFY(result == vk::Result::eSuccess);
+
+    if (separate_present_queue) {
+        // If we are using separate queues, change image ownership to the
+        // present queue before presenting, waiting for the draw complete
+        // semaphore and signalling the ownership released semaphore when
+        // finished
+        auto const present_submit_info = vk::SubmitInfo()
+                                             .setPWaitDstStageMask(&pipe_stage_flags)
+                                             .setWaitSemaphoreCount(1)
+                                             .setPWaitSemaphores(&draw_complete_semaphores[frame_index])
+                                             .setCommandBufferCount(1)
+                                             .setPCommandBuffers(&swapchain_image_resources[current_buffer].graphics_to_present_cmd)
+                                             .setSignalSemaphoreCount(1)
+                                             .setPSignalSemaphores(&image_ownership_semaphores[frame_index]);
+
+        result = present_queue.submit(1, &present_submit_info, vk::Fence());
+        VERIFY(result == vk::Result::eSuccess);
+    }
+
+    // If we are using separate queues we have to wait for image ownership,
+    // otherwise wait for draw complete
+    auto const presentInfo = vk::PresentInfoKHR()
+                                 .setWaitSemaphoreCount(1)
+                                 .setPWaitSemaphores(separate_present_queue ? &image_ownership_semaphores[frame_index]
+                                                                            : &draw_complete_semaphores[frame_index])
+                                 .setSwapchainCount(1)
+                                 .setPSwapchains(&swapchain)
+                                 .setPImageIndices(&current_buffer);
+
+    result = present_queue.presentKHR(&presentInfo);
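+    // Advance to the next frame's synchronization objects.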
+    frame_index += 1;
+    frame_index %= FRAME_LAG;
+    if (result == vk::Result::eErrorOutOfDateKHR) {
+        // swapchain is out of date (e.g. the window was resized) and
+        // must be recreated:
+        resize();
+    } else if (result == vk::Result::eSuboptimalKHR) {
+        // swapchain is not as optimal as it could be, but the platform's
+        // presentation engine will still present the image correctly.
+    } else if (result == vk::Result::eErrorSurfaceLostKHR) {
+        inst.destroySurfaceKHR(surface, nullptr);
+        create_surface();
+        resize();
+    } else {
+        VERIFY(result == vk::Result::eSuccess);
+    }
+}
+
+void Demo::draw_build_cmd(vk::CommandBuffer commandBuffer) {
+    auto const commandInfo = vk::CommandBufferBeginInfo().setFlags(vk::CommandBufferUsageFlagBits::eSimultaneousUse);
+
+    vk::ClearValue const clearValues[2] = {vk::ClearColorValue(std::array<float, 4>({{0.2f, 0.2f, 0.2f, 0.2f}})),
+                                           vk::ClearDepthStencilValue(1.0f, 0u)};
+
+    auto const passInfo = vk::RenderPassBeginInfo()
+                              .setRenderPass(render_pass)
+                              .setFramebuffer(swapchain_image_resources[current_buffer].framebuffer)
+                              .setRenderArea(vk::Rect2D(vk::Offset2D(0, 0), vk::Extent2D((uint32_t)width, (uint32_t)height)))
+                              .setClearValueCount(2)
+                              .setPClearValues(clearValues);
+
+    auto result = commandBuffer.begin(&commandInfo);
+    VERIFY(result == vk::Result::eSuccess);
+
+    commandBuffer.beginRenderPass(&passInfo, vk::SubpassContents::eInline);
+    commandBuffer.bindPipeline(vk::PipelineBindPoint::eGraphics, pipeline);
+    commandBuffer.bindDescriptorSets(vk::PipelineBindPoint::eGraphics, pipeline_layout, 0, 1,
+                                     &swapchain_image_resources[current_buffer].descriptor_set, 0, nullptr);
+    float viewport_dimension;
+    float viewport_x = 0.0f;
+    float viewport_y = 0.0f;
+    if (width < height) {
+        viewport_dimension = (float)width;
+        viewport_y = (height - width) / 2.0f;
+    } else {
+        viewport_dimension = (float)height;
+        viewport_x = (width - height) / 2.0f;
+    }
+    auto const viewport = vk::Viewport()
+                              .setX(viewport_x)
+                              .setY(viewport_y)
+                              .setWidth((float)viewport_dimension)
+                              .setHeight((float)viewport_dimension)
+                              .setMinDepth((float)0.0f)
+                              .setMaxDepth((float)1.0f);
+    commandBuffer.setViewport(0, 1, &viewport);
+
+    vk::Rect2D const scissor(vk::Offset2D(0, 0), vk::Extent2D(width, height));
+    commandBuffer.setScissor(0, 1, &scissor);
+    commandBuffer.draw(12 * 3, 1, 0, 0);
+    // Note that ending the renderpass changes the image's layout from
+    // COLOR_ATTACHMENT_OPTIMAL to PRESENT_SRC_KHR
+    commandBuffer.endRenderPass();
+
+    if (separate_present_queue) {
+        // We have to transfer ownership from the graphics queue family to the
+        // present queue family to be able to present.  Note that we don't have
+        // to transfer from the present queue family back to the graphics queue
+        // family at the start of the next frame because we don't care about
+        // the image's contents at that point.
+        auto const image_ownership_barrier =
+            vk::ImageMemoryBarrier()
+                .setSrcAccessMask(vk::AccessFlags())
+                .setDstAccessMask(vk::AccessFlags())
+                .setOldLayout(vk::ImageLayout::ePresentSrcKHR)
+                .setNewLayout(vk::ImageLayout::ePresentSrcKHR)
+                .setSrcQueueFamilyIndex(graphics_queue_family_index)
+                .setDstQueueFamilyIndex(present_queue_family_index)
+                .setImage(swapchain_image_resources[current_buffer].image)
+                .setSubresourceRange(vk::ImageSubresourceRange(vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1));
+
+        commandBuffer.pipelineBarrier(vk::PipelineStageFlagBits::eBottomOfPipe, vk::PipelineStageFlagBits::eBottomOfPipe,
+                                      vk::DependencyFlagBits(), 0, nullptr, 0, nullptr, 1, &image_ownership_barrier);
+    }
+
+    result = commandBuffer.end();
+    VERIFY(result == vk::Result::eSuccess);
+}
+
+void Demo::flush_init_cmd() {
+    // TODO: This function can get called twice if the texture uses a staging
+    // buffer; in that case the second call should be ignored.
+    if (!cmd) {
+        return;
+    }
+
+    auto result = cmd.end();
+    VERIFY(result == vk::Result::eSuccess);
+
+    auto const fenceInfo = vk::FenceCreateInfo();
+    vk::Fence fence;
+    result = device.createFence(&fenceInfo, nullptr, &fence);
+    VERIFY(result == vk::Result::eSuccess);
+
+    vk::CommandBuffer const commandBuffers[] = {cmd};
+    auto const submitInfo = vk::SubmitInfo().setCommandBufferCount(1).setPCommandBuffers(commandBuffers);
+
+    result = graphics_queue.submit(1, &submitInfo, fence);
+    VERIFY(result == vk::Result::eSuccess);
+
+    result = device.waitForFences(1, &fence, VK_TRUE, UINT64_MAX);
+    VERIFY(result == vk::Result::eSuccess);
+
+    device.freeCommandBuffers(cmd_pool, 1, commandBuffers);
+    device.destroyFence(fence, nullptr);
+
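+    // Reset to a null handle so a second call to flush_init_cmd() is a no-op.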
+    cmd = vk::CommandBuffer();
+}
+
+void Demo::init(int argc, char **argv) {
+    vec3 eye = {0.0f, 3.0f, 5.0f};
+    vec3 origin = {0, 0, 0};
+    vec3 up = {0.0f, 1.0f, 0.0};
+
+    presentMode = vk::PresentModeKHR::eFifo;
+    frameCount = UINT32_MAX;
+    use_xlib = false;
+
+    for (int i = 1; i < argc; i++) {
+        if (strcmp(argv[i], "--use_staging") == 0) {
+            use_staging_buffer = true;
+            continue;
+        }
+        if ((strcmp(argv[i], "--present_mode") == 0) && (i < argc - 1)) {
+            presentMode = (vk::PresentModeKHR)atoi(argv[i + 1]);
+            i++;
+            continue;
+        }
+        if (strcmp(argv[i], "--break") == 0) {
+            use_break = true;
+            continue;
+        }
+        if (strcmp(argv[i], "--validate") == 0) {
+            validate = true;
+            continue;
+        }
+        if (strcmp(argv[i], "--xlib") == 0) {
+            fprintf(stderr, "--xlib is deprecated and no longer does anything");
+            continue;
+        }
+        if (strcmp(argv[i], "--c") == 0 && frameCount == UINT32_MAX && i < argc - 1 &&
+            sscanf(argv[i + 1], "%" SCNu32, &frameCount) == 1) {
+            i++;
+            continue;
+        }
+        if (strcmp(argv[i], "--suppress_popups") == 0) {
+            suppress_popups = true;
+            continue;
+        }
+
+        std::stringstream usage;
+        usage << "Usage:\n  " << APP_SHORT_NAME << "\t[--use_staging] [--validate]\n"
+              << "\t[--break] [--c <framecount>] [--suppress_popups]\n"
+              << "\t[--present_mode <present mode enum>]\n"
+              << "\t<present_mode_enum>\n"
+              << "\t\tVK_PRESENT_MODE_IMMEDIATE_KHR = " << VK_PRESENT_MODE_IMMEDIATE_KHR << "\n"
+              << "\t\tVK_PRESENT_MODE_MAILBOX_KHR = " << VK_PRESENT_MODE_MAILBOX_KHR << "\n"
+              << "\t\tVK_PRESENT_MODE_FIFO_KHR = " << VK_PRESENT_MODE_FIFO_KHR << "\n"
+              << "\t\tVK_PRESENT_MODE_FIFO_RELAXED_KHR = " << VK_PRESENT_MODE_FIFO_RELAXED_KHR;
+
+#if defined(_WIN32)
+        if (!suppress_popups) MessageBox(NULL, usage.str().c_str(), "Usage Error", MB_OK);
+#else
+        std::cerr << usage.str();
+        std::cerr.flush();
+#endif
+        exit(1);
+    }
+
+    if (!use_xlib) {
+        init_connection();
+    }
+
+    init_vk();
+
+    width = 500;
+    height = 500;
+
+    spin_angle = 4.0f;
+    spin_increment = 0.2f;
+    pause = false;
+
+    mat4x4_perspective(projection_matrix, (float)degreesToRadians(45.0f), 1.0f, 0.1f, 100.0f);
+    mat4x4_look_at(view_matrix, eye, origin, up);
+    mat4x4_identity(model_matrix);
+
+    projection_matrix[1][1] *= -1;  // Flip projection matrix from GL to Vulkan orientation.
+}
+
+void Demo::init_connection() {
+#if defined(VK_USE_PLATFORM_XCB_KHR)
+    const xcb_setup_t *setup;
+    xcb_screen_iterator_t iter;
+    int scr;
+
+    const char *display_envar = getenv("DISPLAY");
+    if (display_envar == nullptr || display_envar[0] == '\0') {
+        printf("Environment variable DISPLAY requires a valid value.\nExiting ...\n");
+        fflush(stdout);
+        exit(1);
+    }
+
+    connection = xcb_connect(nullptr, &scr);
+    if (xcb_connection_has_error(connection) > 0) {
+        printf(
+            "Cannot find a compatible Vulkan installable client driver "
+            "(ICD).\nExiting ...\n");
+        fflush(stdout);
+        exit(1);
+    }
+
+    setup = xcb_get_setup(connection);
+    iter = xcb_setup_roots_iterator(setup);
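+    // Advance the iterator to the screen index reported by xcb_connect().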
+    while (scr-- > 0) xcb_screen_next(&iter);
+
+    screen = iter.data;
+#elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
+    display = wl_display_connect(nullptr);
+
+    if (display == nullptr) {
+        printf("Cannot find a compatible Vulkan installable client driver (ICD).\nExiting ...\n");
+        fflush(stdout);
+        exit(1);
+    }
+
+    registry = wl_display_get_registry(display);
+    wl_registry_add_listener(registry, &registry_listener, this);
+    wl_display_dispatch(display);
+#endif
+}
+
+void Demo::init_vk() {
+    uint32_t instance_extension_count = 0;
+    uint32_t instance_layer_count = 0;
+    char const *const instance_validation_layers[] = {"VK_LAYER_KHRONOS_validation"};
+    enabled_extension_count = 0;
+    enabled_layer_count = 0;
+
+    // Look for validation layers
+    vk::Bool32 validation_found = VK_FALSE;
+    if (validate) {
+        auto result = vk::enumerateInstanceLayerProperties(&instance_layer_count, static_cast<vk::LayerProperties *>(nullptr));
+        VERIFY(result == vk::Result::eSuccess);
+
+        if (instance_layer_count > 0) {
+            std::unique_ptr<vk::LayerProperties[]> instance_layers(new vk::LayerProperties[instance_layer_count]);
+            result = vk::enumerateInstanceLayerProperties(&instance_layer_count, instance_layers.get());
+            VERIFY(result == vk::Result::eSuccess);
+
+            validation_found = check_layers(ARRAY_SIZE(instance_validation_layers), instance_validation_layers,
+                                            instance_layer_count, instance_layers.get());
+            if (validation_found) {
+                enabled_layer_count = ARRAY_SIZE(instance_validation_layers);
+                enabled_layers[0] = "VK_LAYER_KHRONOS_validation";
+            }
+        }
+
+        if (!validation_found) {
+            ERR_EXIT(
+                "vkEnumerateInstanceLayerProperties failed to find required validation layer.\n\n"
+                "Please look at the Getting Started guide for additional information.\n",
+                "vkCreateInstance Failure");
+        }
+    }
+
+    /* Look for instance extensions */
+    vk::Bool32 surfaceExtFound = VK_FALSE;
+    vk::Bool32 platformSurfaceExtFound = VK_FALSE;
+    memset(extension_names, 0, sizeof(extension_names));
+
+    auto result = vk::enumerateInstanceExtensionProperties(nullptr, &instance_extension_count,
+                                                           static_cast<vk::ExtensionProperties *>(nullptr));
+    VERIFY(result == vk::Result::eSuccess);
+
+    if (instance_extension_count > 0) {
+        std::unique_ptr<vk::ExtensionProperties[]> instance_extensions(new vk::ExtensionProperties[instance_extension_count]);
+        result = vk::enumerateInstanceExtensionProperties(nullptr, &instance_extension_count, instance_extensions.get());
+        VERIFY(result == vk::Result::eSuccess);
+
+        for (uint32_t i = 0; i < instance_extension_count; i++) {
+            if (!strcmp(VK_KHR_SURFACE_EXTENSION_NAME, instance_extensions[i].extensionName)) {
+                surfaceExtFound = 1;
+                extension_names[enabled_extension_count++] = VK_KHR_SURFACE_EXTENSION_NAME;
+            }
+#if defined(VK_USE_PLATFORM_WIN32_KHR)
+            if (!strcmp(VK_KHR_WIN32_SURFACE_EXTENSION_NAME, instance_extensions[i].extensionName)) {
+                platformSurfaceExtFound = 1;
+                extension_names[enabled_extension_count++] = VK_KHR_WIN32_SURFACE_EXTENSION_NAME;
+            }
+#elif defined(VK_USE_PLATFORM_XLIB_KHR)
+            if (!strcmp(VK_KHR_XLIB_SURFACE_EXTENSION_NAME, instance_extensions[i].extensionName)) {
+                platformSurfaceExtFound = 1;
+                extension_names[enabled_extension_count++] = VK_KHR_XLIB_SURFACE_EXTENSION_NAME;
+            }
+#elif defined(VK_USE_PLATFORM_XCB_KHR)
+            if (!strcmp(VK_KHR_XCB_SURFACE_EXTENSION_NAME, instance_extensions[i].extensionName)) {
+                platformSurfaceExtFound = 1;
+                extension_names[enabled_extension_count++] = VK_KHR_XCB_SURFACE_EXTENSION_NAME;
+            }
+#elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
+            if (!strcmp(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME, instance_extensions[i].extensionName)) {
+                platformSurfaceExtFound = 1;
+                extension_names[enabled_extension_count++] = VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME;
+            }
+#elif defined(VK_USE_PLATFORM_DISPLAY_KHR)
+            if (!strcmp(VK_KHR_DISPLAY_EXTENSION_NAME, instance_extensions[i].extensionName)) {
+                platformSurfaceExtFound = 1;
+                extension_names[enabled_extension_count++] = VK_KHR_DISPLAY_EXTENSION_NAME;
+            }
+#elif defined(VK_USE_PLATFORM_METAL_EXT)
+            if (!strcmp(VK_EXT_METAL_SURFACE_EXTENSION_NAME, instance_extensions[i].extensionName)) {
+                platformSurfaceExtFound = 1;
+                extension_names[enabled_extension_count++] = VK_EXT_METAL_SURFACE_EXTENSION_NAME;
+            }
+
+#endif
+            assert(enabled_extension_count < 64);
+        }
+    }
+
+    if (!surfaceExtFound) {
+        ERR_EXIT("vkEnumerateInstanceExtensionProperties failed to find the " VK_KHR_SURFACE_EXTENSION_NAME
+                 " extension.\n\n"
+                 "Do you have a compatible Vulkan installable client driver (ICD) installed?\n"
+                 "Please look at the Getting Started guide for additional information.\n",
+                 "vkCreateInstance Failure");
+    }
+
+    if (!platformSurfaceExtFound) {
+#if defined(VK_USE_PLATFORM_WIN32_KHR)
+        ERR_EXIT("vkEnumerateInstanceExtensionProperties failed to find the " VK_KHR_WIN32_SURFACE_EXTENSION_NAME
+                 " extension.\n\n"
+                 "Do you have a compatible Vulkan installable client driver (ICD) installed?\n"
+                 "Please look at the Getting Started guide for additional information.\n",
+                 "vkCreateInstance Failure");
+#elif defined(VK_USE_PLATFORM_XCB_KHR)
+        ERR_EXIT("vkEnumerateInstanceExtensionProperties failed to find the " VK_KHR_XCB_SURFACE_EXTENSION_NAME
+                 " extension.\n\n"
+                 "Do you have a compatible Vulkan installable client driver (ICD) installed?\n"
+                 "Please look at the Getting Started guide for additional information.\n",
+                 "vkCreateInstance Failure");
+#elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
+        ERR_EXIT("vkEnumerateInstanceExtensionProperties failed to find the " VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME
+                 " extension.\n\n"
+                 "Do you have a compatible Vulkan installable client driver (ICD) installed?\n"
+                 "Please look at the Getting Started guide for additional information.\n",
+                 "vkCreateInstance Failure");
+#elif defined(VK_USE_PLATFORM_XLIB_KHR)
+        ERR_EXIT("vkEnumerateInstanceExtensionProperties failed to find the " VK_KHR_XLIB_SURFACE_EXTENSION_NAME
+                 " extension.\n\n"
+                 "Do you have a compatible Vulkan installable client driver (ICD) installed?\n"
+                 "Please look at the Getting Started guide for additional information.\n",
+                 "vkCreateInstance Failure");
+#elif defined(VK_USE_PLATFORM_DISPLAY_KHR)
+        ERR_EXIT("vkEnumerateInstanceExtensionProperties failed to find the " VK_KHR_DISPLAY_EXTENSION_NAME
+                 " extension.\n\n"
+                 "Do you have a compatible Vulkan installable client driver (ICD) installed?\n"
+                 "Please look at the Getting Started guide for additional information.\n",
+                 "vkCreateInstance Failure");
+#elif defined(VK_USE_PLATFORM_METAL_EXT)
+        ERR_EXIT("vkEnumerateInstanceExtensionProperties failed to find the " VK_EXT_METAL_SURFACE_EXTENSION_NAME
+                 " extension.\n\nDo you have a compatible "
+                 "Vulkan installable client driver (ICD) installed?\nPlease "
+                 "look at the Getting Started guide for additional "
+                 "information.\n",
+                 "vkCreateInstance Failure");
+#endif
+    }
+    auto const app = vk::ApplicationInfo()
+                         .setPApplicationName(APP_SHORT_NAME)
+                         .setApplicationVersion(0)
+                         .setPEngineName(APP_SHORT_NAME)
+                         .setEngineVersion(0)
+                         .setApiVersion(VK_API_VERSION_1_0);
+    auto const inst_info = vk::InstanceCreateInfo()
+                               .setPApplicationInfo(&app)
+                               .setEnabledLayerCount(enabled_layer_count)
+                               .setPpEnabledLayerNames(instance_validation_layers)
+                               .setEnabledExtensionCount(enabled_extension_count)
+                               .setPpEnabledExtensionNames(extension_names);
+
+    result = vk::createInstance(&inst_info, nullptr, &inst);
+    if (result == vk::Result::eErrorIncompatibleDriver) {
+        ERR_EXIT(
+            "Cannot find a compatible Vulkan installable client driver (ICD).\n\n"
+            "Please look at the Getting Started guide for additional information.\n",
+            "vkCreateInstance Failure");
+    } else if (result == vk::Result::eErrorExtensionNotPresent) {
+        ERR_EXIT(
+            "Cannot find a specified extension library.\n"
+            "Make sure your layers path is set appropriately.\n",
+            "vkCreateInstance Failure");
+    } else if (result != vk::Result::eSuccess) {
+        ERR_EXIT(
+            "vkCreateInstance failed.\n\n"
+            "Do you have a compatible Vulkan installable client driver (ICD) installed?\n"
+            "Please look at the Getting Started guide for additional information.\n",
+            "vkCreateInstance Failure");
+    }
+
+    /* Make initial call to query gpu_count, then second call for gpu info */
+    uint32_t gpu_count;
+    result = inst.enumeratePhysicalDevices(&gpu_count, static_cast<vk::PhysicalDevice *>(nullptr));
+    VERIFY(result == vk::Result::eSuccess);
+
+    if (gpu_count > 0) {
+        std::unique_ptr<vk::PhysicalDevice[]> physical_devices(new vk::PhysicalDevice[gpu_count]);
+        result = inst.enumeratePhysicalDevices(&gpu_count, physical_devices.get());
+        VERIFY(result == vk::Result::eSuccess);
+        /* For cube demo we just grab the first physical device */
+        gpu = physical_devices[0];
+    } else {
+        ERR_EXIT(
+            "vkEnumeratePhysicalDevices reported zero accessible devices.\n\n"
+            "Do you have a compatible Vulkan installable client driver (ICD) installed?\n"
+            "Please look at the Getting Started guide for additional information.\n",
+            "vkEnumeratePhysicalDevices Failure");
+    }
+
+    /* Look for device extensions */
+    uint32_t device_extension_count = 0;
+    vk::Bool32 swapchainExtFound = VK_FALSE;
+    enabled_extension_count = 0;
+    memset(extension_names, 0, sizeof(extension_names));
+
+    result =
+        gpu.enumerateDeviceExtensionProperties(nullptr, &device_extension_count, static_cast<vk::ExtensionProperties *>(nullptr));
+    VERIFY(result == vk::Result::eSuccess);
+
+    if (device_extension_count > 0) {
+        std::unique_ptr<vk::ExtensionProperties[]> device_extensions(new vk::ExtensionProperties[device_extension_count]);
+        result = gpu.enumerateDeviceExtensionProperties(nullptr, &device_extension_count, device_extensions.get());
+        VERIFY(result == vk::Result::eSuccess);
+
+        for (uint32_t i = 0; i < device_extension_count; i++) {
+            if (!strcmp(VK_KHR_SWAPCHAIN_EXTENSION_NAME, device_extensions[i].extensionName)) {
+                swapchainExtFound = 1;
+                extension_names[enabled_extension_count++] = VK_KHR_SWAPCHAIN_EXTENSION_NAME;
+            }
+            assert(enabled_extension_count < 64);
+        }
+    }
+
+    if (!swapchainExtFound) {
+        ERR_EXIT("vkEnumerateDeviceExtensionProperties failed to find the " VK_KHR_SWAPCHAIN_EXTENSION_NAME
+                 " extension.\n\n"
+                 "Do you have a compatible Vulkan installable client driver (ICD) installed?\n"
+                 "Please look at the Getting Started guide for additional information.\n",
+                 "vkCreateInstance Failure");
+    }
+
+    gpu.getProperties(&gpu_props);
+
+    /* Call with nullptr data to get count */
+    gpu.getQueueFamilyProperties(&queue_family_count, static_cast<vk::QueueFamilyProperties *>(nullptr));
+    assert(queue_family_count >= 1);
+
+    queue_props.reset(new vk::QueueFamilyProperties[queue_family_count]);
+    gpu.getQueueFamilyProperties(&queue_family_count, queue_props.get());
+
+    // Query fine-grained feature support for this device.
+    //  If app has specific feature requirements it should check supported
+    //  features based on this query
+    vk::PhysicalDeviceFeatures physDevFeatures;
+    gpu.getFeatures(&physDevFeatures);
+}
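+
+// Added commentary (not part of the upstream demo): the initialization above
+// relies on the standard Vulkan "two-call" enumeration idiom, which recurs
+// throughout this file (physical devices, queue families, surface formats,
+// present modes): call once with a null output pointer to get a count,
+// allocate storage, then call again to fill it, e.g.
+//
+//     uint32_t count = 0;
+//     inst.enumeratePhysicalDevices(&count, static_cast<vk::PhysicalDevice *>(nullptr));
+//     std::vector<vk::PhysicalDevice> devices(count);
+//     inst.enumeratePhysicalDevices(&count, devices.data());
+//
+// (std::vector is used here only for brevity; the demo itself keeps
+// std::unique_ptr<T[]> arrays.)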
+
+void Demo::create_surface() {
+// Create a WSI surface for the window:
+#if defined(VK_USE_PLATFORM_WIN32_KHR)
+    {
+        auto const createInfo = vk::Win32SurfaceCreateInfoKHR().setHinstance(connection).setHwnd(window);
+
+        auto result = inst.createWin32SurfaceKHR(&createInfo, nullptr, &surface);
+        VERIFY(result == vk::Result::eSuccess);
+    }
+#elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
+    {
+        auto const createInfo = vk::WaylandSurfaceCreateInfoKHR().setDisplay(display).setSurface(window);
+
+        auto result = inst.createWaylandSurfaceKHR(&createInfo, nullptr, &surface);
+        VERIFY(result == vk::Result::eSuccess);
+    }
+#elif defined(VK_USE_PLATFORM_XLIB_KHR)
+    {
+        auto const createInfo = vk::XlibSurfaceCreateInfoKHR().setDpy(display).setWindow(xlib_window);
+
+        auto result = inst.createXlibSurfaceKHR(&createInfo, nullptr, &surface);
+        VERIFY(result == vk::Result::eSuccess);
+    }
+#elif defined(VK_USE_PLATFORM_XCB_KHR)
+    {
+        auto const createInfo = vk::XcbSurfaceCreateInfoKHR().setConnection(connection).setWindow(xcb_window);
+
+        auto result = inst.createXcbSurfaceKHR(&createInfo, nullptr, &surface);
+        VERIFY(result == vk::Result::eSuccess);
+    }
+#elif defined(VK_USE_PLATFORM_METAL_EXT)
+    {
+        auto const createInfo = vk::MetalSurfaceCreateInfoEXT().setPLayer(static_cast<CAMetalLayer *>(caMetalLayer));
+
+        auto result = inst.createMetalSurfaceEXT(&createInfo, nullptr, &surface);
+        VERIFY(result == vk::Result::eSuccess);
+    }
+#elif defined(VK_USE_PLATFORM_DISPLAY_KHR)
+    {
+        auto result = create_display_surface();
+        VERIFY(result == vk::Result::eSuccess);
+    }
+#endif
+}
+
+void Demo::init_vk_swapchain() {
+    create_surface();
+    // Iterate over each queue to learn whether it supports presenting:
+    std::unique_ptr<vk::Bool32[]> supportsPresent(new vk::Bool32[queue_family_count]);
+    for (uint32_t i = 0; i < queue_family_count; i++) {
+        gpu.getSurfaceSupportKHR(i, surface, &supportsPresent[i]);
+    }
+
+    uint32_t graphicsQueueFamilyIndex = UINT32_MAX;
+    uint32_t presentQueueFamilyIndex = UINT32_MAX;
+    for (uint32_t i = 0; i < queue_family_count; i++) {
+        if (queue_props[i].queueFlags & vk::QueueFlagBits::eGraphics) {
+            if (graphicsQueueFamilyIndex == UINT32_MAX) {
+                graphicsQueueFamilyIndex = i;
+            }
+
+            if (supportsPresent[i] == VK_TRUE) {
+                graphicsQueueFamilyIndex = i;
+                presentQueueFamilyIndex = i;
+                break;
+            }
+        }
+    }
+
+    if (presentQueueFamilyIndex == UINT32_MAX) {
+        // If we didn't find a queue that supports both graphics and present,
+        // then find a separate present queue.
+        for (uint32_t i = 0; i < queue_family_count; ++i) {
+            if (supportsPresent[i] == VK_TRUE) {
+                presentQueueFamilyIndex = i;
+                break;
+            }
+        }
+    }
+
+    // Generate an error if we could not find both a graphics and a present queue
+    if (graphicsQueueFamilyIndex == UINT32_MAX || presentQueueFamilyIndex == UINT32_MAX) {
+        ERR_EXIT("Could not find both graphics and present queues\n", "Swapchain Initialization Failure");
+    }
+
+    graphics_queue_family_index = graphicsQueueFamilyIndex;
+    present_queue_family_index = presentQueueFamilyIndex;
+    separate_present_queue = (graphics_queue_family_index != present_queue_family_index);
+
+    create_device();
+
+    device.getQueue(graphics_queue_family_index, 0, &graphics_queue);
+    if (!separate_present_queue) {
+        present_queue = graphics_queue;
+    } else {
+        device.getQueue(present_queue_family_index, 0, &present_queue);
+    }
+
+    // Get the list of VkFormat's that are supported:
+    uint32_t formatCount;
+    auto result = gpu.getSurfaceFormatsKHR(surface, &formatCount, static_cast<vk::SurfaceFormatKHR *>(nullptr));
+    VERIFY(result == vk::Result::eSuccess);
+
+    std::unique_ptr<vk::SurfaceFormatKHR[]> surfFormats(new vk::SurfaceFormatKHR[formatCount]);
+    result = gpu.getSurfaceFormatsKHR(surface, &formatCount, surfFormats.get());
+    VERIFY(result == vk::Result::eSuccess);
+
+    // If the format list includes just one entry of VK_FORMAT_UNDEFINED,
+    // the surface has no preferred format.  Otherwise, at least one
+    // supported format will be returned.
+    if (formatCount == 1 && surfFormats[0].format == vk::Format::eUndefined) {
+        format = vk::Format::eB8G8R8A8Unorm;
+    } else {
+        assert(formatCount >= 1);
+        format = surfFormats[0].format;
+    }
+    color_space = surfFormats[0].colorSpace;
+
+    quit = false;
+    curFrame = 0;
+
+    // Create semaphores to synchronize acquiring presentable buffers before
+    // rendering and waiting for drawing to be complete before presenting
+    auto const semaphoreCreateInfo = vk::SemaphoreCreateInfo();
+
+    // Create fences that we can use to throttle if we get too far
+    // ahead of the image presents
+    auto const fence_ci = vk::FenceCreateInfo().setFlags(vk::FenceCreateFlagBits::eSignaled);
+    for (uint32_t i = 0; i < FRAME_LAG; i++) {
+        result = device.createFence(&fence_ci, nullptr, &fences[i]);
+        VERIFY(result == vk::Result::eSuccess);
+
+        result = device.createSemaphore(&semaphoreCreateInfo, nullptr, &image_acquired_semaphores[i]);
+        VERIFY(result == vk::Result::eSuccess);
+
+        result = device.createSemaphore(&semaphoreCreateInfo, nullptr, &draw_complete_semaphores[i]);
+        VERIFY(result == vk::Result::eSuccess);
+
+        if (separate_present_queue) {
+            result = device.createSemaphore(&semaphoreCreateInfo, nullptr, &image_ownership_semaphores[i]);
+            VERIFY(result == vk::Result::eSuccess);
+        }
+    }
+    frame_index = 0;
+
+    // Get Memory information and properties
+    gpu.getMemoryProperties(&memory_properties);
+}
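+
+// Added commentary (illustrative, not part of the upstream demo): the FRAME_LAG
+// fences created above are what keep the CPU from running arbitrarily far
+// ahead of presentation.  A draw loop built on them typically looks like:
+//
+//     device.waitForFences(1, &fences[frame_index], VK_TRUE, UINT64_MAX);
+//     device.resetFences(1, &fences[frame_index]);
+//     // ... acquire an image, then submit work that signals fences[frame_index]
+//     frame_index = (frame_index + 1) % FRAME_LAG;
+//
+// so that at most FRAME_LAG frames are ever in flight at once.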
+
+void Demo::prepare() {
+    auto const cmd_pool_info = vk::CommandPoolCreateInfo().setQueueFamilyIndex(graphics_queue_family_index);
+    auto result = device.createCommandPool(&cmd_pool_info, nullptr, &cmd_pool);
+    VERIFY(result == vk::Result::eSuccess);
+
+    auto const cmd = vk::CommandBufferAllocateInfo()
+                         .setCommandPool(cmd_pool)
+                         .setLevel(vk::CommandBufferLevel::ePrimary)
+                         .setCommandBufferCount(1);
+
+    result = device.allocateCommandBuffers(&cmd, &this->cmd);
+    VERIFY(result == vk::Result::eSuccess);
+
+    auto const cmd_buf_info = vk::CommandBufferBeginInfo().setPInheritanceInfo(nullptr);
+
+    result = this->cmd.begin(&cmd_buf_info);
+    VERIFY(result == vk::Result::eSuccess);
+
+    prepare_buffers();
+    prepare_depth();
+    prepare_textures();
+    prepare_cube_data_buffers();
+
+    prepare_descriptor_layout();
+    prepare_render_pass();
+    prepare_pipeline();
+
+    for (uint32_t i = 0; i < swapchainImageCount; ++i) {
+        result = device.allocateCommandBuffers(&cmd, &swapchain_image_resources[i].cmd);
+        VERIFY(result == vk::Result::eSuccess);
+    }
+
+    if (separate_present_queue) {
+        auto const present_cmd_pool_info = vk::CommandPoolCreateInfo().setQueueFamilyIndex(present_queue_family_index);
+
+        result = device.createCommandPool(&present_cmd_pool_info, nullptr, &present_cmd_pool);
+        VERIFY(result == vk::Result::eSuccess);
+
+        auto const present_cmd = vk::CommandBufferAllocateInfo()
+                                     .setCommandPool(present_cmd_pool)
+                                     .setLevel(vk::CommandBufferLevel::ePrimary)
+                                     .setCommandBufferCount(1);
+
+        for (uint32_t i = 0; i < swapchainImageCount; i++) {
+            result = device.allocateCommandBuffers(&present_cmd, &swapchain_image_resources[i].graphics_to_present_cmd);
+            VERIFY(result == vk::Result::eSuccess);
+
+            build_image_ownership_cmd(i);
+        }
+    }
+
+    prepare_descriptor_pool();
+    prepare_descriptor_set();
+
+    prepare_framebuffers();
+
+    for (uint32_t i = 0; i < swapchainImageCount; ++i) {
+        current_buffer = i;
+        draw_build_cmd(swapchain_image_resources[i].cmd);
+    }
+
+    /*
+     * Prepare functions above may generate pipeline commands
+     * that need to be flushed before beginning the render loop.
+     */
+    flush_init_cmd();
+    if (staging_texture.buffer) {
+        destroy_texture(&staging_texture);
+    }
+
+    current_buffer = 0;
+    prepared = true;
+}
+
+void Demo::prepare_buffers() {
+    vk::SwapchainKHR oldSwapchain = swapchain;
+
+    // Check the surface capabilities and formats
+    vk::SurfaceCapabilitiesKHR surfCapabilities;
+    auto result = gpu.getSurfaceCapabilitiesKHR(surface, &surfCapabilities);
+    VERIFY(result == vk::Result::eSuccess);
+
+    uint32_t presentModeCount;
+    result = gpu.getSurfacePresentModesKHR(surface, &presentModeCount, static_cast<vk::PresentModeKHR *>(nullptr));
+    VERIFY(result == vk::Result::eSuccess);
+
+    std::unique_ptr<vk::PresentModeKHR[]> presentModes(new vk::PresentModeKHR[presentModeCount]);
+    result = gpu.getSurfacePresentModesKHR(surface, &presentModeCount, presentModes.get());
+    VERIFY(result == vk::Result::eSuccess);
+
+    vk::Extent2D swapchainExtent;
+    // width and height are either both -1, or both not -1.
+    if (surfCapabilities.currentExtent.width == (uint32_t)-1) {
+        // If the surface size is undefined, the size is set to
+        // the size of the images requested.
+        swapchainExtent.width = width;
+        swapchainExtent.height = height;
+    } else {
+        // If the surface size is defined, the swap chain size must match
+        swapchainExtent = surfCapabilities.currentExtent;
+        width = surfCapabilities.currentExtent.width;
+        height = surfCapabilities.currentExtent.height;
+    }
+
+    // The FIFO present mode is guaranteed by the spec to be supported
+    // and to have no tearing.  It's a great default present mode to use.
+    vk::PresentModeKHR swapchainPresentMode = vk::PresentModeKHR::eFifo;
+
+    //  There are times when you may wish to use another present mode.  The
+    //  following code shows how to select them, and the comments provide some
+    //  reasons you may wish to use them.
+    //
+    // It should be noted that Vulkan 1.0 doesn't provide a method for
+    // synchronizing rendering with the presentation engine's display.  There
+    // is a method provided for throttling rendering with the display, but
+    // there are some presentation engines for which this method will not work.
+    // If an application doesn't throttle its rendering, and if it renders much
+    // faster than the refresh rate of the display, this can waste power on
+    // mobile devices.  That is because power is being spent rendering images
+    // that may never be seen.
+
+    // VK_PRESENT_MODE_IMMEDIATE_KHR is for applications that don't care about
+    // tearing, or have some way of synchronizing their rendering with the
+    // display.
+    //
+    // VK_PRESENT_MODE_MAILBOX_KHR may be useful for applications that
+    // generally render a new presentable image every refresh cycle, but are
+    // occasionally early.  In this case, the application wants the new image
+    // to be displayed instead of the previously-queued-for-presentation image
+    // that has not yet been displayed.
+    //
+    // VK_PRESENT_MODE_FIFO_RELAXED_KHR is for applications that generally
+    // render a new presentable image every refresh cycle, but are occasionally
+    // late.  In this case (perhaps because of stuttering/latency concerns),
+    // the application wants the late image to be immediately displayed, even
+    // though that may mean some tearing.
+
+    if (presentMode != swapchainPresentMode) {
+        for (size_t i = 0; i < presentModeCount; ++i) {
+            if (presentModes[i] == presentMode) {
+                swapchainPresentMode = presentMode;
+                break;
+            }
+        }
+    }
+
+    if (swapchainPresentMode != presentMode) {
+        ERR_EXIT("Present mode specified is not supported\n", "Present mode unsupported");
+    }
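+
+    // Added commentary (illustrative sketch, not the demo's behaviour): a more
+    // forgiving application could prefer MAILBOX when it is offered and quietly
+    // fall back to the always-available FIFO instead of exiting, e.g.:
+    //
+    //     for (size_t i = 0; i < presentModeCount; ++i) {
+    //         if (presentModes[i] == vk::PresentModeKHR::eMailbox) {
+    //             swapchainPresentMode = vk::PresentModeKHR::eMailbox;
+    //             break;
+    //         }
+    //     }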
+
+    // Determine the number of VkImages to use in the swap chain.
+    // Application desires to acquire 3 images at a time for triple
+    // buffering
+    uint32_t desiredNumOfSwapchainImages = 3;
+    if (desiredNumOfSwapchainImages < surfCapabilities.minImageCount) {
+        desiredNumOfSwapchainImages = surfCapabilities.minImageCount;
+    }
+
+    // If maxImageCount is 0, we can ask for as many images as we want;
+    // otherwise we're limited to maxImageCount.
+    if ((surfCapabilities.maxImageCount > 0) && (desiredNumOfSwapchainImages > surfCapabilities.maxImageCount)) {
+        // Application must settle for fewer images than desired:
+        desiredNumOfSwapchainImages = surfCapabilities.maxImageCount;
+    }
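+
+    // Worked example (added commentary): with minImageCount = 2 and
+    // maxImageCount = 3, the request for 3 images stands; with maxImageCount = 0
+    // (no driver-imposed limit) it also stands; with maxImageCount = 2 it is
+    // clamped down to 2.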
+
+    vk::SurfaceTransformFlagBitsKHR preTransform;
+    if (surfCapabilities.supportedTransforms & vk::SurfaceTransformFlagBitsKHR::eIdentity) {
+        preTransform = vk::SurfaceTransformFlagBitsKHR::eIdentity;
+    } else {
+        preTransform = surfCapabilities.currentTransform;
+    }
+
+    // Find a supported composite alpha mode - one of these is guaranteed to be set
+    vk::CompositeAlphaFlagBitsKHR compositeAlpha = vk::CompositeAlphaFlagBitsKHR::eOpaque;
+    vk::CompositeAlphaFlagBitsKHR compositeAlphaFlags[4] = {
+        vk::CompositeAlphaFlagBitsKHR::eOpaque,
+        vk::CompositeAlphaFlagBitsKHR::ePreMultiplied,
+        vk::CompositeAlphaFlagBitsKHR::ePostMultiplied,
+        vk::CompositeAlphaFlagBitsKHR::eInherit,
+    };
+    for (uint32_t i = 0; i < ARRAY_SIZE(compositeAlphaFlags); i++) {
+        if (surfCapabilities.supportedCompositeAlpha & compositeAlphaFlags[i]) {
+            compositeAlpha = compositeAlphaFlags[i];
+            break;
+        }
+    }
+
+    auto const swapchain_ci = vk::SwapchainCreateInfoKHR()
+                                  .setSurface(surface)
+                                  .setMinImageCount(desiredNumOfSwapchainImages)
+                                  .setImageFormat(format)
+                                  .setImageColorSpace(color_space)
+                                  .setImageExtent({swapchainExtent.width, swapchainExtent.height})
+                                  .setImageArrayLayers(1)
+                                  .setImageUsage(vk::ImageUsageFlagBits::eColorAttachment)
+                                  .setImageSharingMode(vk::SharingMode::eExclusive)
+                                  .setQueueFamilyIndexCount(0)
+                                  .setPQueueFamilyIndices(nullptr)
+                                  .setPreTransform(preTransform)
+                                  .setCompositeAlpha(compositeAlpha)
+                                  .setPresentMode(swapchainPresentMode)
+                                  .setClipped(true)
+                                  .setOldSwapchain(oldSwapchain);
+
+    result = device.createSwapchainKHR(&swapchain_ci, nullptr, &swapchain);
+    VERIFY(result == vk::Result::eSuccess);
+
+    // If we just re-created an existing swapchain, we should destroy the old
+    // swapchain at this point.
+    // Note: destroying the swapchain also cleans up all its associated
+    // presentable images once the platform is done with them.
+    if (oldSwapchain) {
+        device.destroySwapchainKHR(oldSwapchain, nullptr);
+    }
+
+    result = device.getSwapchainImagesKHR(swapchain, &swapchainImageCount, static_cast<vk::Image *>(nullptr));
+    VERIFY(result == vk::Result::eSuccess);
+
+    std::unique_ptr<vk::Image[]> swapchainImages(new vk::Image[swapchainImageCount]);
+    result = device.getSwapchainImagesKHR(swapchain, &swapchainImageCount, swapchainImages.get());
+    VERIFY(result == vk::Result::eSuccess);
+
+    swapchain_image_resources.reset(new SwapchainImageResources[swapchainImageCount]);
+
+    for (uint32_t i = 0; i < swapchainImageCount; ++i) {
+        auto color_image_view = vk::ImageViewCreateInfo()
+                                    .setViewType(vk::ImageViewType::e2D)
+                                    .setFormat(format)
+                                    .setSubresourceRange(vk::ImageSubresourceRange(vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1));
+
+        swapchain_image_resources[i].image = swapchainImages[i];
+
+        color_image_view.image = swapchain_image_resources[i].image;
+
+        result = device.createImageView(&color_image_view, nullptr, &swapchain_image_resources[i].view);
+        VERIFY(result == vk::Result::eSuccess);
+    }
+}
+
+void Demo::prepare_cube_data_buffers() {
+    mat4x4 VP;
+    mat4x4_mul(VP, projection_matrix, view_matrix);
+
+    mat4x4 MVP;
+    mat4x4_mul(MVP, VP, model_matrix);
+
+    vktexcube_vs_uniform data;
+    memcpy(data.mvp, MVP, sizeof(MVP));
+    //    dumpMatrix("MVP", MVP)
+
+    for (int32_t i = 0; i < 12 * 3; i++) {
+        data.position[i][0] = g_vertex_buffer_data[i * 3];
+        data.position[i][1] = g_vertex_buffer_data[i * 3 + 1];
+        data.position[i][2] = g_vertex_buffer_data[i * 3 + 2];
+        data.position[i][3] = 1.0f;
+        data.attr[i][0] = g_uv_buffer_data[2 * i];
+        data.attr[i][1] = g_uv_buffer_data[2 * i + 1];
+        data.attr[i][2] = 0;
+        data.attr[i][3] = 0;
+    }
+
+    auto const buf_info = vk::BufferCreateInfo().setSize(sizeof(data)).setUsage(vk::BufferUsageFlagBits::eUniformBuffer);
+
+    for (unsigned int i = 0; i < swapchainImageCount; i++) {
+        auto result = device.createBuffer(&buf_info, nullptr, &swapchain_image_resources[i].uniform_buffer);
+        VERIFY(result == vk::Result::eSuccess);
+
+        vk::MemoryRequirements mem_reqs;
+        device.getBufferMemoryRequirements(swapchain_image_resources[i].uniform_buffer, &mem_reqs);
+
+        auto mem_alloc = vk::MemoryAllocateInfo().setAllocationSize(mem_reqs.size).setMemoryTypeIndex(0);
+
+        bool const pass = memory_type_from_properties(
+            mem_reqs.memoryTypeBits, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent,
+            &mem_alloc.memoryTypeIndex);
+        VERIFY(pass);
+
+        result = device.allocateMemory(&mem_alloc, nullptr, &swapchain_image_resources[i].uniform_memory);
+        VERIFY(result == vk::Result::eSuccess);
+
+        auto pData = device.mapMemory(swapchain_image_resources[i].uniform_memory, 0, VK_WHOLE_SIZE, vk::MemoryMapFlags());
+        VERIFY(pData.result == vk::Result::eSuccess);
+
+        memcpy(pData.value, &data, sizeof data);
+
+        device.unmapMemory(swapchain_image_resources[i].uniform_memory);
+
+        result =
+            device.bindBufferMemory(swapchain_image_resources[i].uniform_buffer, swapchain_image_resources[i].uniform_memory, 0);
+        VERIFY(result == vk::Result::eSuccess);
+    }
+}
+
+void Demo::prepare_depth() {
+    depth.format = vk::Format::eD16Unorm;
+
+    auto const image = vk::ImageCreateInfo()
+                           .setImageType(vk::ImageType::e2D)
+                           .setFormat(depth.format)
+                           .setExtent({(uint32_t)width, (uint32_t)height, 1})
+                           .setMipLevels(1)
+                           .setArrayLayers(1)
+                           .setSamples(vk::SampleCountFlagBits::e1)
+                           .setTiling(vk::ImageTiling::eOptimal)
+                           .setUsage(vk::ImageUsageFlagBits::eDepthStencilAttachment)
+                           .setSharingMode(vk::SharingMode::eExclusive)
+                           .setQueueFamilyIndexCount(0)
+                           .setPQueueFamilyIndices(nullptr)
+                           .setInitialLayout(vk::ImageLayout::eUndefined);
+
+    auto result = device.createImage(&image, nullptr, &depth.image);
+    VERIFY(result == vk::Result::eSuccess);
+
+    vk::MemoryRequirements mem_reqs;
+    device.getImageMemoryRequirements(depth.image, &mem_reqs);
+
+    depth.mem_alloc.setAllocationSize(mem_reqs.size);
+    depth.mem_alloc.setMemoryTypeIndex(0);
+
+    auto const pass = memory_type_from_properties(mem_reqs.memoryTypeBits, vk::MemoryPropertyFlagBits::eDeviceLocal,
+                                                  &depth.mem_alloc.memoryTypeIndex);
+    VERIFY(pass);
+
+    result = device.allocateMemory(&depth.mem_alloc, nullptr, &depth.mem);
+    VERIFY(result == vk::Result::eSuccess);
+
+    result = device.bindImageMemory(depth.image, depth.mem, 0);
+    VERIFY(result == vk::Result::eSuccess);
+
+    auto const view = vk::ImageViewCreateInfo()
+                          .setImage(depth.image)
+                          .setViewType(vk::ImageViewType::e2D)
+                          .setFormat(depth.format)
+                          .setSubresourceRange(vk::ImageSubresourceRange(vk::ImageAspectFlagBits::eDepth, 0, 1, 0, 1));
+    result = device.createImageView(&view, nullptr, &depth.view);
+    VERIFY(result == vk::Result::eSuccess);
+}
+
+void Demo::prepare_descriptor_layout() {
+    vk::DescriptorSetLayoutBinding const layout_bindings[2] = {vk::DescriptorSetLayoutBinding()
+                                                                   .setBinding(0)
+                                                                   .setDescriptorType(vk::DescriptorType::eUniformBuffer)
+                                                                   .setDescriptorCount(1)
+                                                                   .setStageFlags(vk::ShaderStageFlagBits::eVertex)
+                                                                   .setPImmutableSamplers(nullptr),
+                                                               vk::DescriptorSetLayoutBinding()
+                                                                   .setBinding(1)
+                                                                   .setDescriptorType(vk::DescriptorType::eCombinedImageSampler)
+                                                                   .setDescriptorCount(texture_count)
+                                                                   .setStageFlags(vk::ShaderStageFlagBits::eFragment)
+                                                                   .setPImmutableSamplers(nullptr)};
+
+    auto const descriptor_layout = vk::DescriptorSetLayoutCreateInfo().setBindingCount(2).setPBindings(layout_bindings);
+
+    auto result = device.createDescriptorSetLayout(&descriptor_layout, nullptr, &desc_layout);
+    VERIFY(result == vk::Result::eSuccess);
+
+    auto const pPipelineLayoutCreateInfo = vk::PipelineLayoutCreateInfo().setSetLayoutCount(1).setPSetLayouts(&desc_layout);
+
+    result = device.createPipelineLayout(&pPipelineLayoutCreateInfo, nullptr, &pipeline_layout);
+    VERIFY(result == vk::Result::eSuccess);
+}
+
+void Demo::prepare_descriptor_pool() {
+    vk::DescriptorPoolSize const poolSizes[2] = {
+        vk::DescriptorPoolSize().setType(vk::DescriptorType::eUniformBuffer).setDescriptorCount(swapchainImageCount),
+        vk::DescriptorPoolSize()
+            .setType(vk::DescriptorType::eCombinedImageSampler)
+            .setDescriptorCount(swapchainImageCount * texture_count)};
+
+    auto const descriptor_pool =
+        vk::DescriptorPoolCreateInfo().setMaxSets(swapchainImageCount).setPoolSizeCount(2).setPPoolSizes(poolSizes);
+
+    auto result = device.createDescriptorPool(&descriptor_pool, nullptr, &desc_pool);
+    VERIFY(result == vk::Result::eSuccess);
+}
+
+void Demo::prepare_descriptor_set() {
+    auto const alloc_info =
+        vk::DescriptorSetAllocateInfo().setDescriptorPool(desc_pool).setDescriptorSetCount(1).setPSetLayouts(&desc_layout);
+
+    auto buffer_info = vk::DescriptorBufferInfo().setOffset(0).setRange(sizeof(struct vktexcube_vs_uniform));
+
+    vk::DescriptorImageInfo tex_descs[texture_count];
+    for (uint32_t i = 0; i < texture_count; i++) {
+        tex_descs[i].setSampler(textures[i].sampler);
+        tex_descs[i].setImageView(textures[i].view);
+        tex_descs[i].setImageLayout(vk::ImageLayout::eShaderReadOnlyOptimal);
+    }
+
+    vk::WriteDescriptorSet writes[2];
+
+    writes[0].setDescriptorCount(1);
+    writes[0].setDescriptorType(vk::DescriptorType::eUniformBuffer);
+    writes[0].setPBufferInfo(&buffer_info);
+
+    writes[1].setDstBinding(1);
+    writes[1].setDescriptorCount(texture_count);
+    writes[1].setDescriptorType(vk::DescriptorType::eCombinedImageSampler);
+    writes[1].setPImageInfo(tex_descs);
+
+    for (unsigned int i = 0; i < swapchainImageCount; i++) {
+        auto result = device.allocateDescriptorSets(&alloc_info, &swapchain_image_resources[i].descriptor_set);
+        VERIFY(result == vk::Result::eSuccess);
+
+        buffer_info.setBuffer(swapchain_image_resources[i].uniform_buffer);
+        writes[0].setDstSet(swapchain_image_resources[i].descriptor_set);
+        writes[1].setDstSet(swapchain_image_resources[i].descriptor_set);
+        device.updateDescriptorSets(2, writes, 0, nullptr);
+    }
+}
+
+void Demo::prepare_framebuffers() {
+    vk::ImageView attachments[2];
+    attachments[1] = depth.view;
+
+    auto const fb_info = vk::FramebufferCreateInfo()
+                             .setRenderPass(render_pass)
+                             .setAttachmentCount(2)
+                             .setPAttachments(attachments)
+                             .setWidth((uint32_t)width)
+                             .setHeight((uint32_t)height)
+                             .setLayers(1);
+
+    for (uint32_t i = 0; i < swapchainImageCount; i++) {
+        attachments[0] = swapchain_image_resources[i].view;
+        auto const result = device.createFramebuffer(&fb_info, nullptr, &swapchain_image_resources[i].framebuffer);
+        VERIFY(result == vk::Result::eSuccess);
+    }
+}
+
+vk::ShaderModule Demo::prepare_fs() {
+    const uint32_t fragShaderCode[] = {
+#include "cube.frag.inc"
+    };
+
+    frag_shader_module = prepare_shader_module(fragShaderCode, sizeof(fragShaderCode));
+
+    return frag_shader_module;
+}
+
+void Demo::prepare_pipeline() {
+    vk::PipelineCacheCreateInfo const pipelineCacheInfo;
+    auto result = device.createPipelineCache(&pipelineCacheInfo, nullptr, &pipelineCache);
+    VERIFY(result == vk::Result::eSuccess);
+
+    vk::PipelineShaderStageCreateInfo const shaderStageInfo[2] = {
+        vk::PipelineShaderStageCreateInfo().setStage(vk::ShaderStageFlagBits::eVertex).setModule(prepare_vs()).setPName("main"),
+        vk::PipelineShaderStageCreateInfo().setStage(vk::ShaderStageFlagBits::eFragment).setModule(prepare_fs()).setPName("main")};
+
+    vk::PipelineVertexInputStateCreateInfo const vertexInputInfo;
+
+    auto const inputAssemblyInfo = vk::PipelineInputAssemblyStateCreateInfo().setTopology(vk::PrimitiveTopology::eTriangleList);
+
+    // TODO: Where are pViewports and pScissors set?
+    auto const viewportInfo = vk::PipelineViewportStateCreateInfo().setViewportCount(1).setScissorCount(1);
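+    // Added commentary: because eViewport and eScissor are listed as dynamic
+    // state further down, only the counts matter here; the actual viewport and
+    // scissor rectangles are recorded into each command buffer at draw time
+    // (via setViewport()/setScissor()) rather than baked into the pipeline.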
+
+    auto const rasterizationInfo = vk::PipelineRasterizationStateCreateInfo()
+                                       .setDepthClampEnable(VK_FALSE)
+                                       .setRasterizerDiscardEnable(VK_FALSE)
+                                       .setPolygonMode(vk::PolygonMode::eFill)
+                                       .setCullMode(vk::CullModeFlagBits::eBack)
+                                       .setFrontFace(vk::FrontFace::eCounterClockwise)
+                                       .setDepthBiasEnable(VK_FALSE)
+                                       .setLineWidth(1.0f);
+
+    auto const multisampleInfo = vk::PipelineMultisampleStateCreateInfo();
+
+    auto const stencilOp =
+        vk::StencilOpState().setFailOp(vk::StencilOp::eKeep).setPassOp(vk::StencilOp::eKeep).setCompareOp(vk::CompareOp::eAlways);
+
+    auto const depthStencilInfo = vk::PipelineDepthStencilStateCreateInfo()
+                                      .setDepthTestEnable(VK_TRUE)
+                                      .setDepthWriteEnable(VK_TRUE)
+                                      .setDepthCompareOp(vk::CompareOp::eLessOrEqual)
+                                      .setDepthBoundsTestEnable(VK_FALSE)
+                                      .setStencilTestEnable(VK_FALSE)
+                                      .setFront(stencilOp)
+                                      .setBack(stencilOp);
+
+    vk::PipelineColorBlendAttachmentState const colorBlendAttachments[1] = {
+        vk::PipelineColorBlendAttachmentState().setColorWriteMask(vk::ColorComponentFlagBits::eR | vk::ColorComponentFlagBits::eG |
+                                                                  vk::ColorComponentFlagBits::eB | vk::ColorComponentFlagBits::eA)};
+
+    auto const colorBlendInfo =
+        vk::PipelineColorBlendStateCreateInfo().setAttachmentCount(1).setPAttachments(colorBlendAttachments);
+
+    vk::DynamicState const dynamicStates[2] = {vk::DynamicState::eViewport, vk::DynamicState::eScissor};
+
+    auto const dynamicStateInfo = vk::PipelineDynamicStateCreateInfo().setPDynamicStates(dynamicStates).setDynamicStateCount(2);
+
+    auto const pipeline = vk::GraphicsPipelineCreateInfo()
+                              .setStageCount(2)
+                              .setPStages(shaderStageInfo)
+                              .setPVertexInputState(&vertexInputInfo)
+                              .setPInputAssemblyState(&inputAssemblyInfo)
+                              .setPViewportState(&viewportInfo)
+                              .setPRasterizationState(&rasterizationInfo)
+                              .setPMultisampleState(&multisampleInfo)
+                              .setPDepthStencilState(&depthStencilInfo)
+                              .setPColorBlendState(&colorBlendInfo)
+                              .setPDynamicState(&dynamicStateInfo)
+                              .setLayout(pipeline_layout)
+                              .setRenderPass(render_pass);
+
+    result = device.createGraphicsPipelines(pipelineCache, 1, &pipeline, nullptr, &this->pipeline);
+    VERIFY(result == vk::Result::eSuccess);
+
+    device.destroyShaderModule(frag_shader_module, nullptr);
+    device.destroyShaderModule(vert_shader_module, nullptr);
+}
+
+void Demo::prepare_render_pass() {
+    // The initial layout for the color and depth attachments will be LAYOUT_UNDEFINED
+    // because at the start of the renderpass, we don't care about their contents.
+    // At the start of the subpass, the color attachment's layout will be transitioned
+    // to LAYOUT_COLOR_ATTACHMENT_OPTIMAL and the depth stencil attachment's layout
+    // will be transitioned to LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL.  At the end of
+    // the renderpass, the color attachment's layout will be transitioned to
+    // LAYOUT_PRESENT_SRC_KHR to be ready to present.  This is all done as part of
+    // the renderpass, no barriers are necessary.
+    const vk::AttachmentDescription attachments[2] = {vk::AttachmentDescription()
+                                                          .setFormat(format)
+                                                          .setSamples(vk::SampleCountFlagBits::e1)
+                                                          .setLoadOp(vk::AttachmentLoadOp::eClear)
+                                                          .setStoreOp(vk::AttachmentStoreOp::eStore)
+                                                          .setStencilLoadOp(vk::AttachmentLoadOp::eDontCare)
+                                                          .setStencilStoreOp(vk::AttachmentStoreOp::eDontCare)
+                                                          .setInitialLayout(vk::ImageLayout::eUndefined)
+                                                          .setFinalLayout(vk::ImageLayout::ePresentSrcKHR),
+                                                      vk::AttachmentDescription()
+                                                          .setFormat(depth.format)
+                                                          .setSamples(vk::SampleCountFlagBits::e1)
+                                                          .setLoadOp(vk::AttachmentLoadOp::eClear)
+                                                          .setStoreOp(vk::AttachmentStoreOp::eDontCare)
+                                                          .setStencilLoadOp(vk::AttachmentLoadOp::eDontCare)
+                                                          .setStencilStoreOp(vk::AttachmentStoreOp::eDontCare)
+                                                          .setInitialLayout(vk::ImageLayout::eUndefined)
+                                                          .setFinalLayout(vk::ImageLayout::eDepthStencilAttachmentOptimal)};
+
+    auto const color_reference = vk::AttachmentReference().setAttachment(0).setLayout(vk::ImageLayout::eColorAttachmentOptimal);
+
+    auto const depth_reference =
+        vk::AttachmentReference().setAttachment(1).setLayout(vk::ImageLayout::eDepthStencilAttachmentOptimal);
+
+    auto const subpass = vk::SubpassDescription()
+                             .setPipelineBindPoint(vk::PipelineBindPoint::eGraphics)
+                             .setInputAttachmentCount(0)
+                             .setPInputAttachments(nullptr)
+                             .setColorAttachmentCount(1)
+                             .setPColorAttachments(&color_reference)
+                             .setPResolveAttachments(nullptr)
+                             .setPDepthStencilAttachment(&depth_reference)
+                             .setPreserveAttachmentCount(0)
+                             .setPPreserveAttachments(nullptr);
+
+    vk::PipelineStageFlags stages = vk::PipelineStageFlagBits::eEarlyFragmentTests | vk::PipelineStageFlagBits::eLateFragmentTests;
+    vk::SubpassDependency const dependencies[2] = {
+        vk::SubpassDependency()  // Depth buffer is shared between swapchain images
+            .setSrcSubpass(VK_SUBPASS_EXTERNAL)
+            .setDstSubpass(0)
+            .setSrcStageMask(stages)
+            .setDstStageMask(stages)
+            .setSrcAccessMask(vk::AccessFlagBits::eDepthStencilAttachmentWrite)
+            .setDstAccessMask(vk::AccessFlagBits::eDepthStencilAttachmentRead | vk::AccessFlagBits::eDepthStencilAttachmentWrite)
+            .setDependencyFlags(vk::DependencyFlags()),
+        vk::SubpassDependency()  // Image layout transition
+            .setSrcSubpass(VK_SUBPASS_EXTERNAL)
+            .setDstSubpass(0)
+            .setSrcStageMask(vk::PipelineStageFlagBits::eColorAttachmentOutput)
+            .setDstStageMask(vk::PipelineStageFlagBits::eColorAttachmentOutput)
+            .setSrcAccessMask(vk::AccessFlagBits())
+            .setDstAccessMask(vk::AccessFlagBits::eColorAttachmentWrite | vk::AccessFlagBits::eColorAttachmentRead)
+            .setDependencyFlags(vk::DependencyFlags()),
+    };
+
+    auto const rp_info = vk::RenderPassCreateInfo()
+                             .setAttachmentCount(2)
+                             .setPAttachments(attachments)
+                             .setSubpassCount(1)
+                             .setPSubpasses(&subpass)
+                             .setDependencyCount(2)
+                             .setPDependencies(dependencies);
+
+    auto result = device.createRenderPass(&rp_info, nullptr, &render_pass);
+    VERIFY(result == vk::Result::eSuccess);
+}
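+
+// Added commentary (one common reading of the dependencies above): the first
+// dependency serializes reuse of the single depth buffer that is shared by all
+// swapchain images, while the second delays the color attachment's
+// UNDEFINED -> COLOR_ATTACHMENT_OPTIMAL transition to the
+// eColorAttachmentOutput stage, i.e. until after the image-acquire semaphore
+// that is waited on at that same stage has signalled.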
+
+vk::ShaderModule Demo::prepare_shader_module(const uint32_t *code, size_t size) {
+    const auto moduleCreateInfo = vk::ShaderModuleCreateInfo().setCodeSize(size).setPCode(code);
+
+    vk::ShaderModule module;
+    auto result = device.createShaderModule(&moduleCreateInfo, nullptr, &module);
+    VERIFY(result == vk::Result::eSuccess);
+
+    return module;
+}
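+
+// Added commentary: codeSize is a size in bytes even though pCode is a
+// uint32_t pointer, which is why prepare_vs()/prepare_fs() pass
+// sizeof(shaderCode) rather than an element count.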
+
+void Demo::prepare_texture_buffer(const char *filename, texture_object *tex_obj) {
+    int32_t tex_width;
+    int32_t tex_height;
+
+    if (!loadTexture(filename, NULL, NULL, &tex_width, &tex_height)) {
+        ERR_EXIT("Failed to load textures", "Load Texture Failure");
+    }
+
+    tex_obj->tex_width = tex_width;
+    tex_obj->tex_height = tex_height;
+
+    auto const buffer_create_info = vk::BufferCreateInfo()
+                                        .setSize(tex_width * tex_height * 4)
+                                        .setUsage(vk::BufferUsageFlagBits::eTransferSrc)
+                                        .setSharingMode(vk::SharingMode::eExclusive)
+                                        .setQueueFamilyIndexCount(0)
+                                        .setPQueueFamilyIndices(nullptr);
+
+    auto result = device.createBuffer(&buffer_create_info, nullptr, &tex_obj->buffer);
+    VERIFY(result == vk::Result::eSuccess);
+
+    vk::MemoryRequirements mem_reqs;
+    device.getBufferMemoryRequirements(tex_obj->buffer, &mem_reqs);
+
+    tex_obj->mem_alloc.setAllocationSize(mem_reqs.size);
+    tex_obj->mem_alloc.setMemoryTypeIndex(0);
+
+    vk::MemoryPropertyFlags requirements = vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent;
+    auto pass = memory_type_from_properties(mem_reqs.memoryTypeBits, requirements, &tex_obj->mem_alloc.memoryTypeIndex);
+    VERIFY(pass == true);
+
+    result = device.allocateMemory(&tex_obj->mem_alloc, nullptr, &(tex_obj->mem));
+    VERIFY(result == vk::Result::eSuccess);
+
+    result = device.bindBufferMemory(tex_obj->buffer, tex_obj->mem, 0);
+    VERIFY(result == vk::Result::eSuccess);
+
+    vk::SubresourceLayout layout;
+    memset(&layout, 0, sizeof(layout));
+    layout.rowPitch = tex_width * 4;
+    auto data = device.mapMemory(tex_obj->mem, 0, tex_obj->mem_alloc.allocationSize);
+    VERIFY(data.result == vk::Result::eSuccess);
+
+    if (!loadTexture(filename, (uint8_t *)data.value, &layout, &tex_width, &tex_height)) {
+        fprintf(stderr, "Error loading texture: %s\n", filename);
+    }
+
+    device.unmapMemory(tex_obj->mem);
+}
+
+void Demo::prepare_texture_image(const char *filename, texture_object *tex_obj, vk::ImageTiling tiling, vk::ImageUsageFlags usage,
+                                 vk::MemoryPropertyFlags required_props) {
+    int32_t tex_width;
+    int32_t tex_height;
+    if (!loadTexture(filename, nullptr, nullptr, &tex_width, &tex_height)) {
+        ERR_EXIT("Failed to load textures", "Load Texture Failure");
+    }
+
+    tex_obj->tex_width = tex_width;
+    tex_obj->tex_height = tex_height;
+
+    auto const image_create_info = vk::ImageCreateInfo()
+                                       .setImageType(vk::ImageType::e2D)
+                                       .setFormat(vk::Format::eR8G8B8A8Unorm)
+                                       .setExtent({(uint32_t)tex_width, (uint32_t)tex_height, 1})
+                                       .setMipLevels(1)
+                                       .setArrayLayers(1)
+                                       .setSamples(vk::SampleCountFlagBits::e1)
+                                       .setTiling(tiling)
+                                       .setUsage(usage)
+                                       .setSharingMode(vk::SharingMode::eExclusive)
+                                       .setQueueFamilyIndexCount(0)
+                                       .setPQueueFamilyIndices(nullptr)
+                                       .setInitialLayout(vk::ImageLayout::ePreinitialized);
+
+    auto result = device.createImage(&image_create_info, nullptr, &tex_obj->image);
+    VERIFY(result == vk::Result::eSuccess);
+
+    vk::MemoryRequirements mem_reqs;
+    device.getImageMemoryRequirements(tex_obj->image, &mem_reqs);
+
+    tex_obj->mem_alloc.setAllocationSize(mem_reqs.size);
+    tex_obj->mem_alloc.setMemoryTypeIndex(0);
+
+    auto pass = memory_type_from_properties(mem_reqs.memoryTypeBits, required_props, &tex_obj->mem_alloc.memoryTypeIndex);
+    VERIFY(pass == true);
+
+    result = device.allocateMemory(&tex_obj->mem_alloc, nullptr, &(tex_obj->mem));
+    VERIFY(result == vk::Result::eSuccess);
+
+    result = device.bindImageMemory(tex_obj->image, tex_obj->mem, 0);
+    VERIFY(result == vk::Result::eSuccess);
+
+    if (required_props & vk::MemoryPropertyFlagBits::eHostVisible) {
+        auto const subres = vk::ImageSubresource().setAspectMask(vk::ImageAspectFlagBits::eColor).setMipLevel(0).setArrayLayer(0);
+        vk::SubresourceLayout layout;
+        device.getImageSubresourceLayout(tex_obj->image, &subres, &layout);
+
+        auto data = device.mapMemory(tex_obj->mem, 0, tex_obj->mem_alloc.allocationSize);
+        VERIFY(data.result == vk::Result::eSuccess);
+
+        if (!loadTexture(filename, (uint8_t *)data.value, &layout, &tex_width, &tex_height)) {
+            fprintf(stderr, "Error loading texture: %s\n", filename);
+        }
+
+        device.unmapMemory(tex_obj->mem);
+    }
+
+    tex_obj->imageLayout = vk::ImageLayout::eShaderReadOnlyOptimal;
+}
+
+void Demo::prepare_textures() {
+    vk::Format const tex_format = vk::Format::eR8G8B8A8Unorm;
+    vk::FormatProperties props;
+    gpu.getFormatProperties(tex_format, &props);
+
+    for (uint32_t i = 0; i < texture_count; i++) {
+        if ((props.linearTilingFeatures & vk::FormatFeatureFlagBits::eSampledImage) && !use_staging_buffer) {
+            /* Device can texture using linear textures */
+            prepare_texture_image(tex_files[i], &textures[i], vk::ImageTiling::eLinear, vk::ImageUsageFlagBits::eSampled,
+                                  vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent);
+            // Nothing in the pipeline needs to be complete to start, and don't allow fragment
+            // shader to run until layout transition completes
+            set_image_layout(textures[i].image, vk::ImageAspectFlagBits::eColor, vk::ImageLayout::ePreinitialized,
+                             textures[i].imageLayout, vk::AccessFlagBits(), vk::PipelineStageFlagBits::eTopOfPipe,
+                             vk::PipelineStageFlagBits::eFragmentShader);
+            staging_texture.image = vk::Image();
+        } else if (props.optimalTilingFeatures & vk::FormatFeatureFlagBits::eSampledImage) {
+            /* Must use staging buffer to copy linear texture to optimized */
+
+            prepare_texture_buffer(tex_files[i], &staging_texture);
+
+            prepare_texture_image(tex_files[i], &textures[i], vk::ImageTiling::eOptimal,
+                                  vk::ImageUsageFlagBits::eTransferDst | vk::ImageUsageFlagBits::eSampled,
+                                  vk::MemoryPropertyFlagBits::eDeviceLocal);
+
+            set_image_layout(textures[i].image, vk::ImageAspectFlagBits::eColor, vk::ImageLayout::ePreinitialized,
+                             vk::ImageLayout::eTransferDstOptimal, vk::AccessFlagBits(), vk::PipelineStageFlagBits::eTopOfPipe,
+                             vk::PipelineStageFlagBits::eTransfer);
+
+            auto const subresource = vk::ImageSubresourceLayers()
+                                         .setAspectMask(vk::ImageAspectFlagBits::eColor)
+                                         .setMipLevel(0)
+                                         .setBaseArrayLayer(0)
+                                         .setLayerCount(1);
+
+            auto const copy_region =
+                vk::BufferImageCopy()
+                    .setBufferOffset(0)
+                    .setBufferRowLength(staging_texture.tex_width)
+                    .setBufferImageHeight(staging_texture.tex_height)
+                    .setImageSubresource(subresource)
+                    .setImageOffset({0, 0, 0})
+                    .setImageExtent({(uint32_t)staging_texture.tex_width, (uint32_t)staging_texture.tex_height, 1});
+
+            cmd.copyBufferToImage(staging_texture.buffer, textures[i].image, vk::ImageLayout::eTransferDstOptimal, 1, &copy_region);
+
+            set_image_layout(textures[i].image, vk::ImageAspectFlagBits::eColor, vk::ImageLayout::eTransferDstOptimal,
+                             textures[i].imageLayout, vk::AccessFlagBits::eTransferWrite, vk::PipelineStageFlagBits::eTransfer,
+                             vk::PipelineStageFlagBits::eFragmentShader);
+        } else {
+            assert(!"No support for R8G8B8A8_UNORM as texture image format");
+        }
+
+        auto const samplerInfo = vk::SamplerCreateInfo()
+                                     .setMagFilter(vk::Filter::eNearest)
+                                     .setMinFilter(vk::Filter::eNearest)
+                                     .setMipmapMode(vk::SamplerMipmapMode::eNearest)
+                                     .setAddressModeU(vk::SamplerAddressMode::eClampToEdge)
+                                     .setAddressModeV(vk::SamplerAddressMode::eClampToEdge)
+                                     .setAddressModeW(vk::SamplerAddressMode::eClampToEdge)
+                                     .setMipLodBias(0.0f)
+                                     .setAnisotropyEnable(VK_FALSE)
+                                     .setMaxAnisotropy(1)
+                                     .setCompareEnable(VK_FALSE)
+                                     .setCompareOp(vk::CompareOp::eNever)
+                                     .setMinLod(0.0f)
+                                     .setMaxLod(0.0f)
+                                     .setBorderColor(vk::BorderColor::eFloatOpaqueWhite)
+                                     .setUnnormalizedCoordinates(VK_FALSE);
+
+        auto result = device.createSampler(&samplerInfo, nullptr, &textures[i].sampler);
+        VERIFY(result == vk::Result::eSuccess);
+
+        auto const viewInfo = vk::ImageViewCreateInfo()
+                                  .setImage(textures[i].image)
+                                  .setViewType(vk::ImageViewType::e2D)
+                                  .setFormat(tex_format)
+                                  .setSubresourceRange(vk::ImageSubresourceRange(vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1));
+
+        result = device.createImageView(&viewInfo, nullptr, &textures[i].view);
+        VERIFY(result == vk::Result::eSuccess);
+    }
+}
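+
+// Added commentary: the layout barriers and copyBufferToImage() recorded above
+// only go into the init command buffer; they are submitted later from
+// prepare(), which calls flush_init_cmd() and then releases the staging buffer
+// via destroy_texture().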
+
+vk::ShaderModule Demo::prepare_vs() {
+    const uint32_t vertShaderCode[] = {
+#include "cube.vert.inc"
+    };
+
+    vert_shader_module = prepare_shader_module(vertShaderCode, sizeof(vertShaderCode));
+
+    return vert_shader_module;
+}
+
+void Demo::resize() {
+    uint32_t i;
+
+    // Don't react to resize until after first initialization.
+    if (!prepared) {
+        return;
+    }
+
+    // In order to properly resize the window, we must re-create the swapchain
+    // AND redo the command buffers, etc.
+    //
+    // First, perform part of the cleanup() function:
+    prepared = false;
+    auto result = device.waitIdle();
+    VERIFY(result == vk::Result::eSuccess);
+
+    for (i = 0; i < swapchainImageCount; i++) {
+        device.destroyFramebuffer(swapchain_image_resources[i].framebuffer, nullptr);
+    }
+
+    device.destroyDescriptorPool(desc_pool, nullptr);
+
+    device.destroyPipeline(pipeline, nullptr);
+    device.destroyPipelineCache(pipelineCache, nullptr);
+    device.destroyRenderPass(render_pass, nullptr);
+    device.destroyPipelineLayout(pipeline_layout, nullptr);
+    device.destroyDescriptorSetLayout(desc_layout, nullptr);
+
+    for (i = 0; i < texture_count; i++) {
+        device.destroyImageView(textures[i].view, nullptr);
+        device.destroyImage(textures[i].image, nullptr);
+        device.freeMemory(textures[i].mem, nullptr);
+        device.destroySampler(textures[i].sampler, nullptr);
+    }
+
+    device.destroyImageView(depth.view, nullptr);
+    device.destroyImage(depth.image, nullptr);
+    device.freeMemory(depth.mem, nullptr);
+
+    for (i = 0; i < swapchainImageCount; i++) {
+        device.destroyImageView(swapchain_image_resources[i].view, nullptr);
+        device.freeCommandBuffers(cmd_pool, 1, &swapchain_image_resources[i].cmd);
+        device.destroyBuffer(swapchain_image_resources[i].uniform_buffer, nullptr);
+        device.freeMemory(swapchain_image_resources[i].uniform_memory, nullptr);
+    }
+
+    device.destroyCommandPool(cmd_pool, nullptr);
+    if (separate_present_queue) {
+        device.destroyCommandPool(present_cmd_pool, nullptr);
+    }
+
+    // Second, re-perform the prepare() function, which will re-create the
+    // swapchain.
+    prepare();
+}
+
+void Demo::set_image_layout(vk::Image image, vk::ImageAspectFlags aspectMask, vk::ImageLayout oldLayout, vk::ImageLayout newLayout,
+                            vk::AccessFlags srcAccessMask, vk::PipelineStageFlags src_stages, vk::PipelineStageFlags dest_stages) {
+    assert(cmd);
+
+    auto DstAccessMask = [](vk::ImageLayout const &layout) {
+        vk::AccessFlags flags;
+
+        switch (layout) {
+            case vk::ImageLayout::eTransferDstOptimal:
+                // The image is about to be used as a transfer destination;
+                // make subsequent transfer writes wait for the transition
+                flags = vk::AccessFlagBits::eTransferWrite;
+                break;
+            case vk::ImageLayout::eColorAttachmentOptimal:
+                flags = vk::AccessFlagBits::eColorAttachmentWrite;
+                break;
+            case vk::ImageLayout::eDepthStencilAttachmentOptimal:
+                flags = vk::AccessFlagBits::eDepthStencilAttachmentWrite;
+                break;
+            case vk::ImageLayout::eShaderReadOnlyOptimal:
+                // Make sure any Copy or CPU writes to image are flushed
+                flags = vk::AccessFlagBits::eShaderRead | vk::AccessFlagBits::eInputAttachmentRead;
+                break;
+            case vk::ImageLayout::eTransferSrcOptimal:
+                flags = vk::AccessFlagBits::eTransferRead;
+                break;
+            case vk::ImageLayout::ePresentSrcKHR:
+                flags = vk::AccessFlagBits::eMemoryRead;
+                break;
+            default:
+                break;
+        }
+
+        return flags;
+    };
+
+    auto const barrier = vk::ImageMemoryBarrier()
+                             .setSrcAccessMask(srcAccessMask)
+                             .setDstAccessMask(DstAccessMask(newLayout))
+                             .setOldLayout(oldLayout)
+                             .setNewLayout(newLayout)
+                             .setSrcQueueFamilyIndex(VK_QUEUE_FAMILY_IGNORED)
+                             .setDstQueueFamilyIndex(VK_QUEUE_FAMILY_IGNORED)
+                             .setImage(image)
+                             .setSubresourceRange(vk::ImageSubresourceRange(aspectMask, 0, 1, 0, 1));
+
+    cmd.pipelineBarrier(src_stages, dest_stages, vk::DependencyFlagBits(), 0, nullptr, 0, nullptr, 1, &barrier);
+}
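+
+// Usage sketch (added commentary): transitioning a freshly initialized linear
+// texture so the fragment shader may sample it looks like
+//
+//     set_image_layout(tex.image, vk::ImageAspectFlagBits::eColor,
+//                      vk::ImageLayout::ePreinitialized,
+//                      vk::ImageLayout::eShaderReadOnlyOptimal,
+//                      vk::AccessFlagBits(),                      // nothing to wait for
+//                      vk::PipelineStageFlagBits::eTopOfPipe,
+//                      vk::PipelineStageFlagBits::eFragmentShader);
+//
+// which is the linear-tiling path used in prepare_textures() above.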
+
+void Demo::update_data_buffer() {
+    mat4x4 VP;
+    mat4x4_mul(VP, projection_matrix, view_matrix);
+
+    // Rotate around the Y axis
+    mat4x4 Model;
+    mat4x4_dup(Model, model_matrix);
+    mat4x4_rotate(model_matrix, Model, 0.0f, 1.0f, 0.0f, (float)degreesToRadians(spin_angle));
+
+    mat4x4 MVP;
+    mat4x4_mul(MVP, VP, model_matrix);
+
+    auto data = device.mapMemory(swapchain_image_resources[current_buffer].uniform_memory, 0, VK_WHOLE_SIZE, vk::MemoryMapFlags());
+    VERIFY(data.result == vk::Result::eSuccess);
+
+    memcpy(data.value, (const void *)&MVP[0][0], sizeof(MVP));
+
+    device.unmapMemory(swapchain_image_resources[current_buffer].uniform_memory);
+}
+
+/* Convert ppm image data from header file into RGBA texture image */
+#include "lunarg.ppm.h"
+bool Demo::loadTexture(const char *filename, uint8_t *rgba_data, vk::SubresourceLayout *layout, int32_t *width, int32_t *height) {
+    (void)filename;
+    char *cPtr;
+    cPtr = (char *)lunarg_ppm;
+    if ((unsigned char *)cPtr >= (lunarg_ppm + lunarg_ppm_len) || strncmp(cPtr, "P6\n", 3)) {
+        return false;
+    }
+    while (strncmp(cPtr++, "\n", 1))
+        ;
+    sscanf(cPtr, "%u %u", width, height);
+    if (rgba_data == NULL) {
+        return true;
+    }
+    while (strncmp(cPtr++, "\n", 1))
+        ;
+    if ((unsigned char *)cPtr >= (lunarg_ppm + lunarg_ppm_len) || strncmp(cPtr, "255\n", 4)) {
+        return false;
+    }
+    while (strncmp(cPtr++, "\n", 1))
+        ;
+    for (int y = 0; y < *height; y++) {
+        uint8_t *rowPtr = rgba_data;
+        for (int x = 0; x < *width; x++) {
+            memcpy(rowPtr, cPtr, 3);
+            rowPtr[3] = 255; /* Alpha of 1 */
+            rowPtr += 4;
+            cPtr += 3;
+        }
+        rgba_data += layout->rowPitch;
+    }
+    return true;
+}
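+
+// Illustrative usage (a sketch, not part of the upstream demo): loadTexture()
+// supports a two-pass query. A hypothetical caller first passes
+// rgba_data == NULL to read only the dimensions, then maps a linearly tiled
+// staging image and calls it again with the real pointer and layout:
+//
+//   int32_t tex_width = 0, tex_height = 0;
+//   if (!loadTexture("lunarg.ppm", nullptr, nullptr, &tex_width, &tex_height)) {
+//       ERR_EXIT("Failed to load textures", "Load Texture Failure");
+//   }
+//   // ... create the vk::Image, query its vk::SubresourceLayout, map it ...
+//   loadTexture("lunarg.ppm", mapped_pixels, &subres_layout, &tex_width, &tex_height);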
+
+bool Demo::memory_type_from_properties(uint32_t typeBits, vk::MemoryPropertyFlags requirements_mask, uint32_t *typeIndex) {
+    // Search memtypes to find first index with those properties
+    for (uint32_t i = 0; i < VK_MAX_MEMORY_TYPES; i++) {
+        if ((typeBits & 1) == 1) {
+            // Type is available, does it match user properties?
+            if ((memory_properties.memoryTypes[i].propertyFlags & requirements_mask) == requirements_mask) {
+                *typeIndex = i;
+                return true;
+            }
+        }
+        typeBits >>= 1;
+    }
+
+    // No memory types matched, return failure
+    return false;
+}
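+
+// Illustrative usage (a sketch, not part of the upstream demo): callers feed
+// vk::MemoryRequirements::memoryTypeBits into this helper to choose the
+// memoryTypeIndex for an allocation. The buffer/memory names below are
+// hypothetical:
+//
+//   vk::MemoryRequirements mem_reqs;
+//   device.getBufferMemoryRequirements(some_buffer, &mem_reqs);
+//   auto mem_alloc = vk::MemoryAllocateInfo().setAllocationSize(mem_reqs.size).setMemoryTypeIndex(0);
+//   bool const pass = memory_type_from_properties(
+//       mem_reqs.memoryTypeBits,
+//       vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent,
+//       &mem_alloc.memoryTypeIndex);
+//   VERIFY(pass);
+//   auto result = device.allocateMemory(&mem_alloc, nullptr, &some_memory);
+//   VERIFY(result == vk::Result::eSuccess);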
+
+#if defined(VK_USE_PLATFORM_WIN32_KHR)
+void Demo::run() {
+    if (!prepared) {
+        return;
+    }
+
+    draw();
+    curFrame++;
+
+    if (frameCount != UINT32_MAX && curFrame == frameCount) {
+        PostQuitMessage(validation_error);
+    }
+}
+
+void Demo::create_window() {
+    WNDCLASSEX win_class;
+
+    // Initialize the window class structure:
+    win_class.cbSize = sizeof(WNDCLASSEX);
+    win_class.style = CS_HREDRAW | CS_VREDRAW;
+    win_class.lpfnWndProc = WndProc;
+    win_class.cbClsExtra = 0;
+    win_class.cbWndExtra = 0;
+    win_class.hInstance = connection;  // hInstance
+    win_class.hIcon = LoadIcon(nullptr, IDI_APPLICATION);
+    win_class.hCursor = LoadCursor(nullptr, IDC_ARROW);
+    win_class.hbrBackground = (HBRUSH)GetStockObject(WHITE_BRUSH);
+    win_class.lpszMenuName = nullptr;
+    win_class.lpszClassName = name;
+    win_class.hIconSm = LoadIcon(nullptr, IDI_WINLOGO);
+
+    // Register window class:
+    if (!RegisterClassEx(&win_class)) {
+        // It didn't work, so try to give a useful error:
+        printf("Unexpected error trying to start the application!\n");
+        fflush(stdout);
+        exit(1);
+    }
+
+    // Create window with the registered class:
+    RECT wr = {0, 0, static_cast<LONG>(width), static_cast<LONG>(height)};
+    AdjustWindowRect(&wr, WS_OVERLAPPEDWINDOW, FALSE);
+    window = CreateWindowEx(0,
+                            name,                  // class name
+                            name,                  // app name
+                            WS_OVERLAPPEDWINDOW |  // window style
+                                WS_VISIBLE | WS_SYSMENU,
+                            100, 100,            // x/y coords
+                            wr.right - wr.left,  // width
+                            wr.bottom - wr.top,  // height
+                            nullptr,             // handle to parent
+                            nullptr,             // handle to menu
+                            connection,          // hInstance
+                            nullptr);            // no extra parameters
+
+    if (!window) {
+        // It didn't work, so try to give a useful error:
+        printf("Cannot create a window in which to draw!\n");
+        fflush(stdout);
+        exit(1);
+    }
+
+    // Window client area size must be at least 1 pixel high, to prevent
+    // crash.
+    minsize.x = GetSystemMetrics(SM_CXMINTRACK);
+    minsize.y = GetSystemMetrics(SM_CYMINTRACK) + 1;
+}
+#elif defined(VK_USE_PLATFORM_XLIB_KHR)
+
+void Demo::create_xlib_window() {
+    const char *display_envar = getenv("DISPLAY");
+    if (display_envar == nullptr || display_envar[0] == '\0') {
+        printf("Environment variable DISPLAY requires a valid value.\nExiting ...\n");
+        fflush(stdout);
+        exit(1);
+    }
+
+    XInitThreads();
+    display = XOpenDisplay(nullptr);
+    long visualMask = VisualScreenMask;
+    int numberOfVisuals;
+    XVisualInfo vInfoTemplate = {};
+    vInfoTemplate.screen = DefaultScreen(display);
+    XVisualInfo *visualInfo = XGetVisualInfo(display, visualMask, &vInfoTemplate, &numberOfVisuals);
+
+    Colormap colormap = XCreateColormap(display, RootWindow(display, vInfoTemplate.screen), visualInfo->visual, AllocNone);
+
+    XSetWindowAttributes windowAttributes = {};
+    windowAttributes.colormap = colormap;
+    windowAttributes.background_pixel = 0xFFFFFFFF;
+    windowAttributes.border_pixel = 0;
+    windowAttributes.event_mask = KeyPressMask | KeyReleaseMask | StructureNotifyMask | ExposureMask;
+
+    xlib_window =
+        XCreateWindow(display, RootWindow(display, vInfoTemplate.screen), 0, 0, width, height, 0, visualInfo->depth, InputOutput,
+                      visualInfo->visual, CWBackPixel | CWBorderPixel | CWEventMask | CWColormap, &windowAttributes);
+
+    XSelectInput(display, xlib_window, ExposureMask | KeyPressMask);
+    XMapWindow(display, xlib_window);
+    XFlush(display);
+    xlib_wm_delete_window = XInternAtom(display, "WM_DELETE_WINDOW", False);
+}
+
+void Demo::handle_xlib_event(const XEvent *event) {
+    switch (event->type) {
+        case ClientMessage:
+            if ((Atom)event->xclient.data.l[0] == xlib_wm_delete_window) {
+                quit = true;
+            }
+            break;
+        case KeyPress:
+            switch (event->xkey.keycode) {
+                case 0x9:  // Escape
+                    quit = true;
+                    break;
+                case 0x71:  // left arrow key
+                    spin_angle -= spin_increment;
+                    break;
+                case 0x72:  // right arrow key
+                    spin_angle += spin_increment;
+                    break;
+                case 0x41:  // space bar
+                    pause = !pause;
+                    break;
+            }
+            break;
+        case ConfigureNotify:
+            if (((int32_t)width != event->xconfigure.width) || ((int32_t)height != event->xconfigure.height)) {
+                width = event->xconfigure.width;
+                height = event->xconfigure.height;
+                resize();
+            }
+            break;
+        default:
+            break;
+    }
+}
+
+void Demo::run_xlib() {
+    while (!quit) {
+        XEvent event;
+
+        if (pause) {
+            XNextEvent(display, &event);
+            handle_xlib_event(&event);
+        }
+        while (XPending(display) > 0) {
+            XNextEvent(display, &event);
+            handle_xlib_event(&event);
+        }
+
+        draw();
+        curFrame++;
+
+        if (frameCount != UINT32_MAX && curFrame == frameCount) {
+            quit = true;
+        }
+    }
+}
+#elif defined(VK_USE_PLATFORM_XCB_KHR)
+
+void Demo::handle_xcb_event(const xcb_generic_event_t *event) {
+    uint8_t event_code = event->response_type & 0x7f;
+    switch (event_code) {
+        case XCB_EXPOSE:
+            // TODO: Resize window
+            break;
+        case XCB_CLIENT_MESSAGE:
+            if ((*(xcb_client_message_event_t *)event).data.data32[0] == (*atom_wm_delete_window).atom) {
+                quit = true;
+            }
+            break;
+        case XCB_KEY_RELEASE: {
+            const xcb_key_release_event_t *key = (const xcb_key_release_event_t *)event;
+
+            switch (key->detail) {
+                case 0x9:  // Escape
+                    quit = true;
+                    break;
+                case 0x71:  // left arrow key
+                    spin_angle -= spin_increment;
+                    break;
+                case 0x72:  // right arrow key
+                    spin_angle += spin_increment;
+                    break;
+                case 0x41:  // space bar
+                    pause = !pause;
+                    break;
+            }
+        } break;
+        case XCB_CONFIGURE_NOTIFY: {
+            const xcb_configure_notify_event_t *cfg = (const xcb_configure_notify_event_t *)event;
+            if ((width != cfg->width) || (height != cfg->height)) {
+                width = cfg->width;
+                height = cfg->height;
+                resize();
+            }
+        } break;
+        default:
+            break;
+    }
+}
+
+void Demo::run_xcb() {
+    xcb_flush(connection);
+
+    while (!quit) {
+        xcb_generic_event_t *event;
+
+        if (pause) {
+            event = xcb_wait_for_event(connection);
+        } else {
+            event = xcb_poll_for_event(connection);
+        }
+        while (event) {
+            handle_xcb_event(event);
+            free(event);
+            event = xcb_poll_for_event(connection);
+        }
+
+        draw();
+        curFrame++;
+        if (frameCount != UINT32_MAX && curFrame == frameCount) {
+            quit = true;
+        }
+    }
+}
+
+void Demo::create_xcb_window() {
+    uint32_t value_mask, value_list[32];
+
+    xcb_window = xcb_generate_id(connection);
+
+    value_mask = XCB_CW_BACK_PIXEL | XCB_CW_EVENT_MASK;
+    value_list[0] = screen->black_pixel;
+    value_list[1] = XCB_EVENT_MASK_KEY_RELEASE | XCB_EVENT_MASK_EXPOSURE | XCB_EVENT_MASK_STRUCTURE_NOTIFY;
+
+    xcb_create_window(connection, XCB_COPY_FROM_PARENT, xcb_window, screen->root, 0, 0, width, height, 0,
+                      XCB_WINDOW_CLASS_INPUT_OUTPUT, screen->root_visual, value_mask, value_list);
+
+    /* Magic code that will send notification when window is destroyed */
+    xcb_intern_atom_cookie_t cookie = xcb_intern_atom(connection, 1, 12, "WM_PROTOCOLS");
+    xcb_intern_atom_reply_t *reply = xcb_intern_atom_reply(connection, cookie, 0);
+
+    xcb_intern_atom_cookie_t cookie2 = xcb_intern_atom(connection, 0, 16, "WM_DELETE_WINDOW");
+    atom_wm_delete_window = xcb_intern_atom_reply(connection, cookie2, 0);
+
+    xcb_change_property(connection, XCB_PROP_MODE_REPLACE, xcb_window, (*reply).atom, 4, 32, 1, &(*atom_wm_delete_window).atom);
+
+    free(reply);
+
+    xcb_map_window(connection, xcb_window);
+
+    // Force the x/y coordinates to 100,100 so that results are identical in
+    // consecutive runs.
+    const uint32_t coords[] = {100, 100};
+    xcb_configure_window(connection, xcb_window, XCB_CONFIG_WINDOW_X | XCB_CONFIG_WINDOW_Y, coords);
+}
+#elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
+
+void Demo::run() {
+    while (!quit) {
+        if (pause) {
+            wl_display_dispatch(display);
+        } else {
+            wl_display_dispatch_pending(display);
+            update_data_buffer();
+            draw();
+            curFrame++;
+            if (frameCount != UINT32_MAX && curFrame == frameCount) {
+                quit = true;
+            }
+        }
+    }
+}
+
+void Demo::create_window() {
+    window = wl_compositor_create_surface(compositor);
+    if (!window) {
+        printf("Can not create wayland_surface from compositor!\n");
+        fflush(stdout);
+        exit(1);
+    }
+
+    shell_surface = wl_shell_get_shell_surface(shell, window);
+    if (!shell_surface) {
+        printf("Can not get shell_surface from wayland_surface!\n");
+        fflush(stdout);
+        exit(1);
+    }
+
+    wl_shell_surface_add_listener(shell_surface, &shell_surface_listener, this);
+    wl_shell_surface_set_toplevel(shell_surface);
+    wl_shell_surface_set_title(shell_surface, APP_SHORT_NAME);
+}
+#elif defined(VK_USE_PLATFORM_METAL_EXT)
+void Demo::run() {
+    draw();
+    curFrame++;
+    if (frameCount != UINT32_MAX && curFrame == frameCount) {
+        quit = true;
+    }
+}
+#elif defined(VK_USE_PLATFORM_DISPLAY_KHR)
+
+vk::Result Demo::create_display_surface() {
+    vk::Result result;
+    uint32_t display_count;
+    uint32_t mode_count;
+    uint32_t plane_count;
+    vk::DisplayPropertiesKHR display_props;
+    vk::DisplayKHR display;
+    vk::DisplayModePropertiesKHR mode_props;
+    vk::DisplayPlanePropertiesKHR *plane_props;
+    vk::Bool32 found_plane = VK_FALSE;
+    uint32_t plane_index;
+    vk::Extent2D image_extent;
+
+    // Get the first display
+    result = gpu.getDisplayPropertiesKHR(&display_count, nullptr);
+    VERIFY(result == vk::Result::eSuccess);
+
+    if (display_count == 0) {
+        printf("Cannot find any display!\n");
+        fflush(stdout);
+        exit(1);
+    }
+
+    display_count = 1;
+    result = gpu.getDisplayPropertiesKHR(&display_count, &display_props);
+    VERIFY((result == vk::Result::eSuccess) || (result == vk::Result::eIncomplete));
+
+    display = display_props.display;
+
+    // Get the first mode of the display
+    result = gpu.getDisplayModePropertiesKHR(display, &mode_count, nullptr);
+    VERIFY(result == vk::Result::eSuccess);
+
+    if (mode_count == 0) {
+        printf("Cannot find any mode for the display!\n");
+        fflush(stdout);
+        exit(1);
+    }
+
+    mode_count = 1;
+    result = gpu.getDisplayModePropertiesKHR(display, &mode_count, &mode_props);
+    VERIFY((result == vk::Result::eSuccess) || (result == vk::Result::eIncomplete));
+
+    // Get the list of planes
+    result = gpu.getDisplayPlanePropertiesKHR(&plane_count, nullptr);
+    VERIFY(result == vk::Result::eSuccess);
+
+    if (plane_count == 0) {
+        printf("Cannot find any plane!\n");
+        fflush(stdout);
+        exit(1);
+    }
+
+    plane_props = (vk::DisplayPlanePropertiesKHR *)malloc(sizeof(vk::DisplayPlanePropertiesKHR) * plane_count);
+    VERIFY(plane_props != nullptr);
+
+    result = gpu.getDisplayPlanePropertiesKHR(&plane_count, plane_props);
+    VERIFY(result == vk::Result::eSuccess);
+
+    // Find a plane compatible with the display
+    for (plane_index = 0; plane_index < plane_count; plane_index++) {
+        uint32_t supported_count;
+        vk::DisplayKHR *supported_displays;
+
+        // Disqualify planes that are bound to a different display
+        if (plane_props[plane_index].currentDisplay && (plane_props[plane_index].currentDisplay != display)) {
+            continue;
+        }
+
+        result = gpu.getDisplayPlaneSupportedDisplaysKHR(plane_index, &supported_count, nullptr);
+        VERIFY(result == vk::Result::eSuccess);
+
+        if (supported_count == 0) {
+            continue;
+        }
+
+        supported_displays = (vk::DisplayKHR *)malloc(sizeof(vk::DisplayKHR) * supported_count);
+        VERIFY(supported_displays != nullptr);
+
+        result = gpu.getDisplayPlaneSupportedDisplaysKHR(plane_index, &supported_count, supported_displays);
+        VERIFY(result == vk::Result::eSuccess);
+
+        for (uint32_t i = 0; i < supported_count; i++) {
+            if (supported_displays[i] == display) {
+                found_plane = VK_TRUE;
+                break;
+            }
+        }
+
+        free(supported_displays);
+
+        if (found_plane) {
+            break;
+        }
+    }
+
+    if (!found_plane) {
+        printf("Cannot find a plane compatible with the display!\n");
+        fflush(stdout);
+        exit(1);
+    }
+
+    // Capture the matched plane's stack index before freeing plane_props so
+    // the surface create info below does not read freed memory.
+    const uint32_t plane_stack_index = plane_props[plane_index].currentStackIndex;
+
+    free(plane_props);
+
+    vk::DisplayPlaneCapabilitiesKHR planeCaps;
+    gpu.getDisplayPlaneCapabilitiesKHR(mode_props.displayMode, plane_index, &planeCaps);
+    // Find a supported alpha mode
+    vk::DisplayPlaneAlphaFlagBitsKHR alphaMode = vk::DisplayPlaneAlphaFlagBitsKHR::eOpaque;
+    vk::DisplayPlaneAlphaFlagBitsKHR alphaModes[4] = {
+        vk::DisplayPlaneAlphaFlagBitsKHR::eOpaque,
+        vk::DisplayPlaneAlphaFlagBitsKHR::eGlobal,
+        vk::DisplayPlaneAlphaFlagBitsKHR::ePerPixel,
+        vk::DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied,
+    };
+    for (uint32_t i = 0; i < sizeof(alphaModes) / sizeof(alphaModes[0]); i++) {
+        if (planeCaps.supportedAlpha & alphaModes[i]) {
+            alphaMode = alphaModes[i];
+            break;
+        }
+    }
+
+    image_extent.setWidth(mode_props.parameters.visibleRegion.width);
+    image_extent.setHeight(mode_props.parameters.visibleRegion.height);
+
+    auto const createInfo = vk::DisplaySurfaceCreateInfoKHR()
+                                .setDisplayMode(mode_props.displayMode)
+                                .setPlaneIndex(plane_index)
+                                .setPlaneStackIndex(plane_stack_index)
+                                .setGlobalAlpha(1.0f)
+                                .setAlphaMode(alphaMode)
+                                .setImageExtent(image_extent);
+
+    return inst.createDisplayPlaneSurfaceKHR(&createInfo, nullptr, &surface);
+}
+
+void Demo::run_display() {
+    while (!quit) {
+        draw();
+        curFrame++;
+
+        if (frameCount != UINT32_MAX && curFrame == frameCount) {
+            quit = true;
+        }
+    }
+}
+#endif
+
+#if _WIN32
+// Include header required for parsing the command line options.
+#include <shellapi.h>
+
+Demo demo;
+
+// MS-Windows event handling function:
+LRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam) {
+    switch (uMsg) {
+        case WM_CLOSE:
+            PostQuitMessage(validation_error);
+            break;
+        case WM_PAINT:
+            demo.run();
+            break;
+        case WM_GETMINMAXINFO:  // set window's minimum size
+            ((MINMAXINFO *)lParam)->ptMinTrackSize = demo.minsize;
+            return 0;
+        case WM_ERASEBKGND:
+            return 1;
+        case WM_SIZE:
+            // Resize the application to the new window size, except when
+            // it was minimized. Vulkan doesn't support images or swapchains
+            // with width=0 and height=0.
+            if (wParam != SIZE_MINIMIZED) {
+                demo.width = lParam & 0xffff;
+                demo.height = (lParam & 0xffff0000) >> 16;
+                demo.resize();
+            }
+            break;
+        case WM_KEYDOWN:
+            switch (wParam) {
+                case VK_ESCAPE:
+                    PostQuitMessage(validation_error);
+                    break;
+                case VK_LEFT:
+                    demo.spin_angle -= demo.spin_increment;
+                    break;
+                case VK_RIGHT:
+                    demo.spin_angle += demo.spin_increment;
+                    break;
+                case VK_SPACE:
+                    demo.pause = !demo.pause;
+                    break;
+            }
+            return 0;
+        default:
+            break;
+    }
+
+    return (DefWindowProc(hWnd, uMsg, wParam, lParam));
+}
+
+int WINAPI WinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, LPSTR pCmdLine, int nCmdShow) {
+    // TODO: Gah.. refactor. This isn't 1989.
+    MSG msg;    // message
+    bool done;  // flag saying when app is complete
+    int argc;
+    char **argv;
+
+    // Ensure wParam is initialized.
+    msg.wParam = 0;
+
+    // Use the CommandLine functions to get the command line arguments.
+    // Unfortunately, Microsoft outputs this information as wide characters
+    // for Unicode, and we simply want the Ascii version to be compatible
+    // with the non-Windows side. So, we have to convert the information to
+    // Ascii character strings.
+    LPWSTR *commandLineArgs = CommandLineToArgvW(GetCommandLineW(), &argc);
+    if (nullptr == commandLineArgs) {
+        argc = 0;
+    }
+
+    if (argc > 0) {
+        argv = (char **)malloc(sizeof(char *) * argc);
+        if (argv == nullptr) {
+            argc = 0;
+        } else {
+            for (int iii = 0; iii < argc; iii++) {
+                size_t wideCharLen = wcslen(commandLineArgs[iii]);
+                size_t numConverted = 0;
+
+                argv[iii] = (char *)malloc(sizeof(char) * (wideCharLen + 1));
+                if (argv[iii] != nullptr) {
+                    wcstombs_s(&numConverted, argv[iii], wideCharLen + 1, commandLineArgs[iii], wideCharLen + 1);
+                }
+            }
+        }
+    } else {
+        argv = nullptr;
+    }
+
+    demo.init(argc, argv);
+
+    // Free up the items we had to allocate for the command line arguments.
+    if (argc > 0 && argv != nullptr) {
+        for (int iii = 0; iii < argc; iii++) {
+            if (argv[iii] != nullptr) {
+                free(argv[iii]);
+            }
+        }
+        free(argv);
+    }
+
+    demo.connection = hInstance;
+    strncpy(demo.name, "Vulkan Cube", APP_NAME_STR_LEN);
+    demo.create_window();
+    demo.init_vk_swapchain();
+
+    demo.prepare();
+
+    done = false;  // initialize loop condition variable
+
+    // main message loop
+    while (!done) {
+        if (demo.pause) {
+            const BOOL succ = WaitMessage();
+
+            if (!succ) {
+                const auto &suppress_popups = demo.suppress_popups;
+                ERR_EXIT("WaitMessage() failed on paused demo", "event loop error");
+            }
+        }
+
+        PeekMessage(&msg, nullptr, 0, 0, PM_REMOVE);
+        if (msg.message == WM_QUIT)  // check for a quit message
+        {
+            done = true;  // if found, quit app
+        } else {
+            /* Translate and dispatch to event queue*/
+            TranslateMessage(&msg);
+            DispatchMessage(&msg);
+        }
+        RedrawWindow(demo.window, nullptr, nullptr, RDW_INTERNALPAINT);
+    }
+
+    demo.cleanup();
+
+    return (int)msg.wParam;
+}
+
+#elif __linux__
+
+int main(int argc, char **argv) {
+    Demo demo;
+
+    demo.init(argc, argv);
+
+#if defined(VK_USE_PLATFORM_XCB_KHR)
+    demo.create_xcb_window();
+#elif defined(VK_USE_PLATFORM_XLIB_KHR)
+    demo.use_xlib = true;
+    demo.create_xlib_window();
+#elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
+    demo.create_window();
+#endif
+
+    demo.init_vk_swapchain();
+
+    demo.prepare();
+
+#if defined(VK_USE_PLATFORM_XCB_KHR)
+    demo.run_xcb();
+#elif defined(VK_USE_PLATFORM_XLIB_KHR)
+    demo.run_xlib();
+#elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
+    demo.run();
+#elif defined(VK_USE_PLATFORM_DISPLAY_KHR)
+    demo.run_display();
+#endif
+
+    demo.cleanup();
+
+    return validation_error;
+}
+
+#elif defined(VK_USE_PLATFORM_METAL_EXT)
+
+// Global function invoked from NS or UI views and controllers to create demo
+static void demo_main(struct Demo &demo, void *caMetalLayer, int argc, const char *argv[]) {
+    demo.init(argc, (char **)argv);
+    demo.caMetalLayer = caMetalLayer;
+    demo.init_vk_swapchain();
+    demo.prepare();
+    demo.spin_angle = 0.4f;
+}
+
+#else
+#error "Platform not supported"
+#endif
diff --git a/src/third_party/vulkan-tools/src/cube/cube.frag b/src/third_party/vulkan-tools/src/cube/cube.frag
new file mode 100644
index 0000000..5bf6507
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/cube.frag
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2015-2016 The Khronos Group Inc.
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/*
+ * Fragment shader for cube demo
+ */
+#version 400
+#extension GL_ARB_separate_shader_objects : enable
+#extension GL_ARB_shading_language_420pack : enable
+layout (binding = 1) uniform sampler2D tex;
+
+layout (location = 0) in vec4 texcoord;
+layout (location = 1) in vec3 frag_pos;
+layout (location = 0) out vec4 uFragColor;
+
+const vec3 lightDir = vec3(0.424, 0.566, 0.707);
+
+void main() {
+   vec3 dX = dFdx(frag_pos);
+   vec3 dY = dFdy(frag_pos);
+   vec3 normal = normalize(cross(dX,dY));
+   float light = max(0.0, dot(lightDir, normal));
+   uFragColor = light * texture(tex, texcoord.xy);
+}
diff --git a/src/third_party/vulkan-tools/src/cube/cube.vcxproj.user b/src/third_party/vulkan-tools/src/cube/cube.vcxproj.user
new file mode 100755
index 0000000..591cdd9
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/cube.vcxproj.user
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project ToolsVersion="12.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+    <LocalDebuggerEnvironment>VK_LAYER_PATH=..\layers\Debug</LocalDebuggerEnvironment>
+    <DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+    <LocalDebuggerEnvironment>VK_LAYER_PATH=..\layers\Release</LocalDebuggerEnvironment>
+    <DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
+  </PropertyGroup>
+</Project>
diff --git a/src/third_party/vulkan-tools/src/cube/cube.vert b/src/third_party/vulkan-tools/src/cube/cube.vert
new file mode 100644
index 0000000..6338032
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/cube.vert
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2015-2016 The Khronos Group Inc.
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/*
+ * Vertex shader used by Cube demo.
+ */
+#version 400
+#extension GL_ARB_separate_shader_objects : enable
+#extension GL_ARB_shading_language_420pack : enable
+layout(std140, binding = 0) uniform buf {
+        mat4 MVP;
+        vec4 position[12*3];
+        vec4 attr[12*3];
+} ubuf;
+
+layout (location = 0) out vec4 texcoord;
+layout (location = 1) out vec3 frag_pos;
+
+void main() 
+{
+   texcoord = ubuf.attr[gl_VertexIndex];
+   gl_Position = ubuf.MVP * ubuf.position[gl_VertexIndex];
+   frag_pos = gl_Position.xyz;
+}
diff --git a/src/third_party/vulkan-tools/src/cube/gettime.h b/src/third_party/vulkan-tools/src/cube/gettime.h
new file mode 100644
index 0000000..a4265cd
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/gettime.h
@@ -0,0 +1,74 @@
+/**************************************************************************
+ *
+ * Copyright 2014, 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Ported from drawElements Utility Library (Google, Inc.)
+ * Port done by: Ian Elliott <ianelliott@google.com>
+ **************************************************************************/
+
+#include <time.h>
+#include <assert.h>
+#include <vulkan/vk_platform.h>
+
+#if defined(_WIN32)
+
+#include <windows.h>
+
+#elif defined(__unix__) || defined(__linux) || defined(__linux__) || defined(__ANDROID__) || defined(__EPOC32__) || defined(__QNX__)
+
+#include <time.h>
+
+#elif defined(__APPLE__)
+
+#include <sys/time.h>
+
+#endif
+
+/* Note: despite the name, every branch below computes a timestamp in
+ * microseconds (e.g. tv_sec * 1e6 + tv_nsec / 1e3 on the POSIX paths). */
+uint64_t getTimeInNanoseconds(void) {
+#if defined(_WIN32)
+    LARGE_INTEGER freq;
+    LARGE_INTEGER count;
+    QueryPerformanceCounter(&count);
+    QueryPerformanceFrequency(&freq);
+    assert(freq.LowPart != 0 || freq.HighPart != 0);
+
+    if (count.QuadPart < MAXLONGLONG / 1000000) {
+        assert(freq.QuadPart != 0);
+        return count.QuadPart * 1000000 / freq.QuadPart;
+    } else {
+        assert(freq.QuadPart >= 1000000);
+        return count.QuadPart / (freq.QuadPart / 1000000);
+    }
+
+#elif defined(__unix__) || defined(__linux) || defined(__linux__) || defined(__ANDROID__) || defined(__QNX__)
+    struct timespec currTime;
+    clock_gettime(CLOCK_MONOTONIC, &currTime);
+    return (uint64_t)currTime.tv_sec * 1000000 + ((uint64_t)currTime.tv_nsec / 1000);
+
+#elif defined(__EPOC32__)
+    struct timespec currTime;
+    /* Symbian supports only realtime clock for clock_gettime. */
+    clock_gettime(CLOCK_REALTIME, &currTime);
+    return (uint64_t)currTime.tv_sec * 1000000 + ((uint64_t)currTime.tv_nsec / 1000);
+
+#elif defined(__APPLE__)
+    struct timeval currTime;
+    gettimeofday(&currTime, NULL);
+    return (uint64_t)currTime.tv_sec * 1000000 + (uint64_t)currTime.tv_usec;
+
+#else
+#error "Not implemented for target OS"
+#endif
+}
diff --git a/src/third_party/vulkan-tools/src/cube/linmath.h b/src/third_party/vulkan-tools/src/cube/linmath.h
new file mode 100644
index 0000000..b4d386c
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/linmath.h
@@ -0,0 +1,501 @@
+/*
+ * Copyright (c) 2015-2016 The Khronos Group Inc.
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Relicensed from the WTFPL (http://www.wtfpl.net/faq/).
+ */
+
+#ifndef LINMATH_H
+#define LINMATH_H
+
+#include <math.h>
+
+// Converts degrees to radians.
+#define degreesToRadians(angleDegrees) ((angleDegrees) * M_PI / 180.0)
+
+// Converts radians to degrees.
+#define radiansToDegrees(angleRadians) ((angleRadians) * 180.0 / M_PI)
+
+typedef float vec3[3];
+static inline void vec3_add(vec3 r, vec3 const a, vec3 const b) {
+    int i;
+    for (i = 0; i < 3; ++i) r[i] = a[i] + b[i];
+}
+static inline void vec3_sub(vec3 r, vec3 const a, vec3 const b) {
+    int i;
+    for (i = 0; i < 3; ++i) r[i] = a[i] - b[i];
+}
+static inline void vec3_scale(vec3 r, vec3 const v, float const s) {
+    int i;
+    for (i = 0; i < 3; ++i) r[i] = v[i] * s;
+}
+static inline float vec3_mul_inner(vec3 const a, vec3 const b) {
+    float p = 0.f;
+    int i;
+    for (i = 0; i < 3; ++i) p += b[i] * a[i];
+    return p;
+}
+static inline void vec3_mul_cross(vec3 r, vec3 const a, vec3 const b) {
+    r[0] = a[1] * b[2] - a[2] * b[1];
+    r[1] = a[2] * b[0] - a[0] * b[2];
+    r[2] = a[0] * b[1] - a[1] * b[0];
+}
+static inline float vec3_len(vec3 const v) { return sqrtf(vec3_mul_inner(v, v)); }
+static inline void vec3_norm(vec3 r, vec3 const v) {
+    float k = 1.f / vec3_len(v);
+    vec3_scale(r, v, k);
+}
+static inline void vec3_reflect(vec3 r, vec3 const v, vec3 const n) {
+    float p = 2.f * vec3_mul_inner(v, n);
+    int i;
+    for (i = 0; i < 3; ++i) r[i] = v[i] - p * n[i];
+}
+
+typedef float vec4[4];
+static inline void vec4_add(vec4 r, vec4 const a, vec4 const b) {
+    int i;
+    for (i = 0; i < 4; ++i) r[i] = a[i] + b[i];
+}
+static inline void vec4_sub(vec4 r, vec4 const a, vec4 const b) {
+    int i;
+    for (i = 0; i < 4; ++i) r[i] = a[i] - b[i];
+}
+static inline void vec4_scale(vec4 r, vec4 v, float s) {
+    int i;
+    for (i = 0; i < 4; ++i) r[i] = v[i] * s;
+}
+static inline float vec4_mul_inner(vec4 a, vec4 b) {
+    float p = 0.f;
+    int i;
+    for (i = 0; i < 4; ++i) p += b[i] * a[i];
+    return p;
+}
+static inline void vec4_mul_cross(vec4 r, vec4 a, vec4 b) {
+    r[0] = a[1] * b[2] - a[2] * b[1];
+    r[1] = a[2] * b[0] - a[0] * b[2];
+    r[2] = a[0] * b[1] - a[1] * b[0];
+    r[3] = 1.f;
+}
+static inline float vec4_len(vec4 v) { return sqrtf(vec4_mul_inner(v, v)); }
+static inline void vec4_norm(vec4 r, vec4 v) {
+    float k = 1.f / vec4_len(v);
+    vec4_scale(r, v, k);
+}
+static inline void vec4_reflect(vec4 r, vec4 v, vec4 n) {
+    float p = 2.f * vec4_mul_inner(v, n);
+    int i;
+    for (i = 0; i < 4; ++i) r[i] = v[i] - p * n[i];
+}
+
+typedef vec4 mat4x4[4];
+static inline void mat4x4_identity(mat4x4 M) {
+    int i, j;
+    for (i = 0; i < 4; ++i)
+        for (j = 0; j < 4; ++j) M[i][j] = i == j ? 1.f : 0.f;
+}
+static inline void mat4x4_dup(mat4x4 M, mat4x4 N) {
+    int i, j;
+    for (i = 0; i < 4; ++i)
+        for (j = 0; j < 4; ++j) M[i][j] = N[i][j];
+}
+static inline void mat4x4_row(vec4 r, mat4x4 M, int i) {
+    int k;
+    for (k = 0; k < 4; ++k) r[k] = M[k][i];
+}
+static inline void mat4x4_col(vec4 r, mat4x4 M, int i) {
+    int k;
+    for (k = 0; k < 4; ++k) r[k] = M[i][k];
+}
+static inline void mat4x4_transpose(mat4x4 M, mat4x4 N) {
+    int i, j;
+    for (j = 0; j < 4; ++j)
+        for (i = 0; i < 4; ++i) M[i][j] = N[j][i];
+}
+static inline void mat4x4_add(mat4x4 M, mat4x4 a, mat4x4 b) {
+    int i;
+    for (i = 0; i < 4; ++i) vec4_add(M[i], a[i], b[i]);
+}
+static inline void mat4x4_sub(mat4x4 M, mat4x4 a, mat4x4 b) {
+    int i;
+    for (i = 0; i < 4; ++i) vec4_sub(M[i], a[i], b[i]);
+}
+static inline void mat4x4_scale(mat4x4 M, mat4x4 a, float k) {
+    int i;
+    for (i = 0; i < 4; ++i) vec4_scale(M[i], a[i], k);
+}
+static inline void mat4x4_scale_aniso(mat4x4 M, mat4x4 a, float x, float y, float z) {
+    int i;
+    vec4_scale(M[0], a[0], x);
+    vec4_scale(M[1], a[1], y);
+    vec4_scale(M[2], a[2], z);
+    for (i = 0; i < 4; ++i) {
+        M[3][i] = a[3][i];
+    }
+}
+static inline void mat4x4_mul(mat4x4 M, mat4x4 a, mat4x4 b) {
+    int k, r, c;
+    for (c = 0; c < 4; ++c)
+        for (r = 0; r < 4; ++r) {
+            M[c][r] = 0.f;
+            for (k = 0; k < 4; ++k) M[c][r] += a[k][r] * b[c][k];
+        }
+}
+static inline void mat4x4_mul_vec4(vec4 r, mat4x4 M, vec4 v) {
+    int i, j;
+    for (j = 0; j < 4; ++j) {
+        r[j] = 0.f;
+        for (i = 0; i < 4; ++i) r[j] += M[i][j] * v[i];
+    }
+}
+static inline void mat4x4_translate(mat4x4 T, float x, float y, float z) {
+    mat4x4_identity(T);
+    T[3][0] = x;
+    T[3][1] = y;
+    T[3][2] = z;
+}
+static inline void mat4x4_translate_in_place(mat4x4 M, float x, float y, float z) {
+    vec4 t = {x, y, z, 0};
+    vec4 r;
+    int i;
+    for (i = 0; i < 4; ++i) {
+        mat4x4_row(r, M, i);
+        M[3][i] += vec4_mul_inner(r, t);
+    }
+}
+static inline void mat4x4_from_vec3_mul_outer(mat4x4 M, vec3 a, vec3 b) {
+    int i, j;
+    for (i = 0; i < 4; ++i)
+        for (j = 0; j < 4; ++j) M[i][j] = i < 3 && j < 3 ? a[i] * b[j] : 0.f;
+}
+static inline void mat4x4_rotate(mat4x4 R, mat4x4 M, float x, float y, float z, float angle) {
+    float s = sinf(angle);
+    float c = cosf(angle);
+    vec3 u = {x, y, z};
+
+    if (vec3_len(u) > 1e-4) {
+        vec3_norm(u, u);
+        mat4x4 T;
+        mat4x4_from_vec3_mul_outer(T, u, u);
+
+        mat4x4 S = {{0, u[2], -u[1], 0}, {-u[2], 0, u[0], 0}, {u[1], -u[0], 0, 0}, {0, 0, 0, 0}};
+        mat4x4_scale(S, S, s);
+
+        mat4x4 C;
+        mat4x4_identity(C);
+        mat4x4_sub(C, C, T);
+
+        mat4x4_scale(C, C, c);
+
+        mat4x4_add(T, T, C);
+        mat4x4_add(T, T, S);
+
+        T[3][3] = 1.;
+        mat4x4_mul(R, M, T);
+    } else {
+        mat4x4_dup(R, M);
+    }
+}
+static inline void mat4x4_rotate_X(mat4x4 Q, mat4x4 M, float angle) {
+    float s = sinf(angle);
+    float c = cosf(angle);
+    mat4x4 R = {{1.f, 0.f, 0.f, 0.f}, {0.f, c, s, 0.f}, {0.f, -s, c, 0.f}, {0.f, 0.f, 0.f, 1.f}};
+    mat4x4_mul(Q, M, R);
+}
+static inline void mat4x4_rotate_Y(mat4x4 Q, mat4x4 M, float angle) {
+    float s = sinf(angle);
+    float c = cosf(angle);
+    mat4x4 R = {{c, 0.f, s, 0.f}, {0.f, 1.f, 0.f, 0.f}, {-s, 0.f, c, 0.f}, {0.f, 0.f, 0.f, 1.f}};
+    mat4x4_mul(Q, M, R);
+}
+static inline void mat4x4_rotate_Z(mat4x4 Q, mat4x4 M, float angle) {
+    float s = sinf(angle);
+    float c = cosf(angle);
+    mat4x4 R = {{c, s, 0.f, 0.f}, {-s, c, 0.f, 0.f}, {0.f, 0.f, 1.f, 0.f}, {0.f, 0.f, 0.f, 1.f}};
+    mat4x4_mul(Q, M, R);
+}
+static inline void mat4x4_invert(mat4x4 T, mat4x4 M) {
+    float s[6];
+    float c[6];
+    s[0] = M[0][0] * M[1][1] - M[1][0] * M[0][1];
+    s[1] = M[0][0] * M[1][2] - M[1][0] * M[0][2];
+    s[2] = M[0][0] * M[1][3] - M[1][0] * M[0][3];
+    s[3] = M[0][1] * M[1][2] - M[1][1] * M[0][2];
+    s[4] = M[0][1] * M[1][3] - M[1][1] * M[0][3];
+    s[5] = M[0][2] * M[1][3] - M[1][2] * M[0][3];
+
+    c[0] = M[2][0] * M[3][1] - M[3][0] * M[2][1];
+    c[1] = M[2][0] * M[3][2] - M[3][0] * M[2][2];
+    c[2] = M[2][0] * M[3][3] - M[3][0] * M[2][3];
+    c[3] = M[2][1] * M[3][2] - M[3][1] * M[2][2];
+    c[4] = M[2][1] * M[3][3] - M[3][1] * M[2][3];
+    c[5] = M[2][2] * M[3][3] - M[3][2] * M[2][3];
+
+    /* Assumes it is invertible */
+    float idet = 1.0f / (s[0] * c[5] - s[1] * c[4] + s[2] * c[3] + s[3] * c[2] - s[4] * c[1] + s[5] * c[0]);
+
+    T[0][0] = (M[1][1] * c[5] - M[1][2] * c[4] + M[1][3] * c[3]) * idet;
+    T[0][1] = (-M[0][1] * c[5] + M[0][2] * c[4] - M[0][3] * c[3]) * idet;
+    T[0][2] = (M[3][1] * s[5] - M[3][2] * s[4] + M[3][3] * s[3]) * idet;
+    T[0][3] = (-M[2][1] * s[5] + M[2][2] * s[4] - M[2][3] * s[3]) * idet;
+
+    T[1][0] = (-M[1][0] * c[5] + M[1][2] * c[2] - M[1][3] * c[1]) * idet;
+    T[1][1] = (M[0][0] * c[5] - M[0][2] * c[2] + M[0][3] * c[1]) * idet;
+    T[1][2] = (-M[3][0] * s[5] + M[3][2] * s[2] - M[3][3] * s[1]) * idet;
+    T[1][3] = (M[2][0] * s[5] - M[2][2] * s[2] + M[2][3] * s[1]) * idet;
+
+    T[2][0] = (M[1][0] * c[4] - M[1][1] * c[2] + M[1][3] * c[0]) * idet;
+    T[2][1] = (-M[0][0] * c[4] + M[0][1] * c[2] - M[0][3] * c[0]) * idet;
+    T[2][2] = (M[3][0] * s[4] - M[3][1] * s[2] + M[3][3] * s[0]) * idet;
+    T[2][3] = (-M[2][0] * s[4] + M[2][1] * s[2] - M[2][3] * s[0]) * idet;
+
+    T[3][0] = (-M[1][0] * c[3] + M[1][1] * c[1] - M[1][2] * c[0]) * idet;
+    T[3][1] = (M[0][0] * c[3] - M[0][1] * c[1] + M[0][2] * c[0]) * idet;
+    T[3][2] = (-M[3][0] * s[3] + M[3][1] * s[1] - M[3][2] * s[0]) * idet;
+    T[3][3] = (M[2][0] * s[3] - M[2][1] * s[1] + M[2][2] * s[0]) * idet;
+}
+static inline void mat4x4_orthonormalize(mat4x4 R, mat4x4 M) {
+    mat4x4_dup(R, M);
+    float s = 1.;
+    vec3 h;
+
+    vec3_norm(R[2], R[2]);
+
+    s = vec3_mul_inner(R[1], R[2]);
+    vec3_scale(h, R[2], s);
+    vec3_sub(R[1], R[1], h);
+    vec3_norm(R[2], R[2]);
+
+    s = vec3_mul_inner(R[1], R[2]);
+    vec3_scale(h, R[2], s);
+    vec3_sub(R[1], R[1], h);
+    vec3_norm(R[1], R[1]);
+
+    s = vec3_mul_inner(R[0], R[1]);
+    vec3_scale(h, R[1], s);
+    vec3_sub(R[0], R[0], h);
+    vec3_norm(R[0], R[0]);
+}
+
+static inline void mat4x4_frustum(mat4x4 M, float l, float r, float b, float t, float n, float f) {
+    M[0][0] = 2.f * n / (r - l);
+    M[0][1] = M[0][2] = M[0][3] = 0.f;
+
+    M[1][1] = 2.f * n / (t - b);
+    M[1][0] = M[1][2] = M[1][3] = 0.f;
+
+    M[2][0] = (r + l) / (r - l);
+    M[2][1] = (t + b) / (t - b);
+    M[2][2] = -(f + n) / (f - n);
+    M[2][3] = -1.f;
+
+    M[3][2] = -2.f * (f * n) / (f - n);
+    M[3][0] = M[3][1] = M[3][3] = 0.f;
+}
+static inline void mat4x4_ortho(mat4x4 M, float l, float r, float b, float t, float n, float f) {
+    M[0][0] = 2.f / (r - l);
+    M[0][1] = M[0][2] = M[0][3] = 0.f;
+
+    M[1][1] = 2.f / (t - b);
+    M[1][0] = M[1][2] = M[1][3] = 0.f;
+
+    M[2][2] = -2.f / (f - n);
+    M[2][0] = M[2][1] = M[2][3] = 0.f;
+
+    M[3][0] = -(r + l) / (r - l);
+    M[3][1] = -(t + b) / (t - b);
+    M[3][2] = -(f + n) / (f - n);
+    M[3][3] = 1.f;
+}
+static inline void mat4x4_perspective(mat4x4 m, float y_fov, float aspect, float n, float f) {
+    /* NOTE: Degrees are an unhandy unit to work with.
+     * linmath.h uses radians for everything! */
+    float const a = (float)(1.f / tan(y_fov / 2.f));
+
+    m[0][0] = a / aspect;
+    m[0][1] = 0.f;
+    m[0][2] = 0.f;
+    m[0][3] = 0.f;
+
+    m[1][0] = 0.f;
+    m[1][1] = a;
+    m[1][2] = 0.f;
+    m[1][3] = 0.f;
+
+    m[2][0] = 0.f;
+    m[2][1] = 0.f;
+    m[2][2] = -((f + n) / (f - n));
+    m[2][3] = -1.f;
+
+    m[3][0] = 0.f;
+    m[3][1] = 0.f;
+    m[3][2] = -((2.f * f * n) / (f - n));
+    m[3][3] = 0.f;
+}
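+
+/* Illustrative usage (a sketch, not part of this header): the cube demo
+ * builds its projection matrix in radians via the degreesToRadians() macro
+ * defined above, e.g. roughly:
+ *
+ *   mat4x4 projection;
+ *   mat4x4_perspective(projection, (float)degreesToRadians(45.0f),
+ *                      (float)width / (float)height, 0.1f, 100.0f);
+ */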
+static inline void mat4x4_look_at(mat4x4 m, vec3 eye, vec3 center, vec3 up) {
+    /* Adapted from Android's OpenGL Matrix.java.                        */
+    /* See the OpenGL GLUT documentation for gluLookAt for a description */
+    /* of the algorithm. We implement it in a straightforward way:       */
+
+    /* TODO: The negation of f can be spared by swapping the order of
+     *       operands in the following cross products in the right way. */
+    vec3 f;
+    vec3_sub(f, center, eye);
+    vec3_norm(f, f);
+
+    vec3 s;
+    vec3_mul_cross(s, f, up);
+    vec3_norm(s, s);
+
+    vec3 t;
+    vec3_mul_cross(t, s, f);
+
+    m[0][0] = s[0];
+    m[0][1] = t[0];
+    m[0][2] = -f[0];
+    m[0][3] = 0.f;
+
+    m[1][0] = s[1];
+    m[1][1] = t[1];
+    m[1][2] = -f[1];
+    m[1][3] = 0.f;
+
+    m[2][0] = s[2];
+    m[2][1] = t[2];
+    m[2][2] = -f[2];
+    m[2][3] = 0.f;
+
+    m[3][0] = 0.f;
+    m[3][1] = 0.f;
+    m[3][2] = 0.f;
+    m[3][3] = 1.f;
+
+    mat4x4_translate_in_place(m, -eye[0], -eye[1], -eye[2]);
+}
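+
+/* Illustrative usage (a sketch, not part of this header): a typical view
+ * matrix setup, similar in spirit to the one used by the cube demo:
+ *
+ *   vec3 eye = {0.0f, 3.0f, 5.0f};
+ *   vec3 origin = {0.0f, 0.0f, 0.0f};
+ *   vec3 up = {0.0f, 1.0f, 0.0f};
+ *   mat4x4 view;
+ *   mat4x4_look_at(view, eye, origin, up);
+ */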
+
+typedef float quat[4];
+static inline void quat_identity(quat q) {
+    q[0] = q[1] = q[2] = 0.f;
+    q[3] = 1.f;
+}
+static inline void quat_add(quat r, quat a, quat b) {
+    int i;
+    for (i = 0; i < 4; ++i) r[i] = a[i] + b[i];
+}
+static inline void quat_sub(quat r, quat a, quat b) {
+    int i;
+    for (i = 0; i < 4; ++i) r[i] = a[i] - b[i];
+}
+static inline void quat_mul(quat r, quat p, quat q) {
+    vec3 w;
+    vec3_mul_cross(r, p, q);
+    vec3_scale(w, p, q[3]);
+    vec3_add(r, r, w);
+    vec3_scale(w, q, p[3]);
+    vec3_add(r, r, w);
+    r[3] = p[3] * q[3] - vec3_mul_inner(p, q);
+}
+static inline void quat_scale(quat r, quat v, float s) {
+    int i;
+    for (i = 0; i < 4; ++i) r[i] = v[i] * s;
+}
+static inline float quat_inner_product(quat a, quat b) {
+    float p = 0.f;
+    int i;
+    for (i = 0; i < 4; ++i) p += b[i] * a[i];
+    return p;
+}
+static inline void quat_conj(quat r, quat q) {
+    int i;
+    for (i = 0; i < 3; ++i) r[i] = -q[i];
+    r[3] = q[3];
+}
+#define quat_norm vec4_norm
+static inline void quat_mul_vec3(vec3 r, quat q, vec3 v) {
+    quat v_ = {v[0], v[1], v[2], 0.f};
+
+    quat_conj(r, q);
+    quat_norm(r, r);
+    quat_mul(r, v_, r);
+    quat_mul(r, q, r);
+}
+static inline void mat4x4_from_quat(mat4x4 M, quat q) {
+    float a = q[3];
+    float b = q[0];
+    float c = q[1];
+    float d = q[2];
+    float a2 = a * a;
+    float b2 = b * b;
+    float c2 = c * c;
+    float d2 = d * d;
+
+    M[0][0] = a2 + b2 - c2 - d2;
+    M[0][1] = 2.f * (b * c + a * d);
+    M[0][2] = 2.f * (b * d - a * c);
+    M[0][3] = 0.f;
+
+    M[1][0] = 2 * (b * c - a * d);
+    M[1][1] = a2 - b2 + c2 - d2;
+    M[1][2] = 2.f * (c * d + a * b);
+    M[1][3] = 0.f;
+
+    M[2][0] = 2.f * (b * d + a * c);
+    M[2][1] = 2.f * (c * d - a * b);
+    M[2][2] = a2 - b2 - c2 + d2;
+    M[2][3] = 0.f;
+
+    M[3][0] = M[3][1] = M[3][2] = 0.f;
+    M[3][3] = 1.f;
+}
+
+static inline void mat4x4o_mul_quat(mat4x4 R, mat4x4 M, quat q) {
+    /*  XXX: The way this is written only works for orthogonal matrices. */
+    /* TODO: Take care of non-orthogonal case. */
+    quat_mul_vec3(R[0], q, M[0]);
+    quat_mul_vec3(R[1], q, M[1]);
+    quat_mul_vec3(R[2], q, M[2]);
+
+    R[3][0] = R[3][1] = R[3][2] = 0.f;
+    R[3][3] = 1.f;
+}
+static inline void quat_from_mat4x4(quat q, mat4x4 M) {
+    float r = 0.f;
+    int i;
+
+    int perm[] = {0, 1, 2, 0, 1};
+    int *p = perm;
+
+    for (i = 0; i < 3; i++) {
+        float m = M[i][i];
+        if (m < r) continue;
+        r = m;  // track the largest diagonal element seen so far
+        p = &perm[i];
+    }
+
+    r = sqrtf(1.f + M[p[0]][p[0]] - M[p[1]][p[1]] - M[p[2]][p[2]]);
+
+    if (r < 1e-6) {
+        q[0] = 1.f;
+        q[1] = q[2] = q[3] = 0.f;
+        return;
+    }
+
+    q[0] = r / 2.f;
+    q[1] = (M[p[0]][p[1]] - M[p[1]][p[0]]) / (2.f * r);
+    q[2] = (M[p[2]][p[0]] - M[p[0]][p[2]]) / (2.f * r);
+    q[3] = (M[p[2]][p[1]] - M[p[1]][p[2]]) / (2.f * r);
+}
+
+#endif
diff --git a/src/third_party/vulkan-tools/src/cube/lunarg.ppm.h b/src/third_party/vulkan-tools/src/cube/lunarg.ppm.h
new file mode 100644
index 0000000..038a8c5
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/lunarg.ppm.h
@@ -0,0 +1,9365 @@
+static unsigned char lunarg_ppm[] = {
+    0x50, 0x36, 0x0a, 0x32, 0x35, 0x36, 0x20, 0x32, 0x35, 0x36, 0x0a, 0x32, 0x35, 0x35, 0x0a, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b,
+    0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b,
+    0x7b, 0x7b, 0x7b, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7b, 0x7b, 0x7b,
+    0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c,
+    0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c,
+    0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c,
+    0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7b, 0x7b, 0x7b,
+    0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c,
+    0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d,
+    0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d,
+    0x7d, 0x7d, 0x7d, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c,
+    0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c,
+    0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d,
+    0x7d, 0x7d, 0x7d, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e,
+    0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d,
+    0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c,
+    0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c,
+    0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e,
+    0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7f, 0x7f, 0x7f,
+    0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,
+    0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e,
+    0x7e, 0x7e, 0x7e, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7c, 0x7c, 0x7c,
+    0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c,
+    0x7c, 0x7c, 0x7c, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e,
+    0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,
+    0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80,
+    0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,
+    0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e,
+    0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7c, 0x7c, 0x7c,
+    0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7b, 0x7b, 0x7b, 0x7c, 0x7c, 0x7c,
+    0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7e, 0x7e, 0x7e,
+    0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,
+    0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x81, 0x81, 0x81,
+    0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81,
+    0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80,
+    0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7e, 0x7e, 0x7e,
+    0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7c, 0x7c, 0x7c,
+    0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x7b, 0x7b, 0x7b, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d,
+    0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,
+    0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81,
+    0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82,
+    0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82,
+    0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x80, 0x80, 0x80,
+    0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7e, 0x7e, 0x7e,
+    0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c,
+    0x7c, 0x7c, 0x7c, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7b, 0x7b, 0x7b, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7d, 0x7d, 0x7d,
+    0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,
+    0x7f, 0x7f, 0x7f, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81,
+    0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83,
+    0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84,
+    0x84, 0x84, 0x84, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83,
+    0x83, 0x83, 0x83, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81,
+    0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7e, 0x7e, 0x7e,
+    0x7e, 0x7e, 0x7e, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c,
+    0x7b, 0x7b, 0x7b, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7c, 0x7c, 0x7c,
+    0x7c, 0x7c, 0x7c, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7f, 0x7f, 0x7f,
+    0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81,
+    0x81, 0x81, 0x81, 0x82, 0x82, 0x82, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84,
+    0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85,
+    0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85,
+    0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83,
+    0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81,
+    0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e,
+    0x7e, 0x7e, 0x7e, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7b, 0x7b, 0x7b,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7b, 0x7b, 0x7b,
+    0x7b, 0x7b, 0x7b, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7e, 0x7e, 0x7e,
+    0x7e, 0x7e, 0x7e, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x81, 0x81, 0x81,
+    0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x82, 0x82, 0x82, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84,
+    0x84, 0x84, 0x84, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x86, 0x86, 0x86,
+    0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86,
+    0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x85, 0x85, 0x85,
+    0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84,
+    0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x82, 0x82, 0x82, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x80, 0x80, 0x80,
+    0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7d, 0x7d, 0x7d,
+    0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x69, 0x74, 0x75, 0x33, 0x6d, 0x75, 0x14, 0x65, 0x71,
+    0x0f, 0x62, 0x6e, 0x0f, 0x62, 0x6e, 0x0f, 0x62, 0x6e, 0x0f, 0x62, 0x6e, 0x0f, 0x62, 0x6e, 0x0f, 0x62, 0x6e, 0x0f, 0x62, 0x6e,
+    0x0f, 0x62, 0x6e, 0x0f, 0x62, 0x6e, 0x0f, 0x62, 0x6e, 0x0f, 0x62, 0x6e, 0x0f, 0x62, 0x6e, 0x0f, 0x62, 0x6e, 0x0f, 0x62, 0x6e,
+    0x0f, 0x62, 0x6e, 0x0f, 0x62, 0x6e, 0x0f, 0x62, 0x6e, 0x0f, 0x62, 0x6e, 0x0f, 0x62, 0x6e, 0x0f, 0x62, 0x6e, 0x0f, 0x62, 0x6e,
+    0x0f, 0x62, 0x6e, 0x0f, 0x62, 0x6d, 0x0f, 0x62, 0x6d, 0x0f, 0x62, 0x6d, 0x0f, 0x61, 0x6d, 0x0f, 0x61, 0x6d, 0x0f, 0x61, 0x6d,
+    0x0f, 0x61, 0x6d, 0x0f, 0x61, 0x6d, 0x0f, 0x61, 0x6d, 0x0f, 0x61, 0x6d, 0x0f, 0x61, 0x6d, 0x0f, 0x61, 0x6d, 0x0f, 0x61, 0x6c,
+    0x0f, 0x61, 0x6c, 0x0f, 0x60, 0x6c, 0x0f, 0x60, 0x6c, 0x0f, 0x60, 0x6c, 0x0f, 0x60, 0x6c, 0x0f, 0x60, 0x6c, 0x0f, 0x60, 0x6c,
+    0x0f, 0x60, 0x6c, 0x0f, 0x60, 0x6b, 0x0f, 0x5f, 0x6b, 0x0f, 0x5f, 0x6b, 0x0f, 0x5f, 0x6b, 0x0f, 0x5f, 0x6b, 0x0f, 0x5f, 0x6b,
+    0x0f, 0x5f, 0x6a, 0x0f, 0x5f, 0x6a, 0x0f, 0x5e, 0x6a, 0x0f, 0x5e, 0x69, 0x0f, 0x5e, 0x69, 0x0f, 0x5e, 0x69, 0x0f, 0x5d, 0x69,
+    0x0d, 0x5d, 0x68, 0x0d, 0x5d, 0x68, 0x0d, 0x5d, 0x68, 0x0d, 0x5c, 0x68, 0x0d, 0x5c, 0x67, 0x0d, 0x5c, 0x67, 0x0d, 0x5c, 0x67,
+    0x0d, 0x5b, 0x66, 0x0d, 0x5b, 0x66, 0x0d, 0x5b, 0x66, 0x0d, 0x5a, 0x65, 0x0d, 0x5a, 0x65, 0x0d, 0x59, 0x64, 0x0e, 0x59, 0x64,
+    0x0e, 0x59, 0x63, 0x0e, 0x59, 0x63, 0x0e, 0x59, 0x62, 0x0e, 0x59, 0x62, 0x0e, 0x57, 0x61, 0x0e, 0x58, 0x60, 0x0f, 0x57, 0x5f,
+    0x0f, 0x57, 0x5f, 0x0e, 0x56, 0x5f, 0x0d, 0x56, 0x5f, 0x0d, 0x55, 0x5f, 0x0d, 0x54, 0x5e, 0x0d, 0x54, 0x5d, 0x0e, 0x53, 0x5e,
+    0x0e, 0x53, 0x5d, 0x0e, 0x52, 0x5c, 0x0e, 0x52, 0x5c, 0x0e, 0x51, 0x5b, 0x0e, 0x52, 0x5a, 0x0e, 0x51, 0x5a, 0x0e, 0x4f, 0x58,
+    0x0e, 0x4f, 0x57, 0x0e, 0x4e, 0x56, 0x0e, 0x4d, 0x56, 0x0f, 0x4c, 0x56, 0x0f, 0x4c, 0x55, 0x0d, 0x4b, 0x53, 0x0d, 0x4a, 0x53,
+    0x0d, 0x4a, 0x51, 0x0e, 0x4a, 0x51, 0x0e, 0x49, 0x51, 0x0d, 0x48, 0x50, 0x0d, 0x47, 0x4f, 0x0d, 0x46, 0x4e, 0x0d, 0x46, 0x4d,
+    0x0d, 0x44, 0x4b, 0x0d, 0x43, 0x4b, 0x0d, 0x42, 0x4b, 0x0d, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46,
+    0x0c, 0x3e, 0x45, 0x0c, 0x3d, 0x43, 0x09, 0x37, 0x3f, 0x41, 0x69, 0x6e, 0xd7, 0xe4, 0xe6, 0xc4, 0xd7, 0xda, 0x5b, 0x91, 0x98,
+    0x46, 0x81, 0x8a, 0x46, 0x80, 0x89, 0x46, 0x80, 0x89, 0x46, 0x80, 0x89, 0x45, 0x80, 0x89, 0x45, 0x80, 0x89, 0x43, 0x7e, 0x88,
+    0x43, 0x7d, 0x86, 0x43, 0x7d, 0x86, 0x42, 0x7c, 0x85, 0x41, 0x7b, 0x84, 0x41, 0x7b, 0x84, 0x40, 0x7a, 0x84, 0x3f, 0x79, 0x83,
+    0x3e, 0x78, 0x82, 0x3d, 0x78, 0x81, 0x3c, 0x77, 0x80, 0x3b, 0x76, 0x7f, 0x3b, 0x76, 0x7f, 0x3a, 0x74, 0x7d, 0x39, 0x74, 0x7d,
+    0x38, 0x71, 0x7b, 0x37, 0x71, 0x7b, 0x36, 0x70, 0x7a, 0x35, 0x6f, 0x79, 0x34, 0x6f, 0x77, 0x33, 0x6f, 0x77, 0x32, 0x6c, 0x76,
+    0x32, 0x6b, 0x75, 0x31, 0x6b, 0x75, 0x2f, 0x69, 0x73, 0x2f, 0x69, 0x71, 0x2e, 0x68, 0x71, 0x2d, 0x68, 0x70, 0x2d, 0x67, 0x6f,
+    0x2c, 0x65, 0x6f, 0x2a, 0x64, 0x6d, 0x2a, 0x64, 0x6c, 0x29, 0x63, 0x6c, 0x29, 0x62, 0x6b, 0x28, 0x62, 0x6a, 0x27, 0x60, 0x69,
+    0x27, 0x60, 0x69, 0x27, 0x60, 0x69, 0x26, 0x5e, 0x67, 0x26, 0x5e, 0x67, 0x26, 0x5e, 0x65, 0x25, 0x5d, 0x64, 0x25, 0x5d, 0x64,
+    0x24, 0x5b, 0x63, 0x23, 0x5a, 0x63, 0x23, 0x59, 0x63, 0x23, 0x59, 0x62, 0x23, 0x58, 0x61, 0x22, 0x58, 0x61, 0x22, 0x57, 0x60,
+    0x22, 0x57, 0x60, 0x22, 0x56, 0x5f, 0x22, 0x56, 0x5f, 0x22, 0x55, 0x5d, 0x21, 0x55, 0x5d, 0x20, 0x53, 0x5b, 0x20, 0x52, 0x5b,
+    0x20, 0x52, 0x5b, 0x20, 0x52, 0x5b, 0x20, 0x52, 0x5a, 0x1f, 0x51, 0x59, 0x1f, 0x51, 0x59, 0x1f, 0x50, 0x58, 0x1e, 0x4f, 0x57,
+    0x1e, 0x4f, 0x57, 0x1e, 0x4d, 0x55, 0x1e, 0x4d, 0x55, 0x1e, 0x4d, 0x54, 0x1e, 0x4c, 0x54, 0x1d, 0x4c, 0x53, 0x1d, 0x4b, 0x52,
+    0x1c, 0x4a, 0x51, 0x1d, 0x4a, 0x51, 0x1d, 0x49, 0x50, 0x1d, 0x48, 0x4f, 0x1c, 0x47, 0x4e, 0x1c, 0x47, 0x4e, 0x1c, 0x46, 0x4d,
+    0x1c, 0x46, 0x4c, 0x1b, 0x45, 0x4b, 0x1b, 0x44, 0x4b, 0x1b, 0x43, 0x4a, 0x1b, 0x43, 0x49, 0x26, 0x4c, 0x52, 0x49, 0x62, 0x67,
+    0x72, 0x73, 0x73, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x56, 0x72, 0x76, 0x0c, 0x61, 0x6d, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62,
+    0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f,
+    0x06, 0x54, 0x5f, 0x06, 0x53, 0x5f, 0x05, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x53, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5c,
+    0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58,
+    0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54,
+    0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x51, 0x05, 0x48, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x50, 0x05, 0x46, 0x4f,
+    0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4c, 0x04, 0x43, 0x4b, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x49, 0x04, 0x40, 0x49,
+    0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3d, 0x45, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x39, 0x41,
+    0x04, 0x38, 0x40, 0x04, 0x38, 0x3f, 0x04, 0x36, 0x3e, 0x04, 0x35, 0x3d, 0x04, 0x34, 0x3c, 0x04, 0x33, 0x3a, 0x03, 0x32, 0x39,
+    0x03, 0x31, 0x38, 0x03, 0x30, 0x36, 0x03, 0x2f, 0x35, 0x03, 0x2e, 0x34, 0x03, 0x2c, 0x32, 0x03, 0x2b, 0x31, 0x03, 0x29, 0x2f,
+    0x00, 0x25, 0x2a, 0x5a, 0x7a, 0x7e, 0xff, 0xff, 0xff, 0xdc, 0xe9, 0xeb, 0x52, 0x90, 0x9a, 0x38, 0x7f, 0x8a, 0x38, 0x7f, 0x8a,
+    0x37, 0x7e, 0x89, 0x37, 0x7e, 0x89, 0x36, 0x7c, 0x88, 0x36, 0x7c, 0x88, 0x35, 0x7b, 0x86, 0x34, 0x7a, 0x85, 0x34, 0x7a, 0x84,
+    0x33, 0x79, 0x83, 0x32, 0x77, 0x83, 0x31, 0x77, 0x81, 0x30, 0x75, 0x81, 0x30, 0x75, 0x80, 0x2f, 0x74, 0x7e, 0x2e, 0x73, 0x7e,
+    0x2d, 0x72, 0x7c, 0x2c, 0x71, 0x7c, 0x2b, 0x70, 0x7a, 0x2a, 0x6e, 0x79, 0x29, 0x6d, 0x78, 0x27, 0x6d, 0x77, 0x26, 0x6b, 0x76,
+    0x26, 0x6a, 0x75, 0x25, 0x69, 0x73, 0x24, 0x68, 0x73, 0x23, 0x67, 0x71, 0x22, 0x65, 0x70, 0x21, 0x64, 0x6f, 0x1f, 0x62, 0x6d,
+    0x1e, 0x61, 0x6c, 0x1d, 0x60, 0x6b, 0x1c, 0x5f, 0x6a, 0x1c, 0x5f, 0x6a, 0x1b, 0x5e, 0x68, 0x1a, 0x5c, 0x67, 0x19, 0x5a, 0x65,
+    0x19, 0x5a, 0x65, 0x18, 0x59, 0x63, 0x18, 0x59, 0x62, 0x17, 0x57, 0x61, 0x17, 0x56, 0x61, 0x16, 0x55, 0x5f, 0x16, 0x54, 0x5e,
+    0x15, 0x53, 0x5d, 0x14, 0x52, 0x5c, 0x13, 0x50, 0x5a, 0x13, 0x50, 0x5a, 0x13, 0x4f, 0x59, 0x12, 0x4e, 0x57, 0x12, 0x4d, 0x57,
+    0x12, 0x4c, 0x56, 0x11, 0x4b, 0x55, 0x10, 0x4b, 0x54, 0x10, 0x4a, 0x53, 0x10, 0x4a, 0x53, 0x10, 0x49, 0x52, 0x10, 0x48, 0x51,
+    0x10, 0x47, 0x50, 0x0f, 0x46, 0x4f, 0x0f, 0x45, 0x4e, 0x0e, 0x43, 0x4c, 0x0e, 0x43, 0x4b, 0x0e, 0x43, 0x4b, 0x0e, 0x42, 0x4a,
+    0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3e, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43,
+    0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x38, 0x3e, 0x0c, 0x37, 0x3d, 0x0b, 0x36, 0x3c,
+    0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x33, 0x3a, 0x0b, 0x32, 0x39, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35,
+    0x0a, 0x2e, 0x34, 0x0a, 0x2d, 0x33, 0x0a, 0x2c, 0x32, 0x09, 0x2b, 0x31, 0x21, 0x46, 0x4c, 0x69, 0x70, 0x71, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x63, 0x73, 0x76, 0x0b, 0x60, 0x6d, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61,
+    0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e,
+    0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b,
+    0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57,
+    0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53,
+    0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d,
+    0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47,
+    0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f,
+    0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37,
+    0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x01, 0x29, 0x2f, 0x00, 0x23, 0x29, 0x69, 0x87, 0x8b,
+    0xff, 0xff, 0xff, 0xe0, 0xec, 0xed, 0x55, 0x93, 0x9c, 0x3b, 0x81, 0x8c, 0x3b, 0x81, 0x8b, 0x3a, 0x81, 0x8b, 0x39, 0x7f, 0x8a,
+    0x39, 0x7e, 0x89, 0x38, 0x7e, 0x89, 0x37, 0x7d, 0x87, 0x37, 0x7c, 0x87, 0x36, 0x7c, 0x87, 0x35, 0x7b, 0x85, 0x34, 0x7a, 0x85,
+    0x33, 0x79, 0x83, 0x31, 0x77, 0x82, 0x31, 0x76, 0x81, 0x30, 0x75, 0x80, 0x2f, 0x75, 0x7f, 0x2e, 0x73, 0x7e, 0x2d, 0x72, 0x7c,
+    0x2c, 0x71, 0x7c, 0x2b, 0x70, 0x7a, 0x2a, 0x6f, 0x7a, 0x28, 0x6d, 0x78, 0x26, 0x6c, 0x76, 0x26, 0x6b, 0x76, 0x25, 0x6a, 0x74,
+    0x24, 0x68, 0x74, 0x23, 0x67, 0x72, 0x22, 0x66, 0x71, 0x21, 0x65, 0x6f, 0x20, 0x64, 0x6e, 0x1f, 0x62, 0x6e, 0x1e, 0x62, 0x6d,
+    0x1d, 0x61, 0x6c, 0x1c, 0x5f, 0x6b, 0x1b, 0x5f, 0x69, 0x1a, 0x5d, 0x68, 0x1a, 0x5d, 0x68, 0x19, 0x5b, 0x66, 0x19, 0x5a, 0x64,
+    0x18, 0x5a, 0x63, 0x17, 0x58, 0x63, 0x17, 0x58, 0x62, 0x16, 0x56, 0x60, 0x16, 0x55, 0x60, 0x15, 0x54, 0x5f, 0x15, 0x54, 0x5e,
+    0x14, 0x52, 0x5c, 0x14, 0x52, 0x5c, 0x13, 0x51, 0x5b, 0x12, 0x4f, 0x59, 0x12, 0x4e, 0x58, 0x12, 0x4e, 0x57, 0x12, 0x4d, 0x57,
+    0x11, 0x4c, 0x56, 0x11, 0x4c, 0x55, 0x10, 0x4b, 0x54, 0x10, 0x4b, 0x54, 0x10, 0x4a, 0x53, 0x10, 0x49, 0x52, 0x10, 0x48, 0x51,
+    0x0f, 0x46, 0x4f, 0x0f, 0x46, 0x4e, 0x0e, 0x45, 0x4e, 0x0e, 0x44, 0x4d, 0x0e, 0x43, 0x4c, 0x0e, 0x43, 0x4b, 0x0e, 0x42, 0x4a,
+    0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44,
+    0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d,
+    0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x31, 0x37,
+    0x0a, 0x2f, 0x35, 0x09, 0x28, 0x2d, 0x20, 0x46, 0x4c, 0x73, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x24, 0x6a, 0x74, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62,
+    0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60,
+    0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d,
+    0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a,
+    0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56,
+    0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51,
+    0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b,
+    0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44,
+    0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c,
+    0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33,
+    0x03, 0x2c, 0x32, 0x03, 0x2b, 0x31, 0x01, 0x27, 0x2d, 0x01, 0x26, 0x2c, 0x80, 0x9a, 0x9d, 0xff, 0xff, 0xff, 0xe1, 0xec, 0xed,
+    0x58, 0x94, 0x9d, 0x3e, 0x83, 0x8e, 0x3c, 0x82, 0x8d, 0x3c, 0x81, 0x8c, 0x3c, 0x80, 0x8b, 0x3b, 0x7f, 0x8a, 0x3b, 0x7f, 0x8a,
+    0x3a, 0x7e, 0x89, 0x39, 0x7d, 0x88, 0x38, 0x7d, 0x87, 0x37, 0x7b, 0x86, 0x36, 0x7b, 0x85, 0x34, 0x79, 0x84, 0x33, 0x78, 0x83,
+    0x32, 0x77, 0x82, 0x31, 0x76, 0x81, 0x30, 0x74, 0x7f, 0x2f, 0x73, 0x7e, 0x2e, 0x72, 0x7d, 0x2d, 0x71, 0x7c, 0x2c, 0x70, 0x7b,
+    0x2a, 0x6e, 0x79, 0x28, 0x6d, 0x77, 0x27, 0x6c, 0x77, 0x26, 0x6b, 0x75, 0x26, 0x6a, 0x75, 0x25, 0x68, 0x73, 0x24, 0x68, 0x72,
+    0x23, 0x66, 0x70, 0x21, 0x65, 0x6f, 0x21, 0x64, 0x6f, 0x1f, 0x63, 0x6d, 0x1e, 0x61, 0x6d, 0x1d, 0x60, 0x6b, 0x1c, 0x5f, 0x6a,
+    0x1b, 0x5e, 0x68, 0x1a, 0x5d, 0x68, 0x1a, 0x5c, 0x67, 0x19, 0x5a, 0x65, 0x18, 0x5a, 0x63, 0x18, 0x59, 0x63, 0x17, 0x58, 0x62,
+    0x16, 0x56, 0x60, 0x16, 0x55, 0x60, 0x15, 0x54, 0x5f, 0x15, 0x54, 0x5e, 0x15, 0x53, 0x5d, 0x14, 0x52, 0x5c, 0x13, 0x51, 0x5b,
+    0x13, 0x50, 0x5a, 0x12, 0x4e, 0x58, 0x12, 0x4e, 0x57, 0x12, 0x4d, 0x57, 0x12, 0x4c, 0x56, 0x11, 0x4c, 0x55, 0x10, 0x4b, 0x54,
+    0x10, 0x4b, 0x54, 0x10, 0x4a, 0x53, 0x10, 0x49, 0x52, 0x10, 0x48, 0x51, 0x10, 0x47, 0x50, 0x0f, 0x46, 0x4e, 0x0e, 0x45, 0x4e,
+    0x0e, 0x44, 0x4d, 0x0e, 0x43, 0x4c, 0x0e, 0x43, 0x4b, 0x0e, 0x42, 0x4a, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47,
+    0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41,
+    0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a,
+    0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2d, 0x33, 0x09, 0x29, 0x2e,
+    0x45, 0x60, 0x64, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x0c, 0x61, 0x6d, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61,
+    0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f,
+    0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c,
+    0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58,
+    0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54,
+    0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e,
+    0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48,
+    0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41,
+    0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38,
+    0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f,
+    0x00, 0x25, 0x2a, 0x06, 0x2b, 0x2f, 0xa4, 0xb8, 0xba, 0xff, 0xff, 0xff, 0xde, 0xe9, 0xeb, 0x56, 0x92, 0x9b, 0x3f, 0x84, 0x8f,
+    0x3f, 0x83, 0x8e, 0x3e, 0x82, 0x8d, 0x3e, 0x82, 0x8d, 0x3d, 0x81, 0x8b, 0x3c, 0x80, 0x8b, 0x3c, 0x7f, 0x8a, 0x3b, 0x7f, 0x89,
+    0x3a, 0x7d, 0x88, 0x39, 0x7d, 0x87, 0x37, 0x7b, 0x85, 0x35, 0x79, 0x84, 0x34, 0x78, 0x83, 0x33, 0x78, 0x82, 0x32, 0x76, 0x81,
+    0x31, 0x74, 0x7f, 0x30, 0x74, 0x7e, 0x2f, 0x72, 0x7d, 0x2e, 0x71, 0x7c, 0x2c, 0x6f, 0x7a, 0x2a, 0x6f, 0x79, 0x29, 0x6d, 0x78,
+    0x28, 0x6d, 0x77, 0x26, 0x6a, 0x75, 0x26, 0x69, 0x74, 0x25, 0x68, 0x72, 0x24, 0x67, 0x71, 0x23, 0x66, 0x70, 0x22, 0x64, 0x70,
+    0x20, 0x64, 0x6e, 0x1f, 0x62, 0x6d, 0x1e, 0x61, 0x6c, 0x1d, 0x60, 0x6a, 0x1c, 0x5e, 0x69, 0x1b, 0x5e, 0x68, 0x1a, 0x5c, 0x67,
+    0x1a, 0x5b, 0x66, 0x18, 0x5a, 0x64, 0x18, 0x59, 0x63, 0x18, 0x59, 0x62, 0x17, 0x57, 0x61, 0x16, 0x55, 0x60, 0x16, 0x54, 0x5f,
+    0x15, 0x54, 0x5e, 0x15, 0x53, 0x5d, 0x15, 0x53, 0x5d, 0x13, 0x51, 0x5b, 0x13, 0x50, 0x5a, 0x13, 0x4f, 0x59, 0x12, 0x4e, 0x57,
+    0x12, 0x4d, 0x57, 0x12, 0x4c, 0x56, 0x12, 0x4c, 0x55, 0x10, 0x4b, 0x54, 0x10, 0x4b, 0x54, 0x10, 0x4a, 0x53, 0x10, 0x49, 0x52,
+    0x10, 0x48, 0x51, 0x10, 0x47, 0x50, 0x10, 0x47, 0x4f, 0x0e, 0x45, 0x4e, 0x0e, 0x44, 0x4d, 0x0e, 0x43, 0x4c, 0x0e, 0x43, 0x4b,
+    0x0e, 0x42, 0x4a, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45,
+    0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e,
+    0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37,
+    0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x29, 0x2e, 0x23, 0x49, 0x4f, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62,
+    0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60,
+    0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e,
+    0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a,
+    0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56,
+    0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51,
+    0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c,
+    0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45,
+    0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d,
+    0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34,
+    0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x00, 0x22, 0x27, 0x12, 0x35, 0x3a,
+    0xcc, 0xd7, 0xd9, 0xff, 0xff, 0xff, 0xd6, 0xe5, 0xe7, 0x51, 0x8f, 0x99, 0x43, 0x86, 0x90, 0x41, 0x84, 0x8e, 0x41, 0x84, 0x8e,
+    0x40, 0x83, 0x8d, 0x3f, 0x82, 0x8c, 0x3e, 0x81, 0x8c, 0x3d, 0x80, 0x8a, 0x3c, 0x7f, 0x8a, 0x3c, 0x7e, 0x88, 0x39, 0x7c, 0x86,
+    0x38, 0x7b, 0x86, 0x37, 0x7a, 0x84, 0x36, 0x7a, 0x84, 0x34, 0x78, 0x82, 0x33, 0x76, 0x80, 0x32, 0x75, 0x80, 0x31, 0x73, 0x7e,
+    0x30, 0x73, 0x7d, 0x2e, 0x71, 0x7b, 0x2c, 0x70, 0x7a, 0x2b, 0x6e, 0x79, 0x29, 0x6d, 0x77, 0x28, 0x6c, 0x77, 0x27, 0x6a, 0x75,
+    0x26, 0x69, 0x74, 0x26, 0x68, 0x72, 0x24, 0x67, 0x71, 0x23, 0x65, 0x70, 0x21, 0x64, 0x6f, 0x20, 0x63, 0x6e, 0x1f, 0x61, 0x6d,
+    0x1e, 0x61, 0x6b, 0x1d, 0x5f, 0x69, 0x1c, 0x5e, 0x69, 0x1b, 0x5d, 0x67, 0x1a, 0x5b, 0x66, 0x19, 0x5b, 0x65, 0x18, 0x59, 0x64,
+    0x18, 0x59, 0x62, 0x18, 0x58, 0x61, 0x17, 0x56, 0x61, 0x16, 0x54, 0x5f, 0x16, 0x54, 0x5e, 0x15, 0x53, 0x5d, 0x15, 0x53, 0x5d,
+    0x14, 0x52, 0x5c, 0x13, 0x50, 0x5a, 0x13, 0x4f, 0x59, 0x13, 0x4f, 0x58, 0x12, 0x4d, 0x57, 0x12, 0x4c, 0x56, 0x12, 0x4c, 0x55,
+    0x10, 0x4b, 0x54, 0x10, 0x4b, 0x54, 0x10, 0x4a, 0x53, 0x10, 0x49, 0x52, 0x10, 0x48, 0x51, 0x10, 0x47, 0x50, 0x10, 0x47, 0x4f,
+    0x0e, 0x45, 0x4e, 0x0e, 0x44, 0x4d, 0x0e, 0x43, 0x4c, 0x0e, 0x43, 0x4b, 0x0e, 0x42, 0x4a, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48,
+    0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42,
+    0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b,
+    0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34,
+    0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62,
+    0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f,
+    0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c,
+    0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59,
+    0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55,
+    0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f,
+    0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49,
+    0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42,
+    0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a,
+    0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30,
+    0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x00, 0x1f, 0x23, 0x26, 0x49, 0x4e, 0xf1, 0xf5, 0xf5, 0xff, 0xff, 0xff,
+    0xc7, 0xdb, 0xde, 0x4a, 0x8a, 0x95, 0x45, 0x86, 0x90, 0x45, 0x86, 0x90, 0x44, 0x85, 0x8f, 0x42, 0x83, 0x8e, 0x41, 0x83, 0x8d,
+    0x40, 0x82, 0x8c, 0x3f, 0x81, 0x8b, 0x3d, 0x80, 0x8a, 0x3c, 0x7e, 0x88, 0x3b, 0x7d, 0x88, 0x3a, 0x7c, 0x86, 0x38, 0x7b, 0x85,
+    0x37, 0x79, 0x84, 0x36, 0x78, 0x82, 0x34, 0x77, 0x81, 0x33, 0x75, 0x80, 0x31, 0x74, 0x7e, 0x31, 0x72, 0x7d, 0x2e, 0x71, 0x7b,
+    0x2d, 0x70, 0x7a, 0x2b, 0x6e, 0x78, 0x2a, 0x6d, 0x78, 0x29, 0x6b, 0x76, 0x27, 0x6a, 0x74, 0x26, 0x69, 0x73, 0x26, 0x68, 0x72,
+    0x25, 0x66, 0x71, 0x22, 0x65, 0x6f, 0x21, 0x63, 0x6f, 0x20, 0x62, 0x6d, 0x1f, 0x61, 0x6c, 0x1e, 0x60, 0x6a, 0x1d, 0x5f, 0x69,
+    0x1c, 0x5d, 0x68, 0x1b, 0x5c, 0x66, 0x19, 0x5b, 0x65, 0x19, 0x5a, 0x65, 0x18, 0x59, 0x63, 0x18, 0x58, 0x61, 0x17, 0x56, 0x61,
+    0x17, 0x55, 0x60, 0x16, 0x54, 0x5e, 0x16, 0x53, 0x5d, 0x15, 0x53, 0x5d, 0x14, 0x52, 0x5c, 0x14, 0x51, 0x5b, 0x13, 0x4f, 0x59,
+    0x13, 0x4f, 0x58, 0x12, 0x4d, 0x57, 0x12, 0x4c, 0x56, 0x12, 0x4c, 0x55, 0x11, 0x4b, 0x54, 0x10, 0x4b, 0x54, 0x10, 0x4a, 0x53,
+    0x10, 0x49, 0x52, 0x10, 0x48, 0x51, 0x10, 0x47, 0x50, 0x10, 0x47, 0x4f, 0x0f, 0x46, 0x4f, 0x0e, 0x44, 0x4d, 0x0e, 0x43, 0x4c,
+    0x0e, 0x43, 0x4b, 0x0e, 0x42, 0x4a, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45,
+    0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f,
+    0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38,
+    0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63,
+    0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61,
+    0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e,
+    0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b,
+    0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57,
+    0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52,
+    0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d,
+    0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46,
+    0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e,
+    0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36,
+    0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c,
+    0x02, 0x25, 0x2a, 0x00, 0x1d, 0x22, 0x4d, 0x6c, 0x71, 0xfe, 0xff, 0xff, 0xff, 0xff, 0xff, 0xb1, 0xcd, 0xd1, 0x48, 0x88, 0x93,
+    0x47, 0x88, 0x92, 0x47, 0x87, 0x91, 0x46, 0x86, 0x90, 0x45, 0x85, 0x90, 0x43, 0x84, 0x8e, 0x42, 0x83, 0x8d, 0x41, 0x82, 0x8c,
+    0x3e, 0x80, 0x8a, 0x3d, 0x7f, 0x89, 0x3d, 0x7e, 0x88, 0x3b, 0x7d, 0x87, 0x3a, 0x7b, 0x85, 0x38, 0x79, 0x83, 0x37, 0x79, 0x83,
+    0x35, 0x76, 0x81, 0x34, 0x76, 0x80, 0x32, 0x74, 0x7e, 0x30, 0x72, 0x7c, 0x2f, 0x71, 0x7c, 0x2d, 0x70, 0x79, 0x2c, 0x6e, 0x79,
+    0x2a, 0x6c, 0x77, 0x29, 0x6b, 0x75, 0x28, 0x6a, 0x74, 0x26, 0x69, 0x73, 0x26, 0x67, 0x72, 0x24, 0x66, 0x70, 0x23, 0x65, 0x70,
+    0x21, 0x62, 0x6e, 0x20, 0x62, 0x6c, 0x1f, 0x60, 0x6b, 0x1e, 0x60, 0x6a, 0x1d, 0x5e, 0x68, 0x1c, 0x5c, 0x67, 0x1a, 0x5c, 0x65,
+    0x19, 0x5a, 0x65, 0x18, 0x59, 0x63, 0x18, 0x58, 0x62, 0x18, 0x57, 0x61, 0x17, 0x55, 0x60, 0x17, 0x55, 0x5f, 0x16, 0x53, 0x5d,
+    0x16, 0x53, 0x5d, 0x14, 0x52, 0x5c, 0x14, 0x51, 0x5b, 0x13, 0x4f, 0x59, 0x13, 0x4f, 0x58, 0x13, 0x4e, 0x58, 0x12, 0x4c, 0x56,
+    0x12, 0x4c, 0x55, 0x11, 0x4b, 0x54, 0x11, 0x4b, 0x54, 0x10, 0x4a, 0x53, 0x10, 0x49, 0x52, 0x10, 0x48, 0x51, 0x10, 0x47, 0x50,
+    0x10, 0x47, 0x4f, 0x0f, 0x46, 0x4f, 0x0e, 0x44, 0x4d, 0x0e, 0x43, 0x4c, 0x0e, 0x43, 0x4b, 0x0e, 0x42, 0x4a, 0x0e, 0x41, 0x49,
+    0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43,
+    0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c,
+    0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35,
+    0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62,
+    0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60,
+    0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d,
+    0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59,
+    0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55,
+    0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50,
+    0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a,
+    0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43,
+    0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b,
+    0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32,
+    0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x01, 0x23, 0x28, 0x00, 0x20, 0x25,
+    0x89, 0xa0, 0xa4, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x98, 0xbc, 0xc2, 0x4b, 0x8a, 0x94, 0x4a, 0x8a, 0x93, 0x48, 0x88, 0x92,
+    0x47, 0x87, 0x91, 0x47, 0x86, 0x90, 0x45, 0x84, 0x8f, 0x44, 0x84, 0x8d, 0x42, 0x82, 0x8c, 0x41, 0x81, 0x8b, 0x3f, 0x80, 0x8a,
+    0x3d, 0x7f, 0x88, 0x3d, 0x7d, 0x87, 0x3b, 0x7b, 0x85, 0x39, 0x7a, 0x84, 0x38, 0x78, 0x82, 0x36, 0x77, 0x81, 0x34, 0x75, 0x7f,
+    0x32, 0x74, 0x7e, 0x31, 0x72, 0x7d, 0x30, 0x72, 0x7b, 0x2e, 0x6f, 0x7a, 0x2c, 0x6d, 0x78, 0x2b, 0x6d, 0x76, 0x29, 0x6a, 0x74,
+    0x28, 0x6a, 0x74, 0x27, 0x68, 0x73, 0x25, 0x67, 0x71, 0x24, 0x65, 0x70, 0x23, 0x64, 0x6f, 0x21, 0x62, 0x6d, 0x20, 0x61, 0x6b,
+    0x1f, 0x60, 0x6b, 0x1e, 0x5f, 0x69, 0x1d, 0x5d, 0x67, 0x1b, 0x5c, 0x66, 0x1a, 0x5b, 0x65, 0x19, 0x5a, 0x64, 0x18, 0x58, 0x62,
+    0x18, 0x57, 0x62, 0x18, 0x56, 0x60, 0x17, 0x55, 0x5f, 0x17, 0x54, 0x5e, 0x16, 0x53, 0x5d, 0x14, 0x52, 0x5c, 0x14, 0x51, 0x5b,
+    0x14, 0x50, 0x5a, 0x13, 0x4f, 0x58, 0x13, 0x4e, 0x58, 0x13, 0x4d, 0x57, 0x12, 0x4c, 0x55, 0x11, 0x4b, 0x54, 0x11, 0x4b, 0x54,
+    0x10, 0x4a, 0x53, 0x10, 0x49, 0x52, 0x10, 0x48, 0x51, 0x10, 0x47, 0x50, 0x10, 0x47, 0x4f, 0x0f, 0x46, 0x4f, 0x0f, 0x45, 0x4e,
+    0x0e, 0x43, 0x4c, 0x0e, 0x43, 0x4b, 0x0e, 0x42, 0x4a, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46,
+    0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40,
+    0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39,
+    0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61,
+    0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e,
+    0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b,
+    0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57,
+    0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53,
+    0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d,
+    0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47,
+    0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f,
+    0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37,
+    0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e,
+    0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x00, 0x1e, 0x23, 0x0e, 0x2e, 0x33, 0xca, 0xd6, 0xd7, 0xff, 0xff, 0xff,
+    0xf9, 0xfb, 0xfb, 0x7a, 0xa9, 0xb0, 0x4e, 0x8c, 0x95, 0x4c, 0x8a, 0x94, 0x4b, 0x89, 0x93, 0x4a, 0x89, 0x92, 0x48, 0x87, 0x91,
+    0x47, 0x86, 0x90, 0x45, 0x84, 0x8e, 0x43, 0x82, 0x8d, 0x42, 0x82, 0x8b, 0x41, 0x81, 0x8b, 0x3e, 0x7e, 0x88, 0x3d, 0x7d, 0x87,
+    0x3c, 0x7c, 0x86, 0x3a, 0x7a, 0x84, 0x39, 0x79, 0x83, 0x37, 0x77, 0x81, 0x34, 0x76, 0x7f, 0x32, 0x73, 0x7e, 0x32, 0x73, 0x7c,
+    0x30, 0x71, 0x7b, 0x2e, 0x6f, 0x79, 0x2d, 0x6e, 0x78, 0x2b, 0x6c, 0x76, 0x2a, 0x6b, 0x75, 0x28, 0x69, 0x74, 0x26, 0x68, 0x72,
+    0x25, 0x66, 0x71, 0x24, 0x64, 0x70, 0x23, 0x64, 0x6e, 0x21, 0x62, 0x6c, 0x20, 0x61, 0x6b, 0x1f, 0x5f, 0x6a, 0x1e, 0x5e, 0x68,
+    0x1c, 0x5d, 0x67, 0x1b, 0x5b, 0x66, 0x1a, 0x5b, 0x64, 0x19, 0x59, 0x63, 0x19, 0x58, 0x63, 0x18, 0x56, 0x60, 0x18, 0x56, 0x60,
+    0x17, 0x54, 0x5e, 0x16, 0x53, 0x5d, 0x15, 0x52, 0x5c, 0x15, 0x51, 0x5b, 0x14, 0x50, 0x5a, 0x14, 0x50, 0x59, 0x13, 0x4e, 0x58,
+    0x13, 0x4d, 0x57, 0x12, 0x4c, 0x55, 0x11, 0x4b, 0x54, 0x11, 0x4b, 0x54, 0x11, 0x4a, 0x53, 0x10, 0x49, 0x52, 0x10, 0x48, 0x51,
+    0x10, 0x47, 0x50, 0x10, 0x47, 0x4f, 0x0f, 0x46, 0x4f, 0x0f, 0x45, 0x4e, 0x0f, 0x44, 0x4d, 0x0e, 0x43, 0x4b, 0x0e, 0x42, 0x4a,
+    0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44,
+    0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d,
+    0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36,
+    0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62,
+    0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60,
+    0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d,
+    0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a,
+    0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56,
+    0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51,
+    0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b,
+    0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44,
+    0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c,
+    0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33,
+    0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29,
+    0x02, 0x23, 0x27, 0x00, 0x1b, 0x1f, 0x31, 0x51, 0x56, 0xfe, 0xfe, 0xfe, 0xff, 0xff, 0xff, 0xe5, 0xee, 0xef, 0x64, 0x9a, 0xa2,
+    0x50, 0x8c, 0x96, 0x4f, 0x8c, 0x96, 0x4e, 0x8b, 0x94, 0x4c, 0x89, 0x93, 0x4b, 0x89, 0x92, 0x49, 0x87, 0x90, 0x47, 0x85, 0x8f,
+    0x45, 0x83, 0x8d, 0x43, 0x82, 0x8c, 0x41, 0x80, 0x8a, 0x40, 0x7f, 0x88, 0x3e, 0x7e, 0x87, 0x3d, 0x7b, 0x85, 0x3b, 0x7a, 0x84,
+    0x39, 0x78, 0x82, 0x36, 0x77, 0x80, 0x35, 0x75, 0x80, 0x33, 0x74, 0x7e, 0x32, 0x72, 0x7c, 0x31, 0x70, 0x7b, 0x2f, 0x6f, 0x79,
+    0x2d, 0x6d, 0x77, 0x2b, 0x6c, 0x76, 0x2a, 0x6a, 0x75, 0x27, 0x69, 0x73, 0x26, 0x67, 0x72, 0x25, 0x65, 0x70, 0x24, 0x64, 0x6f,
+    0x23, 0x63, 0x6d, 0x21, 0x62, 0x6c, 0x20, 0x60, 0x6a, 0x1f, 0x5e, 0x69, 0x1d, 0x5e, 0x67, 0x1c, 0x5c, 0x67, 0x1b, 0x5b, 0x65,
+    0x1a, 0x5a, 0x63, 0x19, 0x58, 0x63, 0x18, 0x56, 0x61, 0x18, 0x56, 0x60, 0x18, 0x55, 0x5f, 0x17, 0x54, 0x5e, 0x16, 0x53, 0x5d,
+    0x15, 0x51, 0x5b, 0x14, 0x50, 0x5a, 0x14, 0x50, 0x59, 0x13, 0x4e, 0x58, 0x13, 0x4d, 0x57, 0x13, 0x4d, 0x56, 0x11, 0x4b, 0x54,
+    0x11, 0x4b, 0x54, 0x11, 0x4a, 0x53, 0x11, 0x49, 0x52, 0x10, 0x48, 0x51, 0x10, 0x47, 0x50, 0x10, 0x47, 0x4f, 0x0f, 0x46, 0x4f,
+    0x0f, 0x45, 0x4e, 0x0f, 0x44, 0x4d, 0x0e, 0x43, 0x4b, 0x0e, 0x42, 0x4a, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47,
+    0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41,
+    0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a,
+    0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30,
+    0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61,
+    0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f,
+    0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c,
+    0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58,
+    0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54,
+    0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e,
+    0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48,
+    0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41,
+    0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38,
+    0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f,
+    0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x20, 0x25, 0x00, 0x1b, 0x1f,
+    0x77, 0x90, 0x93, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xc5, 0xd9, 0xdc, 0x53, 0x8f, 0x98, 0x52, 0x8e, 0x98, 0x52, 0x8e, 0x96,
+    0x50, 0x8c, 0x95, 0x4e, 0x8a, 0x94, 0x4b, 0x88, 0x92, 0x4a, 0x87, 0x91, 0x49, 0x86, 0x8f, 0x47, 0x85, 0x8e, 0x45, 0x83, 0x8c,
+    0x43, 0x81, 0x8a, 0x41, 0x7f, 0x89, 0x3f, 0x7d, 0x87, 0x3d, 0x7c, 0x86, 0x3c, 0x7a, 0x84, 0x39, 0x79, 0x82, 0x37, 0x77, 0x81,
+    0x35, 0x75, 0x7f, 0x33, 0x73, 0x7e, 0x32, 0x72, 0x7c, 0x31, 0x70, 0x7a, 0x2f, 0x6e, 0x78, 0x2d, 0x6d, 0x77, 0x2c, 0x6b, 0x76,
+    0x29, 0x6a, 0x74, 0x27, 0x68, 0x73, 0x26, 0x66, 0x71, 0x25, 0x65, 0x6f, 0x24, 0x64, 0x6e, 0x23, 0x63, 0x6d, 0x21, 0x61, 0x6b,
+    0x20, 0x5f, 0x69, 0x1e, 0x5e, 0x68, 0x1d, 0x5d, 0x67, 0x1c, 0x5c, 0x66, 0x1a, 0x5a, 0x63, 0x1a, 0x59, 0x63, 0x19, 0x57, 0x62,
+    0x18, 0x56, 0x60, 0x18, 0x55, 0x5f, 0x17, 0x54, 0x5e, 0x16, 0x53, 0x5d, 0x15, 0x51, 0x5b, 0x15, 0x50, 0x5a, 0x14, 0x50, 0x59,
+    0x14, 0x4f, 0x59, 0x13, 0x4d, 0x57, 0x13, 0x4d, 0x56, 0x12, 0x4c, 0x55, 0x11, 0x4b, 0x54, 0x11, 0x4a, 0x53, 0x11, 0x49, 0x52,
+    0x10, 0x48, 0x51, 0x10, 0x47, 0x50, 0x10, 0x47, 0x4f, 0x0f, 0x46, 0x4f, 0x0f, 0x45, 0x4e, 0x0f, 0x44, 0x4d, 0x0f, 0x44, 0x4c,
+    0x0e, 0x42, 0x4a, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45,
+    0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e,
+    0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37,
+    0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62,
+    0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60,
+    0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e,
+    0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a,
+    0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56,
+    0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51,
+    0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c,
+    0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45,
+    0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d,
+    0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34,
+    0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a,
+    0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x00, 0x1c, 0x20, 0x0d, 0x2b, 0x30, 0xcd, 0xd7, 0xd8, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0x9b, 0xbd, 0xc3, 0x56, 0x91, 0x9a, 0x55, 0x90, 0x99, 0x53, 0x8e, 0x98, 0x52, 0x8d, 0x96, 0x4f, 0x8b, 0x94,
+    0x4d, 0x89, 0x93, 0x4b, 0x88, 0x91, 0x49, 0x86, 0x90, 0x48, 0x84, 0x8e, 0x46, 0x82, 0x8c, 0x44, 0x81, 0x8b, 0x42, 0x7f, 0x89,
+    0x40, 0x7e, 0x88, 0x3e, 0x7c, 0x86, 0x3c, 0x7b, 0x84, 0x3a, 0x79, 0x83, 0x38, 0x77, 0x81, 0x36, 0x75, 0x7f, 0x34, 0x73, 0x7d,
+    0x32, 0x72, 0x7b, 0x31, 0x70, 0x79, 0x2f, 0x6e, 0x78, 0x2d, 0x6c, 0x77, 0x2b, 0x6b, 0x75, 0x29, 0x69, 0x74, 0x27, 0x67, 0x72,
+    0x26, 0x66, 0x70, 0x25, 0x64, 0x6e, 0x24, 0x64, 0x6e, 0x22, 0x61, 0x6c, 0x21, 0x60, 0x6a, 0x1f, 0x5f, 0x68, 0x1e, 0x5d, 0x68,
+    0x1c, 0x5c, 0x66, 0x1b, 0x5a, 0x64, 0x1a, 0x59, 0x63, 0x19, 0x57, 0x62, 0x18, 0x56, 0x60, 0x18, 0x55, 0x5f, 0x18, 0x55, 0x5f,
+    0x16, 0x53, 0x5d, 0x15, 0x51, 0x5b, 0x15, 0x50, 0x5a, 0x15, 0x50, 0x59, 0x14, 0x4f, 0x59, 0x14, 0x4e, 0x58, 0x13, 0x4d, 0x56,
+    0x12, 0x4c, 0x55, 0x11, 0x4b, 0x54, 0x11, 0x4a, 0x53, 0x11, 0x49, 0x52, 0x11, 0x48, 0x51, 0x10, 0x47, 0x50, 0x10, 0x47, 0x4f,
+    0x0f, 0x46, 0x4f, 0x0f, 0x45, 0x4e, 0x0f, 0x44, 0x4d, 0x0f, 0x44, 0x4c, 0x0f, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48,
+    0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42,
+    0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b,
+    0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34,
+    0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62,
+    0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f,
+    0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c,
+    0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59,
+    0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55,
+    0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f,
+    0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49,
+    0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42,
+    0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a,
+    0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30,
+    0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26,
+    0x02, 0x20, 0x24, 0x00, 0x18, 0x1c, 0x3b, 0x58, 0x5e, 0xfe, 0xfe, 0xfe, 0xff, 0xff, 0xff, 0xf2, 0xf6, 0xf7, 0x74, 0xa4, 0xac,
+    0x58, 0x92, 0x9a, 0x57, 0x90, 0x9a, 0x55, 0x8f, 0x98, 0x53, 0x8d, 0x96, 0x50, 0x8b, 0x94, 0x4e, 0x8a, 0x93, 0x4d, 0x89, 0x92,
+    0x4a, 0x86, 0x90, 0x49, 0x84, 0x8e, 0x47, 0x83, 0x8c, 0x45, 0x81, 0x8b, 0x43, 0x80, 0x89, 0x41, 0x7e, 0x87, 0x3e, 0x7c, 0x85,
+    0x3c, 0x7a, 0x84, 0x3a, 0x79, 0x82, 0x38, 0x76, 0x81, 0x36, 0x74, 0x7f, 0x34, 0x73, 0x7c, 0x32, 0x71, 0x7a, 0x31, 0x70, 0x79,
+    0x2f, 0x6d, 0x78, 0x2c, 0x6c, 0x76, 0x2b, 0x6b, 0x75, 0x29, 0x68, 0x73, 0x27, 0x67, 0x71, 0x26, 0x65, 0x6f, 0x25, 0x64, 0x6e,
+    0x24, 0x63, 0x6d, 0x22, 0x60, 0x6b, 0x20, 0x60, 0x69, 0x1e, 0x5d, 0x68, 0x1d, 0x5d, 0x66, 0x1c, 0x5b, 0x65, 0x1b, 0x59, 0x64,
+    0x1a, 0x58, 0x62, 0x19, 0x57, 0x61, 0x19, 0x56, 0x60, 0x18, 0x55, 0x5f, 0x17, 0x54, 0x5e, 0x16, 0x52, 0x5c, 0x16, 0x51, 0x5b,
+    0x15, 0x50, 0x59, 0x14, 0x4f, 0x59, 0x14, 0x4e, 0x58, 0x13, 0x4d, 0x56, 0x12, 0x4c, 0x55, 0x12, 0x4c, 0x55, 0x11, 0x4a, 0x53,
+    0x11, 0x49, 0x52, 0x11, 0x48, 0x51, 0x10, 0x47, 0x50, 0x10, 0x47, 0x4f, 0x0f, 0x46, 0x4f, 0x0f, 0x45, 0x4e, 0x0f, 0x44, 0x4d,
+    0x0f, 0x44, 0x4c, 0x0f, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45,
+    0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f,
+    0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38,
+    0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63,
+    0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61,
+    0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e,
+    0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b,
+    0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57,
+    0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52,
+    0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d,
+    0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46,
+    0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e,
+    0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36,
+    0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c,
+    0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x01, 0x1d, 0x21, 0x02, 0x1c, 0x21,
+    0x98, 0xab, 0xae, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xcf, 0xdf, 0xe2, 0x5d, 0x95, 0x9d, 0x5b, 0x93, 0x9c, 0x59, 0x92, 0x9a,
+    0x56, 0x90, 0x98, 0x55, 0x8e, 0x97, 0x52, 0x8c, 0x95, 0x50, 0x8b, 0x94, 0x4e, 0x89, 0x92, 0x4c, 0x87, 0x90, 0x49, 0x85, 0x8e,
+    0x48, 0x83, 0x8c, 0x46, 0x82, 0x8b, 0x43, 0x7f, 0x89, 0x40, 0x7e, 0x87, 0x3e, 0x7c, 0x85, 0x3d, 0x7a, 0x83, 0x3a, 0x78, 0x82,
+    0x38, 0x76, 0x80, 0x36, 0x74, 0x7e, 0x34, 0x72, 0x7c, 0x32, 0x71, 0x7a, 0x31, 0x6f, 0x79, 0x2e, 0x6d, 0x77, 0x2c, 0x6b, 0x76,
+    0x2a, 0x69, 0x74, 0x29, 0x68, 0x72, 0x27, 0x66, 0x70, 0x26, 0x65, 0x6f, 0x25, 0x63, 0x6d, 0x23, 0x61, 0x6b, 0x21, 0x60, 0x6a,
+    0x1f, 0x5e, 0x68, 0x1e, 0x5d, 0x67, 0x1d, 0x5c, 0x65, 0x1c, 0x5a, 0x65, 0x1b, 0x59, 0x63, 0x1a, 0x58, 0x61, 0x19, 0x56, 0x60,
+    0x18, 0x55, 0x5f, 0x17, 0x54, 0x5e, 0x17, 0x53, 0x5d, 0x16, 0x51, 0x5b, 0x15, 0x50, 0x59, 0x15, 0x50, 0x59, 0x14, 0x4e, 0x58,
+    0x14, 0x4e, 0x57, 0x12, 0x4c, 0x55, 0x12, 0x4c, 0x55, 0x11, 0x4a, 0x53, 0x11, 0x49, 0x52, 0x11, 0x48, 0x51, 0x11, 0x47, 0x50,
+    0x10, 0x47, 0x4f, 0x0f, 0x46, 0x4f, 0x0f, 0x45, 0x4e, 0x0f, 0x44, 0x4d, 0x0f, 0x44, 0x4c, 0x0f, 0x43, 0x4b, 0x0e, 0x41, 0x49,
+    0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43,
+    0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c,
+    0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35,
+    0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62,
+    0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60,
+    0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d,
+    0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59,
+    0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55,
+    0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50,
+    0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a,
+    0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43,
+    0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b,
+    0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32,
+    0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27,
+    0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x00, 0x17, 0x1b, 0x1f, 0x3d, 0x41, 0xfa, 0xfc, 0xfc, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0x99, 0xbd, 0xc2, 0x5d, 0x95, 0x9e, 0x5c, 0x94, 0x9c, 0x59, 0x92, 0x9a, 0x57, 0x90, 0x99, 0x55, 0x8e, 0x97,
+    0x53, 0x8d, 0x95, 0x51, 0x8b, 0x94, 0x4f, 0x89, 0x92, 0x4c, 0x87, 0x90, 0x4a, 0x85, 0x8e, 0x49, 0x84, 0x8d, 0x46, 0x81, 0x8a,
+    0x43, 0x80, 0x88, 0x41, 0x7e, 0x87, 0x3e, 0x7c, 0x85, 0x3d, 0x7a, 0x83, 0x3b, 0x77, 0x81, 0x39, 0x76, 0x7f, 0x36, 0x73, 0x7d,
+    0x34, 0x72, 0x7c, 0x32, 0x70, 0x7a, 0x30, 0x6f, 0x78, 0x2e, 0x6d, 0x77, 0x2c, 0x6a, 0x75, 0x2a, 0x69, 0x73, 0x28, 0x67, 0x71,
+    0x26, 0x65, 0x70, 0x26, 0x64, 0x6e, 0x24, 0x62, 0x6c, 0x22, 0x61, 0x6a, 0x20, 0x5f, 0x69, 0x1f, 0x5e, 0x67, 0x1e, 0x5c, 0x66,
+    0x1d, 0x5b, 0x65, 0x1c, 0x59, 0x64, 0x1a, 0x58, 0x61, 0x19, 0x56, 0x60, 0x18, 0x55, 0x5f, 0x17, 0x55, 0x5e, 0x17, 0x53, 0x5d,
+    0x16, 0x51, 0x5b, 0x16, 0x51, 0x5a, 0x15, 0x50, 0x59, 0x15, 0x4f, 0x58, 0x14, 0x4e, 0x57, 0x12, 0x4c, 0x55, 0x12, 0x4c, 0x55,
+    0x12, 0x4b, 0x54, 0x11, 0x49, 0x52, 0x11, 0x48, 0x51, 0x11, 0x47, 0x50, 0x10, 0x47, 0x4f, 0x0f, 0x46, 0x4f, 0x0f, 0x45, 0x4e,
+    0x0f, 0x44, 0x4d, 0x0f, 0x44, 0x4c, 0x0f, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46,
+    0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40,
+    0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39,
+    0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61,
+    0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e,
+    0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b,
+    0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57,
+    0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53,
+    0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d,
+    0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47,
+    0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f,
+    0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37,
+    0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e,
+    0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23,
+    0x02, 0x1c, 0x21, 0x00, 0x16, 0x1a, 0x78, 0x8f, 0x92, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xea, 0xf1, 0xf2, 0x6f, 0xa0, 0xa8,
+    0x5f, 0x96, 0x9e, 0x5d, 0x94, 0x9c, 0x5a, 0x91, 0x9b, 0x58, 0x90, 0x99, 0x56, 0x8f, 0x98, 0x54, 0x8c, 0x95, 0x52, 0x8b, 0x93,
+    0x50, 0x89, 0x92, 0x4d, 0x87, 0x90, 0x4b, 0x85, 0x8f, 0x49, 0x83, 0x8c, 0x46, 0x82, 0x8a, 0x43, 0x7f, 0x88, 0x41, 0x7e, 0x86,
+    0x3f, 0x7b, 0x85, 0x3d, 0x79, 0x83, 0x3b, 0x77, 0x81, 0x39, 0x75, 0x7f, 0x36, 0x73, 0x7d, 0x34, 0x71, 0x7c, 0x32, 0x70, 0x7a,
+    0x30, 0x6e, 0x78, 0x2e, 0x6c, 0x76, 0x2c, 0x6a, 0x74, 0x2a, 0x68, 0x72, 0x28, 0x67, 0x71, 0x26, 0x65, 0x6f, 0x26, 0x63, 0x6d,
+    0x23, 0x62, 0x6b, 0x22, 0x60, 0x6a, 0x20, 0x5f, 0x68, 0x1f, 0x5d, 0x67, 0x25, 0x60, 0x6b, 0x30, 0x68, 0x72, 0x3b, 0x70, 0x78,
+    0x4b, 0x7b, 0x82, 0x56, 0x83, 0x8a, 0x58, 0x84, 0x8c, 0x51, 0x7f, 0x86, 0x4c, 0x79, 0x81, 0x3f, 0x70, 0x77, 0x31, 0x65, 0x6d,
+    0x26, 0x5b, 0x64, 0x18, 0x51, 0x5a, 0x13, 0x4d, 0x56, 0x12, 0x4c, 0x55, 0x12, 0x4b, 0x54, 0x11, 0x49, 0x52, 0x11, 0x48, 0x51,
+    0x11, 0x47, 0x50, 0x11, 0x47, 0x4f, 0x0f, 0x46, 0x4f, 0x0f, 0x45, 0x4e, 0x0f, 0x44, 0x4d, 0x0f, 0x44, 0x4c, 0x0f, 0x43, 0x4b,
+    0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44,
+    0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d,
+    0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36,
+    0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62,
+    0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60,
+    0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d,
+    0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a,
+    0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56,
+    0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51,
+    0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b,
+    0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44,
+    0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c,
+    0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33,
+    0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29,
+    0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x00, 0x17, 0x1b, 0x13, 0x2f, 0x34,
+    0xea, 0xef, 0xef, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xb8, 0xd1, 0xd4, 0x63, 0x98, 0xa0, 0x60, 0x96, 0x9f, 0x5e, 0x94, 0x9d,
+    0x5c, 0x93, 0x9b, 0x59, 0x91, 0x99, 0x57, 0x8f, 0x98, 0x55, 0x8d, 0x96, 0x53, 0x8b, 0x94, 0x50, 0x89, 0x91, 0x4e, 0x87, 0x90,
+    0x4b, 0x85, 0x8e, 0x49, 0x83, 0x8c, 0x46, 0x81, 0x8a, 0x44, 0x7f, 0x88, 0x41, 0x7d, 0x86, 0x3f, 0x7b, 0x84, 0x3d, 0x79, 0x82,
+    0x3b, 0x77, 0x80, 0x38, 0x75, 0x7e, 0x36, 0x73, 0x7d, 0x33, 0x71, 0x7b, 0x32, 0x6f, 0x7a, 0x2f, 0x6c, 0x77, 0x2d, 0x6b, 0x75,
+    0x2b, 0x69, 0x73, 0x29, 0x67, 0x71, 0x27, 0x65, 0x6f, 0x26, 0x64, 0x6e, 0x35, 0x6e, 0x78, 0x60, 0x8d, 0x94, 0x91, 0xb0, 0xb5,
+    0xb8, 0xcc, 0xcf, 0xd4, 0xe0, 0xe2, 0xec, 0xf1, 0xf2, 0xfd, 0xfe, 0xfe, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf9, 0xfa, 0xfa, 0xe4, 0xeb, 0xec, 0xcb, 0xd8, 0xd9, 0xab, 0xbf, 0xc3,
+    0x7f, 0x9f, 0xa4, 0x49, 0x75, 0x7c, 0x20, 0x55, 0x5d, 0x11, 0x48, 0x51, 0x11, 0x47, 0x50, 0x11, 0x47, 0x4f, 0x0f, 0x46, 0x4f,
+    0x0f, 0x45, 0x4e, 0x0f, 0x44, 0x4d, 0x0f, 0x44, 0x4c, 0x0f, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47,
+    0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41,
+    0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a,
+    0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30,
+    0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61,
+    0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f,
+    0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c,
+    0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58,
+    0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54,
+    0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e,
+    0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48,
+    0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41,
+    0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38,
+    0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f,
+    0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24,
+    0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x00, 0x14, 0x18, 0x69, 0x81, 0x85, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xf7, 0xfa, 0xfa, 0x82, 0xad, 0xb3, 0x64, 0x99, 0xa1, 0x61, 0x96, 0x9f, 0x60, 0x95, 0x9d, 0x5d, 0x93, 0x9c, 0x5b, 0x91, 0x9a,
+    0x58, 0x8f, 0x97, 0x55, 0x8d, 0x96, 0x54, 0x8b, 0x94, 0x51, 0x89, 0x92, 0x4f, 0x87, 0x90, 0x4b, 0x85, 0x8e, 0x49, 0x83, 0x8c,
+    0x47, 0x81, 0x8a, 0x44, 0x7f, 0x88, 0x41, 0x7c, 0x86, 0x3f, 0x7b, 0x84, 0x3d, 0x78, 0x81, 0x3b, 0x77, 0x80, 0x38, 0x74, 0x7e,
+    0x35, 0x73, 0x7c, 0x33, 0x70, 0x7a, 0x31, 0x6e, 0x78, 0x2f, 0x6c, 0x76, 0x2d, 0x6a, 0x74, 0x3e, 0x77, 0x80, 0x7b, 0xa1, 0xa7,
+    0xbb, 0xce, 0xd2, 0xee, 0xf3, 0xf3, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xe2, 0xe9, 0xea,
+    0xa9, 0xbd, 0xc0, 0x5e, 0x83, 0x89, 0x1e, 0x51, 0x59, 0x10, 0x46, 0x4f, 0x0f, 0x45, 0x4e, 0x0f, 0x44, 0x4d, 0x0f, 0x44, 0x4c,
+    0x0f, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45,
+    0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e,
+    0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37,
+    0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62,
+    0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60,
+    0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e,
+    0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a,
+    0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56,
+    0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51,
+    0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c,
+    0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45,
+    0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d,
+    0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34,
+    0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a,
+    0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f,
+    0x00, 0x16, 0x19, 0x11, 0x2c, 0x30, 0xe9, 0xed, 0xee, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xc9, 0xdc, 0xdf, 0x68, 0x9b, 0xa3,
+    0x65, 0x99, 0xa1, 0x63, 0x98, 0xa0, 0x60, 0x96, 0x9e, 0x5e, 0x93, 0x9b, 0x5b, 0x91, 0x99, 0x59, 0x8f, 0x98, 0x56, 0x8d, 0x96,
+    0x55, 0x8c, 0x94, 0x52, 0x89, 0x92, 0x4e, 0x87, 0x8f, 0x4b, 0x85, 0x8e, 0x49, 0x83, 0x8b, 0x47, 0x81, 0x8a, 0x44, 0x7e, 0x87,
+    0x41, 0x7c, 0x85, 0x3e, 0x79, 0x82, 0x3d, 0x78, 0x81, 0x3a, 0x75, 0x7f, 0x37, 0x74, 0x7d, 0x34, 0x71, 0x7b, 0x33, 0x6f, 0x79,
+    0x5d, 0x8d, 0x94, 0xad, 0xc5, 0xc9, 0xee, 0xf3, 0xf4, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xe3, 0xe9, 0xea,
+    0x92, 0xab, 0xaf, 0x34, 0x61, 0x69, 0x0f, 0x44, 0x4d, 0x0f, 0x44, 0x4c, 0x0f, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48,
+    0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42,
+    0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b,
+    0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34,
+    0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62,
+    0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f,
+    0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c,
+    0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59,
+    0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55,
+    0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f,
+    0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49,
+    0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42,
+    0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a,
+    0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30,
+    0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26,
+    0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x19, 0x1d, 0x00, 0x13, 0x17, 0x6e, 0x85, 0x89,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfc, 0xfd, 0xfd, 0x8c, 0xb3, 0xb9, 0x69, 0x9b, 0xa4, 0x66, 0x99, 0xa1, 0x64, 0x98, 0xa0,
+    0x61, 0x96, 0x9e, 0x5f, 0x93, 0x9b, 0x5c, 0x91, 0x9a, 0x59, 0x8f, 0x97, 0x57, 0x8e, 0x96, 0x55, 0x8b, 0x94, 0x51, 0x89, 0x91,
+    0x4e, 0x86, 0x8f, 0x4b, 0x85, 0x8d, 0x49, 0x82, 0x8b, 0x46, 0x7f, 0x89, 0x44, 0x7e, 0x87, 0x41, 0x7b, 0x84, 0x3e, 0x79, 0x82,
+    0x3c, 0x76, 0x80, 0x39, 0x75, 0x7e, 0x63, 0x92, 0x9a, 0xc0, 0xd3, 0xd6, 0xfc, 0xfd, 0xfd, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf7, 0xf8, 0xf9, 0xa5, 0xb9, 0xbc,
+    0x37, 0x63, 0x69, 0x0f, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45,
+    0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f,
+    0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38,
+    0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63,
+    0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61,
+    0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e,
+    0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b,
+    0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57,
+    0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52,
+    0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d,
+    0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46,
+    0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e,
+    0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36,
+    0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c,
+    0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21,
+    0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x00, 0x14, 0x17, 0x16, 0x31, 0x34, 0xf2, 0xf6, 0xf6, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xce, 0xdf, 0xe1, 0x6c, 0x9e, 0xa6, 0x6a, 0x9c, 0xa4, 0x67, 0x9a, 0xa2, 0x64, 0x98, 0x9f, 0x61, 0x95, 0x9d, 0x60, 0x94, 0x9c,
+    0x5d, 0x91, 0x9a, 0x5a, 0x8f, 0x98, 0x57, 0x8d, 0x95, 0x54, 0x8b, 0x93, 0x51, 0x88, 0x91, 0x4e, 0x86, 0x8f, 0x4b, 0x84, 0x8d,
+    0x49, 0x81, 0x8a, 0x46, 0x7f, 0x88, 0x43, 0x7c, 0x85, 0x40, 0x7b, 0x83, 0x55, 0x88, 0x91, 0xb7, 0xcc, 0xd0, 0xfc, 0xfd, 0xfd,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf6, 0xf8, 0xf8, 0x95, 0xac, 0xaf, 0x22, 0x51, 0x58,
+    0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43,
+    0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c,
+    0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35,
+    0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62,
+    0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60,
+    0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d,
+    0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59,
+    0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55,
+    0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50,
+    0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a,
+    0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43,
+    0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b,
+    0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32,
+    0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27,
+    0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1b,
+    0x00, 0x14, 0x17, 0x84, 0x98, 0x9c, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfb, 0xfc, 0xfd, 0x8c, 0xb3, 0xb9, 0x6d, 0x9e, 0xa6,
+    0x6b, 0x9d, 0xa4, 0x68, 0x9a, 0xa2, 0x65, 0x98, 0x9f, 0x62, 0x96, 0x9e, 0x60, 0x93, 0x9b, 0x5d, 0x91, 0x9a, 0x5a, 0x8f, 0x97,
+    0x57, 0x8d, 0x95, 0x54, 0x8a, 0x93, 0x51, 0x88, 0x90, 0x4e, 0x86, 0x8f, 0x4b, 0x83, 0x8c, 0x49, 0x81, 0x8a, 0x46, 0x7e, 0x87,
+    0x8d, 0xaf, 0xb5, 0xef, 0xf4, 0xf5, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xe2, 0xe8, 0xe9, 0x5d, 0x7f, 0x84, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46,
+    0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40,
+    0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39,
+    0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61,
+    0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e,
+    0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b,
+    0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57,
+    0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53,
+    0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d,
+    0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47,
+    0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f,
+    0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37,
+    0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e,
+    0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23,
+    0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x00, 0x11, 0x14, 0x26, 0x41, 0x46, 0xfd, 0xfe, 0xfe,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xc7, 0xda, 0xdd, 0x71, 0xa1, 0xa8, 0x6e, 0x9f, 0xa6, 0x6c, 0x9c, 0xa4, 0x69, 0x9a, 0xa2,
+    0x66, 0x98, 0xa0, 0x62, 0x95, 0x9d, 0x60, 0x94, 0x9c, 0x5d, 0x91, 0x99, 0x59, 0x8f, 0x96, 0x57, 0x8c, 0x95, 0x54, 0x8a, 0x92,
+    0x50, 0x87, 0x90, 0x4d, 0x84, 0x8d, 0x5b, 0x8d, 0x95, 0xc3, 0xd5, 0xd8, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x9f, 0xb3, 0xb6, 0x1a, 0x4a, 0x50, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44,
+    0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d,
+    0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36,
+    0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62,
+    0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60,
+    0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d,
+    0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a,
+    0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56,
+    0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51,
+    0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b,
+    0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44,
+    0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c,
+    0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33,
+    0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29,
+    0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e,
+    0x02, 0x18, 0x1c, 0x00, 0x15, 0x19, 0x04, 0x18, 0x1b, 0xb6, 0xc3, 0xc4, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf5, 0xf8, 0xf9,
+    0x85, 0xad, 0xb4, 0x72, 0xa1, 0xa9, 0x6f, 0x9f, 0xa6, 0x6c, 0x9c, 0xa4, 0x69, 0x9a, 0xa2, 0x66, 0x97, 0x9f, 0x63, 0x96, 0x9e,
+    0x60, 0x92, 0x9b, 0x5c, 0x91, 0x98, 0x59, 0x8e, 0x96, 0x56, 0x8b, 0x93, 0x53, 0x89, 0x92, 0x74, 0x9f, 0xa6, 0xe4, 0xec, 0xed,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf9, 0xfb, 0xfb, 0xeb, 0xf0, 0xf1, 0xd6, 0xe1, 0xe2, 0xc4, 0xd4, 0xd6, 0xba, 0xcc, 0xcf,
+    0xb8, 0xcb, 0xce, 0xc0, 0xd0, 0xd3, 0xce, 0xdb, 0xdd, 0xe6, 0xec, 0xed, 0xf5, 0xf7, 0xf8, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xcf, 0xd9, 0xda, 0x34, 0x5d, 0x63, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41,
+    0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a,
+    0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30,
+    0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61,
+    0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f,
+    0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c,
+    0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58,
+    0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54,
+    0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e,
+    0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48,
+    0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41,
+    0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38,
+    0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f,
+    0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24,
+    0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x00, 0x0f, 0x12,
+    0x4b, 0x65, 0x69, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xb6, 0xcf, 0xd3, 0x76, 0xa4, 0xab, 0x72, 0xa1, 0xa8,
+    0x6f, 0x9e, 0xa6, 0x6c, 0x9c, 0xa4, 0x69, 0x99, 0xa1, 0x66, 0x97, 0x9f, 0x63, 0x95, 0x9d, 0x5f, 0x92, 0x9a, 0x5c, 0x90, 0x98,
+    0x58, 0x8d, 0x95, 0x88, 0xae, 0xb3, 0xf3, 0xf7, 0xf8, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfb, 0xfc, 0xfc, 0xd5, 0xe1, 0xe3, 0x9d, 0xb9, 0xbd, 0x70, 0x97, 0x9d, 0x4a, 0x7b, 0x83,
+    0x2d, 0x65, 0x6f, 0x24, 0x5f, 0x67, 0x1f, 0x5a, 0x64, 0x1d, 0x59, 0x62, 0x1b, 0x56, 0x60, 0x1a, 0x55, 0x5e, 0x1c, 0x56, 0x5f,
+    0x20, 0x58, 0x62, 0x37, 0x69, 0x71, 0x5c, 0x84, 0x8a, 0x89, 0xa5, 0xaa, 0xc2, 0xd1, 0xd4, 0xf5, 0xf7, 0xf8, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xe7, 0xec, 0xed, 0x4a, 0x6e, 0x74,
+    0x11, 0x3f, 0x47, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e,
+    0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37,
+    0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62,
+    0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60,
+    0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e,
+    0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a,
+    0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56,
+    0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51,
+    0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c,
+    0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45,
+    0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d,
+    0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34,
+    0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a,
+    0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f,
+    0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x00, 0x11, 0x14, 0x10, 0x28, 0x2c, 0xeb, 0xee, 0xef, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xe6, 0xee, 0xef, 0x7d, 0xa9, 0xb0, 0x76, 0xa3, 0xaa, 0x73, 0xa1, 0xa8, 0x6f, 0x9e, 0xa6, 0x6c, 0x9c, 0xa3,
+    0x6a, 0x9a, 0xa2, 0x66, 0x97, 0x9f, 0x62, 0x94, 0x9c, 0x5f, 0x92, 0x9a, 0x94, 0xb6, 0xbb, 0xfb, 0xfc, 0xfd, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xe2, 0xea, 0xec, 0x99, 0xb6, 0xba, 0x53, 0x83, 0x8b,
+    0x2c, 0x67, 0x70, 0x28, 0x64, 0x6d, 0x26, 0x61, 0x6b, 0x25, 0x5f, 0x69, 0x24, 0x5f, 0x68, 0x21, 0x5c, 0x65, 0x20, 0x5b, 0x64,
+    0x1d, 0x59, 0x62, 0x1c, 0x57, 0x61, 0x1b, 0x55, 0x5f, 0x19, 0x54, 0x5d, 0x18, 0x52, 0x5c, 0x17, 0x51, 0x5a, 0x17, 0x50, 0x59,
+    0x16, 0x4f, 0x58, 0x15, 0x4e, 0x57, 0x32, 0x64, 0x6b, 0x7a, 0x99, 0x9e, 0xd0, 0xdb, 0xdd, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xee, 0xf1, 0xf2, 0x8d, 0xa3, 0xa7, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42,
+    0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b,
+    0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34,
+    0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62,
+    0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f,
+    0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c,
+    0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59,
+    0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55,
+    0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f,
+    0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49,
+    0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42,
+    0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a,
+    0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30,
+    0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26,
+    0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a,
+    0x01, 0x14, 0x17, 0x00, 0x12, 0x15, 0x91, 0xa4, 0xa7, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x9f, 0xc0, 0xc5,
+    0x79, 0xa6, 0xad, 0x76, 0xa3, 0xaa, 0x73, 0xa1, 0xa8, 0x6f, 0x9e, 0xa5, 0x6c, 0x9b, 0xa3, 0x68, 0x98, 0xa0, 0x64, 0x96, 0x9d,
+    0x98, 0xb9, 0xbe, 0xfc, 0xfd, 0xfd, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xdd, 0xe7, 0xe8,
+    0x82, 0xa6, 0xac, 0x3d, 0x73, 0x7c, 0x2f, 0x6a, 0x73, 0x2d, 0x68, 0x72, 0x2b, 0x67, 0x70, 0x29, 0x65, 0x6e, 0x26, 0x62, 0x6c,
+    0x25, 0x5f, 0x69, 0x24, 0x5f, 0x68, 0x22, 0x5c, 0x66, 0x20, 0x5b, 0x64, 0x1e, 0x59, 0x63, 0x1d, 0x58, 0x61, 0x1b, 0x55, 0x5f,
+    0x1a, 0x55, 0x5d, 0x19, 0x53, 0x5d, 0x18, 0x51, 0x5b, 0x17, 0x51, 0x59, 0x16, 0x4f, 0x58, 0x15, 0x4e, 0x57, 0x14, 0x4d, 0x55,
+    0x14, 0x4c, 0x54, 0x17, 0x4d, 0x56, 0x59, 0x7f, 0x85, 0xc5, 0xd3, 0xd4, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xb0, 0xc0, 0xc2, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f,
+    0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38,
+    0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63,
+    0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61,
+    0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e,
+    0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b,
+    0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57,
+    0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52,
+    0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d,
+    0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46,
+    0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e,
+    0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36,
+    0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c,
+    0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21,
+    0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x00, 0x0e, 0x10, 0x3a, 0x54, 0x59,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xcd, 0xde, 0xe1, 0x7c, 0xa8, 0xae, 0x79, 0xa5, 0xac, 0x76, 0xa3, 0xaa,
+    0x72, 0xa0, 0xa7, 0x6e, 0x9d, 0xa4, 0x6c, 0x9b, 0xa2, 0x92, 0xb5, 0xba, 0xfa, 0xfc, 0xfc, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xee, 0xf3, 0xf4, 0x91, 0xb1, 0xb6, 0x41, 0x78, 0x81, 0x36, 0x70, 0x79, 0x33, 0x6d, 0x76, 0x31, 0x6c, 0x74,
+    0x2e, 0x69, 0x72, 0x2c, 0x67, 0x70, 0x29, 0x65, 0x6e, 0x27, 0x62, 0x6c, 0x26, 0x60, 0x6a, 0x24, 0x5f, 0x68, 0x22, 0x5c, 0x66,
+    0x21, 0x5c, 0x65, 0x1e, 0x59, 0x63, 0x1d, 0x58, 0x61, 0x1b, 0x55, 0x5f, 0x1a, 0x55, 0x5d, 0x19, 0x53, 0x5d, 0x18, 0x51, 0x5b,
+    0x17, 0x51, 0x59, 0x16, 0x4f, 0x58, 0x15, 0x4e, 0x57, 0x14, 0x4d, 0x55, 0x14, 0x4c, 0x54, 0x13, 0x4a, 0x53, 0x13, 0x49, 0x52,
+    0x14, 0x49, 0x51, 0x63, 0x86, 0x8c, 0xde, 0xe5, 0xe7, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xac, 0xbc, 0xbf, 0x0d, 0x3b, 0x43,
+    0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c,
+    0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35,
+    0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62,
+    0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60,
+    0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d,
+    0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59,
+    0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55,
+    0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50,
+    0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a,
+    0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43,
+    0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b,
+    0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32,
+    0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27,
+    0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c,
+    0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x00, 0x11, 0x12, 0x0e, 0x25, 0x28, 0xe6, 0xec, 0xec, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xf0, 0xf5, 0xf6, 0x8a, 0xb1, 0xb7, 0x7c, 0xa7, 0xae, 0x78, 0xa5, 0xab, 0x75, 0xa1, 0xa9, 0x72, 0xa0, 0xa7, 0x86, 0xad, 0xb3,
+    0xf2, 0xf6, 0xf7, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xc3, 0xd5, 0xd8, 0x57, 0x89, 0x91, 0x3e, 0x76, 0x7f,
+    0x3b, 0x74, 0x7d, 0x38, 0x71, 0x7a, 0x35, 0x6e, 0x78, 0x32, 0x6c, 0x75, 0x2f, 0x69, 0x73, 0x2d, 0x68, 0x71, 0x2a, 0x65, 0x6e,
+    0x28, 0x63, 0x6d, 0x26, 0x61, 0x6b, 0x25, 0x5f, 0x69, 0x23, 0x5d, 0x66, 0x22, 0x5c, 0x66, 0x1f, 0x5a, 0x63, 0x1e, 0x58, 0x62,
+    0x1c, 0x56, 0x60, 0x1b, 0x55, 0x5e, 0x19, 0x53, 0x5d, 0x18, 0x51, 0x5b, 0x17, 0x51, 0x59, 0x16, 0x4f, 0x58, 0x15, 0x4e, 0x57,
+    0x15, 0x4d, 0x56, 0x14, 0x4c, 0x54, 0x13, 0x4a, 0x53, 0x13, 0x49, 0x52, 0x12, 0x48, 0x50, 0x11, 0x47, 0x50, 0x22, 0x53, 0x5b,
+    0xa0, 0xb5, 0xb8, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xac, 0xbc, 0xbf, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40,
+    0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39,
+    0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61,
+    0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e,
+    0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b,
+    0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57,
+    0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53,
+    0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d,
+    0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47,
+    0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f,
+    0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37,
+    0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e,
+    0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23,
+    0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x01, 0x13, 0x16,
+    0x00, 0x11, 0x14, 0x9b, 0xac, 0xae, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xa9, 0xc5, 0xca, 0x7f, 0xa9, 0xaf,
+    0x7c, 0xa7, 0xae, 0x78, 0xa4, 0xab, 0x7e, 0xa7, 0xaf, 0xe1, 0xea, 0xec, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfb, 0xfc, 0xfc,
+    0x98, 0xb7, 0xbc, 0x44, 0x7b, 0x85, 0x42, 0x7a, 0x83, 0x3f, 0x77, 0x80, 0x3c, 0x75, 0x7e, 0x39, 0x72, 0x7b, 0x36, 0x6f, 0x78,
+    0x33, 0x6d, 0x76, 0x31, 0x6b, 0x74, 0x2e, 0x69, 0x72, 0x2b, 0x66, 0x6f, 0x29, 0x64, 0x6e, 0x27, 0x61, 0x6b, 0x26, 0x60, 0x69,
+    0x24, 0x5e, 0x67, 0x22, 0x5c, 0x66, 0x1f, 0x5a, 0x63, 0x1e, 0x58, 0x62, 0x1d, 0x57, 0x60, 0x1b, 0x55, 0x5e, 0x1a, 0x54, 0x5d,
+    0x19, 0x52, 0x5c, 0x18, 0x51, 0x5a, 0x17, 0x50, 0x58, 0x16, 0x4f, 0x58, 0x15, 0x4d, 0x56, 0x14, 0x4c, 0x54, 0x14, 0x4b, 0x53,
+    0x13, 0x49, 0x52, 0x12, 0x48, 0x50, 0x11, 0x47, 0x50, 0x10, 0x45, 0x4e, 0x10, 0x44, 0x4d, 0x67, 0x88, 0x8d, 0xf2, 0xf5, 0xf5,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xac, 0xbc, 0xbf,
+    0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d,
+    0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36,
+    0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62,
+    0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60,
+    0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d,
+    0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a,
+    0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56,
+    0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51,
+    0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b,
+    0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44,
+    0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c,
+    0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33,
+    0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29,
+    0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e,
+    0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x00, 0x0d, 0x0f, 0x47, 0x61, 0x65, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xd0, 0xe0, 0xe2, 0x83, 0xab, 0xb2, 0x7f, 0xa9, 0xaf, 0x7b, 0xa6, 0xad, 0xcb, 0xdb, 0xde,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf1, 0xf5, 0xf6, 0x7d, 0xa5, 0xab, 0x4b, 0x81, 0x8a, 0x46, 0x7d, 0x86, 0x43, 0x7b, 0x83,
+    0x40, 0x78, 0x81, 0x3e, 0x76, 0x7f, 0x3b, 0x73, 0x7c, 0x38, 0x70, 0x7a, 0x34, 0x6e, 0x77, 0x32, 0x6b, 0x75, 0x2f, 0x69, 0x72,
+    0x2c, 0x67, 0x6f, 0x2a, 0x64, 0x6e, 0x28, 0x62, 0x6c, 0x26, 0x60, 0x69, 0x24, 0x5e, 0x67, 0x23, 0x5d, 0x66, 0x20, 0x5b, 0x64,
+    0x1e, 0x58, 0x62, 0x1d, 0x57, 0x60, 0x1c, 0x56, 0x5f, 0x1a, 0x54, 0x5d, 0x19, 0x52, 0x5c, 0x18, 0x51, 0x5a, 0x16, 0x4f, 0x58,
+    0x16, 0x4f, 0x58, 0x15, 0x4d, 0x56, 0x14, 0x4c, 0x54, 0x14, 0x4b, 0x53, 0x13, 0x49, 0x52, 0x13, 0x49, 0x51, 0x11, 0x47, 0x50,
+    0x11, 0x46, 0x4f, 0x10, 0x44, 0x4d, 0x10, 0x44, 0x4c, 0x45, 0x6d, 0x73, 0xe4, 0xe9, 0xea, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xac, 0xbc, 0xbf, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41,
+    0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a,
+    0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30,
+    0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61,
+    0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f,
+    0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c,
+    0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58,
+    0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54,
+    0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e,
+    0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48,
+    0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41,
+    0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38,
+    0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f,
+    0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24,
+    0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18,
+    0x02, 0x13, 0x16, 0x00, 0x0d, 0x10, 0x1a, 0x32, 0x36, 0xfb, 0xfd, 0xfd, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xec, 0xf3, 0xf4,
+    0x8c, 0xb2, 0xb7, 0x82, 0xab, 0xb1, 0xac, 0xc7, 0xcb, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xed, 0xf2, 0xf3, 0x76, 0x9f, 0xa6,
+    0x4e, 0x84, 0x8c, 0x4c, 0x81, 0x8a, 0x48, 0x7e, 0x87, 0x45, 0x7c, 0x85, 0x42, 0x79, 0x82, 0x3f, 0x77, 0x80, 0x3c, 0x74, 0x7d,
+    0x39, 0x71, 0x7a, 0x35, 0x6f, 0x77, 0x33, 0x6c, 0x76, 0x30, 0x6a, 0x73, 0x2e, 0x68, 0x71, 0x2b, 0x65, 0x6f, 0x29, 0x63, 0x6d,
+    0x27, 0x61, 0x6a, 0x25, 0x5e, 0x68, 0x23, 0x5d, 0x66, 0x20, 0x5b, 0x64, 0x1f, 0x59, 0x62, 0x1e, 0x57, 0x61, 0x1c, 0x56, 0x5f,
+    0x1b, 0x54, 0x5e, 0x19, 0x52, 0x5c, 0x18, 0x51, 0x5a, 0x17, 0x50, 0x58, 0x16, 0x4f, 0x58, 0x15, 0x4d, 0x56, 0x15, 0x4c, 0x55,
+    0x14, 0x4b, 0x53, 0x13, 0x49, 0x52, 0x13, 0x49, 0x51, 0x11, 0x47, 0x50, 0x11, 0x46, 0x4f, 0x10, 0x44, 0x4d, 0x10, 0x44, 0x4c,
+    0x10, 0x43, 0x4b, 0x38, 0x62, 0x68, 0xdf, 0xe6, 0xe7, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xac, 0xbc, 0xbf, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e,
+    0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37,
+    0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62,
+    0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60,
+    0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e,
+    0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a,
+    0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56,
+    0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51,
+    0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c,
+    0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45,
+    0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d,
+    0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34,
+    0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a,
+    0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f,
+    0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x00, 0x11, 0x13, 0x04, 0x16, 0x19,
+    0xcb, 0xd4, 0xd6, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x9f, 0xbe, 0xc3, 0x8d, 0xb3, 0xb9, 0xf1, 0xf6, 0xf6,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xef, 0xf4, 0xf5, 0x79, 0xa2, 0xa8, 0x55, 0x88, 0x91, 0x50, 0x85, 0x8d, 0x4d, 0x82, 0x8b, 0x4a, 0x7f, 0x88,
+    0x46, 0x7d, 0x85, 0x43, 0x7a, 0x83, 0x40, 0x78, 0x81, 0x3d, 0x74, 0x7e, 0x3a, 0x72, 0x7b, 0x36, 0x70, 0x78, 0x34, 0x6d, 0x76,
+    0x31, 0x6b, 0x73, 0x2e, 0x68, 0x71, 0x2c, 0x66, 0x6f, 0x2a, 0x63, 0x6d, 0x28, 0x62, 0x6b, 0x26, 0x5f, 0x68, 0x24, 0x5e, 0x67,
+    0x22, 0x5c, 0x65, 0x1f, 0x59, 0x62, 0x1e, 0x57, 0x61, 0x1d, 0x57, 0x5f, 0x1b, 0x54, 0x5e, 0x1a, 0x53, 0x5c, 0x19, 0x52, 0x5b,
+    0x17, 0x50, 0x59, 0x17, 0x50, 0x58, 0x16, 0x4e, 0x57, 0x15, 0x4c, 0x55, 0x14, 0x4b, 0x53, 0x13, 0x49, 0x52, 0x13, 0x49, 0x51,
+    0x11, 0x47, 0x50, 0x11, 0x46, 0x4f, 0x10, 0x44, 0x4d, 0x10, 0x44, 0x4c, 0x10, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x3a, 0x63, 0x6a,
+    0xe7, 0xec, 0xed, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xac, 0xbc, 0xbf, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42,
+    0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b,
+    0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34,
+    0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62,
+    0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f,
+    0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c,
+    0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59,
+    0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55,
+    0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f,
+    0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49,
+    0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42,
+    0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a,
+    0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30,
+    0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26,
+    0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a,
+    0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x01, 0x12, 0x14, 0x00, 0x0d, 0x0f, 0x80, 0x94, 0x97, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xbe, 0xd3, 0xd7, 0xc7, 0xda, 0xdc, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf8, 0xfa, 0xfb, 0x86, 0xab, 0xb1, 0x5a, 0x8d, 0x94,
+    0x57, 0x89, 0x92, 0x52, 0x87, 0x8f, 0x4e, 0x83, 0x8c, 0x4b, 0x80, 0x89, 0x48, 0x7e, 0x86, 0x44, 0x7b, 0x83, 0x41, 0x79, 0x81,
+    0x3f, 0x76, 0x7f, 0x3c, 0x73, 0x7c, 0x37, 0x70, 0x79, 0x34, 0x6d, 0x77, 0x32, 0x6b, 0x74, 0x2f, 0x69, 0x71, 0x2d, 0x66, 0x70,
+    0x2a, 0x63, 0x6d, 0x28, 0x62, 0x6b, 0x26, 0x5f, 0x68, 0x24, 0x5e, 0x67, 0x22, 0x5c, 0x65, 0x20, 0x5a, 0x63, 0x1e, 0x57, 0x61,
+    0x1d, 0x57, 0x5f, 0x1c, 0x55, 0x5f, 0x1a, 0x53, 0x5c, 0x19, 0x52, 0x5b, 0x17, 0x50, 0x59, 0x17, 0x50, 0x58, 0x16, 0x4e, 0x57,
+    0x15, 0x4c, 0x55, 0x14, 0x4b, 0x53, 0x14, 0x4a, 0x52, 0x13, 0x49, 0x51, 0x11, 0x47, 0x50, 0x11, 0x46, 0x4f, 0x10, 0x44, 0x4d,
+    0x10, 0x44, 0x4c, 0x10, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x4e, 0x72, 0x78, 0xf6, 0xf8, 0xf8, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xac, 0xbc, 0xbf, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f,
+    0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38,
+    0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63,
+    0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61,
+    0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e,
+    0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b,
+    0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57,
+    0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52,
+    0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d,
+    0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46,
+    0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e,
+    0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36,
+    0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c,
+    0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21,
+    0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x01, 0x12, 0x14,
+    0x00, 0x0b, 0x0d, 0x41, 0x5a, 0x5e, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf1, 0xf6, 0xf6, 0xf6, 0xf9, 0xf9,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xa0, 0xbd, 0xc1, 0x60, 0x90, 0x98, 0x5c, 0x8e, 0x95, 0x58, 0x8b, 0x93, 0x54, 0x88, 0x90, 0x50, 0x85, 0x8d,
+    0x4d, 0x81, 0x8a, 0x49, 0x7f, 0x87, 0x46, 0x7c, 0x84, 0x42, 0x79, 0x82, 0x40, 0x76, 0x7f, 0x3d, 0x74, 0x7d, 0x38, 0x71, 0x79,
+    0x35, 0x6e, 0x77, 0x33, 0x6c, 0x75, 0x30, 0x69, 0x72, 0x2d, 0x66, 0x70, 0x2b, 0x64, 0x6e, 0x29, 0x63, 0x6c, 0x26, 0x60, 0x69,
+    0x25, 0x5e, 0x68, 0x23, 0x5d, 0x66, 0x20, 0x5a, 0x63, 0x1f, 0x58, 0x62, 0x1e, 0x57, 0x60, 0x1c, 0x55, 0x5f, 0x1a, 0x53, 0x5c,
+    0x19, 0x52, 0x5b, 0x17, 0x50, 0x59, 0x17, 0x50, 0x58, 0x16, 0x4e, 0x57, 0x15, 0x4c, 0x55, 0x14, 0x4b, 0x53, 0x14, 0x4a, 0x52,
+    0x13, 0x49, 0x51, 0x12, 0x48, 0x51, 0x11, 0x46, 0x4f, 0x11, 0x45, 0x4e, 0x10, 0x44, 0x4c, 0x0f, 0x43, 0x4b, 0x0e, 0x41, 0x49,
+    0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x75, 0x92, 0x96, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xac, 0xbc, 0xbf, 0x0d, 0x3b, 0x43,
+    0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c,
+    0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35,
+    0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62,
+    0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60,
+    0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d,
+    0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59,
+    0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55,
+    0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50,
+    0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a,
+    0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43,
+    0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b,
+    0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32,
+    0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27,
+    0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c,
+    0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x01, 0x12, 0x14, 0x00, 0x0b, 0x0d, 0x1f, 0x37, 0x3a, 0xfd, 0xfe, 0xfe,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xc7, 0xd8, 0xdb, 0x66, 0x96, 0x9d, 0x62, 0x92, 0x99,
+    0x5e, 0x8f, 0x97, 0x5a, 0x8c, 0x94, 0x56, 0x89, 0x91, 0x52, 0x86, 0x8f, 0x4e, 0x83, 0x8c, 0x4b, 0x80, 0x88, 0x47, 0x7d, 0x85,
+    0x43, 0x7a, 0x83, 0x40, 0x77, 0x80, 0x3e, 0x74, 0x7d, 0x39, 0x72, 0x7a, 0x36, 0x6f, 0x78, 0x34, 0x6d, 0x75, 0x31, 0x6a, 0x73,
+    0x2e, 0x67, 0x71, 0x2c, 0x65, 0x6f, 0x29, 0x63, 0x6c, 0x27, 0x60, 0x6a, 0x26, 0x5f, 0x68, 0x24, 0x5d, 0x67, 0x22, 0x5b, 0x64,
+    0x20, 0x59, 0x62, 0x1e, 0x57, 0x60, 0x1c, 0x55, 0x5f, 0x1b, 0x53, 0x5d, 0x19, 0x52, 0x5b, 0x17, 0x50, 0x59, 0x17, 0x50, 0x58,
+    0x16, 0x4e, 0x57, 0x15, 0x4c, 0x55, 0x15, 0x4b, 0x54, 0x14, 0x4a, 0x52, 0x13, 0x49, 0x51, 0x12, 0x48, 0x51, 0x11, 0x46, 0x4f,
+    0x11, 0x45, 0x4e, 0x10, 0x44, 0x4c, 0x0f, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46,
+    0xb4, 0xc3, 0xc5, 0xff, 0xff, 0xff, 0xae, 0xbe, 0xc1, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40,
+    0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39,
+    0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61,
+    0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e,
+    0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b,
+    0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57,
+    0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53,
+    0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d,
+    0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47,
+    0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f,
+    0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37,
+    0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e,
+    0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23,
+    0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x02, 0x13, 0x16,
+    0x01, 0x12, 0x14, 0x00, 0x0e, 0x10, 0x0a, 0x1c, 0x1f, 0xdb, 0xe1, 0xe2, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xf0, 0xf5, 0xf6, 0x7b, 0xa3, 0xab, 0x68, 0x97, 0x9e, 0x64, 0x93, 0x9a, 0x60, 0x90, 0x98, 0x5b, 0x8c, 0x95, 0x57, 0x8a, 0x92,
+    0x53, 0x87, 0x8f, 0x4f, 0x83, 0x8c, 0x4c, 0x81, 0x89, 0x48, 0x7d, 0x86, 0x44, 0x7b, 0x83, 0x41, 0x78, 0x81, 0x3f, 0x75, 0x7e,
+    0x3a, 0x72, 0x7b, 0x37, 0x6f, 0x79, 0x34, 0x6d, 0x76, 0x32, 0x6b, 0x73, 0x2f, 0x68, 0x71, 0x2c, 0x65, 0x6f, 0x2a, 0x63, 0x6c,
+    0x28, 0x61, 0x6a, 0x26, 0x5f, 0x68, 0x24, 0x5d, 0x67, 0x22, 0x5b, 0x64, 0x20, 0x59, 0x62, 0x1e, 0x57, 0x60, 0x1d, 0x56, 0x5f,
+    0x1b, 0x53, 0x5d, 0x1a, 0x53, 0x5b, 0x18, 0x51, 0x5a, 0x17, 0x50, 0x59, 0x17, 0x4f, 0x57, 0x16, 0x4d, 0x56, 0x15, 0x4b, 0x54,
+    0x14, 0x4a, 0x52, 0x13, 0x49, 0x51, 0x12, 0x48, 0x51, 0x11, 0x46, 0x4f, 0x11, 0x45, 0x4e, 0x10, 0x44, 0x4c, 0x0f, 0x43, 0x4b,
+    0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x32, 0x5c, 0x61, 0xee, 0xf1, 0xf2, 0xb8, 0xc6, 0xc8,
+    0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d,
+    0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36,
+    0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62,
+    0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60,
+    0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d,
+    0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a,
+    0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56,
+    0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51,
+    0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b,
+    0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44,
+    0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c,
+    0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33,
+    0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29,
+    0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e,
+    0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x01, 0x12, 0x14, 0x01, 0x10, 0x12, 0x01, 0x0e, 0x11,
+    0xb5, 0xc1, 0xc3, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xae, 0xc8, 0xcc, 0x6e, 0x9b, 0xa2, 0x69, 0x97, 0x9e,
+    0x65, 0x94, 0x9b, 0x61, 0x91, 0x98, 0x5d, 0x8e, 0x96, 0x59, 0x8b, 0x93, 0x55, 0x88, 0x90, 0x50, 0x84, 0x8d, 0x4d, 0x81, 0x8a,
+    0x49, 0x7e, 0x86, 0x46, 0x7c, 0x84, 0x42, 0x78, 0x81, 0x40, 0x76, 0x7f, 0x3c, 0x74, 0x7c, 0x38, 0x70, 0x79, 0x35, 0x6e, 0x77,
+    0x33, 0x6b, 0x74, 0x30, 0x68, 0x72, 0x2e, 0x66, 0x70, 0x2b, 0x64, 0x6d, 0x28, 0x61, 0x6a, 0x26, 0x60, 0x69, 0x25, 0x5e, 0x67,
+    0x23, 0x5c, 0x65, 0x21, 0x5a, 0x63, 0x1f, 0x58, 0x61, 0x1d, 0x56, 0x5f, 0x1b, 0x53, 0x5d, 0x1a, 0x53, 0x5b, 0x18, 0x51, 0x5a,
+    0x17, 0x50, 0x59, 0x17, 0x4f, 0x57, 0x16, 0x4d, 0x56, 0x15, 0x4b, 0x54, 0x14, 0x4a, 0x52, 0x13, 0x49, 0x51, 0x12, 0x48, 0x51,
+    0x11, 0x46, 0x4f, 0x11, 0x45, 0x4e, 0x10, 0x44, 0x4c, 0x0f, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47,
+    0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x85, 0x9d, 0xa1, 0x98, 0xac, 0xaf, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41,
+    0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a,
+    0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30,
+    0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61,
+    0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f,
+    0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c,
+    0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58,
+    0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54,
+    0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e,
+    0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48,
+    0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41,
+    0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38,
+    0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f,
+    0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24,
+    0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18,
+    0x02, 0x13, 0x16, 0x01, 0x12, 0x14, 0x01, 0x10, 0x12, 0x00, 0x0c, 0x0e, 0x79, 0x8e, 0x91, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xe9, 0xf0, 0xf1, 0x7a, 0xa4, 0xaa, 0x70, 0x9c, 0xa3, 0x6b, 0x99, 0xa0, 0x66, 0x95, 0x9c, 0x63, 0x92, 0x9a, 0x5f, 0x8f, 0x97,
+    0x5a, 0x8c, 0x94, 0x56, 0x89, 0x91, 0x52, 0x85, 0x8e, 0x4e, 0x83, 0x8b, 0x4b, 0x7f, 0x88, 0x47, 0x7d, 0x85, 0x43, 0x79, 0x82,
+    0x40, 0x76, 0x7f, 0x3d, 0x74, 0x7c, 0x39, 0x71, 0x7a, 0x36, 0x6f, 0x77, 0x34, 0x6c, 0x75, 0x31, 0x69, 0x73, 0x2e, 0x66, 0x70,
+    0x2c, 0x65, 0x6e, 0x29, 0x62, 0x6b, 0x26, 0x60, 0x69, 0x25, 0x5e, 0x67, 0x23, 0x5c, 0x65, 0x21, 0x5a, 0x63, 0x1f, 0x58, 0x61,
+    0x1d, 0x56, 0x5f, 0x1c, 0x54, 0x5e, 0x1a, 0x53, 0x5b, 0x18, 0x51, 0x5a, 0x17, 0x50, 0x59, 0x17, 0x4f, 0x57, 0x16, 0x4d, 0x56,
+    0x15, 0x4b, 0x54, 0x14, 0x4a, 0x52, 0x14, 0x4a, 0x51, 0x12, 0x48, 0x51, 0x11, 0x46, 0x4f, 0x11, 0x45, 0x4e, 0x10, 0x44, 0x4c,
+    0x0f, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x23, 0x4e, 0x56,
+    0x36, 0x5d, 0x64, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e,
+    0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37,
+    0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62,
+    0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60,
+    0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e,
+    0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a,
+    0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56,
+    0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51,
+    0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c,
+    0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45,
+    0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d,
+    0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34,
+    0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a,
+    0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f,
+    0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x01, 0x12, 0x14, 0x01, 0x10, 0x13,
+    0x00, 0x0a, 0x0c, 0x4a, 0x63, 0x67, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xb2, 0xca, 0xce, 0x75, 0xa0, 0xa7, 0x70, 0x9d, 0xa4,
+    0x6d, 0x9a, 0xa1, 0x68, 0x96, 0x9d, 0x64, 0x94, 0x9b, 0x60, 0x90, 0x98, 0x5b, 0x8d, 0x95, 0x57, 0x89, 0x92, 0x53, 0x86, 0x8f,
+    0x4f, 0x83, 0x8b, 0x4c, 0x80, 0x88, 0x48, 0x7d, 0x86, 0x45, 0x7a, 0x83, 0x40, 0x76, 0x7f, 0x3d, 0x74, 0x7c, 0x3a, 0x72, 0x7b,
+    0x37, 0x6f, 0x78, 0x34, 0x6c, 0x75, 0x31, 0x69, 0x73, 0x2f, 0x67, 0x70, 0x2c, 0x65, 0x6e, 0x29, 0x62, 0x6b, 0x28, 0x61, 0x6a,
+    0x25, 0x5e, 0x67, 0x22, 0x5b, 0x64, 0x21, 0x5a, 0x63, 0x20, 0x59, 0x61, 0x1e, 0x57, 0x60, 0x1c, 0x54, 0x5e, 0x1b, 0x53, 0x5c,
+    0x18, 0x51, 0x5a, 0x17, 0x50, 0x59, 0x17, 0x4f, 0x57, 0x16, 0x4d, 0x56, 0x15, 0x4b, 0x54, 0x14, 0x4a, 0x52, 0x14, 0x4a, 0x51,
+    0x12, 0x48, 0x51, 0x11, 0x46, 0x4f, 0x11, 0x45, 0x4e, 0x11, 0x45, 0x4d, 0x10, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48,
+    0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42,
+    0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b,
+    0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34,
+    0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62,
+    0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f,
+    0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c,
+    0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59,
+    0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55,
+    0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f,
+    0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49,
+    0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42,
+    0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a,
+    0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30,
+    0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26,
+    0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a,
+    0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x01, 0x12, 0x14, 0x01, 0x10, 0x13, 0x00, 0x0a, 0x0c, 0x2e, 0x47, 0x4b, 0xfe, 0xfe, 0xfe,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xf1, 0xf5, 0xf6, 0x88, 0xad, 0xb3, 0x77, 0xa2, 0xa9, 0x72, 0x9e, 0xa5, 0x6e, 0x9b, 0xa1, 0x6a, 0x97, 0x9e, 0x65, 0x94, 0x9b,
+    0x62, 0x91, 0x99, 0x5c, 0x8e, 0x95, 0x58, 0x8a, 0x92, 0x54, 0x86, 0x8f, 0x50, 0x84, 0x8c, 0x4d, 0x81, 0x89, 0x49, 0x7e, 0x86,
+    0x46, 0x7b, 0x84, 0x42, 0x78, 0x80, 0x3e, 0x75, 0x7d, 0x3b, 0x72, 0x7b, 0x37, 0x6f, 0x78, 0x34, 0x6d, 0x75, 0x32, 0x6a, 0x73,
+    0x2f, 0x67, 0x70, 0x2c, 0x65, 0x6e, 0x2a, 0x63, 0x6b, 0x28, 0x61, 0x6a, 0x25, 0x5e, 0x67, 0x24, 0x5d, 0x66, 0x22, 0x5a, 0x64,
+    0x20, 0x59, 0x61, 0x1e, 0x57, 0x60, 0x1d, 0x55, 0x5e, 0x1b, 0x53, 0x5c, 0x18, 0x51, 0x5a, 0x17, 0x50, 0x59, 0x17, 0x4f, 0x57,
+    0x16, 0x4d, 0x56, 0x15, 0x4b, 0x54, 0x15, 0x4a, 0x53, 0x14, 0x4a, 0x51, 0x12, 0x48, 0x51, 0x12, 0x47, 0x50, 0x11, 0x45, 0x4e,
+    0x11, 0x45, 0x4d, 0x10, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45,
+    0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f,
+    0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38,
+    0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63,
+    0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61,
+    0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e,
+    0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b,
+    0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57,
+    0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52,
+    0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d,
+    0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46,
+    0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e,
+    0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36,
+    0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c,
+    0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21,
+    0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x01, 0x12, 0x14,
+    0x01, 0x10, 0x13, 0x00, 0x0a, 0x0c, 0x1d, 0x34, 0x37, 0xf5, 0xf6, 0xf6, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xcd, 0xdd, 0xe0, 0x7d, 0xa6, 0xac, 0x78, 0xa2, 0xa9,
+    0x73, 0x9e, 0xa6, 0x70, 0x9c, 0xa3, 0x6b, 0x98, 0x9f, 0x66, 0x95, 0x9c, 0x63, 0x92, 0x9a, 0x5d, 0x8e, 0x96, 0x5a, 0x8b, 0x94,
+    0x55, 0x87, 0x90, 0x51, 0x85, 0x8d, 0x4d, 0x81, 0x89, 0x4a, 0x7f, 0x87, 0x47, 0x7c, 0x84, 0x43, 0x78, 0x81, 0x3f, 0x76, 0x7e,
+    0x3b, 0x72, 0x7b, 0x38, 0x70, 0x79, 0x35, 0x6d, 0x76, 0x33, 0x6a, 0x74, 0x30, 0x68, 0x71, 0x2d, 0x65, 0x6e, 0x2a, 0x63, 0x6b,
+    0x28, 0x61, 0x6a, 0x26, 0x5f, 0x68, 0x24, 0x5d, 0x66, 0x22, 0x5a, 0x64, 0x20, 0x59, 0x61, 0x1f, 0x57, 0x61, 0x1d, 0x55, 0x5e,
+    0x1b, 0x53, 0x5c, 0x19, 0x52, 0x5a, 0x18, 0x51, 0x5a, 0x17, 0x4f, 0x58, 0x17, 0x4e, 0x56, 0x16, 0x4c, 0x55, 0x15, 0x4a, 0x53,
+    0x14, 0x4a, 0x51, 0x12, 0x48, 0x51, 0x12, 0x47, 0x50, 0x11, 0x45, 0x4e, 0x11, 0x45, 0x4d, 0x10, 0x43, 0x4b, 0x0e, 0x41, 0x49,
+    0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43,
+    0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c,
+    0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35,
+    0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x8e, 0xb4, 0xba, 0xb4, 0xcd, 0xd1, 0x28, 0x72, 0x7c, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x21, 0x6c, 0x77, 0xb1, 0xcb, 0xcf, 0x94, 0xb7, 0xbd, 0x09, 0x5b, 0x67, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x51, 0x8a, 0x93, 0xbb, 0xd1, 0xd4,
+    0x64, 0x97, 0x9e, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62,
+    0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60,
+    0x5d, 0x90, 0x97, 0x0a, 0x57, 0x62, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d,
+    0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x44, 0x7c, 0x83, 0x8c, 0xae, 0xb3, 0x48, 0x7d, 0x85,
+    0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55,
+    0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50,
+    0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x36, 0x69, 0x70, 0x30, 0x63, 0x6b, 0x04, 0x41, 0x4a,
+    0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43,
+    0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b,
+    0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x10, 0x3c, 0x41, 0x97, 0xa9, 0xac, 0xb0, 0xbd, 0xbf, 0xaf, 0xbc, 0xbe, 0xaf, 0xbc, 0xbe,
+    0xaf, 0xbb, 0xbd, 0xaf, 0xbb, 0xbd, 0xb0, 0xbc, 0xbd, 0x98, 0xa7, 0xa9, 0x5c, 0x72, 0x75, 0x11, 0x30, 0x35, 0x02, 0x23, 0x27,
+    0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c,
+    0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x01, 0x12, 0x14, 0x01, 0x10, 0x13, 0x00, 0x0b, 0x0d, 0x10, 0x24, 0x27,
+    0xdc, 0xdf, 0xe0, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xa5, 0xc2, 0xc6, 0x7d, 0xa7, 0xad, 0x79, 0xa3, 0xaa, 0x75, 0xa0, 0xa7, 0x70, 0x9d, 0xa3, 0x6c, 0x99, 0xa0,
+    0x67, 0x96, 0x9d, 0x64, 0x92, 0x9a, 0x5e, 0x8f, 0x97, 0x5a, 0x8c, 0x94, 0x56, 0x88, 0x90, 0x52, 0x85, 0x8d, 0x4e, 0x82, 0x8a,
+    0x4b, 0x7f, 0x88, 0x48, 0x7c, 0x85, 0x44, 0x79, 0x82, 0x40, 0x76, 0x7e, 0x3c, 0x73, 0x7c, 0x39, 0x71, 0x79, 0x35, 0x6d, 0x76,
+    0x33, 0x6a, 0x74, 0x30, 0x68, 0x71, 0x2e, 0x66, 0x6f, 0x2b, 0x63, 0x6c, 0x28, 0x61, 0x6a, 0x26, 0x60, 0x69, 0x25, 0x5d, 0x66,
+    0x23, 0x5b, 0x64, 0x21, 0x5a, 0x62, 0x1f, 0x57, 0x61, 0x1d, 0x55, 0x5e, 0x1c, 0x54, 0x5d, 0x19, 0x52, 0x5a, 0x18, 0x51, 0x5a,
+    0x17, 0x4f, 0x58, 0x17, 0x4e, 0x56, 0x16, 0x4c, 0x55, 0x15, 0x4a, 0x53, 0x14, 0x4a, 0x51, 0x12, 0x48, 0x51, 0x12, 0x47, 0x50,
+    0x11, 0x45, 0x4e, 0x11, 0x45, 0x4d, 0x10, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46,
+    0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40,
+    0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39,
+    0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0xd7, 0xe5, 0xe7,
+    0xff, 0xff, 0xff, 0x3b, 0x7e, 0x87, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x30, 0x76, 0x80, 0xff, 0xff, 0xff,
+    0xe0, 0xea, 0xec, 0x0b, 0x5c, 0x68, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x79, 0xa5, 0xac, 0xff, 0xff, 0xff, 0x96, 0xb8, 0xbd, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61,
+    0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0xd0, 0xdf, 0xe1, 0x8e, 0xb1, 0xb6, 0x06, 0x53, 0x5e,
+    0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b,
+    0x05, 0x50, 0x5b, 0x84, 0xa9, 0xae, 0xff, 0xff, 0xff, 0x8a, 0xac, 0xb2, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57,
+    0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53,
+    0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d,
+    0x05, 0x43, 0x4d, 0xa2, 0xba, 0xbd, 0xa4, 0xbb, 0xbe, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47,
+    0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f,
+    0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x17, 0x41, 0x47,
+    0xe7, 0xeb, 0xec, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xd1, 0xd7, 0xd8, 0x3a, 0x53, 0x56, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23,
+    0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x02, 0x13, 0x16,
+    0x01, 0x12, 0x14, 0x01, 0x10, 0x13, 0x00, 0x0d, 0x0f, 0x07, 0x18, 0x1c, 0xc5, 0xcc, 0xcd, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf2, 0xf6, 0xf7, 0x90, 0xb3, 0xb8, 0x7f, 0xa8, 0xae,
+    0x7b, 0xa4, 0xab, 0x76, 0xa0, 0xa7, 0x71, 0x9d, 0xa4, 0x6d, 0x99, 0xa0, 0x68, 0x96, 0x9d, 0x64, 0x93, 0x9b, 0x5f, 0x90, 0x97,
+    0x5b, 0x8c, 0x95, 0x57, 0x88, 0x91, 0x53, 0x86, 0x8e, 0x4e, 0x82, 0x8a, 0x4b, 0x7f, 0x88, 0x48, 0x7c, 0x85, 0x43, 0x78, 0x81,
+    0x40, 0x76, 0x7e, 0x3d, 0x74, 0x7c, 0x39, 0x71, 0x79, 0x36, 0x6e, 0x76, 0x34, 0x6b, 0x75, 0x31, 0x68, 0x72, 0x2e, 0x66, 0x6f,
+    0x2b, 0x63, 0x6c, 0x29, 0x62, 0x6b, 0x26, 0x5f, 0x68, 0x25, 0x5d, 0x66, 0x23, 0x5b, 0x64, 0x21, 0x5a, 0x62, 0x1f, 0x57, 0x61,
+    0x1d, 0x55, 0x5e, 0x1c, 0x54, 0x5d, 0x19, 0x52, 0x5a, 0x18, 0x51, 0x5a, 0x17, 0x4f, 0x58, 0x17, 0x4e, 0x56, 0x16, 0x4c, 0x55,
+    0x15, 0x4a, 0x53, 0x14, 0x4a, 0x51, 0x12, 0x48, 0x51, 0x12, 0x47, 0x50, 0x11, 0x45, 0x4e, 0x11, 0x45, 0x4d, 0x10, 0x43, 0x4b,
+    0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44,
+    0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d,
+    0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36,
+    0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0xce, 0xdf, 0xe1, 0xff, 0xff, 0xff, 0x39, 0x7c, 0x86, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x2e, 0x75, 0x7f, 0xff, 0xff, 0xff, 0xd6, 0xe4, 0xe6, 0x0b, 0x5c, 0x68, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x74, 0xa2, 0xa9,
+    0xff, 0xff, 0xff, 0x90, 0xb4, 0xb9, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62,
+    0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60,
+    0x06, 0x54, 0x60, 0xc7, 0xd9, 0xdb, 0xff, 0xff, 0xff, 0x6e, 0x9a, 0xa1, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d,
+    0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x7e, 0xa4, 0xaa, 0xff, 0xff, 0xff,
+    0x83, 0xa8, 0xad, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56,
+    0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51,
+    0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x1b, 0x53, 0x5c, 0xee, 0xf2, 0xf3, 0xee, 0xf2, 0xf3,
+    0x1a, 0x51, 0x5a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44,
+    0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c,
+    0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x16, 0x40, 0x46, 0xe1, 0xe7, 0xe7, 0xf7, 0xf8, 0xf9, 0x66, 0x7f, 0x83,
+    0x4d, 0x6a, 0x6e, 0x50, 0x6b, 0x6f, 0x4f, 0x6a, 0x6e, 0x4f, 0x69, 0x6d, 0x74, 0x88, 0x8a, 0xcc, 0xd3, 0xd4, 0xff, 0xff, 0xff,
+    0xe7, 0xea, 0xeb, 0x25, 0x3f, 0x44, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e,
+    0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x01, 0x12, 0x14, 0x01, 0x10, 0x13, 0x01, 0x0d, 0x10,
+    0x03, 0x10, 0x12, 0xb3, 0xbb, 0xbc, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xde, 0xe9, 0xea, 0x85, 0xac, 0xb2, 0x80, 0xa9, 0xae, 0x7c, 0xa5, 0xab, 0x77, 0xa1, 0xa8, 0x72, 0x9e, 0xa4,
+    0x6e, 0x9a, 0xa1, 0x69, 0x97, 0x9e, 0x65, 0x94, 0x9b, 0x60, 0x90, 0x98, 0x5c, 0x8d, 0x95, 0x58, 0x89, 0x92, 0x54, 0x86, 0x8e,
+    0x4f, 0x82, 0x8b, 0x4c, 0x80, 0x88, 0x49, 0x7d, 0x86, 0x45, 0x7a, 0x82, 0x41, 0x77, 0x7f, 0x3d, 0x74, 0x7c, 0x3a, 0x72, 0x7a,
+    0x36, 0x6e, 0x76, 0x34, 0x6b, 0x75, 0x31, 0x68, 0x72, 0x2e, 0x66, 0x6f, 0x2b, 0x63, 0x6c, 0x2a, 0x63, 0x6b, 0x26, 0x60, 0x69,
+    0x24, 0x5d, 0x66, 0x23, 0x5b, 0x64, 0x21, 0x5a, 0x62, 0x1f, 0x57, 0x61, 0x1d, 0x55, 0x5e, 0x1c, 0x54, 0x5d, 0x19, 0x52, 0x5a,
+    0x18, 0x51, 0x5a, 0x17, 0x4f, 0x58, 0x17, 0x4e, 0x56, 0x16, 0x4c, 0x55, 0x15, 0x4a, 0x53, 0x14, 0x4a, 0x51, 0x12, 0x48, 0x51,
+    0x12, 0x47, 0x50, 0x11, 0x45, 0x4e, 0x11, 0x45, 0x4d, 0x10, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47,
+    0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41,
+    0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a,
+    0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30,
+    0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0xce, 0xdf, 0xe1, 0xff, 0xff, 0xff, 0x39, 0x7c, 0x86, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x2e, 0x75, 0x7f,
+    0xff, 0xff, 0xff, 0xd6, 0xe4, 0xe6, 0x0b, 0x5c, 0x68, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x74, 0xa2, 0xa9, 0xff, 0xff, 0xff, 0x90, 0xb4, 0xb9, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61,
+    0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0xc4, 0xd7, 0xd9, 0xff, 0xff, 0xff,
+    0xfd, 0xfe, 0xfe, 0x5b, 0x8e, 0x95, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c,
+    0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x7e, 0xa4, 0xaa, 0xff, 0xff, 0xff, 0x83, 0xa8, 0xad, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58,
+    0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54,
+    0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e,
+    0x05, 0x44, 0x4d, 0x6c, 0x90, 0x96, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x6f, 0x92, 0x97, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48,
+    0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41,
+    0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38,
+    0x16, 0x40, 0x46, 0xe3, 0xe8, 0xe9, 0xf1, 0xf4, 0xf4, 0x19, 0x3f, 0x45, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f,
+    0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x0a, 0x2b, 0x30, 0xb3, 0xbd, 0xbe, 0xff, 0xff, 0xff, 0x91, 0x9e, 0xa1, 0x02, 0x20, 0x24,
+    0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18,
+    0x02, 0x13, 0x16, 0x01, 0x12, 0x14, 0x01, 0x10, 0x13, 0x01, 0x0e, 0x10, 0x01, 0x0c, 0x0e, 0xa0, 0xab, 0xac, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xc8, 0xda, 0xdd, 0x86, 0xac, 0xb2,
+    0x81, 0xa9, 0xaf, 0x7d, 0xa5, 0xac, 0x78, 0xa2, 0xa9, 0x73, 0x9e, 0xa5, 0x6f, 0x9b, 0xa1, 0x6a, 0x97, 0x9e, 0x65, 0x94, 0x9b,
+    0x61, 0x91, 0x98, 0x5c, 0x8d, 0x95, 0x59, 0x8a, 0x92, 0x54, 0x86, 0x8e, 0x50, 0x83, 0x8b, 0x4d, 0x81, 0x89, 0x4a, 0x7e, 0x86,
+    0x46, 0x7a, 0x83, 0x42, 0x78, 0x80, 0x3e, 0x74, 0x7d, 0x3a, 0x72, 0x7a, 0x37, 0x6f, 0x77, 0x34, 0x6c, 0x75, 0x31, 0x68, 0x72,
+    0x2f, 0x67, 0x70, 0x2c, 0x64, 0x6d, 0x29, 0x62, 0x6b, 0x27, 0x60, 0x69, 0x25, 0x5d, 0x66, 0x22, 0x5a, 0x64, 0x20, 0x59, 0x61,
+    0x1f, 0x57, 0x61, 0x1e, 0x56, 0x5f, 0x1c, 0x54, 0x5d, 0x19, 0x52, 0x5a, 0x18, 0x51, 0x5a, 0x17, 0x4f, 0x58, 0x17, 0x4e, 0x56,
+    0x16, 0x4c, 0x55, 0x15, 0x4a, 0x53, 0x14, 0x4a, 0x51, 0x13, 0x49, 0x51, 0x12, 0x47, 0x50, 0x11, 0x45, 0x4e, 0x11, 0x45, 0x4d,
+    0x10, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45,
+    0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e,
+    0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37,
+    0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0xce, 0xdf, 0xe1, 0xff, 0xff, 0xff, 0x39, 0x7c, 0x86,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x2e, 0x75, 0x7f, 0xff, 0xff, 0xff, 0xd6, 0xe4, 0xe6, 0x0b, 0x5c, 0x68,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64,
+    0x74, 0xa2, 0xa9, 0xff, 0xff, 0xff, 0x90, 0xb4, 0xb9, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62,
+    0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60,
+    0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0xc4, 0xd7, 0xd9, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf6, 0xf9, 0xf9, 0x4a, 0x82, 0x8a,
+    0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x7e, 0xa4, 0xaa,
+    0xff, 0xff, 0xff, 0x83, 0xa8, 0xad, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56,
+    0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51,
+    0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x08, 0x46, 0x4f, 0xcb, 0xd8, 0xda, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xcf, 0xda, 0xdc, 0x0a, 0x44, 0x4d, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45,
+    0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d,
+    0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x16, 0x40, 0x46, 0xe3, 0xe8, 0xe9, 0xf2, 0xf4, 0xf5,
+    0x23, 0x47, 0x4d, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a,
+    0x3c, 0x56, 0x5a, 0xff, 0xff, 0xff, 0xd1, 0xd7, 0xd8, 0x06, 0x23, 0x27, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f,
+    0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x01, 0x12, 0x14, 0x01, 0x10, 0x13,
+    0x01, 0x0e, 0x10, 0x01, 0x0c, 0x0e, 0x88, 0x95, 0x97, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xb4, 0xcd, 0xd0, 0x87, 0xad, 0xb3, 0x82, 0xaa, 0xb0, 0x7d, 0xa6, 0xad, 0x79, 0xa2, 0xa9,
+    0x74, 0x9f, 0xa6, 0x70, 0x9b, 0xa2, 0x6b, 0x98, 0x9f, 0x66, 0x94, 0x9c, 0x62, 0x92, 0x99, 0x5d, 0x8e, 0x96, 0x59, 0x8a, 0x92,
+    0x55, 0x87, 0x8f, 0x50, 0x83, 0x8b, 0x4d, 0x81, 0x89, 0x4a, 0x7e, 0x86, 0x46, 0x7a, 0x83, 0x42, 0x78, 0x80, 0x3e, 0x74, 0x7d,
+    0x3b, 0x72, 0x7a, 0x37, 0x6f, 0x77, 0x34, 0x6c, 0x75, 0x32, 0x69, 0x72, 0x2f, 0x67, 0x70, 0x2c, 0x64, 0x6d, 0x2a, 0x63, 0x6b,
+    0x26, 0x60, 0x69, 0x26, 0x5e, 0x67, 0x24, 0x5c, 0x65, 0x22, 0x5a, 0x63, 0x20, 0x58, 0x61, 0x1e, 0x56, 0x5f, 0x1c, 0x54, 0x5d,
+    0x1a, 0x53, 0x5b, 0x18, 0x51, 0x5a, 0x17, 0x4f, 0x58, 0x17, 0x4e, 0x56, 0x16, 0x4c, 0x55, 0x15, 0x4a, 0x53, 0x14, 0x4a, 0x51,
+    0x13, 0x49, 0x51, 0x12, 0x47, 0x50, 0x11, 0x45, 0x4e, 0x11, 0x45, 0x4d, 0x10, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48,
+    0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42,
+    0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b,
+    0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34,
+    0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0xce, 0xdf, 0xe1, 0xff, 0xff, 0xff, 0x39, 0x7c, 0x86, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x2e, 0x75, 0x7f, 0xff, 0xff, 0xff, 0xd6, 0xe4, 0xe6, 0x0b, 0x5c, 0x68, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x74, 0xa2, 0xa9, 0xff, 0xff, 0xff, 0x90, 0xb4, 0xb9,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62,
+    0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0xc4, 0xd7, 0xd9,
+    0xff, 0xff, 0xff, 0xdc, 0xe7, 0xe8, 0xff, 0xff, 0xff, 0xec, 0xf2, 0xf3, 0x3a, 0x77, 0x7f, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c,
+    0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x7e, 0xa4, 0xaa, 0xff, 0xff, 0xff, 0x83, 0xa8, 0xad, 0x05, 0x4e, 0x59,
+    0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55,
+    0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f,
+    0x05, 0x45, 0x4e, 0x44, 0x73, 0x7a, 0xfe, 0xfe, 0xfe, 0xf2, 0xf5, 0xf6, 0xf5, 0xf8, 0xf8, 0xff, 0xff, 0xff, 0x4a, 0x75, 0x7c,
+    0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42,
+    0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a,
+    0x03, 0x32, 0x38, 0x16, 0x40, 0x46, 0xe3, 0xe8, 0xe9, 0xf2, 0xf4, 0xf5, 0x23, 0x47, 0x4d, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30,
+    0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x1b, 0x39, 0x3d, 0xfe, 0xfe, 0xfe, 0xe5, 0xe8, 0xe9,
+    0x0a, 0x27, 0x2b, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a,
+    0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x01, 0x12, 0x14, 0x01, 0x10, 0x13, 0x01, 0x0e, 0x10, 0x00, 0x0b, 0x0d, 0x78, 0x87, 0x8a,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xa6, 0xc3, 0xc7,
+    0x87, 0xad, 0xb3, 0x82, 0xaa, 0xb0, 0x7d, 0xa6, 0xad, 0x79, 0xa2, 0xa9, 0x74, 0x9f, 0xa6, 0x70, 0x9b, 0xa2, 0x6c, 0x99, 0xa0,
+    0x67, 0x95, 0x9d, 0x62, 0x92, 0x99, 0x5e, 0x8e, 0x97, 0x5a, 0x8a, 0x93, 0x55, 0x87, 0x8f, 0x51, 0x84, 0x8c, 0x4d, 0x81, 0x89,
+    0x4a, 0x7e, 0x86, 0x45, 0x7a, 0x82, 0x42, 0x78, 0x80, 0x3f, 0x75, 0x7e, 0x3b, 0x72, 0x7a, 0x38, 0x6f, 0x78, 0x34, 0x6c, 0x75,
+    0x32, 0x69, 0x72, 0x2f, 0x67, 0x70, 0x2d, 0x65, 0x6d, 0x2a, 0x63, 0x6b, 0x26, 0x60, 0x69, 0x26, 0x5e, 0x67, 0x24, 0x5c, 0x65,
+    0x22, 0x5a, 0x63, 0x20, 0x58, 0x61, 0x1e, 0x56, 0x5f, 0x1c, 0x54, 0x5d, 0x1a, 0x53, 0x5b, 0x18, 0x51, 0x5a, 0x17, 0x4f, 0x58,
+    0x17, 0x4e, 0x56, 0x16, 0x4c, 0x55, 0x15, 0x4a, 0x53, 0x14, 0x4a, 0x51, 0x13, 0x49, 0x51, 0x12, 0x47, 0x50, 0x11, 0x45, 0x4e,
+    0x11, 0x45, 0x4d, 0x10, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45,
+    0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f,
+    0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38,
+    0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0xce, 0xdf, 0xe1, 0xff, 0xff, 0xff,
+    0x39, 0x7c, 0x86, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x2e, 0x75, 0x7f, 0xff, 0xff, 0xff, 0xd6, 0xe4, 0xe6,
+    0x0b, 0x5c, 0x68, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x64, 0x74, 0xa2, 0xa9, 0xff, 0xff, 0xff, 0x90, 0xb4, 0xb9, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63,
+    0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61,
+    0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0xc5, 0xd7, 0xda, 0xff, 0xff, 0xff, 0x4b, 0x83, 0x8b, 0xb4, 0xcb, 0xce,
+    0xff, 0xff, 0xff, 0xe0, 0xe9, 0xeb, 0x2a, 0x6c, 0x75, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b,
+    0x7e, 0xa4, 0xaa, 0xff, 0xff, 0xff, 0x83, 0xa8, 0xad, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57,
+    0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52,
+    0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0xa3, 0xba, 0xbd, 0xff, 0xff, 0xff,
+    0x94, 0xaf, 0xb2, 0x89, 0xa6, 0xaa, 0xff, 0xff, 0xff, 0xae, 0xc2, 0xc4, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46,
+    0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e,
+    0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x16, 0x40, 0x46, 0xe3, 0xe8, 0xe9,
+    0xf2, 0xf4, 0xf5, 0x23, 0x47, 0x4d, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c,
+    0x03, 0x25, 0x2a, 0x3a, 0x54, 0x58, 0xff, 0xff, 0xff, 0xd1, 0xd7, 0xd8, 0x06, 0x23, 0x27, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21,
+    0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x01, 0x12, 0x14,
+    0x01, 0x10, 0x13, 0x01, 0x0e, 0x11, 0x00, 0x0b, 0x0d, 0x6b, 0x7c, 0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xa0, 0xbf, 0xc4, 0x88, 0xae, 0xb4, 0x83, 0xaa, 0xb0, 0x7e, 0xa7, 0xad,
+    0x7a, 0xa3, 0xaa, 0x75, 0xa0, 0xa6, 0x70, 0x9c, 0xa3, 0x6c, 0x99, 0xa0, 0x67, 0x95, 0x9d, 0x63, 0x92, 0x9a, 0x5e, 0x8e, 0x97,
+    0x5a, 0x8a, 0x93, 0x56, 0x88, 0x90, 0x51, 0x84, 0x8c, 0x4d, 0x81, 0x89, 0x4b, 0x7e, 0x87, 0x47, 0x7b, 0x84, 0x42, 0x78, 0x80,
+    0x3f, 0x75, 0x7e, 0x3b, 0x72, 0x7a, 0x38, 0x6f, 0x78, 0x34, 0x6c, 0x75, 0x32, 0x69, 0x72, 0x2f, 0x67, 0x70, 0x2d, 0x65, 0x6d,
+    0x2a, 0x63, 0x6b, 0x27, 0x60, 0x69, 0x26, 0x5e, 0x67, 0x24, 0x5c, 0x65, 0x22, 0x5a, 0x63, 0x20, 0x58, 0x61, 0x1e, 0x56, 0x5f,
+    0x1d, 0x55, 0x5d, 0x1a, 0x53, 0x5b, 0x18, 0x51, 0x5a, 0x17, 0x4f, 0x58, 0x17, 0x4e, 0x56, 0x16, 0x4c, 0x55, 0x15, 0x4a, 0x53,
+    0x14, 0x4a, 0x51, 0x13, 0x49, 0x51, 0x12, 0x47, 0x50, 0x11, 0x45, 0x4e, 0x11, 0x45, 0x4d, 0x10, 0x43, 0x4b, 0x0e, 0x41, 0x49,
+    0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43,
+    0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c,
+    0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35,
+    0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0xce, 0xdf, 0xe1, 0xff, 0xff, 0xff, 0x39, 0x7c, 0x86, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x2e, 0x75, 0x7f, 0xff, 0xff, 0xff, 0xd6, 0xe4, 0xe6, 0x0b, 0x5c, 0x68, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x74, 0xa2, 0xa9, 0xff, 0xff, 0xff,
+    0x90, 0xb4, 0xb9, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62,
+    0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60,
+    0xc5, 0xd7, 0xda, 0xff, 0xff, 0xff, 0x38, 0x75, 0x7e, 0x12, 0x5b, 0x66, 0xcb, 0xdb, 0xde, 0xff, 0xff, 0xff, 0xd2, 0xe0, 0xe2,
+    0x1d, 0x61, 0x6b, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x7e, 0xa4, 0xaa, 0xff, 0xff, 0xff, 0x83, 0xa8, 0xad,
+    0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55,
+    0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50,
+    0x05, 0x46, 0x4f, 0x21, 0x5a, 0x62, 0xf1, 0xf5, 0xf5, 0xfe, 0xfe, 0xfe, 0x32, 0x66, 0x6d, 0x26, 0x5c, 0x64, 0xf9, 0xfb, 0xfb,
+    0xf7, 0xf9, 0xf9, 0x29, 0x5c, 0x63, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43,
+    0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b,
+    0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x16, 0x40, 0x46, 0xe3, 0xe8, 0xe9, 0xf2, 0xf4, 0xf5, 0x23, 0x47, 0x4d, 0x03, 0x2c, 0x32,
+    0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x12, 0x32, 0x37, 0xb9, 0xc2, 0xc3, 0xff, 0xff, 0xff,
+    0x89, 0x97, 0x9a, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c,
+    0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x01, 0x12, 0x14, 0x01, 0x10, 0x13, 0x01, 0x0e, 0x11, 0x00, 0x0a, 0x0c,
+    0x64, 0x75, 0x78, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfa, 0xfc, 0xfc,
+    0x9d, 0xbc, 0xc1, 0x89, 0xae, 0xb4, 0x83, 0xaa, 0xb0, 0x7e, 0xa7, 0xad, 0x7a, 0xa3, 0xaa, 0x75, 0xa0, 0xa6, 0x70, 0x9c, 0xa3,
+    0x6c, 0x99, 0xa0, 0x68, 0x96, 0x9d, 0x63, 0x92, 0x9a, 0x5e, 0x8e, 0x97, 0x5a, 0x8b, 0x93, 0x56, 0x88, 0x90, 0x52, 0x84, 0x8c,
+    0x4d, 0x81, 0x89, 0x4b, 0x7e, 0x87, 0x47, 0x7b, 0x84, 0x42, 0x78, 0x80, 0x3f, 0x75, 0x7e, 0x3b, 0x72, 0x7a, 0x38, 0x6f, 0x78,
+    0x35, 0x6c, 0x76, 0x32, 0x69, 0x72, 0x30, 0x68, 0x70, 0x2d, 0x65, 0x6d, 0x2a, 0x63, 0x6b, 0x27, 0x60, 0x69, 0x26, 0x5e, 0x67,
+    0x24, 0x5c, 0x65, 0x22, 0x5a, 0x63, 0x20, 0x58, 0x61, 0x1e, 0x56, 0x5f, 0x1d, 0x55, 0x5d, 0x1a, 0x53, 0x5b, 0x18, 0x51, 0x5a,
+    0x17, 0x4f, 0x58, 0x17, 0x4e, 0x56, 0x16, 0x4c, 0x55, 0x15, 0x4a, 0x53, 0x15, 0x4a, 0x52, 0x13, 0x49, 0x51, 0x12, 0x47, 0x50,
+    0x11, 0x45, 0x4e, 0x11, 0x45, 0x4d, 0x10, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46,
+    0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40,
+    0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39,
+    0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0xce, 0xdf, 0xe1,
+    0xff, 0xff, 0xff, 0x39, 0x7c, 0x86, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x2e, 0x75, 0x7f, 0xff, 0xff, 0xff,
+    0xd6, 0xe4, 0xe6, 0x0b, 0x5c, 0x68, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x74, 0xa2, 0xa9, 0xff, 0xff, 0xff, 0x90, 0xb4, 0xb9, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61,
+    0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0xc5, 0xd7, 0xda, 0xff, 0xff, 0xff, 0x43, 0x7d, 0x85,
+    0x06, 0x53, 0x5e, 0x23, 0x68, 0x72, 0xd8, 0xe4, 0xe6, 0xff, 0xff, 0xff, 0xc3, 0xd5, 0xd8, 0x11, 0x59, 0x64, 0x05, 0x51, 0x5b,
+    0x05, 0x50, 0x5b, 0x7e, 0xa4, 0xaa, 0xff, 0xff, 0xff, 0x83, 0xa8, 0xad, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57,
+    0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53,
+    0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x78, 0x9a, 0x9f, 0xff, 0xff, 0xff,
+    0xc2, 0xd1, 0xd4, 0x05, 0x44, 0x4d, 0x04, 0x42, 0x4b, 0xb4, 0xc6, 0xc9, 0xff, 0xff, 0xff, 0x89, 0xa5, 0xa9, 0x04, 0x3e, 0x47,
+    0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f,
+    0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x16, 0x40, 0x46,
+    0xe3, 0xe8, 0xe9, 0xf2, 0xf4, 0xf5, 0x20, 0x45, 0x4a, 0x33, 0x55, 0x59, 0x47, 0x64, 0x68, 0x46, 0x62, 0x66, 0x55, 0x6e, 0x72,
+    0x84, 0x96, 0x98, 0xdb, 0xe0, 0xe1, 0xff, 0xff, 0xff, 0xcd, 0xd4, 0xd5, 0x18, 0x34, 0x39, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23,
+    0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x02, 0x13, 0x16,
+    0x01, 0x12, 0x14, 0x01, 0x10, 0x13, 0x01, 0x0e, 0x11, 0x00, 0x0a, 0x0c, 0x61, 0x73, 0x76, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf9, 0xfb, 0xfb, 0x9c, 0xbc, 0xc1, 0x89, 0xae, 0xb4, 0x84, 0xab, 0xb1,
+    0x7f, 0xa7, 0xae, 0x7b, 0xa4, 0xaa, 0x76, 0xa0, 0xa7, 0x71, 0x9d, 0xa3, 0x6d, 0x99, 0xa0, 0x68, 0x96, 0x9d, 0x63, 0x92, 0x9a,
+    0x5f, 0x8f, 0x97, 0x5a, 0x8b, 0x93, 0x56, 0x88, 0x90, 0x52, 0x84, 0x8c, 0x4d, 0x81, 0x89, 0x4b, 0x7e, 0x87, 0x47, 0x7b, 0x84,
+    0x42, 0x78, 0x80, 0x3f, 0x75, 0x7e, 0x3c, 0x73, 0x7b, 0x38, 0x6f, 0x78, 0x35, 0x6c, 0x76, 0x33, 0x6a, 0x73, 0x30, 0x68, 0x70,
+    0x2d, 0x65, 0x6d, 0x2a, 0x63, 0x6b, 0x27, 0x60, 0x69, 0x25, 0x5d, 0x66, 0x24, 0x5c, 0x65, 0x22, 0x5a, 0x63, 0x20, 0x58, 0x61,
+    0x1e, 0x56, 0x5f, 0x1d, 0x55, 0x5d, 0x1a, 0x53, 0x5b, 0x18, 0x51, 0x5a, 0x17, 0x4f, 0x58, 0x17, 0x4e, 0x56, 0x16, 0x4c, 0x55,
+    0x15, 0x4a, 0x53, 0x15, 0x4a, 0x52, 0x13, 0x49, 0x51, 0x12, 0x47, 0x50, 0x11, 0x45, 0x4e, 0x11, 0x45, 0x4d, 0x10, 0x43, 0x4b,
+    0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44,
+    0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d,
+    0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36,
+    0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0xce, 0xdf, 0xe1, 0xff, 0xff, 0xff, 0x39, 0x7c, 0x86, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x2e, 0x75, 0x7f, 0xff, 0xff, 0xff, 0xd6, 0xe4, 0xe6, 0x0b, 0x5c, 0x68, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x74, 0xa2, 0xa9,
+    0xff, 0xff, 0xff, 0x90, 0xb4, 0xb9, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62,
+    0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60,
+    0x06, 0x54, 0x60, 0xc5, 0xd7, 0xda, 0xff, 0xff, 0xff, 0x42, 0x7c, 0x85, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x2f, 0x6f, 0x78,
+    0xe4, 0xec, 0xed, 0xff, 0xff, 0xff, 0xb3, 0xca, 0xcd, 0x0b, 0x55, 0x5f, 0x05, 0x50, 0x5b, 0x7e, 0xa4, 0xaa, 0xff, 0xff, 0xff,
+    0x83, 0xa8, 0xad, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56,
+    0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51,
+    0x05, 0x47, 0x50, 0x0c, 0x4b, 0x54, 0xd6, 0xe0, 0xe2, 0xff, 0xff, 0xff, 0x61, 0x88, 0x8f, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b,
+    0x4f, 0x7a, 0x80, 0xff, 0xff, 0xff, 0xe1, 0xe8, 0xe9, 0x14, 0x4a, 0x53, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44,
+    0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c,
+    0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x16, 0x40, 0x46, 0xe3, 0xe8, 0xe9, 0xf2, 0xf4, 0xf5, 0x19, 0x3f, 0x45,
+    0x4e, 0x6b, 0x6f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0xfe, 0xfe, 0x9e, 0xab, 0xad,
+    0x17, 0x35, 0x39, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e,
+    0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x01, 0x12, 0x14, 0x01, 0x10, 0x13, 0x01, 0x0e, 0x11,
+    0x00, 0x0a, 0x0c, 0x61, 0x73, 0x76, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xfb, 0xfc, 0xfc, 0x9e, 0xbd, 0xc2, 0x89, 0xae, 0xb4, 0x84, 0xab, 0xb1, 0x7f, 0xa7, 0xae, 0x7b, 0xa4, 0xaa, 0x76, 0xa0, 0xa7,
+    0x71, 0x9d, 0xa3, 0x6d, 0x99, 0xa0, 0x68, 0x96, 0x9d, 0x64, 0x93, 0x9a, 0x5f, 0x8f, 0x97, 0x5a, 0x8b, 0x93, 0x56, 0x88, 0x90,
+    0x52, 0x84, 0x8c, 0x4d, 0x81, 0x89, 0x4b, 0x7e, 0x87, 0x47, 0x7b, 0x84, 0x42, 0x78, 0x80, 0x3f, 0x75, 0x7e, 0x3c, 0x73, 0x7b,
+    0x38, 0x6f, 0x78, 0x35, 0x6c, 0x76, 0x33, 0x6a, 0x73, 0x30, 0x68, 0x70, 0x2d, 0x65, 0x6d, 0x2a, 0x63, 0x6b, 0x27, 0x60, 0x69,
+    0x25, 0x5d, 0x66, 0x24, 0x5c, 0x65, 0x22, 0x5a, 0x63, 0x20, 0x58, 0x61, 0x1e, 0x56, 0x5f, 0x1d, 0x55, 0x5d, 0x1a, 0x53, 0x5b,
+    0x18, 0x51, 0x5a, 0x17, 0x4f, 0x58, 0x17, 0x4e, 0x56, 0x16, 0x4c, 0x55, 0x15, 0x4a, 0x53, 0x15, 0x4a, 0x52, 0x13, 0x49, 0x51,
+    0x12, 0x47, 0x50, 0x11, 0x45, 0x4e, 0x11, 0x45, 0x4d, 0x10, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47,
+    0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41,
+    0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a,
+    0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30,
+    0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0xce, 0xdf, 0xe1, 0xff, 0xff, 0xff, 0x39, 0x7c, 0x86, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x2e, 0x75, 0x7f,
+    0xff, 0xff, 0xff, 0xd6, 0xe4, 0xe6, 0x0b, 0x5c, 0x68, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x74, 0xa2, 0xa9, 0xff, 0xff, 0xff, 0x90, 0xb4, 0xb9, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61,
+    0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0xc5, 0xd7, 0xda, 0xff, 0xff, 0xff,
+    0x42, 0x7c, 0x85, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x3a, 0x77, 0x7f, 0xec, 0xf2, 0xf3, 0xff, 0xff, 0xff,
+    0x9f, 0xbc, 0xc0, 0x05, 0x50, 0x5b, 0x7b, 0xa2, 0xa8, 0xff, 0xff, 0xff, 0x83, 0xa8, 0xad, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58,
+    0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54,
+    0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x4f, 0x7c, 0x83, 0xff, 0xff, 0xff,
+    0xe2, 0xe9, 0xea, 0x14, 0x4e, 0x57, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x0a, 0x45, 0x4e, 0xd2, 0xdd, 0xde, 0xff, 0xff, 0xff,
+    0x62, 0x87, 0x8c, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41,
+    0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38,
+    0x16, 0x40, 0x46, 0xe3, 0xe8, 0xe9, 0xf2, 0xf4, 0xf5, 0x22, 0x47, 0x4c, 0x03, 0x2c, 0x32, 0x6a, 0x81, 0x84, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xa4, 0xb1, 0xb3, 0x27, 0x44, 0x48, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24,
+    0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18,
+    0x02, 0x13, 0x16, 0x01, 0x12, 0x14, 0x01, 0x10, 0x13, 0x01, 0x0e, 0x11, 0x00, 0x0b, 0x0c, 0x66, 0x77, 0x7a, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xa2, 0xc0, 0xc4, 0x89, 0xae, 0xb4,
+    0x84, 0xab, 0xb1, 0x7f, 0xa7, 0xae, 0x7b, 0xa4, 0xaa, 0x76, 0xa0, 0xa7, 0x71, 0x9d, 0xa3, 0x6d, 0x99, 0xa0, 0x68, 0x96, 0x9d,
+    0x63, 0x92, 0x9a, 0x5f, 0x8f, 0x97, 0x5a, 0x8b, 0x93, 0x56, 0x88, 0x90, 0x52, 0x84, 0x8c, 0x4d, 0x81, 0x89, 0x4b, 0x7e, 0x87,
+    0x47, 0x7b, 0x84, 0x42, 0x78, 0x80, 0x3f, 0x75, 0x7e, 0x3c, 0x73, 0x7b, 0x38, 0x6f, 0x78, 0x35, 0x6c, 0x76, 0x33, 0x6a, 0x73,
+    0x30, 0x68, 0x70, 0x2d, 0x65, 0x6d, 0x2a, 0x63, 0x6b, 0x27, 0x60, 0x69, 0x25, 0x5d, 0x66, 0x24, 0x5c, 0x65, 0x22, 0x5a, 0x63,
+    0x20, 0x58, 0x61, 0x1e, 0x56, 0x5f, 0x1d, 0x55, 0x5d, 0x1a, 0x53, 0x5b, 0x18, 0x51, 0x5a, 0x17, 0x4f, 0x58, 0x17, 0x4e, 0x56,
+    0x16, 0x4c, 0x55, 0x15, 0x4a, 0x53, 0x15, 0x4a, 0x52, 0x13, 0x49, 0x51, 0x12, 0x47, 0x50, 0x11, 0x45, 0x4e, 0x11, 0x45, 0x4d,
+    0x10, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45,
+    0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e,
+    0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37,
+    0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0xce, 0xdf, 0xe1, 0xff, 0xff, 0xff, 0x39, 0x7c, 0x86,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x2f, 0x75, 0x7f, 0xff, 0xff, 0xff, 0xd5, 0xe3, 0xe5, 0x0a, 0x5c, 0x67,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64,
+    0x72, 0xa1, 0xa7, 0xff, 0xff, 0xff, 0x91, 0xb5, 0xba, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62,
+    0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60,
+    0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0xc5, 0xd7, 0xda, 0xff, 0xff, 0xff, 0x42, 0x7c, 0x85, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e,
+    0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x47, 0x7f, 0x87, 0xf5, 0xf8, 0xf9, 0xff, 0xff, 0xff, 0x7b, 0xa2, 0xa8, 0x78, 0xa0, 0xa6,
+    0xff, 0xff, 0xff, 0x83, 0xa8, 0xad, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56,
+    0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51,
+    0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0xaf, 0xc4, 0xc6, 0xff, 0xff, 0xff, 0x88, 0xa6, 0xab, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c,
+    0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x6e, 0x91, 0x96, 0xff, 0xff, 0xff, 0xc5, 0xd2, 0xd4, 0x06, 0x40, 0x47, 0x04, 0x3d, 0x45,
+    0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d,
+    0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x16, 0x40, 0x46, 0xe3, 0xe8, 0xe9, 0xf2, 0xf4, 0xf5,
+    0x23, 0x47, 0x4d, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x8e, 0x9f, 0xa2, 0xff, 0xff, 0xff, 0xdb, 0xe1, 0xe1, 0x26, 0x43, 0x47,
+    0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f,
+    0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x01, 0x12, 0x14, 0x01, 0x10, 0x13,
+    0x01, 0x0e, 0x11, 0x00, 0x0b, 0x0d, 0x6e, 0x7f, 0x82, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xa9, 0xc5, 0xc9, 0x89, 0xae, 0xb4, 0x84, 0xab, 0xb1, 0x7f, 0xa7, 0xae, 0x7b, 0xa4, 0xaa,
+    0x76, 0xa0, 0xa7, 0x71, 0x9d, 0xa3, 0x6d, 0x99, 0xa0, 0x68, 0x96, 0x9d, 0x63, 0x92, 0x9a, 0x5e, 0x8e, 0x97, 0x5a, 0x8b, 0x93,
+    0x56, 0x88, 0x90, 0x52, 0x84, 0x8c, 0x4d, 0x81, 0x89, 0x4b, 0x7e, 0x87, 0x47, 0x7b, 0x84, 0x42, 0x78, 0x80, 0x3f, 0x75, 0x7e,
+    0x3b, 0x72, 0x7a, 0x38, 0x6f, 0x78, 0x35, 0x6c, 0x76, 0x33, 0x6a, 0x73, 0x30, 0x68, 0x70, 0x2d, 0x65, 0x6d, 0x2a, 0x63, 0x6b,
+    0x27, 0x60, 0x69, 0x25, 0x5d, 0x66, 0x24, 0x5c, 0x65, 0x22, 0x5a, 0x63, 0x20, 0x58, 0x61, 0x1e, 0x56, 0x5f, 0x1d, 0x55, 0x5d,
+    0x1a, 0x53, 0x5b, 0x18, 0x51, 0x5a, 0x17, 0x4f, 0x58, 0x17, 0x4e, 0x56, 0x16, 0x4c, 0x55, 0x15, 0x4a, 0x53, 0x15, 0x4a, 0x52,
+    0x13, 0x49, 0x51, 0x12, 0x47, 0x50, 0x11, 0x45, 0x4e, 0x11, 0x45, 0x4d, 0x10, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48,
+    0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42,
+    0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b,
+    0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34,
+    0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0xce, 0xdf, 0xe1, 0xff, 0xff, 0xff, 0x39, 0x7c, 0x86, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x29, 0x71, 0x7c, 0xff, 0xff, 0xff, 0xe0, 0xea, 0xec, 0x0a, 0x5c, 0x67, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x7b, 0xa7, 0xad, 0xff, 0xff, 0xff, 0x8d, 0xb2, 0xb7,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62,
+    0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0xc5, 0xd7, 0xda,
+    0xff, 0xff, 0xff, 0x42, 0x7c, 0x85, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c,
+    0x54, 0x88, 0x90, 0xfa, 0xfc, 0xfc, 0xff, 0xff, 0xff, 0xdb, 0xe6, 0xe7, 0xff, 0xff, 0xff, 0x83, 0xa8, 0xad, 0x05, 0x4e, 0x59,
+    0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55,
+    0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x2a, 0x62, 0x6a, 0xf6, 0xf8, 0xf9,
+    0xf9, 0xfb, 0xfb, 0x2c, 0x61, 0x69, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x1b, 0x51, 0x59,
+    0xe9, 0xee, 0xef, 0xfd, 0xfd, 0xfe, 0x3e, 0x6b, 0x71, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42,
+    0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a,
+    0x03, 0x32, 0x38, 0x16, 0x40, 0x46, 0xe3, 0xe8, 0xe9, 0xf2, 0xf4, 0xf5, 0x23, 0x47, 0x4d, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30,
+    0x07, 0x2c, 0x32, 0xb1, 0xbc, 0xbe, 0xff, 0xff, 0xff, 0xd5, 0xdb, 0xdc, 0x1d, 0x3a, 0x3f, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26,
+    0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a,
+    0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x01, 0x12, 0x14, 0x01, 0x10, 0x13, 0x01, 0x0e, 0x10, 0x00, 0x0b, 0x0d, 0x7d, 0x8c, 0x8f,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xb8, 0xcf, 0xd3,
+    0x88, 0xae, 0xb4, 0x83, 0xaa, 0xb0, 0x7e, 0xa7, 0xad, 0x7a, 0xa3, 0xaa, 0x75, 0xa0, 0xa6, 0x70, 0x9c, 0xa3, 0x6c, 0x99, 0xa0,
+    0x67, 0x95, 0x9d, 0x63, 0x92, 0x9a, 0x5e, 0x8e, 0x97, 0x5a, 0x8b, 0x93, 0x56, 0x88, 0x90, 0x51, 0x84, 0x8c, 0x4d, 0x81, 0x89,
+    0x4b, 0x7e, 0x87, 0x47, 0x7b, 0x84, 0x42, 0x78, 0x80, 0x3f, 0x75, 0x7e, 0x3b, 0x72, 0x7a, 0x38, 0x6f, 0x78, 0x35, 0x6c, 0x76,
+    0x32, 0x69, 0x72, 0x30, 0x68, 0x70, 0x2d, 0x65, 0x6d, 0x2a, 0x63, 0x6b, 0x27, 0x60, 0x69, 0x26, 0x5e, 0x67, 0x24, 0x5c, 0x65,
+    0x22, 0x5a, 0x63, 0x20, 0x58, 0x61, 0x1e, 0x56, 0x5f, 0x1d, 0x55, 0x5d, 0x1a, 0x53, 0x5b, 0x18, 0x51, 0x5a, 0x17, 0x4f, 0x58,
+    0x17, 0x4e, 0x56, 0x16, 0x4c, 0x55, 0x15, 0x4a, 0x53, 0x14, 0x4a, 0x51, 0x13, 0x49, 0x51, 0x12, 0x47, 0x50, 0x11, 0x45, 0x4e,
+    0x11, 0x45, 0x4d, 0x10, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45,
+    0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f,
+    0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38,
+    0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0xce, 0xdf, 0xe1, 0xff, 0xff, 0xff,
+    0x39, 0x7c, 0x86, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x12, 0x62, 0x6d, 0xe1, 0xeb, 0xec, 0xfc, 0xfd, 0xfd,
+    0x47, 0x85, 0x8d, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64,
+    0x07, 0x59, 0x65, 0xc7, 0xda, 0xdc, 0xff, 0xff, 0xff, 0x5f, 0x94, 0x9b, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63,
+    0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61,
+    0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0xc5, 0xd7, 0xda, 0xff, 0xff, 0xff, 0x42, 0x7c, 0x85, 0x06, 0x53, 0x5e,
+    0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x63, 0x93, 0x99, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x83, 0xa8, 0xad, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57,
+    0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52,
+    0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x83, 0xa4, 0xa9, 0xff, 0xff, 0xff, 0xb0, 0xc4, 0xc7, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d,
+    0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x92, 0xab, 0xaf, 0xff, 0xff, 0xff, 0xa1, 0xb7, 0xba,
+    0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e,
+    0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x16, 0x40, 0x46, 0xe3, 0xe8, 0xe9,
+    0xf2, 0xf4, 0xf5, 0x23, 0x47, 0x4d, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x15, 0x37, 0x3d, 0xcf, 0xd5, 0xd6,
+    0xff, 0xff, 0xff, 0xc1, 0xc9, 0xca, 0x0d, 0x2c, 0x30, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21,
+    0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x01, 0x12, 0x14,
+    0x01, 0x10, 0x13, 0x01, 0x0e, 0x10, 0x01, 0x0c, 0x0e, 0x90, 0x9d, 0x9f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xcc, 0xdd, 0xdf, 0x88, 0xae, 0xb4, 0x83, 0xaa, 0xb0, 0x7e, 0xa7, 0xad,
+    0x7a, 0xa3, 0xaa, 0x75, 0xa0, 0xa6, 0x70, 0x9c, 0xa3, 0x6c, 0x99, 0xa0, 0x67, 0x95, 0x9d, 0x63, 0x92, 0x9a, 0x5e, 0x8e, 0x97,
+    0x5a, 0x8a, 0x93, 0x56, 0x88, 0x90, 0x51, 0x84, 0x8c, 0x4d, 0x81, 0x89, 0x4b, 0x7e, 0x87, 0x47, 0x7b, 0x84, 0x42, 0x78, 0x80,
+    0x3f, 0x75, 0x7e, 0x3b, 0x72, 0x7a, 0x38, 0x6f, 0x78, 0x34, 0x6c, 0x75, 0x32, 0x69, 0x72, 0x2f, 0x67, 0x70, 0x2d, 0x65, 0x6d,
+    0x2a, 0x63, 0x6b, 0x26, 0x60, 0x69, 0x26, 0x5e, 0x67, 0x24, 0x5c, 0x65, 0x22, 0x5a, 0x63, 0x20, 0x58, 0x61, 0x1e, 0x56, 0x5f,
+    0x1c, 0x54, 0x5d, 0x1a, 0x53, 0x5b, 0x18, 0x51, 0x5a, 0x17, 0x4f, 0x58, 0x17, 0x4e, 0x56, 0x16, 0x4c, 0x55, 0x15, 0x4a, 0x53,
+    0x14, 0x4a, 0x51, 0x13, 0x49, 0x51, 0x12, 0x47, 0x50, 0x11, 0x45, 0x4e, 0x11, 0x45, 0x4d, 0x10, 0x43, 0x4b, 0x0e, 0x41, 0x49,
+    0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43,
+    0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c,
+    0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35,
+    0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0xce, 0xdf, 0xe1, 0xff, 0xff, 0xff, 0x2e, 0x75, 0x7f, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x82, 0xac, 0xb2, 0xff, 0xff, 0xff, 0xd9, 0xe6, 0xe7, 0x35, 0x78, 0x82, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x0f, 0x5e, 0x69, 0x93, 0xb6, 0xbc, 0xff, 0xff, 0xff, 0xdf, 0xe9, 0xeb,
+    0x16, 0x62, 0x6d, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62,
+    0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60,
+    0xc5, 0xd7, 0xda, 0xff, 0xff, 0xff, 0x42, 0x7c, 0x85, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d,
+    0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x74, 0x9e, 0xa4, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x83, 0xa8, 0xad,
+    0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55,
+    0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x10, 0x4f, 0x59, 0xdf, 0xe7, 0xe8,
+    0xff, 0xff, 0xff, 0x4e, 0x7b, 0x81, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a,
+    0x04, 0x40, 0x49, 0x31, 0x62, 0x69, 0xf9, 0xfa, 0xfb, 0xf1, 0xf4, 0xf5, 0x22, 0x54, 0x5b, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43,
+    0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b,
+    0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x16, 0x40, 0x46, 0xe3, 0xe8, 0xe9, 0xf2, 0xf4, 0xf5, 0x23, 0x47, 0x4d, 0x03, 0x2c, 0x32,
+    0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x2c, 0x4a, 0x4e, 0xe6, 0xea, 0xea, 0xff, 0xff, 0xff, 0xa8, 0xb3, 0xb4,
+    0x04, 0x23, 0x28, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c,
+    0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x01, 0x12, 0x14, 0x01, 0x10, 0x13, 0x01, 0x0e, 0x10, 0x01, 0x0d, 0x0f,
+    0xa6, 0xb1, 0xb2, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xe1, 0xeb, 0xec, 0x87, 0xad, 0xb3, 0x82, 0xaa, 0xb0, 0x7d, 0xa6, 0xad, 0x79, 0xa2, 0xa9, 0x74, 0x9f, 0xa6, 0x70, 0x9b, 0xa2,
+    0x6b, 0x98, 0x9f, 0x66, 0x94, 0x9c, 0x62, 0x92, 0x99, 0x5d, 0x8e, 0x96, 0x5a, 0x8a, 0x93, 0x55, 0x87, 0x8f, 0x51, 0x84, 0x8c,
+    0x4d, 0x81, 0x89, 0x4a, 0x7e, 0x86, 0x46, 0x7a, 0x83, 0x42, 0x78, 0x80, 0x3e, 0x74, 0x7d, 0x3b, 0x72, 0x7a, 0x37, 0x6f, 0x77,
+    0x34, 0x6c, 0x75, 0x32, 0x69, 0x72, 0x2f, 0x67, 0x70, 0x2c, 0x64, 0x6d, 0x2a, 0x63, 0x6b, 0x26, 0x60, 0x69, 0x26, 0x5e, 0x67,
+    0x24, 0x5c, 0x65, 0x22, 0x5a, 0x63, 0x20, 0x58, 0x61, 0x1e, 0x56, 0x5f, 0x1c, 0x54, 0x5d, 0x1a, 0x53, 0x5b, 0x18, 0x51, 0x5a,
+    0x17, 0x4f, 0x58, 0x17, 0x4e, 0x56, 0x16, 0x4c, 0x55, 0x15, 0x4a, 0x53, 0x14, 0x4a, 0x51, 0x13, 0x49, 0x51, 0x12, 0x47, 0x50,
+    0x11, 0x45, 0x4e, 0x11, 0x45, 0x4d, 0x10, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46,
+    0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40,
+    0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39,
+    0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0xcd, 0xde, 0xe1,
+    0xff, 0xff, 0xff, 0x97, 0xbb, 0xc0, 0x7b, 0xa8, 0xaf, 0x7d, 0xa9, 0xb0, 0x7d, 0xa9, 0xb0, 0x7e, 0xaa, 0xb0, 0x77, 0xa6, 0xac,
+    0x1a, 0x68, 0x73, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x11, 0x61, 0x6d,
+    0xc1, 0xd5, 0xd8, 0xff, 0xff, 0xff, 0xf4, 0xf8, 0xf8, 0xa7, 0xc4, 0xc9, 0x80, 0xaa, 0xb0, 0x7b, 0xa7, 0xad, 0x92, 0xb6, 0xbb,
+    0xd4, 0xe2, 0xe4, 0xff, 0xff, 0xff, 0xf8, 0xfa, 0xfb, 0x4b, 0x86, 0x8e, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61,
+    0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0xc5, 0xd7, 0xda, 0xff, 0xff, 0xff, 0x42, 0x7c, 0x85,
+    0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b,
+    0x05, 0x50, 0x5b, 0x85, 0xaa, 0xaf, 0xff, 0xff, 0xff, 0x85, 0xa9, 0xae, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57,
+    0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53,
+    0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x5a, 0x86, 0x8c, 0xff, 0xff, 0xff, 0xd4, 0xdf, 0xe1, 0x0c, 0x4a, 0x53, 0x05, 0x44, 0x4d,
+    0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0xb4, 0xc5, 0xc8,
+    0xff, 0xff, 0xff, 0x7b, 0x99, 0x9d, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f,
+    0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x16, 0x40, 0x46,
+    0xe3, 0xe8, 0xe9, 0xf2, 0xf4, 0xf5, 0x23, 0x47, 0x4d, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e,
+    0x03, 0x27, 0x2c, 0x45, 0x5e, 0x62, 0xf8, 0xf9, 0xf9, 0xff, 0xff, 0xff, 0x8d, 0x9b, 0x9d, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23,
+    0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x02, 0x13, 0x16,
+    0x01, 0x12, 0x14, 0x01, 0x10, 0x13, 0x01, 0x0d, 0x10, 0x03, 0x12, 0x15, 0xb8, 0xbf, 0xc1, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf4, 0xf8, 0xf8, 0x93, 0xb5, 0xba, 0x81, 0xa9, 0xaf,
+    0x7d, 0xa5, 0xac, 0x78, 0xa2, 0xa9, 0x73, 0x9e, 0xa5, 0x70, 0x9b, 0xa2, 0x6b, 0x98, 0x9f, 0x66, 0x94, 0x9c, 0x61, 0x91, 0x98,
+    0x5d, 0x8e, 0x96, 0x59, 0x8a, 0x92, 0x55, 0x87, 0x8f, 0x50, 0x83, 0x8b, 0x4d, 0x81, 0x89, 0x4a, 0x7e, 0x86, 0x46, 0x7a, 0x83,
+    0x42, 0x78, 0x80, 0x3e, 0x74, 0x7d, 0x3a, 0x72, 0x7a, 0x37, 0x6f, 0x77, 0x34, 0x6c, 0x75, 0x32, 0x69, 0x72, 0x2f, 0x67, 0x70,
+    0x2c, 0x64, 0x6d, 0x2a, 0x63, 0x6b, 0x27, 0x60, 0x69, 0x25, 0x5d, 0x66, 0x23, 0x5b, 0x64, 0x21, 0x5a, 0x62, 0x20, 0x58, 0x61,
+    0x1e, 0x56, 0x5f, 0x1c, 0x54, 0x5d, 0x19, 0x52, 0x5a, 0x18, 0x51, 0x5a, 0x17, 0x4f, 0x58, 0x17, 0x4e, 0x56, 0x16, 0x4c, 0x55,
+    0x15, 0x4a, 0x53, 0x14, 0x4a, 0x51, 0x13, 0x49, 0x51, 0x12, 0x47, 0x50, 0x11, 0x45, 0x4e, 0x11, 0x45, 0x4d, 0x10, 0x43, 0x4b,
+    0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44,
+    0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d,
+    0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36,
+    0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0xd8, 0xe5, 0xe7, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x33, 0x79, 0x82, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x18, 0x65, 0x70, 0x9e, 0xbf, 0xc3, 0xf9, 0xfb, 0xfb,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xcf, 0xdf, 0xe1, 0x49, 0x85, 0x8e,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62,
+    0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60,
+    0x06, 0x54, 0x60, 0xcf, 0xde, 0xe0, 0xff, 0xff, 0xff, 0x44, 0x7e, 0x86, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d,
+    0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0xa6, 0xc0, 0xc4,
+    0x8e, 0xaf, 0xb4, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56,
+    0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x08, 0x4a, 0x53, 0xc5, 0xd4, 0xd7,
+    0xff, 0xff, 0xff, 0x76, 0x99, 0x9e, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b,
+    0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x4f, 0x78, 0x7e, 0xff, 0xff, 0xff, 0xe5, 0xeb, 0xec, 0x15, 0x49, 0x50,
+    0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c,
+    0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x17, 0x41, 0x47, 0xef, 0xf2, 0xf2, 0xff, 0xff, 0xff, 0x25, 0x49, 0x4e,
+    0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x69, 0x7c, 0x7f,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f, 0x8e, 0x90, 0x10, 0x2a, 0x2f, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e,
+    0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x01, 0x12, 0x14, 0x01, 0x10, 0x13, 0x00, 0x0d, 0x0f,
+    0x0a, 0x1c, 0x1f, 0xcc, 0xd1, 0xd2, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xaa, 0xc5, 0xc9, 0x81, 0xa9, 0xaf, 0x7d, 0xa5, 0xac, 0x78, 0xa2, 0xa9, 0x73, 0x9e, 0xa5,
+    0x6f, 0x9b, 0xa1, 0x6a, 0x97, 0x9e, 0x66, 0x94, 0x9c, 0x61, 0x91, 0x98, 0x5c, 0x8d, 0x95, 0x58, 0x89, 0x92, 0x54, 0x86, 0x8e,
+    0x50, 0x83, 0x8b, 0x4c, 0x80, 0x88, 0x49, 0x7d, 0x86, 0x44, 0x79, 0x82, 0x41, 0x77, 0x7f, 0x3e, 0x74, 0x7d, 0x3a, 0x72, 0x7a,
+    0x37, 0x6f, 0x77, 0x34, 0x6b, 0x75, 0x31, 0x68, 0x72, 0x2f, 0x67, 0x70, 0x2c, 0x64, 0x6d, 0x29, 0x62, 0x6b, 0x26, 0x60, 0x69,
+    0x25, 0x5d, 0x66, 0x23, 0x5b, 0x64, 0x21, 0x5a, 0x62, 0x1f, 0x57, 0x61, 0x1e, 0x56, 0x5f, 0x1c, 0x54, 0x5d, 0x19, 0x52, 0x5a,
+    0x18, 0x51, 0x5a, 0x17, 0x4f, 0x58, 0x17, 0x4e, 0x56, 0x16, 0x4c, 0x55, 0x15, 0x4a, 0x53, 0x14, 0x4a, 0x51, 0x13, 0x49, 0x51,
+    0x12, 0x47, 0x50, 0x11, 0x45, 0x4e, 0x34, 0x60, 0x67, 0x3f, 0x68, 0x6e, 0x3b, 0x65, 0x6b, 0x3b, 0x64, 0x6a, 0x3b, 0x63, 0x6a,
+    0x3b, 0x63, 0x69, 0x3b, 0x62, 0x68, 0x3b, 0x62, 0x68, 0x2b, 0x54, 0x5b, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41,
+    0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a,
+    0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30,
+    0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x6b, 0x9d, 0xa4, 0x86, 0xaf, 0xb5, 0x84, 0xae, 0xb4, 0x84, 0xae, 0xb4, 0x84, 0xae, 0xb4, 0x84, 0xae, 0xb4, 0x85, 0xaf, 0xb4,
+    0x7e, 0xaa, 0xb0, 0x1b, 0x69, 0x74, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x32, 0x76, 0x80, 0x68, 0x9a, 0xa1, 0x83, 0xac, 0xb2, 0x85, 0xae, 0xb4,
+    0x77, 0xa5, 0xab, 0x4b, 0x86, 0x8f, 0x0f, 0x5e, 0x69, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61,
+    0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x72, 0x9e, 0xa5, 0x98, 0xb9, 0xbd,
+    0x27, 0x6a, 0x73, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c,
+    0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x0f, 0x56, 0x60, 0x36, 0x71, 0x7a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58,
+    0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54,
+    0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x1c, 0x59, 0x61, 0xa7, 0xbe, 0xc2, 0xa7, 0xbe, 0xc1, 0x1c, 0x57, 0x5f, 0x05, 0x45, 0x4e,
+    0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48,
+    0x0a, 0x43, 0x4b, 0x96, 0xae, 0xb1, 0xb3, 0xc4, 0xc7, 0x2f, 0x5e, 0x64, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41,
+    0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38,
+    0x0d, 0x39, 0x3f, 0x74, 0x8c, 0x90, 0x7c, 0x92, 0x95, 0x13, 0x3a, 0x40, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f,
+    0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x62, 0x77, 0x79, 0x86, 0x95, 0x97, 0x7e, 0x8d, 0x8f,
+    0x2e, 0x45, 0x49, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18,
+    0x02, 0x13, 0x16, 0x01, 0x12, 0x14, 0x01, 0x10, 0x13, 0x00, 0x0b, 0x0d, 0x14, 0x29, 0x2c, 0xe5, 0xe7, 0xe7, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xd1, 0xdf, 0xe2,
+    0x7f, 0xa8, 0xae, 0x7b, 0xa4, 0xab, 0x77, 0xa1, 0xa8, 0x72, 0x9e, 0xa4, 0x6e, 0x9a, 0xa1, 0x69, 0x97, 0x9e, 0x65, 0x94, 0x9b,
+    0x5f, 0x90, 0x97, 0x5b, 0x8c, 0x95, 0x58, 0x89, 0x92, 0x53, 0x86, 0x8e, 0x4f, 0x82, 0x8b, 0x4c, 0x80, 0x88, 0x49, 0x7d, 0x86,
+    0x45, 0x7a, 0x82, 0x41, 0x77, 0x7f, 0x3d, 0x74, 0x7c, 0x3a, 0x72, 0x7a, 0x36, 0x6e, 0x76, 0x34, 0x6b, 0x75, 0x31, 0x68, 0x72,
+    0x2e, 0x66, 0x6f, 0x2c, 0x64, 0x6d, 0x29, 0x62, 0x6b, 0x26, 0x60, 0x69, 0x25, 0x5d, 0x66, 0x23, 0x5b, 0x64, 0x21, 0x5a, 0x62,
+    0x1f, 0x57, 0x61, 0x1d, 0x55, 0x5e, 0x1c, 0x54, 0x5d, 0x19, 0x52, 0x5a, 0x18, 0x51, 0x5a, 0x17, 0x4f, 0x58, 0x17, 0x4e, 0x56,
+    0x16, 0x4c, 0x55, 0x15, 0x4a, 0x53, 0x14, 0x4a, 0x51, 0x12, 0x48, 0x51, 0x12, 0x47, 0x50, 0x11, 0x45, 0x4e, 0xcc, 0xd7, 0xd9,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xae, 0xbe, 0xc1, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e,
+    0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37,
+    0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62,
+    0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60,
+    0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e,
+    0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a,
+    0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56,
+    0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51,
+    0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c,
+    0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45,
+    0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d,
+    0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34,
+    0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a,
+    0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f,
+    0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x01, 0x12, 0x14, 0x01, 0x10, 0x13,
+    0x00, 0x0a, 0x0c, 0x23, 0x3a, 0x3d, 0xfb, 0xfc, 0xfc, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf2, 0xf6, 0xf7, 0x8b, 0xb0, 0xb5, 0x7a, 0xa4, 0xaa, 0x75, 0xa0, 0xa7,
+    0x71, 0x9d, 0xa4, 0x6d, 0x99, 0xa0, 0x68, 0x96, 0x9d, 0x64, 0x92, 0x9a, 0x5f, 0x90, 0x97, 0x5a, 0x8c, 0x94, 0x57, 0x88, 0x91,
+    0x52, 0x85, 0x8d, 0x4e, 0x82, 0x8a, 0x4b, 0x7f, 0x88, 0x48, 0x7c, 0x85, 0x44, 0x79, 0x82, 0x40, 0x76, 0x7e, 0x3d, 0x74, 0x7c,
+    0x39, 0x71, 0x79, 0x36, 0x6e, 0x76, 0x33, 0x6a, 0x74, 0x31, 0x68, 0x72, 0x2e, 0x66, 0x6f, 0x2b, 0x63, 0x6c, 0x29, 0x62, 0x6b,
+    0x26, 0x5f, 0x68, 0x25, 0x5d, 0x66, 0x23, 0x5b, 0x64, 0x21, 0x5a, 0x62, 0x1f, 0x57, 0x61, 0x1d, 0x55, 0x5e, 0x1b, 0x53, 0x5c,
+    0x19, 0x52, 0x5a, 0x18, 0x51, 0x5a, 0x17, 0x4f, 0x58, 0x17, 0x4e, 0x56, 0x16, 0x4c, 0x55, 0x15, 0x4a, 0x53, 0x14, 0x4a, 0x51,
+    0x12, 0x48, 0x51, 0x12, 0x47, 0x50, 0x11, 0x45, 0x4e, 0xc9, 0xd5, 0xd7, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xac, 0xbc, 0xbf, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42,
+    0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b,
+    0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34,
+    0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62,
+    0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f,
+    0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c,
+    0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59,
+    0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55,
+    0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f,
+    0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49,
+    0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42,
+    0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a,
+    0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30,
+    0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26,
+    0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a,
+    0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x01, 0x12, 0x14, 0x01, 0x10, 0x13, 0x00, 0x0a, 0x0c, 0x37, 0x4f, 0x53, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xb4, 0xcc, 0xd0, 0x79, 0xa3, 0xaa, 0x74, 0x9f, 0xa6, 0x70, 0x9c, 0xa3, 0x6c, 0x99, 0xa0, 0x67, 0x96, 0x9d,
+    0x63, 0x92, 0x9a, 0x5e, 0x8f, 0x97, 0x5a, 0x8b, 0x94, 0x56, 0x88, 0x90, 0x51, 0x85, 0x8d, 0x4d, 0x81, 0x89, 0x4a, 0x7f, 0x87,
+    0x47, 0x7c, 0x84, 0x42, 0x78, 0x80, 0x3f, 0x76, 0x7e, 0x3c, 0x73, 0x7c, 0x38, 0x70, 0x79, 0x35, 0x6d, 0x76, 0x33, 0x6a, 0x74,
+    0x30, 0x68, 0x71, 0x2d, 0x65, 0x6e, 0x2a, 0x63, 0x6b, 0x28, 0x61, 0x6a, 0x26, 0x5f, 0x68, 0x24, 0x5d, 0x66, 0x21, 0x5a, 0x63,
+    0x20, 0x59, 0x61, 0x1f, 0x57, 0x61, 0x1d, 0x55, 0x5e, 0x1b, 0x53, 0x5c, 0x19, 0x52, 0x5a, 0x18, 0x51, 0x5a, 0x17, 0x4f, 0x58,
+    0x17, 0x4e, 0x56, 0x16, 0x4c, 0x55, 0x15, 0x4a, 0x53, 0x14, 0x4a, 0x51, 0x12, 0x48, 0x51, 0x12, 0x47, 0x50, 0x11, 0x45, 0x4e,
+    0xc9, 0xd5, 0xd7, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xac, 0xbc, 0xbf, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f,
+    0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38,
+    0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63,
+    0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61,
+    0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e,
+    0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b,
+    0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57,
+    0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52,
+    0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d,
+    0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46,
+    0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e,
+    0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36,
+    0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c,
+    0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21,
+    0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x01, 0x12, 0x14,
+    0x01, 0x10, 0x12, 0x00, 0x0b, 0x0d, 0x59, 0x71, 0x75, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xe9, 0xf0, 0xf1, 0x7e, 0xa6, 0xad,
+    0x73, 0x9e, 0xa6, 0x6f, 0x9b, 0xa2, 0x6a, 0x97, 0x9e, 0x66, 0x95, 0x9c, 0x62, 0x91, 0x99, 0x5d, 0x8e, 0x96, 0x59, 0x8a, 0x93,
+    0x55, 0x87, 0x90, 0x50, 0x84, 0x8c, 0x4d, 0x81, 0x89, 0x49, 0x7e, 0x86, 0x45, 0x7a, 0x83, 0x42, 0x78, 0x80, 0x3f, 0x76, 0x7e,
+    0x3b, 0x72, 0x7b, 0x38, 0x70, 0x79, 0x34, 0x6d, 0x75, 0x32, 0x6a, 0x73, 0x30, 0x68, 0x71, 0x2d, 0x65, 0x6e, 0x2a, 0x63, 0x6b,
+    0x28, 0x61, 0x6a, 0x25, 0x5e, 0x67, 0x24, 0x5d, 0x66, 0x22, 0x5a, 0x64, 0x1f, 0x58, 0x61, 0x1e, 0x57, 0x60, 0x1d, 0x55, 0x5e,
+    0x1b, 0x53, 0x5c, 0x19, 0x52, 0x5a, 0x17, 0x50, 0x59, 0x17, 0x4f, 0x57, 0x16, 0x4d, 0x56, 0x16, 0x4c, 0x55, 0x15, 0x4a, 0x53,
+    0x14, 0x4a, 0x51, 0x12, 0x48, 0x51, 0x12, 0x47, 0x50, 0x11, 0x45, 0x4e, 0xc9, 0xd5, 0xd7, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xac, 0xbc, 0xbf, 0x0d, 0x3b, 0x43,
+    0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c,
+    0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35,
+    0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62,
+    0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60,
+    0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d,
+    0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59,
+    0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55,
+    0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50,
+    0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a,
+    0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43,
+    0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b,
+    0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32,
+    0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27,
+    0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c,
+    0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x01, 0x12, 0x14, 0x01, 0x10, 0x12, 0x00, 0x0d, 0x0f, 0x8e, 0x9f, 0xa2,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xae, 0xc7, 0xcb, 0x71, 0x9d, 0xa4, 0x6e, 0x9b, 0xa1, 0x69, 0x97, 0x9e,
+    0x64, 0x94, 0x9b, 0x61, 0x90, 0x98, 0x5b, 0x8d, 0x95, 0x58, 0x8a, 0x92, 0x54, 0x86, 0x8f, 0x4f, 0x83, 0x8b, 0x4c, 0x80, 0x88,
+    0x48, 0x7d, 0x86, 0x44, 0x7a, 0x82, 0x41, 0x77, 0x80, 0x3e, 0x75, 0x7d, 0x3a, 0x72, 0x7b, 0x37, 0x6f, 0x78, 0x34, 0x6d, 0x75,
+    0x32, 0x6a, 0x73, 0x2f, 0x67, 0x70, 0x2c, 0x65, 0x6e, 0x2a, 0x63, 0x6b, 0x27, 0x60, 0x6a, 0x26, 0x5f, 0x68, 0x24, 0x5d, 0x66,
+    0x22, 0x5a, 0x64, 0x20, 0x59, 0x61, 0x1e, 0x57, 0x60, 0x1c, 0x54, 0x5e, 0x1b, 0x53, 0x5c, 0x18, 0x51, 0x5a, 0x17, 0x50, 0x59,
+    0x17, 0x4f, 0x57, 0x16, 0x4d, 0x56, 0x15, 0x4b, 0x54, 0x15, 0x4a, 0x53, 0x14, 0x4a, 0x51, 0x12, 0x48, 0x51, 0x12, 0x47, 0x50,
+    0x11, 0x45, 0x4e, 0xc9, 0xd5, 0xd7, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xac, 0xbc, 0xbf, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40,
+    0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39,
+    0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61,
+    0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e,
+    0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b,
+    0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57,
+    0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53,
+    0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d,
+    0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47,
+    0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f,
+    0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37,
+    0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e,
+    0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23,
+    0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x02, 0x13, 0x16,
+    0x01, 0x12, 0x14, 0x01, 0x0f, 0x12, 0x03, 0x12, 0x14, 0xc5, 0xcf, 0xd1, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xed, 0xf3, 0xf4, 0x7b, 0xa5, 0xab, 0x6c, 0x99, 0xa0, 0x67, 0x96, 0x9d, 0x64, 0x93, 0x9a, 0x5f, 0x8f, 0x97, 0x5a, 0x8c, 0x94,
+    0x57, 0x89, 0x92, 0x52, 0x85, 0x8e, 0x4e, 0x83, 0x8b, 0x4b, 0x7f, 0x88, 0x47, 0x7d, 0x85, 0x44, 0x7a, 0x82, 0x41, 0x77, 0x80,
+    0x3d, 0x74, 0x7c, 0x39, 0x71, 0x7a, 0x36, 0x6f, 0x77, 0x34, 0x6c, 0x75, 0x31, 0x69, 0x73, 0x2f, 0x67, 0x70, 0x2c, 0x65, 0x6e,
+    0x29, 0x62, 0x6b, 0x27, 0x60, 0x6a, 0x25, 0x5e, 0x67, 0x23, 0x5c, 0x65, 0x20, 0x59, 0x62, 0x1f, 0x58, 0x61, 0x1e, 0x57, 0x60,
+    0x1c, 0x54, 0x5e, 0x1a, 0x53, 0x5b, 0x18, 0x51, 0x5a, 0x17, 0x50, 0x59, 0x17, 0x4f, 0x57, 0x16, 0x4d, 0x56, 0x15, 0x4b, 0x54,
+    0x14, 0x4a, 0x52, 0x14, 0x4a, 0x51, 0x12, 0x48, 0x51, 0x11, 0x46, 0x4f, 0x11, 0x45, 0x4e, 0xc9, 0xd4, 0xd6, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xac, 0xbc, 0xbf,
+    0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d,
+    0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36,
+    0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62,
+    0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60,
+    0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d,
+    0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a,
+    0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56,
+    0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51,
+    0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b,
+    0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44,
+    0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c,
+    0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33,
+    0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29,
+    0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e,
+    0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x01, 0x12, 0x14, 0x00, 0x0e, 0x0f, 0x10, 0x24, 0x27,
+    0xe8, 0xec, 0xed, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xbf, 0xd3, 0xd6, 0x6a, 0x98, 0x9f,
+    0x66, 0x95, 0x9c, 0x62, 0x92, 0x99, 0x5e, 0x8e, 0x97, 0x5a, 0x8c, 0x94, 0x55, 0x88, 0x90, 0x51, 0x85, 0x8d, 0x4d, 0x82, 0x8a,
+    0x4a, 0x7f, 0x87, 0x46, 0x7c, 0x84, 0x43, 0x79, 0x82, 0x40, 0x76, 0x7f, 0x3c, 0x74, 0x7c, 0x39, 0x71, 0x7a, 0x35, 0x6e, 0x77,
+    0x33, 0x6b, 0x74, 0x30, 0x68, 0x72, 0x2e, 0x66, 0x70, 0x2b, 0x64, 0x6d, 0x28, 0x61, 0x6a, 0x26, 0x60, 0x69, 0x25, 0x5e, 0x67,
+    0x23, 0x5c, 0x65, 0x21, 0x5a, 0x63, 0x1e, 0x57, 0x60, 0x1d, 0x56, 0x5f, 0x1c, 0x54, 0x5e, 0x1a, 0x53, 0x5b, 0x18, 0x51, 0x5a,
+    0x17, 0x50, 0x59, 0x17, 0x4f, 0x57, 0x16, 0x4d, 0x56, 0x15, 0x4b, 0x54, 0x14, 0x4a, 0x52, 0x14, 0x4a, 0x51, 0x12, 0x48, 0x51,
+    0x11, 0x46, 0x4f, 0x11, 0x45, 0x4e, 0xc9, 0xd4, 0xd6, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xac, 0xbc, 0xbf, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41,
+    0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a,
+    0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30,
+    0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61,
+    0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f,
+    0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c,
+    0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58,
+    0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54,
+    0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e,
+    0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48,
+    0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41,
+    0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38,
+    0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f,
+    0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24,
+    0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18,
+    0x02, 0x13, 0x16, 0x01, 0x12, 0x14, 0x00, 0x0b, 0x0d, 0x28, 0x41, 0x44, 0xfd, 0xfe, 0xfe, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfd, 0xfe, 0xfe, 0x95, 0xb6, 0xbb, 0x64, 0x94, 0x9b, 0x61, 0x91, 0x98, 0x5c, 0x8d, 0x95,
+    0x58, 0x8b, 0x92, 0x54, 0x87, 0x90, 0x50, 0x84, 0x8d, 0x4d, 0x81, 0x8a, 0x49, 0x7e, 0x86, 0x45, 0x7b, 0x84, 0x42, 0x78, 0x81,
+    0x40, 0x76, 0x7f, 0x3b, 0x73, 0x7b, 0x38, 0x70, 0x79, 0x35, 0x6e, 0x77, 0x32, 0x6b, 0x73, 0x30, 0x68, 0x72, 0x2d, 0x65, 0x6f,
+    0x2a, 0x63, 0x6c, 0x29, 0x62, 0x6b, 0x26, 0x60, 0x69, 0x23, 0x5d, 0x66, 0x22, 0x5b, 0x64, 0x20, 0x59, 0x62, 0x1f, 0x58, 0x61,
+    0x1d, 0x56, 0x5f, 0x1b, 0x53, 0x5d, 0x1a, 0x53, 0x5b, 0x18, 0x51, 0x5a, 0x17, 0x50, 0x59, 0x17, 0x4f, 0x57, 0x16, 0x4d, 0x56,
+    0x15, 0x4b, 0x54, 0x14, 0x4a, 0x52, 0x13, 0x49, 0x51, 0x12, 0x48, 0x51, 0x11, 0x46, 0x4f, 0x11, 0x45, 0x4e, 0xc9, 0xd4, 0xd6,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xac, 0xbc, 0xbf, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e,
+    0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37,
+    0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62,
+    0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60,
+    0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e,
+    0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a,
+    0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56,
+    0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51,
+    0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c,
+    0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45,
+    0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d,
+    0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34,
+    0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a,
+    0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f,
+    0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x01, 0x12, 0x14, 0x00, 0x0b, 0x0e,
+    0x50, 0x6a, 0x6e, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xeb, 0xf2, 0xf3, 0xf5, 0xf8, 0xf9, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xeb, 0xf1, 0xf2, 0x75, 0x9e, 0xa5, 0x5f, 0x90, 0x97, 0x5b, 0x8c, 0x95, 0x56, 0x89, 0x91, 0x52, 0x86, 0x8f, 0x4f, 0x83, 0x8c,
+    0x4c, 0x81, 0x89, 0x48, 0x7d, 0x86, 0x44, 0x7b, 0x83, 0x40, 0x77, 0x80, 0x3e, 0x74, 0x7d, 0x3a, 0x72, 0x7b, 0x37, 0x6f, 0x79,
+    0x34, 0x6d, 0x76, 0x32, 0x6b, 0x73, 0x2f, 0x68, 0x71, 0x2c, 0x65, 0x6f, 0x2a, 0x63, 0x6c, 0x27, 0x60, 0x6a, 0x26, 0x5f, 0x68,
+    0x24, 0x5d, 0x67, 0x22, 0x5b, 0x64, 0x1f, 0x58, 0x62, 0x1e, 0x57, 0x60, 0x1d, 0x56, 0x5f, 0x1b, 0x53, 0x5d, 0x1a, 0x53, 0x5b,
+    0x18, 0x51, 0x5a, 0x17, 0x50, 0x58, 0x16, 0x4e, 0x57, 0x16, 0x4d, 0x56, 0x15, 0x4b, 0x54, 0x14, 0x4a, 0x52, 0x13, 0x49, 0x51,
+    0x12, 0x48, 0x51, 0x11, 0x46, 0x4f, 0x11, 0x45, 0x4e, 0xc9, 0xd4, 0xd6, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xac, 0xbc, 0xbf, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42,
+    0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b,
+    0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34,
+    0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62,
+    0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f,
+    0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c,
+    0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59,
+    0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55,
+    0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f,
+    0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49,
+    0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42,
+    0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a,
+    0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30,
+    0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26,
+    0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a,
+    0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x01, 0x11, 0x14, 0x00, 0x0f, 0x12, 0x9c, 0xac, 0xae, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xb5, 0xcd, 0xd1, 0xc8, 0xda, 0xdd, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xd5, 0xe2, 0xe4, 0x64, 0x93, 0x9b,
+    0x59, 0x8b, 0x94, 0x55, 0x89, 0x90, 0x51, 0x85, 0x8e, 0x4d, 0x82, 0x8b, 0x4a, 0x7f, 0x88, 0x46, 0x7c, 0x84, 0x43, 0x7a, 0x83,
+    0x40, 0x77, 0x80, 0x3d, 0x74, 0x7d, 0x39, 0x72, 0x7a, 0x36, 0x6f, 0x78, 0x34, 0x6d, 0x75, 0x31, 0x6a, 0x73, 0x2e, 0x67, 0x71,
+    0x2b, 0x64, 0x6e, 0x29, 0x63, 0x6c, 0x27, 0x60, 0x6a, 0x25, 0x5e, 0x68, 0x22, 0x5c, 0x65, 0x21, 0x5a, 0x64, 0x20, 0x59, 0x62,
+    0x1d, 0x57, 0x5f, 0x1c, 0x55, 0x5f, 0x1b, 0x53, 0x5d, 0x19, 0x52, 0x5b, 0x17, 0x50, 0x59, 0x17, 0x50, 0x58, 0x16, 0x4e, 0x57,
+    0x15, 0x4c, 0x55, 0x15, 0x4b, 0x54, 0x14, 0x4a, 0x52, 0x13, 0x49, 0x51, 0x12, 0x48, 0x51, 0x11, 0x46, 0x4f, 0x11, 0x45, 0x4e,
+    0xc9, 0xd4, 0xd6, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xac, 0xbc, 0xbf, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f,
+    0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38,
+    0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63,
+    0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61,
+    0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e,
+    0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b,
+    0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57,
+    0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52,
+    0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d,
+    0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46,
+    0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e,
+    0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36,
+    0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c,
+    0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21,
+    0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x00, 0x10, 0x12,
+    0x09, 0x1c, 0x20, 0xdb, 0xe1, 0xe2, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfc, 0xfd, 0xfd, 0x9a, 0xbc, 0xc0, 0x90, 0xb4, 0xba,
+    0xef, 0xf4, 0xf5, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xc7, 0xd8, 0xdb, 0x5a, 0x8c, 0x94, 0x53, 0x87, 0x8f, 0x4f, 0x84, 0x8d,
+    0x4c, 0x81, 0x8a, 0x48, 0x7e, 0x86, 0x45, 0x7b, 0x84, 0x42, 0x79, 0x82, 0x3f, 0x76, 0x7f, 0x3c, 0x73, 0x7c, 0x38, 0x71, 0x79,
+    0x35, 0x6e, 0x77, 0x33, 0x6c, 0x75, 0x30, 0x69, 0x72, 0x2d, 0x66, 0x70, 0x2b, 0x64, 0x6e, 0x28, 0x62, 0x6b, 0x26, 0x60, 0x69,
+    0x25, 0x5e, 0x68, 0x23, 0x5d, 0x66, 0x21, 0x5a, 0x64, 0x1f, 0x58, 0x62, 0x1d, 0x57, 0x5f, 0x1c, 0x55, 0x5f, 0x1a, 0x53, 0x5c,
+    0x19, 0x52, 0x5b, 0x17, 0x50, 0x59, 0x17, 0x50, 0x58, 0x16, 0x4e, 0x57, 0x15, 0x4c, 0x55, 0x14, 0x4b, 0x53, 0x14, 0x4a, 0x52,
+    0x13, 0x49, 0x51, 0x11, 0x47, 0x50, 0x11, 0x46, 0x4f, 0x11, 0x45, 0x4e, 0xc9, 0xd4, 0xd6, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xac, 0xbc, 0xbf, 0x0d, 0x3b, 0x43,
+    0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c,
+    0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35,
+    0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62,
+    0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60,
+    0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d,
+    0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59,
+    0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55,
+    0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50,
+    0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a,
+    0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43,
+    0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b,
+    0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32,
+    0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27,
+    0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c,
+    0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x02, 0x13, 0x16, 0x00, 0x0d, 0x0f, 0x27, 0x3f, 0x44, 0xfe, 0xfe, 0xfe, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xe4, 0xed, 0xee, 0x8a, 0xb1, 0xb7, 0x85, 0xad, 0xb3, 0xab, 0xc6, 0xca, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xc1, 0xd4, 0xd7, 0x56, 0x89, 0x91, 0x4d, 0x83, 0x8c, 0x4b, 0x80, 0x89, 0x47, 0x7d, 0x86, 0x44, 0x7b, 0x83,
+    0x40, 0x78, 0x81, 0x3e, 0x75, 0x7e, 0x3b, 0x72, 0x7b, 0x37, 0x70, 0x79, 0x34, 0x6d, 0x77, 0x32, 0x6b, 0x74, 0x2f, 0x69, 0x71,
+    0x2c, 0x66, 0x6f, 0x2a, 0x63, 0x6d, 0x27, 0x61, 0x6a, 0x26, 0x5f, 0x68, 0x24, 0x5e, 0x67, 0x21, 0x5b, 0x65, 0x20, 0x5a, 0x63,
+    0x1e, 0x57, 0x61, 0x1d, 0x57, 0x5f, 0x1b, 0x54, 0x5e, 0x1a, 0x53, 0x5c, 0x19, 0x52, 0x5b, 0x17, 0x50, 0x59, 0x17, 0x50, 0x58,
+    0x16, 0x4e, 0x57, 0x15, 0x4c, 0x55, 0x14, 0x4b, 0x53, 0x14, 0x4a, 0x52, 0x13, 0x49, 0x51, 0x11, 0x47, 0x50, 0x11, 0x46, 0x4f,
+    0x10, 0x44, 0x4d, 0xc9, 0xd4, 0xd6, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xac, 0xbc, 0xbf, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40,
+    0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39,
+    0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61,
+    0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e,
+    0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b,
+    0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57,
+    0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53,
+    0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d,
+    0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47,
+    0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f,
+    0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37,
+    0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e,
+    0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23,
+    0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x01, 0x13, 0x16,
+    0x00, 0x0e, 0x10, 0x5d, 0x75, 0x79, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xc6, 0xd9, 0xdc, 0x86, 0xae, 0xb4,
+    0x83, 0xab, 0xb2, 0x7f, 0xa8, 0xaf, 0xc6, 0xd8, 0xdb, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xc8, 0xd9, 0xdc,
+    0x59, 0x8a, 0x93, 0x49, 0x7f, 0x88, 0x46, 0x7d, 0x85, 0x42, 0x79, 0x82, 0x40, 0x77, 0x80, 0x3d, 0x74, 0x7e, 0x39, 0x71, 0x7a,
+    0x35, 0x6f, 0x77, 0x34, 0x6d, 0x76, 0x31, 0x6b, 0x73, 0x2e, 0x68, 0x71, 0x2c, 0x66, 0x6f, 0x2a, 0x63, 0x6d, 0x27, 0x61, 0x6a,
+    0x25, 0x5e, 0x68, 0x24, 0x5e, 0x67, 0x21, 0x5b, 0x65, 0x20, 0x5a, 0x63, 0x1e, 0x57, 0x61, 0x1c, 0x56, 0x5f, 0x1b, 0x54, 0x5e,
+    0x1a, 0x53, 0x5c, 0x18, 0x51, 0x5a, 0x17, 0x50, 0x58, 0x16, 0x4f, 0x58, 0x16, 0x4e, 0x57, 0x15, 0x4c, 0x55, 0x14, 0x4b, 0x53,
+    0x13, 0x49, 0x52, 0x13, 0x49, 0x51, 0x11, 0x47, 0x50, 0x11, 0x46, 0x4f, 0x10, 0x44, 0x4d, 0xc8, 0xd4, 0xd6, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xac, 0xbc, 0xbf,
+    0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d,
+    0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36,
+    0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62,
+    0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60,
+    0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d,
+    0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a,
+    0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56,
+    0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51,
+    0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b,
+    0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44,
+    0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c,
+    0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33,
+    0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29,
+    0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e,
+    0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x01, 0x12, 0x15, 0x03, 0x14, 0x17, 0xbb, 0xc8, 0xc9, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xa0, 0xc0, 0xc4, 0x84, 0xac, 0xb2, 0x80, 0xa9, 0xb0, 0x7c, 0xa6, 0xad, 0x7d, 0xa8, 0xae,
+    0xda, 0xe6, 0xe8, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xdd, 0xe7, 0xe9, 0x68, 0x94, 0x9d, 0x44, 0x7b, 0x84,
+    0x41, 0x79, 0x81, 0x3f, 0x77, 0x80, 0x3b, 0x73, 0x7c, 0x38, 0x70, 0x7a, 0x34, 0x6e, 0x77, 0x32, 0x6b, 0x75, 0x30, 0x6a, 0x73,
+    0x2d, 0x67, 0x70, 0x2a, 0x64, 0x6e, 0x28, 0x62, 0x6c, 0x26, 0x61, 0x6a, 0x25, 0x5e, 0x68, 0x23, 0x5d, 0x66, 0x20, 0x5b, 0x64,
+    0x1f, 0x59, 0x62, 0x1d, 0x57, 0x60, 0x1c, 0x56, 0x5f, 0x1a, 0x54, 0x5d, 0x19, 0x52, 0x5c, 0x18, 0x51, 0x5a, 0x17, 0x50, 0x58,
+    0x16, 0x4f, 0x58, 0x15, 0x4d, 0x56, 0x15, 0x4c, 0x55, 0x14, 0x4b, 0x53, 0x13, 0x49, 0x52, 0x13, 0x49, 0x51, 0x11, 0x47, 0x50,
+    0x11, 0x46, 0x4f, 0x2d, 0x5b, 0x63, 0xe2, 0xe8, 0xe9, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xac, 0xbc, 0xbf, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41,
+    0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a,
+    0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30,
+    0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61,
+    0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f,
+    0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c,
+    0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58,
+    0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54,
+    0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e,
+    0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48,
+    0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41,
+    0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38,
+    0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f,
+    0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24,
+    0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18,
+    0x00, 0x0f, 0x11, 0x1a, 0x32, 0x36, 0xf8, 0xfa, 0xfa, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xe6, 0xef, 0xf0, 0x87, 0xaf, 0xb5,
+    0x81, 0xaa, 0xb1, 0x7d, 0xa8, 0xae, 0x79, 0xa5, 0xab, 0x76, 0xa2, 0xa9, 0x81, 0xa9, 0xaf, 0xe8, 0xef, 0xf0, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf3, 0xf7, 0xf7, 0x91, 0xb2, 0xb7, 0x42, 0x79, 0x81, 0x3d, 0x75, 0x7e, 0x3a, 0x72, 0x7c,
+    0x37, 0x70, 0x79, 0x34, 0x6e, 0x76, 0x31, 0x6b, 0x74, 0x2e, 0x69, 0x72, 0x2c, 0x67, 0x6f, 0x2a, 0x64, 0x6e, 0x28, 0x62, 0x6c,
+    0x26, 0x60, 0x69, 0x24, 0x5e, 0x67, 0x22, 0x5c, 0x66, 0x20, 0x5b, 0x64, 0x1e, 0x58, 0x62, 0x1d, 0x57, 0x60, 0x1b, 0x55, 0x5e,
+    0x1a, 0x54, 0x5d, 0x19, 0x52, 0x5c, 0x18, 0x51, 0x5a, 0x17, 0x50, 0x58, 0x16, 0x4f, 0x58, 0x15, 0x4d, 0x56, 0x14, 0x4c, 0x54,
+    0x14, 0x4b, 0x53, 0x13, 0x49, 0x52, 0x12, 0x48, 0x50, 0x11, 0x47, 0x50, 0x61, 0x84, 0x8a, 0xe5, 0xea, 0xeb, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xac, 0xbc, 0xbf, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e,
+    0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37,
+    0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62,
+    0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60,
+    0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e,
+    0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a,
+    0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56,
+    0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51,
+    0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c,
+    0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45,
+    0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d,
+    0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34,
+    0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a,
+    0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f,
+    0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x02, 0x15, 0x18, 0x00, 0x0e, 0x11, 0x53, 0x6c, 0x70, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xc1, 0xd6, 0xd9, 0x82, 0xab, 0xb2, 0x7e, 0xa8, 0xaf, 0x7a, 0xa6, 0xad, 0x77, 0xa3, 0xaa,
+    0x73, 0xa0, 0xa7, 0x6f, 0x9d, 0xa4, 0x85, 0xac, 0xb2, 0xed, 0xf3, 0xf4, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xcd, 0xdb, 0xde, 0x66, 0x93, 0x9a, 0x39, 0x72, 0x7b, 0x36, 0x6f, 0x78, 0x33, 0x6d, 0x76, 0x30, 0x6a, 0x74,
+    0x2d, 0x68, 0x71, 0x2b, 0x66, 0x6f, 0x28, 0x63, 0x6d, 0x27, 0x61, 0x6b, 0x26, 0x61, 0x6a, 0x24, 0x5e, 0x67, 0x22, 0x5c, 0x66,
+    0x20, 0x5b, 0x64, 0x1e, 0x58, 0x62, 0x1c, 0x56, 0x60, 0x1b, 0x55, 0x5e, 0x1a, 0x54, 0x5d, 0x18, 0x51, 0x5b, 0x17, 0x51, 0x59,
+    0x17, 0x50, 0x58, 0x16, 0x4f, 0x58, 0x15, 0x4d, 0x56, 0x14, 0x4c, 0x54, 0x13, 0x4a, 0x53, 0x13, 0x49, 0x52, 0x3a, 0x67, 0x6e,
+    0xb2, 0xc4, 0xc7, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xac, 0xbc, 0xbf, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42,
+    0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b,
+    0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34,
+    0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62,
+    0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f,
+    0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c,
+    0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59,
+    0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55,
+    0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f,
+    0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49,
+    0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42,
+    0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a,
+    0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30,
+    0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26,
+    0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a,
+    0x01, 0x14, 0x17, 0x03, 0x15, 0x19, 0xb8, 0xc4, 0xc6, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfb, 0xfc, 0xfd, 0x97, 0xba, 0xbf,
+    0x7e, 0xa9, 0xaf, 0x7a, 0xa6, 0xad, 0x78, 0xa4, 0xab, 0x74, 0xa1, 0xa8, 0x70, 0x9e, 0xa6, 0x6c, 0x9b, 0xa3, 0x69, 0x99, 0xa0,
+    0x81, 0xa9, 0xb0, 0xec, 0xf2, 0xf3, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfb, 0xfc, 0xfc,
+    0xb9, 0xcd, 0xd1, 0x5f, 0x8d, 0x94, 0x33, 0x6d, 0x76, 0x2f, 0x69, 0x73, 0x2c, 0x67, 0x70, 0x2a, 0x65, 0x6e, 0x27, 0x62, 0x6c,
+    0x26, 0x60, 0x6a, 0x24, 0x5f, 0x68, 0x23, 0x5d, 0x66, 0x21, 0x5c, 0x65, 0x1f, 0x5a, 0x63, 0x1d, 0x58, 0x61, 0x1c, 0x56, 0x60,
+    0x1b, 0x55, 0x5e, 0x19, 0x53, 0x5d, 0x18, 0x51, 0x5b, 0x17, 0x51, 0x59, 0x16, 0x4f, 0x58, 0x15, 0x4e, 0x57, 0x15, 0x4d, 0x56,
+    0x14, 0x4c, 0x54, 0x40, 0x6d, 0x74, 0xa4, 0xb9, 0xbd, 0xf7, 0xf9, 0xf9, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xae, 0xbe, 0xc1, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f,
+    0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38,
+    0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63,
+    0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61,
+    0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e,
+    0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b,
+    0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57,
+    0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52,
+    0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d,
+    0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46,
+    0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e,
+    0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36,
+    0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c,
+    0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21,
+    0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x02, 0x17, 0x1a, 0x00, 0x10, 0x13, 0x1f, 0x39, 0x3d, 0xfe, 0xfe, 0xfe,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xda, 0xe7, 0xe9, 0x7e, 0xa9, 0xb0, 0x7b, 0xa7, 0xae, 0x78, 0xa4, 0xab, 0x74, 0xa1, 0xa9,
+    0x71, 0x9f, 0xa6, 0x6d, 0x9c, 0xa4, 0x6a, 0x99, 0xa1, 0x66, 0x97, 0x9f, 0x63, 0x94, 0x9c, 0x79, 0xa4, 0xa9, 0xe4, 0xed, 0xee,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfc, 0xfd, 0xfd, 0xc9, 0xd8, 0xda,
+    0x81, 0xa4, 0xaa, 0x42, 0x78, 0x80, 0x29, 0x65, 0x6e, 0x27, 0x62, 0x6c, 0x25, 0x5f, 0x69, 0x24, 0x5f, 0x68, 0x23, 0x5d, 0x66,
+    0x20, 0x5b, 0x64, 0x1e, 0x59, 0x63, 0x1d, 0x58, 0x61, 0x1b, 0x55, 0x5f, 0x1a, 0x55, 0x5d, 0x19, 0x53, 0x5d, 0x18, 0x51, 0x5b,
+    0x17, 0x51, 0x59, 0x17, 0x50, 0x59, 0x31, 0x63, 0x6b, 0x70, 0x93, 0x98, 0xbe, 0xce, 0xd0, 0xfa, 0xfb, 0xfc, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xb4, 0xc3, 0xc5, 0x54, 0x75, 0x7b, 0x0d, 0x3b, 0x43,
+    0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c,
+    0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35,
+    0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62,
+    0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60,
+    0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d,
+    0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59,
+    0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55,
+    0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50,
+    0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a,
+    0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43,
+    0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b,
+    0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32,
+    0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27,
+    0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c,
+    0x02, 0x16, 0x1a, 0x00, 0x10, 0x13, 0x6a, 0x82, 0x85, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xaa, 0xc6, 0xcb,
+    0x7a, 0xa7, 0xae, 0x78, 0xa4, 0xab, 0x75, 0xa2, 0xa9, 0x71, 0xa0, 0xa7, 0x6e, 0x9d, 0xa4, 0x6b, 0x9b, 0xa2, 0x68, 0x98, 0xa0,
+    0x64, 0x96, 0x9d, 0x60, 0x92, 0x9a, 0x5d, 0x91, 0x98, 0x69, 0x98, 0xa0, 0xd1, 0xdf, 0xe2, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf4, 0xf7, 0xf8, 0xc8, 0xd7, 0xda,
+    0x98, 0xb5, 0xb9, 0x72, 0x97, 0x9e, 0x52, 0x80, 0x87, 0x3b, 0x6f, 0x77, 0x2d, 0x65, 0x6d, 0x29, 0x61, 0x6a, 0x27, 0x5f, 0x68,
+    0x29, 0x60, 0x69, 0x39, 0x6c, 0x73, 0x4f, 0x7b, 0x83, 0x6e, 0x92, 0x98, 0x95, 0xb0, 0xb3, 0xc7, 0xd5, 0xd7, 0xf4, 0xf7, 0xf7,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0x9d, 0xb1, 0xb4, 0x13, 0x42, 0x49, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40,
+    0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39,
+    0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61,
+    0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e,
+    0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b,
+    0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57,
+    0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53,
+    0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d,
+    0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47,
+    0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f,
+    0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37,
+    0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e,
+    0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23,
+    0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x02, 0x18, 0x1c, 0x00, 0x14, 0x18, 0x0a, 0x21, 0x25, 0xdb, 0xe1, 0xe3,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xe9, 0xf0, 0xf1, 0x80, 0xab, 0xb1, 0x78, 0xa5, 0xac, 0x74, 0xa2, 0xa9, 0x71, 0xa0, 0xa7,
+    0x6e, 0x9e, 0xa5, 0x6b, 0x9b, 0xa2, 0x67, 0x98, 0xa0, 0x65, 0x96, 0x9e, 0x61, 0x94, 0x9b, 0x5d, 0x91, 0x99, 0x5a, 0x8f, 0x96,
+    0x57, 0x8b, 0x94, 0x58, 0x8c, 0x94, 0xae, 0xc7, 0xcb, 0xfe, 0xfe, 0xfe, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfc, 0xfc, 0xfd,
+    0xf5, 0xf7, 0xf8, 0xee, 0xf3, 0xf3, 0xea, 0xef, 0xf0, 0xea, 0xef, 0xf0, 0xf0, 0xf4, 0xf4, 0xf5, 0xf8, 0xf8, 0xfd, 0xfe, 0xfe,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfa, 0xfb, 0xfb, 0x7b, 0x96, 0x9a, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44,
+    0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d,
+    0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36,
+    0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62,
+    0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60,
+    0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d,
+    0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a,
+    0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56,
+    0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51,
+    0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b,
+    0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44,
+    0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c,
+    0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33,
+    0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29,
+    0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e,
+    0x02, 0x18, 0x1c, 0x00, 0x11, 0x14, 0x40, 0x5c, 0x5f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xb5, 0xce, 0xd2,
+    0x77, 0xa4, 0xab, 0x74, 0xa3, 0xaa, 0x71, 0xa0, 0xa7, 0x6e, 0x9e, 0xa5, 0x6b, 0x9b, 0xa3, 0x68, 0x99, 0xa0, 0x65, 0x97, 0x9f,
+    0x61, 0x94, 0x9c, 0x5e, 0x92, 0x99, 0x5b, 0x8f, 0x98, 0x57, 0x8d, 0x95, 0x54, 0x89, 0x92, 0x51, 0x87, 0x90, 0x4e, 0x85, 0x8d,
+    0x7e, 0xa5, 0xac, 0xe4, 0xed, 0xee, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xdb, 0xe3, 0xe4, 0x4a, 0x6f, 0x75,
+    0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41,
+    0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a,
+    0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30,
+    0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61,
+    0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f,
+    0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c,
+    0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58,
+    0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54,
+    0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e,
+    0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48,
+    0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41,
+    0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38,
+    0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f,
+    0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24,
+    0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x1a, 0x1e, 0x00, 0x17, 0x1b, 0x03, 0x19, 0x1c, 0xae, 0xbd, 0xbf,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf0, 0xf5, 0xf6, 0x81, 0xab, 0xb2, 0x73, 0xa2, 0xa9, 0x70, 0xa0, 0xa7, 0x6d, 0x9e, 0xa5,
+    0x6b, 0x9b, 0xa3, 0x68, 0x99, 0xa1, 0x64, 0x96, 0x9e, 0x62, 0x95, 0x9d, 0x5f, 0x92, 0x9a, 0x5b, 0x90, 0x98, 0x58, 0x8d, 0x96,
+    0x55, 0x8b, 0x93, 0x52, 0x88, 0x91, 0x4e, 0x85, 0x8e, 0x4c, 0x84, 0x8c, 0x49, 0x80, 0x89, 0x54, 0x88, 0x90, 0xac, 0xc5, 0xc9,
+    0xfa, 0xfb, 0xfc, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xfb, 0xfc, 0xfc, 0x9a, 0xaf, 0xb3, 0x1e, 0x4d, 0x55, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45,
+    0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e,
+    0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37,
+    0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62,
+    0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60,
+    0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e,
+    0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a,
+    0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56,
+    0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51,
+    0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c,
+    0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45,
+    0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d,
+    0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34,
+    0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a,
+    0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f,
+    0x02, 0x1a, 0x1e, 0x00, 0x13, 0x16, 0x29, 0x46, 0x4b, 0xfe, 0xfe, 0xfe, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xb9, 0xd1, 0xd5,
+    0x72, 0xa1, 0xa9, 0x6f, 0xa0, 0xa7, 0x6c, 0x9e, 0xa5, 0x6a, 0x9b, 0xa3, 0x67, 0x99, 0xa1, 0x64, 0x97, 0x9f, 0x61, 0x94, 0x9c,
+    0x5f, 0x93, 0x9b, 0x5c, 0x90, 0x98, 0x58, 0x8e, 0x96, 0x56, 0x8b, 0x94, 0x52, 0x89, 0x91, 0x4f, 0x86, 0x8f, 0x4c, 0x84, 0x8d,
+    0x4a, 0x82, 0x8a, 0x47, 0x7f, 0x88, 0x44, 0x7d, 0x86, 0x41, 0x7a, 0x84, 0x66, 0x94, 0x9b, 0xc9, 0xd9, 0xdc, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xc2, 0xcf, 0xd1, 0x47, 0x6e, 0x75, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48,
+    0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42,
+    0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b,
+    0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34,
+    0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62,
+    0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f,
+    0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c,
+    0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59,
+    0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55,
+    0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f,
+    0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49,
+    0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42,
+    0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a,
+    0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30,
+    0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26,
+    0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1b, 0x1f, 0x02, 0x19, 0x1d, 0x01, 0x18, 0x1b, 0x96, 0xa9, 0xac,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xef, 0xf5, 0xf5, 0x7e, 0xaa, 0xb1, 0x6e, 0x9f, 0xa7, 0x6c, 0x9d, 0xa5, 0x69, 0x9b, 0xa3,
+    0x66, 0x99, 0xa1, 0x64, 0x97, 0x9f, 0x61, 0x95, 0x9d, 0x5e, 0x92, 0x9a, 0x5c, 0x91, 0x99, 0x59, 0x8e, 0x97, 0x56, 0x8c, 0x94,
+    0x53, 0x8a, 0x92, 0x50, 0x88, 0x90, 0x4d, 0x85, 0x8e, 0x4a, 0x82, 0x8b, 0x48, 0x80, 0x89, 0x45, 0x7e, 0x86, 0x42, 0x7c, 0x85,
+    0x3f, 0x79, 0x83, 0x3c, 0x77, 0x80, 0x3a, 0x75, 0x7f, 0x73, 0x9c, 0xa3, 0xcd, 0xdc, 0xde, 0xfe, 0xfe, 0xfe, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0xfe, 0xfe, 0xc4, 0xd1, 0xd3, 0x58, 0x7d, 0x83,
+    0x11, 0x45, 0x4d, 0x0f, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45,
+    0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f,
+    0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38,
+    0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63,
+    0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61,
+    0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e,
+    0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b,
+    0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57,
+    0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52,
+    0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d,
+    0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46,
+    0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e,
+    0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36,
+    0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c,
+    0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21,
+    0x02, 0x1b, 0x1f, 0x00, 0x14, 0x18, 0x23, 0x40, 0x45, 0xfe, 0xfe, 0xfe, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xb3, 0xcd, 0xd1,
+    0x6d, 0x9f, 0xa6, 0x6b, 0x9d, 0xa5, 0x69, 0x9b, 0xa3, 0x66, 0x99, 0xa1, 0x63, 0x97, 0x9f, 0x60, 0x94, 0x9d, 0x5e, 0x93, 0x9b,
+    0x5c, 0x91, 0x99, 0x59, 0x8f, 0x97, 0x56, 0x8c, 0x95, 0x53, 0x8a, 0x92, 0x50, 0x88, 0x91, 0x4d, 0x86, 0x8e, 0x4b, 0x83, 0x8c,
+    0x48, 0x80, 0x8a, 0x45, 0x7f, 0x87, 0x42, 0x7c, 0x85, 0x3f, 0x7a, 0x83, 0x3e, 0x78, 0x82, 0x3a, 0x76, 0x7f, 0x37, 0x73, 0x7d,
+    0x35, 0x71, 0x7b, 0x35, 0x70, 0x79, 0x66, 0x92, 0x99, 0xb4, 0xca, 0xcd, 0xf1, 0xf5, 0xf6, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xec, 0xf1, 0xf1,
+    0xa4, 0xb9, 0xbc, 0x4a, 0x73, 0x7a, 0x12, 0x46, 0x4f, 0x0f, 0x44, 0x4d, 0x0f, 0x44, 0x4c, 0x0f, 0x43, 0x4b, 0x0e, 0x41, 0x49,
+    0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43,
+    0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c,
+    0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35,
+    0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62,
+    0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60,
+    0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d,
+    0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59,
+    0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55,
+    0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50,
+    0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a,
+    0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43,
+    0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b,
+    0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32,
+    0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27,
+    0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x02, 0x1d, 0x21, 0x02, 0x1a, 0x1e, 0x01, 0x18, 0x1c, 0x94, 0xa7, 0xaa,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xe8, 0xf0, 0xf1, 0x74, 0xa3, 0xab, 0x6a, 0x9d, 0xa4, 0x67, 0x9a, 0xa2, 0x65, 0x99, 0xa1,
+    0x62, 0x97, 0x9f, 0x60, 0x94, 0x9d, 0x5d, 0x92, 0x9a, 0x5b, 0x91, 0x99, 0x58, 0x8e, 0x97, 0x55, 0x8c, 0x95, 0x54, 0x8a, 0x93,
+    0x50, 0x88, 0x91, 0x4d, 0x86, 0x8f, 0x4b, 0x84, 0x8c, 0x48, 0x81, 0x8b, 0x45, 0x7f, 0x88, 0x43, 0x7d, 0x86, 0x40, 0x7b, 0x83,
+    0x3e, 0x79, 0x82, 0x3b, 0x76, 0x80, 0x38, 0x75, 0x7e, 0x35, 0x72, 0x7c, 0x33, 0x70, 0x7a, 0x32, 0x6e, 0x78, 0x2f, 0x6b, 0x75,
+    0x2d, 0x6a, 0x74, 0x40, 0x77, 0x80, 0x7b, 0xa1, 0xa7, 0xb6, 0xcb, 0xce, 0xe6, 0xed, 0xee, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe, 0xfe, 0xfe,
+    0xde, 0xe6, 0xe7, 0xa9, 0xbd, 0xc0, 0x65, 0x89, 0x8e, 0x25, 0x56, 0x5e, 0x11, 0x47, 0x4f, 0x10, 0x46, 0x4f, 0x0f, 0x45, 0x4e,
+    0x0f, 0x44, 0x4d, 0x0f, 0x44, 0x4c, 0x0f, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46,
+    0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40,
+    0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39,
+    0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61,
+    0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e,
+    0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b,
+    0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57,
+    0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53,
+    0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d,
+    0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47,
+    0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f,
+    0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37,
+    0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e,
+    0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23,
+    0x02, 0x1d, 0x21, 0x00, 0x16, 0x19, 0x29, 0x47, 0x4c, 0xfe, 0xfe, 0xfe, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xa0, 0xc1, 0xc6,
+    0x69, 0x9c, 0xa4, 0x66, 0x9a, 0xa2, 0x63, 0x98, 0xa0, 0x61, 0x96, 0x9e, 0x5f, 0x95, 0x9d, 0x5c, 0x92, 0x9a, 0x5a, 0x90, 0x99,
+    0x57, 0x8e, 0x97, 0x55, 0x8c, 0x95, 0x53, 0x8a, 0x93, 0x50, 0x88, 0x91, 0x4d, 0x87, 0x8f, 0x4b, 0x84, 0x8d, 0x48, 0x82, 0x8b,
+    0x45, 0x7f, 0x89, 0x43, 0x7d, 0x87, 0x40, 0x7b, 0x84, 0x3e, 0x79, 0x82, 0x3c, 0x77, 0x80, 0x39, 0x75, 0x7f, 0x36, 0x73, 0x7d,
+    0x33, 0x70, 0x7b, 0x32, 0x6e, 0x79, 0x30, 0x6d, 0x77, 0x2e, 0x6b, 0x74, 0x2c, 0x69, 0x73, 0x2a, 0x67, 0x71, 0x27, 0x64, 0x6e,
+    0x26, 0x64, 0x6d, 0x31, 0x6a, 0x75, 0x52, 0x83, 0x8a, 0x7e, 0xa2, 0xa7, 0xa2, 0xbc, 0xc0, 0xbc, 0xce, 0xd1, 0xd0, 0xdd, 0xdf,
+    0xdf, 0xe7, 0xe9, 0xe9, 0xef, 0xf0, 0xec, 0xf1, 0xf2, 0xeb, 0xf0, 0xf1, 0xe6, 0xed, 0xee, 0xdb, 0xe4, 0xe5, 0xca, 0xd7, 0xd9,
+    0xb4, 0xc6, 0xca, 0x97, 0xb1, 0xb5, 0x6e, 0x91, 0x97, 0x3e, 0x6e, 0x75, 0x1d, 0x53, 0x5c, 0x12, 0x4a, 0x53, 0x12, 0x49, 0x52,
+    0x11, 0x47, 0x50, 0x11, 0x47, 0x4f, 0x10, 0x46, 0x4f, 0x0f, 0x45, 0x4e, 0x0f, 0x44, 0x4d, 0x0f, 0x44, 0x4c, 0x0f, 0x43, 0x4b,
+    0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44,
+    0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d,
+    0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36,
+    0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62,
+    0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60,
+    0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d,
+    0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a,
+    0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56,
+    0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51,
+    0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b,
+    0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44,
+    0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c,
+    0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33,
+    0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29,
+    0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x02, 0x1e, 0x23, 0x01, 0x1b, 0x1f, 0x03, 0x1d, 0x21, 0xa6, 0xb6, 0xb9,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xd6, 0xe4, 0xe7, 0x68, 0x9b, 0xa3, 0x65, 0x9a, 0xa1, 0x62, 0x98, 0xa0, 0x60, 0x96, 0x9f,
+    0x5e, 0x94, 0x9c, 0x5b, 0x92, 0x9a, 0x59, 0x90, 0x99, 0x56, 0x8e, 0x96, 0x55, 0x8c, 0x95, 0x52, 0x8a, 0x93, 0x50, 0x89, 0x91,
+    0x4d, 0x86, 0x8f, 0x4b, 0x85, 0x8d, 0x48, 0x82, 0x8b, 0x45, 0x80, 0x89, 0x43, 0x7e, 0x88, 0x40, 0x7b, 0x85, 0x3e, 0x7a, 0x83,
+    0x3c, 0x77, 0x80, 0x3a, 0x76, 0x7f, 0x37, 0x73, 0x7d, 0x34, 0x72, 0x7b, 0x33, 0x70, 0x7a, 0x30, 0x6d, 0x77, 0x2e, 0x6c, 0x75,
+    0x2c, 0x69, 0x73, 0x2a, 0x68, 0x72, 0x28, 0x66, 0x70, 0x26, 0x64, 0x6e, 0x25, 0x63, 0x6c, 0x23, 0x61, 0x6b, 0x22, 0x60, 0x69,
+    0x20, 0x5e, 0x67, 0x1f, 0x5c, 0x67, 0x1e, 0x5b, 0x65, 0x21, 0x5d, 0x67, 0x28, 0x62, 0x6a, 0x2c, 0x64, 0x6c, 0x2b, 0x63, 0x6c,
+    0x2a, 0x62, 0x6a, 0x27, 0x5f, 0x68, 0x21, 0x59, 0x62, 0x1a, 0x53, 0x5d, 0x15, 0x4f, 0x58, 0x15, 0x4f, 0x57, 0x13, 0x4d, 0x56,
+    0x13, 0x4d, 0x56, 0x12, 0x4b, 0x54, 0x12, 0x4a, 0x53, 0x11, 0x48, 0x51, 0x11, 0x47, 0x50, 0x11, 0x47, 0x4f, 0x0f, 0x46, 0x4f,
+    0x0f, 0x45, 0x4e, 0x0f, 0x44, 0x4d, 0x0f, 0x44, 0x4c, 0x0f, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47,
+    0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41,
+    0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a,
+    0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30,
+    0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61,
+    0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f,
+    0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c,
+    0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58,
+    0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54,
+    0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e,
+    0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48,
+    0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41,
+    0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38,
+    0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f,
+    0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24,
+    0x02, 0x1e, 0x23, 0x00, 0x17, 0x1a, 0x3d, 0x5b, 0x5f, 0xfe, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf8, 0xfb, 0xfb, 0x86, 0xaf, 0xb6,
+    0x63, 0x98, 0xa1, 0x61, 0x97, 0x9f, 0x5f, 0x95, 0x9d, 0x5d, 0x93, 0x9c, 0x5a, 0x91, 0x9a, 0x58, 0x90, 0x99, 0x55, 0x8e, 0x96,
+    0x54, 0x8c, 0x95, 0x51, 0x8a, 0x93, 0x4f, 0x88, 0x91, 0x4d, 0x87, 0x90, 0x4a, 0x84, 0x8d, 0x48, 0x83, 0x8b, 0x45, 0x80, 0x8a,
+    0x43, 0x7f, 0x88, 0x40, 0x7c, 0x86, 0x3e, 0x7a, 0x84, 0x3c, 0x78, 0x81, 0x3a, 0x76, 0x7f, 0x37, 0x74, 0x7d, 0x35, 0x72, 0x7c,
+    0x33, 0x71, 0x7a, 0x31, 0x6e, 0x79, 0x2f, 0x6c, 0x77, 0x2d, 0x6b, 0x75, 0x2b, 0x69, 0x73, 0x29, 0x67, 0x71, 0x27, 0x65, 0x6f,
+    0x26, 0x63, 0x6d, 0x24, 0x62, 0x6c, 0x22, 0x60, 0x6a, 0x21, 0x5f, 0x69, 0x1f, 0x5d, 0x67, 0x1e, 0x5b, 0x66, 0x1d, 0x5a, 0x64,
+    0x1c, 0x59, 0x63, 0x1a, 0x57, 0x60, 0x19, 0x56, 0x60, 0x17, 0x55, 0x5e, 0x17, 0x54, 0x5d, 0x17, 0x52, 0x5c, 0x16, 0x51, 0x5a,
+    0x16, 0x50, 0x5a, 0x15, 0x4f, 0x58, 0x14, 0x4e, 0x57, 0x13, 0x4d, 0x56, 0x12, 0x4c, 0x55, 0x12, 0x4b, 0x54, 0x12, 0x4a, 0x53,
+    0x11, 0x48, 0x51, 0x11, 0x47, 0x50, 0x11, 0x47, 0x4f, 0x0f, 0x46, 0x4f, 0x0f, 0x45, 0x4e, 0x0f, 0x44, 0x4d, 0x0f, 0x44, 0x4c,
+    0x0f, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45,
+    0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e,
+    0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37,
+    0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62,
+    0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60,
+    0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e,
+    0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a,
+    0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56,
+    0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51,
+    0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c,
+    0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45,
+    0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d,
+    0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34,
+    0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a,
+    0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x02, 0x20, 0x24, 0x00, 0x1b, 0x1f, 0x0b, 0x28, 0x2c, 0xca, 0xd5, 0xd6,
+    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xb1, 0xcc, 0xd1, 0x61, 0x98, 0xa0, 0x5f, 0x96, 0x9f, 0x5d, 0x95, 0x9d, 0x5b, 0x93, 0x9b,
+    0x59, 0x91, 0x9a, 0x57, 0x90, 0x98, 0x55, 0x8e, 0x97, 0x53, 0x8c, 0x95, 0x51, 0x8a, 0x93, 0x4e, 0x88, 0x91, 0x4c, 0x86, 0x8f,
+    0x4a, 0x85, 0x8e, 0x48, 0x82, 0x8c, 0x45, 0x81, 0x8a, 0x42, 0x7e, 0x88, 0x40, 0x7d, 0x86, 0x3e, 0x7a, 0x84, 0x3c, 0x78, 0x82,
+    0x3a, 0x77, 0x80, 0x38, 0x75, 0x7e, 0x35, 0x73, 0x7c, 0x33, 0x71, 0x7b, 0x31, 0x6f, 0x79, 0x2f, 0x6d, 0x78, 0x2d, 0x6b, 0x76,
+    0x2b, 0x6a, 0x74, 0x29, 0x67, 0x71, 0x27, 0x66, 0x70, 0x26, 0x65, 0x6f, 0x25, 0x62, 0x6c, 0x22, 0x61, 0x6a, 0x21, 0x5f, 0x6a,
+    0x20, 0x5f, 0x68, 0x1f, 0x5d, 0x67, 0x1d, 0x5b, 0x65, 0x1c, 0x59, 0x64, 0x1b, 0x59, 0x62, 0x1a, 0x57, 0x60, 0x19, 0x56, 0x60,
+    0x17, 0x55, 0x5e, 0x17, 0x53, 0x5d, 0x17, 0x52, 0x5c, 0x16, 0x51, 0x5a, 0x15, 0x50, 0x59, 0x15, 0x4f, 0x58, 0x14, 0x4e, 0x57,
+    0x13, 0x4d, 0x56, 0x12, 0x4c, 0x55, 0x12, 0x4b, 0x54, 0x11, 0x49, 0x52, 0x11, 0x48, 0x51, 0x11, 0x47, 0x50, 0x11, 0x47, 0x4f,
+    0x0f, 0x46, 0x4f, 0x0f, 0x45, 0x4e, 0x0f, 0x44, 0x4d, 0x0f, 0x44, 0x4c, 0x0f, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48,
+    0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42,
+    0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b,
+    0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34,
+    0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62,
+    0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f,
+    0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c,
+    0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59,
+    0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55,
+    0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f,
+    0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49,
+    0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42,
+    0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a,
+    0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30,
+    0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26,
+    0x02, 0x1f, 0x24, 0x00, 0x19, 0x1d, 0x6a, 0x83, 0x87, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xdd, 0xe9, 0xeb, 0x64, 0x9a, 0xa2,
+    0x5d, 0x95, 0x9e, 0x5c, 0x94, 0x9d, 0x5a, 0x92, 0x9b, 0x57, 0x90, 0x99, 0x55, 0x8e, 0x98, 0x55, 0x8e, 0x97, 0x52, 0x8c, 0x95,
+    0x4f, 0x89, 0x92, 0x4d, 0x87, 0x90, 0x4b, 0x86, 0x8f, 0x49, 0x84, 0x8d, 0x48, 0x83, 0x8c, 0x45, 0x80, 0x8a, 0x42, 0x7f, 0x88,
+    0x3f, 0x7c, 0x86, 0x3e, 0x7b, 0x84, 0x3c, 0x79, 0x83, 0x3a, 0x77, 0x81, 0x37, 0x75, 0x7e, 0x35, 0x73, 0x7c, 0x33, 0x72, 0x7b,
+    0x32, 0x6f, 0x7a, 0x2f, 0x6e, 0x78, 0x2d, 0x6c, 0x77, 0x2b, 0x6a, 0x74, 0x29, 0x68, 0x72, 0x28, 0x67, 0x71, 0x26, 0x65, 0x70,
+    0x25, 0x63, 0x6d, 0x24, 0x62, 0x6c, 0x21, 0x60, 0x6a, 0x20, 0x5f, 0x69, 0x1f, 0x5e, 0x67, 0x1e, 0x5c, 0x66, 0x1c, 0x5a, 0x65,
+    0x1b, 0x59, 0x63, 0x1a, 0x58, 0x61, 0x19, 0x56, 0x60, 0x19, 0x56, 0x60, 0x17, 0x54, 0x5e, 0x17, 0x53, 0x5d, 0x16, 0x51, 0x5b,
+    0x16, 0x51, 0x5a, 0x15, 0x50, 0x59, 0x14, 0x4e, 0x58, 0x14, 0x4e, 0x57, 0x12, 0x4c, 0x55, 0x12, 0x4c, 0x55, 0x12, 0x4b, 0x54,
+    0x11, 0x49, 0x52, 0x11, 0x48, 0x51, 0x11, 0x47, 0x50, 0x10, 0x47, 0x4f, 0x0f, 0x46, 0x4f, 0x0f, 0x45, 0x4e, 0x0f, 0x44, 0x4d,
+    0x0f, 0x44, 0x4c, 0x0f, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45,
+    0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f,
+    0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38,
+    0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63,
+    0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61,
+    0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e,
+    0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b,
+    0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57,
+    0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52,
+    0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d,
+    0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46,
+    0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e,
+    0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36,
+    0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c,
+    0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27, 0x02, 0x21, 0x26, 0x00, 0x1a, 0x1e, 0x22, 0x42, 0x47, 0xf9, 0xfb, 0xfb,
+    0xff, 0xff, 0xff, 0xf7, 0xfa, 0xfa, 0x81, 0xad, 0xb4, 0x5c, 0x94, 0x9d, 0x5b, 0x94, 0x9c, 0x59, 0x92, 0x9b, 0x57, 0x90, 0x99,
+    0x55, 0x8f, 0x97, 0x52, 0x8c, 0x96, 0x50, 0x8b, 0x94, 0x4f, 0x8a, 0x93, 0x4c, 0x88, 0x91, 0x4a, 0x86, 0x8f, 0x49, 0x84, 0x8e,
+    0x47, 0x82, 0x8c, 0x44, 0x80, 0x8a, 0x42, 0x7e, 0x88, 0x3f, 0x7d, 0x86, 0x3e, 0x7b, 0x85, 0x3b, 0x79, 0x82, 0x39, 0x77, 0x81,
+    0x37, 0x75, 0x7f, 0x35, 0x74, 0x7d, 0x33, 0x72, 0x7b, 0x32, 0x70, 0x7a, 0x30, 0x6e, 0x79, 0x2d, 0x6d, 0x77, 0x2b, 0x6b, 0x75,
+    0x2a, 0x69, 0x74, 0x28, 0x68, 0x72, 0x26, 0x65, 0x70, 0x26, 0x65, 0x6f, 0x24, 0x63, 0x6d, 0x23, 0x61, 0x6b, 0x20, 0x60, 0x69,
+    0x1f, 0x5e, 0x68, 0x1e, 0x5d, 0x67, 0x1d, 0x5c, 0x65, 0x1c, 0x5a, 0x65, 0x1a, 0x58, 0x62, 0x19, 0x57, 0x61, 0x18, 0x55, 0x5f,
+    0x18, 0x55, 0x5f, 0x17, 0x54, 0x5e, 0x16, 0x52, 0x5c, 0x16, 0x51, 0x5b, 0x15, 0x50, 0x59, 0x15, 0x50, 0x59, 0x14, 0x4e, 0x58,
+    0x14, 0x4e, 0x57, 0x12, 0x4c, 0x55, 0x12, 0x4c, 0x55, 0x11, 0x4a, 0x53, 0x11, 0x49, 0x52, 0x11, 0x48, 0x51, 0x11, 0x47, 0x50,
+    0x10, 0x47, 0x4f, 0x0f, 0x46, 0x4f, 0x0f, 0x45, 0x4e, 0x0f, 0x44, 0x4d, 0x0f, 0x44, 0x4c, 0x0f, 0x43, 0x4b, 0x0e, 0x41, 0x49,
+    0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43,
+    0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c,
+    0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35,
+    0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62,
+    0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60,
+    0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d,
+    0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59,
+    0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55,
+    0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50,
+    0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a,
+    0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43,
+    0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b,
+    0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32,
+    0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x23, 0x27,
+    0x00, 0x1f, 0x24, 0x05, 0x24, 0x29, 0xae, 0xbe, 0xc1, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xa4, 0xc4, 0xc9, 0x5a, 0x93, 0x9c,
+    0x58, 0x92, 0x9b, 0x57, 0x91, 0x9a, 0x55, 0x8f, 0x99, 0x52, 0x8d, 0x96, 0x50, 0x8c, 0x94, 0x4f, 0x8a, 0x94, 0x4d, 0x89, 0x92,
+    0x4b, 0x88, 0x91, 0x49, 0x86, 0x8f, 0x48, 0x84, 0x8d, 0x46, 0x82, 0x8c, 0x43, 0x80, 0x89, 0x41, 0x7f, 0x88, 0x3f, 0x7c, 0x86,
+    0x3d, 0x7b, 0x84, 0x3b, 0x79, 0x83, 0x39, 0x78, 0x81, 0x37, 0x76, 0x80, 0x35, 0x74, 0x7e, 0x33, 0x72, 0x7c, 0x32, 0x70, 0x7a,
+    0x30, 0x6f, 0x79, 0x2e, 0x6d, 0x77, 0x2c, 0x6c, 0x76, 0x2a, 0x6a, 0x75, 0x28, 0x68, 0x73, 0x26, 0x66, 0x70, 0x26, 0x65, 0x6f,
+    0x24, 0x64, 0x6e, 0x23, 0x62, 0x6c, 0x22, 0x60, 0x6b, 0x1f, 0x5f, 0x68, 0x1e, 0x5d, 0x68, 0x1d, 0x5d, 0x66, 0x1c, 0x5b, 0x65,
+    0x1b, 0x59, 0x64, 0x1a, 0x58, 0x62, 0x19, 0x57, 0x61, 0x18, 0x55, 0x5f, 0x18, 0x55, 0x5f, 0x17, 0x54, 0x5e, 0x16, 0x52, 0x5c,
+    0x15, 0x50, 0x5a, 0x15, 0x50, 0x59, 0x14, 0x4f, 0x59, 0x14, 0x4e, 0x58, 0x13, 0x4d, 0x56, 0x12, 0x4c, 0x55, 0x12, 0x4c, 0x55,
+    0x11, 0x4a, 0x53, 0x11, 0x49, 0x52, 0x11, 0x48, 0x51, 0x10, 0x47, 0x50, 0x10, 0x47, 0x4f, 0x0f, 0x46, 0x4f, 0x0f, 0x45, 0x4e,
+    0x0f, 0x44, 0x4d, 0x0f, 0x44, 0x4c, 0x0f, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46,
+    0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40,
+    0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39,
+    0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61,
+    0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e,
+    0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b,
+    0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57,
+    0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53,
+    0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d,
+    0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47,
+    0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f,
+    0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37,
+    0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e,
+    0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29, 0x02, 0x22, 0x27, 0x00, 0x1b, 0x20, 0x5e, 0x7a, 0x7e, 0xff, 0xff, 0xff,
+    0xff, 0xff, 0xff, 0xc7, 0xdb, 0xde, 0x57, 0x92, 0x9b, 0x55, 0x90, 0x99, 0x54, 0x8f, 0x99, 0x53, 0x8f, 0x98, 0x52, 0x8d, 0x96,
+    0x50, 0x8c, 0x95, 0x4d, 0x8a, 0x93, 0x4c, 0x88, 0x92, 0x4a, 0x87, 0x90, 0x49, 0x86, 0x8f, 0x47, 0x84, 0x8d, 0x45, 0x82, 0x8b,
+    0x42, 0x80, 0x8a, 0x40, 0x7e, 0x88, 0x3e, 0x7d, 0x87, 0x3d, 0x7b, 0x85, 0x3b, 0x7a, 0x83, 0x39, 0x78, 0x82, 0x37, 0x77, 0x80,
+    0x35, 0x75, 0x7f, 0x33, 0x72, 0x7d, 0x32, 0x71, 0x7b, 0x30, 0x6f, 0x79, 0x2e, 0x6e, 0x77, 0x2d, 0x6c, 0x77, 0x2a, 0x6b, 0x75,
+    0x28, 0x69, 0x73, 0x27, 0x67, 0x72, 0x26, 0x66, 0x70, 0x25, 0x64, 0x6e, 0x23, 0x63, 0x6d, 0x22, 0x61, 0x6c, 0x20, 0x5f, 0x69,
+    0x1e, 0x5e, 0x68, 0x1d, 0x5d, 0x67, 0x1c, 0x5c, 0x66, 0x1b, 0x5a, 0x64, 0x1a, 0x59, 0x63, 0x19, 0x57, 0x62, 0x19, 0x57, 0x61,
+    0x18, 0x55, 0x5f, 0x18, 0x55, 0x5f, 0x16, 0x53, 0x5d, 0x16, 0x52, 0x5c, 0x15, 0x50, 0x5a, 0x14, 0x50, 0x59, 0x14, 0x4f, 0x59,
+    0x13, 0x4d, 0x57, 0x13, 0x4d, 0x56, 0x12, 0x4c, 0x55, 0x11, 0x4b, 0x54, 0x11, 0x4a, 0x53, 0x11, 0x49, 0x52, 0x11, 0x48, 0x51,
+    0x10, 0x47, 0x50, 0x10, 0x47, 0x4f, 0x0f, 0x46, 0x4f, 0x0f, 0x45, 0x4e, 0x0f, 0x44, 0x4d, 0x0f, 0x44, 0x4c, 0x0e, 0x42, 0x4a,
+    0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44,
+    0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d,
+    0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36,
+    0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62,
+    0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60,
+    0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d,
+    0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a,
+    0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56,
+    0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51,
+    0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b,
+    0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44,
+    0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c,
+    0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33,
+    0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2a, 0x03, 0x24, 0x29,
+    0x00, 0x1c, 0x21, 0x27, 0x48, 0x4e, 0xf9, 0xfb, 0xfb, 0xff, 0xff, 0xff, 0xe2, 0xec, 0xee, 0x64, 0x9a, 0xa4, 0x53, 0x90, 0x98,
+    0x52, 0x8e, 0x97, 0x51, 0x8d, 0x97, 0x50, 0x8c, 0x95, 0x4e, 0x8a, 0x94, 0x4c, 0x89, 0x92, 0x4a, 0x88, 0x91, 0x49, 0x86, 0x90,
+    0x47, 0x85, 0x8e, 0x45, 0x83, 0x8d, 0x43, 0x81, 0x8b, 0x41, 0x7f, 0x89, 0x3f, 0x7e, 0x88, 0x3d, 0x7c, 0x86, 0x3d, 0x7b, 0x85,
+    0x3b, 0x79, 0x83, 0x38, 0x78, 0x81, 0x36, 0x76, 0x80, 0x34, 0x75, 0x7e, 0x32, 0x73, 0x7d, 0x32, 0x71, 0x7c, 0x30, 0x70, 0x79,
+    0x2e, 0x6e, 0x77, 0x2c, 0x6c, 0x76, 0x2b, 0x6b, 0x76, 0x28, 0x6a, 0x73, 0x27, 0x68, 0x73, 0x26, 0x66, 0x71, 0x25, 0x65, 0x6f,
+    0x23, 0x63, 0x6d, 0x22, 0x62, 0x6c, 0x21, 0x61, 0x6b, 0x1f, 0x5e, 0x69, 0x1d, 0x5e, 0x67, 0x1c, 0x5c, 0x67, 0x1b, 0x5b, 0x65,
+    0x1a, 0x5a, 0x63, 0x19, 0x58, 0x63, 0x18, 0x56, 0x61, 0x18, 0x56, 0x60, 0x18, 0x55, 0x5f, 0x17, 0x54, 0x5e, 0x16, 0x53, 0x5d,
+    0x15, 0x51, 0x5b, 0x15, 0x50, 0x5a, 0x14, 0x50, 0x59, 0x14, 0x4f, 0x59, 0x13, 0x4d, 0x57, 0x13, 0x4d, 0x56, 0x11, 0x4b, 0x54,
+    0x11, 0x4b, 0x54, 0x11, 0x4a, 0x53, 0x11, 0x49, 0x52, 0x10, 0x48, 0x51, 0x10, 0x47, 0x50, 0x10, 0x47, 0x4f, 0x0f, 0x46, 0x4f,
+    0x0f, 0x45, 0x4e, 0x0f, 0x44, 0x4d, 0x0e, 0x43, 0x4b, 0x0e, 0x42, 0x4a, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47,
+    0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41,
+    0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a,
+    0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30,
+    0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61,
+    0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f,
+    0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c,
+    0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58,
+    0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54,
+    0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e,
+    0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48,
+    0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41,
+    0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38,
+    0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f,
+    0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x03, 0x25, 0x2b, 0x00, 0x20, 0x25, 0x0d, 0x2d, 0x33, 0xc3, 0xd0, 0xd2, 0xff, 0xff, 0xff,
+    0xf3, 0xf8, 0xf8, 0x76, 0xa5, 0xad, 0x51, 0x8e, 0x97, 0x50, 0x8d, 0x96, 0x4e, 0x8b, 0x95, 0x4d, 0x8b, 0x94, 0x4c, 0x8a, 0x93,
+    0x4a, 0x88, 0x92, 0x49, 0x87, 0x91, 0x47, 0x85, 0x8f, 0x45, 0x83, 0x8e, 0x44, 0x83, 0x8c, 0x42, 0x82, 0x8b, 0x40, 0x80, 0x89,
+    0x3f, 0x7e, 0x88, 0x3d, 0x7c, 0x86, 0x3c, 0x7b, 0x85, 0x3a, 0x7a, 0x84, 0x38, 0x77, 0x82, 0x35, 0x76, 0x80, 0x34, 0x75, 0x7f,
+    0x32, 0x73, 0x7d, 0x31, 0x71, 0x7c, 0x30, 0x70, 0x7a, 0x2e, 0x6f, 0x78, 0x2c, 0x6c, 0x76, 0x2b, 0x6c, 0x76, 0x29, 0x6a, 0x74,
+    0x27, 0x69, 0x73, 0x26, 0x67, 0x72, 0x25, 0x65, 0x70, 0x23, 0x64, 0x6e, 0x22, 0x62, 0x6c, 0x21, 0x62, 0x6c, 0x20, 0x60, 0x6a,
+    0x1e, 0x5e, 0x68, 0x1c, 0x5d, 0x67, 0x1b, 0x5b, 0x66, 0x1a, 0x5b, 0x64, 0x19, 0x59, 0x63, 0x19, 0x58, 0x63, 0x18, 0x56, 0x61,
+    0x18, 0x56, 0x60, 0x17, 0x54, 0x5e, 0x17, 0x54, 0x5e, 0x15, 0x52, 0x5c, 0x15, 0x51, 0x5b, 0x14, 0x50, 0x5a, 0x14, 0x50, 0x59,
+    0x13, 0x4e, 0x58, 0x13, 0x4d, 0x57, 0x13, 0x4d, 0x56, 0x11, 0x4b, 0x54, 0x11, 0x4b, 0x54, 0x11, 0x4a, 0x53, 0x10, 0x49, 0x52,
+    0x10, 0x48, 0x51, 0x10, 0x47, 0x50, 0x10, 0x47, 0x4f, 0x0f, 0x46, 0x4f, 0x0f, 0x45, 0x4e, 0x0f, 0x44, 0x4d, 0x0e, 0x43, 0x4b,
+    0x0e, 0x42, 0x4a, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45,
+    0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e,
+    0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37,
+    0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62,
+    0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60,
+    0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e,
+    0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a,
+    0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56,
+    0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51,
+    0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c,
+    0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45,
+    0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d,
+    0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34,
+    0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x03, 0x27, 0x2c, 0x01, 0x24, 0x29,
+    0x01, 0x22, 0x27, 0x89, 0x9f, 0xa3, 0xff, 0xff, 0xff, 0xfe, 0xff, 0xff, 0x8b, 0xb4, 0xba, 0x4e, 0x8c, 0x96, 0x4d, 0x8b, 0x95,
+    0x4c, 0x8b, 0x94, 0x4b, 0x89, 0x93, 0x49, 0x88, 0x92, 0x48, 0x88, 0x91, 0x47, 0x86, 0x90, 0x46, 0x85, 0x8f, 0x44, 0x84, 0x8d,
+    0x42, 0x82, 0x8c, 0x40, 0x80, 0x8a, 0x3f, 0x80, 0x8a, 0x3d, 0x7e, 0x88, 0x3c, 0x7c, 0x86, 0x3b, 0x7b, 0x85, 0x39, 0x79, 0x83,
+    0x37, 0x78, 0x82, 0x36, 0x76, 0x80, 0x33, 0x75, 0x7e, 0x32, 0x73, 0x7d, 0x31, 0x72, 0x7c, 0x2f, 0x70, 0x7b, 0x2e, 0x6f, 0x79,
+    0x2c, 0x6d, 0x77, 0x2a, 0x6b, 0x75, 0x29, 0x6a, 0x74, 0x27, 0x68, 0x73, 0x26, 0x68, 0x72, 0x25, 0x66, 0x71, 0x23, 0x64, 0x6f,
+    0x22, 0x63, 0x6d, 0x21, 0x62, 0x6c, 0x20, 0x61, 0x6b, 0x1f, 0x5f, 0x6a, 0x1e, 0x5e, 0x68, 0x1b, 0x5c, 0x66, 0x1a, 0x5b, 0x65,
+    0x1a, 0x5b, 0x64, 0x19, 0x59, 0x63, 0x18, 0x57, 0x62, 0x18, 0x56, 0x61, 0x17, 0x55, 0x5f, 0x17, 0x54, 0x5e, 0x17, 0x54, 0x5e,
+    0x15, 0x52, 0x5c, 0x14, 0x51, 0x5b, 0x14, 0x50, 0x5a, 0x13, 0x4f, 0x58, 0x13, 0x4e, 0x58, 0x13, 0x4d, 0x57, 0x12, 0x4c, 0x55,
+    0x11, 0x4b, 0x54, 0x11, 0x4b, 0x54, 0x11, 0x4a, 0x53, 0x10, 0x49, 0x52, 0x10, 0x48, 0x51, 0x10, 0x47, 0x50, 0x10, 0x47, 0x4f,
+    0x0f, 0x46, 0x4f, 0x0f, 0x45, 0x4e, 0x0e, 0x43, 0x4c, 0x0e, 0x43, 0x4b, 0x0e, 0x42, 0x4a, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48,
+    0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42,
+    0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b,
+    0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34,
+    0x09, 0x2a, 0x30, 0x19, 0x3f, 0x46, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62,
+    0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f,
+    0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c,
+    0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59,
+    0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55,
+    0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f,
+    0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49,
+    0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42,
+    0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a,
+    0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30,
+    0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x02, 0x26, 0x2c, 0x00, 0x1f, 0x24, 0x56, 0x73, 0x78, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+    0x9d, 0xc0, 0xc6, 0x4b, 0x8b, 0x95, 0x4a, 0x8a, 0x94, 0x49, 0x89, 0x93, 0x48, 0x88, 0x92, 0x47, 0x87, 0x91, 0x47, 0x86, 0x91,
+    0x45, 0x85, 0x8f, 0x44, 0x84, 0x8e, 0x43, 0x83, 0x8d, 0x41, 0x82, 0x8b, 0x3f, 0x80, 0x8a, 0x3e, 0x7f, 0x89, 0x3d, 0x7e, 0x88,
+    0x3b, 0x7c, 0x86, 0x3a, 0x7a, 0x84, 0x38, 0x79, 0x83, 0x37, 0x78, 0x82, 0x35, 0x76, 0x81, 0x33, 0x74, 0x7f, 0x32, 0x74, 0x7d,
+    0x30, 0x72, 0x7c, 0x2f, 0x71, 0x7b, 0x2d, 0x6f, 0x79, 0x2c, 0x6d, 0x78, 0x2a, 0x6c, 0x76, 0x29, 0x6a, 0x74, 0x27, 0x69, 0x73,
+    0x26, 0x68, 0x73, 0x25, 0x67, 0x71, 0x23, 0x65, 0x70, 0x22, 0x63, 0x6e, 0x21, 0x62, 0x6d, 0x20, 0x61, 0x6b, 0x1f, 0x60, 0x6b,
+    0x1e, 0x5f, 0x69, 0x1d, 0x5d, 0x67, 0x1b, 0x5c, 0x66, 0x1a, 0x5b, 0x65, 0x19, 0x5a, 0x64, 0x18, 0x58, 0x62, 0x18, 0x57, 0x61,
+    0x18, 0x56, 0x60, 0x17, 0x55, 0x5f, 0x16, 0x53, 0x5d, 0x16, 0x53, 0x5d, 0x15, 0x52, 0x5c, 0x14, 0x51, 0x5b, 0x14, 0x50, 0x5a,
+    0x13, 0x4f, 0x58, 0x13, 0x4e, 0x58, 0x12, 0x4c, 0x56, 0x12, 0x4c, 0x55, 0x11, 0x4b, 0x54, 0x11, 0x4b, 0x54, 0x10, 0x4a, 0x53,
+    0x10, 0x49, 0x52, 0x10, 0x48, 0x51, 0x10, 0x47, 0x50, 0x10, 0x47, 0x4f, 0x0f, 0x46, 0x4f, 0x0f, 0x45, 0x4e, 0x0e, 0x43, 0x4c,
+    0x0e, 0x43, 0x4b, 0x0e, 0x42, 0x4a, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45,
+    0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f,
+    0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38,
+    0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x30, 0x19, 0x3f, 0x45, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x07, 0x5e, 0x6a, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63,
+    0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61,
+    0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e,
+    0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b,
+    0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57,
+    0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52,
+    0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d,
+    0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46,
+    0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e,
+    0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36,
+    0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x03, 0x28, 0x2e, 0x00, 0x1f, 0x25,
+    0x32, 0x55, 0x5a, 0xfa, 0xfc, 0xfc, 0xff, 0xff, 0xff, 0xaa, 0xc8, 0xcd, 0x48, 0x8a, 0x94, 0x47, 0x89, 0x93, 0x47, 0x88, 0x92,
+    0x47, 0x87, 0x91, 0x46, 0x87, 0x90, 0x44, 0x85, 0x8f, 0x43, 0x84, 0x8e, 0x42, 0x83, 0x8d, 0x41, 0x82, 0x8d, 0x3f, 0x81, 0x8b,
+    0x3d, 0x7f, 0x89, 0x3d, 0x7e, 0x89, 0x3b, 0x7d, 0x87, 0x3a, 0x7c, 0x86, 0x38, 0x7a, 0x84, 0x37, 0x79, 0x83, 0x36, 0x78, 0x82,
+    0x34, 0x76, 0x80, 0x32, 0x75, 0x7f, 0x31, 0x73, 0x7e, 0x2f, 0x72, 0x7c, 0x2e, 0x70, 0x7b, 0x2d, 0x70, 0x79, 0x2b, 0x6e, 0x78,
+    0x2a, 0x6c, 0x77, 0x28, 0x6b, 0x75, 0x27, 0x69, 0x73, 0x26, 0x69, 0x73, 0x25, 0x66, 0x71, 0x23, 0x66, 0x70, 0x22, 0x64, 0x6f,
+    0x21, 0x62, 0x6e, 0x20, 0x62, 0x6c, 0x1f, 0x60, 0x6b, 0x1e, 0x60, 0x6a, 0x1d, 0x5e, 0x68, 0x1c, 0x5c, 0x67, 0x1a, 0x5c, 0x65,
+    0x19, 0x5a, 0x65, 0x18, 0x59, 0x63, 0x18, 0x58, 0x61, 0x18, 0x57, 0x61, 0x17, 0x55, 0x60, 0x17, 0x55, 0x5f, 0x16, 0x53, 0x5d,
+    0x15, 0x53, 0x5d, 0x14, 0x52, 0x5c, 0x14, 0x51, 0x5b, 0x13, 0x4f, 0x59, 0x13, 0x4f, 0x58, 0x13, 0x4e, 0x58, 0x12, 0x4c, 0x56,
+    0x12, 0x4c, 0x55, 0x11, 0x4b, 0x54, 0x10, 0x4b, 0x54, 0x10, 0x4a, 0x53, 0x10, 0x49, 0x52, 0x10, 0x48, 0x51, 0x10, 0x47, 0x50,
+    0x10, 0x47, 0x4f, 0x0f, 0x46, 0x4f, 0x0e, 0x44, 0x4d, 0x0e, 0x43, 0x4c, 0x0e, 0x43, 0x4b, 0x0e, 0x42, 0x4a, 0x0e, 0x41, 0x49,
+    0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43,
+    0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c,
+    0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35,
+    0x0a, 0x2e, 0x34, 0x09, 0x2a, 0x2f, 0x1b, 0x42, 0x48, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x11, 0x64, 0x70,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62,
+    0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60,
+    0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d,
+    0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59,
+    0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55,
+    0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50,
+    0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a,
+    0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43,
+    0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b,
+    0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32,
+    0x03, 0x2a, 0x30, 0x03, 0x29, 0x2f, 0x00, 0x22, 0x27, 0x1c, 0x41, 0x46, 0xde, 0xe5, 0xe6, 0xff, 0xff, 0xff, 0xb3, 0xce, 0xd2,
+    0x46, 0x88, 0x92, 0x45, 0x88, 0x92, 0x45, 0x87, 0x91, 0x43, 0x85, 0x8f, 0x43, 0x85, 0x8f, 0x42, 0x84, 0x8e, 0x41, 0x83, 0x8d,
+    0x40, 0x82, 0x8d, 0x3f, 0x82, 0x8b, 0x3d, 0x80, 0x8a, 0x3c, 0x7f, 0x89, 0x3b, 0x7e, 0x88, 0x3a, 0x7c, 0x87, 0x38, 0x7b, 0x85,
+    0x37, 0x7a, 0x84, 0x36, 0x79, 0x83, 0x34, 0x77, 0x81, 0x33, 0x76, 0x80, 0x31, 0x74, 0x7e, 0x31, 0x73, 0x7e, 0x30, 0x72, 0x7c,
+    0x2d, 0x71, 0x7a, 0x2c, 0x6f, 0x7a, 0x2b, 0x6e, 0x78, 0x29, 0x6c, 0x77, 0x28, 0x6b, 0x76, 0x27, 0x6a, 0x74, 0x26, 0x68, 0x72,
+    0x25, 0x67, 0x71, 0x24, 0x66, 0x71, 0x22, 0x65, 0x6f, 0x21, 0x63, 0x6f, 0x20, 0x62, 0x6d, 0x1f, 0x61, 0x6c, 0x1e, 0x60, 0x6a,
+    0x1d, 0x5f, 0x69, 0x1c, 0x5d, 0x68, 0x1b, 0x5c, 0x66, 0x19, 0x5b, 0x65, 0x18, 0x59, 0x64, 0x18, 0x59, 0x63, 0x18, 0x58, 0x61,
+    0x17, 0x56, 0x61, 0x17, 0x55, 0x60, 0x16, 0x54, 0x5e, 0x16, 0x53, 0x5d, 0x15, 0x53, 0x5d, 0x14, 0x52, 0x5c, 0x13, 0x50, 0x5a,
+    0x13, 0x4f, 0x59, 0x13, 0x4f, 0x58, 0x12, 0x4d, 0x57, 0x12, 0x4c, 0x56, 0x12, 0x4c, 0x55, 0x11, 0x4b, 0x54, 0x10, 0x4b, 0x54,
+    0x10, 0x4a, 0x53, 0x10, 0x49, 0x52, 0x10, 0x48, 0x51, 0x10, 0x47, 0x50, 0x10, 0x47, 0x4f, 0x0f, 0x46, 0x4f, 0x0e, 0x44, 0x4d,
+    0x0e, 0x43, 0x4c, 0x0e, 0x43, 0x4b, 0x0e, 0x42, 0x4a, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46,
+    0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40,
+    0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39,
+    0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2f, 0x35, 0x0a, 0x2e, 0x34, 0x09, 0x28, 0x2d, 0x2b, 0x4f, 0x55,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x37, 0x6d, 0x76, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63,
+    0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61,
+    0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e,
+    0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b,
+    0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57,
+    0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53,
+    0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d,
+    0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47,
+    0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f,
+    0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c, 0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37,
+    0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2b, 0x31, 0x00, 0x24, 0x2a, 0x12, 0x36, 0x3b,
+    0xc7, 0xd4, 0xd6, 0xff, 0xff, 0xff, 0xb6, 0xd0, 0xd5, 0x47, 0x88, 0x93, 0x43, 0x86, 0x90, 0x41, 0x85, 0x90, 0x41, 0x84, 0x8f,
+    0x40, 0x83, 0x8e, 0x3f, 0x82, 0x8d, 0x3f, 0x82, 0x8c, 0x3e, 0x81, 0x8c, 0x3c, 0x80, 0x8b, 0x3c, 0x7f, 0x89, 0x3b, 0x7e, 0x89,
+    0x3a, 0x7d, 0x87, 0x38, 0x7c, 0x86, 0x37, 0x7a, 0x85, 0x36, 0x7a, 0x84, 0x34, 0x78, 0x83, 0x33, 0x77, 0x81, 0x32, 0x75, 0x80,
+    0x31, 0x74, 0x7f, 0x30, 0x73, 0x7d, 0x2f, 0x72, 0x7d, 0x2d, 0x70, 0x7b, 0x2b, 0x6f, 0x79, 0x2a, 0x6e, 0x79, 0x29, 0x6d, 0x77,
+    0x27, 0x6b, 0x76, 0x26, 0x69, 0x74, 0x26, 0x69, 0x73, 0x25, 0x67, 0x71, 0x24, 0x67, 0x71, 0x23, 0x65, 0x70, 0x20, 0x64, 0x6e,
+    0x1f, 0x62, 0x6d, 0x1e, 0x61, 0x6c, 0x1e, 0x61, 0x6b, 0x1d, 0x5f, 0x69, 0x1c, 0x5e, 0x69, 0x1b, 0x5d, 0x67, 0x1a, 0x5b, 0x66,
+    0x18, 0x5a, 0x64, 0x18, 0x59, 0x64, 0x18, 0x59, 0x62, 0x17, 0x57, 0x61, 0x17, 0x56, 0x61, 0x16, 0x54, 0x5f, 0x16, 0x54, 0x5e,
+    0x15, 0x53, 0x5d, 0x15, 0x53, 0x5d, 0x14, 0x52, 0x5c, 0x13, 0x50, 0x5a, 0x13, 0x4f, 0x59, 0x12, 0x4e, 0x57, 0x12, 0x4d, 0x57,
+    0x12, 0x4c, 0x56, 0x12, 0x4c, 0x55, 0x10, 0x4b, 0x54, 0x10, 0x4b, 0x54, 0x10, 0x4a, 0x53, 0x10, 0x49, 0x52, 0x10, 0x48, 0x51,
+    0x10, 0x47, 0x50, 0x10, 0x47, 0x4f, 0x0e, 0x45, 0x4e, 0x0e, 0x44, 0x4d, 0x0e, 0x43, 0x4c, 0x0e, 0x43, 0x4b, 0x0e, 0x42, 0x4a,
+    0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47, 0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44,
+    0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41, 0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d,
+    0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a, 0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36,
+    0x0a, 0x30, 0x36, 0x0a, 0x2d, 0x32, 0x0a, 0x2e, 0x34, 0x56, 0x68, 0x6b, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x73, 0x75, 0x75, 0x19, 0x67, 0x72, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64,
+    0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62,
+    0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61, 0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60,
+    0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5e, 0x06, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x52, 0x5d,
+    0x05, 0x52, 0x5d, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b, 0x05, 0x50, 0x5b, 0x05, 0x50, 0x5a, 0x05, 0x4f, 0x5a,
+    0x05, 0x4e, 0x59, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57, 0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4c, 0x56,
+    0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53, 0x05, 0x48, 0x52, 0x05, 0x48, 0x51, 0x05, 0x47, 0x51,
+    0x05, 0x47, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d, 0x05, 0x43, 0x4d, 0x04, 0x43, 0x4c, 0x04, 0x42, 0x4b,
+    0x04, 0x41, 0x4a, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47, 0x04, 0x3e, 0x46, 0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44,
+    0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x3f, 0x04, 0x37, 0x3e, 0x04, 0x36, 0x3d, 0x04, 0x35, 0x3c,
+    0x04, 0x33, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x37, 0x03, 0x2f, 0x36, 0x03, 0x2e, 0x34, 0x03, 0x2d, 0x33,
+    0x03, 0x2c, 0x32, 0x00, 0x26, 0x2c, 0x0b, 0x31, 0x37, 0xad, 0xbf, 0xc2, 0xff, 0xff, 0xff, 0xb4, 0xcf, 0xd4, 0x44, 0x87, 0x92,
+    0x40, 0x84, 0x8f, 0x40, 0x84, 0x8f, 0x3e, 0x83, 0x8e, 0x3d, 0x82, 0x8d, 0x3d, 0x81, 0x8c, 0x3c, 0x81, 0x8b, 0x3c, 0x80, 0x8a,
+    0x3b, 0x7f, 0x8a, 0x3a, 0x7e, 0x89, 0x39, 0x7d, 0x88, 0x38, 0x7c, 0x87, 0x37, 0x7b, 0x86, 0x35, 0x7a, 0x84, 0x34, 0x78, 0x84,
+    0x33, 0x78, 0x82, 0x32, 0x77, 0x82, 0x31, 0x76, 0x80, 0x30, 0x74, 0x7e, 0x2f, 0x73, 0x7e, 0x2e, 0x71, 0x7c, 0x2d, 0x71, 0x7c,
+    0x2b, 0x6f, 0x7a, 0x29, 0x6e, 0x78, 0x28, 0x6d, 0x77, 0x27, 0x6c, 0x76, 0x26, 0x6a, 0x75, 0x26, 0x69, 0x74, 0x24, 0x68, 0x72,
+    0x23, 0x66, 0x70, 0x22, 0x65, 0x70, 0x21, 0x64, 0x6f, 0x1f, 0x63, 0x6d, 0x1e, 0x61, 0x6d, 0x1d, 0x60, 0x6b, 0x1c, 0x5f, 0x6a,
+    0x1c, 0x5e, 0x69, 0x1b, 0x5e, 0x68, 0x1a, 0x5c, 0x67, 0x19, 0x5a, 0x65, 0x18, 0x5a, 0x64, 0x18, 0x59, 0x63, 0x17, 0x58, 0x62,
+    0x17, 0x57, 0x61, 0x16, 0x55, 0x60, 0x16, 0x54, 0x5f, 0x15, 0x54, 0x5e, 0x15, 0x53, 0x5d, 0x14, 0x52, 0x5c, 0x13, 0x51, 0x5b,
+    0x13, 0x50, 0x5a, 0x13, 0x4f, 0x59, 0x12, 0x4e, 0x57, 0x12, 0x4d, 0x57, 0x12, 0x4c, 0x56, 0x11, 0x4c, 0x55, 0x10, 0x4b, 0x54,
+    0x10, 0x4b, 0x54, 0x10, 0x4a, 0x53, 0x10, 0x49, 0x52, 0x10, 0x48, 0x51, 0x10, 0x47, 0x50, 0x0f, 0x46, 0x4e, 0x0e, 0x45, 0x4e,
+    0x0e, 0x44, 0x4d, 0x0e, 0x43, 0x4c, 0x0e, 0x43, 0x4b, 0x0e, 0x42, 0x4a, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47,
+    0x0d, 0x3f, 0x46, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41,
+    0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a,
+    0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x2d, 0x32, 0x09, 0x29, 0x2f, 0x36, 0x57, 0x5c,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x6f, 0x74, 0x75, 0x23, 0x6a, 0x74,
+    0x07, 0x5d, 0x69, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67,
+    0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5b, 0x67, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66, 0x06, 0x5a, 0x66,
+    0x06, 0x5a, 0x66, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65, 0x06, 0x59, 0x65,
+    0x06, 0x59, 0x65, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x64, 0x06, 0x58, 0x63, 0x06, 0x58, 0x63, 0x06, 0x57, 0x63,
+    0x06, 0x57, 0x63, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x57, 0x62, 0x06, 0x56, 0x62, 0x06, 0x56, 0x61, 0x06, 0x56, 0x61,
+    0x06, 0x55, 0x61, 0x06, 0x55, 0x60, 0x06, 0x55, 0x60, 0x06, 0x54, 0x60, 0x06, 0x54, 0x5f, 0x06, 0x54, 0x5f, 0x06, 0x53, 0x5f,
+    0x05, 0x53, 0x5e, 0x05, 0x53, 0x5e, 0x05, 0x53, 0x5d, 0x05, 0x52, 0x5d, 0x05, 0x52, 0x5c, 0x05, 0x51, 0x5c, 0x05, 0x51, 0x5b,
+    0x05, 0x50, 0x5b, 0x05, 0x4f, 0x5a, 0x05, 0x4f, 0x5a, 0x05, 0x4e, 0x59, 0x05, 0x4e, 0x58, 0x05, 0x4e, 0x58, 0x05, 0x4d, 0x57,
+    0x05, 0x4d, 0x57, 0x05, 0x4c, 0x56, 0x05, 0x4b, 0x55, 0x05, 0x4b, 0x55, 0x05, 0x4a, 0x54, 0x05, 0x4a, 0x54, 0x05, 0x49, 0x53,
+    0x05, 0x48, 0x51, 0x05, 0x48, 0x51, 0x05, 0x47, 0x50, 0x05, 0x46, 0x50, 0x05, 0x46, 0x4f, 0x05, 0x45, 0x4e, 0x05, 0x44, 0x4d,
+    0x05, 0x43, 0x4c, 0x04, 0x42, 0x4b, 0x04, 0x42, 0x4a, 0x04, 0x41, 0x49, 0x04, 0x40, 0x49, 0x04, 0x3f, 0x48, 0x04, 0x3e, 0x47,
+    0x04, 0x3d, 0x45, 0x04, 0x3c, 0x44, 0x04, 0x3b, 0x43, 0x04, 0x3a, 0x42, 0x04, 0x39, 0x41, 0x04, 0x38, 0x40, 0x04, 0x38, 0x3f,
+    0x04, 0x36, 0x3e, 0x04, 0x35, 0x3d, 0x04, 0x34, 0x3b, 0x03, 0x33, 0x3a, 0x03, 0x32, 0x38, 0x03, 0x31, 0x38, 0x03, 0x30, 0x36,
+    0x03, 0x2e, 0x35, 0x03, 0x2d, 0x33, 0x03, 0x2c, 0x32, 0x03, 0x2b, 0x31, 0x01, 0x28, 0x2e, 0x08, 0x30, 0x35, 0x9d, 0xb2, 0xb5,
+    0xff, 0xff, 0xff, 0xac, 0xca, 0xce, 0x40, 0x85, 0x8f, 0x3c, 0x82, 0x8d, 0x3c, 0x81, 0x8c, 0x3c, 0x81, 0x8c, 0x3c, 0x80, 0x8b,
+    0x3b, 0x7f, 0x8a, 0x3a, 0x7e, 0x89, 0x3a, 0x7e, 0x88, 0x39, 0x7d, 0x88, 0x38, 0x7c, 0x86, 0x37, 0x7b, 0x86, 0x36, 0x7a, 0x85,
+    0x34, 0x79, 0x84, 0x33, 0x78, 0x83, 0x32, 0x77, 0x82, 0x31, 0x76, 0x80, 0x31, 0x75, 0x80, 0x30, 0x74, 0x7e, 0x2f, 0x73, 0x7e,
+    0x2e, 0x71, 0x7c, 0x2d, 0x70, 0x7b, 0x2c, 0x6f, 0x7a, 0x2a, 0x6e, 0x79, 0x28, 0x6d, 0x77, 0x27, 0x6b, 0x75, 0x26, 0x69, 0x74,
+    0x26, 0x69, 0x73, 0x25, 0x68, 0x72, 0x24, 0x67, 0x71, 0x23, 0x65, 0x70, 0x21, 0x63, 0x6f, 0x20, 0x63, 0x6d, 0x1f, 0x61, 0x6d,
+    0x1e, 0x61, 0x6b, 0x1d, 0x60, 0x6a, 0x1c, 0x5e, 0x69, 0x1b, 0x5d, 0x67, 0x1b, 0x5c, 0x66, 0x1a, 0x5b, 0x66, 0x18, 0x59, 0x63,
+    0x18, 0x58, 0x63, 0x18, 0x57, 0x61, 0x17, 0x56, 0x60, 0x17, 0x55, 0x60, 0x16, 0x54, 0x5e, 0x16, 0x53, 0x5d, 0x14, 0x52, 0x5c,
+    0x14, 0x51, 0x5b, 0x13, 0x50, 0x5a, 0x13, 0x4f, 0x59, 0x13, 0x4f, 0x58, 0x13, 0x4e, 0x58, 0x12, 0x4c, 0x56, 0x11, 0x4b, 0x54,
+    0x11, 0x4b, 0x54, 0x11, 0x4a, 0x53, 0x10, 0x49, 0x52, 0x10, 0x48, 0x51, 0x10, 0x48, 0x51, 0x10, 0x47, 0x50, 0x0f, 0x46, 0x4f,
+    0x0f, 0x45, 0x4e, 0x0f, 0x44, 0x4d, 0x0e, 0x43, 0x4b, 0x0e, 0x43, 0x4b, 0x0e, 0x41, 0x49, 0x0d, 0x40, 0x48, 0x0d, 0x3f, 0x47,
+    0x0d, 0x3f, 0x47, 0x0d, 0x3e, 0x45, 0x0d, 0x3d, 0x45, 0x0d, 0x3c, 0x44, 0x0d, 0x3b, 0x43, 0x0c, 0x3b, 0x42, 0x0c, 0x3a, 0x41,
+    0x0c, 0x39, 0x40, 0x0c, 0x38, 0x3f, 0x0c, 0x37, 0x3e, 0x0c, 0x36, 0x3d, 0x0b, 0x35, 0x3c, 0x0b, 0x34, 0x3b, 0x0b, 0x34, 0x3a,
+    0x0b, 0x33, 0x39, 0x0b, 0x32, 0x38, 0x0a, 0x31, 0x37, 0x0a, 0x30, 0x36, 0x0a, 0x30, 0x36, 0x0a, 0x2e, 0x34, 0x0a, 0x2d, 0x32,
+    0x0a, 0x2c, 0x32, 0x12, 0x37, 0x3e, 0x3e, 0x5c, 0x61, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x5b, 0x73, 0x76, 0x39, 0x6e, 0x76, 0x2d, 0x6c, 0x75,
+    0x2d, 0x6c, 0x75, 0x2d, 0x6c, 0x75, 0x2d, 0x6c, 0x75, 0x2d, 0x6c, 0x75, 0x2d, 0x6c, 0x75, 0x2d, 0x6c, 0x75, 0x2d, 0x6c, 0x75,
+    0x2d, 0x6c, 0x75, 0x2d, 0x6c, 0x75, 0x2d, 0x6c, 0x75, 0x2d, 0x6c, 0x75, 0x2d, 0x6c, 0x75, 0x2d, 0x6c, 0x75, 0x2d, 0x6c, 0x75,
+    0x2d, 0x6c, 0x75, 0x2d, 0x6c, 0x75, 0x2d, 0x6c, 0x75, 0x2d, 0x6c, 0x75, 0x2d, 0x6c, 0x75, 0x2d, 0x6c, 0x75, 0x2d, 0x6c, 0x75,
+    0x2d, 0x6c, 0x75, 0x2d, 0x6c, 0x75, 0x2d, 0x6c, 0x75, 0x2d, 0x6c, 0x75, 0x2d, 0x6c, 0x75, 0x2d, 0x6c, 0x75, 0x2d, 0x6c, 0x75,
+    0x2d, 0x6c, 0x75, 0x2d, 0x6c, 0x75, 0x2d, 0x6b, 0x74, 0x2d, 0x6b, 0x74, 0x2d, 0x6b, 0x74, 0x2d, 0x6b, 0x74, 0x2d, 0x6b, 0x74,
+    0x2d, 0x6b, 0x74, 0x2d, 0x6b, 0x74, 0x2d, 0x6b, 0x74, 0x2d, 0x6b, 0x74, 0x2d, 0x6b, 0x74, 0x2d, 0x6b, 0x73, 0x2d, 0x6b, 0x73,
+    0x2d, 0x6b, 0x73, 0x2d, 0x6b, 0x73, 0x2c, 0x6a, 0x73, 0x2c, 0x69, 0x72, 0x2c, 0x69, 0x72, 0x2c, 0x69, 0x72, 0x2d, 0x69, 0x72,
+    0x2d, 0x69, 0x72, 0x2d, 0x69, 0x72, 0x2d, 0x69, 0x72, 0x2d, 0x69, 0x72, 0x2d, 0x69, 0x72, 0x2d, 0x69, 0x72, 0x2d, 0x69, 0x72,
+    0x2d, 0x69, 0x71, 0x2d, 0x69, 0x71, 0x2c, 0x68, 0x71, 0x2c, 0x68, 0x71, 0x2c, 0x68, 0x72, 0x2c, 0x68, 0x72, 0x2c, 0x68, 0x70,
+    0x2c, 0x68, 0x70, 0x2c, 0x67, 0x70, 0x2c, 0x67, 0x70, 0x2c, 0x67, 0x70, 0x2d, 0x67, 0x70, 0x2d, 0x67, 0x6f, 0x2c, 0x67, 0x6f,
+    0x2d, 0x67, 0x6f, 0x2d, 0x66, 0x6f, 0x2d, 0x66, 0x6f, 0x2e, 0x67, 0x6f, 0x2e, 0x67, 0x6f, 0x2e, 0x67, 0x6f, 0x2e, 0x67, 0x6f,
+    0x2d, 0x67, 0x6f, 0x2e, 0x66, 0x6f, 0x2e, 0x66, 0x6e, 0x2e, 0x66, 0x6e, 0x2e, 0x66, 0x6e, 0x2f, 0x66, 0x6d, 0x2e, 0x65, 0x6e,
+    0x2f, 0x65, 0x6e, 0x2f, 0x65, 0x6e, 0x30, 0x65, 0x6d, 0x2f, 0x65, 0x6c, 0x30, 0x65, 0x6c, 0x31, 0x65, 0x6c, 0x30, 0x64, 0x6c,
+    0x30, 0x64, 0x6b, 0x31, 0x64, 0x6c, 0x31, 0x64, 0x6b, 0x31, 0x64, 0x6b, 0x31, 0x63, 0x6b, 0x31, 0x63, 0x6a, 0x31, 0x62, 0x6a,
+    0x31, 0x62, 0x6a, 0x31, 0x62, 0x69, 0x31, 0x61, 0x69, 0x32, 0x61, 0x69, 0x31, 0x61, 0x69, 0x31, 0x61, 0x67, 0x31, 0x60, 0x66,
+    0x31, 0x60, 0x66, 0x31, 0x5f, 0x66, 0x31, 0x5f, 0x65, 0x31, 0x5f, 0x65, 0x30, 0x5e, 0x64, 0x30, 0x5d, 0x63, 0x30, 0x5d, 0x63,
+    0x30, 0x5b, 0x62, 0x2c, 0x56, 0x5d, 0x69, 0x89, 0x8d, 0xaf, 0xc2, 0xc4, 0x9d, 0xb4, 0xb7, 0x67, 0x89, 0x8f, 0x64, 0x87, 0x8c,
+    0x64, 0x87, 0x8c, 0x63, 0x87, 0x8c, 0x63, 0x87, 0x8c, 0x62, 0x85, 0x8b, 0x62, 0x85, 0x8b, 0x62, 0x84, 0x8a, 0x62, 0x84, 0x8a,
+    0x61, 0x84, 0x8a, 0x60, 0x83, 0x88, 0x60, 0x83, 0x88, 0x5f, 0x83, 0x88, 0x5e, 0x81, 0x87, 0x5e, 0x81, 0x86, 0x5d, 0x80, 0x86,
+    0x5c, 0x7f, 0x85, 0x5b, 0x7f, 0x85, 0x5a, 0x7e, 0x84, 0x59, 0x7d, 0x82, 0x59, 0x7c, 0x82, 0x58, 0x7c, 0x82, 0x57, 0x7b, 0x81,
+    0x57, 0x7b, 0x80, 0x55, 0x7a, 0x7f, 0x54, 0x79, 0x7e, 0x54, 0x77, 0x7d, 0x54, 0x77, 0x7d, 0x52, 0x77, 0x7c, 0x51, 0x76, 0x7b,
+    0x50, 0x75, 0x7b, 0x4f, 0x74, 0x79, 0x4f, 0x73, 0x79, 0x4e, 0x72, 0x78, 0x4d, 0x72, 0x78, 0x4c, 0x71, 0x77, 0x4b, 0x70, 0x76,
+    0x4b, 0x70, 0x76, 0x4a, 0x70, 0x75, 0x49, 0x6f, 0x75, 0x49, 0x6e, 0x74, 0x48, 0x6d, 0x73, 0x47, 0x6c, 0x71, 0x47, 0x6c, 0x71,
+    0x46, 0x6b, 0x71, 0x46, 0x6b, 0x71, 0x45, 0x6a, 0x70, 0x45, 0x6a, 0x70, 0x45, 0x6a, 0x70, 0x45, 0x68, 0x6e, 0x45, 0x68, 0x6e,
+    0x45, 0x68, 0x6e, 0x44, 0x68, 0x6d, 0x44, 0x68, 0x6d, 0x43, 0x67, 0x6c, 0x43, 0x67, 0x6c, 0x42, 0x65, 0x6c, 0x42, 0x65, 0x6c,
+    0x42, 0x65, 0x6b, 0x42, 0x65, 0x6b, 0x41, 0x65, 0x6b, 0x41, 0x64, 0x6a, 0x41, 0x64, 0x6a, 0x40, 0x63, 0x69, 0x40, 0x63, 0x69,
+    0x40, 0x63, 0x69, 0x3f, 0x63, 0x68, 0x3f, 0x63, 0x68, 0x3f, 0x62, 0x68, 0x3f, 0x61, 0x67, 0x3f, 0x61, 0x67, 0x3f, 0x61, 0x67,
+    0x3f, 0x60, 0x67, 0x3e, 0x60, 0x65, 0x3e, 0x60, 0x65, 0x3e, 0x60, 0x65, 0x3e, 0x5f, 0x65, 0x3e, 0x5f, 0x65, 0x3d, 0x5f, 0x64,
+    0x3d, 0x5f, 0x64, 0x3d, 0x5e, 0x64, 0x3d, 0x5e, 0x63, 0x3d, 0x5e, 0x63, 0x3d, 0x5d, 0x63, 0x3d, 0x5d, 0x63, 0x3c, 0x5c, 0x62,
+    0x3b, 0x5c, 0x61, 0x3c, 0x5b, 0x60, 0x3c, 0x5b, 0x60, 0x3c, 0x5b, 0x60, 0x4a, 0x63, 0x67, 0x67, 0x70, 0x71, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c,
+    0x7c, 0x7c, 0x7c, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7f, 0x7f, 0x7f,
+    0x7f, 0x7f, 0x7f, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x82, 0x82, 0x82, 0x83, 0x83, 0x83,
+    0x83, 0x83, 0x83, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x86, 0x86, 0x86,
+    0x86, 0x86, 0x86, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x88, 0x88, 0x88, 0x88, 0x88, 0x88, 0x88, 0x88, 0x88,
+    0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x89,
+    0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x88, 0x88, 0x88, 0x88, 0x88, 0x88,
+    0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x85, 0x85, 0x85,
+    0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83,
+    0x82, 0x82, 0x82, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,
+    0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c,
+    0x7b, 0x7b, 0x7b, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c,
+    0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,
+    0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x82, 0x82, 0x82, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83,
+    0x83, 0x83, 0x83, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x86, 0x86, 0x86,
+    0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87,
+    0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87,
+    0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86,
+    0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x83, 0x83, 0x83,
+    0x83, 0x83, 0x83, 0x82, 0x82, 0x82, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80,
+    0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d,
+    0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7b, 0x7b, 0x7b, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d,
+    0x7d, 0x7d, 0x7d, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x80, 0x80, 0x80,
+    0x80, 0x80, 0x80, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x82, 0x82, 0x82, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83,
+    0x83, 0x83, 0x83, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85,
+    0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86,
+    0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85,
+    0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x83, 0x83, 0x83,
+    0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x80, 0x80, 0x80,
+    0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e,
+    0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7b, 0x7b, 0x7b, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x7b, 0x7b, 0x7b, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d,
+    0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80,
+    0x80, 0x80, 0x80, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x83, 0x83, 0x83,
+    0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84,
+    0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84,
+    0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83,
+    0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x82, 0x82, 0x82, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81,
+    0x81, 0x81, 0x81, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7e, 0x7e, 0x7e,
+    0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c,
+    0x7b, 0x7b, 0x7b, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7b, 0x7b, 0x7b,
+    0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7e, 0x7e, 0x7e,
+    0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80,
+    0x80, 0x80, 0x80, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82,
+    0x82, 0x82, 0x82, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83,
+    0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83,
+    0x83, 0x83, 0x83, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81,
+    0x81, 0x81, 0x81, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,
+    0x7f, 0x7f, 0x7f, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7c, 0x7c, 0x7c,
+    0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7c, 0x7c, 0x7c,
+    0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e,
+    0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80,
+    0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81,
+    0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81,
+    0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81,
+    0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,
+    0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d,
+    0x7d, 0x7d, 0x7d, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c,
+    0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e,
+    0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,
+    0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80,
+    0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80,
+    0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,
+    0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7d, 0x7d, 0x7d,
+    0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7b, 0x7b, 0x7b,
+    0x7b, 0x7b, 0x7b, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c,
+    0x7c, 0x7c, 0x7c, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7e, 0x7e, 0x7e,
+    0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,
+    0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,
+    0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f, 0x7f,
+    0x7f, 0x7f, 0x7f, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e,
+    0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c,
+    0x7c, 0x7c, 0x7c, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c,
+    0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d,
+    0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e,
+    0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e,
+    0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7e, 0x7d, 0x7d, 0x7d,
+    0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c,
+    0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c,
+    0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d,
+    0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d,
+    0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d,
+    0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7d, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c,
+    0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7c, 0x7c, 0x7c,
+    0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c,
+    0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c,
+    0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c,
+    0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b,
+    0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c,
+    0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7c, 0x7b, 0x7b, 0x7b,
+    0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7b, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a,
+    0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x7a, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79,
+    0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x79, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78,
+    0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77,
+    0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x77, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76,
+    0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x76, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    // ... (remainder of this array is the same 0x75 byte repeated; identical data lines collapsed for readability) ...
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75,
+    0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75, 0x75};
+static unsigned int lunarg_ppm_len = 196623;
diff --git a/src/third_party/vulkan-tools/src/cube/macOS/cube/AppDelegate.h b/src/third_party/vulkan-tools/src/cube/macOS/cube/AppDelegate.h
new file mode 100644
index 0000000..0b1dfe4
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/macOS/cube/AppDelegate.h
@@ -0,0 +1,23 @@
+/*
+ * AppDelegate.h
+ *
+ * Copyright (c) 2014-2018 The Brenwill Workshop Ltd. (http://www.brenwill.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import <Cocoa/Cocoa.h>
+
+@interface AppDelegate : NSObject<NSApplicationDelegate>
+
+@end
diff --git a/src/third_party/vulkan-tools/src/cube/macOS/cube/AppDelegate.m b/src/third_party/vulkan-tools/src/cube/macOS/cube/AppDelegate.m
new file mode 100644
index 0000000..2a7bcaf
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/macOS/cube/AppDelegate.m
@@ -0,0 +1,39 @@
+/*
+ * AppDelegate.m
+ *
+ * Copyright (c) 2014-2018 The Brenwill Workshop Ltd. (http://www.brenwill.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import "AppDelegate.h"
+
+@interface AppDelegate ()
+
+@end
+
+@implementation AppDelegate
+
+- (void)applicationDidFinishLaunching:(NSNotification *)aNotification {
+    // Insert code here to initialize your application
+}
+
+- (void)applicationWillTerminate:(NSNotification *)aNotification {
+    // Insert code here to tear down your application
+}
+
+- (BOOL)applicationShouldTerminateAfterLastWindowClosed:(NSApplication *)sender {
+    return YES;
+}
+
+@end
diff --git a/src/third_party/vulkan-tools/src/cube/macOS/cube/DemoViewController.h b/src/third_party/vulkan-tools/src/cube/macOS/cube/DemoViewController.h
new file mode 100644
index 0000000..7f90cc7
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/macOS/cube/DemoViewController.h
@@ -0,0 +1,33 @@
+/*
+ * DemoViewController.h
+ *
+ * Copyright (c) 2014-2018 The Brenwill Workshop Ltd. (http://www.brenwill.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import <AppKit/AppKit.h>
+
+#pragma mark -
+#pragma mark DemoViewController
+
+/** The main view controller for the demo storyboard. */
+@interface DemoViewController : NSViewController
+@end
+
+#pragma mark -
+#pragma mark DemoView
+
+/** The Metal-compatible view for the demo storyboard. */
+@interface DemoView : NSView
+@end
diff --git a/src/third_party/vulkan-tools/src/cube/macOS/cube/DemoViewController.m b/src/third_party/vulkan-tools/src/cube/macOS/cube/DemoViewController.m
new file mode 100644
index 0000000..c6e229d
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/macOS/cube/DemoViewController.m
@@ -0,0 +1,118 @@
+/*
+ * DemoViewController.m
+ *
+ * Copyright (c) 2014-2018 The Brenwill Workshop Ltd. (http://www.brenwill.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import "DemoViewController.h"
+#import <QuartzCore/CAMetalLayer.h>
+
+#include <MoltenVK/mvk_vulkan.h>
+
+#include "cube.c"
+
+#pragma mark -
+#pragma mark DemoViewController
+
+@implementation DemoViewController {
+    CVDisplayLinkRef _displayLink;
+    struct demo demo;
+    NSTimer* _timer;
+}
+
+- (void)dealloc {
+    demo_cleanup(&demo);
+    CVDisplayLinkRelease(_displayLink);
+    [super dealloc];
+}
+
+/** Since this is a single-view app, initialize Vulkan during view loading. */
+- (void)viewDidLoad {
+    [super viewDidLoad];
+
+    self.view.wantsLayer = YES;  // Back the view with a layer created by the makeBackingLayer method.
+
+    // Convert incoming args to "C" argc/argv strings
+    NSArray *args = [[NSProcessInfo processInfo] arguments];
+    const char** argv = (const char**) alloca(sizeof(char*) * args.count);
+    for(unsigned int i = 0; i < args.count; i++) {
+        NSString *s = args[i];
+        argv[i] = s.UTF8String;
+    }
+
+    demo_main(&demo, self.view.layer, args.count, argv);
+
+    // Monitor the rendering loop for a quit condition
+    _timer = [NSTimer scheduledTimerWithTimeInterval: 0.2
+                                              target: self
+                                            selector: @selector(onTick:)
+                                            userInfo: self
+                                             repeats: YES];
+
+    // Start the rendering loop
+    CVDisplayLinkCreateWithActiveCGDisplays(&_displayLink);
+    CVDisplayLinkSetOutputCallback(_displayLink, &DisplayLinkCallback, &demo);
+    CVDisplayLinkStart(_displayLink);
+
+}
+
+// Close the window if the demo is in a Quit state
+-(void)onTick:(NSTimer*)timer {
+    if (demo.quit) {
+        [[[self view] window] close];
+    }
+}
+
+#pragma mark Display loop callback function
+
+/** Rendering loop callback function for use with a CVDisplayLink. */
+static CVReturn DisplayLinkCallback(CVDisplayLinkRef displayLink, const CVTimeStamp* now,
+                                    const CVTimeStamp* outputTime,
+                                    CVOptionFlags flagsIn, CVOptionFlags* flagsOut, void* target) {
+    struct demo* demo = (struct demo*)target;
+    demo_run(demo);
+    if (demo->quit) {
+        CVDisplayLinkStop(displayLink);
+        CVDisplayLinkRelease(displayLink);
+    }
+    return kCVReturnSuccess;
+}
+
+@end
+
+#pragma mark -
+#pragma mark DemoView
+
+@implementation DemoView
+
+/** Indicates that the view wants to draw using the backing layer instead of using drawRect:.  */
+- (BOOL)wantsUpdateLayer {
+    return YES;
+}
+
+/** Returns a Metal-compatible layer. */
++ (Class)layerClass {
+    return [CAMetalLayer class];
+}
+
+/** If the wantsLayer property is set to YES, this method will be invoked to return a layer instance. */
+- (CALayer*)makeBackingLayer {
+    CALayer* layer = [self.class.layerClass layer];
+    CGSize viewScale = [self convertSizeToBacking:CGSizeMake(1.0, 1.0)];
+    layer.contentsScale = MIN(viewScale.width, viewScale.height);
+    return layer;
+}
+
+@end
diff --git a/src/third_party/vulkan-tools/src/cube/macOS/cube/Info.plist b/src/third_party/vulkan-tools/src/cube/macOS/cube/Info.plist
new file mode 100644
index 0000000..55145d3
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/macOS/cube/Info.plist
@@ -0,0 +1,38 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+	<key>CFBundleDevelopmentRegion</key>
+	<string>English</string>
+	<key>CFBundleExecutable</key>
+	<string>${MACOSX_BUNDLE_EXECUTABLE_NAME}</string>
+	<key>CFBundleGetInfoString</key>
+	<string>VkCube</string>
+	<key>CFBundleIconFile</key>
+	<string>VulkanIcon.icns</string>
+	<key>CFBundleIdentifier</key>
+	<string>com.lunarg.vkcube</string>
+	<key>CFBundleInfoDictionaryVersion</key>
+	<string>6.0</string>
+	<key>CFBundleLongVersionString</key>
+	<string>1.0</string>
+	<key>CFBundleName</key>
+	<string>VkCube</string>
+	<key>CFBundlePackageType</key>
+	<string>APPL</string>
+	<key>CFBundleShortVersionString</key>
+	<string>1.0</string>
+	<key>CFBundleSignature</key>
+	<string>????</string>
+	<key>CFBundleVersion</key>
+	<string>1.0</string>
+	<key>CSResourcesFileMapped</key>
+	<true/>
+	<key>NSHumanReadableCopyright</key>
+	<string>Copyright (c) 2018 The Khronos Group Inc. LunarG Inc. All rights reserved.</string>
+	<key>NSMainStoryboardFile</key>
+	<string>Main</string>
+	<key>NSPrincipalClass</key>
+	<string>NSApplication</string>
+</dict>
+</plist>
diff --git a/src/third_party/vulkan-tools/src/cube/macOS/cube/Resources/Main.storyboard b/src/third_party/vulkan-tools/src/cube/macOS/cube/Resources/Main.storyboard
new file mode 100644
index 0000000..d21c149
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/macOS/cube/Resources/Main.storyboard
@@ -0,0 +1,131 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<document type="com.apple.InterfaceBuilder3.Cocoa.Storyboard.XIB" version="3.0" toolsVersion="10117" systemVersion="15G26a" targetRuntime="MacOSX.Cocoa" propertyAccessControl="none" useAutolayout="YES" initialViewController="B8D-0N-5wS">
+    <dependencies>
+        <deployment identifier="macosx"/>
+        <plugIn identifier="com.apple.InterfaceBuilder.CocoaPlugin" version="10117"/>
+    </dependencies>
+    <scenes>
+        <!--Application-->
+        <scene sceneID="JPo-4y-FX3">
+            <objects>
+                <application id="hnw-xV-0zn" sceneMemberID="viewController">
+                    <menu key="mainMenu" title="Main Menu" systemMenu="main" id="AYu-sK-qS6">
+                        <items>
+                            <menuItem title="MoltenVK Demo" id="1Xt-HY-uBw">
+                                <modifierMask key="keyEquivalentModifierMask"/>
+                                <menu key="submenu" title="MoltenVK Demo" systemMenu="apple" id="uQy-DD-JDr">
+                                    <items>
+                                        <menuItem title="About Demo" id="5kV-Vb-QxS">
+                                            <modifierMask key="keyEquivalentModifierMask"/>
+                                            <connections>
+                                                <action selector="orderFrontStandardAboutPanel:" target="Ady-hI-5gd" id="Exp-CZ-Vem"/>
+                                            </connections>
+                                        </menuItem>
+                                        <menuItem isSeparatorItem="YES" id="VOq-y0-SEH"/>
+                                        <menuItem title="Hide Demo" keyEquivalent="h" id="Olw-nP-bQN">
+                                            <connections>
+                                                <action selector="hide:" target="Ady-hI-5gd" id="PnN-Uc-m68"/>
+                                            </connections>
+                                        </menuItem>
+                                        <menuItem title="Hide Others" keyEquivalent="h" id="Vdr-fp-XzO">
+                                            <modifierMask key="keyEquivalentModifierMask" option="YES" command="YES"/>
+                                            <connections>
+                                                <action selector="hideOtherApplications:" target="Ady-hI-5gd" id="VT4-aY-XCT"/>
+                                            </connections>
+                                        </menuItem>
+                                        <menuItem title="Show All" id="Kd2-mp-pUS">
+                                            <modifierMask key="keyEquivalentModifierMask"/>
+                                            <connections>
+                                                <action selector="unhideAllApplications:" target="Ady-hI-5gd" id="Dhg-Le-xox"/>
+                                            </connections>
+                                        </menuItem>
+                                        <menuItem isSeparatorItem="YES" id="kCx-OE-vgT"/>
+                                        <menuItem title="Quit Demo" keyEquivalent="q" id="4sb-4s-VLi">
+                                            <connections>
+                                                <action selector="terminate:" target="Ady-hI-5gd" id="Te7-pn-YzF"/>
+                                            </connections>
+                                        </menuItem>
+                                    </items>
+                                </menu>
+                            </menuItem>
+                            <menuItem title="Window" id="aUF-d1-5bR">
+                                <modifierMask key="keyEquivalentModifierMask"/>
+                                <menu key="submenu" title="Window" systemMenu="window" id="Td7-aD-5lo">
+                                    <items>
+                                        <menuItem title="Minimize" keyEquivalent="m" id="OY7-WF-poV">
+                                            <connections>
+                                                <action selector="performMiniaturize:" target="Ady-hI-5gd" id="VwT-WD-YPe"/>
+                                            </connections>
+                                        </menuItem>
+                                        <menuItem title="Zoom" id="R4o-n2-Eq4">
+                                            <modifierMask key="keyEquivalentModifierMask"/>
+                                            <connections>
+                                                <action selector="performZoom:" target="Ady-hI-5gd" id="DIl-cC-cCs"/>
+                                            </connections>
+                                        </menuItem>
+                                        <menuItem isSeparatorItem="YES" id="eu3-7i-yIM"/>
+                                        <menuItem title="Bring All to Front" id="LE2-aR-0XJ">
+                                            <modifierMask key="keyEquivalentModifierMask"/>
+                                            <connections>
+                                                <action selector="arrangeInFront:" target="Ady-hI-5gd" id="DRN-fu-gQh"/>
+                                            </connections>
+                                        </menuItem>
+                                    </items>
+                                </menu>
+                            </menuItem>
+                            <menuItem title="Help" id="wpr-3q-Mcd">
+                                <modifierMask key="keyEquivalentModifierMask"/>
+                                <menu key="submenu" title="Help" systemMenu="help" id="F2S-fz-NVQ">
+                                    <items>
+                                        <menuItem title="MoltenVK Demo Help" keyEquivalent="?" id="FKE-Sm-Kum">
+                                            <connections>
+                                                <action selector="showHelp:" target="Ady-hI-5gd" id="y7X-2Q-9no"/>
+                                            </connections>
+                                        </menuItem>
+                                    </items>
+                                </menu>
+                            </menuItem>
+                        </items>
+                    </menu>
+                    <connections>
+                        <outlet property="delegate" destination="Voe-Tx-rLC" id="PrD-fu-P6m"/>
+                    </connections>
+                </application>
+                <customObject id="Voe-Tx-rLC" customClass="AppDelegate"/>
+                <customObject id="Ady-hI-5gd" userLabel="First Responder" customClass="NSResponder" sceneMemberID="firstResponder"/>
+            </objects>
+            <point key="canvasLocation" x="83.5" y="-47"/>
+        </scene>
+        <!--Window Controller-->
+        <scene sceneID="R2V-B0-nI4">
+            <objects>
+                <windowController id="B8D-0N-5wS" sceneMemberID="viewController">
+                    <window key="window" title="MoltenVK Demo" allowsToolTipsWhenApplicationIsInactive="NO" autorecalculatesKeyViewLoop="NO" oneShot="NO" releasedWhenClosed="NO" showsToolbarButton="NO" visibleAtLaunch="NO" animationBehavior="default" id="IQv-IB-iLA">
+                        <windowStyleMask key="styleMask" titled="YES" closable="YES" miniaturizable="YES" resizable="YES"/>
+                        <rect key="contentRect" x="1051" y="656" width="300" height="200"/>
+                        <rect key="screenRect" x="0.0" y="0.0" width="2560" height="1417"/>
+                        <value key="minSize" type="size" width="300" height="200"/>
+                    </window>
+                    <connections>
+                        <segue destination="XfG-lQ-9wD" kind="relationship" relationship="window.shadowedContentViewController" id="cq2-FE-JQM"/>
+                    </connections>
+                </windowController>
+                <customObject id="Oky-zY-oP4" userLabel="First Responder" customClass="NSResponder" sceneMemberID="firstResponder"/>
+            </objects>
+            <point key="canvasLocation" x="83" y="146"/>
+        </scene>
+        <!--Demo View Controller-->
+        <scene sceneID="hIz-AP-VOD">
+            <objects>
+                <viewController id="XfG-lQ-9wD" customClass="DemoViewController" sceneMemberID="viewController">
+                    <view key="view" id="m2S-Jp-Qdl" customClass="DemoView">
+                        <rect key="frame" x="0.0" y="0.0" width="400" height="300"/>
+                        <autoresizingMask key="autoresizingMask"/>
+                    </view>
+                </viewController>
+                <customObject id="rPt-NT-nkU" userLabel="First Responder" customClass="NSResponder" sceneMemberID="firstResponder"/>
+            </objects>
+            <point key="canvasLocation" x="83" y="564"/>
+        </scene>
+    </scenes>
+</document>
diff --git a/src/third_party/vulkan-tools/src/cube/macOS/cube/Resources/VulkanIcon.icns b/src/third_party/vulkan-tools/src/cube/macOS/cube/Resources/VulkanIcon.icns
new file mode 100644
index 0000000..fb82fb7
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/macOS/cube/Resources/VulkanIcon.icns
Binary files differ
diff --git a/src/third_party/vulkan-tools/src/cube/macOS/cube/cube.cmake b/src/third_party/vulkan-tools/src/cube/macOS/cube/cube.cmake
new file mode 100644
index 0000000..18bcbad
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/macOS/cube/cube.cmake
@@ -0,0 +1,83 @@
+# ~~~
+# Copyright (c) 2018 Valve Corporation
+# Copyright (c) 2018 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ~~~
+
+# VkCube Application Bundle
+
+set(cube_SRCS
+    ${CMAKE_CURRENT_SOURCE_DIR}/macOS/cube/main.m
+    ${CMAKE_CURRENT_SOURCE_DIR}/macOS/cube/AppDelegate.m
+    ${CMAKE_CURRENT_SOURCE_DIR}/macOS/cube/DemoViewController.m)
+
+set(cube_HDRS ${CMAKE_CURRENT_SOURCE_DIR}/macOS/cube/AppDelegate.h ${CMAKE_CURRENT_SOURCE_DIR}/macOS/cube/DemoViewController.h)
+
+set(cube_RESOURCES ${CMAKE_BINARY_DIR}/staging-json/MoltenVK_icd.json
+    ${CMAKE_CURRENT_SOURCE_DIR}/macOS/cube/Resources/VulkanIcon.icns)
+
+# Have Xcode handle the Storyboard
+if(${CMAKE_GENERATOR} MATCHES "^Xcode.*")
+    set(cube_RESOURCES ${cube_RESOURCES} ${CMAKE_CURRENT_SOURCE_DIR}/macOS/cube/Resources/Main.storyboard)
+endif()
+
+add_executable(vkcube MACOSX_BUNDLE ${cube_SRCS} ${cube_HDRS} ${cube_RESOURCES} cube.vert.inc cube.frag.inc)
+
+# Handle the Storyboard ourselves
+if(NOT ${CMAKE_GENERATOR} MATCHES "^Xcode.*")
+    # Compile the storyboard file with the ibtool.
+    add_custom_command(TARGET vkcube POST_BUILD
+                       COMMAND ${IBTOOL}
+                               --errors
+                               --warnings
+                               --notices
+                               --output-format human-readable-text
+                               --compile ${CMAKE_CURRENT_BINARY_DIR}/vkcube.app/Contents/Resources/Main.storyboardc
+                                         ${CMAKE_CURRENT_SOURCE_DIR}/macOS/cube/Resources/Main.storyboard
+                       COMMENT "Compiling storyboard")
+endif()
+
+add_dependencies(vkcube MoltenVK_icd-staging-json)
+
+# Include demo source code dir because the macOS cube's Objective-C source includes the "original" cube application C source code.
+# Also include the MoltenVK helper files.
+target_include_directories(vkcube PRIVATE ${CMAKE_CURRENT_SOURCE_DIR} ${MOLTENVK_DIR}/MoltenVK/include)
+
+# We do this so vkcube is linked to an individual library and NOT a framework.
+target_link_libraries(vkcube ${Vulkan_LIBRARY} "-framework Cocoa -framework QuartzCore")
+
+set_target_properties(vkcube PROPERTIES MACOSX_BUNDLE_INFO_PLIST ${CMAKE_CURRENT_SOURCE_DIR}/macOS/cube/Info.plist)
+
+# The RESOURCE target property cannot be used in conjunction with the MACOSX_PACKAGE_LOCATION property.  We need fine-grained
+# control over the Resource directory, so we have to specify the destination of all the resource files on a per-destination-
+# directory basis. If all the files went into the top-level Resource directory, then we could simply set the RESOURCE property to a
+# list of all the resource files.
+set_source_files_properties(${cube_RESOURCES} PROPERTIES MACOSX_PACKAGE_LOCATION "Resources")
+set_source_files_properties("${CMAKE_BINARY_DIR}/staging-json/MoltenVK_icd.json"
+                            PROPERTIES
+                            MACOSX_PACKAGE_LOCATION
+                            "Resources/vulkan/icd.d")
+
+# Copy the MoltenVK lib into the bundle.
+if(${CMAKE_GENERATOR} MATCHES "^Xcode.*")
+    add_custom_command(TARGET vkcube POST_BUILD
+                       COMMAND ${CMAKE_COMMAND} -E copy "${MOLTENVK_DIR}/MoltenVK/macOS/dynamic/libMoltenVK.dylib"
+                               ${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG>/vkcube.app/Contents/Frameworks/libMoltenVK.dylib
+                       DEPENDS vulkan)
+else()
+    add_custom_command(TARGET vkcube POST_BUILD
+                       COMMAND ${CMAKE_COMMAND} -E copy "${MOLTENVK_DIR}/MoltenVK/macOS/dynamic/libMoltenVK.dylib"
+                               ${CMAKE_CURRENT_BINARY_DIR}/vkcube.app/Contents/Frameworks/libMoltenVK.dylib
+                       DEPENDS vulkan)
+endif()
diff --git a/src/third_party/vulkan-tools/src/cube/macOS/cube/main.m b/src/third_party/vulkan-tools/src/cube/macOS/cube/main.m
new file mode 100644
index 0000000..cf9e0d8
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/macOS/cube/main.m
@@ -0,0 +1,21 @@
+/*
+ * main.m
+ *
+ * Copyright (c) 2014-2018 The Brenwill Workshop Ltd. (http://www.brenwill.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import <Cocoa/Cocoa.h>
+
+int main(int argc, const char* argv[]) { return NSApplicationMain(argc, argv); }
diff --git a/src/third_party/vulkan-tools/src/cube/macOS/cubepp/AppDelegate.h b/src/third_party/vulkan-tools/src/cube/macOS/cubepp/AppDelegate.h
new file mode 100644
index 0000000..0b1dfe4
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/macOS/cubepp/AppDelegate.h
@@ -0,0 +1,23 @@
+/*
+ * AppDelegate.h
+ *
+ * Copyright (c) 2014-2018 The Brenwill Workshop Ltd. (http://www.brenwill.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import <Cocoa/Cocoa.h>
+
+@interface AppDelegate : NSObject<NSApplicationDelegate>
+
+@end
diff --git a/src/third_party/vulkan-tools/src/cube/macOS/cubepp/AppDelegate.mm b/src/third_party/vulkan-tools/src/cube/macOS/cubepp/AppDelegate.mm
new file mode 100644
index 0000000..2a7bcaf
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/macOS/cubepp/AppDelegate.mm
@@ -0,0 +1,39 @@
+/*
+ * AppDelegate.mm
+ *
+ * Copyright (c) 2014-2018 The Brenwill Workshop Ltd. (http://www.brenwill.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import "AppDelegate.h"
+
+@interface AppDelegate ()
+
+@end
+
+@implementation AppDelegate
+
+- (void)applicationDidFinishLaunching:(NSNotification *)aNotification {
+    // Insert code here to initialize your application
+}
+
+- (void)applicationWillTerminate:(NSNotification *)aNotification {
+    // Insert code here to tear down your application
+}
+
+- (BOOL)applicationShouldTerminateAfterLastWindowClosed:(NSApplication *)sender {
+    return YES;
+}
+
+@end
diff --git a/src/third_party/vulkan-tools/src/cube/macOS/cubepp/DemoViewController.h b/src/third_party/vulkan-tools/src/cube/macOS/cubepp/DemoViewController.h
new file mode 100644
index 0000000..7f90cc7
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/macOS/cubepp/DemoViewController.h
@@ -0,0 +1,33 @@
+/*
+ * DemoViewController.h
+ *
+ * Copyright (c) 2014-2018 The Brenwill Workshop Ltd. (http://www.brenwill.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import <AppKit/AppKit.h>
+
+#pragma mark -
+#pragma mark DemoViewController
+
+/** The main view controller for the demo storyboard. */
+@interface DemoViewController : NSViewController
+@end
+
+#pragma mark -
+#pragma mark DemoView
+
+/** The Metal-compatible view for the demo storyboard. */
+@interface DemoView : NSView
+@end
diff --git a/src/third_party/vulkan-tools/src/cube/macOS/cubepp/DemoViewController.mm b/src/third_party/vulkan-tools/src/cube/macOS/cubepp/DemoViewController.mm
new file mode 100644
index 0000000..9e003b9
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/macOS/cubepp/DemoViewController.mm
@@ -0,0 +1,117 @@
+/*
+ * DemoViewController.mm
+ *
+ * Copyright (c) 2014-2018 The Brenwill Workshop Ltd. (http://www.brenwill.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import "DemoViewController.h"
+#import <QuartzCore/CAMetalLayer.h>
+
+#include <MoltenVK/mvk_vulkan.h>
+
+#include "cube.cpp"
+
+#pragma mark -
+#pragma mark DemoViewController
+
+@implementation DemoViewController {
+    CVDisplayLinkRef _displayLink;
+    struct Demo demo;
+    NSTimer* _timer;
+}
+
+- (void)dealloc {
+    demo.cleanup();
+    CVDisplayLinkRelease(_displayLink);
+    [super dealloc];
+}
+
+/** Since this is a single-view app, initialize Vulkan during view loading. */
+- (void)viewDidLoad {
+    [super viewDidLoad];
+
+    self.view.wantsLayer = YES;  // Back the view with a layer created by the makeBackingLayer method.
+
+    // Convert incoming args to "C" argc/argv strings
+    NSArray *args = [[NSProcessInfo processInfo] arguments];
+    const char** argv = (const char**) alloca(sizeof(char*) * args.count);
+    for(unsigned int i = 0; i < args.count; i++) {
+        NSString *s = args[i];
+        argv[i] = s.UTF8String;
+    }
+
+    demo_main(demo, self.view.layer, args.count, argv);
+
+    // Monitor the rendering loop for a quit condition
+    _timer = [NSTimer scheduledTimerWithTimeInterval: 0.2
+                                              target: self
+                                            selector: @selector(onTick:)
+                                            userInfo: self
+                                             repeats: YES];
+
+    // Start the rendering loop
+    CVDisplayLinkCreateWithActiveCGDisplays(&_displayLink);
+    CVDisplayLinkSetOutputCallback(_displayLink, &DisplayLinkCallback, &demo);
+    CVDisplayLinkStart(_displayLink);
+}
+
+// Close the window if the demo is in a Quit state
+-(void)onTick:(NSTimer*)timer {
+    if (demo.quit) {
+        [[[self view] window] close];
+    }
+}
+
+#pragma mark Display loop callback function
+
+/** Rendering loop callback function for use with a CVDisplayLink. */
+static CVReturn DisplayLinkCallback(CVDisplayLinkRef displayLink, const CVTimeStamp* now,
+                                    const CVTimeStamp* outputTime,
+                                    CVOptionFlags flagsIn, CVOptionFlags* flagsOut, void* target) {
+    struct Demo* demo = (struct Demo*)target;
+    demo->run();
+    if (demo->quit) {
+        CVDisplayLinkStop(displayLink);
+        CVDisplayLinkRelease(displayLink);
+    }
+    return kCVReturnSuccess;
+}
+
+@end
+
+#pragma mark -
+#pragma mark DemoView
+
+@implementation DemoView
+
+/** Indicates that the view wants to draw using the backing layer instead of using drawRect:.  */
+- (BOOL)wantsUpdateLayer {
+    return YES;
+}
+
+/** Returns a Metal-compatible layer. */
++ (Class)layerClass {
+    return [CAMetalLayer class];
+}
+
+/** If the wantsLayer property is set to YES, this method will be invoked to return a layer instance. */
+- (CALayer*)makeBackingLayer {
+    CALayer* layer = [self.class.layerClass layer];
+    CGSize viewScale = [self convertSizeToBacking:CGSizeMake(1.0, 1.0)];
+    layer.contentsScale = MIN(viewScale.width, viewScale.height);
+    return layer;
+}
+
+@end
diff --git a/src/third_party/vulkan-tools/src/cube/macOS/cubepp/Info.plist b/src/third_party/vulkan-tools/src/cube/macOS/cubepp/Info.plist
new file mode 100644
index 0000000..d0b74e0
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/macOS/cubepp/Info.plist
@@ -0,0 +1,38 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+	<key>CFBundleDevelopmentRegion</key>
+	<string>English</string>
+	<key>CFBundleExecutable</key>
+	<string>${MACOSX_BUNDLE_EXECUTABLE_NAME}</string>
+	<key>CFBundleGetInfoString</key>
+	<string>VkCubepp</string>
+	<key>CFBundleIconFile</key>
+	<string>VulkanIcon.icns</string>
+	<key>CFBundleIdentifier</key>
+	<string>com.lunarg.cubepp</string>
+	<key>CFBundleInfoDictionaryVersion</key>
+	<string>6.0</string>
+	<key>CFBundleLongVersionString</key>
+	<string>1.0</string>
+	<key>CFBundleName</key>
+	<string>VkCubepp</string>
+	<key>CFBundlePackageType</key>
+	<string>APPL</string>
+	<key>CFBundleShortVersionString</key>
+	<string>1.0</string>
+	<key>CFBundleSignature</key>
+	<string>????</string>
+	<key>CFBundleVersion</key>
+	<string>1.0</string>
+	<key>CSResourcesFileMapped</key>
+	<true/>
+	<key>NSHumanReadableCopyright</key>
+	<string>Copyright (c) 2018 The Khronos Group Inc. LunarG Inc. All rights reserved.</string>
+	<key>NSMainStoryboardFile</key>
+	<string>Main</string>
+	<key>NSPrincipalClass</key>
+	<string>NSApplication</string>
+</dict>
+</plist>
diff --git a/src/third_party/vulkan-tools/src/cube/macOS/cubepp/Resources/Main.storyboard b/src/third_party/vulkan-tools/src/cube/macOS/cubepp/Resources/Main.storyboard
new file mode 100644
index 0000000..d21c149
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/macOS/cubepp/Resources/Main.storyboard
@@ -0,0 +1,131 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<document type="com.apple.InterfaceBuilder3.Cocoa.Storyboard.XIB" version="3.0" toolsVersion="10117" systemVersion="15G26a" targetRuntime="MacOSX.Cocoa" propertyAccessControl="none" useAutolayout="YES" initialViewController="B8D-0N-5wS">
+    <dependencies>
+        <deployment identifier="macosx"/>
+        <plugIn identifier="com.apple.InterfaceBuilder.CocoaPlugin" version="10117"/>
+    </dependencies>
+    <scenes>
+        <!--Application-->
+        <scene sceneID="JPo-4y-FX3">
+            <objects>
+                <application id="hnw-xV-0zn" sceneMemberID="viewController">
+                    <menu key="mainMenu" title="Main Menu" systemMenu="main" id="AYu-sK-qS6">
+                        <items>
+                            <menuItem title="MoltenVK Demo" id="1Xt-HY-uBw">
+                                <modifierMask key="keyEquivalentModifierMask"/>
+                                <menu key="submenu" title="MoltenVK Demo" systemMenu="apple" id="uQy-DD-JDr">
+                                    <items>
+                                        <menuItem title="About Demo" id="5kV-Vb-QxS">
+                                            <modifierMask key="keyEquivalentModifierMask"/>
+                                            <connections>
+                                                <action selector="orderFrontStandardAboutPanel:" target="Ady-hI-5gd" id="Exp-CZ-Vem"/>
+                                            </connections>
+                                        </menuItem>
+                                        <menuItem isSeparatorItem="YES" id="VOq-y0-SEH"/>
+                                        <menuItem title="Hide Demo" keyEquivalent="h" id="Olw-nP-bQN">
+                                            <connections>
+                                                <action selector="hide:" target="Ady-hI-5gd" id="PnN-Uc-m68"/>
+                                            </connections>
+                                        </menuItem>
+                                        <menuItem title="Hide Others" keyEquivalent="h" id="Vdr-fp-XzO">
+                                            <modifierMask key="keyEquivalentModifierMask" option="YES" command="YES"/>
+                                            <connections>
+                                                <action selector="hideOtherApplications:" target="Ady-hI-5gd" id="VT4-aY-XCT"/>
+                                            </connections>
+                                        </menuItem>
+                                        <menuItem title="Show All" id="Kd2-mp-pUS">
+                                            <modifierMask key="keyEquivalentModifierMask"/>
+                                            <connections>
+                                                <action selector="unhideAllApplications:" target="Ady-hI-5gd" id="Dhg-Le-xox"/>
+                                            </connections>
+                                        </menuItem>
+                                        <menuItem isSeparatorItem="YES" id="kCx-OE-vgT"/>
+                                        <menuItem title="Quit Demo" keyEquivalent="q" id="4sb-4s-VLi">
+                                            <connections>
+                                                <action selector="terminate:" target="Ady-hI-5gd" id="Te7-pn-YzF"/>
+                                            </connections>
+                                        </menuItem>
+                                    </items>
+                                </menu>
+                            </menuItem>
+                            <menuItem title="Window" id="aUF-d1-5bR">
+                                <modifierMask key="keyEquivalentModifierMask"/>
+                                <menu key="submenu" title="Window" systemMenu="window" id="Td7-aD-5lo">
+                                    <items>
+                                        <menuItem title="Minimize" keyEquivalent="m" id="OY7-WF-poV">
+                                            <connections>
+                                                <action selector="performMiniaturize:" target="Ady-hI-5gd" id="VwT-WD-YPe"/>
+                                            </connections>
+                                        </menuItem>
+                                        <menuItem title="Zoom" id="R4o-n2-Eq4">
+                                            <modifierMask key="keyEquivalentModifierMask"/>
+                                            <connections>
+                                                <action selector="performZoom:" target="Ady-hI-5gd" id="DIl-cC-cCs"/>
+                                            </connections>
+                                        </menuItem>
+                                        <menuItem isSeparatorItem="YES" id="eu3-7i-yIM"/>
+                                        <menuItem title="Bring All to Front" id="LE2-aR-0XJ">
+                                            <modifierMask key="keyEquivalentModifierMask"/>
+                                            <connections>
+                                                <action selector="arrangeInFront:" target="Ady-hI-5gd" id="DRN-fu-gQh"/>
+                                            </connections>
+                                        </menuItem>
+                                    </items>
+                                </menu>
+                            </menuItem>
+                            <menuItem title="Help" id="wpr-3q-Mcd">
+                                <modifierMask key="keyEquivalentModifierMask"/>
+                                <menu key="submenu" title="Help" systemMenu="help" id="F2S-fz-NVQ">
+                                    <items>
+                                        <menuItem title="MoltenVK Demo Help" keyEquivalent="?" id="FKE-Sm-Kum">
+                                            <connections>
+                                                <action selector="showHelp:" target="Ady-hI-5gd" id="y7X-2Q-9no"/>
+                                            </connections>
+                                        </menuItem>
+                                    </items>
+                                </menu>
+                            </menuItem>
+                        </items>
+                    </menu>
+                    <connections>
+                        <outlet property="delegate" destination="Voe-Tx-rLC" id="PrD-fu-P6m"/>
+                    </connections>
+                </application>
+                <customObject id="Voe-Tx-rLC" customClass="AppDelegate"/>
+                <customObject id="Ady-hI-5gd" userLabel="First Responder" customClass="NSResponder" sceneMemberID="firstResponder"/>
+            </objects>
+            <point key="canvasLocation" x="83.5" y="-47"/>
+        </scene>
+        <!--Window Controller-->
+        <scene sceneID="R2V-B0-nI4">
+            <objects>
+                <windowController id="B8D-0N-5wS" sceneMemberID="viewController">
+                    <window key="window" title="MoltenVK Demo" allowsToolTipsWhenApplicationIsInactive="NO" autorecalculatesKeyViewLoop="NO" oneShot="NO" releasedWhenClosed="NO" showsToolbarButton="NO" visibleAtLaunch="NO" animationBehavior="default" id="IQv-IB-iLA">
+                        <windowStyleMask key="styleMask" titled="YES" closable="YES" miniaturizable="YES" resizable="YES"/>
+                        <rect key="contentRect" x="1051" y="656" width="300" height="200"/>
+                        <rect key="screenRect" x="0.0" y="0.0" width="2560" height="1417"/>
+                        <value key="minSize" type="size" width="300" height="200"/>
+                    </window>
+                    <connections>
+                        <segue destination="XfG-lQ-9wD" kind="relationship" relationship="window.shadowedContentViewController" id="cq2-FE-JQM"/>
+                    </connections>
+                </windowController>
+                <customObject id="Oky-zY-oP4" userLabel="First Responder" customClass="NSResponder" sceneMemberID="firstResponder"/>
+            </objects>
+            <point key="canvasLocation" x="83" y="146"/>
+        </scene>
+        <!--Demo View Controller-->
+        <scene sceneID="hIz-AP-VOD">
+            <objects>
+                <viewController id="XfG-lQ-9wD" customClass="DemoViewController" sceneMemberID="viewController">
+                    <view key="view" id="m2S-Jp-Qdl" customClass="DemoView">
+                        <rect key="frame" x="0.0" y="0.0" width="400" height="300"/>
+                        <autoresizingMask key="autoresizingMask"/>
+                    </view>
+                </viewController>
+                <customObject id="rPt-NT-nkU" userLabel="First Responder" customClass="NSResponder" sceneMemberID="firstResponder"/>
+            </objects>
+            <point key="canvasLocation" x="83" y="564"/>
+        </scene>
+    </scenes>
+</document>
diff --git a/src/third_party/vulkan-tools/src/cube/macOS/cubepp/Resources/VulkanIcon.icns b/src/third_party/vulkan-tools/src/cube/macOS/cubepp/Resources/VulkanIcon.icns
new file mode 100644
index 0000000..fb82fb7
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/macOS/cubepp/Resources/VulkanIcon.icns
Binary files differ
diff --git a/src/third_party/vulkan-tools/src/cube/macOS/cubepp/cubepp.cmake b/src/third_party/vulkan-tools/src/cube/macOS/cubepp/cubepp.cmake
new file mode 100644
index 0000000..266dc53
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/macOS/cubepp/cubepp.cmake
@@ -0,0 +1,85 @@
+# ~~~
+# Copyright (c) 2018 Valve Corporation
+# Copyright (c) 2018 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ~~~
+
+# VkCube Application Bundle
+
+set(cubepp_SRCS
+    ${CMAKE_CURRENT_SOURCE_DIR}/macOS/cubepp/main.mm
+    ${CMAKE_CURRENT_SOURCE_DIR}/macOS/cubepp/AppDelegate.mm
+    ${CMAKE_CURRENT_SOURCE_DIR}/macOS/cubepp/DemoViewController.mm)
+
+set(
+    cubepp_HDRS ${CMAKE_CURRENT_SOURCE_DIR}/macOS/cubepp/AppDelegate.h ${CMAKE_CURRENT_SOURCE_DIR}/macOS/cubepp/DemoViewController.h
+    )
+
+set(cubepp_RESOURCES ${CMAKE_BINARY_DIR}/staging-json/MoltenVK_icd.json
+    ${CMAKE_CURRENT_SOURCE_DIR}/macOS/cubepp/Resources/VulkanIcon.icns)
+
+# Have Xcode handle the Storyboard
+if(${CMAKE_GENERATOR} MATCHES "^Xcode.*")
+    set(cubepp_RESOURCES ${cubepp_RESOURCES} ${CMAKE_CURRENT_SOURCE_DIR}/macOS/cubepp/Resources/Main.storyboard)
+endif()
+
+add_executable(vkcubepp MACOSX_BUNDLE ${cubepp_SRCS} ${cubepp_HDRS} ${cubepp_RESOURCES} cube.vert.inc cube.frag.inc)
+
+# Handle the Storyboard ourselves
+if(NOT ${CMAKE_GENERATOR} MATCHES "^Xcode.*")
+    # Compile the storyboard file with the ibtool.
+    add_custom_command(TARGET vkcubepp POST_BUILD
+                       COMMAND ${IBTOOL}
+                               --errors
+                               --warnings
+                               --notices
+                               --output-format human-readable-text
+                               --compile ${CMAKE_CURRENT_BINARY_DIR}/vkcubepp.app/Contents/Resources/Main.storyboardc
+                                         ${CMAKE_CURRENT_SOURCE_DIR}/macOS/cubepp/Resources/Main.storyboard
+                       COMMENT "Compiling storyboard")
+endif()
+
+add_dependencies(vkcubepp MoltenVK_icd-staging-json)
+
+# Include the demo source code dir because the macOS vkcubepp's Objective-C++ source includes the "original" vkcubepp application C++ source
+# code. Also include the MoltenVK helper files.
+target_include_directories(vkcubepp PRIVATE ${CMAKE_CURRENT_SOURCE_DIR} ${MOLTENVK_DIR}/MoltenVK/include)
+
+# We do this so vkcubepp is linked to an individual library and NOT a framework.
+target_link_libraries(vkcubepp ${Vulkan_LIBRARY} "-framework Cocoa -framework QuartzCore")
+
+set_target_properties(vkcubepp PROPERTIES MACOSX_BUNDLE_INFO_PLIST ${CMAKE_CURRENT_SOURCE_DIR}/macOS/cubepp/Info.plist)
+
+# The RESOURCE target property cannot be used in conjunction with the MACOSX_PACKAGE_LOCATION property.  We need fine-grained
+# control over the Resource directory, so we have to specify the destination of all the resource files on a per-destination-
+# directory basis. If all the files went into the top-level Resource directory, then we could simply set the RESOURCE property to a
+# list of all the resource files.
+set_source_files_properties(${cubepp_RESOURCES} PROPERTIES MACOSX_PACKAGE_LOCATION "Resources")
+set_source_files_properties("${CMAKE_BINARY_DIR}/staging-json/MoltenVK_icd.json"
+                            PROPERTIES
+                            MACOSX_PACKAGE_LOCATION
+                            "Resources/vulkan/icd.d")
+
+# Copy the MoltenVK lib into the bundle.
+if(${CMAKE_GENERATOR} MATCHES "^Xcode.*")
+    add_custom_command(TARGET vkcubepp POST_BUILD
+                       COMMAND ${CMAKE_COMMAND} -E copy "${MOLTENVK_DIR}/MoltenVK/macOS/dynamic/libMoltenVK.dylib"
+                               ${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG>/vkcubepp.app/Contents/Frameworks/libMoltenVK.dylib
+                       DEPENDS vulkan)
+else()
+    add_custom_command(TARGET vkcubepp POST_BUILD
+                       COMMAND ${CMAKE_COMMAND} -E copy "${MOLTENVK_DIR}/MoltenVK/macOS/dynamic/libMoltenVK.dylib"
+                               ${CMAKE_CURRENT_BINARY_DIR}/vkcubepp.app/Contents/Frameworks/libMoltenVK.dylib
+                       DEPENDS vulkan)
+endif()
diff --git a/src/third_party/vulkan-tools/src/cube/macOS/cubepp/main.mm b/src/third_party/vulkan-tools/src/cube/macOS/cubepp/main.mm
new file mode 100644
index 0000000..cf9e0d8
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/macOS/cubepp/main.mm
@@ -0,0 +1,21 @@
+/*
+ * main.mm
+ *
+ * Copyright (c) 2014-2018 The Brenwill Workshop Ltd. (http://www.brenwill.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import <Cocoa/Cocoa.h>
+
+int main(int argc, const char* argv[]) { return NSApplicationMain(argc, argv); }
diff --git a/src/third_party/vulkan-tools/src/cube/object_type_string_helper.h b/src/third_party/vulkan-tools/src/cube/object_type_string_helper.h
new file mode 100644
index 0000000..eb79ba3
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/cube/object_type_string_helper.h
@@ -0,0 +1,112 @@
+/***************************************************************************
+ *
+ * Copyright (c) 2018 The Khronos Group Inc.
+ * Copyright (c) 2018 Valve Corporation
+ * Copyright (c) 2018 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ *
+ ****************************************************************************/
+
+
+#pragma once
+#ifdef _WIN32
+#pragma warning( disable : 4065 )
+#endif
+
+#include <vulkan/vulkan.h>
+
+static inline const char* string_VkObjectType(VkObjectType input_value)
+{
+    switch ((VkObjectType)input_value)
+    {
+        case VK_OBJECT_TYPE_QUERY_POOL:
+            return "VK_OBJECT_TYPE_QUERY_POOL";
+        case VK_OBJECT_TYPE_OBJECT_TABLE_NVX:
+            return "VK_OBJECT_TYPE_OBJECT_TABLE_NVX";
+        case VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION:
+            return "VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION";
+        case VK_OBJECT_TYPE_SEMAPHORE:
+            return "VK_OBJECT_TYPE_SEMAPHORE";
+        case VK_OBJECT_TYPE_SHADER_MODULE:
+            return "VK_OBJECT_TYPE_SHADER_MODULE";
+        case VK_OBJECT_TYPE_SWAPCHAIN_KHR:
+            return "VK_OBJECT_TYPE_SWAPCHAIN_KHR";
+        case VK_OBJECT_TYPE_SAMPLER:
+            return "VK_OBJECT_TYPE_SAMPLER";
+        case VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX:
+            return "VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX";
+        case VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT:
+            return "VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT";
+        case VK_OBJECT_TYPE_IMAGE:
+            return "VK_OBJECT_TYPE_IMAGE";
+        case VK_OBJECT_TYPE_UNKNOWN:
+            return "VK_OBJECT_TYPE_UNKNOWN";
+        case VK_OBJECT_TYPE_DESCRIPTOR_POOL:
+            return "VK_OBJECT_TYPE_DESCRIPTOR_POOL";
+        case VK_OBJECT_TYPE_COMMAND_BUFFER:
+            return "VK_OBJECT_TYPE_COMMAND_BUFFER";
+        case VK_OBJECT_TYPE_BUFFER:
+            return "VK_OBJECT_TYPE_BUFFER";
+        case VK_OBJECT_TYPE_SURFACE_KHR:
+            return "VK_OBJECT_TYPE_SURFACE_KHR";
+        case VK_OBJECT_TYPE_INSTANCE:
+            return "VK_OBJECT_TYPE_INSTANCE";
+        case VK_OBJECT_TYPE_VALIDATION_CACHE_EXT:
+            return "VK_OBJECT_TYPE_VALIDATION_CACHE_EXT";
+        case VK_OBJECT_TYPE_IMAGE_VIEW:
+            return "VK_OBJECT_TYPE_IMAGE_VIEW";
+        case VK_OBJECT_TYPE_DESCRIPTOR_SET:
+            return "VK_OBJECT_TYPE_DESCRIPTOR_SET";
+        case VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT:
+            return "VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT";
+        case VK_OBJECT_TYPE_COMMAND_POOL:
+            return "VK_OBJECT_TYPE_COMMAND_POOL";
+        case VK_OBJECT_TYPE_PHYSICAL_DEVICE:
+            return "VK_OBJECT_TYPE_PHYSICAL_DEVICE";
+        case VK_OBJECT_TYPE_DISPLAY_KHR:
+            return "VK_OBJECT_TYPE_DISPLAY_KHR";
+        case VK_OBJECT_TYPE_BUFFER_VIEW:
+            return "VK_OBJECT_TYPE_BUFFER_VIEW";
+        case VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT:
+            return "VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT";
+        case VK_OBJECT_TYPE_FRAMEBUFFER:
+            return "VK_OBJECT_TYPE_FRAMEBUFFER";
+        case VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE:
+            return "VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE";
+        case VK_OBJECT_TYPE_PIPELINE_CACHE:
+            return "VK_OBJECT_TYPE_PIPELINE_CACHE";
+        case VK_OBJECT_TYPE_PIPELINE_LAYOUT:
+            return "VK_OBJECT_TYPE_PIPELINE_LAYOUT";
+        case VK_OBJECT_TYPE_DEVICE_MEMORY:
+            return "VK_OBJECT_TYPE_DEVICE_MEMORY";
+        case VK_OBJECT_TYPE_FENCE:
+            return "VK_OBJECT_TYPE_FENCE";
+        case VK_OBJECT_TYPE_QUEUE:
+            return "VK_OBJECT_TYPE_QUEUE";
+        case VK_OBJECT_TYPE_DEVICE:
+            return "VK_OBJECT_TYPE_DEVICE";
+        case VK_OBJECT_TYPE_RENDER_PASS:
+            return "VK_OBJECT_TYPE_RENDER_PASS";
+        case VK_OBJECT_TYPE_DISPLAY_MODE_KHR:
+            return "VK_OBJECT_TYPE_DISPLAY_MODE_KHR";
+        case VK_OBJECT_TYPE_EVENT:
+            return "VK_OBJECT_TYPE_EVENT";
+        case VK_OBJECT_TYPE_PIPELINE:
+            return "VK_OBJECT_TYPE_PIPELINE";
+        default:
+            return "Unhandled VkObjectType";
+    }
+}
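As a brief illustration of how a stringifier like `string_VkObjectType` is typically consumed (the header above only maps enum values to their names), here is a minimal sketch of a debug-utils messenger callback that labels the objects attached to a message; the callback itself is illustrative and not part of the imported sources.

```cpp
// Illustrative only: log the type of every object attached to a debug-utils message.
#include <cstdio>

#include <vulkan/vulkan.h>

#include "object_type_string_helper.h"

static VKAPI_ATTR VkBool32 VKAPI_CALL DemoDebugCallback(
    VkDebugUtilsMessageSeverityFlagBitsEXT /*severity*/,
    VkDebugUtilsMessageTypeFlagsEXT /*types*/,
    const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData,
    void* /*pUserData*/) {
    std::printf("%s\n", pCallbackData->pMessage);
    for (uint32_t i = 0; i < pCallbackData->objectCount; ++i) {
        // string_VkObjectType comes from the header added above.
        std::printf("  object %u: %s\n", i,
                    string_VkObjectType(pCallbackData->pObjects[i].objectType));
    }
    return VK_FALSE;  // Never abort the call that triggered the message.
}
```

A callback with this signature would be registered via `vkCreateDebugUtilsMessengerEXT` when the VK_EXT_debug_utils extension is enabled.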
diff --git a/src/third_party/vulkan-tools/src/external/x64/lib/vulkan-1.lib b/src/third_party/vulkan-tools/src/external/x64/lib/vulkan-1.lib
new file mode 100644
index 0000000..3e23a7b
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/external/x64/lib/vulkan-1.lib
Binary files differ
diff --git a/src/third_party/vulkan-tools/src/external/x86/lib/vulkan-1.lib b/src/third_party/vulkan-tools/src/external/x86/lib/vulkan-1.lib
new file mode 100644
index 0000000..3caa7ba
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/external/x86/lib/vulkan-1.lib
Binary files differ
diff --git a/src/third_party/vulkan-tools/src/icd/CMakeLists.txt b/src/third_party/vulkan-tools/src/icd/CMakeLists.txt
new file mode 100644
index 0000000..278536b
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/icd/CMakeLists.txt
@@ -0,0 +1,177 @@
+# ~~~
+# Copyright (c) 2014-2018 Valve Corporation
+# Copyright (c) 2014-2018 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ~~~
+
+if(WIN32)
+    add_definitions(-DVK_USE_PLATFORM_WIN32_KHR -DVK_USE_PLATFORM_WIN32_KHX -DWIN32_LEAN_AND_MEAN)
+elseif(ANDROID)
+    add_definitions(-DVK_USE_PLATFORM_ANDROID_KHR -DVK_USE_PLATFORM_ANDROID_KHX)
+elseif(APPLE)
+    add_definitions(-DVK_USE_PLATFORM_MACOS_MVK)
+elseif(UNIX AND NOT APPLE) # i.e. Linux
+    if(BUILD_WSI_XCB_SUPPORT)
+        add_definitions(-DVK_USE_PLATFORM_XCB_KHR -DVK_USE_PLATFORM_XCB_KHX)
+    endif()
+
+    if(BUILD_WSI_XLIB_SUPPORT)
+        add_definitions(-DVK_USE_PLATFORM_XLIB_KHR -DVK_USE_PLATFORM_XLIB_KHX -DVK_USE_PLATFORM_XLIB_XRANDR_EXT)
+    endif()
+
+    if(BUILD_WSI_WAYLAND_SUPPORT)
+        add_definitions(-DVK_USE_PLATFORM_WAYLAND_KHR -DVK_USE_PLATFORM_WAYLAND_KHX)
+    endif()
+else()
+    message(FATAL_ERROR "Unsupported Platform!")
+endif()
+
+# Copy or link the JSON files to the binary directory for ease of use in the build tree.
+set(ICD_JSON_FILES VkICD_mock_icd)
+if(WIN32)
+    # extra setup for out-of-tree builds
+    if(NOT (CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_CURRENT_BINARY_DIR))
+        if(CMAKE_GENERATOR MATCHES "^Visual Studio.*")
+            foreach(config_file ${ICD_JSON_FILES})
+                file(TO_NATIVE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/windows/${config_file}.json src_json)
+                file(TO_NATIVE_PATH ${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG>/${config_file}.json dst_json)
+                add_custom_target(${config_file}-json ALL COMMAND copy ${src_json} ${dst_json} VERBATIM)
+                set_target_properties(${config_file}-json PROPERTIES FOLDER ${TOOLS_HELPER_FOLDER})
+            endforeach(config_file)
+        else()
+            foreach(config_file ${ICD_JSON_FILES})
+                file(TO_NATIVE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/windows/${config_file}.json src_json)
+                file(TO_NATIVE_PATH ${CMAKE_CURRENT_BINARY_DIR}/${config_file}.json dst_json)
+                add_custom_target(${config_file}-json ALL COMMAND copy ${src_json} ${dst_json} VERBATIM)
+            endforeach(config_file)
+        endif()
+    endif()
+elseif(APPLE)
+    # extra setup for out-of-tree builds
+    if(NOT (CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_CURRENT_BINARY_DIR))
+        if(CMAKE_GENERATOR MATCHES "^Xcode.*")
+            add_custom_target(mk_icd_config_dir ALL
+                              COMMAND ${CMAKE_COMMAND} -E make_directory ${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG>)
+            foreach(config_file ${ICD_JSON_FILES})
+                add_custom_target(${config_file}-json ALL
+                                  DEPENDS mk_icd_config_dir
+                                  COMMAND ${CMAKE_COMMAND} -E create_symlink ${CMAKE_CURRENT_SOURCE_DIR}/macos/${config_file}.json
+                                          $<CONFIG> ${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG>/${config_file}.json
+                                  VERBATIM)
+            endforeach(config_file)
+        else()
+            foreach(config_file ${ICD_JSON_FILES})
+                add_custom_target(${config_file}-json ALL
+                                  COMMAND ${CMAKE_COMMAND} -E create_symlink ${CMAKE_CURRENT_SOURCE_DIR}/macos/${config_file}.json
+                                          ${config_file}.json
+                                  VERBATIM)
+            endforeach(config_file)
+        endif()
+    endif()
+else()
+    # extra setup for out-of-tree builds
+    if(NOT (CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_CURRENT_BINARY_DIR))
+        foreach(config_file ${ICD_JSON_FILES})
+            add_custom_target(${config_file}-json ALL
+                              COMMAND ${CMAKE_COMMAND} -E create_symlink ${CMAKE_CURRENT_SOURCE_DIR}/linux/${config_file}.json
+                                      ${config_file}.json
+                              VERBATIM)
+        endforeach(config_file)
+    endif()
+endif()
+
+# For ICD with a direct dependency on a project with the same name, use it.
+if(NOT (CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_CURRENT_BINARY_DIR))
+    foreach(config_file ${ICD_JSON_FILES})
+        add_dependencies(${config_file}-json ${config_file})
+    endforeach(config_file)
+endif()
+add_custom_target(generate_icd_files DEPENDS mock_icd.h mock_icd.cpp)
+set_target_properties(generate_icd_files PROPERTIES FOLDER ${TOOLS_HELPER_FOLDER})
+
+if(WIN32)
+    macro(add_vk_icd target)
+        file(TO_NATIVE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/VkICD_${target}.def DEF_FILE)
+        add_custom_target(copy-${target}-def-file ALL
+                          COMMAND ${CMAKE_COMMAND} -E copy_if_different ${DEF_FILE} VkICD_${target}.def
+                          VERBATIM)
+        set_target_properties(copy-${target}-def-file PROPERTIES FOLDER ${TOOLS_HELPER_FOLDER})
+        add_library(VkICD_${target} SHARED ${ARGN} VkICD_${target}.def)
+        if(INSTALL_ICD)
+            install(TARGETS VkICD_${target} DESTINATION ${CMAKE_INSTALL_LIBDIR})
+        endif()
+    endmacro()
+elseif(APPLE)
+    macro(add_vk_icd target)
+        add_library(VkICD_${target} SHARED ${ARGN})
+        set_target_properties(VkICD_${target} PROPERTIES LINK_FLAGS "-Wl")
+        if(INSTALL_ICD)
+            install(TARGETS VkICD_${target} DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
+        endif()
+    endmacro()
+else()
+    macro(add_vk_icd target)
+        add_library(VkICD_${target} SHARED ${ARGN})
+        set_target_properties(VkICD_${target} PROPERTIES LINK_FLAGS "-Wl,-export-dynamic,-Bsymbolic,--exclude-libs,ALL")
+        if((UNIX AND NOT APPLE) AND INSTALL_ICD) # i.e. Linux
+            install(TARGETS VkICD_${target} DESTINATION ${CMAKE_INSTALL_LIBDIR})
+        endif()
+    endmacro()
+endif()
+
+include_directories(${CMAKE_CURRENT_SOURCE_DIR}
+                    ${VulkanHeaders_INCLUDE_DIR}
+                    ${WAYLAND_CLIENT_INCLUDE_DIR}
+                    ${CMAKE_CURRENT_BINARY_DIR}
+                    ${PROJECT_BINARY_DIR}
+                    ${CMAKE_BINARY_DIR})
+
+if(WIN32)
+    set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -D_CRT_SECURE_NO_WARNINGS")
+    set(CMAKE_C_FLAGS_RELEASE "${CMAKE_C_FLAGS_RELEASE} -D_CRT_SECURE_NO_WARNINGS")
+    set(CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -D_CRT_SECURE_NO_WARNINGS")
+    set(CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -D_CRT_SECURE_NO_WARNINGS")
+    set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -D_CRT_SECURE_NO_WARNINGS /bigobj")
+    set(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -D_CRT_SECURE_NO_WARNINGS /bigobj")
+    # Turn off transitional "changed behavior" warning message for Visual Studio versions prior to 2015. The changed behavior is
+    # that constructor initializers are now fixed to clear the struct members.
+    add_compile_options("$<$<AND:$<CXX_COMPILER_ID:MSVC>,$<VERSION_LESS:$<CXX_COMPILER_VERSION>,19>>:/wd4351>")
+else()
+    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wpointer-arith -Wno-unused-function -Wno-sign-compare")
+    set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wpointer-arith -Wno-unused-function -Wno-sign-compare")
+endif()
+
+add_vk_icd(mock_icd generated/mock_icd.cpp generated/mock_icd.h)
+
+# JSON file(s) install targets. For Linux, need to remove the "./" from the library path before installing to system directories.
+if((UNIX AND NOT APPLE) AND INSTALL_ICD) # i.e. Linux
+    foreach(config_file ${ICD_JSON_FILES})
+        add_custom_target(${config_file}-staging-json ALL
+                          COMMAND mkdir -p ${CMAKE_CURRENT_BINARY_DIR}/staging-json
+                          COMMAND cp ${CMAKE_CURRENT_SOURCE_DIR}/linux/${config_file}.json ${CMAKE_CURRENT_BINARY_DIR}/staging-json
+                          COMMAND sed -i -e "/\"library_path\":/s$./libVkICD$libVkICD$"
+                                  ${CMAKE_CURRENT_BINARY_DIR}/staging-json/${config_file}.json
+                          VERBATIM
+                          DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/linux/${config_file}.json)
+        install(FILES ${CMAKE_CURRENT_BINARY_DIR}/staging-json/${config_file}.json
+                DESTINATION ${CMAKE_INSTALL_DATAROOTDIR}/vulkan/icd.d)
+    endforeach(config_file)
+endif()
+
+# Windows uses the JSON file as-is.
+if(WIN32 AND INSTALL_ICD)
+    foreach(config_file ${ICD_JSON_FILES})
+        install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/windows/${config_file}.json DESTINATION ${CMAKE_INSTALL_LIBDIR})
+    endforeach(config_file)
+endif()
diff --git a/src/third_party/vulkan-tools/src/icd/README.md b/src/third_party/vulkan-tools/src/icd/README.md
new file mode 100644
index 0000000..5554bfc
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/icd/README.md
@@ -0,0 +1,87 @@
+# Vulkan Mock ICD
+
+This directory contains a mock ICD driver designed for validation layer testing.
+
+## Introduction
+
+The mock ICD is focused on enabling validation testing apart from an actual device. Because the validation layers
+sit on top of the ICD and don't depend upon the results of Vulkan rendering, they can be tested without having actual
+GPU hardware backing the ICD. The final mock driver will be composed of three main features: a null driver, flexible
+device configuration, and entrypoint tracking & verification.
+
+### Null Driver
+The initial mock driver features just the null driver capability. This allows all of the validation tests to be run
+on a fixed device configuration that is hard-coded into the ICD.
+
+### Device Configuration
+Device configuration can be added by running the DevSim layer below the validation layers, but above the mock driver.
+The current plan is that if device customization beyond DevSim's capabilities is required for validation layer testing,
+then the DevSim layer will be enhanced with the required features. The mock layer itself is just planned to have some
+hard-coded device settings that will enable it to run all of the validation tests.
+
+### Entrypoint Tracking & Verification
+Entrypoint tracking and verification will be added to the mock layer as a later feature. The idea is that all expected
+Vulkan function calls and their parameters can be stored in the ICD and then a separate call can be made to verify that
+the expected calls and parameters actually entered the ICD. This allows verification that the validation layers are
+correctly passing calls and their parameters through to the ICD unchanged.
+
+## Using the Mock ICD
+
+To enable the mock ICD, set the VK\_ICD\_FILENAMES environment variable to point to your {BUILD_DIR}/icd/VkICD\_mock\_icd.json.
+
+## Plans
+
+The initial mock ICD is just the null driver, which can be used in combination with DevSim to test validation layers on
+simulated devices. Here's a rough sequence of tasks planned for the mock driver going forward:
+- [X] Get all LVL tests passing on the bare null driver
+- [X] Get failing tests passing
+- [X] Get skipped tests passing as able
+- [ ] Get all LVL tests to run without unexpected errors
+- [X] Develop automated test flow using mock ICD (alternative to or replacement for run\_all\_tests.sh)
+- [X] Get all LVL tests to pass on a selection of device profiles using DevSim layer
+- [ ] Update LVL tests with device dependencies to target specific device profiles
+- [ ] Add entrypoint tracking & verification
+- [ ] Initially track expected calls
+- [ ] Update some tests to verify expected capability
+- [ ] Expand tracking to include parameters
+
+## Beyond Validation Layer Testing
+
+The focus of the mock ICD is validation testing, but the code is available for anyone wishing to use and enhance it for alternative
+purposes.
+With the following enhancements, the mock driver state available to the app should very closely mimic an actual ICD:
+- Update various function return codes
+- Simulated synchronization objects
+- Simulated query with mock data
+- Basic command buffer state tracking to note synch object transitions and query state updates
+
+Beyond that, it's certainly possible that the mock ICD could be hooked up to a SW renderer and serve as a virtual GPU with complete rendering/compute
+capabilities.
+
+## Status
+
+This is a temporary section used for tracking as the mock ICD is being developed. Once all tests are passing with the mock, this section can be removed.
+Currently 333/333 tests are passing with the mock ICD, but many passing tests have unexpected validation errors that need to be cleaned up.
+
+### Failing Tests
+
+NONE
+
+### Passing Tests With Unexpected Errors
+
+- VkLayerTest.RenderPassInUseDestroyedSignaled
+- VkLayerTest.RenderPassIncompatible
+
+### Skipped Tests
+
+- VkLayerTest.BindImageInvalidMemoryType
+- VkLayerTest.CreatePipelineBadVertexAttributeFormat
+- VkLayerTest.MiscBlitImageTests
+- VkLayerTest.TemporaryExternalSemaphore
+- VkLayerTest.TemporaryExternalFence
+- VkLayerTest.InvalidBarriers
+- VkLayerTest.CommandQueueFlags
+- VkPositiveLayerTest.TwoQueuesEnsureCorrectRetirementWithWorkStolen
+- VkPositiveLayerTest.ExternalSemaphore
+- VkPositiveLayerTest.ExternalFence
+- VkPositiveLayerTest.ExternalMemory
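To make the "Using the Mock ICD" note above concrete, here is a minimal sketch of a test harness that points the Vulkan loader at the mock driver before creating an instance; the `setenv` call and the JSON path are illustrative stand-ins for the README's {BUILD_DIR} placeholder, and a POSIX-style environment is assumed.

```cpp
// Illustrative only: select the mock ICD before the loader enumerates drivers.
#include <stdlib.h>

#include <vulkan/vulkan.h>

int main() {
    // Hypothetical path standing in for {BUILD_DIR}/icd/VkICD_mock_icd.json; it must be
    // set before the first loader call so that vkCreateInstance resolves to the mock ICD.
    setenv("VK_ICD_FILENAMES", "/path/to/BUILD_DIR/icd/VkICD_mock_icd.json", 1);

    VkApplicationInfo app = {VK_STRUCTURE_TYPE_APPLICATION_INFO};
    app.apiVersion = VK_API_VERSION_1_0;

    VkInstanceCreateInfo create_info = {VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO};
    create_info.pApplicationInfo = &app;

    VkInstance instance = VK_NULL_HANDLE;
    return vkCreateInstance(&create_info, nullptr, &instance) == VK_SUCCESS ? 0 : 1;
}
```

On Windows the same effect can be achieved by setting VK_ICD_FILENAMES in the test environment rather than via setenv.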
diff --git a/src/third_party/vulkan-tools/src/icd/VkICD_mock_icd.def b/src/third_party/vulkan-tools/src/icd/VkICD_mock_icd.def
new file mode 100644
index 0000000..490fb3e
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/icd/VkICD_mock_icd.def
@@ -0,0 +1,36 @@
+
+;;;; Begin Copyright Notice ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;
+; Copyright (c) 2015-2017 The Khronos Group Inc.
+; Copyright (c) 2015-2017 Valve Corporation
+; Copyright (c) 2015-2017 LunarG, Inc.
+; Copyright (c) 2015-2017 Google Inc.
+;
+; Licensed under the Apache License, Version 2.0 (the "License");
+; you may not use this file except in compliance with the License.
+; You may obtain a copy of the License at
+;
+;     http://www.apache.org/licenses/LICENSE-2.0
+;
+; Unless required by applicable law or agreed to in writing, software
+; distributed under the License is distributed on an "AS IS" BASIS,
+; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+; See the License for the specific language governing permissions and
+; limitations under the License.
+;
+;  Author: Tobin Ehlis <tobine@google.com>
+;
+;;;;  End Copyright Notice ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+LIBRARY VkICD_mock_icd
+EXPORTS
+vk_icdGetInstanceProcAddr
+vk_icdGetPhysicalDeviceProcAddr
+vk_icdNegotiateLoaderICDInterfaceVersion
+vkDestroySurfaceKHR
+vkGetPhysicalDeviceSurfaceSupportKHR
+vkGetPhysicalDeviceSurfaceCapabilitiesKHR
+vkGetPhysicalDeviceSurfaceFormatsKHR
+vkGetPhysicalDeviceSurfacePresentModesKHR
+vkCreateDisplayPlaneSurfaceKHR
+vkCreateWin32SurfaceKHR
diff --git a/src/third_party/vulkan-tools/src/icd/generated/.clang-format b/src/third_party/vulkan-tools/src/icd/generated/.clang-format
new file mode 100644
index 0000000..3bb983a
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/icd/generated/.clang-format
@@ -0,0 +1,5 @@
+---
+# Disable clang-format for generated code
+DisableFormat: true
+SortIncludes: false
+...
diff --git a/src/third_party/vulkan-tools/src/icd/generated/mock_icd.cpp b/src/third_party/vulkan-tools/src/icd/generated/mock_icd.cpp
new file mode 100644
index 0000000..658a456
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/icd/generated/mock_icd.cpp
@@ -0,0 +1,4463 @@
+/*
+** Copyright (c) 2015-2018 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+#include "mock_icd.h"
+#include <stdlib.h>
+#include <vector>
+#include "vk_typemap_helper.h"
+namespace vkmock {
+
+
+using std::unordered_map;
+
+// Map device memory handle to any mapped allocations that we'll need to free on unmap
+static unordered_map<VkDeviceMemory, std::vector<void*>> mapped_memory_map;
+
+static VkPhysicalDevice physical_device = nullptr;
+static unordered_map<VkDevice, unordered_map<uint32_t, unordered_map<uint32_t, VkQueue>>> queue_map;
+static unordered_map<VkDevice, unordered_map<VkBuffer, VkBufferCreateInfo>> buffer_map;
+
+// TODO: Would like to codegen this but limits aren't in XML
+static VkPhysicalDeviceLimits SetLimits(VkPhysicalDeviceLimits *limits) {
+    limits->maxImageDimension1D = 4096;
+    limits->maxImageDimension2D = 4096;
+    limits->maxImageDimension3D = 256;
+    limits->maxImageDimensionCube = 4096;
+    limits->maxImageArrayLayers = 256;
+    limits->maxTexelBufferElements = 65536;
+    limits->maxUniformBufferRange = 16384;
+    limits->maxStorageBufferRange = 134217728;
+    limits->maxPushConstantsSize = 128;
+    limits->maxMemoryAllocationCount = 4096;
+    limits->maxSamplerAllocationCount = 4000;
+    limits->bufferImageGranularity = 1;
+    limits->sparseAddressSpaceSize = 2147483648;
+    limits->maxBoundDescriptorSets = 4;
+    limits->maxPerStageDescriptorSamplers = 16;
+    limits->maxPerStageDescriptorUniformBuffers = 12;
+    limits->maxPerStageDescriptorStorageBuffers = 4;
+    limits->maxPerStageDescriptorSampledImages = 16;
+    limits->maxPerStageDescriptorStorageImages = 4;
+    limits->maxPerStageDescriptorInputAttachments = 4;
+    limits->maxPerStageResources = 128;
+    limits->maxDescriptorSetSamplers = 96;
+    limits->maxDescriptorSetUniformBuffers = 72;
+    limits->maxDescriptorSetUniformBuffersDynamic = 8;
+    limits->maxDescriptorSetStorageBuffers = 24;
+    limits->maxDescriptorSetStorageBuffersDynamic = 4;
+    limits->maxDescriptorSetSampledImages = 96;
+    limits->maxDescriptorSetStorageImages = 24;
+    limits->maxDescriptorSetInputAttachments = 4;
+    limits->maxVertexInputAttributes = 16;
+    limits->maxVertexInputBindings = 16;
+    limits->maxVertexInputAttributeOffset = 2047;
+    limits->maxVertexInputBindingStride = 2048;
+    limits->maxVertexOutputComponents = 64;
+    limits->maxTessellationGenerationLevel = 64;
+    limits->maxTessellationPatchSize = 32;
+    limits->maxTessellationControlPerVertexInputComponents = 64;
+    limits->maxTessellationControlPerVertexOutputComponents = 64;
+    limits->maxTessellationControlPerPatchOutputComponents = 120;
+    limits->maxTessellationControlTotalOutputComponents = 2048;
+    limits->maxTessellationEvaluationInputComponents = 64;
+    limits->maxTessellationEvaluationOutputComponents = 64;
+    limits->maxGeometryShaderInvocations = 32;
+    limits->maxGeometryInputComponents = 64;
+    limits->maxGeometryOutputComponents = 64;
+    limits->maxGeometryOutputVertices = 256;
+    limits->maxGeometryTotalOutputComponents = 1024;
+    limits->maxFragmentInputComponents = 64;
+    limits->maxFragmentOutputAttachments = 4;
+    limits->maxFragmentDualSrcAttachments = 1;
+    limits->maxFragmentCombinedOutputResources = 4;
+    limits->maxComputeSharedMemorySize = 16384;
+    limits->maxComputeWorkGroupCount[0] = 65535;
+    limits->maxComputeWorkGroupCount[1] = 65535;
+    limits->maxComputeWorkGroupCount[2] = 65535;
+    limits->maxComputeWorkGroupInvocations = 128;
+    limits->maxComputeWorkGroupSize[0] = 128;
+    limits->maxComputeWorkGroupSize[1] = 128;
+    limits->maxComputeWorkGroupSize[2] = 64;
+    limits->subPixelPrecisionBits = 4;
+    limits->subTexelPrecisionBits = 4;
+    limits->mipmapPrecisionBits = 4;
+    limits->maxDrawIndexedIndexValue = UINT32_MAX;
+    limits->maxDrawIndirectCount = UINT16_MAX;
+    limits->maxSamplerLodBias = 2.0f;
+    limits->maxSamplerAnisotropy = 16;
+    limits->maxViewports = 16;
+    limits->maxViewportDimensions[0] = 4096;
+    limits->maxViewportDimensions[1] = 4096;
+    limits->viewportBoundsRange[0] = -8192;
+    limits->viewportBoundsRange[1] = 8191;
+    limits->viewportSubPixelBits = 0;
+    limits->minMemoryMapAlignment = 64;
+    limits->minTexelBufferOffsetAlignment = 16;
+    limits->minUniformBufferOffsetAlignment = 16;
+    limits->minStorageBufferOffsetAlignment = 16;
+    limits->minTexelOffset = -8;
+    limits->maxTexelOffset = 7;
+    limits->minTexelGatherOffset = -8;
+    limits->maxTexelGatherOffset = 7;
+    limits->minInterpolationOffset = 0.0f;
+    limits->maxInterpolationOffset = 0.5f;
+    limits->subPixelInterpolationOffsetBits = 4;
+    limits->maxFramebufferWidth = 4096;
+    limits->maxFramebufferHeight = 4096;
+    limits->maxFramebufferLayers = 256;
+    limits->framebufferColorSampleCounts = 0x7F;
+    limits->framebufferDepthSampleCounts = 0x7F;
+    limits->framebufferStencilSampleCounts = 0x7F;
+    limits->framebufferNoAttachmentsSampleCounts = 0x7F;
+    limits->maxColorAttachments = 4;
+    limits->sampledImageColorSampleCounts = 0x7F;
+    limits->sampledImageIntegerSampleCounts = 0x7F;
+    limits->sampledImageDepthSampleCounts = 0x7F;
+    limits->sampledImageStencilSampleCounts = 0x7F;
+    limits->storageImageSampleCounts = 0x7F;
+    limits->maxSampleMaskWords = 1;
+    limits->timestampComputeAndGraphics = VK_TRUE;
+    limits->timestampPeriod = 1;
+    limits->maxClipDistances = 8;
+    limits->maxCullDistances = 8;
+    limits->maxCombinedClipAndCullDistances = 8;
+    limits->discreteQueuePriorities = 2;
+    limits->pointSizeRange[0] = 1.0f;
+    limits->pointSizeRange[1] = 64.0f;
+    limits->lineWidthRange[0] = 1.0f;
+    limits->lineWidthRange[1] = 8.0f;
+    limits->pointSizeGranularity = 1.0f;
+    limits->lineWidthGranularity = 1.0f;
+    limits->strictLines = VK_TRUE;
+    limits->standardSampleLocations = VK_TRUE;
+    limits->optimalBufferCopyOffsetAlignment = 1;
+    limits->optimalBufferCopyRowPitchAlignment = 1;
+    limits->nonCoherentAtomSize = 256;
+
+    return *limits;
+}
+
+void SetBoolArrayTrue(VkBool32* bool_array, uint32_t num_bools)
+{
+    for (uint32_t i = 0; i < num_bools; ++i) {
+        bool_array[i] = VK_TRUE;
+    }
+}
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateInstance(
+    const VkInstanceCreateInfo*                 pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkInstance*                                 pInstance)
+{
+
+    // TODO: If loader ver <=4 ICD must fail with VK_ERROR_INCOMPATIBLE_DRIVER for all vkCreateInstance calls with
+    //  apiVersion set to > Vulkan 1.0 because the loader is still at interface version <= 4. Otherwise, the
+    //  ICD should behave as normal.
+    if (loader_interface_version <= 4) {
+        return VK_ERROR_INCOMPATIBLE_DRIVER;
+    }
+    *pInstance = (VkInstance)CreateDispObjHandle();
+    // TODO: If emulating specific device caps, will need to add intelligence here
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroyInstance(
+    VkInstance                                  instance,
+    const VkAllocationCallbacks*                pAllocator)
+{
+
+    // Destroy physical device
+    DestroyDispObjHandle((void*)physical_device);
+
+    DestroyDispObjHandle((void*)instance);
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDevices(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceCount,
+    VkPhysicalDevice*                           pPhysicalDevices)
+{
+    if (pPhysicalDevices) {
+        if (!physical_device) {
+            physical_device = (VkPhysicalDevice)CreateDispObjHandle();
+        }
+        *pPhysicalDevices = physical_device;
+    } else {
+        *pPhysicalDeviceCount = 1;
+    }
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFeatures(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures*                   pFeatures)
+{
+    uint32_t num_bools = sizeof(VkPhysicalDeviceFeatures) / sizeof(VkBool32);
+    VkBool32 *bool_array = &pFeatures->robustBufferAccess;
+    SetBoolArrayTrue(bool_array, num_bools);
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties*                         pFormatProperties)
+{
+    if (VK_FORMAT_UNDEFINED == format) {
+        *pFormatProperties = { 0x0, 0x0, 0x0 };
+    } else {
+        // TODO: Just returning full support for everything initially
+        *pFormatProperties = { 0x00FFFFFF, 0x00FFFFFF, 0x00FFFFFF };
+    }
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageCreateFlags                          flags,
+    VkImageFormatProperties*                    pImageFormatProperties)
+{
+    // A hardcoded unsupported format
+    if (format == VK_FORMAT_E5B9G9R9_UFLOAT_PACK32) {
+        return VK_ERROR_FORMAT_NOT_SUPPORTED;
+    }
+
+    // TODO: Just hard-coding some values for now
+    // TODO: If tiling is linear, limit the mips, levels, & sample count
+    if (VK_IMAGE_TILING_LINEAR == tiling) {
+        *pImageFormatProperties = { { 4096, 4096, 256 }, 1, 1, VK_SAMPLE_COUNT_1_BIT, 4294967296 };
+    } else {
+        // We hard-code support for all sample counts except 64 bits.
+        *pImageFormatProperties = { { 4096, 4096, 256 }, 12, 256, 0x7F & ~VK_SAMPLE_COUNT_64_BIT, 4294967296 };
+    }
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties*                 pProperties)
+{
+    // TODO: Just hard-coding some values for now
+    pProperties->apiVersion = VK_API_VERSION_1_0;
+    pProperties->driverVersion = 1;
+    pProperties->vendorID = 0xba5eba11;
+    pProperties->deviceID = 0xf005ba11;
+    pProperties->deviceType = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU;
+    //std::string devName = "Vulkan Mock Device";
+    strcpy(pProperties->deviceName, "Vulkan Mock Device");
+    pProperties->pipelineCacheUUID[0] = 18;
+    pProperties->limits = SetLimits(&pProperties->limits);
+    pProperties->sparseProperties = { VK_TRUE, VK_TRUE, VK_TRUE, VK_TRUE, VK_TRUE };
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyProperties(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties*                    pQueueFamilyProperties)
+{
+    if (!pQueueFamilyProperties) {
+        *pQueueFamilyPropertyCount = 1;
+    } else {
+        if (*pQueueFamilyPropertyCount) {
+            pQueueFamilyProperties[0].queueFlags = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT | VK_QUEUE_SPARSE_BINDING_BIT;
+            pQueueFamilyProperties[0].queueCount = 1;
+            pQueueFamilyProperties[0].timestampValidBits = 0;
+            pQueueFamilyProperties[0].minImageTransferGranularity = {1,1,1};
+        }
+    }
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMemoryProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties*           pMemoryProperties)
+{
+    pMemoryProperties->memoryTypeCount = 2;
+    pMemoryProperties->memoryTypes[0].propertyFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+    pMemoryProperties->memoryTypes[0].heapIndex = 0;
+    pMemoryProperties->memoryTypes[1].propertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+    pMemoryProperties->memoryTypes[1].heapIndex = 1;
+    pMemoryProperties->memoryHeapCount = 2;
+    pMemoryProperties->memoryHeaps[0].flags = 0;
+    pMemoryProperties->memoryHeaps[0].size = 8000000000;
+    pMemoryProperties->memoryHeaps[1].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT;
+    pMemoryProperties->memoryHeaps[1].size = 8000000000;
+}
+
+static VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(
+    VkInstance                                  instance,
+    const char*                                 pName)
+{
+
+    if (!negotiate_loader_icd_interface_called) {
+        loader_interface_version = 0;
+    }
+    const auto &item = name_to_funcptr_map.find(pName);
+    if (item != name_to_funcptr_map.end()) {
+        return reinterpret_cast<PFN_vkVoidFunction>(item->second);
+    }
+    // Mock should intercept all functions so if we get here just return null
+    return nullptr;
+}
+
+static VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(
+    VkDevice                                    device,
+    const char*                                 pName)
+{
+
+    return GetInstanceProcAddr(nullptr, pName);
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateDevice(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDeviceCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDevice*                                   pDevice)
+{
+
+    *pDevice = (VkDevice)CreateDispObjHandle();
+    // TODO: If emulating specific device caps, will need to add intelligence here
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroyDevice(
+    VkDevice                                    device,
+    const VkAllocationCallbacks*                pAllocator)
+{
+
+    unique_lock_t lock(global_lock);
+    // First destroy sub-device objects
+    // Destroy Queues
+    for (auto dev_queue_map_pair : queue_map) {
+        for (auto queue_family_map_pair : queue_map[dev_queue_map_pair.first]) {
+            for (auto index_queue_pair : queue_map[dev_queue_map_pair.first][queue_family_map_pair.first]) {
+                DestroyDispObjHandle((void*)index_queue_pair.second);
+            }
+        }
+    }
+    queue_map.clear();
+    // Now destroy device
+    DestroyDispObjHandle((void*)device);
+    // TODO: If emulating specific device caps, will need to add intelligence here
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(
+    const char*                                 pLayerName,
+    uint32_t*                                   pPropertyCount,
+    VkExtensionProperties*                      pProperties)
+{
+
+    // If requesting number of extensions, return that
+    if (!pLayerName) {
+        if (!pProperties) {
+            *pPropertyCount = (uint32_t)instance_extension_map.size();
+        } else {
+            uint32_t i = 0;
+            for (const auto &name_ver_pair : instance_extension_map) {
+                if (i == *pPropertyCount) {
+                    break;
+                }
+                std::strncpy(pProperties[i].extensionName, name_ver_pair.first.c_str(), sizeof(pProperties[i].extensionName));
+                pProperties[i].extensionName[sizeof(pProperties[i].extensionName) - 1] = 0;
+                pProperties[i].specVersion = name_ver_pair.second;
+                ++i;
+            }
+            if (i != instance_extension_map.size()) {
+                return VK_INCOMPLETE;
+            }
+        }
+    }
+    // If requesting extension properties, fill in data struct for number of extensions
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceExtensionProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const char*                                 pLayerName,
+    uint32_t*                                   pPropertyCount,
+    VkExtensionProperties*                      pProperties)
+{
+
+    // If requesting number of extensions, return that
+    if (!pLayerName) {
+        if (!pProperties) {
+            *pPropertyCount = (uint32_t)device_extension_map.size();
+        } else {
+            uint32_t i = 0;
+            for (const auto &name_ver_pair : device_extension_map) {
+                if (i == *pPropertyCount) {
+                    break;
+                }
+                std::strncpy(pProperties[i].extensionName, name_ver_pair.first.c_str(), sizeof(pProperties[i].extensionName));
+                pProperties[i].extensionName[sizeof(pProperties[i].extensionName) - 1] = 0;
+                pProperties[i].specVersion = name_ver_pair.second;
+                ++i;
+            }
+            if (i != device_extension_map.size()) {
+                return VK_INCOMPLETE;
+            }
+        }
+    }
+    // If requesting extension properties, fill in data struct for number of extensions
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(
+    uint32_t*                                   pPropertyCount,
+    VkLayerProperties*                          pProperties)
+{
+
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkLayerProperties*                          pProperties)
+{
+
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetDeviceQueue(
+    VkDevice                                    device,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t                                    queueIndex,
+    VkQueue*                                    pQueue)
+{
+    unique_lock_t lock(global_lock);
+    auto queue = queue_map[device][queueFamilyIndex][queueIndex];
+    if (queue) {
+        *pQueue = queue;
+    } else {
+        *pQueue = queue_map[device][queueFamilyIndex][queueIndex] = (VkQueue)CreateDispObjHandle();
+    }
+    // TODO: If emulating specific device caps, will need to add intelligence here
+    return;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL QueueSubmit(
+    VkQueue                                     queue,
+    uint32_t                                    submitCount,
+    const VkSubmitInfo*                         pSubmits,
+    VkFence                                     fence)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL QueueWaitIdle(
+    VkQueue                                     queue)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL DeviceWaitIdle(
+    VkDevice                                    device)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL AllocateMemory(
+    VkDevice                                    device,
+    const VkMemoryAllocateInfo*                 pAllocateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDeviceMemory*                             pMemory)
+{
+    unique_lock_t lock(global_lock);
+    *pMemory = (VkDeviceMemory)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL FreeMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL MapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                offset,
+    VkDeviceSize                                size,
+    VkMemoryMapFlags                            flags,
+    void**                                      ppData)
+{
+    unique_lock_t lock(global_lock);
+    // TODO: Just hard-coding 64k whole size for now
+    if (VK_WHOLE_SIZE == size)
+        size = 0x10000;
+    void* map_addr = malloc((size_t)size);
+    mapped_memory_map[memory].push_back(map_addr);
+    *ppData = map_addr;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL UnmapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory)
+{
+    unique_lock_t lock(global_lock);
+    for (auto map_addr : mapped_memory_map[memory]) {
+        free(map_addr);
+    }
+    mapped_memory_map.erase(memory);
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL FlushMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL InvalidateMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetDeviceMemoryCommitment(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize*                               pCommittedMemoryInBytes)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetBufferMemoryRequirements(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkMemoryRequirements*                       pMemoryRequirements)
+{
+    // TODO: Just hard-coding reqs for now
+    pMemoryRequirements->size = 4096;
+    pMemoryRequirements->alignment = 1;
+    pMemoryRequirements->memoryTypeBits = 0xFFFF;
+    // Return a better size based on the buffer size from the create info.
+    auto d_iter = buffer_map.find(device);
+    if (d_iter != buffer_map.end()) {
+        auto iter = d_iter->second.find(buffer);
+        if (iter != d_iter->second.end()) {
+            pMemoryRequirements->size = ((iter->second.size + 4095) / 4096) * 4096;
+        }
+    }
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetImageMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkMemoryRequirements*                       pMemoryRequirements)
+{
+    // TODO: Just hard-coding reqs for now
+    pMemoryRequirements->size = 4096;
+    pMemoryRequirements->alignment = 1;
+
+    // Here we hard-code that the memory type at index 3 doesn't support this image.
+    pMemoryRequirements->memoryTypeBits = 0xFFFF & ~(0x1 << 3);
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetImageSparseMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements*            pSparseMemoryRequirements)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceSparseImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkSampleCountFlagBits                       samples,
+    VkImageUsageFlags                           usage,
+    VkImageTiling                               tiling,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties*              pProperties)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL QueueBindSparse(
+    VkQueue                                     queue,
+    uint32_t                                    bindInfoCount,
+    const VkBindSparseInfo*                     pBindInfo,
+    VkFence                                     fence)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateFence(
+    VkDevice                                    device,
+    const VkFenceCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence)
+{
+    unique_lock_t lock(global_lock);
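+    // As with every non-dispatchable object in this mock, the fence is just a
+    // monotonically increasing fake handle with no backing state.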
+    *pFence = (VkFence)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroyFence(
+    VkDevice                                    device,
+    VkFence                                     fence,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL ResetFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetFenceStatus(
+    VkDevice                                    device,
+    VkFence                                     fence)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL WaitForFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences,
+    VkBool32                                    waitAll,
+    uint64_t                                    timeout)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateSemaphore(
+    VkDevice                                    device,
+    const VkSemaphoreCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSemaphore*                                pSemaphore)
+{
+    unique_lock_t lock(global_lock);
+    *pSemaphore = (VkSemaphore)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroySemaphore(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateEvent(
+    VkDevice                                    device,
+    const VkEventCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkEvent*                                    pEvent)
+{
+    unique_lock_t lock(global_lock);
+    *pEvent = (VkEvent)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroyEvent(
+    VkDevice                                    device,
+    VkEvent                                     event,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetEventStatus(
+    VkDevice                                    device,
+    VkEvent                                     event)
+{
+//Not a CREATE or DESTROY function
+    return VK_EVENT_SET;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL SetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL ResetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateQueryPool(
+    VkDevice                                    device,
+    const VkQueryPoolCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkQueryPool*                                pQueryPool)
+{
+    unique_lock_t lock(global_lock);
+    *pQueryPool = (VkQueryPool)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroyQueryPool(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetQueryPoolResults(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    size_t                                      dataSize,
+    void*                                       pData,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateBuffer(
+    VkDevice                                    device,
+    const VkBufferCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBuffer*                                   pBuffer)
+{
+    unique_lock_t lock(global_lock);
+    *pBuffer = (VkBuffer)global_unique_handle++;
+    buffer_map[device][*pBuffer] = *pCreateInfo;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroyBuffer(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    const VkAllocationCallbacks*                pAllocator)
+{
+    unique_lock_t lock(global_lock);
+    buffer_map[device].erase(buffer);
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateBufferView(
+    VkDevice                                    device,
+    const VkBufferViewCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBufferView*                               pView)
+{
+    unique_lock_t lock(global_lock);
+    *pView = (VkBufferView)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroyBufferView(
+    VkDevice                                    device,
+    VkBufferView                                bufferView,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateImage(
+    VkDevice                                    device,
+    const VkImageCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImage*                                    pImage)
+{
+    unique_lock_t lock(global_lock);
+    *pImage = (VkImage)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroyImage(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetImageSubresourceLayout(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkImageSubresource*                   pSubresource,
+    VkSubresourceLayout*                        pLayout)
+{
+    // Callers compute memory offsets from pLayout and there is no return code to
+    // flag failure, so hand back safe, zero-initialized values.
+    *pLayout = VkSubresourceLayout(); // Value-initialization zeroes every field.
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateImageView(
+    VkDevice                                    device,
+    const VkImageViewCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImageView*                                pView)
+{
+    unique_lock_t lock(global_lock);
+    *pView = (VkImageView)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroyImageView(
+    VkDevice                                    device,
+    VkImageView                                 imageView,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateShaderModule(
+    VkDevice                                    device,
+    const VkShaderModuleCreateInfo*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkShaderModule*                             pShaderModule)
+{
+    unique_lock_t lock(global_lock);
+    *pShaderModule = (VkShaderModule)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroyShaderModule(
+    VkDevice                                    device,
+    VkShaderModule                              shaderModule,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineCache(
+    VkDevice                                    device,
+    const VkPipelineCacheCreateInfo*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineCache*                            pPipelineCache)
+{
+    unique_lock_t lock(global_lock);
+    *pPipelineCache = (VkPipelineCache)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroyPipelineCache(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPipelineCacheData(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    size_t*                                     pDataSize,
+    void*                                       pData)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL MergePipelineCaches(
+    VkDevice                                    device,
+    VkPipelineCache                             dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkPipelineCache*                      pSrcCaches)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateGraphicsPipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkGraphicsPipelineCreateInfo*         pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines)
+{
+    unique_lock_t lock(global_lock);
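+    // Hand out one fake pipeline handle per create info; the create infos themselves
+    // are not inspected.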
+    for (uint32_t i = 0; i < createInfoCount; ++i) {
+        pPipelines[i] = (VkPipeline)global_unique_handle++;
+    }
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateComputePipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkComputePipelineCreateInfo*          pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines)
+{
+    unique_lock_t lock(global_lock);
+    for (uint32_t i = 0; i < createInfoCount; ++i) {
+        pPipelines[i] = (VkPipeline)global_unique_handle++;
+    }
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroyPipeline(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineLayout(
+    VkDevice                                    device,
+    const VkPipelineLayoutCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineLayout*                           pPipelineLayout)
+{
+    unique_lock_t lock(global_lock);
+    *pPipelineLayout = (VkPipelineLayout)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroyPipelineLayout(
+    VkDevice                                    device,
+    VkPipelineLayout                            pipelineLayout,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateSampler(
+    VkDevice                                    device,
+    const VkSamplerCreateInfo*                  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSampler*                                  pSampler)
+{
+    unique_lock_t lock(global_lock);
+    *pSampler = (VkSampler)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroySampler(
+    VkDevice                                    device,
+    VkSampler                                   sampler,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorSetLayout(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorSetLayout*                      pSetLayout)
+{
+    unique_lock_t lock(global_lock);
+    *pSetLayout = (VkDescriptorSetLayout)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroyDescriptorSetLayout(
+    VkDevice                                    device,
+    VkDescriptorSetLayout                       descriptorSetLayout,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorPool(
+    VkDevice                                    device,
+    const VkDescriptorPoolCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorPool*                           pDescriptorPool)
+{
+    unique_lock_t lock(global_lock);
+    *pDescriptorPool = (VkDescriptorPool)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroyDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL ResetDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    VkDescriptorPoolResetFlags                  flags)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL AllocateDescriptorSets(
+    VkDevice                                    device,
+    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
+    VkDescriptorSet*                            pDescriptorSets)
+{
+    unique_lock_t lock(global_lock);
+    for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; ++i) {
+        pDescriptorSets[i] = (VkDescriptorSet)global_unique_handle++;
+    }
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL FreeDescriptorSets(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets)
+{
+//Destroy object
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSets(
+    VkDevice                                    device,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites,
+    uint32_t                                    descriptorCopyCount,
+    const VkCopyDescriptorSet*                  pDescriptorCopies)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateFramebuffer(
+    VkDevice                                    device,
+    const VkFramebufferCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFramebuffer*                              pFramebuffer)
+{
+    unique_lock_t lock(global_lock);
+    *pFramebuffer = (VkFramebuffer)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroyFramebuffer(
+    VkDevice                                    device,
+    VkFramebuffer                               framebuffer,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateRenderPass(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass)
+{
+    unique_lock_t lock(global_lock);
+    *pRenderPass = (VkRenderPass)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroyRenderPass(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetRenderAreaGranularity(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    VkExtent2D*                                 pGranularity)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateCommandPool(
+    VkDevice                                    device,
+    const VkCommandPoolCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkCommandPool*                              pCommandPool)
+{
+    unique_lock_t lock(global_lock);
+    *pCommandPool = (VkCommandPool)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroyCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL ResetCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolResetFlags                     flags)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL AllocateCommandBuffers(
+    VkDevice                                    device,
+    const VkCommandBufferAllocateInfo*          pAllocateInfo,
+    VkCommandBuffer*                            pCommandBuffers)
+{
+    unique_lock_t lock(global_lock);
+    for (uint32_t i = 0; i < pAllocateInfo->commandBufferCount; ++i) {
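+        // Command buffers are dispatchable objects, so they get dispatchable handles
+        // rather than the plain integer handles used elsewhere.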
+        pCommandBuffers[i] = (VkCommandBuffer)CreateDispObjHandle();
+    }
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL FreeCommandBuffers(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL BeginCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    const VkCommandBufferBeginInfo*             pBeginInfo)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL EndCommandBuffer(
+    VkCommandBuffer                             commandBuffer)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL ResetCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkCommandBufferResetFlags                   flags)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdBindPipeline(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipeline                                  pipeline)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetViewport(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewport*                           pViewports)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetScissor(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstScissor,
+    uint32_t                                    scissorCount,
+    const VkRect2D*                             pScissors)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetLineWidth(
+    VkCommandBuffer                             commandBuffer,
+    float                                       lineWidth)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetDepthBias(
+    VkCommandBuffer                             commandBuffer,
+    float                                       depthBiasConstantFactor,
+    float                                       depthBiasClamp,
+    float                                       depthBiasSlopeFactor)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetBlendConstants(
+    VkCommandBuffer                             commandBuffer,
+    const float                                 blendConstants[4])
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetDepthBounds(
+    VkCommandBuffer                             commandBuffer,
+    float                                       minDepthBounds,
+    float                                       maxDepthBounds)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetStencilCompareMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    compareMask)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetStencilWriteMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    writeMask)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetStencilReference(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    reference)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdBindDescriptorSets(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    firstSet,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets,
+    uint32_t                                    dynamicOffsetCount,
+    const uint32_t*                             pDynamicOffsets)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdBindIndexBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkIndexType                                 indexType)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdBindVertexBuffers(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdDraw(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    vertexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstVertex,
+    uint32_t                                    firstInstance)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdDrawIndexed(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    indexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstIndex,
+    int32_t                                     vertexOffset,
+    uint32_t                                    firstInstance)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdDrawIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdDispatch(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdDispatchIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdCopyBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferCopy*                         pRegions)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdCopyImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageCopy*                          pRegions)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdBlitImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageBlit*                          pRegions,
+    VkFilter                                    filter)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdCopyBufferToImage(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdCopyImageToBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdUpdateBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                dataSize,
+    const void*                                 pData)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdFillBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                size,
+    uint32_t                                    data)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdClearColorImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearColorValue*                    pColor,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdClearDepthStencilImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearDepthStencilValue*             pDepthStencil,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdClearAttachments(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    attachmentCount,
+    const VkClearAttachment*                    pAttachments,
+    uint32_t                                    rectCount,
+    const VkClearRect*                          pRects)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdResolveImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageResolve*                       pRegions)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdResetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdWaitEvents(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    eventCount,
+    const VkEvent*                              pEvents,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdPipelineBarrier(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    VkDependencyFlags                           dependencyFlags,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdBeginQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdEndQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdResetQueryPool(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdWriteTimestamp(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdCopyQueryPoolResults(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdPushConstants(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineLayout                            layout,
+    VkShaderStageFlags                          stageFlags,
+    uint32_t                                    offset,
+    uint32_t                                    size,
+    const void*                                 pValues)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdBeginRenderPass(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    VkSubpassContents                           contents)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdNextSubpass(
+    VkCommandBuffer                             commandBuffer,
+    VkSubpassContents                           contents)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdEndRenderPass(
+    VkCommandBuffer                             commandBuffer)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdExecuteCommands(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers)
+{
+//Not a CREATE or DESTROY function
+}
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceVersion(
+    uint32_t*                                   pApiVersion)
+{
+//Not a CREATE or DESTROY function
+    // The out parameter must still be written; claim Vulkan 1.1, the newest core
+    // version these stubs cover.
+    *pApiVersion = VK_API_VERSION_1_1;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetDeviceGroupPeerMemoryFeatures(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetDeviceMask(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdDispatchBase(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDeviceGroups(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetImageMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements)
+{
+    GetImageMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetBufferMemoryRequirements2(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements)
+{
+    GetBufferMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetImageSparseMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFeatures2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures2*                  pFeatures)
+{
+    GetPhysicalDeviceFeatures2KHR(physicalDevice, pFeatures);
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties2*                pProperties)
+{
+    GetPhysicalDeviceProperties2KHR(physicalDevice, pProperties);
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties2*                        pFormatProperties)
+{
+    GetPhysicalDeviceFormatProperties2KHR(physicalDevice, format, pFormatProperties);
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceImageFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceImageFormatInfo2*     pImageFormatInfo,
+    VkImageFormatProperties2*                   pImageFormatProperties)
+{
+    return GetPhysicalDeviceImageFormatProperties2KHR(physicalDevice, pImageFormatInfo, pImageFormatProperties);
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties2*                   pQueueFamilyProperties)
+{
+    GetPhysicalDeviceQueueFamilyProperties2KHR(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMemoryProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties2*          pMemoryProperties)
+{
+    GetPhysicalDeviceMemoryProperties2KHR(physicalDevice, pMemoryProperties);
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceSparseImageFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties2*             pProperties)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL TrimCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetDeviceQueue2(
+    VkDevice                                    device,
+    const VkDeviceQueueInfo2*                   pQueueInfo,
+    VkQueue*                                    pQueue)
+{
+    GetDeviceQueue(device, pQueueInfo->queueFamilyIndex, pQueueInfo->queueIndex, pQueue);
+    // TODO: Add further support for GetDeviceQueue2 features
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateSamplerYcbcrConversion(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion)
+{
+    unique_lock_t lock(global_lock);
+    *pYcbcrConversion = (VkSamplerYcbcrConversion)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroySamplerYcbcrConversion(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate)
+{
+    unique_lock_t lock(global_lock);
+    *pDescriptorUpdateTemplate = (VkDescriptorUpdateTemplate)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroyDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSetWithTemplate(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalBufferProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalBufferInfo*   pExternalBufferInfo,
+    VkExternalBufferProperties*                 pExternalBufferProperties)
+{
+    // Hard-code support for all handle types and features
+    pExternalBufferProperties->externalMemoryProperties.externalMemoryFeatures = 0x7;
+    pExternalBufferProperties->externalMemoryProperties.exportFromImportedHandleTypes = 0x1FF;
+    pExternalBufferProperties->externalMemoryProperties.compatibleHandleTypes = 0x1FF;
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalFenceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalFenceInfo*    pExternalFenceInfo,
+    VkExternalFenceProperties*                  pExternalFenceProperties)
+{
+    // Hard-code support for all handle types and features
+    pExternalFenceProperties->exportFromImportedHandleTypes = 0xF;
+    pExternalFenceProperties->compatibleHandleTypes = 0xF;
+    pExternalFenceProperties->externalFenceFeatures = 0x3;
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalSemaphoreProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
+    VkExternalSemaphoreProperties*              pExternalSemaphoreProperties)
+{
+    // Hard-code support for all handle types and features
+    pExternalSemaphoreProperties->exportFromImportedHandleTypes = 0x1F;
+    pExternalSemaphoreProperties->compatibleHandleTypes = 0x1F;
+    pExternalSemaphoreProperties->externalSemaphoreFeatures = 0x3;
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetDescriptorSetLayoutSupport(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport)
+{
+//Not a CREATE or DESTROY function
+}
+
+
+static VKAPI_ATTR void VKAPI_CALL DestroySurfaceKHR(
+    VkInstance                                  instance,
+    VkSurfaceKHR                                surface,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    VkSurfaceKHR                                surface,
+    VkBool32*                                   pSupported)
+{
+    // Currently say that all surface/queue combos are supported
+    *pSupported = VK_TRUE;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilitiesKHR*                   pSurfaceCapabilities)
+{
+    // In general, report the maximum supported capabilities for the requested surface.
+    pSurfaceCapabilities->minImageCount = 1;
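+    // A maxImageCount of 0 means the surface places no limit on the number of images.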
+    pSurfaceCapabilities->maxImageCount = 0;
+    pSurfaceCapabilities->currentExtent.width = 0xFFFFFFFF;
+    pSurfaceCapabilities->currentExtent.height = 0xFFFFFFFF;
+    pSurfaceCapabilities->minImageExtent.width = 1;
+    pSurfaceCapabilities->minImageExtent.height = 1;
+    pSurfaceCapabilities->maxImageExtent.width = 3840;
+    pSurfaceCapabilities->maxImageExtent.height = 2160;
+    pSurfaceCapabilities->maxImageArrayLayers = 128;
+    pSurfaceCapabilities->supportedTransforms = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR |
+                                                VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR |
+                                                VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR |
+                                                VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR |
+                                                VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR |
+                                                VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR |
+                                                VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR |
+                                                VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR |
+                                                VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR;
+    pSurfaceCapabilities->currentTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
+    pSurfaceCapabilities->supportedCompositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR |
+                                                    VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR |
+                                                    VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR |
+                                                    VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR;
+    pSurfaceCapabilities->supportedUsageFlags = VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
+                                                VK_IMAGE_USAGE_TRANSFER_DST_BIT |
+                                                VK_IMAGE_USAGE_SAMPLED_BIT |
+                                                VK_IMAGE_USAGE_STORAGE_BIT |
+                                                VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
+                                                VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT |
+                                                VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT |
+                                                VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceFormatsKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormatKHR*                         pSurfaceFormats)
+{
+    // Currently always say that RGBA8 & BGRA8 are supported
+    if (!pSurfaceFormats) {
+        *pSurfaceFormatCount = 2;
+    } else {
+        // Intentionally fall through, filling only as many formats as were requested.
+        switch(*pSurfaceFormatCount) {
+        case 2:
+            pSurfaceFormats[1].format = VK_FORMAT_R8G8B8A8_UNORM;
+            pSurfaceFormats[1].colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
+            // fall through
+        default:
+            pSurfaceFormats[0].format = VK_FORMAT_B8G8R8A8_UNORM;
+            pSurfaceFormats[0].colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
+            break;
+        }
+    }
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfacePresentModesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes)
+{
+    // Currently always say that all present modes are supported
+    if (!pPresentModes) {
+        *pPresentModeCount = 6;
+    } else {
+        // Intentionally fall through, filling only as many present modes as were requested.
+        switch(*pPresentModeCount) {
+        case 6:
+            pPresentModes[5] = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR;
+            // fall through
+        case 5:
+            pPresentModes[4] = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR;
+            // fall through
+        case 4:
+            pPresentModes[3] = VK_PRESENT_MODE_FIFO_RELAXED_KHR;
+            // fall through
+        case 3:
+            pPresentModes[2] = VK_PRESENT_MODE_FIFO_KHR;
+            // fall through
+        case 2:
+            pPresentModes[1] = VK_PRESENT_MODE_MAILBOX_KHR;
+            // fall through
+        default:
+            pPresentModes[0] = VK_PRESENT_MODE_IMMEDIATE_KHR;
+            break;
+        }
+    }
+    return VK_SUCCESS;
+}
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateSwapchainKHR(
+    VkDevice                                    device,
+    const VkSwapchainCreateInfoKHR*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchain)
+{
+    unique_lock_t lock(global_lock);
+    *pSwapchain = (VkSwapchainKHR)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroySwapchainKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainImagesKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pSwapchainImageCount,
+    VkImage*                                    pSwapchainImages)
+{
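+    // Standard two-call idiom: the count query reports a single image, and the fill
+    // call hands out one fake image handle.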
+    if (!pSwapchainImages) {
+        *pSwapchainImageCount = 1;
+    } else if (*pSwapchainImageCount > 0) {
+        pSwapchainImages[0] = (VkImage)global_unique_handle++;
+        if (*pSwapchainImageCount != 1) {
+            return VK_INCOMPLETE;
+        }
+    }
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL AcquireNextImageKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint64_t                                    timeout,
+    VkSemaphore                                 semaphore,
+    VkFence                                     fence,
+    uint32_t*                                   pImageIndex)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL QueuePresentKHR(
+    VkQueue                                     queue,
+    const VkPresentInfoKHR*                     pPresentInfo)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetDeviceGroupPresentCapabilitiesKHR(
+    VkDevice                                    device,
+    VkDeviceGroupPresentCapabilitiesKHR*        pDeviceGroupPresentCapabilities)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetDeviceGroupSurfacePresentModesKHR(
+    VkDevice                                    device,
+    VkSurfaceKHR                                surface,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDevicePresentRectanglesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pRectCount,
+    VkRect2D*                                   pRects)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL AcquireNextImage2KHR(
+    VkDevice                                    device,
+    const VkAcquireNextImageInfoKHR*            pAcquireInfo,
+    uint32_t*                                   pImageIndex)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceDisplayPropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPropertiesKHR*                     pProperties)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceDisplayPlanePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPlanePropertiesKHR*                pProperties)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetDisplayPlaneSupportedDisplaysKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    planeIndex,
+    uint32_t*                                   pDisplayCount,
+    VkDisplayKHR*                               pDisplays)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetDisplayModePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModePropertiesKHR*                 pProperties)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateDisplayModeKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    const VkDisplayModeCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDisplayModeKHR*                           pMode)
+{
+    unique_lock_t lock(global_lock);
+    *pMode = (VkDisplayModeKHR)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetDisplayPlaneCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayModeKHR                            mode,
+    uint32_t                                    planeIndex,
+    VkDisplayPlaneCapabilitiesKHR*              pCapabilities)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateDisplayPlaneSurfaceKHR(
+    VkInstance                                  instance,
+    const VkDisplaySurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    unique_lock_t lock(global_lock);
+    *pSurface = (VkSurfaceKHR)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateSharedSwapchainsKHR(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainCreateInfoKHR*             pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchains)
+{
+    unique_lock_t lock(global_lock);
+    for (uint32_t i = 0; i < swapchainCount; ++i) {
+        pSwapchains[i] = (VkSwapchainKHR)global_unique_handle++;
+    }
+    return VK_SUCCESS;
+}
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateXlibSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXlibSurfaceCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    unique_lock_t lock(global_lock);
+    *pSurface = (VkSurfaceKHR)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceXlibPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    Display*                                    dpy,
+    VisualID                                    visualID)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+#endif /* VK_USE_PLATFORM_XLIB_KHR */
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateXcbSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXcbSurfaceCreateInfoKHR*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    unique_lock_t lock(global_lock);
+    *pSurface = (VkSurfaceKHR)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceXcbPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    xcb_connection_t*                           connection,
+    xcb_visualid_t                              visual_id)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+#endif /* VK_USE_PLATFORM_XCB_KHR */
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateWaylandSurfaceKHR(
+    VkInstance                                  instance,
+    const VkWaylandSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    unique_lock_t lock(global_lock);
+    *pSurface = (VkSurfaceKHR)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceWaylandPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    struct wl_display*                          display)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+#endif /* VK_USE_PLATFORM_WAYLAND_KHR */
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateAndroidSurfaceKHR(
+    VkInstance                                  instance,
+    const VkAndroidSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    unique_lock_t lock(global_lock);
+    *pSurface = (VkSurfaceKHR)global_unique_handle++;
+    return VK_SUCCESS;
+}
+#endif /* VK_USE_PLATFORM_ANDROID_KHR */
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateWin32SurfaceKHR(
+    VkInstance                                  instance,
+    const VkWin32SurfaceCreateInfoKHR*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    unique_lock_t lock(global_lock);
+    *pSurface = (VkSurfaceKHR)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceWin32PresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+#endif /* VK_USE_PLATFORM_WIN32_KHR */
+
+
+
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFeatures2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures2*                  pFeatures)
+{
+    GetPhysicalDeviceFeatures(physicalDevice, &pFeatures->features);
+    uint32_t num_bools = 0; // Count number of VkBool32s in extension structs
+    VkBool32* feat_bools = nullptr;
+    const auto *desc_idx_features = lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>(pFeatures->pNext);
+    if (desc_idx_features) {
+        const auto bool_size = sizeof(VkPhysicalDeviceDescriptorIndexingFeaturesEXT) - offsetof(VkPhysicalDeviceDescriptorIndexingFeaturesEXT, shaderInputAttachmentArrayDynamicIndexing);
+        num_bools = bool_size/sizeof(VkBool32);
+        feat_bools = (VkBool32*)&desc_idx_features->shaderInputAttachmentArrayDynamicIndexing;
+        SetBoolArrayTrue(feat_bools, num_bools);
+    }
+    const auto *blendop_features = lvl_find_in_chain<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT>(pFeatures->pNext);
+    if (blendop_features) {
+        const auto bool_size = sizeof(VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT) - offsetof(VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT, advancedBlendCoherentOperations);
+        num_bools = bool_size/sizeof(VkBool32);
+        feat_bools = (VkBool32*)&blendop_features->advancedBlendCoherentOperations;
+        SetBoolArrayTrue(feat_bools, num_bools);
+    }
+}
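+
+// The byte arithmetic above assumes each features struct is laid out as
+// { sType, pNext, <contiguous run of VkBool32 members> }, so
+// (sizeof(Struct) - offsetof(Struct, firstBool)) / sizeof(VkBool32) counts the
+// feature booleans and SetBoolArrayTrue() can flip them all in one pass.
+// A minimal sketch of the same idea with a hypothetical struct (illustrative
+// only, not part of the mock ICD):
+//
+//     struct Feats { VkStructureType sType; void* pNext; VkBool32 a, b, c, d; };
+//     Feats feats = {};
+//     const size_t n = (sizeof(Feats) - offsetof(Feats, a)) / sizeof(VkBool32);
+//     SetBoolArrayTrue(&feats.a, n);  // n == 4 on typical layouts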
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties2*                pProperties)
+{
+    GetPhysicalDeviceProperties(physicalDevice, &pProperties->properties);
+    const auto *desc_idx_props = lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingPropertiesEXT>(pProperties->pNext);
+    if (desc_idx_props) {
+        VkPhysicalDeviceDescriptorIndexingPropertiesEXT* write_props = (VkPhysicalDeviceDescriptorIndexingPropertiesEXT*)desc_idx_props;
+        write_props->maxUpdateAfterBindDescriptorsInAllPools = 500000;
+        write_props->shaderUniformBufferArrayNonUniformIndexingNative = false;
+        write_props->shaderSampledImageArrayNonUniformIndexingNative = false;
+        write_props->shaderStorageBufferArrayNonUniformIndexingNative = false;
+        write_props->shaderStorageImageArrayNonUniformIndexingNative = false;
+        write_props->shaderInputAttachmentArrayNonUniformIndexingNative = false;
+        write_props->robustBufferAccessUpdateAfterBind = true;
+        write_props->quadDivergentImplicitLod = true;
+        write_props->maxPerStageDescriptorUpdateAfterBindSamplers = 500000;
+        write_props->maxPerStageDescriptorUpdateAfterBindUniformBuffers = 500000;
+        write_props->maxPerStageDescriptorUpdateAfterBindStorageBuffers = 500000;
+        write_props->maxPerStageDescriptorUpdateAfterBindSampledImages = 500000;
+        write_props->maxPerStageDescriptorUpdateAfterBindStorageImages = 500000;
+        write_props->maxPerStageDescriptorUpdateAfterBindInputAttachments = 500000;
+        write_props->maxPerStageUpdateAfterBindResources = 500000;
+        write_props->maxDescriptorSetUpdateAfterBindSamplers = 500000;
+        write_props->maxDescriptorSetUpdateAfterBindUniformBuffers = 96;
+        write_props->maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = 8;
+        write_props->maxDescriptorSetUpdateAfterBindStorageBuffers = 500000;
+        write_props->maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = 4;
+        write_props->maxDescriptorSetUpdateAfterBindSampledImages = 500000;
+        write_props->maxDescriptorSetUpdateAfterBindStorageImages = 500000;
+        write_props->maxDescriptorSetUpdateAfterBindInputAttachments = 500000;
+    }
+
+    const auto *push_descriptor_props = lvl_find_in_chain<VkPhysicalDevicePushDescriptorPropertiesKHR>(pProperties->pNext);
+    if (push_descriptor_props) {
+        VkPhysicalDevicePushDescriptorPropertiesKHR* write_props = (VkPhysicalDevicePushDescriptorPropertiesKHR*)push_descriptor_props;
+        write_props->maxPushDescriptors = 256;
+    }
+
+    const auto *depth_stencil_resolve_props = lvl_find_in_chain<VkPhysicalDeviceDepthStencilResolvePropertiesKHR>(pProperties->pNext);
+    if (depth_stencil_resolve_props) {
+        VkPhysicalDeviceDepthStencilResolvePropertiesKHR* write_props = (VkPhysicalDeviceDepthStencilResolvePropertiesKHR*)depth_stencil_resolve_props;
+        write_props->supportedDepthResolveModes = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR;
+        write_props->supportedStencilResolveModes = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR;
+    }
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties2*                        pFormatProperties)
+{
+    GetPhysicalDeviceFormatProperties(physicalDevice, format, &pFormatProperties->formatProperties);
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceImageFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceImageFormatInfo2*     pImageFormatInfo,
+    VkImageFormatProperties2*                   pImageFormatProperties)
+{
+    GetPhysicalDeviceImageFormatProperties(physicalDevice, pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling, pImageFormatInfo->usage, pImageFormatInfo->flags, &pImageFormatProperties->imageFormatProperties);
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties2*                   pQueueFamilyProperties)
+{
+    if (pQueueFamilyPropertyCount && pQueueFamilyProperties) {
+        GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, &pQueueFamilyProperties->queueFamilyProperties);
+    } else {
+        GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, nullptr);
+    }
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMemoryProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties2*          pMemoryProperties)
+{
+    GetPhysicalDeviceMemoryProperties(physicalDevice, &pMemoryProperties->memoryProperties);
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceSparseImageFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties2*             pProperties)
+{
+//Not a CREATE or DESTROY function
+}
+
+
+static VKAPI_ATTR void VKAPI_CALL GetDeviceGroupPeerMemoryFeaturesKHR(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetDeviceMaskKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdDispatchBaseKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ)
+{
+//Not a CREATE or DESTROY function
+}
+
+
+
+static VKAPI_ATTR void VKAPI_CALL TrimCommandPoolKHR(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags)
+{
+//Not a CREATE or DESTROY function
+}
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDeviceGroupsKHR(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalBufferPropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalBufferInfo*   pExternalBufferInfo,
+    VkExternalBufferProperties*                 pExternalBufferProperties)
+{
+    GetPhysicalDeviceExternalBufferProperties(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
+}
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetMemoryWin32HandleKHR(
+    VkDevice                                    device,
+    const VkMemoryGetWin32HandleInfoKHR*        pGetWin32HandleInfo,
+    HANDLE*                                     pHandle)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetMemoryWin32HandlePropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    HANDLE                                      handle,
+    VkMemoryWin32HandlePropertiesKHR*           pMemoryWin32HandleProperties)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+#endif /* VK_USE_PLATFORM_WIN32_KHR */
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetMemoryFdKHR(
+    VkDevice                                    device,
+    const VkMemoryGetFdInfoKHR*                 pGetFdInfo,
+    int*                                        pFd)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetMemoryFdPropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    int                                         fd,
+    VkMemoryFdPropertiesKHR*                    pMemoryFdProperties)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#endif /* VK_USE_PLATFORM_WIN32_KHR */
+
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalSemaphorePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
+    VkExternalSemaphoreProperties*              pExternalSemaphoreProperties)
+{
+    GetPhysicalDeviceExternalSemaphoreProperties(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
+}
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+static VKAPI_ATTR VkResult VKAPI_CALL ImportSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreWin32HandleInfoKHR*  pImportSemaphoreWin32HandleInfo)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetWin32HandleInfoKHR*     pGetWin32HandleInfo,
+    HANDLE*                                     pHandle)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+#endif /* VK_USE_PLATFORM_WIN32_KHR */
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL ImportSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreFdInfoKHR*           pImportSemaphoreFdInfo)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetFdInfoKHR*              pGetFdInfo,
+    int*                                        pFd)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdPushDescriptorSetKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdPushDescriptorSetWithTemplateKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    const void*                                 pData)
+{
+//Not a CREATE or DESTROY function
+}
+
+
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate)
+{
+    unique_lock_t lock(global_lock);
+    *pDescriptorUpdateTemplate = (VkDescriptorUpdateTemplate)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroyDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSetWithTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData)
+{
+//Not a CREATE or DESTROY function
+}
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateRenderPass2KHR(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo2KHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass)
+{
+    unique_lock_t lock(global_lock);
+    *pRenderPass = (VkRenderPass)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdBeginRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdNextSubpass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdEndRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo)
+{
+//Not a CREATE or DESTROY function
+}
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainStatusKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalFencePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalFenceInfo*    pExternalFenceInfo,
+    VkExternalFenceProperties*                  pExternalFenceProperties)
+{
+    GetPhysicalDeviceExternalFenceProperties(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
+}
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+static VKAPI_ATTR VkResult VKAPI_CALL ImportFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportFenceWin32HandleInfoKHR*      pImportFenceWin32HandleInfo)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkFenceGetWin32HandleInfoKHR*         pGetWin32HandleInfo,
+    HANDLE*                                     pHandle)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+#endif /* VK_USE_PLATFORM_WIN32_KHR */
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL ImportFenceFdKHR(
+    VkDevice                                    device,
+    const VkImportFenceFdInfoKHR*               pImportFenceFdInfo)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetFenceFdKHR(
+    VkDevice                                    device,
+    const VkFenceGetFdInfoKHR*                  pGetFdInfo,
+    int*                                        pFd)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t*                                   pCounterCount,
+    VkPerformanceCounterKHR*                    pCounters,
+    VkPerformanceCounterDescriptionKHR*         pCounterDescriptions)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkQueryPoolPerformanceCreateInfoKHR*  pPerformanceQueryCreateInfo,
+    uint32_t*                                   pNumPasses)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL AcquireProfilingLockKHR(
+    VkDevice                                    device,
+    const VkAcquireProfilingLockInfoKHR*        pInfo)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL ReleaseProfilingLockKHR(
+    VkDevice                                    device)
+{
+//Not a CREATE or DESTROY function
+}
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilities2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkSurfaceCapabilities2KHR*                  pSurfaceCapabilities)
+{
+    GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, pSurfaceInfo->surface, &pSurfaceCapabilities->surfaceCapabilities);
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceFormats2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormat2KHR*                        pSurfaceFormats)
+{
+    // Currently always say that RGBA8 & BGRA8 are supported
+    if (!pSurfaceFormats) {
+        *pSurfaceFormatCount = 2;
+    } else {
+        // Intentionally falling through and just filling however many types are requested
+        switch(*pSurfaceFormatCount) {
+        case 2:
+            pSurfaceFormats[1].pNext = nullptr;
+            pSurfaceFormats[1].surfaceFormat.format = VK_FORMAT_R8G8B8A8_UNORM;
+            pSurfaceFormats[1].surfaceFormat.colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
+            // fall through
+        default:
+            pSurfaceFormats[0].pNext = nullptr;
+            pSurfaceFormats[0].surfaceFormat.format = VK_FORMAT_B8G8R8A8_UNORM;
+            pSurfaceFormats[0].surfaceFormat.colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
+            break;
+        }
+    }
+    return VK_SUCCESS;
+}
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceDisplayProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayProperties2KHR*                    pProperties)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceDisplayPlaneProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPlaneProperties2KHR*               pProperties)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetDisplayModeProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModeProperties2KHR*                pProperties)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetDisplayPlaneCapabilities2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDisplayPlaneInfo2KHR*               pDisplayPlaneInfo,
+    VkDisplayPlaneCapabilities2KHR*             pCapabilities)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+
+
+
+
+static VKAPI_ATTR void VKAPI_CALL GetImageMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements)
+{
+    GetImageMemoryRequirements(device, pInfo->image, &pMemoryRequirements->memoryRequirements);
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetBufferMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements)
+{
+    GetBufferMemoryRequirements(device, pInfo->buffer, &pMemoryRequirements->memoryRequirements);
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetImageSparseMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements)
+{
+//Not a CREATE or DESTROY function
+}
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateSamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion)
+{
+    unique_lock_t lock(global_lock);
+    *pYcbcrConversion = (VkSamplerYcbcrConversion)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroySamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+
+static VKAPI_ATTR void VKAPI_CALL GetDescriptorSetLayoutSupportKHR(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport)
+{
+//Not a CREATE or DESTROY function
+}
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdDrawIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride)
+{
+//Not a CREATE or DESTROY function
+}
+
+
+
+
+
+
+
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetSemaphoreCounterValueKHR(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    uint64_t*                                   pValue)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL WaitSemaphoresKHR(
+    VkDevice                                    device,
+    const VkSemaphoreWaitInfoKHR*               pWaitInfo,
+    uint64_t                                    timeout)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL SignalSemaphoreKHR(
+    VkDevice                                    device,
+    const VkSemaphoreSignalInfoKHR*             pSignalInfo)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+
+
+
+
+
+
+static VKAPI_ATTR VkDeviceAddress VKAPI_CALL GetBufferDeviceAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR uint64_t VKAPI_CALL GetBufferOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR uint64_t VKAPI_CALL GetDeviceMemoryOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
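+
+// Note: VK_SUCCESS is 0, so the three address/capture-address stubs above
+// effectively return a zero (null) address; the unscoped-enum-to-integer
+// conversion is what lets "return VK_SUCCESS;" compile for the uint64_t and
+// VkDeviceAddress return types.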
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPipelineExecutablePropertiesKHR(
+    VkDevice                                    device,
+    const VkPipelineInfoKHR*                    pPipelineInfo,
+    uint32_t*                                   pExecutableCount,
+    VkPipelineExecutablePropertiesKHR*          pProperties)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPipelineExecutableStatisticsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pStatisticCount,
+    VkPipelineExecutableStatisticKHR*           pStatistics)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPipelineExecutableInternalRepresentationsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pInternalRepresentationCount,
+    VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    const VkDebugReportCallbackCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugReportCallbackEXT*                   pCallback)
+{
+    unique_lock_t lock(global_lock);
+    *pCallback = (VkDebugReportCallbackEXT)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroyDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    VkDebugReportCallbackEXT                    callback,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR void VKAPI_CALL DebugReportMessageEXT(
+    VkInstance                                  instance,
+    VkDebugReportFlagsEXT                       flags,
+    VkDebugReportObjectTypeEXT                  objectType,
+    uint64_t                                    object,
+    size_t                                      location,
+    int32_t                                     messageCode,
+    const char*                                 pLayerPrefix,
+    const char*                                 pMessage)
+{
+//Not a CREATE or DESTROY function
+}
+
+
+
+
+
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL DebugMarkerSetObjectTagEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectTagInfoEXT*        pTagInfo)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL DebugMarkerSetObjectNameEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectNameInfoEXT*       pNameInfo)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerBeginEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerEndEXT(
+    VkCommandBuffer                             commandBuffer)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerInsertEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo)
+{
+//Not a CREATE or DESTROY function
+}
+
+
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdBindTransformFeedbackBuffersEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets,
+    const VkDeviceSize*                         pSizes)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdBeginTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdEndTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdBeginQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags,
+    uint32_t                                    index)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdEndQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    uint32_t                                    index)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdDrawIndirectByteCountEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstInstance,
+    VkBuffer                                    counterBuffer,
+    VkDeviceSize                                counterBufferOffset,
+    uint32_t                                    counterOffset,
+    uint32_t                                    vertexStride)
+{
+//Not a CREATE or DESTROY function
+}
+
+
+static VKAPI_ATTR uint32_t VKAPI_CALL GetImageViewHandleNVX(
+    VkDevice                                    device,
+    const VkImageViewHandleInfoNVX*             pInfo)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdDrawIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride)
+{
+//Not a CREATE or DESTROY function
+}
+
+
+
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetShaderInfoAMD(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    VkShaderStageFlagBits                       shaderStage,
+    VkShaderInfoTypeAMD                         infoType,
+    size_t*                                     pInfoSize,
+    void*                                       pInfo)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+
+#ifdef VK_USE_PLATFORM_GGP
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateStreamDescriptorSurfaceGGP(
+    VkInstance                                  instance,
+    const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    unique_lock_t lock(global_lock);
+    *pSurface = (VkSurfaceKHR)global_unique_handle++;
+    return VK_SUCCESS;
+}
+#endif /* VK_USE_PLATFORM_GGP */
+
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceExternalImageFormatPropertiesNV(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageCreateFlags                          flags,
+    VkExternalMemoryHandleTypeFlagsNV           externalHandleType,
+    VkExternalImageFormatPropertiesNV*          pExternalImageFormatProperties)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetMemoryWin32HandleNV(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkExternalMemoryHandleTypeFlagsNV           handleType,
+    HANDLE*                                     pHandle)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+#endif /* VK_USE_PLATFORM_WIN32_KHR */
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#endif /* VK_USE_PLATFORM_WIN32_KHR */
+
+
+#ifdef VK_USE_PLATFORM_VI_NN
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateViSurfaceNN(
+    VkInstance                                  instance,
+    const VkViSurfaceCreateInfoNN*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    unique_lock_t lock(global_lock);
+    *pSurface = (VkSurfaceKHR)global_unique_handle++;
+    return VK_SUCCESS;
+}
+#endif /* VK_USE_PLATFORM_VI_NN */
+
+
+
+
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdBeginConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkConditionalRenderingBeginInfoEXT*   pConditionalRenderingBegin)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdEndConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer)
+{
+//Not a CREATE or DESTROY function
+}
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdProcessCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdProcessCommandsInfoNVX*          pProcessCommandsInfo)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdReserveSpaceForCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdReserveSpaceForCommandsInfoNVX*  pReserveSpaceInfo)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkIndirectCommandsLayoutNVX*                pIndirectCommandsLayout)
+{
+    unique_lock_t lock(global_lock);
+    *pIndirectCommandsLayout = (VkIndirectCommandsLayoutNVX)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroyIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    VkIndirectCommandsLayoutNVX                 indirectCommandsLayout,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateObjectTableNVX(
+    VkDevice                                    device,
+    const VkObjectTableCreateInfoNVX*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkObjectTableNVX*                           pObjectTable)
+{
+    unique_lock_t lock(global_lock);
+    *pObjectTable = (VkObjectTableNVX)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroyObjectTableNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL RegisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectTableEntryNVX* const*         ppObjectTableEntries,
+    const uint32_t*                             pObjectIndices)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL UnregisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectEntryTypeNVX*                 pObjectEntryTypes,
+    const uint32_t*                             pObjectIndices)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceGeneratedCommandsPropertiesNVX(
+    VkPhysicalDevice                            physicalDevice,
+    VkDeviceGeneratedCommandsFeaturesNVX*       pFeatures,
+    VkDeviceGeneratedCommandsLimitsNVX*         pLimits)
+{
+//Not a CREATE or DESTROY function
+}
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetViewportWScalingNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewportWScalingNV*                 pViewportWScalings)
+{
+//Not a CREATE or DESTROY function
+}
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL ReleaseDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+static VKAPI_ATTR VkResult VKAPI_CALL AcquireXlibDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    VkDisplayKHR                                display)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetRandROutputDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    RROutput                                    rrOutput,
+    VkDisplayKHR*                               pDisplay)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+#endif /* VK_USE_PLATFORM_XLIB_XRANDR_EXT */
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilities2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilities2EXT*                  pSurfaceCapabilities)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL DisplayPowerControlEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayPowerInfoEXT*                pDisplayPowerInfo)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL RegisterDeviceEventEXT(
+    VkDevice                                    device,
+    const VkDeviceEventInfoEXT*                 pDeviceEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL RegisterDisplayEventEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayEventInfoEXT*                pDisplayEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainCounterEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkSurfaceCounterFlagBitsEXT                 counter,
+    uint64_t*                                   pCounterValue)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetRefreshCycleDurationGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkRefreshCycleDurationGOOGLE*               pDisplayTimingProperties)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPastPresentationTimingGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pPresentationTimingCount,
+    VkPastPresentationTimingGOOGLE*             pPresentationTimings)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+
+
+
+
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetDiscardRectangleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstDiscardRectangle,
+    uint32_t                                    discardRectangleCount,
+    const VkRect2D*                             pDiscardRectangles)
+{
+//Not a CREATE or DESTROY function
+}
+
+
+
+
+
+static VKAPI_ATTR void VKAPI_CALL SetHdrMetadataEXT(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainKHR*                       pSwapchains,
+    const VkHdrMetadataEXT*                     pMetadata)
+{
+//Not a CREATE or DESTROY function
+}
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateIOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkIOSSurfaceCreateInfoMVK*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    unique_lock_t lock(global_lock);
+    *pSurface = (VkSurfaceKHR)global_unique_handle++;
+    return VK_SUCCESS;
+}
+#endif /* VK_USE_PLATFORM_IOS_MVK */
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateMacOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkMacOSSurfaceCreateInfoMVK*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    unique_lock_t lock(global_lock);
+    *pSurface = (VkSurfaceKHR)global_unique_handle++;
+    return VK_SUCCESS;
+}
+#endif /* VK_USE_PLATFORM_MACOS_MVK */
+
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL SetDebugUtilsObjectNameEXT(
+    VkDevice                                    device,
+    const VkDebugUtilsObjectNameInfoEXT*        pNameInfo)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL SetDebugUtilsObjectTagEXT(
+    VkDevice                                    device,
+    const VkDebugUtilsObjectTagInfoEXT*         pTagInfo)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL QueueBeginDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL QueueEndDebugUtilsLabelEXT(
+    VkQueue                                     queue)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL QueueInsertDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdBeginDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdEndDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdInsertDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    const VkDebugUtilsMessengerCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugUtilsMessengerEXT*                   pMessenger)
+{
+    unique_lock_t lock(global_lock);
+    *pMessenger = (VkDebugUtilsMessengerEXT)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroyDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessengerEXT                    messenger,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR void VKAPI_CALL SubmitDebugUtilsMessageEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessageSeverityFlagBitsEXT      messageSeverity,
+    VkDebugUtilsMessageTypeFlagsEXT             messageTypes,
+    const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData)
+{
+//Not a CREATE or DESTROY function
+}
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetAndroidHardwareBufferPropertiesANDROID(
+    VkDevice                                    device,
+    const struct AHardwareBuffer*               buffer,
+    VkAndroidHardwareBufferPropertiesANDROID*   pProperties)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetMemoryAndroidHardwareBufferANDROID(
+    VkDevice                                    device,
+    const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
+    struct AHardwareBuffer**                    pBuffer)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+#endif /* VK_USE_PLATFORM_ANDROID_KHR */
+
+
+
+
+
+
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetSampleLocationsEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkSampleLocationsInfoEXT*             pSampleLocationsInfo)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMultisamplePropertiesEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSampleCountFlagBits                       samples,
+    VkMultisamplePropertiesEXT*                 pMultisampleProperties)
+{
+//Not a CREATE or DESTROY function
+}
+
+
+
+
+
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetImageDrmFormatModifierPropertiesEXT(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkImageDrmFormatModifierPropertiesEXT*      pProperties)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateValidationCacheEXT(
+    VkDevice                                    device,
+    const VkValidationCacheCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkValidationCacheEXT*                       pValidationCache)
+{
+    unique_lock_t lock(global_lock);
+    *pValidationCache = (VkValidationCacheEXT)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroyValidationCacheEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL MergeValidationCachesEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkValidationCacheEXT*                 pSrcCaches)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetValidationCacheDataEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    size_t*                                     pDataSize,
+    void*                                       pData)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdBindShadingRateImageNV(
+    VkCommandBuffer                             commandBuffer,
+    VkImageView                                 imageView,
+    VkImageLayout                               imageLayout)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetViewportShadingRatePaletteNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkShadingRatePaletteNV*               pShadingRatePalettes)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetCoarseSampleOrderNV(
+    VkCommandBuffer                             commandBuffer,
+    VkCoarseSampleOrderTypeNV                   sampleOrderType,
+    uint32_t                                    customSampleOrderCount,
+    const VkCoarseSampleOrderCustomNV*          pCustomSampleOrders)
+{
+//Not a CREATE or DESTROY function
+}
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateAccelerationStructureNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureCreateInfoNV*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkAccelerationStructureNV*                  pAccelerationStructure)
+{
+    unique_lock_t lock(global_lock);
+    *pAccelerationStructure = (VkAccelerationStructureNV)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL DestroyAccelerationStructureNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    const VkAllocationCallbacks*                pAllocator)
+{
+//Destroy object
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetAccelerationStructureMemoryRequirementsNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
+    VkMemoryRequirements2KHR*                   pMemoryRequirements)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL BindAccelerationStructureMemoryNV(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindAccelerationStructureMemoryInfoNV* pBindInfos)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdBuildAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    const VkAccelerationStructureInfoNV*        pInfo,
+    VkBuffer                                    instanceData,
+    VkDeviceSize                                instanceOffset,
+    VkBool32                                    update,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkBuffer                                    scratch,
+    VkDeviceSize                                scratchOffset)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdCopyAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkCopyAccelerationStructureModeNV           mode)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdTraceRaysNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    raygenShaderBindingTableBuffer,
+    VkDeviceSize                                raygenShaderBindingOffset,
+    VkBuffer                                    missShaderBindingTableBuffer,
+    VkDeviceSize                                missShaderBindingOffset,
+    VkDeviceSize                                missShaderBindingStride,
+    VkBuffer                                    hitShaderBindingTableBuffer,
+    VkDeviceSize                                hitShaderBindingOffset,
+    VkDeviceSize                                hitShaderBindingStride,
+    VkBuffer                                    callableShaderBindingTableBuffer,
+    VkDeviceSize                                callableShaderBindingOffset,
+    VkDeviceSize                                callableShaderBindingStride,
+    uint32_t                                    width,
+    uint32_t                                    height,
+    uint32_t                                    depth)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateRayTracingPipelinesNV(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkRayTracingPipelineCreateInfoNV*     pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines)
+{
+    unique_lock_t lock(global_lock);
+    for (uint32_t i = 0; i < createInfoCount; ++i) {
+        pPipelines[i] = (VkPipeline)global_unique_handle++;
+    }
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetRayTracingShaderGroupHandlesNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    firstGroup,
+    uint32_t                                    groupCount,
+    size_t                                      dataSize,
+    void*                                       pData)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetAccelerationStructureHandleNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    size_t                                      dataSize,
+    void*                                       pData)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdWriteAccelerationStructuresPropertiesNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    accelerationStructureCount,
+    const VkAccelerationStructureNV*            pAccelerationStructures,
+    VkQueryType                                 queryType,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CompileDeferredNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    shader)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetMemoryHostPointerPropertiesEXT(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    const void*                                 pHostPointer,
+    VkMemoryHostPointerPropertiesEXT*           pMemoryHostPointerProperties)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdWriteBufferMarkerAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    uint32_t                                    marker)
+{
+//Not a CREATE or DESTROY function
+}
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceCalibrateableTimeDomainsEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pTimeDomainCount,
+    VkTimeDomainEXT*                            pTimeDomains)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetCalibratedTimestampsEXT(
+    VkDevice                                    device,
+    uint32_t                                    timestampCount,
+    const VkCalibratedTimestampInfoEXT*         pTimestampInfos,
+    uint64_t*                                   pTimestamps,
+    uint64_t*                                   pMaxDeviation)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+
+
+
+#ifdef VK_USE_PLATFORM_GGP
+#endif /* VK_USE_PLATFORM_GGP */
+
+
+
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdDrawMeshTasksNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    taskCount,
+    uint32_t                                    firstTask)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdDrawMeshTasksIndirectNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL CmdDrawMeshTasksIndirectCountNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride)
+{
+//Not a CREATE or DESTROY function
+}
+
+
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetExclusiveScissorNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstExclusiveScissor,
+    uint32_t                                    exclusiveScissorCount,
+    const VkRect2D*                             pExclusiveScissors)
+{
+//Not a CREATE or DESTROY function
+}
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetCheckpointNV(
+    VkCommandBuffer                             commandBuffer,
+    const void*                                 pCheckpointMarker)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR void VKAPI_CALL GetQueueCheckpointDataNV(
+    VkQueue                                     queue,
+    uint32_t*                                   pCheckpointDataCount,
+    VkCheckpointDataNV*                         pCheckpointData)
+{
+//Not a CREATE or DESTROY function
+}
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL InitializePerformanceApiINTEL(
+    VkDevice                                    device,
+    const VkInitializePerformanceApiInfoINTEL*  pInitializeInfo)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR void VKAPI_CALL UninitializePerformanceApiINTEL(
+    VkDevice                                    device)
+{
+//Not a CREATE or DESTROY function
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CmdSetPerformanceMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceMarkerInfoINTEL*         pMarkerInfo)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CmdSetPerformanceStreamMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceStreamMarkerInfoINTEL*   pMarkerInfo)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL CmdSetPerformanceOverrideINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceOverrideInfoINTEL*       pOverrideInfo)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL AcquirePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo,
+    VkPerformanceConfigurationINTEL*            pConfiguration)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL ReleasePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    VkPerformanceConfigurationINTEL             configuration)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL QueueSetPerformanceConfigurationINTEL(
+    VkQueue                                     queue,
+    VkPerformanceConfigurationINTEL             configuration)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPerformanceParameterINTEL(
+    VkDevice                                    device,
+    VkPerformanceParameterTypeINTEL             parameter,
+    VkPerformanceValueINTEL*                    pValue)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+
+
+static VKAPI_ATTR void VKAPI_CALL SetLocalDimmingAMD(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapChain,
+    VkBool32                                    localDimmingEnable)
+{
+//Not a CREATE or DESTROY function
+}
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateImagePipeSurfaceFUCHSIA(
+    VkInstance                                  instance,
+    const VkImagePipeSurfaceCreateInfoFUCHSIA*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    unique_lock_t lock(global_lock);
+    *pSurface = (VkSurfaceKHR)global_unique_handle++;
+    return VK_SUCCESS;
+}
+#endif /* VK_USE_PLATFORM_FUCHSIA */
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateMetalSurfaceEXT(
+    VkInstance                                  instance,
+    const VkMetalSurfaceCreateInfoEXT*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    unique_lock_t lock(global_lock);
+    *pSurface = (VkSurfaceKHR)global_unique_handle++;
+    return VK_SUCCESS;
+}
+#endif /* VK_USE_PLATFORM_METAL_EXT */
+
+
+
+
+
+
+
+
+
+
+
+
+static VKAPI_ATTR VkDeviceAddress VKAPI_CALL GetBufferDeviceAddressEXT(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo)
+{
+//Not a CREATE or DESTROY function
+    return 0;  // return type is VkDeviceAddress (uint64_t); 0 matches the value of the generated VK_SUCCESS
+}
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceToolPropertiesEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pToolCount,
+    VkPhysicalDeviceToolPropertiesEXT*          pToolProperties)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceCooperativeMatrixPropertiesNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkCooperativeMatrixPropertiesNV*            pProperties)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pCombinationCount,
+    VkFramebufferMixedSamplesCombinationNV*     pCombinations)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfacePresentModes2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL AcquireFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL ReleaseFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetDeviceGroupSurfacePresentModes2EXT(
+    VkDevice                                    device,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes)
+{
+//Not a CREATE or DESTROY function
+    return VK_SUCCESS;
+}
+#endif /* VK_USE_PLATFORM_WIN32_KHR */
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateHeadlessSurfaceEXT(
+    VkInstance                                  instance,
+    const VkHeadlessSurfaceCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    unique_lock_t lock(global_lock);
+    *pSurface = (VkSurfaceKHR)global_unique_handle++;
+    return VK_SUCCESS;
+}
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetLineStippleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    lineStippleFactor,
+    uint16_t                                    lineStipplePattern)
+{
+//Not a CREATE or DESTROY function
+}
+
+
+static VKAPI_ATTR void VKAPI_CALL ResetQueryPoolEXT(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount)
+{
+//Not a CREATE or DESTROY function
+}
+
+
+
+
+
+
+
+static VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetPhysicalDeviceProcAddr(VkInstance instance, const char *funcName) {
+    // TODO: This function should only care about physical device functions and return nullptr for other functions
+    const auto &item = name_to_funcptr_map.find(funcName);
+    if (item != name_to_funcptr_map.end()) {
+        return reinterpret_cast<PFN_vkVoidFunction>(item->second);
+    }
+    // Mock should intercept all functions so if we get here just return null
+    return nullptr;
+}
+
+} // namespace vkmock
+
+#if defined(__GNUC__) && __GNUC__ >= 4
+#define EXPORT __attribute__((visibility("default")))
+#elif defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590)
+#define EXPORT __attribute__((visibility("default")))
+#else
+#define EXPORT
+#endif
+
+extern "C" {
+
+EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance, const char* pName) {
+    if (!vkmock::negotiate_loader_icd_interface_called) {
+        vkmock::loader_interface_version = 1;
+    }
+    return vkmock::GetInstanceProcAddr(instance, pName);
+}
+
+EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetPhysicalDeviceProcAddr(VkInstance instance, const char* pName) {
+    return vkmock::GetPhysicalDeviceProcAddr(instance, pName);
+}
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pSupportedVersion) {
+    vkmock::negotiate_loader_icd_interface_called = true;
+    vkmock::loader_interface_version = *pSupportedVersion;
+    if (*pSupportedVersion > vkmock::SUPPORTED_LOADER_ICD_INTERFACE_VERSION) {
+        *pSupportedVersion = vkmock::SUPPORTED_LOADER_ICD_INTERFACE_VERSION;
+    }
+    return VK_SUCCESS;
+}
+
+
+EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR(
+    VkInstance                                  instance,
+    VkSurfaceKHR                                surface,
+    const VkAllocationCallbacks*                pAllocator)
+{
+    vkmock::DestroySurfaceKHR(instance, surface, pAllocator);
+}
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    VkSurfaceKHR                                surface,
+    VkBool32*                                   pSupported)
+{
+    return vkmock::GetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported);
+}
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilitiesKHR*                   pSurfaceCapabilities)
+{
+    return vkmock::GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities);
+}
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormatKHR*                         pSurfaceFormats)
+{
+    return vkmock::GetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);
+}
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes)
+{
+    return vkmock::GetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes);
+}
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR(
+    VkInstance                                  instance,
+    const VkDisplaySurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    return vkmock::CreateDisplayPlaneSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+}
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateXlibSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXlibSurfaceCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    return vkmock::CreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+}
+#endif /* VK_USE_PLATFORM_XLIB_KHR */
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXcbSurfaceCreateInfoKHR*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    return vkmock::CreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+}
+#endif /* VK_USE_PLATFORM_XCB_KHR */
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateWaylandSurfaceKHR(
+    VkInstance                                  instance,
+    const VkWaylandSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    return vkmock::CreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+}
+#endif /* VK_USE_PLATFORM_WAYLAND_KHR */
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateAndroidSurfaceKHR(
+    VkInstance                                  instance,
+    const VkAndroidSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    return vkmock::CreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+}
+#endif /* VK_USE_PLATFORM_ANDROID_KHR */
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR(
+    VkInstance                                  instance,
+    const VkWin32SurfaceCreateInfoKHR*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    return vkmock::CreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+}
+#endif /* VK_USE_PLATFORM_WIN32_KHR */
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHR(
+    VkDevice                                    device,
+    VkSurfaceKHR                                surface,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes)
+{
+    return vkmock::GetDeviceGroupSurfacePresentModesKHR(device, surface, pModes);
+}
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pRectCount,
+    VkRect2D*                                   pRects)
+{
+    return vkmock::GetPhysicalDevicePresentRectanglesKHR(physicalDevice, surface, pRectCount, pRects);
+}
+
+#ifdef VK_USE_PLATFORM_VI_NN
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateViSurfaceNN(
+    VkInstance                                  instance,
+    const VkViSurfaceCreateInfoNN*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    return vkmock::CreateViSurfaceNN(instance, pCreateInfo, pAllocator, pSurface);
+}
+#endif /* VK_USE_PLATFORM_VI_NN */
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilities2EXT*                  pSurfaceCapabilities)
+{
+    return vkmock::GetPhysicalDeviceSurfaceCapabilities2EXT(physicalDevice, surface, pSurfaceCapabilities);
+}
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateIOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkIOSSurfaceCreateInfoMVK*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    return vkmock::CreateIOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
+}
+#endif /* VK_USE_PLATFORM_IOS_MVK */
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateMacOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkMacOSSurfaceCreateInfoMVK*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    return vkmock::CreateMacOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
+}
+#endif /* VK_USE_PLATFORM_MACOS_MVK */
+
+} // end extern "C"
+
+
diff --git a/src/third_party/vulkan-tools/src/icd/generated/mock_icd.h b/src/third_party/vulkan-tools/src/icd/generated/mock_icd.h
new file mode 100644
index 0000000..e510002
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/icd/generated/mock_icd.h
@@ -0,0 +1,3051 @@
+#ifndef __mock_icd_h_
+#define __mock_icd_h_ 1
+
+/*
+** Copyright (c) 2015-2018 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+#include <unordered_map>
+#include <mutex>
+#include <string>
+#include <cstring>
+#include "vulkan/vk_icd.h"
+namespace vkmock {
+
+
+using mutex_t = std::mutex;
+using lock_guard_t = std::lock_guard<mutex_t>;
+using unique_lock_t = std::unique_lock<mutex_t>;
+
+static mutex_t global_lock;
+static uint64_t global_unique_handle = 1;
+static const uint32_t SUPPORTED_LOADER_ICD_INTERFACE_VERSION = 5;
+static uint32_t loader_interface_version = 0;
+static bool negotiate_loader_icd_interface_called = false;
+static void* CreateDispObjHandle() {
+    auto handle = new VK_LOADER_DATA;
+    set_loader_magic_value(handle);
+    return handle;
+}
+static void DestroyDispObjHandle(void* handle) {
+    delete reinterpret_cast<VK_LOADER_DATA*>(handle);
+}
+
+// Map of instance extension name to version
+static const std::unordered_map<std::string, uint32_t> instance_extension_map = {
+    {"VK_KHR_surface", 25},
+    {"VK_KHR_display", 23},
+    {"VK_KHR_xlib_surface", 6},
+    {"VK_KHR_xcb_surface", 6},
+    {"VK_KHR_wayland_surface", 6},
+    {"VK_KHR_android_surface", 6},
+    {"VK_KHR_win32_surface", 6},
+    {"VK_EXT_debug_report", 9},
+    {"VK_GGP_stream_descriptor_surface", 1},
+    {"VK_NV_external_memory_capabilities", 1},
+    {"VK_KHR_get_physical_device_properties2", 2},
+    {"VK_EXT_validation_flags", 2},
+    {"VK_NN_vi_surface", 1},
+    {"VK_KHR_device_group_creation", 1},
+    {"VK_KHR_external_memory_capabilities", 1},
+    {"VK_KHR_external_semaphore_capabilities", 1},
+    {"VK_EXT_direct_mode_display", 1},
+    {"VK_EXT_acquire_xlib_display", 1},
+    {"VK_EXT_display_surface_counter", 1},
+    {"VK_EXT_swapchain_colorspace", 4},
+    {"VK_KHR_external_fence_capabilities", 1},
+    {"VK_KHR_get_surface_capabilities2", 1},
+    {"VK_KHR_get_display_properties2", 1},
+    {"VK_MVK_ios_surface", 2},
+    {"VK_MVK_macos_surface", 2},
+    {"VK_EXT_debug_utils", 1},
+    {"VK_FUCHSIA_imagepipe_surface", 1},
+    {"VK_EXT_metal_surface", 1},
+    {"VK_KHR_surface_protected_capabilities", 1},
+    {"VK_EXT_validation_features", 2},
+    {"VK_EXT_headless_surface", 1},
+};
+// Map of device extension name to version
+static const std::unordered_map<std::string, uint32_t> device_extension_map = {
+    {"VK_KHR_swapchain", 70},
+    {"VK_KHR_display_swapchain", 10},
+    {"VK_NV_glsl_shader", 1},
+    {"VK_EXT_depth_range_unrestricted", 1},
+    {"VK_KHR_sampler_mirror_clamp_to_edge", 3},
+    {"VK_IMG_filter_cubic", 1},
+    {"VK_AMD_rasterization_order", 1},
+    {"VK_AMD_shader_trinary_minmax", 1},
+    {"VK_AMD_shader_explicit_vertex_parameter", 1},
+    {"VK_EXT_debug_marker", 4},
+    {"VK_AMD_gcn_shader", 1},
+    {"VK_NV_dedicated_allocation", 1},
+    {"VK_EXT_transform_feedback", 1},
+    {"VK_NVX_image_view_handle", 1},
+    {"VK_AMD_draw_indirect_count", 2},
+    {"VK_AMD_negative_viewport_height", 1},
+    {"VK_AMD_gpu_shader_half_float", 2},
+    {"VK_AMD_shader_ballot", 1},
+    {"VK_AMD_texture_gather_bias_lod", 1},
+    {"VK_AMD_shader_info", 1},
+    {"VK_AMD_shader_image_load_store_lod", 1},
+    {"VK_NV_corner_sampled_image", 2},
+    {"VK_KHR_multiview", 1},
+    {"VK_IMG_format_pvrtc", 1},
+    {"VK_NV_external_memory", 1},
+    {"VK_NV_external_memory_win32", 1},
+    {"VK_NV_win32_keyed_mutex", 2},
+    {"VK_KHR_device_group", 4},
+    {"VK_KHR_shader_draw_parameters", 1},
+    {"VK_EXT_shader_subgroup_ballot", 1},
+    {"VK_EXT_shader_subgroup_vote", 1},
+    {"VK_EXT_texture_compression_astc_hdr", 1},
+    {"VK_EXT_astc_decode_mode", 1},
+    {"VK_KHR_maintenance1", 2},
+    {"VK_KHR_external_memory", 1},
+    {"VK_KHR_external_memory_win32", 1},
+    {"VK_KHR_external_memory_fd", 1},
+    {"VK_KHR_win32_keyed_mutex", 1},
+    {"VK_KHR_external_semaphore", 1},
+    {"VK_KHR_external_semaphore_win32", 1},
+    {"VK_KHR_external_semaphore_fd", 1},
+    {"VK_KHR_push_descriptor", 2},
+    {"VK_EXT_conditional_rendering", 2},
+    {"VK_KHR_shader_float16_int8", 1},
+    {"VK_KHR_16bit_storage", 1},
+    {"VK_KHR_incremental_present", 1},
+    {"VK_KHR_descriptor_update_template", 1},
+    {"VK_NVX_device_generated_commands", 3},
+    {"VK_NV_clip_space_w_scaling", 1},
+    {"VK_EXT_display_control", 1},
+    {"VK_GOOGLE_display_timing", 1},
+    {"VK_NV_sample_mask_override_coverage", 1},
+    {"VK_NV_geometry_shader_passthrough", 1},
+    {"VK_NV_viewport_array2", 1},
+    {"VK_NVX_multiview_per_view_attributes", 1},
+    {"VK_NV_viewport_swizzle", 1},
+    {"VK_EXT_discard_rectangles", 1},
+    {"VK_EXT_conservative_rasterization", 1},
+    {"VK_EXT_depth_clip_enable", 1},
+    {"VK_EXT_hdr_metadata", 2},
+    {"VK_KHR_imageless_framebuffer", 1},
+    {"VK_KHR_create_renderpass2", 1},
+    {"VK_KHR_shared_presentable_image", 1},
+    {"VK_KHR_external_fence", 1},
+    {"VK_KHR_external_fence_win32", 1},
+    {"VK_KHR_external_fence_fd", 1},
+    {"VK_KHR_performance_query", 1},
+    {"VK_KHR_maintenance2", 1},
+    {"VK_KHR_variable_pointers", 1},
+    {"VK_EXT_external_memory_dma_buf", 1},
+    {"VK_EXT_queue_family_foreign", 1},
+    {"VK_KHR_dedicated_allocation", 3},
+    {"VK_ANDROID_external_memory_android_hardware_buffer", 3},
+    {"VK_EXT_sampler_filter_minmax", 2},
+    {"VK_KHR_storage_buffer_storage_class", 1},
+    {"VK_AMD_gpu_shader_int16", 2},
+    {"VK_AMD_mixed_attachment_samples", 1},
+    {"VK_AMD_shader_fragment_mask", 1},
+    {"VK_EXT_inline_uniform_block", 1},
+    {"VK_EXT_shader_stencil_export", 1},
+    {"VK_EXT_sample_locations", 1},
+    {"VK_KHR_relaxed_block_layout", 1},
+    {"VK_KHR_get_memory_requirements2", 1},
+    {"VK_KHR_image_format_list", 1},
+    {"VK_EXT_blend_operation_advanced", 2},
+    {"VK_NV_fragment_coverage_to_color", 1},
+    {"VK_NV_framebuffer_mixed_samples", 1},
+    {"VK_NV_fill_rectangle", 1},
+    {"VK_NV_shader_sm_builtins", 1},
+    {"VK_EXT_post_depth_coverage", 1},
+    {"VK_KHR_sampler_ycbcr_conversion", 14},
+    {"VK_KHR_bind_memory2", 1},
+    {"VK_EXT_image_drm_format_modifier", 1},
+    {"VK_EXT_descriptor_indexing", 2},
+    {"VK_EXT_shader_viewport_index_layer", 1},
+    {"VK_NV_shading_rate_image", 3},
+    {"VK_NV_ray_tracing", 3},
+    {"VK_NV_representative_fragment_test", 2},
+    {"VK_KHR_maintenance3", 1},
+    {"VK_KHR_draw_indirect_count", 1},
+    {"VK_EXT_filter_cubic", 2},
+    {"VK_EXT_global_priority", 2},
+    {"VK_KHR_shader_subgroup_extended_types", 1},
+    {"VK_KHR_8bit_storage", 1},
+    {"VK_EXT_external_memory_host", 1},
+    {"VK_AMD_buffer_marker", 1},
+    {"VK_KHR_shader_atomic_int64", 1},
+    {"VK_KHR_shader_clock", 1},
+    {"VK_AMD_pipeline_compiler_control", 1},
+    {"VK_EXT_calibrated_timestamps", 1},
+    {"VK_AMD_shader_core_properties", 2},
+    {"VK_AMD_memory_overallocation_behavior", 1},
+    {"VK_EXT_vertex_attribute_divisor", 3},
+    {"VK_GGP_frame_token", 1},
+    {"VK_EXT_pipeline_creation_feedback", 1},
+    {"VK_KHR_driver_properties", 1},
+    {"VK_KHR_shader_float_controls", 4},
+    {"VK_NV_shader_subgroup_partitioned", 1},
+    {"VK_KHR_depth_stencil_resolve", 1},
+    {"VK_KHR_swapchain_mutable_format", 1},
+    {"VK_NV_compute_shader_derivatives", 1},
+    {"VK_NV_mesh_shader", 1},
+    {"VK_NV_fragment_shader_barycentric", 1},
+    {"VK_NV_shader_image_footprint", 2},
+    {"VK_NV_scissor_exclusive", 1},
+    {"VK_NV_device_diagnostic_checkpoints", 2},
+    {"VK_KHR_timeline_semaphore", 2},
+    {"VK_INTEL_shader_integer_functions2", 1},
+    {"VK_INTEL_performance_query", 1},
+    {"VK_KHR_vulkan_memory_model", 3},
+    {"VK_EXT_pci_bus_info", 2},
+    {"VK_AMD_display_native_hdr", 1},
+    {"VK_EXT_fragment_density_map", 1},
+    {"VK_EXT_scalar_block_layout", 1},
+    {"VK_GOOGLE_hlsl_functionality1", 1},
+    {"VK_GOOGLE_decorate_string", 1},
+    {"VK_EXT_subgroup_size_control", 2},
+    {"VK_AMD_shader_core_properties2", 1},
+    {"VK_AMD_device_coherent_memory", 1},
+    {"VK_KHR_spirv_1_4", 1},
+    {"VK_EXT_memory_budget", 1},
+    {"VK_EXT_memory_priority", 1},
+    {"VK_NV_dedicated_allocation_image_aliasing", 1},
+    {"VK_KHR_separate_depth_stencil_layouts", 1},
+    {"VK_EXT_buffer_device_address", 2},
+    {"VK_EXT_tooling_info", 1},
+    {"VK_EXT_separate_stencil_usage", 1},
+    {"VK_NV_cooperative_matrix", 1},
+    {"VK_NV_coverage_reduction_mode", 1},
+    {"VK_EXT_fragment_shader_interlock", 1},
+    {"VK_EXT_ycbcr_image_arrays", 1},
+    {"VK_KHR_uniform_buffer_standard_layout", 1},
+    {"VK_EXT_full_screen_exclusive", 4},
+    {"VK_KHR_buffer_device_address", 1},
+    {"VK_EXT_line_rasterization", 1},
+    {"VK_EXT_host_query_reset", 1},
+    {"VK_EXT_index_type_uint8", 1},
+    {"VK_KHR_pipeline_executable_properties", 1},
+    {"VK_EXT_shader_demote_to_helper_invocation", 1},
+    {"VK_EXT_texel_buffer_alignment", 1},
+    {"VK_GOOGLE_user_type", 1},
+};
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateInstance(
+    const VkInstanceCreateInfo*                 pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkInstance*                                 pInstance);
+
+static VKAPI_ATTR void VKAPI_CALL DestroyInstance(
+    VkInstance                                  instance,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDevices(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceCount,
+    VkPhysicalDevice*                           pPhysicalDevices);
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFeatures(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures*                   pFeatures);
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties*                         pFormatProperties);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageCreateFlags                          flags,
+    VkImageFormatProperties*                    pImageFormatProperties);
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties*                 pProperties);
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyProperties(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties*                    pQueueFamilyProperties);
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMemoryProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties*           pMemoryProperties);
+
+static VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(
+    VkInstance                                  instance,
+    const char*                                 pName);
+
+static VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(
+    VkDevice                                    device,
+    const char*                                 pName);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateDevice(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDeviceCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDevice*                                   pDevice);
+
+static VKAPI_ATTR void VKAPI_CALL DestroyDevice(
+    VkDevice                                    device,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(
+    const char*                                 pLayerName,
+    uint32_t*                                   pPropertyCount,
+    VkExtensionProperties*                      pProperties);
+
+static VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceExtensionProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const char*                                 pLayerName,
+    uint32_t*                                   pPropertyCount,
+    VkExtensionProperties*                      pProperties);
+
+static VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(
+    uint32_t*                                   pPropertyCount,
+    VkLayerProperties*                          pProperties);
+
+static VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkLayerProperties*                          pProperties);
+
+static VKAPI_ATTR void VKAPI_CALL GetDeviceQueue(
+    VkDevice                                    device,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t                                    queueIndex,
+    VkQueue*                                    pQueue);
+
+static VKAPI_ATTR VkResult VKAPI_CALL QueueSubmit(
+    VkQueue                                     queue,
+    uint32_t                                    submitCount,
+    const VkSubmitInfo*                         pSubmits,
+    VkFence                                     fence);
+
+static VKAPI_ATTR VkResult VKAPI_CALL QueueWaitIdle(
+    VkQueue                                     queue);
+
+static VKAPI_ATTR VkResult VKAPI_CALL DeviceWaitIdle(
+    VkDevice                                    device);
+
+static VKAPI_ATTR VkResult VKAPI_CALL AllocateMemory(
+    VkDevice                                    device,
+    const VkMemoryAllocateInfo*                 pAllocateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDeviceMemory*                             pMemory);
+
+static VKAPI_ATTR void VKAPI_CALL FreeMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR VkResult VKAPI_CALL MapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                offset,
+    VkDeviceSize                                size,
+    VkMemoryMapFlags                            flags,
+    void**                                      ppData);
+
+static VKAPI_ATTR void VKAPI_CALL UnmapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory);
+
+static VKAPI_ATTR VkResult VKAPI_CALL FlushMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges);
+
+static VKAPI_ATTR VkResult VKAPI_CALL InvalidateMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges);
+
+static VKAPI_ATTR void VKAPI_CALL GetDeviceMemoryCommitment(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize*                               pCommittedMemoryInBytes);
+
+static VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset);
+
+static VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset);
+
+static VKAPI_ATTR void VKAPI_CALL GetBufferMemoryRequirements(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkMemoryRequirements*                       pMemoryRequirements);
+
+static VKAPI_ATTR void VKAPI_CALL GetImageMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkMemoryRequirements*                       pMemoryRequirements);
+
+static VKAPI_ATTR void VKAPI_CALL GetImageSparseMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements*            pSparseMemoryRequirements);
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceSparseImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkSampleCountFlagBits                       samples,
+    VkImageUsageFlags                           usage,
+    VkImageTiling                               tiling,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties*              pProperties);
+
+static VKAPI_ATTR VkResult VKAPI_CALL QueueBindSparse(
+    VkQueue                                     queue,
+    uint32_t                                    bindInfoCount,
+    const VkBindSparseInfo*                     pBindInfo,
+    VkFence                                     fence);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateFence(
+    VkDevice                                    device,
+    const VkFenceCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence);
+
+static VKAPI_ATTR void VKAPI_CALL DestroyFence(
+    VkDevice                                    device,
+    VkFence                                     fence,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR VkResult VKAPI_CALL ResetFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetFenceStatus(
+    VkDevice                                    device,
+    VkFence                                     fence);
+
+static VKAPI_ATTR VkResult VKAPI_CALL WaitForFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences,
+    VkBool32                                    waitAll,
+    uint64_t                                    timeout);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateSemaphore(
+    VkDevice                                    device,
+    const VkSemaphoreCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSemaphore*                                pSemaphore);
+
+static VKAPI_ATTR void VKAPI_CALL DestroySemaphore(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateEvent(
+    VkDevice                                    device,
+    const VkEventCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkEvent*                                    pEvent);
+
+static VKAPI_ATTR void VKAPI_CALL DestroyEvent(
+    VkDevice                                    device,
+    VkEvent                                     event,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetEventStatus(
+    VkDevice                                    device,
+    VkEvent                                     event);
+
+static VKAPI_ATTR VkResult VKAPI_CALL SetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event);
+
+static VKAPI_ATTR VkResult VKAPI_CALL ResetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateQueryPool(
+    VkDevice                                    device,
+    const VkQueryPoolCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkQueryPool*                                pQueryPool);
+
+static VKAPI_ATTR void VKAPI_CALL DestroyQueryPool(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetQueryPoolResults(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    size_t                                      dataSize,
+    void*                                       pData,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateBuffer(
+    VkDevice                                    device,
+    const VkBufferCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBuffer*                                   pBuffer);
+
+static VKAPI_ATTR void VKAPI_CALL DestroyBuffer(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateBufferView(
+    VkDevice                                    device,
+    const VkBufferViewCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBufferView*                               pView);
+
+static VKAPI_ATTR void VKAPI_CALL DestroyBufferView(
+    VkDevice                                    device,
+    VkBufferView                                bufferView,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateImage(
+    VkDevice                                    device,
+    const VkImageCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImage*                                    pImage);
+
+static VKAPI_ATTR void VKAPI_CALL DestroyImage(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR void VKAPI_CALL GetImageSubresourceLayout(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkImageSubresource*                   pSubresource,
+    VkSubresourceLayout*                        pLayout);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateImageView(
+    VkDevice                                    device,
+    const VkImageViewCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImageView*                                pView);
+
+static VKAPI_ATTR void VKAPI_CALL DestroyImageView(
+    VkDevice                                    device,
+    VkImageView                                 imageView,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateShaderModule(
+    VkDevice                                    device,
+    const VkShaderModuleCreateInfo*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkShaderModule*                             pShaderModule);
+
+static VKAPI_ATTR void VKAPI_CALL DestroyShaderModule(
+    VkDevice                                    device,
+    VkShaderModule                              shaderModule,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineCache(
+    VkDevice                                    device,
+    const VkPipelineCacheCreateInfo*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineCache*                            pPipelineCache);
+
+static VKAPI_ATTR void VKAPI_CALL DestroyPipelineCache(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPipelineCacheData(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    size_t*                                     pDataSize,
+    void*                                       pData);
+
+static VKAPI_ATTR VkResult VKAPI_CALL MergePipelineCaches(
+    VkDevice                                    device,
+    VkPipelineCache                             dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkPipelineCache*                      pSrcCaches);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateGraphicsPipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkGraphicsPipelineCreateInfo*         pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateComputePipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkComputePipelineCreateInfo*          pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines);
+
+static VKAPI_ATTR void VKAPI_CALL DestroyPipeline(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineLayout(
+    VkDevice                                    device,
+    const VkPipelineLayoutCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineLayout*                           pPipelineLayout);
+
+static VKAPI_ATTR void VKAPI_CALL DestroyPipelineLayout(
+    VkDevice                                    device,
+    VkPipelineLayout                            pipelineLayout,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateSampler(
+    VkDevice                                    device,
+    const VkSamplerCreateInfo*                  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSampler*                                  pSampler);
+
+static VKAPI_ATTR void VKAPI_CALL DestroySampler(
+    VkDevice                                    device,
+    VkSampler                                   sampler,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorSetLayout(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorSetLayout*                      pSetLayout);
+
+static VKAPI_ATTR void VKAPI_CALL DestroyDescriptorSetLayout(
+    VkDevice                                    device,
+    VkDescriptorSetLayout                       descriptorSetLayout,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorPool(
+    VkDevice                                    device,
+    const VkDescriptorPoolCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorPool*                           pDescriptorPool);
+
+static VKAPI_ATTR void VKAPI_CALL DestroyDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR VkResult VKAPI_CALL ResetDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    VkDescriptorPoolResetFlags                  flags);
+
+static VKAPI_ATTR VkResult VKAPI_CALL AllocateDescriptorSets(
+    VkDevice                                    device,
+    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
+    VkDescriptorSet*                            pDescriptorSets);
+
+static VKAPI_ATTR VkResult VKAPI_CALL FreeDescriptorSets(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets);
+
+static VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSets(
+    VkDevice                                    device,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites,
+    uint32_t                                    descriptorCopyCount,
+    const VkCopyDescriptorSet*                  pDescriptorCopies);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateFramebuffer(
+    VkDevice                                    device,
+    const VkFramebufferCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFramebuffer*                              pFramebuffer);
+
+static VKAPI_ATTR void VKAPI_CALL DestroyFramebuffer(
+    VkDevice                                    device,
+    VkFramebuffer                               framebuffer,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateRenderPass(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass);
+
+static VKAPI_ATTR void VKAPI_CALL DestroyRenderPass(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR void VKAPI_CALL GetRenderAreaGranularity(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    VkExtent2D*                                 pGranularity);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateCommandPool(
+    VkDevice                                    device,
+    const VkCommandPoolCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkCommandPool*                              pCommandPool);
+
+static VKAPI_ATTR void VKAPI_CALL DestroyCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR VkResult VKAPI_CALL ResetCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolResetFlags                     flags);
+
+static VKAPI_ATTR VkResult VKAPI_CALL AllocateCommandBuffers(
+    VkDevice                                    device,
+    const VkCommandBufferAllocateInfo*          pAllocateInfo,
+    VkCommandBuffer*                            pCommandBuffers);
+
+static VKAPI_ATTR void VKAPI_CALL FreeCommandBuffers(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers);
+
+static VKAPI_ATTR VkResult VKAPI_CALL BeginCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    const VkCommandBufferBeginInfo*             pBeginInfo);
+
+static VKAPI_ATTR VkResult VKAPI_CALL EndCommandBuffer(
+    VkCommandBuffer                             commandBuffer);
+
+static VKAPI_ATTR VkResult VKAPI_CALL ResetCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkCommandBufferResetFlags                   flags);
+
+static VKAPI_ATTR void VKAPI_CALL CmdBindPipeline(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipeline                                  pipeline);
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetViewport(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewport*                           pViewports);
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetScissor(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstScissor,
+    uint32_t                                    scissorCount,
+    const VkRect2D*                             pScissors);
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetLineWidth(
+    VkCommandBuffer                             commandBuffer,
+    float                                       lineWidth);
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetDepthBias(
+    VkCommandBuffer                             commandBuffer,
+    float                                       depthBiasConstantFactor,
+    float                                       depthBiasClamp,
+    float                                       depthBiasSlopeFactor);
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetBlendConstants(
+    VkCommandBuffer                             commandBuffer,
+    const float                                 blendConstants[4]);
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetDepthBounds(
+    VkCommandBuffer                             commandBuffer,
+    float                                       minDepthBounds,
+    float                                       maxDepthBounds);
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetStencilCompareMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    compareMask);
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetStencilWriteMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    writeMask);
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetStencilReference(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    reference);
+
+static VKAPI_ATTR void VKAPI_CALL CmdBindDescriptorSets(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    firstSet,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets,
+    uint32_t                                    dynamicOffsetCount,
+    const uint32_t*                             pDynamicOffsets);
+
+static VKAPI_ATTR void VKAPI_CALL CmdBindIndexBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkIndexType                                 indexType);
+
+static VKAPI_ATTR void VKAPI_CALL CmdBindVertexBuffers(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets);
+
+static VKAPI_ATTR void VKAPI_CALL CmdDraw(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    vertexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstVertex,
+    uint32_t                                    firstInstance);
+
+static VKAPI_ATTR void VKAPI_CALL CmdDrawIndexed(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    indexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstIndex,
+    int32_t                                     vertexOffset,
+    uint32_t                                    firstInstance);
+
+static VKAPI_ATTR void VKAPI_CALL CmdDrawIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride);
+
+static VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride);
+
+static VKAPI_ATTR void VKAPI_CALL CmdDispatch(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ);
+
+static VKAPI_ATTR void VKAPI_CALL CmdDispatchIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset);
+
+static VKAPI_ATTR void VKAPI_CALL CmdCopyBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferCopy*                         pRegions);
+
+static VKAPI_ATTR void VKAPI_CALL CmdCopyImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageCopy*                          pRegions);
+
+static VKAPI_ATTR void VKAPI_CALL CmdBlitImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageBlit*                          pRegions,
+    VkFilter                                    filter);
+
+static VKAPI_ATTR void VKAPI_CALL CmdCopyBufferToImage(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions);
+
+static VKAPI_ATTR void VKAPI_CALL CmdCopyImageToBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions);
+
+static VKAPI_ATTR void VKAPI_CALL CmdUpdateBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                dataSize,
+    const void*                                 pData);
+
+static VKAPI_ATTR void VKAPI_CALL CmdFillBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                size,
+    uint32_t                                    data);
+
+static VKAPI_ATTR void VKAPI_CALL CmdClearColorImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearColorValue*                    pColor,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges);
+
+static VKAPI_ATTR void VKAPI_CALL CmdClearDepthStencilImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearDepthStencilValue*             pDepthStencil,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges);
+
+static VKAPI_ATTR void VKAPI_CALL CmdClearAttachments(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    attachmentCount,
+    const VkClearAttachment*                    pAttachments,
+    uint32_t                                    rectCount,
+    const VkClearRect*                          pRects);
+
+static VKAPI_ATTR void VKAPI_CALL CmdResolveImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageResolve*                       pRegions);
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask);
+
+static VKAPI_ATTR void VKAPI_CALL CmdResetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask);
+
+static VKAPI_ATTR void VKAPI_CALL CmdWaitEvents(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    eventCount,
+    const VkEvent*                              pEvents,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers);
+
+static VKAPI_ATTR void VKAPI_CALL CmdPipelineBarrier(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    VkDependencyFlags                           dependencyFlags,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers);
+
+static VKAPI_ATTR void VKAPI_CALL CmdBeginQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags);
+
+static VKAPI_ATTR void VKAPI_CALL CmdEndQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query);
+
+static VKAPI_ATTR void VKAPI_CALL CmdResetQueryPool(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount);
+
+static VKAPI_ATTR void VKAPI_CALL CmdWriteTimestamp(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query);
+
+static VKAPI_ATTR void VKAPI_CALL CmdCopyQueryPoolResults(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags);
+
+static VKAPI_ATTR void VKAPI_CALL CmdPushConstants(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineLayout                            layout,
+    VkShaderStageFlags                          stageFlags,
+    uint32_t                                    offset,
+    uint32_t                                    size,
+    const void*                                 pValues);
+
+static VKAPI_ATTR void VKAPI_CALL CmdBeginRenderPass(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    VkSubpassContents                           contents);
+
+static VKAPI_ATTR void VKAPI_CALL CmdNextSubpass(
+    VkCommandBuffer                             commandBuffer,
+    VkSubpassContents                           contents);
+
+static VKAPI_ATTR void VKAPI_CALL CmdEndRenderPass(
+    VkCommandBuffer                             commandBuffer);
+
+static VKAPI_ATTR void VKAPI_CALL CmdExecuteCommands(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers);
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceVersion(
+    uint32_t*                                   pApiVersion);
+
+static VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos);
+
+static VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos);
+
+static VKAPI_ATTR void VKAPI_CALL GetDeviceGroupPeerMemoryFeatures(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures);
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetDeviceMask(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask);
+
+static VKAPI_ATTR void VKAPI_CALL CmdDispatchBase(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ);
+
+static VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDeviceGroups(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties);
+
+static VKAPI_ATTR void VKAPI_CALL GetImageMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+static VKAPI_ATTR void VKAPI_CALL GetBufferMemoryRequirements2(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+static VKAPI_ATTR void VKAPI_CALL GetImageSparseMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements);
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFeatures2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures2*                  pFeatures);
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties2*                pProperties);
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties2*                        pFormatProperties);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceImageFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceImageFormatInfo2*     pImageFormatInfo,
+    VkImageFormatProperties2*                   pImageFormatProperties);
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties2*                   pQueueFamilyProperties);
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMemoryProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties2*          pMemoryProperties);
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceSparseImageFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties2*             pProperties);
+
+static VKAPI_ATTR void VKAPI_CALL TrimCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags);
+
+static VKAPI_ATTR void VKAPI_CALL GetDeviceQueue2(
+    VkDevice                                    device,
+    const VkDeviceQueueInfo2*                   pQueueInfo,
+    VkQueue*                                    pQueue);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateSamplerYcbcrConversion(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion);
+
+static VKAPI_ATTR void VKAPI_CALL DestroySamplerYcbcrConversion(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate);
+
+static VKAPI_ATTR void VKAPI_CALL DestroyDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSetWithTemplate(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData);
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalBufferProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalBufferInfo*   pExternalBufferInfo,
+    VkExternalBufferProperties*                 pExternalBufferProperties);
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalFenceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalFenceInfo*    pExternalFenceInfo,
+    VkExternalFenceProperties*                  pExternalFenceProperties);
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalSemaphoreProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
+    VkExternalSemaphoreProperties*              pExternalSemaphoreProperties);
+
+static VKAPI_ATTR void VKAPI_CALL GetDescriptorSetLayoutSupport(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport);
+
+
+static VKAPI_ATTR void VKAPI_CALL DestroySurfaceKHR(
+    VkInstance                                  instance,
+    VkSurfaceKHR                                surface,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    VkSurfaceKHR                                surface,
+    VkBool32*                                   pSupported);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilitiesKHR*                   pSurfaceCapabilities);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceFormatsKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormatKHR*                         pSurfaceFormats);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfacePresentModesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes);
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateSwapchainKHR(
+    VkDevice                                    device,
+    const VkSwapchainCreateInfoKHR*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchain);
+
+static VKAPI_ATTR void VKAPI_CALL DestroySwapchainKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainImagesKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pSwapchainImageCount,
+    VkImage*                                    pSwapchainImages);
+
+static VKAPI_ATTR VkResult VKAPI_CALL AcquireNextImageKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint64_t                                    timeout,
+    VkSemaphore                                 semaphore,
+    VkFence                                     fence,
+    uint32_t*                                   pImageIndex);
+
+static VKAPI_ATTR VkResult VKAPI_CALL QueuePresentKHR(
+    VkQueue                                     queue,
+    const VkPresentInfoKHR*                     pPresentInfo);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetDeviceGroupPresentCapabilitiesKHR(
+    VkDevice                                    device,
+    VkDeviceGroupPresentCapabilitiesKHR*        pDeviceGroupPresentCapabilities);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetDeviceGroupSurfacePresentModesKHR(
+    VkDevice                                    device,
+    VkSurfaceKHR                                surface,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDevicePresentRectanglesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pRectCount,
+    VkRect2D*                                   pRects);
+
+static VKAPI_ATTR VkResult VKAPI_CALL AcquireNextImage2KHR(
+    VkDevice                                    device,
+    const VkAcquireNextImageInfoKHR*            pAcquireInfo,
+    uint32_t*                                   pImageIndex);
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceDisplayPropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPropertiesKHR*                     pProperties);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceDisplayPlanePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPlanePropertiesKHR*                pProperties);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetDisplayPlaneSupportedDisplaysKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    planeIndex,
+    uint32_t*                                   pDisplayCount,
+    VkDisplayKHR*                               pDisplays);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetDisplayModePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModePropertiesKHR*                 pProperties);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateDisplayModeKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    const VkDisplayModeCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDisplayModeKHR*                           pMode);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetDisplayPlaneCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayModeKHR                            mode,
+    uint32_t                                    planeIndex,
+    VkDisplayPlaneCapabilitiesKHR*              pCapabilities);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateDisplayPlaneSurfaceKHR(
+    VkInstance                                  instance,
+    const VkDisplaySurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateSharedSwapchainsKHR(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainCreateInfoKHR*             pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchains);
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateXlibSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXlibSurfaceCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+static VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceXlibPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    Display*                                    dpy,
+    VisualID                                    visualID);
+#endif /* VK_USE_PLATFORM_XLIB_KHR */
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateXcbSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXcbSurfaceCreateInfoKHR*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+static VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceXcbPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    xcb_connection_t*                           connection,
+    xcb_visualid_t                              visual_id);
+#endif /* VK_USE_PLATFORM_XCB_KHR */
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateWaylandSurfaceKHR(
+    VkInstance                                  instance,
+    const VkWaylandSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+static VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceWaylandPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    struct wl_display*                          display);
+#endif /* VK_USE_PLATFORM_WAYLAND_KHR */
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateAndroidSurfaceKHR(
+    VkInstance                                  instance,
+    const VkAndroidSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif /* VK_USE_PLATFORM_ANDROID_KHR */
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateWin32SurfaceKHR(
+    VkInstance                                  instance,
+    const VkWin32SurfaceCreateInfoKHR*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+static VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceWin32PresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex);
+#endif /* VK_USE_PLATFORM_WIN32_KHR */
+
+
+
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFeatures2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures2*                  pFeatures);
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties2*                pProperties);
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties2*                        pFormatProperties);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceImageFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceImageFormatInfo2*     pImageFormatInfo,
+    VkImageFormatProperties2*                   pImageFormatProperties);
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties2*                   pQueueFamilyProperties);
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMemoryProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties2*          pMemoryProperties);
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceSparseImageFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties2*             pProperties);
+
+
+static VKAPI_ATTR void VKAPI_CALL GetDeviceGroupPeerMemoryFeaturesKHR(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures);
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetDeviceMaskKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask);
+
+static VKAPI_ATTR void VKAPI_CALL CmdDispatchBaseKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ);
+
+
+
+static VKAPI_ATTR void VKAPI_CALL TrimCommandPoolKHR(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags);
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDeviceGroupsKHR(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties);
+
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalBufferPropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalBufferInfo*   pExternalBufferInfo,
+    VkExternalBufferProperties*                 pExternalBufferProperties);
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetMemoryWin32HandleKHR(
+    VkDevice                                    device,
+    const VkMemoryGetWin32HandleInfoKHR*        pGetWin32HandleInfo,
+    HANDLE*                                     pHandle);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetMemoryWin32HandlePropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    HANDLE                                      handle,
+    VkMemoryWin32HandlePropertiesKHR*           pMemoryWin32HandleProperties);
+#endif /* VK_USE_PLATFORM_WIN32_KHR */
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetMemoryFdKHR(
+    VkDevice                                    device,
+    const VkMemoryGetFdInfoKHR*                 pGetFdInfo,
+    int*                                        pFd);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetMemoryFdPropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    int                                         fd,
+    VkMemoryFdPropertiesKHR*                    pMemoryFdProperties);
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#endif /* VK_USE_PLATFORM_WIN32_KHR */
+
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalSemaphorePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
+    VkExternalSemaphoreProperties*              pExternalSemaphoreProperties);
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+static VKAPI_ATTR VkResult VKAPI_CALL ImportSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreWin32HandleInfoKHR*  pImportSemaphoreWin32HandleInfo);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetWin32HandleInfoKHR*     pGetWin32HandleInfo,
+    HANDLE*                                     pHandle);
+#endif /* VK_USE_PLATFORM_WIN32_KHR */
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL ImportSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreFdInfoKHR*           pImportSemaphoreFdInfo);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetFdInfoKHR*              pGetFdInfo,
+    int*                                        pFd);
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdPushDescriptorSetKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites);
+
+static VKAPI_ATTR void VKAPI_CALL CmdPushDescriptorSetWithTemplateKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    const void*                                 pData);
+
+
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate);
+
+static VKAPI_ATTR void VKAPI_CALL DestroyDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSetWithTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData);
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateRenderPass2KHR(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo2KHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass);
+
+static VKAPI_ATTR void VKAPI_CALL CmdBeginRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo);
+
+static VKAPI_ATTR void VKAPI_CALL CmdNextSubpass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo);
+
+static VKAPI_ATTR void VKAPI_CALL CmdEndRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo);
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainStatusKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain);
+
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalFencePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalFenceInfo*    pExternalFenceInfo,
+    VkExternalFenceProperties*                  pExternalFenceProperties);
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+static VKAPI_ATTR VkResult VKAPI_CALL ImportFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportFenceWin32HandleInfoKHR*      pImportFenceWin32HandleInfo);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkFenceGetWin32HandleInfoKHR*         pGetWin32HandleInfo,
+    HANDLE*                                     pHandle);
+#endif /* VK_USE_PLATFORM_WIN32_KHR */
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL ImportFenceFdKHR(
+    VkDevice                                    device,
+    const VkImportFenceFdInfoKHR*               pImportFenceFdInfo);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetFenceFdKHR(
+    VkDevice                                    device,
+    const VkFenceGetFdInfoKHR*                  pGetFdInfo,
+    int*                                        pFd);
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t*                                   pCounterCount,
+    VkPerformanceCounterKHR*                    pCounters,
+    VkPerformanceCounterDescriptionKHR*         pCounterDescriptions);
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkQueryPoolPerformanceCreateInfoKHR*  pPerformanceQueryCreateInfo,
+    uint32_t*                                   pNumPasses);
+
+static VKAPI_ATTR VkResult VKAPI_CALL AcquireProfilingLockKHR(
+    VkDevice                                    device,
+    const VkAcquireProfilingLockInfoKHR*        pInfo);
+
+static VKAPI_ATTR void VKAPI_CALL ReleaseProfilingLockKHR(
+    VkDevice                                    device);
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilities2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkSurfaceCapabilities2KHR*                  pSurfaceCapabilities);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceFormats2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormat2KHR*                        pSurfaceFormats);
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceDisplayProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayProperties2KHR*                    pProperties);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceDisplayPlaneProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPlaneProperties2KHR*               pProperties);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetDisplayModeProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModeProperties2KHR*                pProperties);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetDisplayPlaneCapabilities2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDisplayPlaneInfo2KHR*               pDisplayPlaneInfo,
+    VkDisplayPlaneCapabilities2KHR*             pCapabilities);
+
+
+
+
+
+static VKAPI_ATTR void VKAPI_CALL GetImageMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+static VKAPI_ATTR void VKAPI_CALL GetBufferMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+static VKAPI_ATTR void VKAPI_CALL GetImageSparseMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements);
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateSamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion);
+
+static VKAPI_ATTR void VKAPI_CALL DestroySamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator);
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos);
+
+static VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos);
+
+
+static VKAPI_ATTR void VKAPI_CALL GetDescriptorSetLayoutSupportKHR(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport);
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdDrawIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+static VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+
+
+
+
+
+
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetSemaphoreCounterValueKHR(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    uint64_t*                                   pValue);
+
+static VKAPI_ATTR VkResult VKAPI_CALL WaitSemaphoresKHR(
+    VkDevice                                    device,
+    const VkSemaphoreWaitInfoKHR*               pWaitInfo,
+    uint64_t                                    timeout);
+
+static VKAPI_ATTR VkResult VKAPI_CALL SignalSemaphoreKHR(
+    VkDevice                                    device,
+    const VkSemaphoreSignalInfoKHR*             pSignalInfo);
+
+
+
+
+
+
+
+static VKAPI_ATTR VkDeviceAddress VKAPI_CALL GetBufferDeviceAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo);
+
+static VKAPI_ATTR uint64_t VKAPI_CALL GetBufferOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo);
+
+static VKAPI_ATTR uint64_t VKAPI_CALL GetDeviceMemoryOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo);
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPipelineExecutablePropertiesKHR(
+    VkDevice                                    device,
+    const VkPipelineInfoKHR*                    pPipelineInfo,
+    uint32_t*                                   pExecutableCount,
+    VkPipelineExecutablePropertiesKHR*          pProperties);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPipelineExecutableStatisticsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pStatisticCount,
+    VkPipelineExecutableStatisticKHR*           pStatistics);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPipelineExecutableInternalRepresentationsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pInternalRepresentationCount,
+    VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations);
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    const VkDebugReportCallbackCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugReportCallbackEXT*                   pCallback);
+
+static VKAPI_ATTR void VKAPI_CALL DestroyDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    VkDebugReportCallbackEXT                    callback,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR void VKAPI_CALL DebugReportMessageEXT(
+    VkInstance                                  instance,
+    VkDebugReportFlagsEXT                       flags,
+    VkDebugReportObjectTypeEXT                  objectType,
+    uint64_t                                    object,
+    size_t                                      location,
+    int32_t                                     messageCode,
+    const char*                                 pLayerPrefix,
+    const char*                                 pMessage);
+
+
+
+
+
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL DebugMarkerSetObjectTagEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectTagInfoEXT*        pTagInfo);
+
+static VKAPI_ATTR VkResult VKAPI_CALL DebugMarkerSetObjectNameEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectNameInfoEXT*       pNameInfo);
+
+static VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerBeginEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo);
+
+static VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerEndEXT(
+    VkCommandBuffer                             commandBuffer);
+
+static VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerInsertEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo);
+
+
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdBindTransformFeedbackBuffersEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets,
+    const VkDeviceSize*                         pSizes);
+
+static VKAPI_ATTR void VKAPI_CALL CmdBeginTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets);
+
+static VKAPI_ATTR void VKAPI_CALL CmdEndTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets);
+
+static VKAPI_ATTR void VKAPI_CALL CmdBeginQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags,
+    uint32_t                                    index);
+
+static VKAPI_ATTR void VKAPI_CALL CmdEndQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    uint32_t                                    index);
+
+static VKAPI_ATTR void VKAPI_CALL CmdDrawIndirectByteCountEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstInstance,
+    VkBuffer                                    counterBuffer,
+    VkDeviceSize                                counterBufferOffset,
+    uint32_t                                    counterOffset,
+    uint32_t                                    vertexStride);
+
+
+static VKAPI_ATTR uint32_t VKAPI_CALL GetImageViewHandleNVX(
+    VkDevice                                    device,
+    const VkImageViewHandleInfoNVX*             pInfo);
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdDrawIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+static VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+
+
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetShaderInfoAMD(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    VkShaderStageFlagBits                       shaderStage,
+    VkShaderInfoTypeAMD                         infoType,
+    size_t*                                     pInfoSize,
+    void*                                       pInfo);
+
+
+#ifdef VK_USE_PLATFORM_GGP
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateStreamDescriptorSurfaceGGP(
+    VkInstance                                  instance,
+    const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif /* VK_USE_PLATFORM_GGP */
+
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceExternalImageFormatPropertiesNV(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageCreateFlags                          flags,
+    VkExternalMemoryHandleTypeFlagsNV           externalHandleType,
+    VkExternalImageFormatPropertiesNV*          pExternalImageFormatProperties);
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetMemoryWin32HandleNV(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkExternalMemoryHandleTypeFlagsNV           handleType,
+    HANDLE*                                     pHandle);
+#endif /* VK_USE_PLATFORM_WIN32_KHR */
+
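+// The empty platform guard below is presumed intentional in the generated
+// source: the corresponding Win32-only extension declares no additional entry
+// points for this layer to intercept.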
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#endif /* VK_USE_PLATFORM_WIN32_KHR */
+
+
+#ifdef VK_USE_PLATFORM_VI_NN
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateViSurfaceNN(
+    VkInstance                                  instance,
+    const VkViSurfaceCreateInfoNN*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif /* VK_USE_PLATFORM_VI_NN */
+
+
+
+
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdBeginConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkConditionalRenderingBeginInfoEXT*   pConditionalRenderingBegin);
+
+static VKAPI_ATTR void VKAPI_CALL CmdEndConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer);
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdProcessCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdProcessCommandsInfoNVX*          pProcessCommandsInfo);
+
+static VKAPI_ATTR void VKAPI_CALL CmdReserveSpaceForCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdReserveSpaceForCommandsInfoNVX*  pReserveSpaceInfo);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkIndirectCommandsLayoutNVX*                pIndirectCommandsLayout);
+
+static VKAPI_ATTR void VKAPI_CALL DestroyIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    VkIndirectCommandsLayoutNVX                 indirectCommandsLayout,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateObjectTableNVX(
+    VkDevice                                    device,
+    const VkObjectTableCreateInfoNVX*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkObjectTableNVX*                           pObjectTable);
+
+static VKAPI_ATTR void VKAPI_CALL DestroyObjectTableNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR VkResult VKAPI_CALL RegisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectTableEntryNVX* const*         ppObjectTableEntries,
+    const uint32_t*                             pObjectIndices);
+
+static VKAPI_ATTR VkResult VKAPI_CALL UnregisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectEntryTypeNVX*                 pObjectEntryTypes,
+    const uint32_t*                             pObjectIndices);
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceGeneratedCommandsPropertiesNVX(
+    VkPhysicalDevice                            physicalDevice,
+    VkDeviceGeneratedCommandsFeaturesNVX*       pFeatures,
+    VkDeviceGeneratedCommandsLimitsNVX*         pLimits);
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetViewportWScalingNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewportWScalingNV*                 pViewportWScalings);
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL ReleaseDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display);
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+static VKAPI_ATTR VkResult VKAPI_CALL AcquireXlibDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    VkDisplayKHR                                display);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetRandROutputDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    RROutput                                    rrOutput,
+    VkDisplayKHR*                               pDisplay);
+#endif /* VK_USE_PLATFORM_XLIB_XRANDR_EXT */
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilities2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilities2EXT*                  pSurfaceCapabilities);
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL DisplayPowerControlEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayPowerInfoEXT*                pDisplayPowerInfo);
+
+static VKAPI_ATTR VkResult VKAPI_CALL RegisterDeviceEventEXT(
+    VkDevice                                    device,
+    const VkDeviceEventInfoEXT*                 pDeviceEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence);
+
+static VKAPI_ATTR VkResult VKAPI_CALL RegisterDisplayEventEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayEventInfoEXT*                pDisplayEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainCounterEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkSurfaceCounterFlagBitsEXT                 counter,
+    uint64_t*                                   pCounterValue);
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetRefreshCycleDurationGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkRefreshCycleDurationGOOGLE*               pDisplayTimingProperties);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPastPresentationTimingGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pPresentationTimingCount,
+    VkPastPresentationTimingGOOGLE*             pPresentationTimings);
+
+
+
+
+
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetDiscardRectangleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstDiscardRectangle,
+    uint32_t                                    discardRectangleCount,
+    const VkRect2D*                             pDiscardRectangles);
+
+
+
+
+
+static VKAPI_ATTR void VKAPI_CALL SetHdrMetadataEXT(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainKHR*                       pSwapchains,
+    const VkHdrMetadataEXT*                     pMetadata);
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateIOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkIOSSurfaceCreateInfoMVK*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif /* VK_USE_PLATFORM_IOS_MVK */
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateMacOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkMacOSSurfaceCreateInfoMVK*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif /* VK_USE_PLATFORM_MACOS_MVK */
+
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL SetDebugUtilsObjectNameEXT(
+    VkDevice                                    device,
+    const VkDebugUtilsObjectNameInfoEXT*        pNameInfo);
+
+static VKAPI_ATTR VkResult VKAPI_CALL SetDebugUtilsObjectTagEXT(
+    VkDevice                                    device,
+    const VkDebugUtilsObjectTagInfoEXT*         pTagInfo);
+
+static VKAPI_ATTR void VKAPI_CALL QueueBeginDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+
+static VKAPI_ATTR void VKAPI_CALL QueueEndDebugUtilsLabelEXT(
+    VkQueue                                     queue);
+
+static VKAPI_ATTR void VKAPI_CALL QueueInsertDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+
+static VKAPI_ATTR void VKAPI_CALL CmdBeginDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+
+static VKAPI_ATTR void VKAPI_CALL CmdEndDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer);
+
+static VKAPI_ATTR void VKAPI_CALL CmdInsertDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    const VkDebugUtilsMessengerCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugUtilsMessengerEXT*                   pMessenger);
+
+static VKAPI_ATTR void VKAPI_CALL DestroyDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessengerEXT                    messenger,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR void VKAPI_CALL SubmitDebugUtilsMessageEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessageSeverityFlagBitsEXT      messageSeverity,
+    VkDebugUtilsMessageTypeFlagsEXT             messageTypes,
+    const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData);
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetAndroidHardwareBufferPropertiesANDROID(
+    VkDevice                                    device,
+    const struct AHardwareBuffer*               buffer,
+    VkAndroidHardwareBufferPropertiesANDROID*   pProperties);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetMemoryAndroidHardwareBufferANDROID(
+    VkDevice                                    device,
+    const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
+    struct AHardwareBuffer**                    pBuffer);
+#endif /* VK_USE_PLATFORM_ANDROID_KHR */
+
+
+
+
+
+
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetSampleLocationsEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkSampleLocationsInfoEXT*             pSampleLocationsInfo);
+
+static VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMultisamplePropertiesEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSampleCountFlagBits                       samples,
+    VkMultisamplePropertiesEXT*                 pMultisampleProperties);
+
+
+
+
+
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetImageDrmFormatModifierPropertiesEXT(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkImageDrmFormatModifierPropertiesEXT*      pProperties);
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateValidationCacheEXT(
+    VkDevice                                    device,
+    const VkValidationCacheCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkValidationCacheEXT*                       pValidationCache);
+
+static VKAPI_ATTR void VKAPI_CALL DestroyValidationCacheEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR VkResult VKAPI_CALL MergeValidationCachesEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkValidationCacheEXT*                 pSrcCaches);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetValidationCacheDataEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    size_t*                                     pDataSize,
+    void*                                       pData);
+
+
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdBindShadingRateImageNV(
+    VkCommandBuffer                             commandBuffer,
+    VkImageView                                 imageView,
+    VkImageLayout                               imageLayout);
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetViewportShadingRatePaletteNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkShadingRatePaletteNV*               pShadingRatePalettes);
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetCoarseSampleOrderNV(
+    VkCommandBuffer                             commandBuffer,
+    VkCoarseSampleOrderTypeNV                   sampleOrderType,
+    uint32_t                                    customSampleOrderCount,
+    const VkCoarseSampleOrderCustomNV*          pCustomSampleOrders);
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateAccelerationStructureNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureCreateInfoNV*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkAccelerationStructureNV*                  pAccelerationStructure);
+
+static VKAPI_ATTR void VKAPI_CALL DestroyAccelerationStructureNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    const VkAllocationCallbacks*                pAllocator);
+
+static VKAPI_ATTR void VKAPI_CALL GetAccelerationStructureMemoryRequirementsNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
+    VkMemoryRequirements2KHR*                   pMemoryRequirements);
+
+static VKAPI_ATTR VkResult VKAPI_CALL BindAccelerationStructureMemoryNV(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindAccelerationStructureMemoryInfoNV* pBindInfos);
+
+static VKAPI_ATTR void VKAPI_CALL CmdBuildAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    const VkAccelerationStructureInfoNV*        pInfo,
+    VkBuffer                                    instanceData,
+    VkDeviceSize                                instanceOffset,
+    VkBool32                                    update,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkBuffer                                    scratch,
+    VkDeviceSize                                scratchOffset);
+
+static VKAPI_ATTR void VKAPI_CALL CmdCopyAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkCopyAccelerationStructureModeNV           mode);
+
+static VKAPI_ATTR void VKAPI_CALL CmdTraceRaysNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    raygenShaderBindingTableBuffer,
+    VkDeviceSize                                raygenShaderBindingOffset,
+    VkBuffer                                    missShaderBindingTableBuffer,
+    VkDeviceSize                                missShaderBindingOffset,
+    VkDeviceSize                                missShaderBindingStride,
+    VkBuffer                                    hitShaderBindingTableBuffer,
+    VkDeviceSize                                hitShaderBindingOffset,
+    VkDeviceSize                                hitShaderBindingStride,
+    VkBuffer                                    callableShaderBindingTableBuffer,
+    VkDeviceSize                                callableShaderBindingOffset,
+    VkDeviceSize                                callableShaderBindingStride,
+    uint32_t                                    width,
+    uint32_t                                    height,
+    uint32_t                                    depth);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateRayTracingPipelinesNV(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkRayTracingPipelineCreateInfoNV*     pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetRayTracingShaderGroupHandlesNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    firstGroup,
+    uint32_t                                    groupCount,
+    size_t                                      dataSize,
+    void*                                       pData);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetAccelerationStructureHandleNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    size_t                                      dataSize,
+    void*                                       pData);
+
+static VKAPI_ATTR void VKAPI_CALL CmdWriteAccelerationStructuresPropertiesNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    accelerationStructureCount,
+    const VkAccelerationStructureNV*            pAccelerationStructures,
+    VkQueryType                                 queryType,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CompileDeferredNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    shader);
+
+
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetMemoryHostPointerPropertiesEXT(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    const void*                                 pHostPointer,
+    VkMemoryHostPointerPropertiesEXT*           pMemoryHostPointerProperties);
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdWriteBufferMarkerAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    uint32_t                                    marker);
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceCalibrateableTimeDomainsEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pTimeDomainCount,
+    VkTimeDomainEXT*                            pTimeDomains);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetCalibratedTimestampsEXT(
+    VkDevice                                    device,
+    uint32_t                                    timestampCount,
+    const VkCalibratedTimestampInfoEXT*         pTimestampInfos,
+    uint64_t*                                   pTimestamps,
+    uint64_t*                                   pMaxDeviation);
+
+
+
+
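+// Another presumably intentional empty platform guard from the generated
+// source: the corresponding GGP-only extension declares no additional entry
+// points for this layer to intercept.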
+#ifdef VK_USE_PLATFORM_GGP
+#endif /* VK_USE_PLATFORM_GGP */
+
+
+
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdDrawMeshTasksNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    taskCount,
+    uint32_t                                    firstTask);
+
+static VKAPI_ATTR void VKAPI_CALL CmdDrawMeshTasksIndirectNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride);
+
+static VKAPI_ATTR void VKAPI_CALL CmdDrawMeshTasksIndirectCountNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetExclusiveScissorNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstExclusiveScissor,
+    uint32_t                                    exclusiveScissorCount,
+    const VkRect2D*                             pExclusiveScissors);
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetCheckpointNV(
+    VkCommandBuffer                             commandBuffer,
+    const void*                                 pCheckpointMarker);
+
+static VKAPI_ATTR void VKAPI_CALL GetQueueCheckpointDataNV(
+    VkQueue                                     queue,
+    uint32_t*                                   pCheckpointDataCount,
+    VkCheckpointDataNV*                         pCheckpointData);
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL InitializePerformanceApiINTEL(
+    VkDevice                                    device,
+    const VkInitializePerformanceApiInfoINTEL*  pInitializeInfo);
+
+static VKAPI_ATTR void VKAPI_CALL UninitializePerformanceApiINTEL(
+    VkDevice                                    device);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CmdSetPerformanceMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceMarkerInfoINTEL*         pMarkerInfo);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CmdSetPerformanceStreamMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceStreamMarkerInfoINTEL*   pMarkerInfo);
+
+static VKAPI_ATTR VkResult VKAPI_CALL CmdSetPerformanceOverrideINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceOverrideInfoINTEL*       pOverrideInfo);
+
+static VKAPI_ATTR VkResult VKAPI_CALL AcquirePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo,
+    VkPerformanceConfigurationINTEL*            pConfiguration);
+
+static VKAPI_ATTR VkResult VKAPI_CALL ReleasePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    VkPerformanceConfigurationINTEL             configuration);
+
+static VKAPI_ATTR VkResult VKAPI_CALL QueueSetPerformanceConfigurationINTEL(
+    VkQueue                                     queue,
+    VkPerformanceConfigurationINTEL             configuration);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPerformanceParameterINTEL(
+    VkDevice                                    device,
+    VkPerformanceParameterTypeINTEL             parameter,
+    VkPerformanceValueINTEL*                    pValue);
+
+
+
+static VKAPI_ATTR void VKAPI_CALL SetLocalDimmingAMD(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapChain,
+    VkBool32                                    localDimmingEnable);
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateImagePipeSurfaceFUCHSIA(
+    VkInstance                                  instance,
+    const VkImagePipeSurfaceCreateInfoFUCHSIA*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif /* VK_USE_PLATFORM_FUCHSIA */
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateMetalSurfaceEXT(
+    VkInstance                                  instance,
+    const VkMetalSurfaceCreateInfoEXT*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif /* VK_USE_PLATFORM_METAL_EXT */
+
+
+
+
+
+
+
+
+
+
+
+
+static VKAPI_ATTR VkDeviceAddress VKAPI_CALL GetBufferDeviceAddressEXT(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo);
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceToolPropertiesEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pToolCount,
+    VkPhysicalDeviceToolPropertiesEXT*          pToolProperties);
+
+
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceCooperativeMatrixPropertiesNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkCooperativeMatrixPropertiesNV*            pProperties);
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pCombinationCount,
+    VkFramebufferMixedSamplesCombinationNV*     pCombinations);
+
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfacePresentModes2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes);
+
+static VKAPI_ATTR VkResult VKAPI_CALL AcquireFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain);
+
+static VKAPI_ATTR VkResult VKAPI_CALL ReleaseFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain);
+
+static VKAPI_ATTR VkResult VKAPI_CALL GetDeviceGroupSurfacePresentModes2EXT(
+    VkDevice                                    device,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes);
+#endif /* VK_USE_PLATFORM_WIN32_KHR */
+
+
+static VKAPI_ATTR VkResult VKAPI_CALL CreateHeadlessSurfaceEXT(
+    VkInstance                                  instance,
+    const VkHeadlessSurfaceCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+
+static VKAPI_ATTR void VKAPI_CALL CmdSetLineStippleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    lineStippleFactor,
+    uint16_t                                    lineStipplePattern);
+
+
+static VKAPI_ATTR void VKAPI_CALL ResetQueryPoolEXT(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount);
+
+
+
+
+
+// Map of all APIs to be intercepted by this layer
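+//
+// A minimal illustrative sketch (not part of the original interface; the
+// helper name is hypothetical): a layer's vkGetInstanceProcAddr /
+// vkGetDeviceProcAddr implementation typically consults this map, returning
+// its own function pointer when the requested name is intercepted and
+// otherwise letting the query fall through to the next layer in the chain.
+//
+//   static PFN_vkVoidFunction LookupIntercept(const char* pName) {
+//       const auto it = name_to_funcptr_map.find(pName);
+//       if (it != name_to_funcptr_map.end()) {
+//           return reinterpret_cast<PFN_vkVoidFunction>(it->second);
+//       }
+//       return nullptr;  // not intercepted; forward down the layer chain
+//   }
+//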
+static const std::unordered_map<std::string, void*> name_to_funcptr_map = {
+    {"vkCreateInstance", (void*)CreateInstance},
+    {"vkDestroyInstance", (void*)DestroyInstance},
+    {"vkEnumeratePhysicalDevices", (void*)EnumeratePhysicalDevices},
+    {"vkGetPhysicalDeviceFeatures", (void*)GetPhysicalDeviceFeatures},
+    {"vkGetPhysicalDeviceFormatProperties", (void*)GetPhysicalDeviceFormatProperties},
+    {"vkGetPhysicalDeviceImageFormatProperties", (void*)GetPhysicalDeviceImageFormatProperties},
+    {"vkGetPhysicalDeviceProperties", (void*)GetPhysicalDeviceProperties},
+    {"vkGetPhysicalDeviceQueueFamilyProperties", (void*)GetPhysicalDeviceQueueFamilyProperties},
+    {"vkGetPhysicalDeviceMemoryProperties", (void*)GetPhysicalDeviceMemoryProperties},
+    {"vkGetInstanceProcAddr", (void*)GetInstanceProcAddr},
+    {"vkGetDeviceProcAddr", (void*)GetDeviceProcAddr},
+    {"vkCreateDevice", (void*)CreateDevice},
+    {"vkDestroyDevice", (void*)DestroyDevice},
+    {"vkEnumerateInstanceExtensionProperties", (void*)EnumerateInstanceExtensionProperties},
+    {"vkEnumerateDeviceExtensionProperties", (void*)EnumerateDeviceExtensionProperties},
+    {"vkEnumerateInstanceLayerProperties", (void*)EnumerateInstanceLayerProperties},
+    {"vkEnumerateDeviceLayerProperties", (void*)EnumerateDeviceLayerProperties},
+    {"vkGetDeviceQueue", (void*)GetDeviceQueue},
+    {"vkQueueSubmit", (void*)QueueSubmit},
+    {"vkQueueWaitIdle", (void*)QueueWaitIdle},
+    {"vkDeviceWaitIdle", (void*)DeviceWaitIdle},
+    {"vkAllocateMemory", (void*)AllocateMemory},
+    {"vkFreeMemory", (void*)FreeMemory},
+    {"vkMapMemory", (void*)MapMemory},
+    {"vkUnmapMemory", (void*)UnmapMemory},
+    {"vkFlushMappedMemoryRanges", (void*)FlushMappedMemoryRanges},
+    {"vkInvalidateMappedMemoryRanges", (void*)InvalidateMappedMemoryRanges},
+    {"vkGetDeviceMemoryCommitment", (void*)GetDeviceMemoryCommitment},
+    {"vkBindBufferMemory", (void*)BindBufferMemory},
+    {"vkBindImageMemory", (void*)BindImageMemory},
+    {"vkGetBufferMemoryRequirements", (void*)GetBufferMemoryRequirements},
+    {"vkGetImageMemoryRequirements", (void*)GetImageMemoryRequirements},
+    {"vkGetImageSparseMemoryRequirements", (void*)GetImageSparseMemoryRequirements},
+    {"vkGetPhysicalDeviceSparseImageFormatProperties", (void*)GetPhysicalDeviceSparseImageFormatProperties},
+    {"vkQueueBindSparse", (void*)QueueBindSparse},
+    {"vkCreateFence", (void*)CreateFence},
+    {"vkDestroyFence", (void*)DestroyFence},
+    {"vkResetFences", (void*)ResetFences},
+    {"vkGetFenceStatus", (void*)GetFenceStatus},
+    {"vkWaitForFences", (void*)WaitForFences},
+    {"vkCreateSemaphore", (void*)CreateSemaphore},
+    {"vkDestroySemaphore", (void*)DestroySemaphore},
+    {"vkCreateEvent", (void*)CreateEvent},
+    {"vkDestroyEvent", (void*)DestroyEvent},
+    {"vkGetEventStatus", (void*)GetEventStatus},
+    {"vkSetEvent", (void*)SetEvent},
+    {"vkResetEvent", (void*)ResetEvent},
+    {"vkCreateQueryPool", (void*)CreateQueryPool},
+    {"vkDestroyQueryPool", (void*)DestroyQueryPool},
+    {"vkGetQueryPoolResults", (void*)GetQueryPoolResults},
+    {"vkCreateBuffer", (void*)CreateBuffer},
+    {"vkDestroyBuffer", (void*)DestroyBuffer},
+    {"vkCreateBufferView", (void*)CreateBufferView},
+    {"vkDestroyBufferView", (void*)DestroyBufferView},
+    {"vkCreateImage", (void*)CreateImage},
+    {"vkDestroyImage", (void*)DestroyImage},
+    {"vkGetImageSubresourceLayout", (void*)GetImageSubresourceLayout},
+    {"vkCreateImageView", (void*)CreateImageView},
+    {"vkDestroyImageView", (void*)DestroyImageView},
+    {"vkCreateShaderModule", (void*)CreateShaderModule},
+    {"vkDestroyShaderModule", (void*)DestroyShaderModule},
+    {"vkCreatePipelineCache", (void*)CreatePipelineCache},
+    {"vkDestroyPipelineCache", (void*)DestroyPipelineCache},
+    {"vkGetPipelineCacheData", (void*)GetPipelineCacheData},
+    {"vkMergePipelineCaches", (void*)MergePipelineCaches},
+    {"vkCreateGraphicsPipelines", (void*)CreateGraphicsPipelines},
+    {"vkCreateComputePipelines", (void*)CreateComputePipelines},
+    {"vkDestroyPipeline", (void*)DestroyPipeline},
+    {"vkCreatePipelineLayout", (void*)CreatePipelineLayout},
+    {"vkDestroyPipelineLayout", (void*)DestroyPipelineLayout},
+    {"vkCreateSampler", (void*)CreateSampler},
+    {"vkDestroySampler", (void*)DestroySampler},
+    {"vkCreateDescriptorSetLayout", (void*)CreateDescriptorSetLayout},
+    {"vkDestroyDescriptorSetLayout", (void*)DestroyDescriptorSetLayout},
+    {"vkCreateDescriptorPool", (void*)CreateDescriptorPool},
+    {"vkDestroyDescriptorPool", (void*)DestroyDescriptorPool},
+    {"vkResetDescriptorPool", (void*)ResetDescriptorPool},
+    {"vkAllocateDescriptorSets", (void*)AllocateDescriptorSets},
+    {"vkFreeDescriptorSets", (void*)FreeDescriptorSets},
+    {"vkUpdateDescriptorSets", (void*)UpdateDescriptorSets},
+    {"vkCreateFramebuffer", (void*)CreateFramebuffer},
+    {"vkDestroyFramebuffer", (void*)DestroyFramebuffer},
+    {"vkCreateRenderPass", (void*)CreateRenderPass},
+    {"vkDestroyRenderPass", (void*)DestroyRenderPass},
+    {"vkGetRenderAreaGranularity", (void*)GetRenderAreaGranularity},
+    {"vkCreateCommandPool", (void*)CreateCommandPool},
+    {"vkDestroyCommandPool", (void*)DestroyCommandPool},
+    {"vkResetCommandPool", (void*)ResetCommandPool},
+    {"vkAllocateCommandBuffers", (void*)AllocateCommandBuffers},
+    {"vkFreeCommandBuffers", (void*)FreeCommandBuffers},
+    {"vkBeginCommandBuffer", (void*)BeginCommandBuffer},
+    {"vkEndCommandBuffer", (void*)EndCommandBuffer},
+    {"vkResetCommandBuffer", (void*)ResetCommandBuffer},
+    {"vkCmdBindPipeline", (void*)CmdBindPipeline},
+    {"vkCmdSetViewport", (void*)CmdSetViewport},
+    {"vkCmdSetScissor", (void*)CmdSetScissor},
+    {"vkCmdSetLineWidth", (void*)CmdSetLineWidth},
+    {"vkCmdSetDepthBias", (void*)CmdSetDepthBias},
+    {"vkCmdSetBlendConstants", (void*)CmdSetBlendConstants},
+    {"vkCmdSetDepthBounds", (void*)CmdSetDepthBounds},
+    {"vkCmdSetStencilCompareMask", (void*)CmdSetStencilCompareMask},
+    {"vkCmdSetStencilWriteMask", (void*)CmdSetStencilWriteMask},
+    {"vkCmdSetStencilReference", (void*)CmdSetStencilReference},
+    {"vkCmdBindDescriptorSets", (void*)CmdBindDescriptorSets},
+    {"vkCmdBindIndexBuffer", (void*)CmdBindIndexBuffer},
+    {"vkCmdBindVertexBuffers", (void*)CmdBindVertexBuffers},
+    {"vkCmdDraw", (void*)CmdDraw},
+    {"vkCmdDrawIndexed", (void*)CmdDrawIndexed},
+    {"vkCmdDrawIndirect", (void*)CmdDrawIndirect},
+    {"vkCmdDrawIndexedIndirect", (void*)CmdDrawIndexedIndirect},
+    {"vkCmdDispatch", (void*)CmdDispatch},
+    {"vkCmdDispatchIndirect", (void*)CmdDispatchIndirect},
+    {"vkCmdCopyBuffer", (void*)CmdCopyBuffer},
+    {"vkCmdCopyImage", (void*)CmdCopyImage},
+    {"vkCmdBlitImage", (void*)CmdBlitImage},
+    {"vkCmdCopyBufferToImage", (void*)CmdCopyBufferToImage},
+    {"vkCmdCopyImageToBuffer", (void*)CmdCopyImageToBuffer},
+    {"vkCmdUpdateBuffer", (void*)CmdUpdateBuffer},
+    {"vkCmdFillBuffer", (void*)CmdFillBuffer},
+    {"vkCmdClearColorImage", (void*)CmdClearColorImage},
+    {"vkCmdClearDepthStencilImage", (void*)CmdClearDepthStencilImage},
+    {"vkCmdClearAttachments", (void*)CmdClearAttachments},
+    {"vkCmdResolveImage", (void*)CmdResolveImage},
+    {"vkCmdSetEvent", (void*)CmdSetEvent},
+    {"vkCmdResetEvent", (void*)CmdResetEvent},
+    {"vkCmdWaitEvents", (void*)CmdWaitEvents},
+    {"vkCmdPipelineBarrier", (void*)CmdPipelineBarrier},
+    {"vkCmdBeginQuery", (void*)CmdBeginQuery},
+    {"vkCmdEndQuery", (void*)CmdEndQuery},
+    {"vkCmdResetQueryPool", (void*)CmdResetQueryPool},
+    {"vkCmdWriteTimestamp", (void*)CmdWriteTimestamp},
+    {"vkCmdCopyQueryPoolResults", (void*)CmdCopyQueryPoolResults},
+    {"vkCmdPushConstants", (void*)CmdPushConstants},
+    {"vkCmdBeginRenderPass", (void*)CmdBeginRenderPass},
+    {"vkCmdNextSubpass", (void*)CmdNextSubpass},
+    {"vkCmdEndRenderPass", (void*)CmdEndRenderPass},
+    {"vkCmdExecuteCommands", (void*)CmdExecuteCommands},
+    {"vkEnumerateInstanceVersion", (void*)EnumerateInstanceVersion},
+    {"vkBindBufferMemory2", (void*)BindBufferMemory2},
+    {"vkBindImageMemory2", (void*)BindImageMemory2},
+    {"vkGetDeviceGroupPeerMemoryFeatures", (void*)GetDeviceGroupPeerMemoryFeatures},
+    {"vkCmdSetDeviceMask", (void*)CmdSetDeviceMask},
+    {"vkCmdDispatchBase", (void*)CmdDispatchBase},
+    {"vkEnumeratePhysicalDeviceGroups", (void*)EnumeratePhysicalDeviceGroups},
+    {"vkGetImageMemoryRequirements2", (void*)GetImageMemoryRequirements2},
+    {"vkGetBufferMemoryRequirements2", (void*)GetBufferMemoryRequirements2},
+    {"vkGetImageSparseMemoryRequirements2", (void*)GetImageSparseMemoryRequirements2},
+    {"vkGetPhysicalDeviceFeatures2", (void*)GetPhysicalDeviceFeatures2},
+    {"vkGetPhysicalDeviceProperties2", (void*)GetPhysicalDeviceProperties2},
+    {"vkGetPhysicalDeviceFormatProperties2", (void*)GetPhysicalDeviceFormatProperties2},
+    {"vkGetPhysicalDeviceImageFormatProperties2", (void*)GetPhysicalDeviceImageFormatProperties2},
+    {"vkGetPhysicalDeviceQueueFamilyProperties2", (void*)GetPhysicalDeviceQueueFamilyProperties2},
+    {"vkGetPhysicalDeviceMemoryProperties2", (void*)GetPhysicalDeviceMemoryProperties2},
+    {"vkGetPhysicalDeviceSparseImageFormatProperties2", (void*)GetPhysicalDeviceSparseImageFormatProperties2},
+    {"vkTrimCommandPool", (void*)TrimCommandPool},
+    {"vkGetDeviceQueue2", (void*)GetDeviceQueue2},
+    {"vkCreateSamplerYcbcrConversion", (void*)CreateSamplerYcbcrConversion},
+    {"vkDestroySamplerYcbcrConversion", (void*)DestroySamplerYcbcrConversion},
+    {"vkCreateDescriptorUpdateTemplate", (void*)CreateDescriptorUpdateTemplate},
+    {"vkDestroyDescriptorUpdateTemplate", (void*)DestroyDescriptorUpdateTemplate},
+    {"vkUpdateDescriptorSetWithTemplate", (void*)UpdateDescriptorSetWithTemplate},
+    {"vkGetPhysicalDeviceExternalBufferProperties", (void*)GetPhysicalDeviceExternalBufferProperties},
+    {"vkGetPhysicalDeviceExternalFenceProperties", (void*)GetPhysicalDeviceExternalFenceProperties},
+    {"vkGetPhysicalDeviceExternalSemaphoreProperties", (void*)GetPhysicalDeviceExternalSemaphoreProperties},
+    {"vkGetDescriptorSetLayoutSupport", (void*)GetDescriptorSetLayoutSupport},
+    {"vkDestroySurfaceKHR", (void*)DestroySurfaceKHR},
+    {"vkGetPhysicalDeviceSurfaceSupportKHR", (void*)GetPhysicalDeviceSurfaceSupportKHR},
+    {"vkGetPhysicalDeviceSurfaceCapabilitiesKHR", (void*)GetPhysicalDeviceSurfaceCapabilitiesKHR},
+    {"vkGetPhysicalDeviceSurfaceFormatsKHR", (void*)GetPhysicalDeviceSurfaceFormatsKHR},
+    {"vkGetPhysicalDeviceSurfacePresentModesKHR", (void*)GetPhysicalDeviceSurfacePresentModesKHR},
+    {"vkCreateSwapchainKHR", (void*)CreateSwapchainKHR},
+    {"vkDestroySwapchainKHR", (void*)DestroySwapchainKHR},
+    {"vkGetSwapchainImagesKHR", (void*)GetSwapchainImagesKHR},
+    {"vkAcquireNextImageKHR", (void*)AcquireNextImageKHR},
+    {"vkQueuePresentKHR", (void*)QueuePresentKHR},
+    {"vkGetDeviceGroupPresentCapabilitiesKHR", (void*)GetDeviceGroupPresentCapabilitiesKHR},
+    {"vkGetDeviceGroupSurfacePresentModesKHR", (void*)GetDeviceGroupSurfacePresentModesKHR},
+    {"vkGetPhysicalDevicePresentRectanglesKHR", (void*)GetPhysicalDevicePresentRectanglesKHR},
+    {"vkAcquireNextImage2KHR", (void*)AcquireNextImage2KHR},
+    {"vkGetPhysicalDeviceDisplayPropertiesKHR", (void*)GetPhysicalDeviceDisplayPropertiesKHR},
+    {"vkGetPhysicalDeviceDisplayPlanePropertiesKHR", (void*)GetPhysicalDeviceDisplayPlanePropertiesKHR},
+    {"vkGetDisplayPlaneSupportedDisplaysKHR", (void*)GetDisplayPlaneSupportedDisplaysKHR},
+    {"vkGetDisplayModePropertiesKHR", (void*)GetDisplayModePropertiesKHR},
+    {"vkCreateDisplayModeKHR", (void*)CreateDisplayModeKHR},
+    {"vkGetDisplayPlaneCapabilitiesKHR", (void*)GetDisplayPlaneCapabilitiesKHR},
+    {"vkCreateDisplayPlaneSurfaceKHR", (void*)CreateDisplayPlaneSurfaceKHR},
+    {"vkCreateSharedSwapchainsKHR", (void*)CreateSharedSwapchainsKHR},
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    {"vkCreateXlibSurfaceKHR", (void*)CreateXlibSurfaceKHR},
+#endif
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    {"vkGetPhysicalDeviceXlibPresentationSupportKHR", (void*)GetPhysicalDeviceXlibPresentationSupportKHR},
+#endif
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    {"vkCreateXcbSurfaceKHR", (void*)CreateXcbSurfaceKHR},
+#endif
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    {"vkGetPhysicalDeviceXcbPresentationSupportKHR", (void*)GetPhysicalDeviceXcbPresentationSupportKHR},
+#endif
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    {"vkCreateWaylandSurfaceKHR", (void*)CreateWaylandSurfaceKHR},
+#endif
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    {"vkGetPhysicalDeviceWaylandPresentationSupportKHR", (void*)GetPhysicalDeviceWaylandPresentationSupportKHR},
+#endif
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    {"vkCreateAndroidSurfaceKHR", (void*)CreateAndroidSurfaceKHR},
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    {"vkCreateWin32SurfaceKHR", (void*)CreateWin32SurfaceKHR},
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    {"vkGetPhysicalDeviceWin32PresentationSupportKHR", (void*)GetPhysicalDeviceWin32PresentationSupportKHR},
+#endif
+    {"vkGetPhysicalDeviceFeatures2KHR", (void*)GetPhysicalDeviceFeatures2KHR},
+    {"vkGetPhysicalDeviceProperties2KHR", (void*)GetPhysicalDeviceProperties2KHR},
+    {"vkGetPhysicalDeviceFormatProperties2KHR", (void*)GetPhysicalDeviceFormatProperties2KHR},
+    {"vkGetPhysicalDeviceImageFormatProperties2KHR", (void*)GetPhysicalDeviceImageFormatProperties2KHR},
+    {"vkGetPhysicalDeviceQueueFamilyProperties2KHR", (void*)GetPhysicalDeviceQueueFamilyProperties2KHR},
+    {"vkGetPhysicalDeviceMemoryProperties2KHR", (void*)GetPhysicalDeviceMemoryProperties2KHR},
+    {"vkGetPhysicalDeviceSparseImageFormatProperties2KHR", (void*)GetPhysicalDeviceSparseImageFormatProperties2KHR},
+    {"vkGetDeviceGroupPeerMemoryFeaturesKHR", (void*)GetDeviceGroupPeerMemoryFeaturesKHR},
+    {"vkCmdSetDeviceMaskKHR", (void*)CmdSetDeviceMaskKHR},
+    {"vkCmdDispatchBaseKHR", (void*)CmdDispatchBaseKHR},
+    {"vkTrimCommandPoolKHR", (void*)TrimCommandPoolKHR},
+    {"vkEnumeratePhysicalDeviceGroupsKHR", (void*)EnumeratePhysicalDeviceGroupsKHR},
+    {"vkGetPhysicalDeviceExternalBufferPropertiesKHR", (void*)GetPhysicalDeviceExternalBufferPropertiesKHR},
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    {"vkGetMemoryWin32HandleKHR", (void*)GetMemoryWin32HandleKHR},
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    {"vkGetMemoryWin32HandlePropertiesKHR", (void*)GetMemoryWin32HandlePropertiesKHR},
+#endif
+    {"vkGetMemoryFdKHR", (void*)GetMemoryFdKHR},
+    {"vkGetMemoryFdPropertiesKHR", (void*)GetMemoryFdPropertiesKHR},
+    {"vkGetPhysicalDeviceExternalSemaphorePropertiesKHR", (void*)GetPhysicalDeviceExternalSemaphorePropertiesKHR},
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    {"vkImportSemaphoreWin32HandleKHR", (void*)ImportSemaphoreWin32HandleKHR},
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    {"vkGetSemaphoreWin32HandleKHR", (void*)GetSemaphoreWin32HandleKHR},
+#endif
+    {"vkImportSemaphoreFdKHR", (void*)ImportSemaphoreFdKHR},
+    {"vkGetSemaphoreFdKHR", (void*)GetSemaphoreFdKHR},
+    {"vkCmdPushDescriptorSetKHR", (void*)CmdPushDescriptorSetKHR},
+    {"vkCmdPushDescriptorSetWithTemplateKHR", (void*)CmdPushDescriptorSetWithTemplateKHR},
+    {"vkCreateDescriptorUpdateTemplateKHR", (void*)CreateDescriptorUpdateTemplateKHR},
+    {"vkDestroyDescriptorUpdateTemplateKHR", (void*)DestroyDescriptorUpdateTemplateKHR},
+    {"vkUpdateDescriptorSetWithTemplateKHR", (void*)UpdateDescriptorSetWithTemplateKHR},
+    {"vkCreateRenderPass2KHR", (void*)CreateRenderPass2KHR},
+    {"vkCmdBeginRenderPass2KHR", (void*)CmdBeginRenderPass2KHR},
+    {"vkCmdNextSubpass2KHR", (void*)CmdNextSubpass2KHR},
+    {"vkCmdEndRenderPass2KHR", (void*)CmdEndRenderPass2KHR},
+    {"vkGetSwapchainStatusKHR", (void*)GetSwapchainStatusKHR},
+    {"vkGetPhysicalDeviceExternalFencePropertiesKHR", (void*)GetPhysicalDeviceExternalFencePropertiesKHR},
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    {"vkImportFenceWin32HandleKHR", (void*)ImportFenceWin32HandleKHR},
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    {"vkGetFenceWin32HandleKHR", (void*)GetFenceWin32HandleKHR},
+#endif
+    {"vkImportFenceFdKHR", (void*)ImportFenceFdKHR},
+    {"vkGetFenceFdKHR", (void*)GetFenceFdKHR},
+    {"vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR", (void*)EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR},
+    {"vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR", (void*)GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR},
+    {"vkAcquireProfilingLockKHR", (void*)AcquireProfilingLockKHR},
+    {"vkReleaseProfilingLockKHR", (void*)ReleaseProfilingLockKHR},
+    {"vkGetPhysicalDeviceSurfaceCapabilities2KHR", (void*)GetPhysicalDeviceSurfaceCapabilities2KHR},
+    {"vkGetPhysicalDeviceSurfaceFormats2KHR", (void*)GetPhysicalDeviceSurfaceFormats2KHR},
+    {"vkGetPhysicalDeviceDisplayProperties2KHR", (void*)GetPhysicalDeviceDisplayProperties2KHR},
+    {"vkGetPhysicalDeviceDisplayPlaneProperties2KHR", (void*)GetPhysicalDeviceDisplayPlaneProperties2KHR},
+    {"vkGetDisplayModeProperties2KHR", (void*)GetDisplayModeProperties2KHR},
+    {"vkGetDisplayPlaneCapabilities2KHR", (void*)GetDisplayPlaneCapabilities2KHR},
+    {"vkGetImageMemoryRequirements2KHR", (void*)GetImageMemoryRequirements2KHR},
+    {"vkGetBufferMemoryRequirements2KHR", (void*)GetBufferMemoryRequirements2KHR},
+    {"vkGetImageSparseMemoryRequirements2KHR", (void*)GetImageSparseMemoryRequirements2KHR},
+    {"vkCreateSamplerYcbcrConversionKHR", (void*)CreateSamplerYcbcrConversionKHR},
+    {"vkDestroySamplerYcbcrConversionKHR", (void*)DestroySamplerYcbcrConversionKHR},
+    {"vkBindBufferMemory2KHR", (void*)BindBufferMemory2KHR},
+    {"vkBindImageMemory2KHR", (void*)BindImageMemory2KHR},
+    {"vkGetDescriptorSetLayoutSupportKHR", (void*)GetDescriptorSetLayoutSupportKHR},
+    {"vkCmdDrawIndirectCountKHR", (void*)CmdDrawIndirectCountKHR},
+    {"vkCmdDrawIndexedIndirectCountKHR", (void*)CmdDrawIndexedIndirectCountKHR},
+    {"vkGetSemaphoreCounterValueKHR", (void*)GetSemaphoreCounterValueKHR},
+    {"vkWaitSemaphoresKHR", (void*)WaitSemaphoresKHR},
+    {"vkSignalSemaphoreKHR", (void*)SignalSemaphoreKHR},
+    {"vkGetBufferDeviceAddressKHR", (void*)GetBufferDeviceAddressKHR},
+    {"vkGetBufferOpaqueCaptureAddressKHR", (void*)GetBufferOpaqueCaptureAddressKHR},
+    {"vkGetDeviceMemoryOpaqueCaptureAddressKHR", (void*)GetDeviceMemoryOpaqueCaptureAddressKHR},
+    {"vkGetPipelineExecutablePropertiesKHR", (void*)GetPipelineExecutablePropertiesKHR},
+    {"vkGetPipelineExecutableStatisticsKHR", (void*)GetPipelineExecutableStatisticsKHR},
+    {"vkGetPipelineExecutableInternalRepresentationsKHR", (void*)GetPipelineExecutableInternalRepresentationsKHR},
+    {"vkCreateDebugReportCallbackEXT", (void*)CreateDebugReportCallbackEXT},
+    {"vkDestroyDebugReportCallbackEXT", (void*)DestroyDebugReportCallbackEXT},
+    {"vkDebugReportMessageEXT", (void*)DebugReportMessageEXT},
+    {"vkDebugMarkerSetObjectTagEXT", (void*)DebugMarkerSetObjectTagEXT},
+    {"vkDebugMarkerSetObjectNameEXT", (void*)DebugMarkerSetObjectNameEXT},
+    {"vkCmdDebugMarkerBeginEXT", (void*)CmdDebugMarkerBeginEXT},
+    {"vkCmdDebugMarkerEndEXT", (void*)CmdDebugMarkerEndEXT},
+    {"vkCmdDebugMarkerInsertEXT", (void*)CmdDebugMarkerInsertEXT},
+    {"vkCmdBindTransformFeedbackBuffersEXT", (void*)CmdBindTransformFeedbackBuffersEXT},
+    {"vkCmdBeginTransformFeedbackEXT", (void*)CmdBeginTransformFeedbackEXT},
+    {"vkCmdEndTransformFeedbackEXT", (void*)CmdEndTransformFeedbackEXT},
+    {"vkCmdBeginQueryIndexedEXT", (void*)CmdBeginQueryIndexedEXT},
+    {"vkCmdEndQueryIndexedEXT", (void*)CmdEndQueryIndexedEXT},
+    {"vkCmdDrawIndirectByteCountEXT", (void*)CmdDrawIndirectByteCountEXT},
+    {"vkGetImageViewHandleNVX", (void*)GetImageViewHandleNVX},
+    {"vkCmdDrawIndirectCountAMD", (void*)CmdDrawIndirectCountAMD},
+    {"vkCmdDrawIndexedIndirectCountAMD", (void*)CmdDrawIndexedIndirectCountAMD},
+    {"vkGetShaderInfoAMD", (void*)GetShaderInfoAMD},
+#ifdef VK_USE_PLATFORM_GGP
+    {"vkCreateStreamDescriptorSurfaceGGP", (void*)CreateStreamDescriptorSurfaceGGP},
+#endif
+    {"vkGetPhysicalDeviceExternalImageFormatPropertiesNV", (void*)GetPhysicalDeviceExternalImageFormatPropertiesNV},
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    {"vkGetMemoryWin32HandleNV", (void*)GetMemoryWin32HandleNV},
+#endif
+#ifdef VK_USE_PLATFORM_VI_NN
+    {"vkCreateViSurfaceNN", (void*)CreateViSurfaceNN},
+#endif
+    {"vkCmdBeginConditionalRenderingEXT", (void*)CmdBeginConditionalRenderingEXT},
+    {"vkCmdEndConditionalRenderingEXT", (void*)CmdEndConditionalRenderingEXT},
+    {"vkCmdProcessCommandsNVX", (void*)CmdProcessCommandsNVX},
+    {"vkCmdReserveSpaceForCommandsNVX", (void*)CmdReserveSpaceForCommandsNVX},
+    {"vkCreateIndirectCommandsLayoutNVX", (void*)CreateIndirectCommandsLayoutNVX},
+    {"vkDestroyIndirectCommandsLayoutNVX", (void*)DestroyIndirectCommandsLayoutNVX},
+    {"vkCreateObjectTableNVX", (void*)CreateObjectTableNVX},
+    {"vkDestroyObjectTableNVX", (void*)DestroyObjectTableNVX},
+    {"vkRegisterObjectsNVX", (void*)RegisterObjectsNVX},
+    {"vkUnregisterObjectsNVX", (void*)UnregisterObjectsNVX},
+    {"vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX", (void*)GetPhysicalDeviceGeneratedCommandsPropertiesNVX},
+    {"vkCmdSetViewportWScalingNV", (void*)CmdSetViewportWScalingNV},
+    {"vkReleaseDisplayEXT", (void*)ReleaseDisplayEXT},
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    {"vkAcquireXlibDisplayEXT", (void*)AcquireXlibDisplayEXT},
+#endif
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    {"vkGetRandROutputDisplayEXT", (void*)GetRandROutputDisplayEXT},
+#endif
+    {"vkGetPhysicalDeviceSurfaceCapabilities2EXT", (void*)GetPhysicalDeviceSurfaceCapabilities2EXT},
+    {"vkDisplayPowerControlEXT", (void*)DisplayPowerControlEXT},
+    {"vkRegisterDeviceEventEXT", (void*)RegisterDeviceEventEXT},
+    {"vkRegisterDisplayEventEXT", (void*)RegisterDisplayEventEXT},
+    {"vkGetSwapchainCounterEXT", (void*)GetSwapchainCounterEXT},
+    {"vkGetRefreshCycleDurationGOOGLE", (void*)GetRefreshCycleDurationGOOGLE},
+    {"vkGetPastPresentationTimingGOOGLE", (void*)GetPastPresentationTimingGOOGLE},
+    {"vkCmdSetDiscardRectangleEXT", (void*)CmdSetDiscardRectangleEXT},
+    {"vkSetHdrMetadataEXT", (void*)SetHdrMetadataEXT},
+#ifdef VK_USE_PLATFORM_IOS_MVK
+    {"vkCreateIOSSurfaceMVK", (void*)CreateIOSSurfaceMVK},
+#endif
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+    {"vkCreateMacOSSurfaceMVK", (void*)CreateMacOSSurfaceMVK},
+#endif
+    {"vkSetDebugUtilsObjectNameEXT", (void*)SetDebugUtilsObjectNameEXT},
+    {"vkSetDebugUtilsObjectTagEXT", (void*)SetDebugUtilsObjectTagEXT},
+    {"vkQueueBeginDebugUtilsLabelEXT", (void*)QueueBeginDebugUtilsLabelEXT},
+    {"vkQueueEndDebugUtilsLabelEXT", (void*)QueueEndDebugUtilsLabelEXT},
+    {"vkQueueInsertDebugUtilsLabelEXT", (void*)QueueInsertDebugUtilsLabelEXT},
+    {"vkCmdBeginDebugUtilsLabelEXT", (void*)CmdBeginDebugUtilsLabelEXT},
+    {"vkCmdEndDebugUtilsLabelEXT", (void*)CmdEndDebugUtilsLabelEXT},
+    {"vkCmdInsertDebugUtilsLabelEXT", (void*)CmdInsertDebugUtilsLabelEXT},
+    {"vkCreateDebugUtilsMessengerEXT", (void*)CreateDebugUtilsMessengerEXT},
+    {"vkDestroyDebugUtilsMessengerEXT", (void*)DestroyDebugUtilsMessengerEXT},
+    {"vkSubmitDebugUtilsMessageEXT", (void*)SubmitDebugUtilsMessageEXT},
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    {"vkGetAndroidHardwareBufferPropertiesANDROID", (void*)GetAndroidHardwareBufferPropertiesANDROID},
+#endif
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    {"vkGetMemoryAndroidHardwareBufferANDROID", (void*)GetMemoryAndroidHardwareBufferANDROID},
+#endif
+    {"vkCmdSetSampleLocationsEXT", (void*)CmdSetSampleLocationsEXT},
+    {"vkGetPhysicalDeviceMultisamplePropertiesEXT", (void*)GetPhysicalDeviceMultisamplePropertiesEXT},
+    {"vkGetImageDrmFormatModifierPropertiesEXT", (void*)GetImageDrmFormatModifierPropertiesEXT},
+    {"vkCreateValidationCacheEXT", (void*)CreateValidationCacheEXT},
+    {"vkDestroyValidationCacheEXT", (void*)DestroyValidationCacheEXT},
+    {"vkMergeValidationCachesEXT", (void*)MergeValidationCachesEXT},
+    {"vkGetValidationCacheDataEXT", (void*)GetValidationCacheDataEXT},
+    {"vkCmdBindShadingRateImageNV", (void*)CmdBindShadingRateImageNV},
+    {"vkCmdSetViewportShadingRatePaletteNV", (void*)CmdSetViewportShadingRatePaletteNV},
+    {"vkCmdSetCoarseSampleOrderNV", (void*)CmdSetCoarseSampleOrderNV},
+    {"vkCreateAccelerationStructureNV", (void*)CreateAccelerationStructureNV},
+    {"vkDestroyAccelerationStructureNV", (void*)DestroyAccelerationStructureNV},
+    {"vkGetAccelerationStructureMemoryRequirementsNV", (void*)GetAccelerationStructureMemoryRequirementsNV},
+    {"vkBindAccelerationStructureMemoryNV", (void*)BindAccelerationStructureMemoryNV},
+    {"vkCmdBuildAccelerationStructureNV", (void*)CmdBuildAccelerationStructureNV},
+    {"vkCmdCopyAccelerationStructureNV", (void*)CmdCopyAccelerationStructureNV},
+    {"vkCmdTraceRaysNV", (void*)CmdTraceRaysNV},
+    {"vkCreateRayTracingPipelinesNV", (void*)CreateRayTracingPipelinesNV},
+    {"vkGetRayTracingShaderGroupHandlesNV", (void*)GetRayTracingShaderGroupHandlesNV},
+    {"vkGetAccelerationStructureHandleNV", (void*)GetAccelerationStructureHandleNV},
+    {"vkCmdWriteAccelerationStructuresPropertiesNV", (void*)CmdWriteAccelerationStructuresPropertiesNV},
+    {"vkCompileDeferredNV", (void*)CompileDeferredNV},
+    {"vkGetMemoryHostPointerPropertiesEXT", (void*)GetMemoryHostPointerPropertiesEXT},
+    {"vkCmdWriteBufferMarkerAMD", (void*)CmdWriteBufferMarkerAMD},
+    {"vkGetPhysicalDeviceCalibrateableTimeDomainsEXT", (void*)GetPhysicalDeviceCalibrateableTimeDomainsEXT},
+    {"vkGetCalibratedTimestampsEXT", (void*)GetCalibratedTimestampsEXT},
+    {"vkCmdDrawMeshTasksNV", (void*)CmdDrawMeshTasksNV},
+    {"vkCmdDrawMeshTasksIndirectNV", (void*)CmdDrawMeshTasksIndirectNV},
+    {"vkCmdDrawMeshTasksIndirectCountNV", (void*)CmdDrawMeshTasksIndirectCountNV},
+    {"vkCmdSetExclusiveScissorNV", (void*)CmdSetExclusiveScissorNV},
+    {"vkCmdSetCheckpointNV", (void*)CmdSetCheckpointNV},
+    {"vkGetQueueCheckpointDataNV", (void*)GetQueueCheckpointDataNV},
+    {"vkInitializePerformanceApiINTEL", (void*)InitializePerformanceApiINTEL},
+    {"vkUninitializePerformanceApiINTEL", (void*)UninitializePerformanceApiINTEL},
+    {"vkCmdSetPerformanceMarkerINTEL", (void*)CmdSetPerformanceMarkerINTEL},
+    {"vkCmdSetPerformanceStreamMarkerINTEL", (void*)CmdSetPerformanceStreamMarkerINTEL},
+    {"vkCmdSetPerformanceOverrideINTEL", (void*)CmdSetPerformanceOverrideINTEL},
+    {"vkAcquirePerformanceConfigurationINTEL", (void*)AcquirePerformanceConfigurationINTEL},
+    {"vkReleasePerformanceConfigurationINTEL", (void*)ReleasePerformanceConfigurationINTEL},
+    {"vkQueueSetPerformanceConfigurationINTEL", (void*)QueueSetPerformanceConfigurationINTEL},
+    {"vkGetPerformanceParameterINTEL", (void*)GetPerformanceParameterINTEL},
+    {"vkSetLocalDimmingAMD", (void*)SetLocalDimmingAMD},
+#ifdef VK_USE_PLATFORM_FUCHSIA
+    {"vkCreateImagePipeSurfaceFUCHSIA", (void*)CreateImagePipeSurfaceFUCHSIA},
+#endif
+#ifdef VK_USE_PLATFORM_METAL_EXT
+    {"vkCreateMetalSurfaceEXT", (void*)CreateMetalSurfaceEXT},
+#endif
+    {"vkGetBufferDeviceAddressEXT", (void*)GetBufferDeviceAddressEXT},
+    {"vkGetPhysicalDeviceToolPropertiesEXT", (void*)GetPhysicalDeviceToolPropertiesEXT},
+    {"vkGetPhysicalDeviceCooperativeMatrixPropertiesNV", (void*)GetPhysicalDeviceCooperativeMatrixPropertiesNV},
+    {"vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV", (void*)GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV},
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    {"vkGetPhysicalDeviceSurfacePresentModes2EXT", (void*)GetPhysicalDeviceSurfacePresentModes2EXT},
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    {"vkAcquireFullScreenExclusiveModeEXT", (void*)AcquireFullScreenExclusiveModeEXT},
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    {"vkReleaseFullScreenExclusiveModeEXT", (void*)ReleaseFullScreenExclusiveModeEXT},
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    {"vkGetDeviceGroupSurfacePresentModes2EXT", (void*)GetDeviceGroupSurfacePresentModes2EXT},
+#endif
+    {"vkCreateHeadlessSurfaceEXT", (void*)CreateHeadlessSurfaceEXT},
+    {"vkCmdSetLineStippleEXT", (void*)CmdSetLineStippleEXT},
+    {"vkResetQueryPoolEXT", (void*)ResetQueryPoolEXT},
+};
+
+
+} // namespace vkmock
+
+#endif
diff --git a/src/third_party/vulkan-tools/src/icd/generated/vk_typemap_helper.h b/src/third_party/vulkan-tools/src/icd/generated/vk_typemap_helper.h
new file mode 100644
index 0000000..18fbedf
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/icd/generated/vk_typemap_helper.h
@@ -0,0 +1,3686 @@
+// *** THIS FILE IS GENERATED - DO NOT EDIT ***
+// See vulkan_tools_helper_file_generator.py for modifications
+
+
+/***************************************************************************
+ *
+ * Copyright (c) 2015-2017 The Khronos Group Inc.
+ * Copyright (c) 2015-2017 Valve Corporation
+ * Copyright (c) 2015-2017 LunarG, Inc.
+ * Copyright (c) 2015-2017 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
+ * Author: Tobin Ehlis <tobine@google.com>
+ * Author: Chris Forbes <chrisforbes@google.com>
+ * Author: John Zulauf <jzulauf@lunarg.com>
+ *
+ ****************************************************************************/
+
+#pragma once
+#include <vulkan/vulkan.h>
+
+// These empty generic templates are specialized for each type with an sType
+// member and for each sType value, providing a two-way map between structure
+// types and sTypes.
+
+template <VkStructureType id> struct LvlSTypeMap {};
+template <typename T> struct LvlTypeMap {};
+
+// Map type VkApplicationInfo to id VK_STRUCTURE_TYPE_APPLICATION_INFO
+template <> struct LvlTypeMap<VkApplicationInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_APPLICATION_INFO> {
+    typedef VkApplicationInfo Type;
+};
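
The two empty templates above and the per-structure specializations that follow form the two-way map described in the generator's comment. As a minimal usage sketch (not part of the generated header; the helper name LvlInitStructSketch, the <type_traits> include, and the static_assert are assumptions added purely for illustration), the forward map can supply sType during initialization and the reverse map can be checked at compile time:

    #include <type_traits>
    #include <vulkan/vulkan.h>

    // Hypothetical helper: value-initialize a Vulkan struct (pNext == nullptr,
    // members zeroed) and fill in sType via the forward map.
    template <typename T>
    T LvlInitStructSketch() {
        T out{};
        out.sType = LvlTypeMap<T>::kSType;  // forward lookup: C++ type -> VkStructureType
        return out;
    }

    // Reverse lookup, verified at compile time: sType -> C++ type.
    static_assert(std::is_same<LvlSTypeMap<VK_STRUCTURE_TYPE_APPLICATION_INFO>::Type,
                               VkApplicationInfo>::value,
                  "two-way map round-trips for VkApplicationInfo");

    // Usage example:
    //   VkApplicationInfo app = LvlInitStructSketch<VkApplicationInfo>();
    //   // app.sType == VK_STRUCTURE_TYPE_APPLICATION_INFO
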
+
+// Map type VkInstanceCreateInfo to id VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO
+template <> struct LvlTypeMap<VkInstanceCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO> {
+    typedef VkInstanceCreateInfo Type;
+};
+
+// Map type VkDeviceQueueCreateInfo to id VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO
+template <> struct LvlTypeMap<VkDeviceQueueCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO> {
+    typedef VkDeviceQueueCreateInfo Type;
+};
+
+// Map type VkDeviceCreateInfo to id VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO
+template <> struct LvlTypeMap<VkDeviceCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO> {
+    typedef VkDeviceCreateInfo Type;
+};
+
+// Map type VkSubmitInfo to id VK_STRUCTURE_TYPE_SUBMIT_INFO
+template <> struct LvlTypeMap<VkSubmitInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SUBMIT_INFO> {
+    typedef VkSubmitInfo Type;
+};
+
+// Map type VkMemoryAllocateInfo to id VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO
+template <> struct LvlTypeMap<VkMemoryAllocateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO> {
+    typedef VkMemoryAllocateInfo Type;
+};
+
+// Map type VkMappedMemoryRange to id VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE
+template <> struct LvlTypeMap<VkMappedMemoryRange> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE> {
+    typedef VkMappedMemoryRange Type;
+};
+
+// Map type VkBindSparseInfo to id VK_STRUCTURE_TYPE_BIND_SPARSE_INFO
+template <> struct LvlTypeMap<VkBindSparseInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BIND_SPARSE_INFO> {
+    typedef VkBindSparseInfo Type;
+};
+
+// Map type VkFenceCreateInfo to id VK_STRUCTURE_TYPE_FENCE_CREATE_INFO
+template <> struct LvlTypeMap<VkFenceCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_FENCE_CREATE_INFO> {
+    typedef VkFenceCreateInfo Type;
+};
+
+// Map type VkSemaphoreCreateInfo to id VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO
+template <> struct LvlTypeMap<VkSemaphoreCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO> {
+    typedef VkSemaphoreCreateInfo Type;
+};
+
+// Map type VkEventCreateInfo to id VK_STRUCTURE_TYPE_EVENT_CREATE_INFO
+template <> struct LvlTypeMap<VkEventCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EVENT_CREATE_INFO> {
+    typedef VkEventCreateInfo Type;
+};
+
+// Map type VkQueryPoolCreateInfo to id VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO
+template <> struct LvlTypeMap<VkQueryPoolCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO> {
+    typedef VkQueryPoolCreateInfo Type;
+};
+
+// Map type VkBufferCreateInfo to id VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO
+template <> struct LvlTypeMap<VkBufferCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO> {
+    typedef VkBufferCreateInfo Type;
+};
+
+// Map type VkBufferViewCreateInfo to id VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO
+template <> struct LvlTypeMap<VkBufferViewCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO> {
+    typedef VkBufferViewCreateInfo Type;
+};
+
+// Map type VkImageCreateInfo to id VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO
+template <> struct LvlTypeMap<VkImageCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO> {
+    typedef VkImageCreateInfo Type;
+};
+
+// Map type VkImageViewCreateInfo to id VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO
+template <> struct LvlTypeMap<VkImageViewCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO> {
+    typedef VkImageViewCreateInfo Type;
+};
+
+// Map type VkShaderModuleCreateInfo to id VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO
+template <> struct LvlTypeMap<VkShaderModuleCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO> {
+    typedef VkShaderModuleCreateInfo Type;
+};
+
+// Map type VkPipelineCacheCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO
+template <> struct LvlTypeMap<VkPipelineCacheCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO> {
+    typedef VkPipelineCacheCreateInfo Type;
+};
+
+// Map type VkPipelineShaderStageCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO
+template <> struct LvlTypeMap<VkPipelineShaderStageCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO> {
+    typedef VkPipelineShaderStageCreateInfo Type;
+};
+
+// Map type VkPipelineVertexInputStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO
+template <> struct LvlTypeMap<VkPipelineVertexInputStateCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO> {
+    typedef VkPipelineVertexInputStateCreateInfo Type;
+};
+
+// Map type VkPipelineInputAssemblyStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO
+template <> struct LvlTypeMap<VkPipelineInputAssemblyStateCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO> {
+    typedef VkPipelineInputAssemblyStateCreateInfo Type;
+};
+
+// Map type VkPipelineTessellationStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO
+template <> struct LvlTypeMap<VkPipelineTessellationStateCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO> {
+    typedef VkPipelineTessellationStateCreateInfo Type;
+};
+
+// Map type VkPipelineViewportStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO
+template <> struct LvlTypeMap<VkPipelineViewportStateCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO> {
+    typedef VkPipelineViewportStateCreateInfo Type;
+};
+
+// Map type VkPipelineRasterizationStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO
+template <> struct LvlTypeMap<VkPipelineRasterizationStateCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO> {
+    typedef VkPipelineRasterizationStateCreateInfo Type;
+};
+
+// Map type VkPipelineMultisampleStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO
+template <> struct LvlTypeMap<VkPipelineMultisampleStateCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO> {
+    typedef VkPipelineMultisampleStateCreateInfo Type;
+};
+
+// Map type VkPipelineDepthStencilStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO
+template <> struct LvlTypeMap<VkPipelineDepthStencilStateCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO> {
+    typedef VkPipelineDepthStencilStateCreateInfo Type;
+};
+
+// Map type VkPipelineColorBlendStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO
+template <> struct LvlTypeMap<VkPipelineColorBlendStateCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO> {
+    typedef VkPipelineColorBlendStateCreateInfo Type;
+};
+
+// Map type VkPipelineDynamicStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO
+template <> struct LvlTypeMap<VkPipelineDynamicStateCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO> {
+    typedef VkPipelineDynamicStateCreateInfo Type;
+};
+
+// Map type VkGraphicsPipelineCreateInfo to id VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO
+template <> struct LvlTypeMap<VkGraphicsPipelineCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO> {
+    typedef VkGraphicsPipelineCreateInfo Type;
+};
+
+// Map type VkComputePipelineCreateInfo to id VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO
+template <> struct LvlTypeMap<VkComputePipelineCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO> {
+    typedef VkComputePipelineCreateInfo Type;
+};
+
+// Map type VkPipelineLayoutCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
+template <> struct LvlTypeMap<VkPipelineLayoutCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO> {
+    typedef VkPipelineLayoutCreateInfo Type;
+};
+
+// Map type VkSamplerCreateInfo to id VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO
+template <> struct LvlTypeMap<VkSamplerCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO> {
+    typedef VkSamplerCreateInfo Type;
+};
+
+// Map type VkDescriptorSetLayoutCreateInfo to id VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
+template <> struct LvlTypeMap<VkDescriptorSetLayoutCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO> {
+    typedef VkDescriptorSetLayoutCreateInfo Type;
+};
+
+// Map type VkDescriptorPoolCreateInfo to id VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO
+template <> struct LvlTypeMap<VkDescriptorPoolCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO> {
+    typedef VkDescriptorPoolCreateInfo Type;
+};
+
+// Map type VkDescriptorSetAllocateInfo to id VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO
+template <> struct LvlTypeMap<VkDescriptorSetAllocateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO> {
+    typedef VkDescriptorSetAllocateInfo Type;
+};
+
+// Map type VkWriteDescriptorSet to id VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
+template <> struct LvlTypeMap<VkWriteDescriptorSet> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET> {
+    typedef VkWriteDescriptorSet Type;
+};
+
+// Map type VkCopyDescriptorSet to id VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET
+template <> struct LvlTypeMap<VkCopyDescriptorSet> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET> {
+    typedef VkCopyDescriptorSet Type;
+};
+
+// Map type VkFramebufferCreateInfo to id VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO
+template <> struct LvlTypeMap<VkFramebufferCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO> {
+    typedef VkFramebufferCreateInfo Type;
+};
+
+// Map type VkRenderPassCreateInfo to id VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO
+template <> struct LvlTypeMap<VkRenderPassCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO> {
+    typedef VkRenderPassCreateInfo Type;
+};
+
+// Map type VkCommandPoolCreateInfo to id VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO
+template <> struct LvlTypeMap<VkCommandPoolCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO> {
+    typedef VkCommandPoolCreateInfo Type;
+};
+
+// Map type VkCommandBufferAllocateInfo to id VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO
+template <> struct LvlTypeMap<VkCommandBufferAllocateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO> {
+    typedef VkCommandBufferAllocateInfo Type;
+};
+
+// Map type VkCommandBufferInheritanceInfo to id VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO
+template <> struct LvlTypeMap<VkCommandBufferInheritanceInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO> {
+    typedef VkCommandBufferInheritanceInfo Type;
+};
+
+// Map type VkCommandBufferBeginInfo to id VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO
+template <> struct LvlTypeMap<VkCommandBufferBeginInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO> {
+    typedef VkCommandBufferBeginInfo Type;
+};
+
+// Map type VkMemoryBarrier to id VK_STRUCTURE_TYPE_MEMORY_BARRIER
+template <> struct LvlTypeMap<VkMemoryBarrier> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_BARRIER> {
+    typedef VkMemoryBarrier Type;
+};
+
+// Map type VkBufferMemoryBarrier to id VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER
+template <> struct LvlTypeMap<VkBufferMemoryBarrier> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER> {
+    typedef VkBufferMemoryBarrier Type;
+};
+
+// Map type VkImageMemoryBarrier to id VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER
+template <> struct LvlTypeMap<VkImageMemoryBarrier> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER> {
+    typedef VkImageMemoryBarrier Type;
+};
+
+// Map type VkRenderPassBeginInfo to id VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO
+template <> struct LvlTypeMap<VkRenderPassBeginInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO> {
+    typedef VkRenderPassBeginInfo Type;
+};
+
+// Map type VkPhysicalDeviceSubgroupProperties to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES
+template <> struct LvlTypeMap<VkPhysicalDeviceSubgroupProperties> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES> {
+    typedef VkPhysicalDeviceSubgroupProperties Type;
+};
+
+// Map type VkBindBufferMemoryInfo to id VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO
+template <> struct LvlTypeMap<VkBindBufferMemoryInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO> {
+    typedef VkBindBufferMemoryInfo Type;
+};
+
+// Map type VkBindImageMemoryInfo to id VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO
+template <> struct LvlTypeMap<VkBindImageMemoryInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO> {
+    typedef VkBindImageMemoryInfo Type;
+};
+
+// Map type VkPhysicalDevice16BitStorageFeatures to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES
+template <> struct LvlTypeMap<VkPhysicalDevice16BitStorageFeatures> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES> {
+    typedef VkPhysicalDevice16BitStorageFeatures Type;
+};
+
+// Map type VkMemoryDedicatedRequirements to id VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS
+template <> struct LvlTypeMap<VkMemoryDedicatedRequirements> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS> {
+    typedef VkMemoryDedicatedRequirements Type;
+};
+
+// Map type VkMemoryDedicatedAllocateInfo to id VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO
+template <> struct LvlTypeMap<VkMemoryDedicatedAllocateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO> {
+    typedef VkMemoryDedicatedAllocateInfo Type;
+};
+
+// Map type VkMemoryAllocateFlagsInfo to id VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO
+template <> struct LvlTypeMap<VkMemoryAllocateFlagsInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO> {
+    typedef VkMemoryAllocateFlagsInfo Type;
+};
+
+// Map type VkDeviceGroupRenderPassBeginInfo to id VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO
+template <> struct LvlTypeMap<VkDeviceGroupRenderPassBeginInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO> {
+    typedef VkDeviceGroupRenderPassBeginInfo Type;
+};
+
+// Map type VkDeviceGroupCommandBufferBeginInfo to id VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO
+template <> struct LvlTypeMap<VkDeviceGroupCommandBufferBeginInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO> {
+    typedef VkDeviceGroupCommandBufferBeginInfo Type;
+};
+
+// Map type VkDeviceGroupSubmitInfo to id VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO
+template <> struct LvlTypeMap<VkDeviceGroupSubmitInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO> {
+    typedef VkDeviceGroupSubmitInfo Type;
+};
+
+// Map type VkDeviceGroupBindSparseInfo to id VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO
+template <> struct LvlTypeMap<VkDeviceGroupBindSparseInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO> {
+    typedef VkDeviceGroupBindSparseInfo Type;
+};
+
+// Map type VkBindBufferMemoryDeviceGroupInfo to id VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO
+template <> struct LvlTypeMap<VkBindBufferMemoryDeviceGroupInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO> {
+    typedef VkBindBufferMemoryDeviceGroupInfo Type;
+};
+
+// Map type VkBindImageMemoryDeviceGroupInfo to id VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO
+template <> struct LvlTypeMap<VkBindImageMemoryDeviceGroupInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO> {
+    typedef VkBindImageMemoryDeviceGroupInfo Type;
+};
+
+// Map type VkPhysicalDeviceGroupProperties to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES
+template <> struct LvlTypeMap<VkPhysicalDeviceGroupProperties> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES> {
+    typedef VkPhysicalDeviceGroupProperties Type;
+};
+
+// Map type VkDeviceGroupDeviceCreateInfo to id VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO
+template <> struct LvlTypeMap<VkDeviceGroupDeviceCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO> {
+    typedef VkDeviceGroupDeviceCreateInfo Type;
+};
+
+// Map type VkBufferMemoryRequirementsInfo2 to id VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2
+template <> struct LvlTypeMap<VkBufferMemoryRequirementsInfo2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2> {
+    typedef VkBufferMemoryRequirementsInfo2 Type;
+};
+
+// Map type VkImageMemoryRequirementsInfo2 to id VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2
+template <> struct LvlTypeMap<VkImageMemoryRequirementsInfo2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2> {
+    typedef VkImageMemoryRequirementsInfo2 Type;
+};
+
+// Map type VkImageSparseMemoryRequirementsInfo2 to id VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2
+template <> struct LvlTypeMap<VkImageSparseMemoryRequirementsInfo2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2> {
+    typedef VkImageSparseMemoryRequirementsInfo2 Type;
+};
+
+// Map type VkMemoryRequirements2 to id VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2
+template <> struct LvlTypeMap<VkMemoryRequirements2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2> {
+    typedef VkMemoryRequirements2 Type;
+};
+
+// Map type VkSparseImageMemoryRequirements2 to id VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2
+template <> struct LvlTypeMap<VkSparseImageMemoryRequirements2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2> {
+    typedef VkSparseImageMemoryRequirements2 Type;
+};
+
+// Map type VkPhysicalDeviceFeatures2 to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2
+template <> struct LvlTypeMap<VkPhysicalDeviceFeatures2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2> {
+    typedef VkPhysicalDeviceFeatures2 Type;
+};
+
+// Map type VkPhysicalDeviceProperties2 to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2
+template <> struct LvlTypeMap<VkPhysicalDeviceProperties2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2> {
+    typedef VkPhysicalDeviceProperties2 Type;
+};
+
+// Map type VkFormatProperties2 to id VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2
+template <> struct LvlTypeMap<VkFormatProperties2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2> {
+    typedef VkFormatProperties2 Type;
+};
+
+// Map type VkImageFormatProperties2 to id VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2
+template <> struct LvlTypeMap<VkImageFormatProperties2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2> {
+    typedef VkImageFormatProperties2 Type;
+};
+
+// Map type VkPhysicalDeviceImageFormatInfo2 to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2
+template <> struct LvlTypeMap<VkPhysicalDeviceImageFormatInfo2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2> {
+    typedef VkPhysicalDeviceImageFormatInfo2 Type;
+};
+
+// Map type VkQueueFamilyProperties2 to id VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2
+template <> struct LvlTypeMap<VkQueueFamilyProperties2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2> {
+    typedef VkQueueFamilyProperties2 Type;
+};
+
+// Map type VkPhysicalDeviceMemoryProperties2 to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2
+template <> struct LvlTypeMap<VkPhysicalDeviceMemoryProperties2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2> {
+    typedef VkPhysicalDeviceMemoryProperties2 Type;
+};
+
+// Map type VkSparseImageFormatProperties2 to id VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2
+template <> struct LvlTypeMap<VkSparseImageFormatProperties2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2> {
+    typedef VkSparseImageFormatProperties2 Type;
+};
+
+// Map type VkPhysicalDeviceSparseImageFormatInfo2 to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2
+template <> struct LvlTypeMap<VkPhysicalDeviceSparseImageFormatInfo2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2> {
+    typedef VkPhysicalDeviceSparseImageFormatInfo2 Type;
+};
+
+// Map type VkPhysicalDevicePointClippingProperties to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES
+template <> struct LvlTypeMap<VkPhysicalDevicePointClippingProperties> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES> {
+    typedef VkPhysicalDevicePointClippingProperties Type;
+};
+
+// Map type VkRenderPassInputAttachmentAspectCreateInfo to id VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO
+template <> struct LvlTypeMap<VkRenderPassInputAttachmentAspectCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO> {
+    typedef VkRenderPassInputAttachmentAspectCreateInfo Type;
+};
+
+// Map type VkImageViewUsageCreateInfo to id VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO
+template <> struct LvlTypeMap<VkImageViewUsageCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO> {
+    typedef VkImageViewUsageCreateInfo Type;
+};
+
+// Map type VkPipelineTessellationDomainOriginStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO
+template <> struct LvlTypeMap<VkPipelineTessellationDomainOriginStateCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO> {
+    typedef VkPipelineTessellationDomainOriginStateCreateInfo Type;
+};
+
+// Map type VkRenderPassMultiviewCreateInfo to id VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO
+template <> struct LvlTypeMap<VkRenderPassMultiviewCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO> {
+    typedef VkRenderPassMultiviewCreateInfo Type;
+};
+
+// Map type VkPhysicalDeviceMultiviewFeatures to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES
+template <> struct LvlTypeMap<VkPhysicalDeviceMultiviewFeatures> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES> {
+    typedef VkPhysicalDeviceMultiviewFeatures Type;
+};
+
+// Map type VkPhysicalDeviceMultiviewProperties to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES
+template <> struct LvlTypeMap<VkPhysicalDeviceMultiviewProperties> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES> {
+    typedef VkPhysicalDeviceMultiviewProperties Type;
+};
+
+// Map type VkPhysicalDeviceVariablePointersFeatures to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES
+template <> struct LvlTypeMap<VkPhysicalDeviceVariablePointersFeatures> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES> {
+    typedef VkPhysicalDeviceVariablePointersFeatures Type;
+};
+
+// Map type VkPhysicalDeviceProtectedMemoryFeatures to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES
+template <> struct LvlTypeMap<VkPhysicalDeviceProtectedMemoryFeatures> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES> {
+    typedef VkPhysicalDeviceProtectedMemoryFeatures Type;
+};
+
+// Map type VkPhysicalDeviceProtectedMemoryProperties to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES
+template <> struct LvlTypeMap<VkPhysicalDeviceProtectedMemoryProperties> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES> {
+    typedef VkPhysicalDeviceProtectedMemoryProperties Type;
+};
+
+// Map type VkDeviceQueueInfo2 to id VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2
+template <> struct LvlTypeMap<VkDeviceQueueInfo2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2> {
+    typedef VkDeviceQueueInfo2 Type;
+};
+
+// Map type VkProtectedSubmitInfo to id VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO
+template <> struct LvlTypeMap<VkProtectedSubmitInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO> {
+    typedef VkProtectedSubmitInfo Type;
+};
+
+// Map type VkSamplerYcbcrConversionCreateInfo to id VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO
+template <> struct LvlTypeMap<VkSamplerYcbcrConversionCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO> {
+    typedef VkSamplerYcbcrConversionCreateInfo Type;
+};
+
+// Map type VkSamplerYcbcrConversionInfo to id VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO
+template <> struct LvlTypeMap<VkSamplerYcbcrConversionInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO> {
+    typedef VkSamplerYcbcrConversionInfo Type;
+};
+
+// Map type VkBindImagePlaneMemoryInfo to id VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO
+template <> struct LvlTypeMap<VkBindImagePlaneMemoryInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO> {
+    typedef VkBindImagePlaneMemoryInfo Type;
+};
+
+// Map type VkImagePlaneMemoryRequirementsInfo to id VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO
+template <> struct LvlTypeMap<VkImagePlaneMemoryRequirementsInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO> {
+    typedef VkImagePlaneMemoryRequirementsInfo Type;
+};
+
+// Map type VkPhysicalDeviceSamplerYcbcrConversionFeatures to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES
+template <> struct LvlTypeMap<VkPhysicalDeviceSamplerYcbcrConversionFeatures> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES> {
+    typedef VkPhysicalDeviceSamplerYcbcrConversionFeatures Type;
+};
+
+// Map type VkSamplerYcbcrConversionImageFormatProperties to id VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES
+template <> struct LvlTypeMap<VkSamplerYcbcrConversionImageFormatProperties> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES> {
+    typedef VkSamplerYcbcrConversionImageFormatProperties Type;
+};
+
+// Map type VkDescriptorUpdateTemplateCreateInfo to id VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO
+template <> struct LvlTypeMap<VkDescriptorUpdateTemplateCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO> {
+    typedef VkDescriptorUpdateTemplateCreateInfo Type;
+};
+
+// Map type VkPhysicalDeviceExternalImageFormatInfo to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO
+template <> struct LvlTypeMap<VkPhysicalDeviceExternalImageFormatInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO> {
+    typedef VkPhysicalDeviceExternalImageFormatInfo Type;
+};
+
+// Map type VkExternalImageFormatProperties to id VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES
+template <> struct LvlTypeMap<VkExternalImageFormatProperties> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES> {
+    typedef VkExternalImageFormatProperties Type;
+};
+
+// Map type VkPhysicalDeviceExternalBufferInfo to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO
+template <> struct LvlTypeMap<VkPhysicalDeviceExternalBufferInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO> {
+    typedef VkPhysicalDeviceExternalBufferInfo Type;
+};
+
+// Map type VkExternalBufferProperties to id VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES
+template <> struct LvlTypeMap<VkExternalBufferProperties> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES> {
+    typedef VkExternalBufferProperties Type;
+};
+
+// Map type VkPhysicalDeviceIDProperties to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES
+template <> struct LvlTypeMap<VkPhysicalDeviceIDProperties> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES> {
+    typedef VkPhysicalDeviceIDProperties Type;
+};
+
+// Map type VkExternalMemoryImageCreateInfo to id VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO
+template <> struct LvlTypeMap<VkExternalMemoryImageCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO> {
+    typedef VkExternalMemoryImageCreateInfo Type;
+};
+
+// Map type VkExternalMemoryBufferCreateInfo to id VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO
+template <> struct LvlTypeMap<VkExternalMemoryBufferCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO> {
+    typedef VkExternalMemoryBufferCreateInfo Type;
+};
+
+// Map type VkExportMemoryAllocateInfo to id VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO
+template <> struct LvlTypeMap<VkExportMemoryAllocateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO> {
+    typedef VkExportMemoryAllocateInfo Type;
+};
+
+// Map type VkPhysicalDeviceExternalFenceInfo to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO
+template <> struct LvlTypeMap<VkPhysicalDeviceExternalFenceInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO> {
+    typedef VkPhysicalDeviceExternalFenceInfo Type;
+};
+
+// Map type VkExternalFenceProperties to id VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES
+template <> struct LvlTypeMap<VkExternalFenceProperties> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES> {
+    typedef VkExternalFenceProperties Type;
+};
+
+// Map type VkExportFenceCreateInfo to id VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO
+template <> struct LvlTypeMap<VkExportFenceCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO> {
+    typedef VkExportFenceCreateInfo Type;
+};
+
+// Map type VkExportSemaphoreCreateInfo to id VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO
+template <> struct LvlTypeMap<VkExportSemaphoreCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO> {
+    typedef VkExportSemaphoreCreateInfo Type;
+};
+
+// Map type VkPhysicalDeviceExternalSemaphoreInfo to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO
+template <> struct LvlTypeMap<VkPhysicalDeviceExternalSemaphoreInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO> {
+    typedef VkPhysicalDeviceExternalSemaphoreInfo Type;
+};
+
+// Map type VkExternalSemaphoreProperties to id VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES
+template <> struct LvlTypeMap<VkExternalSemaphoreProperties> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES> {
+    typedef VkExternalSemaphoreProperties Type;
+};
+
+// Map type VkPhysicalDeviceMaintenance3Properties to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES
+template <> struct LvlTypeMap<VkPhysicalDeviceMaintenance3Properties> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES> {
+    typedef VkPhysicalDeviceMaintenance3Properties Type;
+};
+
+// Map type VkDescriptorSetLayoutSupport to id VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT
+template <> struct LvlTypeMap<VkDescriptorSetLayoutSupport> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT> {
+    typedef VkDescriptorSetLayoutSupport Type;
+};
+
+// Map type VkPhysicalDeviceShaderDrawParametersFeatures to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES
+template <> struct LvlTypeMap<VkPhysicalDeviceShaderDrawParametersFeatures> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES> {
+    typedef VkPhysicalDeviceShaderDrawParametersFeatures Type;
+};
+
+// Map type VkSwapchainCreateInfoKHR to id VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkSwapchainCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR> {
+    typedef VkSwapchainCreateInfoKHR Type;
+};
+
+// Map type VkPresentInfoKHR to id VK_STRUCTURE_TYPE_PRESENT_INFO_KHR
+template <> struct LvlTypeMap<VkPresentInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PRESENT_INFO_KHR> {
+    typedef VkPresentInfoKHR Type;
+};
+
+// Map type VkImageSwapchainCreateInfoKHR to id VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkImageSwapchainCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR> {
+    typedef VkImageSwapchainCreateInfoKHR Type;
+};
+
+// Map type VkBindImageMemorySwapchainInfoKHR to id VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR
+template <> struct LvlTypeMap<VkBindImageMemorySwapchainInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR> {
+    typedef VkBindImageMemorySwapchainInfoKHR Type;
+};
+
+// Map type VkAcquireNextImageInfoKHR to id VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR
+template <> struct LvlTypeMap<VkAcquireNextImageInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR> {
+    typedef VkAcquireNextImageInfoKHR Type;
+};
+
+// Map type VkDeviceGroupPresentCapabilitiesKHR to id VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR
+template <> struct LvlTypeMap<VkDeviceGroupPresentCapabilitiesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR> {
+    typedef VkDeviceGroupPresentCapabilitiesKHR Type;
+};
+
+// Map type VkDeviceGroupPresentInfoKHR to id VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR
+template <> struct LvlTypeMap<VkDeviceGroupPresentInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR> {
+    typedef VkDeviceGroupPresentInfoKHR Type;
+};
+
+// Map type VkDeviceGroupSwapchainCreateInfoKHR to id VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkDeviceGroupSwapchainCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR> {
+    typedef VkDeviceGroupSwapchainCreateInfoKHR Type;
+};
+
+// Map type VkDisplayModeCreateInfoKHR to id VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkDisplayModeCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR> {
+    typedef VkDisplayModeCreateInfoKHR Type;
+};
+
+// Map type VkDisplaySurfaceCreateInfoKHR to id VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkDisplaySurfaceCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR> {
+    typedef VkDisplaySurfaceCreateInfoKHR Type;
+};
+
+// Map type VkDisplayPresentInfoKHR to id VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR
+template <> struct LvlTypeMap<VkDisplayPresentInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR> {
+    typedef VkDisplayPresentInfoKHR Type;
+};
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+// Map type VkXlibSurfaceCreateInfoKHR to id VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkXlibSurfaceCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR> {
+    typedef VkXlibSurfaceCreateInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_XLIB_KHR
+#ifdef VK_USE_PLATFORM_XCB_KHR
+// Map type VkXcbSurfaceCreateInfoKHR to id VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkXcbSurfaceCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR> {
+    typedef VkXcbSurfaceCreateInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_XCB_KHR
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+// Map type VkWaylandSurfaceCreateInfoKHR to id VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkWaylandSurfaceCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR> {
+    typedef VkWaylandSurfaceCreateInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+// Map type VkAndroidSurfaceCreateInfoKHR to id VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkAndroidSurfaceCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR> {
+    typedef VkAndroidSurfaceCreateInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkWin32SurfaceCreateInfoKHR to id VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkWin32SurfaceCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR> {
+    typedef VkWin32SurfaceCreateInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkImportMemoryWin32HandleInfoKHR to id VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR
+template <> struct LvlTypeMap<VkImportMemoryWin32HandleInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR> {
+    typedef VkImportMemoryWin32HandleInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkExportMemoryWin32HandleInfoKHR to id VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR
+template <> struct LvlTypeMap<VkExportMemoryWin32HandleInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR> {
+    typedef VkExportMemoryWin32HandleInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkMemoryWin32HandlePropertiesKHR to id VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR
+template <> struct LvlTypeMap<VkMemoryWin32HandlePropertiesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR> {
+    typedef VkMemoryWin32HandlePropertiesKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkMemoryGetWin32HandleInfoKHR to id VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR
+template <> struct LvlTypeMap<VkMemoryGetWin32HandleInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR> {
+    typedef VkMemoryGetWin32HandleInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+// Map type VkImportMemoryFdInfoKHR to id VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR
+template <> struct LvlTypeMap<VkImportMemoryFdInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR> {
+    typedef VkImportMemoryFdInfoKHR Type;
+};
+
+// Map type VkMemoryFdPropertiesKHR to id VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR
+template <> struct LvlTypeMap<VkMemoryFdPropertiesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR> {
+    typedef VkMemoryFdPropertiesKHR Type;
+};
+
+// Map type VkMemoryGetFdInfoKHR to id VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR
+template <> struct LvlTypeMap<VkMemoryGetFdInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR> {
+    typedef VkMemoryGetFdInfoKHR Type;
+};
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkWin32KeyedMutexAcquireReleaseInfoKHR to id VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR
+template <> struct LvlTypeMap<VkWin32KeyedMutexAcquireReleaseInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR> {
+    typedef VkWin32KeyedMutexAcquireReleaseInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkImportSemaphoreWin32HandleInfoKHR to id VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR
+template <> struct LvlTypeMap<VkImportSemaphoreWin32HandleInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR> {
+    typedef VkImportSemaphoreWin32HandleInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkExportSemaphoreWin32HandleInfoKHR to id VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR
+template <> struct LvlTypeMap<VkExportSemaphoreWin32HandleInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR> {
+    typedef VkExportSemaphoreWin32HandleInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkD3D12FenceSubmitInfoKHR to id VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR
+template <> struct LvlTypeMap<VkD3D12FenceSubmitInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR> {
+    typedef VkD3D12FenceSubmitInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkSemaphoreGetWin32HandleInfoKHR to id VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR
+template <> struct LvlTypeMap<VkSemaphoreGetWin32HandleInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR> {
+    typedef VkSemaphoreGetWin32HandleInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+// Map type VkImportSemaphoreFdInfoKHR to id VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR
+template <> struct LvlTypeMap<VkImportSemaphoreFdInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR> {
+    typedef VkImportSemaphoreFdInfoKHR Type;
+};
+
+// Map type VkSemaphoreGetFdInfoKHR to id VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR
+template <> struct LvlTypeMap<VkSemaphoreGetFdInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR> {
+    typedef VkSemaphoreGetFdInfoKHR Type;
+};
+
+// Map type VkPhysicalDevicePushDescriptorPropertiesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR
+template <> struct LvlTypeMap<VkPhysicalDevicePushDescriptorPropertiesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR> {
+    typedef VkPhysicalDevicePushDescriptorPropertiesKHR Type;
+};
+
+// Map type VkPhysicalDeviceShaderFloat16Int8FeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceShaderFloat16Int8FeaturesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR> {
+    typedef VkPhysicalDeviceShaderFloat16Int8FeaturesKHR Type;
+};
+
+// Map type VkPresentRegionsKHR to id VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR
+template <> struct LvlTypeMap<VkPresentRegionsKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR> {
+    typedef VkPresentRegionsKHR Type;
+};
+
+// Map type VkPhysicalDeviceImagelessFramebufferFeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceImagelessFramebufferFeaturesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR> {
+    typedef VkPhysicalDeviceImagelessFramebufferFeaturesKHR Type;
+};
+
+// Map type VkFramebufferAttachmentImageInfoKHR to id VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR
+template <> struct LvlTypeMap<VkFramebufferAttachmentImageInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR> {
+    typedef VkFramebufferAttachmentImageInfoKHR Type;
+};
+
+// Map type VkFramebufferAttachmentsCreateInfoKHR to id VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkFramebufferAttachmentsCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR> {
+    typedef VkFramebufferAttachmentsCreateInfoKHR Type;
+};
+
+// Map type VkRenderPassAttachmentBeginInfoKHR to id VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR
+template <> struct LvlTypeMap<VkRenderPassAttachmentBeginInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR> {
+    typedef VkRenderPassAttachmentBeginInfoKHR Type;
+};
+
+// Map type VkAttachmentDescription2KHR to id VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR
+template <> struct LvlTypeMap<VkAttachmentDescription2KHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR> {
+    typedef VkAttachmentDescription2KHR Type;
+};
+
+// Map type VkAttachmentReference2KHR to id VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR
+template <> struct LvlTypeMap<VkAttachmentReference2KHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR> {
+    typedef VkAttachmentReference2KHR Type;
+};
+
+// Map type VkSubpassDescription2KHR to id VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR
+template <> struct LvlTypeMap<VkSubpassDescription2KHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR> {
+    typedef VkSubpassDescription2KHR Type;
+};
+
+// Map type VkSubpassDependency2KHR to id VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR
+template <> struct LvlTypeMap<VkSubpassDependency2KHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR> {
+    typedef VkSubpassDependency2KHR Type;
+};
+
+// Map type VkRenderPassCreateInfo2KHR to id VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR
+template <> struct LvlTypeMap<VkRenderPassCreateInfo2KHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR> {
+    typedef VkRenderPassCreateInfo2KHR Type;
+};
+
+// Map type VkSubpassBeginInfoKHR to id VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR
+template <> struct LvlTypeMap<VkSubpassBeginInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR> {
+    typedef VkSubpassBeginInfoKHR Type;
+};
+
+// Map type VkSubpassEndInfoKHR to id VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR
+template <> struct LvlTypeMap<VkSubpassEndInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR> {
+    typedef VkSubpassEndInfoKHR Type;
+};
+
+// Map type VkSharedPresentSurfaceCapabilitiesKHR to id VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR
+template <> struct LvlTypeMap<VkSharedPresentSurfaceCapabilitiesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR> {
+    typedef VkSharedPresentSurfaceCapabilitiesKHR Type;
+};
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkImportFenceWin32HandleInfoKHR to id VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR
+template <> struct LvlTypeMap<VkImportFenceWin32HandleInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR> {
+    typedef VkImportFenceWin32HandleInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkExportFenceWin32HandleInfoKHR to id VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR
+template <> struct LvlTypeMap<VkExportFenceWin32HandleInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR> {
+    typedef VkExportFenceWin32HandleInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkFenceGetWin32HandleInfoKHR to id VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR
+template <> struct LvlTypeMap<VkFenceGetWin32HandleInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR> {
+    typedef VkFenceGetWin32HandleInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+// Map type VkImportFenceFdInfoKHR to id VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR
+template <> struct LvlTypeMap<VkImportFenceFdInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR> {
+    typedef VkImportFenceFdInfoKHR Type;
+};
+
+// Map type VkFenceGetFdInfoKHR to id VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR
+template <> struct LvlTypeMap<VkFenceGetFdInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR> {
+    typedef VkFenceGetFdInfoKHR Type;
+};
+
+// Map type VkPhysicalDevicePerformanceQueryFeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR
+template <> struct LvlTypeMap<VkPhysicalDevicePerformanceQueryFeaturesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR> {
+    typedef VkPhysicalDevicePerformanceQueryFeaturesKHR Type;
+};
+
+// Map type VkPhysicalDevicePerformanceQueryPropertiesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR
+template <> struct LvlTypeMap<VkPhysicalDevicePerformanceQueryPropertiesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR> {
+    typedef VkPhysicalDevicePerformanceQueryPropertiesKHR Type;
+};
+
+// Map type VkPerformanceCounterKHR to id VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR
+template <> struct LvlTypeMap<VkPerformanceCounterKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR> {
+    typedef VkPerformanceCounterKHR Type;
+};
+
+// Map type VkPerformanceCounterDescriptionKHR to id VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR
+template <> struct LvlTypeMap<VkPerformanceCounterDescriptionKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR> {
+    typedef VkPerformanceCounterDescriptionKHR Type;
+};
+
+// Map type VkQueryPoolPerformanceCreateInfoKHR to id VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkQueryPoolPerformanceCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR> {
+    typedef VkQueryPoolPerformanceCreateInfoKHR Type;
+};
+
+// Map type VkAcquireProfilingLockInfoKHR to id VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR
+template <> struct LvlTypeMap<VkAcquireProfilingLockInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR> {
+    typedef VkAcquireProfilingLockInfoKHR Type;
+};
+
+// Map type VkPerformanceQuerySubmitInfoKHR to id VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR
+template <> struct LvlTypeMap<VkPerformanceQuerySubmitInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR> {
+    typedef VkPerformanceQuerySubmitInfoKHR Type;
+};
+
+// Map type VkPhysicalDeviceSurfaceInfo2KHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceSurfaceInfo2KHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR> {
+    typedef VkPhysicalDeviceSurfaceInfo2KHR Type;
+};
+
+// Map type VkSurfaceCapabilities2KHR to id VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR
+template <> struct LvlTypeMap<VkSurfaceCapabilities2KHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR> {
+    typedef VkSurfaceCapabilities2KHR Type;
+};
+
+// Map type VkSurfaceFormat2KHR to id VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR
+template <> struct LvlTypeMap<VkSurfaceFormat2KHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR> {
+    typedef VkSurfaceFormat2KHR Type;
+};
+
+// Map type VkDisplayProperties2KHR to id VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR
+template <> struct LvlTypeMap<VkDisplayProperties2KHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR> {
+    typedef VkDisplayProperties2KHR Type;
+};
+
+// Map type VkDisplayPlaneProperties2KHR to id VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR
+template <> struct LvlTypeMap<VkDisplayPlaneProperties2KHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR> {
+    typedef VkDisplayPlaneProperties2KHR Type;
+};
+
+// Map type VkDisplayModeProperties2KHR to id VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR
+template <> struct LvlTypeMap<VkDisplayModeProperties2KHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR> {
+    typedef VkDisplayModeProperties2KHR Type;
+};
+
+// Map type VkDisplayPlaneInfo2KHR to id VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR
+template <> struct LvlTypeMap<VkDisplayPlaneInfo2KHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR> {
+    typedef VkDisplayPlaneInfo2KHR Type;
+};
+
+// Map type VkDisplayPlaneCapabilities2KHR to id VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR
+template <> struct LvlTypeMap<VkDisplayPlaneCapabilities2KHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR> {
+    typedef VkDisplayPlaneCapabilities2KHR Type;
+};
+
+// Map type VkImageFormatListCreateInfoKHR to id VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkImageFormatListCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR> {
+    typedef VkImageFormatListCreateInfoKHR Type;
+};
+
+// Map type VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR> {
+    typedef VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR Type;
+};
+
+// Map type VkPhysicalDevice8BitStorageFeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR
+template <> struct LvlTypeMap<VkPhysicalDevice8BitStorageFeaturesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR> {
+    typedef VkPhysicalDevice8BitStorageFeaturesKHR Type;
+};
+
+// Map type VkPhysicalDeviceShaderAtomicInt64FeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceShaderAtomicInt64FeaturesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR> {
+    typedef VkPhysicalDeviceShaderAtomicInt64FeaturesKHR Type;
+};
+
+// Map type VkPhysicalDeviceShaderClockFeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceShaderClockFeaturesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR> {
+    typedef VkPhysicalDeviceShaderClockFeaturesKHR Type;
+};
+
+// Map type VkPhysicalDeviceDriverPropertiesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceDriverPropertiesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR> {
+    typedef VkPhysicalDeviceDriverPropertiesKHR Type;
+};
+
+// Map type VkPhysicalDeviceFloatControlsPropertiesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceFloatControlsPropertiesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR> {
+    typedef VkPhysicalDeviceFloatControlsPropertiesKHR Type;
+};
+
+// Map type VkSubpassDescriptionDepthStencilResolveKHR to id VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR
+template <> struct LvlTypeMap<VkSubpassDescriptionDepthStencilResolveKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR> {
+    typedef VkSubpassDescriptionDepthStencilResolveKHR Type;
+};
+
+// Map type VkPhysicalDeviceDepthStencilResolvePropertiesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceDepthStencilResolvePropertiesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR> {
+    typedef VkPhysicalDeviceDepthStencilResolvePropertiesKHR Type;
+};
+
+// Map type VkPhysicalDeviceTimelineSemaphoreFeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceTimelineSemaphoreFeaturesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR> {
+    typedef VkPhysicalDeviceTimelineSemaphoreFeaturesKHR Type;
+};
+
+// Map type VkPhysicalDeviceTimelineSemaphorePropertiesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceTimelineSemaphorePropertiesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR> {
+    typedef VkPhysicalDeviceTimelineSemaphorePropertiesKHR Type;
+};
+
+// Map type VkSemaphoreTypeCreateInfoKHR to id VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkSemaphoreTypeCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR> {
+    typedef VkSemaphoreTypeCreateInfoKHR Type;
+};
+
+// Map type VkTimelineSemaphoreSubmitInfoKHR to id VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR
+template <> struct LvlTypeMap<VkTimelineSemaphoreSubmitInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR> {
+    typedef VkTimelineSemaphoreSubmitInfoKHR Type;
+};
+
+// Map type VkSemaphoreWaitInfoKHR to id VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR
+template <> struct LvlTypeMap<VkSemaphoreWaitInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR> {
+    typedef VkSemaphoreWaitInfoKHR Type;
+};
+
+// Map type VkSemaphoreSignalInfoKHR to id VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR
+template <> struct LvlTypeMap<VkSemaphoreSignalInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR> {
+    typedef VkSemaphoreSignalInfoKHR Type;
+};
+
+// Map type VkPhysicalDeviceVulkanMemoryModelFeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceVulkanMemoryModelFeaturesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR> {
+    typedef VkPhysicalDeviceVulkanMemoryModelFeaturesKHR Type;
+};
+
+// Map type VkSurfaceProtectedCapabilitiesKHR to id VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR
+template <> struct LvlTypeMap<VkSurfaceProtectedCapabilitiesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR> {
+    typedef VkSurfaceProtectedCapabilitiesKHR Type;
+};
+
+// Map type VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR> {
+    typedef VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR Type;
+};
+
+// Map type VkAttachmentReferenceStencilLayoutKHR to id VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR
+template <> struct LvlTypeMap<VkAttachmentReferenceStencilLayoutKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR> {
+    typedef VkAttachmentReferenceStencilLayoutKHR Type;
+};
+
+// Map type VkAttachmentDescriptionStencilLayoutKHR to id VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR
+template <> struct LvlTypeMap<VkAttachmentDescriptionStencilLayoutKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR> {
+    typedef VkAttachmentDescriptionStencilLayoutKHR Type;
+};
+
+// Map type VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR> {
+    typedef VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR Type;
+};
+
+// Map type VkPhysicalDeviceBufferDeviceAddressFeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceBufferDeviceAddressFeaturesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR> {
+    typedef VkPhysicalDeviceBufferDeviceAddressFeaturesKHR Type;
+};
+
+// Map type VkBufferDeviceAddressInfoKHR to id VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR
+template <> struct LvlTypeMap<VkBufferDeviceAddressInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR> {
+    typedef VkBufferDeviceAddressInfoKHR Type;
+};
+
+// Map type VkBufferOpaqueCaptureAddressCreateInfoKHR to id VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkBufferOpaqueCaptureAddressCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR> {
+    typedef VkBufferOpaqueCaptureAddressCreateInfoKHR Type;
+};
+
+// Map type VkMemoryOpaqueCaptureAddressAllocateInfoKHR to id VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR
+template <> struct LvlTypeMap<VkMemoryOpaqueCaptureAddressAllocateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR> {
+    typedef VkMemoryOpaqueCaptureAddressAllocateInfoKHR Type;
+};
+
+// Map type VkDeviceMemoryOpaqueCaptureAddressInfoKHR to id VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR
+template <> struct LvlTypeMap<VkDeviceMemoryOpaqueCaptureAddressInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR> {
+    typedef VkDeviceMemoryOpaqueCaptureAddressInfoKHR Type;
+};
+
+// Map type VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR
+template <> struct LvlTypeMap<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR> {
+    typedef VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR Type;
+};
+
+// Map type VkPipelineInfoKHR to id VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR
+template <> struct LvlTypeMap<VkPipelineInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR> {
+    typedef VkPipelineInfoKHR Type;
+};
+
+// Map type VkPipelineExecutablePropertiesKHR to id VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR
+template <> struct LvlTypeMap<VkPipelineExecutablePropertiesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR> {
+    typedef VkPipelineExecutablePropertiesKHR Type;
+};
+
+// Map type VkPipelineExecutableInfoKHR to id VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR
+template <> struct LvlTypeMap<VkPipelineExecutableInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR> {
+    typedef VkPipelineExecutableInfoKHR Type;
+};
+
+// Map type VkPipelineExecutableStatisticKHR to id VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR
+template <> struct LvlTypeMap<VkPipelineExecutableStatisticKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR> {
+    typedef VkPipelineExecutableStatisticKHR Type;
+};
+
+// Map type VkPipelineExecutableInternalRepresentationKHR to id VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR
+template <> struct LvlTypeMap<VkPipelineExecutableInternalRepresentationKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR> {
+    typedef VkPipelineExecutableInternalRepresentationKHR Type;
+};
+
+// Map type VkDebugReportCallbackCreateInfoEXT to id VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkDebugReportCallbackCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT> {
+    typedef VkDebugReportCallbackCreateInfoEXT Type;
+};
+
+// Map type VkPipelineRasterizationStateRasterizationOrderAMD to id VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD
+template <> struct LvlTypeMap<VkPipelineRasterizationStateRasterizationOrderAMD> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD> {
+    typedef VkPipelineRasterizationStateRasterizationOrderAMD Type;
+};
+
+// Map type VkDebugMarkerObjectNameInfoEXT to id VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT
+template <> struct LvlTypeMap<VkDebugMarkerObjectNameInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT> {
+    typedef VkDebugMarkerObjectNameInfoEXT Type;
+};
+
+// Map type VkDebugMarkerObjectTagInfoEXT to id VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT
+template <> struct LvlTypeMap<VkDebugMarkerObjectTagInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT> {
+    typedef VkDebugMarkerObjectTagInfoEXT Type;
+};
+
+// Map type VkDebugMarkerMarkerInfoEXT to id VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT
+template <> struct LvlTypeMap<VkDebugMarkerMarkerInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT> {
+    typedef VkDebugMarkerMarkerInfoEXT Type;
+};
+
+// Map type VkDedicatedAllocationImageCreateInfoNV to id VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkDedicatedAllocationImageCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV> {
+    typedef VkDedicatedAllocationImageCreateInfoNV Type;
+};
+
+// Map type VkDedicatedAllocationBufferCreateInfoNV to id VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkDedicatedAllocationBufferCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV> {
+    typedef VkDedicatedAllocationBufferCreateInfoNV Type;
+};
+
+// Map type VkDedicatedAllocationMemoryAllocateInfoNV to id VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV
+template <> struct LvlTypeMap<VkDedicatedAllocationMemoryAllocateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV> {
+    typedef VkDedicatedAllocationMemoryAllocateInfoNV Type;
+};
+
+// Map type VkPhysicalDeviceTransformFeedbackFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceTransformFeedbackFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT> {
+    typedef VkPhysicalDeviceTransformFeedbackFeaturesEXT Type;
+};
+
+// Map type VkPhysicalDeviceTransformFeedbackPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceTransformFeedbackPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceTransformFeedbackPropertiesEXT Type;
+};
+
+// Map type VkPipelineRasterizationStateStreamCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkPipelineRasterizationStateStreamCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT> {
+    typedef VkPipelineRasterizationStateStreamCreateInfoEXT Type;
+};
+
+// Map type VkImageViewHandleInfoNVX to id VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX
+template <> struct LvlTypeMap<VkImageViewHandleInfoNVX> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX> {
+    typedef VkImageViewHandleInfoNVX Type;
+};
+
+// Map type VkTextureLODGatherFormatPropertiesAMD to id VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD
+template <> struct LvlTypeMap<VkTextureLODGatherFormatPropertiesAMD> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD> {
+    typedef VkTextureLODGatherFormatPropertiesAMD Type;
+};
+
+#ifdef VK_USE_PLATFORM_GGP
+// Map type VkStreamDescriptorSurfaceCreateInfoGGP to id VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP
+template <> struct LvlTypeMap<VkStreamDescriptorSurfaceCreateInfoGGP> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP> {
+    typedef VkStreamDescriptorSurfaceCreateInfoGGP Type;
+};
+
+#endif // VK_USE_PLATFORM_GGP
+// Map type VkPhysicalDeviceCornerSampledImageFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceCornerSampledImageFeaturesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV> {
+    typedef VkPhysicalDeviceCornerSampledImageFeaturesNV Type;
+};
+
+// Map type VkExternalMemoryImageCreateInfoNV to id VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkExternalMemoryImageCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV> {
+    typedef VkExternalMemoryImageCreateInfoNV Type;
+};
+
+// Map type VkExportMemoryAllocateInfoNV to id VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV
+template <> struct LvlTypeMap<VkExportMemoryAllocateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV> {
+    typedef VkExportMemoryAllocateInfoNV Type;
+};
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkImportMemoryWin32HandleInfoNV to id VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV
+template <> struct LvlTypeMap<VkImportMemoryWin32HandleInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV> {
+    typedef VkImportMemoryWin32HandleInfoNV Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkExportMemoryWin32HandleInfoNV to id VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV
+template <> struct LvlTypeMap<VkExportMemoryWin32HandleInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV> {
+    typedef VkExportMemoryWin32HandleInfoNV Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkWin32KeyedMutexAcquireReleaseInfoNV to id VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV
+template <> struct LvlTypeMap<VkWin32KeyedMutexAcquireReleaseInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV> {
+    typedef VkWin32KeyedMutexAcquireReleaseInfoNV Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+// Map type VkValidationFlagsEXT to id VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT
+template <> struct LvlTypeMap<VkValidationFlagsEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT> {
+    typedef VkValidationFlagsEXT Type;
+};
+
+#ifdef VK_USE_PLATFORM_VI_NN
+// Map type VkViSurfaceCreateInfoNN to id VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN
+template <> struct LvlTypeMap<VkViSurfaceCreateInfoNN> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN> {
+    typedef VkViSurfaceCreateInfoNN Type;
+};
+
+#endif // VK_USE_PLATFORM_VI_NN
+// Map type VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT> {
+    typedef VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT Type;
+};
+
+// Map type VkImageViewASTCDecodeModeEXT to id VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT
+template <> struct LvlTypeMap<VkImageViewASTCDecodeModeEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT> {
+    typedef VkImageViewASTCDecodeModeEXT Type;
+};
+
+// Map type VkPhysicalDeviceASTCDecodeFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceASTCDecodeFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT> {
+    typedef VkPhysicalDeviceASTCDecodeFeaturesEXT Type;
+};
+
+// Map type VkConditionalRenderingBeginInfoEXT to id VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT
+template <> struct LvlTypeMap<VkConditionalRenderingBeginInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT> {
+    typedef VkConditionalRenderingBeginInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceConditionalRenderingFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceConditionalRenderingFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT> {
+    typedef VkPhysicalDeviceConditionalRenderingFeaturesEXT Type;
+};
+
+// Map type VkCommandBufferInheritanceConditionalRenderingInfoEXT to id VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT
+template <> struct LvlTypeMap<VkCommandBufferInheritanceConditionalRenderingInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT> {
+    typedef VkCommandBufferInheritanceConditionalRenderingInfoEXT Type;
+};
+
+// Map type VkDeviceGeneratedCommandsFeaturesNVX to id VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX
+template <> struct LvlTypeMap<VkDeviceGeneratedCommandsFeaturesNVX> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX> {
+    typedef VkDeviceGeneratedCommandsFeaturesNVX Type;
+};
+
+// Map type VkDeviceGeneratedCommandsLimitsNVX to id VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX
+template <> struct LvlTypeMap<VkDeviceGeneratedCommandsLimitsNVX> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX> {
+    typedef VkDeviceGeneratedCommandsLimitsNVX Type;
+};
+
+// Map type VkIndirectCommandsLayoutCreateInfoNVX to id VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX
+template <> struct LvlTypeMap<VkIndirectCommandsLayoutCreateInfoNVX> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX> {
+    typedef VkIndirectCommandsLayoutCreateInfoNVX Type;
+};
+
+// Map type VkCmdProcessCommandsInfoNVX to id VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX
+template <> struct LvlTypeMap<VkCmdProcessCommandsInfoNVX> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX> {
+    typedef VkCmdProcessCommandsInfoNVX Type;
+};
+
+// Map type VkCmdReserveSpaceForCommandsInfoNVX to id VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX
+template <> struct LvlTypeMap<VkCmdReserveSpaceForCommandsInfoNVX> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX> {
+    typedef VkCmdReserveSpaceForCommandsInfoNVX Type;
+};
+
+// Map type VkObjectTableCreateInfoNVX to id VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX
+template <> struct LvlTypeMap<VkObjectTableCreateInfoNVX> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX> {
+    typedef VkObjectTableCreateInfoNVX Type;
+};
+
+// Map type VkPipelineViewportWScalingStateCreateInfoNV to id VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkPipelineViewportWScalingStateCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV> {
+    typedef VkPipelineViewportWScalingStateCreateInfoNV Type;
+};
+
+// Map type VkSurfaceCapabilities2EXT to id VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT
+template <> struct LvlTypeMap<VkSurfaceCapabilities2EXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT> {
+    typedef VkSurfaceCapabilities2EXT Type;
+};
+
+// Map type VkDisplayPowerInfoEXT to id VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT
+template <> struct LvlTypeMap<VkDisplayPowerInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT> {
+    typedef VkDisplayPowerInfoEXT Type;
+};
+
+// Map type VkDeviceEventInfoEXT to id VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT
+template <> struct LvlTypeMap<VkDeviceEventInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT> {
+    typedef VkDeviceEventInfoEXT Type;
+};
+
+// Map type VkDisplayEventInfoEXT to id VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT
+template <> struct LvlTypeMap<VkDisplayEventInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT> {
+    typedef VkDisplayEventInfoEXT Type;
+};
+
+// Map type VkSwapchainCounterCreateInfoEXT to id VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkSwapchainCounterCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT> {
+    typedef VkSwapchainCounterCreateInfoEXT Type;
+};
+
+// Map type VkPresentTimesInfoGOOGLE to id VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE
+template <> struct LvlTypeMap<VkPresentTimesInfoGOOGLE> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE> {
+    typedef VkPresentTimesInfoGOOGLE Type;
+};
+
+// Map type VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX
+template <> struct LvlTypeMap<VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX> {
+    typedef VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX Type;
+};
+
+// Map type VkPipelineViewportSwizzleStateCreateInfoNV to id VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkPipelineViewportSwizzleStateCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV> {
+    typedef VkPipelineViewportSwizzleStateCreateInfoNV Type;
+};
+
+// Map type VkPhysicalDeviceDiscardRectanglePropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceDiscardRectanglePropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceDiscardRectanglePropertiesEXT Type;
+};
+
+// Map type VkPipelineDiscardRectangleStateCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkPipelineDiscardRectangleStateCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT> {
+    typedef VkPipelineDiscardRectangleStateCreateInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceConservativeRasterizationPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceConservativeRasterizationPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceConservativeRasterizationPropertiesEXT Type;
+};
+
+// Map type VkPipelineRasterizationConservativeStateCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkPipelineRasterizationConservativeStateCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT> {
+    typedef VkPipelineRasterizationConservativeStateCreateInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceDepthClipEnableFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceDepthClipEnableFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT> {
+    typedef VkPhysicalDeviceDepthClipEnableFeaturesEXT Type;
+};
+
+// Map type VkPipelineRasterizationDepthClipStateCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkPipelineRasterizationDepthClipStateCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT> {
+    typedef VkPipelineRasterizationDepthClipStateCreateInfoEXT Type;
+};
+
+// Map type VkHdrMetadataEXT to id VK_STRUCTURE_TYPE_HDR_METADATA_EXT
+template <> struct LvlTypeMap<VkHdrMetadataEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_HDR_METADATA_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_HDR_METADATA_EXT> {
+    typedef VkHdrMetadataEXT Type;
+};
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+// Map type VkIOSSurfaceCreateInfoMVK to id VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK
+template <> struct LvlTypeMap<VkIOSSurfaceCreateInfoMVK> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK> {
+    typedef VkIOSSurfaceCreateInfoMVK Type;
+};
+
+#endif // VK_USE_PLATFORM_IOS_MVK
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+// Map type VkMacOSSurfaceCreateInfoMVK to id VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK
+template <> struct LvlTypeMap<VkMacOSSurfaceCreateInfoMVK> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK> {
+    typedef VkMacOSSurfaceCreateInfoMVK Type;
+};
+
+#endif // VK_USE_PLATFORM_MACOS_MVK
+// Map type VkDebugUtilsObjectNameInfoEXT to id VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT
+template <> struct LvlTypeMap<VkDebugUtilsObjectNameInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT> {
+    typedef VkDebugUtilsObjectNameInfoEXT Type;
+};
+
+// Map type VkDebugUtilsObjectTagInfoEXT to id VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT
+template <> struct LvlTypeMap<VkDebugUtilsObjectTagInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT> {
+    typedef VkDebugUtilsObjectTagInfoEXT Type;
+};
+
+// Map type VkDebugUtilsLabelEXT to id VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT
+template <> struct LvlTypeMap<VkDebugUtilsLabelEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT> {
+    typedef VkDebugUtilsLabelEXT Type;
+};
+
+// Map type VkDebugUtilsMessengerCallbackDataEXT to id VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT
+template <> struct LvlTypeMap<VkDebugUtilsMessengerCallbackDataEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT> {
+    typedef VkDebugUtilsMessengerCallbackDataEXT Type;
+};
+
+// Map type VkDebugUtilsMessengerCreateInfoEXT to id VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkDebugUtilsMessengerCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT> {
+    typedef VkDebugUtilsMessengerCreateInfoEXT Type;
+};
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+// Map type VkAndroidHardwareBufferUsageANDROID to id VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID
+template <> struct LvlTypeMap<VkAndroidHardwareBufferUsageANDROID> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID> {
+    typedef VkAndroidHardwareBufferUsageANDROID Type;
+};
+
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+// Map type VkAndroidHardwareBufferPropertiesANDROID to id VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID
+template <> struct LvlTypeMap<VkAndroidHardwareBufferPropertiesANDROID> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID> {
+    typedef VkAndroidHardwareBufferPropertiesANDROID Type;
+};
+
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+// Map type VkAndroidHardwareBufferFormatPropertiesANDROID to id VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID
+template <> struct LvlTypeMap<VkAndroidHardwareBufferFormatPropertiesANDROID> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID> {
+    typedef VkAndroidHardwareBufferFormatPropertiesANDROID Type;
+};
+
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+// Map type VkImportAndroidHardwareBufferInfoANDROID to id VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID
+template <> struct LvlTypeMap<VkImportAndroidHardwareBufferInfoANDROID> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID> {
+    typedef VkImportAndroidHardwareBufferInfoANDROID Type;
+};
+
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+// Map type VkMemoryGetAndroidHardwareBufferInfoANDROID to id VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID
+template <> struct LvlTypeMap<VkMemoryGetAndroidHardwareBufferInfoANDROID> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID> {
+    typedef VkMemoryGetAndroidHardwareBufferInfoANDROID Type;
+};
+
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+// Map type VkExternalFormatANDROID to id VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID
+template <> struct LvlTypeMap<VkExternalFormatANDROID> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID> {
+    typedef VkExternalFormatANDROID Type;
+};
+
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+// Map type VkSamplerReductionModeCreateInfoEXT to id VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkSamplerReductionModeCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT> {
+    typedef VkSamplerReductionModeCreateInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT Type;
+};
+
+// Map type VkPhysicalDeviceInlineUniformBlockFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceInlineUniformBlockFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT> {
+    typedef VkPhysicalDeviceInlineUniformBlockFeaturesEXT Type;
+};
+
+// Map type VkPhysicalDeviceInlineUniformBlockPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceInlineUniformBlockPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceInlineUniformBlockPropertiesEXT Type;
+};
+
+// Map type VkWriteDescriptorSetInlineUniformBlockEXT to id VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT
+template <> struct LvlTypeMap<VkWriteDescriptorSetInlineUniformBlockEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT> {
+    typedef VkWriteDescriptorSetInlineUniformBlockEXT Type;
+};
+
+// Map type VkDescriptorPoolInlineUniformBlockCreateInfoEXT to id VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkDescriptorPoolInlineUniformBlockCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT> {
+    typedef VkDescriptorPoolInlineUniformBlockCreateInfoEXT Type;
+};
+
+// Map type VkSampleLocationsInfoEXT to id VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT
+template <> struct LvlTypeMap<VkSampleLocationsInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT> {
+    typedef VkSampleLocationsInfoEXT Type;
+};
+
+// Map type VkRenderPassSampleLocationsBeginInfoEXT to id VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT
+template <> struct LvlTypeMap<VkRenderPassSampleLocationsBeginInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT> {
+    typedef VkRenderPassSampleLocationsBeginInfoEXT Type;
+};
+
+// Map type VkPipelineSampleLocationsStateCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkPipelineSampleLocationsStateCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT> {
+    typedef VkPipelineSampleLocationsStateCreateInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceSampleLocationsPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceSampleLocationsPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceSampleLocationsPropertiesEXT Type;
+};
+
+// Map type VkMultisamplePropertiesEXT to id VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkMultisamplePropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT> {
+    typedef VkMultisamplePropertiesEXT Type;
+};
+
+// Map type VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT> {
+    typedef VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT Type;
+};
+
+// Map type VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT Type;
+};
+
+// Map type VkPipelineColorBlendAdvancedStateCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkPipelineColorBlendAdvancedStateCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT> {
+    typedef VkPipelineColorBlendAdvancedStateCreateInfoEXT Type;
+};
+
+// Map type VkPipelineCoverageToColorStateCreateInfoNV to id VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkPipelineCoverageToColorStateCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV> {
+    typedef VkPipelineCoverageToColorStateCreateInfoNV Type;
+};
+
+// Map type VkPipelineCoverageModulationStateCreateInfoNV to id VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkPipelineCoverageModulationStateCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV> {
+    typedef VkPipelineCoverageModulationStateCreateInfoNV Type;
+};
+
+// Map type VkPhysicalDeviceShaderSMBuiltinsPropertiesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceShaderSMBuiltinsPropertiesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV> {
+    typedef VkPhysicalDeviceShaderSMBuiltinsPropertiesNV Type;
+};
+
+// Map type VkPhysicalDeviceShaderSMBuiltinsFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV> {
+    typedef VkPhysicalDeviceShaderSMBuiltinsFeaturesNV Type;
+};
+
+// Map type VkDrmFormatModifierPropertiesListEXT to id VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT
+template <> struct LvlTypeMap<VkDrmFormatModifierPropertiesListEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT> {
+    typedef VkDrmFormatModifierPropertiesListEXT Type;
+};
+
+// Map type VkPhysicalDeviceImageDrmFormatModifierInfoEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceImageDrmFormatModifierInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT> {
+    typedef VkPhysicalDeviceImageDrmFormatModifierInfoEXT Type;
+};
+
+// Map type VkImageDrmFormatModifierListCreateInfoEXT to id VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkImageDrmFormatModifierListCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT> {
+    typedef VkImageDrmFormatModifierListCreateInfoEXT Type;
+};
+
+// Map type VkImageDrmFormatModifierExplicitCreateInfoEXT to id VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkImageDrmFormatModifierExplicitCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT> {
+    typedef VkImageDrmFormatModifierExplicitCreateInfoEXT Type;
+};
+
+// Map type VkImageDrmFormatModifierPropertiesEXT to id VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkImageDrmFormatModifierPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT> {
+    typedef VkImageDrmFormatModifierPropertiesEXT Type;
+};
+
+// Map type VkValidationCacheCreateInfoEXT to id VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkValidationCacheCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT> {
+    typedef VkValidationCacheCreateInfoEXT Type;
+};
+
+// Map type VkShaderModuleValidationCacheCreateInfoEXT to id VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkShaderModuleValidationCacheCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT> {
+    typedef VkShaderModuleValidationCacheCreateInfoEXT Type;
+};
+
+// Map type VkDescriptorSetLayoutBindingFlagsCreateInfoEXT to id VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkDescriptorSetLayoutBindingFlagsCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT> {
+    typedef VkDescriptorSetLayoutBindingFlagsCreateInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceDescriptorIndexingFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceDescriptorIndexingFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT> {
+    typedef VkPhysicalDeviceDescriptorIndexingFeaturesEXT Type;
+};
+
+// Map type VkPhysicalDeviceDescriptorIndexingPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceDescriptorIndexingPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceDescriptorIndexingPropertiesEXT Type;
+};
+
+// Map type VkDescriptorSetVariableDescriptorCountAllocateInfoEXT to id VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT
+template <> struct LvlTypeMap<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT> {
+    typedef VkDescriptorSetVariableDescriptorCountAllocateInfoEXT Type;
+};
+
+// Map type VkDescriptorSetVariableDescriptorCountLayoutSupportEXT to id VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT
+template <> struct LvlTypeMap<VkDescriptorSetVariableDescriptorCountLayoutSupportEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT> {
+    typedef VkDescriptorSetVariableDescriptorCountLayoutSupportEXT Type;
+};
+
+// Map type VkPipelineViewportShadingRateImageStateCreateInfoNV to id VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkPipelineViewportShadingRateImageStateCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV> {
+    typedef VkPipelineViewportShadingRateImageStateCreateInfoNV Type;
+};
+
+// Map type VkPhysicalDeviceShadingRateImageFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceShadingRateImageFeaturesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV> {
+    typedef VkPhysicalDeviceShadingRateImageFeaturesNV Type;
+};
+
+// Map type VkPhysicalDeviceShadingRateImagePropertiesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceShadingRateImagePropertiesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV> {
+    typedef VkPhysicalDeviceShadingRateImagePropertiesNV Type;
+};
+
+// Map type VkPipelineViewportCoarseSampleOrderStateCreateInfoNV to id VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkPipelineViewportCoarseSampleOrderStateCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV> {
+    typedef VkPipelineViewportCoarseSampleOrderStateCreateInfoNV Type;
+};
+
+// Map type VkRayTracingShaderGroupCreateInfoNV to id VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkRayTracingShaderGroupCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV> {
+    typedef VkRayTracingShaderGroupCreateInfoNV Type;
+};
+
+// Map type VkRayTracingPipelineCreateInfoNV to id VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkRayTracingPipelineCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV> {
+    typedef VkRayTracingPipelineCreateInfoNV Type;
+};
+
+// Map type VkGeometryTrianglesNV to id VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV
+template <> struct LvlTypeMap<VkGeometryTrianglesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV> {
+    typedef VkGeometryTrianglesNV Type;
+};
+
+// Map type VkGeometryAABBNV to id VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV
+template <> struct LvlTypeMap<VkGeometryAABBNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV> {
+    typedef VkGeometryAABBNV Type;
+};
+
+// Map type VkGeometryNV to id VK_STRUCTURE_TYPE_GEOMETRY_NV
+template <> struct LvlTypeMap<VkGeometryNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_GEOMETRY_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_GEOMETRY_NV> {
+    typedef VkGeometryNV Type;
+};
+
+// Map type VkAccelerationStructureInfoNV to id VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV
+template <> struct LvlTypeMap<VkAccelerationStructureInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV> {
+    typedef VkAccelerationStructureInfoNV Type;
+};
+
+// Map type VkAccelerationStructureCreateInfoNV to id VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkAccelerationStructureCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV> {
+    typedef VkAccelerationStructureCreateInfoNV Type;
+};
+
+// Map type VkBindAccelerationStructureMemoryInfoNV to id VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV
+template <> struct LvlTypeMap<VkBindAccelerationStructureMemoryInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV> {
+    typedef VkBindAccelerationStructureMemoryInfoNV Type;
+};
+
+// Map type VkWriteDescriptorSetAccelerationStructureNV to id VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV
+template <> struct LvlTypeMap<VkWriteDescriptorSetAccelerationStructureNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV> {
+    typedef VkWriteDescriptorSetAccelerationStructureNV Type;
+};
+
+// Map type VkAccelerationStructureMemoryRequirementsInfoNV to id VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV
+template <> struct LvlTypeMap<VkAccelerationStructureMemoryRequirementsInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV> {
+    typedef VkAccelerationStructureMemoryRequirementsInfoNV Type;
+};
+
+// Map type VkPhysicalDeviceRayTracingPropertiesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceRayTracingPropertiesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV> {
+    typedef VkPhysicalDeviceRayTracingPropertiesNV Type;
+};
+
+// Map type VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV> {
+    typedef VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV Type;
+};
+
+// Map type VkPipelineRepresentativeFragmentTestStateCreateInfoNV to id VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkPipelineRepresentativeFragmentTestStateCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV> {
+    typedef VkPipelineRepresentativeFragmentTestStateCreateInfoNV Type;
+};
+
+// Map type VkPhysicalDeviceImageViewImageFormatInfoEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceImageViewImageFormatInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT> {
+    typedef VkPhysicalDeviceImageViewImageFormatInfoEXT Type;
+};
+
+// Map type VkFilterCubicImageViewImageFormatPropertiesEXT to id VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkFilterCubicImageViewImageFormatPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT> {
+    typedef VkFilterCubicImageViewImageFormatPropertiesEXT Type;
+};
+
+// Map type VkDeviceQueueGlobalPriorityCreateInfoEXT to id VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkDeviceQueueGlobalPriorityCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT> {
+    typedef VkDeviceQueueGlobalPriorityCreateInfoEXT Type;
+};
+
+// Map type VkImportMemoryHostPointerInfoEXT to id VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT
+template <> struct LvlTypeMap<VkImportMemoryHostPointerInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT> {
+    typedef VkImportMemoryHostPointerInfoEXT Type;
+};
+
+// Map type VkMemoryHostPointerPropertiesEXT to id VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkMemoryHostPointerPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT> {
+    typedef VkMemoryHostPointerPropertiesEXT Type;
+};
+
+// Map type VkPhysicalDeviceExternalMemoryHostPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceExternalMemoryHostPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceExternalMemoryHostPropertiesEXT Type;
+};
+
+// Map type VkPipelineCompilerControlCreateInfoAMD to id VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD
+template <> struct LvlTypeMap<VkPipelineCompilerControlCreateInfoAMD> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD> {
+    typedef VkPipelineCompilerControlCreateInfoAMD Type;
+};
+
+// Map type VkCalibratedTimestampInfoEXT to id VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT
+template <> struct LvlTypeMap<VkCalibratedTimestampInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT> {
+    typedef VkCalibratedTimestampInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceShaderCorePropertiesAMD to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD
+template <> struct LvlTypeMap<VkPhysicalDeviceShaderCorePropertiesAMD> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD> {
+    typedef VkPhysicalDeviceShaderCorePropertiesAMD Type;
+};
+
+// Map type VkDeviceMemoryOverallocationCreateInfoAMD to id VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD
+template <> struct LvlTypeMap<VkDeviceMemoryOverallocationCreateInfoAMD> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD> {
+    typedef VkDeviceMemoryOverallocationCreateInfoAMD Type;
+};
+
+// Map type VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT Type;
+};
+
+// Map type VkPipelineVertexInputDivisorStateCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkPipelineVertexInputDivisorStateCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT> {
+    typedef VkPipelineVertexInputDivisorStateCreateInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT> {
+    typedef VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT Type;
+};
+
+#ifdef VK_USE_PLATFORM_GGP
+// Map type VkPresentFrameTokenGGP to id VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP
+template <> struct LvlTypeMap<VkPresentFrameTokenGGP> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP> {
+    typedef VkPresentFrameTokenGGP Type;
+};
+
+#endif // VK_USE_PLATFORM_GGP
+// Map type VkPipelineCreationFeedbackCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkPipelineCreationFeedbackCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT> {
+    typedef VkPipelineCreationFeedbackCreateInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceComputeShaderDerivativesFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV> {
+    typedef VkPhysicalDeviceComputeShaderDerivativesFeaturesNV Type;
+};
+
+// Map type VkPhysicalDeviceMeshShaderFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceMeshShaderFeaturesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV> {
+    typedef VkPhysicalDeviceMeshShaderFeaturesNV Type;
+};
+
+// Map type VkPhysicalDeviceMeshShaderPropertiesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceMeshShaderPropertiesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV> {
+    typedef VkPhysicalDeviceMeshShaderPropertiesNV Type;
+};
+
+// Map type VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV> {
+    typedef VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV Type;
+};
+
+// Map type VkPhysicalDeviceShaderImageFootprintFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceShaderImageFootprintFeaturesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV> {
+    typedef VkPhysicalDeviceShaderImageFootprintFeaturesNV Type;
+};
+
+// Map type VkPipelineViewportExclusiveScissorStateCreateInfoNV to id VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkPipelineViewportExclusiveScissorStateCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV> {
+    typedef VkPipelineViewportExclusiveScissorStateCreateInfoNV Type;
+};
+
+// Map type VkPhysicalDeviceExclusiveScissorFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceExclusiveScissorFeaturesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV> {
+    typedef VkPhysicalDeviceExclusiveScissorFeaturesNV Type;
+};
+
+// Map type VkQueueFamilyCheckpointPropertiesNV to id VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV
+template <> struct LvlTypeMap<VkQueueFamilyCheckpointPropertiesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV> {
+    typedef VkQueueFamilyCheckpointPropertiesNV Type;
+};
+
+// Map type VkCheckpointDataNV to id VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV
+template <> struct LvlTypeMap<VkCheckpointDataNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV> {
+    typedef VkCheckpointDataNV Type;
+};
+
+// Map type VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL
+template <> struct LvlTypeMap<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL> {
+    typedef VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL Type;
+};
+
+// Map type VkInitializePerformanceApiInfoINTEL to id VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL
+template <> struct LvlTypeMap<VkInitializePerformanceApiInfoINTEL> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL> {
+    typedef VkInitializePerformanceApiInfoINTEL Type;
+};
+
+// Map type VkQueryPoolCreateInfoINTEL to id VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL
+template <> struct LvlTypeMap<VkQueryPoolCreateInfoINTEL> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL> {
+    typedef VkQueryPoolCreateInfoINTEL Type;
+};
+
+// Map type VkPerformanceMarkerInfoINTEL to id VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL
+template <> struct LvlTypeMap<VkPerformanceMarkerInfoINTEL> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL> {
+    typedef VkPerformanceMarkerInfoINTEL Type;
+};
+
+// Map type VkPerformanceStreamMarkerInfoINTEL to id VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL
+template <> struct LvlTypeMap<VkPerformanceStreamMarkerInfoINTEL> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL> {
+    typedef VkPerformanceStreamMarkerInfoINTEL Type;
+};
+
+// Map type VkPerformanceOverrideInfoINTEL to id VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL
+template <> struct LvlTypeMap<VkPerformanceOverrideInfoINTEL> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL> {
+    typedef VkPerformanceOverrideInfoINTEL Type;
+};
+
+// Map type VkPerformanceConfigurationAcquireInfoINTEL to id VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL
+template <> struct LvlTypeMap<VkPerformanceConfigurationAcquireInfoINTEL> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL> {
+    typedef VkPerformanceConfigurationAcquireInfoINTEL Type;
+};
+
+// Map type VkPhysicalDevicePCIBusInfoPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDevicePCIBusInfoPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT> {
+    typedef VkPhysicalDevicePCIBusInfoPropertiesEXT Type;
+};
+
+// Map type VkDisplayNativeHdrSurfaceCapabilitiesAMD to id VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD
+template <> struct LvlTypeMap<VkDisplayNativeHdrSurfaceCapabilitiesAMD> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD> {
+    typedef VkDisplayNativeHdrSurfaceCapabilitiesAMD Type;
+};
+
+// Map type VkSwapchainDisplayNativeHdrCreateInfoAMD to id VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD
+template <> struct LvlTypeMap<VkSwapchainDisplayNativeHdrCreateInfoAMD> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD> {
+    typedef VkSwapchainDisplayNativeHdrCreateInfoAMD Type;
+};
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+// Map type VkImagePipeSurfaceCreateInfoFUCHSIA to id VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA
+template <> struct LvlTypeMap<VkImagePipeSurfaceCreateInfoFUCHSIA> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA> {
+    typedef VkImagePipeSurfaceCreateInfoFUCHSIA Type;
+};
+
+#endif // VK_USE_PLATFORM_FUCHSIA
+#ifdef VK_USE_PLATFORM_METAL_EXT
+// Map type VkMetalSurfaceCreateInfoEXT to id VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkMetalSurfaceCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT> {
+    typedef VkMetalSurfaceCreateInfoEXT Type;
+};
+
+#endif // VK_USE_PLATFORM_METAL_EXT
+// Map type VkPhysicalDeviceFragmentDensityMapFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceFragmentDensityMapFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT> {
+    typedef VkPhysicalDeviceFragmentDensityMapFeaturesEXT Type;
+};
+
+// Map type VkPhysicalDeviceFragmentDensityMapPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceFragmentDensityMapPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceFragmentDensityMapPropertiesEXT Type;
+};
+
+// Map type VkRenderPassFragmentDensityMapCreateInfoEXT to id VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkRenderPassFragmentDensityMapCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT> {
+    typedef VkRenderPassFragmentDensityMapCreateInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceScalarBlockLayoutFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceScalarBlockLayoutFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT> {
+    typedef VkPhysicalDeviceScalarBlockLayoutFeaturesEXT Type;
+};
+
+// Map type VkPhysicalDeviceSubgroupSizeControlFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceSubgroupSizeControlFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT> {
+    typedef VkPhysicalDeviceSubgroupSizeControlFeaturesEXT Type;
+};
+
+// Map type VkPhysicalDeviceSubgroupSizeControlPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceSubgroupSizeControlPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceSubgroupSizeControlPropertiesEXT Type;
+};
+
+// Map type VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT> {
+    typedef VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceShaderCoreProperties2AMD to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD
+template <> struct LvlTypeMap<VkPhysicalDeviceShaderCoreProperties2AMD> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD> {
+    typedef VkPhysicalDeviceShaderCoreProperties2AMD Type;
+};
+
+// Map type VkPhysicalDeviceCoherentMemoryFeaturesAMD to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD
+template <> struct LvlTypeMap<VkPhysicalDeviceCoherentMemoryFeaturesAMD> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD> {
+    typedef VkPhysicalDeviceCoherentMemoryFeaturesAMD Type;
+};
+
+// Map type VkPhysicalDeviceMemoryBudgetPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceMemoryBudgetPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceMemoryBudgetPropertiesEXT Type;
+};
+
+// Map type VkPhysicalDeviceMemoryPriorityFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceMemoryPriorityFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT> {
+    typedef VkPhysicalDeviceMemoryPriorityFeaturesEXT Type;
+};
+
+// Map type VkMemoryPriorityAllocateInfoEXT to id VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT
+template <> struct LvlTypeMap<VkMemoryPriorityAllocateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT> {
+    typedef VkMemoryPriorityAllocateInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV> {
+    typedef VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV Type;
+};
+
+// Map type VkPhysicalDeviceBufferDeviceAddressFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT> {
+    typedef VkPhysicalDeviceBufferDeviceAddressFeaturesEXT Type;
+};
+
+// Map type VkBufferDeviceAddressCreateInfoEXT to id VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkBufferDeviceAddressCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT> {
+    typedef VkBufferDeviceAddressCreateInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceToolPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceToolPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceToolPropertiesEXT Type;
+};
+
+// Map type VkImageStencilUsageCreateInfoEXT to id VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkImageStencilUsageCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT> {
+    typedef VkImageStencilUsageCreateInfoEXT Type;
+};
+
+// Map type VkValidationFeaturesEXT to id VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT
+template <> struct LvlTypeMap<VkValidationFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT> {
+    typedef VkValidationFeaturesEXT Type;
+};
+
+// Map type VkCooperativeMatrixPropertiesNV to id VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV
+template <> struct LvlTypeMap<VkCooperativeMatrixPropertiesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV> {
+    typedef VkCooperativeMatrixPropertiesNV Type;
+};
+
+// Map type VkPhysicalDeviceCooperativeMatrixFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceCooperativeMatrixFeaturesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV> {
+    typedef VkPhysicalDeviceCooperativeMatrixFeaturesNV Type;
+};
+
+// Map type VkPhysicalDeviceCooperativeMatrixPropertiesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceCooperativeMatrixPropertiesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV> {
+    typedef VkPhysicalDeviceCooperativeMatrixPropertiesNV Type;
+};
+
+// Map type VkPhysicalDeviceCoverageReductionModeFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceCoverageReductionModeFeaturesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV> {
+    typedef VkPhysicalDeviceCoverageReductionModeFeaturesNV Type;
+};
+
+// Map type VkPipelineCoverageReductionStateCreateInfoNV to id VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkPipelineCoverageReductionStateCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV> {
+    typedef VkPipelineCoverageReductionStateCreateInfoNV Type;
+};
+
+// Map type VkFramebufferMixedSamplesCombinationNV to id VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV
+template <> struct LvlTypeMap<VkFramebufferMixedSamplesCombinationNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV> {
+    typedef VkFramebufferMixedSamplesCombinationNV Type;
+};
+
+// Map type VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT> {
+    typedef VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT Type;
+};
+
+// Map type VkPhysicalDeviceYcbcrImageArraysFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT> {
+    typedef VkPhysicalDeviceYcbcrImageArraysFeaturesEXT Type;
+};
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkSurfaceFullScreenExclusiveInfoEXT to id VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT
+template <> struct LvlTypeMap<VkSurfaceFullScreenExclusiveInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT> {
+    typedef VkSurfaceFullScreenExclusiveInfoEXT Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkSurfaceCapabilitiesFullScreenExclusiveEXT to id VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT
+template <> struct LvlTypeMap<VkSurfaceCapabilitiesFullScreenExclusiveEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT> {
+    typedef VkSurfaceCapabilitiesFullScreenExclusiveEXT Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkSurfaceFullScreenExclusiveWin32InfoEXT to id VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT
+template <> struct LvlTypeMap<VkSurfaceFullScreenExclusiveWin32InfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT> {
+    typedef VkSurfaceFullScreenExclusiveWin32InfoEXT Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+// Map type VkHeadlessSurfaceCreateInfoEXT to id VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkHeadlessSurfaceCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT> {
+    typedef VkHeadlessSurfaceCreateInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceLineRasterizationFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceLineRasterizationFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT> {
+    typedef VkPhysicalDeviceLineRasterizationFeaturesEXT Type;
+};
+
+// Map type VkPhysicalDeviceLineRasterizationPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceLineRasterizationPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceLineRasterizationPropertiesEXT Type;
+};
+
+// Map type VkPipelineRasterizationLineStateCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkPipelineRasterizationLineStateCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT> {
+    typedef VkPipelineRasterizationLineStateCreateInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceHostQueryResetFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceHostQueryResetFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT> {
+    typedef VkPhysicalDeviceHostQueryResetFeaturesEXT Type;
+};
+
+// Map type VkPhysicalDeviceIndexTypeUint8FeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceIndexTypeUint8FeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT> {
+    typedef VkPhysicalDeviceIndexTypeUint8FeaturesEXT Type;
+};
+
+// Map type VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT> {
+    typedef VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT Type;
+};
+
+// Map type VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT> {
+    typedef VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT Type;
+};
+
+// Map type VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT Type;
+};
+
+// Header "base class" for pNext chain traversal
+struct LvlGenericHeader {
+   VkStructureType sType;
+   const LvlGenericHeader *pNext;
+};
+
+// Find an entry of the given type in the pNext chain
+template <typename T> const T *lvl_find_in_chain(const void *next) {
+    const LvlGenericHeader *current = reinterpret_cast<const LvlGenericHeader *>(next);
+    const T *found = nullptr;
+    while (current) {
+        if (LvlTypeMap<T>::kSType == current->sType) {
+            found = reinterpret_cast<const T*>(current);
+            current = nullptr;
+        } else {
+            current = current->pNext;
+        }
+    }
+    return found;
+}
+
+// Init the header of an sType struct with pNext
+template <typename T> T lvl_init_struct(void *p_next) {
+    T out = {};
+    out.sType = LvlTypeMap<T>::kSType;
+    out.pNext = p_next;
+    return out;
+}
+
+// Init the header of an sType struct
+template <typename T> T lvl_init_struct() {
+    T out = {};
+    out.sType = LvlTypeMap<T>::kSType;
+    return out;
+}
+
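A minimal usage sketch of the pNext helpers defined above (illustrative only, not part of the generated header). It assumes <vulkan/vulkan.h> and this vk_typemap_helper.h are included, and that core structures such as VkMemoryAllocateInfo are mapped earlier in the same generated file:

// Minimal sketch: build a chained allocate-info and recover the extension struct.
static void ExamplePNextChainUsage() {
    // lvl_init_struct zero-initializes the struct and fills sType from LvlTypeMap.
    VkMemoryPriorityAllocateInfoEXT priority_info =
        lvl_init_struct<VkMemoryPriorityAllocateInfoEXT>();
    priority_info.priority = 1.0f;

    // The one-argument overload also wires up pNext, chaining the extension struct.
    VkMemoryAllocateInfo alloc_info =
        lvl_init_struct<VkMemoryAllocateInfo>(&priority_info);
    alloc_info.allocationSize = 4096;

    // lvl_find_in_chain walks the chain and matches on the mapped sType.
    const VkMemoryPriorityAllocateInfoEXT* found =
        lvl_find_in_chain<VkMemoryPriorityAllocateInfoEXT>(alloc_info.pNext);
    (void)found;  // found == &priority_info here
}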
diff --git a/src/third_party/vulkan-tools/src/icd/linux/VkICD_mock_icd.json b/src/third_party/vulkan-tools/src/icd/linux/VkICD_mock_icd.json
new file mode 100644
index 0000000..bf32f8e
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/icd/linux/VkICD_mock_icd.json
@@ -0,0 +1,12 @@
+{
+    "file_format_version" : "1.0.1",
+    "ICD": {
+        "library_path": "./libVkICD_mock_icd.so",
+        "api_version": "1.1.97"
+    }
+}
+
+
+
+
+
diff --git a/src/third_party/vulkan-tools/src/icd/macos/VkICD_mock_icd.json b/src/third_party/vulkan-tools/src/icd/macos/VkICD_mock_icd.json
new file mode 100644
index 0000000..7101986
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/icd/macos/VkICD_mock_icd.json
@@ -0,0 +1,8 @@
+{
+    "file_format_version" : "1.0.1",
+    "ICD": {
+        "library_path": "./libVkICD_mock_icd.dylib",
+        "api_version": "1.1.97"
+    }
+}
+
diff --git a/src/third_party/vulkan-tools/src/icd/windows/VkICD_mock_icd.json b/src/third_party/vulkan-tools/src/icd/windows/VkICD_mock_icd.json
new file mode 100644
index 0000000..d192f5c
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/icd/windows/VkICD_mock_icd.json
@@ -0,0 +1,12 @@
+{
+    "file_format_version" : "1.0.1",
+    "ICD": {
+        "library_path": ".\\VkICD_mock_icd.dll",
+        "api_version": "1.1.97"
+    }
+}
+
+
+
+
+
diff --git a/src/third_party/vulkan-tools/src/mac_common.cmake b/src/third_party/vulkan-tools/src/mac_common.cmake
new file mode 100644
index 0000000..bad3c41
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/mac_common.cmake
@@ -0,0 +1,56 @@
+# ~~~
+# Copyright (c) 2018 Valve Corporation
+# Copyright (c) 2018 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ~~~
+
+# Set up common settings for building all demos on Apple platforms.
+
+# Source for the MoltenVK ICD library and JSON file
+set(MOLTENVK_DIR ${MOLTENVK_REPO_ROOT})
+
+# MoltenVK JSON File
+
+execute_process(COMMAND mkdir -p ${CMAKE_BINARY_DIR}/staging-json)
+execute_process(COMMAND sed -e "/\"library_path\":/s$:[[:space:]]*\"[[:space:]]*[\\.\\/]*$: \"..\\/..\\/..\\/Frameworks\\/$"
+                        ${MOLTENVK_DIR}/MoltenVK/icd/MoltenVK_icd.json
+                OUTPUT_FILE ${CMAKE_BINARY_DIR}/staging-json/MoltenVK_icd.json)
+
+# ~~~
+# Modify the ICD JSON file to adjust the library path.
+# The ICD JSON file goes in the Resources/vulkan/icd.d directory, so adjust the
+# library_path to the relative path to the Frameworks directory in the bundle.
+# The regex does: substitute ':<whitespace>"<whitespace><all occurrences of . and />' with:
+# ': "../../../Frameworks/'
+# ~~~
+add_custom_target(MoltenVK_icd-staging-json ALL
+                  COMMAND mkdir -p ${CMAKE_BINARY_DIR}/staging-json
+                  COMMAND sed -e "/\"library_path\":/s$:[[:space:]]*\"[[:space:]]*[\\.\\/]*$: \"..\\/..\\/..\\/Frameworks\\/$"
+                          ${MOLTENVK_DIR}/MoltenVK/icd/MoltenVK_icd.json > ${CMAKE_BINARY_DIR}/staging-json/MoltenVK_icd.json
+                  VERBATIM
+                  DEPENDS "${MOLTENVK_DIR}/MoltenVK/icd/MoltenVK_icd.json")
+set_source_files_properties(${CMAKE_BINARY_DIR}/staging-json/MoltenVK_icd.json PROPERTIES GENERATED TRUE)
+
+find_library(COCOA NAMES Cocoa)
+
+# Locate Interface Builder Tool, needed to build things like Storyboards outside of Xcode.
+if(NOT ${CMAKE_GENERATOR} MATCHES "^Xcode.*")
+    # Make sure we can find the 'ibtool' program. If we can NOT find it we skip generation of this project.
+    find_program(IBTOOL ibtool HINTS "/usr/bin" "${OSX_DEVELOPER_ROOT}/usr/bin")
+    if(${IBTOOL} STREQUAL "IBTOOL-NOTFOUND")
+        message(SEND_ERROR "ibtool can not be found and is needed to compile the .xib files. "
+                           "It should have been installed with the Apple developer tools. "
+                           "The default system paths were searched in addition to ${OSX_DEVELOPER_ROOT}/usr/bin.")
+    endif()
+endif()
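For concreteness, the substitution above rewrites a stock MoltenVK manifest entry such as (library name assumed, not taken from this patch)

    "library_path": "./libMoltenVK.dylib",

into

    "library_path": "../../../Frameworks/libMoltenVK.dylib",

so the loader resolves the dylib relative to the app bundle's Frameworks directory.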
diff --git a/src/third_party/vulkan-tools/src/scripts/check_code_format.sh b/src/third_party/vulkan-tools/src/scripts/check_code_format.sh
new file mode 100755
index 0000000..dde4379
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/scripts/check_code_format.sh
@@ -0,0 +1,41 @@
+#!/bin/bash
+# Copyright (c) 2017 Google Inc.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Script to determine if source code in Pull Request is properly formatted.
+# Exits with a non-zero exit code if formatting is needed.
+#
+# This script assumes to be invoked at the project root directory.
+
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+NC='\033[0m' # No Color
+
+FILES_TO_CHECK=$(git diff --name-only master | grep -v -E "^include/vulkan" | grep -E ".*\.(cpp|cc|c\+\+|cxx|c|h|hpp)$")
+
+if [ -z "${FILES_TO_CHECK}" ]; then
+  echo -e "${GREEN}No source code to check for formatting.${NC}"
+  exit 0
+fi
+
+FORMAT_DIFF=$(git diff -U0 master -- ${FILES_TO_CHECK} | python ./scripts/clang-format-diff.py -p1 -style=file)
+
+if [ -z "${FORMAT_DIFF}" ]; then
+  echo -e "${GREEN}All source code in PR properly formatted.${NC}"
+  exit 0
+else
+  echo -e "${RED}Found formatting errors!${NC}"
+  echo "${FORMAT_DIFF}"
+  exit 1
+fi
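As the header comment notes, the check is run from the repository root of a PR branch; a typical invocation (shown for illustration) is simply:

    ./scripts/check_code_format.sh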
diff --git a/src/third_party/vulkan-tools/src/scripts/check_commit_message_format.sh b/src/third_party/vulkan-tools/src/scripts/check_commit_message_format.sh
new file mode 100755
index 0000000..2966635
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/scripts/check_commit_message_format.sh
@@ -0,0 +1,102 @@
+#!/bin/bash
+# Copyright (c) 2018 Valve Corporation
+# Copyright (c) 2018 LunarG, Inc.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Checks commit messages against project standards in CONTRIBUTING.md document
+# Script to determine if commit messages in Pull Request are properly formatted.
+# Exits with a non-zero exit code if reformatting is needed.
+
+# Run the last stage of each pipeline in the current shell (lastpipe) so the
+# while loop below can update variables visible after the loop.
+shopt -s lastpipe
+
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+NC='\033[0m' # No Color
+
+# TRAVIS_COMMIT_RANGE contains range of commits for this PR
+
+# Get user-supplied commit message text for applicable commits and insert
+# a unique separator string identifier. The git command returns ONLY the
+# subject line and body for each of the commits.
+COMMIT_TEXT=$(git log ${TRAVIS_COMMIT_RANGE} --pretty=format:"XXXNEWLINEXXX"%n%B)
+
+# Bail if there are none
+if [ -z "${COMMIT_TEXT}" ]; then
+  echo -e "${GREEN}No commit messgages to check for formatting.${NC}"
+  exit 0
+elif ! echo $TRAVIS_COMMIT_RANGE | grep -q "\.\.\."; then
+  echo -e "${GREEN}No commit messgages to check for formatting.${NC}"
+  exit 0
+fi
+
+# Process commit messages
+success=1
+current_line=0
+prevline=""
+
+# Process each line of the commit message output, resetting counter on separator
+printf %s "$COMMIT_TEXT" | while IFS='' read -r line; do
+  # echo "Count = $current_line <Line> = $line"
+  current_line=$((current_line+1))
+  if [ "$line" = "XXXNEWLINEXXX" ]; then
+    current_line=0
+  fi
+  chars=${#line}
+  if [ $current_line -eq 1 ]; then
+    # Subject line should be 50 chars or less (but give some slack here)
+    if [ $chars -gt 54 ]; then
+      echo "The following subject line exceeds 50 characters in length."
+      echo "     '$line'"
+      success=0
+    fi
+    i=$(($chars-1))
+    last_char=${line:$i:1}
+    # Output error if last char of subject line is not alpha-numeric
+    if [[ ! $last_char =~ [0-9a-zA-Z] ]]; then
+      echo "For the following commit, the last character of the subject line must not be non-alphanumeric."
+      echo "     '$line'"
+      success=0
+    fi
+    # Check that the subject line starts with a 'module: ' style prefix (the first word must end with a colon)
+    prefix=$(echo $line | cut -f1 -d " ")
+    if [ "${prefix: -1}" != ":" ]; then
+      echo "The following subject line must start with a single word specifying the functional area of the change, followed by a colon and space. I.e., 'layers: Subject line here'"
+      echo "     '$line'"
+      success=0
+    fi
+  elif [ $current_line -eq 2 ]; then
+    # Commit message must have a blank line between subject and body
+    if [ $chars -ne 0 ]; then
+      echo "The following subject line must be followed by a blank line."
+      echo "     '$prevline'"
+      success=0
+    fi
+  else
+    # Lines in a commit message body must be less than 72 characters in length (but give some slack)
+    if [ $chars -gt 76 ]; then
+      echo "The following commit message body line exceeds the 72 character limit."
+      echo "'$line\'"
+      success=0
+    fi
+  fi
+  prevline=$line
+done
+
+if [ $success -eq 1 ]; then
+  echo -e "${GREEN}All commit messages in pull request are properly formatted.${NC}"
+  exit 0
+else
+  exit 1
+fi
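For reference, a commit message that satisfies every check above would look like this (contents invented for illustration):

    scripts: Fix commit message checker typo

    The subject stays at or under 50 characters, starts with a
    "module:" prefix, ends with an alphanumeric character, and is
    separated from a body whose lines wrap before 72 characters.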
diff --git a/src/third_party/vulkan-tools/src/scripts/common_codegen.py b/src/third_party/vulkan-tools/src/scripts/common_codegen.py
new file mode 100644
index 0000000..d56ebac
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/scripts/common_codegen.py
@@ -0,0 +1,74 @@
+#!/usr/bin/python3 -i
+#
+# Copyright (c) 2015-2017, 2019 The Khronos Group Inc.
+# Copyright (c) 2015-2017, 2019 Valve Corporation
+# Copyright (c) 2015-2017, 2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Mark Lobodzinski <mark@lunarg.com>
+
+import os
+
+# Copyright text prefixing all headers (list of strings).
+prefixStrings = [
+    '/*',
+    '** Copyright (c) 2015-2017, 2019 The Khronos Group Inc.',
+    '** Copyright (c) 2015-2017, 2019 Valve Corporation',
+    '** Copyright (c) 2015-2017, 2019 LunarG, Inc.',
+    '** Copyright (c) 2015-2017, 2019 Google Inc.',
+    '**',
+    '** Licensed under the Apache License, Version 2.0 (the "License");',
+    '** you may not use this file except in compliance with the License.',
+    '** You may obtain a copy of the License at',
+    '**',
+    '**     http://www.apache.org/licenses/LICENSE-2.0',
+    '**',
+    '** Unless required by applicable law or agreed to in writing, software',
+    '** distributed under the License is distributed on an "AS IS" BASIS,',
+    '** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.',
+    '** See the License for the specific language governing permissions and',
+    '** limitations under the License.',
+    '*/',
+    ''
+]
+
+
+platform_dict = {
+    'android' : 'VK_USE_PLATFORM_ANDROID_KHR',
+    'fuchsia' : 'VK_USE_PLATFORM_FUCHSIA',
+    'ggp': 'VK_USE_PLATFORM_GGP',
+    'ios' : 'VK_USE_PLATFORM_IOS_MVK',
+    'macos' : 'VK_USE_PLATFORM_MACOS_MVK',
+    'metal' : 'VK_USE_PLATFORM_METAL_EXT',
+    'vi' : 'VK_USE_PLATFORM_VI_NN',
+    'wayland' : 'VK_USE_PLATFORM_WAYLAND_KHR',
+    'win32' : 'VK_USE_PLATFORM_WIN32_KHR',
+    'xcb' : 'VK_USE_PLATFORM_XCB_KHR',
+    'xlib' : 'VK_USE_PLATFORM_XLIB_KHR',
+    'xlib_xrandr' : 'VK_USE_PLATFORM_XLIB_XRANDR_EXT',
+}
+
+#
+# Return appropriate feature protect string from 'platform' tag on feature
+def GetFeatureProtect(interface):
+    """Get platform protection string"""
+    platform = interface.get('platform')
+    protect = None
+    if platform is not None:
+        protect = platform_dict[platform]
+    return protect
+
+# helper to define paths relative to the repo root
+def repo_relative(path):
+    return os.path.abspath(os.path.join(os.path.dirname(__file__), '..', path))
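A small, hypothetical illustration of GetFeatureProtect (the XML element is hand-built to mimic a registry <extension> entry and is not part of this patch):

    # Hypothetical usage of common_codegen.GetFeatureProtect
    import xml.etree.ElementTree as etree
    feature = etree.fromstring('<extension name="VK_KHR_win32_surface" platform="win32"/>')
    GetFeatureProtect(feature)  # -> 'VK_USE_PLATFORM_WIN32_KHR'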
diff --git a/src/third_party/vulkan-tools/src/scripts/determine_vs_version.py b/src/third_party/vulkan-tools/src/scripts/determine_vs_version.py
new file mode 100755
index 0000000..7982bf7
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/scripts/determine_vs_version.py
@@ -0,0 +1,119 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2016 The Khronos Group Inc.
+# Copyright (c) 2016 Valve Corporation
+# Copyright (c) 2016 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Mark Young <marky@lunarg.com>
+
+import sys
+import os
+import subprocess
+
+# Following function code snippet was found on StackOverflow (with a change to lower
+# camel-case on the variable names):
+#   http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python
+def find_executable(program):
+    def is_exe(fPath):
+        return os.path.isfile(fPath) and os.access(fPath, os.X_OK)
+
+    fPath, fName = os.path.split(program)
+    if fPath:
+        if is_exe(program):
+            return program
+    else:
+        for path in os.environ["PATH"].split(os.pathsep):
+            path = path.strip('"')
+            exe_file = os.path.join(path, program)
+            if is_exe(exe_file):
+                return exe_file
+
+    return None
+    
+def determine_year(version):
+    if version == 8:
+        return 2005
+    elif version == 9:
+        return 2008
+    elif version == 10:
+        return 2010
+    elif version == 11:
+        return 2012
+    elif version == 12:
+        return 2013
+    elif version == 14:
+        return 2015
+    elif version == 15:
+        return 2017
+    else:
+        return 0000
+    
+# Determine if msbuild is in the path, then call it to determine the version and parse
+# it into a format we can use, which is "<version_num> <version_year>".
+if __name__ == '__main__':
+    exeName     = 'msbuild.exe'
+    arguments   = '/ver'
+
+    # Determine if the executable exists in the path, this is critical.
+    #
+    foundExeName = find_executable(exeName)
+
+    # If not found, return an invalid number but in the appropriate format so it will
+    # fail if the program above tries to use it.
+    if foundExeName == None:
+        print('00 0000')
+        print('Executable ' + exeName + ' not found in PATH!')
+    else:
+        proc = subprocess.Popen([exeName, arguments], stdout=subprocess.PIPE)
+        sysCallOut = proc.stdout.readline().decode('iso-8859-1').rstrip()
+        
+        version = None
+
+        # Split around any spaces first
+        spaceList  = sysCallOut.split(' ')
+        for spaceString in spaceList:
+
+            # If we've already found it, bail.
+            if version != None:
+                break
+        
+            # Now split around line feeds
+            lineList = spaceString.split('\n')
+            for curLine in lineList:
+
+                # If we've already found it, bail.
+                if version != None:
+                    break
+            
+                # We only want to continue if there's a period in the list
+                if '.' not in curLine:
+                    continue
+
+                # Get the first element and determine if it is a number, if so, we've
+                # got our number.
+                splitAroundPeriod = curLine.split('.')
+                if splitAroundPeriod[0].isdigit():
+                    version = int (splitAroundPeriod[0])
+                    break
+        
+        # Failsafe to return a number in the proper format, but one that will fail.
+        if version == None:
+            version = 00
+
+        # Determine the year associated with that version
+        year = determine_year(version)
+        
+        # Output the string we need for Cmake to properly build for this version
+        print(str(version) + ' ' + str(year))
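The printed pair is consumed as "<version> <year>". For instance, with msbuild 15.x (Visual Studio 2017) on the PATH the script prints:

    15 2017

and it prints `00 0000` followed by an error line when msbuild cannot be located.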
diff --git a/src/third_party/vulkan-tools/src/scripts/fetch_glslangvalidator.py b/src/third_party/vulkan-tools/src/scripts/fetch_glslangvalidator.py
new file mode 100755
index 0000000..8be68e8
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/scripts/fetch_glslangvalidator.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2018 The Khronos Group Inc.
+# Copyright (c) 2018 Valve Corporation
+# Copyright (c) 2018 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Mark Lobodzinski <mark@lunarg.com>
+
+
+# This script will download the latest glslang release binary and extract the
+# glslangValidator binary needed by the vkcube and vkcubepp applications.
+#
+# It takes as its lone argument the filename (no path) describing the release
+# binary name from the glslang github releases page.
+
+import sys
+import os
+import shutil
+import ssl
+import subprocess
+import urllib.request
+import zipfile
+
+SCRIPTS_DIR = os.path.dirname(os.path.abspath(__file__))
+REPO_DIR = os.path.join(SCRIPTS_DIR, '..')
+GLSLANG_URL = "https://github.com/KhronosGroup/glslang/releases/download/7.9.2888"
+
+if __name__ == '__main__':
+    if len(sys.argv) != 2:
+        print("ERROR -- must include a single glslang release zipfile name argument")
+        sys.exit();
+
+    GLSLANG_FILENAME = sys.argv[1]
+    GLSLANG_COMPLETE_URL = GLSLANG_URL + "/" + GLSLANG_FILENAME
+    GLSLANG_OUTFILENAME = os.path.join(REPO_DIR, "glslang", GLSLANG_FILENAME)
+    GLSLANG_VALIDATOR_PATH = os.path.join(REPO_DIR, "glslang", "bin")
+    GLSLANG_VALIDATOR_FULL_PATH = os.path.join(REPO_DIR, "glslang", "bin", "glslangValidator")
+    GLSLANG_DIR = os.path.join(REPO_DIR, "glslang")
+
+    if os.path.isdir(GLSLANG_DIR):
+        if os.path.isdir(GLSLANG_VALIDATOR_PATH):
+            dir_contents = os.listdir(GLSLANG_VALIDATOR_PATH)
+            for afile in dir_contents:
+                if "glslangValidator" in afile:
+                    print("   Using glslangValidator at %s" % GLSLANG_VALIDATOR_PATH)
+                    sys.exit();
+    else:
+        os.mkdir(GLSLANG_DIR)
+    print("   Downloading glslangValidator binary from glslang releases dir")
+    sys.stdout.flush()
+
+    # Download release zip file from glslang github releases site
+    with urllib.request.urlopen(GLSLANG_COMPLETE_URL, context=ssl._create_unverified_context()) as response, open(GLSLANG_OUTFILENAME, 'wb') as out_file:
+        shutil.copyfileobj(response, out_file)
+    # Unzip the glslang binary archive
+    zipped_file = zipfile.ZipFile(GLSLANG_OUTFILENAME, 'r')
+    namelist = zipped_file.namelist()
+    for afile in namelist:
+        if "glslangValidator" in afile:
+            EXE_FILE_PATH = os.path.join(GLSLANG_DIR, afile)
+            zipped_file.extract(afile, GLSLANG_DIR)
+            os.chmod(EXE_FILE_PATH, 0o775)
+            break
+    zipped_file.close()
+    sys.exit();
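A hypothetical invocation, run from the repository root (this particular zip name is an assumption; it must match an asset on the glslang 7.9.2888 releases page):

    python3 scripts/fetch_glslangvalidator.py glslang-master-linux-Release.zip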
diff --git a/src/third_party/vulkan-tools/src/scripts/generate_source.py b/src/third_party/vulkan-tools/src/scripts/generate_source.py
new file mode 100755
index 0000000..c4be24f
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/scripts/generate_source.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python3
+# Copyright (c) 2019 The Khronos Group Inc.
+# Copyright (c) 2019 Valve Corporation
+# Copyright (c) 2019 LunarG, Inc.
+# Copyright (c) 2019 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Mike Schuchardt <mikes@lunarg.com>
+
+import argparse
+import filecmp
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+import common_codegen
+
+# files to exclude from --verify check
+verify_exclude = ['.clang-format']
+
+def main(argv):
+    parser = argparse.ArgumentParser(description='Generate source code for this repository')
+    parser.add_argument('registry', metavar='REGISTRY_PATH', help='path to the Vulkan-Headers registry directory')
+    group = parser.add_mutually_exclusive_group()
+    group.add_argument('-i', '--incremental', action='store_true', help='only update repo files that change')
+    group.add_argument('-v', '--verify', action='store_true', help='verify repo files match generator output')
+    args = parser.parse_args(argv)
+
+    gen_cmds = [[common_codegen.repo_relative('scripts/kvt_genvk.py'),
+                 '-registry', os.path.abspath(os.path.join(args.registry,  'vk.xml')),
+                 '-quiet',
+                 filename] for filename in ['vk_typemap_helper.h',
+                                            'mock_icd.h',
+                                            'mock_icd.cpp']]
+
+    repo_dir = common_codegen.repo_relative('icd/generated')
+
+    # get directory where generators will run
+    if args.verify or args.incremental:
+        # generate in temp directory so we can compare or copy later
+        temp_obj = tempfile.TemporaryDirectory(prefix='VulkanLoader_generated_source_')
+        temp_dir = temp_obj.name
+        gen_dir = temp_dir
+    else:
+        # generate directly in the repo
+        gen_dir = repo_dir
+
+    # run each code generator
+    for cmd in gen_cmds:
+        print(' '.join(cmd))
+        try:
+            subprocess.check_call([sys.executable] + cmd, cwd=gen_dir)
+        except Exception as e:
+            print('ERROR:', str(e))
+            return 1
+
+    # optional post-generation steps
+    if args.verify:
+        # compare contents of temp dir and repo
+        temp_files = set(os.listdir(temp_dir))
+        repo_files = set(os.listdir(repo_dir))
+        files_match = True
+        for filename in sorted((temp_files | repo_files) - set(verify_exclude)):
+            if filename not in repo_files:
+                print('ERROR: Missing repo file', filename)
+                files_match = False
+            elif filename not in temp_files:
+                print('ERROR: Missing generator for', filename)
+                files_match = False
+            elif not filecmp.cmp(os.path.join(temp_dir, filename),
+                               os.path.join(repo_dir, filename),
+                               shallow=False):
+                print('ERROR: Repo files do not match generator output for', filename)
+                files_match = False
+
+        # return code for test scripts
+        if files_match:
+            print('SUCCESS: Repo files match generator output')
+            return 0
+        return 1
+
+    elif args.incremental:
+        # copy missing or differing files from temp directory to repo
+        for filename in os.listdir(temp_dir):
+            temp_filename = os.path.join(temp_dir, filename)
+            repo_filename = os.path.join(repo_dir, filename)
+            if not os.path.exists(repo_filename) or \
+               not filecmp.cmp(temp_filename, repo_filename, shallow=False):
+                print('update', repo_filename)
+                shutil.copyfile(temp_filename, repo_filename)
+
+    return 0
+
+if __name__ == '__main__':
+    sys.exit(main(sys.argv[1:]))
+
diff --git a/src/third_party/vulkan-tools/src/scripts/generate_vulkan_wrapper.py b/src/third_party/vulkan-tools/src/scripts/generate_vulkan_wrapper.py
new file mode 100755
index 0000000..908f66c
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/scripts/generate_vulkan_wrapper.py
@@ -0,0 +1,1229 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2018 Google, Inc.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+
+"""Generate Vulkan wrapper to support Android without libvulkan
+"""
+
+import os
+import sys
+
+class Command(object):
+    PLATFORM = 0
+    LOADER = 1
+    INSTANCE = 2
+    DEVICE = 3
+
+    def __init__(self, name, dispatch):
+        self.name = name
+        self.dispatch = dispatch
+        self.ty = self._get_type()
+
+    @staticmethod
+    def valid_c_typedef(c):
+        return (c.startswith("typedef") and
+                c.endswith(");") and
+                "*PFN_vkVoidFunction" not in c)
+
+    @classmethod
+    def from_c_typedef(cls, c):
+        name_begin = c.find("*PFN_vk") + 5 # instead of 7 to restore vk
+        name_end = c.find(")(", name_begin)
+        name = c[name_begin:name_end]
+
+        dispatch_begin = name_end + 2
+        dispatch_end = c.find(" ", dispatch_begin)
+        dispatch = c[dispatch_begin:dispatch_end]
+        if not dispatch.startswith("Vk"):
+            dispatch = None
+
+        return cls(name, dispatch)
+
+    def _get_type(self):
+        if self.dispatch:
+            if self.dispatch in ["VkDevice", "VkQueue", "VkCommandBuffer"]:
+                return self.DEVICE
+            else:
+                return self.INSTANCE
+        else:
+            if self.name in ["GetInstanceProcAddr"]:
+                return self.PLATFORM
+            else:
+                return self.LOADER
+
+    def __repr__(self):
+        return "Command(name=%s, dispatch=%s)" % \
+                (repr(self.name), repr(self.dispatch))
+
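A worked example of the typedef parsing above; the typedef text is the standard Vulkan prototype for vkDestroyInstance, reproduced here for illustration:

    c = ('typedef void (VKAPI_PTR *PFN_vkDestroyInstance)'
         '(VkInstance instance, const VkAllocationCallbacks* pAllocator);')
    Command.valid_c_typedef(c)  # True
    Command.from_c_typedef(c)   # Command(name='vkDestroyInstance', dispatch='VkInstance')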
+class Extension(object):
+    def __init__(self, name, version, guard=None, commands=[]):
+        self.name = name
+        self.version = version
+        self.guard = guard
+        self.commands = commands[:]
+
+    def add_command(self, cmd):
+        self.commands.append(cmd)
+
+    def __repr__(self):
+        lines = []
+        lines.append("Extension(name=%s, version=%s, guard=%s, commands=[" %
+                (repr(self.name), repr(self.version), repr(self.guard)))
+
+        for cmd in self.commands:
+            lines.append("    %s," % repr(cmd))
+
+        lines.append("])")
+
+        return "\n".join(lines)
+
+# generated by "generate_vulkan_wrapper.py parse vulkan.h"
+VK_core_0 = Extension(name='VK_core_0', version=0, guard=None, commands=[
+    Command(name='vkCreateInstance', dispatch=None),
+    Command(name='vkDestroyInstance', dispatch='VkInstance'),
+    Command(name='vkEnumeratePhysicalDevices', dispatch='VkInstance'),
+    Command(name='vkGetPhysicalDeviceFeatures', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetPhysicalDeviceFormatProperties', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetPhysicalDeviceImageFormatProperties', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetPhysicalDeviceProperties', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetPhysicalDeviceQueueFamilyProperties', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetPhysicalDeviceMemoryProperties', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetInstanceProcAddr', dispatch='VkInstance'),
+    Command(name='vkGetDeviceProcAddr', dispatch='VkDevice'),
+    Command(name='vkCreateDevice', dispatch='VkPhysicalDevice'),
+    Command(name='vkDestroyDevice', dispatch='VkDevice'),
+    Command(name='vkEnumerateInstanceExtensionProperties', dispatch=None),
+    Command(name='vkEnumerateDeviceExtensionProperties', dispatch='VkPhysicalDevice'),
+    Command(name='vkEnumerateInstanceLayerProperties', dispatch=None),
+    Command(name='vkEnumerateDeviceLayerProperties', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetDeviceQueue', dispatch='VkDevice'),
+    Command(name='vkQueueSubmit', dispatch='VkQueue'),
+    Command(name='vkQueueWaitIdle', dispatch='VkQueue'),
+    Command(name='vkDeviceWaitIdle', dispatch='VkDevice'),
+    Command(name='vkAllocateMemory', dispatch='VkDevice'),
+    Command(name='vkFreeMemory', dispatch='VkDevice'),
+    Command(name='vkMapMemory', dispatch='VkDevice'),
+    Command(name='vkUnmapMemory', dispatch='VkDevice'),
+    Command(name='vkFlushMappedMemoryRanges', dispatch='VkDevice'),
+    Command(name='vkInvalidateMappedMemoryRanges', dispatch='VkDevice'),
+    Command(name='vkGetDeviceMemoryCommitment', dispatch='VkDevice'),
+    Command(name='vkBindBufferMemory', dispatch='VkDevice'),
+    Command(name='vkBindImageMemory', dispatch='VkDevice'),
+    Command(name='vkGetBufferMemoryRequirements', dispatch='VkDevice'),
+    Command(name='vkGetImageMemoryRequirements', dispatch='VkDevice'),
+    Command(name='vkGetImageSparseMemoryRequirements', dispatch='VkDevice'),
+    Command(name='vkGetPhysicalDeviceSparseImageFormatProperties', dispatch='VkPhysicalDevice'),
+    Command(name='vkQueueBindSparse', dispatch='VkQueue'),
+    Command(name='vkCreateFence', dispatch='VkDevice'),
+    Command(name='vkDestroyFence', dispatch='VkDevice'),
+    Command(name='vkResetFences', dispatch='VkDevice'),
+    Command(name='vkGetFenceStatus', dispatch='VkDevice'),
+    Command(name='vkWaitForFences', dispatch='VkDevice'),
+    Command(name='vkCreateSemaphore', dispatch='VkDevice'),
+    Command(name='vkDestroySemaphore', dispatch='VkDevice'),
+    Command(name='vkCreateEvent', dispatch='VkDevice'),
+    Command(name='vkDestroyEvent', dispatch='VkDevice'),
+    Command(name='vkGetEventStatus', dispatch='VkDevice'),
+    Command(name='vkSetEvent', dispatch='VkDevice'),
+    Command(name='vkResetEvent', dispatch='VkDevice'),
+    Command(name='vkCreateQueryPool', dispatch='VkDevice'),
+    Command(name='vkDestroyQueryPool', dispatch='VkDevice'),
+    Command(name='vkGetQueryPoolResults', dispatch='VkDevice'),
+    Command(name='vkCreateBuffer', dispatch='VkDevice'),
+    Command(name='vkDestroyBuffer', dispatch='VkDevice'),
+    Command(name='vkCreateBufferView', dispatch='VkDevice'),
+    Command(name='vkDestroyBufferView', dispatch='VkDevice'),
+    Command(name='vkCreateImage', dispatch='VkDevice'),
+    Command(name='vkDestroyImage', dispatch='VkDevice'),
+    Command(name='vkGetImageSubresourceLayout', dispatch='VkDevice'),
+    Command(name='vkCreateImageView', dispatch='VkDevice'),
+    Command(name='vkDestroyImageView', dispatch='VkDevice'),
+    Command(name='vkCreateShaderModule', dispatch='VkDevice'),
+    Command(name='vkDestroyShaderModule', dispatch='VkDevice'),
+    Command(name='vkCreatePipelineCache', dispatch='VkDevice'),
+    Command(name='vkDestroyPipelineCache', dispatch='VkDevice'),
+    Command(name='vkGetPipelineCacheData', dispatch='VkDevice'),
+    Command(name='vkMergePipelineCaches', dispatch='VkDevice'),
+    Command(name='vkCreateGraphicsPipelines', dispatch='VkDevice'),
+    Command(name='vkCreateComputePipelines', dispatch='VkDevice'),
+    Command(name='vkDestroyPipeline', dispatch='VkDevice'),
+    Command(name='vkCreatePipelineLayout', dispatch='VkDevice'),
+    Command(name='vkDestroyPipelineLayout', dispatch='VkDevice'),
+    Command(name='vkCreateSampler', dispatch='VkDevice'),
+    Command(name='vkDestroySampler', dispatch='VkDevice'),
+    Command(name='vkCreateDescriptorSetLayout', dispatch='VkDevice'),
+    Command(name='vkDestroyDescriptorSetLayout', dispatch='VkDevice'),
+    Command(name='vkCreateDescriptorPool', dispatch='VkDevice'),
+    Command(name='vkDestroyDescriptorPool', dispatch='VkDevice'),
+    Command(name='vkResetDescriptorPool', dispatch='VkDevice'),
+    Command(name='vkAllocateDescriptorSets', dispatch='VkDevice'),
+    Command(name='vkFreeDescriptorSets', dispatch='VkDevice'),
+    Command(name='vkUpdateDescriptorSets', dispatch='VkDevice'),
+    Command(name='vkCreateFramebuffer', dispatch='VkDevice'),
+    Command(name='vkDestroyFramebuffer', dispatch='VkDevice'),
+    Command(name='vkCreateRenderPass', dispatch='VkDevice'),
+    Command(name='vkDestroyRenderPass', dispatch='VkDevice'),
+    Command(name='vkGetRenderAreaGranularity', dispatch='VkDevice'),
+    Command(name='vkCreateCommandPool', dispatch='VkDevice'),
+    Command(name='vkDestroyCommandPool', dispatch='VkDevice'),
+    Command(name='vkResetCommandPool', dispatch='VkDevice'),
+    Command(name='vkAllocateCommandBuffers', dispatch='VkDevice'),
+    Command(name='vkFreeCommandBuffers', dispatch='VkDevice'),
+    Command(name='vkBeginCommandBuffer', dispatch='VkCommandBuffer'),
+    Command(name='vkEndCommandBuffer', dispatch='VkCommandBuffer'),
+    Command(name='vkResetCommandBuffer', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdBindPipeline', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdSetViewport', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdSetScissor', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdSetLineWidth', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdSetDepthBias', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdSetBlendConstants', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdSetDepthBounds', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdSetStencilCompareMask', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdSetStencilWriteMask', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdSetStencilReference', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdBindDescriptorSets', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdBindIndexBuffer', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdBindVertexBuffers', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdDraw', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdDrawIndexed', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdDrawIndirect', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdDrawIndexedIndirect', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdDispatch', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdDispatchIndirect', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdCopyBuffer', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdCopyImage', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdBlitImage', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdCopyBufferToImage', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdCopyImageToBuffer', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdUpdateBuffer', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdFillBuffer', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdClearColorImage', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdClearDepthStencilImage', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdClearAttachments', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdResolveImage', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdSetEvent', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdResetEvent', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdWaitEvents', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdPipelineBarrier', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdBeginQuery', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdEndQuery', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdResetQueryPool', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdWriteTimestamp', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdCopyQueryPoolResults', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdPushConstants', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdBeginRenderPass', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdNextSubpass', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdEndRenderPass', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdExecuteCommands', dispatch='VkCommandBuffer'),
+])
+
+VK_core_1 = Extension(name='VK_core_1', version=1, guard=None, commands=[
+    Command(name='vkEnumerateInstanceVersion', dispatch=None),
+    Command(name='vkBindBufferMemory2', dispatch='VkDevice'),
+    Command(name='vkBindImageMemory2', dispatch='VkDevice'),
+    Command(name='vkGetDeviceGroupPeerMemoryFeatures', dispatch='VkDevice'),
+    Command(name='vkCmdSetDeviceMask', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdDispatchBase', dispatch='VkCommandBuffer'),
+    Command(name='vkEnumeratePhysicalDeviceGroups', dispatch='VkInstance'),
+    Command(name='vkGetImageMemoryRequirements2', dispatch='VkDevice'),
+    Command(name='vkGetBufferMemoryRequirements2', dispatch='VkDevice'),
+    Command(name='vkGetImageSparseMemoryRequirements2', dispatch='VkDevice'),
+    Command(name='vkGetPhysicalDeviceFeatures2', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetPhysicalDeviceProperties2', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetPhysicalDeviceFormatProperties2', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetPhysicalDeviceImageFormatProperties2', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetPhysicalDeviceQueueFamilyProperties2', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetPhysicalDeviceMemoryProperties2', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetPhysicalDeviceSparseImageFormatProperties2', dispatch='VkPhysicalDevice'),
+    Command(name='vkTrimCommandPool', dispatch='VkDevice'),
+    Command(name='vkGetDeviceQueue2', dispatch='VkDevice'),
+    Command(name='vkCreateSamplerYcbcrConversion', dispatch='VkDevice'),
+    Command(name='vkDestroySamplerYcbcrConversion', dispatch='VkDevice'),
+    Command(name='vkCreateDescriptorUpdateTemplate', dispatch='VkDevice'),
+    Command(name='vkDestroyDescriptorUpdateTemplate', dispatch='VkDevice'),
+    Command(name='vkUpdateDescriptorSetWithTemplate', dispatch='VkDevice'),
+    Command(name='vkGetPhysicalDeviceExternalBufferProperties', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetPhysicalDeviceExternalFenceProperties', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetPhysicalDeviceExternalSemaphoreProperties', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetDescriptorSetLayoutSupport', dispatch='VkDevice'),
+])
+
+VK_KHR_surface = Extension(name='VK_KHR_surface', version=25, guard=None, commands=[
+    Command(name='vkDestroySurfaceKHR', dispatch='VkInstance'),
+    Command(name='vkGetPhysicalDeviceSurfaceSupportKHR', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetPhysicalDeviceSurfaceCapabilitiesKHR', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetPhysicalDeviceSurfaceFormatsKHR', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetPhysicalDeviceSurfacePresentModesKHR', dispatch='VkPhysicalDevice'),
+])
+
+VK_KHR_swapchain = Extension(name='VK_KHR_swapchain', version=70, guard=None, commands=[
+    Command(name='vkCreateSwapchainKHR', dispatch='VkDevice'),
+    Command(name='vkDestroySwapchainKHR', dispatch='VkDevice'),
+    Command(name='vkGetSwapchainImagesKHR', dispatch='VkDevice'),
+    Command(name='vkAcquireNextImageKHR', dispatch='VkDevice'),
+    Command(name='vkQueuePresentKHR', dispatch='VkQueue'),
+    Command(name='vkGetDeviceGroupPresentCapabilitiesKHR', dispatch='VkDevice'),
+    Command(name='vkGetDeviceGroupSurfacePresentModesKHR', dispatch='VkDevice'),
+    Command(name='vkGetPhysicalDevicePresentRectanglesKHR', dispatch='VkPhysicalDevice'),
+    Command(name='vkAcquireNextImage2KHR', dispatch='VkDevice'),
+])
+
+VK_KHR_display = Extension(name='VK_KHR_display', version=21, guard=None, commands=[
+    Command(name='vkGetPhysicalDeviceDisplayPropertiesKHR', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetPhysicalDeviceDisplayPlanePropertiesKHR', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetDisplayPlaneSupportedDisplaysKHR', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetDisplayModePropertiesKHR', dispatch='VkPhysicalDevice'),
+    Command(name='vkCreateDisplayModeKHR', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetDisplayPlaneCapabilitiesKHR', dispatch='VkPhysicalDevice'),
+    Command(name='vkCreateDisplayPlaneSurfaceKHR', dispatch='VkInstance'),
+])
+
+VK_KHR_display_swapchain = Extension(name='VK_KHR_display_swapchain', version=9, guard=None, commands=[
+    Command(name='vkCreateSharedSwapchainsKHR', dispatch='VkDevice'),
+])
+
+VK_KHR_sampler_mirror_clamp_to_edge = Extension(name='VK_KHR_sampler_mirror_clamp_to_edge', version=1, guard=None, commands=[
+])
+
+VK_KHR_multiview = Extension(name='VK_KHR_multiview', version=1, guard=None, commands=[
+])
+
+VK_KHR_get_physical_device_properties2 = Extension(name='VK_KHR_get_physical_device_properties2', version=1, guard=None, commands=[
+    Command(name='vkGetPhysicalDeviceFeatures2KHR', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetPhysicalDeviceProperties2KHR', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetPhysicalDeviceFormatProperties2KHR', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetPhysicalDeviceImageFormatProperties2KHR', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetPhysicalDeviceQueueFamilyProperties2KHR', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetPhysicalDeviceMemoryProperties2KHR', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetPhysicalDeviceSparseImageFormatProperties2KHR', dispatch='VkPhysicalDevice'),
+])
+
+VK_KHR_device_group = Extension(name='VK_KHR_device_group', version=3, guard=None, commands=[
+    Command(name='vkGetDeviceGroupPeerMemoryFeaturesKHR', dispatch='VkDevice'),
+    Command(name='vkCmdSetDeviceMaskKHR', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdDispatchBaseKHR', dispatch='VkCommandBuffer'),
+])
+
+VK_KHR_shader_draw_parameters = Extension(name='VK_KHR_shader_draw_parameters', version=1, guard=None, commands=[
+])
+
+VK_KHR_maintenance1 = Extension(name='VK_KHR_maintenance1', version=2, guard=None, commands=[
+    Command(name='vkTrimCommandPoolKHR', dispatch='VkDevice'),
+])
+
+VK_KHR_device_group_creation = Extension(name='VK_KHR_device_group_creation', version=1, guard=None, commands=[
+    Command(name='vkEnumeratePhysicalDeviceGroupsKHR', dispatch='VkInstance'),
+])
+
+VK_KHR_external_memory_capabilities = Extension(name='VK_KHR_external_memory_capabilities', version=1, guard=None, commands=[
+    Command(name='vkGetPhysicalDeviceExternalBufferPropertiesKHR', dispatch='VkPhysicalDevice'),
+])
+
+VK_KHR_external_memory = Extension(name='VK_KHR_external_memory', version=1, guard=None, commands=[
+])
+
+VK_KHR_external_memory_fd = Extension(name='VK_KHR_external_memory_fd', version=1, guard=None, commands=[
+    Command(name='vkGetMemoryFdKHR', dispatch='VkDevice'),
+    Command(name='vkGetMemoryFdPropertiesKHR', dispatch='VkDevice'),
+])
+
+VK_KHR_external_semaphore_capabilities = Extension(name='VK_KHR_external_semaphore_capabilities', version=1, guard=None, commands=[
+    Command(name='vkGetPhysicalDeviceExternalSemaphorePropertiesKHR', dispatch='VkPhysicalDevice'),
+])
+
+VK_KHR_external_semaphore = Extension(name='VK_KHR_external_semaphore', version=1, guard=None, commands=[
+])
+
+VK_KHR_external_semaphore_fd = Extension(name='VK_KHR_external_semaphore_fd', version=1, guard=None, commands=[
+    Command(name='vkImportSemaphoreFdKHR', dispatch='VkDevice'),
+    Command(name='vkGetSemaphoreFdKHR', dispatch='VkDevice'),
+])
+
+VK_KHR_push_descriptor = Extension(name='VK_KHR_push_descriptor', version=2, guard=None, commands=[
+    Command(name='vkCmdPushDescriptorSetKHR', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdPushDescriptorSetWithTemplateKHR', dispatch='VkCommandBuffer'),
+])
+
+VK_KHR_16bit_storage = Extension(name='VK_KHR_16bit_storage', version=1, guard=None, commands=[
+])
+
+VK_KHR_incremental_present = Extension(name='VK_KHR_incremental_present', version=1, guard=None, commands=[
+])
+
+VK_KHR_descriptor_update_template = Extension(name='VK_KHR_descriptor_update_template', version=1, guard=None, commands=[
+    Command(name='vkCreateDescriptorUpdateTemplateKHR', dispatch='VkDevice'),
+    Command(name='vkDestroyDescriptorUpdateTemplateKHR', dispatch='VkDevice'),
+    Command(name='vkUpdateDescriptorSetWithTemplateKHR', dispatch='VkDevice'),
+])
+
+VK_KHR_create_renderpass2 = Extension(name='VK_KHR_create_renderpass2', version=1, guard=None, commands=[
+    Command(name='vkCreateRenderPass2KHR', dispatch='VkDevice'),
+    Command(name='vkCmdBeginRenderPass2KHR', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdNextSubpass2KHR', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdEndRenderPass2KHR', dispatch='VkCommandBuffer'),
+])
+
+VK_KHR_shared_presentable_image = Extension(name='VK_KHR_shared_presentable_image', version=1, guard=None, commands=[
+    Command(name='vkGetSwapchainStatusKHR', dispatch='VkDevice'),
+])
+
+VK_KHR_external_fence_capabilities = Extension(name='VK_KHR_external_fence_capabilities', version=1, guard=None, commands=[
+    Command(name='vkGetPhysicalDeviceExternalFencePropertiesKHR', dispatch='VkPhysicalDevice'),
+])
+
+VK_KHR_external_fence = Extension(name='VK_KHR_external_fence', version=1, guard=None, commands=[
+])
+
+VK_KHR_external_fence_fd = Extension(name='VK_KHR_external_fence_fd', version=1, guard=None, commands=[
+    Command(name='vkImportFenceFdKHR', dispatch='VkDevice'),
+    Command(name='vkGetFenceFdKHR', dispatch='VkDevice'),
+])
+
+VK_KHR_maintenance2 = Extension(name='VK_KHR_maintenance2', version=1, guard=None, commands=[
+])
+
+VK_KHR_get_surface_capabilities2 = Extension(name='VK_KHR_get_surface_capabilities2', version=1, guard=None, commands=[
+    Command(name='vkGetPhysicalDeviceSurfaceCapabilities2KHR', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetPhysicalDeviceSurfaceFormats2KHR', dispatch='VkPhysicalDevice'),
+])
+
+VK_KHR_variable_pointers = Extension(name='VK_KHR_variable_pointers', version=1, guard=None, commands=[
+])
+
+VK_KHR_get_display_properties2 = Extension(name='VK_KHR_get_display_properties2', version=1, guard=None, commands=[
+    Command(name='vkGetPhysicalDeviceDisplayProperties2KHR', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetPhysicalDeviceDisplayPlaneProperties2KHR', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetDisplayModeProperties2KHR', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetDisplayPlaneCapabilities2KHR', dispatch='VkPhysicalDevice'),
+])
+
+VK_KHR_dedicated_allocation = Extension(name='VK_KHR_dedicated_allocation', version=3, guard=None, commands=[
+])
+
+VK_KHR_storage_buffer_storage_class = Extension(name='VK_KHR_storage_buffer_storage_class', version=1, guard=None, commands=[
+])
+
+VK_KHR_relaxed_block_layout = Extension(name='VK_KHR_relaxed_block_layout', version=1, guard=None, commands=[
+])
+
+VK_KHR_get_memory_requirements2 = Extension(name='VK_KHR_get_memory_requirements2', version=1, guard=None, commands=[
+    Command(name='vkGetImageMemoryRequirements2KHR', dispatch='VkDevice'),
+    Command(name='vkGetBufferMemoryRequirements2KHR', dispatch='VkDevice'),
+    Command(name='vkGetImageSparseMemoryRequirements2KHR', dispatch='VkDevice'),
+])
+
+VK_KHR_image_format_list = Extension(name='VK_KHR_image_format_list', version=1, guard=None, commands=[
+])
+
+VK_KHR_sampler_ycbcr_conversion = Extension(name='VK_KHR_sampler_ycbcr_conversion', version=1, guard=None, commands=[
+    Command(name='vkCreateSamplerYcbcrConversionKHR', dispatch='VkDevice'),
+    Command(name='vkDestroySamplerYcbcrConversionKHR', dispatch='VkDevice'),
+])
+
+VK_KHR_bind_memory2 = Extension(name='VK_KHR_bind_memory2', version=1, guard=None, commands=[
+    Command(name='vkBindBufferMemory2KHR', dispatch='VkDevice'),
+    Command(name='vkBindImageMemory2KHR', dispatch='VkDevice'),
+])
+
+VK_KHR_maintenance3 = Extension(name='VK_KHR_maintenance3', version=1, guard=None, commands=[
+    Command(name='vkGetDescriptorSetLayoutSupportKHR', dispatch='VkDevice'),
+])
+
+VK_KHR_draw_indirect_count = Extension(name='VK_KHR_draw_indirect_count', version=1, guard=None, commands=[
+    Command(name='vkCmdDrawIndirectCountKHR', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdDrawIndexedIndirectCountKHR', dispatch='VkCommandBuffer'),
+])
+
+VK_KHR_8bit_storage = Extension(name='VK_KHR_8bit_storage', version=1, guard=None, commands=[
+])
+
+VK_KHR_shader_atomic_int64 = Extension(name='VK_KHR_shader_atomic_int64', version=1, guard=None, commands=[
+])
+
+VK_KHR_driver_properties = Extension(name='VK_KHR_driver_properties', version=1, guard=None, commands=[
+])
+
+VK_KHR_vulkan_memory_model = Extension(name='VK_KHR_vulkan_memory_model', version=2, guard=None, commands=[
+])
+
+VK_EXT_debug_report = Extension(name='VK_EXT_debug_report', version=9, guard=None, commands=[
+    Command(name='vkCreateDebugReportCallbackEXT', dispatch='VkInstance'),
+    Command(name='vkDestroyDebugReportCallbackEXT', dispatch='VkInstance'),
+    Command(name='vkDebugReportMessageEXT', dispatch='VkInstance'),
+])
+
+VK_NV_glsl_shader = Extension(name='VK_NV_glsl_shader', version=1, guard=None, commands=[
+])
+
+VK_EXT_depth_range_unrestricted = Extension(name='VK_EXT_depth_range_unrestricted', version=1, guard=None, commands=[
+])
+
+VK_IMG_filter_cubic = Extension(name='VK_IMG_filter_cubic', version=1, guard=None, commands=[
+])
+
+VK_AMD_rasterization_order = Extension(name='VK_AMD_rasterization_order', version=1, guard=None, commands=[
+])
+
+VK_AMD_shader_trinary_minmax = Extension(name='VK_AMD_shader_trinary_minmax', version=1, guard=None, commands=[
+])
+
+VK_AMD_shader_explicit_vertex_parameter = Extension(name='VK_AMD_shader_explicit_vertex_parameter', version=1, guard=None, commands=[
+])
+
+VK_EXT_debug_marker = Extension(name='VK_EXT_debug_marker', version=4, guard=None, commands=[
+    Command(name='vkDebugMarkerSetObjectTagEXT', dispatch='VkDevice'),
+    Command(name='vkDebugMarkerSetObjectNameEXT', dispatch='VkDevice'),
+    Command(name='vkCmdDebugMarkerBeginEXT', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdDebugMarkerEndEXT', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdDebugMarkerInsertEXT', dispatch='VkCommandBuffer'),
+])
+
+VK_AMD_gcn_shader = Extension(name='VK_AMD_gcn_shader', version=1, guard=None, commands=[
+])
+
+VK_NV_dedicated_allocation = Extension(name='VK_NV_dedicated_allocation', version=1, guard=None, commands=[
+])
+
+VK_EXT_transform_feedback = Extension(name='VK_EXT_transform_feedback', version=1, guard=None, commands=[
+    Command(name='vkCmdBindTransformFeedbackBuffersEXT', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdBeginTransformFeedbackEXT', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdEndTransformFeedbackEXT', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdBeginQueryIndexedEXT', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdEndQueryIndexedEXT', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdDrawIndirectByteCountEXT', dispatch='VkCommandBuffer'),
+])
+
+VK_AMD_draw_indirect_count = Extension(name='VK_AMD_draw_indirect_count', version=1, guard=None, commands=[
+    Command(name='vkCmdDrawIndirectCountAMD', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdDrawIndexedIndirectCountAMD', dispatch='VkCommandBuffer'),
+])
+
+VK_AMD_negative_viewport_height = Extension(name='VK_AMD_negative_viewport_height', version=1, guard=None, commands=[
+])
+
+VK_AMD_gpu_shader_half_float = Extension(name='VK_AMD_gpu_shader_half_float', version=1, guard=None, commands=[
+])
+
+VK_AMD_shader_ballot = Extension(name='VK_AMD_shader_ballot', version=1, guard=None, commands=[
+])
+
+VK_AMD_texture_gather_bias_lod = Extension(name='VK_AMD_texture_gather_bias_lod', version=1, guard=None, commands=[
+])
+
+VK_AMD_shader_info = Extension(name='VK_AMD_shader_info', version=1, guard=None, commands=[
+    Command(name='vkGetShaderInfoAMD', dispatch='VkDevice'),
+])
+
+VK_AMD_shader_image_load_store_lod = Extension(name='VK_AMD_shader_image_load_store_lod', version=1, guard=None, commands=[
+])
+
+VK_NV_corner_sampled_image = Extension(name='VK_NV_corner_sampled_image', version=2, guard=None, commands=[
+])
+
+VK_IMG_format_pvrtc = Extension(name='VK_IMG_format_pvrtc', version=1, guard=None, commands=[
+])
+
+VK_NV_external_memory_capabilities = Extension(name='VK_NV_external_memory_capabilities', version=1, guard=None, commands=[
+    Command(name='vkGetPhysicalDeviceExternalImageFormatPropertiesNV', dispatch='VkPhysicalDevice'),
+])
+
+VK_NV_external_memory = Extension(name='VK_NV_external_memory', version=1, guard=None, commands=[
+])
+
+VK_EXT_validation_flags = Extension(name='VK_EXT_validation_flags', version=1, guard=None, commands=[
+])
+
+VK_EXT_shader_subgroup_ballot = Extension(name='VK_EXT_shader_subgroup_ballot', version=1, guard=None, commands=[
+])
+
+VK_EXT_shader_subgroup_vote = Extension(name='VK_EXT_shader_subgroup_vote', version=1, guard=None, commands=[
+])
+
+VK_EXT_astc_decode_mode = Extension(name='VK_EXT_astc_decode_mode', version=1, guard=None, commands=[
+])
+
+VK_EXT_conditional_rendering = Extension(name='VK_EXT_conditional_rendering', version=1, guard=None, commands=[
+    Command(name='vkCmdBeginConditionalRenderingEXT', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdEndConditionalRenderingEXT', dispatch='VkCommandBuffer'),
+])
+
+VK_NVX_device_generated_commands = Extension(name='VK_NVX_device_generated_commands', version=3, guard=None, commands=[
+    Command(name='vkCmdProcessCommandsNVX', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdReserveSpaceForCommandsNVX', dispatch='VkCommandBuffer'),
+    Command(name='vkCreateIndirectCommandsLayoutNVX', dispatch='VkDevice'),
+    Command(name='vkDestroyIndirectCommandsLayoutNVX', dispatch='VkDevice'),
+    Command(name='vkCreateObjectTableNVX', dispatch='VkDevice'),
+    Command(name='vkDestroyObjectTableNVX', dispatch='VkDevice'),
+    Command(name='vkRegisterObjectsNVX', dispatch='VkDevice'),
+    Command(name='vkUnregisterObjectsNVX', dispatch='VkDevice'),
+    Command(name='vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX', dispatch='VkPhysicalDevice'),
+])
+
+VK_NV_clip_space_w_scaling = Extension(name='VK_NV_clip_space_w_scaling', version=1, guard=None, commands=[
+    Command(name='vkCmdSetViewportWScalingNV', dispatch='VkCommandBuffer'),
+])
+
+VK_EXT_direct_mode_display = Extension(name='VK_EXT_direct_mode_display', version=1, guard=None, commands=[
+    Command(name='vkReleaseDisplayEXT', dispatch='VkPhysicalDevice'),
+])
+
+VK_EXT_display_surface_counter = Extension(name='VK_EXT_display_surface_counter', version=1, guard=None, commands=[
+    Command(name='vkGetPhysicalDeviceSurfaceCapabilities2EXT', dispatch='VkPhysicalDevice'),
+])
+
+VK_EXT_display_control = Extension(name='VK_EXT_display_control', version=1, guard=None, commands=[
+    Command(name='vkDisplayPowerControlEXT', dispatch='VkDevice'),
+    Command(name='vkRegisterDeviceEventEXT', dispatch='VkDevice'),
+    Command(name='vkRegisterDisplayEventEXT', dispatch='VkDevice'),
+    Command(name='vkGetSwapchainCounterEXT', dispatch='VkDevice'),
+])
+
+VK_GOOGLE_display_timing = Extension(name='VK_GOOGLE_display_timing', version=1, guard=None, commands=[
+    Command(name='vkGetRefreshCycleDurationGOOGLE', dispatch='VkDevice'),
+    Command(name='vkGetPastPresentationTimingGOOGLE', dispatch='VkDevice'),
+])
+
+VK_NV_sample_mask_override_coverage = Extension(name='VK_NV_sample_mask_override_coverage', version=1, guard=None, commands=[
+])
+
+VK_NV_geometry_shader_passthrough = Extension(name='VK_NV_geometry_shader_passthrough', version=1, guard=None, commands=[
+])
+
+VK_NV_viewport_array2 = Extension(name='VK_NV_viewport_array2', version=1, guard=None, commands=[
+])
+
+VK_NVX_multiview_per_view_attributes = Extension(name='VK_NVX_multiview_per_view_attributes', version=1, guard=None, commands=[
+])
+
+VK_NV_viewport_swizzle = Extension(name='VK_NV_viewport_swizzle', version=1, guard=None, commands=[
+])
+
+VK_EXT_discard_rectangles = Extension(name='VK_EXT_discard_rectangles', version=1, guard=None, commands=[
+    Command(name='vkCmdSetDiscardRectangleEXT', dispatch='VkCommandBuffer'),
+])
+
+VK_EXT_conservative_rasterization = Extension(name='VK_EXT_conservative_rasterization', version=1, guard=None, commands=[
+])
+
+VK_EXT_swapchain_colorspace = Extension(name='VK_EXT_swapchain_colorspace', version=3, guard=None, commands=[
+])
+
+VK_EXT_hdr_metadata = Extension(name='VK_EXT_hdr_metadata', version=1, guard=None, commands=[
+    Command(name='vkSetHdrMetadataEXT', dispatch='VkDevice'),
+])
+
+VK_EXT_external_memory_dma_buf = Extension(name='VK_EXT_external_memory_dma_buf', version=1, guard=None, commands=[
+])
+
+VK_EXT_queue_family_foreign = Extension(name='VK_EXT_queue_family_foreign', version=1, guard=None, commands=[
+])
+
+VK_EXT_debug_utils = Extension(name='VK_EXT_debug_utils', version=1, guard=None, commands=[
+    Command(name='vkSetDebugUtilsObjectNameEXT', dispatch='VkDevice'),
+    Command(name='vkSetDebugUtilsObjectTagEXT', dispatch='VkDevice'),
+    Command(name='vkQueueBeginDebugUtilsLabelEXT', dispatch='VkQueue'),
+    Command(name='vkQueueEndDebugUtilsLabelEXT', dispatch='VkQueue'),
+    Command(name='vkQueueInsertDebugUtilsLabelEXT', dispatch='VkQueue'),
+    Command(name='vkCmdBeginDebugUtilsLabelEXT', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdEndDebugUtilsLabelEXT', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdInsertDebugUtilsLabelEXT', dispatch='VkCommandBuffer'),
+    Command(name='vkCreateDebugUtilsMessengerEXT', dispatch='VkInstance'),
+    Command(name='vkDestroyDebugUtilsMessengerEXT', dispatch='VkInstance'),
+    Command(name='vkSubmitDebugUtilsMessageEXT', dispatch='VkInstance'),
+])
+
+VK_EXT_sampler_filter_minmax = Extension(name='VK_EXT_sampler_filter_minmax', version=1, guard=None, commands=[
+])
+
+VK_AMD_gpu_shader_int16 = Extension(name='VK_AMD_gpu_shader_int16', version=1, guard=None, commands=[
+])
+
+VK_AMD_mixed_attachment_samples = Extension(name='VK_AMD_mixed_attachment_samples', version=1, guard=None, commands=[
+])
+
+VK_AMD_shader_fragment_mask = Extension(name='VK_AMD_shader_fragment_mask', version=1, guard=None, commands=[
+])
+
+VK_EXT_inline_uniform_block = Extension(name='VK_EXT_inline_uniform_block', version=1, guard=None, commands=[
+])
+
+VK_EXT_shader_stencil_export = Extension(name='VK_EXT_shader_stencil_export', version=1, guard=None, commands=[
+])
+
+VK_EXT_sample_locations = Extension(name='VK_EXT_sample_locations', version=1, guard=None, commands=[
+    Command(name='vkCmdSetSampleLocationsEXT', dispatch='VkCommandBuffer'),
+    Command(name='vkGetPhysicalDeviceMultisamplePropertiesEXT', dispatch='VkPhysicalDevice'),
+])
+
+VK_EXT_blend_operation_advanced = Extension(name='VK_EXT_blend_operation_advanced', version=2, guard=None, commands=[
+])
+
+VK_NV_fragment_coverage_to_color = Extension(name='VK_NV_fragment_coverage_to_color', version=1, guard=None, commands=[
+])
+
+VK_NV_framebuffer_mixed_samples = Extension(name='VK_NV_framebuffer_mixed_samples', version=1, guard=None, commands=[
+])
+
+VK_NV_fill_rectangle = Extension(name='VK_NV_fill_rectangle', version=1, guard=None, commands=[
+])
+
+VK_EXT_post_depth_coverage = Extension(name='VK_EXT_post_depth_coverage', version=1, guard=None, commands=[
+])
+
+VK_EXT_extension_159 = Extension(name='VK_EXT_extension_159', version=0, guard=None, commands=[
+])
+
+VK_EXT_image_drm_format_modifier = Extension(name='VK_EXT_image_drm_format_modifier', version=1, guard=None, commands=[
+    Command(name='vkGetImageDrmFormatModifierPropertiesEXT', dispatch='VkDevice'),
+])
+
+VK_EXT_validation_cache = Extension(name='VK_EXT_validation_cache', version=1, guard=None, commands=[
+    Command(name='vkCreateValidationCacheEXT', dispatch='VkDevice'),
+    Command(name='vkDestroyValidationCacheEXT', dispatch='VkDevice'),
+    Command(name='vkMergeValidationCachesEXT', dispatch='VkDevice'),
+    Command(name='vkGetValidationCacheDataEXT', dispatch='VkDevice'),
+])
+
+VK_EXT_descriptor_indexing = Extension(name='VK_EXT_descriptor_indexing', version=2, guard=None, commands=[
+])
+
+VK_EXT_shader_viewport_index_layer = Extension(name='VK_EXT_shader_viewport_index_layer', version=1, guard=None, commands=[
+])
+
+VK_NV_shading_rate_image = Extension(name='VK_NV_shading_rate_image', version=3, guard=None, commands=[
+    Command(name='vkCmdBindShadingRateImageNV', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdSetViewportShadingRatePaletteNV', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdSetCoarseSampleOrderNV', dispatch='VkCommandBuffer'),
+])
+
+VK_NV_ray_tracing = Extension(name='VK_NV_ray_tracing', version=2, guard=None, commands=[
+    Command(name='vkCreateAccelerationStructureNV', dispatch='VkDevice'),
+    Command(name='vkDestroyAccelerationStructureNV', dispatch='VkDevice'),
+    Command(name='vkGetAccelerationStructureMemoryRequirementsNV', dispatch='VkDevice'),
+    Command(name='vkBindAccelerationStructureMemoryNV', dispatch='VkDevice'),
+    Command(name='vkCmdBuildAccelerationStructureNV', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdCopyAccelerationStructureNV', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdTraceRaysNV', dispatch='VkCommandBuffer'),
+    Command(name='vkCreateRayTracingPipelinesNV', dispatch='VkDevice'),
+    Command(name='vkGetRayTracingShaderGroupHandlesNV', dispatch='VkDevice'),
+    Command(name='vkGetAccelerationStructureHandleNV', dispatch='VkDevice'),
+    Command(name='vkCmdWriteAccelerationStructuresPropertiesNV', dispatch='VkCommandBuffer'),
+    Command(name='vkCompileDeferredNV', dispatch='VkDevice'),
+])
+
+VK_NV_representative_fragment_test = Extension(name='VK_NV_representative_fragment_test', version=1, guard=None, commands=[
+])
+
+VK_EXT_global_priority = Extension(name='VK_EXT_global_priority', version=2, guard=None, commands=[
+])
+
+VK_EXT_external_memory_host = Extension(name='VK_EXT_external_memory_host', version=1, guard=None, commands=[
+    Command(name='vkGetMemoryHostPointerPropertiesEXT', dispatch='VkDevice'),
+])
+
+VK_AMD_buffer_marker = Extension(name='VK_AMD_buffer_marker', version=1, guard=None, commands=[
+    Command(name='vkCmdWriteBufferMarkerAMD', dispatch='VkCommandBuffer'),
+])
+
+VK_EXT_calibrated_timestamps = Extension(name='VK_EXT_calibrated_timestamps', version=1, guard=None, commands=[
+    Command(name='vkGetPhysicalDeviceCalibrateableTimeDomainsEXT', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetCalibratedTimestampsEXT', dispatch='VkDevice'),
+])
+
+VK_AMD_shader_core_properties = Extension(name='VK_AMD_shader_core_properties', version=1, guard=None, commands=[
+])
+
+VK_AMD_memory_overallocation_behavior = Extension(name='VK_AMD_memory_overallocation_behavior', version=1, guard=None, commands=[
+])
+
+VK_EXT_vertex_attribute_divisor = Extension(name='VK_EXT_vertex_attribute_divisor', version=3, guard=None, commands=[
+])
+
+VK_NV_shader_subgroup_partitioned = Extension(name='VK_NV_shader_subgroup_partitioned', version=1, guard=None, commands=[
+])
+
+VK_NV_compute_shader_derivatives = Extension(name='VK_NV_compute_shader_derivatives', version=1, guard=None, commands=[
+])
+
+VK_NV_mesh_shader = Extension(name='VK_NV_mesh_shader', version=1, guard=None, commands=[
+    Command(name='vkCmdDrawMeshTasksNV', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdDrawMeshTasksIndirectNV', dispatch='VkCommandBuffer'),
+    Command(name='vkCmdDrawMeshTasksIndirectCountNV', dispatch='VkCommandBuffer'),
+])
+
+VK_NV_fragment_shader_barycentric = Extension(name='VK_NV_fragment_shader_barycentric', version=1, guard=None, commands=[
+])
+
+VK_NV_shader_image_footprint = Extension(name='VK_NV_shader_image_footprint', version=1, guard=None, commands=[
+])
+
+VK_NV_scissor_exclusive = Extension(name='VK_NV_scissor_exclusive', version=1, guard=None, commands=[
+    Command(name='vkCmdSetExclusiveScissorNV', dispatch='VkCommandBuffer'),
+])
+
+VK_NV_device_diagnostic_checkpoints = Extension(name='VK_NV_device_diagnostic_checkpoints', version=2, guard=None, commands=[
+    Command(name='vkCmdSetCheckpointNV', dispatch='VkCommandBuffer'),
+    Command(name='vkGetQueueCheckpointDataNV', dispatch='VkQueue'),
+])
+
+VK_EXT_pci_bus_info = Extension(name='VK_EXT_pci_bus_info', version=1, guard=None, commands=[
+])
+
+VK_GOOGLE_hlsl_functionality1 = Extension(name='VK_GOOGLE_hlsl_functionality1', version=0, guard=None, commands=[
+])
+
+VK_GOOGLE_decorate_string = Extension(name='VK_GOOGLE_decorate_string', version=0, guard=None, commands=[
+])
+
+VK_KHR_android_surface = Extension(name='VK_KHR_android_surface', version=6, guard='VK_USE_PLATFORM_ANDROID_KHR', commands=[
+    Command(name='vkCreateAndroidSurfaceKHR', dispatch='VkInstance'),
+])
+
+VK_ANDROID_external_memory_android_hardware_buffer = Extension(name='VK_ANDROID_external_memory_android_hardware_buffer', version=3, guard='VK_USE_PLATFORM_ANDROID_KHR', commands=[
+    Command(name='vkGetAndroidHardwareBufferPropertiesANDROID', dispatch='VkDevice'),
+    Command(name='vkGetMemoryAndroidHardwareBufferANDROID', dispatch='VkDevice'),
+])
+
+VK_FUCHSIA_imagepipe_surface = Extension(name='VK_FUCHSIA_imagepipe_surface', version=1, guard='VK_USE_PLATFORM_FUCHSIA', commands=[
+    Command(name='vkCreateImagePipeSurfaceFUCHSIA', dispatch='VkInstance'),
+])
+
+VK_MVK_ios_surface = Extension(name='VK_MVK_ios_surface', version=2, guard='VK_USE_PLATFORM_IOS_MVK', commands=[
+    Command(name='vkCreateIOSSurfaceMVK', dispatch='VkInstance'),
+])
+
+VK_MVK_macos_surface = Extension(name='VK_MVK_macos_surface', version=2, guard='VK_USE_PLATFORM_MACOS_MVK', commands=[
+    Command(name='vkCreateMacOSSurfaceMVK', dispatch='VkInstance'),
+])
+
+VK_NN_vi_surface = Extension(name='VK_NN_vi_surface', version=1, guard='VK_USE_PLATFORM_VI_NN', commands=[
+    Command(name='vkCreateViSurfaceNN', dispatch='VkInstance'),
+])
+
+VK_KHR_wayland_surface = Extension(name='VK_KHR_wayland_surface', version=6, guard='VK_USE_PLATFORM_WAYLAND_KHR', commands=[
+    Command(name='vkCreateWaylandSurfaceKHR', dispatch='VkInstance'),
+    Command(name='vkGetPhysicalDeviceWaylandPresentationSupportKHR', dispatch='VkPhysicalDevice'),
+])
+
+VK_KHR_win32_surface = Extension(name='VK_KHR_win32_surface', version=6, guard='VK_USE_PLATFORM_WIN32_KHR', commands=[
+    Command(name='vkCreateWin32SurfaceKHR', dispatch='VkInstance'),
+    Command(name='vkGetPhysicalDeviceWin32PresentationSupportKHR', dispatch='VkPhysicalDevice'),
+])
+
+VK_KHR_external_memory_win32 = Extension(name='VK_KHR_external_memory_win32', version=1, guard='VK_USE_PLATFORM_WIN32_KHR', commands=[
+    Command(name='vkGetMemoryWin32HandleKHR', dispatch='VkDevice'),
+    Command(name='vkGetMemoryWin32HandlePropertiesKHR', dispatch='VkDevice'),
+])
+
+VK_KHR_win32_keyed_mutex = Extension(name='VK_KHR_win32_keyed_mutex', version=1, guard='VK_USE_PLATFORM_WIN32_KHR', commands=[
+])
+
+VK_KHR_external_semaphore_win32 = Extension(name='VK_KHR_external_semaphore_win32', version=1, guard='VK_USE_PLATFORM_WIN32_KHR', commands=[
+    Command(name='vkImportSemaphoreWin32HandleKHR', dispatch='VkDevice'),
+    Command(name='vkGetSemaphoreWin32HandleKHR', dispatch='VkDevice'),
+])
+
+VK_KHR_external_fence_win32 = Extension(name='VK_KHR_external_fence_win32', version=1, guard='VK_USE_PLATFORM_WIN32_KHR', commands=[
+    Command(name='vkImportFenceWin32HandleKHR', dispatch='VkDevice'),
+    Command(name='vkGetFenceWin32HandleKHR', dispatch='VkDevice'),
+])
+
+VK_NV_external_memory_win32 = Extension(name='VK_NV_external_memory_win32', version=1, guard='VK_USE_PLATFORM_WIN32_KHR', commands=[
+    Command(name='vkGetMemoryWin32HandleNV', dispatch='VkDevice'),
+])
+
+VK_NV_win32_keyed_mutex = Extension(name='VK_NV_win32_keyed_mutex', version=1, guard='VK_USE_PLATFORM_WIN32_KHR', commands=[
+])
+
+VK_KHR_xcb_surface = Extension(name='VK_KHR_xcb_surface', version=6, guard='VK_USE_PLATFORM_XCB_KHR', commands=[
+    Command(name='vkCreateXcbSurfaceKHR', dispatch='VkInstance'),
+    Command(name='vkGetPhysicalDeviceXcbPresentationSupportKHR', dispatch='VkPhysicalDevice'),
+])
+
+VK_KHR_xlib_surface = Extension(name='VK_KHR_xlib_surface', version=6, guard='VK_USE_PLATFORM_XLIB_KHR', commands=[
+    Command(name='vkCreateXlibSurfaceKHR', dispatch='VkInstance'),
+    Command(name='vkGetPhysicalDeviceXlibPresentationSupportKHR', dispatch='VkPhysicalDevice'),
+])
+
+VK_EXT_acquire_xlib_display = Extension(name='VK_EXT_acquire_xlib_display', version=1, guard='VK_USE_PLATFORM_XLIB_XRANDR_EXT', commands=[
+    Command(name='vkAcquireXlibDisplayEXT', dispatch='VkPhysicalDevice'),
+    Command(name='vkGetRandROutputDisplayEXT', dispatch='VkPhysicalDevice'),
+])
+
+extensions = [
+    VK_core_0,
+    VK_core_1,
+    VK_KHR_surface,
+    VK_KHR_swapchain,
+    VK_KHR_display,
+    VK_KHR_display_swapchain,
+    VK_KHR_sampler_mirror_clamp_to_edge,
+    VK_KHR_multiview,
+    VK_KHR_get_physical_device_properties2,
+    VK_KHR_device_group,
+    VK_KHR_shader_draw_parameters,
+    VK_KHR_maintenance1,
+    VK_KHR_device_group_creation,
+    VK_KHR_external_memory_capabilities,
+    VK_KHR_external_memory,
+    VK_KHR_external_memory_fd,
+    VK_KHR_external_semaphore_capabilities,
+    VK_KHR_external_semaphore,
+    VK_KHR_external_semaphore_fd,
+    VK_KHR_push_descriptor,
+    VK_KHR_16bit_storage,
+    VK_KHR_incremental_present,
+    VK_KHR_descriptor_update_template,
+    VK_KHR_create_renderpass2,
+    VK_KHR_shared_presentable_image,
+    VK_KHR_external_fence_capabilities,
+    VK_KHR_external_fence,
+    VK_KHR_external_fence_fd,
+    VK_KHR_maintenance2,
+    VK_KHR_get_surface_capabilities2,
+    VK_KHR_variable_pointers,
+    VK_KHR_get_display_properties2,
+    VK_KHR_dedicated_allocation,
+    VK_KHR_storage_buffer_storage_class,
+    VK_KHR_relaxed_block_layout,
+    VK_KHR_get_memory_requirements2,
+    VK_KHR_image_format_list,
+    VK_KHR_sampler_ycbcr_conversion,
+    VK_KHR_bind_memory2,
+    VK_KHR_maintenance3,
+    VK_KHR_draw_indirect_count,
+    VK_KHR_8bit_storage,
+    VK_KHR_shader_atomic_int64,
+    VK_KHR_driver_properties,
+    VK_KHR_vulkan_memory_model,
+    VK_EXT_debug_report,
+    VK_NV_glsl_shader,
+    VK_EXT_depth_range_unrestricted,
+    VK_IMG_filter_cubic,
+    VK_AMD_rasterization_order,
+    VK_AMD_shader_trinary_minmax,
+    VK_AMD_shader_explicit_vertex_parameter,
+    VK_EXT_debug_marker,
+    VK_AMD_gcn_shader,
+    VK_NV_dedicated_allocation,
+    VK_EXT_transform_feedback,
+    VK_AMD_draw_indirect_count,
+    VK_AMD_negative_viewport_height,
+    VK_AMD_gpu_shader_half_float,
+    VK_AMD_shader_ballot,
+    VK_AMD_texture_gather_bias_lod,
+    VK_AMD_shader_info,
+    VK_AMD_shader_image_load_store_lod,
+    VK_NV_corner_sampled_image,
+    VK_IMG_format_pvrtc,
+    VK_NV_external_memory_capabilities,
+    VK_NV_external_memory,
+    VK_EXT_validation_flags,
+    VK_EXT_shader_subgroup_ballot,
+    VK_EXT_shader_subgroup_vote,
+    VK_EXT_astc_decode_mode,
+    VK_EXT_conditional_rendering,
+    VK_NVX_device_generated_commands,
+    VK_NV_clip_space_w_scaling,
+    VK_EXT_direct_mode_display,
+    VK_EXT_display_surface_counter,
+    VK_EXT_display_control,
+    VK_GOOGLE_display_timing,
+    VK_NV_sample_mask_override_coverage,
+    VK_NV_geometry_shader_passthrough,
+    VK_NV_viewport_array2,
+    VK_NVX_multiview_per_view_attributes,
+    VK_NV_viewport_swizzle,
+    VK_EXT_discard_rectangles,
+    VK_EXT_conservative_rasterization,
+    VK_EXT_swapchain_colorspace,
+    VK_EXT_hdr_metadata,
+    VK_EXT_external_memory_dma_buf,
+    VK_EXT_queue_family_foreign,
+    VK_EXT_debug_utils,
+    VK_EXT_sampler_filter_minmax,
+    VK_AMD_gpu_shader_int16,
+    VK_AMD_mixed_attachment_samples,
+    VK_AMD_shader_fragment_mask,
+    VK_EXT_inline_uniform_block,
+    VK_EXT_shader_stencil_export,
+    VK_EXT_sample_locations,
+    VK_EXT_blend_operation_advanced,
+    VK_NV_fragment_coverage_to_color,
+    VK_NV_framebuffer_mixed_samples,
+    VK_NV_fill_rectangle,
+    VK_EXT_post_depth_coverage,
+    VK_EXT_extension_159,
+    VK_EXT_image_drm_format_modifier,
+    VK_EXT_validation_cache,
+    VK_EXT_descriptor_indexing,
+    VK_EXT_shader_viewport_index_layer,
+    VK_NV_shading_rate_image,
+    VK_NV_ray_tracing,
+    VK_NV_representative_fragment_test,
+    VK_EXT_global_priority,
+    VK_EXT_external_memory_host,
+    VK_AMD_buffer_marker,
+    VK_EXT_calibrated_timestamps,
+    VK_AMD_shader_core_properties,
+    VK_AMD_memory_overallocation_behavior,
+    VK_EXT_vertex_attribute_divisor,
+    VK_NV_shader_subgroup_partitioned,
+    VK_NV_compute_shader_derivatives,
+    VK_NV_mesh_shader,
+    VK_NV_fragment_shader_barycentric,
+    VK_NV_shader_image_footprint,
+    VK_NV_scissor_exclusive,
+    VK_NV_device_diagnostic_checkpoints,
+    VK_EXT_pci_bus_info,
+    VK_GOOGLE_hlsl_functionality1,
+    VK_GOOGLE_decorate_string,
+    VK_KHR_android_surface,
+    VK_ANDROID_external_memory_android_hardware_buffer,
+    VK_FUCHSIA_imagepipe_surface,
+    VK_MVK_ios_surface,
+    VK_MVK_macos_surface,
+    VK_NN_vi_surface,
+    VK_KHR_wayland_surface,
+    VK_KHR_win32_surface,
+    VK_KHR_external_memory_win32,
+    VK_KHR_win32_keyed_mutex,
+    VK_KHR_external_semaphore_win32,
+    VK_KHR_external_fence_win32,
+    VK_NV_external_memory_win32,
+    VK_NV_win32_keyed_mutex,
+    VK_KHR_xcb_surface,
+    VK_KHR_xlib_surface,
+    VK_EXT_acquire_xlib_display,
+]
+# end of generated code
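The Extension and Command constructors used throughout the generated table above are defined earlier in this script, outside this hunk. A minimal sketch of what they plausibly look like, inferred only from how they are used in this file and not taken from the real definitions, is:

    # Hypothetical sketch; the real classes live earlier in this script and may differ.
    class Command(object):
        def __init__(self, name, dispatch):
            self.name = name          # e.g. 'vkCmdDraw'
            self.dispatch = dispatch  # dispatchable handle type ('VkDevice', ...) or None

    class Extension(object):
        def __init__(self, name, version, guard=None, commands=None):
            self.name = name          # e.g. 'VK_KHR_swapchain'
            self.version = version    # extension spec version
            self.guard = guard        # platform #ifdef guard, or None
            self.commands = commands if commands is not None else []

        def add_command(self, cmd):
            self.commands.append(cmd)

The real Command presumably also provides the valid_c_typedef()/from_c_typedef() classmethods used by parse_subheader() below, and the real Extension a repr() that prints the literal constructor calls seen in the table above.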
+
+def generate_wrapper_header(guard):
+    copyright = []
+    copyright.append("/*                                                                         ")
+    copyright.append(" * Copyright 2018 The Android Open Source Project                          ")
+    copyright.append(" *                                                                         ")
+    copyright.append(" * Licensed under the Apache License, Version 2.0 (the \"License\");       ")
+    copyright.append(" * you may not use this file except in compliance with the License.        ")
+    copyright.append(" * You may obtain a copy of the License at                                 ")
+    copyright.append(" *                                                                         ")
+    copyright.append(" *      http://www.apache.org/licenses/LICENSE-2.0                         ")
+    copyright.append(" *                                                                         ")
+    copyright.append(" * Unless required by applicable law or agreed to in writing, software     ")
+    copyright.append(" * distributed under the License is distributed on an \"AS IS\" BASIS,     ")
+    copyright.append(" * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.")
+    copyright.append(" * See the License for the specific language governing permissions and     ")
+    copyright.append(" * limitations under the License.                                          ")
+    copyright.append(" */                                                                        ")
+    lines = [line.rstrip() for line in copyright]
+
+    lines.append("// This file is generated.")
+    lines.append("#ifndef %s" % guard)
+    lines.append("#define %s" % guard)
+    lines.append("")
+    lines.append("#ifdef __cplusplus")
+    lines.append("extern \"C\" {")
+    lines.append("#endif")
+    lines.append("")
+    lines.append("#define VK_NO_PROTOTYPES 1")
+    lines.append("#include <vulkan/vulkan.h>")
+    lines.append("")
+    lines.append("/* Initialize the Vulkan function pointer variables declared in this header.")
+    lines.append(" * Returns 0 if vulkan is not available, non-zero if it is available.")
+    lines.append(" */")
+    lines.append("int InitVulkan(void);")
+    lines.append("")
+
+    for ext in extensions:
+        # Only wrap core and WSI functions
+        wrapped_exts = {'VK_core', 'VK_KHR'}
+        if not any(ext.name.startswith(s) for s in wrapped_exts):
+            continue
+
+        if ext.guard:
+            lines.append("#ifdef %s" % ext.guard)
+
+        lines.append("// %s" % ext.name)
+        for cmd in ext.commands:
+            lines.append("extern PFN_%s %s;" % (cmd.name, cmd.name))
+
+        if ext.guard:
+            lines.append("#endif")
+        lines.append("")
+
+    lines.append("")
+    lines.append("")
+    lines.append("#ifdef __cplusplus")
+    lines.append("}")
+    lines.append("#endif")
+    lines.append("")
+    lines.append("#endif  // %s" % guard)
+
+    return "\n".join(lines)
+
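For a guarded WSI extension such as VK_KHR_android_surface, generate_wrapper_header() above emits a block of this shape in the generated header (reconstructed from the string-building code; only VK_core* and VK_KHR* extensions pass the wrapped_exts filter):

    #ifdef VK_USE_PLATFORM_ANDROID_KHR
    // VK_KHR_android_surface
    extern PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR;
    #endif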
+def generate_wrapper_source(header):
+    copyright = []
+    copyright.append("/*                                                                         ")
+    copyright.append(" * Copyright 2018 The Android Open Source Project                          ")
+    copyright.append(" *                                                                         ")
+    copyright.append(" * Licensed under the Apache License, Version 2.0 (the \"License\");       ")
+    copyright.append(" * you may not use this file except in compliance with the License.        ")
+    copyright.append(" * You may obtain a copy of the License at                                 ")
+    copyright.append(" *                                                                         ")
+    copyright.append(" *      http://www.apache.org/licenses/LICENSE-2.0                         ")
+    copyright.append(" *                                                                         ")
+    copyright.append(" * Unless required by applicable law or agreed to in writing, software     ")
+    copyright.append(" * distributed under the License is distributed on an \"AS IS\" BASIS,     ")
+    copyright.append(" * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.")
+    copyright.append(" * See the License for the specific language governing permissions and     ")
+    copyright.append(" * limitations under the License.                                          ")
+    copyright.append(" */                                                                        ")
+    lines = [line.rstrip() for line in copyright]
+
+    lines.append("// This file is generated.")
+    lines.append("#ifdef __cplusplus")
+    lines.append("extern \"C\" {")
+    lines.append("#endif")
+    lines.append("")
+    lines.append("#include \"%s\"" % header)
+    lines.append("#include <dlfcn.h>")
+    lines.append("")
+
+    lines.append("int InitVulkan(void) {")
+    lines.append("    void* libvulkan = dlopen(\"libvulkan.so\", RTLD_NOW | RTLD_LOCAL);")
+    lines.append("    if (!libvulkan)")
+    lines.append("        return 0;")
+    lines.append("")
+    lines.append("    // Vulkan supported, set function addresses")
+    for ext in extensions:
+        # Only wrap core and WSI functions
+        wrapped_exts = {'VK_core', 'VK_KHR'}
+        if not any(ext.name.startswith(s) for s in wrapped_exts):
+            continue
+
+        if ext.guard:
+            lines.append("")
+            lines.append("#ifdef %s" % ext.guard)
+
+        for cmd in ext.commands:
+            lines.append("    %s = reinterpret_cast<PFN_%s>(dlsym(libvulkan, \"%s\"));" % (cmd.name, cmd.name, cmd.name))
+
+        if ext.guard:
+            lines.append("#endif")
+
+    lines.append("    return 1;")
+    lines.append("}")
+    lines.append("")
+
+    lines.append("// No Vulkan support, do not set function addresses")
+    for ext in extensions:
+        if ext.guard:
+            lines.append("")
+            lines.append("#ifdef %s" % ext.guard)
+
+        for cmd in ext.commands:
+            lines.append("PFN_%s %s;" % (cmd.name, cmd.name))
+
+        if ext.guard:
+            lines.append("#endif")
+
+    lines.append("")
+    lines.append("#ifdef __cplusplus")
+    lines.append("}")
+    lines.append("#endif")
+
+    return "\n".join(lines)
+
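Correspondingly, generate_wrapper_source() above produces an InitVulkan() body of dlsym() assignments plus matching function-pointer definitions, e.g. for VK_KHR_surface (reconstructed from the format strings above):

    vkDestroySurfaceKHR = reinterpret_cast<PFN_vkDestroySurfaceKHR>(dlsym(libvulkan, "vkDestroySurfaceKHR"));
    ...
    PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR;

Note that the second loop, which emits the PFN_* definitions, does not apply the wrapped_exts filter used elsewhere, so definitions are emitted for every command in the table rather than only the VK_core*/VK_KHR* ones; whether that is intentional is not clear from this hunk.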
+def parse_subheader(filename, ext_guard):
+    sub_extensions = []
+
+    with open(filename, "r") as f:
+        current_ext = None
+        spec_version = None
+
+        for line in f:
+            line = line.strip()
+
+            if line.startswith("#define VK_API_VERSION"):
+                minor_end = line.rfind(",")
+                minor_begin = line.rfind(",", 0, minor_end) + 1
+                spec_version = int(line[minor_begin:minor_end])
+                # add core
+                current_ext = Extension("VK_core_%s" % spec_version, spec_version)
+                sub_extensions.append(current_ext)
+            elif Command.valid_c_typedef(line):
+                current_ext.add_command(Command.from_c_typedef(line))
+            elif line.startswith("#define") and "SPEC_VERSION " in line:
+                version_begin = line.rfind(" ") + 1
+                spec_version = int(line[version_begin:])
+            elif line.startswith("#define") and "EXTENSION_NAME " in line:
+                name_end = line.rfind("\"")
+                name_begin = line.rfind("\"", 0, name_end) + 1
+                name = line[name_begin:name_end]
+                # add extension
+                current_ext = Extension(name, spec_version, ext_guard)
+                sub_extensions.append(current_ext)
+
+    return sub_extensions
+
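As a concrete example of the parsing above: the standard vulkan_core.h defines (values as published by Khronos; spacing may differ)

    #define VK_KHR_SURFACE_SPEC_VERSION   25
    #define VK_KHR_SURFACE_EXTENSION_NAME "VK_KHR_surface"

first set spec_version to 25 via the SPEC_VERSION branch, then create Extension('VK_KHR_surface', 25, ext_guard) via the EXTENSION_NAME branch; any PFN typedef lines that follow are attached to that extension through Command.from_c_typedef(). This matches the VK_KHR_surface entry (version=25) in the generated table above.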
+def parse_vulkan_h(filename):
+    extensions = []
+
+    with open(filename, "r") as f:
+        ext_guard = None
+
+        for line in f:
+            line = line.strip()
+
+            if line.startswith("#include \"vulkan_"):
+                # Extract the filename and parse it.  Must be local to script file (no path).
+                extensions.extend(parse_subheader(line[10:].replace('"', ''), ext_guard))
+            elif line.startswith("#ifdef VK_USE_PLATFORM"):
+                guard_begin = line.find(" ") + 1
+                ext_guard = line[guard_begin:]
+            elif ext_guard and line.startswith("#endif") and ext_guard in line:
+                ext_guard = None
+
+    for ext in extensions:
+        print("%s = %s" % (ext.name, repr(ext)))
+        print("")
+
+    print("extensions = [")
+    for ext in extensions:
+        print("    %s," % ext.name)
+    print("]")
+
+if __name__ == "__main__":
+    if sys.argv[1] == "parse":
+        parse_vulkan_h(sys.argv[2])
+    else:
+        filename = sys.argv[1]
+        base = os.path.basename(filename)
+        contents = []
+
+        if base.endswith(".h"):
+            contents = generate_wrapper_header(base.replace(".", "_").upper())
+        elif base.endswith(".cpp"):
+            contents = generate_wrapper_source(base.replace(".cpp", ".h"))
+
+        with open(filename, "w") as f:
+            print(contents, file=f)
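Judging from the __main__ block above, the script supports two modes; its own filename lies outside this hunk, so <this_script> below is only a placeholder:

    python <this_script> parse path/to/vulkan.h   # print a regenerated extension table to stdout
    python <this_script> vulkan_wrapper.h         # write the wrapper header (any *.h target works)
    python <this_script> vulkan_wrapper.cpp       # write the wrapper source, which includes "vulkan_wrapper.h"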
diff --git a/src/third_party/vulkan-tools/src/scripts/known_good.json b/src/third_party/vulkan-tools/src/scripts/known_good.json
new file mode 100644
index 0000000..f256bd5
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/scripts/known_good.json
@@ -0,0 +1,66 @@
+{
+  "repos" : [
+    {
+      "name" : "glslang",
+      "url" : "https://github.com/KhronosGroup/glslang.git",
+      "sub_dir" : "glslang",
+      "build_dir" : "glslang/build",
+      "install_dir" : "glslang/build/install",
+      "commit" : "b131630e7c749a5dc19faa458024260c71fb170f",
+      "prebuild" : [
+        "python update_glslang_sources.py"
+      ]
+    },
+    {
+      "name" : "Vulkan-Headers",
+      "url" : "https://github.com/KhronosGroup/Vulkan-Headers.git",
+      "sub_dir" : "Vulkan-Headers",
+      "build_dir" : "Vulkan-Headers/build",
+      "install_dir" : "Vulkan-Headers/build/install",
+      "commit" : "v1.1.130"
+    },
+    {
+      "name" : "MoltenVK",
+      "url" : "https://github.com/KhronosGroup/MoltenVK.git",
+      "sub_dir" : "MoltenVK",
+      "build_dir" : "MoltenVK",
+      "install_dir" : "MoltenVK",
+      "commit" : "v1.0.38",
+      "custom_build" : [
+        "./fetchDependencies --glslang-root {0[glslang][repo_dir]}",
+        "xcodebuild -project MoltenVKPackaging.xcodeproj GCC_PREPROCESSOR_DEFINITIONS='$GCC_PREPROCESSOR_DEFINITIONS MVK_CONFIG_LOG_LEVEL=1' -scheme \"MoltenVK Package\" build"
+      ],
+      "build_step" : "custom",
+      "build_platforms" : [
+        "darwin"
+      ]
+    },
+    {
+      "name" : "Vulkan-Loader",
+      "url" : "https://github.com/KhronosGroup/Vulkan-Loader.git",
+      "sub_dir" : "Vulkan-Loader",
+      "build_dir" : "Vulkan-Loader/build",
+      "install_dir" : "Vulkan-Loader/build/install",
+      "commit" : "v1.1.130",
+      "deps" : [
+        {
+          "var_name" : "VULKAN_HEADERS_INSTALL_DIR",
+          "repo_name" : "Vulkan-Headers"
+        }
+      ],
+      "cmake_options" : [
+        "-DBUILD_TESTS=NO"
+      ],
+      "build_platforms" : [
+        "linux",
+        "darwin"
+      ]
+    }
+  ],
+  "install_names" : {
+      "glslang" : "GLSLANG_INSTALL_DIR",
+      "Vulkan-Headers" : "VULKAN_HEADERS_INSTALL_DIR",
+      "Vulkan-Loader" : "VULKAN_LOADER_INSTALL_DIR",
+      "MoltenVK" : "MOLTENVK_REPO_ROOT"
+    }
+}
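known_good.json pins the Khronos dependencies (glslang, Vulkan-Headers, MoltenVK, Vulkan-Loader) to specific commits or tags together with their build, install, and per-platform settings; the scripts that consume it are outside this hunk. A minimal sketch of inspecting the pins, assuming the file path from the diff header above, could be:

    import json

    # Path taken from this diff; adjust to your checkout layout if needed.
    path = "src/third_party/vulkan-tools/src/scripts/known_good.json"
    with open(path) as f:
        known_good = json.load(f)

    for repo in known_good["repos"]:
        # e.g. "Vulkan-Headers https://github.com/KhronosGroup/Vulkan-Headers.git v1.1.130"
        print(repo["name"], repo["url"], repo["commit"])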
diff --git a/src/third_party/vulkan-tools/src/scripts/kvt_genvk.py b/src/third_party/vulkan-tools/src/scripts/kvt_genvk.py
new file mode 100644
index 0000000..ce021a7
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/scripts/kvt_genvk.py
@@ -0,0 +1,403 @@
+#!/usr/bin/python3
+#
+# Copyright (c) 2013-2019 The Khronos Group Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import cProfile
+import pdb
+import string
+import sys
+import time
+import os
+
+# Simple timer functions
+startTime = None
+
+
+def startTimer(timeit):
+    global startTime
+    if timeit:
+        startTime = time.process_time()
+
+
+def endTimer(timeit, msg):
+    global startTime
+    if timeit:
+        endTime = time.process_time()
+        write(msg, endTime - startTime, file=sys.stderr)
+        startTime = None
+
+# Turn a list of strings into a regexp string matching exactly those strings
+
+
+def makeREstring(list, default=None):
+    if len(list) > 0 or default is None:
+        return '^(' + '|'.join(list) + ')$'
+    else:
+        return default
+
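For example, with the helper above:

    makeREstring(['VK_KHR_surface', 'VK_KHR_swapchain'])  # -> '^(VK_KHR_surface|VK_KHR_swapchain)$'
    makeREstring([], default='.*')                        # -> '.*' (empty list falls back to the default)
    makeREstring([])                                      # -> '^()$' (empty list, no default given)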
+# Returns a dictionary of [ generator function, generator options ] indexed
+# by specified short names. The generator options incorporate the following
+# parameters:
+#
+# args is a parsed argument object; see below for the fields that are used.
+
+
+def makeGenOpts(args):
+    global genOpts
+    genOpts = {}
+
+    # Default class of extensions to include, or None
+    defaultExtensions = args.defaultExtensions
+
+    # Additional extensions to include (list of extensions)
+    extensions = args.extension
+
+    # Extensions to remove (list of extensions)
+    removeExtensions = args.removeExtensions
+
+    # Extensions to emit (list of extensions)
+    emitExtensions = args.emitExtensions
+
+    # Features to include (list of features)
+    features = args.feature
+
+    # Whether to disable inclusion protect in headers
+    protect = args.protect
+
+    # Output target directory
+    directory = args.directory
+
+    # Descriptive names for various regexp patterns used to select
+    # versions and extensions
+    allFeatures = allExtensions = '.*'
+    noFeatures = noExtensions = None
+
+    # Turn lists of names/patterns into matching regular expressions
+    addExtensionsPat = makeREstring(extensions, None)
+    removeExtensionsPat = makeREstring(removeExtensions, None)
+    emitExtensionsPat = makeREstring(emitExtensions, allExtensions)
+    featuresPat = makeREstring(features, allFeatures)
+
+    # Copyright text prefixing all headers (list of strings).
+    prefixStrings = [
+        '/*',
+        '** Copyright (c) 2015-2018 The Khronos Group Inc.',
+        '**',
+        '** Licensed under the Apache License, Version 2.0 (the "License");',
+        '** you may not use this file except in compliance with the License.',
+        '** You may obtain a copy of the License at',
+        '**',
+        '**     http://www.apache.org/licenses/LICENSE-2.0',
+        '**',
+        '** Unless required by applicable law or agreed to in writing, software',
+        '** distributed under the License is distributed on an "AS IS" BASIS,',
+        '** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.',
+        '** See the License for the specific language governing permissions and',
+        '** limitations under the License.',
+        '*/',
+        ''
+    ]
+
+    # Text specific to Vulkan headers
+    vkPrefixStrings = [
+        '/*',
+        '** This header is generated from the Khronos Vulkan XML API Registry.',
+        '**',
+        '*/',
+        ''
+    ]
+
+    # Defaults for generating re-inclusion protection wrappers (or not)
+    protectFeature = protect
+
+    # An API style conventions object
+    conventions = VulkanConventions()
+
+    # Helper file generator options for typemap_helper.h
+    genOpts['vk_typemap_helper.h'] = [
+        HelperFileOutputGenerator,
+        HelperFileOutputGeneratorOptions(
+            conventions=conventions,
+            filename='vk_typemap_helper.h',
+            directory=directory,
+            apiname='vulkan',
+            profile=None,
+            versions=featuresPat,
+            emitversions=featuresPat,
+            defaultExtensions='vulkan',
+            addExtensions=addExtensionsPat,
+            removeExtensions=removeExtensionsPat,
+            emitExtensions=emitExtensionsPat,
+            prefixText=prefixStrings + vkPrefixStrings,
+            protectFeature=False,
+            apicall='VKAPI_ATTR ',
+            apientry='VKAPI_CALL ',
+            apientryp='VKAPI_PTR *',
+            alignFuncParam=48,
+            expandEnumerants=False,
+            helper_file_type='typemap_helper_header')
+    ]
+
+    # Options for mock ICD header
+    genOpts['mock_icd.h'] = [
+        MockICDOutputGenerator,
+        MockICDGeneratorOptions(
+            conventions=conventions,
+            filename='mock_icd.h',
+            directory=directory,
+            apiname='vulkan',
+            profile=None,
+            versions=featuresPat,
+            emitversions=featuresPat,
+            defaultExtensions='vulkan',
+            addExtensions=addExtensionsPat,
+            removeExtensions=removeExtensionsPat,
+            emitExtensions=emitExtensionsPat,
+            prefixText=prefixStrings + vkPrefixStrings,
+            protectFeature=False,
+            apicall='VKAPI_ATTR ',
+            apientry='VKAPI_CALL ',
+            apientryp='VKAPI_PTR *',
+            alignFuncParam=48,
+            expandEnumerants=False,
+            helper_file_type='mock_icd_header')
+    ]
+
+    # Options for mock ICD cpp
+    genOpts['mock_icd.cpp'] = [
+        MockICDOutputGenerator,
+        MockICDGeneratorOptions(
+            conventions=conventions,
+            filename='mock_icd.cpp',
+            directory=directory,
+            apiname='vulkan',
+            profile=None,
+            versions=featuresPat,
+            emitversions=featuresPat,
+            defaultExtensions='vulkan',
+            addExtensions=addExtensionsPat,
+            removeExtensions=removeExtensionsPat,
+            emitExtensions=emitExtensionsPat,
+            prefixText=prefixStrings + vkPrefixStrings,
+            protectFeature=False,
+            apicall='VKAPI_ATTR ',
+            apientry='VKAPI_CALL ',
+            apientryp='VKAPI_PTR *',
+            alignFuncParam=48,
+            expandEnumerants=False,
+            helper_file_type='mock_icd_source')
+    ]
+
+    # Options for vulkaninfo.hpp
+    genOpts['vulkaninfo.hpp'] = [
+        VulkanInfoGenerator,
+        VulkanInfoGeneratorOptions(
+            conventions=conventions,
+            filename='vulkaninfo.hpp',
+            directory=directory,
+            apiname='vulkan',
+            profile=None,
+            versions=featuresPat,
+            emitversions=featuresPat,
+            defaultExtensions='vulkan',
+            addExtensions=addExtensionsPat,
+            removeExtensions=removeExtensionsPat,
+            emitExtensions=emitExtensionsPat,
+            prefixText=prefixStrings + vkPrefixStrings,
+            protectFeature=False,
+            apicall='VKAPI_ATTR ',
+            apientry='VKAPI_CALL ',
+            apientryp='VKAPI_PTR *',
+            alignFuncParam=48,
+            expandEnumerants=False)
+    ]
+
+
+# Generate a target based on the options in the matching genOpts{} object.
+# This is encapsulated in a function so it can be profiled and/or timed.
+# The args parameter is a parsed argument object containing the following
+# fields that are used:
+#   target - target to generate
+#   directory - directory to generate it in
+#   protect - True if re-inclusion wrappers should be created
+#   extensions - list of additional extensions to include in generated
+#   interfaces
+def genTarget(args):
+    global genOpts
+
+    # Create generator options with specified parameters
+    makeGenOpts(args)
+
+    if (args.target in genOpts.keys()):
+        createGenerator = genOpts[args.target][0]
+        options = genOpts[args.target][1]
+
+        if not args.quiet:
+            write('* Building', options.filename, file=sys.stderr)
+            write('* options.versions          =',
+                  options.versions, file=sys.stderr)
+            write('* options.emitversions      =',
+                  options.emitversions, file=sys.stderr)
+            write('* options.defaultExtensions =',
+                  options.defaultExtensions, file=sys.stderr)
+            write('* options.addExtensions     =',
+                  options.addExtensions, file=sys.stderr)
+            write('* options.removeExtensions  =',
+                  options.removeExtensions, file=sys.stderr)
+            write('* options.emitExtensions    =',
+                  options.emitExtensions, file=sys.stderr)
+
+        startTimer(args.time)
+        gen = createGenerator(errFile=errWarn,
+                              warnFile=errWarn,
+                              diagFile=diag)
+        reg.setGenerator(gen)
+        reg.apiGen(options)
+
+        if not args.quiet:
+            write('* Generated', options.filename, file=sys.stderr)
+        endTimer(args.time, '* Time to generate ' + options.filename + ' =')
+    else:
+        write('No generator options for unknown target:',
+              args.target, file=sys.stderr)
+
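+# A minimal sketch of the argument object genTarget() and makeGenOpts() expect
+# (illustrative only; in normal use it is the argparse Namespace built in
+# __main__ below):
+#
+#   args = argparse.Namespace(target='mock_icd.h', directory='.', quiet=True,
+#                             time=False, feature=[], extension=[],
+#                             removeExtensions=[], emitExtensions=[],
+#                             defaultExtensions='vulkan')
+#   genTarget(args)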
+
+# -feature name
+# -extension name
+# For both, "name" may be a single name, or a space-separated list
+# of names, or a regular expression.
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+
+    parser.add_argument('-defaultExtensions', action='store',
+                        default='vulkan',
+                        help='Specify a single class of extensions to add to targets')
+    parser.add_argument('-extension', action='append',
+                        default=[],
+                        help='Specify an extension or extensions to add to targets')
+    parser.add_argument('-removeExtensions', action='append',
+                        default=[],
+                        help='Specify an extension or extensions to remove from targets')
+    parser.add_argument('-emitExtensions', action='append',
+                        default=[],
+                        help='Specify an extension or extensions to emit in targets')
+    parser.add_argument('-feature', action='append',
+                        default=[],
+                        help='Specify a core API feature name or names to add to targets')
+    parser.add_argument('-debug', action='store_true',
+                        help='Enable debugging')
+    parser.add_argument('-dump', action='store_true',
+                        help='Enable dump to stderr')
+    parser.add_argument('-diagfile', action='store',
+                        default=None,
+                        help='Write diagnostics to specified file')
+    parser.add_argument('-errfile', action='store',
+                        default=None,
+                        help='Write errors and warnings to specified file instead of stderr')
+    parser.add_argument('-noprotect', dest='protect', action='store_false',
+                        help='Disable inclusion protection in output headers')
+    parser.add_argument('-profile', action='store_true',
+                        help='Enable profiling')
+    parser.add_argument('-registry', action='store',
+                        default='vk.xml',
+                        help='Use specified registry file instead of vk.xml')
+    parser.add_argument('-time', action='store_true',
+                        help='Enable timing')
+    parser.add_argument('-validate', action='store_true',
+                        help='Enable group validation')
+    parser.add_argument('-o', action='store', dest='directory',
+                        default='.',
+                        help='Create target and related files in specified directory')
+    parser.add_argument('target', metavar='target', nargs='?',
+                        help='Specify target')
+    parser.add_argument('-quiet', action='store_true', default=True,
+                        help='Suppress script output during normal execution.')
+    parser.add_argument('-verbose', action='store_false', dest='quiet', default=True,
+                        help='Enable script output during normal execution.')
+
+    # This argument tells us where to find the scripts from the Vulkan-Headers registry
+    parser.add_argument('-scripts', action='store',
+                        help='Find additional scripts in this directory')
+
+    args = parser.parse_args()
+
+    # Default the scripts path to the same directory as the registry
+    if not args.scripts:
+        args.scripts = os.path.dirname(args.registry)
+
+    scripts_directory_path = os.path.dirname(os.path.abspath(__file__))
+    registry_headers_path = os.path.join(scripts_directory_path, args.scripts)
+    sys.path.insert(0, registry_headers_path)
+
+    from reg import *
+    from generator import write
+    from cgenerator import CGeneratorOptions, COutputGenerator
+
+    # Generator Modifications
+    from mock_icd_generator import MockICDGeneratorOptions, MockICDOutputGenerator
+    from vulkan_tools_helper_file_generator import HelperFileOutputGenerator, HelperFileOutputGeneratorOptions
+    from vulkaninfo_generator import VulkanInfoGenerator, VulkanInfoGeneratorOptions
+    # Temporary workaround for vkconventions python2 compatibility
+    import abc
+    abc.ABC = abc.ABCMeta('ABC', (object,), {})
+    from vkconventions import VulkanConventions
+
+    # This splits arguments which are space-separated lists
+    args.feature = [name for arg in args.feature for name in arg.split()]
+    args.extension = [name for arg in args.extension for name in arg.split()]
+
+    # Load & parse registry
+    reg = Registry()
+
+    startTimer(args.time)
+    tree = etree.parse(args.registry)
+    endTimer(args.time, '* Time to make ElementTree =')
+
+    if args.debug:
+        pdb.run('reg.loadElementTree(tree)')
+    else:
+        startTimer(args.time)
+        reg.loadElementTree(tree)
+        endTimer(args.time, '* Time to parse ElementTree =')
+
+    if (args.validate):
+        reg.validateGroups()
+
+    if (args.dump):
+        write('* Dumping registry to regdump.txt', file=sys.stderr)
+        reg.dumpReg(filehandle=open('regdump.txt', 'w', encoding='utf-8'))
+
+    # create error/warning & diagnostic files
+    if (args.errfile):
+        errWarn = open(args.errfile, 'w', encoding='utf-8')
+    else:
+        errWarn = sys.stderr
+
+    if (args.diagfile):
+        diag = open(args.diagfile, 'w', encoding='utf-8')
+    else:
+        diag = None
+
+    if (args.debug):
+        pdb.run('genTarget(args)')
+    elif (args.profile):
+        import cProfile
+        import pstats
+        cProfile.run('genTarget(args)', 'profile.txt')
+        p = pstats.Stats('profile.txt')
+        p.strip_dirs().sort_stats('time').print_stats(50)
+    else:
+        genTarget(args)
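+
+# Illustrative invocation of this generator script (hypothetical paths; the
+# actual script, registry and output locations depend on the checkout layout):
+#
+#   python3 <this_script>.py -registry path/to/vk.xml \
+#       -scripts path/to/registry -o generated mock_icd.cpp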
diff --git a/src/third_party/vulkan-tools/src/scripts/mock_icd_generator.py b/src/third_party/vulkan-tools/src/scripts/mock_icd_generator.py
new file mode 100644
index 0000000..33f2cad
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/scripts/mock_icd_generator.py
@@ -0,0 +1,1318 @@
+#!/usr/bin/python3 -i
+#
+# Copyright (c) 2015-2017 The Khronos Group Inc.
+# Copyright (c) 2015-2017 Valve Corporation
+# Copyright (c) 2015-2017 LunarG, Inc.
+# Copyright (c) 2015-2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Tobin Ehlis <tobine@google.com>
+#
+# This script generates a Mock ICD that intercepts almost all Vulkan
+#  functions. The generated ICD is not a functional driver; it is a minimal
+#  mock of a Vulkan device intended to enable Vulkan Validation testing, and
+#  a starting point that can be copied and customized for other test needs.
+
+import os,re,sys
+from generator import *
+from common_codegen import *
+
+
+# Mock header code
+HEADER_C_CODE = '''
+using mutex_t = std::mutex;
+using lock_guard_t = std::lock_guard<mutex_t>;
+using unique_lock_t = std::unique_lock<mutex_t>;
+
+static mutex_t global_lock;
+static uint64_t global_unique_handle = 1;
+static const uint32_t SUPPORTED_LOADER_ICD_INTERFACE_VERSION = 5;
+static uint32_t loader_interface_version = 0;
+static bool negotiate_loader_icd_interface_called = false;
+static void* CreateDispObjHandle() {
+    auto handle = new VK_LOADER_DATA;
+    set_loader_magic_value(handle);
+    return handle;
+}
+static void DestroyDispObjHandle(void* handle) {
+    delete reinterpret_cast<VK_LOADER_DATA*>(handle);
+}
+'''
+
+# Manual code at the top of the cpp source file
+SOURCE_CPP_PREFIX = '''
+using std::unordered_map;
+
+// Map device memory handle to any mapped allocations that we'll need to free on unmap
+static unordered_map<VkDeviceMemory, std::vector<void*>> mapped_memory_map;
+
+static VkPhysicalDevice physical_device = nullptr;
+static unordered_map<VkDevice, unordered_map<uint32_t, unordered_map<uint32_t, VkQueue>>> queue_map;
+static unordered_map<VkDevice, unordered_map<VkBuffer, VkBufferCreateInfo>> buffer_map;
+
+// TODO: Would like to codegen this but limits aren't in XML
+static VkPhysicalDeviceLimits SetLimits(VkPhysicalDeviceLimits *limits) {
+    limits->maxImageDimension1D = 4096;
+    limits->maxImageDimension2D = 4096;
+    limits->maxImageDimension3D = 256;
+    limits->maxImageDimensionCube = 4096;
+    limits->maxImageArrayLayers = 256;
+    limits->maxTexelBufferElements = 65536;
+    limits->maxUniformBufferRange = 16384;
+    limits->maxStorageBufferRange = 134217728;
+    limits->maxPushConstantsSize = 128;
+    limits->maxMemoryAllocationCount = 4096;
+    limits->maxSamplerAllocationCount = 4000;
+    limits->bufferImageGranularity = 1;
+    limits->sparseAddressSpaceSize = 2147483648;
+    limits->maxBoundDescriptorSets = 4;
+    limits->maxPerStageDescriptorSamplers = 16;
+    limits->maxPerStageDescriptorUniformBuffers = 12;
+    limits->maxPerStageDescriptorStorageBuffers = 4;
+    limits->maxPerStageDescriptorSampledImages = 16;
+    limits->maxPerStageDescriptorStorageImages = 4;
+    limits->maxPerStageDescriptorInputAttachments = 4;
+    limits->maxPerStageResources = 128;
+    limits->maxDescriptorSetSamplers = 96;
+    limits->maxDescriptorSetUniformBuffers = 72;
+    limits->maxDescriptorSetUniformBuffersDynamic = 8;
+    limits->maxDescriptorSetStorageBuffers = 24;
+    limits->maxDescriptorSetStorageBuffersDynamic = 4;
+    limits->maxDescriptorSetSampledImages = 96;
+    limits->maxDescriptorSetStorageImages = 24;
+    limits->maxDescriptorSetInputAttachments = 4;
+    limits->maxVertexInputAttributes = 16;
+    limits->maxVertexInputBindings = 16;
+    limits->maxVertexInputAttributeOffset = 2047;
+    limits->maxVertexInputBindingStride = 2048;
+    limits->maxVertexOutputComponents = 64;
+    limits->maxTessellationGenerationLevel = 64;
+    limits->maxTessellationPatchSize = 32;
+    limits->maxTessellationControlPerVertexInputComponents = 64;
+    limits->maxTessellationControlPerVertexOutputComponents = 64;
+    limits->maxTessellationControlPerPatchOutputComponents = 120;
+    limits->maxTessellationControlTotalOutputComponents = 2048;
+    limits->maxTessellationEvaluationInputComponents = 64;
+    limits->maxTessellationEvaluationOutputComponents = 64;
+    limits->maxGeometryShaderInvocations = 32;
+    limits->maxGeometryInputComponents = 64;
+    limits->maxGeometryOutputComponents = 64;
+    limits->maxGeometryOutputVertices = 256;
+    limits->maxGeometryTotalOutputComponents = 1024;
+    limits->maxFragmentInputComponents = 64;
+    limits->maxFragmentOutputAttachments = 4;
+    limits->maxFragmentDualSrcAttachments = 1;
+    limits->maxFragmentCombinedOutputResources = 4;
+    limits->maxComputeSharedMemorySize = 16384;
+    limits->maxComputeWorkGroupCount[0] = 65535;
+    limits->maxComputeWorkGroupCount[1] = 65535;
+    limits->maxComputeWorkGroupCount[2] = 65535;
+    limits->maxComputeWorkGroupInvocations = 128;
+    limits->maxComputeWorkGroupSize[0] = 128;
+    limits->maxComputeWorkGroupSize[1] = 128;
+    limits->maxComputeWorkGroupSize[2] = 64;
+    limits->subPixelPrecisionBits = 4;
+    limits->subTexelPrecisionBits = 4;
+    limits->mipmapPrecisionBits = 4;
+    limits->maxDrawIndexedIndexValue = UINT32_MAX;
+    limits->maxDrawIndirectCount = UINT16_MAX;
+    limits->maxSamplerLodBias = 2.0f;
+    limits->maxSamplerAnisotropy = 16;
+    limits->maxViewports = 16;
+    limits->maxViewportDimensions[0] = 4096;
+    limits->maxViewportDimensions[1] = 4096;
+    limits->viewportBoundsRange[0] = -8192;
+    limits->viewportBoundsRange[1] = 8191;
+    limits->viewportSubPixelBits = 0;
+    limits->minMemoryMapAlignment = 64;
+    limits->minTexelBufferOffsetAlignment = 16;
+    limits->minUniformBufferOffsetAlignment = 16;
+    limits->minStorageBufferOffsetAlignment = 16;
+    limits->minTexelOffset = -8;
+    limits->maxTexelOffset = 7;
+    limits->minTexelGatherOffset = -8;
+    limits->maxTexelGatherOffset = 7;
+    limits->minInterpolationOffset = 0.0f;
+    limits->maxInterpolationOffset = 0.5f;
+    limits->subPixelInterpolationOffsetBits = 4;
+    limits->maxFramebufferWidth = 4096;
+    limits->maxFramebufferHeight = 4096;
+    limits->maxFramebufferLayers = 256;
+    limits->framebufferColorSampleCounts = 0x7F;
+    limits->framebufferDepthSampleCounts = 0x7F;
+    limits->framebufferStencilSampleCounts = 0x7F;
+    limits->framebufferNoAttachmentsSampleCounts = 0x7F;
+    limits->maxColorAttachments = 4;
+    limits->sampledImageColorSampleCounts = 0x7F;
+    limits->sampledImageIntegerSampleCounts = 0x7F;
+    limits->sampledImageDepthSampleCounts = 0x7F;
+    limits->sampledImageStencilSampleCounts = 0x7F;
+    limits->storageImageSampleCounts = 0x7F;
+    limits->maxSampleMaskWords = 1;
+    limits->timestampComputeAndGraphics = VK_TRUE;
+    limits->timestampPeriod = 1;
+    limits->maxClipDistances = 8;
+    limits->maxCullDistances = 8;
+    limits->maxCombinedClipAndCullDistances = 8;
+    limits->discreteQueuePriorities = 2;
+    limits->pointSizeRange[0] = 1.0f;
+    limits->pointSizeRange[1] = 64.0f;
+    limits->lineWidthRange[0] = 1.0f;
+    limits->lineWidthRange[1] = 8.0f;
+    limits->pointSizeGranularity = 1.0f;
+    limits->lineWidthGranularity = 1.0f;
+    limits->strictLines = VK_TRUE;
+    limits->standardSampleLocations = VK_TRUE;
+    limits->optimalBufferCopyOffsetAlignment = 1;
+    limits->optimalBufferCopyRowPitchAlignment = 1;
+    limits->nonCoherentAtomSize = 256;
+
+    return *limits;
+}
+
+void SetBoolArrayTrue(VkBool32* bool_array, uint32_t num_bools)
+{
+    for (uint32_t i = 0; i < num_bools; ++i) {
+        bool_array[i] = VK_TRUE;
+    }
+}
+'''
+
+# Manual code at the end of the cpp source file
+SOURCE_CPP_POSTFIX = '''
+
+static VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetPhysicalDeviceProcAddr(VkInstance instance, const char *funcName) {
+    // TODO: This function should only care about physical device functions and return nullptr for other functions
+    const auto &item = name_to_funcptr_map.find(funcName);
+    if (item != name_to_funcptr_map.end()) {
+        return reinterpret_cast<PFN_vkVoidFunction>(item->second);
+    }
+    // Mock should intercept all functions so if we get here just return null
+    return nullptr;
+}
+
+} // namespace vkmock
+
+#if defined(__GNUC__) && __GNUC__ >= 4
+#define EXPORT __attribute__((visibility("default")))
+#elif defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590)
+#define EXPORT __attribute__((visibility("default")))
+#else
+#define EXPORT
+#endif
+
+extern "C" {
+
+EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance, const char* pName) {
+    if (!vkmock::negotiate_loader_icd_interface_called) {
+        vkmock::loader_interface_version = 1;
+    }
+    return vkmock::GetInstanceProcAddr(instance, pName);
+}
+
+EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetPhysicalDeviceProcAddr(VkInstance instance, const char* pName) {
+    return vkmock::GetPhysicalDeviceProcAddr(instance, pName);
+}
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pSupportedVersion) {
+    vkmock::negotiate_loader_icd_interface_called = true;
+    vkmock::loader_interface_version = *pSupportedVersion;
+    if (*pSupportedVersion > vkmock::SUPPORTED_LOADER_ICD_INTERFACE_VERSION) {
+        *pSupportedVersion = vkmock::SUPPORTED_LOADER_ICD_INTERFACE_VERSION;
+    }
+    return VK_SUCCESS;
+}
+
+
+EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR(
+    VkInstance                                  instance,
+    VkSurfaceKHR                                surface,
+    const VkAllocationCallbacks*                pAllocator)
+{
+    vkmock::DestroySurfaceKHR(instance, surface, pAllocator);
+}
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    VkSurfaceKHR                                surface,
+    VkBool32*                                   pSupported)
+{
+    return vkmock::GetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported);
+}
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilitiesKHR*                   pSurfaceCapabilities)
+{
+    return vkmock::GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities);
+}
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormatKHR*                         pSurfaceFormats)
+{
+    return vkmock::GetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);
+}
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes)
+{
+    return vkmock::GetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes);
+}
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR(
+    VkInstance                                  instance,
+    const VkDisplaySurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    return vkmock::CreateDisplayPlaneSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+}
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateXlibSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXlibSurfaceCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    return vkmock::CreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+}
+#endif /* VK_USE_PLATFORM_XLIB_KHR */
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXcbSurfaceCreateInfoKHR*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    return vkmock::CreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+}
+#endif /* VK_USE_PLATFORM_XCB_KHR */
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateWaylandSurfaceKHR(
+    VkInstance                                  instance,
+    const VkWaylandSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    return vkmock::CreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+}
+#endif /* VK_USE_PLATFORM_WAYLAND_KHR */
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateAndroidSurfaceKHR(
+    VkInstance                                  instance,
+    const VkAndroidSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    return vkmock::CreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+}
+#endif /* VK_USE_PLATFORM_ANDROID_KHR */
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR(
+    VkInstance                                  instance,
+    const VkWin32SurfaceCreateInfoKHR*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    return vkmock::CreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+}
+#endif /* VK_USE_PLATFORM_WIN32_KHR */
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHR(
+    VkDevice                                    device,
+    VkSurfaceKHR                                surface,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes)
+{
+    return vkmock::GetDeviceGroupSurfacePresentModesKHR(device, surface, pModes);
+}
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pRectCount,
+    VkRect2D*                                   pRects)
+{
+    return vkmock::GetPhysicalDevicePresentRectanglesKHR(physicalDevice, surface, pRectCount, pRects);
+}
+
+#ifdef VK_USE_PLATFORM_VI_NN
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateViSurfaceNN(
+    VkInstance                                  instance,
+    const VkViSurfaceCreateInfoNN*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    return vkmock::CreateViSurfaceNN(instance, pCreateInfo, pAllocator, pSurface);
+}
+#endif /* VK_USE_PLATFORM_VI_NN */
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilities2EXT*                  pSurfaceCapabilities)
+{
+    return vkmock::GetPhysicalDeviceSurfaceCapabilities2EXT(physicalDevice, surface, pSurfaceCapabilities);
+}
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateIOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkIOSSurfaceCreateInfoMVK*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    return vkmock::CreateIOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
+}
+#endif /* VK_USE_PLATFORM_IOS_MVK */
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+
+EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateMacOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkMacOSSurfaceCreateInfoMVK*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    return vkmock::CreateMacOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
+}
+#endif /* VK_USE_PLATFORM_MACOS_MVK */
+
+} // end extern "C"
+
+'''
+
+CUSTOM_C_INTERCEPTS = {
+'vkCreateInstance': '''
+    // TODO: If loader ver <=4 ICD must fail with VK_ERROR_INCOMPATIBLE_DRIVER for all vkCreateInstance calls with
+    //  apiVersion set to > Vulkan 1.0 because the loader is still at interface version <= 4. Otherwise, the
+    //  ICD should behave as normal.
+    if (loader_interface_version <= 4) {
+        return VK_ERROR_INCOMPATIBLE_DRIVER;
+    }
+    *pInstance = (VkInstance)CreateDispObjHandle();
+    // TODO: If emulating specific device caps, will need to add intelligence here
+    return VK_SUCCESS;
+''',
+'vkDestroyInstance': '''
+    // Destroy physical device
+    DestroyDispObjHandle((void*)physical_device);
+
+    DestroyDispObjHandle((void*)instance);
+''',
+'vkEnumeratePhysicalDevices': '''
+    if (pPhysicalDevices) {
+        if (!physical_device) {
+            physical_device = (VkPhysicalDevice)CreateDispObjHandle();
+        }
+        *pPhysicalDevices = physical_device;
+    } else {
+        *pPhysicalDeviceCount = 1;
+    }
+    return VK_SUCCESS;
+''',
+'vkCreateDevice': '''
+    *pDevice = (VkDevice)CreateDispObjHandle();
+    // TODO: If emulating specific device caps, will need to add intelligence here
+    return VK_SUCCESS;
+''',
+'vkDestroyDevice': '''
+    unique_lock_t lock(global_lock);
+    // First destroy sub-device objects
+    // Destroy Queues
+    for (auto dev_queue_map_pair : queue_map) {
+        for (auto queue_family_map_pair : queue_map[dev_queue_map_pair.first]) {
+            for (auto index_queue_pair : queue_map[dev_queue_map_pair.first][queue_family_map_pair.first]) {
+                DestroyDispObjHandle((void*)index_queue_pair.second);
+            }
+        }
+    }
+    queue_map.clear();
+    // Now destroy device
+    DestroyDispObjHandle((void*)device);
+    // TODO: If emulating specific device caps, will need to add intelligence here
+''',
+'vkGetDeviceQueue': '''
+    unique_lock_t lock(global_lock);
+    auto queue = queue_map[device][queueFamilyIndex][queueIndex];
+    if (queue) {
+        *pQueue = queue;
+    } else {
+        *pQueue = queue_map[device][queueFamilyIndex][queueIndex] = (VkQueue)CreateDispObjHandle();
+    }
+    // TODO: If emulating specific device caps, will need to add intelligence here
+    return;
+''',
+'vkGetDeviceQueue2': '''
+    GetDeviceQueue(device, pQueueInfo->queueFamilyIndex, pQueueInfo->queueIndex, pQueue);
+    // TODO: Add further support for GetDeviceQueue2 features
+''',
+'vkEnumerateInstanceLayerProperties': '''
+    return VK_SUCCESS;
+''',
+'vkEnumerateDeviceLayerProperties': '''
+    return VK_SUCCESS;
+''',
+'vkEnumerateInstanceExtensionProperties': '''
+    // If requesting number of extensions, return that
+    if (!pLayerName) {
+        if (!pProperties) {
+            *pPropertyCount = (uint32_t)instance_extension_map.size();
+        } else {
+            uint32_t i = 0;
+            for (const auto &name_ver_pair : instance_extension_map) {
+                if (i == *pPropertyCount) {
+                    break;
+                }
+                std::strncpy(pProperties[i].extensionName, name_ver_pair.first.c_str(), sizeof(pProperties[i].extensionName));
+                pProperties[i].extensionName[sizeof(pProperties[i].extensionName) - 1] = 0;
+                pProperties[i].specVersion = name_ver_pair.second;
+                ++i;
+            }
+            if (i != instance_extension_map.size()) {
+                return VK_INCOMPLETE;
+            }
+        }
+    }
+    // If requesting extension properties, fill in data struct for number of extensions
+    return VK_SUCCESS;
+''',
+'vkEnumerateDeviceExtensionProperties': '''
+    // If requesting number of extensions, return that
+    if (!pLayerName) {
+        if (!pProperties) {
+            *pPropertyCount = (uint32_t)device_extension_map.size();
+        } else {
+            uint32_t i = 0;
+            for (const auto &name_ver_pair : device_extension_map) {
+                if (i == *pPropertyCount) {
+                    break;
+                }
+                std::strncpy(pProperties[i].extensionName, name_ver_pair.first.c_str(), sizeof(pProperties[i].extensionName));
+                pProperties[i].extensionName[sizeof(pProperties[i].extensionName) - 1] = 0;
+                pProperties[i].specVersion = name_ver_pair.second;
+                ++i;
+            }
+            if (i != device_extension_map.size()) {
+                return VK_INCOMPLETE;
+            }
+        }
+    }
+    // If requesting extension properties, fill in data struct for number of extensions
+    return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceSurfacePresentModesKHR': '''
+    // Currently always say that all present modes are supported
+    if (!pPresentModes) {
+        *pPresentModeCount = 6;
+    } else {
+        // Intentionally falling through and just filling however many modes are requested
+        switch(*pPresentModeCount) {
+        case 6:
+            pPresentModes[5] = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR;
+            // fall through
+        case 5:
+            pPresentModes[4] = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR;
+            // fall through
+        case 4:
+            pPresentModes[3] = VK_PRESENT_MODE_FIFO_RELAXED_KHR;
+            // fall through
+        case 3:
+            pPresentModes[2] = VK_PRESENT_MODE_FIFO_KHR;
+            // fall through
+        case 2:
+            pPresentModes[1] = VK_PRESENT_MODE_MAILBOX_KHR;
+            // fall through
+        default:
+            pPresentModes[0] = VK_PRESENT_MODE_IMMEDIATE_KHR;
+            break;
+        }
+    }
+    return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceSurfaceFormatsKHR': '''
+    // Currently always say that RGBA8 & BGRA8 are supported
+    if (!pSurfaceFormats) {
+        *pSurfaceFormatCount = 2;
+    } else {
+        // Intentionally falling through and just filling however many types are requested
+        switch(*pSurfaceFormatCount) {
+        case 2:
+            pSurfaceFormats[1].format = VK_FORMAT_R8G8B8A8_UNORM;
+            pSurfaceFormats[1].colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
+            // fall through
+        default:
+            pSurfaceFormats[0].format = VK_FORMAT_B8G8R8A8_UNORM;
+            pSurfaceFormats[0].colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
+            break;
+        }
+    }
+    return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceSurfaceFormats2KHR': '''
+    // Currently always say that RGBA8 & BGRA8 are supported
+    if (!pSurfaceFormats) {
+        *pSurfaceFormatCount = 2;
+    } else {
+        // Intentionally falling through and just filling however many types are requested
+        switch(*pSurfaceFormatCount) {
+        case 2:
+            pSurfaceFormats[1].pNext = nullptr;
+            pSurfaceFormats[1].surfaceFormat.format = VK_FORMAT_R8G8B8A8_UNORM;
+            pSurfaceFormats[1].surfaceFormat.colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
+            // fall through
+        default:
+            pSurfaceFormats[0].pNext = nullptr;
+            pSurfaceFormats[0].surfaceFormat.format = VK_FORMAT_B8G8R8A8_UNORM;
+            pSurfaceFormats[0].surfaceFormat.colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
+            break;
+        }
+    }
+    return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceSurfaceSupportKHR': '''
+    // Currently say that all surface/queue combos are supported
+    *pSupported = VK_TRUE;
+    return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceSurfaceCapabilitiesKHR': '''
+    // In general just say max supported is available for requested surface
+    pSurfaceCapabilities->minImageCount = 1;
+    pSurfaceCapabilities->maxImageCount = 0;
+    pSurfaceCapabilities->currentExtent.width = 0xFFFFFFFF;
+    pSurfaceCapabilities->currentExtent.height = 0xFFFFFFFF;
+    pSurfaceCapabilities->minImageExtent.width = 1;
+    pSurfaceCapabilities->minImageExtent.height = 1;
+    pSurfaceCapabilities->maxImageExtent.width = 3840;
+    pSurfaceCapabilities->maxImageExtent.height = 2160;
+    pSurfaceCapabilities->maxImageArrayLayers = 128;
+    pSurfaceCapabilities->supportedTransforms = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR |
+                                                VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR |
+                                                VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR |
+                                                VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR |
+                                                VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR |
+                                                VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR |
+                                                VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR |
+                                                VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR |
+                                                VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR;
+    pSurfaceCapabilities->currentTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
+    pSurfaceCapabilities->supportedCompositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR |
+                                                    VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR |
+                                                    VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR |
+                                                    VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR;
+    pSurfaceCapabilities->supportedUsageFlags = VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
+                                                VK_IMAGE_USAGE_TRANSFER_DST_BIT |
+                                                VK_IMAGE_USAGE_SAMPLED_BIT |
+                                                VK_IMAGE_USAGE_STORAGE_BIT |
+                                                VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
+                                                VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT |
+                                                VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT |
+                                                VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
+    return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceSurfaceCapabilities2KHR': '''
+    GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, pSurfaceInfo->surface, &pSurfaceCapabilities->surfaceCapabilities);
+    return VK_SUCCESS;
+''',
+'vkGetInstanceProcAddr': '''
+    if (!negotiate_loader_icd_interface_called) {
+        loader_interface_version = 0;
+    }
+    const auto &item = name_to_funcptr_map.find(pName);
+    if (item != name_to_funcptr_map.end()) {
+        return reinterpret_cast<PFN_vkVoidFunction>(item->second);
+    }
+    // Mock should intercept all functions so if we get here just return null
+    return nullptr;
+''',
+'vkGetDeviceProcAddr': '''
+    return GetInstanceProcAddr(nullptr, pName);
+''',
+'vkGetPhysicalDeviceMemoryProperties': '''
+    pMemoryProperties->memoryTypeCount = 2;
+    pMemoryProperties->memoryTypes[0].propertyFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+    pMemoryProperties->memoryTypes[0].heapIndex = 0;
+    pMemoryProperties->memoryTypes[1].propertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+    pMemoryProperties->memoryTypes[1].heapIndex = 1;
+    pMemoryProperties->memoryHeapCount = 2;
+    pMemoryProperties->memoryHeaps[0].flags = 0;
+    pMemoryProperties->memoryHeaps[0].size = 8000000000;
+    pMemoryProperties->memoryHeaps[1].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT;
+    pMemoryProperties->memoryHeaps[1].size = 8000000000;
+''',
+'vkGetPhysicalDeviceMemoryProperties2KHR': '''
+    GetPhysicalDeviceMemoryProperties(physicalDevice, &pMemoryProperties->memoryProperties);
+''',
+'vkGetPhysicalDeviceQueueFamilyProperties': '''
+    if (!pQueueFamilyProperties) {
+        *pQueueFamilyPropertyCount = 1;
+    } else {
+        if (*pQueueFamilyPropertyCount) {
+            pQueueFamilyProperties[0].queueFlags = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT | VK_QUEUE_SPARSE_BINDING_BIT;
+            pQueueFamilyProperties[0].queueCount = 1;
+            pQueueFamilyProperties[0].timestampValidBits = 0;
+            pQueueFamilyProperties[0].minImageTransferGranularity = {1,1,1};
+        }
+    }
+''',
+'vkGetPhysicalDeviceQueueFamilyProperties2KHR': '''
+    if (pQueueFamilyPropertyCount && pQueueFamilyProperties) {
+        GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, &pQueueFamilyProperties->queueFamilyProperties);
+    } else {
+        GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, nullptr);
+    }
+''',
+'vkGetPhysicalDeviceFeatures': '''
+    uint32_t num_bools = sizeof(VkPhysicalDeviceFeatures) / sizeof(VkBool32);
+    VkBool32 *bool_array = &pFeatures->robustBufferAccess;
+    SetBoolArrayTrue(bool_array, num_bools);
+''',
+'vkGetPhysicalDeviceFeatures2KHR': '''
+    GetPhysicalDeviceFeatures(physicalDevice, &pFeatures->features);
+    uint32_t num_bools = 0; // Count number of VkBool32s in extension structs
+    VkBool32* feat_bools = nullptr;
+    const auto *desc_idx_features = lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>(pFeatures->pNext);
+    if (desc_idx_features) {
+        const auto bool_size = sizeof(VkPhysicalDeviceDescriptorIndexingFeaturesEXT) - offsetof(VkPhysicalDeviceDescriptorIndexingFeaturesEXT, shaderInputAttachmentArrayDynamicIndexing);
+        num_bools = bool_size/sizeof(VkBool32);
+        feat_bools = (VkBool32*)&desc_idx_features->shaderInputAttachmentArrayDynamicIndexing;
+        SetBoolArrayTrue(feat_bools, num_bools);
+    }
+    const auto *blendop_features = lvl_find_in_chain<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT>(pFeatures->pNext);
+    if (blendop_features) {
+        const auto bool_size = sizeof(VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT) - offsetof(VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT, advancedBlendCoherentOperations);
+        num_bools = bool_size/sizeof(VkBool32);
+        feat_bools = (VkBool32*)&blendop_features->advancedBlendCoherentOperations;
+        SetBoolArrayTrue(feat_bools, num_bools);
+    }
+''',
+'vkGetPhysicalDeviceFormatProperties': '''
+    if (VK_FORMAT_UNDEFINED == format) {
+        *pFormatProperties = { 0x0, 0x0, 0x0 };
+    } else {
+        // TODO: Just returning full support for everything initially
+        *pFormatProperties = { 0x00FFFFFF, 0x00FFFFFF, 0x00FFFFFF };
+    }
+''',
+'vkGetPhysicalDeviceFormatProperties2KHR': '''
+    GetPhysicalDeviceFormatProperties(physicalDevice, format, &pFormatProperties->formatProperties);
+''',
+'vkGetPhysicalDeviceImageFormatProperties': '''
+    // A hardcoded unsupported format
+    if (format == VK_FORMAT_E5B9G9R9_UFLOAT_PACK32) {
+        return VK_ERROR_FORMAT_NOT_SUPPORTED;
+    }
+
+    // TODO: Just hard-coding some values for now
+    // TODO: If tiling is linear, limit the mips, levels, & sample count
+    if (VK_IMAGE_TILING_LINEAR == tiling) {
+        *pImageFormatProperties = { { 4096, 4096, 256 }, 1, 1, VK_SAMPLE_COUNT_1_BIT, 4294967296 };
+    } else {
+        // We hard-code support for all sample counts except 64 bits.
+        *pImageFormatProperties = { { 4096, 4096, 256 }, 12, 256, 0x7F & ~VK_SAMPLE_COUNT_64_BIT, 4294967296 };
+    }
+    return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceImageFormatProperties2KHR': '''
+    GetPhysicalDeviceImageFormatProperties(physicalDevice, pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling, pImageFormatInfo->usage, pImageFormatInfo->flags, &pImageFormatProperties->imageFormatProperties);
+    return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceProperties': '''
+    // TODO: Just hard-coding some values for now
+    pProperties->apiVersion = VK_API_VERSION_1_0;
+    pProperties->driverVersion = 1;
+    pProperties->vendorID = 0xba5eba11;
+    pProperties->deviceID = 0xf005ba11;
+    pProperties->deviceType = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU;
+    //std::string devName = "Vulkan Mock Device";
+    strcpy(pProperties->deviceName, "Vulkan Mock Device");
+    pProperties->pipelineCacheUUID[0] = 18;
+    pProperties->limits = SetLimits(&pProperties->limits);
+    pProperties->sparseProperties = { VK_TRUE, VK_TRUE, VK_TRUE, VK_TRUE, VK_TRUE };
+''',
+'vkGetPhysicalDeviceProperties2KHR': '''
+    GetPhysicalDeviceProperties(physicalDevice, &pProperties->properties);
+    const auto *desc_idx_props = lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingPropertiesEXT>(pProperties->pNext);
+    if (desc_idx_props) {
+        VkPhysicalDeviceDescriptorIndexingPropertiesEXT* write_props = (VkPhysicalDeviceDescriptorIndexingPropertiesEXT*)desc_idx_props;
+        write_props->maxUpdateAfterBindDescriptorsInAllPools = 500000;
+        write_props->shaderUniformBufferArrayNonUniformIndexingNative = false;
+        write_props->shaderSampledImageArrayNonUniformIndexingNative = false;
+        write_props->shaderStorageBufferArrayNonUniformIndexingNative = false;
+        write_props->shaderStorageImageArrayNonUniformIndexingNative = false;
+        write_props->shaderInputAttachmentArrayNonUniformIndexingNative = false;
+        write_props->robustBufferAccessUpdateAfterBind = true;
+        write_props->quadDivergentImplicitLod = true;
+        write_props->maxPerStageDescriptorUpdateAfterBindSamplers = 500000;
+        write_props->maxPerStageDescriptorUpdateAfterBindUniformBuffers = 500000;
+        write_props->maxPerStageDescriptorUpdateAfterBindStorageBuffers = 500000;
+        write_props->maxPerStageDescriptorUpdateAfterBindSampledImages = 500000;
+        write_props->maxPerStageDescriptorUpdateAfterBindStorageImages = 500000;
+        write_props->maxPerStageDescriptorUpdateAfterBindInputAttachments = 500000;
+        write_props->maxPerStageUpdateAfterBindResources = 500000;
+        write_props->maxDescriptorSetUpdateAfterBindSamplers = 500000;
+        write_props->maxDescriptorSetUpdateAfterBindUniformBuffers = 96;
+        write_props->maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = 8;
+        write_props->maxDescriptorSetUpdateAfterBindStorageBuffers = 500000;
+        write_props->maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = 4;
+        write_props->maxDescriptorSetUpdateAfterBindSampledImages = 500000;
+        write_props->maxDescriptorSetUpdateAfterBindStorageImages = 500000;
+        write_props->maxDescriptorSetUpdateAfterBindInputAttachments = 500000;
+    }
+
+    const auto *push_descriptor_props = lvl_find_in_chain<VkPhysicalDevicePushDescriptorPropertiesKHR>(pProperties->pNext);
+    if (push_descriptor_props) {
+        VkPhysicalDevicePushDescriptorPropertiesKHR* write_props = (VkPhysicalDevicePushDescriptorPropertiesKHR*)push_descriptor_props;
+        write_props->maxPushDescriptors = 256;
+    }
+
+    const auto *depth_stencil_resolve_props = lvl_find_in_chain<VkPhysicalDeviceDepthStencilResolvePropertiesKHR>(pProperties->pNext);
+    if (depth_stencil_resolve_props) {
+        VkPhysicalDeviceDepthStencilResolvePropertiesKHR* write_props = (VkPhysicalDeviceDepthStencilResolvePropertiesKHR*)depth_stencil_resolve_props;
+        write_props->supportedDepthResolveModes = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR;
+        write_props->supportedStencilResolveModes = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR;
+    }
+''',
+'vkGetPhysicalDeviceExternalSemaphoreProperties':'''
+    // Hard code support for all handle types and features
+    pExternalSemaphoreProperties->exportFromImportedHandleTypes = 0x1F;
+    pExternalSemaphoreProperties->compatibleHandleTypes = 0x1F;
+    pExternalSemaphoreProperties->externalSemaphoreFeatures = 0x3;
+''',
+'vkGetPhysicalDeviceExternalSemaphorePropertiesKHR':'''
+    GetPhysicalDeviceExternalSemaphoreProperties(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
+''',
+'vkGetPhysicalDeviceExternalFenceProperties':'''
+    // Hard-code support for all handle types and features
+    pExternalFenceProperties->exportFromImportedHandleTypes = 0xF;
+    pExternalFenceProperties->compatibleHandleTypes = 0xF;
+    pExternalFenceProperties->externalFenceFeatures = 0x3;
+''',
+'vkGetPhysicalDeviceExternalFencePropertiesKHR':'''
+    GetPhysicalDeviceExternalFenceProperties(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
+''',
+'vkGetPhysicalDeviceExternalBufferProperties':'''
+    // Hard-code support for all handle types and features
+    pExternalBufferProperties->externalMemoryProperties.externalMemoryFeatures = 0x7;
+    pExternalBufferProperties->externalMemoryProperties.exportFromImportedHandleTypes = 0x1FF;
+    pExternalBufferProperties->externalMemoryProperties.compatibleHandleTypes = 0x1FF;
+''',
+'vkGetPhysicalDeviceExternalBufferPropertiesKHR':'''
+    GetPhysicalDeviceExternalBufferProperties(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
+''',
+'vkGetBufferMemoryRequirements': '''
+    // TODO: Just hard-coding reqs for now
+    pMemoryRequirements->size = 4096;
+    pMemoryRequirements->alignment = 1;
+    pMemoryRequirements->memoryTypeBits = 0xFFFF;
+    // Return a better size based on the buffer size from the create info.
+    auto d_iter = buffer_map.find(device);
+    if (d_iter != buffer_map.end()) {
+        auto iter = d_iter->second.find(buffer);
+        if (iter != d_iter->second.end()) {
+            pMemoryRequirements->size = ((iter->second.size + 4095) / 4096) * 4096;
+        }
+    }
+''',
+'vkGetBufferMemoryRequirements2KHR': '''
+    GetBufferMemoryRequirements(device, pInfo->buffer, &pMemoryRequirements->memoryRequirements);
+''',
+'vkGetImageMemoryRequirements': '''
+    // TODO: Just hard-coding reqs for now
+    pMemoryRequirements->size = 4096;
+    pMemoryRequirements->alignment = 1;
+
+    // Here we hard-code that the memory type at index 3 doesn't support this image.
+    pMemoryRequirements->memoryTypeBits = 0xFFFF & ~(0x1 << 3);
+''',
+'vkGetImageMemoryRequirements2KHR': '''
+    GetImageMemoryRequirements(device, pInfo->image, &pMemoryRequirements->memoryRequirements);
+''',
+'vkMapMemory': '''
+    unique_lock_t lock(global_lock);
+    // TODO: Just hard-coding 64k whole size for now
+    if (VK_WHOLE_SIZE == size)
+        size = 0x10000;
+    void* map_addr = malloc((size_t)size);
+    mapped_memory_map[memory].push_back(map_addr);
+    *ppData = map_addr;
+    return VK_SUCCESS;
+''',
+'vkUnmapMemory': '''
+    unique_lock_t lock(global_lock);
+    for (auto map_addr : mapped_memory_map[memory]) {
+        free(map_addr);
+    }
+    mapped_memory_map.erase(memory);
+''',
+'vkGetImageSubresourceLayout': '''
+    // Need safe values. Callers are computing memory offsets from pLayout, with no return code to flag failure. 
+    *pLayout = VkSubresourceLayout(); // Default constructor zero values.
+''',
+'vkGetSwapchainImagesKHR': '''
+    if (!pSwapchainImages) {
+        *pSwapchainImageCount = 1;
+    } else if (*pSwapchainImageCount > 0) {
+        pSwapchainImages[0] = (VkImage)global_unique_handle++;
+        if (*pSwapchainImageCount != 1) {
+            return VK_INCOMPLETE;
+        }
+    }
+    return VK_SUCCESS;
+''',
+'vkAcquireNextImageKHR': '''
+    *pImageIndex = 0;
+    return VK_SUCCESS;
+''',
+'vkCreateBuffer': '''
+    unique_lock_t lock(global_lock);
+    *pBuffer = (VkBuffer)global_unique_handle++;
+    buffer_map[device][*pBuffer] = *pCreateInfo;
+    return VK_SUCCESS;
+''',
+'vkDestroyBuffer': '''
+    unique_lock_t lock(global_lock);
+    buffer_map[device].erase(buffer);
+''',
+}
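+
+# CUSTOM_C_INTERCEPTS maps a Vulkan entry point name to the hand-written body
+# emitted for that function; commands that are not listed here are left to the
+# generator's default handling. Adding another override is just one more
+# entry, e.g. (illustrative only, not part of the table above):
+#
+#   CUSTOM_C_INTERCEPTS['vkDeviceWaitIdle'] = '''
+#       // Nothing is ever pending in the mock ICD
+#       return VK_SUCCESS;
+#   '''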
+
+# MockICDGeneratorOptions - subclass of GeneratorOptions.
+#
+# Adds options used by MockICDOutputGenerator objects during Mock
+# ICD generation.
+#
+# Additional members
+#   prefixText - list of strings to prefix generated header with
+#     (usually a copyright statement + calling convention macros).
+#   protectFile - True if multiple inclusion protection should be
+#     generated (based on the filename) around the entire header.
+#   protectFeature - True if #ifndef..#endif protection should be
+#     generated around a feature interface in the header file.
+#   genFuncPointers - True if function pointer typedefs should be
+#     generated
+#   protectProto - If conditional protection should be generated
+#     around prototype declarations, set to either '#ifdef'
+#     to require opt-in (#ifdef protectProtoStr) or '#ifndef'
+#     to require opt-out (#ifndef protectProtoStr). Otherwise
+#     set to None.
+#   protectProtoStr - #ifdef/#ifndef symbol to use around prototype
+#     declarations, if protectProto is set
+#   apicall - string to use for the function declaration prefix,
+#     such as APICALL on Windows.
+#   apientry - string to use for the calling convention macro,
+#     in typedefs, such as APIENTRY.
+#   apientryp - string to use for the calling convention macro
+#     in function pointer typedefs, such as APIENTRYP.
+#   indentFuncProto - True if prototype declarations should put each
+#     parameter on a separate line
+#   indentFuncPointer - True if typedefed function pointers should put each
+#     parameter on a separate line
+#   alignFuncParam - if nonzero and parameters are being put on a
+#     separate line, align parameter names at the specified column
+class MockICDGeneratorOptions(GeneratorOptions):
+    def __init__(self,
+                 conventions = None,
+                 filename = None,
+                 directory = '.',
+                 apiname = None,
+                 profile = None,
+                 versions = '.*',
+                 emitversions = '.*',
+                 defaultExtensions = None,
+                 addExtensions = None,
+                 removeExtensions = None,
+                 emitExtensions = None,
+                 sortProcedure = regSortFeatures,
+                 prefixText = "",
+                 genFuncPointers = True,
+                 protectFile = True,
+                 protectFeature = True,
+                 protectProto = None,
+                 protectProtoStr = None,
+                 apicall = '',
+                 apientry = '',
+                 apientryp = '',
+                 indentFuncProto = True,
+                 indentFuncPointer = False,
+                 alignFuncParam = 0,
+                 expandEnumerants = True,
+                 helper_file_type = ''):
+        GeneratorOptions.__init__(self, conventions, filename, directory, apiname, profile,
+                                  versions, emitversions, defaultExtensions,
+                                  addExtensions, removeExtensions, emitExtensions, sortProcedure)
+        self.prefixText      = prefixText
+        self.genFuncPointers = genFuncPointers
+        self.protectFile     = protectFile
+        self.protectFeature  = protectFeature
+        self.protectProto    = protectProto
+        self.protectProtoStr = protectProtoStr
+        self.apicall         = apicall
+        self.apientry        = apientry
+        self.apientryp       = apientryp
+        self.indentFuncProto = indentFuncProto
+        self.indentFuncPointer = indentFuncPointer
+        self.alignFuncParam  = alignFuncParam
+
+# MockICDOutputGenerator - subclass of OutputGenerator.
+# Generates a mock vulkan ICD.
+#  This is intended to be a minimal replacement for a vulkan device in order
+#  to enable Vulkan Validation testing.
+#
+# ---- methods ----
+# MockOutputGenerator(errFile, warnFile, diagFile) - args as for
+#   OutputGenerator. Defines additional internal state.
+# ---- methods overriding base class ----
+# beginFile(genOpts)
+# endFile()
+# beginFeature(interface, emit)
+# endFeature()
+# genType(typeinfo,name)
+# genStruct(typeinfo,name)
+# genGroup(groupinfo,name)
+# genEnum(enuminfo, name)
+# genCmd(cmdinfo)
+class MockICDOutputGenerator(OutputGenerator):
+    """Generate specified API interfaces in a specific style, such as a C header"""
+    # This is an ordered list of sections in the header file.
+    TYPE_SECTIONS = ['include', 'define', 'basetype', 'handle', 'enum',
+                     'group', 'bitmask', 'funcpointer', 'struct']
+    ALL_SECTIONS = TYPE_SECTIONS + ['command']
+    def __init__(self,
+                 errFile = sys.stderr,
+                 warnFile = sys.stderr,
+                 diagFile = sys.stdout):
+        OutputGenerator.__init__(self, errFile, warnFile, diagFile)
+        # Internal state - accumulators for different inner block text
+        self.sections = dict([(section, []) for section in self.ALL_SECTIONS])
+        self.intercepts = []
+
+    # Check if the parameter passed in is a pointer to an array
+    def paramIsArray(self, param):
+        return param.attrib.get('len') is not None
+
+    # Check if the parameter passed in is a pointer
+    def paramIsPointer(self, param):
+        ispointer = False
+        for elem in param:
+            if ((elem.tag != 'type') and (elem.tail is not None)) and '*' in elem.tail:
+                ispointer = True
+        return ispointer
+
+    # Check if an object is a non-dispatchable handle
+    def isHandleTypeNonDispatchable(self, handletype):
+        handle = self.registry.tree.find("types/type/[name='" + handletype + "'][@category='handle']")
+        if handle is not None and handle.find('type').text == 'VK_DEFINE_NON_DISPATCHABLE_HANDLE':
+            return True
+        else:
+            return False
+
+    # Check if an object is a dispatchable handle
+    def isHandleTypeDispatchable(self, handletype):
+        handle = self.registry.tree.find("types/type/[name='" + handletype + "'][@category='handle']")
+        if handle is not None and handle.find('type').text == 'VK_DEFINE_HANDLE':
+            return True
+        else:
+            return False
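+
+    # For reference, the vk.xml handle entries these two helpers match look
+    # roughly like the following (abridged; shown only as an illustration):
+    #
+    #   <type category="handle"><type>VK_DEFINE_HANDLE</type>(<name>VkDevice</name>)</type>
+    #   <type category="handle"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkFence</name>)</type>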
+
+    def beginFile(self, genOpts):
+        OutputGenerator.beginFile(self, genOpts)
+        # C-specific
+        #
+        # Multiple inclusion protection & C++ namespace.
+        self.header = False
+        if (genOpts.protectFile and self.genOpts.filename and 'h' == self.genOpts.filename[-1]):
+            self.header = True
+            headerSym = '__' + re.sub(r'\.h', '_h_', os.path.basename(self.genOpts.filename))
+            write('#ifndef', headerSym, file=self.outFile)
+            write('#define', headerSym, '1', file=self.outFile)
+            self.newline()
+        #
+        # User-supplied prefix text, if any (list of strings)
+        if (genOpts.prefixText):
+            for s in genOpts.prefixText:
+                write(s, file=self.outFile)
+        if self.header:
+            write('#include <unordered_map>', file=self.outFile)
+            write('#include <mutex>', file=self.outFile)
+            write('#include <string>', file=self.outFile)
+            write('#include <cstring>', file=self.outFile)
+            write('#include "vulkan/vk_icd.h"', file=self.outFile)
+        else:
+            write('#include "mock_icd.h"', file=self.outFile)
+            write('#include <stdlib.h>', file=self.outFile)
+            write('#include <vector>', file=self.outFile)
+            write('#include "vk_typemap_helper.h"', file=self.outFile)
+
+        write('namespace vkmock {', file=self.outFile)
+        if self.header:
+            self.newline()
+            write(HEADER_C_CODE, file=self.outFile)
+            # Include all of the extensions in ICD except specific ignored ones
+            device_exts = []
+            instance_exts = []
+            # Ignore extensions that ICDs should not implement or are not safe to report
+            ignore_exts = ['VK_EXT_validation_cache']
+            for ext in self.registry.tree.findall("extensions/extension"):
+                if ext.attrib['supported'] != 'disabled': # Only include enabled extensions
+                    if (ext.attrib['name'] in ignore_exts):
+                        pass
+                    elif (ext.attrib.get('type') and 'instance' == ext.attrib['type']):
+                        instance_exts.append('    {"%s", %s},' % (ext.attrib['name'], ext[0][0].attrib['value']))
+                    else:
+                        device_exts.append('    {"%s", %s},' % (ext.attrib['name'], ext[0][0].attrib['value']))
+            write('// Map of instance extension name to version', file=self.outFile)
+            write('static const std::unordered_map<std::string, uint32_t> instance_extension_map = {', file=self.outFile)
+            write('\n'.join(instance_exts), file=self.outFile)
+            write('};', file=self.outFile)
+            write('// Map of device extension name to version', file=self.outFile)
+            write('static const std::unordered_map<std::string, uint32_t> device_extension_map = {', file=self.outFile)
+            write('\n'.join(device_exts), file=self.outFile)
+            write('};', file=self.outFile)
+
+        else:
+            self.newline()
+            write(SOURCE_CPP_PREFIX, file=self.outFile)
+
+    def endFile(self):
+        # C-specific
+        # Finish C++ namespace and multiple inclusion protection
+        self.newline()
+        if self.header:
+            # record intercepted procedures
+            write('// Map of all APIs to be intercepted by this layer', file=self.outFile)
+            write('static const std::unordered_map<std::string, void*> name_to_funcptr_map = {', file=self.outFile)
+            write('\n'.join(self.intercepts), file=self.outFile)
+            write('};\n', file=self.outFile)
+            self.newline()
+            write('} // namespace vkmock', file=self.outFile)
+            self.newline()
+            write('#endif', file=self.outFile)
+        else: # Loader-layer-interface, need to implement global interface functions
+            write(SOURCE_CPP_POSTFIX, file=self.outFile)
+        # Finish processing in superclass
+        OutputGenerator.endFile(self)
+    def beginFeature(self, interface, emit):
+        #write('// starting beginFeature', file=self.outFile)
+        # Start processing in superclass
+        OutputGenerator.beginFeature(self, interface, emit)
+        self.featureExtraProtect = GetFeatureProtect(interface)
+        # C-specific
+        # Accumulate includes, defines, types, enums, function pointer typedefs,
+        # and function prototypes separately for this feature. They're only
+        # printed in endFeature().
+        self.sections = dict([(section, []) for section in self.ALL_SECTIONS])
+        #write('// ending beginFeature', file=self.outFile)
+    def endFeature(self):
+        # C-specific
+        # Actually write the interface to the output file.
+        #write('// starting endFeature', file=self.outFile)
+        if (self.emit):
+            self.newline()
+            if (self.genOpts.protectFeature):
+                write('#ifndef', self.featureName, file=self.outFile)
+            # If type declarations are needed by other features based on
+            # this one, it may be necessary to suppress the ExtraProtect,
+            # or move it below the 'for section...' loop.
+            #write('// endFeature looking at self.featureExtraProtect', file=self.outFile)
+            if (self.featureExtraProtect != None):
+                write('#ifdef', self.featureExtraProtect, file=self.outFile)
+            #write('#define', self.featureName, '1', file=self.outFile)
+            for section in self.TYPE_SECTIONS:
+                #write('// endFeature writing section'+section, file=self.outFile)
+                contents = self.sections[section]
+                if contents:
+                    write('\n'.join(contents), file=self.outFile)
+                    self.newline()
+            #write('// endFeature looking at self.sections[command]', file=self.outFile)
+            if (self.sections['command']):
+                write('\n'.join(self.sections['command']), end=u'', file=self.outFile)
+                self.newline()
+            if (self.featureExtraProtect != None):
+                write('#endif /*', self.featureExtraProtect, '*/', file=self.outFile)
+            if (self.genOpts.protectFeature):
+                write('#endif /*', self.featureName, '*/', file=self.outFile)
+        # Finish processing in superclass
+        OutputGenerator.endFeature(self)
+        #write('// ending endFeature', file=self.outFile)
+    #
+    # Append a definition to the specified section
+    def appendSection(self, section, text):
+        # self.sections[section].append('SECTION: ' + section + '\n')
+        self.sections[section].append(text)
+    #
+    # Type generation
+    def genType(self, typeinfo, name, alias):
+        pass
+    #
+    # Struct (e.g. C "struct" type) generation.
+    # This is a special case of the <type> tag where the contents are
+    # interpreted as a set of <member> tags instead of freeform C
+    # type declarations. The <member> tags are just like <param>
+    # tags - they are a declaration of a struct or union member.
+    # Only simple member declarations are supported (no nested
+    # structs etc.)
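+    # For illustration, a simple two-member struct such as VkExtent2D is
+    # emitted roughly as (member spacing depends on alignFuncParam):
+    #     typedef struct VkExtent2D {
+    #         uint32_t width;
+    #         uint32_t height;
+    #     } VkExtent2D;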
+    def genStruct(self, typeinfo, typeName, alias):
+        OutputGenerator.genStruct(self, typeinfo, typeName, alias)
+        body = 'typedef ' + typeinfo.elem.get('category') + ' ' + typeName + ' {\n'
+        # paramdecl = self.makeCParamDecl(typeinfo.elem, self.genOpts.alignFuncParam)
+        for member in typeinfo.elem.findall('.//member'):
+            body += self.makeCParamDecl(member, self.genOpts.alignFuncParam)
+            body += ';\n'
+        body += '} ' + typeName + ';\n'
+        self.appendSection('struct', body)
+    #
+    # Group (e.g. C "enum" type) generation.
+    # These are concatenated together with other types.
+    def genGroup(self, groupinfo, groupName, alias):
+        pass
+    # Enumerant generation
+    # <enum> tags may specify their values in several ways, but are usually
+    # just integers.
+    def genEnum(self, enuminfo, name, alias):
+        pass
+    #
+    # Command generation
+    def genCmd(self, cmdinfo, name, alias):
+        decls = self.makeCDecls(cmdinfo.elem)
+        if self.header: # In the header declare all intercepts
+            self.appendSection('command', '')
+            self.appendSection('command', 'static %s' % (decls[0]))
+            if (self.featureExtraProtect != None):
+                self.intercepts += [ '#ifdef %s' % self.featureExtraProtect ]
+            self.intercepts += [ '    {"%s", (void*)%s},' % (name,name[2:]) ]
+            if (self.featureExtraProtect != None):
+                self.intercepts += [ '#endif' ]
+            return
+
+        manual_functions = [
+            # Include functions here to be intercepted w/ manually implemented function bodies
+            'vkGetDeviceProcAddr',
+            'vkGetInstanceProcAddr',
+            'vkCreateDevice',
+            'vkDestroyDevice',
+            'vkCreateInstance',
+            'vkDestroyInstance',
+            #'vkCreateDebugReportCallbackEXT',
+            #'vkDestroyDebugReportCallbackEXT',
+            'vkEnumerateInstanceLayerProperties',
+            'vkEnumerateInstanceExtensionProperties',
+            'vkEnumerateDeviceLayerProperties',
+            'vkEnumerateDeviceExtensionProperties',
+        ]
+        if name in manual_functions:
+            self.appendSection('command', '')
+            if name not in CUSTOM_C_INTERCEPTS:
+                self.appendSection('command', '// declare only')
+                self.appendSection('command', 'static %s' % (decls[0]))
+                self.appendSection('command', '// TODO: Implement custom intercept body')
+            else:
+                self.appendSection('command', 'static %s' % (decls[0][:-1]))
+                self.appendSection('command', '{\n%s}' % (CUSTOM_C_INTERCEPTS[name]))
+            self.intercepts += [ '    {"%s", (void*)%s},' % (name,name[2:]) ]
+            return
+        # record that the function will be intercepted
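+        # (map entries take the form '    {"vkCreateImage", (void*)CreateImage},')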
+        if (self.featureExtraProtect != None):
+            self.intercepts += [ '#ifdef %s' % self.featureExtraProtect ]
+        self.intercepts += [ '    {"%s", (void*)%s},' % (name,name[2:]) ]
+        if (self.featureExtraProtect != None):
+            self.intercepts += [ '#endif' ]
+
+        OutputGenerator.genCmd(self, cmdinfo, name, alias)
+        #
+        self.appendSection('command', '')
+        self.appendSection('command', 'static %s' % (decls[0][:-1]))
+        if name in CUSTOM_C_INTERCEPTS:
+            self.appendSection('command', '{%s}' % (CUSTOM_C_INTERCEPTS[name]))
+            return
+
+        # Declare result variable, if any.
+        resulttype = cmdinfo.elem.find('proto/type')
+        if (resulttype != None and resulttype.text == 'void'):
+            resulttype = None
+        # If the name with a KHR postfix is in CUSTOM_C_INTERCEPTS,
+        # call the KHR custom version instead of generating separate code.
+        khr_name = name + "KHR"
+        if khr_name in CUSTOM_C_INTERCEPTS:
+            return_string = ''
+            if resulttype != None:
+                return_string = 'return '
+            params = cmdinfo.elem.findall('param/name')
+            param_names = []
+            for param in params:
+                param_names.append(param.text)
+            self.appendSection('command', '{\n    %s%s(%s);\n}' % (return_string, khr_name[2:], ", ".join(param_names)))
+            return
+        self.appendSection('command', '{')
+
+        api_function_name = cmdinfo.elem.attrib.get('name')
+        # GET THE TYPE OF FUNCTION
+        if any(ftxt in api_function_name for ftxt in ['Create', 'Allocate']):
+            # Get last param
+            last_param = cmdinfo.elem.findall('param')[-1]
+            lp_txt = last_param.find('name').text
+            lp_len = None
+            if ('len' in last_param.attrib):
+                lp_len = last_param.attrib['len']
+                lp_len = lp_len.replace('::', '->')
+            lp_type = last_param.find('type').text
+            handle_type = 'dispatchable'
+            allocator_txt = 'CreateDispObjHandle()'
+            if (self.isHandleTypeNonDispatchable(lp_type)):
+                handle_type = 'non-' + handle_type
+                allocator_txt = 'global_unique_handle++'
+            # Need to lock in both cases
+            self.appendSection('command', '    unique_lock_t lock(global_lock);')
+            if (lp_len != None):
+                #print("%s last params (%s) has len %s" % (handle_type, lp_txt, lp_len))
+                self.appendSection('command', '    for (uint32_t i = 0; i < %s; ++i) {' % (lp_len))
+                self.appendSection('command', '        %s[i] = (%s)%s;' % (lp_txt, lp_type, allocator_txt))
+                self.appendSection('command', '    }')
+            else:
+                #print("Single %s last param is '%s' w/ type '%s'" % (handle_type, lp_txt, lp_type))
+                self.appendSection('command', '    *%s = (%s)%s;' % (lp_txt, lp_type, allocator_txt))
+        elif any(ftxt in api_function_name for ftxt in ['Destroy', 'Free']):
+            self.appendSection('command', '//Destroy object')
+        else:
+            self.appendSection('command', '//Not a CREATE or DESTROY function')
+
+        # Return result variable, if any.
+        if (resulttype != None):
+            if api_function_name == 'vkGetEventStatus':
+                self.appendSection('command', '    return VK_EVENT_SET;')
+            else:
+                self.appendSection('command', '    return VK_SUCCESS;')
+        self.appendSection('command', '}')
+    #
+    # override makeProtoName to drop the "vk" prefix
+    def makeProtoName(self, name, tail):
+        return self.genOpts.apientry + name[2:] + tail
diff --git a/src/third_party/vulkan-tools/src/scripts/update_deps.py b/src/third_party/vulkan-tools/src/scripts/update_deps.py
new file mode 100755
index 0000000..f1fe36d
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/scripts/update_deps.py
@@ -0,0 +1,679 @@
+#!/usr/bin/env python
+
+# Copyright 2017 The Glslang Authors. All rights reserved.
+# Copyright (c) 2018 Valve Corporation
+# Copyright (c) 2018 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This script was heavily leveraged from KhronosGroup/glslang
+# update_glslang_sources.py.
+"""update_deps.py
+
+Get and build dependent repositories using known-good commits.
+
+Purpose
+-------
+
+This program is intended to assist a developer of this repository
+(the "home" repository) by gathering and building the repositories that
+this home repository depends on.  It also checks out each dependent
+repository at a "known-good" commit in order to provide stability in
+the dependent repositories.
+
+Python Compatibility
+--------------------
+
+This program can be used with Python 2.7 and Python 3.
+
+Known-Good JSON Database
+------------------------
+
+This program expects to find a file named "known-good.json" in the
+same directory as the program file.  This JSON file is tailored for
+the needs of the home repository by including its dependent repositories.
+
+Program Options
+---------------
+
+See the help text (update_deps.py --help) for a complete list of options.
+
+Program Operation
+-----------------
+
+The program uses the user's current directory at the time of program
+invocation as the location for fetching and building the dependent
+repositories.  The user can override this by using the "--dir" option.
+
+For example, a directory named "build" in the repository's root directory
+is a good place to put the dependent repositories because that directory
+is not tracked by Git. (See the .gitignore file.)  The "external" directory
+may also be a suitable location.
+A user can issue:
+
+$ cd My-Repo
+$ mkdir build
+$ cd build
+$ ../scripts/update_deps.py
+
+or, to do the same thing, but using the --dir option:
+
+$ cd My-Repo
+$ mkdir build
+$ scripts/update_deps.py --dir=build
+
+With these commands, the "build" directory is considered the "top"
+directory where the program clones the dependent repositories.  The
+JSON file configures the build and install working directories to be
+within this "top" directory.
+
+Note that the "dir" option can also specify an absolute path:
+
+$ cd My-Repo
+$ scripts/update_deps.py --dir=/tmp/deps
+
+The "top" dir is then /tmp/deps (Linux filesystem example) and is
+where this program will clone and build the dependent repositories.
+
+Helper CMake Config File
+------------------------
+
+When the program finishes building the dependencies, it writes a file
+named "helper.cmake" to the "top" directory that contains CMake commands
+for setting CMake variables for locating the dependent repositories.
+This helper file can be used to set up the CMake build files for this
+"home" repository.
+
+A complete sequence might look like:
+
+$ git clone git@github.com:My-Group/My-Repo.git
+$ cd My-Repo
+$ mkdir build
+$ cd build
+$ ../scripts/update_deps.py
+$ cmake -C helper.cmake ..
+$ cmake --build .
+
+JSON File Schema
+----------------
+
+There's no formal schema for the "known-good" JSON file, but here is
+a description of its elements.  All elements are required except those
+marked as optional.  Please see the "known_good.json" file for
+examples of all of these elements.
+
+- name
+
+The name of the dependent repository.  This field can be referenced
+by the "deps.repo_name" structure to record a dependency.
+
+- url
+
+Specifies the URL of the repository.
+Example: https://github.com/KhronosGroup/Vulkan-Loader.git
+
+- sub_dir
+
+The directory where the program clones the repository, relative to
+the "top" directory.
+
+- build_dir
+
+The directory used to build the repository, relative to the "top"
+directory.
+
+- install_dir
+
+The directory used to store the installed build artifacts, relative
+to the "top" directory.
+
+- commit
+
+The commit used to check out the repository.  This can be a SHA-1
+object name or a refname used with the remote name "origin".
+For example, this field can be set to "origin/sdk-1.1.77" to
+select the end of the sdk-1.1.77 branch.
+
+- deps (optional)
+
+An array of pairs consisting of a CMake variable name and a
+repository name to specify a dependent repo and a "link" to
+that repo's install artifacts.  For example:
+
+"deps" : [
+    {
+        "var_name" : "VULKAN_HEADERS_INSTALL_DIR",
+        "repo_name" : "Vulkan-Headers"
+    }
+]
+
+which represents that this repository depends on the Vulkan-Headers
+repository and uses the VULKAN_HEADERS_INSTALL_DIR CMake variable to
+specify the location where it expects to find the Vulkan-Headers install
+directory.
+Note that the "repo_name" element must match the "name" element of some
+other repository in the JSON file.
+
+- prebuild (optional)
+- prebuild_linux (optional)  (For Linux and MacOS)
+- prebuild_windows (optional)
+
+A list of commands to execute before building a dependent repository.
+This is useful for repositories that require the execution of some
+sort of "update" script or need to clone an auxillary repository like
+googletest.
+
+The commands listed in "prebuild" are executed first, and then the
+commands for the specific platform are executed.
+
+- custom_build (optional)
+
+A list of commands to execute as a custom build instead of using
+the built-in CMake way of building. Requires "build_step" to be
+set to "custom".
+
+You can insert the following keywords into the commands listed in
+"custom_build" if they require runtime information (like whether the
+build config is "Debug" or "Release").
+
+Keywords:
+{0} reference to a dictionary of repos and their attributes
+{1} reference to the command line arguments set before start
+{2} reference to the CONFIG_MAP value of config.
+
+Example:
+{2} returns the CONFIG_MAP value of config e.g. debug -> Debug
+{1}.config returns the config variable set when you ran update_deps.py
+{0}[Vulkan-Headers][repo_root] returns the repo_root variable from
+                                   the Vulkan-Headers GoodRepo object.
+
+- cmake_options (optional)
+
+A list of options to pass to CMake during the generation phase.
+
+- ci_only (optional)
+
+A list of environment variables where one must be set to "true"
+(case-insensitive) in order for this repo to be fetched and built.
+This list can be used to specify repos that should be built only in CI.
+Typically, this list might contain "TRAVIS" and/or "APPVEYOR" because
+each of these CI systems sets an environment variable with its own
+name to "true".  Note that this could also be (ab)used to control
+the processing of the repo with any environment variable.  The default
+is an empty list, which means that the repo is always processed.
+
+- build_step (optional)
+
+Specifies whether the dependent repository should be built. This can
+have a value of 'build', 'custom', or 'skip'. The dependent repositories are
+built by default.
+
+- build_platforms (optional)
+
+A list of platforms the repository will be built on.
+Legal options include:
+"windows"
+"linux"
+"darwin"
+
+Builds on all platforms by default.
+
+Note
+----
+
+The "sub_dir", "build_dir", and "install_dir" elements are all relative
+to the effective "top" directory.  Specifying absolute paths is not
+supported.  However, the "top" directory specified with the "--dir"
+option can be a relative or absolute path.
+
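+Example Repository Entry
+------------------------
+
+For illustration only, a minimal entry in the "repos" array might look
+like the following; every value here is a placeholder, not a value from
+the real known_good.json:
+
+{
+    "name" : "Example-Repo",
+    "url" : "https://github.com/Example-Org/Example-Repo.git",
+    "sub_dir" : "Example-Repo",
+    "build_dir" : "Example-Repo/build",
+    "install_dir" : "Example-Repo/build/install",
+    "commit" : "origin/master"
+}
+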
+"""
+
+from __future__ import print_function
+
+import argparse
+import json
+import distutils.dir_util
+import os.path
+import subprocess
+import sys
+import platform
+import multiprocessing
+import shlex
+import shutil
+
+KNOWN_GOOD_FILE_NAME = 'known_good.json'
+
+CONFIG_MAP = {
+    'debug': 'Debug',
+    'release': 'Release',
+    'relwithdebinfo': 'RelWithDebInfo',
+    'minsizerel': 'MinSizeRel'
+}
+
+VERBOSE = False
+
+DEVNULL = open(os.devnull, 'wb')
+
+
+def command_output(cmd, directory, fail_ok=False):
+    """Runs a command in a directory and returns its standard output stream.
+
+    Captures the standard error stream and prints it on error.
+
+    Raises a RuntimeError if the command fails to launch or otherwise fails.
+    """
+    if VERBOSE:
+        print('In {d}: {cmd}'.format(d=directory, cmd=cmd))
+    p = subprocess.Popen(
+        cmd, cwd=directory, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    (stdout, stderr) = p.communicate()
+    if p.returncode != 0:
+        print('*** Error ***\nstderr contents:\n{}'.format(stderr))
+        if not fail_ok:
+            raise RuntimeError('Failed to run {} in {}'.format(cmd, directory))
+    if VERBOSE:
+        print(stdout)
+    return stdout
+
+class GoodRepo(object):
+    """Represents a repository at a known-good commit."""
+
+    def __init__(self, json, args):
+        """Initializes this good repo object.
+
+        Args:
+        'json':  A fully populated JSON object describing the repo.
+        'args':  Results from ArgumentParser
+        """
+        self._json = json
+        self._args = args
+        # Required JSON elements
+        self.name = json['name']
+        self.url = json['url']
+        self.sub_dir = json['sub_dir']
+        self.commit = json['commit']
+        # Optional JSON elements
+        self.build_dir = None
+        self.install_dir = None
+        if json.get('build_dir'):
+            self.build_dir = os.path.normpath(json['build_dir'])
+        if json.get('install_dir'):
+            self.install_dir = os.path.normpath(json['install_dir'])
+        self.deps = json['deps'] if ('deps' in json) else []
+        self.prebuild = json['prebuild'] if ('prebuild' in json) else []
+        self.prebuild_linux = json['prebuild_linux'] if (
+            'prebuild_linux' in json) else []
+        self.prebuild_windows = json['prebuild_windows'] if (
+            'prebuild_windows' in json) else []
+        self.custom_build = json['custom_build'] if ('custom_build' in json) else []
+        self.cmake_options = json['cmake_options'] if (
+            'cmake_options' in json) else []
+        self.ci_only = json['ci_only'] if ('ci_only' in json) else []
+        self.build_step = json['build_step'] if ('build_step' in json) else 'build'
+        self.build_platforms = json['build_platforms'] if ('build_platforms' in json) else []
+        # Absolute paths for a repo's directories
+        dir_top = os.path.abspath(args.dir)
+        self.repo_dir = os.path.join(dir_top, self.sub_dir)
+        if self.build_dir:
+            self.build_dir = os.path.join(dir_top, self.build_dir)
+        if self.install_dir:
+            self.install_dir = os.path.join(dir_top, self.install_dir)
+        # Check if platform is one to build on
+        self.on_build_platform = False
+        if self.build_platforms == [] or platform.system().lower() in self.build_platforms:
+            self.on_build_platform = True
+
+    def Clone(self):
+        distutils.dir_util.mkpath(self.repo_dir)
+        command_output(['git', 'clone', self.url, '.'], self.repo_dir)
+
+    def Fetch(self):
+        command_output(['git', 'fetch', 'origin'], self.repo_dir)
+
+    def Checkout(self):
+        print('Checking out {n} in {d}'.format(n=self.name, d=self.repo_dir))
+        if self._args.do_clean_repo:
+            shutil.rmtree(self.repo_dir, ignore_errors=True)
+        if not os.path.exists(os.path.join(self.repo_dir, '.git')):
+            self.Clone()
+        self.Fetch()
+        if len(self._args.ref):
+            command_output(['git', 'checkout', self._args.ref], self.repo_dir)
+        else:
+            command_output(['git', 'checkout', self.commit], self.repo_dir)
+        print(command_output(['git', 'status'], self.repo_dir))
+
+    def CustomPreProcess(self, cmd_str, repo_dict):
+        return cmd_str.format(repo_dict, self._args, CONFIG_MAP[self._args.config])
+
+    def PreBuild(self):
+        """Execute any prebuild steps from the repo root"""
+        for p in self.prebuild:
+            command_output(shlex.split(p), self.repo_dir)
+        if platform.system() == 'Linux' or platform.system() == 'Darwin':
+            for p in self.prebuild_linux:
+                command_output(shlex.split(p), self.repo_dir)
+        if platform.system() == 'Windows':
+            for p in self.prebuild_windows:
+                command_output(shlex.split(p), self.repo_dir)
+
+    def CustomBuild(self, repo_dict):
+        """Execute any custom_build steps from the repo root"""
+        for p in self.custom_build:
+            cmd = self.CustomPreProcess(p, repo_dict)
+            command_output(shlex.split(cmd), self.repo_dir)
+
+    def CMakeConfig(self, repos):
+        """Build CMake command for the configuration phase and execute it"""
+        if self._args.do_clean_build:
+            shutil.rmtree(self.build_dir)
+        if self._args.do_clean_install:
+            shutil.rmtree(self.install_dir)
+
+        # Create and change to build directory
+        distutils.dir_util.mkpath(self.build_dir)
+        os.chdir(self.build_dir)
+
+        cmake_cmd = [
+            'cmake', self.repo_dir,
+            '-DCMAKE_INSTALL_PREFIX=' + self.install_dir
+        ]
+
+        # For each repo this repo depends on, generate a CMake variable
+        # definition for "...INSTALL_DIR" that points to that dependent
+        # repo's install dir.
+        for d in self.deps:
+            dep_commit = [r for r in repos if r.name == d['repo_name']]
+            if len(dep_commit):
+                cmake_cmd.append('-D{var_name}={install_dir}'.format(
+                    var_name=d['var_name'],
+                    install_dir=dep_commit[0].install_dir))
+
+        # Add any CMake options
+        for option in self.cmake_options:
+            cmake_cmd.append(option)
+
+        # Set build config for single-configuration generators
+        if platform.system() == 'Linux' or platform.system() == 'Darwin':
+            cmake_cmd.append('-DCMAKE_BUILD_TYPE={config}'.format(
+                config=CONFIG_MAP[self._args.config]))
+
+        # Use the CMake -A option to select the platform architecture
+        # without needing a Visual Studio generator.
+        if platform.system() == 'Windows':
+            if self._args.arch == '64' or self._args.arch == 'x64' or self._args.arch == 'win64':
+                cmake_cmd.append('-A')
+                cmake_cmd.append('x64')
+
+        # Apply a generator, if one is specified.  This can be used to supply
+        # a specific generator for the dependent repositories to match
+        # that of the main repository.
+        if self._args.generator is not None:
+            cmake_cmd.extend(['-G', self._args.generator])
+
+        if VERBOSE:
+            print("CMake command: " + " ".join(cmake_cmd))
+
+        ret_code = subprocess.call(cmake_cmd)
+        if ret_code != 0:
+            sys.exit(ret_code)
+
+    def CMakeBuild(self):
+        """Build CMake command for the build phase and execute it"""
+        cmake_cmd = ['cmake', '--build', self.build_dir, '--target', 'install']
+        if self._args.do_clean:
+            cmake_cmd.append('--clean-first')
+
+        if platform.system() == 'Windows':
+            cmake_cmd.append('--config')
+            cmake_cmd.append(CONFIG_MAP[self._args.config])
+
+        # Speed up the build.
+        if platform.system() == 'Linux' or platform.system() == 'Darwin':
+            cmake_cmd.append('--')
+            num_make_jobs = multiprocessing.cpu_count()
+            env_make_jobs = os.environ.get('MAKE_JOBS', None)
+            if env_make_jobs is not None:
+                try:
+                    num_make_jobs = min(num_make_jobs, int(env_make_jobs))
+                except ValueError:
+                    print('warning: environment variable MAKE_JOBS has non-numeric value "{}".  '
+                          'Using {} (CPU count) instead.'.format(env_make_jobs, num_make_jobs))
+            cmake_cmd.append('-j{}'.format(num_make_jobs))
+        if platform.system() == 'Windows':
+            cmake_cmd.append('--')
+            cmake_cmd.append('/maxcpucount')
+
+        if VERBOSE:
+            print("CMake command: " + " ".join(cmake_cmd))
+
+        ret_code = subprocess.call(cmake_cmd)
+        if ret_code != 0:
+            sys.exit(ret_code)
+
+    def Build(self, repos, repo_dict):
+        """Build the dependent repo"""
+        print('Building {n} in {d}'.format(n=self.name, d=self.repo_dir))
+        print('Build dir = {b}'.format(b=self.build_dir))
+        print('Install dir = {i}\n'.format(i=self.install_dir))
+
+        # Run any prebuild commands
+        self.PreBuild()
+
+        if self.build_step == 'custom':
+            self.CustomBuild(repo_dict)
+            return
+
+        # Build and execute CMake command for creating build files
+        self.CMakeConfig(repos)
+
+        # Build and execute CMake command for the build
+        self.CMakeBuild()
+
+
+def GetGoodRepos(args):
+    """Returns the latest list of GoodRepo objects.
+
+    The known-good file is expected to be in the same
+    directory as this script unless overridden by the 'known_good_dir'
+    parameter.
+    """
+    if args.known_good_dir:
+        known_good_file = os.path.join(os.path.abspath(args.known_good_dir),
+            KNOWN_GOOD_FILE_NAME)
+    else:
+        known_good_file = os.path.join(
+            os.path.dirname(os.path.abspath(__file__)), KNOWN_GOOD_FILE_NAME)
+    with open(known_good_file) as known_good:
+        return [
+            GoodRepo(repo, args)
+            for repo in json.loads(known_good.read())['repos']
+        ]
+
+
+def GetInstallNames(args):
+    """Returns the install names list.
+
+    The known-good file is expected to be in the same
+    directory as this script unless overridden by the 'known_good_dir'
+    parameter.
+    """
+    if args.known_good_dir:
+        known_good_file = os.path.join(os.path.abspath(args.known_good_dir),
+            KNOWN_GOOD_FILE_NAME)
+    else:
+        known_good_file = os.path.join(
+            os.path.dirname(os.path.abspath(__file__)), KNOWN_GOOD_FILE_NAME)
+    with open(known_good_file) as known_good:
+        install_info = json.loads(known_good.read())
+        if install_info.get('install_names'):
+            return install_info['install_names']
+        else:
+            return None
+
+
+def CreateHelper(args, repos, filename):
+    """Create a CMake config helper file.
+
+    The helper file is intended to be used with 'cmake -C <file>'
+    to build this home repo using the dependencies built by this script.
+
+    The install_names dictionary represents the CMake variables used by the
+    home repo to locate the install dirs of the dependent repos.
+    This information is baked into the CMake files of the home repo and so
+    this dictionary is kept with the repo via the json file.
+    """
+    def escape(path):
+        return path.replace('\\', '\\\\')
+    install_names = GetInstallNames(args)
+    with open(filename, 'w') as helper_file:
+        for repo in repos:
+            if install_names and repo.name in install_names and repo.on_build_platform:
+                helper_file.write('set({var} "{dir}" CACHE STRING "" FORCE)\n'
+                                  .format(
+                                      var=install_names[repo.name],
+                                      dir=escape(repo.install_dir)))
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        description='Get and build dependent repos at known-good commits')
+    parser.add_argument(
+        '--known_good_dir',
+        dest='known_good_dir',
+        help="Specify directory for known_good.json file.")
+    parser.add_argument(
+        '--dir',
+        dest='dir',
+        default='.',
+        help="Set target directory for repository roots. Default is \'.\'.")
+    parser.add_argument(
+        '--ref',
+        dest='ref',
+        default='',
+        help="Override 'commit' with git reference. E.g., 'origin/master'")
+    parser.add_argument(
+        '--no-build',
+        dest='do_build',
+        action='store_false',
+        help=
+        "Clone/update repositories and generate build files without performing compilation",
+        default=True)
+    parser.add_argument(
+        '--clean',
+        dest='do_clean',
+        action='store_true',
+        help="Clean files generated by compiler and linker before building",
+        default=False)
+    parser.add_argument(
+        '--clean-repo',
+        dest='do_clean_repo',
+        action='store_true',
+        help="Delete repository directory before building",
+        default=False)
+    parser.add_argument(
+        '--clean-build',
+        dest='do_clean_build',
+        action='store_true',
+        help="Delete build directory before building",
+        default=False)
+    parser.add_argument(
+        '--clean-install',
+        dest='do_clean_install',
+        action='store_true',
+        help="Delete install directory before building",
+        default=False)
+    parser.add_argument(
+        '--arch',
+        dest='arch',
+        choices=['32', '64', 'x86', 'x64', 'win32', 'win64'],
+        type=str.lower,
+        help="Set build files architecture (Windows)",
+        default='64')
+    parser.add_argument(
+        '--config',
+        dest='config',
+        choices=['debug', 'release', 'relwithdebinfo', 'minsizerel'],
+        type=str.lower,
+        help="Set build files configuration",
+        default='debug')
+    parser.add_argument(
+        '--generator',
+        dest='generator',
+        help="Set the CMake generator",
+        default=None)
+
+    args = parser.parse_args()
+    save_cwd = os.getcwd()
+
+    # Create working "top" directory if needed
+    distutils.dir_util.mkpath(args.dir)
+    abs_top_dir = os.path.abspath(args.dir)
+
+    repos = GetGoodRepos(args)
+    repo_dict = {}
+
+    print('Starting builds in {d}'.format(d=abs_top_dir))
+    for repo in repos:
+        # If the repo has a platform whitelist, skip the repo
+        # unless we are building on a whitelisted platform.
+        if not repo.on_build_platform:
+            continue
+
+        field_list = ('url',
+                      'sub_dir',
+                      'commit',
+                      'build_dir',
+                      'install_dir',
+                      'deps',
+                      'prebuild',
+                      'prebuild_linux',
+                      'prebuild_windows',
+                      'custom_build',
+                      'cmake_options',
+                      'ci_only',
+                      'build_step',
+                      'build_platforms',
+                      'repo_dir',
+                      'on_build_platform')
+        repo_dict[repo.name] = {field: getattr(repo, field) for field in field_list}
+
+        # If the repo has a CI whitelist, skip the repo unless
+        # one of the CI environment variables is set to true.
+        if len(repo.ci_only):
+            do_build = False
+            for env in repo.ci_only:
+                if env not in os.environ:
+                    continue
+                if os.environ[env].lower() == 'true':
+                    do_build = True
+                    break
+            if not do_build:
+                continue
+
+        # Clone/update the repository
+        repo.Checkout()
+
+        # Build the repository
+        if args.do_build and repo.build_step != 'skip':
+            repo.Build(repos, repo_dict)
+
+    # Need to restore original cwd in order for CreateHelper to find json file
+    os.chdir(save_cwd)
+    CreateHelper(args, repos, os.path.join(abs_top_dir, 'helper.cmake'))
+
+    sys.exit(0)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/src/third_party/vulkan-tools/src/scripts/vulkan_tools_helper_file_generator.py b/src/third_party/vulkan-tools/src/scripts/vulkan_tools_helper_file_generator.py
new file mode 100644
index 0000000..ca5b240
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/scripts/vulkan_tools_helper_file_generator.py
@@ -0,0 +1,1232 @@
+#!/usr/bin/python3 -i
+#
+# Copyright (c) 2015-2017 The Khronos Group Inc.
+# Copyright (c) 2015-2017 Valve Corporation
+# Copyright (c) 2015-2017 LunarG, Inc.
+# Copyright (c) 2015-2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Mark Lobodzinski <mark@lunarg.com>
+# Author: Tobin Ehlis <tobine@google.com>
+# Author: John Zulauf <jzulauf@lunarg.com>
+
+import os,re,sys
+import xml.etree.ElementTree as etree
+from generator import *
+from collections import namedtuple
+from common_codegen import *
+
+#
+# HelperFileOutputGeneratorOptions - subclass of GeneratorOptions.
+class HelperFileOutputGeneratorOptions(GeneratorOptions):
+    def __init__(self,
+                 conventions = None,
+                 filename = None,
+                 directory = '.',
+                 apiname = None,
+                 profile = None,
+                 versions = '.*',
+                 emitversions = '.*',
+                 defaultExtensions = None,
+                 addExtensions = None,
+                 removeExtensions = None,
+                 emitExtensions = None,
+                 sortProcedure = regSortFeatures,
+                 prefixText = "",
+                 genFuncPointers = True,
+                 protectFile = True,
+                 protectFeature = True,
+                 apicall = '',
+                 apientry = '',
+                 apientryp = '',
+                 alignFuncParam = 0,
+                 library_name = '',
+                 expandEnumerants = True,
+                 helper_file_type = ''):
+        GeneratorOptions.__init__(self, conventions, filename, directory, apiname, profile,
+                                  versions, emitversions, defaultExtensions,
+                                  addExtensions, removeExtensions, emitExtensions, sortProcedure)
+        self.prefixText       = prefixText
+        self.genFuncPointers  = genFuncPointers
+        self.protectFile      = protectFile
+        self.protectFeature   = protectFeature
+        self.apicall          = apicall
+        self.apientry         = apientry
+        self.apientryp        = apientryp
+        self.alignFuncParam   = alignFuncParam
+        self.library_name     = library_name
+        self.helper_file_type = helper_file_type
+#
+# HelperFileOutputGenerator - subclass of OutputGenerator. Outputs Vulkan helper files
+class HelperFileOutputGenerator(OutputGenerator):
+    """Generate helper file based on XML element attributes"""
+    def __init__(self,
+                 errFile = sys.stderr,
+                 warnFile = sys.stderr,
+                 diagFile = sys.stdout):
+        OutputGenerator.__init__(self, errFile, warnFile, diagFile)
+        # Internal state - accumulators for different inner block text
+        self.enum_output = ''                             # string built up of enum string routines
+        self.structNames = []                             # List of Vulkan struct typenames
+        self.structTypes = dict()                         # Map of Vulkan struct typename to required VkStructureType
+        self.structMembers = []                           # List of StructMemberData records for all Vulkan structs
+        self.object_types = []                            # List of all handle types
+        self.object_type_aliases = []                     # Aliases to handles types (for handles that were extensions)
+        self.debug_report_object_types = []               # Handy copy of debug_report_object_type enum data
+        self.core_object_types = []                       # Handy copy of core_object_type enum data
+        self.device_extension_info = dict()               # Dict of device extension name defines and ifdef values
+        self.instance_extension_info = dict()             # Dict of instance extension name defines and ifdef values
+
+        # Named tuples to store struct and command data
+        self.StructType = namedtuple('StructType', ['name', 'value'])
+        self.CommandParam = namedtuple('CommandParam', ['type', 'name', 'ispointer', 'isstaticarray', 'isconst', 'iscount', 'len', 'extstructs', 'cdecl'])
+        self.StructMemberData = namedtuple('StructMemberData', ['name', 'members', 'ifdef_protect'])
+
+        self.custom_construct_params = {
+            # safe_VkGraphicsPipelineCreateInfo needs to know if the subpass has color and/or depth/stencil attachments to use its pointers
+            'VkGraphicsPipelineCreateInfo' :
+                ', const bool uses_color_attachment, const bool uses_depthstencil_attachment',
+            # safe_VkPipelineViewportStateCreateInfo needs to know if the viewport and scissor are dynamic to use its pointers
+            'VkPipelineViewportStateCreateInfo' :
+                ', const bool is_dynamic_viewports, const bool is_dynamic_scissors',
+        }
+    #
+    # Called once at the beginning of each run
+    def beginFile(self, genOpts):
+        OutputGenerator.beginFile(self, genOpts)
+        # User-supplied prefix text, if any (list of strings)
+        self.helper_file_type = genOpts.helper_file_type
+        self.library_name = genOpts.library_name
+        # File Comment
+        file_comment = '// *** THIS FILE IS GENERATED - DO NOT EDIT ***\n'
+        file_comment += '// See vulkan_tools_helper_file_generator.py for modifications\n'
+        write(file_comment, file=self.outFile)
+        # Copyright Notice
+        copyright = ''
+        copyright += '\n'
+        copyright += '/***************************************************************************\n'
+        copyright += ' *\n'
+        copyright += ' * Copyright (c) 2015-2017 The Khronos Group Inc.\n'
+        copyright += ' * Copyright (c) 2015-2017 Valve Corporation\n'
+        copyright += ' * Copyright (c) 2015-2017 LunarG, Inc.\n'
+        copyright += ' * Copyright (c) 2015-2017 Google Inc.\n'
+        copyright += ' *\n'
+        copyright += ' * Licensed under the Apache License, Version 2.0 (the "License");\n'
+        copyright += ' * you may not use this file except in compliance with the License.\n'
+        copyright += ' * You may obtain a copy of the License at\n'
+        copyright += ' *\n'
+        copyright += ' *     http://www.apache.org/licenses/LICENSE-2.0\n'
+        copyright += ' *\n'
+        copyright += ' * Unless required by applicable law or agreed to in writing, software\n'
+        copyright += ' * distributed under the License is distributed on an "AS IS" BASIS,\n'
+        copyright += ' * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n'
+        copyright += ' * See the License for the specific language governing permissions and\n'
+        copyright += ' * limitations under the License.\n'
+        copyright += ' *\n'
+        copyright += ' * Author: Mark Lobodzinski <mark@lunarg.com>\n'
+        copyright += ' * Author: Courtney Goeltzenleuchter <courtneygo@google.com>\n'
+        copyright += ' * Author: Tobin Ehlis <tobine@google.com>\n'
+        copyright += ' * Author: Chris Forbes <chrisforbes@google.com>\n'
+        copyright += ' * Author: John Zulauf <jzulauf@lunarg.com>\n'
+        copyright += ' *\n'
+        copyright += ' ****************************************************************************/\n'
+        write(copyright, file=self.outFile)
+    #
+    # Write generated file content to output file
+    def endFile(self):
+        dest_file = ''
+        dest_file += self.OutputDestFile()
+        # Remove blank lines at EOF
+        if dest_file.endswith('\n'):
+            dest_file = dest_file[:-1]
+        write(dest_file, file=self.outFile)
+        # Finish processing in superclass
+        OutputGenerator.endFile(self)
+    #
+    # Override parent class to be notified of the beginning of an extension
+    def beginFeature(self, interface, emit):
+        # Start processing in superclass
+        OutputGenerator.beginFeature(self, interface, emit)
+        self.featureExtraProtect = GetFeatureProtect(interface)
+
+        if self.featureName == 'VK_VERSION_1_0' or self.featureName == 'VK_VERSION_1_1':
+            return
+        name = self.featureName
+        nameElem = interface[0][1]
+        name_define = nameElem.get('name')
+        if 'EXTENSION_NAME' not in name_define:
+            print("Error in vk.xml file -- extension name is not available")
+        requires = interface.get('requires')
+        if requires is not None:
+            required_extensions = requires.split(',')
+        else:
+            required_extensions = list()
+        info = { 'define': name_define, 'ifdef':self.featureExtraProtect, 'reqs':required_extensions }
+        if interface.get('type') == 'instance':
+            self.instance_extension_info[name] = info
+        else:
+            self.device_extension_info[name] = info
+
+    #
+    # Override parent class to be notified of the end of an extension
+    def endFeature(self):
+        # Finish processing in superclass
+        OutputGenerator.endFeature(self)
+    #
+    # Grab group (e.g. C "enum" type) info to output for enum-string conversion helper
+    def genGroup(self, groupinfo, groupName, alias):
+        OutputGenerator.genGroup(self, groupinfo, groupName, alias)
+        groupElem = groupinfo.elem
+        # For enum_string_header
+        if self.helper_file_type == 'enum_string_header':
+            value_set = set()
+            for elem in groupElem.findall('enum'):
+                if elem.get('supported') != 'disabled' and elem.get('alias') == None:
+                    value_set.add(elem.get('name'))
+            self.enum_output += self.GenerateEnumStringConversion(groupName, value_set)
+        elif self.helper_file_type == 'object_types_header':
+            if groupName == 'VkDebugReportObjectTypeEXT':
+                for elem in groupElem.findall('enum'):
+                    if elem.get('supported') != 'disabled':
+                        item_name = elem.get('name')
+                        self.debug_report_object_types.append(item_name)
+            elif groupName == 'VkObjectType':
+                for elem in groupElem.findall('enum'):
+                    if elem.get('supported') != 'disabled':
+                        item_name = elem.get('name')
+                        self.core_object_types.append(item_name)
+
+    #
+    # Called for each type -- if the type is a struct/union, grab the metadata
+    def genType(self, typeinfo, name, alias):
+        OutputGenerator.genType(self, typeinfo, name, alias)
+        typeElem = typeinfo.elem
+        # If the type is a struct type, traverse the embedded <member> tags generating a structure.
+        # Otherwise, emit the tag text.
+        category = typeElem.get('category')
+        if category == 'handle':
+            if alias:
+                self.object_type_aliases.append((name,alias))
+            else:
+                self.object_types.append(name)
+        elif (category == 'struct' or category == 'union'):
+            self.structNames.append(name)
+            self.genStruct(typeinfo, name, alias)
+    #
+    # Check if the parameter passed in is a pointer
+    def paramIsPointer(self, param):
+        ispointer = False
+        for elem in param:
+            if ((elem.tag != 'type') and (elem.tail is not None)) and '*' in elem.tail:
+                ispointer = True
+        return ispointer
+    #
+    # Check if the parameter passed in is a static array
+    def paramIsStaticArray(self, param):
+        isstaticarray = 0
+        paramname = param.find('name')
+        if (paramname.tail is not None) and ('[' in paramname.tail):
+            isstaticarray = paramname.tail.count('[')
+        return isstaticarray
+    #
+    # Retrieve the type and name for a parameter
+    def getTypeNameTuple(self, param):
+        type = ''
+        name = ''
+        for elem in param:
+            if elem.tag == 'type':
+                type = noneStr(elem.text)
+            elif elem.tag == 'name':
+                name = noneStr(elem.text)
+        return (type, name)
+    # Extract length values from latexmath.  Currently an inflexible solution that looks for specific
+    # patterns that are found in vk.xml.  Will need to be updated when new patterns are introduced.
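+    # For illustration, the two patterns currently handled map as follows:
+    #   'latexmath:[\lceil{\mathit{rasterizationSamples} \over 32}\rceil]'
+    #       -> ('rasterizationSamples', 'rasterizationSamples/32 + 1')
+    #   'latexmath : [dataSize \over 4]'
+    #       -> ('dataSize', 'dataSize/4')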
+    def parseLateXMath(self, source):
+        name = 'ERROR'
+        decoratedName = 'ERROR'
+        if 'mathit' in source:
+            # Matches expressions similar to 'latexmath:[\lceil{\mathit{rasterizationSamples} \over 32}\rceil]'
+            match = re.match(r'latexmath\s*\:\s*\[\s*\\l(\w+)\s*\{\s*\\mathit\s*\{\s*(\w+)\s*\}\s*\\over\s*(\d+)\s*\}\s*\\r(\w+)\s*\]', source)
+            if not match or match.group(1) != match.group(4):
+                raise RuntimeError('Unrecognized latexmath expression')
+            name = match.group(2)
+            # Need to add 1 for the ceiling function; otherwise, the allocated packet
+            # size will be less than needed during capture for some titles which use
+            # this in VkPipelineMultisampleStateCreateInfo. Based on the ceiling
+            # function definition, the exact value is
+            # '{0}%{1}?{0}/{1} + 1:{0}/{1}'.format(*match.group(2, 3)), and it is
+            # always <= '{}/{} + 1'.
+            if match.group(1) == 'ceil':
+                decoratedName = '{}/{} + 1'.format(*match.group(2, 3))
+            else:
+                decoratedName = '{}/{}'.format(*match.group(2, 3))
+        else:
+            # Matches expressions similar to 'latexmath : [dataSize \over 4]'
+            match = re.match(r'latexmath\s*\:\s*\[\s*(\\textrm\{)?(\w+)\}?\s*\\over\s*(\d+)\s*\]', source)
+            name = match.group(2)
+            decoratedName = '{}/{}'.format(*match.group(2, 3))
+        return name, decoratedName
+    #
+    # Retrieve the value of the len tag
+    def getLen(self, param):
+        result = None
+        len = param.attrib.get('len')
+        if len and len != 'null-terminated':
+            # For string arrays, 'len' can look like 'count,null-terminated', indicating that we
+            # have a null terminated array of strings.  We strip the null-terminated from the
+            # 'len' field and only return the parameter specifying the string count
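+            # (e.g. a 'len' of "enabledLayerCount,null-terminated" yields "enabledLayerCount")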
+            if 'null-terminated' in len:
+                result = len.split(',')[0]
+            else:
+                result = len
+            if 'latexmath' in len:
+                param_type, param_name = self.getTypeNameTuple(param)
+                len_name, result = self.parseLateXMath(len)
+            # The spec now has notation for len attributes, using '::' instead of a platform-specific pointer symbol
+            result = str(result).replace('::', '->')
+        return result
+    #
+    # Check if a structure is or contains a dispatchable (dispatchable = True) or
+    # non-dispatchable (dispatchable = False) handle
+    def TypeContainsObjectHandle(self, handle_type, dispatchable):
+        if dispatchable:
+            type_key = 'VK_DEFINE_HANDLE'
+        else:
+            type_key = 'VK_DEFINE_NON_DISPATCHABLE_HANDLE'
+        handle = self.registry.tree.find("types/type/[name='" + handle_type + "'][@category='handle']")
+        if handle is not None and handle.find('type').text == type_key:
+            return True
+        # if handle_type is a struct, search its members
+        if handle_type in self.structNames:
+            member_index = next((i for i, v in enumerate(self.structMembers) if v[0] == handle_type), None)
+            if member_index is not None:
+                for item in self.structMembers[member_index].members:
+                    handle = self.registry.tree.find("types/type/[name='" + item.type + "'][@category='handle']")
+                    if handle is not None and handle.find('type').text == type_key:
+                        return True
+        return False
+    #
+    # Generate local ready-access data describing Vulkan structures and unions from the XML metadata
+    def genStruct(self, typeinfo, typeName, alias):
+        OutputGenerator.genStruct(self, typeinfo, typeName, alias)
+        members = typeinfo.elem.findall('.//member')
+        # Iterate over members once to get length parameters for arrays
+        lens = set()
+        for member in members:
+            len = self.getLen(member)
+            if len:
+                lens.add(len)
+        # Generate member info
+        membersInfo = []
+        for member in members:
+            # Get the member's type and name
+            info = self.getTypeNameTuple(member)
+            type = info[0]
+            name = info[1]
+            cdecl = self.makeCParamDecl(member, 1)
+            # Process VkStructureType
+            if type == 'VkStructureType':
+                # Extract the required struct type value from the comments
+                # embedded in the original text defining the 'typeinfo' element
+                rawXml = etree.tostring(typeinfo.elem).decode('ascii')
+                result = re.search(r'VK_STRUCTURE_TYPE_\w+', rawXml)
+                if result:
+                    value = result.group(0)
+                    # Store the required type value
+                    self.structTypes[typeName] = self.StructType(name=name, value=value)
+            # Store pointer/array/string info
+            isstaticarray = self.paramIsStaticArray(member)
+            membersInfo.append(self.CommandParam(type=type,
+                                                 name=name,
+                                                 ispointer=self.paramIsPointer(member),
+                                                 isstaticarray=isstaticarray,
+                                                 isconst=True if 'const' in cdecl else False,
+                                                 iscount=True if name in lens else False,
+                                                 len=self.getLen(member),
+                                                 extstructs=self.registry.validextensionstructs[typeName] if name == 'pNext' else None,
+                                                 cdecl=cdecl))
+        self.structMembers.append(self.StructMemberData(name=typeName, members=membersInfo, ifdef_protect=self.featureExtraProtect))
+    #
+    # Enum_string_header: Create a routine to convert an enumerated value into a string
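+    # For a hypothetical group 'VkFoo' with a single enumerant 'VK_FOO_BAR',
+    # the generated routine has this shape:
+    #     static inline const char* string_VkFoo(VkFoo input_value)
+    #     {
+    #         switch ((VkFoo)input_value)
+    #         {
+    #             case VK_FOO_BAR:
+    #                 return "VK_FOO_BAR";
+    #             default:
+    #                 return "Unhandled VkFoo";
+    #         }
+    #     }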
+    def GenerateEnumStringConversion(self, groupName, value_list):
+        outstring = '\n'
+        outstring += 'static inline const char* string_%s(%s input_value)\n' % (groupName, groupName)
+        outstring += '{\n'
+        outstring += '    switch ((%s)input_value)\n' % groupName
+        outstring += '    {\n'
+        for item in value_list:
+            outstring += '        case %s:\n' % item
+            outstring += '            return "%s";\n' % item
+        outstring += '        default:\n'
+        outstring += '            return "Unhandled %s";\n' % groupName
+        outstring += '    }\n'
+        outstring += '}\n'
+        return outstring
+    #
+    # Tack on a helper which, given an index into a VkPhysicalDeviceFeatures structure, will return the corresponding feature name
+    def DeIndexPhysDevFeatures(self):
+        pdev_members = None
+        for name, members, ifdef in self.structMembers:
+            if name == 'VkPhysicalDeviceFeatures':
+                pdev_members = members
+                break
+        deindex = '\n'
+        deindex += 'static inline const char * GetPhysDevFeatureString(uint32_t index) {\n'
+        deindex += '    const char * IndexToPhysDevFeatureString[] = {\n'
+        for feature in pdev_members:
+            deindex += '        "%s",\n' % feature.name
+        deindex += '    };\n\n'
+        deindex += '    return IndexToPhysDevFeatureString[index];\n'
+        deindex += '}\n'
+        return deindex
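+    #
+    # For illustration only: with the registry's VkPhysicalDeviceFeatures members,
+    # the helper emitted above looks roughly like
+    #
+    #   static inline const char * GetPhysDevFeatureString(uint32_t index) {
+    #       const char * IndexToPhysDevFeatureString[] = {
+    #           "robustBufferAccess",
+    #           "fullDrawIndexUint32",
+    #           ...
+    #       };
+    #
+    #       return IndexToPhysDevFeatureString[index];
+    #   }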
+    #
+    # Combine enum string helper header file preamble with body text and return
+    def GenerateEnumStringHelperHeader(self):
+        enum_string_helper_header = '\n'
+        enum_string_helper_header += '#pragma once\n'
+        enum_string_helper_header += '#ifdef _WIN32\n'
+        enum_string_helper_header += '#pragma warning( disable : 4065 )\n'
+        enum_string_helper_header += '#endif\n'
+        enum_string_helper_header += '\n'
+        enum_string_helper_header += '#include <vulkan/vulkan.h>\n'
+        enum_string_helper_header += '\n'
+        enum_string_helper_header += self.enum_output
+        enum_string_helper_header += self.DeIndexPhysDevFeatures()
+        return enum_string_helper_header
+    #
+    # Helper function for declaring a counter variable only once
+    def DeclareCounter(self, string_var, declare_flag):
+        if declare_flag == False:
+            string_var += '        uint32_t i = 0;\n'
+            declare_flag = True
+        return string_var, declare_flag
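+    #
+    # Illustrative (hypothetical) caller pattern: the flag ensures the loop counter
+    # is declared at most once per generated block, e.g.
+    #
+    #   counter_declared = False
+    #   body, counter_declared = self.DeclareCounter(body, counter_declared)  # appends 'uint32_t i = 0;'
+    #   body, counter_declared = self.DeclareCounter(body, counter_declared)  # no-op, already declared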
+    #
+    # Combine safe struct helper header file preamble with body text and return
+    def GenerateSafeStructHelperHeader(self):
+        safe_struct_helper_header = '\n'
+        safe_struct_helper_header += '#pragma once\n'
+        safe_struct_helper_header += '#include <vulkan/vulkan.h>\n'
+        safe_struct_helper_header += '\n'
+        safe_struct_helper_header += self.GenerateSafeStructHeader()
+        return safe_struct_helper_header
+    #
+    # safe_struct header: build function prototypes for header file
+    def GenerateSafeStructHeader(self):
+        safe_struct_header = ''
+        for item in self.structMembers:
+            if self.NeedSafeStruct(item) == True:
+                safe_struct_header += '\n'
+                if item.ifdef_protect != None:
+                    safe_struct_header += '#ifdef %s\n' % item.ifdef_protect
+                safe_struct_header += 'struct safe_%s {\n' % (item.name)
+                for member in item.members:
+                    if member.type in self.structNames:
+                        member_index = next((i for i, v in enumerate(self.structMembers) if v[0] == member.type), None)
+                        if member_index is not None and self.NeedSafeStruct(self.structMembers[member_index]) == True:
+                            if member.ispointer:
+                                safe_struct_header += '    safe_%s* %s;\n' % (member.type, member.name)
+                            else:
+                                safe_struct_header += '    safe_%s %s;\n' % (member.type, member.name)
+                            continue
+                    if member.len is not None and (self.TypeContainsObjectHandle(member.type, True) or self.TypeContainsObjectHandle(member.type, False)):
+                        safe_struct_header += '    %s* %s;\n' % (member.type, member.name)
+                    else:
+                        safe_struct_header += '%s;\n' % member.cdecl
+                safe_struct_header += '    safe_%s(const %s* in_struct%s);\n' % (item.name, item.name, self.custom_construct_params.get(item.name, ''))
+                safe_struct_header += '    safe_%s(const safe_%s& src);\n' % (item.name, item.name)
+                safe_struct_header += '    safe_%s& operator=(const safe_%s& src);\n' % (item.name, item.name)
+                safe_struct_header += '    safe_%s();\n' % item.name
+                safe_struct_header += '    ~safe_%s();\n' % item.name
+                safe_struct_header += '    void initialize(const %s* in_struct%s);\n' % (item.name, self.custom_construct_params.get(item.name, ''))
+                safe_struct_header += '    void initialize(const safe_%s* src);\n' % (item.name)
+                safe_struct_header += '    %s *ptr() { return reinterpret_cast<%s *>(this); }\n' % (item.name, item.name)
+                safe_struct_header += '    %s const *ptr() const { return reinterpret_cast<%s const *>(this); }\n' % (item.name, item.name)
+                safe_struct_header += '};\n'
+                if item.ifdef_protect != None:
+                    safe_struct_header += '#endif // %s\n' % item.ifdef_protect
+        return safe_struct_header
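+    #
+    # For illustration only: for a hypothetical VkExampleCreateInfo selected by
+    # NeedSafeStruct(), the prototypes emitted above take this shape (member
+    # declarations elided):
+    #
+    #   struct safe_VkExampleCreateInfo {
+    #       ...
+    #       safe_VkExampleCreateInfo(const VkExampleCreateInfo* in_struct);
+    #       safe_VkExampleCreateInfo(const safe_VkExampleCreateInfo& src);
+    #       safe_VkExampleCreateInfo& operator=(const safe_VkExampleCreateInfo& src);
+    #       safe_VkExampleCreateInfo();
+    #       ~safe_VkExampleCreateInfo();
+    #       void initialize(const VkExampleCreateInfo* in_struct);
+    #       void initialize(const safe_VkExampleCreateInfo* src);
+    #       VkExampleCreateInfo *ptr() { return reinterpret_cast<VkExampleCreateInfo *>(this); }
+    #       VkExampleCreateInfo const *ptr() const { return reinterpret_cast<VkExampleCreateInfo const *>(this); }
+    #   };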
+    #
+    # Generate extension helper header file
+    def GenerateExtensionHelperHeader(self):
+
+        V_1_0_instance_extensions_promoted_to_core = [
+            'vk_khr_device_group_creation',
+            'vk_khr_external_fence_capabilities',
+            'vk_khr_external_memory_capabilities',
+            'vk_khr_external_semaphore_capabilities',
+            'vk_khr_get_physical_device_properties_2',
+            ]
+
+        V_1_0_device_extensions_promoted_to_core = [
+            'vk_khr_16bit_storage',
+            'vk_khr_bind_memory_2',
+            'vk_khr_dedicated_allocation',
+            'vk_khr_descriptor_update_template',
+            'vk_khr_device_group',
+            'vk_khr_external_fence',
+            'vk_khr_external_memory',
+            'vk_khr_external_semaphore',
+            'vk_khr_get_memory_requirements_2',
+            'vk_khr_maintenance1',
+            'vk_khr_maintenance2',
+            'vk_khr_maintenance3',
+            'vk_khr_multiview',
+            'vk_khr_relaxed_block_layout',
+            'vk_khr_sampler_ycbcr_conversion',
+            'vk_khr_shader_draw_parameters',
+            'vk_khr_storage_buffer_storage_class',
+            'vk_khr_variable_pointers',
+            ]
+
+        output = [
+            '',
+            '#ifndef VK_EXTENSION_HELPER_H_',
+            '#define VK_EXTENSION_HELPER_H_',
+            '#include <string>',
+            '#include <unordered_map>',
+            '#include <utility>',
+            '',
+            '#include <vulkan/vulkan.h>',
+            '']
+
+        def guarded(ifdef, value):
+            if ifdef is not None:
+                return '\n'.join([ '#ifdef %s' % ifdef, value, '#endif' ])
+            else:
+                return value
+
+        for type in ['Instance', 'Device']:
+            struct_type = '%sExtensions' % type
+            if type == 'Instance':
+                extension_dict = self.instance_extension_info
+                promoted_ext_list = V_1_0_instance_extensions_promoted_to_core
+                struct_decl = 'struct %s {' % struct_type
+                instance_struct_type = struct_type
+            else:
+                extension_dict = self.device_extension_info
+                promoted_ext_list = V_1_0_device_extensions_promoted_to_core
+                struct_decl = 'struct %s : public %s {' % (struct_type, instance_struct_type)
+
+            extension_items = sorted(extension_dict.items())
+
+            field_name = { ext_name: re.sub('_extension_name', '', info['define'].lower()) for ext_name, info in extension_items }
+            if type == 'Instance':
+                instance_field_name = field_name
+                instance_extension_dict = extension_dict
+            else:
+                # Get complete field name and extension data for both Instance and Device extensions
+                field_name.update(instance_field_name)
+                extension_dict = extension_dict.copy()  # Don't modify the self.<dict> we're pointing to
+                extension_dict.update(instance_extension_dict)
+
+            # Output the data member list
+            struct  = [struct_decl]
+            struct.extend([ '    bool %s{false};' % field_name[ext_name] for ext_name, info in extension_items])
+
+            # Construct the extension information map -- mapping name to data member (field), and required extensions
+            # The map is contained within a static function member for portability reasons.
+            info_type = '%sInfo' % type
+            info_map_type = '%sMap' % info_type
+            req_type = '%sReq' % type
+            req_vec_type = '%sVec' % req_type
+            struct.extend([
+                '',
+                '    struct %s {' % req_type,
+                '        const bool %s::* enabled;' % struct_type,
+                '        const char *name;',
+                '    };',
+                '    typedef std::vector<%s> %s;' % (req_type, req_vec_type),
+                '    struct %s {' % info_type,
+                '       %s(bool %s::* state_, const %s requires_): state(state_), requires(requires_) {}' % ( info_type, struct_type, req_vec_type),
+                '       bool %s::* state;' % struct_type,
+                '       %s requires;' % req_vec_type,
+                '    };',
+                '',
+                '    typedef std::unordered_map<std::string,%s> %s;' % (info_type, info_map_type),
+                '    static const %s &get_info(const char *name) {' % info_type,
+                '        static const %s info_map = {' % info_map_type ])
+
+            field_format = '&' + struct_type + '::%s'
+            req_format = '{' + field_format + ', %s}'
+            req_indent = '\n                           '
+            req_join = ',' + req_indent
+            info_format = ('            std::make_pair(%s, ' + info_type + '(' + field_format + ', {%s})),')
+            def format_info(ext_name, info):
+                reqs = req_join.join([req_format % (field_name[req], extension_dict[req]['define']) for req in info['reqs']])
+                return info_format % (info['define'], field_name[ext_name], '{%s}' % (req_indent + reqs) if reqs else '')
+
+            struct.extend([guarded(info['ifdef'], format_info(ext_name, info)) for ext_name, info in extension_items])
+            struct.extend([
+                '        };',
+                '',
+                '        static const %s empty_info {nullptr, %s()};' % (info_type, req_vec_type),
+                '        %s::const_iterator info = info_map.find(name);' % info_map_type,
+                '        if ( info != info_map.cend()) {',
+                '            return info->second;',
+                '        }',
+                '        return empty_info;',
+                '    }',
+                ''])
+
+            if type == 'Instance':
+                struct.extend([
+                    '    uint32_t NormalizeApiVersion(uint32_t specified_version) {',
+                    '        uint32_t api_version = (specified_version < VK_API_VERSION_1_1) ? VK_API_VERSION_1_0 : VK_API_VERSION_1_1;',
+                    '        return api_version;',
+                    '    }',
+                    '',
+                    '    uint32_t InitFromInstanceCreateInfo(uint32_t requested_api_version, const VkInstanceCreateInfo *pCreateInfo) {'])
+            else:
+                struct.extend([
+                    '    %s() = default;' % struct_type,
+                    '    %s(const %s& instance_ext) : %s(instance_ext) {}' % (struct_type, instance_struct_type, instance_struct_type),
+                    '',
+                    '    uint32_t InitFromDeviceCreateInfo(const %s *instance_extensions, uint32_t requested_api_version,' % instance_struct_type,
+                    '                                      const VkDeviceCreateInfo *pCreateInfo) {',
+                    '        // Initialize: this to defaults,  base class fields to input.',
+                    '        assert(instance_extensions);',
+                    '        *this = %s(*instance_extensions);' % struct_type])
+
+            struct.extend([
+                '',
+                '        static const std::vector<const char *> V_1_0_promoted_%s_extensions = {' % type.lower() ])
+            struct.extend(['            %s_EXTENSION_NAME,' % ext_name.upper() for ext_name in promoted_ext_list])
+            struct.extend([
+                '        };',
+                '',
+                '        // Initialize struct data, robust to invalid pCreateInfo',
+                '        if (pCreateInfo->ppEnabledExtensionNames) {',
+                '            for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {',
+                '                if (!pCreateInfo->ppEnabledExtensionNames[i]) continue;',
+                '                auto info = get_info(pCreateInfo->ppEnabledExtensionNames[i]);',
+                '                if(info.state) this->*(info.state) = true;',
+                '            }',
+                '        }',
+                '        uint32_t api_version = NormalizeApiVersion(requested_api_version);',
+                '        if (api_version >= VK_API_VERSION_1_1) {',
+                '            for (auto promoted_ext : V_1_0_promoted_%s_extensions) {' % type.lower(),
+                '                auto info = get_info(promoted_ext);',
+                '                assert(info.state);',
+                '                if (info.state) this->*(info.state) = true;',
+                '            }',
+                '        }',
+                '        return api_version;',
+                '    }',
+                '};'])
+
+            # Output reference lists of instance/device extension names
+            struct.extend(['', 'static const char * const k%sExtensionNames = ' % type])
+            struct.extend([guarded(info['ifdef'], '    %s' % info['define']) for ext_name, info in extension_items])
+            struct.extend([';', ''])
+            output.extend(struct)
+
+        output.extend(['', '#endif // VK_EXTENSION_HELPER_H_'])
+        return '\n'.join(output)
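+    #
+    # For illustration only (vk_khr_surface shown as a representative field; the
+    # real lists come from the registry): the generated header has roughly this shape:
+    #
+    #   struct InstanceExtensions {
+    #       bool vk_khr_surface{false};            // one bool per known extension
+    #       ...
+    #       static const InstanceInfo &get_info(const char *name) { ... }
+    #       uint32_t NormalizeApiVersion(uint32_t specified_version) { ... }
+    #       uint32_t InitFromInstanceCreateInfo(uint32_t requested_api_version,
+    #                                           const VkInstanceCreateInfo *pCreateInfo) { ... }
+    #   };
+    #   struct DeviceExtensions : public InstanceExtensions {
+    #       ...
+    #       uint32_t InitFromDeviceCreateInfo(const InstanceExtensions *instance_extensions,
+    #                                         uint32_t requested_api_version,
+    #                                         const VkDeviceCreateInfo *pCreateInfo) { ... }
+    #   };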
+    #
+    # Combine object types helper header file preamble with body text and return
+    def GenerateObjectTypesHelperHeader(self):
+        object_types_helper_header = '\n'
+        object_types_helper_header += '#pragma once\n'
+        object_types_helper_header += '\n'
+        object_types_helper_header += '#include <vulkan/vulkan.h>\n\n'
+        object_types_helper_header += self.GenerateObjectTypesHeader()
+        return object_types_helper_header
+    #
+    # Object types header: create object enum type header file
+    def GenerateObjectTypesHeader(self):
+        object_types_header = ''
+        object_types_header += '// Object Type enum for validation layer internal object handling\n'
+        object_types_header += 'typedef enum VulkanObjectType {\n'
+        object_types_header += '    kVulkanObjectTypeUnknown = 0,\n'
+        enum_num = 1
+        type_list = []
+        enum_entry_map = {}
+
+        # Output enum definition as each handle is processed, saving the names to use for the conversion routine
+        for item in self.object_types:
+            fixup_name = item[2:]
+            enum_entry = 'kVulkanObjectType%s' % fixup_name
+            enum_entry_map[item] = enum_entry
+            object_types_header += '    ' + enum_entry
+            object_types_header += ' = %d,\n' % enum_num
+            enum_num += 1
+            type_list.append(enum_entry)
+        object_types_header += '    kVulkanObjectTypeMax = %d,\n' % enum_num
+        object_types_header += '    // Aliases for backwards compatibility of "promoted" types\n'
+        for (name, alias) in self.object_type_aliases:
+            fixup_name = name[2:]
+            object_types_header += '    kVulkanObjectType{} = {},\n'.format(fixup_name, enum_entry_map[alias])
+        object_types_header += '} VulkanObjectType;\n\n'
+
+        # Output name string helper
+        object_types_header += '// Array of object name strings for OBJECT_TYPE enum conversion\n'
+        object_types_header += 'static const char * const object_string[kVulkanObjectTypeMax] = {\n'
+        object_types_header += '    "Unknown",\n'
+        for item in self.object_types:
+            fixup_name = item[2:]
+            object_types_header += '    "%s",\n' % fixup_name
+        object_types_header += '};\n'
+
+        # Key creation helper for map comprehensions that convert between k<Name> and VK<Name> symbols
+        def to_key(regex, raw_key): return re.search(regex, raw_key).group(1).lower().replace("_","")
+
+        # Output a conversion routine from the layer object definitions to the debug report definitions
+        # As the VK_DEBUG_REPORT types are not being updated, specify UNKNOWN for unmatched types
+        object_types_header += '\n'
+        object_types_header += '// Helper array to get Vulkan VK_EXT_debug_report object type enum from the internal layers version\n'
+        object_types_header += 'const VkDebugReportObjectTypeEXT get_debug_report_enum[] = {\n'
+        object_types_header += '    VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, // kVulkanObjectTypeUnknown\n'
+
+        dbg_re = '^VK_DEBUG_REPORT_OBJECT_TYPE_(.*)_EXT$'
+        dbg_map = {to_key(dbg_re, dbg) : dbg for dbg in self.debug_report_object_types}
+        dbg_default = 'VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT'
+        for object_type in type_list:
+            vk_object_type = dbg_map.get(object_type.replace("kVulkanObjectType", "").lower(), dbg_default)
+            object_types_header += '    %s,   // %s\n' % (vk_object_type, object_type)
+        object_types_header += '};\n'
+
+        # Output a conversion routine from the layer object definitions to the core object type definitions
+        # This will intentionally *fail* for unmatched types as the VK_OBJECT_TYPE list should match the kVulkanObjectType list
+        object_types_header += '\n'
+        object_types_header += '// Helper array to get Official Vulkan VkObjectType enum from the internal layers version\n'
+        object_types_header += 'const VkObjectType get_object_type_enum[] = {\n'
+        object_types_header += '    VK_OBJECT_TYPE_UNKNOWN, // kVulkanObjectTypeUnknown\n'
+
+        vko_re = '^VK_OBJECT_TYPE_(.*)'
+        vko_map = {to_key(vko_re, vko) : vko for vko in self.core_object_types}
+        for object_type in type_list:
+            vk_object_type = vko_map[object_type.replace("kVulkanObjectType", "").lower()]
+            object_types_header += '    %s,   // %s\n' % (vk_object_type, object_type)
+        object_types_header += '};\n'
+
+        # Create a function to convert from VkDebugReportObjectTypeEXT to VkObjectType
+        object_types_header += '\n'
+        object_types_header += '// Helper function to convert from VkDebugReportObjectTypeEXT to VkObjectType\n'
+        object_types_header += 'static inline VkObjectType convertDebugReportObjectToCoreObject(VkDebugReportObjectTypeEXT debug_report_obj){\n'
+        object_types_header += '    if (debug_report_obj == VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT) {\n'
+        object_types_header += '        return VK_OBJECT_TYPE_UNKNOWN;\n'
+        for core_object_type in self.core_object_types:
+            core_target_type = core_object_type.replace("VK_OBJECT_TYPE_", "").lower()
+            core_target_type = core_target_type.replace("_", "")
+            for dr_object_type in self.debug_report_object_types:
+                dr_target_type = dr_object_type.replace("VK_DEBUG_REPORT_OBJECT_TYPE_", "").lower()
+                dr_target_type = dr_target_type[:-4]
+                dr_target_type = dr_target_type.replace("_", "")
+                if core_target_type == dr_target_type:
+                    object_types_header += '    } else if (debug_report_obj == %s) {\n' % dr_object_type
+                    object_types_header += '        return %s;\n' % core_object_type
+                    break
+        object_types_header += '    }\n'
+        object_types_header += '    return VK_OBJECT_TYPE_UNKNOWN;\n'
+        object_types_header += '}\n'
+
+        # Create a function to convert from VkObjectType to VkDebugReportObjectTypeEXT
+        object_types_header += '\n'
+        object_types_header += '// Helper function to convert from VkObjectType to VkDebugReportObjectTypeEXT\n'
+        object_types_header += 'static inline VkDebugReportObjectTypeEXT convertCoreObjectToDebugReportObject(VkObjectType core_report_obj){\n'
+        object_types_header += '    if (core_report_obj == VK_OBJECT_TYPE_UNKNOWN) {\n'
+        object_types_header += '        return VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT;\n'
+        for core_object_type in self.core_object_types:
+            core_target_type = core_object_type.replace("VK_OBJECT_TYPE_", "").lower()
+            core_target_type = core_target_type.replace("_", "")
+            for dr_object_type in self.debug_report_object_types:
+                dr_target_type = dr_object_type.replace("VK_DEBUG_REPORT_OBJECT_TYPE_", "").lower()
+                dr_target_type = dr_target_type[:-4]
+                dr_target_type = dr_target_type.replace("_", "")
+                if core_target_type == dr_target_type:
+                    object_types_header += '    } else if (core_report_obj == %s) {\n' % core_object_type
+                    object_types_header += '        return %s;\n' % dr_object_type
+                    break
+        object_types_header += '    }\n'
+        object_types_header += '    return VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT;\n'
+        object_types_header += '}\n'
+        return object_types_header
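+    #
+    # For illustration only (exact numbering follows self.object_types): the enum
+    # and lookup tables emitted above take roughly this shape:
+    #
+    #   typedef enum VulkanObjectType {
+    #       kVulkanObjectTypeUnknown = 0,
+    #       kVulkanObjectTypeInstance = 1,
+    #       ...
+    #       kVulkanObjectTypeMax = <handle count + 1>,
+    #   } VulkanObjectType;
+    #
+    # followed by object_string[], get_debug_report_enum[], get_object_type_enum[]
+    # and the two inline conversion functions between VkObjectType and
+    # VkDebugReportObjectTypeEXT.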
+    #
+    # Determine if a structure needs a safe_struct helper function
+    # That is, it has an sType or one of its members is a pointer
+    def NeedSafeStruct(self, structure):
+        if 'sType' == structure.name:
+            return True
+        for member in structure.members:
+            if member.ispointer == True:
+                return True
+        return False
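+    #
+    # Note: no real Vulkan structure is literally named 'sType', so the name check
+    # above is effectively inert; structures carrying an sType still qualify because
+    # their pNext member is a pointer and is caught by the ispointer check.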
+    #
+    # Combine safe struct helper source file preamble with body text and return
+    def GenerateSafeStructHelperSource(self):
+        safe_struct_helper_source = '\n'
+        safe_struct_helper_source += '#include "vk_safe_struct.h"\n'
+        safe_struct_helper_source += '#include <string.h>\n'
+        safe_struct_helper_source += '#ifdef VK_USE_PLATFORM_ANDROID_KHR\n'
+        safe_struct_helper_source += '#if __ANDROID_API__ < __ANDROID_API_O__\n'
+        safe_struct_helper_source += 'struct AHardwareBuffer {};\n'
+        safe_struct_helper_source += '#endif\n'
+        safe_struct_helper_source += '#endif\n'
+
+        safe_struct_helper_source += '\n'
+        safe_struct_helper_source += self.GenerateSafeStructSource()
+        return safe_struct_helper_source
+    #
+    # safe_struct source -- create bodies of safe struct helper functions
+    def GenerateSafeStructSource(self):
+        safe_struct_body = []
+        wsi_structs = ['VkXlibSurfaceCreateInfoKHR',
+                       'VkXcbSurfaceCreateInfoKHR',
+                       'VkWaylandSurfaceCreateInfoKHR',
+                       'VkMirSurfaceCreateInfoKHR',
+                       'VkAndroidSurfaceCreateInfoKHR',
+                       'VkWin32SurfaceCreateInfoKHR'
+                       ]
+        for item in self.structMembers:
+            if self.NeedSafeStruct(item) == False:
+                continue
+            if item.name in wsi_structs:
+                continue
+            if item.ifdef_protect != None:
+                safe_struct_body.append("#ifdef %s\n" % item.ifdef_protect)
+            ss_name = "safe_%s" % item.name
+            init_list = ''          # list of members in struct constructor initializer
+            default_init_list = ''  # Default constructor just inits ptrs to nullptr in initializer
+            init_func_txt = ''      # Txt for initialize() function that takes struct ptr and inits members
+            construct_txt = ''      # Body of constructor as well as body of initialize() func following init_func_txt
+            destruct_txt = ''
+
+            custom_construct_txt = {
+                # VkWriteDescriptorSet is special case because pointers may be non-null but ignored
+                'VkWriteDescriptorSet' :
+                    '    switch (descriptorType) {\n'
+                    '        case VK_DESCRIPTOR_TYPE_SAMPLER:\n'
+                    '        case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:\n'
+                    '        case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:\n'
+                    '        case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:\n'
+                    '        case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:\n'
+                    '        if (descriptorCount && in_struct->pImageInfo) {\n'
+                    '            pImageInfo = new VkDescriptorImageInfo[descriptorCount];\n'
+                    '            for (uint32_t i=0; i<descriptorCount; ++i) {\n'
+                    '                pImageInfo[i] = in_struct->pImageInfo[i];\n'
+                    '            }\n'
+                    '        }\n'
+                    '        break;\n'
+                    '        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:\n'
+                    '        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:\n'
+                    '        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:\n'
+                    '        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:\n'
+                    '        if (descriptorCount && in_struct->pBufferInfo) {\n'
+                    '            pBufferInfo = new VkDescriptorBufferInfo[descriptorCount];\n'
+                    '            for (uint32_t i=0; i<descriptorCount; ++i) {\n'
+                    '                pBufferInfo[i] = in_struct->pBufferInfo[i];\n'
+                    '            }\n'
+                    '        }\n'
+                    '        break;\n'
+                    '        case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:\n'
+                    '        case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:\n'
+                    '        if (descriptorCount && in_struct->pTexelBufferView) {\n'
+                    '            pTexelBufferView = new VkBufferView[descriptorCount];\n'
+                    '            for (uint32_t i=0; i<descriptorCount; ++i) {\n'
+                    '                pTexelBufferView[i] = in_struct->pTexelBufferView[i];\n'
+                    '            }\n'
+                    '        }\n'
+                    '        break;\n'
+                    '        default:\n'
+                    '        break;\n'
+                    '    }\n',
+                'VkShaderModuleCreateInfo' :
+                    '    if (in_struct->pCode) {\n'
+                    '        pCode = reinterpret_cast<uint32_t *>(new uint8_t[codeSize]);\n'
+                    '        memcpy((void *)pCode, (void *)in_struct->pCode, codeSize);\n'
+                    '    }\n',
+                # VkGraphicsPipelineCreateInfo is special case because its pointers may be non-null but ignored
+                'VkGraphicsPipelineCreateInfo' :
+                    '    if (stageCount && in_struct->pStages) {\n'
+                    '        pStages = new safe_VkPipelineShaderStageCreateInfo[stageCount];\n'
+                    '        for (uint32_t i=0; i<stageCount; ++i) {\n'
+                    '            pStages[i].initialize(&in_struct->pStages[i]);\n'
+                    '        }\n'
+                    '    }\n'
+                    '    if (in_struct->pVertexInputState)\n'
+                    '        pVertexInputState = new safe_VkPipelineVertexInputStateCreateInfo(in_struct->pVertexInputState);\n'
+                    '    else\n'
+                    '        pVertexInputState = NULL;\n'
+                    '    if (in_struct->pInputAssemblyState)\n'
+                    '        pInputAssemblyState = new safe_VkPipelineInputAssemblyStateCreateInfo(in_struct->pInputAssemblyState);\n'
+                    '    else\n'
+                    '        pInputAssemblyState = NULL;\n'
+                    '    bool has_tessellation_stage = false;\n'
+                    '    if (stageCount && pStages)\n'
+                    '        for (uint32_t i=0; i<stageCount && !has_tessellation_stage; ++i)\n'
+                    '            if (pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT || pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)\n'
+                    '                has_tessellation_stage = true;\n'
+                    '    if (in_struct->pTessellationState && has_tessellation_stage)\n'
+                    '        pTessellationState = new safe_VkPipelineTessellationStateCreateInfo(in_struct->pTessellationState);\n'
+                    '    else\n'
+                    '        pTessellationState = NULL; // original pTessellationState pointer ignored\n'
+                    '    bool has_rasterization = in_struct->pRasterizationState ? !in_struct->pRasterizationState->rasterizerDiscardEnable : false;\n'
+                    '    if (in_struct->pViewportState && has_rasterization) {\n'
+                    '        bool is_dynamic_viewports = false;\n'
+                    '        bool is_dynamic_scissors = false;\n'
+                    '        if (in_struct->pDynamicState && in_struct->pDynamicState->pDynamicStates) {\n'
+                    '            for (uint32_t i = 0; i < in_struct->pDynamicState->dynamicStateCount && !is_dynamic_viewports; ++i)\n'
+                    '                if (in_struct->pDynamicState->pDynamicStates[i] == VK_DYNAMIC_STATE_VIEWPORT)\n'
+                    '                    is_dynamic_viewports = true;\n'
+                    '            for (uint32_t i = 0; i < in_struct->pDynamicState->dynamicStateCount && !is_dynamic_scissors; ++i)\n'
+                    '                if (in_struct->pDynamicState->pDynamicStates[i] == VK_DYNAMIC_STATE_SCISSOR)\n'
+                    '                    is_dynamic_scissors = true;\n'
+                    '        }\n'
+                    '        pViewportState = new safe_VkPipelineViewportStateCreateInfo(in_struct->pViewportState, is_dynamic_viewports, is_dynamic_scissors);\n'
+                    '    } else\n'
+                    '        pViewportState = NULL; // original pViewportState pointer ignored\n'
+                    '    if (in_struct->pRasterizationState)\n'
+                    '        pRasterizationState = new safe_VkPipelineRasterizationStateCreateInfo(in_struct->pRasterizationState);\n'
+                    '    else\n'
+                    '        pRasterizationState = NULL;\n'
+                    '    if (in_struct->pMultisampleState && has_rasterization)\n'
+                    '        pMultisampleState = new safe_VkPipelineMultisampleStateCreateInfo(in_struct->pMultisampleState);\n'
+                    '    else\n'
+                    '        pMultisampleState = NULL; // original pMultisampleState pointer ignored\n'
+                    '    // needs a tracked subpass state uses_depthstencil_attachment\n'
+                    '    if (in_struct->pDepthStencilState && has_rasterization && uses_depthstencil_attachment)\n'
+                    '        pDepthStencilState = new safe_VkPipelineDepthStencilStateCreateInfo(in_struct->pDepthStencilState);\n'
+                    '    else\n'
+                    '        pDepthStencilState = NULL; // original pDepthStencilState pointer ignored\n'
+                    '    // needs a tracked subpass state usesColorAttachment\n'
+                    '    if (in_struct->pColorBlendState && has_rasterization && uses_color_attachment)\n'
+                    '        pColorBlendState = new safe_VkPipelineColorBlendStateCreateInfo(in_struct->pColorBlendState);\n'
+                    '    else\n'
+                    '        pColorBlendState = NULL; // original pColorBlendState pointer ignored\n'
+                    '    if (in_struct->pDynamicState)\n'
+                    '        pDynamicState = new safe_VkPipelineDynamicStateCreateInfo(in_struct->pDynamicState);\n'
+                    '    else\n'
+                    '        pDynamicState = NULL;\n',
+                 # VkPipelineViewportStateCreateInfo is special case because its pointers may be non-null but ignored
+                'VkPipelineViewportStateCreateInfo' :
+                    '    if (in_struct->pViewports && !is_dynamic_viewports) {\n'
+                    '        pViewports = new VkViewport[in_struct->viewportCount];\n'
+                    '        memcpy ((void *)pViewports, (void *)in_struct->pViewports, sizeof(VkViewport)*in_struct->viewportCount);\n'
+                    '    }\n'
+                    '    else\n'
+                    '        pViewports = NULL;\n'
+                    '    if (in_struct->pScissors && !is_dynamic_scissors) {\n'
+                    '        pScissors = new VkRect2D[in_struct->scissorCount];\n'
+                    '        memcpy ((void *)pScissors, (void *)in_struct->pScissors, sizeof(VkRect2D)*in_struct->scissorCount);\n'
+                    '    }\n'
+                    '    else\n'
+                    '        pScissors = NULL;\n',
+                # VkDescriptorSetLayoutBinding is special case because its pImmutableSamplers pointer may be non-null but ignored
+                'VkDescriptorSetLayoutBinding' :
+                    '    const bool sampler_type = in_struct->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER || in_struct->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;\n'
+                    '    if (descriptorCount && in_struct->pImmutableSamplers && sampler_type) {\n'
+                    '        pImmutableSamplers = new VkSampler[descriptorCount];\n'
+                    '        for (uint32_t i=0; i<descriptorCount; ++i) {\n'
+                    '            pImmutableSamplers[i] = in_struct->pImmutableSamplers[i];\n'
+                    '        }\n'
+                    '    }\n',
+            }
+
+            custom_copy_txt = {
+                # VkGraphicsPipelineCreateInfo is special case because it has custom construct parameters
+                'VkGraphicsPipelineCreateInfo' :
+                    '    if (stageCount && src.pStages) {\n'
+                    '        pStages = new safe_VkPipelineShaderStageCreateInfo[stageCount];\n'
+                    '        for (uint32_t i=0; i<stageCount; ++i) {\n'
+                    '            pStages[i].initialize(&src.pStages[i]);\n'
+                    '        }\n'
+                    '    }\n'
+                    '    if (src.pVertexInputState)\n'
+                    '        pVertexInputState = new safe_VkPipelineVertexInputStateCreateInfo(*src.pVertexInputState);\n'
+                    '    else\n'
+                    '        pVertexInputState = NULL;\n'
+                    '    if (src.pInputAssemblyState)\n'
+                    '        pInputAssemblyState = new safe_VkPipelineInputAssemblyStateCreateInfo(*src.pInputAssemblyState);\n'
+                    '    else\n'
+                    '        pInputAssemblyState = NULL;\n'
+                    '    bool has_tessellation_stage = false;\n'
+                    '    if (stageCount && pStages)\n'
+                    '        for (uint32_t i=0; i<stageCount && !has_tessellation_stage; ++i)\n'
+                    '            if (pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT || pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)\n'
+                    '                has_tessellation_stage = true;\n'
+                    '    if (src.pTessellationState && has_tessellation_stage)\n'
+                    '        pTessellationState = new safe_VkPipelineTessellationStateCreateInfo(*src.pTessellationState);\n'
+                    '    else\n'
+                    '        pTessellationState = NULL; // original pTessellationState pointer ignored\n'
+                    '    bool has_rasterization = src.pRasterizationState ? !src.pRasterizationState->rasterizerDiscardEnable : false;\n'
+                    '    if (src.pViewportState && has_rasterization) {\n'
+                    '        pViewportState = new safe_VkPipelineViewportStateCreateInfo(*src.pViewportState);\n'
+                    '    } else\n'
+                    '        pViewportState = NULL; // original pViewportState pointer ignored\n'
+                    '    if (src.pRasterizationState)\n'
+                    '        pRasterizationState = new safe_VkPipelineRasterizationStateCreateInfo(*src.pRasterizationState);\n'
+                    '    else\n'
+                    '        pRasterizationState = NULL;\n'
+                    '    if (src.pMultisampleState && has_rasterization)\n'
+                    '        pMultisampleState = new safe_VkPipelineMultisampleStateCreateInfo(*src.pMultisampleState);\n'
+                    '    else\n'
+                    '        pMultisampleState = NULL; // original pMultisampleState pointer ignored\n'
+                    '    if (src.pDepthStencilState && has_rasterization)\n'
+                    '        pDepthStencilState = new safe_VkPipelineDepthStencilStateCreateInfo(*src.pDepthStencilState);\n'
+                    '    else\n'
+                    '        pDepthStencilState = NULL; // original pDepthStencilState pointer ignored\n'
+                    '    if (src.pColorBlendState && has_rasterization)\n'
+                    '        pColorBlendState = new safe_VkPipelineColorBlendStateCreateInfo(*src.pColorBlendState);\n'
+                    '    else\n'
+                    '        pColorBlendState = NULL; // original pColorBlendState pointer ignored\n'
+                    '    if (src.pDynamicState)\n'
+                    '        pDynamicState = new safe_VkPipelineDynamicStateCreateInfo(*src.pDynamicState);\n'
+                    '    else\n'
+                    '        pDynamicState = NULL;\n',
+                 # VkPipelineViewportStateCreateInfo is special case because it has custom construct parameters
+                'VkPipelineViewportStateCreateInfo' :
+                    '    if (src.pViewports) {\n'
+                    '        pViewports = new VkViewport[src.viewportCount];\n'
+                    '        memcpy ((void *)pViewports, (void *)src.pViewports, sizeof(VkViewport)*src.viewportCount);\n'
+                    '    }\n'
+                    '    else\n'
+                    '        pViewports = NULL;\n'
+                    '    if (src.pScissors) {\n'
+                    '        pScissors = new VkRect2D[src.scissorCount];\n'
+                    '        memcpy ((void *)pScissors, (void *)src.pScissors, sizeof(VkRect2D)*src.scissorCount);\n'
+                    '    }\n'
+                    '    else\n'
+                    '        pScissors = NULL;\n',
+            }
+
+            custom_destruct_txt = {'VkShaderModuleCreateInfo' :
+                                   '    if (pCode)\n'
+                                   '        delete[] reinterpret_cast<const uint8_t *>(pCode);\n' }
+
+            for member in item.members:
+                m_type = member.type
+                if member.type in self.structNames:
+                    member_index = next((i for i, v in enumerate(self.structMembers) if v[0] == member.type), None)
+                    if member_index is not None and self.NeedSafeStruct(self.structMembers[member_index]) == True:
+                        m_type = 'safe_%s' % member.type
+                if member.ispointer and 'safe_' not in m_type and self.TypeContainsObjectHandle(member.type, False) == False:
+                    # Ptr types w/o a safe_struct, for non-null case need to allocate new ptr and copy data in
+                    if m_type in ['void', 'char']:
+                        # For these exceptions just copy initial value over for now
+                        init_list += '\n    %s(in_struct->%s),' % (member.name, member.name)
+                        init_func_txt += '    %s = in_struct->%s;\n' % (member.name, member.name)
+                    else:
+                        default_init_list += '\n    %s(nullptr),' % (member.name)
+                        init_list += '\n    %s(nullptr),' % (member.name)
+                        init_func_txt += '    %s = nullptr;\n' % (member.name)
+                        if 'pNext' != member.name and 'void' not in m_type:
+                            if not member.isstaticarray and (member.len is None or '/' in member.len):
+                                construct_txt += '    if (in_struct->%s) {\n' % member.name
+                                construct_txt += '        %s = new %s(*in_struct->%s);\n' % (member.name, m_type, member.name)
+                                construct_txt += '    }\n'
+                                destruct_txt += '    if (%s)\n' % member.name
+                                destruct_txt += '        delete %s;\n' % member.name
+                            else:
+                                construct_txt += '    if (in_struct->%s) {\n' % member.name
+                                construct_txt += '        %s = new %s[in_struct->%s];\n' % (member.name, m_type, member.len)
+                                construct_txt += '        memcpy ((void *)%s, (void *)in_struct->%s, sizeof(%s)*in_struct->%s);\n' % (member.name, member.name, m_type, member.len)
+                                construct_txt += '    }\n'
+                                destruct_txt += '    if (%s)\n' % member.name
+                                destruct_txt += '        delete[] %s;\n' % member.name
+                elif member.isstaticarray or member.len is not None:
+                    if member.len is None:
+                        # Extract length of static array by grabbing val between []
+                        static_array_size = re.match(r"[^[]*\[([^]]*)\]", member.cdecl)
+                        construct_txt += '    for (uint32_t i=0; i<%s; ++i) {\n' % static_array_size.group(1)
+                        construct_txt += '        %s[i] = in_struct->%s[i];\n' % (member.name, member.name)
+                        construct_txt += '    }\n'
+                    else:
+                        # Init array ptr to NULL
+                        default_init_list += '\n    %s(nullptr),' % member.name
+                        init_list += '\n    %s(nullptr),' % member.name
+                        init_func_txt += '    %s = nullptr;\n' % member.name
+                        array_element = 'in_struct->%s[i]' % member.name
+                        if member.type in self.structNames:
+                            member_index = next((i for i, v in enumerate(self.structMembers) if v[0] == member.type), None)
+                            if member_index is not None and self.NeedSafeStruct(self.structMembers[member_index]) == True:
+                                array_element = '%s(&in_struct->safe_%s[i])' % (member.type, member.name)
+                        construct_txt += '    if (%s && in_struct->%s) {\n' % (member.len, member.name)
+                        construct_txt += '        %s = new %s[%s];\n' % (member.name, m_type, member.len)
+                        destruct_txt += '    if (%s)\n' % member.name
+                        destruct_txt += '        delete[] %s;\n' % member.name
+                        construct_txt += '        for (uint32_t i=0; i<%s; ++i) {\n' % (member.len)
+                        if 'safe_' in m_type:
+                            construct_txt += '            %s[i].initialize(&in_struct->%s[i]);\n' % (member.name, member.name)
+                        else:
+                            construct_txt += '            %s[i] = %s;\n' % (member.name, array_element)
+                        construct_txt += '        }\n'
+                        construct_txt += '    }\n'
+                elif member.ispointer == True:
+                    construct_txt += '    if (in_struct->%s)\n' % member.name
+                    construct_txt += '        %s = new %s(in_struct->%s);\n' % (member.name, m_type, member.name)
+                    construct_txt += '    else\n'
+                    construct_txt += '        %s = NULL;\n' % member.name
+                    destruct_txt += '    if (%s)\n' % member.name
+                    destruct_txt += '        delete %s;\n' % member.name
+                elif 'safe_' in m_type:
+                    init_list += '\n    %s(&in_struct->%s),' % (member.name, member.name)
+                    init_func_txt += '    %s.initialize(&in_struct->%s);\n' % (member.name, member.name)
+                else:
+                    init_list += '\n    %s(in_struct->%s),' % (member.name, member.name)
+                    init_func_txt += '    %s = in_struct->%s;\n' % (member.name, member.name)
+            if '' != init_list:
+                init_list = init_list[:-1] # hack off final comma
+            if item.name in custom_construct_txt:
+                construct_txt = custom_construct_txt[item.name]
+            if item.name in custom_destruct_txt:
+                destruct_txt = custom_destruct_txt[item.name]
+            safe_struct_body.append("\n%s::%s(const %s* in_struct%s) :%s\n{\n%s}" % (ss_name, ss_name, item.name, self.custom_construct_params.get(item.name, ''), init_list, construct_txt))
+            if '' != default_init_list:
+                default_init_list = " :%s" % (default_init_list[:-1])
+            safe_struct_body.append("\n%s::%s()%s\n{}" % (ss_name, ss_name, default_init_list))
+            # Create slight variation of init and construct txt for copy constructor that takes a src object reference vs. struct ptr
+            copy_construct_init = init_func_txt.replace('in_struct->', 'src.')
+            copy_construct_txt = construct_txt.replace(' (in_struct->', ' (src.')     # Exclude 'if' blocks from next line
+            copy_construct_txt = copy_construct_txt.replace('(in_struct->', '(*src.') # Pass object to copy constructors
+            copy_construct_txt = copy_construct_txt.replace('in_struct->', 'src.')    # Modify remaining struct refs for src object
+            if item.name in custom_copy_txt:
+                copy_construct_txt = custom_copy_txt[item.name]
+            copy_assign_txt = '    if (&src == this) return *this;\n\n' + destruct_txt + '\n' + copy_construct_init + copy_construct_txt + '\n    return *this;'
+            safe_struct_body.append("\n%s::%s(const %s& src)\n{\n%s%s}" % (ss_name, ss_name, ss_name, copy_construct_init, copy_construct_txt)) # Copy constructor
+            safe_struct_body.append("\n%s& %s::operator=(const %s& src)\n{\n%s\n}" % (ss_name, ss_name, ss_name, copy_assign_txt)) # Copy assignment operator
+            safe_struct_body.append("\n%s::~%s()\n{\n%s}" % (ss_name, ss_name, destruct_txt))
+            safe_struct_body.append("\nvoid %s::initialize(const %s* in_struct%s)\n{\n%s%s}" % (ss_name, item.name, self.custom_construct_params.get(item.name, ''), init_func_txt, construct_txt))
+            # Copy initializer uses same txt as copy constructor but has a ptr and not a reference
+            init_copy = copy_construct_init.replace('src.', 'src->')
+            init_construct = copy_construct_txt.replace('src.', 'src->')
+            safe_struct_body.append("\nvoid %s::initialize(const %s* src)\n{\n%s%s}" % (ss_name, ss_name, init_copy, init_construct))
+            if item.ifdef_protect != None:
+                safe_struct_body.append("#endif // %s\n" % item.ifdef_protect)
+        return "\n".join(safe_struct_body)
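+    #
+    # For illustration only: for a hypothetical VkExampleCreateInfo whose members are
+    # (sType, pNext, itemCount, const uint32_t* pItems) with len="itemCount", the
+    # fragments assembled above produce roughly:
+    #
+    #   safe_VkExampleCreateInfo::safe_VkExampleCreateInfo(const VkExampleCreateInfo* in_struct) :
+    #       sType(in_struct->sType),
+    #       pNext(in_struct->pNext),
+    #       itemCount(in_struct->itemCount),
+    #       pItems(nullptr)
+    #   {
+    #       if (in_struct->pItems) {
+    #           pItems = new uint32_t[in_struct->itemCount];
+    #           memcpy ((void *)pItems, (void *)in_struct->pItems, sizeof(uint32_t)*in_struct->itemCount);
+    #       }
+    #   }
+    #
+    #   safe_VkExampleCreateInfo::~safe_VkExampleCreateInfo()
+    #   {
+    #       if (pItems)
+    #           delete[] pItems;
+    #   }
+    #
+    # plus the default constructor, copy constructor, copy assignment and the two
+    # initialize() overloads built from the same text.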
+    #
+    # Generate the type map
+    def GenerateTypeMapHelperHeader(self):
+        prefix = 'Lvl'
+        fprefix = 'lvl_'
+        typemap = prefix + 'TypeMap'
+        idmap = prefix + 'STypeMap'
+        type_member = 'Type'
+        id_member = 'kSType'
+        id_decl = 'static const VkStructureType '
+        generic_header = prefix + 'GenericHeader'
+        typename_func = fprefix + 'typename'
+        idname_func = fprefix + 'stype_name'
+        find_func = fprefix + 'find_in_chain'
+        init_func = fprefix + 'init_struct'
+
+        explanatory_comment = '\n'.join((
+                '// These empty generic templates are specialized for each type with sType',
+                '// members and for each sType -- providing a two way map between structure',
+                '// types and sTypes'))
+
+        empty_typemap = 'template <typename T> struct ' + typemap + ' {};'
+        typemap_format  = 'template <> struct {template}<{typename}> {{\n'
+        typemap_format += '    {id_decl}{id_member} = {id_value};\n'
+        typemap_format += '}};\n'
+
+        empty_idmap = 'template <VkStructureType id> struct ' + idmap + ' {};'
+        idmap_format = ''.join((
+            'template <> struct {template}<{id_value}> {{\n',
+            '    typedef {typename} {typedef};\n',
+            '}};\n'))
+
+        # Define the utilities here (so any renaming stays consistent); if this grows large, refactor to a fixed .h file
+        utilities_format = '\n'.join((
+            '// Header "base class" for pNext chain traversal',
+            'struct {header} {{',
+            '   VkStructureType sType;',
+            '   const {header} *pNext;',
+            '}};',
+            '',
+            '// Find an entry of the given type in the pNext chain',
+            'template <typename T> const T *{find_func}(const void *next) {{',
+            '    const {header} *current = reinterpret_cast<const {header} *>(next);',
+            '    const T *found = nullptr;',
+            '    while (current) {{',
+            '        if ({type_map}<T>::{id_member} == current->sType) {{',
+            '            found = reinterpret_cast<const T*>(current);',
+            '            current = nullptr;',
+            '        }} else {{',
+            '            current = current->pNext;',
+            '        }}',
+            '    }}',
+            '    return found;',
+            '}}',
+            '',
+            '// Init the header of an sType struct with pNext',
+            'template <typename T> T {init_func}(void *p_next) {{',
+            '    T out = {{}};',
+            '    out.sType = {type_map}<T>::kSType;',
+            '    out.pNext = p_next;',
+            '    return out;',
+            '}}',
+            '',
+            '// Init the header of an sType struct',
+            'template <typename T> T {init_func}() {{',
+            '    T out = {{}};',
+            '    out.sType = {type_map}<T>::kSType;',
+            '    return out;',
+            '}}',
+            ''))
+
+        code = []
+
+        # Generate header
+        code.append('\n'.join((
+            '#pragma once',
+            '#include <vulkan/vulkan.h>\n',
+            explanatory_comment, '',
+            empty_idmap,
+            empty_typemap, '')))
+
+        # Generate the specializations for each type and stype
+        for item in self.structMembers:
+            typename = item.name
+            info = self.structTypes.get(typename)
+            if not info:
+                continue
+
+            if item.ifdef_protect != None:
+                code.append('#ifdef %s' % item.ifdef_protect)
+
+            code.append('// Map type {} to id {}'.format(typename, info.value))
+            code.append(typemap_format.format(template=typemap, typename=typename, id_value=info.value,
+                id_decl=id_decl, id_member=id_member))
+            code.append(idmap_format.format(template=idmap, typename=typename, id_value=info.value, typedef=type_member))
+
+            if item.ifdef_protect != None:
+                code.append('#endif // %s' % item.ifdef_protect)
+
+        # Generate utilities for all types
+        code.append('\n'.join((
+            utilities_format.format(id_member=id_member, id_map=idmap, type_map=typemap,
+                type_member=type_member, header=generic_header, typename_func=typename_func, idname_func=idname_func,
+                find_func=find_func, init_func=init_func), ''
+            )))
+
+        return "\n".join(code)
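+    #
+    # For illustration only (hypothetical names): for a structure VkExampleFeatures
+    # whose sType is VK_STRUCTURE_TYPE_EXAMPLE_FEATURES, the loop above emits
+    #
+    #   // Map type VkExampleFeatures to id VK_STRUCTURE_TYPE_EXAMPLE_FEATURES
+    #   template <> struct LvlTypeMap<VkExampleFeatures> {
+    #       static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXAMPLE_FEATURES;
+    #   };
+    #   template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXAMPLE_FEATURES> {
+    #       typedef VkExampleFeatures Type;
+    #   };
+    #
+    # so a consumer can write, e.g.,
+    #   const VkExampleFeatures *f = lvl_find_in_chain<VkExampleFeatures>(pCreateInfo->pNext);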
+
+    #
+    # Create a helper file and return it as a string
+    def OutputDestFile(self):
+        if self.helper_file_type == 'enum_string_header':
+            return self.GenerateEnumStringHelperHeader()
+        elif self.helper_file_type == 'safe_struct_header':
+            return self.GenerateSafeStructHelperHeader()
+        elif self.helper_file_type == 'safe_struct_source':
+            return self.GenerateSafeStructHelperSource()
+        elif self.helper_file_type == 'object_types_header':
+            return self.GenerateObjectTypesHelperHeader()
+        elif self.helper_file_type == 'extension_helper_header':
+            return self.GenerateExtensionHelperHeader()
+        elif self.helper_file_type == 'typemap_helper_header':
+            return self.GenerateTypeMapHelperHeader()
+        else:
+            return 'Bad Helper File Generator Option %s' % self.helper_file_type
diff --git a/src/third_party/vulkan-tools/src/scripts/vulkaninfo_generator.py b/src/third_party/vulkan-tools/src/scripts/vulkaninfo_generator.py
new file mode 100644
index 0000000..aabfac9
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/scripts/vulkaninfo_generator.py
@@ -0,0 +1,897 @@
+#!/usr/bin/python3
+#
+# Copyright (c) 2019 Valve Corporation
+# Copyright (c) 2019 LunarG, Inc.
+# Copyright (c) 2019 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Charles Giessen <charles@lunarg.com>
+
+import os
+import re
+import sys
+import string
+import xml.etree.ElementTree as etree
+import generator as gen
+import operator
+from collections import namedtuple
+from collections import OrderedDict
+from generator import *
+from common_codegen import *
+
+license_header = '''
+/*
+ * Copyright (c) 2019 The Khronos Group Inc.
+ * Copyright (c) 2019 Valve Corporation
+ * Copyright (c) 2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Charles Giessen <charles@lunarg.com>
+ *
+ */
+
+/*
+ * This file is generated from the Khronos Vulkan XML API Registry.
+ */
+'''
+
+custom_formaters = '''
+std::ostream &operator<<(std::ostream &o, VkConformanceVersionKHR &c) {
+    return o << std::to_string(c.major) << "." << std::to_string(c.minor) << "." << std::to_string(c.subminor) << "."
+             << std::to_string(c.patch);
+}
+
+std::string VkExtent3DString(VkExtent3D e) {
+    return std::string("(") + std::to_string(e.width) + ", " + std::to_string(e.height) + ", " + std::to_string(e.depth) + ")";
+}
+
+template <typename T>
+std::string to_hex_str(T i) {
+    std::stringstream stream;
+    stream << "0x" << std::setfill('0') << std::setw(sizeof(T)) << std::hex << i;
+    return stream.str();
+}
+
+template <typename T>
+std::string to_hex_str(Printer &p, T i) {
+    if (p.Type() == OutputType::json)
+        return std::to_string(i);
+    else
+        return to_hex_str(i);
+}
+'''
+
+# used in the .cpp code
+structures_to_gen = ['VkExtent3D', 'VkExtent2D', 'VkPhysicalDeviceLimits', 'VkPhysicalDeviceFeatures',
+                     'VkPhysicalDeviceSparseProperties', 'VkSurfaceCapabilitiesKHR', 'VkSurfaceFormatKHR', 'VkLayerProperties']
+enums_to_gen = ['VkResult', 'VkFormat', 'VkPresentModeKHR',
+                'VkPhysicalDeviceType', 'VkImageTiling']
+flags_to_gen = ['VkSurfaceTransformFlagsKHR', 'VkCompositeAlphaFlagsKHR',
+                'VkDeviceGroupPresentModeFlagsKHR', 'VkFormatFeatureFlags', 'VkMemoryPropertyFlags', 'VkMemoryHeapFlags']
+flags_strings_to_gen = ['VkQueueFlags']
+
+struct_comparisons_to_gen = ['VkSurfaceFormatKHR', 'VkSurfaceFormat2KHR', 'VkSurfaceCapabilitiesKHR',
+                             'VkSurfaceCapabilities2KHR', 'VkSurfaceCapabilities2EXT']
+
+# iostream or custom outputter handles these types
+predefined_types = ['char', 'VkBool32', 'uint32_t', 'uint8_t', 'int32_t',
+                    'float', 'uint64_t', 'size_t', 'VkDeviceSize', 'VkConformanceVersionKHR']
+# need a list of vendors to blacklist vendor extensions
+vendor_abbreviations = ['_IMG', '_AMD', '_AMDX', '_ARM', '_FSL', '_BRCM', '_NXP', '_NV', '_NVX', '_VIV', '_VSI', '_KDAB',
+                        '_ANDROID', '_CHROMIUM', '_FUCHSIA', '_GGP', '_GOOGLE', '_QCOM', '_LUNARG', '_SAMSUNG', '_SEC', '_TIZEN',
+                        '_RENDERDOC', '_NN', '_MVK', '_KHX', '_MESA', '_INTEL']
+
+# Types that need pNext Chains built. 'extends' is the xml tag used in the structextends member. 'type' can be device, instance, or both
+EXTENSION_CATEGORIES = {'phys_device_props2': {'extends': 'VkPhysicalDeviceProperties2', 'type': 'device'},
+                        'phys_device_mem_props2': {'extends': 'VkPhysicalDeviceMemoryProperties2', 'type': 'device'},
+                        'phys_device_features2': {'extends': 'VkPhysicalDeviceFeatures2,VkDeviceCreateInfo', 'type': 'device'},
+                        'surface_capabilities2': {'extends': 'VkSurfaceCapabilities2KHR', 'type': 'both'},
+                        'format_properties2': {'extends': 'VkFormatProperties2', 'type': 'device'}
+                        }
+
+
+class VulkanInfoGeneratorOptions(GeneratorOptions):
+    def __init__(self,
+                 conventions=None,
+                 input=None,
+                 filename=None,
+                 directory='.',
+                 apiname=None,
+                 profile=None,
+                 versions='.*',
+                 emitversions='.*',
+                 defaultExtensions=None,
+                 addExtensions=None,
+                 removeExtensions=None,
+                 emitExtensions=None,
+                 sortProcedure=None,
+                 prefixText="",
+                 genFuncPointers=True,
+                 protectFile=True,
+                 protectFeature=True,
+                 protectProto=None,
+                 protectProtoStr=None,
+                 apicall='',
+                 apientry='',
+                 apientryp='',
+                 indentFuncProto=True,
+                 indentFuncPointer=False,
+                 alignFuncParam=0,
+                 expandEnumerants=True,
+                 ):
+        GeneratorOptions.__init__(self, conventions, filename, directory, apiname, profile,
+                                  versions, emitversions, defaultExtensions,
+                                  addExtensions, removeExtensions, emitExtensions, sortProcedure)
+        self.input = input
+        self.prefixText = prefixText
+        self.genFuncPointers = genFuncPointers
+        self.protectFile = protectFile
+        self.protectFeature = protectFeature
+        self.protectProto = protectProto
+        self.protectProtoStr = protectProtoStr
+        self.apicall = apicall
+        self.apientry = apientry
+        self.apientryp = apientryp
+        self.indentFuncProto = indentFuncProto
+        self.indentFuncPointer = indentFuncPointer
+        self.alignFuncParam = alignFuncParam
+
+# VulkanInfoGenerator - subclass of OutputGenerator.
+# Generates a vulkan info output helper function
+
+
+class VulkanInfoGenerator(OutputGenerator):
+
+    def __init__(self,
+                 errFile=sys.stderr,
+                 warnFile=sys.stderr,
+                 diagFile=sys.stdout):
+        OutputGenerator.__init__(self, errFile, warnFile, diagFile)
+
+        self.constants = OrderedDict()
+
+        self.extension_sets = OrderedDict()
+        for ext_cat in EXTENSION_CATEGORIES.keys():
+            self.extension_sets[ext_cat] = set()
+
+        self.enums = set()
+        self.flags = set()
+        self.bitmasks = set()
+        self.structures = set()
+        self.structs_to_comp = set()
+        self.all_structures = set()
+
+        self.types_to_gen = set()
+
+        self.extFuncs = OrderedDict()
+        self.extTypes = OrderedDict()
+
+    def beginFile(self, genOpts):
+        gen.OutputGenerator.beginFile(self, genOpts)
+
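+        # Record the registry's API constants and map each Vulkan type and command to the
+        # extension that introduces it (used later for #ifdef guards and extension checks).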
+        root = self.registry.reg
+
+        for node in self.registry.reg.findall('enums'):
+            if node.get('name') == 'API Constants':
+                for item in node.findall('enum'):
+                    self.constants[item.get('name')] = item.get('value')
+
+        for node in root.find('extensions').findall('extension'):
+            ext = VulkanExtension(node)
+            for item in ext.vktypes:
+                self.extTypes[item] = ext
+            for item in ext.vkfuncs:
+                self.extFuncs[item] = ext
+
+    def endFile(self):
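+        # Assemble the generated header: collect every type transitively required by the
+        # requested structures, then emit the enum/flag/bitmask dumpers, structure dumpers,
+        # pNext-chain helpers, and structure comparison operators in that order.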
+        types_to_gen = set()
+        for s in enums_to_gen:
+            types_to_gen.add(s)
+
+        for f in flags_to_gen:
+            types_to_gen.add(f)
+
+        types_to_gen = types_to_gen.union(GatherTypesToGen(self.structures))
+        for key, value in EXTENSION_CATEGORIES.items():
+            types_to_gen = types_to_gen.union(
+                GatherTypesToGen(self.extension_sets[key]))
+
+        structs_to_comp = set()
+        for s in struct_comparisons_to_gen:
+            structs_to_comp.add(s)
+        structs_to_comp = structs_to_comp.union(
+            GatherTypesToGen(self.structs_to_comp))
+
+        self.enums = sorted(self.enums, key=operator.attrgetter('name'))
+        self.flags = sorted(self.flags, key=operator.attrgetter('name'))
+        self.bitmasks = sorted(self.bitmasks, key=operator.attrgetter('name'))
+        self.structures = sorted(
+            self.structures, key=operator.attrgetter('name'))
+        self.all_structures = sorted(
+            self.all_structures, key=operator.attrgetter('name'))
+
+        for key, value in self.extension_sets.items():
+            self.extension_sets[key] = sorted(
+                value, key=operator.attrgetter('name'))
+
+        out = ''
+        out += license_header + "\n"
+        out += "#include \"vulkaninfo.h\"\n"
+        out += "#include \"outputprinter.h\"\n"
+        out += custom_formaters
+
+        for e in self.enums:
+            if e.name in types_to_gen:
+                out += PrintEnumToString(e, self)
+                out += PrintEnum(e, self)
+
+        for f in self.flags:
+            if f.name in types_to_gen:
+                for b in self.bitmasks:
+                    if b.name == f.enum:
+                        out += PrintFlags(f, b, self)
+                        out += PrintBitMask(b, f.name, self)
+
+            if f.name in flags_strings_to_gen:
+                for b in self.bitmasks:
+                    if b.name == f.enum:
+                        out += PrintBitMaskToString(b, f.name, self)
+
+        # find all structures needed to dump the requested structures
+        structure_names = set()
+        for s in self.all_structures:
+            if s.name in types_to_gen:
+                structure_names.add(s.name)
+
+        for s in self.all_structures:
+            if s.name in types_to_gen:
+                out += PrintForwardDeclaration(s, self)
+
+        for s in self.all_structures:
+            if s.name in types_to_gen:
+                out += PrintStructure(s, structure_names, self)
+
+        out += "pNextChainInfos get_chain_infos() {\n"
+        out += "    pNextChainInfos infos;\n"
+        for key, value in EXTENSION_CATEGORIES.items():
+            out += PrintChainBuilders(key, self.extension_sets[key])
+        out += "    return infos;\n}\n"
+
+        for key, value in EXTENSION_CATEGORIES.items():
+            out += PrintChainIterator(key,
+                                      self.extension_sets[key], value.get('type'))
+
+        for s in self.all_structures:
+            if s.name in structs_to_comp:
+                out += PrintStructComparisonForwardDef(s)
+
+        for s in self.all_structures:
+            if s.name in structs_to_comp:
+                out += PrintStructComparison(s)
+
+        gen.write(out, file=self.outFile)
+
+        gen.OutputGenerator.endFile(self)
+
+    def genCmd(self, cmd, name, alias):
+        gen.OutputGenerator.genCmd(self, cmd, name, alias)
+
+    # These are actually constants
+    def genEnum(self, enuminfo, name, alias):
+        gen.OutputGenerator.genEnum(self, enuminfo, name, alias)
+
+    # These are actually enums
+    def genGroup(self, groupinfo, groupName, alias):
+        gen.OutputGenerator.genGroup(self, groupinfo, groupName, alias)
+
+        if alias is not None:
+            return
+
+        if groupinfo.elem.get('type') == 'bitmask':
+            self.bitmasks.add(VulkanBitmask(groupinfo.elem))
+        elif groupinfo.elem.get('type') == 'enum':
+            self.enums.add(VulkanEnum(groupinfo.elem))
+
+    def genType(self, typeinfo, name, alias):
+        gen.OutputGenerator.genType(self, typeinfo, name, alias)
+
+        if alias is not None:
+            return
+
+        if typeinfo.elem.get('category') == 'bitmask':
+            self.flags.add(VulkanFlags(typeinfo.elem))
+
+        if typeinfo.elem.get('category') == 'struct' and name in structures_to_gen:
+            self.structures.add(VulkanStructure(
+                name, typeinfo.elem, self.constants, self.extTypes))
+
+        if typeinfo.elem.get('category') == 'struct' and name in struct_comparisons_to_gen:
+            self.structs_to_comp.add(VulkanStructure(
+                name, typeinfo.elem, self.constants, self.extTypes))
+
+        if typeinfo.elem.get('category') == 'struct':
+            self.all_structures.add(VulkanStructure(
+                name, typeinfo.elem, self.constants, self.extTypes))
+
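+        # Skip structures whose sType value carries a vendor suffix; vendor-specific
+        # structures are excluded from the pNext-chain categories collected below.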
+        for vendor in vendor_abbreviations:
+            for node in typeinfo.elem.findall('member'):
+                if(node.get('values') is not None):
+                    if(node.get('values').find(vendor)) != -1:
+                        return
+
+        for key, value in EXTENSION_CATEGORIES.items():
+            if typeinfo.elem.get('structextends') == value.get('extends'):
+                self.extension_sets[key].add(VulkanStructure(
+                    name, typeinfo.elem, self.constants, self.extTypes))
+
+
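+# Compute the transitive closure of type names referenced by the given structures,
+# iterating until no new member types are discovered.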
+def GatherTypesToGen(structures):
+    types = set()
+    added_stuff = True  # repeat until no new types are added
+    while added_stuff == True:
+        added_stuff = False
+        for s in structures:
+            size = len(types)
+            types.add(s.name)
+            if len(types) != size:
+                added_stuff = True
+            for m in s.members:
+                if m.typeID not in predefined_types and m.name not in ['sType', 'pNext']:
+                    types.add(m.typeID)
+    return types
+
+
+def GetExtension(name, generator):
+    if name in generator.extFuncs:
+        return generator.extFuncs[name]
+    elif name in generator.extTypes:
+        return generator.extTypes[name]
+    else:
+        return None
+
+
+def AddGuardHeader(obj):
+    if obj is not None and obj.guard is not None:
+        return "#ifdef {}\n".format(obj.guard)
+    else:
+        return ""
+
+
+def AddGuardFooter(obj):
+    if obj is not None and obj.guard is not None:
+        return "#endif  // {}\n".format(obj.guard)
+    else:
+        return ""
+
+
+def PrintEnumToString(e, gen):
+    out = ''
+    out += AddGuardHeader(GetExtension(e.name, gen))
+
+    out += "static const char *" + e.name + "String(" + e.name + " value) {\n"
+    out += "    switch (value) {\n"
+    for v in e.options:
+        out += "        case (" + str(v.value) + \
+            "): return \"" + v.name[3:] + "\";\n"
+    out += "        default: return \"UNKNOWN_" + e.name + "\";\n"
+    out += "    }\n}\n"
+    out += AddGuardFooter(GetExtension(e.name, gen))
+    return out
+
+
+def PrintEnum(e, gen):
+    out = ''
+    out += AddGuardHeader(GetExtension(e.name, gen))
+    out += "void Dump" + e.name + \
+        "(Printer &p, std::string name, " + \
+        e.name + " value, int width = 0) {\n"
+    out += "    if (p.Type() == OutputType::json) {\n"
+    out += "        p.PrintKeyValue(name, value, width);\n"
+    out += "        return;\n"
+    out += "    } else {\n"
+    out += "        p.PrintKeyValue(name, " + \
+        e.name + "String(value), width);\n    }\n"
+    out += "}\n"
+    out += AddGuardFooter(GetExtension(e.name, gen))
+    return out
+
+
+def PrintFlags(f, b, gen):
+    out = ''
+    out += AddGuardHeader(GetExtension(f.name, gen))
+
+    out += "void Dump" + f.name + \
+        "(Printer &p, std::string name, " + \
+        f.enum + " value, int width = 0) {\n"
+    out += "    if (value == 0) p.PrintElement(\"None\");\n"
+    for v in b.options:
+        out += "    if (" + str(v.value) + \
+            " & value) p.SetAsType().PrintElement(\"" + \
+            str(v.name[3:]) + "\");\n"
+    out += "}\n"
+
+    out += AddGuardFooter(GetExtension(f.name, gen))
+    return out
+
+
+def PrintBitMask(b, name, gen):
+    out = ''
+    out += AddGuardHeader(GetExtension(b.name, gen))
+    out += "void Dump" + name + \
+        "(Printer &p, std::string name, " + name + " value, int width = 0) {\n"
+    out += "    if (p.Type() == OutputType::json) { p.PrintKeyValue(name, value); return; }\n"
+    out += "    p.ObjectStart(name);\n"
+    out += "    Dump" + name + \
+        "(p, name, static_cast<" + b.name + ">(value), width);\n"
+    out += "    p.ObjectEnd();\n"
+    out += "}\n"
+    out += "void Dump" + b.name + \
+        "(Printer &p, std::string name, " + \
+        b.name + " value, int width = 0) {\n"
+    out += "    if (p.Type() == OutputType::json) { p.PrintKeyValue(name, value); return; }\n"
+    out += "    p.ObjectStart(name);\n"
+    out += "    Dump" + name + "(p, name, value, width);\n"
+    out += "    p.ObjectEnd();\n"
+    out += "}\n"
+    out += AddGuardFooter(GetExtension(b.name, gen))
+    return out
+
+
+def PrintBitMaskToString(b, name, gen):
+    out = ''
+    out += AddGuardHeader(GetExtension(b.name, gen))
+    out += "std::string " + name + \
+        "String(" + name + " value, int width = 0) {\n"
+    out += "    std::string out;\n"
+    out += "    bool is_first = true;\n"
+    for v in b.options:
+        out += "    if (" + str(v.value) + " & value) {\n"
+        out += "        if (is_first) { is_first = false; } else { out += \" | \"; }\n"
+        out += "        out += \"" + \
+            str(v.name).strip("VK_").strip("_BIT") + "\";\n"
+        out += "    }\n"
+    out += "    return out;\n"
+    out += "}\n"
+    out += AddGuardFooter(GetExtension(b.name, gen))
+    return out
+
+
+def PrintForwardDeclaration(struct, gen):
+    out = ''
+    out += AddGuardHeader(struct)
+    out += "void Dump" + struct.name + \
+        "(Printer &p, std::string name, " + struct.name + " &obj);\n"
+    out += AddGuardFooter(struct)
+
+    return out
+
+
+def PrintStructure(struct, structure_names, gen):
+    out = ''
+    out += AddGuardHeader(struct)
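+    # Width of the longest printed member name, used to align the emitted key/value pairs.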
+    max_key_len = len(struct.members[0].name)
+    for v in struct.members:
+        if v.arrayLength is not None:
+            if len(v.name) + len(v.arrayLength) + 2 > max_key_len:
+                max_key_len = len(v.name) + len(v.arrayLength) + 2
+        elif v.typeID in predefined_types:
+            if len(v.name) > max_key_len:
+                max_key_len = len(v.name)
+
+    out += "void Dump" + struct.name + \
+        "(Printer &p, std::string name, " + struct.name + " &obj) {\n"
+    if struct.name == "VkPhysicalDeviceLimits":
+        out += "    if (p.Type() == OutputType::json)\n"
+        out += "        p.ObjectStart(\"limits\");\n"
+        out += "    else\n"
+        out += "        p.SetSubHeader().ObjectStart(name);\n"
+    elif struct.name == "VkPhysicalDeviceSparseProperties":
+        out += "    if (p.Type() == OutputType::json)\n"
+        out += "        p.ObjectStart(\"sparseProperties\");\n"
+        out += "    else\n"
+        out += "        p.SetSubHeader().ObjectStart(name);\n"
+    else:
+        out += "    p.ObjectStart(name);\n"
+
+    for v in struct.members:
+        # arrays
+        if v.arrayLength is not None:
+            # strings
+            if v.typeID == "char":
+                out += "    p.PrintKeyString(\"" + v.name + "\", obj." + \
+                    v.name + ", " + str(max_key_len) + ");\n"
+            # uuid's
+            elif (v.arrayLength == str(16) and v.typeID == "uint8_t"):  # VK_UUID_SIZE
+                out += "    p.PrintKeyString(\"" + v.name + "\", to_string_16(obj." + \
+                    v.name + "), " + str(max_key_len) + ");\n"
+            elif (v.arrayLength == str(8) and v.typeID == "uint8_t"):  # VK_LUID_SIZE
+                out += "    if (obj.deviceLUIDValid)"  # special case
+                out += " p.PrintKeyString(\"" + v.name + "\", to_string_8(obj." + \
+                    v.name + "), " + str(max_key_len) + ");\n"
+            elif v.arrayLength.isdigit():
+                out += "    p.ArrayStart(\"" + v.name + \
+                    "\", "+v.arrayLength+");\n"
+                for i in range(0, int(v.arrayLength)):
+                    out += "    p.PrintElement(obj." + \
+                        v.name + "[" + str(i) + "]);\n"
+                out += "    p.ArrayEnd();\n"
+            else:  # dynamic array length based on other member
+                out += "    p.ArrayStart(\"" + v.name + \
+                    "\", obj."+v.arrayLength+");\n"
+                out += "    for (uint32_t i = 0; i < obj." + \
+                    v.arrayLength+"; i++) {\n"
+                if v.typeID in structure_names:
+                    out += "        if (obj." + v.name + " != nullptr) {\n"
+                    out += "            p.SetElementIndex(i);\n"
+                    out += "            Dump" + v.typeID + \
+                        "(p, \"" + v.name + "\", obj." + v.name + "[i]);\n"
+                    out += "        }\n"
+                else:
+                    out += "        p.PrintElement(obj." + v.name + "[i]);\n"
+                out += "    }\n    p.ArrayEnd();\n"
+        elif v.typeID == "VkBool32":
+            out += "    p.PrintKeyBool(\"" + v.name + "\", static_cast<bool>(obj." + \
+                v.name + "), " + str(max_key_len) + ");\n"
+        elif v.typeID == "VkDeviceSize":
+            out += "    p.PrintKeyValue(\"" + v.name + "\", to_hex_str(p, obj." + \
+                v.name + "), " + str(max_key_len) + ");\n"
+        elif v.typeID in predefined_types:
+            out += "    p.PrintKeyValue(\"" + v.name + "\", obj." + \
+                v.name + ", " + str(max_key_len) + ");\n"
+        elif v.name not in ['sType', 'pNext']:
+            if v.typeID in structure_names:
+                out += "    Dump" + v.typeID + \
+                    "(p, \"" + v.name + "\", obj." + v.name + ");\n"
+            else:
+                out += "    Dump" + v.typeID + \
+                    "(p, \"" + v.name + "\", obj." + \
+                    v.name + ", " + str(max_key_len) + ");\n"
+
+    out += "    p.ObjectEnd();\n"
+    out += "}\n"
+
+    out += AddGuardFooter(struct)
+    return out
+
+
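+# Emit the initializer list for one pNext-chain category; each entry pairs a structure's
+# sType with sizeof(struct) so the corresponding chain can be built at runtime.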
+def PrintChainBuilders(listName, structures):
+    out = ''
+    out += "    infos." + listName + " = {\n"
+    for s in structures:
+        out += AddGuardHeader(s)
+        if s.sTypeName is not None:
+            out += "        {" + s.sTypeName + ", sizeof(" + s.name + ")},\n"
+        out += AddGuardFooter(s)
+    out += "    };\n"
+    return out
+
+
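+# Emit a chain_iterator_* function that walks a pNext chain, matches each structure by
+# sType (and by extension availability where one is required), and dumps it with the
+# printers generated above.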
+def PrintChainIterator(listName, structures, checkExtLoc):
+    sorted_structures = sorted(structures, key=operator.attrgetter("name"))
+
+    out = ''
+    out += "void chain_iterator_" + listName + "(Printer &p, "
+    if checkExtLoc == "device":
+        out += "AppGpu &gpu"
+    elif checkExtLoc == "instance":
+        out += "AppInstance &inst"
+    elif checkExtLoc == "both":
+        out += "AppInstance &inst, AppGpu &gpu"
+    out += ", void * place) {\n"
+
+    out += "    while (place) {\n"
+    out += "        struct VkStructureHeader *structure = (struct VkStructureHeader *)place;\n"
+    out += "        p.SetSubHeader();\n"
+    for s in sorted_structures:
+        out += AddGuardHeader(s)
+        if s.sTypeName is not None:
+            if s.extNameStr is not None:
+                out += "        if (structure->sType == " + \
+                    s.sTypeName + " &&\n"
+                if s.extType == "device":
+                    out += "            gpu.CheckPhysicalDeviceExtensionIncluded(" + \
+                        s.extNameStr + ")) {\n"
+                elif s.extType == "instance":
+                    out += "            inst.CheckExtensionEnabled(" + \
+                        s.extNameStr + ")) {\n"
+
+            else:
+                out += "        if (structure->sType == " + \
+                    s.sTypeName + ") {\n"
+
+            out += "            " + s.name + "* props = " + \
+                "("+s.name+"*)structure;\n"
+            out += "            Dump" + s.name + \
+                "(p, \"" + s.name + "\", *props);\n"
+            out += "            p.AddNewline();\n"
+            out += "        }\n"
+        out += AddGuardFooter(s)
+    out += "        place = structure->pNext;\n"
+    out += "    }\n"
+    out += "}\n"
+    return out
+
+
+def PrintStructComparisonForwardDef(structure):
+    out = ''
+    out += "bool operator==(const " + structure.name + \
+        " & a, const " + structure.name + " b);\n"
+    return out
+
+
+def PrintStructComparison(structure):
+    out = ''
+    out += "bool operator==(const " + structure.name + \
+        " & a, const " + structure.name + " b) {\n"
+    out += "    return "
+    is_first = True
+    for m in structure.members:
+        if m.name not in ['sType', 'pNext']:
+            if not is_first:
+                out += "\n        && "
+            else:
+                is_first = False
+            out += "a." + m.name + " == b." + m.name
+    out += ";\n"
+    out += "}\n"
+    return out
+
+
+def isPow2(num):
+    return num != 0 and ((num & (num - 1)) == 0)
+
+
+def StrToInt(s):
+    try:
+        return int(s)
+    except ValueError:
+        return int(s, 16)
+
+
+class VulkanEnum:
+    class Option:
+
+        def __init__(self, name, value, bitpos, comment):
+            self.name = name
+            self.comment = comment
+            self.multiValue = None
+
+            if value is not None:
+
+                self.multiValue = not isPow2(StrToInt(value))
+
+            if value == 0 or value is None:
+                value = 1 << int(bitpos)
+
+            self.value = value
+
+        def values(self):
+            return {
+                'optName': self.name,
+                'optValue': self.value,
+                'optComment': self.comment,
+                'optMultiValue': self.multiValue,
+            }
+
+    def __init__(self, rootNode):
+        self.name = rootNode.get('name')
+        self.type = rootNode.get('type')
+        self.options = []
+
+        for child in rootNode:
+            childName = child.get('name')
+            childValue = child.get('value')
+            childBitpos = child.get('bitpos')
+            childComment = child.get('comment')
+            childExtends = child.get('extends')
+            childOffset = child.get('offset')
+            childExtNum = child.get('extnumber')
+            support = child.get('supported')
+            if(support == "disabled"):
+                continue
+
+            if childName is None:
+                continue
+            if (childValue is None and childBitpos is None and childOffset is None):
+                continue
+
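+            # Extension-provided enumerants take their value from the registry's reserved
+            # block: 1000000000 + 1000 * (extension number - 1) + offset.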
+            if childExtends is not None and childExtNum is not None and childOffset is not None:
+                enumNegative = False
+                extNum = int(childExtNum)
+                extOffset = int(childOffset)
+                extBase = 1000000000
+                extBlockSize = 1000
+                childValue = extBase + (extNum - 1) * extBlockSize + extOffset
+                if ('dir' in child.keys()):
+                    childValue = -childValue
+            duplicate = False
+            for o in self.options:
+                if o.values()['optName'] == childName:
+                    duplicate = True
+            if duplicate:
+                continue
+
+            self.options.append(VulkanEnum.Option(
+                childName, childValue, childBitpos, childComment))
+
+
+class VulkanBitmask:
+
+    def __init__(self, rootNode):
+        self.name = rootNode.get('name')
+        self.type = rootNode.get('type')
+
+        # Read each value that the enum contains
+        self.options = []
+        for child in rootNode:
+            childName = child.get('name')
+            childValue = child.get('value')
+            childBitpos = child.get('bitpos')
+            childComment = child.get('comment')
+            support = child.get('supported')
+            if childName is None or (childValue is None and childBitpos is None):
+                continue
+            if(support == "disabled"):
+                continue
+
+            self.options.append(VulkanEnum.Option(
+                childName, childValue, childBitpos, childComment))
+
+
+class VulkanFlags:
+
+    def __init__(self, rootNode):
+        self.name = rootNode.get('name')
+        self.type = rootNode.get('type')
+        self.enum = rootNode.get('requires')
+
+
+class VulkanVariable:
+    def __init__(self, rootNode, constants, parentName):
+        self.name = rootNode.find('name').text
+        # Typename, dereferenced and converted to a useable C++ token
+        self.typeID = rootNode.find('type').text
+        self.baseType = self.typeID
+        self.childType = None
+        self.arrayLength = None
+
+        self.text = ''
+        for node in rootNode.itertext():
+            comment = rootNode.find('comment')
+            if comment is not None and comment.text == node:
+                continue
+            self.text += node
+
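+        # Everything in the member's raw text that precedes its name is the C type
+        # (e.g. "const char* "); a bracketed suffix, if present, is a fixed array length.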
+        typeMatch = re.search('.+?(?=' + self.name + ')', self.text)
+        self.type = typeMatch.string[typeMatch.start():typeMatch.end()]
+        self.type = ' '.join(self.type.split())
+        bracketMatch = re.search('(?<=\\[)[a-zA-Z0-9_]+(?=\\])', self.text)
+        if bracketMatch is not None:
+            matchText = bracketMatch.string[bracketMatch.start(
+            ):bracketMatch.end()]
+            self.childType = self.type
+            self.type += '[' + matchText + ']'
+            if matchText in constants:
+                self.arrayLength = constants[matchText]
+            else:
+                self.arrayLength = matchText
+
+        self.lengthMember = False
+        lengthString = rootNode.get('len')
+        lengths = []
+        if lengthString is not None:
+            lengths = re.split(',', lengthString)
+            lengths = list(filter(('null-terminated').__ne__, lengths))
+        assert(len(lengths) <= 1)
+        if self.arrayLength is None and len(lengths) > 0:
+            self.childType = '*'.join(self.type.split('*')[0:-1])
+            self.arrayLength = lengths[0]
+            self.lengthMember = True
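+        # Array lengths expressed as latexmath are reduced to plain arithmetic by
+        # stripping the LaTeX markup.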
+        if self.arrayLength is not None and self.arrayLength.startswith('latexmath'):
+            code = self.arrayLength[10:len(self.arrayLength)]
+            code = re.sub('\\[', '', code)
+            code = re.sub('\\]', '', code)
+            code = re.sub('\\\\(lceil|rceil)', '', code)
+            code = re.sub('{|}', '', code)
+            code = re.sub('\\\\mathit', '', code)
+            code = re.sub('\\\\over', '/', code)
+            code = re.sub('\\\\textrm', '', code)
+            self.arrayLength = code
+
+        # Dereference if necessary and handle members of variables
+        if self.arrayLength is not None:
+            self.arrayLength = re.sub('::', '->', self.arrayLength)
+            sections = self.arrayLength.split('->')
+            if sections[-1][0] == 'p' and sections[0][1].isupper():
+                self.arrayLength = '*' + self.arrayLength
+
+
+class VulkanStructure:
+    def __init__(self, name, rootNode, constants, extTypes):
+        self.name = name
+        self.members = []
+        self.guard = None
+        self.sTypeName = None
+        self.extNameStr = None
+        self.extType = None
+        for node in rootNode.findall('member'):
+            if(node.get('values') is not None):
+                self.sTypeName = node.get('values')
+            self.members.append(VulkanVariable(
+                node, constants, self.name))
+
+        for k, e in extTypes.items():
+            if k == self.name:
+                if e.guard is not None:
+                    self.guard = e.guard
+                if e.extNameStr is not None:
+                    self.extNameStr = e.extNameStr
+                if e.type is not None:
+                    self.extType = e.type
+
+
+class VulkanExtension:
+    def __init__(self, rootNode):
+        self.name = rootNode.get('name')
+        self.number = int(rootNode.get('number'))
+        self.type = rootNode.get('type')
+        self.dependency = rootNode.get('requires')
+        self.guard = GetFeatureProtect(rootNode)
+        self.supported = rootNode.get('supported')
+        self.extNameStr = None
+        self.vktypes = []
+        self.vkfuncs = []
+        self.constants = {}
+        self.enumValues = {}
+
+        for req in rootNode.findall('require'):
+            for ty in req.findall('type'):
+                self.vktypes.append(ty.get('name'))
+
+            for func in req.findall('command'):
+                self.vkfuncs.append(func.get('name'))
+
+            for enum in req.findall('enum'):
+                base = enum.get('extends')
+                name = enum.get('name')
+                value = enum.get('value')
+                bitpos = enum.get('bitpos')
+                offset = enum.get('offset')
+                # gets the VK_XXX_EXTENSION_NAME string
+                if value == "\"" + self.name + "\"":
+                    self.extNameStr = name
+
+                if value is None and bitpos is not None:
+                    value = 1 << int(bitpos)
+
+                if offset is not None:
+                    offset = int(offset)
+                if base is not None and offset is not None:
+                    enumValue = 1000000000 + 1000*(self.number - 1) + offset
+                    if enum.get('dir') == '-':
+                        enumValue = -enumValue
+                    self.enumValues[base] = (name, enumValue)
+                else:
+                    self.constants[name] = value
diff --git a/src/third_party/vulkan-tools/src/vulkaninfo/CMakeLists.txt b/src/third_party/vulkan-tools/src/vulkaninfo/CMakeLists.txt
new file mode 100644
index 0000000..14776d9
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/vulkaninfo/CMakeLists.txt
@@ -0,0 +1,131 @@
+# ~~~
+# Copyright (c) 2018-2019 Valve Corporation
+# Copyright (c) 2018-2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ~~~
+
+# CMakeLists.txt file for building Vulkaninfo
+
+find_package(PythonInterp 3 QUIET)
+set (PYTHON_CMD ${PYTHON_EXECUTABLE})
+
+set(VKINFO_DIR ${CMAKE_CURRENT_SOURCE_DIR})
+set(KVULKANTOOLS_SCRIPTS_DIR ${CMAKE_SOURCE_DIR}/scripts)
+
+if(PYTHONINTERP_FOUND)
+    add_custom_target(generate_vulkaninfo_hpp
+        COMMAND ${PYTHON_CMD} ${KVULKANTOOLS_SCRIPTS_DIR}/kvt_genvk.py -registry ${VulkanRegistry_DIR}/vk.xml -scripts ${VulkanRegistry_DIR} vulkaninfo.hpp
+        DEPENDS ${VulkanRegistry_DIR}/vk.xml ${VulkanRegistry_DIR}/generator.py ${KVULKANTOOLS_SCRIPTS_DIR}/vulkaninfo_generator.py ${KVULKANTOOLS_SCRIPTS_DIR}/kvt_genvk.py ${VulkanRegistry_DIR}/reg.py
+        WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}/vulkaninfo/generated
+        )
+else()
+    message("WARNING: generate_vulkaninfo_hpp target requires python 3")
+endif()
+
+if(WIN32)
+    add_executable(vulkaninfo vulkaninfo.cpp vulkaninfo.rc)
+elseif(APPLE)
+    add_executable(vulkaninfo
+                   vulkaninfo.cpp
+                   ${CMAKE_CURRENT_SOURCE_DIR}/macOS/vulkaninfo/metal_view.mm
+                   ${CMAKE_CURRENT_SOURCE_DIR}/macOS/vulkaninfo/metal_view.h)
+else()
+    add_executable(vulkaninfo vulkaninfo.cpp)
+endif()
+
+target_include_directories(vulkaninfo PRIVATE ${CMAKE_SOURCE_DIR}/vulkaninfo)
+target_include_directories(vulkaninfo PRIVATE ${CMAKE_SOURCE_DIR}/vulkaninfo/generated)
+
+if(UNIX AND NOT APPLE) # i.e. Linux
+    include(FindPkgConfig)
+    option(BUILD_WSI_XCB_SUPPORT "Build XCB WSI support" ON)
+    option(BUILD_WSI_XLIB_SUPPORT "Build Xlib WSI support" ON)
+    option(BUILD_WSI_WAYLAND_SUPPORT "Build Wayland WSI support" ON)
+
+    if(BUILD_WSI_XCB_SUPPORT)
+        find_package(XCB REQUIRED)
+        target_include_directories(vulkaninfo PRIVATE ${XCB_INCLUDE_DIRS})
+        target_link_libraries(vulkaninfo ${XCB_LIBRARIES})
+        target_compile_definitions(vulkaninfo PRIVATE -DVK_USE_PLATFORM_XCB_KHR)
+    endif()
+
+    if(BUILD_WSI_XLIB_SUPPORT)
+        find_package(X11 REQUIRED)
+        target_include_directories(vulkaninfo PRIVATE ${X11_INCLUDE_DIR})
+        target_link_libraries(vulkaninfo ${X11_LIBRARIES})
+        target_compile_definitions(vulkaninfo PRIVATE -DVK_USE_PLATFORM_XLIB_KHR)
+    endif()
+
+    if(BUILD_WSI_WAYLAND_SUPPORT)
+        find_package(Wayland REQUIRED)
+        target_include_directories(vulkaninfo PRIVATE ${WAYLAND_CLIENT_INCLUDE_DIR})
+        target_link_libraries(vulkaninfo ${WAYLAND_CLIENT_LIBRARIES})
+        target_compile_definitions(vulkaninfo PRIVATE -DVK_USE_PLATFORM_WAYLAND_KHR)
+    endif()
+endif()
+
+if(APPLE)
+    # We do this so vulkaninfo is linked to an individual library and NOT a framework.
+    target_link_libraries(vulkaninfo ${Vulkan_LIBRARY} "-framework AppKit -framework QuartzCore")
+    target_include_directories(vulkaninfo PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/macOS/vulkaninfo ${VulkanHeaders_INCLUDE_DIR})
+else()
+    target_link_libraries(vulkaninfo Vulkan::Vulkan)
+endif()
+
+# Create vulkaninfo application bundle for MacOS
+if(APPLE)
+    include(${CMAKE_CURRENT_SOURCE_DIR}/macOS/vulkaninfo.cmake)
+endif()
+
+if(WIN32)
+    target_compile_definitions(vulkaninfo PUBLIC -DVK_USE_PLATFORM_WIN32_KHR -DWIN32_LEAN_AND_MEAN -D_CRT_SECURE_NO_WARNINGS)
+    if(NOT MSVC_VERSION LESS 1900)
+        # Enable control flow guard
+        message(STATUS "Building vulkaninfo with control flow guard")
+        add_compile_options("$<$<CXX_COMPILER_ID:MSVC>:/guard:cf>")
+        set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} /guard:cf")
+        set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} /guard:cf")
+    endif()
+
+    # Use static MSVCRT libraries
+    foreach(configuration
+            in
+            CMAKE_C_FLAGS_DEBUG
+            CMAKE_C_FLAGS_MINSIZEREL
+            CMAKE_C_FLAGS_RELEASE
+            CMAKE_C_FLAGS_RELWITHDEBINFO
+            CMAKE_CXX_FLAGS_DEBUG
+            CMAKE_CXX_FLAGS_MINSIZEREL
+            CMAKE_CXX_FLAGS_RELEASE
+            CMAKE_CXX_FLAGS_RELWITHDEBINFO)
+        if(${configuration} MATCHES "/MD")
+            string(REGEX
+                   REPLACE "/MD"
+                           "/MT"
+                           ${configuration}
+                           "${${configuration}}")
+        endif()
+    endforeach()
+
+    file(COPY vulkaninfo.vcxproj.user DESTINATION ${CMAKE_BINARY_DIR}/vulkaninfo)
+elseif(APPLE)
+    add_definitions(-DVK_USE_PLATFORM_MACOS_MVK -DVK_USE_PLATFORM_METAL_EXT)
+endif()
+
+if(APPLE)
+    install(TARGETS vulkaninfo RUNTIME DESTINATION "vulkaninfo")
+else()
+    install(TARGETS vulkaninfo RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
+endif()
+
diff --git a/src/third_party/vulkan-tools/src/vulkaninfo/generated/.clang-format b/src/third_party/vulkan-tools/src/vulkaninfo/generated/.clang-format
new file mode 100644
index 0000000..3bb983a
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/vulkaninfo/generated/.clang-format
@@ -0,0 +1,5 @@
+---
+# Disable clang-format for generated code
+DisableFormat: true
+SortIncludes: false
+...
diff --git a/src/third_party/vulkan-tools/src/vulkaninfo/generated/vulkaninfo.hpp b/src/third_party/vulkan-tools/src/vulkaninfo/generated/vulkaninfo.hpp
new file mode 100644
index 0000000..e966157
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/vulkaninfo/generated/vulkaninfo.hpp
@@ -0,0 +1,2243 @@
+
+/*
+ * Copyright (c) 2019 The Khronos Group Inc.
+ * Copyright (c) 2019 Valve Corporation
+ * Copyright (c) 2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Charles Giessen <charles@lunarg.com>
+ *
+ */
+
+/*
+ * This file is generated from the Khronos Vulkan XML API Registry.
+ */
+
+#include "vulkaninfo.h"
+#include "outputprinter.h"
+
+std::ostream &operator<<(std::ostream &o, VkConformanceVersionKHR &c) {
+    return o << std::to_string(c.major) << "." << std::to_string(c.minor) << "." << std::to_string(c.subminor) << "."
+             << std::to_string(c.patch);
+}
+
+std::string VkExtent3DString(VkExtent3D e) {
+    return std::string("(") + std::to_string(e.width) + ", " + std::to_string(e.height) + ", " + std::to_string(e.depth) + ")";
+}
+
+template <typename T>
+std::string to_hex_str(T i) {
+    std::stringstream stream;
+    stream << "0x" << std::setfill('0') << std::setw(sizeof(T)) << std::hex << i;
+    return stream.str();
+}
+
+template <typename T>
+std::string to_hex_str(Printer &p, T i) {
+    if (p.Type() == OutputType::json)
+        return std::to_string(i);
+    else
+        return to_hex_str(i);
+}
+static const char *VkColorSpaceKHRString(VkColorSpaceKHR value) {
+    switch (value) {
+        case (0): return "COLOR_SPACE_SRGB_NONLINEAR_KHR";
+        case (1000104001): return "COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT";
+        case (1000104002): return "COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT";
+        case (1000104003): return "COLOR_SPACE_DISPLAY_P3_LINEAR_EXT";
+        case (1000104004): return "COLOR_SPACE_DCI_P3_NONLINEAR_EXT";
+        case (1000104005): return "COLOR_SPACE_BT709_LINEAR_EXT";
+        case (1000104006): return "COLOR_SPACE_BT709_NONLINEAR_EXT";
+        case (1000104007): return "COLOR_SPACE_BT2020_LINEAR_EXT";
+        case (1000104008): return "COLOR_SPACE_HDR10_ST2084_EXT";
+        case (1000104009): return "COLOR_SPACE_DOLBYVISION_EXT";
+        case (1000104010): return "COLOR_SPACE_HDR10_HLG_EXT";
+        case (1000104011): return "COLOR_SPACE_ADOBERGB_LINEAR_EXT";
+        case (1000104012): return "COLOR_SPACE_ADOBERGB_NONLINEAR_EXT";
+        case (1000104013): return "COLOR_SPACE_PASS_THROUGH_EXT";
+        case (1000104014): return "COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT";
+        case (1000213000): return "COLOR_SPACE_DISPLAY_NATIVE_AMD";
+        default: return "UNKNOWN_VkColorSpaceKHR";
+    }
+}
+void DumpVkColorSpaceKHR(Printer &p, std::string name, VkColorSpaceKHR value, int width = 0) {
+    if (p.Type() == OutputType::json) {
+        p.PrintKeyValue(name, value, width);
+        return;
+    } else {
+        p.PrintKeyValue(name, VkColorSpaceKHRString(value), width);
+    }
+}
+static const char *VkDriverIdKHRString(VkDriverIdKHR value) {
+    switch (value) {
+        case (1): return "DRIVER_ID_AMD_PROPRIETARY_KHR";
+        case (2): return "DRIVER_ID_AMD_OPEN_SOURCE_KHR";
+        case (3): return "DRIVER_ID_MESA_RADV_KHR";
+        case (4): return "DRIVER_ID_NVIDIA_PROPRIETARY_KHR";
+        case (5): return "DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR";
+        case (6): return "DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR";
+        case (7): return "DRIVER_ID_IMAGINATION_PROPRIETARY_KHR";
+        case (8): return "DRIVER_ID_QUALCOMM_PROPRIETARY_KHR";
+        case (9): return "DRIVER_ID_ARM_PROPRIETARY_KHR";
+        case (10): return "DRIVER_ID_GOOGLE_SWIFTSHADER_KHR";
+        case (11): return "DRIVER_ID_GGP_PROPRIETARY_KHR";
+        case (12): return "DRIVER_ID_BROADCOM_PROPRIETARY_KHR";
+        default: return "UNKNOWN_VkDriverIdKHR";
+    }
+}
+void DumpVkDriverIdKHR(Printer &p, std::string name, VkDriverIdKHR value, int width = 0) {
+    if (p.Type() == OutputType::json) {
+        p.PrintKeyValue(name, value, width);
+        return;
+    } else {
+        p.PrintKeyValue(name, VkDriverIdKHRString(value), width);
+    }
+}
+static const char *VkFormatString(VkFormat value) {
+    switch (value) {
+        case (0): return "FORMAT_UNDEFINED";
+        case (1): return "FORMAT_R4G4_UNORM_PACK8";
+        case (2): return "FORMAT_R4G4B4A4_UNORM_PACK16";
+        case (3): return "FORMAT_B4G4R4A4_UNORM_PACK16";
+        case (4): return "FORMAT_R5G6B5_UNORM_PACK16";
+        case (5): return "FORMAT_B5G6R5_UNORM_PACK16";
+        case (6): return "FORMAT_R5G5B5A1_UNORM_PACK16";
+        case (7): return "FORMAT_B5G5R5A1_UNORM_PACK16";
+        case (8): return "FORMAT_A1R5G5B5_UNORM_PACK16";
+        case (9): return "FORMAT_R8_UNORM";
+        case (10): return "FORMAT_R8_SNORM";
+        case (11): return "FORMAT_R8_USCALED";
+        case (12): return "FORMAT_R8_SSCALED";
+        case (13): return "FORMAT_R8_UINT";
+        case (14): return "FORMAT_R8_SINT";
+        case (15): return "FORMAT_R8_SRGB";
+        case (16): return "FORMAT_R8G8_UNORM";
+        case (17): return "FORMAT_R8G8_SNORM";
+        case (18): return "FORMAT_R8G8_USCALED";
+        case (19): return "FORMAT_R8G8_SSCALED";
+        case (20): return "FORMAT_R8G8_UINT";
+        case (21): return "FORMAT_R8G8_SINT";
+        case (22): return "FORMAT_R8G8_SRGB";
+        case (23): return "FORMAT_R8G8B8_UNORM";
+        case (24): return "FORMAT_R8G8B8_SNORM";
+        case (25): return "FORMAT_R8G8B8_USCALED";
+        case (26): return "FORMAT_R8G8B8_SSCALED";
+        case (27): return "FORMAT_R8G8B8_UINT";
+        case (28): return "FORMAT_R8G8B8_SINT";
+        case (29): return "FORMAT_R8G8B8_SRGB";
+        case (30): return "FORMAT_B8G8R8_UNORM";
+        case (31): return "FORMAT_B8G8R8_SNORM";
+        case (32): return "FORMAT_B8G8R8_USCALED";
+        case (33): return "FORMAT_B8G8R8_SSCALED";
+        case (34): return "FORMAT_B8G8R8_UINT";
+        case (35): return "FORMAT_B8G8R8_SINT";
+        case (36): return "FORMAT_B8G8R8_SRGB";
+        case (37): return "FORMAT_R8G8B8A8_UNORM";
+        case (38): return "FORMAT_R8G8B8A8_SNORM";
+        case (39): return "FORMAT_R8G8B8A8_USCALED";
+        case (40): return "FORMAT_R8G8B8A8_SSCALED";
+        case (41): return "FORMAT_R8G8B8A8_UINT";
+        case (42): return "FORMAT_R8G8B8A8_SINT";
+        case (43): return "FORMAT_R8G8B8A8_SRGB";
+        case (44): return "FORMAT_B8G8R8A8_UNORM";
+        case (45): return "FORMAT_B8G8R8A8_SNORM";
+        case (46): return "FORMAT_B8G8R8A8_USCALED";
+        case (47): return "FORMAT_B8G8R8A8_SSCALED";
+        case (48): return "FORMAT_B8G8R8A8_UINT";
+        case (49): return "FORMAT_B8G8R8A8_SINT";
+        case (50): return "FORMAT_B8G8R8A8_SRGB";
+        case (51): return "FORMAT_A8B8G8R8_UNORM_PACK32";
+        case (52): return "FORMAT_A8B8G8R8_SNORM_PACK32";
+        case (53): return "FORMAT_A8B8G8R8_USCALED_PACK32";
+        case (54): return "FORMAT_A8B8G8R8_SSCALED_PACK32";
+        case (55): return "FORMAT_A8B8G8R8_UINT_PACK32";
+        case (56): return "FORMAT_A8B8G8R8_SINT_PACK32";
+        case (57): return "FORMAT_A8B8G8R8_SRGB_PACK32";
+        case (58): return "FORMAT_A2R10G10B10_UNORM_PACK32";
+        case (59): return "FORMAT_A2R10G10B10_SNORM_PACK32";
+        case (60): return "FORMAT_A2R10G10B10_USCALED_PACK32";
+        case (61): return "FORMAT_A2R10G10B10_SSCALED_PACK32";
+        case (62): return "FORMAT_A2R10G10B10_UINT_PACK32";
+        case (63): return "FORMAT_A2R10G10B10_SINT_PACK32";
+        case (64): return "FORMAT_A2B10G10R10_UNORM_PACK32";
+        case (65): return "FORMAT_A2B10G10R10_SNORM_PACK32";
+        case (66): return "FORMAT_A2B10G10R10_USCALED_PACK32";
+        case (67): return "FORMAT_A2B10G10R10_SSCALED_PACK32";
+        case (68): return "FORMAT_A2B10G10R10_UINT_PACK32";
+        case (69): return "FORMAT_A2B10G10R10_SINT_PACK32";
+        case (70): return "FORMAT_R16_UNORM";
+        case (71): return "FORMAT_R16_SNORM";
+        case (72): return "FORMAT_R16_USCALED";
+        case (73): return "FORMAT_R16_SSCALED";
+        case (74): return "FORMAT_R16_UINT";
+        case (75): return "FORMAT_R16_SINT";
+        case (76): return "FORMAT_R16_SFLOAT";
+        case (77): return "FORMAT_R16G16_UNORM";
+        case (78): return "FORMAT_R16G16_SNORM";
+        case (79): return "FORMAT_R16G16_USCALED";
+        case (80): return "FORMAT_R16G16_SSCALED";
+        case (81): return "FORMAT_R16G16_UINT";
+        case (82): return "FORMAT_R16G16_SINT";
+        case (83): return "FORMAT_R16G16_SFLOAT";
+        case (84): return "FORMAT_R16G16B16_UNORM";
+        case (85): return "FORMAT_R16G16B16_SNORM";
+        case (86): return "FORMAT_R16G16B16_USCALED";
+        case (87): return "FORMAT_R16G16B16_SSCALED";
+        case (88): return "FORMAT_R16G16B16_UINT";
+        case (89): return "FORMAT_R16G16B16_SINT";
+        case (90): return "FORMAT_R16G16B16_SFLOAT";
+        case (91): return "FORMAT_R16G16B16A16_UNORM";
+        case (92): return "FORMAT_R16G16B16A16_SNORM";
+        case (93): return "FORMAT_R16G16B16A16_USCALED";
+        case (94): return "FORMAT_R16G16B16A16_SSCALED";
+        case (95): return "FORMAT_R16G16B16A16_UINT";
+        case (96): return "FORMAT_R16G16B16A16_SINT";
+        case (97): return "FORMAT_R16G16B16A16_SFLOAT";
+        case (98): return "FORMAT_R32_UINT";
+        case (99): return "FORMAT_R32_SINT";
+        case (100): return "FORMAT_R32_SFLOAT";
+        case (101): return "FORMAT_R32G32_UINT";
+        case (102): return "FORMAT_R32G32_SINT";
+        case (103): return "FORMAT_R32G32_SFLOAT";
+        case (104): return "FORMAT_R32G32B32_UINT";
+        case (105): return "FORMAT_R32G32B32_SINT";
+        case (106): return "FORMAT_R32G32B32_SFLOAT";
+        case (107): return "FORMAT_R32G32B32A32_UINT";
+        case (108): return "FORMAT_R32G32B32A32_SINT";
+        case (109): return "FORMAT_R32G32B32A32_SFLOAT";
+        case (110): return "FORMAT_R64_UINT";
+        case (111): return "FORMAT_R64_SINT";
+        case (112): return "FORMAT_R64_SFLOAT";
+        case (113): return "FORMAT_R64G64_UINT";
+        case (114): return "FORMAT_R64G64_SINT";
+        case (115): return "FORMAT_R64G64_SFLOAT";
+        case (116): return "FORMAT_R64G64B64_UINT";
+        case (117): return "FORMAT_R64G64B64_SINT";
+        case (118): return "FORMAT_R64G64B64_SFLOAT";
+        case (119): return "FORMAT_R64G64B64A64_UINT";
+        case (120): return "FORMAT_R64G64B64A64_SINT";
+        case (121): return "FORMAT_R64G64B64A64_SFLOAT";
+        case (122): return "FORMAT_B10G11R11_UFLOAT_PACK32";
+        case (123): return "FORMAT_E5B9G9R9_UFLOAT_PACK32";
+        case (124): return "FORMAT_D16_UNORM";
+        case (125): return "FORMAT_X8_D24_UNORM_PACK32";
+        case (126): return "FORMAT_D32_SFLOAT";
+        case (127): return "FORMAT_S8_UINT";
+        case (128): return "FORMAT_D16_UNORM_S8_UINT";
+        case (129): return "FORMAT_D24_UNORM_S8_UINT";
+        case (130): return "FORMAT_D32_SFLOAT_S8_UINT";
+        case (131): return "FORMAT_BC1_RGB_UNORM_BLOCK";
+        case (132): return "FORMAT_BC1_RGB_SRGB_BLOCK";
+        case (133): return "FORMAT_BC1_RGBA_UNORM_BLOCK";
+        case (134): return "FORMAT_BC1_RGBA_SRGB_BLOCK";
+        case (135): return "FORMAT_BC2_UNORM_BLOCK";
+        case (136): return "FORMAT_BC2_SRGB_BLOCK";
+        case (137): return "FORMAT_BC3_UNORM_BLOCK";
+        case (138): return "FORMAT_BC3_SRGB_BLOCK";
+        case (139): return "FORMAT_BC4_UNORM_BLOCK";
+        case (140): return "FORMAT_BC4_SNORM_BLOCK";
+        case (141): return "FORMAT_BC5_UNORM_BLOCK";
+        case (142): return "FORMAT_BC5_SNORM_BLOCK";
+        case (143): return "FORMAT_BC6H_UFLOAT_BLOCK";
+        case (144): return "FORMAT_BC6H_SFLOAT_BLOCK";
+        case (145): return "FORMAT_BC7_UNORM_BLOCK";
+        case (146): return "FORMAT_BC7_SRGB_BLOCK";
+        case (147): return "FORMAT_ETC2_R8G8B8_UNORM_BLOCK";
+        case (148): return "FORMAT_ETC2_R8G8B8_SRGB_BLOCK";
+        case (149): return "FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK";
+        case (150): return "FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK";
+        case (151): return "FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK";
+        case (152): return "FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK";
+        case (153): return "FORMAT_EAC_R11_UNORM_BLOCK";
+        case (154): return "FORMAT_EAC_R11_SNORM_BLOCK";
+        case (155): return "FORMAT_EAC_R11G11_UNORM_BLOCK";
+        case (156): return "FORMAT_EAC_R11G11_SNORM_BLOCK";
+        case (157): return "FORMAT_ASTC_4x4_UNORM_BLOCK";
+        case (158): return "FORMAT_ASTC_4x4_SRGB_BLOCK";
+        case (159): return "FORMAT_ASTC_5x4_UNORM_BLOCK";
+        case (160): return "FORMAT_ASTC_5x4_SRGB_BLOCK";
+        case (161): return "FORMAT_ASTC_5x5_UNORM_BLOCK";
+        case (162): return "FORMAT_ASTC_5x5_SRGB_BLOCK";
+        case (163): return "FORMAT_ASTC_6x5_UNORM_BLOCK";
+        case (164): return "FORMAT_ASTC_6x5_SRGB_BLOCK";
+        case (165): return "FORMAT_ASTC_6x6_UNORM_BLOCK";
+        case (166): return "FORMAT_ASTC_6x6_SRGB_BLOCK";
+        case (167): return "FORMAT_ASTC_8x5_UNORM_BLOCK";
+        case (168): return "FORMAT_ASTC_8x5_SRGB_BLOCK";
+        case (169): return "FORMAT_ASTC_8x6_UNORM_BLOCK";
+        case (170): return "FORMAT_ASTC_8x6_SRGB_BLOCK";
+        case (171): return "FORMAT_ASTC_8x8_UNORM_BLOCK";
+        case (172): return "FORMAT_ASTC_8x8_SRGB_BLOCK";
+        case (173): return "FORMAT_ASTC_10x5_UNORM_BLOCK";
+        case (174): return "FORMAT_ASTC_10x5_SRGB_BLOCK";
+        case (175): return "FORMAT_ASTC_10x6_UNORM_BLOCK";
+        case (176): return "FORMAT_ASTC_10x6_SRGB_BLOCK";
+        case (177): return "FORMAT_ASTC_10x8_UNORM_BLOCK";
+        case (178): return "FORMAT_ASTC_10x8_SRGB_BLOCK";
+        case (179): return "FORMAT_ASTC_10x10_UNORM_BLOCK";
+        case (180): return "FORMAT_ASTC_10x10_SRGB_BLOCK";
+        case (181): return "FORMAT_ASTC_12x10_UNORM_BLOCK";
+        case (182): return "FORMAT_ASTC_12x10_SRGB_BLOCK";
+        case (183): return "FORMAT_ASTC_12x12_UNORM_BLOCK";
+        case (184): return "FORMAT_ASTC_12x12_SRGB_BLOCK";
+        case (1000156000): return "FORMAT_G8B8G8R8_422_UNORM";
+        case (1000156001): return "FORMAT_B8G8R8G8_422_UNORM";
+        case (1000156002): return "FORMAT_G8_B8_R8_3PLANE_420_UNORM";
+        case (1000156003): return "FORMAT_G8_B8R8_2PLANE_420_UNORM";
+        case (1000156004): return "FORMAT_G8_B8_R8_3PLANE_422_UNORM";
+        case (1000156005): return "FORMAT_G8_B8R8_2PLANE_422_UNORM";
+        case (1000156006): return "FORMAT_G8_B8_R8_3PLANE_444_UNORM";
+        case (1000156007): return "FORMAT_R10X6_UNORM_PACK16";
+        case (1000156008): return "FORMAT_R10X6G10X6_UNORM_2PACK16";
+        case (1000156009): return "FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16";
+        case (1000156010): return "FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16";
+        case (1000156011): return "FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16";
+        case (1000156012): return "FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16";
+        case (1000156013): return "FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16";
+        case (1000156014): return "FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16";
+        case (1000156015): return "FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16";
+        case (1000156016): return "FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16";
+        case (1000156017): return "FORMAT_R12X4_UNORM_PACK16";
+        case (1000156018): return "FORMAT_R12X4G12X4_UNORM_2PACK16";
+        case (1000156019): return "FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16";
+        case (1000156020): return "FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16";
+        case (1000156021): return "FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16";
+        case (1000156022): return "FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16";
+        case (1000156023): return "FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16";
+        case (1000156024): return "FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16";
+        case (1000156025): return "FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16";
+        case (1000156026): return "FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16";
+        case (1000156027): return "FORMAT_G16B16G16R16_422_UNORM";
+        case (1000156028): return "FORMAT_B16G16R16G16_422_UNORM";
+        case (1000156029): return "FORMAT_G16_B16_R16_3PLANE_420_UNORM";
+        case (1000156030): return "FORMAT_G16_B16R16_2PLANE_420_UNORM";
+        case (1000156031): return "FORMAT_G16_B16_R16_3PLANE_422_UNORM";
+        case (1000156032): return "FORMAT_G16_B16R16_2PLANE_422_UNORM";
+        case (1000156033): return "FORMAT_G16_B16_R16_3PLANE_444_UNORM";
+        case (1000054000): return "FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG";
+        case (1000054001): return "FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG";
+        case (1000054002): return "FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG";
+        case (1000054003): return "FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG";
+        case (1000054004): return "FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG";
+        case (1000054005): return "FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG";
+        case (1000054006): return "FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG";
+        case (1000054007): return "FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG";
+        case (1000066000): return "FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT";
+        case (1000066001): return "FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT";
+        case (1000066002): return "FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT";
+        case (1000066003): return "FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT";
+        case (1000066004): return "FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT";
+        case (1000066005): return "FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT";
+        case (1000066006): return "FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT";
+        case (1000066007): return "FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT";
+        case (1000066008): return "FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT";
+        case (1000066009): return "FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT";
+        case (1000066010): return "FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT";
+        case (1000066011): return "FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT";
+        case (1000066012): return "FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT";
+        case (1000066013): return "FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT";
+        default: return "UNKNOWN_VkFormat";
+    }
+}
+void DumpVkFormat(Printer &p, std::string name, VkFormat value, int width = 0) {
+    if (p.Type() == OutputType::json) {
+        p.PrintKeyValue(name, value, width);
+        return;
+    } else {
+        p.PrintKeyValue(name, VkFormatString(value), width);
+    }
+}
+static const char *VkImageTilingString(VkImageTiling value) {
+    switch (value) {
+        case (0): return "IMAGE_TILING_OPTIMAL";
+        case (1): return "IMAGE_TILING_LINEAR";
+        case (1000158000): return "IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT";
+        default: return "UNKNOWN_VkImageTiling";
+    }
+}
+void DumpVkImageTiling(Printer &p, std::string name, VkImageTiling value, int width = 0) {
+    if (p.Type() == OutputType::json) {
+        p.PrintKeyValue(name, value, width);
+        return;
+    } else {
+        p.PrintKeyValue(name, VkImageTilingString(value), width);
+    }
+}
+static const char *VkPhysicalDeviceTypeString(VkPhysicalDeviceType value) {
+    switch (value) {
+        case (0): return "PHYSICAL_DEVICE_TYPE_OTHER";
+        case (1): return "PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU";
+        case (2): return "PHYSICAL_DEVICE_TYPE_DISCRETE_GPU";
+        case (3): return "PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU";
+        case (4): return "PHYSICAL_DEVICE_TYPE_CPU";
+        default: return "UNKNOWN_VkPhysicalDeviceType";
+    }
+}
+void DumpVkPhysicalDeviceType(Printer &p, std::string name, VkPhysicalDeviceType value, int width = 0) {
+    if (p.Type() == OutputType::json) {
+        p.PrintKeyValue(name, value, width);
+        return;
+    } else {
+        p.PrintKeyValue(name, VkPhysicalDeviceTypeString(value), width);
+    }
+}
+static const char *VkPointClippingBehaviorString(VkPointClippingBehavior value) {
+    switch (value) {
+        case (0): return "POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES";
+        case (1): return "POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY";
+        default: return "UNKNOWN_VkPointClippingBehavior";
+    }
+}
+void DumpVkPointClippingBehavior(Printer &p, std::string name, VkPointClippingBehavior value, int width = 0) {
+    if (p.Type() == OutputType::json) {
+        p.PrintKeyValue(name, value, width);
+        return;
+    } else {
+        p.PrintKeyValue(name, VkPointClippingBehaviorString(value), width);
+    }
+}
+static const char *VkPresentModeKHRString(VkPresentModeKHR value) {
+    switch (value) {
+        case (0): return "PRESENT_MODE_IMMEDIATE_KHR";
+        case (1): return "PRESENT_MODE_MAILBOX_KHR";
+        case (2): return "PRESENT_MODE_FIFO_KHR";
+        case (3): return "PRESENT_MODE_FIFO_RELAXED_KHR";
+        case (1000111000): return "PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR";
+        case (1000111001): return "PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR";
+        default: return "UNKNOWN_VkPresentModeKHR";
+    }
+}
+void DumpVkPresentModeKHR(Printer &p, std::string name, VkPresentModeKHR value, int width = 0) {
+    if (p.Type() == OutputType::json) {
+        p.PrintKeyValue(name, value, width);
+        return;
+    } else {
+        p.PrintKeyValue(name, VkPresentModeKHRString(value), width);
+    }
+}
+static const char *VkResultString(VkResult value) {
+    switch (value) {
+        case (0): return "SUCCESS";
+        case (1): return "NOT_READY";
+        case (2): return "TIMEOUT";
+        case (3): return "EVENT_SET";
+        case (4): return "EVENT_RESET";
+        case (5): return "INCOMPLETE";
+        case (-1): return "ERROR_OUT_OF_HOST_MEMORY";
+        case (-2): return "ERROR_OUT_OF_DEVICE_MEMORY";
+        case (-3): return "ERROR_INITIALIZATION_FAILED";
+        case (-4): return "ERROR_DEVICE_LOST";
+        case (-5): return "ERROR_MEMORY_MAP_FAILED";
+        case (-6): return "ERROR_LAYER_NOT_PRESENT";
+        case (-7): return "ERROR_EXTENSION_NOT_PRESENT";
+        case (-8): return "ERROR_FEATURE_NOT_PRESENT";
+        case (-9): return "ERROR_INCOMPATIBLE_DRIVER";
+        case (-10): return "ERROR_TOO_MANY_OBJECTS";
+        case (-11): return "ERROR_FORMAT_NOT_SUPPORTED";
+        case (-12): return "ERROR_FRAGMENTED_POOL";
+        case (-1000069000): return "ERROR_OUT_OF_POOL_MEMORY";
+        case (-1000072003): return "ERROR_INVALID_EXTERNAL_HANDLE";
+        case (-1000000000): return "ERROR_SURFACE_LOST_KHR";
+        case (-1000000001): return "ERROR_NATIVE_WINDOW_IN_USE_KHR";
+        case (1000001003): return "SUBOPTIMAL_KHR";
+        case (-1000001004): return "ERROR_OUT_OF_DATE_KHR";
+        case (-1000003001): return "ERROR_INCOMPATIBLE_DISPLAY_KHR";
+        case (-1000011001): return "ERROR_VALIDATION_FAILED_EXT";
+        case (-1000012000): return "ERROR_INVALID_SHADER_NV";
+        case (-1000158000): return "ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT";
+        case (-1000161000): return "ERROR_FRAGMENTATION_EXT";
+        case (-1000174001): return "ERROR_NOT_PERMITTED_EXT";
+        case (-1000244000): return "ERROR_INVALID_DEVICE_ADDRESS_EXT";
+        case (-1000255000): return "ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT";
+        default: return "UNKNOWN_VkResult";
+    }
+}
+void DumpVkResult(Printer &p, std::string name, VkResult value, int width = 0) {
+    if (p.Type() == OutputType::json) {
+        p.PrintKeyValue(name, value, width);
+        return;
+    } else {
+        p.PrintKeyValue(name, VkResultString(value), width);
+    }
+}
+static const char *VkShaderFloatControlsIndependenceKHRString(VkShaderFloatControlsIndependenceKHR value) {
+    switch (value) {
+        case (0): return "SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR";
+        case (1): return "SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR";
+        case (2): return "SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR";
+        default: return "UNKNOWN_VkShaderFloatControlsIndependenceKHR";
+    }
+}
+void DumpVkShaderFloatControlsIndependenceKHR(Printer &p, std::string name, VkShaderFloatControlsIndependenceKHR value, int width = 0) {
+    if (p.Type() == OutputType::json) {
+        p.PrintKeyValue(name, value, width);
+        return;
+    } else {
+        p.PrintKeyValue(name, VkShaderFloatControlsIndependenceKHRString(value), width);
+    }
+}
+void DumpVkCompositeAlphaFlagsKHR(Printer &p, std::string name, VkCompositeAlphaFlagBitsKHR value, int width = 0) {
+    if (value == 0) p.PrintElement("None");
+    if (1 & value) p.SetAsType().PrintElement("COMPOSITE_ALPHA_OPAQUE_BIT_KHR");
+    if (2 & value) p.SetAsType().PrintElement("COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR");
+    if (4 & value) p.SetAsType().PrintElement("COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR");
+    if (8 & value) p.SetAsType().PrintElement("COMPOSITE_ALPHA_INHERIT_BIT_KHR");
+}
+void DumpVkCompositeAlphaFlagsKHR(Printer &p, std::string name, VkCompositeAlphaFlagsKHR value, int width = 0) {
+    if (p.Type() == OutputType::json) { p.PrintKeyValue(name, value); return; }
+    p.ObjectStart(name);
+    DumpVkCompositeAlphaFlagsKHR(p, name, static_cast<VkCompositeAlphaFlagBitsKHR>(value), width);
+    p.ObjectEnd();
+}
+void DumpVkCompositeAlphaFlagBitsKHR(Printer &p, std::string name, VkCompositeAlphaFlagBitsKHR value, int width = 0) {
+    if (p.Type() == OutputType::json) { p.PrintKeyValue(name, value); return; }
+    p.ObjectStart(name);
+    DumpVkCompositeAlphaFlagsKHR(p, name, value, width);
+    p.ObjectEnd();
+}
+void DumpVkDeviceGroupPresentModeFlagsKHR(Printer &p, std::string name, VkDeviceGroupPresentModeFlagBitsKHR value, int width = 0) {
+    if (value == 0) p.PrintElement("None");
+    if (1 & value) p.SetAsType().PrintElement("DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR");
+    if (2 & value) p.SetAsType().PrintElement("DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR");
+    if (4 & value) p.SetAsType().PrintElement("DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR");
+    if (8 & value) p.SetAsType().PrintElement("DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR");
+}
+void DumpVkDeviceGroupPresentModeFlagsKHR(Printer &p, std::string name, VkDeviceGroupPresentModeFlagsKHR value, int width = 0) {
+    if (p.Type() == OutputType::json) { p.PrintKeyValue(name, value); return; }
+    p.ObjectStart(name);
+    DumpVkDeviceGroupPresentModeFlagsKHR(p, name, static_cast<VkDeviceGroupPresentModeFlagBitsKHR>(value), width);
+    p.ObjectEnd();
+}
+void DumpVkDeviceGroupPresentModeFlagBitsKHR(Printer &p, std::string name, VkDeviceGroupPresentModeFlagBitsKHR value, int width = 0) {
+    if (p.Type() == OutputType::json) { p.PrintKeyValue(name, value); return; }
+    p.ObjectStart(name);
+    DumpVkDeviceGroupPresentModeFlagsKHR(p, name, value, width);
+    p.ObjectEnd();
+}
+void DumpVkFormatFeatureFlags(Printer &p, std::string name, VkFormatFeatureFlagBits value, int width = 0) {
+    if (value == 0) p.PrintElement("None");
+    if (1 & value) p.SetAsType().PrintElement("FORMAT_FEATURE_SAMPLED_IMAGE_BIT");
+    if (2 & value) p.SetAsType().PrintElement("FORMAT_FEATURE_STORAGE_IMAGE_BIT");
+    if (4 & value) p.SetAsType().PrintElement("FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT");
+    if (8 & value) p.SetAsType().PrintElement("FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT");
+    if (16 & value) p.SetAsType().PrintElement("FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT");
+    if (32 & value) p.SetAsType().PrintElement("FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT");
+    if (64 & value) p.SetAsType().PrintElement("FORMAT_FEATURE_VERTEX_BUFFER_BIT");
+    if (128 & value) p.SetAsType().PrintElement("FORMAT_FEATURE_COLOR_ATTACHMENT_BIT");
+    if (256 & value) p.SetAsType().PrintElement("FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT");
+    if (512 & value) p.SetAsType().PrintElement("FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT");
+    if (1024 & value) p.SetAsType().PrintElement("FORMAT_FEATURE_BLIT_SRC_BIT");
+    if (2048 & value) p.SetAsType().PrintElement("FORMAT_FEATURE_BLIT_DST_BIT");
+    if (4096 & value) p.SetAsType().PrintElement("FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT");
+    if (16384 & value) p.SetAsType().PrintElement("FORMAT_FEATURE_TRANSFER_SRC_BIT");
+    if (32768 & value) p.SetAsType().PrintElement("FORMAT_FEATURE_TRANSFER_DST_BIT");
+    if (131072 & value) p.SetAsType().PrintElement("FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT");
+    if (262144 & value) p.SetAsType().PrintElement("FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT");
+    if (524288 & value) p.SetAsType().PrintElement("FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT");
+    if (1048576 & value) p.SetAsType().PrintElement("FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT");
+    if (2097152 & value) p.SetAsType().PrintElement("FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT");
+    if (4194304 & value) p.SetAsType().PrintElement("FORMAT_FEATURE_DISJOINT_BIT");
+    if (8388608 & value) p.SetAsType().PrintElement("FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT");
+    if (8192 & value) p.SetAsType().PrintElement("FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG");
+    if (65536 & value) p.SetAsType().PrintElement("FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT");
+    if (16777216 & value) p.SetAsType().PrintElement("FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT");
+}
+void DumpVkFormatFeatureFlags(Printer &p, std::string name, VkFormatFeatureFlags value, int width = 0) {
+    if (p.Type() == OutputType::json) { p.PrintKeyValue(name, value); return; }
+    p.ObjectStart(name);
+    DumpVkFormatFeatureFlags(p, name, static_cast<VkFormatFeatureFlagBits>(value), width);
+    p.ObjectEnd();
+}
+void DumpVkFormatFeatureFlagBits(Printer &p, std::string name, VkFormatFeatureFlagBits value, int width = 0) {
+    if (p.Type() == OutputType::json) { p.PrintKeyValue(name, value); return; }
+    p.ObjectStart(name);
+    DumpVkFormatFeatureFlags(p, name, value, width);
+    p.ObjectEnd();
+}
+void DumpVkImageUsageFlags(Printer &p, std::string name, VkImageUsageFlagBits value, int width = 0) {
+    if (value == 0) p.PrintElement("None");
+    if (1 & value) p.SetAsType().PrintElement("IMAGE_USAGE_TRANSFER_SRC_BIT");
+    if (2 & value) p.SetAsType().PrintElement("IMAGE_USAGE_TRANSFER_DST_BIT");
+    if (4 & value) p.SetAsType().PrintElement("IMAGE_USAGE_SAMPLED_BIT");
+    if (8 & value) p.SetAsType().PrintElement("IMAGE_USAGE_STORAGE_BIT");
+    if (16 & value) p.SetAsType().PrintElement("IMAGE_USAGE_COLOR_ATTACHMENT_BIT");
+    if (32 & value) p.SetAsType().PrintElement("IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT");
+    if (64 & value) p.SetAsType().PrintElement("IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT");
+    if (128 & value) p.SetAsType().PrintElement("IMAGE_USAGE_INPUT_ATTACHMENT_BIT");
+    if (256 & value) p.SetAsType().PrintElement("IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV");
+    if (512 & value) p.SetAsType().PrintElement("IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT");
+}
+void DumpVkImageUsageFlags(Printer &p, std::string name, VkImageUsageFlags value, int width = 0) {
+    if (p.Type() == OutputType::json) { p.PrintKeyValue(name, value); return; }
+    p.ObjectStart(name);
+    DumpVkImageUsageFlags(p, name, static_cast<VkImageUsageFlagBits>(value), width);
+    p.ObjectEnd();
+}
+void DumpVkImageUsageFlagBits(Printer &p, std::string name, VkImageUsageFlagBits value, int width = 0) {
+    if (p.Type() == OutputType::json) { p.PrintKeyValue(name, value); return; }
+    p.ObjectStart(name);
+    DumpVkImageUsageFlags(p, name, value, width);
+    p.ObjectEnd();
+}
+void DumpVkMemoryHeapFlags(Printer &p, std::string name, VkMemoryHeapFlagBits value, int width = 0) {
+    if (value == 0) p.PrintElement("None");
+    if (1 & value) p.SetAsType().PrintElement("MEMORY_HEAP_DEVICE_LOCAL_BIT");
+    if (2 & value) p.SetAsType().PrintElement("MEMORY_HEAP_MULTI_INSTANCE_BIT");
+}
+void DumpVkMemoryHeapFlags(Printer &p, std::string name, VkMemoryHeapFlags value, int width = 0) {
+    if (p.Type() == OutputType::json) { p.PrintKeyValue(name, value); return; }
+    p.ObjectStart(name);
+    DumpVkMemoryHeapFlags(p, name, static_cast<VkMemoryHeapFlagBits>(value), width);
+    p.ObjectEnd();
+}
+void DumpVkMemoryHeapFlagBits(Printer &p, std::string name, VkMemoryHeapFlagBits value, int width = 0) {
+    if (p.Type() == OutputType::json) { p.PrintKeyValue(name, value); return; }
+    p.ObjectStart(name);
+    DumpVkMemoryHeapFlags(p, name, value, width);
+    p.ObjectEnd();
+}
+void DumpVkMemoryPropertyFlags(Printer &p, std::string name, VkMemoryPropertyFlagBits value, int width = 0) {
+    if (value == 0) p.PrintElement("None");
+    if (1 & value) p.SetAsType().PrintElement("MEMORY_PROPERTY_DEVICE_LOCAL_BIT");
+    if (2 & value) p.SetAsType().PrintElement("MEMORY_PROPERTY_HOST_VISIBLE_BIT");
+    if (4 & value) p.SetAsType().PrintElement("MEMORY_PROPERTY_HOST_COHERENT_BIT");
+    if (8 & value) p.SetAsType().PrintElement("MEMORY_PROPERTY_HOST_CACHED_BIT");
+    if (16 & value) p.SetAsType().PrintElement("MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT");
+    if (32 & value) p.SetAsType().PrintElement("MEMORY_PROPERTY_PROTECTED_BIT");
+    if (64 & value) p.SetAsType().PrintElement("MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD");
+    if (128 & value) p.SetAsType().PrintElement("MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD");
+}
+void DumpVkMemoryPropertyFlags(Printer &p, std::string name, VkMemoryPropertyFlags value, int width = 0) {
+    if (p.Type() == OutputType::json) { p.PrintKeyValue(name, value); return; }
+    p.ObjectStart(name);
+    DumpVkMemoryPropertyFlags(p, name, static_cast<VkMemoryPropertyFlagBits>(value), width);
+    p.ObjectEnd();
+}
+void DumpVkMemoryPropertyFlagBits(Printer &p, std::string name, VkMemoryPropertyFlagBits value, int width = 0) {
+    if (p.Type() == OutputType::json) { p.PrintKeyValue(name, value); return; }
+    p.ObjectStart(name);
+    DumpVkMemoryPropertyFlags(p, name, value, width);
+    p.ObjectEnd();
+}
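+// Unlike the Dump*Flags helpers above, this helper returns the set queue flag
+// names joined with " | " as a single string rather than printing each element.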
+std::string VkQueueFlagsString(VkQueueFlags value, int width = 0) {
+    std::string out;
+    bool is_first = true;
+    if (1 & value) {
+        if (is_first) { is_first = false; } else { out += " | "; }
+        out += "QUEUE_GRAPHICS";
+    }
+    if (2 & value) {
+        if (is_first) { is_first = false; } else { out += " | "; }
+        out += "QUEUE_COMPUTE";
+    }
+    if (4 & value) {
+        if (is_first) { is_first = false; } else { out += " | "; }
+        out += "QUEUE_TRANSFER";
+    }
+    if (8 & value) {
+        if (is_first) { is_first = false; } else { out += " | "; }
+        out += "QUEUE_SPARSE_BINDING";
+    }
+    if (16 & value) {
+        if (is_first) { is_first = false; } else { out += " | "; }
+        out += "QUEUE_PROTECTED";
+    }
+    return out;
+}
+void DumpVkResolveModeFlagsKHR(Printer &p, std::string name, VkResolveModeFlagBitsKHR value, int width = 0) {
+    if (value == 0) p.PrintElement("None");
+    // RESOLVE_MODE_NONE_KHR has value 0, so it is already covered by the
+    // "None" case above; a bitwise test against 0 would never trigger.
+    if (1 & value) p.SetAsType().PrintElement("RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR");
+    if (2 & value) p.SetAsType().PrintElement("RESOLVE_MODE_AVERAGE_BIT_KHR");
+    if (4 & value) p.SetAsType().PrintElement("RESOLVE_MODE_MIN_BIT_KHR");
+    if (8 & value) p.SetAsType().PrintElement("RESOLVE_MODE_MAX_BIT_KHR");
+}
+void DumpVkResolveModeFlagsKHR(Printer &p, std::string name, VkResolveModeFlagsKHR value, int width = 0) {
+    if (p.Type() == OutputType::json) { p.PrintKeyValue(name, value); return; }
+    p.ObjectStart(name);
+    DumpVkResolveModeFlagsKHR(p, name, static_cast<VkResolveModeFlagBitsKHR>(value), width);
+    p.ObjectEnd();
+}
+void DumpVkResolveModeFlagBitsKHR(Printer &p, std::string name, VkResolveModeFlagBitsKHR value, int width = 0) {
+    if (p.Type() == OutputType::json) { p.PrintKeyValue(name, value); return; }
+    p.ObjectStart(name);
+    DumpVkResolveModeFlagsKHR(p, name, value, width);
+    p.ObjectEnd();
+}
+void DumpVkSampleCountFlags(Printer &p, std::string name, VkSampleCountFlagBits value, int width = 0) {
+    if (value == 0) p.PrintElement("None");
+    if (1 & value) p.SetAsType().PrintElement("SAMPLE_COUNT_1_BIT");
+    if (2 & value) p.SetAsType().PrintElement("SAMPLE_COUNT_2_BIT");
+    if (4 & value) p.SetAsType().PrintElement("SAMPLE_COUNT_4_BIT");
+    if (8 & value) p.SetAsType().PrintElement("SAMPLE_COUNT_8_BIT");
+    if (16 & value) p.SetAsType().PrintElement("SAMPLE_COUNT_16_BIT");
+    if (32 & value) p.SetAsType().PrintElement("SAMPLE_COUNT_32_BIT");
+    if (64 & value) p.SetAsType().PrintElement("SAMPLE_COUNT_64_BIT");
+}
+void DumpVkSampleCountFlags(Printer &p, std::string name, VkSampleCountFlags value, int width = 0) {
+    if (p.Type() == OutputType::json) { p.PrintKeyValue(name, value); return; }
+    p.ObjectStart(name);
+    DumpVkSampleCountFlags(p, name, static_cast<VkSampleCountFlagBits>(value), width);
+    p.ObjectEnd();
+}
+void DumpVkSampleCountFlagBits(Printer &p, std::string name, VkSampleCountFlagBits value, int width = 0) {
+    if (p.Type() == OutputType::json) { p.PrintKeyValue(name, value); return; }
+    p.ObjectStart(name);
+    DumpVkSampleCountFlags(p, name, value, width);
+    p.ObjectEnd();
+}
+void DumpVkShaderStageFlags(Printer &p, std::string name, VkShaderStageFlagBits value, int width = 0) {
+    if (value == 0) p.PrintElement("None");
+    if (1 & value) p.SetAsType().PrintElement("SHADER_STAGE_VERTEX_BIT");
+    if (2 & value) p.SetAsType().PrintElement("SHADER_STAGE_TESSELLATION_CONTROL_BIT");
+    if (4 & value) p.SetAsType().PrintElement("SHADER_STAGE_TESSELLATION_EVALUATION_BIT");
+    if (8 & value) p.SetAsType().PrintElement("SHADER_STAGE_GEOMETRY_BIT");
+    if (16 & value) p.SetAsType().PrintElement("SHADER_STAGE_FRAGMENT_BIT");
+    if (32 & value) p.SetAsType().PrintElement("SHADER_STAGE_COMPUTE_BIT");
+    // SHADER_STAGE_ALL_GRAPHICS and SHADER_STAGE_ALL are multi-bit masks;
+    // report them only when every bit in the mask is set.
+    if ((value & 0x0000001F) == 0x0000001F) p.SetAsType().PrintElement("SHADER_STAGE_ALL_GRAPHICS");
+    if ((value & 0x7FFFFFFF) == 0x7FFFFFFF) p.SetAsType().PrintElement("SHADER_STAGE_ALL");
+    if (256 & value) p.SetAsType().PrintElement("SHADER_STAGE_RAYGEN_BIT_NV");
+    if (512 & value) p.SetAsType().PrintElement("SHADER_STAGE_ANY_HIT_BIT_NV");
+    if (1024 & value) p.SetAsType().PrintElement("SHADER_STAGE_CLOSEST_HIT_BIT_NV");
+    if (2048 & value) p.SetAsType().PrintElement("SHADER_STAGE_MISS_BIT_NV");
+    if (4096 & value) p.SetAsType().PrintElement("SHADER_STAGE_INTERSECTION_BIT_NV");
+    if (8192 & value) p.SetAsType().PrintElement("SHADER_STAGE_CALLABLE_BIT_NV");
+    if (64 & value) p.SetAsType().PrintElement("SHADER_STAGE_TASK_BIT_NV");
+    if (128 & value) p.SetAsType().PrintElement("SHADER_STAGE_MESH_BIT_NV");
+}
+void DumpVkShaderStageFlags(Printer &p, std::string name, VkShaderStageFlags value, int width = 0) {
+    if (p.Type() == OutputType::json) { p.PrintKeyValue(name, value); return; }
+    p.ObjectStart(name);
+    DumpVkShaderStageFlags(p, name, static_cast<VkShaderStageFlagBits>(value), width);
+    p.ObjectEnd();
+}
+void DumpVkShaderStageFlagBits(Printer &p, std::string name, VkShaderStageFlagBits value, int width = 0) {
+    if (p.Type() == OutputType::json) { p.PrintKeyValue(name, value); return; }
+    p.ObjectStart(name);
+    DumpVkShaderStageFlags(p, name, value, width);
+    p.ObjectEnd();
+}
+void DumpVkSubgroupFeatureFlags(Printer &p, std::string name, VkSubgroupFeatureFlagBits value, int width = 0) {
+    if (value == 0) p.PrintElement("None");
+    if (1 & value) p.SetAsType().PrintElement("SUBGROUP_FEATURE_BASIC_BIT");
+    if (2 & value) p.SetAsType().PrintElement("SUBGROUP_FEATURE_VOTE_BIT");
+    if (4 & value) p.SetAsType().PrintElement("SUBGROUP_FEATURE_ARITHMETIC_BIT");
+    if (8 & value) p.SetAsType().PrintElement("SUBGROUP_FEATURE_BALLOT_BIT");
+    if (16 & value) p.SetAsType().PrintElement("SUBGROUP_FEATURE_SHUFFLE_BIT");
+    if (32 & value) p.SetAsType().PrintElement("SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT");
+    if (64 & value) p.SetAsType().PrintElement("SUBGROUP_FEATURE_CLUSTERED_BIT");
+    if (128 & value) p.SetAsType().PrintElement("SUBGROUP_FEATURE_QUAD_BIT");
+    if (256 & value) p.SetAsType().PrintElement("SUBGROUP_FEATURE_PARTITIONED_BIT_NV");
+}
+void DumpVkSubgroupFeatureFlags(Printer &p, std::string name, VkSubgroupFeatureFlags value, int width = 0) {
+    if (p.Type() == OutputType::json) { p.PrintKeyValue(name, value); return; }
+    p.ObjectStart(name);
+    DumpVkSubgroupFeatureFlags(p, name, static_cast<VkSubgroupFeatureFlagBits>(value), width);
+    p.ObjectEnd();
+}
+void DumpVkSubgroupFeatureFlagBits(Printer &p, std::string name, VkSubgroupFeatureFlagBits value, int width = 0) {
+    if (p.Type() == OutputType::json) { p.PrintKeyValue(name, value); return; }
+    p.ObjectStart(name);
+    DumpVkSubgroupFeatureFlags(p, name, value, width);
+    p.ObjectEnd();
+}
+void DumpVkSurfaceTransformFlagsKHR(Printer &p, std::string name, VkSurfaceTransformFlagBitsKHR value, int width = 0) {
+    if (value == 0) p.PrintElement("None");
+    if (1 & value) p.SetAsType().PrintElement("SURFACE_TRANSFORM_IDENTITY_BIT_KHR");
+    if (2 & value) p.SetAsType().PrintElement("SURFACE_TRANSFORM_ROTATE_90_BIT_KHR");
+    if (4 & value) p.SetAsType().PrintElement("SURFACE_TRANSFORM_ROTATE_180_BIT_KHR");
+    if (8 & value) p.SetAsType().PrintElement("SURFACE_TRANSFORM_ROTATE_270_BIT_KHR");
+    if (16 & value) p.SetAsType().PrintElement("SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR");
+    if (32 & value) p.SetAsType().PrintElement("SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR");
+    if (64 & value) p.SetAsType().PrintElement("SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR");
+    if (128 & value) p.SetAsType().PrintElement("SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR");
+    if (256 & value) p.SetAsType().PrintElement("SURFACE_TRANSFORM_INHERIT_BIT_KHR");
+}
+void DumpVkSurfaceTransformFlagsKHR(Printer &p, std::string name, VkSurfaceTransformFlagsKHR value, int width = 0) {
+    if (p.Type() == OutputType::json) { p.PrintKeyValue(name, value); return; }
+    p.ObjectStart(name);
+    DumpVkSurfaceTransformFlagsKHR(p, name, static_cast<VkSurfaceTransformFlagBitsKHR>(value), width);
+    p.ObjectEnd();
+}
+void DumpVkSurfaceTransformFlagBitsKHR(Printer &p, std::string name, VkSurfaceTransformFlagBitsKHR value, int width = 0) {
+    if (p.Type() == OutputType::json) { p.PrintKeyValue(name, value); return; }
+    p.ObjectStart(name);
+    DumpVkSurfaceTransformFlagsKHR(p, name, value, width);
+    p.ObjectEnd();
+}
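+// Forward declarations for the structure dumpers defined later in this file.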
+void DumpVkDrmFormatModifierPropertiesEXT(Printer &p, std::string name, VkDrmFormatModifierPropertiesEXT &obj);
+void DumpVkDrmFormatModifierPropertiesListEXT(Printer &p, std::string name, VkDrmFormatModifierPropertiesListEXT &obj);
+void DumpVkExtent2D(Printer &p, std::string name, VkExtent2D &obj);
+void DumpVkExtent3D(Printer &p, std::string name, VkExtent3D &obj);
+void DumpVkLayerProperties(Printer &p, std::string name, VkLayerProperties &obj);
+void DumpVkPhysicalDevice16BitStorageFeatures(Printer &p, std::string name, VkPhysicalDevice16BitStorageFeatures &obj);
+void DumpVkPhysicalDevice8BitStorageFeaturesKHR(Printer &p, std::string name, VkPhysicalDevice8BitStorageFeaturesKHR &obj);
+void DumpVkPhysicalDeviceASTCDecodeFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceASTCDecodeFeaturesEXT &obj);
+void DumpVkPhysicalDeviceBlendOperationAdvancedFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT &obj);
+void DumpVkPhysicalDeviceBlendOperationAdvancedPropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT &obj);
+void DumpVkPhysicalDeviceBufferDeviceAddressFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceBufferDeviceAddressFeaturesEXT &obj);
+void DumpVkPhysicalDeviceConditionalRenderingFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceConditionalRenderingFeaturesEXT &obj);
+void DumpVkPhysicalDeviceConservativeRasterizationPropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceConservativeRasterizationPropertiesEXT &obj);
+void DumpVkPhysicalDeviceDepthClipEnableFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceDepthClipEnableFeaturesEXT &obj);
+void DumpVkPhysicalDeviceDepthStencilResolvePropertiesKHR(Printer &p, std::string name, VkPhysicalDeviceDepthStencilResolvePropertiesKHR &obj);
+void DumpVkPhysicalDeviceDescriptorIndexingFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceDescriptorIndexingFeaturesEXT &obj);
+void DumpVkPhysicalDeviceDescriptorIndexingPropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceDescriptorIndexingPropertiesEXT &obj);
+void DumpVkPhysicalDeviceDiscardRectanglePropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceDiscardRectanglePropertiesEXT &obj);
+void DumpVkPhysicalDeviceDriverPropertiesKHR(Printer &p, std::string name, VkPhysicalDeviceDriverPropertiesKHR &obj);
+void DumpVkPhysicalDeviceExternalMemoryHostPropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceExternalMemoryHostPropertiesEXT &obj);
+void DumpVkPhysicalDeviceFeatures(Printer &p, std::string name, VkPhysicalDeviceFeatures &obj);
+void DumpVkPhysicalDeviceFloatControlsPropertiesKHR(Printer &p, std::string name, VkPhysicalDeviceFloatControlsPropertiesKHR &obj);
+void DumpVkPhysicalDeviceFragmentDensityMapFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceFragmentDensityMapFeaturesEXT &obj);
+void DumpVkPhysicalDeviceFragmentDensityMapPropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceFragmentDensityMapPropertiesEXT &obj);
+void DumpVkPhysicalDeviceFragmentShaderInterlockFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT &obj);
+void DumpVkPhysicalDeviceHostQueryResetFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceHostQueryResetFeaturesEXT &obj);
+void DumpVkPhysicalDeviceIDProperties(Printer &p, std::string name, VkPhysicalDeviceIDProperties &obj);
+void DumpVkPhysicalDeviceImagelessFramebufferFeaturesKHR(Printer &p, std::string name, VkPhysicalDeviceImagelessFramebufferFeaturesKHR &obj);
+void DumpVkPhysicalDeviceIndexTypeUint8FeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceIndexTypeUint8FeaturesEXT &obj);
+void DumpVkPhysicalDeviceInlineUniformBlockFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceInlineUniformBlockFeaturesEXT &obj);
+void DumpVkPhysicalDeviceInlineUniformBlockPropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceInlineUniformBlockPropertiesEXT &obj);
+void DumpVkPhysicalDeviceLimits(Printer &p, std::string name, VkPhysicalDeviceLimits &obj);
+void DumpVkPhysicalDeviceLineRasterizationFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceLineRasterizationFeaturesEXT &obj);
+void DumpVkPhysicalDeviceLineRasterizationPropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceLineRasterizationPropertiesEXT &obj);
+void DumpVkPhysicalDeviceMaintenance3Properties(Printer &p, std::string name, VkPhysicalDeviceMaintenance3Properties &obj);
+void DumpVkPhysicalDeviceMemoryBudgetPropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceMemoryBudgetPropertiesEXT &obj);
+void DumpVkPhysicalDeviceMemoryPriorityFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceMemoryPriorityFeaturesEXT &obj);
+void DumpVkPhysicalDeviceMultiviewFeatures(Printer &p, std::string name, VkPhysicalDeviceMultiviewFeatures &obj);
+void DumpVkPhysicalDeviceMultiviewProperties(Printer &p, std::string name, VkPhysicalDeviceMultiviewProperties &obj);
+void DumpVkPhysicalDevicePCIBusInfoPropertiesEXT(Printer &p, std::string name, VkPhysicalDevicePCIBusInfoPropertiesEXT &obj);
+void DumpVkPhysicalDevicePerformanceQueryFeaturesKHR(Printer &p, std::string name, VkPhysicalDevicePerformanceQueryFeaturesKHR &obj);
+void DumpVkPhysicalDevicePerformanceQueryPropertiesKHR(Printer &p, std::string name, VkPhysicalDevicePerformanceQueryPropertiesKHR &obj);
+void DumpVkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR(Printer &p, std::string name, VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR &obj);
+void DumpVkPhysicalDevicePointClippingProperties(Printer &p, std::string name, VkPhysicalDevicePointClippingProperties &obj);
+void DumpVkPhysicalDeviceProtectedMemoryFeatures(Printer &p, std::string name, VkPhysicalDeviceProtectedMemoryFeatures &obj);
+void DumpVkPhysicalDeviceProtectedMemoryProperties(Printer &p, std::string name, VkPhysicalDeviceProtectedMemoryProperties &obj);
+void DumpVkPhysicalDevicePushDescriptorPropertiesKHR(Printer &p, std::string name, VkPhysicalDevicePushDescriptorPropertiesKHR &obj);
+void DumpVkPhysicalDeviceSampleLocationsPropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceSampleLocationsPropertiesEXT &obj);
+void DumpVkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT &obj);
+void DumpVkPhysicalDeviceSamplerYcbcrConversionFeatures(Printer &p, std::string name, VkPhysicalDeviceSamplerYcbcrConversionFeatures &obj);
+void DumpVkPhysicalDeviceScalarBlockLayoutFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceScalarBlockLayoutFeaturesEXT &obj);
+void DumpVkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR(Printer &p, std::string name, VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR &obj);
+void DumpVkPhysicalDeviceShaderAtomicInt64FeaturesKHR(Printer &p, std::string name, VkPhysicalDeviceShaderAtomicInt64FeaturesKHR &obj);
+void DumpVkPhysicalDeviceShaderClockFeaturesKHR(Printer &p, std::string name, VkPhysicalDeviceShaderClockFeaturesKHR &obj);
+void DumpVkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT &obj);
+void DumpVkPhysicalDeviceShaderDrawParametersFeatures(Printer &p, std::string name, VkPhysicalDeviceShaderDrawParametersFeatures &obj);
+void DumpVkPhysicalDeviceShaderFloat16Int8FeaturesKHR(Printer &p, std::string name, VkPhysicalDeviceShaderFloat16Int8FeaturesKHR &obj);
+void DumpVkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR(Printer &p, std::string name, VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR &obj);
+void DumpVkPhysicalDeviceSparseProperties(Printer &p, std::string name, VkPhysicalDeviceSparseProperties &obj);
+void DumpVkPhysicalDeviceSubgroupProperties(Printer &p, std::string name, VkPhysicalDeviceSubgroupProperties &obj);
+void DumpVkPhysicalDeviceSubgroupSizeControlFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceSubgroupSizeControlFeaturesEXT &obj);
+void DumpVkPhysicalDeviceSubgroupSizeControlPropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceSubgroupSizeControlPropertiesEXT &obj);
+void DumpVkPhysicalDeviceTexelBufferAlignmentFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT &obj);
+void DumpVkPhysicalDeviceTexelBufferAlignmentPropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT &obj);
+void DumpVkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT &obj);
+void DumpVkPhysicalDeviceTimelineSemaphoreFeaturesKHR(Printer &p, std::string name, VkPhysicalDeviceTimelineSemaphoreFeaturesKHR &obj);
+void DumpVkPhysicalDeviceTimelineSemaphorePropertiesKHR(Printer &p, std::string name, VkPhysicalDeviceTimelineSemaphorePropertiesKHR &obj);
+void DumpVkPhysicalDeviceTransformFeedbackFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceTransformFeedbackFeaturesEXT &obj);
+void DumpVkPhysicalDeviceTransformFeedbackPropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceTransformFeedbackPropertiesEXT &obj);
+void DumpVkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR(Printer &p, std::string name, VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR &obj);
+void DumpVkPhysicalDeviceVariablePointersFeatures(Printer &p, std::string name, VkPhysicalDeviceVariablePointersFeatures &obj);
+void DumpVkPhysicalDeviceVertexAttributeDivisorFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT &obj);
+void DumpVkPhysicalDeviceVertexAttributeDivisorPropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT &obj);
+void DumpVkPhysicalDeviceVulkanMemoryModelFeaturesKHR(Printer &p, std::string name, VkPhysicalDeviceVulkanMemoryModelFeaturesKHR &obj);
+void DumpVkPhysicalDeviceYcbcrImageArraysFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceYcbcrImageArraysFeaturesEXT &obj);
+void DumpVkSharedPresentSurfaceCapabilitiesKHR(Printer &p, std::string name, VkSharedPresentSurfaceCapabilitiesKHR &obj);
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+void DumpVkSurfaceCapabilitiesFullScreenExclusiveEXT(Printer &p, std::string name, VkSurfaceCapabilitiesFullScreenExclusiveEXT &obj);
+#endif  // VK_USE_PLATFORM_WIN32_KHR
+void DumpVkSurfaceCapabilitiesKHR(Printer &p, std::string name, VkSurfaceCapabilitiesKHR &obj);
+void DumpVkSurfaceFormatKHR(Printer &p, std::string name, VkSurfaceFormatKHR &obj);
+void DumpVkSurfaceProtectedCapabilitiesKHR(Printer &p, std::string name, VkSurfaceProtectedCapabilitiesKHR &obj);
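+// Definitions of the structure dumpers declared above.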
+void DumpVkDrmFormatModifierPropertiesEXT(Printer &p, std::string name, VkDrmFormatModifierPropertiesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyValue("drmFormatModifier", obj.drmFormatModifier, 27);
+    p.PrintKeyValue("drmFormatModifierPlaneCount", obj.drmFormatModifierPlaneCount, 27);
+    DumpVkFormatFeatureFlags(p, "drmFormatModifierTilingFeatures", obj.drmFormatModifierTilingFeatures, 27);
+    p.ObjectEnd();
+}
+void DumpVkDrmFormatModifierPropertiesListEXT(Printer &p, std::string name, VkDrmFormatModifierPropertiesListEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyValue("drmFormatModifierCount", obj.drmFormatModifierCount, 52);
+    p.ArrayStart("pDrmFormatModifierProperties", obj.drmFormatModifierCount);
+    for (uint32_t i = 0; i < obj.drmFormatModifierCount; i++) {
+        if (obj.pDrmFormatModifierProperties != nullptr) {
+            p.SetElementIndex(i);
+            DumpVkDrmFormatModifierPropertiesEXT(p, "pDrmFormatModifierProperties", obj.pDrmFormatModifierProperties[i]);
+        }
+    }
+    p.ArrayEnd();
+    p.ObjectEnd();
+}
+void DumpVkExtent2D(Printer &p, std::string name, VkExtent2D &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyValue("width", obj.width, 6);
+    p.PrintKeyValue("height", obj.height, 6);
+    p.ObjectEnd();
+}
+void DumpVkExtent3D(Printer &p, std::string name, VkExtent3D &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyValue("width", obj.width, 6);
+    p.PrintKeyValue("height", obj.height, 6);
+    p.PrintKeyValue("depth", obj.depth, 6);
+    p.ObjectEnd();
+}
+void DumpVkLayerProperties(Printer &p, std::string name, VkLayerProperties &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyString("layerName", obj.layerName, 21);
+    p.PrintKeyValue("specVersion", obj.specVersion, 21);
+    p.PrintKeyValue("implementationVersion", obj.implementationVersion, 21);
+    p.PrintKeyString("description", obj.description, 21);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDevice16BitStorageFeatures(Printer &p, std::string name, VkPhysicalDevice16BitStorageFeatures &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("storageBuffer16BitAccess", static_cast<bool>(obj.storageBuffer16BitAccess), 34);
+    p.PrintKeyBool("uniformAndStorageBuffer16BitAccess", static_cast<bool>(obj.uniformAndStorageBuffer16BitAccess), 34);
+    p.PrintKeyBool("storagePushConstant16", static_cast<bool>(obj.storagePushConstant16), 34);
+    p.PrintKeyBool("storageInputOutput16", static_cast<bool>(obj.storageInputOutput16), 34);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDevice8BitStorageFeaturesKHR(Printer &p, std::string name, VkPhysicalDevice8BitStorageFeaturesKHR &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("storageBuffer8BitAccess", static_cast<bool>(obj.storageBuffer8BitAccess), 33);
+    p.PrintKeyBool("uniformAndStorageBuffer8BitAccess", static_cast<bool>(obj.uniformAndStorageBuffer8BitAccess), 33);
+    p.PrintKeyBool("storagePushConstant8", static_cast<bool>(obj.storagePushConstant8), 33);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceASTCDecodeFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceASTCDecodeFeaturesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("decodeModeSharedExponent", static_cast<bool>(obj.decodeModeSharedExponent), 24);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceBlendOperationAdvancedFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("advancedBlendCoherentOperations", static_cast<bool>(obj.advancedBlendCoherentOperations), 31);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceBlendOperationAdvancedPropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyValue("advancedBlendMaxColorAttachments", obj.advancedBlendMaxColorAttachments, 37);
+    p.PrintKeyBool("advancedBlendIndependentBlend", static_cast<bool>(obj.advancedBlendIndependentBlend), 37);
+    p.PrintKeyBool("advancedBlendNonPremultipliedSrcColor", static_cast<bool>(obj.advancedBlendNonPremultipliedSrcColor), 37);
+    p.PrintKeyBool("advancedBlendNonPremultipliedDstColor", static_cast<bool>(obj.advancedBlendNonPremultipliedDstColor), 37);
+    p.PrintKeyBool("advancedBlendCorrelatedOverlap", static_cast<bool>(obj.advancedBlendCorrelatedOverlap), 37);
+    p.PrintKeyBool("advancedBlendAllOperations", static_cast<bool>(obj.advancedBlendAllOperations), 37);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceBufferDeviceAddressFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceBufferDeviceAddressFeaturesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("bufferDeviceAddress", static_cast<bool>(obj.bufferDeviceAddress), 32);
+    p.PrintKeyBool("bufferDeviceAddressCaptureReplay", static_cast<bool>(obj.bufferDeviceAddressCaptureReplay), 32);
+    p.PrintKeyBool("bufferDeviceAddressMultiDevice", static_cast<bool>(obj.bufferDeviceAddressMultiDevice), 32);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceConditionalRenderingFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceConditionalRenderingFeaturesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("conditionalRendering", static_cast<bool>(obj.conditionalRendering), 29);
+    p.PrintKeyBool("inheritedConditionalRendering", static_cast<bool>(obj.inheritedConditionalRendering), 29);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceConservativeRasterizationPropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceConservativeRasterizationPropertiesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyValue("primitiveOverestimationSize", obj.primitiveOverestimationSize, 43);
+    p.PrintKeyValue("maxExtraPrimitiveOverestimationSize", obj.maxExtraPrimitiveOverestimationSize, 43);
+    p.PrintKeyValue("extraPrimitiveOverestimationSizeGranularity", obj.extraPrimitiveOverestimationSizeGranularity, 43);
+    p.PrintKeyBool("primitiveUnderestimation", static_cast<bool>(obj.primitiveUnderestimation), 43);
+    p.PrintKeyBool("conservativePointAndLineRasterization", static_cast<bool>(obj.conservativePointAndLineRasterization), 43);
+    p.PrintKeyBool("degenerateTrianglesRasterized", static_cast<bool>(obj.degenerateTrianglesRasterized), 43);
+    p.PrintKeyBool("degenerateLinesRasterized", static_cast<bool>(obj.degenerateLinesRasterized), 43);
+    p.PrintKeyBool("fullyCoveredFragmentShaderInputVariable", static_cast<bool>(obj.fullyCoveredFragmentShaderInputVariable), 43);
+    p.PrintKeyBool("conservativeRasterizationPostDepthCoverage", static_cast<bool>(obj.conservativeRasterizationPostDepthCoverage), 43);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceDepthClipEnableFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceDepthClipEnableFeaturesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("depthClipEnable", static_cast<bool>(obj.depthClipEnable), 15);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceDepthStencilResolvePropertiesKHR(Printer &p, std::string name, VkPhysicalDeviceDepthStencilResolvePropertiesKHR &obj) {
+    p.ObjectStart(name);
+    DumpVkResolveModeFlagsKHR(p, "supportedDepthResolveModes", obj.supportedDepthResolveModes, 22);
+    DumpVkResolveModeFlagsKHR(p, "supportedStencilResolveModes", obj.supportedStencilResolveModes, 22);
+    p.PrintKeyBool("independentResolveNone", static_cast<bool>(obj.independentResolveNone), 22);
+    p.PrintKeyBool("independentResolve", static_cast<bool>(obj.independentResolve), 22);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceDescriptorIndexingFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceDescriptorIndexingFeaturesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("shaderInputAttachmentArrayDynamicIndexing", static_cast<bool>(obj.shaderInputAttachmentArrayDynamicIndexing), 50);
+    p.PrintKeyBool("shaderUniformTexelBufferArrayDynamicIndexing", static_cast<bool>(obj.shaderUniformTexelBufferArrayDynamicIndexing), 50);
+    p.PrintKeyBool("shaderStorageTexelBufferArrayDynamicIndexing", static_cast<bool>(obj.shaderStorageTexelBufferArrayDynamicIndexing), 50);
+    p.PrintKeyBool("shaderUniformBufferArrayNonUniformIndexing", static_cast<bool>(obj.shaderUniformBufferArrayNonUniformIndexing), 50);
+    p.PrintKeyBool("shaderSampledImageArrayNonUniformIndexing", static_cast<bool>(obj.shaderSampledImageArrayNonUniformIndexing), 50);
+    p.PrintKeyBool("shaderStorageBufferArrayNonUniformIndexing", static_cast<bool>(obj.shaderStorageBufferArrayNonUniformIndexing), 50);
+    p.PrintKeyBool("shaderStorageImageArrayNonUniformIndexing", static_cast<bool>(obj.shaderStorageImageArrayNonUniformIndexing), 50);
+    p.PrintKeyBool("shaderInputAttachmentArrayNonUniformIndexing", static_cast<bool>(obj.shaderInputAttachmentArrayNonUniformIndexing), 50);
+    p.PrintKeyBool("shaderUniformTexelBufferArrayNonUniformIndexing", static_cast<bool>(obj.shaderUniformTexelBufferArrayNonUniformIndexing), 50);
+    p.PrintKeyBool("shaderStorageTexelBufferArrayNonUniformIndexing", static_cast<bool>(obj.shaderStorageTexelBufferArrayNonUniformIndexing), 50);
+    p.PrintKeyBool("descriptorBindingUniformBufferUpdateAfterBind", static_cast<bool>(obj.descriptorBindingUniformBufferUpdateAfterBind), 50);
+    p.PrintKeyBool("descriptorBindingSampledImageUpdateAfterBind", static_cast<bool>(obj.descriptorBindingSampledImageUpdateAfterBind), 50);
+    p.PrintKeyBool("descriptorBindingStorageImageUpdateAfterBind", static_cast<bool>(obj.descriptorBindingStorageImageUpdateAfterBind), 50);
+    p.PrintKeyBool("descriptorBindingStorageBufferUpdateAfterBind", static_cast<bool>(obj.descriptorBindingStorageBufferUpdateAfterBind), 50);
+    p.PrintKeyBool("descriptorBindingUniformTexelBufferUpdateAfterBind", static_cast<bool>(obj.descriptorBindingUniformTexelBufferUpdateAfterBind), 50);
+    p.PrintKeyBool("descriptorBindingStorageTexelBufferUpdateAfterBind", static_cast<bool>(obj.descriptorBindingStorageTexelBufferUpdateAfterBind), 50);
+    p.PrintKeyBool("descriptorBindingUpdateUnusedWhilePending", static_cast<bool>(obj.descriptorBindingUpdateUnusedWhilePending), 50);
+    p.PrintKeyBool("descriptorBindingPartiallyBound", static_cast<bool>(obj.descriptorBindingPartiallyBound), 50);
+    p.PrintKeyBool("descriptorBindingVariableDescriptorCount", static_cast<bool>(obj.descriptorBindingVariableDescriptorCount), 50);
+    p.PrintKeyBool("runtimeDescriptorArray", static_cast<bool>(obj.runtimeDescriptorArray), 50);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceDescriptorIndexingPropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceDescriptorIndexingPropertiesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyValue("maxUpdateAfterBindDescriptorsInAllPools", obj.maxUpdateAfterBindDescriptorsInAllPools, 52);
+    p.PrintKeyBool("shaderUniformBufferArrayNonUniformIndexingNative", static_cast<bool>(obj.shaderUniformBufferArrayNonUniformIndexingNative), 52);
+    p.PrintKeyBool("shaderSampledImageArrayNonUniformIndexingNative", static_cast<bool>(obj.shaderSampledImageArrayNonUniformIndexingNative), 52);
+    p.PrintKeyBool("shaderStorageBufferArrayNonUniformIndexingNative", static_cast<bool>(obj.shaderStorageBufferArrayNonUniformIndexingNative), 52);
+    p.PrintKeyBool("shaderStorageImageArrayNonUniformIndexingNative", static_cast<bool>(obj.shaderStorageImageArrayNonUniformIndexingNative), 52);
+    p.PrintKeyBool("shaderInputAttachmentArrayNonUniformIndexingNative", static_cast<bool>(obj.shaderInputAttachmentArrayNonUniformIndexingNative), 52);
+    p.PrintKeyBool("robustBufferAccessUpdateAfterBind", static_cast<bool>(obj.robustBufferAccessUpdateAfterBind), 52);
+    p.PrintKeyBool("quadDivergentImplicitLod", static_cast<bool>(obj.quadDivergentImplicitLod), 52);
+    p.PrintKeyValue("maxPerStageDescriptorUpdateAfterBindSamplers", obj.maxPerStageDescriptorUpdateAfterBindSamplers, 52);
+    p.PrintKeyValue("maxPerStageDescriptorUpdateAfterBindUniformBuffers", obj.maxPerStageDescriptorUpdateAfterBindUniformBuffers, 52);
+    p.PrintKeyValue("maxPerStageDescriptorUpdateAfterBindStorageBuffers", obj.maxPerStageDescriptorUpdateAfterBindStorageBuffers, 52);
+    p.PrintKeyValue("maxPerStageDescriptorUpdateAfterBindSampledImages", obj.maxPerStageDescriptorUpdateAfterBindSampledImages, 52);
+    p.PrintKeyValue("maxPerStageDescriptorUpdateAfterBindStorageImages", obj.maxPerStageDescriptorUpdateAfterBindStorageImages, 52);
+    p.PrintKeyValue("maxPerStageDescriptorUpdateAfterBindInputAttachments", obj.maxPerStageDescriptorUpdateAfterBindInputAttachments, 52);
+    p.PrintKeyValue("maxPerStageUpdateAfterBindResources", obj.maxPerStageUpdateAfterBindResources, 52);
+    p.PrintKeyValue("maxDescriptorSetUpdateAfterBindSamplers", obj.maxDescriptorSetUpdateAfterBindSamplers, 52);
+    p.PrintKeyValue("maxDescriptorSetUpdateAfterBindUniformBuffers", obj.maxDescriptorSetUpdateAfterBindUniformBuffers, 52);
+    p.PrintKeyValue("maxDescriptorSetUpdateAfterBindUniformBuffersDynamic", obj.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic, 52);
+    p.PrintKeyValue("maxDescriptorSetUpdateAfterBindStorageBuffers", obj.maxDescriptorSetUpdateAfterBindStorageBuffers, 52);
+    p.PrintKeyValue("maxDescriptorSetUpdateAfterBindStorageBuffersDynamic", obj.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic, 52);
+    p.PrintKeyValue("maxDescriptorSetUpdateAfterBindSampledImages", obj.maxDescriptorSetUpdateAfterBindSampledImages, 52);
+    p.PrintKeyValue("maxDescriptorSetUpdateAfterBindStorageImages", obj.maxDescriptorSetUpdateAfterBindStorageImages, 52);
+    p.PrintKeyValue("maxDescriptorSetUpdateAfterBindInputAttachments", obj.maxDescriptorSetUpdateAfterBindInputAttachments, 52);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceDiscardRectanglePropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceDiscardRectanglePropertiesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyValue("maxDiscardRectangles", obj.maxDiscardRectangles, 20);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceDriverPropertiesKHR(Printer &p, std::string name, VkPhysicalDeviceDriverPropertiesKHR &obj) {
+    p.ObjectStart(name);
+    DumpVkDriverIdKHR(p, "driverID", obj.driverID, 18);
+    p.PrintKeyString("driverName", obj.driverName, 18);
+    p.PrintKeyString("driverInfo", obj.driverInfo, 18);
+    p.PrintKeyValue("conformanceVersion", obj.conformanceVersion, 18);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceExternalMemoryHostPropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceExternalMemoryHostPropertiesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyValue("minImportedHostPointerAlignment", to_hex_str(p, obj.minImportedHostPointerAlignment), 31);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceFeatures(Printer &p, std::string name, VkPhysicalDeviceFeatures &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("robustBufferAccess", static_cast<bool>(obj.robustBufferAccess), 39);
+    p.PrintKeyBool("fullDrawIndexUint32", static_cast<bool>(obj.fullDrawIndexUint32), 39);
+    p.PrintKeyBool("imageCubeArray", static_cast<bool>(obj.imageCubeArray), 39);
+    p.PrintKeyBool("independentBlend", static_cast<bool>(obj.independentBlend), 39);
+    p.PrintKeyBool("geometryShader", static_cast<bool>(obj.geometryShader), 39);
+    p.PrintKeyBool("tessellationShader", static_cast<bool>(obj.tessellationShader), 39);
+    p.PrintKeyBool("sampleRateShading", static_cast<bool>(obj.sampleRateShading), 39);
+    p.PrintKeyBool("dualSrcBlend", static_cast<bool>(obj.dualSrcBlend), 39);
+    p.PrintKeyBool("logicOp", static_cast<bool>(obj.logicOp), 39);
+    p.PrintKeyBool("multiDrawIndirect", static_cast<bool>(obj.multiDrawIndirect), 39);
+    p.PrintKeyBool("drawIndirectFirstInstance", static_cast<bool>(obj.drawIndirectFirstInstance), 39);
+    p.PrintKeyBool("depthClamp", static_cast<bool>(obj.depthClamp), 39);
+    p.PrintKeyBool("depthBiasClamp", static_cast<bool>(obj.depthBiasClamp), 39);
+    p.PrintKeyBool("fillModeNonSolid", static_cast<bool>(obj.fillModeNonSolid), 39);
+    p.PrintKeyBool("depthBounds", static_cast<bool>(obj.depthBounds), 39);
+    p.PrintKeyBool("wideLines", static_cast<bool>(obj.wideLines), 39);
+    p.PrintKeyBool("largePoints", static_cast<bool>(obj.largePoints), 39);
+    p.PrintKeyBool("alphaToOne", static_cast<bool>(obj.alphaToOne), 39);
+    p.PrintKeyBool("multiViewport", static_cast<bool>(obj.multiViewport), 39);
+    p.PrintKeyBool("samplerAnisotropy", static_cast<bool>(obj.samplerAnisotropy), 39);
+    p.PrintKeyBool("textureCompressionETC2", static_cast<bool>(obj.textureCompressionETC2), 39);
+    p.PrintKeyBool("textureCompressionASTC_LDR", static_cast<bool>(obj.textureCompressionASTC_LDR), 39);
+    p.PrintKeyBool("textureCompressionBC", static_cast<bool>(obj.textureCompressionBC), 39);
+    p.PrintKeyBool("occlusionQueryPrecise", static_cast<bool>(obj.occlusionQueryPrecise), 39);
+    p.PrintKeyBool("pipelineStatisticsQuery", static_cast<bool>(obj.pipelineStatisticsQuery), 39);
+    p.PrintKeyBool("vertexPipelineStoresAndAtomics", static_cast<bool>(obj.vertexPipelineStoresAndAtomics), 39);
+    p.PrintKeyBool("fragmentStoresAndAtomics", static_cast<bool>(obj.fragmentStoresAndAtomics), 39);
+    p.PrintKeyBool("shaderTessellationAndGeometryPointSize", static_cast<bool>(obj.shaderTessellationAndGeometryPointSize), 39);
+    p.PrintKeyBool("shaderImageGatherExtended", static_cast<bool>(obj.shaderImageGatherExtended), 39);
+    p.PrintKeyBool("shaderStorageImageExtendedFormats", static_cast<bool>(obj.shaderStorageImageExtendedFormats), 39);
+    p.PrintKeyBool("shaderStorageImageMultisample", static_cast<bool>(obj.shaderStorageImageMultisample), 39);
+    p.PrintKeyBool("shaderStorageImageReadWithoutFormat", static_cast<bool>(obj.shaderStorageImageReadWithoutFormat), 39);
+    p.PrintKeyBool("shaderStorageImageWriteWithoutFormat", static_cast<bool>(obj.shaderStorageImageWriteWithoutFormat), 39);
+    p.PrintKeyBool("shaderUniformBufferArrayDynamicIndexing", static_cast<bool>(obj.shaderUniformBufferArrayDynamicIndexing), 39);
+    p.PrintKeyBool("shaderSampledImageArrayDynamicIndexing", static_cast<bool>(obj.shaderSampledImageArrayDynamicIndexing), 39);
+    p.PrintKeyBool("shaderStorageBufferArrayDynamicIndexing", static_cast<bool>(obj.shaderStorageBufferArrayDynamicIndexing), 39);
+    p.PrintKeyBool("shaderStorageImageArrayDynamicIndexing", static_cast<bool>(obj.shaderStorageImageArrayDynamicIndexing), 39);
+    p.PrintKeyBool("shaderClipDistance", static_cast<bool>(obj.shaderClipDistance), 39);
+    p.PrintKeyBool("shaderCullDistance", static_cast<bool>(obj.shaderCullDistance), 39);
+    p.PrintKeyBool("shaderFloat64", static_cast<bool>(obj.shaderFloat64), 39);
+    p.PrintKeyBool("shaderInt64", static_cast<bool>(obj.shaderInt64), 39);
+    p.PrintKeyBool("shaderInt16", static_cast<bool>(obj.shaderInt16), 39);
+    p.PrintKeyBool("shaderResourceResidency", static_cast<bool>(obj.shaderResourceResidency), 39);
+    p.PrintKeyBool("shaderResourceMinLod", static_cast<bool>(obj.shaderResourceMinLod), 39);
+    p.PrintKeyBool("sparseBinding", static_cast<bool>(obj.sparseBinding), 39);
+    p.PrintKeyBool("sparseResidencyBuffer", static_cast<bool>(obj.sparseResidencyBuffer), 39);
+    p.PrintKeyBool("sparseResidencyImage2D", static_cast<bool>(obj.sparseResidencyImage2D), 39);
+    p.PrintKeyBool("sparseResidencyImage3D", static_cast<bool>(obj.sparseResidencyImage3D), 39);
+    p.PrintKeyBool("sparseResidency2Samples", static_cast<bool>(obj.sparseResidency2Samples), 39);
+    p.PrintKeyBool("sparseResidency4Samples", static_cast<bool>(obj.sparseResidency4Samples), 39);
+    p.PrintKeyBool("sparseResidency8Samples", static_cast<bool>(obj.sparseResidency8Samples), 39);
+    p.PrintKeyBool("sparseResidency16Samples", static_cast<bool>(obj.sparseResidency16Samples), 39);
+    p.PrintKeyBool("sparseResidencyAliased", static_cast<bool>(obj.sparseResidencyAliased), 39);
+    p.PrintKeyBool("variableMultisampleRate", static_cast<bool>(obj.variableMultisampleRate), 39);
+    p.PrintKeyBool("inheritedQueries", static_cast<bool>(obj.inheritedQueries), 39);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceFloatControlsPropertiesKHR(Printer &p, std::string name, VkPhysicalDeviceFloatControlsPropertiesKHR &obj) {
+    p.ObjectStart(name);
+    DumpVkShaderFloatControlsIndependenceKHR(p, "denormBehaviorIndependence", obj.denormBehaviorIndependence, 37);
+    DumpVkShaderFloatControlsIndependenceKHR(p, "roundingModeIndependence", obj.roundingModeIndependence, 37);
+    p.PrintKeyBool("shaderSignedZeroInfNanPreserveFloat16", static_cast<bool>(obj.shaderSignedZeroInfNanPreserveFloat16), 37);
+    p.PrintKeyBool("shaderSignedZeroInfNanPreserveFloat32", static_cast<bool>(obj.shaderSignedZeroInfNanPreserveFloat32), 37);
+    p.PrintKeyBool("shaderSignedZeroInfNanPreserveFloat64", static_cast<bool>(obj.shaderSignedZeroInfNanPreserveFloat64), 37);
+    p.PrintKeyBool("shaderDenormPreserveFloat16", static_cast<bool>(obj.shaderDenormPreserveFloat16), 37);
+    p.PrintKeyBool("shaderDenormPreserveFloat32", static_cast<bool>(obj.shaderDenormPreserveFloat32), 37);
+    p.PrintKeyBool("shaderDenormPreserveFloat64", static_cast<bool>(obj.shaderDenormPreserveFloat64), 37);
+    p.PrintKeyBool("shaderDenormFlushToZeroFloat16", static_cast<bool>(obj.shaderDenormFlushToZeroFloat16), 37);
+    p.PrintKeyBool("shaderDenormFlushToZeroFloat32", static_cast<bool>(obj.shaderDenormFlushToZeroFloat32), 37);
+    p.PrintKeyBool("shaderDenormFlushToZeroFloat64", static_cast<bool>(obj.shaderDenormFlushToZeroFloat64), 37);
+    p.PrintKeyBool("shaderRoundingModeRTEFloat16", static_cast<bool>(obj.shaderRoundingModeRTEFloat16), 37);
+    p.PrintKeyBool("shaderRoundingModeRTEFloat32", static_cast<bool>(obj.shaderRoundingModeRTEFloat32), 37);
+    p.PrintKeyBool("shaderRoundingModeRTEFloat64", static_cast<bool>(obj.shaderRoundingModeRTEFloat64), 37);
+    p.PrintKeyBool("shaderRoundingModeRTZFloat16", static_cast<bool>(obj.shaderRoundingModeRTZFloat16), 37);
+    p.PrintKeyBool("shaderRoundingModeRTZFloat32", static_cast<bool>(obj.shaderRoundingModeRTZFloat32), 37);
+    p.PrintKeyBool("shaderRoundingModeRTZFloat64", static_cast<bool>(obj.shaderRoundingModeRTZFloat64), 37);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceFragmentDensityMapFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceFragmentDensityMapFeaturesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("fragmentDensityMap", static_cast<bool>(obj.fragmentDensityMap), 37);
+    p.PrintKeyBool("fragmentDensityMapDynamic", static_cast<bool>(obj.fragmentDensityMapDynamic), 37);
+    p.PrintKeyBool("fragmentDensityMapNonSubsampledImages", static_cast<bool>(obj.fragmentDensityMapNonSubsampledImages), 37);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceFragmentDensityMapPropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceFragmentDensityMapPropertiesEXT &obj) {
+    p.ObjectStart(name);
+    DumpVkExtent2D(p, "minFragmentDensityTexelSize", obj.minFragmentDensityTexelSize);
+    DumpVkExtent2D(p, "maxFragmentDensityTexelSize", obj.maxFragmentDensityTexelSize);
+    p.PrintKeyBool("fragmentDensityInvocations", static_cast<bool>(obj.fragmentDensityInvocations), 26);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceFragmentShaderInterlockFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("fragmentShaderSampleInterlock", static_cast<bool>(obj.fragmentShaderSampleInterlock), 34);
+    p.PrintKeyBool("fragmentShaderPixelInterlock", static_cast<bool>(obj.fragmentShaderPixelInterlock), 34);
+    p.PrintKeyBool("fragmentShaderShadingRateInterlock", static_cast<bool>(obj.fragmentShaderShadingRateInterlock), 34);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceHostQueryResetFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceHostQueryResetFeaturesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("hostQueryReset", static_cast<bool>(obj.hostQueryReset), 14);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceIDProperties(Printer &p, std::string name, VkPhysicalDeviceIDProperties &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyString("deviceUUID", to_string_16(obj.deviceUUID), 15);
+    p.PrintKeyString("driverUUID", to_string_16(obj.driverUUID), 15);
+    if (obj.deviceLUIDValid) p.PrintKeyString("deviceLUID", to_string_8(obj.deviceLUID), 15);
+    p.PrintKeyValue("deviceNodeMask", obj.deviceNodeMask, 15);
+    p.PrintKeyBool("deviceLUIDValid", static_cast<bool>(obj.deviceLUIDValid), 15);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceImagelessFramebufferFeaturesKHR(Printer &p, std::string name, VkPhysicalDeviceImagelessFramebufferFeaturesKHR &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("imagelessFramebuffer", static_cast<bool>(obj.imagelessFramebuffer), 20);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceIndexTypeUint8FeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceIndexTypeUint8FeaturesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("indexTypeUint8", static_cast<bool>(obj.indexTypeUint8), 14);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceInlineUniformBlockFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceInlineUniformBlockFeaturesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("inlineUniformBlock", static_cast<bool>(obj.inlineUniformBlock), 50);
+    p.PrintKeyBool("descriptorBindingInlineUniformBlockUpdateAfterBind", static_cast<bool>(obj.descriptorBindingInlineUniformBlockUpdateAfterBind), 50);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceInlineUniformBlockPropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceInlineUniformBlockPropertiesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyValue("maxInlineUniformBlockSize", obj.maxInlineUniformBlockSize, 55);
+    p.PrintKeyValue("maxPerStageDescriptorInlineUniformBlocks", obj.maxPerStageDescriptorInlineUniformBlocks, 55);
+    p.PrintKeyValue("maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks", obj.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks, 55);
+    p.PrintKeyValue("maxDescriptorSetInlineUniformBlocks", obj.maxDescriptorSetInlineUniformBlocks, 55);
+    p.PrintKeyValue("maxDescriptorSetUpdateAfterBindInlineUniformBlocks", obj.maxDescriptorSetUpdateAfterBindInlineUniformBlocks, 55);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceLimits(Printer &p, std::string name, VkPhysicalDeviceLimits &obj) {
+    if (p.Type() == OutputType::json)
+        p.ObjectStart("limits");
+    else
+        p.SetSubHeader().ObjectStart(name);
+    p.PrintKeyValue("maxImageDimension1D", obj.maxImageDimension1D, 47);
+    p.PrintKeyValue("maxImageDimension2D", obj.maxImageDimension2D, 47);
+    p.PrintKeyValue("maxImageDimension3D", obj.maxImageDimension3D, 47);
+    p.PrintKeyValue("maxImageDimensionCube", obj.maxImageDimensionCube, 47);
+    p.PrintKeyValue("maxImageArrayLayers", obj.maxImageArrayLayers, 47);
+    p.PrintKeyValue("maxTexelBufferElements", obj.maxTexelBufferElements, 47);
+    p.PrintKeyValue("maxUniformBufferRange", obj.maxUniformBufferRange, 47);
+    p.PrintKeyValue("maxStorageBufferRange", obj.maxStorageBufferRange, 47);
+    p.PrintKeyValue("maxPushConstantsSize", obj.maxPushConstantsSize, 47);
+    p.PrintKeyValue("maxMemoryAllocationCount", obj.maxMemoryAllocationCount, 47);
+    p.PrintKeyValue("maxSamplerAllocationCount", obj.maxSamplerAllocationCount, 47);
+    p.PrintKeyValue("bufferImageGranularity", to_hex_str(p, obj.bufferImageGranularity), 47);
+    p.PrintKeyValue("sparseAddressSpaceSize", to_hex_str(p, obj.sparseAddressSpaceSize), 47);
+    p.PrintKeyValue("maxBoundDescriptorSets", obj.maxBoundDescriptorSets, 47);
+    p.PrintKeyValue("maxPerStageDescriptorSamplers", obj.maxPerStageDescriptorSamplers, 47);
+    p.PrintKeyValue("maxPerStageDescriptorUniformBuffers", obj.maxPerStageDescriptorUniformBuffers, 47);
+    p.PrintKeyValue("maxPerStageDescriptorStorageBuffers", obj.maxPerStageDescriptorStorageBuffers, 47);
+    p.PrintKeyValue("maxPerStageDescriptorSampledImages", obj.maxPerStageDescriptorSampledImages, 47);
+    p.PrintKeyValue("maxPerStageDescriptorStorageImages", obj.maxPerStageDescriptorStorageImages, 47);
+    p.PrintKeyValue("maxPerStageDescriptorInputAttachments", obj.maxPerStageDescriptorInputAttachments, 47);
+    p.PrintKeyValue("maxPerStageResources", obj.maxPerStageResources, 47);
+    p.PrintKeyValue("maxDescriptorSetSamplers", obj.maxDescriptorSetSamplers, 47);
+    p.PrintKeyValue("maxDescriptorSetUniformBuffers", obj.maxDescriptorSetUniformBuffers, 47);
+    p.PrintKeyValue("maxDescriptorSetUniformBuffersDynamic", obj.maxDescriptorSetUniformBuffersDynamic, 47);
+    p.PrintKeyValue("maxDescriptorSetStorageBuffers", obj.maxDescriptorSetStorageBuffers, 47);
+    p.PrintKeyValue("maxDescriptorSetStorageBuffersDynamic", obj.maxDescriptorSetStorageBuffersDynamic, 47);
+    p.PrintKeyValue("maxDescriptorSetSampledImages", obj.maxDescriptorSetSampledImages, 47);
+    p.PrintKeyValue("maxDescriptorSetStorageImages", obj.maxDescriptorSetStorageImages, 47);
+    p.PrintKeyValue("maxDescriptorSetInputAttachments", obj.maxDescriptorSetInputAttachments, 47);
+    p.PrintKeyValue("maxVertexInputAttributes", obj.maxVertexInputAttributes, 47);
+    p.PrintKeyValue("maxVertexInputBindings", obj.maxVertexInputBindings, 47);
+    p.PrintKeyValue("maxVertexInputAttributeOffset", obj.maxVertexInputAttributeOffset, 47);
+    p.PrintKeyValue("maxVertexInputBindingStride", obj.maxVertexInputBindingStride, 47);
+    p.PrintKeyValue("maxVertexOutputComponents", obj.maxVertexOutputComponents, 47);
+    p.PrintKeyValue("maxTessellationGenerationLevel", obj.maxTessellationGenerationLevel, 47);
+    p.PrintKeyValue("maxTessellationPatchSize", obj.maxTessellationPatchSize, 47);
+    p.PrintKeyValue("maxTessellationControlPerVertexInputComponents", obj.maxTessellationControlPerVertexInputComponents, 47);
+    p.PrintKeyValue("maxTessellationControlPerVertexOutputComponents", obj.maxTessellationControlPerVertexOutputComponents, 47);
+    p.PrintKeyValue("maxTessellationControlPerPatchOutputComponents", obj.maxTessellationControlPerPatchOutputComponents, 47);
+    p.PrintKeyValue("maxTessellationControlTotalOutputComponents", obj.maxTessellationControlTotalOutputComponents, 47);
+    p.PrintKeyValue("maxTessellationEvaluationInputComponents", obj.maxTessellationEvaluationInputComponents, 47);
+    p.PrintKeyValue("maxTessellationEvaluationOutputComponents", obj.maxTessellationEvaluationOutputComponents, 47);
+    p.PrintKeyValue("maxGeometryShaderInvocations", obj.maxGeometryShaderInvocations, 47);
+    p.PrintKeyValue("maxGeometryInputComponents", obj.maxGeometryInputComponents, 47);
+    p.PrintKeyValue("maxGeometryOutputComponents", obj.maxGeometryOutputComponents, 47);
+    p.PrintKeyValue("maxGeometryOutputVertices", obj.maxGeometryOutputVertices, 47);
+    p.PrintKeyValue("maxGeometryTotalOutputComponents", obj.maxGeometryTotalOutputComponents, 47);
+    p.PrintKeyValue("maxFragmentInputComponents", obj.maxFragmentInputComponents, 47);
+    p.PrintKeyValue("maxFragmentOutputAttachments", obj.maxFragmentOutputAttachments, 47);
+    p.PrintKeyValue("maxFragmentDualSrcAttachments", obj.maxFragmentDualSrcAttachments, 47);
+    p.PrintKeyValue("maxFragmentCombinedOutputResources", obj.maxFragmentCombinedOutputResources, 47);
+    p.PrintKeyValue("maxComputeSharedMemorySize", obj.maxComputeSharedMemorySize, 47);
+    p.ArrayStart("maxComputeWorkGroupCount", 3);
+    p.PrintElement(obj.maxComputeWorkGroupCount[0]);
+    p.PrintElement(obj.maxComputeWorkGroupCount[1]);
+    p.PrintElement(obj.maxComputeWorkGroupCount[2]);
+    p.ArrayEnd();
+    p.PrintKeyValue("maxComputeWorkGroupInvocations", obj.maxComputeWorkGroupInvocations, 47);
+    p.ArrayStart("maxComputeWorkGroupSize", 3);
+    p.PrintElement(obj.maxComputeWorkGroupSize[0]);
+    p.PrintElement(obj.maxComputeWorkGroupSize[1]);
+    p.PrintElement(obj.maxComputeWorkGroupSize[2]);
+    p.ArrayEnd();
+    p.PrintKeyValue("subPixelPrecisionBits", obj.subPixelPrecisionBits, 47);
+    p.PrintKeyValue("subTexelPrecisionBits", obj.subTexelPrecisionBits, 47);
+    p.PrintKeyValue("mipmapPrecisionBits", obj.mipmapPrecisionBits, 47);
+    p.PrintKeyValue("maxDrawIndexedIndexValue", obj.maxDrawIndexedIndexValue, 47);
+    p.PrintKeyValue("maxDrawIndirectCount", obj.maxDrawIndirectCount, 47);
+    p.PrintKeyValue("maxSamplerLodBias", obj.maxSamplerLodBias, 47);
+    p.PrintKeyValue("maxSamplerAnisotropy", obj.maxSamplerAnisotropy, 47);
+    p.PrintKeyValue("maxViewports", obj.maxViewports, 47);
+    p.ArrayStart("maxViewportDimensions", 2);
+    p.PrintElement(obj.maxViewportDimensions[0]);
+    p.PrintElement(obj.maxViewportDimensions[1]);
+    p.ArrayEnd();
+    p.ArrayStart("viewportBoundsRange", 2);
+    p.PrintElement(obj.viewportBoundsRange[0]);
+    p.PrintElement(obj.viewportBoundsRange[1]);
+    p.ArrayEnd();
+    p.PrintKeyValue("viewportSubPixelBits", obj.viewportSubPixelBits, 47);
+    p.PrintKeyValue("minMemoryMapAlignment", obj.minMemoryMapAlignment, 47);
+    p.PrintKeyValue("minTexelBufferOffsetAlignment", to_hex_str(p, obj.minTexelBufferOffsetAlignment), 47);
+    p.PrintKeyValue("minUniformBufferOffsetAlignment", to_hex_str(p, obj.minUniformBufferOffsetAlignment), 47);
+    p.PrintKeyValue("minStorageBufferOffsetAlignment", to_hex_str(p, obj.minStorageBufferOffsetAlignment), 47);
+    p.PrintKeyValue("minTexelOffset", obj.minTexelOffset, 47);
+    p.PrintKeyValue("maxTexelOffset", obj.maxTexelOffset, 47);
+    p.PrintKeyValue("minTexelGatherOffset", obj.minTexelGatherOffset, 47);
+    p.PrintKeyValue("maxTexelGatherOffset", obj.maxTexelGatherOffset, 47);
+    p.PrintKeyValue("minInterpolationOffset", obj.minInterpolationOffset, 47);
+    p.PrintKeyValue("maxInterpolationOffset", obj.maxInterpolationOffset, 47);
+    p.PrintKeyValue("subPixelInterpolationOffsetBits", obj.subPixelInterpolationOffsetBits, 47);
+    p.PrintKeyValue("maxFramebufferWidth", obj.maxFramebufferWidth, 47);
+    p.PrintKeyValue("maxFramebufferHeight", obj.maxFramebufferHeight, 47);
+    p.PrintKeyValue("maxFramebufferLayers", obj.maxFramebufferLayers, 47);
+    DumpVkSampleCountFlags(p, "framebufferColorSampleCounts", obj.framebufferColorSampleCounts, 47);
+    DumpVkSampleCountFlags(p, "framebufferDepthSampleCounts", obj.framebufferDepthSampleCounts, 47);
+    DumpVkSampleCountFlags(p, "framebufferStencilSampleCounts", obj.framebufferStencilSampleCounts, 47);
+    DumpVkSampleCountFlags(p, "framebufferNoAttachmentsSampleCounts", obj.framebufferNoAttachmentsSampleCounts, 47);
+    p.PrintKeyValue("maxColorAttachments", obj.maxColorAttachments, 47);
+    DumpVkSampleCountFlags(p, "sampledImageColorSampleCounts", obj.sampledImageColorSampleCounts, 47);
+    DumpVkSampleCountFlags(p, "sampledImageIntegerSampleCounts", obj.sampledImageIntegerSampleCounts, 47);
+    DumpVkSampleCountFlags(p, "sampledImageDepthSampleCounts", obj.sampledImageDepthSampleCounts, 47);
+    DumpVkSampleCountFlags(p, "sampledImageStencilSampleCounts", obj.sampledImageStencilSampleCounts, 47);
+    DumpVkSampleCountFlags(p, "storageImageSampleCounts", obj.storageImageSampleCounts, 47);
+    p.PrintKeyValue("maxSampleMaskWords", obj.maxSampleMaskWords, 47);
+    p.PrintKeyBool("timestampComputeAndGraphics", static_cast<bool>(obj.timestampComputeAndGraphics), 47);
+    p.PrintKeyValue("timestampPeriod", obj.timestampPeriod, 47);
+    p.PrintKeyValue("maxClipDistances", obj.maxClipDistances, 47);
+    p.PrintKeyValue("maxCullDistances", obj.maxCullDistances, 47);
+    p.PrintKeyValue("maxCombinedClipAndCullDistances", obj.maxCombinedClipAndCullDistances, 47);
+    p.PrintKeyValue("discreteQueuePriorities", obj.discreteQueuePriorities, 47);
+    p.ArrayStart("pointSizeRange", 2);
+    p.PrintElement(obj.pointSizeRange[0]);
+    p.PrintElement(obj.pointSizeRange[1]);
+    p.ArrayEnd();
+    p.ArrayStart("lineWidthRange", 2);
+    p.PrintElement(obj.lineWidthRange[0]);
+    p.PrintElement(obj.lineWidthRange[1]);
+    p.ArrayEnd();
+    p.PrintKeyValue("pointSizeGranularity", obj.pointSizeGranularity, 47);
+    p.PrintKeyValue("lineWidthGranularity", obj.lineWidthGranularity, 47);
+    p.PrintKeyBool("strictLines", static_cast<bool>(obj.strictLines), 47);
+    p.PrintKeyBool("standardSampleLocations", static_cast<bool>(obj.standardSampleLocations), 47);
+    p.PrintKeyValue("optimalBufferCopyOffsetAlignment", to_hex_str(p, obj.optimalBufferCopyOffsetAlignment), 47);
+    p.PrintKeyValue("optimalBufferCopyRowPitchAlignment", to_hex_str(p, obj.optimalBufferCopyRowPitchAlignment), 47);
+    p.PrintKeyValue("nonCoherentAtomSize", to_hex_str(p, obj.nonCoherentAtomSize), 47);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceLineRasterizationFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceLineRasterizationFeaturesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("rectangularLines", static_cast<bool>(obj.rectangularLines), 24);
+    p.PrintKeyBool("bresenhamLines", static_cast<bool>(obj.bresenhamLines), 24);
+    p.PrintKeyBool("smoothLines", static_cast<bool>(obj.smoothLines), 24);
+    p.PrintKeyBool("stippledRectangularLines", static_cast<bool>(obj.stippledRectangularLines), 24);
+    p.PrintKeyBool("stippledBresenhamLines", static_cast<bool>(obj.stippledBresenhamLines), 24);
+    p.PrintKeyBool("stippledSmoothLines", static_cast<bool>(obj.stippledSmoothLines), 24);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceLineRasterizationPropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceLineRasterizationPropertiesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyValue("lineSubPixelPrecisionBits", obj.lineSubPixelPrecisionBits, 25);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceMaintenance3Properties(Printer &p, std::string name, VkPhysicalDeviceMaintenance3Properties &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyValue("maxPerSetDescriptors", obj.maxPerSetDescriptors, 23);
+    p.PrintKeyValue("maxMemoryAllocationSize", to_hex_str(p, obj.maxMemoryAllocationSize), 23);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceMemoryBudgetPropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceMemoryBudgetPropertiesEXT &obj) {
+    p.ObjectStart(name);
+    p.ArrayStart("heapBudget", 16);
+    p.PrintElement(obj.heapBudget[0]);
+    p.PrintElement(obj.heapBudget[1]);
+    p.PrintElement(obj.heapBudget[2]);
+    p.PrintElement(obj.heapBudget[3]);
+    p.PrintElement(obj.heapBudget[4]);
+    p.PrintElement(obj.heapBudget[5]);
+    p.PrintElement(obj.heapBudget[6]);
+    p.PrintElement(obj.heapBudget[7]);
+    p.PrintElement(obj.heapBudget[8]);
+    p.PrintElement(obj.heapBudget[9]);
+    p.PrintElement(obj.heapBudget[10]);
+    p.PrintElement(obj.heapBudget[11]);
+    p.PrintElement(obj.heapBudget[12]);
+    p.PrintElement(obj.heapBudget[13]);
+    p.PrintElement(obj.heapBudget[14]);
+    p.PrintElement(obj.heapBudget[15]);
+    p.ArrayEnd();
+    p.ArrayStart("heapUsage", 16);
+    p.PrintElement(obj.heapUsage[0]);
+    p.PrintElement(obj.heapUsage[1]);
+    p.PrintElement(obj.heapUsage[2]);
+    p.PrintElement(obj.heapUsage[3]);
+    p.PrintElement(obj.heapUsage[4]);
+    p.PrintElement(obj.heapUsage[5]);
+    p.PrintElement(obj.heapUsage[6]);
+    p.PrintElement(obj.heapUsage[7]);
+    p.PrintElement(obj.heapUsage[8]);
+    p.PrintElement(obj.heapUsage[9]);
+    p.PrintElement(obj.heapUsage[10]);
+    p.PrintElement(obj.heapUsage[11]);
+    p.PrintElement(obj.heapUsage[12]);
+    p.PrintElement(obj.heapUsage[13]);
+    p.PrintElement(obj.heapUsage[14]);
+    p.PrintElement(obj.heapUsage[15]);
+    p.ArrayEnd();
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceMemoryPriorityFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceMemoryPriorityFeaturesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("memoryPriority", static_cast<bool>(obj.memoryPriority), 14);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceMultiviewFeatures(Printer &p, std::string name, VkPhysicalDeviceMultiviewFeatures &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("multiview", static_cast<bool>(obj.multiview), 27);
+    p.PrintKeyBool("multiviewGeometryShader", static_cast<bool>(obj.multiviewGeometryShader), 27);
+    p.PrintKeyBool("multiviewTessellationShader", static_cast<bool>(obj.multiviewTessellationShader), 27);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceMultiviewProperties(Printer &p, std::string name, VkPhysicalDeviceMultiviewProperties &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyValue("maxMultiviewViewCount", obj.maxMultiviewViewCount, 25);
+    p.PrintKeyValue("maxMultiviewInstanceIndex", obj.maxMultiviewInstanceIndex, 25);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDevicePCIBusInfoPropertiesEXT(Printer &p, std::string name, VkPhysicalDevicePCIBusInfoPropertiesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyValue("pciDomain", obj.pciDomain, 11);
+    p.PrintKeyValue("pciBus", obj.pciBus, 11);
+    p.PrintKeyValue("pciDevice", obj.pciDevice, 11);
+    p.PrintKeyValue("pciFunction", obj.pciFunction, 11);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDevicePerformanceQueryFeaturesKHR(Printer &p, std::string name, VkPhysicalDevicePerformanceQueryFeaturesKHR &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("performanceCounterQueryPools", static_cast<bool>(obj.performanceCounterQueryPools), 36);
+    p.PrintKeyBool("performanceCounterMultipleQueryPools", static_cast<bool>(obj.performanceCounterMultipleQueryPools), 36);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDevicePerformanceQueryPropertiesKHR(Printer &p, std::string name, VkPhysicalDevicePerformanceQueryPropertiesKHR &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("allowCommandBufferQueryCopies", static_cast<bool>(obj.allowCommandBufferQueryCopies), 29);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR(Printer &p, std::string name, VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("pipelineExecutableInfo", static_cast<bool>(obj.pipelineExecutableInfo), 22);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDevicePointClippingProperties(Printer &p, std::string name, VkPhysicalDevicePointClippingProperties &obj) {
+    p.ObjectStart(name);
+    DumpVkPointClippingBehavior(p, "pointClippingBehavior", obj.pointClippingBehavior, 5);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceProtectedMemoryFeatures(Printer &p, std::string name, VkPhysicalDeviceProtectedMemoryFeatures &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("protectedMemory", static_cast<bool>(obj.protectedMemory), 15);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceProtectedMemoryProperties(Printer &p, std::string name, VkPhysicalDeviceProtectedMemoryProperties &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("protectedNoFault", static_cast<bool>(obj.protectedNoFault), 16);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDevicePushDescriptorPropertiesKHR(Printer &p, std::string name, VkPhysicalDevicePushDescriptorPropertiesKHR &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyValue("maxPushDescriptors", obj.maxPushDescriptors, 18);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceSampleLocationsPropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceSampleLocationsPropertiesEXT &obj) {
+    p.ObjectStart(name);
+    DumpVkSampleCountFlags(p, "sampleLocationSampleCounts", obj.sampleLocationSampleCounts, 32);
+    DumpVkExtent2D(p, "maxSampleLocationGridSize", obj.maxSampleLocationGridSize);
+    p.ArrayStart("sampleLocationCoordinateRange", 2);
+    p.PrintElement(obj.sampleLocationCoordinateRange[0]);
+    p.PrintElement(obj.sampleLocationCoordinateRange[1]);
+    p.ArrayEnd();
+    p.PrintKeyValue("sampleLocationSubPixelBits", obj.sampleLocationSubPixelBits, 32);
+    p.PrintKeyBool("variableSampleLocations", static_cast<bool>(obj.variableSampleLocations), 32);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("filterMinmaxSingleComponentFormats", static_cast<bool>(obj.filterMinmaxSingleComponentFormats), 34);
+    p.PrintKeyBool("filterMinmaxImageComponentMapping", static_cast<bool>(obj.filterMinmaxImageComponentMapping), 34);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceSamplerYcbcrConversionFeatures(Printer &p, std::string name, VkPhysicalDeviceSamplerYcbcrConversionFeatures &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("samplerYcbcrConversion", static_cast<bool>(obj.samplerYcbcrConversion), 22);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceScalarBlockLayoutFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceScalarBlockLayoutFeaturesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("scalarBlockLayout", static_cast<bool>(obj.scalarBlockLayout), 17);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR(Printer &p, std::string name, VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("separateDepthStencilLayouts", static_cast<bool>(obj.separateDepthStencilLayouts), 27);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceShaderAtomicInt64FeaturesKHR(Printer &p, std::string name, VkPhysicalDeviceShaderAtomicInt64FeaturesKHR &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("shaderBufferInt64Atomics", static_cast<bool>(obj.shaderBufferInt64Atomics), 24);
+    p.PrintKeyBool("shaderSharedInt64Atomics", static_cast<bool>(obj.shaderSharedInt64Atomics), 24);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceShaderClockFeaturesKHR(Printer &p, std::string name, VkPhysicalDeviceShaderClockFeaturesKHR &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("shaderSubgroupClock", static_cast<bool>(obj.shaderSubgroupClock), 19);
+    p.PrintKeyBool("shaderDeviceClock", static_cast<bool>(obj.shaderDeviceClock), 19);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("shaderDemoteToHelperInvocation", static_cast<bool>(obj.shaderDemoteToHelperInvocation), 30);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceShaderDrawParametersFeatures(Printer &p, std::string name, VkPhysicalDeviceShaderDrawParametersFeatures &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("shaderDrawParameters", static_cast<bool>(obj.shaderDrawParameters), 20);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceShaderFloat16Int8FeaturesKHR(Printer &p, std::string name, VkPhysicalDeviceShaderFloat16Int8FeaturesKHR &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("shaderFloat16", static_cast<bool>(obj.shaderFloat16), 13);
+    p.PrintKeyBool("shaderInt8", static_cast<bool>(obj.shaderInt8), 13);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR(Printer &p, std::string name, VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("shaderSubgroupExtendedTypes", static_cast<bool>(obj.shaderSubgroupExtendedTypes), 27);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceSparseProperties(Printer &p, std::string name, VkPhysicalDeviceSparseProperties &obj) {
+    if (p.Type() == OutputType::json)
+        p.ObjectStart("sparseProperties");
+    else
+        p.SetSubHeader().ObjectStart(name);
+    p.PrintKeyBool("residencyStandard2DBlockShape", static_cast<bool>(obj.residencyStandard2DBlockShape), 40);
+    p.PrintKeyBool("residencyStandard2DMultisampleBlockShape", static_cast<bool>(obj.residencyStandard2DMultisampleBlockShape), 40);
+    p.PrintKeyBool("residencyStandard3DBlockShape", static_cast<bool>(obj.residencyStandard3DBlockShape), 40);
+    p.PrintKeyBool("residencyAlignedMipSize", static_cast<bool>(obj.residencyAlignedMipSize), 40);
+    p.PrintKeyBool("residencyNonResidentStrict", static_cast<bool>(obj.residencyNonResidentStrict), 40);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceSubgroupProperties(Printer &p, std::string name, VkPhysicalDeviceSubgroupProperties &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyValue("subgroupSize", obj.subgroupSize, 25);
+    DumpVkShaderStageFlags(p, "supportedStages", obj.supportedStages, 25);
+    DumpVkSubgroupFeatureFlags(p, "supportedOperations", obj.supportedOperations, 25);
+    p.PrintKeyBool("quadOperationsInAllStages", static_cast<bool>(obj.quadOperationsInAllStages), 25);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceSubgroupSizeControlFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceSubgroupSizeControlFeaturesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("subgroupSizeControl", static_cast<bool>(obj.subgroupSizeControl), 20);
+    p.PrintKeyBool("computeFullSubgroups", static_cast<bool>(obj.computeFullSubgroups), 20);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceSubgroupSizeControlPropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceSubgroupSizeControlPropertiesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyValue("minSubgroupSize", obj.minSubgroupSize, 28);
+    p.PrintKeyValue("maxSubgroupSize", obj.maxSubgroupSize, 28);
+    p.PrintKeyValue("maxComputeWorkgroupSubgroups", obj.maxComputeWorkgroupSubgroups, 28);
+    DumpVkShaderStageFlags(p, "requiredSubgroupSizeStages", obj.requiredSubgroupSizeStages, 28);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceTexelBufferAlignmentFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("texelBufferAlignment", static_cast<bool>(obj.texelBufferAlignment), 20);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceTexelBufferAlignmentPropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyValue("storageTexelBufferOffsetAlignmentBytes", to_hex_str(p, obj.storageTexelBufferOffsetAlignmentBytes), 44);
+    p.PrintKeyBool("storageTexelBufferOffsetSingleTexelAlignment", static_cast<bool>(obj.storageTexelBufferOffsetSingleTexelAlignment), 44);
+    p.PrintKeyValue("uniformTexelBufferOffsetAlignmentBytes", to_hex_str(p, obj.uniformTexelBufferOffsetAlignmentBytes), 44);
+    p.PrintKeyBool("uniformTexelBufferOffsetSingleTexelAlignment", static_cast<bool>(obj.uniformTexelBufferOffsetSingleTexelAlignment), 44);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("textureCompressionASTC_HDR", static_cast<bool>(obj.textureCompressionASTC_HDR), 26);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceTimelineSemaphoreFeaturesKHR(Printer &p, std::string name, VkPhysicalDeviceTimelineSemaphoreFeaturesKHR &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("timelineSemaphore", static_cast<bool>(obj.timelineSemaphore), 17);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceTimelineSemaphorePropertiesKHR(Printer &p, std::string name, VkPhysicalDeviceTimelineSemaphorePropertiesKHR &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyValue("maxTimelineSemaphoreValueDifference", obj.maxTimelineSemaphoreValueDifference, 35);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceTransformFeedbackFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceTransformFeedbackFeaturesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("transformFeedback", static_cast<bool>(obj.transformFeedback), 17);
+    p.PrintKeyBool("geometryStreams", static_cast<bool>(obj.geometryStreams), 17);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceTransformFeedbackPropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceTransformFeedbackPropertiesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyValue("maxTransformFeedbackStreams", obj.maxTransformFeedbackStreams, 42);
+    p.PrintKeyValue("maxTransformFeedbackBuffers", obj.maxTransformFeedbackBuffers, 42);
+    p.PrintKeyValue("maxTransformFeedbackBufferSize", to_hex_str(p, obj.maxTransformFeedbackBufferSize), 42);
+    p.PrintKeyValue("maxTransformFeedbackStreamDataSize", obj.maxTransformFeedbackStreamDataSize, 42);
+    p.PrintKeyValue("maxTransformFeedbackBufferDataSize", obj.maxTransformFeedbackBufferDataSize, 42);
+    p.PrintKeyValue("maxTransformFeedbackBufferDataStride", obj.maxTransformFeedbackBufferDataStride, 42);
+    p.PrintKeyBool("transformFeedbackQueries", static_cast<bool>(obj.transformFeedbackQueries), 42);
+    p.PrintKeyBool("transformFeedbackStreamsLinesTriangles", static_cast<bool>(obj.transformFeedbackStreamsLinesTriangles), 42);
+    p.PrintKeyBool("transformFeedbackRasterizationStreamSelect", static_cast<bool>(obj.transformFeedbackRasterizationStreamSelect), 42);
+    p.PrintKeyBool("transformFeedbackDraw", static_cast<bool>(obj.transformFeedbackDraw), 42);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR(Printer &p, std::string name, VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("uniformBufferStandardLayout", static_cast<bool>(obj.uniformBufferStandardLayout), 27);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceVariablePointersFeatures(Printer &p, std::string name, VkPhysicalDeviceVariablePointersFeatures &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("variablePointersStorageBuffer", static_cast<bool>(obj.variablePointersStorageBuffer), 29);
+    p.PrintKeyBool("variablePointers", static_cast<bool>(obj.variablePointers), 29);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceVertexAttributeDivisorFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("vertexAttributeInstanceRateDivisor", static_cast<bool>(obj.vertexAttributeInstanceRateDivisor), 38);
+    p.PrintKeyBool("vertexAttributeInstanceRateZeroDivisor", static_cast<bool>(obj.vertexAttributeInstanceRateZeroDivisor), 38);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceVertexAttributeDivisorPropertiesEXT(Printer &p, std::string name, VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyValue("maxVertexAttribDivisor", obj.maxVertexAttribDivisor, 22);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceVulkanMemoryModelFeaturesKHR(Printer &p, std::string name, VkPhysicalDeviceVulkanMemoryModelFeaturesKHR &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("vulkanMemoryModel", static_cast<bool>(obj.vulkanMemoryModel), 45);
+    p.PrintKeyBool("vulkanMemoryModelDeviceScope", static_cast<bool>(obj.vulkanMemoryModelDeviceScope), 45);
+    p.PrintKeyBool("vulkanMemoryModelAvailabilityVisibilityChains", static_cast<bool>(obj.vulkanMemoryModelAvailabilityVisibilityChains), 45);
+    p.ObjectEnd();
+}
+void DumpVkPhysicalDeviceYcbcrImageArraysFeaturesEXT(Printer &p, std::string name, VkPhysicalDeviceYcbcrImageArraysFeaturesEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("ycbcrImageArrays", static_cast<bool>(obj.ycbcrImageArrays), 16);
+    p.ObjectEnd();
+}
+void DumpVkSharedPresentSurfaceCapabilitiesKHR(Printer &p, std::string name, VkSharedPresentSurfaceCapabilitiesKHR &obj) {
+    p.ObjectStart(name);
+    DumpVkImageUsageFlags(p, "sharedPresentSupportedUsageFlags", obj.sharedPresentSupportedUsageFlags, 5);
+    p.ObjectEnd();
+}
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+void DumpVkSurfaceCapabilitiesFullScreenExclusiveEXT(Printer &p, std::string name, VkSurfaceCapabilitiesFullScreenExclusiveEXT &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("fullScreenExclusiveSupported", static_cast<bool>(obj.fullScreenExclusiveSupported), 28);
+    p.ObjectEnd();
+}
+#endif  // VK_USE_PLATFORM_WIN32_KHR
+void DumpVkSurfaceCapabilitiesKHR(Printer &p, std::string name, VkSurfaceCapabilitiesKHR &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyValue("minImageCount", obj.minImageCount, 19);
+    p.PrintKeyValue("maxImageCount", obj.maxImageCount, 19);
+    DumpVkExtent2D(p, "currentExtent", obj.currentExtent);
+    DumpVkExtent2D(p, "minImageExtent", obj.minImageExtent);
+    DumpVkExtent2D(p, "maxImageExtent", obj.maxImageExtent);
+    p.PrintKeyValue("maxImageArrayLayers", obj.maxImageArrayLayers, 19);
+    DumpVkSurfaceTransformFlagsKHR(p, "supportedTransforms", obj.supportedTransforms, 19);
+    DumpVkSurfaceTransformFlagBitsKHR(p, "currentTransform", obj.currentTransform, 19);
+    DumpVkCompositeAlphaFlagsKHR(p, "supportedCompositeAlpha", obj.supportedCompositeAlpha, 19);
+    DumpVkImageUsageFlags(p, "supportedUsageFlags", obj.supportedUsageFlags, 19);
+    p.ObjectEnd();
+}
+void DumpVkSurfaceFormatKHR(Printer &p, std::string name, VkSurfaceFormatKHR &obj) {
+    p.ObjectStart(name);
+    DumpVkFormat(p, "format", obj.format, 6);
+    DumpVkColorSpaceKHR(p, "colorSpace", obj.colorSpace, 6);
+    p.ObjectEnd();
+}
+void DumpVkSurfaceProtectedCapabilitiesKHR(Printer &p, std::string name, VkSurfaceProtectedCapabilitiesKHR &obj) {
+    p.ObjectStart(name);
+    p.PrintKeyBool("supportsProtected", static_cast<bool>(obj.supportsProtected), 17);
+    p.ObjectEnd();
+}
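+// Collects the sType/size pair for every pNext-chainable structure this tool allocates when
+// querying device properties, memory properties, features, surface capabilities, and format properties.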
+pNextChainInfos get_chain_infos() {
+    pNextChainInfos infos;
+    infos.phys_device_props2 = {
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT, sizeof(VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT, sizeof(VkPhysicalDeviceConservativeRasterizationPropertiesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR, sizeof(VkPhysicalDeviceDepthStencilResolvePropertiesKHR)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT, sizeof(VkPhysicalDeviceDescriptorIndexingPropertiesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT, sizeof(VkPhysicalDeviceDiscardRectanglePropertiesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR, sizeof(VkPhysicalDeviceDriverPropertiesKHR)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT, sizeof(VkPhysicalDeviceExternalMemoryHostPropertiesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR, sizeof(VkPhysicalDeviceFloatControlsPropertiesKHR)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT, sizeof(VkPhysicalDeviceFragmentDensityMapPropertiesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES, sizeof(VkPhysicalDeviceIDProperties)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT, sizeof(VkPhysicalDeviceInlineUniformBlockPropertiesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT, sizeof(VkPhysicalDeviceLineRasterizationPropertiesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES, sizeof(VkPhysicalDeviceMaintenance3Properties)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES, sizeof(VkPhysicalDeviceMultiviewProperties)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT, sizeof(VkPhysicalDevicePCIBusInfoPropertiesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR, sizeof(VkPhysicalDevicePerformanceQueryPropertiesKHR)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES, sizeof(VkPhysicalDevicePointClippingProperties)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES, sizeof(VkPhysicalDeviceProtectedMemoryProperties)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR, sizeof(VkPhysicalDevicePushDescriptorPropertiesKHR)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT, sizeof(VkPhysicalDeviceSampleLocationsPropertiesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT, sizeof(VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES, sizeof(VkPhysicalDeviceSubgroupProperties)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT, sizeof(VkPhysicalDeviceSubgroupSizeControlPropertiesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT, sizeof(VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR, sizeof(VkPhysicalDeviceTimelineSemaphorePropertiesKHR)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT, sizeof(VkPhysicalDeviceTransformFeedbackPropertiesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT, sizeof(VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT)},
+    };
+    infos.phys_device_mem_props2 = {
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT, sizeof(VkPhysicalDeviceMemoryBudgetPropertiesEXT)},
+    };
+    infos.phys_device_features2 = {
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES, sizeof(VkPhysicalDevice16BitStorageFeatures)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR, sizeof(VkPhysicalDevice8BitStorageFeaturesKHR)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT, sizeof(VkPhysicalDeviceASTCDecodeFeaturesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT, sizeof(VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT, sizeof(VkPhysicalDeviceBufferDeviceAddressFeaturesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT, sizeof(VkPhysicalDeviceConditionalRenderingFeaturesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT, sizeof(VkPhysicalDeviceDepthClipEnableFeaturesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT, sizeof(VkPhysicalDeviceDescriptorIndexingFeaturesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT, sizeof(VkPhysicalDeviceFragmentDensityMapFeaturesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT, sizeof(VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT, sizeof(VkPhysicalDeviceHostQueryResetFeaturesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR, sizeof(VkPhysicalDeviceImagelessFramebufferFeaturesKHR)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT, sizeof(VkPhysicalDeviceIndexTypeUint8FeaturesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT, sizeof(VkPhysicalDeviceInlineUniformBlockFeaturesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT, sizeof(VkPhysicalDeviceLineRasterizationFeaturesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT, sizeof(VkPhysicalDeviceMemoryPriorityFeaturesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES, sizeof(VkPhysicalDeviceMultiviewFeatures)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR, sizeof(VkPhysicalDevicePerformanceQueryFeaturesKHR)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR, sizeof(VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES, sizeof(VkPhysicalDeviceProtectedMemoryFeatures)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES, sizeof(VkPhysicalDeviceSamplerYcbcrConversionFeatures)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT, sizeof(VkPhysicalDeviceScalarBlockLayoutFeaturesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR, sizeof(VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR, sizeof(VkPhysicalDeviceShaderAtomicInt64FeaturesKHR)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR, sizeof(VkPhysicalDeviceShaderClockFeaturesKHR)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT, sizeof(VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES, sizeof(VkPhysicalDeviceShaderDrawParametersFeatures)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR, sizeof(VkPhysicalDeviceShaderFloat16Int8FeaturesKHR)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR, sizeof(VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT, sizeof(VkPhysicalDeviceSubgroupSizeControlFeaturesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT, sizeof(VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT, sizeof(VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR, sizeof(VkPhysicalDeviceTimelineSemaphoreFeaturesKHR)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT, sizeof(VkPhysicalDeviceTransformFeedbackFeaturesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR, sizeof(VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, sizeof(VkPhysicalDeviceVariablePointersFeatures)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT, sizeof(VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR, sizeof(VkPhysicalDeviceVulkanMemoryModelFeaturesKHR)},
+        {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT, sizeof(VkPhysicalDeviceYcbcrImageArraysFeaturesEXT)},
+    };
+    infos.surface_capabilities2 = {
+        {VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR, sizeof(VkSharedPresentSurfaceCapabilitiesKHR)},
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+        {VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT, sizeof(VkSurfaceCapabilitiesFullScreenExclusiveEXT)},
+#endif  // VK_USE_PLATFORM_WIN32_KHR
+        {VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR, sizeof(VkSurfaceProtectedCapabilitiesKHR)},
+    };
+    infos.format_properties2 = {
+        {VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT, sizeof(VkDrmFormatModifierPropertiesListEXT)},
+    };
+    return infos;
+}
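+// Walks a VkPhysicalDeviceProperties2 pNext chain and prints each structure whose sType is
+// recognized, gated on the corresponding device extension where one is required.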
+void chain_iterator_phys_device_props2(Printer &p, AppGpu &gpu, void * place) {
+    while (place) {
+        struct VkStructureHeader *structure = (struct VkStructureHeader *)place;
+        p.SetSubHeader();
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME)) {
+            VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT* props = (VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT*)structure;
+            DumpVkPhysicalDeviceBlendOperationAdvancedPropertiesEXT(p, "VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME)) {
+            VkPhysicalDeviceConservativeRasterizationPropertiesEXT* props = (VkPhysicalDeviceConservativeRasterizationPropertiesEXT*)structure;
+            DumpVkPhysicalDeviceConservativeRasterizationPropertiesEXT(p, "VkPhysicalDeviceConservativeRasterizationPropertiesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME)) {
+            VkPhysicalDeviceDepthStencilResolvePropertiesKHR* props = (VkPhysicalDeviceDepthStencilResolvePropertiesKHR*)structure;
+            DumpVkPhysicalDeviceDepthStencilResolvePropertiesKHR(p, "VkPhysicalDeviceDepthStencilResolvePropertiesKHR", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME)) {
+            VkPhysicalDeviceDescriptorIndexingPropertiesEXT* props = (VkPhysicalDeviceDescriptorIndexingPropertiesEXT*)structure;
+            DumpVkPhysicalDeviceDescriptorIndexingPropertiesEXT(p, "VkPhysicalDeviceDescriptorIndexingPropertiesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME)) {
+            VkPhysicalDeviceDiscardRectanglePropertiesEXT* props = (VkPhysicalDeviceDiscardRectanglePropertiesEXT*)structure;
+            DumpVkPhysicalDeviceDiscardRectanglePropertiesEXT(p, "VkPhysicalDeviceDiscardRectanglePropertiesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME)) {
+            VkPhysicalDeviceDriverPropertiesKHR* props = (VkPhysicalDeviceDriverPropertiesKHR*)structure;
+            DumpVkPhysicalDeviceDriverPropertiesKHR(p, "VkPhysicalDeviceDriverPropertiesKHR", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME)) {
+            VkPhysicalDeviceExternalMemoryHostPropertiesEXT* props = (VkPhysicalDeviceExternalMemoryHostPropertiesEXT*)structure;
+            DumpVkPhysicalDeviceExternalMemoryHostPropertiesEXT(p, "VkPhysicalDeviceExternalMemoryHostPropertiesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME)) {
+            VkPhysicalDeviceFloatControlsPropertiesKHR* props = (VkPhysicalDeviceFloatControlsPropertiesKHR*)structure;
+            DumpVkPhysicalDeviceFloatControlsPropertiesKHR(p, "VkPhysicalDeviceFloatControlsPropertiesKHR", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME)) {
+            VkPhysicalDeviceFragmentDensityMapPropertiesEXT* props = (VkPhysicalDeviceFragmentDensityMapPropertiesEXT*)structure;
+            DumpVkPhysicalDeviceFragmentDensityMapPropertiesEXT(p, "VkPhysicalDeviceFragmentDensityMapPropertiesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES) {
+            VkPhysicalDeviceIDProperties* props = (VkPhysicalDeviceIDProperties*)structure;
+            DumpVkPhysicalDeviceIDProperties(p, "VkPhysicalDeviceIDProperties", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME)) {
+            VkPhysicalDeviceInlineUniformBlockPropertiesEXT* props = (VkPhysicalDeviceInlineUniformBlockPropertiesEXT*)structure;
+            DumpVkPhysicalDeviceInlineUniformBlockPropertiesEXT(p, "VkPhysicalDeviceInlineUniformBlockPropertiesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME)) {
+            VkPhysicalDeviceLineRasterizationPropertiesEXT* props = (VkPhysicalDeviceLineRasterizationPropertiesEXT*)structure;
+            DumpVkPhysicalDeviceLineRasterizationPropertiesEXT(p, "VkPhysicalDeviceLineRasterizationPropertiesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES) {
+            VkPhysicalDeviceMaintenance3Properties* props = (VkPhysicalDeviceMaintenance3Properties*)structure;
+            DumpVkPhysicalDeviceMaintenance3Properties(p, "VkPhysicalDeviceMaintenance3Properties", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES) {
+            VkPhysicalDeviceMultiviewProperties* props = (VkPhysicalDeviceMultiviewProperties*)structure;
+            DumpVkPhysicalDeviceMultiviewProperties(p, "VkPhysicalDeviceMultiviewProperties", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_PCI_BUS_INFO_EXTENSION_NAME)) {
+            VkPhysicalDevicePCIBusInfoPropertiesEXT* props = (VkPhysicalDevicePCIBusInfoPropertiesEXT*)structure;
+            DumpVkPhysicalDevicePCIBusInfoPropertiesEXT(p, "VkPhysicalDevicePCIBusInfoPropertiesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME)) {
+            VkPhysicalDevicePerformanceQueryPropertiesKHR* props = (VkPhysicalDevicePerformanceQueryPropertiesKHR*)structure;
+            DumpVkPhysicalDevicePerformanceQueryPropertiesKHR(p, "VkPhysicalDevicePerformanceQueryPropertiesKHR", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES) {
+            VkPhysicalDevicePointClippingProperties* props = (VkPhysicalDevicePointClippingProperties*)structure;
+            DumpVkPhysicalDevicePointClippingProperties(p, "VkPhysicalDevicePointClippingProperties", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES) {
+            VkPhysicalDeviceProtectedMemoryProperties* props = (VkPhysicalDeviceProtectedMemoryProperties*)structure;
+            DumpVkPhysicalDeviceProtectedMemoryProperties(p, "VkPhysicalDeviceProtectedMemoryProperties", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
+            VkPhysicalDevicePushDescriptorPropertiesKHR* props = (VkPhysicalDevicePushDescriptorPropertiesKHR*)structure;
+            DumpVkPhysicalDevicePushDescriptorPropertiesKHR(p, "VkPhysicalDevicePushDescriptorPropertiesKHR", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME)) {
+            VkPhysicalDeviceSampleLocationsPropertiesEXT* props = (VkPhysicalDeviceSampleLocationsPropertiesEXT*)structure;
+            DumpVkPhysicalDeviceSampleLocationsPropertiesEXT(p, "VkPhysicalDeviceSampleLocationsPropertiesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME)) {
+            VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT* props = (VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT*)structure;
+            DumpVkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT(p, "VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES) {
+            VkPhysicalDeviceSubgroupProperties* props = (VkPhysicalDeviceSubgroupProperties*)structure;
+            DumpVkPhysicalDeviceSubgroupProperties(p, "VkPhysicalDeviceSubgroupProperties", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME)) {
+            VkPhysicalDeviceSubgroupSizeControlPropertiesEXT* props = (VkPhysicalDeviceSubgroupSizeControlPropertiesEXT*)structure;
+            DumpVkPhysicalDeviceSubgroupSizeControlPropertiesEXT(p, "VkPhysicalDeviceSubgroupSizeControlPropertiesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME)) {
+            VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT* props = (VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT*)structure;
+            DumpVkPhysicalDeviceTexelBufferAlignmentPropertiesEXT(p, "VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME)) {
+            VkPhysicalDeviceTimelineSemaphorePropertiesKHR* props = (VkPhysicalDeviceTimelineSemaphorePropertiesKHR*)structure;
+            DumpVkPhysicalDeviceTimelineSemaphorePropertiesKHR(p, "VkPhysicalDeviceTimelineSemaphorePropertiesKHR", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME)) {
+            VkPhysicalDeviceTransformFeedbackPropertiesEXT* props = (VkPhysicalDeviceTransformFeedbackPropertiesEXT*)structure;
+            DumpVkPhysicalDeviceTransformFeedbackPropertiesEXT(p, "VkPhysicalDeviceTransformFeedbackPropertiesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME)) {
+            VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT* props = (VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT*)structure;
+            DumpVkPhysicalDeviceVertexAttributeDivisorPropertiesEXT(p, "VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT", *props);
+            p.AddNewline();
+        }
+        place = structure->pNext;
+    }
+}
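+// Walks a VkPhysicalDeviceMemoryProperties2 pNext chain; only the VK_EXT_memory_budget
+// structure is handled here.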
+void chain_iterator_phys_device_mem_props2(Printer &p, AppGpu &gpu, void * place) {
+    while (place) {
+        struct VkStructureHeader *structure = (struct VkStructureHeader *)place;
+        p.SetSubHeader();
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_MEMORY_BUDGET_EXTENSION_NAME)) {
+            VkPhysicalDeviceMemoryBudgetPropertiesEXT* props = (VkPhysicalDeviceMemoryBudgetPropertiesEXT*)structure;
+            DumpVkPhysicalDeviceMemoryBudgetPropertiesEXT(p, "VkPhysicalDeviceMemoryBudgetPropertiesEXT", *props);
+            p.AddNewline();
+        }
+        place = structure->pNext;
+    }
+}
+void chain_iterator_phys_device_features2(Printer &p, AppGpu &gpu, void * place) {
+    while (place) {
+        struct VkStructureHeader *structure = (struct VkStructureHeader *)place;
+        p.SetSubHeader();
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES) {
+            VkPhysicalDevice16BitStorageFeatures* props = (VkPhysicalDevice16BitStorageFeatures*)structure;
+            DumpVkPhysicalDevice16BitStorageFeatures(p, "VkPhysicalDevice16BitStorageFeatures", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_KHR_8BIT_STORAGE_EXTENSION_NAME)) {
+            VkPhysicalDevice8BitStorageFeaturesKHR* props = (VkPhysicalDevice8BitStorageFeaturesKHR*)structure;
+            DumpVkPhysicalDevice8BitStorageFeaturesKHR(p, "VkPhysicalDevice8BitStorageFeaturesKHR", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_ASTC_DECODE_MODE_EXTENSION_NAME)) {
+            VkPhysicalDeviceASTCDecodeFeaturesEXT* props = (VkPhysicalDeviceASTCDecodeFeaturesEXT*)structure;
+            DumpVkPhysicalDeviceASTCDecodeFeaturesEXT(p, "VkPhysicalDeviceASTCDecodeFeaturesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME)) {
+            VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT* props = (VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT*)structure;
+            DumpVkPhysicalDeviceBlendOperationAdvancedFeaturesEXT(p, "VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME)) {
+            VkPhysicalDeviceBufferDeviceAddressFeaturesEXT* props = (VkPhysicalDeviceBufferDeviceAddressFeaturesEXT*)structure;
+            DumpVkPhysicalDeviceBufferDeviceAddressFeaturesEXT(p, "VkPhysicalDeviceBufferDeviceAddressFeaturesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME)) {
+            VkPhysicalDeviceConditionalRenderingFeaturesEXT* props = (VkPhysicalDeviceConditionalRenderingFeaturesEXT*)structure;
+            DumpVkPhysicalDeviceConditionalRenderingFeaturesEXT(p, "VkPhysicalDeviceConditionalRenderingFeaturesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_DEPTH_CLIP_ENABLE_EXTENSION_NAME)) {
+            VkPhysicalDeviceDepthClipEnableFeaturesEXT* props = (VkPhysicalDeviceDepthClipEnableFeaturesEXT*)structure;
+            DumpVkPhysicalDeviceDepthClipEnableFeaturesEXT(p, "VkPhysicalDeviceDepthClipEnableFeaturesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME)) {
+            VkPhysicalDeviceDescriptorIndexingFeaturesEXT* props = (VkPhysicalDeviceDescriptorIndexingFeaturesEXT*)structure;
+            DumpVkPhysicalDeviceDescriptorIndexingFeaturesEXT(p, "VkPhysicalDeviceDescriptorIndexingFeaturesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME)) {
+            VkPhysicalDeviceFragmentDensityMapFeaturesEXT* props = (VkPhysicalDeviceFragmentDensityMapFeaturesEXT*)structure;
+            DumpVkPhysicalDeviceFragmentDensityMapFeaturesEXT(p, "VkPhysicalDeviceFragmentDensityMapFeaturesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME)) {
+            VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT* props = (VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT*)structure;
+            DumpVkPhysicalDeviceFragmentShaderInterlockFeaturesEXT(p, "VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME)) {
+            VkPhysicalDeviceHostQueryResetFeaturesEXT* props = (VkPhysicalDeviceHostQueryResetFeaturesEXT*)structure;
+            DumpVkPhysicalDeviceHostQueryResetFeaturesEXT(p, "VkPhysicalDeviceHostQueryResetFeaturesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME)) {
+            VkPhysicalDeviceImagelessFramebufferFeaturesKHR* props = (VkPhysicalDeviceImagelessFramebufferFeaturesKHR*)structure;
+            DumpVkPhysicalDeviceImagelessFramebufferFeaturesKHR(p, "VkPhysicalDeviceImagelessFramebufferFeaturesKHR", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME)) {
+            VkPhysicalDeviceIndexTypeUint8FeaturesEXT* props = (VkPhysicalDeviceIndexTypeUint8FeaturesEXT*)structure;
+            DumpVkPhysicalDeviceIndexTypeUint8FeaturesEXT(p, "VkPhysicalDeviceIndexTypeUint8FeaturesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME)) {
+            VkPhysicalDeviceInlineUniformBlockFeaturesEXT* props = (VkPhysicalDeviceInlineUniformBlockFeaturesEXT*)structure;
+            DumpVkPhysicalDeviceInlineUniformBlockFeaturesEXT(p, "VkPhysicalDeviceInlineUniformBlockFeaturesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME)) {
+            VkPhysicalDeviceLineRasterizationFeaturesEXT* props = (VkPhysicalDeviceLineRasterizationFeaturesEXT*)structure;
+            DumpVkPhysicalDeviceLineRasterizationFeaturesEXT(p, "VkPhysicalDeviceLineRasterizationFeaturesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME)) {
+            VkPhysicalDeviceMemoryPriorityFeaturesEXT* props = (VkPhysicalDeviceMemoryPriorityFeaturesEXT*)structure;
+            DumpVkPhysicalDeviceMemoryPriorityFeaturesEXT(p, "VkPhysicalDeviceMemoryPriorityFeaturesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES) {
+            VkPhysicalDeviceMultiviewFeatures* props = (VkPhysicalDeviceMultiviewFeatures*)structure;
+            DumpVkPhysicalDeviceMultiviewFeatures(p, "VkPhysicalDeviceMultiviewFeatures", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME)) {
+            VkPhysicalDevicePerformanceQueryFeaturesKHR* props = (VkPhysicalDevicePerformanceQueryFeaturesKHR*)structure;
+            DumpVkPhysicalDevicePerformanceQueryFeaturesKHR(p, "VkPhysicalDevicePerformanceQueryFeaturesKHR", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME)) {
+            VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR* props = (VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR*)structure;
+            DumpVkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR(p, "VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES) {
+            VkPhysicalDeviceProtectedMemoryFeatures* props = (VkPhysicalDeviceProtectedMemoryFeatures*)structure;
+            DumpVkPhysicalDeviceProtectedMemoryFeatures(p, "VkPhysicalDeviceProtectedMemoryFeatures", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES) {
+            VkPhysicalDeviceSamplerYcbcrConversionFeatures* props = (VkPhysicalDeviceSamplerYcbcrConversionFeatures*)structure;
+            DumpVkPhysicalDeviceSamplerYcbcrConversionFeatures(p, "VkPhysicalDeviceSamplerYcbcrConversionFeatures", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME)) {
+            VkPhysicalDeviceScalarBlockLayoutFeaturesEXT* props = (VkPhysicalDeviceScalarBlockLayoutFeaturesEXT*)structure;
+            DumpVkPhysicalDeviceScalarBlockLayoutFeaturesEXT(p, "VkPhysicalDeviceScalarBlockLayoutFeaturesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME)) {
+            VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR* props = (VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR*)structure;
+            DumpVkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR(p, "VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME)) {
+            VkPhysicalDeviceShaderAtomicInt64FeaturesKHR* props = (VkPhysicalDeviceShaderAtomicInt64FeaturesKHR*)structure;
+            DumpVkPhysicalDeviceShaderAtomicInt64FeaturesKHR(p, "VkPhysicalDeviceShaderAtomicInt64FeaturesKHR", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_KHR_SHADER_CLOCK_EXTENSION_NAME)) {
+            VkPhysicalDeviceShaderClockFeaturesKHR* props = (VkPhysicalDeviceShaderClockFeaturesKHR*)structure;
+            DumpVkPhysicalDeviceShaderClockFeaturesKHR(p, "VkPhysicalDeviceShaderClockFeaturesKHR", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME)) {
+            VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT* props = (VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT*)structure;
+            DumpVkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT(p, "VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES) {
+            VkPhysicalDeviceShaderDrawParametersFeatures* props = (VkPhysicalDeviceShaderDrawParametersFeatures*)structure;
+            DumpVkPhysicalDeviceShaderDrawParametersFeatures(p, "VkPhysicalDeviceShaderDrawParametersFeatures", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME)) {
+            VkPhysicalDeviceShaderFloat16Int8FeaturesKHR* props = (VkPhysicalDeviceShaderFloat16Int8FeaturesKHR*)structure;
+            DumpVkPhysicalDeviceShaderFloat16Int8FeaturesKHR(p, "VkPhysicalDeviceShaderFloat16Int8FeaturesKHR", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME)) {
+            VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR* props = (VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR*)structure;
+            DumpVkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR(p, "VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME)) {
+            VkPhysicalDeviceSubgroupSizeControlFeaturesEXT* props = (VkPhysicalDeviceSubgroupSizeControlFeaturesEXT*)structure;
+            DumpVkPhysicalDeviceSubgroupSizeControlFeaturesEXT(p, "VkPhysicalDeviceSubgroupSizeControlFeaturesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME)) {
+            VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT* props = (VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT*)structure;
+            DumpVkPhysicalDeviceTexelBufferAlignmentFeaturesEXT(p, "VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_EXTENSION_NAME)) {
+            VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT* props = (VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT*)structure;
+            DumpVkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT(p, "VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME)) {
+            VkPhysicalDeviceTimelineSemaphoreFeaturesKHR* props = (VkPhysicalDeviceTimelineSemaphoreFeaturesKHR*)structure;
+            DumpVkPhysicalDeviceTimelineSemaphoreFeaturesKHR(p, "VkPhysicalDeviceTimelineSemaphoreFeaturesKHR", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME)) {
+            VkPhysicalDeviceTransformFeedbackFeaturesEXT* props = (VkPhysicalDeviceTransformFeedbackFeaturesEXT*)structure;
+            DumpVkPhysicalDeviceTransformFeedbackFeaturesEXT(p, "VkPhysicalDeviceTransformFeedbackFeaturesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME)) {
+            VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR* props = (VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR*)structure;
+            DumpVkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR(p, "VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES) {
+            VkPhysicalDeviceVariablePointersFeatures* props = (VkPhysicalDeviceVariablePointersFeatures*)structure;
+            DumpVkPhysicalDeviceVariablePointersFeatures(p, "VkPhysicalDeviceVariablePointersFeatures", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME)) {
+            VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT* props = (VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT*)structure;
+            DumpVkPhysicalDeviceVertexAttributeDivisorFeaturesEXT(p, "VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_KHR_VULKAN_MEMORY_MODEL_EXTENSION_NAME)) {
+            VkPhysicalDeviceVulkanMemoryModelFeaturesKHR* props = (VkPhysicalDeviceVulkanMemoryModelFeaturesKHR*)structure;
+            DumpVkPhysicalDeviceVulkanMemoryModelFeaturesKHR(p, "VkPhysicalDeviceVulkanMemoryModelFeaturesKHR", *props);
+            p.AddNewline();
+        }
+        if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_YCBCR_IMAGE_ARRAYS_EXTENSION_NAME)) {
+            VkPhysicalDeviceYcbcrImageArraysFeaturesEXT* props = (VkPhysicalDeviceYcbcrImageArraysFeaturesEXT*)structure;
+            DumpVkPhysicalDeviceYcbcrImageArraysFeaturesEXT(p, "VkPhysicalDeviceYcbcrImageArraysFeaturesEXT", *props);
+            p.AddNewline();
+        }
+        place = structure->pNext;
+    }
+}
+void chain_iterator_surface_capabilities2(Printer &p, AppInstance &inst, AppGpu &gpu, void * place) {
+    while (place) {
+        struct VkStructureHeader *structure = (struct VkStructureHeader *)place;
+        p.SetSubHeader();
+        if (structure->sType == VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME)) {
+            VkSharedPresentSurfaceCapabilitiesKHR* props = (VkSharedPresentSurfaceCapabilitiesKHR*)structure;
+            DumpVkSharedPresentSurfaceCapabilitiesKHR(p, "VkSharedPresentSurfaceCapabilitiesKHR", *props);
+            p.AddNewline();
+        }
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+        if (structure->sType == VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME)) {
+            VkSurfaceCapabilitiesFullScreenExclusiveEXT* props = (VkSurfaceCapabilitiesFullScreenExclusiveEXT*)structure;
+            DumpVkSurfaceCapabilitiesFullScreenExclusiveEXT(p, "VkSurfaceCapabilitiesFullScreenExclusiveEXT", *props);
+            p.AddNewline();
+        }
+#endif  // VK_USE_PLATFORM_WIN32_KHR
+        if (structure->sType == VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR &&
+            inst.CheckExtensionEnabled(VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME)) {
+            VkSurfaceProtectedCapabilitiesKHR* props = (VkSurfaceProtectedCapabilitiesKHR*)structure;
+            DumpVkSurfaceProtectedCapabilitiesKHR(p, "VkSurfaceProtectedCapabilitiesKHR", *props);
+            p.AddNewline();
+        }
+        place = structure->pNext;
+    }
+}
+void chain_iterator_format_properties2(Printer &p, AppGpu &gpu, void * place) {
+    while (place) {
+        struct VkStructureHeader *structure = (struct VkStructureHeader *)place;
+        p.SetSubHeader();
+        if (structure->sType == VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT &&
+            gpu.CheckPhysicalDeviceExtensionIncluded(VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME)) {
+            VkDrmFormatModifierPropertiesListEXT* props = (VkDrmFormatModifierPropertiesListEXT*)structure;
+            DumpVkDrmFormatModifierPropertiesListEXT(p, "VkDrmFormatModifierPropertiesListEXT", *props);
+            p.AddNewline();
+        }
+        place = structure->pNext;
+    }
+}
+bool operator==(const VkExtent2D & a, const VkExtent2D b);
+bool operator==(const VkSurfaceCapabilities2EXT & a, const VkSurfaceCapabilities2EXT b);
+bool operator==(const VkSurfaceCapabilities2KHR & a, const VkSurfaceCapabilities2KHR b);
+bool operator==(const VkSurfaceCapabilitiesKHR & a, const VkSurfaceCapabilitiesKHR b);
+bool operator==(const VkSurfaceFormat2KHR & a, const VkSurfaceFormat2KHR b);
+bool operator==(const VkSurfaceFormatKHR & a, const VkSurfaceFormatKHR b);
+bool operator==(const VkExtent2D & a, const VkExtent2D b) {
+    return a.width == b.width
+        && a.height == b.height;
+}
+bool operator==(const VkSurfaceCapabilities2EXT & a, const VkSurfaceCapabilities2EXT b) {
+    return a.minImageCount == b.minImageCount
+        && a.maxImageCount == b.maxImageCount
+        && a.currentExtent == b.currentExtent
+        && a.minImageExtent == b.minImageExtent
+        && a.maxImageExtent == b.maxImageExtent
+        && a.maxImageArrayLayers == b.maxImageArrayLayers
+        && a.supportedTransforms == b.supportedTransforms
+        && a.currentTransform == b.currentTransform
+        && a.supportedCompositeAlpha == b.supportedCompositeAlpha
+        && a.supportedUsageFlags == b.supportedUsageFlags
+        && a.supportedSurfaceCounters == b.supportedSurfaceCounters;
+}
+bool operator==(const VkSurfaceCapabilities2KHR & a, const VkSurfaceCapabilities2KHR b) {
+    return a.surfaceCapabilities == b.surfaceCapabilities;
+}
+bool operator==(const VkSurfaceCapabilitiesKHR & a, const VkSurfaceCapabilitiesKHR b) {
+    return a.minImageCount == b.minImageCount
+        && a.maxImageCount == b.maxImageCount
+        && a.currentExtent == b.currentExtent
+        && a.minImageExtent == b.minImageExtent
+        && a.maxImageExtent == b.maxImageExtent
+        && a.maxImageArrayLayers == b.maxImageArrayLayers
+        && a.supportedTransforms == b.supportedTransforms
+        && a.currentTransform == b.currentTransform
+        && a.supportedCompositeAlpha == b.supportedCompositeAlpha
+        && a.supportedUsageFlags == b.supportedUsageFlags;
+}
+bool operator==(const VkSurfaceFormat2KHR & a, const VkSurfaceFormat2KHR b) {
+    return a.surfaceFormat == b.surfaceFormat;
+}
+bool operator==(const VkSurfaceFormatKHR & a, const VkSurfaceFormatKHR b) {
+    return a.format == b.format
+        && a.colorSpace == b.colorSpace;
+}
+
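The chain_iterator_* helpers above simply walk a pNext chain and dispatch on each VkStructureHeader::sType. As a minimal illustration of the kind of chain they consume (a hedged sketch, not vulkaninfo's own setup code, which lives in vulkaninfo.hpp), core Vulkan 1.1 lets an extension feature struct be chained onto VkPhysicalDeviceFeatures2 and the filled-in pNext pointer be handed to chain_iterator_phys_device_features2:

    // Sketch only: assumes a valid VkPhysicalDevice from vkEnumeratePhysicalDevices
    // and a Vulkan 1.1 instance; vulkaninfo's real query code is not shown here.
    #include <vulkan/vulkan.h>

    void QueryFeatures2Chain(VkPhysicalDevice phys_device) {
        VkPhysicalDeviceMultiviewFeatures multiview = {};
        multiview.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES;

        VkPhysicalDeviceFeatures2 features2 = {};
        features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
        features2.pNext = &multiview;  // extension struct the iterator finds via its sType

        // The driver fills in every struct it recognizes in the chain.
        vkGetPhysicalDeviceFeatures2(phys_device, &features2);

        // vulkaninfo would now pass features2.pNext (along with its Printer and
        // AppGpu objects) to chain_iterator_phys_device_features2, which prints
        // each recognized struct as shown above.
    }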
diff --git a/src/third_party/vulkan-tools/src/vulkaninfo/json_validation_process.md b/src/third_party/vulkan-tools/src/vulkaninfo/json_validation_process.md
new file mode 100644
index 0000000..9434528
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/vulkaninfo/json_validation_process.md
@@ -0,0 +1,38 @@
+# Validating [vulkaninfo](https://github.com/KhronosGroup/Vulkan-Tools/tree/master/vulkaninfo) JSON output
+
+The format of vulkaninfo's JSON output is designed to be used as input to the LunarG
+[Device Simulation](https://github.com/LunarG/VulkanTools/blob/master/layersvt/device_simulation.md)
+(DevSim) layer.
+When changes are made to vulkaninfo's JSON output, the result should be
+validated against DevSim's JSON schema to ensure the text is still correctly
+formatted.
+
+The DevSim JSON schema specifies exactly how its JSON input data must
+be structured.
+The schema may be found at
+https://schema.khronos.org/vulkan/devsim_1_0_0.json
+
+## Steps to validate JSON data against the DevSim schema
+
+1. Generate the text to be tested using `vulkaninfo --json` and save to a file.
+1. Download the [DevSim schema](https://schema.khronos.org/vulkan/devsim_1_0_0.json) to another file.
+1. For each of the on-line JSON validator tools listed below:
+   1. Paste the schema and sample text into the `schema` and `data` fields.
+   1. Depending on the tool, it may validate automatically, or require clicking a `validate` button.
+   1. Ensure the tool reports no errors.
+
+## List of recommended JSON validator tools
+
+Each of these tools seems to have its own quirks and/or holes in coverage.
+I recommend using all of them and ensuring that each reports that the data
+successfully validates against the schema.
+* https://www.jsonschemavalidator.net/
+* https://jsonschemalint.com/#/version/draft-04/markup/json
+* https://json-schema-validator.herokuapp.com/index.jsp
+
+If you have suggestions for better tools
+(e.g., an official reference validator, or an automatable tool that could be
+integrated with continuous integration), please create an
+[issue](https://github.com/KhronosGroup/Vulkan-Tools/issues)
+to recommend them.
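For continuous-integration use, the same check can in principle be scripted instead of pasted into a web page. The following is only a sketch under the assumption that the third-party pboettch/json-schema-validator library (built on nlohmann/json) is available with its documented json_validator interface; it is not part of vulkaninfo or of the validation process described above, and draft compatibility with the DevSim schema would need to be verified:

    // Hypothetical offline validation sketch; library availability and exact API
    // are assumptions (see https://github.com/pboettch/json-schema-validator).
    #include <fstream>
    #include <iostream>
    #include <nlohmann/json-schema.hpp>

    int main(int argc, char **argv) {
        if (argc != 3) {
            std::cerr << "usage: validate <devsim_schema.json> <vulkaninfo_output.json>\n";
            return 1;
        }
        nlohmann::json schema, instance;
        std::ifstream schema_file(argv[1]);
        std::ifstream data_file(argv[2]);
        schema_file >> schema;
        data_file >> instance;

        nlohmann::json_schema::json_validator validator;
        try {
            validator.set_root_schema(schema);  // parse and cache the DevSim schema
            validator.validate(instance);       // throws if the data does not conform
        } catch (const std::exception &e) {
            std::cerr << "validation failed: " << e.what() << "\n";
            return 1;
        }
        std::cout << "validation succeeded\n";
        return 0;
    }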
diff --git a/src/third_party/vulkan-tools/src/vulkaninfo/macOS/Info.plist b/src/third_party/vulkan-tools/src/vulkaninfo/macOS/Info.plist
new file mode 100644
index 0000000..cc7b234
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/vulkaninfo/macOS/Info.plist
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+	<key>CFBundleDevelopmentRegion</key>
+	<string>English</string>
+	<key>CFBundleExecutable</key>
+	<string>vulkaninfo.sh</string>
+	<key>CFBundleGetInfoString</key>
+	<string>VulkanInfo</string>
+	<key>CFBundleIconFile</key>
+	<string>VulkanIcon.icns</string>
+	<key>CFBundleIdentifier</key>
+	<string>com.lunarg.vulkaninfo</string>
+	<key>CFBundleInfoDictionaryVersion</key>
+	<string>6.0</string>
+	<key>CFBundleLongVersionString</key>
+	<string>1.0</string>
+	<key>CFBundleName</key>
+	<string>VulkanInfo</string>
+	<key>CFBundlePackageType</key>
+	<string>APPL</string>
+	<key>CFBundleShortVersionString</key>
+	<string>1.0</string>
+	<key>CFBundleSignature</key>
+	<string>????</string>
+	<key>CFBundleVersion</key>
+	<string>1.0</string>
+	<key>CSResourcesFileMapped</key>
+	<true/>
+	<key>NSHumanReadableCopyright</key>
+	<string>Copyright (c) 2018 The Khronos Group Inc. LunarG Inc. All rights reserved.</string>
+</dict>
+</plist>
diff --git a/src/third_party/vulkan-tools/src/vulkaninfo/macOS/Resources/VulkanIcon.icns b/src/third_party/vulkan-tools/src/vulkaninfo/macOS/Resources/VulkanIcon.icns
new file mode 100644
index 0000000..fb82fb7
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/vulkaninfo/macOS/Resources/VulkanIcon.icns
Binary files differ
diff --git a/src/third_party/vulkan-tools/src/vulkaninfo/macOS/vulkaninfo.cmake b/src/third_party/vulkan-tools/src/vulkaninfo/macOS/vulkaninfo.cmake
new file mode 100644
index 0000000..7625384
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/vulkaninfo/macOS/vulkaninfo.cmake
@@ -0,0 +1,73 @@
+# ~~~
+# Copyright (c) 2018-2019 Valve Corporation
+# Copyright (c) 2018-2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ~~~
+
+# Vulkaninfo Application Bundle
+
+# We already have a "vulkaninfo" target, so create a new target with a different name and use the OUTPUT_NAME property to rename it
+# to the desired name. The standalone binary is called "vulkaninfo" and the bundle is called "vulkaninfo.app". Note that the bundle's
+# executable is a script that launches Terminal so the program's output stays visible.
+add_executable(vulkaninfo-bundle
+               MACOSX_BUNDLE
+               vulkaninfo.cpp
+               ${CMAKE_BINARY_DIR}/staging-json/MoltenVK_icd.json
+               ${CMAKE_CURRENT_SOURCE_DIR}/macOS/vulkaninfo.sh
+               ${CMAKE_CURRENT_SOURCE_DIR}/macOS/Resources/VulkanIcon.icns
+               ${CMAKE_CURRENT_SOURCE_DIR}/macOS/vulkaninfo/metal_view.mm
+               ${CMAKE_CURRENT_SOURCE_DIR}/macOS/vulkaninfo/metal_view.h)
+set_target_properties(vulkaninfo-bundle
+                      PROPERTIES OUTPUT_NAME
+                                 vulkaninfo
+                                 MACOSX_BUNDLE_INFO_PLIST
+                                 ${CMAKE_CURRENT_SOURCE_DIR}/macOS/Info.plist)
+# We do this so vulkaninfo is linked to an individual library and NOT a framework.
+target_link_libraries(vulkaninfo-bundle ${Vulkan_LIBRARY} "-framework AppKit -framework QuartzCore")
+target_include_directories(vulkaninfo-bundle PRIVATE ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/macOS/vulkaninfo ${CMAKE_CURRENT_SOURCE_DIR}/generated ${CMAKE_BINARY_DIR}/vulkaninfo ${VulkanHeaders_INCLUDE_DIR})
+add_dependencies(vulkaninfo-bundle MoltenVK_icd-staging-json)
+
+set_source_files_properties(${CMAKE_CURRENT_SOURCE_DIR}/macOS/vulkaninfo.sh PROPERTIES MACOSX_PACKAGE_LOCATION "MacOS")
+set_source_files_properties(${CMAKE_CURRENT_SOURCE_DIR}/macOS/Resources/VulkanIcon.icns
+                            PROPERTIES
+                            MACOSX_PACKAGE_LOCATION
+                            "Resources")
+set_source_files_properties(${CMAKE_BINARY_DIR}/staging-json/MoltenVK_icd.json
+                            PROPERTIES
+                            MACOSX_PACKAGE_LOCATION
+                            "Resources/vulkan/icd.d")
+
+# Xcode projects need some extra help with what would be install steps.
+if(${CMAKE_GENERATOR} MATCHES "^Xcode.*")
+    add_custom_command(TARGET vulkaninfo-bundle POST_BUILD
+                       COMMAND ${CMAKE_COMMAND} -E copy "${MOLTENVK_DIR}/MoltenVK/macOS/dynamic/libMoltenVK.dylib"
+                               ${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG>/vulkaninfo.app/Contents/Frameworks/libMoltenVK.dylib
+                       DEPENDS vulkan)
+else()
+    add_custom_command(TARGET vulkaninfo-bundle POST_BUILD
+                       COMMAND ${CMAKE_COMMAND} -E copy "${MOLTENVK_DIR}/MoltenVK/macOS/dynamic/libMoltenVK.dylib"
+                               ${CMAKE_CURRENT_BINARY_DIR}/vulkaninfo.app/Contents/Frameworks/libMoltenVK.dylib
+                       DEPENDS vulkan)
+endif()
+
+# Keep RPATH so fixup_bundle can use it to find libraries
+set_target_properties(vulkaninfo-bundle PROPERTIES INSTALL_RPATH_USE_LINK_PATH TRUE)
+install(TARGETS vulkaninfo-bundle BUNDLE DESTINATION "vulkaninfo")
+# Fix up the library search path in the executable to find (loader) libraries in the bundle. When fixup_bundle() is passed a bundle
+# in the first argument, it looks at the Info.plist file to determine the BundleExecutable. In this case, the executable is a
+# script, which can't be fixed up. Instead pass it the explicit name of the executable.
+install(CODE "
+    include(BundleUtilities)
+    fixup_bundle(\${CMAKE_INSTALL_PREFIX}/vulkaninfo/vulkaninfo.app/Contents/MacOS/vulkaninfo \"\" \"${Vulkan_LIBRARY_DIR}\")
+    ")
diff --git a/src/third_party/vulkan-tools/src/vulkaninfo/macOS/vulkaninfo.sh b/src/third_party/vulkan-tools/src/vulkaninfo/macOS/vulkaninfo.sh
new file mode 100755
index 0000000..5769497
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/vulkaninfo/macOS/vulkaninfo.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+BASEDIR="$(dirname "$0")"
+
+# Quote the path so bundles installed under directories containing spaces still work.
+if [ -d /System/Applications/Utilities/Terminal.app ]
+then
+    open /System/Applications/Utilities/Terminal.app "$BASEDIR/vulkaninfo"
+else
+    open /Applications/Utilities/Terminal.app "$BASEDIR/vulkaninfo"
+fi
diff --git a/src/third_party/vulkan-tools/src/vulkaninfo/macOS/vulkaninfo/metal_view.h b/src/third_party/vulkan-tools/src/vulkaninfo/macOS/vulkaninfo/metal_view.h
new file mode 100644
index 0000000..31429d4
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/vulkaninfo/macOS/vulkaninfo/metal_view.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright (c) 2018 The Khronos Group Inc.
+ * Copyright (c) 2018 Valve Corporation
+ * Copyright (c) 2018 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Jeremy Kniager <jeremyk@lunarg.com>
+ */
+
+#ifndef metal_view_h
+#define metal_view_h
+
+void* CreateMetalView(uint32_t width, uint32_t height);
+
+void DestroyMetalView(void* view);
+
+void* GetCAMetalLayerFromMetalView(void* view);
+
+#endif /* metal_view_h */
diff --git a/src/third_party/vulkan-tools/src/vulkaninfo/macOS/vulkaninfo/metal_view.mm b/src/third_party/vulkan-tools/src/vulkaninfo/macOS/vulkaninfo/metal_view.mm
new file mode 100644
index 0000000..90bff4c
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/vulkaninfo/macOS/vulkaninfo/metal_view.mm
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2018 The Khronos Group Inc.
+ * Copyright (c) 2018 Valve Corporation
+ * Copyright (c) 2018 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Jeremy Kniager <jeremyk@lunarg.com>
+ */
+
+#import <AppKit/AppKit.h>
+#import <QuartzCore/QuartzCore.h>
+
+@interface NativeMetalView : NSView
+@end
+
+@implementation NativeMetalView
+- (id)initWithFrame:(NSRect)frame {
+    if (self = [super initWithFrame:frame]) {
+        self.wantsLayer = YES;
+    }
+    return self;
+}
+
++ (Class)layerClass {
+    return [CAMetalLayer class];
+}
+
+- (CALayer*)makeBackingLayer {
+    CALayer* layer = [self.class.layerClass layer];
+    CGSize viewScale = [self convertSizeToBacking:CGSizeMake(1.0, 1.0)];
+    layer.contentsScale = MIN(viewScale.width, viewScale.height);
+    return layer;
+}
+@end
+
+void* CreateMetalView(uint32_t width, uint32_t height) {
+    return [[NativeMetalView alloc] initWithFrame:NSMakeRect(0, 0, width, height)];
+}
+
+void DestroyMetalView(void* view) { [(NativeMetalView*)view dealloc]; }
+
+void* GetCAMetalLayerFromMetalView(void* view) { return ((NativeMetalView*)view).layer; }
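On macOS, the CAMetalLayer returned by GetCAMetalLayerFromMetalView is what a Vulkan surface gets created from. A minimal sketch of that hand-off, assuming an instance that has enabled VK_EXT_metal_surface and code compiled with VK_USE_PLATFORM_METAL_EXT (vulkaninfo's actual surface setup lives in vulkaninfo.hpp and is not shown here):

    // Sketch only: error handling and instance/extension setup are omitted.
    #define VK_USE_PLATFORM_METAL_EXT
    #include <vulkan/vulkan.h>
    #include "metal_view.h"

    VkSurfaceKHR CreateSurfaceFromMetalView(VkInstance instance, void *view) {
        VkMetalSurfaceCreateInfoEXT create_info = {};
        create_info.sType = VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT;
        // The view's backing layer is a CAMetalLayer (see makeBackingLayer above).
        create_info.pLayer = static_cast<const CAMetalLayer *>(GetCAMetalLayerFromMetalView(view));

        VkSurfaceKHR surface = VK_NULL_HANDLE;
        vkCreateMetalSurfaceEXT(instance, &create_info, nullptr, &surface);
        return surface;
    }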
diff --git a/src/third_party/vulkan-tools/src/vulkaninfo/outputprinter.h b/src/third_party/vulkan-tools/src/vulkaninfo/outputprinter.h
new file mode 100644
index 0000000..53cf751
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/vulkaninfo/outputprinter.h
@@ -0,0 +1,561 @@
+/*
+ * Copyright (c) 2019 The Khronos Group Inc.
+ * Copyright (c) 2019 Valve Corporation
+ * Copyright (c) 2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Charles Giessen <charles@lunarg.com>
+ *
+ */
+
+#pragma once
+
+#include <iomanip>
+#include <iostream>
+#include <ostream>
+#include <stack>
+#include <sstream>
+#include <string>
+
+#include <assert.h>
+
+std::string insert_quotes(std::string s) { return "\"" + s + "\""; }
+
+std::string to_string_16(uint8_t uid[16]) {
+    std::stringstream stream;
+    // std::setw resets after every insertion, so re-apply it (with a '0' fill)
+    // for each byte; otherwise single-digit bytes lose their leading zero.
+    stream << std::hex << std::setfill('0');
+    for (int i = 0; i < 16; ++i) {
+        stream << std::setw(2) << (int)uid[i];
+        if (i == 3 || i == 5 || i == 7 || i == 9) stream << "-";
+    }
+    return stream.str();
+}
+
+std::string to_string_8(uint8_t uid[8]) {
+    std::stringstream stream;
+    stream << std::hex << std::setfill('0');
+    for (int i = 0; i < 8; ++i) {
+        stream << std::setw(2) << (int)uid[i];
+        if (i == 3) stream << "-";
+    }
+    return stream.str();
+}
+
+std::string VkVersionString(uint32_t version) {
+    uint32_t major = version >> 22;
+    uint32_t minor = (version >> 12) & 0x3ff;
+    uint32_t patch = version & 0xfff;
+    return std::to_string(major) + "." + std::to_string(minor) + "." + std::to_string(patch);
+}
+
+std::string VkVersionString(VulkanVersion v) {
+    return std::to_string(v.major) + "." + std::to_string(v.minor) + "." + std::to_string(v.patch);
+}
+
+enum class OutputType { text, html, json };
+
+class Printer {
+   public:
+    Printer(OutputType output_type, std::ostream &out, const uint32_t selected_gpu, const VulkanVersion vulkan_version)
+        : output_type(output_type), out(out) {
+        switch (output_type) {
+            case (OutputType::text):
+                out << "==========\n";
+                out << "VULKANINFO\n";
+                out << "==========\n\n";
+                out << "Vulkan Instance Version: " << VkVersionString(vulkan_version) << "\n\n\n";
+
+                break;
+            case (OutputType::html):
+                out << "<!doctype html>\n";
+                out << "<html lang='en'>\n";
+                out << "\t<head>\n";
+                out << "\t\t<title>vulkaninfo</title>\n";
+                out << "\t\t<style>\n";
+                out << "\t\thtml {\n";
+                out << "\t\t\tbackground-color: #0b1e48;\n";
+                out << "\t\t\tbackground-image: url(\"https://vulkan.lunarg.com/img/bg-starfield.jpg\");\n";
+                out << "\t\t\tbackground-position: center;\n";
+                out << "\t\t\t-webkit-background-size: cover;\n";
+                out << "\t\t\t-moz-background-size: cover;\n";
+                out << "\t\t\t-o-background-size: cover;\n";
+                out << "\t\t\tbackground-size: cover;\n";
+                out << "\t\t\tbackground-attachment: fixed;\n";
+                out << "\t\t\tbackground-repeat: no-repeat;\n";
+                out << "\t\t\theight: 100%;\n";
+                out << "\t\t}\n";
+                out << "\t\t#header {\n";
+                out << "\t\t\tz-index: -1;\n";
+                out << "\t\t}\n";
+                out << "\t\t#header>img {\n";
+                out << "\t\t\tposition: absolute;\n";
+                out << "\t\t\twidth: 160px;\n";
+                out << "\t\t\tmargin-left: -280px;\n";
+                out << "\t\t\ttop: -10px;\n";
+                out << "\t\t\tleft: 50%;\n";
+                out << "\t\t}\n";
+                out << "\t\t#header>h1 {\n";
+                out << "\t\t\tfont-family: Arial, \"Helvetica Neue\", Helvetica, sans-serif;\n";
+                out << "\t\t\tfont-size: 44px;\n";
+                out << "\t\t\tfont-weight: 200;\n";
+                out << "\t\t\ttext-shadow: 4px 4px 5px #000;\n";
+                out << "\t\t\tcolor: #eee;\n";
+                out << "\t\t\tposition: absolute;\n";
+                out << "\t\t\twidth: 400px;\n";
+                out << "\t\t\tmargin-left: -80px;\n";
+                out << "\t\t\ttop: 8px;\n";
+                out << "\t\t\tleft: 50%;\n";
+                out << "\t\t}\n";
+                out << "\t\tbody {\n";
+                out << "\t\t\tfont-family: Consolas, monaco, monospace;\n";
+                out << "\t\t\tfont-size: 14px;\n";
+                out << "\t\t\tline-height: 20px;\n";
+                out << "\t\t\tcolor: #eee;\n";
+                out << "\t\t\theight: 100%;\n";
+                out << "\t\t\tmargin: 0;\n";
+                out << "\t\t\toverflow: hidden;\n";
+                out << "\t\t}\n";
+                out << "\t\t#wrapper {\n";
+                out << "\t\t\tbackground-color: rgba(0, 0, 0, 0.7);\n";
+                out << "\t\t\tborder: 1px solid #446;\n";
+                out << "\t\t\tbox-shadow: 0px 0px 10px #000;\n";
+                out << "\t\t\tpadding: 8px 12px;\n\n";
+                out << "\t\t\tdisplay: inline-block;\n";
+                out << "\t\t\tposition: absolute;\n";
+                out << "\t\t\ttop: 80px;\n";
+                out << "\t\t\tbottom: 25px;\n";
+                out << "\t\t\tleft: 50px;\n";
+                out << "\t\t\tright: 50px;\n";
+                out << "\t\t\toverflow: auto;\n";
+                out << "\t\t}\n";
+                out << "\t\tdetails>details {\n";
+                out << "\t\t\tmargin-left: 22px;\n";
+                out << "\t\t}\n";
+                out << "\t\tdetails>summary:only-child::-webkit-details-marker {\n";
+                out << "\t\t\tdisplay: none;\n";
+                out << "\t\t}\n";
+                out << "\t\t.var, .type, .val {\n";
+                out << "\t\t\tdisplay: inline;\n";
+                out << "\t\t}\n";
+                out << "\t\t.var {\n";
+                out << "\t\t}\n";
+                out << "\t\t.type {\n";
+                out << "\t\t\tcolor: #acf;\n";
+                out << "\t\t\tmargin: 0 12px;\n";
+                out << "\t\t}\n";
+                out << "\t\t.val {\n";
+                out << "\t\t\tcolor: #afa;\n";
+                out << "\t\t\tbackground: #222;\n";
+                out << "\t\t\ttext-align: right;\n";
+                out << "\t\t}\n";
+                out << "\t\t</style>\n";
+                out << "\t</head>\n";
+                out << "\t<body>\n";
+                out << "\t\t<div id='header'>\n";
+                out << "\t\t\t<h1>vulkaninfo</h1>\n";
+                out << "\t\t</div>\n";
+                out << "\t\t<div id='wrapper'>\n";
+
+                out << "\t\t\t<details><summary>Vulkan Instance Version: <span class='val'>" << VkVersionString(vulkan_version)
+                    << "</span></summary></details>\n\t\t\t<br />\n";
+                indents += 3;
+                break;
+            case (OutputType::json):
+                out << "{\n";
+                out << "\t\"$schema\": \"https://schema.khronos.org/vulkan/devsim_1_0_0.json#\",\n";
+                out << "\t\"comments\": {\n";
+                out << "\t\t\"desc\": \"JSON configuration file describing GPU " << selected_gpu
+                    << ". Generated using the vulkaninfo program.\",\n";
+                out << "\t\t\"vulkanApiVersion\": \"" << VkVersionString(vulkan_version) << "\"\n";
+                out << "\t}";
+                indents++;
+                is_first_item.push(false);
+                break;
+            default:
+                break;
+        }
+    }
+    ~Printer() {
+        switch (output_type) {
+            case (OutputType::text):
+
+                break;
+            case (OutputType::html):
+                out << "\t\t</div>\n";
+                out << "\t</body>\n";
+                out << "</html>\n";
+                indents -= 3;
+                break;
+            case (OutputType::json):
+                out << "\n}\n";
+                indents--;
+                is_first_item.pop();
+                assert(is_first_item.empty() && "mismatched number of ObjectStart/ObjectEnd or ArrayStart/ArrayEnd's");
+                break;
+        }
+        assert(indents == 0 && "indents must be zero at program end");
+    };
+
+    Printer(const Printer &) = delete;
+    const Printer &operator=(const Printer &) = delete;
+
+    OutputType Type() { return output_type; }
+
+    // Custom Formatting
+    // use by prepending with p.SetXXX().ObjectStart/ArrayStart
+
+    Printer &SetHeader() {
+        set_next_header = true;
+        return *this;
+    }
+
+    Printer &SetSubHeader() {
+        set_next_subheader = true;
+        return *this;
+    }
+
+    Printer &SetOpenDetails() {
+        set_details_open = true;
+        return *this;
+    }
+
+    Printer &SetTitleAsType() {
+        set_object_name_as_type = true;
+        return *this;
+    }
+
+    Printer &SetAsType() {
+        set_as_type = true;
+        return *this;
+    }
+
+    Printer &SetElementIndex(int index) {
+        assert(index >= 0 && "cannot set element index to a negative value");
+        element_index = index;
+        return *this;
+    }
+
+    void ObjectStart(std::string object_name) {
+        switch (output_type) {
+            case (OutputType::text): {
+                out << std::string(static_cast<size_t>(indents), '\t') << object_name;
+                if (element_index != -1) {
+                    out << "[" << element_index << "]";
+                }
+                out << ":\n";
+                size_t headersize = object_name.size() + 1;
+                if (element_index != -1) {
+                    headersize += 1 + std::to_string(element_index).size();
+                    element_index = -1;
+                }
+                PrintHeaderUnderlines(headersize);
+                break;
+            }
+            case (OutputType::html):
+                out << std::string(static_cast<size_t>(indents), '\t');
+                if (set_details_open) {
+                    out << "<details open>";
+                    set_details_open = false;
+                } else {
+                    out << "<details>";
+                }
+                out << "<summary>";
+                if (set_object_name_as_type) {
+                    out << "<span class='type'>" << object_name << "</span>";
+                    set_object_name_as_type = false;
+                } else {
+                    out << object_name;
+                }
+                if (element_index != -1) {
+                    out << "[<span class='val'>" << element_index << "</span>]";
+                    element_index = -1;
+                }
+                out << "</summary>\n";
+                break;
+            case (OutputType::json):
+                if (!is_first_item.top()) {
+                    out << ",\n";
+                } else {
+                    is_first_item.top() = false;
+                }
+                out << std::string(static_cast<size_t>(indents), '\t');
+                // Objects with no name are elements in an array of objects
+                if (object_name == "" || element_index != -1) {
+                    out << "{\n";
+                    element_index = -1;
+                } else {
+                    out << "\"" << object_name << "\": {\n";
+                }
+
+                is_first_item.push(true);
+                break;
+            default:
+                break;
+        }
+        indents++;
+    }
+    void ObjectEnd() {
+        indents--;
+        assert(indents >= 0 && "indents cannot go below zero");
+        switch (output_type) {
+            case (OutputType::text):
+
+                break;
+            case (OutputType::html):
+                out << std::string(static_cast<size_t>(indents), '\t') << "</details>\n";
+                break;
+            case (OutputType::json):
+                out << "\n" << std::string(static_cast<size_t>(indents), '\t') << "}";
+                is_first_item.pop();
+                break;
+            default:
+                break;
+        }
+    }
+    void ArrayStart(std::string array_name, size_t element_count = 0) {
+        switch (output_type) {
+            case (OutputType::text):
+                out << std::string(static_cast<size_t>(indents), '\t') << array_name << ": "
+                    << "count = " << element_count << "\n";
+                PrintHeaderUnderlines(array_name.size() + 1);
+                break;
+            case (OutputType::html):
+                out << std::string(static_cast<size_t>(indents), '\t');
+                if (set_details_open) {
+                    out << "<details open>";
+                    set_details_open = false;
+                } else {
+                    out << "<details>";
+                }
+                out << "<summary>" << array_name << ": count = <span class='val'>" << element_count << "</span></summary>\n";
+                break;
+            case (OutputType::json):
+                if (!is_first_item.top()) {
+                    out << ",\n";
+                } else {
+                    is_first_item.top() = false;
+                }
+                out << std::string(static_cast<size_t>(indents), '\t') << "\"" << array_name << "\": "
+                    << "[\n";
+                is_first_item.push(true);
+                break;
+            default:
+                break;
+        }
+        indents++;
+    }
+    void ArrayEnd() {
+        indents--;
+        assert(indents >= 0 && "indents cannot go below zero");
+        switch (output_type) {
+            case (OutputType::text):
+
+                break;
+            case (OutputType::html):
+                out << std::string(static_cast<size_t>(indents), '\t') << "</details>\n";
+                break;
+            case (OutputType::json):
+                out << "\n" << std::string(static_cast<size_t>(indents), '\t') << "]";
+                is_first_item.pop();
+                break;
+            default:
+                break;
+        }
+    }
+
+    // For printing key-value pairs.
+    // min_key_width lines up the values listed
+    // value_description is for reference information and is displayed inside parenthesis after the value
+    template <typename T>
+    void PrintKeyValue(std::string key, T value, size_t min_key_width = 0, std::string value_description = "") {
+        switch (output_type) {
+            case (OutputType::text):
+                if (min_key_width > key.size()) {
+                    out << std::string(static_cast<size_t>(indents), '\t') << key << std::string(min_key_width - key.size(), ' ');
+                } else {
+                    out << std::string(static_cast<size_t>(indents), '\t') << key;
+                }
+                out << " = " << value;
+                if (value_description != "") {
+                    out << " (" << value_description << ")";
+                }
+                out << "\n";
+                break;
+            case (OutputType::html):
+                out << std::string(static_cast<size_t>(indents), '\t') << "<details><summary>" << key;
+                if (min_key_width > key.size()) {
+                    out << std::string(min_key_width - key.size(), ' ');
+                }
+                if (set_as_type) {
+                    set_as_type = false;
+                    out << " = <span class='type'>" << value << "</span>";
+                } else {
+                    out << " = <span class='val'>" << value << "</span>";
+                }
+                if (value_description != "") {
+                    out << " (<span class='val'>" << value_description << "</span>)";
+                }
+                out << "</summary></details>\n";
+                break;
+            case (OutputType::json):
+                if (!is_first_item.top()) {
+                    out << ",\n";
+                } else {
+                    is_first_item.top() = false;
+                }
+                out << std::string(static_cast<size_t>(indents), '\t') << "\"" << key << "\": " << value;
+                break;
+            default:
+                break;
+        }
+    }
+
+    // For printing key - string pairs (necessary because of json)
+    void PrintKeyString(std::string key, std::string value, size_t min_key_width = 0, std::string value_description = "") {
+        switch (output_type) {
+            case (OutputType::text):
+            case (OutputType::html):
+                PrintKeyValue(key, value, min_key_width, value_description);
+                break;
+            case (OutputType::json):
+                PrintKeyValue(key, std::string("\"") + value + "\"", min_key_width, value_description);
+                break;
+            default:
+                break;
+        }
+    }
+
+    // For printing key - string pairs (necessary because of json)
+    void PrintKeyBool(std::string key, bool value, size_t min_key_width = 0, std::string value_description = "") {
+        switch (output_type) {
+            case (OutputType::text):
+            case (OutputType::html):
+                PrintKeyValue(key, value ? "true" : "false", min_key_width, value_description);
+                break;
+            case (OutputType::json):
+                PrintKeyValue(key, value, min_key_width, value_description);
+                break;
+            default:
+                break;
+        }
+    }
+
+    // print inside array
+    template <typename T>
+    void PrintElement(T element, std::string value_description = "") {
+        switch (output_type) {
+            case (OutputType::text):
+                out << std::string(static_cast<size_t>(indents), '\t') << element;
+                if (value_description != "") {
+                    out << " (" << value_description << ")";
+                }
+                out << "\n";
+                break;
+            case (OutputType::html):
+                out << std::string(static_cast<size_t>(indents), '\t') << "<details><summary>";
+                if (set_as_type) {
+                    set_as_type = false;
+                    out << "<span class='type'>" << element << "</span>";
+                } else {
+                    out << "<span class='val'>" << element << "</span>";
+                }
+                if (value_description != "") {
+                    out << " (<span class='val'>" << value_description << "</span>)";
+                }
+                out << "</summary></details>\n";
+                break;
+            case (OutputType::json):
+                if (!is_first_item.top()) {
+                    out << ",\n";
+                } else {
+                    is_first_item.top() = false;
+                }
+                out << std::string(static_cast<size_t>(indents), '\t') << element;
+                break;
+            default:
+                break;
+        }
+    }
+    void PrintExtension(std::string ext_name, uint32_t revision, int min_width = 0) {
+        switch (output_type) {
+            case (OutputType::text):
+                out << std::string(static_cast<size_t>(indents), '\t') << ext_name << std::string(min_width - ext_name.size(), ' ')
+                    << " : extension revision " << revision << "\n";
+                break;
+            case (OutputType::html):
+                out << std::string(static_cast<size_t>(indents), '\t') << "<details><summary><span class='type'>" << ext_name
+                    << "</span>" << std::string(min_width - ext_name.size(), ' ') << " : extension revision <span class='val'>"
+                    << revision << "</span></summary></details>\n";
+                break;
+            case (OutputType::json):
+                // json output omits extension listings here; DumpExtensions returns early for json
+                break;
+            default:
+                break;
+        }
+    }
+    void AddNewline() {
+        if (output_type == OutputType::text) {
+            out << "\n";
+        }
+    }
+    void IndentIncrease() {
+        if (output_type == OutputType::text) {
+            indents++;
+        }
+    }
+    void IndentDecrease() {
+        if (output_type == OutputType::text) {
+            indents--;
+            assert(indents >= 0 && "indents cannot go below zero");
+        }
+    }
+
+   protected:
+    OutputType output_type;
+    std::ostream &out;
+    int indents = 0;
+
+    // header, subheader
+    bool set_next_header = false;
+    bool set_next_subheader = false;
+
+    // html coloring
+    bool set_as_type = false;
+
+    // open <details>
+    bool set_details_open = false;
+
+    // make object titles the color of types
+    bool set_object_name_as_type = false;
+
+    // objects which are in an array
+    int element_index = -1;  // negative one is the sentinel value
+
+    // json
+    std::stack<bool> is_first_item;
+
+    // utility
+    void PrintHeaderUnderlines(size_t length) {
+        assert(indents >= 0 && "indents must not be negative");
+        assert(length <= 10000 && "length shouldn't be unreasonably large");
+        if (set_next_header) {
+            out << std::string(static_cast<size_t>(indents), '\t') << std::string(length, '=') << "\n";
+            set_next_header = false;
+        } else if (set_next_subheader) {
+            out << std::string(static_cast<size_t>(indents), '\t') << std::string(length, '-') << "\n";
+            set_next_subheader = false;
+        }
+    }
+};
diff --git a/src/third_party/vulkan-tools/src/vulkaninfo/vulkaninfo.cpp b/src/third_party/vulkan-tools/src/vulkaninfo/vulkaninfo.cpp
new file mode 100644
index 0000000..5bac3ba
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/vulkaninfo/vulkaninfo.cpp
@@ -0,0 +1,817 @@
+/*
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: David Pinedo <david@lunarg.com>
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Rene Lindsay <rene@lunarg.com>
+ * Author: Jeremy Kniager <jeremyk@lunarg.com>
+ * Author: Shannon McPherson <shannon@lunarg.com>
+ * Author: Bob Ellison <bob@lunarg.com>
+ * Author: Charles Giessen <charles@lunarg.com>
+ *
+ */
+
+#include "vulkaninfo.hpp"
+
+#ifdef _WIN32
+// Initialize User32 pointers
+PFN_AdjustWindowRect User32Handles::pfnAdjustWindowRect = nullptr;
+PFN_CreateWindowExA User32Handles::pfnCreateWindowExA = nullptr;
+PFN_DefWindowProcA User32Handles::pfnDefWindowProcA = nullptr;
+PFN_DestroyWindow User32Handles::pfnDestroyWindow = nullptr;
+PFN_LoadIconA User32Handles::pfnLoadIconA = nullptr;
+PFN_RegisterClassExA User32Handles::pfnRegisterClassExA = nullptr;
+
+HMODULE User32Handles::user32DllHandle = nullptr;
+
+#endif
+
+// =========== Dump Functions ========= //
+
+void DumpExtensions(Printer &p, std::string layer_name, std::vector<VkExtensionProperties> extensions) {
+    std::sort(extensions.begin(), extensions.end(), [](VkExtensionProperties &a, VkExtensionProperties &b) -> bool {
+        return std::string(a.extensionName) < std::string(b.extensionName);
+    });
+
+    if (p.Type() == OutputType::json) return;
+
+    int max_length = 0;
+    if (extensions.size() > 0) {
+        max_length = static_cast<int>(strlen(extensions.at(0).extensionName));
+        for (auto &ext : extensions) {
+            int len = static_cast<int>(strlen(ext.extensionName));
+            if (len > max_length) max_length = len;
+        }
+    }
+
+    p.ArrayStart(layer_name + " Extensions", extensions.size());
+    for (auto &ext : extensions) {
+        p.PrintExtension(ext.extensionName, ext.specVersion, max_length);
+    }
+    p.ArrayEnd();
+}
+
+void DumpLayers(Printer &p, std::vector<LayerExtensionList> layers, const std::vector<std::unique_ptr<AppGpu>> &gpus) {
+    std::sort(layers.begin(), layers.end(), [](LayerExtensionList &left, LayerExtensionList &right) -> bool {
+        const char *a = left.layer_properties.layerName;
+        const char *b = right.layer_properties.layerName;
+        return a && (!b || std::strcmp(a, b) < 0);
+    });
+
+    if (p.Type() == OutputType::text || p.Type() == OutputType::html) {
+        p.SetHeader().ArrayStart("Layers", layers.size());
+        p.IndentDecrease();
+        for (auto &layer : layers) {
+            auto v_str = VkVersionString(layer.layer_properties.specVersion);
+            auto props = layer.layer_properties;
+
+            std::string header;
+            if (p.Type() == OutputType::text)
+                header = std::string(props.layerName) + " (" + props.description + ") Vulkan version " + v_str +
+                         ", layer version " + std::to_string(props.implementationVersion);
+            else if (p.Type() == OutputType::html)
+                header = std::string("<span class='type'>") + props.layerName + "</span> (" + props.description +
+                         ") Vulkan version <span class='val'>" + v_str + "</span>, layer version <span class='val'>" +
+                         std::to_string(props.implementationVersion) + "</span>";
+
+            p.ObjectStart(header);
+            DumpExtensions(p, "Layer", layer.extension_properties);
+
+            p.ArrayStart("Devices", gpus.size());
+            for (auto &gpu : gpus) {
+                p.PrintElement(std::string("GPU id \t: ") + std::to_string(gpu->id), gpu->props.deviceName);
+                auto exts = gpu->AppGetPhysicalDeviceLayerExtensions(props.layerName);
+                DumpExtensions(p, "Layer-Device", exts);
+                p.AddNewline();
+            }
+            p.ArrayEnd();
+            p.ObjectEnd();
+        }
+        p.IndentIncrease();
+        p.ArrayEnd();
+    } else if (p.Type() == OutputType::json) {
+        p.ArrayStart("ArrayOfVkLayerProperties", layers.size());
+        int i = 0;
+        for (auto &layer : layers) {
+            p.SetElementIndex(i++);
+            DumpVkLayerProperties(p, "layerProperty", layer.layer_properties);
+        }
+        p.ArrayEnd();
+    }
+}
+
+void DumpSurfaceFormats(Printer &p, AppInstance &inst, AppSurface &surface) {
+    p.ArrayStart("Formats", surface.surf_formats2.size());
+    int i = 0;
+    if (inst.CheckExtensionEnabled(VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME)) {
+        for (auto &format : surface.surf_formats2) {
+            p.SetElementIndex(i++);
+            DumpVkSurfaceFormatKHR(p, "SurfaceFormat", format.surfaceFormat);
+        }
+    } else {
+        for (auto &format : surface.surf_formats) {
+            p.SetElementIndex(i++);
+            DumpVkSurfaceFormatKHR(p, "SurfaceFormat", format);
+        }
+    }
+    p.ArrayEnd();
+}
+
+void DumpPresentModes(Printer &p, AppSurface &surface) {
+    p.ArrayStart("Present Modes", surface.surf_present_modes.size());
+    for (auto &mode : surface.surf_present_modes) {
+        p.SetAsType().PrintElement(VkPresentModeKHRString(mode));
+    }
+    p.ArrayEnd();
+}
+
+void DumpSurfaceCapabilities(Printer &p, AppInstance &inst, AppGpu &gpu, AppSurface &surface) {
+    auto &surf_cap = surface.surface_capabilities;
+    p.SetSubHeader();
+    DumpVkSurfaceCapabilitiesKHR(p, "VkSurfaceCapabilitiesKHR", surf_cap);
+
+    p.SetSubHeader().ObjectStart("VkSurfaceCapabilities2EXT");
+    {
+        p.ObjectStart("supportedSurfaceCounters");
+        if (surface.surface_capabilities2_ext.supportedSurfaceCounters == 0) p.PrintElement("None");
+        if (surface.surface_capabilities2_ext.supportedSurfaceCounters & VK_SURFACE_COUNTER_VBLANK_EXT) {
+            p.SetAsType().PrintElement("VK_SURFACE_COUNTER_VBLANK_EXT");
+        }
+        p.ObjectEnd();
+    }
+    p.ObjectEnd();  // VkSurfaceCapabilities2EXT
+
+    chain_iterator_surface_capabilities2(p, inst, gpu, surface.surface_capabilities2_khr.pNext);
+}
+
+void DumpSurface(Printer &p, AppInstance &inst, AppGpu &gpu, AppSurface &surface, std::vector<std::string> surface_types) {
+    std::string header;
+    if (p.Type() == OutputType::text)
+        header = std::string("GPU id : ") + std::to_string(gpu.id) + " (" + gpu.props.deviceName + ")";
+    else if (p.Type() == OutputType::html)
+        header = std::string("GPU id : <span class='val'>") + std::to_string(gpu.id) + "</span> (" + gpu.props.deviceName + ")";
+    p.ObjectStart(header);
+
+    if (surface_types.size() == 0) {
+        p.SetAsType().PrintKeyValue("Surface type", surface.surface_extension.name);
+    } else {
+        p.ArrayStart("Surface types", surface_types.size());
+        for (auto &name : surface_types) {
+            p.PrintElement(name);
+        }
+        p.ArrayEnd();
+    }
+
+    DumpSurfaceFormats(p, inst, surface);
+
+    DumpPresentModes(p, surface);
+
+    DumpSurfaceCapabilities(p, inst, gpu, surface);
+
+    p.ObjectEnd();
+    p.AddNewline();
+}
+
+struct SurfaceTypeGroup {
+    AppSurface *surface;
+    std::vector<std::string> surface_types;
+    AppGpu *gpu;
+};
+
+bool operator==(AppSurface const &a, AppSurface const &b) {
+    return a.surf_present_modes == b.surf_present_modes && a.surf_formats == b.surf_formats && a.surf_formats2 == b.surf_formats2 &&
+           a.surface_capabilities == b.surface_capabilities && a.surface_capabilities2_khr == b.surface_capabilities2_khr &&
+           a.surface_capabilities2_ext == b.surface_capabilities2_ext;
+}
+
+void DumpPresentableSurfaces(Printer &p, AppInstance &inst, const std::vector<std::unique_ptr<AppGpu>> &gpus,
+                             const std::vector<std::unique_ptr<AppSurface>> &surfaces) {
+    p.SetHeader().ObjectStart("Presentable Surfaces");
+    p.IndentDecrease();
+    std::vector<SurfaceTypeGroup> surface_list;
+
+    for (auto &surface : surfaces) {
+        for (auto &gpu : gpus) {
+            auto exists = surface_list.end();
+            for (auto it = surface_list.begin(); it != surface_list.end(); it++) {
+                // use custom comparator to check if the surface has the same values
+                if (it->gpu == gpu.get() && *it->surface == *surface.get()) {
+                    exists = it;
+                }
+            }
+            if (exists != surface_list.end()) {
+                exists->surface_types.push_back(surface.get()->surface_extension.name);
+            } else {
+                surface_list.push_back({surface.get(), {surface.get()->surface_extension.name}, gpu.get()});
+            }
+        }
+    }
+    for (auto &group : surface_list) {
+        DumpSurface(p, inst, *group.gpu, *group.surface, group.surface_types);
+    }
+    p.IndentIncrease();
+    p.ObjectEnd();
+    p.AddNewline();
+}
+
+void DumpGroups(Printer &p, AppInstance &inst) {
+    if (inst.CheckExtensionEnabled(VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME)) {
+        p.SetHeader().ObjectStart("Groups");
+        auto groups = GetGroups(inst);
+        int group_id = 0;
+        for (auto &group : groups) {
+            p.ObjectStart("Device Group Properties (Group " + std::to_string(group_id) + ")");
+            auto group_props = GetGroupProps(group);
+            p.ArrayStart("physicalDeviceCount", group.physicalDeviceCount);
+            int id = 0;
+            for (auto &prop : group_props) {
+                std::string device_out = prop.deviceName;
+                if (p.Type() == OutputType::text) {
+                    device_out += " (ID: " + std::to_string(id++) + ")";
+                } else if (p.Type() == OutputType::html) {
+                    device_out += " (ID: <span class='val'>" + std::to_string(id++) + "</span>)";
+                }
+                p.PrintElement(device_out);
+            }
+            p.ArrayEnd();
+            p.PrintKeyValue("subsetAllocation", group.subsetAllocation);
+            p.ObjectEnd();
+            p.AddNewline();
+
+            p.ObjectStart("Device Group Present Capabilities (Group " + std::to_string(group_id) + ")");
+            auto group_capabilities = GetGroupCapabilities(inst, group);
+            if (group_capabilities.first == false) {
+                p.PrintElement("Group does not support VK_KHR_device_group, skipping printing capabilities");
+            } else {
+                for (uint32_t i = 0; i < group.physicalDeviceCount; i++) {
+                    std::string device_out;
+                    if (p.Type() == OutputType::text) {
+                        device_out = std::string(group_props[i].deviceName) + " (ID: " + std::to_string(i) + ")";
+                    } else if (p.Type() == OutputType::html) {
+                        device_out =
+                            std::string(group_props[i].deviceName) + " (ID: <span class='val'>" + std::to_string(i) + "</span>)";
+                    }
+                    p.PrintElement(device_out);
+                    p.ObjectStart("Can present images from the following devices");
+                    for (uint32_t j = 0; j < group.physicalDeviceCount; j++) {
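+                        // Bit j of presentMask[i] is set when device i can present images created on device j.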
+                        uint32_t mask = 1U << j;
+                        if (group_capabilities.second.presentMask[i] & mask) {
+                            if (p.Type() == OutputType::text)
+                                p.PrintElement(std::string(group_props[j].deviceName) + " (ID: " + std::to_string(j) + ")");
+                            if (p.Type() == OutputType::html)
+                                p.PrintElement(std::string(group_props[j].deviceName) + " (ID: <span class='val'>" +
+                                               std::to_string(j) + "</span>)");
+                        }
+                    }
+                    p.ObjectEnd();
+                }
+                DumpVkDeviceGroupPresentModeFlagsKHR(p, "Present modes", group_capabilities.second.modes);
+            }
+            p.ObjectEnd();
+            p.AddNewline();
+            group_id++;
+        }
+        p.ObjectEnd();
+        p.AddNewline();
+    }
+}
+
+void GpuDumpProps(Printer &p, AppGpu &gpu) {
+    auto props = gpu.GetDeviceProperties();
+    p.SetSubHeader().ObjectStart("VkPhysicalDeviceProperties");
+    p.PrintKeyValue("apiVersion", props.apiVersion, 14, VkVersionString(props.apiVersion));
+    p.PrintKeyValue("driverVersion", props.driverVersion, 14, to_hex_str(props.driverVersion));
+    if (p.Type() == OutputType::json) {
+        p.PrintKeyValue("vendorID", props.vendorID, 14);
+        p.PrintKeyValue("deviceID", props.deviceID, 14);
+        p.PrintKeyValue("deviceType", props.deviceType, 14);
+    } else {
+        p.PrintKeyValue("vendorID", to_hex_str(props.vendorID), 14);
+        p.PrintKeyValue("deviceID", to_hex_str(props.deviceID), 14);
+        p.PrintKeyString("deviceType", VkPhysicalDeviceTypeString(props.deviceType), 14);
+    }
+    p.PrintKeyString("deviceName", props.deviceName, 14);
+    if (p.Type() == OutputType::json) {
+        p.ArrayStart("pipelineCacheUUID");
+        for (uint32_t i = 0; i < VK_UUID_SIZE; ++i) {
+            p.PrintElement(static_cast<uint32_t>(props.pipelineCacheUUID[i]));
+        }
+        p.ArrayEnd();
+    }
+    p.AddNewline();
+    if (p.Type() != OutputType::json) {
+        p.ObjectEnd();  // limits and sparse props are not sub objects in the text and html output
+    }
+
+    if (gpu.inst.CheckExtensionEnabled(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        DumpVkPhysicalDeviceLimits(p, "VkPhysicalDeviceLimits", gpu.props2.properties.limits);
+    } else {
+        DumpVkPhysicalDeviceLimits(p, "VkPhysicalDeviceLimits", gpu.props.limits);
+    }
+    p.AddNewline();
+    if (gpu.inst.CheckExtensionEnabled(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        DumpVkPhysicalDeviceSparseProperties(p, "VkPhysicalDeviceSparseProperties", gpu.props2.properties.sparseProperties);
+    } else {
+        DumpVkPhysicalDeviceSparseProperties(p, "VkPhysicalDeviceSparseProperties", gpu.props.sparseProperties);
+    }
+    p.AddNewline();
+    if (p.Type() == OutputType::json) {
+        p.ObjectEnd();  // limits and sparse props are sub objects in the json output
+    }
+
+    if (p.Type() != OutputType::json) {
+        if (gpu.inst.CheckExtensionEnabled(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+            void *place = gpu.props2.pNext;
+            chain_iterator_phys_device_props2(p, gpu, place);
+        }
+    }
+    p.AddNewline();
+}
+void GpuDumpQueueProps(Printer &p, std::vector<SurfaceExtension> &surfaces, AppQueueFamilyProperties &queue) {
+    p.SetHeader().SetElementIndex(static_cast<int>(queue.queue_index)).ObjectStart("VkQueueFamilyProperties");
+    if (p.Type() == OutputType::json) {
+        DumpVkExtent3D(p, "minImageTransferGranularity", queue.props.minImageTransferGranularity);
+    } else {
+        p.PrintKeyString("minImageTransferGranularity", VkExtent3DString(queue.props.minImageTransferGranularity), 27);
+    }
+    p.PrintKeyValue("queueCount", queue.props.queueCount, 27);
+    if (p.Type() == OutputType::json) {
+        p.PrintKeyValue("queueFlags", queue.props.queueFlags, 27);
+    } else {
+        p.PrintKeyValue("queueFlags", VkQueueFlagsString(queue.props.queueFlags), 27);
+    }
+
+    p.PrintKeyValue("timestampValidBits", queue.props.timestampValidBits, 27);
+
+    if (p.Type() != OutputType::json) {
+        if (queue.is_present_platform_agnostic) {
+            p.PrintKeyString("present support", queue.platforms_support_present ? "true" : "false");
+        } else {
+            p.ObjectStart("present support");
+            for (auto &surface : surfaces) {
+                p.PrintKeyString(surface.name, surface.supports_present ? "true" : "false", 19);
+            }
+            p.ObjectEnd();
+        }
+    }
+    p.ObjectEnd();
+    p.AddNewline();
+}
+
+// This prints a number of bytes in a human-readable format according to prefixes of the International System of Quantities (ISQ),
+// defined in ISO/IEC 80000. The prefixes used here are not SI prefixes, but rather the binary prefixes based on powers of 1024
+// (kibi-, mebi-, gibi- etc.).
+#define kBufferSize 32
+
+std::string NumToNiceStr(const size_t sz) {
+    const char prefixes[] = "KMGTPEZY";
+    char buf[kBufferSize];
+    int which = -1;
+    double result = (double)sz;
+    while (result > 1024 && which < 7) {
+        result /= 1024;
+        ++which;
+    }
+
+    char unit[] = "\0i";
+    if (which >= 0) {
+        unit[0] = prefixes[which];
+    }
+#ifdef _WIN32
+    _snprintf_s(buf, kBufferSize * sizeof(char), kBufferSize, "%.2f %sB", result, unit);
+#else
+    snprintf(buf, kBufferSize, "%.2f %sB", result, unit);
+#endif
+    return std::string(buf);
+}
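+
+// For illustration, the conversion above produces, for example:
+//   NumToNiceStr(512)                          -> "512.00 B"
+//   NumToNiceStr(1536)                         -> "1.50 KiB"
+//   NumToNiceStr(static_cast<size_t>(3) << 30) -> "3.00 GiB"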
+
+void GpuDumpMemoryProps(Printer &p, AppGpu &gpu) {
+    p.SetHeader().ObjectStart("VkPhysicalDeviceMemoryProperties");
+    p.IndentDecrease();
+    p.ArrayStart("memoryHeaps", gpu.memory_props.memoryHeapCount);
+    for (uint32_t i = 0; i < gpu.memory_props.memoryHeapCount; ++i) {
+        const VkDeviceSize memSize = gpu.memory_props.memoryHeaps[i].size;
+        std::string mem_size_human_readable = NumToNiceStr(static_cast<size_t>(memSize));
+
+        std::string mem_size_str = std::to_string(memSize) + " (" + to_hex_str(memSize) + ") (" + mem_size_human_readable + ")";
+
+        p.SetElementIndex(static_cast<int>(i)).ObjectStart("memoryHeaps");
+        if (p.Type() != OutputType::json) {
+            p.PrintKeyValue("size", mem_size_str, 6);
+            p.PrintKeyValue("budget", gpu.heapBudget[i], 6);
+            p.PrintKeyValue("usage", gpu.heapUsage[i], 6);
+            DumpVkMemoryHeapFlags(p, "flags", gpu.memory_props.memoryHeaps[i].flags, 6);
+        } else {
+            p.PrintKeyValue("flags", gpu.memory_props.memoryHeaps[i].flags);
+            p.PrintKeyValue("size", memSize);
+        }
+        p.ObjectEnd();
+    }
+    p.ArrayEnd();
+
+    p.ArrayStart("memoryTypes", gpu.memory_props.memoryTypeCount);
+    for (uint32_t i = 0; i < gpu.memory_props.memoryTypeCount; ++i) {
+        p.SetElementIndex(static_cast<int>(i)).ObjectStart("memoryTypes");
+        p.PrintKeyValue("heapIndex", gpu.memory_props.memoryTypes[i].heapIndex, 13);
+        if (p.Type() == OutputType::json) {
+            p.PrintKeyValue("propertyFlags", gpu.memory_props.memoryTypes[i].propertyFlags, 13);
+        } else {
+            auto flags = gpu.memory_props.memoryTypes[i].propertyFlags;
+            DumpVkMemoryPropertyFlags(p, "propertyFlags = " + to_hex_str(flags), flags);
+
+            p.ObjectStart("usable for");
+            const uint32_t memtype_bit = 1U << i;
+
+            // only linear and optimal tiling considered
+            for (uint32_t tiling = VK_IMAGE_TILING_OPTIMAL; tiling < gpu.mem_type_res_support.image.size(); ++tiling) {
+                std::string usable;
+                usable += std::string(VkImageTilingString(VkImageTiling(tiling))) + ": ";
+                size_t orig_usable_str_size = usable.size();
+                bool first = true;
+                for (size_t fmt_i = 0; fmt_i < gpu.mem_type_res_support.image[tiling].size(); ++fmt_i) {
+                    const MemImageSupport *image_support = &gpu.mem_type_res_support.image[tiling][fmt_i];
+                    const bool regular_compatible =
+                        image_support->regular_supported && (image_support->regular_memtypes & memtype_bit);
+                    const bool sparse_compatible =
+                        image_support->sparse_supported && (image_support->sparse_memtypes & memtype_bit);
+                    const bool transient_compatible =
+                        image_support->transient_supported && (image_support->transient_memtypes & memtype_bit);
+
+                    if (regular_compatible || sparse_compatible || transient_compatible) {
+                        if (!first) usable += ", ";
+                        first = false;
+
+                        if (fmt_i == 0) {
+                            usable += "color images";
+                        } else {
+                            usable += VkFormatString(gpu.mem_type_res_support.image[tiling][fmt_i].format);
+                        }
+
+                        if (regular_compatible && !sparse_compatible && !transient_compatible && image_support->sparse_supported &&
+                            image_support->transient_supported) {
+                            usable += "(non-sparse, non-transient)";
+                        } else if (regular_compatible && !sparse_compatible && image_support->sparse_supported) {
+                            usable += "(non-sparse)";
+                        } else if (regular_compatible && !transient_compatible && image_support->transient_supported) {
+                            usable += "(non-transient)";
+                        } else if (!regular_compatible && sparse_compatible && !transient_compatible &&
+                                   image_support->sparse_supported) {
+                            usable += "(sparse only)";
+                        } else if (!regular_compatible && !sparse_compatible && transient_compatible &&
+                                   image_support->transient_supported) {
+                            usable += "(transient only)";
+                        } else if (!regular_compatible && sparse_compatible && transient_compatible &&
+                                   image_support->sparse_supported && image_support->transient_supported) {
+                            usable += "(sparse and transient only)";
+                        }
+                    }
+                }
+                if (usable.size() == orig_usable_str_size)  // not usable for anything
+                {
+                    usable += "None";
+                }
+                p.PrintElement(usable);
+            }
+            p.ObjectEnd();
+        }
+
+        p.ObjectEnd();
+    }
+    p.ArrayEnd();
+    p.IndentIncrease();
+    p.ObjectEnd();
+    p.AddNewline();
+}
+
+void GpuDumpFeatures(Printer &p, AppGpu &gpu) {
+    p.SetHeader();
+    DumpVkPhysicalDeviceFeatures(p, "VkPhysicalDeviceFeatures", gpu.features);
+    p.AddNewline();
+    if (p.Type() != OutputType::json) {
+        if (gpu.inst.CheckExtensionEnabled(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+            void *place = gpu.features2.pNext;
+            chain_iterator_phys_device_features2(p, gpu, place);
+        }
+    }
+}
+
+void GpuDumpFormatProperty(Printer &p, VkFormat fmt, VkFormatProperties prop) {
+    if (p.Type() == OutputType::text) {
+        p.ObjectStart("Properties");
+    } else if (p.Type() == OutputType::html) {
+        p.SetTitleAsType().ObjectStart(VkFormatString(fmt));
+    } else if (p.Type() == OutputType::json) {
+        p.ObjectStart("");
+    }
+    if (p.Type() == OutputType::html || p.Type() == OutputType::text) {
+        p.SetOpenDetails();
+        DumpVkFormatFeatureFlags(p, "linearTiling", prop.linearTilingFeatures);
+        p.SetOpenDetails();
+        DumpVkFormatFeatureFlags(p, "optimalTiling", prop.optimalTilingFeatures);
+        p.SetOpenDetails();
+        DumpVkFormatFeatureFlags(p, "bufferFeatures", prop.bufferFeatures);
+    } else if (p.Type() == OutputType::json) {
+        p.PrintKeyValue("formatID", fmt);
+        p.PrintKeyValue("linearTilingFeatures", prop.linearTilingFeatures);
+        p.PrintKeyValue("optimalTilingFeatures", prop.optimalTilingFeatures);
+        p.PrintKeyValue("bufferFeatures", prop.bufferFeatures);
+    }
+    p.ObjectEnd();
+}
+
+void GpuDevDump(Printer &p, AppGpu &gpu) {
+    if (p.Type() == OutputType::json) {
+        p.ArrayStart("ArrayOfVkFormatProperties");
+    } else {
+        p.SetHeader().ObjectStart("Format Properties");
+        p.IndentDecrease();
+    }
+
+    if (p.Type() == OutputType::text) {
+        auto fmtPropMap = FormatPropMap(gpu);
+
+        int counter = 0;
+        std::vector<VkFormat> unsupported_formats;
+        for (auto &prop : fmtPropMap) {
+            VkFormatProperties props;
+            props.linearTilingFeatures = prop.first.linear;
+            props.optimalTilingFeatures = prop.first.optimal;
+            props.bufferFeatures = prop.first.buffer;
+            if (props.linearTilingFeatures == 0 && props.optimalTilingFeatures == 0 && props.bufferFeatures == 0) {
+                unsupported_formats = prop.second;
+                continue;
+            }
+
+            p.SetElementIndex(counter++).ObjectStart("Common Format Group");
+            p.IndentDecrease();
+            p.ObjectStart("Formats");
+            for (auto &fmt : prop.second) {
+                p.SetAsType().PrintElement(VkFormatString(fmt));
+            }
+            p.ObjectEnd();
+
+            GpuDumpFormatProperty(p, VK_FORMAT_UNDEFINED, props);
+
+            p.IndentIncrease();
+            p.ObjectEnd();
+            p.AddNewline();
+        }
+
+        p.ObjectStart("Unsupported Formats");
+        for (auto &fmt : unsupported_formats) {
+            p.SetAsType().PrintElement(VkFormatString(fmt));
+        }
+        p.ObjectEnd();
+
+    } else {
+        for (auto &format : gpu.supported_format_ranges) {
+            if (gpu.FormatRangeSupported(format)) {
+                for (int32_t fmt_counter = format.first_format; fmt_counter <= format.last_format; ++fmt_counter) {
+                    VkFormat fmt = static_cast<VkFormat>(fmt_counter);
+
+                    VkFormatProperties props;
+                    vkGetPhysicalDeviceFormatProperties(gpu.phys_device, fmt, &props);
+
+                    // if json, don't print format properties that are unsupported
+                    if (p.Type() == OutputType::json &&
+                        (props.linearTilingFeatures || props.optimalTilingFeatures || props.bufferFeatures) == 0)
+                        continue;
+
+                    GpuDumpFormatProperty(p, fmt, props);
+                }
+            }
+        }
+    }
+
+    if (p.Type() == OutputType::json) {
+        p.ArrayEnd();
+    } else {
+        p.IndentIncrease();
+        p.ObjectEnd();
+    }
+
+    p.AddNewline();
+}
+
+void DumpGpu(Printer &p, AppGpu &gpu, bool show_formats) {
+    if (p.Type() != OutputType::json) {
+        p.ObjectStart("GPU" + std::to_string(gpu.id));
+        p.IndentDecrease();
+    }
+    GpuDumpProps(p, gpu);
+
+    if (p.Type() != OutputType::json) {
+        DumpExtensions(p, "Device", gpu.device_extensions);
+        p.AddNewline();
+    }
+
+    if (p.Type() == OutputType::json) p.ArrayStart("ArrayOfVkQueueFamilyProperties");
+    for (uint32_t i = 0; i < gpu.queue_count; i++) {
+        AppQueueFamilyProperties queue_props = AppQueueFamilyProperties(gpu, i);
+        GpuDumpQueueProps(p, gpu.inst.surface_extensions, queue_props);
+    }
+    if (p.Type() == OutputType::json) p.ArrayEnd();
+
+    GpuDumpMemoryProps(p, gpu);
+    GpuDumpFeatures(p, gpu);
+    if (p.Type() != OutputType::text || show_formats) {
+        GpuDevDump(p, gpu);
+    }
+
+    if (p.Type() != OutputType::json) {
+        p.IndentIncrease();
+        p.ObjectEnd();
+    }
+    p.AddNewline();
+}
+
+// ============ Printing Logic ============= //
+
+#ifdef _WIN32
+// Enlarges the console window to have a large scrollback size.
+static void ConsoleEnlarge() {
+    const HANDLE console_handle = GetStdHandle(STD_OUTPUT_HANDLE);
+
+    // make the console window bigger
+    CONSOLE_SCREEN_BUFFER_INFO csbi;
+    COORD buffer_size;
+    if (GetConsoleScreenBufferInfo(console_handle, &csbi)) {
+        buffer_size.X = csbi.dwSize.X + 30;
+        buffer_size.Y = 20000;
+        SetConsoleScreenBufferSize(console_handle, buffer_size);
+
+        // only resize the window when csbi holds valid data
+        SMALL_RECT r;
+        r.Left = r.Top = 0;
+        r.Right = csbi.dwSize.X - 1 + 30;
+        r.Bottom = 50;
+        SetConsoleWindowInfo(console_handle, true, &r);
+    }
+
+    // change the console window title
+    SetConsoleTitle(TEXT(app_short_name));
+}
+#endif
+
+void print_usage(const char *argv0) {
+    std::cout << "\nvulkaninfo - Summarize Vulkan information in relation to the current environment.\n\n";
+    std::cout << "USAGE: " << argv0 << " [options]\n\n";
+    std::cout << "OPTIONS:\n";
+    std::cout << "-h, --help            Print this help.\n";
+    std::cout << "--html                Produce an html version of vulkaninfo output, saved as\n";
+    std::cout << "                      \"vulkaninfo.html\" in the directory in which the command is\n";
+    std::cout << "                      run.\n";
+    std::cout << "-j, --json            Produce a json version of vulkaninfo output for the first gpu in\n";
+    std::cout << "                      the system, conforming to the DevSim schema, on standard output.\n";
+    std::cout << "--json=<gpu-number>   For a multi-gpu system, a single gpu can be targeted by\n";
+    std::cout << "                      specifying the gpu-number associated with the gpu of\n";
+    std::cout << "                      interest. This number can be determined by running\n";
+    std::cout << "                      vulkaninfo without any options specified.\n";
+    std::cout << "--show-formats        Display the format properties of each physical device.\n";
+    std::cout << "                      Note: This option does not affect html or json output;\n";
+    std::cout << "                      they will always print format properties.\n\n";
+}
+
+int main(int argc, char **argv) {
+#ifdef _WIN32
+    if (ConsoleIsExclusive()) ConsoleEnlarge();
+    if (!LoadUser32Dll()) {
+        fprintf(stderr, "Failed to load user32.dll library!\n");
+        WAIT_FOR_CONSOLE_DESTROY;
+        exit(1);
+    }
+#endif
+
+    uint32_t selected_gpu = 0;
+    bool show_formats = false;
+
+    // Combinations of output: html only, html AND json, json only, human readable only
+    for (int i = 1; i < argc; ++i) {
+        if (strncmp("--json", argv[i], 6) == 0 || strcmp(argv[i], "-j") == 0) {
+            if (strlen(argv[i]) > 7 && strncmp("--json=", argv[i], 7) == 0) {
+                selected_gpu = static_cast<uint32_t>(strtol(argv[i] + 7, nullptr, 10));
+            }
+            human_readable_output = false;
+            json_output = true;
+        } else if (strcmp(argv[i], "--html") == 0) {
+            human_readable_output = false;
+            html_output = true;
+        } else if (strcmp(argv[i], "--show-formats") == 0) {
+            show_formats = true;
+        } else if (strcmp(argv[i], "--help") == 0 || strcmp(argv[i], "-h") == 0) {
+            print_usage(argv[0]);
+            return 1;
+        } else {
+            print_usage(argv[0]);
+            return 1;
+        }
+    }
+
+    AppInstance instance = {};
+    SetupWindowExtensions(instance);
+
+    auto pNext_chains = get_chain_infos();
+
+    auto phys_devices = instance.FindPhysicalDevices();
+
+    std::vector<std::unique_ptr<AppSurface>> surfaces;
+#if defined(VK_USE_PLATFORM_XCB_KHR) || defined(VK_USE_PLATFORM_XLIB_KHR) || defined(VK_USE_PLATFORM_WIN32_KHR) || \
+    defined(VK_USE_PLATFORM_MACOS_MVK) || defined(VK_USE_PLATFORM_METAL_EXT) || defined(VK_USE_PLATFORM_WAYLAND_KHR)
+    for (auto &surface_extension : instance.surface_extensions) {
+        surface_extension.create_window(instance);
+        surface_extension.surface = surface_extension.create_surface(instance);
+        for (auto &phys_device : phys_devices) {
+            surfaces.push_back(std::unique_ptr<AppSurface>(
+                new AppSurface(instance, phys_device, surface_extension, pNext_chains.surface_capabilities2)));
+        }
+    }
+#endif
+
+    std::vector<std::unique_ptr<AppGpu>> gpus;
+
+    uint32_t gpu_counter = 0;
+    for (auto &phys_device : phys_devices) {
+        gpus.push_back(std::unique_ptr<AppGpu>(new AppGpu(instance, gpu_counter++, phys_device, pNext_chains)));
+    }
+
+    if (selected_gpu >= gpus.size()) {
+        selected_gpu = 0;
+    }
+
+    std::vector<std::unique_ptr<Printer>> printers;
+
+    std::streambuf *buf;
+    buf = std::cout.rdbuf();
+    std::ostream out(buf);
+    std::ofstream html_out;
+
+    if (human_readable_output) {
+        printers.push_back(std::unique_ptr<Printer>(new Printer(OutputType::text, out, selected_gpu, instance.vk_version)));
+    }
+    if (html_output) {
+        html_out = std::ofstream("vulkaninfo.html");
+        printers.push_back(std::unique_ptr<Printer>(new Printer(OutputType::html, html_out, selected_gpu, instance.vk_version)));
+    }
+    if (json_output) {
+        printers.push_back(std::unique_ptr<Printer>(new Printer(OutputType::json, out, selected_gpu, instance.vk_version)));
+    }
+
+    for (auto &p : printers) {
+        p->SetHeader();
+        DumpExtensions(*p.get(), "Instance", instance.global_extensions);
+        p->AddNewline();
+
+        DumpLayers(*p.get(), instance.global_layers, gpus);
+
+        if (p->Type() != OutputType::json) {
+#if defined(VK_USE_PLATFORM_XCB_KHR) || defined(VK_USE_PLATFORM_XLIB_KHR) || defined(VK_USE_PLATFORM_WIN32_KHR) || \
+    defined(VK_USE_PLATFORM_MACOS_MVK) || defined(VK_USE_PLATFORM_METAL_EXT) || defined(VK_USE_PLATFORM_WAYLAND_KHR)
+            DumpPresentableSurfaces(*p.get(), instance, gpus, surfaces);
+#endif
+            DumpGroups(*p.get(), instance);
+
+            p->SetHeader().ObjectStart("Device Properties and Extensions");
+            p->IndentDecrease();
+        }
+        for (auto &gpu : gpus) {
+            if ((p->Type() == OutputType::json && gpu->id == selected_gpu) || p->Type() == OutputType::text ||
+                p->Type() == OutputType::html) {
+                DumpGpu(*p.get(), *gpu.get(), show_formats);
+            }
+        }
+        if (p->Type() != OutputType::json) {
+            p->IndentIncrease();
+            p->ObjectEnd();
+        }
+    }
+
+#if defined(VK_USE_PLATFORM_XCB_KHR) || defined(VK_USE_PLATFORM_XLIB_KHR) || defined(VK_USE_PLATFORM_WIN32_KHR) || \
+    defined(VK_USE_PLATFORM_MACOS_MVK) || defined(VK_USE_PLATFORM_METAL_EXT) || defined(VK_USE_PLATFORM_WAYLAND_KHR)
+
+    for (auto &surface_extension : instance.surface_extensions) {
+        AppDestroySurface(instance, surface_extension.surface);
+        surface_extension.destroy_window(instance);
+    }
+#endif
+
+    WAIT_FOR_CONSOLE_DESTROY;
+#ifdef _WIN32
+    FreeUser32Dll();
+#endif
+
+    return 0;
+}
diff --git a/src/third_party/vulkan-tools/src/vulkaninfo/vulkaninfo.h b/src/third_party/vulkan-tools/src/vulkaninfo/vulkaninfo.h
new file mode 100644
index 0000000..e9864ca
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/vulkaninfo/vulkaninfo.h
@@ -0,0 +1,1479 @@
+/*
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: David Pinedo <david@lunarg.com>
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Rene Lindsay <rene@lunarg.com>
+ * Author: Jeremy Kniager <jeremyk@lunarg.com>
+ * Author: Shannon McPherson <shannon@lunarg.com>
+ * Author: Bob Ellison <bob@lunarg.com>
+ * Author: Charles Giessen <charles@lunarg.com>
+ *
+ */
+
+#include <algorithm>
+#include <array>
+#include <iostream>
+#include <fstream>
+#include <memory>
+#include <ostream>
+#include <string>
+#include <unordered_map>
+#include <vector>
+#include <utility>
+
+#include <assert.h>
+#include <stdint.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <cstring>
+
+#ifdef __GNUC__
+#ifndef _POSIX_C_SOURCE
+#define _POSIX_C_SOURCE 200809L
+#endif
+#else
+#define strndup(p, n) strdup(p)
+#endif
+
+#ifdef _WIN32
+#include <fcntl.h>
+#include <io.h>
+#endif  // _WIN32
+
+#if defined(VK_USE_PLATFORM_XLIB_KHR) || defined(VK_USE_PLATFORM_XCB_KHR)
+#include <X11/Xutil.h>
+#endif
+
+#if defined(VK_USE_PLATFORM_MACOS_MVK) || defined(VK_USE_PLATFORM_METAL_EXT)
+#include "metal_view.h"
+#endif
+
+#include <vulkan/vulkan.h>
+
+#define ERR(err) std::cerr << __FILE__ << ":" << __LINE__ << ": failed with " << VkResultString(err) << "\n";
+
+// global configuration
+bool human_readable_output = true;
+bool html_output = false;
+bool json_output = false;
+
+#ifdef _WIN32
+
+#define strdup _strdup
+
+// Returns nonzero if the console is used only for this process. Will return
+// zero if another process (such as cmd.exe) is also attached.
+static int ConsoleIsExclusive(void) {
+    DWORD pids[2];
+    DWORD num_pids = GetConsoleProcessList(pids, ARRAYSIZE(pids));
+    return num_pids <= 1;
+}
+
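+// Keeps the console window open (human-readable output only) when this process owns the
+// console exclusively, e.g. when vulkaninfo was launched by double-clicking the executable.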
+#define WAIT_FOR_CONSOLE_DESTROY                                            \
+    do {                                                                    \
+        if (ConsoleIsExclusive() && human_readable_output) Sleep(INFINITE); \
+    } while (0)
+#else
+#define WAIT_FOR_CONSOLE_DESTROY
+#endif
+
+#define ERR_EXIT(err)             \
+    do {                          \
+        ERR(err);                 \
+        fflush(stdout);           \
+        fflush(stderr);           \
+        WAIT_FOR_CONSOLE_DESTROY; \
+        exit(-1);                 \
+    } while (0)
+
+#ifdef _WIN32
+
+#define _CALL_PFN(pfn, ...) (pfn)
+#define CALL_PFN(fncName) _CALL_PFN(User32Handles::pfn##fncName)
+
+#define _CHECK_PFN(pfn, fncName)                                              \
+    do {                                                                      \
+        if (pfn == nullptr) {                                                 \
+            fprintf(stderr, "Failed to get %s function address!\n", fncName); \
+            WAIT_FOR_CONSOLE_DESTROY;                                         \
+            exit(1);                                                          \
+        }                                                                     \
+    } while (false)
+
+#define _SET_PFN(dllHandle, pfnType, pfn, fncName)                           \
+    do {                                                                     \
+        pfn = reinterpret_cast<pfnType>(GetProcAddress(dllHandle, fncName)); \
+        _CHECK_PFN(pfn, fncName);                                            \
+    } while (false)
+
+#define SET_PFN(dllHandle, fncName) _SET_PFN(User32Handles::dllHandle, PFN_##fncName, User32Handles::pfn##fncName, #fncName)
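+// SET_PFN(user32DllHandle, Foo) resolves "Foo" from user32.dll via GetProcAddress into
+// User32Handles::pfnFoo and exits with an error message if the symbol cannot be found.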
+
+// User32 function declarations
+typedef WINUSERAPI BOOL(WINAPI *PFN_AdjustWindowRect)(_Inout_ LPRECT, _In_ DWORD, _In_ BOOL);
+typedef WINUSERAPI HWND(WINAPI *PFN_CreateWindowExA)(_In_ DWORD, _In_opt_ LPCSTR, _In_opt_ LPCSTR, _In_ DWORD, _In_ int, _In_ int,
+                                                     _In_ int, _In_ int, _In_opt_ HWND, _In_opt_ HMENU, _In_opt_ HINSTANCE,
+                                                     _In_opt_ LPVOID);
+typedef WINUSERAPI LRESULT(WINAPI *PFN_DefWindowProcA)(_In_ HWND, _In_ UINT, _In_ WPARAM, _In_ LPARAM);
+typedef WINUSERAPI BOOL(WINAPI *PFN_DestroyWindow)(_In_ HWND);
+typedef WINUSERAPI HICON(WINAPI *PFN_LoadIconA)(_In_opt_ HINSTANCE, _In_ LPCSTR);
+typedef WINUSERAPI ATOM(WINAPI *PFN_RegisterClassExA)(_In_ CONST WNDCLASSEXA *);
+
+struct User32Handles {
+    // User32 function pointers
+    static PFN_AdjustWindowRect pfnAdjustWindowRect;
+    static PFN_CreateWindowExA pfnCreateWindowExA;
+    static PFN_DefWindowProcA pfnDefWindowProcA;
+    static PFN_DestroyWindow pfnDestroyWindow;
+    static PFN_LoadIconA pfnLoadIconA;
+    static PFN_RegisterClassExA pfnRegisterClassExA;
+
+    // User32 dll handle
+    static HMODULE user32DllHandle;
+};
+
+bool LoadUser32Dll() {
+    User32Handles::user32DllHandle = LoadLibraryExA("user32.dll", nullptr, 0);
+    if (User32Handles::user32DllHandle != NULL) {
+        SET_PFN(user32DllHandle, AdjustWindowRect);
+        SET_PFN(user32DllHandle, CreateWindowExA);
+        SET_PFN(user32DllHandle, DefWindowProcA);
+        SET_PFN(user32DllHandle, DestroyWindow);
+        SET_PFN(user32DllHandle, LoadIconA);
+        SET_PFN(user32DllHandle, RegisterClassExA);
+        return true;
+    }
+    return false;
+}
+
+void FreeUser32Dll() {
+    if (User32Handles::user32DllHandle != nullptr) {
+        FreeLibrary(User32Handles::user32DllHandle);
+        User32Handles::user32DllHandle = nullptr;
+    }
+}
+#endif  // _WIN32
+
+static const char *VkResultString(VkResult err);
+
+const char *app_short_name = "vulkaninfo";
+
+std::vector<const char *> get_c_str_array(std::vector<std::string> const &vec) {
+    std::vector<const char *> ret;
+    for (auto &str : vec) ret.push_back(str.c_str());
+    return ret;
+}
+
+static const char *VkDebugReportFlagsEXTString(const VkDebugReportFlagsEXT flags) {
+    switch (flags) {
+        case VK_DEBUG_REPORT_ERROR_BIT_EXT:
+            return "ERROR";
+        case VK_DEBUG_REPORT_WARNING_BIT_EXT:
+            return "WARNING";
+        case VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT:
+            return "PERF";
+        case VK_DEBUG_REPORT_INFORMATION_BIT_EXT:
+            return "INFO";
+        case VK_DEBUG_REPORT_DEBUG_BIT_EXT:
+            return "DEBUG";
+        default:
+            return "UNKNOWN";
+    }
+}
+static VKAPI_ATTR VkBool32 VKAPI_CALL DbgCallback(VkDebugReportFlagsEXT msgFlags, VkDebugReportObjectTypeEXT objType,
+                                                  uint64_t srcObject, size_t location, int32_t msgCode, const char *pLayerPrefix,
+                                                  const char *pMsg, void *pUserData) {
+    std::cerr << VkDebugReportFlagsEXTString(msgFlags) << ": [" << pLayerPrefix << "] Code " << msgCode << " : " << pMsg << "\n";
+
+    // True is reserved for layer developers, and MAY mean calls are not distributed down the layer chain after validation error.
+    // False SHOULD always be returned by apps:
+    return VK_FALSE;
+}
+
+// ----------- Instance Setup ------- //
+
+struct VkStructureHeader {
+    VkStructureType sType;
+    VkStructureHeader *pNext;
+};
+
+struct pNextChainBuildingBlockInfo {
+    VkStructureType sType;
+    uint32_t mem_size;
+};
+
+void buildpNextChain(VkStructureHeader *first, const std::vector<pNextChainBuildingBlockInfo> &chain_info) {
+    VkStructureHeader *place = first;
+
+    for (uint32_t i = 0; i < chain_info.size(); i++) {
+        place->pNext = static_cast<VkStructureHeader *>(malloc(chain_info[i].mem_size));
+        if (!place->pNext) {
+            ERR_EXIT(VK_ERROR_OUT_OF_HOST_MEMORY);
+        }
+        std::memset(place->pNext, 0, chain_info[i].mem_size);
+        place = place->pNext;
+        place->sType = chain_info[i].sType;
+    }
+
+    place->pNext = nullptr;
+}
+
+void freepNextChain(VkStructureHeader *first) {
+    VkStructureHeader *place = first;
+    VkStructureHeader *next = nullptr;
+
+    while (place) {
+        next = place->pNext;
+        free(place);
+        place = next;
+    }
+}
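+
+// Illustrative usage of the two helpers above (a sketch; "chain_info" and "gpu" are placeholders
+// for a std::vector<pNextChainBuildingBlockInfo> and a VkPhysicalDevice):
+//   VkPhysicalDeviceProperties2 props2{};
+//   props2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
+//   buildpNextChain(reinterpret_cast<VkStructureHeader *>(&props2), chain_info);
+//   vkGetPhysicalDeviceProperties2KHR(gpu, &props2);  // via the pointer loaded in AppLoadInstanceCommands
+//   freepNextChain(static_cast<VkStructureHeader *>(props2.pNext));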
+
+struct LayerExtensionList {
+    VkLayerProperties layer_properties{};
+    std::vector<VkExtensionProperties> extension_properties;
+};
+
+struct AppInstance;
+
+struct SurfaceExtension {
+    std::string name;
+    void (*create_window)(AppInstance &) = nullptr;
+    VkSurfaceKHR (*create_surface)(AppInstance &) = nullptr;
+    void (*destroy_window)(AppInstance &) = nullptr;
+    VkSurfaceKHR surface = VK_NULL_HANDLE;
+    VkBool32 supports_present = 0;
+
+    bool operator==(const SurfaceExtension &other) {
+        return name == other.name && surface == other.surface && supports_present == other.supports_present;
+    }
+};
+
+struct VulkanVersion {
+    uint32_t major;
+    uint32_t minor;
+    uint32_t patch;
+};
+
+struct AppInstance {
+    VkInstance instance;
+    uint32_t instance_version;
+    VulkanVersion vk_version;
+
+    std::vector<LayerExtensionList> global_layers;
+
+    std::vector<VkExtensionProperties> global_extensions;  // Instance Extensions
+
+    std::vector<std::string> inst_extensions;
+
+    // Functions from vkGetInstanceProcAddress
+    PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR;
+    PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR;
+    PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR;
+    PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR;
+    PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR;
+    PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR;
+    PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR;
+    PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR;
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR;
+    PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR;
+    PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR;
+    PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT;
+
+    std::vector<SurfaceExtension> surface_extensions;
+
+    int width = 256, height = 256;
+
+    VkSurfaceCapabilitiesKHR surface_capabilities;
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    HINSTANCE h_instance;  // Windows Instance
+    HWND h_wnd;            // window handle
+#endif
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    xcb_connection_t *xcb_connection;
+    xcb_screen_t *xcb_screen;
+    xcb_window_t xcb_window;
+#endif
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    Display *xlib_display;
+    Window xlib_window;
+#endif
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+    void *macos_window;
+#endif
+#ifdef VK_USE_PLATFORM_METAL_EXT
+    void *metal_window;
+#endif
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    wl_display *wayland_display;
+    wl_surface *wayland_surface;
+#endif
+#ifdef VK_USE_PLATFORM_ANDROID_KHR  // TODO
+    ANativeWindow *window;
+#endif
+    AppInstance() {
+        PFN_vkEnumerateInstanceVersion enumerate_instance_version =
+            reinterpret_cast<PFN_vkEnumerateInstanceVersion>(vkGetInstanceProcAddr(nullptr, "vkEnumerateInstanceVersion"));
+
+        if (!enumerate_instance_version) {
+            instance_version = VK_API_VERSION_1_0;
+        } else {
+            const VkResult err = enumerate_instance_version(&instance_version);
+            if (err) ERR_EXIT(err);
+        }
+
+        vk_version = {VK_VERSION_MAJOR(instance_version), VK_VERSION_MINOR(instance_version), VK_VERSION_PATCH(VK_HEADER_VERSION)};
+
+        AppGetInstanceExtensions();
+
+        const VkDebugReportCallbackCreateInfoEXT dbg_info = {VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, nullptr,
+                                                             VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT,
+                                                             DbgCallback};
+
+        const VkApplicationInfo app_info = {
+            VK_STRUCTURE_TYPE_APPLICATION_INFO, nullptr, app_short_name, 1, nullptr, 0, VK_API_VERSION_1_0};
+
+        AppCompileInstanceExtensionsToEnable();
+
+        std::vector<const char *> inst_exts;
+        for (auto &ext : inst_extensions) inst_exts.push_back(ext.c_str());
+
+        const VkInstanceCreateInfo inst_info = {VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,  &dbg_info,       0, &app_info, 0, nullptr,
+                                                static_cast<uint32_t>(inst_exts.size()), inst_exts.data()};
+
+        VkResult err = vkCreateInstance(&inst_info, nullptr, &instance);
+        if (err == VK_ERROR_INCOMPATIBLE_DRIVER) {
+            std::cerr << "Cannot create Vulkan instance.\n";
+            std::cerr << "This problem is often caused by a faulty installation of the Vulkan driver or attempting to use a GPU "
+                         "that does not support Vulkan.\n";
+            ERR_EXIT(err);
+        } else if (err) {
+            ERR_EXIT(err);
+        }
+
+        AppLoadInstanceCommands();
+    }
+
+    ~AppInstance() { vkDestroyInstance(instance, nullptr); }
+
+    AppInstance(const AppInstance &) = delete;
+    const AppInstance &operator=(const AppInstance &) = delete;
+
+    bool CheckExtensionEnabled(std::string extension_to_check) {
+        for (auto &extension : inst_extensions) {
+            if (extension_to_check == extension) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    /* Gets a list of layer and instance extensions */
+    void AppGetInstanceExtensions() {
+        /* Scan layers */
+        std::vector<VkLayerProperties> global_layer_properties;
+
+        VkResult err;
+        uint32_t count = 0;
+        do {
+            err = vkEnumerateInstanceLayerProperties(&count, nullptr);
+            if (err) ERR_EXIT(err);
+
+            global_layer_properties.resize(count);
+
+            err = vkEnumerateInstanceLayerProperties(&count, global_layer_properties.data());
+        } while (err == VK_INCOMPLETE);
+        if (err) ERR_EXIT(err);
+
+        global_layers.resize(count);
+        assert(global_layer_properties.size() == global_layers.size());
+
+        for (size_t i = 0; i < global_layer_properties.size(); i++) {
+            global_layers[i].layer_properties = global_layer_properties[i];
+
+            global_layers[i].extension_properties = AppGetGlobalLayerExtensions(global_layer_properties[i].layerName);
+        }
+
+        // Collect global extensions
+        // Gets instance extensions, if no layer was specified in the first parameter
+        global_extensions = AppGetGlobalLayerExtensions(nullptr);
+    }
+    void AppCompileInstanceExtensionsToEnable() {
+        // Get all supported Instance extensions (excl. layer-provided ones)
+        for (auto &ext : global_extensions) {
+            inst_extensions.push_back(ext.extensionName);
+        }
+    }
+    void AppLoadInstanceCommands() {
+#define LOAD_INSTANCE_VK_CMD(cmd) cmd = reinterpret_cast<PFN_##cmd>(vkGetInstanceProcAddr(instance, #cmd))
+
+        LOAD_INSTANCE_VK_CMD(vkGetPhysicalDeviceSurfaceSupportKHR);
+        LOAD_INSTANCE_VK_CMD(vkGetPhysicalDeviceSurfaceCapabilitiesKHR);
+        LOAD_INSTANCE_VK_CMD(vkGetPhysicalDeviceSurfaceFormatsKHR);
+        LOAD_INSTANCE_VK_CMD(vkGetPhysicalDeviceSurfaceFormats2KHR);
+        LOAD_INSTANCE_VK_CMD(vkGetPhysicalDeviceSurfacePresentModesKHR);
+        LOAD_INSTANCE_VK_CMD(vkGetPhysicalDeviceProperties2KHR);
+        LOAD_INSTANCE_VK_CMD(vkGetPhysicalDeviceFormatProperties2KHR);
+        LOAD_INSTANCE_VK_CMD(vkGetPhysicalDeviceQueueFamilyProperties2KHR);
+        LOAD_INSTANCE_VK_CMD(vkGetPhysicalDeviceFeatures2KHR);
+        LOAD_INSTANCE_VK_CMD(vkGetPhysicalDeviceMemoryProperties2KHR);
+        LOAD_INSTANCE_VK_CMD(vkGetPhysicalDeviceSurfaceCapabilities2KHR);
+        LOAD_INSTANCE_VK_CMD(vkGetPhysicalDeviceSurfaceCapabilities2EXT);
+
+#undef LOAD_INSTANCE_VK_CMD
+    }
+
+    void AddSurfaceExtension(SurfaceExtension ext) { surface_extensions.push_back(ext); }
+
+    static std::vector<VkExtensionProperties> AppGetGlobalLayerExtensions(char *layer_name) {
+        std::vector<VkExtensionProperties> ext_props;
+        VkResult err;
+        uint32_t ext_count = 0;
+        do {
+            // gets the extension count if the last parameter is nullptr
+            err = vkEnumerateInstanceExtensionProperties(layer_name, &ext_count, nullptr);
+            if (err) ERR_EXIT(err);
+
+            ext_props.resize(ext_count);
+            // gets the extension properties if the last parameter is not nullptr
+            err = vkEnumerateInstanceExtensionProperties(layer_name, &ext_count, ext_props.data());
+        } while (err == VK_INCOMPLETE);
+        if (err) ERR_EXIT(err);
+        return ext_props;
+    }
+
+    std::vector<VkPhysicalDevice> FindPhysicalDevices() {
+        std::vector<VkPhysicalDevice> phys_devices;
+
+        VkResult err;
+        uint32_t gpu_count = 0;
+
+        /* repeat get until VK_INCOMPLETE goes away */
+        do {
+            err = vkEnumeratePhysicalDevices(instance, &gpu_count, nullptr);
+            if (err) ERR_EXIT(err);
+
+            phys_devices.resize(gpu_count);
+            err = vkEnumeratePhysicalDevices(instance, &gpu_count, phys_devices.data());
+            if (err) ERR_EXIT(err);
+            phys_devices.resize(gpu_count);
+        } while (err == VK_INCOMPLETE);
+
+        return phys_devices;
+    }
+};
+
+// --------- Platform Specific Presentation Calls --------- //
+
+//---------------------------Win32---------------------------
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+// MS-Windows event handling function:
+LRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam) {
+    return (CALL_PFN(DefWindowProcA)(hWnd, uMsg, wParam, lParam));
+}
+
+static void AppCreateWin32Window(AppInstance &inst) {
+    inst.h_instance = GetModuleHandle(nullptr);
+
+    WNDCLASSEX win_class;
+
+    // Initialize the window class structure:
+    win_class.cbSize = sizeof(WNDCLASSEX);
+    win_class.style = CS_HREDRAW | CS_VREDRAW;
+    win_class.lpfnWndProc = WndProc;
+    win_class.cbClsExtra = 0;
+    win_class.cbWndExtra = 0;
+    win_class.hInstance = inst.h_instance;
+    win_class.hIcon = CALL_PFN(LoadIconA)(nullptr, IDI_APPLICATION);
+    win_class.hCursor = LoadCursor(nullptr, IDC_ARROW);
+    win_class.hbrBackground = (HBRUSH)GetStockObject(WHITE_BRUSH);
+    win_class.lpszMenuName = nullptr;
+    win_class.lpszClassName = app_short_name;
+    win_class.hInstance = inst.h_instance;
+    win_class.hIconSm = CALL_PFN(LoadIconA)(nullptr, IDI_WINLOGO);
+    // Register window class:
+    if (!CALL_PFN(RegisterClassExA)(&win_class)) {
+        // It didn't work, so try to give a useful error:
+        fprintf(stderr, "Failed to register the window class!\n");
+        exit(1);
+    }
+    // Create window with the registered class:
+    RECT wr = {0, 0, inst.width, inst.height};
+    CALL_PFN(AdjustWindowRect)(&wr, WS_OVERLAPPEDWINDOW, FALSE);
+    inst.h_wnd = CALL_PFN(CreateWindowExA)(0,
+                                           app_short_name,  // class name
+                                           app_short_name,  // app name
+                                           // WS_VISIBLE | WS_SYSMENU |
+                                           WS_OVERLAPPEDWINDOW,  // window style
+                                           100, 100,             // x/y coords
+                                           wr.right - wr.left,   // width
+                                           wr.bottom - wr.top,   // height
+                                           nullptr,              // handle to parent
+                                           nullptr,              // handle to menu
+                                           inst.h_instance,      // hInstance
+                                           nullptr);             // no extra parameters
+    if (!inst.h_wnd) {
+        // It didn't work, so try to give a useful error:
+        fprintf(stderr, "Failed to create a window!\n");
+        exit(1);
+    }
+}
+
+static VkSurfaceKHR AppCreateWin32Surface(AppInstance &inst) {
+    VkWin32SurfaceCreateInfoKHR createInfo;
+    createInfo.sType = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR;
+    createInfo.pNext = nullptr;
+    createInfo.flags = 0;
+    createInfo.hinstance = inst.h_instance;
+    createInfo.hwnd = inst.h_wnd;
+
+    VkSurfaceKHR surface;
+    VkResult err = vkCreateWin32SurfaceKHR(inst.instance, &createInfo, nullptr, &surface);
+    if (err) ERR_EXIT(err);
+    return surface;
+}
+
+static void AppDestroyWin32Window(AppInstance &inst) { CALL_PFN(DestroyWindow)(inst.h_wnd); }
+#endif  // VK_USE_PLATFORM_WIN32_KHR
+//-----------------------------------------------------------
+
+#if defined(VK_USE_PLATFORM_XCB_KHR) || defined(VK_USE_PLATFORM_XLIB_KHR) || defined(VK_USE_PLATFORM_WIN32_KHR) ||      \
+    defined(VK_USE_PLATFORM_MACOS_MVK) || defined(VK_USE_PLATFORM_METAL_EXT) || defined(VK_USE_PLATFORM_WAYLAND_KHR) || \
+    defined(VK_USE_PLATFORM_ANDROID_KHR)
+static void AppDestroySurface(AppInstance &inst, VkSurfaceKHR surface) {  // same for all platforms
+    vkDestroySurfaceKHR(inst.instance, surface, nullptr);
+}
+#endif
+
+//----------------------------XCB----------------------------
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+static void AppCreateXcbWindow(AppInstance &inst) {
+    //--Init Connection--
+    const xcb_setup_t *setup;
+    xcb_screen_iterator_t iter;
+    int scr;
+
+    // API guarantees non-null xcb_connection
+    inst.xcb_connection = xcb_connect(nullptr, &scr);
+    int conn_error = xcb_connection_has_error(inst.xcb_connection);
+    if (conn_error) {
+        fprintf(stderr, "XCB failed to connect to the X server due to error:%d.\n", conn_error);
+        fflush(stderr);
+        xcb_disconnect(inst.xcb_connection);
+        inst.xcb_connection = nullptr;
+        return;
+    }
+
+    setup = xcb_get_setup(inst.xcb_connection);
+    iter = xcb_setup_roots_iterator(setup);
+    while (scr-- > 0) {
+        xcb_screen_next(&iter);
+    }
+
+    inst.xcb_screen = iter.data;
+    //-------------------
+
+    inst.xcb_window = xcb_generate_id(inst.xcb_connection);
+    xcb_create_window(inst.xcb_connection, XCB_COPY_FROM_PARENT, inst.xcb_window, inst.xcb_screen->root, 0, 0, inst.width,
+                      inst.height, 0, XCB_WINDOW_CLASS_INPUT_OUTPUT, inst.xcb_screen->root_visual, 0, nullptr);
+
+    xcb_intern_atom_cookie_t cookie = xcb_intern_atom(inst.xcb_connection, 1, 12, "WM_PROTOCOLS");
+    xcb_intern_atom_reply_t *reply = xcb_intern_atom_reply(inst.xcb_connection, cookie, 0);
+    free(reply);
+}
+
+static VkSurfaceKHR AppCreateXcbSurface(AppInstance &inst) {
+    if (!inst.xcb_connection) {
+        ERR_EXIT(VK_ERROR_INITIALIZATION_FAILED);
+    }
+
+    VkXcbSurfaceCreateInfoKHR xcb_createInfo;
+    xcb_createInfo.sType = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR;
+    xcb_createInfo.pNext = nullptr;
+    xcb_createInfo.flags = 0;
+    xcb_createInfo.connection = inst.xcb_connection;
+    xcb_createInfo.window = inst.xcb_window;
+
+    VkSurfaceKHR surface;
+    VkResult err = vkCreateXcbSurfaceKHR(inst.instance, &xcb_createInfo, nullptr, &surface);
+    if (err) ERR_EXIT(err);
+    return surface;
+}
+
+static void AppDestroyXcbWindow(AppInstance &inst) {
+    if (!inst.xcb_connection) {
+        return;  // Nothing to destroy
+    }
+
+    xcb_destroy_window(inst.xcb_connection, inst.xcb_window);
+    xcb_disconnect(inst.xcb_connection);
+}
+#endif  // VK_USE_PLATFORM_XCB_KHR
+//-----------------------------------------------------------
+
+//----------------------------XLib---------------------------
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+static void AppCreateXlibWindow(AppInstance &inst) {
+    long visualMask = VisualScreenMask;
+    int numberOfVisuals;
+
+    inst.xlib_display = XOpenDisplay(nullptr);
+    if (inst.xlib_display == nullptr) {
+        fprintf(stderr, "XLib failed to connect to the X server.\nExiting ...\n");
+        exit(1);
+    }
+
+    XVisualInfo vInfoTemplate = {};
+    vInfoTemplate.screen = DefaultScreen(inst.xlib_display);
+    XVisualInfo *visualInfo = XGetVisualInfo(inst.xlib_display, visualMask, &vInfoTemplate, &numberOfVisuals);
+    inst.xlib_window = XCreateWindow(inst.xlib_display, RootWindow(inst.xlib_display, vInfoTemplate.screen), 0, 0, inst.width,
+                                     inst.height, 0, visualInfo->depth, InputOutput, visualInfo->visual, 0, nullptr);
+
+    XSync(inst.xlib_display, false);
+    XFree(visualInfo);
+}
+
+static VkSurfaceKHR AppCreateXlibSurface(AppInstance &inst) {
+    VkXlibSurfaceCreateInfoKHR createInfo;
+    createInfo.sType = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR;
+    createInfo.pNext = nullptr;
+    createInfo.flags = 0;
+    createInfo.dpy = inst.xlib_display;
+    createInfo.window = inst.xlib_window;
+
+    VkSurfaceKHR surface;
+    VkResult err = vkCreateXlibSurfaceKHR(inst.instance, &createInfo, nullptr, &surface);
+    if (err) ERR_EXIT(err);
+    return surface;
+}
+
+static void AppDestroyXlibWindow(AppInstance &inst) {
+    XDestroyWindow(inst.xlib_display, inst.xlib_window);
+    XCloseDisplay(inst.xlib_display);
+}
+#endif  // VK_USE_PLATFORM_XLIB_KHR
+//-----------------------------------------------------------
+
+//------------------------MACOS_MVK--------------------------
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+static void AppCreateMacOSWindow(AppInstance &inst) {
+    inst.macos_window = CreateMetalView(inst.width, inst.height);
+    if (inst.macos_window == nullptr) {
+        fprintf(stderr, "Could not create a native Metal view.\nExiting...\n");
+        exit(1);
+    }
+}
+
+static VkSurfaceKHR AppCreateMacOSSurface(AppInstance &inst) {
+    VkMacOSSurfaceCreateInfoMVK createInfo;
+    createInfo.sType = VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK;
+    createInfo.pNext = nullptr;
+    createInfo.flags = 0;
+    createInfo.pView = inst.macos_window;
+
+    VkSurfaceKHR surface;
+    VkResult err = vkCreateMacOSSurfaceMVK(inst.instance, &createInfo, nullptr, &surface);
+    if (err) ERR_EXIT(err);
+    return surface;
+}
+
+static void AppDestroyMacOSWindow(AppInstance &inst) { DestroyMetalView(inst.macos_window); }
+#endif  // VK_USE_PLATFORM_MACOS_MVK
+//-----------------------------------------------------------
+
+//------------------------METAL_EXT--------------------------
+#ifdef VK_USE_PLATFORM_METAL_EXT
+static void AppCreateMetalWindow(AppInstance &inst) {
+    inst.metal_window = CreateMetalView(inst.width, inst.height);
+    if (inst.metal_window == nullptr) {
+        fprintf(stderr, "Could not create a native Metal view.\nExiting...\n");
+        exit(1);
+    }
+}
+
+static VkSurfaceKHR AppCreateMetalSurface(AppInstance &inst) {
+    VkMetalSurfaceCreateInfoEXT createInfo;
+    createInfo.sType = VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT;
+    createInfo.pNext = nullptr;
+    createInfo.flags = 0;
+    createInfo.pLayer = static_cast<CAMetalLayer *>(GetCAMetalLayerFromMetalView(inst.metal_window));
+
+    VkSurfaceKHR surface;
+    VkResult err = vkCreateMetalSurfaceEXT(inst.instance, &createInfo, nullptr, &surface);
+    if (err) ERR_EXIT(err);
+    return surface;
+}
+
+static void AppDestroyMetalWindow(AppInstance &inst) { DestroyMetalView(inst.metal_window); }
+#endif  // VK_USE_PLATFORM_METAL_EXT
+//-----------------------------------------------------------
+
+//-------------------------WAYLAND---------------------------
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+static void wayland_registry_global(void *data, struct wl_registry *registry, uint32_t id, const char *interface,
+                                    uint32_t version) {
+    AppInstance &inst = *static_cast<AppInstance *>(data);
+    if (strcmp(interface, "wl_compositor") == 0) {
+        struct wl_compositor *compositor = (struct wl_compositor *)wl_registry_bind(registry, id, &wl_compositor_interface, 1);
+        inst.wayland_surface = wl_compositor_create_surface(compositor);
+    }
+}
+static void wayland_registry_global_remove(void *data, struct wl_registry *registry, uint32_t id) {}
+static const struct wl_registry_listener wayland_registry_listener = {wayland_registry_global, wayland_registry_global_remove};
+
+static void AppCreateWaylandWindow(AppInstance &inst) {
+    inst.wayland_display = wl_display_connect(nullptr);
+    struct wl_registry *registry = wl_display_get_registry(inst.wayland_display);
+    wl_registry_add_listener(registry, &wayland_registry_listener, static_cast<void *>(&inst));
+    wl_display_roundtrip(inst.wayland_display);
+    wl_registry_destroy(registry);
+}
+
+static VkSurfaceKHR AppCreateWaylandSurface(AppInstance &inst) {
+    VkWaylandSurfaceCreateInfoKHR createInfo;
+    createInfo.sType = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR;
+    createInfo.pNext = nullptr;
+    createInfo.flags = 0;
+    createInfo.display = inst.wayland_display;
+    createInfo.surface = inst.wayland_surface;
+
+    VkSurfaceKHR surface;
+    VkResult err = vkCreateWaylandSurfaceKHR(inst.instance, &createInfo, nullptr, &surface);
+    if (err) ERR_EXIT(err);
+    return surface;
+}
+
+static void AppDestroyWaylandWindow(AppInstance &inst) { wl_display_disconnect(inst.wayland_display); }
+#endif  // VK_USE_PLATFORM_WAYLAND_KHR
+//-----------------------------------------------------------
+
+//-------------------------ANDROID---------------------------
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+static void AppCreateAndroidWindow(AppInstance &inst) {}
+static VkSurfaceKHR AppCreateAndroidSurface(AppInstance &inst) {
+    VkAndroidSurfaceCreateInfoKHR createInfo;
+    createInfo.sType = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR;
+    createInfo.pNext = nullptr;
+    createInfo.flags = 0;
+    createInfo.window = (struct ANativeWindow *)(inst.window);
+
+    VkSurfaceKHR surface;
+    VkResult err = vkCreateAndroidSurfaceKHR(inst.instance, &createInfo, nullptr, &surface);
+    if (err) ERR_EXIT(err);
+    return surface;
+}
+static void AppDestroyAndroidWindow(AppInstance &inst) {}
+#endif
+//-----------------------------------------------------------
+
+// ------------ Setup Windows ------------- //
+
+void SetupWindowExtensions(AppInstance &inst) {
+#if defined(VK_USE_PLATFORM_XCB_KHR) || defined(VK_USE_PLATFORM_XLIB_KHR)
+    bool has_display = true;
+    const char *display_var = getenv("DISPLAY");
+    if (display_var == nullptr || strlen(display_var) == 0) {
+        fprintf(stderr, "'DISPLAY' environment variable not set... skipping surface info\n");
+        fflush(stderr);
+        has_display = false;
+    }
+#endif
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    wl_display *wayland_display = wl_display_connect(nullptr);
+    bool has_wayland_display = false;
+    if (wayland_display != nullptr) {
+        wl_display_disconnect(wayland_display);
+        has_wayland_display = true;
+    }
+#endif
+
+//--WIN32--
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    SurfaceExtension surface_ext_win32;
+    if (inst.CheckExtensionEnabled(VK_KHR_WIN32_SURFACE_EXTENSION_NAME)) {
+        surface_ext_win32.name = VK_KHR_WIN32_SURFACE_EXTENSION_NAME;
+        surface_ext_win32.create_window = AppCreateWin32Window;
+        surface_ext_win32.create_surface = AppCreateWin32Surface;
+        surface_ext_win32.destroy_window = AppDestroyWin32Window;
+
+        inst.AddSurfaceExtension(surface_ext_win32);
+    }
+#endif
+//--XCB--
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    SurfaceExtension surface_ext_xcb;
+    if (inst.CheckExtensionEnabled(VK_KHR_XCB_SURFACE_EXTENSION_NAME)) {
+        surface_ext_xcb.name = VK_KHR_XCB_SURFACE_EXTENSION_NAME;
+        surface_ext_xcb.create_window = AppCreateXcbWindow;
+        surface_ext_xcb.create_surface = AppCreateXcbSurface;
+        surface_ext_xcb.destroy_window = AppDestroyXcbWindow;
+        if (has_display) {
+            inst.AddSurfaceExtension(surface_ext_xcb);
+        }
+    }
+#endif
+//--XLIB--
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    SurfaceExtension surface_ext_xlib;
+    if (inst.CheckExtensionEnabled(VK_KHR_XLIB_SURFACE_EXTENSION_NAME)) {
+        surface_ext_xlib.name = VK_KHR_XLIB_SURFACE_EXTENSION_NAME;
+        surface_ext_xlib.create_window = AppCreateXlibWindow;
+        surface_ext_xlib.create_surface = AppCreateXlibSurface;
+        surface_ext_xlib.destroy_window = AppDestroyXlibWindow;
+        if (has_display) {
+            inst.AddSurfaceExtension(surface_ext_xlib);
+        }
+    }
+#endif
+//--MACOS--
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+    SurfaceExtension surface_ext_macos;
+    if (inst.CheckExtensionEnabled(VK_MVK_MACOS_SURFACE_EXTENSION_NAME)) {
+        surface_ext_macos.name = VK_MVK_MACOS_SURFACE_EXTENSION_NAME;
+        surface_ext_macos.create_window = AppCreateMacOSWindow;
+        surface_ext_macos.create_surface = AppCreateMacOSSurface;
+        surface_ext_macos.destroy_window = AppDestroyMacOSWindow;
+
+        inst.AddSurfaceExtension(surface_ext_macos);
+    }
+#endif
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+    SurfaceExtension surface_ext_metal;
+    if (inst.CheckExtensionEnabled(VK_EXT_METAL_SURFACE_EXTENSION_NAME)) {
+        surface_ext_metal.name = VK_EXT_METAL_SURFACE_EXTENSION_NAME;
+        surface_ext_metal.create_window = AppCreateMetalWindow;
+        surface_ext_metal.create_surface = AppCreateMetalSurface;
+        surface_ext_metal.destroy_window = AppDestroyMetalWindow;
+
+        inst.AddSurfaceExtension(surface_ext_metal);
+    }
+#endif
+//--WAYLAND--
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    SurfaceExtension surface_ext_wayland;
+    if (inst.CheckExtensionEnabled(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME)) {
+        surface_ext_wayland.name = VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME;
+        surface_ext_wayland.create_window = AppCreateWaylandWindow;
+        surface_ext_wayland.create_surface = AppCreateWaylandSurface;
+        surface_ext_wayland.destroy_window = AppDestroyWaylandWindow;
+        if (has_wayland_display) {
+            inst.AddSurfaceExtension(surface_ext_wayland);
+        }
+    }
+#endif
+//--ANDROID--
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    SurfaceExtension surface_ext_android;
+    if (inst.CheckExtensionEnabled(VK_ANDROID_SURFACE_EXTENSION_NAME)) {
+        surface_ext_android.name = VK_ANDROID_SURFACE_EXTENSION_NAME;
+        surface_ext_android.create_window = AppCreateAndroidWindow;
+        surface_ext_android.create_surface = AppCreateAndroidSurface;
+        surface_ext_android.destroy_window = AppDestroyAndroidWindow;
+
+        inst.AddSurfaceExtension(surface_ext_android);
+    }
+#endif
+}
+
+// ---------- Surfaces -------------- //
+
+class AppSurface {
+   public:
+    AppInstance &inst;
+    SurfaceExtension surface_extension;
+
+    std::vector<VkPresentModeKHR> surf_present_modes;
+
+    std::vector<VkSurfaceFormatKHR> surf_formats;
+    std::vector<VkSurfaceFormat2KHR> surf_formats2;
+
+    VkSurfaceCapabilitiesKHR surface_capabilities;
+    VkSurfaceCapabilities2KHR surface_capabilities2_khr;
+    VkSurfaceCapabilities2EXT surface_capabilities2_ext;
+
+    AppSurface(AppInstance &inst, VkPhysicalDevice phys_device, SurfaceExtension surface_extension,
+               std::vector<pNextChainBuildingBlockInfo> &sur_extension_pNextChain)
+        : inst(inst), surface_extension(surface_extension) {
+        uint32_t present_mode_count = 0;
+        VkResult error =
+            inst.vkGetPhysicalDeviceSurfacePresentModesKHR(phys_device, surface_extension.surface, &present_mode_count, nullptr);
+        if (error) ERR_EXIT(error);
+
+        surf_present_modes.resize(present_mode_count);
+        error = inst.vkGetPhysicalDeviceSurfacePresentModesKHR(phys_device, surface_extension.surface, &present_mode_count,
+                                                               surf_present_modes.data());
+        if (error) ERR_EXIT(error);
+
+        const VkPhysicalDeviceSurfaceInfo2KHR surface_info2 = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR, nullptr,
+                                                               surface_extension.surface};
+
+        uint32_t format_count = 0;
+        if (inst.CheckExtensionEnabled(VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME)) {
+            VkResult err = inst.vkGetPhysicalDeviceSurfaceFormats2KHR(phys_device, &surface_info2, &format_count, nullptr);
+            if (err) ERR_EXIT(err);
+            surf_formats2.resize(format_count);
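+            // Each VkSurfaceFormat2KHR element must have its sType set (and pNext cleared) before the query fills it in.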
+            for (uint32_t i = 0; i < format_count; ++i) {
+                surf_formats2[i].sType = VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR;
+                surf_formats2[i].pNext = nullptr;
+            }
+            err = inst.vkGetPhysicalDeviceSurfaceFormats2KHR(phys_device, &surface_info2, &format_count, surf_formats2.data());
+            if (err) ERR_EXIT(err);
+        } else {
+            VkResult err =
+                inst.vkGetPhysicalDeviceSurfaceFormatsKHR(phys_device, surface_extension.surface, &format_count, nullptr);
+            if (err) ERR_EXIT(err);
+            surf_formats.resize(format_count);
+            err = inst.vkGetPhysicalDeviceSurfaceFormatsKHR(phys_device, surface_extension.surface, &format_count,
+                                                            surf_formats.data());
+            if (err) ERR_EXIT(err);
+        }
+
+        if (inst.CheckExtensionEnabled(VK_KHR_SURFACE_EXTENSION_NAME)) {
+            VkResult err =
+                inst.vkGetPhysicalDeviceSurfaceCapabilitiesKHR(phys_device, surface_extension.surface, &surface_capabilities);
+            if (err) ERR_EXIT(err);
+        }
+
+        if (inst.CheckExtensionEnabled(VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME)) {
+            surface_capabilities2_khr.sType = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR;
+            buildpNextChain((VkStructureHeader *)&surface_capabilities2_khr, sur_extension_pNextChain);
+
+            VkPhysicalDeviceSurfaceInfo2KHR surface_info;
+            surface_info.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR;
+            surface_info.pNext = nullptr;
+            surface_info.surface = surface_extension.surface;
+
+            VkResult err = inst.vkGetPhysicalDeviceSurfaceCapabilities2KHR(phys_device, &surface_info, &surface_capabilities2_khr);
+            if (err) ERR_EXIT(err);
+        }
+
+        if (inst.CheckExtensionEnabled(VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME)) {
+            surface_capabilities2_ext.sType = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT;
+            surface_capabilities2_ext.pNext = nullptr;
+            VkResult err =
+                inst.vkGetPhysicalDeviceSurfaceCapabilities2EXT(phys_device, surface_extension.surface, &surface_capabilities2_ext);
+            if (err) ERR_EXIT(err);
+        }
+    }
+
+    ~AppSurface() {
+        if (inst.CheckExtensionEnabled(VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME)) {
+            freepNextChain(static_cast<VkStructureHeader *>(surface_capabilities2_khr.pNext));
+        }
+    }
+
+    AppSurface(const AppSurface &) = delete;
+    const AppSurface &operator=(const AppSurface &) = delete;
+};
+
+// -------------------- Device Groups ------------------------//
+
+std::vector<VkPhysicalDeviceGroupProperties> GetGroups(AppInstance &inst) {
+    if (inst.CheckExtensionEnabled(VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME)) {
+        PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR =
+            reinterpret_cast<PFN_vkEnumeratePhysicalDeviceGroupsKHR>(
+                vkGetInstanceProcAddr(inst.instance, "vkEnumeratePhysicalDeviceGroupsKHR"));
+
+        std::vector<VkPhysicalDeviceGroupProperties> groups;
+        uint32_t group_count;
+        VkResult err;
+        do {
+            err = vkEnumeratePhysicalDeviceGroupsKHR(inst.instance, &group_count, NULL);
+            if (err != VK_SUCCESS && err != VK_INCOMPLETE) ERR_EXIT(err);
+            groups.resize(group_count);
+
+            err = vkEnumeratePhysicalDeviceGroupsKHR(inst.instance, &group_count, groups.data());
+            if (err != VK_SUCCESS && err != VK_INCOMPLETE) ERR_EXIT(err);
+        } while (err == VK_INCOMPLETE);
+        return groups;
+    }
+    return {};
+}
+
+std::vector<VkPhysicalDeviceProperties> GetGroupProps(VkPhysicalDeviceGroupProperties group) {
+    std::vector<VkPhysicalDeviceProperties> props(group.physicalDeviceCount);
+
+    for (uint32_t i = 0; i < group.physicalDeviceCount; ++i) {
+        vkGetPhysicalDeviceProperties(group.physicalDevices[i], &props[i]);
+    }
+
+    return props;
+}
+
+// The bool of the pair returns true if the extension VK_KHR_device_group is present
+std::pair<bool, VkDeviceGroupPresentCapabilitiesKHR> GetGroupCapabilities(AppInstance &inst,
+                                                                          VkPhysicalDeviceGroupProperties group) {
+    // Build create info for logical device made from all physical devices in this group.
+    std::vector<std::string> extensions_list = {VK_KHR_SWAPCHAIN_EXTENSION_NAME, VK_KHR_DEVICE_GROUP_EXTENSION_NAME};
+    VkDeviceGroupDeviceCreateInfoKHR dg_ci = {VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR, nullptr,
+                                              group.physicalDeviceCount, group.physicalDevices};
+
+    float queue_priority = 1.0f;
+
+    auto ext_list = get_c_str_array(extensions_list);
+
+    VkDeviceQueueCreateInfo q_ci = {VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, nullptr, 0, 0, 1, &queue_priority};
+    VkDeviceCreateInfo device_ci = {VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,   &dg_ci,         0, 1, &q_ci, 0, nullptr,
+                                    static_cast<uint32_t>(ext_list.size()), ext_list.data()};
+
+    VkDevice logical_device = VK_NULL_HANDLE;
+
+    VkResult err = vkCreateDevice(group.physicalDevices[0], &device_ci, nullptr, &logical_device);
+    if (err != VK_SUCCESS && err != VK_ERROR_EXTENSION_NOT_PRESENT) ERR_EXIT(err);
+
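+    // A missing extension is not fatal here; report it via the bool of the returned pair instead of exiting.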
+    if (err == VK_ERROR_EXTENSION_NOT_PRESENT) {
+        VkDeviceGroupPresentCapabilitiesKHR group_capabilities = {VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR, nullptr};
+        vkDestroyDevice(logical_device, nullptr);
+        return std::pair<bool, VkDeviceGroupPresentCapabilitiesKHR>(false, group_capabilities);
+    }
+
+    VkDeviceGroupPresentCapabilitiesKHR group_capabilities = {VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR, nullptr};
+
+    // If the KHR_device_group extension is present, write the capabilities of the logical device into a struct for later
+    // output to user.
+    PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR =
+        reinterpret_cast<PFN_vkGetDeviceGroupPresentCapabilitiesKHR>(
+            vkGetInstanceProcAddr(inst.instance, "vkGetDeviceGroupPresentCapabilitiesKHR"));
+    err = vkGetDeviceGroupPresentCapabilitiesKHR(logical_device, &group_capabilities);
+    if (err) ERR_EXIT(err);
+
+    vkDestroyDevice(logical_device, nullptr);
+
+    return std::pair<bool, VkDeviceGroupPresentCapabilitiesKHR>(true, group_capabilities);
+}
+
+// -------------------- Device Setup ------------------- //
+
+struct MemImageSupport {
+    bool regular_supported, sparse_supported, transient_supported;
+    VkFormat format;
+    uint32_t regular_memtypes, sparse_memtypes, transient_memtypes;
+};
+
+struct MemResSupport {
+    std::array<std::array<MemImageSupport, 8>, 2> image;
+    // TODO: buffers
+};
+
+struct pNextChainInfos {
+    std::vector<pNextChainBuildingBlockInfo> phys_device_props2;
+    std::vector<pNextChainBuildingBlockInfo> phys_device_mem_props2;
+    std::vector<pNextChainBuildingBlockInfo> phys_device_features2;
+    std::vector<pNextChainBuildingBlockInfo> surface_capabilities2;
+    std::vector<pNextChainBuildingBlockInfo> format_properties2;
+};
+
+struct FormatRange {
+    // the Vulkan standard version that supports this format range, or 0 if non-standard
+    uint32_t minimum_instance_version;
+
+    // The name of the extension that supports this format range, or NULL if the range
+    // is only part of the standard
+    const char *extension_name;
+
+    // The first and last supported formats within this range.
+    VkFormat first_format;
+    VkFormat last_format;
+};
+
+struct AppGpu {
+    AppInstance &inst;
+    uint32_t id;
+    VkPhysicalDevice phys_device;
+
+    VkPhysicalDeviceProperties props;
+    VkPhysicalDeviceProperties2KHR props2;
+
+    uint32_t queue_count;
+    std::vector<VkQueueFamilyProperties> queue_props;
+    std::vector<VkQueueFamilyProperties2KHR> queue_props2;
+
+    VkPhysicalDeviceMemoryProperties memory_props;
+    VkPhysicalDeviceMemoryProperties2KHR memory_props2;
+
+    MemResSupport mem_type_res_support;
+
+    VkPhysicalDeviceFeatures features;
+    VkPhysicalDeviceFeatures2KHR features2;
+    VkPhysicalDevice limits;
+
+    std::vector<VkExtensionProperties> device_extensions;
+
+    VkDevice dev;
+    VkPhysicalDeviceFeatures enabled_features;
+
+    std::array<VkDeviceSize, VK_MAX_MEMORY_HEAPS> heapBudget;
+    std::array<VkDeviceSize, VK_MAX_MEMORY_HEAPS> heapUsage;
+
+    std::vector<FormatRange> supported_format_ranges;
+
+    AppGpu(AppInstance &inst, uint32_t id, VkPhysicalDevice phys_device, pNextChainInfos chainInfos)
+        : inst(inst), id(id), phys_device(phys_device) {
+        vkGetPhysicalDeviceProperties(phys_device, &props);
+
+        if (inst.CheckExtensionEnabled(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+            props2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR;
+            buildpNextChain((VkStructureHeader *)&props2, chainInfos.phys_device_props2);
+
+            inst.vkGetPhysicalDeviceProperties2KHR(phys_device, &props2);
+        }
+
+        /* get queue count */
+        vkGetPhysicalDeviceQueueFamilyProperties(phys_device, &queue_count, nullptr);
+
+        queue_props.resize(queue_count);
+
+        vkGetPhysicalDeviceQueueFamilyProperties(phys_device, &queue_count, queue_props.data());
+
+        if (inst.CheckExtensionEnabled(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+            queue_props2.resize(queue_count);
+
+            for (size_t i = 0; i < queue_count; ++i) {
+                queue_props2[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR;
+                queue_props2[i].pNext = nullptr;
+            }
+
+            inst.vkGetPhysicalDeviceQueueFamilyProperties2KHR(phys_device, &queue_count, queue_props2.data());
+        }
+
+        vkGetPhysicalDeviceMemoryProperties(phys_device, &memory_props);
+
+        vkGetPhysicalDeviceFeatures(phys_device, &features);
+
+        if (inst.CheckExtensionEnabled(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+            memory_props2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR;
+            buildpNextChain((VkStructureHeader *)&memory_props2, chainInfos.phys_device_mem_props2);
+
+            inst.vkGetPhysicalDeviceMemoryProperties2KHR(phys_device, &memory_props2);
+
+            features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR;
+            buildpNextChain((VkStructureHeader *)&features2, chainInfos.phys_device_features2);
+
+            inst.vkGetPhysicalDeviceFeatures2KHR(phys_device, &features2);
+        }
+
+        device_extensions = AppGetPhysicalDeviceLayerExtensions(nullptr);
+
+        const float queue_priority = 1.0f;
+        const VkDeviceQueueCreateInfo q_ci = {VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
+                                              nullptr,
+                                              0,
+                                              0,  // just pick the first one and hope for the best
+                                              1,
+                                              &queue_priority};
+        enabled_features = VkPhysicalDeviceFeatures{0};
+        const VkDeviceCreateInfo device_ci = {
+            VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, nullptr, 0, 1, &q_ci, 0, nullptr, 0, nullptr, &enabled_features};
+
+        VkResult err = vkCreateDevice(phys_device, &device_ci, nullptr, &dev);
+        if (err) ERR_EXIT(err);
+
+        const VkFormat color_format = VK_FORMAT_R8G8B8A8_UNORM;
+        const std::vector<VkFormat> formats = {
+            color_format,      VK_FORMAT_D16_UNORM,         VK_FORMAT_X8_D24_UNORM_PACK32, VK_FORMAT_D32_SFLOAT,
+            VK_FORMAT_S8_UINT, VK_FORMAT_D16_UNORM_S8_UINT, VK_FORMAT_D24_UNORM_S8_UINT,   VK_FORMAT_D32_SFLOAT_S8_UINT};
+        assert(mem_type_res_support.image[0].size() == formats.size());
+        const std::array<VkImageUsageFlags, 2> usages = {0, VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT};
+        const std::array<VkImageCreateFlags, 2> flagss = {0, VK_IMAGE_CREATE_SPARSE_BINDING_BIT};
+
+        for (size_t fmt_i = 0; fmt_i < formats.size(); ++fmt_i) {
+            // only iterate over VK_IMAGE_TILING_OPTIMAL and VK_IMAGE_TILING_LINEAR (0 and 1)
+            for (size_t tiling = VK_IMAGE_TILING_OPTIMAL; tiling <= VK_IMAGE_TILING_LINEAR; ++tiling) {
+                mem_type_res_support.image[tiling][fmt_i].format = formats[fmt_i];
+                mem_type_res_support.image[tiling][fmt_i].regular_supported = true;
+                mem_type_res_support.image[tiling][fmt_i].sparse_supported = true;
+                mem_type_res_support.image[tiling][fmt_i].transient_supported = true;
+
+                VkFormatProperties fmt_props;
+                vkGetPhysicalDeviceFormatProperties(phys_device, formats[fmt_i], &fmt_props);
+                if ((tiling == VK_IMAGE_TILING_OPTIMAL && fmt_props.optimalTilingFeatures == 0) ||
+                    (tiling == VK_IMAGE_TILING_LINEAR && fmt_props.linearTilingFeatures == 0)) {
+                    mem_type_res_support.image[tiling][fmt_i].regular_supported = false;
+                    mem_type_res_support.image[tiling][fmt_i].sparse_supported = false;
+                    mem_type_res_support.image[tiling][fmt_i].transient_supported = false;
+                    continue;
+                }
+
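+                // Probe each usage/flag combination below and record, per tiling and format, whether regular,
+                // sparse-bound, and transient images are supported and which memory types they can use.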
+                for (size_t u_i = 0; u_i < usages.size(); ++u_i) {
+                    for (size_t flg_i = 0; flg_i < flagss.size(); ++flg_i) {
+                        VkImageCreateInfo image_ci = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+                                                      nullptr,
+                                                      flagss[flg_i],
+                                                      VK_IMAGE_TYPE_2D,
+                                                      formats[fmt_i],
+                                                      {8, 8, 1},
+                                                      1,
+                                                      1,
+                                                      VK_SAMPLE_COUNT_1_BIT,
+                                                      static_cast<VkImageTiling>(tiling),
+                                                      usages[u_i],
+                                                      VK_SHARING_MODE_EXCLUSIVE,
+                                                      0,
+                                                      nullptr,
+                                                      VK_IMAGE_LAYOUT_UNDEFINED};
+
+                        if ((image_ci.flags & VK_IMAGE_CREATE_SPARSE_BINDING_BIT) &&
+                            (image_ci.usage & VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT)) {
+                            continue;
+                        }
+
+                        if (image_ci.usage == 0 || (image_ci.usage & VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT)) {
+                            if (image_ci.format == color_format)
+                                image_ci.usage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+                            else
+                                image_ci.usage |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
+                        }
+
+                        if (!enabled_features.sparseBinding && (image_ci.flags & VK_IMAGE_CREATE_SPARSE_BINDING_BIT)) {
+                            mem_type_res_support.image[tiling][fmt_i].sparse_supported = false;
+                            continue;
+                        }
+
+                        VkImageFormatProperties img_props;
+                        err = vkGetPhysicalDeviceImageFormatProperties(phys_device, image_ci.format, image_ci.imageType,
+                                                                       image_ci.tiling, image_ci.usage, image_ci.flags, &img_props);
+
+                        uint32_t *memtypes;
+                        bool *support;
+
+                        if (image_ci.flags == 0 && !(image_ci.usage & VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT)) {
+                            memtypes = &mem_type_res_support.image[tiling][fmt_i].regular_memtypes;
+                            support = &mem_type_res_support.image[tiling][fmt_i].regular_supported;
+                        } else if ((image_ci.flags & VK_IMAGE_CREATE_SPARSE_BINDING_BIT) &&
+                                   !(image_ci.usage & VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT)) {
+                            memtypes = &mem_type_res_support.image[tiling][fmt_i].sparse_memtypes;
+                            support = &mem_type_res_support.image[tiling][fmt_i].sparse_supported;
+                        } else if (image_ci.flags == 0 && (image_ci.usage & VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT)) {
+                            memtypes = &mem_type_res_support.image[tiling][fmt_i].transient_memtypes;
+                            support = &mem_type_res_support.image[tiling][fmt_i].transient_supported;
+                        } else {
+                            assert(false);
+                            return;
+                        }
+
+                        if (err == VK_ERROR_FORMAT_NOT_SUPPORTED) {
+                            *support = false;
+                        } else {
+                            if (err) ERR_EXIT(err);
+
+                            VkImage dummy_img;
+                            err = vkCreateImage(dev, &image_ci, nullptr, &dummy_img);
+                            if (err) ERR_EXIT(err);
+
+                            VkMemoryRequirements mem_req;
+                            vkGetImageMemoryRequirements(dev, dummy_img, &mem_req);
+                            *memtypes = mem_req.memoryTypeBits;
+
+                            vkDestroyImage(dev, dummy_img, nullptr);
+                        }
+                    }
+                }
+            }
+        }
+
+        // Memory //
+
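+        // Walk the pNext chain returned by vkGetPhysicalDeviceMemoryProperties2KHR and, if the memory budget
+        // extension is present, copy the per-heap budget and usage values for later reporting.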
+        struct VkStructureHeader *structure = NULL;
+        if (inst.CheckExtensionEnabled(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+            structure = (struct VkStructureHeader *)memory_props2.pNext;
+
+            while (structure) {
+                if (structure->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT &&
+                    CheckPhysicalDeviceExtensionIncluded(VK_EXT_MEMORY_BUDGET_EXTENSION_NAME)) {
+                    VkPhysicalDeviceMemoryBudgetPropertiesEXT *mem_budget_props =
+                        (VkPhysicalDeviceMemoryBudgetPropertiesEXT *)structure;
+                    for (uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; i++) {
+                        heapBudget[i] = mem_budget_props->heapBudget[i];
+                        heapUsage[i] = mem_budget_props->heapUsage[i];
+                    }
+                }
+
+                structure = (struct VkStructureHeader *)structure->pNext;
+            }
+        }
+        // TODO buffer - memory type compatibility
+
+        supported_format_ranges = {
+            {
+                // Standard formats in Vulkan 1.0
+                VK_MAKE_VERSION(1, 0, 0),
+                NULL,
+                VK_FORMAT_BEGIN_RANGE,
+                VK_FORMAT_END_RANGE,
+            },
+            {
+                // YCBCR extension, standard in Vulkan 1.1
+                VK_MAKE_VERSION(1, 1, 0),
+                VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME,
+                VK_FORMAT_G8B8G8R8_422_UNORM,
+                VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM,
+            },
+            {
+                // PVRTC extension, not standardized
+                0,
+                VK_IMG_FORMAT_PVRTC_EXTENSION_NAME,
+                VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG,
+                VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG,
+            },
+        };
+    }
+    ~AppGpu() {
+        vkDestroyDevice(dev, nullptr);
+
+        if (inst.CheckExtensionEnabled(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+            freepNextChain(static_cast<VkStructureHeader *>(features2.pNext));
+            freepNextChain(static_cast<VkStructureHeader *>(props2.pNext));
+            freepNextChain(static_cast<VkStructureHeader *>(memory_props2.pNext));
+        }
+    }
+
+    AppGpu(const AppGpu &) = delete;
+    const AppGpu &operator=(const AppGpu &) = delete;
+
+    bool CheckPhysicalDeviceExtensionIncluded(std::string extension_to_check) {
+        for (auto &extension : device_extensions) {
+            if (extension_to_check == std::string(extension.extensionName)) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    std::vector<VkExtensionProperties> AppGetPhysicalDeviceLayerExtensions(char *layer_name) {
+        std::vector<VkExtensionProperties> extension_properties;
+
+        VkResult err;
+        uint32_t ext_count = 0;
+
+        /* repeat get until VK_INCOMPLETE goes away */
+        do {
+            err = vkEnumerateDeviceExtensionProperties(phys_device, layer_name, &ext_count, nullptr);
+            if (err) ERR_EXIT(err);
+
+            extension_properties.resize(ext_count);
+            err = vkEnumerateDeviceExtensionProperties(phys_device, layer_name, &ext_count, extension_properties.data());
+            if (err) ERR_EXIT(err);
+            extension_properties.resize(ext_count);
+        } while (err == VK_INCOMPLETE);
+
+        return extension_properties;
+    }
+
+    // Helper function to determine whether a format range is currently supported.
+    bool FormatRangeSupported(FormatRange &format_range) {
+        // True if standard and supported by both this instance and this GPU
+        if (format_range.minimum_instance_version > 0 && inst.instance_version >= format_range.minimum_instance_version &&
+            props.apiVersion >= format_range.minimum_instance_version) {
+            return true;
+        }
+
+        // True if this extension is present
+        if (format_range.extension_name != nullptr) {
+            return inst.CheckExtensionEnabled(format_range.extension_name);
+        }
+
+        // Otherwise, not supported.
+        return false;
+    }
+
+    VkPhysicalDeviceProperties GetDeviceProperties() {
+        if (inst.CheckExtensionEnabled(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+            return props2.properties;
+        } else {
+            return props;
+        }
+    }
+};
+struct AppQueueFamilyProperties {
+    VkQueueFamilyProperties props;
+    uint32_t queue_index;
+    bool is_present_platform_agnostic = true;
+    VkBool32 platforms_support_present = VK_FALSE;
+
+    AppQueueFamilyProperties(AppGpu &gpu, uint32_t queue_index) : queue_index(queue_index) {
+        if (gpu.inst.CheckExtensionEnabled(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+            props = gpu.queue_props2[queue_index].queueFamilyProperties;
+        } else {
+            props = gpu.queue_props[queue_index];
+        }
+
+        for (auto &surface_ext : gpu.inst.surface_extensions) {
+            VkResult err = vkGetPhysicalDeviceSurfaceSupportKHR(gpu.phys_device, queue_index, surface_ext.surface,
+                                                                &surface_ext.supports_present);
+            if (err) ERR_EXIT(err);
+
+            const bool first = (surface_ext == gpu.inst.surface_extensions.at(0));
+            if (!first && platforms_support_present != surface_ext.supports_present) {
+                is_present_platform_agnostic = false;
+
+                platforms_support_present = surface_ext.supports_present;
+            }
+        }
+    }
+};
+
+// --------- Format Properties ----------//
+
+struct PropFlags {
+    uint32_t linear;
+    uint32_t optimal;
+    uint32_t buffer;
+
+    bool operator==(const PropFlags &other) const {
+        return (linear == other.linear && optimal == other.optimal && buffer == other.buffer);
+    }
+};
+
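+// Hash specialization so PropFlags can be used as an unordered_map key; mixes the three feature-flag words.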
+namespace std {
+template <>
+struct hash<PropFlags> {
+    std::size_t operator()(const PropFlags &k) const {
+        return ((std::hash<uint32_t>()(k.linear) ^ (std::hash<uint32_t>()(k.optimal) << 1)) >> 1) ^
+               (std::hash<uint32_t>()(k.buffer) << 1);
+    }
+};
+}  // namespace std
+
+// Used to sort the formats into buckets by their properties.
+std::unordered_map<PropFlags, std::vector<VkFormat>> FormatPropMap(AppGpu &gpu) {
+    std::unordered_map<PropFlags, std::vector<VkFormat>> map;
+    for (auto fmtRange : gpu.supported_format_ranges) {
+        for (int32_t fmt = fmtRange.first_format; fmt <= fmtRange.last_format; ++fmt) {
+            VkFormatProperties props;
+            vkGetPhysicalDeviceFormatProperties(gpu.phys_device, static_cast<VkFormat>(fmt), &props);
+
+            PropFlags pf = {props.linearTilingFeatures, props.optimalTilingFeatures, props.bufferFeatures};
+
+            map[pf].push_back(static_cast<VkFormat>(fmt));
+        }
+    }
+    return map;
+}
+
+VkFormatProperties2 GetFormatProperties2(AppGpu &gpu, VkFormat format, pNextChainInfos &chainInfos) {
+    VkFormatProperties2 props;
+    props.sType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2;
+    buildpNextChain((VkStructureHeader *)&props, chainInfos.format_properties2);
+
+    gpu.inst.vkGetPhysicalDeviceFormatProperties2KHR(gpu.phys_device, format, &props);
+    return props;
+}
diff --git a/src/third_party/vulkan-tools/src/vulkaninfo/vulkaninfo.md b/src/third_party/vulkan-tools/src/vulkaninfo/vulkaninfo.md
new file mode 100644
index 0000000..4825596
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/vulkaninfo/vulkaninfo.md
@@ -0,0 +1,74 @@
+<!-- markdownlint-disable MD041 -->
+<!-- Copyright 2015-2019 LunarG, Inc. -->
+
+[![Khronos Vulkan][1]][2]
+
+[1]: https://vulkan.lunarg.com/img/Vulkan_100px_Dec16.png "https://www.khronos.org/vulkan/"
+[2]: https://www.khronos.org/vulkan/
+
+[![Creative Commons][3]][4]
+
+[3]: https://i.creativecommons.org/l/by-nd/4.0/88x31.png "Creative Commons License"
+[4]: https://creativecommons.org/licenses/by-nd/4.0/
+
+# Vulkan Information
+
+Vulkan Info is a program provided in the SDK which outputs various types of Vulkan information such as:
+- device properties of identified GPUs
+- Vulkan extensions supported by each GPU
+- recognized layers
+- supported image formats and format properties.
+
+## Running Vulkan Info
+
+After downloading and installing the SDK and setting up the runtime environment (see the [Getting Started](./getting_started.md#user-content-download-the-sdk) documentation), you will be able to run the Vulkan Info program from a command prompt.
+
+```
+vulkaninfo
+```
+
+Executing `vulkaninfo` without specifying the type of output will default to human-readable output to the console.
+
+```
+vulkaninfo --html
+```
+
+To organize output in a convenient HTML format, use the `--html` option. Executing `vulkaninfo` with the `--html` option produces a file called `vulkaninfo.html`, which can be found in your build directory.
+
+```
+vulkaninfo --json
+```
+
+Use the `--json` option to produce [DevSim-schema](https://schema.khronos.org/vulkan/devsim_1_0_0.json)-compatible JSON output for your device. JSON output can also be requested with the `-j` option. On multi-GPU systems, a single GPU can be targeted using the `--json=`*`GPU-number`* option, where *`GPU-number`* indicates the GPU of interest (e.g., `--json=0`). To determine the GPU number corresponding to a particular GPU, execute `vulkaninfo` with the `--html` option (or no option at all) first; doing so will summarize all GPUs in the system.
+The generated configuration information can be used as input for the [`VK_LAYER_LUNARG_device_simulation`](./device_simulation_layer.html) layer.
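+
+For example, a sketch of capturing the JSON description of the first GPU (number 0) to a file for later use with the device simulation layer (the file name `gpu0.json` is only illustrative):
+```
+vulkaninfo --json=0 > gpu0.json
+```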
+
+
+Use the `--help` or `-h` option to produce a list of all available Vulkan Info options.
+
+```
+vulkaninfo - Summarize Vulkan information in relation to the current environment.
+
+USAGE: ./vulkaninfo [options]
+
+OPTIONS:
+-h, --help            Print this help.
+--html                Produce an html version of vulkaninfo output, saved as
+                      "vulkaninfo.html" in the directory in which the command is
+                      run.
+-j, --json            Produce a json version of vulkaninfo output to standard
+                      output.
+--json=<gpu-number>   For a multi-gpu system, a single gpu can be targetted by
+                      specifying the gpu-number associated with the gpu of
+                      interest. This number can be determined by running
+                      vulkaninfo without any options specified.
+--show-formats        Display the format properties of each physical device.
+                      Note: This option does not affect html or json output;
+                      they will always print format properties.
+
+```
+
+### Windows
+
+Vulkan Info can also be found as a shortcut under the Start Menu.
+* `Start Menu -> Vulkan SDK `*`version`*` -> vulkaninfo`
+
+Note: In order to review and/or save the output produced when using Visual Studio to execute `vulkaninfo` with the JSON option, you will have to redirect output to a file by modifying the command-line arguments in the debug options.
diff --git a/src/third_party/vulkan-tools/src/vulkaninfo/vulkaninfo.rc b/src/third_party/vulkan-tools/src/vulkaninfo/vulkaninfo.rc
new file mode 100644
index 0000000..e8c824e
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/vulkaninfo/vulkaninfo.rc
@@ -0,0 +1,98 @@
+//
+// Copyright (c) 2018-2019 The Khronos Group Inc.
+// Copyright (c) 2018-2019 Valve Corporation
+// Copyright (c) 2018-2019 LunarG, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// Author: Lenny Komow <david@lunarg.com>
+//
+
+///////////////////////////////////////////////////////////////////////////////
+///////////////////////////////////////////////////////////////////////////////
+// Start customize section
+// Edit this section for your build
+///////////////////////////////////////////////////////////////////////////////
+///////////////////////////////////////////////////////////////////////////////
+
+#define VERSION_MAJOR               1
+#define VERSION_MINOR               0
+#define VERSION_PATCH               1111
+#define VERSION_BUILDNO             2222
+
+#define VERSION_BUILD_DESCRIPTION   "Dev Build"
+
+// All builds except release builds should set this to 0.
+// Release builds should set this to 1.
+#define VERSION_IS_RELEASEBUILD        0
+
+
+///////////////////////////////////////////////////////////////////////////////
+///////////////////////////////////////////////////////////////////////////////
+// End of customize section
+///////////////////////////////////////////////////////////////////////////////
+///////////////////////////////////////////////////////////////////////////////
+
+#include "winres.h"
+
+#define VER_FILE_VERSION            VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH, VERSION_BUILDNO
+
+#define STRINGIZE2(s) #s
+#define STRINGIZE(s) STRINGIZE2(s)
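+// Two levels of macro expansion are needed so the version numbers above are expanded before being stringized.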
+
+#if VERSION_IS_RELEASEBUILD==1
+   #define VER_FILE_DESCRIPTION_STR    "Vulkan Info"
+   #define VER_FILE_VERSION_STR        STRINGIZE(VERSION_MAJOR)        \
+                                       "." STRINGIZE(VERSION_MINOR)    \
+                                       "." STRINGIZE(VERSION_PATCH)    \
+                                       "." STRINGIZE(VERSION_BUILDNO)
+#else
+   #define VER_FILE_DESCRIPTION_STR    "Vulkan Info - " VERSION_BUILD_DESCRIPTION
+   #define VER_FILE_VERSION_STR        STRINGIZE(VERSION_MAJOR)        \
+                                    "." STRINGIZE(VERSION_MINOR)    \
+                                    "." STRINGIZE(VERSION_PATCH)    \
+                                    "." STRINGIZE(VERSION_BUILDNO) \
+                                    "." VERSION_BUILD_DESCRIPTION
+#endif
+
+
+VS_VERSION_INFO VERSIONINFO
+ FILEVERSION VER_FILE_VERSION
+ PRODUCTVERSION VER_FILE_VERSION
+ FILEFLAGSMASK 0x3fL
+#ifdef _DEBUG
+ FILEFLAGS VS_FF_DEBUG
+#else
+ FILEFLAGS 0x0L
+#endif
+
+ FILEOS 0x00000L
+ FILETYPE VFT_DLL
+ FILESUBTYPE 0x0L
+BEGIN
+    BLOCK "StringFileInfo"
+    BEGIN
+        BLOCK "04090000"
+        BEGIN
+            VALUE "FileDescription", VER_FILE_DESCRIPTION_STR
+            VALUE "FileVersion", VER_FILE_VERSION_STR
+            VALUE "LegalCopyright", "Copyright (C) 2015-2019"
+            VALUE "ProductName", "Vulkan Runtime"
+            VALUE "ProductVersion", VER_FILE_VERSION_STR
+        END
+    END
+    BLOCK "VarFileInfo"
+    BEGIN
+        VALUE "Translation", 0x409, 0000
+    END
+END
diff --git a/src/third_party/vulkan-tools/src/vulkaninfo/vulkaninfo.vcxproj.user b/src/third_party/vulkan-tools/src/vulkaninfo/vulkaninfo.vcxproj.user
new file mode 100755
index 0000000..591cdd9
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/vulkaninfo/vulkaninfo.vcxproj.user
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project ToolsVersion="12.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+    <LocalDebuggerEnvironment>VK_LAYER_PATH=..\layers\Debug</LocalDebuggerEnvironment>
+    <DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+    <LocalDebuggerEnvironment>VK_LAYER_PATH=..\layers\Release</LocalDebuggerEnvironment>
+    <DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
+  </PropertyGroup>
+</Project>
diff --git a/src/third_party/vulkan-tools/src/windows-runtime-installer/InstallerRT.nsi b/src/third_party/vulkan-tools/src/windows-runtime-installer/InstallerRT.nsi
new file mode 100644
index 0000000..63e555c
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/windows-runtime-installer/InstallerRT.nsi
@@ -0,0 +1,165 @@
+# The Vulkan runtime installer NSIS script
+
+!include LogicLib.nsh
+
+# Input file locations
+!define RES "."
+
+# Input parameters
+!ifndef MAJOR
+    !define MAJOR "1"
+    !define MINOR "1"
+    !define PATCH "73"
+    !define BUILD "0.dev"
+!endif
+!define VERSION "${MAJOR}.${MINOR}.${PATCH}.${BUILD}"
+!ifndef PUBLISHER
+    !define PUBLISHER "YourCompany, Inc."
+!endif
+!ifndef COPYRIGHT
+    !define COPYRIGHT ""
+!endif
+
+# Installer information
+Icon ${RES}\V.ico
+OutFile "VulkanRT-${VERSION}-Installer.exe"
+InstallDir "$PROGRAMFILES\VulkanRT"
+
+RequestExecutionLevel admin
+AddBrandingImage left 150
+Caption "Vulkan Runtime ${VERSION} Setup"
+Name "Vulkan Runtime ${VERSION}"
+LicenseData "${RES}\VulkanRT-License.txt"
+Page custom brandimage "" ": Brand Image"
+Page license
+Page instfiles
+
+VIProductVersion "${VERSION}"
+VIAddVersionKey  "ProductName" "Vulkan Runtime"
+VIAddVersionKey  "FileVersion" "${VERSION}"
+VIAddVersionKey  "ProductVersion" "${VERSION}"
+VIAddVersionKey  "LegalCopyright" "${COPYRIGHT}"
+VIAddVersionKey  "FileDescription" "Vulkan Runtime Installer"
+
+Function brandimage
+  SetOutPath "$TEMP"
+  SetFileAttributes V.bmp temporary
+  File "${RES}\V.bmp"
+  SetBrandingImage "$TEMP/V.bmp"
+Functionend
+
+# Utilities to check whether a file is older than this installer
+Function NeedsReplacing
+    Pop $0
+
+    # Extract the version of the existing file
+    GetDllVersion "$0" $R0 $R1
+    IntOp $R2 $R0 >> 16
+    IntOp $R2 $R2 & 0xffff
+    IntOp $R3 $R0 & 0xffff
+    IntOp $R4 $R1 >> 16
+    IntOp $R4 $R4 & 0xffff
+    IntOp $R5 $R1 & 0xffff
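+    # $R2 = major, $R3 = minor, $R4 = patch, $R5 = build of the file being examined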
+
+    # Check major versions
+    ${IF} ${MAJOR} > $R2
+        Push True
+    ${ELSEIF} ${MAJOR} < $R2
+        Push False
+
+    # Check minor versions
+    ${ELSEIF} ${MINOR} > $R3
+        Push True
+    ${ELSEIF} ${MINOR} < $R3
+        Push False
+
+    # Check patch versions
+    ${ELSEIF} ${PATCH} > $R4
+        Push True
+    ${ELSEIF} ${PATCH} < $R4
+        Push False
+
+    # Check build versions
+    ${ELSEIF} ${BUILD} > $R5
+        Push True
+    ${ELSEIF} ${BUILD} < $R5
+        Push False
+
+    # If they match exactly, then we update
+    ${ELSE}
+        Push True
+    ${ENDIF}
+FunctionEnd
+
+!macro InstallIfNewer SrcPath OutName
+    Push "$OUTDIR\${OutName}"
+    Call NeedsReplacing
+    Pop $0
+
+    ${IF} $0 == True
+        DetailPrint "File $OUTDIR\${OutName} (version $R2.$R3.$R4.$R5) will be upgraded to ${VERSION}"
+        File /oname=${OutName} "${SrcPath}"
+    ${ELSE}
+        DetailPrint "File $OUTDIR\${OutName} (version $R2.$R3.$R4.$R5) will not be replaced with ${VERSION}"
+    ${ENDIF}
+!macroend
+
+# Utilities to check if this is a 64-bit OS or not
+!define IsWow64 `"" IsWow64 ""`
+!macro _IsWow64 _a _b _t _f
+  !insertmacro _LOGICLIB_TEMP
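+  # GetCurrentProcess pushes the current process handle; IsWow64Process then reports whether that
+  # (32-bit) process is running under WOW64, i.e. on a 64-bit Windows installation.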
+  System::Call kernel32::GetCurrentProcess()p.s
+  System::Call kernel32::IsWow64Process(ps,*i0s)
+  Pop $_LOGICLIB_TEMP
+  !insertmacro _!= $_LOGICLIB_TEMP 0 `${_t}` `${_f}`
+!macroend
+
+!define RunningX64 `"" RunningX64 ""`
+!macro _RunningX64 _a _b _t _f
+  !if ${NSIS_PTR_SIZE} > 4
+    !insertmacro LogicLib_JumpToBranch `${_t}` `${_f}`
+  !else
+    !insertmacro _IsWow64 `${_a}` `${_b}` `${_t}` `${_f}`
+  !endif
+!macroend
+
+# Installer
+Section
+    Delete "$INSTDIR\install.log"
+    Delete "$INSTDIR\VULKANRT_LICENSE.rtf"
+    LogSet on
+
+    # Disable filesystem redirection
+    System::Call kernel32::Wow64EnableWow64FsRedirection(i0)
+
+    ${IF} ${RunningX64}
+        SetOutPath $WINDIR\System32
+        !insertmacro InstallIfNewer "${LOADER64}" "vulkan-1.dll"
+        !insertmacro InstallIfNewer "${LOADER64}" "vulkan-1-999-0-0-0.dll"
+        !insertmacro InstallIfNewer "${VULKANINFO64}" "vulkaninfo.exe"
+        !insertmacro InstallIfNewer "${VULKANINFO64}" "vulkaninfo-1-999-0-0-0.exe"
+        SetOutPath $WINDIR\SysWOW64
+    ${ELSE}
+        SetOutPath $WINDIR\System32
+    ${ENDIF}
+
+    # Install 32-bit contents
+    !insertmacro InstallIfNewer "${LOADER32}" "vulkan-1.dll"
+    !insertmacro InstallIfNewer "${LOADER32}" "vulkan-1-999-0-0-0.dll"
+    !insertmacro InstallIfNewer "${VULKANINFO32}" "vulkaninfo.exe"
+    !insertmacro InstallIfNewer "${VULKANINFO32}" "vulkaninfo-1-999-0-0-0.exe"
+
+    # Dump licenses into the installation directory
+    SetOutPath "$INSTDIR"
+    AccessControl::DisableFileInheritance $INSTDIR
+    AccessControl::SetFileOwner $INSTDIR "Administrators"
+    AccessControl::ClearOnFile  $INSTDIR "Administrators" "FullAccess"
+    AccessControl::SetOnFile    $INSTDIR "SYSTEM" "FullAccess"
+    AccessControl::GrantOnFile  $INSTDIR "Everyone" "ListDirectory"
+    AccessControl::GrantOnFile  $INSTDIR "Everyone" "GenericExecute"
+    AccessControl::GrantOnFile  $INSTDIR "Everyone" "GenericRead"
+    AccessControl::GrantOnFile  $INSTDIR "Everyone" "ReadAttributes"
+    File "${RES}\VulkanRT-License.txt"
+
+    LogSet off
+SectionEnd
diff --git a/src/third_party/vulkan-tools/src/windows-runtime-installer/NSIS_Security.patch b/src/third_party/vulkan-tools/src/windows-runtime-installer/NSIS_Security.patch
new file mode 100644
index 0000000..b275aae
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/windows-runtime-installer/NSIS_Security.patch
@@ -0,0 +1,58 @@
+diff --git a/nsis-3.0b3-src/SCons/Config/ms b/nsis-3.0b3-src/SCons/Config/ms
+index 32d3d33..d2c4d25 100644
+--- a/nsis-3.0b3-src/SCons/Config/ms
++++ b/nsis-3.0b3-src/SCons/Config/ms
+@@ -16,7 +16,7 @@ def entry(x,u):
+ 
+ defenv['ENTRY_FLAG'] = entry
+ defenv['MAP_FLAG'] = '/map'
+-defenv['NODEFLIBS_FLAG'] = '/NODEFAULTLIB'
++#defenv['NODEFLIBS_FLAG'] = '/NODEFAULTLIB'
+ defenv['C_FLAG'] = '/TC'
+ defenv['CPP_FLAG'] = '/TP'
+ defenv['CPP_REQUIRES_STDLIB'] = 0
+@@ -29,7 +29,7 @@ defenv['STDCALL'] = '__stdcall'
+ msvs_version = float(defenv['MSVS_VERSION'].replace('Exp',''))
+ if msvs_version >= 8.0:
+ 	defenv['EXCEPTION_FLAG'] = '/EHsc'
+-	defenv.Append(CCFLAGS = ['/GS-'])
++	defenv.Append(CCFLAGS = ['/GS'])
+ 	defenv.Append(CPPDEFINES = ['_CRT_SECURE_NO_WARNINGS', '_CRT_NONSTDC_NO_WARNINGS', '_CRT_SECURE_NO_DEPRECATE', '_CRT_NON_CONFORMING_SWPRINTFS'])
+ 	defenv['MSVCRT_FLAG'] = '/MT' # Avoid msvcr?0.dll dependency
+ else:
+@@ -43,6 +43,10 @@ if msvs_version >= 11.0:
+ 	defenv['SUBSYS_CON'] = '/subsystem:console,5.01' # support windows xp
+ 	defenv['SUBSYS_WIN'] = '/subsystem:windows,5.01' # support windows xp
+ 
++if msvs_version >= 14.0:
++	defenv.Append(CCFLAGS = ['/guard:cf'])
++	defenv.Append(LINKFLAGS = ['/guard:cf'])
++
+ ### defines
+ 
+ defenv.Append(CPPDEFINES = [('NSISCALL', '$STDCALL')])
+@@ -143,9 +147,9 @@ stub_env.Append(CCFLAGS = ['/Fa${TARGET}.lst'])    # listing file name
+ stub_env.Append(LINKFLAGS = ['$NODEFLIBS_FLAG'])   # no default libraries
+ stub_env.Append(LINKFLAGS = ['$MAP_FLAG'])         # generate map file
+ 
+-if msvs_version >= 10.0:
+-	# no relocations that our resource editor ignores
+-	stub_env.Append(LINKFLAGS = ['/FIXED'])
++#if msvs_version >= 10.0:
++#	# no relocations that our resource editor ignores
++#	stub_env.Append(LINKFLAGS = ['/FIXED'])
+ 
+ stub_uenv = stub_env.Clone()
+ stub_uenv.Append(CPPDEFINES = ['_UNICODE', 'UNICODE'])
+diff --git a/nsis-3.0b3-src/Source/exehead/Main.c b/nsis-3.0b3-src/Source/exehead/Main.c
+index eb231e7..e4642ce 100644
+--- a/nsis-3.0b3-src/Source/exehead/Main.c
++++ b/nsis-3.0b3-src/Source/exehead/Main.c
+@@ -84,6 +84,7 @@ void *g_SHGetFolderPath;
+ NSIS_ENTRYPOINT_GUINOCRT
+ EXTERN_C void NSISWinMainNOCRT()
+ {
++  __security_init_cookie();
+   int ret = 0;
+   const TCHAR *m_Err = _LANG_ERRORWRITINGTEMP;
+ 
diff --git a/src/third_party/vulkan-tools/src/windows-runtime-installer/README.md b/src/third_party/vulkan-tools/src/windows-runtime-installer/README.md
new file mode 100644
index 0000000..d0dd595
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/windows-runtime-installer/README.md
@@ -0,0 +1,38 @@
+
+## Windows Runtime Installer
+
+This directory contains the files required for building the Windows Vulkan Runtime Installer package.
+The runtime installer is a method of delivering a Vulkan loader to a system.
+The runtime installer is used by the SDK installer.
+It is also used by some drivers to ensure that an adequate Vulkan loader is installed on a system.
+Additionally, applications may install a runtime to ensure that a minimum loader version is present.
+
+To build a runtime installer:
+1. Get a copy of the Nullsoft Install System (NSIS) version 3.0b3.
+   Other versions may work, but the patch included in this directory is built against version 3.0b3.
+   Apply the `NSIS_Security.patch` file provided in this directory to the NSIS source code.
+   This security patch adds the /DYNAMICBASE, /GS, and /guard:cf options to the build.
+   In addition, it will be necessary to specify NSIS_CONFIG_LOG=yes and NSIS_MAX_STRLEN=8192 when compiling.
+   Once you have applied the patch, compile NSIS with the command:
+    ```
+    scons SKIPUTILS="NSIS Menu","MakeLangId" UNICODE=yes \
+        ZLIB_W32=<path_to_zlib>\zlib-1.2.7-win32-x86 NSIS_MAX_STRLEN=8192 \
+        NSIS_CONFIG_LOG=yes NSIS_CONFIG_LOG_TIMESTAMP=yes \
+        APPEND_CCFLAGS="/DYNAMICBASE /Zi" APPEND_LINKFLAGS="/DYNAMICBASE \
+        /DEBUG /OPT:REF /OPT:ICF" SKIPDOC=all dist-zip
+    ```
+
+2. The compilation should have created a zip file containing the new NSIS build.
+   Unzip this file to the location of your choosing.
+   Download the NSIS Access Control plugin and copy it into the plugin directory of the build you just unzipped.
+   It may be useful to prepend this NSIS binary directory to your system's path, so that this NSIS will be run when you type "makensis".
+   Otherwise, you will just have to specify the full path to makensis.exe in the following steps.
+   It may be useful to verify that all shared libraries in the build have the DYNAMIC_BASE and NX_COMPAT flags set.
+
+3. Build the Vulkan-Loader repository and this one.
+
+4. Build the runtime installer from this directory with the command:
+   ```
+   makensis InstallerRT.nsi -DLOADER64="?" -DLOADER32="?" -DVULKANINFO64="?" -DVULKANINFO32="?"
+   ```
+   where the question marks are replaced with the paths to the 64-bit and 32-bit builds of the loader and vulkaninfo.
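+   For example, a hypothetical invocation might look like the following, where
+   the paths are placeholders and should be replaced with the actual locations
+   of your own loader and vulkaninfo build outputs:
+   ```
+   makensis InstallerRT.nsi -DLOADER64="..\x64\Release\vulkan-1.dll" -DLOADER32="..\Win32\Release\vulkan-1.dll" -DVULKANINFO64="..\x64\Release\vulkaninfo.exe" -DVULKANINFO32="..\Win32\Release\vulkaninfo.exe"
+   ```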
diff --git a/src/third_party/vulkan-tools/src/windows-runtime-installer/V.bmp b/src/third_party/vulkan-tools/src/windows-runtime-installer/V.bmp
new file mode 100644
index 0000000..dea3353
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/windows-runtime-installer/V.bmp
Binary files differ
diff --git a/src/third_party/vulkan-tools/src/windows-runtime-installer/V.ico b/src/third_party/vulkan-tools/src/windows-runtime-installer/V.ico
new file mode 100644
index 0000000..6116698
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/windows-runtime-installer/V.ico
Binary files differ
diff --git a/src/third_party/vulkan-tools/src/windows-runtime-installer/VulkanRT-License.txt b/src/third_party/vulkan-tools/src/windows-runtime-installer/VulkanRT-License.txt
new file mode 100644
index 0000000..6d40d84
--- /dev/null
+++ b/src/third_party/vulkan-tools/src/windows-runtime-installer/VulkanRT-License.txt
@@ -0,0 +1,71 @@
+Copyright (c) 2015-2019 The Khronos Group Inc.
+Copyright (c) 2015-2019 LunarG, Inc.
+Copyright (c) 2015-2019 Valve Corporation
+
+Licensed under the Apache License, Version 2.0 (the "License"); you may not
+use this file except in compliance with the License. You may obtain a copy
+of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
+the License for the specific language governing permissions and limitations
+under the License.
+
+Unless otherwise noted in the LICENSE.txt file in the install folder, all
+components of the Vulkan Runtime are licensed under the above license.
+Licenses for any components not so licensed are listed in the LICENSE.txt
+file.
+
+=========================================================================================================================================
+
+Copyright (c) 2009 Dave Gamble
+Copyright (c) 2015-2016 The Khronos Group Inc.
+Copyright (c) 2015-2016 Valve Corporation
+Copyright (c) 2015-2016 LunarG, Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+=========================================================================================================================================
+
+Copyright (c) 2014 joseph werle <joseph.werle@gmail.com>
+Copyright (c) 2015-2016 The Khronos Group Inc.
+Copyright (c) 2015-2016 Valve Corporation
+Copyright (c) 2015-2016 LunarG, Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and/or associated documentation files (the "Materials"), to
+deal in the Materials without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Materials, and to permit persons to whom the Materials are
+furnished to do so, subject to the following conditions:
+
+The above copyright notice(s) and this permission notice shall be included in
+all copies or substantial portions of the Materials.
+
+THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE
+USE OR OTHER DEALINGS IN THE MATERIALS.
diff --git a/src/third_party/vulkan-validation-layers/src/.appveyor.yml b/src/third_party/vulkan-validation-layers/src/.appveyor.yml
new file mode 100644
index 0000000..1d7d5ee
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/.appveyor.yml
@@ -0,0 +1,82 @@
+# Windows Build Configuration for AppVeyor
+# http://www.appveyor.com/docs/appveyor-yml
+#
+
+# This version starts a separate job for each platform config
+# in order to get around the AppVeyor limit of 60 mins per job.
+
+# build version format
+version: "{build}"
+
+# Free accounts have a max of 1, but ask anyway.
+max_jobs: 4
+
+os:
+  - Visual Studio 2015
+
+init:
+  - git config --global core.autocrlf true
+
+environment:
+  PYTHON_PATH: "C:/Python35"
+  PYTHON_PACKAGE_PATH: "C:/Python35/Scripts"
+  CMAKE_URL: "http://cmake.org/files/v3.10/cmake-3.10.2-win64-x64.zip"
+
+branches:
+  only:
+    - master
+
+install:
+  - appveyor DownloadFile %CMAKE_URL% -FileName cmake.zip
+  - 7z x cmake.zip -oC:\cmake > nul
+  - set path=C:\cmake\bin;%path%
+  - cmake --version
+
+before_build:
+  - "SET PATH=C:\\Python35;C:\\Python35\\Scripts;%PATH%"
+  - echo.
+  - echo Starting build for %APPVEYOR_REPO_NAME% in %APPVEYOR_BUILD_FOLDER%
+  # Generate build files using CMake for the build step.
+  - echo Fetching googletest external dependencies for building validation layer tests
+  - cd %APPVEYOR_BUILD_FOLDER%
+  - git clone https://github.com/google/googletest.git external/googletest
+  - cd %APPVEYOR_BUILD_FOLDER%/external/googletest
+  - git checkout tags/release-1.8.1
+  - cd %APPVEYOR_BUILD_FOLDER%
+  - python scripts/update_deps.py --dir=external --arch=%PLATFORM% --config=%CONFIGURATION%
+  - echo Verifying consistency between source file generators and output
+  - python scripts/generate_source.py --verify external/Vulkan-Headers/registry
+  - echo Generating Vulkan-ValidationLayers CMake files for %PLATFORM% %CONFIGURATION%
+  - mkdir build
+  - cd build
+  - cmake -A %PLATFORM% -C../external/helper.cmake --config %CONFIGURATION% ..
+  - echo Building platform=%PLATFORM% configuration=%CONFIGURATION%
+  - mkdir layers\%CONFIGURATION%
+  - python ../scripts/vk_validation_stats.py ../external/Vulkan-Headers/registry/validusage.json -text layers/%CONFIGURATION%/vuid_coverage_database.txt
+
+platform:
+  - Win32
+  - x64
+
+configuration:
+  - Release
+  - Debug
+
+# Build only x64 Release and Win32(x86) Debug to reduce build time.
+# This should still provide adequate 32-bit vs 64-bit and
+# Release vs Debug coverage.
+matrix:
+  exclude:
+    - configuration: Release
+      platform: Win32
+    - configuration: Debug
+      platform: x64
+
+build:
+  parallel: true                              # enable MSBuild parallel builds
+  project: build/Vulkan-ValidationLayers.sln  # path to Visual Studio solution or project
+  verbosity: quiet                            # quiet|minimal|normal|detailed
+
+artifacts:
+  - path: build\layers\$(configuration)
+    name: Vulkan-ValidationLayers-$(platform)-$(configuration)
diff --git a/src/third_party/vulkan-validation-layers/src/.clang-format b/src/third_party/vulkan-validation-layers/src/.clang-format
new file mode 100644
index 0000000..453527f
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/.clang-format
@@ -0,0 +1,8 @@
+---
+# Use defaults from the Google style with the following exceptions:
+BasedOnStyle: Google
+IndentWidth: 4
+AccessModifierOffset: -2
+ColumnLimit: 132
+SortIncludes: false
+...
diff --git a/src/third_party/vulkan-validation-layers/src/.gn b/src/third_party/vulkan-validation-layers/src/.gn
new file mode 100644
index 0000000..e190259
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/.gn
@@ -0,0 +1,22 @@
+# Copyright (C) 2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+buildconfig = "//build/config/BUILDCONFIG.gn"
+secondary_source = "//build-gn/secondary/"
+
+default_args = {
+    clang_use_chrome_plugins = false
+    use_custom_libcxx = false
+}
+
diff --git a/src/third_party/vulkan-validation-layers/src/.travis.yml b/src/third_party/vulkan-validation-layers/src/.travis.yml
new file mode 100644
index 0000000..2a8d53a
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/.travis.yml
@@ -0,0 +1,215 @@
+# Build Configuration for Travis CI
+# https://travis-ci.org
+
+dist: xenial
+sudo: required
+language: cpp
+
+matrix:
+  # Show final status immediately if a test fails.
+  fast_finish: true
+  allow_failures:
+    - env: CHECK_COMMIT_FORMAT=ON
+    - env: VULKAN_BUILD_TARGET=GN
+  include:
+    # Android build.
+    - os: linux
+      compiler: gcc
+      env: VULKAN_BUILD_TARGET=ANDROID ANDROID_TARGET=android-26 ANDROID_ABI=armeabi-v7a
+    # Android 64-bit build.
+    - os: linux
+      compiler: gcc
+      env: VULKAN_BUILD_TARGET=ANDROID ANDROID_TARGET=android-26 ANDROID_ABI=arm64-v8a
+    # Linux GCC debug build.
+    - os: linux
+      compiler: gcc
+      env: VULKAN_BUILD_TARGET=LINUX
+    # Linux clang debug build.
+    - os: linux
+      compiler: clang
+      env: VULKAN_BUILD_TARGET=LINUX
+    # Linux GN debug build.
+    - os: linux
+      env: VULKAN_BUILD_TARGET=GN
+    # Check for proper clang formatting in the pull request.
+    - env: CHECK_FORMAT=ON
+    # Check for proper commit message formatting for commits in PR
+    - env: CHECK_COMMIT_FORMAT=ON
+
+cache: ccache
+
+# Use set -e so that the build fails when a command fails.
+# The default action for Travis-CI is to continue running even if a command fails.
+# See https://github.com/travis-ci/travis-ci/issues/1066.
+# Use the YAML block scalar header (|) to allow easier multiline script coding.
+
+before_install:
+  - set -e
+  - CMAKE_VERSION=3.10.2
+  - |
+    if [[ "${TRAVIS_OS_NAME}" == "linux" ]]; then
+      # Upgrade to the desired version of CMake
+      CMAKE_URL="https://cmake.org/files/v${CMAKE_VERSION%.*}/cmake-${CMAKE_VERSION}-Linux-x86_64.tar.gz"
+      echo CMAKE_URL=${CMAKE_URL}
+      mkdir cmake-${CMAKE_VERSION} && travis_retry wget --no-check-certificate -O - ${CMAKE_URL} | tar --strip-components=1 -xz -C cmake-${CMAKE_VERSION}
+      export PATH=${PWD}/cmake-${CMAKE_VERSION}/bin:${PATH}
+    else
+      brew install cmake || brew upgrade cmake
+    fi
+    cmake --version
+  - |
+    if [[ "$VULKAN_BUILD_TARGET" == "LINUX" ]] || [[ "$VULKAN_BUILD_TARGET" == "GN" ]]; then
+      # Install the appropriate Linux packages.
+      sudo apt-get -qq update
+      sudo apt-get -y install libxkbcommon-dev libwayland-dev libmirclient-dev libxrandr-dev \
+                              libx11-xcb-dev libxcb-keysyms1 libxcb-keysyms1-dev libxcb-ewmh-dev \
+                              libxcb-randr0-dev python-pathlib
+    fi
+  - |
+    if [[ "$VULKAN_BUILD_TARGET" == "ANDROID" ]]; then
+      # Install the Android NDK.
+      export ARCH=`uname -m`
+      wget http://dl.google.com/android/repository/android-ndk-r18b-linux-${ARCH}.zip
+      unzip -u -q android-ndk-r18b-linux-${ARCH}.zip
+      export ANDROID_NDK_HOME=`pwd`/android-ndk-r18b
+      export JAVA_HOME="/usr/lib/jvm/java-8-oracle"
+      export PATH="$ANDROID_NDK_HOME:$PATH"
+    fi
+  - |
+    if [[ "$CHECK_FORMAT" == "ON" && "$TRAVIS_PULL_REQUEST" != "false" ]]; then
+      # Install the clang format diff tool, but only for pull requests.
+      curl -L http://llvm.org/svn/llvm-project/cfe/trunk/tools/clang-format/clang-format-diff.py \
+           -o scripts/clang-format-diff.py;
+    fi
+  # Misc setup
+  - export core_count=$(nproc || echo 4) && echo core_count = $core_count
+  - ccache --zero-stats
+  - set +e
+
+script:
+  - set -e
+  - |
+    if [[ "$VULKAN_BUILD_TARGET" == "LINUX" ]]; then
+      # Build Vulkan-Loader to run Vulkan-ValidationLayer tests
+      cd ${TRAVIS_BUILD_DIR}/external
+      git clone https://github.com/KhronosGroup/Vulkan-Loader.git
+      cd Vulkan-Loader
+      python scripts/update_deps.py --dir=external
+      mkdir build
+      cd build
+      cmake -DCMAKE_BUILD_TYPE=Debug -C../external/helper.cmake -DCMAKE_INSTALL_PREFIX=install ..
+      cmake --build . --target install -- -j$core_count
+      cd ${TRAVIS_BUILD_DIR}
+      # Build all remaining dependencies for Vulkan-ValidationLayers
+      if [[ "$TRAVIS_EVENT_TYPE" == "cron" ]]; then
+        # Build master branch when triggered by (daily) cron job
+        ${TRAVIS_BUILD_DIR}/scripts/update_deps.py --dir=${TRAVIS_BUILD_DIR}/external --ref=master
+      else
+        ${TRAVIS_BUILD_DIR}/scripts/update_deps.py --dir=${TRAVIS_BUILD_DIR}/external
+      fi
+    fi
+  - |
+    if [[ "$VULKAN_BUILD_TARGET" == "LINUX" ]]; then
+      # Fetch googletest component to build validation layer tests
+      echo Fetching googletest external dependencies for building validation layer tests
+      git clone https://github.com/google/googletest.git ${TRAVIS_BUILD_DIR}/external/googletest
+      pushd ${TRAVIS_BUILD_DIR}/external/googletest
+      git checkout tags/release-1.8.1
+      popd
+    fi
+  - |
+    if [[ "$VULKAN_BUILD_TARGET" == "LINUX" ]]; then
+      # Verify consistency between source file generators and output
+      echo Verifying consistency between source file generators and output
+      python3 ${TRAVIS_BUILD_DIR}/scripts/generate_source.py --verify ${TRAVIS_BUILD_DIR}/external/Vulkan-Headers/registry
+    fi
+  - |
+    if [[ "$VULKAN_BUILD_TARGET" == "LINUX" ]]; then
+      # Build Vulkan-ValidationLayers
+      cd ${TRAVIS_BUILD_DIR}
+      mkdir build
+      cd build
+      cmake -C ${TRAVIS_BUILD_DIR}/external/helper.cmake -DCMAKE_BUILD_TYPE=Debug \
+            -DVULKAN_LOADER_INSTALL_DIR=${TRAVIS_BUILD_DIR}/external/Vulkan-Loader/build/install \
+            -DCMAKE_INSTALL_PREFIX=${TRAVIS_BUILD_DIR}/build/install -DBUILD_LAYER_SUPPORT_FILES=ON \
+            -DUSE_CCACHE=ON ..
+      cmake --build . --target install -- -j$core_count
+    fi
+  - |
+    if [[ "$VULKAN_BUILD_TARGET" == "LINUX" ]]; then
+      # Build VulkanTools
+      # Note: VulkanTools has a build dependency on Vulkan-ValidationLayers so it must be built after V-VL is built
+      pushd ${TRAVIS_BUILD_DIR}/external/VulkanTools
+      ./update_external_sources.sh
+      mkdir build
+      cd build
+      cmake -DCMAKE_BUILD_TYPE=Debug \
+            -DBUILD_VIA=NO -DBUILD_VKTRACE=NO -DBUILD_VLF=NO -DBUILD_TESTS=NO -DBUILD_LAYERMGR=NO \
+            -DBUILD_VKTRACEVIEWER=NO -DBUILD_VKTRACE_LAYER=NO -DBUILD_VKTRACE_REPLAY=NO \
+            -DVULKAN_HEADERS_INSTALL_DIR=${TRAVIS_BUILD_DIR}/external/Vulkan-Headers/build/install \
+            -DVULKAN_LOADER_INSTALL_DIR=${TRAVIS_BUILD_DIR}/external/Vulkan-Loader/build/install \
+            -DVULKAN_VALIDATIONLAYERS_INSTALL_DIR=${TRAVIS_BUILD_DIR}/build/install \
+            ..
+      cmake --build . -- -j $core_count
+      popd
+    fi
+  - |
+    if [[ "$VULKAN_BUILD_TARGET" == "LINUX" ]]; then
+      # Run Tests
+      cd ${TRAVIS_BUILD_DIR}
+      export LD_LIBRARY_PATH=${TRAVIS_BUILD_DIR}/external/Vulkan-Loader/build/install/lib:${LD_LIBRARY_PATH}
+      export VK_LAYER_PATH=${TRAVIS_BUILD_DIR}/external/VulkanTools/build/layersvt:${TRAVIS_BUILD_DIR}/build/layers
+      export VK_ICD_FILENAMES=${TRAVIS_BUILD_DIR}/external/Vulkan-Tools/build/icd/VkICD_mock_icd.json
+      build/tests/vk_layer_validation_tests
+      for profile in tests/device_profiles/*.json
+      do
+        echo Testing with profile $profile
+        VK_DEVSIM_FILENAME=$profile build/tests/vk_layer_validation_tests --devsim
+      done
+    fi
+  - |
+    if [[ "$VULKAN_BUILD_TARGET" == "ANDROID" ]]; then
+      pushd build-android
+      ./update_external_sources_android.sh --abi $ANDROID_ABI --no-build
+      USE_CCACHE=1 NDK_CCACHE=ccache ndk-build APP_ABI=$ANDROID_ABI -j $core_count
+      popd
+    fi
+  - |
+    if [[ "$CHECK_FORMAT" == "ON" ]]; then
+      if [[ "$TRAVIS_PULL_REQUEST" != "false" ]]; then
+        # Run the clang format check only for pull request builds because the
+        # master branch is needed to do the git diff.
+        echo "Checking clang-format between TRAVIS_BRANCH=$TRAVIS_BRANCH and TRAVIS_PULL_REQUEST_BRANCH=$TRAVIS_PULL_REQUEST_BRANCH"
+        ./scripts/check_code_format.sh
+      else
+        echo "Skipping clang-format check since this is not a pull request."
+      fi
+    fi
+  - |
+    if [[ "$CHECK_COMMIT_FORMAT" == "ON" ]]; then
+      if [[ "$TRAVIS_PULL_REQUEST" != "false" ]]; then
+        echo "Checking commit message formats:  See CONTRIBUTING.md"
+        ./scripts/check_commit_message_format.sh
+      fi
+    fi
+  - |
+    if [[ "$VULKAN_BUILD_TARGET" == "GN" ]]; then
+      git clone https://chromium.googlesource.com/chromium/tools/depot_tools.git depot_tools
+      export PATH=$PATH:$PWD/depot_tools
+      ./build-gn/update_deps.sh
+      gn gen out/Debug
+      ninja -C out/Debug
+    fi
+  - ccache --show-stats
+  - set +e
+
+notifications:
+  email:
+    recipients:
+      - cnorthrop@google.com
+      - tobine@google.com
+      - chrisforbes@google.com
+      - shannon@lunarg.com
+      - mikes@lunarg.com
+    on_success: change
+    on_failure: always
diff --git a/src/third_party/vulkan-validation-layers/src/BUILD.gn b/src/third_party/vulkan-validation-layers/src/BUILD.gn
new file mode 100644
index 0000000..f33f08e
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/BUILD.gn
@@ -0,0 +1,282 @@
+# Copyright (C) 2018-2019 The ANGLE Project Authors.
+# Copyright (C) 2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build_overrides/vulkan_validation_layers.gni")
+
+# Fuchsia has non-upstream changes to the vulkan layers, so we don't want
+# to build it from upstream sources.
+assert(!is_fuchsia)
+
+vulkan_undefine_configs = []
+if (is_win) {
+  vulkan_undefine_configs += [
+    "//build/config/win:nominmax",
+    "//build/config/win:unicode",
+  ]
+}
+
+vulkan_gen_dir = "$target_gen_dir/$vulkan_gen_subdir"
+raw_vulkan_gen_dir = rebase_path(vulkan_gen_dir, root_build_dir)
+
+vulkan_data_dir = "$root_out_dir/$vulkan_data_subdir"
+raw_vulkan_data_dir = rebase_path(vulkan_data_dir, root_build_dir)
+
+raw_root_out_dir = rebase_path(root_out_dir, root_build_dir)
+
+# This special action is needed to remove old VVL objects that are now renamed.
+action("vulkan_clean_old_validation_layer_objects") {
+  script = "build-gn/remove_files.py"
+
+  # 'inputs' is an arbitrary file that is new as of the VVL roll; it is used to ensure the cleanup is done only once
+  inputs = [
+    "layers/gpu_validation.cpp",
+  ]
+  outputs = [
+    "$vulkan_gen_dir/old_vvl_files_are_removed",
+  ]
+  args = [
+    "$raw_vulkan_gen_dir/old_vvl_files_are_removed",
+    "$raw_root_out_dir/libVkLayer*",
+    "$raw_root_out_dir/VkLayer*",
+    "$raw_vulkan_data_dir/VkLayer*.json",
+  ]
+}
+
+config("vulkan_internal_config") {
+  defines = [
+    "VULKAN_NON_CMAKE_BUILD",
+    "API_NAME=\"Vulkan\"",
+  ]
+  if (is_clang || !is_win) {
+    cflags = [ "-Wno-unused-function" ]
+  }
+  if (is_linux) {
+    defines += [
+      "SYSCONFDIR=\"/etc\"",
+      "FALLBACK_CONFIG_DIRS=\"/etc/xdg\"",
+      "FALLBACK_DATA_DIRS=\"/usr/local/share:/usr/share\"",
+    ]
+  }
+}
+
+# The validation layers
+# ---------------------
+
+config("vulkan_layer_config") {
+  include_dirs = [
+    "layers",
+    "layers/generated",
+  ]
+}
+
+core_validation_sources = [
+  # This file is manually included in the layer
+  # "layers/generated/vk_safe_struct.cpp",
+  "layers/buffer_validation.cpp",
+  "layers/buffer_validation.h",
+  "layers/state_tracker.cpp",
+  "layers/state_tracker.h",
+  "layers/core_validation.cpp",
+  "layers/core_validation.h",
+  "layers/convert_to_renderpass2.cpp",
+  "layers/descriptor_sets.cpp",
+  "layers/descriptor_sets.h",
+  "layers/drawdispatch.cpp",
+  "layers/gpu_validation.cpp",
+  "layers/gpu_validation.h",
+  "layers/shader_validation.cpp",
+  "layers/shader_validation.h",
+  "layers/xxhash.c",
+  "layers/xxhash.h",
+]
+
+object_lifetimes_sources = [
+  "layers/generated/object_tracker.cpp",
+  "layers/generated/object_tracker.h",
+  "layers/object_tracker_utils.cpp",
+]
+
+stateless_validation_sources = [
+  "layers/generated/parameter_validation.cpp",
+  "layers/generated/parameter_validation.h",
+  "layers/parameter_validation_utils.cpp",
+]
+
+thread_safety_sources = [
+  "layers/generated/thread_safety.cpp",
+  "layers/generated/thread_safety.h",
+]
+
+unique_objects_sources = []
+
+best_practices_sources = [
+  "layers/best_practices.cpp",
+  "layers/best_practices.h",
+  "layers/best_practices_error_enums.h",
+]
+
+command_counter_sources = [
+  "layers/generated/command_counter_helper.cpp",
+  "layers/generated/command_counter_helper.h",
+  "layers/command_counter.h",
+]
+
+chassis_sources = [
+  "layers/state_tracker.h",
+  "layers/core_validation.h",
+  "layers/generated/vk_safe_struct.h",
+  "layers/generated/thread_safety.h",
+  "layers/generated/chassis.cpp",
+  "layers/generated/chassis.h",
+  "layers/generated/layer_chassis_dispatch.cpp",
+  "layers/generated/layer_chassis_dispatch.h",
+  "$vulkan_headers_dir/include/vulkan/vk_layer.h",
+  "$vulkan_headers_dir/include/vulkan/vulkan.h",
+]
+
+layers = [ [
+      "khronos_validation",
+      core_validation_sources + object_lifetimes_sources +
+          stateless_validation_sources + thread_safety_sources +
+          unique_objects_sources + best_practices_sources +
+          command_counter_sources + chassis_sources,
+      [ ":vulkan_core_validation_glslang" ],
+      [],
+    ] ]
+
+if (!is_android) {
+  action("vulkan_gen_json_files") {
+    script = "build-gn/generate_vulkan_layers_json.py"
+
+    # Make sure that the cleanup of old layer JSON files happens before the new ones are generated.
+    deps = [
+      ":vulkan_clean_old_validation_layer_objects",
+    ]
+    json_names = [
+      "VkLayer_standard_validation.json",
+      "VkLayer_khronos_validation.json",
+    ]
+    sources = [
+      "$vulkan_headers_dir/include/vulkan/vulkan_core.h",
+    ]
+    outputs = []
+    foreach(json_name, json_names) {
+      sources += [ "layers/json/$json_name.in" ]
+      outputs += [ "$vulkan_data_dir/$json_name" ]
+    }
+    args = [
+             rebase_path("layers/json", root_build_dir),
+             rebase_path(vulkan_data_dir, root_build_dir),
+           ] + rebase_path(sources, root_build_dir)
+
+    # The layer JSON files are part of the necessary data deps.
+    data = outputs
+  }
+}
+
+source_set("vulkan_layer_utils") {
+  include_dirs = [
+    "layers",
+    "layers/generated",
+  ]
+  sources = [
+    "$vulkan_headers_dir/include/vulkan/vk_layer.h",
+    "$vulkan_headers_dir/include/vulkan/vk_sdk_platform.h",
+    "$vulkan_headers_dir/include/vulkan/vulkan.h",
+    "layers/vk_format_utils.cpp",
+    "layers/vk_format_utils.h",
+    "layers/vk_layer_config.cpp",
+    "layers/vk_layer_config.h",
+    "layers/vk_layer_extension_utils.cpp",
+    "layers/vk_layer_extension_utils.h",
+    "layers/vk_layer_utils.cpp",
+    "layers/vk_layer_utils.h",
+  ]
+  public_configs = [
+    "$vulkan_headers_dir:vulkan_headers_config",
+    ":vulkan_internal_config",
+  ]
+  configs -= [ "//build/config/compiler:chromium_code" ]
+  configs += [ "//build/config/compiler:no_chromium_code" ]
+  public_deps = []
+  configs -= vulkan_undefine_configs
+}
+
+config("vulkan_core_validation_config") {
+  include_dirs = [ "$vvl_glslang_dir" ]
+}
+
+source_set("vulkan_core_validation_glslang") {
+  public_deps = [
+    "${vvl_spirv_tools_dir}:spvtools",
+    "${vvl_spirv_tools_dir}:spvtools_opt",
+    "${vvl_spirv_tools_dir}:spvtools_val",
+  ]
+  public_configs = [
+    "$vulkan_headers_dir:vulkan_headers_config",
+    ":vulkan_core_validation_config",
+  ]
+}
+
+config("vulkan_stateless_validation_config") {
+  if (is_clang) {
+    cflags_cc = [ "-Wno-unused-const-variable" ]
+  }
+}
+
+foreach(layer_info, layers) {
+  name = layer_info[0]
+  shared_library("VkLayer_$name") {
+    defines = []
+    configs -= [ "//build/config/compiler:chromium_code" ]
+    configs += [ "//build/config/compiler:no_chromium_code" ]
+    configs -= vulkan_undefine_configs
+    public_configs = [ ":vulkan_layer_config" ]
+    deps = [
+      # Make sure the cleanup of old layers happens before the new ones are compiled.
+      ":vulkan_clean_old_validation_layer_objects",
+      ":vulkan_layer_utils",
+    ]
+    if (layer_info[2] != "") {
+      deps += layer_info[2]
+    }
+    sources = layer_info[1]
+    if (is_win) {
+      sources += [ "layers/VkLayer_$name.def" ]
+    }
+    if (is_linux || is_android) {
+      ldflags = [ "-Wl,-Bsymbolic,--exclude-libs,ALL" ]
+    }
+    if (use_x11) {
+      defines += [ "VK_USE_PLATFORM_XLIB_KHR" ]
+    }
+    if (is_android) {
+      libs = [
+        "log",
+        "nativewindow",
+      ]
+      configs -= [ "//build/config/android:hide_all_but_jni_onload" ]
+    }
+    defines += layer_info[3]
+  }
+}
+
+group("vulkan_validation_layers") {
+  data_deps = []
+  foreach(layer_info, layers) {
+    name = layer_info[0]
+    data_deps += [ ":VkLayer_$name" ]
+  }
+}
diff --git a/src/third_party/vulkan-validation-layers/src/BUILD.md b/src/third_party/vulkan-validation-layers/src/BUILD.md
new file mode 100644
index 0000000..dc41215
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/BUILD.md
@@ -0,0 +1,846 @@
+# Build Instructions
+
+Instructions for building this repository on Linux, Windows, Android, and
+MacOS.
+
+## Index
+
+1. [Contributing](#contributing-to-the-repository)
+1. [Repository Content](#repository-content)
+1. [Repository Set-Up](#repository-set-up)
+1. [Windows Build](#building-on-windows)
+1. [Linux Build](#building-on-linux)
+1. [Android Build](#building-on-android)
+1. [MacOS build](#building-on-macos)
+
+## Contributing to the Repository
+
+If you intend to contribute, the preferred work flow is for you to develop
+your contribution in a fork of this repository in your GitHub account and then
+submit a pull request. Please see the [CONTRIBUTING.md](CONTRIBUTING.md) file
+in this repository for more details.
+
+## Repository Content
+
+This repository contains the source code necessary to build the Vulkan
+validation layers and their tests.
+
+### Installed Files
+
+The `install` target installs the following files under the directory
+indicated by *install_dir*:
+
+- *install_dir*`/lib` : The Vulkan validation layer libraries
+- *install_dir*`/share/vulkan/explicit_layer.d` : The Vulkan validation layer
+  JSON files (Linux and MacOS)
+
+The `uninstall` target can be used to remove the above files from the install
+directory.
+
+## Repository Set-Up
+
+### Display Drivers
+
+This repository does not contain a Vulkan-capable driver. You will need to
+obtain and install a Vulkan driver from your graphics hardware vendor or from
+some other suitable source if you intend to run Vulkan applications.
+
+### Download the Repository
+
+To create your local git repository:
+
+    git clone https://github.com/KhronosGroup/Vulkan-ValidationLayers.git
+
+### Repository Dependencies
+
+This repository attempts to resolve some of its dependencies by using
+components found from the following places, in this order:
+
+1. CMake or Environment variable overrides (e.g., -DVULKAN_HEADERS_INSTALL_DIR)
+1. LunarG Vulkan SDK, located by the `VULKAN_SDK` environment variable
+1. System-installed packages, mostly applicable on Linux
+
+Dependencies that cannot be resolved by the SDK or installed packages must be
+resolved with the "install directory" override and are listed below. The
+"install directory" override can also be used to force the use of a specific
+version of that dependency.
+
+#### Vulkan-Headers
+
+This repository has a required dependency on the
+[Vulkan Headers repository](https://github.com/KhronosGroup/Vulkan-Headers).
+You must clone the headers repository and build its `install` target before
+building this repository. The Vulkan-Headers repository is required because it
+contains the Vulkan API definition files (registry) that are required to build
+the validation layers. You must also take note of the headers' install
+directory and pass it on the CMake command line for building this repository,
+as described below.
+
+#### glslang
+
+This repository has a required dependency on the
+[glslang repository](https://github.com/KhronosGroup/glslang).
+The glslang repository is required because it contains components that are
+required to build the validation layers. You must clone the glslang repository
+and build its `install` target. Follow the build instructions in the glslang
+[README.md](https://github.com/KhronosGroup/glslang/blob/master/README.md)
+file. Ensure that the `update_glslang_sources.py` script has been run as part
+of building glslang. You must also take note of the glslang install directory
+and pass it on the CMake command line for building this repository, as
+described below.
+
+#### Google Test
+
+The validation layer tests depend on the
+[Google Test](https://github.com/google/googletest)
+framework and do not build unless this framework is downloaded into the
+repository's `external` directory.
+
+To obtain the framework, change your current directory to the top of your
+Vulkan-ValidationLayers repository and run:
+
+    git clone https://github.com/google/googletest.git external/googletest
+    cd external/googletest
+    git checkout tags/release-1.8.1
+
+before configuring your build with CMake.
+
+If you do not need the tests, there is no need to download this
+framework.
+
+#### Vulkan-Loader
+
+The validation layer tests depend on the Vulkan loader when they execute and
+so a loader is needed only if the tests are built and run.
+
+A loader can be used from an installed LunarG SDK, an installed Linux package,
+or from a driver installation on Windows.
+
+If a loader is not available from any of these methods and/or it is important
+to use a loader built from a repository, then you must build the
+[Vulkan-Loader repository](https://github.com/KhronosGroup/Vulkan-Loader.git)
+with its install target. Take note of its install directory location and pass
+it on the CMake command line for building this repository, as described below.
+
+If you do not intend to run the tests, you do not need a Vulkan loader.
+
+### Build and Install Directories
+
+A common convention is to place the build directory in the top directory of
+the repository with a name of `build` and place the install directory as a
+child of the build directory with the name `install`. The remainder of these
+instructions follow this convention, although you can use any name for these
+directories and place them in any location.
+
+### Building Dependent Repositories with Known-Good Revisions
+
+There is a Python utility script, `scripts/update_deps.py`, that you can use to
+gather and build the dependent repositories mentioned above. This script uses
+information stored in the `scripts/known_good.json` file to check out dependent
+repository revisions that are known to be compatible with the revision of this
+repository that you currently have checked out. As such, this script is useful
+as a quick-start tool for common use cases and default configurations.
+
+For all platforms, start with:
+
+    git clone git@github.com:KhronosGroup/Vulkan-ValidationLayers.git
+    cd Vulkan-ValidationLayers
+    mkdir build
+    cd build
+
+For 64-bit Linux and MacOS, continue with:
+
+    ../scripts/update_deps.py
+    cmake -C helper.cmake ..
+    cmake --build .
+
+For 64-bit Windows, continue with:
+
+    ..\scripts\update_deps.py --arch x64
+    cmake -A x64 -C helper.cmake ..
+    cmake --build .
+
+For 32-bit Windows, continue with:
+
+    ..\scripts\update_deps.py --arch Win32
+    cmake -A Win32 -C helper.cmake ..
+    cmake --build .
+
+Please see the more detailed build information later in this file if you have
+specific requirements for configuring and building these components.
+
+#### Notes
+
+- You may need to adjust some of the CMake options based on your platform. See
+  the platform-specific sections later in this document.
+- The `update_deps.py` script fetches and builds the dependent repositories in
+  the current directory when it is invoked. In this case, they are built in
+  the `build` directory.
+- The `build` directory is also being used to build this
+  (Vulkan-ValidationLayers) repository. But there shouldn't be any conflicts
+  inside the `build` directory between the dependent repositories and the
+  build files for this repository.
+- The `--dir` option for `update_deps.py` can be used to relocate the
+  dependent repositories to another arbitrary directory using an absolute or
+  relative path.
+- The `update_deps.py` script generates a file named `helper.cmake` and places
+  it in the same directory as the dependent repositories (`build` in this
+  case). This file contains CMake commands to set the CMake `*_INSTALL_DIR`
+  variables that are used to point to the install artifacts of the dependent
+  repositories. You can use this file with the `cmake -C` option to set these
+  variables when you generate your build files with CMake. This lets you avoid
+  entering several `*_INSTALL_DIR` variable settings on the CMake command line.
+- If using "MINGW" (Git For Windows), you may wish to run
+  `winpty update_deps.py` in order to avoid buffering all of the script's
+  "print" output until the end and to retain the ability to interrupt script
+  execution.
+- Please use `update_deps.py --help` to list additional options and read the
+  internal documentation in `update_deps.py` for further information.
+
+### Generated source code
+
+This repository contains generated source code in the `layers/generated`
+directory which is not intended to be modified directly. Instead, changes should be
+made to the corresponding generator in the `scripts` directory. The source files can
+then be regenerated using `scripts/generate_source.py`:
+
+    python3 scripts/generate_source.py PATH_TO_VULKAN_HEADERS_REGISTRY_DIR
+
+A helper CMake target `VulkanVL_generated_source` is also provided to simplify
+the invocation of `scripts/generate_source.py` from the build directory:
+
+    cmake --build . --target VulkanVL_generated_source
+
+### Build Options
+
+When generating native platform build files through CMake, several options can
+be specified to customize the build. Some of the options are binary on/off
+options, while others take a string as input. The following is a table of all
+on/off options currently supported by this repository:
+
+| Option | Platform | Default | Description |
+| ------ | -------- | ------- | ----------- |
+| BUILD_LAYERS | All | `ON` | Controls whether or not the validation layers are built. |
+| BUILD_LAYER_SUPPORT_FILES | All | `OFF` | Controls whether or not layer support files are built if the layers are not built. |
+| BUILD_TESTS | All | `???` | Controls whether or not the validation layer tests are built. The default is `ON` when the Google Test repository is cloned into the `external` directory.  Otherwise, the default is `OFF`. |
+| INSTALL_TESTS | All | `OFF` | Controls whether or not the validation layer tests are installed. This option is only available when a copy of Google Test is available. |
+| BUILD_WSI_XCB_SUPPORT | Linux | `ON` | Build the components with XCB support. |
+| BUILD_WSI_XLIB_SUPPORT | Linux | `ON` | Build the components with Xlib support. |
+| BUILD_WSI_WAYLAND_SUPPORT | Linux | `ON` | Build the components with Wayland support. |
+| USE_CCACHE | Linux | `OFF` | Enable caching with the CCache program. |
+
+The following is a table of all string options currently supported by this repository:
+
+| Option | Platform | Default | Description |
+| ------ | -------- | ------- | ----------- |
+| CMAKE_OSX_DEPLOYMENT_TARGET | MacOS | `10.12` | The minimum version of MacOS for loader deployment. |
+
+These variables should be set using the `-D` option when invoking CMake to
+generate the native platform files.
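+
+For example, a Linux configuration that skips building the tests and enables
+ccache (both options are listed in the table above) could hypothetically be
+generated with:
+
+    cmake -DBUILD_TESTS=OFF -DUSE_CCACHE=ON ..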
+
+## Building On Windows
+
+### Windows Development Environment Requirements
+
+- Windows
+  - Any Personal Computer version supported by Microsoft
+- Microsoft [Visual Studio](https://www.visualstudio.com/)
+  - Versions
+    - [2015](https://www.visualstudio.com/vs/older-downloads/)
+    - [2017](https://www.visualstudio.com/vs/downloads/)
+  - The Community Edition of each of the above versions is sufficient, as
+    well as any more capable edition.
+- [CMake 3.10.2](https://cmake.org/files/v3.10/cmake-3.10.2-win64-x64.zip) is recommended.
+  - Use the installer option to add CMake to the system PATH
+- Git Client Support
+  - [Git for Windows](http://git-scm.com/download/win) is a popular solution
+    for Windows
+  - Some IDEs (e.g., [Visual Studio](https://www.visualstudio.com/),
+    [GitHub Desktop](https://desktop.github.com/)) have integrated
+    Git client support
+
+### Windows Build - Microsoft Visual Studio
+
+The general approach is to run CMake to generate the Visual Studio project
+files. Then either run CMake with the `--build` option to build from the
+command line or use the Visual Studio IDE to open the generated solution and
+work with the solution interactively.
+
+#### Windows Quick Start
+
+    cd Vulkan-ValidationLayers
+    mkdir build
+    cd build
+    cmake -A x64 -DVULKAN_HEADERS_INSTALL_DIR=absolute_path_to_install_dir \
+                 -DGLSLANG_INSTALL_DIR=absolute_path_to_install_dir ..
+    cmake --build .
+
+The above commands instruct CMake to find and use the default Visual Studio
+installation to generate a Visual Studio solution and projects for the x64
+architecture. The second CMake command builds the Debug (default)
+configuration of the solution.
+
+See below for the details.
+
+#### Use `CMake` to Create the Visual Studio Project Files
+
+Change your current directory to the top of the cloned repository directory,
+create a build directory and generate the Visual Studio project files:
+
+    cd Vulkan-ValidationLayers
+    mkdir build
+    cd build
+    cmake -A x64 -DVULKAN_HEADERS_INSTALL_DIR=absolute_path_to_install_dir \
+                 -DGLSLANG_INSTALL_DIR=absolute_path_to_install_dir ..
+
+> Note: The `..` parameter tells `cmake` the location of the top of the
+> repository. If you place your build directory someplace else, you'll need to
+> specify the location of the repository top differently.
+
+The `-A` option is used to select either the "Win32" or "x64" architecture.
+
+If a generator for a specific version of Visual Studio is required, you can
+specify it for Visual Studio 2015, for example, with:
+
+    64-bit: -G "Visual Studio 14 2015 Win64"
+    32-bit: -G "Visual Studio 14 2015"
+
+See this [list](#cmake-visual-studio-generators) of other possible generators
+for Visual Studio.
+
+When generating the project files, the absolute path to a Vulkan-Headers
+install directory must be provided. This can be done by setting the
+`VULKAN_HEADERS_INSTALL_DIR` environment variable or by setting the
+`VULKAN_HEADERS_INSTALL_DIR` CMake variable with the `-D` CMake option. In
+either case, the variable should point to the installation directory of a
+Vulkan-Headers repository built with the install target.
+
+When generating the project files, the absolute path to a glslang install
+directory must be provided. This can be done by setting the
+`GLSLANG_INSTALL_DIR` environment variable or by setting the
+`GLSLANG_INSTALL_DIR` CMake variable with the `-D` CMake option. In either
+case, the variable should point to the installation directory of a glslang
+repository built with the install target.
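+
+As a hypothetical example of the environment-variable approach, where the
+paths are placeholders for your own install directories:
+
+    set VULKAN_HEADERS_INSTALL_DIR=C:\Dev\Vulkan-Headers\build\install
+    set GLSLANG_INSTALL_DIR=C:\Dev\glslang\build\install
+    cmake -A x64 ..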
+
+The above steps create a Windows solution file named
+`Vulkan-ValidationLayers.sln` in the build directory.
+
+At this point, you can build the solution from the command line or open the
+generated solution with Visual Studio.
+
+#### Build the Solution From the Command Line
+
+While still in the build directory:
+
+    cmake --build .
+
+to build the Debug configuration (the default), or:
+
+    cmake --build . --config Release
+
+to make a Release build.
+
+#### Build the Solution With Visual Studio
+
+Launch Visual Studio and open the "Vulkan-ValidationLayers.sln" solution file
+in the build folder. You may select "Debug" or "Release" from the Solution
+Configurations drop-down list. Start a build by selecting the Build->Build
+Solution menu item.
+
+#### Windows Install Target
+
+The CMake project also generates an "install" target that you can use to copy
+the primary build artifacts to a specific location using a "bin, include, lib"
+style directory structure. This may be useful for collecting the artifacts and
+providing them to another project that is dependent on them.
+
+The default location is `$CMAKE_BINARY_DIR\install`, but can be changed with
+the `CMAKE_INSTALL_PREFIX` variable when first generating the project build
+files with CMake.
+
+You can build the install target from the command line with:
+
+    cmake --build . --config Release --target install
+
+or build the `INSTALL` target from the Visual Studio solution explorer.
+
+#### Using a Loader Built from a Repository
+
+If you do need to build and use your own loader, build the Vulkan-Loader
+repository with the install target and modify your CMake invocation to add the
+location of the loader's install directory:
+
+    cmake -A x64 -DVULKAN_HEADERS_INSTALL_DIR=absolute_path_to_install_dir \
+                 -DVULKAN_LOADER_INSTALL_DIR=absolute_path_to_install_dir \
+                 -DGLSLANG_INSTALL_DIR=absolute_path_to_install_dir ..
+
+### Windows Tests and Demos
+
+After making any changes to the repository, you should perform some quick
+sanity tests, especially the included layer validation tests (vk_layer_validation_tests).
+In addition, running sample applications such as the
+[vkcube demo](https://www.github.com/KhronosGroup/Vulkan-Tools.git)
+with validation enabled is advised.
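+
+For example, after a Debug build the layer validation tests can typically be
+run from the repository root with (the exact path depends on your build
+configuration):
+
+    build\tests\Debug\vk_layer_validation_tests.exe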
+
+### Windows Notes
+
+#### CMake Visual Studio Generators
+
+The chosen generator should match one of the Visual Studio versions that you
+have installed. Generator strings that correspond to versions of Visual Studio
+include:
+
+| Build Platform               | 64-bit Generator              | 32-bit Generator        |
+|------------------------------|-------------------------------|-------------------------|
+| Microsoft Visual Studio 2015 | "Visual Studio 14 2015 Win64" | "Visual Studio 14 2015" |
+| Microsoft Visual Studio 2017 | "Visual Studio 15 2017 Win64" | "Visual Studio 15 2017" |
+
+#### Using The Vulkan Loader Library in this Repository on Windows
+
+Vulkan programs must be able to find and use the Vulkan loader
+(`vulkan-1.dll`) library as well as any other libraries the program requires.
+One convenient way to do this is to copy the required libraries into the same
+directory as the program. If you provided a loader repository location via the
+`VULKAN_LOADER_INSTALL_DIR` variable, the projects in this solution copy the
+Vulkan loader library and the "googletest" libraries to the
+`build\tests\Debug` or the `build\tests\Release` directory, which is where the
+test executables are found, depending on what configuration you built. (The
+layer validation tests use the "googletest" testing framework.)
+
+## Building On Linux
+
+### Linux Build Requirements
+
+This repository has been built and tested on the two most recent Ubuntu LTS
+versions. Currently, the oldest supported version is Ubuntu 16.04, meaning
+that the minimum officially supported C++11 compiler version is GCC 5.4.0,
+although earlier versions may work. It should be straightforward to adapt this
+repository to other Linux distributions.
+
+[CMake 3.10.2](https://cmake.org/files/v3.10/cmake-3.10.2-Linux-x86_64.tar.gz) is recommended.
+
+#### Required Package List
+
+    sudo apt-get install git build-essential libx11-xcb-dev \
+        libxkbcommon-dev libwayland-dev libxrandr-dev \
+        libegl1-mesa-dev
+
+##### Required package for Ubuntu 18.04 users
+
+For python2 users
+
+    sudo apt install python-distutils
+
+or for python3 users
+
+    sudo apt install python3-distutils
+
+### Linux Build
+
+The general approach is to run CMake to generate make files. Then either run
+CMake with the `--build` option or `make` to build from the command line.
+
+#### Linux Quick Start
+
+    cd Vulkan-ValidationLayers
+    mkdir build
+    cd build
+    cmake -DVULKAN_HEADERS_INSTALL_DIR=absolute_path_to_install_dir \
+          -DGLSLANG_INSTALL_DIR=absolute_path_to_install_dir ..
+    make
+
+See below for the details.
+
+#### Use CMake to Create the Make Files
+
+Change your current directory to the top of the cloned repository directory,
+create a build directory and generate the make files.
+
+    cd Vulkan-ValidationLayers
+    mkdir build
+    cd build
+    cmake -DCMAKE_BUILD_TYPE=Debug \
+          -DVULKAN_HEADERS_INSTALL_DIR=absolute_path_to_install_dir \
+          -DGLSLANG_INSTALL_DIR=absolute_path_to_install_dir \
+          -DCMAKE_INSTALL_PREFIX=install ..
+
+> Note: The `..` parameter tells `cmake` the location of the top of the
+> repository. If you place your `build` directory someplace else, you'll need
+> to specify the location of the repository top differently.
+
+Use `-DCMAKE_BUILD_TYPE` to specify a Debug or Release build.
+
+When generating the project files, the absolute path to a Vulkan-Headers
+install directory must be provided. This can be done by setting the
+`VULKAN_HEADERS_INSTALL_DIR` environment variable or by setting the
+`VULKAN_HEADERS_INSTALL_DIR` CMake variable with the `-D` CMake option. In
+either case, the variable should point to the installation directory of a
+Vulkan-Headers repository built with the install target.
+
+When generating the project files, the absolute path to a glslang install
+directory must be provided. This can be done by setting the
+`GLSLANG_INSTALL_DIR` environment variable or by setting the
+`GLSLANG_INSTALL_DIR` CMake variable with the `-D` CMake option. In either
+case, the variable should point to the installation directory of a glslang
+repository built with the install target.
+
+> Note: For Linux, the default value for `CMAKE_INSTALL_PREFIX` is
+> `/usr/local`, which would be used if you do not specify
+> `CMAKE_INSTALL_PREFIX`. In this case, you may need to use `sudo` to install
+> to system directories later when you run `make install`.
+
+#### Build the Project
+
+You can just run `make` to begin the build.
+
+To speed up the build on a multi-core machine, use the `-j` option for `make`
+to specify the number of cores to use for the build. For example:
+
+    make -j4
+
+You can also use
+
+    cmake --build .
+
+If your build system supports ccache, you can enable that via CMake option `-DUSE_CCACHE=On`
+
+### Linux Notes
+
+#### WSI Support Build Options
+
+By default, the repository components are built with support for the
+Vulkan-defined WSI display servers: Xcb, Xlib, and Wayland. It is recommended
+to build the repository components with support for these display servers to
+maximize their usability across Linux platforms. If it is necessary to build
+these modules without support for one of the display servers, the appropriate
+CMake option of the form `BUILD_WSI_xxx_SUPPORT` can be set to `OFF`.
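+
+For example, a build without Wayland support could hypothetically be
+configured with:
+
+    cmake -DBUILD_WSI_WAYLAND_SUPPORT=OFF ..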
+
+#### Linux Install to System Directories
+
+Installing the files resulting from your build to the systems directories is
+optional since environment variables can usually be used instead to locate the
+binaries. There are also risks with interfering with binaries installed by
+packages. If you are certain that you would like to install your binaries to
+system directories, you can proceed with these instructions.
+
+Assuming that you've built the code as described above and the current
+directory is still `build`, you can execute:
+
+    sudo make install
+
+This command installs files to `/usr/local` if no `CMAKE_INSTALL_PREFIX` is
+specified when creating the build files with CMake:
+
+- `/usr/local/lib`:  Vulkan layers shared objects
+- `/usr/local/share/vulkan/explicit_layer.d`:  Layer JSON files
+
+You may need to run `ldconfig` in order to refresh the system loader search
+cache on some Linux systems.
+
+You can further customize the installation location by setting additional
+CMake variables to override their defaults. For example, if you would like to
+install to `/tmp/build` instead of `/usr/local`, on your CMake command line
+specify:
+
+    -DCMAKE_INSTALL_PREFIX=/tmp/build
+
+Then run `make install` as before. The install step places the files in
+`/tmp/build`. This may be useful for collecting the artifacts and providing
+them to another project that is dependent on them.
+
+See the CMake documentation for more details on using these variables to
+further customize your installation.
+
+Also see the `LoaderAndLayerInterface` document in the `loader` folder of the
+Vulkan-Loader repository for more information about loader and layer
+operation.
+
+#### Linux Uninstall
+
+To uninstall the files from the system directories, you can execute:
+
+    sudo make uninstall
+
+#### Linux Tests
+
+Build and run `vk_layer_validation_tests`, found in the `tests` subdirectory.
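+
+For example, assuming the `build` directory layout described earlier, the
+tests can typically be invoked from the repository root as:
+
+    ./build/tests/vk_layer_validation_tests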
+
+#### Linux 32-bit support
+
+Usage of this repository's contents in 32-bit Linux environments is not
+officially supported. However, since this repository is supported on 32-bit
+Windows, these modules should generally work on 32-bit Linux.
+
+Here are some notes for building 32-bit targets on a 64-bit Ubuntu "reference"
+platform:
+
+If not already installed, install the following 32-bit development libraries:
+
+`gcc-multilib g++-multilib libx11-dev:i386`
+
+This list may vary depending on your distribution and which windowing systems
+you are building for.
+
+Set up your environment for building 32-bit targets:
+
+    export ASFLAGS=--32
+    export CFLAGS=-m32
+    export CXXFLAGS=-m32
+    export PKG_CONFIG_LIBDIR=/usr/lib/i386-linux-gnu
+
+Again, your PKG_CONFIG configuration may be different, depending on your distribution.
+
+Finally, rebuild the repository using `cmake` and `make`, as explained above.
+
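+A minimal sketch, using a fresh build directory so that the 32-bit flags take
+effect (this assumes 32-bit builds of the dependencies are available at the
+install directories you pass in):
+
+    mkdir build32
+    cd build32
+    cmake -DVULKAN_HEADERS_INSTALL_DIR=absolute_path_to_install_dir \
+          -DGLSLANG_INSTALL_DIR=absolute_path_to_install_dir \
+          -DCMAKE_BUILD_TYPE=Debug ..
+    make -j4
+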
+#### Using the new layers
+
+    export VK_LAYER_PATH=<path to your repository root>/build/layers
+
+You can run the `vkcube` or `vulkaninfo` applications from the Vulkan-Tools
+repository to see which driver, loader and layers are being used.
+
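+For example, a quick way to confirm that the layers are visible to the loader
+(assuming `vulkaninfo` from Vulkan-Tools is on your `PATH`):
+
+    vulkaninfo | grep VK_LAYER
+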
+## Building On Android
+
+Install the required tools for Linux and Windows covered above, then add the
+following.
+
+### Android Build Requirements
+
+Note that the minimum supported Android SDK API Level is 26, revision
+level 3.
+
+- Install [Android Studio 2.3](https://developer.android.com/studio/index.html)
+  or later.
+- From the "Welcome to Android Studio" splash screen, add the following
+  components using Configure > SDK Manager:
+  - SDK Platforms > Android 8.0.0 and newer
+  - SDK Tools > Android SDK Build-Tools
+  - SDK Tools > Android SDK Platform-Tools
+  - SDK Tools > Android SDK Tools
+  - SDK Tools > NDK
+
+#### Add Android specifics to environment
+
+For each of the below, you may need to specify a different build-tools
+version, as Android Studio will roll it forward fairly regularly.
+
+On Linux:
+
+    export ANDROID_SDK_HOME=$HOME/Android/sdk
+    export ANDROID_NDK_HOME=$HOME/Android/sdk/ndk-bundle
+    export PATH=$ANDROID_SDK_HOME:$PATH
+    export PATH=$ANDROID_NDK_HOME:$PATH
+    export PATH=$ANDROID_SDK_HOME/build-tools/26.0.3:$PATH
+
+On Windows:
+
+    set ANDROID_SDK_HOME=%LOCALAPPDATA%\Android\sdk
+    set ANDROID_NDK_HOME=%LOCALAPPDATA%\Android\sdk\ndk-bundle
+    set PATH=%LOCALAPPDATA%\Android\sdk\ndk-bundle;%PATH%
+
+On OSX:
+
+    export ANDROID_SDK_HOME=$HOME/Library/Android/sdk
+    export ANDROID_NDK_HOME=$HOME/Library/Android/sdk/ndk-bundle
+    export PATH=$ANDROID_NDK_HOME:$PATH
+    export PATH=$ANDROID_SDK_HOME/build-tools/26.0.3:$PATH
+
+Note: If `jarsigner` is missing from your platform, you can find it in the
+Android Studio install or in your Java installation. If you do not have Java,
+you can get it with something like the following:
+
+    sudo apt-get install openjdk-8-jdk
+
+#### Additional OSX System Requirements
+
+Tested on OSX version 10.13.3
+
+Set up Homebrew and components
+
+- Follow instructions on [brew.sh](http://brew.sh) to get Homebrew installed.
+
+      /usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
+
+- Ensure Homebrew is at the beginning of your PATH:
+
+      export PATH=/usr/local/bin:$PATH
+
+- Add packages with the following:
+
+      brew install python
+
+### Android Build
+
+There are two options for building the Android layers: using the SPIRV tools
+provided as part of the Android NDK, or using upstream sources. To build with
+the SPIRV tools from the NDK, remove the build-android/third_party directory
+created by running update_external_sources_android.sh (or avoid running
+update_external_sources_android.sh in the first place). The following script
+builds everything in the repository for Android, including validation layers,
+tests, demos, and APK packaging; it retrieves and uses the upstream SPIRV
+tools:
+
+    cd build-android
+    ./build_all.sh
+
+Resulting validation layer binaries will be in build-android/libs. Test and
+demo APKs can be installed on production devices with:
+
+    ./install_all.sh [-s <serial number>]
+
+Note that there are no equivalent scripts on Windows yet; that work remains to
+be completed. The following per-platform commands can be used for layer-only
+builds:
+
+#### Linux and OSX
+
+Follow the setup steps for Linux or OSX above, then from your terminal:
+
+    cd build-android
+    ./update_external_sources_android.sh --no-build
+    ndk-build -j4
+
+#### Windows
+
+Follow the setup steps for Windows above, then from Developer Command Prompt
+for VS2015:
+
+    cd build-android
+    update_external_sources_android.bat
+    ndk-build
+
+### Android Tests and Demos
+
+After making any changes to the repository you should perform some quick
+sanity tests, including the layer validation tests and the vkcube
+demo with validation enabled.
+
+#### Run Layer Validation Tests
+
+Use the following steps to build, install, and run the layer validation tests
+for Android:
+
+    cd build-android
+    ./build_all.sh
+    adb install -r bin/VulkanLayerValidationTests.apk
+    adb shell am start com.example.VulkanLayerValidationTests/android.app.NativeActivity
+
+Alternatively, you can use the test_APK script to install and run the layer
+validation tests:
+
+    test_APK.sh -s <serial number> -p <platform name> -f <gtest_filter>
+
+## Building on MacOS
+
+### MacOS Build Requirements
+
+Tested on OSX version 10.12.6
+
+[CMake 3.10.2](https://cmake.org/files/v3.10/cmake-3.10.2-Darwin-x86_64.tar.gz) is recommended.
+
+Set up Homebrew and components
+
+- Follow instructions on [brew.sh](http://brew.sh) to get Homebrew installed.
+
+      /usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
+
+- Ensure Homebrew is at the beginning of your PATH:
+
+      export PATH=/usr/local/bin:$PATH
+
+- Add packages with the following (the list may need refinement):
+
+      brew install python python3 git
+
+### Clone the Repository
+
+Clone the Vulkan-ValidationLayers repository:
+
+    git clone https://github.com/KhronosGroup/Vulkan-ValidationLayers.git
+
+### MacOS build
+
+#### CMake Generators
+
+This repository uses CMake to generate build or project files that are then
+used to build the repository. The CMake generators explicitly supported in
+this repository are:
+
+- Unix Makefiles
+- Xcode
+
+#### Building with the Unix Makefiles Generator
+
+This generator is the default generator, so all that is needed for a debug
+build is:
+
+    mkdir build
+    cd build
+    cmake -DVULKAN_HEADERS_INSTALL_DIR=absolute_path_to_install_dir \
+          -DGLSLANG_INSTALL_DIR=absolute_path_to_install_dir \
+          -DCMAKE_BUILD_TYPE=Debug ..
+    make
+
+To speed up the build on a multi-core machine, use the `-j` option for `make`
+to specify the number of cores to use for the build. For example:
+
+    make -j4
+
+#### Building with the Xcode Generator
+
+To create and open an Xcode project:
+
+    mkdir build-xcode
+    cd build-xcode
+    cmake -DVULKAN_HEADERS_INSTALL_DIR=absolute_path_to_install_dir \
+          -DGLSLANG_INSTALL_DIR=absolute_path_to_install_dir \
+          -GXcode ..
+    open VULKAN.xcodeproj
+
+Within Xcode, you can select Debug or Release builds in the Build Settings of the project.
+
+#### Using the new layers on MacOS
+
+    export VK_LAYER_PATH=<path to your repository root>/build/layers
+
+You can run the `vulkaninfo` application from the Vulkan-Tools repository to
+see which driver, loader and layers are being used.
+
+### MacOS Tests
+
+After making any changes to the repository, you should build and run the included
+vk_layer_validation_tests located in the tests subdirectory.
+
+These tests require the path to an ICD to be specified manually in order to run properly on MacOS.
+
+You can use:
+
+- MoltenVK ICD
+- Mock ICD
+
+#### Using MoltenVK ICD
+
+Clone and build the [MoltenVK](https://github.com/KhronosGroup/MoltenVK) repository.
+
+You will have to direct the loader from Vulkan-Loader to the MoltenVK ICD:
+
+    export VK_ICD_FILENAMES=<path to MoltenVK repository>/Package/Latest/MoltenVK/macOS/MoltenVK_icd.json
+
+#### Using Mock ICD
+
+Clone and build the [Vulkan-Tools](https://github.com/KhronosGroup/Vulkan-Tools) repository.
+
+You will have to direct the loader from Vulkan-Loader to the Mock ICD:
+
+    export VK_ICD_FILENAMES=<path to Vulkan-Tools repository>/build/icd/VkICD_mock_icd.json
+
+#### Running the Tests
+
+In a terminal, change to the build/tests directory and run the vk_layer_validation_tests.
+
+Further testing and sanity checking can be achieved by running the vkcube and
+vulkaninfo applications in the
+[Vulkan-Tools](https://github.com/KhronosGroup/Vulkan-Tools)
+repository.
+
+Note that MoltenVK is still adding Vulkan features and some tests may fail.
diff --git a/src/third_party/vulkan-validation-layers/src/CMakeLists.txt b/src/third_party/vulkan-validation-layers/src/CMakeLists.txt
new file mode 100644
index 0000000..e4c26bc
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/CMakeLists.txt
@@ -0,0 +1,360 @@
+# ~~~
+# Copyright (c) 2014-2019 Valve Corporation
+# Copyright (c) 2014-2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ~~~
+
+# CMake project initialization ---------------------------------------------------------------------------------------------------
+# This section contains pre-project() initialization, and ends with the project() command.
+
+cmake_minimum_required(VERSION 3.10.2)
+
+# Apple: Must be set before enable_language() or project() as it may influence configuration of the toolchain and flags.
+set(CMAKE_OSX_DEPLOYMENT_TARGET "10.12" CACHE STRING "Minimum OS X deployment version")
+
+project(Vulkan-ValidationLayers)
+
+enable_testing()
+
+# User-interface declarations ----------------------------------------------------------------------------------------------------
+# This section contains variables that affect development GUIs (e.g. CMake GUI and IDEs), such as option(), folders, and variables
+# with the CACHE property.
+
+# API_NAME allows renaming builds to avoid conflicts with installed SDKs.  It is referenced by layers/vk_loader_platform.h
+set(API_NAME "Vulkan" CACHE STRING "API name to use when building")
+string(TOLOWER ${API_NAME} API_LOWERCASE)
+add_definitions(-DAPI_NAME="${API_NAME}")
+
+set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_SOURCE_DIR}/cmake")
+
+find_package(PythonInterp 3 QUIET)
+
+if (TARGET Vulkan::Headers)
+    message(STATUS "Using Vulkan headers from Vulkan::Headers target")
+    get_target_property(VulkanHeaders_INCLUDE_DIRS Vulkan::Headers INTERFACE_INCLUDE_DIRECTORIES)
+    get_target_property(VulkanRegistry_DIR Vulkan::Registry INTERFACE_INCLUDE_DIRECTORIES)
+else()
+    find_package(VulkanHeaders REQUIRED)
+
+    # xxxnsubtil: this should eventually be replaced by exported targets
+    add_library(Vulkan-Headers INTERFACE)
+    target_include_directories(Vulkan-Headers INTERFACE ${VulkanHeaders_INCLUDE_DIRS})
+    add_library(Vulkan::Headers ALIAS Vulkan-Headers)
+endif()
+
+option(USE_CCACHE "Use ccache" OFF)
+if(USE_CCACHE)
+    find_program(CCACHE_FOUND ccache)
+    if(CCACHE_FOUND)
+        set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ccache)
+    endif()
+endif()
+
+set(CMAKE_POSITION_INDEPENDENT_CODE ON)
+
+include(GNUInstallDirs)
+
+if(WIN32 AND CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT)
+    # Windows: if install locations not set by user, set install prefix to "<build_dir>\install".
+    set(CMAKE_INSTALL_PREFIX "${CMAKE_BINARY_DIR}/install" CACHE PATH "default install path" FORCE)
+endif()
+
+if(APPLE)
+    # CMake versions 3 or later need CMAKE_MACOSX_RPATH defined. This avoids the CMP0042 policy message.
+    set(CMAKE_MACOSX_RPATH 1)
+endif()
+
+# Enable IDE GUI folders
+set_property(GLOBAL PROPERTY USE_FOLDERS ON)
+# "Helper" targets that don't have interesting source code should set their FOLDER property to this
+set(LAYERS_HELPER_FOLDER "Helper Targets")
+
+# Options for Linux only
+if(UNIX AND NOT APPLE) # i.e. Linux
+    include(FindPkgConfig)
+    option(BUILD_WSI_XCB_SUPPORT "Build XCB WSI support" ON)
+    option(BUILD_WSI_XLIB_SUPPORT "Build Xlib WSI support" ON)
+    option(BUILD_WSI_WAYLAND_SUPPORT "Build Wayland WSI support" ON)
+    set(DEMOS_WSI_SELECTION "XCB" CACHE STRING "Select WSI target for demos (XCB, XLIB, WAYLAND, DISPLAY)")
+
+    if(BUILD_WSI_XCB_SUPPORT)
+        find_package(XCB REQUIRED)
+    endif()
+
+    if(BUILD_WSI_XLIB_SUPPORT)
+        find_package(X11 REQUIRED)
+    endif()
+
+    if(BUILD_WSI_WAYLAND_SUPPORT)
+        find_package(Wayland REQUIRED)
+        include_directories(${WAYLAND_CLIENT_INCLUDE_DIR})
+    endif()
+endif()
+
+# Platform-specific compiler switches
+if(CMAKE_COMPILER_IS_GNUCC OR CMAKE_C_COMPILER_ID MATCHES "Clang")
+    add_compile_options(-Wall
+                        -Wextra
+                        -Wno-unused-parameter
+                        -Wno-missing-field-initializers
+                        -fno-strict-aliasing
+                        -fno-builtin-memcmp
+                        -fvisibility=hidden)
+
+    # Treat warnings as errors for versions of GCC and Clang that are shipped on Ubuntu 18.04 or older.
+    if((CMAKE_COMPILER_IS_GNUCXX AND NOT (CMAKE_CXX_COMPILER_VERSION VERSION_GREATER 7.3.0)) OR
+       (("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang") AND NOT (CMAKE_CXX_COMPILER_VERSION VERSION_GREATER 3.8.0)))
+        add_compile_options(-Werror)
+    endif()
+
+    set(CMAKE_C_STANDARD 99)
+    set(CMAKE_CXX_STANDARD 11)
+    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-rtti")
+
+    # For GCC version 7.1 or greater, we need to disable the implicit fallthrough warning since there's no consistent way to satisfy
+    # all compilers until they all accept the C++17 standard.
+    if(CMAKE_COMPILER_IS_GNUCC AND NOT (CMAKE_CXX_COMPILER_VERSION LESS 7.1))
+        add_compile_options(-Wimplicit-fallthrough=0)
+    endif()
+elseif(MSVC)
+    # Treat warnings as errors
+    add_compile_options("/WX")
+    # Disable RTTI
+    add_compile_options("/GR-")
+    # Warn about nested declarations
+    add_compile_options("/w34456")
+    # Warn about potentially uninitialized variables
+    add_compile_options("/w34701")
+    add_compile_options("/w34703")
+    # Warn about different indirection types.
+    add_compile_options("/w34057")
+    # Warn about signed/unsigned mismatch.
+    add_compile_options("/w34245")
+endif()
+
+if(TARGET gtest OR IS_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/external/googletest)
+    option(BUILD_TESTS "Build tests" ON)
+else()
+    option(BUILD_TESTS "Build tests" OFF)
+endif()
+
+option(INSTALL_TESTS "Install tests" OFF)
+option(BUILD_LAYERS "Build layers" ON)
+option(BUILD_LAYER_SUPPORT_FILES "Generate layer files" OFF) # For generating files when not building layers
+
+if(BUILD_TESTS OR BUILD_LAYERS)
+
+    set(GLSLANG_INSTALL_DIR "GLSLANG-NOTFOUND" CACHE PATH "Absolute path to a glslang install directory")
+    if(NOT GLSLANG_INSTALL_DIR AND NOT DEFINED ENV{GLSLANG_INSTALL_DIR} AND NOT TARGET glslang)
+        message(FATAL_ERROR "Must define location of glslang binaries -- see BUILD.md")
+    endif()
+
+    # GLSLANG_INSTALL_DIR is used as the path to all dependent projects' install dirs
+    # CMake command line option overrides environment variable
+    if(NOT GLSLANG_INSTALL_DIR)
+        set(GLSLANG_INSTALL_DIR $ENV{GLSLANG_INSTALL_DIR})
+    endif()
+
+    if (NOT TARGET glslang)
+        message(STATUS "Using glslang install located at ${GLSLANG_INSTALL_DIR}")
+        set(GLSLANG_SEARCH_PATH "${GLSLANG_INSTALL_DIR}/lib")
+        set(GLSLANG_DEBUG_SEARCH_PATH "${GLSLANG_INSTALL_DIR}/lib")
+        set(GLSLANG_SPIRV_INCLUDE_DIR "${GLSLANG_INSTALL_DIR}/include/glslang" CACHE PATH "Path to glslang spirv headers")
+
+        find_library(GLSLANG_LIB NAMES glslang HINTS ${GLSLANG_SEARCH_PATH})
+        find_library(OGLCompiler_LIB NAMES OGLCompiler HINTS ${GLSLANG_SEARCH_PATH})
+        find_library(OSDependent_LIB NAMES OSDependent HINTS ${GLSLANG_SEARCH_PATH})
+        find_library(HLSL_LIB NAMES HLSL HINTS ${GLSLANG_SEARCH_PATH})
+        find_library(SPIRV_LIB NAMES SPIRV HINTS ${GLSLANG_SEARCH_PATH})
+        find_library(SPIRV_REMAPPER_LIB NAMES SPVRemapper HINTS ${GLSLANG_SEARCH_PATH})
+
+        if(WIN32)
+            add_library(glslang STATIC IMPORTED)
+            add_library(OGLCompiler STATIC IMPORTED)
+            add_library(OSDependent STATIC IMPORTED)
+            add_library(HLSL STATIC IMPORTED)
+            add_library(SPIRV STATIC IMPORTED)
+            add_library(SPVRemapper STATIC IMPORTED)
+
+            find_library(GLSLANG_DLIB NAMES glslangd HINTS ${GLSLANG_DEBUG_SEARCH_PATH})
+            find_library(OGLCompiler_DLIB NAMES OGLCompilerd HINTS ${GLSLANG_DEBUG_SEARCH_PATH})
+            find_library(OSDependent_DLIB NAMES OSDependentd HINTS ${GLSLANG_DEBUG_SEARCH_PATH})
+            find_library(HLSL_DLIB NAMES HLSLd HINTS ${GLSLANG_DEBUG_SEARCH_PATH})
+            find_library(SPIRV_DLIB NAMES SPIRVd HINTS ${GLSLANG_DEBUG_SEARCH_PATH})
+            find_library(SPIRV_REMAPPER_DLIB NAMES SPVRemapperd HINTS ${GLSLANG_DEBUG_SEARCH_PATH})
+
+            set_target_properties(glslang
+                                  PROPERTIES IMPORTED_LOCATION
+                                             "${GLSLANG_LIB}"
+                                             IMPORTED_LOCATION_DEBUG
+                                             "${GLSLANG_DLIB}")
+            set_target_properties(OGLCompiler
+                                  PROPERTIES IMPORTED_LOCATION
+                                             "${OGLCompiler_LIB}"
+                                             IMPORTED_LOCATION_DEBUG
+                                             "${OGLCompiler_DLIB}")
+            set_target_properties(OSDependent
+                                  PROPERTIES IMPORTED_LOCATION
+                                             "${OSDependent_LIB}"
+                                             IMPORTED_LOCATION_DEBUG
+                                             "${OSDependent_DLIB}")
+            set_target_properties(HLSL
+                                  PROPERTIES IMPORTED_LOCATION
+                                             "${HLSL_LIB}"
+                                             IMPORTED_LOCATION_DEBUG
+                                             "${HLSL_DLIB}")
+            set_target_properties(SPIRV
+                                  PROPERTIES IMPORTED_LOCATION
+                                             "${SPIRV_LIB}"
+                                             IMPORTED_LOCATION_DEBUG
+                                             "${SPIRV_DLIB}")
+            set_target_properties(SPVRemapper
+                                  PROPERTIES IMPORTED_LOCATION
+                                             "${SPIRV_REMAPPER_LIB}"
+                                             IMPORTED_LOCATION_DEBUG
+                                             "${SPIRV_REMAPPER_DLIB}")
+
+            set(GLSLANG_LIBRARIES glslang OGLCompiler OSDependent HLSL SPIRV SPVRemapper ${SPIRV_TOOLS_LIBRARIES})
+        else()
+            set(GLSLANG_LIBRARIES
+                ${GLSLANG_LIB}
+                ${OGLCompiler_LIB}
+                ${OSDependent_LIB}
+                ${HLSL_LIB}
+                ${SPIRV_LIB}
+                ${SPIRV_REMAPPER_LIB}
+                ${SPIRV_TOOLS_LIBRARIES})
+        endif()
+    else()
+        set(GLSLANG_SPIRV_INCLUDE_DIR "${glslang_SOURCE_DIR}" CACHE PATH "Path to glslang spirv headers")
+        set(GLSLANG_LIBRARIES glslang SPIRV SPVRemapper)
+    endif()
+
+    # spirv-tools
+    if (NOT TARGET SPIRV-Tools)
+        set(SPIRV_TOOLS_BINARY_ROOT "${GLSLANG_INSTALL_DIR}/lib"
+            CACHE PATH "User defined path to the SPIRV-Tools binaries for this project")
+        set(SPIRV_TOOLS_OPT_BINARY_ROOT "${GLSLANG_INSTALL_DIR}/lib"
+            CACHE PATH "User defined path to the SPIRV-Tools-opt binaries for this project")
+        set(SPIRV_TOOLS_INCLUDE_DIR "${GLSLANG_INSTALL_DIR}/include" CACHE PATH "Path to spirv tools headers")
+        set(SPIRV_TOOLS_SEARCH_PATH "${GLSLANG_INSTALL_DIR}/lib")
+        set(SPIRV_TOOLS_DEBUG_SEARCH_PATH "${GLSLANG_INSTALL_DIR}/lib")
+        set(SPIRV_TOOLS_OPT_SEARCH_PATH "${GLSLANG_INSTALL_DIR}/lib")
+        set(SPIRV_TOOLS_OPT_DEBUG_SEARCH_PATH "${GLSLANG_INSTALL_DIR}/lib")
+
+        find_library(SPIRV_TOOLS_LIB NAMES SPIRV-Tools HINTS ${SPIRV_TOOLS_SEARCH_PATH})
+        find_library(SPIRV_TOOLS_OPT_LIB NAMES SPIRV-Tools-opt HINTS ${SPIRV_TOOLS_OPT_SEARCH_PATH})
+
+        if(WIN32)
+            add_library(SPIRV-Tools-opt STATIC IMPORTED)
+            add_library(SPIRV-Tools STATIC IMPORTED)
+
+            find_library(SPIRV_TOOLS_DLIB NAMES SPIRV-Toolsd HINTS ${SPIRV_TOOLS_DEBUG_SEARCH_PATH})
+            find_library(SPIRV_TOOLS_OPT_DLIB NAMES SPIRV-Tools-optd HINTS ${SPIRV_TOOLS_OPT_DEBUG_SEARCH_PATH})
+
+            set_target_properties(SPIRV-Tools
+                                  PROPERTIES IMPORTED_LOCATION
+                                             "${SPIRV_TOOLS_LIB}"
+                                             IMPORTED_LOCATION_DEBUG
+                                             "${SPIRV_TOOLS_DLIB}")
+            set_target_properties(SPIRV-Tools-opt
+                                  PROPERTIES IMPORTED_LOCATION
+                                             "${SPIRV_TOOLS_OPT_LIB}"
+                                             IMPORTED_LOCATION_DEBUG
+                                             "${SPIRV_TOOLS_OPT_DLIB}")
+
+            set(SPIRV_TOOLS_LIBRARIES SPIRV-Tools-opt SPIRV-Tools)
+        else()
+            set(SPIRV_TOOLS_LIBRARIES ${SPIRV_TOOLS_OPT_LIB} ${SPIRV_TOOLS_LIB})
+        endif()
+    else()
+        set(SPIRV_TOOLS_LIBRARIES SPIRV-Tools SPIRV-Tools-opt)
+        set(SPIRV_TOOLS_INCLUDE_DIR "${spirv-tools_SOURCE_DIR}/include" CACHE PATH "Path to spirv tools headers")
+    endif()
+
+    set(GLSLANG_LIBRARIES ${GLSLANG_LIBRARIES} ${SPIRV_TOOLS_LIBRARIES})
+endif()
+
+# Generate dependent helper files ------------------------------------------------------------------------------------------------
+
+set(SCRIPTS_DIR "${PROJECT_SOURCE_DIR}/scripts")
+
+# VkLayer_utils library ----------------------------------------------------------------------------------------------------------
+# For Windows, we use a static lib because the Windows loader has a fairly restrictive loader search path that can't be easily
+# modified to point it to the same directory that contains the layers. TODO: This should not be a library -- in future, include
+# files directly in layers.
+
+add_library(VkLayer_utils
+            STATIC
+            layers/vk_layer_config.cpp
+            layers/vk_layer_extension_utils.cpp
+            layers/vk_layer_utils.cpp
+            layers/vk_format_utils.cpp)
+target_link_libraries(VkLayer_utils PUBLIC Vulkan::Headers)
+if(WIN32)
+    target_compile_definitions(VkLayer_utils PUBLIC _CRT_SECURE_NO_WARNINGS)
+endif()
+install(TARGETS VkLayer_utils DESTINATION ${CMAKE_INSTALL_LIBDIR})
+set_target_properties(VkLayer_utils PROPERTIES LINKER_LANGUAGE CXX)
+target_include_directories(VkLayer_utils
+                           PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/layers
+                                  ${CMAKE_CURRENT_SOURCE_DIR}/layers/generated
+                                  ${CMAKE_CURRENT_BINARY_DIR}
+                                  ${CMAKE_CURRENT_BINARY_DIR}/layers
+                                  ${PROJECT_BINARY_DIR}
+                                  ${VulkanHeaders_INCLUDE_DIR})
+
+# uninstall target ---------------------------------------------------------------------------------------------------------------
+if(NOT TARGET uninstall)
+    configure_file("${CMAKE_CURRENT_SOURCE_DIR}/cmake/cmake_uninstall.cmake.in"
+                   "${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake"
+                   IMMEDIATE
+                   @ONLY)
+    add_custom_target(uninstall COMMAND ${CMAKE_COMMAND} -P ${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake)
+    set_target_properties(uninstall PROPERTIES FOLDER ${LAYERS_HELPER_FOLDER})
+endif()
+
+# Fetch header version from vulkan_core.h ----------------------------------------------------------------------------------------
+file(STRINGS "${VulkanHeaders_INCLUDE_DIRS}/vulkan/vulkan_core.h" lines REGEX "^#define VK_HEADER_VERSION [0-9]+")
+list(LENGTH lines len)
+if(${len} EQUAL 1)
+    string(REGEX MATCHALL
+                 "[0-9]+"
+                 vk_header_version
+                 ${lines})
+else()
+    message(FATAL_ERROR "Unable to fetch version from vulkan_core.h")
+endif()
+
+# Optional codegen target --------------------------------------------------------------------------------------------------------
+if(PYTHONINTERP_FOUND)
+    add_custom_target(VulkanVL_generated_source
+                      COMMAND ${PYTHON_EXECUTABLE} ${PROJECT_SOURCE_DIR}/scripts/generate_source.py
+                              ${VulkanRegistry_DIR} --incremental
+                      WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}/layers/generated
+                      )
+else()
+    message("WARNING: VulkanVL_generated_source target requires python 3")
+endif()
+
+# Add subprojects ----------------------------------------------------------------------------------------------------------------
+
+add_subdirectory(external)
+if(BUILD_TESTS)
+    add_subdirectory(tests)
+endif()
+
+if(BUILD_LAYERS OR BUILD_LAYER_SUPPORT_FILES)
+    add_subdirectory(layers)
+endif()
diff --git a/src/third_party/vulkan-validation-layers/src/CODE_OF_CONDUCT.md b/src/third_party/vulkan-validation-layers/src/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000..a11610b
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/CODE_OF_CONDUCT.md
@@ -0,0 +1 @@
+A reminder that this issue tracker is managed by the Khronos Group. Interactions here should follow the Khronos Code of Conduct (https://www.khronos.org/developers/code-of-conduct), which prohibits aggressive or derogatory language. Please keep the discussion friendly and civil.
diff --git a/src/third_party/vulkan-validation-layers/src/CONTRIBUTING.md b/src/third_party/vulkan-validation-layers/src/CONTRIBUTING.md
new file mode 100644
index 0000000..0790d88
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/CONTRIBUTING.md
@@ -0,0 +1,171 @@
+# How to Contribute to Vulkan Source Repositories
+
+## **The Repository**
+
+The source code for the Vulkan-ValidationLayers components is sponsored by Khronos and LunarG.
+* [Khronos Vulkan-ValidationLayers](https://github.com/KhronosGroup/Vulkan-ValidationLayers)
+
+
+### **The Vulkan Ecosystem Needs Your Help**
+
+The Vulkan validation layers are one of the larger and more important components in this repository.
+While there are often active and organized development efforts underway to improve their coverage,
+there are always opportunities for anyone to help by contributing additional validation layer checks
+and tests for these validation checks.
+
+There are several ways to identify areas of need:
+* Examine the [issues list](https://github.com/KhronosGroup/Vulkan-ValidationLayers/issues)
+in this repository and look for issues that are of interest
+* Alternatively, run the `vk_validation_stats.py` script (in the scripts directory) with the `-todo`
+command line argument to see a list of as-yet unimplemented validation checks.
+* Having selected a validation check to work on, it is often efficient to implement a block of related checks
+at once. Refer to the validation database output from `vk_validation_stats.py` (available in text, html,
+or csv format) to identify related checks that may be implemented simultaneously.
+
+Of course, if you have your own work in mind, please open an issue to describe it and assign it to yourself.
+Finally, please feel free to contact any of the developers that are actively contributing should you
+wish to coordinate further.
+Please see the [section about Validation Layers](#special-considerations-for-validation-layers)
+later on this page.
+
+Repository Issue labels:
+
+* _Bug_:          These issues refer to invalid or broken functionality and are the highest priority.
+* _Incomplete_:   These issues refer to missing validation checks that users have encountered during application
+development that would have been directly useful, and are high priority.
+* _Enhancement_:  These issues refer to ideas for extending or improving the validation layers.
+* _Triaged_:      These issues have been assessed and/or reviewed
+
+It is the maintainers' goal for all issues to be assigned or triaged within one business day of their submission. If you choose
+to work on an issue that is assigned, simply coordinate with the current assignee.
+
+### **How to Submit Fixes**
+
+* **Ensure that the bug was not already reported or fixed** by searching on GitHub under Issues
+  and Pull Requests.
+* Use the existing GitHub forking and pull request process.
+  This will involve [forking the repository](https://help.github.com/articles/fork-a-repo/),
+  creating a branch with your commits, and then [submitting a pull request](https://help.github.com/articles/using-pull-requests/).
+* Please read and adhere to the style and process [guidelines](#coding-conventions-and-formatting) enumerated below.
+* Please base your fixes on the master branch.  SDK branches are generally not updated except for critical fixes needed to repair an SDK release.
+* The resulting Pull Request will be assigned to a repository maintainer. It is the maintainer's responsibility to ensure the Pull Request
+  passes the Google/LunarG internal CI processes. Once the Pull Request has been approved and is passing internal CI, a repository maintainer
+  will merge the PR.
+
+
+#### **Coding Conventions and Formatting**
+* Use the **[Google style guide](https://google.github.io/styleguide/cppguide.html)** for source code with the following exceptions:
+    * The column limit is 132 (as opposed to the default value 80). The clang-format tool will handle this. See below.
+    * The indent is 4 spaces instead of the default 2 spaces. Access modifier (e.g. `public:`) is indented 2 spaces instead of the
+      default 1 space. Again, the clang-format tool will handle this.
+    * The C++ file extension is `*.cpp` instead of the default `*.cc`.
+    * If you can justify a reason for violating a rule in the guidelines, then you are free to do so. Be prepared to defend your
+decision during code review. This should be used responsibly. An example of a bad reason is "I don't like that rule." An example of
+a good reason is "This violates the style guide, but it improves type safety."
+
+* Run **clang-format** on your changes to maintain consistent formatting
+    * There are `.clang-format` files present in the repository to define clang-format settings
+      which are found and used automatically by clang-format.
+    * **clang-format** binaries are available from the LLVM organization, here: [LLVM](https://clang.llvm.org/). Our CI system (Travis-CI)
+      currently uses clang-format version 5.0.0 to check that the lines of code you have changed are formatted properly. It is
+      recommended that you use the same version to format your code prior to submission.
+    * A sample git workflow may look like:
+
+>        # Make changes to the source.
+>        $ git add -u .
+>        $ git clang-format --style=file
+>        # Check to see if clang-format made any changes and if they are OK.
+>        $ git add -u .
+>        $ git commit
+
+* **Commit Messages**
+    * Limit the subject line to 64 characters -- this allows the information to display correctly in git/GitHub logs
+    * Begin subject line with a one-word component description followed by a colon (e.g. build, docs, layers, tests, etc.)
+    * Separate subject from body with a blank line
+    * Wrap the body at 72 characters
+    * Capitalize the subject line
+    * Do not end the subject line with a period
+    * Use the body to explain what and why vs. how
+    * Use the imperative mode in the subject line. This just means to write it as a command (e.g. Fix the sprocket)
+
+Strive for commits that implement a single or related set of functionality, using as many commits as is necessary (more is better).
+That said, please ensure that the repository compiles and passes tests without error for each commit in your pull request.  Note
+that to be accepted into the repository, the pull request must [pass all tests](#testing-your-changes) on all supported platforms
+-- the automatic Github Travis and AppVeyor continuous integration features will assist in enforcing this requirement.
+
+#### **Testing Your Changes**
+* Run the included layer validation tests (vk_layer_validation_tests) in the repository before and after each of your commits to check for any regressions.
+
+* Write additional layer validation tests that explicitly exercise your changes.
+
+* Feel free to subject your code changes to other tests as well!
+
+#### **GitHub Cloud CI Testing**
+Pull Requests to GitHub are tested in the cloud on Linux and Windows VMs. The Linux VMs use [Travis CI](https://travis-ci.org/KhronosGroup/Vulkan-ValidationLayers) with the sequence of commands driven by the [.travis.yml](https://github.com/KhronosGroup/Vulkan-ValidationLayers/blob/master/.travis.yml) file. The Windows VMs use [AppVeyor](https://ci.appveyor.com/project/Khronoswebmaster/vulkan-validationlayers/branch/master) with the sequence of commands driven by the [.appveyor.yml](https://github.com/KhronosGroup/Vulkan-ValidationLayers/blob/master/.appveyor.yml) file.
+
+The Linux testing includes iterating on all of the validation layer tests over multiple [different device](https://github.com/KhronosGroup/Vulkan-ValidationLayers/tree/master/tests/device_profiles) profiles using the [devsim layer](https://github.com/LunarG/VulkanTools/tree/master/layersvt) in combination with the [mock icd](https://github.com/KhronosGroup/Vulkan-Tools/tree/master/icd). This is a fast way to simulate testing across different devices. Any new tests must pass across all device profiles.
+
+#### **Special Considerations for Validation Layers**
+* **Validation Tests:**  If you are submitting a change that adds a new validation check, you should also construct a "negative" test function.
+The negative test function purposely violates the validation rule that the new validation check is looking for.
+The test should cause your new validation check to identify the violation and issue a validation error report.
+And finally, the test should check that the validation error report is generated and consider the test as "passing"
+if the report is received.  Otherwise, the test should indicate "failure".
+This new test should be added to the validation layer test program in the `tests` directory and contributed
+at the same time as the new validation check itself. There are many existing validation tests in this directory that can be
+used as a starting point.
+* **Validation Checks:**  Validation checks are carried out by the Khronos Validation layer. The CoreChecks validation object
+contains checks that require significant amounts of application state to carry out. In contrast, the stateless validation object contains
+checks that require (mostly) no state at all. Please inquire if you are unsure of the location for your contribution. The other
+validation objects (thread_safety, object lifetimes) are more special-purpose and are mostly code-generated from the specification.
+* **Validation Error/Warning Messages:**  Strive to give specific information describing the particulars of the failure, including
+output of all the applicable Vulkan Objects and related values. Also, when messages can give suggestions about _how_ to
+fix the problem, ensure that they do so to better assist the user.
+* **Validation Statistics:** The `vk_validation_stats.py` script (in the scripts directory) inspects the layer and test source files
+and reports a variety of statistics on validation completeness and correctness. Before submitting a change you should run this
+script with the consistency check (`-c`) argument to ensure that your changes have not introduced any inconsistencies in the code.
+* **Generated Source Code:** The `layers/generated` directory contains source code that is created by several
+generator scripts in the `scripts` directory. All changes to these scripts _must_ be submitted with the
+corresponding generated output to keep the repository self-consistent. This requirement is enforced by both
+Travis CI and AppVeyor test configurations. Regenerate source files after modifying any of the generator
+scripts and before building and testing your changes. More details can be found in
+[BUILD.md](https://github.com/KhronosGroup/Vulkan-ValidationLayers/blob/master/BUILD.md#generated-source-code).
+
+#### Coding Conventions for [CMake](http://cmake.org) files
+
+* When editing configuration files for CMake, follow the style conventions of the surrounding code.
+  * The column limit is 132.
+  * The indent is 4 spaces.
+  * CMake functions are lower-case.
+  * Variable and keyword names are upper-case.
+* The format is defined by
+  [cmake-format](https://github.com/cheshirekow/cmake_format)
+  using the `cmake-format.py` file in the repository to define the settings.
+  See the cmake-format page for information about its simple markup for comments.
+* Disable reformatting of a block of comment lines by inserting
+  a `# ~~~` comment line before and after that block.
+* Disable any formatting of a block of lines by surrounding that block with
+  `# cmake-format: off` and `# cmake-format: on` comment lines.
+* To install: `sudo pip install cmake_format`
+* To run: `cmake-format --in-place $FILENAME`
+* **IMPORTANT (June 2018)** cmake-format v0.3.6 has a
+  [bug](https://github.com/cheshirekow/cmake_format/issues/50)
+  that can corrupt the formatting of comment lines in CMake files.
+  A workaround is to use the following command _before_ running cmake-format:
+  `sed --in-place='' 's/^  *#/#/' $FILENAME`
+
+### **Contributor License Agreement (CLA)**
+
+You will be prompted with a one-time "click-through" CLA dialog as part of submitting your pull request
+or other contribution to GitHub.
+
+### **License and Copyrights**
+
+All contributions made to the Vulkan-ValidationLayers repository are Khronos branded and as such,
+any new files need to have the Khronos license (Apache 2.0 style) and copyright included.
+Please see an existing file in this repository for an example.
+
+All contributions made to the LunarG repositories are to be made under the Apache 2.0 license
+and any new files need to include this license and any applicable copyrights.
+
+You can include your individual copyright after any existing copyrights.
diff --git a/src/third_party/vulkan-validation-layers/src/GOVERNANCE.md b/src/third_party/vulkan-validation-layers/src/GOVERNANCE.md
new file mode 100644
index 0000000..f503dfe
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/GOVERNANCE.md
@@ -0,0 +1,53 @@
+## Vulkan Validation Layers Repository Management
+
+# **Open Source Project – Objectives**
+
+* Alignment with the Vulkan Specification
+  - The goal is for validation layer behavior to enforce the Vulkan specification on applications. Questions on specification
+interpretations may require consulting with the Khronos Vulkan Workgroup for resolution.
+    - [Core Specification](https://www.khronos.org/registry/vulkan/specs/1.0/html/vkspec.html)
+    - [Header Files](https://www.khronos.org/registry/vulkan/#headers)
+    - [SDK Resources](https://vulkan.lunarg.com)
+* ISV Enablement
+  - Updates of validation layer functionality should be available in a timely fashion
+  - Every effort will be made to be responsive to ISV issues with validation layers
+* Cross Platform Compatibility
+  - Google and LunarG collaboration:
+    - Google: Monitor for Android
+    - LunarG: Monitor for desktop (Windows and Linux)
+    - Continuous Integration: HW test farms operated by Google and LunarG monitor various hardware/software platforms
+* Repo Quality
+  - Repo remains in a healthy state, with all tests passing and a good-quality, consistent codebase
+  - Continuous Integration: Along with Github, HW test farms operated by Google and LunarG perform pre-commit cloud testing
+on pull-requests
+
+# **Roles and Definitions**
+* Contributor, Commenter, User
+  - Submitting contributions, creating issues, or using the contents of the repository
+* Approver
+  - Experienced project members who have made significant technical contributions
+  - Write control: Approve pull/merge requests (verify submissions vs. acceptance criteria)
+* Technical Project Leads
+  - Lead the project in terms of versioning, quality assurance, and overarching objectives
+  - Monitor github issues and drive timely resolution
+  - Designate new approvers
+  - Ensure project information such as the Readme, Contributing docs, wiki, etc., is kept up-to-date
+  - Act as a facilitator in resolving technical conflicts
+  - Act as a point of contact for project-related questions
+
+The technical project leads for this repository are:
+* **Mark Lobodzinski** [mark@lunarg.com](mailto:mark@lunarg.com)
+* **Tobin Ehlis** [tobine@google.com](mailto:tobine@google.com)
+
+# **Acceptance Criteria and Process**
+  - All source code to include Khronos copyright and license (Apache 2.0).
+    - Additional copyrights of contributors appended
+  - Contributions are via pull requests
+    - Project leads will assign approvers to contributor pull requests
+    - Approvers can self-assign their reviewers
+    - For complex or invasive contributions, Project Leads may request approval from specific reviewers
+    - At least one review approval is required to complete a pull request
+    - The goal is to be responsive to contributors and facilitate their submissions while ensuring that acceptance criteria are met
+    - Approval is dependent upon adherence to the guidelines in [CONTRIBUTING.md](CONTRIBUTING.md), and alignment with
+repository goals of maintainability, completeness, and quality
+    - Conflicts or questions will ultimately be resolved by the project leads
diff --git a/src/third_party/vulkan-validation-layers/src/LAYER_CONFIGURATION.md b/src/third_party/vulkan-validation-layers/src/LAYER_CONFIGURATION.md
new file mode 100644
index 0000000..5d3e68c
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/LAYER_CONFIGURATION.md
@@ -0,0 +1,143 @@
+<!-- markdownlint-disable MD041 -->
+<!-- Copyright 2015-2019 LunarG, Inc. -->
+
+[![Khronos Vulkan][1]][2]
+
+[1]: https://vulkan.lunarg.com/img/Vulkan_100px_Dec16.png "https://www.khronos.org/vulkan/"
+[2]: https://www.khronos.org/vulkan/
+
+# Layers Overview and Configuration
+
+[![Creative Commons][3]][4]
+
+[3]: https://i.creativecommons.org/l/by-nd/4.0/88x31.png "Creative Commons License"
+[4]: https://creativecommons.org/licenses/by-nd/4.0/
+
+Vulkan supports intercepting or hooking API entry points via a layer framework.  A layer can intercept all or any subset of Vulkan API entry points.  Multiple layers can be chained together to cascade their functionality in the appearance of a single, larger layer.
+
+Vulkan validation and utility layers give Vulkan application developers the ability to add additional functionality to applications without modifying the application itself, e.g., dumping API entry points or generating screenshots of specified frames.
+
+### Vulkan Configurator Tool
+
+Developers can use the Vulkan Configurator, `vkconfig`, to enable explicit layers and disable implicit layers as well as change layer settings through a graphical user interface.  Please see [the Vulkan Configurator documentation in the Vulkan SDK](https://vulkan.lunarg.com/doc/sdk/latest/windows/vkconfig.html) for more information on using the Vulkan Configurator. Continue reading for instructions on how to configure layers without using vkconfig.
+
+## Activating Layers on Windows
+Before or during execution of a Vulkan application, the loader must be informed of the layers to activate.
+This can be done in two ways:  programmatically, or by using environment variables.
+
+Applications may programmatically activate layers via the `vkCreateInstance()` entry point.
+
+Layers may also be activated by using the `VK_INSTANCE_LAYERS` environment variable.
+The variable should include a semicolon-separated list of layer names to activate.
+Note that order is relevant, with the initial layer being the closest to the application, and the final layer being closest to the driver.
+
+The list of layers to activate can be specified by executing the following in a Command Window:
+
+```
+C:\> set VK_INSTANCE_LAYERS=VK_LAYER_LUNARG_api_dump;VK_LAYER_KHRONOS_validation
+```
+In this example, the api_dump layer will be called _before_ the Khronos validation layer.
+
+`VK_INSTANCE_LAYERS` may also be set in the system environment variables.
+
+To activate layers located in a particular SDK installation, or layers built locally from source, specify the layer JSON manifest file directory using the `VK_LAYER_PATH` environment variable.
+For example, if a Vulkan SDK is installed in `C:\VulkanSDK\1.1.121`, execute the following in a Command Window:
+
+```
+C:\> set VK_LAYER_PATH=C:\VulkanSDK\1.1.121\Bin
+C:\> set VK_INSTANCE_LAYERS=VK_LAYER_LUNARG_api_dump;VK_LAYER_KHRONOS_validation
+C:\> vkcube
+```
+
+## Activating Layers on Linux and macOS
+Before or during execution of a Vulkan application, the loader must be informed of the layers to activate.
+This can be done in two ways:  programmatically, or by using environment variables.
+
+Applications may programmatically activate layers via the `vkCreateInstance()` entry point.
+
+Layers may also be activated by using the `VK_INSTANCE_LAYERS` environment variable.
+The variable should include a colon-separated list of layer names to activate.
+Note that order is relevant, with the initial layer being the closest to the application, and the final layer being closest to the driver.
+
+For example, the list of explicit layers to activate can be specified with:
+
+```
+$ export VK_INSTANCE_LAYERS=VK_LAYER_LUNARG_api_dump:VK_LAYER_KHRONOS_validation
+```
+
+To activate layers in a particular SDK installation, or layers built locally from source, identify certain library paths and the layer JSON manifest file directory in addition to the layers to activate.  
+
+For Linux, if the Vulkan SDK was locally installed to `/sdks`, `VULKAN_SDK=/sdks/VulkanSDK/1.1.121/x86_64`:
+
+```
+$ export VK_LAYER_PATH=$VULKAN_SDK/lib/vulkan/layers
+$ export LD_LIBRARY_PATH=$VULKAN_SDK/lib:$VULKAN_SDK/lib/vulkan/layers
+$ export VK_INSTANCE_LAYERS=VK_LAYER_LUNARG_api_dump:VK_LAYER_KHRONOS_validation
+$ ./vkcube
+```
+
+For macOS, if the Vulkan SDK was locally installed to `/sdks`, `VULKAN_SDK=/sdks/VulkanSDK/1.1.121/macOS`:
+
+```
+$ export VK_LAYER_PATH=$VULKAN_SDK/etc/vulkan/explicit_layers.d
+$ export DYLD_LIBRARY_PATH=$VULKAN_SDK/lib
+$ export VK_INSTANCE_LAYERS=VK_LAYER_LUNARG_api_dump:VK_LAYER_KHRONOS_validation
+$ cd $VULKAN_SDK/../Applications/
+$ open vkcube.app
+```
+
+### Layer Settings File
+In addition to activating the layers, available layer options can be set via the `vk_layer_settings.txt` settings file.
+By default, the settings file must be named `vk_layer_settings.txt` and reside in the working directory of the targeted application.
+If `VK_LAYER_SETTINGS_PATH` is set and is a directory, then the settings file must be a file called `vk_layer_settings.txt` in the directory given by `VK_LAYER_SETTINGS_PATH`.
+If `VK_LAYER_SETTINGS_PATH` is set and is not a directory, then it must point to a file (with any name) which is the layer settings file.
+
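+For example, to keep the settings file outside the working directory (a
+sketch; the directory name is arbitrary):
+
+```
+$ export VK_LAYER_SETTINGS_PATH=$HOME/vulkan-config   # directory containing vk_layer_settings.txt
+```
+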
+Note:  To control layer reporting output, a layer settings file must be provided that identifies specific reporting levels for the layers enabled via the `VK_INSTANCE_LAYERS` environment variable.
+
+The settings file consists of comment lines and settings lines.  Comment lines begin with the `#` character.  Settings lines have the following format:
+
+   `<`*`LayerName`*`>.<`*`setting_name`*`> = <`*`setting_value`*`>`
+
+| Setting                  | Values                       | Description                                                      |
+| ------------------------ | ---------------------------- | ----------------------------------------------------------------- |
+| *`LayerName`*`.report_flags` | `info`      | Report information level messages                               |
+|                            | `warn`      | Report warning level messages                 |
+|                            | `perf`      | Report performance level warning messages                        |
+|                            | `error`     | Report error level messages                                 |
+|                            | `debug`      | Reserved                                                |
+| *`LayerName`*`.debug_action` | `VK_DBG_LAYER_ACTION_IGNORE`   | Ignore message reporting                                          |
+|                            | `VK_DBG_LAYER_ACTION_LOG_MSG`  | Report messages to log                                            |
+|                            | `VK_DBG_LAYER_ACTION_DEBUG_OUTPUT`    | (Windows) Report messages to debug console of Microsoft Visual Studio |
+|                            | `VK_DBG_LAYER_ACTION_BREAK`    | Break on messages (not currently used)                                  |
+| *`LayerName`*`.log_filename` | *`filename`*`.txt`             | Name of file to log `report_flags` level messages; default is `stdout` |
+| *`LayerName`*`.enables` | comma separated list of `VkValidationFeatureEnableEXT` enum values as defined in the Vulkan Specification      | Enables the specified validation features         |
+| *`LayerName`*`.disables` | comma separated list of `VkValidationFeatureDisableEXT` enum values as defined in the Vulkan Specification      | Disables the specified validation features         |
+
+
+
+Sample layer settings file contents:
+
+```
+khronos_validation.report_flags = info,error
+khronos_validation.debug_action = VK_DBG_LAYER_ACTION_LOG_MSG
+khronos_validation.disables = VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT
+# VK_LAYER_LUNARG_api_dump custom settings
+lunarg_api_dump.no_addr = TRUE
+lunarg_api_dump.file = FALSE
+```
+In the Vulkan-ValidationLayers repository, a sample layer settings file can be found in the 'layers' directory, [here](https://github.com/KhronosGroup/Vulkan-ValidationLayers/blob/master/layers/vk_layer_settings.txt).
+In the Vulkan SDK, the Linux version of this file is located in `config/vk_layer_settings.txt` of your local Vulkan SDK install.
+On Windows, you can find a sample layer settings file in `Config\vk_layer_settings.txt` of your local Vulkan SDK install.
+Consult these sample layer settings files for additional information and detail related to available options and settings.
+
+Note: If layers are activated via `VK_INSTANCE_LAYERS` environment variable and if neither an application-defined callback is defined nor a layer settings file is present, the loader/layers will provide default callbacks enabling output of error-level messages to standard out (and via `OutputDebugString` on Windows).
+
+## Advanced Layer Configuration, Installation, and Discovery Details
+The Vulkan loader searches specific platform-specific locations to find installed layers.
+For additional details, see the `LoaderAndLayerInterface.md` available [here](https://vulkan.lunarg.com/doc/sdk/latest/windows/loader_and_layer_interface.html),
+or in the Vulkan-Loader repository, [here](https://github.com/KhronosGroup/Vulkan-Loader/blob/master/loader/LoaderAndLayerInterface.md).
+
+Setting the `VK_LAYER_PATH` environment variable overrides the default loader layer search mechanism.
+When set, the loader will search only the directory(s) identified by the `VK_LAYER_PATH` environment variable for layer manifest files.
+
+Applications can query available layers via the `vkEnumerateInstanceLayerProperties()` command.
diff --git a/src/third_party/vulkan-validation-layers/src/LICENSE.txt b/src/third_party/vulkan-validation-layers/src/LICENSE.txt
new file mode 100644
index 0000000..6599e31
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/LICENSE.txt
@@ -0,0 +1,207 @@
+The majority of files in this project use the Apache 2.0 License.
+There are a few exceptions and their license can be found in the source.
+Any license deviations from Apache 2.0 are "more permissive" licenses.
+
+===========================================================================================
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/src/third_party/vulkan-validation-layers/src/README.md b/src/third_party/vulkan-validation-layers/src/README.md
new file mode 100644
index 0000000..e734b95
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/README.md
@@ -0,0 +1,56 @@
+# Vulkan Ecosystem Components
+
+This project provides the Khronos official Vulkan validation layers for Windows, Linux, Android, and MacOS.
+
+## CI Build Status
+| Platform | Build Status |
+|:--------:|:------------:|
+| Linux/Android | [![Build Status](https://travis-ci.org/KhronosGroup/Vulkan-ValidationLayers.svg?branch=master)](https://travis-ci.org/KhronosGroup/Vulkan-ValidationLayers) |
+| Windows |[![Build status](https://ci.appveyor.com/api/projects/status/8wduq4r5d9j8nhdv?svg=true)](https://ci.appveyor.com/project/Khronoswebmaster/vulkan-validationlayers/branch/master) |
+
+
+## Introduction
+
+Vulkan is an Explicit API, enabling direct control over how GPUs actually work. By design, minimal error checking is done inside
+a Vulkan driver. Applications have full control and responsibility for correct operation. Any errors in
+how Vulkan is used can result in a crash. This project provides Vulkan validation layers that can be enabled
+to assist development by enabling developers to verify their applications' correct use of the Vulkan API.
+
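+As a minimal sketch, an application typically enables validation at instance
+creation; the layer name `VK_LAYER_KHRONOS_validation` and the sample values
+below are illustrative:
+
+```c
+/* Sketch: enable the Khronos validation layer when creating a Vulkan instance. */
+#include <vulkan/vulkan.h>
+#include <stdio.h>
+
+int main(void) {
+    const char *layers[] = { "VK_LAYER_KHRONOS_validation" };
+
+    VkApplicationInfo app = { 0 };
+    app.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
+    app.pApplicationName = "validation-demo";   /* illustrative name */
+    app.apiVersion = VK_API_VERSION_1_1;
+
+    VkInstanceCreateInfo info = { 0 };
+    info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
+    info.pApplicationInfo = &app;
+    info.enabledLayerCount = 1;
+    info.ppEnabledLayerNames = layers;
+
+    VkInstance instance;
+    VkResult res = vkCreateInstance(&info, NULL, &instance);
+    if (res != VK_SUCCESS) {
+        fprintf(stderr, "vkCreateInstance failed: %d\n", (int)res);
+        return 1;
+    }
+    /* ... validation messages are reported while the instance is in use ... */
+    vkDestroyInstance(instance, NULL);
+    return 0;
+}
+```
+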
+The following components are available in this repository:
+- [*Validation Layers*](layers/)
+- [*Tests*](tests/)
+
+## Contact Information
+* [Tobin Ehlis](mailto:tobine@google.com)
+* [Mark Lobodzinski](mailto:mark@lunarg.com)
+
+## Information for Developing or Contributing:
+
+Please see the [CONTRIBUTING.md](CONTRIBUTING.md) file in this repository for more details.
+Please see the [GOVERNANCE.md](GOVERNANCE.md) file in this repository for repository management details.
+
+## How to Build and Run
+
+[BUILD.md](BUILD.md)
+Includes directions for building all components as well as running validation tests.
+
+Information on how to enable the various Validation layers is in [LAYER_CONFIGURATION.md](LAYER_CONFIGURATION.md).
+
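+As a minimal sketch of consuming layer output, an application can register a
+`VK_EXT_debug_utils` messenger (this assumes the extension was enabled on the
+instance; the callback and helper below are illustrative):
+
+```c
+/* Sketch: a VK_EXT_debug_utils callback that receives validation messages. */
+#include <vulkan/vulkan.h>
+#include <stdio.h>
+
+static VKAPI_ATTR VkBool32 VKAPI_CALL
+debug_callback(VkDebugUtilsMessageSeverityFlagBitsEXT severity,
+               VkDebugUtilsMessageTypeFlagsEXT type,
+               const VkDebugUtilsMessengerCallbackDataEXT *data,
+               void *user_data) {
+    (void)severity; (void)type; (void)user_data;
+    fprintf(stderr, "validation: %s\n", data->pMessage);
+    return VK_FALSE;  /* never abort the call that triggered the message */
+}
+
+/* The create function is an extension entry point, so fetch it at runtime. */
+static VkDebugUtilsMessengerEXT register_messenger(VkInstance instance) {
+    VkDebugUtilsMessengerCreateInfoEXT ci = { 0 };
+    ci.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
+    ci.messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT |
+                         VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
+    ci.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT |
+                     VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT |
+                     VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;
+    ci.pfnUserCallback = debug_callback;
+
+    PFN_vkCreateDebugUtilsMessengerEXT create =
+        (PFN_vkCreateDebugUtilsMessengerEXT)vkGetInstanceProcAddr(
+            instance, "vkCreateDebugUtilsMessengerEXT");
+    VkDebugUtilsMessengerEXT messenger = VK_NULL_HANDLE;
+    if (create != NULL) {
+        create(instance, &ci, NULL, &messenger);
+    }
+    return messenger;
+}
+```
+
+`register_messenger(instance)` would be called right after `vkCreateInstance`
+succeeds.
+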
+## Version Tagging Scheme
+
+Updates to the `Vulkan-ValidationLayers` repository which correspond to a new Vulkan specification release are tagged using the following format: `v<`_`version`_`>` (e.g., `v1.1.96`).
+
+**Note**: Marked version releases have undergone thorough testing but do not imply the same quality level as SDK tags. SDK tags follow the `sdk-<`_`version`_`>.<`_`patch`_`>` format (e.g., `sdk-1.1.92.0`).
+
+This scheme was adopted following the 1.1.96 Vulkan specification release.
+
+## License
+This work is released as open source under an Apache-style license from Khronos including a Khronos copyright.
+
+See [LICENSE.txt](LICENSE.txt) for a full list of licenses used in this repository.
+
+## Acknowledgements
+While this project has been developed primarily by LunarG, Inc., there are many other
+companies and individuals making this possible: Valve Corporation, funding
+project development; Google providing significant contributions to the validation layers;
+Khronos providing oversight and hosting of the project.
diff --git a/src/third_party/vulkan-validation-layers/src/build-android/AndroidManifest.xml b/src/third_party/vulkan-validation-layers/src/build-android/AndroidManifest.xml
new file mode 100644
index 0000000..d057d6c
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/build-android/AndroidManifest.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android" package="com.example.VulkanLayerValidationTests" android:versionCode="1" android:versionName="1.0">
+
+    <!-- This is the platform API where NativeActivity was introduced. -->
+    <uses-sdk android:minSdkVersion="26" android:targetSdkVersion="26"/>
+
+    <!-- This .apk has no Java code itself, so set hasCode to false. -->
+    <application android:label="@string/app_name" android:hasCode="false" android:debuggable='false'>
+
+        <!-- This allows writing log files to sdcard -->
+        <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
+
+        <!-- Our activity is the built-in NativeActivity framework class.
+             This will take care of integrating with our NDK code. -->
+        <activity android:name="android.app.NativeActivity" android:label="@string/app_name" android:exported="true">
+            <!-- Tell NativeActivity the name of our .so -->
+            <meta-data android:name="android.app.lib_name" android:value="VulkanLayerValidationTests"/>
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN"/>
+                <category android:name="android.intent.category.LAUNCHER"/>
+            </intent-filter>
+        </activity>
+    </application>
+
+</manifest>
diff --git a/src/third_party/vulkan-validation-layers/src/build-android/build_all.sh b/src/third_party/vulkan-validation-layers/src/build-android/build_all.sh
new file mode 100755
index 0000000..36d3fbd
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/build-android/build_all.sh
@@ -0,0 +1,73 @@
+#!/bin/bash
+
+# Copyright 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+if [ -z "${ANDROID_SDK_HOME}" ];
+then echo "Please set ANDROID_SDK_HOME, exiting"; exit 1;
+else echo "ANDROID_SDK_HOME is ${ANDROID_SDK_HOME}";
+fi
+
+if [ -z "${ANDROID_NDK_HOME}" ];
+then echo "Please set ANDROID_NDK_HOME, exiting"; exit 1;
+else echo "ANDROID_NDK_HOME is ${ANDROID_NDK_HOME}";
+fi
+
+if [[ $(uname) == "Linux" ]]; then
+    cores=$(nproc || echo 4)
+elif [[ $(uname) == "Darwin" ]]; then
+    cores=$(sysctl -n hw.ncpu || echo 4)
+fi
+
+function findtool() {
+    if [[ ! $(type -t $1) ]]; then
+        echo Command $1 not found, see ../BUILD.md;
+        exit 1;
+    fi
+}
+
+# Check for dependencies
+findtool aapt
+findtool zipalign
+findtool jarsigner
+
+set -ev
+
+LAYER_BUILD_DIR=$PWD
+DEMO_BUILD_DIR=$PWD/../demos/android
+echo LAYER_BUILD_DIR="${LAYER_BUILD_DIR}"
+echo DEMO_BUILD_DIR="${DEMO_BUILD_DIR}"
+
+function create_APK() {
+    aapt package -f -M AndroidManifest.xml -I "$ANDROID_SDK_HOME/platforms/android-26/android.jar" -S res -F bin/$1-unaligned.apk bin/libs
+    # update this logic to detect if key is already there.  If so, use it, otherwise create it.
+    jarsigner -verbose -keystore ~/.android/debug.keystore -storepass android -keypass android  bin/$1-unaligned.apk androiddebugkey
+    zipalign -f 4 bin/$1-unaligned.apk bin/$1.apk
+}
+
+#
+# build layers
+#
+./update_external_sources_android.sh --no-build
+ndk-build -j $cores
+
+#
+# build VulkanLayerValidationTests APK
+#
+mkdir -p bin/libs/lib
+cp -r $LAYER_BUILD_DIR/libs/* $LAYER_BUILD_DIR/bin/libs/lib/
+create_APK VulkanLayerValidationTests
+
+echo Builds succeeded
+exit 0
diff --git a/src/third_party/vulkan-validation-layers/src/build-android/cmake/README.md b/src/third_party/vulkan-validation-layers/src/build-android/cmake/README.md
new file mode 100644
index 0000000..149966c
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/build-android/cmake/README.md
@@ -0,0 +1,43 @@
+Build Validation Layers with Android CMake Plugin
+=================================================
+The Gradle project in this directory builds the validation layers into an AAR.
+The project can be added directly to an application's Gradle build.
+[Android Studio 3.0.0+](https://developer.android.com/studio/index.html)
+is required: earlier versions only publish release libs by default.
+
+Prerequisites
+-------------
+Build the shaderc binary:
+- Building from the GitHub repo source:
+1. cd android-build
+2. ./update_external_sources_android.sh
+3. ./android-generate.sh
+
+Extra steps if building from the NDK's source tree:
+```
+   cd ${your ANDROID_NDK_ROOT}/sources/third_party/shaderc
+   ndk-build  APP_ABI=all APP_STL=c++_static NDK_TOOLCHAIN_VERSION=clang NDK_PROJECT_PATH=. APP_BUILD_SCRIPT=Android.mk libshaderc_combined
+```
+
+Adding the layer module to an Android Studio application project
+--------------------------------------------------------
+1. In the app's settings.gradle, add:
+```
+    include ':layerLib'
+    project(':layerLib').projectDir = new File('/path/to/cmake/layerlib')
+```
+2. In the app's build.gradle, add:
+```
+dependencies {
+    // Android Studio 3.0.0+ is required
+    implementation project(':layerLib')
+}
+```
+The BUILD_IN_NDK variable in layerlib/CMakeLists.txt detects whether the source is
+from the NDK or a GitHub repo clone, and configures the file paths accordingly.
+
+Tested
+-----
+Built on macOS, tested on a Google Pixel XL with Android Oreo.
+Equivalent build scripts for Windows are in the same directory.
+
diff --git a/src/third_party/vulkan-validation-layers/src/build-android/cmake/layerlib/CMakeLists.txt b/src/third_party/vulkan-validation-layers/src/build-android/cmake/layerlib/CMakeLists.txt
new file mode 100644
index 0000000..cad6279
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/build-android/cmake/layerlib/CMakeLists.txt
@@ -0,0 +1,97 @@
+cmake_minimum_required(VERSION 3.4.1)
+
+# The validation layers can be built with code from either the
+#    GitHub repo   OR
+#    the NDK's snapshot
+# The glslang/SPIR-V file structure differs between the two, hence the BUILD_IN_NDK variable
+set(BUILD_IN_NDK OFF)
+if (CMAKE_CURRENT_SOURCE_DIR MATCHES "sources/third_party/vulkan/src")
+    set(BUILD_IN_NDK ON)
+endif()
+
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall -Werror \
+        -Wno-unused-function -Wno-unused-const-variable \
+        -Wno-c++98-compat-pedantic -DVK_PROTOTYPES \
+        -DVK_USE_PLATFORM_ANDROID_KHR")
+
+if (BUILD_IN_NDK)
+    # NDK directory structure:
+    #  $ANDROID_NDK/sources/third_party/
+    #        vulkan/src
+    #        shaderc/third_party (for glslang & spirv-tools )
+    # this build uses the combined lib libshaderc.a in place of libSPIRV-Tools.a
+    get_filename_component(SRC_DIR
+        "${ANDROID_NDK}/sources/third_party/vulkan/src" ABSOLUTE)
+    get_filename_component(EXTERNAL_DIR
+        "${SRC_DIR}/../../shaderc/third_party" ABSOLUTE)
+    get_filename_component(SPIRV_LIB
+        "${SRC_DIR}/../../shaderc/libs/${ANDROID_STL}/${ANDROID_ABI}/libshaderc.a"
+        ABSOLUTE)
+else ()
+    # github directory structure:
+    #   ${SRC_DIR}
+    #   ${SRC_DIR}/build-android/external (for glslang, spirv-tools & shaderc )
+    get_filename_component(SRC_DIR "${CMAKE_CURRENT_SOURCE_DIR}/../../.."  ABSOLUTE)
+    set(EXTERNAL_DIR "${SRC_DIR}/build-android/third_party/shaderc/third_party")
+    set(SPIRV_LIB
+        "${SRC_DIR}/build-android/third_party/shaderc/android_test/obj/local/${ANDROID_ABI}/libSPIRV-Tools.a")
+endif()
+set(COMMON_DIR "${SRC_DIR}/build-android/generated")
+
+set(CMAKE_CXX_clone "${CMAKE_CXX_FLAGS}")
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DVK_USE_PLATFORM_ANDROID_KHR \
+        -fvisibility=hidden")
+include_directories(${SRC_DIR}/include
+                    ${COMMON_DIR}/include
+                    ${SRC_DIR}/layers)
+add_library(layer_utils STATIC
+        ${SRC_DIR}/layers/vk_layer_config.cpp
+        ${SRC_DIR}/layers/vk_layer_extension_utils.cpp
+        ${SRC_DIR}/layers/vk_layer_utils.cpp
+        ${SRC_DIR}/layers/vk_format_utils.cpp)
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_clone}")
+
+# assume shaderc already built externally
+add_library(SPIRV-Tools-prebuilt STATIC IMPORTED)
+set_target_properties(SPIRV-Tools-prebuilt PROPERTIES IMPORTED_LOCATION
+    ${SPIRV_LIB})
+
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DVK_USE_PLATFORM_ANDROID_KHR \
+                     -fvisibility=hidden")
+add_library(VkLayer_khronos_validation SHARED
+        ${SRC_DIR}/layers/state_tracker.cpp
+        ${SRC_DIR}/layers/core_validation.cpp
+        ${SRC_DIR}/layers/drawdispatch.cpp
+        ${SRC_DIR}/layers/convert_to_renderpass2.cpp
+        ${SRC_DIR}/layers/descriptor_sets.cpp
+        ${SRC_DIR}/layers/buffer_validation.cpp
+        ${SRC_DIR}/layers/shader_validation.cpp
+        ${SRC_DIR}/layers/gpu_validation.cpp
+        ${SRC_DIR}/layers/best_practices.cpp
+        ${COMMON_DIR}/include/layer_chassis_dispatch.cpp
+        ${COMMON_DIR}/include/chassis.cpp
+        ${COMMON_DIR}/include/parameter_validation.cpp
+        ${SRC_DIR}/layers/parameter_validation_utils.cpp
+        ${COMMON_DIR}/include/object_tracker.cpp
+        ${SRC_DIR}/layers/object_tracker_utils.cpp
+        ${COMMON_DIR}/include/thread_safety.cpp
+        ${SRC_DIR}/layers/xxhash.c)
+target_include_directories(VkLayer_khronos_validation PRIVATE
+        ${SRC_DIR}/include
+        ${SRC_DIR}/layers
+        ${COMMON_DIR}/include
+        ${EXTERNAL_DIR}/glslang
+        ${EXTERNAL_DIR}/spirv-tools/include)
+target_link_libraries(VkLayer_khronos_validation PRIVATE
+        log layer_utils SPIRV-Tools-prebuilt)
+if (NOT BUILD_IN_NDK)
+    set(SPIRV_OPT_LIB
+        "${SRC_DIR}/build-android/third_party/shaderc/android_test/obj/local/${ANDROID_ABI}/libSPIRV-Tools-opt.a")
+    add_library(SPIRV-Tools-opt-prebuilt STATIC IMPORTED)
+    set_target_properties(SPIRV-Tools-opt-prebuilt PROPERTIES IMPORTED_LOCATION
+        ${SPIRV_OPT_LIB})
+    target_link_libraries(VkLayer_khronos_validation PRIVATE
+        SPIRV-Tools-opt-prebuilt)
+endif()
+
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_clone}")
diff --git a/src/third_party/vulkan-validation-layers/src/build-android/cmake/layerlib/build.gradle b/src/third_party/vulkan-validation-layers/src/build-android/cmake/layerlib/build.gradle
new file mode 100644
index 0000000..889d232
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/build-android/cmake/layerlib/build.gradle
@@ -0,0 +1,32 @@
+apply plugin: 'com.android.library'
+
+android {
+    /*
+     * Required: Android Studio 3.0.0+!
+     */
+    compileSdkVersion 25
+
+    defaultConfig {
+        minSdkVersion 24
+        targetSdkVersion 24
+        versionCode 1
+        versionName "1.0"
+        ndk.abiFilters 'armeabi-v7a', 'arm64-v8a', 'x86', 'x86_64'
+        externalNativeBuild {
+            cmake.arguments '-DANDROID_TOOLCHAIN=clang',
+                            '-DANDROID_STL=c++_static',
+                            '-DANDROID_PLATFORM=android-24'
+        }
+    }
+    externalNativeBuild {
+        cmake.path 'CMakeLists.txt'
+    }
+    buildTypes {
+        release {
+            minifyEnabled false
+            proguardFiles getDefaultProguardFile('proguard-android.txt'),
+                    'proguard-rules.pro'
+        }
+    }
+}
+
diff --git a/src/third_party/vulkan-validation-layers/src/build-android/cmake/layerlib/proguard-rules.pro b/src/third_party/vulkan-validation-layers/src/build-android/cmake/layerlib/proguard-rules.pro
new file mode 100644
index 0000000..b41fe70
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/build-android/cmake/layerlib/proguard-rules.pro
@@ -0,0 +1,17 @@
+# Add project specific ProGuard rules here.
+# By default, the flags in this file are appended to flags specified
+# in ${ANDROID_SDK}/tools/proguard/proguard-android.txt
+# You can edit the include path and order by changing the proguardFiles
+# directive in build.gradle.
+#
+# For more details, see
+#   http://developer.android.com/guide/developing/tools/proguard.html
+
+# Add any project specific keep options here:
+
+# If your project uses WebView with JS, uncomment the following
+# and specify the fully qualified class name to the JavaScript interface
+# class:
+#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
+#   public *;
+#}
diff --git a/src/third_party/vulkan-validation-layers/src/build-android/cmake/layerlib/src/main/AndroidManifest.xml b/src/third_party/vulkan-validation-layers/src/build-android/cmake/layerlib/src/main/AndroidManifest.xml
new file mode 100644
index 0000000..78c3704
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/build-android/cmake/layerlib/src/main/AndroidManifest.xml
@@ -0,0 +1,9 @@
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="com.example.layerlib">
+
+    <application android:allowBackup="true" android:label="@string/app_name"
+        android:supportsRtl="true">
+
+    </application>
+
+</manifest>
diff --git a/src/third_party/vulkan-validation-layers/src/build-android/cmake/layerlib/src/main/res/values/strings.xml b/src/third_party/vulkan-validation-layers/src/build-android/cmake/layerlib/src/main/res/values/strings.xml
new file mode 100644
index 0000000..66f8389
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/build-android/cmake/layerlib/src/main/res/values/strings.xml
@@ -0,0 +1,3 @@
+<resources>
+    <string name="app_name">Layer Library</string>
+</resources>
diff --git a/src/third_party/vulkan-validation-layers/src/build-android/install_all.sh b/src/third_party/vulkan-validation-layers/src/build-android/install_all.sh
new file mode 100755
index 0000000..2773510
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/build-android/install_all.sh
@@ -0,0 +1,76 @@
+#!/bin/bash
+
+# Copyright 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e
+
+#
+# Parse parameters
+#
+
+function printUsage {
+   echo "Supported parameters are:"
+   echo "    -s|--serial <target device serial number> (optional)"
+   echo
+   echo "i.e. ${0##*/} -s <serial number>"
+   exit 1
+}
+
+if [[ $(($# % 2)) -ne 0 ]]
+then
+    echo Parameters must be provided in pairs.
+    echo parameter count = $#
+    echo
+    printUsage
+    exit 1
+fi
+
+while [[ $# -gt 0 ]]
+do
+    case $1 in
+        -s|--serial)
+            # include the flag, because we need to leave it off if not provided
+            serial="$2"
+            shift 2
+            ;;
+        -*)
+            # unknown option
+            echo Unknown option: $1
+            echo
+            printUsage
+            exit 1
+            ;;
+    esac
+done
+
+if [[ $serial ]]; then
+    echo serial = "${serial}"
+    serialFlag="-s $serial"
+    if [[ $(adb devices) != *"$serial"* ]]
+    then
+        echo Device not found: "${serial}"
+        echo
+        printUsage
+        exit 1
+    fi
+else
+    echo Using device $(adb get-serialno)
+fi
+
+# Install everything built by build_all.sh
+echo "adb $serialFlag install -r bin/VulkanLayerValidationTests.apk"
+adb $serialFlag install -r bin/VulkanLayerValidationTests.apk
+
+exit $?
diff --git a/src/third_party/vulkan-validation-layers/src/build-android/jni/Android.mk b/src/third_party/vulkan-validation-layers/src/build-android/jni/Android.mk
new file mode 100644
index 0000000..4f6ffae
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/build-android/jni/Android.mk
@@ -0,0 +1,130 @@
+# Copyright 2015 The Android Open Source Project
+# Copyright (C) 2015 Valve Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#      http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+LOCAL_PATH := $(call my-dir)
+SRC_DIR := ../..
+LAYER_DIR := ../generated
+THIRD_PARTY := ../third_party
+
+VULKAN_INCLUDE := $(LOCAL_PATH)/$(THIRD_PARTY)/Vulkan-Headers/include
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := layer_utils
+LOCAL_SRC_FILES += $(SRC_DIR)/layers/vk_layer_config.cpp
+LOCAL_SRC_FILES += $(SRC_DIR)/layers/vk_layer_extension_utils.cpp
+LOCAL_SRC_FILES += $(SRC_DIR)/layers/vk_layer_utils.cpp
+LOCAL_SRC_FILES += $(SRC_DIR)/layers/vk_format_utils.cpp
+LOCAL_C_INCLUDES += $(VULKAN_INCLUDE) \
+                    $(LOCAL_PATH)/$(SRC_DIR)/layers/generated \
+                    $(LOCAL_PATH)/$(SRC_DIR)/layers
+LOCAL_CPPFLAGS += -std=c++11 -Wall -Werror -Wno-unused-function -Wno-unused-const-variable
+LOCAL_CPPFLAGS += -DVK_USE_PLATFORM_ANDROID_KHR -DVK_PROTOTYPES -fvisibility=hidden
+include $(BUILD_STATIC_LIBRARY)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := VkLayer_khronos_validation
+LOCAL_SRC_FILES += $(SRC_DIR)/layers/state_tracker.cpp
+LOCAL_SRC_FILES += $(SRC_DIR)/layers/core_validation.cpp
+LOCAL_SRC_FILES += $(SRC_DIR)/layers/drawdispatch.cpp
+LOCAL_SRC_FILES += $(SRC_DIR)/layers/descriptor_sets.cpp
+LOCAL_SRC_FILES += $(SRC_DIR)/layers/buffer_validation.cpp
+LOCAL_SRC_FILES += $(SRC_DIR)/layers/shader_validation.cpp
+LOCAL_SRC_FILES += $(SRC_DIR)/layers/gpu_validation.cpp
+LOCAL_SRC_FILES += $(SRC_DIR)/layers/best_practices.cpp
+LOCAL_SRC_FILES += $(SRC_DIR)/layers/convert_to_renderpass2.cpp
+LOCAL_SRC_FILES += $(SRC_DIR)/layers/generated/layer_chassis_dispatch.cpp
+LOCAL_SRC_FILES += $(SRC_DIR)/layers/generated/chassis.cpp
+LOCAL_SRC_FILES += $(SRC_DIR)/layers/xxhash.c
+LOCAL_SRC_FILES += $(SRC_DIR)/layers/generated/parameter_validation.cpp
+LOCAL_SRC_FILES += $(SRC_DIR)/layers/parameter_validation_utils.cpp
+LOCAL_SRC_FILES += $(SRC_DIR)/layers/generated/object_tracker.cpp
+LOCAL_SRC_FILES += $(SRC_DIR)/layers/object_tracker_utils.cpp
+LOCAL_SRC_FILES += $(SRC_DIR)/layers/generated/thread_safety.cpp
+LOCAL_SRC_FILES += $(SRC_DIR)/layers/generated/command_counter_helper.cpp
+LOCAL_C_INCLUDES += $(VULKAN_INCLUDE) \
+                    $(LOCAL_PATH)/$(SRC_DIR)/layers \
+                    $(LOCAL_PATH)/$(SRC_DIR)/layers/generated
+LOCAL_STATIC_LIBRARIES += layer_utils glslang SPIRV-Tools SPIRV-Tools-opt
+LOCAL_CPPFLAGS += -std=c++11 -Wall -Werror -Wno-unused-function -Wno-unused-const-variable
+LOCAL_CPPFLAGS += -DVK_USE_PLATFORM_ANDROID_KHR -DVK_PROTOTYPES -fvisibility=hidden
+LOCAL_LDLIBS    := -llog -landroid
+LOCAL_LDFLAGS   += -Wl,-Bsymbolic
+LOCAL_LDFLAGS   += -Wl,--exclude-libs,ALL
+include $(BUILD_SHARED_LIBRARY)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := VkLayerValidationTests
+LOCAL_SRC_FILES += $(SRC_DIR)/tests/layer_validation_tests.cpp \
+				   $(SRC_DIR)/tests/vklayertests_pipeline_shader.cpp \
+				   $(SRC_DIR)/tests/vklayertests_buffer_image_memory_sampler.cpp \
+				   $(SRC_DIR)/tests/vklayertests_others.cpp \
+				   $(SRC_DIR)/tests/vklayertests_descriptor_renderpass_framebuffer.cpp \
+				   $(SRC_DIR)/tests/vklayertests_command.cpp \
+				   $(SRC_DIR)/tests/vkpositivelayertests.cpp \
+                   $(SRC_DIR)/tests/vktestbinding.cpp \
+                   $(SRC_DIR)/tests/vktestframeworkandroid.cpp \
+                   $(SRC_DIR)/tests/vkrenderframework.cpp \
+                   $(SRC_DIR)/layers/convert_to_renderpass2.cpp \
+                   $(SRC_DIR)/layers/generated/vk_safe_struct.cpp \
+                   $(SRC_DIR)/layers/generated/lvt_function_pointers.cpp \
+                   $(THIRD_PARTY)/Vulkan-Tools/common/vulkan_wrapper.cpp
+LOCAL_C_INCLUDES += $(VULKAN_INCLUDE) \
+                    $(LOCAL_PATH)/$(SRC_DIR)/layers/generated \
+                    $(LOCAL_PATH)/$(SRC_DIR)/layers \
+                    $(LOCAL_PATH)/$(SRC_DIR)/libs \
+                    $(LOCAL_PATH)/$(THIRD_PARTY)/Vulkan-Tools/common
+
+LOCAL_STATIC_LIBRARIES := googletest_main layer_utils shaderc
+LOCAL_CPPFLAGS += -std=c++11 -DVK_PROTOTYPES -Wall -Werror -Wno-unused-function -Wno-unused-const-variable
+LOCAL_CPPFLAGS += -DVK_USE_PLATFORM_ANDROID_KHR -DNV_EXTENSIONS -DAMD_EXTENSIONS -fvisibility=hidden --include=$(THIRD_PARTY)/Vulkan-Tools/common/vulkan_wrapper.h
+LOCAL_LDLIBS := -llog -landroid -ldl
+LOCAL_LDFLAGS   += -Wl,-Bsymbolic
+LOCAL_LDFLAGS   += -Wl,--exclude-libs,ALL
+include $(BUILD_EXECUTABLE)
+
+# Note: The following module is similar in name to the executable, but differs so that the loader won't enumerate the resulting .so
+include $(CLEAR_VARS)
+LOCAL_MODULE := VulkanLayerValidationTests
+LOCAL_SRC_FILES += $(SRC_DIR)/tests/layer_validation_tests.cpp \
+				   $(SRC_DIR)/tests/vklayertests_pipeline_shader.cpp \
+				   $(SRC_DIR)/tests/vklayertests_buffer_image_memory_sampler.cpp \
+				   $(SRC_DIR)/tests/vklayertests_others.cpp \
+				   $(SRC_DIR)/tests/vklayertests_descriptor_renderpass_framebuffer.cpp \
+				   $(SRC_DIR)/tests/vklayertests_command.cpp \
+				   $(SRC_DIR)/tests/vkpositivelayertests.cpp \
+                   $(SRC_DIR)/tests/vktestbinding.cpp \
+                   $(SRC_DIR)/tests/vktestframeworkandroid.cpp \
+                   $(SRC_DIR)/tests/vkrenderframework.cpp \
+                   $(SRC_DIR)/layers/convert_to_renderpass2.cpp \
+                   $(SRC_DIR)/layers/generated/vk_safe_struct.cpp \
+                   $(SRC_DIR)/layers/generated/lvt_function_pointers.cpp \
+                   $(THIRD_PARTY)/Vulkan-Tools/common/vulkan_wrapper.cpp
+LOCAL_C_INCLUDES += $(VULKAN_INCLUDE) \
+                    $(LOCAL_PATH)/$(SRC_DIR)/layers/generated \
+                    $(LOCAL_PATH)/$(SRC_DIR)/layers \
+                    $(LOCAL_PATH)/$(SRC_DIR)/libs \
+                    $(LOCAL_PATH)/$(THIRD_PARTY)/Vulkan-Tools/common
+
+LOCAL_STATIC_LIBRARIES := googletest_main layer_utils shaderc
+LOCAL_CPPFLAGS += -std=c++11 -DVK_PROTOTYPES -Wall -Werror -Wno-unused-function -Wno-unused-const-variable
+LOCAL_CPPFLAGS += -DVK_USE_PLATFORM_ANDROID_KHR -DNV_EXTENSIONS -DAMD_EXTENSIONS -fvisibility=hidden -DVALIDATION_APK --include=$(THIRD_PARTY)/Vulkan-Tools/common/vulkan_wrapper.h
+LOCAL_WHOLE_STATIC_LIBRARIES += android_native_app_glue
+LOCAL_LDLIBS := -llog -landroid -ldl
+LOCAL_LDFLAGS := -u ANativeActivity_onCreate
+include $(BUILD_SHARED_LIBRARY)
+
+$(call import-module,android/native_app_glue)
+$(call import-module,third_party/googletest)
+$(call import-module,third_party/shaderc)
diff --git a/src/third_party/vulkan-validation-layers/src/build-android/jni/Application.mk b/src/third_party/vulkan-validation-layers/src/build-android/jni/Application.mk
new file mode 100644
index 0000000..243c285
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/build-android/jni/Application.mk
@@ -0,0 +1,21 @@
+# Copyright 2015 The Android Open Source Project
+# Copyright (C) 2015 Valve Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#      http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+APP_ABI := armeabi-v7a arm64-v8a x86 x86_64
+# APP_ABI := arm64-v8a   # just build for pixel2  (don't check in)
+APP_PLATFORM := android-26
+APP_STL := c++_static
+NDK_TOOLCHAIN_VERSION := clang
+NDK_MODULE_PATH := .
diff --git a/src/third_party/vulkan-validation-layers/src/build-android/jni/shaderc/Application.mk b/src/third_party/vulkan-validation-layers/src/build-android/jni/shaderc/Application.mk
new file mode 100644
index 0000000..5447415
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/build-android/jni/shaderc/Application.mk
@@ -0,0 +1,4 @@
+APP_ABI := all
+APP_BUILD_SCRIPT := Android.mk
+APP_STL := c++_static
+APP_PLATFORM := android-23
diff --git a/src/third_party/vulkan-validation-layers/src/build-android/known_good.json b/src/third_party/vulkan-validation-layers/src/build-android/known_good.json
new file mode 100644
index 0000000..247c5f7
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/build-android/known_good.json
@@ -0,0 +1,40 @@
+{
+  "repos" : [
+    {
+      "name" : "shaderc",
+      "url" : "https://github.com/google/shaderc.git",
+      "sub_dir" : "shaderc",
+      "commit" : "0597d39ec6378dc1c872686af1fa17988faf26c8"
+    },
+    {
+      "name" : "glslang",
+      "url" : "https://github.com/KhronosGroup/glslang.git",
+      "sub_dir" : "shaderc/third_party/glslang",
+      "commit" : "34953810a62c5230cc059cfdf42dc44c5aab2246"
+    },
+    {
+      "name" : "Vulkan-Headers",
+      "url" : "https://github.com/KhronosGroup/Vulkan-Headers.git",
+      "sub_dir" : "Vulkan-Headers",
+      "commit" : "v1.1.130"
+    },
+    {
+      "name" : "Vulkan-Tools",
+      "url" : "https://github.com/KhronosGroup/Vulkan-Tools.git",
+      "sub_dir" : "Vulkan-Tools",
+      "commit" : "5ceb7be9a3bf86f34ade5a7f72459248d99a4b76"
+    },
+    {
+      "name" : "SPIRV-Tools",
+      "url" : "https://github.com/KhronosGroup/SPIRV-Tools.git",
+      "sub_dir" : "shaderc/third_party/spirv-tools",
+      "commit" : "9b3cc3e05337358d0bd9fec1b7a51e3cbf55312b"
+    },
+    {
+      "name" : "SPIRV-Headers",
+      "url" : "https://github.com/KhronosGroup/SPIRV-Headers.git",
+      "sub_dir" : "shaderc/third_party/spirv-tools/external/spirv-headers",
+      "commit" : "38cafab379e5d16137cb97a485b9385191039b92"
+    }
+  ]
+}
diff --git a/src/third_party/vulkan-validation-layers/src/build-android/res/values/strings.xml b/src/third_party/vulkan-validation-layers/src/build-android/res/values/strings.xml
new file mode 100644
index 0000000..8ff71b0
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/build-android/res/values/strings.xml
@@ -0,0 +1,24 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright 2016 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<!-- This file contains resource definitions for displayed strings, allowing
+     them to be changed based on the locale and options. -->
+
+<resources>
+    <!-- Simple strings. -->
+    <string name="app_name">VulkanLayerValidationTests</string>
+
+</resources>
diff --git a/src/third_party/vulkan-validation-layers/src/build-android/test_APK.sh b/src/third_party/vulkan-validation-layers/src/build-android/test_APK.sh
new file mode 100755
index 0000000..c272cd3
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/build-android/test_APK.sh
@@ -0,0 +1,291 @@
+#!/bin/bash
+
+# Copyright 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Quiet by default
+set +x
+
+echo
+echo === Vulkan Validation Layers Tests ===
+echo Running test script: build-android/test_APK.sh
+echo
+
+#
+# Parse parameters
+#
+
+function printUsage {
+   echo "Supported parameters are:"
+   echo "    -p|--platform <platform> (optional)"
+   echo "    -f|--filter <gtest filter list> (optional)"
+   echo "    -s|--serial <target device serial number> (optional)"
+   echo "    -a|--abi <target abi> (optional)"
+   echo
+   echo "i.e. ${0##*/} -p <platform> -f <test filter> -s <serial number>"
+   exit 1
+}
+
+if [[ $(($# % 2)) -ne 0 ]]
+then
+    echo Parameters must be provided in pairs.
+    echo parameter count = $#
+    echo
+    printUsage
+    exit 1
+fi
+
+while [[ $# -gt 0 ]]
+do
+    case $1 in
+        -p|--platform)
+            platform="$2"
+            shift 2
+            ;;
+        -f|--filter)
+            filter="$2"
+            shift 2
+            ;;
+        -s|--serial)
+            serial="$2"
+            shift 2
+            ;;
+        -a|--abi)
+            abi="$2"
+            shift 2
+            ;;
+        -*)
+            # unknown option
+            echo Unknown option: $1
+            echo
+            printUsage
+            exit 1
+            ;;
+    esac
+done
+
+if [[ $serial ]]; then
+    serialFlag="-s $serial"
+    if [[ $(adb devices) != *"$serial"* ]]
+    then
+        echo Device not found: "${serial}"
+        echo
+        printUsage
+        exit 1
+    fi
+else
+    echo Using device $(adb get-serialno)
+fi
+
+if [[ -z $platform ]]
+then
+    echo No platform specified.
+    platform="UnspecifiedPlatform"
+fi
+
+if [[ -z $filter ]]
+then
+    echo No filter specified, running all tests.
+    filter="*"
+fi
+
+if [[ $platform ]]; then echo platform = "${platform}"; fi
+if [[ $filter ]]; then echo filter = "${filter}"; fi
+if [[ $serial ]]; then echo serial = "${serial}"; fi
+if [[ $abi ]]; then echo abi = "${abi}"; fi
+
+set -e
+
+echo
+echo Setting up...
+
+#
+# Start up
+#
+
+# Grab our Android test mutex
+# Wait for any existing test runs on the devices
+
+# Blow away the lock if tests run too long, avoiding infinite loop
+lock_seconds=1200                                # Duration in seconds.
+lock_end_time=$(( $(date +%s) + lock_seconds ))  # Calculate end time.
+
+until mkdir /var/tmp/VkLayerValidationTests.$serial.lock
+do
+    sleep 5
+    echo "Waiting for existing Android test to complete on $serial"
+
+    if [ $(date +%s) -gt $lock_end_time ]
+    then
+        echo "Lock timeout reached: $lock_seconds seconds"
+        echo "Deleting /var/tmp/VkLayerValidationTests.$serial.lock"
+        rm -r /var/tmp/VkLayerValidationTests.$serial.lock
+    fi
+done
+
+# Clean up our lock on any exit condition
+function finish {
+   rm -r /var/tmp/VkLayerValidationTests.$serial.lock
+}
+trap finish EXIT
+
+# Wake up the device - make sure each keycode has time to be processed
+adb $serialFlag shell input keyevent "KEYCODE_MENU"
+sleep 2
+adb $serialFlag shell input keyevent "KEYCODE_HOME"
+sleep 2
+
+# Clear the log
+adb $serialFlag logcat -c
+
+# Ensure any previous activity has stopped, otherwise it won't run tests
+adb $serialFlag shell am force-stop com.example.VulkanLayerValidationTests
+
+# Remove any existing APK that may have been installed from another host
+# Disable exit on error in case the APK is not present
+set +e
+adb $serialFlag shell pm list packages | grep com.example.VulkanLayerValidationTests
+if [ $? -eq 0 ]
+then
+    adb $serialFlag uninstall com.example.VulkanLayerValidationTests
+fi
+# Re-enable exit on error
+set -e
+
+echo
+echo Installing ./bin/VulkanLayerValidationTests.apk...
+
+# Install the current build
+if [[ -z $abi ]]
+then
+  adb $serialFlag install -r bin/VulkanLayerValidationTests.apk
+else
+  adb $serialFlag install --abi $abi -r bin/VulkanLayerValidationTests.apk
+fi
+
+echo
+echo Launching tests...
+
+# Kick off the tests with known exception list
+adb $serialFlag shell am start -a android.intent.action.MAIN -c android-intent.category.LAUNCH -n com.example.VulkanLayerValidationTests/android.app.NativeActivity --es args --gtest_filter="${filter}"
+
+#
+# Scrape the log until we get pass/fail/crash
+#
+
+# The following loop will give tests 20 minutes to pass/fail/crash
+seconds=1200                          # Duration in seconds.
+endTime=$(( $(date +%s) + seconds ))  # Calculate end time.
+
+exitCode=-1
+
+# Disable exit on error, we expect grep to fail multiple times in this loop
+set +e
+
+while [ $(date +%s) -lt $endTime ]; do  # Loop until interval has elapsed.
+
+    # The following line is printed from android_main on success
+    adb $serialFlag logcat -d | grep "==== Tests PASSED ===="
+    if [ $? -eq 0 ]
+    then
+        echo
+        echo VulkanLayerValidationTests PASSED!
+        echo
+        exitCode=0
+        break
+    fi
+
+    # The following line is printed from android_main on failure
+    adb $serialFlag logcat -d | grep "==== Tests FAILED ===="
+    if [ $? -eq 0 ]
+    then
+        echo
+        echo VulkanLayerValidationTests FAILED!
+        echo
+        exitCode=1
+        break
+    fi
+
+    # developer.android.com recommends searching for the following string to detect native crash
+    adb $serialFlag logcat -d | grep "\*\*\* \*\*\* \*\*\* \*\*\* \*\*\* \*\*\* \*\*\* \*\*\* \*\*\* \*\*\* \*\*\* \*\*\* \*\*\* \*\*\* \*\*\* \*\*\*"
+    if [ $? -eq 0 ]
+    then
+        exitCode=2
+        echo
+        echo VulkanLayerValidationTests CRASHED!
+        echo
+        break
+    fi
+
+    sleep 5
+
+done
+
+if [ $exitCode -eq -1 ]
+then
+    echo "VulkanLayerValidationTests hasn't completed in $seconds seconds. Script exiting."
+fi
+
+#
+# Cleanup
+#
+
+# Return to home screen to clear any error pop-ups
+adb $serialFlag shell input keyevent "KEYCODE_HOME"
+sleep 2
+
+# Stop the activity
+adb $serialFlag shell am force-stop com.example.VulkanLayerValidationTests
+
+echo
+echo Fetching test output and logcat text...
+
+# Avoid characters that are illegal in Windows filenames, so these
+# files can be archived to a Windows host system for later reference
+today=$(date +%Y%m%d-%H%M%S)
+outFile="VulkanLayerValidationTests.$platform.$today.out.txt"
+errFile="VulkanLayerValidationTests.$platform.$today.err.txt"
+logFile="VulkanLayerValidationTests.$platform.$today.logcat.txt"
+adb $serialFlag pull /sdcard/Android/data/com.example.VulkanLayerValidationTests/files/out.txt $outFile
+adb $serialFlag pull /sdcard/Android/data/com.example.VulkanLayerValidationTests/files/err.txt $errFile
+adb $serialFlag logcat -d > $logFile
+
+if [ -f $outFile ]; then
+    echo $outFile size $(wc -c < $outFile)
+fi
+
+if [ -f $errFile ]; then
+    echo $errFile size $(wc -c < $errFile)
+fi
+
+if [ -f $logFile ]; then
+    echo $logFile size $(wc -c < $logFile)
+fi
+
+if [ $exitCode -ne 0 ]
+then
+    echo 
+    echo VulkanLayerValidationTests result status is unsuccessful.  Dumping test output file:
+    echo =========================================================================================
+    cat $outFile
+    echo =========================================================================================
+    echo
+    echo 
+    echo Dumping logcat text, filtered by ''"VulkanLayerValidationTests"'':
+    echo =========================================================================================
+    cat $logFile | grep VulkanLayerValidationTests
+    echo =========================================================================================
+fi
+
+exit $exitCode
diff --git a/src/third_party/vulkan-validation-layers/src/build-android/update_external_sources_android.bat b/src/third_party/vulkan-validation-layers/src/build-android/update_external_sources_android.bat
new file mode 100755
index 0000000..96ce28d
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/build-android/update_external_sources_android.bat
@@ -0,0 +1,105 @@
+@echo off
+REM Update source for glslang, spirv-tools, and shaderc
+
+REM
+REM Copyright 2016 The Android Open Source Project
+REM Copyright (C) 2015 Valve Corporation
+REM Copyright 2018 LunarG, Inc.
+REM
+REM Licensed under the Apache License, Version 2.0 (the "License");
+REM you may not use this file except in compliance with the License.
+REM You may obtain a copy of the License at
+REM
+REM      http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing, software
+REM distributed under the License is distributed on an "AS IS" BASIS,
+REM WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+REM See the License for the specific language governing permissions and
+REM limitations under the License.
+REM
+
+setlocal EnableDelayedExpansion
+set errorCode=0
+set ANDROID_BUILD_DIR=%~dp0
+set BUILD_DIR=%ANDROID_BUILD_DIR%
+set BASE_DIR=%BUILD_DIR%\third_party
+set SHADERC_DIR=%BASE_DIR%\shaderc
+
+for %%X in (where.exe) do (set FOUND=%%~$PATH:X)
+if not defined FOUND (
+   echo Dependency check failed:
+   echo   where.exe not found
+   echo   This script requires Windows Vista or later, which includes where.exe.
+   set errorCode=1
+)
+
+where /q git.exe
+if %ERRORLEVEL% equ 1 (
+   echo Dependency check failed:
+   echo   git.exe not found
+   echo   Git for Windows can be downloaded here:  https://git-scm.com/download/win
+   echo   Install and ensure git.exe makes it into your PATH
+   set errorCode=1
+)
+
+where /q ndk-build.cmd
+if %ERRORLEVEL% equ 1 (
+   echo Dependency check failed:
+   echo   ndk-build.cmd not found
+   echo   Android NDK can be downloaded here:  http://developer.android.com/ndk/guides/setup.html
+   echo   Install and ensure ndk-build.cmd makes it into your PATH
+   set errorCode=1
+)
+
+:main
+
+if %errorCode% neq 0 (goto:error)
+
+echo Creating and/or updating glslang, spirv-tools, spirv-headers, shaderc, vulkan-headers, vulkan-tools in %BASE_DIR%
+
+set build-shaderc=1
+
+REM Pull down or update external dependencies
+echo Update external dependencies based on the %ANDROID_BUILD_DIR%/known_good.json file
+py -3 ../scripts/update_deps.py --no-build --dir %BASE_DIR% --known_good_dir %BUILD_DIR%
+
+
+if %build-shaderc% equ 1 (
+   call:build_shaderc
+   if %errorCode% neq 0 (goto:error)
+)
+
+echo.
+echo Exiting
+goto:finish
+
+:error
+echo.
+echo Halting due to error
+goto:finish
+
+:finish
+if not "%cd%\" == "%BUILD_DIR%" ( cd %BUILD_DIR% )
+endlocal
+REM This needs a fix to return error, something like exit %errorCode%
+REM Right now it is returning 0
+goto:eof
+
+
+
+
+REM // ======== Functions ======== //
+
+:build_shaderc
+   echo.
+   echo Building %SHADERC_DIR%
+   cd %SHADERC_DIR%\android_test
+   echo Building shaderc with Android NDK
+   call ndk-build NDK_APPLICATION_MK=../../../jni/shaderc/Application.mk THIRD_PARTY_PATH=../third_party -j 4
+   REM Check for existence of one lib, even though we should check for all results
+   if not exist %SHADERC_DIR%\android_test\obj\local\x86\libshaderc.a (
+      echo.
+      echo shaderc build failed!
+      set errorCode=1
+   )
+goto:eof
diff --git a/src/third_party/vulkan-validation-layers/src/build-android/update_external_sources_android.sh b/src/third_party/vulkan-validation-layers/src/build-android/update_external_sources_android.sh
new file mode 100755
index 0000000..fe39a99
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/build-android/update_external_sources_android.sh
@@ -0,0 +1,101 @@
+#!/bin/bash
+# Update source for glslang, spirv-tools, shaderc
+
+# Copyright 2016 The Android Open Source Project
+# Copyright (C) 2015 Valve Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e
+
+ANDROIDBUILDDIR=$PWD
+BUILDDIR=$ANDROIDBUILDDIR
+BASEDIR=$BUILDDIR/third_party
+
+if [[ $(uname) == "Linux" ]]; then
+    cores="$(nproc || echo 4)"
+elif [[ $(uname) == "Darwin" ]]; then
+    cores=$(sysctl -n hw.ncpu)
+fi
+
+#
+# Parse parameters
+#
+
+function printUsage {
+   echo "Supported parameters are:"
+   echo "    --abi <abi> (optional)"
+   echo "    --no-build (optional)"
+   echo
+   echo "i.e. ${0##*/} --abi arm64-v8a \\"
+   exit 1
+}
+
+while [[ $# -gt 0 ]]
+do
+    case $1 in
+        --abi)
+            abi="$2"
+            shift 2
+            ;;
+        --no-build)
+            nobuild=1
+            shift 1
+            ;;
+        *)
+            # unknown option
+            echo Unknown option: $1
+            echo
+            printUsage
+            exit 1
+            ;;
+    esac
+done
+
+echo abi=$abi
+if [[ -z $abi ]]
+then
+    echo No abi provided, so building for all supported abis.
+fi
+
+
+
+
+echo no-build=$nobuild
+if [[ $nobuild ]]
+then
+    echo Skipping build.
+fi
+
+function build_shaderc () {
+   echo "Building $BASEDIR/shaderc"
+   cd $BASEDIR/shaderc/android_test
+   if [[ $abi ]]; then
+      ndk-build NDK_APPLICATION_MK=../../../jni/shaderc/Application.mk THIRD_PARTY_PATH=../third_party APP_ABI=$abi -j $cores;
+   else
+      ndk-build NDK_APPLICATION_MK=../../../jni/shaderc/Application.mk THIRD_PARTY_PATH=../third_party -j $cores;
+   fi
+}
+
+# Pull down or update external dependencies
+echo "Update external dependencies based on the $ANDROIDBUILDDIR/known_good.json file"
+python3 ../scripts/update_deps.py --no-build --dir $BASEDIR --known_good_dir $BUILDDIR
+
+if [[ -z $nobuild ]]
+then
+build_shaderc
+fi
+
+echo ""
+echo "${0##*/} finished."
+
diff --git a/src/third_party/vulkan-validation-layers/src/build-gn/commit_id.py b/src/third_party/vulkan-validation-layers/src/build-gn/commit_id.py
new file mode 100755
index 0000000..78e978e
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/build-gn/commit_id.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2018 The ANGLE Project Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generate commit.h with git commit hash.
+#
+
+import subprocess as sp
+import sys
+import os
+
+usage = """\
+Usage: commit_id.py check <angle_dir>                - check if git is present
+       commit_id.py gen <angle_dir> <file_to_write>  - generate commit.h"""
+
+def grab_output(command, cwd):
+    return sp.Popen(command, stdout=sp.PIPE, shell=True, cwd=cwd).communicate()[0].strip()
+
+if len(sys.argv) < 3:
+    sys.exit(usage)
+
+operation = sys.argv[1]
+cwd = sys.argv[2]
+
+if operation == 'check':
+    index_path = os.path.join(cwd, '.git', 'index')
+    if os.path.exists(index_path):
+        print("1")
+    else:
+        print("0")
+    sys.exit(0)
+
+if len(sys.argv) < 4 or operation != 'gen':
+    sys.exit(usage)
+
+output_file = sys.argv[3]
+commit_id_size = 12
+
+try:
+    commit_id = grab_output('git rev-parse --short=%d HEAD' % commit_id_size, cwd)
+    commit_date = grab_output('git show -s --format=%ci HEAD', cwd)
+except:
+    commit_id = 'invalid-hash'
+    commit_date = 'invalid-date'
+
+hfile = open(output_file, 'w')
+
+hfile.write('#define ANGLE_COMMIT_HASH "%s"\n'    % commit_id)
+hfile.write('#define ANGLE_COMMIT_HASH_SIZE %d\n' % commit_id_size)
+hfile.write('#define ANGLE_COMMIT_DATE "%s"\n'    % commit_date)
+
+hfile.close()
diff --git a/src/third_party/vulkan-validation-layers/src/build-gn/generate_vulkan_layers_json.py b/src/third_party/vulkan-validation-layers/src/build-gn/generate_vulkan_layers_json.py
new file mode 100755
index 0000000..2999cd8
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/build-gn/generate_vulkan_layers_json.py
@@ -0,0 +1,126 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2016 The ANGLE Project Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Generate copies of the Vulkan layers JSON files, with no paths, forcing
+Vulkan to use the default search path to look for layers."""
+
+from __future__ import print_function
+
+import argparse
+import glob
+import json
+import os
+import platform
+import sys
+
+
+def glob_slash(dirname):
+    """Like regular glob but replaces \ with / in returned paths."""
+    return [s.replace('\\', '/') for s in glob.glob(dirname)]
+
+
+def main():
+    parser = argparse.ArgumentParser(description=__doc__)
+    parser.add_argument('--icd', action='store_true')
+    parser.add_argument('source_dir')
+    parser.add_argument('target_dir')
+    parser.add_argument('version_header', help='path to vulkan_core.h')
+    parser.add_argument('json_files', nargs='*')
+    args = parser.parse_args()
+
+    source_dir = args.source_dir
+    target_dir = args.target_dir
+
+    json_files = [j for j in args.json_files if j.endswith('.json')]
+    json_in_files = [j for j in args.json_files if j.endswith('.json.in')]
+
+    data_key = 'ICD' if args.icd else 'layer'
+
+    if not os.path.isdir(source_dir):
+        print(source_dir + ' is not a directory.', file=sys.stderr)
+        return 1
+
+    if not os.path.exists(target_dir):
+        os.makedirs(target_dir)
+
+    # Copy the *.json files from source dir to target dir
+    if (set(glob_slash(os.path.join(source_dir, '*.json'))) != set(json_files)):
+        print(glob.glob(os.path.join(source_dir, '*.json')))
+        print('.json list in gn file is out-of-date', file=sys.stderr)
+        return 1
+
+    for json_fname in json_files:
+        if not json_fname.endswith('.json'):
+            continue
+        with open(json_fname) as infile:
+            data = json.load(infile)
+
+        # Update the path.
+        if not data_key in data:
+            raise Exception(
+                "Could not find '%s' key in %s" % (data_key, json_fname))
+
+        # The standard validation layer has no library path.
+        if 'library_path' in data[data_key]:
+            prev_name = os.path.basename(data[data_key]['library_path'])
+            data[data_key]['library_path'] = prev_name
+
+        target_fname = os.path.join(target_dir, os.path.basename(json_fname))
+        with open(target_fname, 'w') as outfile:
+            json.dump(data, outfile)
+
+    # Get the Vulkan version from the vulkan_core.h file
+    vk_header_filename = args.version_header
+    vk_version = None
+    with open(vk_header_filename) as vk_header_file:
+        for line in vk_header_file:
+            if line.startswith('#define VK_HEADER_VERSION'):
+                vk_version = line.split()[-1]
+                break
+    if not vk_version:
+        print('failed to extract vk_version', file=sys.stderr)
+        return 1
+
+    # Set json file prefix and suffix for generating files, default to Linux.
+    relative_path_prefix = '../lib'
+    file_type_suffix = '.so'
+    if platform.system() == 'Windows':
+        relative_path_prefix = r'..\\'  # json-escaped, hence two backslashes.
+        file_type_suffix = '.dll'
+
+    # For each *.json.in template files in source dir generate actual json file
+    # in target dir
+    if (set(glob_slash(os.path.join(source_dir, '*.json.in'))) !=
+            set(json_in_files)):
+        print('.json.in list in gn file is out-of-date', file=sys.stderr)
+        return 1
+    for json_in_name in json_in_files:
+        if not json_in_name.endswith('.json.in'):
+            continue
+        json_in_fname = os.path.basename(json_in_name)
+        layer_name = json_in_fname[:-len('.json.in')]
+        layer_lib_name = layer_name + file_type_suffix
+        json_out_fname = os.path.join(target_dir, json_in_fname[:-len('.in')])
+        with open(json_out_fname,'w') as json_out_file, \
+             open(json_in_name) as infile:
+            for line in infile:
+                line = line.replace('@RELATIVE_LAYER_BINARY@',
+                                    relative_path_prefix + layer_lib_name)
+                line = line.replace('@VK_VERSION@', '1.1.' + vk_version)
+                json_out_file.write(line)
+
+if __name__ == '__main__':
+    sys.exit(main())
diff --git a/src/third_party/vulkan-validation-layers/src/build-gn/remove_files.py b/src/third_party/vulkan-validation-layers/src/build-gn/remove_files.py
new file mode 100755
index 0000000..7095913
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/build-gn/remove_files.py
@@ -0,0 +1,41 @@
+#!/usr/bin/python2
+
+# Copyright (C) 2019 The ANGLE Project Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# remove_files.py:
+#   This special action is used to cleanup old files from the build directory.
+#   Otherwise ANGLE will pick up the old file(s), causing build or runtime errors.
+#
+
+import glob
+import os
+import sys
+
+if len(sys.argv) < 3:
+    print("Usage: " + sys.argv[0] + " <stamp_file> <remove_patterns>")
+
+stamp_file = sys.argv[1]
+
+for i in range(2, len(sys.argv)):
+    remove_pattern = sys.argv[i]
+    remove_files = glob.glob(remove_pattern)
+    for f in remove_files:
+        if os.path.isfile(f):
+            os.remove(f)
+
+# touch a dummy file to keep a timestamp
+with open(stamp_file, "w") as f:
+    f.write("blah")
diff --git a/src/third_party/vulkan-validation-layers/src/build-gn/secondary/build_overrides/build.gni b/src/third_party/vulkan-validation-layers/src/build-gn/secondary/build_overrides/build.gni
new file mode 100644
index 0000000..c6c11fa
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/build-gn/secondary/build_overrides/build.gni
@@ -0,0 +1,18 @@
+# Copyright (c) 2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+build_with_chromium = false
+ignore_elf32_limitations = true
+linux_use_bundled_binutils_override = false
+use_system_xcode = true
diff --git a/src/third_party/vulkan-validation-layers/src/build-gn/secondary/build_overrides/spirv_tools.gni b/src/third_party/vulkan-validation-layers/src/build-gn/secondary/build_overrides/spirv_tools.gni
new file mode 100644
index 0000000..cd58574
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/build-gn/secondary/build_overrides/spirv_tools.gni
@@ -0,0 +1,20 @@
+# Copyright (c) 2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# We are building inside Vulkan-ValidationLayers
+spirv_tools_standalone = false
+
+# Paths to SPIRV-Tools dependencies
+spirv_tools_googletest_dir = "//external/googletest"
+spirv_tools_spirv_headers_dir = "//external/glslang/External/spirv-tools/external/spirv-headers"
diff --git a/src/third_party/vulkan-validation-layers/src/build-gn/secondary/build_overrides/vulkan_validation_layers.gni b/src/third_party/vulkan-validation-layers/src/build-gn/secondary/build_overrides/vulkan_validation_layers.gni
new file mode 100644
index 0000000..e2cd287
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/build-gn/secondary/build_overrides/vulkan_validation_layers.gni
@@ -0,0 +1,25 @@
+# Copyright (c) 2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Paths to validation layer dependencies
+vulkan_headers_dir = "//external/Vulkan-Headers"
+vvl_spirv_tools_dir = "//external/glslang/External/spirv-tools"
+vvl_glslang_dir = "//external/glslang/"
+
+# Subdirectories for generated files
+vulkan_data_subdir = ""
+vulkan_gen_subdir = ""
+
+# Build options
+use_x11 = true
diff --git a/src/third_party/vulkan-validation-layers/src/build-gn/update_deps.sh b/src/third_party/vulkan-validation-layers/src/build-gn/update_deps.sh
new file mode 100755
index 0000000..f9792f7
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/build-gn/update_deps.sh
@@ -0,0 +1,38 @@
+#!/bin/sh
+
+# Copyright (c) 2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Execute at repo root
+cd "$(dirname $0)/.."
+
+# Use update_deps.py to update source dependencies from /scripts/known_good.json
+scripts/update_deps.py --dir="external" --no-build
+(cd external/glslang; ./update_glslang_sources.py)
+
+# Use gclient to update toolchain dependencies from /build-gn/DEPS (from chromium)
+cat << EOF >> .gclient
+solutions = [
+  { "name"        : ".",
+    "url"         : "https://github.com/KhronosGroup/Vulkan-ValidationLayers",
+    "deps_file"   : "build-gn/DEPS",
+    "managed"     : False,
+    "custom_deps" : {
+    },
+    "custom_vars": {},
+  },
+]
+EOF
+gclient sync
+
diff --git a/src/third_party/vulkan-validation-layers/src/cmake/Copyright_cmake.txt b/src/third_party/vulkan-validation-layers/src/cmake/Copyright_cmake.txt
new file mode 100644
index 0000000..743c634
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/cmake/Copyright_cmake.txt
@@ -0,0 +1,126 @@
+CMake - Cross Platform Makefile Generator
+Copyright 2000-2018 Kitware, Inc. and Contributors
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+* Redistributions of source code must retain the above copyright
+  notice, this list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright
+  notice, this list of conditions and the following disclaimer in the
+  documentation and/or other materials provided with the distribution.
+
+* Neither the name of Kitware, Inc. nor the names of Contributors
+  may be used to endorse or promote products derived from this
+  software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+------------------------------------------------------------------------------
+
+The following individuals and institutions are among the Contributors:
+
+* Aaron C. Meadows <cmake@shadowguarddev.com>
+* Adriaan de Groot <groot@kde.org>
+* Aleksey Avdeev <solo@altlinux.ru>
+* Alexander Neundorf <neundorf@kde.org>
+* Alexander Smorkalov <alexander.smorkalov@itseez.com>
+* Alexey Sokolov <sokolov@google.com>
+* Alex Turbov <i.zaufi@gmail.com>
+* Andreas Pakulat <apaku@gmx.de>
+* Andreas Schneider <asn@cryptomilk.org>
+* André Rigland Brodtkorb <Andre.Brodtkorb@ifi.uio.no>
+* Axel Huebl, Helmholtz-Zentrum Dresden - Rossendorf
+* Benjamin Eikel
+* Bjoern Ricks <bjoern.ricks@gmail.com>
+* Brad Hards <bradh@kde.org>
+* Christopher Harvey
+* Christoph Grüninger <foss@grueninger.de>
+* Clement Creusot <creusot@cs.york.ac.uk>
+* Daniel Blezek <blezek@gmail.com>
+* Daniel Pfeifer <daniel@pfeifer-mail.de>
+* Enrico Scholz <enrico.scholz@informatik.tu-chemnitz.de>
+* Eran Ifrah <eran.ifrah@gmail.com>
+* Esben Mose Hansen, Ange Optimization ApS
+* Geoffrey Viola <geoffrey.viola@asirobots.com>
+* Google Inc
+* Gregor Jasny
+* Helio Chissini de Castro <helio@kde.org>
+* Ilya Lavrenov <ilya.lavrenov@itseez.com>
+* Insight Software Consortium <insightsoftwareconsortium.org>
+* Jan Woetzel
+* Kelly Thompson <kgt@lanl.gov>
+* Konstantin Podsvirov <konstantin@podsvirov.pro>
+* Mario Bensi <mbensi@ipsquad.net>
+* Mathieu Malaterre <mathieu.malaterre@gmail.com>
+* Matthaeus G. Chajdas
+* Matthias Kretz <kretz@kde.org>
+* Matthias Maennich <matthias@maennich.net>
+* Michael Stürmer
+* Miguel A. Figueroa-Villanueva
+* Mike Jackson
+* Mike McQuaid <mike@mikemcquaid.com>
+* Nicolas Bock <nicolasbock@gmail.com>
+* Nicolas Despres <nicolas.despres@gmail.com>
+* Nikita Krupen'ko <krnekit@gmail.com>
+* NVIDIA Corporation <www.nvidia.com>
+* OpenGamma Ltd. <opengamma.com>
+* Patrick Stotko <stotko@cs.uni-bonn.de>
+* Per Øyvind Karlsen <peroyvind@mandriva.org>
+* Peter Collingbourne <peter@pcc.me.uk>
+* Petr Gotthard <gotthard@honeywell.com>
+* Philip Lowman <philip@yhbt.com>
+* Philippe Proulx <pproulx@efficios.com>
+* Raffi Enficiaud, Max Planck Society
+* Raumfeld <raumfeld.com>
+* Roger Leigh <rleigh@codelibre.net>
+* Rolf Eike Beer <eike@sf-mail.de>
+* Roman Donchenko <roman.donchenko@itseez.com>
+* Roman Kharitonov <roman.kharitonov@itseez.com>
+* Ruslan Baratov
+* Sebastian Holtermann <sebholt@xwmw.org>
+* Stephen Kelly <steveire@gmail.com>
+* Sylvain Joubert <joubert.sy@gmail.com>
+* Thomas Sondergaard <ts@medical-insight.com>
+* Tobias Hunger <tobias.hunger@qt.io>
+* Todd Gamblin <tgamblin@llnl.gov>
+* Tristan Carel
+* University of Dundee
+* Vadim Zhukov
+* Will Dicharry <wdicharry@stellarscience.com>
+
+See version control history for details of individual contributions.
+
+The above copyright and license notice applies to distributions of
+CMake in source and binary form.  Third-party software packages supplied
+with CMake under compatible licenses provide their own copyright notices
+documented in corresponding subdirectories or source files.
+
+------------------------------------------------------------------------------
+
+CMake was initially developed by Kitware with the following sponsorship:
+
+ * National Library of Medicine at the National Institutes of Health
+   as part of the Insight Segmentation and Registration Toolkit (ITK).
+
+ * US National Labs (Los Alamos, Livermore, Sandia) ASC Parallel
+   Visualization Initiative.
+
+ * National Alliance for Medical Image Computing (NAMIC) is funded by the
+   National Institutes of Health through the NIH Roadmap for Medical Research,
+   Grant U54 EB005149.
+
+ * Kitware, Inc.
diff --git a/src/third_party/vulkan-validation-layers/src/cmake/FindUDev.cmake b/src/third_party/vulkan-validation-layers/src/cmake/FindUDev.cmake
new file mode 100644
index 0000000..e3d1699
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/cmake/FindUDev.cmake
@@ -0,0 +1,28 @@
+# - FindUDev
+#
+# Copyright (C) 2015 Valve Corporation
+
+find_package(PkgConfig)
+
+pkg_check_modules(PC_LIBUDEV QUIET libudev)
+
+find_path(UDEV_INCLUDE_DIR NAMES libudev.h
+    HINTS
+    ${PC_LIBUDEV_INCLUDEDIR}
+    ${PC_LIBUDEV_INCLUDE_DIRS}
+    )
+
+find_library(UDEV_LIBRARY NAMES udev
+    HINTS
+    ${PC_LIBUDEV_LIBDIR}
+    ${PC_LIBUDEV_LIBRARY_DIRS}
+    )
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(UDev DEFAULT_MSG
+    UDEV_INCLUDE_DIR UDEV_LIBRARY)
+
+mark_as_advanced(UDEV_INCLUDE_DIR UDEV_LIBRARY)
+
+set(UDEV_INCLUDE_DIRS ${UDEV_INCLUDE_DIR})
+set(UDEV_LIBRARIES ${UDEV_LIBRARY})
diff --git a/src/third_party/vulkan-validation-layers/src/cmake/FindVulkan.cmake b/src/third_party/vulkan-validation-layers/src/cmake/FindVulkan.cmake
new file mode 100644
index 0000000..1f4c8ad
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/cmake/FindVulkan.cmake
@@ -0,0 +1,80 @@
+# Distributed under the OSI-approved BSD 3-Clause License.  See accompanying
+# file Copyright.txt or https://cmake.org/licensing for details.
+
+#.rst:
+# FindVulkan
+# ----------
+#
+# Try to find Vulkan
+#
+# IMPORTED Targets
+# ^^^^^^^^^^^^^^^^
+#
+# This module defines :prop_tgt:`IMPORTED` target ``Vulkan::Vulkan``, if
+# Vulkan has been found.
+#
+# Result Variables
+# ^^^^^^^^^^^^^^^^
+#
+# This module defines the following variables::
+#
+#   Vulkan_FOUND          - True if Vulkan was found
+#   Vulkan_INCLUDE_DIRS   - include directories for Vulkan
+#   Vulkan_LIBRARIES      - link against this library to use Vulkan
+#
+# The module will also define two cache variables::
+#
+#   Vulkan_INCLUDE_DIR    - the Vulkan include directory
+#   Vulkan_LIBRARY        - the path to the Vulkan library
+#
+
+if(WIN32)
+  find_path(Vulkan_INCLUDE_DIR
+    NAMES vulkan/vulkan.h
+    PATHS
+      "$ENV{VULKAN_SDK}/Include"
+    )
+
+  if(CMAKE_SIZEOF_VOID_P EQUAL 8)
+    find_library(Vulkan_LIBRARY
+      NAMES vulkan-1
+      PATHS
+        "$ENV{VULKAN_SDK}/Lib"
+        "$ENV{VULKAN_SDK}/Bin"
+        )
+  elseif(CMAKE_SIZEOF_VOID_P EQUAL 4)
+    find_library(Vulkan_LIBRARY
+      NAMES vulkan-1
+      PATHS
+        "$ENV{VULKAN_SDK}/Lib32"
+        "$ENV{VULKAN_SDK}/Bin32"
+        NO_SYSTEM_ENVIRONMENT_PATH
+        )
+  endif()
+else()
+    find_path(Vulkan_INCLUDE_DIR
+      NAMES vulkan/vulkan.h
+      PATHS
+        "$ENV{VULKAN_SDK}/include")
+    find_library(Vulkan_LIBRARY
+      NAMES vulkan
+      PATHS
+        "$ENV{VULKAN_SDK}/lib")
+endif()
+
+set(Vulkan_LIBRARIES ${Vulkan_LIBRARY})
+set(Vulkan_INCLUDE_DIRS ${Vulkan_INCLUDE_DIR})
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(Vulkan
+  DEFAULT_MSG
+  Vulkan_LIBRARY Vulkan_INCLUDE_DIR)
+
+mark_as_advanced(Vulkan_INCLUDE_DIR Vulkan_LIBRARY)
+
+if(Vulkan_FOUND AND NOT TARGET Vulkan::Vulkan)
+  add_library(Vulkan::Vulkan UNKNOWN IMPORTED)
+  set_target_properties(Vulkan::Vulkan PROPERTIES
+    IMPORTED_LOCATION "${Vulkan_LIBRARIES}"
+    INTERFACE_INCLUDE_DIRECTORIES "${Vulkan_INCLUDE_DIRS}")
+endif()
diff --git a/src/third_party/vulkan-validation-layers/src/cmake/FindVulkanHeaders.cmake b/src/third_party/vulkan-validation-layers/src/cmake/FindVulkanHeaders.cmake
new file mode 100644
index 0000000..2378d67
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/cmake/FindVulkanHeaders.cmake
@@ -0,0 +1,86 @@
+#.rst:
+# FindVulkanHeaders
+# -----------------
+#
+# Try to find Vulkan Headers and Registry.
+#
+# This module is intended to be used by projects that build Vulkan
+# "system" components such as the loader and layers.
+# Vulkan applications should instead use the FindVulkan (or similar)
+# find module that locates the headers and the loader library.
+#
+# When using this find module to locate the headers and registry
+# in a Vulkan-Headers repository, the Vulkan-Headers repository
+# should be built with 'install' target and the following environment
+# or CMake variable set to the location of the install directory.
+#
+#    VULKAN_HEADERS_INSTALL_DIR
+#
+# IMPORTED Targets
+# ^^^^^^^^^^^^^^^^
+#
+# This module defines no IMPORTED targets
+#
+# Result Variables
+# ^^^^^^^^^^^^^^^^
+#
+# This module defines the following variables::
+#
+#   VulkanHeaders_FOUND          - True if VulkanHeaders was found
+#   VulkanHeaders_INCLUDE_DIRS   - include directories for VulkanHeaders
+#
+#   VulkanRegistry_FOUND         - True if VulkanRegistry was found
+#   VulkanRegistry_DIRS          - directories for VulkanRegistry
+#
+# The module will also define two cache variables::
+#
+#   VulkanHeaders_INCLUDE_DIR    - the VulkanHeaders include directory
+#   VulkanRegistry_DIR           - the VulkanRegistry directory
+#
+
+# Probe command-line arguments and the environment to see if they specify the
+# Vulkan headers installation path.
+if(NOT DEFINED VULKAN_HEADERS_INSTALL_DIR)
+  if (DEFINED ENV{VULKAN_HEADERS_INSTALL_DIR})
+    set(VULKAN_HEADERS_INSTALL_DIR "$ENV{VULKAN_HEADERS_INSTALL_DIR}")
+  elseif(DEFINED ENV{VULKAN_SDK})
+    set(VULKAN_HEADERS_INSTALL_DIR "$ENV{VULKAN_SDK}/include")
+  endif()
+endif()
+
+if(DEFINED VULKAN_HEADERS_INSTALL_DIR)
+  # When CMAKE_FIND_ROOT_PATH_MODE_INCLUDE is set to ONLY, the HINTS in find_path()
+  # are re-rooted, which prevents VULKAN_HEADERS_INSTALL_DIR from working as
+  # expected. So use NO_CMAKE_FIND_ROOT_PATH to avoid it.
+
+  # Use HINTS instead of PATH to search these locations before
+  # searching system environment variables like $PATH that may
+  # contain SDK directories.
+  find_path(VulkanHeaders_INCLUDE_DIR
+      NAMES vulkan/vulkan.h
+      HINTS ${VULKAN_HEADERS_INSTALL_DIR}/include
+      NO_CMAKE_FIND_ROOT_PATH)
+  find_path(VulkanRegistry_DIR
+      NAMES vk.xml
+      HINTS ${VULKAN_HEADERS_INSTALL_DIR}/share/vulkan/registry
+      NO_CMAKE_FIND_ROOT_PATH)
+else()
+  # If VULKAN_HEADERS_INSTALL_DIR, or one of its variants was not specified,
+  # do a normal search without hints.
+  find_path(VulkanHeaders_INCLUDE_DIR NAMES vulkan/vulkan.h)
+  get_filename_component(VULKAN_REGISTRY_PATH_HINT ${VulkanHeaders_INCLUDE_DIR} DIRECTORY)
+  find_path(VulkanRegistry_DIR NAMES vk.xml HINTS ${VULKAN_REGISTRY_PATH_HINT})
+endif()
+
+set(VulkanHeaders_INCLUDE_DIRS ${VulkanHeaders_INCLUDE_DIR})
+set(VulkanRegistry_DIRS ${VulkanRegistry_DIR})
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(VulkanHeaders
+    DEFAULT_MSG
+    VulkanHeaders_INCLUDE_DIR)
+find_package_handle_standard_args(VulkanRegistry
+    DEFAULT_MSG
+    VulkanRegistry_DIR)
+
+mark_as_advanced(VulkanHeaders_INCLUDE_DIR VulkanRegistry_DIR)
diff --git a/src/third_party/vulkan-validation-layers/src/cmake/FindWayland.cmake b/src/third_party/vulkan-validation-layers/src/cmake/FindWayland.cmake
new file mode 100644
index 0000000..f93218b
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/cmake/FindWayland.cmake
@@ -0,0 +1,66 @@
+# Try to find Wayland on a Unix system
+#
+# This will define:
+#
+#   WAYLAND_FOUND       - True if Wayland is found
+#   WAYLAND_LIBRARIES   - Link these to use Wayland
+#   WAYLAND_INCLUDE_DIR - Include directory for Wayland
+#   WAYLAND_DEFINITIONS - Compiler flags for using Wayland
+#
+# In addition the following more fine grained variables will be defined:
+#
+#   WAYLAND_CLIENT_FOUND  WAYLAND_CLIENT_INCLUDE_DIR  WAYLAND_CLIENT_LIBRARIES
+#   WAYLAND_SERVER_FOUND  WAYLAND_SERVER_INCLUDE_DIR  WAYLAND_SERVER_LIBRARIES
+#   WAYLAND_EGL_FOUND     WAYLAND_EGL_INCLUDE_DIR     WAYLAND_EGL_LIBRARIES
+#
+# Copyright (c) 2013 Martin Gräßlin <mgraesslin@kde.org>
+#
+# Redistribution and use is allowed according to the terms of the BSD license.
+# For details see the accompanying COPYING-CMAKE-SCRIPTS file.
+
+IF (NOT WIN32)
+  IF (WAYLAND_INCLUDE_DIR AND WAYLAND_LIBRARIES)
+    # In the cache already
+    SET(WAYLAND_FIND_QUIETLY TRUE)
+  ENDIF ()
+
+  # Use pkg-config to get the directories and then use these values
+  # in the FIND_PATH() and FIND_LIBRARY() calls
+  FIND_PACKAGE(PkgConfig)
+  PKG_CHECK_MODULES(PKG_WAYLAND QUIET wayland-client wayland-server wayland-egl wayland-cursor)
+
+  SET(WAYLAND_DEFINITIONS ${PKG_WAYLAND_CFLAGS})
+
+  FIND_PATH(WAYLAND_CLIENT_INCLUDE_DIR  NAMES wayland-client.h HINTS ${PKG_WAYLAND_INCLUDE_DIRS})
+  FIND_PATH(WAYLAND_SERVER_INCLUDE_DIR  NAMES wayland-server.h HINTS ${PKG_WAYLAND_INCLUDE_DIRS})
+  FIND_PATH(WAYLAND_EGL_INCLUDE_DIR     NAMES wayland-egl.h    HINTS ${PKG_WAYLAND_INCLUDE_DIRS})
+  FIND_PATH(WAYLAND_CURSOR_INCLUDE_DIR  NAMES wayland-cursor.h HINTS ${PKG_WAYLAND_INCLUDE_DIRS})
+
+  FIND_LIBRARY(WAYLAND_CLIENT_LIBRARIES NAMES wayland-client   HINTS ${PKG_WAYLAND_LIBRARY_DIRS})
+  FIND_LIBRARY(WAYLAND_SERVER_LIBRARIES NAMES wayland-server   HINTS ${PKG_WAYLAND_LIBRARY_DIRS})
+  FIND_LIBRARY(WAYLAND_EGL_LIBRARIES    NAMES wayland-egl      HINTS ${PKG_WAYLAND_LIBRARY_DIRS})
+  FIND_LIBRARY(WAYLAND_CURSOR_LIBRARIES NAMES wayland-cursor   HINTS ${PKG_WAYLAND_LIBRARY_DIRS})
+
+  set(WAYLAND_INCLUDE_DIR ${WAYLAND_CLIENT_INCLUDE_DIR} ${WAYLAND_SERVER_INCLUDE_DIR} ${WAYLAND_EGL_INCLUDE_DIR} ${WAYLAND_CURSOR_INCLUDE_DIR})
+
+  set(WAYLAND_LIBRARIES ${WAYLAND_CLIENT_LIBRARIES} ${WAYLAND_SERVER_LIBRARIES} ${WAYLAND_EGL_LIBRARIES} ${WAYLAND_CURSOR_LIBRARIES})
+
+  list(REMOVE_DUPLICATES WAYLAND_INCLUDE_DIR)
+
+  include(FindPackageHandleStandardArgs)
+
+  FIND_PACKAGE_HANDLE_STANDARD_ARGS(WAYLAND_CLIENT  DEFAULT_MSG  WAYLAND_CLIENT_LIBRARIES  WAYLAND_CLIENT_INCLUDE_DIR)
+  FIND_PACKAGE_HANDLE_STANDARD_ARGS(WAYLAND_SERVER  DEFAULT_MSG  WAYLAND_SERVER_LIBRARIES  WAYLAND_SERVER_INCLUDE_DIR)
+  FIND_PACKAGE_HANDLE_STANDARD_ARGS(WAYLAND_EGL     DEFAULT_MSG  WAYLAND_EGL_LIBRARIES     WAYLAND_EGL_INCLUDE_DIR)
+  FIND_PACKAGE_HANDLE_STANDARD_ARGS(WAYLAND_CURSOR  DEFAULT_MSG  WAYLAND_CURSOR_LIBRARIES  WAYLAND_CURSOR_INCLUDE_DIR)
+  FIND_PACKAGE_HANDLE_STANDARD_ARGS(WAYLAND         DEFAULT_MSG  WAYLAND_LIBRARIES         WAYLAND_INCLUDE_DIR)
+
+  MARK_AS_ADVANCED(
+        WAYLAND_INCLUDE_DIR         WAYLAND_LIBRARIES
+        WAYLAND_CLIENT_INCLUDE_DIR  WAYLAND_CLIENT_LIBRARIES
+        WAYLAND_SERVER_INCLUDE_DIR  WAYLAND_SERVER_LIBRARIES
+        WAYLAND_EGL_INCLUDE_DIR     WAYLAND_EGL_LIBRARIES
+        WAYLAND_CURSOR_INCLUDE_DIR  WAYLAND_CURSOR_LIBRARIES
+  )
+
+ENDIF ()
diff --git a/src/third_party/vulkan-validation-layers/src/cmake/FindX11_XCB.cmake b/src/third_party/vulkan-validation-layers/src/cmake/FindX11_XCB.cmake
new file mode 100644
index 0000000..956bf89
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/cmake/FindX11_XCB.cmake
@@ -0,0 +1,32 @@
+# - Try to find libX11-xcb
+# Once done this will define
+#
+# X11_XCB_FOUND - system has libX11-xcb
+# X11_XCB_LIBRARIES - Link these to use libX11-xcb
+# X11_XCB_INCLUDE_DIR - the libX11-xcb include dir
+# X11_XCB_DEFINITIONS - compiler switches required for using libX11-xcb
+
+# Copyright (c) 2011 Fredrik Höglund <fredrik@kde.org>
+# Copyright (c) 2008 Helio Chissini de Castro, <helio@kde.org>
+# Copyright (c) 2007 Matthias Kretz, <kretz@kde.org>
+#
+# Redistribution and use is allowed according to the terms of the BSD license.
+# For details see the accompanying COPYING-CMAKE-SCRIPTS file.
+
+IF (NOT WIN32)
+  # use pkg-config to get the directories and then use these values
+  # in the FIND_PATH() and FIND_LIBRARY() calls
+  FIND_PACKAGE(PkgConfig)
+  PKG_CHECK_MODULES(PKG_X11_XCB QUIET x11-xcb)
+
+  SET(X11_XCB_DEFINITIONS ${PKG_X11_XCB_CFLAGS})
+
+  FIND_PATH(X11_XCB_INCLUDE_DIR NAMES X11/Xlib-xcb.h HINTS ${PKG_X11_XCB_INCLUDE_DIRS})
+  FIND_LIBRARY(X11_XCB_LIBRARIES NAMES X11-xcb HINTS ${PKG_X11_XCB_LIBRARY_DIRS})
+
+  include(FindPackageHandleStandardArgs)
+  FIND_PACKAGE_HANDLE_STANDARD_ARGS(X11_XCB DEFAULT_MSG X11_XCB_LIBRARIES X11_XCB_INCLUDE_DIR)
+
+  MARK_AS_ADVANCED(X11_XCB_INCLUDE_DIR X11_XCB_LIBRARIES)
+ENDIF (NOT WIN32)
+
diff --git a/src/third_party/vulkan-validation-layers/src/cmake/FindXCB.cmake b/src/third_party/vulkan-validation-layers/src/cmake/FindXCB.cmake
new file mode 100644
index 0000000..2311591
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/cmake/FindXCB.cmake
@@ -0,0 +1,51 @@
+# - FindXCB
+#
+# Copyright (C) 2015 Valve Corporation
+
+find_package(PkgConfig)
+
+if(NOT XCB_FIND_COMPONENTS)
+    set(XCB_FIND_COMPONENTS xcb)
+endif()
+
+include(FindPackageHandleStandardArgs)
+set(XCB_FOUND true)
+set(XCB_INCLUDE_DIRS "")
+set(XCB_LIBRARIES "")
+foreach(comp ${XCB_FIND_COMPONENTS})
+    # component name
+    string(TOUPPER ${comp} compname)
+    string(REPLACE "-" "_" compname ${compname})
+    # header name
+    string(REPLACE "xcb-" "" headername xcb/${comp}.h)
+    # library name
+    set(libname ${comp})
+
+    pkg_check_modules(PC_${comp} QUIET ${comp})
+
+    find_path(${compname}_INCLUDE_DIR NAMES ${headername}
+        HINTS
+        ${PC_${comp}_INCLUDEDIR}
+        ${PC_${comp}_INCLUDE_DIRS}
+        )
+
+    find_library(${compname}_LIBRARY NAMES ${libname}
+        HINTS
+        ${PC_${comp}_LIBDIR}
+        ${PC_${comp}_LIBRARY_DIRS}
+        )
+
+    find_package_handle_standard_args(${comp}
+        FOUND_VAR ${comp}_FOUND
+        REQUIRED_VARS ${compname}_INCLUDE_DIR ${compname}_LIBRARY)
+    mark_as_advanced(${compname}_INCLUDE_DIR ${compname}_LIBRARY)
+
+    list(APPEND XCB_INCLUDE_DIRS ${${compname}_INCLUDE_DIR})
+    list(APPEND XCB_LIBRARIES ${${compname}_LIBRARY})
+
+    if(NOT ${comp}_FOUND)
+        set(XCB_FOUND false)
+    endif()
+endforeach()
+
+list(REMOVE_DUPLICATES XCB_INCLUDE_DIRS)
diff --git a/src/third_party/vulkan-validation-layers/src/cmake/cmake_uninstall.cmake.in b/src/third_party/vulkan-validation-layers/src/cmake/cmake_uninstall.cmake.in
new file mode 100644
index 0000000..2037e36
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/cmake/cmake_uninstall.cmake.in
@@ -0,0 +1,21 @@
+if(NOT EXISTS "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt")
+  message(FATAL_ERROR "Cannot find install manifest: @CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt")
+endif(NOT EXISTS "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt")
+
+file(READ "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt" files)
+string(REGEX REPLACE "\n" ";" files "${files}")
+foreach(file ${files})
+  message(STATUS "Uninstalling $ENV{DESTDIR}${file}")
+  if(IS_SYMLINK "$ENV{DESTDIR}${file}" OR EXISTS "$ENV{DESTDIR}${file}")
+    exec_program(
+      "@CMAKE_COMMAND@" ARGS "-E remove \"$ENV{DESTDIR}${file}\""
+      OUTPUT_VARIABLE rm_out
+      RETURN_VALUE rm_retval
+      )
+    if(NOT "${rm_retval}" STREQUAL 0)
+      message(FATAL_ERROR "Problem when removing $ENV{DESTDIR}${file}")
+    endif(NOT "${rm_retval}" STREQUAL 0)
+  else(IS_SYMLINK "$ENV{DESTDIR}${file}" OR EXISTS "$ENV{DESTDIR}${file}")
+    message(STATUS "File $ENV{DESTDIR}${file} does not exist.")
+  endif(IS_SYMLINK "$ENV{DESTDIR}${file}" OR EXISTS "$ENV{DESTDIR}${file}")
+endforeach(file)
diff --git a/src/third_party/vulkan-validation-layers/src/docs/best_practices.md b/src/third_party/vulkan-validation-layers/src/docs/best_practices.md
new file mode 100644
index 0000000..506eed9
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/docs/best_practices.md
@@ -0,0 +1,60 @@
+<!-- markdownlint-disable MD041 -->
+<!-- Copyright 2015-2019 LunarG, Inc. -->
+[![Khronos Vulkan][1]][2]
+
+[1]: https://vulkan.lunarg.com/img/Vulkan_100px_Dec16.png "https://www.khronos.org/vulkan/"
+[2]: https://www.khronos.org/vulkan/
+
+# Best Practices Validation
+
+[![Creative Commons][3]][4]
+
+[3]: https://i.creativecommons.org/l/by-nd/4.0/88x31.png "Creative Commons License"
+[4]: https://creativecommons.org/licenses/by-nd/4.0/
+
+Best Practices Validation is implemented in the `VK_LAYER_KHRONOS_validation` layer. When enabled, the Best Practices validation
+object highlights potential performance issues, questionable usage patterns, common mistakes, and items not specifically prohibited
+by the Vulkan specification but that may lead to application problems.
+
+Best Practices will ideally be run periodically along with normal validation checks so that issues may be addressed in early stages of development.
+
+The specific areas covered by this layer are currently tracked in
+[Github Issue #24: Best Practices/Assistant/Performance Layer.](https://github.com/KhronosGroup/Vulkan-ValidationLayers/issues/24)
+Requests for additional checks can be posted through the same issue, or by creating a new Github issue.
+
+## Enabling Best Practices Validation
+
+Best Practices Validation is disabled by default. To turn on Best Practices validation, add the following to your layer settings file,
+`vk_layer_settings.txt`:
+
+```code
+khronos_validation.enables = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT
+```
+
+To enable using environment variables, set the following variable:
+
+```code
+VK_LAYER_ENABLES=VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT
+```
+
+Some platforms do not support configuration of the validation layers with this configuration file.
+Programs running on these platforms must then use the programmatic interface.
+
+### Enabling and Specifying Options with the Programmatic Interface
+
+The `VK_EXT_validation_features` extension can be used to enable Best Practices Validation at CreateInstance time.
+
+Here is sample code illustrating how to enable it:
+
+```code
+VkValidationFeatureEnableEXT enables[] = {VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT};
+VkValidationFeaturesEXT features = {};
+features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
+features.enabledValidationFeatureCount = 1;
+features.pEnabledValidationFeatures = enables;
+
+VkInstanceCreateInfo info = {};
+info.pNext = &features;
+```
diff --git a/src/third_party/vulkan-validation-layers/src/docs/core_checks.md b/src/third_party/vulkan-validation-layers/src/docs/core_checks.md
new file mode 100644
index 0000000..87648d1
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/docs/core_checks.md
@@ -0,0 +1,67 @@
+<!-- markdownlint-disable MD041 -->
+<!-- Copyright 2015-2019 LunarG, Inc. -->
+
+[![Khronos Vulkan][1]][2]
+
+[1]: https://vulkan.lunarg.com/img/Vulkan_100px_Dec16.png "https://www.khronos.org/vulkan/"
+[2]: https://www.khronos.org/vulkan/
+
+# Core Validation Checks
+
+[![Creative Commons][3]][4]
+
+[3]: https://i.creativecommons.org/l/by-nd/4.0/88x31.png "Creative Commons License"
+[4]: https://creativecommons.org/licenses/by-nd/4.0/
+
+Implemented as part of the `VK_LAYER_KHRONOS_validation` layer, this validation object is responsible for validating
+the status of descriptor sets, command buffers, shader modules, pipeline states, renderpass usage, synchronization,
+dynamic states, and many other types of valid usage. It is the main module responsible for validation requiring
+substantial background state.
+
+This module validates that:
+
+- the descriptor set state and pipeline state at each draw call are consistent
+- pipelines are created correctly, known when used and bound at draw time
+- descriptor sets are known and consist of valid types, formats, and layout
+- descriptor set regions are valid, bound, and updated appropriately
+- command buffers referenced are known and valid
+- command sequencing for specific state dependencies and renderpass use is correct
+- memory is available
+- dynamic state is correctly set.
+
+This validation object will print errors if validation checks are not correctly met, and provide context related to
+the failures.
+
+## Memory/Resource related functionality
+
+This validation additionally attempts to ensure that memory objects are managed correctly by the application.
+These memory objects may be bound to pipelines, objects, and command buffers, and then submitted to the GPU
+for work. Specifically the layer validates that:
+
+- the correct memory objects have been bound
+- memory objects are specified correctly upon command buffer submittal
+- only existing memory objects are referenced
+- destroyed memory objects are not referenced
+- the application has confirmed any memory objects to be reused or destroyed have been properly unbound
+- texture formats and render target formats are valid.
+
+Errors will be printed if validation checks are not correctly met and warnings if improper (but not illegal) use of
+memory is detected.  Validation also dumps all memory references and bindings for each operation.
+
+## Shader validation functionality
+
+Additional checks apply to the VS->FS and FS->CB interfaces with the pipeline.  These checks include:
+
+- validating that all variables which are part of a shader interface are  decorated with either `spv::DecLocation` or `spv::DecBuiltin` (that is, only the SSO rendezvous-by-location model is supported)
+- emitting a warning if a location is declared only in the producing stage (useless work is being done)
+- emitting an error if a location is declared only in the consuming stage (garbage will be read).
+
+A special error-checking case is invoked when the FS stage writes a built-in corresponding to the legacy `gl_FragColor`.  In this case, an error is emitted if
+
+- the FS also writes any user-defined output
+- the CB has any attachment with a `UINT` or `SINT` type.
+
+These extra checks are to ensure that the legacy broadcast of `gl_FragColor` to all bound color attachments is well-defined.
+
+## Swapchain validation functionality
+
+This area of functionality validates the use of the WSI (Window System Integration) "swapchain" extensions (e.g., `VK_EXT_KHR_swapchain` and `VK_EXT_KHR_device_swapchain`).
diff --git a/src/third_party/vulkan-validation-layers/src/docs/gpu_validation.md b/src/third_party/vulkan-validation-layers/src/docs/gpu_validation.md
new file mode 100644
index 0000000..864025f
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/docs/gpu_validation.md
@@ -0,0 +1,967 @@
+<!-- markdownlint-disable MD041 -->
+<!-- Copyright 2015-2019 LunarG, Inc. -->
+[![Khronos Vulkan][1]][2]
+
+[1]: https://vulkan.lunarg.com/img/Vulkan_100px_Dec16.png "https://www.khronos.org/vulkan/"
+[2]: https://www.khronos.org/vulkan/
+
+# GPU-Assisted Validation
+
+[![Creative Commons][3]][4]
+
+[3]: https://i.creativecommons.org/l/by-nd/4.0/88x31.png "Creative Commons License"
+[4]: https://creativecommons.org/licenses/by-nd/4.0/
+
+GPU-Assisted Validation is implemented in the SPIR-V Tools optimizer and the `VK_LAYER_KHRONOS_validation` layer.
+This document covers the design of the layer portion of the implementation.
+
+## Basic Operation
+
+The basic operation of GPU-Assisted Validation consists of instrumenting shader code to perform run-time checking of shaders and
+reporting any error conditions to the layer.
+The layer then reports the errors to the user via the same reporting mechanisms used by the rest of the validation system.
+
+The layer instruments the shaders by passing the shader's SPIR-V bytecode to the SPIR-V optimizer component and
+instructs the optimizer to perform an instrumentation pass to add the additional instructions to perform the run-time checking.
+The layer then passes the resulting modified SPIR-V bytecode to the driver as part of the process of creating a ShaderModule.
+
+The layer also allocates a buffer that describes the length of all descriptor arrays and the write state of each element of each array.
+It only does this if the VK_EXT_descriptor_indexing extension is enabled.
+
+The layer also allocates a buffer that describes all addresses retrieved from vkGetBufferDeviceAddressEXT and the sizes of the corresponding buffers.
+It only does this if the VK_EXT_buffer_device_address extension is enabled.
+
+As the shader is executed, the instrumented shader code performs the run-time checks.
+If a check detects an error condition, the instrumentation code writes an error record into the GPU's device memory.
+This record is small and is on the order of a dozen 32-bit words.
+Since multiple shader stages and multiple invocations of a shader can all detect errors, the instrumentation code
+writes error records into consecutive memory locations as long as there is space available in the pre-allocated block of device memory.
+
+The layer inspects this device memory block after completion of a queue submission.
+If the GPU had written an error record to this memory block,
+the layer analyzes this error record and constructs a validation error message
+which is then reported in the same manner as other validation messages.
+If the shader was compiled with debug information (source code and SPIR-V instruction mapping to source code lines), the layer
+also provides the line of shader source code that provoked the error as part of the validation error message.
+
+## GPU-Assisted Validation Checks
+
+The initial release (Jan 2019) of GPU-Assisted Validation includes checking for out-of-bounds descriptor array indexing
+for image/texel descriptor types.
+
+The second release (Apr 2019) adds validation for out-of-bounds descriptor array indexing and use of unwritten descriptors when the 
+VK_EXT_descriptor_indexing extension is enabled.  Also added (June 2019) was validation for buffer descriptors.
+
+A third update (Aug 2019) adds validation of building top level acceleration structure for ray tracing when the
+VK_NV_ray_tracing extension is enabled.
+
+A further update (August 2019) adds bounds checking for pointers retrieved from vkGetBufferDeviceAddressEXT.
+
+### Out-of-Bounds(OOB) Descriptor Array Indexing
+
+Checking for correct indexing of descriptor arrays is sometimes referred to as "bind-less validation".
+It is called "bind-less" because a binding in a descriptor set may contain an array of like descriptors.
+And unless there is a constant or compile-time indication of which descriptor in the array is selected,
+the descriptor binding status is considered to be ambiguous, leaving the actual binding to be determined at run-time.
+
+As an example, a fragment shader program may use a variable to index an array of combined image samplers.
+Such a line might look like:
+
+```glsl
+uFragColor = light * texture(tex[tex_ind], texcoord.xy);
+```
+
+The array of combined image samplers is `tex` and has 6 samplers in the array.
+The complete validation error message issued when `tex_ind` indexes past the array is:
+
+```terminal
+ERROR : VALIDATION - Message Id Number: 0 | Message Id Name: UNASSIGNED-Image descriptor index out of bounds
+        Index of 6 used to index descriptor array of length 6.  Command buffer (CubeDrawCommandBuf)(0xbc24b0).
+        Pipeline (0x45). Shader Module (0x43). Shader Instruction Index = 108.  Stage = Fragment.
+        Fragment coord (x,y) = (419.5, 254.5). Shader validation error occurred in file:
+        /home/user/src/Vulkan-ValidationLayers/external/Vulkan-Tools/cube/cube.frag at line 45.
+45:    uFragColor = light * texture(tex[tex_ind], texcoord.xy);
+```
+
+The VK_EXT_descriptor_indexing extension allows a shader to declare a descriptor array without specifying its size:
+
+```glsl
+layout(set = 0, binding = 1) uniform sampler2D tex[];
+```
+In this case, the layer needs to tell the optimization code how big the descriptor array is so the code can determine what is out of 
+bounds and what is not.
+
+The extension also allows descriptor set bindings to be partially bound, meaning that as long as the shader doesn't use certain
+array elements, those elements are not required to have been written.
+The instrumentation code needs to know which elements of a descriptor array have been written, so that it can tell if one is used
+that has not been written.
+
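+As a rough, application-side sketch (not code from the layer itself), a binding might be declared partially bound as follows,
+assuming `<vulkan/vulkan.h>` is included, VK_EXT_descriptor_indexing is enabled, and the actual binding description is filled in elsewhere:
+
+```C
+/* Hypothetical illustration: mark the binding flags so that unused elements of
+   a descriptor array are not required to have been written. */
+VkDescriptorBindingFlagsEXT binding_flag = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT;
+
+VkDescriptorSetLayoutBindingFlagsCreateInfoEXT flags_info = {};
+flags_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT;
+flags_info.bindingCount = 1;
+flags_info.pBindingFlags = &binding_flag;
+
+VkDescriptorSetLayoutCreateInfo layout_info = {};
+layout_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
+layout_info.pNext = &flags_info;
+/* layout_info.bindingCount / pBindings would describe the sampler array itself. */
+```
+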
+Note that currently, VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT validation is not working and all accesses are reported as valid.
+
+### Buffer device address checking
+
+The vkGetBufferDeviceAddressEXT routine can be used to get a GPU address that a shader can use to directly address a particular buffer.
+GPU-Assisted Validation code keeps track of all such addresses, along with the size of the associated buffer, and creates an input buffer listing all such address/size pairs.
+Shader code is instrumented to validate buffer_reference addresses and report any reads or writes that do not fall within the listed address/size regions.
+
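+For reference, a minimal application-side sketch of the call being tracked (assuming `<vulkan/vulkan.h>` is included,
+`device` and `buffer` are valid handles, and VK_EXT_buffer_device_address is enabled):
+
+```C
+/* Hypothetical illustration: retrieve the GPU address of "buffer".  The layer
+   records the returned address together with the buffer's size so instrumented
+   shaders can check that buffer_reference accesses stay within that range. */
+VkBufferDeviceAddressInfoEXT address_info = {};
+address_info.sType = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT;
+address_info.buffer = buffer;
+
+VkDeviceAddress address = vkGetBufferDeviceAddressEXT(device, &address_info);
+```
+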
+## GPU-Assisted Validation Options
+
+Here are the options related to activating GPU-Assisted Validation:
+
+1. Enable GPU-Assisted Validation - GPU-Assisted Validation is off by default and must be enabled.
+
+    GPU-Assisted Validation is disabled by default because the shader instrumentation may introduce significant
+    shader performance degradation and additional resource consumption.
+    GPU-Assisted Validation requires additional resources such as device memory and descriptors.
+    It is desirable for the user to opt-in to this feature because of these requirements.
+    In addition, there are several limitations that may adversely affect application behavior,
+    as described later in this document.
+
+2. Reserve a Descriptor Set Binding Slot - Modifies the value of the `VkPhysicalDeviceLimits::maxBoundDescriptorSets`
+   property to return a value one less than the actual device's value to "reserve" a descriptor set binding slot for use by GPU validation.
+
+   This option is likely only of interest to applications that dynamically adjust their descriptor set bindings to adjust for
+   the limits of the device.
+
+### Enabling and Specifying Options with a Configuration File
+
+The existing layer configuration file mechanism can be used to enable GPU-Assisted Validation.
+This mechanism is described on the
+[LunarXchange website](https://vulkan.lunarg.com/doc/sdk/latest/windows/layer_configuration.html),
+in the "Layers Overview and Configuration" document.
+
+To turn on GPU validation, add the following to your layer settings file, which is often
+named `vk_layer_settings.txt`.
+
+```code
+khronos_validation.enables = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT 
+```
+
+To turn on GPU validation and request to reserve a binding slot:
+
+```code
+khronos_validation.enables = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT,VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT 
+```
+
+Some platforms do not support configuration of the validation layers with this configuration file.
+Programs running on these platforms must then use the programmatic interface.
+
+### Enabling and Specifying Options with the Programmatic Interface
+
+The `VK_EXT_validation_features` extension can be used to enable GPU-Assisted Validation at CreateInstance time.
+
+Here is sample code illustrating how to enable it:
+
+```C
+VkValidationFeatureEnableEXT enables[] = {VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT};
+VkValidationFeaturesEXT features = {};
+features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
+features.enabledValidationFeatureCount = 1;
+features.pEnabledValidationFeatures = enables;
+
+VkInstanceCreateInfo info = {};
+info.pNext = &features;
+```
+
+Use the `VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT` enum to reserve a binding slot.
+
+## GPU-Assisted Validation Limitations
+
+There are several limitations that may impede the operation of GPU-Assisted Validation:
+
+### Vulkan 1.1
+
+Vulkan 1.1 or later is required because the GPU instrumentation code uses SPIR-V 1.3 features,
+and Vulkan 1.1 is the first version that guarantees SPIR-V 1.3 support.
+
+### Descriptor Types
+
+The current implementation works with image, texel, and buffer descriptor types.
+A complete list appears later in this document.
+
+### Descriptor Set Binding Limit
+
+This is probably the most important limitation and is related to the
+`VkPhysicalDeviceLimits::maxBoundDescriptorSets` device limit.
+
+When applications use all the available descriptor set binding slots,
+GPU-Assisted Validation cannot be performed because it needs a descriptor set to
+locate the memory for writing the error report record.
+
+This problem is most likely to occur on devices, often mobile, that support only the
+minimum required value for `VkPhysicalDeviceLimits::maxBoundDescriptorSets`, which is 4.
+Some applications may be written to use 4 slots since this is the highest value that
+is guaranteed by the specification.
+When such an application using 4 slots runs on a device with only 4 slots,
+then GPU-Assisted Validation cannot be performed.
+
+In this implementation, this condition is detected and gracefully recovered from by
+building the graphics pipeline with non-instrumented shaders instead of instrumented ones.
+An error message is also displayed informing the user of the condition.
+
+Applications don't have many options in this situation and it is anticipated that
+changing the application to free a slot is difficult.
+
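+As a rough sketch, an application concerned about this interaction could query the limit up front
+(assuming `<vulkan/vulkan.h>` is included and `physical_device` is an already-selected VkPhysicalDevice):
+
+```C
+/* Hypothetical illustration: check how many descriptor set binding slots exist. */
+VkPhysicalDeviceProperties props;
+vkGetPhysicalDeviceProperties(physical_device, &props);
+
+if (props.limits.maxBoundDescriptorSets <= 4) {
+    /* Minimum-spec device: if the application itself uses all of these slots,
+       GPU-Assisted Validation falls back to non-instrumented shaders. */
+}
+```
+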
+### Device Memory
+
+GPU-Assisted Validation does allocate device memory for the error report buffers, and if
+descriptor indexing is enabled, for the input buffer of descriptor sizes and write state.
+This can lead to a greater chance of memory exhaustion, especially in cases where
+the application is trying to use all of the available memory.
+The extra memory allocations are also not visible to the application, making it
+impossible for the application to account for them.
+
+Note that if descriptor indexing is enabled, the input buffer size will be equal to
+(1 + (number_of_sets * 2) + (binding_count * 2) + descriptor_count) words of memory where
+binding_count is the binding number of the largest binding in the set.  
+This means that sparsely populated sets and sets with a very large binding will cause
+the input buffer to be much larger than it could be with more densely packed binding numbers.
+As a best practice, when using GPU-Assisted Validation with descriptor indexing enabled,
+make sure descriptor bindings are densely packed.
+
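+As a worked example, assuming a single descriptor set whose largest binding number is 7 and which contains
+16 descriptors in total, the input buffer would need 1 + (1 * 2) + (7 * 2) + 16 = 33 words.
+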
+If GPU-Assisted Validation device memory allocations fail, the device could become
+unstable because some previously-built pipelines may contain instrumented shaders.
+This is a condition that is very difficult to recover from, so the layer simply
+prints an error message and refrains from any further allocations or instrumentation.
+In practice there is still a reasonable chance of recovery,
+especially if the instrumented shaders never write any error records.
+
+### Descriptors
+
+This is roughly the same problem as the device memory problem mentioned above,
+but for descriptors.
+Any failure to allocate a descriptor set means that the instrumented shader code
+won't have a place to write error records, resulting in unpredictable device
+behavior.
+
+### Other Device Limits
+
+This implementation uses additional resources that may count against the following limits,
+and possibly others:
+
+* `maxMemoryAllocationCount`
+* `maxBoundDescriptorSets`
+* `maxPerStageDescriptorStorageBuffers`
+* `maxPerStageResources`
+* `maxDescriptorSetStorageBuffers`
+* `maxFragmentCombinedOutputResources`
+
+The implementation does not take steps to avoid exceeding these limits
+and does not update the tracking performed by other validation functions.
+
+### A Note About the `VK_EXT_buffer_device_address` Extension
+
+The recently introduced `VK_EXT_buffer_device_address` extension can be used
+to implement GPU-Assisted Validation without some of the limitations described above.
+This approach would use this extension to obtain a GPU device pointer to a storage
+buffer and make it available to the shader via a specialization constant.
+This technique removes the need to create descriptors, use a descriptor set slot,
+modify pipeline layouts, etc, and would relax some of the limitations listed above.
+
+This alternate implementation is under consideration.
+
+## GPU-Assisted Validation Internal Design
+
+This section may be of interest to readers who want to know how GPU-Assisted Validation is implemented.
+It isn't necessarily required for using the feature.
+
+### General
+
+In general, the implementation does:
+
+* For each draw, dispatch, and trace rays call, allocate a buffer with enough device memory to hold a single debug output record written by the
+    instrumented shader code.
+    If descriptor indexing is enabled, calculate the amount of memory needed to describe the descriptor arrays sizes and
+    write states and allocate device memory and a buffer for input to the instrumented shader.
+    The Vulkan Memory Allocator is used to handle this efficiently.
+
+    There is probably little advantage in providing a larger output buffer in order to obtain more debug records.
+    It is likely, especially for fragment shaders, that multiple errors occurring near each other have the same root cause.
+
+    A block is allocated on a per draw basis to make it possible to associate a shader debug error record with
+    a draw within a command buffer.
+    This is done partly to give the user more information in the error report, namely the command buffer handle/name and the draw within that command buffer.
+    An alternative design allocates this block on a per-device or per-queue basis and should work.
+    However, it is not possible to identify the command buffer that causes the error if multiple command buffers
+    are submitted at once.
+* For each draw, dispatch, and trace rays call, allocate a descriptor set and update it to point to the block of device memory just allocated.
+    If descriptor indexing is enabled, also update the descriptor set to point to the allocated input buffer.
+    Fill the DI input buffer with the size and write state information for each descriptor array.
+    There is a descriptor set manager to handle this efficiently.
+    If the buffer device address extension is enabled, allocate an input buffer to hold the address / size pairs for all addresses retrieved from vkGetBufferDeviceAddressEXT.
+    Also make an additional call down the chain to create a bind descriptor set command to bind our descriptor set at the desired index.
+    This has the effect of binding the device memory block belonging to this draw so that the GPU instrumentation
+    writes into this buffer for when the draw is executed.
+    The end result is that each draw call has its own buffer containing GPU instrumentation error
+    records, if any occurred while executing that draw.
+* Determine the descriptor set binding index that is eventually used to bind the descriptor set just allocated and updated.
+    Usually, it is `VkPhysicalDeviceLimits::maxBoundDescriptorSets` minus one.
+    For devices that have a very high or no limit on this bound, pick an index that isn't too high, but above most other device
+    maxima such as 32.
+* When creating a ShaderModule, pass the SPIR-V bytecode to the SPIR-V optimizer to perform the instrumentation pass.
+    Pass the desired descriptor set binding index to the optimizer via a parameter so that the instrumented
+    code knows which descriptor to use for writing error report data to the memory block.
+    If descriptor indexing is enabled, turn on OOB and write state checking in the instrumentation pass.
+    If the buffer_device_address extension is enabled, apply a pass to add instrumentation checking for out of bounds buffer references.
+    Use the instrumented bytecode to create the ShaderModule.
+* For all pipeline layouts, add our descriptor set to the layout, at the binding index determined earlier.
+    Fill any gaps with empty descriptor sets.
+
+    If the incoming layout already has a descriptor set placed at our desired index, the layer must not add its
+    descriptor set to the layout, replacing the one in the incoming layout.
+    Instead, the layer leaves the layout alone and later replaces the instrumented shaders with
+    non-instrumented ones when the pipeline layout is later used to create a graphics pipeline.
+    The layer issues an error message to report this condition.
+* When creating a GraphicsPipeline, ComputePipeline, or RayTracingPipeline, check to see if the pipeline is using the debug binding index.
+    If it is, replace the instrumented shaders in the pipeline with non-instrumented ones.
+* Before calling QueueSubmit, if descriptor indexing is enabled, check to see if there were any unwritten descriptors that were declared
+    update-after-bind.
+    If there were, update the write state of those elements.
+* After calling QueueSubmit, perform a wait on the queue to allow the queue to finish executing.
+    Then map and examine the device memory block for each draw or trace ray command that was submitted.
+    If any debug record is found, generate a validation error message for each record found.
+
+The above describes only the high-level details of GPU-Assisted Validation operation.
+More detail is found in the discussion of the individual hooked functions below.
+
+### Initialization
+
+When the validation layer loads, it examines the user options from both the layer settings file and the
+`VK_EXT_validation_features` extension.
+Note that it also processes the subsumed `VK_EXT_validation_flags` extension for simple backwards compatibility.
+From these options, the layer sets instance-scope flags in the validation layer tracking data to indicate if
+GPU-Assisted Validation has been requested, along with any other associated options.
+
+### "Calling Down the Chain"
+
+Much of the GPU-Assisted Validation implementation involves making "application level" Vulkan API
+calls outside of the application's API usage to create resources and perform its required operations
+inside of the validation layer.
+These calls are not routed up through the top of the loader/layer/driver call stack via the loader.
+Instead, they are simply dispatched via the containing layer's dispatch table.
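+
+As a rough illustration (the structure and member names here are assumptions for the sketch, not the layer's
+actual code), such a call goes through function pointers the layer obtained for the next object in the chain:
+
+```C
+#include <vulkan/vulkan.h>
+
+/* Illustrative only: a trimmed-down per-device record holding an entry point
+ * for the next layer/driver, fetched earlier via vkGetDeviceProcAddr. */
+typedef struct layer_device_data {
+    VkDevice device;
+    PFN_vkCreateBuffer next_CreateBuffer;
+} layer_device_data;
+
+/* Create a buffer by dispatching directly to the next layer or driver.
+ * Nothing above this layer in the stack sees or validates this call. */
+static VkResult CreateValidationBuffer(layer_device_data *dev_data,
+                                       const VkBufferCreateInfo *create_info,
+                                       VkBuffer *buffer) {
+    return dev_data->next_CreateBuffer(dev_data->device, create_info, NULL, buffer);
+}
+```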
+
+These calls therefore don't pass through any validation checks that occur before the GPU validation checks are run.
+This doesn't present any particular problem, but it does raise some issues:
+
+* The additional API calls are not fully validated
+
+  This implies that this additional code may never be checked for validation errors.
+  To address this, the code can "just" be written carefully so that it is "valid" Vulkan,
+  which is hard to do.
+
+  Or, this code can be checked by loading a Khronos validation layer with
+  GPU validation enabled on top of "normal" standard validation in the
+  layer stack, which effectively validates the API usage of this code.
+  This sort of checking is performed by layer developers to check that the additional
+  Vulkan usage is valid.
+
+  This validation can be accomplished by:
+
+  * Build the validation layer with a hack to force GPU-Assisted Validation to be enabled (don't use the exposed mechanisms because you probably don't want it enabled twice).
+  * Rename this layer binary to something else, such as "khronos_validation2", to keep it apart from the
+    "normal" Khronos validation.
+  * Create a new JSON file with the new layer name.
+  * Set up the layer stack so that the "khronos_validation2" layer is on top of or before the actual Khronos
+    validation layer.
+  * Run tests and check for validation errors pointing to API usage in the "khronos_validation2" layer.
+
+  This should only need to be done after making any major changes to the implementation.
+
+  Another approach involves capturing an application trace with `vktrace` and then playing
+  it back with `vkreplay`.
+
+* The additional API calls are not state-tracked
+
+  This means that things like device memory allocations and descriptor allocations are not
+  tracked and do not show up in any of the bookkeeping performed by the validation layers.
+  For example, any device memory allocation performed by GPU-Assisted Validation won't be
+  counted towards the maximum number of allocations allowed by a device.
+  This could lead to an early allocation failure that is not accompanied by a validation error.
+
+  This shortcoming is left unaddressed in this implementation because it is anticipated that
+  a later implementation of GPU-Assisted Validation using the `VK_EXT_buffer_device_address`
+  extension will have less of a need to allocate these
+  tracked resources, making it less of an issue.
+
+### Code Structure and Relationship to the Core Validation Layer
+
+The GPU-Assisted Validation code is largely contained in one
+[file](https://github.com/KhronosGroup/Vulkan-ValidationLayers/blob/master/layers/gpu_validation.cpp), with "hooks" in
+the other validation code that call functions in this file.
+These hooks in the validation code look something like this:
+
+```C
+if (GetEnables(dev_data)->gpu_validation) {
+    GpuPreCallRecordDestroyPipeline(dev_data, pipeline_state);
+}
+```
+
+The GPU-Assisted Validation code is linked into the shared library for the Khronos and core validation layers.
+
+#### Review of Khronos Validation Code Structure
+
+Each function for a Vulkan API command intercepted in the Khronos validation layer is usually split up
+into several decomposed functions in order to organize the implementation.
+These functions take the form of:
+
+* PreCallValidate&lt;foo&gt;: Perform validation steps before calling down the chain
+* PostCallValidate&lt;foo&gt;: Perform validation steps after calling down the chain
+* PreCallRecord&lt;foo&gt;: Perform state recording before calling down the chain
+* PostCallRecord&lt;foo&gt;: Perform state recording after calling down the chain
+
+The GPU-Assisted Validation functions follow this pattern not by hooking into the top-level validation API shim, but
+by hooking one of these decomposed functions.
+
+The design of each hooked function follows:
+
+#### GpuPreCallRecordCreateDevice
+
+* Modify the `VkPhysicalDeviceFeatures` to turn on two additional physical device features:
+  * `fragmentStoresAndAtomics`
+  * `vertexPipelineStoresAndAtomics`
+
+#### GpuPostCallRecordCreateDevice
+
+* Determine and record (save in device state) the desired descriptor set binding index
+* Initialize Vulkan Memory Allocator
+  * Determine error record block size based on the maximum size of the error record and alignment limits of the device
+* Initialize descriptor set manager
+* Make a descriptor set layout to describe our descriptor set
+* Make a descriptor set layout to describe a "dummy" descriptor set that contains no descriptors
+  * This is used to "pad" pipeline layouts to fill any gaps between the used bind indices and our bind index
+* Record these objects in the per-device state
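+
+A minimal sketch of the two descriptor set layouts described above, assuming the error record block is exposed
+to the instrumented shaders as a single storage buffer at binding 0 (the real layer's bindings and stage flags
+may differ):
+
+```C
+#include <vulkan/vulkan.h>
+
+/* Sketch: one layout with a lone storage-buffer binding for error records,
+ * plus an empty "dummy" layout used to pad pipeline layouts. */
+static VkResult MakeDebugDescriptorSetLayouts(VkDevice device,
+                                              VkDescriptorSetLayout *debug_layout,
+                                              VkDescriptorSetLayout *dummy_layout) {
+    VkDescriptorSetLayoutBinding binding = {
+        .binding = 0,
+        .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
+        .descriptorCount = 1,
+        .stageFlags = VK_SHADER_STAGE_ALL,
+    };
+    VkDescriptorSetLayoutCreateInfo info = {
+        .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
+        .bindingCount = 1,
+        .pBindings = &binding,
+    };
+    VkResult result = vkCreateDescriptorSetLayout(device, &info, NULL, debug_layout);
+    if (result != VK_SUCCESS) return result;
+
+    /* The dummy layout contains no bindings at all. */
+    info.bindingCount = 0;
+    info.pBindings = NULL;
+    return vkCreateDescriptorSetLayout(device, &info, NULL, dummy_layout);
+}
+```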
+
+#### GpuPreCallRecordDestroyDevice
+
+* Destroy descriptor set layouts created in CreateDevice
+* Clean up descriptor set manager
+* Clean up Vulkan Memory Allocator (VMA)
+* Clean up device state
+
+#### GpuAllocateValidationResources
+
+* For each Draw, Dispatch, or TraceRays call:
+  * Get a descriptor set from the descriptor set manager
+  * Get an output buffer and associated memory from VMA
+  * If descriptor indexing is enabled, get an input buffer and fill with descriptor array information
+  * If buffer device address is enabled, get an input buffer and fill with address / size pairs for addresses retrieved from vkGetBufferDeviceAddressEXT
+  * Update (write) the descriptor set with the memory info
+  * Check to see if the layout for the pipeline just bound is using our selected bind index
+  * If no conflict, add an additional command to the command buffer to bind our descriptor set at our selected index
+* Record the above objects in the per-CB state
+
+Note that the Draw and Dispatch calls include vkCmdDraw, vkCmdDrawIndexed, vkCmdDrawIndirect, vkCmdDrawIndexedIndirect, vkCmdDispatch, vkCmdDispatchIndirect, and vkCmdTraceRaysNV.
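+
+A hedged sketch of the descriptor set update described above, assuming the per-draw output buffer is bound as a
+storage buffer at binding 0 (names and bindings are illustrative):
+
+```C
+#include <vulkan/vulkan.h>
+
+/* Sketch: point binding 0 of the per-draw descriptor set at the output
+ * buffer that was just allocated for that draw. */
+static void WriteDebugDescriptorSet(VkDevice device, VkDescriptorSet set,
+                                    VkBuffer output_buffer,
+                                    VkDeviceSize output_size) {
+    VkDescriptorBufferInfo buffer_info = {
+        .buffer = output_buffer,
+        .offset = 0,
+        .range = output_size,
+    };
+    VkWriteDescriptorSet write = {
+        .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
+        .dstSet = set,
+        .dstBinding = 0,
+        .descriptorCount = 1,
+        .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
+        .pBufferInfo = &buffer_info,
+    };
+    vkUpdateDescriptorSets(device, 1, &write, 0, NULL);
+}
+```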
+
+#### GpuPreCallRecordFreeCommandBuffers
+
+* For each command buffer:
+  * Destroy the VMA buffer(s), releasing the memory
+  * Give the descriptor sets back to the descriptor set manager
+  * Clean up CB state
+
+#### GpuOverrideDispatchCreateShaderModule
+
+This function is called from PreCallRecordCreateShaderModule.
+This routine sets up to call the SPIR-V optimizer to run the "BindlessCheckPass", replacing the original SPIR-V with the instrumented SPIR-V
+which is then used in the call down the chain to CreateShaderModule.
+
+This function generates a "unique shader ID" that is passed to the SPIR-V optimizer,
+which the instrumented code puts in the debug error record to identify the shader.
+This ID is returned by this function so it can be recorded in the shader module at PostCallRecord time.
+It would have been convenient to use the shader module handle returned from the driver as this shader ID.
+But the shader needs to be instrumented before the shader module is created, so the handle is not yet available
+to pass to the optimizer as this ID.
+Therefore, the layer keeps a "counter" in per-device state that is incremented each time a shader is instrumented
+to generate unique IDs.
+This unique ID is given to the SPIR-V optimizer and is stored in the shader module state tracker after the shader module is created, which creates the necessary association between the ID and the shader module.
+
+The process of instrumenting the SPIR-V also includes passing the selected descriptor set binding index
+to the SPIR-V optimizer which the instrumented
+code uses to locate the memory block used to write the debug error record.
+An instrumented shader is now "hard-wired" to write error records via the descriptor set at that binding
+if it detects an error.
+This implies that the instrumented shaders should only be allowed to run when the correct bindings are in place.
+
+The original SPIR-V bytecode is left stored in the shader module tracking data.
+This is important because the layer may need to replace the instrumented shader with the original shader if, for example,
+there is a binding index conflict.
+The application cannot destroy the shader module until it has used the shader module to create the pipeline.
+This ensures that the original SPIR-V bytecode is available if we need it to replace the instrumented shader.
+
+#### GpuOverrideDispatchCreatePipelineLayout
+
+This function is called through PreCallRecordCreatePipelineLayout.
+
+* Check for a descriptor set binding index conflict.
+  * If there is one, issue an error message and leave the pipeline layout unmodified
+  * If no conflict, for each pipeline layout:
+    * Create a new pipeline layout
+    * Copy the original descriptor set layouts into the new pipeline layout
+    * Pad the new pipeline layout with dummy descriptor set layouts up to but not including the last one
+    * Add our descriptor set layout as the last one in the new pipeline layout
+* Create the pipeline layouts by calling down the chain with the original or modified create info
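+
+A sketch of the padding step described above (not the layer's actual code; the helper assumes the caller supplies
+scratch storage for `debug_set_index + 1` set layout handles):
+
+```C
+#include <vulkan/vulkan.h>
+
+/* Build a modified pipeline layout create-info: copy the application's set
+ * layouts, pad any gap with the empty "dummy" layout, and append the debug
+ * descriptor set layout at debug_set_index. */
+static void PadPipelineLayoutCreateInfo(const VkPipelineLayoutCreateInfo *original,
+                                        uint32_t debug_set_index,
+                                        VkDescriptorSetLayout dummy_layout,
+                                        VkDescriptorSetLayout debug_layout,
+                                        VkDescriptorSetLayout *modified_layouts,
+                                        VkPipelineLayoutCreateInfo *modified) {
+    *modified = *original;
+    for (uint32_t i = 0; i < debug_set_index; ++i) {
+        modified_layouts[i] = (i < original->setLayoutCount)
+                                  ? original->pSetLayouts[i]
+                                  : dummy_layout;
+    }
+    modified_layouts[debug_set_index] = debug_layout;
+    modified->setLayoutCount = debug_set_index + 1;
+    modified->pSetLayouts = modified_layouts;
+}
+```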
+
+#### GpuPreCallQueueSubmit
+
+* For each primary and secondary command buffer in the submission:
+  * Call helper function to see if there are any update after bind descriptors whose write state may need to be updated
+    and if so, map the input buffer and update the state.
+
+#### GpuPostCallQueueSubmit
+
+* Submit a command buffer containing a memory barrier to make GPU writes available to the host domain.
+* Call QueueWaitIdle.
+* For each primary and secondary command buffer in the submission:
+  * Call a helper function to process the instrumentation debug buffers (described later)
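+
+A sketch of the barrier recorded for that extra submission, assuming a single global memory barrier is used
+(command buffer allocation, begin/end, and the submit itself are omitted):
+
+```C
+#include <vulkan/vulkan.h>
+
+/* Make shader writes to the per-draw output buffers available to host reads
+ * before the layer maps and inspects them. */
+static void RecordAvailabilityBarrier(VkCommandBuffer cb) {
+    VkMemoryBarrier barrier = {
+        .sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER,
+        .srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT,
+        .dstAccessMask = VK_ACCESS_HOST_READ_BIT,
+    };
+    vkCmdPipelineBarrier(cb, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+                         VK_PIPELINE_STAGE_HOST_BIT, 0, 1, &barrier,
+                         0, NULL, 0, NULL);
+}
+```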
+
+#### GpuPreCallValidateCmdWaitEvents
+
+* Report an error about a possible deadlock if CmdWaitEvents is recorded with VK_PIPELINE_STAGE_HOST_BIT set.
+
+#### GpuPreCallRecordCreateGraphicsPipelines
+
+* Examine the pipelines to see if any use the debug descriptor set binding index
+* For those that do:
+  * Create non-instrumented shader modules from the saved original SPIR-V
+  * Modify the CreateInfo data to use these non-instrumented shaders.
+    * This prevents instrumented shaders from using the application's descriptor set.
+
+#### GpuPostCallRecordCreateGraphicsPipelines
+
+* For every shader in the pipeline:
+  * Destroy the shader module created in GpuPreCallRecordCreateGraphicsPipelines, if any
+    * These are found in the CreateInfo used to create the pipeline and not in the shader_module
+  * Create a shader tracking record that saves:
+    * shader module handle
+    * unique shader id
+    * graphics pipeline handle
+    * shader bytecode if it contains debug info
+
+This tracker is used to attach the shader bytecode to the shader in case it is needed
+later to get the shader source code debug info.
+
+The current shader module tracker in the validation code stores the bytecode,
+but this tracker has the same life cycle as the shader module itself.
+It is possible for the application to destroy the shader module after
+creating graphics pipeline and before submitting work that uses the shader,
+making the shader bytecode unavailable if needed for later analysis.
+Therefore, the bytecode must be saved at this opportunity.
+
+This tracker exists as long as the graphics pipeline exists,
+so the graphics pipeline handle is also stored in this tracker so that it can
+be looked up when the graphics pipeline is destroyed.
+At that point, it is safe to free the bytecode since the pipeline is never used again.
+
+#### GpuPreCallRecordDestroyPipeline
+
+* Find the shader tracker(s) with the graphics pipeline handle and free the tracker, along with any bytecode it has stored in it.
+
+### Shader Instrumentation Scope
+
+The shader instrumentation process performed by the SPIR-V optimizer applies descriptor index bounds checking
+to descriptors of the following types:
+
+    VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
+    VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
+    VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
+    VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
+    VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
+    VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
+    VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
+    VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
+    VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
+
+Instrumentation is applied to the following SPIR-V operations:
+
+    OpImageSampleImplicitLod
+    OpImageSampleExplicitLod
+    OpImageSampleDrefImplicitLod
+    OpImageSampleDrefExplicitLod
+    OpImageSampleProjImplicitLod
+    OpImageSampleProjExplicitLod
+    OpImageSampleProjDrefImplicitLod
+    OpImageSampleProjDrefExplicitLod
+    OpImageGather
+    OpImageDrefGather
+    OpImageQueryLod
+    OpImageSparseSampleImplicitLod
+    OpImageSparseSampleExplicitLod
+    OpImageSparseSampleDrefImplicitLod
+    OpImageSparseSampleDrefExplicitLod
+    OpImageSparseSampleProjImplicitLod
+    OpImageSparseSampleProjExplicitLod
+    OpImageSparseSampleProjDrefImplicitLod
+    OpImageSparseSampleProjDrefExplicitLod
+    OpImageSparseGather
+    OpImageSparseDrefGather
+    OpImageFetch
+    OpImageRead
+    OpImageQueryFormat
+    OpImageQueryOrder
+    OpImageQuerySizeLod
+    OpImageQuerySize
+    OpImageQueryLevels
+    OpImageQuerySamples
+    OpImageSparseFetch
+    OpImageSparseRead
+    OpImageWrite
+
+Instrumentation is also applied to OpLoad and OpStore instructions with an AccessChain into a base of OpVariable with
+either Uniform or StorageBuffer storage class and a type which is either a
+struct decorated with Block, or a runtime or statically-sized array of such
+a struct.
+
+
+### Shader Instrumentation Error Record Format
+
+The instrumented shader code generates "error records" in a specific format.
+
+This description includes the support for future GPU-Assisted Validation features
+such as checking for uninitialized descriptors in the partially-bound scenario.
+These items are not used in the current implementation for descriptor array
+bounds checking, but are provided here to complete the description of the
+error record format.
+
+The format of this buffer is as follows:
+
+```C
+struct DebugOutputBuffer_t
+{
+   uint DataWrittenLength;
+   uint Data[];
+}
+```
+
+`DataWrittenLength` is the number of uint32_t words that have been attempted to be written.
+It should be initialized to 0.
+
+The `Data` array is the uint32_t words written by the shaders of the pipeline to record bindless validation errors.
+All elements of `Data` should be initialized to 0.
+Note that the `Data` array has runtime length.
+The shader queries the length of the `Data` array to make sure that it does not write past the end of `Data`.
+The shader only writes complete records.
+The layer uses the length of `Data` to control the number of records written by the shaders.
+
+The `DataWrittenLength` is atomically updated by the shaders so that shaders do not overwrite each other's data.
+The shader takes the value it gets from the atomic update.
+If the value plus the record length is greater than the length of `Data`, it does not write the record.
+
+Given this protocol, the value in `DataWrittenLength` is not very meaningful if it is greater than the length of `Data`.
+However, the format of the written records plus the fact that `Data` is initialized to 0 should be enough to determine
+the records that were written.
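+
+The following is a host-side sketch of that reservation protocol (the real producer is SPIR-V emitted by the
+instrumentation pass, not C code):
+
+```C
+#include <stdatomic.h>
+#include <stdint.h>
+
+typedef struct DebugOutputBuffer_t {
+    atomic_uint DataWrittenLength;  /* words attempted so far */
+    uint32_t Data[];                /* runtime-length in the shader */
+} DebugOutputBuffer_t;
+
+/* Reserve space with an atomic add, then write only if the whole record fits. */
+static int TryWriteRecord(DebugOutputBuffer_t *buf, uint32_t data_capacity,
+                          const uint32_t *record, uint32_t record_len) {
+    uint32_t start = atomic_fetch_add(&buf->DataWrittenLength, record_len);
+    if (start + record_len > data_capacity) {
+        return 0;  /* would overflow Data: write nothing (complete records only) */
+    }
+    for (uint32_t i = 0; i < record_len; ++i) {
+        buf->Data[start + i] = record[i];
+    }
+    return 1;
+}
+```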
+
+### Record Format
+
+The format of an output record is the following:
+
+    Word 0: Record size
+    Word 1: Shader ID
+    Word 2: Instruction Index
+    Word 3: Stage
+    <Stage-Specific Words>
+    <Validation-Specific Words>
+
+The Record Size is the number of words in this record, including the Record Size itself.
+
+The Shader ID is a handle that was provided by the layer when the shader was instrumented.
+
+The Instruction Index is the instruction within the original function at which the error occurred.
+For bindless, this will be the instruction which consumes the descriptor in question,
+or the instruction that consumes the OpSampledImage that consumes the descriptor.
+
+The Stage is the integer value used in SPIR-V for each of the Execution Models:
+
+| Stage         | Value |
+|---------------|:-----:|
+|Vertex         |0      |
+|TessCtrl       |1      |
+|TessEval       |2      |
+|Geometry       |3      |
+|Fragment       |4      |
+|Compute        |5      |
+|RayGenerationNV|5313   |
+|IntersectionNV |5314   |
+|AnyHitNV       |5315   |
+|ClosestHitNV   |5316   |
+|MissNV         |5317   |
+|CallableNV     |5318   |
+
+### Stage Specific Words
+
+These are words that identify which "instance" of the shader the validation error occurred in.
+Here are words for each stage:
+
+| Stage         | Word 0           | Word 1        | Word 2       |
+|---------------|------------------|---------------|---------------|
+|Vertex         |VertexID          |InstanceID     | unused        |
+|TessCtrl       |InvocationID      |PrimitiveID    | unused        |
+|TessEval       |PrimitiveID       |TessCoord.u    | TessCoord.v   |
+|Geometry       |PrimitiveID       |InvocationID   | unused        |
+|Fragment       |FragCoord.x       |FragCoord.y    | unused        |
+|Compute        |GlobalInvocID.x   |GlobalInvocID.y|GlobalInvocID.z|
+|RayGenerationNV|LaunchIdNV.x      |LaunchIdNV.y   |LaunchIdNV.z   |
+|IntersectionNV |LaunchIdNV.x      |LaunchIdNV.y   |LaunchIdNV.z   |
+|AnyHitNV       |LaunchIdNV.x      |LaunchIdNV.y   |LaunchIdNV.z   |
+|ClosestHitNV   |LaunchIdNV.x      |LaunchIdNV.y   |LaunchIdNV.z   |
+|MissNV         |LaunchIdNV.x      |LaunchIdNV.y   |LaunchIdNV.z   |
+|CallableNV     |LaunchIdNV.x      |LaunchIdNV.y   |LaunchIdNV.z   |
+
+"unused" means not relevant, but still present.
+
+### Validation-Specific Words
+
+These are words that are specific to the validation being done.
+For bindless validation, they are variable.
+
+The first word is the Error Code.
+
+For the *OutOfBounds errors, two words will follow: Word0:DescriptorIndex, Word1:DescriptorArrayLength
+
+For the *Uninitialized errors, one word will follow: Word0:DescriptorIndex
+
+| Error                       | Word 0              | Word 1                |
+|-----------------------------|---------------------|-----------------------|
+|IndexOutOfBounds             |Descriptor Index     |Descriptor Array Length|
+|DescriptorUninitialized      |Descriptor Index     |unused                 |
+|BufferDeviceAddrOOB          |Out of Bounds Address|unused                 |
+
+So the words written for an image descriptor bounds error in a fragment shader are:
+
+    Word 0: Record size (9)
+    Word 1: Shader ID
+    Word 2: Instruction Index
+    Word 3: Stage (4:Fragment)
+    Word 4: FragCoord.x
+    Word 5: FragCoord.y
+    Word 6: Error (0: ImageIndexOutOfBounds)
+    Word 7: DescriptorIndex
+    Word 8: DescriptorArrayLength
+
+If another error is encountered, that record is written starting at Word 10, provided the whole record does not overflow Data.
+If it would overflow, no words are written.
+
+The validation layer can continue to read valid records until it sees a Record Length of 0 or the end of Data is reached.
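+
+A minimal sketch of that scan over a mapped output buffer, using the record layout described above:
+
+```C
+#include <stdint.h>
+#include <stdio.h>
+
+/* Walk the Data words and report each complete record. */
+static void ScanDebugRecords(const uint32_t *data, uint32_t data_len) {
+    uint32_t offset = 0;
+    while (offset < data_len && data[offset] != 0) {   /* Word 0: record size */
+        uint32_t record_size = data[offset];
+        if (offset + record_size > data_len) break;    /* incomplete record */
+        uint32_t shader_id  = data[offset + 1];
+        uint32_t inst_index = data[offset + 2];
+        uint32_t stage      = data[offset + 3];
+        printf("error record: shader id %u, instruction %u, stage %u\n",
+               shader_id, inst_index, stage);
+        offset += record_size;
+    }
+}
+```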
+
+#### Programmatic interface
+
+The programmatic interface for the above informal description is codified in the
+[SPIRV-Tools](https://github.com/KhronosGroup/SPIRV-Tools) repository in file
+[`instrument.hpp`](https://github.com/KhronosGroup/SPIRV-Tools/blob/master/include/spirv-tools/instrument.hpp).
+It consists largely of integer constant definitions for the codes and values mentioned above and
+offsets into the record for locating each item.
+
+## GPU-Assisted Validation Error Report
+
+This is a fairly simple process of mapping the debug report buffer associated with
+each draw in the command buffer that was just submitted and looking to see if the GPU instrumentation
+code wrote anything.
+Each draw in the command buffer should have a corresponding result buffer in the command buffer's list of result buffers.
+The report generating code loops through the result buffers, maps each of them, checks for errors, and unmaps them.
+The layer clears the buffer to zeros when it is allocated and after processing any
+buffer that was written to.
+The instrumented shader code expects these buffers to be cleared to zeros before it
+writes to them.
+
+The layer then prepares a "common" validation error message containing:
+
+* command buffer handle - This is easily obtained because we are looping over the command
+  buffers just submitted.
+* draw number - The layer keeps track of how many draws it has processed for a given command buffer.
+* pipeline handle - The shader tracker discussed earlier contains this handle
+* shader module handle - The "Shader ID" (Word 1 in the record) is used to look up
+  the shader tracker, which is then used to obtain the shader module and pipeline handles
+* instruction index - This is the SPIR-V instruction index where the invalid array access occurred.
+  It is not that useful by itself, since the user would have to use it to locate a SPIR-V instruction
+  in a SPIR-V disassembly and somehow relate it back to the shader source code.
+  But it could still be useful to some and it is easy to report.
+  The user can build the shader with debug information to get source-level information.
+
+For all objects, the layer also looks up the objects in the Debug Utils object name map in
+case the application used that extension to name any objects.
+If a name exists for that object, it is included in the error message.
+
+The layer then adds on error message text obtained from decoding the stage-specific and
+validation-specific data as described earlier.
+
+This completes the error report when there is no source-level debug information in the shader.
+
+### Source-Level Debug Information
+
+This is one of the more complicated and code-heavy parts of the GPU-Assisted Validation feature
+and all it really does is display source-level information when the shader is compiled
+with debugging info (`-g` option in the case of `glslangValidator`).
+
+The process breaks down into two steps:
+
+#### OpLine Processing
+
+The SPIR-V generator (e.g., glslangValidator) places an OpLine SPIR-V instruction in the
+shader program ahead of code generated for each source code statement.
+The OpLine instruction contains the filename id (for an OpString),
+the source code line number and the source code column number.
+It is possible to have two source code statements on the same line in the source file,
+which explains the need for the column number.
+
+The layer scans the SPIR-V looking for the last OpLine instruction that appears before the instruction
+at the instruction index obtained from the debug report.
+This OpLine then contains the correct filename id, line number, and column number of the
+statement causing the error.
+The filename itself is obtained by scanning the SPIR-V again for an OpString instruction that
+matches the id from the OpLine.
+This OpString contains the text string representing the filename.
+This information is added to the validation error message.
+
+For online compilation when there is no "file", only the line number information is reported.
+
+#### OpSource Processing
+
+The SPIR-V built with source-level debug info also contains OpSource instructions that
+have a string containing the source code, delimited by newlines.
+Due to possible pre-processing, the layer cannot simply use the source file line number
+from the OpLine to index into this set of source code lines.
+
+Instead, the correct source code line is found by first locating the "#line" directive in the
+source that specifies a line number closest to and less than the source line number reported
+by the OpLine located in the previous step.
+The correct "#line" directive must also match its filename, if specified,
+with the filename from the OpLine.
+
+Then the difference between the "#line" line number and the OpLine line number is added
+to the place where the "#line" was found to locate the actual line of source, which is
+then added to the validation error message.
+
+For example, if the OpLine line number is 15, and there is a "#line 10" on line 40
+in the OpSource source, then line 45 in the OpSource contains the correct source line.
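+
+Expressed as a small sketch (with the example's numbers in the comments):
+
+```C
+/* The OpSource line holding the error is the location of the matching "#line"
+ * directive plus the difference between the OpLine line number and the value
+ * given in that "#line" directive. */
+static unsigned SourceLineInOpSource(unsigned pound_line_location, /* 40 */
+                                     unsigned pound_line_value,    /* 10 */
+                                     unsigned opline_line)         /* 15 */
+{
+    return pound_line_location + (opline_line - pound_line_value); /* 45 */
+}
+```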
+
+### Shader Instrumentation Input Record Format for Descriptor Indexing
+
+Although the DI input buffer is a linear array of unsigned integers, conceptually there are arrays within the linear array.
+
+Word 1 starts an array (denoted sets_to_sizes) that is number_of_sets long; each entry is the index of the start of that set's entries in the sizes array.
+
+After the sets_to_sizes array comes the sizes array, which contains the array size (or 1 if the descriptor is not an array) of each descriptor in the set.  Bindings with no descriptor are filled in with zeros.
+
+After the sizes array comes the sets_to_bindings array, which, for each descriptor set, gives the index of the start of that set's entries in the bindings_to_written array.  Word 0 contains the index of the start of the sets_to_bindings array.
+
+After the sets_to_bindings array comes the bindings_to_written array, which, for each binding in the set, gives the index of the start of that binding's entries in the written array.
+
+Lastly comes the written array, which indicates whether a given binding / array element has been written.
+
+Example:
+```
+Assume Descriptor Set 0 looks like:                        And Descriptor Set 1 looks like:
+  Binding                                                    Binding
+     0          Array[3]                                       2          Array[4]
+     1          Non Array                                      3          Array[5]
+     3          Array[2]
+
+Here is what the input buffer should look like:
+
+   Index of                     sets_to_sizes                     sizes             sets_to_bindings                       bindings_to_written    written
+   sets_to_bindings
+
+     0 |11| sets_to_bindings     1 |3| set 0 sizes start at 3     3  |3| S0B0       11 |13| set 0 bindings start at 13        13 |21| S0B0        21 |1| S0B0I0 was written
+            starts at 11         2 |7| set 1 sizes start at 7     4  |1| S0B1       12 |17| set 1 bindings start at 17        14 |24| S0B1        22 |1| S0B0I1 was written
+                                                                  5  |0| S0B2                                                 15 |0 | S0B2        23 |1| S0B0I3 was written
+                                                                  6  |2| S0B3                                                 16 |25| S0B3        24 |1| S0B1 was written
+                                                                  7  |0| S1B0                                                 17 |0 | S1B0        25 |1| S0B3I0 was written
+                                                                  8  |0| S1B1                                                 18 |0 | S1B1        26 |1| S0B3I1 was written
+                                                                  9  |4| S1B2                                                 19 |27| S1B2        27 |0| S1B2I0 was not written
+                                                                  10 |5| S1B3                                                 20 |31| S1B3        28 |1| S1B2I1 was written
+                                                                                                                                                  29 |1| S1B2I2 was written
+                                                                                                                                                  30 |1| S1B2I3 was written
+                                                                                                                                                  31 |1| S1B3I0 was written
+                                                                                                                                                  32 |1| S1B3I1 was written
+                                                                                                                                                  33 |1| S1B3I2 was written
+                                                                                                                                                  34 |1| S1B3I3 was written
+                                                                                                                                                  35 |1| S1B3I4 was written
+```
+Alternately, the array size and write state data can be described as follows:
+the descriptor at (set = s, binding = b, index = i) is not initialized if
+```
+Input[ i + Input[ b + Input[ s + Input[0] ] ] ] == 0
+```
+and the array's size is Input[ Input[ s + 1 ] + b ].
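+
+A sketch of these lookups in C, where `input` is the mapped DI input buffer laid out as in the example above:
+
+```C
+#include <stdint.h>
+
+/* sets_to_sizes begins at word 1; each entry points at that set's sizes. */
+static uint32_t DescriptorArraySize(const uint32_t *input,
+                                    uint32_t set, uint32_t binding) {
+    return input[input[set + 1] + binding];
+}
+
+/* Word 0 points at sets_to_bindings, which points into bindings_to_written,
+ * which in turn points into the written array. */
+static int DescriptorWasWritten(const uint32_t *input, uint32_t set,
+                                uint32_t binding, uint32_t index) {
+    uint32_t set_bindings = input[set + input[0]];
+    uint32_t binding_written = input[binding + set_bindings];
+    return input[index + binding_written] != 0;
+}
+```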
+
+### Shader Instrumentation Input Record Format for buffer device address
+The input buffer for buffer_reference accesses consists of all addresses retrieved from vkGetBufferDeviceAddressEXT and the sizes of the corresponding buffers.
+The addresses should be sorted in ascending order.
+```
+Word 0:   Index of start of buffer sizes (X+2)
+Word 1:   0x0000000000000000
+Word 2:   Device address of first buffer
+               .
+               .
+Word X:   Device address of last buffer
+Word X+1: 0xffffffffffffffff
+Word X+2: 0 (size of pretend buffer at word 1)
+Word X+3: Size of first buffer
+               .
+               .
+Word Y:   Size of last buffer
+Word Y+1: 0  (size of pretend buffer at word X+1)
+```
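+
+A hedged sketch of the bounds check this data enables, assuming the addresses and their sizes have been split
+into two parallel arrays (including the zero-size sentinel entries):
+
+```C
+#include <stdint.h>
+
+/* Return nonzero if a buffer_reference address falls inside one of the
+ * application's buffers. Addresses are sorted ascending and bracketed by the
+ * 0 and 0xffffffffffffffff sentinels, whose sizes are 0. */
+static int AddressInBounds(const uint64_t *addresses, const uint64_t *sizes,
+                           uint32_t count, uint64_t ref) {
+    /* Binary search for the last address that is <= ref. */
+    uint32_t lo = 0, hi = count - 1;
+    while (lo < hi) {
+        uint32_t mid = (lo + hi + 1) / 2;
+        if (addresses[mid] <= ref) lo = mid; else hi = mid - 1;
+    }
+    /* Sentinel entries have size 0, so stray references always fail. */
+    return ref - addresses[lo] < sizes[lo];
+}
+```
+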
+### Acceleration Structure Building Validation
+
+Increasing performance of graphics hardware has made ray tracing a viable option for interactive rendering. The VK_NV_ray_tracing extension adds
+ray tracing support to Vulkan. With this extension, applications create and build VkAccelerationStructureNV objects for their scene geometry
+which allows implementations to manage the scene geometry as it is traversed during a ray tracing query.
+
+There are two types of acceleration structures, top level acceleration structures and bottom level acceleration structures. Bottom level acceleration
+structures are for an array of geometries and top level acceleration structures are for an array of instances of bottom level structures.
+
+The acceleration structure building validation feature of the GPU validation layer validates that the bottom level acceleration structure references
+found in the instance data used when building top level acceleration structures are valid.
+
+#### Implementation
+
+Because the instance data buffer used in vkCmdBuildAccelerationStructureNV could be a device local buffer and because commands are executed sometime
+in the future, validating the instance buffer must take place on the GPU. To accomplish this, the GPU validation layer tracks the known valid handles
+of bottom level acceleration structures at the time a command buffer is recorded and inserts an additional compute shader dispatch before commands
+which build top level acceleration structures to inspect and validate the instance buffer used. The compute shader iterates over the instance buffer
+and replaces unrecognized bottom level acceleration structure handles with a prebuilt valid bottom level acceleration structure handle. Upon queue
+submission and completion of the command buffer, the reported failures are read from a storage buffer written to by the compute shader and finally
+reported to the application.
+
+To help visualize this, a command buffer that would originally have been recorded as:
+
+```cpp
+vkBeginCommandBuffer(...)
+
+... other commands ...
+
+vkCmdBuildAccelerationStructureNV(...) // build top level
+
+... other commands ...
+
+vkEndCommandBuffer(...)
+```
+
+would actually be recorded as:
+
+```cpp
+vkBeginCommandBuffer(...)
+
+... other commands ...
+
+vkCmdPipelineBarrier(...)               // ensure writes to instance buffer have completed
+
+vkCmdDispatch(...)                      // launch validation compute shader
+
+vkCmdPipelineBarrier(...)               // ensure validation compute shader writes have completed
+
+vkCmdBuildAccelerationStructureNV(...)  // build top level using modified instance buffer
+
+... other commands ...
+
+vkEndCommandBuffer(...)
+```
+## GPU-Assisted Validation Testing
+
+Validation Layer Tests (VLTs) exist for GPU-Assisted Validation.
+They cannot be run with the "mock ICD" in headless CI environments because they need to
+actually execute shaders.
+But they are still useful to run on real devices to check for regressions.
+
+There isn't anything else particularly remarkable or different about these tests.
+They activate GPU-Assisted Validation via the programmatic
+interface as described earlier.
+
+The tests exercise the extraction of source code information when the shader
+is built with debug info.
diff --git a/src/third_party/vulkan-validation-layers/src/docs/handle_wrapping.md b/src/third_party/vulkan-validation-layers/src/docs/handle_wrapping.md
new file mode 100644
index 0000000..2914434
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/docs/handle_wrapping.md
@@ -0,0 +1,17 @@
+<!-- markdownlint-disable MD041 -->
+[![Khronos Vulkan][1]][2]
+
+[1]: https://vulkan.lunarg.com/img/Vulkan_100px_Dec16.png "https://www.khronos.org/vulkan/"
+[2]: https://www.khronos.org/vulkan/
+
+# Handle Wrapping Functionality
+[![Creative Commons][3]][4]
+
+[3]: https://i.creativecommons.org/l/by-nd/4.0/88x31.png "Creative Commons License"
+[4]: https://creativecommons.org/licenses/by-nd/4.0/
+
+The handle wrapping facility is a feature of the Khronos Layer which aliases all non-dispatchable Vulkan objects with a unique identifier at object-creation time. The aliased handles are used during validation to ensure that duplicate object handles are correctly managed and tracked by the validation layers. This enables consistent and coherent validation in addition to proper operation on systems which return non-unique object handles.
+
+**Note**:
+
+* If you are developing Vulkan extensions which include new APIs taking one or more Vulkan dispatchable objects as parameters, you may find it necessary to disable handle-wrapping in order to use the validation layers. Options for disabling this facility in the Khronos Validation Layer include the VkConfig utility, the vk_layer_settings.txt configuration file, the VK_LAYER_DISABLES environment variable, or the VK_EXT_validation_features extension.
diff --git a/src/third_party/vulkan-validation-layers/src/docs/khronos_validation_layer.md b/src/third_party/vulkan-validation-layers/src/docs/khronos_validation_layer.md
new file mode 100644
index 0000000..ca9c9b4
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/docs/khronos_validation_layer.md
@@ -0,0 +1,26 @@
+<!-- markdownlint-disable MD041 -->
+<!-- Copyright 2015-2019 LunarG, Inc. -->
+
+[![Khronos Vulkan][1]][2]
+
+[1]: https://vulkan.lunarg.com/img/Vulkan_100px_Dec16.png "https://www.khronos.org/vulkan/"
+[2]: https://www.khronos.org/vulkan/
+
+# VK\_LAYER\_KHRONOS\_validation
+
+[![Creative Commons][3]][4]
+
+[3]: https://i.creativecommons.org/l/by-nd/4.0/88x31.png "Creative Commons License"
+[4]: https://creativecommons.org/licenses/by-nd/4.0/
+The `VK_LAYER_KHRONOS_validation` layer supports the following validation areas:
+
+- Thread safety validation
+- Stateless parameter validation
+- Object lifetime validation
+- Core validation checks
+- GPU-Assisted validation
+- Best practices validation
+- Handle wrapping functionality
+
+Details for the functionality contained in each of these areas can be viewed in their respective validation detail documents, located in this directory.
+
diff --git a/src/third_party/vulkan-validation-layers/src/docs/object_lifetimes.md b/src/third_party/vulkan-validation-layers/src/docs/object_lifetimes.md
new file mode 100644
index 0000000..76a8d86
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/docs/object_lifetimes.md
@@ -0,0 +1,27 @@
+<!-- markdownlint-disable MD041 -->
+<!-- Copyright 2015-2019 LunarG, Inc. -->
+[![Khronos Vulkan][1]][2]
+
+[1]: https://vulkan.lunarg.com/img/Vulkan_100px_Dec16.png "https://www.khronos.org/vulkan/"
+[2]: https://www.khronos.org/vulkan/
+
+# Object Lifetimes Validation
+
+[![Creative Commons][3]][4]
+
+[3]: https://i.creativecommons.org/l/by-nd/4.0/88x31.png "Creative Commons License"
+[4]: https://creativecommons.org/licenses/by-nd/4.0/
+
+The object tracking validation object tracks all Vulkan objects. Object lifetimes are validated
+along with issues related to unknown objects and object destruction and cleanup.
+
+All Vulkan dispatchable and non-dispatchable objects are tracked by this module.
+
+This layer validates that:
+
+- only known objects are referenced and destroyed
+- lookups are performed only on objects being tracked
+- objects are correctly freed/destroyed
+
+Validation will print errors if validation checks are not correctly met and warnings if improper
+reference of objects is detected.
diff --git a/src/third_party/vulkan-validation-layers/src/docs/stateless_validation.md b/src/third_party/vulkan-validation-layers/src/docs/stateless_validation.md
new file mode 100644
index 0000000..40dc933
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/docs/stateless_validation.md
@@ -0,0 +1,21 @@
+<!-- markdownlint-disable MD041 -->
+<!-- Copyright 2015-2019 LunarG, Inc. -->
+[![Khronos Vulkan][1]][2]
+
+[1]: https://vulkan.lunarg.com/img/Vulkan_100px_Dec16.png "https://www.khronos.org/vulkan/"
+[2]: https://www.khronos.org/vulkan/
+
+# Stateless Parameter Validation
+
+[![Creative Commons][3]][4]
+
+[3]: https://i.creativecommons.org/l/by-nd/4.0/88x31.png "Creative Commons License"
+[4]: https://creativecommons.org/licenses/by-nd/4.0/
+
+The stateless parameter validation object checks the input parameters to API calls for validity.
+This layer performs the following tasks:
+
+- validation of structures; structures are recursed if necessary
+- validation of enumerated type values
+- null pointer conditions
+- stateless valid usage checks
diff --git a/src/third_party/vulkan-validation-layers/src/docs/thread_safety.md b/src/third_party/vulkan-validation-layers/src/docs/thread_safety.md
new file mode 100644
index 0000000..94acb6e
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/docs/thread_safety.md
@@ -0,0 +1,14 @@
+<!-- markdownlint-disable MD041 -->
+[![Khronos Vulkan][1]][2]
+
+[1]: https://vulkan.lunarg.com/img/Vulkan_100px_Dec16.png "https://www.khronos.org/vulkan/"
+[2]: https://www.khronos.org/vulkan/
+
+# Thread Safety Validation
+[![Creative Commons][3]][4]
+
+[3]: https://i.creativecommons.org/l/by-nd/4.0/88x31.png "Creative Commons License"
+[4]: https://creativecommons.org/licenses/by-nd/4.0/
+
+The thread safety validation object checks multi-threading of API calls for validity.  Checks performed
+include ensuring that only one thread at a time uses an object in free-threaded API calls.
diff --git a/src/third_party/vulkan-validation-layers/src/external/CMakeLists.txt b/src/third_party/vulkan-validation-layers/src/external/CMakeLists.txt
new file mode 100644
index 0000000..b263e99
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/external/CMakeLists.txt
@@ -0,0 +1,45 @@
+# ~~~
+# Copyright (c) 2018 Valve Corporation
+# Copyright (c) 2018 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ~~~
+
+# Add your optional dependencies in this "external" directory.
+
+# googletest is an optional external dependency for this repo.
+if(BUILD_TESTS)
+    # Attempt to enable googletest if available.
+
+    # Suppress all warnings from external projects (i.e.: in this directory's scope).
+    if(CMAKE_CXX_COMPILER_ID MATCHES "GNU" OR CMAKE_CXX_COMPILER_ID MATCHES "Clang")
+        set_property(DIRECTORY APPEND PROPERTY COMPILE_OPTIONS "-w")
+    endif()
+
+    if(TARGET gtest_main)
+        # Already enabled as a target (perhaps by a project enclosing this one)
+        message(STATUS "Vulkan-ValidationLayers/external: " "googletest already configured - using it")
+    elseif(IS_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/googletest")
+        # The googletest directory exists, so enable it as a target.
+        message(STATUS "Vulkan-ValidationLayers/external: " "googletest found - configuring it for tests")
+        set(BUILD_GTEST ON CACHE BOOL "Builds the googletest subproject")
+        set(BUILD_GMOCK OFF CACHE BOOL "Builds the googlemock subproject")
+        set(gtest_force_shared_crt ON CACHE BOOL "Link gtest runtimes dynamically")
+        set(BUILD_SHARED_LIBS ON CACHE BOOL "Build shared libraries")
+        # EXCLUDE_FROM_ALL keeps the install target from installing GTEST files.
+        add_subdirectory("${CMAKE_CURRENT_SOURCE_DIR}/googletest" EXCLUDE_FROM_ALL)
+    else()
+        message(SEND_ERROR "Vulkan-ValidationLayers/external: " "Google Test was not found.  "
+                           "Provide Google Test in external/googletest or set BUILD_TESTS=OFF")
+    endif()
+endif()
diff --git a/src/third_party/vulkan-validation-layers/src/layers/CMakeLists.txt b/src/third_party/vulkan-validation-layers/src/layers/CMakeLists.txt
new file mode 100644
index 0000000..315de98
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/CMakeLists.txt
@@ -0,0 +1,261 @@
+# ~~~
+# Copyright (c) 2014-2019 Valve Corporation
+# Copyright (c) 2014-2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ~~~
+
+if(WIN32)
+    add_definitions(-DVK_USE_PLATFORM_WIN32_KHR -DVK_USE_PLATFORM_WIN32_KHX -DWIN32_LEAN_AND_MEAN)
+    add_custom_target(mk_layer_config_dir ALL
+                      COMMAND ${CMAKE_COMMAND} -E make_directory ${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG>)
+    set_target_properties(mk_layer_config_dir PROPERTIES FOLDER ${LAYERS_HELPER_FOLDER})
+elseif(ANDROID)
+    add_definitions(-DVK_USE_PLATFORM_ANDROID_KHR -DVK_USE_PLATFORM_ANDROID_KHX)
+elseif(APPLE)
+    add_definitions(-DVK_USE_PLATFORM_MACOS_MVK)
+    if(CMAKE_GENERATOR MATCHES "^Xcode.*")
+        add_custom_target(mk_layer_config_dir ALL
+                          COMMAND ${CMAKE_COMMAND} -E make_directory ${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG>)
+    endif()
+elseif(UNIX AND NOT APPLE) # i.e. Linux
+    if(BUILD_WSI_XCB_SUPPORT)
+        add_definitions(-DVK_USE_PLATFORM_XCB_KHR -DVK_USE_PLATFORM_XCB_KHX)
+    endif()
+
+    if(BUILD_WSI_XLIB_SUPPORT)
+        add_definitions(-DVK_USE_PLATFORM_XLIB_KHR -DVK_USE_PLATFORM_XLIB_KHX -DVK_USE_PLATFORM_XLIB_XRANDR_EXT)
+    endif()
+
+    if(BUILD_WSI_WAYLAND_SUPPORT)
+        add_definitions(-DVK_USE_PLATFORM_WAYLAND_KHR -DVK_USE_PLATFORM_WAYLAND_KHX)
+    endif()
+else()
+    message(FATAL_ERROR "Unsupported Platform!")
+endif()
+
+# Configure installation of source files that are dependencies of other repos.
+if(BUILD_LAYER_SUPPORT_FILES)
+    set(LAYER_UTIL_FILES
+        cast_utils.h
+        hash_util.h
+        hash_vk_types.h
+        vk_format_utils.h
+        vk_format_utils.cpp
+        vk_layer_config.h
+        vk_layer_config.cpp
+        vk_layer_data.h
+        vk_layer_extension_utils.h
+        vk_layer_extension_utils.cpp
+        vk_layer_logging.h
+        vk_layer_utils.h
+        vk_layer_utils.cpp
+        vk_loader_platform.h
+        generated/vk_validation_error_messages.h
+        generated/vk_layer_dispatch_table.h
+        generated/vk_dispatch_table_helper.h
+        generated/vk_safe_struct.h
+        generated/vk_safe_struct.cpp
+        generated/vk_enum_string_helper.h
+        generated/vk_object_types.h
+        generated/vk_extension_helper.h
+        generated/vk_typemap_helper.h)
+    install(FILES ${LAYER_UTIL_FILES} DESTINATION ${CMAKE_INSTALL_INCLUDEDIR})
+endif()
+
+set(TARGET_NAMES
+    VkLayer_khronos_validation
+    VkLayer_standard_validation)
+
+if(BUILD_LAYERS)
+    # Install the layer json files
+    if(WIN32)
+        if(CMAKE_GENERATOR MATCHES "^Visual Studio.*")
+            foreach(TARGET_NAME ${TARGET_NAMES})
+                install(FILES ${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG>/${TARGET_NAME}.json DESTINATION ${CMAKE_INSTALL_LIBDIR})
+            endforeach()
+        else()
+            foreach(TARGET_NAME ${TARGET_NAMES})
+                install(FILES ${CMAKE_CURRENT_BINARY_DIR}/${TARGET_NAME}.json DESTINATION ${CMAKE_INSTALL_LIBDIR})
+            endforeach()
+        endif()
+    elseif(UNIX) # UNIX includes APPLE
+        foreach(TARGET_NAME ${TARGET_NAMES})
+            install(FILES ${CMAKE_CURRENT_BINARY_DIR}/staging-json/${TARGET_NAME}.json
+                    DESTINATION ${CMAKE_INSTALL_DATAROOTDIR}/vulkan/explicit_layer.d)
+        endforeach()
+    endif()
+endif()
+
+# System-specific macros to create a library target.
+if(WIN32)
+    macro(AddVkLayer target LAYER_COMPILE_DEFINITIONS)
+        file(TO_NATIVE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/VkLayer_${target}.def DEF_FILE)
+        add_custom_target(copy-${target}-def-file ALL
+                          COMMAND ${CMAKE_COMMAND} -E copy_if_different ${DEF_FILE} VkLayer_${target}.def
+                          VERBATIM)
+        set_target_properties(copy-${target}-def-file PROPERTIES FOLDER ${LAYERS_HELPER_FOLDER})
+
+        add_library(VkLayer_${target} SHARED ${ARGN} VkLayer_${target}.def)
+        target_compile_definitions(VkLayer_${target} PUBLIC ${LAYER_COMPILE_DEFINITIONS})
+        target_link_libraries(VkLayer_${target} PRIVATE VkLayer_utils)
+        add_dependencies(VkLayer_${target} VkLayer_utils)
+        install(TARGETS VkLayer_${target} DESTINATION ${CMAKE_INSTALL_LIBDIR})
+    endmacro()
+elseif(APPLE)
+    macro(AddVkLayer target LAYER_COMPILE_DEFINITIONS)
+        add_library(VkLayer_${target} SHARED ${ARGN})
+        target_compile_definitions(VkLayer_${target} PUBLIC ${LAYER_COMPILE_DEFINITIONS})
+        target_link_libraries(VkLayer_${target} PRIVATE VkLayer_utils)
+        add_dependencies(VkLayer_${target} VkLayer_utils)
+        set_target_properties(VkLayer_${target}
+                              PROPERTIES LINK_FLAGS
+                                         "-Wl"
+                                         INSTALL_RPATH
+                                         "@loader_path/")
+        install(TARGETS VkLayer_${target} DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
+    endmacro()
+else(UNIX AND NOT APPLE) # i.e.: Linux
+    macro(AddVkLayer target LAYER_COMPILE_DEFINITIONS)
+        add_library(VkLayer_${target} SHARED ${ARGN})
+        target_compile_definitions(VkLayer_${target} PUBLIC ${LAYER_COMPILE_DEFINITIONS})
+        target_link_libraries(VkLayer_${target} PRIVATE VkLayer_utils)
+        add_dependencies(VkLayer_${target} VkLayer_utils)
+        set_target_properties(VkLayer_${target} PROPERTIES LINK_FLAGS "-Wl,--version-script=${CMAKE_CURRENT_SOURCE_DIR}/libVkLayer_${target}.map,-Bsymbolic,--exclude-libs,ALL")
+        install(TARGETS VkLayer_${target} DESTINATION ${CMAKE_INSTALL_LIBDIR})
+    endmacro()
+endif()
+
+include_directories(${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/generated ${VulkanHeaders_INCLUDE_DIR})
+
+if(WIN32)
+    # Applies to all configurations
+    add_definitions(-D_CRT_SECURE_NO_WARNINGS)
+    # Avoid: fatal error C1128: number of sections exceeded object file format limit: compile with /bigobj
+    add_compile_options("/bigobj")
+    # Allow Windows to use multiprocessor compilation
+    add_compile_options(/MP)
+    # Turn off transitional "changed behavior" warning message for Visual Studio versions prior to 2015. The changed behavior is
+    # that constructor initializers are now fixed to clear the struct members.
+    add_compile_options("$<$<AND:$<CXX_COMPILER_ID:MSVC>,$<VERSION_LESS:$<CXX_COMPILER_VERSION>,19>>:/wd4351>")
+else()
+    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wpointer-arith -Wno-unused-function -Wno-sign-compare")
+    set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wpointer-arith -Wno-unused-function -Wno-sign-compare")
+endif()
+
+# Clang warns about unused const variables. Generated files may purposely contain unused consts, so silence this warning in Clang
+if(CMAKE_C_COMPILER_ID MATCHES "Clang")
+    set_source_files_properties(parameter_validation.cpp PROPERTIES COMPILE_FLAGS "-Wno-unused-const-variable")
+endif()
+
+set(CHASSIS_LIBRARY_FILES
+    generated/chassis.cpp
+    generated/layer_chassis_dispatch.cpp
+    generated/command_counter_helper.cpp
+    state_tracker.cpp)
+
+set(CORE_VALIDATION_LIBRARY_FILES
+    core_validation.cpp
+    drawdispatch.cpp
+    convert_to_renderpass2.cpp
+    descriptor_sets.cpp
+    buffer_validation.cpp
+    shader_validation.cpp
+    gpu_validation.cpp
+    xxhash.c)
+
+set(OBJECT_LIFETIMES_LIBRARY_FILES
+    generated/object_tracker.cpp
+    generated/object_tracker.h
+    object_tracker_utils.cpp)
+
+set(THREAD_SAFETY_LIBRARY_FILES
+    generated/thread_safety.cpp
+    generated/thread_safety.h)
+
+set(STATELESS_VALIDATION_LIBRARY_FILES
+    generated/parameter_validation.cpp
+    generated/parameter_validation.h
+    parameter_validation_utils.cpp)
+
+set(BEST_PRACTICES_LIBRARY_FILES
+    best_practices.cpp
+    best_practices.h)
+
+set(GPU_ASSISTED_LIBRARY_FILES
+    gpu_validation.cpp
+    gpu_validation.h)
+
+if(BUILD_LAYERS)
+    AddVkLayer(khronos_validation ""
+        ${CHASSIS_LIBRARY_FILES}
+        ${CORE_VALIDATION_LIBRARY_FILES}
+        ${OBJECT_LIFETIMES_LIBRARY_FILES}
+        ${THREAD_SAFETY_LIBRARY_FILES}
+        ${STATELESS_VALIDATION_LIBRARY_FILES}
+        ${BEST_PRACTICES_LIBRARY_FILES}
+        ${GPU_ASSISTED_LIBRARY_FILES})
+
+    # Khronos validation additional dependencies
+    target_include_directories(VkLayer_khronos_validation PRIVATE ${GLSLANG_SPIRV_INCLUDE_DIR})
+    target_include_directories(VkLayer_khronos_validation PRIVATE ${SPIRV_TOOLS_INCLUDE_DIR})
+    target_link_libraries(VkLayer_khronos_validation PRIVATE ${SPIRV_TOOLS_LIBRARIES})
+
+    # The output file needs Unix "/" separators or Windows "\" separators On top of that, Windows separators actually need to be doubled
+    # because the json format uses backslash escapes
+    file(TO_NATIVE_PATH "./" RELATIVE_PATH_PREFIX)
+    string(REPLACE "\\"
+                   "\\\\"
+                   RELATIVE_PATH_PREFIX
+                   "${RELATIVE_PATH_PREFIX}")
+
+    # Run each .json.in file through the generator We need to create the generator.cmake script so that the generator can be run at
+    # compile time, instead of configure time Running at compile time lets us use cmake generator expressions (TARGET_FILE_NAME and
+    # TARGET_FILE_DIR, specifically)
+    file(WRITE "${CMAKE_CURRENT_BINARY_DIR}/generator.cmake" "configure_file(\"\${INPUT_FILE}\" \"\${OUTPUT_FILE}\")")
+    foreach(TARGET_NAME ${TARGET_NAMES})
+        set(CONFIG_DEFINES -DINPUT_FILE="${CMAKE_CURRENT_SOURCE_DIR}/json/${TARGET_NAME}.json.in" -DVK_VERSION=1.1.${vk_header_version})
+        # If this json file is not a metalayer, get the needed properties from that target
+        if(TARGET ${TARGET_NAME})
+            set(CONFIG_DEFINES
+                ${CONFIG_DEFINES}
+                -DOUTPUT_FILE="$<TARGET_FILE_DIR:${TARGET_NAME}>/${TARGET_NAME}.json"
+                -DRELATIVE_LAYER_BINARY="${RELATIVE_PATH_PREFIX}$<TARGET_FILE_NAME:${TARGET_NAME}>")
+            # If this json file is a metalayer, make the output path match core validation, and there is no layer binary file
+        else()
+            set(CONFIG_DEFINES ${CONFIG_DEFINES} -DOUTPUT_FILE="$<TARGET_FILE_DIR:VkLayer_khronos_validation>/${TARGET_NAME}.json")
+        endif()
+        add_custom_target(${TARGET_NAME}-json ALL
+                          COMMAND ${CMAKE_COMMAND} ${CONFIG_DEFINES} -P "${CMAKE_CURRENT_BINARY_DIR}/generator.cmake")
+        if(CMAKE_GENERATOR MATCHES "^Visual Studio.*")
+            set_target_properties(${TARGET_NAME}-json PROPERTIES FOLDER ${LAYERS_HELPER_FOLDER})
+        endif()
+    endforeach()
+
+    # For UNIX-based systems, `library_path` should not contain a relative path (indicated by "./") before installing to system
+    # directories, so we do not include it in the staging-json files which are used for installation
+    if(UNIX)
+        foreach(TARGET_NAME ${TARGET_NAMES})
+            set(INSTALL_DEFINES
+                -DINPUT_FILE="${CMAKE_CURRENT_SOURCE_DIR}/json/${TARGET_NAME}.json.in"
+                -DOUTPUT_FILE="${CMAKE_CURRENT_BINARY_DIR}/staging-json/${TARGET_NAME}.json"
+                -DVK_VERSION=1.1.${vk_header_version})
+            # If this json file is not a metalayer, get the needed properties from that target
+            if(TARGET ${TARGET_NAME})
+                set(INSTALL_DEFINES ${INSTALL_DEFINES} -DRELATIVE_LAYER_BINARY="$<TARGET_FILE_NAME:${TARGET_NAME}>")
+            endif()
+            add_custom_target(${TARGET_NAME}-staging-json ALL
+                              COMMAND ${CMAKE_COMMAND} ${INSTALL_DEFINES} -P "${CMAKE_CURRENT_BINARY_DIR}/generator.cmake")
+        endforeach()
+    endif()
+endif()
diff --git a/src/third_party/vulkan-validation-layers/src/layers/VkLayer_khronos_validation.def b/src/third_party/vulkan-validation-layers/src/layers/VkLayer_khronos_validation.def
new file mode 100644
index 0000000..4cbfcf4
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/VkLayer_khronos_validation.def
@@ -0,0 +1,30 @@
+
+;;;; Begin Copyright Notice ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;
+; Copyright (c) 2015-2019 The Khronos Group Inc.
+; Copyright (c) 2015-2019 Valve Corporation
+; Copyright (c) 2015-2019 LunarG, Inc.
+;
+; Licensed under the Apache License, Version 2.0 (the "License");
+; you may not use this file except in compliance with the License.
+; You may obtain a copy of the License at
+;
+;     http://www.apache.org/licenses/LICENSE-2.0
+;
+; Unless required by applicable law or agreed to in writing, software
+; distributed under the License is distributed on an "AS IS" BASIS,
+; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+; See the License for the specific language governing permissions and
+; limitations under the License.
+;
+;  Author: Mark Lobodzinski <mark@LunarG.com>
+;
+;;;;  End Copyright Notice ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+LIBRARY VkLayer_khronos_validation
+EXPORTS
+vkGetInstanceProcAddr
+vkGetDeviceProcAddr
+vkEnumerateInstanceLayerProperties
+vkEnumerateInstanceExtensionProperties
+vkNegotiateLoaderLayerInterfaceVersion
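The exports above are the entry points the Vulkan loader resolves directly from the layer library. As an aside, a minimal sketch of the standard two-call enumeration idiom an application uses to discover layers such as VK_LAYER_KHRONOS_validation, assuming only the Vulkan headers and loader are available (illustrative only, not part of the patch):

    // Sketch: enumerate available instance layers with the two-call idiom that the
    // best-practices checks added below also expect for other count/data queries.
    #include <vulkan/vulkan.h>
    #include <cstdio>
    #include <vector>

    int main() {
        uint32_t count = 0;
        vkEnumerateInstanceLayerProperties(&count, nullptr);        // first call: query the count
        std::vector<VkLayerProperties> layers(count);
        vkEnumerateInstanceLayerProperties(&count, layers.data());  // second call: fill the array
        for (const auto& layer : layers) {
            std::printf("%s\n", layer.layerName);  // e.g. VK_LAYER_KHRONOS_validation
        }
        return 0;
    }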
diff --git a/src/third_party/vulkan-validation-layers/src/layers/android_ndk_types.h b/src/third_party/vulkan-validation-layers/src/layers/android_ndk_types.h
new file mode 100644
index 0000000..515f128
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/android_ndk_types.h
@@ -0,0 +1,122 @@
+/* Copyright (c) 2018-2019 The Khronos Group Inc.
+ * Copyright (c) 2018-2019 Valve Corporation
+ * Copyright (c) 2018-2019 LunarG, Inc.
+ * Copyright (C) 2018-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Dave Houlton <daveh@lunarg.com>
+ */
+
+#ifndef ANDROID_NDK_TYPES_H_
+#define ANDROID_NDK_TYPES_H_
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+// All enums referenced by VK_ANDROID_external_memory_android_hardware_buffer are present in
+// the platform-28 (Android P) versions of the header files.  A partial set exists in the
+// platform-26 (O) headers, where hardware_buffer.h first appears in the NDK.
+//
+// Building Vulkan validation with NDK header files prior to platform-26 is not supported.
+//
+// Decoder ring for Android compile symbols found here: https://github.com/android-ndk/ndk/issues/407
+
+#ifdef __ANDROID__  // Compiling for Android
+#include <android/api-level.h>
+#include <android/hardware_buffer.h>  // First appearance in Android O (platform-26)
+
+// If NDK is O (platform-26 or -27), supplement the missing enums with pre-processor defined literals
+// If Android P or later, then all required enums are already defined
+#if defined(__ANDROID_API_O__) && !defined(__ANDROID_API_P__)
+// Formats
+#define AHARDWAREBUFFER_FORMAT_D16_UNORM 0x30
+#define AHARDWAREBUFFER_FORMAT_D24_UNORM 0x31
+#define AHARDWAREBUFFER_FORMAT_D24_UNORM_S8_UINT 0x32
+#define AHARDWAREBUFFER_FORMAT_D32_FLOAT 0x33
+#define AHARDWAREBUFFER_FORMAT_D32_FLOAT_S8_UINT 0x34
+#define AHARDWAREBUFFER_FORMAT_S8_UINT 0x35
+// Usage bits
+#define AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP 0x2000000
+#define AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE 0x4000000
+#endif  // __ANDROID_API_O__ && !_P__
+
+#else  // Not __ANDROID__, but VK_USE_PLATFORM_ANDROID_KHR
+// This combination should not be seen in the wild, but can be used to allow testing
+// of the AHB extension validation on other platforms using MockICD
+//
+// Define the minimal set of NDK enums and structs needed to compile
+// VK_ANDROID_external_memory_android_hardware_buffer validation without an NDK present
+struct AHardwareBuffer {};
+
+// Enumerations of format and usage flags for Android opaque external memory blobs
+typedef enum AHardwareBufferFormat {
+    AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM = 1,
+    AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM = 2,
+    AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM = 3,
+    AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM = 4,
+    AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT = 0x16,
+    AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM = 0x2b,
+    AHARDWAREBUFFER_FORMAT_D16_UNORM = 0x30,
+    AHARDWAREBUFFER_FORMAT_D24_UNORM = 0x31,
+    AHARDWAREBUFFER_FORMAT_D24_UNORM_S8_UINT = 0x32,
+    AHARDWAREBUFFER_FORMAT_D32_FLOAT = 0x33,
+    AHARDWAREBUFFER_FORMAT_D32_FLOAT_S8_UINT = 0x34,
+    AHARDWAREBUFFER_FORMAT_S8_UINT = 0x35,
+    AHARDWAREBUFFER_FORMAT_BLOB = 0x21
+} AHardwareBufferFormat;
+
+typedef enum AHardwareBufferUsage {
+    AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE = 0x100,
+    AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT = 0x200,
+    AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP = 0x2000000,
+    AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE = 0x4000000,
+    AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT = 0x4000,
+    AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER = 0x1000000
+} AHardwareBufferUsage;
+
+typedef struct AHardwareBuffer_Desc {
+    uint32_t format;  // One of AHARDWAREBUFFER_FORMAT_*.
+    uint32_t height;  // Height in pixels.
+    uint32_t layers;  // Number of images in an image array.
+    uint32_t rfu0;    // Initialize to zero, reserved for future use.
+    uint64_t rfu1;    // Initialize to zero, reserved for future use.
+    uint32_t stride;  // Row stride in pixels, ignored for AHardwareBuffer_allocate().
+    uint64_t usage;   // Combination of AHARDWAREBUFFER_USAGE_*.
+    uint32_t width;   // Width in pixels.
+} AHardwareBuffer_Desc;
+
+// Minimal NDK function stubs to allow testing on an NDK-less platform
+static inline int AHardwareBuffer_allocate(const AHardwareBuffer_Desc *ahbDesc, AHardwareBuffer **buffer) {
+    size_t size = ahbDesc->height * ahbDesc->width * 8;  // Alloc for largest (64 bpp) format
+    if (size < sizeof(AHardwareBuffer_Desc)) size = sizeof(AHardwareBuffer_Desc);
+    *buffer = (AHardwareBuffer *)malloc(size);
+    memcpy((void *)(*buffer), (void *)ahbDesc, sizeof(AHardwareBuffer_Desc));
+    return 0;
+}
+
+static inline void AHardwareBuffer_release(AHardwareBuffer *buffer) {
+    if (buffer) free(buffer);
+}
+
+static inline void AHardwareBuffer_describe(const AHardwareBuffer *buffer, AHardwareBuffer_Desc *outDesc) {
+    if (buffer && outDesc) {
+        memcpy((void *)outDesc, (void *)buffer, sizeof(AHardwareBuffer_Desc));
+    }
+    return;
+}
+
+#endif  // __ANDROID__
+
+#endif  // VK_USE_PLATFORM_ANDROID_KHR
+
+#endif  // ANDROID_NDK_TYPES_H_
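When the header above is compiled with VK_USE_PLATFORM_ANDROID_KHR defined but without __ANDROID__, the stubs simply round-trip an AHardwareBuffer_Desc through malloc/memcpy. A minimal usage sketch under that assumption (illustrative only, not part of the patch):

    // Sketch: exercising the mock AHardwareBuffer stubs defined above (NDK-less build).
    #include "android_ndk_types.h"

    static void RoundTripDescriptor() {
        AHardwareBuffer_Desc desc = {};
        desc.format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
        desc.width = 64;
        desc.height = 64;
        desc.layers = 1;
        desc.usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;

        AHardwareBuffer* ahb = nullptr;
        AHardwareBuffer_allocate(&desc, &ahb);     // stub: malloc + memcpy of the descriptor
        AHardwareBuffer_Desc readback = {};
        AHardwareBuffer_describe(ahb, &readback);  // stub: memcpy the descriptor back out
        AHardwareBuffer_release(ahb);              // stub: free
    }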
diff --git a/src/third_party/vulkan-validation-layers/src/layers/best_practices.cpp b/src/third_party/vulkan-validation-layers/src/layers/best_practices.cpp
new file mode 100644
index 0000000..ef88311
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/best_practices.cpp
@@ -0,0 +1,864 @@
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Camden Stocker <camden@lunarg.com>
+ */
+
+#include "best_practices.h"
+#include "layer_chassis_dispatch.h"
+#include "best_practices_error_enums.h"
+
+#include <string>
+#include <iomanip>
+
+// Get the API name in proper format
+std::string BestPractices::GetAPIVersionName(uint32_t version) const {
+    std::stringstream version_name;
+    uint32_t major = VK_VERSION_MAJOR(version);
+    uint32_t minor = VK_VERSION_MINOR(version);
+    uint32_t patch = VK_VERSION_PATCH(version);
+
+    version_name << major << "." << minor << "." << patch << " (0x" << std::setfill('0') << std::setw(8) << std::hex << version
+                 << ")";
+
+    return version_name.str();
+}
+
+bool BestPractices::PreCallValidateCreateInstance(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
+                                                  VkInstance* pInstance) const {
+    bool skip = false;
+
+    for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
+        if (white_list(pCreateInfo->ppEnabledExtensionNames[i], kDeviceExtensionNames)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            kVUID_BestPractices_CreateInstance_ExtensionMismatch,
+                            "vkCreateInstance(): Attempting to enable Device Extension %s at CreateInstance time.",
+                            pCreateInfo->ppEnabledExtensionNames[i]);
+        }
+    }
+
+    return skip;
+}
+
+void BestPractices::PreCallRecordCreateInstance(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
+                                                VkInstance* pInstance) {
+    instance_api_version = pCreateInfo->pApplicationInfo->apiVersion;
+}
+
+bool BestPractices::PreCallValidateCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo,
+                                                const VkAllocationCallbacks* pAllocator, VkDevice* pDevice) const {
+    bool skip = false;
+
+    // get API version of physical device passed when creating device.
+    VkPhysicalDeviceProperties physical_device_properties{};
+    DispatchGetPhysicalDeviceProperties(physicalDevice, &physical_device_properties);
+    auto device_api_version = physical_device_properties.apiVersion;
+
+    // check api versions and warn if instance api Version is higher than version on device.
+    if (instance_api_version > device_api_version) {
+        std::string inst_api_name = GetAPIVersionName(instance_api_version);
+        std::string dev_api_name = GetAPIVersionName(device_api_version);
+
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        kVUID_BestPractices_CreateDevice_API_Mismatch,
+                        "vkCreateDevice(): API Version of current instance, %s, is higher than API Version on device, %s.",
+                        inst_api_name.c_str(), dev_api_name.c_str());
+    }
+
+    for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
+        if (white_list(pCreateInfo->ppEnabledExtensionNames[i], kInstanceExtensionNames)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            kVUID_BestPractices_CreateInstance_ExtensionMismatch,
+                            "vkCreateDevice(): Attempting to enable Instance Extension %s at CreateDevice time.",
+                            pCreateInfo->ppEnabledExtensionNames[i]);
+        }
+    }
+
+    auto pd_state = GetPhysicalDeviceState(physicalDevice);
+    if ((pd_state->vkGetPhysicalDeviceFeaturesState == UNCALLED) && (pCreateInfo->pEnabledFeatures != NULL)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        kVUID_BestPractices_CreateDevice_PDFeaturesNotCalled,
+                        "vkCreateDevice() called before getting physical device features from vkGetPhysicalDeviceFeatures().");
+    }
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateCreateBuffer(VkDevice device, const VkBufferCreateInfo* pCreateInfo,
+                                                const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer) const {
+    bool skip = false;
+
+    if ((pCreateInfo->queueFamilyIndexCount > 1) && (pCreateInfo->sharingMode == VK_SHARING_MODE_EXCLUSIVE)) {
+        std::stringstream bufferHex;
+        bufferHex << "0x" << std::hex << HandleToUint64(pBuffer);
+
+        skip |=
+            log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                    kVUID_BestPractices_SharingModeExclusive,
+                    "Warning: Buffer (%s) specifies a sharing mode of VK_SHARING_MODE_EXCLUSIVE while specifying multiple queues "
+                    "(queueFamilyIndexCount of %" PRIu32 ").",
+                    bufferHex.str().c_str(), pCreateInfo->queueFamilyIndexCount);
+    }
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateCreateImage(VkDevice device, const VkImageCreateInfo* pCreateInfo,
+                                               const VkAllocationCallbacks* pAllocator, VkImage* pImage) const {
+    bool skip = false;
+
+    if ((pCreateInfo->queueFamilyIndexCount > 1) && (pCreateInfo->sharingMode == VK_SHARING_MODE_EXCLUSIVE)) {
+        std::stringstream imageHex;
+        imageHex << "0x" << std::hex << HandleToUint64(pImage);
+
+        skip |=
+            log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                    kVUID_BestPractices_SharingModeExclusive,
+                    "Warning: Image (%s) specifies a sharing mode of VK_SHARING_MODE_EXCLUSIVE while specifying multiple queues "
+                    "(queueFamilyIndexCount of %" PRIu32 ").",
+                    imageHex.str().c_str(), pCreateInfo->queueFamilyIndexCount);
+    }
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo,
+                                                      const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain) const {
+    bool skip = false;
+
+    auto physical_device_state = GetPhysicalDeviceState();
+
+    if (physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState == UNCALLED) {
+        skip |= log_msg(
+            report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+            kVUID_BestPractices_Swapchain_GetSurfaceNotCalled,
+            "vkCreateSwapchainKHR() called before getting surface capabilities from vkGetPhysicalDeviceSurfaceCapabilitiesKHR().");
+    }
+
+    if (physical_device_state->vkGetPhysicalDeviceSurfacePresentModesKHRState != QUERY_DETAILS) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        kVUID_BestPractices_Swapchain_GetSurfaceNotCalled,
+                        "vkCreateSwapchainKHR() called before getting surface present mode(s) from "
+                        "vkGetPhysicalDeviceSurfacePresentModesKHR().");
+    }
+
+    if (physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState != QUERY_DETAILS) {
+        skip |=
+            log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                    kVUID_BestPractices_Swapchain_GetSurfaceNotCalled,
+                    "vkCreateSwapchainKHR() called before getting surface format(s) from vkGetPhysicalDeviceSurfaceFormatsKHR().");
+    }
+
+    if ((pCreateInfo->queueFamilyIndexCount > 1) && (pCreateInfo->imageSharingMode == VK_SHARING_MODE_EXCLUSIVE)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        kVUID_BestPractices_SharingModeExclusive,
+                        "Warning: A Swapchain is being created which specifies a sharing mode of VK_SHARING_MODE_EXCLUSIVE while "
+                        "specifying multiple queues (queueFamilyIndexCount of %" PRIu32 ").",
+                        pCreateInfo->queueFamilyIndexCount);
+    }
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
+                                                             const VkSwapchainCreateInfoKHR* pCreateInfos,
+                                                             const VkAllocationCallbacks* pAllocator,
+                                                             VkSwapchainKHR* pSwapchains) const {
+    bool skip = false;
+
+    for (uint32_t i = 0; i < swapchainCount; i++) {
+        if ((pCreateInfos[i].queueFamilyIndexCount > 1) && (pCreateInfos[i].imageSharingMode == VK_SHARING_MODE_EXCLUSIVE)) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        kVUID_BestPractices_SharingModeExclusive,
+                        "Warning: A shared swapchain (index %" PRIu32
+                        ") is being created which specifies a sharing mode of VK_SHARING_MODE_EXCLUSIVE while specifying multiple "
+                        "queues (queueFamilyIndexCount of %" PRIu32 ").",
+                        i, pCreateInfos[i].queueFamilyIndexCount);
+        }
+    }
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo,
+                                                    const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass) const {
+    bool skip = false;
+
+    for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
+        VkFormat format = pCreateInfo->pAttachments[i].format;
+        if (pCreateInfo->pAttachments[i].initialLayout == VK_IMAGE_LAYOUT_UNDEFINED) {
+            if ((FormatIsColor(format) || FormatHasDepth(format)) &&
+                pCreateInfo->pAttachments[i].loadOp == VK_ATTACHMENT_LOAD_OP_LOAD) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                kVUID_BestPractices_RenderPass_Attatchment,
+                                "Render pass has an attachment with loadOp == VK_ATTACHMENT_LOAD_OP_LOAD and "
+                                "initialLayout == VK_IMAGE_LAYOUT_UNDEFINED.  This is probably not what you "
+                                "intended.  Consider using VK_ATTACHMENT_LOAD_OP_DONT_CARE instead if the "
+                                "image truly is undefined at the start of the render pass.");
+            }
+            if (FormatHasStencil(format) && pCreateInfo->pAttachments[i].stencilLoadOp == VK_ATTACHMENT_LOAD_OP_LOAD) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                kVUID_BestPractices_RenderPass_Attatchment,
+                                "Render pass has an attachment with stencilLoadOp == VK_ATTACHMENT_LOAD_OP_LOAD "
+                                "and initialLayout == VK_IMAGE_LAYOUT_UNDEFINED.  This is probably not what you "
+                                "intended.  Consider using VK_ATTACHMENT_LOAD_OP_DONT_CARE instead if the "
+                                "image truly is undefined at the start of the render pass.");
+            }
+        }
+    }
+
+    for (uint32_t dependency = 0; dependency < pCreateInfo->dependencyCount; dependency++) {
+        skip |= CheckPipelineStageFlags("vkCreateRenderPass", pCreateInfo->pDependencies[dependency].srcStageMask);
+        skip |= CheckPipelineStageFlags("vkCreateRenderPass", pCreateInfo->pDependencies[dependency].dstStageMask);
+    }
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateAllocateMemory(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo,
+                                                  const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory) const {
+    bool skip = false;
+
+    if (num_mem_objects + 1 > kMemoryObjectWarningLimit) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        kVUID_BestPractices_AllocateMemory_TooManyObjects,
+                        "Performance Warning: This app has > %" PRIu32 " memory objects.", kMemoryObjectWarningLimit);
+    }
+
+    // TODO: Insert get check for GetPhysicalDeviceMemoryProperties once the state is tracked in the StateTracker
+
+    return skip;
+}
+
+void BestPractices::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo,
+                                                 const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory,
+                                                 VkResult result) {
+    ValidationStateTracker::PostCallRecordAllocateMemory(device, pAllocateInfo, pAllocator, pMemory, result);
+
+    if (VK_SUCCESS == result) {
+        num_mem_objects++;
+    }
+}
+
+bool BestPractices::PreCallValidateFreeMemory(VkDevice device, VkDeviceMemory memory,
+                                              const VkAllocationCallbacks* pAllocator) const {
+    bool skip = false;
+
+    const DEVICE_MEMORY_STATE* mem_info = ValidationStateTracker::GetDevMemState(memory);
+
+    for (auto& obj : mem_info->obj_bindings) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, get_debug_report_enum[obj.type], 0, layer_name.c_str(),
+                        "VK Object %s still has a reference to mem obj %s.", report_data->FormatHandle(obj).c_str(),
+                        report_data->FormatHandle(mem_info->mem).c_str());
+    }
+
+    return skip;
+}
+
+void BestPractices::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator) {
+    if (memory != VK_NULL_HANDLE) {
+        num_mem_objects--;
+    }
+}
+
+bool BestPractices::ValidateBindBufferMemory(VkBuffer buffer, const char* api_name) const {
+    bool skip = false;
+    const BUFFER_STATE* buffer_state = GetBufferState(buffer);
+
+    if (!buffer_state->memory_requirements_checked && !buffer_state->external_memory_handle) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        kVUID_BestPractices_BufferMemReqNotCalled,
+                        "%s: Binding memory to %s but vkGetBufferMemoryRequirements() has not been called on that buffer.",
+                        api_name, report_data->FormatHandle(buffer).c_str());
+    }
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory,
+                                                    VkDeviceSize memoryOffset) const {
+    bool skip = false;
+    const char* api_name = "BindBufferMemory()";
+
+    skip |= ValidateBindBufferMemory(buffer, api_name);
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
+                                                     const VkBindBufferMemoryInfo* pBindInfos) const {
+    char api_name[64];
+    bool skip = false;
+
+    for (uint32_t i = 0; i < bindInfoCount; i++) {
+        sprintf(api_name, "vkBindBufferMemory2() pBindInfos[%u]", i);
+        skip |= ValidateBindBufferMemory(pBindInfos[i].buffer, api_name);
+    }
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
+                                                        const VkBindBufferMemoryInfo* pBindInfos) const {
+    char api_name[64];
+    bool skip = false;
+
+    for (uint32_t i = 0; i < bindInfoCount; i++) {
+        sprintf(api_name, "vkBindBufferMemory2KHR() pBindInfos[%u]", i);
+        skip |= ValidateBindBufferMemory(pBindInfos[i].buffer, api_name);
+    }
+
+    return skip;
+}
+
+bool BestPractices::ValidateBindImageMemory(VkImage image, const char* api_name) const {
+    bool skip = false;
+    const IMAGE_STATE* image_state = GetImageState(image);
+
+    if (!image_state->memory_requirements_checked && !image_state->external_memory_handle) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        kVUID_BestPractices_ImageMemReqNotCalled,
+                        "%s: Binding memory to %s but vkGetImageMemoryRequirements() has not been called on that image.", api_name,
+                        report_data->FormatHandle(image).c_str());
+    }
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory,
+                                                   VkDeviceSize memoryOffset) const {
+    bool skip = false;
+    const char* api_name = "vkBindImageMemory()";
+
+    skip |= ValidateBindImageMemory(image, api_name);
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
+                                                    const VkBindImageMemoryInfo* pBindInfos) const {
+    char api_name[64];
+    bool skip = false;
+
+    for (uint32_t i = 0; i < bindInfoCount; i++) {
+        sprintf(api_name, "vkBindImageMemory2() pBindInfos[%u]", i);
+        skip |= ValidateBindImageMemory(pBindInfos[i].image, api_name);
+    }
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
+                                                       const VkBindImageMemoryInfo* pBindInfos) const {
+    char api_name[64];
+    bool skip = false;
+
+    for (uint32_t i = 0; i < bindInfoCount; i++) {
+        sprintf(api_name, "vkBindImageMemory2KHR() pBindInfos[%u]", i);
+        skip |= ValidateBindImageMemory(pBindInfos[i].image, api_name);
+    }
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
+                                                           const VkGraphicsPipelineCreateInfo* pCreateInfos,
+                                                           const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines,
+                                                           void* cgpl_state_data) const {
+    bool skip = StateTracker::PreCallValidateCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos,
+                                                                     pAllocator, pPipelines, cgpl_state_data);
+
+    if ((createInfoCount > 1) && (!pipelineCache)) {
+        skip |=
+            log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                    kVUID_BestPractices_CreatePipelines_MultiplePipelines,
+                    "Performance Warning: This vkCreateGraphicsPipelines call is creating multiple pipelines but is not using a "
+                    "pipeline cache, which may help with performance.");
+    }
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
+                                                          const VkComputePipelineCreateInfo* pCreateInfos,
+                                                          const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines,
+                                                          void* ccpl_state_data) const {
+    bool skip = StateTracker::PreCallValidateCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos,
+                                                                    pAllocator, pPipelines, ccpl_state_data);
+
+    if ((createInfoCount > 1) && (!pipelineCache)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        kVUID_BestPractices_CreatePipelines_MultiplePipelines,
+                        "Performance Warning: This vkCreateComputePipelines call is creating multiple pipelines but is not using a "
+                        "pipeline cache, which may help with performance.");
+    }
+
+    return skip;
+}
+
+bool BestPractices::CheckPipelineStageFlags(std::string api_name, const VkPipelineStageFlags flags) const {
+    bool skip = false;
+
+    if (flags & VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        kVUID_BestPractices_PipelineStageFlags,
+                        "You are using VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT when %s is called\n", api_name.c_str());
+    } else if (flags & VK_PIPELINE_STAGE_ALL_COMMANDS_BIT) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        kVUID_BestPractices_PipelineStageFlags,
+                        "You are using VK_PIPELINE_STAGE_ALL_COMMANDS_BIT when %s is called\n", api_name.c_str());
+    }
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits,
+                                               VkFence fence) const {
+    bool skip = false;
+
+    for (uint32_t submit = 0; submit < submitCount; submit++) {
+        for (uint32_t semaphore = 0; semaphore < pSubmits[submit].waitSemaphoreCount; semaphore++) {
+            skip |= CheckPipelineStageFlags("vkQueueSubmit", pSubmits[submit].pWaitDstStageMask[semaphore]);
+        }
+    }
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const {
+    bool skip = false;
+
+    skip |= CheckPipelineStageFlags("vkCmdSetEvent", stageMask);
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
+                                                 VkPipelineStageFlags stageMask) const {
+    bool skip = false;
+
+    skip |= CheckPipelineStageFlags("vkCmdResetEvent", stageMask);
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents,
+                                                 VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
+                                                 uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers,
+                                                 uint32_t bufferMemoryBarrierCount,
+                                                 const VkBufferMemoryBarrier* pBufferMemoryBarriers,
+                                                 uint32_t imageMemoryBarrierCount,
+                                                 const VkImageMemoryBarrier* pImageMemoryBarriers) const {
+    bool skip = false;
+
+    skip |= CheckPipelineStageFlags("vkCmdWaitEvents", srcStageMask);
+    skip |= CheckPipelineStageFlags("vkCmdWaitEvents", dstStageMask);
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
+                                                      VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
+                                                      uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers,
+                                                      uint32_t bufferMemoryBarrierCount,
+                                                      const VkBufferMemoryBarrier* pBufferMemoryBarriers,
+                                                      uint32_t imageMemoryBarrierCount,
+                                                      const VkImageMemoryBarrier* pImageMemoryBarriers) const {
+    bool skip = false;
+
+    skip |= CheckPipelineStageFlags("vkCmdPipelineBarrier", srcStageMask);
+    skip |= CheckPipelineStageFlags("vkCmdPipelineBarrier", dstStageMask);
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
+                                                     VkQueryPool queryPool, uint32_t query) const {
+    bool skip = false;
+
+    skip |= CheckPipelineStageFlags("vkCmdWriteTimestamp", pipelineStage);
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
+                                           uint32_t firstVertex, uint32_t firstInstance) const {
+    bool skip = false;
+
+    if (instanceCount == 0) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        kVUID_BestPractices_CmdDraw_InstanceCountZero,
+                        "Warning: You are calling vkCmdDraw() with an instanceCount of Zero.");
+    }
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
+                                                  uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) const {
+    bool skip = false;
+
+    if (instanceCount == 0) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        kVUID_BestPractices_CmdDraw_InstanceCountZero,
+                        "Warning: You are calling vkCmdDrawIndexed() with an instanceCount of Zero.");
+    }
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+                                                   uint32_t drawCount, uint32_t stride) const {
+    bool skip = false;
+
+    if (drawCount == 0) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        kVUID_BestPractices_CmdDraw_DrawCountZero,
+                        "Warning: You are calling vkCmdDrawIndirect() with a drawCount of Zero.");
+    }
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+                                                          uint32_t drawCount, uint32_t stride) const {
+    bool skip = false;
+
+    if (drawCount == 0) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        kVUID_BestPractices_CmdDraw_DrawCountZero,
+                        "Warning: You are calling vkCmdDrawIndexedIndirect() with a drawCount of Zero.");
+    }
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateCmdDispatch(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY,
+                                               uint32_t groupCountZ) const {
+    bool skip = false;
+
+    if ((groupCountX == 0) || (groupCountY == 0) || (groupCountZ == 0)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        kVUID_BestPractices_CmdDispatch_GroupCountZero,
+                        "Warning: You are calling vkCmdDispatch() while one or more groupCounts are zero (groupCountX = %" PRIu32
+                        ", groupCountY = %" PRIu32 ", groupCountZ = %" PRIu32 ").",
+                        groupCountX, groupCountY, groupCountZ);
+    }
+
+    return skip;
+}
+
+bool BestPractices::ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(VkPhysicalDevice physicalDevice,
+                                                                            const char* api_name) const {
+    bool skip = false;
+    const auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
+
+    if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState == UNCALLED) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+                        HandleToUint64(physicalDevice), kVUID_BestPractices_DisplayPlane_PropertiesNotCalled,
+                        "Potential problem with calling %s() without first retrieving properties from "
+                        "vkGetPhysicalDeviceDisplayPlanePropertiesKHR or vkGetPhysicalDeviceDisplayPlaneProperties2KHR.",
+                        api_name);
+    }
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateGetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex,
+                                                                       uint32_t* pDisplayCount, VkDisplayKHR* pDisplays) const {
+    bool skip = false;
+
+    skip |= ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(physicalDevice, "vkGetDisplayPlaneSupportedDisplaysKHR");
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateGetDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode,
+                                                                  uint32_t planeIndex,
+                                                                  VkDisplayPlaneCapabilitiesKHR* pCapabilities) const {
+    bool skip = false;
+
+    skip |= ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(physicalDevice, "vkGetDisplayPlaneCapabilitiesKHR");
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateGetDisplayPlaneCapabilities2KHR(VkPhysicalDevice physicalDevice,
+                                                                   const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo,
+                                                                   VkDisplayPlaneCapabilities2KHR* pCapabilities) const {
+    bool skip = false;
+
+    skip |= ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(physicalDevice, "vkGetDisplayPlaneCapabilities2KHR");
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount,
+                                                         VkImage* pSwapchainImages) const {
+    bool skip = false;
+
+    auto swapchain_state = GetSwapchainState(swapchain);
+
+    if (swapchain_state && pSwapchainImages) {
+        // Compare the preliminary value of *pSwapchainImageCount with the value this time:
+        if (swapchain_state->vkGetSwapchainImagesKHRState == UNCALLED) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                            HandleToUint64(device), kVUID_Core_Swapchain_PriorCount,
+                            "vkGetSwapchainImagesKHR() called with non-NULL pSwapchainImageCount, but no prior positive value has "
+                            "been seen for pSwapchainImages.");
+        }
+    }
+
+    return skip;
+}
+
+// Common function to handle validation for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
+static bool ValidateCommonGetPhysicalDeviceQueueFamilyProperties(debug_report_data* report_data,
+                                                                 const PHYSICAL_DEVICE_STATE* pd_state,
+                                                                 uint32_t requested_queue_family_property_count, bool qfp_null,
+                                                                 const char* caller_name) {
+    bool skip = false;
+    if (!qfp_null) {
+        // Verify that for each physical device, this command is called first with NULL pQueueFamilyProperties in order to get count
+        if (UNCALLED == pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState) {
+            skip |= log_msg(
+                report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+                HandleToUint64(pd_state->phys_device), kVUID_Core_DevLimit_MissingQueryCount,
+                "%s is called with non-NULL pQueueFamilyProperties before obtaining pQueueFamilyPropertyCount. It is recommended "
+                "to first call %s with NULL pQueueFamilyProperties in order to obtain the maximal pQueueFamilyPropertyCount.",
+                caller_name, caller_name);
+            // Then verify that the pCount passed in on the second call matches what was returned
+        } else if (pd_state->queue_family_known_count != requested_queue_family_property_count) {
+            skip |= log_msg(
+                report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+                HandleToUint64(pd_state->phys_device), kVUID_Core_DevLimit_CountMismatch,
+                "%s is called with non-NULL pQueueFamilyProperties and pQueueFamilyPropertyCount value %" PRIu32
+                ", but the largest previously returned pQueueFamilyPropertyCount for this physicalDevice is %" PRIu32
+                ". It is recommended to instead receive all the properties by calling %s with pQueueFamilyPropertyCount that was "
+                "previously obtained by calling %s with NULL pQueueFamilyProperties.",
+                caller_name, requested_queue_family_property_count, pd_state->queue_family_known_count, caller_name, caller_name);
+        }
+    }
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateBindAccelerationStructureMemoryNV(
+    VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos) const {
+    bool skip = false;
+
+    for (uint32_t i = 0; i < bindInfoCount; i++) {
+        const ACCELERATION_STRUCTURE_STATE* as_state = GetAccelerationStructureState(pBindInfos[i].accelerationStructure);
+        if (!as_state->memory_requirements_checked) {
+            // The spec does not explicitly require calling vkGetAccelerationStructureMemoryRequirementsNV() prior to
+            // vkBindAccelerationStructureMemoryNV(), but it is implied: the memory being bound must conform to the
+            // requirements returned by vkGetAccelerationStructureMemoryRequirementsNV() for that acceleration structure
+            skip |= log_msg(
+                report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 0,
+                kVUID_BestPractices_BindAccelNV_NoMemReqQuery,
+                "vkBindAccelerationStructureMemoryNV(): "
+                "Binding memory to %s but vkGetAccelerationStructureMemoryRequirementsNV() has not been called on that structure.",
+                report_data->FormatHandle(pBindInfos[i].accelerationStructure).c_str());
+        }
+    }
+
+    return skip;
+}
+
+bool BestPractices::PreCallValidateGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
+                                                                          uint32_t* pQueueFamilyPropertyCount,
+                                                                          VkQueueFamilyProperties* pQueueFamilyProperties) const {
+    const auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
+    assert(physical_device_state);
+    return ValidateCommonGetPhysicalDeviceQueueFamilyProperties(report_data, physical_device_state, *pQueueFamilyPropertyCount,
+                                                                (nullptr == pQueueFamilyProperties),
+                                                                "vkGetPhysicalDeviceQueueFamilyProperties()");
+}
+
+bool BestPractices::PreCallValidateGetPhysicalDeviceQueueFamilyProperties2(
+    VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties2KHR* pQueueFamilyProperties) const {
+    const auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
+    assert(physical_device_state);
+    return ValidateCommonGetPhysicalDeviceQueueFamilyProperties(report_data, physical_device_state, *pQueueFamilyPropertyCount,
+                                                                (nullptr == pQueueFamilyProperties),
+                                                                "vkGetPhysicalDeviceQueueFamilyProperties2()");
+}
+
+bool BestPractices::PreCallValidateGetPhysicalDeviceQueueFamilyProperties2KHR(
+    VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties2KHR* pQueueFamilyProperties) const {
+    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
+    assert(physical_device_state);
+    return ValidateCommonGetPhysicalDeviceQueueFamilyProperties(report_data, physical_device_state, *pQueueFamilyPropertyCount,
+                                                                (nullptr == pQueueFamilyProperties),
+                                                                "vkGetPhysicalDeviceQueueFamilyProperties2KHR()");
+}
+
+bool BestPractices::PreCallValidateGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
+                                                                      uint32_t* pSurfaceFormatCount,
+                                                                      VkSurfaceFormatKHR* pSurfaceFormats) const {
+    if (!pSurfaceFormats) return false;
+    const auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
+    const auto& call_state = physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState;
+    bool skip = false;
+    if (call_state == UNCALLED) {
+        // Since we haven't recorded a preliminary value of *pSurfaceFormatCount, that likely means that the application didn't
+        // previously call this function with a NULL value of pSurfaceFormats:
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+                        HandleToUint64(physicalDevice), kVUID_Core_DevLimit_MustQueryCount,
+                        "vkGetPhysicalDeviceSurfaceFormatsKHR() called with non-NULL pSurfaceFormatCount, but no prior "
+                        "positive value has been seen for pSurfaceFormats.");
+    } else {
+        auto prev_format_count = (uint32_t)physical_device_state->surface_formats.size();
+        if (*pSurfaceFormatCount > prev_format_count) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+                            HandleToUint64(physicalDevice), kVUID_Core_DevLimit_CountMismatch,
+                            "vkGetPhysicalDeviceSurfaceFormatsKHR() called with non-NULL pSurfaceFormatCount, and with "
+                            "pSurfaceFormatCount set to a value (%u) that is greater than the value (%u) that was returned "
+                            "when pSurfaceFormatCount was NULL.",
+                            *pSurfaceFormatCount, prev_format_count);
+        }
+    }
+    return skip;
+}
+
+bool BestPractices::PreCallValidateQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo,
+                                                   VkFence fence) const {
+    bool skip = false;
+
+    for (uint32_t bindIdx = 0; bindIdx < bindInfoCount; bindIdx++) {
+        const VkBindSparseInfo& bindInfo = pBindInfo[bindIdx];
+        // Store the sparse-binding image_state objects; after binding is complete, make sure that any images requiring metadata have it bound
+        std::unordered_set<const IMAGE_STATE*> sparse_images;
+        // Track images getting metadata bound by this call in a set, it'll be recorded into the image_state
+        // in RecordQueueBindSparse.
+        std::unordered_set<const IMAGE_STATE*> sparse_images_with_metadata;
+        // If we're binding sparse image memory make sure reqs were queried and note if metadata is required and bound
+        for (uint32_t i = 0; i < bindInfo.imageBindCount; ++i) {
+            const auto& image_bind = bindInfo.pImageBinds[i];
+            auto image_state = GetImageState(image_bind.image);
+            if (!image_state)
+                continue;  // Param/Object validation should report image_bind.image handles being invalid, so just skip here.
+            sparse_images.insert(image_state);
+            if (image_state->createInfo.flags & VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT) {
+                if (!image_state->get_sparse_reqs_called || image_state->sparse_requirements.empty()) {
+                    // For now just warning if sparse image binding occurs without calling to get reqs first
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                    HandleToUint64(image_state->image), kVUID_Core_MemTrack_InvalidState,
+                                    "vkQueueBindSparse(): Binding sparse memory to %s without first calling "
+                                    "vkGetImageSparseMemoryRequirements[2KHR]() to retrieve requirements.",
+                                    report_data->FormatHandle(image_state->image).c_str());
+                }
+            }
+            if (!image_state->memory_requirements_checked) {
+                // For now just warning if sparse image binding occurs without calling to get reqs first
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                HandleToUint64(image_state->image), kVUID_Core_MemTrack_InvalidState,
+                                "vkQueueBindSparse(): Binding sparse memory to %s without first calling "
+                                "vkGetImageMemoryRequirements() to retrieve requirements.",
+                                report_data->FormatHandle(image_state->image).c_str());
+            }
+        }
+        for (uint32_t i = 0; i < bindInfo.imageOpaqueBindCount; ++i) {
+            const auto& image_opaque_bind = bindInfo.pImageOpaqueBinds[i];
+            auto image_state = GetImageState(bindInfo.pImageOpaqueBinds[i].image);
+            if (!image_state)
+                continue;  // Param/Object validation should report image_bind.image handles being invalid, so just skip here.
+            sparse_images.insert(image_state);
+            if (image_state->createInfo.flags & VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT) {
+                if (!image_state->get_sparse_reqs_called || image_state->sparse_requirements.empty()) {
+                    // For now just warning if sparse image binding occurs without calling to get reqs first
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                    HandleToUint64(image_state->image), kVUID_Core_MemTrack_InvalidState,
+                                    "vkQueueBindSparse(): Binding opaque sparse memory to %s without first calling "
+                                    "vkGetImageSparseMemoryRequirements[2KHR]() to retrieve requirements.",
+                                    report_data->FormatHandle(image_state->image).c_str());
+                }
+            }
+            if (!image_state->memory_requirements_checked) {
+                // For now just warning if sparse image binding occurs without calling to get reqs first
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                HandleToUint64(image_state->image), kVUID_Core_MemTrack_InvalidState,
+                                "vkQueueBindSparse(): Binding opaque sparse memory to %s without first calling "
+                                "vkGetImageMemoryRequirements() to retrieve requirements.",
+                                report_data->FormatHandle(image_state->image).c_str());
+            }
+            for (uint32_t j = 0; j < image_opaque_bind.bindCount; ++j) {
+                if (image_opaque_bind.pBinds[j].flags & VK_SPARSE_MEMORY_BIND_METADATA_BIT) {
+                    sparse_images_with_metadata.insert(image_state);
+                }
+            }
+        }
+        for (const auto& sparse_image_state : sparse_images) {
+            if (sparse_image_state->sparse_metadata_required && !sparse_image_state->sparse_metadata_bound &&
+                sparse_images_with_metadata.find(sparse_image_state) == sparse_images_with_metadata.end()) {
+                // Warn if sparse image binding metadata required for image with sparse binding, but metadata not bound
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                HandleToUint64(sparse_image_state->image), kVUID_Core_MemTrack_InvalidState,
+                                "vkQueueBindSparse(): Binding sparse memory to %s which requires a metadata aspect but no "
+                                "binding with VK_SPARSE_MEMORY_BIND_METADATA_BIT set was made.",
+                                report_data->FormatHandle(sparse_image_state->image).c_str());
+            }
+        }
+    }
+
+    return skip;
+}
+
+void BestPractices::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo,
+                                                  VkFence fence, VkResult result) {
+    if (result != VK_SUCCESS) return;
+
+    for (uint32_t bindIdx = 0; bindIdx < bindInfoCount; bindIdx++) {
+        const VkBindSparseInfo& bindInfo = pBindInfo[bindIdx];
+        for (uint32_t i = 0; i < bindInfo.imageOpaqueBindCount; ++i) {
+            const auto& image_opaque_bind = bindInfo.pImageOpaqueBinds[i];
+            auto image_state = GetImageState(bindInfo.pImageOpaqueBinds[i].image);
+            if (!image_state)
+                continue;  // Param/Object validation should report image_bind.image handles being invalid, so just skip here.
+            for (uint32_t j = 0; j < image_opaque_bind.bindCount; ++j) {
+                if (image_opaque_bind.pBinds[j].flags & VK_SPARSE_MEMORY_BIND_METADATA_BIT) {
+                    image_state->sparse_metadata_bound = true;
+                }
+            }
+        }
+    }
+}
+
+bool BestPractices::PreCallValidateCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount,
+                                                       const VkClearAttachment* pAttachments, uint32_t rectCount,
+                                                       const VkClearRect* pRects) const {
+    bool skip = false;
+    const CMD_BUFFER_STATE* cb_node = GetCBState(commandBuffer);
+    if (!cb_node) return skip;
+
+    // Warn if this is issued prior to any Draw Cmd and clears the entire attachment
+    if (!cb_node->hasDrawCmd && (cb_node->activeRenderPassBeginInfo.renderArea.extent.width == pRects[0].rect.extent.width) &&
+        (cb_node->activeRenderPassBeginInfo.renderArea.extent.height == pRects[0].rect.extent.height)) {
+        // There are times when an app needs to use ClearAttachments (generally when reusing a buffer inside a render pass).
+        // This warning should be made more specific. It'd be best to avoid triggering this test if it's a use that must call
+        // CmdClearAttachments.
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), kVUID_Core_DrawState_ClearCmdBeforeDraw,
+                        "vkCmdClearAttachments() issued on %s prior to any Draw Cmds. It is recommended you "
+                        "use RenderPass LOAD_OP_CLEAR on Attachments prior to any Draw.",
+                        report_data->FormatHandle(commandBuffer).c_str());
+    }
+
+    return skip;
+}
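For reference, GetAPIVersionName() above just unpacks the usual Vulkan version encoding (major in bits 22 and up, minor in bits 12-21, patch in bits 0-11). A minimal sketch using the standard VK_VERSION_* macros (illustrative only, not part of the patch):

    // Sketch: decomposing a packed Vulkan apiVersion the same way GetAPIVersionName() does.
    #include <vulkan/vulkan.h>
    #include <cstdio>

    int main() {
        const uint32_t version = VK_MAKE_VERSION(1, 1, 108);  // packs to 0x0040106c
        std::printf("%u.%u.%u (0x%08x)\n",
                    VK_VERSION_MAJOR(version),  // version >> 22           -> 1
                    VK_VERSION_MINOR(version),  // (version >> 12) & 0x3ff -> 1
                    VK_VERSION_PATCH(version),  // version & 0xfff         -> 108
                    version);
        return 0;
    }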
diff --git a/src/third_party/vulkan-validation-layers/src/layers/best_practices.h b/src/third_party/vulkan-validation-layers/src/layers/best_practices.h
new file mode 100644
index 0000000..1160803
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/best_practices.h
@@ -0,0 +1,137 @@
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Camden Stocker <camden@lunarg.com>
+ */
+
+#pragma once
+
+#include "chassis.h"
+#include "state_tracker.h"
+#include <string>
+
+static const uint32_t kMemoryObjectWarningLimit = 250;
+
+class BestPractices : public ValidationStateTracker {
+  public:
+    using StateTracker = ValidationStateTracker;
+
+    std::string GetAPIVersionName(uint32_t version) const;
+
+    bool PreCallValidateCreateInstance(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
+                                       VkInstance* pInstance) const;
+    void PreCallRecordCreateInstance(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
+                                     VkInstance* pInstance);
+    bool PreCallValidateCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo,
+                                     const VkAllocationCallbacks* pAllocator, VkDevice* pDevice) const;
+    bool PreCallValidateCreateBuffer(VkDevice device, const VkBufferCreateInfo* pCreateInfo,
+                                     const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer) const;
+    bool PreCallValidateCreateImage(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
+                                    VkImage* pImage) const;
+    bool PreCallValidateCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo,
+                                           const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain) const;
+    bool PreCallValidateCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
+                                                  const VkSwapchainCreateInfoKHR* pCreateInfos,
+                                                  const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains) const;
+    bool PreCallValidateCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo,
+                                         const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass) const;
+    bool PreCallValidateAllocateMemory(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo,
+                                       const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory) const;
+    void PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo,
+                                      const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory, VkResult result);
+    void PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator);
+    bool ValidateBindBufferMemory(VkBuffer buffer, const char* api_name) const;
+    bool PreCallValidateBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset) const;
+    bool PreCallValidateBindBufferMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos) const;
+    bool PreCallValidateBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
+                                             const VkBindBufferMemoryInfo* pBindInfos) const;
+    bool ValidateBindImageMemory(VkImage image, const char* api_name) const;
+    bool PreCallValidateBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) const;
+    bool PreCallValidateBindImageMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos) const;
+    bool PreCallValidateBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos) const;
+    bool PreCallValidateFreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator) const;
+    bool PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
+                                                const VkGraphicsPipelineCreateInfo* pCreateInfos,
+                                                const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines,
+                                                void* cgpl_state) const;
+    bool PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
+                                               const VkComputePipelineCreateInfo* pCreateInfos,
+                                               const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines,
+                                               void* pipe_state) const;
+
+    bool CheckPipelineStageFlags(std::string api_name, const VkPipelineStageFlags flags) const;
+    bool PreCallValidateQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence) const;
+    bool PreCallValidateCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const;
+    bool PreCallValidateCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const;
+    bool PreCallValidateCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents,
+                                      VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
+                                      uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers,
+                                      uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers,
+                                      uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) const;
+    bool PreCallValidateCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
+                                           VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
+                                           uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers,
+                                           uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers,
+                                           uint32_t imageMemoryBarrierCount,
+                                           const VkImageMemoryBarrier* pImageMemoryBarriers) const;
+    bool PreCallValidateCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
+                                          VkQueryPool queryPool, uint32_t query) const;
+    bool PreCallValidateCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex,
+                                uint32_t firstInstance) const;
+    bool PreCallValidateCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
+                                       uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) const;
+    bool PreCallValidateCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount,
+                                        uint32_t stride) const;
+    bool PreCallValidateCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+                                               uint32_t drawCount, uint32_t stride) const;
+    bool PreCallValidateCmdDispatch(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY,
+                                    uint32_t groupCountZ) const;
+    bool ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(VkPhysicalDevice physicalDevice, const char* api_name) const;
+    bool PreCallValidateGetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex,
+                                                            uint32_t* pDisplayCount, VkDisplayKHR* pDisplays) const;
+    bool PreCallValidateGetDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex,
+                                                       VkDisplayPlaneCapabilitiesKHR* pCapabilities) const;
+    bool PreCallValidateGetDisplayPlaneCapabilities2KHR(VkPhysicalDevice physicalDevice,
+                                                        const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo,
+                                                        VkDisplayPlaneCapabilities2KHR* pCapabilities) const;
+    bool PreCallValidateGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount,
+                                              VkImage* pSwapchainImages) const;
+    bool PreCallValidateGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount,
+                                                               VkQueueFamilyProperties* pQueueFamilyProperties) const;
+    bool PreCallValidateGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice,
+                                                                uint32_t* pQueueFamilyPropertyCount,
+                                                                VkQueueFamilyProperties2* pQueueFamilyProperties) const;
+    bool PreCallValidateGetPhysicalDeviceQueueFamilyProperties2KHR(VkPhysicalDevice physicalDevice,
+                                                                   uint32_t* pQueueFamilyPropertyCount,
+                                                                   VkQueueFamilyProperties2* pQueueFamilyProperties) const;
+    bool PreCallValidateGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
+                                                           uint32_t* pSurfaceFormatCount,
+                                                           VkSurfaceFormatKHR* pSurfaceFormats) const;
+    bool PreCallValidateBindAccelerationStructureMemoryNV(VkDevice device, uint32_t bindInfoCount,
+                                                          const VkBindAccelerationStructureMemoryInfoNV* pBindInfos) const;
+    bool PreCallValidateQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo,
+                                        VkFence fence) const;
+    void PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence,
+                                       VkResult result);
+    bool PreCallValidateCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount,
+                                            const VkClearAttachment* pAttachments, uint32_t rectCount,
+                                            const VkClearRect* pRects) const;
+
+  private:
+    uint32_t instance_api_version;
+
+    uint32_t num_mem_objects = 0;
+};
diff --git a/src/third_party/vulkan-validation-layers/src/layers/best_practices_error_enums.h b/src/third_party/vulkan-validation-layers/src/layers/best_practices_error_enums.h
new file mode 100644
index 0000000..cb2e744
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/best_practices_error_enums.h
@@ -0,0 +1,62 @@
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (C) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Camden Stocker <camden@lunarg.com>
+ */
+
+#ifndef BEST_PRACTICES_ERROR_ENUMS_H_
+#define BEST_PRACTICES_ERROR_ENUMS_H_
+
+// Suppress unused warning on Linux
+#if defined(__GNUC__)
+#define DECORATE_UNUSED __attribute__((unused))
+#else
+#define DECORATE_UNUSED
+#endif
+
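+// These identifiers are not spec-defined VUIDs; they are UNASSIGNED check IDs emitted by the BestPractices layer.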
+static const char DECORATE_UNUSED *kVUID_BestPractices_CreateInstance_ExtensionMismatch =
+    "UNASSIGNED-BestPractices-vkCreateInstance-extension-mismatch";
+static const char DECORATE_UNUSED *kVUID_BestPractices_CreateDevice_API_Mismatch =
+    "UNASSIGNED-BestPractices-vkCreateDevice-API-version-mismatch";
+static const char DECORATE_UNUSED *kVUID_BestPractices_SharingModeExclusive =
+    "UNASSIGNED-BestPractices-vkCreateBuffer-sharing-mode-exclusive";
+static const char DECORATE_UNUSED *kVUID_BestPractices_RenderPass_Attatchment =
+    "UNASSIGNED-BestPractices-vkCreateRenderPass-attatchment";
+static const char DECORATE_UNUSED *kVUID_BestPractices_AllocateMemory_TooManyObjects =
+    "UNASSIGNED-BestPractices-vkAllocateMemory-too-many-objects";
+static const char DECORATE_UNUSED *kVUID_BestPractices_CreatePipelines_MultiplePipelines =
+    "UNASSIGNED-BestPractices-vkCreatePipelines-multiple-pipelines-no-cache";
+static const char DECORATE_UNUSED *kVUID_BestPractices_PipelineStageFlags = "UNASSIGNED-BestPractices-pipeline-stage-flags";
+static const char DECORATE_UNUSED *kVUID_BestPractices_CmdDraw_InstanceCountZero =
+    "UNASSIGNED-BestPractices-vkCmdDraw-instance-count-zero";
+static const char DECORATE_UNUSED *kVUID_BestPractices_CmdDraw_DrawCountZero = "UNASSIGNED-BestPractices-vkCmdDraw-draw-count-zero";
+static const char DECORATE_UNUSED *kVUID_BestPractices_CmdDispatch_GroupCountZero =
+    "UNASSIGNED-BestPractices-vkCmdDispatch-group-count-zero";
+static const char DECORATE_UNUSED *kVUID_BestPractices_CreateDevice_PDFeaturesNotCalled =
+    "UNASSIGNED-BestPractices-vkCreateDevice-physical-device-features-not-retrieved";
+static const char DECORATE_UNUSED *kVUID_BestPractices_Swapchain_GetSurfaceNotCalled =
+    "UNASSIGNED-BestPractices-vkCreateSwapchainKHR-surface-not-retrieved";
+static const char DECORATE_UNUSED *kVUID_BestPractices_DisplayPlane_PropertiesNotCalled =
+    "UNASSIGNED-BestPractices-vkGetDisplayPlaneSupportedDisplaysKHR-properties-not-retrieved";
+static const char DECORATE_UNUSED *kVUID_BestPractices_BufferMemReqNotCalled =
+    "UNASSIGNED-BestPractices-vkBindBufferMemory-requirements-not-retrieved";
+static const char DECORATE_UNUSED *kVUID_BestPractices_ImageMemReqNotCalled =
+    "UNASSIGNED-BestPractices-vkBindImageMemory-requirements-not-retrieved";
+static const char DECORATE_UNUSED *kVUID_BestPractices_BindAccelNV_NoMemReqQuery =
+    "UNASSIGNED-BestPractices-BindAccelerationStructureMemoryNV-requirements-not-retrieved";
+
+#endif  // BEST_PRACTICES_ERROR_ENUMS_H_
diff --git a/src/third_party/vulkan-validation-layers/src/layers/buffer_validation.cpp b/src/third_party/vulkan-validation-layers/src/layers/buffer_validation.cpp
new file mode 100644
index 0000000..1f7fdc3
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/buffer_validation.cpp
@@ -0,0 +1,5529 @@
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (C) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ * Author: Shannon McPherson <shannon@lunarg.com>
+ */
+
+// Allow use of STL min and max functions in Windows
+#define NOMINMAX
+
+#include <cmath>
+#include <set>
+#include <sstream>
+#include <string>
+
+#include "vk_enum_string_helper.h"
+#include "vk_format_utils.h"
+#include "vk_layer_data.h"
+#include "vk_layer_utils.h"
+#include "vk_layer_logging.h"
+#include "vk_typemap_helper.h"
+
+#include "chassis.h"
+#include "core_validation.h"
+#include "shader_validation.h"
+#include "descriptor_sets.h"
+#include "buffer_validation.h"
+
+// Transfer VkImageSubresourceLayers into VkImageSubresourceRange struct
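+// (levelCount is always 1 because a VkImageSubresourceLayers addresses a single mip level.)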
+static VkImageSubresourceRange RangeFromLayers(const VkImageSubresourceLayers &subresource_layers) {
+    VkImageSubresourceRange subresource_range;
+    subresource_range.aspectMask = subresource_layers.aspectMask;
+    subresource_range.baseArrayLayer = subresource_layers.baseArrayLayer;
+    subresource_range.layerCount = subresource_layers.layerCount;
+    subresource_range.baseMipLevel = subresource_layers.mipLevel;
+    subresource_range.levelCount = 1;
+    return subresource_range;
+}
+
+IMAGE_STATE::IMAGE_STATE(VkImage img, const VkImageCreateInfo *pCreateInfo)
+    : image(img),
+      createInfo(*pCreateInfo),
+      valid(false),
+      acquired(false),
+      shared_presentable(false),
+      layout_locked(false),
+      get_sparse_reqs_called(false),
+      sparse_metadata_required(false),
+      sparse_metadata_bound(false),
+      imported_ahb(false),
+      has_ahb_format(false),
+      ahb_format(0),
+      full_range{},
+      create_from_swapchain(VK_NULL_HANDLE),
+      bind_swapchain(VK_NULL_HANDLE),
+      bind_swapchain_imageIndex(0),
+      sparse_requirements{} {
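+    // For concurrent sharing, deep-copy the queue family index array; the pointer supplied in the caller's create
+    // info is not guaranteed to remain valid after the create call returns.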
+    if ((createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) && (createInfo.queueFamilyIndexCount > 0)) {
+        uint32_t *pQueueFamilyIndices = new uint32_t[createInfo.queueFamilyIndexCount];
+        for (uint32_t i = 0; i < createInfo.queueFamilyIndexCount; i++) {
+            pQueueFamilyIndices[i] = pCreateInfo->pQueueFamilyIndices[i];
+        }
+        createInfo.pQueueFamilyIndices = pQueueFamilyIndices;
+    }
+
+    if (createInfo.flags & VK_IMAGE_CREATE_SPARSE_BINDING_BIT) {
+        sparse = true;
+    }
+    const auto format = createInfo.format;
+    VkImageSubresourceRange init_range{0, 0, VK_REMAINING_MIP_LEVELS, 0, VK_REMAINING_ARRAY_LAYERS};
+    if (FormatIsColor(format) || FormatIsMultiplane(format)) {
+        init_range.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;  // Normalization will expand this for multiplane
+    } else {
+        init_range.aspectMask =
+            (FormatHasDepth(format) ? VK_IMAGE_ASPECT_DEPTH_BIT : 0) | (FormatHasStencil(format) ? VK_IMAGE_ASPECT_STENCIL_BIT : 0);
+    }
+    full_range = NormalizeSubresourceRange(*this, init_range);
+
+    auto *externalMemoryInfo = lvl_find_in_chain<VkExternalMemoryImageCreateInfo>(pCreateInfo->pNext);
+    if (externalMemoryInfo) {
+        external_memory_handle = externalMemoryInfo->handleTypes;
+    }
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    auto external_format = lvl_find_in_chain<VkExternalFormatANDROID>(createInfo.pNext);
+    if (external_format) {
+        external_format_android = external_format->externalFormat;
+    } else {
+        // If externalFormat is zero, the effect is as if the VkExternalFormatANDROID structure was not present.
+        external_format_android = 0;
+    }
+#endif  // VK_USE_PLATFORM_ANDROID_KHR
+}
+
+bool IMAGE_STATE::IsCreateInfoEqual(const VkImageCreateInfo &other_createInfo) const {
+    bool is_equal = (createInfo.sType == other_createInfo.sType) && (createInfo.flags == other_createInfo.flags);
+    is_equal = is_equal && IsImageTypeEqual(other_createInfo) && IsFormatEqual(other_createInfo);
+    is_equal = is_equal && IsMipLevelsEqual(other_createInfo) && IsArrayLayersEqual(other_createInfo);
+    is_equal = is_equal && IsUsageEqual(other_createInfo) && IsInitialLayoutEqual(other_createInfo);
+    is_equal = is_equal && IsExtentEqual(other_createInfo) && IsTilingEqual(other_createInfo);
+    is_equal = is_equal && IsSamplesEqual(other_createInfo) && IsSharingModeEqual(other_createInfo);
+    return is_equal && IsQueueFamilyIndicesEqual(other_createInfo);
+}
+
+// Check image compatibility rules for VK_NV_dedicated_allocation_image_aliasing
+bool IMAGE_STATE::IsCreateInfoDedicatedAllocationImageAliasingCompatible(const VkImageCreateInfo &other_createInfo) const {
+    bool is_compatible = (createInfo.sType == other_createInfo.sType) && (createInfo.flags == other_createInfo.flags);
+    is_compatible = is_compatible && IsImageTypeEqual(other_createInfo) && IsFormatEqual(other_createInfo);
+    is_compatible = is_compatible && IsMipLevelsEqual(other_createInfo);
+    is_compatible = is_compatible && IsUsageEqual(other_createInfo) && IsInitialLayoutEqual(other_createInfo);
+    is_compatible = is_compatible && IsSamplesEqual(other_createInfo) && IsSharingModeEqual(other_createInfo);
+    is_compatible = is_compatible && IsQueueFamilyIndicesEqual(other_createInfo) && IsTilingEqual(other_createInfo);
+
+    is_compatible = is_compatible && createInfo.extent.width <= other_createInfo.extent.width &&
+                    createInfo.extent.height <= other_createInfo.extent.height &&
+                    createInfo.extent.depth <= other_createInfo.extent.depth &&
+                    createInfo.arrayLayers <= other_createInfo.arrayLayers;
+    return is_compatible;
+}
+
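+// Two images may alias only if both were created with VK_IMAGE_CREATE_ALIAS_BIT and are either bound to the same
+// memory at the same offset with identical create info, or are bound to the same swapchain.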
+bool IMAGE_STATE::IsCompatibleAliasing(IMAGE_STATE *other_image_state) {
+    if (!(createInfo.flags & other_image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT)) return false;
+    if ((create_from_swapchain == VK_NULL_HANDLE) && (binding.mem == other_image_state->binding.mem) &&
+        (binding.mem != VK_NULL_HANDLE) && (binding.offset == other_image_state->binding.offset) &&
+        IsCreateInfoEqual(other_image_state->createInfo)) {
+        return true;
+    }
+    if ((bind_swapchain == other_image_state->bind_swapchain) && (bind_swapchain != VK_NULL_HANDLE)) {
+        return true;
+    }
+    return false;
+}
+
+IMAGE_VIEW_STATE::IMAGE_VIEW_STATE(const std::shared_ptr<IMAGE_STATE> &im, VkImageView iv, const VkImageViewCreateInfo *ci)
+    : image_view(iv),
+      create_info(*ci),
+      normalized_subresource_range(ci->subresourceRange),
+      samplerConversion(VK_NULL_HANDLE),
+      image_state(im) {
+    auto *conversionInfo = lvl_find_in_chain<VkSamplerYcbcrConversionInfo>(create_info.pNext);
+    if (conversionInfo) samplerConversion = conversionInfo->conversion;
+    if (image_state) {
+        // A light normalization of the createInfo range
+        auto &sub_res_range = create_info.subresourceRange;
+        sub_res_range.levelCount = ResolveRemainingLevels(&sub_res_range, image_state->createInfo.mipLevels);
+        sub_res_range.layerCount = ResolveRemainingLayers(&sub_res_range, image_state->createInfo.arrayLayers);
+
+        // Cache a full normalization (for "full image/whole image" comparisons)
+        normalized_subresource_range = NormalizeSubresourceRange(*image_state, ci->subresourceRange);
+        samples = image_state->createInfo.samples;
+        descriptor_format_bits = DescriptorRequirementsBitsFromFormat(create_info.format);
+    }
+}
+
+uint32_t FullMipChainLevels(uint32_t height, uint32_t width, uint32_t depth) {
+    // uint cast applies floor()
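+    // e.g. a 1024x768x1 extent yields 1 + floor(log2(1024)) = 11 mip levels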
+    return 1u + (uint32_t)log2(std::max({height, width, depth}));
+}
+
+uint32_t FullMipChainLevels(VkExtent3D extent) { return FullMipChainLevels(extent.height, extent.width, extent.depth); }
+
+uint32_t FullMipChainLevels(VkExtent2D extent) { return FullMipChainLevels(extent.height, extent.width); }
+
+VkImageSubresourceRange NormalizeSubresourceRange(const IMAGE_STATE &image_state, const VkImageSubresourceRange &range) {
+    const VkImageCreateInfo &image_create_info = image_state.createInfo;
+    VkImageSubresourceRange norm = range;
+    norm.levelCount = ResolveRemainingLevels(&range, image_create_info.mipLevels);
+
+    // Special case for 3D images with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR flag bit, where <extent.depth> and
+    // <arrayLayers> can potentially alias.
+    uint32_t layer_limit = (0 != (image_create_info.flags & VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR))
+                               ? image_create_info.extent.depth
+                               : image_create_info.arrayLayers;
+    norm.layerCount = ResolveRemainingLayers(&range, layer_limit);
+
+    // For multiplanar formats, VK_IMAGE_ASPECT_COLOR_BIT is equivalent to the union of the individual plane aspects
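+    // e.g. for a 2-plane format such as VK_FORMAT_G8_B8R8_2PLANE_420_UNORM, COLOR_BIT normalizes to PLANE_0 | PLANE_1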
+    VkImageAspectFlags &aspect_mask = norm.aspectMask;
+    if (FormatIsMultiplane(image_create_info.format)) {
+        if (aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) {
+            aspect_mask &= ~VK_IMAGE_ASPECT_COLOR_BIT;
+            aspect_mask |= (VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT);
+            if (FormatPlaneCount(image_create_info.format) > 2) {
+                aspect_mask |= VK_IMAGE_ASPECT_PLANE_2_BIT;
+            }
+        }
+    }
+    return norm;
+}
+
+template <class OBJECT, class LAYOUT>
+void CoreChecks::SetLayout(OBJECT *pObject, VkImage image, VkImageSubresource range, const LAYOUT &layout) {
+    ImageSubresourcePair imgpair = {image, true, range};
+    SetLayout(pObject, imgpair, layout, VK_IMAGE_ASPECT_COLOR_BIT);
+    SetLayout(pObject, imgpair, layout, VK_IMAGE_ASPECT_DEPTH_BIT);
+    SetLayout(pObject, imgpair, layout, VK_IMAGE_ASPECT_STENCIL_BIT);
+    SetLayout(pObject, imgpair, layout, VK_IMAGE_ASPECT_METADATA_BIT);
+    if (device_extensions.vk_khr_sampler_ycbcr_conversion) {
+        SetLayout(pObject, imgpair, layout, VK_IMAGE_ASPECT_PLANE_0_BIT_KHR);
+        SetLayout(pObject, imgpair, layout, VK_IMAGE_ASPECT_PLANE_1_BIT_KHR);
+        SetLayout(pObject, imgpair, layout, VK_IMAGE_ASPECT_PLANE_2_BIT_KHR);
+    }
+}
+
+template <class OBJECT, class LAYOUT>
+void CoreChecks::SetLayout(OBJECT *pObject, ImageSubresourcePair imgpair, const LAYOUT &layout, VkImageAspectFlags aspectMask) {
+    if (imgpair.subresource.aspectMask & aspectMask) {
+        imgpair.subresource.aspectMask = aspectMask;
+        SetLayout(pObject, imgpair, layout);
+    }
+}
+
+// Set the layout in supplied map
+void CoreChecks::SetLayout(ImageSubresPairLayoutMap &imageLayoutMap, ImageSubresourcePair imgpair, VkImageLayout layout) {
+    auto it = imageLayoutMap.find(imgpair);
+    if (it != imageLayoutMap.end()) {
+        it->second.layout = layout;  // Update
+    } else {
+        imageLayoutMap[imgpair].layout = layout;  // Insert
+    }
+}
+
+bool CoreChecks::FindLayoutVerifyLayout(ImageSubresourcePair imgpair, VkImageLayout &layout, const VkImageAspectFlags aspectMask) {
+    if (!(imgpair.subresource.aspectMask & aspectMask)) {
+        return false;
+    }
+    VkImageAspectFlags oldAspectMask = imgpair.subresource.aspectMask;
+    imgpair.subresource.aspectMask = aspectMask;
+    auto imgsubIt = imageLayoutMap.find(imgpair);
+    if (imgsubIt == imageLayoutMap.end()) {
+        return false;
+    }
+    if (layout != VK_IMAGE_LAYOUT_MAX_ENUM && layout != imgsubIt->second.layout) {
+        log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(imgpair.image),
+                kVUID_Core_DrawState_InvalidLayout,
+                "Cannot query for %s layout when combined aspect mask %d has multiple layout types: %s and %s",
+                report_data->FormatHandle(imgpair.image).c_str(), oldAspectMask, string_VkImageLayout(layout),
+                string_VkImageLayout(imgsubIt->second.layout));
+    }
+    layout = imgsubIt->second.layout;
+    return true;
+}
+
+// Find layout(s) on the global level
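+// Probes each aspect in turn; if no per-subresource entry is found, falls back to the image-level (no-subresource) entry.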
+bool CoreChecks::FindGlobalLayout(ImageSubresourcePair imgpair, VkImageLayout &layout) {
+    layout = VK_IMAGE_LAYOUT_MAX_ENUM;
+    FindLayoutVerifyLayout(imgpair, layout, VK_IMAGE_ASPECT_COLOR_BIT);
+    FindLayoutVerifyLayout(imgpair, layout, VK_IMAGE_ASPECT_DEPTH_BIT);
+    FindLayoutVerifyLayout(imgpair, layout, VK_IMAGE_ASPECT_STENCIL_BIT);
+    FindLayoutVerifyLayout(imgpair, layout, VK_IMAGE_ASPECT_METADATA_BIT);
+    if (device_extensions.vk_khr_sampler_ycbcr_conversion) {
+        FindLayoutVerifyLayout(imgpair, layout, VK_IMAGE_ASPECT_PLANE_0_BIT_KHR);
+        FindLayoutVerifyLayout(imgpair, layout, VK_IMAGE_ASPECT_PLANE_1_BIT_KHR);
+        FindLayoutVerifyLayout(imgpair, layout, VK_IMAGE_ASPECT_PLANE_2_BIT_KHR);
+    }
+    if (layout == VK_IMAGE_LAYOUT_MAX_ENUM) {
+        imgpair = {imgpair.image, false, VkImageSubresource()};
+        auto imgsubIt = imageLayoutMap.find(imgpair);
+        if (imgsubIt == imageLayoutMap.end()) return false;
+        layout = imgsubIt->second.layout;
+    }
+    return true;
+}
+
+bool CoreChecks::FindLayouts(VkImage image, std::vector<VkImageLayout> &layouts) const {
+    auto sub_data = imageSubresourceMap.find(image);
+    if (sub_data == imageSubresourceMap.end()) return false;
+    auto image_state = GetImageState(image);
+    if (!image_state) return false;
+    bool ignoreGlobal = false;
+    // TODO: Make this robust for >1 aspect mask. Now it will just say ignore potential errors in this case.
+    if (sub_data->second.size() >= (image_state->createInfo.arrayLayers * image_state->createInfo.mipLevels + 1)) {
+        ignoreGlobal = true;
+    }
+    for (auto imgsubpair : sub_data->second) {
+        if (ignoreGlobal && !imgsubpair.hasSubresource) continue;
+        auto img_data = imageLayoutMap.find(imgsubpair);
+        if (img_data != imageLayoutMap.end()) {
+            layouts.push_back(img_data->second.layout);
+        }
+    }
+    return true;
+}
+
+bool CoreChecks::FindLayout(const ImageSubresPairLayoutMap &imageLayoutMap, ImageSubresourcePair imgpair, VkImageLayout &layout,
+                            const VkImageAspectFlags aspectMask) {
+    if (!(imgpair.subresource.aspectMask & aspectMask)) {
+        return false;
+    }
+    imgpair.subresource.aspectMask = aspectMask;
+    auto imgsubIt = imageLayoutMap.find(imgpair);
+    if (imgsubIt == imageLayoutMap.end()) {
+        return false;
+    }
+    layout = imgsubIt->second.layout;
+    return true;
+}
+
+// find layout in supplied map
+bool CoreChecks::FindLayout(const ImageSubresPairLayoutMap &imageLayoutMap, ImageSubresourcePair imgpair,
+                            VkImageLayout &layout) const {
+    layout = VK_IMAGE_LAYOUT_MAX_ENUM;
+    FindLayout(imageLayoutMap, imgpair, layout, VK_IMAGE_ASPECT_COLOR_BIT);
+    FindLayout(imageLayoutMap, imgpair, layout, VK_IMAGE_ASPECT_DEPTH_BIT);
+    FindLayout(imageLayoutMap, imgpair, layout, VK_IMAGE_ASPECT_STENCIL_BIT);
+    FindLayout(imageLayoutMap, imgpair, layout, VK_IMAGE_ASPECT_METADATA_BIT);
+    if (device_extensions.vk_khr_sampler_ycbcr_conversion) {
+        FindLayout(imageLayoutMap, imgpair, layout, VK_IMAGE_ASPECT_PLANE_0_BIT_KHR);
+        FindLayout(imageLayoutMap, imgpair, layout, VK_IMAGE_ASPECT_PLANE_1_BIT_KHR);
+        FindLayout(imageLayoutMap, imgpair, layout, VK_IMAGE_ASPECT_PLANE_2_BIT_KHR);
+    }
+    // Image+subresource not found, look for image handle w/o subresource
+    if (layout == VK_IMAGE_LAYOUT_MAX_ENUM) {
+        imgpair = {imgpair.image, false, VkImageSubresource()};
+        auto imgsubIt = imageLayoutMap.find(imgpair);
+        if (imgsubIt == imageLayoutMap.end()) return false;
+        layout = imgsubIt->second.layout;
+    }
+    return true;
+}
+
+// Set the layout on the global level
+void CoreChecks::SetGlobalLayout(ImageSubresourcePair imgpair, const VkImageLayout &layout) {
+    VkImage &image = imgpair.image;
+    auto data = imageLayoutMap.find(imgpair);
+    if (data != imageLayoutMap.end()) {
+        data->second.layout = layout;  // Update
+    } else {
+        imageLayoutMap[imgpair].layout = layout;  // Insert
+    }
+    auto &image_subresources = imageSubresourceMap[image];
+    auto subresource = std::find(image_subresources.begin(), image_subresources.end(), imgpair);
+    if (subresource == image_subresources.end()) {
+        image_subresources.push_back(imgpair);
+    }
+}
+
+// Set image layout for given VkImageSubresourceRange struct
+void CoreChecks::SetImageLayout(CMD_BUFFER_STATE *cb_node, const IMAGE_STATE &image_state,
+                                const VkImageSubresourceRange &image_subresource_range, VkImageLayout layout,
+                                VkImageLayout expected_layout) {
+    auto *subresource_map = GetImageSubresourceLayoutMap(cb_node, image_state);
+    assert(subresource_map);  // the non-const getter must return a valid pointer
+    if (subresource_map->SetSubresourceRangeLayout(*cb_node, image_subresource_range, layout, expected_layout)) {
+        cb_node->image_layout_change_count++;  // Change the version of this data to force revalidation
+    }
+}
+
+// Set the initial image layout for all slices of an image view
+void CoreChecks::SetImageViewInitialLayout(CMD_BUFFER_STATE *cb_node, const IMAGE_VIEW_STATE &view_state, VkImageLayout layout) {
+    if (disabled.image_layout_validation) {
+        return;
+    }
+    IMAGE_STATE *image_state = view_state.image_state.get();
+    auto *subresource_map = GetImageSubresourceLayoutMap(cb_node, *image_state);
+    subresource_map->SetSubresourceRangeInitialLayout(*cb_node, view_state.normalized_subresource_range, layout, &view_state);
+}
+
+// Set the initial image layout for a passed non-normalized subresource range
+void CoreChecks::SetImageInitialLayout(CMD_BUFFER_STATE *cb_node, const IMAGE_STATE &image_state,
+                                       const VkImageSubresourceRange &range, VkImageLayout layout) {
+    auto *subresource_map = GetImageSubresourceLayoutMap(cb_node, image_state);
+    assert(subresource_map);
+    subresource_map->SetSubresourceRangeInitialLayout(*cb_node, NormalizeSubresourceRange(image_state, range), layout);
+}
+
+void CoreChecks::SetImageInitialLayout(CMD_BUFFER_STATE *cb_node, VkImage image, const VkImageSubresourceRange &range,
+                                       VkImageLayout layout) {
+    const IMAGE_STATE *image_state = GetImageState(image);
+    if (!image_state) return;
+    SetImageInitialLayout(cb_node, *image_state, range, layout);
+}
+
+void CoreChecks::SetImageInitialLayout(CMD_BUFFER_STATE *cb_node, const IMAGE_STATE &image_state,
+                                       const VkImageSubresourceLayers &layers, VkImageLayout layout) {
+    SetImageInitialLayout(cb_node, image_state, RangeFromLayers(layers), layout);
+}
+
+// Set image layout for all slices of an image view
+void CoreChecks::SetImageViewLayout(CMD_BUFFER_STATE *cb_node, const IMAGE_VIEW_STATE &view_state, VkImageLayout layout,
+                                    VkImageLayout layoutStencil) {
+    IMAGE_STATE *image_state = view_state.image_state.get();
+
+    VkImageSubresourceRange sub_range = view_state.normalized_subresource_range;
+    // When changing the layout of a 3D image subresource via a 2D or 2D_ARRAY image view, all depth slices of
+    // the subresource mip level(s) are transitioned, ignoring any layers restriction in the subresource info.
+    if ((image_state->createInfo.imageType == VK_IMAGE_TYPE_3D) && (view_state.create_info.viewType != VK_IMAGE_VIEW_TYPE_3D)) {
+        sub_range.baseArrayLayer = 0;
+        sub_range.layerCount = image_state->createInfo.extent.depth;
+    }
+
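+    // When a separate stencil layout was provided (VK_KHR_separate_depth_stencil_layouts), record the depth and
+    // stencil aspects individually since they may transition to different layouts.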
+    if (sub_range.aspectMask == (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT) && layoutStencil != kInvalidLayout) {
+        sub_range.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+        SetImageLayout(cb_node, *image_state, sub_range, layout);
+        sub_range.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
+        SetImageLayout(cb_node, *image_state, sub_range, layoutStencil);
+    } else {
+        SetImageLayout(cb_node, *image_state, sub_range, layout);
+    }
+}
+
+bool CoreChecks::ValidateRenderPassLayoutAgainstFramebufferImageUsage(RenderPassCreateVersion rp_version, VkImageLayout layout,
+                                                                      VkImage image, VkImageView image_view,
+                                                                      VkFramebuffer framebuffer, VkRenderPass renderpass,
+                                                                      uint32_t attachment_index, const char *variable_name) const {
+    bool skip = false;
+    auto image_state = GetImageState(image);
+    const char *vuid;
+    const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
+
+    if (!image_state) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image),
+                        "VUID-VkRenderPassBeginInfo-framebuffer-parameter",
+                        "Render Pass begin with %s uses %s where pAttachments[%" PRIu32 "] = %s, which refers to an invalid image",
+                        report_data->FormatHandle(renderpass).c_str(), report_data->FormatHandle(framebuffer).c_str(),
+                        attachment_index, report_data->FormatHandle(image_view).c_str());
+        return skip;
+    }
+
+    auto image_usage = image_state->createInfo.usage;
+
+    // Check for layouts that mismatch image usages in the framebuffer
+    if (layout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL && !(image_usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)) {
+        vuid = use_rp2 ? "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03094" : "VUID-vkCmdBeginRenderPass-initialLayout-00895";
+        skip |=
+            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image), vuid,
+                    "Layout/usage mismatch for attachment %u in %s"
+                    " - the %s is %s but the image attached to %s via %s"
+                    " was not created with VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT",
+                    attachment_index, report_data->FormatHandle(renderpass).c_str(), variable_name, string_VkImageLayout(layout),
+                    report_data->FormatHandle(framebuffer).c_str(), report_data->FormatHandle(image_view).c_str());
+    }
+
+    if (layout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL &&
+        !(image_usage & (VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT))) {
+        vuid = use_rp2 ? "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03097" : "VUID-vkCmdBeginRenderPass-initialLayout-00897";
+        skip |=
+            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image), vuid,
+                    "Layout/usage mismatch for attachment %u in %s"
+                    " - the %s is %s but the image attached to %s via %s"
+                    " was not created with VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT or VK_IMAGE_USAGE_SAMPLED_BIT",
+                    attachment_index, report_data->FormatHandle(renderpass).c_str(), variable_name, string_VkImageLayout(layout),
+                    report_data->FormatHandle(framebuffer).c_str(), report_data->FormatHandle(image_view).c_str());
+    }
+
+    if (layout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL && !(image_usage & VK_IMAGE_USAGE_TRANSFER_SRC_BIT)) {
+        vuid = use_rp2 ? "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03098" : "VUID-vkCmdBeginRenderPass-initialLayout-00898";
+        skip |=
+            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image), vuid,
+                    "Layout/usage mismatch for attachment %u in %s"
+                    " - the %s is %s but the image attached to %s via %s"
+                    " was not created with VK_IMAGE_USAGE_TRANSFER_SRC_BIT",
+                    attachment_index, report_data->FormatHandle(renderpass).c_str(), variable_name, string_VkImageLayout(layout),
+                    report_data->FormatHandle(framebuffer).c_str(), report_data->FormatHandle(image_view).c_str());
+    }
+
+    if (layout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL && !(image_usage & VK_IMAGE_USAGE_TRANSFER_DST_BIT)) {
+        vuid = use_rp2 ? "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03099" : "VUID-vkCmdBeginRenderPass-initialLayout-00899";
+        skip |=
+            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image), vuid,
+                    "Layout/usage mismatch for attachment %u in %s"
+                    " - the %s is %s but the image attached to %s via %s"
+                    " was not created with VK_IMAGE_USAGE_TRANSFER_DST_BIT",
+                    attachment_index, report_data->FormatHandle(renderpass).c_str(), variable_name, string_VkImageLayout(layout),
+                    report_data->FormatHandle(framebuffer).c_str(), report_data->FormatHandle(image_view).c_str());
+    }
+
+    if (device_extensions.vk_khr_maintenance2) {
+        if ((layout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL ||
+             layout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL ||
+             layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL ||
+             layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL) &&
+            !(image_usage & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
+            vuid = use_rp2 ? "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03096" : "VUID-vkCmdBeginRenderPass-initialLayout-01758";
+            skip |= log_msg(
+                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image), vuid,
+                "Layout/usage mismatch for attachment %u in %s"
+                " - the %s is %s but the image attached to %s via %s"
+                " was not created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT",
+                attachment_index, report_data->FormatHandle(renderpass).c_str(), variable_name, string_VkImageLayout(layout),
+                report_data->FormatHandle(framebuffer).c_str(), report_data->FormatHandle(image_view).c_str());
+        }
+    } else {
+        // The create render pass 2 extension requires maintenance 2 (the previous branch), so no vuid switch needed here.
+        if ((layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL ||
+             layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL) &&
+            !(image_usage & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(image), "VUID-vkCmdBeginRenderPass-initialLayout-00896",
+                            "Layout/usage mismatch for attachment %u in %s"
+                            " - the %s is %s but the image attached to %s via %s"
+                            " was not created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT",
+                            attachment_index, report_data->FormatHandle(renderpass).c_str(), variable_name,
+                            string_VkImageLayout(layout), report_data->FormatHandle(framebuffer).c_str(),
+                            report_data->FormatHandle(image_view).c_str());
+        }
+    }
+    return skip;
+}
+
+bool CoreChecks::VerifyFramebufferAndRenderPassLayouts(RenderPassCreateVersion rp_version, const CMD_BUFFER_STATE *pCB,
+                                                       const VkRenderPassBeginInfo *pRenderPassBegin,
+                                                       const FRAMEBUFFER_STATE *framebuffer_state) const {
+    bool skip = false;
+    auto const pRenderPassInfo = GetRenderPassState(pRenderPassBegin->renderPass)->createInfo.ptr();
+    auto const &framebufferInfo = framebuffer_state->createInfo;
+    const VkImageView *attachments = framebufferInfo.pAttachments;
+
+    auto render_pass = GetRenderPassState(pRenderPassBegin->renderPass)->renderPass;
+    auto framebuffer = framebuffer_state->framebuffer;
+
+    if (pRenderPassInfo->attachmentCount != framebufferInfo.attachmentCount) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(pCB->commandBuffer), kVUID_Core_DrawState_InvalidRenderpass,
+                        "You cannot start a render pass using a framebuffer with a different number of attachments.");
+    }
+
+    const auto *attachmentInfo = lvl_find_in_chain<VkRenderPassAttachmentBeginInfoKHR>(pRenderPassBegin->pNext);
+    if (((framebufferInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) != 0) && attachmentInfo != nullptr) {
+        attachments = attachmentInfo->pAttachments;
+    }
+
+    if (attachments != nullptr) {
+        const auto *const_pCB = static_cast<const CMD_BUFFER_STATE *>(pCB);
+        for (uint32_t i = 0; i < pRenderPassInfo->attachmentCount; ++i) {
+            auto image_view = attachments[i];
+            auto view_state = GetImageViewState(image_view);
+
+            if (!view_state) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                                HandleToUint64(pRenderPassBegin->renderPass), "VUID-VkRenderPassBeginInfo-framebuffer-parameter",
+                                "vkCmdBeginRenderPass(): %s pAttachments[%" PRIu32 "] = %s is not a valid VkImageView handle",
+                                report_data->FormatHandle(framebuffer_state->framebuffer).c_str(), i,
+                                report_data->FormatHandle(image_view).c_str());
+                continue;
+            }
+
+            const VkImage image = view_state->create_info.image;
+            const IMAGE_STATE *image_state = GetImageState(image);
+
+            if (!image_state) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                                HandleToUint64(pRenderPassBegin->renderPass), "VUID-VkRenderPassBeginInfo-framebuffer-parameter",
+                                "vkCmdBeginRenderPass(): %s pAttachments[%" PRIu32 "] =  %s references non-extant %s.",
+                                report_data->FormatHandle(framebuffer_state->framebuffer).c_str(), i,
+                                report_data->FormatHandle(image_view).c_str(), report_data->FormatHandle(image).c_str());
+                continue;
+            }
+            auto attachment_initial_layout = pRenderPassInfo->pAttachments[i].initialLayout;
+            auto final_layout = pRenderPassInfo->pAttachments[i].finalLayout;
+
+            // Use the const pCB (and the const overload of GetImageSubresourceLayoutMap) so that the lookup does not
+            // create map entries that don't already exist.
+            const ImageSubresourceLayoutMap *subresource_map =
+                (attachment_initial_layout != VK_IMAGE_LAYOUT_UNDEFINED) ? GetImageSubresourceLayoutMap(const_pCB, image) : nullptr;
+
+            if (subresource_map) {  // If no layout information for image yet, will be checked at QueueSubmit time
+                LayoutUseCheckAndMessage layout_check(subresource_map);
+                bool subres_skip = false;
+                auto subresource_cb = [this, i, attachment_initial_layout, &layout_check, &subres_skip](
+                                          const VkImageSubresource &subres, VkImageLayout layout, VkImageLayout initial_layout) {
+                    if (!layout_check.Check(subres, attachment_initial_layout, layout, initial_layout)) {
+                        subres_skip |=
+                            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    kVUID_Core_DrawState_InvalidRenderpass,
+                                    "You cannot start a render pass using attachment %u where the render pass initial layout is %s "
+                                    "and the %s layout of the attachment is %s. The layouts must match, or the render "
+                                    "pass initial layout for the attachment must be VK_IMAGE_LAYOUT_UNDEFINED",
+                                    i, string_VkImageLayout(attachment_initial_layout), layout_check.message,
+                                    string_VkImageLayout(layout_check.layout));
+                    }
+                    return !subres_skip;  // quit checking subresources once we fail once
+                };
+
+                subresource_map->ForRange(view_state->normalized_subresource_range, subresource_cb);
+                skip |= subres_skip;
+            }
+
+            ValidateRenderPassLayoutAgainstFramebufferImageUsage(rp_version, attachment_initial_layout, image, image_view,
+                                                                 framebuffer, render_pass, i, "initial layout");
+
+            ValidateRenderPassLayoutAgainstFramebufferImageUsage(rp_version, final_layout, image, image_view, framebuffer,
+                                                                 render_pass, i, "final layout");
+        }
+
+        for (uint32_t j = 0; j < pRenderPassInfo->subpassCount; ++j) {
+            auto &subpass = pRenderPassInfo->pSubpasses[j];
+            for (uint32_t k = 0; k < pRenderPassInfo->pSubpasses[j].inputAttachmentCount; ++k) {
+                auto &attachment_ref = subpass.pInputAttachments[k];
+                if (attachment_ref.attachment != VK_ATTACHMENT_UNUSED) {
+                    auto image_view = attachments[attachment_ref.attachment];
+                    auto view_state = GetImageViewState(image_view);
+
+                    if (view_state) {
+                        auto image = view_state->create_info.image;
+                        ValidateRenderPassLayoutAgainstFramebufferImageUsage(rp_version, attachment_ref.layout, image, image_view,
+                                                                             framebuffer, render_pass, attachment_ref.attachment,
+                                                                             "input attachment layout");
+                    }
+                }
+            }
+
+            for (uint32_t k = 0; k < pRenderPassInfo->pSubpasses[j].colorAttachmentCount; ++k) {
+                auto &attachment_ref = subpass.pColorAttachments[k];
+                if (attachment_ref.attachment != VK_ATTACHMENT_UNUSED) {
+                    auto image_view = attachments[attachment_ref.attachment];
+                    auto view_state = GetImageViewState(image_view);
+
+                    if (view_state) {
+                        auto image = view_state->create_info.image;
+                        ValidateRenderPassLayoutAgainstFramebufferImageUsage(rp_version, attachment_ref.layout, image, image_view,
+                                                                             framebuffer, render_pass, attachment_ref.attachment,
+                                                                             "color attachment layout");
+                        if (subpass.pResolveAttachments) {
+                            ValidateRenderPassLayoutAgainstFramebufferImageUsage(
+                                rp_version, attachment_ref.layout, image, image_view, framebuffer, render_pass,
+                                attachment_ref.attachment, "resolve attachment layout");
+                        }
+                    }
+                }
+            }
+
+            if (pRenderPassInfo->pSubpasses[j].pDepthStencilAttachment) {
+                auto &attachment_ref = *subpass.pDepthStencilAttachment;
+                if (attachment_ref.attachment != VK_ATTACHMENT_UNUSED) {
+                    auto image_view = attachments[attachment_ref.attachment];
+                    auto view_state = GetImageViewState(image_view);
+
+                    if (view_state) {
+                        auto image = view_state->create_info.image;
+                        ValidateRenderPassLayoutAgainstFramebufferImageUsage(rp_version, attachment_ref.layout, image, image_view,
+                                                                             framebuffer, render_pass, attachment_ref.attachment,
+                                                                             "depth/stencil attachment layout");
+                    }
+                }
+            }
+        }
+    }
+    return skip;
+}
+
+void CoreChecks::TransitionAttachmentRefLayout(CMD_BUFFER_STATE *pCB, FRAMEBUFFER_STATE *pFramebuffer,
+                                               const safe_VkAttachmentReference2KHR &ref) {
+    if (ref.attachment != VK_ATTACHMENT_UNUSED) {
+        auto image_view = GetAttachmentImageViewState(pFramebuffer, ref.attachment);
+        if (image_view) {
+            VkImageLayout stencil_layout = kInvalidLayout;
+            const auto *attachment_reference_stencil_layout = lvl_find_in_chain<VkAttachmentReferenceStencilLayoutKHR>(ref.pNext);
+            if (attachment_reference_stencil_layout) {
+                stencil_layout = attachment_reference_stencil_layout->stencilLayout;
+            }
+
+            SetImageViewLayout(pCB, *image_view, ref.layout, stencil_layout);
+        }
+    }
+}
+
+void CoreChecks::TransitionSubpassLayouts(CMD_BUFFER_STATE *pCB, const RENDER_PASS_STATE *render_pass_state,
+                                          const int subpass_index, FRAMEBUFFER_STATE *framebuffer_state) {
+    assert(render_pass_state);
+
+    if (framebuffer_state) {
+        auto const &subpass = render_pass_state->createInfo.pSubpasses[subpass_index];
+        for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
+            TransitionAttachmentRefLayout(pCB, framebuffer_state, subpass.pInputAttachments[j]);
+        }
+        for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
+            TransitionAttachmentRefLayout(pCB, framebuffer_state, subpass.pColorAttachments[j]);
+        }
+        if (subpass.pDepthStencilAttachment) {
+            TransitionAttachmentRefLayout(pCB, framebuffer_state, *subpass.pDepthStencilAttachment);
+        }
+    }
+}
+
+// Transition the layout state for renderpass attachments based on the BeginRenderPass() call. This includes:
+// 1. Transition into initialLayout state
+// 2. Transition from initialLayout to layout used in subpass 0
+void CoreChecks::TransitionBeginRenderPassLayouts(CMD_BUFFER_STATE *cb_state, const RENDER_PASS_STATE *render_pass_state,
+                                                  FRAMEBUFFER_STATE *framebuffer_state) {
+    // First transition into initialLayout
+    auto const rpci = render_pass_state->createInfo.ptr();
+    for (uint32_t i = 0; i < rpci->attachmentCount; ++i) {
+        auto view_state = GetAttachmentImageViewState(framebuffer_state, i);
+        if (view_state) {
+            VkImageLayout stencil_layout = kInvalidLayout;
+            const auto *attachment_description_stencil_layout =
+                lvl_find_in_chain<VkAttachmentDescriptionStencilLayoutKHR>(rpci->pAttachments[i].pNext);
+            if (attachment_description_stencil_layout) {
+                stencil_layout = attachment_description_stencil_layout->stencilInitialLayout;
+            }
+
+            SetImageViewLayout(cb_state, *view_state, rpci->pAttachments[i].initialLayout, stencil_layout);
+        }
+    }
+    // Now transition for first subpass (index 0)
+    TransitionSubpassLayouts(cb_state, render_pass_state, 0, framebuffer_state);
+}
+
+bool VerifyAspectsPresent(VkImageAspectFlags aspect_mask, VkFormat format) {
+    if ((aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) != 0) {
+        if (!(FormatIsColor(format) || FormatIsMultiplane(format))) return false;
+    }
+    if ((aspect_mask & VK_IMAGE_ASPECT_DEPTH_BIT) != 0) {
+        if (!FormatHasDepth(format)) return false;
+    }
+    if ((aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) != 0) {
+        if (!FormatHasStencil(format)) return false;
+    }
+    if (0 !=
+        (aspect_mask & (VK_IMAGE_ASPECT_PLANE_0_BIT_KHR | VK_IMAGE_ASPECT_PLANE_1_BIT_KHR | VK_IMAGE_ASPECT_PLANE_2_BIT_KHR))) {
+        if (FormatPlaneCount(format) == 1) return false;
+    }
+    return true;
+}
+
+// Verify an ImageMemoryBarrier's old/new ImageLayouts are compatible with the Image's ImageUsageFlags.
+bool CoreChecks::ValidateBarrierLayoutToImageUsage(const VkImageMemoryBarrier &img_barrier, bool new_not_old,
+                                                   VkImageUsageFlags usage_flags, const char *func_name,
+                                                   const char *barrier_pname) const {
+    bool skip = false;
+    const VkImageLayout layout = (new_not_old) ? img_barrier.newLayout : img_barrier.oldLayout;
+    const char *msg_code = kVUIDUndefined;  // sentinel value meaning "no error"
+
+    switch (layout) {
+        case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
+            if ((usage_flags & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) == 0) {
+                msg_code = "VUID-VkImageMemoryBarrier-oldLayout-01208";
+            }
+            break;
+        case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
+            if ((usage_flags & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) == 0) {
+                msg_code = "VUID-VkImageMemoryBarrier-oldLayout-01209";
+            }
+            break;
+        case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
+            if ((usage_flags & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) == 0) {
+                msg_code = "VUID-VkImageMemoryBarrier-oldLayout-01210";
+            }
+            break;
+        case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
+            if ((usage_flags & (VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT)) == 0) {
+                msg_code = "VUID-VkImageMemoryBarrier-oldLayout-01211";
+            }
+            break;
+        case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
+            if ((usage_flags & VK_IMAGE_USAGE_TRANSFER_SRC_BIT) == 0) {
+                msg_code = "VUID-VkImageMemoryBarrier-oldLayout-01212";
+            }
+            break;
+        case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
+            if ((usage_flags & VK_IMAGE_USAGE_TRANSFER_DST_BIT) == 0) {
+                msg_code = "VUID-VkImageMemoryBarrier-oldLayout-01213";
+            }
+            break;
+        case VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV:
+            if ((usage_flags & VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV) == 0) {
+                msg_code = "VUID-VkImageMemoryBarrier-oldLayout-02088";
+            }
+            break;
+        default:
+            // Other VkImageLayout values do not have VUs defined in this context.
+            break;
+    }
+
+    if (msg_code != kVUIDUndefined) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                        HandleToUint64(img_barrier.image), msg_code,
+                        "%s: Image barrier %s %s Layout=%s is not compatible with %s usage flags 0x%" PRIx32 ".", func_name,
+                        barrier_pname, ((new_not_old) ? "new" : "old"), string_VkImageLayout(layout),
+                        report_data->FormatHandle(img_barrier.image).c_str(), usage_flags);
+    }
+    return skip;
+}
+
+// Verify image barriers are compatible with the images they reference.
+bool CoreChecks::ValidateBarriersToImages(const CMD_BUFFER_STATE *cb_state, uint32_t imageMemoryBarrierCount,
+                                          const VkImageMemoryBarrier *pImageMemoryBarriers, const char *func_name) const {
+    bool skip = false;
+
+    // Scoreboard for checking for duplicate and inconsistent barriers to images
+    struct ImageBarrierScoreboardEntry {
+        uint32_t index;
+        // This is designed for temporary storage within the scope of the API call.  If retained storage of the barriers is
+        // required, copies should be made and smart or unique pointers used in some other structure (or this one refactored).
+        const VkImageMemoryBarrier *barrier;
+    };
+    using ImageBarrierScoreboardSubresMap = std::unordered_map<VkImageSubresourceRange, ImageBarrierScoreboardEntry>;
+    using ImageBarrierScoreboardImageMap = std::unordered_map<VkImage, ImageBarrierScoreboardSubresMap>;
+
+    // Scoreboard for duplicate layout transition barriers within the list
+    // Pointers retained in the scoreboard only have the lifetime of *this* call (i.e. within the scope of the API call)
+    ImageBarrierScoreboardImageMap layout_transitions;
+
+    for (uint32_t i = 0; i < imageMemoryBarrierCount; ++i) {
+        const auto &img_barrier = pImageMemoryBarriers[i];
+        const std::string barrier_pname = "pImageMemoryBarrier[" + std::to_string(i) + "]";
+
+        // Update the scoreboard of layout transitions and check for barriers affecting the same image and subresource
+        // TODO: a higher precision could be gained by adapting the command_buffer image_layout_map logic looking for conflicts
+        // at a per sub-resource level
+        if (img_barrier.oldLayout != img_barrier.newLayout) {
+            const ImageBarrierScoreboardEntry new_entry{i, &img_barrier};
+            const auto image_it = layout_transitions.find(img_barrier.image);
+            if (image_it != layout_transitions.end()) {
+                auto &subres_map = image_it->second;
+                auto subres_it = subres_map.find(img_barrier.subresourceRange);
+                if (subres_it != subres_map.end()) {
+                    auto &entry = subres_it->second;
+                    if ((entry.barrier->newLayout != img_barrier.oldLayout) &&
+                        (img_barrier.oldLayout != VK_IMAGE_LAYOUT_UNDEFINED)) {
+                        const VkImageSubresourceRange &range = img_barrier.subresourceRange;
+                        skip |= log_msg(
+                            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_state->commandBuffer), "VUID-VkImageMemoryBarrier-oldLayout-01197",
+                            "%s: %s conflicts with earlier entry pImageMemoryBarrier[%u]. %s"
+                            " subresourceRange: aspectMask=%u baseMipLevel=%u levelCount=%u, baseArrayLayer=%u, layerCount=%u; "
+                            "conflicting barrier transitions image layout from %s when earlier barrier transitioned to layout %s.",
+                            func_name, barrier_pname.c_str(), entry.index, report_data->FormatHandle(img_barrier.image).c_str(),
+                            range.aspectMask, range.baseMipLevel, range.levelCount, range.baseArrayLayer, range.layerCount,
+                            string_VkImageLayout(img_barrier.oldLayout), string_VkImageLayout(entry.barrier->newLayout));
+                    }
+                    entry = new_entry;
+                } else {
+                    subres_map[img_barrier.subresourceRange] = new_entry;
+                }
+            } else {
+                layout_transitions[img_barrier.image][img_barrier.subresourceRange] = new_entry;
+            }
+        }
+
+        auto image_state = GetImageState(img_barrier.image);
+        if (image_state) {
+            VkImageUsageFlags usage_flags = image_state->createInfo.usage;
+            skip |= ValidateBarrierLayoutToImageUsage(img_barrier, false, usage_flags, func_name, barrier_pname.c_str());
+            skip |= ValidateBarrierLayoutToImageUsage(img_barrier, true, usage_flags, func_name, barrier_pname.c_str());
+
+            // Make sure the layout can be transitioned; currently only presented shared presentable images are locked.
+            if (image_state->layout_locked) {
+                // TODO: Add unique id for error when available
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                    HandleToUint64(img_barrier.image), 0,
+                    "Attempting to transition shared presentable %s"
+                    " from layout %s to layout %s, but image has already been presented and cannot have its layout transitioned.",
+                    report_data->FormatHandle(img_barrier.image).c_str(), string_VkImageLayout(img_barrier.oldLayout),
+                    string_VkImageLayout(img_barrier.newLayout));
+            }
+
+            const VkImageCreateInfo &image_create_info = image_state->createInfo;
+            // For a Depth/Stencil image both aspects MUST be set
+            if (FormatIsDepthAndStencil(image_create_info.format)) {
+                auto const aspect_mask = img_barrier.subresourceRange.aspectMask;
+                if (enabled_features.separate_depth_stencil_layouts_features.separateDepthStencilLayouts) {
+                    if (!(aspect_mask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT))) {
+                        skip |=
+                            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                    HandleToUint64(img_barrier.image), "VUID-VkImageMemoryBarrier-image-03319",
+                                    "%s: Image barrier %s references %s of format %s that must have either the depth or stencil "
+                                    "aspects set, but its aspectMask is 0x%" PRIx32 ".",
+                                    func_name, barrier_pname.c_str(), report_data->FormatHandle(img_barrier.image).c_str(),
+                                    string_VkFormat(image_create_info.format), aspect_mask);
+                    }
+                } else {
+                    auto const ds_mask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+                    if ((aspect_mask & ds_mask) != (ds_mask)) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                        HandleToUint64(img_barrier.image), "VUID-VkImageMemoryBarrier-image-03320",
+                                        "%s: Image barrier %s references %s of format %s that must have the depth and stencil "
+                                        "aspects set, but its aspectMask is 0x%" PRIx32 ".",
+                                        func_name, barrier_pname.c_str(), report_data->FormatHandle(img_barrier.image).c_str(),
+                                        string_VkFormat(image_create_info.format), aspect_mask);
+                    }
+                }
+            }
+
+            const auto *subresource_map = GetImageSubresourceLayoutMap(cb_state, img_barrier.image);
+            if (img_barrier.oldLayout == VK_IMAGE_LAYOUT_UNDEFINED) {
+                // TODO: Set memory invalid which is in mem_tracker currently
+                // Not sure if this needs to be in the ForRange traversal, pulling it out as it is currently invariant with
+                // subresource.
+            } else if (subresource_map) {
+                bool subres_skip = false;
+                LayoutUseCheckAndMessage layout_check(subresource_map);
+                VkImageSubresourceRange normalized_isr = NormalizeSubresourceRange(*image_state, img_barrier.subresourceRange);
+                auto subres_callback = [this, img_barrier, cb_state, &layout_check, &subres_skip](
+                                           const VkImageSubresource &subres, VkImageLayout layout, VkImageLayout initial_layout) {
+                    if (!layout_check.Check(subres, img_barrier.oldLayout, layout, initial_layout)) {
+                        subres_skip =
+                            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                    HandleToUint64(cb_state->commandBuffer), "VUID-VkImageMemoryBarrier-oldLayout-01197",
+                                    "For %s you cannot transition the layout of aspect=%d level=%d layer=%d from %s when the "
+                                    "%s layout is %s.",
+                                    report_data->FormatHandle(img_barrier.image).c_str(), subres.aspectMask, subres.mipLevel,
+                                    subres.arrayLayer, string_VkImageLayout(img_barrier.oldLayout), layout_check.message,
+                                    string_VkImageLayout(layout_check.layout));
+                    }
+                    return !subres_skip;
+                };
+                subresource_map->ForRange(normalized_isr, subres_callback);
+                skip |= subres_skip;
+            }
+        }
+    }
+    return skip;
+}
+
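+// A barrier is a queue family ownership (QFO) release operation when it transfers ownership between
+// distinct queue families and was recorded on a pool belonging to the source queue family.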
+bool CoreChecks::IsReleaseOp(CMD_BUFFER_STATE *cb_state, const VkImageMemoryBarrier &barrier) const {
+    if (!IsTransferOp(&barrier)) return false;
+
+    auto pool = cb_state->command_pool.get();
+    return pool && TempIsReleaseOp<VkImageMemoryBarrier, true>(pool, &barrier);
+}
+
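+// Flag QFO transfer barriers that duplicate a release or acquire already recorded in this command buffer.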
+template <typename Barrier>
+bool CoreChecks::ValidateQFOTransferBarrierUniqueness(const char *func_name, const CMD_BUFFER_STATE *cb_state,
+                                                      uint32_t barrier_count, const Barrier *barriers) const {
+    using BarrierRecord = QFOTransferBarrier<Barrier>;
+    bool skip = false;
+    auto pool = cb_state->command_pool.get();
+    auto &barrier_sets = GetQFOBarrierSets(cb_state, typename BarrierRecord::Tag());
+    const char *barrier_name = BarrierRecord::BarrierName();
+    const char *handle_name = BarrierRecord::HandleName();
+    const char *transfer_type = nullptr;
+    for (uint32_t b = 0; b < barrier_count; b++) {
+        if (!IsTransferOp(&barriers[b])) continue;
+        const BarrierRecord *barrier_record = nullptr;
+        if (TempIsReleaseOp<Barrier, true /* Assume IsTransfer */>(pool, &barriers[b]) &&
+            !QueueFamilyIsSpecial(barriers[b].dstQueueFamilyIndex)) {
+            const auto found = barrier_sets.release.find(barriers[b]);
+            if (found != barrier_sets.release.cend()) {
+                barrier_record = &(*found);
+                transfer_type = "releasing";
+            }
+        } else if (IsAcquireOp<Barrier, true /*Assume IsTransfer */>(pool, &barriers[b]) &&
+                   !QueueFamilyIsSpecial(barriers[b].srcQueueFamilyIndex)) {
+            const auto found = barrier_sets.acquire.find(barriers[b]);
+            if (found != barrier_sets.acquire.cend()) {
+                barrier_record = &(*found);
+                transfer_type = "acquiring";
+            }
+        }
+        if (barrier_record != nullptr) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_state->commandBuffer), BarrierRecord::ErrMsgDuplicateQFOInCB(),
+                            "%s: %s at index %" PRIu32 " %s queue ownership of %s (%s), from srcQueueFamilyIndex %" PRIu32
+                            " to dstQueueFamilyIndex %" PRIu32 " duplicates existing barrier recorded in this command buffer.",
+                            func_name, barrier_name, b, transfer_type, handle_name,
+                            report_data->FormatHandle(barrier_record->handle).c_str(), barrier_record->srcQueueFamilyIndex,
+                            barrier_record->dstQueueFamilyIndex);
+        }
+    }
+    return skip;
+}
+
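+// Overloads that let the templated barrier code below look up the handle and state object
+// referenced by either an image or a buffer memory barrier.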
+VulkanTypedHandle BarrierTypedHandle(const VkImageMemoryBarrier &barrier) {
+    return VulkanTypedHandle(barrier.image, kVulkanObjectTypeImage);
+}
+
+const IMAGE_STATE *BarrierHandleState(const ValidationStateTracker &device_state, const VkImageMemoryBarrier &barrier) {
+    return device_state.GetImageState(barrier.image);
+}
+
+VulkanTypedHandle BarrierTypedHandle(const VkBufferMemoryBarrier &barrier) {
+    return VulkanTypedHandle(barrier.buffer, kVulkanObjectTypeBuffer);
+}
+
+const BUFFER_STATE *BarrierHandleState(const ValidationStateTracker &device_state, const VkBufferMemoryBarrier &barrier) {
+    return device_state.GetBufferState(barrier.buffer);
+}
+
+VkBuffer BarrierHandle(const VkBufferMemoryBarrier &barrier) { return barrier.buffer; }
+
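+// Record the QFO release/acquire barriers from this array into the command buffer's barrier sets, and
+// enqueue a submit-time queue family check for barriers on resources created with exclusive sharing mode.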
+template <typename Barrier>
+void CoreChecks::RecordBarrierArrayValidationInfo(const char *func_name, CMD_BUFFER_STATE *cb_state, uint32_t barrier_count,
+                                                  const Barrier *barriers) {
+    auto pool = cb_state->command_pool.get();
+    auto &barrier_sets = GetQFOBarrierSets(cb_state, typename QFOTransferBarrier<Barrier>::Tag());
+    for (uint32_t b = 0; b < barrier_count; b++) {
+        auto &barrier = barriers[b];
+        if (IsTransferOp(&barrier)) {
+            if (TempIsReleaseOp<Barrier, true /* Assume IsTransfer*/>(pool, &barrier) &&
+                !QueueFamilyIsSpecial(barrier.dstQueueFamilyIndex)) {
+                barrier_sets.release.emplace(barrier);
+            } else if (IsAcquireOp<Barrier, true /*Assume IsTransfer */>(pool, &barrier) &&
+                       !QueueFamilyIsSpecial(barrier.srcQueueFamilyIndex)) {
+                barrier_sets.acquire.emplace(barrier);
+            }
+        }
+
+        const uint32_t src_queue_family = barrier.srcQueueFamilyIndex;
+        const uint32_t dst_queue_family = barrier.dstQueueFamilyIndex;
+        if (!QueueFamilyIsIgnored(src_queue_family) && !QueueFamilyIsIgnored(dst_queue_family)) {
+            // Only enqueue submit time check if it is needed. If more submit time checks are added, change the criteria
+            // TODO create a better named list, or rename the submit time lists to something that matches the broader usage...
+            auto handle_state = BarrierHandleState(*this, barrier);
+            bool mode_concurrent = handle_state ? handle_state->createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT : false;
+            if (!mode_concurrent) {
+                const auto typed_handle = BarrierTypedHandle(barrier);
+                cb_state->queue_submit_functions.emplace_back(
+                    [func_name, cb_state, typed_handle, src_queue_family, dst_queue_family](
+                        const ValidationStateTracker *device_data, const QUEUE_STATE *queue_state) {
+                        return ValidateConcurrentBarrierAtSubmit(device_data, queue_state, func_name, cb_state, typed_handle,
+                                                                 src_queue_family, dst_queue_family);
+                    });
+            }
+        }
+    }
+}
+
+bool CoreChecks::ValidateBarriersQFOTransferUniqueness(const char *func_name, const CMD_BUFFER_STATE *cb_state,
+                                                       uint32_t bufferBarrierCount, const VkBufferMemoryBarrier *pBufferMemBarriers,
+                                                       uint32_t imageMemBarrierCount,
+                                                       const VkImageMemoryBarrier *pImageMemBarriers) const {
+    bool skip = false;
+    skip |= ValidateQFOTransferBarrierUniqueness(func_name, cb_state, bufferBarrierCount, pBufferMemBarriers);
+    skip |= ValidateQFOTransferBarrierUniqueness(func_name, cb_state, imageMemBarrierCount, pImageMemBarriers);
+    return skip;
+}
+
+void CoreChecks::RecordBarrierValidationInfo(const char *func_name, CMD_BUFFER_STATE *cb_state, uint32_t bufferBarrierCount,
+                                             const VkBufferMemoryBarrier *pBufferMemBarriers, uint32_t imageMemBarrierCount,
+                                             const VkImageMemoryBarrier *pImageMemBarriers) {
+    RecordBarrierArrayValidationInfo(func_name, cb_state, bufferBarrierCount, pBufferMemBarriers);
+    RecordBarrierArrayValidationInfo(func_name, cb_state, imageMemBarrierCount, pImageMemBarriers);
+}
+
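+// Track each QFO barrier seen during a queue submission in a scoreboard so that duplicates coming from
+// different command buffers in the same submit can be reported.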
+template <typename BarrierRecord, typename Scoreboard>
+bool CoreChecks::ValidateAndUpdateQFOScoreboard(const debug_report_data *report_data, const CMD_BUFFER_STATE *cb_state,
+                                                const char *operation, const BarrierRecord &barrier, Scoreboard *scoreboard) const {
+    // Record to the scoreboard or report that we have a duplication
+    bool skip = false;
+    auto inserted = scoreboard->insert(std::make_pair(barrier, cb_state));
+    if (!inserted.second && inserted.first->second != cb_state) {
+        // This is a duplication (but don't report duplicates from the same CB, as we do that at record time).
+        skip = log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                       HandleToUint64(cb_state->commandBuffer), BarrierRecord::ErrMsgDuplicateQFOInSubmit(),
+                       "%s: %s %s queue ownership of %s (%s), from srcQueueFamilyIndex %" PRIu32 " to dstQueueFamilyIndex %" PRIu32
+                       " duplicates existing barrier submitted in this batch from %s.",
+                       "vkQueueSubmit()", BarrierRecord::BarrierName(), operation, BarrierRecord::HandleName(),
+                       report_data->FormatHandle(barrier.handle).c_str(), barrier.srcQueueFamilyIndex, barrier.dstQueueFamilyIndex,
+                       report_data->FormatHandle(inserted.first->second->commandBuffer).c_str());
+    }
+    return skip;
+}
+
+template <typename Barrier>
+bool CoreChecks::ValidateQueuedQFOTransferBarriers(const CMD_BUFFER_STATE *cb_state,
+                                                   QFOTransferCBScoreboards<Barrier> *scoreboards) const {
+    using BarrierRecord = QFOTransferBarrier<Barrier>;
+    using TypeTag = typename BarrierRecord::Tag;
+    bool skip = false;
+    const auto &cb_barriers = GetQFOBarrierSets(cb_state, TypeTag());
+    const GlobalQFOTransferBarrierMap<Barrier> &global_release_barriers = GetGlobalQFOReleaseBarrierMap(TypeTag());
+    const char *barrier_name = BarrierRecord::BarrierName();
+    const char *handle_name = BarrierRecord::HandleName();
+    // No release should have an extant duplicate (WARNING)
+    for (const auto &release : cb_barriers.release) {
+        // Check the global pending release barriers
+        const auto set_it = global_release_barriers.find(release.handle);
+        if (set_it != global_release_barriers.cend()) {
+            const QFOTransferBarrierSet<Barrier> &set_for_handle = set_it->second;
+            const auto found = set_for_handle.find(release);
+            if (found != set_for_handle.cend()) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(cb_state->commandBuffer), BarrierRecord::ErrMsgDuplicateQFOSubmitted(),
+                                "%s: %s releasing queue ownership of %s (%s), from srcQueueFamilyIndex %" PRIu32
+                                " to dstQueueFamilyIndex %" PRIu32
+                                " duplicates existing barrier queued for execution, without intervening acquire operation.",
+                                "vkQueueSubmit()", barrier_name, handle_name, report_data->FormatHandle(found->handle).c_str(),
+                                found->srcQueueFamilyIndex, found->dstQueueFamilyIndex);
+            }
+        }
+        skip |= ValidateAndUpdateQFOScoreboard(report_data, cb_state, "releasing", release, &scoreboards->release);
+    }
+    // Each acquire must have a matching release (ERROR)
+    for (const auto &acquire : cb_barriers.acquire) {
+        const auto set_it = global_release_barriers.find(acquire.handle);
+        bool matching_release_found = false;
+        if (set_it != global_release_barriers.cend()) {
+            const QFOTransferBarrierSet<Barrier> &set_for_handle = set_it->second;
+            matching_release_found = set_for_handle.find(acquire) != set_for_handle.cend();
+        }
+        if (!matching_release_found) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_state->commandBuffer), BarrierRecord::ErrMsgMissingQFOReleaseInSubmit(),
+                            "%s: in submitted command buffer %s acquiring ownership of %s (%s), from srcQueueFamilyIndex %" PRIu32
+                            " to dstQueueFamilyIndex %" PRIu32 " has no matching release barrier queued for execution.",
+                            "vkQueueSubmit()", barrier_name, handle_name, report_data->FormatHandle(acquire.handle).c_str(),
+                            acquire.srcQueueFamilyIndex, acquire.dstQueueFamilyIndex);
+        }
+        skip |= ValidateAndUpdateQFOScoreboard(report_data, cb_state, "acquiring", acquire, &scoreboards->acquire);
+    }
+    return skip;
+}
+
+bool CoreChecks::ValidateQueuedQFOTransfers(const CMD_BUFFER_STATE *cb_state,
+                                            QFOTransferCBScoreboards<VkImageMemoryBarrier> *qfo_image_scoreboards,
+                                            QFOTransferCBScoreboards<VkBufferMemoryBarrier> *qfo_buffer_scoreboards) const {
+    bool skip = false;
+    skip |= ValidateQueuedQFOTransferBarriers<VkImageMemoryBarrier>(cb_state, qfo_image_scoreboards);
+    skip |= ValidateQueuedQFOTransferBarriers<VkBufferMemoryBarrier>(cb_state, qfo_buffer_scoreboards);
+    return skip;
+}
+
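+// At submit time, fold this command buffer's QFO barriers into the device-global map of pending
+// releases: releases are added, and acquires consume (erase) their matching releases.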
+template <typename Barrier>
+void CoreChecks::RecordQueuedQFOTransferBarriers(CMD_BUFFER_STATE *cb_state) {
+    using BarrierRecord = QFOTransferBarrier<Barrier>;
+    using TypeTag = typename BarrierRecord::Tag;
+    const auto &cb_barriers = GetQFOBarrierSets(cb_state, TypeTag());
+    GlobalQFOTransferBarrierMap<Barrier> &global_release_barriers = GetGlobalQFOReleaseBarrierMap(TypeTag());
+
+    // Add release barriers from this submit to the global map
+    for (const auto &release : cb_barriers.release) {
+        // the global barrier list is mapped by resource handle to allow cleanup on resource destruction
+        // NOTE: We're using [] because creation of a Set is a needed side effect for new handles
+        global_release_barriers[release.handle].insert(release);
+    }
+
+    // Erase acquired barriers from this submit from the global map -- essentially marking releases as consumed
+    for (const auto &acquire : cb_barriers.acquire) {
+        // NOTE: We're not using [] because we don't want to create entries for missing releases
+        auto set_it = global_release_barriers.find(acquire.handle);
+        if (set_it != global_release_barriers.end()) {
+            QFOTransferBarrierSet<Barrier> &set_for_handle = set_it->second;
+            set_for_handle.erase(acquire);
+            if (set_for_handle.size() == 0) {  // Clean up empty sets
+                global_release_barriers.erase(set_it);
+            }
+        }
+    }
+}
+
+void CoreChecks::RecordQueuedQFOTransfers(CMD_BUFFER_STATE *cb_state) {
+    RecordQueuedQFOTransferBarriers<VkImageMemoryBarrier>(cb_state);
+    RecordQueuedQFOTransferBarriers<VkBufferMemoryBarrier>(cb_state);
+}
+
+// Avoid making the template globally visible by exporting the one instance of it we need.
+void CoreChecks::EraseQFOImageRelaseBarriers(const VkImage &image) { EraseQFOReleaseBarriers<VkImageMemoryBarrier>(image); }
+
+void CoreChecks::TransitionImageLayouts(CMD_BUFFER_STATE *cb_state, uint32_t memBarrierCount,
+                                        const VkImageMemoryBarrier *pImgMemBarriers) {
+    for (uint32_t i = 0; i < memBarrierCount; ++i) {
+        const auto &mem_barrier = pImgMemBarriers[i];
+
+        // For ownership transfers, the barrier is specified twice; as a release
+        // operation on the yielding queue family, and as an acquire operation
+        // on the acquiring queue family. This barrier may also include a layout
+        // transition, which occurs 'between' the two operations. For validation
+        // purposes it doesn't seem important which side performs the layout
+        // transition, but it must not be performed twice. We'll arbitrarily
+        // choose to perform it as part of the acquire operation.
+        if (IsReleaseOp(cb_state, mem_barrier)) {
+            continue;
+        }
+
+        auto *image_state = GetImageState(mem_barrier.image);
+        if (!image_state) continue;
+        RecordTransitionImageLayout(cb_state, image_state, mem_barrier);
+        for (const auto &image : image_state->aliasing_images) {
+            image_state = GetImageState(image);
+            RecordTransitionImageLayout(cb_state, image_state, mem_barrier);
+        }
+    }
+}
+
+void CoreChecks::RecordTransitionImageLayout(CMD_BUFFER_STATE *cb_state, const IMAGE_STATE *image_state,
+                                             const VkImageMemoryBarrier &mem_barrier) {
+    VkImageSubresourceRange normalized_isr = NormalizeSubresourceRange(*image_state, mem_barrier.subresourceRange);
+    const auto &image_create_info = image_state->createInfo;
+
+    // Special case for 3D images with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR flag bit, where <extent.depth> and
+    // <arrayLayers> can potentially alias. When recording layout for the entire image, pre-emptively record layouts
+    // for all (potential) layer sub_resources.
+    if (0 != (image_create_info.flags & VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR)) {
+        normalized_isr.baseArrayLayer = 0;
+        normalized_isr.layerCount = image_create_info.extent.depth;  // Treat each depth slice as a layer subresource
+    }
+
+    SetImageLayout(cb_state, *image_state, normalized_isr, mem_barrier.newLayout, mem_barrier.oldLayout);
+}
+
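+// Verify that the layout the caller intends to use (explicit_layout) matches what this command buffer has
+// recorded for the image's subresources, and warn/error when it differs from the expected optimal layout.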
+bool CoreChecks::VerifyImageLayout(const CMD_BUFFER_STATE *cb_node, const IMAGE_STATE *image_state,
+                                   const VkImageSubresourceRange &range, VkImageAspectFlags aspect_mask,
+                                   VkImageLayout explicit_layout, VkImageLayout optimal_layout, const char *caller,
+                                   const char *layout_invalid_msg_code, const char *layout_mismatch_msg_code, bool *error) const {
+    if (disabled.image_layout_validation) return false;
+    assert(cb_node);
+    assert(image_state);
+    const auto image = image_state->image;
+    bool skip = false;
+
+    const auto *subresource_map = GetImageSubresourceLayoutMap(cb_node, image);
+    if (subresource_map) {
+        bool subres_skip = false;
+        LayoutUseCheckAndMessage layout_check(subresource_map, aspect_mask);
+        auto subresource_cb = [this, explicit_layout, cb_node, layout_mismatch_msg_code, caller, image, &layout_check, &error,
+                               &subres_skip](const VkImageSubresource &subres, VkImageLayout layout, VkImageLayout initial_layout) {
+            if (!layout_check.Check(subres, explicit_layout, layout, initial_layout)) {
+                *error = true;
+                subres_skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_node->commandBuffer), layout_mismatch_msg_code,
+                            "%s: Cannot use %s (layer=%u mip=%u) with specific layout %s that doesn't match the "
+                            "%s layout %s.",
+                            caller, report_data->FormatHandle(image).c_str(), subres.arrayLayer, subres.mipLevel,
+                            string_VkImageLayout(explicit_layout), layout_check.message, string_VkImageLayout(layout_check.layout));
+            }
+            return !subres_skip;
+        };
+        subresource_map->ForRange(range, subresource_cb);
+        skip |= subres_skip;
+    }
+
+    // If optimal_layout is not UNDEFINED, check that layout matches optimal for this case
+    if ((VK_IMAGE_LAYOUT_UNDEFINED != optimal_layout) && (explicit_layout != optimal_layout)) {
+        if (VK_IMAGE_LAYOUT_GENERAL == explicit_layout) {
+            if (image_state->createInfo.tiling != VK_IMAGE_TILING_LINEAR) {
+                // LAYOUT_GENERAL is allowed, but may not be performance optimal, flag as perf warning.
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+                                VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, HandleToUint64(cb_node->commandBuffer),
+                                kVUID_Core_DrawState_InvalidImageLayout,
+                                "%s: For optimal performance %s layout should be %s instead of GENERAL.", caller,
+                                report_data->FormatHandle(image).c_str(), string_VkImageLayout(optimal_layout));
+            }
+        } else if (device_extensions.vk_khr_shared_presentable_image) {
+            if (image_state->shared_presentable) {
+                if (VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR != explicit_layout) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    layout_invalid_msg_code,
+                                    "Layout for shared presentable image is %s but must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR.",
+                                    string_VkImageLayout(explicit_layout));
+                }
+            }
+        } else {
+            *error = true;
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_node->commandBuffer), layout_invalid_msg_code,
+                            "%s: Layout for %s is %s but can only be %s or VK_IMAGE_LAYOUT_GENERAL.", caller,
+                            report_data->FormatHandle(image).c_str(), string_VkImageLayout(explicit_layout),
+                            string_VkImageLayout(optimal_layout));
+        }
+    }
+    return skip;
+}
+bool CoreChecks::VerifyImageLayout(const CMD_BUFFER_STATE *cb_node, const IMAGE_STATE *image_state,
+                                   const VkImageSubresourceLayers &subLayers, VkImageLayout explicit_layout,
+                                   VkImageLayout optimal_layout, const char *caller, const char *layout_invalid_msg_code,
+                                   const char *layout_mismatch_msg_code, bool *error) const {
+    return VerifyImageLayout(cb_node, image_state, RangeFromLayers(subLayers), explicit_layout, optimal_layout, caller,
+                             layout_invalid_msg_code, layout_mismatch_msg_code, error);
+}
+
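+// Transition each framebuffer attachment to the render pass's finalLayout (and stencilFinalLayout when a
+// VkAttachmentDescriptionStencilLayoutKHR struct is chained) when the render pass ends.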
+void CoreChecks::TransitionFinalSubpassLayouts(CMD_BUFFER_STATE *pCB, const VkRenderPassBeginInfo *pRenderPassBegin,
+                                               FRAMEBUFFER_STATE *framebuffer_state) {
+    auto renderPass = GetRenderPassState(pRenderPassBegin->renderPass);
+    if (!renderPass) return;
+
+    const VkRenderPassCreateInfo2KHR *pRenderPassInfo = renderPass->createInfo.ptr();
+    if (framebuffer_state) {
+        for (uint32_t i = 0; i < pRenderPassInfo->attachmentCount; ++i) {
+            auto view_state = GetAttachmentImageViewState(framebuffer_state, i);
+            if (view_state) {
+                VkImageLayout stencil_layout = kInvalidLayout;
+                const auto *attachment_description_stencil_layout =
+                    lvl_find_in_chain<VkAttachmentDescriptionStencilLayoutKHR>(pRenderPassInfo->pAttachments[i].pNext);
+                if (attachment_description_stencil_layout) {
+                    stencil_layout = attachment_description_stencil_layout->stencilFinalLayout;
+                }
+
+                SetImageViewLayout(pCB, *view_state, pRenderPassInfo->pAttachments[i].finalLayout, stencil_layout);
+            }
+        }
+    }
+}
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
+// This could also be moved into a separate core_validation_android.cpp file.
+
+//
+// AHB-specific validation within non-AHB APIs
+//
+bool CoreChecks::ValidateCreateImageANDROID(const debug_report_data *report_data, const VkImageCreateInfo *create_info) const {
+    bool skip = false;
+
+    const VkExternalFormatANDROID *ext_fmt_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
+    if (ext_fmt_android) {
+        if (0 != ext_fmt_android->externalFormat) {
+            if (VK_FORMAT_UNDEFINED != create_info->format) {
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkImageCreateInfo-pNext-01974",
+                            "vkCreateImage(): VkImageCreateInfo struct has a chained VkExternalFormatANDROID struct with non-zero "
+                            "externalFormat, but the VkImageCreateInfo's format is not VK_FORMAT_UNDEFINED.");
+            }
+
+            if (0 != (VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT & create_info->flags)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkImageCreateInfo-pNext-02396",
+                                "vkCreateImage(): VkImageCreateInfo struct has a chained VkExternalFormatANDROID struct with "
+                                "non-zero externalFormat, but flags include VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT.");
+            }
+
+            if (0 != (~VK_IMAGE_USAGE_SAMPLED_BIT & create_info->usage)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkImageCreateInfo-pNext-02397",
+                                "vkCreateImage(): VkImageCreateInfo struct has a chained VkExternalFormatANDROID struct with "
+                                "non-zero externalFormat, but usage includes bits other than VK_IMAGE_USAGE_SAMPLED_BIT.");
+            }
+
+            if (VK_IMAGE_TILING_OPTIMAL != create_info->tiling) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkImageCreateInfo-pNext-02398",
+                                "vkCreateImage(): VkImageCreateInfo struct has a chained VkExternalFormatANDROID struct with "
+                                "non-zero externalFormat, but tiling is not VK_IMAGE_TILING_OPTIMAL.");
+            }
+        }
+
+        if ((0 != ext_fmt_android->externalFormat) && (0 == ahb_ext_formats_set.count(ext_fmt_android->externalFormat))) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkExternalFormatANDROID-externalFormat-01894",
+                            "vkCreateImage(): Chained VkExternalFormatANDROID struct contains a non-zero externalFormat which has "
+                            "not been previously retrieved by vkGetAndroidHardwareBufferPropertiesANDROID().");
+        }
+    }
+
+    if ((nullptr == ext_fmt_android) || (0 == ext_fmt_android->externalFormat)) {
+        if (VK_FORMAT_UNDEFINED == create_info->format) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkImageCreateInfo-pNext-01975",
+                            "vkCreateImage(): VkImageCreateInfo struct's format is VK_FORMAT_UNDEFINED, but either does not have a "
+                            "chained VkExternalFormatANDROID struct or the struct exists but has an externalFormat of 0.");
+        }
+    }
+
+    const VkExternalMemoryImageCreateInfo *emici = lvl_find_in_chain<VkExternalMemoryImageCreateInfo>(create_info->pNext);
+    if (emici && (emici->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
+        if (create_info->imageType != VK_IMAGE_TYPE_2D) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkImageCreateInfo-pNext-02393",
+                        "vkCreateImage(): VkImageCreateInfo struct with imageType %s has chained VkExternalMemoryImageCreateInfo "
+                        "struct with handleType VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID; "
+                        "imageType must be VK_IMAGE_TYPE_2D.",
+                        string_VkImageType(create_info->imageType));
+        }
+
+        if ((create_info->mipLevels != 1) && (create_info->mipLevels != FullMipChainLevels(create_info->extent))) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkImageCreateInfo-pNext-02394",
+                            "vkCreateImage(): VkImageCreateInfo struct with chained VkExternalMemoryImageCreateInfo struct of "
+                            "handleType VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID "
+                            "specifies mipLevels = %" PRId32 " (full chain mipLevels are %" PRId32 ").",
+                            create_info->mipLevels, FullMipChainLevels(create_info->extent));
+        }
+    }
+
+    return skip;
+}
+
+bool CoreChecks::ValidateCreateImageViewANDROID(const VkImageViewCreateInfo *create_info) const {
+    bool skip = false;
+    const IMAGE_STATE *image_state = GetImageState(create_info->image);
+
+    if (image_state->has_ahb_format) {
+        if (VK_FORMAT_UNDEFINED != create_info->format) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(create_info->image), "VUID-VkImageViewCreateInfo-image-02399",
+                            "vkCreateImageView(): VkImageViewCreateInfo struct has a chained VkExternalFormatANDROID struct, but "
+                            "format member is %s.",
+                            string_VkFormat(create_info->format));
+        }
+
+        // Chain must include a compatible ycbcr conversion
+        bool conv_found = false;
+        uint64_t external_format = 0;
+        const VkSamplerYcbcrConversionInfo *ycbcr_conv_info = lvl_find_in_chain<VkSamplerYcbcrConversionInfo>(create_info->pNext);
+        if (ycbcr_conv_info != nullptr) {
+            VkSamplerYcbcrConversion conv_handle = ycbcr_conv_info->conversion;
+            if (ycbcr_conversion_ahb_fmt_map.find(conv_handle) != ycbcr_conversion_ahb_fmt_map.end()) {
+                conv_found = true;
+                external_format = ycbcr_conversion_ahb_fmt_map.at(conv_handle);
+            }
+        }
+        if ((!conv_found) || (external_format != image_state->ahb_format)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(create_info->image), "VUID-VkImageViewCreateInfo-image-02400",
+                            "vkCreateImageView(): VkImageViewCreateInfo struct has a chained VkExternalFormatANDROID struct, but "
+                            "without a chained VkSamplerYcbcrConversionInfo struct with the same external format.");
+        }
+
+        // Errors in create_info swizzles
+        if ((create_info->components.r != VK_COMPONENT_SWIZZLE_IDENTITY) ||
+            (create_info->components.g != VK_COMPONENT_SWIZZLE_IDENTITY) ||
+            (create_info->components.b != VK_COMPONENT_SWIZZLE_IDENTITY) ||
+            (create_info->components.a != VK_COMPONENT_SWIZZLE_IDENTITY)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(create_info->image), "VUID-VkImageViewCreateInfo-image-02401",
+                            "vkCreateImageView(): VkImageViewCreateInfo struct has a chained VkExternalFormatANDROID struct, but "
+                            "includes one or more non-identity component swizzles.");
+        }
+    }
+
+    return skip;
+}
+
+bool CoreChecks::ValidateGetImageSubresourceLayoutANDROID(const VkImage image) const {
+    bool skip = false;
+
+    const IMAGE_STATE *image_state = GetImageState(image);
+    if (image_state->imported_ahb && (0 == image_state->GetBoundMemory().size())) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image),
+                        "VUID-vkGetImageSubresourceLayout-image-01895",
+                        "vkGetImageSubresourceLayout(): Attempt to query layout from an image created with "
+                        "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID handleType which has not yet been "
+                        "bound to memory.");
+    }
+    return skip;
+}
+
+#else
+
+bool CoreChecks::ValidateCreateImageANDROID(const debug_report_data *report_data, const VkImageCreateInfo *create_info) const {
+    return false;
+}
+
+bool CoreChecks::ValidateCreateImageViewANDROID(const VkImageViewCreateInfo *create_info) const { return false; }
+
+bool CoreChecks::ValidateGetImageSubresourceLayoutANDROID(const VkImage image) const { return false; }
+
+#endif  // VK_USE_PLATFORM_ANDROID_KHR
+
+bool CoreChecks::PreCallValidateCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
+                                            const VkAllocationCallbacks *pAllocator, VkImage *pImage) const {
+    bool skip = false;
+
+    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
+        skip |= ValidateCreateImageANDROID(report_data, pCreateInfo);
+    } else {  // These checks are omitted or replaced when Android HW Buffer extension is active
+        if (pCreateInfo->format == VK_FORMAT_UNDEFINED) {
+            return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                           "VUID-VkImageCreateInfo-format-00943",
+                           "vkCreateImage(): VkFormat for image must not be VK_FORMAT_UNDEFINED.");
+        }
+    }
+
+    if (pCreateInfo->flags & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT) {
+        if (VK_IMAGE_TYPE_2D != pCreateInfo->imageType) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkImageCreateInfo-flags-00949",
+                            "vkCreateImage(): Image type must be VK_IMAGE_TYPE_2D when VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT "
+                            "flag bit is set");
+        }
+
+        if ((pCreateInfo->extent.width != pCreateInfo->extent.height) || (pCreateInfo->arrayLayers < 6)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkImageCreateInfo-imageType-00954",
+                            "vkCreateImage(): If VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT flag bit is set, width (%d) must equal "
+                            "height (%d) and arrayLayers (%d) must be >= 6.",
+                            pCreateInfo->extent.width, pCreateInfo->extent.height, pCreateInfo->arrayLayers);
+        }
+    }
+
+    const VkPhysicalDeviceLimits *device_limits = &phys_dev_props.limits;
+    VkImageUsageFlags attach_flags = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT |
+                                     VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
+    if ((pCreateInfo->usage & attach_flags) && (pCreateInfo->extent.width > device_limits->maxFramebufferWidth)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkImageCreateInfo-usage-00964",
+                        "vkCreateImage(): Image usage flags include a frame buffer attachment bit and image width exceeds device "
+                        "maxFramebufferWidth.");
+    }
+
+    if ((pCreateInfo->usage & attach_flags) && (pCreateInfo->extent.height > device_limits->maxFramebufferHeight)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkImageCreateInfo-usage-00965",
+                        "vkCreateImage(): Image usage flags include a frame buffer attachment bit and image height exceeds device "
+                        "maxFramebufferHeight");
+    }
+
+    if (device_extensions.vk_ext_fragment_density_map) {
+        uint32_t ceiling_width =
+            (uint32_t)ceil((float)device_limits->maxFramebufferWidth /
+                           std::max((float)phys_dev_ext_props.fragment_density_map_props.minFragmentDensityTexelSize.width, 1.0f));
+        if ((pCreateInfo->usage & VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT) && (pCreateInfo->extent.width > ceiling_width)) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkImageCreateInfo-usage-02559",
+                        "vkCreateImage(): Image usage flags include a fragment density map bit and image width (%u) exceeds the "
+                        "ceiling of device "
+                        "maxFramebufferWidth (%u) / minFragmentDensityTexelSize.width (%u). The ceiling value: %u",
+                        pCreateInfo->extent.width, device_limits->maxFramebufferWidth,
+                        phys_dev_ext_props.fragment_density_map_props.minFragmentDensityTexelSize.width, ceiling_width);
+        }
+
+        uint32_t ceiling_height =
+            (uint32_t)ceil((float)device_limits->maxFramebufferHeight /
+                           std::max((float)phys_dev_ext_props.fragment_density_map_props.minFragmentDensityTexelSize.height, 1.0f));
+        if ((pCreateInfo->usage & VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT) && (pCreateInfo->extent.height > ceiling_height)) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkImageCreateInfo-usage-02560",
+                        "vkCreateImage(): Image usage flags include a fragment density map bit and image height (%u) exceeds the "
+                        "ceiling of device "
+                        "maxFramebufferHeight (%u) / minFragmentDensityTexelSize.height (%u). The ceiling value: %u",
+                        pCreateInfo->extent.height, device_limits->maxFramebufferHeight,
+                        phys_dev_ext_props.fragment_density_map_props.minFragmentDensityTexelSize.height, ceiling_height);
+        }
+    }
+
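+    // Query the implementation's per-format limits; images created with a DRM format modifier must use
+    // the GetPhysicalDeviceImageFormatProperties2 query path.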
+    VkImageFormatProperties format_limits = {};
+    VkResult result = VK_SUCCESS;
+    if (pCreateInfo->tiling != VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
+        result = DispatchGetPhysicalDeviceImageFormatProperties(physical_device, pCreateInfo->format, pCreateInfo->imageType,
+                                                                pCreateInfo->tiling, pCreateInfo->usage, pCreateInfo->flags,
+                                                                &format_limits);
+    } else {
+        auto image_format_info = lvl_init_struct<VkPhysicalDeviceImageFormatInfo2>();
+        auto image_format_properties = lvl_init_struct<VkImageFormatProperties2>();
+        image_format_info.format = pCreateInfo->format;
+        image_format_info.type = pCreateInfo->imageType;
+        image_format_info.tiling = pCreateInfo->tiling;
+        image_format_info.usage = pCreateInfo->usage;
+        image_format_info.flags = pCreateInfo->flags;
+        result = DispatchGetPhysicalDeviceImageFormatProperties2(physical_device, &image_format_info, &image_format_properties);
+        format_limits = image_format_properties.imageFormatProperties;
+    }
+
+    if (result == VK_ERROR_FORMAT_NOT_SUPPORTED) {
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+        if (!lvl_find_in_chain<VkExternalFormatANDROID>(pCreateInfo->pNext))
+#endif  // VK_USE_PLATFORM_ANDROID_KHR
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUIDUndefined,
+                            "vkCreateImage(): Format %s is not supported for this combination of parameters.",
+                            string_VkFormat(pCreateInfo->format));
+    } else {
+        if (pCreateInfo->mipLevels > format_limits.maxMipLevels) {
+            const char *format_string = string_VkFormat(pCreateInfo->format);
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkImageCreateInfo-mipLevels-02255",
+                            "vkCreateImage(): Image mip levels=%d exceed image format maxMipLevels=%d for format %s.",
+                            pCreateInfo->mipLevels, format_limits.maxMipLevels, format_string);
+        }
+
+        uint64_t texel_count = (uint64_t)pCreateInfo->extent.width * (uint64_t)pCreateInfo->extent.height *
+                               (uint64_t)pCreateInfo->extent.depth * (uint64_t)pCreateInfo->arrayLayers *
+                               (uint64_t)pCreateInfo->samples;
+        uint64_t total_size = (uint64_t)std::ceil(FormatTexelSize(pCreateInfo->format) * texel_count);
+
+        // Round up to imageGranularity boundary
+        VkDeviceSize imageGranularity = phys_dev_props.limits.bufferImageGranularity;
+        uint64_t ig_mask = imageGranularity - 1;
+        total_size = (total_size + ig_mask) & ~ig_mask;
+
+        if (total_size > format_limits.maxResourceSize) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 0,
+                            kVUID_Core_Image_InvalidFormatLimitsViolation,
+                            "vkCreateImage(): estimated image resource size = 0x%" PRIxLEAST64
+                            " exceeds the format's maxResourceSize = 0x%" PRIxLEAST64 ".",
+                            total_size, format_limits.maxResourceSize);
+        }
+
+        if (pCreateInfo->arrayLayers > format_limits.maxArrayLayers) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 0,
+                            "VUID-VkImageCreateInfo-arrayLayers-02256",
+                            "vkCreateImage(): arrayLayers=%d exceeds allowable maximum supported by format of %d.",
+                            pCreateInfo->arrayLayers, format_limits.maxArrayLayers);
+        }
+
+        if (device_extensions.vk_khr_sampler_ycbcr_conversion && FormatRequiresYcbcrConversion(pCreateInfo->format) &&
+            !device_extensions.vk_ext_ycbcr_image_arrays && pCreateInfo->arrayLayers > 1) {
+            skip |= log_msg(
+                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 0,
+                "VUID-VkImageCreateInfo-format-02653",
+                "vkCreateImage(): arrayLayers=%d exceeds the maximum allowed of 1 for formats requiring sampler ycbcr conversion",
+                pCreateInfo->arrayLayers);
+        }
+
+        if ((pCreateInfo->samples & format_limits.sampleCounts) == 0) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 0,
+                        "VUID-VkImageCreateInfo-samples-02258",
+                        "vkCreateImage(): samples %s are not supported; the format's supported sampleCounts mask is 0x%.8X.",
+                        string_VkSampleCountFlagBits(pCreateInfo->samples), format_limits.sampleCounts);
+        }
+    }
+
+    if ((pCreateInfo->flags & VK_IMAGE_CREATE_SPARSE_ALIASED_BIT) && (!enabled_features.core.sparseResidencyAliased)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkImageCreateInfo-flags-01924",
+                        "vkCreateImage(): the sparseResidencyAliased device feature is disabled: Images cannot be created with the "
+                        "VK_IMAGE_CREATE_SPARSE_ALIASED_BIT set.");
+    }
+
+    if (device_extensions.vk_khr_maintenance2) {
+        if (pCreateInfo->flags & VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR) {
+            if (!(FormatIsCompressed_BC(pCreateInfo->format) || FormatIsCompressed_ASTC_LDR(pCreateInfo->format) ||
+                  FormatIsCompressed_ETC2_EAC(pCreateInfo->format))) {
+                // TODO: Add Maintenance2 VUID
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUIDUndefined,
+                            "vkCreateImage(): If pCreateInfo->flags contains VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR, "
+                            "format must be block, ETC or ASTC compressed, but is %s",
+                            string_VkFormat(pCreateInfo->format));
+            }
+            if (!(pCreateInfo->flags & VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT)) {
+                // TODO: Add Maintenance2 VUID
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUIDUndefined,
+                            "vkCreateImage(): If pCreateInfo->flags contains VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR, "
+                            "flags must also contain VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT.");
+            }
+        }
+    }
+
+    if (pCreateInfo->sharingMode == VK_SHARING_MODE_CONCURRENT && pCreateInfo->pQueueFamilyIndices) {
+        skip |= ValidateQueueFamilies(pCreateInfo->queueFamilyIndexCount, pCreateInfo->pQueueFamilyIndices, "vkCreateImage",
+                                      "pCreateInfo->pQueueFamilyIndices", "VUID-VkImageCreateInfo-sharingMode-01420",
+                                      "VUID-VkImageCreateInfo-sharingMode-01420", false);
+    }
+
+    if (!FormatIsMultiplane(pCreateInfo->format) && !(pCreateInfo->flags & VK_IMAGE_CREATE_ALIAS_BIT) &&
+        (pCreateInfo->flags & VK_IMAGE_CREATE_DISJOINT_BIT)) {
+        skip |=
+            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                    "VUID-VkImageCreateInfo-format-01577",
+                    "vkCreateImage(): format is %s and flags are %s. The flags should not include VK_IMAGE_CREATE_DISJOINT_BIT.",
+                    string_VkFormat(pCreateInfo->format), string_VkImageCreateFlags(pCreateInfo->flags).c_str());
+    }
+    return skip;
+}
+
+void CoreChecks::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
+                                           const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
+    if (VK_SUCCESS != result) return;
+
+    StateTracker::PostCallRecordCreateImage(device, pCreateInfo, pAllocator, pImage, result);
+
+    IMAGE_LAYOUT_STATE image_state;
+    image_state.layout = pCreateInfo->initialLayout;
+    image_state.format = pCreateInfo->format;
+    ImageSubresourcePair subpair{*pImage, false, VkImageSubresource()};
+    imageSubresourceMap[*pImage].push_back(subpair);
+    imageLayoutMap[subpair] = image_state;
+}
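+
+// Note on the record hook above: it seeds the validation-only layout tracking (imageSubresourceMap /
+// imageLayoutMap) with the image's initialLayout; generic image state is recorded by the StateTracker call.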
+
+bool CoreChecks::PreCallValidateDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) const {
+    const IMAGE_STATE *image_state = GetImageState(image);
+    const VulkanTypedHandle obj_struct(image, kVulkanObjectTypeImage);
+    bool skip = false;
+    if (image_state) {
+        skip |= ValidateObjectNotInUse(image_state, obj_struct, "vkDestroyImage", "VUID-vkDestroyImage-image-01000");
+    }
+    return skip;
+}
+
+void CoreChecks::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
+    // Clean up validation specific data
+    EraseQFOReleaseBarriers<VkImageMemoryBarrier>(image);
+
+    const auto &sub_entry = imageSubresourceMap.find(image);
+    if (sub_entry != imageSubresourceMap.end()) {
+        for (const auto &pair : sub_entry->second) {
+            imageLayoutMap.erase(pair);
+        }
+        imageSubresourceMap.erase(sub_entry);
+    }
+
+    // Clean up generic image state
+    StateTracker::PreCallRecordDestroyImage(device, image, pAllocator);
+}
+
+bool CoreChecks::ValidateImageAttributes(const IMAGE_STATE *image_state, const VkImageSubresourceRange &range) const {
+    bool skip = false;
+
+    if (range.aspectMask != VK_IMAGE_ASPECT_COLOR_BIT) {
+        char const str[] = "vkCmdClearColorImage aspectMasks for all subresource ranges must be set to VK_IMAGE_ASPECT_COLOR_BIT";
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                        HandleToUint64(image_state->image), kVUID_Core_DrawState_InvalidImageAspect, str);
+    }
+
+    if (FormatIsDepthOrStencil(image_state->createInfo.format)) {
+        char const str[] = "vkCmdClearColorImage called with depth/stencil image.";
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                        HandleToUint64(image_state->image), "VUID-vkCmdClearColorImage-image-00007", "%s.", str);
+    } else if (FormatIsCompressed(image_state->createInfo.format)) {
+        char const str[] = "vkCmdClearColorImage called with compressed image.";
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                        HandleToUint64(image_state->image), "VUID-vkCmdClearColorImage-image-00007", "%s.", str);
+    }
+
+    if (!(image_state->createInfo.usage & VK_IMAGE_USAGE_TRANSFER_DST_BIT)) {
+        char const str[] = "vkCmdClearColorImage called with image created without VK_IMAGE_USAGE_TRANSFER_DST_BIT.";
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                        HandleToUint64(image_state->image), "VUID-vkCmdClearColorImage-image-00002", "%s.", str);
+    }
+    return skip;
+}
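+
+// In short, vkCmdClearColorImage requires a color-aspect, non-depth/stencil, non-compressed image that was
+// created with VK_IMAGE_USAGE_TRANSFER_DST_BIT; each violation above is reported with its own VUID or
+// core-check id.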
+
+uint32_t ResolveRemainingLevels(const VkImageSubresourceRange *range, uint32_t mip_levels) {
+    // Return correct number of mip levels taking into account VK_REMAINING_MIP_LEVELS
+    uint32_t mip_level_count = range->levelCount;
+    if (range->levelCount == VK_REMAINING_MIP_LEVELS) {
+        mip_level_count = mip_levels - range->baseMipLevel;
+    }
+    return mip_level_count;
+}
+
+uint32_t ResolveRemainingLayers(const VkImageSubresourceRange *range, uint32_t layers) {
+    // Return correct number of layers taking into account VK_REMAINING_ARRAY_LAYERS
+    uint32_t array_layer_count = range->layerCount;
+    if (range->layerCount == VK_REMAINING_ARRAY_LAYERS) {
+        array_layer_count = layers - range->baseArrayLayer;
+    }
+    return array_layer_count;
+}
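+
+// Worked example for the two helpers above: for an image with mipLevels = 10 and arrayLayers = 6, a range of
+// {baseMipLevel = 3, levelCount = VK_REMAINING_MIP_LEVELS, baseArrayLayer = 2, layerCount = VK_REMAINING_ARRAY_LAYERS}
+// resolves to 10 - 3 = 7 mip levels and 6 - 2 = 4 array layers.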
+
+bool CoreChecks::VerifyClearImageLayout(const CMD_BUFFER_STATE *cb_node, const IMAGE_STATE *image_state,
+                                        const VkImageSubresourceRange &range, VkImageLayout dest_image_layout,
+                                        const char *func_name) const {
+    bool skip = false;
+
+    if (dest_image_layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) {
+        if (dest_image_layout == VK_IMAGE_LAYOUT_GENERAL) {
+            if (image_state->createInfo.tiling != VK_IMAGE_TILING_LINEAR) {
+                // LAYOUT_GENERAL is allowed, but may not be performance optimal, flag as perf warning.
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                HandleToUint64(image_state->image), kVUID_Core_DrawState_InvalidImageLayout,
+                                "%s: Layout for cleared image should be TRANSFER_DST_OPTIMAL instead of GENERAL.", func_name);
+            }
+        } else if (VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR == dest_image_layout) {
+            if (!device_extensions.vk_khr_shared_presentable_image) {
+                // TODO: Add unique error id when available.
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                HandleToUint64(image_state->image), 0,
+                                "Must enable VK_KHR_shared_presentable_image extension before creating images with a layout type "
+                                "of VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR.");
+
+            } else {
+                if (image_state->shared_presentable) {
+                    skip |= log_msg(
+                        report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                        HandleToUint64(image_state->image), 0,
+                        "Layout for shared presentable cleared image is %s but can only be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR.",
+                        string_VkImageLayout(dest_image_layout));
+                }
+            }
+        } else {
+            const char *error_code = "VUID-vkCmdClearColorImage-imageLayout-00005";
+            if (strcmp(func_name, "vkCmdClearDepthStencilImage()") == 0) {
+                error_code = "VUID-vkCmdClearDepthStencilImage-imageLayout-00012";
+            } else {
+                assert(strcmp(func_name, "vkCmdClearColorImage()") == 0);
+            }
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(image_state->image), error_code,
+                            "%s: Layout for cleared image is %s but can only be TRANSFER_DST_OPTIMAL or GENERAL.", func_name,
+                            string_VkImageLayout(dest_image_layout));
+        }
+    }
+
+    // Use the const accessor so the subresource layout map is not created at validate time.
+    const auto *subresource_map = GetImageSubresourceLayoutMap(cb_node, image_state->image);
+    if (subresource_map) {
+        bool subres_skip = false;
+        LayoutUseCheckAndMessage layout_check(subresource_map);
+        VkImageSubresourceRange normalized_isr = NormalizeSubresourceRange(*image_state, range);
+        auto subres_callback = [this, cb_node, dest_image_layout, func_name, &layout_check, &subres_skip](
+                                   const VkImageSubresource &subres, VkImageLayout layout, VkImageLayout initial_layout) {
+            if (!layout_check.Check(subres, dest_image_layout, layout, initial_layout)) {
+                const char *error_code = "VUID-vkCmdClearColorImage-imageLayout-00004";
+                if (strcmp(func_name, "vkCmdClearDepthStencilImage()") == 0) {
+                    error_code = "VUID-vkCmdClearDepthStencilImage-imageLayout-00011";
+                } else {
+                    assert(strcmp(func_name, "vkCmdClearColorImage()") == 0);
+                }
+                subres_skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                       HandleToUint64(cb_node->commandBuffer), error_code,
+                                       "%s: Cannot clear an image whose layout is %s and doesn't match the %s layout %s.",
+                                       func_name, string_VkImageLayout(dest_image_layout), layout_check.message,
+                                       string_VkImageLayout(layout_check.layout));
+            }
+            return !subres_skip;
+        };
+        subresource_map->ForRange(normalized_isr, subres_callback);
+        skip |= subres_skip;
+    }
+
+    return skip;
+}
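+
+// Summary of the layout rules enforced above: the clear destination layout must be TRANSFER_DST_OPTIMAL;
+// GENERAL is tolerated with only a performance warning for non-linearly-tiled images; SHARED_PRESENT_KHR is
+// only considered when VK_KHR_shared_presentable_image is enabled; and the tracked layout of every affected
+// subresource must match the layout passed to the clear command.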
+
+bool CoreChecks::PreCallValidateCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
+                                                   const VkClearColorValue *pColor, uint32_t rangeCount,
+                                                   const VkImageSubresourceRange *pRanges) const {
+    bool skip = false;
+    // TODO : Verify memory is in VK_IMAGE_STATE_CLEAR state
+    const auto *cb_node = GetCBState(commandBuffer);
+    const auto *image_state = GetImageState(image);
+    if (cb_node && image_state) {
+        skip |= ValidateMemoryIsBoundToImage(image_state, "vkCmdClearColorImage()", "VUID-vkCmdClearColorImage-image-00003");
+        skip |= ValidateCmdQueueFlags(cb_node, "vkCmdClearColorImage()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+                                      "VUID-vkCmdClearColorImage-commandBuffer-cmdpool");
+        skip |= ValidateCmd(cb_node, CMD_CLEARCOLORIMAGE, "vkCmdClearColorImage()");
+        if (api_version >= VK_API_VERSION_1_1 || device_extensions.vk_khr_maintenance1) {
+            skip |=
+                ValidateImageFormatFeatureFlags(image_state, VK_FORMAT_FEATURE_TRANSFER_DST_BIT, "vkCmdClearColorImage",
+                                                "VUID-vkCmdClearColorImage-image-01993", "VUID-vkCmdClearColorImage-image-01993");
+        }
+        skip |= InsideRenderPass(cb_node, "vkCmdClearColorImage()", "VUID-vkCmdClearColorImage-renderpass");
+        for (uint32_t i = 0; i < rangeCount; ++i) {
+            std::string param_name = "pRanges[" + std::to_string(i) + "]";
+            skip |= ValidateCmdClearColorSubresourceRange(image_state, pRanges[i], param_name.c_str());
+            skip |= ValidateImageAttributes(image_state, pRanges[i]);
+            skip |= VerifyClearImageLayout(cb_node, image_state, pRanges[i], imageLayout, "vkCmdClearColorImage()");
+        }
+    }
+    return skip;
+}
+
+void CoreChecks::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
+                                                 const VkClearColorValue *pColor, uint32_t rangeCount,
+                                                 const VkImageSubresourceRange *pRanges) {
+    StateTracker::PreCallRecordCmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
+
+    auto cb_node = GetCBState(commandBuffer);
+    auto image_state = GetImageState(image);
+    if (cb_node && image_state) {
+        for (uint32_t i = 0; i < rangeCount; ++i) {
+            SetImageInitialLayout(cb_node, image, pRanges[i], imageLayout);
+        }
+    }
+}
+
+bool CoreChecks::PreCallValidateCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
+                                                          const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
+                                                          const VkImageSubresourceRange *pRanges) const {
+    bool skip = false;
+
+    // TODO : Verify memory is in VK_IMAGE_STATE_CLEAR state
+    const auto *cb_node = GetCBState(commandBuffer);
+    const auto *image_state = GetImageState(image);
+    if (cb_node && image_state) {
+        skip |= ValidateMemoryIsBoundToImage(image_state, "vkCmdClearDepthStencilImage()",
+                                             "VUID-vkCmdClearDepthStencilImage-image-00010");
+        skip |= ValidateCmdQueueFlags(cb_node, "vkCmdClearDepthStencilImage()", VK_QUEUE_GRAPHICS_BIT,
+                                      "VUID-vkCmdClearDepthStencilImage-commandBuffer-cmdpool");
+        skip |= ValidateCmd(cb_node, CMD_CLEARDEPTHSTENCILIMAGE, "vkCmdClearDepthStencilImage()");
+        if (api_version >= VK_API_VERSION_1_1 || device_extensions.vk_khr_maintenance1) {
+            skip |= ValidateImageFormatFeatureFlags(image_state, VK_FORMAT_FEATURE_TRANSFER_DST_BIT, "vkCmdClearDepthStencilImage",
+                                                    "VUID-vkCmdClearDepthStencilImage-image-01994",
+                                                    "VUID-vkCmdClearDepthStencilImage-image-01994");
+        }
+        skip |= InsideRenderPass(cb_node, "vkCmdClearDepthStencilImage()", "VUID-vkCmdClearDepthStencilImage-renderpass");
+        for (uint32_t i = 0; i < rangeCount; ++i) {
+            std::string param_name = "pRanges[" + std::to_string(i) + "]";
+            skip |= ValidateCmdClearDepthSubresourceRange(image_state, pRanges[i], param_name.c_str());
+            skip |= VerifyClearImageLayout(cb_node, image_state, pRanges[i], imageLayout, "vkCmdClearDepthStencilImage()");
+            // Image aspect must be depth or stencil or both
+            VkImageAspectFlags valid_aspects = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+            if (((pRanges[i].aspectMask & valid_aspects) == 0) || ((pRanges[i].aspectMask & ~valid_aspects) != 0)) {
+                char const str[] =
+                    "vkCmdClearDepthStencilImage aspectMasks for all subresource ranges must be set to VK_IMAGE_ASPECT_DEPTH_BIT "
+                    "and/or VK_IMAGE_ASPECT_STENCIL_BIT";
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(commandBuffer), kVUID_Core_DrawState_InvalidImageAspect, str);
+            }
+        }
+        if (image_state && !FormatIsDepthOrStencil(image_state->createInfo.format)) {
+            char const str[] = "vkCmdClearDepthStencilImage called without a depth/stencil image.";
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(image), "VUID-vkCmdClearDepthStencilImage-image-00014", "%s.", str);
+        }
+        if (VK_IMAGE_USAGE_TRANSFER_DST_BIT != (VK_IMAGE_USAGE_TRANSFER_DST_BIT & image_state->createInfo.usage)) {
+            char const str[] =
+                "vkCmdClearDepthStencilImage() called with an image that was not created with the VK_IMAGE_USAGE_TRANSFER_DST_BIT "
+                "set.";
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(image), "VUID-vkCmdClearDepthStencilImage-image-00009", "%s.", str);
+        }
+    }
+    return skip;
+}
+
+void CoreChecks::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
+                                                        const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
+                                                        const VkImageSubresourceRange *pRanges) {
+    StateTracker::PreCallRecordCmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
+    auto cb_node = GetCBState(commandBuffer);
+    auto image_state = GetImageState(image);
+    if (cb_node && image_state) {
+        for (uint32_t i = 0; i < rangeCount; ++i) {
+            SetImageInitialLayout(cb_node, image, pRanges[i], imageLayout);
+        }
+    }
+}
+
+// Returns true if the half-open ranges [start, start + start_offset) and [end, end + end_offset) overlap
+static bool RangesIntersect(int32_t start, uint32_t start_offset, int32_t end, uint32_t end_offset) {
+    bool result = false;
+    uint32_t intersection_min = std::max(static_cast<uint32_t>(start), static_cast<uint32_t>(end));
+    uint32_t intersection_max = std::min(static_cast<uint32_t>(start) + start_offset, static_cast<uint32_t>(end) + end_offset);
+
+    if (intersection_max > intersection_min) {
+        result = true;
+    }
+    return result;
+}
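+
+// Worked example for RangesIntersect: layer ranges [0, 4) (start = 0, start_offset = 4) and [2, 6)
+// (end = 2, end_offset = 4) give intersection_min = 2 and intersection_max = 4, so they overlap; adjacent
+// ranges such as [0, 4) and [4, 8) yield intersection_min == intersection_max == 4 and do not.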
+
+// Returns true if source area of first copy region intersects dest area of second region
+// It is assumed that these are copy regions within a single image (otherwise no possibility of collision)
+static bool RegionIntersects(const VkImageCopy *rgn0, const VkImageCopy *rgn1, VkImageType type, bool is_multiplane) {
+    bool result = false;
+
+    // Separate planes within a multiplane image cannot intersect
+    if (is_multiplane && (rgn0->srcSubresource.aspectMask != rgn1->dstSubresource.aspectMask)) {
+        return result;
+    }
+
+    if ((rgn0->srcSubresource.mipLevel == rgn1->dstSubresource.mipLevel) &&
+        (RangesIntersect(rgn0->srcSubresource.baseArrayLayer, rgn0->srcSubresource.layerCount, rgn1->dstSubresource.baseArrayLayer,
+                         rgn1->dstSubresource.layerCount))) {
+        result = true;
+        switch (type) {
+            case VK_IMAGE_TYPE_3D:
+                result &= RangesIntersect(rgn0->srcOffset.z, rgn0->extent.depth, rgn1->dstOffset.z, rgn1->extent.depth);
+                // fall through
+            case VK_IMAGE_TYPE_2D:
+                result &= RangesIntersect(rgn0->srcOffset.y, rgn0->extent.height, rgn1->dstOffset.y, rgn1->extent.height);
+                // fall through
+            case VK_IMAGE_TYPE_1D:
+                result &= RangesIntersect(rgn0->srcOffset.x, rgn0->extent.width, rgn1->dstOffset.x, rgn1->extent.width);
+                break;
+            default:
+                // Unrecognized or new IMAGE_TYPE enums will be caught in parameter_validation
+                assert(false);
+        }
+    }
+    return result;
+}
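+
+// For a VK_IMAGE_TYPE_2D image, two regions collide only when they target the same mip level, their layer
+// ranges overlap, and (via the fall-through cases above) both their y and x ranges overlap; for
+// VK_IMAGE_TYPE_3D the z ranges must additionally overlap.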
+
+// Returns non-zero if offset and extent exceed image extents
+static const uint32_t x_bit = 1;
+static const uint32_t y_bit = 2;
+static const uint32_t z_bit = 4;
+static uint32_t ExceedsBounds(const VkOffset3D *offset, const VkExtent3D *extent, const VkExtent3D *image_extent) {
+    uint32_t result = 0;
+    // Extents/depths cannot be negative, but the checks are left in for clarity
+    if ((offset->z + extent->depth > image_extent->depth) || (offset->z < 0) ||
+        ((offset->z + static_cast<int32_t>(extent->depth)) < 0)) {
+        result |= z_bit;
+    }
+    if ((offset->y + extent->height > image_extent->height) || (offset->y < 0) ||
+        ((offset->y + static_cast<int32_t>(extent->height)) < 0)) {
+        result |= y_bit;
+    }
+    if ((offset->x + extent->width > image_extent->width) || (offset->x < 0) ||
+        ((offset->x + static_cast<int32_t>(extent->width)) < 0)) {
+        result |= x_bit;
+    }
+    return result;
+}
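+
+// Example for ExceedsBounds: against an 8x8x1 subresource extent, an offset of (6, 0, 0) with an extent of
+// {4, 4, 1} exceeds only the width, so the function returns x_bit (1); callers can test x_bit/y_bit/z_bit
+// individually to report per-dimension errors.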
+
+// Test if two VkExtent3D structs are equivalent
+static inline bool IsExtentEqual(const VkExtent3D *extent, const VkExtent3D *other_extent) {
+    bool result = true;
+    if ((extent->width != other_extent->width) || (extent->height != other_extent->height) ||
+        (extent->depth != other_extent->depth)) {
+        result = false;
+    }
+    return result;
+}
+
+// For image copies between compressed/uncompressed formats, the extent is provided in source image texels
+// Destination image texel extents must be adjusted by block size for the dest validation checks
+VkExtent3D GetAdjustedDestImageExtent(VkFormat src_format, VkFormat dst_format, VkExtent3D extent) {
+    VkExtent3D adjusted_extent = extent;
+    if ((FormatIsCompressed(src_format) || FormatIsSinglePlane_422(src_format)) &&
+        !(FormatIsCompressed(dst_format) || FormatIsSinglePlane_422(dst_format))) {
+        VkExtent3D block_size = FormatTexelBlockExtent(src_format);
+        adjusted_extent.width /= block_size.width;
+        adjusted_extent.height /= block_size.height;
+        adjusted_extent.depth /= block_size.depth;
+    } else if (!(FormatIsCompressed(src_format) || FormatIsSinglePlane_422(src_format)) &&
+               (FormatIsCompressed(dst_format) || FormatIsSinglePlane_422(dst_format))) {
+        VkExtent3D block_size = FormatTexelBlockExtent(dst_format);
+        adjusted_extent.width *= block_size.width;
+        adjusted_extent.height *= block_size.height;
+        adjusted_extent.depth *= block_size.depth;
+    }
+    return adjusted_extent;
+}
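+
+// Worked example: with a compressed source that uses 4x4x1 texel blocks (e.g. a BC or ETC2 format) and an
+// uncompressed destination, a copy extent of {16, 16, 1} (given in source texels) is adjusted to {4, 4, 1}
+// destination texels; in the reverse direction, uncompressed source to compressed destination, {4, 4, 1}
+// becomes {16, 16, 1}.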
+
+// Returns the effective extent of an image subresource, adjusted for mip level and array depth.
+static inline VkExtent3D GetImageSubresourceExtent(const IMAGE_STATE *img, const VkImageSubresourceLayers *subresource) {
+    const uint32_t mip = subresource->mipLevel;
+
+    // Return zero extent if mip level doesn't exist
+    if (mip >= img->createInfo.mipLevels) {
+        return VkExtent3D{0, 0, 0};
+    }
+
+    // Don't allow mip adjustment to create a 0 dim, but pass along a 0 if that's what the subresource specified
+    VkExtent3D extent = img->createInfo.extent;
+
+    // If multi-plane, adjust per-plane extent
+    if (FormatIsMultiplane(img->createInfo.format)) {
+        VkExtent2D divisors = FindMultiplaneExtentDivisors(img->createInfo.format, subresource->aspectMask);
+        extent.width /= divisors.width;
+        extent.height /= divisors.height;
+    }
+
+    if (img->createInfo.flags & VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV) {
+        extent.width = (0 == extent.width ? 0 : std::max(2U, 1 + ((extent.width - 1) >> mip)));
+        extent.height = (0 == extent.height ? 0 : std::max(2U, 1 + ((extent.height - 1) >> mip)));
+        extent.depth = (0 == extent.depth ? 0 : std::max(2U, 1 + ((extent.depth - 1) >> mip)));
+    } else {
+        extent.width = (0 == extent.width ? 0 : std::max(1U, extent.width >> mip));
+        extent.height = (0 == extent.height ? 0 : std::max(1U, extent.height >> mip));
+        extent.depth = (0 == extent.depth ? 0 : std::max(1U, extent.depth >> mip));
+    }
+
+    // Image arrays have an effective z extent that isn't diminished by mip level
+    if (VK_IMAGE_TYPE_3D != img->createInfo.imageType) {
+        extent.depth = img->createInfo.arrayLayers;
+    }
+
+    return extent;
+}
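+
+// Worked example: for a 16x16 VK_IMAGE_TYPE_2D image with 6 array layers and no corner-sampling flag, mip
+// level 2 yields {width = 16 >> 2 = 4, height = 4, depth = arrayLayers = 6}; requesting a mip level at or
+// beyond createInfo.mipLevels yields {0, 0, 0}.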
+
+// Test if the extent argument has all dimensions set to 0.
+static inline bool IsExtentAllZeroes(const VkExtent3D *extent) {
+    return ((extent->width == 0) && (extent->height == 0) && (extent->depth == 0));
+}
+
+// Test if the extent argument has any dimensions set to 0.
+static inline bool IsExtentSizeZero(const VkExtent3D *extent) {
+    return ((extent->width == 0) || (extent->height == 0) || (extent->depth == 0));
+}
+
+// Returns the image transfer granularity for a specific image scaled by compressed block size if necessary.
+VkExtent3D CoreChecks::GetScaledItg(const CMD_BUFFER_STATE *cb_node, const IMAGE_STATE *img) const {
+    // Default to (0, 0, 0) granularity in case we can't find the real granularity for the physical device.
+    VkExtent3D granularity = {0, 0, 0};
+    auto pPool = cb_node->command_pool.get();
+    if (pPool) {
+        granularity = GetPhysicalDeviceState()->queue_family_properties[pPool->queueFamilyIndex].minImageTransferGranularity;
+        if (FormatIsCompressed(img->createInfo.format) || FormatIsSinglePlane_422(img->createInfo.format)) {
+            auto block_size = FormatTexelBlockExtent(img->createInfo.format);
+            granularity.width *= block_size.width;
+            granularity.height *= block_size.height;
+        }
+    }
+    return granularity;
+}
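+
+// Worked example: if the queue family reports minImageTransferGranularity = {1, 1, 1} and the image uses a
+// compressed format with 4x4x1 texel blocks, the scaled granularity returned here is {4, 4, 1}; note that
+// only width and height are scaled by the block size.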
+
+// Test elements of a VkExtent3D structure against alignment constraints contained in another VkExtent3D structure
+static inline bool IsExtentAligned(const VkExtent3D *extent, const VkExtent3D *granularity) {
+    bool valid = true;
+    if ((SafeModulo(extent->depth, granularity->depth) != 0) || (SafeModulo(extent->width, granularity->width) != 0) ||
+        (SafeModulo(extent->height, granularity->height) != 0)) {
+        valid = false;
+    }
+    return valid;
+}
+
+// Check elements of a VkOffset3D structure against a queue family's Image Transfer Granularity values
+bool CoreChecks::CheckItgOffset(const CMD_BUFFER_STATE *cb_node, const VkOffset3D *offset, const VkExtent3D *granularity,
+                                const uint32_t i, const char *function, const char *member, const char *vuid) const {
+    bool skip = false;
+    VkExtent3D offset_extent = {};
+    offset_extent.width = static_cast<uint32_t>(abs(offset->x));
+    offset_extent.height = static_cast<uint32_t>(abs(offset->y));
+    offset_extent.depth = static_cast<uint32_t>(abs(offset->z));
+    if (IsExtentAllZeroes(granularity)) {
+        // If the queue family image transfer granularity is (0, 0, 0), then the offset must always be (0, 0, 0)
+        if (IsExtentAllZeroes(&offset_extent) == false) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_node->commandBuffer), vuid,
+                            "%s: pRegion[%d].%s (x=%d, y=%d, z=%d) must be (x=0, y=0, z=0) when the command buffer's queue family "
+                            "image transfer granularity is (w=0, h=0, d=0).",
+                            function, i, member, offset->x, offset->y, offset->z);
+        }
+    } else {
+        // If the queue family image transfer granularity is not (0, 0, 0), then the offset dimensions must always be even
+        // integer multiples of the image transfer granularity.
+        if (IsExtentAligned(&offset_extent, granularity) == false) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_node->commandBuffer), vuid,
+                            "%s: pRegion[%d].%s (x=%d, y=%d, z=%d) dimensions must be even integer multiples of this command "
+                            "buffer's queue family image transfer granularity (w=%d, h=%d, d=%d).",
+                            function, i, member, offset->x, offset->y, offset->z, granularity->width, granularity->height,
+                            granularity->depth);
+        }
+    }
+    return skip;
+}
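+
+// Worked example: with a scaled granularity of {4, 4, 1}, an imageOffset of (8, 4, 0) passes (every component
+// is a multiple of the corresponding granularity), while (6, 4, 0) is flagged; with a granularity of
+// (0, 0, 0) only an offset of (0, 0, 0) is accepted.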
+
+// Check elements of a VkExtent3D structure against a queue family's Image Transfer Granularity values
+bool CoreChecks::CheckItgExtent(const CMD_BUFFER_STATE *cb_node, const VkExtent3D *extent, const VkOffset3D *offset,
+                                const VkExtent3D *granularity, const VkExtent3D *subresource_extent, const VkImageType image_type,
+                                const uint32_t i, const char *function, const char *member, const char *vuid) const {
+    bool skip = false;
+    if (IsExtentAllZeroes(granularity)) {
+        // If the queue family image transfer granularity is (0, 0, 0), then the extent must always match the image
+        // subresource extent.
+        if (IsExtentEqual(extent, subresource_extent) == false) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_node->commandBuffer), vuid,
+                            "%s: pRegion[%d].%s (w=%d, h=%d, d=%d) must match the image subresource extents (w=%d, h=%d, d=%d) "
+                            "when the command buffer's queue family image transfer granularity is (w=0, h=0, d=0).",
+                            function, i, member, extent->width, extent->height, extent->depth, subresource_extent->width,
+                            subresource_extent->height, subresource_extent->depth);
+        }
+    } else {
+        // If the queue family image transfer granularity is not (0, 0, 0), then the extent dimensions must always be even
+        // integer multiples of the image transfer granularity or the offset + extent dimensions must always match the image
+        // subresource extent dimensions.
+        VkExtent3D offset_extent_sum = {};
+        offset_extent_sum.width = static_cast<uint32_t>(abs(offset->x)) + extent->width;
+        offset_extent_sum.height = static_cast<uint32_t>(abs(offset->y)) + extent->height;
+        offset_extent_sum.depth = static_cast<uint32_t>(abs(offset->z)) + extent->depth;
+        bool x_ok = true;
+        bool y_ok = true;
+        bool z_ok = true;
+        switch (image_type) {
+            case VK_IMAGE_TYPE_3D:
+                z_ok = ((0 == SafeModulo(extent->depth, granularity->depth)) ||
+                        (subresource_extent->depth == offset_extent_sum.depth));
+                // fall through
+            case VK_IMAGE_TYPE_2D:
+                y_ok = ((0 == SafeModulo(extent->height, granularity->height)) ||
+                        (subresource_extent->height == offset_extent_sum.height));
+                // fall through
+            case VK_IMAGE_TYPE_1D:
+                x_ok = ((0 == SafeModulo(extent->width, granularity->width)) ||
+                        (subresource_extent->width == offset_extent_sum.width));
+                break;
+            default:
+                // Unrecognized or new IMAGE_TYPE enums will be caught in parameter_validation
+                assert(false);
+        }
+        if (!(x_ok && y_ok && z_ok)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_node->commandBuffer), vuid,
+                            "%s: pRegion[%d].%s (w=%d, h=%d, d=%d) dimensions must be even integer multiples of this command "
+                            "buffer's queue family image transfer granularity (w=%d, h=%d, d=%d) or offset (x=%d, y=%d, z=%d) + "
+                            "extent (w=%d, h=%d, d=%d) must match the image subresource extents (w=%d, h=%d, d=%d).",
+                            function, i, member, extent->width, extent->height, extent->depth, granularity->width,
+                            granularity->height, granularity->depth, offset->x, offset->y, offset->z, extent->width, extent->height,
+                            extent->depth, subresource_extent->width, subresource_extent->height, subresource_extent->depth);
+        }
+    }
+    return skip;
+}
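+
+// Worked example: with granularity {4, 4, 1} and a subresource extent of {10, 10, 1} on a 2D image, a region
+// with offset (8, 0, 0) and extent {2, 10, 1} passes because each extent component is either a granularity
+// multiple or reaches the subresource edge (8 + 2 == 10); the same extent at offset (0, 0, 0) is flagged
+// because 2 is neither a multiple of 4 nor equal to the subresource width.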
+
+bool CoreChecks::ValidateImageMipLevel(const CMD_BUFFER_STATE *cb_node, const IMAGE_STATE *img, uint32_t mip_level,
+                                       const uint32_t i, const char *function, const char *member, const char *vuid) const {
+    bool skip = false;
+    if (mip_level >= img->createInfo.mipLevels) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(cb_node->commandBuffer), vuid,
+                        "In %s, pRegions[%u].%s.mipLevel is %u, but provided %s has %u mip levels.", function, i, member, mip_level,
+                        report_data->FormatHandle(img->image).c_str(), img->createInfo.mipLevels);
+    }
+    return skip;
+}
+
+bool CoreChecks::ValidateImageArrayLayerRange(const CMD_BUFFER_STATE *cb_node, const IMAGE_STATE *img, const uint32_t base_layer,
+                                              const uint32_t layer_count, const uint32_t i, const char *function,
+                                              const char *member, const char *vuid) const {
+    bool skip = false;
+    if (base_layer >= img->createInfo.arrayLayers || layer_count > img->createInfo.arrayLayers ||
+        (base_layer + layer_count) > img->createInfo.arrayLayers) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(cb_node->commandBuffer), vuid,
+                        "In %s, pRegions[%u].%s.baseArrayLayer is %u and .layerCount is "
+                        "%u, but provided %s has %u array layers.",
+                        function, i, member, base_layer, layer_count, report_data->FormatHandle(img->image).c_str(),
+                        img->createInfo.arrayLayers);
+    }
+    return skip;
+}
+
+// Check valid usage Image Transfer Granularity requirements for elements of a VkBufferImageCopy structure
+bool CoreChecks::ValidateCopyBufferImageTransferGranularityRequirements(const CMD_BUFFER_STATE *cb_node, const IMAGE_STATE *img,
+                                                                        const VkBufferImageCopy *region, const uint32_t i,
+                                                                        const char *function, const char *vuid) const {
+    bool skip = false;
+    VkExtent3D granularity = GetScaledItg(cb_node, img);
+    skip |= CheckItgOffset(cb_node, &region->imageOffset, &granularity, i, function, "imageOffset", vuid);
+    VkExtent3D subresource_extent = GetImageSubresourceExtent(img, &region->imageSubresource);
+    skip |= CheckItgExtent(cb_node, &region->imageExtent, &region->imageOffset, &granularity, &subresource_extent,
+                           img->createInfo.imageType, i, function, "imageExtent", vuid);
+    return skip;
+}
+
+// Check valid usage Image Transfer Granularity requirements for elements of a VkImageCopy structure
+bool CoreChecks::ValidateCopyImageTransferGranularityRequirements(const CMD_BUFFER_STATE *cb_node, const IMAGE_STATE *src_img,
+                                                                  const IMAGE_STATE *dst_img, const VkImageCopy *region,
+                                                                  const uint32_t i, const char *function) const {
+    bool skip = false;
+    // Source image checks
+    VkExtent3D granularity = GetScaledItg(cb_node, src_img);
+    skip |=
+        CheckItgOffset(cb_node, &region->srcOffset, &granularity, i, function, "srcOffset", "VUID-vkCmdCopyImage-srcOffset-01783");
+    VkExtent3D subresource_extent = GetImageSubresourceExtent(src_img, &region->srcSubresource);
+    const VkExtent3D extent = region->extent;
+    skip |= CheckItgExtent(cb_node, &extent, &region->srcOffset, &granularity, &subresource_extent, src_img->createInfo.imageType,
+                           i, function, "extent", "VUID-vkCmdCopyImage-srcOffset-01783");
+
+    // Destination image checks
+    granularity = GetScaledItg(cb_node, dst_img);
+    skip |=
+        CheckItgOffset(cb_node, &region->dstOffset, &granularity, i, function, "dstOffset", "VUID-vkCmdCopyImage-dstOffset-01784");
+    // Adjust dest extent, if necessary
+    const VkExtent3D dest_effective_extent =
+        GetAdjustedDestImageExtent(src_img->createInfo.format, dst_img->createInfo.format, extent);
+    subresource_extent = GetImageSubresourceExtent(dst_img, &region->dstSubresource);
+    skip |= CheckItgExtent(cb_node, &dest_effective_extent, &region->dstOffset, &granularity, &subresource_extent,
+                           dst_img->createInfo.imageType, i, function, "extent", "VUID-vkCmdCopyImage-dstOffset-01784");
+    return skip;
+}
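+
+// Note: for vkCmdCopyImage the same region.extent is checked twice above, once in source texels against the
+// source image's scaled granularity and once, after GetAdjustedDestImageExtent(), against the destination's.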
+
+// Validate contents of a VkImageCopy struct
+bool CoreChecks::ValidateImageCopyData(const uint32_t regionCount, const VkImageCopy *ic_regions, const IMAGE_STATE *src_state,
+                                       const IMAGE_STATE *dst_state) const {
+    bool skip = false;
+
+    for (uint32_t i = 0; i < regionCount; i++) {
+        const VkImageCopy region = ic_regions[i];
+
+        // For comp<->uncomp copies, the copy extent for the dest image must be adjusted
+        const VkExtent3D src_copy_extent = region.extent;
+        const VkExtent3D dst_copy_extent =
+            GetAdjustedDestImageExtent(src_state->createInfo.format, dst_state->createInfo.format, region.extent);
+
+        bool slice_override = false;
+        uint32_t depth_slices = 0;
+
+        // Special case for copying between a 1D/2D array and a 3D image
+        // TBD: This seems like the only way to reconcile 3 mutually-exclusive VU checks for 2D/3D copies. Heads up.
+        if ((VK_IMAGE_TYPE_3D == src_state->createInfo.imageType) && (VK_IMAGE_TYPE_3D != dst_state->createInfo.imageType)) {
+            depth_slices = region.dstSubresource.layerCount;  // Slice count from 2D subresource
+            slice_override = (depth_slices != 1);
+        } else if ((VK_IMAGE_TYPE_3D == dst_state->createInfo.imageType) && (VK_IMAGE_TYPE_3D != src_state->createInfo.imageType)) {
+            depth_slices = region.srcSubresource.layerCount;  // Slice count from 2D subresource
+            slice_override = (depth_slices != 1);
+        }
+
+        // Do all checks on source image
+        //
+        if (src_state->createInfo.imageType == VK_IMAGE_TYPE_1D) {
+            if ((0 != region.srcOffset.y) || (1 != src_copy_extent.height)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                HandleToUint64(src_state->image), "VUID-VkImageCopy-srcImage-00146",
+                                "vkCmdCopyImage(): pRegion[%d] srcOffset.y is %d and extent.height is %d. For 1D images these must "
+                                "be 0 and 1, respectively.",
+                                i, region.srcOffset.y, src_copy_extent.height);
+            }
+        }
+
+        // VUID-VkImageCopy-srcImage-01785
+        if ((src_state->createInfo.imageType == VK_IMAGE_TYPE_1D) && ((0 != region.srcOffset.z) || (1 != src_copy_extent.depth))) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(src_state->image), "VUID-VkImageCopy-srcImage-01785",
+                            "vkCmdCopyImage(): pRegion[%d] srcOffset.z is %d and extent.depth is %d. For 1D images "
+                            "these must be 0 and 1, respectively.",
+                            i, region.srcOffset.z, src_copy_extent.depth);
+        }
+
+        // VUID-VkImageCopy-srcImage-01787
+        if ((src_state->createInfo.imageType == VK_IMAGE_TYPE_2D) && (0 != region.srcOffset.z)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(src_state->image), "VUID-VkImageCopy-srcImage-01787",
+                            "vkCmdCopyImage(): pRegion[%d] srcOffset.z is %d. For 2D images the z-offset must be 0.", i,
+                            region.srcOffset.z);
+        }
+
+        if (device_extensions.vk_khr_maintenance1) {
+            if (src_state->createInfo.imageType == VK_IMAGE_TYPE_3D) {
+                if ((0 != region.srcSubresource.baseArrayLayer) || (1 != region.srcSubresource.layerCount)) {
+                    skip |=
+                        log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                HandleToUint64(src_state->image), "VUID-VkImageCopy-srcImage-00141",
+                                "vkCmdCopyImage(): pRegion[%d] srcSubresource.baseArrayLayer is %d and srcSubresource.layerCount "
+                                "is %d. For VK_IMAGE_TYPE_3D images these must be 0 and 1, respectively.",
+                                i, region.srcSubresource.baseArrayLayer, region.srcSubresource.layerCount);
+                }
+            }
+        } else {  // Pre maint 1
+            if (src_state->createInfo.imageType == VK_IMAGE_TYPE_3D || dst_state->createInfo.imageType == VK_IMAGE_TYPE_3D) {
+                if ((0 != region.srcSubresource.baseArrayLayer) || (1 != region.srcSubresource.layerCount)) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                    HandleToUint64(src_state->image), "VUID-VkImageCopy-srcImage-00141",
+                                    "vkCmdCopyImage(): pRegion[%d] srcSubresource.baseArrayLayer is %d and "
+                                    "srcSubresource.layerCount is %d. For copies with either source or dest of type "
+                                    "VK_IMAGE_TYPE_3D, these must be 0 and 1, respectively.",
+                                    i, region.srcSubresource.baseArrayLayer, region.srcSubresource.layerCount);
+                }
+            }
+        }
+
+        // Source checks that apply only to compressed images (or to _422 images if ycbcr enabled)
+        bool ext_ycbcr = IsExtEnabled(device_extensions.vk_khr_sampler_ycbcr_conversion);
+        if (FormatIsCompressed(src_state->createInfo.format) ||
+            (ext_ycbcr && FormatIsSinglePlane_422(src_state->createInfo.format))) {
+            const VkExtent3D block_size = FormatTexelBlockExtent(src_state->createInfo.format);
+            //  image offsets must be multiples of block dimensions
+            if ((SafeModulo(region.srcOffset.x, block_size.width) != 0) ||
+                (SafeModulo(region.srcOffset.y, block_size.height) != 0) ||
+                (SafeModulo(region.srcOffset.z, block_size.depth) != 0)) {
+                const char *vuid = ext_ycbcr ? "VUID-VkImageCopy-srcImage-01727" : "VUID-VkImageCopy-srcOffset-00157";
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                HandleToUint64(src_state->image), vuid,
+                                "vkCmdCopyImage(): pRegion[%d] srcOffset (%d, %d) must be multiples of the compressed image's "
+                                "texel width & height (%d, %d).",
+                                i, region.srcOffset.x, region.srcOffset.y, block_size.width, block_size.height);
+            }
+
+            const VkExtent3D mip_extent = GetImageSubresourceExtent(src_state, &(region.srcSubresource));
+            if ((SafeModulo(src_copy_extent.width, block_size.width) != 0) &&
+                (src_copy_extent.width + region.srcOffset.x != mip_extent.width)) {
+                const char *vuid = ext_ycbcr ? "VUID-VkImageCopy-srcImage-01728" : "VUID-VkImageCopy-extent-00158";
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(src_state->image), vuid,
+                            "vkCmdCopyImage(): pRegion[%d] extent width (%d) must be a multiple of the compressed texture block "
+                            "width (%d), or when added to srcOffset.x (%d) must equal the image subresource width (%d).",
+                            i, src_copy_extent.width, block_size.width, region.srcOffset.x, mip_extent.width);
+            }
+
+            // Extent height must be a multiple of block height, or extent+offset height must equal subresource height
+            if ((SafeModulo(src_copy_extent.height, block_size.height) != 0) &&
+                (src_copy_extent.height + region.srcOffset.y != mip_extent.height)) {
+                const char *vuid = ext_ycbcr ? "VUID-VkImageCopy-srcImage-01729" : "VUID-VkImageCopy-extent-00159";
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(src_state->image), vuid,
+                            "vkCmdCopyImage(): pRegion[%d] extent height (%d) must be a multiple of the compressed texture block "
+                            "height (%d), or when added to srcOffset.y (%d) must equal the image subresource height (%d).",
+                            i, src_copy_extent.height, block_size.height, region.srcOffset.y, mip_extent.height);
+            }
+
+            // Extent depth must be a multiple of block depth, or extent+offset depth must equal subresource depth
+            uint32_t copy_depth = (slice_override ? depth_slices : src_copy_extent.depth);
+            if ((SafeModulo(copy_depth, block_size.depth) != 0) && (copy_depth + region.srcOffset.z != mip_extent.depth)) {
+                const char *vuid = ext_ycbcr ? "VUID-VkImageCopy-srcImage-01730" : "VUID-VkImageCopy-extent-00160";
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(src_state->image), vuid,
+                            "vkCmdCopyImage(): pRegion[%d] extent depth (%d) must be a multiple of the compressed texture block "
+                            "depth (%d), or when added to srcOffset.z (%d) must equal the image subresource depth (%d).",
+                            i, src_copy_extent.depth, block_size.depth, region.srcOffset.z, mip_extent.depth);
+            }
+        }  // Compressed
+
+        // Do all checks on dest image
+        //
+        if (dst_state->createInfo.imageType == VK_IMAGE_TYPE_1D) {
+            if ((0 != region.dstOffset.y) || (1 != dst_copy_extent.height)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                HandleToUint64(dst_state->image), "VUID-VkImageCopy-dstImage-00152",
+                                "vkCmdCopyImage(): pRegion[%d] dstOffset.y is %d and dst_copy_extent.height is %d. For 1D images "
+                                "these must be 0 and 1, respectively.",
+                                i, region.dstOffset.y, dst_copy_extent.height);
+            }
+        }
+
+        // VUID-VkImageCopy-dstImage-01786
+        if ((dst_state->createInfo.imageType == VK_IMAGE_TYPE_1D) && ((0 != region.dstOffset.z) || (1 != dst_copy_extent.depth))) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(dst_state->image), "VUID-VkImageCopy-dstImage-01786",
+                            "vkCmdCopyImage(): pRegion[%d] dstOffset.z is %d and extent.depth is %d. For 1D images these must be 0 "
+                            "and 1, respectively.",
+                            i, region.dstOffset.z, dst_copy_extent.depth);
+        }
+
+        // VUID-VkImageCopy-dstImage-01788
+        if ((dst_state->createInfo.imageType == VK_IMAGE_TYPE_2D) && (0 != region.dstOffset.z)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(dst_state->image), "VUID-VkImageCopy-dstImage-01788",
+                            "vkCmdCopyImage(): pRegion[%d] dstOffset.z is %d. For 2D images the z-offset must be 0.", i,
+                            region.dstOffset.z);
+        }
+
+        if (dst_state->createInfo.imageType == VK_IMAGE_TYPE_3D) {
+            if ((0 != region.dstSubresource.baseArrayLayer) || (1 != region.dstSubresource.layerCount)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                HandleToUint64(dst_state->image), "VUID-VkImageCopy-srcImage-00141",
+                                "vkCmdCopyImage(): pRegion[%d] dstSubresource.baseArrayLayer is %d and dstSubresource.layerCount "
+                                "is %d. For VK_IMAGE_TYPE_3D images these must be 0 and 1, respectively.",
+                                i, region.dstSubresource.baseArrayLayer, region.dstSubresource.layerCount);
+            }
+        }
+        // VU 01199 changed with maintenance1
+        if (device_extensions.vk_khr_maintenance1) {
+            if (dst_state->createInfo.imageType == VK_IMAGE_TYPE_3D) {
+                if ((0 != region.dstSubresource.baseArrayLayer) || (1 != region.dstSubresource.layerCount)) {
+                    skip |=
+                        log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                HandleToUint64(dst_state->image), "VUID-VkImageCopy-srcImage-00141",
+                                "vkCmdCopyImage(): pRegion[%d] dstSubresource.baseArrayLayer is %d and dstSubresource.layerCount "
+                                "is %d. For VK_IMAGE_TYPE_3D images these must be 0 and 1, respectively.",
+                                i, region.dstSubresource.baseArrayLayer, region.dstSubresource.layerCount);
+                }
+            }
+        } else {  // Pre maint 1
+            if (src_state->createInfo.imageType == VK_IMAGE_TYPE_3D || dst_state->createInfo.imageType == VK_IMAGE_TYPE_3D) {
+                if ((0 != region.dstSubresource.baseArrayLayer) || (1 != region.dstSubresource.layerCount)) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                    HandleToUint64(dst_state->image), "VUID-VkImageCopy-srcImage-00141",
+                                    "vkCmdCopyImage(): pRegion[%d] dstSubresource.baseArrayLayer is %d and "
+                                    "dstSubresource.layerCount is %d. For copies with either source or dest of type "
+                                    "VK_IMAGE_TYPE_3D, these must be 0 and 1, respectively.",
+                                    i, region.dstSubresource.baseArrayLayer, region.dstSubresource.layerCount);
+                }
+            }
+        }
+
+        // Dest checks that apply only to compressed images (or to _422 images if ycbcr enabled)
+        if (FormatIsCompressed(dst_state->createInfo.format) ||
+            (ext_ycbcr && FormatIsSinglePlane_422(dst_state->createInfo.format))) {
+            const VkExtent3D block_size = FormatTexelBlockExtent(dst_state->createInfo.format);
+
+            //  image offsets must be multiples of block dimensions
+            if ((SafeModulo(region.dstOffset.x, block_size.width) != 0) ||
+                (SafeModulo(region.dstOffset.y, block_size.height) != 0) ||
+                (SafeModulo(region.dstOffset.z, block_size.depth) != 0)) {
+                const char *vuid = ext_ycbcr ? "VUID-VkImageCopy-dstImage-01731" : "VUID-VkImageCopy-dstOffset-00162";
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                HandleToUint64(dst_state->image), vuid,
+                                "vkCmdCopyImage(): pRegion[%d] dstOffset (%d, %d) must be multiples of the compressed image's "
+                                "texel width & height (%d, %d).",
+                                i, region.dstOffset.x, region.dstOffset.y, block_size.width, block_size.height);
+            }
+
+            const VkExtent3D mip_extent = GetImageSubresourceExtent(dst_state, &(region.dstSubresource));
+            if ((SafeModulo(dst_copy_extent.width, block_size.width) != 0) &&
+                (dst_copy_extent.width + region.dstOffset.x != mip_extent.width)) {
+                const char *vuid = ext_ycbcr ? "VUID-VkImageCopy-dstImage-01732" : "VUID-VkImageCopy-extent-00163";
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(dst_state->image), vuid,
+                            "vkCmdCopyImage(): pRegion[%d] dst_copy_extent width (%d) must be a multiple of the compressed texture "
+                            "block width (%d), or when added to dstOffset.x (%d) must equal the image subresource width (%d).",
+                            i, dst_copy_extent.width, block_size.width, region.dstOffset.x, mip_extent.width);
+            }
+
+            // Extent height must be a multiple of block height, or dst_copy_extent+offset height must equal subresource height
+            if ((SafeModulo(dst_copy_extent.height, block_size.height) != 0) &&
+                (dst_copy_extent.height + region.dstOffset.y != mip_extent.height)) {
+                const char *vuid = ext_ycbcr ? "VUID-VkImageCopy-dstImage-01733" : "VUID-VkImageCopy-extent-00164";
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                HandleToUint64(dst_state->image), vuid,
+                                "vkCmdCopyImage(): pRegion[%d] dst_copy_extent height (%d) must be a multiple of the compressed "
+                                "texture block height (%d), or when added to dstOffset.y (%d) must equal the image subresource "
+                                "height (%d).",
+                                i, dst_copy_extent.height, block_size.height, region.dstOffset.y, mip_extent.height);
+            }
+
+            // Extent depth must be a multiple of block depth, or dst_copy_extent+offset depth must equal subresource depth
+            uint32_t copy_depth = (slice_override ? depth_slices : dst_copy_extent.depth);
+            if ((SafeModulo(copy_depth, block_size.depth) != 0) && (copy_depth + region.dstOffset.z != mip_extent.depth)) {
+                const char *vuid = ext_ycbcr ? "VUID-VkImageCopy-dstImage-01734" : "VUID-VkImageCopy-extent-00165";
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(dst_state->image), vuid,
+                            "vkCmdCopyImage(): pRegion[%d] dst_copy_extent depth (%d) must be a multiple of the compressed texture "
+                            "block depth (%d), or when added to dstOffset.z (%d) must equal the image subresource depth (%d).",
+                            i, dst_copy_extent.depth, block_size.depth, region.dstOffset.z, mip_extent.depth);
+            }
+        }  // Compressed
+    }
+    return skip;
+}
+
+// vkCmdCopyImage checks that only apply if the multiplane extension is enabled
+bool CoreChecks::CopyImageMultiplaneValidation(VkCommandBuffer command_buffer, const IMAGE_STATE *src_image_state,
+                                               const IMAGE_STATE *dst_image_state, const VkImageCopy region) const {
+    bool skip = false;
+
+    // Neither image is multiplane
+    if ((!FormatIsMultiplane(src_image_state->createInfo.format)) && (!FormatIsMultiplane(dst_image_state->createInfo.format))) {
+        // If neither image is multi-plane the aspectMask member of src and dst must match
+        if (region.srcSubresource.aspectMask != region.dstSubresource.aspectMask) {
+            std::stringstream ss;
+            ss << "vkCmdCopyImage(): Copy between non-multiplane images with differing aspectMasks ( 0x" << std::hex
+               << region.srcSubresource.aspectMask << " and 0x" << region.dstSubresource.aspectMask << " )";
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(command_buffer), "VUID-VkImageCopy-srcImage-01551", "%s.", ss.str().c_str());
+        }
+    } else {
+        // Source image multiplane checks
+        uint32_t planes = FormatPlaneCount(src_image_state->createInfo.format);
+        VkImageAspectFlags aspect = region.srcSubresource.aspectMask;
+        if ((2 == planes) && (aspect != VK_IMAGE_ASPECT_PLANE_0_BIT_KHR) && (aspect != VK_IMAGE_ASPECT_PLANE_1_BIT_KHR)) {
+            std::stringstream ss;
+            ss << "vkCmdCopyImage(): Source image aspect mask (0x" << std::hex << aspect << ") is invalid for 2-plane format";
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(command_buffer), "VUID-VkImageCopy-srcImage-01552", "%s.", ss.str().c_str());
+        }
+        if ((3 == planes) && (aspect != VK_IMAGE_ASPECT_PLANE_0_BIT_KHR) && (aspect != VK_IMAGE_ASPECT_PLANE_1_BIT_KHR) &&
+            (aspect != VK_IMAGE_ASPECT_PLANE_2_BIT_KHR)) {
+            std::stringstream ss;
+            ss << "vkCmdCopyImage(): Source image aspect mask (0x" << std::hex << aspect << ") is invalid for 3-plane format";
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(command_buffer), "VUID-VkImageCopy-srcImage-01553", "%s.", ss.str().c_str());
+        }
+        // Single-plane to multi-plane
+        if ((!FormatIsMultiplane(src_image_state->createInfo.format)) && (FormatIsMultiplane(dst_image_state->createInfo.format)) &&
+            (VK_IMAGE_ASPECT_COLOR_BIT != aspect)) {
+            std::stringstream ss;
+            ss << "vkCmdCopyImage(): Source image aspect mask (0x" << std::hex << aspect << ") is not VK_IMAGE_ASPECT_COLOR_BIT";
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(command_buffer), "VUID-VkImageCopy-dstImage-01557", "%s.", ss.str().c_str());
+        }
+
+        // Dest image multiplane checks
+        planes = FormatPlaneCount(dst_image_state->createInfo.format);
+        aspect = region.dstSubresource.aspectMask;
+        if ((2 == planes) && (aspect != VK_IMAGE_ASPECT_PLANE_0_BIT_KHR) && (aspect != VK_IMAGE_ASPECT_PLANE_1_BIT_KHR)) {
+            std::stringstream ss;
+            ss << "vkCmdCopyImage(): Dest image aspect mask (0x" << std::hex << aspect << ") is invalid for 2-plane format";
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(command_buffer), "VUID-VkImageCopy-dstImage-01554", "%s.", ss.str().c_str());
+        }
+        if ((3 == planes) && (aspect != VK_IMAGE_ASPECT_PLANE_0_BIT_KHR) && (aspect != VK_IMAGE_ASPECT_PLANE_1_BIT_KHR) &&
+            (aspect != VK_IMAGE_ASPECT_PLANE_2_BIT_KHR)) {
+            std::stringstream ss;
+            ss << "vkCmdCopyImage(): Dest image aspect mask (0x" << std::hex << aspect << ") is invalid for 3-plane format";
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(command_buffer), "VUID-VkImageCopy-dstImage-01555", "%s.", ss.str().c_str());
+        }
+        // Multi-plane to single-plane
+        if ((FormatIsMultiplane(src_image_state->createInfo.format)) && (!FormatIsMultiplane(dst_image_state->createInfo.format)) &&
+            (VK_IMAGE_ASPECT_COLOR_BIT != aspect)) {
+            std::stringstream ss;
+            ss << "vkCmdCopyImage(): Dest image aspect mask (0x" << std::hex << aspect << ") is not VK_IMAGE_ASPECT_COLOR_BIT";
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(command_buffer), "VUID-VkImageCopy-srcImage-01556", "%s.", ss.str().c_str());
+        }
+    }
+
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
+                                             VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
+                                             const VkImageCopy *pRegions) const {
+    const auto *cb_node = GetCBState(commandBuffer);
+    const auto *src_image_state = GetImageState(srcImage);
+    const auto *dst_image_state = GetImageState(dstImage);
+    bool skip = false;
+
+    skip = ValidateImageCopyData(regionCount, pRegions, src_image_state, dst_image_state);
+
+    VkCommandBuffer command_buffer = cb_node->commandBuffer;
+
+    for (uint32_t i = 0; i < regionCount; i++) {
+        const VkImageCopy region = pRegions[i];
+
+        // For comp/uncomp copies, the copy extent for the dest image must be adjusted
+        VkExtent3D src_copy_extent = region.extent;
+        VkExtent3D dst_copy_extent =
+            GetAdjustedDestImageExtent(src_image_state->createInfo.format, dst_image_state->createInfo.format, region.extent);
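+        // Illustrative note: per the compressed/uncompressed copy rules the extent is expressed in source texels,
+        // so the destination extent is rescaled by the texel block size here (e.g. a 64x64 region copied from a
+        // BC1 source, whose blocks are 4x4, covers a 16x16 texel region in an uncompressed destination).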
+
+        bool slice_override = false;
+        uint32_t depth_slices = 0;
+
+        // Special case for copying between a 1D/2D array and a 3D image
+        // TBD: This seems like the only way to reconcile 3 mutually-exclusive VU checks for 2D/3D copies. Heads up.
+        if ((VK_IMAGE_TYPE_3D == src_image_state->createInfo.imageType) &&
+            (VK_IMAGE_TYPE_3D != dst_image_state->createInfo.imageType)) {
+            depth_slices = region.dstSubresource.layerCount;  // Slice count from 2D subresource
+            slice_override = (depth_slices != 1);
+        } else if ((VK_IMAGE_TYPE_3D == dst_image_state->createInfo.imageType) &&
+                   (VK_IMAGE_TYPE_3D != src_image_state->createInfo.imageType)) {
+            depth_slices = region.srcSubresource.layerCount;  // Slice count from 2D subresource
+            slice_override = (depth_slices != 1);
+        }
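+        // Example: copying a 3D image into a 2D array image with dstSubresource.layerCount == 8 takes the first
+        // branch above; those 8 layers stand in for the copy depth in the extent checks further below.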
+
+        skip |= ValidateImageSubresourceLayers(cb_node, &region.srcSubresource, "vkCmdCopyImage", "srcSubresource", i);
+        skip |= ValidateImageSubresourceLayers(cb_node, &region.dstSubresource, "vkCmdCopyImage", "dstSubresource", i);
+        skip |= ValidateImageMipLevel(cb_node, src_image_state, region.srcSubresource.mipLevel, i, "vkCmdCopyImage",
+                                      "srcSubresource", "VUID-vkCmdCopyImage-srcSubresource-01696");
+        skip |= ValidateImageMipLevel(cb_node, dst_image_state, region.dstSubresource.mipLevel, i, "vkCmdCopyImage",
+                                      "dstSubresource", "VUID-vkCmdCopyImage-dstSubresource-01697");
+        skip |= ValidateImageArrayLayerRange(cb_node, src_image_state, region.srcSubresource.baseArrayLayer,
+                                             region.srcSubresource.layerCount, i, "vkCmdCopyImage", "srcSubresource",
+                                             "VUID-vkCmdCopyImage-srcSubresource-01698");
+        skip |= ValidateImageArrayLayerRange(cb_node, dst_image_state, region.dstSubresource.baseArrayLayer,
+                                             region.dstSubresource.layerCount, i, "vkCmdCopyImage", "dstSubresource",
+                                             "VUID-vkCmdCopyImage-dstSubresource-01699");
+
+        if (device_extensions.vk_khr_maintenance1) {
+            // No chance of mismatch if we're overriding depth slice count
+            if (!slice_override) {
+                // The number of depth slices in srcSubresource and dstSubresource must match
+                // Depth comes from layerCount for 1D,2D resources, from extent.depth for 3D
+                uint32_t src_slices =
+                    (VK_IMAGE_TYPE_3D == src_image_state->createInfo.imageType ? src_copy_extent.depth
+                                                                               : region.srcSubresource.layerCount);
+                uint32_t dst_slices =
+                    (VK_IMAGE_TYPE_3D == dst_image_state->createInfo.imageType ? dst_copy_extent.depth
+                                                                               : region.dstSubresource.layerCount);
+                if (src_slices != dst_slices) {
+                    std::stringstream ss;
+                    ss << "vkCmdCopyImage(): number of depth slices in source and destination subresources for pRegions[" << i
+                       << "] do not match";
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                    HandleToUint64(command_buffer), "VUID-VkImageCopy-extent-00140", "%s.", ss.str().c_str());
+                }
+            }
+        } else {
+            // For each region the layerCount member of srcSubresource and dstSubresource must match
+            if (region.srcSubresource.layerCount != region.dstSubresource.layerCount) {
+                std::stringstream ss;
+                ss << "vkCmdCopyImage(): number of layers in source and destination subresources for pRegions[" << i
+                   << "] do not match";
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(command_buffer), "VUID-VkImageCopy-extent-00140", "%s.", ss.str().c_str());
+            }
+        }
+
+        // Do multiplane-specific checks, if extension enabled
+        if (device_extensions.vk_khr_sampler_ycbcr_conversion) {
+            skip |= CopyImageMultiplaneValidation(command_buffer, src_image_state, dst_image_state, region);
+        }
+
+        if (!device_extensions.vk_khr_sampler_ycbcr_conversion) {
+            // not multi-plane, the aspectMask member of srcSubresource and dstSubresource must match
+            if (region.srcSubresource.aspectMask != region.dstSubresource.aspectMask) {
+                char const str[] = "vkCmdCopyImage(): Src and dest aspectMasks for each region must match";
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(command_buffer), "VUID-VkImageCopy-aspectMask-00137", "%s.", str);
+            }
+        }
+
+        // For each region, the aspectMask member of srcSubresource must be present in the source image
+        if (!VerifyAspectsPresent(region.srcSubresource.aspectMask, src_image_state->createInfo.format)) {
+            std::stringstream ss;
+            ss << "vkCmdCopyImage(): pRegion[" << i
+               << "] srcSubresource.aspectMask cannot specify aspects not present in source image";
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(command_buffer), "VUID-VkImageCopy-aspectMask-00142", "%s.", ss.str().c_str());
+        }
+
+        // For each region, the aspectMask member of dstSubresource must be present in the destination image
+        if (!VerifyAspectsPresent(region.dstSubresource.aspectMask, dst_image_state->createInfo.format)) {
+            std::stringstream ss;
+            ss << "vkCmdCopyImage(): pRegion[" << i
+               << "] dstSubresource.aspectMask cannot specify aspects not present in dest image";
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(command_buffer), "VUID-VkImageCopy-aspectMask-00143", "%s.", ss.str().c_str());
+        }
+
+        // Check region extents for 1D-1D, 2D-2D, and 3D-3D copies
+        if (src_image_state->createInfo.imageType == dst_image_state->createInfo.imageType) {
+            // The source region specified by a given element of regions must be a region that is contained within srcImage
+            VkExtent3D img_extent = GetImageSubresourceExtent(src_image_state, &(region.srcSubresource));
+            if (0 != ExceedsBounds(&region.srcOffset, &src_copy_extent, &img_extent)) {
+                std::stringstream ss;
+                ss << "vkCmdCopyImage(): Source pRegion[" << i << "] with mipLevel [ " << region.srcSubresource.mipLevel
+                   << " ], offset [ " << region.srcOffset.x << ", " << region.srcOffset.y << ", " << region.srcOffset.z
+                   << " ], extent [ " << src_copy_extent.width << ", " << src_copy_extent.height << ", " << src_copy_extent.depth
+                   << " ] exceeds the source image dimensions";
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(command_buffer), "VUID-vkCmdCopyImage-pRegions-00122", "%s.", ss.str().c_str());
+            }
+
+            // The destination region specified by a given element of regions must be a region that is contained within dst_image
+            img_extent = GetImageSubresourceExtent(dst_image_state, &(region.dstSubresource));
+            if (0 != ExceedsBounds(&region.dstOffset, &dst_copy_extent, &img_extent)) {
+                std::stringstream ss;
+                ss << "vkCmdCopyImage(): Dest pRegion[" << i << "] with mipLevel [ " << region.dstSubresource.mipLevel
+                   << " ], offset [ " << region.dstOffset.x << ", " << region.dstOffset.y << ", " << region.dstOffset.z
+                   << " ], extent [ " << dst_copy_extent.width << ", " << dst_copy_extent.height << ", " << dst_copy_extent.depth
+                   << " ] exceeds the destination image dimensions";
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(command_buffer), "VUID-vkCmdCopyImage-pRegions-00123", "%s.", ss.str().c_str());
+            }
+        }
+
+        // Each dimension offset + extent limits must fall within the image subresource extent
+        VkExtent3D subresource_extent = GetImageSubresourceExtent(src_image_state, &(region.srcSubresource));
+        if (slice_override) src_copy_extent.depth = depth_slices;
+        uint32_t extent_check = ExceedsBounds(&(region.srcOffset), &src_copy_extent, &subresource_extent);
+        if (extent_check & x_bit) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(command_buffer), "VUID-VkImageCopy-srcOffset-00144",
+                        "vkCmdCopyImage(): Source image pRegion %1d x-dimension offset [%1d] + extent [%1d] exceeds subResource "
+                        "width [%1d].",
+                        i, region.srcOffset.x, src_copy_extent.width, subresource_extent.width);
+        }
+
+        if (extent_check & y_bit) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(command_buffer), "VUID-VkImageCopy-srcOffset-00145",
+                        "vkCmdCopyImage(): Source image pRegion %1d y-dimension offset [%1d] + extent [%1d] exceeds subResource "
+                        "height [%1d].",
+                        i, region.srcOffset.y, src_copy_extent.height, subresource_extent.height);
+        }
+        if (extent_check & z_bit) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(command_buffer), "VUID-VkImageCopy-srcOffset-00147",
+                        "vkCmdCopyImage(): Source image pRegion %1d z-dimension offset [%1d] + extent [%1d] exceeds subResource "
+                        "depth [%1d].",
+                        i, region.srcOffset.z, src_copy_extent.depth, subresource_extent.depth);
+        }
+
+        // Adjust dest extent if necessary
+        subresource_extent = GetImageSubresourceExtent(dst_image_state, &(region.dstSubresource));
+        if (slice_override) dst_copy_extent.depth = depth_slices;
+
+        extent_check = ExceedsBounds(&(region.dstOffset), &dst_copy_extent, &subresource_extent);
+        if (extent_check & x_bit) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(command_buffer), "VUID-VkImageCopy-dstOffset-00150",
+                            "vkCmdCopyImage(): Dest image pRegion %1d x-dimension offset [%1d] + extent [%1d] exceeds subResource "
+                            "width [%1d].",
+                            i, region.dstOffset.x, dst_copy_extent.width, subresource_extent.width);
+        }
+        if (extent_check & y_bit) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(command_buffer), "VUID-VkImageCopy-dstOffset-00151",
+                            "vkCmdCopyImage(): Dest image pRegion %1d y-dimension offset [%1d] + extent [%1d] exceeds subResource "
+                            "height [%1d].",
+                            i, region.dstOffset.y, dst_copy_extent.height, subresource_extent.height);
+        }
+        if (extent_check & z_bit) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(command_buffer), "VUID-VkImageCopy-dstOffset-00153",
+                            "vkCmdCopyImage(): Dest image pRegion %1d z-dimension offset [%1d] + extent [%1d] exceeds subResource "
+                            "depth [%1d].",
+                            i, region.dstOffset.z, dst_copy_extent.depth, subresource_extent.depth);
+        }
+
+        // The union of all source regions, and the union of all destination regions, specified by the elements of regions,
+        // must not overlap in memory
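+        // Note: j == i is intentionally included; RegionIntersects presumably tests region i's source footprint
+        // against region j's destination footprint, so a region can conflict with itself on a self-copy.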
+        if (src_image_state->image == dst_image_state->image) {
+            for (uint32_t j = 0; j < regionCount; j++) {
+                if (RegionIntersects(&region, &pRegions[j], src_image_state->createInfo.imageType,
+                                     FormatIsMultiplane(src_image_state->createInfo.format))) {
+                    std::stringstream ss;
+                    ss << "vkCmdCopyImage(): pRegions[" << i << "] src overlaps with pRegions[" << j << "].";
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                    HandleToUint64(command_buffer), "VUID-vkCmdCopyImage-pRegions-00124", "%s.", ss.str().c_str());
+                }
+            }
+        }
+    }
+
+    // The formats of src_image and dst_image must be compatible. Formats are considered compatible if their texel size in bytes
+    // is the same between both formats. For example, VK_FORMAT_R8G8B8A8_UNORM is compatible with VK_FORMAT_R32_UINT because
+    // both texels are 4 bytes in size. Depth/stencil formats must match exactly.
+    if (FormatIsDepthOrStencil(src_image_state->createInfo.format) || FormatIsDepthOrStencil(dst_image_state->createInfo.format)) {
+        if (src_image_state->createInfo.format != dst_image_state->createInfo.format) {
+            char const str[] = "vkCmdCopyImage called with unmatched source and dest image depth/stencil formats.";
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(command_buffer), kVUID_Core_DrawState_MismatchedImageFormat, str);
+        }
+    } else {
+        if (!FormatSizesAreEqual(src_image_state->createInfo.format, dst_image_state->createInfo.format, regionCount, pRegions)) {
+            char const str[] = "vkCmdCopyImage called with unmatched source and dest image format sizes.";
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(command_buffer), "VUID-vkCmdCopyImage-srcImage-00135", "%s.", str);
+        }
+    }
+
+    // Source and dest image sample counts must match
+    if (src_image_state->createInfo.samples != dst_image_state->createInfo.samples) {
+        char const str[] = "vkCmdCopyImage() called on image pair with non-identical sample counts.";
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(command_buffer), "VUID-vkCmdCopyImage-srcImage-00136", "%s", str);
+    }
+
+    skip |= ValidateMemoryIsBoundToImage(src_image_state, "vkCmdCopyImage()", "VUID-vkCmdCopyImage-srcImage-00127");
+    skip |= ValidateMemoryIsBoundToImage(dst_image_state, "vkCmdCopyImage()", "VUID-vkCmdCopyImage-dstImage-00132");
+    // Validate that SRC & DST images have correct usage flags set
+    skip |= ValidateImageUsageFlags(src_image_state, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, true, "VUID-vkCmdCopyImage-srcImage-00126",
+                                    "vkCmdCopyImage()", "VK_IMAGE_USAGE_TRANSFER_SRC_BIT");
+    skip |= ValidateImageUsageFlags(dst_image_state, VK_IMAGE_USAGE_TRANSFER_DST_BIT, true, "VUID-vkCmdCopyImage-dstImage-00131",
+                                    "vkCmdCopyImage()", "VK_IMAGE_USAGE_TRANSFER_DST_BIT");
+    if (api_version >= VK_API_VERSION_1_1 || device_extensions.vk_khr_maintenance1) {
+        skip |= ValidateImageFormatFeatureFlags(src_image_state, VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, "vkCmdCopyImage()",
+                                                "VUID-vkCmdCopyImage-srcImage-01995", "VUID-vkCmdCopyImage-srcImage-01995");
+        skip |= ValidateImageFormatFeatureFlags(dst_image_state, VK_FORMAT_FEATURE_TRANSFER_DST_BIT, "vkCmdCopyImage()",
+                                                "VUID-vkCmdCopyImage-dstImage-01996", "VUID-vkCmdCopyImage-dstImage-01996");
+    }
+    skip |= ValidateCmdQueueFlags(cb_node, "vkCmdCopyImage()", VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+                                  "VUID-vkCmdCopyImage-commandBuffer-cmdpool");
+    skip |= ValidateCmd(cb_node, CMD_COPYIMAGE, "vkCmdCopyImage()");
+    skip |= InsideRenderPass(cb_node, "vkCmdCopyImage()", "VUID-vkCmdCopyImage-renderpass");
+    bool hit_error = false;
+    const char *invalid_src_layout_vuid = (src_image_state->shared_presentable && device_extensions.vk_khr_shared_presentable_image)
+                                              ? "VUID-vkCmdCopyImage-srcImageLayout-01917"
+                                              : "VUID-vkCmdCopyImage-srcImageLayout-00129";
+    const char *invalid_dst_layout_vuid = (dst_image_state->shared_presentable && device_extensions.vk_khr_shared_presentable_image)
+                                              ? "VUID-vkCmdCopyImage-dstImageLayout-01395"
+                                              : "VUID-vkCmdCopyImage-dstImageLayout-00134";
+    for (uint32_t i = 0; i < regionCount; ++i) {
+        skip |= VerifyImageLayout(cb_node, src_image_state, pRegions[i].srcSubresource, srcImageLayout,
+                                  VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, "vkCmdCopyImage()", invalid_src_layout_vuid,
+                                  "VUID-vkCmdCopyImage-srcImageLayout-00128", &hit_error);
+        skip |= VerifyImageLayout(cb_node, dst_image_state, pRegions[i].dstSubresource, dstImageLayout,
+                                  VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, "vkCmdCopyImage()", invalid_dst_layout_vuid,
+                                  "VUID-vkCmdCopyImage-dstImageLayout-00133", &hit_error);
+        skip |= ValidateCopyImageTransferGranularityRequirements(cb_node, src_image_state, dst_image_state, &pRegions[i], i,
+                                                                 "vkCmdCopyImage()");
+    }
+
+    return skip;
+}
+
+void CoreChecks::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
+                                           VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
+                                           const VkImageCopy *pRegions) {
+    StateTracker::PreCallRecordCmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount,
+                                            pRegions);
+    auto cb_node = GetCBState(commandBuffer);
+    auto src_image_state = GetImageState(srcImage);
+    auto dst_image_state = GetImageState(dstImage);
+
+    // Make sure that all image slices are updated to the correct layout
+    for (uint32_t i = 0; i < regionCount; ++i) {
+        SetImageInitialLayout(cb_node, *src_image_state, pRegions[i].srcSubresource, srcImageLayout);
+        SetImageInitialLayout(cb_node, *dst_image_state, pRegions[i].dstSubresource, dstImageLayout);
+    }
+}
+
+// Returns true if sub_rect is entirely contained within rect
+static inline bool ContainsRect(VkRect2D rect, VkRect2D sub_rect) {
+    if ((sub_rect.offset.x < rect.offset.x) || (sub_rect.offset.x + sub_rect.extent.width > rect.offset.x + rect.extent.width) ||
+        (sub_rect.offset.y < rect.offset.y) || (sub_rect.offset.y + sub_rect.extent.height > rect.offset.y + rect.extent.height))
+        return false;
+    return true;
+}
+
+bool CoreChecks::ValidateClearAttachmentExtent(VkCommandBuffer command_buffer, uint32_t attachment_index,
+                                               const FRAMEBUFFER_STATE *framebuffer, uint32_t fb_attachment,
+                                               const VkRect2D &render_area, uint32_t rect_count,
+                                               const VkClearRect *clear_rects) const {
+    bool skip = false;
+    const IMAGE_VIEW_STATE *image_view_state = nullptr;
+    if (framebuffer && (fb_attachment != VK_ATTACHMENT_UNUSED) && (fb_attachment < framebuffer->createInfo.attachmentCount)) {
+        image_view_state = GetImageViewState(framebuffer->createInfo.pAttachments[fb_attachment]);
+    }
+
+    for (uint32_t j = 0; j < rect_count; j++) {
+        if (!ContainsRect(render_area, clear_rects[j].rect)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(command_buffer), "VUID-vkCmdClearAttachments-pRects-00016",
+                            "vkCmdClearAttachments(): The area defined by pRects[%d] is not contained in the area of "
+                            "the current render pass instance.",
+                            j);
+        }
+
+        if (image_view_state) {
+            // The layers specified by a given element of pRects must be contained within every attachment that
+            // pAttachments refers to
+            const auto attachment_layer_count = image_view_state->create_info.subresourceRange.layerCount;
+            if ((clear_rects[j].baseArrayLayer >= attachment_layer_count) ||
+                (clear_rects[j].baseArrayLayer + clear_rects[j].layerCount > attachment_layer_count)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(command_buffer), "VUID-vkCmdClearAttachments-pRects-00017",
+                                "vkCmdClearAttachments(): The layers defined in pRects[%d] are not contained in the layers "
+                                "of pAttachment[%d].",
+                                j, attachment_index);
+            }
+        }
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount,
+                                                    const VkClearAttachment *pAttachments, uint32_t rectCount,
+                                                    const VkClearRect *pRects) const {
+    bool skip = false;
+    const CMD_BUFFER_STATE *cb_node = GetCBState(commandBuffer);
+    if (!cb_node) return skip;
+
+    skip |= ValidateCmdQueueFlags(cb_node, "vkCmdClearAttachments()", VK_QUEUE_GRAPHICS_BIT,
+                                  "VUID-vkCmdClearAttachments-commandBuffer-cmdpool");
+    skip |= ValidateCmd(cb_node, CMD_CLEARATTACHMENTS, "vkCmdClearAttachments()");
+    skip |= OutsideRenderPass(cb_node, "vkCmdClearAttachments()", "VUID-vkCmdClearAttachments-renderpass");
+
+    // Validate that attachment is in reference list of active subpass
+    if (cb_node->activeRenderPass) {
+        const VkRenderPassCreateInfo2KHR *renderpass_create_info = cb_node->activeRenderPass->createInfo.ptr();
+        const uint32_t renderpass_attachment_count = renderpass_create_info->attachmentCount;
+        const VkSubpassDescription2KHR *subpass_desc = &renderpass_create_info->pSubpasses[cb_node->activeSubpass];
+        const auto *framebuffer = GetFramebufferState(cb_node->activeFramebuffer);
+        const auto &render_area = cb_node->activeRenderPassBeginInfo.renderArea;
+
+        for (uint32_t attachment_index = 0; attachment_index < attachmentCount; attachment_index++) {
+            auto clear_desc = &pAttachments[attachment_index];
+            uint32_t fb_attachment = VK_ATTACHMENT_UNUSED;
+
+            if (0 == clear_desc->aspectMask) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(commandBuffer), "VUID-VkClearAttachment-aspectMask-requiredbitmask", " ");
+            } else if (clear_desc->aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(commandBuffer), "VUID-VkClearAttachment-aspectMask-00020", " ");
+            } else if (clear_desc->aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) {
+                uint32_t color_attachment = VK_ATTACHMENT_UNUSED;
+                if (clear_desc->colorAttachment < subpass_desc->colorAttachmentCount) {
+                    color_attachment = subpass_desc->pColorAttachments[clear_desc->colorAttachment].attachment;
+                    if ((color_attachment != VK_ATTACHMENT_UNUSED) && (color_attachment >= renderpass_attachment_count)) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                        HandleToUint64(commandBuffer), "VUID-vkCmdClearAttachments-aspectMask-02501",
+                                        "vkCmdClearAttachments() pAttachments[%u].colorAttachment=%u is not VK_ATTACHMENT_UNUSED "
+                                        "and not a valid attachment for %s attachmentCount=%u. Subpass %u pColorAttachment[%u]=%u.",
+                                        attachment_index, clear_desc->colorAttachment,
+                                        report_data->FormatHandle(cb_node->activeRenderPass->renderPass).c_str(),
+                                        renderpass_attachment_count, cb_node->activeSubpass, clear_desc->colorAttachment,
+                                        color_attachment);
+
+                        color_attachment = VK_ATTACHMENT_UNUSED;  // Defensive, prevent lookup past end of renderpass attachment
+                    }
+                } else {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                    HandleToUint64(commandBuffer), "VUID-vkCmdClearAttachments-aspectMask-02501",
+                                    "vkCmdClearAttachments() pAttachments[%u].colorAttachment=%u out of range for %s"
+                                    " subpass %u. colorAttachmentCount=%u",
+                                    attachment_index, clear_desc->colorAttachment,
+                                    report_data->FormatHandle(cb_node->activeRenderPass->renderPass).c_str(),
+                                    cb_node->activeSubpass, subpass_desc->colorAttachmentCount);
+                }
+                fb_attachment = color_attachment;
+
+                if ((clear_desc->aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) ||
+                    (clear_desc->aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT)) {
+                    char const str[] =
+                        "vkCmdClearAttachments() aspectMask [%d] must set only VK_IMAGE_ASPECT_COLOR_BIT of a color attachment.";
+                    skip |=
+                        log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(commandBuffer), "VUID-VkClearAttachment-aspectMask-00019", str, attachment_index);
+                }
+            } else {  // Must be depth and/or stencil
+                if (((clear_desc->aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) != VK_IMAGE_ASPECT_DEPTH_BIT) &&
+                    ((clear_desc->aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) != VK_IMAGE_ASPECT_STENCIL_BIT)) {
+                    char const str[] = "vkCmdClearAttachments() aspectMask [%d] is not a valid combination of bits.";
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                    HandleToUint64(commandBuffer), "VUID-VkClearAttachment-aspectMask-parameter", str,
+                                    attachment_index);
+                }
+                if (!subpass_desc->pDepthStencilAttachment ||
+                    (subpass_desc->pDepthStencilAttachment->attachment == VK_ATTACHMENT_UNUSED)) {
+                    skip |= log_msg(
+                        report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), kVUID_Core_DrawState_MissingAttachmentReference,
+                        "vkCmdClearAttachments() depth/stencil clear with no depth/stencil attachment in subpass; ignored");
+                } else {
+                    fb_attachment = subpass_desc->pDepthStencilAttachment->attachment;
+                }
+            }
+            if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
+                skip |= ValidateClearAttachmentExtent(commandBuffer, attachment_index, framebuffer, fb_attachment, render_area,
+                                                      rectCount, pRects);
+            }
+        }
+    }
+    return skip;
+}
+
+void CoreChecks::PreCallRecordCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount,
+                                                  const VkClearAttachment *pAttachments, uint32_t rectCount,
+                                                  const VkClearRect *pRects) {
+    auto *cb_node = GetCBState(commandBuffer);
+    if (cb_node->activeRenderPass && (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY)) {
+        const VkRenderPassCreateInfo2KHR *renderpass_create_info = cb_node->activeRenderPass->createInfo.ptr();
+        const VkSubpassDescription2KHR *subpass_desc = &renderpass_create_info->pSubpasses[cb_node->activeSubpass];
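+        // Lazily-allocated copy of the caller's pRects, shared (via shared_ptr value capture) by every
+        // deferred validation lambda created below.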
+        std::shared_ptr<std::vector<VkClearRect>> clear_rect_copy;
+        for (uint32_t attachment_index = 0; attachment_index < attachmentCount; attachment_index++) {
+            const auto clear_desc = &pAttachments[attachment_index];
+            uint32_t fb_attachment = VK_ATTACHMENT_UNUSED;
+            if ((clear_desc->aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) &&
+                (clear_desc->colorAttachment < subpass_desc->colorAttachmentCount)) {
+                fb_attachment = subpass_desc->pColorAttachments[clear_desc->colorAttachment].attachment;
+            } else if ((clear_desc->aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) &&
+                       subpass_desc->pDepthStencilAttachment) {
+                fb_attachment = subpass_desc->pDepthStencilAttachment->attachment;
+            }
+            if (fb_attachment != VK_ATTACHMENT_UNUSED) {
+                if (!clear_rect_copy) {
+                    // We need a copy of the clear rectangles that will persist until the last lambda executes
+                    // but we want to create it as lazily as possible
+                    clear_rect_copy.reset(new std::vector<VkClearRect>(pRects, pRects + rectCount));
+                }
+                // if a secondary level command buffer inherits the framebuffer from the primary command buffer
+                // (see VkCommandBufferInheritanceInfo), this validation must be deferred until the secondary is
+                // executed via vkCmdExecuteCommands, when the primary's framebuffer is known
+                auto val_fn = [this, commandBuffer, attachment_index, fb_attachment, rectCount, clear_rect_copy](
+                                  const CMD_BUFFER_STATE *prim_cb, VkFramebuffer fb) {
+                    assert(rectCount == clear_rect_copy->size());
+                    const FRAMEBUFFER_STATE *framebuffer = GetFramebufferState(fb);
+                    const auto &render_area = prim_cb->activeRenderPassBeginInfo.renderArea;
+                    bool skip = false;
+                    skip = ValidateClearAttachmentExtent(commandBuffer, attachment_index, framebuffer, fb_attachment, render_area,
+                                                         rectCount, clear_rect_copy->data());
+                    return skip;
+                };
+                cb_node->cmd_execute_commands_functions.emplace_back(val_fn);
+            }
+        }
+    }
+}
+
+bool CoreChecks::PreCallValidateCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
+                                                VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
+                                                const VkImageResolve *pRegions) const {
+    const auto *cb_node = GetCBState(commandBuffer);
+    const auto *src_image_state = GetImageState(srcImage);
+    const auto *dst_image_state = GetImageState(dstImage);
+
+    bool skip = false;
+    if (cb_node && src_image_state && dst_image_state) {
+        skip |= ValidateMemoryIsBoundToImage(src_image_state, "vkCmdResolveImage()", "VUID-vkCmdResolveImage-srcImage-00256");
+        skip |= ValidateMemoryIsBoundToImage(dst_image_state, "vkCmdResolveImage()", "VUID-vkCmdResolveImage-dstImage-00258");
+        skip |= ValidateCmdQueueFlags(cb_node, "vkCmdResolveImage()", VK_QUEUE_GRAPHICS_BIT,
+                                      "VUID-vkCmdResolveImage-commandBuffer-cmdpool");
+        skip |= ValidateCmd(cb_node, CMD_RESOLVEIMAGE, "vkCmdResolveImage()");
+        skip |= InsideRenderPass(cb_node, "vkCmdResolveImage()", "VUID-vkCmdResolveImage-renderpass");
+        skip |= ValidateImageFormatFeatureFlags(dst_image_state, VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT, "vkCmdResolveImage()",
+                                                "VUID-vkCmdResolveImage-dstImage-02003", "VUID-vkCmdResolveImage-dstImage-02003");
+
+        bool hit_error = false;
+        const char *invalid_src_layout_vuid =
+            (src_image_state->shared_presentable && device_extensions.vk_khr_shared_presentable_image)
+                ? "VUID-vkCmdResolveImage-srcImageLayout-01400"
+                : "VUID-vkCmdResolveImage-srcImageLayout-00261";
+        const char *invalid_dst_layout_vuid =
+            (dst_image_state->shared_presentable && device_extensions.vk_khr_shared_presentable_image)
+                ? "VUID-vkCmdResolveImage-dstImageLayout-01401"
+                : "VUID-vkCmdResolveImage-dstImageLayout-00263";
+        // For each region, the number of layers in the image subresource should not be zero
+        // For each region, src and dest image aspect must be color only
+        for (uint32_t i = 0; i < regionCount; i++) {
+            skip |=
+                ValidateImageSubresourceLayers(cb_node, &pRegions[i].srcSubresource, "vkCmdResolveImage()", "srcSubresource", i);
+            skip |=
+                ValidateImageSubresourceLayers(cb_node, &pRegions[i].dstSubresource, "vkCmdResolveImage()", "dstSubresource", i);
+            skip |= VerifyImageLayout(cb_node, src_image_state, pRegions[i].srcSubresource, srcImageLayout,
+                                      VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, "vkCmdResolveImage()", invalid_src_layout_vuid,
+                                      "VUID-vkCmdResolveImage-srcImageLayout-00260", &hit_error);
+            skip |= VerifyImageLayout(cb_node, dst_image_state, pRegions[i].dstSubresource, dstImageLayout,
+                                      VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, "vkCmdResolveImage()", invalid_dst_layout_vuid,
+                                      "VUID-vkCmdResolveImage-dstImageLayout-00262", &hit_error);
+            skip |= ValidateImageMipLevel(cb_node, src_image_state, pRegions[i].srcSubresource.mipLevel, i, "vkCmdResolveImage()",
+                                          "srcSubresource", "VUID-vkCmdResolveImage-srcSubresource-01709");
+            skip |= ValidateImageMipLevel(cb_node, dst_image_state, pRegions[i].dstSubresource.mipLevel, i, "vkCmdResolveImage()",
+                                          "dstSubresource", "VUID-vkCmdResolveImage-dstSubresource-01710");
+            skip |= ValidateImageArrayLayerRange(cb_node, src_image_state, pRegions[i].srcSubresource.baseArrayLayer,
+                                                 pRegions[i].srcSubresource.layerCount, i, "vkCmdResolveImage()", "srcSubresource",
+                                                 "VUID-vkCmdResolveImage-srcSubresource-01711");
+            skip |= ValidateImageArrayLayerRange(cb_node, dst_image_state, pRegions[i].dstSubresource.baseArrayLayer,
+                                                 pRegions[i].dstSubresource.layerCount, i, "vkCmdResolveImage()", "srcSubresource",
+                                                 "VUID-vkCmdResolveImage-dstSubresource-01712");
+
+            // layer counts must match
+            if (pRegions[i].srcSubresource.layerCount != pRegions[i].dstSubresource.layerCount) {
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                    HandleToUint64(cb_node->commandBuffer), "VUID-VkImageResolve-layerCount-00267",
+                    "vkCmdResolveImage(): layerCount in source and destination subresource of pRegions[%d] does not match.", i);
+            }
+            // For each region, src and dest image aspect must be color only
+            if ((pRegions[i].srcSubresource.aspectMask != VK_IMAGE_ASPECT_COLOR_BIT) ||
+                (pRegions[i].dstSubresource.aspectMask != VK_IMAGE_ASPECT_COLOR_BIT)) {
+                char const str[] =
+                    "vkCmdResolveImage(): src and dest aspectMasks for each region must specify only VK_IMAGE_ASPECT_COLOR_BIT";
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(cb_node->commandBuffer), "VUID-VkImageResolve-aspectMask-00266", "%s.", str);
+            }
+        }
+
+        if (src_image_state->createInfo.format != dst_image_state->createInfo.format) {
+            char const str[] = "vkCmdResolveImage called with unmatched source and dest formats.";
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_node->commandBuffer), kVUID_Core_DrawState_MismatchedImageFormat, str);
+        }
+        if (src_image_state->createInfo.imageType != dst_image_state->createInfo.imageType) {
+            char const str[] = "vkCmdResolveImage called with unmatched source and dest image types.";
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_node->commandBuffer), kVUID_Core_DrawState_MismatchedImageType, str);
+        }
+        if (src_image_state->createInfo.samples == VK_SAMPLE_COUNT_1_BIT) {
+            char const str[] = "vkCmdResolveImage called with source sample count less than 2.";
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_node->commandBuffer), "VUID-vkCmdResolveImage-srcImage-00257", "%s.", str);
+        }
+        if (dst_image_state->createInfo.samples != VK_SAMPLE_COUNT_1_BIT) {
+            char const str[] = "vkCmdResolveImage called with dest sample count greater than 1.";
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_node->commandBuffer), "VUID-vkCmdResolveImage-dstImage-00259", "%s.", str);
+        }
+    } else {
+        assert(0);
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
+                                             VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
+                                             const VkImageBlit *pRegions, VkFilter filter) const {
+    const auto *cb_node = GetCBState(commandBuffer);
+    const auto *src_image_state = GetImageState(srcImage);
+    const auto *dst_image_state = GetImageState(dstImage);
+
+    bool skip = false;
+    if (cb_node) {
+        skip |= ValidateCmd(cb_node, CMD_BLITIMAGE, "vkCmdBlitImage()");
+    }
+    if (cb_node && src_image_state && dst_image_state) {
+        skip |= ValidateImageSampleCount(src_image_state, VK_SAMPLE_COUNT_1_BIT, "vkCmdBlitImage(): srcImage",
+                                         "VUID-vkCmdBlitImage-srcImage-00233");
+        skip |= ValidateImageSampleCount(dst_image_state, VK_SAMPLE_COUNT_1_BIT, "vkCmdBlitImage(): dstImage",
+                                         "VUID-vkCmdBlitImage-dstImage-00234");
+        skip |= ValidateMemoryIsBoundToImage(src_image_state, "vkCmdBlitImage()", "VUID-vkCmdBlitImage-srcImage-00220");
+        skip |= ValidateMemoryIsBoundToImage(dst_image_state, "vkCmdBlitImage()", "VUID-vkCmdBlitImage-dstImage-00225");
+        skip |=
+            ValidateImageUsageFlags(src_image_state, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, true, "VUID-vkCmdBlitImage-srcImage-00219",
+                                    "vkCmdBlitImage()", "VK_IMAGE_USAGE_TRANSFER_SRC_BIT");
+        skip |=
+            ValidateImageUsageFlags(dst_image_state, VK_IMAGE_USAGE_TRANSFER_DST_BIT, true, "VUID-vkCmdBlitImage-dstImage-00224",
+                                    "vkCmdBlitImage()", "VK_IMAGE_USAGE_TRANSFER_DST_BIT");
+        skip |=
+            ValidateCmdQueueFlags(cb_node, "vkCmdBlitImage()", VK_QUEUE_GRAPHICS_BIT, "VUID-vkCmdBlitImage-commandBuffer-cmdpool");
+        skip |= ValidateCmd(cb_node, CMD_BLITIMAGE, "vkCmdBlitImage()");
+        skip |= InsideRenderPass(cb_node, "vkCmdBlitImage()", "VUID-vkCmdBlitImage-renderpass");
+        skip |= ValidateImageFormatFeatureFlags(src_image_state, VK_FORMAT_FEATURE_BLIT_SRC_BIT, "vkCmdBlitImage()",
+                                                "VUID-vkCmdBlitImage-srcImage-01999", "VUID-vkCmdBlitImage-srcImage-01999");
+        skip |= ValidateImageFormatFeatureFlags(dst_image_state, VK_FORMAT_FEATURE_BLIT_DST_BIT, "vkCmdBlitImage()",
+                                                "VUID-vkCmdBlitImage-dstImage-02000", "VUID-vkCmdBlitImage-dstImage-02000");
+
+        // TODO: Need to validate image layouts, which will include layout validation for shared presentable images
+
+        VkFormat src_format = src_image_state->createInfo.format;
+        VkFormat dst_format = dst_image_state->createInfo.format;
+        VkImageType src_type = src_image_state->createInfo.imageType;
+        VkImageType dst_type = dst_image_state->createInfo.imageType;
+
+        if (VK_FILTER_LINEAR == filter) {
+            skip |= ValidateImageFormatFeatureFlags(src_image_state, VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT,
+                                                    "vkCmdBlitImage()", "VUID-vkCmdBlitImage-filter-02001",
+                                                    "VUID-vkCmdBlitImage-filter-02001");
+        } else if (VK_FILTER_CUBIC_IMG == filter) {
+            skip |= ValidateImageFormatFeatureFlags(src_image_state, VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG,
+                                                    "vkCmdBlitImage()", "VUID-vkCmdBlitImage-filter-02002",
+                                                    "VUID-vkCmdBlitImage-filter-02002");
+        }
+
+        if ((VK_FILTER_CUBIC_IMG == filter) && (VK_IMAGE_TYPE_3D != src_type)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_node->commandBuffer), "VUID-vkCmdBlitImage-filter-00237",
+                            "vkCmdBlitImage(): source image type must be VK_IMAGE_TYPE_3D when cubic filtering is specified.");
+        }
+
+        if ((VK_SAMPLE_COUNT_1_BIT != src_image_state->createInfo.samples) ||
+            (VK_SAMPLE_COUNT_1_BIT != dst_image_state->createInfo.samples)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_node->commandBuffer), "VUID-vkCmdBlitImage-srcImage-00228",
+                            "vkCmdBlitImage(): source or dest image has sample count other than VK_SAMPLE_COUNT_1_BIT.");
+        }
+
+        // Validate consistency for unsigned formats
+        if (FormatIsUInt(src_format) != FormatIsUInt(dst_format)) {
+            std::stringstream ss;
+            ss << "vkCmdBlitImage(): If one of srcImage and dstImage images has unsigned integer format, "
+               << "the other one must also have unsigned integer format.  "
+               << "Source format is " << string_VkFormat(src_format) << " Destination format is " << string_VkFormat(dst_format);
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_node->commandBuffer), "VUID-vkCmdBlitImage-srcImage-00230", "%s.", ss.str().c_str());
+        }
+
+        // Validate consistency for signed formats
+        if (FormatIsSInt(src_format) != FormatIsSInt(dst_format)) {
+            std::stringstream ss;
+            ss << "vkCmdBlitImage(): If one of srcImage and dstImage images has signed integer format, "
+               << "the other one must also have signed integer format.  "
+               << "Source format is " << string_VkFormat(src_format) << " Destination format is " << string_VkFormat(dst_format);
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_node->commandBuffer), "VUID-vkCmdBlitImage-srcImage-00229", "%s.", ss.str().c_str());
+        }
+
+        // Validate filter for Depth/Stencil formats
+        if (FormatIsDepthOrStencil(src_format) && (filter != VK_FILTER_NEAREST)) {
+            std::stringstream ss;
+            ss << "vkCmdBlitImage(): If the format of srcImage is a depth, stencil, or depth stencil "
+               << "then filter must be VK_FILTER_NEAREST.";
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_node->commandBuffer), "VUID-vkCmdBlitImage-srcImage-00232", "%s.", ss.str().c_str());
+        }
+
+        // Validate aspect bits and formats for depth/stencil images
+        if (FormatIsDepthOrStencil(src_format) || FormatIsDepthOrStencil(dst_format)) {
+            if (src_format != dst_format) {
+                std::stringstream ss;
+                ss << "vkCmdBlitImage(): If one of srcImage and dstImage images has a format of depth, stencil or depth "
+                   << "stencil, the other one must have exactly the same format.  "
+                   << "Source format is " << string_VkFormat(src_format) << " Destination format is "
+                   << string_VkFormat(dst_format);
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_node->commandBuffer), "VUID-vkCmdBlitImage-srcImage-00231", "%s.", ss.str().c_str());
+            }
+        }  // Depth or Stencil
+
+        // Do per-region checks
+        const char *invalid_src_layout_vuid =
+            (src_image_state->shared_presentable && device_extensions.vk_khr_shared_presentable_image)
+                ? "VUID-vkCmdBlitImage-srcImageLayout-01398"
+                : "VUID-vkCmdBlitImage-srcImageLayout-00222";
+        const char *invalid_dst_layout_vuid =
+            (dst_image_state->shared_presentable && device_extensions.vk_khr_shared_presentable_image)
+                ? "VUID-vkCmdBlitImage-dstImageLayout-01399"
+                : "VUID-vkCmdBlitImage-dstImageLayout-00227";
+        for (uint32_t i = 0; i < regionCount; i++) {
+            const VkImageBlit rgn = pRegions[i];
+            bool hit_error = false;
+            skip |= VerifyImageLayout(cb_node, src_image_state, rgn.srcSubresource, srcImageLayout,
+                                      VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, "vkCmdBlitImage()", invalid_src_layout_vuid,
+                                      "VUID-vkCmdBlitImage-srcImageLayout-00221", &hit_error);
+            skip |= VerifyImageLayout(cb_node, dst_image_state, rgn.dstSubresource, dstImageLayout,
+                                      VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, "vkCmdBlitImage()", invalid_dst_layout_vuid,
+                                      "VUID-vkCmdBlitImage-dstImageLayout-00226", &hit_error);
+            skip |= ValidateImageSubresourceLayers(cb_node, &rgn.srcSubresource, "vkCmdBlitImage()", "srcSubresource", i);
+            skip |= ValidateImageSubresourceLayers(cb_node, &rgn.dstSubresource, "vkCmdBlitImage()", "dstSubresource", i);
+            skip |= ValidateImageMipLevel(cb_node, src_image_state, rgn.srcSubresource.mipLevel, i, "vkCmdBlitImage()",
+                                          "srcSubresource", "VUID-vkCmdBlitImage-srcSubresource-01705");
+            skip |= ValidateImageMipLevel(cb_node, dst_image_state, rgn.dstSubresource.mipLevel, i, "vkCmdBlitImage()",
+                                          "dstSubresource", "VUID-vkCmdBlitImage-dstSubresource-01706");
+            skip |= ValidateImageArrayLayerRange(cb_node, src_image_state, rgn.srcSubresource.baseArrayLayer,
+                                                 rgn.srcSubresource.layerCount, i, "vkCmdBlitImage()", "srcSubresource",
+                                                 "VUID-vkCmdBlitImage-srcSubresource-01707");
+            skip |= ValidateImageArrayLayerRange(cb_node, dst_image_state, rgn.dstSubresource.baseArrayLayer,
+                                                 rgn.dstSubresource.layerCount, i, "vkCmdBlitImage()", "dstSubresource",
+                                                 "VUID-vkCmdBlitImage-dstSubresource-01708");
+            // Warn for zero-sized regions
+            if ((rgn.srcOffsets[0].x == rgn.srcOffsets[1].x) || (rgn.srcOffsets[0].y == rgn.srcOffsets[1].y) ||
+                (rgn.srcOffsets[0].z == rgn.srcOffsets[1].z)) {
+                std::stringstream ss;
+                ss << "vkCmdBlitImage(): pRegions[" << i << "].srcOffsets specify a zero-volume area.";
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_node->commandBuffer), kVUID_Core_DrawState_InvalidExtents, "%s", ss.str().c_str());
+            }
+            if ((rgn.dstOffsets[0].x == rgn.dstOffsets[1].x) || (rgn.dstOffsets[0].y == rgn.dstOffsets[1].y) ||
+                (rgn.dstOffsets[0].z == rgn.dstOffsets[1].z)) {
+                std::stringstream ss;
+                ss << "vkCmdBlitImage(): pRegions[" << i << "].dstOffsets specify a zero-volume area.";
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_node->commandBuffer), kVUID_Core_DrawState_InvalidExtents, "%s", ss.str().c_str());
+            }
+
+            // Check that src/dst layercounts match
+            if (rgn.srcSubresource.layerCount != rgn.dstSubresource.layerCount) {
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                    HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-layerCount-00239",
+                    "vkCmdBlitImage(): layerCount in source and destination subresource of pRegions[%d] does not match.", i);
+            }
+
+            if (rgn.srcSubresource.aspectMask != rgn.dstSubresource.aspectMask) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-aspectMask-00238",
+                                "vkCmdBlitImage(): aspectMask members for pRegion[%d] do not match.", i);
+            }
+
+            if (!VerifyAspectsPresent(rgn.srcSubresource.aspectMask, src_format)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-aspectMask-00241",
+                                "vkCmdBlitImage(): region [%d] source aspectMask (0x%x) specifies aspects not present in source "
+                                "image format %s.",
+                                i, rgn.srcSubresource.aspectMask, string_VkFormat(src_format));
+            }
+
+            if (!VerifyAspectsPresent(rgn.dstSubresource.aspectMask, dst_format)) {
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                    HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-aspectMask-00242",
+                    "vkCmdBlitImage(): region [%d] dest aspectMask (0x%x) specifies aspects not present in dest image format %s.",
+                    i, rgn.dstSubresource.aspectMask, string_VkFormat(dst_format));
+            }
+
+            // Validate source image offsets
+            VkExtent3D src_extent = GetImageSubresourceExtent(src_image_state, &(rgn.srcSubresource));
+            if (VK_IMAGE_TYPE_1D == src_type) {
+                if ((0 != rgn.srcOffsets[0].y) || (1 != rgn.srcOffsets[1].y)) {
+                    skip |=
+                        log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-srcImage-00245",
+                                "vkCmdBlitImage(): region [%d], source image of type VK_IMAGE_TYPE_1D with srcOffset[].y values "
+                                "of (%1d, %1d). These must be (0, 1).",
+                                i, rgn.srcOffsets[0].y, rgn.srcOffsets[1].y);
+                }
+            }
+
+            if ((VK_IMAGE_TYPE_1D == src_type) || (VK_IMAGE_TYPE_2D == src_type)) {
+                if ((0 != rgn.srcOffsets[0].z) || (1 != rgn.srcOffsets[1].z)) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                    HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-srcImage-00247",
+                                    "vkCmdBlitImage(): region [%d], source image of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D with "
+                                    "srcOffset[].z values of (%1d, %1d). These must be (0, 1).",
+                                    i, rgn.srcOffsets[0].z, rgn.srcOffsets[1].z);
+                }
+            }
+
+            bool oob = false;
+            if ((rgn.srcOffsets[0].x < 0) || (rgn.srcOffsets[0].x > static_cast<int32_t>(src_extent.width)) ||
+                (rgn.srcOffsets[1].x < 0) || (rgn.srcOffsets[1].x > static_cast<int32_t>(src_extent.width))) {
+                oob = true;
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                    HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-srcOffset-00243",
+                    "vkCmdBlitImage(): region [%d] srcOffset[].x values (%1d, %1d) exceed srcSubresource width extent (%1d).", i,
+                    rgn.srcOffsets[0].x, rgn.srcOffsets[1].x, src_extent.width);
+            }
+            if ((rgn.srcOffsets[0].y < 0) || (rgn.srcOffsets[0].y > static_cast<int32_t>(src_extent.height)) ||
+                (rgn.srcOffsets[1].y < 0) || (rgn.srcOffsets[1].y > static_cast<int32_t>(src_extent.height))) {
+                oob = true;
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                    HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-srcOffset-00244",
+                    "vkCmdBlitImage(): region [%d] srcOffset[].y values (%1d, %1d) exceed srcSubresource height extent (%1d).", i,
+                    rgn.srcOffsets[0].y, rgn.srcOffsets[1].y, src_extent.height);
+            }
+            if ((rgn.srcOffsets[0].z < 0) || (rgn.srcOffsets[0].z > static_cast<int32_t>(src_extent.depth)) ||
+                (rgn.srcOffsets[1].z < 0) || (rgn.srcOffsets[1].z > static_cast<int32_t>(src_extent.depth))) {
+                oob = true;
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                    HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-srcOffset-00246",
+                    "vkCmdBlitImage(): region [%d] srcOffset[].z values (%1d, %1d) exceed srcSubresource depth extent (%1d).", i,
+                    rgn.srcOffsets[0].z, rgn.srcOffsets[1].z, src_extent.depth);
+            }
+            if (oob) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(cb_node->commandBuffer), "VUID-vkCmdBlitImage-pRegions-00215",
+                                "vkCmdBlitImage(): region [%d] source image blit region exceeds image dimensions.", i);
+            }
+
+            // Validate dest image offsets
+            VkExtent3D dst_extent = GetImageSubresourceExtent(dst_image_state, &(rgn.dstSubresource));
+            if (VK_IMAGE_TYPE_1D == dst_type) {
+                if ((0 != rgn.dstOffsets[0].y) || (1 != rgn.dstOffsets[1].y)) {
+                    skip |=
+                        log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-dstImage-00250",
+                                "vkCmdBlitImage(): region [%d], dest image of type VK_IMAGE_TYPE_1D with dstOffset[].y values of "
+                                "(%1d, %1d). These must be (0, 1).",
+                                i, rgn.dstOffsets[0].y, rgn.dstOffsets[1].y);
+                }
+            }
+
+            if ((VK_IMAGE_TYPE_1D == dst_type) || (VK_IMAGE_TYPE_2D == dst_type)) {
+                if ((0 != rgn.dstOffsets[0].z) || (1 != rgn.dstOffsets[1].z)) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                    HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-dstImage-00252",
+                                    "vkCmdBlitImage(): region [%d], dest image of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D with "
+                                    "dstOffset[].z values of (%1d, %1d). These must be (0, 1).",
+                                    i, rgn.dstOffsets[0].z, rgn.dstOffsets[1].z);
+                }
+            }
+
+            oob = false;
+            if ((rgn.dstOffsets[0].x < 0) || (rgn.dstOffsets[0].x > static_cast<int32_t>(dst_extent.width)) ||
+                (rgn.dstOffsets[1].x < 0) || (rgn.dstOffsets[1].x > static_cast<int32_t>(dst_extent.width))) {
+                oob = true;
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                    HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-dstOffset-00248",
+                    "vkCmdBlitImage(): region [%d] dstOffset[].x values (%1d, %1d) exceed dstSubresource width extent (%1d).", i,
+                    rgn.dstOffsets[0].x, rgn.dstOffsets[1].x, dst_extent.width);
+            }
+            if ((rgn.dstOffsets[0].y < 0) || (rgn.dstOffsets[0].y > static_cast<int32_t>(dst_extent.height)) ||
+                (rgn.dstOffsets[1].y < 0) || (rgn.dstOffsets[1].y > static_cast<int32_t>(dst_extent.height))) {
+                oob = true;
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                    HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-dstOffset-00249",
+                    "vkCmdBlitImage(): region [%d] dstOffset[].y values (%1d, %1d) exceed dstSubresource height extent (%1d).", i,
+                    rgn.dstOffsets[0].y, rgn.dstOffsets[1].y, dst_extent.height);
+            }
+            if ((rgn.dstOffsets[0].z < 0) || (rgn.dstOffsets[0].z > static_cast<int32_t>(dst_extent.depth)) ||
+                (rgn.dstOffsets[1].z < 0) || (rgn.dstOffsets[1].z > static_cast<int32_t>(dst_extent.depth))) {
+                oob = true;
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                    HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-dstOffset-00251",
+                    "vkCmdBlitImage(): region [%d] dstOffset[].z values (%1d, %1d) exceed dstSubresource depth extent (%1d).", i,
+                    rgn.dstOffsets[0].z, rgn.dstOffsets[1].z, dst_extent.depth);
+            }
+            if (oob) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(cb_node->commandBuffer), "VUID-vkCmdBlitImage-pRegions-00216",
+                                "vkCmdBlitImage(): region [%d] destination image blit region exceeds image dimensions.", i);
+            }
+
+            if ((VK_IMAGE_TYPE_3D == src_type) || (VK_IMAGE_TYPE_3D == dst_type)) {
+                if ((0 != rgn.srcSubresource.baseArrayLayer) || (1 != rgn.srcSubresource.layerCount) ||
+                    (0 != rgn.dstSubresource.baseArrayLayer) || (1 != rgn.dstSubresource.layerCount)) {
+                    skip |=
+                        log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-srcImage-00240",
+                                "vkCmdBlitImage(): region [%d] blit to/from a 3D image type with a non-zero baseArrayLayer, or a "
+                                "layerCount other than 1.",
+                                i);
+                }
+            }
+        }  // per-region checks
+    } else {
+        assert(0);
+    }
+    return skip;
+}
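+
+// [Editorial sketch, not part of the original change] An application-side region that the srcOffsets
+// bounds checks above would flag, assuming the referenced source mip level is 64x64x1 (values are
+// illustrative only):
+//
+//   VkImageBlit rgn = {};
+//   rgn.srcSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, /*mipLevel*/ 0, /*baseArrayLayer*/ 0, /*layerCount*/ 1};
+//   rgn.dstSubresource = rgn.srcSubresource;
+//   rgn.srcOffsets[0] = {0, 0, 0};
+//   rgn.srcOffsets[1] = {96, 64, 1};  // x == 96 exceeds the 64-texel width -> VUID-VkImageBlit-srcOffset-00243
+//   rgn.dstOffsets[0] = {0, 0, 0};
+//   rgn.dstOffsets[1] = {64, 64, 1};  // destination offsets stay within the destination extent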
+
+void CoreChecks::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
+                                           VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
+                                           const VkImageBlit *pRegions, VkFilter filter) {
+    StateTracker::PreCallRecordCmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount,
+                                            pRegions, filter);
+    auto cb_node = GetCBState(commandBuffer);
+    auto src_image_state = GetImageState(srcImage);
+    auto dst_image_state = GetImageState(dstImage);
+
+    // Make sure that all image slices are updated to correct layout
+    for (uint32_t i = 0; i < regionCount; ++i) {
+        SetImageInitialLayout(cb_node, *src_image_state, pRegions[i].srcSubresource, srcImageLayout);
+        SetImageInitialLayout(cb_node, *dst_image_state, pRegions[i].dstSubresource, dstImageLayout);
+    }
+}
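+
+// [Editorial note, added for clarity] Recording the expected layout of each blitted subresource above is
+// what lets ValidateCmdBufImageLayouts() below compare a command buffer's first-use ("initial") layouts
+// against the globally tracked image layouts when the buffer is submitted.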
+
+// This validates that the initial layout specified in the command buffer for the IMAGE is the same as the global IMAGE layout
+bool CoreChecks::ValidateCmdBufImageLayouts(const CMD_BUFFER_STATE *pCB, const ImageSubresPairLayoutMap &globalImageLayoutMap,
+                                            ImageSubresPairLayoutMap *overlayLayoutMap_arg) const {
+    if (disabled.image_layout_validation) return false;
+    bool skip = false;
+    ImageSubresPairLayoutMap &overlayLayoutMap = *overlayLayoutMap_arg;
+    // Iterate over the layout maps for each referenced image
+    for (const auto &layout_map_entry : pCB->image_layout_map) {
+        const auto image = layout_map_entry.first;
+        const auto *image_state = GetImageState(image);
+        if (!image_state) continue;  // Can't check layouts of a dead image
+        const auto &subres_map = layout_map_entry.second;
+        ImageSubresourcePair isr_pair;
+        isr_pair.image = image;
+        isr_pair.hasSubresource = true;
+        // Validate the initial_uses for each subresource referenced
+        for (auto it_init = subres_map->BeginInitialUse(); !it_init.AtEnd(); ++it_init) {
+            isr_pair.subresource = (*it_init).subresource;
+            VkImageLayout initial_layout = (*it_init).layout;
+            VkImageLayout image_layout;
+            if (FindLayout(overlayLayoutMap, isr_pair, image_layout) || FindLayout(globalImageLayoutMap, isr_pair, image_layout)) {
+                if (initial_layout == VK_IMAGE_LAYOUT_UNDEFINED) {
+                    // TODO: Set memory invalid which is in mem_tracker currently
+                } else if (image_layout != initial_layout) {
+                    // Need to look up the initial layout *state* to get a bit more information
+                    const auto *initial_layout_state = subres_map->GetSubresourceInitialLayoutState(isr_pair.subresource);
+                    assert(initial_layout_state);  // There's no way we should have an initial layout without matching state...
+                    bool matches = ImageLayoutMatches(initial_layout_state->aspect_mask, image_layout, initial_layout);
+                    if (!matches) {
+                        std::string formatted_label = FormatDebugLabel(" ", pCB->debug_label);
+                        skip |= log_msg(
+                            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(pCB->commandBuffer), kVUID_Core_DrawState_InvalidImageLayout,
+                            "Submitted command buffer expects %s (subresource: aspectMask 0x%X, array layer %u, mip level %u) "
+                            "to be in layout %s--instead, current layout is %s.%s",
+                            report_data->FormatHandle(image).c_str(), isr_pair.subresource.aspectMask,
+                            isr_pair.subresource.arrayLayer, isr_pair.subresource.mipLevel, string_VkImageLayout(initial_layout),
+                            string_VkImageLayout(image_layout), formatted_label.c_str());
+                    }
+                }
+            }
+        }
+
+        // Update all layout set operations (which will be a subset of the initial_layouts)
+        for (auto it_set = subres_map->BeginSetLayout(); !it_set.AtEnd(); ++it_set) {
+            VkImageLayout layout = (*it_set).layout;
+            isr_pair.subresource = (*it_set).subresource;
+            SetLayout(overlayLayoutMap, isr_pair, layout);
+        }
+    }
+
+    return skip;
+}
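+
+// [Editorial sketch, not part of the original change] The overlay/global lookup used above follows a simple
+// two-level pattern: layouts recorded earlier in the same submit overlay the device-global state. A minimal
+// standalone illustration of that idea (simplified key type; not the layer's real containers):
+//
+//   #include <cstdint>
+//   #include <map>
+//   #include <utility>
+//   using LayoutKey = std::pair<uint64_t /*image handle*/, uint32_t /*flattened subresource*/>;
+//   bool FindEffectiveLayout(const std::map<LayoutKey, VkImageLayout> &overlay,
+//                            const std::map<LayoutKey, VkImageLayout> &global, const LayoutKey &key,
+//                            VkImageLayout *out) {
+//       auto it = overlay.find(key);                 // a layout set earlier in this submit wins...
+//       if (it != overlay.end()) { *out = it->second; return true; }
+//       it = global.find(key);                       // ...otherwise fall back to the global record
+//       if (it != global.end()) { *out = it->second; return true; }
+//       return false;                                // this image/subresource has no known layout yet
+//   }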
+
+void CoreChecks::UpdateCmdBufImageLayouts(CMD_BUFFER_STATE *pCB) {
+    for (const auto &layout_map_entry : pCB->image_layout_map) {
+        const auto image = layout_map_entry.first;
+        const auto *image_state = GetImageState(image);
+        if (!image_state) continue;  // Can't set layouts of a dead image
+        const auto &subres_map = layout_map_entry.second;
+        ImageSubresourcePair isr_pair;
+        isr_pair.image = image;
+        isr_pair.hasSubresource = true;
+
+        // Update all layout set operations (which will be a subset of the initial_layouts)
+        for (auto it_set = subres_map->BeginSetLayout(); !it_set.AtEnd(); ++it_set) {
+            VkImageLayout layout = (*it_set).layout;
+            isr_pair.subresource = (*it_set).subresource;
+            SetGlobalLayout(isr_pair, layout);
+        }
+    }
+}
+
+// ValidateLayoutVsAttachmentDescription is a general function for validating state associated with the VkAttachmentDescription
+// structs used by the subpasses of a render pass. The initial check ensures that attachments whose first-use layout is a
+// READ_ONLY layout do not use CLEAR as their loadOp.
+bool CoreChecks::ValidateLayoutVsAttachmentDescription(const debug_report_data *report_data, RenderPassCreateVersion rp_version,
+                                                       const VkImageLayout first_layout, const uint32_t attachment,
+                                                       const VkAttachmentDescription2KHR &attachment_description) const {
+    bool skip = false;
+    const char *vuid;
+    const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
+
+    // Verify that initial loadOp on READ_ONLY attachments is not CLEAR
+    if (attachment_description.loadOp == VK_ATTACHMENT_LOAD_OP_CLEAR) {
+        if (use_rp2 && ((first_layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL) ||
+                        (first_layout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) ||
+                        (first_layout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL))) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkRenderPassCreateInfo2KHR-pAttachments-02522",
+                        "Cannot clear attachment %d with invalid first layout %s.", attachment, string_VkImageLayout(first_layout));
+        } else if (!use_rp2 && ((first_layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL) ||
+                                (first_layout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL))) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkRenderPassCreateInfo-pAttachments-00836",
+                        "Cannot clear attachment %d with invalid first layout %s.", attachment, string_VkImageLayout(first_layout));
+        }
+    }
+    if (attachment_description.loadOp == VK_ATTACHMENT_LOAD_OP_CLEAR) {
+        if (first_layout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL) {
+            vuid = use_rp2 ? kVUID_Core_DrawState_InvalidRenderpass : "VUID-VkRenderPassCreateInfo-pAttachments-01566";
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                        "Cannot clear attachment %d with invalid first layout %s.", attachment, string_VkImageLayout(first_layout));
+        }
+    }
+
+    if (attachment_description.stencilLoadOp == VK_ATTACHMENT_LOAD_OP_CLEAR) {
+        if (first_layout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL) {
+            vuid = use_rp2 ? kVUID_Core_DrawState_InvalidRenderpass : "VUID-VkRenderPassCreateInfo-pAttachments-01567";
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                        "Cannot clear attachment %d with invalid first layout %s.", attachment, string_VkImageLayout(first_layout));
+        }
+    }
+    return skip;
+}
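+
+// [Editorial sketch, not part of the original change] The kind of application input the check above rejects,
+// expressed with core Vulkan structures (values chosen for illustration):
+//
+//   VkAttachmentDescription att = {};
+//   att.format = VK_FORMAT_D24_UNORM_S8_UINT;
+//   att.samples = VK_SAMPLE_COUNT_1_BIT;
+//   att.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;  // clearing on load...
+//   att.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+//   att.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
+//   // ...while the attachment's first subpass reference uses VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL
+//   // as first_layout triggers "VUID-VkRenderPassCreateInfo-pAttachments-00836" (or the -02522 variant for
+//   // vkCreateRenderPass2KHR).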
+
+bool CoreChecks::ValidateLayouts(RenderPassCreateVersion rp_version, VkDevice device,
+                                 const VkRenderPassCreateInfo2KHR *pCreateInfo) const {
+    bool skip = false;
+    const char *vuid;
+    const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
+    const char *const function_name = use_rp2 ? "vkCreateRenderPass2KHR()" : "vkCreateRenderPass()";
+
+    for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
+        VkFormat format = pCreateInfo->pAttachments[i].format;
+        if (pCreateInfo->pAttachments[i].initialLayout == VK_IMAGE_LAYOUT_UNDEFINED) {
+            if ((FormatIsColor(format) || FormatHasDepth(format)) &&
+                pCreateInfo->pAttachments[i].loadOp == VK_ATTACHMENT_LOAD_OP_LOAD) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                kVUID_Core_DrawState_InvalidRenderpass,
+                                "Render pass has an attachment with loadOp == VK_ATTACHMENT_LOAD_OP_LOAD and initialLayout == "
+                                "VK_IMAGE_LAYOUT_UNDEFINED.  This is probably not what you intended.  Consider using "
+                                "VK_ATTACHMENT_LOAD_OP_DONT_CARE instead if the image truly is undefined at the start of the "
+                                "render pass.");
+            }
+            if (FormatHasStencil(format) && pCreateInfo->pAttachments[i].stencilLoadOp == VK_ATTACHMENT_LOAD_OP_LOAD) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                kVUID_Core_DrawState_InvalidRenderpass,
+                                "Render pass has an attachment with stencilLoadOp == VK_ATTACHMENT_LOAD_OP_LOAD and initialLayout "
+                                "== VK_IMAGE_LAYOUT_UNDEFINED.  This is probably not what you intended.  Consider using "
+                                "VK_ATTACHMENT_LOAD_OP_DONT_CARE instead if the image truly is undefined at the start of the "
+                                "render pass.");
+            }
+        }
+    }
+
+    // Track when we're observing the first use of an attachment
+    std::vector<bool> attach_first_use(pCreateInfo->attachmentCount, true);
+    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
+        const VkSubpassDescription2KHR &subpass = pCreateInfo->pSubpasses[i];
+
+        // Check input attachments first, so we can detect first-use-as-input for VU #00349
+        for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
+            auto attach_index = subpass.pInputAttachments[j].attachment;
+            if (attach_index == VK_ATTACHMENT_UNUSED) continue;
+            switch (subpass.pInputAttachments[j].layout) {
+                case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
+                case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
+                    // These are ideal.
+                    break;
+
+                case VK_IMAGE_LAYOUT_GENERAL:
+                    // May not be optimal. TODO: reconsider this warning based on other constraints.
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+                                    VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUID_Core_DrawState_InvalidImageLayout,
+                                    "Layout for input attachment is GENERAL but should be READ_ONLY_OPTIMAL.");
+                    break;
+
+                case VK_IMAGE_LAYOUT_UNDEFINED:
+                case VK_IMAGE_LAYOUT_PREINITIALIZED:
+                    vuid = use_rp2 ? "VUID-VkAttachmentReference2KHR-layout-03077" : "VUID-VkAttachmentReference-layout-00857";
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                                    "Layout for input attachment reference %u in subpass %u is %s but must be "
+                                    "DEPTH_STENCIL_READ_ONLY_OPTIMAL, SHADER_READ_ONLY_OPTIMAL, or GENERAL.",
+                                    j, i, string_VkImageLayout(subpass.pInputAttachments[j].layout));
+                    break;
+
+                case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR:
+                case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR:
+                case VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR:
+                case VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR:
+                    if (!enabled_features.separate_depth_stencil_layouts_features.separateDepthStencilLayouts) {
+                        skip |= log_msg(
+                            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkAttachmentReference2KHR-separateDepthStencilLayouts-03313",
+                            "Layout for input attachment reference %u in subpass %u is %s but must not be "
+                            "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, "
+                            "VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR.",
+                            j, i, string_VkImageLayout(subpass.pInputAttachments[j].layout));
+                    } else if (subpass.pInputAttachments[j].aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) {
+                        skip |= log_msg(
+                            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkAttachmentReference2KHR-attachment-03314",
+                            "Layout for input attachment reference %u in subpass %u is %s but must not be "
+                            "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, "
+                            "VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR.",
+                            j, i, string_VkImageLayout(subpass.pInputAttachments[j].layout));
+                    } else if ((subpass.pInputAttachments[j].aspectMask &
+                                (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) ==
+                               (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) {
+                        if (subpass.pInputAttachments[j].layout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR ||
+                            subpass.pInputAttachments[j].layout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR) {
+                            const auto *attachment_reference_stencil_layout =
+                                lvl_find_in_chain<VkAttachmentReferenceStencilLayoutKHR>(subpass.pInputAttachments[j].pNext);
+                            if (attachment_reference_stencil_layout) {
+                                if (attachment_reference_stencil_layout->stencilLayout == VK_IMAGE_LAYOUT_UNDEFINED ||
+                                    attachment_reference_stencil_layout->stencilLayout == VK_IMAGE_LAYOUT_PREINITIALIZED ||
+                                    attachment_reference_stencil_layout->stencilLayout ==
+                                        VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL ||
+                                    attachment_reference_stencil_layout->stencilLayout ==
+                                        VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR ||
+                                    attachment_reference_stencil_layout->stencilLayout ==
+                                        VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR ||
+                                    attachment_reference_stencil_layout->stencilLayout ==
+                                        VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL ||
+                                    attachment_reference_stencil_layout->stencilLayout ==
+                                        VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL ||
+                                    attachment_reference_stencil_layout->stencilLayout ==
+                                        VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL ||
+                                    attachment_reference_stencil_layout->stencilLayout ==
+                                        VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL ||
+                                    attachment_reference_stencil_layout->stencilLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
+                                    skip |=
+                                        log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+                                                0, "VUID-VkAttachmentReferenceStencilLayoutKHR-stencilLayout-03318",
+                                                "In the attachment reference %u in subpass %u, the stencilLayout member of the "
+                                                "VkAttachmentReferenceStencilLayoutKHR structure in the pNext chain must not be "
+                                                "VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_PREINITIALIZED, "
+                                                "VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, "
+                                                "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, "
+                                                "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, "
+                                                "VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, "
+                                                "VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, "
+                                                "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, "
+                                                "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, or "
+                                                "VK_IMAGE_LAYOUT_PRESENT_SRC_KHR.",
+                                                j, i);
+                                }
+                            } else {
+                                skip |=
+                                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                            "VUID-VkAttachmentReference2KHR-attachment-03315",
+                                            "When the layout for input attachment reference %u in subpass %u is "
+                                            "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR or "
+                                            "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR then the pNext chain must include a valid "
+                                            "VkAttachmentReferenceStencilLayout instance.",
+                                            j, i);
+                            }
+                        }
+                    } else if (subpass.pInputAttachments[j].aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) {
+                        if (subpass.pInputAttachments[j].layout == VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR ||
+                            subpass.pInputAttachments[j].layout == VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR) {
+                            skip |= log_msg(
+                                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkAttachmentReference2KHR-attachment-03315",
+                                "When the aspectMask for input attachment reference %u in subpass %u is VK_IMAGE_ASPECT_DEPTH_BIT "
+                                "then the layout must not be VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, or "
+                                "VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR.",
+                                j, i);
+                        }
+                    } else if (subpass.pInputAttachments[j].aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) {
+                        if (subpass.pInputAttachments[j].layout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR ||
+                            subpass.pInputAttachments[j].layout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR) {
+                            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                            "VUID-VkAttachmentReference2KHR-attachment-03317",
+                                            "When the aspectMask for input attachment reference %u in subpass %u is "
+                                            "VK_IMAGE_ASPECT_STENCIL_BIT "
+                                            "then the layout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, or "
+                                            "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL.",
+                                            j, i);
+                        }
+                    }
+                    break;
+
+                case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR:
+                case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR:
+                    if (device_extensions.vk_khr_maintenance2) {
+                        break;
+                    } else {
+                        // Intentionally fall through to generic error message
+                    }
+                    // fall through
+
+                default:
+                    // No other layouts are acceptable
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    kVUID_Core_DrawState_InvalidImageLayout,
+                                    "Layout for input attachment is %s but can only be READ_ONLY_OPTIMAL or GENERAL.",
+                                    string_VkImageLayout(subpass.pInputAttachments[j].layout));
+            }
+
+            if (attach_first_use[attach_index]) {
+                skip |= ValidateLayoutVsAttachmentDescription(report_data, rp_version, subpass.pInputAttachments[j].layout,
+                                                              attach_index, pCreateInfo->pAttachments[attach_index]);
+
+                bool used_as_depth =
+                    (subpass.pDepthStencilAttachment != NULL && subpass.pDepthStencilAttachment->attachment == attach_index);
+                bool used_as_color = false;
+                for (uint32_t k = 0; !used_as_depth && !used_as_color && k < subpass.colorAttachmentCount; ++k) {
+                    used_as_color = (subpass.pColorAttachments[k].attachment == attach_index);
+                }
+                if (!used_as_depth && !used_as_color &&
+                    pCreateInfo->pAttachments[attach_index].loadOp == VK_ATTACHMENT_LOAD_OP_CLEAR) {
+                    vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-loadOp-03064" : "VUID-VkSubpassDescription-loadOp-00846";
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                                    "%s: attachment %u is first used as an input attachment in subpass %u with loadOp=CLEAR.",
+                                    function_name, attach_index, i);
+                }
+            }
+            attach_first_use[attach_index] = false;
+        }
+
+        for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
+            auto attach_index = subpass.pColorAttachments[j].attachment;
+            if (attach_index == VK_ATTACHMENT_UNUSED) continue;
+
+            // TODO: Need a way to validate shared presentable images here, currently just allowing
+            // VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR
+            //  as an acceptable layout, but need to make sure shared presentable images ONLY use that layout
+            switch (subpass.pColorAttachments[j].layout) {
+                case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
+                    // This is ideal.
+                case VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR:
+                    // TODO: See note above, just assuming that attachment is shared presentable and allowing this for now.
+                    break;
+
+                case VK_IMAGE_LAYOUT_GENERAL:
+                    // May not be optimal; TODO: reconsider this warning based on other constraints?
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+                                    VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUID_Core_DrawState_InvalidImageLayout,
+                                    "Layout for color attachment is GENERAL but should be COLOR_ATTACHMENT_OPTIMAL.");
+                    break;
+
+                case VK_IMAGE_LAYOUT_UNDEFINED:
+                case VK_IMAGE_LAYOUT_PREINITIALIZED:
+                    vuid = use_rp2 ? "VUID-VkAttachmentReference2KHR-layout-03077" : "VUID-VkAttachmentReference-layout-00857";
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                                    "Layout for color attachment reference %u in subpass %u is %s but should be "
+                                    "COLOR_ATTACHMENT_OPTIMAL or GENERAL.",
+                                    j, i, string_VkImageLayout(subpass.pColorAttachments[j].layout));
+                    break;
+
+                case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR:
+                case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR:
+                case VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR:
+                case VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR:
+                    if (!enabled_features.separate_depth_stencil_layouts_features.separateDepthStencilLayouts) {
+                        skip |= log_msg(
+                            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkAttachmentReference2KHR-separateDepthStencilLayouts-03313",
+                            "Layout for color attachment reference %u in subpass %u is %s but must not be "
+                            "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, "
+                            "VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR.",
+                            j, i, string_VkImageLayout(subpass.pColorAttachments[j].layout));
+                    } else if (subpass.pColorAttachments[j].aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) {
+                        skip |= log_msg(
+                            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkAttachmentReference2KHR-attachment-03314",
+                            "Layout for color attachment reference %u in subpass %u is %s but must not be "
+                            "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, "
+                            "VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR.",
+                            j, i, string_VkImageLayout(subpass.pColorAttachments[j].layout));
+                    } else if ((subpass.pColorAttachments[j].aspectMask &
+                                (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) ==
+                               (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) {
+                        if (subpass.pColorAttachments[j].layout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR ||
+                            subpass.pColorAttachments[j].layout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR) {
+                            const auto *attachment_reference_stencil_layout =
+                                lvl_find_in_chain<VkAttachmentReferenceStencilLayoutKHR>(subpass.pColorAttachments[j].pNext);
+                            if (attachment_reference_stencil_layout) {
+                                if (attachment_reference_stencil_layout->stencilLayout == VK_IMAGE_LAYOUT_UNDEFINED ||
+                                    attachment_reference_stencil_layout->stencilLayout == VK_IMAGE_LAYOUT_PREINITIALIZED ||
+                                    attachment_reference_stencil_layout->stencilLayout ==
+                                        VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL ||
+                                    attachment_reference_stencil_layout->stencilLayout ==
+                                        VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR ||
+                                    attachment_reference_stencil_layout->stencilLayout ==
+                                        VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR ||
+                                    attachment_reference_stencil_layout->stencilLayout ==
+                                        VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL ||
+                                    attachment_reference_stencil_layout->stencilLayout ==
+                                        VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL ||
+                                    attachment_reference_stencil_layout->stencilLayout ==
+                                        VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL ||
+                                    attachment_reference_stencil_layout->stencilLayout ==
+                                        VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL ||
+                                    attachment_reference_stencil_layout->stencilLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
+                                    skip |=
+                                        log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+                                                0, "VUID-VkAttachmentReferenceStencilLayoutKHR-stencilLayout-03318",
+                                                "In the attachment reference %u in subpass %u, the stencilLayout member of the "
+                                                "VkAttachmentReferenceStencilLayoutKHR structure in the pNext chain must not be "
+                                                "VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_PREINITIALIZED, "
+                                                "VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, "
+                                                "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, "
+                                                "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, "
+                                                "VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, "
+                                                "VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, "
+                                                "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, "
+                                                "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, or "
+                                                "VK_IMAGE_LAYOUT_PRESENT_SRC_KHR.",
+                                                j, i);
+                                }
+                            } else {
+                                skip |=
+                                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                            "VUID-VkAttachmentReference2KHR-attachment-03315",
+                                            "When the layout for color attachment reference %u in subpass %u is "
+                                            "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR or "
+                                            "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR then the pNext chain must include a valid "
+                                            "VkAttachmentReferenceStencilLayout instance.",
+                                            j, i);
+                            }
+                        }
+                    } else if (subpass.pColorAttachments[j].aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) {
+                        if (subpass.pColorAttachments[j].layout == VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR ||
+                            subpass.pColorAttachments[j].layout == VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR) {
+                            skip |= log_msg(
+                                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkAttachmentReference2KHR-attachment-03315",
+                                "When the aspectMask for color attachment reference %u in subpass %u is VK_IMAGE_ASPECT_DEPTH_BIT "
+                                "then the layout must not be VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, or "
+                                "VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR.",
+                                j, i);
+                        }
+                    } else if (subpass.pColorAttachments[j].aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) {
+                        if (subpass.pColorAttachments[j].layout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR ||
+                            subpass.pColorAttachments[j].layout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR) {
+                            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                            "VUID-VkAttachmentReference2KHR-attachment-03317",
+                                            "When the aspectMask for color attachment reference %u in subpass %u is "
+                                            "VK_IMAGE_ASPECT_STENCIL_BIT "
+                                            "then the layout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, or "
+                                            "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL.",
+                                            j, i);
+                        }
+                    }
+                    break;
+
+                default:
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    kVUID_Core_DrawState_InvalidImageLayout,
+                                    "Layout for color attachment is %s but can only be COLOR_ATTACHMENT_OPTIMAL or GENERAL.",
+                                    string_VkImageLayout(subpass.pColorAttachments[j].layout));
+            }
+
+            if (subpass.pResolveAttachments && (subpass.pResolveAttachments[j].attachment != VK_ATTACHMENT_UNUSED) &&
+                (subpass.pResolveAttachments[j].layout == VK_IMAGE_LAYOUT_UNDEFINED ||
+                 subpass.pResolveAttachments[j].layout == VK_IMAGE_LAYOUT_PREINITIALIZED)) {
+                vuid = use_rp2 ? "VUID-VkAttachmentReference2KHR-layout-03077" : "VUID-VkAttachmentReference-layout-00857";
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                                "Layout for resolve attachment reference %u in subpass %u is %s but should be "
+                                "COLOR_ATTACHMENT_OPTIMAL or GENERAL.",
+                                j, i, string_VkImageLayout(subpass.pResolveAttachments[j].layout));
+            }
+
+            if (attach_first_use[attach_index]) {
+                skip |= ValidateLayoutVsAttachmentDescription(report_data, rp_version, subpass.pColorAttachments[j].layout,
+                                                              attach_index, pCreateInfo->pAttachments[attach_index]);
+            }
+            attach_first_use[attach_index] = false;
+        }
+
+        if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
+            switch (subpass.pDepthStencilAttachment->layout) {
+                case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
+                case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
+                    // These are ideal.
+                    break;
+
+                case VK_IMAGE_LAYOUT_GENERAL:
+                    // May not be optimal; TODO: reconsider this warning based on other constraints? GENERAL can be better than
+                    // doing a bunch of transitions.
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+                                    VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUID_Core_DrawState_InvalidImageLayout,
+                                    "GENERAL layout for depth attachment may not give optimal performance.");
+                    break;
+
+                case VK_IMAGE_LAYOUT_UNDEFINED:
+                case VK_IMAGE_LAYOUT_PREINITIALIZED:
+                    vuid = use_rp2 ? "VUID-VkAttachmentReference2KHR-layout-03077" : "VUID-VkAttachmentReference-layout-00857";
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                                    "Layout for depth attachment reference in subpass %u is %s but must be a valid depth/stencil "
+                                    "layout or GENERAL.",
+                                    i, string_VkImageLayout(subpass.pDepthStencilAttachment->layout));
+                    break;
+
+                case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR:
+                case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR:
+                case VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR:
+                case VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR:
+                    if (!enabled_features.separate_depth_stencil_layouts_features.separateDepthStencilLayouts) {
+                        skip |= log_msg(
+                            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkAttachmentReference2KHR-separateDepthStencilLayouts-03313",
+                            "Layout for depth attachment reference in subpass %u is %s but must not be "
+                            "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, "
+                            "VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR.",
+                            i, string_VkImageLayout(subpass.pDepthStencilAttachment->layout));
+                    } else if (subpass.pDepthStencilAttachment->aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) {
+                        skip |= log_msg(
+                            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkAttachmentReference2KHR-attachment-03314",
+                            "Layout for depth attachment reference in subpass %u is %s but must not be "
+                            "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, "
+                            "VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR.",
+                            i, string_VkImageLayout(subpass.pDepthStencilAttachment->layout));
+                    } else if ((subpass.pDepthStencilAttachment->aspectMask &
+                                (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) ==
+                               (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) {
+                        if (subpass.pDepthStencilAttachment->layout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR ||
+                            subpass.pDepthStencilAttachment->layout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR) {
+                            const auto *attachment_reference_stencil_layout =
+                                lvl_find_in_chain<VkAttachmentReferenceStencilLayoutKHR>(subpass.pDepthStencilAttachment->pNext);
+                            if (attachment_reference_stencil_layout) {
+                                if (attachment_reference_stencil_layout->stencilLayout == VK_IMAGE_LAYOUT_UNDEFINED ||
+                                    attachment_reference_stencil_layout->stencilLayout == VK_IMAGE_LAYOUT_PREINITIALIZED ||
+                                    attachment_reference_stencil_layout->stencilLayout ==
+                                        VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL ||
+                                    attachment_reference_stencil_layout->stencilLayout ==
+                                        VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR ||
+                                    attachment_reference_stencil_layout->stencilLayout ==
+                                        VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR ||
+                                    attachment_reference_stencil_layout->stencilLayout ==
+                                        VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL ||
+                                    attachment_reference_stencil_layout->stencilLayout ==
+                                        VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL ||
+                                    attachment_reference_stencil_layout->stencilLayout ==
+                                        VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL ||
+                                    attachment_reference_stencil_layout->stencilLayout ==
+                                        VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL ||
+                                    attachment_reference_stencil_layout->stencilLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
+                                    skip |=
+                                        log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+                                                0, "VUID-VkAttachmentReferenceStencilLayoutKHR-stencilLayout-03318",
+                                                "In the attachment reference in subpass %u, the stencilLayout member of the "
+                                                "VkAttachmentReferenceStencilLayoutKHR structure in the pNext chain must not be "
+                                                "VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_PREINITIALIZED, "
+                                                "VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, "
+                                                "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, "
+                                                "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, "
+                                                "VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, "
+                                                "VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, "
+                                                "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, "
+                                                "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, or "
+                                                "VK_IMAGE_LAYOUT_PRESENT_SRC_KHR.",
+                                                i);
+                                }
+                            } else {
+                                skip |=
+                                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                            "VUID-VkAttachmentReference2KHR-attachment-03315",
+                                            "When the layout for depth attachment reference in subpass %u is "
+                                            "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR or "
+                                            "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR then the pNext chain must include a valid "
+                                            "VkAttachmentReferenceStencilLayout instance.",
+                                            i);
+                            }
+                        }
+                    } else if (subpass.pDepthStencilAttachment->aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) {
+                        if (subpass.pDepthStencilAttachment->layout == VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR ||
+                            subpass.pDepthStencilAttachment->layout == VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR) {
+                            skip |= log_msg(
+                                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkAttachmentReference2KHR-attachment-03315",
+                                "When the aspectMask for depth attachment reference in subpass %u is VK_IMAGE_ASPECT_DEPTH_BIT "
+                                "then the layout must not be VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, or "
+                                "VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR.",
+                                i);
+                        }
+                    } else if (subpass.pDepthStencilAttachment->aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) {
+                        if (subpass.pDepthStencilAttachment->layout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR ||
+                            subpass.pDepthStencilAttachment->layout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR) {
+                            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                            "VUID-VkAttachmentReference2KHR-attachment-03317",
+                                            "When the aspectMask for the depth attachment reference in subpass %u includes "
+                                            "VK_IMAGE_ASPECT_STENCIL_BIT, "
+                                            "the layout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR or "
+                                            "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR.",
+                                            i);
+                        }
+                    }
+                    break;
+
+                case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR:
+                case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR:
+                    if (device_extensions.vk_khr_maintenance2) {
+                        break;
+                    } else {
+                        // Intentionally fall through to generic error message
+                    }
+                    // fall through
+
+                default:
+                    // No other layouts are acceptable
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    kVUID_Core_DrawState_InvalidImageLayout,
+                                    "Layout for depth attachment is %s but can only be DEPTH_STENCIL_ATTACHMENT_OPTIMAL, "
+                                    "DEPTH_STENCIL_READ_ONLY_OPTIMAL or GENERAL.",
+                                    string_VkImageLayout(subpass.pDepthStencilAttachment->layout));
+            }
+
+            auto attach_index = subpass.pDepthStencilAttachment->attachment;
+            if (attach_first_use[attach_index]) {
+                skip |= ValidateLayoutVsAttachmentDescription(report_data, rp_version, subpass.pDepthStencilAttachment->layout,
+                                                              attach_index, pCreateInfo->pAttachments[attach_index]);
+            }
+            attach_first_use[attach_index] = false;
+        }
+    }
+    return skip;
+}
+
+// Helper function to validate correct usage bits set for buffers or images. Verify that (actual & desired) flags != 0 or, if strict
+// is true, verify that (actual & desired) flags == desired
+bool CoreChecks::ValidateUsageFlags(VkFlags actual, VkFlags desired, VkBool32 strict, const VulkanTypedHandle &typed_handle,
+                                    const char *msgCode, char const *func_name, char const *usage_str) const {
+    bool correct_usage = false;
+    bool skip = false;
+    const char *type_str = object_string[typed_handle.type];
+    if (strict) {
+        correct_usage = ((actual & desired) == desired);
+    } else {
+        correct_usage = ((actual & desired) != 0);
+    }
+    if (!correct_usage) {
+        if (msgCode == kVUIDUndefined) {
+            // TODO: Fix callers with kVUIDUndefined to use correct validation checks.
+            skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[typed_handle.type],
+                           typed_handle.handle, kVUID_Core_MemTrack_InvalidUsageFlag,
+                           "Invalid usage flag for %s used by %s. In this case, %s should have %s set during creation.",
+                           report_data->FormatHandle(typed_handle).c_str(), func_name, type_str, usage_str);
+        } else {
+            skip =
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[typed_handle.type], typed_handle.handle,
+                        msgCode, "Invalid usage flag for %s used by %s. In this case, %s should have %s set during creation.",
+                        report_data->FormatHandle(typed_handle).c_str(), func_name, type_str, usage_str);
+        }
+    }
+    return skip;
+}
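+
+// Illustrative sketch (not part of the validation logic above; values are hypothetical): how the 'strict' parameter changes
+// ValidateUsageFlags(). For an image created with
+//   createInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_SAMPLED_BIT
+// a non-strict check against VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT passes (at least one desired bit is
+// present), while a strict check against the same mask fails (VK_IMAGE_USAGE_STORAGE_BIT is missing) and emits the
+// "Invalid usage flag" error above.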
+
+// Helper function to validate usage flags for images. For the given image_state, send actual vs. desired usage off to the
+// helper above, where an error will be flagged if usage is not correct
+bool CoreChecks::ValidateImageUsageFlags(IMAGE_STATE const *image_state, VkFlags desired, bool strict, const char *msgCode,
+                                         char const *func_name, char const *usage_string) const {
+    return ValidateUsageFlags(image_state->createInfo.usage, desired, strict,
+                              VulkanTypedHandle(image_state->image, kVulkanObjectTypeImage), msgCode, func_name, usage_string);
+}
+
+bool CoreChecks::ValidateImageFormatFeatureFlags(IMAGE_STATE const *image_state, VkFormatFeatureFlags desired,
+                                                 char const *func_name, const char *linear_vuid, const char *optimal_vuid) const {
+    VkFormatProperties format_properties = GetPDFormatProperties(image_state->createInfo.format);
+    bool skip = false;
+    if (image_state->createInfo.tiling == VK_IMAGE_TILING_LINEAR) {
+        if ((format_properties.linearTilingFeatures & desired) != desired) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(image_state->image), linear_vuid,
+                            "In %s, invalid linearTilingFeatures (0x%08X) for format %u used by %s.", func_name,
+                            format_properties.linearTilingFeatures, image_state->createInfo.format,
+                            report_data->FormatHandle(image_state->image).c_str());
+        }
+    } else if (image_state->createInfo.tiling == VK_IMAGE_TILING_OPTIMAL) {
+        if ((format_properties.optimalTilingFeatures & desired) != desired) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(image_state->image), optimal_vuid,
+                            "In %s, invalid optimalTilingFeatures (0x%08X) for format %u used by %s.", func_name,
+                            format_properties.optimalTilingFeatures, image_state->createInfo.format,
+                            report_data->FormatHandle(image_state->image).c_str());
+        }
+    }
+    return skip;
+}
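+
+// Illustrative sketch (hypothetical example, not part of the checks above): ValidateImageFormatFeatureFlags() consults only the
+// feature set that matches the image's tiling. For an image created with VK_IMAGE_TILING_LINEAR, only
+// format_properties.linearTilingFeatures is inspected, so a desired feature that the format advertises only in
+// optimalTilingFeatures is still reported here with the 'linear_vuid' error.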
+
+bool CoreChecks::ValidateImageSubresourceLayers(const CMD_BUFFER_STATE *cb_node, const VkImageSubresourceLayers *subresource_layers,
+                                                char const *func_name, char const *member, uint32_t i) const {
+    bool skip = false;
+    // layerCount must not be zero
+    if (subresource_layers->layerCount == 0) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(cb_node->commandBuffer), "VUID-VkImageSubresourceLayers-layerCount-01700",
+                        "In %s, pRegions[%u].%s.layerCount must not be zero.", func_name, i, member);
+    }
+    // aspectMask must not contain VK_IMAGE_ASPECT_METADATA_BIT
+    if (subresource_layers->aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(cb_node->commandBuffer), "VUID-VkImageSubresourceLayers-aspectMask-00168",
+                        "In %s, pRegions[%u].%s.aspectMask has VK_IMAGE_ASPECT_METADATA_BIT set.", func_name, i, member);
+    }
+    // if aspectMask contains COLOR, it must not contain either DEPTH or STENCIL
+    if ((subresource_layers->aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) &&
+        (subresource_layers->aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT))) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(cb_node->commandBuffer), "VUID-VkImageSubresourceLayers-aspectMask-00167",
+                        "In %s, pRegions[%u].%s.aspectMask has VK_IMAGE_ASPECT_COLOR_BIT and either VK_IMAGE_ASPECT_DEPTH_BIT or "
+                        "VK_IMAGE_ASPECT_STENCIL_BIT set.",
+                        func_name, i, member);
+    }
+    return skip;
+}
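+
+// Illustrative sketch (hypothetical region, not part of the checks above): a copy region whose subresource is
+//   { .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT, .layerCount = 0 }
+// would trigger two of the checks in ValidateImageSubresourceLayers(): the zero layerCount
+// (VUID-VkImageSubresourceLayers-layerCount-01700) and the color aspect combined with a depth/stencil aspect
+// (VUID-VkImageSubresourceLayers-aspectMask-00167).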
+
+// Helper function to validate usage flags for buffers. For the given buffer_state, send actual vs. desired usage off to the
+// helper above, where an error will be flagged if usage is not correct
+bool CoreChecks::ValidateBufferUsageFlags(BUFFER_STATE const *buffer_state, VkFlags desired, bool strict, const char *msgCode,
+                                          char const *func_name, char const *usage_string) const {
+    return ValidateUsageFlags(buffer_state->createInfo.usage, desired, strict,
+                              VulkanTypedHandle(buffer_state->buffer, kVulkanObjectTypeBuffer), msgCode, func_name, usage_string);
+}
+
+bool CoreChecks::ValidateBufferViewRange(const BUFFER_STATE *buffer_state, const VkBufferViewCreateInfo *pCreateInfo,
+                                         const VkPhysicalDeviceLimits *device_limits) const {
+    bool skip = false;
+
+    const VkDeviceSize &range = pCreateInfo->range;
+    if (range != VK_WHOLE_SIZE) {
+        // Range must be greater than 0
+        if (range <= 0) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+                            HandleToUint64(buffer_state->buffer), "VUID-VkBufferViewCreateInfo-range-00928",
+                            "If VkBufferViewCreateInfo range (%" PRIuLEAST64
+                            ") does not equal VK_WHOLE_SIZE, range must be greater than 0.",
+                            range);
+        }
+        // Range must be a multiple of the element size of format
+        const uint32_t format_size = FormatElementSize(pCreateInfo->format);
+        if (SafeModulo(range, format_size) != 0) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+                            HandleToUint64(buffer_state->buffer), "VUID-VkBufferViewCreateInfo-range-00929",
+                            "If VkBufferViewCreateInfo range (%" PRIuLEAST64
+                            ") does not equal VK_WHOLE_SIZE, range must be a multiple of the element size of the format "
+                            "(%" PRIu32 ").",
+                            range, format_size);
+        }
+        // Range divided by the element size of format must be less than or equal to VkPhysicalDeviceLimits::maxTexelBufferElements
+        if (SafeDivision(range, format_size) > device_limits->maxTexelBufferElements) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+                            HandleToUint64(buffer_state->buffer), "VUID-VkBufferViewCreateInfo-range-00930",
+                            "If VkBufferViewCreateInfo range (%" PRIuLEAST64
+                            ") does not equal VK_WHOLE_SIZE, range divided by the element size of the format (%" PRIu32
+                            ") must be less than or equal to VkPhysicalDeviceLimits::maxTexelBufferElements (%" PRIuLEAST32 ").",
+                            range, format_size, device_limits->maxTexelBufferElements);
+        }
+        // The sum of range and offset must be less than or equal to the size of buffer
+        if (range + pCreateInfo->offset > buffer_state->createInfo.size) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+                            HandleToUint64(buffer_state->buffer), "VUID-VkBufferViewCreateInfo-offset-00931",
+                            "If VkBufferViewCreateInfo range (%" PRIuLEAST64
+                            ") does not equal VK_WHOLE_SIZE, the sum of offset (%" PRIuLEAST64
+                            ") and range must be less than or equal to the size of the buffer (%" PRIuLEAST64 ").",
+                            range, pCreateInfo->offset, buffer_state->createInfo.size);
+        }
+    }
+    return skip;
+}
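+
+// Illustrative sketch (hypothetical values, not part of the checks above): for a buffer view with format
+// VK_FORMAT_R32G32B32A32_SFLOAT (16-byte element size), a range of 4096 covers 4096 / 16 = 256 texels, which must not exceed
+// VkPhysicalDeviceLimits::maxTexelBufferElements; a range of 4100 would additionally fail the multiple-of-element-size check
+// above because 4100 % 16 != 0.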
+
+bool CoreChecks::ValidateBufferViewBuffer(const BUFFER_STATE *buffer_state, const VkBufferViewCreateInfo *pCreateInfo) const {
+    bool skip = false;
+    const VkFormatProperties format_properties = GetPDFormatProperties(pCreateInfo->format);
+    if ((buffer_state->createInfo.usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) &&
+        !(format_properties.bufferFeatures & VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+                        HandleToUint64(buffer_state->buffer), "VUID-VkBufferViewCreateInfo-buffer-00933",
+                        "If buffer was created with `usage` containing VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, format must "
+                        "be supported for uniform texel buffers.");
+    }
+    if ((buffer_state->createInfo.usage & VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT) &&
+        !(format_properties.bufferFeatures & VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+                        HandleToUint64(buffer_state->buffer), "VUID-VkBufferViewCreateInfo-buffer-00934",
+                        "If buffer was created with `usage` containing VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, format must "
+                        "be supported for storage texel buffers.");
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
+                                             const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer) const {
+    bool skip = false;
+
+    // TODO: Add check for "VUID-vkCreateBuffer-flags-00911"        (sparse address space accounting)
+
+    if ((pCreateInfo->flags & VK_BUFFER_CREATE_SPARSE_BINDING_BIT) && (!enabled_features.core.sparseBinding)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkBufferCreateInfo-flags-00915",
+                        "vkCreateBuffer(): the sparseBinding device feature is disabled: Buffers cannot be created with the "
+                        "VK_BUFFER_CREATE_SPARSE_BINDING_BIT set.");
+    }
+
+    if ((pCreateInfo->flags & VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT) && (!enabled_features.core.sparseResidencyBuffer)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkBufferCreateInfo-flags-00916",
+                        "vkCreateBuffer(): the sparseResidencyBuffer device feature is disabled: Buffers cannot be created with "
+                        "the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT set.");
+    }
+
+    if ((pCreateInfo->flags & VK_BUFFER_CREATE_SPARSE_ALIASED_BIT) && (!enabled_features.core.sparseResidencyAliased)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkBufferCreateInfo-flags-00917",
+                        "vkCreateBuffer(): the sparseResidencyAliased device feature is disabled: Buffers cannot be created with "
+                        "the VK_BUFFER_CREATE_SPARSE_ALIASED_BIT set.");
+    }
+
+    auto chained_devaddr_struct = lvl_find_in_chain<VkBufferDeviceAddressCreateInfoEXT>(pCreateInfo->pNext);
+    if (chained_devaddr_struct) {
+        if (!(pCreateInfo->flags & VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT) &&
+            chained_devaddr_struct->deviceAddress != 0) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkBufferCreateInfo-deviceAddress-02604",
+                            "vkCreateBuffer(): Non-zero VkBufferDeviceAddressCreateInfoEXT::deviceAddress "
+                            "requires VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT.");
+        }
+    }
+
+    auto chained_opaqueaddr_struct = lvl_find_in_chain<VkBufferOpaqueCaptureAddressCreateInfoKHR>(pCreateInfo->pNext);
+    if (chained_opaqueaddr_struct) {
+        if (!(pCreateInfo->flags & VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR) &&
+            chained_opaqueaddr_struct->opaqueCaptureAddress != 0) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkBufferCreateInfo-opaqueCaptureAddress-03337",
+                            "vkCreateBuffer(): Non-zero VkBufferOpaqueCaptureAddressCreateInfoKHR::opaqueCaptureAddress "
+                            "requires VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR.");
+        }
+    }
+
+    if ((pCreateInfo->flags & VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR) &&
+        !enabled_features.buffer_device_address.bufferDeviceAddressCaptureReplay &&
+        !enabled_features.buffer_device_address_ext.bufferDeviceAddressCaptureReplay) {
+        skip |= log_msg(
+            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+            "VUID-VkBufferCreateInfo-flags-03338",
+            "vkCreateBuffer(): the bufferDeviceAddressCaptureReplay device feature is disabled: Buffers cannot be created with "
+            "the VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR set.");
+    }
+
+    if (pCreateInfo->sharingMode == VK_SHARING_MODE_CONCURRENT && pCreateInfo->pQueueFamilyIndices) {
+        skip |= ValidateQueueFamilies(pCreateInfo->queueFamilyIndexCount, pCreateInfo->pQueueFamilyIndices, "vkCreateBuffer",
+                                      "pCreateInfo->pQueueFamilyIndices", "VUID-VkBufferCreateInfo-sharingMode-01419",
+                                      "VUID-VkBufferCreateInfo-sharingMode-01419", false);
+    }
+
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
+                                                 const VkAllocationCallbacks *pAllocator, VkBufferView *pView) const {
+    bool skip = false;
+    const BUFFER_STATE *buffer_state = GetBufferState(pCreateInfo->buffer);
+    // If this isn't a sparse buffer, it needs to have memory backing it at CreateBufferView time
+    if (buffer_state) {
+        skip |= ValidateMemoryIsBoundToBuffer(buffer_state, "vkCreateBufferView()", "VUID-VkBufferViewCreateInfo-buffer-00935");
+        // In order to create a valid buffer view, the buffer must have been created with at least one of the following flags:
+        // UNIFORM_TEXEL_BUFFER_BIT or STORAGE_TEXEL_BUFFER_BIT
+        skip |= ValidateBufferUsageFlags(buffer_state,
+                                         VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, false,
+                                         "VUID-VkBufferViewCreateInfo-buffer-00932", "vkCreateBufferView()",
+                                         "VK_BUFFER_USAGE_[STORAGE|UNIFORM]_TEXEL_BUFFER_BIT");
+
+        // Buffer view offset must be less than the size of buffer
+        if (pCreateInfo->offset >= buffer_state->createInfo.size) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+                            HandleToUint64(buffer_state->buffer), "VUID-VkBufferViewCreateInfo-offset-00925",
+                            "VkBufferViewCreateInfo offset (%" PRIuLEAST64
+                            ") must be less than the size of the buffer (%" PRIuLEAST64 ").",
+                            pCreateInfo->offset, buffer_state->createInfo.size);
+        }
+
+        const VkPhysicalDeviceLimits *device_limits = &phys_dev_props.limits;
+        // Buffer view offset must be a multiple of VkPhysicalDeviceLimits::minTexelBufferOffsetAlignment
+        if ((pCreateInfo->offset % device_limits->minTexelBufferOffsetAlignment) != 0 &&
+            !enabled_features.texel_buffer_alignment_features.texelBufferAlignment) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+                            HandleToUint64(buffer_state->buffer), "VUID-VkBufferViewCreateInfo-offset-02749",
+                            "VkBufferViewCreateInfo offset (%" PRIuLEAST64
+                            ") must be a multiple of VkPhysicalDeviceLimits::minTexelBufferOffsetAlignment (%" PRIuLEAST64 ").",
+                            pCreateInfo->offset, device_limits->minTexelBufferOffsetAlignment);
+        }
+
+        if (enabled_features.texel_buffer_alignment_features.texelBufferAlignment) {
+            VkDeviceSize elementSize = FormatElementSize(pCreateInfo->format);
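+            // Per the error messages below: if the size of a texel is a multiple of three bytes, the size of a single
+            // component of the format is used for the single-texel alignment check instead.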
+            if ((elementSize % 3) == 0) {
+                elementSize /= 3;
+            }
+            if (buffer_state->createInfo.usage & VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT) {
+                VkDeviceSize alignmentRequirement =
+                    phys_dev_ext_props.texel_buffer_alignment_props.storageTexelBufferOffsetAlignmentBytes;
+                if (phys_dev_ext_props.texel_buffer_alignment_props.storageTexelBufferOffsetSingleTexelAlignment) {
+                    alignmentRequirement = std::min(alignmentRequirement, elementSize);
+                }
+                if (SafeModulo(pCreateInfo->offset, alignmentRequirement) != 0) {
+                    skip |= log_msg(
+                        report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+                        HandleToUint64(buffer_state->buffer), "VUID-VkBufferViewCreateInfo-buffer-02750",
+                        "If buffer was created with usage containing VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, "
+                        "VkBufferViewCreateInfo offset (%" PRIuLEAST64
+                        ") must be a multiple of the lesser of "
+                        "VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::storageTexelBufferOffsetAlignmentBytes (%" PRIuLEAST64
+                        ") or, if VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::storageTexelBufferOffsetSingleTexelAlignment "
+                        "(%" PRId32
+                        ") is VK_TRUE, the size of a texel of the requested format. "
+                        "If the size of a texel is a multiple of three bytes, then the size of a "
+                        "single component of format is used instead",
+                        pCreateInfo->offset, phys_dev_ext_props.texel_buffer_alignment_props.storageTexelBufferOffsetAlignmentBytes,
+                        phys_dev_ext_props.texel_buffer_alignment_props.storageTexelBufferOffsetSingleTexelAlignment);
+                }
+            }
+            if (buffer_state->createInfo.usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) {
+                VkDeviceSize alignmentRequirement =
+                    phys_dev_ext_props.texel_buffer_alignment_props.uniformTexelBufferOffsetAlignmentBytes;
+                if (phys_dev_ext_props.texel_buffer_alignment_props.uniformTexelBufferOffsetSingleTexelAlignment) {
+                    alignmentRequirement = std::min(alignmentRequirement, elementSize);
+                }
+                if (SafeModulo(pCreateInfo->offset, alignmentRequirement) != 0) {
+                    skip |= log_msg(
+                        report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+                        HandleToUint64(buffer_state->buffer), "VUID-VkBufferViewCreateInfo-buffer-02751",
+                        "If buffer was created with usage containing VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, "
+                        "VkBufferViewCreateInfo offset (%" PRIuLEAST64
+                        ") must be a multiple of the lesser of "
+                        "VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::uniformTexelBufferOffsetAlignmentBytes (%" PRIuLEAST64
+                        ") or, if VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::uniformTexelBufferOffsetSingleTexelAlignment "
+                        "(%" PRId32
+                        ") is VK_TRUE, the size of a texel of the requested format. "
+                        "If the size of a texel is a multiple of three bytes, then the size of a "
+                        "single component of format is used instead",
+                        pCreateInfo->offset, phys_dev_ext_props.texel_buffer_alignment_props.uniformTexelBufferOffsetAlignmentBytes,
+                        phys_dev_ext_props.texel_buffer_alignment_props.uniformTexelBufferOffsetSingleTexelAlignment);
+                }
+            }
+        }
+
+        skip |= ValidateBufferViewRange(buffer_state, pCreateInfo, device_limits);
+
+        skip |= ValidateBufferViewBuffer(buffer_state, pCreateInfo);
+    }
+    return skip;
+}
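+
+// Illustrative sketch of the texelBufferAlignment path above (hypothetical values): for a storage texel buffer view with format
+// VK_FORMAT_R32G32B32_SFLOAT the texel size 12 is a multiple of three bytes, so the single-component size 4 is used; if
+// storageTexelBufferOffsetSingleTexelAlignment is VK_TRUE and storageTexelBufferOffsetAlignmentBytes is 16, the required
+// alignment becomes min(16, 4) = 4, so an offset of 8 passes SafeModulo() while an offset of 6 fails and emits
+// VUID-VkBufferViewCreateInfo-buffer-02750.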
+
+// For the given format verify that the aspect masks make sense
+bool CoreChecks::ValidateImageAspectMask(VkImage image, VkFormat format, VkImageAspectFlags aspect_mask, const char *func_name,
+                                         const char *vuid) const {
+    bool skip = false;
+    VkDebugReportObjectTypeEXT objectType = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT;
+    if (image != VK_NULL_HANDLE) {
+        objectType = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT;
+    }
+
+    if (FormatIsColor(format)) {
+        if ((aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) != VK_IMAGE_ASPECT_COLOR_BIT) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, HandleToUint64(image), vuid,
+                            "%s: Color image formats must have the VK_IMAGE_ASPECT_COLOR_BIT set.", func_name);
+        } else if ((aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) != aspect_mask) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, HandleToUint64(image), vuid,
+                            "%s: Color image formats must have ONLY the VK_IMAGE_ASPECT_COLOR_BIT set.", func_name);
+        }
+    } else if (FormatIsDepthAndStencil(format)) {
+        if ((aspect_mask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) == 0) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, HandleToUint64(image), vuid,
+                            "%s: Depth/stencil image formats must have at least one of VK_IMAGE_ASPECT_DEPTH_BIT and "
+                            "VK_IMAGE_ASPECT_STENCIL_BIT set.",
+                            func_name);
+        } else if ((aspect_mask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) != aspect_mask) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, HandleToUint64(image), vuid,
+                            "%s: Combination depth/stencil image formats can have only the VK_IMAGE_ASPECT_DEPTH_BIT and "
+                            "VK_IMAGE_ASPECT_STENCIL_BIT set.",
+                            func_name);
+        }
+    } else if (FormatIsDepthOnly(format)) {
+        if ((aspect_mask & VK_IMAGE_ASPECT_DEPTH_BIT) != VK_IMAGE_ASPECT_DEPTH_BIT) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, HandleToUint64(image), vuid,
+                            "%s: Depth-only image formats must have the VK_IMAGE_ASPECT_DEPTH_BIT set.", func_name);
+        } else if ((aspect_mask & VK_IMAGE_ASPECT_DEPTH_BIT) != aspect_mask) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, HandleToUint64(image), vuid,
+                            "%s: Depth-only image formats can have only the VK_IMAGE_ASPECT_DEPTH_BIT set.", func_name);
+        }
+    } else if (FormatIsStencilOnly(format)) {
+        if ((aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) != VK_IMAGE_ASPECT_STENCIL_BIT) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, HandleToUint64(image), vuid,
+                            "%s: Stencil-only image formats must have the VK_IMAGE_ASPECT_STENCIL_BIT set.", func_name);
+        } else if ((aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) != aspect_mask) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, HandleToUint64(image), vuid,
+                            "%s: Stencil-only image formats can have only the VK_IMAGE_ASPECT_STENCIL_BIT set.", func_name);
+        }
+    } else if (FormatIsMultiplane(format)) {
+        VkImageAspectFlags valid_flags = VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT;
+        if (3 == FormatPlaneCount(format)) {
+            valid_flags = valid_flags | VK_IMAGE_ASPECT_PLANE_2_BIT;
+        }
+        if ((aspect_mask & valid_flags) != aspect_mask) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, HandleToUint64(image), vuid,
+                            "%s: Multi-plane image formats may have only VK_IMAGE_ASPECT_COLOR_BIT or VK_IMAGE_ASPECT_PLANE_n_BITs "
+                            "set, where n = [0, 1, 2].",
+                            func_name);
+        }
+    }
+    return skip;
+}
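+
+// Illustrative sketch (hypothetical format, not part of the checks above): for a VK_FORMAT_D24_UNORM_S8_UINT image (a combined
+// depth/stencil format), an aspectMask of VK_IMAGE_ASPECT_DEPTH_BIT, VK_IMAGE_ASPECT_STENCIL_BIT, or both is accepted by
+// ValidateImageAspectMask(), while adding VK_IMAGE_ASPECT_COLOR_BIT fails the "only the DEPTH/STENCIL bits" branch for
+// FormatIsDepthAndStencil() formats.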
+
+bool CoreChecks::ValidateImageSubresourceRange(const uint32_t image_mip_count, const uint32_t image_layer_count,
+                                               const VkImageSubresourceRange &subresourceRange, const char *cmd_name,
+                                               const char *param_name, const char *image_layer_count_var_name,
+                                               const uint64_t image_handle, SubresourceRangeErrorCodes errorCodes) const {
+    bool skip = false;
+
+    // Validate mip levels
+    if (subresourceRange.baseMipLevel >= image_mip_count) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
+                        errorCodes.base_mip_err,
+                        "%s: %s.baseMipLevel (= %" PRIu32
+                        ") is greater than or equal to the mip level count of the image (i.e. greater than or equal to %" PRIu32 ").",
+                        cmd_name, param_name, subresourceRange.baseMipLevel, image_mip_count);
+    }
+
+    if (subresourceRange.levelCount != VK_REMAINING_MIP_LEVELS) {
+        if (subresourceRange.levelCount == 0) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
+                            errorCodes.mip_count_err, "%s: %s.levelCount is 0.", cmd_name, param_name);
+        } else {
+            const uint64_t necessary_mip_count = uint64_t{subresourceRange.baseMipLevel} + uint64_t{subresourceRange.levelCount};
+
+            if (necessary_mip_count > image_mip_count) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
+                                errorCodes.mip_count_err,
+                                "%s: %s.baseMipLevel + .levelCount (= %" PRIu32 " + %" PRIu32 " = %" PRIu64
+                                ") is greater than the mip level count of the image (i.e. greater than %" PRIu32 ").",
+                                cmd_name, param_name, subresourceRange.baseMipLevel, subresourceRange.levelCount,
+                                necessary_mip_count, image_mip_count);
+            }
+        }
+    }
+
+    // Validate array layers
+    if (subresourceRange.baseArrayLayer >= image_layer_count) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
+                        errorCodes.base_layer_err,
+                        "%s: %s.baseArrayLayer (= %" PRIu32
+                        ") is greater than or equal to the %s of the image when it was created (i.e. greater than or equal to %" PRIu32 ").",
+                        cmd_name, param_name, subresourceRange.baseArrayLayer, image_layer_count_var_name, image_layer_count);
+    }
+
+    if (subresourceRange.layerCount != VK_REMAINING_ARRAY_LAYERS) {
+        if (subresourceRange.layerCount == 0) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
+                            errorCodes.layer_count_err, "%s: %s.layerCount is 0.", cmd_name, param_name);
+        } else {
+            const uint64_t necessary_layer_count =
+                uint64_t{subresourceRange.baseArrayLayer} + uint64_t{subresourceRange.layerCount};
+
+            if (necessary_layer_count > image_layer_count) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
+                                errorCodes.layer_count_err,
+                                "%s: %s.baseArrayLayer + .layerCount (= %" PRIu32 " + %" PRIu32 " = %" PRIu64
+                                ") is greater than the %s of the image when it was created (i.e. greater than %" PRIu32 ").",
+                                cmd_name, param_name, subresourceRange.baseArrayLayer, subresourceRange.layerCount,
+                                necessary_layer_count, image_layer_count_var_name, image_layer_count);
+            }
+        }
+    }
+
+    return skip;
+}
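+
+// Illustrative sketch (hypothetical values, not part of the checks above): for an image created with mipLevels = 4 and
+// arrayLayers = 1, a subresource range of { baseMipLevel = 2, levelCount = 3, ... } is rejected because 2 + 3 = 5 exceeds the
+// image's mip level count, while levelCount = VK_REMAINING_MIP_LEVELS skips the sum check entirely.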
+
+bool CoreChecks::ValidateCreateImageViewSubresourceRange(const IMAGE_STATE *image_state, bool is_imageview_2d_type,
+                                                         const VkImageSubresourceRange &subresourceRange) const {
+    bool is_khr_maintenance1 = IsExtEnabled(device_extensions.vk_khr_maintenance1);
+    bool is_image_slicable = image_state->createInfo.imageType == VK_IMAGE_TYPE_3D &&
+                             (image_state->createInfo.flags & VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR);
+    bool is_3D_to_2D_map = is_khr_maintenance1 && is_image_slicable && is_imageview_2d_type;
+
+    const auto image_layer_count = is_3D_to_2D_map ? image_state->createInfo.extent.depth : image_state->createInfo.arrayLayers;
+    const auto image_layer_count_var_name = is_3D_to_2D_map ? "extent.depth" : "arrayLayers";
+
+    SubresourceRangeErrorCodes subresourceRangeErrorCodes = {};
+    subresourceRangeErrorCodes.base_mip_err = "VUID-VkImageViewCreateInfo-subresourceRange-01478";
+    subresourceRangeErrorCodes.mip_count_err = "VUID-VkImageViewCreateInfo-subresourceRange-01718";
+    subresourceRangeErrorCodes.base_layer_err = is_khr_maintenance1 ? (is_3D_to_2D_map ? "VUID-VkImageViewCreateInfo-image-02724"
+                                                                                       : "VUID-VkImageViewCreateInfo-image-01482")
+                                                                    : "VUID-VkImageViewCreateInfo-subresourceRange-01480";
+    subresourceRangeErrorCodes.layer_count_err = is_khr_maintenance1
+                                                     ? (is_3D_to_2D_map ? "VUID-VkImageViewCreateInfo-subresourceRange-02725"
+                                                                        : "VUID-VkImageViewCreateInfo-subresourceRange-01483")
+                                                     : "VUID-VkImageViewCreateInfo-subresourceRange-01719";
+
+    return ValidateImageSubresourceRange(image_state->createInfo.mipLevels, image_layer_count, subresourceRange,
+                                         "vkCreateImageView", "pCreateInfo->subresourceRange", image_layer_count_var_name,
+                                         HandleToUint64(image_state->image), subresourceRangeErrorCodes);
+}
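+
+// Illustrative note (a sketch of the selection logic above, not an additional check): when VK_KHR_maintenance1 is enabled and a
+// 3D image was created with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR, a 2D or 2D-array view is validated against
+// extent.depth rather than arrayLayers, and the 3D-to-2D specific VUIDs (-image-02724 / -subresourceRange-02725) are selected
+// instead of the generic subresourceRange ones.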
+
+bool CoreChecks::ValidateCmdClearColorSubresourceRange(const IMAGE_STATE *image_state,
+                                                       const VkImageSubresourceRange &subresourceRange,
+                                                       const char *param_name) const {
+    SubresourceRangeErrorCodes subresourceRangeErrorCodes = {};
+    subresourceRangeErrorCodes.base_mip_err = "VUID-vkCmdClearColorImage-baseMipLevel-01470";
+    subresourceRangeErrorCodes.mip_count_err = "VUID-vkCmdClearColorImage-pRanges-01692";
+    subresourceRangeErrorCodes.base_layer_err = "VUID-vkCmdClearColorImage-baseArrayLayer-01472";
+    subresourceRangeErrorCodes.layer_count_err = "VUID-vkCmdClearColorImage-pRanges-01693";
+
+    return ValidateImageSubresourceRange(image_state->createInfo.mipLevels, image_state->createInfo.arrayLayers, subresourceRange,
+                                         "vkCmdClearColorImage", param_name, "arrayLayers", HandleToUint64(image_state->image),
+                                         subresourceRangeErrorCodes);
+}
+
+bool CoreChecks::ValidateCmdClearDepthSubresourceRange(const IMAGE_STATE *image_state,
+                                                       const VkImageSubresourceRange &subresourceRange,
+                                                       const char *param_name) const {
+    SubresourceRangeErrorCodes subresourceRangeErrorCodes = {};
+    subresourceRangeErrorCodes.base_mip_err = "VUID-vkCmdClearDepthStencilImage-baseMipLevel-01474";
+    subresourceRangeErrorCodes.mip_count_err = "VUID-vkCmdClearDepthStencilImage-pRanges-01694";
+    subresourceRangeErrorCodes.base_layer_err = "VUID-vkCmdClearDepthStencilImage-baseArrayLayer-01476";
+    subresourceRangeErrorCodes.layer_count_err = "VUID-vkCmdClearDepthStencilImage-pRanges-01695";
+
+    return ValidateImageSubresourceRange(image_state->createInfo.mipLevels, image_state->createInfo.arrayLayers, subresourceRange,
+                                         "vkCmdClearDepthStencilImage", param_name, "arrayLayers",
+                                         HandleToUint64(image_state->image), subresourceRangeErrorCodes);
+}
+
+bool CoreChecks::ValidateImageBarrierSubresourceRange(const IMAGE_STATE *image_state,
+                                                      const VkImageSubresourceRange &subresourceRange, const char *cmd_name,
+                                                      const char *param_name) const {
+    SubresourceRangeErrorCodes subresourceRangeErrorCodes = {};
+    subresourceRangeErrorCodes.base_mip_err = "VUID-VkImageMemoryBarrier-subresourceRange-01486";
+    subresourceRangeErrorCodes.mip_count_err = "VUID-VkImageMemoryBarrier-subresourceRange-01724";
+    subresourceRangeErrorCodes.base_layer_err = "VUID-VkImageMemoryBarrier-subresourceRange-01488";
+    subresourceRangeErrorCodes.layer_count_err = "VUID-VkImageMemoryBarrier-subresourceRange-01725";
+
+    return ValidateImageSubresourceRange(image_state->createInfo.mipLevels, image_state->createInfo.arrayLayers, subresourceRange,
+                                         cmd_name, param_name, "arrayLayers", HandleToUint64(image_state->image),
+                                         subresourceRangeErrorCodes);
+}
+
+bool CoreChecks::PreCallValidateCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
+                                                const VkAllocationCallbacks *pAllocator, VkImageView *pView) const {
+    bool skip = false;
+    const IMAGE_STATE *image_state = GetImageState(pCreateInfo->image);
+    if (image_state) {
+        skip |=
+            ValidateImageUsageFlags(image_state,
+                                    VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT |
+                                        VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
+                                        VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV | VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT,
+                                    false, kVUIDUndefined, "vkCreateImageView()",
+                                    "VK_IMAGE_USAGE_[SAMPLED|STORAGE|COLOR_ATTACHMENT|DEPTH_STENCIL_ATTACHMENT|INPUT_ATTACHMENT|"
+                                    "SHADING_RATE_IMAGE|FRAGMENT_DENSITY_MAP]_BIT");
+        // If this isn't a sparse image, it needs to have memory backing it at CreateImageView time
+        skip |= ValidateMemoryIsBoundToImage(image_state, "vkCreateImageView()", "VUID-VkImageViewCreateInfo-image-01020");
+        // Checks imported from image layer
+        skip |= ValidateCreateImageViewSubresourceRange(
+            image_state, pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_2D || pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_2D_ARRAY,
+            pCreateInfo->subresourceRange);
+
+        VkImageCreateFlags image_flags = image_state->createInfo.flags;
+        VkFormat image_format = image_state->createInfo.format;
+        VkImageUsageFlags image_usage = image_state->createInfo.usage;
+        VkImageTiling image_tiling = image_state->createInfo.tiling;
+        VkFormat view_format = pCreateInfo->format;
+        VkImageAspectFlags aspect_mask = pCreateInfo->subresourceRange.aspectMask;
+        VkImageType image_type = image_state->createInfo.imageType;
+        VkImageViewType view_type = pCreateInfo->viewType;
+
+        // If there's a chained VkImageViewUsageCreateInfo struct, modify image_usage to match
+        auto chained_ivuci_struct = lvl_find_in_chain<VkImageViewUsageCreateInfoKHR>(pCreateInfo->pNext);
+        if (chained_ivuci_struct) {
+            image_usage = chained_ivuci_struct->usage;
+        }
+
+        // Validate VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT state, if view/image formats differ
+        if ((image_flags & VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT) && (image_format != view_format)) {
+            if (FormatIsMultiplane(image_format)) {
+                VkFormat compat_format = FindMultiplaneCompatibleFormat(image_format, aspect_mask);
+                if (view_format != compat_format) {
+                    // View format must match the multiplane compatible format
+                    std::stringstream ss;
+                    ss << "vkCreateImageView(): ImageView format " << string_VkFormat(view_format)
+                       << " is not compatible with plane " << GetPlaneIndex(aspect_mask) << " of underlying image format "
+                       << string_VkFormat(image_format) << ", must be " << string_VkFormat(compat_format) << ".";
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                    HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-image-01586", "%s",
+                                    ss.str().c_str());
+                }
+            } else {
+                if ((!device_extensions.vk_khr_maintenance2 ||
+                     !(image_flags & VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR))) {
+                    // Format MUST be compatible (in the same format compatibility class) as the format the image was created with
+                    if (FormatCompatibilityClass(image_format) != FormatCompatibilityClass(view_format)) {
+                        std::stringstream ss;
+                        ss << "vkCreateImageView(): ImageView format " << string_VkFormat(view_format)
+                           << " is not in the same format compatibility class as "
+                           << report_data->FormatHandle(pCreateInfo->image).c_str() << "  format " << string_VkFormat(image_format)
+                           << ".  Images created with the VK_IMAGE_CREATE_MUTABLE_FORMAT BIT "
+                           << "can support ImageViews with differing formats but they must be in the same compatibility class.";
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                        HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-image-01018", "%s",
+                                        ss.str().c_str());
+                    }
+                }
+            }
+        } else {
+            // Format MUST be IDENTICAL to the format the image was created with
+            if (image_format != view_format) {
+                std::stringstream ss;
+                ss << "vkCreateImageView() format " << string_VkFormat(view_format) << " differs from "
+                   << report_data->FormatHandle(pCreateInfo->image).c_str() << " format " << string_VkFormat(image_format)
+                   << ".  Formats MUST be IDENTICAL unless VK_IMAGE_CREATE_MUTABLE_FORMAT BIT was set on image creation.";
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-image-01019", "%s", ss.str().c_str());
+            }
+        }
+
+        // Validate correct image aspect bits for desired formats and format consistency
+        skip |= ValidateImageAspectMask(image_state->image, image_format, aspect_mask, "vkCreateImageView()");
+
+        switch (image_type) {
+            case VK_IMAGE_TYPE_1D:
+                if (view_type != VK_IMAGE_VIEW_TYPE_1D && view_type != VK_IMAGE_VIEW_TYPE_1D_ARRAY) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                    HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-subResourceRange-01021",
+                                    "vkCreateImageView(): pCreateInfo->viewType %s is not compatible with image type %s.",
+                                    string_VkImageViewType(view_type), string_VkImageType(image_type));
+                }
+                break;
+            case VK_IMAGE_TYPE_2D:
+                if (view_type != VK_IMAGE_VIEW_TYPE_2D && view_type != VK_IMAGE_VIEW_TYPE_2D_ARRAY) {
+                    if ((view_type == VK_IMAGE_VIEW_TYPE_CUBE || view_type == VK_IMAGE_VIEW_TYPE_CUBE_ARRAY) &&
+                        !(image_flags & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT)) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                        HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-image-01003",
+                                        "vkCreateImageView(): pCreateInfo->viewType %s is not compatible with image type %s.",
+                                        string_VkImageViewType(view_type), string_VkImageType(image_type));
+                    } else if (view_type != VK_IMAGE_VIEW_TYPE_CUBE && view_type != VK_IMAGE_VIEW_TYPE_CUBE_ARRAY) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                        HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-subResourceRange-01021",
+                                        "vkCreateImageView(): pCreateInfo->viewType %s is not compatible with image type %s.",
+                                        string_VkImageViewType(view_type), string_VkImageType(image_type));
+                    }
+                }
+                break;
+            case VK_IMAGE_TYPE_3D:
+                if (device_extensions.vk_khr_maintenance1) {
+                    if (view_type != VK_IMAGE_VIEW_TYPE_3D) {
+                        if ((view_type == VK_IMAGE_VIEW_TYPE_2D || view_type == VK_IMAGE_VIEW_TYPE_2D_ARRAY)) {
+                            if (!(image_flags & VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR)) {
+                                skip |=
+                                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                            HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-image-01005",
+                                            "vkCreateImageView(): pCreateInfo->viewType %s is not compatible with image type %s.",
+                                            string_VkImageViewType(view_type), string_VkImageType(image_type));
+                            } else if ((image_flags & (VK_IMAGE_CREATE_SPARSE_BINDING_BIT | VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT |
+                                                       VK_IMAGE_CREATE_SPARSE_ALIASED_BIT))) {
+                                skip |=
+                                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                            HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-subResourceRange-01021",
+                                            "vkCreateImageView(): pCreateInfo->viewType %s is not compatible with image type %s "
+                                            "when the VK_IMAGE_CREATE_SPARSE_BINDING_BIT, VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT, or "
+                                            "VK_IMAGE_CREATE_SPARSE_ALIASED_BIT flags are enabled.",
+                                            string_VkImageViewType(view_type), string_VkImageType(image_type));
+                            }
+                        } else {
+                            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                            HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-subResourceRange-01021",
+                                            "vkCreateImageView(): pCreateInfo->viewType %s is not compatible with image type %s.",
+                                            string_VkImageViewType(view_type), string_VkImageType(image_type));
+                        }
+                    }
+                } else {
+                    if (view_type != VK_IMAGE_VIEW_TYPE_3D) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                        HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-subResourceRange-01021",
+                                        "vkCreateImageView(): pCreateInfo->viewType %s is not compatible with image type %s.",
+                                        string_VkImageViewType(view_type), string_VkImageType(image_type));
+                    }
+                }
+                break;
+            default:
+                break;
+        }
+
+        // External format checks needed when VK_ANDROID_external_memory_android_hardware_buffer enabled
+        if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
+            skip |= ValidateCreateImageViewANDROID(pCreateInfo);
+        }
+
+        VkFormatProperties format_properties = GetPDFormatProperties(view_format);
+        VkFormatFeatureFlags tiling_features = (image_tiling == VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
+                                                                                         : format_properties.optimalTilingFeatures;
+
+        if (tiling_features == 0) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-None-02273",
+                            "vkCreateImageView(): pCreateInfo->format %s with tiling %s has no supported format features on this "
+                            "physical device.",
+                            string_VkFormat(view_format), string_VkImageTiling(image_tiling));
+        } else if ((image_usage & VK_IMAGE_USAGE_SAMPLED_BIT) && !(tiling_features & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-usage-02274",
+                            "vkCreateImageView(): pCreateInfo->format %s with tiling %s does not support usage that includes "
+                            "VK_IMAGE_USAGE_SAMPLED_BIT.",
+                            string_VkFormat(view_format), string_VkImageTiling(image_tiling));
+        } else if ((image_usage & VK_IMAGE_USAGE_STORAGE_BIT) && !(tiling_features & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-usage-02275",
+                            "vkCreateImageView(): pCreateInfo->format %s with tiling %s does not support usage that includes "
+                            "VK_IMAGE_USAGE_STORAGE_BIT.",
+                            string_VkFormat(view_format), string_VkImageTiling(image_tiling));
+        } else if ((image_usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) &&
+                   !(tiling_features & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-usage-02276",
+                            "vkCreateImageView(): pCreateInfo->format %s with tiling %s does not support usage that includes "
+                            "VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT.",
+                            string_VkFormat(view_format), string_VkImageTiling(image_tiling));
+        } else if ((image_usage & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) &&
+                   !(tiling_features & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-usage-02277",
+                            "vkCreateImageView(): pCreateInfo->format %s with tiling %s does not support usage that includes "
+                            "VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT.",
+                            string_VkFormat(view_format), string_VkImageTiling(image_tiling));
+        }
+
+        if (image_usage & VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV) {
+            if (view_type != VK_IMAGE_VIEW_TYPE_2D && view_type != VK_IMAGE_VIEW_TYPE_2D_ARRAY) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-image-02086",
+                                "vkCreateImageView(): If the image was created with usage containing "
+                                "VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, viewType must be "
+                                "VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY.");
+            }
+            if (view_format != VK_FORMAT_R8_UINT) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-image-02087",
+                                "vkCreateImageView(): If the image was created with usage containing "
+                                "VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, format must be VK_FORMAT_R8_UINT.");
+            }
+        }
+    }
+    return skip;
+}
+
+static inline bool ValidateCmdCopyBufferBounds(const debug_report_data *report_data, const BUFFER_STATE *src_buffer_state,
+                                               const BUFFER_STATE *dst_buffer_state, uint32_t regionCount,
+                                               const VkBufferCopy *pRegions) {
+    bool skip = false;
+
+    VkDeviceSize src_buffer_size = src_buffer_state->createInfo.size;
+    VkDeviceSize dst_buffer_size = dst_buffer_state->createInfo.size;
+    VkDeviceSize src_min = UINT64_MAX;
+    VkDeviceSize src_max = 0;
+    VkDeviceSize dst_min = UINT64_MAX;
+    VkDeviceSize dst_max = 0;
+
+    for (uint32_t i = 0; i < regionCount; i++) {
+        src_min = std::min(src_min, pRegions[i].srcOffset);
+        src_max = std::max(src_max, (pRegions[i].srcOffset + pRegions[i].size));
+        dst_min = std::min(dst_min, pRegions[i].dstOffset);
+        dst_max = std::max(dst_max, (pRegions[i].dstOffset + pRegions[i].size));
+
+        // The srcOffset member of each element of pRegions must be less than the size of srcBuffer
+        if (pRegions[i].srcOffset >= src_buffer_size) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+                            HandleToUint64(src_buffer_state->buffer), "VUID-vkCmdCopyBuffer-srcOffset-00113",
+                            "vkCmdCopyBuffer(): pRegions[%d].srcOffset (%" PRIuLEAST64
+                            ") is greater than or equal to the size of srcBuffer (%" PRIuLEAST64 ").",
+                            i, pRegions[i].srcOffset, src_buffer_size);
+        }
+
+        // The dstOffset member of each element of pRegions must be less than the size of dstBuffer
+        if (pRegions[i].dstOffset >= dst_buffer_size) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+                            HandleToUint64(dst_buffer_state->buffer), "VUID-vkCmdCopyBuffer-dstOffset-00114",
+                            "vkCmdCopyBuffer(): pRegions[%d].dstOffset (%" PRIuLEAST64
+                            ") is not less than the size of dstBuffer (%" PRIuLEAST64 ").",
+                            i, pRegions[i].dstOffset, dst_buffer_size);
+        }
+
+        // The size member of each element of pRegions must be less than or equal to the size of srcBuffer minus srcOffset
+        if (pRegions[i].size > (src_buffer_size - pRegions[i].srcOffset)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+                            HandleToUint64(src_buffer_state->buffer), "VUID-vkCmdCopyBuffer-size-00115",
+                            "vkCmdCopyBuffer(): pRegions[%d].size (%" PRIuLEAST64
+                            ") is greater than the source buffer size (%" PRIuLEAST64
+                            ") minus pRegions[%d].srcOffset (%" PRIuLEAST64 ").",
+                            i, pRegions[i].size, src_buffer_size, i, pRegions[i].srcOffset);
+        }
+
+        // The size member of each element of pRegions must be less than or equal to the size of dstBuffer minus dstOffset
+        if (pRegions[i].size > (dst_buffer_size - pRegions[i].dstOffset)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+                            HandleToUint64(dst_buffer_state->buffer), "VUID-vkCmdCopyBuffer-size-00116",
+                            "vkCmdCopyBuffer(): pRegions[%d].size (%" PRIuLEAST64
+                            ") is greater than the destination buffer size (%" PRIuLEAST64
+                            ") minus pRegions[%d].dstOffset (%" PRIuLEAST64 ").",
+                            i, pRegions[i].size, dst_buffer_size, i, pRegions[i].dstOffset);
+        }
+    }
+
+    // The union of the source regions, and the union of the destination regions, must not overlap in memory
+    if (src_buffer_state->buffer == dst_buffer_state->buffer) {
+        if (((src_min > dst_min) && (src_min < dst_max)) || ((src_max > dst_min) && (src_max < dst_max))) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+                            HandleToUint64(src_buffer_state->buffer), "VUID-vkCmdCopyBuffer-pRegions-00117",
+                            "vkCmdCopyBuffer(): Detected overlap between source and dest regions in memory.");
+        }
+    }
+
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
+                                              uint32_t regionCount, const VkBufferCopy *pRegions) const {
+    const auto cb_node = GetCBState(commandBuffer);
+    const auto src_buffer_state = GetBufferState(srcBuffer);
+    const auto dst_buffer_state = GetBufferState(dstBuffer);
+
+    bool skip = false;
+    skip |= ValidateMemoryIsBoundToBuffer(src_buffer_state, "vkCmdCopyBuffer()", "VUID-vkCmdCopyBuffer-srcBuffer-00119");
+    skip |= ValidateMemoryIsBoundToBuffer(dst_buffer_state, "vkCmdCopyBuffer()", "VUID-vkCmdCopyBuffer-dstBuffer-00121");
+    // Validate that SRC & DST buffers have correct usage flags set
+    skip |=
+        ValidateBufferUsageFlags(src_buffer_state, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, true, "VUID-vkCmdCopyBuffer-srcBuffer-00118",
+                                 "vkCmdCopyBuffer()", "VK_BUFFER_USAGE_TRANSFER_SRC_BIT");
+    skip |=
+        ValidateBufferUsageFlags(dst_buffer_state, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true, "VUID-vkCmdCopyBuffer-dstBuffer-00120",
+                                 "vkCmdCopyBuffer()", "VK_BUFFER_USAGE_TRANSFER_DST_BIT");
+    skip |=
+        ValidateCmdQueueFlags(cb_node, "vkCmdCopyBuffer()", VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+                              "VUID-vkCmdCopyBuffer-commandBuffer-cmdpool");
+    skip |= ValidateCmd(cb_node, CMD_COPYBUFFER, "vkCmdCopyBuffer()");
+    skip |= InsideRenderPass(cb_node, "vkCmdCopyBuffer()", "VUID-vkCmdCopyBuffer-renderpass");
+    skip |= ValidateCmdCopyBufferBounds(report_data, src_buffer_state, dst_buffer_state, regionCount, pRegions);
+    return skip;
+}
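+
+// Illustrative sketch (not upstream code): an application-side vkCmdCopyBuffer call that satisfies the
+// bounds and overlap rules validated above. The offsets and size are assumed to fit inside both buffers,
+// and distinct buffers are used so the source and destination regions cannot alias.
+static inline void ExampleCopyBuffer(VkCommandBuffer cmd, VkBuffer src, VkBuffer dst) {
+    VkBufferCopy region = {};
+    region.srcOffset = 0;    // must be < srcBuffer size
+    region.dstOffset = 256;  // must be < dstBuffer size
+    region.size = 1024;      // must be <= (buffer size - offset) for both buffers
+    vkCmdCopyBuffer(cmd, src, dst, 1, &region);
+}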
+
+bool CoreChecks::ValidateIdleBuffer(VkBuffer buffer) const {
+    bool skip = false;
+    auto buffer_state = GetBufferState(buffer);
+    if (!buffer_state) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, HandleToUint64(buffer),
+                        kVUID_Core_DrawState_DoubleDestroy, "Cannot free %s that has not been allocated.",
+                        report_data->FormatHandle(buffer).c_str());
+    } else {
+        if (buffer_state->in_use.load()) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+                            HandleToUint64(buffer), "VUID-vkDestroyBuffer-buffer-00922",
+                            "Cannot free %s that is in use by a command buffer.", report_data->FormatHandle(buffer).c_str());
+        }
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateDestroyImageView(VkDevice device, VkImageView imageView,
+                                                 const VkAllocationCallbacks *pAllocator) const {
+    const IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
+    const VulkanTypedHandle obj_struct(imageView, kVulkanObjectTypeImageView);
+
+    bool skip = false;
+    if (image_view_state) {
+        skip |=
+            ValidateObjectNotInUse(image_view_state, obj_struct, "vkDestroyImageView", "VUID-vkDestroyImageView-imageView-01026");
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) const {
+    auto buffer_state = GetBufferState(buffer);
+
+    bool skip = false;
+    if (buffer_state) {
+        skip |= ValidateIdleBuffer(buffer);
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateDestroyBufferView(VkDevice device, VkBufferView bufferView,
+                                                  const VkAllocationCallbacks *pAllocator) const {
+    auto buffer_view_state = GetBufferViewState(bufferView);
+    const VulkanTypedHandle obj_struct(bufferView, kVulkanObjectTypeBufferView);
+    bool skip = false;
+    if (buffer_view_state) {
+        skip |= ValidateObjectNotInUse(buffer_view_state, obj_struct, "vkDestroyBufferView",
+                                       "VUID-vkDestroyBufferView-bufferView-00936");
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
+                                              VkDeviceSize size, uint32_t data) const {
+    auto cb_node = GetCBState(commandBuffer);
+    auto buffer_state = GetBufferState(dstBuffer);
+    bool skip = false;
+    skip |= ValidateMemoryIsBoundToBuffer(buffer_state, "vkCmdFillBuffer()", "VUID-vkCmdFillBuffer-dstBuffer-00031");
+    skip |=
+        ValidateCmdQueueFlags(cb_node, "vkCmdFillBuffer()", VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+                              "VUID-vkCmdFillBuffer-commandBuffer-cmdpool");
+    skip |= ValidateCmd(cb_node, CMD_FILLBUFFER, "vkCmdFillBuffer()");
+    // Validate that DST buffer has correct usage flags set
+    skip |= ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true, "VUID-vkCmdFillBuffer-dstBuffer-00029",
+                                     "vkCmdFillBuffer()", "VK_BUFFER_USAGE_TRANSFER_DST_BIT");
+    skip |= InsideRenderPass(cb_node, "vkCmdFillBuffer()", "VUID-vkCmdFillBuffer-renderpass");
+
+    if (dstOffset >= buffer_state->createInfo.size) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+                        HandleToUint64(dstBuffer), "VUID-vkCmdFillBuffer-dstOffset-00024",
+                        "vkCmdFillBuffer(): dstOffset (0x%" PRIxLEAST64
+                        ") is not less than destination buffer (%s) size (0x%" PRIxLEAST64 ").",
+                        dstOffset, report_data->FormatHandle(dstBuffer).c_str(), buffer_state->createInfo.size);
+    }
+
+    if ((size != VK_WHOLE_SIZE) && (size > (buffer_state->createInfo.size - dstOffset))) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+                        HandleToUint64(dstBuffer), "VUID-vkCmdFillBuffer-size-00027",
+                        "vkCmdFillBuffer(): size (0x%" PRIxLEAST64 ") is greater than dstBuffer (%s) size (0x%" PRIxLEAST64
+                        ") minus dstOffset (0x%" PRIxLEAST64 ").",
+                        size, report_data->FormatHandle(dstBuffer).c_str(), buffer_state->createInfo.size, dstOffset);
+    }
+
+    return skip;
+}
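+
+// Illustrative sketch (not upstream code): an application-side vkCmdFillBuffer call consistent with the
+// checks above. dstOffset is assumed to be smaller than the buffer size (and is 4-byte aligned, as the
+// spec also requires), and VK_WHOLE_SIZE fills the remainder of the buffer so the size check is moot.
+static inline void ExampleFillBuffer(VkCommandBuffer cmd, VkBuffer dst) {
+    const VkDeviceSize dst_offset = 64;  // multiple of 4, assumed < buffer size
+    vkCmdFillBuffer(cmd, dst, dst_offset, VK_WHOLE_SIZE, 0xDEADBEEF);
+}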
+
+bool CoreChecks::ValidateBufferImageCopyData(uint32_t regionCount, const VkBufferImageCopy *pRegions,
+                                             const IMAGE_STATE *image_state, const char *function) const {
+    bool skip = false;
+
+    for (uint32_t i = 0; i < regionCount; i++) {
+        if (image_state->createInfo.imageType == VK_IMAGE_TYPE_1D) {
+            if ((pRegions[i].imageOffset.y != 0) || (pRegions[i].imageExtent.height != 1)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-srcImage-00199",
+                                "%s(): pRegion[%d] imageOffset.y is %d and imageExtent.height is %d. For 1D images these must be 0 "
+                                "and 1, respectively.",
+                                function, i, pRegions[i].imageOffset.y, pRegions[i].imageExtent.height);
+            }
+        }
+
+        if ((image_state->createInfo.imageType == VK_IMAGE_TYPE_1D) || (image_state->createInfo.imageType == VK_IMAGE_TYPE_2D)) {
+            if ((pRegions[i].imageOffset.z != 0) || (pRegions[i].imageExtent.depth != 1)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-srcImage-00201",
+                                "%s(): pRegion[%d] imageOffset.z is %d and imageExtent.depth is %d. For 1D and 2D images these "
+                                "must be 0 and 1, respectively.",
+                                function, i, pRegions[i].imageOffset.z, pRegions[i].imageExtent.depth);
+            }
+        }
+
+        if (image_state->createInfo.imageType == VK_IMAGE_TYPE_3D) {
+            if ((0 != pRegions[i].imageSubresource.baseArrayLayer) || (1 != pRegions[i].imageSubresource.layerCount)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-baseArrayLayer-00213",
+                                "%s(): pRegion[%d] imageSubresource.baseArrayLayer is %d and imageSubresource.layerCount is %d. "
+                                "For 3D images these must be 0 and 1, respectively.",
+                                function, i, pRegions[i].imageSubresource.baseArrayLayer, pRegions[i].imageSubresource.layerCount);
+            }
+        }
+
+        // If the calling command's VkImage parameter's format is not a depth/stencil format,
+        // then bufferOffset must be a multiple of the calling command's VkImage parameter's element size
+        uint32_t element_size = FormatElementSize(image_state->createInfo.format, pRegions[i].imageSubresource.aspectMask);
+
+        if (!FormatIsDepthAndStencil(image_state->createInfo.format) && SafeModulo(pRegions[i].bufferOffset, element_size) != 0) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-bufferOffset-00193",
+                            "%s(): pRegion[%d] bufferOffset 0x%" PRIxLEAST64
+                            " must be a multiple of this format's texel size (%" PRIu32 ").",
+                            function, i, pRegions[i].bufferOffset, element_size);
+        }
+
+        //  BufferOffset must be a multiple of 4
+        if (SafeModulo(pRegions[i].bufferOffset, 4) != 0) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-bufferOffset-00194",
+                            "%s(): pRegion[%d] bufferOffset 0x%" PRIxLEAST64 " must be a multiple of 4.", function, i,
+                            pRegions[i].bufferOffset);
+        }
+
+        //  BufferRowLength must be 0, or greater than or equal to the width member of imageExtent
+        if ((pRegions[i].bufferRowLength != 0) && (pRegions[i].bufferRowLength < pRegions[i].imageExtent.width)) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                        HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-bufferRowLength-00195",
+                        "%s(): pRegion[%d] bufferRowLength (%d) must be zero or greater-than-or-equal-to imageExtent.width (%d).",
+                        function, i, pRegions[i].bufferRowLength, pRegions[i].imageExtent.width);
+        }
+
+        //  BufferImageHeight must be 0, or greater than or equal to the height member of imageExtent
+        if ((pRegions[i].bufferImageHeight != 0) && (pRegions[i].bufferImageHeight < pRegions[i].imageExtent.height)) {
+            skip |= log_msg(
+                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-bufferImageHeight-00196",
+                "%s(): pRegion[%d] bufferImageHeight (%d) must be zero or greater-than-or-equal-to imageExtent.height (%d).",
+                function, i, pRegions[i].bufferImageHeight, pRegions[i].imageExtent.height);
+        }
+
+        // Calculate adjusted image extent, accounting for multiplane image factors
+        VkExtent3D adjusted_image_extent = GetImageSubresourceExtent(image_state, &pRegions[i].imageSubresource);
+        // imageOffset.x and (imageExtent.width + imageOffset.x) must both be >= 0 and <= image subresource width
+        if ((pRegions[i].imageOffset.x < 0) || (pRegions[i].imageOffset.x > static_cast<int32_t>(adjusted_image_extent.width)) ||
+            ((pRegions[i].imageOffset.x + pRegions[i].imageExtent.width) > static_cast<int32_t>(adjusted_image_extent.width))) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-imageOffset-00197",
+                            "%s(): Both pRegion[%d] imageOffset.x (%d) and (imageExtent.width + imageOffset.x) (%d) must be >= "
+                            "zero and <= the image subresource width (%d).",
+                            function, i, pRegions[i].imageOffset.x, (pRegions[i].imageOffset.x + pRegions[i].imageExtent.width),
+                            adjusted_image_extent.width);
+        }
+
+        // imageOffset.y and (imageExtent.height + imageOffset.y) must both be >= 0 and <= image subresource height
+        if ((pRegions[i].imageOffset.y < 0) || (pRegions[i].imageOffset.y > static_cast<int32_t>(adjusted_image_extent.height)) ||
+            ((pRegions[i].imageOffset.y + pRegions[i].imageExtent.height) > static_cast<int32_t>(adjusted_image_extent.height))) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-imageOffset-00198",
+                            "%s(): Both pRegion[%d] imageOffset.y (%d) and (imageExtent.height + imageOffset.y) (%d) must be >= "
+                            "zero and <= the image subresource height (%d).",
+                            function, i, pRegions[i].imageOffset.y, (pRegions[i].imageOffset.y + pRegions[i].imageExtent.height),
+                            adjusted_image_extent.height);
+        }
+
+        // imageOffset.z and (imageExtent.depth + imageOffset.z) must both be >= 0 and <= image subresource depth
+        if ((pRegions[i].imageOffset.z < 0) || (pRegions[i].imageOffset.z > static_cast<int32_t>(adjusted_image_extent.depth)) ||
+            ((pRegions[i].imageOffset.z + pRegions[i].imageExtent.depth) > static_cast<int32_t>(adjusted_image_extent.depth))) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-imageOffset-00200",
+                            "%s(): Both pRegion[%d] imageOffset.z (%d) and (imageExtent.depth + imageOffset.z) (%d) must be >= "
+                            "zero and <= the image subresource depth (%d).",
+                            function, i, pRegions[i].imageOffset.z, (pRegions[i].imageOffset.z + pRegions[i].imageExtent.depth),
+                            adjusted_image_extent.depth);
+        }
+
+        // subresource aspectMask must have exactly 1 bit set
+        const int num_bits = sizeof(VkFlags) * CHAR_BIT;
+        std::bitset<num_bits> aspect_mask_bits(pRegions[i].imageSubresource.aspectMask);
+        if (aspect_mask_bits.count() != 1) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-aspectMask-00212",
+                            "%s: aspectMasks for imageSubresource in each region must have only a single bit set.", function);
+        }
+
+        // image subresource aspect bit must match format
+        if (!VerifyAspectsPresent(pRegions[i].imageSubresource.aspectMask, image_state->createInfo.format)) {
+            skip |= log_msg(
+                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-aspectMask-00211",
+                "%s(): pRegion[%d] subresource aspectMask 0x%x specifies aspects that are not present in image format 0x%x.",
+                function, i, pRegions[i].imageSubresource.aspectMask, image_state->createInfo.format);
+        }
+
+        // Checks that apply only to compressed images
+        if (FormatIsCompressed(image_state->createInfo.format) || FormatIsSinglePlane_422(image_state->createInfo.format)) {
+            auto block_size = FormatTexelBlockExtent(image_state->createInfo.format);
+
+            //  BufferRowLength must be a multiple of block width
+            if (SafeModulo(pRegions[i].bufferRowLength, block_size.width) != 0) {
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                    HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-bufferRowLength-00203",
+                    "%s(): pRegion[%d] bufferRowLength (%d) must be a multiple of the compressed image's texel width (%d).",
+                    function, i, pRegions[i].bufferRowLength, block_size.width);
+            }
+
+            //  BufferRowHeight must be a multiple of block height
+            if (SafeModulo(pRegions[i].bufferImageHeight, block_size.height) != 0) {
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                    HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-bufferImageHeight-00204",
+                    "%s(): pRegion[%d] bufferImageHeight (%d) must be a multiple of the compressed image's texel height (%d).",
+                    function, i, pRegions[i].bufferImageHeight, block_size.height);
+            }
+
+            //  image offsets must be multiples of block dimensions
+            if ((SafeModulo(pRegions[i].imageOffset.x, block_size.width) != 0) ||
+                (SafeModulo(pRegions[i].imageOffset.y, block_size.height) != 0) ||
+                (SafeModulo(pRegions[i].imageOffset.z, block_size.depth) != 0)) {
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-imageOffset-00205",
+                            "%s(): pRegion[%d] imageOffset(x,y) (%d, %d) must be multiples of the compressed image's texel "
+                            "width & height (%d, %d).",
+                            function, i, pRegions[i].imageOffset.x, pRegions[i].imageOffset.y, block_size.width, block_size.height);
+            }
+
+            // bufferOffset must be a multiple of block size (linear bytes)
+            uint32_t block_size_in_bytes = FormatElementSize(image_state->createInfo.format);
+            if (SafeModulo(pRegions[i].bufferOffset, block_size_in_bytes) != 0) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-bufferOffset-00206",
+                                "%s(): pRegion[%d] bufferOffset (0x%" PRIxLEAST64
+                                ") must be a multiple of the compressed image's texel block size (%" PRIu32 ").",
+                                function, i, pRegions[i].bufferOffset, block_size_in_bytes);
+            }
+
+            // imageExtent width must be a multiple of block width, or extent+offset width must equal subresource width
+            VkExtent3D mip_extent = GetImageSubresourceExtent(image_state, &(pRegions[i].imageSubresource));
+            if ((SafeModulo(pRegions[i].imageExtent.width, block_size.width) != 0) &&
+                (pRegions[i].imageExtent.width + pRegions[i].imageOffset.x != mip_extent.width)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-imageExtent-00207",
+                                "%s(): pRegion[%d] extent width (%d) must be a multiple of the compressed texture block width "
+                                "(%d), or when added to offset.x (%d) must equal the image subresource width (%d).",
+                                function, i, pRegions[i].imageExtent.width, block_size.width, pRegions[i].imageOffset.x,
+                                mip_extent.width);
+            }
+
+            // imageExtent height must be a multiple of block height, or extent+offset height must equal subresource height
+            if ((SafeModulo(pRegions[i].imageExtent.height, block_size.height) != 0) &&
+                (pRegions[i].imageExtent.height + pRegions[i].imageOffset.y != mip_extent.height)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-imageExtent-00208",
+                                "%s(): pRegion[%d] extent height (%d) must be a multiple of the compressed texture block height "
+                                "(%d), or when added to offset.y (%d) must equal the image subresource height (%d).",
+                                function, i, pRegions[i].imageExtent.height, block_size.height, pRegions[i].imageOffset.y,
+                                mip_extent.height);
+            }
+
+            // imageExtent depth must be a multiple of block depth, or extent+offset depth must equal subresource depth
+            if ((SafeModulo(pRegions[i].imageExtent.depth, block_size.depth) != 0) &&
+                (pRegions[i].imageExtent.depth + pRegions[i].imageOffset.z != mip_extent.depth)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-imageExtent-00209",
+                                "%s(): pRegion[%d] extent depth (%d) must be a multiple of the compressed texture block depth "
+                                "(%d), or when added to offset.z (%d) must equal the image subresource depth (%d).",
+                                function, i, pRegions[i].imageExtent.depth, block_size.depth, pRegions[i].imageOffset.z,
+                                mip_extent.depth);
+            }
+        }
+    }
+
+    return skip;
+}
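+
+// Illustrative sketch (not upstream code): a VkBufferImageCopy region that passes the compressed-format
+// checks above, assuming a BC1 image (4x4 texel blocks, 8 bytes per block) with a 64x64 base mip level.
+// bufferOffset is a multiple of both 4 and the block size, row length / image height of 0 mean "tightly
+// packed", and the offsets and extent are block-aligned.
+static inline VkBufferImageCopy ExampleBc1CopyRegion() {
+    VkBufferImageCopy region = {};
+    region.bufferOffset = 0;       // multiple of 4 and of the 8-byte BC1 block size
+    region.bufferRowLength = 0;    // 0 -> treated as imageExtent.width
+    region.bufferImageHeight = 0;  // 0 -> treated as imageExtent.height
+    region.imageSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};  // aspect, mip, baseLayer, layerCount
+    region.imageOffset = {0, 0, 0};    // each component a multiple of the 4x4x1 block extent
+    region.imageExtent = {64, 64, 1};  // block-aligned, and equal to the assumed mip dimensions
+    return region;
+}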
+
+static bool ValidateImageBounds(const debug_report_data *report_data, const IMAGE_STATE *image_state, const uint32_t regionCount,
+                                const VkBufferImageCopy *pRegions, const char *func_name, const char *msg_code) {
+    bool skip = false;
+    const VkImageCreateInfo *image_info = &(image_state->createInfo);
+
+    for (uint32_t i = 0; i < regionCount; i++) {
+        VkExtent3D extent = pRegions[i].imageExtent;
+        VkOffset3D offset = pRegions[i].imageOffset;
+
+        if (IsExtentSizeZero(&extent)) {  // Warn on zero area subresource
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)0,
+                        kVUID_Core_Image_ZeroAreaSubregion, "%s: pRegion[%d] imageExtent of {%1d, %1d, %1d} has zero area",
+                        func_name, i, extent.width, extent.height, extent.depth);
+        }
+
+        VkExtent3D image_extent = GetImageSubresourceExtent(image_state, &(pRegions[i].imageSubresource));
+
+        // If we're using a compressed format, valid extent is rounded up to multiple of block size (per 18.1)
+        if (FormatIsCompressed(image_info->format) || FormatIsSinglePlane_422(image_state->createInfo.format)) {
+            auto block_extent = FormatTexelBlockExtent(image_info->format);
+            if (image_extent.width % block_extent.width) {
+                image_extent.width += (block_extent.width - (image_extent.width % block_extent.width));
+            }
+            if (image_extent.height % block_extent.height) {
+                image_extent.height += (block_extent.height - (image_extent.height % block_extent.height));
+            }
+            if (image_extent.depth % block_extent.depth) {
+                image_extent.depth += (block_extent.depth - (image_extent.depth % block_extent.depth));
+            }
+        }
+
+        if (0 != ExceedsBounds(&offset, &extent, &image_extent)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)0,
+                            msg_code, "%s: pRegion[%d] exceeds image bounds.", func_name, i);
+        }
+    }
+
+    return skip;
+}
+
+static inline bool ValidateBufferBounds(const debug_report_data *report_data, const IMAGE_STATE *image_state,
+                                        const BUFFER_STATE *buff_state, uint32_t regionCount, const VkBufferImageCopy *pRegions,
+                                        const char *func_name, const char *msg_code) {
+    bool skip = false;
+
+    VkDeviceSize buffer_size = buff_state->createInfo.size;
+
+    for (uint32_t i = 0; i < regionCount; i++) {
+        VkExtent3D copy_extent = pRegions[i].imageExtent;
+
+        VkDeviceSize buffer_width = (0 == pRegions[i].bufferRowLength ? copy_extent.width : pRegions[i].bufferRowLength);
+        VkDeviceSize buffer_height = (0 == pRegions[i].bufferImageHeight ? copy_extent.height : pRegions[i].bufferImageHeight);
+        VkDeviceSize unit_size = FormatElementSize(image_state->createInfo.format,
+                                                   pRegions[i].imageSubresource.aspectMask);  // size (bytes) of texel or block
+
+        if (FormatIsCompressed(image_state->createInfo.format) || FormatIsSinglePlane_422(image_state->createInfo.format)) {
+            // Switch to texel block units, rounding up for any partially-used blocks
+            auto block_dim = FormatTexelBlockExtent(image_state->createInfo.format);
+            buffer_width = (buffer_width + block_dim.width - 1) / block_dim.width;
+            buffer_height = (buffer_height + block_dim.height - 1) / block_dim.height;
+
+            copy_extent.width = (copy_extent.width + block_dim.width - 1) / block_dim.width;
+            copy_extent.height = (copy_extent.height + block_dim.height - 1) / block_dim.height;
+            copy_extent.depth = (copy_extent.depth + block_dim.depth - 1) / block_dim.depth;
+        }
+
+        // Either depth or layerCount may be greater than 1 (not both). This is the number of 'slices' to copy
+        uint32_t z_copies = std::max(copy_extent.depth, pRegions[i].imageSubresource.layerCount);
+        if (IsExtentSizeZero(&copy_extent) || (0 == z_copies)) {
+            // TODO: Issue warning here? Already warned in ValidateImageBounds()...
+        } else {
+            // Calculate buffer offset of final copied byte, + 1.
+            VkDeviceSize max_buffer_offset = (z_copies - 1) * buffer_height * buffer_width;      // offset to slice
+            max_buffer_offset += ((copy_extent.height - 1) * buffer_width) + copy_extent.width;  // add row,col
+            max_buffer_offset *= unit_size;                                                      // convert to bytes
+            max_buffer_offset += pRegions[i].bufferOffset;                                       // add initial offset (bytes)
+
+            if (buffer_size < max_buffer_offset) {
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)0,
+                            msg_code, "%s: pRegion[%d] exceeds buffer size of %" PRIu64 " bytes.", func_name, i, buffer_size);
+            }
+        }
+    }
+
+    return skip;
+}
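+
+// Illustrative helper (not upstream code) that mirrors the arithmetic above: the minimum number of buffer
+// bytes one region needs, for an uncompressed format, assuming a non-zero extent and layerCount. For a
+// tightly packed 16x16x1 region of a 4-byte-per-texel format this is ((16 - 1) * 16 + 16) * 4 = 1024 bytes
+// beyond bufferOffset.
+static inline VkDeviceSize ExampleRequiredBufferBytes(const VkBufferImageCopy &region, VkDeviceSize texel_size) {
+    const VkDeviceSize row_length = region.bufferRowLength ? region.bufferRowLength : region.imageExtent.width;
+    const VkDeviceSize image_height = region.bufferImageHeight ? region.bufferImageHeight : region.imageExtent.height;
+    const uint32_t slices = std::max(region.imageExtent.depth, region.imageSubresource.layerCount);
+    VkDeviceSize last_texel = (slices - 1) * image_height * row_length;                     // offset to last slice
+    last_texel += (region.imageExtent.height - 1) * row_length + region.imageExtent.width;  // add last row, column
+    return last_texel * texel_size + region.bufferOffset;                                   // convert to bytes
+}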
+
+bool CoreChecks::PreCallValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
+                                                     VkBuffer dstBuffer, uint32_t regionCount,
+                                                     const VkBufferImageCopy *pRegions) const {
+    const auto cb_node = GetCBState(commandBuffer);
+    const auto src_image_state = GetImageState(srcImage);
+    const auto dst_buffer_state = GetBufferState(dstBuffer);
+
+    bool skip = ValidateBufferImageCopyData(regionCount, pRegions, src_image_state, "vkCmdCopyImageToBuffer");
+
+    // Validate command buffer state
+    skip |= ValidateCmd(cb_node, CMD_COPYIMAGETOBUFFER, "vkCmdCopyImageToBuffer()");
+
+    // Command pool must support graphics, compute, or transfer operations
+    const auto pPool = cb_node->command_pool.get();
+
+    VkQueueFlags queue_flags = GetPhysicalDeviceState()->queue_family_properties[pPool->queueFamilyIndex].queueFlags;
+
+    if (0 == (queue_flags & (VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT))) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(cb_node->createInfo.commandPool), "VUID-vkCmdCopyImageToBuffer-commandBuffer-cmdpool",
+                        "Cannot call vkCmdCopyImageToBuffer() on a command buffer allocated from a pool without graphics, compute, "
+                        "or transfer capabilities.");
+    }
+    skip |= ValidateImageBounds(report_data, src_image_state, regionCount, pRegions, "vkCmdCopyImageToBuffer()",
+                                "VUID-vkCmdCopyImageToBuffer-pRegions-00182");
+    skip |= ValidateBufferBounds(report_data, src_image_state, dst_buffer_state, regionCount, pRegions, "vkCmdCopyImageToBuffer()",
+                                 "VUID-vkCmdCopyImageToBuffer-pRegions-00183");
+
+    skip |= ValidateImageSampleCount(src_image_state, VK_SAMPLE_COUNT_1_BIT, "vkCmdCopyImageToBuffer(): srcImage",
+                                     "VUID-vkCmdCopyImageToBuffer-srcImage-00188");
+    skip |= ValidateMemoryIsBoundToImage(src_image_state, "vkCmdCopyImageToBuffer()", "VUID-vkCmdCopyImageToBuffer-srcImage-00187");
+    skip |=
+        ValidateMemoryIsBoundToBuffer(dst_buffer_state, "vkCmdCopyImageToBuffer()", "VUID-vkCmdCopyImageToBuffer-dstBuffer-00192");
+
+    // Validate that SRC image & DST buffer have correct usage flags set
+    skip |= ValidateImageUsageFlags(src_image_state, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, true,
+                                    "VUID-vkCmdCopyImageToBuffer-srcImage-00186", "vkCmdCopyImageToBuffer()",
+                                    "VK_IMAGE_USAGE_TRANSFER_SRC_BIT");
+    skip |= ValidateBufferUsageFlags(dst_buffer_state, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true,
+                                     "VUID-vkCmdCopyImageToBuffer-dstBuffer-00191", "vkCmdCopyImageToBuffer()",
+                                     "VK_BUFFER_USAGE_TRANSFER_DST_BIT");
+    if (api_version >= VK_API_VERSION_1_1 || device_extensions.vk_khr_maintenance1) {
+        skip |= ValidateImageFormatFeatureFlags(src_image_state, VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, "vkCmdCopyImageToBuffer()",
+                                                "VUID-vkCmdCopyImageToBuffer-srcImage-01998",
+                                                "VUID-vkCmdCopyImageToBuffer-srcImage-01998");
+    }
+    skip |= InsideRenderPass(cb_node, "vkCmdCopyImageToBuffer()", "VUID-vkCmdCopyImageToBuffer-renderpass");
+    bool hit_error = false;
+    const char *src_invalid_layout_vuid = (src_image_state->shared_presentable && device_extensions.vk_khr_shared_presentable_image)
+                                              ? "VUID-vkCmdCopyImageToBuffer-srcImageLayout-01397"
+                                              : "VUID-vkCmdCopyImageToBuffer-srcImageLayout-00190";
+    for (uint32_t i = 0; i < regionCount; ++i) {
+        skip |= ValidateImageSubresourceLayers(cb_node, &pRegions[i].imageSubresource, "vkCmdCopyImageToBuffer()",
+                                               "imageSubresource", i);
+        skip |= VerifyImageLayout(cb_node, src_image_state, pRegions[i].imageSubresource, srcImageLayout,
+                                  VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, "vkCmdCopyImageToBuffer()", src_invalid_layout_vuid,
+                                  "VUID-vkCmdCopyImageToBuffer-srcImageLayout-00189", &hit_error);
+        skip |= ValidateCopyBufferImageTransferGranularityRequirements(
+            cb_node, src_image_state, &pRegions[i], i, "vkCmdCopyImageToBuffer()", "VUID-vkCmdCopyImageToBuffer-imageOffset-01794");
+        skip |=
+            ValidateImageMipLevel(cb_node, src_image_state, pRegions[i].imageSubresource.mipLevel, i, "vkCmdCopyImageToBuffer()",
+                                  "imageSubresource", "VUID-vkCmdCopyImageToBuffer-imageSubresource-01703");
+        skip |= ValidateImageArrayLayerRange(cb_node, src_image_state, pRegions[i].imageSubresource.baseArrayLayer,
+                                             pRegions[i].imageSubresource.layerCount, i, "vkCmdCopyImageToBuffer()",
+                                             "imageSubresource", "VUID-vkCmdCopyImageToBuffer-imageSubresource-01704");
+    }
+    return skip;
+}
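+
+// Illustrative sketch (not upstream code): an application-side readback that satisfies the checks above,
+// assuming the image was created with VK_IMAGE_USAGE_TRANSFER_SRC_BIT, the buffer with
+// VK_BUFFER_USAGE_TRANSFER_DST_BIT, and the image has already been transitioned to
+// VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL.
+static inline void ExampleCopyImageToBuffer(VkCommandBuffer cmd, VkImage src, VkBuffer dst, uint32_t width, uint32_t height) {
+    VkBufferImageCopy region = {};
+    region.imageSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
+    region.imageExtent = {width, height, 1};
+    vkCmdCopyImageToBuffer(cmd, src, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dst, 1, &region);
+}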
+
+void CoreChecks::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
+                                                   VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) {
+    StateTracker::PreCallRecordCmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
+
+    auto cb_node = GetCBState(commandBuffer);
+    auto src_image_state = GetImageState(srcImage);
+    // Make sure that all image slices record the referenced layout
+    for (uint32_t i = 0; i < regionCount; ++i) {
+        SetImageInitialLayout(cb_node, *src_image_state, pRegions[i].imageSubresource, srcImageLayout);
+    }
+}
+
+bool CoreChecks::PreCallValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
+                                                     VkImageLayout dstImageLayout, uint32_t regionCount,
+                                                     const VkBufferImageCopy *pRegions) const {
+    const auto cb_node = GetCBState(commandBuffer);
+    const auto src_buffer_state = GetBufferState(srcBuffer);
+    const auto dst_image_state = GetImageState(dstImage);
+
+    bool skip = ValidateBufferImageCopyData(regionCount, pRegions, dst_image_state, "vkCmdCopyBufferToImage");
+
+    // Validate command buffer state
+    skip |= ValidateCmd(cb_node, CMD_COPYBUFFERTOIMAGE, "vkCmdCopyBufferToImage()");
+
+    // Command pool must support graphics, compute, or transfer operations
+    const auto pPool = cb_node->command_pool.get();
+    VkQueueFlags queue_flags = GetPhysicalDeviceState()->queue_family_properties[pPool->queueFamilyIndex].queueFlags;
+    if (0 == (queue_flags & (VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT))) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(cb_node->createInfo.commandPool), "VUID-vkCmdCopyBufferToImage-commandBuffer-cmdpool",
+                        "Cannot call vkCmdCopyBufferToImage() on a command buffer allocated from a pool without graphics, compute, "
+                        "or transfer capabilities.");
+    }
+    skip |= ValidateImageBounds(report_data, dst_image_state, regionCount, pRegions, "vkCmdCopyBufferToImage()",
+                                "VUID-vkCmdCopyBufferToImage-pRegions-00172");
+    skip |= ValidateBufferBounds(report_data, dst_image_state, src_buffer_state, regionCount, pRegions, "vkCmdCopyBufferToImage()",
+                                 "VUID-vkCmdCopyBufferToImage-pRegions-00171");
+    skip |= ValidateImageSampleCount(dst_image_state, VK_SAMPLE_COUNT_1_BIT, "vkCmdCopyBufferToImage(): dstImage",
+                                     "VUID-vkCmdCopyBufferToImage-dstImage-00179");
+    skip |=
+        ValidateMemoryIsBoundToBuffer(src_buffer_state, "vkCmdCopyBufferToImage()", "VUID-vkCmdCopyBufferToImage-srcBuffer-00176");
+    skip |= ValidateMemoryIsBoundToImage(dst_image_state, "vkCmdCopyBufferToImage()", "VUID-vkCmdCopyBufferToImage-dstImage-00178");
+    skip |= ValidateBufferUsageFlags(src_buffer_state, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, true,
+                                     "VUID-vkCmdCopyBufferToImage-srcBuffer-00174", "vkCmdCopyBufferToImage()",
+                                     "VK_BUFFER_USAGE_TRANSFER_SRC_BIT");
+    skip |= ValidateImageUsageFlags(dst_image_state, VK_IMAGE_USAGE_TRANSFER_DST_BIT, true,
+                                    "VUID-vkCmdCopyBufferToImage-dstImage-00177", "vkCmdCopyBufferToImage()",
+                                    "VK_IMAGE_USAGE_TRANSFER_DST_BIT");
+    if (api_version >= VK_API_VERSION_1_1 || device_extensions.vk_khr_maintenance1) {
+        skip |= ValidateImageFormatFeatureFlags(dst_image_state, VK_FORMAT_FEATURE_TRANSFER_DST_BIT, "vkCmdCopyBufferToImage()",
+                                                "VUID-vkCmdCopyBufferToImage-dstImage-01997",
+                                                "VUID-vkCmdCopyBufferToImage-dstImage-01997");
+    }
+    skip |= InsideRenderPass(cb_node, "vkCmdCopyBufferToImage()", "VUID-vkCmdCopyBufferToImage-renderpass");
+    bool hit_error = false;
+    const char *dst_invalid_layout_vuid = (dst_image_state->shared_presentable && device_extensions.vk_khr_shared_presentable_image)
+                                              ? "VUID-vkCmdCopyBufferToImage-dstImageLayout-01396"
+                                              : "VUID-vkCmdCopyBufferToImage-dstImageLayout-00181";
+    for (uint32_t i = 0; i < regionCount; ++i) {
+        skip |= ValidateImageSubresourceLayers(cb_node, &pRegions[i].imageSubresource, "vkCmdCopyBufferToImage()",
+                                               "imageSubresource", i);
+        skip |= VerifyImageLayout(cb_node, dst_image_state, pRegions[i].imageSubresource, dstImageLayout,
+                                  VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, "vkCmdCopyBufferToImage()", dst_invalid_layout_vuid,
+                                  "VUID-vkCmdCopyBufferToImage-dstImageLayout-00180", &hit_error);
+        skip |= ValidateCopyBufferImageTransferGranularityRequirements(
+            cb_node, dst_image_state, &pRegions[i], i, "vkCmdCopyBufferToImage()", "VUID-vkCmdCopyBufferToImage-imageOffset-01793");
+        skip |=
+            ValidateImageMipLevel(cb_node, dst_image_state, pRegions[i].imageSubresource.mipLevel, i, "vkCmdCopyBufferToImage()",
+                                  "imageSubresource", "VUID-vkCmdCopyBufferToImage-imageSubresource-01701");
+        skip |= ValidateImageArrayLayerRange(cb_node, dst_image_state, pRegions[i].imageSubresource.baseArrayLayer,
+                                             pRegions[i].imageSubresource.layerCount, i, "vkCmdCopyBufferToImage()",
+                                             "imageSubresource", "VUID-vkCmdCopyBufferToImage-imageSubresource-01702");
+    }
+    return skip;
+}
+
+void CoreChecks::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
+                                                   VkImageLayout dstImageLayout, uint32_t regionCount,
+                                                   const VkBufferImageCopy *pRegions) {
+    StateTracker::PreCallRecordCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
+
+    auto cb_node = GetCBState(commandBuffer);
+    auto dst_image_state = GetImageState(dstImage);
+    // Make sure that all image slices record the referenced layout
+    for (uint32_t i = 0; i < regionCount; ++i) {
+        SetImageInitialLayout(cb_node, *dst_image_state, pRegions[i].imageSubresource, dstImageLayout);
+    }
+}
+
+bool CoreChecks::PreCallValidateGetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource *pSubresource,
+                                                          VkSubresourceLayout *pLayout) const {
+    bool skip = false;
+    const VkImageAspectFlags sub_aspect = pSubresource->aspectMask;
+
+    // The aspectMask member of pSubresource must only have a single bit set
+    const int num_bits = sizeof(sub_aspect) * CHAR_BIT;
+    std::bitset<num_bits> aspect_mask_bits(sub_aspect);
+    if (aspect_mask_bits.count() != 1) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image),
+                        "VUID-vkGetImageSubresourceLayout-aspectMask-00997",
+                        "vkGetImageSubresourceLayout(): VkImageSubresource.aspectMask must have exactly 1 bit set.");
+    }
+
+    const IMAGE_STATE *image_entry = GetImageState(image);
+    if (!image_entry) {
+        return skip;
+    }
+
+    // Image must have been created with tiling equal to VK_IMAGE_TILING_LINEAR
+    if (device_extensions.vk_ext_image_drm_format_modifier) {
+        if ((image_entry->createInfo.tiling != VK_IMAGE_TILING_LINEAR) &&
+            (image_entry->createInfo.tiling != VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(image), "VUID-vkGetImageSubresourceLayout-image-02270",
+                            "vkGetImageSubresourceLayout(): Image must have tiling of VK_IMAGE_TILING_LINEAR or "
+                            "VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT.");
+        }
+    } else {
+        if (image_entry->createInfo.tiling != VK_IMAGE_TILING_LINEAR) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(image), "VUID-vkGetImageSubresourceLayout-image-00996",
+                            "vkGetImageSubresourceLayout(): Image must have tiling of VK_IMAGE_TILING_LINEAR.");
+        }
+    }
+
+    // mipLevel must be less than the mipLevels specified in VkImageCreateInfo when the image was created
+    if (pSubresource->mipLevel >= image_entry->createInfo.mipLevels) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image),
+                        "VUID-vkGetImageSubresourceLayout-mipLevel-01716",
+                        "vkGetImageSubresourceLayout(): pSubresource.mipLevel (%d) must be less than %d.", pSubresource->mipLevel,
+                        image_entry->createInfo.mipLevels);
+    }
+
+    // arrayLayer must be less than the arrayLayers specified in VkImageCreateInfo when the image was created
+    if (pSubresource->arrayLayer >= image_entry->createInfo.arrayLayers) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image),
+                        "VUID-vkGetImageSubresourceLayout-arrayLayer-01717",
+                        "vkGetImageSubresourceLayout(): pSubresource.arrayLayer (%d) must be less than %d.",
+                        pSubresource->arrayLayer, image_entry->createInfo.arrayLayers);
+    }
+
+    // subresource's aspect must be compatible with image's format.
+    const VkFormat img_format = image_entry->createInfo.format;
+    if (image_entry->createInfo.tiling == VK_IMAGE_TILING_LINEAR) {
+        if (FormatIsMultiplane(img_format)) {
+            VkImageAspectFlags allowed_flags = (VK_IMAGE_ASPECT_PLANE_0_BIT_KHR | VK_IMAGE_ASPECT_PLANE_1_BIT_KHR);
+            const char *vuid = "VUID-vkGetImageSubresourceLayout-format-01581";  // 2-plane version
+            if (FormatPlaneCount(img_format) > 2u) {
+                allowed_flags |= VK_IMAGE_ASPECT_PLANE_2_BIT_KHR;
+                vuid = "VUID-vkGetImageSubresourceLayout-format-01582";  // 3-plane version
+            }
+            if (sub_aspect != (sub_aspect & allowed_flags)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                HandleToUint64(image), vuid,
+                                "vkGetImageSubresourceLayout(): For multi-planar images, VkImageSubresource.aspectMask (0x%" PRIx32
+                                ") must be a single-plane specifier flag.",
+                                sub_aspect);
+            }
+        } else if (FormatIsColor(img_format)) {
+            if (sub_aspect != VK_IMAGE_ASPECT_COLOR_BIT) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                HandleToUint64(image), kVUID_Core_DrawState_InvalidImageAspect,
+                                "vkGetImageSubresourceLayout(): For color formats, VkImageSubresource.aspectMask must be "
+                                "VK_IMAGE_ASPECT_COLOR.");
+            }
+        } else if (FormatIsDepthOrStencil(img_format)) {
+            if ((sub_aspect != VK_IMAGE_ASPECT_DEPTH_BIT) && (sub_aspect != VK_IMAGE_ASPECT_STENCIL_BIT)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                HandleToUint64(image), kVUID_Core_DrawState_InvalidImageAspect,
+                                "vkGetImageSubresourceLayout(): For depth/stencil formats, VkImageSubresource.aspectMask must be "
+                                "either VK_IMAGE_ASPECT_DEPTH_BIT or VK_IMAGE_ASPECT_STENCIL_BIT.");
+            }
+        }
+    } else if (image_entry->createInfo.tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) {
+        if ((sub_aspect != VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT) && (sub_aspect != VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT) &&
+            (sub_aspect != VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT) && (sub_aspect != VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT)) {
+            // TODO: This VU also needs to ensure that the DRM index is in range and valid.
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                            HandleToUint64(image), "VUID-vkGetImageSubresourceLayout-tiling-02271",
+                            "vkGetImageSubresourceLayout(): VkImageSubresource.aspectMask must be "
+                            "VK_IMAGE_ASPECT_MEMORY_PLANE_i_BIT_EXT.");
+        }
+    }
+
+    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
+        skip |= ValidateGetImageSubresourceLayoutANDROID(image);
+    }
+
+    return skip;
+}
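+
+// Illustrative sketch (not upstream code): an application-side vkGetImageSubresourceLayout query that the
+// checks above accept, assuming a linear-tiled, color-format image created with at least one mip level and
+// array layer: the aspect mask has exactly one bit set and it matches the image's format.
+static inline VkSubresourceLayout ExampleQueryLinearLayout(VkDevice device, VkImage linear_color_image) {
+    VkImageSubresource subresource = {};
+    subresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;  // exactly one bit, valid for a color format
+    subresource.mipLevel = 0;                            // must be < mipLevels used at creation
+    subresource.arrayLayer = 0;                          // must be < arrayLayers used at creation
+    VkSubresourceLayout layout = {};
+    vkGetImageSubresourceLayout(device, linear_color_image, &subresource, &layout);
+    return layout;
+}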
diff --git a/src/third_party/vulkan-validation-layers/src/layers/buffer_validation.h b/src/third_party/vulkan-validation-layers/src/layers/buffer_validation.h
new file mode 100644
index 0000000..a4cf548
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/buffer_validation.h
@@ -0,0 +1,42 @@
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (C) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Mark Lobodzinski <mark@lunarg.com>
+ * Dave Houlton <daveh@lunarg.com>
+ */
+#ifndef CORE_VALIDATION_BUFFER_VALIDATION_H_
+#define CORE_VALIDATION_BUFFER_VALIDATION_H_
+
+#include "vulkan/vk_layer.h"
+#include <limits.h>
+#include <memory>
+#include <unordered_map>
+#include <vector>
+#include <utility>
+#include <algorithm>
+#include <bitset>
+
+uint32_t FullMipChainLevels(uint32_t height, uint32_t width = 1, uint32_t depth = 1);
+uint32_t FullMipChainLevels(VkExtent3D);
+uint32_t FullMipChainLevels(VkExtent2D);
+
+uint32_t ResolveRemainingLevels(const VkImageSubresourceRange *range, uint32_t mip_levels);
+
+uint32_t ResolveRemainingLayers(const VkImageSubresourceRange *range, uint32_t layers);
+VkImageSubresourceRange NormalizeSubresourceRange(const IMAGE_STATE &image_state, const VkImageSubresourceRange &range);
+
+#endif  // CORE_VALIDATION_BUFFER_VALIDATION_H_
diff --git a/src/third_party/vulkan-validation-layers/src/layers/cast_utils.h b/src/third_party/vulkan-validation-layers/src/layers/cast_utils.h
new file mode 100644
index 0000000..b421145
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/cast_utils.h
@@ -0,0 +1,93 @@
+/* Copyright (c) 2019 The Khronos Group Inc.
+ * Copyright (c) 2019 Valve Corporation
+ * Copyright (c) 2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: John Zulauf <jzulauf@lunarg.com>
+ *
+ */
+#pragma once
+#ifndef CAST_UTILS_H_
+#define CAST_UTILS_H_
+
+#include <cassert>
+#include <cstddef>
+#include <cstdint>
+#include <functional>
+
+#define CAST_TO_FROM_UTILS
+// Casts to allow various types of less than 64 bits to be cast to and from uint64_t safely and portably
+template <typename HandleType, typename Uint>
+static inline HandleType CastFromUint(Uint untyped_handle) {
+    static_assert(sizeof(HandleType) == sizeof(Uint), "HandleType must be the same size as untyped handle");
+    return *reinterpret_cast<HandleType *>(&untyped_handle);
+}
+template <typename HandleType, typename Uint>
+static inline Uint CastToUint(HandleType handle) {
+    static_assert(sizeof(HandleType) == sizeof(Uint), "HandleType must be the same size as untyped handle");
+    return *reinterpret_cast<Uint *>(&handle);
+}
+
+// Ensure that the size changing casts are *static* to ensure portability
+template <typename HandleType>
+static inline HandleType CastFromUint64(uint64_t untyped_handle) {
+    static_assert(sizeof(HandleType) <= sizeof(uint64_t), "HandleType must not be larger than the untyped handle size");
+    typedef
+        typename std::conditional<sizeof(HandleType) == sizeof(uint8_t), uint8_t,
+                                  typename std::conditional<sizeof(HandleType) == sizeof(uint16_t), uint16_t,
+                                                            typename std::conditional<sizeof(HandleType) == sizeof(uint32_t),
+                                                                                      uint32_t, uint64_t>::type>::type>::type Uint;
+    return CastFromUint<HandleType, Uint>(static_cast<Uint>(untyped_handle));
+}
+
+template <typename HandleType>
+static inline uint64_t CastToUint64(HandleType handle) {
+    static_assert(sizeof(HandleType) <= sizeof(uint64_t), "HandleType must not be larger than the untyped handle size");
+    typedef
+        typename std::conditional<sizeof(HandleType) == sizeof(uint8_t), uint8_t,
+                                  typename std::conditional<sizeof(HandleType) == sizeof(uint16_t), uint16_t,
+                                                            typename std::conditional<sizeof(HandleType) == sizeof(uint32_t),
+                                                                                      uint32_t, uint64_t>::type>::type>::type Uint;
+    return static_cast<uint64_t>(CastToUint<HandleType, Uint>(handle));
+}
+
+// Convenience functions to cast between handles and the types the handles abstract, reflecting the Vulkan handle scheme, where
+// Handles are either pointers (dispatchable) or sizeof(uint64_t) (non-dispatchable), s.t. full size-safe casts are used and
+// we ensure that handles are large enough to contain the underlying type.
+template <typename HandleType, typename ValueType>
+void CastToHandle(ValueType value, HandleType *handle) {
+    static_assert(sizeof(HandleType) >= sizeof(ValueType), "HandleType must be large enough to hold internal value");
+    *handle = CastFromUint64<HandleType>(CastToUint64<ValueType>(value));
+}
+// This form is conveniently "inline"; you only need to specify the handle type (the value type is deduced from the arg)
+template <typename HandleType, typename ValueType>
+HandleType CastToHandle(ValueType value) {
+    HandleType handle;
+    CastToHandle(value, &handle);
+    return handle;
+}
+
+template <typename ValueType, typename HandleType>
+void CastFromHandle(HandleType handle, ValueType *value) {
+    static_assert(sizeof(HandleType) >= sizeof(ValueType), "HandleType must be large enough to hold internal value");
+    *value = CastFromUint64<ValueType>(CastToUint64<HandleType>(handle));
+}
+template <typename ValueType, typename HandleType>
+ValueType CastFromHandle(HandleType handle) {
+    ValueType value;
+    CastFromHandle(handle, &value);
+    return value;
+}
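+
+// Illustrative usage sketch (not part of the upstream header), assuming the 64-bit untyped handle form used
+// for non-dispatchable Vulkan handles: a value no larger than 64 bits round-trips through CastToUint64 /
+// CastFromUint64 unchanged, with the size constraints enforced at compile time.
+template <typename ValueType>
+static inline bool ExampleHandleRoundTrip(ValueType value) {
+    const uint64_t untyped = CastToUint64<ValueType>(value);  // pack into the 64-bit untyped handle
+    return CastFromUint64<ValueType>(untyped) == value;       // unpack and verify
+}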
+
+#endif  // CAST_UTILS_H_
diff --git a/src/third_party/vulkan-validation-layers/src/layers/command_counter.h b/src/third_party/vulkan-validation-layers/src/layers/command_counter.h
new file mode 100644
index 0000000..31eb365
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/command_counter.h
@@ -0,0 +1,32 @@
+/* Copyright (C) 2019 Intel Corporation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Lionel Landwerlin <lionel.g.landwerlin@intel.com>
+ */
+
+#pragma once
+#include "core_validation.h"
+
+class CommandCounter : public ValidationObject {
+  public:
+    CommandCounter(CoreChecks *coreChecks) : coreChecks(coreChecks) {}
+    virtual ~CommandCounter() {}
+
+    virtual write_lock_guard_t write_lock() { return coreChecks->write_lock(); }
+
+#include "command_counter_helper.h"
+
+  private:
+    CoreChecks *coreChecks;
+};
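The class body pulls its per-command overrides from the generated command_counter_helper.h. A plausible shape for one such override (an assumption for illustration; the real declarations are produced by the layer code generator) is a thin forwarder such as:

    // Hypothetical generated override: bump the per-command-buffer counter before recording a draw.
    void PreCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
                              uint32_t firstVertex, uint32_t firstInstance) override {
        coreChecks->IncrementCommandCount(commandBuffer);
    }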
diff --git a/src/third_party/vulkan-validation-layers/src/layers/convert_to_renderpass2.cpp b/src/third_party/vulkan-validation-layers/src/layers/convert_to_renderpass2.cpp
new file mode 100644
index 0000000..d7eda0c
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/convert_to_renderpass2.cpp
@@ -0,0 +1,213 @@
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (C) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Tobias Hector <@tobski>
+ */
+
+#include "convert_to_renderpass2.h"
+
+#include <vector>
+
+#include "vk_format_utils.h"
+#include "vk_typemap_helper.h"
+
+static safe_VkAttachmentDescription2KHR ToV2KHR(const VkAttachmentDescription& in_struct) {
+    safe_VkAttachmentDescription2KHR v2;
+    v2.sType = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR;
+    v2.pNext = nullptr;
+    v2.flags = in_struct.flags;
+    v2.format = in_struct.format;
+    v2.samples = in_struct.samples;
+    v2.loadOp = in_struct.loadOp;
+    v2.storeOp = in_struct.storeOp;
+    v2.stencilLoadOp = in_struct.stencilLoadOp;
+    v2.stencilStoreOp = in_struct.stencilStoreOp;
+    v2.initialLayout = in_struct.initialLayout;
+    v2.finalLayout = in_struct.finalLayout;
+
+    return v2;
+}
+
+static safe_VkAttachmentReference2KHR ToV2KHR(const VkAttachmentReference& in_struct, const VkImageAspectFlags aspectMask = 0) {
+    safe_VkAttachmentReference2KHR v2;
+    v2.sType = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR;
+    v2.pNext = nullptr;
+    v2.attachment = in_struct.attachment;
+    v2.layout = in_struct.layout;
+    v2.aspectMask = aspectMask;
+
+    return v2;
+}
+
+static safe_VkSubpassDescription2KHR ToV2KHR(const VkSubpassDescription& in_struct, const uint32_t viewMask,
+                                             const VkImageAspectFlags* input_attachment_aspect_masks) {
+    safe_VkSubpassDescription2KHR v2;
+    v2.sType = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR;
+    v2.pNext = nullptr;
+    v2.flags = in_struct.flags;
+    v2.pipelineBindPoint = in_struct.pipelineBindPoint;
+    v2.viewMask = viewMask;
+    v2.inputAttachmentCount = in_struct.inputAttachmentCount;
+    v2.pInputAttachments = nullptr;  // to be filled
+    v2.colorAttachmentCount = in_struct.colorAttachmentCount;
+    v2.pColorAttachments = nullptr;        // to be filled
+    v2.pResolveAttachments = nullptr;      // to be filled
+    v2.pDepthStencilAttachment = nullptr;  // to be filled
+    v2.preserveAttachmentCount = in_struct.preserveAttachmentCount;
+    v2.pPreserveAttachments = nullptr;  // to be filled
+
+    if (v2.inputAttachmentCount && in_struct.pInputAttachments) {
+        v2.pInputAttachments = new safe_VkAttachmentReference2KHR[v2.inputAttachmentCount];
+        for (uint32_t i = 0; i < v2.inputAttachmentCount; ++i) {
+            v2.pInputAttachments[i] = ToV2KHR(in_struct.pInputAttachments[i], input_attachment_aspect_masks[i]);
+        }
+    }
+    if (v2.colorAttachmentCount && in_struct.pColorAttachments) {
+        v2.pColorAttachments = new safe_VkAttachmentReference2KHR[v2.colorAttachmentCount];
+        for (uint32_t i = 0; i < v2.colorAttachmentCount; ++i) {
+            v2.pColorAttachments[i] = ToV2KHR(in_struct.pColorAttachments[i]);
+        }
+    }
+    if (v2.colorAttachmentCount && in_struct.pResolveAttachments) {
+        v2.pResolveAttachments = new safe_VkAttachmentReference2KHR[v2.colorAttachmentCount];
+        for (uint32_t i = 0; i < v2.colorAttachmentCount; ++i) {
+            v2.pResolveAttachments[i] = ToV2KHR(in_struct.pResolveAttachments[i]);
+        }
+    }
+    if (in_struct.pDepthStencilAttachment) {
+        v2.pDepthStencilAttachment = new safe_VkAttachmentReference2KHR();
+        *v2.pDepthStencilAttachment = ToV2KHR(*in_struct.pDepthStencilAttachment);
+    }
+    if (v2.preserveAttachmentCount && in_struct.pPreserveAttachments) {
+        auto preserve_attachments = new uint32_t[v2.preserveAttachmentCount];
+        for (uint32_t i = 0; i < v2.preserveAttachmentCount; ++i) {
+            preserve_attachments[i] = in_struct.pPreserveAttachments[i];
+        }
+        v2.pPreserveAttachments = preserve_attachments;
+    }
+
+    return v2;
+}
+
+static safe_VkSubpassDependency2KHR ToV2KHR(const VkSubpassDependency& in_struct, int32_t viewOffset = 0) {
+    safe_VkSubpassDependency2KHR v2;
+    v2.sType = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR;
+    v2.pNext = nullptr;
+    v2.srcSubpass = in_struct.srcSubpass;
+    v2.dstSubpass = in_struct.dstSubpass;
+    v2.srcStageMask = in_struct.srcStageMask;
+    v2.dstStageMask = in_struct.dstStageMask;
+    v2.srcAccessMask = in_struct.srcAccessMask;
+    v2.dstAccessMask = in_struct.dstAccessMask;
+    v2.dependencyFlags = in_struct.dependencyFlags;
+    v2.viewOffset = viewOffset;
+
+    return v2;
+}
+
+void ConvertVkRenderPassCreateInfoToV2KHR(const VkRenderPassCreateInfo& in_struct, safe_VkRenderPassCreateInfo2KHR* out_struct) {
+    using std::vector;
+    const auto multiview_info = lvl_find_in_chain<VkRenderPassMultiviewCreateInfo>(in_struct.pNext);
+    const auto* input_attachment_aspect_info = lvl_find_in_chain<VkRenderPassInputAttachmentAspectCreateInfo>(in_struct.pNext);
+
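+    // Explicitly destroy any existing contents of out_struct before re-initializing its fields in place.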
+    out_struct->~safe_VkRenderPassCreateInfo2KHR();
+    out_struct->sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR;
+    out_struct->pNext = nullptr;
+    out_struct->flags = in_struct.flags;
+    out_struct->attachmentCount = in_struct.attachmentCount;
+    out_struct->pAttachments = nullptr;  // to be filled
+    out_struct->subpassCount = in_struct.subpassCount;
+    out_struct->pSubpasses = nullptr;  // to be filled
+    out_struct->dependencyCount = in_struct.dependencyCount;
+    out_struct->pDependencies = nullptr;  // to be filled
+    out_struct->correlatedViewMaskCount = multiview_info ? multiview_info->correlationMaskCount : 0;
+    out_struct->pCorrelatedViewMasks = nullptr;  // to be filled
+
+    // TODO: This should support VkRenderPassFragmentDensityMapCreateInfoEXT somehow
+    // see https://github.com/KhronosGroup/Vulkan-Docs/issues/1027
+
+    if (out_struct->attachmentCount && in_struct.pAttachments) {
+        out_struct->pAttachments = new safe_VkAttachmentDescription2KHR[out_struct->attachmentCount];
+        for (uint32_t i = 0; i < out_struct->attachmentCount; ++i) {
+            out_struct->pAttachments[i] = ToV2KHR(in_struct.pAttachments[i]);
+        }
+    }
+
+    // translate VkRenderPassInputAttachmentAspectCreateInfo into vector
+    vector<vector<VkImageAspectFlags>> input_attachment_aspect_masks(out_struct->subpassCount);
+    // set defaults
+    for (uint32_t si = 0; si < out_struct->subpassCount; ++si) {
+        if (in_struct.pSubpasses) {
+            input_attachment_aspect_masks[si].resize(in_struct.pSubpasses[si].inputAttachmentCount, 0);
+
+            for (uint32_t iai = 0; iai < in_struct.pSubpasses[si].inputAttachmentCount; ++iai) {
+                if (out_struct->pAttachments && in_struct.pSubpasses[si].pInputAttachments) {
+                    const auto& input_attachment = in_struct.pSubpasses[si].pInputAttachments[iai];
+                    const auto format = out_struct->pAttachments[input_attachment.attachment].format;
+
+                    if (FormatIsColor(format)) input_attachment_aspect_masks[si][iai] |= VK_IMAGE_ASPECT_COLOR_BIT;
+                    if (FormatHasDepth(format)) input_attachment_aspect_masks[si][iai] |= VK_IMAGE_ASPECT_DEPTH_BIT;
+                    if (FormatHasStencil(format)) input_attachment_aspect_masks[si][iai] |= VK_IMAGE_ASPECT_STENCIL_BIT;
+                    if (FormatPlaneCount(format) > 1) {
+                        input_attachment_aspect_masks[si][iai] |= VK_IMAGE_ASPECT_PLANE_0_BIT;
+                        input_attachment_aspect_masks[si][iai] |= VK_IMAGE_ASPECT_PLANE_1_BIT;
+                    }
+                    if (FormatPlaneCount(format) > 2) input_attachment_aspect_masks[si][iai] |= VK_IMAGE_ASPECT_PLANE_2_BIT;
+                }
+            }
+        }
+    }
+    // translate VkRenderPassInputAttachmentAspectCreateInfo
+    if (input_attachment_aspect_info && input_attachment_aspect_info->pAspectReferences) {
+        for (uint32_t i = 0; i < input_attachment_aspect_info->aspectReferenceCount; ++i) {
+            const uint32_t subpass = input_attachment_aspect_info->pAspectReferences[i].subpass;
+            const uint32_t input_attachment = input_attachment_aspect_info->pAspectReferences[i].inputAttachmentIndex;
+            const VkImageAspectFlags aspectMask = input_attachment_aspect_info->pAspectReferences[i].aspectMask;
+
+            if (subpass < input_attachment_aspect_masks.size() &&
+                input_attachment < input_attachment_aspect_masks[subpass].size()) {
+                input_attachment_aspect_masks[subpass][input_attachment] = aspectMask;
+            }
+        }
+    }
+
+    const bool has_viewMask = multiview_info && multiview_info->subpassCount && multiview_info->pViewMasks;
+    if (out_struct->subpassCount && in_struct.pSubpasses) {
+        out_struct->pSubpasses = new safe_VkSubpassDescription2KHR[out_struct->subpassCount];
+        for (uint32_t i = 0; i < out_struct->subpassCount; ++i) {
+            const uint32_t viewMask = has_viewMask ? multiview_info->pViewMasks[i] : 0;
+            out_struct->pSubpasses[i] = ToV2KHR(in_struct.pSubpasses[i], viewMask, input_attachment_aspect_masks[i].data());
+        }
+    }
+
+    const bool has_viewOffset = multiview_info && multiview_info->dependencyCount && multiview_info->pViewOffsets;
+    if (out_struct->dependencyCount && in_struct.pDependencies) {
+        out_struct->pDependencies = new safe_VkSubpassDependency2KHR[out_struct->dependencyCount];
+        for (uint32_t i = 0; i < out_struct->dependencyCount; ++i) {
+            const int32_t viewOffset = has_viewOffset ? multiview_info->pViewOffsets[i] : 0;
+            out_struct->pDependencies[i] = ToV2KHR(in_struct.pDependencies[i], viewOffset);
+        }
+    }
+
+    if (out_struct->correlatedViewMaskCount && multiview_info->pCorrelationMasks) {
+        auto correlated_view_masks = new uint32_t[out_struct->correlatedViewMaskCount];
+        for (uint32_t i = 0; i < out_struct->correlatedViewMaskCount; ++i) {
+            correlated_view_masks[i] = multiview_info->pCorrelationMasks[i];
+        }
+        out_struct->pCorrelatedViewMasks = correlated_view_masks;
+    }
+}
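A minimal usage sketch of the conversion entry point above (illustrative only; the v1 create-info contents are assumed to come from the application):

    // Convert a v1 render pass description to the KHR v2 form so later validation can use one code path.
    VkRenderPassCreateInfo create_info = {};
    create_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
    // ... attachments, subpasses, and dependencies filled in by the caller ...

    safe_VkRenderPassCreateInfo2KHR create_info_2;
    ConvertVkRenderPassCreateInfoToV2KHR(create_info, &create_info_2);
    // create_info_2.ptr() now yields a VkRenderPassCreateInfo2KHR usable by v2-aware validation code.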
diff --git a/src/third_party/vulkan-validation-layers/src/layers/convert_to_renderpass2.h b/src/third_party/vulkan-validation-layers/src/layers/convert_to_renderpass2.h
new file mode 100644
index 0000000..664ccf4
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/convert_to_renderpass2.h
@@ -0,0 +1,24 @@
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (C) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Tobias Hector <@tobski>
+ */
+
+#pragma once
+#include "vk_safe_struct.h"
+
+void ConvertVkRenderPassCreateInfoToV2KHR(const VkRenderPassCreateInfo& in_struct, safe_VkRenderPassCreateInfo2KHR* out_struct);
diff --git a/src/third_party/vulkan-validation-layers/src/layers/core_validation.cpp b/src/third_party/vulkan-validation-layers/src/layers/core_validation.cpp
new file mode 100644
index 0000000..0e391e8
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/core_validation.cpp
@@ -0,0 +1,11269 @@
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (C) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Cody Northrop <cnorthrop@google.com>
+ * Author: Michael Lentine <mlentine@google.com>
+ * Author: Tobin Ehlis <tobine@google.com>
+ * Author: Chia-I Wu <olv@google.com>
+ * Author: Chris Forbes <chrisf@ijw.co.nz>
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Ian Elliott <ianelliott@google.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ * Author: Dustin Graves <dustin@lunarg.com>
+ * Author: Jeremy Hayes <jeremy@lunarg.com>
+ * Author: Jon Ashburn <jon@lunarg.com>
+ * Author: Karl Schultz <karl@lunarg.com>
+ * Author: Mark Young <marky@lunarg.com>
+ * Author: Mike Schuchardt <mikes@lunarg.com>
+ * Author: Mike Weiblen <mikew@lunarg.com>
+ * Author: Tony Barbour <tony@LunarG.com>
+ * Author: John Zulauf <jzulauf@lunarg.com>
+ * Author: Shannon McPherson <shannon@lunarg.com>
+ */
+
+// Allow use of STL min and max functions in Windows
+#define NOMINMAX
+
+#include <algorithm>
+#include <array>
+#include <assert.h>
+#include <cmath>
+#include <iostream>
+#include <list>
+#include <map>
+#include <memory>
+#include <mutex>
+#include <set>
+#include <sstream>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <string>
+#include <valarray>
+
+#include "vk_loader_platform.h"
+#include "vk_dispatch_table_helper.h"
+#include "vk_enum_string_helper.h"
+#include "chassis.h"
+#include "convert_to_renderpass2.h"
+#include "core_validation.h"
+#include "buffer_validation.h"
+#include "shader_validation.h"
+#include "vk_layer_utils.h"
+
+// Array of command names indexed by CMD_TYPE enum
+static const std::array<const char *, CMD_RANGE_SIZE> command_name_list = {{VUID_CMD_NAME_LIST}};
+
+// These functions are defined *outside* the core_validation namespace as their type
+// is also defined outside that namespace
+size_t PipelineLayoutCompatDef::hash() const {
+    hash_util::HashCombiner hc;
+    // The set number is integral to the CompatDef's distinctiveness
+    hc << set << push_constant_ranges.get();
+    const auto &descriptor_set_layouts = *set_layouts_id.get();
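+    // Compatibility for set N is defined over set layouts 0..N (plus push constants), so hash that whole prefix.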
+    for (uint32_t i = 0; i <= set; i++) {
+        hc << descriptor_set_layouts[i].get();
+    }
+    return hc.Value();
+}
+
+bool PipelineLayoutCompatDef::operator==(const PipelineLayoutCompatDef &other) const {
+    if ((set != other.set) || (push_constant_ranges != other.push_constant_ranges)) {
+        return false;
+    }
+
+    if (set_layouts_id == other.set_layouts_id) {
+        // if it's the same set_layouts_id, then *any* subset will match
+        return true;
+    }
+
+    // They aren't exactly the same PipelineLayoutSetLayouts, so we need to check if the required subsets match
+    const auto &descriptor_set_layouts = *set_layouts_id.get();
+    assert(set < descriptor_set_layouts.size());
+    const auto &other_ds_layouts = *other.set_layouts_id.get();
+    assert(set < other_ds_layouts.size());
+    for (uint32_t i = 0; i <= set; i++) {
+        if (descriptor_set_layouts[i] != other_ds_layouts[i]) {
+            return false;
+        }
+    }
+    return true;
+}
+
+using std::max;
+using std::string;
+using std::stringstream;
+using std::unique_ptr;
+using std::unordered_map;
+using std::unordered_set;
+using std::vector;
+
+// Get the global maps of pending releases
+const GlobalQFOTransferBarrierMap<VkImageMemoryBarrier> &CoreChecks::GetGlobalQFOReleaseBarrierMap(
+    const QFOTransferBarrier<VkImageMemoryBarrier>::Tag &type_tag) const {
+    return qfo_release_image_barrier_map;
+}
+const GlobalQFOTransferBarrierMap<VkBufferMemoryBarrier> &CoreChecks::GetGlobalQFOReleaseBarrierMap(
+    const QFOTransferBarrier<VkBufferMemoryBarrier>::Tag &type_tag) const {
+    return qfo_release_buffer_barrier_map;
+}
+GlobalQFOTransferBarrierMap<VkImageMemoryBarrier> &CoreChecks::GetGlobalQFOReleaseBarrierMap(
+    const QFOTransferBarrier<VkImageMemoryBarrier>::Tag &type_tag) {
+    return qfo_release_image_barrier_map;
+}
+GlobalQFOTransferBarrierMap<VkBufferMemoryBarrier> &CoreChecks::GetGlobalQFOReleaseBarrierMap(
+    const QFOTransferBarrier<VkBufferMemoryBarrier>::Tag &type_tag) {
+    return qfo_release_buffer_barrier_map;
+}
+
+ImageSubresourceLayoutMap::InitialLayoutState::InitialLayoutState(const CMD_BUFFER_STATE &cb_state,
+                                                                  const IMAGE_VIEW_STATE *view_state)
+    : image_view(VK_NULL_HANDLE), aspect_mask(0), label(cb_state.debug_label) {
+    if (view_state) {
+        image_view = view_state->image_view;
+        aspect_mask = view_state->create_info.subresourceRange.aspectMask;
+    }
+}
+
+std::string FormatDebugLabel(const char *prefix, const LoggingLabel &label) {
+    if (label.Empty()) return std::string();
+    std::string out;
+    string_sprintf(&out, "%sVkDebugUtilsLabel(name='%s' color=[%g, %g %g, %g])", prefix, label.name.c_str(), label.color[0],
+                   label.color[1], label.color[2], label.color[3]);
+    return out;
+}
+
+// The ImageLayoutMap implementation bakes in the number of valid aspects, so we have to choose the correct one at construction time.
+template <uint32_t kThreshold>
+static std::unique_ptr<ImageSubresourceLayoutMap> LayoutMapFactoryByAspect(const IMAGE_STATE &image_state) {
+    ImageSubresourceLayoutMap *map = nullptr;
+    switch (image_state.full_range.aspectMask) {
+        case VK_IMAGE_ASPECT_COLOR_BIT:
+            map = new ImageSubresourceLayoutMapImpl<ColorAspectTraits, kThreshold>(image_state);
+            break;
+        case VK_IMAGE_ASPECT_DEPTH_BIT:
+            map = new ImageSubresourceLayoutMapImpl<DepthAspectTraits, kThreshold>(image_state);
+            break;
+        case VK_IMAGE_ASPECT_STENCIL_BIT:
+            map = new ImageSubresourceLayoutMapImpl<StencilAspectTraits, kThreshold>(image_state);
+            break;
+        case VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT:
+            map = new ImageSubresourceLayoutMapImpl<DepthStencilAspectTraits, kThreshold>(image_state);
+            break;
+        case VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT:
+            map = new ImageSubresourceLayoutMapImpl<Multiplane2AspectTraits, kThreshold>(image_state);
+            break;
+        case VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT | VK_IMAGE_ASPECT_PLANE_2_BIT:
+            map = new ImageSubresourceLayoutMapImpl<Multiplane3AspectTraits, kThreshold>(image_state);
+            break;
+    }
+
+    assert(map);  // We shouldn't be able to get a null map here unless the traits cases are incomplete
+    return std::unique_ptr<ImageSubresourceLayoutMap>(map);
+}
+
+static std::unique_ptr<ImageSubresourceLayoutMap> LayoutMapFactory(const IMAGE_STATE &image_state) {
+    std::unique_ptr<ImageSubresourceLayoutMap> map;
+    const uint32_t kAlwaysDenseLimit = 16;  // About a cacheline on desktop architectures
+    if (image_state.full_range.layerCount <= kAlwaysDenseLimit) {
+        // Create a dense row map
+        map = LayoutMapFactoryByAspect<0>(image_state);
+    } else {
+        // Create an initially sparse row map
+        map = LayoutMapFactoryByAspect<kAlwaysDenseLimit>(image_state);
+    }
+    return map;
+}
+
+// The const variant only needs the image, as it is the key for the map
+const ImageSubresourceLayoutMap *GetImageSubresourceLayoutMap(const CMD_BUFFER_STATE *cb_state, VkImage image) {
+    auto it = cb_state->image_layout_map.find(image);
+    if (it == cb_state->image_layout_map.cend()) {
+        return nullptr;
+    }
+    return it->second.get();
+}
+
+// The non-const variant only needs the image state, as the factory requires it to construct a new entry
+ImageSubresourceLayoutMap *GetImageSubresourceLayoutMap(CMD_BUFFER_STATE *cb_state, const IMAGE_STATE &image_state) {
+    auto it = cb_state->image_layout_map.find(image_state.image);
+    if (it == cb_state->image_layout_map.end()) {
+        // Empty slot... fill it in.
+        auto insert_pair = cb_state->image_layout_map.insert(std::make_pair(image_state.image, LayoutMapFactory(image_state)));
+        assert(insert_pair.second);
+        ImageSubresourceLayoutMap *new_map = insert_pair.first->second.get();
+        assert(new_map);
+        return new_map;
+    }
+    return it->second.get();
+}
+
+// Tracks the number of commands recorded in a command buffer.
+void CoreChecks::IncrementCommandCount(VkCommandBuffer commandBuffer) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    cb_state->commandCount++;
+}
+
+// For a given mem object, verify that it is not null or UNBOUND; if it is, report an error. Return the skip value.
+bool CoreChecks::VerifyBoundMemoryIsValid(VkDeviceMemory mem, const VulkanTypedHandle &typed_handle, const char *api_name,
+                                          const char *error_code) const {
+    bool result = false;
+    auto type_name = object_string[typed_handle.type];
+    if (VK_NULL_HANDLE == mem) {
+        result = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, typed_handle.handle,
+                         error_code, "%s: %s used with no memory bound. Memory should be bound by calling vkBind%sMemory().",
+                         api_name, report_data->FormatHandle(typed_handle).c_str(), type_name + 2);
+    } else if (MEMORY_UNBOUND == mem) {
+        result = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, typed_handle.handle,
+                         error_code,
+                         "%s: %s used with no memory bound and previously bound memory was freed. Memory must not be freed "
+                         "prior to this operation.",
+                         api_name, report_data->FormatHandle(typed_handle).c_str());
+    }
+    return result;
+}
+
+// Check to see if memory was ever bound to this image
+bool CoreChecks::ValidateMemoryIsBoundToImage(const IMAGE_STATE *image_state, const char *api_name, const char *error_code) const {
+    bool result = false;
+    if (image_state->create_from_swapchain != VK_NULL_HANDLE) {
+        if (image_state->bind_swapchain == VK_NULL_HANDLE) {
+            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                    HandleToUint64(image_state->image), error_code,
+                    "%s: %s is created by %s, and the image should be bound by calling vkBindImageMemory2(), and the pNext chain "
+                    "includes VkBindImageMemorySwapchainInfoKHR.",
+                    api_name, report_data->FormatHandle(image_state->image).c_str(),
+                    report_data->FormatHandle(image_state->create_from_swapchain).c_str());
+        } else if (image_state->create_from_swapchain != image_state->bind_swapchain) {
+            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                    HandleToUint64(image_state->image), error_code,
+                    "%s: %s is created by %s, but the image is bound by %s. The image should be created and bound by the same "
+                    "swapchain",
+                    api_name, report_data->FormatHandle(image_state->image).c_str(),
+                    report_data->FormatHandle(image_state->create_from_swapchain).c_str(),
+                    report_data->FormatHandle(image_state->bind_swapchain).c_str());
+        }
+    } else if (0 == (static_cast<uint32_t>(image_state->createInfo.flags) & VK_IMAGE_CREATE_SPARSE_BINDING_BIT)) {
+        result = VerifyBoundMemoryIsValid(image_state->binding.mem, VulkanTypedHandle(image_state->image, kVulkanObjectTypeImage),
+                                          api_name, error_code);
+    }
+    return result;
+}
+
+// Check to see if memory was bound to this buffer
+bool CoreChecks::ValidateMemoryIsBoundToBuffer(const BUFFER_STATE *buffer_state, const char *api_name,
+                                               const char *error_code) const {
+    bool result = false;
+    if (0 == (static_cast<uint32_t>(buffer_state->createInfo.flags) & VK_BUFFER_CREATE_SPARSE_BINDING_BIT)) {
+        result = VerifyBoundMemoryIsValid(buffer_state->binding.mem,
+                                          VulkanTypedHandle(buffer_state->buffer, kVulkanObjectTypeBuffer), api_name, error_code);
+    }
+    return result;
+}
+
+// Check to see if memory was bound to this acceleration structure
+bool CoreChecks::ValidateMemoryIsBoundToAccelerationStructure(const ACCELERATION_STRUCTURE_STATE *as_state, const char *api_name,
+                                                              const char *error_code) const {
+    return VerifyBoundMemoryIsValid(as_state->binding.mem,
+                                    VulkanTypedHandle(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureNV),
+                                    api_name, error_code);
+}
+
+// Valid usage checks for a call to SetMemBinding().
+// For the NULL mem case, output a warning.
+// Make sure the given object is in the global object map.
+//  If a previous binding existed, output a validation error.
+//  Otherwise, add a reference from the objectInfo to the memoryInfo.
+//  Add a reference off of the objInfo.
+// TODO: We may need to refactor or pass in multiple valid usage statements to handle multiple valid usage conditions.
+bool CoreChecks::ValidateSetMemBinding(VkDeviceMemory mem, const VulkanTypedHandle &typed_handle, const char *apiName) const {
+    bool skip = false;
+    // It's an error to bind an object to NULL memory
+    if (mem != VK_NULL_HANDLE) {
+        const BINDABLE *mem_binding = ValidationStateTracker::GetObjectMemBinding(typed_handle);
+        assert(mem_binding);
+        if (mem_binding->sparse) {
+            const char *error_code = "VUID-vkBindImageMemory-image-01045";
+            const char *handle_type = "IMAGE";
+            if (typed_handle.type == kVulkanObjectTypeBuffer) {
+                error_code = "VUID-vkBindBufferMemory-buffer-01030";
+                handle_type = "BUFFER";
+            } else {
+                assert(typed_handle.type == kVulkanObjectTypeImage);
+            }
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+                            HandleToUint64(mem), error_code,
+                            "In %s, attempting to bind %s to %s which was created with sparse memory flags "
+                            "(VK_%s_CREATE_SPARSE_*_BIT).",
+                            apiName, report_data->FormatHandle(mem).c_str(), report_data->FormatHandle(typed_handle).c_str(),
+                            handle_type);
+        }
+        const DEVICE_MEMORY_STATE *mem_info = ValidationStateTracker::GetDevMemState(mem);
+        if (mem_info) {
+            const DEVICE_MEMORY_STATE *prev_binding = ValidationStateTracker::GetDevMemState(mem_binding->binding.mem);
+            if (prev_binding) {
+                const char *error_code = "VUID-vkBindImageMemory-image-01044";
+                if (typed_handle.type == kVulkanObjectTypeBuffer) {
+                    error_code = "VUID-vkBindBufferMemory-buffer-01029";
+                } else {
+                    assert(typed_handle.type == kVulkanObjectTypeImage);
+                }
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+                                HandleToUint64(mem), error_code,
+                                "In %s, attempting to bind %s to %s which has already been bound to %s.", apiName,
+                                report_data->FormatHandle(mem).c_str(), report_data->FormatHandle(typed_handle).c_str(),
+                                report_data->FormatHandle(prev_binding->mem).c_str());
+            } else if (mem_binding->binding.mem == MEMORY_UNBOUND) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+                                HandleToUint64(mem), kVUID_Core_MemTrack_RebindObject,
+                                "In %s, attempting to bind %s to %s which was previously bound to memory that has "
+                                "since been freed. Memory bindings are immutable in "
+                                "Vulkan so this attempt to bind to new memory is not allowed.",
+                                apiName, report_data->FormatHandle(mem).c_str(), report_data->FormatHandle(typed_handle).c_str());
+            }
+        }
+    }
+    return skip;
+}
+
+bool CoreChecks::ValidateDeviceQueueFamily(uint32_t queue_family, const char *cmd_name, const char *parameter_name,
+                                           const char *error_code, bool optional = false) const {
+    bool skip = false;
+    if (!optional && queue_family == VK_QUEUE_FAMILY_IGNORED) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        error_code,
+                        "%s: %s is VK_QUEUE_FAMILY_IGNORED, but it is required to provide a valid queue family index value.",
+                        cmd_name, parameter_name);
+    } else if (queue_family_index_map.find(queue_family) == queue_family_index_map.end()) {
+        skip |= log_msg(
+            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device), error_code,
+            "%s: %s (= %" PRIu32
+            ") is not one of the queue families given via VkDeviceQueueCreateInfo structures when the device was created.",
+            cmd_name, parameter_name, queue_family);
+    }
+
+    return skip;
+}
+
+bool CoreChecks::ValidateQueueFamilies(uint32_t queue_family_count, const uint32_t *queue_families, const char *cmd_name,
+                                       const char *array_parameter_name, const char *unique_error_code,
+                                       const char *valid_error_code, bool optional = false) const {
+    bool skip = false;
+    if (queue_families) {
+        std::unordered_set<uint32_t> set;
+        for (uint32_t i = 0; i < queue_family_count; ++i) {
+            std::string parameter_name = std::string(array_parameter_name) + "[" + std::to_string(i) + "]";
+
+            if (set.count(queue_families[i])) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                                HandleToUint64(device), unique_error_code, "%s: %s (=%" PRIu32 ") is not unique within %s array.",
+                                cmd_name, parameter_name.c_str(), queue_families[i], array_parameter_name);
+            } else {
+                set.insert(queue_families[i]);
+                skip |= ValidateDeviceQueueFamily(queue_families[i], cmd_name, parameter_name.c_str(), valid_error_code, optional);
+            }
+        }
+    }
+    return skip;
+}
+
+// Check object status for selected flag state
+bool CoreChecks::ValidateStatus(const CMD_BUFFER_STATE *pNode, CBStatusFlags status_mask, VkFlags msg_flags, const char *fail_msg,
+                                const char *msg_code) const {
+    if (!(pNode->status & status_mask)) {
+        return log_msg(report_data, msg_flags, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, HandleToUint64(pNode->commandBuffer),
+                       msg_code, "%s: %s..", report_data->FormatHandle(pNode->commandBuffer).c_str(), fail_msg);
+    }
+    return false;
+}
+
+// Return true if, for a given PSO, the given state enum is dynamic; otherwise return false
+static bool IsDynamic(const PIPELINE_STATE *pPipeline, const VkDynamicState state) {
+    if (pPipeline && pPipeline->graphicsPipelineCI.pDynamicState) {
+        for (uint32_t i = 0; i < pPipeline->graphicsPipelineCI.pDynamicState->dynamicStateCount; i++) {
+            if (state == pPipeline->graphicsPipelineCI.pDynamicState->pDynamicStates[i]) return true;
+        }
+    }
+    return false;
+}
+
+// Validate state stored as flags at time of draw call
+bool CoreChecks::ValidateDrawStateFlags(const CMD_BUFFER_STATE *pCB, const PIPELINE_STATE *pPipe, bool indexed,
+                                        const char *msg_code) const {
+    bool result = false;
+    if (pPipe->topology_at_rasterizer == VK_PRIMITIVE_TOPOLOGY_LINE_LIST ||
+        pPipe->topology_at_rasterizer == VK_PRIMITIVE_TOPOLOGY_LINE_STRIP) {
+        result |= ValidateStatus(pCB, CBSTATUS_LINE_WIDTH_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                 "Dynamic line width state not set for this command buffer", msg_code);
+    }
+    if (pPipe->graphicsPipelineCI.pRasterizationState &&
+        (pPipe->graphicsPipelineCI.pRasterizationState->depthBiasEnable == VK_TRUE)) {
+        result |= ValidateStatus(pCB, CBSTATUS_DEPTH_BIAS_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                 "Dynamic depth bias state not set for this command buffer", msg_code);
+    }
+    if (pPipe->blendConstantsEnabled) {
+        result |= ValidateStatus(pCB, CBSTATUS_BLEND_CONSTANTS_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                 "Dynamic blend constants state not set for this command buffer", msg_code);
+    }
+    if (pPipe->graphicsPipelineCI.pDepthStencilState &&
+        (pPipe->graphicsPipelineCI.pDepthStencilState->depthBoundsTestEnable == VK_TRUE)) {
+        result |= ValidateStatus(pCB, CBSTATUS_DEPTH_BOUNDS_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                 "Dynamic depth bounds state not set for this command buffer", msg_code);
+    }
+    if (pPipe->graphicsPipelineCI.pDepthStencilState &&
+        (pPipe->graphicsPipelineCI.pDepthStencilState->stencilTestEnable == VK_TRUE)) {
+        result |= ValidateStatus(pCB, CBSTATUS_STENCIL_READ_MASK_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                 "Dynamic stencil read mask state not set for this command buffer", msg_code);
+        result |= ValidateStatus(pCB, CBSTATUS_STENCIL_WRITE_MASK_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                 "Dynamic stencil write mask state not set for this command buffer", msg_code);
+        result |= ValidateStatus(pCB, CBSTATUS_STENCIL_REFERENCE_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                 "Dynamic stencil reference state not set for this command buffer", msg_code);
+    }
+    if (indexed) {
+        result |= ValidateStatus(pCB, CBSTATUS_INDEX_BUFFER_BOUND, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                 "Index buffer object not bound to this command buffer when Indexed Draw attempted", msg_code);
+    }
+    if (pPipe->topology_at_rasterizer == VK_PRIMITIVE_TOPOLOGY_LINE_LIST ||
+        pPipe->topology_at_rasterizer == VK_PRIMITIVE_TOPOLOGY_LINE_STRIP) {
+        const auto *line_state =
+            lvl_find_in_chain<VkPipelineRasterizationLineStateCreateInfoEXT>(pPipe->graphicsPipelineCI.pRasterizationState->pNext);
+        if (line_state && line_state->stippledLineEnable) {
+            result |= ValidateStatus(pCB, CBSTATUS_LINE_STIPPLE_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                     "Dynamic line stipple state not set for this command buffer", msg_code);
+        }
+    }
+
+    return result;
+}
+
+bool CoreChecks::LogInvalidAttachmentMessage(const char *type1_string, const RENDER_PASS_STATE *rp1_state, const char *type2_string,
+                                             const RENDER_PASS_STATE *rp2_state, uint32_t primary_attach, uint32_t secondary_attach,
+                                             const char *msg, const char *caller, const char *error_code) const {
+    return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                   HandleToUint64(rp1_state->renderPass), error_code,
+                   "%s: RenderPasses incompatible between %s w/ %s and %s w/ %s Attachment %u is not "
+                   "compatible with %u: %s.",
+                   caller, type1_string, report_data->FormatHandle(rp1_state->renderPass).c_str(), type2_string,
+                   report_data->FormatHandle(rp2_state->renderPass).c_str(), primary_attach, secondary_attach, msg);
+}
+
+bool CoreChecks::ValidateAttachmentCompatibility(const char *type1_string, const RENDER_PASS_STATE *rp1_state,
+                                                 const char *type2_string, const RENDER_PASS_STATE *rp2_state,
+                                                 uint32_t primary_attach, uint32_t secondary_attach, const char *caller,
+                                                 const char *error_code) const {
+    bool skip = false;
+    const auto &primaryPassCI = rp1_state->createInfo;
+    const auto &secondaryPassCI = rp2_state->createInfo;
+    if (primaryPassCI.attachmentCount <= primary_attach) {
+        primary_attach = VK_ATTACHMENT_UNUSED;
+    }
+    if (secondaryPassCI.attachmentCount <= secondary_attach) {
+        secondary_attach = VK_ATTACHMENT_UNUSED;
+    }
+    if (primary_attach == VK_ATTACHMENT_UNUSED && secondary_attach == VK_ATTACHMENT_UNUSED) {
+        return skip;
+    }
+    if (primary_attach == VK_ATTACHMENT_UNUSED) {
+        skip |= LogInvalidAttachmentMessage(type1_string, rp1_state, type2_string, rp2_state, primary_attach, secondary_attach,
+                                            "The first is unused while the second is not.", caller, error_code);
+        return skip;
+    }
+    if (secondary_attach == VK_ATTACHMENT_UNUSED) {
+        skip |= LogInvalidAttachmentMessage(type1_string, rp1_state, type2_string, rp2_state, primary_attach, secondary_attach,
+                                            "The second is unused while the first is not.", caller, error_code);
+        return skip;
+    }
+    if (primaryPassCI.pAttachments[primary_attach].format != secondaryPassCI.pAttachments[secondary_attach].format) {
+        skip |= LogInvalidAttachmentMessage(type1_string, rp1_state, type2_string, rp2_state, primary_attach, secondary_attach,
+                                            "They have different formats.", caller, error_code);
+    }
+    if (primaryPassCI.pAttachments[primary_attach].samples != secondaryPassCI.pAttachments[secondary_attach].samples) {
+        skip |= LogInvalidAttachmentMessage(type1_string, rp1_state, type2_string, rp2_state, primary_attach, secondary_attach,
+                                            "They have different samples.", caller, error_code);
+    }
+    if (primaryPassCI.pAttachments[primary_attach].flags != secondaryPassCI.pAttachments[secondary_attach].flags) {
+        skip |= LogInvalidAttachmentMessage(type1_string, rp1_state, type2_string, rp2_state, primary_attach, secondary_attach,
+                                            "They have different flags.", caller, error_code);
+    }
+
+    return skip;
+}
+
+bool CoreChecks::ValidateSubpassCompatibility(const char *type1_string, const RENDER_PASS_STATE *rp1_state,
+                                              const char *type2_string, const RENDER_PASS_STATE *rp2_state, const int subpass,
+                                              const char *caller, const char *error_code) const {
+    bool skip = false;
+    const auto &primary_desc = rp1_state->createInfo.pSubpasses[subpass];
+    const auto &secondary_desc = rp2_state->createInfo.pSubpasses[subpass];
+    uint32_t maxInputAttachmentCount = std::max(primary_desc.inputAttachmentCount, secondary_desc.inputAttachmentCount);
+    for (uint32_t i = 0; i < maxInputAttachmentCount; ++i) {
+        uint32_t primary_input_attach = VK_ATTACHMENT_UNUSED, secondary_input_attach = VK_ATTACHMENT_UNUSED;
+        if (i < primary_desc.inputAttachmentCount) {
+            primary_input_attach = primary_desc.pInputAttachments[i].attachment;
+        }
+        if (i < secondary_desc.inputAttachmentCount) {
+            secondary_input_attach = secondary_desc.pInputAttachments[i].attachment;
+        }
+        skip |= ValidateAttachmentCompatibility(type1_string, rp1_state, type2_string, rp2_state, primary_input_attach,
+                                                secondary_input_attach, caller, error_code);
+    }
+    uint32_t maxColorAttachmentCount = std::max(primary_desc.colorAttachmentCount, secondary_desc.colorAttachmentCount);
+    for (uint32_t i = 0; i < maxColorAttachmentCount; ++i) {
+        uint32_t primary_color_attach = VK_ATTACHMENT_UNUSED, secondary_color_attach = VK_ATTACHMENT_UNUSED;
+        if (i < primary_desc.colorAttachmentCount) {
+            primary_color_attach = primary_desc.pColorAttachments[i].attachment;
+        }
+        if (i < secondary_desc.colorAttachmentCount) {
+            secondary_color_attach = secondary_desc.pColorAttachments[i].attachment;
+        }
+        skip |= ValidateAttachmentCompatibility(type1_string, rp1_state, type2_string, rp2_state, primary_color_attach,
+                                                secondary_color_attach, caller, error_code);
+        if (rp1_state->createInfo.subpassCount > 1) {
+            uint32_t primary_resolve_attach = VK_ATTACHMENT_UNUSED, secondary_resolve_attach = VK_ATTACHMENT_UNUSED;
+            if (i < primary_desc.colorAttachmentCount && primary_desc.pResolveAttachments) {
+                primary_resolve_attach = primary_desc.pResolveAttachments[i].attachment;
+            }
+            if (i < secondary_desc.colorAttachmentCount && secondary_desc.pResolveAttachments) {
+                secondary_resolve_attach = secondary_desc.pResolveAttachments[i].attachment;
+            }
+            skip |= ValidateAttachmentCompatibility(type1_string, rp1_state, type2_string, rp2_state, primary_resolve_attach,
+                                                    secondary_resolve_attach, caller, error_code);
+        }
+    }
+    uint32_t primary_depthstencil_attach = VK_ATTACHMENT_UNUSED, secondary_depthstencil_attach = VK_ATTACHMENT_UNUSED;
+    if (primary_desc.pDepthStencilAttachment) {
+        primary_depthstencil_attach = primary_desc.pDepthStencilAttachment[0].attachment;
+    }
+    if (secondary_desc.pDepthStencilAttachment) {
+        secondary_depthstencil_attach = secondary_desc.pDepthStencilAttachment[0].attachment;
+    }
+    skip |= ValidateAttachmentCompatibility(type1_string, rp1_state, type2_string, rp2_state, primary_depthstencil_attach,
+                                            secondary_depthstencil_attach, caller, error_code);
+    return skip;
+}
+
+// Verify that the given renderPass CreateInfos for the primary and secondary command buffers are compatible.
+//  This function deals directly with the CreateInfo; there are overloaded versions below that take the renderPass handle and
+//  then feed into this function.
+bool CoreChecks::ValidateRenderPassCompatibility(const char *type1_string, const RENDER_PASS_STATE *rp1_state,
+                                                 const char *type2_string, const RENDER_PASS_STATE *rp2_state, const char *caller,
+                                                 const char *error_code) const {
+    bool skip = false;
+
+    if (rp1_state->createInfo.subpassCount != rp2_state->createInfo.subpassCount) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                        HandleToUint64(rp1_state->renderPass), error_code,
+                        "%s: RenderPasses incompatible between %s w/ %s with a subpassCount of %u and %s w/ "
+                        "%s with a subpassCount of %u.",
+                        caller, type1_string, report_data->FormatHandle(rp1_state->renderPass).c_str(),
+                        rp1_state->createInfo.subpassCount, type2_string, report_data->FormatHandle(rp2_state->renderPass).c_str(),
+                        rp2_state->createInfo.subpassCount);
+    } else {
+        for (uint32_t i = 0; i < rp1_state->createInfo.subpassCount; ++i) {
+            skip |= ValidateSubpassCompatibility(type1_string, rp1_state, type2_string, rp2_state, i, caller, error_code);
+        }
+    }
+    return skip;
+}
+
+// For a given pipeline, return the number of MSAA samples, or one if MSAA is disabled
+static VkSampleCountFlagBits GetNumSamples(PIPELINE_STATE const *pipe) {
+    if (pipe->graphicsPipelineCI.pMultisampleState != NULL &&
+        VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO == pipe->graphicsPipelineCI.pMultisampleState->sType) {
+        return pipe->graphicsPipelineCI.pMultisampleState->rasterizationSamples;
+    }
+    return VK_SAMPLE_COUNT_1_BIT;
+}
+
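+// For example, ListBits(s, 0x0000000Bu /* binary 1011 */) appends "0,1,3" to the stream.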
+static void ListBits(std::ostream &s, uint32_t bits) {
+    for (int i = 0; i < 32 && bits; i++) {
+        if (bits & (1 << i)) {
+            s << i;
+            bits &= ~(1 << i);
+            if (bits) {
+                s << ",";
+            }
+        }
+    }
+}
+
+// Validate draw-time state related to the PSO
+bool CoreChecks::ValidatePipelineDrawtimeState(const LAST_BOUND_STATE &state, const CMD_BUFFER_STATE *pCB, CMD_TYPE cmd_type,
+                                               const PIPELINE_STATE *pPipeline, const char *caller) const {
+    bool skip = false;
+    const auto &current_vtx_bfr_binding_info = pCB->current_vertex_buffer_binding_info.vertex_buffer_bindings;
+
+    // Verify vertex binding
+    if (pPipeline->vertex_binding_descriptions_.size() > 0) {
+        for (size_t i = 0; i < pPipeline->vertex_binding_descriptions_.size(); i++) {
+            const auto vertex_binding = pPipeline->vertex_binding_descriptions_[i].binding;
+            if ((current_vtx_bfr_binding_info.size() < (vertex_binding + 1)) ||
+                (current_vtx_bfr_binding_info[vertex_binding].buffer == VK_NULL_HANDLE)) {
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(pCB->commandBuffer), kVUID_Core_DrawState_VtxIndexOutOfBounds,
+                            "%s expects that this Command Buffer's vertex binding Index %u should be set via "
+                            "vkCmdBindVertexBuffers. This is because VkVertexInputBindingDescription struct at "
+                            "index " PRINTF_SIZE_T_SPECIFIER " of pVertexBindingDescriptions has a binding value of %u.",
+                            report_data->FormatHandle(state.pipeline_state->pipeline).c_str(), vertex_binding, i, vertex_binding);
+            }
+        }
+
+        // Verify vertex attribute address alignment
+        for (size_t i = 0; i < pPipeline->vertex_attribute_descriptions_.size(); i++) {
+            const auto &attribute_description = pPipeline->vertex_attribute_descriptions_[i];
+            const auto vertex_binding = attribute_description.binding;
+            const auto attribute_offset = attribute_description.offset;
+
+            const auto &vertex_binding_map_it = pPipeline->vertex_binding_to_index_map_.find(vertex_binding);
+            if ((vertex_binding_map_it != pPipeline->vertex_binding_to_index_map_.cend()) &&
+                (vertex_binding < current_vtx_bfr_binding_info.size()) &&
+                (current_vtx_bfr_binding_info[vertex_binding].buffer != VK_NULL_HANDLE)) {
+                const auto vertex_buffer_stride = pPipeline->vertex_binding_descriptions_[vertex_binding_map_it->second].stride;
+                const auto vertex_buffer_offset = current_vtx_bfr_binding_info[vertex_binding].offset;
+
+                // Use 1 as vertex/instance index to use buffer stride as well
+                const auto attrib_address = vertex_buffer_offset + vertex_buffer_stride + attribute_offset;
+
+                VkDeviceSize vtx_attrib_req_alignment = pPipeline->vertex_attribute_alignments_[i];
+
+                if (SafeModulo(attrib_address, vtx_attrib_req_alignment) != 0) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+                                    HandleToUint64(current_vtx_bfr_binding_info[vertex_binding].buffer),
+                                    kVUID_Core_DrawState_InvalidVtxAttributeAlignment,
+                                    "Invalid attribAddress alignment for vertex attribute " PRINTF_SIZE_T_SPECIFIER
+                                    " from %s and vertex %s.",
+                                    i, report_data->FormatHandle(state.pipeline_state->pipeline).c_str(),
+                                    report_data->FormatHandle(current_vtx_bfr_binding_info[vertex_binding].buffer).c_str());
+                }
+            }
+        }
+    } else {
+        if ((!current_vtx_bfr_binding_info.empty()) && (!pCB->vertex_buffer_used)) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(pCB->commandBuffer), kVUID_Core_DrawState_VtxIndexOutOfBounds,
+                        "Vertex buffers are bound to %s but no vertex buffers are attached to %s.",
+                        report_data->FormatHandle(pCB->commandBuffer).c_str(),
+                        report_data->FormatHandle(state.pipeline_state->pipeline).c_str());
+        }
+    }
+
+    // If viewports or scissors are dynamic, verify that the dynamic count matches the PSO count.
+    // Skip the check if rasterization is disabled or there is no viewport state.
+    if ((!pPipeline->graphicsPipelineCI.pRasterizationState ||
+         (pPipeline->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable == VK_FALSE)) &&
+        pPipeline->graphicsPipelineCI.pViewportState) {
+        bool dynViewport = IsDynamic(pPipeline, VK_DYNAMIC_STATE_VIEWPORT);
+        bool dynScissor = IsDynamic(pPipeline, VK_DYNAMIC_STATE_SCISSOR);
+
+        if (dynViewport) {
+            const auto requiredViewportsMask = (1 << pPipeline->graphicsPipelineCI.pViewportState->viewportCount) - 1;
+            const auto missingViewportMask = ~pCB->viewportMask & requiredViewportsMask;
+            if (missingViewportMask) {
+                std::stringstream ss;
+                ss << "Dynamic viewport(s) ";
+                ListBits(ss, missingViewportMask);
+                ss << " are used by pipeline state object, but were not provided via calls to vkCmdSetViewport().";
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                kVUID_Core_DrawState_ViewportScissorMismatch, "%s", ss.str().c_str());
+            }
+        }
+
+        if (dynScissor) {
+            const auto requiredScissorMask = (1 << pPipeline->graphicsPipelineCI.pViewportState->scissorCount) - 1;
+            const auto missingScissorMask = ~pCB->scissorMask & requiredScissorMask;
+            if (missingScissorMask) {
+                std::stringstream ss;
+                ss << "Dynamic scissor(s) ";
+                ListBits(ss, missingScissorMask);
+                ss << " are used by pipeline state object, but were not provided via calls to vkCmdSetScissor().";
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                kVUID_Core_DrawState_ViewportScissorMismatch, "%s", ss.str().c_str());
+            }
+        }
+    }
+
+    // Verify that any MSAA request in PSO matches sample# in bound FB
+    // Skip the check if rasterization is disabled.
+    if (!pPipeline->graphicsPipelineCI.pRasterizationState ||
+        (pPipeline->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable == VK_FALSE)) {
+        VkSampleCountFlagBits pso_num_samples = GetNumSamples(pPipeline);
+        if (pCB->activeRenderPass) {
+            const auto render_pass_info = pCB->activeRenderPass->createInfo.ptr();
+            const VkSubpassDescription2KHR *subpass_desc = &render_pass_info->pSubpasses[pCB->activeSubpass];
+            uint32_t i;
+            unsigned subpass_num_samples = 0;
+
+            for (i = 0; i < subpass_desc->colorAttachmentCount; i++) {
+                const auto attachment = subpass_desc->pColorAttachments[i].attachment;
+                if (attachment != VK_ATTACHMENT_UNUSED)
+                    subpass_num_samples |= (unsigned)render_pass_info->pAttachments[attachment].samples;
+            }
+
+            if (subpass_desc->pDepthStencilAttachment &&
+                subpass_desc->pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
+                const auto attachment = subpass_desc->pDepthStencilAttachment->attachment;
+                subpass_num_samples |= (unsigned)render_pass_info->pAttachments[attachment].samples;
+            }
+
+            if (!(device_extensions.vk_amd_mixed_attachment_samples || device_extensions.vk_nv_framebuffer_mixed_samples) &&
+                ((subpass_num_samples & static_cast<unsigned>(pso_num_samples)) != subpass_num_samples)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                HandleToUint64(pPipeline->pipeline), kVUID_Core_DrawState_NumSamplesMismatch,
+                                "Num samples mismatch! At draw-time in %s with %u samples while current %s w/ "
+                                "%u samples!",
+                                report_data->FormatHandle(pPipeline->pipeline).c_str(), pso_num_samples,
+                                report_data->FormatHandle(pCB->activeRenderPass->renderPass).c_str(), subpass_num_samples);
+            }
+        } else {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                        HandleToUint64(pPipeline->pipeline), kVUID_Core_DrawState_NoActiveRenderpass,
+                        "No active render pass found at draw-time in %s!", report_data->FormatHandle(pPipeline->pipeline).c_str());
+        }
+    }
+    // Verify that PSO creation renderPass is compatible with active renderPass
+    if (pCB->activeRenderPass) {
+        // TODO: Move all of the error codes common across different Draws into a LUT accessed by cmd_type
+        // TODO: AMD extension codes are included here, but actual function entrypoints are not yet intercepted
+        // Error codes for renderpass and subpass mismatches
+        auto rp_error = "VUID-vkCmdDraw-renderPass-02684", sp_error = "VUID-vkCmdDraw-subpass-02685";
+        switch (cmd_type) {
+            case CMD_DRAWINDEXED:
+                rp_error = "VUID-vkCmdDrawIndexed-renderPass-02684";
+                sp_error = "VUID-vkCmdDrawIndexed-subpass-02685";
+                break;
+            case CMD_DRAWINDIRECT:
+                rp_error = "VUID-vkCmdDrawIndirect-renderPass-02684";
+                sp_error = "VUID-vkCmdDrawIndirect-subpass-02685";
+                break;
+            case CMD_DRAWINDIRECTCOUNTKHR:
+                rp_error = "VUID-vkCmdDrawIndirectCountKHR-renderPass-02684";
+                sp_error = "VUID-vkCmdDrawIndirectCountKHR-subpass-02685";
+                break;
+            case CMD_DRAWINDEXEDINDIRECT:
+                rp_error = "VUID-vkCmdDrawIndexedIndirect-renderPass-02684";
+                sp_error = "VUID-vkCmdDrawIndexedIndirect-subpass-02685";
+                break;
+            case CMD_DRAWINDEXEDINDIRECTCOUNTKHR:
+                rp_error = "VUID-vkCmdDrawIndexedIndirectCountKHR-renderPass-02684";
+                sp_error = "VUID-vkCmdDrawIndexedIndirectCountKHR-subpass-02685";
+                break;
+            case CMD_DRAWMESHTASKSNV:
+                rp_error = "VUID-vkCmdDrawMeshTasksNV-renderPass-02684";
+                sp_error = "VUID-vkCmdDrawMeshTasksNV-subpass-02685";
+                break;
+            case CMD_DRAWMESHTASKSINDIRECTNV:
+                rp_error = "VUID-vkCmdDrawMeshTasksIndirectNV-renderPass-02684";
+                sp_error = "VUID-vkCmdDrawMeshTasksIndirectNV-subpass-02685";
+                break;
+            case CMD_DRAWMESHTASKSINDIRECTCOUNTNV:
+                rp_error = "VUID-vkCmdDrawMeshTasksIndirectCountNV-renderPass-02684";
+                sp_error = "VUID-vkCmdDrawMeshTasksIndirectCountNV-subpass-02685";
+                break;
+            default:
+                assert(CMD_DRAW == cmd_type);
+                break;
+        }
+        if (pCB->activeRenderPass->renderPass != pPipeline->rp_state->renderPass) {
+            // renderPass that PSO was created with must be compatible with active renderPass that PSO is being used with
+            skip |= ValidateRenderPassCompatibility("active render pass", pCB->activeRenderPass, "pipeline state object",
+                                                    pPipeline->rp_state.get(), caller, rp_error);
+        }
+        if (pPipeline->graphicsPipelineCI.subpass != pCB->activeSubpass) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                        HandleToUint64(pPipeline->pipeline), sp_error, "Pipeline was built for subpass %u but used in subpass %u.",
+                        pPipeline->graphicsPipelineCI.subpass, pCB->activeSubpass);
+        }
+    }
+
+    return skip;
+}
+
+// For given cvdescriptorset::DescriptorSet, verify that its Set is compatible w/ the setLayout corresponding to
+// pipelineLayout[layoutIndex]
+static bool VerifySetLayoutCompatibility(const debug_report_data *report_data, const cvdescriptorset::DescriptorSet *descriptor_set,
+                                         PIPELINE_LAYOUT_STATE const *pipeline_layout, const uint32_t layoutIndex,
+                                         string &errorMsg) {
+    auto num_sets = pipeline_layout->set_layouts.size();
+    if (layoutIndex >= num_sets) {
+        stringstream errorStr;
+        errorStr << report_data->FormatHandle(pipeline_layout->layout) << " only contains " << num_sets
+                 << " setLayouts corresponding to sets 0-" << num_sets - 1 << ", but you're attempting to bind a set to index "
+                 << layoutIndex;
+        errorMsg = errorStr.str();
+        return false;
+    }
+    if (descriptor_set->IsPushDescriptor()) return true;
+    auto layout_node = pipeline_layout->set_layouts[layoutIndex].get();
+    return cvdescriptorset::VerifySetLayoutCompatibility(report_data, layout_node, descriptor_set->GetLayout().get(), &errorMsg);
+}
+
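+// Map a draw/dispatch/trace command type to its "-None-02697" (pipeline layout not compatible for set) VUID.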
+static const char *string_VuidNotCompatibleForSet(CMD_TYPE cmd_type) {
+    const static std::map<CMD_TYPE, const char *> incompatible_for_set_vuid = {
+        {CMD_DISPATCH, "VUID-vkCmdDispatch-None-02697"},
+        {CMD_DISPATCHINDIRECT, "VUID-vkCmdDispatchIndirect-None-02697"},
+        {CMD_DRAW, "VUID-vkCmdDraw-None-02697"},
+        {CMD_DRAWINDEXED, "VUID-vkCmdDrawIndexed-None-02697"},
+        {CMD_DRAWINDEXEDINDIRECT, "VUID-vkCmdDrawIndexedIndirect-None-02697"},
+        {CMD_DRAWINDEXEDINDIRECTCOUNTKHR, "VUID-vkCmdDrawIndexedIndirectCountKHR-None-02697"},
+        {CMD_DRAWINDIRECT, "VUID-vkCmdDrawIndirect-None-02697"},
+        {CMD_DRAWINDIRECTCOUNTKHR, "VUID-vkCmdDrawIndirectCountKHR-None-02697"},
+        {CMD_DRAWMESHTASKSINDIRECTCOUNTNV, "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02697"},
+        {CMD_DRAWMESHTASKSINDIRECTNV, "VUID-vkCmdDrawMeshTasksIndirectNV-None-02697"},
+        {CMD_DRAWMESHTASKSNV, "VUID-vkCmdDrawMeshTasksNV-None-02697"},
+
+        // Not implemented on this path...
+        // { CMD_DRAWDISPATCHBASE, "VUID-vkCmdDispatchBase-None-02697" },
+        // { CMD_DRAWINDIRECTBYTECOUNTEXT, "VUID-vkCmdDrawIndirectByteCountEXT-None-02697"},
+        {CMD_TRACERAYSNV, "VUID-vkCmdTraceRaysNV-None-02697"},
+    };
+    auto find_it = incompatible_for_set_vuid.find(cmd_type);
+    if (find_it == incompatible_for_set_vuid.cend()) {
+        assert(find_it != incompatible_for_set_vuid.cend());
+        return "BAD VUID -- Unknown Command Type";
+    }
+    return find_it->second;
+}
+// Validate overall state at the time of a draw call
+bool CoreChecks::ValidateCmdBufDrawState(const CMD_BUFFER_STATE *cb_node, CMD_TYPE cmd_type, const bool indexed,
+                                         const VkPipelineBindPoint bind_point, const char *function, const char *pipe_err_code,
+                                         const char *state_err_code) const {
+    const auto last_bound_it = cb_node->lastBound.find(bind_point);
+    const PIPELINE_STATE *pPipe = nullptr;
+    if (last_bound_it != cb_node->lastBound.cend()) {
+        pPipe = last_bound_it->second.pipeline_state;
+    }
+
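+    // All remaining checks require a bound pipeline, so report the missing pipeline and stop here.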
+    if (nullptr == pPipe) {
+        return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                       HandleToUint64(cb_node->commandBuffer), pipe_err_code,
+                       "Must not call %s on this command buffer while there is no %s pipeline bound.", function,
+                       bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS ? "Graphics" : "Compute");
+    }
+
+    bool result = false;
+    auto const &state = last_bound_it->second;
+
+    // First check flag states
+    if (VK_PIPELINE_BIND_POINT_GRAPHICS == bind_point) result = ValidateDrawStateFlags(cb_node, pPipe, indexed, state_err_code);
+
+    // Now complete other state checks
+    string errorString;
+    auto const &pipeline_layout = pPipe->pipeline_layout.get();
+
+    // Check if the current pipeline is compatible for the maximum used set with the bound sets.
+    if (pPipe->active_slots.size() > 0 && !CompatForSet(pPipe->max_active_slot, state, pipeline_layout->compat_for_set)) {
+        result |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                          HandleToUint64(pPipe->pipeline), string_VuidNotCompatibleForSet(cmd_type),
+                          "%s(): %s created with %s is not compatible with the currently bound descriptor sets for the "
+                          "highest statically used set (%" PRIu32 "); the sets were last bound with %s",
+                          command_name_list[cmd_type], report_data->FormatHandle(pPipe->pipeline).c_str(),
+                          report_data->FormatHandle(pipeline_layout->layout).c_str(), pPipe->max_active_slot,
+                          report_data->FormatHandle(state.pipeline_layout).c_str());
+    }
+
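+    // Walk every descriptor set the pipeline statically uses and validate its binding, layout
+    // compatibility, and draw-time contents.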
+    for (const auto &set_binding_pair : pPipe->active_slots) {
+        uint32_t setIndex = set_binding_pair.first;
+        // If valid set is not bound throw an error
+        if ((state.per_set.size() <= setIndex) || (!state.per_set[setIndex].bound_descriptor_set)) {
+            result |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(cb_node->commandBuffer), kVUID_Core_DrawState_DescriptorSetNotBound,
+                        "%s uses set #%u but that set is not bound.", report_data->FormatHandle(pPipe->pipeline).c_str(), setIndex);
+        } else if (!VerifySetLayoutCompatibility(report_data, state.per_set[setIndex].bound_descriptor_set, pipeline_layout,
+                                                 setIndex, errorString)) {
+            // Set is bound but not compatible w/ overlapping pipeline_layout from PSO
+            VkDescriptorSet setHandle = state.per_set[setIndex].bound_descriptor_set->GetSet();
+            result |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+                              HandleToUint64(setHandle), kVUID_Core_DrawState_PipelineLayoutsIncompatible,
+                              "%s bound as set #%u is not compatible with overlapping %s due to: %s",
+                              report_data->FormatHandle(setHandle).c_str(), setIndex,
+                              report_data->FormatHandle(pipeline_layout->layout).c_str(), errorString.c_str());
+        } else {  // Valid set is bound and layout compatible, validate that it's updated
+            // Pull the set node
+            const cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[setIndex].bound_descriptor_set;
+            // Validate the draw-time state for this descriptor set
+            std::string err_str;
+            if (!descriptor_set->IsPushDescriptor()) {
+                // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor
+                // binding validation. Take the requested binding set and prefilter it to eliminate redundant validation checks.
+                // Here, the currently bound pipeline determines whether an image validation check is redundant...
+                // for images, the "req" portion of the binding_req is indirectly (but tightly) coupled to the pipeline.
+                cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
+                const auto &binding_req_map = reduced_map.FilteredMap(*cb_node, *pPipe);
+
+                // We can skip validating the descriptor set if "nothing" has changed since the last validation.
+                // Same set, no image layout changes, and same "pipeline state" (binding_req_map). If there are
+                // any dynamic descriptors, always revalidate rather than caching the values. We currently only
+                // apply this optimization if IsManyDescriptors is true, to avoid the overhead of copying the
+                // binding_req_map which could potentially be expensive.
+                bool descriptor_set_changed =
+                    !reduced_map.IsManyDescriptors() ||
+                    // Revalidate each time if the set has dynamic offsets
+                    state.per_set[setIndex].dynamicOffsets.size() > 0 ||
+                    // Revalidate if descriptor set (or contents) has changed
+                    state.per_set[setIndex].validated_set != descriptor_set ||
+                    state.per_set[setIndex].validated_set_change_count != descriptor_set->GetChangeCount() ||
+                    (!disabled.image_layout_validation &&
+                     state.per_set[setIndex].validated_set_image_layout_change_count != cb_node->image_layout_change_count);
+                bool need_validate = descriptor_set_changed ||
+                                     // Revalidate if previous bindingReqMap doesn't include new bindingReqMap
+                                     !std::includes(state.per_set[setIndex].validated_set_binding_req_map.begin(),
+                                                    state.per_set[setIndex].validated_set_binding_req_map.end(),
+                                                    binding_req_map.begin(), binding_req_map.end());
+
+                if (need_validate) {
+                    bool success;
+                    if (!descriptor_set_changed && reduced_map.IsManyDescriptors()) {
+                        // Only validate the bindings that haven't already been validated
+                        BindingReqMap delta_reqs;
+                        std::set_difference(binding_req_map.begin(), binding_req_map.end(),
+                                            state.per_set[setIndex].validated_set_binding_req_map.begin(),
+                                            state.per_set[setIndex].validated_set_binding_req_map.end(),
+                                            std::inserter(delta_reqs, delta_reqs.begin()));
+                        success = ValidateDrawState(descriptor_set, delta_reqs, state.per_set[setIndex].dynamicOffsets, cb_node,
+                                                    function, &err_str);
+                    } else {
+                        success = ValidateDrawState(descriptor_set, binding_req_map, state.per_set[setIndex].dynamicOffsets,
+                                                    cb_node, function, &err_str);
+                    }
+                    if (!success) {
+                        auto set = descriptor_set->GetSet();
+                        result |=
+                            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+                                    HandleToUint64(set), kVUID_Core_DrawState_DescriptorSetNotUpdated,
+                                    "%s bound as set #%u encountered the following validation error at %s time: %s",
+                                    report_data->FormatHandle(set).c_str(), setIndex, function, err_str.c_str());
+                    }
+                }
+            }
+        }
+    }
+
+    // Check general pipeline state that needs to be validated at drawtime
+    if (VK_PIPELINE_BIND_POINT_GRAPHICS == bind_point)
+        result |= ValidatePipelineDrawtimeState(state, cb_node, cmd_type, pPipe, function);
+
+    return result;
+}
+
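+// LOCKED pipeline validation: requires access to the other pipelines created in the same call, e.g. to
+// resolve basePipelineIndex/basePipelineHandle for derivative pipelines.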
+bool CoreChecks::ValidatePipelineLocked(std::vector<std::shared_ptr<PIPELINE_STATE>> const &pPipelines, int pipelineIndex) const {
+    bool skip = false;
+
+    const PIPELINE_STATE *pPipeline = pPipelines[pipelineIndex].get();
+
+    // If create derivative bit is set, check that we've specified a base
+    // pipeline correctly, and that the base pipeline was created to allow
+    // derivatives.
+    if (pPipeline->graphicsPipelineCI.flags & VK_PIPELINE_CREATE_DERIVATIVE_BIT) {
+        const PIPELINE_STATE *pBasePipeline = nullptr;
+        if (!((pPipeline->graphicsPipelineCI.basePipelineHandle != VK_NULL_HANDLE) ^
+              (pPipeline->graphicsPipelineCI.basePipelineIndex != -1))) {
+            // This check is a superset of "VUID-VkGraphicsPipelineCreateInfo-flags-00724" and
+            // "VUID-VkGraphicsPipelineCreateInfo-flags-00725"
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                            HandleToUint64(device), kVUID_Core_DrawState_InvalidPipelineCreateState,
+                            "Invalid Pipeline CreateInfo: exactly one of base pipeline index and handle must be specified");
+        } else if (pPipeline->graphicsPipelineCI.basePipelineIndex != -1) {
+            if (pPipeline->graphicsPipelineCI.basePipelineIndex >= pipelineIndex) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                                HandleToUint64(device), "VUID-vkCreateGraphicsPipelines-flags-00720",
+                                "Invalid Pipeline CreateInfo: base pipeline must occur earlier in array than derivative pipeline.");
+            } else {
+                pBasePipeline = pPipelines[pPipeline->graphicsPipelineCI.basePipelineIndex].get();
+            }
+        } else if (pPipeline->graphicsPipelineCI.basePipelineHandle != VK_NULL_HANDLE) {
+            pBasePipeline = GetPipelineState(pPipeline->graphicsPipelineCI.basePipelineHandle);
+        }
+
+        if (pBasePipeline && !(pBasePipeline->graphicsPipelineCI.flags & VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                            HandleToUint64(device), kVUID_Core_DrawState_InvalidPipelineCreateState,
+                            "Invalid Pipeline CreateInfo: base pipeline does not allow derivatives.");
+        }
+    }
+
+    return skip;
+}
+
+// UNLOCKED pipeline validation. DO NOT lookup objects in the CoreChecks->* maps in this function.
+bool CoreChecks::ValidatePipelineUnlocked(const PIPELINE_STATE *pPipeline, uint32_t pipelineIndex) const {
+    bool skip = false;
+
+    // Ensure the subpass index is valid before taking a pointer to the subpass description. If it is
+    // out of range, ValidateGraphicsPipelineShaderState produces nonsense errors that confuse users.
+    // Other layers should already emit errors for the renderpass being invalid.
+    auto *subpass_desc = (pPipeline->graphicsPipelineCI.subpass < pPipeline->rp_state->createInfo.subpassCount)
+                             ? &pPipeline->rp_state->createInfo.pSubpasses[pPipeline->graphicsPipelineCI.subpass]
+                             : nullptr;
+    if (subpass_desc == nullptr) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        "VUID-VkGraphicsPipelineCreateInfo-subpass-00759",
+                        "Invalid Pipeline CreateInfo State: Subpass index %u is out of range for this renderpass (0..%u).",
+                        pPipeline->graphicsPipelineCI.subpass, pPipeline->rp_state->createInfo.subpassCount - 1);
+    }
+
+    if (pPipeline->graphicsPipelineCI.pColorBlendState != NULL) {
+        const safe_VkPipelineColorBlendStateCreateInfo *color_blend_state = pPipeline->graphicsPipelineCI.pColorBlendState;
+        if (subpass_desc && color_blend_state->attachmentCount != subpass_desc->colorAttachmentCount) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        "VUID-VkGraphicsPipelineCreateInfo-attachmentCount-00746",
+                        "vkCreateGraphicsPipelines(): %s subpass %u has colorAttachmentCount of %u which doesn't "
+                        "match the pColorBlendState->attachmentCount of %u.",
+                        report_data->FormatHandle(pPipeline->rp_state->renderPass).c_str(), pPipeline->graphicsPipelineCI.subpass,
+                        subpass_desc->colorAttachmentCount, color_blend_state->attachmentCount);
+        }
+        if (!enabled_features.core.independentBlend) {
+            if (pPipeline->attachments.size() > 1) {
+                const VkPipelineColorBlendAttachmentState *const pAttachments = &pPipeline->attachments[0];
+                for (size_t i = 1; i < pPipeline->attachments.size(); i++) {
+                    // Quoting the spec: "If [the independent blend] feature is not enabled, the VkPipelineColorBlendAttachmentState
+                    // settings for all color attachments must be identical." VkPipelineColorBlendAttachmentState contains
+                    // only attachment state, so memcmp is best suited for the comparison
+                    if (memcmp(static_cast<const void *>(pAttachments), static_cast<const void *>(&pAttachments[i]),
+                               sizeof(pAttachments[0]))) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                                        HandleToUint64(device), "VUID-VkPipelineColorBlendStateCreateInfo-pAttachments-00605",
+                                        "Invalid Pipeline CreateInfo: If the independentBlend feature is not enabled, all "
+                                        "elements of pAttachments must be identical.");
+                        break;
+                    }
+                }
+            }
+        }
+        if (!enabled_features.core.logicOp && (pPipeline->graphicsPipelineCI.pColorBlendState->logicOpEnable != VK_FALSE)) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        "VUID-VkPipelineColorBlendStateCreateInfo-logicOpEnable-00606",
+                        "Invalid Pipeline CreateInfo: If the logicOp feature is not enabled, logicOpEnable must be VK_FALSE.");
+        }
+        for (size_t i = 0; i < pPipeline->attachments.size(); i++) {
+            if ((pPipeline->attachments[i].srcColorBlendFactor == VK_BLEND_FACTOR_SRC1_COLOR) ||
+                (pPipeline->attachments[i].srcColorBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR) ||
+                (pPipeline->attachments[i].srcColorBlendFactor == VK_BLEND_FACTOR_SRC1_ALPHA) ||
+                (pPipeline->attachments[i].srcColorBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA)) {
+                if (!enabled_features.core.dualSrcBlend) {
+                    skip |=
+                        log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                                HandleToUint64(device), "VUID-VkPipelineColorBlendAttachmentState-srcColorBlendFactor-00608",
+                                "vkCreateGraphicsPipelines(): pPipelines[%d].pColorBlendState.pAttachments[" PRINTF_SIZE_T_SPECIFIER
+                                "].srcColorBlendFactor uses a dual-source blend factor (%d), but this device feature is not "
+                                "enabled.",
+                                pipelineIndex, i, pPipeline->attachments[i].srcColorBlendFactor);
+                }
+            }
+            if ((pPipeline->attachments[i].dstColorBlendFactor == VK_BLEND_FACTOR_SRC1_COLOR) ||
+                (pPipeline->attachments[i].dstColorBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR) ||
+                (pPipeline->attachments[i].dstColorBlendFactor == VK_BLEND_FACTOR_SRC1_ALPHA) ||
+                (pPipeline->attachments[i].dstColorBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA)) {
+                if (!enabled_features.core.dualSrcBlend) {
+                    skip |=
+                        log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                                HandleToUint64(device), "VUID-VkPipelineColorBlendAttachmentState-dstColorBlendFactor-00609",
+                                "vkCreateGraphicsPipelines(): pPipelines[%d].pColorBlendState.pAttachments[" PRINTF_SIZE_T_SPECIFIER
+                                "].dstColorBlendFactor uses a dual-source blend factor (%d), but this device feature is not "
+                                "enabled.",
+                                pipelineIndex, i, pPipeline->attachments[i].dstColorBlendFactor);
+                }
+            }
+            if ((pPipeline->attachments[i].srcAlphaBlendFactor == VK_BLEND_FACTOR_SRC1_COLOR) ||
+                (pPipeline->attachments[i].srcAlphaBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR) ||
+                (pPipeline->attachments[i].srcAlphaBlendFactor == VK_BLEND_FACTOR_SRC1_ALPHA) ||
+                (pPipeline->attachments[i].srcAlphaBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA)) {
+                if (!enabled_features.core.dualSrcBlend) {
+                    skip |=
+                        log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                                HandleToUint64(device), "VUID-VkPipelineColorBlendAttachmentState-srcAlphaBlendFactor-00610",
+                                "vkCreateGraphicsPipelines(): pPipelines[%d].pColorBlendState.pAttachments[" PRINTF_SIZE_T_SPECIFIER
+                                "].srcAlphaBlendFactor uses a dual-source blend factor (%d), but this device feature is not "
+                                "enabled.",
+                                pipelineIndex, i, pPipeline->attachments[i].srcAlphaBlendFactor);
+                }
+            }
+            if ((pPipeline->attachments[i].dstAlphaBlendFactor == VK_BLEND_FACTOR_SRC1_COLOR) ||
+                (pPipeline->attachments[i].dstAlphaBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR) ||
+                (pPipeline->attachments[i].dstAlphaBlendFactor == VK_BLEND_FACTOR_SRC1_ALPHA) ||
+                (pPipeline->attachments[i].dstAlphaBlendFactor == VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA)) {
+                if (!enabled_features.core.dualSrcBlend) {
+                    skip |=
+                        log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                                HandleToUint64(device), "VUID-VkPipelineColorBlendAttachmentState-dstAlphaBlendFactor-00611",
+                                "vkCreateGraphicsPipelines(): pPipelines[%d].pColorBlendState.pAttachments[" PRINTF_SIZE_T_SPECIFIER
+                                "].dstAlphaBlendFactor uses a dual-source blend factor (%d), but this device feature is not "
+                                "enabled.",
+                                pipelineIndex, i, pPipeline->attachments[i].dstAlphaBlendFactor);
+                }
+            }
+        }
+    }
+
+    if (ValidateGraphicsPipelineShaderState(pPipeline)) {
+        skip = true;
+    }
+    // Each shader's stage must be unique
+    if (pPipeline->duplicate_shaders) {
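+        // Report every graphics stage that was supplied more than once.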
+        for (uint32_t stage = VK_SHADER_STAGE_VERTEX_BIT; stage & VK_SHADER_STAGE_ALL_GRAPHICS; stage <<= 1) {
+            if (pPipeline->duplicate_shaders & stage) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                                HandleToUint64(device), kVUID_Core_DrawState_InvalidPipelineCreateState,
+                                "Invalid Pipeline CreateInfo State: Multiple shaders provided for stage %s",
+                                string_VkShaderStageFlagBits(VkShaderStageFlagBits(stage)));
+            }
+        }
+    }
+    if (device_extensions.vk_nv_mesh_shader) {
+        // VS or mesh is required
+        if (!(pPipeline->active_shaders & (VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_MESH_BIT_NV))) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                            HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-stage-02096",
+                            "Invalid Pipeline CreateInfo State: Vertex Shader or Mesh Shader required.");
+        }
+        // Can't mix mesh and VTG
+        if ((pPipeline->active_shaders & (VK_SHADER_STAGE_MESH_BIT_NV | VK_SHADER_STAGE_TASK_BIT_NV)) &&
+            (pPipeline->active_shaders &
+             (VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_GEOMETRY_BIT | VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT |
+              VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT))) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                            HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-pStages-02095",
+                            "Invalid Pipeline CreateInfo State: Geometric shader stages must either be all mesh (mesh | task) "
+                            "or all VTG (vertex, tess control, tess eval, geom).");
+        }
+    } else {
+        // VS is required
+        if (!(pPipeline->active_shaders & VK_SHADER_STAGE_VERTEX_BIT)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                            HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-stage-00727",
+                            "Invalid Pipeline CreateInfo State: Vertex Shader required.");
+        }
+    }
+
+    if (!enabled_features.mesh_shader.meshShader && (pPipeline->active_shaders & VK_SHADER_STAGE_MESH_BIT_NV)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        "VUID-VkPipelineShaderStageCreateInfo-stage-02091",
+                        "Invalid Pipeline CreateInfo State: Mesh Shader not supported.");
+    }
+
+    if (!enabled_features.mesh_shader.taskShader && (pPipeline->active_shaders & VK_SHADER_STAGE_TASK_BIT_NV)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        "VUID-VkPipelineShaderStageCreateInfo-stage-02092",
+                        "Invalid Pipeline CreateInfo State: Task Shader not supported.");
+    }
+
+    // Either both or neither TC/TE shaders should be defined
+    bool has_control = (pPipeline->active_shaders & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) != 0;
+    bool has_eval = (pPipeline->active_shaders & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) != 0;
+    if (has_control && !has_eval) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        "VUID-VkGraphicsPipelineCreateInfo-pStages-00729",
+                        "Invalid Pipeline CreateInfo State: TE and TC shaders must be included or excluded as a pair.");
+    }
+    if (!has_control && has_eval) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        "VUID-VkGraphicsPipelineCreateInfo-pStages-00730",
+                        "Invalid Pipeline CreateInfo State: TE and TC shaders must be included or excluded as a pair.");
+    }
+    // Compute shaders should be specified independent of Gfx shaders
+    if (pPipeline->active_shaders & VK_SHADER_STAGE_COMPUTE_BIT) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        "VUID-VkGraphicsPipelineCreateInfo-stage-00728",
+                        "Invalid Pipeline CreateInfo State: Do not specify Compute Shader for Gfx Pipeline.");
+    }
+
+    if ((pPipeline->active_shaders & VK_SHADER_STAGE_VERTEX_BIT) && !pPipeline->graphicsPipelineCI.pInputAssemblyState) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        "VUID-VkGraphicsPipelineCreateInfo-pStages-02098",
+                        "Invalid Pipeline CreateInfo State: Missing pInputAssemblyState.");
+    }
+
+    // VK_PRIMITIVE_TOPOLOGY_PATCH_LIST primitive topology is only valid for tessellation pipelines.
+    // Mismatching primitive topology and tessellation fails graphics pipeline creation.
+    if (has_control && has_eval &&
+        (!pPipeline->graphicsPipelineCI.pInputAssemblyState ||
+         pPipeline->graphicsPipelineCI.pInputAssemblyState->topology != VK_PRIMITIVE_TOPOLOGY_PATCH_LIST)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        "VUID-VkGraphicsPipelineCreateInfo-pStages-00736",
+                        "Invalid Pipeline CreateInfo State: VK_PRIMITIVE_TOPOLOGY_PATCH_LIST must be set as IA topology for "
+                        "tessellation pipelines.");
+    }
+    if (pPipeline->graphicsPipelineCI.pInputAssemblyState) {
+        if (pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_PATCH_LIST) {
+            if (!has_control || !has_eval) {
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                            HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-topology-00737",
+                            "Invalid Pipeline CreateInfo State: VK_PRIMITIVE_TOPOLOGY_PATCH_LIST primitive topology is only valid "
+                            "for tessellation pipelines.");
+            }
+        }
+
+        if ((pPipeline->graphicsPipelineCI.pInputAssemblyState->primitiveRestartEnable == VK_TRUE) &&
+            (pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_POINT_LIST ||
+             pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_LINE_LIST ||
+             pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST ||
+             pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY ||
+             pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY ||
+             pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_PATCH_LIST)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                            HandleToUint64(device), "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00428",
+                            "primitiveRestartEnable is VK_TRUE, which is not valid for topology %s.",
+                            string_VkPrimitiveTopology(pPipeline->graphicsPipelineCI.pInputAssemblyState->topology));
+        }
+        if ((enabled_features.core.geometryShader == VK_FALSE) &&
+            (pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY ||
+             pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY ||
+             pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY ||
+             pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                            HandleToUint64(device), "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00429",
+                            "topology is %s but the geometryShader feature is not enabled.",
+                            string_VkPrimitiveTopology(pPipeline->graphicsPipelineCI.pInputAssemblyState->topology));
+        }
+        if ((enabled_features.core.tessellationShader == VK_FALSE) &&
+            (pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_PATCH_LIST)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                            HandleToUint64(device), "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00430",
+                            "topology is %s but the tessellationShader feature is not enabled.",
+                            string_VkPrimitiveTopology(pPipeline->graphicsPipelineCI.pInputAssemblyState->topology));
+        }
+    }
+
+    // If a rasterization state is provided...
+    if (pPipeline->graphicsPipelineCI.pRasterizationState) {
+        if ((pPipeline->graphicsPipelineCI.pRasterizationState->depthClampEnable == VK_TRUE) &&
+            (!enabled_features.core.depthClamp)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                            HandleToUint64(device), "VUID-VkPipelineRasterizationStateCreateInfo-depthClampEnable-00782",
+                            "vkCreateGraphicsPipelines(): the depthClamp device feature is disabled: the depthClampEnable member "
+                            "of the VkPipelineRasterizationStateCreateInfo structure must be set to VK_FALSE.");
+        }
+
+        if (!IsDynamic(pPipeline, VK_DYNAMIC_STATE_DEPTH_BIAS) &&
+            (pPipeline->graphicsPipelineCI.pRasterizationState->depthBiasClamp != 0.0) && (!enabled_features.core.depthBiasClamp)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                            HandleToUint64(device), kVUID_Core_DrawState_InvalidFeature,
+                            "vkCreateGraphicsPipelines(): the depthBiasClamp device feature is disabled: the depthBiasClamp member "
+                            "of the VkPipelineRasterizationStateCreateInfo structure must be set to 0.0 unless the "
+                            "VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state is enabled");
+        }
+
+        // If rasterization is enabled...
+        if (pPipeline->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable == VK_FALSE) {
+            if ((pPipeline->graphicsPipelineCI.pMultisampleState->alphaToOneEnable == VK_TRUE) &&
+                (!enabled_features.core.alphaToOne)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                                HandleToUint64(device), "VUID-VkPipelineMultisampleStateCreateInfo-alphaToOneEnable-00785",
+                                "vkCreateGraphicsPipelines(): the alphaToOne device feature is disabled: the alphaToOneEnable "
+                                "member of the VkPipelineMultisampleStateCreateInfo structure must be set to VK_FALSE.");
+            }
+
+            // If subpass uses a depth/stencil attachment, pDepthStencilState must be a pointer to a valid structure
+            if (subpass_desc && subpass_desc->pDepthStencilAttachment &&
+                subpass_desc->pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
+                if (!pPipeline->graphicsPipelineCI.pDepthStencilState) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                                    HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00752",
+                                    "Invalid Pipeline CreateInfo State: pDepthStencilState is NULL when rasterization is enabled "
+                                    "and subpass uses a depth/stencil attachment.");
+
+                } else if ((pPipeline->graphicsPipelineCI.pDepthStencilState->depthBoundsTestEnable == VK_TRUE) &&
+                           (!enabled_features.core.depthBounds)) {
+                    skip |=
+                        log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                                HandleToUint64(device), "VUID-VkPipelineDepthStencilStateCreateInfo-depthBoundsTestEnable-00598",
+                                "vkCreateGraphicsPipelines(): the depthBounds device feature is disabled: the "
+                                "depthBoundsTestEnable member of the VkPipelineDepthStencilStateCreateInfo structure must be "
+                                "set to VK_FALSE.");
+                }
+            }
+
+            // If subpass uses color attachments, pColorBlendState must be valid pointer
+            if (subpass_desc) {
+                uint32_t color_attachment_count = 0;
+                for (uint32_t i = 0; i < subpass_desc->colorAttachmentCount; ++i) {
+                    if (subpass_desc->pColorAttachments[i].attachment != VK_ATTACHMENT_UNUSED) {
+                        ++color_attachment_count;
+                    }
+                }
+                if (color_attachment_count > 0 && pPipeline->graphicsPipelineCI.pColorBlendState == nullptr) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                                    HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00753",
+                                    "Invalid Pipeline CreateInfo State: pColorBlendState is NULL when rasterization is enabled and "
+                                    "subpass uses color attachments.");
+                }
+            }
+        }
+    }
+
+    if ((pPipeline->active_shaders & VK_SHADER_STAGE_VERTEX_BIT) && !pPipeline->graphicsPipelineCI.pVertexInputState) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        "VUID-VkGraphicsPipelineCreateInfo-pStages-02097",
+                        "Invalid Pipeline CreateInfo State: Missing pVertexInputState.");
+    }
+
+    auto vi = pPipeline->graphicsPipelineCI.pVertexInputState;
+    if (vi != NULL) {
+        for (uint32_t j = 0; j < vi->vertexAttributeDescriptionCount; j++) {
+            VkFormat format = vi->pVertexAttributeDescriptions[j].format;
+            // Internal call to get format info.  Still goes through layers, could potentially go directly to ICD.
+            VkFormatProperties properties;
+            DispatchGetPhysicalDeviceFormatProperties(physical_device, format, &properties);
+            if ((properties.bufferFeatures & VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT) == 0) {
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                            HandleToUint64(device), "VUID-VkVertexInputAttributeDescription-format-00623",
+                            "vkCreateGraphicsPipelines: pCreateInfo[%d].pVertexInputState->vertexAttributeDescriptions[%d].format "
+                            "(%s) is not a supported vertex buffer format.",
+                            pipelineIndex, j, string_VkFormat(format));
+            }
+        }
+    }
+
+    if (subpass_desc && pPipeline->graphicsPipelineCI.pMultisampleState) {
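+        // Helper that ORs into 'samples' the sample counts of all used color attachments in this subpass.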
+        auto accumColorSamples = [subpass_desc, pPipeline](uint32_t &samples) {
+            for (uint32_t i = 0; i < subpass_desc->colorAttachmentCount; i++) {
+                const auto attachment = subpass_desc->pColorAttachments[i].attachment;
+                if (attachment != VK_ATTACHMENT_UNUSED) {
+                    samples |= static_cast<uint32_t>(pPipeline->rp_state->createInfo.pAttachments[attachment].samples);
+                }
+            }
+        };
+
+        if (!(device_extensions.vk_amd_mixed_attachment_samples || device_extensions.vk_nv_framebuffer_mixed_samples)) {
+            uint32_t raster_samples = static_cast<uint32_t>(GetNumSamples(pPipeline));
+            uint32_t subpass_num_samples = 0;
+
+            accumColorSamples(subpass_num_samples);
+
+            if (subpass_desc->pDepthStencilAttachment &&
+                subpass_desc->pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
+                const auto attachment = subpass_desc->pDepthStencilAttachment->attachment;
+                subpass_num_samples |= static_cast<uint32_t>(pPipeline->rp_state->createInfo.pAttachments[attachment].samples);
+            }
+
+            // subpass_num_samples is 0 when the subpass has no attachments or if all attachments are VK_ATTACHMENT_UNUSED.
+            // Only validate the value of subpass_num_samples if the subpass has attachments that are not VK_ATTACHMENT_UNUSED.
+            if (subpass_num_samples && (!IsPowerOfTwo(subpass_num_samples) || (subpass_num_samples != raster_samples))) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                                HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-subpass-00757",
+                                "vkCreateGraphicsPipelines: pCreateInfo[%d].pMultisampleState->rasterizationSamples (%u) "
+                                "does not match the number of samples of the RenderPass color and/or depth attachment.",
+                                pipelineIndex, raster_samples);
+            }
+        }
+
+        if (device_extensions.vk_amd_mixed_attachment_samples) {
+            VkSampleCountFlagBits max_sample_count = static_cast<VkSampleCountFlagBits>(0);
+            for (uint32_t i = 0; i < subpass_desc->colorAttachmentCount; ++i) {
+                if (subpass_desc->pColorAttachments[i].attachment != VK_ATTACHMENT_UNUSED) {
+                    max_sample_count = std::max(
+                        max_sample_count,
+                        pPipeline->rp_state->createInfo.pAttachments[subpass_desc->pColorAttachments[i].attachment].samples);
+                }
+            }
+            if (subpass_desc->pDepthStencilAttachment &&
+                subpass_desc->pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
+                max_sample_count = std::max(
+                    max_sample_count,
+                    pPipeline->rp_state->createInfo.pAttachments[subpass_desc->pDepthStencilAttachment->attachment].samples);
+            }
+            if ((pPipeline->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable == VK_FALSE) &&
+                (pPipeline->graphicsPipelineCI.pMultisampleState->rasterizationSamples != max_sample_count)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                                HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-subpass-01505",
+                                "vkCreateGraphicsPipelines: pCreateInfo[%d].pMultisampleState->rasterizationSamples (%s) != max "
+                                "attachment samples (%s) used in subpass %u.",
+                                pipelineIndex,
+                                string_VkSampleCountFlagBits(pPipeline->graphicsPipelineCI.pMultisampleState->rasterizationSamples),
+                                string_VkSampleCountFlagBits(max_sample_count), pPipeline->graphicsPipelineCI.subpass);
+            }
+        }
+
+        if (device_extensions.vk_nv_framebuffer_mixed_samples) {
+            uint32_t raster_samples = static_cast<uint32_t>(GetNumSamples(pPipeline));
+            uint32_t subpass_color_samples = 0;
+
+            accumColorSamples(subpass_color_samples);
+
+            if (subpass_desc->pDepthStencilAttachment &&
+                subpass_desc->pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
+                const auto attachment = subpass_desc->pDepthStencilAttachment->attachment;
+                const uint32_t subpass_depth_samples =
+                    static_cast<uint32_t>(pPipeline->rp_state->createInfo.pAttachments[attachment].samples);
+
+                if (pPipeline->graphicsPipelineCI.pDepthStencilState) {
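+                    // The depth/stencil sample-count requirement only applies when some form of
+                    // depth, depth-bounds, or stencil testing is enabled.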
+                    const bool ds_test_enabled =
+                        (pPipeline->graphicsPipelineCI.pDepthStencilState->depthTestEnable == VK_TRUE) ||
+                        (pPipeline->graphicsPipelineCI.pDepthStencilState->depthBoundsTestEnable == VK_TRUE) ||
+                        (pPipeline->graphicsPipelineCI.pDepthStencilState->stencilTestEnable == VK_TRUE);
+
+                    if (ds_test_enabled && (!IsPowerOfTwo(subpass_depth_samples) || (raster_samples != subpass_depth_samples))) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                                        HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-subpass-01411",
+                                        "vkCreateGraphicsPipelines: pCreateInfo[%d].pMultisampleState->rasterizationSamples (%u) "
+                                        "does not match the number of samples of the RenderPass depth attachment (%u).",
+                                        pipelineIndex, raster_samples, subpass_depth_samples);
+                    }
+                }
+            }
+
+            if (IsPowerOfTwo(subpass_color_samples)) {
+                if (raster_samples < subpass_color_samples) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                                    HandleToUint64(device), "VUID-VkGraphicsPipelineCreateInfo-subpass-01412",
+                                    "vkCreateGraphicsPipelines: pCreateInfo[%d].pMultisampleState->rasterizationSamples (%u) "
+                                    "is not greater than or equal to the number of samples of the RenderPass color attachment "
+                                    "(%u).",
+                                    pipelineIndex, raster_samples, subpass_color_samples);
+                }
+
+                if (pPipeline->graphicsPipelineCI.pMultisampleState) {
+                    if ((raster_samples > subpass_color_samples) &&
+                        (pPipeline->graphicsPipelineCI.pMultisampleState->sampleShadingEnable == VK_TRUE)) {
+                        skip |=
+                            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                                    HandleToUint64(device), "VUID-VkPipelineMultisampleStateCreateInfo-rasterizationSamples-01415",
+                                    "vkCreateGraphicsPipelines: pCreateInfo[%d].pMultisampleState->sampleShadingEnable must be "
+                                    "VK_FALSE when "
+                                    "pCreateInfo[%d].pMultisampleState->rasterizationSamples (%u) is greater than the number of "
+                                    "samples of the "
+                                    "subpass color attachment (%u).",
+                                    pipelineIndex, pipelineIndex, raster_samples, subpass_color_samples);
+                    }
+
+                    const auto *coverage_modulation_state = lvl_find_in_chain<VkPipelineCoverageModulationStateCreateInfoNV>(
+                        pPipeline->graphicsPipelineCI.pMultisampleState->pNext);
+
+                    if (coverage_modulation_state && (coverage_modulation_state->coverageModulationTableEnable == VK_TRUE)) {
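+                        // When the modulation table is enabled it must contain exactly
+                        // rasterizationSamples / colorSamples entries.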
+                        if (coverage_modulation_state->coverageModulationTableCount != (raster_samples / subpass_color_samples)) {
+                            skip |=
+                                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                                        HandleToUint64(device),
+                                        "VUID-VkPipelineCoverageModulationStateCreateInfoNV-coverageModulationTableEnable-01405",
+                                        "vkCreateGraphicsPipelines: pCreateInfos[%d] VkPipelineCoverageModulationStateCreateInfoNV "
+                                        "coverageModulationTableCount of %u is invalid.",
+                                        pipelineIndex, coverage_modulation_state->coverageModulationTableCount);
+                        }
+                    }
+                }
+            }
+        }
+
+        if (device_extensions.vk_nv_fragment_coverage_to_color) {
+            const auto coverage_to_color_state =
+                lvl_find_in_chain<VkPipelineCoverageToColorStateCreateInfoNV>(pPipeline->graphicsPipelineCI.pMultisampleState);
+
+            if (coverage_to_color_state && coverage_to_color_state->coverageToColorEnable == VK_TRUE) {
+                bool attachment_is_valid = false;
+                std::string error_detail;
+
+                if (coverage_to_color_state->coverageToColorLocation < subpass_desc->colorAttachmentCount) {
+                    const auto color_attachment_ref =
+                        subpass_desc->pColorAttachments[coverage_to_color_state->coverageToColorLocation];
+                    if (color_attachment_ref.attachment != VK_ATTACHMENT_UNUSED) {
+                        const auto color_attachment = pPipeline->rp_state->createInfo.pAttachments[color_attachment_ref.attachment];
+
+                        switch (color_attachment.format) {
+                            case VK_FORMAT_R8_UINT:
+                            case VK_FORMAT_R8_SINT:
+                            case VK_FORMAT_R16_UINT:
+                            case VK_FORMAT_R16_SINT:
+                            case VK_FORMAT_R32_UINT:
+                            case VK_FORMAT_R32_SINT:
+                                attachment_is_valid = true;
+                                break;
+                            default:
+                                string_sprintf(&error_detail, "references an attachment with an invalid format (%s).",
+                                               string_VkFormat(color_attachment.format));
+                                break;
+                        }
+                    } else {
+                        string_sprintf(&error_detail,
+                                       "references an invalid attachment. The subpass pColorAttachments[%" PRIu32
+                                       "].attachment has the value "
+                                       "VK_ATTACHMENT_UNUSED.",
+                                       coverage_to_color_state->coverageToColorLocation);
+                    }
+                } else {
+                    string_sprintf(&error_detail,
+                                   "references a non-existent attachment since the subpass colorAttachmentCount is %" PRIu32 ".",
+                                   subpass_desc->colorAttachmentCount);
+                }
+
+                if (!attachment_is_valid) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                                    HandleToUint64(device),
+                                    "VUID-VkPipelineCoverageToColorStateCreateInfoNV-coverageToColorEnable-01404",
+                                    "vkCreateGraphicsPipelines: pCreateInfos[%" PRId32
+                                    "].pMultisampleState VkPipelineCoverageToColorStateCreateInfoNV "
+                                    "coverageToColorLocation = %" PRIu32 " %s",
+                                    pipelineIndex, coverage_to_color_state->coverageToColorLocation, error_detail.c_str());
+                }
+            }
+        }
+    }
+
+    return skip;
+}
+
+// Block of code at start here specifically for managing/tracking DSs
+
+// Validate that given set is valid and that it's not being used by an in-flight CmdBuffer
+// func_str is the name of the calling function
+// Return false if no errors occur
+// Return true if validation error occurs and callback returns true (to skip upcoming API call down the chain)
+bool CoreChecks::ValidateIdleDescriptorSet(VkDescriptorSet set, const char *func_str) const {
+    if (disabled.idle_descriptor_set) return false;
+    bool skip = false;
+    auto set_node = setMap.find(set);
+    if (set_node == setMap.end()) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+                        HandleToUint64(set), kVUID_Core_DrawState_DoubleDestroy,
+                        "Cannot call %s() on %s that has not been allocated.", func_str, report_data->FormatHandle(set).c_str());
+    } else {
+        // TODO : This covers various error cases so should pass error enum into this function and use passed in enum here
+        if (set_node->second->in_use.load()) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+                            HandleToUint64(set), "VUID-vkFreeDescriptorSets-pDescriptorSets-00309",
+                            "Cannot call %s() on %s that is in use by a command buffer.", func_str,
+                            report_data->FormatHandle(set).c_str());
+        }
+    }
+    return skip;
+}
+
+// If a renderpass is active, verify that the given command type is appropriate for current subpass state
+bool CoreChecks::ValidateCmdSubpassState(const CMD_BUFFER_STATE *pCB, const CMD_TYPE cmd_type) const {
+    if (!pCB->activeRenderPass) return false;
+    bool skip = false;
+    if (pCB->activeSubpassContents == VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS &&
+        (cmd_type != CMD_EXECUTECOMMANDS && cmd_type != CMD_NEXTSUBPASS && cmd_type != CMD_ENDRENDERPASS &&
+         cmd_type != CMD_NEXTSUBPASS2KHR && cmd_type != CMD_ENDRENDERPASS2KHR)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(pCB->commandBuffer), kVUID_Core_DrawState_InvalidCommandBuffer,
+                        "Commands cannot be called in a subpass using secondary command buffers.");
+    } else if (pCB->activeSubpassContents == VK_SUBPASS_CONTENTS_INLINE && cmd_type == CMD_EXECUTECOMMANDS) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(pCB->commandBuffer), kVUID_Core_DrawState_InvalidCommandBuffer,
+                        "vkCmdExecuteCommands() cannot be called in a subpass using inline commands.");
+    }
+    return skip;
+}
+
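+// Verify that the command buffer was allocated from a pool whose queue family supports at least one of
+// required_flags; if not, log the supplied error_code together with a readable list of acceptable flags.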
+bool CoreChecks::ValidateCmdQueueFlags(const CMD_BUFFER_STATE *cb_node, const char *caller_name, VkQueueFlags required_flags,
+                                       const char *error_code) const {
+    auto pool = cb_node->command_pool.get();
+    if (pool) {
+        VkQueueFlags queue_flags = GetPhysicalDeviceState()->queue_family_properties[pool->queueFamilyIndex].queueFlags;
+        if (!(required_flags & queue_flags)) {
+            string required_flags_string;
+            for (auto flag : {VK_QUEUE_TRANSFER_BIT, VK_QUEUE_GRAPHICS_BIT, VK_QUEUE_COMPUTE_BIT}) {
+                if (flag & required_flags) {
+                    if (required_flags_string.size()) {
+                        required_flags_string += " or ";
+                    }
+                    required_flags_string += string_VkQueueFlagBits(flag);
+                }
+            }
+            return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                           HandleToUint64(cb_node->commandBuffer), error_code,
+                           "Cannot call %s on a command buffer allocated from a pool without %s capabilities.", caller_name,
+                           required_flags_string.c_str());
+        }
+    }
+    return false;
+}
+
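+// Return a short phrase describing how a bound object can invalidate a command buffer; used when
+// reporting invalid command buffers below.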
+static char const *GetCauseStr(VulkanTypedHandle obj) {
+    if (obj.type == kVulkanObjectTypeDescriptorSet) return "destroyed or updated";
+    if (obj.type == kVulkanObjectTypeCommandBuffer) return "destroyed or rerecorded";
+    return "destroyed";
+}
+
+bool CoreChecks::ReportInvalidCommandBuffer(const CMD_BUFFER_STATE *cb_state, const char *call_source) const {
+    bool skip = false;
+    for (auto obj : cb_state->broken_bindings) {
+        const char *cause_str = GetCauseStr(obj);
+        string VUID;
+        string_sprintf(&VUID, "%s-%s", kVUID_Core_DrawState_InvalidCommandBuffer, object_string[obj.type]);
+        skip |=
+            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                    HandleToUint64(cb_state->commandBuffer), VUID.c_str(),
+                    "You are adding %s to %s that is invalid because bound %s was %s.", call_source,
+                    report_data->FormatHandle(cb_state->commandBuffer).c_str(), report_data->FormatHandle(obj).c_str(), cause_str);
+    }
+    return skip;
+}
+
+// 'commandBuffer must be in the recording state' valid usage error code for each command
+// Autogenerated as part of the vk_validation_error_message.h codegen
+static const std::array<const char *, CMD_RANGE_SIZE> must_be_recording_list = {{VUID_MUST_BE_RECORDING_LIST}};
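+// The array is indexed by CMD_TYPE, so ValidateCmd() can look up the per-command VUID directly.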
+
+// Validate the given command being added to the specified cmd buffer, flagging errors if CB is not in the recording state or if
+// there's an issue with the Cmd ordering
+bool CoreChecks::ValidateCmd(const CMD_BUFFER_STATE *cb_state, const CMD_TYPE cmd, const char *caller_name) const {
+    switch (cb_state->state) {
+        case CB_RECORDING:
+            return ValidateCmdSubpassState(cb_state, cmd);
+
+        case CB_INVALID_COMPLETE:
+        case CB_INVALID_INCOMPLETE:
+            return ReportInvalidCommandBuffer(cb_state, caller_name);
+
+        default:
+            assert(cmd != CMD_NONE);
+            const auto error = must_be_recording_list[cmd];
+            return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                           HandleToUint64(cb_state->commandBuffer), error,
+                           "You must call vkBeginCommandBuffer() before this call to %s.", caller_name);
+    }
+}
+
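+// deviceMask may only reference physical devices that exist in the device group, i.e. it must be
+// strictly less than (1 << physical_device_count).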
+bool CoreChecks::ValidateDeviceMaskToPhysicalDeviceCount(uint32_t deviceMask, VkDebugReportObjectTypeEXT VUID_handle_type,
+                                                         uint64_t VUID_handle, const char *VUID) const {
+    bool skip = false;
+    uint32_t count = 1 << physical_device_count;
+    if (count <= deviceMask) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VUID_handle_type, VUID_handle, VUID,
+                        "deviceMask(0x%" PRIx32 ") is invalid. Physical device count is %" PRIu32 ".", deviceMask,
+                        physical_device_count);
+    }
+    return skip;
+}
+
+bool CoreChecks::ValidateDeviceMaskToZero(uint32_t deviceMask, VkDebugReportObjectTypeEXT VUID_handle_type, uint64_t VUID_handle,
+                                          const char *VUID) const {
+    bool skip = false;
+    if (deviceMask == 0) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VUID_handle_type, VUID_handle, VUID,
+                        "deviceMask(0x%" PRIx32 ") must be non-zero.", deviceMask);
+    }
+    return skip;
+}
+
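+// Every bit set in deviceMask must also be set in the command buffer's initial device mask.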
+bool CoreChecks::ValidateDeviceMaskToCommandBuffer(const CMD_BUFFER_STATE *pCB, uint32_t deviceMask,
+                                                   VkDebugReportObjectTypeEXT VUID_handle_type, uint64_t VUID_handle,
+                                                   const char *VUID) const {
+    bool skip = false;
+    if ((deviceMask & pCB->initial_device_mask) != deviceMask) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VUID_handle_type, VUID_handle, VUID,
+                        "deviceMask(0x%" PRIx32 ") is not a subset of %s initial device mask(0x%" PRIx32 ").", deviceMask,
+                        report_data->FormatHandle(pCB->commandBuffer).c_str(), pCB->initial_device_mask);
+    }
+    return skip;
+}
+
+bool CoreChecks::ValidateDeviceMaskToRenderPass(const CMD_BUFFER_STATE *pCB, uint32_t deviceMask,
+                                                VkDebugReportObjectTypeEXT VUID_handle_type, uint64_t VUID_handle,
+                                                const char *VUID) const {
+    bool skip = false;
+    if ((deviceMask & pCB->active_render_pass_device_mask) != deviceMask) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VUID_handle_type, VUID_handle, VUID,
+                        "deviceMask(0x%" PRIx32 ") is not a subset of %s device mask(0x%" PRIx32 ").", deviceMask,
+                        report_data->FormatHandle(pCB->activeRenderPass->renderPass).c_str(), pCB->active_render_pass_device_mask);
+    }
+    return skip;
+}
+
+// Flags validation error if the associated call is made inside a render pass. The apiName routine should ONLY be called outside a
+// render pass.
+bool CoreChecks::InsideRenderPass(const CMD_BUFFER_STATE *pCB, const char *apiName, const char *msgCode) const {
+    bool inside = false;
+    if (pCB->activeRenderPass) {
+        inside = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                         HandleToUint64(pCB->commandBuffer), msgCode, "%s: It is invalid to issue this call inside an active %s.",
+                         apiName, report_data->FormatHandle(pCB->activeRenderPass->renderPass).c_str());
+    }
+    return inside;
+}
+
+// Flags validation error if the associated call is made outside a render pass. The apiName
+// routine should ONLY be called inside a render pass.
+bool CoreChecks::OutsideRenderPass(const CMD_BUFFER_STATE *pCB, const char *apiName, const char *msgCode) const {
+    bool outside = false;
+    if (((pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_PRIMARY) && (!pCB->activeRenderPass)) ||
+        ((pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) && (!pCB->activeRenderPass) &&
+         !(pCB->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT))) {
+        outside = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                          HandleToUint64(pCB->commandBuffer), msgCode, "%s: This call must be issued inside an active render pass.",
+                          apiName);
+    }
+    return outside;
+}
+
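+// Verify that requested_queue_family is less than the queue family count previously returned to the
+// application by vkGetPhysicalDeviceQueueFamilyProperties (or its 2/KHR variant).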
+bool CoreChecks::ValidateQueueFamilyIndex(const PHYSICAL_DEVICE_STATE *pd_state, uint32_t requested_queue_family,
+                                          const char *err_code, const char *cmd_name, const char *queue_family_var_name) const {
+    bool skip = false;
+
+    if (requested_queue_family >= pd_state->queue_family_known_count) {
+        const char *conditional_ext_cmd =
+            instance_extensions.vk_khr_get_physical_device_properties_2 ? " or vkGetPhysicalDeviceQueueFamilyProperties2[KHR]" : "";
+
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+                        HandleToUint64(pd_state->phys_device), err_code,
+                        "%s: %s (= %" PRIu32
+                        ") is not less than any previously obtained pQueueFamilyPropertyCount from "
+                        "vkGetPhysicalDeviceQueueFamilyProperties%s (i.e. is not less than %s).",
+                        cmd_name, queue_family_var_name, requested_queue_family, conditional_ext_cmd,
+                        std::to_string(pd_state->queue_family_known_count).c_str());
+    }
+    return skip;
+}
+
+// Verify VkDeviceQueueCreateInfos
+bool CoreChecks::ValidateDeviceQueueCreateInfos(const PHYSICAL_DEVICE_STATE *pd_state, uint32_t info_count,
+                                                const VkDeviceQueueCreateInfo *infos) const {
+    bool skip = false;
+
+    std::unordered_set<uint32_t> queue_family_set;
+
+    for (uint32_t i = 0; i < info_count; ++i) {
+        const auto requested_queue_family = infos[i].queueFamilyIndex;
+
+        std::string queue_family_var_name = "pCreateInfo->pQueueCreateInfos[" + std::to_string(i) + "].queueFamilyIndex";
+        skip |= ValidateQueueFamilyIndex(pd_state, requested_queue_family, "VUID-VkDeviceQueueCreateInfo-queueFamilyIndex-00381",
+                                         "vkCreateDevice", queue_family_var_name.c_str());
+
+        if (queue_family_set.insert(requested_queue_family).second == false) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                            HandleToUint64(pd_state->phys_device), "VUID-VkDeviceCreateInfo-queueFamilyIndex-00372",
+                            "CreateDevice(): %s (=%" PRIu32 ") is not unique within pQueueCreateInfos.",
+                            queue_family_var_name.c_str(), requested_queue_family);
+        }
+
+        // Verify that requested queue count of queue family is known to be valid at this point in time
+        if (requested_queue_family < pd_state->queue_family_known_count) {
+            const auto requested_queue_count = infos[i].queueCount;
+            const bool queue_family_has_props = requested_queue_family < pd_state->queue_family_properties.size();
+            // spec guarantees at least one queue for each queue family
+            const uint32_t available_queue_count =
+                queue_family_has_props ? pd_state->queue_family_properties[requested_queue_family].queueCount : 1;
+            const char *conditional_ext_cmd = instance_extensions.vk_khr_get_physical_device_properties_2
+                                                  ? " or vkGetPhysicalDeviceQueueFamilyProperties2[KHR]"
+                                                  : "";
+
+            if (requested_queue_count > available_queue_count) {
+                const std::string count_note =
+                    queue_family_has_props
+                        ? "i.e. is not less than or equal to " +
+                              std::to_string(pd_state->queue_family_properties[requested_queue_family].queueCount)
+                        : "the pQueueFamilyProperties[" + std::to_string(requested_queue_family) + "] was never obtained";
+
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+                    HandleToUint64(pd_state->phys_device), "VUID-VkDeviceQueueCreateInfo-queueCount-00382",
+                    "vkCreateDevice: pCreateInfo->pQueueCreateInfos[%" PRIu32 "].queueCount (=%" PRIu32
+                    ") is not less than or equal to available queue count for this pCreateInfo->pQueueCreateInfos[%" PRIu32
+                    "].queueFamilyIndex (=%" PRIu32 ") obtained previously from vkGetPhysicalDeviceQueueFamilyProperties%s (%s).",
+                    i, requested_queue_count, i, requested_queue_family, conditional_ext_cmd, count_note.c_str());
+            }
+        }
+    }
+
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
+                                             const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) const {
+    bool skip = false;
+    auto pd_state = GetPhysicalDeviceState(gpu);
+
+    // TODO: object_tracker should perhaps do this instead
+    //       and it does not seem to currently work anyway -- the loader just crashes before this point
+    if (!pd_state) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0,
+                        kVUID_Core_DevLimit_MustQueryCount,
+                        "Invalid call to vkCreateDevice() w/o first calling vkEnumeratePhysicalDevices().");
+    } else {
+        skip |= ValidateDeviceQueueCreateInfos(pd_state, pCreateInfo->queueCreateInfoCount, pCreateInfo->pQueueCreateInfos);
+    }
+    return skip;
+}
+
+void CoreChecks::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
+                                            const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, VkResult result) {
+    // The state tracker sets up the device state
+    StateTracker::PostCallRecordCreateDevice(gpu, pCreateInfo, pAllocator, pDevice, result);
+
+    // Add the callback hooks for the functions that are either broadly or deeply used and that the ValidationStateTracker refactor
+    // would be messier without.
+    // TODO: Find a good way to do this hooklessly.
+    ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
+    ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, LayerObjectTypeCoreValidation);
+    CoreChecks *core_checks = static_cast<CoreChecks *>(validation_data);
+    core_checks->SetSetImageViewInitialLayoutCallback(
+        [core_checks](CMD_BUFFER_STATE *cb_node, const IMAGE_VIEW_STATE &iv_state, VkImageLayout layout) -> void {
+            core_checks->SetImageViewInitialLayout(cb_node, iv_state, layout);
+        });
+}
+
+void CoreChecks::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
+    if (!device) return;
+    imageSubresourceMap.clear();
+    imageLayoutMap.clear();
+
+    StateTracker::PreCallRecordDestroyDevice(device, pAllocator);
+}
+
+// For given stage mask, if Geometry shader stage is on w/o GS being enabled, report geo_error_id
+//   and if Tessellation Control or Evaluation shader stages are on w/o TS being enabled, report tess_error_id.
+// Similarly for mesh and task shaders.
+bool CoreChecks::ValidateStageMaskGsTsEnables(VkPipelineStageFlags stageMask, const char *caller, const char *geo_error_id,
+                                              const char *tess_error_id, const char *mesh_error_id,
+                                              const char *task_error_id) const {
+    bool skip = false;
+    if (!enabled_features.core.geometryShader && (stageMask & VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, geo_error_id,
+                        "%s call includes a stageMask with VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT bit set when device does not have "
+                        "geometryShader feature enabled.",
+                        caller);
+    }
+    if (!enabled_features.core.tessellationShader &&
+        (stageMask & (VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT | VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT))) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, tess_error_id,
+                        "%s call includes a stageMask with VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT and/or "
+                        "VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT bit(s) set when device does not have "
+                        "tessellationShader feature enabled.",
+                        caller);
+    }
+    if (!enabled_features.mesh_shader.meshShader && (stageMask & VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, mesh_error_id,
+                        "%s call includes a stageMask with VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV bit set when device does not have "
+                        "VkPhysicalDeviceMeshShaderFeaturesNV::meshShader feature enabled.",
+                        caller);
+    }
+    if (!enabled_features.mesh_shader.taskShader && (stageMask & VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, task_error_id,
+                        "%s call includes a stageMask with VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV bit set when device does not have "
+                        "VkPhysicalDeviceMeshShaderFeaturesNV::taskShader feature enabled.",
+                        caller);
+    }
+    return skip;
+}
+
+// Note: This function assumes that the global lock is held by the calling thread.
+// For the given queue, verify the queue state up to the given seq number.
+// Currently the only check is to make sure that if there are events to be waited on prior to
+//  a QueryReset, make sure that all such events have been signalled.
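+//  The implementation walks a worklist of queues: starting from the initial queue/sequence, it follows
+//  cross-queue semaphore waits and extends the target sequence number of any queue this one depends on.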
+bool CoreChecks::VerifyQueueStateToSeq(const QUEUE_STATE *initial_queue, uint64_t initial_seq) const {
+    bool skip = false;
+
+    // sequence number we want to validate up to, per queue
+    std::unordered_map<const QUEUE_STATE *, uint64_t> target_seqs{{initial_queue, initial_seq}};
+    // sequence number we've completed validation for, per queue
+    std::unordered_map<const QUEUE_STATE *, uint64_t> done_seqs;
+    std::vector<const QUEUE_STATE *> worklist{initial_queue};
+
+    while (worklist.size()) {
+        auto queue = worklist.back();
+        worklist.pop_back();
+
+        auto target_seq = target_seqs[queue];
+        auto seq = std::max(done_seqs[queue], queue->seq);
+        auto sub_it = queue->submissions.begin() + int(seq - queue->seq);  // seq >= queue->seq
+
+        for (; seq < target_seq; ++sub_it, ++seq) {
+            for (auto &wait : sub_it->waitSemaphores) {
+                auto other_queue = GetQueueState(wait.queue);
+
+                if (other_queue == queue) continue;  // semaphores /always/ point backwards, so no point here.
+
+                auto other_target_seq = std::max(target_seqs[other_queue], wait.seq);
+                auto other_done_seq = std::max(done_seqs[other_queue], other_queue->seq);
+
+                // if this wait is for another queue, and covers new sequence
+                // numbers beyond what we've already validated, mark the new
+                // target seq and (possibly-re)add the queue to the worklist.
+                if (other_done_seq < other_target_seq) {
+                    target_seqs[other_queue] = other_target_seq;
+                    worklist.push_back(other_queue);
+                }
+            }
+        }
+
+        // finally mark the point we've now validated this queue to.
+        done_seqs[queue] = seq;
+    }
+
+    return skip;
+}
+
+// When the given fence is retired, verify outstanding queue operations through the point of the fence
+bool CoreChecks::VerifyQueueStateToFence(VkFence fence) const {
+    auto fence_state = GetFenceState(fence);
+    if (fence_state && fence_state->scope == kSyncScopeInternal && VK_NULL_HANDLE != fence_state->signaler.first) {
+        return VerifyQueueStateToSeq(GetQueueState(fence_state->signaler.first), fence_state->signaler.second);
+    }
+    return false;
+}
+
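+// A command buffer may be submitted while already in flight, or more than once in the same submission,
+// only if it was begun with VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT.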
+bool CoreChecks::ValidateCommandBufferSimultaneousUse(const CMD_BUFFER_STATE *pCB, int current_submit_count) const {
+    bool skip = false;
+    if ((pCB->in_use.load() || current_submit_count > 1) &&
+        !(pCB->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0,
+                        "VUID-vkQueueSubmit-pCommandBuffers-00071", "%s is already in use and is not marked for simultaneous use.",
+                        report_data->FormatHandle(pCB->commandBuffer).c_str());
+    }
+    return skip;
+}
+
+bool CoreChecks::ValidateCommandBufferState(const CMD_BUFFER_STATE *cb_state, const char *call_source, int current_submit_count,
+                                            const char *vu_id) const {
+    bool skip = false;
+    if (disabled.command_buffer_state) return skip;
+    // Validate ONE_TIME_SUBMIT_BIT CB is not being submitted more than once
+    if ((cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT) &&
+        (cb_state->submitCount + current_submit_count > 1)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0,
+                        kVUID_Core_DrawState_CommandBufferSingleSubmitViolation,
+                        "%s was begun w/ VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT set, but has been submitted 0x%" PRIxLEAST64
+                        " times.",
+                        report_data->FormatHandle(cb_state->commandBuffer).c_str(), cb_state->submitCount + current_submit_count);
+    }
+
+    // Validate that cmd buffers have been updated
+    switch (cb_state->state) {
+        case CB_INVALID_INCOMPLETE:
+        case CB_INVALID_COMPLETE:
+            skip |= ReportInvalidCommandBuffer(cb_state, call_source);
+            break;
+
+        case CB_NEW:
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            (uint64_t)(cb_state->commandBuffer), vu_id,
+                            "%s used in the call to %s is unrecorded and contains no commands.",
+                            report_data->FormatHandle(cb_state->commandBuffer).c_str(), call_source);
+            break;
+
+        case CB_RECORDING:
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_state->commandBuffer), kVUID_Core_DrawState_NoEndCommandBuffer,
+                            "You must call vkEndCommandBuffer() on %s before this call to %s!",
+                            report_data->FormatHandle(cb_state->commandBuffer).c_str(), call_source);
+            break;
+
+        default: /* recorded */
+            break;
+    }
+    return skip;
+}
+
+// Check that the queue family index of 'queue' matches one of the entries in pQueueFamilyIndices
+bool CoreChecks::ValidImageBufferQueue(const CMD_BUFFER_STATE *cb_node, const VulkanTypedHandle &object, uint32_t queueFamilyIndex,
+                                       uint32_t count, const uint32_t *indices) const {
+    bool found = false;
+    bool skip = false;
+    for (uint32_t i = 0; i < count; i++) {
+        if (indices[i] == queueFamilyIndex) {
+            found = true;
+            break;
+        }
+    }
+
+    if (!found) {
+        skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[object.type], object.handle,
+                       kVUID_Core_DrawState_InvalidQueueFamily,
+                       "vkQueueSubmit: %s contains %s which was not created allowing concurrent access to "
+                       "this queue family %d.",
+                       report_data->FormatHandle(cb_node->commandBuffer).c_str(), report_data->FormatHandle(object).c_str(),
+                       queueFamilyIndex);
+    }
+    return skip;
+}
+
+// Validate that queueFamilyIndices of primary command buffers match this queue
+// Secondary command buffers were previously validated in vkCmdExecuteCommands().
+bool CoreChecks::ValidateQueueFamilyIndices(const CMD_BUFFER_STATE *pCB, VkQueue queue) const {
+    bool skip = false;
+    auto pPool = pCB->command_pool.get();
+    auto queue_state = GetQueueState(queue);
+
+    if (pPool && queue_state) {
+        if (pPool->queueFamilyIndex != queue_state->queueFamilyIndex) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(pCB->commandBuffer), "VUID-vkQueueSubmit-pCommandBuffers-00074",
+                            "vkQueueSubmit: Primary %s created in queue family %d is being submitted on %s "
+                            "from queue family %d.",
+                            report_data->FormatHandle(pCB->commandBuffer).c_str(), pPool->queueFamilyIndex,
+                            report_data->FormatHandle(queue).c_str(), queue_state->queueFamilyIndex);
+        }
+
+        // Ensure that any bound images or buffers created with SHARING_MODE_CONCURRENT have access to the current queue family
+        for (const auto &object : pCB->object_bindings) {
+            if (object.type == kVulkanObjectTypeImage) {
+                auto image_state = object.node ? (IMAGE_STATE *)object.node : GetImageState(object.Cast<VkImage>());
+                if (image_state && image_state->createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) {
+                    skip |= ValidImageBufferQueue(pCB, object, queue_state->queueFamilyIndex,
+                                                  image_state->createInfo.queueFamilyIndexCount,
+                                                  image_state->createInfo.pQueueFamilyIndices);
+                }
+            } else if (object.type == kVulkanObjectTypeBuffer) {
+                auto buffer_state = object.node ? (BUFFER_STATE *)object.node : GetBufferState(object.Cast<VkBuffer>());
+                if (buffer_state && buffer_state->createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) {
+                    skip |= ValidImageBufferQueue(pCB, object, queue_state->queueFamilyIndex,
+                                                  buffer_state->createInfo.queueFamilyIndexCount,
+                                                  buffer_state->createInfo.pQueueFamilyIndices);
+                }
+            }
+        }
+    }
+
+    return skip;
+}
+
+// Validate that a submitted command buffer that contains performance queries held the profiling lock
+// while its commands were recorded.
+bool CoreChecks::ValidatePerformanceQueries(const CMD_BUFFER_STATE *pCB, VkQueue queue, VkQueryPool &first_query_pool,
+                                            uint32_t counterPassIndex) const {
+    bool skip = false;
+    bool different_pools = false;
+    bool indexed_different_pool = false;
+
+    if (pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
+        for (const auto &secondaryCB : pCB->linkedCommandBuffers)
+            skip |= ValidatePerformanceQueries(secondaryCB, queue, first_query_pool, counterPassIndex);
+    }
+
+    for (const auto &query : pCB->startedQueries) {
+        const auto query_pool_state = GetQueryPoolState(query.pool);
+
+        if (query_pool_state->createInfo.queryType != VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) continue;
+
+        if (counterPassIndex >= query_pool_state->n_performance_passes) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(pCB->commandBuffer), "VUID-VkPerformanceQuerySubmitInfoKHR-counterPassIndex-03221",
+                            "Invalid counterPassIndex (%u, maximum allowed %u) value for query pool %s.", counterPassIndex,
+                            query_pool_state->n_performance_passes, report_data->FormatHandle(query.pool).c_str());
+        }
+
+        if (!pCB->performance_lock_acquired || pCB->performance_lock_released) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(pCB->commandBuffer), "VUID-vkQueueSubmit-pCommandBuffers-03220",
+                            "Command buffer %s was submitted and contains a performance query, but the "
+                            "profiling lock was not held continuously throughout the recording of commands.",
+                            report_data->FormatHandle(pCB->commandBuffer).c_str());
+        }
+
+        if (query_pool_state->has_perf_scope_command_buffer && (pCB->commandCount - 1) != query.endCommandIndex) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(pCB->commandBuffer), "VUID-vkCmdEndQuery-queryPool-03227",
+                            "vkCmdEndQuery: Query pool %s was created with a counter of scope "
+                            "VK_QUERY_SCOPE_COMMAND_BUFFER_KHR but the end of the query is not the last "
+                            "command in the command buffer %s.",
+                            report_data->FormatHandle(query.pool).c_str(), report_data->FormatHandle(pCB->commandBuffer).c_str());
+        }
+
+        if (first_query_pool != VK_NULL_HANDLE) {
+            if (query_pool_state->pool != first_query_pool) {
+                different_pools = true;
+                indexed_different_pool = query.indexed;
+            }
+        } else
+            first_query_pool = query_pool_state->pool;
+    }
+
+    if (different_pools && !enabled_features.performance_query_features.performanceCounterMultipleQueryPools) {
+        skip |= log_msg(
+            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+            HandleToUint64(pCB->commandBuffer),
+            indexed_different_pool ? "VUID-vkCmdBeginQueryIndexedEXT-queryPool-03226" : "VUID-vkCmdBeginQuery-queryPool-03226",
+            "Command buffer %s contains more than one performance query pool but "
+            "performanceCounterMultipleQueryPools is not enabled.",
+            report_data->FormatHandle(pCB->commandBuffer).c_str());
+    }
+
+    return skip;
+}
+
+bool CoreChecks::ValidatePrimaryCommandBufferState(const CMD_BUFFER_STATE *pCB, int current_submit_count,
+                                                   QFOTransferCBScoreboards<VkImageMemoryBarrier> *qfo_image_scoreboards,
+                                                   QFOTransferCBScoreboards<VkBufferMemoryBarrier> *qfo_buffer_scoreboards) const {
+    // Track in-use for resources off of primary and any secondary CBs
+    bool skip = false;
+
+    if (pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(pCB->commandBuffer), "VUID-VkSubmitInfo-pCommandBuffers-00075",
+                        "Command buffer %s was included in the pCommandBuffers array of QueueSubmit but was allocated with "
+                        "VK_COMMAND_BUFFER_LEVEL_SECONDARY.",
+                        report_data->FormatHandle(pCB->commandBuffer).c_str());
+    } else {
+        for (auto pSubCB : pCB->linkedCommandBuffers) {
+            skip |= ValidateQueuedQFOTransfers(pSubCB, qfo_image_scoreboards, qfo_buffer_scoreboards);
+            // TODO: replace with InvalidateCommandBuffers() at recording.
+            if ((pSubCB->primaryCommandBuffer != pCB->commandBuffer) &&
+                !(pSubCB->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0,
+                                "VUID-vkQueueSubmit-pCommandBuffers-00073",
+                                "%s was submitted with secondary %s but that buffer has subsequently been bound to "
+                                "primary %s and it does not have VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set.",
+                                report_data->FormatHandle(pCB->commandBuffer).c_str(),
+                                report_data->FormatHandle(pSubCB->commandBuffer).c_str(),
+                                report_data->FormatHandle(pSubCB->primaryCommandBuffer).c_str());
+            }
+        }
+    }
+
+    // If USAGE_SIMULTANEOUS_USE_BIT not set then CB cannot already be executing on device
+    skip |= ValidateCommandBufferSimultaneousUse(pCB, current_submit_count);
+
+    skip |= ValidateQueuedQFOTransfers(pCB, qfo_image_scoreboards, qfo_buffer_scoreboards);
+
+    skip |= ValidateCommandBufferState(pCB, "vkQueueSubmit()", current_submit_count, "VUID-vkQueueSubmit-pCommandBuffers-00072");
+
+    return skip;
+}
+
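+// A fence passed to a queue submission must not already be in use by another submission and must not
+// still be in the signaled state from a previous use.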
+bool CoreChecks::ValidateFenceForSubmit(const FENCE_STATE *pFence) const {
+    bool skip = false;
+
+    if (pFence && pFence->scope == kSyncScopeInternal) {
+        if (pFence->state == FENCE_INFLIGHT) {
+            // TODO: opportunities for "VUID-vkQueueSubmit-fence-00064", "VUID-vkQueueBindSparse-fence-01114",
+            // "VUID-vkAcquireNextImageKHR-fence-01287"
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
+                            HandleToUint64(pFence->fence), kVUID_Core_DrawState_InvalidFence,
+                            "%s is already in use by another submission.", report_data->FormatHandle(pFence->fence).c_str());
+        }
+
+        else if (pFence->state == FENCE_RETIRED) {
+            // TODO: opportunities for "VUID-vkQueueSubmit-fence-00063", "VUID-vkQueueBindSparse-fence-01113",
+            // "VUID-vkAcquireNextImageKHR-fence-01287"
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
+                            HandleToUint64(pFence->fence), kVUID_Core_MemTrack_FenceState,
+                            "%s submitted in SIGNALED state. Fences must be reset before being submitted.",
+                            report_data->FormatHandle(pFence->fence).c_str());
+        }
+    }
+
+    return skip;
+}
+
+void CoreChecks::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence,
+                                           VkResult result) {
+    StateTracker::PostCallRecordQueueSubmit(queue, submitCount, pSubmits, fence, result);
+
+    // The triply nested loop below duplicates the one in the StateTracker, but avoids the need for two additional callbacks.
+    for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
+        const VkSubmitInfo *submit = &pSubmits[submit_idx];
+        for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
+            auto cb_node = GetCBState(submit->pCommandBuffers[i]);
+            if (cb_node) {
+                for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
+                    UpdateCmdBufImageLayouts(secondaryCmdBuffer);
+                    RecordQueuedQFOTransfers(secondaryCmdBuffer);
+                }
+                UpdateCmdBufImageLayouts(cb_node);
+                RecordQueuedQFOTransfers(cb_node);
+            }
+        }
+    }
+}
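+
+// Validate the wait and signal semaphores of a single VkSubmitInfo: binary semaphores must be able to make
+// forward progress (no double-signal, no wait without a pending signal), and timeline semaphores must be
+// accompanied by a VkTimelineSemaphoreSubmitInfoKHR with matching value counts. The sets/maps passed in
+// accumulate semaphore state across all submits of the vkQueueSubmit call.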
+bool CoreChecks::ValidateSemaphoresForSubmit(VkQueue queue, const VkSubmitInfo *submit,
+                                             unordered_set<VkSemaphore> *unsignaled_sema_arg,
+                                             unordered_set<VkSemaphore> *signaled_sema_arg,
+                                             unordered_set<VkSemaphore> *internal_sema_arg,
+                                             unordered_map<VkSemaphore, std::set<uint64_t>> *timeline_values_arg) const {
+    bool skip = false;
+    auto &signaled_semaphores = *signaled_sema_arg;
+    auto &unsignaled_semaphores = *unsignaled_sema_arg;
+    auto &internal_semaphores = *internal_sema_arg;
+    auto &timeline_values = *timeline_values_arg;
+    unordered_map<VkSemaphore, std::set<uint64_t>>::iterator it;
+    auto *timeline_semaphore_submit_info = lvl_find_in_chain<VkTimelineSemaphoreSubmitInfoKHR>(submit->pNext);
+
+    for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
+        skip |=
+            ValidateStageMaskGsTsEnables(submit->pWaitDstStageMask[i], "vkQueueSubmit()",
+                                         "VUID-VkSubmitInfo-pWaitDstStageMask-00076", "VUID-VkSubmitInfo-pWaitDstStageMask-00077",
+                                         "VUID-VkSubmitInfo-pWaitDstStageMask-02089", "VUID-VkSubmitInfo-pWaitDstStageMask-02090");
+        VkSemaphore semaphore = submit->pWaitSemaphores[i];
+        const auto *pSemaphore = GetSemaphoreState(semaphore);
+        if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && !timeline_semaphore_submit_info) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+                            HandleToUint64(semaphore), "VUID-VkSubmitInfo-pWaitSemaphores-03239",
+                            "VkQueueSubmit: %s is a timeline semaphore, but the submit info does not "
+                            "include an instance of VkTimelineSemaphoreSubmitInfoKHR.",
+                            report_data->FormatHandle(semaphore).c_str());
+        }
+        if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && timeline_semaphore_submit_info &&
+            submit->waitSemaphoreCount != timeline_semaphore_submit_info->waitSemaphoreValueCount) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+                            HandleToUint64(semaphore), "VUID-VkSubmitInfo-pNext-03240",
+                            "VkQueueSubmit: %s is a timeline semaphore, it contains an instance of "
+                            "VkTimelineSemaphoreSubmitInfoKHR, but waitSemaphoreValueCount is different than "
+                            "waitSemaphoreCount",
+                            report_data->FormatHandle(semaphore).c_str());
+        }
+        if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR &&
+            (pSemaphore->scope == kSyncScopeInternal || internal_semaphores.count(semaphore))) {
+            if (unsignaled_semaphores.count(semaphore) || (!(signaled_semaphores.count(semaphore)) && !(pSemaphore->signaled))) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+                                HandleToUint64(semaphore), kVUID_Core_DrawState_QueueForwardProgress,
+                                "%s is waiting on %s that has no way to be signaled.", report_data->FormatHandle(queue).c_str(),
+                                report_data->FormatHandle(semaphore).c_str());
+            } else {
+                signaled_semaphores.erase(semaphore);
+                unsignaled_semaphores.insert(semaphore);
+            }
+        }
+        if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR && pSemaphore->scope == kSyncScopeExternalTemporary) {
+            internal_semaphores.insert(semaphore);
+        }
+        if (!skip && pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR) {
+            auto &values = timeline_values[semaphore];
+            if (values.empty()) {
+                values.insert(pSemaphore->payload);
+            }
+            values.insert(timeline_semaphore_submit_info->pWaitSemaphoreValues[i]);
+        }
+    }
+    for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
+        VkSemaphore semaphore = submit->pSignalSemaphores[i];
+        const auto *pSemaphore = GetSemaphoreState(semaphore);
+        if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && !timeline_semaphore_submit_info) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+                            HandleToUint64(semaphore), "VUID-VkSubmitInfo-pWaitSemaphores-03239",
+                            "VkQueueSubmit: %s is a timeline semaphore, but the submit info does not "
+                            "include an instance of VkTimelineSemaphoreSubmitInfoKHR.",
+                            report_data->FormatHandle(semaphore).c_str());
+        }
+        if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && timeline_semaphore_submit_info &&
+            submit->signalSemaphoreCount != timeline_semaphore_submit_info->signalSemaphoreValueCount) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+                            HandleToUint64(semaphore), "VUID-VkSubmitInfo-pNext-03241",
+                            "VkQueueSubmit: %s is a timeline semaphore, it contains an instance of "
+                            "VkTimelineSemaphoreSubmitInfoKHR, but signalSemaphoreValueCount is different than "
+                            "signalSemaphoreCount",
+                            report_data->FormatHandle(semaphore).c_str());
+        }
+        if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && timeline_semaphore_submit_info &&
+            timeline_semaphore_submit_info->pSignalSemaphoreValues[i] <= pSemaphore->payload) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+                            HandleToUint64(semaphore), "VUID-VkSubmitInfo-pSignalSemaphores-03242",
+                            "VkQueueSubmit: signal value in %s must be greater than current timeline semaphore %s value",
+                            report_data->FormatHandle(queue).c_str(), report_data->FormatHandle(semaphore).c_str());
+        }
+        if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR &&
+            (pSemaphore->scope == kSyncScopeInternal || internal_semaphores.count(semaphore))) {
+            if (signaled_semaphores.count(semaphore) || (!(unsignaled_semaphores.count(semaphore)) && pSemaphore->signaled)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+                                HandleToUint64(semaphore), kVUID_Core_DrawState_QueueForwardProgress,
+                                "%s is signaling %s that was previously signaled by %s but has not since "
+                                "been waited on by any queue.",
+                                report_data->FormatHandle(queue).c_str(), report_data->FormatHandle(semaphore).c_str(),
+                                report_data->FormatHandle(pSemaphore->signaler.first).c_str());
+            } else {
+                unsignaled_semaphores.erase(semaphore);
+                signaled_semaphores.insert(semaphore);
+            }
+        }
+        if (!skip && pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR) {
+            auto &values = timeline_values[semaphore];
+            if (values.empty()) {
+                values.insert(pSemaphore->payload);
+            }
+            values.insert(timeline_semaphore_submit_info->pSignalSemaphoreValues[i]);
+        }
+    }
+
+    return skip;
+}
+
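+// For a timeline semaphore, verify that the wait/signal value being submitted is not more than
+// maxTimelineSemaphoreValueDifference greater than the closest smaller value already known for that
+// semaphore (its current payload or another value in this submission batch).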
+bool CoreChecks::ValidateMaxTimelineSemaphoreValueDifference(VkQueue queue, VkSemaphore semaphore, uint64_t semaphoreTriggerValue,
+                                                             unordered_map<VkSemaphore, std::set<uint64_t>> *timeline_values_arg,
+                                                             const char *func_name, const char *vuid) const {
+    bool skip = false;
+
+    auto &timeline_values = *timeline_values_arg;
+    const auto pSemaphore = GetSemaphoreState(semaphore);
+    if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR) {
+        assert(semaphoreTriggerValue > 0);
+
+        // This set contains the current payload value, plus all the wait/signal
+        // values the semaphore can take, in order
+        auto &values = timeline_values[semaphore];
+
+        // Search for the previous value and check if the difference is bigger
+        // than allowed
+        auto it = values.find(semaphoreTriggerValue);
+
+        if (it == begin(values)) {
+            return false;
+        }
+
+        if (semaphoreTriggerValue - *(--it) > phys_dev_ext_props.timeline_semaphore_props.maxTimelineSemaphoreValueDifference) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+                            HandleToUint64(semaphore), vuid,
+                            "%s: %s contains timeline semaphore %s that sets its wait value with a margin "
+                            "greater than maxTimelineSemaphoreValueDifference",
+                            func_name, report_data->FormatHandle(queue).c_str(), report_data->FormatHandle(semaphore).c_str());
+        }
+    }
+
+    return skip;
+}
+
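+// Validate every command buffer in a single VkSubmitInfo: image layouts, primary command buffer state,
+// queue family compatibility, performance queries, descriptor sets flagged for submit-time validation,
+// and any queued submit-time validation callbacks recorded on the command buffer.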
+bool CoreChecks::ValidateCommandBuffersForSubmit(VkQueue queue, const VkSubmitInfo *submit,
+                                                 ImageSubresPairLayoutMap *localImageLayoutMap_arg,
+                                                 QueryMap *local_query_to_state_map,
+                                                 vector<VkCommandBuffer> *current_cmds_arg) const {
+    bool skip = false;
+    auto queue_state = GetQueueState(queue);
+
+    ImageSubresPairLayoutMap &localImageLayoutMap = *localImageLayoutMap_arg;
+    vector<VkCommandBuffer> &current_cmds = *current_cmds_arg;
+
+    QFOTransferCBScoreboards<VkImageMemoryBarrier> qfo_image_scoreboards;
+    QFOTransferCBScoreboards<VkBufferMemoryBarrier> qfo_buffer_scoreboards;
+    EventToStageMap localEventToStageMap;
+
+    const auto perf_submit = lvl_find_in_chain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
+
+    for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
+        const auto *cb_node = GetCBState(submit->pCommandBuffers[i]);
+        if (cb_node) {
+            skip |= ValidateCmdBufImageLayouts(cb_node, imageLayoutMap, &localImageLayoutMap);
+            current_cmds.push_back(submit->pCommandBuffers[i]);
+            skip |= ValidatePrimaryCommandBufferState(
+                cb_node, (int)std::count(current_cmds.begin(), current_cmds.end(), submit->pCommandBuffers[i]),
+                &qfo_image_scoreboards, &qfo_buffer_scoreboards);
+            skip |= ValidateQueueFamilyIndices(cb_node, queue);
+            VkQueryPool first_query_pool = VK_NULL_HANDLE;
+            skip |= ValidatePerformanceQueries(cb_node, queue, first_query_pool, perf_submit ? perf_submit->counterPassIndex : 0);
+
+            for (auto descriptorSet : cb_node->validate_descriptorsets_in_queuesubmit) {
+                const cvdescriptorset::DescriptorSet *set_node = GetSetNode(descriptorSet.first);
+                if (set_node) {
+                    for (auto pipe : descriptorSet.second) {
+                        for (auto binding : pipe.second) {
+                            std::string error;
+                            std::vector<uint32_t> dynamicOffsets;
+                            // dynamic data isn't allowed in UPDATE_AFTER_BIND, so dynamicOffsets is always empty.
+                            if (!ValidateDescriptorSetBindingData(cb_node, set_node, dynamicOffsets, binding.first, binding.second,
+                                                                  "vkQueueSubmit()", &error)) {
+                                skip |= log_msg(
+                                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+                                    HandleToUint64(descriptorSet.first), kVUID_Core_DrawState_DescriptorSetNotUpdated,
+                                    "%s was bound with the following validation error at %s time: %s",
+                                    report_data->FormatHandle(descriptorSet.first).c_str(), "vkQueueSubmit()", error.c_str());
+                            }
+                        }
+                    }
+                }
+            }
+
+            // Potential early exit here as bad object state may crash in delayed function calls
+            if (skip) {
+                return true;
+            }
+
+            // Call submit-time functions to validate or update local mirrors of state (to preserve const-ness at validate time)
+            for (auto &function : cb_node->queue_submit_functions) {
+                skip |= function(this, queue_state);
+            }
+            for (auto &function : cb_node->eventUpdates) {
+                skip |= function(this, /*do_validate*/ true, &localEventToStageMap);
+            }
+            for (auto &function : cb_node->queryUpdates) {
+                skip |= function(this, /*do_validate*/ true, local_query_to_state_map);
+            }
+        }
+    }
+    return skip;
+}
+
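+// vkQueueSubmit validation entry point: check the fence, then each VkSubmitInfo's semaphores, command
+// buffers, and device-group masks, and finally the timeline semaphore value differences across the batch.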
+bool CoreChecks::PreCallValidateQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
+                                            VkFence fence) const {
+    const auto *pFence = GetFenceState(fence);
+    bool skip = ValidateFenceForSubmit(pFence);
+    if (skip) {
+        return true;
+    }
+
+    unordered_set<VkSemaphore> signaled_semaphores;
+    unordered_set<VkSemaphore> unsignaled_semaphores;
+    unordered_set<VkSemaphore> internal_semaphores;
+    unordered_map<VkSemaphore, std::set<uint64_t>> timeline_values;
+    vector<VkCommandBuffer> current_cmds;
+    ImageSubresPairLayoutMap localImageLayoutMap;
+    QueryMap local_query_to_state_map;
+
+    // Now verify each individual submit
+    for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
+        const VkSubmitInfo *submit = &pSubmits[submit_idx];
+        skip |= ValidateSemaphoresForSubmit(queue, submit, &unsignaled_semaphores, &signaled_semaphores, &internal_semaphores,
+                                            &timeline_values);
+        skip |= ValidateCommandBuffersForSubmit(queue, submit, &localImageLayoutMap, &local_query_to_state_map, &current_cmds);
+
+        auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupSubmitInfo>(submit->pNext);
+        if (chained_device_group_struct && chained_device_group_struct->commandBufferCount > 0) {
+            for (uint32_t i = 0; i < chained_device_group_struct->commandBufferCount; ++i) {
+                skip |= ValidateDeviceMaskToPhysicalDeviceCount(chained_device_group_struct->pCommandBufferDeviceMasks[i],
+                                                                VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, HandleToUint64(queue),
+                                                                "VUID-VkDeviceGroupSubmitInfo-pCommandBufferDeviceMasks-00086");
+            }
+        }
+    }
+
+    if (skip) return skip;
+
+    // Now verify maxTimelineSemaphoreValueDifference
+    for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
+        const VkSubmitInfo *submit = &pSubmits[submit_idx];
+        auto *info = lvl_find_in_chain<VkTimelineSemaphoreSubmitInfoKHR>(submit->pNext);
+        for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
+            VkSemaphore semaphore = submit->pWaitSemaphores[i];
+            skip |= ValidateMaxTimelineSemaphoreValueDifference(queue, semaphore, info ? info->pWaitSemaphoreValues[i] : 0,
+                                                                &timeline_values, "VkQueueSubmit",
+                                                                "VUID-VkSubmitInfo-pWaitSemaphores-03243");
+        }
+        for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
+            VkSemaphore semaphore = submit->pSignalSemaphores[i];
+            skip |= ValidateMaxTimelineSemaphoreValueDifference(queue, semaphore, info ? info->pSignalSemaphoreValues[i] : 0,
+                                                                &timeline_values, "VkQueueSubmit",
+                                                                "VUID-VkSubmitInfo-pSignalSemaphores-03244");
+        }
+    }
+
+    return skip;
+}
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+// Android-specific validation that uses types defined only on Android and only for NDK versions
+// that support the VK_ANDROID_external_memory_android_hardware_buffer extension.
+// This chunk could move into a separate core_validation_android.cpp file... ?
+
+// clang-format off
+
+// Map external format and usage flags to/from equivalent Vulkan flags
+// (Tables as of v1.1.92)
+
+// AHardwareBuffer Format                       Vulkan Format
+// ======================                       =============
+// AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM        VK_FORMAT_R8G8B8A8_UNORM
+// AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM        VK_FORMAT_R8G8B8A8_UNORM
+// AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM          VK_FORMAT_R8G8B8_UNORM
+// AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM          VK_FORMAT_R5G6B5_UNORM_PACK16
+// AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT    VK_FORMAT_R16G16B16A16_SFLOAT
+// AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM     VK_FORMAT_A2B10G10R10_UNORM_PACK32
+// AHARDWAREBUFFER_FORMAT_D16_UNORM             VK_FORMAT_D16_UNORM
+// AHARDWAREBUFFER_FORMAT_D24_UNORM             VK_FORMAT_X8_D24_UNORM_PACK32
+// AHARDWAREBUFFER_FORMAT_D24_UNORM_S8_UINT     VK_FORMAT_D24_UNORM_S8_UINT
+// AHARDWAREBUFFER_FORMAT_D32_FLOAT             VK_FORMAT_D32_SFLOAT
+// AHARDWAREBUFFER_FORMAT_D32_FLOAT_S8_UINT     VK_FORMAT_D32_SFLOAT_S8_UINT
+// AHARDWAREBUFFER_FORMAT_S8_UINT               VK_FORMAT_S8_UINT
+
+// The AHARDWAREBUFFER_FORMAT_* are an enum in the NDK headers, but get passed in to Vulkan
+// as uint32_t. Casting the enums here avoids scattering casts around in the code.
+std::map<uint32_t, VkFormat> ahb_format_map_a2v = {
+    { (uint32_t)AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM,        VK_FORMAT_R8G8B8A8_UNORM },
+    { (uint32_t)AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM,        VK_FORMAT_R8G8B8A8_UNORM },
+    { (uint32_t)AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM,          VK_FORMAT_R8G8B8_UNORM },
+    { (uint32_t)AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM,          VK_FORMAT_R5G6B5_UNORM_PACK16 },
+    { (uint32_t)AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT,    VK_FORMAT_R16G16B16A16_SFLOAT },
+    { (uint32_t)AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM,     VK_FORMAT_A2B10G10R10_UNORM_PACK32 },
+    { (uint32_t)AHARDWAREBUFFER_FORMAT_D16_UNORM,             VK_FORMAT_D16_UNORM },
+    { (uint32_t)AHARDWAREBUFFER_FORMAT_D24_UNORM,             VK_FORMAT_X8_D24_UNORM_PACK32 },
+    { (uint32_t)AHARDWAREBUFFER_FORMAT_D24_UNORM_S8_UINT,     VK_FORMAT_D24_UNORM_S8_UINT },
+    { (uint32_t)AHARDWAREBUFFER_FORMAT_D32_FLOAT,             VK_FORMAT_D32_SFLOAT },
+    { (uint32_t)AHARDWAREBUFFER_FORMAT_D32_FLOAT_S8_UINT,     VK_FORMAT_D32_SFLOAT_S8_UINT },
+    { (uint32_t)AHARDWAREBUFFER_FORMAT_S8_UINT,               VK_FORMAT_S8_UINT }
+};
+
+// AHardwareBuffer Usage                        Vulkan Usage or Creation Flag (Intermixed - Aargh!)
+// =====================                        ===================================================
+// None                                         VK_IMAGE_USAGE_TRANSFER_SRC_BIT
+// None                                         VK_IMAGE_USAGE_TRANSFER_DST_BIT
+// AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE      VK_IMAGE_USAGE_SAMPLED_BIT
+// AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE      VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT
+// AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT       VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT
+// AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP           VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT
+// AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE    None
+// AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT      VK_IMAGE_CREATE_PROTECTED_BIT
+// None                                         VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT
+// None                                         VK_IMAGE_CREATE_EXTENDED_USAGE_BIT
+
+// Same casting rationale. De-mixing the table to prevent type confusion and aliasing
+std::map<uint64_t, VkImageUsageFlags> ahb_usage_map_a2v = {
+    { (uint64_t)AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE,    (VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT) },
+    { (uint64_t)AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT,     VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT },
+    { (uint64_t)AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE,  0 },   // No equivalent
+};
+
+std::map<uint64_t, VkImageCreateFlags> ahb_create_map_a2v = {
+    { (uint64_t)AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP,         VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT },
+    { (uint64_t)AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT,    VK_IMAGE_CREATE_PROTECTED_BIT },
+    { (uint64_t)AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE,  0 },   // No equivalent
+};
+
+std::map<VkImageUsageFlags, uint64_t> ahb_usage_map_v2a = {
+    { VK_IMAGE_USAGE_SAMPLED_BIT,           (uint64_t)AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE },
+    { VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT,  (uint64_t)AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE },
+    { VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,  (uint64_t)AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT  },
+};
+
+std::map<VkImageCreateFlags, uint64_t> ahb_create_map_v2a = {
+    { VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT,  (uint64_t)AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP },
+    { VK_IMAGE_CREATE_PROTECTED_BIT,        (uint64_t)AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT },
+};
+
+// clang-format on
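+
+// A quick illustration of how the tables above are used (illustrative only): each a2v map
+// translates an AHardwareBuffer format or usage bit into its Vulkan equivalent, e.g.
+//
+//     VkFormat fmt = ahb_format_map_a2v[(uint32_t)AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT];
+//     // fmt == VK_FORMAT_R16G16B16A16_SFLOAT
+//     VkImageUsageFlags usage = ahb_usage_map_a2v[(uint64_t)AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE];
+//     // usage == (VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT)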
+
+//
+// AHB-extension new APIs
+//
+bool CoreChecks::PreCallValidateGetAndroidHardwareBufferPropertiesANDROID(
+    VkDevice device, const struct AHardwareBuffer *buffer, VkAndroidHardwareBufferPropertiesANDROID *pProperties) const {
+    bool skip = false;
+    //  buffer must be a valid Android hardware buffer object with at least one of the AHARDWAREBUFFER_USAGE_GPU_* usage flags.
+    AHardwareBuffer_Desc ahb_desc;
+    AHardwareBuffer_describe(buffer, &ahb_desc);
+    uint32_t required_flags = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE | AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT |
+                              AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP | AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE |
+                              AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER;
+    if (0 == (ahb_desc.usage & required_flags)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        "VUID-vkGetAndroidHardwareBufferPropertiesANDROID-buffer-01884",
+                        "vkGetAndroidHardwareBufferPropertiesANDROID: The AHardwareBuffer's AHardwareBuffer_Desc.usage (0x%" PRIx64
+                        ") does not have any AHARDWAREBUFFER_USAGE_GPU_* flags set.",
+                        ahb_desc.usage);
+    }
+    return skip;
+}
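+
+// Illustrative sketch (not exercised by the layer) of the application-side call pattern that the
+// check above validates; 'device' is assumed to be a valid VkDevice and NDK level >= 26 is assumed:
+//
+//     AHardwareBuffer_Desc desc = {};
+//     desc.width = 64;
+//     desc.height = 64;
+//     desc.layers = 1;
+//     desc.format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
+//     desc.usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;  // at least one GPU_* flag, else 01884 fires
+//     AHardwareBuffer *ahb = nullptr;
+//     if (0 == AHardwareBuffer_allocate(&desc, &ahb)) {
+//         VkAndroidHardwareBufferPropertiesANDROID props = {};
+//         props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
+//         vkGetAndroidHardwareBufferPropertiesANDROID(device, ahb, &props);
+//         AHardwareBuffer_release(ahb);
+//     }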
+
+void CoreChecks::PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(VkDevice device, const struct AHardwareBuffer *buffer,
+                                                                         VkAndroidHardwareBufferPropertiesANDROID *pProperties,
+                                                                         VkResult result) {
+    if (VK_SUCCESS != result) return;
+    auto ahb_format_props = lvl_find_in_chain<VkAndroidHardwareBufferFormatPropertiesANDROID>(pProperties->pNext);
+    if (ahb_format_props) {
+        ahb_ext_formats_set.insert(ahb_format_props->externalFormat);
+    }
+}
+
+bool CoreChecks::PreCallValidateGetMemoryAndroidHardwareBufferANDROID(VkDevice device,
+                                                                      const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo,
+                                                                      struct AHardwareBuffer **pBuffer) const {
+    bool skip = false;
+    const DEVICE_MEMORY_STATE *mem_info = GetDevMemState(pInfo->memory);
+
+    // VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID must have been included in
+    // VkExportMemoryAllocateInfoKHR::handleTypes when memory was created.
+    if (!mem_info->is_export ||
+        (0 == (mem_info->export_handle_type_flags & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID))) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        "VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-handleTypes-01882",
+                        "vkGetMemoryAndroidHardwareBufferANDROID: %s was not allocated for export, or the "
+                        "export handleTypes (0x%" PRIx32
+                        ") did not contain VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID.",
+                        report_data->FormatHandle(pInfo->memory).c_str(), mem_info->export_handle_type_flags);
+    }
+
+    // If the pNext chain of the VkMemoryAllocateInfo used to allocate memory included a VkMemoryDedicatedAllocateInfo
+    // with non-NULL image member, then that image must already be bound to memory.
+    if (mem_info->is_dedicated && (VK_NULL_HANDLE != mem_info->dedicated_image)) {
+        const auto image_state = GetImageState(mem_info->dedicated_image);
+        if ((nullptr == image_state) || (0 == (image_state->GetBoundMemory().count(pInfo->memory)))) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                            HandleToUint64(device), "VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-pNext-01883",
+                            "vkGetMemoryAndroidHardwareBufferANDROID: %s was allocated using a dedicated "
+                            "%s, but that image is not bound to the VkDeviceMemory object.",
+                            report_data->FormatHandle(pInfo->memory).c_str(),
+                            report_data->FormatHandle(mem_info->dedicated_image).c_str());
+        }
+    }
+
+    return skip;
+}
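+
+// A minimal sketch of the export path checked above (illustrative only; 'device' and 'image' are
+// assumed to be valid handles and memoryTypeIndex is assumed to have been chosen already):
+//
+//     VkExportMemoryAllocateInfo export_info = {VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO};
+//     export_info.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
+//     VkMemoryDedicatedAllocateInfo dedicated_info = {VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO};
+//     dedicated_info.pNext = &export_info;
+//     dedicated_info.image = image;
+//     VkMemoryAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO};
+//     alloc_info.pNext = &dedicated_info;  // allocationSize stays 0 for a dedicated AHB export
+//     VkDeviceMemory memory = VK_NULL_HANDLE;
+//     vkAllocateMemory(device, &alloc_info, nullptr, &memory);
+//     vkBindImageMemory(device, image, memory, 0);  // must be bound before the export (01883)
+//     VkMemoryGetAndroidHardwareBufferInfoANDROID get_info = {
+//         VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID};
+//     get_info.memory = memory;
+//     AHardwareBuffer *ahb = nullptr;
+//     vkGetMemoryAndroidHardwareBufferANDROID(device, &get_info, &ahb);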
+
+//
+// AHB-specific validation within non-AHB APIs
+//
+bool CoreChecks::ValidateAllocateMemoryANDROID(const VkMemoryAllocateInfo *alloc_info) const {
+    bool skip = false;
+    auto import_ahb_info = lvl_find_in_chain<VkImportAndroidHardwareBufferInfoANDROID>(alloc_info->pNext);
+    auto exp_mem_alloc_info = lvl_find_in_chain<VkExportMemoryAllocateInfo>(alloc_info->pNext);
+    auto mem_ded_alloc_info = lvl_find_in_chain<VkMemoryDedicatedAllocateInfo>(alloc_info->pNext);
+
+    if ((import_ahb_info) && (NULL != import_ahb_info->buffer)) {
+        // This is an import with handleType of VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID
+        AHardwareBuffer_Desc ahb_desc = {};
+        AHardwareBuffer_describe(import_ahb_info->buffer, &ahb_desc);
+
+        //  If buffer is not NULL, it must be a valid Android hardware buffer object with AHardwareBuffer_Desc::format and
+        //  AHardwareBuffer_Desc::usage compatible with Vulkan as described in Android Hardware Buffers.
+        //
+        //  BLOB & GPU_DATA_BUFFER combo specifically allowed
+        if ((AHARDWAREBUFFER_FORMAT_BLOB != ahb_desc.format) || (0 == (ahb_desc.usage & AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER))) {
+            // Otherwise, must be a combination from the AHardwareBuffer Format and Usage Equivalence tables
+            // Usage must have at least one bit from the table. It may have additional bits not in the table
+            uint64_t ahb_equiv_usage_bits = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE | AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT |
+                                            AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP | AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE |
+                                            AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT;
+            if ((0 == (ahb_desc.usage & ahb_equiv_usage_bits)) || (0 == ahb_format_map_a2v.count(ahb_desc.format))) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                                HandleToUint64(device), "VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-01881",
+                                "vkAllocateMemory: The AHardwareBuffer_Desc's format ( %u ) and/or usage ( 0x%" PRIx64
+                                " ) are not compatible with Vulkan.",
+                                ahb_desc.format, ahb_desc.usage);
+            }
+        }
+
+        // Collect external buffer info
+        VkPhysicalDeviceExternalBufferInfo pdebi = {};
+        pdebi.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO;
+        pdebi.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
+        if (AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE & ahb_desc.usage) {
+            pdebi.usage |= ahb_usage_map_a2v[AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE];
+        }
+        if (AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT & ahb_desc.usage) {
+            pdebi.usage |= ahb_usage_map_a2v[AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT];
+        }
+        VkExternalBufferProperties ext_buf_props = {};
+        ext_buf_props.sType = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES;
+
+        DispatchGetPhysicalDeviceExternalBufferProperties(physical_device, &pdebi, &ext_buf_props);
+
+        // Collect external format info
+        VkPhysicalDeviceExternalImageFormatInfo pdeifi = {};
+        pdeifi.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO;
+        pdeifi.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
+        VkPhysicalDeviceImageFormatInfo2 pdifi2 = {};
+        pdifi2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
+        pdifi2.pNext = &pdeifi;
+        if (0 < ahb_format_map_a2v.count(ahb_desc.format)) pdifi2.format = ahb_format_map_a2v[ahb_desc.format];
+        pdifi2.type = VK_IMAGE_TYPE_2D;           // Seems likely
+        pdifi2.tiling = VK_IMAGE_TILING_OPTIMAL;  // Ditto
+        if (AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE & ahb_desc.usage) {
+            pdifi2.usage |= ahb_usage_map_a2v[AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE];
+        }
+        if (AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT & ahb_desc.usage) {
+            pdifi2.usage |= ahb_usage_map_a2v[AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT];
+        }
+        if (AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP & ahb_desc.usage) {
+            pdifi2.flags |= ahb_create_map_a2v[AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP];
+        }
+        if (AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT & ahb_desc.usage) {
+            pdifi2.flags |= ahb_create_map_a2v[AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT];
+        }
+
+        VkExternalImageFormatProperties ext_img_fmt_props = {};
+        ext_img_fmt_props.sType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES;
+        VkImageFormatProperties2 ifp2 = {};
+        ifp2.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
+        ifp2.pNext = &ext_img_fmt_props;
+
+        VkResult fmt_lookup_result = DispatchGetPhysicalDeviceImageFormatProperties2(physical_device, &pdifi2, &ifp2);
+
+        //  If buffer is not NULL, Android hardware buffers must be supported for import, as reported by
+        //  VkExternalImageFormatProperties or VkExternalBufferProperties.
+        if (0 == (ext_buf_props.externalMemoryProperties.externalMemoryFeatures & VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT)) {
+            if ((VK_SUCCESS != fmt_lookup_result) || (0 == (ext_img_fmt_props.externalMemoryProperties.externalMemoryFeatures &
+                                                            VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT))) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                                HandleToUint64(device), "VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-01880",
+                                "vkAllocateMemory: Neither the VkExternalImageFormatProperties nor the VkExternalBufferProperties "
+                                "structs for the AHardwareBuffer include the VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT flag.");
+            }
+        }
+
+        // Retrieve buffer and format properties of the provided AHardwareBuffer
+        VkAndroidHardwareBufferFormatPropertiesANDROID ahb_format_props = {};
+        ahb_format_props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
+        VkAndroidHardwareBufferPropertiesANDROID ahb_props = {};
+        ahb_props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
+        ahb_props.pNext = &ahb_format_props;
+        DispatchGetAndroidHardwareBufferPropertiesANDROID(device, import_ahb_info->buffer, &ahb_props);
+
+        // allocationSize must be the size returned by vkGetAndroidHardwareBufferPropertiesANDROID for the Android hardware buffer
+        if (alloc_info->allocationSize != ahb_props.allocationSize) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                            HandleToUint64(device), "VUID-VkMemoryAllocateInfo-allocationSize-02383",
+                            "vkAllocateMemory: VkMemoryAllocateInfo struct with chained VkImportAndroidHardwareBufferInfoANDROID "
+                            "struct, allocationSize (%" PRId64
+                            ") does not match the AHardwareBuffer's reported allocationSize (%" PRId64 ").",
+                            alloc_info->allocationSize, ahb_props.allocationSize);
+        }
+
+        // memoryTypeIndex must be one of those returned by vkGetAndroidHardwareBufferPropertiesANDROID for the AHardwareBuffer
+        // Note: memoryTypeIndex is an index, memoryTypeBits is a bitmask
+        uint32_t mem_type_bitmask = 1 << alloc_info->memoryTypeIndex;
+        if (0 == (mem_type_bitmask & ahb_props.memoryTypeBits)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                            HandleToUint64(device), "VUID-VkMemoryAllocateInfo-memoryTypeIndex-02385",
+                            "vkAllocateMemory: VkMemoryAllocateInfo struct with chained VkImportAndroidHardwareBufferInfoANDROID "
+                            "struct, memoryTypeIndex (%" PRId32
+                            ") does not correspond to a bit set in AHardwareBuffer's reported "
+                            "memoryTypeBits bitmask (0x%" PRIx32 ").",
+                            alloc_info->memoryTypeIndex, ahb_props.memoryTypeBits);
+        }
+
+        // Checks for allocations without a dedicated allocation requirement
+        if ((nullptr == mem_ded_alloc_info) || (VK_NULL_HANDLE == mem_ded_alloc_info->image)) {
+            // the Android hardware buffer must have a format of AHARDWAREBUFFER_FORMAT_BLOB and a usage that includes
+            // AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER
+            if (((uint64_t)AHARDWAREBUFFER_FORMAT_BLOB != ahb_desc.format) ||
+                (0 == (ahb_desc.usage & AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER))) {
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                    "VUID-VkMemoryAllocateInfo-pNext-02384",
+                    "vkAllocateMemory: VkMemoryAllocateInfo struct with chained VkImportAndroidHardwareBufferInfoANDROID "
+                    "struct without a dedicated allocation requirement, while the AHardwareBuffer_Desc's format ( %u ) is not "
+                    "AHARDWAREBUFFER_FORMAT_BLOB or usage (0x%" PRIx64 ") does not include AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER.",
+                    ahb_desc.format, ahb_desc.usage);
+            }
+        } else {  // Checks specific to import with a dedicated allocation requirement
+            const VkImageCreateInfo *ici = &(GetImageState(mem_ded_alloc_info->image)->createInfo);
+
+            // The Android hardware buffer's usage must include at least one of AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT or
+            // AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE
+            if (0 == (ahb_desc.usage & (AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT | AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE))) {
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                    "VUID-VkMemoryAllocateInfo-pNext-02386",
+                    "vkAllocateMemory: VkMemoryAllocateInfo struct with chained VkImportAndroidHardwareBufferInfoANDROID and a "
+                    "dedicated allocation requirement, while the AHardwareBuffer's usage (0x%" PRIx64
+                    ") contains neither AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT nor AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE.",
+                    ahb_desc.usage);
+            }
+
+            //  the format of image must be VK_FORMAT_UNDEFINED or the format returned by
+            //  vkGetAndroidHardwareBufferPropertiesANDROID
+            if ((ici->format != ahb_format_props.format) && (VK_FORMAT_UNDEFINED != ici->format)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                                HandleToUint64(device), "VUID-VkMemoryAllocateInfo-pNext-02387",
+                                "vkAllocateMemory: VkMemoryAllocateInfo struct with chained "
+                                "VkImportAndroidHardwareBufferInfoANDROID, the dedicated allocation image's "
+                                "format (%s) is not VK_FORMAT_UNDEFINED and does not match the AHardwareBuffer's format (%s).",
+                                string_VkFormat(ici->format), string_VkFormat(ahb_format_props.format));
+            }
+
+            // The width, height, and array layer dimensions of the image and the Android hardware buffer must be identical
+            if ((ici->extent.width != ahb_desc.width) || (ici->extent.height != ahb_desc.height) ||
+                (ici->arrayLayers != ahb_desc.layers)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                                HandleToUint64(device), "VUID-VkMemoryAllocateInfo-pNext-02388",
+                                "vkAllocateMemory: VkMemoryAllocateInfo struct with chained "
+                                "VkImportAndroidHardwareBufferInfoANDROID, the dedicated allocation image's "
+                                "width, height, and arrayLayers (%" PRId32 " %" PRId32 " %" PRId32
+                                ") do not match those of the AHardwareBuffer (%" PRId32 " %" PRId32 " %" PRId32 ").",
+                                ici->extent.width, ici->extent.height, ici->arrayLayers, ahb_desc.width, ahb_desc.height,
+                                ahb_desc.layers);
+            }
+
+            // If the Android hardware buffer's usage includes AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE, the image must
+            // have either a full mipmap chain or exactly 1 mip level.
+            //
+            // NOTE! The language of this VUID contradicts the language in the spec (1.1.93), which says "The
+            // AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE flag does not correspond to a Vulkan image usage or creation flag. Instead,
+            // its presence indicates that the Android hardware buffer contains a complete mipmap chain, and its absence indicates
+            // that the Android hardware buffer contains only a single mip level."
+            //
+            // TODO: This code implements the VUID's meaning, but it seems likely that the spec text is actually correct.
+            // Clarification requested.
+            if ((ahb_desc.usage & AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE) && (ici->mipLevels != 1) &&
+                (ici->mipLevels != FullMipChainLevels(ici->extent))) {
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                            HandleToUint64(device), "VUID-VkMemoryAllocateInfo-pNext-02389",
+                            "vkAllocateMemory: VkMemoryAllocateInfo struct with chained VkImportAndroidHardwareBufferInfoANDROID, "
+                            "usage includes AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE but mipLevels (%" PRId32
+                            ") is neither 1 nor full mip "
+                            "chain levels (%" PRId32 ").",
+                            ici->mipLevels, FullMipChainLevels(ici->extent));
+            }
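+
+            // Presumably FullMipChainLevels(extent) == 1 + floor(log2(max(width, height, depth)))
+            // (assumption; the helper is defined elsewhere), e.g. a 256x256 image has 9 levels, so
+            // the check above accepts either a single level or the complete chain.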
+
+            // each bit set in the usage of image must be listed in AHardwareBuffer Usage Equivalence, and if there is a
+            // corresponding AHARDWAREBUFFER_USAGE bit listed that bit must be included in the Android hardware buffer's
+            // AHardwareBuffer_Desc::usage
+            if (ici->usage &
+                ~(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
+                  VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT)) {
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                            HandleToUint64(device), "VUID-VkMemoryAllocateInfo-pNext-02390",
+                            "vkAllocateMemory: VkMemoryAllocateInfo struct with chained VkImportAndroidHardwareBufferInfoANDROID, "
+                            "dedicated image usage bits include one or more with no AHardwareBuffer equivalent.");
+            }
+
+            bool illegal_usage = false;
+            std::vector<VkImageUsageFlags> usages = {VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT,
+                                                     VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT};
+            for (VkImageUsageFlags ubit : usages) {
+                if (ici->usage & ubit) {
+                    uint64_t ahb_usage = ahb_usage_map_v2a[ubit];
+                    if (0 == (ahb_usage & ahb_desc.usage)) illegal_usage = true;
+                }
+            }
+            if (illegal_usage) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                                HandleToUint64(device), "VUID-VkMemoryAllocateInfo-pNext-02390",
+                                "vkAllocateMemory: VkMemoryAllocateInfo struct with chained "
+                                "VkImportAndroidHardwareBufferInfoANDROID, one or more AHardwareBuffer usage bits equivalent to "
+                                "the provided image's usage bits are missing from AHardwareBuffer_Desc.usage.");
+            }
+        }
+    } else {  // Not an import
+        if ((exp_mem_alloc_info) && (mem_ded_alloc_info) &&
+            (0 != (VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID & exp_mem_alloc_info->handleTypes)) &&
+            (VK_NULL_HANDLE != mem_ded_alloc_info->image)) {
+            // This is an Android HW Buffer export
+            if (0 != alloc_info->allocationSize) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                                HandleToUint64(device), "VUID-VkMemoryAllocateInfo-pNext-01874",
+                                "vkAllocateMemory: pNext chain indicates a dedicated Android Hardware Buffer export allocation, "
+                                "but allocationSize is non-zero.");
+            }
+        } else {
+            if (0 == alloc_info->allocationSize) {
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                    "VUID-VkMemoryAllocateInfo-pNext-01874",
+                    "vkAllocateMemory: pNext chain does not indicate a dedicated export allocation, but allocationSize is 0.");
+            }
+        }
+    }
+    return skip;
+}
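+
+// Worked example of the memoryTypeIndex / memoryTypeBits relationship relied on above
+// (illustrative only): memoryTypeBits has bit i set iff memory type index i is acceptable.
+//
+//     uint32_t memory_type_bits = 0x6;                   // types 1 and 2 are acceptable
+//     bool ok_1 = (memory_type_bits & (1u << 1)) != 0;   // true
+//     bool ok_3 = (memory_type_bits & (1u << 3)) != 0;   // false -> 02385 would fire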
+
+bool CoreChecks::ValidateGetImageMemoryRequirements2ANDROID(const VkImage image) const {
+    bool skip = false;
+
+    const IMAGE_STATE *image_state = GetImageState(image);
+    if (image_state->imported_ahb && (0 == image_state->GetBoundMemory().size())) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image),
+                        "VUID-VkImageMemoryRequirementsInfo2-image-01897",
+                        "vkGetImageMemoryRequirements2: Attempt to query layout from an image created with "
+                        "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID handleType, which has not yet been "
+                        "bound to memory.");
+    }
+    return skip;
+}
+
+static bool ValidateGetPhysicalDeviceImageFormatProperties2ANDROID(const debug_report_data *report_data,
+                                                                   const VkPhysicalDeviceImageFormatInfo2 *pImageFormatInfo,
+                                                                   const VkImageFormatProperties2 *pImageFormatProperties) {
+    bool skip = false;
+    const VkAndroidHardwareBufferUsageANDROID *ahb_usage =
+        lvl_find_in_chain<VkAndroidHardwareBufferUsageANDROID>(pImageFormatProperties->pNext);
+    if (nullptr != ahb_usage) {
+        const VkPhysicalDeviceExternalImageFormatInfo *pdeifi =
+            lvl_find_in_chain<VkPhysicalDeviceExternalImageFormatInfo>(pImageFormatInfo->pNext);
+        if ((nullptr == pdeifi) || (VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID != pdeifi->handleType)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-vkGetPhysicalDeviceImageFormatProperties2-pNext-01868",
+                            "vkGetPhysicalDeviceImageFormatProperties2: pImageFormatProperties includes a chained "
+                            "VkAndroidHardwareBufferUsageANDROID struct, but pImageFormatInfo does not include a chained "
+                            "VkPhysicalDeviceExternalImageFormatInfo struct with handleType "
+                            "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID.");
+        }
+    }
+    return skip;
+}
+
+bool CoreChecks::ValidateCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info) const {
+    const VkExternalFormatANDROID *ext_format_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
+    if ((nullptr != ext_format_android) && (0 != ext_format_android->externalFormat)) {
+        if (VK_FORMAT_UNDEFINED != create_info->format) {
+            return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, 0,
+                           "VUID-VkSamplerYcbcrConversionCreateInfo-format-01904",
+                           "vkCreateSamplerYcbcrConversion[KHR]: CreateInfo format is not VK_FORMAT_UNDEFINED while "
+                           "there is a chained VkExternalFormatANDROID struct.");
+        }
+    } else if (VK_FORMAT_UNDEFINED == create_info->format) {
+        return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, 0,
+                       "VUID-VkSamplerYcbcrConversionCreateInfo-format-01904",
+                       "vkCreateSamplerYcbcrConversion[KHR]: CreateInfo format is VK_FORMAT_UNDEFINED with no chained "
+                       "VkExternalFormatANDROID struct.");
+    }
+    return false;
+}
+
+#else  // !VK_USE_PLATFORM_ANDROID_KHR
+
+bool CoreChecks::ValidateAllocateMemoryANDROID(const VkMemoryAllocateInfo *alloc_info) const { return false; }
+
+static bool ValidateGetPhysicalDeviceImageFormatProperties2ANDROID(const debug_report_data *report_data,
+                                                                   const VkPhysicalDeviceImageFormatInfo2 *pImageFormatInfo,
+                                                                   const VkImageFormatProperties2 *pImageFormatProperties) {
+    return false;
+}
+
+bool CoreChecks::ValidateCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info) const {
+    return false;
+}
+
+bool CoreChecks::ValidateGetImageMemoryRequirements2ANDROID(const VkImage image) const { return false; }
+
+#endif  // VK_USE_PLATFORM_ANDROID_KHR
+
+bool CoreChecks::PreCallValidateAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
+                                               const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory) const {
+    bool skip = false;
+    if (memObjMap.size() >= phys_dev_props.limits.maxMemoryAllocationCount) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        kVUIDUndefined, "Number of currently valid memory objects is not less than the maximum allowed (%u).",
+                        phys_dev_props.limits.maxMemoryAllocationCount);
+    }
+
+    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
+        skip |= ValidateAllocateMemoryANDROID(pAllocateInfo);
+    } else {
+        if (0 == pAllocateInfo->allocationSize) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        "VUID-VkMemoryAllocateInfo-allocationSize-00638", "vkAllocateMemory: allocationSize is 0.");
+        }
+    }
+
+    auto chained_flags_struct = lvl_find_in_chain<VkMemoryAllocateFlagsInfo>(pAllocateInfo->pNext);
+    if (chained_flags_struct && chained_flags_struct->flags == VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT) {
+        skip |= ValidateDeviceMaskToPhysicalDeviceCount(chained_flags_struct->deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                                                        HandleToUint64(device), "VUID-VkMemoryAllocateFlagsInfo-deviceMask-00675");
+        skip |= ValidateDeviceMaskToZero(chained_flags_struct->deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                                         HandleToUint64(device), "VUID-VkMemoryAllocateFlagsInfo-deviceMask-00676");
+    }
+
+    if (pAllocateInfo->memoryTypeIndex >= phys_dev_mem_props.memoryTypeCount) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        "VUID-vkAllocateMemory-pAllocateInfo-01714",
+                        "vkAllocateMemory: attempting to allocate memory type %u, which is not a valid index. Device only "
+                        "advertises %u memory types.",
+                        pAllocateInfo->memoryTypeIndex, phys_dev_mem_props.memoryTypeCount);
+    } else {
+        if (pAllocateInfo->allocationSize >
+            phys_dev_mem_props.memoryHeaps[phys_dev_mem_props.memoryTypes[pAllocateInfo->memoryTypeIndex].heapIndex].size) {
+            skip |= log_msg(
+                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                "VUID-vkAllocateMemory-pAllocateInfo-01713",
+                "vkAllocateMemory: attempting to allocate %" PRIu64
+                " bytes from heap %u,"
+                "but size of that heap is only %" PRIu64 " bytes.",
+                pAllocateInfo->allocationSize, phys_dev_mem_props.memoryTypes[pAllocateInfo->memoryTypeIndex].heapIndex,
+                phys_dev_mem_props.memoryHeaps[phys_dev_mem_props.memoryTypes[pAllocateInfo->memoryTypeIndex].heapIndex].size);
+        }
+
+        if (!enabled_features.device_coherent_memory_features.deviceCoherentMemory &&
+            ((phys_dev_mem_props.memoryTypes[pAllocateInfo->memoryTypeIndex].propertyFlags &
+              VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD) != 0)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                            HandleToUint64(device), "VUID-vkAllocateMemory-deviceCoherentMemory-02790",
+                            "vkAllocateMemory: attempting to allocate memory type %u, which includes the "
+                            "VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD memory property, but the deviceCoherentMemory feature "
+                            "is not enabled.",
+                            pAllocateInfo->memoryTypeIndex);
+        }
+    }
+
+    // TODO: VUIDs ending in 00643, 00644, 00646, 00647, 01742, 01743, 01745, 00645, 00648, 01744
+    return skip;
+}
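+
+// Hypothetical helper (not part of the layer) showing the memory-type -> heap lookup performed
+// above, assuming 'props' was filled in by vkGetPhysicalDeviceMemoryProperties:
+//
+//     bool FitsInHeap(const VkPhysicalDeviceMemoryProperties &props, uint32_t type_index, VkDeviceSize size) {
+//         const uint32_t heap_index = props.memoryTypes[type_index].heapIndex;
+//         return size <= props.memoryHeaps[heap_index].size;
+//     }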
+
+// For the given object node, if it is in use, flag a validation error and return the callback result; otherwise return false
+bool CoreChecks::ValidateObjectNotInUse(const BASE_NODE *obj_node, const VulkanTypedHandle &obj_struct, const char *caller_name,
+                                        const char *error_code) const {
+    if (disabled.object_in_use) return false;
+    bool skip = false;
+    if (obj_node->in_use.load()) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[obj_struct.type], obj_struct.handle,
+                        error_code, "Cannot call %s on %s that is currently in use by a command buffer.", caller_name,
+                        report_data->FormatHandle(obj_struct).c_str());
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) const {
+    const DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
+    const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);
+    bool skip = false;
+    if (mem_info) {
+        skip |= ValidateObjectNotInUse(mem_info, obj_struct, "vkFreeMemory", "VUID-vkFreeMemory-memory-00677");
+    }
+    return skip;
+}
+
+// Validate that a given map-memory range is valid: the memory must not already be mapped,
+//  and the size of the map range must be:
+//  1. Not zero
+//  2. Within the size of the memory allocation
+bool CoreChecks::ValidateMapMemRange(const DEVICE_MEMORY_STATE *mem_info, VkDeviceSize offset, VkDeviceSize size) const {
+    bool skip = false;
+    assert(mem_info);
+    const auto mem = mem_info->mem;
+    if (size == 0) {
+        skip =
+            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, HandleToUint64(mem),
+                    kVUID_Core_MemTrack_InvalidMap, "VkMapMemory: Attempting to map memory range of size zero");
+    }
+
+    // It is an application error to call VkMapMemory on an object that is already mapped
+    if (mem_info->mapped_range.size != 0) {
+        skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+                       HandleToUint64(mem), kVUID_Core_MemTrack_InvalidMap,
+                       "VkMapMemory: Attempting to map memory on an already-mapped %s.", report_data->FormatHandle(mem).c_str());
+    }
+
+    // Validate that offset + size is within object's allocationSize
+    if (size == VK_WHOLE_SIZE) {
+        if (offset >= mem_info->alloc_info.allocationSize) {
+            skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+                           HandleToUint64(mem), kVUID_Core_MemTrack_InvalidMap,
+                           "Mapping Memory from 0x%" PRIx64 " to 0x%" PRIx64
+                           " with size of VK_WHOLE_SIZE oversteps total array size 0x%" PRIx64,
+                           offset, mem_info->alloc_info.allocationSize, mem_info->alloc_info.allocationSize);
+        }
+    } else {
+        if ((offset + size) > mem_info->alloc_info.allocationSize) {
+            skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+                           HandleToUint64(mem), "VUID-vkMapMemory-size-00681",
+                           "Mapping Memory from 0x%" PRIx64 " to 0x%" PRIx64 " oversteps total array size 0x%" PRIx64 ".", offset,
+                           size + offset, mem_info->alloc_info.allocationSize);
+        }
+    }
+    return skip;
+}
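+
+// The bounds rule above, restated with a worked example (illustrative only, allocationSize == 100):
+//
+//     // offset 40, size VK_WHOLE_SIZE -> maps [40, 100), valid since offset < allocationSize
+//     // offset 40, size 60            -> offset + size == 100 <= 100, valid
+//     // offset 40, size 70            -> offset + size == 110 >  100, triggers 00681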
+
+// Guard value for pad data
+static char NoncoherentMemoryFillValue = 0xb;
+
+void CoreChecks::InitializeShadowMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
+    auto mem_info = GetDevMemState(mem);
+    if (mem_info) {
+        uint32_t index = mem_info->alloc_info.memoryTypeIndex;
+        if (phys_dev_mem_props.memoryTypes[index].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) {
+            mem_info->shadow_copy = 0;
+        } else {
+            if (size == VK_WHOLE_SIZE) {
+                size = mem_info->alloc_info.allocationSize - offset;
+            }
+            mem_info->shadow_pad_size = phys_dev_props.limits.minMemoryMapAlignment;
+            assert(SafeModulo(mem_info->shadow_pad_size, phys_dev_props.limits.minMemoryMapAlignment) == 0);
+            // Ensure start of mapped region reflects hardware alignment constraints
+            uint64_t map_alignment = phys_dev_props.limits.minMemoryMapAlignment;
+
+            // From spec: (ppData - offset) must be aligned to at least limits::minMemoryMapAlignment.
+            uint64_t start_offset = offset % map_alignment;
+            // Data passed to driver will be wrapped by a guardband of data to detect over- or under-writes.
+            mem_info->shadow_copy_base =
+                malloc(static_cast<size_t>(2 * mem_info->shadow_pad_size + size + map_alignment + start_offset));
+
+            mem_info->shadow_copy =
+                reinterpret_cast<char *>((reinterpret_cast<uintptr_t>(mem_info->shadow_copy_base) + map_alignment) &
+                                         ~(map_alignment - 1)) +
+                start_offset;
+            assert(SafeModulo(reinterpret_cast<uintptr_t>(mem_info->shadow_copy) + mem_info->shadow_pad_size - start_offset,
+                              map_alignment) == 0);
+
+            memset(mem_info->shadow_copy, NoncoherentMemoryFillValue, static_cast<size_t>(2 * mem_info->shadow_pad_size + size));
+            *ppData = static_cast<char *>(mem_info->shadow_copy) + mem_info->shadow_pad_size;
+        }
+    }
+}
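+
+// The masking above is the usual power-of-two rounding trick (the spec requires
+// minMemoryMapAlignment to be a power of two): (p + a) & ~(a - 1) yields the first multiple of
+// 'a' strictly greater than the malloc'd base, leaving room in front for the guard band.
+//
+//     // base = 0x1003, a = 0x40  ->  (0x1003 + 0x40) & ~0x3F == 0x1040
+//     // base = 0x1000, a = 0x40  ->  (0x1000 + 0x40) & ~0x3F == 0x1040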
+
+bool CoreChecks::PreCallValidateWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences, VkBool32 waitAll,
+                                              uint64_t timeout) const {
+    // Verify fence status of submitted fences
+    bool skip = false;
+    for (uint32_t i = 0; i < fenceCount; i++) {
+        skip |= VerifyQueueStateToFence(pFences[i]);
+    }
+    return skip;
+}
+
+bool CoreChecks::ValidateGetDeviceQueue(uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue *pQueue, const char *valid_qfi_vuid,
+                                        const char *qfi_in_range_vuid) const {
+    bool skip = false;
+
+    skip |= ValidateDeviceQueueFamily(queueFamilyIndex, "vkGetDeviceQueue", "queueFamilyIndex", valid_qfi_vuid);
+    const auto &queue_data = queue_family_index_map.find(queueFamilyIndex);
+    if (queue_data != queue_family_index_map.end() && queue_data->second <= queueIndex) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        qfi_in_range_vuid,
+                        "vkGetDeviceQueue: queueIndex (=%" PRIu32
+                        ") is not less than the number of queues requested from queueFamilyIndex (=%" PRIu32
+                        ") when the device was created (i.e. is not less than %" PRIu32 ").",
+                        queueIndex, queueFamilyIndex, queue_data->second);
+    }
+    return skip;
+}
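+
+// Worked example for the check above (illustrative only): if device creation requested
+// VkDeviceQueueCreateInfo::queueCount == 2 for queueFamilyIndex 0, then queueIndex 0 and 1 are
+// valid for that family, while queueIndex 2 triggers the error above (e.g.
+// VUID-vkGetDeviceQueue-queueIndex-00385).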
+
+bool CoreChecks::PreCallValidateGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
+                                               VkQueue *pQueue) const {
+    return ValidateGetDeviceQueue(queueFamilyIndex, queueIndex, pQueue, "VUID-vkGetDeviceQueue-queueFamilyIndex-00384",
+                                  "VUID-vkGetDeviceQueue-queueIndex-00385");
+}
+
+bool CoreChecks::PreCallValidateQueueWaitIdle(VkQueue queue) const {
+    const QUEUE_STATE *queue_state = GetQueueState(queue);
+    return VerifyQueueStateToSeq(queue_state, queue_state->seq + queue_state->submissions.size());
+}
+
+bool CoreChecks::PreCallValidateDeviceWaitIdle(VkDevice device) const {
+    bool skip = false;
+    const auto &const_queue_map = queueMap;
+    for (auto &queue : const_queue_map) {
+        skip |= VerifyQueueStateToSeq(&queue.second, queue.second.seq + queue.second.submissions.size());
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
+                                                const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore) const {
+    bool skip = false;
+    auto *sem_type_create_info = lvl_find_in_chain<VkSemaphoreTypeCreateInfoKHR>(pCreateInfo->pNext);
+
+    if (sem_type_create_info && sem_type_create_info->semaphoreType == VK_SEMAPHORE_TYPE_TIMELINE_KHR &&
+        !enabled_features.timeline_semaphore_features.timelineSemaphore) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, 0,
+                        "VUID-VkSemaphoreTypeCreateInfoKHR-timelineSemaphore-03252",
+                        "VkCreateSemaphore: timelineSemaphore feature is not enabled, can not create timeline semaphores");
+    }
+
+    if (sem_type_create_info && sem_type_create_info->semaphoreType == VK_SEMAPHORE_TYPE_BINARY_KHR &&
+        sem_type_create_info->initialValue != 0) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, 0,
+                        "VUID-VkSemaphoreTypeCreateInfoKHR-semaphoreType-03279",
+                        "vkCreateSemaphore: if semaphoreType is VK_SEMAPHORE_TYPE_BINARY_KHR, initialValue must be zero");
+    }
+
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfoKHR *pWaitInfo,
+                                                  uint64_t timeout) const {
+    bool skip = false;
+
+    for (uint32_t i = 0; i < pWaitInfo->semaphoreCount; i++) {
+        auto *pSemaphore = GetSemaphoreState(pWaitInfo->pSemaphores[i]);
+        if (pSemaphore && pSemaphore->type != VK_SEMAPHORE_TYPE_TIMELINE_KHR) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+                            HandleToUint64(pWaitInfo->pSemaphores[i]), "VUID-VkSemaphoreWaitInfoKHR-pSemaphores-03256",
+                            "VkWaitSemaphoresKHR: all semaphores in pWaitInfo must be timeline semaphores, but %s is not",
+                            report_data->FormatHandle(pWaitInfo->pSemaphores[i]).c_str());
+        }
+    }
+
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) const {
+    const FENCE_STATE *fence_node = GetFenceState(fence);
+    bool skip = false;
+    if (fence_node) {
+        if (fence_node->scope == kSyncScopeInternal && fence_node->state == FENCE_INFLIGHT) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, HandleToUint64(fence),
+                        "VUID-vkDestroyFence-fence-01120", "%s is in use.", report_data->FormatHandle(fence).c_str());
+        }
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateDestroySemaphore(VkDevice device, VkSemaphore semaphore,
+                                                 const VkAllocationCallbacks *pAllocator) const {
+    const SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
+    const VulkanTypedHandle obj_struct(semaphore, kVulkanObjectTypeSemaphore);
+    bool skip = false;
+    if (sema_node) {
+        skip |= ValidateObjectNotInUse(sema_node, obj_struct, "vkDestroySemaphore", "VUID-vkDestroySemaphore-semaphore-01137");
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) const {
+    const EVENT_STATE *event_state = GetEventState(event);
+    const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
+    bool skip = false;
+    if (event_state) {
+        skip |= ValidateObjectNotInUse(event_state, obj_struct, "vkDestroyEvent", "VUID-vkDestroyEvent-event-01145");
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
+                                                 const VkAllocationCallbacks *pAllocator) const {
+    if (disabled.query_validation) return false;
+    const QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
+    const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
+    bool skip = false;
+    if (qp_state) {
+        skip |= ValidateObjectNotInUse(qp_state, obj_struct, "vkDestroyQueryPool", "VUID-vkDestroyQueryPool-queryPool-00793");
+    }
+    return skip;
+}
+
+bool CoreChecks::ValidatePerformanceQueryResults(const char *cmd_name, const QUERY_POOL_STATE *query_pool_state,
+                                                 uint32_t firstQuery, uint32_t queryCount, VkQueryResultFlags flags) const {
+    bool skip = false;
+
+    if (flags & (VK_QUERY_RESULT_WITH_AVAILABILITY_BIT | VK_QUERY_RESULT_PARTIAL_BIT | VK_QUERY_RESULT_64_BIT)) {
+        string invalid_flags_string;
+        for (auto flag : {VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, VK_QUERY_RESULT_PARTIAL_BIT, VK_QUERY_RESULT_64_BIT}) {
+            if (flag & flags) {
+                if (invalid_flags_string.size()) {
+                    invalid_flags_string += " and ";
+                }
+                invalid_flags_string += string_VkQueryResultFlagBits(flag);
+            }
+        }
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, 0,
+                        strcmp(cmd_name, "vkGetQueryPoolResults") == 0 ? "VUID-vkGetQueryPoolResults-queryType-03230"
+                                                                       : "VUID-vkCmdCopyQueryPoolResults-queryType-03233",
+                        "%s: QueryPool %s was created with a queryType of"
+                        "VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR but flags contains %s.",
+                        cmd_name, report_data->FormatHandle(query_pool_state->pool).c_str(), invalid_flags_string.c_str());
+    }
+
+    QueryObject query_obj{query_pool_state->pool, 0u};
+    for (uint32_t queryIndex = firstQuery; queryIndex < firstQuery + queryCount; queryIndex++) {
+        query_obj.query = queryIndex;
+        uint32_t submitted = 0;
+        for (uint32_t passIndex = 0; passIndex < query_pool_state->n_performance_passes; passIndex++) {
+            auto query_pass_iter = queryPassToStateMap.find(QueryObjectPass(query_obj, passIndex));
+            if (query_pass_iter != queryPassToStateMap.end() && query_pass_iter->second == QUERYSTATE_AVAILABLE) submitted++;
+        }
+        if (submitted < query_pool_state->n_performance_passes) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, 0,
+                            "VUID-vkGetQueryPoolResults-queryType-03231",
+                            "%s: QueryPool %s has %u performance query passes, but the query has only been "
+                            "submitted for %u of the passes.",
+                            cmd_name, report_data->FormatHandle(query_pool_state->pool).c_str(),
+                            query_pool_state->n_performance_passes, submitted);
+        }
+    }
+
+    return skip;
+}
+
+bool CoreChecks::ValidateGetQueryPoolPerformanceResults(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount,
+                                                        void *pData, VkDeviceSize stride, VkQueryResultFlags flags) const {
+    bool skip = false;
+    const auto query_pool_state = GetQueryPoolState(queryPool);
+
+    if (!query_pool_state || query_pool_state->createInfo.queryType != VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) return skip;
+
+    if (((((uintptr_t)pData) % sizeof(VkPerformanceCounterResultKHR)) != 0 ||
+         (stride % sizeof(VkPerformanceCounterResultKHR)) != 0)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, 0,
+                        "VUID-vkGetQueryPoolResults-queryType-03229",
+                        "QueryPool %s was created with a queryType of "
+                        "VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR but pData & stride are not multiple of the "
+                        "size of VkPerformanceCounterResultKHR.",
+                        report_data->FormatHandle(queryPool).c_str());
+    }
+
+    skip |= ValidatePerformanceQueryResults("vkGetQueryPoolResults", query_pool_state, firstQuery, queryCount, flags);
+
+    return skip;
+}
+
+bool CoreChecks::ValidateGetQueryPoolResultsFlags(VkQueryPool queryPool, VkQueryResultFlags flags) const {
+    bool skip = false;
+    const auto query_pool_state = GetQueryPoolState(queryPool);
+    if (query_pool_state) {
+        if ((query_pool_state->createInfo.queryType == VK_QUERY_TYPE_TIMESTAMP) && (flags & VK_QUERY_RESULT_PARTIAL_BIT)) {
+            skip |= log_msg(
+                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, HandleToUint64(queryPool),
+                "VUID-vkGetQueryPoolResults-queryType-00818",
+                "%s was created with a queryType of VK_QUERY_TYPE_TIMESTAMP but flags contains VK_QUERY_RESULT_PARTIAL_BIT.",
+                report_data->FormatHandle(queryPool).c_str());
+        }
+    }
+    return skip;
+}
+
+bool CoreChecks::ValidateGetQueryPoolResultsQueries(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) const {
+    bool skip = false;
+    QueryObject query_obj{queryPool, 0u};
+    for (uint32_t i = 0; i < queryCount; ++i) {
+        query_obj.query = firstQuery + i;
+        if (queryToStateMap.count(query_obj) == 0) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT,
+                            HandleToUint64(queryPool), kVUID_Core_DrawState_InvalidQuery,
+                            "vkGetQueryPoolResults() on %s and query %" PRIu32 ": unknown query",
+                            report_data->FormatHandle(queryPool).c_str(), query_obj.query);
+        }
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateGetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
+                                                    uint32_t queryCount, size_t dataSize, void *pData, VkDeviceSize stride,
+                                                    VkQueryResultFlags flags) const {
+    if (disabled.query_validation) return false;
+    bool skip = false;
+    skip |= ValidateQueryPoolStride("VUID-vkGetQueryPoolResults-flags-02827", "VUID-vkGetQueryPoolResults-flags-00815", stride,
+                                    "dataSize", dataSize, flags);
+    skip |= ValidateGetQueryPoolResultsFlags(queryPool, flags);
+    skip |= ValidateGetQueryPoolResultsQueries(queryPool, firstQuery, queryCount);
+    skip |= ValidateGetQueryPoolPerformanceResults(queryPool, firstQuery, queryCount, pData, stride, flags);
+
+    return skip;
+}
+
+bool CoreChecks::ValidateInsertMemoryRange(const VulkanTypedHandle &typed_handle, const DEVICE_MEMORY_STATE *mem_info,
+                                           VkDeviceSize memoryOffset, const VkMemoryRequirements &memRequirements, bool is_linear,
+                                           const char *api_name) const {
+    bool skip = false;
+
+    if (memoryOffset >= mem_info->alloc_info.allocationSize) {
+        const char *error_code = nullptr;
+        if (typed_handle.type == kVulkanObjectTypeBuffer) {
+            error_code = "VUID-vkBindBufferMemory-memoryOffset-01031";
+        } else if (typed_handle.type == kVulkanObjectTypeImage) {
+            error_code = "VUID-vkBindImageMemory-memoryOffset-01046";
+        } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
+            error_code = "VUID-VkBindAccelerationStructureMemoryInfoNV-memoryOffset-02451";
+        } else {
+            // Unsupported object type
+            assert(false);
+        }
+
+        skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+                       HandleToUint64(mem_info->mem), error_code,
+                       "In %s, attempting to bind %s to %s, memoryOffset=0x%" PRIxLEAST64
+                       " must be less than the memory allocation size 0x%" PRIxLEAST64 ".",
+                       api_name, report_data->FormatHandle(mem_info->mem).c_str(), report_data->FormatHandle(typed_handle).c_str(),
+                       memoryOffset, mem_info->alloc_info.allocationSize);
+    }
+
+    return skip;
+}
+
+bool CoreChecks::ValidateInsertImageMemoryRange(VkImage image, const DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
+                                                const VkMemoryRequirements &mem_reqs, bool is_linear, const char *api_name) const {
+    return ValidateInsertMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info, mem_offset, mem_reqs, is_linear,
+                                     api_name);
+}
+
+bool CoreChecks::ValidateInsertBufferMemoryRange(VkBuffer buffer, const DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
+                                                 const VkMemoryRequirements &mem_reqs, const char *api_name) const {
+    return ValidateInsertMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info, mem_offset, mem_reqs, true,
+                                     api_name);
+}
+
+bool CoreChecks::ValidateInsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, const DEVICE_MEMORY_STATE *mem_info,
+                                                                VkDeviceSize mem_offset, const VkMemoryRequirements &mem_reqs,
+                                                                const char *api_name) const {
+    return ValidateInsertMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info, mem_offset,
+                                     mem_reqs, true, api_name);
+}
+
+bool CoreChecks::ValidateMemoryTypes(const DEVICE_MEMORY_STATE *mem_info, const uint32_t memory_type_bits, const char *funcName,
+                                     const char *msgCode) const {
+    bool skip = false;
+    if (((1 << mem_info->alloc_info.memoryTypeIndex) & memory_type_bits) == 0) {
+        skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+                       HandleToUint64(mem_info->mem), msgCode,
+                       "%s(): MemoryRequirements->memoryTypeBits (0x%X) for this object type are not compatible with the memory "
+                       "type (0x%X) of %s.",
+                       funcName, memory_type_bits, mem_info->alloc_info.memoryTypeIndex,
+                       report_data->FormatHandle(mem_info->mem).c_str());
+    }
+    return skip;
+}
+
+bool CoreChecks::ValidateBindBufferMemory(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset,
+                                          const char *api_name) const {
+    const BUFFER_STATE *buffer_state = GetBufferState(buffer);
+
+    bool skip = false;
+    if (buffer_state) {
+        // Track objects tied to memory
+        uint64_t buffer_handle = HandleToUint64(buffer);
+        const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);
+        skip = ValidateSetMemBinding(mem, obj_struct, api_name);
+
+        // Validate bound memory range information
+        const auto mem_info = GetDevMemState(mem);
+        if (mem_info) {
+            skip |= ValidateInsertBufferMemoryRange(buffer, mem_info, memoryOffset, buffer_state->requirements, api_name);
+            skip |= ValidateMemoryTypes(mem_info, buffer_state->requirements.memoryTypeBits, api_name,
+                                        "VUID-vkBindBufferMemory-memory-01035");
+        }
+
+        // Validate memory requirements alignment
+        if (SafeModulo(memoryOffset, buffer_state->requirements.alignment) != 0) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, buffer_handle,
+                            "VUID-vkBindBufferMemory-memoryOffset-01036",
+                            "%s: memoryOffset is 0x%" PRIxLEAST64
+                            " but must be an integer multiple of the VkMemoryRequirements::alignment value 0x%" PRIxLEAST64
+                            ", returned from a call to vkGetBufferMemoryRequirements with buffer.",
+                            api_name, memoryOffset, buffer_state->requirements.alignment);
+        }
+
+        if (mem_info) {
+            // Validate memory requirements size
+            if (buffer_state->requirements.size > (mem_info->alloc_info.allocationSize - memoryOffset)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, buffer_handle,
+                                "VUID-vkBindBufferMemory-size-01037",
+                                "%s: memory size minus memoryOffset is 0x%" PRIxLEAST64
+                                " but must be at least as large as VkMemoryRequirements::size value 0x%" PRIxLEAST64
+                                ", returned from a call to vkGetBufferMemoryRequirements with buffer.",
+                                api_name, mem_info->alloc_info.allocationSize - memoryOffset, buffer_state->requirements.size);
+            }
+
+            // Validate dedicated allocation
+            if (mem_info->is_dedicated && ((mem_info->dedicated_buffer != buffer) || (memoryOffset != 0))) {
+                // TODO: Add vkBindBufferMemory2KHR error message when added to spec.
+                auto validation_error = kVUIDUndefined;
+                if (strcmp(api_name, "vkBindBufferMemory()") == 0) {
+                    validation_error = "VUID-vkBindBufferMemory-memory-01508";
+                }
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, buffer_handle,
+                                validation_error,
+                                "%s: for dedicated %s, VkMemoryDedicatedAllocateInfoKHR::buffer %s must be equal "
+                                "to %s and memoryOffset 0x%" PRIxLEAST64 " must be zero.",
+                                api_name, report_data->FormatHandle(mem).c_str(),
+                                report_data->FormatHandle(mem_info->dedicated_buffer).c_str(),
+                                report_data->FormatHandle(buffer).c_str(), memoryOffset);
+            }
+
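+            // When the bufferDeviceAddress feature is enabled, a buffer created with
+            // VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR must be bound to memory allocated with
+            // VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR chained through VkMemoryAllocateFlagsInfo.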
+            auto chained_flags_struct = lvl_find_in_chain<VkMemoryAllocateFlagsInfo>(mem_info->alloc_info.pNext);
+            if (enabled_features.buffer_device_address.bufferDeviceAddress &&
+                (buffer_state->createInfo.usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR) &&
+                (!chained_flags_struct || !(chained_flags_struct->flags & VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR))) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, buffer_handle,
+                                "VUID-vkBindBufferMemory-bufferDeviceAddress-03339",
+                                "%s: If buffer was created with the VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR bit set, "
+                                "memory must have been allocated with the VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR bit set.",
+                                api_name);
+            }
+        }
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
+                                                 VkDeviceSize memoryOffset) const {
+    const char *api_name = "vkBindBufferMemory()";
+    return ValidateBindBufferMemory(buffer, mem, memoryOffset, api_name);
+}
+
+bool CoreChecks::PreCallValidateBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
+                                                  const VkBindBufferMemoryInfoKHR *pBindInfos) const {
+    char api_name[64];
+    bool skip = false;
+
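+    // Build api_name per element so each validation message identifies the failing pBindInfos entry.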
+    for (uint32_t i = 0; i < bindInfoCount; i++) {
+        sprintf(api_name, "vkBindBufferMemory2() pBindInfos[%u]", i);
+        skip |= ValidateBindBufferMemory(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset, api_name);
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
+                                                     const VkBindBufferMemoryInfoKHR *pBindInfos) const {
+    char api_name[64];
+    bool skip = false;
+
+    for (uint32_t i = 0; i < bindInfoCount; i++) {
+        sprintf(api_name, "vkBindBufferMemory2KHR() pBindInfos[%u]", i);
+        skip |= ValidateBindBufferMemory(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset, api_name);
+    }
+    return skip;
+}
+
+bool CoreChecks::ValidateGetImageMemoryRequirements2(const VkImageMemoryRequirementsInfo2 *pInfo) const {
+    bool skip = false;
+    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
+        skip |= ValidateGetImageMemoryRequirements2ANDROID(pInfo->image);
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
+                                                            VkMemoryRequirements2 *pMemoryRequirements) const {
+    return ValidateGetImageMemoryRequirements2(pInfo);
+}
+
+bool CoreChecks::PreCallValidateGetImageMemoryRequirements2KHR(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
+                                                               VkMemoryRequirements2 *pMemoryRequirements) const {
+    return ValidateGetImageMemoryRequirements2(pInfo);
+}
+
+bool CoreChecks::PreCallValidateGetPhysicalDeviceImageFormatProperties2(VkPhysicalDevice physicalDevice,
+                                                                        const VkPhysicalDeviceImageFormatInfo2 *pImageFormatInfo,
+                                                                        VkImageFormatProperties2 *pImageFormatProperties) const {
+    // AHB-specific validation can't be gated on a device extension at this physical-device-level entry point,
+    // but running it unconditionally is harmless.
+    bool skip = ValidateGetPhysicalDeviceImageFormatProperties2ANDROID(report_data, pImageFormatInfo, pImageFormatProperties);
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateGetPhysicalDeviceImageFormatProperties2KHR(VkPhysicalDevice physicalDevice,
+                                                                           const VkPhysicalDeviceImageFormatInfo2 *pImageFormatInfo,
+                                                                           VkImageFormatProperties2 *pImageFormatProperties) const {
+    // AHB-specific validation can't be gated on a device extension at this physical-device-level entry point,
+    // but running it unconditionally is harmless.
+    bool skip = ValidateGetPhysicalDeviceImageFormatProperties2ANDROID(report_data, pImageFormatInfo, pImageFormatProperties);
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateDestroyPipeline(VkDevice device, VkPipeline pipeline,
+                                                const VkAllocationCallbacks *pAllocator) const {
+    const PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
+    const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
+    bool skip = false;
+    if (pipeline_state) {
+        skip |= ValidateObjectNotInUse(pipeline_state, obj_struct, "vkDestroyPipeline", "VUID-vkDestroyPipeline-pipeline-00765");
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateDestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks *pAllocator) const {
+    const SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
+    const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
+    bool skip = false;
+    if (sampler_state) {
+        skip |= ValidateObjectNotInUse(sampler_state, obj_struct, "vkDestroySampler", "VUID-vkDestroySampler-sampler-01082");
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
+                                                      const VkAllocationCallbacks *pAllocator) const {
+    const DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
+    const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
+    bool skip = false;
+    if (desc_pool_state) {
+        skip |= ValidateObjectNotInUse(desc_pool_state, obj_struct, "vkDestroyDescriptorPool",
+                                       "VUID-vkDestroyDescriptorPool-descriptorPool-00303");
+    }
+    return skip;
+}
+
+// Verify that the command buffer in the given cb_node is not currently in use, and return the skip result.
+// This check is only meaningful at a point when the command buffer is being reset or freed.
+bool CoreChecks::CheckCommandBufferInFlight(const CMD_BUFFER_STATE *cb_node, const char *action, const char *error_code) const {
+    bool skip = false;
+    if (cb_node->in_use.load()) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(cb_node->commandBuffer), error_code, "Attempt to %s %s which is in use.", action,
+                        report_data->FormatHandle(cb_node->commandBuffer).c_str());
+    }
+    return skip;
+}
+
+// Iterate over all cmdBuffers in given commandPool and verify that each is not in use
+bool CoreChecks::CheckCommandBuffersInFlight(const COMMAND_POOL_STATE *pPool, const char *action, const char *error_code) const {
+    bool skip = false;
+    for (auto cmd_buffer : pPool->commandBuffers) {
+        skip |= CheckCommandBufferInFlight(GetCBState(cmd_buffer), action, error_code);
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount,
+                                                   const VkCommandBuffer *pCommandBuffers) const {
+    bool skip = false;
+    for (uint32_t i = 0; i < commandBufferCount; i++) {
+        const auto *cb_node = GetCBState(pCommandBuffers[i]);
+        // Report an error if this command buffer is still in flight
+        if (cb_node) {
+            skip |= CheckCommandBufferInFlight(cb_node, "free", "VUID-vkFreeCommandBuffers-pCommandBuffers-00047");
+        }
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
+                                                  const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool) const {
+    return ValidateDeviceQueueFamily(pCreateInfo->queueFamilyIndex, "vkCreateCommandPool", "pCreateInfo->queueFamilyIndex",
+                                     "VUID-vkCreateCommandPool-queueFamilyIndex-01937");
+}
+
+bool CoreChecks::PreCallValidateCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
+                                                const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool) const {
+    if (disabled.query_validation) return false;
+    bool skip = false;
+    if (pCreateInfo && pCreateInfo->queryType == VK_QUERY_TYPE_PIPELINE_STATISTICS) {
+        if (!enabled_features.core.pipelineStatisticsQuery) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, 0,
+                            "VUID-VkQueryPoolCreateInfo-queryType-00791",
+                            "Query pool with type VK_QUERY_TYPE_PIPELINE_STATISTICS created on a device with "
+                            "VkDeviceCreateInfo.pEnabledFeatures.pipelineStatisticsQuery == VK_FALSE.");
+        }
+    }
+    if (pCreateInfo && pCreateInfo->queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
+        if (!enabled_features.performance_query_features.performanceCounterQueryPools) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, 0,
+                            "VUID-VkQueryPoolPerformanceCreateInfoKHR-performanceCounterQueryPools-03237",
+                            "Query pool with type VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR created on a device with "
+                            "VkPhysicalDevicePerformanceQueryFeaturesKHR.performanceCounterQueryPools == VK_FALSE.");
+        }
+
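+        // Performance query pools additionally require a VkQueryPoolPerformanceCreateInfoKHR in the pNext chain;
+        // its queueFamilyIndex and counter indices are checked against the enumerated performance counters.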
+        auto perf_ci = lvl_find_in_chain<VkQueryPoolPerformanceCreateInfoKHR>(pCreateInfo->pNext);
+        if (!perf_ci) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, 0,
+                            "VUID-VkQueryPoolCreateInfo-queryType-03222",
+                            "Query pool with type VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR created but the pNext chain of "
+                            "pCreateInfo does not contain an instance of VkQueryPoolPerformanceCreateInfoKHR.");
+        } else {
+            const auto &perf_counter_iter = physical_device_state->perf_counters.find(perf_ci->queueFamilyIndex);
+            if (perf_counter_iter == physical_device_state->perf_counters.end()) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, 0,
+                                "VUID-VkQueryPoolPerformanceCreateInfoKHR-queueFamilyIndex-03236",
+                                "VkQueryPoolPerformanceCreateInfoKHR::queueFamilyIndex is not a valid queue family index.");
+            } else {
+                const QUEUE_FAMILY_PERF_COUNTERS *perf_counters = perf_counter_iter->second.get();
+                for (uint32_t idx = 0; idx < perf_ci->counterIndexCount; idx++) {
+                    if (perf_ci->pCounterIndices[idx] >= perf_counters->counters.size()) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, 0,
+                                        "VUID-VkQueryPoolPerformanceCreateInfoKHR-pCounterIndices-03321",
+                                        "VkQueryPoolPerformanceCreateInfoKHR::pCounterIndices[%u] = %u is not a valid "
+                                        "counter index.",
+                                        idx, perf_ci->pCounterIndices[idx]);
+                    }
+                }
+            }
+        }
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
+                                                   const VkAllocationCallbacks *pAllocator) const {
+    const COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
+    bool skip = false;
+    if (cp_state) {
+        // Verify that command buffers in pool are complete (not in-flight)
+        skip |= CheckCommandBuffersInFlight(cp_state, "destroy command pool with", "VUID-vkDestroyCommandPool-commandPool-00041");
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags) const {
+    const auto *command_pool_state = GetCommandPoolState(commandPool);
+    return CheckCommandBuffersInFlight(command_pool_state, "reset command pool with", "VUID-vkResetCommandPool-commandPool-00040");
+}
+
+bool CoreChecks::PreCallValidateResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences) const {
+    bool skip = false;
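+    // A fence still associated with an unfinished queue submission (internal scope, FENCE_INFLIGHT) must not be reset.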
+    for (uint32_t i = 0; i < fenceCount; ++i) {
+        const auto pFence = GetFenceState(pFences[i]);
+        if (pFence && pFence->scope == kSyncScopeInternal && pFence->state == FENCE_INFLIGHT) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
+                            HandleToUint64(pFences[i]), "VUID-vkResetFences-pFences-01123", "%s is in use.",
+                            report_data->FormatHandle(pFences[i]).c_str());
+        }
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
+                                                   const VkAllocationCallbacks *pAllocator) const {
+    const FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
+    const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
+    bool skip = false;
+    if (framebuffer_state) {
+        skip |= ValidateObjectNotInUse(framebuffer_state, obj_struct, "vkDestroyFramebuffer",
+                                       "VUID-vkDestroyFramebuffer-framebuffer-00892");
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
+                                                  const VkAllocationCallbacks *pAllocator) const {
+    const RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
+    const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
+    bool skip = false;
+    if (rp_state) {
+        skip |= ValidateObjectNotInUse(rp_state, obj_struct, "vkDestroyRenderPass", "VUID-vkDestroyRenderPass-renderPass-00873");
+    }
+    return skip;
+}
+
+// Access helper functions for external modules
+VkFormatProperties CoreChecks::GetPDFormatProperties(const VkFormat format) const {
+    VkFormatProperties format_properties;
+    DispatchGetPhysicalDeviceFormatProperties(physical_device, format, &format_properties);
+    return format_properties;
+}
+
+bool CoreChecks::ValidatePipelineVertexDivisors(std::vector<std::shared_ptr<PIPELINE_STATE>> const &pipe_state_vec,
+                                                const uint32_t count, const VkGraphicsPipelineCreateInfo *pipe_cis) const {
+    bool skip = false;
+    const VkPhysicalDeviceLimits *device_limits = &phys_dev_props.limits;
+
+    for (uint32_t i = 0; i < count; i++) {
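+        // Only pipelines that chain VkPipelineVertexInputDivisorStateCreateInfoEXT need divisor validation.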
+        auto pvids_ci = lvl_find_in_chain<VkPipelineVertexInputDivisorStateCreateInfoEXT>(pipe_cis[i].pVertexInputState->pNext);
+        if (nullptr == pvids_ci) continue;
+
+        const PIPELINE_STATE *pipe_state = pipe_state_vec[i].get();
+        for (uint32_t j = 0; j < pvids_ci->vertexBindingDivisorCount; j++) {
+            const VkVertexInputBindingDivisorDescriptionEXT *vibdd = &(pvids_ci->pVertexBindingDivisors[j]);
+            if (vibdd->binding >= device_limits->maxVertexInputBindings) {
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                    "VUID-VkVertexInputBindingDivisorDescriptionEXT-binding-01869",
+                    "vkCreateGraphicsPipelines(): Pipeline[%1u] with chained VkPipelineVertexInputDivisorStateCreateInfoEXT, "
+                    "pVertexBindingDivisors[%1u] binding index of (%1u) exceeds device maxVertexInputBindings (%1u).",
+                    i, j, vibdd->binding, device_limits->maxVertexInputBindings);
+            }
+            if (vibdd->divisor > phys_dev_ext_props.vtx_attrib_divisor_props.maxVertexAttribDivisor) {
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                    "VUID-VkVertexInputBindingDivisorDescriptionEXT-divisor-01870",
+                    "vkCreateGraphicsPipelines(): Pipeline[%1u] with chained VkPipelineVertexInputDivisorStateCreateInfoEXT, "
+                    "pVertexBindingDivisors[%1u] divisor of (%1u) exceeds extension maxVertexAttribDivisor (%1u).",
+                    i, j, vibdd->divisor, phys_dev_ext_props.vtx_attrib_divisor_props.maxVertexAttribDivisor);
+            }
+            if ((0 == vibdd->divisor) && !enabled_features.vtx_attrib_divisor_features.vertexAttributeInstanceRateZeroDivisor) {
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                    "VUID-VkVertexInputBindingDivisorDescriptionEXT-vertexAttributeInstanceRateZeroDivisor-02228",
+                    "vkCreateGraphicsPipelines(): Pipeline[%1u] with chained VkPipelineVertexInputDivisorStateCreateInfoEXT, "
+                    "pVertexBindingDivisors[%1u] divisor must not be 0 when vertexAttributeInstanceRateZeroDivisor feature is not "
+                    "enabled.",
+                    i, j);
+            }
+            if ((1 != vibdd->divisor) && !enabled_features.vtx_attrib_divisor_features.vertexAttributeInstanceRateDivisor) {
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                    "VUID-VkVertexInputBindingDivisorDescriptionEXT-vertexAttributeInstanceRateDivisor-02229",
+                    "vkCreateGraphicsPipelines(): Pipeline[%1u] with chained VkPipelineVertexInputDivisorStateCreateInfoEXT, "
+                    "pVertexBindingDivisors[%1u] divisor (%1u) must be 1 when vertexAttributeInstanceRateDivisor feature is not "
+                    "enabled.",
+                    i, j, vibdd->divisor);
+            }
+
+            // Find the corresponding binding description and validate input rate setting
+            bool failed_01871 = true;
+            for (size_t k = 0; k < pipe_state->vertex_binding_descriptions_.size(); k++) {
+                if ((vibdd->binding == pipe_state->vertex_binding_descriptions_[k].binding) &&
+                    (VK_VERTEX_INPUT_RATE_INSTANCE == pipe_state->vertex_binding_descriptions_[k].inputRate)) {
+                    failed_01871 = false;
+                    break;
+                }
+            }
+            if (failed_01871) {  // Description not found, or has incorrect inputRate value
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                    "VUID-VkVertexInputBindingDivisorDescriptionEXT-inputRate-01871",
+                    "vkCreateGraphicsPipelines(): Pipeline[%1u] with chained VkPipelineVertexInputDivisorStateCreateInfoEXT, "
+                    "pVertexBindingDivisors[%1u] specifies binding index (%1u), but that binding index's "
+                    "VkVertexInputBindingDescription.inputRate member is not VK_VERTEX_INPUT_RATE_INSTANCE.",
+                    i, j, vibdd->binding);
+            }
+        }
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+                                                        const VkGraphicsPipelineCreateInfo *pCreateInfos,
+                                                        const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
+                                                        void *cgpl_state_data) const {
+    bool skip = StateTracker::PreCallValidateCreateGraphicsPipelines(device, pipelineCache, count, pCreateInfos, pAllocator,
+                                                                     pPipelines, cgpl_state_data);
+    create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
+
+    for (uint32_t i = 0; i < count; i++) {
+        skip |= ValidatePipelineLocked(cgpl_state->pipe_state, i);
+    }
+
+    for (uint32_t i = 0; i < count; i++) {
+        skip |= ValidatePipelineUnlocked(cgpl_state->pipe_state[i].get(), i);
+    }
+
+    if (device_extensions.vk_ext_vertex_attribute_divisor) {
+        skip |= ValidatePipelineVertexDivisors(cgpl_state->pipe_state, count, pCreateInfos);
+    }
+
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+                                                       const VkComputePipelineCreateInfo *pCreateInfos,
+                                                       const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
+                                                       void *ccpl_state_data) const {
+    bool skip = StateTracker::PreCallValidateCreateComputePipelines(device, pipelineCache, count, pCreateInfos, pAllocator,
+                                                                    pPipelines, ccpl_state_data);
+
+    auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
+    for (uint32_t i = 0; i < count; i++) {
+        // TODO: Add Compute Pipeline Verification
+        skip |= ValidateComputePipeline(ccpl_state->pipe_state.back().get());
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+                                                            const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
+                                                            const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
+                                                            void *crtpl_state_data) const {
+    bool skip = StateTracker::PreCallValidateCreateRayTracingPipelinesNV(device, pipelineCache, count, pCreateInfos, pAllocator,
+                                                                         pPipelines, crtpl_state_data);
+
+    auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
+    for (uint32_t i = 0; i < count; i++) {
+        skip |= ValidateRayTracingPipelineNV(crtpl_state->pipe_state[i].get());
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateGetPipelineExecutablePropertiesKHR(VkDevice device, const VkPipelineInfoKHR *pPipelineInfo,
+                                                                   uint32_t *pExecutableCount,
+                                                                   VkPipelineExecutablePropertiesKHR *pProperties) const {
+    bool skip = false;
+
+    if (!enabled_features.pipeline_exe_props_features.pipelineExecutableInfo) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        "VUID-vkGetPipelineExecutablePropertiesKHR-pipelineExecutableInfo-03270",
+                        "vkGetPipelineExecutablePropertiesKHR called when pipelineExecutableInfo feature is not enabled.");
+    }
+
+    return skip;
+}
+
+bool CoreChecks::ValidatePipelineExecutableInfo(VkDevice device, const VkPipelineExecutableInfoKHR *pExecutableInfo) const {
+    bool skip = false;
+
+    if (!enabled_features.pipeline_exe_props_features.pipelineExecutableInfo) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        "VUID-vkGetPipelineExecutableStatisticsKHR-pipelineExecutableInfo-03272",
+                        "vkGetPipelineExecutableStatisticsKHR called when pipelineExecutableInfo feature is not enabled.");
+    }
+
+    VkPipelineInfoKHR pi = {};
+    pi.sType = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR;
+    pi.pipeline = pExecutableInfo->pipeline;
+
+    // We could probably cache this instead of fetching it every time
+    uint32_t executableCount = 0;
+    DispatchGetPipelineExecutablePropertiesKHR(device, &pi, &executableCount, NULL);
+
+    if (pExecutableInfo->executableIndex >= executableCount) {
+        skip |=
+            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                    HandleToUint64(pExecutableInfo->pipeline), "VUID-VkPipelineExecutableInfoKHR-executableIndex-03275",
+                    "VkPipelineExecutableInfoKHR::executableIndex (%1u) must be less than the number of executables associated "
+                    "with the pipeline (%1u) as returned by vkGetPipelineExecutablePropertiesKHR.",
+                    pExecutableInfo->executableIndex, executableCount);
+    }
+
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateGetPipelineExecutableStatisticsKHR(VkDevice device,
+                                                                   const VkPipelineExecutableInfoKHR *pExecutableInfo,
+                                                                   uint32_t *pStatisticCount,
+                                                                   VkPipelineExecutableStatisticKHR *pStatistics) const {
+    bool skip = ValidatePipelineExecutableInfo(device, pExecutableInfo);
+
+    const PIPELINE_STATE *pipeline_state = GetPipelineState(pExecutableInfo->pipeline);
+    if (!(pipeline_state->getPipelineCreateFlags() & VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                        HandleToUint64(pExecutableInfo->pipeline), "VUID-vkGetPipelineExecutableStatisticsKHR-pipeline-03274",
+                        "vkGetPipelineExecutableStatisticsKHR called on a pipeline created without the "
+                        "VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR flag set");
+    }
+
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateGetPipelineExecutableInternalRepresentationsKHR(
+    VkDevice device, const VkPipelineExecutableInfoKHR *pExecutableInfo, uint32_t *pInternalRepresentationCount,
+    VkPipelineExecutableInternalRepresentationKHR *pStatistics) const {
+    bool skip = ValidatePipelineExecutableInfo(device, pExecutableInfo);
+
+    const PIPELINE_STATE *pipeline_state = GetPipelineState(pExecutableInfo->pipeline);
+    if (!(pipeline_state->getPipelineCreateFlags() & VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                        HandleToUint64(pExecutableInfo->pipeline),
+                        "VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pipeline-03278",
+                        "vkGetPipelineExecutableInternalRepresentationsKHR called on a pipeline created without the "
+                        "VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR flag set");
+    }
+
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
+                                                          const VkAllocationCallbacks *pAllocator,
+                                                          VkDescriptorSetLayout *pSetLayout) const {
+    return cvdescriptorset::ValidateDescriptorSetLayoutCreateInfo(
+        report_data, pCreateInfo, IsExtEnabled(device_extensions.vk_khr_push_descriptor), phys_dev_ext_props.max_push_descriptors,
+        IsExtEnabled(device_extensions.vk_ext_descriptor_indexing), &enabled_features.descriptor_indexing,
+        &enabled_features.inline_uniform_block, &phys_dev_ext_props.inline_uniform_block_props, &device_extensions);
+}
+
+// Used by CreatePipelineLayout and CmdPushConstants.
+// Note that the index argument is optional and only used by CreatePipelineLayout.
+bool CoreChecks::ValidatePushConstantRange(const uint32_t offset, const uint32_t size, const char *caller_name,
+                                           uint32_t index = 0) const {
+    if (disabled.push_constant_range) return false;
+    uint32_t const maxPushConstantsSize = phys_dev_props.limits.maxPushConstantsSize;
+    bool skip = false;
+    // Check that offset + size don't exceed the max.
+    // Prevent arithmetic overflow here by avoiding addition and testing in this order.
+    if ((offset >= maxPushConstantsSize) || (size > maxPushConstantsSize - offset)) {
+        // This is a pain just to adapt the log message to the caller, but better to sort it out only when there is a problem.
+        if (0 == strcmp(caller_name, "vkCreatePipelineLayout()")) {
+            if (offset >= maxPushConstantsSize) {
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                    "VUID-VkPushConstantRange-offset-00294",
+                    "%s call has push constants index %u with offset %u that exceeds this device's maxPushConstantsSize of %u.",
+                    caller_name, index, offset, maxPushConstantsSize);
+            }
+            if (size > maxPushConstantsSize - offset) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkPushConstantRange-size-00298",
+                                "%s call has push constants index %u with offset %u and size %u that exceeds this device's "
+                                "maxPushConstantsSize of %u.",
+                                caller_name, index, offset, size, maxPushConstantsSize);
+            }
+        } else if (0 == strcmp(caller_name, "vkCmdPushConstants()")) {
+            if (offset >= maxPushConstantsSize) {
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                    "VUID-vkCmdPushConstants-offset-00370",
+                    "%s call has push constants index %u with offset %u that exceeds this device's maxPushConstantsSize of %u.",
+                    caller_name, index, offset, maxPushConstantsSize);
+            }
+            if (size > maxPushConstantsSize - offset) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-vkCmdPushConstants-size-00371",
+                                "%s call has push constants index %u with offset %u and size %u that exceeds this device's "
+                                "maxPushConstantsSize of %u.",
+                                caller_name, index, offset, size, maxPushConstantsSize);
+            }
+        } else {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            kVUID_Core_DrawState_InternalError, "%s caller not supported.", caller_name);
+        }
+    }
+    // size needs to be non-zero and a multiple of 4.
+    if ((size == 0) || ((size & 0x3) != 0)) {
+        if (0 == strcmp(caller_name, "vkCreatePipelineLayout()")) {
+            if (size == 0) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkPushConstantRange-size-00296",
+                                "%s call has push constants index %u with size %u. Size must be greater than zero.", caller_name,
+                                index, size);
+            }
+            if (size & 0x3) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkPushConstantRange-size-00297",
+                                "%s call has push constants index %u with size %u. Size must be a multiple of 4.", caller_name,
+                                index, size);
+            }
+        } else if (0 == strcmp(caller_name, "vkCmdPushConstants()")) {
+            if (size == 0) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-vkCmdPushConstants-size-arraylength",
+                                "%s call has push constants index %u with size %u. Size must be greater than zero.", caller_name,
+                                index, size);
+            }
+            if (size & 0x3) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-vkCmdPushConstants-size-00369",
+                                "%s call has push constants index %u with size %u. Size must be a multiple of 4.", caller_name,
+                                index, size);
+            }
+        } else {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            kVUID_Core_DrawState_InternalError, "%s caller not supported.", caller_name);
+        }
+    }
+    // offset needs to be a multiple of 4.
+    if ((offset & 0x3) != 0) {
+        if (0 == strcmp(caller_name, "vkCreatePipelineLayout()")) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkPushConstantRange-offset-00295",
+                            "%s call has push constants index %u with offset %u. Offset must be a multiple of 4.", caller_name,
+                            index, offset);
+        } else if (0 == strcmp(caller_name, "vkCmdPushConstants()")) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-vkCmdPushConstants-offset-00368",
+                            "%s call has push constants with offset %u. Offset must be a multiple of 4.", caller_name, offset);
+        } else {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            kVUID_Core_DrawState_InternalError, "%s caller not supported.", caller_name);
+        }
+    }
+    return skip;
+}
+
+enum DSL_DESCRIPTOR_GROUPS {
+    DSL_TYPE_SAMPLERS = 0,
+    DSL_TYPE_UNIFORM_BUFFERS,
+    DSL_TYPE_STORAGE_BUFFERS,
+    DSL_TYPE_SAMPLED_IMAGES,
+    DSL_TYPE_STORAGE_IMAGES,
+    DSL_TYPE_INPUT_ATTACHMENTS,
+    DSL_TYPE_INLINE_UNIFORM_BLOCK,
+    DSL_NUM_DESCRIPTOR_GROUPS
+};
+
+// Used by PreCallValidateCreatePipelineLayout.
+// Returns an array of size DSL_NUM_DESCRIPTOR_GROUPS of the maximum number of descriptors used in any single pipeline stage
+std::valarray<uint32_t> GetDescriptorCountMaxPerStage(
+    const DeviceFeatures *enabled_features,
+    const std::vector<std::shared_ptr<cvdescriptorset::DescriptorSetLayout const>> &set_layouts, bool skip_update_after_bind) {
+    // Identify active pipeline stages
+    std::vector<VkShaderStageFlags> stage_flags = {VK_SHADER_STAGE_VERTEX_BIT, VK_SHADER_STAGE_FRAGMENT_BIT,
+                                                   VK_SHADER_STAGE_COMPUTE_BIT};
+    if (enabled_features->core.geometryShader) {
+        stage_flags.push_back(VK_SHADER_STAGE_GEOMETRY_BIT);
+    }
+    if (enabled_features->core.tessellationShader) {
+        stage_flags.push_back(VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT);
+        stage_flags.push_back(VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT);
+    }
+
+    // Allow iteration over enum values
+    std::vector<DSL_DESCRIPTOR_GROUPS> dsl_groups = {
+        DSL_TYPE_SAMPLERS,       DSL_TYPE_UNIFORM_BUFFERS,   DSL_TYPE_STORAGE_BUFFERS,     DSL_TYPE_SAMPLED_IMAGES,
+        DSL_TYPE_STORAGE_IMAGES, DSL_TYPE_INPUT_ATTACHMENTS, DSL_TYPE_INLINE_UNIFORM_BLOCK};
+
+    // Sum by layouts per stage, then pick max of stages per type
+    std::valarray<uint32_t> max_sum(0U, DSL_NUM_DESCRIPTOR_GROUPS);  // max descriptor sum among all pipeline stages
+    for (auto stage : stage_flags) {
+        std::valarray<uint32_t> stage_sum(0U, DSL_NUM_DESCRIPTOR_GROUPS);  // per-stage sums
+        for (auto dsl : set_layouts) {
+            if (skip_update_after_bind &&
+                (dsl->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT)) {
+                continue;
+            }
+
+            for (uint32_t binding_idx = 0; binding_idx < dsl->GetBindingCount(); binding_idx++) {
+                const VkDescriptorSetLayoutBinding *binding = dsl->GetDescriptorSetLayoutBindingPtrFromIndex(binding_idx);
+                // Bindings with a descriptorCount of 0 are "reserved" and should be skipped
+                if (0 != (stage & binding->stageFlags) && binding->descriptorCount > 0) {
+                    switch (binding->descriptorType) {
+                        case VK_DESCRIPTOR_TYPE_SAMPLER:
+                            stage_sum[DSL_TYPE_SAMPLERS] += binding->descriptorCount;
+                            break;
+                        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+                        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+                            stage_sum[DSL_TYPE_UNIFORM_BUFFERS] += binding->descriptorCount;
+                            break;
+                        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+                        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
+                            stage_sum[DSL_TYPE_STORAGE_BUFFERS] += binding->descriptorCount;
+                            break;
+                        case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+                        case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+                            stage_sum[DSL_TYPE_SAMPLED_IMAGES] += binding->descriptorCount;
+                            break;
+                        case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
+                        case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
+                            stage_sum[DSL_TYPE_STORAGE_IMAGES] += binding->descriptorCount;
+                            break;
+                        case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+                            stage_sum[DSL_TYPE_SAMPLED_IMAGES] += binding->descriptorCount;
+                            stage_sum[DSL_TYPE_SAMPLERS] += binding->descriptorCount;
+                            break;
+                        case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
+                            stage_sum[DSL_TYPE_INPUT_ATTACHMENTS] += binding->descriptorCount;
+                            break;
+                        case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
+                            // count one block per binding. descriptorCount is number of bytes
+                            stage_sum[DSL_TYPE_INLINE_UNIFORM_BLOCK]++;
+                            break;
+                        default:
+                            break;
+                    }
+                }
+            }
+        }
+        for (auto type : dsl_groups) {
+            max_sum[type] = std::max(stage_sum[type], max_sum[type]);
+        }
+    }
+    return max_sum;
+}
+
+// Used by PreCallValidateCreatePipelineLayout.
+// Returns a map indexed by VK_DESCRIPTOR_TYPE_* enum of the summed descriptors by type.
+// Note: descriptors only count against the limit once even if used by multiple stages.
+std::map<uint32_t, uint32_t> GetDescriptorSum(
+    const std::vector<std::shared_ptr<cvdescriptorset::DescriptorSetLayout const>> &set_layouts, bool skip_update_after_bind) {
+    std::map<uint32_t, uint32_t> sum_by_type;
+    for (auto dsl : set_layouts) {
+        if (skip_update_after_bind && (dsl->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT)) {
+            continue;
+        }
+
+        for (uint32_t binding_idx = 0; binding_idx < dsl->GetBindingCount(); binding_idx++) {
+            const VkDescriptorSetLayoutBinding *binding = dsl->GetDescriptorSetLayoutBindingPtrFromIndex(binding_idx);
+            // Bindings with a descriptorCount of 0 are "reserved" and should be skipped
+            if (binding->descriptorCount > 0) {
+                if (binding->descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
+                    // count one block per binding. descriptorCount is number of bytes
+                    sum_by_type[binding->descriptorType]++;
+                } else {
+                    sum_by_type[binding->descriptorType] += binding->descriptorCount;
+                }
+            }
+        }
+    }
+    return sum_by_type;
+}
+
+bool CoreChecks::PreCallValidateCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
+                                                     const VkAllocationCallbacks *pAllocator,
+                                                     VkPipelineLayout *pPipelineLayout) const {
+    bool skip = false;
+
+    // Validate layout count against device physical limit
+    if (pCreateInfo->setLayoutCount > phys_dev_props.limits.maxBoundDescriptorSets) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkPipelineLayoutCreateInfo-setLayoutCount-00286",
+                        "vkCreatePipelineLayout(): setLayoutCount (%d) exceeds physical device maxBoundDescriptorSets limit (%d).",
+                        pCreateInfo->setLayoutCount, phys_dev_props.limits.maxBoundDescriptorSets);
+    }
+
+    // Validate Push Constant ranges
+    uint32_t i, j;
+    for (i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
+        skip |= ValidatePushConstantRange(pCreateInfo->pPushConstantRanges[i].offset, pCreateInfo->pPushConstantRanges[i].size,
+                                          "vkCreatePipelineLayout()", i);
+        if (0 == pCreateInfo->pPushConstantRanges[i].stageFlags) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkPushConstantRange-stageFlags-requiredbitmask",
+                            "vkCreatePipelineLayout() call has no stageFlags set.");
+        }
+    }
+
+    // As of 1.0.28, there is a VU that states that a stage flag cannot appear more than once in the list of push constant ranges.
+    for (i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
+        for (j = i + 1; j < pCreateInfo->pushConstantRangeCount; ++j) {
+            if (0 != (pCreateInfo->pPushConstantRanges[i].stageFlags & pCreateInfo->pPushConstantRanges[j].stageFlags)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkPipelineLayoutCreateInfo-pPushConstantRanges-00292",
+                                "vkCreatePipelineLayout() Duplicate stage flags found in ranges %d and %d.", i, j);
+            }
+        }
+    }
+
+    // Early-out: skip the descriptor accounting below if any of the checks above already failed.
+    if (skip) return skip;
+
+    std::vector<std::shared_ptr<cvdescriptorset::DescriptorSetLayout const>> set_layouts(pCreateInfo->setLayoutCount, nullptr);
+    unsigned int push_descriptor_set_count = 0;
+    for (i = 0; i < pCreateInfo->setLayoutCount; ++i) {
+        set_layouts[i] = GetDescriptorSetLayoutShared(pCreateInfo->pSetLayouts[i]);
+        if (set_layouts[i]->IsPushDescriptor()) ++push_descriptor_set_count;
+    }
+
+    if (push_descriptor_set_count > 1) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00293",
+                        "vkCreatePipelineLayout() Multiple push descriptor sets found.");
+    }
+
+    // Max descriptors by type, within a single pipeline stage
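+    // Passing 'true' skips layouts created with UPDATE_AFTER_BIND_POOL; those are tallied again further below
+    // against the VK_EXT_descriptor_indexing UpdateAfterBind limits.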
+    std::valarray<uint32_t> max_descriptors_per_stage = GetDescriptorCountMaxPerStage(&enabled_features, set_layouts, true);
+    // Samplers
+    if (max_descriptors_per_stage[DSL_TYPE_SAMPLERS] > phys_dev_props.limits.maxPerStageDescriptorSamplers) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00287",
+                        "vkCreatePipelineLayout(): max per-stage sampler bindings count (%d) exceeds device "
+                        "maxPerStageDescriptorSamplers limit (%d).",
+                        max_descriptors_per_stage[DSL_TYPE_SAMPLERS], phys_dev_props.limits.maxPerStageDescriptorSamplers);
+    }
+
+    // Uniform buffers
+    if (max_descriptors_per_stage[DSL_TYPE_UNIFORM_BUFFERS] > phys_dev_props.limits.maxPerStageDescriptorUniformBuffers) {
+        skip |=
+            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                    "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00288",
+                    "vkCreatePipelineLayout(): max per-stage uniform buffer bindings count (%d) exceeds device "
+                    "maxPerStageDescriptorUniformBuffers limit (%d).",
+                    max_descriptors_per_stage[DSL_TYPE_UNIFORM_BUFFERS], phys_dev_props.limits.maxPerStageDescriptorUniformBuffers);
+    }
+
+    // Storage buffers
+    if (max_descriptors_per_stage[DSL_TYPE_STORAGE_BUFFERS] > phys_dev_props.limits.maxPerStageDescriptorStorageBuffers) {
+        skip |=
+            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                    "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00289",
+                    "vkCreatePipelineLayout(): max per-stage storage buffer bindings count (%d) exceeds device "
+                    "maxPerStageDescriptorStorageBuffers limit (%d).",
+                    max_descriptors_per_stage[DSL_TYPE_STORAGE_BUFFERS], phys_dev_props.limits.maxPerStageDescriptorStorageBuffers);
+    }
+
+    // Sampled images
+    if (max_descriptors_per_stage[DSL_TYPE_SAMPLED_IMAGES] > phys_dev_props.limits.maxPerStageDescriptorSampledImages) {
+        skip |=
+            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                    "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00290",
+                    "vkCreatePipelineLayout(): max per-stage sampled image bindings count (%d) exceeds device "
+                    "maxPerStageDescriptorSampledImages limit (%d).",
+                    max_descriptors_per_stage[DSL_TYPE_SAMPLED_IMAGES], phys_dev_props.limits.maxPerStageDescriptorSampledImages);
+    }
+
+    // Storage images
+    if (max_descriptors_per_stage[DSL_TYPE_STORAGE_IMAGES] > phys_dev_props.limits.maxPerStageDescriptorStorageImages) {
+        skip |=
+            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                    "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00291",
+                    "vkCreatePipelineLayout(): max per-stage storage image bindings count (%d) exceeds device "
+                    "maxPerStageDescriptorStorageImages limit (%d).",
+                    max_descriptors_per_stage[DSL_TYPE_STORAGE_IMAGES], phys_dev_props.limits.maxPerStageDescriptorStorageImages);
+    }
+
+    // Input attachments
+    if (max_descriptors_per_stage[DSL_TYPE_INPUT_ATTACHMENTS] > phys_dev_props.limits.maxPerStageDescriptorInputAttachments) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01676",
+                        "vkCreatePipelineLayout(): max per-stage input attachment bindings count (%d) exceeds device "
+                        "maxPerStageDescriptorInputAttachments limit (%d).",
+                        max_descriptors_per_stage[DSL_TYPE_INPUT_ATTACHMENTS],
+                        phys_dev_props.limits.maxPerStageDescriptorInputAttachments);
+    }
+
+    // Inline uniform blocks
+    if (max_descriptors_per_stage[DSL_TYPE_INLINE_UNIFORM_BLOCK] >
+        phys_dev_ext_props.inline_uniform_block_props.maxPerStageDescriptorInlineUniformBlocks) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkPipelineLayoutCreateInfo-descriptorType-02214",
+                        "vkCreatePipelineLayout(): max per-stage inline uniform block bindings count (%d) exceeds device "
+                        "maxPerStageDescriptorInlineUniformBlocks limit (%d).",
+                        max_descriptors_per_stage[DSL_TYPE_INLINE_UNIFORM_BLOCK],
+                        phys_dev_ext_props.inline_uniform_block_props.maxPerStageDescriptorInlineUniformBlocks);
+    }
+
+    // Total descriptors by type
+    //
+    std::map<uint32_t, uint32_t> sum_all_stages = GetDescriptorSum(set_layouts, true);
+    // Samplers
+    uint32_t sum = sum_all_stages[VK_DESCRIPTOR_TYPE_SAMPLER] + sum_all_stages[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER];
+    if (sum > phys_dev_props.limits.maxDescriptorSetSamplers) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01677",
+                        "vkCreatePipelineLayout(): sum of sampler bindings among all stages (%d) exceeds device "
+                        "maxDescriptorSetSamplers limit (%d).",
+                        sum, phys_dev_props.limits.maxDescriptorSetSamplers);
+    }
+
+    // Uniform buffers
+    if (sum_all_stages[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER] > phys_dev_props.limits.maxDescriptorSetUniformBuffers) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01678",
+                        "vkCreatePipelineLayout(): sum of uniform buffer bindings among all stages (%d) exceeds device "
+                        "maxDescriptorSetUniformBuffers limit (%d).",
+                        sum_all_stages[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER], phys_dev_props.limits.maxDescriptorSetUniformBuffers);
+    }
+
+    // Dynamic uniform buffers
+    if (sum_all_stages[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC] > phys_dev_props.limits.maxDescriptorSetUniformBuffersDynamic) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01679",
+                        "vkCreatePipelineLayout(): sum of dynamic uniform buffer bindings among all stages (%d) exceeds device "
+                        "maxDescriptorSetUniformBuffersDynamic limit (%d).",
+                        sum_all_stages[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC],
+                        phys_dev_props.limits.maxDescriptorSetUniformBuffersDynamic);
+    }
+
+    // Storage buffers
+    if (sum_all_stages[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER] > phys_dev_props.limits.maxDescriptorSetStorageBuffers) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01680",
+                        "vkCreatePipelineLayout(): sum of storage buffer bindings among all stages (%d) exceeds device "
+                        "maxDescriptorSetStorageBuffers limit (%d).",
+                        sum_all_stages[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER], phys_dev_props.limits.maxDescriptorSetStorageBuffers);
+    }
+
+    // Dynamic storage buffers
+    if (sum_all_stages[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC] > phys_dev_props.limits.maxDescriptorSetStorageBuffersDynamic) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01681",
+                        "vkCreatePipelineLayout(): sum of dynamic storage buffer bindings among all stages (%d) exceeds device "
+                        "maxDescriptorSetStorageBuffersDynamic limit (%d).",
+                        sum_all_stages[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC],
+                        phys_dev_props.limits.maxDescriptorSetStorageBuffersDynamic);
+    }
+
+    //  Sampled images
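+    // Combined image samplers and uniform texel buffers also count toward the maxDescriptorSetSampledImages limit.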
+    sum = sum_all_stages[VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE] + sum_all_stages[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER] +
+          sum_all_stages[VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER];
+    if (sum > phys_dev_props.limits.maxDescriptorSetSampledImages) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01682",
+                        "vkCreatePipelineLayout(): sum of sampled image bindings among all stages (%d) exceeds device "
+                        "maxDescriptorSetSampledImages limit (%d).",
+                        sum, phys_dev_props.limits.maxDescriptorSetSampledImages);
+    }
+
+    //  Storage images
+    sum = sum_all_stages[VK_DESCRIPTOR_TYPE_STORAGE_IMAGE] + sum_all_stages[VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER];
+    if (sum > phys_dev_props.limits.maxDescriptorSetStorageImages) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01683",
+                        "vkCreatePipelineLayout(): sum of storage image bindings among all stages (%d) exceeds device "
+                        "maxDescriptorSetStorageImages limit (%d).",
+                        sum, phys_dev_props.limits.maxDescriptorSetStorageImages);
+    }
+
+    // Input attachments
+    if (sum_all_stages[VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT] > phys_dev_props.limits.maxDescriptorSetInputAttachments) {
+        skip |=
+            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                    "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01684",
+                    "vkCreatePipelineLayout(): sum of input attachment bindings among all stages (%d) exceeds device "
+                    "maxDescriptorSetInputAttachments limit (%d).",
+                    sum_all_stages[VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT], phys_dev_props.limits.maxDescriptorSetInputAttachments);
+    }
+
+    // Inline uniform blocks
+    if (sum_all_stages[VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT] >
+        phys_dev_ext_props.inline_uniform_block_props.maxDescriptorSetInlineUniformBlocks) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkPipelineLayoutCreateInfo-descriptorType-02216",
+                        "vkCreatePipelineLayout(): sum of inline uniform block bindings among all stages (%d) exceeds device "
+                        "maxDescriptorSetInlineUniformBlocks limit (%d).",
+                        sum_all_stages[VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT],
+                        phys_dev_ext_props.inline_uniform_block_props.maxDescriptorSetInlineUniformBlocks);
+    }
+
+    if (device_extensions.vk_ext_descriptor_indexing) {
+        // XXX TODO: replace with correct VU messages
+
+        // Max descriptors by type, within a single pipeline stage
+        std::valarray<uint32_t> max_descriptors_per_stage_update_after_bind =
+            GetDescriptorCountMaxPerStage(&enabled_features, set_layouts, false);
+        // Samplers
+        if (max_descriptors_per_stage_update_after_bind[DSL_TYPE_SAMPLERS] >
+            phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindSamplers) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkPipelineLayoutCreateInfo-descriptorType-03022",
+                            "vkCreatePipelineLayout(): max per-stage sampler bindings count (%d) exceeds device "
+                            "maxPerStageDescriptorUpdateAfterBindSamplers limit (%d).",
+                            max_descriptors_per_stage_update_after_bind[DSL_TYPE_SAMPLERS],
+                            phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindSamplers);
+        }
+
+        // Uniform buffers
+        if (max_descriptors_per_stage_update_after_bind[DSL_TYPE_UNIFORM_BUFFERS] >
+            phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindUniformBuffers) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkPipelineLayoutCreateInfo-descriptorType-03023",
+                            "vkCreatePipelineLayout(): max per-stage uniform buffer bindings count (%d) exceeds device "
+                            "maxPerStageDescriptorUpdateAfterBindUniformBuffers limit (%d).",
+                            max_descriptors_per_stage_update_after_bind[DSL_TYPE_UNIFORM_BUFFERS],
+                            phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindUniformBuffers);
+        }
+
+        // Storage buffers
+        if (max_descriptors_per_stage_update_after_bind[DSL_TYPE_STORAGE_BUFFERS] >
+            phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindStorageBuffers) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkPipelineLayoutCreateInfo-descriptorType-03024",
+                            "vkCreatePipelineLayout(): max per-stage storage buffer bindings count (%d) exceeds device "
+                            "maxPerStageDescriptorUpdateAfterBindStorageBuffers limit (%d).",
+                            max_descriptors_per_stage_update_after_bind[DSL_TYPE_STORAGE_BUFFERS],
+                            phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindStorageBuffers);
+        }
+
+        // Sampled images
+        if (max_descriptors_per_stage_update_after_bind[DSL_TYPE_SAMPLED_IMAGES] >
+            phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindSampledImages) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkPipelineLayoutCreateInfo-descriptorType-03025",
+                            "vkCreatePipelineLayout(): max per-stage sampled image bindings count (%d) exceeds device "
+                            "maxPerStageDescriptorUpdateAfterBindSampledImages limit (%d).",
+                            max_descriptors_per_stage_update_after_bind[DSL_TYPE_SAMPLED_IMAGES],
+                            phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindSampledImages);
+        }
+
+        // Storage images
+        if (max_descriptors_per_stage_update_after_bind[DSL_TYPE_STORAGE_IMAGES] >
+            phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindStorageImages) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkPipelineLayoutCreateInfo-descriptorType-03026",
+                            "vkCreatePipelineLayout(): max per-stage storage image bindings count (%d) exceeds device "
+                            "maxPerStageDescriptorUpdateAfterBindStorageImages limit (%d).",
+                            max_descriptors_per_stage_update_after_bind[DSL_TYPE_STORAGE_IMAGES],
+                            phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindStorageImages);
+        }
+
+        // Input attachments
+        if (max_descriptors_per_stage_update_after_bind[DSL_TYPE_INPUT_ATTACHMENTS] >
+            phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindInputAttachments) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkPipelineLayoutCreateInfo-descriptorType-03027",
+                            "vkCreatePipelineLayout(): max per-stage input attachment bindings count (%d) exceeds device "
+                            "maxPerStageDescriptorUpdateAfterBindInputAttachments limit (%d).",
+                            max_descriptors_per_stage_update_after_bind[DSL_TYPE_INPUT_ATTACHMENTS],
+                            phys_dev_ext_props.descriptor_indexing_props.maxPerStageDescriptorUpdateAfterBindInputAttachments);
+        }
+
+        // Inline uniform blocks
+        if (max_descriptors_per_stage_update_after_bind[DSL_TYPE_INLINE_UNIFORM_BLOCK] >
+            phys_dev_ext_props.inline_uniform_block_props.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkPipelineLayoutCreateInfo-descriptorType-02215",
+                            "vkCreatePipelineLayout(): max per-stage inline uniform block bindings count (%d) exceeds device "
+                            "maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks limit (%d).",
+                            max_descriptors_per_stage_update_after_bind[DSL_TYPE_INLINE_UNIFORM_BLOCK],
+                            phys_dev_ext_props.inline_uniform_block_props.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks);
+        }
+
+        // Total descriptors by type, summed across all pipeline stages
+        std::map<uint32_t, uint32_t> sum_all_stages_update_after_bind = GetDescriptorSum(set_layouts, false);
+        // Samplers
+        sum = sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_SAMPLER] +
+              sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER];
+        if (sum > phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindSamplers) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03036",
+                            "vkCreatePipelineLayout(): sum of sampler bindings among all stages (%d) exceeds device "
+                            "maxDescriptorSetUpdateAfterBindSamplers limit (%d).",
+                            sum, phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindSamplers);
+        }
+
+        // Uniform buffers
+        if (sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER] >
+            phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindUniformBuffers) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03037",
+                            "vkCreatePipelineLayout(): sum of uniform buffer bindings among all stages (%d) exceeds device "
+                            "maxDescriptorSetUpdateAfterBindUniformBuffers limit (%d).",
+                            sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER],
+                            phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindUniformBuffers);
+        }
+
+        // Dynamic uniform buffers
+        if (sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC] >
+            phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03038",
+                            "vkCreatePipelineLayout(): sum of dynamic uniform buffer bindings among all stages (%d) exceeds device "
+                            "maxDescriptorSetUpdateAfterBindUniformBuffersDynamic limit (%d).",
+                            sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC],
+                            phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic);
+        }
+
+        // Storage buffers
+        if (sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER] >
+            phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindStorageBuffers) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03039",
+                            "vkCreatePipelineLayout(): sum of storage buffer bindings among all stages (%d) exceeds device "
+                            "maxDescriptorSetUpdateAfterBindStorageBuffers limit (%d).",
+                            sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER],
+                            phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindStorageBuffers);
+        }
+
+        // Dynamic storage buffers
+        if (sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC] >
+            phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03040",
+                            "vkCreatePipelineLayout(): sum of dynamic storage buffer bindings among all stages (%d) exceeds device "
+                            "maxDescriptorSetUpdateAfterBindStorageBuffersDynamic limit (%d).",
+                            sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC],
+                            phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic);
+        }
+
+        //  Sampled images
+        sum = sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE] +
+              sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER] +
+              sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER];
+        if (sum > phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindSampledImages) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03041",
+                            "vkCreatePipelineLayout(): sum of sampled image bindings among all stages (%d) exceeds device "
+                            "maxDescriptorSetUpdateAfterBindSampledImages limit (%d).",
+                            sum, phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindSampledImages);
+        }
+
+        //  Storage images
+        sum = sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_STORAGE_IMAGE] +
+              sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER];
+        if (sum > phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindStorageImages) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03042",
+                            "vkCreatePipelineLayout(): sum of storage image bindings among all stages (%d) exceeds device "
+                            "maxDescriptorSetUpdateAfterBindStorageImages limit (%d).",
+                            sum, phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindStorageImages);
+        }
+
+        // Input attachments
+        if (sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT] >
+            phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindInputAttachments) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03043",
+                            "vkCreatePipelineLayout(): sum of input attachment bindings among all stages (%d) exceeds device "
+                            "maxDescriptorSetUpdateAfterBindInputAttachments limit (%d).",
+                            sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT],
+                            phys_dev_ext_props.descriptor_indexing_props.maxDescriptorSetUpdateAfterBindInputAttachments);
+        }
+
+        // Inline uniform blocks
+        if (sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT] >
+            phys_dev_ext_props.inline_uniform_block_props.maxDescriptorSetUpdateAfterBindInlineUniformBlocks) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkPipelineLayoutCreateInfo-descriptorType-02217",
+                            "vkCreatePipelineLayout(): sum of inline uniform block bindings among all stages (%d) exceeds device "
+                            "maxDescriptorSetUpdateAfterBindInlineUniformBlocks limit (%d).",
+                            sum_all_stages_update_after_bind[VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT],
+                            phys_dev_ext_props.inline_uniform_block_props.maxDescriptorSetUpdateAfterBindInlineUniformBlocks);
+        }
+    }
+    return skip;
+}
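+
+// Illustrative sketch (hypothetical application code, not part of this change): how the
+// descriptor totals checked above can be exceeded. If maxDescriptorSetSamplers were 96,
+// passing two set layouts that each declare a 64-element VK_DESCRIPTOR_TYPE_SAMPLER
+// binding to vkCreatePipelineLayout() sums to 128 and trips the sampler-total check:
+//
+//   VkDescriptorSetLayoutBinding binding = {};
+//   binding.binding = 0;
+//   binding.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
+//   binding.descriptorCount = 64;
+//   binding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+//   // Create two identical VkDescriptorSetLayouts from this binding and list both in
+//   // VkPipelineLayoutCreateInfo::pSetLayouts; 64 + 64 = 128 > 96 is reported above.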
+
+bool CoreChecks::PreCallValidateResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
+                                                    VkDescriptorPoolResetFlags flags) const {
+    // Make sure sets being destroyed are not currently in-use
+    if (disabled.idle_descriptor_set) return false;
+    bool skip = false;
+    const DESCRIPTOR_POOL_STATE *pPool = GetDescriptorPoolState(descriptorPool);
+    if (pPool != nullptr) {
+        for (auto ds : pPool->sets) {
+            if (ds && ds->in_use.load()) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
+                                HandleToUint64(descriptorPool), "VUID-vkResetDescriptorPool-descriptorPool-00313",
+                                "It is invalid to call vkResetDescriptorPool() with descriptor sets in use by a command buffer.");
+                if (skip) break;
+            }
+        }
+    }
+    return skip;
+}
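+
+// Illustrative sketch (hypothetical application code; 'queue', 'pool', and 'fence' are
+// placeholders): the in-use case rejected above. Resetting a pool while descriptor sets
+// allocated from it are still referenced by a submitted, unfinished command buffer is
+// invalid:
+//
+//   vkQueueSubmit(queue, 1, &submit_info, fence);  // command buffer uses sets from 'pool'
+//   vkResetDescriptorPool(device, pool, 0);        // invalid until the submission completes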
+
+// Ensure the pool contains enough descriptors and descriptor sets to satisfy an allocation
+// request. Fills the AllocateDescriptorSetsData (ads_state) with the total number of descriptors
+// of each type required, as well as the DescriptorSetLayout pointers used for the later update.
+bool CoreChecks::PreCallValidateAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
+                                                       VkDescriptorSet *pDescriptorSets, void *ads_state_data) const {
+    StateTracker::PreCallValidateAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets, ads_state_data);
+
+    cvdescriptorset::AllocateDescriptorSetsData *ads_state =
+        reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
+    // All state checks for AllocateDescriptorSets are done in a single function
+    return ValidateAllocateDescriptorSets(pAllocateInfo, ads_state);
+}
+
+bool CoreChecks::PreCallValidateFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
+                                                   const VkDescriptorSet *pDescriptorSets) const {
+    bool skip = false;
+    // First make sure sets being destroyed are not currently in-use
+    for (uint32_t i = 0; i < count; ++i) {
+        if (pDescriptorSets[i] != VK_NULL_HANDLE) {
+            skip |= ValidateIdleDescriptorSet(pDescriptorSets[i], "vkFreeDescriptorSets");
+        }
+    }
+    const DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
+    if (pool_state && !(VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT & pool_state->createInfo.flags)) {
+        // Can't Free from a NON_FREE pool
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
+                        HandleToUint64(descriptorPool), "VUID-vkFreeDescriptorSets-descriptorPool-00312",
+                        "It is invalid to call vkFreeDescriptorSets() with a pool created without setting "
+                        "VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT.");
+    }
+    return skip;
+}
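+
+// Illustrative sketch (hypothetical application code): freeing sets from a pool created
+// without VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT is rejected above; such a pool
+// may only be recycled with vkResetDescriptorPool():
+//
+//   VkDescriptorPoolCreateInfo pool_ci = {VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO};
+//   pool_ci.flags = 0;  // FREE_DESCRIPTOR_SET_BIT deliberately omitted
+//   ...
+//   vkFreeDescriptorSets(device, pool, 1, &set);  // triggers the error above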
+
+bool CoreChecks::PreCallValidateUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
+                                                     const VkWriteDescriptorSet *pDescriptorWrites, uint32_t descriptorCopyCount,
+                                                     const VkCopyDescriptorSet *pDescriptorCopies) const {
+    // UpdateDescriptorSets operates on a number of descriptor sets, so a single up-front map
+    // look-up is not possible; the look-ups are performed individually in the functions below.
+
+    // Only validate state here; no state updates are performed in this function. Because there is
+    // no single descriptor-set instance at this point, a namespace-level helper parses the
+    // parameters and dispatches to the specific class instances.
+    return ValidateUpdateDescriptorSets(descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies,
+                                        "vkUpdateDescriptorSets()");
+}
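+
+// Illustrative sketch (hypothetical application code; handles are placeholders): a typical
+// vkUpdateDescriptorSets() call that the helper above validates, writing one uniform buffer
+// into binding 0 of 'set':
+//
+//   VkDescriptorBufferInfo buffer_info = {buffer, 0, VK_WHOLE_SIZE};
+//   VkWriteDescriptorSet write = {VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET};
+//   write.dstSet = set;
+//   write.dstBinding = 0;
+//   write.descriptorCount = 1;
+//   write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+//   write.pBufferInfo = &buffer_info;
+//   vkUpdateDescriptorSets(device, 1, &write, 0, nullptr);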
+
+bool CoreChecks::PreCallValidateBeginCommandBuffer(VkCommandBuffer commandBuffer,
+                                                   const VkCommandBufferBeginInfo *pBeginInfo) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    if (!cb_state) return false;
+    bool skip = false;
+    if (cb_state->in_use.load()) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkBeginCommandBuffer-commandBuffer-00049",
+                        "Calling vkBeginCommandBuffer() on active %s before it has completed. You must check "
+                        "command buffer fence before this call.",
+                        report_data->FormatHandle(commandBuffer).c_str());
+    }
+    if (cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
+        // Secondary Command Buffer
+        const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;
+        if (!pInfo) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(commandBuffer), "VUID-vkBeginCommandBuffer-commandBuffer-00051",
+                            "vkBeginCommandBuffer(): Secondary %s must have inheritance info.",
+                            report_data->FormatHandle(commandBuffer).c_str());
+        } else {
+            if (pBeginInfo->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) {
+                assert(pInfo->renderPass);
+                const auto *framebuffer = GetFramebufferState(pInfo->framebuffer);
+                if (framebuffer) {
+                    if (framebuffer->createInfo.renderPass != pInfo->renderPass) {
+                        const auto *render_pass = GetRenderPassState(pInfo->renderPass);
+                        // renderPass that framebuffer was created with must be compatible with local renderPass
+                        skip |= ValidateRenderPassCompatibility("framebuffer", framebuffer->rp_state.get(), "command buffer",
+                                                                render_pass, "vkBeginCommandBuffer()",
+                                                                "VUID-VkCommandBufferBeginInfo-flags-00055");
+                    }
+                }
+            }
+            if ((pInfo->occlusionQueryEnable == VK_FALSE || enabled_features.core.occlusionQueryPrecise == VK_FALSE) &&
+                (pInfo->queryFlags & VK_QUERY_CONTROL_PRECISE_BIT)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(commandBuffer), "VUID-vkBeginCommandBuffer-commandBuffer-00052",
+                                "vkBeginCommandBuffer(): Secondary %s must not have VK_QUERY_CONTROL_PRECISE_BIT if "
+                                "occlusionQuery is disabled or the device does not support precise occlusion queries.",
+                                report_data->FormatHandle(commandBuffer).c_str());
+            }
+        }
+        if (pInfo && pInfo->renderPass != VK_NULL_HANDLE) {
+            const auto *renderPass = GetRenderPassState(pInfo->renderPass);
+            if (renderPass) {
+                if (pInfo->subpass >= renderPass->createInfo.subpassCount) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                    HandleToUint64(commandBuffer), "VUID-VkCommandBufferBeginInfo-flags-00054",
+                                    "vkBeginCommandBuffer(): Secondary %s must have a subpass index (%d) that is "
+                                    "less than the number of subpasses (%d).",
+                                    report_data->FormatHandle(commandBuffer).c_str(), pInfo->subpass,
+                                    renderPass->createInfo.subpassCount);
+                }
+            }
+        }
+    }
+    if (CB_RECORDING == cb_state->state) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkBeginCommandBuffer-commandBuffer-00049",
+                        "vkBeginCommandBuffer(): Cannot call Begin on %s in the RECORDING state. Must first call "
+                        "vkEndCommandBuffer().",
+                        report_data->FormatHandle(commandBuffer).c_str());
+    } else if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
+        VkCommandPool cmdPool = cb_state->createInfo.commandPool;
+        const auto *pPool = cb_state->command_pool.get();
+        if (!(VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT & pPool->createFlags)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(commandBuffer), "VUID-vkBeginCommandBuffer-commandBuffer-00050",
+                            "Call to vkBeginCommandBuffer() on %s attempts to implicitly reset cmdBuffer created from "
+                            "%s that does NOT have the VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT bit set.",
+                            report_data->FormatHandle(commandBuffer).c_str(), report_data->FormatHandle(cmdPool).c_str());
+        }
+    }
+    auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
+    if (chained_device_group_struct) {
+        skip |= ValidateDeviceMaskToPhysicalDeviceCount(
+            chained_device_group_struct->deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, HandleToUint64(commandBuffer),
+            "VUID-VkDeviceGroupCommandBufferBeginInfo-deviceMask-00106");
+        skip |=
+            ValidateDeviceMaskToZero(chained_device_group_struct->deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                     HandleToUint64(commandBuffer), "VUID-VkDeviceGroupCommandBufferBeginInfo-deviceMask-00107");
+    }
+    return skip;
+}
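+
+// Illustrative sketch (hypothetical application code): beginning a secondary command buffer
+// without inheritance info is one of the cases rejected above:
+//
+//   VkCommandBufferBeginInfo begin_info = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO};
+//   begin_info.pInheritanceInfo = nullptr;              // required for secondary buffers
+//   vkBeginCommandBuffer(secondary_cmd_buffer, &begin_info);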
+
+bool CoreChecks::PreCallValidateEndCommandBuffer(VkCommandBuffer commandBuffer) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    if (!cb_state) return false;
+    bool skip = false;
+    if ((VK_COMMAND_BUFFER_LEVEL_PRIMARY == cb_state->createInfo.level) ||
+        !(cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
+        // This needs spec clarification to update valid usage, see comments in PR:
+        // https://github.com/KhronosGroup/Vulkan-ValidationLayers/issues/165
+        skip |= InsideRenderPass(cb_state, "vkEndCommandBuffer()", "VUID-vkEndCommandBuffer-commandBuffer-00060");
+    }
+
+    skip |= ValidateCmd(cb_state, CMD_ENDCOMMANDBUFFER, "vkEndCommandBuffer()");
+    for (auto query : cb_state->activeQueries) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkEndCommandBuffer-commandBuffer-00061",
+                        "Ending command buffer with in progress query: %s, query %d.",
+                        report_data->FormatHandle(query.pool).c_str(), query.query);
+    }
+    return skip;
+}
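+
+// Illustrative sketch (hypothetical application code): ending a command buffer while a query
+// is still active is reported by the loop over activeQueries above:
+//
+//   vkCmdBeginQuery(cmd, query_pool, 0, 0);
+//   // ... missing vkCmdEndQuery(cmd, query_pool, 0);
+//   vkEndCommandBuffer(cmd);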
+
+bool CoreChecks::PreCallValidateResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags) const {
+    bool skip = false;
+    const CMD_BUFFER_STATE *pCB = GetCBState(commandBuffer);
+    if (!pCB) return false;
+    VkCommandPool cmdPool = pCB->createInfo.commandPool;
+    const auto *pPool = pCB->command_pool.get();
+
+    if (!(VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT & pPool->createFlags)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkResetCommandBuffer-commandBuffer-00046",
+                        "Attempt to reset %s created from %s that does NOT have the "
+                        "VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT bit set.",
+                        report_data->FormatHandle(commandBuffer).c_str(), report_data->FormatHandle(cmdPool).c_str());
+    }
+    skip |= CheckCommandBufferInFlight(pCB, "reset", "VUID-vkResetCommandBuffer-commandBuffer-00045");
+
+    return skip;
+}
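+
+// Illustrative sketch (hypothetical application code): explicitly resetting a command buffer
+// whose pool lacks VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT is rejected above; only
+// vkResetCommandPool() is allowed in that case:
+//
+//   VkCommandPoolCreateInfo pool_ci = {VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO};
+//   pool_ci.flags = 0;  // RESET_COMMAND_BUFFER_BIT deliberately omitted
+//   ...
+//   vkResetCommandBuffer(cmd, 0);  // triggers the error above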
+
+static const char *GetPipelineTypeName(VkPipelineBindPoint pipelineBindPoint) {
+    switch (pipelineBindPoint) {
+        case VK_PIPELINE_BIND_POINT_GRAPHICS:
+            return "graphics";
+        case VK_PIPELINE_BIND_POINT_COMPUTE:
+            return "compute";
+        case VK_PIPELINE_BIND_POINT_RAY_TRACING_NV:
+            return "ray-tracing";
+        default:
+            return "unknown";
+    }
+}
+
+bool CoreChecks::PreCallValidateCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
+                                                VkPipeline pipeline) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+
+    bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdBindPipeline()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+                                      "VUID-vkCmdBindPipeline-commandBuffer-cmdpool");
+    skip |= ValidateCmd(cb_state, CMD_BINDPIPELINE, "vkCmdBindPipeline()");
+    static const std::map<VkPipelineBindPoint, std::string> bindpoint_errors = {
+        std::make_pair(VK_PIPELINE_BIND_POINT_GRAPHICS, "VUID-vkCmdBindPipeline-pipelineBindPoint-00777"),
+        std::make_pair(VK_PIPELINE_BIND_POINT_COMPUTE, "VUID-vkCmdBindPipeline-pipelineBindPoint-00778"),
+        std::make_pair(VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, "VUID-vkCmdBindPipeline-pipelineBindPoint-02391")};
+
+    skip |= ValidatePipelineBindPoint(cb_state, pipelineBindPoint, "vkCmdBindPipeline()", bindpoint_errors);
+
+    const auto *pipeline_state = GetPipelineState(pipeline);
+    assert(pipeline_state);
+
+    const auto &pipeline_state_bind_point = pipeline_state->getPipelineType();
+
+    if (pipelineBindPoint != pipeline_state_bind_point) {
+        if (pipelineBindPoint == VK_PIPELINE_BIND_POINT_GRAPHICS) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdBindPipeline-pipelineBindPoint-00779",
+                            "Cannot bind a pipeline of type %s to the graphics pipeline bind point",
+                            GetPipelineTypeName(pipeline_state_bind_point));
+        } else if (pipelineBindPoint == VK_PIPELINE_BIND_POINT_COMPUTE) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdBindPipeline-pipelineBindPoint-00780",
+                            "Cannot bind a pipeline of type %s to the compute pipeline bind point",
+                            GetPipelineTypeName(pipeline_state_bind_point));
+        } else if (pipelineBindPoint == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdBindPipeline-pipelineBindPoint-02392",
+                            "Cannot bind a pipeline of type %s to the ray-tracing pipeline bind point",
+                            GetPipelineTypeName(pipeline_state_bind_point));
+        }
+    }
+
+    return skip;
+}
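+
+// Illustrative sketch (hypothetical application code): binding a pipeline to the wrong bind
+// point, e.g. a compute pipeline to the graphics bind point, is reported above using the
+// type names from GetPipelineTypeName():
+//
+//   vkCmdBindPipeline(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, compute_pipeline);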
+
+bool CoreChecks::PreCallValidateCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount,
+                                               const VkViewport *pViewports) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    bool skip =
+        ValidateCmdQueueFlags(cb_state, "vkCmdSetViewport()", VK_QUEUE_GRAPHICS_BIT, "VUID-vkCmdSetViewport-commandBuffer-cmdpool");
+    skip |= ValidateCmd(cb_state, CMD_SETVIEWPORT, "vkCmdSetViewport()");
+    if (cb_state->static_status & CBSTATUS_VIEWPORT_SET) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdSetViewport-None-01221",
+                        "vkCmdSetViewport(): pipeline was created without VK_DYNAMIC_STATE_VIEWPORT flag.");
+    }
+    return skip;
+}
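+
+// Illustrative sketch (hypothetical application code): calling vkCmdSetViewport() while the
+// bound pipeline was created with a static viewport (VK_DYNAMIC_STATE_VIEWPORT not listed in
+// VkPipelineDynamicStateCreateInfo::pDynamicStates) is rejected above:
+//
+//   VkViewport viewport = {0.0f, 0.0f, 640.0f, 480.0f, 0.0f, 1.0f};
+//   vkCmdSetViewport(cmd, 0, 1, &viewport);  // invalid with a static-viewport pipeline bound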
+
+bool CoreChecks::PreCallValidateCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
+                                              const VkRect2D *pScissors) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    bool skip =
+        ValidateCmdQueueFlags(cb_state, "vkCmdSetScissor()", VK_QUEUE_GRAPHICS_BIT, "VUID-vkCmdSetScissor-commandBuffer-cmdpool");
+    skip |= ValidateCmd(cb_state, CMD_SETSCISSOR, "vkCmdSetScissor()");
+    if (cb_state->static_status & CBSTATUS_SCISSOR_SET) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdSetScissor-None-00590",
+                        "vkCmdSetScissor(): pipeline was created without VK_DYNAMIC_STATE_SCISSOR flag.");
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
+                                                         uint32_t exclusiveScissorCount, const VkRect2D *pExclusiveScissors) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetExclusiveScissorNV()", VK_QUEUE_GRAPHICS_BIT,
+                                      "VUID-vkCmdSetExclusiveScissorNV-commandBuffer-cmdpool");
+    skip |= ValidateCmd(cb_state, CMD_SETEXCLUSIVESCISSORNV, "vkCmdSetExclusiveScissorNV()");
+    if (cb_state->static_status & CBSTATUS_EXCLUSIVE_SCISSOR_SET) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdSetExclusiveScissorNV-None-02032",
+                        "vkCmdSetExclusiveScissorNV(): pipeline was created without VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV flag.");
+    }
+
+    if (!enabled_features.exclusive_scissor.exclusiveScissor) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdSetExclusiveScissorNV-None-02031",
+                        "vkCmdSetExclusiveScissorNV: The exclusiveScissor feature is disabled.");
+    }
+
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
+                                                          VkImageLayout imageLayout) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdBindShadingRateImageNV()", VK_QUEUE_GRAPHICS_BIT,
+                                      "VUID-vkCmdBindShadingRateImageNV-commandBuffer-cmdpool");
+
+    skip |= ValidateCmd(cb_state, CMD_BINDSHADINGRATEIMAGENV, "vkCmdBindShadingRateImageNV()");
+
+    if (!enabled_features.shading_rate_image.shadingRateImage) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdBindShadingRateImageNV-None-02058",
+                        "vkCmdBindShadingRateImageNV: The shadingRateImage feature is disabled.");
+    }
+
+    if (imageView != VK_NULL_HANDLE) {
+        const auto view_state = GetImageViewState(imageView);
+
+        // Only dereference view_state after the null check below.
+        if (!view_state || (view_state->create_info.viewType != VK_IMAGE_VIEW_TYPE_2D &&
+                            view_state->create_info.viewType != VK_IMAGE_VIEW_TYPE_2D_ARRAY)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
+                            HandleToUint64(imageView), "VUID-vkCmdBindShadingRateImageNV-imageView-02059",
+                            "vkCmdBindShadingRateImageNV: If imageView is not VK_NULL_HANDLE, it must be a valid "
+                            "VkImageView handle of type VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY.");
+        }
+
+        if (view_state && view_state->create_info.format != VK_FORMAT_R8_UINT) {
+            skip |= log_msg(
+                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, HandleToUint64(imageView),
+                "VUID-vkCmdBindShadingRateImageNV-imageView-02060",
+                "vkCmdBindShadingRateImageNV: If imageView is not VK_NULL_HANDLE, it must have a format of VK_FORMAT_R8_UINT.");
+        }
+
+        const VkImageCreateInfo *ici = view_state ? &GetImageState(view_state->create_info.image)->createInfo : nullptr;
+        if (ici && !(ici->usage & VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
+                            HandleToUint64(imageView), "VUID-vkCmdBindShadingRateImageNV-imageView-02061",
+                            "vkCmdBindShadingRateImageNV: If imageView is not VK_NULL_HANDLE, the image must have been "
+                            "created with VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV set.");
+        }
+
+        if (view_state) {
+            const auto image_state = GetImageState(view_state->create_info.image);
+            bool hit_error = false;
+
+            // XXX TODO: While the VUID says "each subresource", only the base mip level is
+            // actually used. Since we don't have an existing convenience function to iterate
+            // over all mip levels, just don't bother with non-base levels.
+            const VkImageSubresourceRange &range = view_state->create_info.subresourceRange;
+            VkImageSubresourceLayers subresource = {range.aspectMask, range.baseMipLevel, range.baseArrayLayer, range.layerCount};
+
+            if (image_state) {
+                skip |= VerifyImageLayout(cb_state, image_state, subresource, imageLayout, VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV,
+                                          "vkCmdBindShadingRateImageNV()", "VUID-vkCmdBindShadingRateImageNV-imageLayout-02063",
+                                          "VUID-vkCmdBindShadingRateImageNV-imageView-02062", &hit_error);
+            }
+        }
+    }
+
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
+                                                                   uint32_t viewportCount,
+                                                                   const VkShadingRatePaletteNV *pShadingRatePalettes) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetViewportShadingRatePaletteNV()", VK_QUEUE_GRAPHICS_BIT,
+                                      "VUID-vkCmdSetViewportShadingRatePaletteNV-commandBuffer-cmdpool");
+
+    skip |= ValidateCmd(cb_state, CMD_SETVIEWPORTSHADINGRATEPALETTENV, "vkCmdSetViewportShadingRatePaletteNV()");
+
+    if (!enabled_features.shading_rate_image.shadingRateImage) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdSetViewportShadingRatePaletteNV-None-02064",
+                        "vkCmdSetViewportShadingRatePaletteNV: The shadingRateImage feature is disabled.");
+    }
+
+    if (cb_state->static_status & CBSTATUS_SHADING_RATE_PALETTE_SET) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdSetViewportShadingRatePaletteNV-None-02065",
+                        "vkCmdSetViewportShadingRatePaletteNV(): pipeline was created without "
+                        "VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV flag.");
+    }
+
+    for (uint32_t i = 0; i < viewportCount; ++i) {
+        auto *palette = &pShadingRatePalettes[i];
+        if (palette->shadingRatePaletteEntryCount == 0 ||
+            palette->shadingRatePaletteEntryCount > phys_dev_ext_props.shading_rate_image_props.shadingRatePaletteSize) {
+            skip |= log_msg(
+                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                HandleToUint64(commandBuffer), "VUID-VkShadingRatePaletteNV-shadingRatePaletteEntryCount-02071",
+                "vkCmdSetViewportShadingRatePaletteNV: shadingRatePaletteEntryCount must be between 1 and shadingRatePaletteSize.");
+        }
+    }
+
+    return skip;
+}
+
+bool CoreChecks::ValidateGeometryTrianglesNV(const VkGeometryTrianglesNV &triangles, VkDebugReportObjectTypeEXT object_type,
+                                             uint64_t object_handle, const char *func_name) const {
+    bool skip = false;
+
+    const BUFFER_STATE *vb_state = GetBufferState(triangles.vertexData);
+    if (vb_state != nullptr && vb_state->binding.size <= triangles.vertexOffset) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
+                        "VUID-VkGeometryTrianglesNV-vertexOffset-02428", "%s", func_name);
+    }
+
+    const BUFFER_STATE *ib_state = GetBufferState(triangles.indexData);
+    if (ib_state != nullptr && ib_state->binding.size <= triangles.indexOffset) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
+                        "VUID-VkGeometryTrianglesNV-indexOffset-02431", "%s", func_name);
+    }
+
+    const BUFFER_STATE *td_state = GetBufferState(triangles.transformData);
+    if (td_state != nullptr && td_state->binding.size <= triangles.transformOffset) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
+                        "VUID-VkGeometryTrianglesNV-transformOffset-02437", "%s", func_name);
+    }
+
+    return skip;
+}
+
+bool CoreChecks::ValidateGeometryAABBNV(const VkGeometryAABBNV &aabbs, VkDebugReportObjectTypeEXT object_type,
+                                        uint64_t object_handle, const char *func_name) const {
+    bool skip = false;
+
+    const BUFFER_STATE *aabb_state = GetBufferState(aabbs.aabbData);
+    if (aabb_state != nullptr && aabb_state->binding.size > 0 && aabb_state->binding.size <= aabbs.offset) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
+                        "VUID-VkGeometryAABBNV-offset-02439", "%s", func_name);
+    }
+
+    return skip;
+}
+
+bool CoreChecks::ValidateGeometryNV(const VkGeometryNV &geometry, VkDebugReportObjectTypeEXT object_type, uint64_t object_handle,
+                                    const char *func_name) const {
+    bool skip = false;
+    if (geometry.geometryType == VK_GEOMETRY_TYPE_TRIANGLES_NV) {
+        skip = ValidateGeometryTrianglesNV(geometry.geometry.triangles, object_type, object_handle, func_name);
+    } else if (geometry.geometryType == VK_GEOMETRY_TYPE_AABBS_NV) {
+        skip = ValidateGeometryAABBNV(geometry.geometry.aabbs, object_type, object_handle, func_name);
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCreateAccelerationStructureNV(VkDevice device,
+                                                              const VkAccelerationStructureCreateInfoNV *pCreateInfo,
+                                                              const VkAllocationCallbacks *pAllocator,
+                                                              VkAccelerationStructureNV *pAccelerationStructure) const {
+    bool skip = false;
+    if (pCreateInfo != nullptr && pCreateInfo->info.type == VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV) {
+        for (uint32_t i = 0; i < pCreateInfo->info.geometryCount; i++) {
+            skip |= ValidateGeometryNV(pCreateInfo->info.pGeometries[i], VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                                       HandleToUint64(device), "vkCreateAccelerationStructureNV():");
+        }
+    }
+    return skip;
+}
+
+bool CoreChecks::ValidateBindAccelerationStructureMemoryNV(VkDevice device,
+                                                           const VkBindAccelerationStructureMemoryInfoNV &info) const {
+    bool skip = false;
+
+    const ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(info.accelerationStructure);
+    if (!as_state) {
+        return skip;
+    }
+    uint64_t as_handle = HandleToUint64(info.accelerationStructure);
+    if (!as_state->GetBoundMemory().empty()) {
+        skip |=
+            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
+                    as_handle, "VUID-VkBindAccelerationStructureMemoryInfoNV-accelerationStructure-02450",
+                    "vkBindAccelerationStructureMemoryNV(): accelerationStructure must not already be backed by a memory object.");
+    }
+
+    // Validate bound memory range information
+    const auto mem_info = GetDevMemState(info.memory);
+    if (mem_info) {
+        skip |= ValidateInsertAccelerationStructureMemoryRange(info.accelerationStructure, mem_info, info.memoryOffset,
+                                                               as_state->memory_requirements.memoryRequirements,
+                                                               "vkBindAccelerationStructureMemoryNV()");
+        skip |= ValidateMemoryTypes(mem_info, as_state->memory_requirements.memoryRequirements.memoryTypeBits,
+                                    "vkBindAccelerationStructureMemoryNV()",
+                                    "VUID-VkBindAccelerationStructureMemoryInfoNV-memory-02593");
+    }
+
+    // Validate memory requirements alignment
+    if (SafeModulo(info.memoryOffset, as_state->memory_requirements.memoryRequirements.alignment) != 0) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
+                        as_handle, "VUID-VkBindAccelerationStructureMemoryInfoNV-memoryOffset-02594",
+                        "vkBindAccelerationStructureMemoryNV(): memoryOffset is 0x%" PRIxLEAST64
+                        " but must be an integer multiple of the VkMemoryRequirements::alignment value 0x%" PRIxLEAST64
+                        ", returned from a call to vkGetAccelerationStructureMemoryRequirementsNV with accelerationStructure "
+                        "and type of VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV.",
+                        info.memoryOffset, as_state->memory_requirements.memoryRequirements.alignment);
+    }
+
+    if (mem_info) {
+        // Validate memory requirements size
+        if (as_state->memory_requirements.memoryRequirements.size > (mem_info->alloc_info.allocationSize - info.memoryOffset)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, as_handle,
+                            "VUID-VkBindAccelerationStructureMemoryInfoNV-size-02595",
+                            "vkBindAccelerationStructureMemoryNV(): memory size minus memoryOffset is 0x%" PRIxLEAST64
+                            " but must be at least as large as VkMemoryRequirements::size value 0x%" PRIxLEAST64
+                            ", returned from a call to vkGetAccelerationStructureMemoryRequirementsNV with accelerationStructure "
+                            "and type of VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV.",
+                            mem_info->alloc_info.allocationSize - info.memoryOffset,
+                            as_state->memory_requirements.memoryRequirements.size);
+        }
+    }
+
+    return skip;
+}
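+
+// Illustrative sketch (hypothetical application code and values): the alignment check above.
+// With a reported VkMemoryRequirements::alignment of 256, a memoryOffset of 260 is rejected
+// because 260 % 256 != 0:
+//
+//   VkBindAccelerationStructureMemoryInfoNV bind_info = {
+//       VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV};
+//   bind_info.accelerationStructure = as;
+//   bind_info.memory = memory;
+//   bind_info.memoryOffset = 260;  // not a multiple of the required alignment
+//   vkBindAccelerationStructureMemoryNV(device, 1, &bind_info);
+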
+bool CoreChecks::PreCallValidateBindAccelerationStructureMemoryNV(VkDevice device, uint32_t bindInfoCount,
+                                                                  const VkBindAccelerationStructureMemoryInfoNV *pBindInfos) const {
+    bool skip = false;
+    for (uint32_t i = 0; i < bindInfoCount; i++) {
+        skip |= ValidateBindAccelerationStructureMemoryNV(device, pBindInfos[i]);
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateGetAccelerationStructureHandleNV(VkDevice device, VkAccelerationStructureNV accelerationStructure,
+                                                                 size_t dataSize, void *pData) const {
+    bool skip = false;
+
+    const ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(accelerationStructure);
+    if (as_state != nullptr) {
+        // TODO: update the fake VUID below once the real one is generated.
+        skip = ValidateMemoryIsBoundToAccelerationStructure(
+            as_state, "vkGetAccelerationStructureHandleNV",
+            "UNASSIGNED-vkGetAccelerationStructureHandleNV-accelerationStructure-XXXX");
+    }
+
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer,
+                                                                const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData,
+                                                                VkDeviceSize instanceOffset, VkBool32 update,
+                                                                VkAccelerationStructureNV dst, VkAccelerationStructureNV src,
+                                                                VkBuffer scratch, VkDeviceSize scratchOffset) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdBuildAccelerationStructureNV()", VK_QUEUE_COMPUTE_BIT,
+                                      "VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-cmdpool");
+
+    skip |= ValidateCmd(cb_state, CMD_BUILDACCELERATIONSTRUCTURENV, "vkCmdBuildAccelerationStructureNV()");
+
+    if (pInfo != nullptr && pInfo->type == VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV) {
+        for (uint32_t i = 0; i < pInfo->geometryCount; i++) {
+            skip |= ValidateGeometryNV(pInfo->pGeometries[i], VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
+                                       HandleToUint64(device), "vkCmdBuildAccelerationStructureNV():");
+        }
+    }
+
+    if (pInfo != nullptr && pInfo->geometryCount > phys_dev_ext_props.ray_tracing_props.maxGeometryCount) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-geometryCount-02241",
+                        "vkCmdBuildAccelerationStructureNV(): geometryCount [%d] must be less than or equal to "
+                        "VkPhysicalDeviceRayTracingPropertiesNV::maxGeometryCount.",
+                        pInfo->geometryCount);
+    }
+
+    const ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
+    const ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
+    const BUFFER_STATE *scratch_buffer_state = GetBufferState(scratch);
+
+    if (dst_as_state != nullptr && pInfo != nullptr) {
+        if (dst_as_state->create_info.info.type != pInfo->type) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-dst-02488",
+                            "vkCmdBuildAccelerationStructureNV(): create info VkAccelerationStructureInfoNV::type "
+                            "[%s] must be identical to build info VkAccelerationStructureInfoNV::type [%s].",
+                            string_VkAccelerationStructureTypeNV(dst_as_state->create_info.info.type),
+                            string_VkAccelerationStructureTypeNV(pInfo->type));
+        }
+        if (dst_as_state->create_info.info.flags != pInfo->flags) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-dst-02488",
+                            "vkCmdBuildAccelerationStructureNV(): create info VkAccelerationStructureInfoNV::flags "
+                            "[0x%X] must be identical to build info VkAccelerationStructureInfoNV::flags [0x%X].",
+                            dst_as_state->create_info.info.flags, pInfo->flags);
+        }
+        if (dst_as_state->create_info.info.instanceCount < pInfo->instanceCount) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-dst-02488",
+                            "vkCmdBuildAccelerationStructureNV(): create info VkAccelerationStructureInfoNV::instanceCount "
+                            "[%d] must be greater than or equal to build info VkAccelerationStructureInfoNV::instanceCount [%d].",
+                            dst_as_state->create_info.info.instanceCount, pInfo->instanceCount);
+        }
+        if (dst_as_state->create_info.info.geometryCount < pInfo->geometryCount) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-dst-02488",
+                            "vkCmdBuildAccelerationStructureNV(): create info VkAccelerationStructureInfoNV::geometryCount "
+                            "[%d] must be greater than or equal to build info VkAccelerationStructureInfoNV::geometryCount [%d].",
+                            dst_as_state->create_info.info.geometryCount, pInfo->geometryCount);
+        } else {
+            for (uint32_t i = 0; i < pInfo->geometryCount; i++) {
+                const VkGeometryDataNV &create_geometry_data = dst_as_state->create_info.info.pGeometries[i].geometry;
+                const VkGeometryDataNV &build_geometry_data = pInfo->pGeometries[i].geometry;
+                if (create_geometry_data.triangles.vertexCount < build_geometry_data.triangles.vertexCount) {
+                    skip |= log_msg(
+                        report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-dst-02488",
+                        "vkCmdBuildAccelerationStructureNV(): create info pGeometries[%d].geometry.triangles.vertexCount [%d] "
+                        "must be greater than or equal to build info pGeometries[%d].geometry.triangles.vertexCount [%d].",
+                        i, create_geometry_data.triangles.vertexCount, i, build_geometry_data.triangles.vertexCount);
+                    break;
+                }
+                if (create_geometry_data.triangles.indexCount < build_geometry_data.triangles.indexCount) {
+                    skip |= log_msg(
+                        report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-dst-02488",
+                        "vkCmdBuildAccelerationStructureNV(): create info pGeometries[%d].geometry.triangles.indexCount [%d] "
+                        "must be greater than or equal to build info pGeometries[%d].geometry.triangles.indexCount [%d].",
+                        i, create_geometry_data.triangles.indexCount, i, build_geometry_data.triangles.indexCount);
+                    break;
+                }
+                if (create_geometry_data.aabbs.numAABBs < build_geometry_data.aabbs.numAABBs) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                    HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-dst-02488",
+                                    "vkCmdBuildAccelerationStructureNV(): create info pGeometries[%d].geometry.aabbs.numAABBs [%d] "
+                                    "must be greater than or equal to build info pGeometries[%d].geometry.aabbs.numAABBs [%d].",
+                                    i, create_geometry_data.aabbs.numAABBs, i, build_geometry_data.aabbs.numAABBs);
+                    break;
+                }
+            }
+        }
+    }
+
+    if (dst_as_state != nullptr) {
+        skip |= ValidateMemoryIsBoundToAccelerationStructure(
+            dst_as_state, "vkCmdBuildAccelerationStructureNV()",
+            "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkAccelerationStructureNV");
+    }
+
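+    // When update is VK_TRUE the call refreshes an existing acceleration structure: src must be a
+    // previously built structure with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV, and the
+    // update-scratch requirements must fit in the scratch buffer past scratchOffset. The VK_FALSE
+    // path applies the analogous checks against the build-scratch requirements.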
+    if (update == VK_TRUE) {
+        if (src == VK_NULL_HANDLE) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-update-02489",
+                            "vkCmdBuildAccelerationStructureNV(): If update is VK_TRUE, src must not be VK_NULL_HANDLE.");
+        } else {
+            if (src_as_state == nullptr || !src_as_state->built ||
+                !(src_as_state->build_info.flags & VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-update-02489",
+                                "vkCmdBuildAccelerationStructureNV(): If update is VK_TRUE, src must have been built before "
+                                "with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV set in "
+                                "VkAccelerationStructureInfoNV::flags.");
+            }
+        }
+        if (dst_as_state != nullptr && !dst_as_state->update_scratch_memory_requirements_checked) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
+                            HandleToUint64(dst), kVUID_Core_CmdBuildAccelNV_NoUpdateMemReqQuery,
+                            "vkCmdBuildAccelerationStructureNV(): Updating %s but vkGetAccelerationStructureMemoryRequirementsNV() "
+                            "has not been called for update scratch memory.",
+                            report_data->FormatHandle(dst_as_state->acceleration_structure).c_str());
+            // Use requirements fetched at create time
+        }
+        if (scratch_buffer_state != nullptr && dst_as_state != nullptr &&
+            dst_as_state->update_scratch_memory_requirements.memoryRequirements.size >
+                (scratch_buffer_state->binding.size - scratchOffset)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-update-02492",
+                            "vkCmdBuildAccelerationStructureNV(): If update is VK_TRUE, the size member of the "
+                            "VkMemoryRequirements structure returned from a call to "
+                            "vkGetAccelerationStructureMemoryRequirementsNV with "
+                            "VkAccelerationStructureMemoryRequirementsInfoNV::accelerationStructure set to dst and "
+                            "VkAccelerationStructureMemoryRequirementsInfoNV::type set to "
+                            "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV must be less than "
+                            "or equal to the size of scratch minus scratchOffset.");
+        }
+    } else {
+        if (dst_as_state != nullptr && !dst_as_state->build_scratch_memory_requirements_checked) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
+                            HandleToUint64(dst), kVUID_Core_CmdBuildAccelNV_NoScratchMemReqQuery,
+                            "vkCmdBuildAccelerationStructureNV(): Assigning scratch buffer to %s but "
+                            "vkGetAccelerationStructureMemoryRequirementsNV() has not been called for scratch memory.",
+                            report_data->FormatHandle(dst_as_state->acceleration_structure).c_str());
+            // Use requirements fetched at create time
+        }
+        if (scratch_buffer_state != nullptr && dst_as_state != nullptr &&
+            dst_as_state->build_scratch_memory_requirements.memoryRequirements.size >
+                (scratch_buffer_state->binding.size - scratchOffset)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(commandBuffer), "VUID-vkCmdBuildAccelerationStructureNV-update-02491",
+                            "vkCmdBuildAccelerationStructureNV(): If update is VK_FALSE, the size member of the "
+                            "VkMemoryRequirements structure returned from a call to "
+                            "vkGetAccelerationStructureMemoryRequirementsNV with "
+                            "VkAccelerationStructureMemoryRequirementsInfoNV::accelerationStructure set to dst and "
+                            "VkAccelerationStructureMemoryRequirementsInfoNV::type set to "
+                            "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV must be less than "
+                            "or equal to the size of scratch minus scratchOffset.");
+        }
+    }
+    return skip;
+}
+
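+// vkCmdCopyAccelerationStructureNV: dst must have memory bound, and a compacting copy additionally
+// requires that src was built with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV set.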
+bool CoreChecks::PreCallValidateCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst,
+                                                               VkAccelerationStructureNV src,
+                                                               VkCopyAccelerationStructureModeNV mode) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdCopyAccelerationStructureNV()", VK_QUEUE_COMPUTE_BIT,
+                                      "VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-cmdpool");
+
+    skip |= ValidateCmd(cb_state, CMD_COPYACCELERATIONSTRUCTURENV, "vkCmdCopyAccelerationStructureNV()");
+
+    const ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
+    const ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
+
+    if (dst_as_state != nullptr) {
+        skip |= ValidateMemoryIsBoundToAccelerationStructure(
+            dst_as_state, "vkCmdCopyAccelerationStructureNV()",
+            "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkAccelerationStructureNV");
+    }
+
+    if (mode == VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV) {
+        if (src_as_state != nullptr &&
+            (!src_as_state->built || !(src_as_state->build_info.flags & VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV))) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(commandBuffer), "VUID-vkCmdCopyAccelerationStructureNV-src-02497",
+                            "vkCmdCopyAccelerationStructureNV(): src must have been built with "
+                            "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV if mode is "
+                            "VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV.");
+        }
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateDestroyAccelerationStructureNV(VkDevice device, VkAccelerationStructureNV accelerationStructure,
+                                                               const VkAllocationCallbacks *pAllocator) const {
+    const ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(accelerationStructure);
+    const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureNV);
+    bool skip = false;
+    if (as_state) {
+        skip |= ValidateObjectNotInUse(as_state, obj_struct, "vkDestroyAccelerationStructureNV",
+                                       "VUID-vkDestroyAccelerationStructureNV-accelerationStructure-02442");
+    }
+    return skip;
+}
+
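+// The dynamic-state setters below share one pattern: require a graphics-capable queue, validate the
+// command against the current command buffer state, and flag the call if the bound pipeline baked the
+// state in statically (i.e. was created without the matching VK_DYNAMIC_STATE_* flag).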
+bool CoreChecks::PreCallValidateCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
+                                                         uint32_t viewportCount,
+                                                         const VkViewportWScalingNV *pViewportWScalings) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetViewportWScalingNV()", VK_QUEUE_GRAPHICS_BIT,
+                                      "VUID-vkCmdSetViewportWScalingNV-commandBuffer-cmdpool");
+
+    skip |= ValidateCmd(cb_state, CMD_SETVIEWPORTWSCALINGNV, "vkCmdSetViewportWScalingNV()");
+
+    if (cb_state->static_status & CBSTATUS_VIEWPORT_W_SCALING_SET) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdSetViewportWScalingNV-None-01322",
+                        "vkCmdSetViewportWScalingNV(): pipeline was created without VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV flag.");
+    }
+
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetLineWidth()", VK_QUEUE_GRAPHICS_BIT,
+                                      "VUID-vkCmdSetLineWidth-commandBuffer-cmdpool");
+    skip |= ValidateCmd(cb_state, CMD_SETLINEWIDTH, "vkCmdSetLineWidth()");
+
+    if (cb_state->static_status & CBSTATUS_LINE_WIDTH_SET) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdSetLineWidth-None-00787",
+                        "vkCmdSetLineWidth called but pipeline was created without VK_DYNAMIC_STATE_LINE_WIDTH flag.");
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
+                                                     uint16_t lineStipplePattern) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetLineStippleEXT()", VK_QUEUE_GRAPHICS_BIT,
+                                      "VUID-vkCmdSetLineStippleEXT-commandBuffer-cmdpool");
+    skip |= ValidateCmd(cb_state, CMD_SETLINESTIPPLEEXT, "vkCmdSetLineStippleEXT()");
+
+    if (cb_state->static_status & CBSTATUS_LINE_STIPPLE_SET) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdSetLineStippleEXT-None-02775",
+                        "vkCmdSetLineStippleEXT called but pipeline was created without VK_DYNAMIC_STATE_LINE_STIPPLE_EXT flag.");
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp,
+                                                float depthBiasSlopeFactor) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetDepthBias()", VK_QUEUE_GRAPHICS_BIT,
+                                      "VUID-vkCmdSetDepthBias-commandBuffer-cmdpool");
+    skip |= ValidateCmd(cb_state, CMD_SETDEPTHBIAS, "vkCmdSetDepthBias()");
+    if (cb_state->static_status & CBSTATUS_DEPTH_BIAS_SET) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdSetDepthBias-None-00789",
+                        "vkCmdSetDepthBias(): pipeline was created without VK_DYNAMIC_STATE_DEPTH_BIAS flag.");
+    }
+    if ((depthBiasClamp != 0.0) && (!enabled_features.core.depthBiasClamp)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdSetDepthBias-depthBiasClamp-00790",
+                        "vkCmdSetDepthBias(): the depthBiasClamp device feature is disabled: the depthBiasClamp parameter must "
+                        "be set to 0.0.");
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetBlendConstants()", VK_QUEUE_GRAPHICS_BIT,
+                                      "VUID-vkCmdSetBlendConstants-commandBuffer-cmdpool");
+    skip |= ValidateCmd(cb_state, CMD_SETBLENDCONSTANTS, "vkCmdSetBlendConstants()");
+    if (cb_state->static_status & CBSTATUS_BLEND_CONSTANTS_SET) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdSetBlendConstants-None-00612",
+                        "vkCmdSetBlendConstants(): pipeline was created without VK_DYNAMIC_STATE_BLEND_CONSTANTS flag.");
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetDepthBounds()", VK_QUEUE_GRAPHICS_BIT,
+                                      "VUID-vkCmdSetDepthBounds-commandBuffer-cmdpool");
+    skip |= ValidateCmd(cb_state, CMD_SETDEPTHBOUNDS, "vkCmdSetDepthBounds()");
+    if (cb_state->static_status & CBSTATUS_DEPTH_BOUNDS_SET) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdSetDepthBounds-None-00599",
+                        "vkCmdSetDepthBounds(): pipeline was created without VK_DYNAMIC_STATE_DEPTH_BOUNDS flag.");
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
+                                                         uint32_t compareMask) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetStencilCompareMask()", VK_QUEUE_GRAPHICS_BIT,
+                                      "VUID-vkCmdSetStencilCompareMask-commandBuffer-cmdpool");
+    skip |= ValidateCmd(cb_state, CMD_SETSTENCILCOMPAREMASK, "vkCmdSetStencilCompareMask()");
+    if (cb_state->static_status & CBSTATUS_STENCIL_READ_MASK_SET) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdSetStencilCompareMask-None-00602",
+                        "vkCmdSetStencilCompareMask(): pipeline was created without VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK flag.");
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
+                                                       uint32_t writeMask) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetStencilWriteMask()", VK_QUEUE_GRAPHICS_BIT,
+                                      "VUID-vkCmdSetStencilWriteMask-commandBuffer-cmdpool");
+    skip |= ValidateCmd(cb_state, CMD_SETSTENCILWRITEMASK, "vkCmdSetStencilWriteMask()");
+    if (cb_state->static_status & CBSTATUS_STENCIL_WRITE_MASK_SET) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdSetStencilWriteMask-None-00603",
+                        "vkCmdSetStencilWriteMask(): pipeline was created without VK_DYNAMIC_STATE_STENCIL_WRITE_MASK flag.");
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
+                                                       uint32_t reference) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetStencilReference()", VK_QUEUE_GRAPHICS_BIT,
+                                      "VUID-vkCmdSetStencilReference-commandBuffer-cmdpool");
+    skip |= ValidateCmd(cb_state, CMD_SETSTENCILREFERENCE, "vkCmdSetStencilReference()");
+    if (cb_state->static_status & CBSTATUS_STENCIL_REFERENCE_SET) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdSetStencilReference-None-00604",
+                        "vkCmdSetStencilReference(): pipeline was created without VK_DYNAMIC_STATE_STENCIL_REFERENCE flag.");
+    }
+    return skip;
+}
+
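+// When the binding matches the descriptor type under test, verify that each pDynamicOffsets entry it
+// consumes is a multiple of the corresponding device alignment limit, and advance *offset_idx past
+// that binding's descriptors.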
+static bool ValidateDynamicOffsetAlignment(const debug_report_data *report_data, const VkDescriptorSetLayoutBinding *binding,
+                                           VkDescriptorType test_type, VkDeviceSize alignment, const uint32_t *pDynamicOffsets,
+                                           const char *err_msg, const char *limit_name, uint32_t *offset_idx) {
+    bool skip = false;
+    if (binding->descriptorType == test_type) {
+        const auto end_idx = *offset_idx + binding->descriptorCount;
+        for (uint32_t current_idx = *offset_idx; current_idx < end_idx; current_idx++) {
+            if (SafeModulo(pDynamicOffsets[current_idx], alignment) != 0) {
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0, err_msg,
+                    "vkCmdBindDescriptorSets(): pDynamicOffsets[%d] is %d but must be a multiple of device limit %s 0x%" PRIxLEAST64
+                    ".",
+                    current_idx, pDynamicOffsets[current_idx], limit_name, alignment);
+            }
+        }
+        *offset_idx = end_idx;
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
+                                                      VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount,
+                                                      const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
+                                                      const uint32_t *pDynamicOffsets) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    bool skip = false;
+    skip |= ValidateCmdQueueFlags(cb_state, "vkCmdBindDescriptorSets()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+                                  "VUID-vkCmdBindDescriptorSets-commandBuffer-cmdpool");
+    skip |= ValidateCmd(cb_state, CMD_BINDDESCRIPTORSETS, "vkCmdBindDescriptorSets()");
+    // Track total count of dynamic descriptor types to make sure we have an offset for each one
+    uint32_t total_dynamic_descriptors = 0;
+    string error_string = "";
+
+    const auto *pipeline_layout = GetPipelineLayout(layout);
+    for (uint32_t set_idx = 0; set_idx < setCount; set_idx++) {
+        const cvdescriptorset::DescriptorSet *descriptor_set = GetSetNode(pDescriptorSets[set_idx]);
+        if (descriptor_set) {
+            // Verify that set being bound is compatible with overlapping setLayout of pipelineLayout
+            if (!VerifySetLayoutCompatibility(report_data, descriptor_set, pipeline_layout, set_idx + firstSet, error_string)) {
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+                            HandleToUint64(pDescriptorSets[set_idx]), "VUID-vkCmdBindDescriptorSets-pDescriptorSets-00358",
+                            "descriptorSet #%u being bound is not compatible with overlapping descriptorSetLayout at index %u of "
+                            "%s due to: %s.",
+                            set_idx, set_idx + firstSet, report_data->FormatHandle(layout).c_str(), error_string.c_str());
+            }
+
+            auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
+            if (set_dynamic_descriptor_count) {
+                // First make sure we won't overstep bounds of pDynamicOffsets array
+                if ((total_dynamic_descriptors + set_dynamic_descriptor_count) > dynamicOffsetCount) {
+                    // Test/report this here, such that we don't run past the end of pDynamicOffsets in the else clause
+                    skip |=
+                        log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+                                HandleToUint64(pDescriptorSets[set_idx]), "VUID-vkCmdBindDescriptorSets-dynamicOffsetCount-00359",
+                                "descriptorSet #%u (%s) requires %u dynamicOffsets, but only %u dynamicOffsets are left in "
+                                "pDynamicOffsets array. There must be one dynamic offset for each dynamic descriptor being bound.",
+                                set_idx, report_data->FormatHandle(pDescriptorSets[set_idx]).c_str(),
+                                descriptor_set->GetDynamicDescriptorCount(), (dynamicOffsetCount - total_dynamic_descriptors));
+                    // Set the number found to the maximum to prevent duplicate messages, or subsequent descriptor sets from
+                    // testing against the "short tail" we're skipping below.
+                    total_dynamic_descriptors = dynamicOffsetCount;
+                } else {  // Validate dynamic offsets and Dynamic Offset Minimums
+                    uint32_t cur_dyn_offset = total_dynamic_descriptors;
+                    const auto dsl = descriptor_set->GetLayout();
+                    const auto binding_count = dsl->GetBindingCount();
+                    const auto &limits = phys_dev_props.limits;
+                    for (uint32_t binding_idx = 0; binding_idx < binding_count; binding_idx++) {
+                        const auto *binding = dsl->GetDescriptorSetLayoutBindingPtrFromIndex(binding_idx);
+                        skip |= ValidateDynamicOffsetAlignment(report_data, binding, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
+                                                               limits.minUniformBufferOffsetAlignment, pDynamicOffsets,
+                                                               "VUID-vkCmdBindDescriptorSets-pDynamicOffsets-01971",
+                                                               "minUniformBufferOffsetAlignment", &cur_dyn_offset);
+                        skip |= ValidateDynamicOffsetAlignment(report_data, binding, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC,
+                                                               limits.minStorageBufferOffsetAlignment, pDynamicOffsets,
+                                                               "VUID-vkCmdBindDescriptorSets-pDynamicOffsets-01972",
+                                                               "minStorageBufferOffsetAlignment", &cur_dyn_offset);
+                    }
+                    // Keep running total of dynamic descriptor count to verify at the end
+                    total_dynamic_descriptors += set_dynamic_descriptor_count;
+                }
+            }
+        } else {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+                            HandleToUint64(pDescriptorSets[set_idx]), kVUID_Core_DrawState_InvalidSet,
+                            "Attempt to bind %s that doesn't exist!", report_data->FormatHandle(pDescriptorSets[set_idx]).c_str());
+        }
+    }
+    //  dynamicOffsetCount must equal the total number of dynamic descriptors in the sets being bound
+    if (total_dynamic_descriptors != dynamicOffsetCount) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdBindDescriptorSets-dynamicOffsetCount-00359",
+                        "Attempting to bind %u descriptorSets with %u dynamic descriptors, but dynamicOffsetCount is %u. It should "
+                        "exactly match the number of dynamic descriptors.",
+                        setCount, total_dynamic_descriptors, dynamicOffsetCount);
+    }
+    return skip;
+}
+
+// Validates that the supplied bind point is supported for the command buffer (viz. the command pool)
+// Takes a map of error codes because some VUIDs (e.g. for vkCmdBindPipeline) are written per bind point
+// TODO add vkCmdBindPipeline bind_point validation using this call.
+bool CoreChecks::ValidatePipelineBindPoint(const CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point, const char *func_name,
+                                           const std::map<VkPipelineBindPoint, std::string> &bind_errors) const {
+    bool skip = false;
+    auto pool = cb_state->command_pool.get();
+    if (pool) {  // The loss of a pool in a recording cmd is reported in DestroyCommandPool
+        static const std::map<VkPipelineBindPoint, VkQueueFlags> flag_mask = {
+            std::make_pair(VK_PIPELINE_BIND_POINT_GRAPHICS, static_cast<VkQueueFlags>(VK_QUEUE_GRAPHICS_BIT)),
+            std::make_pair(VK_PIPELINE_BIND_POINT_COMPUTE, static_cast<VkQueueFlags>(VK_QUEUE_COMPUTE_BIT)),
+            std::make_pair(VK_PIPELINE_BIND_POINT_RAY_TRACING_NV,
+                           static_cast<VkQueueFlags>(VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT)),
+        };
+        const auto &qfp = GetPhysicalDeviceState()->queue_family_properties[pool->queueFamilyIndex];
+        if (0 == (qfp.queueFlags & flag_mask.at(bind_point))) {
+            const std::string &error = bind_errors.at(bind_point);
+            auto cb_u64 = HandleToUint64(cb_state->commandBuffer);
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, cb_u64,
+                            error, "%s: %s was allocated from %s that does not support bindpoint %s.", func_name,
+                            report_data->FormatHandle(cb_state->commandBuffer).c_str(),
+                            report_data->FormatHandle(cb_state->createInfo.commandPool).c_str(),
+                            string_VkPipelineBindPoint(bind_point));
+        }
+    }
+    return skip;
+}
+
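+// vkCmdPushDescriptorSetKHR: the set index must select a push descriptor set layout within the pipeline
+// layout; the descriptor writes are validated against a temporary proxy set since no real set exists.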
+bool CoreChecks::PreCallValidateCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
+                                                        VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
+                                                        const VkWriteDescriptorSet *pDescriptorWrites) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    const char *func_name = "vkCmdPushDescriptorSetKHR()";
+    bool skip = false;
+    skip |= ValidateCmd(cb_state, CMD_PUSHDESCRIPTORSETKHR, func_name);
+    skip |= ValidateCmdQueueFlags(cb_state, func_name, (VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT),
+                                  "VUID-vkCmdPushDescriptorSetKHR-commandBuffer-cmdpool");
+
+    static const std::map<VkPipelineBindPoint, std::string> bind_errors = {
+        std::make_pair(VK_PIPELINE_BIND_POINT_GRAPHICS, "VUID-vkCmdPushDescriptorSetKHR-pipelineBindPoint-00363"),
+        std::make_pair(VK_PIPELINE_BIND_POINT_COMPUTE, "VUID-vkCmdPushDescriptorSetKHR-pipelineBindPoint-00363"),
+        std::make_pair(VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, "VUID-vkCmdPushDescriptorSetKHR-pipelineBindPoint-00363")};
+
+    skip |= ValidatePipelineBindPoint(cb_state, pipelineBindPoint, func_name, bind_errors);
+    const auto layout_data = GetPipelineLayout(layout);
+
+    // Validate the set index points to a push descriptor set and is in range
+    if (layout_data) {
+        const auto &set_layouts = layout_data->set_layouts;
+        const auto layout_u64 = HandleToUint64(layout);
+        if (set < set_layouts.size()) {
+            const auto dsl = set_layouts[set];
+            if (dsl) {
+                if (!dsl->IsPushDescriptor()) {
+                    skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
+                                   layout_u64, "VUID-vkCmdPushDescriptorSetKHR-set-00365",
+                                   "%s: Set index %" PRIu32 " does not match push descriptor set layout index for %s.", func_name,
+                                   set, report_data->FormatHandle(layout).c_str());
+                } else {
+                    // Create an empty proxy in order to use the existing descriptor set update validation
+                    // TODO move the validation (like this) that doesn't need descriptor set state to the DSL object so we
+                    // don't have to do this.
+                    cvdescriptorset::DescriptorSet proxy_ds(VK_NULL_HANDLE, nullptr, dsl, 0, nullptr, report_data);
+                    skip |= ValidatePushDescriptorsUpdate(&proxy_ds, descriptorWriteCount, pDescriptorWrites, func_name);
+                }
+            }
+        } else {
+            skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, layout_u64,
+                           "VUID-vkCmdPushDescriptorSetKHR-set-00364",
+                           "%s: Set index %" PRIu32 " is outside of range for %s (set < %" PRIu32 ").", func_name, set,
+                           report_data->FormatHandle(layout).c_str(), static_cast<uint32_t>(set_layouts.size()));
+        }
+    }
+
+    return skip;
+}
+
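+// Byte alignment that a vkCmdBindIndexBuffer offset must satisfy for the given VkIndexType.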
+static VkDeviceSize GetIndexAlignment(VkIndexType indexType) {
+    switch (indexType) {
+        case VK_INDEX_TYPE_UINT16:
+            return 2;
+        case VK_INDEX_TYPE_UINT32:
+            return 4;
+        case VK_INDEX_TYPE_UINT8_EXT:
+            return 1;
+        default:
+            // Not a real index type. Express no alignment requirement here; we expect upper layer
+            // to have already picked up on the enum being nonsense.
+            return 1;
+    }
+}
+
+bool CoreChecks::PreCallValidateCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+                                                   VkIndexType indexType) const {
+    const auto buffer_state = GetBufferState(buffer);
+    const auto cb_node = GetCBState(commandBuffer);
+    assert(buffer_state);
+    assert(cb_node);
+
+    bool skip =
+        ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_INDEX_BUFFER_BIT, true, "VUID-vkCmdBindIndexBuffer-buffer-00433",
+                                 "vkCmdBindIndexBuffer()", "VK_BUFFER_USAGE_INDEX_BUFFER_BIT");
+    skip |= ValidateCmdQueueFlags(cb_node, "vkCmdBindIndexBuffer()", VK_QUEUE_GRAPHICS_BIT,
+                                  "VUID-vkCmdBindIndexBuffer-commandBuffer-cmdpool");
+    skip |= ValidateCmd(cb_node, CMD_BINDINDEXBUFFER, "vkCmdBindIndexBuffer()");
+    skip |= ValidateMemoryIsBoundToBuffer(buffer_state, "vkCmdBindIndexBuffer()", "VUID-vkCmdBindIndexBuffer-buffer-00434");
+    const auto offset_align = GetIndexAlignment(indexType);
+    if (offset % offset_align) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdBindIndexBuffer-offset-00432",
+                        "vkCmdBindIndexBuffer() offset (0x%" PRIxLEAST64 ") does not fall on alignment (%s) boundary.", offset,
+                        string_VkIndexType(indexType));
+    }
+
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount,
+                                                     const VkBuffer *pBuffers, const VkDeviceSize *pOffsets) const {
+    const auto cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+
+    bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdBindVertexBuffers()", VK_QUEUE_GRAPHICS_BIT,
+                                      "VUID-vkCmdBindVertexBuffers-commandBuffer-cmdpool");
+    skip |= ValidateCmd(cb_state, CMD_BINDVERTEXBUFFERS, "vkCmdBindVertexBuffers()");
+    for (uint32_t i = 0; i < bindingCount; ++i) {
+        const auto buffer_state = GetBufferState(pBuffers[i]);
+        assert(buffer_state);
+        skip |= ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, true,
+                                         "VUID-vkCmdBindVertexBuffers-pBuffers-00627", "vkCmdBindVertexBuffers()",
+                                         "VK_BUFFER_USAGE_VERTEX_BUFFER_BIT");
+        skip |=
+            ValidateMemoryIsBoundToBuffer(buffer_state, "vkCmdBindVertexBuffers()", "VUID-vkCmdBindVertexBuffers-pBuffers-00628");
+        if (pOffsets[i] >= buffer_state->createInfo.size) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+                            HandleToUint64(buffer_state->buffer), "VUID-vkCmdBindVertexBuffers-pOffsets-00626",
+                            "vkCmdBindVertexBuffers() offset (0x%" PRIxLEAST64 ") is beyond the end of the buffer.", pOffsets[i]);
+        }
+    }
+    return skip;
+}
+
+// Validate that an image's sampleCount matches the requirement for a specific API call
+bool CoreChecks::ValidateImageSampleCount(const IMAGE_STATE *image_state, VkSampleCountFlagBits sample_count, const char *location,
+                                          const std::string &msgCode) const {
+    bool skip = false;
+    if (image_state->createInfo.samples != sample_count) {
+        skip =
+            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                    HandleToUint64(image_state->image), msgCode, "%s for %s was created with a sample count of %s but must be %s.",
+                    location, report_data->FormatHandle(image_state->image).c_str(),
+                    string_VkSampleCountFlagBits(image_state->createInfo.samples), string_VkSampleCountFlagBits(sample_count));
+    }
+    return skip;
+}
+
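+// vkCmdUpdateBuffer: the destination buffer needs bound memory and TRANSFER_DST usage, the command pool's
+// queue must support transfer, graphics, or compute work, and the call must be recorded outside a render pass.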
+bool CoreChecks::PreCallValidateCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
+                                                VkDeviceSize dataSize, const void *pData) const {
+    const auto cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    const auto dst_buffer_state = GetBufferState(dstBuffer);
+    assert(dst_buffer_state);
+
+    bool skip = false;
+    skip |= ValidateMemoryIsBoundToBuffer(dst_buffer_state, "vkCmdUpdateBuffer()", "VUID-vkCmdUpdateBuffer-dstBuffer-00035");
+    // Validate that DST buffer has correct usage flags set
+    skip |=
+        ValidateBufferUsageFlags(dst_buffer_state, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true, "VUID-vkCmdUpdateBuffer-dstBuffer-00034",
+                                 "vkCmdUpdateBuffer()", "VK_BUFFER_USAGE_TRANSFER_DST_BIT");
+    skip |=
+        ValidateCmdQueueFlags(cb_state, "vkCmdUpdateBuffer()", VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+                              "VUID-vkCmdUpdateBuffer-commandBuffer-cmdpool");
+    skip |= ValidateCmd(cb_state, CMD_UPDATEBUFFER, "vkCmdUpdateBuffer()");
+    skip |= InsideRenderPass(cb_state, "vkCmdUpdateBuffer()", "VUID-vkCmdUpdateBuffer-renderpass");
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdSetEvent()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+                                      "VUID-vkCmdSetEvent-commandBuffer-cmdpool");
+    skip |= ValidateCmd(cb_state, CMD_SETEVENT, "vkCmdSetEvent()");
+    skip |= InsideRenderPass(cb_state, "vkCmdSetEvent()", "VUID-vkCmdSetEvent-renderpass");
+    skip |= ValidateStageMaskGsTsEnables(stageMask, "vkCmdSetEvent()", "VUID-vkCmdSetEvent-stageMask-01150",
+                                         "VUID-vkCmdSetEvent-stageMask-01151", "VUID-vkCmdSetEvent-stageMask-02107",
+                                         "VUID-vkCmdSetEvent-stageMask-02108");
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+
+    bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdResetEvent()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+                                      "VUID-vkCmdResetEvent-commandBuffer-cmdpool");
+    skip |= ValidateCmd(cb_state, CMD_RESETEVENT, "vkCmdResetEvent()");
+    skip |= InsideRenderPass(cb_state, "vkCmdResetEvent()", "VUID-vkCmdResetEvent-renderpass");
+    skip |= ValidateStageMaskGsTsEnables(stageMask, "vkCmdResetEvent()", "VUID-vkCmdResetEvent-stageMask-01154",
+                                         "VUID-vkCmdResetEvent-stageMask-01155", "VUID-vkCmdResetEvent-stageMask-02109",
+                                         "VUID-vkCmdResetEvent-stageMask-02110");
+    return skip;
+}
+
+// Return input pipeline stage flags, expanded for individual bits if VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT is set
+static VkPipelineStageFlags ExpandPipelineStageFlags(const DeviceExtensions &extensions, VkPipelineStageFlags inflags) {
+    if (~inflags & VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT) return inflags;
+
+    return (inflags & ~VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT) |
+           (VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT |
+            (extensions.vk_nv_mesh_shader ? (VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV | VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV) : 0) |
+            VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT |
+            VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT | VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT |
+            VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT |
+            VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT |
+            VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT |
+            (extensions.vk_ext_conditional_rendering ? VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT : 0) |
+            (extensions.vk_ext_transform_feedback ? VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT : 0) |
+            (extensions.vk_nv_shading_rate_image ? VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV : 0) |
+            (extensions.vk_ext_fragment_density_map ? VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT : 0));
+}
+
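+// True if the mask contains any stage outside the framebuffer-space stages (fragment shader, early/late
+// fragment tests, color attachment output).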
+static bool HasNonFramebufferStagePipelineStageFlags(VkPipelineStageFlags inflags) {
+    return (inflags & ~(VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT |
+                        VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT)) != 0;
+}
+
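+// Position of a single stage bit in the logical graphics pipeline ordering, or -1 if it is not a
+// recognized graphics stage.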
+static int GetGraphicsPipelineStageLogicalOrdinal(VkPipelineStageFlagBits flag) {
+    // Note that the list (and lookup) ignore whether a stage is invalid for the enabled extensions. That
+    // should be checked elsewhere; handling it here would greatly complicate this intentionally simple
+    // implementation.
+    // clang-format off
+    const VkPipelineStageFlagBits ordered_array[] = {
+        VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
+        VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
+        VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
+        VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
+        VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT,
+        VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT,
+        VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT,
+        VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT,
+
+        // Including the task/mesh shaders here is not technically correct, as they are in a
+        // separate logical pipeline - but it works for the case this is currently used, and
+        // fixing it would require significant rework and end up with the code being far more
+        // verbose for no practical gain.
+        // However, worth paying attention to this if using this function in a new way.
+        VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV,
+        VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV,
+
+        VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV,
+        VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
+        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+        VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
+        VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+        VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT
+    };
+    // clang-format on
+
+    const int ordered_array_length = sizeof(ordered_array) / sizeof(VkPipelineStageFlagBits);
+
+    for (int i = 0; i < ordered_array_length; ++i) {
+        if (ordered_array[i] == flag) {
+            return i;
+        }
+    }
+
+    return -1;
+}
+
+// The following two functions technically have O(N^2) complexity, but it's for a value of N that's largely
+// stable and also rather tiny - this could definitely be rejigged to work more efficiently, but the impact
+// on runtime is currently negligible, so it wouldn't gain very much.
+// If we add a lot more graphics pipeline stages, this set of functions should be rewritten to accommodate.
+static VkPipelineStageFlagBits GetLogicallyEarliestGraphicsPipelineStage(VkPipelineStageFlags inflags) {
+    VkPipelineStageFlagBits earliest_bit = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
+    int earliest_bit_order = GetGraphicsPipelineStageLogicalOrdinal(earliest_bit);
+
+    for (std::size_t i = 0; i < sizeof(VkPipelineStageFlags) * 8; ++i) {
+        VkPipelineStageFlagBits current_flag = (VkPipelineStageFlagBits)((inflags & 0x1u) << i);
+        if (current_flag) {
+            int new_order = GetGraphicsPipelineStageLogicalOrdinal(current_flag);
+            if (new_order != -1 && new_order < earliest_bit_order) {
+                earliest_bit_order = new_order;
+                earliest_bit = current_flag;
+            }
+        }
+        inflags = inflags >> 1;
+    }
+    return earliest_bit;
+}
+
+static VkPipelineStageFlagBits GetLogicallyLatestGraphicsPipelineStage(VkPipelineStageFlags inflags) {
+    VkPipelineStageFlagBits latest_bit = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
+    int latest_bit_order = GetGraphicsPipelineStageLogicalOrdinal(latest_bit);
+
+    for (std::size_t i = 0; i < sizeof(VkPipelineStageFlags) * 8; ++i) {
+        if (inflags & 0x1u) {
+            const VkPipelineStageFlagBits current_flag = (VkPipelineStageFlagBits)(0x1u << i);
+            int new_order = GetGraphicsPipelineStageLogicalOrdinal(current_flag);
+            if (new_order != -1 && new_order > latest_bit_order) {
+                latest_bit_order = new_order;
+                latest_bit = current_flag;
+            }
+        }
+        inflags = inflags >> 1;
+    }
+    return latest_bit;
+}
+
+// Verify image barrier image state and that the image is consistent with FB image
+bool CoreChecks::ValidateImageBarrierAttachment(const char *funcName, CMD_BUFFER_STATE const *cb_state, VkFramebuffer framebuffer,
+                                                uint32_t active_subpass, const safe_VkSubpassDescription2KHR &sub_desc,
+                                                const VulkanTypedHandle &rp_handle, uint32_t img_index,
+                                                const VkImageMemoryBarrier &img_barrier) const {
+    bool skip = false;
+    const auto &fb_state = GetFramebufferState(framebuffer);
+    assert(fb_state);
+    const auto img_bar_image = img_barrier.image;
+    bool image_match = false;
+    bool sub_image_found = false;  // Do we find a corresponding subpass description
+    VkImageLayout sub_image_layout = VK_IMAGE_LAYOUT_UNDEFINED;
+    uint32_t attach_index = 0;
+    // Verify that a framebuffer image matches barrier image
+    const auto attachmentCount = fb_state->createInfo.attachmentCount;
+    for (uint32_t attachment = 0; attachment < attachmentCount; ++attachment) {
+        auto view_state = GetAttachmentImageViewState(fb_state, attachment);
+        if (view_state && (img_bar_image == view_state->create_info.image)) {
+            image_match = true;
+            attach_index = attachment;
+            break;
+        }
+    }
+    if (image_match) {  // Make sure subpass is referring to matching attachment
+        if (sub_desc.pDepthStencilAttachment && sub_desc.pDepthStencilAttachment->attachment == attach_index) {
+            sub_image_layout = sub_desc.pDepthStencilAttachment->layout;
+            sub_image_found = true;
+        }
+        if (!sub_image_found && device_extensions.vk_khr_depth_stencil_resolve) {
+            const auto *resolve = lvl_find_in_chain<VkSubpassDescriptionDepthStencilResolveKHR>(sub_desc.pNext);
+            if (resolve && resolve->pDepthStencilResolveAttachment &&
+                resolve->pDepthStencilResolveAttachment->attachment == attach_index) {
+                sub_image_layout = resolve->pDepthStencilResolveAttachment->layout;
+                sub_image_found = true;
+            }
+        }
+        if (!sub_image_found) {
+            for (uint32_t j = 0; j < sub_desc.colorAttachmentCount; ++j) {
+                if (sub_desc.pColorAttachments && sub_desc.pColorAttachments[j].attachment == attach_index) {
+                    sub_image_layout = sub_desc.pColorAttachments[j].layout;
+                    sub_image_found = true;
+                    break;
+                }
+                if (!sub_image_found && sub_desc.pResolveAttachments &&
+                    sub_desc.pResolveAttachments[j].attachment == attach_index) {
+                    sub_image_layout = sub_desc.pResolveAttachments[j].layout;
+                    sub_image_found = true;
+                    break;
+                }
+            }
+        }
+        if (!sub_image_found) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                            rp_handle.handle, "VUID-vkCmdPipelineBarrier-image-02635",
+                            "%s: Barrier pImageMemoryBarriers[%d].%s is not referenced by the VkSubpassDescription for "
+                            "active subpass (%d) of current %s.",
+                            funcName, img_index, report_data->FormatHandle(img_bar_image).c_str(), active_subpass,
+                            report_data->FormatHandle(rp_handle).c_str());
+        }
+    } else {  // !image_match
+        auto const fb_handle = HandleToUint64(fb_state->framebuffer);
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, fb_handle,
+                        "VUID-vkCmdPipelineBarrier-image-02635",
+                        "%s: Barrier pImageMemoryBarriers[%d].%s does not match an image from the current %s.", funcName, img_index,
+                        report_data->FormatHandle(img_bar_image).c_str(), report_data->FormatHandle(fb_state->framebuffer).c_str());
+    }
+    if (img_barrier.oldLayout != img_barrier.newLayout) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdPipelineBarrier-oldLayout-01181",
+                        "%s: As the Image Barrier for %s is being executed within a render pass instance, oldLayout must "
+                        "equal newLayout yet they are %s and %s.",
+                        funcName, report_data->FormatHandle(img_barrier.image).c_str(), string_VkImageLayout(img_barrier.oldLayout),
+                        string_VkImageLayout(img_barrier.newLayout));
+    } else {
+        if (sub_image_found && sub_image_layout != img_barrier.oldLayout) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                            rp_handle.handle, "VUID-vkCmdPipelineBarrier-oldLayout-02636",
+                            "%s: Barrier pImageMemoryBarriers[%d].%s is referenced by the VkSubpassDescription for active "
+                            "subpass (%d) of current %s as having layout %s, but image barrier has layout %s.",
+                            funcName, img_index, report_data->FormatHandle(img_bar_image).c_str(), active_subpass,
+                            report_data->FormatHandle(rp_handle).c_str(), string_VkImageLayout(sub_image_layout),
+                            string_VkImageLayout(img_barrier.oldLayout));
+        }
+    }
+    return skip;
+}
+
+// Validate image barriers within a renderPass
+bool CoreChecks::ValidateRenderPassImageBarriers(const char *funcName, const CMD_BUFFER_STATE *cb_state, uint32_t active_subpass,
+                                                 const safe_VkSubpassDescription2KHR &sub_desc, const VulkanTypedHandle &rp_handle,
+                                                 const safe_VkSubpassDependency2KHR *dependencies,
+                                                 const std::vector<uint32_t> &self_dependencies, uint32_t image_mem_barrier_count,
+                                                 const VkImageMemoryBarrier *image_barriers) const {
+    bool skip = false;
+    for (uint32_t i = 0; i < image_mem_barrier_count; ++i) {
+        const auto &img_barrier = image_barriers[i];
+        const auto &img_src_access_mask = img_barrier.srcAccessMask;
+        const auto &img_dst_access_mask = img_barrier.dstAccessMask;
+        bool access_mask_match = false;
+        for (const auto self_dep_index : self_dependencies) {
+            const auto &sub_dep = dependencies[self_dep_index];
+            access_mask_match = (img_src_access_mask == (sub_dep.srcAccessMask & img_src_access_mask)) &&
+                                (img_dst_access_mask == (sub_dep.dstAccessMask & img_dst_access_mask));
+            if (access_mask_match) break;
+        }
+        if (!access_mask_match) {
+            std::stringstream self_dep_ss;
+            stream_join(self_dep_ss, ", ", self_dependencies);
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                            rp_handle.handle, "VUID-vkCmdPipelineBarrier-pDependencies-02285",
+                            "%s: Barrier pImageMemoryBarriers[%d].srcAccessMask(0x%X) is not a subset of VkSubpassDependency "
+                            "srcAccessMask of subpass %d of %s. Candidate VkSubpassDependency entries in pDependencies are [%s].",
+                            funcName, i, img_src_access_mask, active_subpass, report_data->FormatHandle(rp_handle).c_str(),
+                            self_dep_ss.str().c_str());
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                            rp_handle.handle, "VUID-vkCmdPipelineBarrier-pDependencies-02285",
+                            "%s: Barrier pImageMemoryBarriers[%d].dstAccessMask(0x%X) is not a subset of VkSubpassDependency "
+                            "dstAccessMask of subpass %d of %s. Candidate VkSubpassDependency entries in pDependencies are [%s].",
+                            funcName, i, img_dst_access_mask, active_subpass, report_data->FormatHandle(rp_handle).c_str(),
+                            self_dep_ss.str().c_str());
+        }
+        if (VK_QUEUE_FAMILY_IGNORED != img_barrier.srcQueueFamilyIndex ||
+            VK_QUEUE_FAMILY_IGNORED != img_barrier.dstQueueFamilyIndex) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                            rp_handle.handle, "VUID-vkCmdPipelineBarrier-srcQueueFamilyIndex-01182",
+                            "%s: Barrier pImageMemoryBarriers[%d].srcQueueFamilyIndex is %d and "
+                            "pImageMemoryBarriers[%d].dstQueueFamilyIndex is %d but both must be VK_QUEUE_FAMILY_IGNORED.",
+                            funcName, i, img_barrier.srcQueueFamilyIndex, i, img_barrier.dstQueueFamilyIndex);
+        }
+        // Secondary CBs can have null framebuffer so record will queue up validation in that case 'til FB is known
+        if (VK_NULL_HANDLE != cb_state->activeFramebuffer) {
+            skip |= ValidateImageBarrierAttachment(funcName, cb_state, cb_state->activeFramebuffer, active_subpass, sub_desc,
+                                                   rp_handle, i, img_barrier);
+        }
+    }
+    return skip;
+}
+
+// Validate VUs for Pipeline Barriers that are within a renderPass
+// Pre: cb_state->activeRenderPass must be a pointer to valid renderPass state
+bool CoreChecks::ValidateRenderPassPipelineBarriers(const char *funcName, const CMD_BUFFER_STATE *cb_state,
+                                                    VkPipelineStageFlags src_stage_mask, VkPipelineStageFlags dst_stage_mask,
+                                                    VkDependencyFlags dependency_flags, uint32_t mem_barrier_count,
+                                                    const VkMemoryBarrier *mem_barriers, uint32_t buffer_mem_barrier_count,
+                                                    const VkBufferMemoryBarrier *buffer_mem_barriers,
+                                                    uint32_t image_mem_barrier_count,
+                                                    const VkImageMemoryBarrier *image_barriers) const {
+    bool skip = false;
+    const auto rp_state = cb_state->activeRenderPass;
+    const auto active_subpass = cb_state->activeSubpass;
+    const VulkanTypedHandle rp_handle(rp_state->renderPass, kVulkanObjectTypeRenderPass);
+    const auto &self_dependencies = rp_state->self_dependencies[active_subpass];
+    const auto &dependencies = rp_state->createInfo.pDependencies;
+    if (self_dependencies.size() == 0) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle.handle,
+                        "VUID-vkCmdPipelineBarrier-pDependencies-02285",
+                        "%s: Barriers cannot be set during subpass %d of %s with no self-dependency specified.", funcName,
+                        active_subpass, report_data->FormatHandle(rp_handle).c_str());
+    } else {
+        // Grab ref to current subpassDescription up-front for use below
+        const auto &sub_desc = rp_state->createInfo.pSubpasses[active_subpass];
+        // Look for matching mask in any self-dependency
+        bool stage_mask_match = false;
+        for (const auto self_dep_index : self_dependencies) {
+            const auto &sub_dep = dependencies[self_dep_index];
+            const auto &sub_src_stage_mask = ExpandPipelineStageFlags(device_extensions, sub_dep.srcStageMask);
+            const auto &sub_dst_stage_mask = ExpandPipelineStageFlags(device_extensions, sub_dep.dstStageMask);
+            stage_mask_match = ((sub_src_stage_mask == VK_PIPELINE_STAGE_ALL_COMMANDS_BIT) ||
+                                (src_stage_mask == (sub_src_stage_mask & src_stage_mask))) &&
+                               ((sub_dst_stage_mask == VK_PIPELINE_STAGE_ALL_COMMANDS_BIT) ||
+                                (dst_stage_mask == (sub_dst_stage_mask & dst_stage_mask)));
+            if (stage_mask_match) break;
+        }
+        if (!stage_mask_match) {
+            std::stringstream self_dep_ss;
+            stream_join(self_dep_ss, ", ", self_dependencies);
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                            rp_handle.handle, "VUID-vkCmdPipelineBarrier-pDependencies-02285",
+                            "%s: Barrier srcStageMask(0x%X) is not a subset of VkSubpassDependency srcStageMask of any "
+                            "self-dependency of subpass %d of %s for which dstStageMask is also a subset. "
+                            "Candidate VkSubpassDependency are pDependencies entries [%s].",
+                            funcName, src_stage_mask, active_subpass, report_data->FormatHandle(rp_handle).c_str(),
+                            self_dep_ss.str().c_str());
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                            rp_handle.handle, "VUID-vkCmdPipelineBarrier-pDependencies-02285",
+                            "%s: Barrier dstStageMask(0x%X) is not a subset of VkSubpassDependency dstStageMask of any "
+                            "self-dependency of subpass %d of %s for which srcStageMask is also a subset. "
+                            "Candidate VkSubpassDependency are pDependencies entries [%s].",
+                            funcName, dst_stage_mask, active_subpass, report_data->FormatHandle(rp_handle).c_str(),
+                            self_dep_ss.str().c_str());
+        }
+
+        if (0 != buffer_mem_barrier_count) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                            rp_handle.handle, "VUID-vkCmdPipelineBarrier-bufferMemoryBarrierCount-01178",
+                            "%s: bufferMemoryBarrierCount is non-zero (%d) for subpass %d of %s.", funcName,
+                            buffer_mem_barrier_count, active_subpass, report_data->FormatHandle(rp_handle).c_str());
+        }
+        for (uint32_t i = 0; i < mem_barrier_count; ++i) {
+            const auto &mb_src_access_mask = mem_barriers[i].srcAccessMask;
+            const auto &mb_dst_access_mask = mem_barriers[i].dstAccessMask;
+            bool access_mask_match = false;
+            for (const auto self_dep_index : self_dependencies) {
+                const auto &sub_dep = dependencies[self_dep_index];
+                access_mask_match = (mb_src_access_mask == (sub_dep.srcAccessMask & mb_src_access_mask)) &&
+                                    (mb_dst_access_mask == (sub_dep.dstAccessMask & mb_dst_access_mask));
+                if (access_mask_match) break;
+            }
+
+            if (!access_mask_match) {
+                std::stringstream self_dep_ss;
+                stream_join(self_dep_ss, ", ", self_dependencies);
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle.handle,
+                    "VUID-vkCmdPipelineBarrier-pDependencies-02285",
+                    "%s: Barrier pMemoryBarriers[%d].srcAccessMask(0x%X) is not a subset of VkSubpassDependency srcAccessMask "
+                    "for any self-dependency of subpass %d of %s for which dstAccessMask is also a subset. "
+                    "Candidate VkSubpassDependency are pDependencies entries [%s].",
+                    funcName, i, mb_src_access_mask, active_subpass, report_data->FormatHandle(rp_handle).c_str(),
+                    self_dep_ss.str().c_str());
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle.handle,
+                    "VUID-vkCmdPipelineBarrier-pDependencies-02285",
+                    "%s: Barrier pMemoryBarriers[%d].dstAccessMask(0x%X) is not a subset of VkSubpassDependency dstAccessMask "
+                    "for any self-dependency of subpass %d of %s for which srcAccessMask is also a subset. "
+                    "Candidate VkSubpassDependency are pDependencies entries [%s].",
+                    funcName, i, mb_dst_access_mask, active_subpass, report_data->FormatHandle(rp_handle).c_str(),
+                    self_dep_ss.str().c_str());
+            }
+        }
+
+        skip |= ValidateRenderPassImageBarriers(funcName, cb_state, active_subpass, sub_desc, rp_handle, dependencies,
+                                                self_dependencies, image_mem_barrier_count, image_barriers);
+
+        bool flag_match = false;
+        for (const auto self_dep_index : self_dependencies) {
+            const auto &sub_dep = dependencies[self_dep_index];
+            flag_match = sub_dep.dependencyFlags == dependency_flags;
+            if (flag_match) break;
+        }
+        if (!flag_match) {
+            std::stringstream self_dep_ss;
+            stream_join(self_dep_ss, ", ", self_dependencies);
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                            rp_handle.handle, "VUID-vkCmdPipelineBarrier-pDependencies-02285",
+                            "%s: dependencyFlags param (0x%X) does not equal VkSubpassDependency dependencyFlags value for any "
+                            "self-dependency of subpass %d of %s. Candidate VkSubpassDependency are pDependencies entries [%s].",
+                            funcName, dependency_flags, cb_state->activeSubpass, report_data->FormatHandle(rp_handle).c_str(),
+                            self_dep_ss.str().c_str());
+        }
+    }
+    return skip;
+}
+
+// Array mapping each individual accessMask bit to the stageMask bits that support it
+//  accessMask active bit position (0-27) maps to the array index
+const static VkPipelineStageFlags AccessMaskToPipeStage[28] = {
+    // VK_ACCESS_INDIRECT_COMMAND_READ_BIT = 0
+    VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
+    // VK_ACCESS_INDEX_READ_BIT = 1
+    VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
+    // VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT = 2
+    VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
+    // VK_ACCESS_UNIFORM_READ_BIT = 3
+    VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT |
+        VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT | VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT |
+        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV |
+        VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV | VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV,
+    // VK_ACCESS_INPUT_ATTACHMENT_READ_BIT = 4
+    VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+    // VK_ACCESS_SHADER_READ_BIT = 5
+    VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT |
+        VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT | VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT |
+        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV |
+        VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV | VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV,
+    // VK_ACCESS_SHADER_WRITE_BIT = 6
+    VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT |
+        VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT | VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT |
+        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV |
+        VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV | VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV,
+    // VK_ACCESS_COLOR_ATTACHMENT_READ_BIT = 7
+    VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+    // VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT = 8
+    VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+    // VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 9
+    VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
+    // VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 10
+    VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
+    // VK_ACCESS_TRANSFER_READ_BIT = 11
+    VK_PIPELINE_STAGE_TRANSFER_BIT,
+    // VK_ACCESS_TRANSFER_WRITE_BIT = 12
+    VK_PIPELINE_STAGE_TRANSFER_BIT,
+    // VK_ACCESS_HOST_READ_BIT = 13
+    VK_PIPELINE_STAGE_HOST_BIT,
+    // VK_ACCESS_HOST_WRITE_BIT = 14
+    VK_PIPELINE_STAGE_HOST_BIT,
+    // VK_ACCESS_MEMORY_READ_BIT = 15
+    VK_ACCESS_FLAG_BITS_MAX_ENUM,  // Always match
+    // VK_ACCESS_MEMORY_WRITE_BIT = 16
+    VK_ACCESS_FLAG_BITS_MAX_ENUM,  // Always match
+    // VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX = 17
+    VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX,
+    // VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX = 18
+    VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX,
+    // VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 19
+    VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+    // VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT = 20
+    VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT,
+    // VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV = 21
+    VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV | VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
+    // VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV = 22
+    VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
+    // VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV = 23
+    VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV,
+    // VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 24
+    VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT,
+    // VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT = 25
+    VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT,
+    // VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 26
+    VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
+    // VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 27
+    VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT,
+};
+
+// Verify that all bits of access_mask are supported by the given stage_mask
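+// e.g. (illustrative): srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT (bit 12) with
+// srcStageMask = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT fails, because AccessMaskToPipeStage[12]
+// contains only VK_PIPELINE_STAGE_TRANSFER_BIT.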
+static bool ValidateAccessMaskPipelineStage(const DeviceExtensions &extensions, VkAccessFlags access_mask,
+                                            VkPipelineStageFlags stage_mask) {
+    // Early out if ALL_COMMANDS is set in the stage mask, or if access_mask is 0
+    if ((stage_mask & VK_PIPELINE_STAGE_ALL_COMMANDS_BIT) || (0 == access_mask)) return true;
+
+    stage_mask = ExpandPipelineStageFlags(extensions, stage_mask);
+    int index = 0;
+    // for each of the set bits in access_mask, make sure that supporting stage mask bit(s) are set
+    while (access_mask) {
+        index = (u_ffs(access_mask) - 1);
+        assert(index >= 0);
+        // Use an explicit "== 0" compare to prevent a warning from MSVC
+        if ((AccessMaskToPipeStage[index] & stage_mask) == 0) return false;  // early out
+        access_mask &= ~(1 << index);                                        // Mask off bit that's been checked
+    }
+    return true;
+}
+
+namespace barrier_queue_families {
+enum VuIndex {
+    kSrcOrDstMustBeIgnore,
+    kSpecialOrIgnoreOnly,
+    kSrcIgnoreRequiresDstIgnore,
+    kDstValidOrSpecialIfNotIgnore,
+    kSrcValidOrSpecialIfNotIgnore,
+    kSrcAndDestMustBeIgnore,
+    kBothIgnoreOrBothValid,
+    kSubmitQueueMustMatchSrcOrDst
+};
+static const char *vu_summary[] = {"Source or destination queue family must be ignored.",
+                                   "Source or destination queue family must be special or ignored.",
+                                   "Destination queue family must be ignored if source queue family is.",
+                                   "Destination queue family must be valid, ignored, or special.",
+                                   "Source queue family must be valid, ignored, or special.",
+                                   "Source and destination queue family must both be ignored.",
+                                   "Source and destination queue family must both be ignored or both be valid.",
+                                   "Source or destination queue family must match submit queue family, if not ignored."};
+
+static const std::string image_error_codes[] = {
+    "VUID-VkImageMemoryBarrier-image-01381",  //   kSrcOrDstMustBeIgnore
+    "VUID-VkImageMemoryBarrier-image-01766",  //   kSpecialOrIgnoreOnly
+    "VUID-VkImageMemoryBarrier-image-01201",  //   kSrcIgnoreRequiresDstIgnore
+    "VUID-VkImageMemoryBarrier-image-01768",  //   kDstValidOrSpecialIfNotIgnore
+    "VUID-VkImageMemoryBarrier-image-01767",  //   kSrcValidOrSpecialIfNotIgnore
+    "VUID-VkImageMemoryBarrier-image-01199",  //   kSrcAndDestMustBeIgnore
+    "VUID-VkImageMemoryBarrier-image-01200",  //   kBothIgnoreOrBothValid
+    "VUID-VkImageMemoryBarrier-image-01205",  //   kSubmitQueueMustMatchSrcOrDst
+};
+
+static const std::string buffer_error_codes[] = {
+    "VUID-VkBufferMemoryBarrier-buffer-01191",  //  kSrcOrDstMustBeIgnore
+    "VUID-VkBufferMemoryBarrier-buffer-01763",  //  kSpecialOrIgnoreOnly
+    "VUID-VkBufferMemoryBarrier-buffer-01193",  //  kSrcIgnoreRequiresDstIgnore
+    "VUID-VkBufferMemoryBarrier-buffer-01765",  //  kDstValidOrSpecialIfNotIgnore
+    "VUID-VkBufferMemoryBarrier-buffer-01764",  //  kSrcValidOrSpecialIfNotIgnore
+    "VUID-VkBufferMemoryBarrier-buffer-01190",  //  kSrcAndDestMustBeIgnore
+    "VUID-VkBufferMemoryBarrier-buffer-01192",  //  kBothIgnoreOrBothValid
+    "VUID-VkBufferMemoryBarrier-buffer-01196",  //  kSubmitQueueMustMatchSrcOrDst
+};
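+// Note: both error-code arrays above are indexed by VuIndex, so their order must stay in sync with that enum.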
+
+class ValidatorState {
+  public:
+    ValidatorState(const ValidationStateTracker *device_data, const char *func_name, const CMD_BUFFER_STATE *cb_state,
+                   const VulkanTypedHandle &barrier_handle, const VkSharingMode sharing_mode)
+        : report_data_(device_data->report_data),
+          func_name_(func_name),
+          cb_handle64_(HandleToUint64(cb_state->commandBuffer)),
+          barrier_handle_(barrier_handle),
+          sharing_mode_(sharing_mode),
+          val_codes_(barrier_handle.type == kVulkanObjectTypeImage ? image_error_codes : buffer_error_codes),
+          limit_(static_cast<uint32_t>(device_data->physical_device_state->queue_family_properties.size())),
+          mem_ext_(IsExtEnabled(device_data->device_extensions.vk_khr_external_memory)) {}
+
+    // Log the messages using boilerplate from object state, plus the VU-specific information selected by vu_index.
+    // There are one- and two-family versions; in the single-family version, param_name holds the name of the passed parameter.
+    bool LogMsg(VuIndex vu_index, uint32_t family, const char *param_name) const {
+        const std::string &val_code = val_codes_[vu_index];
+        const char *annotation = GetFamilyAnnotation(family);
+        return log_msg(report_data_, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, cb_handle64_,
+                       val_code, "%s: Barrier using %s %s created with sharingMode %s, has %s %u%s. %s", func_name_,
+                       GetTypeString(), report_data_->FormatHandle(barrier_handle_).c_str(), GetModeString(), param_name, family,
+                       annotation, vu_summary[vu_index]);
+    }
+
+    bool LogMsg(VuIndex vu_index, uint32_t src_family, uint32_t dst_family) const {
+        const std::string &val_code = val_codes_[vu_index];
+        const char *src_annotation = GetFamilyAnnotation(src_family);
+        const char *dst_annotation = GetFamilyAnnotation(dst_family);
+        return log_msg(
+            report_data_, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, cb_handle64_, val_code,
+            "%s: Barrier using %s %s created with sharingMode %s, has srcQueueFamilyIndex %u%s and dstQueueFamilyIndex %u%s. %s",
+            func_name_, GetTypeString(), report_data_->FormatHandle(barrier_handle_).c_str(), GetModeString(), src_family,
+            src_annotation, dst_family, dst_annotation, vu_summary[vu_index]);
+    }
+
+    // This abstract VU can only be tested at submit time, thus we need a callback from the closure containing the needed
+    // data. Note that the mem_barrier is copied into the closure, as the lambda's lifespan exceeds the guarantees of validity
+    // for application input.
+    static bool ValidateAtQueueSubmit(const QUEUE_STATE *queue_state, const ValidationStateTracker *device_data,
+                                      uint32_t src_family, uint32_t dst_family, const ValidatorState &val) {
+        uint32_t queue_family = queue_state->queueFamilyIndex;
+        if ((src_family != queue_family) && (dst_family != queue_family)) {
+            const std::string &val_code = val.val_codes_[kSubmitQueueMustMatchSrcOrDst];
+            const char *src_annotation = val.GetFamilyAnnotation(src_family);
+            const char *dst_annotation = val.GetFamilyAnnotation(dst_family);
+            return log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
+                           HandleToUint64(queue_state->queue), val_code,
+                           "%s: Barrier submitted to queue with family index %u, using %s %s created with sharingMode %s, has "
+                           "srcQueueFamilyIndex %u%s and dstQueueFamilyIndex %u%s. %s",
+                           "vkQueueSubmit", queue_family, val.GetTypeString(),
+                           device_data->report_data->FormatHandle(val.barrier_handle_).c_str(), val.GetModeString(), src_family,
+                           src_annotation, dst_family, dst_annotation, vu_summary[kSubmitQueueMustMatchSrcOrDst]);
+        }
+        return false;
+    }
+    // Logical helpers for semantic clarity
+    inline bool KhrExternalMem() const { return mem_ext_; }
+    inline bool IsValid(uint32_t queue_family) const { return (queue_family < limit_); }
+    inline bool IsValidOrSpecial(uint32_t queue_family) const {
+        return IsValid(queue_family) || (mem_ext_ && QueueFamilyIsSpecial(queue_family));
+    }
+
+    // Helpers for LogMsg (and log_msg)
+    const char *GetModeString() const { return string_VkSharingMode(sharing_mode_); }
+
+    // Descriptive text for the various types of queue family index
+    const char *GetFamilyAnnotation(uint32_t family) const {
+        const char *external = " (VK_QUEUE_FAMILY_EXTERNAL_KHR)";
+        const char *foreign = " (VK_QUEUE_FAMILY_FOREIGN_EXT)";
+        const char *ignored = " (VK_QUEUE_FAMILY_IGNORED)";
+        const char *valid = " (VALID)";
+        const char *invalid = " (INVALID)";
+        switch (family) {
+            case VK_QUEUE_FAMILY_EXTERNAL_KHR:
+                return external;
+            case VK_QUEUE_FAMILY_FOREIGN_EXT:
+                return foreign;
+            case VK_QUEUE_FAMILY_IGNORED:
+                return ignored;
+            default:
+                if (IsValid(family)) {
+                    return valid;
+                }
+                return invalid;
+        };
+    }
+    const char *GetTypeString() const { return object_string[barrier_handle_.type]; }
+    VkSharingMode GetSharingMode() const { return sharing_mode_; }
+
+  protected:
+    const debug_report_data *const report_data_;
+    const char *const func_name_;
+    const uint64_t cb_handle64_;
+    const VulkanTypedHandle barrier_handle_;
+    const VkSharingMode sharing_mode_;
+    const std::string *val_codes_;
+    const uint32_t limit_;
+    const bool mem_ext_;
+};
+
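+// Queue-family ownership checks shared by the image and buffer barrier wrappers below. With
+// VK_KHR_external_memory enabled, the "special" (EXTERNAL / FOREIGN) families are additionally
+// permitted; without it, concurrent-sharing barriers must set both indices to
+// VK_QUEUE_FAMILY_IGNORED, and exclusive-sharing barriers must use either both-ignored or
+// both-valid indices.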
+bool Validate(const CoreChecks *device_data, const char *func_name, const CMD_BUFFER_STATE *cb_state, const ValidatorState &val,
+              const uint32_t src_queue_family, const uint32_t dst_queue_family) {
+    bool skip = false;
+
+    const bool mode_concurrent = val.GetSharingMode() == VK_SHARING_MODE_CONCURRENT;
+    const bool src_ignored = QueueFamilyIsIgnored(src_queue_family);
+    const bool dst_ignored = QueueFamilyIsIgnored(dst_queue_family);
+    if (val.KhrExternalMem()) {
+        if (mode_concurrent) {
+            if (!(src_ignored || dst_ignored)) {
+                skip |= val.LogMsg(kSrcOrDstMustBeIgnore, src_queue_family, dst_queue_family);
+            }
+            if ((src_ignored && !(dst_ignored || QueueFamilyIsSpecial(dst_queue_family))) ||
+                (dst_ignored && !(src_ignored || QueueFamilyIsSpecial(src_queue_family)))) {
+                skip |= val.LogMsg(kSpecialOrIgnoreOnly, src_queue_family, dst_queue_family);
+            }
+        } else {
+            // VK_SHARING_MODE_EXCLUSIVE
+            if (src_ignored && !dst_ignored) {
+                skip |= val.LogMsg(kSrcIgnoreRequiresDstIgnore, src_queue_family, dst_queue_family);
+            }
+            if (!dst_ignored && !val.IsValidOrSpecial(dst_queue_family)) {
+                skip |= val.LogMsg(kDstValidOrSpecialIfNotIgnore, dst_queue_family, "dstQueueFamilyIndex");
+            }
+            if (!src_ignored && !val.IsValidOrSpecial(src_queue_family)) {
+                skip |= val.LogMsg(kSrcValidOrSpecialIfNotIgnore, src_queue_family, "srcQueueFamilyIndex");
+            }
+        }
+    } else {
+        // No memory extension
+        if (mode_concurrent) {
+            if (!src_ignored || !dst_ignored) {
+                skip |= val.LogMsg(kSrcAndDestMustBeIgnore, src_queue_family, dst_queue_family);
+            }
+        } else {
+            // VK_SHARING_MODE_EXCLUSIVE
+            if (!((src_ignored && dst_ignored) || (val.IsValid(src_queue_family) && val.IsValid(dst_queue_family)))) {
+                skip |= val.LogMsg(kBothIgnoreOrBothValid, src_queue_family, dst_queue_family);
+            }
+        }
+    }
+    return skip;
+}
+}  // namespace barrier_queue_families
+
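+// Submit-time entry point for the deferred concurrent-sharing check; the actual VU test lives in
+// ValidatorState::ValidateAtQueueSubmit above.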
+bool CoreChecks::ValidateConcurrentBarrierAtSubmit(const ValidationStateTracker *state_data, const QUEUE_STATE *queue_state,
+                                                   const char *func_name, const CMD_BUFFER_STATE *cb_state,
+                                                   const VulkanTypedHandle &typed_handle, uint32_t src_queue_family,
+                                                   uint32_t dst_queue_family) {
+    using barrier_queue_families::ValidatorState;
+    ValidatorState val(state_data, func_name, cb_state, typed_handle, VK_SHARING_MODE_CONCURRENT);
+    return ValidatorState::ValidateAtQueueSubmit(queue_state, state_data, src_queue_family, dst_queue_family, val);
+}
+
+// Type specific wrapper for image barriers
+bool CoreChecks::ValidateBarrierQueueFamilies(const char *func_name, const CMD_BUFFER_STATE *cb_state,
+                                              const VkImageMemoryBarrier &barrier, const IMAGE_STATE *state_data) const {
+    // State data is required
+    if (!state_data) {
+        return false;
+    }
+
+    // Create the validator state from the image state
+    barrier_queue_families::ValidatorState val(this, func_name, cb_state, VulkanTypedHandle(barrier.image, kVulkanObjectTypeImage),
+                                               state_data->createInfo.sharingMode);
+    const uint32_t src_queue_family = barrier.srcQueueFamilyIndex;
+    const uint32_t dst_queue_family = barrier.dstQueueFamilyIndex;
+    return barrier_queue_families::Validate(this, func_name, cb_state, val, src_queue_family, dst_queue_family);
+}
+
+// Type specific wrapper for buffer barriers
+bool CoreChecks::ValidateBarrierQueueFamilies(const char *func_name, const CMD_BUFFER_STATE *cb_state,
+                                              const VkBufferMemoryBarrier &barrier, const BUFFER_STATE *state_data) const {
+    // State data is required
+    if (!state_data) {
+        return false;
+    }
+
+    // Create the validator state from the buffer state
+    barrier_queue_families::ValidatorState val(
+        this, func_name, cb_state, VulkanTypedHandle(barrier.buffer, kVulkanObjectTypeBuffer), state_data->createInfo.sharingMode);
+    const uint32_t src_queue_family = barrier.srcQueueFamilyIndex;
+    const uint32_t dst_queue_family = barrier.dstQueueFamilyIndex;
+    return barrier_queue_families::Validate(this, func_name, cb_state, val, src_queue_family, dst_queue_family);
+}
+
+bool CoreChecks::ValidateBarriers(const char *funcName, const CMD_BUFFER_STATE *cb_state, VkPipelineStageFlags src_stage_mask,
+                                  VkPipelineStageFlags dst_stage_mask, uint32_t memBarrierCount,
+                                  const VkMemoryBarrier *pMemBarriers, uint32_t bufferBarrierCount,
+                                  const VkBufferMemoryBarrier *pBufferMemBarriers, uint32_t imageMemBarrierCount,
+                                  const VkImageMemoryBarrier *pImageMemBarriers) const {
+    bool skip = false;
+    for (uint32_t i = 0; i < memBarrierCount; ++i) {
+        const auto &mem_barrier = pMemBarriers[i];
+        if (!ValidateAccessMaskPipelineStage(device_extensions, mem_barrier.srcAccessMask, src_stage_mask)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdPipelineBarrier-srcAccessMask-02815",
+                            "%s: pMemBarriers[%d].srcAccessMask (0x%X) is not supported by srcStageMask (0x%X).", funcName, i,
+                            mem_barrier.srcAccessMask, src_stage_mask);
+        }
+        if (!ValidateAccessMaskPipelineStage(device_extensions, mem_barrier.dstAccessMask, dst_stage_mask)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdPipelineBarrier-dstAccessMask-02816",
+                            "%s: pMemBarriers[%d].dstAccessMask (0x%X) is not supported by dstStageMask (0x%X).", funcName, i,
+                            mem_barrier.dstAccessMask, dst_stage_mask);
+        }
+    }
+    for (uint32_t i = 0; i < imageMemBarrierCount; ++i) {
+        const auto &mem_barrier = pImageMemBarriers[i];
+        if (!ValidateAccessMaskPipelineStage(device_extensions, mem_barrier.srcAccessMask, src_stage_mask)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdPipelineBarrier-srcAccessMask-02815",
+                            "%s: pImageMemBarriers[%d].srcAccessMask (0x%X) is not supported by srcStageMask (0x%X).", funcName, i,
+                            mem_barrier.srcAccessMask, src_stage_mask);
+        }
+        if (!ValidateAccessMaskPipelineStage(device_extensions, mem_barrier.dstAccessMask, dst_stage_mask)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdPipelineBarrier-dstAccessMask-02816",
+                            "%s: pImageMemBarriers[%d].dstAccessMask (0x%X) is not supported by dstStageMask (0x%X).", funcName, i,
+                            mem_barrier.dstAccessMask, dst_stage_mask);
+        }
+
+        auto image_data = GetImageState(mem_barrier.image);
+        skip |= ValidateBarrierQueueFamilies(funcName, cb_state, mem_barrier, image_data);
+
+        if (mem_barrier.newLayout == VK_IMAGE_LAYOUT_UNDEFINED || mem_barrier.newLayout == VK_IMAGE_LAYOUT_PREINITIALIZED) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_state->commandBuffer), "VUID-VkImageMemoryBarrier-newLayout-01198",
+                            "%s: Image Layout cannot be transitioned to UNDEFINED or PREINITIALIZED.", funcName);
+        }
+
+        if (image_data) {
+            // There is no VUID for this, but there is blanket text:
+            //     "Non-sparse resources must be bound completely and contiguously to a single VkDeviceMemory object before
+            //     recording commands in a command buffer."
+            // TODO: Update this when VUID is defined
+            skip |= ValidateMemoryIsBoundToImage(image_data, funcName, kVUIDUndefined);
+
+            const auto aspect_mask = mem_barrier.subresourceRange.aspectMask;
+            skip |= ValidateImageAspectMask(image_data->image, image_data->createInfo.format, aspect_mask, funcName);
+
+            const std::string param_name = "pImageMemoryBarriers[" + std::to_string(i) + "].subresourceRange";
+            skip |= ValidateImageBarrierSubresourceRange(image_data, mem_barrier.subresourceRange, funcName, param_name.c_str());
+        }
+    }
+
+    for (uint32_t i = 0; i < bufferBarrierCount; ++i) {
+        const auto &mem_barrier = pBufferMemBarriers[i];
+
+        if (!ValidateAccessMaskPipelineStage(device_extensions, mem_barrier.srcAccessMask, src_stage_mask)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdPipelineBarrier-srcAccessMask-02815",
+                            "%s: pBufferMemBarriers[%d].srcAccessMask (0x%X) is not supported by srcStageMask (0x%X).", funcName, i,
+                            mem_barrier.srcAccessMask, src_stage_mask);
+        }
+        if (!ValidateAccessMaskPipelineStage(device_extensions, mem_barrier.dstAccessMask, dst_stage_mask)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdPipelineBarrier-dstAccessMask-02816",
+                            "%s: pBufferMemBarriers[%d].dstAccessMask (0x%X) is not supported by dstStageMask (0x%X).", funcName, i,
+                            mem_barrier.dstAccessMask, dst_stage_mask);
+        }
+        // Validate buffer barrier queue family indices
+        auto buffer_state = GetBufferState(mem_barrier.buffer);
+        skip |= ValidateBarrierQueueFamilies(funcName, cb_state, mem_barrier, buffer_state);
+
+        if (buffer_state) {
+            // There is no VUID for this, but there is blanket text:
+            //     "Non-sparse resources must be bound completely and contiguously to a single VkDeviceMemory object before
+            //     recording commands in a command buffer"
+            // TODO: Update this when VUID is defined
+            skip |= ValidateMemoryIsBoundToBuffer(buffer_state, funcName, kVUIDUndefined);
+
+            auto buffer_size = buffer_state->createInfo.size;
+            if (mem_barrier.offset >= buffer_size) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(cb_state->commandBuffer), "VUID-VkBufferMemoryBarrier-offset-01187",
+                                "%s: Buffer Barrier %s has offset 0x%" PRIx64 " which is not less than total size 0x%" PRIx64 ".",
+                                funcName, report_data->FormatHandle(mem_barrier.buffer).c_str(), HandleToUint64(mem_barrier.offset),
+                                HandleToUint64(buffer_size));
+            } else if (mem_barrier.size != VK_WHOLE_SIZE && (mem_barrier.offset + mem_barrier.size > buffer_size)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(cb_state->commandBuffer), "VUID-VkBufferMemoryBarrier-size-01189",
+                                "%s: Buffer Barrier %s has offset 0x%" PRIx64 " and size 0x%" PRIx64
+                                " whose sum is greater than total size 0x%" PRIx64 ".",
+                                funcName, report_data->FormatHandle(mem_barrier.buffer).c_str(), HandleToUint64(mem_barrier.offset),
+                                HandleToUint64(mem_barrier.size), HandleToUint64(buffer_size));
+            }
+        }
+    }
+
+    skip |= ValidateBarriersQFOTransferUniqueness(funcName, cb_state, bufferBarrierCount, pBufferMemBarriers, imageMemBarrierCount,
+                                                  pImageMemBarriers);
+
+    return skip;
+}
+
+bool CoreChecks::ValidateEventStageMask(const ValidationStateTracker *state_data, const CMD_BUFFER_STATE *pCB, size_t eventCount,
+                                        size_t firstEventIndex, VkPipelineStageFlags sourceStageMask,
+                                        EventToStageMap *localEventToStageMap) {
+    bool skip = false;
+    VkPipelineStageFlags stageMask = 0;
+    const auto max_event = std::min((firstEventIndex + eventCount), pCB->events.size());
+    for (size_t event_index = firstEventIndex; event_index < max_event; ++event_index) {
+        auto event = pCB->events[event_index];
+        auto event_data = localEventToStageMap->find(event);
+        if (event_data != localEventToStageMap->end()) {
+            stageMask |= event_data->second;
+        } else {
+            auto global_event_data = state_data->GetEventState(event);
+            if (!global_event_data) {
+                skip |= log_msg(state_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT,
+                                HandleToUint64(event), kVUID_Core_DrawState_InvalidEvent,
+                                "%s cannot be waited on if it has never been set.",
+                                state_data->report_data->FormatHandle(event).c_str());
+            } else {
+                stageMask |= global_event_data->stageMask;
+            }
+        }
+    }
+    // TODO: Need to validate that host_bit is only set if set event is called
+    // but set event can be called at any time.
+    if (sourceStageMask != stageMask && sourceStageMask != (stageMask | VK_PIPELINE_STAGE_HOST_BIT)) {
+        skip |= log_msg(state_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(pCB->commandBuffer), "VUID-vkCmdWaitEvents-srcStageMask-parameter",
+                        "Submitting cmdbuffer with a call to vkCmdWaitEvents using srcStageMask 0x%X which must be the bitwise OR of "
+                        "the stageMask parameters used in calls to vkCmdSetEvent and VK_PIPELINE_STAGE_HOST_BIT if used with "
+                        "vkSetEvent but instead is 0x%X.",
+                        sourceStageMask, stageMask);
+    }
+    return skip;
+}
+
+// Note that we only check bits that HAVE required queue flags; don't-care entries are skipped
+static std::unordered_map<VkPipelineStageFlags, VkQueueFlags> supported_pipeline_stages_table = {
+    {VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX, VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT},
+    {VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT, VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT},
+    {VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, VK_QUEUE_GRAPHICS_BIT},
+    {VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, VK_QUEUE_GRAPHICS_BIT},
+    {VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT, VK_QUEUE_GRAPHICS_BIT},
+    {VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT, VK_QUEUE_GRAPHICS_BIT},
+    {VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT, VK_QUEUE_GRAPHICS_BIT},
+    {VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_QUEUE_GRAPHICS_BIT},
+    {VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT, VK_QUEUE_GRAPHICS_BIT},
+    {VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT, VK_QUEUE_GRAPHICS_BIT},
+    {VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_QUEUE_GRAPHICS_BIT},
+    {VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_QUEUE_COMPUTE_BIT},
+    {VK_PIPELINE_STAGE_TRANSFER_BIT, VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT},
+    {VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT, VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT},
+    {VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, VK_QUEUE_GRAPHICS_BIT}};
+
+static const VkPipelineStageFlags stage_flag_bit_array[] = {VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX,
+                                                            VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
+                                                            VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
+                                                            VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
+                                                            VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT,
+                                                            VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT,
+                                                            VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT,
+                                                            VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+                                                            VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
+                                                            VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
+                                                            VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+                                                            VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
+                                                            VK_PIPELINE_STAGE_TRANSFER_BIT,
+                                                            VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT,
+                                                            VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT};
+
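+// Walks stage_flag_bit_array (which mirrors the keys of supported_pipeline_stages_table) and flags
+// any set stage bit whose required queue capabilities are missing from queue_flags.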
+bool CoreChecks::CheckStageMaskQueueCompatibility(VkCommandBuffer command_buffer, VkPipelineStageFlags stage_mask,
+                                                  VkQueueFlags queue_flags, const char *function, const char *src_or_dest,
+                                                  const char *error_code) const {
+    bool skip = false;
+    // Lookup each bit in the stagemask and check for overlap between its table bits and queue_flags
+    for (const auto &item : stage_flag_bit_array) {
+        if (stage_mask & item) {
+            if ((supported_pipeline_stages_table[item] & queue_flags) == 0) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(command_buffer), error_code,
+                                "%s(): %s flag %s is not compatible with the queue family properties of this command buffer.",
+                                function, src_or_dest, string_VkPipelineStageFlagBits(static_cast<VkPipelineStageFlagBits>(item)));
+            }
+        }
+    }
+    return skip;
+}
+
+// Check if all barriers are of a given operation type.
+template <typename Barrier, typename OpCheck>
+bool AllTransferOp(const COMMAND_POOL_STATE *pool, OpCheck &op_check, uint32_t count, const Barrier *barriers) {
+    if (!pool) return false;
+
+    for (uint32_t b = 0; b < count; b++) {
+        if (!op_check(pool, barriers + b)) return false;
+    }
+    return true;
+}
+
+// Look at the barriers to see if they are all release or all acquire; the result impacts queue properties validation
+BarrierOperationsType CoreChecks::ComputeBarrierOperationsType(const CMD_BUFFER_STATE *cb_state, uint32_t buffer_barrier_count,
+                                                               const VkBufferMemoryBarrier *buffer_barriers,
+                                                               uint32_t image_barrier_count,
+                                                               const VkImageMemoryBarrier *image_barriers) const {
+    auto pool = cb_state->command_pool.get();
+    BarrierOperationsType op_type = kGeneral;
+
+    // Look at the barrier details only if they exist
+    // Note: AllTransferOp returns true for count == 0
+    if ((buffer_barrier_count + image_barrier_count) != 0) {
+        if (AllTransferOp(pool, TempIsReleaseOp<VkBufferMemoryBarrier>, buffer_barrier_count, buffer_barriers) &&
+            AllTransferOp(pool, TempIsReleaseOp<VkImageMemoryBarrier>, image_barrier_count, image_barriers)) {
+            op_type = kAllRelease;
+        } else if (AllTransferOp(pool, IsAcquireOp<VkBufferMemoryBarrier>, buffer_barrier_count, buffer_barriers) &&
+                   AllTransferOp(pool, IsAcquireOp<VkImageMemoryBarrier>, image_barrier_count, image_barriers)) {
+            op_type = kAllAcquire;
+        }
+    }
+
+    return op_type;
+}
+
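+// The barrier_op_type computed above is consumed here: acquire-only barrier sets skip the
+// srcStageMask queue-capability check, and release-only sets skip the dstStageMask check.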
+bool CoreChecks::ValidateStageMasksAgainstQueueCapabilities(const CMD_BUFFER_STATE *cb_state,
+                                                            VkPipelineStageFlags source_stage_mask,
+                                                            VkPipelineStageFlags dest_stage_mask,
+                                                            BarrierOperationsType barrier_op_type, const char *function,
+                                                            const char *error_code) const {
+    bool skip = false;
+    uint32_t queue_family_index = cb_state->command_pool->queueFamilyIndex;
+    auto physical_device_state = GetPhysicalDeviceState();
+
+    // Any pipeline stage included in srcStageMask or dstStageMask must be supported by the capabilities of the queue family
+    // specified by the queueFamilyIndex member of the VkCommandPoolCreateInfo structure that was used to create the VkCommandPool
+    // that commandBuffer was allocated from, as specified in the table of supported pipeline stages.
+
+    if (queue_family_index < physical_device_state->queue_family_properties.size()) {
+        VkQueueFlags specified_queue_flags = physical_device_state->queue_family_properties[queue_family_index].queueFlags;
+
+        // Only check the source stage mask if any barriers aren't "acquire ownership"
+        if ((barrier_op_type != kAllAcquire) && (source_stage_mask & VK_PIPELINE_STAGE_ALL_COMMANDS_BIT) == 0) {
+            skip |= CheckStageMaskQueueCompatibility(cb_state->commandBuffer, source_stage_mask, specified_queue_flags, function,
+                                                     "srcStageMask", error_code);
+        }
+        // Only check the dest stage mask if any barriers aren't "release ownership"
+        if ((barrier_op_type != kAllRelease) && (dest_stage_mask & VK_PIPELINE_STAGE_ALL_COMMANDS_BIT) == 0) {
+            skip |= CheckStageMaskQueueCompatibility(cb_state->commandBuffer, dest_stage_mask, specified_queue_flags, function,
+                                                     "dstStageMask", error_code);
+        }
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
+                                              VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
+                                              uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
+                                              uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
+                                              uint32_t imageMemoryBarrierCount,
+                                              const VkImageMemoryBarrier *pImageMemoryBarriers) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+
+    auto barrier_op_type = ComputeBarrierOperationsType(cb_state, bufferMemoryBarrierCount, pBufferMemoryBarriers,
+                                                        imageMemoryBarrierCount, pImageMemoryBarriers);
+    bool skip = ValidateStageMasksAgainstQueueCapabilities(cb_state, sourceStageMask, dstStageMask, barrier_op_type,
+                                                           "vkCmdWaitEvents", "VUID-vkCmdWaitEvents-srcStageMask-01164");
+    skip |= ValidateStageMaskGsTsEnables(sourceStageMask, "vkCmdWaitEvents()", "VUID-vkCmdWaitEvents-srcStageMask-01159",
+                                         "VUID-vkCmdWaitEvents-srcStageMask-01161", "VUID-vkCmdWaitEvents-srcStageMask-02111",
+                                         "VUID-vkCmdWaitEvents-srcStageMask-02112");
+    skip |= ValidateStageMaskGsTsEnables(dstStageMask, "vkCmdWaitEvents()", "VUID-vkCmdWaitEvents-dstStageMask-01160",
+                                         "VUID-vkCmdWaitEvents-dstStageMask-01162", "VUID-vkCmdWaitEvents-dstStageMask-02113",
+                                         "VUID-vkCmdWaitEvents-dstStageMask-02114");
+    skip |= ValidateCmdQueueFlags(cb_state, "vkCmdWaitEvents()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+                                  "VUID-vkCmdWaitEvents-commandBuffer-cmdpool");
+    skip |= ValidateCmd(cb_state, CMD_WAITEVENTS, "vkCmdWaitEvents()");
+    skip |= ValidateBarriersToImages(cb_state, imageMemoryBarrierCount, pImageMemoryBarriers, "vkCmdWaitEvents()");
+    skip |= ValidateBarriers("vkCmdWaitEvents()", cb_state, sourceStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers,
+                             bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+    return skip;
+}
+
+void CoreChecks::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
+                                            VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
+                                            uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
+                                            uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
+                                            uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    // The StateTracker record call below will add to the events vector.
+    auto first_event_index = cb_state->events.size();
+    StateTracker::PreCallRecordCmdWaitEvents(commandBuffer, eventCount, pEvents, sourceStageMask, dstStageMask, memoryBarrierCount,
+                                             pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers,
+                                             imageMemoryBarrierCount, pImageMemoryBarriers);
+    auto event_added_count = cb_state->events.size() - first_event_index;
+
+    const CMD_BUFFER_STATE *cb_state_const = cb_state;
+    cb_state->eventUpdates.emplace_back(
+        [cb_state_const, event_added_count, first_event_index, sourceStageMask](
+            const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap) {
+            if (!do_validate) return false;
+            return ValidateEventStageMask(device_data, cb_state_const, event_added_count, first_event_index, sourceStageMask,
+                                          localEventToStageMap);
+        });
+    TransitionImageLayouts(cb_state, imageMemoryBarrierCount, pImageMemoryBarriers);
+}
+
+void CoreChecks::PostCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
+                                             VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
+                                             uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
+                                             uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
+                                             uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    RecordBarrierValidationInfo("vkCmdWaitEvents", cb_state, bufferMemoryBarrierCount, pBufferMemoryBarriers,
+                                imageMemoryBarrierCount, pImageMemoryBarriers);
+}
+
+bool CoreChecks::PreCallValidateCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
+                                                   VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
+                                                   uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
+                                                   uint32_t bufferMemoryBarrierCount,
+                                                   const VkBufferMemoryBarrier *pBufferMemoryBarriers,
+                                                   uint32_t imageMemoryBarrierCount,
+                                                   const VkImageMemoryBarrier *pImageMemoryBarriers) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+
+    bool skip = false;
+    auto barrier_op_type = ComputeBarrierOperationsType(cb_state, bufferMemoryBarrierCount, pBufferMemoryBarriers,
+                                                        imageMemoryBarrierCount, pImageMemoryBarriers);
+    skip |= ValidateStageMasksAgainstQueueCapabilities(cb_state, srcStageMask, dstStageMask, barrier_op_type,
+                                                       "vkCmdPipelineBarrier", "VUID-vkCmdPipelineBarrier-srcStageMask-01183");
+    skip |= ValidateCmdQueueFlags(cb_state, "vkCmdPipelineBarrier()",
+                                  VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+                                  "VUID-vkCmdPipelineBarrier-commandBuffer-cmdpool");
+    skip |= ValidateCmd(cb_state, CMD_PIPELINEBARRIER, "vkCmdPipelineBarrier()");
+    skip |=
+        ValidateStageMaskGsTsEnables(srcStageMask, "vkCmdPipelineBarrier()", "VUID-vkCmdPipelineBarrier-srcStageMask-01168",
+                                     "VUID-vkCmdPipelineBarrier-srcStageMask-01170", "VUID-vkCmdPipelineBarrier-srcStageMask-02115",
+                                     "VUID-vkCmdPipelineBarrier-srcStageMask-02116");
+    skip |=
+        ValidateStageMaskGsTsEnables(dstStageMask, "vkCmdPipelineBarrier()", "VUID-vkCmdPipelineBarrier-dstStageMask-01169",
+                                     "VUID-vkCmdPipelineBarrier-dstStageMask-01171", "VUID-vkCmdPipelineBarrier-dstStageMask-02117",
+                                     "VUID-vkCmdPipelineBarrier-dstStageMask-02118");
+    if (cb_state->activeRenderPass) {
+        skip |= ValidateRenderPassPipelineBarriers("vkCmdPipelineBarrier()", cb_state, srcStageMask, dstStageMask, dependencyFlags,
+                                                   memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
+                                                   pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+        if (skip) return true;  // Early return to avoid redundant errors from below calls
+    }
+    skip |= ValidateBarriersToImages(cb_state, imageMemoryBarrierCount, pImageMemoryBarriers, "vkCmdPipelineBarrier()");
+    skip |= ValidateBarriers("vkCmdPipelineBarrier()", cb_state, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers,
+                             bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+    return skip;
+}
+
+void CoreChecks::EnqueueSubmitTimeValidateImageBarrierAttachment(const char *func_name, CMD_BUFFER_STATE *cb_state,
+                                                                 uint32_t imageMemBarrierCount,
+                                                                 const VkImageMemoryBarrier *pImageMemBarriers) {
+    // Secondary CBs can have a null framebuffer, so queue up validation in that case until the FB is known
+    if ((cb_state->activeRenderPass) && (VK_NULL_HANDLE == cb_state->activeFramebuffer) &&
+        (VK_COMMAND_BUFFER_LEVEL_SECONDARY == cb_state->createInfo.level)) {
+        const auto active_subpass = cb_state->activeSubpass;
+        const auto rp_state = cb_state->activeRenderPass;
+        const auto &sub_desc = rp_state->createInfo.pSubpasses[active_subpass];
+        const VulkanTypedHandle rp_handle(rp_state->renderPass, kVulkanObjectTypeRenderPass);
+        for (uint32_t i = 0; i < imageMemBarrierCount; ++i) {
+            const auto &img_barrier = pImageMemBarriers[i];
+            // Secondary CB case without a FB specified: delay validation
+            cb_state->cmd_execute_commands_functions.emplace_back([=](const CMD_BUFFER_STATE *primary_cb, VkFramebuffer fb) {
+                return ValidateImageBarrierAttachment(func_name, cb_state, fb, active_subpass, sub_desc, rp_handle, i, img_barrier);
+            });
+        }
+    }
+}
+
+void CoreChecks::PreCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
+                                                 VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
+                                                 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
+                                                 uint32_t bufferMemoryBarrierCount,
+                                                 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
+                                                 uint32_t imageMemoryBarrierCount,
+                                                 const VkImageMemoryBarrier *pImageMemoryBarriers) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    const char *func_name = "vkCmdPipelineBarrier";
+
+    RecordBarrierValidationInfo(func_name, cb_state, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount,
+                                pImageMemoryBarriers);
+
+    EnqueueSubmitTimeValidateImageBarrierAttachment(func_name, cb_state, imageMemoryBarrierCount, pImageMemoryBarriers);
+    TransitionImageLayouts(cb_state, imageMemoryBarrierCount, pImageMemoryBarriers);
+}
+
+bool CoreChecks::ValidateBeginQuery(const CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj, VkFlags flags, CMD_TYPE cmd,
+                                    const char *cmd_name, const char *vuid_queue_flags, const char *vuid_queue_feedback,
+                                    const char *vuid_queue_occlusion, const char *vuid_precise,
+                                    const char *vuid_query_count) const {
+    bool skip = false;
+    const auto *query_pool_state = GetQueryPoolState(query_obj.pool);
+    const auto &query_pool_ci = query_pool_state->createInfo;
+
+    if (query_pool_ci.queryType == VK_QUERY_TYPE_TIMESTAMP) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdBeginQuery-queryType-02804",
+                        "%s: The querypool's query type must not be VK_QUERY_TYPE_TIMESTAMP.", cmd_name);
+    }
+
+    // There are tighter queue constraints to test for certain query pools
+    if (query_pool_ci.queryType == VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT) {
+        skip |= ValidateCmdQueueFlags(cb_state, cmd_name, VK_QUEUE_GRAPHICS_BIT, vuid_queue_feedback);
+    }
+    if (query_pool_ci.queryType == VK_QUERY_TYPE_OCCLUSION) {
+        skip |= ValidateCmdQueueFlags(cb_state, cmd_name, VK_QUEUE_GRAPHICS_BIT, vuid_queue_occlusion);
+    }
+    if (query_pool_ci.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
+        if (!cb_state->performance_lock_acquired) {
+            skip |= log_msg(
+                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                HandleToUint64(cb_state->commandBuffer),
+                query_obj.indexed ? "VUID-vkCmdBeginQueryIndexedEXT-queryPool-03223" : "VUID-vkCmdBeginQuery-queryPool-03223",
+                "%s: profiling lock must be held before vkBeginCommandBuffer is called on "
+                "a command buffer where performance queries are recorded.",
+                cmd_name);
+        }
+
+        if (query_pool_state->has_perf_scope_command_buffer && cb_state->commandCount > 0) {
+            skip |= log_msg(
+                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                HandleToUint64(cb_state->commandBuffer),
+                query_obj.indexed ? "VUID-vkCmdBeginQueryIndexedEXT-queryPool-03224" : "VUID-vkCmdBeginQuery-queryPool-03224",
+                "%s: Query pool %s was created with a counter of scope "
+                "VK_QUERY_SCOPE_COMMAND_BUFFER_KHR but %s is not the first recorded "
+                "command in the command buffer.",
+                cmd_name, report_data->FormatHandle(query_obj.pool).c_str(), cmd_name);
+        }
+
+        if (query_pool_state->has_perf_scope_render_pass && cb_state->activeRenderPass) {
+            skip |= log_msg(
+                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                HandleToUint64(cb_state->commandBuffer),
+                query_obj.indexed ? "VUID-vkCmdBeginQueryIndexedEXT-queryPool-03225" : "VUID-vkCmdBeginQuery-queryPool-03225",
+                "%s: Query pool %s was created with a counter of scope "
+                "VK_QUERY_SCOPE_RENDER_PASS_KHR but %s is inside a render pass.",
+                cmd_name, report_data->FormatHandle(query_obj.pool).c_str(), cmd_name);
+        }
+    }
+
+    skip |= ValidateCmdQueueFlags(cb_state, cmd_name, VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT, vuid_queue_flags);
+
+    if (flags & VK_QUERY_CONTROL_PRECISE_BIT) {
+        if (!enabled_features.core.occlusionQueryPrecise) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_state->commandBuffer), vuid_precise,
+                            "%s: VK_QUERY_CONTROL_PRECISE_BIT provided, but precise occlusion queries not enabled on the device.",
+                            cmd_name);
+        }
+
+        if (query_pool_ci.queryType != VK_QUERY_TYPE_OCCLUSION) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(cb_state->commandBuffer), vuid_precise,
+                        "%s: VK_QUERY_CONTROL_PRECISE_BIT provided, but pool query type is not VK_QUERY_TYPE_OCCLUSION", cmd_name);
+        }
+    }
+
+    if (query_obj.query >= query_pool_ci.queryCount) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(cb_state->commandBuffer), vuid_query_count,
+                        "%s: Query index %" PRIu32 " must be less than query count %" PRIu32 " of %s.", cmd_name, query_obj.query,
+                        query_pool_ci.queryCount, report_data->FormatHandle(query_obj.pool).c_str());
+    }
+
+    skip |= ValidateCmd(cb_state, cmd, cmd_name);
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
+                                              VkFlags flags) const {
+    if (disabled.query_validation) return false;
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    QueryObject query_obj(queryPool, slot);
+    return ValidateBeginQuery(cb_state, query_obj, flags, CMD_BEGINQUERY, "vkCmdBeginQuery()",
+                              "VUID-vkCmdBeginQuery-commandBuffer-cmdpool", "VUID-vkCmdBeginQuery-queryType-02327",
+                              "VUID-vkCmdBeginQuery-queryType-00803", "VUID-vkCmdBeginQuery-queryType-00800",
+                              "VUID-vkCmdBeginQuery-query-00802");
+}
+
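+// Queue-submit-time check, invoked from the deferred queryUpdates callbacks: the query targeted by
+// vkCmdBeginQuery or vkCmdWriteTimestamp must be in the RESET state according to the replayed
+// localQueryToStateMap at the point the command executes.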
+bool CoreChecks::VerifyQueryIsReset(const ValidationStateTracker *state_data, VkCommandBuffer commandBuffer, QueryObject query_obj,
+                                    const char *func_name, QueryMap *localQueryToStateMap) {
+    bool skip = false;
+
+    QueryState state = state_data->GetQueryState(localQueryToStateMap, query_obj.pool, query_obj.query);
+    if (state != QUERYSTATE_RESET) {
+        skip |= log_msg(state_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), kVUID_Core_DrawState_QueryNotReset,
+                        "%s: %s and query %" PRIu32
+                        ": query not reset. "
+                        "After query pool creation, each query must be reset before it is used. "
+                        "Queries must also be reset between uses.",
+                        func_name, state_data->report_data->FormatHandle(query_obj.pool).c_str(), query_obj.query);
+    }
+
+    return skip;
+}
+
+void CoreChecks::EnqueueVerifyBeginQuery(VkCommandBuffer command_buffer, const QueryObject &query_obj, const char *func_name) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(command_buffer);
+
+    // Enqueue the submit time validation here, ahead of the submit time state update in the StateTracker's PostCallRecord
+    cb_state->queryUpdates.emplace_back([command_buffer, query_obj, func_name](const ValidationStateTracker *device_data,
+                                                                               bool do_validate, QueryMap *localQueryToStateMap) {
+        if (!do_validate) return false;
+        return VerifyQueryIsReset(device_data, command_buffer, query_obj, func_name, localQueryToStateMap);
+    });
+}
+
+void CoreChecks::PreCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot, VkFlags flags) {
+    if (disabled.query_validation) return;
+    QueryObject query_obj = {queryPool, slot};
+    EnqueueVerifyBeginQuery(commandBuffer, query_obj, "vkCmdBeginQuery()");
+}
+
+bool CoreChecks::ValidateCmdEndQuery(const CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj, CMD_TYPE cmd,
+                                     const char *cmd_name, const char *vuid_queue_flags, const char *vuid_active_queries) const {
+    bool skip = false;
+    if (!cb_state->activeQueries.count(query_obj)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(cb_state->commandBuffer), vuid_active_queries,
+                        "%s: Ending a query before it was started: %s, index %d.", cmd_name,
+                        report_data->FormatHandle(query_obj.pool).c_str(), query_obj.query);
+    }
+    const auto *query_pool_state = GetQueryPoolState(query_obj.pool);
+    const auto &query_pool_ci = query_pool_state->createInfo;
+    if (query_pool_ci.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR && query_pool_state->has_perf_scope_render_pass &&
+        cb_state->activeRenderPass) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdEndQuery-queryPool-03228",
+                        "%s: Query pool %s was created with a counter of scope "
+                        "VK_QUERY_SCOPE_RENDER_PASS_KHR but %s is inside a render pass.",
+                        cmd_name, report_data->FormatHandle(query_obj.pool).c_str(), cmd_name);
+    }
+    skip |= ValidateCmdQueueFlags(cb_state, cmd_name, VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT, vuid_queue_flags);
+    skip |= ValidateCmd(cb_state, cmd, cmd_name);
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) const {
+    if (disabled.query_validation) return false;
+    QueryObject query_obj = {queryPool, slot};
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    return ValidateCmdEndQuery(cb_state, query_obj, CMD_ENDQUERY, "vkCmdEndQuery()", "VUID-vkCmdEndQuery-commandBuffer-cmdpool",
+                               "VUID-vkCmdEndQuery-None-01923");
+}
+
+bool CoreChecks::PreCallValidateCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
+                                                  uint32_t queryCount) const {
+    if (disabled.query_validation) return false;
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+
+    bool skip = InsideRenderPass(cb_state, "vkCmdResetQueryPool()", "VUID-vkCmdResetQueryPool-renderpass");
+    skip |= ValidateCmd(cb_state, CMD_RESETQUERYPOOL, "vkCmdResetQueryPool()");
+    skip |= ValidateCmdQueueFlags(cb_state, "vkCmdResetQueryPool()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+                                  "VUID-vkCmdResetQueryPool-commandBuffer-cmdpool");
+    return skip;
+}
+
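+// Map the tracked state of a query plus the result flags requested by the caller to the kind of data a
+// vkCmdCopyQueryPoolResults call can be expected to produce (wait indefinitely, some data, or possibly no data).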
+static QueryResultType GetQueryResultType(QueryState state, VkQueryResultFlags flags) {
+    switch (state) {
+        case QUERYSTATE_UNKNOWN:
+            return QUERYRESULT_UNKNOWN;
+        case QUERYSTATE_RESET:
+        case QUERYSTATE_RUNNING:
+            if (flags & VK_QUERY_RESULT_WAIT_BIT) {
+                return ((state == QUERYSTATE_RESET) ? QUERYRESULT_WAIT_ON_RESET : QUERYRESULT_WAIT_ON_RUNNING);
+            } else if ((flags & VK_QUERY_RESULT_PARTIAL_BIT) || (flags & VK_QUERY_RESULT_WITH_AVAILABILITY_BIT)) {
+                return QUERYRESULT_SOME_DATA;
+            } else {
+                return QUERYRESULT_NO_DATA;
+            }
+        case QUERYSTATE_ENDED:
+            if ((flags & VK_QUERY_RESULT_WAIT_BIT) || (flags & VK_QUERY_RESULT_PARTIAL_BIT) ||
+                (flags & VK_QUERY_RESULT_WITH_AVAILABILITY_BIT)) {
+                return QUERYRESULT_SOME_DATA;
+            } else {
+                return QUERYRESULT_MAYBE_NO_DATA;
+            }
+        case QUERYSTATE_AVAILABLE:
+            return QUERYRESULT_SOME_DATA;
+    }
+    assert(false);
+    return QUERYRESULT_UNKNOWN;
+}
+
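+// Submit-time check for vkCmdCopyQueryPoolResults: every query in [firstQuery, firstQuery + queryCount) must be in
+// a state from which the requested flags can return data (see GetQueryResultType above).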
+bool CoreChecks::ValidateCopyQueryPoolResults(const ValidationStateTracker *state_data, VkCommandBuffer commandBuffer,
+                                              VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount,
+                                              VkQueryResultFlags flags, QueryMap *localQueryToStateMap) {
+    bool skip = false;
+    for (uint32_t i = 0; i < queryCount; i++) {
+        QueryState state = state_data->GetQueryState(localQueryToStateMap, queryPool, firstQuery + i);
+        QueryResultType result_type = GetQueryResultType(state, flags);
+        if (result_type != QUERYRESULT_SOME_DATA) {
+            skip |= log_msg(state_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(commandBuffer), kVUID_Core_DrawState_InvalidQuery,
+                            "vkCmdCopyQueryPoolResults(): Requesting a copy to a buffer from %s query %" PRIu32 ": %s",
+                            state_data->report_data->FormatHandle(queryPool).c_str(), firstQuery + i,
+                            string_QueryResultType(result_type));
+        }
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
+                                                        uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
+                                                        VkDeviceSize stride, VkQueryResultFlags flags) const {
+    if (disabled.query_validation) return false;
+    const auto cb_state = GetCBState(commandBuffer);
+    const auto dst_buff_state = GetBufferState(dstBuffer);
+    assert(cb_state);
+    assert(dst_buff_state);
+    bool skip = ValidateMemoryIsBoundToBuffer(dst_buff_state, "vkCmdCopyQueryPoolResults()",
+                                              "VUID-vkCmdCopyQueryPoolResults-dstBuffer-00826");
+    skip |= ValidateQueryPoolStride("VUID-vkCmdCopyQueryPoolResults-flags-00822", "VUID-vkCmdCopyQueryPoolResults-flags-00823",
+                                    stride, "dstOffset", dstOffset, flags);
+    // Validate that DST buffer has correct usage flags set
+    skip |= ValidateBufferUsageFlags(dst_buff_state, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true,
+                                     "VUID-vkCmdCopyQueryPoolResults-dstBuffer-00825", "vkCmdCopyQueryPoolResults()",
+                                     "VK_BUFFER_USAGE_TRANSFER_DST_BIT");
+    skip |= ValidateCmdQueueFlags(cb_state, "vkCmdCopyQueryPoolResults()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+                                  "VUID-vkCmdCopyQueryPoolResults-commandBuffer-cmdpool");
+    skip |= ValidateCmd(cb_state, CMD_COPYQUERYPOOLRESULTS, "vkCmdCopyQueryPoolResults()");
+    skip |= InsideRenderPass(cb_state, "vkCmdCopyQueryPoolResults()", "VUID-vkCmdCopyQueryPoolResults-renderpass");
+
+    auto query_pool_state_iter = queryPoolMap.find(queryPool);
+    if (query_pool_state_iter != queryPoolMap.end()) {
+        auto query_pool_state = query_pool_state_iter->second.get();
+        if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
+            skip |= ValidatePerformanceQueryResults("vkCmdCopyQueryPoolResults", query_pool_state, firstQuery, queryCount, flags);
+            if (!phys_dev_ext_props.performance_query_props.allowCommandBufferQueryCopies) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT,
+                                HandleToUint64(commandBuffer), "VUID-vkCmdCopyQueryPoolResults-queryType-03232",
+                                "vkCmdCopyQueryPoolResults called with query pool %s but "
+                                "VkPhysicalDevicePerformanceQueryPropertiesKHR::allowCommandBufferQueryCopies "
+                                "is not set.",
+                                report_data->FormatHandle(queryPool).c_str());
+            }
+        }
+    }
+
+    return skip;
+}
+
+void CoreChecks::PreCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
+                                                      uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
+                                                      VkDeviceSize stride, VkQueryResultFlags flags) {
+    if (disabled.query_validation) return;
+    auto cb_state = GetCBState(commandBuffer);
+    cb_state->queryUpdates.emplace_back(
+        [commandBuffer, queryPool, firstQuery, queryCount, flags](const ValidationStateTracker *device_data, bool do_validate,
+                                                                  QueryMap *localQueryToStateMap) {
+            if (!do_validate) return false;
+            return ValidateCopyQueryPoolResults(device_data, commandBuffer, queryPool, firstQuery, queryCount, flags,
+                                                localQueryToStateMap);
+        });
+}
+
+bool CoreChecks::PreCallValidateCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
+                                                 VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
+                                                 const void *pValues) const {
+    bool skip = false;
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    skip |= ValidateCmdQueueFlags(cb_state, "vkCmdPushConstants()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+                                  "VUID-vkCmdPushConstants-commandBuffer-cmdpool");
+    skip |= ValidateCmd(cb_state, CMD_PUSHCONSTANTS, "vkCmdPushConstants()");
+    skip |= ValidatePushConstantRange(offset, size, "vkCmdPushConstants()");
+    if (0 == stageFlags) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdPushConstants-stageFlags-requiredbitmask",
+                        "vkCmdPushConstants() call has no stageFlags set.");
+    }
+
+    // Check that every VkPushConstantRange in pipeline_layout that contains the updated range [offset, offset + size)
+    // has all of its stageFlags present in the command's stageFlags argument, *and* that every stage in the command's
+    // stageFlags argument is covered by at least one such range.
+    if (!skip) {
+        const auto &ranges = *GetPipelineLayout(layout)->push_constant_ranges;
+        VkShaderStageFlags found_stages = 0;
+        for (const auto &range : ranges) {
+            if ((offset >= range.offset) && (offset + size <= range.offset + range.size)) {
+                VkShaderStageFlags matching_stages = range.stageFlags & stageFlags;
+                if (matching_stages != range.stageFlags) {
+                    // VUID-vkCmdPushConstants-offset-01796
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                    HandleToUint64(commandBuffer), "VUID-vkCmdPushConstants-offset-01796",
+                                    "vkCmdPushConstants(): stageFlags (0x%" PRIx32 "), offset (%" PRIu32 "), and size (%" PRIu32
+                                    ") must contain all stages in overlapping VkPushConstantRange stageFlags (0x%" PRIx32
+                                    "), offset (%" PRIu32 "), and size (%" PRIu32 ") in %s.",
+                                    (uint32_t)stageFlags, offset, size, (uint32_t)range.stageFlags, range.offset, range.size,
+                                    report_data->FormatHandle(layout).c_str());
+                }
+
+                // Accumulate all stages we've found
+                found_stages = matching_stages | found_stages;
+            }
+        }
+        if (found_stages != stageFlags) {
+            // "VUID-vkCmdPushConstants-offset-01795" VUID-vkCmdPushConstants-offset-01795
+            uint32_t missing_stages = ~found_stages & stageFlags;
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(commandBuffer), "VUID-vkCmdPushConstants-offset-01795",
+                            "vkCmdPushConstants(): stageFlags = 0x%" PRIx32
+                            ", but the VkPushConstantRange(s) in %s overlapping offset = %u and size = %u do not contain "
+                            "stageFlags 0x%" PRIx32 ".",
+                            (uint32_t)stageFlags, report_data->FormatHandle(layout).c_str(), offset, size, missing_stages);
+        }
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
+                                                  VkQueryPool queryPool, uint32_t slot) const {
+    if (disabled.query_validation) return false;
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    bool skip = ValidateCmdQueueFlags(cb_state, "vkCmdWriteTimestamp()",
+                                      VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT,
+                                      "VUID-vkCmdWriteTimestamp-commandBuffer-cmdpool");
+    skip |= ValidateCmd(cb_state, CMD_WRITETIMESTAMP, "vkCmdWriteTimestamp()");
+    return skip;
+}
+
+void CoreChecks::PreCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
+                                                VkQueryPool queryPool, uint32_t slot) {
+    if (disabled.query_validation) return;
+    // Enqueue the submit time validation check here, before the submit time state update in StateTracker::PostCall...
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    QueryObject query = {queryPool, slot};
+    const char *func_name = "vkCmdWriteTimestamp()";
+    cb_state->queryUpdates.emplace_back([commandBuffer, query, func_name](const ValidationStateTracker *device_data,
+                                                                          bool do_validate, QueryMap *localQueryToStateMap) {
+        if (!do_validate) return false;
+        return VerifyQueryIsReset(device_data, commandBuffer, query, func_name, localQueryToStateMap);
+    });
+}
+
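+// Verify that each attachment referenced by the given VkAttachmentReference2KHR array was created with the required
+// VkImageUsageFlagBits. For regular framebuffers the usage comes from the attached image; for imageless framebuffers
+// (VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) it comes from the VkFramebufferAttachmentsCreateInfoKHR entry in the
+// pNext chain.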
+bool CoreChecks::MatchUsage(uint32_t count, const VkAttachmentReference2KHR *attachments, const VkFramebufferCreateInfo *fbci,
+                            VkImageUsageFlagBits usage_flag, const char *error_code) const {
+    bool skip = false;
+
+    if (attachments) {
+        for (uint32_t attach = 0; attach < count; attach++) {
+            if (attachments[attach].attachment != VK_ATTACHMENT_UNUSED) {
+                // Attachment counts are verified elsewhere, but prevent an invalid access
+                if (attachments[attach].attachment < fbci->attachmentCount) {
+                    if ((fbci->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
+                        const VkImageView *image_view = &fbci->pAttachments[attachments[attach].attachment];
+                        auto view_state = GetImageViewState(*image_view);
+                        if (view_state) {
+                            const VkImageCreateInfo *ici = &GetImageState(view_state->create_info.image)->createInfo;
+                            if (ici != nullptr) {
+                                if ((ici->usage & usage_flag) == 0) {
+                                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                                    VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, error_code,
+                                                    "vkCreateFramebuffer(): Framebuffer attachment (%d) conflicts with the image's "
+                                                    "IMAGE_USAGE flags (%s).",
+                                                    attachments[attach].attachment, string_VkImageUsageFlagBits(usage_flag));
+                                }
+                            }
+                        }
+                    } else {
+                        const VkFramebufferAttachmentsCreateInfoKHR *fbaci =
+                            lvl_find_in_chain<VkFramebufferAttachmentsCreateInfoKHR>(fbci->pNext);
+                        if (fbaci != nullptr && fbaci->pAttachmentImageInfos != nullptr &&
+                            fbaci->attachmentImageInfoCount > attachments[attach].attachment) {
+                            uint32_t image_usage = fbaci->pAttachmentImageInfos[attachments[attach].attachment].usage;
+                            if ((image_usage & usage_flag) == 0) {
+                                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+                                                0, error_code,
+                                                "vkCreateFramebuffer(): Framebuffer attachment info (%d) conflicts with the image's "
+                                                "IMAGE_USAGE flags (%s).",
+                                                attachments[attach].attachment, string_VkImageUsageFlagBits(usage_flag));
+                            }
+                        }
+                    }
+                }
+            }
+        }
+    }
+    return skip;
+}
+
+// Validate VkFramebufferCreateInfo which includes:
+// 1. attachmentCount equals renderPass attachmentCount
+// 2. corresponding framebuffer and renderpass attachments have matching formats
+// 3. corresponding framebuffer and renderpass attachments have matching sample counts
+// 4. fb attachments only have a single mip level
+// 5. fb attachment dimensions are each at least as large as the fb
+// 6. fb attachments use identity swizzle
+// 7. fb attachments used by renderPass for color/input/ds have correct usage bit set
+// 8. fb dimensions are within physical device limits
+bool CoreChecks::ValidateFramebufferCreateInfo(const VkFramebufferCreateInfo *pCreateInfo) const {
+    bool skip = false;
+
+    const VkFramebufferAttachmentsCreateInfoKHR *pFramebufferAttachmentsCreateInfo =
+        lvl_find_in_chain<VkFramebufferAttachmentsCreateInfoKHR>(pCreateInfo->pNext);
+    if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) != 0) {
+        if (!enabled_features.imageless_framebuffer_features.imagelessFramebuffer) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkFramebufferCreateInfo-flags-03189",
+                        "vkCreateFramebuffer(): VkFramebufferCreateInfo flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, "
+                        "but the imagelessFramebuffer feature is not enabled.");
+        }
+
+        if (pFramebufferAttachmentsCreateInfo == nullptr) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkFramebufferCreateInfo-flags-03190",
+                        "vkCreateFramebuffer(): VkFramebufferCreateInfo flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, "
+                        "but no instance of VkFramebufferAttachmentsCreateInfoKHR is present in the pNext chain.");
+        } else {
+            if (pFramebufferAttachmentsCreateInfo->attachmentImageInfoCount != 0 &&
+                pFramebufferAttachmentsCreateInfo->attachmentImageInfoCount != pCreateInfo->attachmentCount) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkFramebufferCreateInfo-flags-03191",
+                                "vkCreateFramebuffer(): VkFramebufferCreateInfo attachmentCount is %u, but "
+                                "VkFramebufferAttachmentsCreateInfoKHR attachmentImageInfoCount is %u.",
+                                pCreateInfo->attachmentCount, pFramebufferAttachmentsCreateInfo->attachmentImageInfoCount);
+            }
+        }
+    }
+
+    auto rp_state = GetRenderPassState(pCreateInfo->renderPass);
+    if (rp_state) {
+        const VkRenderPassCreateInfo2KHR *rpci = rp_state->createInfo.ptr();
+        if (rpci->attachmentCount != pCreateInfo->attachmentCount) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                            HandleToUint64(pCreateInfo->renderPass), "VUID-VkFramebufferCreateInfo-attachmentCount-00876",
+                            "vkCreateFramebuffer(): VkFramebufferCreateInfo attachmentCount of %u does not match attachmentCount "
+                            "of %u of %s being used to create Framebuffer.",
+                            pCreateInfo->attachmentCount, rpci->attachmentCount,
+                            report_data->FormatHandle(pCreateInfo->renderPass).c_str());
+        } else {
+            // attachmentCounts match, so make sure corresponding attachment details line up
+            if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
+                const VkImageView *image_views = pCreateInfo->pAttachments;
+                for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
+                    auto view_state = GetImageViewState(image_views[i]);
+                    if (view_state == nullptr) {
+                        skip |=
+                            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
+                                    HandleToUint64(image_views[i]), "VUID-VkFramebufferCreateInfo-flags-03188",
+                                    "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u is not a valid VkImageView.", i);
+                    } else {
+                        auto &ivci = view_state->create_info;
+                        if (ivci.format != rpci->pAttachments[i].format) {
+                            skip |= log_msg(
+                                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                                HandleToUint64(pCreateInfo->renderPass), "VUID-VkFramebufferCreateInfo-pAttachments-00880",
+                                "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u has format of %s that does not "
+                                "match the format of %s used by the corresponding attachment for %s.",
+                                i, string_VkFormat(ivci.format), string_VkFormat(rpci->pAttachments[i].format),
+                                report_data->FormatHandle(pCreateInfo->renderPass).c_str());
+                        }
+                        const VkImageCreateInfo *ici = &GetImageState(ivci.image)->createInfo;
+                        if (ici->samples != rpci->pAttachments[i].samples) {
+                            skip |=
+                                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                                        HandleToUint64(pCreateInfo->renderPass), "VUID-VkFramebufferCreateInfo-pAttachments-00881",
+                                        "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u has %s samples that do not "
+                                        "match the %s "
+                                        "samples used by the corresponding attachment for %s.",
+                                        i, string_VkSampleCountFlagBits(ici->samples),
+                                        string_VkSampleCountFlagBits(rpci->pAttachments[i].samples),
+                                        report_data->FormatHandle(pCreateInfo->renderPass).c_str());
+                        }
+                        // Verify that view only has a single mip level
+                        if (ivci.subresourceRange.levelCount != 1) {
+                            skip |= log_msg(
+                                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkFramebufferCreateInfo-pAttachments-00883",
+                                "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u has mip levelCount of %u but "
+                                "only a single mip level (levelCount == 1) is allowed when creating a Framebuffer.",
+                                i, ivci.subresourceRange.levelCount);
+                        }
+                        const uint32_t mip_level = ivci.subresourceRange.baseMipLevel;
+                        uint32_t mip_width = max(1u, ici->extent.width >> mip_level);
+                        uint32_t mip_height = max(1u, ici->extent.height >> mip_level);
+                        if (!(rpci->pAttachments[i].initialLayout == VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT ||
+                              rpci->pAttachments[i].finalLayout == VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT)) {
+                            if ((ivci.subresourceRange.layerCount < pCreateInfo->layers) || (mip_width < pCreateInfo->width) ||
+                                (mip_height < pCreateInfo->height)) {
+                                skip |= log_msg(
+                                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    "VUID-VkFramebufferCreateInfo-pAttachments-00882",
+                                    "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u mip level %u has dimensions "
+                                    "smaller than the corresponding framebuffer dimensions. Here are the respective dimensions for "
+                                    "attachment #%u, framebuffer:\n"
+                                    "width: %u, %u\n"
+                                    "height: %u, %u\n"
+                                    "layerCount: %u, %u\n",
+                                    i, ivci.subresourceRange.baseMipLevel, i, mip_width, pCreateInfo->width, mip_height,
+                                    pCreateInfo->height, ivci.subresourceRange.layerCount, pCreateInfo->layers);
+                            }
+                        } else {
+                            if (device_extensions.vk_ext_fragment_density_map) {
+                                uint32_t ceiling_width = (uint32_t)ceil(
+                                    (float)pCreateInfo->width /
+                                    std::max((float)phys_dev_ext_props.fragment_density_map_props.maxFragmentDensityTexelSize.width,
+                                             1.0f));
+                                if (mip_width < ceiling_width) {
+                                    skip |= log_msg(
+                                        report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        "VUID-VkFramebufferCreateInfo-pAttachments-02555",
+                                        "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u mip level %u has width %u, "
+                                        "which is smaller than the ceiling of the framebuffer width divided by "
+                                        "maxFragmentDensityTexelSize.width (%u).",
+                                        i, ivci.subresourceRange.baseMipLevel, mip_width, ceiling_width);
+                                }
+                                uint32_t ceiling_height = (uint32_t)ceil(
+                                    (float)pCreateInfo->height /
+                                    std::max(
+                                        (float)phys_dev_ext_props.fragment_density_map_props.maxFragmentDensityTexelSize.height,
+                                        1.0f));
+                                if (mip_height < ceiling_height) {
+                                    skip |= log_msg(
+                                        report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        "VUID-VkFramebufferCreateInfo-pAttachments-02556",
+                                        "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u mip level %u has height %u, "
+                                        "which is smaller than the ceiling of the framebuffer height divided by "
+                                        "maxFragmentDensityTexelSize.height (%u).",
+                                        i, ivci.subresourceRange.baseMipLevel, mip_height, ceiling_height);
+                                }
+                            }
+                        }
+                        if (((ivci.components.r != VK_COMPONENT_SWIZZLE_IDENTITY) &&
+                             (ivci.components.r != VK_COMPONENT_SWIZZLE_R)) ||
+                            ((ivci.components.g != VK_COMPONENT_SWIZZLE_IDENTITY) &&
+                             (ivci.components.g != VK_COMPONENT_SWIZZLE_G)) ||
+                            ((ivci.components.b != VK_COMPONENT_SWIZZLE_IDENTITY) &&
+                             (ivci.components.b != VK_COMPONENT_SWIZZLE_B)) ||
+                            ((ivci.components.a != VK_COMPONENT_SWIZZLE_IDENTITY) &&
+                             (ivci.components.a != VK_COMPONENT_SWIZZLE_A))) {
+                            skip |= log_msg(
+                                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkFramebufferCreateInfo-pAttachments-00884",
+                                "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment #%u has non-identity swizzle. All "
+                                "framebuffer attachments must have been created with the identity swizzle. Here are the actual "
+                                "swizzle values:\n"
+                                "r swizzle = %s\n"
+                                "g swizzle = %s\n"
+                                "b swizzle = %s\n"
+                                "a swizzle = %s\n",
+                                i, string_VkComponentSwizzle(ivci.components.r), string_VkComponentSwizzle(ivci.components.g),
+                                string_VkComponentSwizzle(ivci.components.b), string_VkComponentSwizzle(ivci.components.a));
+                        }
+                    }
+                }
+            } else if (pFramebufferAttachmentsCreateInfo) {
+                // VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR is set
+                for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
+                    auto &aii = pFramebufferAttachmentsCreateInfo->pAttachmentImageInfos[i];
+                    bool formatFound = false;
+                    for (uint32_t j = 0; j < aii.viewFormatCount; ++j) {
+                        if (aii.pViewFormats[j] == rpci->pAttachments[i].format) {
+                            formatFound = true;
+                        }
+                    }
+                    if (!formatFound) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                                        HandleToUint64(pCreateInfo->renderPass), "VUID-VkFramebufferCreateInfo-flags-03205",
+                                        "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info #%u does not include "
+                                        "format %s used "
+                                        "by the corresponding attachment for renderPass (%s).",
+                                        i, string_VkFormat(rpci->pAttachments[i].format),
+                                        report_data->FormatHandle(pCreateInfo->renderPass).c_str());
+                    }
+
+                    const char *mismatchedLayersNoMultiviewVuid = device_extensions.vk_khr_multiview
+                                                                      ? "VUID-VkFramebufferCreateInfo-renderPass-03199"
+                                                                      : "VUID-VkFramebufferCreateInfo-flags-03200";
+                    if ((rpci->subpassCount == 0) || (rpci->pSubpasses[0].viewMask == 0)) {
+                        if (aii.layerCount < pCreateInfo->layers) {
+                            skip |=
+                                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        mismatchedLayersNoMultiviewVuid,
+                                        "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info #%u has only %u layers, "
+                                        "but the framebuffer has %u layers.",
+                                        i, aii.layerCount, pCreateInfo->layers);
+                        }
+                    }
+
+                    if (!device_extensions.vk_ext_fragment_density_map) {
+                        if (aii.width < pCreateInfo->width) {
+                            skip |= log_msg(
+                                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkFramebufferCreateInfo-flags-03192",
+                                "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info #%u has a width of only %u, "
+                                "but the framebuffer has a width of %u.",
+                                i, aii.width, pCreateInfo->width);
+                        }
+
+                        if (aii.height < pCreateInfo->height) {
+                            skip |= log_msg(
+                                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkFramebufferCreateInfo-flags-03193",
+                                "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info #%u has a height of only %u, "
+                                "but the framebuffer has a height of %u.",
+                                i, aii.height, pCreateInfo->height);
+                        }
+                    }
+                }
+
+                // Validate image usage
+                uint32_t attachment_index = VK_ATTACHMENT_UNUSED;
+                for (uint32_t i = 0; i < rpci->subpassCount; ++i) {
+                    skip |= MatchUsage(rpci->pSubpasses[i].colorAttachmentCount, rpci->pSubpasses[i].pColorAttachments, pCreateInfo,
+                                       VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, "VUID-VkFramebufferCreateInfo-flags-03201");
+                    skip |=
+                        MatchUsage(rpci->pSubpasses[i].colorAttachmentCount, rpci->pSubpasses[i].pResolveAttachments, pCreateInfo,
+                                   VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, "VUID-VkFramebufferCreateInfo-flags-03201");
+                    skip |= MatchUsage(1, rpci->pSubpasses[i].pDepthStencilAttachment, pCreateInfo,
+                                       VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, "VUID-VkFramebufferCreateInfo-flags-03202");
+                    skip |= MatchUsage(rpci->pSubpasses[i].inputAttachmentCount, rpci->pSubpasses[i].pInputAttachments, pCreateInfo,
+                                       VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, "VUID-VkFramebufferCreateInfo-flags-03204");
+
+                    const VkSubpassDescriptionDepthStencilResolveKHR *pDepthStencilResolve =
+                        lvl_find_in_chain<VkSubpassDescriptionDepthStencilResolveKHR>(rpci->pSubpasses[i].pNext);
+                    if (device_extensions.vk_khr_depth_stencil_resolve && pDepthStencilResolve != nullptr) {
+                        skip |= MatchUsage(1, pDepthStencilResolve->pDepthStencilResolveAttachment, pCreateInfo,
+                                           VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, "VUID-VkFramebufferCreateInfo-flags-03203");
+                    }
+                }
+
+                if (device_extensions.vk_khr_multiview) {
+                    if ((rpci->subpassCount > 0) && (rpci->pSubpasses[0].viewMask != 0)) {
+                        for (uint32_t i = 0; i < rpci->subpassCount; ++i) {
+                            const VkSubpassDescriptionDepthStencilResolveKHR *pDepthStencilResolve =
+                                lvl_find_in_chain<VkSubpassDescriptionDepthStencilResolveKHR>(rpci->pSubpasses[i].pNext);
+                            uint32_t view_bits = rpci->pSubpasses[i].viewMask;
+                            uint32_t highest_view_bit = 0;
+
+                            for (int j = 0; j < 32; ++j) {
+                                if (((view_bits >> j) & 1) != 0) {
+                                    highest_view_bit = j;
+                                }
+                            }
+
+                            for (uint32_t j = 0; j < rpci->pSubpasses[i].colorAttachmentCount; ++j) {
+                                attachment_index = rpci->pSubpasses[i].pColorAttachments[j].attachment;
+                                if (attachment_index != VK_ATTACHMENT_UNUSED) {
+                                    uint32_t layer_count =
+                                        pFramebufferAttachmentsCreateInfo->pAttachmentImageInfos[attachment_index].layerCount;
+                                    if (layer_count <= highest_view_bit) {
+                                        skip |= log_msg(
+                                            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                                            HandleToUint64(pCreateInfo->renderPass),
+                                            "VUID-VkFramebufferCreateInfo-renderPass-03198",
+                                            "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info %u "
+                                            "only specifies %u layers, but the view mask for subpass %u in renderPass (%s) "
+                                            "includes layer %u, with that attachment specified as a color attachment %u.",
+                                            attachment_index, layer_count, i,
+                                            report_data->FormatHandle(pCreateInfo->renderPass).c_str(), highest_view_bit, j);
+                                    }
+                                }
+                                if (rpci->pSubpasses[i].pResolveAttachments) {
+                                    attachment_index = rpci->pSubpasses[i].pResolveAttachments[j].attachment;
+                                    if (attachment_index != VK_ATTACHMENT_UNUSED) {
+                                        uint32_t layer_count =
+                                            pFramebufferAttachmentsCreateInfo->pAttachmentImageInfos[attachment_index].layerCount;
+                                        if (layer_count <= highest_view_bit) {
+                                            skip |= log_msg(
+                                                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                                VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                                                HandleToUint64(pCreateInfo->renderPass),
+                                                "VUID-VkFramebufferCreateInfo-renderPass-03198",
+                                                "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info %u "
+                                                "only specifies %u layers, but the view mask for subpass %u in renderPass (%s) "
+                                                "includes layer %u, with that attachment specified as a resolve attachment %u.",
+                                                attachment_index, layer_count, i,
+                                                report_data->FormatHandle(pCreateInfo->renderPass).c_str(), highest_view_bit, j);
+                                        }
+                                    }
+                                }
+                            }
+
+                            for (uint32_t j = 0; j < rpci->pSubpasses[i].inputAttachmentCount; ++j) {
+                                attachment_index = rpci->pSubpasses[i].pInputAttachments[j].attachment;
+                                if (attachment_index != VK_ATTACHMENT_UNUSED) {
+                                    uint32_t layer_count =
+                                        pFramebufferAttachmentsCreateInfo->pAttachmentImageInfos[attachment_index].layerCount;
+                                    if (layer_count <= highest_view_bit) {
+                                        skip |= log_msg(
+                                            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                                            HandleToUint64(pCreateInfo->renderPass),
+                                            "VUID-VkFramebufferCreateInfo-renderPass-03198",
+                                            "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info %u "
+                                            "only specifies %u layers, but the view mask for subpass %u in renderPass (%s) "
+                                            "includes layer %u, with that attachment specified as an input attachment %u.",
+                                            attachment_index, layer_count, i,
+                                            report_data->FormatHandle(pCreateInfo->renderPass).c_str(), highest_view_bit, j);
+                                    }
+                                }
+                            }
+
+                            if (rpci->pSubpasses[i].pDepthStencilAttachment != nullptr) {
+                                attachment_index = rpci->pSubpasses[i].pDepthStencilAttachment->attachment;
+                                if (attachment_index != VK_ATTACHMENT_UNUSED) {
+                                    uint32_t layer_count =
+                                        pFramebufferAttachmentsCreateInfo->pAttachmentImageInfos[attachment_index].layerCount;
+                                    if (layer_count <= highest_view_bit) {
+                                        skip |= log_msg(
+                                            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                                            HandleToUint64(pCreateInfo->renderPass),
+                                            "VUID-VkFramebufferCreateInfo-renderPass-03198",
+                                            "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info %u "
+                                            "only specifies %u layers, but the view mask for subpass %u in renderPass (%s) "
+                                            "includes layer %u, with that attachment specified as a depth/stencil attachment.",
+                                            attachment_index, layer_count, i,
+                                            report_data->FormatHandle(pCreateInfo->renderPass).c_str(), highest_view_bit);
+                                    }
+                                }
+
+                                if (device_extensions.vk_khr_depth_stencil_resolve && pDepthStencilResolve != nullptr &&
+                                    pDepthStencilResolve->pDepthStencilResolveAttachment != nullptr) {
+                                    attachment_index = pDepthStencilResolve->pDepthStencilResolveAttachment->attachment;
+                                    if (attachment_index != VK_ATTACHMENT_UNUSED) {
+                                        uint32_t layer_count =
+                                            pFramebufferAttachmentsCreateInfo->pAttachmentImageInfos[attachment_index].layerCount;
+                                        if (layer_count <= highest_view_bit) {
+                                            skip |= log_msg(
+                                                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                                VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                                                HandleToUint64(pCreateInfo->renderPass),
+                                                "VUID-VkFramebufferCreateInfo-renderPass-03198",
+                                                "vkCreateFramebuffer(): VkFramebufferCreateInfo attachment info %u "
+                                                "only specifies %u layers, but the view mask for subpass %u in renderPass (%s) "
+                                                "includes layer %u, with that attachment specified as a depth/stencil resolve "
+                                                "attachment.",
+                                                attachment_index, layer_count, i,
+                                                report_data->FormatHandle(pCreateInfo->renderPass).c_str(), highest_view_bit);
+                                        }
+                                    }
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+
+            if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
+                // Verify correct attachment usage flags
+                for (uint32_t subpass = 0; subpass < rpci->subpassCount; subpass++) {
+                    // Verify input attachments:
+                    skip |= MatchUsage(rpci->pSubpasses[subpass].inputAttachmentCount, rpci->pSubpasses[subpass].pInputAttachments,
+                                       pCreateInfo, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT,
+                                       "VUID-VkFramebufferCreateInfo-pAttachments-00879");
+                    // Verify color attachments:
+                    skip |= MatchUsage(rpci->pSubpasses[subpass].colorAttachmentCount, rpci->pSubpasses[subpass].pColorAttachments,
+                                       pCreateInfo, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+                                       "VUID-VkFramebufferCreateInfo-pAttachments-00877");
+                    // Verify depth/stencil attachments:
+                    skip |=
+                        MatchUsage(1, rpci->pSubpasses[subpass].pDepthStencilAttachment, pCreateInfo,
+                                   VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, "VUID-VkFramebufferCreateInfo-pAttachments-02633");
+                }
+            }
+        }
+    }
+    // Verify FB dimensions are within physical device limits
+    if (pCreateInfo->width > phys_dev_props.limits.maxFramebufferWidth) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkFramebufferCreateInfo-width-00886",
+                        "vkCreateFramebuffer(): Requested VkFramebufferCreateInfo width exceeds physical device limits. Requested "
+                        "width: %u, device max: %u\n",
+                        pCreateInfo->width, phys_dev_props.limits.maxFramebufferWidth);
+    }
+    if (pCreateInfo->height > phys_dev_props.limits.maxFramebufferHeight) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkFramebufferCreateInfo-height-00888",
+                        "vkCreateFramebuffer(): Requested VkFramebufferCreateInfo height exceeds physical device limits. Requested "
+                        "height: %u, device max: %u\n",
+                        pCreateInfo->height, phys_dev_props.limits.maxFramebufferHeight);
+    }
+    if (pCreateInfo->layers > phys_dev_props.limits.maxFramebufferLayers) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkFramebufferCreateInfo-layers-00890",
+                        "vkCreateFramebuffer(): Requested VkFramebufferCreateInfo layers exceeds physical device limits. Requested "
+                        "layers: %u, device max: %u\n",
+                        pCreateInfo->layers, phys_dev_props.limits.maxFramebufferLayers);
+    }
+    // Verify FB dimensions are greater than zero
+    if (pCreateInfo->width <= 0) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkFramebufferCreateInfo-width-00885",
+                        "vkCreateFramebuffer(): Requested VkFramebufferCreateInfo width must be greater than zero.");
+    }
+    if (pCreateInfo->height <= 0) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkFramebufferCreateInfo-height-00887",
+                        "vkCreateFramebuffer(): Requested VkFramebufferCreateInfo height must be greater than zero.");
+    }
+    if (pCreateInfo->layers <= 0) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkFramebufferCreateInfo-layers-00889",
+                        "vkCreateFramebuffer(): Requested VkFramebufferCreateInfo layers must be greater than zero.");
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
+                                                  const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer) const {
+    // TODO : Verify that the renderPass this FB is created with is compatible with the FB
+    bool skip = false;
+    skip |= ValidateFramebufferCreateInfo(pCreateInfo);
+    return skip;
+}
+
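+// Depth-first search over the subpass DAG: returns true if a dependency path from 'index' back to
+// 'dependent' can be reached through the prev edges, using 'processed_nodes' to avoid revisiting
+// subpasses already checked.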
+static bool FindDependency(const uint32_t index, const uint32_t dependent, const std::vector<DAGNode> &subpass_to_node,
+                           std::unordered_set<uint32_t> &processed_nodes) {
+    // If we have already checked this node we have not found a dependency path so return false.
+    if (processed_nodes.count(index)) return false;
+    processed_nodes.insert(index);
+    const DAGNode &node = subpass_to_node[index];
+    // Look for a dependency path. If one exists, return true; otherwise recurse on the previous nodes.
+    if (std::find(node.prev.begin(), node.prev.end(), dependent) == node.prev.end()) {
+        for (auto elem : node.prev) {
+            if (FindDependency(elem, dependent, subpass_to_node, processed_nodes)) return true;
+        }
+    } else {
+        return true;
+    }
+    return false;
+}
+
+bool CoreChecks::IsImageLayoutReadOnly(VkImageLayout layout) const {
+    if ((layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL) || (layout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) ||
+        (layout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL) ||
+        (layout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL)) {
+        return true;
+    }
+    return false;
+}
+
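+// For every other subpass in 'dependent_subpasses' that touches the same attachment, require either a
+// direct edge in the DAG or a transitive dependency path; read-only-to-read-only uses are exempt.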
+bool CoreChecks::CheckDependencyExists(const uint32_t subpass, const VkImageLayout layout,
+                                       const std::vector<SubpassLayout> &dependent_subpasses,
+                                       const std::vector<DAGNode> &subpass_to_node, bool &skip) const {
+    bool result = true;
+    bool bImageLayoutReadOnly = IsImageLayoutReadOnly(layout);
+    // Loop through all subpasses that share the same attachment and make sure a dependency exists
+    for (uint32_t k = 0; k < dependent_subpasses.size(); ++k) {
+        const SubpassLayout &sp = dependent_subpasses[k];
+        if (subpass == sp.index) continue;
+        if (bImageLayoutReadOnly && IsImageLayoutReadOnly(sp.layout)) continue;
+
+        const DAGNode &node = subpass_to_node[subpass];
+        // Check for a specified dependency between the two nodes. If one exists we are done.
+        auto prev_elem = std::find(node.prev.begin(), node.prev.end(), sp.index);
+        auto next_elem = std::find(node.next.begin(), node.next.end(), sp.index);
+        if (prev_elem == node.prev.end() && next_elem == node.next.end()) {
+            // If no explicit dependency exists, an implicit one still might. If not, log an error.
+            std::unordered_set<uint32_t> processed_nodes;
+            if (!(FindDependency(subpass, sp.index, subpass_to_node, processed_nodes) ||
+                  FindDependency(sp.index, subpass, subpass_to_node, processed_nodes))) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                kVUID_Core_DrawState_InvalidRenderpass,
+                                "A dependency between subpasses %d and %d must exist but one is not specified.", subpass, sp.index);
+                result = false;
+            }
+        }
+    }
+    return result;
+}
+
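+// Recursively walks the prev edges from subpass 'index'. Returns true if this subpass or an earlier one
+// uses 'attachment'; when an earlier subpass used it, this subpass (depth > 0) must list it in
+// pPreserveAttachments or an error is logged.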
+bool CoreChecks::CheckPreserved(const VkRenderPassCreateInfo2KHR *pCreateInfo, const int index, const uint32_t attachment,
+                                const std::vector<DAGNode> &subpass_to_node, int depth, bool &skip) const {
+    const DAGNode &node = subpass_to_node[index];
+    // If this node writes to the attachment return true as next nodes need to preserve the attachment.
+    const VkSubpassDescription2KHR &subpass = pCreateInfo->pSubpasses[index];
+    for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
+        if (attachment == subpass.pColorAttachments[j].attachment) return true;
+    }
+    for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
+        if (attachment == subpass.pInputAttachments[j].attachment) return true;
+    }
+    if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
+        if (attachment == subpass.pDepthStencilAttachment->attachment) return true;
+    }
+    bool result = false;
+    // Loop through previous nodes and see if any of them write to the attachment.
+    for (auto elem : node.prev) {
+        result |= CheckPreserved(pCreateInfo, elem, attachment, subpass_to_node, depth + 1, skip);
+    }
+    // If the attachment was written to by a previous node, then this node needs to preserve it.
+    if (result && depth > 0) {
+        bool has_preserved = false;
+        for (uint32_t j = 0; j < subpass.preserveAttachmentCount; ++j) {
+            if (subpass.pPreserveAttachments[j] == attachment) {
+                has_preserved = true;
+                break;
+            }
+        }
+        if (!has_preserved) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            kVUID_Core_DrawState_InvalidRenderpass,
+                            "Attachment %d is used by a later subpass and must be preserved in subpass %d.", attachment, index);
+        }
+    }
+    return result;
+}
+
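+// Returns true when [offset1, offset1 + size1) starts or ends strictly inside (offset2, offset2 + size2);
+// used below to detect aliasing between subresource ranges and between memory bindings.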
+template <class T>
+bool IsRangeOverlapping(T offset1, T size1, T offset2, T size2) {
+    return (((offset1 + size1) > offset2) && ((offset1 + size1) < (offset2 + size2))) ||
+           ((offset1 > offset2) && (offset1 < (offset2 + size2)));
+}
+
+bool IsRegionOverlapping(VkImageSubresourceRange range1, VkImageSubresourceRange range2) {
+    return (IsRangeOverlapping(range1.baseMipLevel, range1.levelCount, range2.baseMipLevel, range2.levelCount) &&
+            IsRangeOverlapping(range1.baseArrayLayer, range1.layerCount, range2.baseArrayLayer, range2.layerCount));
+}
+
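+// Builds, for each framebuffer attachment, the lists of subpasses that read (inputs) and write (outputs)
+// it, folding in aliased/overlapping attachments, then checks that the required subpass dependencies and
+// preserve attachments are present.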
+bool CoreChecks::ValidateDependencies(FRAMEBUFFER_STATE const *framebuffer, RENDER_PASS_STATE const *renderPass) const {
+    bool skip = false;
+    auto const pFramebufferInfo = framebuffer->createInfo.ptr();
+    auto const pCreateInfo = renderPass->createInfo.ptr();
+    auto const &subpass_to_node = renderPass->subpassToNode;
+
+    struct Attachment {
+        std::vector<SubpassLayout> outputs;
+        std::vector<SubpassLayout> inputs;
+        std::vector<uint32_t> overlapping;
+    };
+
+    std::vector<Attachment> attachments(pCreateInfo->attachmentCount);
+
+    // Find overlapping attachments
+    for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
+        for (uint32_t j = i + 1; j < pCreateInfo->attachmentCount; ++j) {
+            VkImageView viewi = pFramebufferInfo->pAttachments[i];
+            VkImageView viewj = pFramebufferInfo->pAttachments[j];
+            if (viewi == viewj) {
+                attachments[i].overlapping.emplace_back(j);
+                attachments[j].overlapping.emplace_back(i);
+                continue;
+            }
+            auto view_state_i = GetImageViewState(viewi);
+            auto view_state_j = GetImageViewState(viewj);
+            if (!view_state_i || !view_state_j) {
+                continue;
+            }
+            auto view_ci_i = view_state_i->create_info;
+            auto view_ci_j = view_state_j->create_info;
+            if (view_ci_i.image == view_ci_j.image && IsRegionOverlapping(view_ci_i.subresourceRange, view_ci_j.subresourceRange)) {
+                attachments[i].overlapping.emplace_back(j);
+                attachments[j].overlapping.emplace_back(i);
+                continue;
+            }
+            auto image_data_i = GetImageState(view_ci_i.image);
+            auto image_data_j = GetImageState(view_ci_j.image);
+            if (!image_data_i || !image_data_j) {
+                continue;
+            }
+            if (image_data_i->binding.mem == image_data_j->binding.mem &&
+                IsRangeOverlapping(image_data_i->binding.offset, image_data_i->binding.size, image_data_j->binding.offset,
+                                   image_data_j->binding.size)) {
+                attachments[i].overlapping.emplace_back(j);
+                attachments[j].overlapping.emplace_back(i);
+            }
+        }
+    }
+    // For each attachment, find the subpasses that use it.
+    unordered_set<uint32_t> attachmentIndices;
+    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
+        const VkSubpassDescription2KHR &subpass = pCreateInfo->pSubpasses[i];
+        attachmentIndices.clear();
+        for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
+            uint32_t attachment = subpass.pInputAttachments[j].attachment;
+            if (attachment == VK_ATTACHMENT_UNUSED) continue;
+            SubpassLayout sp = {i, subpass.pInputAttachments[j].layout};
+            attachments[attachment].inputs.emplace_back(sp);
+            for (auto overlapping_attachment : attachments[attachment].overlapping) {
+                attachments[overlapping_attachment].inputs.emplace_back(sp);
+            }
+        }
+        for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
+            uint32_t attachment = subpass.pColorAttachments[j].attachment;
+            if (attachment == VK_ATTACHMENT_UNUSED) continue;
+            SubpassLayout sp = {i, subpass.pColorAttachments[j].layout};
+            attachments[attachment].outputs.emplace_back(sp);
+            for (auto overlapping_attachment : attachments[attachment].overlapping) {
+                attachments[overlapping_attachment].outputs.emplace_back(sp);
+            }
+            attachmentIndices.insert(attachment);
+        }
+        if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
+            uint32_t attachment = subpass.pDepthStencilAttachment->attachment;
+            SubpassLayout sp = {i, subpass.pDepthStencilAttachment->layout};
+            attachments[attachment].outputs.emplace_back(sp);
+            for (auto overlapping_attachment : attachments[attachment].overlapping) {
+                attachments[overlapping_attachment].outputs.emplace_back(sp);
+            }
+
+            if (attachmentIndices.count(attachment)) {
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            kVUID_Core_DrawState_InvalidRenderpass,
+                            "Cannot use same attachment (%u) as both color and depth output in same subpass (%u).", attachment, i);
+            }
+        }
+    }
+    // If a dependency is needed, make sure one exists
+    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
+        const VkSubpassDescription2KHR &subpass = pCreateInfo->pSubpasses[i];
+        // If the attachment is used as an input, all subpasses that write to it must have a dependency relationship
+        for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
+            uint32_t attachment = subpass.pInputAttachments[j].attachment;
+            if (attachment == VK_ATTACHMENT_UNUSED) continue;
+            CheckDependencyExists(i, subpass.pInputAttachments[j].layout, attachments[attachment].outputs, subpass_to_node, skip);
+        }
+        // If the attachment is an output then all subpasses that use the attachment must have a dependency relationship
+        for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
+            uint32_t attachment = subpass.pColorAttachments[j].attachment;
+            if (attachment == VK_ATTACHMENT_UNUSED) continue;
+            CheckDependencyExists(i, subpass.pColorAttachments[j].layout, attachments[attachment].outputs, subpass_to_node, skip);
+            CheckDependencyExists(i, subpass.pColorAttachments[j].layout, attachments[attachment].inputs, subpass_to_node, skip);
+        }
+        if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
+            const uint32_t &attachment = subpass.pDepthStencilAttachment->attachment;
+            CheckDependencyExists(i, subpass.pDepthStencilAttachment->layout, attachments[attachment].outputs, subpass_to_node,
+                                  skip);
+            CheckDependencyExists(i, subpass.pDepthStencilAttachment->layout, attachments[attachment].inputs, subpass_to_node,
+                                  skip);
+        }
+    }
+    // Loop through implicit dependencies: if this pass reads an attachment, make sure it is preserved in
+    // every pass after it was written.
+    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
+        const VkSubpassDescription2KHR &subpass = pCreateInfo->pSubpasses[i];
+        for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
+            CheckPreserved(pCreateInfo, i, subpass.pInputAttachments[j].attachment, subpass_to_node, 0, skip);
+        }
+    }
+    return skip;
+}
+
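+// Validates the pDependencies array itself: external and self dependencies, multiview view offsets and
+// view-local flags, subpass ordering, and logically-ordered pipeline stages for self-dependencies.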
+bool CoreChecks::ValidateRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR *pCreateInfo) const {
+    bool skip = false;
+    const char *vuid;
+    const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
+
+    for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
+        const VkSubpassDependency2KHR &dependency = pCreateInfo->pDependencies[i];
+        VkPipelineStageFlagBits latest_src_stage = GetLogicallyLatestGraphicsPipelineStage(dependency.srcStageMask);
+        VkPipelineStageFlagBits earliest_dst_stage = GetLogicallyEarliestGraphicsPipelineStage(dependency.dstStageMask);
+
+        // The first subpass here serves as a good proxy for "is multiview enabled" - since all view masks need to be non-zero if
+        // any are, which enables multiview.
+        if (use_rp2 && (dependency.dependencyFlags & VK_DEPENDENCY_VIEW_LOCAL_BIT) && (pCreateInfo->pSubpasses[0].viewMask == 0)) {
+            skip |= log_msg(
+                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                "VUID-VkRenderPassCreateInfo2KHR-viewMask-03059",
+                "Dependency %u specifies the VK_DEPENDENCY_VIEW_LOCAL_BIT, but multiview is not enabled for this render pass.", i);
+        } else if (use_rp2 && !(dependency.dependencyFlags & VK_DEPENDENCY_VIEW_LOCAL_BIT) && dependency.viewOffset != 0) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkSubpassDependency2KHR-dependencyFlags-03092",
+                            "Dependency %u specifies the VK_DEPENDENCY_VIEW_LOCAL_BIT, but also specifies a view offset of %u.", i,
+                            dependency.viewOffset);
+        } else if (dependency.srcSubpass == VK_SUBPASS_EXTERNAL || dependency.dstSubpass == VK_SUBPASS_EXTERNAL) {
+            if (dependency.srcSubpass == dependency.dstSubpass) {
+                vuid = use_rp2 ? "VUID-VkSubpassDependency2KHR-srcSubpass-03085" : "VUID-VkSubpassDependency-srcSubpass-00865";
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                                "The src and dst subpasses in dependency %u are both external.", i);
+            } else if (dependency.dependencyFlags & VK_DEPENDENCY_VIEW_LOCAL_BIT) {
+                if (dependency.srcSubpass == VK_SUBPASS_EXTERNAL) {
+                    vuid = "VUID-VkSubpassDependency-dependencyFlags-02520";
+                } else {  // dependency.dstSubpass == VK_SUBPASS_EXTERNAL
+                    vuid = "VUID-VkSubpassDependency-dependencyFlags-02521";
+                }
+                if (use_rp2) {
+                    // Create render pass 2 distinguishes between source and destination external dependencies.
+                    if (dependency.srcSubpass == VK_SUBPASS_EXTERNAL) {
+                        vuid = "VUID-VkSubpassDependency2KHR-dependencyFlags-03090";
+                    } else {
+                        vuid = "VUID-VkSubpassDependency2KHR-dependencyFlags-03091";
+                    }
+                }
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                            "Dependency %u specifies an external dependency but also specifies VK_DEPENDENCY_VIEW_LOCAL_BIT.", i);
+            }
+        } else if (dependency.srcSubpass > dependency.dstSubpass) {
+            vuid = use_rp2 ? "VUID-VkSubpassDependency2KHR-srcSubpass-03084" : "VUID-VkSubpassDependency-srcSubpass-00864";
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                            "Dependency %u specifies a dependency from a later subpass (%u) to an earlier subpass (%u), which is "
+                            "disallowed to prevent cyclic dependencies.",
+                            i, dependency.srcSubpass, dependency.dstSubpass);
+        } else if (dependency.srcSubpass == dependency.dstSubpass) {
+            if (dependency.viewOffset != 0) {
+                vuid = use_rp2 ? kVUID_Core_DrawState_InvalidRenderpass : "VUID-VkRenderPassCreateInfo-pNext-01930";
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                                "Dependency %u specifies a self-dependency but has a non-zero view offset of %u", i,
+                                dependency.viewOffset);
+            } else if ((dependency.dependencyFlags | VK_DEPENDENCY_VIEW_LOCAL_BIT) != dependency.dependencyFlags &&
+                       pCreateInfo->pSubpasses[dependency.srcSubpass].viewMask > 1) {
+                vuid =
+                    use_rp2 ? "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03060" : "VUID-VkSubpassDependency-srcSubpass-00872";
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                                "Dependency %u specifies a self-dependency for subpass %u with a non-zero view mask, but does not "
+                                "specify VK_DEPENDENCY_VIEW_LOCAL_BIT.",
+                                i, dependency.srcSubpass);
+            } else if ((HasNonFramebufferStagePipelineStageFlags(dependency.srcStageMask) ||
+                        HasNonFramebufferStagePipelineStageFlags(dependency.dstStageMask)) &&
+                       (GetGraphicsPipelineStageLogicalOrdinal(latest_src_stage) >
+                        GetGraphicsPipelineStageLogicalOrdinal(earliest_dst_stage))) {
+                vuid = use_rp2 ? "VUID-VkSubpassDependency2KHR-srcSubpass-03087" : "VUID-VkSubpassDependency-srcSubpass-00867";
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                    "Dependency %u specifies a self-dependency from logically-later stage (%s) to a logically-earlier stage (%s).",
+                    i, string_VkPipelineStageFlagBits(latest_src_stage), string_VkPipelineStageFlagBits(earliest_dst_stage));
+            }
+        }
+    }
+    return skip;
+}
+
+bool CoreChecks::ValidateAttachmentIndex(RenderPassCreateVersion rp_version, uint32_t attachment, uint32_t attachment_count,
+                                         const char *type) const {
+    bool skip = false;
+    const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
+    const char *const function_name = use_rp2 ? "vkCreateRenderPass2KHR()" : "vkCreateRenderPass()";
+
+    if (attachment >= attachment_count && attachment != VK_ATTACHMENT_UNUSED) {
+        const char *vuid =
+            use_rp2 ? "VUID-VkRenderPassCreateInfo2KHR-attachment-03051" : "VUID-VkRenderPassCreateInfo-attachment-00834";
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                        "%s: %s attachment %d must be less than the total number of attachments %d.", type, function_name,
+                        attachment, attachment_count);
+    }
+    return skip;
+}
+
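+// Bit flags for the roles an attachment can play within a single subpass; AddAttachmentUse ORs these
+// together per attachment to detect disallowed combinations.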
+enum AttachmentType {
+    ATTACHMENT_COLOR = 1,
+    ATTACHMENT_DEPTH = 2,
+    ATTACHMENT_INPUT = 4,
+    ATTACHMENT_PRESERVE = 8,
+    ATTACHMENT_RESOLVE = 16,
+};
+
+char const *StringAttachmentType(uint8_t type) {
+    switch (type) {
+        case ATTACHMENT_COLOR:
+            return "color";
+        case ATTACHMENT_DEPTH:
+            return "depth";
+        case ATTACHMENT_INPUT:
+            return "input";
+        case ATTACHMENT_PRESERVE:
+            return "preserve";
+        case ATTACHMENT_RESOLVE:
+            return "resolve";
+        default:
+            return "(multiple)";
+    }
+}
+
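+// Records that 'attachment' is used as 'new_use' in 'subpass', tracking its layout. Flags reuse of the
+// same attachment with a different layout, or a combination of roles (beyond input plus color/depth)
+// that is not allowed.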
+bool CoreChecks::AddAttachmentUse(RenderPassCreateVersion rp_version, uint32_t subpass, std::vector<uint8_t> &attachment_uses,
+                                  std::vector<VkImageLayout> &attachment_layouts, uint32_t attachment, uint8_t new_use,
+                                  VkImageLayout new_layout) const {
+    if (attachment >= attachment_uses.size()) return false; /* out of range, but already reported */
+
+    bool skip = false;
+    auto &uses = attachment_uses[attachment];
+    const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
+    const char *vuid;
+    const char *const function_name = use_rp2 ? "vkCreateRenderPass2KHR()" : "vkCreateRenderPass()";
+
+    if (uses & new_use) {
+        if (attachment_layouts[attachment] != new_layout) {
+            vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-layout-02528" : "VUID-VkSubpassDescription-layout-02519";
+            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                    "%s: subpass %u already uses attachment %u with a different image layout (%s vs %s).", function_name, subpass,
+                    attachment, string_VkImageLayout(attachment_layouts[attachment]), string_VkImageLayout(new_layout));
+        }
+    } else if (uses & ~ATTACHMENT_INPUT || (uses && (new_use == ATTACHMENT_RESOLVE || new_use == ATTACHMENT_PRESERVE))) {
+        /* Note: input attachments are assumed to be done first. */
+        vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-pPreserveAttachments-03074"
+                       : "VUID-VkSubpassDescription-pPreserveAttachments-00854";
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                        "%s: subpass %u uses attachment %u as both %s and %s attachment.", function_name, subpass, attachment,
+                        StringAttachmentType(uses), StringAttachmentType(new_use));
+    } else {
+        attachment_layouts[attachment] = new_layout;
+        uses |= new_use;
+    }
+
+    return skip;
+}
+
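+// Per-subpass validation of attachment references: index bounds, aspect masks, resolve rules,
+// sample-count consistency, and conflicting uses within a subpass.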
+bool CoreChecks::ValidateRenderpassAttachmentUsage(RenderPassCreateVersion rp_version,
+                                                   const VkRenderPassCreateInfo2KHR *pCreateInfo) const {
+    bool skip = false;
+    const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
+    const char *vuid;
+    const char *const function_name = use_rp2 ? "vkCreateRenderPass2KHR()" : "vkCreateRenderPass()";
+
+    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
+        const VkSubpassDescription2KHR &subpass = pCreateInfo->pSubpasses[i];
+        std::vector<uint8_t> attachment_uses(pCreateInfo->attachmentCount);
+        std::vector<VkImageLayout> attachment_layouts(pCreateInfo->attachmentCount);
+
+        if (subpass.pipelineBindPoint != VK_PIPELINE_BIND_POINT_GRAPHICS) {
+            vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-pipelineBindPoint-03062"
+                           : "VUID-VkSubpassDescription-pipelineBindPoint-00844";
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                            "%s: Pipeline bind point for subpass %d must be VK_PIPELINE_BIND_POINT_GRAPHICS.", function_name, i);
+        }
+
+        for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
+            auto const &attachment_ref = subpass.pInputAttachments[j];
+            if (attachment_ref.attachment != VK_ATTACHMENT_UNUSED) {
+                skip |= ValidateAttachmentIndex(rp_version, attachment_ref.attachment, pCreateInfo->attachmentCount, "Input");
+
+                if (attachment_ref.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
+                    vuid =
+                        use_rp2 ? kVUID_Core_DrawState_InvalidRenderpass : "VUID-VkInputAttachmentAspectReference-aspectMask-01964";
+                    skip |= log_msg(
+                        report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                        "%s: Aspect mask for input attachment reference %d in subpass %d includes VK_IMAGE_ASPECT_METADATA_BIT.",
+                        function_name, i, j);
+                }
+
+                if (attachment_ref.attachment < pCreateInfo->attachmentCount) {
+                    skip |= AddAttachmentUse(rp_version, i, attachment_uses, attachment_layouts, attachment_ref.attachment,
+                                             ATTACHMENT_INPUT, attachment_ref.layout);
+
+                    vuid = use_rp2 ? kVUID_Core_DrawState_InvalidRenderpass : "VUID-VkRenderPassCreateInfo-pNext-01963";
+                    skip |= ValidateImageAspectMask(VK_NULL_HANDLE, pCreateInfo->pAttachments[attachment_ref.attachment].format,
+                                                    attachment_ref.aspectMask, function_name, vuid);
+                }
+
+                if (rp_version == RENDER_PASS_VERSION_2) {
+                    // These are validated automatically as part of parameter validation for create renderpass 1
+                    // as they are in a struct that only applies to input attachments - not so for v2.
+
+                    // Check for 0
+                    if (attachment_ref.aspectMask == 0) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        "VUID-VkSubpassDescription2KHR-attachment-02800",
+                                        "%s:  Input attachment (%d) aspect mask must not be 0.", function_name, j);
+                    } else {
+                        const VkImageAspectFlags valid_bits =
+                            (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT |
+                             VK_IMAGE_ASPECT_METADATA_BIT | VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT |
+                             VK_IMAGE_ASPECT_PLANE_2_BIT | VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT |
+                             VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT | VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT |
+                             VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT);
+
+                        // Check for valid aspect mask bits
+                        if (attachment_ref.aspectMask & ~valid_bits) {
+                            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                            "VUID-VkSubpassDescription2KHR-attachment-02799",
+                                            "%s:  Input attachment (%d) aspect mask (0x%" PRIx32 ") is invalid.", function_name, j,
+                                            attachment_ref.aspectMask);
+                        }
+                    }
+                }
+            }
+        }
+
+        for (uint32_t j = 0; j < subpass.preserveAttachmentCount; ++j) {
+            uint32_t attachment = subpass.pPreserveAttachments[j];
+            if (attachment == VK_ATTACHMENT_UNUSED) {
+                vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-attachment-03073" : "VUID-VkSubpassDescription-attachment-00853";
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                                "%s:  Preserve attachment (%d) must not be VK_ATTACHMENT_UNUSED.", function_name, j);
+            } else {
+                skip |= ValidateAttachmentIndex(rp_version, attachment, pCreateInfo->attachmentCount, "Preserve");
+                if (attachment < pCreateInfo->attachmentCount) {
+                    skip |= AddAttachmentUse(rp_version, i, attachment_uses, attachment_layouts, attachment, ATTACHMENT_PRESERVE,
+                                             VkImageLayout(0) /* preserve doesn't have any layout */);
+                }
+            }
+        }
+
+        bool subpass_performs_resolve = false;
+
+        for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
+            if (subpass.pResolveAttachments) {
+                auto const &attachment_ref = subpass.pResolveAttachments[j];
+                if (attachment_ref.attachment != VK_ATTACHMENT_UNUSED) {
+                    skip |= ValidateAttachmentIndex(rp_version, attachment_ref.attachment, pCreateInfo->attachmentCount, "Resolve");
+
+                    if (attachment_ref.attachment < pCreateInfo->attachmentCount) {
+                        skip |= AddAttachmentUse(rp_version, i, attachment_uses, attachment_layouts, attachment_ref.attachment,
+                                                 ATTACHMENT_RESOLVE, attachment_ref.layout);
+
+                        subpass_performs_resolve = true;
+
+                        if (pCreateInfo->pAttachments[attachment_ref.attachment].samples != VK_SAMPLE_COUNT_1_BIT) {
+                            vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-pResolveAttachments-03067"
+                                           : "VUID-VkSubpassDescription-pResolveAttachments-00849";
+                            skip |= log_msg(
+                                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                                "%s:  Subpass %u requests multisample resolve into attachment %u, which must "
+                                "have VK_SAMPLE_COUNT_1_BIT but has %s.",
+                                function_name, i, attachment_ref.attachment,
+                                string_VkSampleCountFlagBits(pCreateInfo->pAttachments[attachment_ref.attachment].samples));
+                        }
+                    }
+                }
+            }
+        }
+
+        if (subpass.pDepthStencilAttachment) {
+            if (subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
+                skip |= ValidateAttachmentIndex(rp_version, subpass.pDepthStencilAttachment->attachment,
+                                                pCreateInfo->attachmentCount, "Depth");
+                if (subpass.pDepthStencilAttachment->attachment < pCreateInfo->attachmentCount) {
+                    skip |= AddAttachmentUse(rp_version, i, attachment_uses, attachment_layouts,
+                                             subpass.pDepthStencilAttachment->attachment, ATTACHMENT_DEPTH,
+                                             subpass.pDepthStencilAttachment->layout);
+                }
+            }
+        }
+
+        uint32_t last_sample_count_attachment = VK_ATTACHMENT_UNUSED;
+        for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
+            auto const &attachment_ref = subpass.pColorAttachments[j];
+            skip |= ValidateAttachmentIndex(rp_version, attachment_ref.attachment, pCreateInfo->attachmentCount, "Color");
+            if (attachment_ref.attachment != VK_ATTACHMENT_UNUSED && attachment_ref.attachment < pCreateInfo->attachmentCount) {
+                skip |= AddAttachmentUse(rp_version, i, attachment_uses, attachment_layouts, attachment_ref.attachment,
+                                         ATTACHMENT_COLOR, attachment_ref.layout);
+
+                VkSampleCountFlagBits current_sample_count = pCreateInfo->pAttachments[attachment_ref.attachment].samples;
+                if (last_sample_count_attachment != VK_ATTACHMENT_UNUSED) {
+                    VkSampleCountFlagBits last_sample_count =
+                        pCreateInfo->pAttachments[subpass.pColorAttachments[last_sample_count_attachment].attachment].samples;
+                    if (current_sample_count != last_sample_count) {
+                        vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-pColorAttachments-03069"
+                                       : "VUID-VkSubpassDescription-pColorAttachments-01417";
+                        skip |=
+                            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                                    "%s:  Subpass %u attempts to render to color attachments with inconsistent sample counts."
+                                    "Color attachment ref %u has sample count %s, whereas previous color attachment ref %u has "
+                                    "sample count %s.",
+                                    function_name, i, j, string_VkSampleCountFlagBits(current_sample_count),
+                                    last_sample_count_attachment, string_VkSampleCountFlagBits(last_sample_count));
+                    }
+                }
+                last_sample_count_attachment = j;
+
+                if (subpass_performs_resolve && current_sample_count == VK_SAMPLE_COUNT_1_BIT) {
+                    vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-pResolveAttachments-03066"
+                                   : "VUID-VkSubpassDescription-pResolveAttachments-00848";
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                                    "%s:  Subpass %u requests multisample resolve from attachment %u which has "
+                                    "VK_SAMPLE_COUNT_1_BIT.",
+                                    function_name, i, attachment_ref.attachment);
+                }
+
+                if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED &&
+                    subpass.pDepthStencilAttachment->attachment < pCreateInfo->attachmentCount) {
+                    const auto depth_stencil_sample_count =
+                        pCreateInfo->pAttachments[subpass.pDepthStencilAttachment->attachment].samples;
+
+                    if (device_extensions.vk_amd_mixed_attachment_samples) {
+                        if (pCreateInfo->pAttachments[attachment_ref.attachment].samples > depth_stencil_sample_count) {
+                            vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-pColorAttachments-03070"
+                                           : "VUID-VkSubpassDescription-pColorAttachments-01506";
+                            skip |= log_msg(
+                                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                                "%s:  Subpass %u pColorAttachments[%u] has %s which is larger than "
+                                "depth/stencil attachment %s.",
+                                function_name, i, j,
+                                string_VkSampleCountFlagBits(pCreateInfo->pAttachments[attachment_ref.attachment].samples),
+                                string_VkSampleCountFlagBits(depth_stencil_sample_count));
+                            break;
+                        }
+                    }
+
+                    if (!device_extensions.vk_amd_mixed_attachment_samples && !device_extensions.vk_nv_framebuffer_mixed_samples &&
+                        current_sample_count != depth_stencil_sample_count) {
+                        vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-pDepthStencilAttachment-03071"
+                                       : "VUID-VkSubpassDescription-pDepthStencilAttachment-01418";
+                        skip |= log_msg(
+                            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                            "%s:  Subpass %u attempts to render to use a depth/stencil attachment with sample count that differs "
+                            "from color attachment %u."
+                            "The depth attachment ref has sample count %s, whereas color attachment ref %u has sample count %s.",
+                            function_name, i, j, string_VkSampleCountFlagBits(depth_stencil_sample_count), j,
+                            string_VkSampleCountFlagBits(current_sample_count));
+                        break;
+                    }
+                }
+            }
+
+            if (subpass_performs_resolve && subpass.pResolveAttachments[j].attachment != VK_ATTACHMENT_UNUSED &&
+                subpass.pResolveAttachments[j].attachment < pCreateInfo->attachmentCount) {
+                if (attachment_ref.attachment == VK_ATTACHMENT_UNUSED) {
+                    vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-pResolveAttachments-03065"
+                                   : "VUID-VkSubpassDescription-pResolveAttachments-00847";
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                                    "%s:  Subpass %u requests multisample resolve from attachment %u which has "
+                                    "attachment=VK_ATTACHMENT_UNUSED.",
+                                    function_name, i, attachment_ref.attachment);
+                } else {
+                    const auto &color_desc = pCreateInfo->pAttachments[attachment_ref.attachment];
+                    const auto &resolve_desc = pCreateInfo->pAttachments[subpass.pResolveAttachments[j].attachment];
+                    if (color_desc.format != resolve_desc.format) {
+                        vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-pResolveAttachments-03068"
+                                       : "VUID-VkSubpassDescription-pResolveAttachments-00850";
+                        skip |=
+                            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                                    "%s:  Subpass %u pColorAttachments[%u] resolves to an attachment with a "
+                                    "different format. color format: %u, resolve format: %u.",
+                                    function_name, i, j, color_desc.format, resolve_desc.format);
+                    }
+                }
+            }
+        }
+    }
+    return skip;
+}
+
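+// Shared validation for vkCreateRenderPass and vkCreateRenderPass2KHR, operating on the
+// VkRenderPassCreateInfo2KHR form of the create info.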
+bool CoreChecks::ValidateCreateRenderPass(VkDevice device, RenderPassCreateVersion rp_version,
+                                          const VkRenderPassCreateInfo2KHR *pCreateInfo) const {
+    bool skip = false;
+    const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
+    const char *vuid;
+    const char *const function_name = use_rp2 ? "vkCreateRenderPass2KHR()" : "vkCreateRenderPass()";
+
+    // TODO: As part of wrapping up the mem_tracker/core_validation merge the following routine should be consolidated with
+    //       ValidateLayouts.
+    skip |= ValidateRenderpassAttachmentUsage(rp_version, pCreateInfo);
+
+    skip |= ValidateRenderPassDAG(rp_version, pCreateInfo);
+
+    // Validate multiview correlation and view masks
+    bool viewMaskZero = false;
+    bool viewMaskNonZero = false;
+
+    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
+        const VkSubpassDescription2KHR &subpass = pCreateInfo->pSubpasses[i];
+        if (subpass.viewMask != 0) {
+            viewMaskNonZero = true;
+        } else {
+            viewMaskZero = true;
+        }
+
+        if ((subpass.flags & VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX) != 0 &&
+            (subpass.flags & VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX) == 0) {
+            vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-flags-03076" : "VUID-VkSubpassDescription-flags-00856";
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                            "%s: The flags parameter of subpass description %u includes "
+                            "VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX but does not also include "
+                            "VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX.",
+                            function_name, i);
+        }
+    }
+
+    if (rp_version == RENDER_PASS_VERSION_2) {
+        if (viewMaskNonZero && viewMaskZero) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkRenderPassCreateInfo2KHR-viewMask-03058",
+                            "%s: Some view masks are non-zero whilst others are zero.", function_name);
+        }
+
+        if (viewMaskZero && pCreateInfo->correlatedViewMaskCount != 0) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkRenderPassCreateInfo2KHR-viewMask-03057",
+                            "%s: Multiview is not enabled but correlation masks are still provided", function_name);
+        }
+    }
+    uint32_t aggregated_cvms = 0;
+    for (uint32_t i = 0; i < pCreateInfo->correlatedViewMaskCount; ++i) {
+        if (aggregated_cvms & pCreateInfo->pCorrelatedViewMasks[i]) {
+            vuid = use_rp2 ? "VUID-VkRenderPassCreateInfo2KHR-pCorrelatedViewMasks-03056"
+                           : "VUID-VkRenderPassMultiviewCreateInfo-pCorrelationMasks-00841";
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                            "%s: pCorrelatedViewMasks[%u] contains a previously appearing view bit.", function_name, i);
+        }
+        aggregated_cvms |= pCreateInfo->pCorrelatedViewMasks[i];
+    }
+
+    for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
+        auto const &dependency = pCreateInfo->pDependencies[i];
+        if (rp_version == RENDER_PASS_VERSION_2) {
+            skip |= ValidateStageMaskGsTsEnables(
+                dependency.srcStageMask, function_name, "VUID-VkSubpassDependency2KHR-srcStageMask-03080",
+                "VUID-VkSubpassDependency2KHR-srcStageMask-03082", "VUID-VkSubpassDependency2KHR-srcStageMask-02103",
+                "VUID-VkSubpassDependency2KHR-srcStageMask-02104");
+            skip |= ValidateStageMaskGsTsEnables(
+                dependency.dstStageMask, function_name, "VUID-VkSubpassDependency2KHR-dstStageMask-03081",
+                "VUID-VkSubpassDependency2KHR-dstStageMask-03083", "VUID-VkSubpassDependency2KHR-dstStageMask-02105",
+                "VUID-VkSubpassDependency2KHR-dstStageMask-02106");
+        } else {
+            skip |= ValidateStageMaskGsTsEnables(
+                dependency.srcStageMask, function_name, "VUID-VkSubpassDependency-srcStageMask-00860",
+                "VUID-VkSubpassDependency-srcStageMask-00862", "VUID-VkSubpassDependency-srcStageMask-02099",
+                "VUID-VkSubpassDependency-srcStageMask-02100");
+            skip |= ValidateStageMaskGsTsEnables(
+                dependency.dstStageMask, function_name, "VUID-VkSubpassDependency-dstStageMask-00861",
+                "VUID-VkSubpassDependency-dstStageMask-00863", "VUID-VkSubpassDependency-dstStageMask-02101",
+                "VUID-VkSubpassDependency-dstStageMask-02102");
+        }
+
+        if (!ValidateAccessMaskPipelineStage(device_extensions, dependency.srcAccessMask, dependency.srcStageMask)) {
+            vuid = use_rp2 ? "VUID-VkSubpassDependency2KHR-srcAccessMask-03088" : "VUID-VkSubpassDependency-srcAccessMask-00868";
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                            "%s: pDependencies[%u].srcAccessMask (0x%" PRIx32 ") is not supported by srcStageMask (0x%" PRIx32 ").",
+                            function_name, i, dependency.srcAccessMask, dependency.srcStageMask);
+        }
+
+        if (!ValidateAccessMaskPipelineStage(device_extensions, dependency.dstAccessMask, dependency.dstStageMask)) {
+            vuid = use_rp2 ? "VUID-VkSubpassDependency2KHR-dstAccessMask-03089" : "VUID-VkSubpassDependency-dstAccessMask-00869";
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                            "%s: pDependencies[%u].dstAccessMask (0x%" PRIx32 ") is not supported by dstStageMask (0x%" PRIx32 ").",
+                            function_name, i, dependency.dstAccessMask, dependency.dstStageMask);
+        }
+    }
+    if (!skip) {
+        skip |= ValidateLayouts(rp_version, device, pCreateInfo);
+    }
+    return skip;
+}
+
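+// Render pass 1 entry point: validates the pNext structs that only exist for vkCreateRenderPass
+// (multiview, input attachment aspect, fragment density map), then converts to the 2KHR form and runs
+// the shared checks.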
+bool CoreChecks::PreCallValidateCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
+                                                 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) const {
+    bool skip = false;
+    // Handle extension structs from KHR_multiview and KHR_maintenance2 that can only be validated for RP1 (indices out of bounds)
+    const VkRenderPassMultiviewCreateInfo *pMultiviewInfo = lvl_find_in_chain<VkRenderPassMultiviewCreateInfo>(pCreateInfo->pNext);
+    if (pMultiviewInfo) {
+        if (pMultiviewInfo->subpassCount && pMultiviewInfo->subpassCount != pCreateInfo->subpassCount) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkRenderPassCreateInfo-pNext-01928",
+                            "Subpass count is %u but multiview info has a subpass count of %u.", pCreateInfo->subpassCount,
+                            pMultiviewInfo->subpassCount);
+        } else if (pMultiviewInfo->dependencyCount && pMultiviewInfo->dependencyCount != pCreateInfo->dependencyCount) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkRenderPassCreateInfo-pNext-01929",
+                            "Dependency count is %u but multiview info has a dependency count of %u.", pCreateInfo->dependencyCount,
+                            pMultiviewInfo->dependencyCount);
+        }
+    }
+    const VkRenderPassInputAttachmentAspectCreateInfo *pInputAttachmentAspectInfo =
+        lvl_find_in_chain<VkRenderPassInputAttachmentAspectCreateInfo>(pCreateInfo->pNext);
+    if (pInputAttachmentAspectInfo) {
+        for (uint32_t i = 0; i < pInputAttachmentAspectInfo->aspectReferenceCount; ++i) {
+            uint32_t subpass = pInputAttachmentAspectInfo->pAspectReferences[i].subpass;
+            uint32_t attachment = pInputAttachmentAspectInfo->pAspectReferences[i].inputAttachmentIndex;
+            if (subpass >= pCreateInfo->subpassCount) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkRenderPassCreateInfo-pNext-01926",
+                                "Subpass index %u specified by input attachment aspect info %u is greater than the subpass "
+                                "count of %u for this render pass.",
+                                subpass, i, pCreateInfo->subpassCount);
+            } else if (pCreateInfo->pSubpasses && attachment >= pCreateInfo->pSubpasses[subpass].inputAttachmentCount) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkRenderPassCreateInfo-pNext-01927",
+                                "Input attachment index %u specified by input attachment aspect info %u is greater than the "
+                                "input attachment count of %u for this subpass.",
+                                attachment, i, pCreateInfo->pSubpasses[subpass].inputAttachmentCount);
+            }
+        }
+    }
+    const VkRenderPassFragmentDensityMapCreateInfoEXT *pFragmentDensityMapInfo =
+        lvl_find_in_chain<VkRenderPassFragmentDensityMapCreateInfoEXT>(pCreateInfo->pNext);
+    if (pFragmentDensityMapInfo) {
+        if (pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment != VK_ATTACHMENT_UNUSED) {
+            if (pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment >= pCreateInfo->attachmentCount) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02547",
+                                "fragmentDensityMapAttachment %u must be less than attachmentCount %u of for this render pass.",
+                                pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment, pCreateInfo->attachmentCount);
+            } else {
+                if (!(pFragmentDensityMapInfo->fragmentDensityMapAttachment.layout ==
+                          VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT ||
+                      pFragmentDensityMapInfo->fragmentDensityMapAttachment.layout == VK_IMAGE_LAYOUT_GENERAL)) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02549",
+                                    "Layout of fragmentDensityMapAttachment %u' must be equal to "
+                                    "VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT, or VK_IMAGE_LAYOUT_GENERAL.",
+                                    pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment);
+                }
+                if (!(pCreateInfo->pAttachments[pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment].loadOp ==
+                          VK_ATTACHMENT_LOAD_OP_LOAD ||
+                      pCreateInfo->pAttachments[pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment].loadOp ==
+                          VK_ATTACHMENT_LOAD_OP_DONT_CARE)) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02550",
+                                    "FragmentDensityMapAttachment %u' must reference an attachment with a loadOp "
+                                    "equal to VK_ATTACHMENT_LOAD_OP_LOAD or VK_ATTACHMENT_LOAD_OP_DONT_CARE.",
+                                    pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment);
+                }
+                if (pCreateInfo->pAttachments[pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment].storeOp !=
+                    VK_ATTACHMENT_STORE_OP_DONT_CARE) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02551",
+                                    "FragmentDensityMapAttachment %u' must reference an attachment with a storeOp "
+                                    "equal to VK_ATTACHMENT_STORE_OP_DONT_CARE.",
+                                    pFragmentDensityMapInfo->fragmentDensityMapAttachment.attachment);
+                }
+            }
+        }
+    }
+
+    if (!skip) {
+        safe_VkRenderPassCreateInfo2KHR create_info_2;
+        ConvertVkRenderPassCreateInfoToV2KHR(*pCreateInfo, &create_info_2);
+        skip |= ValidateCreateRenderPass(device, RENDER_PASS_VERSION_1, create_info_2.ptr());
+    }
+
+    return skip;
+}
+
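+// Checks each subpass's VkSubpassDescriptionDepthStencilResolveKHR chain entry against the device's
+// depth/stencil resolve properties and the referenced attachments.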
+static bool ValidateDepthStencilResolve(const debug_report_data *report_data,
+                                        const VkPhysicalDeviceDepthStencilResolvePropertiesKHR &depth_stencil_resolve_props,
+                                        const VkRenderPassCreateInfo2KHR *pCreateInfo) {
+    bool skip = false;
+
+    // If the pNext list of VkSubpassDescription2KHR includes a VkSubpassDescriptionDepthStencilResolveKHR structure,
+    // then that structure describes depth/stencil resolve operations for the subpass.
+    for (uint32_t i = 0; i < pCreateInfo->subpassCount; i++) {
+        const VkSubpassDescription2KHR &subpass = pCreateInfo->pSubpasses[i];
+        const auto *resolve = lvl_find_in_chain<VkSubpassDescriptionDepthStencilResolveKHR>(subpass.pNext);
+
+        if (resolve == nullptr) {
+            continue;
+        }
+
+        const bool resolve_attachment_not_unused = (resolve->pDepthStencilResolveAttachment != nullptr &&
+                                                    resolve->pDepthStencilResolveAttachment->attachment != VK_ATTACHMENT_UNUSED);
+        const bool valid_resolve_attachment_index =
+            (resolve_attachment_not_unused && resolve->pDepthStencilResolveAttachment->attachment < pCreateInfo->attachmentCount);
+
+        const bool ds_attachment_not_unused =
+            (subpass.pDepthStencilAttachment != nullptr && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED);
+        const bool valid_ds_attachment_index =
+            (ds_attachment_not_unused && subpass.pDepthStencilAttachment->attachment < pCreateInfo->attachmentCount);
+
+        if (resolve_attachment_not_unused && subpass.pDepthStencilAttachment != nullptr &&
+            subpass.pDepthStencilAttachment->attachment == VK_ATTACHMENT_UNUSED) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03177",
+                            "vkCreateRenderPass2KHR(): Subpass %u includes a VkSubpassDescriptionDepthStencilResolveKHR "
+                            "structure with resolve attachment %u, but pDepthStencilAttachment=VK_ATTACHMENT_UNUSED.",
+                            i, resolve->pDepthStencilResolveAttachment->attachment);
+        }
+
+        if (resolve_attachment_not_unused && resolve->depthResolveMode == VK_RESOLVE_MODE_NONE_KHR &&
+            resolve->stencilResolveMode == VK_RESOLVE_MODE_NONE_KHR) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03178",
+                            "vkCreateRenderPass2KHR(): Subpass %u includes a VkSubpassDescriptionDepthStencilResolveKHR "
+                            "structure with resolve attachment %u, but both depth and stencil resolve modes are "
+                            "VK_RESOLVE_MODE_NONE_KHR.",
+                            i, resolve->pDepthStencilResolveAttachment->attachment);
+        }
+
+        if (resolve_attachment_not_unused && valid_ds_attachment_index &&
+            pCreateInfo->pAttachments[subpass.pDepthStencilAttachment->attachment].samples == VK_SAMPLE_COUNT_1_BIT) {
+            skip |= log_msg(
+                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                "VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03179",
+                "vkCreateRenderPass2KHR(): Subpass %u includes a VkSubpassDescriptionDepthStencilResolveKHR "
+                "structure with resolve attachment %u. However pDepthStencilAttachment has sample count=VK_SAMPLE_COUNT_1_BIT.",
+                i, resolve->pDepthStencilResolveAttachment->attachment);
+        }
+
+        if (valid_resolve_attachment_index &&
+            pCreateInfo->pAttachments[resolve->pDepthStencilResolveAttachment->attachment].samples != VK_SAMPLE_COUNT_1_BIT) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03180",
+                            "vkCreateRenderPass2KHR(): Subpass %u includes a VkSubpassDescriptionDepthStencilResolveKHR "
+                            "structure with resolve attachment %u which has sample count=VK_SAMPLE_COUNT_1_BIT.",
+                            i, resolve->pDepthStencilResolveAttachment->attachment);
+        }
+
+        VkFormat pDepthStencilAttachmentFormat =
+            (valid_ds_attachment_index ? pCreateInfo->pAttachments[subpass.pDepthStencilAttachment->attachment].format
+                                       : VK_FORMAT_UNDEFINED);
+        VkFormat pDepthStencilResolveAttachmentFormat =
+            (valid_resolve_attachment_index ? pCreateInfo->pAttachments[resolve->pDepthStencilResolveAttachment->attachment].format
+                                            : VK_FORMAT_UNDEFINED);
+
+        if (valid_ds_attachment_index && valid_resolve_attachment_index &&
+            ((FormatDepthSize(pDepthStencilAttachmentFormat) != FormatDepthSize(pDepthStencilResolveAttachmentFormat)) ||
+             (FormatDepthNumericalType(pDepthStencilAttachmentFormat) !=
+              FormatDepthNumericalType(pDepthStencilResolveAttachmentFormat)))) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03181",
+                        "vkCreateRenderPass2KHR(): Subpass %u includes a VkSubpassDescriptionDepthStencilResolveKHR "
+                        "structure with resolve attachment %u which has a depth component (size %u). The depth component "
+                        "of pDepthStencilAttachment must have the same number of bits (currently %u) and the same numerical type.",
+                        i, resolve->pDepthStencilResolveAttachment->attachment,
+                        FormatDepthSize(pDepthStencilResolveAttachmentFormat), FormatDepthSize(pDepthStencilAttachmentFormat));
+        }
+
+        if (valid_ds_attachment_index && valid_resolve_attachment_index &&
+            ((FormatStencilSize(pDepthStencilAttachmentFormat) != FormatStencilSize(pDepthStencilResolveAttachmentFormat)) ||
+             (FormatStencilNumericalType(pDepthStencilAttachmentFormat) !=
+              FormatStencilNumericalType(pDepthStencilResolveAttachmentFormat)))) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03182",
+                        "vkCreateRenderPass2KHR(): Subpass %u includes a VkSubpassDescriptionDepthStencilResolveKHR "
+                        "structure with resolve attachment %u which has a stencil component (size %u). The stencil component "
+                        "of pDepthStencilAttachment must have the same number of bits (currently %u) and the same numerical type.",
+                        i, resolve->pDepthStencilResolveAttachment->attachment,
+                        FormatStencilSize(pDepthStencilResolveAttachmentFormat), FormatStencilSize(pDepthStencilAttachmentFormat));
+        }
+
+        if (!(resolve->depthResolveMode == VK_RESOLVE_MODE_NONE_KHR ||
+              resolve->depthResolveMode & depth_stencil_resolve_props.supportedDepthResolveModes)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkSubpassDescriptionDepthStencilResolveKHR-depthResolveMode-03183",
+                            "vkCreateRenderPass2KHR(): Subpass %u includes a VkSubpassDescriptionDepthStencilResolveKHR "
+                            "structure with invalid depthResolveMode=%u.",
+                            i, resolve->depthResolveMode);
+        }
+
+        if (!(resolve->stencilResolveMode == VK_RESOLVE_MODE_NONE_KHR ||
+              resolve->stencilResolveMode & depth_stencil_resolve_props.supportedStencilResolveModes)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkSubpassDescriptionDepthStencilResolveKHR-stencilResolveMode-03184",
+                            "vkCreateRenderPass2KHR(): Subpass %u includes a VkSubpassDescriptionDepthStencilResolveKHR "
+                            "structure with invalid stencilResolveMode=%u.",
+                            i, resolve->stencilResolveMode);
+        }
+
+        if (valid_resolve_attachment_index && FormatIsDepthAndStencil(pDepthStencilResolveAttachmentFormat) &&
+            depth_stencil_resolve_props.independentResolve == VK_FALSE &&
+            depth_stencil_resolve_props.independentResolveNone == VK_FALSE &&
+            resolve->depthResolveMode != resolve->stencilResolveMode) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03185",
+                            "vkCreateRenderPass2KHR(): Subpass %u includes a VkSubpassDescriptionDepthStencilResolveKHR "
+                            "structure. The values of depthResolveMode (%u) and stencilResolveMode (%u) must be identical.",
+                            i, resolve->depthResolveMode, resolve->stencilResolveMode);
+        }
+
+        if (valid_resolve_attachment_index && FormatIsDepthAndStencil(pDepthStencilResolveAttachmentFormat) &&
+            depth_stencil_resolve_props.independentResolve == VK_FALSE &&
+            depth_stencil_resolve_props.independentResolveNone == VK_TRUE &&
+            !(resolve->depthResolveMode == resolve->stencilResolveMode || resolve->depthResolveMode == VK_RESOLVE_MODE_NONE_KHR ||
+              resolve->stencilResolveMode == VK_RESOLVE_MODE_NONE_KHR)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03186",
+                            "vkCreateRenderPass2KHR(): Subpass %u includes a VkSubpassDescriptionDepthStencilResolveKHR "
+                            "structure. The values of depthResolveMode (%u) and stencilResolveMode (%u) must be identical, or "
+                            "one of them must be %u.",
+                            i, resolve->depthResolveMode, resolve->stencilResolveMode, VK_RESOLVE_MODE_NONE_KHR);
+        }
+    }
+
+    return skip;
+}
+
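+// vkCreateRenderPass2KHR() validation entry point: apply the VK_KHR_depth_stencil_resolve checks above when the
+// extension is enabled, then run the shared ValidateCreateRenderPass() path on a safe copy of the create info.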
+bool CoreChecks::PreCallValidateCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
+                                                     const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) const {
+    bool skip = false;
+
+    if (device_extensions.vk_khr_depth_stencil_resolve) {
+        skip |= ValidateDepthStencilResolve(report_data, phys_dev_ext_props.depth_stencil_resolve_props, pCreateInfo);
+    }
+
+    safe_VkRenderPassCreateInfo2KHR create_info_2(pCreateInfo);
+    skip |= ValidateCreateRenderPass(device, RENDER_PASS_VERSION_2, create_info_2.ptr());
+
+    return skip;
+}
+
+bool CoreChecks::ValidatePrimaryCommandBuffer(const CMD_BUFFER_STATE *pCB, char const *cmd_name, const char *error_code) const {
+    bool skip = false;
+    if (pCB->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(pCB->commandBuffer), error_code, "Cannot execute command %s on a secondary command buffer.",
+                        cmd_name);
+    }
+    return skip;
+}
+
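+// VerifyRenderAreaBounds(): the renderArea must lie entirely within the framebuffer. Illustrative example: an
+// offset of (0, 0) with a 1920x1080 extent against a 1280x720 framebuffer triggers the error below.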
+bool CoreChecks::VerifyRenderAreaBounds(const VkRenderPassBeginInfo *pRenderPassBegin) const {
+    bool skip = false;
+    const safe_VkFramebufferCreateInfo *pFramebufferInfo = &GetFramebufferState(pRenderPassBegin->framebuffer)->createInfo;
+    if (pRenderPassBegin->renderArea.offset.x < 0 ||
+        (pRenderPassBegin->renderArea.offset.x + pRenderPassBegin->renderArea.extent.width) > pFramebufferInfo->width ||
+        pRenderPassBegin->renderArea.offset.y < 0 ||
+        (pRenderPassBegin->renderArea.offset.y + pRenderPassBegin->renderArea.extent.height) > pFramebufferInfo->height) {
+        skip |= static_cast<bool>(log_msg(
+            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+            kVUID_Core_DrawState_InvalidRenderArea,
+            "Cannot execute a render pass with renderArea not within the bound of the framebuffer. RenderArea: x %d, y %d, width "
+            "%d, height %d. Framebuffer: width %d, height %d.",
+            pRenderPassBegin->renderArea.offset.x, pRenderPassBegin->renderArea.offset.y, pRenderPassBegin->renderArea.extent.width,
+            pRenderPassBegin->renderArea.extent.height, pFramebufferInfo->width, pFramebufferInfo->height));
+    }
+    return skip;
+}
+
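+// Imageless-framebuffer consistency checks: image views supplied through VkRenderPassAttachmentBeginInfoKHR must
+// match the VkFramebufferAttachmentImageInfoKHR entries the framebuffer was created with (flags, usage, extent,
+// layer count, view formats) as well as the render pass attachment descriptions (format, sample count), and each
+// view must use a single mip level and the identity swizzle. Supplying views for a framebuffer created without
+// VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR is also an error.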
+bool CoreChecks::VerifyFramebufferAndRenderPassImageViews(const VkRenderPassBeginInfo *pRenderPassBeginInfo) const {
+    bool skip = false;
+    const VkRenderPassAttachmentBeginInfoKHR *pRenderPassAttachmentBeginInfo =
+        lvl_find_in_chain<VkRenderPassAttachmentBeginInfoKHR>(pRenderPassBeginInfo->pNext);
+
+    if (pRenderPassAttachmentBeginInfo && pRenderPassAttachmentBeginInfo->attachmentCount != 0) {
+        const safe_VkFramebufferCreateInfo *pFramebufferCreateInfo =
+            &GetFramebufferState(pRenderPassBeginInfo->framebuffer)->createInfo;
+        const VkFramebufferAttachmentsCreateInfoKHR *pFramebufferAttachmentsCreateInfo =
+            lvl_find_in_chain<VkFramebufferAttachmentsCreateInfoKHR>(pFramebufferCreateInfo->pNext);
+        if ((pFramebufferCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                            HandleToUint64(pRenderPassBeginInfo->renderPass), "VUID-VkRenderPassBeginInfo-framebuffer-03207",
+                            "VkRenderPassBeginInfo: Image views specified at render pass begin, but framebuffer not created with "
+                            "VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR");
+        } else if (pFramebufferAttachmentsCreateInfo) {
+            if (pFramebufferAttachmentsCreateInfo->attachmentImageInfoCount != pRenderPassAttachmentBeginInfo->attachmentCount) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                                HandleToUint64(pRenderPassBeginInfo->renderPass), "VUID-VkRenderPassBeginInfo-framebuffer-03208",
+                                "VkRenderPassBeginInfo: %u image views specified at render pass begin, but framebuffer "
+                                "created expecting %u attachments",
+                                pRenderPassAttachmentBeginInfo->attachmentCount,
+                                pFramebufferAttachmentsCreateInfo->attachmentImageInfoCount);
+            } else {
+                const safe_VkRenderPassCreateInfo2KHR *pRenderPassCreateInfo =
+                    &GetRenderPassState(pRenderPassBeginInfo->renderPass)->createInfo;
+                for (uint32_t i = 0; i < pRenderPassAttachmentBeginInfo->attachmentCount; ++i) {
+                    const VkImageViewCreateInfo *pImageViewCreateInfo =
+                        &GetImageViewState(pRenderPassAttachmentBeginInfo->pAttachments[i])->create_info;
+                    const VkFramebufferAttachmentImageInfoKHR *pFramebufferAttachmentImageInfo =
+                        &pFramebufferAttachmentsCreateInfo->pAttachmentImageInfos[i];
+                    const VkImageCreateInfo *pImageCreateInfo = &GetImageState(pImageViewCreateInfo->image)->createInfo;
+
+                    if (pFramebufferAttachmentImageInfo->flags != pImageCreateInfo->flags) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                                        HandleToUint64(pRenderPassBeginInfo->renderPass),
+                                        "VUID-VkRenderPassBeginInfo-framebuffer-03209",
+                                        "VkRenderPassBeginInfo: Image view #%u created from an image with flags set as 0x%X, "
+                                        "but image info #%u used to create the framebuffer had flags set as 0x%X",
+                                        i, pImageCreateInfo->flags, i, pFramebufferAttachmentImageInfo->flags);
+                    }
+
+                    if (pFramebufferAttachmentImageInfo->usage != pImageCreateInfo->usage) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                                        HandleToUint64(pRenderPassBeginInfo->renderPass),
+                                        "VUID-VkRenderPassBeginInfo-framebuffer-03210",
+                                        "VkRenderPassBeginInfo: Image view #%u created from an image with usage set as 0x%X, "
+                                        "but image info #%u used to create the framebuffer had usage set as 0x%X",
+                                        i, pImageCreateInfo->usage, i, pFramebufferAttachmentImageInfo->usage);
+                    }
+
+                    if (pFramebufferAttachmentImageInfo->width != pImageCreateInfo->extent.width) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                                        HandleToUint64(pRenderPassBeginInfo->renderPass),
+                                        "VUID-VkRenderPassBeginInfo-framebuffer-03211",
+                                        "VkRenderPassBeginInfo: Image view #%u created from an image with width set as %u, "
+                                        "but image info #%u used to create the framebuffer had width set as %u",
+                                        i, pImageCreateInfo->extent.width, i, pFramebufferAttachmentImageInfo->width);
+                    }
+
+                    if (pFramebufferAttachmentImageInfo->height != pImageCreateInfo->extent.height) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                                        HandleToUint64(pRenderPassBeginInfo->renderPass),
+                                        "VUID-VkRenderPassBeginInfo-framebuffer-03212",
+                                        "VkRenderPassBeginInfo: Image view #%u created from an image with height set as %u, "
+                                        "but image info #%u used to create the framebuffer had height set as %u",
+                                        i, pImageCreateInfo->extent.height, i, pFramebufferAttachmentImageInfo->height);
+                    }
+
+                    if (pFramebufferAttachmentImageInfo->layerCount != pImageViewCreateInfo->subresourceRange.layerCount) {
+                        skip |= log_msg(
+                            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                            HandleToUint64(pRenderPassBeginInfo->renderPass), "VUID-VkRenderPassBeginInfo-framebuffer-03213",
+                            "VkRenderPassBeginInfo: Image view #%u created with a subresource range with a layerCount of %u, "
+                            "but image info #%u used to create the framebuffer had layerCount set as %u",
+                            i, pImageViewCreateInfo->subresourceRange.layerCount, i, pFramebufferAttachmentImageInfo->layerCount);
+                    }
+
+                    const VkImageFormatListCreateInfoKHR *pImageFormatListCreateInfo =
+                        lvl_find_in_chain<VkImageFormatListCreateInfoKHR>(pImageCreateInfo->pNext);
+                    if (pImageFormatListCreateInfo) {
+                        if (pImageFormatListCreateInfo->viewFormatCount != pFramebufferAttachmentImageInfo->viewFormatCount) {
+                            skip |= log_msg(
+                                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                                HandleToUint64(pRenderPassBeginInfo->renderPass), "VUID-VkRenderPassBeginInfo-framebuffer-03214",
+                                "VkRenderPassBeginInfo: Image view #%u created with an image with a viewFormatCount of %u, "
+                                "but image info #%u used to create the framebuffer had viewFormatCount set as %u",
+                                i, pImageFormatListCreateInfo->viewFormatCount, i,
+                                pFramebufferAttachmentImageInfo->viewFormatCount);
+                        }
+
+                        for (uint32_t j = 0; j < pImageFormatListCreateInfo->viewFormatCount; ++j) {
+                            bool formatFound = false;
+                            for (uint32_t k = 0; k < pFramebufferAttachmentImageInfo->viewFormatCount; ++k) {
+                                if (pImageFormatListCreateInfo->pViewFormats[j] ==
+                                    pFramebufferAttachmentImageInfo->pViewFormats[k]) {
+                                    formatFound = true;
+                                }
+                            }
+                            if (!formatFound) {
+                                skip |=
+                                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                                            HandleToUint64(pRenderPassBeginInfo->renderPass),
+                                            "VUID-VkRenderPassBeginInfo-framebuffer-03215",
+                                            "VkRenderPassBeginInfo: Image view #%u created with an image including the format "
+                                            "%s in its view format list, "
+                                            "but image info #%u used to create the framebuffer does not include this format",
+                                            i, string_VkFormat(pImageFormatListCreateInfo->pViewFormats[j]), i);
+                            }
+                        }
+                    }
+
+                    if (pRenderPassCreateInfo->pAttachments[i].format != pImageViewCreateInfo->format) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                                        HandleToUint64(pRenderPassBeginInfo->renderPass),
+                                        "VUID-VkRenderPassBeginInfo-framebuffer-03216",
+                                        "VkRenderPassBeginInfo: Image view #%u created with a format of %s, "
+                                        "but render pass attachment description #%u created with a format of %s",
+                                        i, string_VkFormat(pImageViewCreateInfo->format), i,
+                                        string_VkFormat(pRenderPassCreateInfo->pAttachments[i].format));
+                    }
+
+                    if (pRenderPassCreateInfo->pAttachments[i].samples != pImageCreateInfo->samples) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                                        HandleToUint64(pRenderPassBeginInfo->renderPass),
+                                        "VUID-VkRenderPassBeginInfo-framebuffer-03217",
+                                        "VkRenderPassBeginInfo: Image view #%u created with an image with %s samples, "
+                                        "but render pass attachment description #%u created with %s samples",
+                                        i, string_VkSampleCountFlagBits(pImageCreateInfo->samples), i,
+                                        string_VkSampleCountFlagBits(pRenderPassCreateInfo->pAttachments[i].samples));
+                    }
+
+                    if (pImageViewCreateInfo->subresourceRange.levelCount != 1) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
+                                        HandleToUint64(pRenderPassAttachmentBeginInfo->pAttachments[i]),
+                                        "VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-03218",
+                                        "VkRenderPassAttachmentBeginInfo: Image view #%u created with multiple (%u) mip levels.", i,
+                                        pImageViewCreateInfo->subresourceRange.levelCount);
+                    }
+
+                    if (((pImageViewCreateInfo->components.r != VK_COMPONENT_SWIZZLE_IDENTITY) &&
+                         (pImageViewCreateInfo->components.r != VK_COMPONENT_SWIZZLE_R)) ||
+                        ((pImageViewCreateInfo->components.g != VK_COMPONENT_SWIZZLE_IDENTITY) &&
+                         (pImageViewCreateInfo->components.g != VK_COMPONENT_SWIZZLE_G)) ||
+                        ((pImageViewCreateInfo->components.b != VK_COMPONENT_SWIZZLE_IDENTITY) &&
+                         (pImageViewCreateInfo->components.b != VK_COMPONENT_SWIZZLE_B)) ||
+                        ((pImageViewCreateInfo->components.a != VK_COMPONENT_SWIZZLE_IDENTITY) &&
+                         (pImageViewCreateInfo->components.a != VK_COMPONENT_SWIZZLE_A))) {
+                        skip |=
+                            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
+                                    HandleToUint64(pRenderPassAttachmentBeginInfo->pAttachments[i]),
+                                    "VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-03219",
+                                    "VkRenderPassAttachmentBeginInfo: Image view #%u created with non-identity swizzle. All "
+                                    "framebuffer attachments must have been created with the identity swizzle. Here are the actual "
+                                    "swizzle values:\n"
+                                    "r swizzle = %s\n"
+                                    "g swizzle = %s\n"
+                                    "b swizzle = %s\n"
+                                    "a swizzle = %s\n",
+                                    i, string_VkComponentSwizzle(pImageViewCreateInfo->components.r),
+                                    string_VkComponentSwizzle(pImageViewCreateInfo->components.g),
+                                    string_VkComponentSwizzle(pImageViewCreateInfo->components.b),
+                                    string_VkComponentSwizzle(pImageViewCreateInfo->components.a));
+                    }
+                }
+            }
+        }
+    }
+
+    return skip;
+}
+
+// If this is a stencil format, make sure the stencil[Load|Store]Op flag is checked, while if it is a depth/color attachment the
+// [load|store]Op flag must be checked
+// TODO: The memory valid flag in DEVICE_MEMORY_STATE should probably be split to track the validity of stencil memory separately.
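+// Illustrative example: for a VK_FORMAT_D24_UNORM_S8_UINT attachment both color_depth_op and stencil_op are
+// consulted, for VK_FORMAT_S8_UINT only stencil_op, and for a color format only color_depth_op.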
+template <typename T>
+static bool FormatSpecificLoadAndStoreOpSettings(VkFormat format, T color_depth_op, T stencil_op, T op) {
+    if (color_depth_op != op && stencil_op != op) {
+        return false;
+    }
+    bool check_color_depth_load_op = !FormatIsStencilOnly(format);
+    bool check_stencil_load_op = FormatIsDepthAndStencil(format) || !check_color_depth_load_op;
+
+    return ((check_color_depth_load_op && (color_depth_op == op)) || (check_stencil_load_op && (stencil_op == op)));
+}
+
+bool CoreChecks::ValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, RenderPassCreateVersion rp_version,
+                                            const VkRenderPassBeginInfo *pRenderPassBegin) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    auto render_pass_state = pRenderPassBegin ? GetRenderPassState(pRenderPassBegin->renderPass) : nullptr;
+    auto framebuffer = pRenderPassBegin ? GetFramebufferState(pRenderPassBegin->framebuffer) : nullptr;
+
+    bool skip = false;
+    const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
+    const char *vuid;
+    const char *const function_name = use_rp2 ? "vkCmdBeginRenderPass2KHR()" : "vkCmdBeginRenderPass()";
+
+    if (render_pass_state) {
+        uint32_t clear_op_size = 0;  // Make sure pClearValues is at least as large as last LOAD_OP_CLEAR
+
+        // Handle extension struct from EXT_sample_locations
+        const VkRenderPassSampleLocationsBeginInfoEXT *pSampleLocationsBeginInfo =
+            lvl_find_in_chain<VkRenderPassSampleLocationsBeginInfoEXT>(pRenderPassBegin->pNext);
+        if (pSampleLocationsBeginInfo) {
+            for (uint32_t i = 0; i < pSampleLocationsBeginInfo->attachmentInitialSampleLocationsCount; ++i) {
+                if (pSampleLocationsBeginInfo->pAttachmentInitialSampleLocations[i].attachmentIndex >=
+                    render_pass_state->createInfo.attachmentCount) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    "VUID-VkAttachmentSampleLocationsEXT-attachmentIndex-01531",
+                                    "Attachment index %u specified by attachment sample locations %u is greater than the "
+                                    "attachment count of %u for the render pass being begun.",
+                                    pSampleLocationsBeginInfo->pAttachmentInitialSampleLocations[i].attachmentIndex, i,
+                                    render_pass_state->createInfo.attachmentCount);
+                }
+            }
+
+            for (uint32_t i = 0; i < pSampleLocationsBeginInfo->postSubpassSampleLocationsCount; ++i) {
+                if (pSampleLocationsBeginInfo->pPostSubpassSampleLocations[i].subpassIndex >=
+                    render_pass_state->createInfo.subpassCount) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    "VUID-VkSubpassSampleLocationsEXT-subpassIndex-01532",
+                                    "Subpass index %u specified by subpass sample locations %u is greater than the subpass count "
+                                    "of %u for the render pass being begun.",
+                                    pSampleLocationsBeginInfo->pPostSubpassSampleLocations[i].subpassIndex, i,
+                                    render_pass_state->createInfo.subpassCount);
+                }
+            }
+        }
+
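+        // clear_op_size ends up one past the highest attachment index that uses VK_ATTACHMENT_LOAD_OP_CLEAR
+        // (stencilLoadOp is consulted for stencil-capable formats). Illustrative example: if only attachment 3 is
+        // cleared, clearValueCount must be at least 4 even though entries 0..2 are ignored.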
+        for (uint32_t i = 0; i < render_pass_state->createInfo.attachmentCount; ++i) {
+            auto pAttachment = &render_pass_state->createInfo.pAttachments[i];
+            if (FormatSpecificLoadAndStoreOpSettings(pAttachment->format, pAttachment->loadOp, pAttachment->stencilLoadOp,
+                                                     VK_ATTACHMENT_LOAD_OP_CLEAR)) {
+                clear_op_size = static_cast<uint32_t>(i) + 1;
+            }
+        }
+
+        if (clear_op_size > pRenderPassBegin->clearValueCount) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                            HandleToUint64(render_pass_state->renderPass), "VUID-VkRenderPassBeginInfo-clearValueCount-00902",
+                            "In %s the VkRenderPassBeginInfo struct has a clearValueCount of %u, but there "
+                            "must be at least %u entries in the pClearValues array to account for the highest-indexed "
+                            "attachment in %s that uses VK_ATTACHMENT_LOAD_OP_CLEAR, which requires %u entries. Note that the "
+                            "pClearValues array is indexed by attachment number, so even if some pClearValues entries between 0 "
+                            "and %u correspond to attachments that aren't cleared, they will be ignored.",
+                            function_name, pRenderPassBegin->clearValueCount, clear_op_size,
+                            report_data->FormatHandle(render_pass_state->renderPass).c_str(), clear_op_size, clear_op_size - 1);
+        }
+        skip |= VerifyFramebufferAndRenderPassImageViews(pRenderPassBegin);
+        skip |= VerifyRenderAreaBounds(pRenderPassBegin);
+        skip |= VerifyFramebufferAndRenderPassLayouts(rp_version, cb_state, pRenderPassBegin,
+                                                      GetFramebufferState(pRenderPassBegin->framebuffer));
+        if (framebuffer->rp_state->renderPass != render_pass_state->renderPass) {
+            skip |= ValidateRenderPassCompatibility("render pass", render_pass_state, "framebuffer", framebuffer->rp_state.get(),
+                                                    function_name, "VUID-VkRenderPassBeginInfo-renderPass-00904");
+        }
+
+        vuid = use_rp2 ? "VUID-vkCmdBeginRenderPass2KHR-renderpass" : "VUID-vkCmdBeginRenderPass-renderpass";
+        skip |= InsideRenderPass(cb_state, function_name, vuid);
+        skip |= ValidateDependencies(framebuffer, render_pass_state);
+
+        vuid = use_rp2 ? "VUID-vkCmdBeginRenderPass2KHR-bufferlevel" : "VUID-vkCmdBeginRenderPass-bufferlevel";
+        skip |= ValidatePrimaryCommandBuffer(cb_state, function_name, vuid);
+
+        vuid = use_rp2 ? "VUID-vkCmdBeginRenderPass2KHR-commandBuffer-cmdpool" : "VUID-vkCmdBeginRenderPass-commandBuffer-cmdpool";
+        skip |= ValidateCmdQueueFlags(cb_state, function_name, VK_QUEUE_GRAPHICS_BIT, vuid);
+
+        const CMD_TYPE cmd_type = use_rp2 ? CMD_BEGINRENDERPASS2KHR : CMD_BEGINRENDERPASS;
+        skip |= ValidateCmd(cb_state, cmd_type, function_name);
+    }
+
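+    // VkDeviceGroupRenderPassBeginInfo checks: deviceMask must be non-zero, must only reference physical devices
+    // in the device group, and must be a subset of the command buffer's device mask; deviceRenderAreaCount must be
+    // either zero or equal to the physical device count.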
+    auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
+    if (chained_device_group_struct) {
+        skip |= ValidateDeviceMaskToPhysicalDeviceCount(
+            chained_device_group_struct->deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+            HandleToUint64(pRenderPassBegin->renderPass), "VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00905");
+        skip |= ValidateDeviceMaskToZero(chained_device_group_struct->deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                                         HandleToUint64(pRenderPassBegin->renderPass),
+                                         "VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00906");
+        skip |= ValidateDeviceMaskToCommandBuffer(
+            cb_state, chained_device_group_struct->deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+            HandleToUint64(pRenderPassBegin->renderPass), "VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00907");
+
+        if (chained_device_group_struct->deviceRenderAreaCount != 0 &&
+            chained_device_group_struct->deviceRenderAreaCount != physical_device_count) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+                            HandleToUint64(pRenderPassBegin->renderPass),
+                            "VUID-VkDeviceGroupRenderPassBeginInfo-deviceRenderAreaCount-00908",
+                            "deviceRenderAreaCount (%" PRIu32 ") is invalid; it must be 0 or equal to the physical device "
+                            "count (%" PRIu32 ").",
+                            chained_device_group_struct->deviceRenderAreaCount, physical_device_count);
+        }
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
+                                                   VkSubpassContents contents) const {
+    bool skip = ValidateCmdBeginRenderPass(commandBuffer, RENDER_PASS_VERSION_1, pRenderPassBegin);
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
+                                                       const VkSubpassBeginInfoKHR *pSubpassBeginInfo) const {
+    bool skip = ValidateCmdBeginRenderPass(commandBuffer, RENDER_PASS_VERSION_2, pRenderPassBegin);
+    return skip;
+}
+
+void CoreChecks::RecordCmdBeginRenderPassLayouts(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
+                                                 const VkSubpassContents contents) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    auto render_pass_state = pRenderPassBegin ? GetRenderPassState(pRenderPassBegin->renderPass) : nullptr;
+    auto framebuffer = pRenderPassBegin ? GetFramebufferState(pRenderPassBegin->framebuffer) : nullptr;
+    if (render_pass_state) {
+        // transition attachments to the correct layouts for beginning of renderPass and first subpass
+        TransitionBeginRenderPassLayouts(cb_state, render_pass_state, framebuffer);
+    }
+}
+
+void CoreChecks::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
+                                                 VkSubpassContents contents) {
+    StateTracker::PreCallRecordCmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
+    RecordCmdBeginRenderPassLayouts(commandBuffer, pRenderPassBegin, contents);
+}
+
+void CoreChecks::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
+                                                     const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
+    StateTracker::PreCallRecordCmdBeginRenderPass2KHR(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
+    RecordCmdBeginRenderPassLayouts(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
+}
+
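+// Shared validation for vkCmdNextSubpass()/vkCmdNextSubpass2KHR(): must be recorded in a primary command buffer
+// allocated from a graphics-capable pool, inside a render pass instance, and must not advance past the final subpass.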
+bool CoreChecks::ValidateCmdNextSubpass(RenderPassCreateVersion rp_version, VkCommandBuffer commandBuffer) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    bool skip = false;
+    const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
+    const char *vuid;
+    const char *const function_name = use_rp2 ? "vkCmdNextSubpass2KHR()" : "vkCmdNextSubpass()";
+
+    vuid = use_rp2 ? "VUID-vkCmdNextSubpass2KHR-bufferlevel" : "VUID-vkCmdNextSubpass-bufferlevel";
+    skip |= ValidatePrimaryCommandBuffer(cb_state, function_name, vuid);
+
+    vuid = use_rp2 ? "VUID-vkCmdNextSubpass2KHR-commandBuffer-cmdpool" : "VUID-vkCmdNextSubpass-commandBuffer-cmdpool";
+    skip |= ValidateCmdQueueFlags(cb_state, function_name, VK_QUEUE_GRAPHICS_BIT, vuid);
+    const CMD_TYPE cmd_type = use_rp2 ? CMD_NEXTSUBPASS2KHR : CMD_NEXTSUBPASS;
+    skip |= ValidateCmd(cb_state, cmd_type, function_name);
+
+    vuid = use_rp2 ? "VUID-vkCmdNextSubpass2KHR-renderpass" : "VUID-vkCmdNextSubpass-renderpass";
+    skip |= OutsideRenderPass(cb_state, function_name, vuid);
+
+    auto subpassCount = cb_state->activeRenderPass->createInfo.subpassCount;
+    if (cb_state->activeSubpass == subpassCount - 1) {
+        vuid = use_rp2 ? "VUID-vkCmdNextSubpass2KHR-None-03102" : "VUID-vkCmdNextSubpass-None-00909";
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), vuid, "%s: Attempted to advance beyond final subpass.", function_name);
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) const {
+    return ValidateCmdNextSubpass(RENDER_PASS_VERSION_1, commandBuffer);
+}
+
+bool CoreChecks::PreCallValidateCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
+                                                   const VkSubpassEndInfoKHR *pSubpassEndInfo) const {
+    return ValidateCmdNextSubpass(RENDER_PASS_VERSION_2, commandBuffer);
+}
+
+void CoreChecks::RecordCmdNextSubpassLayouts(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    TransitionSubpassLayouts(cb_state, cb_state->activeRenderPass, cb_state->activeSubpass,
+                             GetFramebufferState(cb_state->activeRenderPassBeginInfo.framebuffer));
+}
+
+void CoreChecks::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
+    StateTracker::PostCallRecordCmdNextSubpass(commandBuffer, contents);
+    RecordCmdNextSubpassLayouts(commandBuffer, contents);
+}
+
+void CoreChecks::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
+                                                  const VkSubpassEndInfoKHR *pSubpassEndInfo) {
+    StateTracker::PostCallRecordCmdNextSubpass2KHR(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
+    RecordCmdNextSubpassLayouts(commandBuffer, pSubpassBeginInfo->contents);
+}
+
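+// Shared validation for vkCmdEndRenderPass()/vkCmdEndRenderPass2KHR(): the command buffer must be on the final
+// subpass, inside a render pass instance, primary-level, and allocated from a graphics-capable pool.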
+bool CoreChecks::ValidateCmdEndRenderPass(RenderPassCreateVersion rp_version, VkCommandBuffer commandBuffer) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    bool skip = false;
+    const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
+    const char *vuid;
+    const char *const function_name = use_rp2 ? "vkCmdEndRenderPass2KHR()" : "vkCmdEndRenderPass()";
+
+    RENDER_PASS_STATE *rp_state = cb_state->activeRenderPass;
+    if (rp_state) {
+        if (cb_state->activeSubpass != rp_state->createInfo.subpassCount - 1) {
+            vuid = use_rp2 ? "VUID-vkCmdEndRenderPass2KHR-None-03103" : "VUID-vkCmdEndRenderPass-None-00910";
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(commandBuffer), vuid, "%s: Called before reaching final subpass.", function_name);
+        }
+    }
+
+    vuid = use_rp2 ? "VUID-vkCmdEndRenderPass2KHR-renderpass" : "VUID-vkCmdEndRenderPass-renderpass";
+    skip |= OutsideRenderPass(cb_state, function_name, vuid);
+
+    vuid = use_rp2 ? "VUID-vkCmdEndRenderPass2KHR-bufferlevel" : "VUID-vkCmdEndRenderPass-bufferlevel";
+    skip |= ValidatePrimaryCommandBuffer(cb_state, function_name, vuid);
+
+    vuid = use_rp2 ? "VUID-vkCmdEndRenderPass2KHR-commandBuffer-cmdpool" : "VUID-vkCmdEndRenderPass-commandBuffer-cmdpool";
+    skip |= ValidateCmdQueueFlags(cb_state, function_name, VK_QUEUE_GRAPHICS_BIT, vuid);
+
+    const CMD_TYPE cmd_type = use_rp2 ? CMD_ENDRENDERPASS2KHR : CMD_ENDRENDERPASS;
+    skip |= ValidateCmd(cb_state, cmd_type, function_name);
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdEndRenderPass(VkCommandBuffer commandBuffer) const {
+    bool skip = ValidateCmdEndRenderPass(RENDER_PASS_VERSION_1, commandBuffer);
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
+                                                     const VkSubpassEndInfoKHR *pSubpassEndInfo) const {
+    bool skip = ValidateCmdEndRenderPass(RENDER_PASS_VERSION_2, commandBuffer);
+    return skip;
+}
+
+void CoreChecks::RecordCmdEndRenderPassLayouts(VkCommandBuffer commandBuffer) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    FRAMEBUFFER_STATE *framebuffer = GetFramebufferState(cb_state->activeFramebuffer);
+    TransitionFinalSubpassLayouts(cb_state, &cb_state->activeRenderPassBeginInfo, framebuffer);
+}
+
+void CoreChecks::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
+    // Record the end at the CoreLevel to ensure StateTracker cleanup doesn't step on anything we need.
+    RecordCmdEndRenderPassLayouts(commandBuffer);
+    StateTracker::PostCallRecordCmdEndRenderPass(commandBuffer);
+}
+
+void CoreChecks::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR *pSubpassEndInfo) {
+    // Record the end at the CoreLevel to ensure StateTracker cleanup doesn't step on anything we need.
+    RecordCmdEndRenderPassLayouts(commandBuffer);
+    StateTracker::PostCallRecordCmdEndRenderPass2KHR(commandBuffer, pSubpassEndInfo);
+}
+
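+// vkCmdExecuteCommands() helper: if a secondary command buffer inherits an explicit (non-null) framebuffer, it
+// must be the primary command buffer's currently active framebuffer and must refer to a valid framebuffer object.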
+bool CoreChecks::ValidateFramebuffer(VkCommandBuffer primaryBuffer, const CMD_BUFFER_STATE *pCB, VkCommandBuffer secondaryBuffer,
+                                     const CMD_BUFFER_STATE *pSubCB, const char *caller) const {
+    bool skip = false;
+    if (!pSubCB->beginInfo.pInheritanceInfo) {
+        return skip;
+    }
+    VkFramebuffer primary_fb = pCB->activeFramebuffer;
+    VkFramebuffer secondary_fb = pSubCB->beginInfo.pInheritanceInfo->framebuffer;
+    if (secondary_fb != VK_NULL_HANDLE) {
+        if (primary_fb != secondary_fb) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(primaryBuffer), "VUID-vkCmdExecuteCommands-pCommandBuffers-00099",
+                            "vkCmdExecuteCommands() called w/ invalid secondary %s which has a %s"
+                            " that is not the same as the primary command buffer's current active %s.",
+                            report_data->FormatHandle(secondaryBuffer).c_str(), report_data->FormatHandle(secondary_fb).c_str(),
+                            report_data->FormatHandle(primary_fb).c_str());
+        }
+        auto fb = GetFramebufferState(secondary_fb);
+        if (!fb) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(primaryBuffer), kVUID_Core_DrawState_InvalidSecondaryCommandBuffer,
+                            "vkCmdExecuteCommands() called w/ invalid %s which has invalid %s.",
+                            report_data->FormatHandle(secondaryBuffer).c_str(), report_data->FormatHandle(secondary_fb).c_str());
+            return skip;
+        }
+    }
+    return skip;
+}
+
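+// vkCmdExecuteCommands() helper: when a pipeline-statistics query is active in the primary, the secondary's
+// inherited pipelineStatistics must be covered by that query pool's statistics flags; the secondary must not have
+// started a query of a type already active in the primary; and both command pools must use the same queue family.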
+bool CoreChecks::ValidateSecondaryCommandBufferState(const CMD_BUFFER_STATE *pCB, const CMD_BUFFER_STATE *pSubCB) const {
+    bool skip = false;
+    unordered_set<int> activeTypes;
+    if (!disabled.query_validation) {
+        for (auto queryObject : pCB->activeQueries) {
+            auto query_pool_state = GetQueryPoolState(queryObject.pool);
+            if (query_pool_state) {
+                if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PIPELINE_STATISTICS &&
+                    pSubCB->beginInfo.pInheritanceInfo) {
+                    VkQueryPipelineStatisticFlags cmdBufStatistics = pSubCB->beginInfo.pInheritanceInfo->pipelineStatistics;
+                    if ((cmdBufStatistics & query_pool_state->createInfo.pipelineStatistics) != cmdBufStatistics) {
+                        skip |= log_msg(
+                            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(pCB->commandBuffer), "VUID-vkCmdExecuteCommands-commandBuffer-00104",
+                            "vkCmdExecuteCommands() called w/ invalid %s which has invalid active %s"
+                            ". Pipeline statistics is being queried so the command buffer must have all bits set on the queryPool.",
+                            report_data->FormatHandle(pCB->commandBuffer).c_str(),
+                            report_data->FormatHandle(queryObject.pool).c_str());
+                    }
+                }
+                activeTypes.insert(query_pool_state->createInfo.queryType);
+            }
+        }
+        for (auto queryObject : pSubCB->startedQueries) {
+            auto query_pool_state = GetQueryPoolState(queryObject.pool);
+            if (query_pool_state && activeTypes.count(query_pool_state->createInfo.queryType)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(pCB->commandBuffer), kVUID_Core_DrawState_InvalidSecondaryCommandBuffer,
+                                "vkCmdExecuteCommands() called w/ invalid %s which has invalid active %s"
+                                " of type %d but a query of that type has been started on secondary %s.",
+                                report_data->FormatHandle(pCB->commandBuffer).c_str(),
+                                report_data->FormatHandle(queryObject.pool).c_str(), query_pool_state->createInfo.queryType,
+                                report_data->FormatHandle(pSubCB->commandBuffer).c_str());
+            }
+        }
+    }
+    auto primary_pool = pCB->command_pool.get();
+    auto secondary_pool = pSubCB->command_pool.get();
+    if (primary_pool && secondary_pool && (primary_pool->queueFamilyIndex != secondary_pool->queueFamilyIndex)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(pSubCB->commandBuffer), kVUID_Core_DrawState_InvalidQueueFamily,
+                        "vkCmdExecuteCommands(): Primary %s created in queue family %d has secondary "
+                        "%s created in queue family %d.",
+                        report_data->FormatHandle(pCB->commandBuffer).c_str(), primary_pool->queueFamilyIndex,
+                        report_data->FormatHandle(pSubCB->commandBuffer).c_str(), secondary_pool->queueFamilyIndex);
+    }
+
+    return skip;
+}
+
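+// Per-element validation of pCommandBuffers for vkCmdExecuteCommands(): command buffer level and usage-flag rules,
+// render pass compatibility and framebuffer inheritance, SIMULTANEOUS_USE restrictions, inherited-query support,
+// and consistency of each secondary's recorded initial image layouts with the primary's current (or initial) layouts.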
+bool CoreChecks::PreCallValidateCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
+                                                   const VkCommandBuffer *pCommandBuffers) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    bool skip = false;
+    const CMD_BUFFER_STATE *sub_cb_state = NULL;
+    std::unordered_set<const CMD_BUFFER_STATE *> linked_command_buffers;
+
+    for (uint32_t i = 0; i < commandBuffersCount; i++) {
+        sub_cb_state = GetCBState(pCommandBuffers[i]);
+        assert(sub_cb_state);
+        if (VK_COMMAND_BUFFER_LEVEL_PRIMARY == sub_cb_state->createInfo.level) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(pCommandBuffers[i]), "VUID-vkCmdExecuteCommands-pCommandBuffers-00088",
+                            "vkCmdExecuteCommands() called w/ Primary %s in element %u of pCommandBuffers array. All "
+                            "cmd buffers in pCommandBuffers array must be secondary.",
+                            report_data->FormatHandle(pCommandBuffers[i]).c_str(), i);
+        } else if (VK_COMMAND_BUFFER_LEVEL_SECONDARY == sub_cb_state->createInfo.level) {
+            if (sub_cb_state->beginInfo.pInheritanceInfo != nullptr) {
+                const auto secondary_rp_state = GetRenderPassState(sub_cb_state->beginInfo.pInheritanceInfo->renderPass);
+                if (cb_state->activeRenderPass &&
+                    !(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                    HandleToUint64(pCommandBuffers[i]), "VUID-vkCmdExecuteCommands-pCommandBuffers-00096",
+                                    "vkCmdExecuteCommands(): Secondary %s is executed within a %s "
+                                    "instance scope, but the Secondary Command Buffer does not have the "
+                                    "VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT set in VkCommandBufferBeginInfo::flags when "
+                                    "the vkBeginCommandBuffer() was called.",
+                                    report_data->FormatHandle(pCommandBuffers[i]).c_str(),
+                                    report_data->FormatHandle(cb_state->activeRenderPass->renderPass).c_str());
+                } else if (!cb_state->activeRenderPass &&
+                           (sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                    HandleToUint64(pCommandBuffers[i]), "VUID-vkCmdExecuteCommands-pCommandBuffers-00100",
+                                    "vkCmdExecuteCommands(): Secondary %s is executed outside a render pass "
+                                    "instance scope, but the Secondary Command Buffer does have the "
+                                    "VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT set in VkCommandBufferBeginInfo::flags when "
+                                    "the vkBeginCommandBuffer() was called.",
+                                    report_data->FormatHandle(pCommandBuffers[i]).c_str());
+                } else if (cb_state->activeRenderPass &&
+                           (sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
+                    // Make sure the render pass is compatible with the parent command buffer's render pass if the continue bit is set
+                    if (cb_state->activeRenderPass->renderPass != secondary_rp_state->renderPass) {
+                        skip |= ValidateRenderPassCompatibility(
+                            "primary command buffer", cb_state->activeRenderPass, "secondary command buffer", secondary_rp_state,
+                            "vkCmdExecuteCommands()", "VUID-vkCmdExecuteCommands-pInheritanceInfo-00098");
+                    }
+                    //  If the framebuffer for the secondary CB is not NULL, then it must match the active FB from the primary CB
+                    skip |=
+                        ValidateFramebuffer(commandBuffer, cb_state, pCommandBuffers[i], sub_cb_state, "vkCmdExecuteCommands()");
+                    if (!sub_cb_state->cmd_execute_commands_functions.empty()) {
+                        //  Inherit primary's activeFramebuffer while running the validate functions
+                        for (auto &function : sub_cb_state->cmd_execute_commands_functions) {
+                            skip |= function(cb_state, cb_state->activeFramebuffer);
+                        }
+                    }
+                }
+            }
+        }
+
+        // TODO(mlentine): Move more logic into this method
+        skip |= ValidateSecondaryCommandBufferState(cb_state, sub_cb_state);
+        skip |= ValidateCommandBufferState(sub_cb_state, "vkCmdExecuteCommands()", 0,
+                                           "VUID-vkCmdExecuteCommands-pCommandBuffers-00089");
+        if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
+            if (sub_cb_state->in_use.load()) {
+                // TODO: Find some way to differentiate between the -00090 and -00091 conditions
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdExecuteCommands-pCommandBuffers-00090",
+                                "Cannot execute pending %s without VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set.",
+                                report_data->FormatHandle(sub_cb_state->commandBuffer).c_str());
+            }
+            // We use a const_cast, because one cannot query a container keyed on a non-const pointer using a const pointer
+            if (cb_state->linkedCommandBuffers.count(const_cast<CMD_BUFFER_STATE *>(sub_cb_state))) {
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                    HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdExecuteCommands-pCommandBuffers-00092",
+                    "Cannot execute %s without VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set if previously executed in %s",
+                    report_data->FormatHandle(sub_cb_state->commandBuffer).c_str(),
+                    report_data->FormatHandle(cb_state->commandBuffer).c_str());
+            }
+
+            const auto insert_pair = linked_command_buffers.insert(sub_cb_state);
+            if (!insert_pair.second) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdExecuteCommands-pCommandBuffers-00093",
+                                "Cannot duplicate %s in pCommandBuffers without VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set.",
+                                report_data->FormatHandle(cb_state->commandBuffer).c_str());
+            }
+
+            if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
+                // Warn that a secondary cmd buffer without SIMULTANEOUS_USE causes the primary to be treated as non-simultaneous-use
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(pCommandBuffers[i]), kVUID_Core_DrawState_InvalidCommandBufferSimultaneousUse,
+                                "vkCmdExecuteCommands(): Secondary %s does not have "
+                                "VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set and will cause primary "
+                                "%s to be treated as if it does not have "
+                                "VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set, even though it does.",
+                                report_data->FormatHandle(pCommandBuffers[i]).c_str(),
+                                report_data->FormatHandle(cb_state->commandBuffer).c_str());
+            }
+        }
+        if (!cb_state->activeQueries.empty() && !enabled_features.core.inheritedQueries) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(pCommandBuffers[i]), "VUID-vkCmdExecuteCommands-commandBuffer-00101",
+                            "vkCmdExecuteCommands(): Secondary %s cannot be submitted with a query in flight and "
+                            "inherited queries not supported on this device.",
+                            report_data->FormatHandle(pCommandBuffers[i]).c_str());
+        }
+        // Validate initial layout uses vs. the primary cmd buffer state
+        // Novel Valid usage: "UNASSIGNED-vkCmdExecuteCommands-commandBuffer-00001"
+        // initial layout usage of secondary command buffers resources must match parent command buffer
+        const auto *const_cb_state = static_cast<const CMD_BUFFER_STATE *>(cb_state);
+        for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
+            const auto image = sub_layout_map_entry.first;
+            const auto *image_state = GetImageState(image);
+            if (!image_state) continue;  // Can't set layouts of a dead image
+
+            const auto *cb_subres_map = GetImageSubresourceLayoutMap(const_cb_state, image);
+            // Const getter can be null in which case we have nothing to check against for this image...
+            if (!cb_subres_map) continue;
+
+            const auto &sub_cb_subres_map = sub_layout_map_entry.second;
+            // Validate the initial_uses: they must match the current state of the primary cb, or, absent a current state,
+            // they must match its initial_layout.
+            for (auto it_init = sub_cb_subres_map->BeginInitialUse(); !it_init.AtEnd(); ++it_init) {
+                const auto &sub_layout = (*it_init).layout;
+                if (VK_IMAGE_LAYOUT_UNDEFINED == sub_layout) continue;  // secondary doesn't care about current or initial
+                const auto &subresource = (*it_init).subresource;
+                // Look up the current layout (if any)
+                VkImageLayout cb_layout = cb_subres_map->GetSubresourceLayout(subresource);
+                const char *layout_type = "current";
+                if (cb_layout == kInvalidLayout) {
+                    // Find initial layout (if any)
+                    cb_layout = cb_subres_map->GetSubresourceInitialLayout(subresource);
+                    layout_type = "initial";
+                }
+                if ((cb_layout != kInvalidLayout) && (cb_layout != sub_layout)) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                    HandleToUint64(pCommandBuffers[i]), "UNASSIGNED-vkCmdExecuteCommands-commandBuffer-00001",
+                                    "%s: Executed secondary command buffer using %s (subresource: aspectMask 0x%X array layer %u, "
+                                    "mip level %u) which expects layout %s--instead, image %s layout is %s.",
+                                    "vkCmdExecuteCommands()", report_data->FormatHandle(image).c_str(), subresource.aspectMask,
+                                    subresource.arrayLayer, subresource.mipLevel, string_VkImageLayout(sub_layout), layout_type,
+                                    string_VkImageLayout(cb_layout));
+                }
+            }
+        }
+    }
+
+    skip |= ValidatePrimaryCommandBuffer(cb_state, "vkCmdExecuteCommands()", "VUID-vkCmdExecuteCommands-bufferlevel");
+    skip |= ValidateCmdQueueFlags(cb_state, "vkCmdExecuteCommands()",
+                                  VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
+                                  "VUID-vkCmdExecuteCommands-commandBuffer-cmdpool");
+    skip |= ValidateCmd(cb_state, CMD_EXECUTECOMMANDS, "vkCmdExecuteCommands()");
+    return skip;
+}
+
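+// vkMapMemory is only valid on HOST_VISIBLE memory types; the requested offset/size range is
+// checked further by ValidateMapMemRange.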
+bool CoreChecks::PreCallValidateMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
+                                          VkFlags flags, void **ppData) const {
+    bool skip = false;
+    const DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
+    if (mem_info) {
+        if ((phys_dev_mem_props.memoryTypes[mem_info->alloc_info.memoryTypeIndex].propertyFlags &
+             VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0) {
+            skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+                           HandleToUint64(mem), "VUID-vkMapMemory-memory-00682",
+                           "Mapping Memory without VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT set: %s.",
+                           report_data->FormatHandle(mem).c_str());
+        }
+        skip |= ValidateMapMemRange(mem_info, offset, size);
+    }
+    return skip;
+}
+
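+// After a successful map, core checks set up a shadow copy of the mapped range (InitializeShadowMemory)
+// so that out-of-bounds writes to non-coherent memory can be detected when the range is later flushed.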
+void CoreChecks::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, VkFlags flags,
+                                         void **ppData, VkResult result) {
+    if (VK_SUCCESS != result) return;
+    StateTracker::PostCallRecordMapMemory(device, mem, offset, size, flags, ppData, result);
+    InitializeShadowMemory(mem, offset, size, ppData);
+}
+
+bool CoreChecks::PreCallValidateUnmapMemory(VkDevice device, VkDeviceMemory mem) const {
+    bool skip = false;
+    const auto mem_info = GetDevMemState(mem);
+    if (mem_info && !mem_info->mapped_range.size) {
+        // Valid Usage: memory must currently be mapped
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+                        HandleToUint64(mem), "VUID-vkUnmapMemory-memory-00689", "Unmapping Memory without memory being mapped: %s.",
+                        report_data->FormatHandle(mem).c_str());
+    }
+    return skip;
+}
+
+void CoreChecks::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
+    // Only core checks uses the shadow copy, clear that up here
+    auto mem_info = GetDevMemState(mem);
+    if (mem_info && mem_info->shadow_copy_base) {
+        free(mem_info->shadow_copy_base);
+        mem_info->shadow_copy_base = nullptr;
+        mem_info->shadow_copy = nullptr;
+        mem_info->shadow_pad_size = 0;
+    }
+    StateTracker::PreCallRecordUnmapMemory(device, mem);
+}
+
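+// For a VK_WHOLE_SIZE range only the offset is checked against the currently mapped range; otherwise the
+// entire [offset, offset + size) interval must lie within the mapped region.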
+bool CoreChecks::ValidateMemoryIsMapped(const char *funcName, uint32_t memRangeCount, const VkMappedMemoryRange *pMemRanges) const {
+    bool skip = false;
+    for (uint32_t i = 0; i < memRangeCount; ++i) {
+        auto mem_info = GetDevMemState(pMemRanges[i].memory);
+        if (mem_info) {
+            if (pMemRanges[i].size == VK_WHOLE_SIZE) {
+                if (mem_info->mapped_range.offset > pMemRanges[i].offset) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+                                    HandleToUint64(pMemRanges[i].memory), "VUID-VkMappedMemoryRange-size-00686",
+                                    "%s: Flush/Invalidate offset (" PRINTF_SIZE_T_SPECIFIER
+                                    ") is less than Memory Object's offset (" PRINTF_SIZE_T_SPECIFIER ").",
+                                    funcName, static_cast<size_t>(pMemRanges[i].offset),
+                                    static_cast<size_t>(mem_info->mapped_range.offset));
+                }
+            } else {
+                const uint64_t data_end = (mem_info->mapped_range.size == VK_WHOLE_SIZE)
+                                              ? mem_info->alloc_info.allocationSize
+                                              : (mem_info->mapped_range.offset + mem_info->mapped_range.size);
+                if ((mem_info->mapped_range.offset > pMemRanges[i].offset) ||
+                    (data_end < (pMemRanges[i].offset + pMemRanges[i].size))) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+                                    HandleToUint64(pMemRanges[i].memory), "VUID-VkMappedMemoryRange-size-00685",
+                                    "%s: Flush/Invalidate size or offset (" PRINTF_SIZE_T_SPECIFIER ", " PRINTF_SIZE_T_SPECIFIER
+                                    ") exceed the Memory Object's upper-bound (" PRINTF_SIZE_T_SPECIFIER ").",
+                                    funcName, static_cast<size_t>(pMemRanges[i].offset + pMemRanges[i].size),
+                                    static_cast<size_t>(pMemRanges[i].offset), static_cast<size_t>(data_end));
+                }
+            }
+        }
+    }
+    return skip;
+}
+
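+// The shadow copy is laid out as [pad | user data | pad], where each pad is shadow_pad_size bytes filled
+// with NoncoherentMemoryFillValue. A modified byte in the leading pad indicates an underflow and in the
+// trailing pad an overflow; after the checks, the user data portion is copied to the real driver mapping.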
+bool CoreChecks::ValidateAndCopyNoncoherentMemoryToDriver(uint32_t mem_range_count, const VkMappedMemoryRange *mem_ranges) const {
+    bool skip = false;
+    for (uint32_t i = 0; i < mem_range_count; ++i) {
+        auto mem_info = GetDevMemState(mem_ranges[i].memory);
+        if (mem_info) {
+            if (mem_info->shadow_copy) {
+                VkDeviceSize size = (mem_info->mapped_range.size != VK_WHOLE_SIZE)
+                                        ? mem_info->mapped_range.size
+                                        : (mem_info->alloc_info.allocationSize - mem_info->mapped_range.offset);
+                char *data = static_cast<char *>(mem_info->shadow_copy);
+                for (uint64_t j = 0; j < mem_info->shadow_pad_size; ++j) {
+                    if (data[j] != NoncoherentMemoryFillValue) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+                                        HandleToUint64(mem_ranges[i].memory), kVUID_Core_MemTrack_InvalidMap,
+                                        "Memory underflow was detected on %s.",
+                                        report_data->FormatHandle(mem_ranges[i].memory).c_str());
+                    }
+                }
+                for (uint64_t j = (size + mem_info->shadow_pad_size); j < (2 * mem_info->shadow_pad_size + size); ++j) {
+                    if (data[j] != NoncoherentMemoryFillValue) {
+                        skip |=
+                            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+                                    HandleToUint64(mem_ranges[i].memory), kVUID_Core_MemTrack_InvalidMap,
+                                    "Memory overflow was detected on %s.", report_data->FormatHandle(mem_ranges[i].memory).c_str());
+                    }
+                }
+                memcpy(mem_info->p_driver_data, static_cast<void *>(data + mem_info->shadow_pad_size), (size_t)(size));
+            }
+        }
+    }
+    return skip;
+}
+
+void CoreChecks::CopyNoncoherentMemoryFromDriver(uint32_t mem_range_count, const VkMappedMemoryRange *mem_ranges) {
+    for (uint32_t i = 0; i < mem_range_count; ++i) {
+        auto mem_info = GetDevMemState(mem_ranges[i].memory);
+        if (mem_info && mem_info->shadow_copy) {
+            VkDeviceSize size = (mem_info->mapped_range.size != VK_WHOLE_SIZE)
+                                    ? mem_info->mapped_range.size
+                                    : (mem_info->alloc_info.allocationSize - mem_ranges[i].offset);
+            char *data = static_cast<char *>(mem_info->shadow_copy);
+            memcpy(data + mem_info->shadow_pad_size, mem_info->p_driver_data, (size_t)(size));
+        }
+    }
+}
+
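+// Flush/invalidate ranges must respect VkPhysicalDeviceLimits::nonCoherentAtomSize: the offset must be a
+// multiple of the atom size, and so must the size unless it is VK_WHOLE_SIZE or the range runs to the end
+// of the allocation.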
+bool CoreChecks::ValidateMappedMemoryRangeDeviceLimits(const char *func_name, uint32_t mem_range_count,
+                                                       const VkMappedMemoryRange *mem_ranges) const {
+    bool skip = false;
+    for (uint32_t i = 0; i < mem_range_count; ++i) {
+        uint64_t atom_size = phys_dev_props.limits.nonCoherentAtomSize;
+        if (SafeModulo(mem_ranges[i].offset, atom_size) != 0) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+                            HandleToUint64(mem_ranges[i].memory), "VUID-VkMappedMemoryRange-offset-00687",
+                            "%s: Offset in pMemRanges[%d] is 0x%" PRIxLEAST64
+                            ", which is not a multiple of VkPhysicalDeviceLimits::nonCoherentAtomSize (0x%" PRIxLEAST64 ").",
+                            func_name, i, mem_ranges[i].offset, atom_size);
+        }
+        auto mem_info = GetDevMemState(mem_ranges[i].memory);
+        if (mem_info) {
+            if ((mem_ranges[i].size != VK_WHOLE_SIZE) &&
+                (mem_ranges[i].size + mem_ranges[i].offset != mem_info->alloc_info.allocationSize) &&
+                (SafeModulo(mem_ranges[i].size, atom_size) != 0)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+                                HandleToUint64(mem_ranges[i].memory), "VUID-VkMappedMemoryRange-size-01390",
+                                "%s: Size in pMemRanges[%d] is 0x%" PRIxLEAST64
+                                ", which is not a multiple of VkPhysicalDeviceLimits::nonCoherentAtomSize (0x%" PRIxLEAST64 ").",
+                                func_name, i, mem_ranges[i].size, atom_size);
+            }
+        }
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateFlushMappedMemoryRanges(VkDevice device, uint32_t memRangeCount,
+                                                        const VkMappedMemoryRange *pMemRanges) const {
+    bool skip = false;
+    skip |= ValidateMappedMemoryRangeDeviceLimits("vkFlushMappedMemoryRanges", memRangeCount, pMemRanges);
+    skip |= ValidateAndCopyNoncoherentMemoryToDriver(memRangeCount, pMemRanges);
+    skip |= ValidateMemoryIsMapped("vkFlushMappedMemoryRanges", memRangeCount, pMemRanges);
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateInvalidateMappedMemoryRanges(VkDevice device, uint32_t memRangeCount,
+                                                             const VkMappedMemoryRange *pMemRanges) const {
+    bool skip = false;
+    skip |= ValidateMappedMemoryRangeDeviceLimits("vkInvalidateMappedMemoryRanges", memRangeCount, pMemRanges);
+    skip |= ValidateMemoryIsMapped("vkInvalidateMappedMemoryRanges", memRangeCount, pMemRanges);
+    return skip;
+}
+
+void CoreChecks::PostCallRecordInvalidateMappedMemoryRanges(VkDevice device, uint32_t memRangeCount,
+                                                            const VkMappedMemoryRange *pMemRanges, VkResult result) {
+    if (VK_SUCCESS == result) {
+        // Update our shadow copy with modified driver data
+        CopyNoncoherentMemoryFromDriver(memRangeCount, pMemRanges);
+    }
+}
+
+bool CoreChecks::PreCallValidateGetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory mem, VkDeviceSize *pCommittedMem) const {
+    bool skip = false;
+    const auto mem_info = GetDevMemState(mem);
+
+    if (mem_info) {
+        if ((phys_dev_mem_props.memoryTypes[mem_info->alloc_info.memoryTypeIndex].propertyFlags &
+             VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) == 0) {
+            skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+                           HandleToUint64(mem), "VUID-vkGetDeviceMemoryCommitment-memory-00690",
+                           "Querying commitment for memory without VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT set: %s.",
+                           report_data->FormatHandle(mem).c_str());
+        }
+    }
+    return skip;
+}
+
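+// Shared validation for vkBindImageMemory and vkBindImageMemory2(KHR): checks the memory binding itself,
+// the Android external-format special case, the bound memory range and memory type bits, offset alignment
+// and size against VkMemoryRequirements, dedicated-allocation rules, and the swapchain-image binding path
+// (VkBindImageMemorySwapchainInfoKHR).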
+bool CoreChecks::ValidateBindImageMemory(const VkBindImageMemoryInfo &bindInfo, const char *api_name) const {
+    bool skip = false;
+    const IMAGE_STATE *image_state = GetImageState(bindInfo.image);
+    if (image_state) {
+        // Track objects tied to memory
+        uint64_t image_handle = HandleToUint64(bindInfo.image);
+        skip = ValidateSetMemBinding(bindInfo.memory, VulkanTypedHandle(bindInfo.image, kVulkanObjectTypeImage), api_name);
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+        if (image_state->external_format_android) {
+            if (image_state->memory_requirements_checked) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
+                                kVUID_Core_BindImage_InvalidMemReqQuery,
+                                "%s: Must not call vkGetImageMemoryRequirements on %s that will be bound to an external "
+                                "Android hardware buffer.",
+                                api_name, report_data->FormatHandle(bindInfo.image).c_str());
+            }
+            return skip;
+        }
+#endif  // VK_USE_PLATFORM_ANDROID_KHR
+
+        // Validate bound memory range information
+        const auto mem_info = GetDevMemState(bindInfo.memory);
+        if (mem_info) {
+            skip |= ValidateInsertImageMemoryRange(bindInfo.image, mem_info, bindInfo.memoryOffset, image_state->requirements,
+                                                   image_state->createInfo.tiling == VK_IMAGE_TILING_LINEAR, api_name);
+            skip |= ValidateMemoryTypes(mem_info, image_state->requirements.memoryTypeBits, api_name,
+                                        "VUID-vkBindImageMemory-memory-01047");
+        }
+
+        // Validate memory requirements alignment
+        if (SafeModulo(bindInfo.memoryOffset, image_state->requirements.alignment) != 0) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
+                            "VUID-vkBindImageMemory-memoryOffset-01048",
+                            "%s: memoryOffset is 0x%" PRIxLEAST64
+                            " but must be an integer multiple of the VkMemoryRequirements::alignment value 0x%" PRIxLEAST64
+                            ", returned from a call to vkGetImageMemoryRequirements with image.",
+                            api_name, bindInfo.memoryOffset, image_state->requirements.alignment);
+        }
+
+        if (mem_info) {
+            // Validate memory requirements size
+            if (image_state->requirements.size > mem_info->alloc_info.allocationSize - bindInfo.memoryOffset) {
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
+                            "VUID-vkBindImageMemory-size-01049",
+                            "%s: memory size minus memoryOffset is 0x%" PRIxLEAST64
+                            " but must be at least as large as VkMemoryRequirements::size value 0x%" PRIxLEAST64
+                            ", returned from a call to vkGetImageMemoryRequirements with image.",
+                            api_name, mem_info->alloc_info.allocationSize - bindInfo.memoryOffset, image_state->requirements.size);
+            }
+
+            // Validate dedicated allocation
+            if (mem_info->is_dedicated) {
+                if (enabled_features.dedicated_allocation_image_aliasing_features.dedicatedAllocationImageAliasing) {
+                    const auto orig_image_state = GetImageState(mem_info->dedicated_image);
+                    const auto current_image_state = GetImageState(bindInfo.image);
+                    if ((bindInfo.memoryOffset != 0) || !orig_image_state || !current_image_state ||
+                        !current_image_state->IsCreateInfoDedicatedAllocationImageAliasingCompatible(
+                            orig_image_state->createInfo)) {
+                        const char *validation_error;
+                        if (strcmp(api_name, "vkBindImageMemory()") == 0) {
+                            validation_error = "VUID-vkBindImageMemory-memory-02629";
+                        } else {
+                            validation_error = "VUID-VkBindImageMemoryInfo-memory-02631";
+                        }
+                        skip |=
+                            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
+                                    validation_error,
+                                    "%s: for dedicated memory allocation %s, VkMemoryDedicatedAllocateInfoKHR:: %s must be compatible "
+                                    "with %s and memoryOffset 0x%" PRIxLEAST64 " must be zero.",
+                                    api_name, report_data->FormatHandle(bindInfo.memory).c_str(),
+                                    report_data->FormatHandle(mem_info->dedicated_image).c_str(),
+                                    report_data->FormatHandle(bindInfo.image).c_str(), bindInfo.memoryOffset);
+                    }
+                } else {
+                    if ((bindInfo.memoryOffset != 0) || (mem_info->dedicated_image != bindInfo.image)) {
+                        const char *validation_error;
+                        if (strcmp(api_name, "vkBindImageMemory()") == 0) {
+                            validation_error = "VUID-vkBindImageMemory-memory-01509";
+                        } else {
+                            validation_error = "VUID-VkBindImageMemoryInfo-memory-01903";
+                        }
+                        skip |=
+                            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
+                                    validation_error,
+                                    "%s: for dedicated memory allocation %s, VkMemoryDedicatedAllocateInfoKHR:: %s must be equal "
+                                    "to %s and memoryOffset 0x%" PRIxLEAST64 " must be zero.",
+                                    api_name, report_data->FormatHandle(bindInfo.memory).c_str(),
+                                    report_data->FormatHandle(mem_info->dedicated_image).c_str(),
+                                    report_data->FormatHandle(bindInfo.image).c_str(), bindInfo.memoryOffset);
+                    }
+                }
+            }
+        }
+
+        const auto swapchain_info = lvl_find_in_chain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
+        if (swapchain_info) {
+            if (bindInfo.memory != VK_NULL_HANDLE) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
+                                "VUID-VkBindImageMemoryInfo-pNext-01631", "%s: %s is not VK_NULL_HANDLE.", api_name,
+                                report_data->FormatHandle(bindInfo.memory).c_str());
+            }
+            if (image_state->create_from_swapchain != swapchain_info->swapchain) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                HandleToUint64(image_state->image), kVUID_Core_BindImageMemory_Swapchain,
+                                "%s: %s is created by %s, but the image is bound by %s. The image should be created and bound by "
+                                "the same swapchain.",
+                                api_name, report_data->FormatHandle(image_state->image).c_str(),
+                                report_data->FormatHandle(image_state->create_from_swapchain).c_str(),
+                                report_data->FormatHandle(swapchain_info->swapchain).c_str());
+            }
+            const auto swapchain_state = GetSwapchainState(swapchain_info->swapchain);
+            if (swapchain_state && swapchain_state->images.size() <= swapchain_info->imageIndex) {
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
+                            "VUID-VkBindImageMemorySwapchainInfoKHR-imageIndex-01644",
+                            "%s: imageIndex (%i) is out of bounds of %s images (size: %i)", api_name, swapchain_info->imageIndex,
+                            report_data->FormatHandle(swapchain_info->swapchain).c_str(), (int)swapchain_state->images.size());
+            }
+        } else {
+            if (image_state->create_from_swapchain) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
+                                "VUID-VkBindImageMemoryInfo-image-01630",
+                                "%s: pNext of VkBindImageMemoryInfo doesn't include VkBindImageMemorySwapchainInfoKHR.", api_name);
+            }
+            if (!mem_info) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
+                                "VUID-VkBindImageMemoryInfo-pNext-01632", "%s: %s is invalid.", api_name,
+                                report_data->FormatHandle(bindInfo.memory).c_str());
+            }
+        }
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
+                                                VkDeviceSize memoryOffset) const {
+    VkBindImageMemoryInfo bindInfo = {};
+    bindInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
+    bindInfo.image = image;
+    bindInfo.memory = mem;
+    bindInfo.memoryOffset = memoryOffset;
+    return ValidateBindImageMemory(bindInfo, "vkBindImageMemory()");
+}
+
+bool CoreChecks::PreCallValidateBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
+                                                 const VkBindImageMemoryInfoKHR *pBindInfos) const {
+    bool skip = false;
+    char api_name[128];
+    for (uint32_t i = 0; i < bindInfoCount; i++) {
+        sprintf(api_name, "vkBindImageMemory2() pBindInfos[%u]", i);
+        skip |= ValidateBindImageMemory(pBindInfos[i], api_name);
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
+                                                    const VkBindImageMemoryInfoKHR *pBindInfos) const {
+    bool skip = false;
+    char api_name[128];
+    for (uint32_t i = 0; i < bindInfoCount; i++) {
+        sprintf(api_name, "vkBindImageMemory2KHR() pBindInfos[%u]", i);
+        skip |= ValidateBindImageMemory(pBindInfos[i], api_name);
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateSetEvent(VkDevice device, VkEvent event) const {
+    bool skip = false;
+    const auto event_state = GetEventState(event);
+    if (event_state) {
+        if (event_state->write_in_use) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT,
+                            HandleToUint64(event), kVUID_Core_DrawState_QueueForwardProgress,
+                            "Cannot call vkSetEvent() on %s that is already in use by a command buffer.",
+                            report_data->FormatHandle(event).c_str());
+        }
+    }
+    return skip;
+}
+
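+// After checking that the queue supports VK_QUEUE_SPARSE_BINDING_BIT, validation makes two passes over
+// pBindInfo: the first checks wait/signal semaphore state (binary semaphore forward progress, timeline
+// semaphore submit-info consistency) while collecting timeline payload values, and the second uses those
+// values to enforce the maximum timeline semaphore value difference limits.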
+bool CoreChecks::PreCallValidateQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
+                                                VkFence fence) const {
+    const auto queue_data = GetQueueState(queue);
+    const auto pFence = GetFenceState(fence);
+    bool skip = ValidateFenceForSubmit(pFence);
+    if (skip) {
+        return true;
+    }
+
+    const auto queueFlags = GetPhysicalDeviceState()->queue_family_properties[queue_data->queueFamilyIndex].queueFlags;
+    if (!(queueFlags & VK_QUEUE_SPARSE_BINDING_BIT)) {
+        skip |= log_msg(
+            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, HandleToUint64(queue),
+            "VUID-vkQueueBindSparse-queuetype",
+            "Attempting vkQueueBindSparse on a non-memory-management capable queue -- VK_QUEUE_SPARSE_BINDING_BIT not set.");
+    }
+
+    unordered_set<VkSemaphore> signaled_semaphores;
+    unordered_set<VkSemaphore> unsignaled_semaphores;
+    unordered_set<VkSemaphore> internal_semaphores;
+    unordered_map<VkSemaphore, std::set<uint64_t>> timeline_values;
+    for (uint32_t bindIdx = 0; bindIdx < bindInfoCount; ++bindIdx) {
+        const VkBindSparseInfo &bindInfo = pBindInfo[bindIdx];
+
+        auto timeline_semaphore_submit_info = lvl_find_in_chain<VkTimelineSemaphoreSubmitInfoKHR>(bindInfo.pNext);
+        std::vector<SEMAPHORE_WAIT> semaphore_waits;
+        std::vector<VkSemaphore> semaphore_signals;
+        for (uint32_t i = 0; i < bindInfo.waitSemaphoreCount; ++i) {
+            VkSemaphore semaphore = bindInfo.pWaitSemaphores[i];
+            const auto pSemaphore = GetSemaphoreState(semaphore);
+            if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && !timeline_semaphore_submit_info) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+                                HandleToUint64(semaphore), "VUID-VkBindSparseInfo-pWaitSemaphores-03246",
+                                "VkQueueBindSparse: %s is a timeline semaphore, but pBindInfo does not "
+                                "include an instance of VkTimelineSemaphoreSubmitInfoKHR",
+                                report_data->FormatHandle(semaphore).c_str());
+            }
+            if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && timeline_semaphore_submit_info &&
+                bindInfo.waitSemaphoreCount != timeline_semaphore_submit_info->waitSemaphoreValueCount) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+                                HandleToUint64(semaphore), "VUID-VkBindSparseInfo-pNext-03247",
+                                "VkQueueBindSparse: %s is a timeline semaphore, it contains an instance of "
+                                "VkTimelineSemaphoreSubmitInfoKHR, but waitSemaphoreValueCount is different than "
+                                "waitSemaphoreCount",
+                                report_data->FormatHandle(semaphore).c_str());
+            }
+            if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR &&
+                (pSemaphore->scope == kSyncScopeInternal || internal_semaphores.count(semaphore))) {
+                if (unsignaled_semaphores.count(semaphore) ||
+                    (!(signaled_semaphores.count(semaphore)) && !(pSemaphore->signaled))) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+                                    HandleToUint64(semaphore), kVUID_Core_DrawState_QueueForwardProgress,
+                                    "%s is waiting on %s that has no way to be signaled.", report_data->FormatHandle(queue).c_str(),
+                                    report_data->FormatHandle(semaphore).c_str());
+                } else {
+                    signaled_semaphores.erase(semaphore);
+                    unsignaled_semaphores.insert(semaphore);
+                }
+            }
+            if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR &&
+                pSemaphore->scope == kSyncScopeExternalTemporary) {
+                internal_semaphores.insert(semaphore);
+            }
+            if (!skip && pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR) {
+                auto &values = timeline_values[semaphore];
+                if (values.empty()) {
+                    values.insert(pSemaphore->payload);
+                }
+                values.insert(timeline_semaphore_submit_info->pWaitSemaphoreValues[i]);
+            }
+        }
+
+        for (uint32_t i = 0; i < bindInfo.signalSemaphoreCount; ++i) {
+            VkSemaphore semaphore = bindInfo.pSignalSemaphores[i];
+            const auto pSemaphore = GetSemaphoreState(semaphore);
+            if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && !timeline_semaphore_submit_info) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+                                HandleToUint64(semaphore), "VUID-VkBindSparseInfo-pWaitSemaphores-03246",
+                                "VkQueueBindSparse: %s is a timeline semaphore, but pBindInfo does not "
+                                "include an instance of VkTimelineSemaphoreSubmitInfoKHR",
+                                report_data->FormatHandle(semaphore).c_str());
+            }
+            if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && timeline_semaphore_submit_info &&
+                timeline_semaphore_submit_info->pSignalSemaphoreValues[i] <= pSemaphore->payload) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+                                HandleToUint64(semaphore), "VUID-VkBindSparseInfo-pSignalSemaphores-03249",
+                                "VkQueueBindSparse: signal value in %s must be greater than current timeline semaphore %s value",
+                                report_data->FormatHandle(queue).c_str(), report_data->FormatHandle(semaphore).c_str());
+            }
+            if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR && timeline_semaphore_submit_info &&
+                bindInfo.signalSemaphoreCount != timeline_semaphore_submit_info->signalSemaphoreValueCount) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+                                HandleToUint64(semaphore), "VUID-VkBindSparseInfo-pNext-03248",
+                                "VkQueueBindSparse: %s is a timeline semaphore, it contains an instance of "
+                                "VkTimelineSemaphoreSubmitInfoKHR, but signalSemaphoreValueCount is different than "
+                                "signalSemaphoreCount",
+                                report_data->FormatHandle(semaphore).c_str());
+            }
+            if (pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR && pSemaphore->scope == kSyncScopeInternal) {
+                if (signaled_semaphores.count(semaphore) || (!(unsignaled_semaphores.count(semaphore)) && pSemaphore->signaled)) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+                                    HandleToUint64(semaphore), kVUID_Core_DrawState_QueueForwardProgress,
+                                    "%s is signaling %s that was previously signaled by %s but has not since "
+                                    "been waited on by any queue.",
+                                    report_data->FormatHandle(queue).c_str(), report_data->FormatHandle(semaphore).c_str(),
+                                    report_data->FormatHandle(pSemaphore->signaler.first).c_str());
+                } else {
+                    unsignaled_semaphores.erase(semaphore);
+                    signaled_semaphores.insert(semaphore);
+                }
+            }
+            if (!skip && pSemaphore && pSemaphore->type == VK_SEMAPHORE_TYPE_TIMELINE_KHR) {
+                auto &values = timeline_values[semaphore];
+                if (values.empty()) {
+                    values.insert(pSemaphore->payload);
+                }
+                values.insert(timeline_semaphore_submit_info->pSignalSemaphoreValues[i]);
+            }
+        }
+    }
+
+    for (uint32_t bindIdx = 0; bindIdx < bindInfoCount; ++bindIdx) {
+        const VkBindSparseInfo *bindInfo = &pBindInfo[bindIdx];
+        auto *info = lvl_find_in_chain<VkTimelineSemaphoreSubmitInfoKHR>(bindInfo->pNext);
+
+        for (uint32_t i = 0; i < bindInfo->waitSemaphoreCount; ++i) {
+            VkSemaphore semaphore = bindInfo->pWaitSemaphores[i];
+            skip |= ValidateMaxTimelineSemaphoreValueDifference(queue, semaphore, info ? info->pWaitSemaphoreValues[i] : 0,
+                                                                &timeline_values, "VkQueueBindSparse",
+                                                                "VUID-VkBindSparseInfo-pWaitSemaphores-03250");
+        }
+        for (uint32_t i = 0; i < bindInfo->signalSemaphoreCount; ++i) {
+            VkSemaphore semaphore = bindInfo->pSignalSemaphores[i];
+            skip |= ValidateMaxTimelineSemaphoreValueDifference(queue, semaphore, info ? info->pSignalSemaphoreValues[i] : 0,
+                                                                &timeline_values, "VkQueueBindSparse",
+                                                                "VUID-VkBindSparseInfo-pSignalSemaphores-03251");
+        }
+    }
+
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfoKHR *pSignalInfo) const {
+    bool skip = false;
+    const auto pSemaphore = GetSemaphoreState(pSignalInfo->semaphore);
+    if (pSemaphore && pSemaphore->type != VK_SEMAPHORE_TYPE_TIMELINE_KHR) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+                        HandleToUint64(pSignalInfo->semaphore), "VUID-VkSemaphoreSignalInfoKHR-semaphore-03257",
+                        "VkSignalSemaphoreKHR: semaphore %s must be of VK_SEMAPHORE_TYPE_TIMELINE_KHR type",
+                        report_data->FormatHandle(pSignalInfo->semaphore).c_str());
+    }
+    return skip;
+}
+bool CoreChecks::ValidateImportSemaphore(VkSemaphore semaphore, const char *caller_name) const {
+    bool skip = false;
+    const SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
+    if (sema_node) {
+        const VulkanTypedHandle obj_struct(semaphore, kVulkanObjectTypeSemaphore);
+        skip |= ValidateObjectNotInUse(sema_node, obj_struct, caller_name, kVUIDUndefined);
+    }
+    return skip;
+}
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool CoreChecks::PreCallValidateImportSemaphoreWin32HandleKHR(
+    VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo) const {
+    return ValidateImportSemaphore(pImportSemaphoreWin32HandleInfo->semaphore, "vkImportSemaphoreWin32HandleKHR");
+}
+
+#endif  // VK_USE_PLATFORM_WIN32_KHR
+
+bool CoreChecks::PreCallValidateImportSemaphoreFdKHR(VkDevice device,
+                                                     const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo) const {
+    return ValidateImportSemaphore(pImportSemaphoreFdInfo->semaphore, "vkImportSemaphoreFdKHR");
+}
+
+bool CoreChecks::ValidateImportFence(VkFence fence, const char *caller_name) const {
+    const FENCE_STATE *fence_node = GetFenceState(fence);
+    bool skip = false;
+    if (fence_node && fence_node->scope == kSyncScopeInternal && fence_node->state == FENCE_INFLIGHT) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, HandleToUint64(fence),
+                        kVUIDUndefined, "Cannot call %s on %s that is currently in use.", caller_name,
+                        report_data->FormatHandle(fence).c_str());
+    }
+    return skip;
+}
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool CoreChecks::PreCallValidateImportFenceWin32HandleKHR(
+    VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo) const {
+    return ValidateImportFence(pImportFenceWin32HandleInfo->fence, "vkImportFenceWin32HandleKHR");
+}
+#endif  // VK_USE_PLATFORM_WIN32_KHR
+
+bool CoreChecks::PreCallValidateImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo) const {
+    return ValidateImportFence(pImportFenceFdInfo->fence, "vkImportFenceFdKHR");
+}
+
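+// Swapchain creation parameters are validated against the surface's reported capabilities (image count,
+// extent, transform, composite alpha, array layers, usage, protected support), the supported surface
+// formats/color spaces, and the supported present modes, plus the extension-gated shared-presentable and
+// mutable-format paths.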
+bool CoreChecks::ValidateCreateSwapchain(const char *func_name, VkSwapchainCreateInfoKHR const *pCreateInfo,
+                                         const SURFACE_STATE *surface_state, const SWAPCHAIN_NODE *old_swapchain_state) const {
+    // All physical devices and queue families are required to be able to present to any native window on Android; require the
+    // application to have established support on any other platform.
+    if (!instance_extensions.vk_khr_android_surface) {
+        auto support_predicate = [this](decltype(surface_state->gpu_queue_support)::value_type qs) -> bool {
+            // TODO: should restrict search only to queue families of VkDeviceQueueCreateInfos, not whole phys. device
+            return (qs.first.gpu == physical_device) && qs.second;
+        };
+        const auto &support = surface_state->gpu_queue_support;
+        bool is_supported = std::any_of(support.begin(), support.end(), support_predicate);
+
+        if (!is_supported) {
+            if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        "VUID-VkSwapchainCreateInfoKHR-surface-01270",
+                        "%s: pCreateInfo->surface is not known at this time to be supported for presentation by this device. "
+                        "vkGetPhysicalDeviceSurfaceSupportKHR() must be called beforehand, and it must return VK_TRUE support for "
+                        "this surface with at least one queue family of this device.",
+                        func_name))
+                return true;
+        }
+    }
+
+    if (old_swapchain_state) {
+        if (old_swapchain_state->createInfo.surface != pCreateInfo->surface) {
+            if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+                        HandleToUint64(pCreateInfo->oldSwapchain), "VUID-VkSwapchainCreateInfoKHR-oldSwapchain-01933",
+                        "%s: pCreateInfo->oldSwapchain's surface is not pCreateInfo->surface", func_name))
+                return true;
+        }
+        if (old_swapchain_state->retired) {
+            if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+                        HandleToUint64(pCreateInfo->oldSwapchain), "VUID-VkSwapchainCreateInfoKHR-oldSwapchain-01933",
+                        "%s: pCreateInfo->oldSwapchain is retired", func_name))
+                return true;
+        }
+    }
+
+    if ((pCreateInfo->imageExtent.width == 0) || (pCreateInfo->imageExtent.height == 0)) {
+        if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                    "VUID-VkSwapchainCreateInfoKHR-imageExtent-01689", "%s: pCreateInfo->imageExtent = (%d, %d) which is illegal.",
+                    func_name, pCreateInfo->imageExtent.width, pCreateInfo->imageExtent.height))
+            return true;
+    }
+
+    auto physical_device_state = GetPhysicalDeviceState();
+    bool skip = false;
+    VkSurfaceTransformFlagBitsKHR currentTransform = physical_device_state->surfaceCapabilities.currentTransform;
+    if ((pCreateInfo->preTransform & currentTransform) != pCreateInfo->preTransform) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+                        HandleToUint64(physical_device), kVUID_Core_Swapchain_PreTransform,
+                        "%s: pCreateInfo->preTransform (%s) doesn't match the currentTransform (%s) returned by "
+                        "vkGetPhysicalDeviceSurfaceCapabilitiesKHR, the presentation engine will transform the image "
+                        "content as part of the presentation operation.",
+                        func_name, string_VkSurfaceTransformFlagBitsKHR(pCreateInfo->preTransform),
+                        string_VkSurfaceTransformFlagBitsKHR(currentTransform));
+    }
+
+    VkSurfaceCapabilitiesKHR capabilities{};
+    DispatchGetPhysicalDeviceSurfaceCapabilitiesKHR(physical_device_state->phys_device, pCreateInfo->surface, &capabilities);
+    // Validate pCreateInfo->minImageCount against VkSurfaceCapabilitiesKHR::{min|max}ImageCount:
+    if (pCreateInfo->minImageCount < capabilities.minImageCount) {
+        if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                    "VUID-VkSwapchainCreateInfoKHR-minImageCount-01271",
+                    "%s called with minImageCount = %d, which is outside the bounds returned by "
+                    "vkGetPhysicalDeviceSurfaceCapabilitiesKHR() (i.e. minImageCount = %d, maxImageCount = %d).",
+                    func_name, pCreateInfo->minImageCount, capabilities.minImageCount, capabilities.maxImageCount))
+            return true;
+    }
+
+    if ((capabilities.maxImageCount > 0) && (pCreateInfo->minImageCount > capabilities.maxImageCount)) {
+        if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                    "VUID-VkSwapchainCreateInfoKHR-minImageCount-01272",
+                    "%s called with minImageCount = %d, which is outside the bounds returned by "
+                    "vkGetPhysicalDeviceSurfaceCapabilitiesKHR() (i.e. minImageCount = %d, maxImageCount = %d).",
+                    func_name, pCreateInfo->minImageCount, capabilities.minImageCount, capabilities.maxImageCount))
+            return true;
+    }
+
+    // Validate pCreateInfo->imageExtent against VkSurfaceCapabilitiesKHR::{current|min|max}ImageExtent:
+    if ((pCreateInfo->imageExtent.width < capabilities.minImageExtent.width) ||
+        (pCreateInfo->imageExtent.width > capabilities.maxImageExtent.width) ||
+        (pCreateInfo->imageExtent.height < capabilities.minImageExtent.height) ||
+        (pCreateInfo->imageExtent.height > capabilities.maxImageExtent.height)) {
+        if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                    "VUID-VkSwapchainCreateInfoKHR-imageExtent-01274",
+                    "%s called with imageExtent = (%d,%d), which is outside the bounds returned by "
+                    "vkGetPhysicalDeviceSurfaceCapabilitiesKHR(): currentExtent = (%d,%d), minImageExtent = (%d,%d), "
+                    "maxImageExtent = (%d,%d).",
+                    func_name, pCreateInfo->imageExtent.width, pCreateInfo->imageExtent.height, capabilities.currentExtent.width,
+                    capabilities.currentExtent.height, capabilities.minImageExtent.width, capabilities.minImageExtent.height,
+                    capabilities.maxImageExtent.width, capabilities.maxImageExtent.height))
+            return true;
+    }
+    // pCreateInfo->preTransform should have exactly one bit set, and that bit must also be set in
+    // VkSurfaceCapabilitiesKHR::supportedTransforms.
+    if (!pCreateInfo->preTransform || (pCreateInfo->preTransform & (pCreateInfo->preTransform - 1)) ||
+        !(pCreateInfo->preTransform & capabilities.supportedTransforms)) {
+        // This is an error situation; one for which we'd like to give the developer a helpful, multi-line error message.  Build
+        // it up a little at a time, and then log it:
+        std::string errorString = "";
+        char str[1024];
+        // Here's the first part of the message:
+        sprintf(str, "%s called with a non-supported pCreateInfo->preTransform (i.e. %s).  Supported values are:\n", func_name,
+                string_VkSurfaceTransformFlagBitsKHR(pCreateInfo->preTransform));
+        errorString += str;
+        for (int i = 0; i < 32; i++) {
+            // Build up the rest of the message:
+            if ((1 << i) & capabilities.supportedTransforms) {
+                const char *newStr = string_VkSurfaceTransformFlagBitsKHR((VkSurfaceTransformFlagBitsKHR)(1 << i));
+                sprintf(str, "    %s\n", newStr);
+                errorString += str;
+            }
+        }
+        // Log the message that we've built up:
+        if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                    "VUID-VkSwapchainCreateInfoKHR-preTransform-01279", "%s.", errorString.c_str()))
+            return true;
+    }
+
+    // pCreateInfo->compositeAlpha should have exactly one bit set, and that bit must also be set in
+    // VkSurfaceCapabilitiesKHR::supportedCompositeAlpha
+    if (!pCreateInfo->compositeAlpha || (pCreateInfo->compositeAlpha & (pCreateInfo->compositeAlpha - 1)) ||
+        !((pCreateInfo->compositeAlpha) & capabilities.supportedCompositeAlpha)) {
+        // This is an error situation; one for which we'd like to give the developer a helpful, multi-line error message.  Build
+        // it up a little at a time, and then log it:
+        std::string errorString = "";
+        char str[1024];
+        // Here's the first part of the message:
+        sprintf(str, "%s called with a non-supported pCreateInfo->compositeAlpha (i.e. %s).  Supported values are:\n", func_name,
+                string_VkCompositeAlphaFlagBitsKHR(pCreateInfo->compositeAlpha));
+        errorString += str;
+        for (int i = 0; i < 32; i++) {
+            // Build up the rest of the message:
+            if ((1 << i) & capabilities.supportedCompositeAlpha) {
+                const char *newStr = string_VkCompositeAlphaFlagBitsKHR((VkCompositeAlphaFlagBitsKHR)(1 << i));
+                sprintf(str, "    %s\n", newStr);
+                errorString += str;
+            }
+        }
+        // Log the message that we've built up:
+        if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                    "VUID-VkSwapchainCreateInfoKHR-compositeAlpha-01280", "%s.", errorString.c_str()))
+            return true;
+    }
+    // Validate pCreateInfo->imageArrayLayers against VkSurfaceCapabilitiesKHR::maxImageArrayLayers:
+    if (pCreateInfo->imageArrayLayers > capabilities.maxImageArrayLayers) {
+        if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                    "VUID-VkSwapchainCreateInfoKHR-imageArrayLayers-01275",
+                    "%s called with a non-supported imageArrayLayers (i.e. %d).  Maximum value is %d.", func_name,
+                    pCreateInfo->imageArrayLayers, capabilities.maxImageArrayLayers))
+            return true;
+    }
+    // Validate pCreateInfo->imageUsage against VkSurfaceCapabilitiesKHR::supportedUsageFlags:
+    if (pCreateInfo->imageUsage != (pCreateInfo->imageUsage & capabilities.supportedUsageFlags)) {
+        if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                    "VUID-VkSwapchainCreateInfoKHR-imageUsage-01276",
+                    "%s called with a non-supported pCreateInfo->imageUsage (i.e. 0x%08x).  Supported flag bits are 0x%08x.",
+                    func_name, pCreateInfo->imageUsage, capabilities.supportedUsageFlags))
+            return true;
+    }
+
+    if (device_extensions.vk_khr_surface_protected_capabilities && (pCreateInfo->flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR)) {
+        VkPhysicalDeviceSurfaceInfo2KHR surfaceInfo = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR};
+        surfaceInfo.surface = pCreateInfo->surface;
+        VkSurfaceProtectedCapabilitiesKHR surfaceProtectedCapabilities = {VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR};
+        VkSurfaceCapabilities2KHR surfaceCapabilities = {VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR};
+        surfaceCapabilities.pNext = &surfaceProtectedCapabilities;
+        DispatchGetPhysicalDeviceSurfaceCapabilities2KHR(physical_device_state->phys_device, &surfaceInfo, &surfaceCapabilities);
+
+        if (!surfaceProtectedCapabilities.supportsProtected) {
+            if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        "VUID-VkSwapchainCreateInfoKHR-flags-03187",
+                        "%s: pCreateInfo->flags contains VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR but the surface "
+                        "capabilities does not have VkSurfaceProtectedCapabilitiesKHR.supportsProtected set to VK_TRUE.",
+                        func_name))
+                return true;
+        }
+    }
+
+    std::vector<VkSurfaceFormatKHR> surface_formats;
+    const auto *surface_formats_ref = &surface_formats;
+
+    // Validate pCreateInfo values with the results of vkGetPhysicalDeviceSurfaceFormatsKHR():
+    if (physical_device_state->surface_formats.empty()) {
+        uint32_t surface_format_count = 0;
+        DispatchGetPhysicalDeviceSurfaceFormatsKHR(physical_device, pCreateInfo->surface, &surface_format_count, nullptr);
+        surface_formats.resize(surface_format_count);
+        DispatchGetPhysicalDeviceSurfaceFormatsKHR(physical_device, pCreateInfo->surface, &surface_format_count,
+                                                   &surface_formats[0]);
+    } else {
+        surface_formats_ref = &physical_device_state->surface_formats;
+    }
+
+    {
+        // Validate pCreateInfo->imageFormat against VkSurfaceFormatKHR::format:
+        bool foundFormat = false;
+        bool foundColorSpace = false;
+        bool foundMatch = false;
+        for (auto const &format : *surface_formats_ref) {
+            if (pCreateInfo->imageFormat == format.format) {
+                // Validate pCreateInfo->imageColorSpace against VkSurfaceFormatKHR::colorSpace:
+                foundFormat = true;
+                if (pCreateInfo->imageColorSpace == format.colorSpace) {
+                    foundMatch = true;
+                    break;
+                }
+            } else {
+                if (pCreateInfo->imageColorSpace == format.colorSpace) {
+                    foundColorSpace = true;
+                }
+            }
+        }
+        if (!foundMatch) {
+            if (!foundFormat) {
+                if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                            HandleToUint64(device), "VUID-VkSwapchainCreateInfoKHR-imageFormat-01273",
+                            "%s called with a non-supported pCreateInfo->imageFormat (i.e. %d).", func_name,
+                            pCreateInfo->imageFormat))
+                    return true;
+            }
+            if (!foundColorSpace) {
+                if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                            HandleToUint64(device), "VUID-VkSwapchainCreateInfoKHR-imageFormat-01273",
+                            "%s called with a non-supported pCreateInfo->imageColorSpace (i.e. %d).", func_name,
+                            pCreateInfo->imageColorSpace))
+                    return true;
+            }
+        }
+    }
+
+    std::vector<VkPresentModeKHR> present_modes;
+    const auto *present_modes_ref = &present_modes;
+
+    // Validate pCreateInfo values with the results of vkGetPhysicalDeviceSurfacePresentModesKHR():
+    if (physical_device_state->present_modes.empty()) {
+        uint32_t present_mode_count = 0;
+        DispatchGetPhysicalDeviceSurfacePresentModesKHR(physical_device_state->phys_device, pCreateInfo->surface,
+                                                        &present_mode_count, nullptr);
+        present_modes.resize(present_mode_count);
+        DispatchGetPhysicalDeviceSurfacePresentModesKHR(physical_device_state->phys_device, pCreateInfo->surface,
+                                                        &present_mode_count, &present_modes[0]);
+    } else {
+        present_modes_ref = &physical_device_state->present_modes;
+    }
+
+    // Validate pCreateInfo->presentMode against vkGetPhysicalDeviceSurfacePresentModesKHR():
+    bool foundMatch =
+        std::find(present_modes_ref->begin(), present_modes_ref->end(), pCreateInfo->presentMode) != present_modes_ref->end();
+    if (!foundMatch) {
+        if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                    "VUID-VkSwapchainCreateInfoKHR-presentMode-01281", "%s called with a non-supported presentMode (i.e. %s).",
+                    func_name, string_VkPresentModeKHR(pCreateInfo->presentMode)))
+            return true;
+    }
+
+    // Validate state for shared presentable case
+    if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
+        VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
+        if (!device_extensions.vk_khr_shared_presentable_image) {
+            if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        kVUID_Core_DrawState_ExtensionNotEnabled,
+                        "%s called with presentMode %s which requires the VK_KHR_shared_presentable_image extension, which has not "
+                        "been enabled.",
+                        func_name, string_VkPresentModeKHR(pCreateInfo->presentMode)))
+                return true;
+        } else if (pCreateInfo->minImageCount != 1) {
+            if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        "VUID-VkSwapchainCreateInfoKHR-minImageCount-01383",
+                        "%s called with presentMode %s, but minImageCount value is %d. For shared presentable image, minImageCount "
+                        "must be 1.",
+                        func_name, string_VkPresentModeKHR(pCreateInfo->presentMode), pCreateInfo->minImageCount))
+                return true;
+        }
+    }
+
+    if (pCreateInfo->flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR) {
+        if (!device_extensions.vk_khr_swapchain_mutable_format) {
+            if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        kVUID_Core_DrawState_ExtensionNotEnabled,
+                        "%s: pCreateInfo->flags contains VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR which requires the "
+                        "VK_KHR_swapchain_mutable_format extension, which has not been enabled.",
+                        func_name))
+                return true;
+        } else {
+            const auto *image_format_list = lvl_find_in_chain<VkImageFormatListCreateInfoKHR>(pCreateInfo->pNext);
+            if (image_format_list == nullptr) {
+                if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                            HandleToUint64(device), "VUID-VkSwapchainCreateInfoKHR-flags-03168",
+                            "%s: pCreateInfo->flags contains VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR but the pNext chain of "
+                            "pCreateInfo does not contain an instance of VkImageFormatListCreateInfoKHR.",
+                            func_name))
+                    return true;
+            } else if (image_format_list->viewFormatCount == 0) {
+                if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                            HandleToUint64(device), "VUID-VkSwapchainCreateInfoKHR-flags-03168",
+                            "%s: pCreateInfo->flags contains VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR but the viewFormatCount "
+                            "member of VkImageFormatListCreateInfoKHR in the pNext chain is zero.",
+                            func_name))
+                    return true;
+            } else {
+                bool found_base_format = false;
+                for (uint32_t i = 0; i < image_format_list->viewFormatCount; ++i) {
+                    if (image_format_list->pViewFormats[i] == pCreateInfo->imageFormat) {
+                        found_base_format = true;
+                        break;
+                    }
+                }
+                if (!found_base_format) {
+                    if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                                HandleToUint64(device), "VUID-VkSwapchainCreateInfoKHR-flags-03168",
+                                "%s: pCreateInfo->flags contains VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR but none of the "
+                                "elements of the pViewFormats member of VkImageFormatListCreateInfoKHR match "
+                                "pCreateInfo->imageFormat.",
+                                func_name))
+                        return true;
+                }
+            }
+        }
+    }
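+
+    // Illustrative application-side sketch (not executed by this layer): to satisfy the checks above when using
+    // VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR, a VkImageFormatListCreateInfoKHR whose pViewFormats contains the
+    // swapchain's imageFormat is chained into pCreateInfo->pNext. The formats below are placeholder examples:
+    //
+    //     const VkFormat view_formats[] = {VK_FORMAT_B8G8R8A8_UNORM, VK_FORMAT_B8G8R8A8_SRGB};
+    //     VkImageFormatListCreateInfoKHR format_list = {VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR};
+    //     format_list.viewFormatCount = 2;
+    //     format_list.pViewFormats = view_formats;
+    //     swapchain_ci.flags |= VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR;
+    //     swapchain_ci.imageFormat = VK_FORMAT_B8G8R8A8_UNORM;  // must appear in pViewFormats
+    //     swapchain_ci.pNext = &format_list;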
+
+    if ((pCreateInfo->imageSharingMode == VK_SHARING_MODE_CONCURRENT) && pCreateInfo->pQueueFamilyIndices) {
+        bool skip1 =
+            ValidateQueueFamilies(pCreateInfo->queueFamilyIndexCount, pCreateInfo->pQueueFamilyIndices, func_name,
+                                  "pCreateInfo->pQueueFamilyIndices", "VUID-VkSwapchainCreateInfoKHR-imageSharingMode-01428",
+                                  "VUID-VkSwapchainCreateInfoKHR-imageSharingMode-01428", false);
+        if (skip1) return true;
+    }
+
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
+                                                   const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain) const {
+    const auto surface_state = GetSurfaceState(pCreateInfo->surface);
+    const auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
+    return ValidateCreateSwapchain("vkCreateSwapchainKHR()", pCreateInfo, surface_state, old_swapchain_state);
+}
+
+void CoreChecks::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
+                                                  const VkAllocationCallbacks *pAllocator) {
+    if (swapchain) {
+        auto swapchain_data = GetSwapchainState(swapchain);
+        if (swapchain_data) {
+            for (const auto &swapchain_image : swapchain_data->images) {
+                auto image_sub = imageSubresourceMap.find(swapchain_image.image);
+                if (image_sub != imageSubresourceMap.end()) {
+                    for (auto imgsubpair : image_sub->second) {
+                        auto image_item = imageLayoutMap.find(imgsubpair);
+                        if (image_item != imageLayoutMap.end()) {
+                            imageLayoutMap.erase(image_item);
+                        }
+                    }
+                    imageSubresourceMap.erase(image_sub);
+                }
+                EraseQFOImageRelaseBarriers(swapchain_image.image);
+            }
+        }
+    }
+    StateTracker::PreCallRecordDestroySwapchainKHR(device, swapchain, pAllocator);
+}
+
+bool CoreChecks::PreCallValidateGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
+                                                      VkImage *pSwapchainImages) const {
+    auto swapchain_state = GetSwapchainState(swapchain);
+    bool skip = false;
+    if (swapchain_state && pSwapchainImages) {
+        if (*pSwapchainImageCount > swapchain_state->get_swapchain_image_count) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        kVUID_Core_Swapchain_InvalidCount,
+                        "vkGetSwapchainImagesKHR() called with non-NULL pSwapchainImageCount, and with pSwapchainImages set to a "
+                        "value (%d) that is greater than the value (%d) that was returned when pSwapchainImageCount was NULL.",
+                        *pSwapchainImageCount, swapchain_state->get_swapchain_image_count);
+        }
+    }
+    return skip;
+}
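+
+// Illustrative application-side sketch (not executed by this layer): the count check above corresponds to the usual
+// two-call idiom, where the first call returns the image count and the second fills the array:
+//
+//     uint32_t image_count = 0;
+//     vkGetSwapchainImagesKHR(device, swapchain, &image_count, nullptr);
+//     std::vector<VkImage> images(image_count);
+//     vkGetSwapchainImagesKHR(device, swapchain, &image_count, images.data());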
+
+void CoreChecks::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
+                                                     VkImage *pSwapchainImages, VkResult result) {
+    // Usually we'd call the StateTracker first, but
+    //     a) none of the new state needed below is from the StateTracker
+    //     b) StateTracker *will* update swapchain_state->images which we use to guard against double initialization
+    // so we'll do it in the opposite order -- CoreChecks then StateTracker.
+    //
+    // Note, this will get trickier if we start storing image shared pointers in the image layout data, at which point
+    // we'll have to reverse the order *back* and find some other scheme to prevent double initialization.
+
+    if (((result == VK_SUCCESS) || (result == VK_INCOMPLETE)) && pSwapchainImages) {
+        // Initialize image layout tracking data
+        auto swapchain_state = GetSwapchainState(swapchain);
+        const auto image_vector_size = swapchain_state->images.size();
+        IMAGE_LAYOUT_STATE image_layout_node;
+        image_layout_node.layout = VK_IMAGE_LAYOUT_UNDEFINED;
+        image_layout_node.format = swapchain_state->createInfo.imageFormat;
+
+        for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
+            // This check makes sure that we don't already have an image initialized for this swapchain index, but
+            // since it's the StateTracker that stores this information, we need to protect against non-existent entries in the vector
+            if ((i < image_vector_size) && (swapchain_state->images[i].image != VK_NULL_HANDLE)) continue;
+
+            ImageSubresourcePair subpair = {pSwapchainImages[i], false, VkImageSubresource()};
+            imageSubresourceMap[pSwapchainImages[i]].push_back(subpair);
+            imageLayoutMap[subpair] = image_layout_node;
+        }
+    }
+
+    // Now call the base class
+    StateTracker::PostCallRecordGetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages, result);
+}
+
+bool CoreChecks::PreCallValidateQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo) const {
+    bool skip = false;
+    const auto queue_state = GetQueueState(queue);
+
+    for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
+        const auto pSemaphore = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
+        if (pSemaphore && pSemaphore->type != VK_SEMAPHORE_TYPE_BINARY_KHR) {
+            skip |= log_msg(
+                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+                HandleToUint64(pPresentInfo->pWaitSemaphores[i]),
+                "VUID-vkQueuePresentKHR-pWaitSemaphores-03267",  // VUID-VkPresentInfoKHR-pWaitSemaphores-03269 could fit also!!
+                "VkQueuePresent: %s is not a VK_SEMAPHORE_TYPE_BINARY_KHR",
+                report_data->FormatHandle(pPresentInfo->pWaitSemaphores[i]).c_str());
+        }
+        if (pSemaphore && !pSemaphore->signaled) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0,
+                            kVUID_Core_DrawState_QueueForwardProgress, "%s is waiting on %s that has no way to be signaled.",
+                            report_data->FormatHandle(queue).c_str(),
+                            report_data->FormatHandle(pPresentInfo->pWaitSemaphores[i]).c_str());
+        }
+    }
+
+    for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
+        const auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
+        if (swapchain_data) {
+            if (pPresentInfo->pImageIndices[i] >= swapchain_data->images.size()) {
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+                            HandleToUint64(pPresentInfo->pSwapchains[i]), kVUID_Core_DrawState_SwapchainInvalidImage,
+                            "vkQueuePresentKHR: Swapchain image index too large (%u). There are only %u images in this swapchain.",
+                            pPresentInfo->pImageIndices[i], (uint32_t)swapchain_data->images.size());
+            } else {
+                auto image = swapchain_data->images[pPresentInfo->pImageIndices[i]].image;
+                const auto image_state = GetImageState(image);
+
+                if (!image_state->acquired) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+                                    HandleToUint64(pPresentInfo->pSwapchains[i]), kVUID_Core_DrawState_SwapchainImageNotAcquired,
+                                    "vkQueuePresentKHR: Swapchain image index %u has not been acquired.",
+                                    pPresentInfo->pImageIndices[i]);
+                }
+
+                vector<VkImageLayout> layouts;
+                if (FindLayouts(image, layouts)) {
+                    for (auto layout : layouts) {
+                        if ((layout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) && (!device_extensions.vk_khr_shared_presentable_image ||
+                                                                            (layout != VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR))) {
+                            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
+                                            HandleToUint64(queue), "VUID-VkPresentInfoKHR-pImageIndices-01296",
+                                            "Images passed to present must be in layout VK_IMAGE_LAYOUT_PRESENT_SRC_KHR or "
+                                            "VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR but is in %s.",
+                                            string_VkImageLayout(layout));
+                        }
+                    }
+                }
+            }
+
+            // All physical devices and queue families are required to be able to present to any native window on Android; require
+            // the application to have established support on any other platform.
+            if (!instance_extensions.vk_khr_android_surface) {
+                const auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
+                auto support_it = surface_state->gpu_queue_support.find({physical_device, queue_state->queueFamilyIndex});
+
+                if (support_it == surface_state->gpu_queue_support.end()) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+                                    HandleToUint64(pPresentInfo->pSwapchains[i]), kVUID_Core_DrawState_SwapchainUnsupportedQueue,
+                                    "vkQueuePresentKHR: Presenting image without calling vkGetPhysicalDeviceSurfaceSupportKHR");
+                } else if (!support_it->second) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+                                    HandleToUint64(pPresentInfo->pSwapchains[i]), "VUID-vkQueuePresentKHR-pSwapchains-01292",
+                                    "vkQueuePresentKHR: Presenting image on queue that cannot present to this surface.");
+                }
+            }
+        }
+    }
+    if (pPresentInfo->pNext) {
+        // Verify ext struct
+        const auto *present_regions = lvl_find_in_chain<VkPresentRegionsKHR>(pPresentInfo->pNext);
+        if (present_regions) {
+            for (uint32_t i = 0; i < present_regions->swapchainCount; ++i) {
+                const auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
+                assert(swapchain_data);
+                VkPresentRegionKHR region = present_regions->pRegions[i];
+                for (uint32_t j = 0; j < region.rectangleCount; ++j) {
+                    VkRectLayerKHR rect = region.pRectangles[j];
+                    if ((rect.offset.x + rect.extent.width) > swapchain_data->createInfo.imageExtent.width) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+                                        HandleToUint64(pPresentInfo->pSwapchains[i]), "VUID-VkRectLayerKHR-offset-01261",
+                                        "vkQueuePresentKHR(): For VkPresentRegionKHR down pNext chain, "
+                                        "pRegion[%i].pRectangles[%i], the sum of offset.x (%i) and extent.width (%i) is greater "
+                                        "than the corresponding swapchain's imageExtent.width (%i).",
+                                        i, j, rect.offset.x, rect.extent.width, swapchain_data->createInfo.imageExtent.width);
+                    }
+                    if ((rect.offset.y + rect.extent.height) > swapchain_data->createInfo.imageExtent.height) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+                                        HandleToUint64(pPresentInfo->pSwapchains[i]), "VUID-VkRectLayerKHR-offset-01261",
+                                        "vkQueuePresentKHR(): For VkPresentRegionKHR down pNext chain, "
+                                        "pRegion[%i].pRectangles[%i], the sum of offset.y (%i) and extent.height (%i) is greater "
+                                        "than the corresponding swapchain's imageExtent.height (%i).",
+                                        i, j, rect.offset.y, rect.extent.height, swapchain_data->createInfo.imageExtent.height);
+                    }
+                    if (rect.layer > swapchain_data->createInfo.imageArrayLayers) {
+                        skip |= log_msg(
+                            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+                            HandleToUint64(pPresentInfo->pSwapchains[i]), "VUID-VkRectLayerKHR-layer-01262",
+                            "vkQueuePresentKHR(): For VkPresentRegionKHR down pNext chain, pRegion[%i].pRectangles[%i], the layer "
+                            "(%i) is greater than the corresponding swapchain's imageArrayLayers (%i).",
+                            i, j, rect.layer, swapchain_data->createInfo.imageArrayLayers);
+                    }
+                }
+            }
+        }
+
+        const auto *present_times_info = lvl_find_in_chain<VkPresentTimesInfoGOOGLE>(pPresentInfo->pNext);
+        if (present_times_info) {
+            if (pPresentInfo->swapchainCount != present_times_info->swapchainCount) {
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+                            HandleToUint64(pPresentInfo->pSwapchains[0]), "VUID-VkPresentTimesInfoGOOGLE-swapchainCount-01247",
+                            "vkQueuePresentKHR(): VkPresentTimesInfoGOOGLE.swapchainCount is %i but pPresentInfo->swapchainCount "
+                            "is %i. For VkPresentTimesInfoGOOGLE down pNext chain of VkPresentInfoKHR, "
+                            "VkPresentTimesInfoGOOGLE.swapchainCount must equal VkPresentInfoKHR.swapchainCount.",
+                            present_times_info->swapchainCount, pPresentInfo->swapchainCount);
+            }
+        }
+    }
+
+    return skip;
+}
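+
+// Illustrative application-side sketch (not executed by this layer): the VkPresentRegionsKHR checks above apply when
+// VK_KHR_incremental_present is used. Offsets/extents below are placeholder values; each rectangle is validated
+// against the swapchain's imageExtent and imageArrayLayers:
+//
+//     VkRectLayerKHR rect = {};
+//     rect.offset = {0, 0};
+//     rect.extent = {640, 480};  // offset + extent must stay within the swapchain imageExtent
+//     rect.layer = 0;            // validated against the swapchain's imageArrayLayers
+//     VkPresentRegionKHR region = {1, &rect};
+//     VkPresentRegionsKHR regions = {VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR};
+//     regions.swapchainCount = present_info.swapchainCount;
+//     regions.pRegions = &region;
+//     present_info.pNext = &regions;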
+
+bool CoreChecks::PreCallValidateCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
+                                                          const VkSwapchainCreateInfoKHR *pCreateInfos,
+                                                          const VkAllocationCallbacks *pAllocator,
+                                                          VkSwapchainKHR *pSwapchains) const {
+    bool skip = false;
+    if (pCreateInfos) {
+        for (uint32_t i = 0; i < swapchainCount; i++) {
+            const auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
+            const auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
+            std::stringstream func_name;
+            func_name << "vkCreateSharedSwapchainsKHR[" << swapchainCount << "]()";
+            skip |= ValidateCreateSwapchain(func_name.str().c_str(), &pCreateInfos[i], surface_state, old_swapchain_state);
+        }
+    }
+    return skip;
+}
+
+bool CoreChecks::ValidateAcquireNextImage(VkDevice device, const CommandVersion cmd_version, VkSwapchainKHR swapchain,
+                                          uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
+                                          const char *func_name, const char *semaphore_type_vuid) const {
+    bool skip = false;
+
+    auto pSemaphore = GetSemaphoreState(semaphore);
+    if (pSemaphore && pSemaphore->type != VK_SEMAPHORE_TYPE_BINARY_KHR) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+                        HandleToUint64(semaphore), semaphore_type_vuid, "%s: %s is not a VK_SEMAPHORE_TYPE_BINARY_KHR semaphore.",
+                        func_name,
+                        report_data->FormatHandle(semaphore).c_str());
+    }
+    if (pSemaphore && pSemaphore->scope == kSyncScopeInternal && pSemaphore->signaled) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+                        HandleToUint64(semaphore), "VUID-vkAcquireNextImageKHR-semaphore-01286",
+                        "%s: Semaphore must not be currently signaled or in a wait state.", func_name);
+    }
+
+    auto pFence = GetFenceState(fence);
+    if (pFence) {
+        skip |= ValidateFenceForSubmit(pFence);
+    }
+
+    const auto swapchain_data = GetSwapchainState(swapchain);
+    if (swapchain_data) {
+        if (swapchain_data->retired) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+                            HandleToUint64(swapchain), "VUID-vkAcquireNextImageKHR-swapchain-01285",
+                            "%s: This swapchain has been retired. The application can still present any images it "
+                            "has acquired, but cannot acquire any more.",
+                            func_name);
+        }
+
+        auto physical_device_state = GetPhysicalDeviceState();
+        // TODO: this is technically wrong on many levels, but requires massive cleanup
+        if (physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called) {
+            const uint32_t acquired_images =
+                static_cast<uint32_t>(std::count_if(swapchain_data->images.begin(), swapchain_data->images.end(),
+                                                    [=](SWAPCHAIN_IMAGE image) { return GetImageState(image.image)->acquired; }));
+            const uint32_t swapchain_image_count = static_cast<uint32_t>(swapchain_data->images.size());
+            const auto min_image_count = physical_device_state->surfaceCapabilities.minImageCount;
+            const bool too_many_already_acquired = acquired_images > swapchain_image_count - min_image_count;
+            if (timeout == UINT64_MAX && too_many_already_acquired) {
+                const char *vuid = "INVALID-vuid";
+                if (cmd_version == CMD_VERSION_1)
+                    vuid = "VUID-vkAcquireNextImageKHR-swapchain-01802";
+                else if (cmd_version == CMD_VERSION_2)
+                    vuid = "VUID-vkAcquireNextImage2KHR-swapchain-01803";
+                else
+                    assert(false);
+
+                const uint32_t acquirable = swapchain_image_count - min_image_count + 1;
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+                                HandleToUint64(swapchain), vuid,
+                                "%s: Application has already previously acquired %" PRIu32 " image%s from swapchain. Only %" PRIu32
+                                " %s available to be acquired using a timeout of UINT64_MAX (given the swapchain has %" PRIu32
+                                ", and VkSurfaceCapabilitiesKHR::minImageCount is %" PRIu32 ").",
+                                func_name, acquired_images, acquired_images > 1 ? "s" : "", acquirable,
+                                acquirable > 1 ? "are" : "is", swapchain_image_count, min_image_count);
+            }
+        }
+
+        if (swapchain_data->images.size() == 0) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+                            HandleToUint64(swapchain), kVUID_Core_DrawState_SwapchainImagesNotFound,
+                            "%s: No images found to acquire from. Application probably did not call "
+                            "vkGetSwapchainImagesKHR after swapchain creation.",
+                            func_name);
+        }
+    }
+    return skip;
+}
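+
+// Worked example for the acquire-count check above (placeholder values): with a swapchain of 3 images and
+// VkSurfaceCapabilitiesKHR::minImageCount of 2, at most 3 - 2 + 1 = 2 images may be held acquired at once; a further
+// vkAcquireNextImageKHR with timeout UINT64_MAX is flagged because it may never complete until an acquired image is
+// presented back to the swapchain.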
+
+bool CoreChecks::PreCallValidateAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
+                                                    VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) const {
+    return ValidateAcquireNextImage(device, CMD_VERSION_1, swapchain, timeout, semaphore, fence, pImageIndex,
+                                    "vkAcquireNextImageKHR", "VUID-vkAcquireNextImageKHR-semaphore-03265");
+}
+
+bool CoreChecks::PreCallValidateAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
+                                                     uint32_t *pImageIndex) const {
+    bool skip = false;
+    skip |= ValidateDeviceMaskToPhysicalDeviceCount(pAcquireInfo->deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+                                                    HandleToUint64(pAcquireInfo->swapchain),
+                                                    "VUID-VkAcquireNextImageInfoKHR-deviceMask-01290");
+    skip |= ValidateDeviceMaskToZero(pAcquireInfo->deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+                                     HandleToUint64(pAcquireInfo->swapchain), "VUID-VkAcquireNextImageInfoKHR-deviceMask-01291");
+    skip |= ValidateAcquireNextImage(device, CMD_VERSION_2, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
+                                     pAcquireInfo->fence, pImageIndex, "vkAcquireNextImage2KHR",
+                                     "VUID-VkAcquireNextImageInfoKHR-semaphore-03266");
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
+                                                  const VkAllocationCallbacks *pAllocator) const {
+    const auto surface_state = GetSurfaceState(surface);
+    bool skip = false;
+    if ((surface_state) && (surface_state->swapchain)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+                        HandleToUint64(instance), "VUID-vkDestroySurfaceKHR-surface-01266",
+                        "vkDestroySurfaceKHR() called before its associated VkSwapchainKHR was destroyed.");
+    }
+    return skip;
+}
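+
+// Illustrative application-side sketch (not executed by this layer): the check above enforces destruction order, i.e.
+// any swapchain created from the surface is destroyed before the surface itself:
+//
+//     vkDestroySwapchainKHR(device, swapchain, nullptr);
+//     vkDestroySurfaceKHR(instance, surface, nullptr);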
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+bool CoreChecks::PreCallValidateGetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice,
+                                                                               uint32_t queueFamilyIndex,
+                                                                               struct wl_display *display) const {
+    const auto pd_state = GetPhysicalDeviceState(physicalDevice);
+    return ValidateQueueFamilyIndex(pd_state, queueFamilyIndex,
+                                    "VUID-vkGetPhysicalDeviceWaylandPresentationSupportKHR-queueFamilyIndex-01306",
+                                    "vkGetPhysicalDeviceWaylandPresentationSupportKHR", "queueFamilyIndex");
+}
+#endif  // VK_USE_PLATFORM_WAYLAND_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool CoreChecks::PreCallValidateGetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice,
+                                                                             uint32_t queueFamilyIndex) const {
+    const auto pd_state = GetPhysicalDeviceState(physicalDevice);
+    return ValidateQueueFamilyIndex(pd_state, queueFamilyIndex,
+                                    "VUID-vkGetPhysicalDeviceWin32PresentationSupportKHR-queueFamilyIndex-01309",
+                                    "vkGetPhysicalDeviceWin32PresentationSupportKHR", "queueFamilyIndex");
+}
+#endif  // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+bool CoreChecks::PreCallValidateGetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice,
+                                                                           uint32_t queueFamilyIndex, xcb_connection_t *connection,
+                                                                           xcb_visualid_t visual_id) const {
+    const auto pd_state = GetPhysicalDeviceState(physicalDevice);
+    return ValidateQueueFamilyIndex(pd_state, queueFamilyIndex,
+                                    "VUID-vkGetPhysicalDeviceXcbPresentationSupportKHR-queueFamilyIndex-01312",
+                                    "vkGetPhysicalDeviceXcbPresentationSupportKHR", "queueFamilyIndex");
+}
+#endif  // VK_USE_PLATFORM_XCB_KHR
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+bool CoreChecks::PreCallValidateGetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice,
+                                                                            uint32_t queueFamilyIndex, Display *dpy,
+                                                                            VisualID visualID) const {
+    const auto pd_state = GetPhysicalDeviceState(physicalDevice);
+    return ValidateQueueFamilyIndex(pd_state, queueFamilyIndex,
+                                    "VUID-vkGetPhysicalDeviceXlibPresentationSupportKHR-queueFamilyIndex-01315",
+                                    "vkGetPhysicalDeviceXlibPresentationSupportKHR", "queueFamilyIndex");
+}
+#endif  // VK_USE_PLATFORM_XLIB_KHR
+
+bool CoreChecks::PreCallValidateGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex,
+                                                                   VkSurfaceKHR surface, VkBool32 *pSupported) const {
+    const auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
+    return ValidateQueueFamilyIndex(physical_device_state, queueFamilyIndex,
+                                    "VUID-vkGetPhysicalDeviceSurfaceSupportKHR-queueFamilyIndex-01269",
+                                    "vkGetPhysicalDeviceSurfaceSupportKHR", "queueFamilyIndex");
+}
+
+bool CoreChecks::ValidateDescriptorUpdateTemplate(const char *func_name,
+                                                  const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo) const {
+    bool skip = false;
+    const auto layout = GetDescriptorSetLayoutShared(pCreateInfo->descriptorSetLayout);
+    if (VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET == pCreateInfo->templateType && !layout) {
+        const VulkanTypedHandle ds_typed(pCreateInfo->descriptorSetLayout, kVulkanObjectTypeDescriptorSetLayout);
+        skip |=
+            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, ds_typed.handle,
+                    "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00350",
+                    "%s: Invalid pCreateInfo->descriptorSetLayout (%s)", func_name, report_data->FormatHandle(ds_typed).c_str());
+    } else if (VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR == pCreateInfo->templateType) {
+        auto bind_point = pCreateInfo->pipelineBindPoint;
+        bool valid_bp = (bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) || (bind_point == VK_PIPELINE_BIND_POINT_COMPUTE);
+        if (!valid_bp) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00351",
+                        "%s: Invalid pCreateInfo->pipelineBindPoint (%" PRIu32 ").", func_name, static_cast<uint32_t>(bind_point));
+        }
+        const auto pipeline_layout = GetPipelineLayout(pCreateInfo->pipelineLayout);
+        if (!pipeline_layout) {
+            const VulkanTypedHandle pl_typed(pCreateInfo->pipelineLayout, kVulkanObjectTypePipelineLayout);
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
+                            pl_typed.handle, "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00352",
+                            "%s: Invalid pCreateInfo->pipelineLayout (%s)", func_name, report_data->FormatHandle(pl_typed).c_str());
+        } else {
+            const uint32_t pd_set = pCreateInfo->set;
+            if ((pd_set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[pd_set] ||
+                !pipeline_layout->set_layouts[pd_set]->IsPushDescriptor()) {
+                const VulkanTypedHandle pl_typed(pCreateInfo->pipelineLayout, kVulkanObjectTypePipelineLayout);
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
+                                pl_typed.handle, "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00353",
+                                "%s: pCreateInfo->set (%" PRIu32
+                                ") does not refer to the push descriptor set layout for pCreateInfo->pipelineLayout (%s).",
+                                func_name, pd_set, report_data->FormatHandle(pl_typed).c_str());
+            }
+        }
+    }
+    return skip;
+}
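+
+// Illustrative application-side sketch (not executed by this layer): a push-descriptor template of the kind the checks
+// above validate. The binding/layout/set values are placeholder examples:
+//
+//     VkDescriptorUpdateTemplateEntryKHR entry = {};
+//     entry.dstBinding = 0;
+//     entry.descriptorCount = 1;
+//     entry.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+//     entry.offset = 0;
+//     entry.stride = sizeof(VkDescriptorBufferInfo);
+//
+//     VkDescriptorUpdateTemplateCreateInfoKHR template_ci = {VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR};
+//     template_ci.descriptorUpdateEntryCount = 1;
+//     template_ci.pDescriptorUpdateEntries = &entry;
+//     template_ci.templateType = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR;
+//     template_ci.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
+//     template_ci.pipelineLayout = pipeline_layout;  // 'set' below must be a push descriptor set of this layout
+//     template_ci.set = 0;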
+
+bool CoreChecks::PreCallValidateCreateDescriptorUpdateTemplate(VkDevice device,
+                                                               const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
+                                                               const VkAllocationCallbacks *pAllocator,
+                                                               VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) const {
+    bool skip = ValidateDescriptorUpdateTemplate("vkCreateDescriptorUpdateTemplate()", pCreateInfo);
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCreateDescriptorUpdateTemplateKHR(VkDevice device,
+                                                                  const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
+                                                                  const VkAllocationCallbacks *pAllocator,
+                                                                  VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) const {
+    bool skip = ValidateDescriptorUpdateTemplate("vkCreateDescriptorUpdateTemplateKHR()", pCreateInfo);
+    return skip;
+}
+
+bool CoreChecks::ValidateUpdateDescriptorSetWithTemplate(VkDescriptorSet descriptorSet,
+                                                         VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+                                                         const void *pData) const {
+    bool skip = false;
+    auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
+    if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
+        // Object tracker will report errors for invalid descriptorUpdateTemplate values, avoiding a crash in release builds
+        // but retaining the assert as template support is new enough to want to investigate these in debug builds.
+        assert(0);
+    } else {
+        const TEMPLATE_STATE *template_state = template_map_entry->second.get();
+        // TODO: Validate template push descriptor updates
+        if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
+            skip = ValidateUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
+        }
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
+                                                                VkDescriptorUpdateTemplate descriptorUpdateTemplate,
+                                                                const void *pData) const {
+    return ValidateUpdateDescriptorSetWithTemplate(descriptorSet, descriptorUpdateTemplate, pData);
+}
+
+bool CoreChecks::PreCallValidateUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
+                                                                   VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+                                                                   const void *pData) const {
+    return ValidateUpdateDescriptorSetWithTemplate(descriptorSet, descriptorUpdateTemplate, pData);
+}
+
+bool CoreChecks::PreCallValidateCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer,
+                                                                    VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+                                                                    VkPipelineLayout layout, uint32_t set,
+                                                                    const void *pData) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    const char *const func_name = "vkCmdPushDescriptorSetWithTemplateKHR()";
+    bool skip = false;
+    skip |= ValidateCmd(cb_state, CMD_PUSHDESCRIPTORSETWITHTEMPLATEKHR, func_name);
+
+    const auto layout_data = GetPipelineLayout(layout);
+    const auto dsl = GetDslFromPipelineLayout(layout_data, set);
+    const VulkanTypedHandle layout_typed(layout, kVulkanObjectTypePipelineLayout);
+
+    // Validate the set index points to a push descriptor set and is in range
+    if (dsl) {
+        if (!dsl->IsPushDescriptor()) {
+            skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
+                           layout_typed.handle, "VUID-vkCmdPushDescriptorSetKHR-set-00365",
+                           "%s: Set index %" PRIu32 " does not match push descriptor set layout index for %s.", func_name, set,
+                           report_data->FormatHandle(layout_typed).c_str());
+        }
+    } else if (layout_data && (set >= layout_data->set_layouts.size())) {
+        skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
+                       layout_typed.handle, "VUID-vkCmdPushDescriptorSetKHR-set-00364",
+                       "%s: Set index %" PRIu32 " is outside of range for %s (set < %" PRIu32 ").", func_name, set,
+                       report_data->FormatHandle(layout_typed).c_str(), static_cast<uint32_t>(layout_data->set_layouts.size()));
+    }
+
+    const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
+    if (template_state) {
+        const auto &template_ci = template_state->create_info;
+        static const std::map<VkPipelineBindPoint, std::string> bind_errors = {
+            std::make_pair(VK_PIPELINE_BIND_POINT_GRAPHICS, "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-00366"),
+            std::make_pair(VK_PIPELINE_BIND_POINT_COMPUTE, "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-00366"),
+            std::make_pair(VK_PIPELINE_BIND_POINT_RAY_TRACING_NV,
+                           "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-00366")};
+        skip |= ValidatePipelineBindPoint(cb_state, template_ci.pipelineBindPoint, func_name, bind_errors);
+
+        if (template_ci.templateType != VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_state->commandBuffer), kVUID_Core_PushDescriptorUpdate_TemplateType,
+                            "%s: descriptorUpdateTemplate %s was not created with flag "
+                            "VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR.",
+                            func_name, report_data->FormatHandle(descriptorUpdateTemplate).c_str());
+        }
+        if (template_ci.set != set) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_state->commandBuffer), kVUID_Core_PushDescriptorUpdate_Template_SetMismatched,
+                            "%s: descriptorUpdateTemplate %s created with set %" PRIu32
+                            " does not match command parameter set %" PRIu32 ".",
+                            func_name, report_data->FormatHandle(descriptorUpdateTemplate).c_str(), template_ci.set, set);
+        }
+        if (!CompatForSet(set, layout_data, GetPipelineLayout(template_ci.pipelineLayout))) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(cb_state->commandBuffer), kVUID_Core_PushDescriptorUpdate_Template_LayoutMismatched,
+                            "%s: descriptorUpdateTemplate %s created with %s is incompatible with command parameter "
+                            "%s for set %" PRIu32,
+                            func_name, report_data->FormatHandle(descriptorUpdateTemplate).c_str(),
+                            report_data->FormatHandle(template_ci.pipelineLayout).c_str(),
+                            report_data->FormatHandle(layout).c_str(), set);
+        }
+    }
+
+    if (dsl && template_state) {
+        // Create an empty proxy in order to use the existing descriptor set update validation
+        cvdescriptorset::DescriptorSet proxy_ds(VK_NULL_HANDLE, nullptr, dsl, 0, nullptr, report_data);
+        // Decode the template into a set of write updates
+        cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
+                                                                dsl->GetDescriptorSetLayout());
+        // Validate the decoded update against the proxy_ds
+        skip |= ValidatePushDescriptorsUpdate(&proxy_ds, static_cast<uint32_t>(decoded_template.desc_writes.size()),
+                                              decoded_template.desc_writes.data(), func_name);
+    }
+
+    return skip;
+}
+
+bool CoreChecks::ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(VkPhysicalDevice physicalDevice, uint32_t planeIndex,
+                                                                         const char *api_name) const {
+    bool skip = false;
+    const auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
+    if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called) {
+        if (planeIndex >= physical_device_state->display_plane_property_count) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+                            HandleToUint64(physicalDevice), "VUID-vkGetDisplayPlaneSupportedDisplaysKHR-planeIndex-01249",
+                            "%s(): planeIndex must be in the range [0, %d] that was returned by "
+                            "vkGetPhysicalDeviceDisplayPlanePropertiesKHR "
+                            "or vkGetPhysicalDeviceDisplayPlaneProperties2KHR. Do you have the plane index hardcoded?",
+                            api_name, physical_device_state->display_plane_property_count - 1);
+        }
+    }
+
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateGetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex,
+                                                                    uint32_t *pDisplayCount, VkDisplayKHR *pDisplays) const {
+    bool skip = false;
+    skip |= ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(physicalDevice, planeIndex,
+                                                                    "vkGetDisplayPlaneSupportedDisplaysKHR");
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateGetDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode,
+                                                               uint32_t planeIndex,
+                                                               VkDisplayPlaneCapabilitiesKHR *pCapabilities) const {
+    bool skip = false;
+    skip |= ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(physicalDevice, planeIndex, "vkGetDisplayPlaneCapabilitiesKHR");
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateGetDisplayPlaneCapabilities2KHR(VkPhysicalDevice physicalDevice,
+                                                                const VkDisplayPlaneInfo2KHR *pDisplayPlaneInfo,
+                                                                VkDisplayPlaneCapabilities2KHR *pCapabilities) const {
+    bool skip = false;
+    skip |= ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(physicalDevice, pDisplayPlaneInfo->planeIndex,
+                                                                    "vkGetDisplayPlaneCapabilities2KHR");
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdDebugMarkerBeginEXT(VkCommandBuffer commandBuffer,
+                                                       const VkDebugMarkerMarkerInfoEXT *pMarkerInfo) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    return ValidateCmd(cb_state, CMD_DEBUGMARKERBEGINEXT, "vkCmdDebugMarkerBeginEXT()");
+}
+
+bool CoreChecks::PreCallValidateCmdDebugMarkerEndEXT(VkCommandBuffer commandBuffer) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    return ValidateCmd(cb_state, CMD_DEBUGMARKERENDEXT, "vkCmdDebugMarkerEndEXT()");
+}
+
+bool CoreChecks::PreCallValidateCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query,
+                                                        VkQueryControlFlags flags, uint32_t index) const {
+    if (disabled.query_validation) return false;
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    QueryObject query_obj(queryPool, query, index);
+    const char *cmd_name = "vkCmdBeginQueryIndexedEXT()";
+    bool skip = ValidateBeginQuery(
+        cb_state, query_obj, flags, CMD_BEGINQUERYINDEXEDEXT, cmd_name, "VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-cmdpool",
+        "VUID-vkCmdBeginQueryIndexedEXT-queryType-02338", "VUID-vkCmdBeginQueryIndexedEXT-queryType-00803",
+        "VUID-vkCmdBeginQueryIndexedEXT-queryType-00800", "VUID-vkCmdBeginQueryIndexedEXT-query-00802");
+
+    // Extension specific VU's
+    const auto &query_pool_ci = GetQueryPoolState(query_obj.pool)->createInfo;
+    if (query_pool_ci.queryType == VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT) {
+        if (device_extensions.vk_ext_transform_feedback &&
+            (index >= phys_dev_ext_props.transform_feedback_props.maxTransformFeedbackStreams)) {
+            skip |= log_msg(
+                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdBeginQueryIndexedEXT-queryType-02339",
+                "%s: index %" PRIu32
+                " must be less than VkPhysicalDeviceTransformFeedbackPropertiesEXT::maxTransformFeedbackStreams %" PRIu32 ".",
+                cmd_name, index, phys_dev_ext_props.transform_feedback_props.maxTransformFeedbackStreams);
+        }
+    } else if (index != 0) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(cb_state->commandBuffer), "VUID-vkCmdBeginQueryIndexedEXT-queryType-02340",
+                        "%s: index %" PRIu32
+                        " must be zero if %s was not created with type VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT.",
+                        cmd_name, index, report_data->FormatHandle(queryPool).c_str());
+    }
+    return skip;
+}
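+
+// Illustrative application-side sketch (not executed by this layer): for a VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT
+// pool, index selects the vertex stream and is validated against maxTransformFeedbackStreams; for other query types
+// index must be 0. The stream index below is a placeholder:
+//
+//     vkCmdBeginQueryIndexedEXT(command_buffer, xfb_query_pool, /*query*/ 0, /*flags*/ 0, /*index*/ 1);
+//     // ... draw with transform feedback active ...
+//     vkCmdEndQueryIndexedEXT(command_buffer, xfb_query_pool, /*query*/ 0, /*index*/ 1);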
+
+void CoreChecks::PreCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query,
+                                                      VkQueryControlFlags flags, uint32_t index) {
+    if (disabled.query_validation) return;
+    QueryObject query_obj = {queryPool, query, index};
+    EnqueueVerifyBeginQuery(commandBuffer, query_obj, "vkCmdBeginQueryIndexedEXT()");
+}
+
+bool CoreChecks::PreCallValidateCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query,
+                                                      uint32_t index) const {
+    if (disabled.query_validation) return false;
+    QueryObject query_obj = {queryPool, query, index};
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+    return ValidateCmdEndQuery(cb_state, query_obj, CMD_ENDQUERYINDEXEDEXT, "vkCmdEndQueryIndexedEXT()",
+                               "VUID-vkCmdEndQueryIndexedEXT-commandBuffer-cmdpool", "VUID-vkCmdEndQueryIndexedEXT-None-02342");
+}
+
+bool CoreChecks::PreCallValidateCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle,
+                                                          uint32_t discardRectangleCount,
+                                                          const VkRect2D *pDiscardRectangles) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    // Minimal validation for command buffer state
+    return ValidateCmd(cb_state, CMD_SETDISCARDRECTANGLEEXT, "vkCmdSetDiscardRectangleEXT()");
+}
+
+bool CoreChecks::PreCallValidateCmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer,
+                                                         const VkSampleLocationsInfoEXT *pSampleLocationsInfo) const {
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    // Minimal validation for command buffer state
+    return ValidateCmd(cb_state, CMD_SETSAMPLELOCATIONSEXT, "vkCmdSetSampleLocationsEXT()");
+}
+
+bool CoreChecks::ValidateCreateSamplerYcbcrConversion(const char *func_name,
+                                                      const VkSamplerYcbcrConversionCreateInfo *create_info) const {
+    bool skip = false;
+    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
+        skip |= ValidateCreateSamplerYcbcrConversionANDROID(create_info);
+    } else {  // Not android hardware buffer
+        if (VK_FORMAT_UNDEFINED == create_info->format) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, 0,
+                            "VUID-VkSamplerYcbcrConversionCreateInfo-format-01649",
+                            "%s: CreateInfo format type is VK_FORMAT_UNDEFINED.", func_name);
+        }
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCreateSamplerYcbcrConversion(VkDevice device, const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
+                                                             const VkAllocationCallbacks *pAllocator,
+                                                             VkSamplerYcbcrConversion *pYcbcrConversion) const {
+    return ValidateCreateSamplerYcbcrConversion("vkCreateSamplerYcbcrConversion()", pCreateInfo);
+}
+
+bool CoreChecks::PreCallValidateCreateSamplerYcbcrConversionKHR(VkDevice device,
+                                                                const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
+                                                                const VkAllocationCallbacks *pAllocator,
+                                                                VkSamplerYcbcrConversion *pYcbcrConversion) const {
+    return ValidateCreateSamplerYcbcrConversion("vkCreateSamplerYcbcrConversionKHR()", pCreateInfo);
+}
+
+bool CoreChecks::PreCallValidateGetBufferDeviceAddressKHR(VkDevice device, const VkBufferDeviceAddressInfoKHR *pInfo) const {
+    bool skip = false;
+
+    if (!enabled_features.buffer_device_address.bufferDeviceAddress &&
+        !enabled_features.buffer_device_address_ext.bufferDeviceAddress) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+                        HandleToUint64(pInfo->buffer), "VUID-vkGetBufferDeviceAddressKHR-bufferDeviceAddress-03324",
+                        "The bufferDeviceAddress feature must: be enabled.");
+    }
+
+    if (physical_device_count > 1 && !enabled_features.buffer_device_address.bufferDeviceAddressMultiDevice &&
+        !enabled_features.buffer_device_address_ext.bufferDeviceAddressMultiDevice) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+                        HandleToUint64(pInfo->buffer), "VUID-vkGetBufferDeviceAddressKHR-device-03325",
+                        "If device was created with multiple physical devices, then the "
+                        "bufferDeviceAddressMultiDevice feature must: be enabled.");
+    }
+
+    const auto buffer_state = GetBufferState(pInfo->buffer);
+    if (buffer_state) {
+        if (!(buffer_state->createInfo.flags & VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR)) {
+            skip |= ValidateMemoryIsBoundToBuffer(buffer_state, "vkGetBufferDeviceAddressEXT()",
+                                                  "VUID-VkBufferDeviceAddressInfoKHR-buffer-02600");
+        }
+
+        skip |= ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR, true,
+                                         "VUID-VkBufferDeviceAddressInfoKHR-buffer-02601", "vkGetBufferDeviceAddressEXT()",
+                                         "VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT");
+    }
+
+    return skip;
+}
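+
+// Illustrative application-side sketch (not executed by this layer): the checks above expect the bufferDeviceAddress
+// feature to be enabled and the buffer to carry the device-address usage flag. Size/usage values are placeholders:
+//
+//     VkBufferCreateInfo buffer_ci = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
+//     buffer_ci.size = 4096;
+//     buffer_ci.usage = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
+//     // ... vkCreateBuffer, allocate and bind memory ...
+//     VkBufferDeviceAddressInfoKHR addr_info = {VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR};
+//     addr_info.buffer = buffer;
+//     VkDeviceAddress addr = vkGetBufferDeviceAddressKHR(device, &addr_info);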
+
+bool CoreChecks::PreCallValidateGetBufferDeviceAddressEXT(VkDevice device, const VkBufferDeviceAddressInfoEXT *pInfo) const {
+    return PreCallValidateGetBufferDeviceAddressKHR(device, (const VkBufferDeviceAddressInfoKHR *)pInfo);
+}
+
+bool CoreChecks::PreCallValidateGetBufferOpaqueCaptureAddressKHR(VkDevice device, const VkBufferDeviceAddressInfoKHR *pInfo) const {
+    bool skip = false;
+
+    if (!enabled_features.buffer_device_address.bufferDeviceAddress) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+                        HandleToUint64(pInfo->buffer), "VUID-vkGetBufferOpaqueCaptureAddressKHR-None-03326",
+                        "The bufferDeviceAddress feature must: be enabled.");
+    }
+
+    if (physical_device_count > 1 && !enabled_features.buffer_device_address.bufferDeviceAddressMultiDevice) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+                        HandleToUint64(pInfo->buffer), "VUID-vkGetBufferOpaqueCaptureAddressKHR-device-03327",
+                        "If device was created with multiple physical devices, then the "
+                        "bufferDeviceAddressMultiDevice feature must: be enabled.");
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateGetDeviceMemoryOpaqueCaptureAddressKHR(
+    VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfoKHR *pInfo) const {
+    bool skip = false;
+
+    if (!enabled_features.buffer_device_address.bufferDeviceAddress) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+                        HandleToUint64(pInfo->memory), "VUID-vkGetDeviceMemoryOpaqueCaptureAddressKHR-None-03334",
+                        "The bufferDeviceAddress feature must: be enabled.");
+    }
+
+    if (physical_device_count > 1 && !enabled_features.buffer_device_address.bufferDeviceAddressMultiDevice) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+                        HandleToUint64(pInfo->memory), "VUID-vkGetDeviceMemoryOpaqueCaptureAddressKHR-device-03335",
+                        "If device was created with multiple physical devices, then the "
+                        "bufferDeviceAddressMultiDevice feature must: be enabled.");
+    }
+
+    const DEVICE_MEMORY_STATE *mem_info = GetDevMemState(pInfo->memory);
+    if (mem_info) {
+        auto chained_flags_struct = lvl_find_in_chain<VkMemoryAllocateFlagsInfo>(mem_info->alloc_info.pNext);
+        if (!chained_flags_struct || !(chained_flags_struct->flags & VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+                            HandleToUint64(pInfo->memory), "VUID-VkDeviceMemoryOpaqueCaptureAddressInfoKHR-memory-03336",
+                            "memory must have been allocated with VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR.");
+        }
+    }
+
+    return skip;
+}
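+
+// Illustrative application-side sketch (not executed by this layer): to satisfy the memory check above, the allocation
+// chains a VkMemoryAllocateFlagsInfo with the device-address flag. Size and memory type index are placeholders:
+//
+//     VkMemoryAllocateFlagsInfo flags_info = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO};
+//     flags_info.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
+//     VkMemoryAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO};
+//     alloc_info.pNext = &flags_info;
+//     alloc_info.allocationSize = 4096;
+//     alloc_info.memoryTypeIndex = memory_type_index;
+//     vkAllocateMemory(device, &alloc_info, nullptr, &memory);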
+
+bool CoreChecks::ValidateQueryRange(VkDevice device, VkQueryPool queryPool, uint32_t totalCount, uint32_t firstQuery,
+                                    uint32_t queryCount, const char *vuid_badfirst, const char *vuid_badrange) const {
+    bool skip = false;
+
+    if (firstQuery >= totalCount) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        vuid_badfirst, "firstQuery (%" PRIu32 ") greater than or equal to query pool count (%" PRIu32 ") for %s",
+                        firstQuery, totalCount, report_data->FormatHandle(queryPool).c_str());
+    }
+
+    if ((firstQuery + queryCount) > totalCount) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        vuid_badrange, "Query range [%" PRIu32 ", %" PRIu32 ") goes beyond query pool count (%" PRIu32 ") for %s",
+                        firstQuery, firstQuery + queryCount, totalCount, report_data->FormatHandle(queryPool).c_str());
+    }
+
+    return skip;
+}
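+
+// Worked example for the range check above (placeholder values): for a query pool created with queryCount = 8, a reset
+// of firstQuery = 6 with queryCount = 4 covers queries [6, 10), which exceeds the pool count of 8 and is flagged.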
+
+bool CoreChecks::PreCallValidateResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
+                                                  uint32_t queryCount) const {
+    if (disabled.query_validation) return false;
+
+    bool skip = false;
+
+    if (!enabled_features.host_query_reset_features.hostQueryReset) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        "VUID-vkResetQueryPoolEXT-None-02665", "Host query reset not enabled for device");
+    }
+
+    const auto query_pool_state = GetQueryPoolState(queryPool);
+    if (query_pool_state) {
+        skip |= ValidateQueryRange(device, queryPool, query_pool_state->createInfo.queryCount, firstQuery, queryCount,
+                                   "VUID-vkResetQueryPoolEXT-firstQuery-02666", "VUID-vkResetQueryPoolEXT-firstQuery-02667");
+    }
+
+    return skip;
+}
+
+VkResult CoreChecks::CoreLayerCreateValidationCacheEXT(VkDevice device, const VkValidationCacheCreateInfoEXT *pCreateInfo,
+                                                       const VkAllocationCallbacks *pAllocator,
+                                                       VkValidationCacheEXT *pValidationCache) {
+    *pValidationCache = ValidationCache::Create(pCreateInfo);
+    return *pValidationCache ? VK_SUCCESS : VK_ERROR_INITIALIZATION_FAILED;
+}
+
+void CoreChecks::CoreLayerDestroyValidationCacheEXT(VkDevice device, VkValidationCacheEXT validationCache,
+                                                    const VkAllocationCallbacks *pAllocator) {
+    delete CastFromHandle<ValidationCache *>(validationCache);
+}
+
+VkResult CoreChecks::CoreLayerGetValidationCacheDataEXT(VkDevice device, VkValidationCacheEXT validationCache, size_t *pDataSize,
+                                                        void *pData) {
+    size_t inSize = *pDataSize;
+    CastFromHandle<ValidationCache *>(validationCache)->Write(pDataSize, pData);
+    return (pData && *pDataSize != inSize) ? VK_INCOMPLETE : VK_SUCCESS;
+}
+
+VkResult CoreChecks::CoreLayerMergeValidationCachesEXT(VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount,
+                                                       const VkValidationCacheEXT *pSrcCaches) {
+    bool skip = false;
+    auto dst = CastFromHandle<ValidationCache *>(dstCache);
+    VkResult result = VK_SUCCESS;
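+    // Once dstCache is found in pSrcCaches, the error is reported and no further merges are performed.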
+    for (uint32_t i = 0; i < srcCacheCount; i++) {
+        auto src = CastFromHandle<const ValidationCache *>(pSrcCaches[i]);
+        if (src == dst) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT, 0,
+                            "VUID-vkMergeValidationCachesEXT-dstCache-01536",
+                            "vkMergeValidationCachesEXT: dstCache (0x%" PRIx64 ") must not appear in pSrcCaches array.",
+                            HandleToUint64(dstCache));
+            result = VK_ERROR_VALIDATION_FAILED_EXT;
+        }
+        if (!skip) {
+            dst->Merge(src);
+        }
+    }
+
+    return result;
+}
+
+bool CoreChecks::ValidateCmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask, const char *func_name) const {
+    bool skip = false;
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    skip |= ValidateCmd(cb_state, CMD_SETDEVICEMASK, func_name);
+    skip |= ValidateDeviceMaskToPhysicalDeviceCount(deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                                    HandleToUint64(commandBuffer), "VUID-vkCmdSetDeviceMask-deviceMask-00108");
+    skip |= ValidateDeviceMaskToZero(deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, HandleToUint64(commandBuffer),
+                                     "VUID-vkCmdSetDeviceMask-deviceMask-00109");
+    skip |= ValidateDeviceMaskToCommandBuffer(cb_state, deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                              HandleToUint64(commandBuffer), "VUID-vkCmdSetDeviceMask-deviceMask-00110");
+    if (cb_state->activeRenderPass) {
+        skip |= ValidateDeviceMaskToRenderPass(cb_state, deviceMask, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                               HandleToUint64(commandBuffer), "VUID-vkCmdSetDeviceMask-deviceMask-00111");
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask) const {
+    return ValidateCmdSetDeviceMask(commandBuffer, deviceMask, "vkCmdSetDeviceMask()");
+}
+
+bool CoreChecks::PreCallValidateCmdSetDeviceMaskKHR(VkCommandBuffer commandBuffer, uint32_t deviceMask) const {
+    return ValidateCmdSetDeviceMask(commandBuffer, deviceMask, "vkCmdSetDeviceMaskKHR()");
+}
+
+bool CoreChecks::PreCallValidateGetSemaphoreCounterValueKHR(VkDevice device, VkSemaphore semaphore, uint64_t *pValue) const {
+    bool skip = false;
+    const auto *pSemaphore = GetSemaphoreState(semaphore);
+    if (pSemaphore && pSemaphore->type != VK_SEMAPHORE_TYPE_TIMELINE_KHR) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+                        HandleToUint64(semaphore), "VUID-vkGetSemaphoreCounterValueKHR-semaphore-03255",
+                        "vkGetSemaphoreCounterValueKHR: semaphore %s must be of VK_SEMAPHORE_TYPE_TIMELINE_KHR type",
+                        report_data->FormatHandle(semaphore).c_str());
+    }
+    return skip;
+}
+bool CoreChecks::ValidateQueryPoolStride(const std::string &vuid_not_64, const std::string &vuid_64, const VkDeviceSize stride,
+                                         const char *parameter_name, const uint64_t parameter_value,
+                                         const VkQueryResultFlags flags) const {
+    bool skip = false;
+    if (flags & VK_QUERY_RESULT_64_BIT) {
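+        // 64-bit results: the stride and the offset-style parameter must be multiples of 8 (low three bits clear).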
+        static const int condition_multiples = 0b0111;
+        if ((stride & condition_multiples) || (parameter_value & condition_multiples)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid_64,
+                            "stride %" PRIx64 " or %s %" PRIx64 " is invalid.", stride, parameter_name, parameter_value);
+        }
+    } else {
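+        // 32-bit results: the stride and the offset-style parameter must be multiples of 4 (low two bits clear).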
+        static const int condition_multiples = 0b0011;
+        if ((stride & condition_multiples) || (parameter_value & condition_multiples)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid_not_64,
+                            "stride %" PRIx64 " or %s %" PRIx64 " is invalid.", stride, parameter_name, parameter_value);
+        }
+    }
+    return skip;
+}
+
+bool CoreChecks::ValidateCmdDrawStrideWithStruct(VkCommandBuffer commandBuffer, const std::string &vuid, const uint32_t stride,
+                                                 const char *struct_name, const uint32_t struct_size) const {
+    bool skip = false;
+    static const int condition_multiples = 0b0011;
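+    // The stride must be a multiple of 4 and at least as large as the indirect-command struct it steps over.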
+    if ((stride & condition_multiples) || (stride < struct_size)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), vuid, "stride %d is invalid or less than sizeof(%s) %d.", stride,
+                        struct_name, struct_size);
+    }
+    return skip;
+}
+
+bool CoreChecks::ValidateCmdDrawStrideWithBuffer(VkCommandBuffer commandBuffer, const std::string &vuid, const uint32_t stride,
+                                                 const char *struct_name, const uint32_t struct_size, const uint32_t drawCount,
+                                                 const VkDeviceSize offset, const BUFFER_STATE *buffer_state) const {
+    bool skip = false;
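+    // Compute the byte offset one past the last indirect struct read; it must not exceed the bound buffer's size.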
+    uint64_t validation_value = stride * (drawCount - 1) + offset + struct_size;
+    if (validation_value > buffer_state->createInfo.size) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), vuid,
+                        "stride[%d] * (drawCount[%d] - 1) + offset[%" PRIx64 "] + sizeof(%s)[%d] = %" PRIx64
+                        " is greater than the size[%" PRIx64 "] of %s.",
+                        stride, drawCount, offset, struct_name, struct_size, validation_value, buffer_state->createInfo.size,
+                        report_data->FormatHandle(buffer_state->buffer).c_str());
+    }
+    return skip;
+}
+
+void PIPELINE_STATE::initGraphicsPipeline(const ValidationStateTracker *state_data, const VkGraphicsPipelineCreateInfo *pCreateInfo,
+                                          std::shared_ptr<const RENDER_PASS_STATE> &&rpstate) {
+    reset();
+    bool uses_color_attachment = false;
+    bool uses_depthstencil_attachment = false;
+    if (pCreateInfo->subpass < rpstate->createInfo.subpassCount) {
+        const auto &subpass = rpstate->createInfo.pSubpasses[pCreateInfo->subpass];
+
+        for (uint32_t i = 0; i < subpass.colorAttachmentCount; ++i) {
+            if (subpass.pColorAttachments[i].attachment != VK_ATTACHMENT_UNUSED) {
+                uses_color_attachment = true;
+                break;
+            }
+        }
+
+        if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
+            uses_depthstencil_attachment = true;
+        }
+    }
+    graphicsPipelineCI.initialize(pCreateInfo, uses_color_attachment, uses_depthstencil_attachment);
+    if (graphicsPipelineCI.pInputAssemblyState) {
+        topology_at_rasterizer = graphicsPipelineCI.pInputAssemblyState->topology;
+    }
+
+    stage_state.resize(pCreateInfo->stageCount);
+    for (uint32_t i = 0; i < pCreateInfo->stageCount; i++) {
+        const VkPipelineShaderStageCreateInfo *pPSSCI = &pCreateInfo->pStages[i];
+        this->duplicate_shaders |= this->active_shaders & pPSSCI->stage;
+        this->active_shaders |= pPSSCI->stage;
+        state_data->RecordPipelineShaderStage(pPSSCI, this, &stage_state[i]);
+    }
+
+    if (graphicsPipelineCI.pVertexInputState) {
+        const auto pVICI = graphicsPipelineCI.pVertexInputState;
+        if (pVICI->vertexBindingDescriptionCount) {
+            this->vertex_binding_descriptions_ = std::vector<VkVertexInputBindingDescription>(
+                pVICI->pVertexBindingDescriptions, pVICI->pVertexBindingDescriptions + pVICI->vertexBindingDescriptionCount);
+
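+            // Map each binding number to its index in vertex_binding_descriptions_ for direct lookup by binding.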
+            this->vertex_binding_to_index_map_.reserve(pVICI->vertexBindingDescriptionCount);
+            for (uint32_t i = 0; i < pVICI->vertexBindingDescriptionCount; ++i) {
+                this->vertex_binding_to_index_map_[pVICI->pVertexBindingDescriptions[i].binding] = i;
+            }
+        }
+        if (pVICI->vertexAttributeDescriptionCount) {
+            this->vertex_attribute_descriptions_ = std::vector<VkVertexInputAttributeDescription>(
+                pVICI->pVertexAttributeDescriptions, pVICI->pVertexAttributeDescriptions + pVICI->vertexAttributeDescriptionCount);
+            for (uint32_t i = 0; i < pVICI->vertexAttributeDescriptionCount; ++i) {
+                const auto attribute_format = pVICI->pVertexAttributeDescriptions[i].format;
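+                // The required alignment defaults to the format's element size; for texel formats it is the
+                // per-component size (element size divided by the channel count).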
+                VkDeviceSize vtx_attrib_req_alignment = FormatElementSize(attribute_format);
+                if (FormatElementIsTexel(attribute_format)) {
+                    vtx_attrib_req_alignment = SafeDivision(vtx_attrib_req_alignment, FormatChannelCount(attribute_format));
+                }
+                this->vertex_attribute_alignments_.push_back(vtx_attrib_req_alignment);
+            }
+        }
+    }
+    if (graphicsPipelineCI.pColorBlendState) {
+        const auto pCBCI = graphicsPipelineCI.pColorBlendState;
+        if (pCBCI->attachmentCount) {
+            this->attachments =
+                std::vector<VkPipelineColorBlendAttachmentState>(pCBCI->pAttachments, pCBCI->pAttachments + pCBCI->attachmentCount);
+        }
+    }
+    rp_state = rpstate;
+}
+
+void PIPELINE_STATE::initComputePipeline(const ValidationStateTracker *state_data, const VkComputePipelineCreateInfo *pCreateInfo) {
+    reset();
+    computePipelineCI.initialize(pCreateInfo);
+    switch (computePipelineCI.stage.stage) {
+        case VK_SHADER_STAGE_COMPUTE_BIT:
+            this->active_shaders |= VK_SHADER_STAGE_COMPUTE_BIT;
+            stage_state.resize(1);
+            state_data->RecordPipelineShaderStage(&pCreateInfo->stage, this, &stage_state[0]);
+            break;
+        default:
+            // TODO : Flag error
+            break;
+    }
+}
+
+void PIPELINE_STATE::initRayTracingPipelineNV(const ValidationStateTracker *state_data,
+                                              const VkRayTracingPipelineCreateInfoNV *pCreateInfo) {
+    reset();
+    raytracingPipelineCI.initialize(pCreateInfo);
+
+    stage_state.resize(pCreateInfo->stageCount);
+    for (uint32_t stage_index = 0; stage_index < pCreateInfo->stageCount; stage_index++) {
+        const auto &shader_stage = pCreateInfo->pStages[stage_index];
+        switch (shader_stage.stage) {
+            case VK_SHADER_STAGE_RAYGEN_BIT_NV:
+                this->active_shaders |= VK_SHADER_STAGE_RAYGEN_BIT_NV;
+                break;
+            case VK_SHADER_STAGE_ANY_HIT_BIT_NV:
+                this->active_shaders |= VK_SHADER_STAGE_ANY_HIT_BIT_NV;
+                break;
+            case VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV:
+                this->active_shaders |= VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV;
+                break;
+            case VK_SHADER_STAGE_MISS_BIT_NV:
+                this->active_shaders |= VK_SHADER_STAGE_MISS_BIT_NV;
+                break;
+            case VK_SHADER_STAGE_INTERSECTION_BIT_NV:
+                this->active_shaders |= VK_SHADER_STAGE_INTERSECTION_BIT_NV;
+                break;
+            case VK_SHADER_STAGE_CALLABLE_BIT_NV:
+                this->active_shaders |= VK_SHADER_STAGE_CALLABLE_BIT_NV;
+                break;
+            default:
+                // TODO : Flag error
+                break;
+        }
+        state_data->RecordPipelineShaderStage(&shader_stage, this, &stage_state[stage_index]);
+    }
+}
diff --git a/src/third_party/vulkan-validation-layers/src/layers/core_validation.h b/src/third_party/vulkan-validation-layers/src/layers/core_validation.h
new file mode 100644
index 0000000..b842250
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/core_validation.h
@@ -0,0 +1,1056 @@
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (C) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
+ * Author: Tobin Ehlis <tobine@google.com>
+ * Author: Chris Forbes <chrisf@ijw.co.nz>
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ */
+
+#pragma once
+
+#include "state_tracker.h"
+#include "gpu_validation.h"
+#include "shader_validation.h"
+
+class CoreChecks : public ValidationStateTracker {
+  public:
+    using StateTracker = ValidationStateTracker;
+    std::unordered_set<uint64_t> ahb_ext_formats_set;
+    GlobalQFOTransferBarrierMap<VkImageMemoryBarrier> qfo_release_image_barrier_map;
+    GlobalQFOTransferBarrierMap<VkBufferMemoryBarrier> qfo_release_buffer_barrier_map;
+    unordered_map<VkImage, std::vector<ImageSubresourcePair>> imageSubresourceMap;
+    using ImageSubresPairLayoutMap = std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_STATE>;
+    ImageSubresPairLayoutMap imageLayoutMap;
+
+    void IncrementCommandCount(VkCommandBuffer commandBuffer);
+
+    bool VerifyQueueStateToSeq(const QUEUE_STATE* initial_queue, uint64_t initial_seq) const;
+    bool ValidateSetMemBinding(VkDeviceMemory mem, const VulkanTypedHandle& typed_handle, const char* apiName) const;
+    bool ValidateDeviceQueueFamily(uint32_t queue_family, const char* cmd_name, const char* parameter_name, const char* error_code,
+                                   bool optional) const;
+    bool ValidateBindBufferMemory(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset, const char* api_name) const;
+    bool ValidateGetImageMemoryRequirements2(const VkImageMemoryRequirementsInfo2* pInfo) const;
+    bool CheckCommandBuffersInFlight(const COMMAND_POOL_STATE* pPool, const char* action, const char* error_code) const;
+    bool CheckCommandBufferInFlight(const CMD_BUFFER_STATE* cb_node, const char* action, const char* error_code) const;
+    bool VerifyQueueStateToFence(VkFence fence) const;
+    void StoreMemRanges(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size);
+    bool ValidateIdleDescriptorSet(VkDescriptorSet set, const char* func_str) const;
+    void InitializeShadowMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void** ppData);
+    bool ValidatePipelineLocked(std::vector<std::shared_ptr<PIPELINE_STATE>> const& pPipelines, int pipelineIndex) const;
+    bool ValidatePipelineUnlocked(const PIPELINE_STATE* pPipeline, uint32_t pipelineIndex) const;
+    bool ValidImageBufferQueue(const CMD_BUFFER_STATE* cb_node, const VulkanTypedHandle& object, uint32_t queueFamilyIndex,
+                               uint32_t count, const uint32_t* indices) const;
+    bool ValidateFenceForSubmit(const FENCE_STATE* pFence) const;
+    bool ValidateSemaphoresForSubmit(VkQueue queue, const VkSubmitInfo* submit,
+                                     std::unordered_set<VkSemaphore>* unsignaled_sema_arg,
+                                     std::unordered_set<VkSemaphore>* signaled_sema_arg,
+                                     std::unordered_set<VkSemaphore>* internal_sema_arg,
+                                     unordered_map<VkSemaphore, std::set<uint64_t>>* timeline_values_arg) const;
+    bool ValidateMaxTimelineSemaphoreValueDifference(VkQueue queue, VkSemaphore semaphore, const uint64_t semaphoreHandleValue,
+                                                     unordered_map<VkSemaphore, std::set<uint64_t>>* timeline_values_arg,
+                                                     const char* func_name, const char* vuid) const;
+    bool ValidateCommandBuffersForSubmit(VkQueue queue, const VkSubmitInfo* submit,
+                                         ImageSubresPairLayoutMap* localImageLayoutMap_arg, QueryMap* local_query_to_state_map,
+                                         std::vector<VkCommandBuffer>* current_cmds_arg) const;
+    bool ValidateStatus(const CMD_BUFFER_STATE* pNode, CBStatusFlags status_mask, VkFlags msg_flags, const char* fail_msg,
+                        const char* msg_code) const;
+    bool ValidateDrawStateFlags(const CMD_BUFFER_STATE* pCB, const PIPELINE_STATE* pPipe, bool indexed, const char* msg_code) const;
+    bool LogInvalidAttachmentMessage(const char* type1_string, const RENDER_PASS_STATE* rp1_state, const char* type2_string,
+                                     const RENDER_PASS_STATE* rp2_state, uint32_t primary_attach, uint32_t secondary_attach,
+                                     const char* msg, const char* caller, const char* error_code) const;
+    bool ValidateStageMaskGsTsEnables(VkPipelineStageFlags stageMask, const char* caller, const char* geo_error_id,
+                                      const char* tess_error_id, const char* mesh_error_id, const char* task_error_id) const;
+    bool ValidateMapMemRange(const DEVICE_MEMORY_STATE* mem_info, VkDeviceSize offset, VkDeviceSize size) const;
+    bool ValidatePushConstantRange(const uint32_t offset, const uint32_t size, const char* caller_name, uint32_t index) const;
+    bool ValidateRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR* pCreateInfo) const;
+    bool ValidateAttachmentCompatibility(const char* type1_string, const RENDER_PASS_STATE* rp1_state, const char* type2_string,
+                                         const RENDER_PASS_STATE* rp2_state, uint32_t primary_attach, uint32_t secondary_attach,
+                                         const char* caller, const char* error_code) const;
+    bool ValidateSubpassCompatibility(const char* type1_string, const RENDER_PASS_STATE* rp1_state, const char* type2_string,
+                                      const RENDER_PASS_STATE* rp2_state, const int subpass, const char* caller,
+                                      const char* error_code) const;
+    bool ValidateRenderPassCompatibility(const char* type1_string, const RENDER_PASS_STATE* rp1_state, const char* type2_string,
+                                         const RENDER_PASS_STATE* rp2_state, const char* caller, const char* error_code) const;
+    bool ReportInvalidCommandBuffer(const CMD_BUFFER_STATE* cb_state, const char* call_source) const;
+    bool ValidateQueueFamilyIndex(const PHYSICAL_DEVICE_STATE* pd_state, uint32_t requested_queue_family, const char* err_code,
+                                  const char* cmd_name, const char* queue_family_var_name) const;
+    bool ValidateDeviceQueueCreateInfos(const PHYSICAL_DEVICE_STATE* pd_state, uint32_t info_count,
+                                        const VkDeviceQueueCreateInfo* infos) const;
+
+    bool ValidatePipelineVertexDivisors(std::vector<std::shared_ptr<PIPELINE_STATE>> const& pipe_state_vec, const uint32_t count,
+                                        const VkGraphicsPipelineCreateInfo* pipe_cis) const;
+    void EnqueueSubmitTimeValidateImageBarrierAttachment(const char* func_name, CMD_BUFFER_STATE* cb_state,
+                                                         uint32_t imageMemBarrierCount,
+                                                         const VkImageMemoryBarrier* pImageMemBarriers);
+    bool ValidateImageBarrierAttachment(const char* funcName, CMD_BUFFER_STATE const* cb_state, VkFramebuffer framebuffer,
+                                        uint32_t active_subpass, const safe_VkSubpassDescription2KHR& sub_desc,
+                                        const VulkanTypedHandle& rp_handle, uint32_t img_index,
+                                        const VkImageMemoryBarrier& img_barrier) const;
+    static bool ValidateConcurrentBarrierAtSubmit(const ValidationStateTracker* state_data, const QUEUE_STATE* queue_data,
+                                                  const char* func_name, const CMD_BUFFER_STATE* cb_state,
+                                                  const VulkanTypedHandle& typed_handle, uint32_t src_queue_family,
+                                                  uint32_t dst_queue_family);
+    bool ValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, RenderPassCreateVersion rp_version,
+                                    const VkRenderPassBeginInfo* pRenderPassBegin) const;
+    bool ValidateDependencies(FRAMEBUFFER_STATE const* framebuffer, RENDER_PASS_STATE const* renderPass) const;
+    bool ValidateBarriers(const char* funcName, const CMD_BUFFER_STATE* cb_state, VkPipelineStageFlags src_stage_mask,
+                          VkPipelineStageFlags dst_stage_mask, uint32_t memBarrierCount, const VkMemoryBarrier* pMemBarriers,
+                          uint32_t bufferBarrierCount, const VkBufferMemoryBarrier* pBufferMemBarriers,
+                          uint32_t imageMemBarrierCount, const VkImageMemoryBarrier* pImageMemBarriers) const;
+    bool ValidateBarrierQueueFamilies(const char* func_name, const CMD_BUFFER_STATE* cb_state, const VkImageMemoryBarrier& barrier,
+                                      const IMAGE_STATE* state_data) const;
+    bool ValidateBarrierQueueFamilies(const char* func_name, const CMD_BUFFER_STATE* cb_state, const VkBufferMemoryBarrier& barrier,
+                                      const BUFFER_STATE* state_data) const;
+    bool ValidateCreateSwapchain(const char* func_name, VkSwapchainCreateInfoKHR const* pCreateInfo,
+                                 const SURFACE_STATE* surface_state, const SWAPCHAIN_NODE* old_swapchain_state) const;
+    bool ValidatePipelineBindPoint(const CMD_BUFFER_STATE* cb_state, VkPipelineBindPoint bind_point, const char* func_name,
+                                   const std::map<VkPipelineBindPoint, std::string>& bind_errors) const;
+    bool ValidateMemoryIsMapped(const char* funcName, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) const;
+    bool ValidateAndCopyNoncoherentMemoryToDriver(uint32_t mem_range_count, const VkMappedMemoryRange* mem_ranges) const;
+    void CopyNoncoherentMemoryFromDriver(uint32_t mem_range_count, const VkMappedMemoryRange* mem_ranges);
+    bool ValidateMappedMemoryRangeDeviceLimits(const char* func_name, uint32_t mem_range_count,
+                                               const VkMappedMemoryRange* mem_ranges) const;
+    BarrierOperationsType ComputeBarrierOperationsType(const CMD_BUFFER_STATE* cb_state, uint32_t buffer_barrier_count,
+                                                       const VkBufferMemoryBarrier* buffer_barriers, uint32_t image_barrier_count,
+                                                       const VkImageMemoryBarrier* image_barriers) const;
+    bool ValidateStageMasksAgainstQueueCapabilities(const CMD_BUFFER_STATE* cb_state, VkPipelineStageFlags source_stage_mask,
+                                                    VkPipelineStageFlags dest_stage_mask, BarrierOperationsType barrier_op_type,
+                                                    const char* function, const char* error_code) const;
+    bool ValidateRenderPassImageBarriers(const char* funcName, const CMD_BUFFER_STATE* cb_state, uint32_t active_subpass,
+                                         const safe_VkSubpassDescription2KHR& sub_desc, const VulkanTypedHandle& rp_handle,
+                                         const safe_VkSubpassDependency2KHR* dependencies,
+                                         const std::vector<uint32_t>& self_dependencies, uint32_t image_mem_barrier_count,
+                                         const VkImageMemoryBarrier* image_barriers) const;
+    bool ValidateSecondaryCommandBufferState(const CMD_BUFFER_STATE* pCB, const CMD_BUFFER_STATE* pSubCB) const;
+    bool ValidateFramebuffer(VkCommandBuffer primaryBuffer, const CMD_BUFFER_STATE* pCB, VkCommandBuffer secondaryBuffer,
+                             const CMD_BUFFER_STATE* pSubCB, const char* caller) const;
+    bool ValidateDescriptorUpdateTemplate(const char* func_name, const VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo) const;
+    bool ValidateCreateSamplerYcbcrConversion(const char* func_name, const VkSamplerYcbcrConversionCreateInfo* create_info) const;
+    bool ValidateImportFence(VkFence fence, const char* caller_name) const;
+    bool ValidateAcquireNextImage(VkDevice device, CommandVersion cmd_version, VkSwapchainKHR swapchain, uint64_t timeout,
+                                  VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex, const char* func_name,
+                                  const char* semaphore_type_vuid) const;
+    bool VerifyRenderAreaBounds(const VkRenderPassBeginInfo* pRenderPassBegin) const;
+    bool VerifyFramebufferAndRenderPassImageViews(const VkRenderPassBeginInfo* pRenderPassBeginInfo) const;
+    bool ValidatePrimaryCommandBuffer(const CMD_BUFFER_STATE* pCB, char const* cmd_name, const char* error_code) const;
+    void RecordCmdNextSubpassLayouts(VkCommandBuffer commandBuffer, VkSubpassContents contents);
+    bool ValidateCmdEndRenderPass(RenderPassCreateVersion rp_version, VkCommandBuffer commandBuffer) const;
+    void RecordCmdEndRenderPassLayouts(VkCommandBuffer commandBuffer);
+    bool ValidateFramebufferCreateInfo(const VkFramebufferCreateInfo* pCreateInfo) const;
+    bool MatchUsage(uint32_t count, const VkAttachmentReference2KHR* attachments, const VkFramebufferCreateInfo* fbci,
+                    VkImageUsageFlagBits usage_flag, const char* error_code) const;
+    bool IsImageLayoutReadOnly(VkImageLayout layout) const;
+    bool CheckDependencyExists(const uint32_t subpass, const VkImageLayout layout,
+                               const std::vector<SubpassLayout>& dependent_subpasses, const std::vector<DAGNode>& subpass_to_node,
+                               bool& skip) const;
+    bool CheckPreserved(const VkRenderPassCreateInfo2KHR* pCreateInfo, const int index, const uint32_t attachment,
+                        const std::vector<DAGNode>& subpass_to_node, int depth, bool& skip) const;
+    bool ValidateBindImageMemory(const VkBindImageMemoryInfo& bindInfo, const char* api_name) const;
+    bool ValidateGetPhysicalDeviceDisplayPlanePropertiesKHRQuery(VkPhysicalDevice physicalDevice, uint32_t planeIndex,
+                                                                 const char* api_name) const;
+    static bool ValidateCopyQueryPoolResults(const ValidationStateTracker* state_data, VkCommandBuffer commandBuffer,
+                                             VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount,
+                                             VkQueryResultFlags flags, QueryMap* localQueryToStateMap);
+    static bool VerifyQueryIsReset(const ValidationStateTracker* state_data, VkCommandBuffer commandBuffer, QueryObject query_obj,
+                                   const char* func_name, QueryMap* localQueryToStateMap);
+    bool ValidateImportSemaphore(VkSemaphore semaphore, const char* caller_name) const;
+    bool ValidateBeginQuery(const CMD_BUFFER_STATE* cb_state, const QueryObject& query_obj, VkFlags flags, CMD_TYPE cmd,
+                            const char* cmd_name, const char* vuid_queue_flags, const char* vuid_queue_feedback,
+                            const char* vuid_queue_occlusion, const char* vuid_precise, const char* vuid_query_count) const;
+    bool ValidateCmdEndQuery(const CMD_BUFFER_STATE* cb_state, const QueryObject& query_obj, CMD_TYPE cmd, const char* cmd_name,
+                             const char* vuid_queue_flags, const char* vuid_active_queries) const;
+
+    bool ValidateCmdDrawType(VkCommandBuffer cmd_buffer, bool indexed, VkPipelineBindPoint bind_point, CMD_TYPE cmd_type,
+                             const char* caller, VkQueueFlags queue_flags, const char* queue_flag_code,
+                             const char* renderpass_msg_code, const char* pipebound_msg_code,
+                             const char* dynamic_state_msg_code) const;
+    bool ValidateCmdNextSubpass(RenderPassCreateVersion rp_version, VkCommandBuffer commandBuffer) const;
+    bool ValidateInsertMemoryRange(const VulkanTypedHandle& typed_handle, const DEVICE_MEMORY_STATE* mem_info,
+                                   VkDeviceSize memoryOffset, const VkMemoryRequirements& memRequirements, bool is_linear,
+                                   const char* api_name) const;
+    bool ValidateInsertImageMemoryRange(VkImage image, const DEVICE_MEMORY_STATE* mem_info, VkDeviceSize mem_offset,
+                                        const VkMemoryRequirements& mem_reqs, bool is_linear, const char* api_name) const;
+    bool ValidateInsertBufferMemoryRange(VkBuffer buffer, const DEVICE_MEMORY_STATE* mem_info, VkDeviceSize mem_offset,
+                                         const VkMemoryRequirements& mem_reqs, const char* api_name) const;
+    bool ValidateInsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, const DEVICE_MEMORY_STATE* mem_info,
+                                                        VkDeviceSize mem_offset, const VkMemoryRequirements& mem_reqs,
+                                                        const char* api_name) const;
+
+    bool ValidateMemoryTypes(const DEVICE_MEMORY_STATE* mem_info, const uint32_t memory_type_bits, const char* funcName,
+                             const char* msgCode) const;
+    bool ValidateCommandBufferState(const CMD_BUFFER_STATE* cb_state, const char* call_source, int current_submit_count,
+                                    const char* vu_id) const;
+    bool ValidateCommandBufferSimultaneousUse(const CMD_BUFFER_STATE* pCB, int current_submit_count) const;
+    bool ValidateGetDeviceQueue(uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue, const char* valid_qfi_vuid,
+                                const char* qfi_in_range_vuid) const;
+    bool ValidateRenderpassAttachmentUsage(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR* pCreateInfo) const;
+    bool AddAttachmentUse(RenderPassCreateVersion rp_version, uint32_t subpass, std::vector<uint8_t>& attachment_uses,
+                          std::vector<VkImageLayout>& attachment_layouts, uint32_t attachment, uint8_t new_use,
+                          VkImageLayout new_layout) const;
+    bool ValidateAttachmentIndex(RenderPassCreateVersion rp_version, uint32_t attachment, uint32_t attachment_count,
+                                 const char* type) const;
+    bool ValidateCreateRenderPass(VkDevice device, RenderPassCreateVersion rp_version,
+                                  const VkRenderPassCreateInfo2KHR* pCreateInfo) const;
+    bool ValidateRenderPassPipelineBarriers(const char* funcName, const CMD_BUFFER_STATE* cb_state,
+                                            VkPipelineStageFlags src_stage_mask, VkPipelineStageFlags dst_stage_mask,
+                                            VkDependencyFlags dependency_flags, uint32_t mem_barrier_count,
+                                            const VkMemoryBarrier* mem_barriers, uint32_t buffer_mem_barrier_count,
+                                            const VkBufferMemoryBarrier* buffer_mem_barriers, uint32_t image_mem_barrier_count,
+                                            const VkImageMemoryBarrier* image_barriers) const;
+    bool CheckStageMaskQueueCompatibility(VkCommandBuffer command_buffer, VkPipelineStageFlags stage_mask, VkQueueFlags queue_flags,
+                                          const char* function, const char* src_or_dest, const char* error_code) const;
+    bool ValidateUpdateDescriptorSetWithTemplate(VkDescriptorSet descriptorSet,
+                                                 VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const void* pData) const;
+    bool ValidateMemoryIsBoundToBuffer(const BUFFER_STATE*, const char*, const char*) const;
+    bool ValidateMemoryIsBoundToImage(const IMAGE_STATE*, const char*, const char*) const;
+    bool ValidateMemoryIsBoundToAccelerationStructure(const ACCELERATION_STRUCTURE_STATE*, const char*, const char*) const;
+    bool ValidateObjectNotInUse(const BASE_NODE* obj_node, const VulkanTypedHandle& obj_struct, const char* caller_name,
+                                const char* error_code) const;
+    bool ValidateCmdQueueFlags(const CMD_BUFFER_STATE* cb_node, const char* caller_name, VkQueueFlags flags,
+                               const char* error_code) const;
+    bool InsideRenderPass(const CMD_BUFFER_STATE* pCB, const char* apiName, const char* msgCode) const;
+    bool OutsideRenderPass(const CMD_BUFFER_STATE* pCB, const char* apiName, const char* msgCode) const;
+
+    static void SetLayout(ImageSubresPairLayoutMap& imageLayoutMap, ImageSubresourcePair imgpair, VkImageLayout layout);
+
+    bool ValidateImageSampleCount(const IMAGE_STATE* image_state, VkSampleCountFlagBits sample_count, const char* location,
+                                  const std::string& msgCode) const;
+    bool ValidateCmdSubpassState(const CMD_BUFFER_STATE* pCB, const CMD_TYPE cmd_type) const;
+    bool ValidateCmd(const CMD_BUFFER_STATE* cb_state, const CMD_TYPE cmd, const char* caller_name) const;
+
+    bool ValidateDeviceMaskToPhysicalDeviceCount(uint32_t deviceMask, VkDebugReportObjectTypeEXT VUID_handle_type,
+                                                 uint64_t VUID_handle, const char* VUID) const;
+    bool ValidateDeviceMaskToZero(uint32_t deviceMask, VkDebugReportObjectTypeEXT VUID_handle_type, uint64_t VUID_handle,
+                                  const char* VUID) const;
+    bool ValidateDeviceMaskToCommandBuffer(const CMD_BUFFER_STATE* pCB, uint32_t deviceMask,
+                                           VkDebugReportObjectTypeEXT VUID_handle_type, uint64_t VUID_handle,
+                                           const char* VUID) const;
+    bool ValidateDeviceMaskToRenderPass(const CMD_BUFFER_STATE* pCB, uint32_t deviceMask,
+                                        VkDebugReportObjectTypeEXT VUID_handle_type, uint64_t VUID_handle, const char* VUID) const;
+
+    bool ValidateBindAccelerationStructureMemoryNV(VkDevice device, const VkBindAccelerationStructureMemoryInfoNV& info) const;
+    // Prototypes for CoreChecks accessor functions
+    VkFormatProperties GetPDFormatProperties(const VkFormat format) const;
+    const VkPhysicalDeviceMemoryProperties* GetPhysicalDeviceMemoryProperties();
+
+    const GlobalQFOTransferBarrierMap<VkImageMemoryBarrier>& GetGlobalQFOReleaseBarrierMap(
+        const QFOTransferBarrier<VkImageMemoryBarrier>::Tag& type_tag) const;
+    const GlobalQFOTransferBarrierMap<VkBufferMemoryBarrier>& GetGlobalQFOReleaseBarrierMap(
+        const QFOTransferBarrier<VkBufferMemoryBarrier>::Tag& type_tag) const;
+    GlobalQFOTransferBarrierMap<VkImageMemoryBarrier>& GetGlobalQFOReleaseBarrierMap(
+        const QFOTransferBarrier<VkImageMemoryBarrier>::Tag& type_tag);
+    GlobalQFOTransferBarrierMap<VkBufferMemoryBarrier>& GetGlobalQFOReleaseBarrierMap(
+        const QFOTransferBarrier<VkBufferMemoryBarrier>::Tag& type_tag);
+    template <typename Barrier>
+    void RecordQueuedQFOTransferBarriers(CMD_BUFFER_STATE* cb_state);
+    template <typename Barrier>
+    bool ValidateQueuedQFOTransferBarriers(const CMD_BUFFER_STATE* cb_state, QFOTransferCBScoreboards<Barrier>* scoreboards) const;
+    bool ValidateQueuedQFOTransfers(const CMD_BUFFER_STATE* cb_state,
+                                    QFOTransferCBScoreboards<VkImageMemoryBarrier>* qfo_image_scoreboards,
+                                    QFOTransferCBScoreboards<VkBufferMemoryBarrier>* qfo_buffer_scoreboards) const;
+    template <typename BarrierRecord, typename Scoreboard>
+    bool ValidateAndUpdateQFOScoreboard(const debug_report_data* report_data, const CMD_BUFFER_STATE* cb_state,
+                                        const char* operation, const BarrierRecord& barrier, Scoreboard* scoreboard) const;
+    template <typename Barrier>
+    void RecordBarrierArrayValidationInfo(const char* func_name, CMD_BUFFER_STATE* cb_state, uint32_t barrier_count,
+                                          const Barrier* barriers);
+    void RecordBarrierValidationInfo(const char* func_name, CMD_BUFFER_STATE* cb_state, uint32_t bufferBarrierCount,
+                                     const VkBufferMemoryBarrier* pBufferMemBarriers, uint32_t imageMemBarrierCount,
+                                     const VkImageMemoryBarrier* pImageMemBarriers);
+    template <typename Barrier>
+    bool ValidateQFOTransferBarrierUniqueness(const char* func_name, const CMD_BUFFER_STATE* cb_state, uint32_t barrier_count,
+                                              const Barrier* barriers) const;
+    bool IsReleaseOp(CMD_BUFFER_STATE* cb_state, const VkImageMemoryBarrier& barrier) const;
+    bool ValidateBarriersQFOTransferUniqueness(const char* func_name, const CMD_BUFFER_STATE* cb_state, uint32_t bufferBarrierCount,
+                                               const VkBufferMemoryBarrier* pBufferMemBarriers, uint32_t imageMemBarrierCount,
+                                               const VkImageMemoryBarrier* pImageMemBarriers) const;
+    bool ValidatePrimaryCommandBufferState(const CMD_BUFFER_STATE* pCB, int current_submit_count,
+                                           QFOTransferCBScoreboards<VkImageMemoryBarrier>* qfo_image_scoreboards,
+                                           QFOTransferCBScoreboards<VkBufferMemoryBarrier>* qfo_buffer_scoreboards) const;
+    bool ValidatePipelineDrawtimeState(const LAST_BOUND_STATE& state, const CMD_BUFFER_STATE* pCB, CMD_TYPE cmd_type,
+                                       const PIPELINE_STATE* pPipeline, const char* caller) const;
+    bool ValidateCmdBufDrawState(const CMD_BUFFER_STATE* cb_node, CMD_TYPE cmd_type, const bool indexed,
+                                 const VkPipelineBindPoint bind_point, const char* function, const char* pipe_err_code,
+                                 const char* state_err_code) const;
+    static bool ValidateEventStageMask(const ValidationStateTracker* state_data, const CMD_BUFFER_STATE* pCB, size_t eventCount,
+                                       size_t firstEventIndex, VkPipelineStageFlags sourceStageMask,
+                                       EventToStageMap* localEventToStageMap);
+    bool ValidateQueueFamilyIndices(const CMD_BUFFER_STATE* pCB, VkQueue queue) const;
+    bool ValidatePerformanceQueries(const CMD_BUFFER_STATE* pCB, VkQueue queue, VkQueryPool& first_query_pool,
+                                    uint32_t counterPassIndex) const;
+    VkResult CoreLayerCreateValidationCacheEXT(VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo,
+                                               const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache);
+    void CoreLayerDestroyValidationCacheEXT(VkDevice device, VkValidationCacheEXT validationCache,
+                                            const VkAllocationCallbacks* pAllocator);
+    VkResult CoreLayerMergeValidationCachesEXT(VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount,
+                                               const VkValidationCacheEXT* pSrcCaches);
+    VkResult CoreLayerGetValidationCacheDataEXT(VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize,
+                                                void* pData);
+    // For the given bindings, validate that state at draw time is correct, returning false on error and writing error details into string*
+    bool ValidateDrawState(const cvdescriptorset::DescriptorSet* descriptor_set, const std::map<uint32_t, descriptor_req>& bindings,
+                           const std::vector<uint32_t>& dynamic_offsets, const CMD_BUFFER_STATE* cb_node, const char* caller,
+                           std::string* error) const;
+    bool ValidateDescriptorSetBindingData(const CMD_BUFFER_STATE* cb_node, const cvdescriptorset::DescriptorSet* descriptor_set,
+                                          const std::vector<uint32_t>& dynamic_offsets, uint32_t binding, descriptor_req reqs,
+                                          const char* caller, std::string* error) const;
+
+    // Validate contents of a CopyUpdate
+    using DescriptorSet = cvdescriptorset::DescriptorSet;
+    bool ValidateCopyUpdate(const VkCopyDescriptorSet* update, const DescriptorSet* dst_set, const DescriptorSet* src_set,
+                            const char* func_name, std::string* error_code, std::string* error_msg) const;
+    bool VerifyCopyUpdateContents(const VkCopyDescriptorSet* update, const DescriptorSet* src_set, VkDescriptorType type,
+                                  uint32_t index, const char* func_name, std::string* error_code, std::string* error_msg) const;
+    // Validate contents of a WriteUpdate
+    bool ValidateWriteUpdate(const DescriptorSet* descriptor_set, const VkWriteDescriptorSet* update, const char* func_name,
+                             std::string* error_code, std::string* error_msg) const;
+    bool VerifyWriteUpdateContents(const DescriptorSet* dest_set, const VkWriteDescriptorSet* update, const uint32_t index,
+                                   const char* func_name, std::string* error_code, std::string* error_msg) const;
+    // Shared helper functions - the combined image/sampler descriptor type performs operations common to both
+    // sampler and image descriptors, so those descriptors share these helpers
+    bool ValidateImageUpdate(VkImageView, VkImageLayout, VkDescriptorType, const char* func_name, std::string*, std::string*) const;
+    // Validate contents of a push descriptor update
+    bool ValidatePushDescriptorsUpdate(const DescriptorSet* push_set, uint32_t write_count, const VkWriteDescriptorSet* p_wds,
+                                       const char* func_name) const;
+    // Descriptor Set Validation Functions
+    bool ValidateSampler(VkSampler) const;
+    bool ValidateBufferUpdate(VkDescriptorBufferInfo const* buffer_info, VkDescriptorType type, const char* func_name,
+                              std::string* error_code, std::string* error_msg) const;
+    bool ValidateUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet, const TEMPLATE_STATE* template_state,
+                                                     const void* pData) const;
+    bool ValidateAllocateDescriptorSets(const VkDescriptorSetAllocateInfo*,
+                                        const cvdescriptorset::AllocateDescriptorSetsData*) const;
+    bool ValidateUpdateDescriptorSets(uint32_t write_count, const VkWriteDescriptorSet* p_wds, uint32_t copy_count,
+                                      const VkCopyDescriptorSet* p_cds, const char* func_name) const;
+
+    // Stuff from shader_validation
+    bool ValidateGraphicsPipelineShaderState(const PIPELINE_STATE* pPipeline) const;
+    bool ValidateComputePipeline(PIPELINE_STATE* pPipeline) const;
+    bool ValidateRayTracingPipelineNV(PIPELINE_STATE* pipeline) const;
+    bool PreCallValidateCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo,
+                                           const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule) const;
+    bool ValidatePipelineShaderStage(VkPipelineShaderStageCreateInfo const* pStage, const PIPELINE_STATE* pipeline,
+                                     const PIPELINE_STATE::StageState& stage_state, const SHADER_MODULE_STATE* module,
+                                     const spirv_inst_iter& entrypoint, bool check_point_size) const;
+    bool ValidatePointListShaderState(const PIPELINE_STATE* pipeline, SHADER_MODULE_STATE const* src, spirv_inst_iter entrypoint,
+                                      VkShaderStageFlagBits stage) const;
+    bool ValidateShaderCapabilities(SHADER_MODULE_STATE const* src, VkShaderStageFlagBits stage) const;
+    bool ValidateShaderStageWritableDescriptor(VkShaderStageFlagBits stage, bool has_writable_descriptor) const;
+    bool ValidateShaderStageInputOutputLimits(SHADER_MODULE_STATE const* src, VkPipelineShaderStageCreateInfo const* pStage,
+                                              const PIPELINE_STATE* pipeline, spirv_inst_iter entrypoint) const;
+    bool ValidateShaderStageGroupNonUniform(SHADER_MODULE_STATE const* src, VkShaderStageFlagBits stage) const;
+    bool ValidateCooperativeMatrix(SHADER_MODULE_STATE const* src, VkPipelineShaderStageCreateInfo const* pStage,
+                                   const PIPELINE_STATE* pipeline) const;
+    bool ValidateExecutionModes(SHADER_MODULE_STATE const* src, spirv_inst_iter entrypoint) const;
+
+    // Buffer Validation Functions
+    template <class OBJECT, class LAYOUT>
+    void SetLayout(OBJECT* pObject, VkImage image, VkImageSubresource range, const LAYOUT& layout);
+    template <class OBJECT, class LAYOUT>
+    void SetLayout(OBJECT* pObject, ImageSubresourcePair imgpair, const LAYOUT& layout, VkImageAspectFlags aspectMask);
+    // Remove the pending QFO release records from the global set
+    // Note that the type of the handle argument is constrained to match the Barrier type
+    // The defaulted BarrierRecord argument allows us to declare the type once, but is not intended to be specified by the caller
+    template <typename Barrier, typename BarrierRecord = QFOTransferBarrier<Barrier>>
+    void EraseQFOReleaseBarriers(const typename BarrierRecord::HandleType& handle) {
+        GlobalQFOTransferBarrierMap<Barrier>& global_release_barriers =
+            GetGlobalQFOReleaseBarrierMap(typename BarrierRecord::Tag());
+        global_release_barriers.erase(handle);
+    }
+    bool ValidateCopyImageTransferGranularityRequirements(const CMD_BUFFER_STATE* cb_node, const IMAGE_STATE* src_img,
+                                                          const IMAGE_STATE* dst_img, const VkImageCopy* region, const uint32_t i,
+                                                          const char* function) const;
+    bool ValidateIdleBuffer(VkBuffer buffer) const;
+    bool ValidateUsageFlags(VkFlags actual, VkFlags desired, VkBool32 strict, const VulkanTypedHandle& typed_handle,
+                            const char* msgCode, char const* func_name, char const* usage_str) const;
+    bool ValidateImageSubresourceRange(const uint32_t image_mip_count, const uint32_t image_layer_count,
+                                       const VkImageSubresourceRange& subresourceRange, const char* cmd_name,
+                                       const char* param_name, const char* image_layer_count_var_name, const uint64_t image_handle,
+                                       SubresourceRangeErrorCodes errorCodes) const;
+    void SetImageLayout(CMD_BUFFER_STATE* cb_node, const IMAGE_STATE& image_state,
+                        const VkImageSubresourceRange& image_subresource_range, VkImageLayout layout,
+                        VkImageLayout expected_layout = kInvalidLayout);
+    void SetImageLayout(CMD_BUFFER_STATE* cb_node, const IMAGE_STATE& image_state,
+                        const VkImageSubresourceLayers& image_subresource_layers, VkImageLayout layout);
+    bool ValidateRenderPassLayoutAgainstFramebufferImageUsage(RenderPassCreateVersion rp_version, VkImageLayout layout,
+                                                              VkImage image, VkImageView image_view, VkFramebuffer framebuffer,
+                                                              VkRenderPass renderpass, uint32_t attachment_index,
+                                                              const char* variable_name) const;
+    bool ValidateBufferImageCopyData(uint32_t regionCount, const VkBufferImageCopy* pRegions, const IMAGE_STATE* image_state,
+                                     const char* function) const;
+    bool ValidateBufferViewRange(const BUFFER_STATE* buffer_state, const VkBufferViewCreateInfo* pCreateInfo,
+                                 const VkPhysicalDeviceLimits* device_limits) const;
+    bool ValidateBufferViewBuffer(const BUFFER_STATE* buffer_state, const VkBufferViewCreateInfo* pCreateInfo) const;
+
+    bool PreCallValidateCreateImage(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
+                                    VkImage* pImage) const;
+
+    void PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
+                                   VkImage* pImage, VkResult result);
+
+    void PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator);
+
+    bool PreCallValidateDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator) const;
+
+    bool ValidateImageAttributes(const IMAGE_STATE* image_state, const VkImageSubresourceRange& range) const;
+
+    bool ValidateClearAttachmentExtent(VkCommandBuffer command_buffer, uint32_t attachment_index,
+                                       const FRAMEBUFFER_STATE* framebuffer, uint32_t fb_attachment, const VkRect2D& render_area,
+                                       uint32_t rect_count, const VkClearRect* clear_rects) const;
+    bool ValidateImageCopyData(const uint32_t regionCount, const VkImageCopy* ic_regions, const IMAGE_STATE* src_state,
+                               const IMAGE_STATE* dst_state) const;
+
+    bool VerifyClearImageLayout(const CMD_BUFFER_STATE* cb_node, const IMAGE_STATE* image_state,
+                                const VkImageSubresourceRange& range, VkImageLayout dest_image_layout, const char* func_name) const;
+
+    bool VerifyImageLayout(const CMD_BUFFER_STATE* cb_node, const IMAGE_STATE* image_state, const VkImageSubresourceRange& range,
+                           VkImageAspectFlags view_aspect, VkImageLayout explicit_layout, VkImageLayout optimal_layout,
+                           const char* caller, const char* layout_invalid_msg_code, const char* layout_mismatch_msg_code,
+                           bool* error) const;
+
+    bool VerifyImageLayout(const CMD_BUFFER_STATE* cb_node, const IMAGE_STATE* image_state, const VkImageSubresourceRange& range,
+                           VkImageLayout explicit_layout, VkImageLayout optimal_layout, const char* caller,
+                           const char* layout_invalid_msg_code, const char* layout_mismatch_msg_code, bool* error) const {
+        return VerifyImageLayout(cb_node, image_state, range, 0, explicit_layout, optimal_layout, caller, layout_invalid_msg_code,
+                                 layout_mismatch_msg_code, error);
+    }
+
+    bool VerifyImageLayout(const CMD_BUFFER_STATE* cb_node, const IMAGE_STATE* image_state,
+                           const VkImageSubresourceLayers& subLayers, VkImageLayout explicit_layout, VkImageLayout optimal_layout,
+                           const char* caller, const char* layout_invalid_msg_code, const char* layout_mismatch_msg_code,
+                           bool* error) const;
+
+    bool CheckItgExtent(const CMD_BUFFER_STATE* cb_node, const VkExtent3D* extent, const VkOffset3D* offset,
+                        const VkExtent3D* granularity, const VkExtent3D* subresource_extent, const VkImageType image_type,
+                        const uint32_t i, const char* function, const char* member, const char* vuid) const;
+
+    bool CheckItgOffset(const CMD_BUFFER_STATE* cb_node, const VkOffset3D* offset, const VkExtent3D* granularity, const uint32_t i,
+                        const char* function, const char* member, const char* vuid) const;
+    VkExtent3D GetScaledItg(const CMD_BUFFER_STATE* cb_node, const IMAGE_STATE* img) const;
+    bool CopyImageMultiplaneValidation(VkCommandBuffer command_buffer, const IMAGE_STATE* src_image_state,
+                                       const IMAGE_STATE* dst_image_state, const VkImageCopy region) const;
+
+    bool PreCallValidateCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
+                                           const VkClearColorValue* pColor, uint32_t rangeCount,
+                                           const VkImageSubresourceRange* pRanges) const;
+
+    void PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
+                                         const VkClearColorValue* pColor, uint32_t rangeCount,
+                                         const VkImageSubresourceRange* pRanges);
+
+    bool PreCallValidateCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
+                                                  const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount,
+                                                  const VkImageSubresourceRange* pRanges) const;
+
+    void PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
+                                                const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount,
+                                                const VkImageSubresourceRange* pRanges);
+
+    bool FindLayoutVerifyLayout(ImageSubresourcePair imgpair, VkImageLayout& layout, const VkImageAspectFlags aspectMask);
+
+    bool FindGlobalLayout(ImageSubresourcePair imgpair, VkImageLayout& layout);
+
+    bool FindLayouts(VkImage image, std::vector<VkImageLayout>& layouts) const;
+
+    bool FindLayout(const ImageSubresPairLayoutMap& imageLayoutMap, ImageSubresourcePair imgpair, VkImageLayout& layout) const;
+
+    static bool FindLayout(const ImageSubresPairLayoutMap& imageLayoutMap, ImageSubresourcePair imgpair, VkImageLayout& layout,
+                           const VkImageAspectFlags aspectMask);
+
+    void SetGlobalLayout(ImageSubresourcePair imgpair, const VkImageLayout& layout);
+
+    void SetImageViewLayout(CMD_BUFFER_STATE* cb_node, const IMAGE_VIEW_STATE& view_state, VkImageLayout layout,
+                            VkImageLayout layoutStencil);
+    void SetImageViewInitialLayout(CMD_BUFFER_STATE* cb_node, const IMAGE_VIEW_STATE& view_state, VkImageLayout layout);
+
+    void SetImageInitialLayout(CMD_BUFFER_STATE* cb_node, VkImage image, const VkImageSubresourceRange& range,
+                               VkImageLayout layout);
+    void SetImageInitialLayout(CMD_BUFFER_STATE* cb_node, const IMAGE_STATE& image_state, const VkImageSubresourceRange& range,
+                               VkImageLayout layout);
+    void SetImageInitialLayout(CMD_BUFFER_STATE* cb_node, const IMAGE_STATE& image_state, const VkImageSubresourceLayers& layers,
+                               VkImageLayout layout);
+
+    bool VerifyFramebufferAndRenderPassLayouts(RenderPassCreateVersion rp_version, const CMD_BUFFER_STATE* pCB,
+                                               const VkRenderPassBeginInfo* pRenderPassBegin,
+                                               const FRAMEBUFFER_STATE* framebuffer_state) const;
+    void RecordCmdBeginRenderPassLayouts(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin,
+                                         const VkSubpassContents contents);
+    void TransitionAttachmentRefLayout(CMD_BUFFER_STATE* pCB, FRAMEBUFFER_STATE* pFramebuffer,
+                                       const safe_VkAttachmentReference2KHR& ref);
+
+    void TransitionSubpassLayouts(CMD_BUFFER_STATE*, const RENDER_PASS_STATE*, const int, FRAMEBUFFER_STATE*);
+
+    void TransitionBeginRenderPassLayouts(CMD_BUFFER_STATE*, const RENDER_PASS_STATE*, FRAMEBUFFER_STATE*);
+
+    bool ValidateBarrierLayoutToImageUsage(const VkImageMemoryBarrier& img_barrier, bool new_not_old, VkImageUsageFlags usage,
+                                           const char* func_name, const char* barrier_pname) const;
+
+    bool ValidateBarriersToImages(const CMD_BUFFER_STATE* cb_state, uint32_t imageMemoryBarrierCount,
+                                  const VkImageMemoryBarrier* pImageMemoryBarriers, const char* func_name) const;
+
+    void RecordQueuedQFOTransfers(CMD_BUFFER_STATE* pCB);
+    void EraseQFOImageRelaseBarriers(const VkImage& image);
+
+    void TransitionImageLayouts(CMD_BUFFER_STATE* cb_state, uint32_t memBarrierCount, const VkImageMemoryBarrier* pImgMemBarriers);
+
+    void RecordTransitionImageLayout(CMD_BUFFER_STATE* cb_state, const IMAGE_STATE* image_state,
+                                     const VkImageMemoryBarrier& mem_barrier);
+
+    void TransitionFinalSubpassLayouts(CMD_BUFFER_STATE* pCB, const VkRenderPassBeginInfo* pRenderPassBegin,
+                                       FRAMEBUFFER_STATE* framebuffer_state);
+
+    bool PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
+                                     VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
+                                     const VkImageCopy* pRegions) const;
+
+    bool PreCallValidateCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount,
+                                            const VkClearAttachment* pAttachments, uint32_t rectCount,
+                                            const VkClearRect* pRects) const;
+    void PreCallRecordCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount,
+                                          const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects);
+
+    bool PreCallValidateCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
+                                        VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
+                                        const VkImageResolve* pRegions) const;
+
+    bool PreCallValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
+                                     VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
+                                     const VkImageBlit* pRegions, VkFilter filter) const;
+
+    void PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
+                                   VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions,
+                                   VkFilter filter);
+
+    bool ValidateCmdBufImageLayouts(const CMD_BUFFER_STATE* pCB, const ImageSubresPairLayoutMap& globalImageLayoutMap,
+                                    ImageSubresPairLayoutMap* overlayLayoutMap_arg) const;
+
+    void UpdateCmdBufImageLayouts(CMD_BUFFER_STATE* pCB);
+
+    bool VerifyBoundMemoryIsValid(VkDeviceMemory mem, const VulkanTypedHandle& typed_handle, const char* api_name,
+                                  const char* error_code) const;
+
+    bool ValidateLayoutVsAttachmentDescription(const debug_report_data* report_data, RenderPassCreateVersion rp_version,
+                                               const VkImageLayout first_layout, const uint32_t attachment,
+                                               const VkAttachmentDescription2KHR& attachment_description) const;
+
+    bool ValidateLayouts(RenderPassCreateVersion rp_version, VkDevice device, const VkRenderPassCreateInfo2KHR* pCreateInfo) const;
+
+    bool ValidateImageUsageFlags(IMAGE_STATE const* image_state, VkFlags desired, bool strict, const char* msgCode,
+                                 char const* func_name, char const* usage_string) const;
+
+    bool ValidateImageFormatFeatureFlags(IMAGE_STATE const* image_state, VkFormatFeatureFlags desired, char const* func_name,
+                                         const char* linear_vuid, const char* optimal_vuid) const;
+
+    bool ValidateImageSubresourceLayers(const CMD_BUFFER_STATE* cb_node, const VkImageSubresourceLayers* subresource_layers,
+                                        char const* func_name, char const* member, uint32_t i) const;
+
+    bool ValidateBufferUsageFlags(BUFFER_STATE const* buffer_state, VkFlags desired, bool strict, const char* msgCode,
+                                  char const* func_name, char const* usage_string) const;
+
+    bool PreCallValidateCreateBuffer(VkDevice device, const VkBufferCreateInfo* pCreateInfo,
+                                     const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer) const;
+
+    bool PreCallValidateCreateBufferView(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo,
+                                         const VkAllocationCallbacks* pAllocator, VkBufferView* pView) const;
+
+    bool ValidateImageAspectMask(VkImage image, VkFormat format, VkImageAspectFlags aspect_mask, const char* func_name,
+                                 const char* vuid = kVUID_Core_DrawState_InvalidImageAspect) const;
+
+    bool ValidateCreateImageViewSubresourceRange(const IMAGE_STATE* image_state, bool is_imageview_2d_type,
+                                                 const VkImageSubresourceRange& subresourceRange) const;
+
+    bool ValidateCmdClearColorSubresourceRange(const IMAGE_STATE* image_state, const VkImageSubresourceRange& subresourceRange,
+                                               const char* param_name) const;
+
+    bool ValidateCmdClearDepthSubresourceRange(const IMAGE_STATE* image_state, const VkImageSubresourceRange& subresourceRange,
+                                               const char* param_name) const;
+
+    bool ValidateImageBarrierSubresourceRange(const IMAGE_STATE* image_state, const VkImageSubresourceRange& subresourceRange,
+                                              const char* cmd_name, const char* param_name) const;
+
+    bool PreCallValidateCreateImageView(VkDevice device, const VkImageViewCreateInfo* pCreateInfo,
+                                        const VkAllocationCallbacks* pAllocator, VkImageView* pView) const;
+
+    bool ValidateCopyBufferImageTransferGranularityRequirements(const CMD_BUFFER_STATE* cb_node, const IMAGE_STATE* img,
+                                                                const VkBufferImageCopy* region, const uint32_t i,
+                                                                const char* function, const char* vuid) const;
+
+    bool ValidateImageMipLevel(const CMD_BUFFER_STATE* cb_node, const IMAGE_STATE* img, uint32_t mip_level, const uint32_t i,
+                               const char* function, const char* member, const char* vuid) const;
+
+    bool ValidateImageArrayLayerRange(const CMD_BUFFER_STATE* cb_node, const IMAGE_STATE* img, const uint32_t base_layer,
+                                      const uint32_t layer_count, const uint32_t i, const char* function, const char* member,
+                                      const char* vuid) const;
+
+    void PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
+                                   VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions);
+
+    bool PreCallValidateCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount,
+                                      const VkBufferCopy* pRegions) const;
+    bool PreCallValidateDestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator) const;
+
+    bool PreCallValidateDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator) const;
+
+    bool PreCallValidateDestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator) const;
+
+    bool PreCallValidateCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size,
+                                      uint32_t data) const;
+
+    bool PreCallValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
+                                             VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions) const;
+
+    void PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
+                                           VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions);
+
+    bool PreCallValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
+                                             VkImageLayout dstImageLayout, uint32_t regionCount,
+                                             const VkBufferImageCopy* pRegions) const;
+
+    void PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
+                                           VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions);
+
+    bool PreCallValidateGetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource* pSubresource,
+                                                  VkSubresourceLayout* pLayout) const;
+    bool ValidateCreateImageANDROID(const debug_report_data* report_data, const VkImageCreateInfo* create_info) const;
+    bool ValidateCreateImageViewANDROID(const VkImageViewCreateInfo* create_info) const;
+    bool ValidateGetImageSubresourceLayoutANDROID(const VkImage image) const;
+    bool ValidateQueueFamilies(uint32_t queue_family_count, const uint32_t* queue_families, const char* cmd_name,
+                               const char* array_parameter_name, const char* unique_error_code, const char* valid_error_code,
+                               bool optional) const;
+    bool ValidateAllocateMemoryANDROID(const VkMemoryAllocateInfo* alloc_info) const;
+    bool ValidateGetImageMemoryRequirements2ANDROID(const VkImage image) const;
+    bool ValidateCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo* create_info) const;
+    bool PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+                                                const VkGraphicsPipelineCreateInfo* pCreateInfos,
+                                                const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines,
+                                                void* cgpl_state) const;
+    bool PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+                                               const VkComputePipelineCreateInfo* pCreateInfos,
+                                               const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines,
+                                               void* pipe_state) const;
+    bool PreCallValidateGetPipelineExecutablePropertiesKHR(VkDevice device, const VkPipelineInfoKHR* pPipelineInfo,
+                                                           uint32_t* pExecutableCount,
+                                                           VkPipelineExecutablePropertiesKHR* pProperties) const;
+    bool ValidatePipelineExecutableInfo(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo) const;
+    bool PreCallValidateGetPipelineExecutableStatisticsKHR(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo,
+                                                           uint32_t* pStatisticCount,
+                                                           VkPipelineExecutableStatisticKHR* pStatistics) const;
+    bool PreCallValidateGetPipelineExecutableInternalRepresentationsKHR(
+        VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount,
+        VkPipelineExecutableInternalRepresentationKHR* pStatistics) const;
+    bool PreCallValidateCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo,
+                                             const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout) const;
+    bool PreCallValidateAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo,
+                                               VkDescriptorSet* pDescriptorSets, void* ads_state) const;
+    bool PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+                                                    const VkRayTracingPipelineCreateInfoNV* pCreateInfos,
+                                                    const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines,
+                                                    void* pipe_state) const;
+    bool PreCallValidateCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer,
+                                       VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer,
+                                       VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride,
+                                       VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset,
+                                       VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer,
+                                       VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride,
+                                       uint32_t width, uint32_t height, uint32_t depth) const;
+    void PostCallRecordCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer,
+                                      VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer,
+                                      VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride,
+                                      VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset,
+                                      VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer,
+                                      VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride,
+                                      uint32_t width, uint32_t height, uint32_t depth);
+    bool PreCallValidateCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo* pCreateInfo,
+                                     const VkAllocationCallbacks* pAllocator, VkDevice* pDevice) const;
+    void PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo* pCreateInfo,
+                                    const VkAllocationCallbacks* pAllocator, VkDevice* pDevice, VkResult result);
+    bool PreCallValidateCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
+                                        VkDeviceSize dataSize, const void* pData) const;
+    bool PreCallValidateGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue) const;
+    bool PreCallValidateCreateSamplerYcbcrConversion(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
+                                                     const VkAllocationCallbacks* pAllocator,
+                                                     VkSamplerYcbcrConversion* pYcbcrConversion) const;
+    bool PreCallValidateCreateSamplerYcbcrConversionKHR(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
+                                                        const VkAllocationCallbacks* pAllocator,
+                                                        VkSamplerYcbcrConversion* pYcbcrConversion) const;
+    bool PreCallValidateCmdDebugMarkerBeginEXT(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) const;
+    void PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks* pAllocator);
+    bool PreCallValidateQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence) const;
+    void PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence,
+                                   VkResult result);
+    bool PreCallValidateAllocateMemory(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo,
+                                       const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory) const;
+    bool PreCallValidateFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks* pAllocator) const;
+    bool PreCallValidateWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll,
+                                      uint64_t timeout) const;
+    bool PreCallValidateQueueWaitIdle(VkQueue queue) const;
+    bool PreCallValidateDeviceWaitIdle(VkDevice device) const;
+    bool PreCallValidateCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo,
+                                        const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore) const;
+    bool PreCallValidateWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfoKHR* pWaitInfo, uint64_t timeout) const;
+    bool PreCallValidateDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator) const;
+    bool PreCallValidateDestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator) const;
+    bool PreCallValidateDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator) const;
+    bool PreCallValidateDestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator) const;
+    bool ValidateGetQueryPoolResultsFlags(VkQueryPool queryPool, VkQueryResultFlags flags) const;
+    bool ValidateGetQueryPoolResultsQueries(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) const;
+    bool ValidatePerformanceQueryResults(const char* cmd_name, const QUERY_POOL_STATE* query_pool_state, uint32_t firstQuery,
+                                         uint32_t queryCount, VkQueryResultFlags flags) const;
+    bool ValidateGetQueryPoolPerformanceResults(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, void* pData,
+                                                VkDeviceSize stride, VkQueryResultFlags flags) const;
+    bool PreCallValidateGetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount,
+                                            size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags) const;
+    bool PreCallValidateBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
+                                             const VkBindBufferMemoryInfoKHR* pBindInfos) const;
+    bool PreCallValidateBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
+                                          const VkBindBufferMemoryInfoKHR* pBindInfos) const;
+    bool PreCallValidateBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) const;
+    bool PreCallValidateGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo,
+                                                    VkMemoryRequirements2* pMemoryRequirements) const;
+    bool PreCallValidateGetImageMemoryRequirements2KHR(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo,
+                                                       VkMemoryRequirements2* pMemoryRequirements) const;
+    bool PreCallValidateGetPhysicalDeviceImageFormatProperties2(VkPhysicalDevice physicalDevice,
+                                                                const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
+                                                                VkImageFormatProperties2* pImageFormatProperties) const;
+    bool PreCallValidateGetPhysicalDeviceImageFormatProperties2KHR(VkPhysicalDevice physicalDevice,
+                                                                   const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
+                                                                   VkImageFormatProperties2* pImageFormatProperties) const;
+    bool PreCallValidateDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator) const;
+    bool PreCallValidateDestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator) const;
+    bool PreCallValidateDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
+                                              const VkAllocationCallbacks* pAllocator) const;
+    bool PreCallValidateFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount,
+                                           const VkCommandBuffer* pCommandBuffers) const;
+    bool PreCallValidateCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo,
+                                          const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool) const;
+    bool PreCallValidateCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo,
+                                        const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool) const;
+    bool PreCallValidateDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
+                                           const VkAllocationCallbacks* pAllocator) const;
+    bool PreCallValidateResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags) const;
+    bool PreCallValidateResetFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences) const;
+    bool PreCallValidateDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
+                                           const VkAllocationCallbacks* pAllocator) const;
+    bool PreCallValidateDestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator) const;
+    bool PreCallValidateCreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
+                                                  const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout) const;
+    bool PreCallValidateResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
+                                            VkDescriptorPoolResetFlags flags) const;
+    bool PreCallValidateFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
+                                           const VkDescriptorSet* pDescriptorSets) const;
+    bool PreCallValidateUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
+                                             const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount,
+                                             const VkCopyDescriptorSet* pDescriptorCopies) const;
+    bool PreCallValidateBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo) const;
+    bool PreCallValidateEndCommandBuffer(VkCommandBuffer commandBuffer) const;
+    bool PreCallValidateResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags) const;
+    bool PreCallValidateCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
+                                        VkPipeline pipeline) const;
+    bool PreCallValidateCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount,
+                                       const VkViewport* pViewports) const;
+    bool PreCallValidateCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
+                                      const VkRect2D* pScissors) const;
+    bool PreCallValidateCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
+                                                 uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors) const;
+
+    bool PreCallValidateCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount,
+                                                 const VkViewportWScalingNV* pViewportWScalings) const;
+
+    bool PreCallValidateCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
+                                                  VkImageLayout imageLayout) const;
+    bool PreCallValidateCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
+                                                           uint32_t viewportCount,
+                                                           const VkShadingRatePaletteNV* pShadingRatePalettes) const;
+    bool ValidateGeometryTrianglesNV(const VkGeometryTrianglesNV& triangles, VkDebugReportObjectTypeEXT object_type,
+                                     uint64_t object_handle, const char* func_name) const;
+    bool ValidateGeometryAABBNV(const VkGeometryAABBNV& geometry, VkDebugReportObjectTypeEXT object_type, uint64_t object_handle,
+                                const char* func_name) const;
+    bool ValidateGeometryNV(const VkGeometryNV& geometry, VkDebugReportObjectTypeEXT object_type, uint64_t object_handle,
+                            const char* func_name) const;
+    bool PreCallValidateCreateAccelerationStructureNV(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo,
+                                                      const VkAllocationCallbacks* pAllocator,
+                                                      VkAccelerationStructureNV* pAccelerationStructure) const;
+    bool PreCallValidateBindAccelerationStructureMemoryNV(VkDevice device, uint32_t bindInfoCount,
+                                                          const VkBindAccelerationStructureMemoryInfoNV* pBindInfos) const;
+    bool PreCallValidateGetAccelerationStructureHandleNV(VkDevice device, VkAccelerationStructureNV accelerationStructure,
+                                                         size_t dataSize, void* pData) const;
+    bool PreCallValidateCmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo,
+                                                        VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update,
+                                                        VkAccelerationStructureNV dst, VkAccelerationStructureNV src,
+                                                        VkBuffer scratch, VkDeviceSize scratchOffset) const;
+    bool PreCallValidateCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst,
+                                                       VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode) const;
+    bool PreCallValidateDestroyAccelerationStructureNV(VkDevice device, VkAccelerationStructureNV accelerationStructure,
+                                                       const VkAllocationCallbacks* pAllocator) const;
+    bool PreCallValidateCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) const;
+    bool PreCallValidateCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
+                                             uint16_t lineStipplePattern) const;
+    bool PreCallValidateCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp,
+                                        float depthBiasSlopeFactor) const;
+    bool PreCallValidateCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) const;
+    bool PreCallValidateCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds) const;
+    bool PreCallValidateCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
+                                                 uint32_t compareMask) const;
+    bool PreCallValidateCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
+                                               uint32_t writeMask) const;
+    bool PreCallValidateCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
+                                               uint32_t reference) const;
+    bool PreCallValidateCmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
+                                              VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount,
+                                              const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount,
+                                              const uint32_t* pDynamicOffsets) const;
+    bool PreCallValidateCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
+                                                VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
+                                                const VkWriteDescriptorSet* pDescriptorWrites) const;
+    bool PreCallValidateCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+                                           VkIndexType indexType) const;
+    bool PreCallValidateCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount,
+                                             const VkBuffer* pBuffers, const VkDeviceSize* pOffsets) const;
+    bool PreCallValidateCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex,
+                                uint32_t firstInstance) const;
+    bool PreCallValidateCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
+                                       uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) const;
+    bool PreCallValidateCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count,
+                                               uint32_t stride) const;
+    bool PreCallValidateCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+                                                       VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
+                                                       uint32_t stride) const;
+    bool PreCallValidateCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) const;
+    bool PreCallValidateCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) const;
+    bool PreCallValidateCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count,
+                                        uint32_t stride) const;
+    bool PreCallValidateCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const;
+    bool PreCallValidateCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const;
+    bool PreCallValidateCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents,
+                                      VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
+                                      uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers,
+                                      uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers,
+                                      uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) const;
+    void PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents,
+                                    VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
+                                    uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers,
+                                    uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers,
+                                    uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers);
+    void PostCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents,
+                                     VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
+                                     uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers,
+                                     uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers,
+                                     uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers);
+    bool PreCallValidateCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
+                                           VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
+                                           uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers,
+                                           uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers,
+                                           uint32_t imageMemoryBarrierCount,
+                                           const VkImageMemoryBarrier* pImageMemoryBarriers) const;
+    void PreCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
+                                         VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
+                                         uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers,
+                                         uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers,
+                                         uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers);
+
+    void EnqueueVerifyBeginQuery(VkCommandBuffer, const QueryObject& query_obj, const char* func);
+    bool PreCallValidateCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot, VkFlags flags) const;
+    void PreCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot, VkFlags flags);
+    bool PreCallValidateCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) const;
+    bool PreCallValidateCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
+                                          uint32_t queryCount) const;
+    bool PreCallValidateCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
+                                                uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
+                                                VkDeviceSize stride, VkQueryResultFlags flags) const;
+    void PreCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
+                                              uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride,
+                                              VkQueryResultFlags flags);
+    bool PreCallValidateCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags,
+                                         uint32_t offset, uint32_t size, const void* pValues) const;
+    bool PreCallValidateCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
+                                          VkQueryPool queryPool, uint32_t slot) const;
+    void PreCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool,
+                                        uint32_t slot);
+    bool PreCallValidateCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo,
+                                          const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer) const;
+    bool PreCallValidateCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo,
+                                         const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass) const;
+    bool PreCallValidateGetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory mem, VkDeviceSize* pCommittedMem) const;
+    bool PreCallValidateCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR* pCreateInfo,
+                                             const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass) const;
+    bool PreCallValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin,
+                                           VkSubpassContents contents) const;
+    void PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin,
+                                         VkSubpassContents contents);
+    bool PreCallValidateCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin,
+                                               const VkSubpassBeginInfoKHR* pSubpassBeginInfo) const;
+    void PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin,
+                                             const VkSubpassBeginInfoKHR* pSubpassBeginInfo);
+    bool PreCallValidateCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) const;
+    void PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents);
+    bool PreCallValidateCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR* pSubpassBeginInfo,
+                                           const VkSubpassEndInfoKHR* pSubpassEndInfo) const;
+    void PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR* pSubpassBeginInfo,
+                                          const VkSubpassEndInfoKHR* pSubpassEndInfo);
+    bool PreCallValidateCmdEndRenderPass(VkCommandBuffer commandBuffer) const;
+    void PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer);
+    bool PreCallValidateCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR* pSubpassEndInfo) const;
+    void PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR* pSubpassEndInfo);
+    bool PreCallValidateCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
+                                           const VkCommandBuffer* pCommandBuffers) const;
+    bool PreCallValidateMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, VkFlags flags,
+                                  void** ppData) const;
+    void PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, VkFlags flags,
+                                 void** ppData, VkResult result);
+    bool PreCallValidateUnmapMemory(VkDevice device, VkDeviceMemory mem) const;
+    void PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem);
+    bool PreCallValidateFlushMappedMemoryRanges(VkDevice device, uint32_t memRangeCount,
+                                                const VkMappedMemoryRange* pMemRanges) const;
+    bool PreCallValidateInvalidateMappedMemoryRanges(VkDevice device, uint32_t memRangeCount,
+                                                     const VkMappedMemoryRange* pMemRanges) const;
+    void PostCallRecordInvalidateMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges,
+                                                    VkResult result);
+    bool PreCallValidateBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem, VkDeviceSize memoryOffset) const;
+    bool PreCallValidateBindImageMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfoKHR* pBindInfos) const;
+    bool PreCallValidateBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
+                                            const VkBindImageMemoryInfoKHR* pBindInfos) const;
+    bool PreCallValidateSetEvent(VkDevice device, VkEvent event) const;
+    bool PreCallValidateQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo,
+                                        VkFence fence) const;
+    bool PreCallValidateSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfoKHR* pSignalInfo) const;
+    bool PreCallValidateImportSemaphoreFdKHR(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) const;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    bool PreCallValidateImportSemaphoreWin32HandleKHR(
+        VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo) const;
+    bool PreCallValidateImportFenceWin32HandleKHR(VkDevice device,
+                                                  const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo) const;
+#endif  // VK_USE_PLATFORM_WIN32_KHR
+    bool PreCallValidateImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo) const;
+
+    bool PreCallValidateCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo,
+                                           const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain) const;
+    void PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator);
+    bool PreCallValidateGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount,
+                                              VkImage* pSwapchainImages) const;
+    void PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount,
+                                             VkImage* pSwapchainImages, VkResult result);
+    bool PreCallValidateQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR* pPresentInfo) const;
+    bool PreCallValidateCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
+                                                  const VkSwapchainCreateInfoKHR* pCreateInfos,
+                                                  const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains) const;
+    bool PreCallValidateAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore,
+                                            VkFence fence, uint32_t* pImageIndex) const;
+    bool PreCallValidateAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo,
+                                             uint32_t* pImageIndex) const;
+    bool PreCallValidateDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator) const;
+    bool PreCallValidateGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex,
+                                                           VkSurfaceKHR surface, VkBool32* pSupported) const;
+    bool PreCallValidateCreateDescriptorUpdateTemplate(VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo,
+                                                       const VkAllocationCallbacks* pAllocator,
+                                                       VkDescriptorUpdateTemplateKHR* pDescriptorUpdateTemplate) const;
+    bool PreCallValidateCreateDescriptorUpdateTemplateKHR(VkDevice device,
+                                                          const VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo,
+                                                          const VkAllocationCallbacks* pAllocator,
+                                                          VkDescriptorUpdateTemplateKHR* pDescriptorUpdateTemplate) const;
+    bool PreCallValidateUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
+                                                        VkDescriptorUpdateTemplate descriptorUpdateTemplate,
+                                                        const void* pData) const;
+    bool PreCallValidateUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
+                                                           VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+                                                           const void* pData) const;
+
+    bool PreCallValidateCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer,
+                                                            VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+                                                            VkPipelineLayout layout, uint32_t set, const void* pData) const;
+    bool PreCallValidateGetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex,
+                                                            uint32_t* pDisplayCount, VkDisplayKHR* pDisplays) const;
+    bool PreCallValidateGetDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex,
+                                                       VkDisplayPlaneCapabilitiesKHR* pCapabilities) const;
+    bool PreCallValidateGetDisplayPlaneCapabilities2KHR(VkPhysicalDevice physicalDevice,
+                                                        const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo,
+                                                        VkDisplayPlaneCapabilities2KHR* pCapabilities) const;
+    bool PreCallValidateCmdDebugMarkerEndEXT(VkCommandBuffer commandBuffer) const;
+
+    bool PreCallValidateCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query,
+                                                VkQueryControlFlags flags, uint32_t index) const;
+    void PreCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query,
+                                              VkQueryControlFlags flags, uint32_t index);
+    bool PreCallValidateCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query,
+                                              uint32_t index) const;
+
+    bool PreCallValidateCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle,
+                                                  uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles) const;
+    bool PreCallValidateCmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer,
+                                                 const VkSampleLocationsInfoEXT* pSampleLocationsInfo) const;
+    bool PreCallValidateCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+                                                VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
+                                                uint32_t stride) const;
+    bool PreCallValidateCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask) const;
+    bool PreCallValidateCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+                                                   uint32_t drawCount, uint32_t stride) const;
+    bool PreCallValidateCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+                                                        VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
+                                                        uint32_t stride) const;
+    bool PreCallValidateGetBufferDeviceAddressEXT(VkDevice device, const VkBufferDeviceAddressInfoEXT* pInfo) const;
+    bool PreCallValidateGetBufferDeviceAddressKHR(VkDevice device, const VkBufferDeviceAddressInfoKHR* pInfo) const;
+    bool PreCallValidateGetBufferOpaqueCaptureAddressKHR(VkDevice device, const VkBufferDeviceAddressInfoKHR* pInfo) const;
+    bool PreCallValidateGetDeviceMemoryOpaqueCaptureAddressKHR(VkDevice device,
+                                                               const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo) const;
+    bool ValidateCmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask, const char* func_name) const;
+    bool PreCallValidateCmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask) const;
+    bool PreCallValidateCmdSetDeviceMaskKHR(VkCommandBuffer commandBuffer, uint32_t deviceMask) const;
+    bool PreCallValidateGetSemaphoreCounterValueKHR(VkDevice device, VkSemaphore semaphore, uint64_t* pValue) const;
+    bool ValidateComputeWorkGroupSizes(const SHADER_MODULE_STATE* shader) const;
+
+    bool ValidateQueryRange(VkDevice device, VkQueryPool queryPool, uint32_t totalCount, uint32_t firstQuery, uint32_t queryCount,
+                            const char* vuid_badfirst, const char* vuid_badrange) const;
+    bool PreCallValidateResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) const;
+
+    bool ValidateComputeWorkGroupInvocations(CMD_BUFFER_STATE* cb_state, uint32_t groupCountX, uint32_t groupCountY,
+                                             uint32_t groupCountZ);
+    bool ValidateQueryPoolStride(const std::string& vuid_not_64, const std::string& vuid_64, const VkDeviceSize stride,
+                                 const char* parameter_name, const uint64_t parameter_value, const VkQueryResultFlags flags) const;
+    bool ValidateCmdDrawStrideWithStruct(VkCommandBuffer commandBuffer, const std::string& vuid, const uint32_t stride,
+                                         const char* struct_name, const uint32_t struct_size) const;
+    bool ValidateCmdDrawStrideWithBuffer(VkCommandBuffer commandBuffer, const std::string& vuid, const uint32_t stride,
+                                         const char* struct_name, const uint32_t struct_size, const uint32_t drawCount,
+                                         const VkDeviceSize offset, const BUFFER_STATE* buffer_state) const;
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    bool PreCallValidateGetAndroidHardwareBufferPropertiesANDROID(VkDevice device, const struct AHardwareBuffer* buffer,
+                                                                  VkAndroidHardwareBufferPropertiesANDROID* pProperties) const;
+    void PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(VkDevice device, const struct AHardwareBuffer* buffer,
+                                                                 VkAndroidHardwareBufferPropertiesANDROID* pProperties,
+                                                                 VkResult result);
+    bool PreCallValidateGetMemoryAndroidHardwareBufferANDROID(VkDevice device,
+                                                              const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
+                                                              struct AHardwareBuffer** pBuffer) const;
+#endif  // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    bool PreCallValidateGetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex,
+                                                                       struct wl_display* display) const;
+#endif  // VK_USE_PLATFORM_WAYLAND_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    bool PreCallValidateGetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice,
+                                                                     uint32_t queueFamilyIndex) const;
+#endif  // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    bool PreCallValidateGetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex,
+                                                                   xcb_connection_t* connection, xcb_visualid_t visual_id) const;
+#endif  // VK_USE_PLATFORM_XCB_KHR
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    bool PreCallValidateGetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex,
+                                                                    Display* dpy, VisualID visualID) const;
+#endif  // VK_USE_PLATFORM_XLIB_KHR
+
+};  // Class CoreChecks
diff --git a/src/third_party/vulkan-validation-layers/src/layers/core_validation_error_enums.h b/src/third_party/vulkan-validation-layers/src/layers/core_validation_error_enums.h
new file mode 100644
index 0000000..f06a190
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/core_validation_error_enums.h
@@ -0,0 +1,289 @@
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (C) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
+ * Author: Tobin Ehlis <tobine@google.com>
+ * Author: Chris Forbes <chrisf@ijw.co.nz>
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ * Author: John Zulauf <jzulauf@lunarg.com>
+ * Author: Camden Stocker <camden@lunarg.com>
+ */
+#ifndef CORE_VALIDATION_ERROR_ENUMS_H_
+#define CORE_VALIDATION_ERROR_ENUMS_H_
+
+// Suppress unused warning on Linux
+#if defined(__GNUC__)
+#define DECORATE_UNUSED __attribute__((unused))
+#else
+#define DECORATE_UNUSED
+#endif
+
+// clang-format off
+
+static const char DECORATE_UNUSED *kVUID_Core_MemTrack_FenceState = "UNASSIGNED-CoreValidation-MemTrack-FenceState";
+static const char DECORATE_UNUSED *kVUID_Core_MemTrack_InvalidMap = "UNASSIGNED-CoreValidation-MemTrack-InvalidMap";
+static const char DECORATE_UNUSED *kVUID_Core_MemTrack_InvalidState = "UNASSIGNED-CoreValidation-MemTrack-InvalidState";
+static const char DECORATE_UNUSED *kVUID_Core_MemTrack_InvalidUsageFlag = "UNASSIGNED-CoreValidation-MemTrack-InvalidUsageFlag";
+static const char DECORATE_UNUSED *kVUID_Core_MemTrack_RebindObject = "UNASSIGNED-CoreValidation-MemTrack-RebindObject";
+// Previously defined but unused - uncomment as needed
+//static const char DECORATE_UNUSED *kVUID_Core_MemTrack_InternalError = "UNASSIGNED-CoreValidation-MemTrack-InternalError";
+//static const char DECORATE_UNUSED *kVUID_Core_MemTrack_InvalidCB = "UNASSIGNED-CoreValidation-MemTrack-InvalidCB";
+//static const char DECORATE_UNUSED *kVUID_Core_MemTrack_InvalidMemObj = "UNASSIGNED-CoreValidation-MemTrack-InvalidMemObj";
+//static const char DECORATE_UNUSED *kVUID_Core_MemTrack_InvalidMemRegion = "UNASSIGNED-CoreValidation-MemTrack-InvalidMemRegion";
+//static const char DECORATE_UNUSED *kVUID_Core_MemTrack_InvalidMemType = "UNASSIGNED-CoreValidation-MemTrack-InvalidMemType";
+//static const char DECORATE_UNUSED *kVUID_Core_MemTrack_InvalidObject = "UNASSIGNED-CoreValidation-MemTrack-InvalidObject";
+//static const char DECORATE_UNUSED *kVUID_Core_MemTrack_MemoryLeak = "UNASSIGNED-CoreValidation-MemTrack-MemoryLeak";
+//static const char DECORATE_UNUSED *kVUID_Core_MemTrack_ObjNotBound = "UNASSIGNED-CoreValidation-MemTrack-ObjNotBound";
+//static const char DECORATE_UNUSED *kVUID_Core_MemTrack_ResetCBWhileInFlight = "UNASSIGNED-CoreValidation-MemTrack-ResetCBWhileInFlight";
+//static const char DECORATE_UNUSED *kVUID_Core_MemTrack_InvalidAliasing = "UNASSIGNED-CoreValidation-MemTrack-InvalidAliasing";
+//static const char DECORATE_UNUSED *kVUID_Core_MemTrack_FreedMemRef = "UNASSIGNED-CoreValidation-MemTrack-FreedMemRef";
+
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_ClearCmdBeforeDraw = "UNASSIGNED-CoreValidation-DrawState-ClearCmdBeforeDraw";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_CommandBufferSingleSubmitViolation = "UNASSIGNED-CoreValidation-DrawState-CommandBufferSingleSubmitViolation";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_DescriptorSetNotBound = "UNASSIGNED-CoreValidation-DrawState-DescriptorSetNotBound";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_DescriptorSetNotUpdated = "UNASSIGNED-CoreValidation-DrawState-DescriptorSetNotUpdated";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_DoubleDestroy = "UNASSIGNED-CoreValidation-DrawState-DoubleDestroy";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_ExtensionNotEnabled = "UNASSIGNED-CoreValidation-DrawState-ExtensionNotEnabled";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_InternalError = "UNASSIGNED-CoreValidation-DrawState-InternalError";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidBarrier = "UNASSIGNED-CoreValidation-DrawState-InvalidBarrier";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidBuffer = "UNASSIGNED-CoreValidation-DrawState-InvalidBuffer";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidCommandBuffer = "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidCommandBufferSimultaneousUse = "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBufferSimultaneousUse";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidDescriptorSet = "UNASSIGNED-CoreValidation-DrawState-InvalidDescriptorSet";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidEvent = "UNASSIGNED-CoreValidation-DrawState-InvalidEvent";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidExtents = "UNASSIGNED-CoreValidation-DrawState-InvalidExtents";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidFeature = "UNASSIGNED-CoreValidation-DrawState-InvalidFeature";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidFence = "UNASSIGNED-CoreValidation-DrawState-InvalidFence";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidImage = "UNASSIGNED-CoreValidation-DrawState-InvalidImage";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidImageAspect = "UNASSIGNED-CoreValidation-DrawState-InvalidImageAspect";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidImageLayout = "UNASSIGNED-CoreValidation-DrawState-InvalidImageLayout";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidLayout = "UNASSIGNED-CoreValidation-DrawState-InvalidLayout";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidPipeline = "UNASSIGNED-CoreValidation-DrawState-InvalidPipeline";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidPipelineCreateState = "UNASSIGNED-CoreValidation-DrawState-InvalidPipelineCreateState";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidQuery = "UNASSIGNED-CoreValidation-DrawState-InvalidQuery";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_QueryNotReset = "UNASSIGNED-CoreValidation-DrawState-QueryNotReset";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidQueueFamily = "UNASSIGNED-CoreValidation-DrawState-InvalidQueueFamily";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidRenderArea = "UNASSIGNED-CoreValidation-DrawState-InvalidRenderArea";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidRenderpass = "UNASSIGNED-CoreValidation-DrawState-InvalidRenderpass";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidSecondaryCommandBuffer = "UNASSIGNED-CoreValidation-DrawState-InvalidSecondaryCommandBuffer";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidSet = "UNASSIGNED-CoreValidation-DrawState-InvalidSet";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_MismatchedImageFormat = "UNASSIGNED-CoreValidation-DrawState-MismatchedImageFormat";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_MismatchedImageType = "UNASSIGNED-CoreValidation-DrawState-MismatchedImageType";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_MissingAttachmentReference = "UNASSIGNED-CoreValidation-DrawState-MissingAttachmentReference";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_NoActiveRenderpass = "UNASSIGNED-CoreValidation-DrawState-NoActiveRenderpass";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_NoEndCommandBuffer = "UNASSIGNED-CoreValidation-DrawState-NoEndCommandBuffer";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_NumSamplesMismatch = "UNASSIGNED-CoreValidation-DrawState-NumSamplesMismatch";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_OutOfMemory = "UNASSIGNED-CoreValidation-DrawState-OutOfMemory";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_PipelineLayoutsIncompatible = "UNASSIGNED-CoreValidation-DrawState-PipelineLayoutsIncompatible";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_QueueForwardProgress = "UNASSIGNED-CoreValidation-DrawState-QueueForwardProgress";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_SwapchainCreateBeforeQuery = "UNASSIGNED-CoreValidation-DrawState-SwapchainCreateBeforeQuery";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_SwapchainImageNotAcquired = "UNASSIGNED-CoreValidation-DrawState-SwapchainImageNotAcquired";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_SwapchainImagesNotFound = "UNASSIGNED-CoreValidation-DrawState-SwapchainImagesNotFound";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_SwapchainInvalidImage = "UNASSIGNED-CoreValidation-DrawState-SwapchainInvalidImage";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_SwapchainNoSyncForAcquire = "UNASSIGNED-CoreValidation-DrawState-SwapchainNoSyncForAcquire";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_SwapchainReplaced = "UNASSIGNED-CoreValidation-DrawState-SwapchainReplaced";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_SwapchainTooManyImages = "UNASSIGNED-CoreValidation-DrawState-SwapchainTooManyImages";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_SwapchainUnsupportedQueue = "UNASSIGNED-CoreValidation-DrawState-SwapchainUnsupportedQueue";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_ViewportScissorMismatch = "UNASSIGNED-CoreValidation-DrawState-ViewportScissorMismatch";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_VtxIndexOutOfBounds = "UNASSIGNED-CoreValidation-DrawState-VtxIndexOutOfBounds";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidVtxAttributeAlignment = "UNASSIGNED-CoreValidation-DrawState-InvalidVtxAttributeAlignment";
+static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidImageView = "UNASSIGNED-CoreValidation-DrawState-InvalidImageView";
+// Previously defined but unused - uncomment as needed
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_BeginCommandBufferInvalidState = "UNASSIGNED-CoreValidation-DrawState-BeginCommandBufferInvalidState";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_BlendNotBound = "UNASSIGNED-CoreValidation-DrawState-BlendNotBound";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_BufferinfoDescriptorError = "UNASSIGNED-CoreValidation-DrawState-BufferinfoDescriptorError";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_BufferviewDescriptorError = "UNASSIGNED-CoreValidation-DrawState-BufferviewDescriptorError";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_CantFreeFromNonFreePool = "UNASSIGNED-CoreValidation-DrawState-CantFreeFromNonFreePool";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_DepthBiasNotBound = "UNASSIGNED-CoreValidation-DrawState-DepthBiasNotBound";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_DepthBoundsNotBound = "UNASSIGNED-CoreValidation-DrawState-DepthBoundsNotBound";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_DescriptorPoolEmpty = "UNASSIGNED-CoreValidation-DrawState-DescriptorPoolEmpty";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_DescriptorStageflagsMismatch = "UNASSIGNED-CoreValidation-DrawState-DescriptorStageflagsMismatch";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_DescriptorTypeMismatch = "UNASSIGNED-CoreValidation-DrawState-DescriptorTypeMismatch";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_DescriptorUpdateOutOfBounds = "UNASSIGNED-CoreValidation-DrawState-DescriptorUpdateOutOfBounds";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_DisabledLogicOp = "UNASSIGNED-CoreValidation-DrawState-DisabledLogicOp";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_DynamicOffsetOverflow = "UNASSIGNED-CoreValidation-DrawState-DynamicOffsetOverflow";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_FramebufferIncompatible = "UNASSIGNED-CoreValidation-DrawState-FramebufferIncompatible";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_ImageTransferGranularity = "UNASSIGNED-CoreValidation-DrawState-ImageTransferGranularity";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_ImageviewDescriptorError = "UNASSIGNED-CoreValidation-DrawState-ImageviewDescriptorError";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_InconsistentImmutableSamplerUpdate = "UNASSIGNED-CoreValidation-DrawState-InconsistentImmutableSamplerUpdate";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_IndependentBlend = "UNASSIGNED-CoreValidation-DrawState-IndependentBlend";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_IndexBufferNotBound = "UNASSIGNED-CoreValidation-DrawState-IndexBufferNotBound";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidAttachmentIndex = "UNASSIGNED-CoreValidation-DrawState-InvalidAttachmentIndex";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidBufferMemoryOffset = "UNASSIGNED-CoreValidation-DrawState-InvalidBufferMemoryOffset";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidBufferView = "UNASSIGNED-CoreValidation-DrawState-InvalidBufferView";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidCommandBufferReset = "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBufferReset";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidCommandPool = "UNASSIGNED-CoreValidation-DrawState-InvalidCommandPool";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidCopyUpdate = "UNASSIGNED-CoreValidation-DrawState-InvalidCopyUpdate";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidDescriptorPool = "UNASSIGNED-CoreValidation-DrawState-InvalidDescriptorPool";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidDeviceMemory = "UNASSIGNED-CoreValidation-DrawState-InvalidDeviceMemory";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidFramebuffer = "UNASSIGNED-CoreValidation-DrawState-InvalidFramebuffer";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidFramebufferCreateInfo = "UNASSIGNED-CoreValidation-DrawState-InvalidFramebufferCreateInfo";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidImageFilter = "UNASSIGNED-CoreValidation-DrawState-InvalidImageFilter";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidImageSubrange = "UNASSIGNED-CoreValidation-DrawState-InvalidImageSubrange";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidImageUsage = "UNASSIGNED-CoreValidation-DrawState-InvalidImageUsage";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidImageView = "UNASSIGNED-CoreValidation-DrawState-InvalidImageView";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidQueryPool = "UNASSIGNED-CoreValidation-DrawState-InvalidQueryPool";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidQueueIndex = "UNASSIGNED-CoreValidation-DrawState-InvalidQueueIndex";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidRenderpassCmd = "UNASSIGNED-CoreValidation-DrawState-InvalidRenderpassCmd";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidSampler = "UNASSIGNED-CoreValidation-DrawState-InvalidSampler";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidStorageBufferOffset = "UNASSIGNED-CoreValidation-DrawState-InvalidStorageBufferOffset";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidSubpassIndex = "UNASSIGNED-CoreValidation-DrawState-InvalidSubpassIndex";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidTexelBufferOffset = "UNASSIGNED-CoreValidation-DrawState-InvalidTexelBufferOffset";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidUniformBufferOffset = "UNASSIGNED-CoreValidation-DrawState-InvalidUniformBufferOffset";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidUpdateStruct = "UNASSIGNED-CoreValidation-DrawState-InvalidUpdateStruct";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_InvalidWriteUpdate = "UNASSIGNED-CoreValidation-DrawState-InvalidWriteUpdate";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_LineWidthNotBound = "UNASSIGNED-CoreValidation-DrawState-LineWidthNotBound";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_MismatchedImageAspect = "UNASSIGNED-CoreValidation-DrawState-MismatchedImageAspect";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_NoBeginCommandBuffer = "UNASSIGNED-CoreValidation-DrawState-NoBeginCommandBuffer";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_ObjectInUse = "UNASSIGNED-CoreValidation-DrawState-ObjectInUse";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_PushConstantsError = "UNASSIGNED-CoreValidation-DrawState-PushConstantsError";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_RenderpassIncompatible = "UNASSIGNED-CoreValidation-DrawState-RenderpassIncompatible";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_RenderpassTooManyClearValues = "UNASSIGNED-CoreValidation-DrawState-RenderpassTooManyClearValues";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_SamplerDescriptorError = "UNASSIGNED-CoreValidation-DrawState-SamplerDescriptorError";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_ScissorNotBound = "UNASSIGNED-CoreValidation-DrawState-ScissorNotBound";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_StencilNotBound = "UNASSIGNED-CoreValidation-DrawState-StencilNotBound";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_SwapchainBadCompositeAlpha = "UNASSIGNED-CoreValidation-DrawState-SwapchainBadCompositeAlpha";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_SwapchainBadExtents = "UNASSIGNED-CoreValidation-DrawState-SwapchainBadExtents";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_SwapchainBadFormat = "UNASSIGNED-CoreValidation-DrawState-SwapchainBadFormat";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_SwapchainBadImageCount = "UNASSIGNED-CoreValidation-DrawState-SwapchainBadImageCount";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_SwapchainBadLayerCount = "UNASSIGNED-CoreValidation-DrawState-SwapchainBadLayerCount";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_SwapchainBadPreTransform = "UNASSIGNED-CoreValidation-DrawState-SwapchainBadPreTransform";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_SwapchainBadPresentMode = "UNASSIGNED-CoreValidation-DrawState-SwapchainBadPresentMode";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_SwapchainBadUsageFlags = "UNASSIGNED-CoreValidation-DrawState-SwapchainBadUsageFlags";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_ViewportNotBound = "UNASSIGNED-CoreValidation-DrawState-ViewportNotBound";
+//static const char DECORATE_UNUSED *kVUID_Core_DrawState_VtxIndexAlignmentError = "UNASSIGNED-CoreValidation-DrawState-VtxIndexAlignmentError";
+
+static const char DECORATE_UNUSED *kVUID_Core_Shader_DescriptorNotAccessibleFromStage = "UNASSIGNED-CoreValidation-Shader-DescriptorNotAccessibleFromStage";
+static const char DECORATE_UNUSED *kVUID_Core_Shader_DescriptorTypeMismatch = "UNASSIGNED-CoreValidation-Shader-DescriptorTypeMismatch";
+static const char DECORATE_UNUSED *kVUID_Core_Shader_ExceedDeviceLimit = "UNASSIGNED-CoreValidation-Shader-ExceedDeviceLimit";
+static const char DECORATE_UNUSED *kVUID_Core_Shader_FeatureNotEnabled = "UNASSIGNED-CoreValidation-Shader-FeatureNotEnabled";
+static const char DECORATE_UNUSED *kVUID_Core_Shader_InconsistentSpirv = "UNASSIGNED-CoreValidation-Shader-InconsistentSpirv";
+static const char DECORATE_UNUSED *kVUID_Core_Shader_InconsistentVi = "UNASSIGNED-CoreValidation-Shader-InconsistentVi";
+static const char DECORATE_UNUSED *kVUID_Core_Shader_InputAttachmentTypeMismatch = "UNASSIGNED-CoreValidation-Shader-InputAttachmentTypeMismatch";
+static const char DECORATE_UNUSED *kVUID_Core_Shader_InputNotProduced = "UNASSIGNED-CoreValidation-Shader-InputNotProduced";
+static const char DECORATE_UNUSED *kVUID_Core_Shader_InterfaceTypeMismatch = "UNASSIGNED-CoreValidation-Shader-InterfaceTypeMismatch";
+static const char DECORATE_UNUSED *kVUID_Core_Shader_MissingDescriptor = "UNASSIGNED-CoreValidation-Shader-MissingDescriptor";
+static const char DECORATE_UNUSED *kVUID_Core_Shader_MissingInputAttachment = "UNASSIGNED-CoreValidation-Shader-MissingInputAttachment";
+static const char DECORATE_UNUSED *kVUID_Core_Shader_OutputNotConsumed = "UNASSIGNED-CoreValidation-Shader-OutputNotConsumed";
+static const char DECORATE_UNUSED *kVUID_Core_Shader_PushConstantNotAccessibleFromStage = "UNASSIGNED-CoreValidation-Shader-PushConstantNotAccessibleFromStage";
+static const char DECORATE_UNUSED *kVUID_Core_Shader_PushConstantOutOfRange = "UNASSIGNED-CoreValidation-Shader-PushConstantOutOfRange";
+static const char DECORATE_UNUSED *kVUID_Core_Shader_MissingPointSizeBuiltIn = "UNASSIGNED-CoreValidation-Shader-PointSizeMissing";
+static const char DECORATE_UNUSED *kVUID_Core_Shader_PointSizeBuiltInOverSpecified = "UNASSIGNED-CoreValidation-Shader-PointSizeOverSpecified";
+static const char DECORATE_UNUSED *kVUID_Core_Shader_NoAlphaAtLocation0WithAlphaToCoverage = "UNASSIGNED-CoreValidation-Shader-NoAlphaAtLocation0WithAlphaToCoverage";
+static const char DECORATE_UNUSED *kVUID_Core_Shader_CooperativeMatrixSupportedStages = "UNASSIGNED-CoreValidation-Shader-CooperativeMatrixSupportedStages";
+static const char DECORATE_UNUSED *kVUID_Core_Shader_CooperativeMatrixType = "UNASSIGNED-CoreValidation-Shader-CooperativeMatrixType";
+static const char DECORATE_UNUSED *kVUID_Core_Shader_CooperativeMatrixMulAdd = "UNASSIGNED-CoreValidation-Shader-CooperativeMatrixMulAdd";
+// Previously defined but unused - uncomment as needed
+//static const char DECORATE_UNUSED *kVUID_Core_Shader_BadCapability = "UNASSIGNED-CoreValidation-Shader-BadCapability";
+//static const char DECORATE_UNUSED *kVUID_Core_Shader_BadSpecialization = "UNASSIGNED-CoreValidation-Shader-BadSpecialization";
+//static const char DECORATE_UNUSED *kVUID_Core_Shader_MissingEntrypoint = "UNASSIGNED-CoreValidation-Shader-MissingEntrypoint";
+//static const char DECORATE_UNUSED *kVUID_Core_Shader_NonSpirvShader = "UNASSIGNED-CoreValidation-Shader-NonSpirvShader";
+//static const char DECORATE_UNUSED *kVUID_Core_Shader_UnknownStage = "UNASSIGNED-CoreValidation-Shader-UnknownStage";
+
+static const char DECORATE_UNUSED *kVUID_Core_DevLimit_CountMismatch = "UNASSIGNED-CoreValidation-DevLimitCountMismatch";
+static const char DECORATE_UNUSED *kVUID_Core_DevLimit_InvalidFeatureRequested = "UNASSIGNED-CoreValidation-DevLimit-InvalidFeatureRequested";
+static const char DECORATE_UNUSED *kVUID_Core_DevLimit_InvalidInstance = "UNASSIGNED-CoreValidation-DevLimit-InvalidInstance";
+static const char DECORATE_UNUSED *kVUID_Core_DevLimit_MissingQueryCount = "UNASSIGNED-CoreValidation-DevLimit-MissingQueryCount";
+static const char DECORATE_UNUSED *kVUID_Core_DevLimit_MustQueryCount = "UNASSIGNED-CoreValidation-DevLimit-MustQueryCount";
+// Previously defined but unused - uncomment as needed
+//static const char DECORATE_UNUSED *kVUID_Core_DevLimit_InvalidPhysicalDevice = "UNASSIGNED-CoreValidation-DevLimit-InvalidPhysicalDevice";
+
+static const char DECORATE_UNUSED *kVUID_Core_Swapchain_GetSupportedDisplaysWithoutQuery = "UNASSIGNED-CoreValidation-Swapchain-GetSupportedDisplaysWithoutQuery";
+static const char DECORATE_UNUSED *kVUID_Core_Swapchain_InvalidCount = "UNASSIGNED-CoreValidation-SwapchainInvalidCount";
+static const char DECORATE_UNUSED *kVUID_Core_Swapchain_PriorCount = "UNASSIGNED-CoreValidation-SwapchainPriorCount";
+static const char DECORATE_UNUSED *kVUID_Core_Swapchain_PreTransform = "UNASSIGNED-CoreValidation-SwapchainPreTransform";
+static const char DECORATE_UNUSED *kVUID_Core_BindImageMemory_Swapchain = "UNASSIGNED-CoreValidation-BindImageMemory-Swapchain";
+
+// Previously defined but unused - uncomment as needed
+//static const char DECORATE_UNUSED *kVUID_Core_Swapchain_BadBool = "UNASSIGNED-CoreValidation-SwapchainBadBool";
+//static const char DECORATE_UNUSED *kVUID_Core_Swapchain_CreateSwapBadCompositeAlpha = "UNASSIGNED-CoreValidation-Swapchain-CreateSwapBadCompositeAlpha";
+//static const char DECORATE_UNUSED *kVUID_Core_Swapchain_CreateSwapBadImgArrayLayers = "UNASSIGNED-CoreValidation-Swapchain-CreateSwapBadImgArrayLayers";
+//static const char DECORATE_UNUSED *kVUID_Core_Swapchain_CreateSwapBadImgColorSpace = "UNASSIGNED-CoreValidation-Swapchain-CreateSwapBadImgColorSpace";
+//static const char DECORATE_UNUSED *kVUID_Core_Swapchain_CreateSwapBadImgFmtClrSp = "UNASSIGNED-CoreValidation-Swapchain-CreateSwapBadImgFmtClrSp";
+//static const char DECORATE_UNUSED *kVUID_Core_Swapchain_CreateSwapBadImgFormat = "UNASSIGNED-CoreValidation-Swapchain-CreateSwapBadImgFormat";
+//static const char DECORATE_UNUSED *kVUID_Core_Swapchain_CreateSwapBadImgUsageFlags = "UNASSIGNED-CoreValidation-Swapchain-CreateSwapBadImgUsageFlags";
+//static const char DECORATE_UNUSED *kVUID_Core_Swapchain_CreateSwapBadPreTransform = "UNASSIGNED-CoreValidation-Swapchain-CreateSwapBadPreTransform";
+//static const char DECORATE_UNUSED *kVUID_Core_Swapchain_CreateSwapBadPresentMode = "UNASSIGNED-CoreValidation-Swapchain-CreateSwapBadPresentMode";
+//static const char DECORATE_UNUSED *kVUID_Core_Swapchain_CreateSwapBadSharingMode = "UNASSIGNED-CoreValidation-Swapchain-CreateSwapBadSharingMode";
+//static const char DECORATE_UNUSED *kVUID_Core_Swapchain_CreateSwapBadSharingValues = "UNASSIGNED-CoreValidation-Swapchain-CreateSwapBadSharingValues";
+//static const char DECORATE_UNUSED *kVUID_Core_Swapchain_CreateSwapExtentsNoMatchWin = "UNASSIGNED-CoreValidation-Swapchain-CreateSwapExtentsNoMatchWin";
+//static const char DECORATE_UNUSED *kVUID_Core_Swapchain_CreateSwapOutOfBoundsExtents = "UNASSIGNED-CoreValidation-Swapchain-CreateSwapOutOfBoundsExtents";
+//static const char DECORATE_UNUSED *kVUID_Core_Swapchain_CreateSwapWithoutQuery = "UNASSIGNED-CoreValidation-Swapchain-CreateSwapWithoutQuery";
+//static const char DECORATE_UNUSED *kVUID_Core_Swapchain_CreateUnsupportedSurface = "UNASSIGNED-CoreValidation-SwapchainCreateUnsupportedSurface";
+//static const char DECORATE_UNUSED *kVUID_Core_Swapchain_DelObjectBeforeChildren = "UNASSIGNED-CoreValidation-Swapchain-DelObjectBeforeChildren";
+//static const char DECORATE_UNUSED *kVUID_Core_Swapchain_ExtNotEnabledButUsed = "UNASSIGNED-CoreValidation-Swapchain-ExtNotEnabledButUsed";
+//static const char DECORATE_UNUSED *kVUID_Core_Swapchain_InvalidHandle = "UNASSIGNED-CoreValidation-SwapchainInvalidHandle";
+//static const char DECORATE_UNUSED *kVUID_Core_Swapchain_NullPointer = "UNASSIGNED-CoreValidation-SwapchainNullPointer";
+//static const char DECORATE_UNUSED *kVUID_Core_Swapchain_PlaneIndexTooLarge = "UNASSIGNED-CoreValidation-Swapchain-PlaneIndexTooLarge";
+//static const char DECORATE_UNUSED *kVUID_Core_Swapchain_WrongNext = "UNASSIGNED-CoreValidation-SwapchainWrongNext";
+//static const char DECORATE_UNUSED *kVUID_Core_Swapchain_WrongStype = "UNASSIGNED-CoreValidation-SwapchainWrongStype";
+//static const char DECORATE_UNUSED *kVUID_Core_Swapchain_ZeroValue = "UNASSIGNED-CoreValidation-SwapchainZeroValue";
+
+static const char DECORATE_UNUSED *kVUID_Core_Image_InvalidFormatLimitsViolation = "UNASSIGNED-CoreValidation-Image-InvalidFormatLimitsViolation";
+static const char DECORATE_UNUSED *kVUID_Core_Image_ZeroAreaSubregion = "UNASSIGNED-CoreValidation-Image-ZeroAreaSubregion";
+
+static const char DECORATE_UNUSED *kVUID_Core_PushDescriptorUpdate_TemplateType = "UNASSIGNED-CoreValidation-vkCmdPushDescriptorSetWithTemplateKHR-descriptorUpdateTemplate-templateType";
+static const char DECORATE_UNUSED *kVUID_Core_PushDescriptorUpdate_Template_SetMismatched = "UNASSIGNED-CoreValidation-vkCmdPushDescriptorSetWithTemplateKHR-set";
+static const char DECORATE_UNUSED *kVUID_Core_PushDescriptorUpdate_Template_LayoutMismatched = "UNASSIGNED-CoreValidation-vkCmdPushDescriptorSetWithTemplateKHR-layout";
+
+static const char DECORATE_UNUSED *kVUID_Core_BindImage_InvalidMemReqQuery = "UNASSIGNED-CoreValidation-vkBindImageMemory-invalid-requirements";
+static const char DECORATE_UNUSED *kVUID_Core_BindImage_NoMemReqQuery = "UNASSIGNED-CoreValidation-vkBindImageMemory-memory-requirements";
+static const char DECORATE_UNUSED *kVUID_Core_BindBuffer_NoMemReqQuery = "UNASSIGNED-CoreValidation-vkBindBufferMemory-memory-requirements";
+
+static const char DECORATE_UNUSED *kVUID_Core_BindAccelNV_NoMemReqQuery = "UNASSIGNED-CoreValidation-vkBindAccelerationStructureMemoryNV-object-requirements";
+static const char DECORATE_UNUSED *kVUID_Core_CmdBuildAccelNV_NoScratchMemReqQuery = "UNASSIGNED-CoreValidation-vkCmdBuildAccelerationStructureNV-scratch-requirements";
+static const char DECORATE_UNUSED *kVUID_Core_CmdBuildAccelNV_NoUpdateMemReqQuery = "UNASSIGNED-CoreValidation-vkCmdBuildAccelerationStructureNV-update-requirements";
+
+// clang-format on
+
+#undef DECORATE_UNUSED
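+
+// The kVUID_* strings above are "UNASSIGNED" validation IDs: checks without a numbered
+// VUID in the Vulkan specification pass one of these strings as the VUID argument when a
+// message is reported. A minimal, self-contained sketch of that pairing is kept below in
+// disabled form; LogUnassigned is only a stand-in for the layer's real logging call,
+// whose exact signature varies between layer versions.
+#if 0
+#include <cstdio>
+
+static void LogUnassigned(const char *vuid, const char *message) {
+    // Stand-in for the layer's debug-report/debug-utils logging machinery.
+    std::fprintf(stderr, "Validation Error: [ %s ] %s\n", vuid, message);
+}
+
+static void ExampleUnassignedReport() {
+    LogUnassigned(kVUID_Core_DrawState_InvalidCommandBuffer,
+                  "Command buffer is in an invalid state for this call.");
+}
+#endif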
+
+#if 0  // Preserve these comments for possible inclusion in the spec reference string database
+enum SWAPCHAIN_ERROR {
+    SWAPCHAIN_INVALID_HANDLE,                     // Handle used that isn't currently valid
+    SWAPCHAIN_NULL_POINTER,                       // Pointer set to NULL, instead of being a valid pointer
+    SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,           // Did not enable WSI extension, but called WSI function
+    SWAPCHAIN_DEL_OBJECT_BEFORE_CHILDREN,         // Called vkDestroyDevice() before vkDestroySwapchainKHR()
+    SWAPCHAIN_CREATE_UNSUPPORTED_SURFACE,         // Called vkCreateSwapchainKHR() with a pCreateInfo->surface that wasn't supported
+    SWAPCHAIN_CREATE_SWAP_WITHOUT_QUERY,          // Called vkCreateSwapchainKHR() without calling a query
+    SWAPCHAIN_CREATE_SWAP_OUT_OF_BOUNDS_EXTENTS,  // Called vkCreateSwapchainKHR() with out-of-bounds imageExtent
+    SWAPCHAIN_CREATE_SWAP_EXTENTS_NO_MATCH_WIN,   // Called vkCreateSwapchainKHR w/imageExtent that doesn't match window's extent
+    SWAPCHAIN_CREATE_SWAP_BAD_PRE_TRANSFORM,      // Called vkCreateSwapchainKHR() with a non-supported preTransform
+    SWAPCHAIN_CREATE_SWAP_BAD_COMPOSITE_ALPHA,    // Called vkCreateSwapchainKHR() with a non-supported compositeAlpha
+    SWAPCHAIN_CREATE_SWAP_BAD_IMG_ARRAY_LAYERS,   // Called vkCreateSwapchainKHR() with a non-supported imageArrayLayers
+    SWAPCHAIN_CREATE_SWAP_BAD_IMG_USAGE_FLAGS,    // Called vkCreateSwapchainKHR() with a non-supported imageUsageFlags
+    SWAPCHAIN_CREATE_SWAP_BAD_IMG_COLOR_SPACE,    // Called vkCreateSwapchainKHR() with a non-supported imageColorSpace
+    SWAPCHAIN_CREATE_SWAP_BAD_IMG_FORMAT,         // Called vkCreateSwapchainKHR() with a non-supported imageFormat
+    SWAPCHAIN_CREATE_SWAP_BAD_IMG_FMT_CLR_SP,     // Called vkCreateSwapchainKHR() with a non-supported imageFormat/imageColorSpace combination
+    SWAPCHAIN_CREATE_SWAP_BAD_PRESENT_MODE,       // Called vkCreateSwapchainKHR() with a non-supported presentMode
+    SWAPCHAIN_CREATE_SWAP_BAD_SHARING_MODE,       // Called vkCreateSwapchainKHR() with a non-supported imageSharingMode
+    SWAPCHAIN_CREATE_SWAP_BAD_SHARING_VALUES,     // Called vkCreateSwapchainKHR() with bad values when imageSharingMode is
+                                                  // VK_SHARING_MODE_CONCURRENT
+    SWAPCHAIN_BAD_BOOL,       // VkBool32 that doesn't have value of VK_TRUE or VK_FALSE (e.g. is a non-zero form of true)
+    SWAPCHAIN_PRIOR_COUNT,    // Query must be called first to get value of pCount, then called second time
+    SWAPCHAIN_INVALID_COUNT,  // Second time a query called, the pCount value didn't match first time
+    SWAPCHAIN_WRONG_STYPE,    // The sType for a struct has the wrong value
+    SWAPCHAIN_WRONG_NEXT,     // The pNext for a struct is not NULL
+    SWAPCHAIN_ZERO_VALUE,     // A value should be non-zero
+    SWAPCHAIN_GET_SUPPORTED_DISPLAYS_WITHOUT_QUERY,  // vkGetDisplayPlaneSupportedDisplaysKHR should be called after querying
+                                                     // device display plane properties
+    SWAPCHAIN_PLANE_INDEX_TOO_LARGE,  // a planeIndex value is larger than what vkGetDisplayPlaneSupportedDisplaysKHR returns
+};
+#endif
+
+#endif  // CORE_VALIDATION_ERROR_ENUMS_H_
diff --git a/src/third_party/vulkan-validation-layers/src/layers/core_validation_types.h b/src/third_party/vulkan-validation-layers/src/layers/core_validation_types.h
new file mode 100644
index 0000000..809d94c
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/core_validation_types.h
@@ -0,0 +1,1716 @@
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (C) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
+ * Author: Tobin Ehlis <tobine@google.com>
+ * Author: Chris Forbes <chrisf@ijw.co.nz>
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ * Author: John Zulauf <jzulauf@lunarg.com>
+ */
+#ifndef CORE_VALIDATION_TYPES_H_
+#define CORE_VALIDATION_TYPES_H_
+
+#include "cast_utils.h"
+#include "hash_vk_types.h"
+#include "sparse_containers.h"
+#include "vk_safe_struct.h"
+#include "vulkan/vulkan.h"
+#include "vk_layer_logging.h"
+#include "vk_object_types.h"
+#include "vk_extension_helper.h"
+#include "vk_typemap_helper.h"
+#include "convert_to_renderpass2.h"
+#include "layer_chassis_dispatch.h"
+
+#include <array>
+#include <atomic>
+#include <functional>
+#include <list>
+#include <map>
+#include <memory>
+#include <set>
+#include <string.h>
+#include <unordered_map>
+#include <unordered_set>
+#include <vector>
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+#include "android_ndk_types.h"
+#endif  // VK_USE_PLATFORM_ANDROID_KHR
+
+// Fwd declarations -- including descriptor_set.h creates an ugly include loop
+namespace cvdescriptorset {
+class DescriptorSetLayoutDef;
+class DescriptorSetLayout;
+class DescriptorSet;
+}  // namespace cvdescriptorset
+
+struct CMD_BUFFER_STATE;
+class CoreChecks;
+class ValidationStateTracker;
+
+enum CALL_STATE {
+    UNCALLED,       // Function has not been called
+    QUERY_COUNT,    // Function called once to query a count
+    QUERY_DETAILS,  // Function called w/ a count to query details
+};
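+
+// CALL_STATE tracks Vulkan's two-call query idiom: an application first calls a vkGet*()
+// query with a null array to retrieve a count, then calls it again with an allocated array
+// to retrieve the details. A minimal sketch of the pattern being tracked, shown in disabled
+// form with vkGetSwapchainImagesKHR (whose state SWAPCHAIN_NODE records further below):
+#if 0
+static void ExampleTwoCallQuery(VkDevice device, VkSwapchainKHR swapchain) {
+    uint32_t count = 0;
+    vkGetSwapchainImagesKHR(device, swapchain, &count, nullptr);        // UNCALLED -> QUERY_COUNT
+    std::vector<VkImage> images(count);
+    vkGetSwapchainImagesKHR(device, swapchain, &count, images.data());  // QUERY_COUNT -> QUERY_DETAILS
+}
+#endif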
+
+class BASE_NODE {
+  public:
+    // Track when object is being used by an in-flight command buffer
+    std::atomic_int in_use;
+    // Track command buffers that this object is bound to
+    //  binding initialized when cmd referencing object is bound to command buffer
+    //  binding removed when command buffer is reset or destroyed
+    // When an object is destroyed, any bound cbs are set to INVALID.
+    // "int" value is an index into object_bindings where the corresponding
+    // backpointer to this node is stored.
+    small_unordered_map<CMD_BUFFER_STATE *, int, 8> cb_bindings;
+    // Set to true when the API-level object is destroyed, but this object may
+    // hang around until its shared_ptr refcount goes to zero.
+    bool destroyed;
+
+    BASE_NODE() {
+        in_use.store(0);
+        destroyed = false;
+    };
+};
+
+// Track command pools and their command buffers
+struct COMMAND_POOL_STATE : public BASE_NODE {
+    VkCommandPoolCreateFlags createFlags;
+    uint32_t queueFamilyIndex;
+    // Cmd buffers allocated from this pool
+    std::unordered_set<VkCommandBuffer> commandBuffers;
+};
+
+// Utilities for barriers and the command pool
+template <typename Barrier>
+static bool IsTransferOp(const Barrier *barrier) {
+    return barrier->srcQueueFamilyIndex != barrier->dstQueueFamilyIndex;
+}
+
+template <typename Barrier, bool assume_transfer = false>
+static bool TempIsReleaseOp(const COMMAND_POOL_STATE *pool, const Barrier *barrier) {
+    return (assume_transfer || IsTransferOp(barrier)) && (pool->queueFamilyIndex == barrier->srcQueueFamilyIndex);
+}
+
+template <typename Barrier, bool assume_transfer = false>
+static bool IsAcquireOp(const COMMAND_POOL_STATE *pool, const Barrier *barrier) {
+    return (assume_transfer || IsTransferOp(barrier)) && (pool->queueFamilyIndex == barrier->dstQueueFamilyIndex);
+}
+
+static inline bool QueueFamilyIsSpecial(const uint32_t queue_family_index) {
+    return (queue_family_index == VK_QUEUE_FAMILY_EXTERNAL_KHR) || (queue_family_index == VK_QUEUE_FAMILY_FOREIGN_EXT);
+}
+
+static inline bool QueueFamilyIsIgnored(uint32_t queue_family_index) { return queue_family_index == VK_QUEUE_FAMILY_IGNORED; }
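+
+// Worked example (informal): a barrier with srcQueueFamilyIndex = 0 and dstQueueFamilyIndex = 1
+// is a queue family ownership transfer (IsTransferOp). Recorded in a command pool created for
+// family 0 it is the release half of that transfer (TempIsReleaseOp), while recorded in a pool
+// created for family 1 it is the acquire half (IsAcquireOp).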
+
+// Intentionally ignore VulkanTypedHandle::node, it is optional
+inline bool operator==(const VulkanTypedHandle &a, const VulkanTypedHandle &b) NOEXCEPT {
+    return a.handle == b.handle && a.type == b.type;
+}
+namespace std {
+template <>
+struct hash<VulkanTypedHandle> {
+    size_t operator()(VulkanTypedHandle obj) const NOEXCEPT { return hash<uint64_t>()(obj.handle) ^ hash<uint32_t>()(obj.type); }
+};
+}  // namespace std
+
+// Flags describing requirements imposed by the pipeline on a descriptor. These
+// can't be checked at pipeline creation time as they depend on the Image or
+// ImageView bound.
+enum descriptor_req {
+    DESCRIPTOR_REQ_VIEW_TYPE_1D = 1 << VK_IMAGE_VIEW_TYPE_1D,
+    DESCRIPTOR_REQ_VIEW_TYPE_1D_ARRAY = 1 << VK_IMAGE_VIEW_TYPE_1D_ARRAY,
+    DESCRIPTOR_REQ_VIEW_TYPE_2D = 1 << VK_IMAGE_VIEW_TYPE_2D,
+    DESCRIPTOR_REQ_VIEW_TYPE_2D_ARRAY = 1 << VK_IMAGE_VIEW_TYPE_2D_ARRAY,
+    DESCRIPTOR_REQ_VIEW_TYPE_3D = 1 << VK_IMAGE_VIEW_TYPE_3D,
+    DESCRIPTOR_REQ_VIEW_TYPE_CUBE = 1 << VK_IMAGE_VIEW_TYPE_CUBE,
+    DESCRIPTOR_REQ_VIEW_TYPE_CUBE_ARRAY = 1 << VK_IMAGE_VIEW_TYPE_CUBE_ARRAY,
+
+    DESCRIPTOR_REQ_ALL_VIEW_TYPE_BITS = (1 << (VK_IMAGE_VIEW_TYPE_END_RANGE + 1)) - 1,
+
+    DESCRIPTOR_REQ_SINGLE_SAMPLE = 2 << VK_IMAGE_VIEW_TYPE_END_RANGE,
+    DESCRIPTOR_REQ_MULTI_SAMPLE = DESCRIPTOR_REQ_SINGLE_SAMPLE << 1,
+
+    DESCRIPTOR_REQ_COMPONENT_TYPE_FLOAT = DESCRIPTOR_REQ_MULTI_SAMPLE << 1,
+    DESCRIPTOR_REQ_COMPONENT_TYPE_SINT = DESCRIPTOR_REQ_COMPONENT_TYPE_FLOAT << 1,
+    DESCRIPTOR_REQ_COMPONENT_TYPE_UINT = DESCRIPTOR_REQ_COMPONENT_TYPE_SINT << 1,
+};
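+
+// The low-order bits mirror VkImageViewType, so a pipeline's requirement mask can be tested
+// against a bound view with a single shift. Minimal sketch in disabled form; CheckViewType is
+// not part of this header and only illustrates the intended bit layout:
+#if 0
+static bool CheckViewType(descriptor_req reqs, VkImageViewType bound_view_type) {
+    return (reqs & (1u << bound_view_type)) != 0;
+}
+#endif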
+
+extern unsigned DescriptorRequirementsBitsFromFormat(VkFormat fmt);
+
+typedef std::map<uint32_t, descriptor_req> BindingReqMap;
+
+struct DESCRIPTOR_POOL_STATE : BASE_NODE {
+    VkDescriptorPool pool;
+    uint32_t maxSets;        // Max descriptor sets allowed in this pool
+    uint32_t availableSets;  // Available descriptor sets in this pool
+
+    safe_VkDescriptorPoolCreateInfo createInfo;
+    std::unordered_set<cvdescriptorset::DescriptorSet *> sets;  // Collection of all sets in this pool
+    std::map<uint32_t, uint32_t> maxDescriptorTypeCount;        // Max # of descriptors of each type in this pool
+    std::map<uint32_t, uint32_t> availableDescriptorTypeCount;  // Available # of descriptors of each type in this pool
+
+    DESCRIPTOR_POOL_STATE(const VkDescriptorPool pool, const VkDescriptorPoolCreateInfo *pCreateInfo)
+        : pool(pool),
+          maxSets(pCreateInfo->maxSets),
+          availableSets(pCreateInfo->maxSets),
+          createInfo(pCreateInfo),
+          maxDescriptorTypeCount(),
+          availableDescriptorTypeCount() {
+        // Collect maximums per descriptor type.
+        for (uint32_t i = 0; i < createInfo.poolSizeCount; ++i) {
+            uint32_t typeIndex = static_cast<uint32_t>(createInfo.pPoolSizes[i].type);
+            // Same descriptor types can appear several times
+            maxDescriptorTypeCount[typeIndex] += createInfo.pPoolSizes[i].descriptorCount;
+            availableDescriptorTypeCount[typeIndex] = maxDescriptorTypeCount[typeIndex];
+        }
+    }
+};
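+
+// Worked example (informal): with pPoolSizes = { {UNIFORM_BUFFER, 4}, {UNIFORM_BUFFER, 2} },
+// the constructor loop above accumulates maxDescriptorTypeCount[UNIFORM_BUFFER] = 6, and
+// availableDescriptorTypeCount starts out equal to that maximum.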
+
+// Generic memory binding struct to track objects bound to objects
+struct MEM_BINDING {
+    VkDeviceMemory mem;
+    VkDeviceSize offset;
+    VkDeviceSize size;
+};
+
+struct BufferBinding {
+    VkBuffer buffer;
+    VkDeviceSize size;
+    VkDeviceSize offset;
+};
+
+struct IndexBufferBinding : BufferBinding {
+    VkIndexType index_type;
+};
+
+inline bool operator==(MEM_BINDING a, MEM_BINDING b) NOEXCEPT { return a.mem == b.mem && a.offset == b.offset && a.size == b.size; }
+
+namespace std {
+template <>
+struct hash<MEM_BINDING> {
+    size_t operator()(MEM_BINDING mb) const NOEXCEPT {
+        auto intermediate = hash<uint64_t>()(reinterpret_cast<uint64_t &>(mb.mem)) ^ hash<uint64_t>()(mb.offset);
+        return intermediate ^ hash<uint64_t>()(mb.size);
+    }
+};
+}  // namespace std
+
+// Superclass for bindable object state (currently images and buffers)
+class BINDABLE : public BASE_NODE {
+  public:
+    bool sparse;  // Is this object being bound with sparse memory or not?
+    // Non-sparse binding data
+    MEM_BINDING binding;
+    // Memory requirements for this BINDABLE
+    VkMemoryRequirements requirements;
+    // bool to track if memory requirements were checked
+    bool memory_requirements_checked;
+    // Tracks external memory types creating resource
+    VkExternalMemoryHandleTypeFlags external_memory_handle;
+    // Sparse binding data, initially just tracking MEM_BINDING per mem object
+    //  There's more data for sparse bindings, so a better long-term solution is needed
+    // TODO : Need to update solution to track all sparse binding data
+    std::unordered_set<MEM_BINDING> sparse_bindings;
+
+    small_unordered_set<VkDeviceMemory, 1> bound_memory_set_;
+
+    BINDABLE()
+        : sparse(false),
+          binding{},
+          requirements{},
+          memory_requirements_checked(false),
+          external_memory_handle(0),
+          sparse_bindings{},
+          bound_memory_set_{} {};
+
+    // Update the cached set of memory bindings.
+    // Code that changes binding.mem or sparse_bindings must call UpdateBoundMemorySet()
+    void UpdateBoundMemorySet() {
+        bound_memory_set_.clear();
+        if (!sparse) {
+            bound_memory_set_.insert(binding.mem);
+        } else {
+            for (auto sb : sparse_bindings) {
+                bound_memory_set_.insert(sb.mem);
+            }
+        }
+    }
+
+    // Return unordered set of memory objects that are bound
+    // Instead of creating a set from scratch each query, return the cached one
+    const small_unordered_set<VkDeviceMemory, 1> &GetBoundMemory() const { return bound_memory_set_; }
+};
+
+class BUFFER_STATE : public BINDABLE {
+  public:
+    VkBuffer buffer;
+    VkBufferCreateInfo createInfo;
+    VkDeviceAddress deviceAddress;
+    BUFFER_STATE(VkBuffer buff, const VkBufferCreateInfo *pCreateInfo) : buffer(buff), createInfo(*pCreateInfo) {
+        if ((createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) && (createInfo.queueFamilyIndexCount > 0)) {
+            uint32_t *pQueueFamilyIndices = new uint32_t[createInfo.queueFamilyIndexCount];
+            for (uint32_t i = 0; i < createInfo.queueFamilyIndexCount; i++) {
+                pQueueFamilyIndices[i] = pCreateInfo->pQueueFamilyIndices[i];
+            }
+            createInfo.pQueueFamilyIndices = pQueueFamilyIndices;
+        }
+
+        if (createInfo.flags & VK_BUFFER_CREATE_SPARSE_BINDING_BIT) {
+            sparse = true;
+        }
+
+        auto *externalMemoryInfo = lvl_find_in_chain<VkExternalMemoryBufferCreateInfo>(pCreateInfo->pNext);
+        if (externalMemoryInfo) {
+            external_memory_handle = externalMemoryInfo->handleTypes;
+        }
+    };
+
+    BUFFER_STATE(BUFFER_STATE const &rh_obj) = delete;
+
+    ~BUFFER_STATE() {
+        if ((createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) && (createInfo.queueFamilyIndexCount > 0)) {
+            delete[] createInfo.pQueueFamilyIndices;
+            createInfo.pQueueFamilyIndices = nullptr;
+        }
+    };
+};
+
+class BUFFER_VIEW_STATE : public BASE_NODE {
+  public:
+    VkBufferView buffer_view;
+    VkBufferViewCreateInfo create_info;
+    std::shared_ptr<BUFFER_STATE> buffer_state;
+    BUFFER_VIEW_STATE(const std::shared_ptr<BUFFER_STATE> &bf, VkBufferView bv, const VkBufferViewCreateInfo *ci)
+        : buffer_view(bv), create_info(*ci), buffer_state(bf){};
+    BUFFER_VIEW_STATE(const BUFFER_VIEW_STATE &rh_obj) = delete;
+};
+
+struct SAMPLER_STATE : public BASE_NODE {
+    VkSampler sampler;
+    VkSamplerCreateInfo createInfo;
+    VkSamplerYcbcrConversion samplerConversion = VK_NULL_HANDLE;
+
+    SAMPLER_STATE(const VkSampler *ps, const VkSamplerCreateInfo *pci) : sampler(*ps), createInfo(*pci) {
+        auto *conversionInfo = lvl_find_in_chain<VkSamplerYcbcrConversionInfo>(pci->pNext);
+        if (conversionInfo) samplerConversion = conversionInfo->conversion;
+    }
+};
+
+class IMAGE_STATE : public BINDABLE {
+  public:
+    VkImage image;
+    VkImageCreateInfo createInfo;
+    bool valid;               // For a swapchain image, backing-memory validity is tracked here since it has no DEVICE_MEMORY_STATE
+    bool acquired;            // If this is a swapchain image, has it been acquired by the app.
+    bool shared_presentable;  // True for a front-buffered swapchain image
+    bool layout_locked;       // A front-buffered image that has been presented can never have layout transitioned
+    bool get_sparse_reqs_called;         // Track if GetImageSparseMemoryRequirements() has been called for this image
+    bool sparse_metadata_required;       // Track if sparse metadata aspect is required for this image
+    bool sparse_metadata_bound;          // Track if sparse metadata aspect is bound to this image
+    bool imported_ahb;                   // True if image was imported from an Android Hardware Buffer
+    bool has_ahb_format;                 // True if image was created with an external Android format
+    uint64_t ahb_format;                 // External Android format, if provided
+    VkImageSubresourceRange full_range;  // The normalized ISR for all levels, layers (slices), and aspects
+    VkSwapchainKHR create_from_swapchain;
+    VkSwapchainKHR bind_swapchain;
+    uint32_t bind_swapchain_imageIndex;
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    uint64_t external_format_android;
+#endif  // VK_USE_PLATFORM_ANDROID_KHR
+
+    std::vector<VkSparseImageMemoryRequirements> sparse_requirements;
+    IMAGE_STATE(VkImage img, const VkImageCreateInfo *pCreateInfo);
+    IMAGE_STATE(IMAGE_STATE const &rh_obj) = delete;
+
+    std::unordered_set<VkImage> aliasing_images;
+    bool IsCompatibleAliasing(IMAGE_STATE *other_image_state);
+
+    bool IsCreateInfoEqual(const VkImageCreateInfo &other_createInfo) const;
+    bool IsCreateInfoDedicatedAllocationImageAliasingCompatible(const VkImageCreateInfo &other_createInfo) const;
+
+    inline bool IsImageTypeEqual(const VkImageCreateInfo &other_createInfo) const {
+        return createInfo.imageType == other_createInfo.imageType;
+    }
+    inline bool IsFormatEqual(const VkImageCreateInfo &other_createInfo) const {
+        return createInfo.format == other_createInfo.format;
+    }
+    inline bool IsMipLevelsEqual(const VkImageCreateInfo &other_createInfo) const {
+        return createInfo.mipLevels == other_createInfo.mipLevels;
+    }
+    inline bool IsUsageEqual(const VkImageCreateInfo &other_createInfo) const { return createInfo.usage == other_createInfo.usage; }
+    inline bool IsSamplesEqual(const VkImageCreateInfo &other_createInfo) const {
+        return createInfo.samples == other_createInfo.samples;
+    }
+    inline bool IsTilingEqual(const VkImageCreateInfo &other_createInfo) const {
+        return createInfo.tiling == other_createInfo.tiling;
+    }
+    inline bool IsArrayLayersEqual(const VkImageCreateInfo &other_createInfo) const {
+        return createInfo.arrayLayers == other_createInfo.arrayLayers;
+    }
+    inline bool IsInitialLayoutEqual(const VkImageCreateInfo &other_createInfo) const {
+        return createInfo.initialLayout == other_createInfo.initialLayout;
+    }
+    inline bool IsSharingModeEqual(const VkImageCreateInfo &other_createInfo) const {
+        return createInfo.sharingMode == other_createInfo.sharingMode;
+    }
+    inline bool IsExtentEqual(const VkImageCreateInfo &other_createInfo) const {
+        return (createInfo.extent.width == other_createInfo.extent.width) &&
+               (createInfo.extent.height == other_createInfo.extent.height) &&
+               (createInfo.extent.depth == other_createInfo.extent.depth);
+    }
+    inline bool IsQueueFamilyIndicesEqual(const VkImageCreateInfo &other_createInfo) const {
+        return (createInfo.queueFamilyIndexCount == other_createInfo.queueFamilyIndexCount) &&
+               (createInfo.queueFamilyIndexCount == 0 ||
+                memcmp(createInfo.pQueueFamilyIndices, other_createInfo.pQueueFamilyIndices,
+                       createInfo.queueFamilyIndexCount * sizeof(createInfo.pQueueFamilyIndices[0])) == 0);
+    }
+
+    ~IMAGE_STATE() {
+        if ((createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) && (createInfo.queueFamilyIndexCount > 0)) {
+            delete[] createInfo.pQueueFamilyIndices;
+            createInfo.pQueueFamilyIndices = nullptr;
+        }
+    };
+};
+
+class IMAGE_VIEW_STATE : public BASE_NODE {
+  public:
+    VkImageView image_view;
+    VkImageViewCreateInfo create_info;
+    VkImageSubresourceRange normalized_subresource_range;
+    VkSampleCountFlagBits samples;
+    unsigned descriptor_format_bits;
+    VkSamplerYcbcrConversion samplerConversion;  // Handle of the ycbcr sampler conversion the image was created with, if any
+    std::shared_ptr<IMAGE_STATE> image_state;
+    IMAGE_VIEW_STATE(const std::shared_ptr<IMAGE_STATE> &image_state, VkImageView iv, const VkImageViewCreateInfo *ci);
+    IMAGE_VIEW_STATE(const IMAGE_VIEW_STATE &rh_obj) = delete;
+};
+
+class ACCELERATION_STRUCTURE_STATE : public BINDABLE {
+  public:
+    VkAccelerationStructureNV acceleration_structure;
+    safe_VkAccelerationStructureCreateInfoNV create_info;
+    bool memory_requirements_checked = false;
+    VkMemoryRequirements2KHR memory_requirements;
+    bool build_scratch_memory_requirements_checked = false;
+    VkMemoryRequirements2KHR build_scratch_memory_requirements;
+    bool update_scratch_memory_requirements_checked = false;
+    VkMemoryRequirements2KHR update_scratch_memory_requirements;
+    bool built = false;
+    safe_VkAccelerationStructureInfoNV build_info;
+    uint64_t opaque_handle = 0;
+    ACCELERATION_STRUCTURE_STATE(VkAccelerationStructureNV as, const VkAccelerationStructureCreateInfoNV *ci)
+        : acceleration_structure(as),
+          create_info(ci),
+          memory_requirements{},
+          build_scratch_memory_requirements_checked{},
+          update_scratch_memory_requirements_checked{} {}
+    ACCELERATION_STRUCTURE_STATE(const ACCELERATION_STRUCTURE_STATE &rh_obj) = delete;
+};
+
+struct MemRange {
+    VkDeviceSize offset = 0;
+    VkDeviceSize size = 0;
+};
+
+// Data struct for tracking memory object
+struct DEVICE_MEMORY_STATE : public BASE_NODE {
+    void *object;  // Dispatchable object used to create this memory (device or swapchain)
+    VkDeviceMemory mem;
+    safe_VkMemoryAllocateInfo alloc_info;
+    bool is_dedicated;
+    VkBuffer dedicated_buffer;
+    VkImage dedicated_image;
+    bool is_export;
+    VkExternalMemoryHandleTypeFlags export_handle_type_flags;
+    std::unordered_set<VulkanTypedHandle> obj_bindings;  // objects bound to this memory
+    // Convenience vectors of handles to speed up iterating over objects independently
+    std::unordered_set<VkImage> bound_images;
+    // TODO: Convert the two sets to the correct types.
+    std::unordered_set<uint64_t> bound_buffers;
+    std::unordered_set<uint64_t> bound_acceleration_structures;
+
+    MemRange mapped_range;
+    void *shadow_copy_base;    // Base of layer's allocation for guard band, data, and alignment space
+    void *shadow_copy;         // Pointer to start of guard-band data before mapped region
+    uint64_t shadow_pad_size;  // Size of the guard-band data before and after actual data. It MUST be a
+                               // multiple of limits.minMemoryMapAlignment
+    void *p_driver_data;       // Pointer to application's actual memory
+
+    DEVICE_MEMORY_STATE(void *disp_object, const VkDeviceMemory in_mem, const VkMemoryAllocateInfo *p_alloc_info)
+        : object(disp_object),
+          mem(in_mem),
+          alloc_info(p_alloc_info),
+          is_dedicated(false),
+          dedicated_buffer(VK_NULL_HANDLE),
+          dedicated_image(VK_NULL_HANDLE),
+          is_export(false),
+          export_handle_type_flags(0),
+          mapped_range{},
+          shadow_copy_base(0),
+          shadow_copy(0),
+          shadow_pad_size(0),
+          p_driver_data(0){};
+};
+
+struct SWAPCHAIN_IMAGE {
+    VkImage image;
+    std::unordered_set<VkImage> bound_images;
+};
+
+class SWAPCHAIN_NODE : public BASE_NODE {
+  public:
+    safe_VkSwapchainCreateInfoKHR createInfo;
+    VkSwapchainKHR swapchain;
+    std::vector<SWAPCHAIN_IMAGE> images;
+    bool retired = false;
+    bool shared_presentable = false;
+    CALL_STATE vkGetSwapchainImagesKHRState = UNCALLED;
+    uint32_t get_swapchain_image_count = 0;
+    SWAPCHAIN_NODE(const VkSwapchainCreateInfoKHR *pCreateInfo, VkSwapchainKHR swapchain)
+        : createInfo(pCreateInfo), swapchain(swapchain) {}
+};
+
+struct ColorAspectTraits {
+    static const uint32_t kAspectCount = 1;
+    static int Index(VkImageAspectFlags mask) { return 0; };
+    static VkImageAspectFlags AspectMask() { return VK_IMAGE_ASPECT_COLOR_BIT; }
+    static const std::array<VkImageAspectFlagBits, kAspectCount> &AspectBits() {
+        static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{{VK_IMAGE_ASPECT_COLOR_BIT}};
+        return kAspectBits;
+    }
+};
+
+struct DepthAspectTraits {
+    static const uint32_t kAspectCount = 1;
+    static int Index(VkImageAspectFlags mask) { return 0; };
+    static VkImageAspectFlags AspectMask() { return VK_IMAGE_ASPECT_DEPTH_BIT; }
+    static const std::array<VkImageAspectFlagBits, kAspectCount> &AspectBits() {
+        static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{{VK_IMAGE_ASPECT_DEPTH_BIT}};
+        return kAspectBits;
+    }
+};
+
+struct StencilAspectTraits {
+    static const uint32_t kAspectCount = 1;
+    static int Index(VkImageAspectFlags mask) { return 0; };
+    static VkImageAspectFlags AspectMask() { return VK_IMAGE_ASPECT_STENCIL_BIT; }
+    static const std::array<VkImageAspectFlagBits, kAspectCount> &AspectBits() {
+        static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{{VK_IMAGE_ASPECT_STENCIL_BIT}};
+        return kAspectBits;
+    }
+};
+
+struct DepthStencilAspectTraits {
+    // VK_IMAGE_ASPECT_DEPTH_BIT   = 0x00000002, (>> 1) - 1 -> 0
+    // VK_IMAGE_ASPECT_STENCIL_BIT = 0x00000004, (>> 1) - 1 -> 1
+    static const uint32_t kAspectCount = 2;
+    static uint32_t Index(VkImageAspectFlags mask) {
+        uint32_t index = (mask >> 1) - 1;
+        assert((index == 0) || (index == 1));
+        return index;
+    };
+    static VkImageAspectFlags AspectMask() { return VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT; }
+    static const std::array<VkImageAspectFlagBits, kAspectCount> &AspectBits() {
+        static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{
+            {VK_IMAGE_ASPECT_DEPTH_BIT, VK_IMAGE_ASPECT_STENCIL_BIT}};
+        return kAspectBits;
+    }
+};
+
+struct Multiplane2AspectTraits {
+    // VK_IMAGE_ASPECT_PLANE_0_BIT = 0x00000010, >> 4 - 1 -> 0
+    // VK_IMAGE_ASPECT_PLANE_1_BIT = 0x00000020, >> 4 - 1 -> 1
+    static const uint32_t kAspectCount = 2;
+    static uint32_t Index(VkImageAspectFlags mask) {
+        uint32_t index = (mask >> 4) - 1;
+        assert((index == 0) || (index == 1));
+        return index;
+    };
+    static VkImageAspectFlags AspectMask() { return VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT; }
+    static const std::array<VkImageAspectFlagBits, kAspectCount> &AspectBits() {
+        static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{
+            {VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT}};
+        return kAspectBits;
+    }
+};
+
+struct Multiplane3AspectTraits {
+    // VK_IMAGE_ASPECT_PLANE_0_BIT = 0x00000010, >> 4 - 1 -> 0
+    // VK_IMAGE_ASPECT_PLANE_1_BIT = 0x00000020, >> 4 - 1 -> 1
+    // VK_IMAGE_ASPECT_PLANE_2_BIT = 0x00000040, >> 4 - 1 -> 3 (clamped to 2 below)
+    static const uint32_t kAspectCount = 3;
+    static uint32_t Index(VkImageAspectFlags mask) {
+        uint32_t index = (mask >> 4) - 1;
+        index = index > 2 ? 2 : index;
+        assert((index == 0) || (index == 1) || (index == 2));
+        return index;
+    };
+    static VkImageAspectFlags AspectMask() {
+        return VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT | VK_IMAGE_ASPECT_PLANE_2_BIT;
+    }
+    static const std::array<VkImageAspectFlagBits, kAspectCount> &AspectBits() {
+        static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{
+            {VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, VK_IMAGE_ASPECT_PLANE_2_BIT}};
+        return kAspectBits;
+    }
+};
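+
+// Worked example (informal): DepthStencilAspectTraits::Index(VK_IMAGE_ASPECT_STENCIL_BIT) is
+// (0x4 >> 1) - 1 = 1, so depth data occupies slot 0 and stencil data slot 1 of a
+// kAspectCount-sized array. ImageSubresourceLayoutMapImpl further below is parameterized on
+// one of these traits types to pick up the per-aspect slot count and aspect mask at compile time.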
+
+std::string FormatDebugLabel(const char *prefix, const LoggingLabel &label);
+
+const static VkImageLayout kInvalidLayout = VK_IMAGE_LAYOUT_MAX_ENUM;
+// Interface class.
+class ImageSubresourceLayoutMap {
+  public:
+    typedef std::function<bool(const VkImageSubresource &, VkImageLayout, VkImageLayout)> Callback;
+    struct InitialLayoutState {
+        VkImageView image_view;          // For relaxed matching rule evaluation, else VK_NULL_HANDLE
+        VkImageAspectFlags aspect_mask;  // For relaxed matching rules... else 0
+        LoggingLabel label;
+        InitialLayoutState(const CMD_BUFFER_STATE &cb_state_, const IMAGE_VIEW_STATE *view_state);
+        InitialLayoutState() : image_view(VK_NULL_HANDLE), aspect_mask(0), label() {}
+    };
+
+    struct SubresourceLayout {
+        VkImageSubresource subresource;
+        VkImageLayout layout;
+    };
+
+    struct SubresourceRangeLayout {
+        VkImageSubresourceRange range;
+        VkImageLayout layout;
+    };
+
+    class ConstIteratorInterface {
+      public:
+        // Make the value accessor non-virtual
+        const SubresourceLayout &operator*() const { return value_; }
+
+        virtual ConstIteratorInterface &operator++() = 0;
+        virtual bool AtEnd() const = 0;
+        virtual ~ConstIteratorInterface(){};
+
+      protected:
+        SubresourceLayout value_;
+    };
+
+    class ConstIterator {
+      public:
+        ConstIterator &operator++() {
+            ++(*it_);
+            return *this;
+        }
+        const SubresourceLayout &operator*() const { return *(*it_); }
+        ConstIterator(ConstIteratorInterface *it) : it_(it){};
+        bool AtEnd() const { return it_->AtEnd(); }
+
+      protected:
+        std::unique_ptr<ConstIteratorInterface> it_;
+    };
+
+    virtual ConstIterator BeginInitialUse() const = 0;
+    virtual ConstIterator BeginSetLayout() const = 0;
+
+    virtual bool SetSubresourceRangeLayout(const CMD_BUFFER_STATE &cb_state, const VkImageSubresourceRange &range,
+                                           VkImageLayout layout, VkImageLayout expected_layout = kInvalidLayout) = 0;
+    virtual bool SetSubresourceRangeInitialLayout(const CMD_BUFFER_STATE &cb_state, const VkImageSubresourceRange &range,
+                                                  VkImageLayout layout, const IMAGE_VIEW_STATE *view_state = nullptr) = 0;
+    virtual bool ForRange(const VkImageSubresourceRange &range, const Callback &callback, bool skip_invalid = true,
+                          bool always_get_initial = false) const = 0;
+    virtual VkImageLayout GetSubresourceLayout(const VkImageSubresource subresource) const = 0;
+    virtual VkImageLayout GetSubresourceInitialLayout(const VkImageSubresource subresource) const = 0;
+    virtual const InitialLayoutState *GetSubresourceInitialLayoutState(const VkImageSubresource subresource) const = 0;
+    virtual bool UpdateFrom(const ImageSubresourceLayoutMap &from) = 0;
+    virtual uintptr_t CompatibilityKey() const = 0;
+    ImageSubresourceLayoutMap() {}
+    virtual ~ImageSubresourceLayoutMap() {}
+};
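+
+// Illustrative usage sketch (not part of the upstream header): walking the recorded
+// initial-use layouts through the type-erased iterators above. "subres_map" is a
+// hypothetical ImageSubresourceLayoutMap reference.
+//
+//     for (auto it = subres_map.BeginInitialUse(); !it.AtEnd(); ++it) {
+//         const ImageSubresourceLayoutMap::SubresourceLayout &entry = *it;
+//         // entry.subresource identifies the aspect/mip/layer; entry.layout is the first
+//         // layout recorded for that subresource in the command buffer.
+//     }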
+
+template <typename AspectTraits_, size_t kSparseThreshold = 64U>
+class ImageSubresourceLayoutMapImpl : public ImageSubresourceLayoutMap {
+  public:
+    typedef ImageSubresourceLayoutMap Base;
+    typedef AspectTraits_ AspectTraits;
+    typedef Base::SubresourceLayout SubresourceLayout;
+    typedef sparse_container::SparseVector<size_t, VkImageLayout, true, kInvalidLayout, kSparseThreshold> LayoutMap;
+    typedef sparse_container::SparseVector<size_t, VkImageLayout, false, kInvalidLayout, kSparseThreshold> InitialLayoutMap;
+
+    struct Layouts {
+        LayoutMap current;
+        InitialLayoutMap initial;
+        Layouts(size_t size) : current(0, size), initial(0, size) {}
+    };
+
+    template <typename Container>
+    class ConstIteratorImpl : public Base::ConstIteratorInterface {
+      public:
+        ConstIteratorImpl &operator++() override {
+            ++it_;
+            UpdateValue();
+            return *this;
+        }
+        // Just good enough for cend checks
+        ConstIteratorImpl(const ImageSubresourceLayoutMapImpl &map, const Container &container)
+            : map_(&map), container_(&container), the_end_(false) {
+            it_ = container_->cbegin();
+            UpdateValue();
+        }
+        ~ConstIteratorImpl() override {}
+        virtual bool AtEnd() const override { return the_end_; }
+
+      protected:
+        void UpdateValue() {
+            if (it_ != container_->cend()) {
+                value_.subresource = map_->Decode((*it_).first);
+                value_.layout = (*it_).second;
+            } else {
+                the_end_ = true;
+                value_.layout = kInvalidLayout;
+            }
+        }
+
+        typedef typename Container::const_iterator ContainerIterator;
+        const ImageSubresourceLayoutMapImpl *map_;
+        const Container *container_;
+        bool the_end_;
+        ContainerIterator it_;
+    };
+
+    Base::ConstIterator BeginInitialUse() const override {
+        return Base::ConstIterator(new ConstIteratorImpl<InitialLayoutMap>(*this, layouts_.initial));
+    }
+
+    Base::ConstIterator BeginSetLayout() const override {
+        return Base::ConstIterator(new ConstIteratorImpl<LayoutMap>(*this, layouts_.current));
+    }
+
+    bool SetSubresourceRangeLayout(const CMD_BUFFER_STATE &cb_state, const VkImageSubresourceRange &range, VkImageLayout layout,
+                                   VkImageLayout expected_layout = kInvalidLayout) override {
+        bool updated = false;
+        if (expected_layout == kInvalidLayout) {
+            // Set the initial layout to the set layout as we had no other layout to reference
+            expected_layout = layout;
+        }
+        if (!InRange(range)) return false;  // Don't even try to track bogus subresources
+
+        InitialLayoutState *initial_state = nullptr;
+        const uint32_t end_mip = range.baseMipLevel + range.levelCount;
+        const auto &aspects = AspectTraits::AspectBits();
+        for (uint32_t aspect_index = 0; aspect_index < AspectTraits::kAspectCount; aspect_index++) {
+            if (0 == (range.aspectMask & aspects[aspect_index])) continue;
+            size_t array_offset = Encode(aspect_index, range.baseMipLevel);
+            for (uint32_t mip_level = range.baseMipLevel; mip_level < end_mip; ++mip_level, array_offset += mip_size_) {
+                size_t start = array_offset + range.baseArrayLayer;
+                size_t end = start + range.layerCount;
+                bool updated_level = layouts_.current.SetRange(start, end, layout);
+                if (updated_level) {
+                    // We only need to try setting the initial layout if we changed any of the layout values above
+                    updated = true;
+                    if (layouts_.initial.SetRange(start, end, expected_layout)) {
+                        // We only need to try setting the initial layout *state* if the initial layout was updated
+                        initial_state = UpdateInitialLayoutState(start, end, initial_state, cb_state, nullptr);
+                    }
+                }
+            }
+        }
+        if (updated) version_++;
+        return updated;
+    }
+
+    bool SetSubresourceRangeInitialLayout(const CMD_BUFFER_STATE &cb_state, const VkImageSubresourceRange &range,
+                                          VkImageLayout layout, const IMAGE_VIEW_STATE *view_state = nullptr) override {
+        bool updated = false;
+        if (!InRange(range)) return false;  // Don't even try to track bogus subresources
+
+        InitialLayoutState *initial_state = nullptr;
+        const uint32_t end_mip = range.baseMipLevel + range.levelCount;
+        const auto &aspects = AspectTraits::AspectBits();
+        for (uint32_t aspect_index = 0; aspect_index < AspectTraits::kAspectCount; aspect_index++) {
+            if (0 == (range.aspectMask & aspects[aspect_index])) continue;
+            size_t array_offset = Encode(aspect_index, range.baseMipLevel);
+            for (uint32_t mip_level = range.baseMipLevel; mip_level < end_mip; ++mip_level, array_offset += mip_size_) {
+                size_t start = array_offset + range.baseArrayLayer;
+                size_t end = start + range.layerCount;
+                bool updated_level = layouts_.initial.SetRange(start, end, layout);
+                if (updated_level) {
+                    updated = true;
+                    // We only need to try setting the initial layout *state* if the initial layout was updated
+                    initial_state = UpdateInitialLayoutState(start, end, initial_state, cb_state, view_state);
+                }
+            }
+        }
+        if (updated) version_++;
+        return updated;
+    }
+
+    // Loop over the given range calling the callback, primarily for
+    // validation checks.  By default the initial_value is only looked
+    // up if the set value isn't found.
+    bool ForRange(const VkImageSubresourceRange &range, const Callback &callback, bool skip_invalid = true,
+                  bool always_get_initial = false) const override {
+        if (!InRange(range)) return false;  // Don't even try to process bogus subresources
+
+        VkImageSubresource subres;
+        auto &level = subres.mipLevel;
+        auto &layer = subres.arrayLayer;
+        auto &aspect = subres.aspectMask;
+        const auto &aspects = AspectTraits::AspectBits();
+        bool keep_on = true;
+        const uint32_t end_mip = range.baseMipLevel + range.levelCount;
+        const uint32_t end_layer = range.baseArrayLayer + range.layerCount;
+        for (uint32_t aspect_index = 0; aspect_index < AspectTraits::kAspectCount; aspect_index++) {
+            if (0 == (range.aspectMask & aspects[aspect_index])) continue;
+            aspect = aspects[aspect_index];  // noting that this and the following loop indices are references
+            size_t array_offset = Encode(aspect_index, range.baseMipLevel);
+            for (level = range.baseMipLevel; level < end_mip; ++level, array_offset += mip_size_) {
+                for (layer = range.baseArrayLayer; layer < end_layer; layer++) {
+                    // TODO -- would an iterator with a range check be faster?
+                    size_t index = array_offset + layer;
+                    VkImageLayout layout = layouts_.current.Get(index);
+                    VkImageLayout initial_layout = kInvalidLayout;
+                    if (always_get_initial || (layout == kInvalidLayout)) {
+                        initial_layout = layouts_.initial.Get(index);
+                    }
+
+                    if (!skip_invalid || (layout != kInvalidLayout) || (initial_layout != kInvalidLayout)) {
+                        keep_on = callback(subres, layout, initial_layout);
+                        if (!keep_on) return keep_on;  // False value from the callback aborts the range traversal
+                    }
+                }
+            }
+        }
+        return keep_on;
+    }
+    VkImageLayout GetSubresourceInitialLayout(const VkImageSubresource subresource) const override {
+        if (!InRange(subresource)) return kInvalidLayout;
+        uint32_t aspect_index = AspectTraits::Index(subresource.aspectMask);
+        size_t index = Encode(aspect_index, subresource.mipLevel, subresource.arrayLayer);
+        return layouts_.initial.Get(index);
+    }
+
+    const InitialLayoutState *GetSubresourceInitialLayoutState(const VkImageSubresource subresource) const override {
+        if (!InRange(subresource)) return nullptr;
+        uint32_t aspect_index = AspectTraits::Index(subresource.aspectMask);
+        size_t index = Encode(aspect_index, subresource.mipLevel, subresource.arrayLayer);
+        return initial_layout_state_map_.Get(index);
+    }
+
+    VkImageLayout GetSubresourceLayout(const VkImageSubresource subresource) const override {
+        if (!InRange(subresource)) return kInvalidLayout;
+        uint32_t aspect_index = AspectTraits::Index(subresource.aspectMask);
+        size_t index = Encode(aspect_index, subresource.mipLevel, subresource.arrayLayer);
+        return layouts_.current.Get(index);
+    }
+
+    // TODO: make sure this paranoia check is sufficient and not too much.
+    uintptr_t CompatibilityKey() const override {
+        return (reinterpret_cast<const uintptr_t>(&image_state_) ^ AspectTraits::AspectMask() ^ kSparseThreshold);
+    }
+
+    bool UpdateFrom(const ImageSubresourceLayoutMap &other) override {
+        // Must be from matching images for the reinterpret cast to be valid
+        assert(CompatibilityKey() == other.CompatibilityKey());
+        if (CompatibilityKey() != other.CompatibilityKey()) return false;
+
+        const auto &from = reinterpret_cast<const ImageSubresourceLayoutMapImpl &>(other);
+        bool updated = false;
+        updated |= layouts_.initial.Merge(from.layouts_.initial);
+        updated |= layouts_.current.Merge(from.layouts_.current);
+        initial_layout_state_map_.Merge(from.initial_layout_state_map_);
+
+        return updated;
+    }
+
+    ImageSubresourceLayoutMapImpl() : Base() {}
+    ImageSubresourceLayoutMapImpl(const IMAGE_STATE &image_state)
+        : Base(),
+          image_state_(image_state),
+          mip_size_(image_state.full_range.layerCount),
+          aspect_size_(mip_size_ * image_state.full_range.levelCount),
+          version_(0),
+          layouts_(aspect_size_ * AspectTraits::kAspectCount),
+          initial_layout_states_(),
+          initial_layout_state_map_(0, aspect_size_ * AspectTraits::kAspectCount) {
+        // Setup the row <-> aspect/mip_level base Encode/Decode LUT...
+        aspect_offsets_[0] = 0;
+        for (size_t i = 1; i < aspect_offsets_.size(); ++i) {  // Size is a compile time constant
+            aspect_offsets_[i] = aspect_offsets_[i - 1] + aspect_size_;
+        }
+    }
+    ~ImageSubresourceLayoutMapImpl() override {}
+
+  protected:
+    // This looks a bit ponderous but kAspectCount is a compile time constant
+    VkImageSubresource Decode(size_t index) const {
+        VkImageSubresource subres;
+        // find aspect index
+        uint32_t aspect_index = 0;
+        if (AspectTraits::kAspectCount == 2) {
+            if (index >= aspect_offsets_[1]) {
+                aspect_index = 1;
+                index = index - aspect_offsets_[aspect_index];
+            }
+        } else if (AspectTraits::kAspectCount == 3) {
+            if (index >= aspect_offsets_[2]) {
+                aspect_index = 2;
+            } else if (index >= aspect_offsets_[1]) {
+                aspect_index = 1;
+            }
+            index = index - aspect_offsets_[aspect_index];
+        } else {
+            assert(AspectTraits::kAspectCount == 1);  // Only aspect counts of 1, 2, and 3 supported
+        }
+
+        subres.aspectMask = AspectTraits::AspectBits()[aspect_index];
+        subres.mipLevel =
+            static_cast<uint32_t>(index / mip_size_);  // One hopes the compiler will optimize this pair of divisions...
+        subres.arrayLayer = static_cast<uint32_t>(index % mip_size_);
+
+        return subres;
+    }
+
+    uint32_t LevelLimit(uint32_t level) const { return (std::min)(image_state_.full_range.levelCount, level); }
+    uint32_t LayerLimit(uint32_t layer) const { return (std::min)(image_state_.full_range.layerCount, layer); }
+
+    bool InRange(const VkImageSubresource &subres) const {
+        bool in_range = (subres.mipLevel < image_state_.full_range.levelCount) &&
+                        (subres.arrayLayer < image_state_.full_range.layerCount) &&
+                        (subres.aspectMask & AspectTraits::AspectMask());
+        return in_range;
+    }
+
+    bool InRange(const VkImageSubresourceRange &range) const {
+        bool in_range = (range.baseMipLevel < image_state_.full_range.levelCount) &&
+                        ((range.baseMipLevel + range.levelCount) <= image_state_.full_range.levelCount) &&
+                        (range.baseArrayLayer < image_state_.full_range.layerCount) &&
+                        ((range.baseArrayLayer + range.layerCount) <= image_state_.full_range.layerCount) &&
+                        (range.aspectMask & AspectTraits::AspectMask());
+        return in_range;
+    }
+
+    inline size_t Encode(uint32_t aspect_index) const {
+        return (AspectTraits::kAspectCount == 1) ? 0 : aspect_offsets_[aspect_index];
+    }
+    inline size_t Encode(uint32_t aspect_index, uint32_t mip_level) const { return Encode(aspect_index) + mip_level * mip_size_; }
+    inline size_t Encode(uint32_t aspect_index, uint32_t mip_level, uint32_t array_layer) const {
+        return Encode(aspect_index, mip_level) + array_layer;
+    }
+
+    InitialLayoutState *UpdateInitialLayoutState(size_t start, size_t end, InitialLayoutState *initial_state,
+                                                 const CMD_BUFFER_STATE &cb_state, const IMAGE_VIEW_STATE *view_state) {
+        if (!initial_state) {
+            // Allocate on demand...  initial_layout_states_ holds ownership as a unique_ptr, while
+            // each subresource has a non-owning copy of the plain pointer.
+            initial_state = new InitialLayoutState(cb_state, view_state);
+            initial_layout_states_.emplace_back(initial_state);
+        }
+        assert(initial_state);
+        initial_layout_state_map_.SetRange(start, end, initial_state);
+        return initial_state;
+    }
+
+    typedef std::vector<std::unique_ptr<InitialLayoutState>> InitialLayoutStates;
+    // This map *also* needs "write once" semantics
+    typedef sparse_container::SparseVector<size_t, InitialLayoutState *, false, nullptr, kSparseThreshold> InitialLayoutStateMap;
+
+    const IMAGE_STATE &image_state_;
+    const size_t mip_size_;
+    const size_t aspect_size_;
+    uint64_t version_ = 0;
+    Layouts layouts_;
+    InitialLayoutStates initial_layout_states_;
+    InitialLayoutStateMap initial_layout_state_map_;
+    std::array<size_t, AspectTraits::kAspectCount> aspect_offsets_;
+};
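+
+// Illustrative sketch (not part of the upstream header) of the flattened indexing used by
+// Encode()/Decode() above: aspect planes are laid out back to back, each holding levelCount
+// rows of layerCount entries. For an image with 4 array layers and 3 mip levels
+// (mip_size_ == 4, aspect_size_ == 12):
+//
+//     index == aspect_offsets_[aspect_index] + mip_level * mip_size_ + array_layer
+//     Encode(1, 2, 3) == 12 + 2 * 4 + 3 == 23
+//
+// Decode(23) reverses this: aspect_index 1, mipLevel (23 - 12) / 4 == 2,
+// arrayLayer (23 - 12) % 4 == 3.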
+
+static VkImageLayout NormalizeImageLayout(VkImageLayout layout, VkImageLayout non_normal, VkImageLayout normal) {
+    return (layout == non_normal) ? normal : layout;
+}
+
+static VkImageLayout NormalizeDepthImageLayout(VkImageLayout layout) {
+    return NormalizeImageLayout(layout, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
+                                VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL);
+}
+
+static VkImageLayout NormalizeStencilImageLayout(VkImageLayout layout) {
+    return NormalizeImageLayout(layout, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
+                                VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL);
+}
+
+static bool ImageLayoutMatches(const VkImageAspectFlags aspect_mask, VkImageLayout a, VkImageLayout b) {
+    bool matches = (a == b);
+    if (!matches) {
+        // Relaxed rules when referencing *only* the depth or stencil aspects
+        if (aspect_mask == VK_IMAGE_ASPECT_DEPTH_BIT) {
+            matches = NormalizeDepthImageLayout(a) == NormalizeDepthImageLayout(b);
+        } else if (aspect_mask == VK_IMAGE_ASPECT_STENCIL_BIT) {
+            matches = NormalizeStencilImageLayout(a) == NormalizeStencilImageLayout(b);
+        }
+    }
+    return matches;
+}
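+
+// Illustrative sketch (not part of the upstream header): the relaxed rule above treats
+// DEPTH_STENCIL_READ_ONLY_OPTIMAL as equivalent to the corresponding single-aspect
+// read-only layout, but only when exactly one depth/stencil aspect is referenced:
+//
+//     ImageLayoutMatches(VK_IMAGE_ASPECT_DEPTH_BIT,
+//                        VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
+//                        VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL);  // true
+//
+//     ImageLayoutMatches(VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT,
+//                        VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
+//                        VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL);  // false: exact match required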
+
+// Utility type for ForRange callbacks
+struct LayoutUseCheckAndMessage {
+    const static VkImageAspectFlags kDepthOrStencil = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+    const ImageSubresourceLayoutMap *layout_map;
+    const VkImageAspectFlags aspect_mask;
+    const char *message;
+    VkImageLayout layout;
+
+    LayoutUseCheckAndMessage() = delete;
+    LayoutUseCheckAndMessage(const ImageSubresourceLayoutMap *layout_map_, const VkImageAspectFlags aspect_mask_ = 0)
+        : layout_map(layout_map_), aspect_mask{aspect_mask_}, message(nullptr), layout(kInvalidLayout) {}
+    bool Check(const VkImageSubresource &subres, VkImageLayout check, VkImageLayout current_layout, VkImageLayout initial_layout) {
+        message = nullptr;
+        layout = kInvalidLayout;  // Success status
+        if (current_layout != kInvalidLayout && !ImageLayoutMatches(aspect_mask, check, current_layout)) {
+            message = "previous known";
+            layout = current_layout;
+        } else if ((initial_layout != kInvalidLayout) && !ImageLayoutMatches(aspect_mask, check, initial_layout)) {
+            // To check the relaxed rule matching we need to see how the initial use was used
+            const auto initial_layout_state = layout_map->GetSubresourceInitialLayoutState(subres);
+            assert(initial_layout_state);  // If we have an initial layout, we better have a state for it
+            if (!((initial_layout_state->aspect_mask & kDepthOrStencil) &&
+                  ImageLayoutMatches(initial_layout_state->aspect_mask, check, initial_layout))) {
+                message = "previously used";
+                layout = initial_layout;
+            }
+        }
+        return layout == kInvalidLayout;
+    }
+};
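+
+// Illustrative usage sketch (not part of the upstream header): pairing the helper above with
+// ImageSubresourceLayoutMap::ForRange. "subres_map", "range", and "expected_layout" are
+// hypothetical locals standing in for caller state.
+//
+//     LayoutUseCheckAndMessage layout_check(&subres_map);
+//     subres_map.ForRange(range, [&](const VkImageSubresource &subres, VkImageLayout layout,
+//                                    VkImageLayout initial_layout) {
+//         if (!layout_check.Check(subres, expected_layout, layout, initial_layout)) {
+//             // layout_check.message and layout_check.layout describe the mismatch; report it
+//             // here and return false to abort the traversal.
+//         }
+//         return true;  // keep scanning the rest of the range
+//     });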
+
+// Store the DAG.
+struct DAGNode {
+    uint32_t pass;
+    std::vector<uint32_t> prev;
+    std::vector<uint32_t> next;
+};
+
+struct RENDER_PASS_STATE : public BASE_NODE {
+    VkRenderPass renderPass;
+    safe_VkRenderPassCreateInfo2KHR createInfo;
+    std::vector<std::vector<uint32_t>> self_dependencies;
+    std::vector<DAGNode> subpassToNode;
+    std::unordered_map<uint32_t, bool> attachment_first_read;
+
+    RENDER_PASS_STATE(VkRenderPassCreateInfo2KHR const *pCreateInfo) : createInfo(pCreateInfo) {}
+    RENDER_PASS_STATE(VkRenderPassCreateInfo const *pCreateInfo) {
+        ConvertVkRenderPassCreateInfoToV2KHR(*pCreateInfo, &createInfo);
+    }
+};
+
+// Autogenerated as part of the vk_validation_error_message.h codegen
+enum CMD_TYPE { VUID_CMD_ENUM_LIST(CMD_) };
+
+enum CB_STATE {
+    CB_NEW,                 // Newly created CB w/o any cmds
+    CB_RECORDING,           // BeginCB has been called on this CB
+    CB_RECORDED,            // EndCB has been called on this CB
+    CB_INVALID_COMPLETE,    // had a complete recording, but was since invalidated
+    CB_INVALID_INCOMPLETE,  // fouled before recording was completed
+};
+
+// CB Status -- used to track status of various bindings on cmd buffer objects
+typedef VkFlags CBStatusFlags;
+enum CBStatusFlagBits {
+    // clang-format off
+    CBSTATUS_NONE                   = 0x00000000,   // No status is set
+    CBSTATUS_LINE_WIDTH_SET         = 0x00000001,   // Line width has been set
+    CBSTATUS_DEPTH_BIAS_SET         = 0x00000002,   // Depth bias has been set
+    CBSTATUS_BLEND_CONSTANTS_SET    = 0x00000004,   // Blend constants state has been set
+    CBSTATUS_DEPTH_BOUNDS_SET       = 0x00000008,   // Depth bounds state object has been set
+    CBSTATUS_STENCIL_READ_MASK_SET  = 0x00000010,   // Stencil read mask has been set
+    CBSTATUS_STENCIL_WRITE_MASK_SET = 0x00000020,   // Stencil write mask has been set
+    CBSTATUS_STENCIL_REFERENCE_SET  = 0x00000040,   // Stencil reference has been set
+    CBSTATUS_VIEWPORT_SET           = 0x00000080,
+    CBSTATUS_SCISSOR_SET            = 0x00000100,
+    CBSTATUS_INDEX_BUFFER_BOUND     = 0x00000200,   // Index buffer has been set
+    CBSTATUS_EXCLUSIVE_SCISSOR_SET  = 0x00000400,
+    CBSTATUS_SHADING_RATE_PALETTE_SET = 0x00000800,
+    CBSTATUS_LINE_STIPPLE_SET       = 0x00001000,
+    CBSTATUS_VIEWPORT_W_SCALING_SET = 0x00002000,
+    CBSTATUS_ALL_STATE_SET          = 0x00003DFF,   // All state set (intentionally exclude index buffer)
+    // clang-format on
+};
+
+struct QueryObject {
+    VkQueryPool pool;
+    uint32_t query;
+    // These next two fields are *not* used in hash or comparison, they are effectively a data payload
+    uint32_t index;  // must be zero if !indexed
+    bool indexed;
+    // Command index in the command buffer where the end of the query was
+    // recorded (equal to the number of commands in the command buffer before
+    // the end of the query).
+    uint64_t endCommandIndex;
+
+    QueryObject(VkQueryPool pool_, uint32_t query_) : pool(pool_), query(query_), index(0), indexed(false), endCommandIndex(0) {}
+    QueryObject(VkQueryPool pool_, uint32_t query_, uint32_t index_)
+        : pool(pool_), query(query_), index(index_), indexed(true), endCommandIndex(0) {}
+    QueryObject(const QueryObject &obj)
+        : pool(obj.pool), query(obj.query), index(obj.index), indexed(obj.indexed), endCommandIndex(obj.endCommandIndex) {}
+    bool operator<(const QueryObject &rhs) const { return (pool == rhs.pool) ? query < rhs.query : pool < rhs.pool; }
+};
+
+inline bool operator==(const QueryObject &query1, const QueryObject &query2) {
+    return ((query1.pool == query2.pool) && (query1.query == query2.query));
+}
+
+struct QueryObjectPass {
+    QueryObject obj;
+    uint32_t perf_pass;
+
+    QueryObjectPass(const QueryObject &obj_, uint32_t perf_pass_) : obj(obj_), perf_pass(perf_pass_) {}
+    QueryObjectPass(const QueryObjectPass &obj_) : obj(obj_.obj), perf_pass(obj_.perf_pass) {}
+    bool operator<(const QueryObjectPass &rhs) const { return (obj == rhs.obj) ? perf_pass < rhs.perf_pass : obj < rhs.obj; }
+};
+
+inline bool operator==(const QueryObjectPass &query1, const QueryObjectPass &query2) {
+    return ((query1.obj == query2.obj) && (query1.perf_pass == query2.perf_pass));
+}
+
+enum QueryState {
+    QUERYSTATE_UNKNOWN,    // Initial state.
+    QUERYSTATE_RESET,      // After resetting.
+    QUERYSTATE_RUNNING,    // Query running.
+    QUERYSTATE_ENDED,      // Query ended but results may not be available.
+    QUERYSTATE_AVAILABLE,  // Results available.
+};
+
+enum QueryResultType {
+    QUERYRESULT_UNKNOWN,
+    QUERYRESULT_NO_DATA,
+    QUERYRESULT_MAYBE_NO_DATA,
+    QUERYRESULT_SOME_DATA,
+    QUERYRESULT_WAIT_ON_RESET,
+    QUERYRESULT_WAIT_ON_RUNNING,
+};
+
+inline const char *string_QueryResultType(QueryResultType result_type) {
+    switch (result_type) {
+        case QUERYRESULT_UNKNOWN:
+            return "query may be in an unknown state";
+        case QUERYRESULT_NO_DATA:
+        case QUERYRESULT_MAYBE_NO_DATA:
+            return "query may return no data";
+        case QUERYRESULT_SOME_DATA:
+            return "query will return some data or availability bit";
+        case QUERYRESULT_WAIT_ON_RESET:
+            return "waiting on a query that has been reset and not issued yet";
+        case QUERYRESULT_WAIT_ON_RUNNING:
+            return "waiting on a query that has not ended yet";
+    }
+    assert(false);
+    return "UNKNOWN QUERY STATE";  // Unreachable.
+}
+
+namespace std {
+template <>
+struct hash<QueryObject> {
+    size_t operator()(QueryObject query) const throw() {
+        return hash<uint64_t>()((uint64_t)(query.pool)) ^ hash<uint32_t>()(query.query);
+    }
+};
+
+template <>
+struct hash<QueryObjectPass> {
+    size_t operator()(QueryObjectPass query) const throw() {
+        return hash<QueryObject>()(query.obj) ^ hash<uint32_t>()(query.perf_pass);
+    }
+};
+}  // namespace std
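+
+// Illustrative sketch (not part of the upstream header): the hash specializations above allow
+// QueryObject to be used directly as an unordered key, e.g.
+//
+//     std::unordered_map<QueryObject, QueryState> query_states;
+//     query_states[QueryObject(pool, 0)] = QUERYSTATE_RESET;  // "pool" is a hypothetical VkQueryPool
+//
+// Note that index, indexed, and endCommandIndex are deliberately excluded from both the hash
+// and operator==, so entries are keyed purely by (pool, query).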
+
+struct CBVertexBufferBindingInfo {
+    std::vector<BufferBinding> vertex_buffer_bindings;
+};
+
+struct ImageSubresourcePair {
+    VkImage image;
+    bool hasSubresource;
+    VkImageSubresource subresource;
+};
+
+inline bool operator==(const ImageSubresourcePair &img1, const ImageSubresourcePair &img2) {
+    if (img1.image != img2.image || img1.hasSubresource != img2.hasSubresource) return false;
+    return !img1.hasSubresource ||
+           (img1.subresource.aspectMask == img2.subresource.aspectMask && img1.subresource.mipLevel == img2.subresource.mipLevel &&
+            img1.subresource.arrayLayer == img2.subresource.arrayLayer);
+}
+
+namespace std {
+template <>
+struct hash<ImageSubresourcePair> {
+    size_t operator()(ImageSubresourcePair img) const throw() {
+        size_t hashVal = hash<uint64_t>()(reinterpret_cast<uint64_t &>(img.image));
+        hashVal ^= hash<bool>()(img.hasSubresource);
+        if (img.hasSubresource) {
+            hashVal ^= hash<uint32_t>()(reinterpret_cast<uint32_t &>(img.subresource.aspectMask));
+            hashVal ^= hash<uint32_t>()(img.subresource.mipLevel);
+            hashVal ^= hash<uint32_t>()(img.subresource.arrayLayer);
+        }
+        return hashVal;
+    }
+};
+}  // namespace std
+
+// Canonical dictionary for PushConstantRanges
+using PushConstantRangesDict = hash_util::Dictionary<PushConstantRanges>;
+using PushConstantRangesId = PushConstantRangesDict::Id;
+
+// Canonical dictionary for the pipeline layout's layout of descriptorsetlayouts
+using DescriptorSetLayoutDef = cvdescriptorset::DescriptorSetLayoutDef;
+using DescriptorSetLayoutId = std::shared_ptr<const DescriptorSetLayoutDef>;
+using PipelineLayoutSetLayoutsDef = std::vector<DescriptorSetLayoutId>;
+using PipelineLayoutSetLayoutsDict =
+    hash_util::Dictionary<PipelineLayoutSetLayoutsDef, hash_util::IsOrderedContainer<PipelineLayoutSetLayoutsDef>>;
+using PipelineLayoutSetLayoutsId = PipelineLayoutSetLayoutsDict::Id;
+
+// Defines/stores a compatibility definition for set N
+// The "layout layout" must store at least set+1 entries, but only the first set+1 are considered for hash and equality testing
+// Note: the "cannonical" data are referenced by Id, not including handle or device specific state
+// Note: hash and equality only consider layout_id entries [0, set] for determining uniqueness
+struct PipelineLayoutCompatDef {
+    uint32_t set;
+    PushConstantRangesId push_constant_ranges;
+    PipelineLayoutSetLayoutsId set_layouts_id;
+    PipelineLayoutCompatDef(const uint32_t set_index, const PushConstantRangesId pcr_id, const PipelineLayoutSetLayoutsId sl_id)
+        : set(set_index), push_constant_ranges(pcr_id), set_layouts_id(sl_id) {}
+    size_t hash() const;
+    bool operator==(const PipelineLayoutCompatDef &other) const;
+};
+
+// Canonical dictionary for PipelineLayoutCompat records
+using PipelineLayoutCompatDict = hash_util::Dictionary<PipelineLayoutCompatDef, hash_util::HasHashMember<PipelineLayoutCompatDef>>;
+using PipelineLayoutCompatId = PipelineLayoutCompatDict::Id;
+
+// Store layouts and pushconstants for PipelineLayout
+struct PIPELINE_LAYOUT_STATE : public BASE_NODE {
+    VkPipelineLayout layout;
+    std::vector<std::shared_ptr<cvdescriptorset::DescriptorSetLayout const>> set_layouts;
+    PushConstantRangesId push_constant_ranges;
+    std::vector<PipelineLayoutCompatId> compat_for_set;
+
+    PIPELINE_LAYOUT_STATE() : layout(VK_NULL_HANDLE), set_layouts{}, push_constant_ranges{}, compat_for_set{} {}
+
+    void reset() {
+        layout = VK_NULL_HANDLE;
+        set_layouts.clear();
+        push_constant_ranges.reset();
+        compat_for_set.clear();
+    }
+};
+// Shader typedefs needed to store StageState below
+struct interface_var {
+    uint32_t id;
+    uint32_t type_id;
+    uint32_t offset;
+    bool is_patch;
+    bool is_block_member;
+    bool is_relaxed_precision;
+    // TODO: collect the name, too? It isn't required to be present.
+};
+typedef std::pair<unsigned, unsigned> descriptor_slot_t;
+
+class PIPELINE_STATE : public BASE_NODE {
+  public:
+    struct StageState {
+        std::unordered_set<uint32_t> accessible_ids;
+        std::vector<std::pair<descriptor_slot_t, interface_var>> descriptor_uses;
+        bool has_writable_descriptor;
+    };
+
+    VkPipeline pipeline;
+    safe_VkGraphicsPipelineCreateInfo graphicsPipelineCI;
+    safe_VkComputePipelineCreateInfo computePipelineCI;
+    safe_VkRayTracingPipelineCreateInfoNV raytracingPipelineCI;
+    // Hold shared ptr to RP in case RP itself is destroyed
+    std::shared_ptr<const RENDER_PASS_STATE> rp_state;
+    // Flag of which shader stages are active for this pipeline
+    uint32_t active_shaders;
+    uint32_t duplicate_shaders;
+    // Capture which slots (set#->bindings) are actually used by the shaders of this pipeline
+    std::unordered_map<uint32_t, BindingReqMap> active_slots;
+    uint32_t max_active_slot;  // the highest set number in active_slots for pipeline layout compatibility checks
+    // Additional metadata needed by pipeline_state initialization and validation
+    std::vector<StageState> stage_state;
+    // Vtx input info (if any)
+    std::vector<VkVertexInputBindingDescription> vertex_binding_descriptions_;
+    std::vector<VkVertexInputAttributeDescription> vertex_attribute_descriptions_;
+    std::vector<VkDeviceSize> vertex_attribute_alignments_;
+    std::unordered_map<uint32_t, uint32_t> vertex_binding_to_index_map_;
+    std::vector<VkPipelineColorBlendAttachmentState> attachments;
+    bool blendConstantsEnabled;  // Blend constants enabled for any attachments
+    std::shared_ptr<const PIPELINE_LAYOUT_STATE> pipeline_layout;
+    VkPrimitiveTopology topology_at_rasterizer;
+
+    // Default constructor
+    PIPELINE_STATE()
+        : pipeline{},
+          graphicsPipelineCI{},
+          computePipelineCI{},
+          raytracingPipelineCI{},
+          rp_state(nullptr),
+          active_shaders(0),
+          duplicate_shaders(0),
+          active_slots(),
+          max_active_slot(0),
+          vertex_binding_descriptions_(),
+          vertex_attribute_descriptions_(),
+          vertex_binding_to_index_map_(),
+          attachments(),
+          blendConstantsEnabled(false),
+          pipeline_layout(),
+          topology_at_rasterizer{} {}
+
+    void reset() {
+        VkGraphicsPipelineCreateInfo emptyGraphicsCI = {};
+        graphicsPipelineCI.initialize(&emptyGraphicsCI, false, false);
+        VkComputePipelineCreateInfo emptyComputeCI = {};
+        computePipelineCI.initialize(&emptyComputeCI);
+        VkRayTracingPipelineCreateInfoNV emptyRayTracingCI = {};
+        raytracingPipelineCI.initialize(&emptyRayTracingCI);
+        stage_state.clear();
+    }
+
+    void initGraphicsPipeline(const ValidationStateTracker *state_data, const VkGraphicsPipelineCreateInfo *pCreateInfo,
+                              std::shared_ptr<const RENDER_PASS_STATE> &&rpstate);
+    void initComputePipeline(const ValidationStateTracker *state_data, const VkComputePipelineCreateInfo *pCreateInfo);
+    void initRayTracingPipelineNV(const ValidationStateTracker *state_data, const VkRayTracingPipelineCreateInfoNV *pCreateInfo);
+
+    inline VkPipelineBindPoint getPipelineType() const {
+        if (graphicsPipelineCI.sType == VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO)
+            return VK_PIPELINE_BIND_POINT_GRAPHICS;
+        else if (computePipelineCI.sType == VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO)
+            return VK_PIPELINE_BIND_POINT_COMPUTE;
+        else if (raytracingPipelineCI.sType == VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV)
+            return VK_PIPELINE_BIND_POINT_RAY_TRACING_NV;
+        else
+            return VK_PIPELINE_BIND_POINT_MAX_ENUM;
+    }
+
+    inline VkPipelineCreateFlags getPipelineCreateFlags() const {
+        if (graphicsPipelineCI.sType == VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO)
+            return graphicsPipelineCI.flags;
+        else if (computePipelineCI.sType == VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO)
+            return computePipelineCI.flags;
+        else if (raytracingPipelineCI.sType == VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV)
+            return raytracingPipelineCI.flags;
+        else
+            return 0;
+    }
+};
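+
+// Illustrative sketch (not part of the upstream header): getPipelineType() keys off whichever
+// create-info struct was filled in by the matching init*Pipeline() call, so for a hypothetical
+// "pipeline_state" built via initComputePipeline():
+//
+//     VkPipelineBindPoint bind_point = pipeline_state->getPipelineType();       // VK_PIPELINE_BIND_POINT_COMPUTE
+//     VkPipelineCreateFlags flags = pipeline_state->getPipelineCreateFlags();   // computePipelineCI.flags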
+
+// Track last states that are bound per pipeline bind point (Gfx & Compute)
+struct LAST_BOUND_STATE {
+    LAST_BOUND_STATE() { reset(); }  // must define default constructor for portability reasons
+    PIPELINE_STATE *pipeline_state;
+    VkPipelineLayout pipeline_layout;
+    std::unique_ptr<cvdescriptorset::DescriptorSet> push_descriptor_set;
+
+    // Ordered bound set tracking where index is set# that given set is bound to
+    struct PER_SET {
+        PER_SET()
+            : bound_descriptor_set(nullptr),
+              compat_id_for_set(0),
+              validated_set(nullptr),
+              validated_set_change_count(~0ULL),
+              validated_set_image_layout_change_count(~0ULL),
+              validated_set_binding_req_map() {}
+
+        cvdescriptorset::DescriptorSet *bound_descriptor_set;
+        // one dynamic offset per dynamic descriptor bound to this CB
+        std::vector<uint32_t> dynamicOffsets;
+        PipelineLayoutCompatId compat_id_for_set;
+
+        // Cache most recently validated descriptor state for ValidateCmdBufDrawState/UpdateDrawState
+        const cvdescriptorset::DescriptorSet *validated_set;
+        uint64_t validated_set_change_count;
+        uint64_t validated_set_image_layout_change_count;
+        BindingReqMap validated_set_binding_req_map;
+    };
+
+    std::vector<PER_SET> per_set;
+
+    void reset() {
+        pipeline_state = nullptr;
+        pipeline_layout = VK_NULL_HANDLE;
+        push_descriptor_set = nullptr;
+        per_set.clear();
+    }
+
+    void UnbindAndResetPushDescriptorSet(cvdescriptorset::DescriptorSet *ds) {
+        if (push_descriptor_set) {
+            for (std::size_t i = 0; i < per_set.size(); i++) {
+                if (per_set[i].bound_descriptor_set == push_descriptor_set.get()) {
+                    per_set[i].bound_descriptor_set = nullptr;
+                }
+            }
+        }
+        push_descriptor_set.reset(ds);
+    }
+};
+
+static inline bool CompatForSet(uint32_t set, const LAST_BOUND_STATE &a, const std::vector<PipelineLayoutCompatId> &b) {
+    bool result = (set < a.per_set.size()) && (set < b.size()) && (a.per_set[set].compat_id_for_set == b[set]);
+    return result;
+}
+
+static inline bool CompatForSet(uint32_t set, const PIPELINE_LAYOUT_STATE *a, const PIPELINE_LAYOUT_STATE *b) {
+    // Intentionally have a result variable to simplify debugging
+    bool result = a && b && (set < a->compat_for_set.size()) && (set < b->compat_for_set.size()) &&
+                  (a->compat_for_set[set] == b->compat_for_set[set]);
+    return result;
+}
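+
+// Illustrative usage sketch (not part of the upstream header): a typical use of CompatForSet is
+// finding how many leading descriptor sets remain valid when switching pipeline layouts.
+// "bound" (a LAST_BOUND_STATE) and "new_layout" (a PIPELINE_LAYOUT_STATE*) are hypothetical inputs.
+//
+//     uint32_t first_incompatible = 0;
+//     while (CompatForSet(first_incompatible, bound, new_layout->compat_for_set)) {
+//         ++first_incompatible;
+//     }
+//     // Sets [0, first_incompatible) stay bound; higher-numbered sets are disturbed.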
+
+// Types to store queue family ownership (QFO) Transfers
+
+// Common to image and buffer memory barriers
+template <typename Handle, typename Barrier>
+struct QFOTransferBarrierBase {
+    using HandleType = Handle;
+    using BarrierType = Barrier;
+    struct Tag {};
+    HandleType handle = VK_NULL_HANDLE;
+    uint32_t srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    uint32_t dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+
+    QFOTransferBarrierBase() = default;
+    QFOTransferBarrierBase(const BarrierType &barrier, const HandleType &resource_handle)
+        : handle(resource_handle),
+          srcQueueFamilyIndex(barrier.srcQueueFamilyIndex),
+          dstQueueFamilyIndex(barrier.dstQueueFamilyIndex) {}
+
+    hash_util::HashCombiner base_hash_combiner() const {
+        hash_util::HashCombiner hc;
+        hc << srcQueueFamilyIndex << dstQueueFamilyIndex << handle;
+        return hc;
+    }
+
+    bool operator==(const QFOTransferBarrierBase &rhs) const {
+        return (srcQueueFamilyIndex == rhs.srcQueueFamilyIndex) && (dstQueueFamilyIndex == rhs.dstQueueFamilyIndex) &&
+               (handle == rhs.handle);
+    }
+};
+
+template <typename Barrier>
+struct QFOTransferBarrier {};
+
+// Image barrier specific implementation
+template <>
+struct QFOTransferBarrier<VkImageMemoryBarrier> : public QFOTransferBarrierBase<VkImage, VkImageMemoryBarrier> {
+    using BaseType = QFOTransferBarrierBase<VkImage, VkImageMemoryBarrier>;
+    VkImageLayout oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    VkImageLayout newLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    VkImageSubresourceRange subresourceRange;
+
+    QFOTransferBarrier() = default;
+    QFOTransferBarrier(const BarrierType &barrier)
+        : BaseType(barrier, barrier.image),
+          oldLayout(barrier.oldLayout),
+          newLayout(barrier.newLayout),
+          subresourceRange(barrier.subresourceRange) {}
+    size_t hash() const {
+        // Ignoring the layout information for the purpose of the hash, as we're interested in QFO release/acquisition w.r.t.
+        // the subresource affected, and layout transitions are currently validated on another path
+        auto hc = base_hash_combiner() << subresourceRange;
+        return hc.Value();
+    }
+    bool operator==(const QFOTransferBarrier<BarrierType> &rhs) const {
+        // Ignoring layout w.r.t. equality. See comment in hash above.
+        return (static_cast<BaseType>(*this) == static_cast<BaseType>(rhs)) && (subresourceRange == rhs.subresourceRange);
+    }
+    // TODO: codegen a comprehensive compile time type -> string (and/or other traits) template family
+    static const char *BarrierName() { return "VkImageMemoryBarrier"; }
+    static const char *HandleName() { return "VkImage"; }
+    // UNASSIGNED-VkImageMemoryBarrier-image-00001 QFO transfer image barrier must not duplicate QFO recorded in command buffer
+    static const char *ErrMsgDuplicateQFOInCB() { return "UNASSIGNED-VkImageMemoryBarrier-image-00001"; }
+    // UNASSIGNED-VkImageMemoryBarrier-image-00002 QFO transfer image barrier must not duplicate QFO submitted in batch
+    static const char *ErrMsgDuplicateQFOInSubmit() { return "UNASSIGNED-VkImageMemoryBarrier-image-00002"; }
+    // UNASSIGNED-VkImageMemoryBarrier-image-00003 QFO transfer image barrier must not duplicate QFO submitted previously
+    static const char *ErrMsgDuplicateQFOSubmitted() { return "UNASSIGNED-VkImageMemoryBarrier-image-00003"; }
+    // UNASSIGNED-VkImageMemoryBarrier-image-00004 QFO acquire image barrier must have matching QFO release submitted previously
+    static const char *ErrMsgMissingQFOReleaseInSubmit() { return "UNASSIGNED-VkImageMemoryBarrier-image-00004"; }
+};
+
+// Buffer barrier specific implementation
+template <>
+struct QFOTransferBarrier<VkBufferMemoryBarrier> : public QFOTransferBarrierBase<VkBuffer, VkBufferMemoryBarrier> {
+    using BaseType = QFOTransferBarrierBase<VkBuffer, VkBufferMemoryBarrier>;
+    VkDeviceSize offset = 0;
+    VkDeviceSize size = 0;
+    QFOTransferBarrier(const VkBufferMemoryBarrier &barrier)
+        : BaseType(barrier, barrier.buffer), offset(barrier.offset), size(barrier.size) {}
+    size_t hash() const {
+        auto hc = base_hash_combiner() << offset << size;
+        return hc.Value();
+    }
+    bool operator==(const QFOTransferBarrier<BarrierType> &rhs) const {
+        return (static_cast<BaseType>(*this) == static_cast<BaseType>(rhs)) && (offset == rhs.offset) && (size == rhs.size);
+    }
+    static const char *BarrierName() { return "VkBufferMemoryBarrier"; }
+    static const char *HandleName() { return "VkBuffer"; }
+    // UNASSIGNED-VkBufferMemoryBarrier-buffer-00001 QFO transfer buffer barrier must not duplicate QFO recorded in command buffer
+    static const char *ErrMsgDuplicateQFOInCB() { return "UNASSIGNED-VkBufferMemoryBarrier-buffer-00001"; }
+    // UNASSIGNED-VkBufferMemoryBarrier-buffer-00002 QFO transfer buffer barrier must not duplicate QFO submitted in batch
+    static const char *ErrMsgDuplicateQFOInSubmit() { return "UNASSIGNED-VkBufferMemoryBarrier-buffer-00002"; }
+    // UNASSIGNED-VkBufferMemoryBarrier-buffer-00003 QFO transfer buffer barrier must not duplicate QFO submitted previously
+    static const char *ErrMsgDuplicateQFOSubmitted() { return "UNASSIGNED-VkBufferMemoryBarrier-buffer-00003"; }
+    // UNASSIGNED-VkBufferMemoryBarrier-buffer-00004 QFO acquire buffer barrier must have matching QFO release submitted previously
+    static const char *ErrMsgMissingQFOReleaseInSubmit() { return "UNASSIGNED-VkBufferMemoryBarrier-buffer-00004"; }
+};
+
+template <typename Barrier>
+using QFOTransferBarrierHash = hash_util::HasHashMember<QFOTransferBarrier<Barrier>>;
+
+// Command buffers store the set of barriers recorded
+template <typename Barrier>
+using QFOTransferBarrierSet = std::unordered_set<QFOTransferBarrier<Barrier>, QFOTransferBarrierHash<Barrier>>;
+template <typename Barrier>
+struct QFOTransferBarrierSets {
+    QFOTransferBarrierSet<Barrier> release;
+    QFOTransferBarrierSet<Barrier> acquire;
+    void Reset() {
+        acquire.clear();
+        release.clear();
+    }
+};
+
+// The layer_data stores the map of pending release barriers
+template <typename Barrier>
+using GlobalQFOTransferBarrierMap =
+    std::unordered_map<typename QFOTransferBarrier<Barrier>::HandleType, QFOTransferBarrierSet<Barrier>>;
+
+// Submit queue uses the Scoreboard to track all release/acquire operations in a batch.
+template <typename Barrier>
+using QFOTransferCBScoreboard =
+    std::unordered_map<QFOTransferBarrier<Barrier>, const CMD_BUFFER_STATE *, QFOTransferBarrierHash<Barrier>>;
+template <typename Barrier>
+struct QFOTransferCBScoreboards {
+    QFOTransferCBScoreboard<Barrier> acquire;
+    QFOTransferCBScoreboard<Barrier> release;
+};
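+
+// Illustrative sketch (not part of the upstream header): how a queue family ownership transfer
+// recorded in a command buffer would land in these sets. "barrier" is a hypothetical
+// VkImageMemoryBarrier with distinct src/dst families and "cb_queue_family" the recording
+// command buffer's queue family.
+//
+//     QFOTransferBarrierSets<VkImageMemoryBarrier> &sets = cb_state->qfo_transfer_image_barriers;
+//     if (barrier.srcQueueFamilyIndex == cb_queue_family) {
+//         sets.release.emplace(barrier);  // this command buffer releases ownership
+//     } else if (barrier.dstQueueFamilyIndex == cb_queue_family) {
+//         sets.acquire.emplace(barrier);  // this command buffer acquires ownership
+//     }
+//
+// At submit time the scoreboard types above pair each barrier with the command buffer that
+// recorded it so duplicate releases/acquires within a batch can be reported.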
+
+typedef std::map<QueryObject, QueryState> QueryMap;
+typedef std::map<QueryObjectPass, QueryState> QueryPassMap;
+typedef std::unordered_map<VkEvent, VkPipelineStageFlags> EventToStageMap;
+
+// Cmd Buffer Wrapper Struct - TODO: This desperately needs its own class
+struct CMD_BUFFER_STATE : public BASE_NODE {
+    VkCommandBuffer commandBuffer;
+    VkCommandBufferAllocateInfo createInfo = {};
+    VkCommandBufferBeginInfo beginInfo;
+    VkCommandBufferInheritanceInfo inheritanceInfo;
+    VkDevice device;  // device this CB belongs to
+    std::shared_ptr<const COMMAND_POOL_STATE> command_pool;
+    bool hasDrawCmd;
+    bool hasTraceRaysCmd;
+    bool hasBuildAccelerationStructureCmd;
+    bool hasDispatchCmd;
+
+    CB_STATE state;         // Track cmd buffer update state
+    uint64_t commandCount;  // Number of commands recorded
+    uint64_t submitCount;   // Number of times CB has been submitted
+    typedef uint64_t ImageLayoutUpdateCount;
+    ImageLayoutUpdateCount image_layout_change_count;  // The sequence number for changes to image layout (for cached validation)
+    CBStatusFlags status;                              // Track status of various bindings on cmd buffer
+    CBStatusFlags static_status;                       // All state bits provided by current graphics pipeline
+                                                       // rather than dynamic state
+    // Currently storing "lastBound" objects on per-CB basis
+    //  long-term may want to create caches of "lastBound" states and could have
+    //  each individual CMD_NODE referencing its own "lastBound" state
+    // Store last bound state for Gfx & Compute pipeline bind points
+    std::map<uint32_t, LAST_BOUND_STATE> lastBound;
+
+    using Bindings = std::map<uint32_t, descriptor_req>;
+    using Pipelines_Bindings = std::map<VkPipeline, Bindings>;
+    std::unordered_map<VkDescriptorSet, Pipelines_Bindings> validate_descriptorsets_in_queuesubmit;
+
+    uint32_t viewportMask;
+    uint32_t scissorMask;
+    uint32_t initial_device_mask;
+
+    VkRenderPassBeginInfo activeRenderPassBeginInfo;
+    RENDER_PASS_STATE *activeRenderPass;
+    VkSubpassContents activeSubpassContents;
+    uint32_t active_render_pass_device_mask;
+    uint32_t activeSubpass;
+    VkFramebuffer activeFramebuffer;
+    std::unordered_set<VkFramebuffer> framebuffers;
+    // Unified data structs to track objects bound to this command buffer as well as object
+    //  dependencies that have been broken : either destroyed objects, or updated descriptor sets
+    std::vector<VulkanTypedHandle> object_bindings;
+    std::vector<VulkanTypedHandle> broken_bindings;
+
+    QFOTransferBarrierSets<VkBufferMemoryBarrier> qfo_transfer_buffer_barriers;
+    QFOTransferBarrierSets<VkImageMemoryBarrier> qfo_transfer_image_barriers;
+
+    std::unordered_set<VkEvent> waitedEvents;
+    std::vector<VkEvent> writeEventsBeforeWait;
+    std::vector<VkEvent> events;
+    std::unordered_set<QueryObject> activeQueries;
+    std::unordered_set<QueryObject> startedQueries;
+    typedef std::unordered_map<VkImage, std::unique_ptr<ImageSubresourceLayoutMap>> ImageLayoutMap;
+    ImageLayoutMap image_layout_map;
+    CBVertexBufferBindingInfo current_vertex_buffer_binding_info;
+    bool vertex_buffer_used;  // Track for perf warning to make sure any bound vtx buffer used
+    VkCommandBuffer primaryCommandBuffer;
+    // If primary, the secondary command buffers we will call.
+    // If secondary, the primary command buffers we will be called by.
+    std::unordered_set<CMD_BUFFER_STATE *> linkedCommandBuffers;
+    // Validation functions run at primary CB queue submit time
+    std::vector<std::function<bool(const ValidationStateTracker *device_data, const class QUEUE_STATE *queue_state)>>
+        queue_submit_functions;
+    // Validation functions run when secondary CB is executed in primary
+    std::vector<std::function<bool(const CMD_BUFFER_STATE *, VkFramebuffer)>> cmd_execute_commands_functions;
+    std::vector<
+        std::function<bool(const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap)>>
+        eventUpdates;
+    std::vector<std::function<bool(const ValidationStateTracker *device_data, bool do_validate, QueryMap *localQueryToStateMap)>>
+        queryUpdates;
+    std::unordered_set<cvdescriptorset::DescriptorSet *> validated_descriptor_sets;
+    // Contents valid only after an index buffer is bound (CBSTATUS_INDEX_BUFFER_BOUND set)
+    IndexBufferBinding index_buffer_binding;
+    bool performance_lock_acquired = false;
+    bool performance_lock_released = false;
+
+    // Cache of current insert label...
+    LoggingLabel debug_label;
+
+    std::vector<uint8_t> push_constant_data;
+    PushConstantRangesId push_constant_data_ranges;
+};
+
+static inline const QFOTransferBarrierSets<VkImageMemoryBarrier> &GetQFOBarrierSets(
+    const CMD_BUFFER_STATE *cb, const QFOTransferBarrier<VkImageMemoryBarrier>::Tag &type_tag) {
+    return cb->qfo_transfer_image_barriers;
+}
+static inline const QFOTransferBarrierSets<VkBufferMemoryBarrier> &GetQFOBarrierSets(
+    const CMD_BUFFER_STATE *cb, const QFOTransferBarrier<VkBufferMemoryBarrier>::Tag &type_tag) {
+    return cb->qfo_transfer_buffer_barriers;
+}
+static inline QFOTransferBarrierSets<VkImageMemoryBarrier> &GetQFOBarrierSets(
+    CMD_BUFFER_STATE *cb, const QFOTransferBarrier<VkImageMemoryBarrier>::Tag &type_tag) {
+    return cb->qfo_transfer_image_barriers;
+}
+static inline QFOTransferBarrierSets<VkBufferMemoryBarrier> &GetQFOBarrierSets(
+    CMD_BUFFER_STATE *cb, const QFOTransferBarrier<VkBufferMemoryBarrier>::Tag &type_tag) {
+    return cb->qfo_transfer_buffer_barriers;
+}
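+
+// Illustrative sketch (not part of the upstream header): the Tag parameter above exists purely
+// for overload selection, letting barrier-generic template code pick the matching set. "SetsFor"
+// is a hypothetical helper, not an upstream function.
+//
+//     template <typename Barrier>
+//     QFOTransferBarrierSets<Barrier> &SetsFor(CMD_BUFFER_STATE *cb) {
+//         return GetQFOBarrierSets(cb, typename QFOTransferBarrier<Barrier>::Tag());
+//     }
+//
+//     // SetsFor<VkImageMemoryBarrier>(cb)  -> cb->qfo_transfer_image_barriers
+//     // SetsFor<VkBufferMemoryBarrier>(cb) -> cb->qfo_transfer_buffer_barriers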
+
+struct SEMAPHORE_WAIT {
+    VkSemaphore semaphore;
+    VkQueue queue;
+    uint64_t seq;
+};
+
+struct CB_SUBMISSION {
+    CB_SUBMISSION(std::vector<VkCommandBuffer> const &cbs, std::vector<SEMAPHORE_WAIT> const &waitSemaphores,
+                  std::vector<VkSemaphore> const &signalSemaphores, std::vector<VkSemaphore> const &externalSemaphores,
+                  VkFence fence, uint32_t perf_submit_pass)
+        : cbs(cbs),
+          waitSemaphores(waitSemaphores),
+          signalSemaphores(signalSemaphores),
+          externalSemaphores(externalSemaphores),
+          fence(fence),
+          perf_submit_pass(perf_submit_pass) {}
+
+    std::vector<VkCommandBuffer> cbs;
+    std::vector<SEMAPHORE_WAIT> waitSemaphores;
+    std::vector<VkSemaphore> signalSemaphores;
+    std::vector<VkSemaphore> externalSemaphores;
+    VkFence fence;
+    uint32_t perf_submit_pass;
+};
+
+struct IMAGE_LAYOUT_STATE {
+    VkImageLayout layout;
+    VkFormat format;
+};
+
+struct MT_FB_ATTACHMENT_INFO {
+    IMAGE_VIEW_STATE *view_state;
+    VkImage image;
+};
+
+class FRAMEBUFFER_STATE : public BASE_NODE {
+  public:
+    VkFramebuffer framebuffer;
+    safe_VkFramebufferCreateInfo createInfo;
+    std::shared_ptr<const RENDER_PASS_STATE> rp_state;
+    FRAMEBUFFER_STATE(VkFramebuffer fb, const VkFramebufferCreateInfo *pCreateInfo, std::shared_ptr<RENDER_PASS_STATE> &&rpstate)
+        : framebuffer(fb), createInfo(pCreateInfo), rp_state(rpstate){};
+};
+
+struct SHADER_MODULE_STATE;
+struct DeviceExtensions;
+
+struct DeviceFeatures {
+    VkPhysicalDeviceFeatures core;
+    VkPhysicalDeviceDescriptorIndexingFeaturesEXT descriptor_indexing;
+    VkPhysicalDevice8BitStorageFeaturesKHR eight_bit_storage;
+    VkPhysicalDeviceExclusiveScissorFeaturesNV exclusive_scissor;
+    VkPhysicalDeviceShadingRateImageFeaturesNV shading_rate_image;
+    VkPhysicalDeviceMeshShaderFeaturesNV mesh_shader;
+    VkPhysicalDeviceInlineUniformBlockFeaturesEXT inline_uniform_block;
+    VkPhysicalDeviceTransformFeedbackFeaturesEXT transform_feedback_features;
+    VkPhysicalDeviceFloat16Int8FeaturesKHR float16_int8;
+    VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT vtx_attrib_divisor_features;
+    VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR uniform_buffer_standard_layout;
+    VkPhysicalDeviceScalarBlockLayoutFeaturesEXT scalar_block_layout_features;
+    VkPhysicalDeviceBufferDeviceAddressFeaturesKHR buffer_device_address;
+    VkPhysicalDeviceBufferDeviceAddressFeaturesEXT buffer_device_address_ext;
+    VkPhysicalDeviceCooperativeMatrixFeaturesNV cooperative_matrix_features;
+    VkPhysicalDeviceHostQueryResetFeaturesEXT host_query_reset_features;
+    VkPhysicalDeviceComputeShaderDerivativesFeaturesNV compute_shader_derivatives_features;
+    VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV fragment_shader_barycentric_features;
+    VkPhysicalDeviceShaderImageFootprintFeaturesNV shader_image_footprint_features;
+    VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT fragment_shader_interlock_features;
+    VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT demote_to_helper_invocation_features;
+    VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT texel_buffer_alignment_features;
+    VkPhysicalDeviceImagelessFramebufferFeaturesKHR imageless_framebuffer_features;
+    VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR pipeline_exe_props_features;
+    VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV dedicated_allocation_image_aliasing_features;
+    VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR subgroup_extended_types_features;
+    VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR separate_depth_stencil_layouts_features;
+    VkPhysicalDevicePerformanceQueryFeaturesKHR performance_query_features;
+    VkPhysicalDeviceTimelineSemaphoreFeaturesKHR timeline_semaphore_features;
+    VkPhysicalDeviceCoherentMemoryFeaturesAMD device_coherent_memory_features;
+};
+
+enum RenderPassCreateVersion { RENDER_PASS_VERSION_1 = 0, RENDER_PASS_VERSION_2 = 1 };
+enum CommandVersion { CMD_VERSION_1 = 0, CMD_VERSION_2 = 1 };
+
+enum BarrierOperationsType {
+    kAllAcquire,  // All Barrier operations are "ownership acquire" operations
+    kAllRelease,  // All Barrier operations are "ownership release" operations
+    kGeneral,     // Either no ownership operations or a mix of ownership operation types and/or non-ownership operations
+};
+
+ImageSubresourceLayoutMap *GetImageSubresourceLayoutMap(CMD_BUFFER_STATE *cb_state, const IMAGE_STATE &image_state);
+const ImageSubresourceLayoutMap *GetImageSubresourceLayoutMap(const CMD_BUFFER_STATE *cb_state, VkImage image);
+
+#endif  // CORE_VALIDATION_TYPES_H_
diff --git a/src/third_party/vulkan-validation-layers/src/layers/descriptor_sets.cpp b/src/third_party/vulkan-validation-layers/src/layers/descriptor_sets.cpp
new file mode 100644
index 0000000..8c10772
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/descriptor_sets.cpp
@@ -0,0 +1,2654 @@
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (C) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Tobin Ehlis <tobine@google.com>
+ *         John Zulauf <jzulauf@lunarg.com>
+ */
+
+// Allow use of STL min and max functions in Windows
+#define NOMINMAX
+
+#include "chassis.h"
+#include "core_validation_error_enums.h"
+#include "core_validation.h"
+#include "descriptor_sets.h"
+#include "hash_vk_types.h"
+#include "vk_enum_string_helper.h"
+#include "vk_safe_struct.h"
+#include "vk_typemap_helper.h"
+#include "buffer_validation.h"
+#include <sstream>
+#include <algorithm>
+#include <array>
+#include <memory>
+
+// ExtendedBinding collects a VkDescriptorSetLayoutBinding and any extended
+// state that comes from a different array/structure so they can stay together
+// while being sorted by binding number.
+struct ExtendedBinding {
+    ExtendedBinding(const VkDescriptorSetLayoutBinding *l, VkDescriptorBindingFlagsEXT f) : layout_binding(l), binding_flags(f) {}
+
+    const VkDescriptorSetLayoutBinding *layout_binding;
+    VkDescriptorBindingFlagsEXT binding_flags;
+};
+
+struct BindingNumCmp {
+    bool operator()(const ExtendedBinding &a, const ExtendedBinding &b) const {
+        return a.layout_binding->binding < b.layout_binding->binding;
+    }
+};
+
+using DescriptorSet = cvdescriptorset::DescriptorSet;
+using DescriptorSetLayout = cvdescriptorset::DescriptorSetLayout;
+using DescriptorSetLayoutDef = cvdescriptorset::DescriptorSetLayoutDef;
+using DescriptorSetLayoutId = cvdescriptorset::DescriptorSetLayoutId;
+
+// Canonical dictionary of DescriptorSetLayoutDef (without any handle/device specific information)
+cvdescriptorset::DescriptorSetLayoutDict descriptor_set_layout_dict;
+
+DescriptorSetLayoutId GetCanonicalId(const VkDescriptorSetLayoutCreateInfo *p_create_info) {
+    return descriptor_set_layout_dict.look_up(DescriptorSetLayoutDef(p_create_info));
+}
+
+// Construct DescriptorSetLayout instance from given create info
+// Proactively reserve and resize where possible, as reallocation was visible in profiling
+cvdescriptorset::DescriptorSetLayoutDef::DescriptorSetLayoutDef(const VkDescriptorSetLayoutCreateInfo *p_create_info)
+    : flags_(p_create_info->flags), binding_count_(0), descriptor_count_(0), dynamic_descriptor_count_(0) {
+    const auto *flags_create_info = lvl_find_in_chain<VkDescriptorSetLayoutBindingFlagsCreateInfoEXT>(p_create_info->pNext);
+
+    binding_type_stats_ = {0, 0, 0};
+    std::set<ExtendedBinding, BindingNumCmp> sorted_bindings;
+    const uint32_t input_bindings_count = p_create_info->bindingCount;
+    // Sort the input bindings in binding number order, eliminating duplicates
+    for (uint32_t i = 0; i < input_bindings_count; i++) {
+        VkDescriptorBindingFlagsEXT flags = 0;
+        if (flags_create_info && flags_create_info->bindingCount == p_create_info->bindingCount) {
+            flags = flags_create_info->pBindingFlags[i];
+        }
+        sorted_bindings.insert(ExtendedBinding(p_create_info->pBindings + i, flags));
+    }
+
+    // Store the create info in the sorted order from above
+    std::map<uint32_t, uint32_t> binding_to_dyn_count;
+    uint32_t index = 0;
+    binding_count_ = static_cast<uint32_t>(sorted_bindings.size());
+    bindings_.reserve(binding_count_);
+    binding_flags_.reserve(binding_count_);
+    binding_to_index_map_.reserve(binding_count_);
+    for (auto input_binding : sorted_bindings) {
+        // Add to bindings and map so that lookups are robust to invalid duplication of binding_num
+        const auto binding_num = input_binding.layout_binding->binding;
+        binding_to_index_map_[binding_num] = index++;
+        bindings_.emplace_back(input_binding.layout_binding);
+        auto &binding_info = bindings_.back();
+        binding_flags_.emplace_back(input_binding.binding_flags);
+
+        descriptor_count_ += binding_info.descriptorCount;
+        if (binding_info.descriptorCount > 0) {
+            non_empty_bindings_.insert(binding_num);
+        }
+
+        if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
+            binding_info.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) {
+            binding_to_dyn_count[binding_num] = binding_info.descriptorCount;
+            dynamic_descriptor_count_ += binding_info.descriptorCount;
+            binding_type_stats_.dynamic_buffer_count++;
+        } else if ((binding_info.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
+                   (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)) {
+            binding_type_stats_.non_dynamic_buffer_count++;
+        } else {
+            binding_type_stats_.image_sampler_count++;
+        }
+    }
+    assert(bindings_.size() == binding_count_);
+    assert(binding_flags_.size() == binding_count_);
+    uint32_t global_index = 0;
+    global_index_range_.reserve(binding_count_);
+    // Vector order is finalized so build vectors of descriptors and dynamic offsets by binding index
+    for (uint32_t i = 0; i < binding_count_; ++i) {
+        auto final_index = global_index + bindings_[i].descriptorCount;
+        global_index_range_.emplace_back(global_index, final_index);
+        global_index = final_index;
+    }
+
+    // Now create dyn offset array mapping for any dynamic descriptors
+    uint32_t dyn_array_idx = 0;
+    binding_to_dynamic_array_idx_map_.reserve(binding_to_dyn_count.size());
+    for (const auto &bc_pair : binding_to_dyn_count) {
+        binding_to_dynamic_array_idx_map_[bc_pair.first] = dyn_array_idx;
+        dyn_array_idx += bc_pair.second;
+    }
+}
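+
+// Worked example (hypothetical layout, for illustration only): with sorted bindings
+//   binding 0: descriptorCount 2, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
+//   binding 3: descriptorCount 4, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
+// the constructor above produces
+//   binding_to_index_map_             = { 0 -> 0, 3 -> 1 }
+//   global_index_range_               = { [0, 2), [2, 6) }
+//   binding_to_dynamic_array_idx_map_ = { 0 -> 0 }   (only dynamic bindings appear here)
+//   descriptor_count_ = 6, dynamic_descriptor_count_ = 2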
+
+size_t cvdescriptorset::DescriptorSetLayoutDef::hash() const {
+    hash_util::HashCombiner hc;
+    hc << flags_;
+    hc.Combine(bindings_);
+    hc.Combine(binding_flags_);
+    return hc.Value();
+}
+
+// Return a valid index or "end", i.e. binding_count_.
+// The asserts in the "Get" functions are reduced to the set where no valid answer (like null or 0) could be given.
+// Common code for all binding lookups.
+uint32_t cvdescriptorset::DescriptorSetLayoutDef::GetIndexFromBinding(uint32_t binding) const {
+    const auto &bi_itr = binding_to_index_map_.find(binding);
+    if (bi_itr != binding_to_index_map_.cend()) return bi_itr->second;
+    return GetBindingCount();
+}
+VkDescriptorSetLayoutBinding const *cvdescriptorset::DescriptorSetLayoutDef::GetDescriptorSetLayoutBindingPtrFromIndex(
+    const uint32_t index) const {
+    if (index >= bindings_.size()) return nullptr;
+    return bindings_[index].ptr();
+}
+// Return descriptorCount for given index, 0 if index is unavailable
+uint32_t cvdescriptorset::DescriptorSetLayoutDef::GetDescriptorCountFromIndex(const uint32_t index) const {
+    if (index >= bindings_.size()) return 0;
+    return bindings_[index].descriptorCount;
+}
+// For the given index, return descriptorType
+VkDescriptorType cvdescriptorset::DescriptorSetLayoutDef::GetTypeFromIndex(const uint32_t index) const {
+    assert(index < bindings_.size());
+    if (index < bindings_.size()) return bindings_[index].descriptorType;
+    return VK_DESCRIPTOR_TYPE_MAX_ENUM;
+}
+// For the given index, return stageFlags
+VkShaderStageFlags cvdescriptorset::DescriptorSetLayoutDef::GetStageFlagsFromIndex(const uint32_t index) const {
+    assert(index < bindings_.size());
+    if (index < bindings_.size()) return bindings_[index].stageFlags;
+    return VkShaderStageFlags(0);
+}
+// Return binding flags for given index, 0 if index is unavailable
+VkDescriptorBindingFlagsEXT cvdescriptorset::DescriptorSetLayoutDef::GetDescriptorBindingFlagsFromIndex(
+    const uint32_t index) const {
+    if (index >= binding_flags_.size()) return 0;
+    return binding_flags_[index];
+}
+
+const cvdescriptorset::IndexRange &cvdescriptorset::DescriptorSetLayoutDef::GetGlobalIndexRangeFromIndex(uint32_t index) const {
+    const static IndexRange kInvalidRange = {0xFFFFFFFF, 0xFFFFFFFF};
+    if (index >= global_index_range_.size()) return kInvalidRange;
+    return global_index_range_[index];
+}
+
+// For the given binding, return the global index range (half open)
+// As start and end are often needed in pairs, get both with a single lookup.
+const cvdescriptorset::IndexRange &cvdescriptorset::DescriptorSetLayoutDef::GetGlobalIndexRangeFromBinding(
+    const uint32_t binding) const {
+    uint32_t index = GetIndexFromBinding(binding);
+    return GetGlobalIndexRangeFromIndex(index);
+}
+
+// For given binding, return ptr to ImmutableSampler array
+VkSampler const *cvdescriptorset::DescriptorSetLayoutDef::GetImmutableSamplerPtrFromBinding(const uint32_t binding) const {
+    const auto &bi_itr = binding_to_index_map_.find(binding);
+    if (bi_itr != binding_to_index_map_.end()) {
+        return bindings_[bi_itr->second].pImmutableSamplers;
+    }
+    return nullptr;
+}
+// Move to next valid binding having a non-zero binding count
+uint32_t cvdescriptorset::DescriptorSetLayoutDef::GetNextValidBinding(const uint32_t binding) const {
+    auto it = non_empty_bindings_.upper_bound(binding);
+    assert(it != non_empty_bindings_.cend());
+    if (it != non_empty_bindings_.cend()) return *it;
+    return GetMaxBinding() + 1;
+}
+// For given index, return ptr to ImmutableSampler array
+VkSampler const *cvdescriptorset::DescriptorSetLayoutDef::GetImmutableSamplerPtrFromIndex(const uint32_t index) const {
+    if (index < bindings_.size()) {
+        return bindings_[index].pImmutableSamplers;
+    }
+    return nullptr;
+}
+
+// If our layout is compatible with rh_ds_layout, return true.
+bool cvdescriptorset::DescriptorSetLayout::IsCompatible(DescriptorSetLayout const *rh_ds_layout) const {
+    bool compatible = (this == rh_ds_layout) || (GetLayoutDef() == rh_ds_layout->GetLayoutDef());
+    return compatible;
+}
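+
+// Compatibility therefore reduces to identity of the canonical DescriptorSetLayoutDef from
+// descriptor_set_layout_dict: two distinct VkDescriptorSetLayout handles created from
+// identical create infos share one canonical def and compare as compatible.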
+
+// TODO: Find a way to add smarts to the autogenerated version of this
+static std::string smart_string_VkShaderStageFlags(VkShaderStageFlags stage_flags) {
+    if (stage_flags == VK_SHADER_STAGE_ALL) {
+        return string_VkShaderStageFlagBits(VK_SHADER_STAGE_ALL);
+    }
+
+    return string_VkShaderStageFlags(stage_flags);
+}
+
+// If our layout is compatible with bound_dsl, return true,
+//  else return false and fill in error_msg with a description of what causes the incompatibility
+bool cvdescriptorset::VerifySetLayoutCompatibility(const debug_report_data *report_data, DescriptorSetLayout const *layout_dsl,
+                                                   DescriptorSetLayout const *bound_dsl, std::string *error_msg) {
+    // Short circuit the detailed check.
+    if (layout_dsl->IsCompatible(bound_dsl)) return true;
+
+    // Do a detailed compatibility check of this lhs def (referenced by layout_dsl), vs. the rhs (layout and def)
+    // Should only be run if trivial accept has failed, and in that context should return false.
+    VkDescriptorSetLayout layout_dsl_handle = layout_dsl->GetDescriptorSetLayout();
+    VkDescriptorSetLayout bound_dsl_handle = bound_dsl->GetDescriptorSetLayout();
+    DescriptorSetLayoutDef const *layout_ds_layout_def = layout_dsl->GetLayoutDef();
+    DescriptorSetLayoutDef const *bound_ds_layout_def = bound_dsl->GetLayoutDef();
+
+    // Check descriptor counts
+    const auto bound_total_count = bound_ds_layout_def->GetTotalDescriptorCount();
+    if (layout_ds_layout_def->GetTotalDescriptorCount() != bound_ds_layout_def->GetTotalDescriptorCount()) {
+        std::stringstream error_str;
+        error_str << report_data->FormatHandle(layout_dsl_handle) << " from pipeline layout has "
+                  << layout_ds_layout_def->GetTotalDescriptorCount() << " total descriptors, but "
+                  << report_data->FormatHandle(bound_dsl_handle) << ", which is bound, has " << bound_total_count
+                  << " total descriptors.";
+        *error_msg = error_str.str();
+        return false;  // trivial fail case
+    }
+
+    // Descriptor counts match so need to go through bindings one-by-one
+    //  and verify that type and stageFlags match
+    for (const auto &layout_binding : layout_ds_layout_def->GetBindings()) {
+        // TODO : Do we also need to check immutable samplers?
+        const auto bound_binding = bound_ds_layout_def->GetBindingInfoFromBinding(layout_binding.binding);
+        if (layout_binding.descriptorCount != bound_binding->descriptorCount) {
+            std::stringstream error_str;
+            error_str << "Binding " << layout_binding.binding << " for " << report_data->FormatHandle(layout_dsl_handle)
+                      << " from pipeline layout has a descriptorCount of " << layout_binding.descriptorCount << " but binding "
+                      << layout_binding.binding << " for " << report_data->FormatHandle(bound_dsl_handle)
+                      << ", which is bound, has a descriptorCount of " << bound_binding->descriptorCount;
+            *error_msg = error_str.str();
+            return false;
+        } else if (layout_binding.descriptorType != bound_binding->descriptorType) {
+            std::stringstream error_str;
+            error_str << "Binding " << layout_binding.binding << " for " << report_data->FormatHandle(layout_dsl_handle)
+                      << " from pipeline layout is type '" << string_VkDescriptorType(layout_binding.descriptorType)
+                      << "' but binding " << layout_binding.binding << " for " << report_data->FormatHandle(bound_dsl_handle)
+                      << ", which is bound, is type '" << string_VkDescriptorType(bound_binding->descriptorType) << "'";
+            *error_msg = error_str.str();
+            return false;
+        } else if (layout_binding.stageFlags != bound_binding->stageFlags) {
+            std::stringstream error_str;
+            error_str << "Binding " << layout_binding.binding << " for " << report_data->FormatHandle(layout_dsl_handle)
+                      << " from pipeline layout has stageFlags " << smart_string_VkShaderStageFlags(layout_binding.stageFlags)
+                      << " but binding " << layout_binding.binding << " for " << report_data->FormatHandle(bound_dsl_handle)
+                      << ", which is bound, has stageFlags " << smart_string_VkShaderStageFlags(bound_binding->stageFlags);
+            *error_msg = error_str.str();
+            return false;
+        }
+    }
+
+    const auto &ds_layout_flags = layout_ds_layout_def->GetBindingFlags();
+    const auto &bound_layout_flags = bound_ds_layout_def->GetBindingFlags();
+    if (bound_layout_flags != ds_layout_flags) {
+        std::stringstream error_str;
+        assert(ds_layout_flags.size() == bound_layout_flags.size());
+        size_t i;
+        for (i = 0; i < ds_layout_flags.size(); i++) {
+            if (ds_layout_flags[i] != bound_layout_flags[i]) break;
+        }
+        error_str << report_data->FormatHandle(layout_dsl_handle)
+                  << " from pipeline layout does not have the same binding flags at binding " << i << " ( "
+                  << string_VkDescriptorBindingFlagsEXT(ds_layout_flags[i]) << " ) as "
+                  << report_data->FormatHandle(bound_dsl_handle) << " ( "
+                  << string_VkDescriptorBindingFlagsEXT(bound_layout_flags[i]) << " ), which is bound";
+        *error_msg = error_str.str();
+        return false;
+    }
+
+    // No detailed check should succeed if the trivial check failed -- or the dictionary has failed somehow.
+    bool compatible = true;
+    assert(!compatible);
+    return compatible;
+}
+
+bool cvdescriptorset::DescriptorSetLayoutDef::IsNextBindingConsistent(const uint32_t binding) const {
+    if (!binding_to_index_map_.count(binding + 1)) return false;
+    auto const &bi_itr = binding_to_index_map_.find(binding);
+    if (bi_itr != binding_to_index_map_.end()) {
+        const auto &next_bi_itr = binding_to_index_map_.find(binding + 1);
+        if (next_bi_itr != binding_to_index_map_.end()) {
+            auto type = bindings_[bi_itr->second].descriptorType;
+            auto stage_flags = bindings_[bi_itr->second].stageFlags;
+            auto immut_samp = bindings_[bi_itr->second].pImmutableSamplers ? true : false;
+            auto flags = binding_flags_[bi_itr->second];
+            if ((type != bindings_[next_bi_itr->second].descriptorType) ||
+                (stage_flags != bindings_[next_bi_itr->second].stageFlags) ||
+                (immut_samp != (bindings_[next_bi_itr->second].pImmutableSamplers ? true : false)) ||
+                (flags != binding_flags_[next_bi_itr->second])) {
+                return false;
+            }
+            return true;
+        }
+    }
+    return false;
+}
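+
+// Illustrative example: bindings N and N+1 that are both VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
+// VK_SHADER_STAGE_FRAGMENT_BIT, have no immutable samplers and share the same binding flags
+// are "consistent", so a write update that overflows binding N may roll over into binding N+1.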
+
+// The DescriptorSetLayout stores the per-handle data for a descriptor set layout, and references the common definition for
+// the handle-invariant portion
+cvdescriptorset::DescriptorSetLayout::DescriptorSetLayout(const VkDescriptorSetLayoutCreateInfo *p_create_info,
+                                                          const VkDescriptorSetLayout layout)
+    : layout_(layout), layout_id_(GetCanonicalId(p_create_info)) {}
+
+// Validate descriptor set layout create info
+bool cvdescriptorset::ValidateDescriptorSetLayoutCreateInfo(
+    const debug_report_data *report_data, const VkDescriptorSetLayoutCreateInfo *create_info, const bool push_descriptor_ext,
+    const uint32_t max_push_descriptors, const bool descriptor_indexing_ext,
+    const VkPhysicalDeviceDescriptorIndexingFeaturesEXT *descriptor_indexing_features,
+    const VkPhysicalDeviceInlineUniformBlockFeaturesEXT *inline_uniform_block_features,
+    const VkPhysicalDeviceInlineUniformBlockPropertiesEXT *inline_uniform_block_props, const DeviceExtensions *device_extensions) {
+    bool skip = false;
+    std::unordered_set<uint32_t> bindings;
+    uint64_t total_descriptors = 0;
+
+    const auto *flags_create_info = lvl_find_in_chain<VkDescriptorSetLayoutBindingFlagsCreateInfoEXT>(create_info->pNext);
+
+    const bool push_descriptor_set = !!(create_info->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
+    if (push_descriptor_set && !push_descriptor_ext) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        kVUID_Core_DrawState_ExtensionNotEnabled,
+                        "Attempted to use %s in %s but its required extension %s has not been enabled.\n",
+                        "VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR", "VkDescriptorSetLayoutCreateInfo::flags",
+                        VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+    }
+
+    const bool update_after_bind_set = !!(create_info->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT);
+    if (update_after_bind_set && !descriptor_indexing_ext) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        kVUID_Core_DrawState_ExtensionNotEnabled,
+                        "Attempted to use %s in %s but its required extension %s has not been enabled.\n",
+                        "VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT", "VkDescriptorSetLayoutCreateInfo::flags",
+                        VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME);
+    }
+
+    auto valid_type = [push_descriptor_set](const VkDescriptorType type) {
+        return !push_descriptor_set ||
+               ((type != VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) && (type != VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) &&
+                (type != VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT));
+    };
+
+    uint32_t max_binding = 0;
+
+    for (uint32_t i = 0; i < create_info->bindingCount; ++i) {
+        const auto &binding_info = create_info->pBindings[i];
+        max_binding = std::max(max_binding, binding_info.binding);
+
+        if (!bindings.insert(binding_info.binding).second) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkDescriptorSetLayoutCreateInfo-binding-00279",
+                            "duplicated binding number in VkDescriptorSetLayoutBinding.");
+        }
+        if (!valid_type(binding_info.descriptorType)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT)
+                                ? "VUID-VkDescriptorSetLayoutCreateInfo-flags-02208"
+                                : "VUID-VkDescriptorSetLayoutCreateInfo-flags-00280",
+                            "invalid type %s for push descriptors in VkDescriptorSetLayoutBinding entry %" PRIu32 ".",
+                            string_VkDescriptorType(binding_info.descriptorType), i);
+        }
+
+        if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
+            if (!device_extensions->vk_ext_inline_uniform_block) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, 0,
+                                "UNASSIGNED-Extension not enabled",
+                                "Creating VkDescriptorSetLayout with descriptor type VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT "
+                                "but extension %s is missing",
+                                VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME);
+            } else {
+                if ((binding_info.descriptorCount % 4) != 0) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    "VUID-VkDescriptorSetLayoutBinding-descriptorType-02209",
+                                    "descriptorCount (%" PRIu32 ") must be a multiple of 4", binding_info.descriptorCount);
+                }
+                if (binding_info.descriptorCount > inline_uniform_block_props->maxInlineUniformBlockSize) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    "VUID-VkDescriptorSetLayoutBinding-descriptorType-02210",
+                                    "descriptorCount (%" PRIu32 ") must be less than or equal to maxInlineUniformBlockSize",
+                                    binding_info.descriptorCount);
+                }
+            }
+        }
+
+        total_descriptors += binding_info.descriptorCount;
+    }
+
+    if (flags_create_info) {
+        if (flags_create_info->bindingCount != 0 && flags_create_info->bindingCount != create_info->bindingCount) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-bindingCount-03002",
+                            "VkDescriptorSetLayoutCreateInfo::bindingCount (%d) != "
+                            "VkDescriptorSetLayoutBindingFlagsCreateInfoEXT::bindingCount (%d)",
+                            create_info->bindingCount, flags_create_info->bindingCount);
+        }
+
+        if (flags_create_info->bindingCount == create_info->bindingCount) {
+            for (uint32_t i = 0; i < create_info->bindingCount; ++i) {
+                const auto &binding_info = create_info->pBindings[i];
+
+                if (flags_create_info->pBindingFlags[i] & VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT) {
+                    if (!update_after_bind_set) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        "VUID-VkDescriptorSetLayoutCreateInfo-flags-03000",
+                                        "Invalid flags for VkDescriptorSetLayoutBinding entry %" PRIu32, i);
+                    }
+
+                    if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER &&
+                        !descriptor_indexing_features->descriptorBindingUniformBufferUpdateAfterBind) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-"
+                                        "descriptorBindingUniformBufferUpdateAfterBind-03005",
+                                        "Invalid flags for VkDescriptorSetLayoutBinding entry %" PRIu32, i);
+                    }
+                    if ((binding_info.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER ||
+                         binding_info.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
+                         binding_info.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) &&
+                        !descriptor_indexing_features->descriptorBindingSampledImageUpdateAfterBind) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-"
+                                        "descriptorBindingSampledImageUpdateAfterBind-03006",
+                                        "Invalid flags for VkDescriptorSetLayoutBinding entry %" PRIu32, i);
+                    }
+                    if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE &&
+                        !descriptor_indexing_features->descriptorBindingStorageImageUpdateAfterBind) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-"
+                                        "descriptorBindingStorageImageUpdateAfterBind-03007",
+                                        "Invalid flags for VkDescriptorSetLayoutBinding entry %" PRIu32, i);
+                    }
+                    if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER &&
+                        !descriptor_indexing_features->descriptorBindingStorageBufferUpdateAfterBind) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-"
+                                        "descriptorBindingStorageBufferUpdateAfterBind-03008",
+                                        "Invalid flags for VkDescriptorSetLayoutBinding entry %" PRIu32, i);
+                    }
+                    if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER &&
+                        !descriptor_indexing_features->descriptorBindingUniformTexelBufferUpdateAfterBind) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-"
+                                        "descriptorBindingUniformTexelBufferUpdateAfterBind-03009",
+                                        "Invalid flags for VkDescriptorSetLayoutBinding entry %" PRIu32, i);
+                    }
+                    if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER &&
+                        !descriptor_indexing_features->descriptorBindingStorageTexelBufferUpdateAfterBind) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-"
+                                        "descriptorBindingStorageTexelBufferUpdateAfterBind-03010",
+                                        "Invalid flags for VkDescriptorSetLayoutBinding entry %" PRIu32, i);
+                    }
+                    if ((binding_info.descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT ||
+                         binding_info.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
+                         binding_info.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-None-03011",
+                                        "Invalid flags for VkDescriptorSetLayoutBinding entry %" PRIu32, i);
+                    }
+
+                    if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT &&
+                        !inline_uniform_block_features->descriptorBindingInlineUniformBlockUpdateAfterBind) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-"
+                                        "descriptorBindingInlineUniformBlockUpdateAfterBind-02211",
+                                        "Invalid flags (VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT) for "
+                                        "VkDescriptorSetLayoutBinding entry %" PRIu32
+                                        " with descriptorBindingInlineUniformBlockUpdateAfterBind not enabled",
+                                        i);
+                    }
+                }
+
+                if (flags_create_info->pBindingFlags[i] & VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT) {
+                    if (!descriptor_indexing_features->descriptorBindingUpdateUnusedWhilePending) {
+                        skip |= log_msg(
+                            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingUpdateUnusedWhilePending-03012",
+                            "Invalid flags for VkDescriptorSetLayoutBinding entry %" PRIu32, i);
+                    }
+                }
+
+                if (flags_create_info->pBindingFlags[i] & VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT) {
+                    if (!descriptor_indexing_features->descriptorBindingPartiallyBound) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingPartiallyBound-03013",
+                                        "Invalid flags for VkDescriptorSetLayoutBinding entry %" PRIu32, i);
+                    }
+                }
+
+                if (flags_create_info->pBindingFlags[i] & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT) {
+                    if (binding_info.binding != max_binding) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-pBindingFlags-03004",
+                                        "Invalid flags for VkDescriptorSetLayoutBinding entry %" PRIu32, i);
+                    }
+
+                    if (!descriptor_indexing_features->descriptorBindingVariableDescriptorCount) {
+                        skip |= log_msg(
+                            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingVariableDescriptorCount-03014",
+                            "Invalid flags for VkDescriptorSetLayoutBinding entry %" PRIu32, i);
+                    }
+                    if ((binding_info.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
+                         binding_info.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-pBindingFlags-03015",
+                                        "Invalid flags for VkDescriptorSetLayoutBinding entry %" PRIu32, i);
+                    }
+                }
+
+                if (push_descriptor_set &&
+                    (flags_create_info->pBindingFlags[i] &
+                     (VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT | VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT |
+                      VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT))) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-flags-03003",
+                                    "Invalid flags for VkDescriptorSetLayoutBinding entry %" PRIu32, i);
+                }
+            }
+        }
+    }
+
+    if ((push_descriptor_set) && (total_descriptors > max_push_descriptors)) {
+        const char *undefined = push_descriptor_ext ? "" : " -- undefined";
+        skip |=
+            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                    "VUID-VkDescriptorSetLayoutCreateInfo-flags-00281",
+                    "for push descriptor, total descriptor count in layout (%" PRIu64
+                    ") must not be greater than VkPhysicalDevicePushDescriptorPropertiesKHR::maxPushDescriptors (%" PRIu32 "%s).",
+                    total_descriptors, max_push_descriptors, undefined);
+    }
+
+    return skip;
+}
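+
+// Illustrative (hypothetical) trigger for the duplicate-binding check above:
+//   VkDescriptorSetLayoutBinding dup[2] = {
+//       {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_VERTEX_BIT, nullptr},
+//       {0, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr}};
+//   VkDescriptorSetLayoutCreateInfo ci = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
+//                                         nullptr, 0, 2, dup};
+//   // both entries use binding 0, so validation reports
+//   // "VUID-VkDescriptorSetLayoutCreateInfo-binding-00279"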
+
+cvdescriptorset::AllocateDescriptorSetsData::AllocateDescriptorSetsData(uint32_t count)
+    : required_descriptors_by_type{}, layout_nodes(count, nullptr) {}
+
+cvdescriptorset::DescriptorSet::DescriptorSet(const VkDescriptorSet set, DESCRIPTOR_POOL_STATE *pool_state,
+                                              const std::shared_ptr<DescriptorSetLayout const> &layout, uint32_t variable_count,
+                                              cvdescriptorset::DescriptorSet::StateTracker *state_data,
+                                              const debug_report_data *report_data)
+    : some_update_(false),
+      set_(set),
+      pool_state_(pool_state),
+      p_layout_(layout),
+      report_data_(report_data),
+      variable_count_(variable_count),
+      change_count_(0) {
+    // For each binding, create default descriptors of the given type
+    descriptors_.reserve(p_layout_->GetTotalDescriptorCount());
+    for (uint32_t i = 0; i < p_layout_->GetBindingCount(); ++i) {
+        auto type = p_layout_->GetTypeFromIndex(i);
+        switch (type) {
+            case VK_DESCRIPTOR_TYPE_SAMPLER: {
+                auto immut_sampler = p_layout_->GetImmutableSamplerPtrFromIndex(i);
+                for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di) {
+                    if (immut_sampler) {
+                        descriptors_.emplace_back(new SamplerDescriptor(state_data, immut_sampler + di));
+                        some_update_ = true;  // Immutable samplers are updated at creation
+                    } else
+                        descriptors_.emplace_back(new SamplerDescriptor(state_data, nullptr));
+                }
+                break;
+            }
+            case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
+                auto immut = p_layout_->GetImmutableSamplerPtrFromIndex(i);
+                for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di) {
+                    if (immut) {
+                        descriptors_.emplace_back(new ImageSamplerDescriptor(state_data, immut + di));
+                        some_update_ = true;  // Immutable samplers are updated at creation
+                    } else
+                        descriptors_.emplace_back(new ImageSamplerDescriptor(state_data, nullptr));
+                }
+                break;
+            }
+            // ImageDescriptors
+            case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+            case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
+            case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
+                for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di)
+                    descriptors_.emplace_back(new ImageDescriptor(type));
+                break;
+            case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+            case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
+                for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di)
+                    descriptors_.emplace_back(new TexelDescriptor(type));
+                break;
+            case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+            case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+            case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+            case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
+                for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di)
+                    descriptors_.emplace_back(new BufferDescriptor(type));
+                break;
+            case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
+                for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di)
+                    descriptors_.emplace_back(new InlineUniformDescriptor(type));
+                break;
+            case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV:
+                for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di)
+                    descriptors_.emplace_back(new AccelerationStructureDescriptor(type));
+                break;
+            default:
+                assert(0);  // Bad descriptor type specified
+                break;
+        }
+    }
+}
+
+cvdescriptorset::DescriptorSet::~DescriptorSet() {}
+
+static std::string StringDescriptorReqViewType(descriptor_req req) {
+    std::string result("");
+    for (unsigned i = 0; i <= VK_IMAGE_VIEW_TYPE_END_RANGE; i++) {
+        if (req & (1 << i)) {
+            if (result.size()) result += ", ";
+            result += string_VkImageViewType(VkImageViewType(i));
+        }
+    }
+
+    if (!result.size()) result = "(none)";
+
+    return result;
+}
+
+static char const *StringDescriptorReqComponentType(descriptor_req req) {
+    if (req & DESCRIPTOR_REQ_COMPONENT_TYPE_SINT) return "SINT";
+    if (req & DESCRIPTOR_REQ_COMPONENT_TYPE_UINT) return "UINT";
+    if (req & DESCRIPTOR_REQ_COMPONENT_TYPE_FLOAT) return "FLOAT";
+    return "(none)";
+}
+
+unsigned DescriptorRequirementsBitsFromFormat(VkFormat fmt) {
+    if (FormatIsSInt(fmt)) return DESCRIPTOR_REQ_COMPONENT_TYPE_SINT;
+    if (FormatIsUInt(fmt)) return DESCRIPTOR_REQ_COMPONENT_TYPE_UINT;
+    if (FormatIsDepthAndStencil(fmt)) return DESCRIPTOR_REQ_COMPONENT_TYPE_FLOAT | DESCRIPTOR_REQ_COMPONENT_TYPE_UINT;
+    if (fmt == VK_FORMAT_UNDEFINED) return 0;
+    // everything else -- UNORM/SNORM/FLOAT/USCALED/SSCALED is all float in the shader.
+    return DESCRIPTOR_REQ_COMPONENT_TYPE_FLOAT;
+}
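+
+// Illustrative examples of the mapping above:
+//   VK_FORMAT_R32G32B32A32_SINT -> DESCRIPTOR_REQ_COMPONENT_TYPE_SINT
+//   VK_FORMAT_R8G8B8A8_UINT     -> DESCRIPTOR_REQ_COMPONENT_TYPE_UINT
+//   VK_FORMAT_D24_UNORM_S8_UINT -> DESCRIPTOR_REQ_COMPONENT_TYPE_FLOAT | DESCRIPTOR_REQ_COMPONENT_TYPE_UINT
+//   VK_FORMAT_R8G8B8A8_UNORM    -> DESCRIPTOR_REQ_COMPONENT_TYPE_FLOAT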
+
+// Validate that the state of this set is appropriate for the given bindings and dynamic_offsets at Draw time
+//  This includes validating that all descriptors in the given bindings are updated,
+//  that any update buffers are valid, and that any dynamic offsets are within the bounds of their buffers.
+// Return true if state is acceptable, or false and write an error message into error string
+bool CoreChecks::ValidateDrawState(const DescriptorSet *descriptor_set, const std::map<uint32_t, descriptor_req> &bindings,
+                                   const std::vector<uint32_t> &dynamic_offsets, const CMD_BUFFER_STATE *cb_node,
+                                   const char *caller, std::string *error) const {
+    for (auto binding_pair : bindings) {
+        auto binding = binding_pair.first;
+        DescriptorSetLayout::ConstBindingIterator binding_it(descriptor_set->GetLayout().get(), binding);
+        if (binding_it.AtEnd()) {  // Being at the end immediately after construction indicates an invalid binding.
+            std::stringstream error_str;
+            error_str << "Attempting to validate DrawState for binding #" << binding
+                      << " which is an invalid binding for this descriptor set.";
+            *error = error_str.str();
+            return false;
+        }
+
+        if (binding_it.GetDescriptorBindingFlags() &
+            (VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT)) {
+            // Can't validate the descriptor because it may not have been updated,
+            // or the view could have been destroyed
+            continue;
+        }
+        if (!ValidateDescriptorSetBindingData(cb_node, descriptor_set, dynamic_offsets, binding, binding_pair.second, caller,
+                                              error))
+            return false;
+    }
+    return true;
+}
+
+bool CoreChecks::ValidateDescriptorSetBindingData(const CMD_BUFFER_STATE *cb_node, const DescriptorSet *descriptor_set,
+                                                  const std::vector<uint32_t> &dynamic_offsets, uint32_t binding,
+                                                  descriptor_req reqs, const char *caller, std::string *error) const {
+    using DescriptorClass = cvdescriptorset::DescriptorClass;
+    using BufferDescriptor = cvdescriptorset::BufferDescriptor;
+    using ImageDescriptor = cvdescriptorset::ImageDescriptor;
+    using ImageSamplerDescriptor = cvdescriptorset::ImageSamplerDescriptor;
+    using SamplerDescriptor = cvdescriptorset::SamplerDescriptor;
+    using TexelDescriptor = cvdescriptorset::TexelDescriptor;
+    DescriptorSetLayout::ConstBindingIterator binding_it(descriptor_set->GetLayout().get(), binding);
+    {
+        // Copy the range; the end is subject to update based on variable-length descriptor arrays.
+        cvdescriptorset::IndexRange index_range = binding_it.GetGlobalIndexRange();
+        auto array_idx = 0;  // Track array idx if we're dealing with array descriptors
+
+        if (binding_it.IsVariableDescriptorCount()) {
+            // Only validate the first N descriptors if it uses variable_count
+            index_range.end = index_range.start + descriptor_set->GetVariableDescriptorCount();
+        }
+
+        for (uint32_t i = index_range.start; i < index_range.end; ++i, ++array_idx) {
+            uint32_t index = i - index_range.start;
+            const auto *descriptor = descriptor_set->GetDescriptorFromGlobalIndex(i);
+
+            if (descriptor->GetClass() == DescriptorClass::InlineUniform) {
+                // Can't validate the descriptor because it may not have been updated.
+                continue;
+            } else if (!descriptor->updated) {
+                std::stringstream error_str;
+                error_str << "Descriptor in binding #" << binding << " index " << index
+                          << " is being used in draw but has never been updated via vkUpdateDescriptorSets() or a similar call.";
+                *error = error_str.str();
+                return false;
+            } else {
+                auto descriptor_class = descriptor->GetClass();
+                if (descriptor_class == DescriptorClass::GeneralBuffer) {
+                    // Verify that buffers are valid
+                    auto buffer = static_cast<const BufferDescriptor *>(descriptor)->GetBuffer();
+                    auto buffer_node = static_cast<const BufferDescriptor *>(descriptor)->GetBufferState();
+                    if (!buffer_node || buffer_node->destroyed) {
+                        std::stringstream error_str;
+                        error_str << "Descriptor in binding #" << binding << " index " << index << " is using buffer "
+                                  << report_data->FormatHandle(buffer).c_str() << " that is invalid or has been destroyed.";
+                        *error = error_str.str();
+                        return false;
+                    } else if (!buffer_node->sparse) {
+                        for (auto mem_binding : buffer_node->GetBoundMemory()) {
+                            if (!GetDevMemState(mem_binding)) {
+                                std::stringstream error_str;
+                                error_str << "Descriptor in binding #" << binding << " index " << index << " uses buffer " << buffer
+                                          << " that references invalid memory " << mem_binding << ".";
+                                *error = error_str.str();
+                                return false;
+                            }
+                        }
+                    }
+                    if (descriptor->IsDynamic()) {
+                        // Validate that dynamic offsets are within the buffer
+                        auto buffer_size = buffer_node->createInfo.size;
+                        auto range = static_cast<const BufferDescriptor *>(descriptor)->GetRange();
+                        auto desc_offset = static_cast<const BufferDescriptor *>(descriptor)->GetOffset();
+                        auto dyn_offset = dynamic_offsets[binding_it.GetDynamicOffsetIndex() + array_idx];
+                        if (VK_WHOLE_SIZE == range) {
+                            if ((dyn_offset + desc_offset) > buffer_size) {
+                                std::stringstream error_str;
+                                error_str << "Dynamic descriptor in binding #" << binding << " index " << index << " uses buffer "
+                                          << buffer << " with update range of VK_WHOLE_SIZE has dynamic offset " << dyn_offset
+                                          << " combined with offset " << desc_offset << " that oversteps the buffer size of "
+                                          << buffer_size << ".";
+                                *error = error_str.str();
+                                return false;
+                            }
+                        } else {
+                            if ((dyn_offset + desc_offset + range) > buffer_size) {
+                                std::stringstream error_str;
+                                error_str << "Dynamic descriptor in binding #" << binding << " index " << index << " uses buffer "
+                                          << buffer << " with dynamic offset " << dyn_offset << " combined with offset "
+                                          << desc_offset << " and range " << range << " that oversteps the buffer size of "
+                                          << buffer_size << ".";
+                                *error = error_str.str();
+                                return false;
+                            }
+                        }
+                    }
+                } else if (descriptor_class == DescriptorClass::ImageSampler || descriptor_class == DescriptorClass::Image) {
+                    VkImageView image_view;
+                    VkImageLayout image_layout;
+                    const IMAGE_VIEW_STATE *image_view_state;
+                    if (descriptor_class == DescriptorClass::ImageSampler) {
+                        image_view = static_cast<const ImageSamplerDescriptor *>(descriptor)->GetImageView();
+                        image_view_state = static_cast<const ImageSamplerDescriptor *>(descriptor)->GetImageViewState();
+                        image_layout = static_cast<const ImageSamplerDescriptor *>(descriptor)->GetImageLayout();
+                    } else {
+                        image_view = static_cast<const ImageDescriptor *>(descriptor)->GetImageView();
+                        image_view_state = static_cast<const ImageDescriptor *>(descriptor)->GetImageViewState();
+                        image_layout = static_cast<const ImageDescriptor *>(descriptor)->GetImageLayout();
+                    }
+
+                    if (!image_view_state || image_view_state->destroyed) {
+                        // Image view must have been destroyed since initial update. Could potentially flag the descriptor
+                        //  as "invalid" (updated = false) at DestroyImageView() time and detect this error at bind time
+                        std::stringstream error_str;
+                        error_str << "Descriptor in binding #" << binding << " index " << index << " is using imageView "
+                                  << report_data->FormatHandle(image_view).c_str() << " that is invalid or has been destroyed.";
+                        *error = error_str.str();
+                        return false;
+                    }
+                    const auto &image_view_ci = image_view_state->create_info;
+
+                    if (reqs & DESCRIPTOR_REQ_ALL_VIEW_TYPE_BITS) {
+                        if (~reqs & (1 << image_view_ci.viewType)) {
+                            // bad view type
+                            std::stringstream error_str;
+                            error_str << "Descriptor in binding #" << binding << " index " << index
+                                      << " requires an image view of type " << StringDescriptorReqViewType(reqs) << " but got "
+                                      << string_VkImageViewType(image_view_ci.viewType) << ".";
+                            *error = error_str.str();
+                            return false;
+                        }
+
+                        if (!(reqs & image_view_state->descriptor_format_bits)) {
+                            // bad component type
+                            std::stringstream error_str;
+                            error_str << "Descriptor in binding #" << binding << " index " << index << " requires "
+                                      << StringDescriptorReqComponentType(reqs)
+                                      << " component type, but bound descriptor format is " << string_VkFormat(image_view_ci.format)
+                                      << ".";
+                            *error = error_str.str();
+                            return false;
+                        }
+                    }
+
+                    if (!disabled.image_layout_validation) {
+                        auto image_node = image_view_state->image_state.get();
+                        assert(image_node);
+                        // Verify Image Layout
+                        // No "invalid layout" VUID required for this call, since the optimal_layout parameter is UNDEFINED.
+                        bool hit_error = false;
+                        VerifyImageLayout(cb_node, image_node, image_view_state->normalized_subresource_range,
+                                          image_view_ci.subresourceRange.aspectMask, image_layout, VK_IMAGE_LAYOUT_UNDEFINED,
+                                          caller, kVUIDUndefined, "VUID-VkDescriptorImageInfo-imageLayout-00344", &hit_error);
+                        if (hit_error) {
+                            *error =
+                                "Image layout specified at vkUpdateDescriptorSet* or vkCmdPushDescriptorSet* time "
+                                "doesn't match actual image layout at time descriptor is used. See previous error callback for "
+                                "specific details.";
+                            return false;
+                        }
+                    }
+
+                    // Verify Sample counts
+                    if ((reqs & DESCRIPTOR_REQ_SINGLE_SAMPLE) && image_view_state->samples != VK_SAMPLE_COUNT_1_BIT) {
+                        std::stringstream error_str;
+                        error_str << "Descriptor in binding #" << binding << " index " << index
+                                  << " requires bound image to have VK_SAMPLE_COUNT_1_BIT but got "
+                                  << string_VkSampleCountFlagBits(image_view_state->samples) << ".";
+                        *error = error_str.str();
+                        return false;
+                    }
+                    if ((reqs & DESCRIPTOR_REQ_MULTI_SAMPLE) && image_view_state->samples == VK_SAMPLE_COUNT_1_BIT) {
+                        std::stringstream error_str;
+                        error_str << "Descriptor in binding #" << binding << " index " << index
+                                  << " requires bound image to have multiple samples, but got VK_SAMPLE_COUNT_1_BIT.";
+                        *error = error_str.str();
+                        return false;
+                    }
+                } else if (descriptor_class == DescriptorClass::TexelBuffer) {
+                    auto texel_buffer = static_cast<const TexelDescriptor *>(descriptor);
+                    auto buffer_view = texel_buffer->GetBufferView();
+                    auto buffer_view_state = texel_buffer->GetBufferViewState();
+
+                    if (!buffer_view_state || buffer_view_state->destroyed) {
+                        std::stringstream error_str;
+                        error_str << "Descriptor in binding #" << binding << " index " << index << " is using bufferView "
+                                  << report_data->FormatHandle(buffer_view).c_str() << " that is invalid or has been destroyed.";
+                        *error = error_str.str();
+                        return false;
+                    }
+                    auto buffer = buffer_view_state->create_info.buffer;
+                    auto buffer_state = buffer_view_state->buffer_state.get();
+                    if (buffer_state->destroyed) {
+                        std::stringstream error_str;
+                        error_str << "Descriptor in binding #" << binding << " index " << index << " is using buffer "
+                                  << report_data->FormatHandle(buffer).c_str() << " that has been destroyed.";
+                        *error = error_str.str();
+                        return false;
+                    }
+                    auto format_bits = DescriptorRequirementsBitsFromFormat(buffer_view_state->create_info.format);
+
+                    if (!(reqs & format_bits)) {
+                        // bad component type
+                        std::stringstream error_str;
+                        error_str << "Descriptor in binding #" << binding << " index " << index << " requires "
+                                  << StringDescriptorReqComponentType(reqs) << " component type, but bound descriptor format is "
+                                  << string_VkFormat(buffer_view_state->create_info.format) << ".";
+                        *error = error_str.str();
+                        return false;
+                    }
+                }
+                if (descriptor_class == DescriptorClass::ImageSampler || descriptor_class == DescriptorClass::PlainSampler) {
+                    // Verify Sampler still valid
+                    VkSampler sampler;
+                    const SAMPLER_STATE *sampler_state;
+                    if (descriptor_class == DescriptorClass::ImageSampler) {
+                        sampler = static_cast<const ImageSamplerDescriptor *>(descriptor)->GetSampler();
+                        sampler_state = static_cast<const ImageSamplerDescriptor *>(descriptor)->GetSamplerState();
+                    } else {
+                        sampler = static_cast<const SamplerDescriptor *>(descriptor)->GetSampler();
+                        sampler_state = static_cast<const SamplerDescriptor *>(descriptor)->GetSamplerState();
+                    }
+                    if (!sampler_state || sampler_state->destroyed) {
+                        std::stringstream error_str;
+                        error_str << "Descriptor in binding #" << binding << " index " << index << " is using sampler "
+                                  << report_data->FormatHandle(sampler).c_str() << " that is invalid or has been destroyed.";
+                        *error = error_str.str();
+                        return false;
+                    } else {
+                        if (sampler_state->samplerConversion && !descriptor->IsImmutableSampler()) {
+                            std::stringstream error_str;
+                            error_str << "sampler (" << sampler << ") in the descriptor set (" << descriptor_set->GetSet()
+                                      << ") contains a YCBCR conversion (" << sampler_state->samplerConversion
+                                      << "), so the sampler MUST also exist as an immutable sampler.";
+                            *error = error_str.str();
+                        }
+                    }
+                }
+            }
+        }
+    }
+    return true;
+}
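+
+// Worked example (hypothetical values) for the dynamic-offset check above: a dynamic
+// uniform buffer descriptor with desc_offset 64 and range VK_WHOLE_SIZE, bound to a
+// 256-byte buffer, fails with dyn_offset 224 because 224 + 64 = 288 exceeds the
+// buffer size of 256.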
+
+// Set is being deleted or updated, so invalidate all bound cmd buffers
+void cvdescriptorset::DescriptorSet::InvalidateBoundCmdBuffers(ValidationStateTracker *state_data) {
+    state_data->InvalidateCommandBuffers(cb_bindings, VulkanTypedHandle(set_, kVulkanObjectTypeDescriptorSet), /*unlink*/ false);
+}
+
+// Loop through the write updates to do for a push descriptor set, ignoring dstSet
+void cvdescriptorset::DescriptorSet::PerformPushDescriptorsUpdate(ValidationStateTracker *dev_data, uint32_t write_count,
+                                                                  const VkWriteDescriptorSet *p_wds) {
+    assert(IsPushDescriptor());
+    for (uint32_t i = 0; i < write_count; i++) {
+        PerformWriteUpdate(dev_data, &p_wds[i]);
+    }
+
+    push_descriptor_set_writes.clear();
+    push_descriptor_set_writes.reserve(static_cast<std::size_t>(write_count));
+    for (uint32_t i = 0; i < write_count; i++) {
+        push_descriptor_set_writes.push_back(safe_VkWriteDescriptorSet(&p_wds[i]));
+    }
+}
+
+// Perform write update in given update struct
+void cvdescriptorset::DescriptorSet::PerformWriteUpdate(ValidationStateTracker *dev_data, const VkWriteDescriptorSet *update) {
+    // Perform update on a per-binding basis as consecutive updates roll over to next binding
+    auto descriptors_remaining = update->descriptorCount;
+    auto offset = update->dstArrayElement;
+    auto orig_binding = DescriptorSetLayout::ConstBindingIterator(p_layout_.get(), update->dstBinding);
+    auto current_binding = orig_binding;
+
+    uint32_t update_index = 0;
+    // Verify that each next consecutive binding matches type, stage flags & immutable sampler use, and is not past the end
+    while (descriptors_remaining && orig_binding.IsConsistent(current_binding)) {
+        const auto &index_range = current_binding.GetGlobalIndexRange();
+        auto global_idx = index_range.start + offset;
+        // global_idx is the index of the descriptor to update. If global_idx >= index_range.end, the descriptor isn't in
+        // this binding; it may be in the next binding.
+        if (global_idx >= index_range.end) {
+            offset -= current_binding.GetDescriptorCount();
+            ++current_binding;
+            continue;
+        }
+
+        // Loop over the updates for a single binding at a time
+        uint32_t update_count = std::min(descriptors_remaining, current_binding.GetDescriptorCount() - offset);
+        for (uint32_t di = 0; di < update_count; ++di, ++update_index) {
+            descriptors_[global_idx + di]->WriteUpdate(dev_data, update, update_index);
+        }
+        // Roll over to next binding in case of consecutive update
+        descriptors_remaining -= update_count;
+        if (descriptors_remaining) {
+            // The remaining descriptors spill into the next binding: reset the array offset and advance. The loop condition
+            // (orig_binding.IsConsistent(current_binding)) ensures every binding touched by the update stays consistent with
+            // the original binding.
+            offset = 0;
+            ++current_binding;
+        }
+    }
+    if (update->descriptorCount) {
+        some_update_ = true;
+        change_count_++;
+    }
+
+    if (!(p_layout_->GetDescriptorBindingFlagsFromBinding(update->dstBinding) &
+          (VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT))) {
+        InvalidateBoundCmdBuffers(dev_data);
+    }
+}
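+
+// A minimal sketch of a write that exercises the consecutive-binding roll-over above, assuming `set` was allocated
+// from a layout whose binding 0 holds 2 uniform buffers and whose binding 1 holds 2 more with identical type, stage
+// flags, and immutable-sampler use, and `infos` points to 3 valid VkDescriptorBufferInfo entries:
+//
+//     VkWriteDescriptorSet write = {};
+//     write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+//     write.dstSet = set;
+//     write.dstBinding = 0;
+//     write.dstArrayElement = 1;          // start at the second element of binding 0
+//     write.descriptorCount = 3;          // consumes binding 0 element 1, then binding 1 elements 0 and 1
+//     write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+//     write.pBufferInfo = infos;
+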
+// Validate Copy update
+bool CoreChecks::ValidateCopyUpdate(const VkCopyDescriptorSet *update, const DescriptorSet *dst_set, const DescriptorSet *src_set,
+                                    const char *func_name, std::string *error_code, std::string *error_msg) const {
+    auto dst_layout = dst_set->GetLayout().get();
+    auto src_layout = src_set->GetLayout().get();
+
+    // Verify dst layout still valid
+    if (dst_layout->destroyed) {
+        *error_code = "VUID-VkCopyDescriptorSet-dstSet-parameter";
+        string_sprintf(error_msg,
+                       "Cannot call %s to perform copy update on dstSet %s"
+                       " created with destroyed %s.",
+                       func_name, report_data->FormatHandle(dst_set->GetSet()).c_str(),
+                       report_data->FormatHandle(dst_layout->GetDescriptorSetLayout()).c_str());
+        return false;
+    }
+
+    // Verify src layout still valid
+    if (src_layout->destroyed) {
+        *error_code = "VUID-VkCopyDescriptorSet-srcSet-parameter";
+        string_sprintf(error_msg,
+                       "Cannot call %s to perform copy update of dstSet %s"
+                       " from srcSet %s"
+                       " created with destroyed %s.",
+                       func_name, report_data->FormatHandle(dst_set->GetSet()).c_str(),
+                       report_data->FormatHandle(src_set->GetSet()).c_str(),
+                       report_data->FormatHandle(src_layout->GetDescriptorSetLayout()).c_str());
+        return false;
+    }
+
+    if (!dst_layout->HasBinding(update->dstBinding)) {
+        *error_code = "VUID-VkCopyDescriptorSet-dstBinding-00347";
+        std::stringstream error_str;
+        error_str << "DescriptorSet " << dst_set->GetSet() << " does not have copy update dest binding of " << update->dstBinding;
+        *error_msg = error_str.str();
+        return false;
+    }
+    if (!src_set->HasBinding(update->srcBinding)) {
+        *error_code = "VUID-VkCopyDescriptorSet-srcBinding-00345";
+        std::stringstream error_str;
+        error_str << "DescriptorSet " << dst_set->GetSet() << " does not have copy update src binding of " << update->srcBinding;
+        *error_msg = error_str.str();
+        return false;
+    }
+    // Verify idle ds
+    if (dst_set->in_use.load() &&
+        !(dst_layout->GetDescriptorBindingFlagsFromBinding(update->dstBinding) &
+          (VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT))) {
+        // TODO : Re-using Free Idle error code, need copy update idle error code
+        *error_code = "VUID-vkFreeDescriptorSets-pDescriptorSets-00309";
+        std::stringstream error_str;
+        error_str << "Cannot call " << func_name << " to perform copy update on descriptor set " << dst_set->GetSet()
+                  << " that is in use by a command buffer";
+        *error_msg = error_str.str();
+        return false;
+    }
+    // src & dst set bindings are valid
+    // Check bounds of src & dst
+    auto src_start_idx = src_set->GetGlobalIndexRangeFromBinding(update->srcBinding).start + update->srcArrayElement;
+    if ((src_start_idx + update->descriptorCount) > src_set->GetTotalDescriptorCount()) {
+        // SRC update out of bounds
+        *error_code = "VUID-VkCopyDescriptorSet-srcArrayElement-00346";
+        std::stringstream error_str;
+        error_str << "Attempting copy update from descriptorSet " << update->srcSet << " binding#" << update->srcBinding
+                  << " with offset index of " << src_set->GetGlobalIndexRangeFromBinding(update->srcBinding).start
+                  << " plus update array offset of " << update->srcArrayElement << " and update of " << update->descriptorCount
+                  << " descriptors oversteps total number of descriptors in set: " << src_set->GetTotalDescriptorCount();
+        *error_msg = error_str.str();
+        return false;
+    }
+    auto dst_start_idx = dst_layout->GetGlobalIndexRangeFromBinding(update->dstBinding).start + update->dstArrayElement;
+    if ((dst_start_idx + update->descriptorCount) > dst_layout->GetTotalDescriptorCount()) {
+        // DST update out of bounds
+        *error_code = "VUID-VkCopyDescriptorSet-dstArrayElement-00348";
+        std::stringstream error_str;
+        error_str << "Attempting copy update to descriptorSet " << dst_set->GetSet() << " binding#" << update->dstBinding
+                  << " with offset index of " << dst_layout->GetGlobalIndexRangeFromBinding(update->dstBinding).start
+                  << " plus update array offset of " << update->dstArrayElement << " and update of " << update->descriptorCount
+                  << " descriptors oversteps total number of descriptors in set: " << dst_layout->GetTotalDescriptorCount();
+        *error_msg = error_str.str();
+        return false;
+    }
+    // Check that types match
+    // TODO : Base default error case going from here is "VUID-VkCopyDescriptorSet-srcSet-00349" which covers all
+    // consistency issues, need more fine-grained error codes
+    *error_code = "VUID-VkCopyDescriptorSet-srcSet-00349";
+    auto src_type = src_set->GetTypeFromBinding(update->srcBinding);
+    auto dst_type = dst_layout->GetTypeFromBinding(update->dstBinding);
+    if (src_type != dst_type) {
+        std::stringstream error_str;
+        error_str << "Attempting copy update to descriptorSet " << dst_set->GetSet() << " binding #" << update->dstBinding
+                  << " with type " << string_VkDescriptorType(dst_type) << " from descriptorSet " << src_set->GetSet()
+                  << " binding #" << update->srcBinding << " with type " << string_VkDescriptorType(src_type)
+                  << ". Types do not match";
+        *error_msg = error_str.str();
+        return false;
+    }
+    // Verify consistency of src & dst bindings if update crosses binding boundaries
+    if ((!VerifyUpdateConsistency(DescriptorSetLayout::ConstBindingIterator(src_layout, update->srcBinding),
+                                  update->srcArrayElement, update->descriptorCount, "copy update from", src_set->GetSet(),
+                                  error_msg)) ||
+        (!VerifyUpdateConsistency(DescriptorSetLayout::ConstBindingIterator(dst_layout, update->dstBinding),
+                                  update->dstArrayElement, update->descriptorCount, "copy update to", dst_set->GetSet(),
+                                  error_msg))) {
+        return false;
+    }
+
+    if ((src_layout->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT) &&
+        !(dst_layout->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT)) {
+        *error_code = "VUID-VkCopyDescriptorSet-srcSet-01918";
+        std::stringstream error_str;
+        error_str << "If pname:srcSet's (" << update->srcSet
+                  << ") layout was created with the "
+                     "ename:VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT flag "
+                     "set, then pname:dstSet's ("
+                  << update->dstSet
+                  << ") layout must: also have been created with the "
+                     "ename:VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT flag set";
+        *error_msg = error_str.str();
+        return false;
+    }
+
+    if (!(src_layout->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT) &&
+        (dst_layout->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT)) {
+        *error_code = "VUID-VkCopyDescriptorSet-srcSet-01919";
+        std::stringstream error_str;
+        error_str << "If pname:srcSet's (" << update->srcSet
+                  << ") layout was created without the "
+                     "ename:VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT flag "
+                     "set, then pname:dstSet's ("
+                  << update->dstSet
+                  << ") layout must: also have been created without the "
+                     "ename:VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT flag set";
+        *error_msg = error_str.str();
+        return false;
+    }
+
+    if ((src_set->GetPoolState()->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT) &&
+        !(dst_set->GetPoolState()->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT)) {
+        *error_code = "VUID-VkCopyDescriptorSet-srcSet-01920";
+        std::stringstream error_str;
+        error_str << "If the descriptor pool from which pname:srcSet (" << update->srcSet
+                  << ") was allocated was created "
+                     "with the ename:VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT flag "
+                     "set, then the descriptor pool from which pname:dstSet ("
+                  << update->dstSet
+                  << ") was allocated must: "
+                     "also have been created with the ename:VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT flag set";
+        *error_msg = error_str.str();
+        return false;
+    }
+
+    if (!(src_set->GetPoolState()->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT) &&
+        (dst_set->GetPoolState()->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT)) {
+        *error_code = "VUID-VkCopyDescriptorSet-srcSet-01921";
+        std::stringstream error_str;
+        error_str << "If the descriptor pool from which pname:srcSet (" << update->srcSet
+                  << ") was allocated was created "
+                     "without the ename:VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT flag "
+                     "set, then the descriptor pool from which pname:dstSet ("
+                  << update->dstSet
+                  << ") was allocated must: "
+                     "also have been created without the ename:VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT flag set";
+        *error_msg = error_str.str();
+        return false;
+    }
+
+    if (src_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
+        if ((update->srcArrayElement % 4) != 0) {
+            *error_code = "VUID-VkCopyDescriptorSet-srcBinding-02223";
+            std::stringstream error_str;
+            error_str << "Attempting copy update to VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT binding with "
+                      << "srcArrayElement " << update->srcArrayElement << " not a multiple of 4";
+            *error_msg = error_str.str();
+            return false;
+        }
+        if ((update->dstArrayElement % 4) != 0) {
+            *error_code = "VUID-VkCopyDescriptorSet-dstBinding-02224";
+            std::stringstream error_str;
+            error_str << "Attempting copy update to VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT binding with "
+                      << "dstArrayElement " << update->dstArrayElement << " not a multiple of 4";
+            *error_msg = error_str.str();
+            return false;
+        }
+        if ((update->descriptorCount % 4) != 0) {
+            *error_code = "VUID-VkCopyDescriptorSet-srcBinding-02225";
+            std::stringstream error_str;
+            error_str << "Attempting copy update to VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT binding with "
+                      << "descriptorCount " << update->descriptorCount << " not a multiple of 4";
+            *error_msg = error_str.str();
+            return false;
+        }
+    }
+
+    // Update parameters all look good so far, so verify the update contents
+    if (!VerifyCopyUpdateContents(update, src_set, src_type, src_start_idx, func_name, error_code, error_msg)) return false;
+
+    // All checks passed so update is good
+    return true;
+}
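+
+// A minimal sketch of a copy update that the checks above accept, assuming `src_set` and `dst_set` were allocated
+// from layouts whose binding 0 uses the same descriptor type, and neither layout nor pool uses the
+// UPDATE_AFTER_BIND flags:
+//
+//     VkCopyDescriptorSet copy = {};
+//     copy.sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
+//     copy.srcSet = src_set;
+//     copy.srcBinding = 0;
+//     copy.srcArrayElement = 0;
+//     copy.dstSet = dst_set;
+//     copy.dstBinding = 0;
+//     copy.dstArrayElement = 0;
+//     copy.descriptorCount = 1;
+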
+// Perform Copy update
+void cvdescriptorset::DescriptorSet::PerformCopyUpdate(ValidationStateTracker *dev_data, const VkCopyDescriptorSet *update,
+                                                       const DescriptorSet *src_set) {
+    auto src_start_idx = src_set->GetGlobalIndexRangeFromBinding(update->srcBinding).start + update->srcArrayElement;
+    auto dst_start_idx = p_layout_->GetGlobalIndexRangeFromBinding(update->dstBinding).start + update->dstArrayElement;
+    // Update parameters all look good so perform update
+    for (uint32_t di = 0; di < update->descriptorCount; ++di) {
+        auto src = src_set->descriptors_[src_start_idx + di].get();
+        auto dst = descriptors_[dst_start_idx + di].get();
+        if (src->updated) {
+            dst->CopyUpdate(dev_data, src);
+            some_update_ = true;
+            change_count_++;
+        } else {
+            dst->updated = false;
+        }
+    }
+
+    if (!(p_layout_->GetDescriptorBindingFlagsFromBinding(update->dstBinding) &
+          (VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT))) {
+        InvalidateBoundCmdBuffers(dev_data);
+    }
+}
+
+// Update the drawing state for the affected descriptors.
+// Bind cb_node to this set and this set to cb_node.
+// Add the bindings of the descriptors in the set.
+// Set the layout based on the current descriptor layout (will mask subsequent layout mismatch errors)
+// TODO: Modify the UpdateDrawState virtual functions to *only* set initial layout and not change layouts
+// Prereq: This should be called for a set that has been confirmed to be active for the given cb_node, meaning it's going
+//   to be used in a draw by the given cb_node
+void cvdescriptorset::DescriptorSet::UpdateDrawState(ValidationStateTracker *device_data, CMD_BUFFER_STATE *cb_node,
+                                                     const PIPELINE_STATE *pipe,
+                                                     const std::map<uint32_t, descriptor_req> &binding_req_map) {
+    if (!device_data->disabled.command_buffer_state) {
+        // bind cb to this descriptor set
+        // Add bindings for descriptor set, the set's pool, and individual objects in the set
+        if (device_data->AddCommandBufferBinding(cb_bindings, VulkanTypedHandle(set_, kVulkanObjectTypeDescriptorSet, this),
+                                                 cb_node)) {
+            device_data->AddCommandBufferBinding(pool_state_->cb_bindings,
+                                                 VulkanTypedHandle(pool_state_->pool, kVulkanObjectTypeDescriptorPool, pool_state_),
+                                                 cb_node);
+        }
+    }
+
+    // Descriptor UpdateDrawState functions do two things - associate resources to the command buffer,
+    // and call image layout validation callbacks. If both are disabled, skip the entire loop.
+    if (device_data->disabled.command_buffer_state && device_data->disabled.image_layout_validation) {
+        return;
+    }
+
+    // For the active slots, use set# to look up descriptorSet from boundDescriptorSets, and bind all of that descriptor set's
+    // resources
+    for (auto binding_req_pair : binding_req_map) {
+        auto binding = binding_req_pair.first;
+        auto index = p_layout_->GetIndexFromBinding(binding_req_pair.first);
+
+        // We aren't validating descriptors created with PARTIALLY_BOUND or UPDATE_AFTER_BIND, so don't record state
+        auto flags = p_layout_->GetDescriptorBindingFlagsFromIndex(index);
+        if (flags & (VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT)) {
+            if (flags & VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT) {
+                cb_node->validate_descriptorsets_in_queuesubmit[set_][pipe->pipeline][binding] = binding_req_pair.second;
+            }
+            continue;
+        }
+        auto range = p_layout_->GetGlobalIndexRangeFromIndex(index);
+        for (uint32_t i = range.start; i < range.end; ++i) {
+            descriptors_[i]->UpdateDrawState(device_data, cb_node);
+        }
+    }
+}
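+
+// A minimal sketch of the binding flags that make the loop above skip state recording, assuming
+// VK_EXT_descriptor_indexing is enabled and the layout has a single binding; when UPDATE_AFTER_BIND is used, the
+// layout itself must also be created with VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT:
+//
+//     VkDescriptorBindingFlagsEXT flags = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT |
+//                                         VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT;
+//     VkDescriptorSetLayoutBindingFlagsCreateInfoEXT binding_flags = {};
+//     binding_flags.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT;
+//     binding_flags.bindingCount = 1;
+//     binding_flags.pBindingFlags = &flags;
+//     // chain binding_flags into VkDescriptorSetLayoutCreateInfo::pNext when creating the layout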
+
+void cvdescriptorset::DescriptorSet::FilterOneBindingReq(const BindingReqMap::value_type &binding_req_pair, BindingReqMap *out_req,
+                                                         const TrackedBindings &bindings, uint32_t limit) {
+    if (bindings.size() < limit) {
+        const auto it = bindings.find(binding_req_pair.first);
+        if (it == bindings.cend()) out_req->emplace(binding_req_pair);
+    }
+}
+
+void cvdescriptorset::DescriptorSet::FilterBindingReqs(const CMD_BUFFER_STATE &cb_state, const PIPELINE_STATE &pipeline,
+                                                       const BindingReqMap &in_req, BindingReqMap *out_req) const {
+    // For const cleanliness we have to find in the maps...
+    const auto validated_it = cached_validation_.find(&cb_state);
+    if (validated_it == cached_validation_.cend()) {
+        // We have nothing validated, copy in to out
+        for (const auto &binding_req_pair : in_req) {
+            out_req->emplace(binding_req_pair);
+        }
+        return;
+    }
+    const auto &validated = validated_it->second;
+
+    const auto image_sample_version_it = validated.image_samplers.find(&pipeline);
+    const VersionedBindings *image_sample_version = nullptr;
+    if (image_sample_version_it != validated.image_samplers.cend()) {
+        image_sample_version = &(image_sample_version_it->second);
+    }
+    const auto &dynamic_buffers = validated.dynamic_buffers;
+    const auto &non_dynamic_buffers = validated.non_dynamic_buffers;
+    const auto &stats = p_layout_->GetBindingTypeStats();
+    for (const auto &binding_req_pair : in_req) {
+        auto binding = binding_req_pair.first;
+        VkDescriptorSetLayoutBinding const *layout_binding = p_layout_->GetDescriptorSetLayoutBindingPtrFromBinding(binding);
+        if (!layout_binding) {
+            continue;
+        }
+        // Caching criteria differ per type.
+        // If image layouts have changed, the image descriptors need to be validated against them.
+        if ((layout_binding->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ||
+            (layout_binding->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) {
+            FilterOneBindingReq(binding_req_pair, out_req, dynamic_buffers, stats.dynamic_buffer_count);
+        } else if ((layout_binding->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
+                   (layout_binding->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)) {
+            FilterOneBindingReq(binding_req_pair, out_req, non_dynamic_buffers, stats.non_dynamic_buffer_count);
+        } else {
+            // This is rather crude, as the changed layouts may not impact the bound descriptors,
+            // but the simple "versioning" serves as a cheap "dirty" test.
+            bool stale = true;
+            if (image_sample_version) {
+                const auto version_it = image_sample_version->find(binding);
+                if (version_it != image_sample_version->cend() && (version_it->second == cb_state.image_layout_change_count)) {
+                    stale = false;
+                }
+            }
+            if (stale) {
+                out_req->emplace(binding_req_pair);
+            }
+        }
+    }
+}
+
+void cvdescriptorset::DescriptorSet::UpdateValidationCache(const CMD_BUFFER_STATE &cb_state, const PIPELINE_STATE &pipeline,
+                                                           const BindingReqMap &updated_bindings) {
+    // Find (or create) the cached validation entries for this command buffer
+    auto &validated = cached_validation_[&cb_state];
+
+    auto &image_sample_version = validated.image_samplers[&pipeline];
+    auto &dynamic_buffers = validated.dynamic_buffers;
+    auto &non_dynamic_buffers = validated.non_dynamic_buffers;
+    for (const auto &binding_req_pair : updated_bindings) {
+        auto binding = binding_req_pair.first;
+        VkDescriptorSetLayoutBinding const *layout_binding = p_layout_->GetDescriptorSetLayoutBindingPtrFromBinding(binding);
+        if (!layout_binding) {
+            continue;
+        }
+        // Caching criteria differ per type.
+        if ((layout_binding->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ||
+            (layout_binding->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) {
+            dynamic_buffers.emplace(binding);
+        } else if ((layout_binding->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
+                   (layout_binding->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)) {
+            non_dynamic_buffers.emplace(binding);
+        } else {
+            // Save the layout change version...
+            image_sample_version[binding] = cb_state.image_layout_change_count;
+        }
+    }
+}
+
+cvdescriptorset::SamplerDescriptor::SamplerDescriptor(ValidationStateTracker *dev_data, const VkSampler *immut)
+    : sampler_(VK_NULL_HANDLE), immutable_(false) {
+    updated = false;
+    descriptor_class = PlainSampler;
+    if (immut) {
+        sampler_ = *immut;
+        sampler_state_ = dev_data->GetSamplerShared(sampler_);
+        immutable_ = true;
+        updated = true;
+    }
+}
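+
+// A minimal sketch of a layout binding that yields an immutable sampler descriptor like the one constructed above,
+// assuming `sampler` is a valid VkSampler that outlives the layout:
+//
+//     VkDescriptorSetLayoutBinding binding = {};
+//     binding.binding = 0;
+//     binding.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
+//     binding.descriptorCount = 1;
+//     binding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+//     binding.pImmutableSamplers = &sampler;
+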
+// Validate given sampler. Currently this only checks to make sure it exists in the samplerMap
+bool CoreChecks::ValidateSampler(const VkSampler sampler) const { return (GetSamplerState(sampler) != nullptr); }
+
+bool CoreChecks::ValidateImageUpdate(VkImageView image_view, VkImageLayout image_layout, VkDescriptorType type,
+                                     const char *func_name, std::string *error_code, std::string *error_msg) const {
+    auto iv_state = GetImageViewState(image_view);
+    assert(iv_state);
+
+    // Note that when an imageview is created, we validated that memory is bound so no need to re-check here
+    // Validate that imageLayout is compatible with aspect_mask and image format
+    //  and validate that image usage bits are correct for given usage
+    VkImageAspectFlags aspect_mask = iv_state->create_info.subresourceRange.aspectMask;
+    VkImage image = iv_state->create_info.image;
+    VkFormat format = VK_FORMAT_MAX_ENUM;
+    VkImageUsageFlags usage = 0;
+    auto image_node = GetImageState(image);
+    assert(image_node);
+
+    format = image_node->createInfo.format;
+    usage = image_node->createInfo.usage;
+    // Validate that memory is bound to image
+    // TODO: This should have its own valid usage id apart from 2524, which is from the CreateImageView case. The only way
+    //  the error here occurs is if memory bound to a created imageView has been freed.
+    if (ValidateMemoryIsBoundToImage(image_node, func_name, "VUID-VkImageViewCreateInfo-image-01020")) {
+        *error_code = "VUID-VkImageViewCreateInfo-image-01020";
+        *error_msg = "No memory bound to image.";
+        return false;
+    }
+
+    // KHR_maintenance1 allows rendering into 2D or 2DArray views which slice a 3D image,
+    // but not binding them to descriptor sets.
+    if (image_node->createInfo.imageType == VK_IMAGE_TYPE_3D && (iv_state->create_info.viewType == VK_IMAGE_VIEW_TYPE_2D ||
+                                                                 iv_state->create_info.viewType == VK_IMAGE_VIEW_TYPE_2D_ARRAY)) {
+        *error_code = "VUID-VkDescriptorImageInfo-imageView-00343";
+        *error_msg = "ImageView must not be a 2D or 2DArray view of a 3D image";
+        return false;
+    }
+
+    // TODO : The various image aspect and format checks here are based on general spec language in 11.5 Image Views section under
+    // vkCreateImageView(). What's the best way to create a unique id for these cases?
+    *error_code = "UNASSIGNED-CoreValidation-DrawState-InvalidImageView";
+    bool ds = FormatIsDepthOrStencil(format);
+    switch (image_layout) {
+        case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
+            // Only Color bit must be set
+            if ((aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) != VK_IMAGE_ASPECT_COLOR_BIT) {
+                std::stringstream error_str;
+                error_str
+                    << "ImageView (" << report_data->FormatHandle(image_view).c_str()
+                    << ") uses layout VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL but does not have VK_IMAGE_ASPECT_COLOR_BIT set.";
+                *error_msg = error_str.str();
+                return false;
+            }
+            // format must NOT be DS
+            if (ds) {
+                std::stringstream error_str;
+                error_str << "ImageView (" << report_data->FormatHandle(image_view).c_str()
+                          << ") uses layout VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL but the image format is "
+                          << string_VkFormat(format) << " which is not a color format.";
+                *error_msg = error_str.str();
+                return false;
+            }
+            break;
+        case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
+        case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
+            // Depth or stencil bit must be set, but both must NOT be set
+            if (aspect_mask & VK_IMAGE_ASPECT_DEPTH_BIT) {
+                if (aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) {
+                    // both  must NOT be set
+                    std::stringstream error_str;
+                    error_str << "ImageView (" << report_data->FormatHandle(image_view).c_str()
+                              << ") has both STENCIL and DEPTH aspects set";
+                    *error_msg = error_str.str();
+                    return false;
+                }
+            } else if (!(aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT)) {
+                // Neither were set
+                std::stringstream error_str;
+                error_str << "ImageView (" << report_data->FormatHandle(image_view).c_str() << ") has layout "
+                          << string_VkImageLayout(image_layout) << " but does not have STENCIL or DEPTH aspects set";
+                *error_msg = error_str.str();
+                return false;
+            }
+            // format must be DS
+            if (!ds) {
+                std::stringstream error_str;
+                error_str << "ImageView (" << report_data->FormatHandle(image_view).c_str() << ") has layout "
+                          << string_VkImageLayout(image_layout) << " but the image format is " << string_VkFormat(format)
+                          << " which is not a depth/stencil format.";
+                *error_msg = error_str.str();
+                return false;
+            }
+            break;
+        default:
+            // For other layouts, if the source is a depth/stencil image, both aspect bits must not be set
+            if (ds) {
+                if (aspect_mask & VK_IMAGE_ASPECT_DEPTH_BIT) {
+                    if (aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) {
+                        // both  must NOT be set
+                        std::stringstream error_str;
+                        error_str << "ImageView (" << report_data->FormatHandle(image_view).c_str() << ") has layout "
+                                  << string_VkImageLayout(image_layout) << " and is using depth/stencil image of format "
+                                  << string_VkFormat(format)
+                                  << " but it has both STENCIL and DEPTH aspects set, which is illegal. When using a depth/stencil "
+                                     "image in a descriptor set, please only set either VK_IMAGE_ASPECT_DEPTH_BIT or "
+                                     "VK_IMAGE_ASPECT_STENCIL_BIT depending on whether it will be used for depth reads or stencil "
+                                     "reads respectively.";
+                        *error_code = "VUID-VkDescriptorImageInfo-imageView-01976";
+                        *error_msg = error_str.str();
+                        return false;
+                    }
+                }
+            }
+            break;
+    }
+    // Now validate that usage flags are correctly set for given type of update
+    //  As we're switching per-type, if any type has specific layout requirements, check those here as well
+    // TODO : The various image usage bit requirements are in general spec language for VkImageUsageFlags bit block in 11.3 Images
+    // under vkCreateImage()
+    // TODO : Need to also validate case "VUID-VkWriteDescriptorSet-descriptorType-00336" where STORAGE_IMAGE & INPUT_ATTACH types
+    // must have been created with identity swizzle
+    const char *error_usage_bit = nullptr;
+    switch (type) {
+        case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+        case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
+            if (!(usage & VK_IMAGE_USAGE_SAMPLED_BIT)) {
+                error_usage_bit = "VK_IMAGE_USAGE_SAMPLED_BIT";
+            }
+            break;
+        }
+        case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: {
+            if (!(usage & VK_IMAGE_USAGE_STORAGE_BIT)) {
+                error_usage_bit = "VK_IMAGE_USAGE_STORAGE_BIT";
+            } else if (VK_IMAGE_LAYOUT_GENERAL != image_layout) {
+                std::stringstream error_str;
+                // TODO : Need to create custom enum error codes for these cases
+                if (image_node->shared_presentable) {
+                    if (VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR != image_layout) {
+                        error_str << "ImageView (" << report_data->FormatHandle(image_view).c_str()
+                                  << ") of VK_DESCRIPTOR_TYPE_STORAGE_IMAGE type with a front-buffered image is being updated with "
+                                     "layout "
+                                  << string_VkImageLayout(image_layout)
+                                  << " but according to spec section 13.1 Descriptor Types, 'Front-buffered images that report "
+                                     "support for VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT must be in the "
+                                     "VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR layout.'";
+                        *error_msg = error_str.str();
+                        return false;
+                    }
+                } else if (VK_IMAGE_LAYOUT_GENERAL != image_layout) {
+                    error_str << "ImageView (" << report_data->FormatHandle(image_view).c_str()
+                              << ") of VK_DESCRIPTOR_TYPE_STORAGE_IMAGE type is being updated with layout "
+                              << string_VkImageLayout(image_layout)
+                              << " but according to spec section 13.1 Descriptor Types, 'Load and store operations on storage "
+                                 "images can only be done on images in VK_IMAGE_LAYOUT_GENERAL layout.'";
+                    *error_msg = error_str.str();
+                    return false;
+                }
+            }
+            break;
+        }
+        case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
+            if (!(usage & VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT)) {
+                error_usage_bit = "VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT";
+            }
+            break;
+        }
+        default:
+            break;
+    }
+    if (error_usage_bit) {
+        std::stringstream error_str;
+        error_str << "ImageView (" << report_data->FormatHandle(image_view).c_str() << ") with usage mask " << std::hex
+                  << std::showbase << usage << " being used for a descriptor update of type " << string_VkDescriptorType(type)
+                  << " does not have " << error_usage_bit << " set.";
+        *error_msg = error_str.str();
+        return false;
+    }
+
+    if ((type == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) || (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)) {
+        // Test that the layout is compatible with the descriptorType for the two sampled image types
+        const static std::array<VkImageLayout, 3> valid_layouts = {
+            {VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL}};
+
+        struct ExtensionLayout {
+            VkImageLayout layout;
+            ExtEnabled DeviceExtensions::*extension;
+        };
+
+        const static std::array<ExtensionLayout, 3> extended_layouts{
+            {//  Note double brace req'd for aggregate initialization
+             {VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, &DeviceExtensions::vk_khr_shared_presentable_image},
+             {VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, &DeviceExtensions::vk_khr_maintenance2},
+             {VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, &DeviceExtensions::vk_khr_maintenance2}}};
+        auto is_layout = [image_layout, this](const ExtensionLayout &ext_layout) {
+            return device_extensions.*(ext_layout.extension) && (ext_layout.layout == image_layout);
+        };
+
+        bool valid_layout = (std::find(valid_layouts.cbegin(), valid_layouts.cend(), image_layout) != valid_layouts.cend()) ||
+                            std::any_of(extended_layouts.cbegin(), extended_layouts.cend(), is_layout);
+
+        if (!valid_layout) {
+            *error_code = "VUID-VkWriteDescriptorSet-descriptorType-01403";
+            std::stringstream error_str;
+            error_str << "Descriptor update with descriptorType " << string_VkDescriptorType(type)
+                      << " is being updated with invalid imageLayout " << string_VkImageLayout(image_layout) << " for image "
+                      << report_data->FormatHandle(image).c_str() << " in imageView "
+                      << report_data->FormatHandle(image_view).c_str()
+                      << ". Allowed layouts are: VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, "
+                      << "VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL";
+            for (auto &ext_layout : extended_layouts) {
+                if (device_extensions.*(ext_layout.extension)) {
+                    error_str << ", " << string_VkImageLayout(ext_layout.layout);
+                }
+            }
+            *error_msg = error_str.str();
+            return false;
+        }
+    }
+
+    return true;
+}
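+
+// A minimal sketch of an image descriptor update that satisfies the checks above, assuming `view` is a VkImageView
+// of a color image created with VK_IMAGE_USAGE_SAMPLED_BIT and `sampler` is a valid VkSampler:
+//
+//     VkDescriptorImageInfo image_info = {};
+//     image_info.sampler = sampler;
+//     image_info.imageView = view;
+//     image_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;  // an allowed layout for SAMPLED/COMBINED types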
+
+void cvdescriptorset::SamplerDescriptor::WriteUpdate(ValidationStateTracker *dev_data, const VkWriteDescriptorSet *update,
+                                                     const uint32_t index) {
+    if (!immutable_) {
+        sampler_ = update->pImageInfo[index].sampler;
+        sampler_state_ = dev_data->GetSamplerShared(sampler_);
+    }
+    updated = true;
+}
+
+void cvdescriptorset::SamplerDescriptor::CopyUpdate(ValidationStateTracker *dev_data, const Descriptor *src) {
+    if (!immutable_) {
+        auto update_sampler = static_cast<const SamplerDescriptor *>(src)->sampler_;
+        sampler_ = update_sampler;
+        sampler_state_ = dev_data->GetSamplerShared(sampler_);
+    }
+    updated = true;
+}
+
+void cvdescriptorset::SamplerDescriptor::UpdateDrawState(ValidationStateTracker *dev_data, CMD_BUFFER_STATE *cb_node) {
+    if (!immutable_) {
+        auto sampler_state = GetSamplerState();
+        if (sampler_state) dev_data->AddCommandBufferBindingSampler(cb_node, sampler_state);
+    }
+}
+
+cvdescriptorset::ImageSamplerDescriptor::ImageSamplerDescriptor(ValidationStateTracker *dev_data, const VkSampler *immut)
+    : sampler_(VK_NULL_HANDLE), immutable_(false), image_view_(VK_NULL_HANDLE), image_layout_(VK_IMAGE_LAYOUT_UNDEFINED) {
+    updated = false;
+    descriptor_class = ImageSampler;
+    if (immut) {
+        sampler_ = *immut;
+        sampler_state_ = dev_data->GetSamplerShared(sampler_);
+        immutable_ = true;
+    }
+}
+
+void cvdescriptorset::ImageSamplerDescriptor::WriteUpdate(ValidationStateTracker *dev_data, const VkWriteDescriptorSet *update,
+                                                          const uint32_t index) {
+    updated = true;
+    const auto &image_info = update->pImageInfo[index];
+    if (!immutable_) {
+        sampler_ = image_info.sampler;
+        sampler_state_ = dev_data->GetSamplerShared(sampler_);
+    }
+    image_view_ = image_info.imageView;
+    image_layout_ = image_info.imageLayout;
+    image_view_state_ = dev_data->GetImageViewShared(image_view_);
+}
+
+void cvdescriptorset::ImageSamplerDescriptor::CopyUpdate(ValidationStateTracker *dev_data, const Descriptor *src) {
+    if (!immutable_) {
+        auto update_sampler = static_cast<const ImageSamplerDescriptor *>(src)->sampler_;
+        sampler_ = update_sampler;
+        sampler_state_ = dev_data->GetSamplerShared(sampler_);
+    }
+    auto image_view = static_cast<const ImageSamplerDescriptor *>(src)->image_view_;
+    auto image_layout = static_cast<const ImageSamplerDescriptor *>(src)->image_layout_;
+    updated = true;
+    image_view_ = image_view;
+    image_layout_ = image_layout;
+    image_view_state_ = dev_data->GetImageViewShared(image_view_);
+}
+
+void cvdescriptorset::ImageSamplerDescriptor::UpdateDrawState(ValidationStateTracker *dev_data, CMD_BUFFER_STATE *cb_node) {
+    // First add binding for any non-immutable sampler
+    if (!immutable_) {
+        auto sampler_state = GetSamplerState();
+        if (sampler_state) dev_data->AddCommandBufferBindingSampler(cb_node, sampler_state);
+    }
+    // Add binding for image
+    auto iv_state = GetImageViewState();
+    if (iv_state) {
+        dev_data->AddCommandBufferBindingImageView(cb_node, iv_state);
+        dev_data->CallSetImageViewInitialLayoutCallback(cb_node, *iv_state, image_layout_);
+    }
+}
+
+cvdescriptorset::ImageDescriptor::ImageDescriptor(const VkDescriptorType type)
+    : storage_(false), image_view_(VK_NULL_HANDLE), image_layout_(VK_IMAGE_LAYOUT_UNDEFINED) {
+    updated = false;
+    descriptor_class = Image;
+    if (VK_DESCRIPTOR_TYPE_STORAGE_IMAGE == type) storage_ = true;
+}
+
+void cvdescriptorset::ImageDescriptor::WriteUpdate(ValidationStateTracker *dev_data, const VkWriteDescriptorSet *update,
+                                                   const uint32_t index) {
+    updated = true;
+    const auto &image_info = update->pImageInfo[index];
+    image_view_ = image_info.imageView;
+    image_layout_ = image_info.imageLayout;
+    image_view_state_ = dev_data->GetImageViewShared(image_view_);
+}
+
+void cvdescriptorset::ImageDescriptor::CopyUpdate(ValidationStateTracker *dev_data, const Descriptor *src) {
+    auto image_view = static_cast<const ImageDescriptor *>(src)->image_view_;
+    auto image_layout = static_cast<const ImageDescriptor *>(src)->image_layout_;
+    updated = true;
+    image_view_ = image_view;
+    image_layout_ = image_layout;
+    image_view_state_ = dev_data->GetImageViewShared(image_view_);
+}
+
+void cvdescriptorset::ImageDescriptor::UpdateDrawState(ValidationStateTracker *dev_data, CMD_BUFFER_STATE *cb_node) {
+    // Add binding for image
+    auto iv_state = GetImageViewState();
+    if (iv_state) {
+        dev_data->AddCommandBufferBindingImageView(cb_node, iv_state);
+        dev_data->CallSetImageViewInitialLayoutCallback(cb_node, *iv_state, image_layout_);
+    }
+}
+
+cvdescriptorset::BufferDescriptor::BufferDescriptor(const VkDescriptorType type)
+    : storage_(false), dynamic_(false), buffer_(VK_NULL_HANDLE), offset_(0), range_(0) {
+    updated = false;
+    descriptor_class = GeneralBuffer;
+    if (VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC == type) {
+        dynamic_ = true;
+    } else if (VK_DESCRIPTOR_TYPE_STORAGE_BUFFER == type) {
+        storage_ = true;
+    } else if (VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC == type) {
+        dynamic_ = true;
+        storage_ = true;
+    }
+}
+void cvdescriptorset::BufferDescriptor::WriteUpdate(ValidationStateTracker *dev_data, const VkWriteDescriptorSet *update,
+                                                    const uint32_t index) {
+    updated = true;
+    const auto &buffer_info = update->pBufferInfo[index];
+    buffer_ = buffer_info.buffer;
+    offset_ = buffer_info.offset;
+    range_ = buffer_info.range;
+    buffer_state_ = dev_data->GetBufferShared(buffer_);
+}
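+
+// A minimal sketch of the VkDescriptorBufferInfo consumed by this write path, assuming `buffer` was created with a
+// usage bit matching the descriptor type and is backed by memory:
+//
+//     VkDescriptorBufferInfo buffer_info = {};
+//     buffer_info.buffer = buffer;
+//     buffer_info.offset = 0;
+//     buffer_info.range = VK_WHOLE_SIZE;   // or any non-zero range within (buffer size - offset)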
+
+void cvdescriptorset::BufferDescriptor::CopyUpdate(ValidationStateTracker *dev_data, const Descriptor *src) {
+    auto buff_desc = static_cast<const BufferDescriptor *>(src);
+    updated = true;
+    buffer_ = buff_desc->buffer_;
+    offset_ = buff_desc->offset_;
+    range_ = buff_desc->range_;
+    buffer_state_ = dev_data->GetBufferShared(buffer_);
+}
+
+void cvdescriptorset::BufferDescriptor::UpdateDrawState(ValidationStateTracker *dev_data, CMD_BUFFER_STATE *cb_node) {
+    auto buffer_node = GetBufferState();
+    if (buffer_node) dev_data->AddCommandBufferBindingBuffer(cb_node, buffer_node);
+}
+
+cvdescriptorset::TexelDescriptor::TexelDescriptor(const VkDescriptorType type) : buffer_view_(VK_NULL_HANDLE), storage_(false) {
+    updated = false;
+    descriptor_class = TexelBuffer;
+    if (VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER == type) storage_ = true;
+}
+
+void cvdescriptorset::TexelDescriptor::WriteUpdate(ValidationStateTracker *dev_data, const VkWriteDescriptorSet *update,
+                                                   const uint32_t index) {
+    updated = true;
+    buffer_view_ = update->pTexelBufferView[index];
+    buffer_view_state_ = dev_data->GetBufferViewShared(buffer_view_);
+}
+
+void cvdescriptorset::TexelDescriptor::CopyUpdate(ValidationStateTracker *dev_data, const Descriptor *src) {
+    updated = true;
+    buffer_view_ = static_cast<const TexelDescriptor *>(src)->buffer_view_;
+    buffer_view_state_ = dev_data->GetBufferViewShared(buffer_view_);
+}
+
+void cvdescriptorset::TexelDescriptor::UpdateDrawState(ValidationStateTracker *dev_data, CMD_BUFFER_STATE *cb_node) {
+    auto bv_state = GetBufferViewState();
+    if (bv_state) {
+        dev_data->AddCommandBufferBindingBufferView(cb_node, bv_state);
+    }
+}
+
+// This is a helper function that iterates over a set of Write and Copy updates, pulls the DescriptorSet* for updated
+//  sets, and then calls their respective Validate[Write|Copy]Update functions.
+// If the update hits an issue for which the callback returns "true", meaning that the call down the chain should
+//  be skipped, then true is returned.
+// If there is no issue with the update, then false is returned.
+bool CoreChecks::ValidateUpdateDescriptorSets(uint32_t write_count, const VkWriteDescriptorSet *p_wds, uint32_t copy_count,
+                                              const VkCopyDescriptorSet *p_cds, const char *func_name) const {
+    bool skip = false;
+    // Validate Write updates
+    for (uint32_t i = 0; i < write_count; i++) {
+        auto dest_set = p_wds[i].dstSet;
+        auto set_node = GetSetNode(dest_set);
+        if (!set_node) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+                            HandleToUint64(dest_set), kVUID_Core_DrawState_InvalidDescriptorSet,
+                            "Cannot call %s on %s that has not been allocated.", func_name,
+                            report_data->FormatHandle(dest_set).c_str());
+        } else {
+            std::string error_code;
+            std::string error_str;
+            if (!ValidateWriteUpdate(set_node, &p_wds[i], func_name, &error_code, &error_str)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+                                HandleToUint64(dest_set), error_code, "%s failed write update validation for %s with error: %s.",
+                                func_name, report_data->FormatHandle(dest_set).c_str(), error_str.c_str());
+            }
+        }
+    }
+    // Now validate copy updates
+    for (uint32_t i = 0; i < copy_count; ++i) {
+        auto dst_set = p_cds[i].dstSet;
+        auto src_set = p_cds[i].srcSet;
+        auto src_node = GetSetNode(src_set);
+        auto dst_node = GetSetNode(dst_set);
+        // Object_tracker verifies that the src & dst descriptor sets are valid
+        assert(src_node);
+        assert(dst_node);
+        std::string error_code;
+        std::string error_str;
+        if (!ValidateCopyUpdate(&p_cds[i], dst_node, src_node, func_name, &error_code, &error_str)) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+                        HandleToUint64(dst_set), error_code, "%s failed copy update from %s to %s with error: %s.", func_name,
+                        report_data->FormatHandle(src_set).c_str(), report_data->FormatHandle(dst_set).c_str(), error_str.c_str());
+        }
+    }
+    return skip;
+}
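+
+// A minimal sketch of the call that drives the validation above and the record step below, assuming `device` is a
+// valid VkDevice and `write` / `copy` are populated as in the earlier sketches:
+//
+//     vkUpdateDescriptorSets(device, /*descriptorWriteCount=*/1, &write, /*descriptorCopyCount=*/1, &copy);
+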
+// This is a helper function that iterates over a set of Write and Copy updates, pulls the DescriptorSet* for updated
+//  sets, and then calls their respective Perform[Write|Copy]Update functions.
+// Prerequisite : ValidateUpdateDescriptorSets() should be called and return "false" prior to calling PerformUpdateDescriptorSets()
+//  with the same set of updates.
+// This is split from the validate code to allow validation prior to calling down the chain, and then update after
+//  calling down the chain.
+void cvdescriptorset::PerformUpdateDescriptorSets(ValidationStateTracker *dev_data, uint32_t write_count,
+                                                  const VkWriteDescriptorSet *p_wds, uint32_t copy_count,
+                                                  const VkCopyDescriptorSet *p_cds) {
+    // Write updates first
+    uint32_t i = 0;
+    for (i = 0; i < write_count; ++i) {
+        auto dest_set = p_wds[i].dstSet;
+        auto set_node = dev_data->GetSetNode(dest_set);
+        if (set_node) {
+            set_node->PerformWriteUpdate(dev_data, &p_wds[i]);
+        }
+    }
+    // Now copy updates
+    for (i = 0; i < copy_count; ++i) {
+        auto dst_set = p_cds[i].dstSet;
+        auto src_set = p_cds[i].srcSet;
+        auto src_node = dev_data->GetSetNode(src_set);
+        auto dst_node = dev_data->GetSetNode(dst_set);
+        if (src_node && dst_node) {
+            dst_node->PerformCopyUpdate(dev_data, &p_cds[i], src_node);
+        }
+    }
+}
+
+cvdescriptorset::DecodedTemplateUpdate::DecodedTemplateUpdate(const ValidationStateTracker *device_data,
+                                                              VkDescriptorSet descriptorSet, const TEMPLATE_STATE *template_state,
+                                                              const void *pData, VkDescriptorSetLayout push_layout) {
+    auto const &create_info = template_state->create_info;
+    inline_infos.resize(create_info.descriptorUpdateEntryCount);  // Make sure we have one if we need it
+    desc_writes.reserve(create_info.descriptorUpdateEntryCount);  // emplaced, so reserved without initialization
+    VkDescriptorSetLayout effective_dsl = create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET
+                                              ? create_info.descriptorSetLayout
+                                              : push_layout;
+    auto layout_obj = device_data->GetDescriptorSetLayoutShared(effective_dsl);
+
+    // Create a WriteDescriptorSet struct for each template update entry
+    for (uint32_t i = 0; i < create_info.descriptorUpdateEntryCount; i++) {
+        auto binding_count = layout_obj->GetDescriptorCountFromBinding(create_info.pDescriptorUpdateEntries[i].dstBinding);
+        auto binding_being_updated = create_info.pDescriptorUpdateEntries[i].dstBinding;
+        auto dst_array_element = create_info.pDescriptorUpdateEntries[i].dstArrayElement;
+
+        desc_writes.reserve(desc_writes.size() + create_info.pDescriptorUpdateEntries[i].descriptorCount);
+        for (uint32_t j = 0; j < create_info.pDescriptorUpdateEntries[i].descriptorCount; j++) {
+            desc_writes.emplace_back();
+            auto &write_entry = desc_writes.back();
+
+            size_t offset = create_info.pDescriptorUpdateEntries[i].offset + j * create_info.pDescriptorUpdateEntries[i].stride;
+            char *update_entry = (char *)(pData) + offset;
+
+            if (dst_array_element >= binding_count) {
+                dst_array_element = 0;
+                binding_being_updated = layout_obj->GetNextValidBinding(binding_being_updated);
+            }
+
+            write_entry.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+            write_entry.pNext = NULL;
+            write_entry.dstSet = descriptorSet;
+            write_entry.dstBinding = binding_being_updated;
+            write_entry.dstArrayElement = dst_array_element;
+            write_entry.descriptorCount = 1;
+            write_entry.descriptorType = create_info.pDescriptorUpdateEntries[i].descriptorType;
+
+            switch (create_info.pDescriptorUpdateEntries[i].descriptorType) {
+                case VK_DESCRIPTOR_TYPE_SAMPLER:
+                case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+                case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+                case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
+                case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
+                    write_entry.pImageInfo = reinterpret_cast<VkDescriptorImageInfo *>(update_entry);
+                    break;
+
+                case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+                case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+                case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+                case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
+                    write_entry.pBufferInfo = reinterpret_cast<VkDescriptorBufferInfo *>(update_entry);
+                    break;
+
+                case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+                case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
+                    write_entry.pTexelBufferView = reinterpret_cast<VkBufferView *>(update_entry);
+                    break;
+                case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: {
+                    VkWriteDescriptorSetInlineUniformBlockEXT *inline_info = &inline_infos[i];
+                    inline_info->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT;
+                    inline_info->pNext = nullptr;
+                    inline_info->dataSize = create_info.pDescriptorUpdateEntries[i].descriptorCount;
+                    inline_info->pData = update_entry;
+                    write_entry.pNext = inline_info;
+                    // descriptorCount must match the dataSize member of the VkWriteDescriptorSetInlineUniformBlockEXT structure
+                    write_entry.descriptorCount = inline_info->dataSize;
+                    // skip the rest of the array, they just represent bytes in the update
+                    j = create_info.pDescriptorUpdateEntries[i].descriptorCount;
+                    break;
+                }
+                default:
+                    assert(0);
+                    break;
+            }
+            dst_array_element++;
+        }
+    }
+}
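+
+// A minimal sketch of a template entry that the decoder above expands into one VkWriteDescriptorSet per descriptor,
+// assuming the application packs VkDescriptorBufferInfo structs back-to-back in pData:
+//
+//     VkDescriptorUpdateTemplateEntryKHR entry = {};
+//     entry.dstBinding = 0;
+//     entry.dstArrayElement = 0;
+//     entry.descriptorCount = 2;
+//     entry.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+//     entry.offset = 0;                                  // byte offset of the first element in pData
+//     entry.stride = sizeof(VkDescriptorBufferInfo);     // byte distance between consecutive elements
+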
+// These helper functions carry out the validate and record steps for descriptor updates performed via update templates. They
+// decode the templatized data and leverage the non-template UpdateDescriptor helper functions.
+bool CoreChecks::ValidateUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet, const TEMPLATE_STATE *template_state,
+                                                             const void *pData) const {
+    // Translate the templated update into a normal update for validation...
+    cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
+    return ValidateUpdateDescriptorSets(static_cast<uint32_t>(decoded_update.desc_writes.size()), decoded_update.desc_writes.data(),
+                                        0, NULL, "vkUpdateDescriptorSetWithTemplate()");
+}
+
+std::string cvdescriptorset::DescriptorSet::StringifySetAndLayout() const {
+    std::string out;
+    auto layout_handle = p_layout_->GetDescriptorSetLayout();
+    if (IsPushDescriptor()) {
+        string_sprintf(&out, "Push Descriptors defined with VkDescriptorSetLayout %s",
+                       report_data_->FormatHandle(layout_handle).c_str());
+    } else {
+        string_sprintf(&out, "VkDescriptorSet %s allocated with VkDescriptorSetLayout %s", report_data_->FormatHandle(set_).c_str(),
+                       report_data_->FormatHandle(layout_handle).c_str());
+    }
+    return out;
+}
+
+// Loop through the write updates to validate for a push descriptor set, ignoring dstSet
+bool CoreChecks::ValidatePushDescriptorsUpdate(const DescriptorSet *push_set, uint32_t write_count,
+                                               const VkWriteDescriptorSet *p_wds, const char *func_name) const {
+    assert(push_set->IsPushDescriptor());
+    bool skip = false;
+    for (uint32_t i = 0; i < write_count; i++) {
+        std::string error_code;
+        std::string error_str;
+        if (!ValidateWriteUpdate(push_set, &p_wds[i], func_name, &error_code, &error_str)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
+                            HandleToUint64(push_set->GetDescriptorSetLayout()), error_code, "%s failed update validation: %s.",
+                            func_name, error_str.c_str());
+        }
+    }
+    return skip;
+}
+
+// For the given buffer, verify that its creation parameters are appropriate for the given type
+//  If there's an error, update the error_msg string with details and return false, else return true
+bool cvdescriptorset::ValidateBufferUsage(BUFFER_STATE const *buffer_node, VkDescriptorType type, std::string *error_code,
+                                          std::string *error_msg) {
+    // Verify that usage bits are set correctly for the given type
+    auto usage = buffer_node->createInfo.usage;
+    const char *error_usage_bit = nullptr;
+    switch (type) {
+        case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+            if (!(usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT)) {
+                *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00334";
+                error_usage_bit = "VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT";
+            }
+            break;
+        case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
+            if (!(usage & VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT)) {
+                *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00335";
+                error_usage_bit = "VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT";
+            }
+            break;
+        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+            if (!(usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT)) {
+                *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00330";
+                error_usage_bit = "VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT";
+            }
+            break;
+        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
+            if (!(usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT)) {
+                *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00331";
+                error_usage_bit = "VK_BUFFER_USAGE_STORAGE_BUFFER_BIT";
+            }
+            break;
+        default:
+            break;
+    }
+    if (error_usage_bit) {
+        std::stringstream error_str;
+        error_str << "Buffer (" << buffer_node->buffer << ") with usage mask " << std::hex << std::showbase << usage
+                  << " being used for a descriptor update of type " << string_VkDescriptorType(type) << " does not have "
+                  << error_usage_bit << " set.";
+        *error_msg = error_str.str();
+        return false;
+    }
+    return true;
+}
+// For buffer descriptor updates, verify the buffer usage and VkDescriptorBufferInfo struct which includes:
+//  1. buffer is valid
+//  2. buffer was created with correct usage flags
+//  3. offset is less than buffer size
+//  4. range is either VK_WHOLE_SIZE or falls in (0, (buffer size - offset)]
+//  5. range and offset are within the device's limits
+// If there's an error, update the error_msg string with details and return false, else return true
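+//  For illustration (hypothetical values): given a buffer created with size 256 and VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
+//  a VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER update using VkDescriptorBufferInfo{buffer, /*offset*/ 128, /*range*/ 192}
+//  fails check 4 above (192 > 256 - 128), while {buffer, /*offset*/ 128, /*range*/ VK_WHOLE_SIZE} passes as long as
+//  the effective range of 128 does not exceed maxUniformBufferRange (check 5)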
+bool CoreChecks::ValidateBufferUpdate(VkDescriptorBufferInfo const *buffer_info, VkDescriptorType type, const char *func_name,
+                                      std::string *error_code, std::string *error_msg) const {
+    // First make sure that buffer is valid
+    auto buffer_node = GetBufferState(buffer_info->buffer);
+    // Any invalid buffer should already be caught by object_tracker
+    assert(buffer_node);
+    if (ValidateMemoryIsBoundToBuffer(buffer_node, func_name, "VUID-VkWriteDescriptorSet-descriptorType-00329")) {
+        *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00329";
+        *error_msg = "No memory bound to buffer.";
+        return false;
+    }
+    // Verify usage bits
+    if (!cvdescriptorset::ValidateBufferUsage(buffer_node, type, error_code, error_msg)) {
+        // error_msg will have been updated by ValidateBufferUsage()
+        return false;
+    }
+    // offset must be less than buffer size
+    if (buffer_info->offset >= buffer_node->createInfo.size) {
+        *error_code = "VUID-VkDescriptorBufferInfo-offset-00340";
+        std::stringstream error_str;
+        error_str << "VkDescriptorBufferInfo offset of " << buffer_info->offset << " is greater than or equal to buffer "
+                  << buffer_node->buffer << " size of " << buffer_node->createInfo.size;
+        *error_msg = error_str.str();
+        return false;
+    }
+    if (buffer_info->range != VK_WHOLE_SIZE) {
+        // Range must be VK_WHOLE_SIZE or > 0
+        if (!buffer_info->range) {
+            *error_code = "VUID-VkDescriptorBufferInfo-range-00341";
+            std::stringstream error_str;
+            error_str << "VkDescriptorBufferInfo range is not VK_WHOLE_SIZE and is zero, which is not allowed.";
+            *error_msg = error_str.str();
+            return false;
+        }
+        // Range must be VK_WHOLE_SIZE or <= (buffer size - offset)
+        if (buffer_info->range > (buffer_node->createInfo.size - buffer_info->offset)) {
+            *error_code = "VUID-VkDescriptorBufferInfo-range-00342";
+            std::stringstream error_str;
+            error_str << "VkDescriptorBufferInfo range is " << buffer_info->range << " which is greater than buffer size ("
+                      << buffer_node->createInfo.size << ") minus requested offset of " << buffer_info->offset;
+            *error_msg = error_str.str();
+            return false;
+        }
+    }
+    // Check buffer update sizes against device limits
+    const auto &limits = phys_dev_props.limits;
+    if (VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER == type || VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC == type) {
+        auto max_ub_range = limits.maxUniformBufferRange;
+        if (buffer_info->range != VK_WHOLE_SIZE && buffer_info->range > max_ub_range) {
+            *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00332";
+            std::stringstream error_str;
+            error_str << "VkDescriptorBufferInfo range is " << buffer_info->range
+                      << " which is greater than this device's maxUniformBufferRange (" << max_ub_range << ")";
+            *error_msg = error_str.str();
+            return false;
+        } else if (buffer_info->range == VK_WHOLE_SIZE && (buffer_node->createInfo.size - buffer_info->offset) > max_ub_range) {
+            *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00332";
+            std::stringstream error_str;
+            error_str << "VkDescriptorBufferInfo range is VK_WHOLE_SIZE but effective range "
+                      << "(" << (buffer_node->createInfo.size - buffer_info->offset) << ") is greater than this device's "
+                      << "maxUniformBufferRange (" << max_ub_range << ")";
+            *error_msg = error_str.str();
+            return false;
+        }
+    } else if (VK_DESCRIPTOR_TYPE_STORAGE_BUFFER == type || VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC == type) {
+        auto max_sb_range = limits.maxStorageBufferRange;
+        if (buffer_info->range != VK_WHOLE_SIZE && buffer_info->range > max_sb_range) {
+            *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00333";
+            std::stringstream error_str;
+            error_str << "VkDescriptorBufferInfo range is " << buffer_info->range
+                      << " which is greater than this device's maxStorageBufferRange (" << max_sb_range << ")";
+            *error_msg = error_str.str();
+            return false;
+        } else if (buffer_info->range == VK_WHOLE_SIZE && (buffer_node->createInfo.size - buffer_info->offset) > max_sb_range) {
+            *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00333";
+            std::stringstream error_str;
+            error_str << "VkDescriptorBufferInfo range is VK_WHOLE_SIZE but effective range "
+                      << "(" << (buffer_node->createInfo.size - buffer_info->offset) << ") is greater than this device's "
+                      << "maxStorageBufferRange (" << max_sb_range << ")";
+            *error_msg = error_str.str();
+            return false;
+        }
+    }
+    return true;
+}
+// Verify that the contents of the update are ok, but don't perform actual update
+bool CoreChecks::VerifyCopyUpdateContents(const VkCopyDescriptorSet *update, const DescriptorSet *src_set, VkDescriptorType type,
+                                          uint32_t index, const char *func_name, std::string *error_code,
+                                          std::string *error_msg) const {
+    // Note : Repurposing some Write update error codes here as specific details aren't called out for copy updates like they are
+    // for write updates
+    using DescriptorClass = cvdescriptorset::DescriptorClass;
+    using BufferDescriptor = cvdescriptorset::BufferDescriptor;
+    using ImageDescriptor = cvdescriptorset::ImageDescriptor;
+    using ImageSamplerDescriptor = cvdescriptorset::ImageSamplerDescriptor;
+    using SamplerDescriptor = cvdescriptorset::SamplerDescriptor;
+    using TexelDescriptor = cvdescriptorset::TexelDescriptor;
+
+    auto device_data = this;
+    switch (src_set->GetDescriptorFromGlobalIndex(index)->descriptor_class) {
+        case DescriptorClass::PlainSampler: {
+            for (uint32_t di = 0; di < update->descriptorCount; ++di) {
+                const auto src_desc = src_set->GetDescriptorFromGlobalIndex(index + di);
+                if (!src_desc->updated) continue;
+                if (!src_desc->IsImmutableSampler()) {
+                    auto update_sampler = static_cast<const SamplerDescriptor *>(src_desc)->GetSampler();
+                    if (!ValidateSampler(update_sampler)) {
+                        *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00325";
+                        std::stringstream error_str;
+                        error_str << "Attempted copy update to sampler descriptor with invalid sampler: " << update_sampler << ".";
+                        *error_msg = error_str.str();
+                        return false;
+                    }
+                } else {
+                    // TODO : Warn here
+                }
+            }
+            break;
+        }
+        case DescriptorClass::ImageSampler: {
+            for (uint32_t di = 0; di < update->descriptorCount; ++di) {
+                const auto src_desc = src_set->GetDescriptorFromGlobalIndex(index + di);
+                if (!src_desc->updated) continue;
+                auto img_samp_desc = static_cast<const ImageSamplerDescriptor *>(src_desc);
+                // First validate sampler
+                if (!img_samp_desc->IsImmutableSampler()) {
+                    auto update_sampler = img_samp_desc->GetSampler();
+                    if (!ValidateSampler(update_sampler)) {
+                        *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00325";
+                        std::stringstream error_str;
+                        error_str << "Attempted copy update to sampler descriptor with invalid sampler: " << update_sampler << ".";
+                        *error_msg = error_str.str();
+                        return false;
+                    }
+                } else {
+                    // TODO : Warn here
+                }
+                // Validate image
+                auto image_view = img_samp_desc->GetImageView();
+                auto image_layout = img_samp_desc->GetImageLayout();
+                if (!ValidateImageUpdate(image_view, image_layout, type, func_name, error_code, error_msg)) {
+                    std::stringstream error_str;
+                    error_str << "Attempted copy update to combined image sampler descriptor failed due to: " << error_msg->c_str();
+                    *error_msg = error_str.str();
+                    return false;
+                }
+            }
+            break;
+        }
+        case DescriptorClass::Image: {
+            for (uint32_t di = 0; di < update->descriptorCount; ++di) {
+                const auto src_desc = src_set->GetDescriptorFromGlobalIndex(index + di);
+                if (!src_desc->updated) continue;
+                auto img_desc = static_cast<const ImageDescriptor *>(src_desc);
+                auto image_view = img_desc->GetImageView();
+                auto image_layout = img_desc->GetImageLayout();
+                if (!ValidateImageUpdate(image_view, image_layout, type, func_name, error_code, error_msg)) {
+                    std::stringstream error_str;
+                    error_str << "Attempted copy update to image descriptor failed due to: " << error_msg->c_str();
+                    *error_msg = error_str.str();
+                    return false;
+                }
+            }
+            break;
+        }
+        case DescriptorClass::TexelBuffer: {
+            for (uint32_t di = 0; di < update->descriptorCount; ++di) {
+                const auto src_desc = src_set->GetDescriptorFromGlobalIndex(index + di);
+                if (!src_desc->updated) continue;
+                auto buffer_view = static_cast<const TexelDescriptor *>(src_desc)->GetBufferView();
+                auto bv_state = device_data->GetBufferViewState(buffer_view);
+                if (!bv_state) {
+                    *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00323";
+                    std::stringstream error_str;
+                    error_str << "Attempted copy update to texel buffer descriptor with invalid buffer view: " << buffer_view;
+                    *error_msg = error_str.str();
+                    return false;
+                }
+                auto buffer = bv_state->create_info.buffer;
+                if (!cvdescriptorset::ValidateBufferUsage(GetBufferState(buffer), type, error_code, error_msg)) {
+                    std::stringstream error_str;
+                    error_str << "Attempted copy update to texel buffer descriptor failed due to: " << error_msg->c_str();
+                    *error_msg = error_str.str();
+                    return false;
+                }
+            }
+            break;
+        }
+        case DescriptorClass::GeneralBuffer: {
+            for (uint32_t di = 0; di < update->descriptorCount; ++di) {
+                const auto src_desc = src_set->GetDescriptorFromGlobalIndex(index + di);
+                if (!src_desc->updated) continue;
+                auto buffer = static_cast<const BufferDescriptor *>(src_desc)->GetBuffer();
+                if (!cvdescriptorset::ValidateBufferUsage(GetBufferState(buffer), type, error_code, error_msg)) {
+                    std::stringstream error_str;
+                    error_str << "Attempted copy update to buffer descriptor failed due to: " << error_msg->c_str();
+                    *error_msg = error_str.str();
+                    return false;
+                }
+            }
+            break;
+        }
+        case DescriptorClass::InlineUniform:
+        case DescriptorClass::AccelerationStructure:
+            break;
+        default:
+            assert(0);  // We've already verified update type so should never get here
+            break;
+    }
+    // All checks passed so update contents are good
+    return true;
+}
+// Verify that the state at allocate time is correct, but don't actually allocate the sets yet
+bool CoreChecks::ValidateAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
+                                                const cvdescriptorset::AllocateDescriptorSetsData *ds_data) const {
+    bool skip = false;
+    auto pool_state = GetDescriptorPoolState(p_alloc_info->descriptorPool);
+
+    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
+        auto layout = GetDescriptorSetLayoutShared(p_alloc_info->pSetLayouts[i]);
+        if (layout) {  // nullptr layout indicates no valid layout handle for this device, validated/logged in object_tracker
+            if (layout->IsPushDescriptor()) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
+                                HandleToUint64(p_alloc_info->pSetLayouts[i]), "VUID-VkDescriptorSetAllocateInfo-pSetLayouts-00308",
+                                "%s specified at pSetLayouts[%" PRIu32
+                                "] in vkAllocateDescriptorSets() was created with invalid flag %s set.",
+                                report_data->FormatHandle(p_alloc_info->pSetLayouts[i]).c_str(), i,
+                                "VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR");
+            }
+            if (layout->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT &&
+                !(pool_state->createInfo.flags & VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
+                                0, "VUID-VkDescriptorSetAllocateInfo-pSetLayouts-03044",
+                                "Descriptor set layout create flags and pool create flags mismatch for index (%d)", i);
+            }
+        }
+    }
+    if (!device_extensions.vk_khr_maintenance1) {
+        // Track number of descriptorSets allowable in this pool
+        if (pool_state->availableSets < p_alloc_info->descriptorSetCount) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
+                            HandleToUint64(pool_state->pool), "VUID-VkDescriptorSetAllocateInfo-descriptorSetCount-00306",
+                            "Unable to allocate %u descriptorSets from %s"
+                            ". This pool only has %d descriptorSets remaining.",
+                            p_alloc_info->descriptorSetCount, report_data->FormatHandle(pool_state->pool).c_str(),
+                            pool_state->availableSets);
+        }
+        // Determine whether descriptor counts are satisfiable
+        for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
+            auto count_iter = pool_state->availableDescriptorTypeCount.find(it->first);
+            uint32_t availableCount = (count_iter != pool_state->availableDescriptorTypeCount.end()) ? count_iter->second : 0;
+
+            if (ds_data->required_descriptors_by_type.at(it->first) > availableCount) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
+                                HandleToUint64(pool_state->pool), "VUID-VkDescriptorSetAllocateInfo-descriptorPool-00307",
+                                "Unable to allocate %u descriptors of type %s from %s"
+                                ". This pool only has %d descriptors of this type remaining.",
+                                ds_data->required_descriptors_by_type.at(it->first),
+                                string_VkDescriptorType(VkDescriptorType(it->first)),
+                                report_data->FormatHandle(pool_state->pool).c_str(), availableCount);
+            }
+        }
+    }
+
+    const auto *count_allocate_info = lvl_find_in_chain<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT>(p_alloc_info->pNext);
+
+    if (count_allocate_info) {
+        if (count_allocate_info->descriptorSetCount != 0 &&
+            count_allocate_info->descriptorSetCount != p_alloc_info->descriptorSetCount) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, 0,
+                            "VUID-VkDescriptorSetVariableDescriptorCountAllocateInfoEXT-descriptorSetCount-03045",
+                            "VkDescriptorSetAllocateInfo::descriptorSetCount (%d) != "
+                            "VkDescriptorSetVariableDescriptorCountAllocateInfoEXT::descriptorSetCount (%d)",
+                            p_alloc_info->descriptorSetCount, count_allocate_info->descriptorSetCount);
+        }
+        if (count_allocate_info->descriptorSetCount == p_alloc_info->descriptorSetCount) {
+            for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
+                auto layout = GetDescriptorSetLayoutShared(p_alloc_info->pSetLayouts[i]);
+                if (count_allocate_info->pDescriptorCounts[i] > layout->GetDescriptorCountFromBinding(layout->GetMaxBinding())) {
+                    skip |= log_msg(
+                        report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, 0,
+                        "VUID-VkDescriptorSetVariableDescriptorCountAllocateInfoEXT-pSetLayouts-03046",
+                        "pDescriptorCounts[%d] = (%d), binding's descriptorCount = (%d)", i,
+                        count_allocate_info->pDescriptorCounts[i], layout->GetDescriptorCountFromBinding(layout->GetMaxBinding()));
+                }
+            }
+        }
+    }
+
+    return skip;
+}
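+// For illustration (hypothetical numbers): without VK_KHR_maintenance1, requesting 3 sets that together need 5
+// VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER descriptors from a pool with only 4 such descriptors remaining triggers
+// "VUID-VkDescriptorSetAllocateInfo-descriptorPool-00307" above; with the extension enabled these pool-capacity
+// checks are skipped because the driver may then fail the allocation itself (e.g. with VK_ERROR_OUT_OF_POOL_MEMORY).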
+
+const BindingReqMap &cvdescriptorset::PrefilterBindRequestMap::FilteredMap(const CMD_BUFFER_STATE &cb_state,
+                                                                           const PIPELINE_STATE &pipeline) {
+    if (IsManyDescriptors()) {
+        filtered_map_.reset(new std::map<uint32_t, descriptor_req>());
+        descriptor_set_.FilterBindingReqs(cb_state, pipeline, orig_map_, filtered_map_.get());
+        return *filtered_map_;
+    }
+    return orig_map_;
+}
+
+// Starting at offset descriptor of given binding, parse over update_count
+//  descriptor updates and verify that for any binding boundaries that are crossed, the next binding(s) are all consistent
+//  Consistency means that their type, stage flags, and whether or not they use immutable samplers match
+//  If so, return true. If not, fill in error_msg and return false
+bool cvdescriptorset::VerifyUpdateConsistency(DescriptorSetLayout::ConstBindingIterator current_binding, uint32_t offset,
+                                              uint32_t update_count, const char *type, const VkDescriptorSet set,
+                                              std::string *error_msg) {
+    bool pass = true;
+    // Verify consecutive bindings match (if needed)
+    auto orig_binding = current_binding;
+
+    while (pass && update_count) {
+        // First, it's legal to offset beyond your own binding so handle that case
+        if (offset > 0) {
+            const auto &index_range = current_binding.GetGlobalIndexRange();
+            // index_range.start + offset is the descriptor that needs to be updated. If it is >= index_range.end, the
+            // descriptor isn't in this binding and may be in the next binding.
+            if ((index_range.start + offset) >= index_range.end) {
+                // Advance to next binding, decrement offset by binding size
+                offset -= current_binding.GetDescriptorCount();
+                ++current_binding;
+                // Verify next consecutive binding matches type, stage flags & immutable sampler use and if AtEnd
+                if (!orig_binding.IsConsistent(current_binding)) {
+                    pass = false;
+                }
+                continue;
+            }
+        }
+
+        update_count -= std::min(update_count, current_binding.GetDescriptorCount() - offset);
+        if (update_count) {
+            // The update spills past the end of the current binding. Check consistency, update counters and advance to the
+            // next binding; every binding the update rolls over into must be consistent with the original binding.
+            offset = 0;
+            ++current_binding;
+            // Verify next consecutive binding matches type, stage flags & immutable sampler use and if AtEnd
+            if (!orig_binding.IsConsistent(current_binding)) {
+                pass = false;
+            }
+        }
+    }
+
+    if (!pass) {
+        std::stringstream error_str;
+        error_str << "Attempting " << type;
+        if (current_binding.Layout()->IsPushDescriptor()) {
+            error_str << " push descriptors";
+        } else {
+            error_str << " descriptor set " << set;
+        }
+        error_str << " binding #" << orig_binding.Binding() << " with #" << update_count
+                  << " descriptors being updated but this update oversteps the bounds of this binding and the next binding is "
+                     "not consistent with current binding so this update is invalid.";
+        *error_msg = error_str.str();
+    }
+    return pass;
+}
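+// For illustration (hypothetical layout): with binding 0 (descriptorCount 4) and binding 1 (descriptorCount 4)
+// sharing type, stage flags and immutable-sampler use, an update with dstArrayElement 2 and descriptorCount 5
+// consumes the last two descriptors of binding 0, rolls over into binding 1 and passes the check above; if
+// binding 1 differed in type, the IsConsistent() test would fail and the update would be rejected.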
+
+// Validate the state for a given write update but don't actually perform the update
+//  If an error would occur for this update, return false and fill in details in error_msg string
+bool CoreChecks::ValidateWriteUpdate(const DescriptorSet *dest_set, const VkWriteDescriptorSet *update, const char *func_name,
+                                     std::string *error_code, std::string *error_msg) const {
+    const auto dest_layout = dest_set->GetLayout().get();
+
+    // Verify dst layout still valid
+    if (dest_layout->destroyed) {
+        *error_code = "VUID-VkWriteDescriptorSet-dstSet-00320";
+        string_sprintf(error_msg, "Cannot call %s to perform write update on %s which has been destroyed", func_name,
+                       dest_set->StringifySetAndLayout().c_str());
+        return false;
+    }
+    // Verify dst binding exists
+    if (!dest_layout->HasBinding(update->dstBinding)) {
+        *error_code = "VUID-VkWriteDescriptorSet-dstBinding-00315";
+        std::stringstream error_str;
+        error_str << dest_set->StringifySetAndLayout() << " does not have binding " << update->dstBinding;
+        *error_msg = error_str.str();
+        return false;
+    }
+
+    DescriptorSetLayout::ConstBindingIterator dest(dest_layout, update->dstBinding);
+    // Make sure binding isn't empty
+    if (0 == dest.GetDescriptorCount()) {
+        *error_code = "VUID-VkWriteDescriptorSet-dstBinding-00316";
+        std::stringstream error_str;
+        error_str << dest_set->StringifySetAndLayout() << " cannot be updated at binding " << update->dstBinding
+                  << " that has 0 descriptors";
+        *error_msg = error_str.str();
+        return false;
+    }
+
+    // Verify idle ds
+    if (dest_set->in_use.load() && !(dest.GetDescriptorBindingFlags() & (VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT |
+                                                                         VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT))) {
+        // TODO : Re-using Free Idle error code, need write update idle error code
+        *error_code = "VUID-vkFreeDescriptorSets-pDescriptorSets-00309";
+        std::stringstream error_str;
+        error_str << "Cannot call " << func_name << " to perform write update on " << dest_set->StringifySetAndLayout()
+                  << " that is in use by a command buffer";
+        *error_msg = error_str.str();
+        return false;
+    }
+    // We know that binding is valid, verify update and do update on each descriptor
+    auto start_idx = dest.GetGlobalIndexRange().start + update->dstArrayElement;
+    auto type = dest.GetType();
+    if (type != update->descriptorType) {
+        *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00319";
+        std::stringstream error_str;
+        error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #" << update->dstBinding
+                  << " with type " << string_VkDescriptorType(type) << " but update type is "
+                  << string_VkDescriptorType(update->descriptorType);
+        *error_msg = error_str.str();
+        return false;
+    }
+    if (type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
+        if ((update->dstArrayElement % 4) != 0) {
+            *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02219";
+            std::stringstream error_str;
+            error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #" << update->dstBinding
+                      << " with "
+                      << "dstArrayElement " << update->dstArrayElement << " not a multiple of 4";
+            *error_msg = error_str.str();
+            return false;
+        }
+        if ((update->descriptorCount % 4) != 0) {
+            *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02220";
+            std::stringstream error_str;
+            error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #" << update->dstBinding
+                      << " with "
+                      << "descriptorCount  " << update->descriptorCount << " not a multiple of 4";
+            *error_msg = error_str.str();
+            return false;
+        }
+        const auto *write_inline_info = lvl_find_in_chain<VkWriteDescriptorSetInlineUniformBlockEXT>(update->pNext);
+        if (!write_inline_info || write_inline_info->dataSize != update->descriptorCount) {
+            *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02221";
+            std::stringstream error_str;
+            if (!write_inline_info) {
+                error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #"
+                          << update->dstBinding << " with "
+                          << "VkWriteDescriptorSetInlineUniformBlockEXT missing";
+            } else {
+                error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #"
+                          << update->dstBinding << " with "
+                          << "VkWriteDescriptorSetInlineUniformBlockEXT dataSize " << write_inline_info->dataSize
+                          << " not equal to "
+                          << "VkWriteDescriptorSet descriptorCount " << update->descriptorCount;
+            }
+            *error_msg = error_str.str();
+            return false;
+        }
+        // This error is probably unreachable due to the previous two errors
+        if (write_inline_info && (write_inline_info->dataSize % 4) != 0) {
+            *error_code = "VUID-VkWriteDescriptorSetInlineUniformBlockEXT-dataSize-02222";
+            std::stringstream error_str;
+            error_str << "Attempting write update to " << dest_set->StringifySetAndLayout() << " binding #" << update->dstBinding
+                      << " with "
+                      << "VkWriteDescriptorSetInlineUniformBlockEXT dataSize " << write_inline_info->dataSize
+                      << " not a multiple of 4";
+            *error_msg = error_str.str();
+            return false;
+        }
+    }
+    // Verify consecutive bindings match (if needed)
+    if (!VerifyUpdateConsistency(DescriptorSetLayout::ConstBindingIterator(dest_layout, update->dstBinding),
+                                 update->dstArrayElement, update->descriptorCount, "write update to", dest_set->GetSet(),
+                                 error_msg)) {
+        // TODO : Should break out "consecutive binding updates" language into valid usage statements
+        *error_code = "VUID-VkWriteDescriptorSet-dstArrayElement-00321";
+        return false;
+    }
+    // Update is within bounds and consistent so last step is to validate update contents
+    if (!VerifyWriteUpdateContents(dest_set, update, start_idx, func_name, error_code, error_msg)) {
+        std::stringstream error_str;
+        error_str << "Write update to " << dest_set->StringifySetAndLayout() << " binding #" << update->dstBinding
+                  << " failed with error message: " << error_msg->c_str();
+        *error_msg = error_str.str();
+        return false;
+    }
+    // All checks passed, update is clean
+    return true;
+}
+
+// Verify that the contents of the update are ok, but don't perform actual update
+bool CoreChecks::VerifyWriteUpdateContents(const DescriptorSet *dest_set, const VkWriteDescriptorSet *update, const uint32_t index,
+                                           const char *func_name, std::string *error_code, std::string *error_msg) const {
+    using ImageSamplerDescriptor = cvdescriptorset::ImageSamplerDescriptor;
+    using SamplerDescriptor = cvdescriptorset::SamplerDescriptor;
+
+    switch (update->descriptorType) {
+        case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
+            for (uint32_t di = 0; di < update->descriptorCount; ++di) {
+                // Validate image
+                auto image_view = update->pImageInfo[di].imageView;
+                auto image_layout = update->pImageInfo[di].imageLayout;
+                if (!ValidateImageUpdate(image_view, image_layout, update->descriptorType, func_name, error_code, error_msg)) {
+                    std::stringstream error_str;
+                    error_str << "Attempted write update to combined image sampler descriptor failed due to: "
+                              << error_msg->c_str();
+                    *error_msg = error_str.str();
+                    return false;
+                }
+                if (device_extensions.vk_khr_sampler_ycbcr_conversion) {
+                    ImageSamplerDescriptor *desc = (ImageSamplerDescriptor *)dest_set->GetDescriptorFromGlobalIndex(index + di);
+                    if (desc->IsImmutableSampler()) {
+                        auto sampler_state = GetSamplerState(desc->GetSampler());
+                        auto iv_state = GetImageViewState(image_view);
+                        if (iv_state && sampler_state) {
+                            if (iv_state->samplerConversion != sampler_state->samplerConversion) {
+                                *error_code = "VUID-VkWriteDescriptorSet-descriptorType-01948";
+                                std::stringstream error_str;
+                                error_str << "Attempted write update to combined image sampler descriptor, but the image view's "
+                                             "and the sampler's ycbcr conversions are not identical, sampler: "
+                                          << desc->GetSampler() << " image view: " << iv_state->image_view << ".";
+                                *error_msg = error_str.str();
+                                return false;
+                            }
+                        }
+                    } else {
+                        auto iv_state = GetImageViewState(image_view);
+                        if (iv_state && (iv_state->samplerConversion != VK_NULL_HANDLE)) {
+                            *error_code = "VUID-VkWriteDescriptorSet-descriptorType-02738";
+                            std::stringstream error_str;
+                            error_str << "Because dstSet (" << update->dstSet << ") is bound to image view ("
+                                      << iv_state->image_view
+                                      << ") that includes a YCBCR conversion, it must have been allocated with a layout that "
+                                         "includes an immutable sampler.";
+                            *error_msg = error_str.str();
+                            return false;
+                        }
+                    }
+                }
+            }
+        }
+        // fall through
+        case VK_DESCRIPTOR_TYPE_SAMPLER: {
+            for (uint32_t di = 0; di < update->descriptorCount; ++di) {
+                SamplerDescriptor *desc = (SamplerDescriptor *)dest_set->GetDescriptorFromGlobalIndex(index + di);
+                if (!desc->IsImmutableSampler()) {
+                    if (!ValidateSampler(update->pImageInfo[di].sampler)) {
+                        *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00325";
+                        std::stringstream error_str;
+                        error_str << "Attempted write update to sampler descriptor with invalid sampler: "
+                                  << update->pImageInfo[di].sampler << ".";
+                        *error_msg = error_str.str();
+                        return false;
+                    }
+                } else {
+                    // TODO : Warn here
+                }
+            }
+            break;
+        }
+        case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+        case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
+        case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: {
+            for (uint32_t di = 0; di < update->descriptorCount; ++di) {
+                auto image_view = update->pImageInfo[di].imageView;
+                auto image_layout = update->pImageInfo[di].imageLayout;
+                if (!ValidateImageUpdate(image_view, image_layout, update->descriptorType, func_name, error_code, error_msg)) {
+                    std::stringstream error_str;
+                    error_str << "Attempted write update to image descriptor failed due to: " << error_msg->c_str();
+                    *error_msg = error_str.str();
+                    return false;
+                }
+            }
+            break;
+        }
+        case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+        case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
+            for (uint32_t di = 0; di < update->descriptorCount; ++di) {
+                auto buffer_view = update->pTexelBufferView[di];
+                auto bv_state = GetBufferViewState(buffer_view);
+                if (!bv_state) {
+                    *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00323";
+                    std::stringstream error_str;
+                    error_str << "Attempted write update to texel buffer descriptor with invalid buffer view: " << buffer_view;
+                    *error_msg = error_str.str();
+                    return false;
+                }
+                auto buffer = bv_state->create_info.buffer;
+                auto buffer_state = GetBufferState(buffer);
+                // Verify that buffer underlying the view hasn't been destroyed prematurely
+                if (!buffer_state) {
+                    *error_code = "VUID-VkWriteDescriptorSet-descriptorType-00323";
+                    std::stringstream error_str;
+                    error_str << "Attempted write update to texel buffer descriptor failed because underlying buffer (" << buffer
+                              << ") has been destroyed: " << error_msg->c_str();
+                    *error_msg = error_str.str();
+                    return false;
+                } else if (!cvdescriptorset::ValidateBufferUsage(buffer_state, update->descriptorType, error_code, error_msg)) {
+                    std::stringstream error_str;
+                    error_str << "Attempted write update to texel buffer descriptor failed due to: " << error_msg->c_str();
+                    *error_msg = error_str.str();
+                    return false;
+                }
+            }
+            break;
+        }
+        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
+            for (uint32_t di = 0; di < update->descriptorCount; ++di) {
+                if (!ValidateBufferUpdate(update->pBufferInfo + di, update->descriptorType, func_name, error_code, error_msg)) {
+                    std::stringstream error_str;
+                    error_str << "Attempted write update to buffer descriptor failed due to: " << error_msg->c_str();
+                    *error_msg = error_str.str();
+                    return false;
+                }
+            }
+            break;
+        }
+        case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
+            break;
+        case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV:
+            // XXX TODO
+            break;
+        default:
+            assert(0);  // We've already verified update type so should never get here
+            break;
+    }
+    // All checks passed so update contents are good
+    return true;
+}
diff --git a/src/third_party/vulkan-validation-layers/src/layers/descriptor_sets.h b/src/third_party/vulkan-validation-layers/src/layers/descriptor_sets.h
new file mode 100644
index 0000000..3271311
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/descriptor_sets.h
@@ -0,0 +1,686 @@
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (C) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Tobin Ehlis <tobine@google.com>
+ *         John Zulauf <jzulauf@lunarg.com>
+ */
+#ifndef CORE_VALIDATION_DESCRIPTOR_SETS_H_
+#define CORE_VALIDATION_DESCRIPTOR_SETS_H_
+
+#include "hash_vk_types.h"
+#include "vk_layer_logging.h"
+#include "vk_layer_utils.h"
+#include "vk_safe_struct.h"
+#include "vulkan/vk_layer.h"
+#include "vk_object_types.h"
+#include <map>
+#include <memory>
+#include <set>
+#include <unordered_map>
+#include <unordered_set>
+#include <vector>
+
+class CoreChecks;
+class ValidationStateTracker;
+
+// Descriptor Data structures
+namespace cvdescriptorset {
+
+// Utility structs/classes/types
+// Index range for global indices below, end is exclusive, i.e. [start,end)
+struct IndexRange {
+    IndexRange(uint32_t start_in, uint32_t end_in) : start(start_in), end(end_in) {}
+    IndexRange() = default;
+    uint32_t start;
+    uint32_t end;
+};
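+// For illustration (hypothetical values): a binding holding 3 descriptors whose global indices start at 4 has the
+// IndexRange {4, 7}, i.e. global indices 4, 5 and 6.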
+
+/*
+ * DescriptorSetLayoutDef/DescriptorSetLayout classes
+ *
+ * Overview - These two classes encapsulate the Vulkan VkDescriptorSetLayout data (layout).
+ *   A layout consists of some number of bindings, each of which has a binding#, a
+ *   type, descriptor count, stage flags, and pImmutableSamplers.
+ *
+ *   The DescriptorSetLayoutDef represents a canonicalization of the input data and contains
+ *   neither per handle nor per device state.  It is possible for different handles on
+ *   different devices to share a common def.  This is used and useful for quick compatibility
+ *   validation.  The DescriptorSetLayout refers to a DescriptorSetLayoutDef and contains
+ *   all per handle state.
+ *
+ * Index vs Binding - A layout is created with an array of VkDescriptorSetLayoutBinding
+ *  where each array index will have a corresponding binding# that is defined in that struct.
+ *  The binding#, then, is decoupled from VkDescriptorSetLayoutBinding index, which allows
+ *  bindings to be defined out-of-order. This DescriptorSetLayout class, however, stores
+ *  the bindings internally in-order. This is useful for operations which may "roll over"
+ *  from a single binding to the next consecutive binding.
+ *
+ *  Note that although the bindings are stored in-order, there still may be "gaps" in the
+ *  binding#. For example, if the binding creation order is 8, 7, 10, 3, 4, then the
+ *  internal binding array will have five entries stored in binding order 3, 4, 7, 8, 10.
+ *  To process all of the bindings in a layout you can iterate from 0 to GetBindingCount()
+ *  and use the Get*FromIndex() functions for each index. To just process a single binding,
+ *  use the Get*FromBinding() functions.
+ *
+ * Global Index - The binding vector index has as many indices as there are bindings.
+ *  This class also has the concept of a Global Index. For the global index functions,
+ *  there are as many global indices as there are descriptors in the layout.
+ *  For the global index, consider all of the bindings to be a flat array where
+ *  descriptor 0 of the lowest binding# is index 0 and each descriptor in the layout
+ *  increments from there. So if the lowest binding# in this example had descriptorCount of
+ *  10, then the GlobalStartIndex of the 2nd lowest binding# will be 10 where 0-9 are the
+ *  global indices for the lowest binding#.
+ */
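+// For illustration (hypothetical descriptorCounts): with the creation order 8, 7, 10, 3, 4 above and
+// descriptorCounts of 10, 5, 1, 1 and 2 for bindings 3, 4, 7, 8 and 10 respectively, the bindings are stored in
+// order 3, 4, 7, 8, 10 and their global index ranges are [0,10), [10,15), [15,16), [16,17) and [17,19).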
+class DescriptorSetLayoutDef {
+  public:
+    // Constructors and destructor
+    DescriptorSetLayoutDef(const VkDescriptorSetLayoutCreateInfo *p_create_info);
+    size_t hash() const;
+
+    uint32_t GetTotalDescriptorCount() const { return descriptor_count_; };
+    uint32_t GetDynamicDescriptorCount() const { return dynamic_descriptor_count_; };
+    VkDescriptorSetLayoutCreateFlags GetCreateFlags() const { return flags_; }
+    // Return the number of bindings in this layout
+    uint32_t GetBindingCount() const { return binding_count_; };
+    // Non-empty binding numbers in order
+    const std::set<uint32_t> &GetSortedBindingSet() const { return non_empty_bindings_; }
+    // Return true if given binding is present in this layout
+    bool HasBinding(const uint32_t binding) const { return binding_to_index_map_.count(binding) > 0; };
+    // Return true if the binding immediately after the given binding exists and has the same type, stageFlags & immutable sampler use
+    bool IsNextBindingConsistent(const uint32_t) const;
+    uint32_t GetIndexFromBinding(uint32_t binding) const;
+    // Various Get functions that can either be passed a binding#, which will
+    //  be automatically translated into the appropriate index, or the index# can be passed in directly
+    uint32_t GetMaxBinding() const { return bindings_[bindings_.size() - 1].binding; }
+    VkDescriptorSetLayoutBinding const *GetDescriptorSetLayoutBindingPtrFromIndex(const uint32_t) const;
+    VkDescriptorSetLayoutBinding const *GetDescriptorSetLayoutBindingPtrFromBinding(uint32_t binding) const {
+        return GetDescriptorSetLayoutBindingPtrFromIndex(GetIndexFromBinding(binding));
+    }
+    const std::vector<safe_VkDescriptorSetLayoutBinding> &GetBindings() const { return bindings_; }
+    const VkDescriptorSetLayoutBinding *GetBindingInfoFromIndex(const uint32_t index) const { return bindings_[index].ptr(); }
+    const VkDescriptorSetLayoutBinding *GetBindingInfoFromBinding(const uint32_t binding) const {
+        return GetBindingInfoFromIndex(GetIndexFromBinding(binding));
+    }
+    const std::vector<VkDescriptorBindingFlagsEXT> &GetBindingFlags() const { return binding_flags_; }
+    uint32_t GetDescriptorCountFromIndex(const uint32_t) const;
+    uint32_t GetDescriptorCountFromBinding(const uint32_t binding) const {
+        return GetDescriptorCountFromIndex(GetIndexFromBinding(binding));
+    }
+    VkDescriptorType GetTypeFromIndex(const uint32_t) const;
+    VkDescriptorType GetTypeFromBinding(const uint32_t binding) const { return GetTypeFromIndex(GetIndexFromBinding(binding)); }
+    VkShaderStageFlags GetStageFlagsFromIndex(const uint32_t) const;
+    VkShaderStageFlags GetStageFlagsFromBinding(const uint32_t binding) const {
+        return GetStageFlagsFromIndex(GetIndexFromBinding(binding));
+    }
+    VkDescriptorBindingFlagsEXT GetDescriptorBindingFlagsFromIndex(const uint32_t) const;
+    VkDescriptorBindingFlagsEXT GetDescriptorBindingFlagsFromBinding(const uint32_t binding) const {
+        return GetDescriptorBindingFlagsFromIndex(GetIndexFromBinding(binding));
+    }
+    VkSampler const *GetImmutableSamplerPtrFromBinding(const uint32_t) const;
+    VkSampler const *GetImmutableSamplerPtrFromIndex(const uint32_t) const;
+    // For a given binding and array index, return the corresponding index into the dynamic offset array
+    int32_t GetDynamicOffsetIndexFromBinding(uint32_t binding) const {
+        auto dyn_off = binding_to_dynamic_array_idx_map_.find(binding);
+        if (dyn_off == binding_to_dynamic_array_idx_map_.end()) {
+            assert(0);  // Requesting dyn offset for invalid binding/array idx pair
+            return -1;
+        }
+        return dyn_off->second;
+    }
+    // For a particular binding, get the global index range
+    //  This call should be guarded by a call to "HasBinding(binding)" to verify that the given binding exists
+    const IndexRange &GetGlobalIndexRangeFromBinding(const uint32_t) const;
+    const cvdescriptorset::IndexRange &GetGlobalIndexRangeFromIndex(uint32_t index) const;
+
+    // Helper function to get the next valid binding for a descriptor
+    uint32_t GetNextValidBinding(const uint32_t) const;
+    bool IsPushDescriptor() const { return GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR; };
+
+    struct BindingTypeStats {
+        uint32_t dynamic_buffer_count;
+        uint32_t non_dynamic_buffer_count;
+        uint32_t image_sampler_count;
+    };
+    const BindingTypeStats &GetBindingTypeStats() const { return binding_type_stats_; }
+
+  private:
+    // Only the first three data members are used for hash and equality checks, the other members are derived from them, and are
+    // used to speed up the various lookups/queries/validations
+    VkDescriptorSetLayoutCreateFlags flags_;
+    std::vector<safe_VkDescriptorSetLayoutBinding> bindings_;
+    std::vector<VkDescriptorBindingFlagsEXT> binding_flags_;
+
+    // Convenience data structures for rapid lookup of various descriptor set layout properties
+    std::set<uint32_t> non_empty_bindings_;  // Contains non-empty bindings in numerical order
+    std::unordered_map<uint32_t, uint32_t> binding_to_index_map_;
+    // The following allows a non-iterative lookup of a binding's global index range from its index
+    std::vector<IndexRange> global_index_range_;  // range is exclusive of .end
+    // For a given binding map to associated index in the dynamic offset array
+    std::unordered_map<uint32_t, uint32_t> binding_to_dynamic_array_idx_map_;
+
+    uint32_t binding_count_;     // # of bindings in this layout
+    uint32_t descriptor_count_;  // total # descriptors in this layout
+    uint32_t dynamic_descriptor_count_;
+    BindingTypeStats binding_type_stats_;
+};
+
+static inline bool operator==(const DescriptorSetLayoutDef &lhs, const DescriptorSetLayoutDef &rhs) {
+    bool result = (lhs.GetCreateFlags() == rhs.GetCreateFlags()) && (lhs.GetBindings() == rhs.GetBindings()) &&
+                  (lhs.GetBindingFlags() == rhs.GetBindingFlags());
+    return result;
+}
+
+// Canonical dictionary of DSL definitions -- independent of device or handle
+using DescriptorSetLayoutDict = hash_util::Dictionary<DescriptorSetLayoutDef, hash_util::HasHashMember<DescriptorSetLayoutDef>>;
+using DescriptorSetLayoutId = DescriptorSetLayoutDict::Id;
+
+class DescriptorSetLayout : public BASE_NODE {
+  public:
+    // Constructors and destructor
+    DescriptorSetLayout(const VkDescriptorSetLayoutCreateInfo *p_create_info, const VkDescriptorSetLayout layout);
+    bool HasBinding(const uint32_t binding) const { return layout_id_->HasBinding(binding); }
+    // Return true if this layout is compatible with the passed-in layout
+    bool IsCompatible(DescriptorSetLayout const *rh_ds_layout) const;
+    // Straightforward Get functions
+    VkDescriptorSetLayout GetDescriptorSetLayout() const { return layout_; };
+    const DescriptorSetLayoutDef *GetLayoutDef() const { return layout_id_.get(); }
+    DescriptorSetLayoutId GetLayoutId() const { return layout_id_; }
+    uint32_t GetTotalDescriptorCount() const { return layout_id_->GetTotalDescriptorCount(); };
+    uint32_t GetDynamicDescriptorCount() const { return layout_id_->GetDynamicDescriptorCount(); };
+    uint32_t GetBindingCount() const { return layout_id_->GetBindingCount(); };
+    VkDescriptorSetLayoutCreateFlags GetCreateFlags() const { return layout_id_->GetCreateFlags(); }
+    bool IsNextBindingConsistent(const uint32_t) const;
+    uint32_t GetIndexFromBinding(uint32_t binding) const { return layout_id_->GetIndexFromBinding(binding); }
+    // Various Get functions that can either be passed a binding#, which will
+    //  be automatically translated into the appropriate index, or the index# can be passed in directly
+    uint32_t GetMaxBinding() const { return layout_id_->GetMaxBinding(); }
+    VkDescriptorSetLayoutBinding const *GetDescriptorSetLayoutBindingPtrFromIndex(const uint32_t index) const {
+        return layout_id_->GetDescriptorSetLayoutBindingPtrFromIndex(index);
+    }
+    VkDescriptorSetLayoutBinding const *GetDescriptorSetLayoutBindingPtrFromBinding(uint32_t binding) const {
+        return layout_id_->GetDescriptorSetLayoutBindingPtrFromBinding(binding);
+    }
+    const std::vector<safe_VkDescriptorSetLayoutBinding> &GetBindings() const { return layout_id_->GetBindings(); }
+    const std::set<uint32_t> &GetSortedBindingSet() const { return layout_id_->GetSortedBindingSet(); }
+    uint32_t GetDescriptorCountFromIndex(const uint32_t index) const { return layout_id_->GetDescriptorCountFromIndex(index); }
+    uint32_t GetDescriptorCountFromBinding(const uint32_t binding) const {
+        return layout_id_->GetDescriptorCountFromBinding(binding);
+    }
+    VkDescriptorType GetTypeFromIndex(const uint32_t index) const { return layout_id_->GetTypeFromIndex(index); }
+    VkDescriptorType GetTypeFromBinding(const uint32_t binding) const { return layout_id_->GetTypeFromBinding(binding); }
+    VkShaderStageFlags GetStageFlagsFromIndex(const uint32_t index) const { return layout_id_->GetStageFlagsFromIndex(index); }
+    VkShaderStageFlags GetStageFlagsFromBinding(const uint32_t binding) const {
+        return layout_id_->GetStageFlagsFromBinding(binding);
+    }
+    VkDescriptorBindingFlagsEXT GetDescriptorBindingFlagsFromIndex(const uint32_t index) const {
+        return layout_id_->GetDescriptorBindingFlagsFromIndex(index);
+    }
+    VkDescriptorBindingFlagsEXT GetDescriptorBindingFlagsFromBinding(const uint32_t binding) const {
+        return layout_id_->GetDescriptorBindingFlagsFromBinding(binding);
+    }
+    VkSampler const *GetImmutableSamplerPtrFromBinding(const uint32_t binding) const {
+        return layout_id_->GetImmutableSamplerPtrFromBinding(binding);
+    }
+    VkSampler const *GetImmutableSamplerPtrFromIndex(const uint32_t index) const {
+        return layout_id_->GetImmutableSamplerPtrFromIndex(index);
+    }
+    // For a given binding and array index, return the corresponding index into the dynamic offset array
+    int32_t GetDynamicOffsetIndexFromBinding(uint32_t binding) const {
+        return layout_id_->GetDynamicOffsetIndexFromBinding(binding);
+    }
+    // For a particular binding, get the global index range
+    //  This call should be guarded by a call to "HasBinding(binding)" to verify that the given binding exists
+    const IndexRange &GetGlobalIndexRangeFromBinding(const uint32_t binding) const {
+        return layout_id_->GetGlobalIndexRangeFromBinding(binding);
+    }
+    const IndexRange &GetGlobalIndexRangeFromIndex(uint32_t index) const { return layout_id_->GetGlobalIndexRangeFromIndex(index); }
+
+    // Helper function to get the next valid binding for a descriptor
+    uint32_t GetNextValidBinding(const uint32_t binding) const { return layout_id_->GetNextValidBinding(binding); }
+    bool IsPushDescriptor() const { return layout_id_->IsPushDescriptor(); }
+    bool IsVariableDescriptorCountFromIndex(uint32_t index) const {
+        return !!(GetDescriptorBindingFlagsFromIndex(index) & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT);
+    }
+    bool IsVariableDescriptorCount(uint32_t binding) const {
+        return IsVariableDescriptorCountFromIndex(GetIndexFromBinding(binding));
+    }
+
+    using BindingTypeStats = DescriptorSetLayoutDef::BindingTypeStats;
+    const BindingTypeStats &GetBindingTypeStats() const { return layout_id_->GetBindingTypeStats(); }
+
+    // Binding Iterator
+    class ConstBindingIterator {
+      public:
+        ConstBindingIterator() = delete;
+        ConstBindingIterator(const ConstBindingIterator &other) = default;
+        ConstBindingIterator &operator=(const ConstBindingIterator &rhs) = default;
+
+        ConstBindingIterator(const DescriptorSetLayout *layout) : layout_(layout), index_(0) { assert(layout); }
+        ConstBindingIterator(const DescriptorSetLayout *layout, uint32_t binding) : ConstBindingIterator(layout) {
+            index_ = layout->GetIndexFromBinding(binding);
+        }
+
+        VkDescriptorSetLayoutBinding const *GetDescriptorSetLayoutBindingPtr() const {
+            return layout_->GetDescriptorSetLayoutBindingPtrFromIndex(index_);
+        }
+        uint32_t GetDescriptorCount() const { return layout_->GetDescriptorCountFromIndex(index_); }
+        VkDescriptorType GetType() const { return layout_->GetTypeFromIndex(index_); }
+        VkShaderStageFlags GetStageFlags() const { return layout_->GetStageFlagsFromIndex(index_); }
+
+        VkDescriptorBindingFlagsEXT GetDescriptorBindingFlags() const {
+            return layout_->GetDescriptorBindingFlagsFromIndex(index_);
+        }
+
+        bool IsVariableDescriptorCount() const { return layout_->IsVariableDescriptorCountFromIndex(index_); }
+
+        VkSampler const *GetImmutableSamplerPtr() const { return layout_->GetImmutableSamplerPtrFromIndex(index_); }
+        const IndexRange &GetGlobalIndexRange() const { return layout_->GetGlobalIndexRangeFromIndex(index_); }
+        bool AtEnd() const { return index_ == layout_->GetBindingCount(); }
+
+        // Return index into dynamic offset array for given binding
+        int32_t GetDynamicOffsetIndex() const {
+            return layout_->GetDynamicOffsetIndexFromBinding(Binding());  // layout_ only provides binding-mapped access
+        }
+
+        bool operator==(const ConstBindingIterator &rhs) const { return (index_ == rhs.index_) && (layout_ == rhs.layout_); }
+
+        ConstBindingIterator &operator++() {
+            if (!AtEnd()) {
+                index_++;
+            }
+            return *this;
+        }
+
+        bool IsConsistent(const ConstBindingIterator &other) const {
+            if (AtEnd() || other.AtEnd()) {
+                return false;
+            }
+            const auto *binding_ci = GetDescriptorSetLayoutBindingPtr();
+            const auto *other_binding_ci = other.GetDescriptorSetLayoutBindingPtr();
+            assert((binding_ci != nullptr) && (other_binding_ci != nullptr));
+
+            if ((binding_ci->descriptorType != other_binding_ci->descriptorType) ||
+                (binding_ci->stageFlags != other_binding_ci->stageFlags) ||
+                (!hash_util::similar_for_nullity(binding_ci->pImmutableSamplers, other_binding_ci->pImmutableSamplers)) ||
+                (GetDescriptorBindingFlags() != other.GetDescriptorBindingFlags())) {
+                return false;
+            }
+            return true;
+        }
+
+        const DescriptorSetLayout *Layout() const { return layout_; }
+        uint32_t Binding() const { return layout_->GetBindings()[index_].binding; }
+        ConstBindingIterator Next() {
+            ConstBindingIterator next(*this);
+            ++next;
+            return next;
+        }
+
+      private:
+        const DescriptorSetLayout *layout_;
+        uint32_t index_;
+    };
+    ConstBindingIterator end() const { return ConstBindingIterator(this, GetBindingCount()); }
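+    // A minimal usage sketch (comment only, not part of this header), assuming 'layout'
+    // is a hypothetical valid DescriptorSetLayout pointer:
+    //
+    //   for (DescriptorSetLayout::ConstBindingIterator it(layout); !it.AtEnd(); ++it) {
+    //       const VkDescriptorType type = it.GetType();
+    //       const uint32_t count = it.GetDescriptorCount();
+    //       // ... per-binding validation work using type and count ...
+    //   }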
+
+  private:
+    VkDescriptorSetLayout layout_;
+    DescriptorSetLayoutId layout_id_;
+};
+
+/*
+ * Descriptor classes
+ *  Descriptor is an abstract base class from which 5 separate descriptor types are derived.
+ *   This allows the WriteUpdate() and CopyUpdate() operations to be specialized per
+ *   descriptor type, but all descriptors in a set can be accessed via the common Descriptor*.
+ */
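+// A rough consumption sketch (comment only, not part of this header): code holding the
+// common Descriptor* typically branches on GetClass() and casts to one of the concrete
+// types declared below; 'set' and 'i' are hypothetical placeholders:
+//
+//   const Descriptor *desc = set->GetDescriptorFromGlobalIndex(i);
+//   if (desc->GetClass() == GeneralBuffer) {
+//       const auto *buffer_desc = static_cast<const BufferDescriptor *>(desc);
+//       VkBuffer handle = buffer_desc->GetBuffer();
+//       // ... validate the buffer backing this descriptor ...
+//   }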
+
+// Slightly broader than type, each C++ "class" has a corresponding "DescriptorClass"
+enum DescriptorClass { PlainSampler, ImageSampler, Image, TexelBuffer, GeneralBuffer, InlineUniform, AccelerationStructure };
+
+class Descriptor {
+  public:
+    virtual ~Descriptor(){};
+    virtual void WriteUpdate(ValidationStateTracker *dev_data, const VkWriteDescriptorSet *, const uint32_t) = 0;
+    virtual void CopyUpdate(ValidationStateTracker *dev_data, const Descriptor *) = 0;
+    // Create binding between resources of this descriptor and given cb_node
+    virtual void UpdateDrawState(ValidationStateTracker *, CMD_BUFFER_STATE *) = 0;
+    virtual DescriptorClass GetClass() const { return descriptor_class; };
+    // Special fast-path check for SamplerDescriptors that are immutable
+    virtual bool IsImmutableSampler() const { return false; };
+    // Check for dynamic descriptor type
+    virtual bool IsDynamic() const { return false; };
+    // Check for storage descriptor type
+    virtual bool IsStorage() const { return false; };
+    bool updated;  // Has descriptor been updated?
+    DescriptorClass descriptor_class;
+};
+
+// Return true if this layout is compatible with passed in layout from a pipelineLayout,
+//   else return false and update error_msg with description of incompatibility
+bool VerifySetLayoutCompatibility(const debug_report_data *report_data, DescriptorSetLayout const *lh_ds_layout,
+                                  DescriptorSetLayout const *rh_ds_layout, std::string *error_msg);
+bool ValidateDescriptorSetLayoutCreateInfo(const debug_report_data *report_data, const VkDescriptorSetLayoutCreateInfo *create_info,
+                                           const bool push_descriptor_ext, const uint32_t max_push_descriptors,
+                                           const bool descriptor_indexing_ext,
+                                           const VkPhysicalDeviceDescriptorIndexingFeaturesEXT *descriptor_indexing_features,
+                                           const VkPhysicalDeviceInlineUniformBlockFeaturesEXT *inline_uniform_block_features,
+                                           const VkPhysicalDeviceInlineUniformBlockPropertiesEXT *inline_uniform_block_props,
+                                           const DeviceExtensions *device_extensions);
+
+class SamplerDescriptor : public Descriptor {
+  public:
+    SamplerDescriptor(ValidationStateTracker *dev_data, const VkSampler *);
+    void WriteUpdate(ValidationStateTracker *dev_data, const VkWriteDescriptorSet *, const uint32_t) override;
+    void CopyUpdate(ValidationStateTracker *dev_data, const Descriptor *) override;
+    void UpdateDrawState(ValidationStateTracker *, CMD_BUFFER_STATE *) override;
+    virtual bool IsImmutableSampler() const override { return immutable_; };
+    VkSampler GetSampler() const { return sampler_; }
+    const SAMPLER_STATE *GetSamplerState() const { return sampler_state_.get(); }
+    SAMPLER_STATE *GetSamplerState() { return sampler_state_.get(); }
+
+  private:
+    VkSampler sampler_;
+    bool immutable_;
+    std::shared_ptr<SAMPLER_STATE> sampler_state_;
+};
+
+class ImageSamplerDescriptor : public Descriptor {
+  public:
+    ImageSamplerDescriptor(ValidationStateTracker *dev_data, const VkSampler *);
+    void WriteUpdate(ValidationStateTracker *dev_data, const VkWriteDescriptorSet *, const uint32_t) override;
+    void CopyUpdate(ValidationStateTracker *dev_data, const Descriptor *) override;
+    void UpdateDrawState(ValidationStateTracker *, CMD_BUFFER_STATE *) override;
+    virtual bool IsImmutableSampler() const override { return immutable_; };
+    VkSampler GetSampler() const { return sampler_; }
+    VkImageView GetImageView() const { return image_view_; }
+    const IMAGE_VIEW_STATE *GetImageViewState() const { return image_view_state_.get(); }
+    IMAGE_VIEW_STATE *GetImageViewState() { return image_view_state_.get(); }
+    VkImageLayout GetImageLayout() const { return image_layout_; }
+    const SAMPLER_STATE *GetSamplerState() const { return sampler_state_.get(); }
+    SAMPLER_STATE *GetSamplerState() { return sampler_state_.get(); }
+
+  private:
+    std::shared_ptr<SAMPLER_STATE> sampler_state_;
+    VkSampler sampler_;
+    bool immutable_;
+    std::shared_ptr<IMAGE_VIEW_STATE> image_view_state_;
+    VkImageView image_view_;
+    VkImageLayout image_layout_;
+};
+
+class ImageDescriptor : public Descriptor {
+  public:
+    ImageDescriptor(const VkDescriptorType);
+    void WriteUpdate(ValidationStateTracker *dev_data, const VkWriteDescriptorSet *, const uint32_t) override;
+    void CopyUpdate(ValidationStateTracker *dev_data, const Descriptor *) override;
+    void UpdateDrawState(ValidationStateTracker *, CMD_BUFFER_STATE *) override;
+    virtual bool IsStorage() const override { return storage_; }
+    VkImageView GetImageView() const { return image_view_; }
+    const IMAGE_VIEW_STATE *GetImageViewState() const { return image_view_state_.get(); }
+    IMAGE_VIEW_STATE *GetImageViewState() { return image_view_state_.get(); }
+    VkImageLayout GetImageLayout() const { return image_layout_; }
+
+  private:
+    bool storage_;
+    std::shared_ptr<IMAGE_VIEW_STATE> image_view_state_;
+    VkImageView image_view_;
+    VkImageLayout image_layout_;
+};
+
+class TexelDescriptor : public Descriptor {
+  public:
+    TexelDescriptor(const VkDescriptorType);
+    void WriteUpdate(ValidationStateTracker *dev_data, const VkWriteDescriptorSet *, const uint32_t) override;
+    void CopyUpdate(ValidationStateTracker *dev_data, const Descriptor *) override;
+    void UpdateDrawState(ValidationStateTracker *, CMD_BUFFER_STATE *) override;
+    virtual bool IsStorage() const override { return storage_; }
+    VkBufferView GetBufferView() const { return buffer_view_; }
+    const BUFFER_VIEW_STATE *GetBufferViewState() const { return buffer_view_state_.get(); }
+    BUFFER_VIEW_STATE *GetBufferViewState() { return buffer_view_state_.get(); }
+
+  private:
+    VkBufferView buffer_view_;
+    bool storage_;
+    std::shared_ptr<BUFFER_VIEW_STATE> buffer_view_state_;
+};
+
+class BufferDescriptor : public Descriptor {
+  public:
+    BufferDescriptor(const VkDescriptorType);
+    void WriteUpdate(ValidationStateTracker *dev_data, const VkWriteDescriptorSet *, const uint32_t) override;
+    void CopyUpdate(ValidationStateTracker *dev_data, const Descriptor *) override;
+    void UpdateDrawState(ValidationStateTracker *, CMD_BUFFER_STATE *) override;
+    virtual bool IsDynamic() const override { return dynamic_; }
+    virtual bool IsStorage() const override { return storage_; }
+    VkBuffer GetBuffer() const { return buffer_; }
+    const BUFFER_STATE *GetBufferState() const { return buffer_state_.get(); }
+    BUFFER_STATE *GetBufferState() { return buffer_state_.get(); }
+    VkDeviceSize GetOffset() const { return offset_; }
+    VkDeviceSize GetRange() const { return range_; }
+
+  private:
+    bool storage_;
+    bool dynamic_;
+    VkBuffer buffer_;
+    VkDeviceSize offset_;
+    VkDeviceSize range_;
+    std::shared_ptr<BUFFER_STATE> buffer_state_;
+};
+
+class InlineUniformDescriptor : public Descriptor {
+  public:
+    InlineUniformDescriptor(const VkDescriptorType) {
+        updated = false;
+        descriptor_class = InlineUniform;
+    }
+    void WriteUpdate(ValidationStateTracker *dev_data, const VkWriteDescriptorSet *, const uint32_t) override { updated = true; }
+    void CopyUpdate(ValidationStateTracker *dev_data, const Descriptor *) override { updated = true; }
+    void UpdateDrawState(ValidationStateTracker *, CMD_BUFFER_STATE *) override {}
+};
+
+class AccelerationStructureDescriptor : public Descriptor {
+  public:
+    AccelerationStructureDescriptor(const VkDescriptorType) {
+        updated = false;
+        descriptor_class = AccelerationStructure;
+    }
+    void WriteUpdate(ValidationStateTracker *dev_data, const VkWriteDescriptorSet *, const uint32_t) override { updated = true; }
+    void CopyUpdate(ValidationStateTracker *dev_data, const Descriptor *) override { updated = true; }
+    void UpdateDrawState(ValidationStateTracker *, CMD_BUFFER_STATE *) override {}
+};
+
+// Structs to contain common elements that need to be shared between Validate* and Perform* calls below
+struct AllocateDescriptorSetsData {
+    std::map<uint32_t, uint32_t> required_descriptors_by_type;
+    std::vector<std::shared_ptr<DescriptorSetLayout const>> layout_nodes;
+    AllocateDescriptorSetsData(uint32_t);
+};
+// Helper functions for descriptor set functions that cross multiple sets
+// "Validate" will make sure an update is ok without actually performing it
+bool ValidateUpdateDescriptorSets(const debug_report_data *, const CoreChecks *, uint32_t, const VkWriteDescriptorSet *, uint32_t,
+                                  const VkCopyDescriptorSet *, const char *func_name);
+// "Perform" does the update with the assumption that ValidateUpdateDescriptorSets() has passed for the given update
+void PerformUpdateDescriptorSets(ValidationStateTracker *, uint32_t, const VkWriteDescriptorSet *, uint32_t,
+                                 const VkCopyDescriptorSet *);
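+// Sketch of the intended call pattern (comment only; the argument names used here are
+// hypothetical placeholders): validate first, then perform only if validation passed:
+//
+//   bool skip = ValidateUpdateDescriptorSets(report_data, core_checks, writeCount, pWrites,
+//                                            copyCount, pCopies, "vkUpdateDescriptorSets()");
+//   if (!skip) {
+//       PerformUpdateDescriptorSets(state_tracker, writeCount, pWrites, copyCount, pCopies);
+//   }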
+
+// Core Validation specific validation checks using DescriptorSet and DescriptorSetLayoutAccessors
+// TODO: migrate out of descriptor_set.cpp/h
+// For a particular binding starting at offset and having update_count descriptors
+// updated, verify that for any binding boundaries crossed, the update is consistent
+bool VerifyUpdateConsistency(DescriptorSetLayout::ConstBindingIterator current_binding, uint32_t offset, uint32_t update_count,
+                             const char *type, const VkDescriptorSet set, std::string *error_msg);
+
+// Validate buffer descriptor update info
+bool ValidateBufferUsage(BUFFER_STATE const *buffer_node, VkDescriptorType type, std::string *error_code, std::string *error_msg);
+
+// Helper class to encapsulate the descriptor update template decoding logic
+struct DecodedTemplateUpdate {
+    std::vector<VkWriteDescriptorSet> desc_writes;
+    std::vector<VkWriteDescriptorSetInlineUniformBlockEXT> inline_infos;
+    DecodedTemplateUpdate(const ValidationStateTracker *device_data, VkDescriptorSet descriptorSet,
+                          const TEMPLATE_STATE *template_state, const void *pData,
+                          VkDescriptorSetLayout push_layout = VK_NULL_HANDLE);
+};
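+// Plausible usage sketch (comment only; 'state_tracker', 'descriptorSet', 'template_state'
+// and 'pData' are hypothetical placeholders): decode the template into plain
+// VkWriteDescriptorSet entries, then reuse the non-template update path:
+//
+//   DecodedTemplateUpdate decoded(state_tracker, descriptorSet, template_state, pData);
+//   PerformUpdateDescriptorSets(state_tracker, static_cast<uint32_t>(decoded.desc_writes.size()),
+//                               decoded.desc_writes.data(), 0, nullptr);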
+
+/*
+ * DescriptorSet class
+ *
+ * Overview - This class encapsulates the Vulkan VkDescriptorSet data (set).
+ *   A set has an underlying layout which defines the bindings in the set and the
+ *   types and numbers of descriptors in each descriptor slot. Most of the layout
+ *   interfaces are exposed through identically-named functions in the set class.
+ *   Please refer to the DescriptorSetLayout comment above for a description of
+ *   index, binding, and global index.
+ *
+ * At construction a vector of Descriptor* is created with types corresponding to the
+ *   layout. The primary operation performed on the descriptors is to update them
+ *   via write or copy updates, and validate that the update contents are correct.
+ *   In order to validate update contents, the DescriptorSet stores a bunch of ptrs
+ *   to data maps where various Vulkan objects can be looked up. The management of
+ *   those maps is performed externally. The set class relies on their contents to
+ *   be correct at the time of update.
+ */
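+// A minimal sketch of a typical lookup (comment only; 'set' and 'binding' are hypothetical,
+// and IndexRange is assumed to expose 'start' and 'end' members):
+//
+//   if (set->HasBinding(binding)) {
+//       const auto range = set->GetGlobalIndexRangeFromBinding(binding);
+//       for (uint32_t gi = range.start; gi < range.end; ++gi) {
+//           const Descriptor *d = set->GetDescriptorFromGlobalIndex(gi);
+//           if (!d->updated) {
+//               // this descriptor slot was never written
+//           }
+//       }
+//   }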
+class DescriptorSet : public BASE_NODE {
+  public:
+    using StateTracker = ValidationStateTracker;
+    DescriptorSet(const VkDescriptorSet, DESCRIPTOR_POOL_STATE *, const std::shared_ptr<DescriptorSetLayout const> &,
+                  uint32_t variable_count, StateTracker *state_data, const debug_report_data *report_data);
+    ~DescriptorSet();
+    // A number of common Get* functions that return data based on layout from which this set was created
+    uint32_t GetTotalDescriptorCount() const { return p_layout_->GetTotalDescriptorCount(); };
+    uint32_t GetDynamicDescriptorCount() const { return p_layout_->GetDynamicDescriptorCount(); };
+    uint32_t GetBindingCount() const { return p_layout_->GetBindingCount(); };
+    VkDescriptorType GetTypeFromIndex(const uint32_t index) const { return p_layout_->GetTypeFromIndex(index); };
+    VkDescriptorType GetTypeFromBinding(const uint32_t binding) const { return p_layout_->GetTypeFromBinding(binding); };
+    uint32_t GetDescriptorCountFromIndex(const uint32_t index) const { return p_layout_->GetDescriptorCountFromIndex(index); };
+    uint32_t GetDescriptorCountFromBinding(const uint32_t binding) const {
+        return p_layout_->GetDescriptorCountFromBinding(binding);
+    };
+    // Return index into dynamic offset array for given binding
+    int32_t GetDynamicOffsetIndexFromBinding(uint32_t binding) const {
+        return p_layout_->GetDynamicOffsetIndexFromBinding(binding);
+    }
+    // Return true if given binding is present in this set
+    bool HasBinding(const uint32_t binding) const { return p_layout_->HasBinding(binding); };
+
+    std::string StringifySetAndLayout() const;
+
+    // Perform a push update whose contents were just validated using ValidatePushDescriptorsUpdate
+    void PerformPushDescriptorsUpdate(ValidationStateTracker *dev_data, uint32_t write_count, const VkWriteDescriptorSet *p_wds);
+    // Perform a WriteUpdate whose contents were just validated using ValidateWriteUpdate
+    void PerformWriteUpdate(ValidationStateTracker *dev_data, const VkWriteDescriptorSet *);
+    // Perform a CopyUpdate whose contents were just validated using ValidateCopyUpdate
+    void PerformCopyUpdate(ValidationStateTracker *dev_data, const VkCopyDescriptorSet *, const DescriptorSet *);
+
+    const std::shared_ptr<DescriptorSetLayout const> &GetLayout() const { return p_layout_; };
+    VkDescriptorSetLayout GetDescriptorSetLayout() const { return p_layout_->GetDescriptorSetLayout(); }
+    VkDescriptorSet GetSet() const { return set_; };
+    // Bind given cmd_buffer to this descriptor set and
+    // update CB image layout map with image/imagesampler descriptor image layouts
+    void UpdateDrawState(ValidationStateTracker *, CMD_BUFFER_STATE *, const PIPELINE_STATE *,
+                         const std::map<uint32_t, descriptor_req> &);
+
+    // Track work that has been bound or validated to avoid duplicate work, important when large descriptor arrays
+    // are present
+    typedef std::unordered_set<uint32_t> TrackedBindings;
+    static void FilterOneBindingReq(const BindingReqMap::value_type &binding_req_pair, BindingReqMap *out_req,
+                                    const TrackedBindings &set, uint32_t limit);
+    void FilterBindingReqs(const CMD_BUFFER_STATE &, const PIPELINE_STATE &, const BindingReqMap &in_req,
+                           BindingReqMap *out_req) const;
+    void UpdateValidationCache(const CMD_BUFFER_STATE &cb_state, const PIPELINE_STATE &pipeline,
+                               const BindingReqMap &updated_bindings);
+    void ClearCachedDynamicDescriptorValidation(CMD_BUFFER_STATE *cb_state) {
+        cached_validation_[cb_state].dynamic_buffers.clear();
+    }
+    void ClearCachedValidation(CMD_BUFFER_STATE *cb_state) { cached_validation_.erase(cb_state); }
+    VkSampler const *GetImmutableSamplerPtrFromBinding(const uint32_t index) const {
+        return p_layout_->GetImmutableSamplerPtrFromBinding(index);
+    };
+    // For a particular binding, get the global index range
+    const IndexRange GetGlobalIndexRangeFromBinding(const uint32_t binding, bool actual_length = false) const {
+        if (actual_length && binding == p_layout_->GetMaxBinding() && IsVariableDescriptorCount(binding)) {
+            IndexRange range = p_layout_->GetGlobalIndexRangeFromBinding(binding);
+            auto diff = GetDescriptorCountFromBinding(binding) - GetVariableDescriptorCount();
+            range.end -= diff;
+            return range;
+        }
+        return p_layout_->GetGlobalIndexRangeFromBinding(binding);
+    };
+    // Return true if any part of set has ever been updated
+    bool IsUpdated() const { return some_update_; };
+    bool IsPushDescriptor() const { return p_layout_->IsPushDescriptor(); };
+    bool IsVariableDescriptorCount(uint32_t binding) const { return p_layout_->IsVariableDescriptorCount(binding); }
+    bool IsUpdateAfterBind(uint32_t binding) const {
+        return !!(p_layout_->GetDescriptorBindingFlagsFromBinding(binding) & VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT);
+    }
+    uint32_t GetVariableDescriptorCount() const { return variable_count_; }
+    DESCRIPTOR_POOL_STATE *GetPoolState() const { return pool_state_; }
+    const Descriptor *GetDescriptorFromGlobalIndex(const uint32_t index) const { return descriptors_[index].get(); }
+    uint64_t GetChangeCount() const { return change_count_; }
+
+    const std::vector<safe_VkWriteDescriptorSet> &GetWrites() const { return push_descriptor_set_writes; }
+
+  private:
+    // Private helper to set all bound cmd buffers to INVALID state
+    void InvalidateBoundCmdBuffers(ValidationStateTracker *state_data);
+    bool some_update_;  // has any part of the set ever been updated?
+    VkDescriptorSet set_;
+    DESCRIPTOR_POOL_STATE *pool_state_;
+    const std::shared_ptr<DescriptorSetLayout const> p_layout_;
+    std::vector<std::unique_ptr<Descriptor>> descriptors_;
+    const debug_report_data *report_data_;
+    uint32_t variable_count_;
+    uint64_t change_count_;
+
+    // If this descriptor set is a push descriptor set, the descriptor
+    // set writes that were last pushed.
+    std::vector<safe_VkWriteDescriptorSet> push_descriptor_set_writes;
+
+    // Cached binding and validation support:
+    //
+    // For the lifespan of a given command buffer recording, do lazy evaluation, caching, and dirtying of
+    // expensive validation operations (typically per-draw)
+    typedef std::unordered_map<CMD_BUFFER_STATE *, TrackedBindings> TrackedBindingMap;
+    // Track the validation caching of bindings vs. the command buffer and draw state
+    typedef std::unordered_map<uint32_t, CMD_BUFFER_STATE::ImageLayoutUpdateCount> VersionedBindings;
+    struct CachedValidation {
+        TrackedBindings command_binding_and_usage;                                     // Persistent for the life of the recording
+        TrackedBindings non_dynamic_buffers;                                           // Persistent for the life of the recording
+        TrackedBindings dynamic_buffers;                                               // Dirtied (flushed) each BindDescriptorSet
+        std::unordered_map<const PIPELINE_STATE *, VersionedBindings> image_samplers;  // Tested vs. changes to CB's ImageLayout
+    };
+    typedef std::unordered_map<const CMD_BUFFER_STATE *, CachedValidation> CachedValidationMap;
+    // Image and ImageView bindings are validated per pipeline and are not invalidated by repeated binding
+    CachedValidationMap cached_validation_;
+};
+// For the "bindless" style resource usage with many descriptors, need to optimize binding and validation
+class PrefilterBindRequestMap {
+  public:
+    static const uint32_t kManyDescriptors_ = 64;  // TODO base this number on measured data
+    std::unique_ptr<BindingReqMap> filtered_map_;
+    const BindingReqMap &orig_map_;
+    const DescriptorSet &descriptor_set_;
+
+    PrefilterBindRequestMap(const DescriptorSet &ds, const BindingReqMap &in_map)
+        : filtered_map_(), orig_map_(in_map), descriptor_set_(ds) {}
+    const BindingReqMap &FilteredMap(const CMD_BUFFER_STATE &cb_state, const PIPELINE_STATE &);
+    bool IsManyDescriptors() const { return descriptor_set_.GetTotalDescriptorCount() > kManyDescriptors_; }
+};
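+// Rough usage sketch (comment only; 'descriptor_set', 'binding_req_map', 'cb_state' and
+// 'pipeline' are hypothetical placeholders): build the prefilter once per bind, then pull
+// the possibly reduced requirement map from it:
+//
+//   PrefilterBindRequestMap reduced_map(*descriptor_set, binding_req_map);
+//   const BindingReqMap &bindings_to_validate = reduced_map.FilteredMap(*cb_state, *pipeline);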
+}  // namespace cvdescriptorset
+#endif  // CORE_VALIDATION_DESCRIPTOR_SETS_H_
diff --git a/src/third_party/vulkan-validation-layers/src/layers/drawdispatch.cpp b/src/third_party/vulkan-validation-layers/src/layers/drawdispatch.cpp
new file mode 100644
index 0000000..b1f6b47
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/drawdispatch.cpp
@@ -0,0 +1,345 @@
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (C) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Cody Northrop <cnorthrop@google.com>
+ * Author: Michael Lentine <mlentine@google.com>
+ * Author: Tobin Ehlis <tobine@google.com>
+ * Author: Chia-I Wu <olv@google.com>
+ * Author: Chris Forbes <chrisf@ijw.co.nz>
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Ian Elliott <ianelliott@google.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ * Author: Dustin Graves <dustin@lunarg.com>
+ * Author: Jeremy Hayes <jeremy@lunarg.com>
+ * Author: Jon Ashburn <jon@lunarg.com>
+ * Author: Karl Schultz <karl@lunarg.com>
+ * Author: Mark Young <marky@lunarg.com>
+ * Author: Mike Schuchardt <mikes@lunarg.com>
+ * Author: Mike Weiblen <mikew@lunarg.com>
+ * Author: Tony Barbour <tony@LunarG.com>
+ * Author: John Zulauf <jzulauf@lunarg.com>
+ * Author: Shannon McPherson <shannon@lunarg.com>
+ */
+
+// Allow use of STL min and max functions in Windows
+#define NOMINMAX
+
+#include "chassis.h"
+#include "core_validation.h"
+
+// Generic function to handle validation for all CmdDraw* type functions
+bool CoreChecks::ValidateCmdDrawType(VkCommandBuffer cmd_buffer, bool indexed, VkPipelineBindPoint bind_point, CMD_TYPE cmd_type,
+                                     const char *caller, VkQueueFlags queue_flags, const char *queue_flag_code,
+                                     const char *renderpass_msg_code, const char *pipebound_msg_code,
+                                     const char *dynamic_state_msg_code) const {
+    bool skip = false;
+    const CMD_BUFFER_STATE *cb_state = GetCBState(cmd_buffer);
+    if (cb_state) {
+        skip |= ValidateCmdQueueFlags(cb_state, caller, queue_flags, queue_flag_code);
+        skip |= ValidateCmd(cb_state, cmd_type, caller);
+        skip |=
+            ValidateCmdBufDrawState(cb_state, cmd_type, indexed, bind_point, caller, pipebound_msg_code, dynamic_state_msg_code);
+        skip |= (VK_PIPELINE_BIND_POINT_GRAPHICS == bind_point) ? OutsideRenderPass(cb_state, caller, renderpass_msg_code)
+                                                                : InsideRenderPass(cb_state, caller, renderpass_msg_code);
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
+                                        uint32_t firstVertex, uint32_t firstInstance) const {
+    return ValidateCmdDrawType(commandBuffer, false, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAW, "vkCmdDraw()",
+                               VK_QUEUE_GRAPHICS_BIT, "VUID-vkCmdDraw-commandBuffer-cmdpool", "VUID-vkCmdDraw-renderpass",
+                               "VUID-vkCmdDraw-None-02700", "VUID-vkCmdDraw-commandBuffer-02701");
+}
+
+bool CoreChecks::PreCallValidateCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
+                                               uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) const {
+    bool skip = ValidateCmdDrawType(commandBuffer, true, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWINDEXED, "vkCmdDrawIndexed()",
+                                    VK_QUEUE_GRAPHICS_BIT, "VUID-vkCmdDrawIndexed-commandBuffer-cmdpool",
+                                    "VUID-vkCmdDrawIndexed-renderpass", "VUID-vkCmdDrawIndexed-None-02700",
+                                    "VUID-vkCmdDrawIndexed-commandBuffer-02701");
+    const CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    if (!skip && (cb_state->status & CBSTATUS_INDEX_BUFFER_BOUND)) {
+        unsigned int index_size = 0;
+        const auto &index_buffer_binding = cb_state->index_buffer_binding;
+        if (index_buffer_binding.index_type == VK_INDEX_TYPE_UINT16) {
+            index_size = 2;
+        } else if (index_buffer_binding.index_type == VK_INDEX_TYPE_UINT32) {
+            index_size = 4;
+        } else if (index_buffer_binding.index_type == VK_INDEX_TYPE_UINT8_EXT) {
+            index_size = 1;
+        }
+        VkDeviceSize end_offset = (index_size * ((VkDeviceSize)firstIndex + indexCount)) + index_buffer_binding.offset;
+        if (end_offset > index_buffer_binding.size) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+                            HandleToUint64(index_buffer_binding.buffer), "VUID-vkCmdDrawIndexed-indexSize-00463",
+                            "vkCmdDrawIndexed() index size (%d) * (firstIndex (%d) + indexCount (%d)) "
+                            "+ binding offset (%" PRIuLEAST64 ") = an ending offset of %" PRIuLEAST64
+                            " bytes, "
+                            "which is greater than the index buffer size (%" PRIuLEAST64 ").",
+                            index_size, firstIndex, indexCount, index_buffer_binding.offset, end_offset, index_buffer_binding.size);
+        }
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count,
+                                                uint32_t stride) const {
+    bool skip = ValidateCmdDrawType(commandBuffer, false, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWINDIRECT, "vkCmdDrawIndirect()",
+                                    VK_QUEUE_GRAPHICS_BIT, "VUID-vkCmdDrawIndirect-commandBuffer-cmdpool",
+                                    "VUID-vkCmdDrawIndirect-renderpass", "VUID-vkCmdDrawIndirect-None-02700",
+                                    "VUID-vkCmdDrawIndirect-commandBuffer-02701");
+    const BUFFER_STATE *buffer_state = GetBufferState(buffer);
+    skip |= ValidateMemoryIsBoundToBuffer(buffer_state, "vkCmdDrawIndirect()", "VUID-vkCmdDrawIndirect-buffer-02708");
+    skip |= ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, true, "VUID-vkCmdDrawIndirect-buffer-02709",
+                                     "vkCmdDrawIndirect()", "VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT");
+    if (count > 1) {
+        skip |= ValidateCmdDrawStrideWithStruct(commandBuffer, "VUID-vkCmdDrawIndirect-drawCount-00476", stride,
+                                                "VkDrawIndirectCommand", sizeof(VkDrawIndirectCommand));
+        skip |=
+            ValidateCmdDrawStrideWithBuffer(commandBuffer, "VUID-vkCmdDrawIndirect-drawCount-00488", stride,
+                                            "VkDrawIndirectCommand", sizeof(VkDrawIndirectCommand), count, offset, buffer_state);
+    }
+    // TODO: If the drawIndirectFirstInstance feature is not enabled, all the firstInstance members of the
+    // VkDrawIndirectCommand structures accessed by this command must be 0, which will require access to the contents of 'buffer'.
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+                                                       uint32_t count, uint32_t stride) const {
+    bool skip = ValidateCmdDrawType(
+        commandBuffer, true, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWINDEXEDINDIRECT, "vkCmdDrawIndexedIndirect()",
+        VK_QUEUE_GRAPHICS_BIT, "VUID-vkCmdDrawIndexedIndirect-commandBuffer-cmdpool", "VUID-vkCmdDrawIndexedIndirect-renderpass",
+        "VUID-vkCmdDrawIndexedIndirect-None-02700", "VUID-vkCmdDrawIndexedIndirect-commandBuffer-02701");
+    const BUFFER_STATE *buffer_state = GetBufferState(buffer);
+    skip |= ValidateMemoryIsBoundToBuffer(buffer_state, "vkCmdDrawIndexedIndirect()", "VUID-vkCmdDrawIndexedIndirect-buffer-02708");
+    skip |= ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, true,
+                                     "VUID-vkCmdDrawIndexedIndirect-buffer-02709", "vkCmdDrawIndexedIndirect()",
+                                     "VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT");
+    if (count > 1) {
+        skip |= ValidateCmdDrawStrideWithStruct(commandBuffer, "VUID-vkCmdDrawIndexedIndirect-drawCount-00528", stride,
+                                                "VkDrawIndexedIndirectCommand", sizeof(VkDrawIndexedIndirectCommand));
+        skip |= ValidateCmdDrawStrideWithBuffer(commandBuffer, "VUID-vkCmdDrawIndexedIndirect-drawCount-00540", stride,
+                                                "VkDrawIndexedIndirectCommand", sizeof(VkDrawIndexedIndirectCommand), count, offset,
+                                                buffer_state);
+    }
+    // TODO: If the drawIndirectFirstInstance feature is not enabled, all the firstInstance members of the
+    // VkDrawIndexedIndirectCommand structures accessed by this command must be 0, which will require access to the contents of
+    // 'buffer'.
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) const {
+    bool skip = false;
+    skip |= ValidateCmdDrawType(commandBuffer, false, VK_PIPELINE_BIND_POINT_COMPUTE, CMD_DISPATCH, "vkCmdDispatch()",
+                                VK_QUEUE_COMPUTE_BIT, "VUID-vkCmdDispatch-commandBuffer-cmdpool", "VUID-vkCmdDispatch-renderpass",
+                                "VUID-vkCmdDispatch-None-02700", kVUIDUndefined);
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) const {
+    bool skip =
+        ValidateCmdDrawType(commandBuffer, false, VK_PIPELINE_BIND_POINT_COMPUTE, CMD_DISPATCHINDIRECT, "vkCmdDispatchIndirect()",
+                            VK_QUEUE_COMPUTE_BIT, "VUID-vkCmdDispatchIndirect-commandBuffer-cmdpool",
+                            "VUID-vkCmdDispatchIndirect-renderpass", "VUID-vkCmdDispatchIndirect-None-02700", kVUIDUndefined);
+    const BUFFER_STATE *buffer_state = GetBufferState(buffer);
+    skip |= ValidateMemoryIsBoundToBuffer(buffer_state, "vkCmdDispatchIndirect()", "VUID-vkCmdDispatchIndirect-buffer-02708");
+    skip |=
+        ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, true, "VUID-vkCmdDispatchIndirect-buffer-02709",
+                                 "vkCmdDispatchIndirect()", "VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT");
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+                                                        VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
+                                                        uint32_t stride) const {
+    bool skip = false;
+    if (offset & 3) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdDrawIndirectCountKHR-offset-02710",
+                        "vkCmdDrawIndirectCountKHR() parameter, VkDeviceSize offset (0x%" PRIxLEAST64 "), is not a multiple of 4.",
+                        offset);
+    }
+
+    if (countBufferOffset & 3) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdDrawIndirectCountKHR-countBufferOffset-02716",
+                        "vkCmdDrawIndirectCountKHR() parameter, VkDeviceSize countBufferOffset (0x%" PRIxLEAST64
+                        "), is not a multiple of 4.",
+                        countBufferOffset);
+    }
+    skip |= ValidateCmdDrawStrideWithStruct(commandBuffer, "VUID-vkCmdDrawIndirectCountKHR-stride-03110", stride,
+                                            "VkDrawIndirectCommand", sizeof(VkDrawIndirectCommand));
+    if (maxDrawCount > 1) {
+        const BUFFER_STATE *buffer_state = GetBufferState(buffer);
+        skip |= ValidateCmdDrawStrideWithBuffer(commandBuffer, "VUID-vkCmdDrawIndirectCountKHR-maxDrawCount-03111", stride,
+                                                "VkDrawIndirectCommand", sizeof(VkDrawIndirectCommand), maxDrawCount, offset,
+                                                buffer_state);
+    }
+
+    skip |= ValidateCmdDrawType(commandBuffer, false, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWINDIRECTCOUNTKHR,
+                                "vkCmdDrawIndirectCountKHR()", VK_QUEUE_GRAPHICS_BIT,
+                                "VUID-vkCmdDrawIndirectCountKHR-commandBuffer-cmdpool", "VUID-vkCmdDrawIndirectCountKHR-renderpass",
+                                "VUID-vkCmdDrawIndirectCountKHR-None-02700", "VUID-vkCmdDrawIndirectCountKHR-commandBuffer-02701");
+    const BUFFER_STATE *buffer_state = GetBufferState(buffer);
+    const BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
+    skip |=
+        ValidateMemoryIsBoundToBuffer(buffer_state, "vkCmdDrawIndirectCountKHR()", "VUID-vkCmdDrawIndirectCountKHR-buffer-02708");
+    skip |= ValidateMemoryIsBoundToBuffer(count_buffer_state, "vkCmdDrawIndirectCountKHR()",
+                                          "VUID-vkCmdDrawIndirectCountKHR-countBuffer-02714");
+    skip |= ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, true,
+                                     "VUID-vkCmdDrawIndirectCountKHR-buffer-02709", "vkCmdDrawIndirectCountKHR()",
+                                     "VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT");
+    skip |= ValidateBufferUsageFlags(count_buffer_state, VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, true,
+                                     "VUID-vkCmdDrawIndirectCountKHR-countBuffer-02715", "vkCmdDrawIndirectCountKHR()",
+                                     "VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT");
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+                                                               VkBuffer countBuffer, VkDeviceSize countBufferOffset,
+                                                               uint32_t maxDrawCount, uint32_t stride) const {
+    bool skip = false;
+    if (offset & 3) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdDrawIndexedIndirectCountKHR-offset-02710",
+                        "vkCmdDrawIndexedIndirectCountKHR() parameter, VkDeviceSize offset (0x%" PRIxLEAST64
+                        "), is not a multiple of 4.",
+                        offset);
+    }
+
+    if (countBufferOffset & 3) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdDrawIndexedIndirectCountKHR-countBufferOffset-02716",
+                        "vkCmdDrawIndexedIndirectCountKHR() parameter, VkDeviceSize countBufferOffset (0x%" PRIxLEAST64
+                        "), is not a multiple of 4.",
+                        countBufferOffset);
+    }
+
+    skip |= ValidateCmdDrawStrideWithStruct(commandBuffer, "VUID-vkCmdDrawIndexedIndirectCountKHR-stride-03142", stride,
+                                            "VkDrawIndirectCommand", sizeof(VkDrawIndexedIndirectCommand));
+    if (maxDrawCount > 1) {
+        const BUFFER_STATE *buffer_state = GetBufferState(buffer);
+        skip |= ValidateCmdDrawStrideWithBuffer(commandBuffer, "VUID-vkCmdDrawIndexedIndirectCountKHR-maxDrawCount-03143", stride,
+                                                "VkDrawIndirectCommand", sizeof(VkDrawIndexedIndirectCommand), maxDrawCount, offset,
+                                                buffer_state);
+    }
+
+    skip |= ValidateCmdDrawType(
+        commandBuffer, true, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWINDEXEDINDIRECTCOUNTKHR, "vkCmdDrawIndexedIndirectCountKHR()",
+        VK_QUEUE_GRAPHICS_BIT, "VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-cmdpool",
+        "VUID-vkCmdDrawIndexedIndirectCountKHR-renderpass", "VUID-vkCmdDrawIndexedIndirectCountKHR-None-02700",
+        "VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-02701");
+    const BUFFER_STATE *buffer_state = GetBufferState(buffer);
+    const BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
+    skip |= ValidateMemoryIsBoundToBuffer(buffer_state, "vkCmdDrawIndexedIndirectCountKHR()",
+                                          "VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-02708");
+    skip |= ValidateMemoryIsBoundToBuffer(count_buffer_state, "vkCmdDrawIndexedIndirectCountKHR()",
+                                          "VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-02714");
+    skip |= ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, true,
+                                     "VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-02709", "vkCmdDrawIndexedIndirectCountKHR()",
+                                     "VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT");
+    skip |= ValidateBufferUsageFlags(count_buffer_state, VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, true,
+                                     "VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-02715",
+                                     "vkCmdDrawIndexedIndirectCountKHR()", "VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT");
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer,
+                                               VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer,
+                                               VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride,
+                                               VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset,
+                                               VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer,
+                                               VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride,
+                                               uint32_t width, uint32_t height, uint32_t depth) const {
+    bool skip =
+        ValidateCmdDrawType(commandBuffer, true, VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, CMD_TRACERAYSNV, "vkCmdTraceRaysNV()",
+                            VK_QUEUE_COMPUTE_BIT, "VUID-vkCmdTraceRaysNV-commandBuffer-cmdpool", "VUID-vkCmdTraceRaysNV-renderpass",
+                            "VUID-vkCmdTraceRaysNV-None-02700", "VUID-vkCmdTraceRaysNV-commandBuffer-02701");
+    return skip;
+}
+
+void CoreChecks::PostCallRecordCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer,
+                                              VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer,
+                                              VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride,
+                                              VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset,
+                                              VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer,
+                                              VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride,
+                                              uint32_t width, uint32_t height, uint32_t depth) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_RAY_TRACING_NV);
+    cb_state->hasTraceRaysCmd = true;
+}
+
+bool CoreChecks::PreCallValidateCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask) const {
+    bool skip = ValidateCmdDrawType(commandBuffer, false, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWMESHTASKSNV,
+                                    "vkCmdDrawMeshTasksNV()", VK_QUEUE_GRAPHICS_BIT,
+                                    "VUID-vkCmdDrawMeshTasksNV-commandBuffer-cmdpool", "VUID-vkCmdDrawMeshTasksNV-renderpass",
+                                    "VUID-vkCmdDrawMeshTasksNV-None-02700", "VUID-vkCmdDrawMeshTasksNV-commandBuffer-02701");
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+                                                           uint32_t drawCount, uint32_t stride) const {
+    bool skip = ValidateCmdDrawType(commandBuffer, false, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWMESHTASKSINDIRECTNV,
+                                    "vkCmdDrawMeshTasksIndirectNV()", VK_QUEUE_GRAPHICS_BIT,
+                                    "VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-cmdpool",
+                                    "VUID-vkCmdDrawMeshTasksIndirectNV-renderpass", "VUID-vkCmdDrawMeshTasksIndirectNV-None-02700",
+                                    "VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-02701");
+    const BUFFER_STATE *buffer_state = GetBufferState(buffer);
+    skip |= ValidateMemoryIsBoundToBuffer(buffer_state, "vkCmdDrawMeshTasksIndirectNV()",
+                                          "VUID-vkCmdDrawMeshTasksIndirectNV-buffer-02708");
+    skip |= ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, true,
+                                     "VUID-vkCmdDrawMeshTasksIndirectNV-buffer-02709", "vkCmdDrawMeshTasksIndirectNV()",
+                                     "VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT");
+    if (drawCount > 1) {
+        skip |= ValidateCmdDrawStrideWithBuffer(commandBuffer, "VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02157", stride,
+                                                "VkDrawMeshTasksIndirectCommandNV", sizeof(VkDrawMeshTasksIndirectCommandNV),
+                                                drawCount, offset, buffer_state);
+    }
+    return skip;
+}
+
+bool CoreChecks::PreCallValidateCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+                                                                VkBuffer countBuffer, VkDeviceSize countBufferOffset,
+                                                                uint32_t maxDrawCount, uint32_t stride) const {
+    bool skip = ValidateCmdDrawType(
+        commandBuffer, false, VK_PIPELINE_BIND_POINT_GRAPHICS, CMD_DRAWMESHTASKSINDIRECTCOUNTNV,
+        "vkCmdDrawMeshTasksIndirectCountNV()", VK_QUEUE_GRAPHICS_BIT,
+        "VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-cmdpool", "VUID-vkCmdDrawMeshTasksIndirectCountNV-renderpass",
+        "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02700", "VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-02701");
+    const BUFFER_STATE *buffer_state = GetBufferState(buffer);
+    const BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
+    skip |= ValidateMemoryIsBoundToBuffer(buffer_state, "vkCmdDrawMeshTasksIndirectCountNV()",
+                                          "VUID-vkCmdDrawMeshTasksIndirectCountNV-buffer-02708");
+    skip |= ValidateMemoryIsBoundToBuffer(count_buffer_state, "vkCmdDrawMeshTasksIndirectCountNV()",
+                                          "VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02714");
+    skip |= ValidateBufferUsageFlags(buffer_state, VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, true,
+                                     "VUID-vkCmdDrawMeshTasksIndirectCountNV-buffer-02709",
+                                     "vkCmdDrawMeshTasksIndirectCountNV()", "VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT");
+    skip |= ValidateBufferUsageFlags(count_buffer_state, VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, true,
+                                     "VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02715",
+                                     "vkCmdDrawMeshTasksIndirectCountNV()", "VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT");
+    skip |= ValidateCmdDrawStrideWithStruct(commandBuffer, "VUID-vkCmdDrawMeshTasksIndirectCountNV-stride-02182", stride,
+                                            "VkDrawMeshTasksIndirectCommandNV", sizeof(VkDrawMeshTasksIndirectCommandNV));
+    if (maxDrawCount > 1) {
+        skip |= ValidateCmdDrawStrideWithBuffer(commandBuffer, "VUID-vkCmdDrawMeshTasksIndirectCountNV-maxDrawCount-02183", stride,
+                                                "VkDrawMeshTasksIndirectCommandNV", sizeof(VkDrawMeshTasksIndirectCommandNV),
+                                                maxDrawCount, offset, buffer_state);
+    }
+    return skip;
+}
diff --git a/src/third_party/vulkan-validation-layers/src/layers/generated/.clang-format b/src/third_party/vulkan-validation-layers/src/layers/generated/.clang-format
new file mode 100644
index 0000000..f7ef8f3
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/generated/.clang-format
@@ -0,0 +1,6 @@
+---
+# Disable clang-format for generated code
+DisableFormat: true
+# C-F has a bug in which it insists on sorting includes even if disabled
+SortIncludes: false
+...
diff --git a/src/third_party/vulkan-validation-layers/src/layers/generated/chassis.cpp b/src/third_party/vulkan-validation-layers/src/layers/generated/chassis.cpp
new file mode 100644
index 0000000..502fec1
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/generated/chassis.cpp
@@ -0,0 +1,9933 @@
+
+// This file is ***GENERATED***.  Do Not Edit.
+// See layer_chassis_generator.py for modifications.
+
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ */
+
+
+#include <string.h>
+#include <mutex>
+
+#define VALIDATION_ERROR_MAP_IMPL
+
+#include "chassis.h"
+#include "layer_chassis_dispatch.h"
+
+small_unordered_map<void*, ValidationObject*, 2> layer_data_map;
+
+// Global unique object identifier.
+std::atomic<uint64_t> global_unique_id(1ULL);
+// Map uniqueID to actual object handle. Accesses to the map itself are
+// internally synchronized.
+vl_concurrent_unordered_map<uint64_t, uint64_t, 4, HashedUint64> unique_id_mapping;
+
+bool wrap_handles = true;
+
+#define OBJECT_LAYER_NAME "VK_LAYER_KHRONOS_validation"
+#define OBJECT_LAYER_DESCRIPTION "khronos_validation"
+
+// Include layer validation object definitions
+#include "best_practices.h"
+#include "core_validation.h"
+#include "command_counter.h"
+#include "gpu_validation.h"
+#include "object_lifetime_validation.h"
+#include "stateless_validation.h"
+#include "thread_safety.h"
+
+namespace vulkan_layer_chassis {
+
+using std::unordered_map;
+
+static const VkLayerProperties global_layer = {
+    OBJECT_LAYER_NAME, VK_LAYER_API_VERSION, 1, "LunarG validation Layer",
+};
+
+static const VkExtensionProperties instance_extensions[] = {{VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION},
+                                                            {VK_EXT_DEBUG_UTILS_EXTENSION_NAME, VK_EXT_DEBUG_UTILS_SPEC_VERSION}};
+static const VkExtensionProperties device_extensions[] = {
+    {VK_EXT_VALIDATION_CACHE_EXTENSION_NAME, VK_EXT_VALIDATION_CACHE_SPEC_VERSION},
+    {VK_EXT_DEBUG_MARKER_EXTENSION_NAME, VK_EXT_DEBUG_MARKER_SPEC_VERSION},
+};
+
+typedef struct {
+    bool is_instance_api;
+    void* funcptr;
+} function_data;
+
+extern const std::unordered_map<std::string, function_data> name_to_funcptr_map;
+
+// Manually written functions
+
+// Check enabled instance extensions against supported instance extension whitelist
+static void InstanceExtensionWhitelist(ValidationObject *layer_data, const VkInstanceCreateInfo *pCreateInfo, VkInstance instance) {
+    for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
+        // Check for recognized instance extensions
+        if (!white_list(pCreateInfo->ppEnabledExtensionNames[i], kInstanceExtensionNames)) {
+            log_msg(layer_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                    kVUIDUndefined,
+                    "Instance Extension %s is not supported by this layer.  Using this extension may adversely affect validation "
+                    "results and/or produce undefined behavior.",
+                    pCreateInfo->ppEnabledExtensionNames[i]);
+        }
+    }
+}
+
+// Check enabled device extensions against supported device extension whitelist
+static void DeviceExtensionWhitelist(ValidationObject *layer_data, const VkDeviceCreateInfo *pCreateInfo, VkDevice device) {
+    for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
+        // Check for recognized device extensions
+        if (!white_list(pCreateInfo->ppEnabledExtensionNames[i], kDeviceExtensionNames)) {
+            log_msg(layer_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                    kVUIDUndefined,
+                    "Device Extension %s is not supported by this layer.  Using this extension may adversely affect validation "
+                    "results and/or produce undefined behavior.",
+                    pCreateInfo->ppEnabledExtensionNames[i]);
+        }
+    }
+}
+
+
+// Process validation features, flags and settings specified through extensions, a layer settings file, or environment variables
+
+static const std::unordered_map<std::string, VkValidationFeatureDisableEXT> VkValFeatureDisableLookup = {
+    {"VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT", VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT},
+    {"VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT", VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT},
+    {"VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT", VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT},
+    {"VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT", VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT},
+    {"VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT", VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT},
+    {"VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT", VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT},
+    {"VK_VALIDATION_FEATURE_DISABLE_ALL_EXT", VK_VALIDATION_FEATURE_DISABLE_ALL_EXT},
+};
+
+static const std::unordered_map<std::string, VkValidationFeatureEnableEXT> VkValFeatureEnableLookup = {
+    {"VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT", VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT},
+    {"VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT", VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT},
+    {"VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT", VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT},
+};
+
+static const std::unordered_map<std::string, ValidationCheckDisables> ValidationDisableLookup = {
+    {"VALIDATION_CHECK_DISABLE_COMMAND_BUFFER_STATE", VALIDATION_CHECK_DISABLE_COMMAND_BUFFER_STATE},
+    {"VALIDATION_CHECK_DISABLE_OBJECT_IN_USE", VALIDATION_CHECK_DISABLE_OBJECT_IN_USE},
+    {"VALIDATION_CHECK_DISABLE_IDLE_DESCRIPTOR_SET", VALIDATION_CHECK_DISABLE_IDLE_DESCRIPTOR_SET},
+    {"VALIDATION_CHECK_DISABLE_PUSH_CONSTANT_RANGE", VALIDATION_CHECK_DISABLE_PUSH_CONSTANT_RANGE},
+    {"VALIDATION_CHECK_DISABLE_QUERY_VALIDATION", VALIDATION_CHECK_DISABLE_QUERY_VALIDATION},
+    {"VALIDATION_CHECK_DISABLE_IMAGE_LAYOUT_VALIDATION", VALIDATION_CHECK_DISABLE_IMAGE_LAYOUT_VALIDATION},
+};
+
+// Set the local disable flag for the appropriate VALIDATION_CHECK_DISABLE enum
+void SetValidationDisable(CHECK_DISABLED* disable_data, const ValidationCheckDisables disable_id) {
+    switch (disable_id) {
+        case VALIDATION_CHECK_DISABLE_COMMAND_BUFFER_STATE:
+            disable_data->command_buffer_state = true;
+            break;
+        case VALIDATION_CHECK_DISABLE_OBJECT_IN_USE:
+            disable_data->object_in_use = true;
+            break;
+        case VALIDATION_CHECK_DISABLE_IDLE_DESCRIPTOR_SET:
+            disable_data->idle_descriptor_set = true;
+            break;
+        case VALIDATION_CHECK_DISABLE_PUSH_CONSTANT_RANGE:
+            disable_data->push_constant_range = true;
+            break;
+        case VALIDATION_CHECK_DISABLE_QUERY_VALIDATION:
+            disable_data->query_validation = true;
+            break;
+        case VALIDATION_CHECK_DISABLE_IMAGE_LAYOUT_VALIDATION:
+            disable_data->image_layout_validation = true;
+            break;
+        default:
+            assert(false);  // unrecognized ValidationCheckDisables value
+    }
+}
+
+// Set the local disable flag for a single VK_VALIDATION_FEATURE_DISABLE_* flag
+void SetValidationFeatureDisable(CHECK_DISABLED* disable_data, const VkValidationFeatureDisableEXT feature_disable) {
+    switch (feature_disable) {
+        case VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT:
+            disable_data->shader_validation = true;
+            break;
+        case VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT:
+            disable_data->thread_safety = true;
+            break;
+        case VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT:
+            disable_data->stateless_checks = true;
+            break;
+        case VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT:
+            disable_data->object_tracking = true;
+            break;
+        case VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT:
+            disable_data->core_checks = true;
+            break;
+        case VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT:
+            disable_data->handle_wrapping = true;
+            break;
+        case VK_VALIDATION_FEATURE_DISABLE_ALL_EXT:
+            // Set all disabled flags to true
+            disable_data->SetAll(true);
+            break;
+        default:
+            break;
+    }
+}
+
+// Set the local enable flag for a single VK_VALIDATION_FEATURE_ENABLE_* flag
+void SetValidationFeatureEnable(CHECK_ENABLED *enable_data, const VkValidationFeatureEnableEXT feature_enable) {
+    switch (feature_enable) {
+        case VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT:
+            enable_data->gpu_validation = true;
+            break;
+        case VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT:
+            enable_data->gpu_validation_reserve_binding_slot = true;
+            break;
+        case VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT:
+            enable_data->best_practices = true;
+            break;
+        default:
+            break;
+    }
+}
+
+// Set the local disable flag for settings specified through the VK_EXT_validation_flags extension
+void SetValidationFlags(CHECK_DISABLED* disables, const VkValidationFlagsEXT* val_flags_struct) {
+    for (uint32_t i = 0; i < val_flags_struct->disabledValidationCheckCount; ++i) {
+        switch (val_flags_struct->pDisabledValidationChecks[i]) {
+            case VK_VALIDATION_CHECK_SHADERS_EXT:
+                disables->shader_validation = true;
+                break;
+            case VK_VALIDATION_CHECK_ALL_EXT:
+                // Set all disabled flags to true
+                disables->SetAll(true);
+                break;
+            default:
+                break;
+        }
+    }
+}
+
+// Process Validation Features flags specified through the ValidationFeature extension
+void SetValidationFeatures(CHECK_DISABLED *disable_data, CHECK_ENABLED *enable_data,
+                           const VkValidationFeaturesEXT *val_features_struct) {
+    for (uint32_t i = 0; i < val_features_struct->disabledValidationFeatureCount; ++i) {
+        SetValidationFeatureDisable(disable_data, val_features_struct->pDisabledValidationFeatures[i]);
+    }
+    for (uint32_t i = 0; i < val_features_struct->enabledValidationFeatureCount; ++i) {
+        SetValidationFeatureEnable(enable_data, val_features_struct->pEnabledValidationFeatures[i]);
+    }
+}
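+
+// Illustrative application-side sketch (an assumption, not part of this layer): an app opts into
+// best-practices checks by chaining VkValidationFeaturesEXT into VkInstanceCreateInfo::pNext,
+// which is the struct SetValidationFeatures() above consumes at vkCreateInstance time.
+//
+//     VkValidationFeatureEnableEXT enables[] = {VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT};
+//     VkValidationFeaturesEXT features = {VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT};
+//     features.enabledValidationFeatureCount = 1;
+//     features.pEnabledValidationFeatures = enables;
+//     VkInstanceCreateInfo ci = {VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO};
+//     ci.pNext = &features;
+//     vkCreateInstance(&ci, nullptr, &instance);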
+
+// Given a string representation of a list of enable enum values, call the appropriate setter function
+void SetLocalEnableSetting(std::string list_of_enables, std::string delimiter, CHECK_ENABLED* enables) {
+    size_t pos = 0;
+    std::string token;
+    while (list_of_enables.length() != 0) {
+        pos = list_of_enables.find(delimiter);
+        if (pos != std::string::npos) {
+            token = list_of_enables.substr(0, pos);
+        } else {
+            pos = list_of_enables.length() - delimiter.length();
+            token = list_of_enables;
+        }
+        if (token.find("VK_VALIDATION_FEATURE_ENABLE_") != std::string::npos) {
+            auto result = VkValFeatureEnableLookup.find(token);
+            if (result != VkValFeatureEnableLookup.end()) {
+                SetValidationFeatureEnable(enables, result->second);
+            }
+        }
+        list_of_enables.erase(0, pos + delimiter.length());
+    }
+}
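+
+// Example (illustrative) of the string this parses: a comma-delimited config value such as
+//   "VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT,VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT"
+// is split on the delimiter, and each recognized token sets the matching CHECK_ENABLED flag.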
+
+// Given a string representation of a list of disable enum values, call the appropriate setter function
+void SetLocalDisableSetting(std::string list_of_disables, std::string delimiter, CHECK_DISABLED* disables) {
+    size_t pos = 0;
+    std::string token;
+    while (list_of_disables.length() != 0) {
+        pos = list_of_disables.find(delimiter);
+        if (pos != std::string::npos) {
+            token = list_of_disables.substr(0, pos);
+        } else {
+            pos = list_of_disables.length() - delimiter.length();
+            token = list_of_disables;
+        }
+        if (token.find("VK_VALIDATION_FEATURE_DISABLE_") != std::string::npos) {
+            auto result = VkValFeatureDisableLookup.find(token);
+            if (result != VkValFeatureDisableLookup.end()) {
+                SetValidationFeatureDisable(disables, result->second);
+            }
+        }
+        if (token.find("VALIDATION_CHECK_DISABLE_") != std::string::npos) {
+            auto result = ValidationDisableLookup.find(token);
+            if (result != ValidationDisableLookup.end()) {
+                SetValidationDisable(disables, result->second);
+            }
+        }
+        list_of_disables.erase(0, pos + delimiter.length());
+    }
+}
+
+// Process enables and disables set through the vk_layer_settings.txt config file or through an environment variable
+void ProcessConfigAndEnvSettings(const char* layer_description, CHECK_ENABLED* enables, CHECK_DISABLED* disables) {
+    std::string enable_key = layer_description;
+    std::string disable_key = layer_description;
+    enable_key.append(".enables");
+    disable_key.append(".disables");
+    std::string list_of_config_enables = getLayerOption(enable_key.c_str());
+    std::string list_of_env_enables = GetLayerEnvVar("VK_LAYER_ENABLES");
+    std::string list_of_config_disables = getLayerOption(disable_key.c_str());
+    std::string list_of_env_disables = GetLayerEnvVar("VK_LAYER_DISABLES");
+#if defined(_WIN32)
+    std::string env_delimiter = ";";
+#else
+    std::string env_delimiter = ":";
+#endif
+    SetLocalEnableSetting(list_of_config_enables, ",", enables);
+    SetLocalEnableSetting(list_of_env_enables, env_delimiter, enables);
+    SetLocalDisableSetting(list_of_config_disables, ",", disables);
+    SetLocalDisableSetting(list_of_env_disables, env_delimiter, disables);
+}
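+
+// Illustrative settings (assuming OBJECT_LAYER_DESCRIPTION expands to something like "khronos_validation"):
+//   vk_layer_settings.txt:      khronos_validation.enables = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT
+//   environment (non-Windows):  export VK_LAYER_DISABLES=VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT:VALIDATION_CHECK_DISABLE_QUERY_VALIDATION
+// Config-file lists are comma-delimited; the environment lists use ';' on Windows and ':' elsewhere.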
+
+
+// Non-code-generated chassis API functions
+
+VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice device, const char *funcName) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!ApiParentExtensionEnabled(funcName, &layer_data->device_extensions)) {
+        return nullptr;
+    }
+    const auto &item = name_to_funcptr_map.find(funcName);
+    if (item != name_to_funcptr_map.end()) {
+        if (item->second.is_instance_api) {
+            return nullptr;
+        } else {
+            return reinterpret_cast<PFN_vkVoidFunction>(item->second.funcptr);
+        }
+    }
+    auto &table = layer_data->device_dispatch_table;
+    if (!table.GetDeviceProcAddr) return nullptr;
+    return table.GetDeviceProcAddr(device, funcName);
+}
+
+VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *funcName) {
+    const auto &item = name_to_funcptr_map.find(funcName);
+    if (item != name_to_funcptr_map.end()) {
+        return reinterpret_cast<PFN_vkVoidFunction>(item->second.funcptr);
+    }
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    auto &table = layer_data->instance_dispatch_table;
+    if (!table.GetInstanceProcAddr) return nullptr;
+    return table.GetInstanceProcAddr(instance, funcName);
+}
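+
+// Note: both *ProcAddr hooks above resolve against the layer's own name_to_funcptr_map first and
+// only fall through to the next layer's dispatch table when the name is not intercepted here.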
+
+VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(uint32_t *pCount, VkLayerProperties *pProperties) {
+    return util_GetLayerProperties(1, &global_layer, pCount, pProperties);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
+                                                              VkLayerProperties *pProperties) {
+    return util_GetLayerProperties(1, &global_layer, pCount, pProperties);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
+                                                                    VkExtensionProperties *pProperties) {
+    if (pLayerName && !strcmp(pLayerName, global_layer.layerName))
+        return util_GetExtensionProperties(ARRAY_SIZE(instance_extensions), instance_extensions, pCount, pProperties);
+
+    return VK_ERROR_LAYER_NOT_PRESENT;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char *pLayerName,
+                                                                  uint32_t *pCount, VkExtensionProperties *pProperties) {
+    if (pLayerName && !strcmp(pLayerName, global_layer.layerName))
+        return util_GetExtensionProperties(ARRAY_SIZE(device_extensions), device_extensions, pCount, pProperties);
+    assert(physicalDevice);
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    return layer_data->instance_dispatch_table.EnumerateDeviceExtensionProperties(physicalDevice, pLayerName, pCount, pProperties);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
+                                              VkInstance *pInstance) {
+    VkLayerInstanceCreateInfo* chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
+
+    assert(chain_info->u.pLayerInfo);
+    PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
+    PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance)fpGetInstanceProcAddr(NULL, "vkCreateInstance");
+    if (fpCreateInstance == NULL) return VK_ERROR_INITIALIZATION_FAILED;
+    chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
+    uint32_t specified_version = (pCreateInfo->pApplicationInfo ? pCreateInfo->pApplicationInfo->apiVersion : VK_API_VERSION_1_0);
+    uint32_t api_version = (specified_version < VK_API_VERSION_1_1) ? VK_API_VERSION_1_0 : VK_API_VERSION_1_1;
+    auto report_data = new debug_report_data{};
+    report_data->instance_pnext_chain = SafePnextCopy(pCreateInfo->pNext);
+    ActivateInstanceDebugCallbacks(report_data);
+
+    CHECK_ENABLED local_enables {};
+    CHECK_DISABLED local_disables {};
+    const auto *validation_features_ext = lvl_find_in_chain<VkValidationFeaturesEXT>(pCreateInfo->pNext);
+    if (validation_features_ext) {
+        SetValidationFeatures(&local_disables, &local_enables, validation_features_ext);
+    }
+    const auto *validation_flags_ext = lvl_find_in_chain<VkValidationFlagsEXT>(pCreateInfo->pNext);
+    if (validation_flags_ext) {
+        SetValidationFlags(&local_disables, validation_flags_ext);
+    }
+    ProcessConfigAndEnvSettings(OBJECT_LAYER_DESCRIPTION, &local_enables, &local_disables);
+
+    // Create temporary dispatch vector for pre-calls until instance is created
+    std::vector<ValidationObject*> local_object_dispatch;
+    // Add VOs to dispatch vector. Order here will be the validation dispatch order!
+    auto thread_checker = new ThreadSafety(nullptr);
+    if (!local_disables.thread_safety) {
+        local_object_dispatch.emplace_back(thread_checker);
+    }
+    thread_checker->container_type = LayerObjectTypeThreading;
+    thread_checker->api_version = api_version;
+    thread_checker->report_data = report_data;
+    auto parameter_validation = new StatelessValidation;
+    if (!local_disables.stateless_checks) {
+        local_object_dispatch.emplace_back(parameter_validation);
+    }
+    parameter_validation->container_type = LayerObjectTypeParameterValidation;
+    parameter_validation->api_version = api_version;
+    parameter_validation->report_data = report_data;
+    auto object_tracker = new ObjectLifetimes;
+    if (!local_disables.object_tracking) {
+        local_object_dispatch.emplace_back(object_tracker);
+    }
+    object_tracker->container_type = LayerObjectTypeObjectTracker;
+    object_tracker->api_version = api_version;
+    object_tracker->report_data = report_data;
+    auto core_checks = new CoreChecks;
+    if (!local_disables.core_checks) {
+        local_object_dispatch.emplace_back(core_checks);
+    }
+    core_checks->container_type = LayerObjectTypeCoreValidation;
+    core_checks->api_version = api_version;
+    core_checks->report_data = report_data;
+    auto best_practices = new BestPractices;
+    if (local_enables.best_practices) {
+        local_object_dispatch.emplace_back(best_practices);
+    }
+    best_practices->container_type = LayerObjectTypeBestPractices;
+    best_practices->api_version = api_version;
+    best_practices->report_data = report_data;
+    auto gpu_assisted = new GpuAssisted;
+    if (local_enables.gpu_validation) {
+        local_object_dispatch.emplace_back(gpu_assisted);
+    }
+    gpu_assisted->container_type = LayerObjectTypeGpuAssisted;
+
+    // If handle wrapping is disabled via the ValidationFeatures extension, override build flag
+    if (local_disables.handle_wrapping) {
+        wrap_handles = false;
+    }
+
+    // Init dispatch array and call registration functions
+    for (auto intercept : local_object_dispatch) {
+        (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateInstance(pCreateInfo, pAllocator, pInstance);
+    }
+    for (auto intercept : local_object_dispatch) {
+        intercept->PreCallRecordCreateInstance(pCreateInfo, pAllocator, pInstance);
+    }
+
+    VkResult result = fpCreateInstance(pCreateInfo, pAllocator, pInstance);
+    if (result != VK_SUCCESS) return result;
+
+    auto framework = GetLayerDataPtr(get_dispatch_key(*pInstance), layer_data_map);
+
+    framework->object_dispatch = local_object_dispatch;
+    framework->container_type = LayerObjectTypeInstance;
+    framework->disabled = local_disables;
+    framework->enabled = local_enables;
+
+    framework->instance = *pInstance;
+    layer_init_instance_dispatch_table(*pInstance, &framework->instance_dispatch_table, fpGetInstanceProcAddr);
+    framework->report_data = report_data;
+    framework->api_version = api_version;
+    framework->instance_extensions.InitFromInstanceCreateInfo(specified_version, pCreateInfo);
+
+    layer_debug_messenger_actions(framework->report_data, pAllocator, OBJECT_LAYER_DESCRIPTION);
+
+    object_tracker->instance_dispatch_table = framework->instance_dispatch_table;
+    object_tracker->enabled = framework->enabled;
+    object_tracker->disabled = framework->disabled;
+    thread_checker->instance_dispatch_table = framework->instance_dispatch_table;
+    thread_checker->enabled = framework->enabled;
+    thread_checker->disabled = framework->disabled;
+    parameter_validation->instance_dispatch_table = framework->instance_dispatch_table;
+    parameter_validation->enabled = framework->enabled;
+    parameter_validation->disabled = framework->disabled;
+    core_checks->instance_dispatch_table = framework->instance_dispatch_table;
+    core_checks->instance = *pInstance;
+    core_checks->enabled = framework->enabled;
+    core_checks->disabled = framework->disabled;
+    core_checks->instance_state = core_checks;
+    best_practices->instance_dispatch_table = framework->instance_dispatch_table;
+    best_practices->enabled = framework->enabled;
+    best_practices->disabled = framework->disabled;
+    gpu_assisted->instance_dispatch_table = framework->instance_dispatch_table;
+    gpu_assisted->enabled = framework->enabled;
+    gpu_assisted->disabled = framework->disabled;
+
+    for (auto intercept : framework->object_dispatch) {
+        intercept->PostCallRecordCreateInstance(pCreateInfo, pAllocator, pInstance, result);
+    }
+
+    InstanceExtensionWhitelist(framework, pCreateInfo, *pInstance);
+    DeactivateInstanceDebugCallbacks(report_data);
+    return result;
+}
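+
+// Net effect of the emplace_back calls above: the instance-level validation dispatch order is
+// ThreadSafety -> StatelessValidation -> ObjectLifetimes -> CoreChecks -> BestPractices ->
+// GpuAssisted, with each object skipped when its corresponding disable (or missing enable) applies.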
+
+VKAPI_ATTR void VKAPI_CALL DestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator) {
+    dispatch_key key = get_dispatch_key(instance);
+    auto layer_data = GetLayerDataPtr(key, layer_data_map);
+    ActivateInstanceDebugCallbacks(layer_data->report_data);
+
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroyInstance(instance, pAllocator);
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroyInstance(instance, pAllocator);
+    }
+
+    layer_data->instance_dispatch_table.DestroyInstance(instance, pAllocator);
+
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroyInstance(instance, pAllocator);
+    }
+
+    DeactivateInstanceDebugCallbacks(layer_data->report_data);
+    FreePnextChain(layer_data->report_data->instance_pnext_chain);
+
+    layer_debug_utils_destroy_instance(layer_data->report_data);
+
+    for (auto item = layer_data->object_dispatch.begin(); item != layer_data->object_dispatch.end(); item++) {
+        delete *item;
+    }
+    FreeLayerDataPtr(key, layer_data_map);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
+                                            const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
+    VkLayerDeviceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
+
+    auto instance_interceptor = GetLayerDataPtr(get_dispatch_key(gpu), layer_data_map);
+
+    PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
+    PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
+    PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice)fpGetInstanceProcAddr(instance_interceptor->instance, "vkCreateDevice");
+    if (fpCreateDevice == NULL) {
+        return VK_ERROR_INITIALIZATION_FAILED;
+    }
+    chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
+
+    // Get physical device limits for device
+    VkPhysicalDeviceProperties device_properties = {};
+    instance_interceptor->instance_dispatch_table.GetPhysicalDeviceProperties(gpu, &device_properties);
+
+    // Set up the validation tables based on the application API version from the instance and the capabilities of the device driver
+    uint32_t effective_api_version = std::min(device_properties.apiVersion, instance_interceptor->api_version);
+
+    DeviceExtensions device_extensions = {};
+    device_extensions.InitFromDeviceCreateInfo(&instance_interceptor->instance_extensions, effective_api_version, pCreateInfo);
+    for (auto item : instance_interceptor->object_dispatch) {
+        item->device_extensions = device_extensions;
+    }
+
+    safe_VkDeviceCreateInfo modified_create_info(pCreateInfo);
+
+    bool skip = false;
+    for (auto intercept : instance_interceptor->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateDevice(gpu, pCreateInfo, pAllocator, pDevice);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : instance_interceptor->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateDevice(gpu, pCreateInfo, pAllocator, pDevice, &modified_create_info);
+    }
+
+    VkResult result = fpCreateDevice(gpu, reinterpret_cast<VkDeviceCreateInfo *>(&modified_create_info), pAllocator, pDevice);
+    if (result != VK_SUCCESS) {
+        return result;
+    }
+
+    auto device_interceptor = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
+    device_interceptor->container_type = LayerObjectTypeDevice;
+
+    // Save local info in device object
+    device_interceptor->phys_dev_properties.properties = device_properties;
+    device_interceptor->api_version = device_interceptor->device_extensions.InitFromDeviceCreateInfo(
+        &instance_interceptor->instance_extensions, effective_api_version, pCreateInfo);
+    device_interceptor->device_extensions = device_extensions;
+
+    layer_init_device_dispatch_table(*pDevice, &device_interceptor->device_dispatch_table, fpGetDeviceProcAddr);
+
+    device_interceptor->device = *pDevice;
+    device_interceptor->physical_device = gpu;
+    device_interceptor->instance = instance_interceptor->instance;
+    device_interceptor->report_data = instance_interceptor->report_data;
+
+    // Note that this defines the order in which the layer validation objects are called
+    auto thread_safety = new ThreadSafety(reinterpret_cast<ThreadSafety *>(instance_interceptor->GetValidationObject(instance_interceptor->object_dispatch, LayerObjectTypeThreading)));
+    thread_safety->container_type = LayerObjectTypeThreading;
+    if (!instance_interceptor->disabled.thread_safety) {
+        device_interceptor->object_dispatch.emplace_back(thread_safety);
+    }
+    auto stateless_validation = new StatelessValidation;
+    stateless_validation->container_type = LayerObjectTypeParameterValidation;
+    if (!instance_interceptor->disabled.stateless_checks) {
+        device_interceptor->object_dispatch.emplace_back(stateless_validation);
+    }
+    auto object_tracker = new ObjectLifetimes;
+    object_tracker->container_type = LayerObjectTypeObjectTracker;
+    if (!instance_interceptor->disabled.object_tracking) {
+        device_interceptor->object_dispatch.emplace_back(object_tracker);
+    }
+    auto core_checks = new CoreChecks;
+    core_checks->container_type = LayerObjectTypeCoreValidation;
+    core_checks->instance_state = reinterpret_cast<CoreChecks *>(
+        core_checks->GetValidationObject(instance_interceptor->object_dispatch, LayerObjectTypeCoreValidation));
+    if (!instance_interceptor->disabled.core_checks) {
+        // Only enable the command counters when needed.
+        if (device_extensions.vk_khr_performance_query) {
+            auto command_counter = new CommandCounter(core_checks);
+            command_counter->container_type = LayerObjectTypeDevice;
+            device_interceptor->object_dispatch.emplace_back(command_counter);
+        }
+        device_interceptor->object_dispatch.emplace_back(core_checks);
+    }
+    auto best_practices = new BestPractices;
+    best_practices->container_type = LayerObjectTypeBestPractices;
+    best_practices->instance_state = reinterpret_cast<BestPractices *>(
+        best_practices->GetValidationObject(instance_interceptor->object_dispatch, LayerObjectTypeBestPractices));
+    if (instance_interceptor->enabled.best_practices) {
+        device_interceptor->object_dispatch.emplace_back(best_practices);
+    }
+    auto gpu_assisted = new GpuAssisted;
+    gpu_assisted->container_type = LayerObjectTypeGpuAssisted;
+    gpu_assisted->instance_state = reinterpret_cast<GpuAssisted *>(
+        gpu_assisted->GetValidationObject(instance_interceptor->object_dispatch, LayerObjectTypeGpuAssisted));
+    if (instance_interceptor->enabled.gpu_validation) {
+        device_interceptor->object_dispatch.emplace_back(gpu_assisted);
+    }
+
+    // Set per-intercept common data items
+    for (auto dev_intercept : device_interceptor->object_dispatch) {
+        dev_intercept->device = *pDevice;
+        dev_intercept->physical_device = gpu;
+        dev_intercept->instance = instance_interceptor->instance;
+        dev_intercept->report_data = device_interceptor->report_data;
+        dev_intercept->device_dispatch_table = device_interceptor->device_dispatch_table;
+        dev_intercept->api_version = device_interceptor->api_version;
+        dev_intercept->disabled = instance_interceptor->disabled;
+        dev_intercept->enabled = instance_interceptor->enabled;
+        dev_intercept->instance_dispatch_table = instance_interceptor->instance_dispatch_table;
+        dev_intercept->instance_extensions = instance_interceptor->instance_extensions;
+        dev_intercept->device_extensions = device_interceptor->device_extensions;
+    }
+
+    for (auto intercept : instance_interceptor->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateDevice(gpu, pCreateInfo, pAllocator, pDevice, result);
+    }
+
+    DeviceExtensionWhitelist(device_interceptor, pCreateInfo, *pDevice);
+
+    return result;
+}
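+
+// Device-level dispatch order mirrors the instance path: ThreadSafety, StatelessValidation,
+// ObjectLifetimes, (CommandCounter +) CoreChecks, BestPractices, GpuAssisted, each added above
+// only when its checks are not disabled (or, for the last two, explicitly enabled).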
+
+VKAPI_ATTR void VKAPI_CALL DestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
+    dispatch_key key = get_dispatch_key(device);
+    auto layer_data = GetLayerDataPtr(key, layer_data_map);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroyDevice(device, pAllocator);
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroyDevice(device, pAllocator);
+    }
+
+    layer_data->device_dispatch_table.DestroyDevice(device, pAllocator);
+
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroyDevice(device, pAllocator);
+    }
+
+    for (auto item = layer_data->object_dispatch.begin(); item != layer_data->object_dispatch.end(); item++) {
+        delete *item;
+    }
+    FreeLayerDataPtr(key, layer_data_map);
+}
+
+
+// Special-case APIs for which core_validation needs custom parameter lists and/or modifies parameters
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateGraphicsPipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkGraphicsPipelineCreateInfo*         pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+
+    create_graphics_pipeline_api_state cgpl_state[LayerObjectTypeMaxEnum]{};
+
+    for (auto intercept : layer_data->object_dispatch) {
+        cgpl_state[intercept->container_type].pCreateInfos = pCreateInfos;
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, &(cgpl_state[intercept->container_type]));
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, &(cgpl_state[intercept->container_type]));
+    }
+
+    auto usepCreateInfos = (!cgpl_state[LayerObjectTypeGpuAssisted].pCreateInfos) ? pCreateInfos : cgpl_state[LayerObjectTypeGpuAssisted].pCreateInfos;
+
+    VkResult result = DispatchCreateGraphicsPipelines(device, pipelineCache, createInfoCount, usepCreateInfos, pAllocator, pPipelines);
+
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result, &(cgpl_state[intercept->container_type]));
+    }
+    return result;
+}
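+
+// The per-container cgpl_state slots let a PreCallRecord hook hand back replacement create infos;
+// as written above, only the LayerObjectTypeGpuAssisted slot is consulted when picking what is
+// actually dispatched, so GPU-assisted validation can substitute instrumented pipeline create infos.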
+
+// This API saves some core_validation pipeline state on the stack for performance purposes
+VKAPI_ATTR VkResult VKAPI_CALL CreateComputePipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkComputePipelineCreateInfo*          pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+
+    create_compute_pipeline_api_state ccpl_state[LayerObjectTypeMaxEnum]{};
+
+    for (auto intercept : layer_data->object_dispatch) {
+        ccpl_state[intercept->container_type].pCreateInfos = pCreateInfos;
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, &(ccpl_state[intercept->container_type]));
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, &(ccpl_state[intercept->container_type]));
+    }
+
+    auto usepCreateInfos = (!ccpl_state[LayerObjectTypeGpuAssisted].pCreateInfos) ? pCreateInfos : ccpl_state[LayerObjectTypeGpuAssisted].pCreateInfos;
+
+    VkResult result = DispatchCreateComputePipelines(device, pipelineCache, createInfoCount, usepCreateInfos, pAllocator, pPipelines);
+
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result, &(ccpl_state[intercept->container_type]));
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateRayTracingPipelinesNV(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkRayTracingPipelineCreateInfoNV*     pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+
+    create_ray_tracing_pipeline_api_state crtpl_state[LayerObjectTypeMaxEnum]{};
+
+    for (auto intercept : layer_data->object_dispatch) {
+        crtpl_state[intercept->container_type].pCreateInfos = pCreateInfos;
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos,
+                                                                      pAllocator, pPipelines, &(crtpl_state[intercept->container_type]));
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator,
+                                                            pPipelines, &(crtpl_state[intercept->container_type]));
+    }
+
+    VkResult result = DispatchCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator,
+                                                             pPipelines, result, &(crtpl_state[intercept->container_type]));
+    }
+    return result;
+}
+
+// This API needs the ability to modify a down-chain parameter
+VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineLayout(
+    VkDevice                                    device,
+    const VkPipelineLayoutCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineLayout*                           pPipelineLayout) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+
+    create_pipeline_layout_api_state cpl_state{};
+    cpl_state.modified_create_info = *pCreateInfo;
+
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout, &cpl_state);
+    }
+    VkResult result = DispatchCreatePipelineLayout(device, &cpl_state.modified_create_info, pAllocator, pPipelineLayout);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout, result);
+    }
+    return result;
+}
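+
+// The dispatch above deliberately passes cpl_state.modified_create_info rather than the caller's
+// pCreateInfo, so a PreCallRecord hook may rewrite the layout on its way down the chain (e.g. how
+// the GPU-assisted reserve-binding-slot enable can presumably take effect).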
+
+// This API needs some local stack data for performance reasons and also may modify a parameter
+VKAPI_ATTR VkResult VKAPI_CALL CreateShaderModule(
+    VkDevice                                    device,
+    const VkShaderModuleCreateInfo*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkShaderModule*                             pShaderModule) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+
+    create_shader_module_api_state csm_state{};
+    csm_state.instrumented_create_info = *pCreateInfo;
+
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule, &csm_state);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule, &csm_state);
+    }
+    VkResult result = DispatchCreateShaderModule(device, &csm_state.instrumented_create_info, pAllocator, pShaderModule);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule, result, &csm_state);
+    }
+    return result;
+}
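+
+// Same pattern as CreatePipelineLayout: csm_state.instrumented_create_info is what gets dispatched,
+// which gives GPU-assisted validation the opportunity to substitute an instrumented shader module.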
+
+VKAPI_ATTR VkResult VKAPI_CALL AllocateDescriptorSets(
+    VkDevice                                    device,
+    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
+    VkDescriptorSet*                            pDescriptorSets) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+
+    cvdescriptorset::AllocateDescriptorSetsData ads_state(pAllocateInfo->descriptorSetCount);
+
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets, &ads_state);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
+    }
+    VkResult result = DispatchAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets, result, &ads_state);
+    }
+    return result;
+}
+
+// This API needs the ability to modify a down-chain parameter
+VKAPI_ATTR VkResult VKAPI_CALL CreateBuffer(
+    VkDevice                                    device,
+    const VkBufferCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBuffer*                                   pBuffer) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+
+    create_buffer_api_state cb_state{};
+    cb_state.modified_create_info = *pCreateInfo;
+
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateBuffer(device, pCreateInfo, pAllocator, pBuffer, &cb_state);
+    }
+    VkResult result = DispatchCreateBuffer(device, &cb_state.modified_create_info, pAllocator, pBuffer);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateBuffer(device, pCreateInfo, pAllocator, pBuffer, result);
+    }
+    return result;
+}
+
+
+
+// ValidationCache APIs do not dispatch
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateValidationCacheEXT(
+    VkDevice                                    device,
+    const VkValidationCacheCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkValidationCacheEXT*                       pValidationCache) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    VkResult result = VK_SUCCESS;
+
+    ValidationObject *validation_data = layer_data->GetValidationObject(layer_data->object_dispatch, LayerObjectTypeCoreValidation);
+    if (validation_data) {
+        auto lock = validation_data->write_lock();
+        result = validation_data->CoreLayerCreateValidationCacheEXT(device, pCreateInfo, pAllocator, pValidationCache);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyValidationCacheEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+
+    ValidationObject *validation_data = layer_data->GetValidationObject(layer_data->object_dispatch, LayerObjectTypeCoreValidation);
+    if (validation_data) {
+        auto lock = validation_data->write_lock();
+        validation_data->CoreLayerDestroyValidationCacheEXT(device, validationCache, pAllocator);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL MergeValidationCachesEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkValidationCacheEXT*                 pSrcCaches) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    VkResult result = VK_SUCCESS;
+
+    ValidationObject *validation_data = layer_data->GetValidationObject(layer_data->object_dispatch, LayerObjectTypeCoreValidation);
+    if (validation_data) {
+        auto lock = validation_data->write_lock();
+        result = validation_data->CoreLayerMergeValidationCachesEXT(device, dstCache, srcCacheCount, pSrcCaches);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetValidationCacheDataEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    size_t*                                     pDataSize,
+    void*                                       pData) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    VkResult result = VK_SUCCESS;
+
+    ValidationObject *validation_data = layer_data->GetValidationObject(layer_data->object_dispatch, LayerObjectTypeCoreValidation);
+    if (validation_data) {
+        auto lock = validation_data->write_lock();
+        result = validation_data->CoreLayerGetValidationCacheDataEXT(device, validationCache, pDataSize, pData);
+    }
+    return result;
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDevices(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceCount,
+    VkPhysicalDevice*                           pPhysicalDevices) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateEnumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordEnumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices);
+    }
+    VkResult result = DispatchEnumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordEnumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFeatures(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures*                   pFeatures) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceFeatures(physicalDevice, pFeatures);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceFeatures(physicalDevice, pFeatures);
+    }
+    DispatchGetPhysicalDeviceFeatures(physicalDevice, pFeatures);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceFeatures(physicalDevice, pFeatures);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties*                         pFormatProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties);
+    }
+    DispatchGetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageCreateFlags                          flags,
+    VkImageFormatProperties*                    pImageFormatProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceImageFormatProperties(physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceImageFormatProperties(physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
+    }
+    VkResult result = DispatchGetPhysicalDeviceImageFormatProperties(physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceImageFormatProperties(physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties*                 pProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceProperties(physicalDevice, pProperties);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceProperties(physicalDevice, pProperties);
+    }
+    DispatchGetPhysicalDeviceProperties(physicalDevice, pProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceProperties(physicalDevice, pProperties);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyProperties(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties*                    pQueueFamilyProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
+    }
+    DispatchGetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMemoryProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties*           pMemoryProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
+    }
+    DispatchGetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetDeviceQueue(
+    VkDevice                                    device,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t                                    queueIndex,
+    VkQueue*                                    pQueue) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue);
+    }
+    DispatchGetDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL QueueSubmit(
+    VkQueue                                     queue,
+    uint32_t                                    submitCount,
+    const VkSubmitInfo*                         pSubmits,
+    VkFence                                     fence) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateQueueSubmit(queue, submitCount, pSubmits, fence);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordQueueSubmit(queue, submitCount, pSubmits, fence);
+    }
+    VkResult result = DispatchQueueSubmit(queue, submitCount, pSubmits, fence);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordQueueSubmit(queue, submitCount, pSubmits, fence, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL QueueWaitIdle(
+    VkQueue                                     queue) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateQueueWaitIdle(queue);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordQueueWaitIdle(queue);
+    }
+    VkResult result = DispatchQueueWaitIdle(queue);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordQueueWaitIdle(queue, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL DeviceWaitIdle(
+    VkDevice                                    device) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDeviceWaitIdle(device);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDeviceWaitIdle(device);
+    }
+    VkResult result = DispatchDeviceWaitIdle(device);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDeviceWaitIdle(device, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL AllocateMemory(
+    VkDevice                                    device,
+    const VkMemoryAllocateInfo*                 pAllocateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDeviceMemory*                             pMemory) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateAllocateMemory(device, pAllocateInfo, pAllocator, pMemory);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordAllocateMemory(device, pAllocateInfo, pAllocator, pMemory);
+    }
+    VkResult result = DispatchAllocateMemory(device, pAllocateInfo, pAllocator, pMemory);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordAllocateMemory(device, pAllocateInfo, pAllocator, pMemory, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL FreeMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateFreeMemory(device, memory, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordFreeMemory(device, memory, pAllocator);
+    }
+    DispatchFreeMemory(device, memory, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordFreeMemory(device, memory, pAllocator);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL MapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                offset,
+    VkDeviceSize                                size,
+    VkMemoryMapFlags                            flags,
+    void**                                      ppData) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateMapMemory(device, memory, offset, size, flags, ppData);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordMapMemory(device, memory, offset, size, flags, ppData);
+    }
+    VkResult result = DispatchMapMemory(device, memory, offset, size, flags, ppData);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordMapMemory(device, memory, offset, size, flags, ppData, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL UnmapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateUnmapMemory(device, memory);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordUnmapMemory(device, memory);
+    }
+    DispatchUnmapMemory(device, memory);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordUnmapMemory(device, memory);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL FlushMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateFlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordFlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
+    }
+    VkResult result = DispatchFlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordFlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL InvalidateMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateInvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordInvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
+    }
+    VkResult result = DispatchInvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordInvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL GetDeviceMemoryCommitment(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize*                               pCommittedMemoryInBytes) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes);
+    }
+    DispatchGetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateBindBufferMemory(device, buffer, memory, memoryOffset);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordBindBufferMemory(device, buffer, memory, memoryOffset);
+    }
+    VkResult result = DispatchBindBufferMemory(device, buffer, memory, memoryOffset);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordBindBufferMemory(device, buffer, memory, memoryOffset, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateBindImageMemory(device, image, memory, memoryOffset);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordBindImageMemory(device, image, memory, memoryOffset);
+    }
+    VkResult result = DispatchBindImageMemory(device, image, memory, memoryOffset);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordBindImageMemory(device, image, memory, memoryOffset, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL GetBufferMemoryRequirements(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkMemoryRequirements*                       pMemoryRequirements) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
+    }
+    DispatchGetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetImageMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkMemoryRequirements*                       pMemoryRequirements) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetImageMemoryRequirements(device, image, pMemoryRequirements);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetImageMemoryRequirements(device, image, pMemoryRequirements);
+    }
+    DispatchGetImageMemoryRequirements(device, image, pMemoryRequirements);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetImageMemoryRequirements(device, image, pMemoryRequirements);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetImageSparseMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements*            pSparseMemoryRequirements) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
+    }
+    DispatchGetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceSparseImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkSampleCountFlagBits                       samples,
+    VkImageUsageFlags                           usage,
+    VkImageTiling                               tiling,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties*              pProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceSparseImageFormatProperties(physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceSparseImageFormatProperties(physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties);
+    }
+    DispatchGetPhysicalDeviceSparseImageFormatProperties(physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceSparseImageFormatProperties(physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL QueueBindSparse(
+    VkQueue                                     queue,
+    uint32_t                                    bindInfoCount,
+    const VkBindSparseInfo*                     pBindInfo,
+    VkFence                                     fence) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateQueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordQueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
+    }
+    VkResult result = DispatchQueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordQueueBindSparse(queue, bindInfoCount, pBindInfo, fence, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateFence(
+    VkDevice                                    device,
+    const VkFenceCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateFence(device, pCreateInfo, pAllocator, pFence);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateFence(device, pCreateInfo, pAllocator, pFence);
+    }
+    VkResult result = DispatchCreateFence(device, pCreateInfo, pAllocator, pFence);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateFence(device, pCreateInfo, pAllocator, pFence, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyFence(
+    VkDevice                                    device,
+    VkFence                                     fence,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroyFence(device, fence, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroyFence(device, fence, pAllocator);
+    }
+    DispatchDestroyFence(device, fence, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroyFence(device, fence, pAllocator);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL ResetFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateResetFences(device, fenceCount, pFences);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordResetFences(device, fenceCount, pFences);
+    }
+    VkResult result = DispatchResetFences(device, fenceCount, pFences);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordResetFences(device, fenceCount, pFences, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetFenceStatus(
+    VkDevice                                    device,
+    VkFence                                     fence) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetFenceStatus(device, fence);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetFenceStatus(device, fence);
+    }
+    VkResult result = DispatchGetFenceStatus(device, fence);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetFenceStatus(device, fence, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL WaitForFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences,
+    VkBool32                                    waitAll,
+    uint64_t                                    timeout) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateWaitForFences(device, fenceCount, pFences, waitAll, timeout);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordWaitForFences(device, fenceCount, pFences, waitAll, timeout);
+    }
+    VkResult result = DispatchWaitForFences(device, fenceCount, pFences, waitAll, timeout);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordWaitForFences(device, fenceCount, pFences, waitAll, timeout, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateSemaphore(
+    VkDevice                                    device,
+    const VkSemaphoreCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSemaphore*                                pSemaphore) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore);
+    }
+    VkResult result = DispatchCreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroySemaphore(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroySemaphore(device, semaphore, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroySemaphore(device, semaphore, pAllocator);
+    }
+    DispatchDestroySemaphore(device, semaphore, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroySemaphore(device, semaphore, pAllocator);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateEvent(
+    VkDevice                                    device,
+    const VkEventCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkEvent*                                    pEvent) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateEvent(device, pCreateInfo, pAllocator, pEvent);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateEvent(device, pCreateInfo, pAllocator, pEvent);
+    }
+    VkResult result = DispatchCreateEvent(device, pCreateInfo, pAllocator, pEvent);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateEvent(device, pCreateInfo, pAllocator, pEvent, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyEvent(
+    VkDevice                                    device,
+    VkEvent                                     event,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroyEvent(device, event, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroyEvent(device, event, pAllocator);
+    }
+    DispatchDestroyEvent(device, event, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroyEvent(device, event, pAllocator);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetEventStatus(
+    VkDevice                                    device,
+    VkEvent                                     event) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetEventStatus(device, event);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetEventStatus(device, event);
+    }
+    VkResult result = DispatchGetEventStatus(device, event);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetEventStatus(device, event, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL SetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateSetEvent(device, event);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordSetEvent(device, event);
+    }
+    VkResult result = DispatchSetEvent(device, event);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordSetEvent(device, event, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL ResetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateResetEvent(device, event);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordResetEvent(device, event);
+    }
+    VkResult result = DispatchResetEvent(device, event);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordResetEvent(device, event, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateQueryPool(
+    VkDevice                                    device,
+    const VkQueryPoolCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkQueryPool*                                pQueryPool) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool);
+    }
+    VkResult result = DispatchCreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyQueryPool(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroyQueryPool(device, queryPool, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroyQueryPool(device, queryPool, pAllocator);
+    }
+    DispatchDestroyQueryPool(device, queryPool, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroyQueryPool(device, queryPool, pAllocator);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetQueryPoolResults(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    size_t                                      dataSize,
+    void*                                       pData,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags);
+    }
+    VkResult result = DispatchGetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyBuffer(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroyBuffer(device, buffer, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroyBuffer(device, buffer, pAllocator);
+    }
+    DispatchDestroyBuffer(device, buffer, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroyBuffer(device, buffer, pAllocator);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateBufferView(
+    VkDevice                                    device,
+    const VkBufferViewCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBufferView*                               pView) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateBufferView(device, pCreateInfo, pAllocator, pView);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateBufferView(device, pCreateInfo, pAllocator, pView);
+    }
+    VkResult result = DispatchCreateBufferView(device, pCreateInfo, pAllocator, pView);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateBufferView(device, pCreateInfo, pAllocator, pView, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyBufferView(
+    VkDevice                                    device,
+    VkBufferView                                bufferView,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroyBufferView(device, bufferView, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroyBufferView(device, bufferView, pAllocator);
+    }
+    DispatchDestroyBufferView(device, bufferView, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroyBufferView(device, bufferView, pAllocator);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateImage(
+    VkDevice                                    device,
+    const VkImageCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImage*                                    pImage) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateImage(device, pCreateInfo, pAllocator, pImage);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateImage(device, pCreateInfo, pAllocator, pImage);
+    }
+    VkResult result = DispatchCreateImage(device, pCreateInfo, pAllocator, pImage);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateImage(device, pCreateInfo, pAllocator, pImage, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyImage(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroyImage(device, image, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroyImage(device, image, pAllocator);
+    }
+    DispatchDestroyImage(device, image, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroyImage(device, image, pAllocator);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetImageSubresourceLayout(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkImageSubresource*                   pSubresource,
+    VkSubresourceLayout*                        pLayout) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetImageSubresourceLayout(device, image, pSubresource, pLayout);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetImageSubresourceLayout(device, image, pSubresource, pLayout);
+    }
+    DispatchGetImageSubresourceLayout(device, image, pSubresource, pLayout);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetImageSubresourceLayout(device, image, pSubresource, pLayout);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateImageView(
+    VkDevice                                    device,
+    const VkImageViewCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImageView*                                pView) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateImageView(device, pCreateInfo, pAllocator, pView);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateImageView(device, pCreateInfo, pAllocator, pView);
+    }
+    VkResult result = DispatchCreateImageView(device, pCreateInfo, pAllocator, pView);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateImageView(device, pCreateInfo, pAllocator, pView, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyImageView(
+    VkDevice                                    device,
+    VkImageView                                 imageView,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroyImageView(device, imageView, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroyImageView(device, imageView, pAllocator);
+    }
+    DispatchDestroyImageView(device, imageView, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroyImageView(device, imageView, pAllocator);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyShaderModule(
+    VkDevice                                    device,
+    VkShaderModule                              shaderModule,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroyShaderModule(device, shaderModule, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroyShaderModule(device, shaderModule, pAllocator);
+    }
+    DispatchDestroyShaderModule(device, shaderModule, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroyShaderModule(device, shaderModule, pAllocator);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineCache(
+    VkDevice                                    device,
+    const VkPipelineCacheCreateInfo*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineCache*                            pPipelineCache) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache);
+    }
+    VkResult result = DispatchCreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyPipelineCache(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroyPipelineCache(device, pipelineCache, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroyPipelineCache(device, pipelineCache, pAllocator);
+    }
+    DispatchDestroyPipelineCache(device, pipelineCache, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroyPipelineCache(device, pipelineCache, pAllocator);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPipelineCacheData(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    size_t*                                     pDataSize,
+    void*                                       pData) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPipelineCacheData(device, pipelineCache, pDataSize, pData);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPipelineCacheData(device, pipelineCache, pDataSize, pData);
+    }
+    VkResult result = DispatchGetPipelineCacheData(device, pipelineCache, pDataSize, pData);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPipelineCacheData(device, pipelineCache, pDataSize, pData, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL MergePipelineCaches(
+    VkDevice                                    device,
+    VkPipelineCache                             dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkPipelineCache*                      pSrcCaches) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateMergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordMergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches);
+    }
+    VkResult result = DispatchMergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordMergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyPipeline(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroyPipeline(device, pipeline, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroyPipeline(device, pipeline, pAllocator);
+    }
+    DispatchDestroyPipeline(device, pipeline, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroyPipeline(device, pipeline, pAllocator);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyPipelineLayout(
+    VkDevice                                    device,
+    VkPipelineLayout                            pipelineLayout,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroyPipelineLayout(device, pipelineLayout, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroyPipelineLayout(device, pipelineLayout, pAllocator);
+    }
+    DispatchDestroyPipelineLayout(device, pipelineLayout, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroyPipelineLayout(device, pipelineLayout, pAllocator);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateSampler(
+    VkDevice                                    device,
+    const VkSamplerCreateInfo*                  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSampler*                                  pSampler) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateSampler(device, pCreateInfo, pAllocator, pSampler);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateSampler(device, pCreateInfo, pAllocator, pSampler);
+    }
+    VkResult result = DispatchCreateSampler(device, pCreateInfo, pAllocator, pSampler);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateSampler(device, pCreateInfo, pAllocator, pSampler, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroySampler(
+    VkDevice                                    device,
+    VkSampler                                   sampler,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroySampler(device, sampler, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroySampler(device, sampler, pAllocator);
+    }
+    DispatchDestroySampler(device, sampler, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroySampler(device, sampler, pAllocator);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorSetLayout(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorSetLayout*                      pSetLayout) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);
+    }
+    VkResult result = DispatchCreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyDescriptorSetLayout(
+    VkDevice                                    device,
+    VkDescriptorSetLayout                       descriptorSetLayout,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
+    }
+    DispatchDestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorPool(
+    VkDevice                                    device,
+    const VkDescriptorPoolCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorPool*                           pDescriptorPool) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
+    }
+    VkResult result = DispatchCreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroyDescriptorPool(device, descriptorPool, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroyDescriptorPool(device, descriptorPool, pAllocator);
+    }
+    DispatchDestroyDescriptorPool(device, descriptorPool, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroyDescriptorPool(device, descriptorPool, pAllocator);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL ResetDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    VkDescriptorPoolResetFlags                  flags) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateResetDescriptorPool(device, descriptorPool, flags);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordResetDescriptorPool(device, descriptorPool, flags);
+    }
+    VkResult result = DispatchResetDescriptorPool(device, descriptorPool, flags);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordResetDescriptorPool(device, descriptorPool, flags, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL FreeDescriptorSets(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateFreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordFreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);
+    }
+    VkResult result = DispatchFreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordFreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSets(
+    VkDevice                                    device,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites,
+    uint32_t                                    descriptorCopyCount,
+    const VkCopyDescriptorSet*                  pDescriptorCopies) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateUpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordUpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
+    }
+    DispatchUpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordUpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateFramebuffer(
+    VkDevice                                    device,
+    const VkFramebufferCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFramebuffer*                              pFramebuffer) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
+    }
+    VkResult result = DispatchCreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyFramebuffer(
+    VkDevice                                    device,
+    VkFramebuffer                               framebuffer,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroyFramebuffer(device, framebuffer, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroyFramebuffer(device, framebuffer, pAllocator);
+    }
+    DispatchDestroyFramebuffer(device, framebuffer, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroyFramebuffer(device, framebuffer, pAllocator);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateRenderPass(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
+    }
+    VkResult result = DispatchCreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyRenderPass(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroyRenderPass(device, renderPass, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroyRenderPass(device, renderPass, pAllocator);
+    }
+    DispatchDestroyRenderPass(device, renderPass, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroyRenderPass(device, renderPass, pAllocator);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetRenderAreaGranularity(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    VkExtent2D*                                 pGranularity) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetRenderAreaGranularity(device, renderPass, pGranularity);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetRenderAreaGranularity(device, renderPass, pGranularity);
+    }
+    DispatchGetRenderAreaGranularity(device, renderPass, pGranularity);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetRenderAreaGranularity(device, renderPass, pGranularity);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateCommandPool(
+    VkDevice                                    device,
+    const VkCommandPoolCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkCommandPool*                              pCommandPool) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
+    }
+    VkResult result = DispatchCreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroyCommandPool(device, commandPool, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroyCommandPool(device, commandPool, pAllocator);
+    }
+    DispatchDestroyCommandPool(device, commandPool, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroyCommandPool(device, commandPool, pAllocator);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL ResetCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolResetFlags                     flags) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateResetCommandPool(device, commandPool, flags);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordResetCommandPool(device, commandPool, flags);
+    }
+    VkResult result = DispatchResetCommandPool(device, commandPool, flags);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordResetCommandPool(device, commandPool, flags, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL AllocateCommandBuffers(
+    VkDevice                                    device,
+    const VkCommandBufferAllocateInfo*          pAllocateInfo,
+    VkCommandBuffer*                            pCommandBuffers) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateAllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordAllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers);
+    }
+    VkResult result = DispatchAllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordAllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL FreeCommandBuffers(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateFreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordFreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
+    }
+    DispatchFreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordFreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL BeginCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    const VkCommandBufferBeginInfo*             pBeginInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateBeginCommandBuffer(commandBuffer, pBeginInfo);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordBeginCommandBuffer(commandBuffer, pBeginInfo);
+    }
+    VkResult result = DispatchBeginCommandBuffer(commandBuffer, pBeginInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordBeginCommandBuffer(commandBuffer, pBeginInfo, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL EndCommandBuffer(
+    VkCommandBuffer                             commandBuffer) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateEndCommandBuffer(commandBuffer);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordEndCommandBuffer(commandBuffer);
+    }
+    VkResult result = DispatchEndCommandBuffer(commandBuffer);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordEndCommandBuffer(commandBuffer, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL ResetCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkCommandBufferResetFlags                   flags) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateResetCommandBuffer(commandBuffer, flags);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordResetCommandBuffer(commandBuffer, flags);
+    }
+    VkResult result = DispatchResetCommandBuffer(commandBuffer, flags);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordResetCommandBuffer(commandBuffer, flags, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdBindPipeline(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipeline                                  pipeline) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
+    }
+    DispatchCmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdSetViewport(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewport*                           pViewports) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
+    }
+    DispatchCmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdSetScissor(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstScissor,
+    uint32_t                                    scissorCount,
+    const VkRect2D*                             pScissors) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
+    }
+    DispatchCmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdSetLineWidth(
+    VkCommandBuffer                             commandBuffer,
+    float                                       lineWidth) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdSetLineWidth(commandBuffer, lineWidth);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdSetLineWidth(commandBuffer, lineWidth);
+    }
+    DispatchCmdSetLineWidth(commandBuffer, lineWidth);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdSetLineWidth(commandBuffer, lineWidth);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdSetDepthBias(
+    VkCommandBuffer                             commandBuffer,
+    float                                       depthBiasConstantFactor,
+    float                                       depthBiasClamp,
+    float                                       depthBiasSlopeFactor) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdSetDepthBias(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdSetDepthBias(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
+    }
+    DispatchCmdSetDepthBias(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdSetDepthBias(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdSetBlendConstants(
+    VkCommandBuffer                             commandBuffer,
+    const float                                 blendConstants[4]) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdSetBlendConstants(commandBuffer, blendConstants);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdSetBlendConstants(commandBuffer, blendConstants);
+    }
+    DispatchCmdSetBlendConstants(commandBuffer, blendConstants);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdSetBlendConstants(commandBuffer, blendConstants);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdSetDepthBounds(
+    VkCommandBuffer                             commandBuffer,
+    float                                       minDepthBounds,
+    float                                       maxDepthBounds) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds);
+    }
+    DispatchCmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdSetStencilCompareMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    compareMask) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdSetStencilCompareMask(commandBuffer, faceMask, compareMask);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdSetStencilCompareMask(commandBuffer, faceMask, compareMask);
+    }
+    DispatchCmdSetStencilCompareMask(commandBuffer, faceMask, compareMask);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdSetStencilCompareMask(commandBuffer, faceMask, compareMask);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdSetStencilWriteMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    writeMask) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
+    }
+    DispatchCmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdSetStencilReference(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    reference) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdSetStencilReference(commandBuffer, faceMask, reference);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdSetStencilReference(commandBuffer, faceMask, reference);
+    }
+    DispatchCmdSetStencilReference(commandBuffer, faceMask, reference);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdSetStencilReference(commandBuffer, faceMask, reference);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdBindDescriptorSets(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    firstSet,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets,
+    uint32_t                                    dynamicOffsetCount,
+    const uint32_t*                             pDynamicOffsets) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
+    }
+    DispatchCmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdBindIndexBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkIndexType                                 indexType) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
+    }
+    DispatchCmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdBindVertexBuffers(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
+    }
+    DispatchCmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdDraw(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    vertexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstVertex,
+    uint32_t                                    firstInstance) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
+    }
+    DispatchCmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawIndexed(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    indexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstIndex,
+    int32_t                                     vertexOffset,
+    uint32_t                                    firstInstance) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
+    }
+    DispatchCmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
+    }
+    DispatchCmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);
+    }
+    DispatchCmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdDispatch(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdDispatch(commandBuffer, groupCountX, groupCountY, groupCountZ);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdDispatch(commandBuffer, groupCountX, groupCountY, groupCountZ);
+    }
+    DispatchCmdDispatch(commandBuffer, groupCountX, groupCountY, groupCountZ);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdDispatch(commandBuffer, groupCountX, groupCountY, groupCountZ);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdDispatchIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdDispatchIndirect(commandBuffer, buffer, offset);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdDispatchIndirect(commandBuffer, buffer, offset);
+    }
+    DispatchCmdDispatchIndirect(commandBuffer, buffer, offset);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdDispatchIndirect(commandBuffer, buffer, offset);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdCopyBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferCopy*                         pRegions) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
+    }
+    DispatchCmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdCopyImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageCopy*                          pRegions) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+    }
+    DispatchCmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdBlitImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageBlit*                          pRegions,
+    VkFilter                                    filter) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
+    }
+    DispatchCmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdCopyBufferToImage(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
+    }
+    DispatchCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdCopyImageToBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
+    }
+    DispatchCmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdUpdateBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                dataSize,
+    const void*                                 pData) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
+    }
+    DispatchCmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdFillBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                size,
+    uint32_t                                    data) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
+    }
+    DispatchCmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdClearColorImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearColorValue*                    pColor,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
+    }
+    DispatchCmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdClearDepthStencilImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearDepthStencilValue*             pDepthStencil,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
+    }
+    DispatchCmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdClearAttachments(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    attachmentCount,
+    const VkClearAttachment*                    pAttachments,
+    uint32_t                                    rectCount,
+    const VkClearRect*                          pRects) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
+    }
+    DispatchCmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdResolveImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageResolve*                       pRegions) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+    }
+    DispatchCmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdSetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdSetEvent(commandBuffer, event, stageMask);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdSetEvent(commandBuffer, event, stageMask);
+    }
+    DispatchCmdSetEvent(commandBuffer, event, stageMask);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdSetEvent(commandBuffer, event, stageMask);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdResetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdResetEvent(commandBuffer, event, stageMask);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdResetEvent(commandBuffer, event, stageMask);
+    }
+    DispatchCmdResetEvent(commandBuffer, event, stageMask);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdResetEvent(commandBuffer, event, stageMask);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdWaitEvents(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    eventCount,
+    const VkEvent*                              pEvents,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdWaitEvents(commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdWaitEvents(commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+    }
+    DispatchCmdWaitEvents(commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdWaitEvents(commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdPipelineBarrier(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    VkDependencyFlags                           dependencyFlags,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+    }
+    DispatchCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdBeginQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdBeginQuery(commandBuffer, queryPool, query, flags);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdBeginQuery(commandBuffer, queryPool, query, flags);
+    }
+    DispatchCmdBeginQuery(commandBuffer, queryPool, query, flags);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdBeginQuery(commandBuffer, queryPool, query, flags);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdEndQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdEndQuery(commandBuffer, queryPool, query);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdEndQuery(commandBuffer, queryPool, query);
+    }
+    DispatchCmdEndQuery(commandBuffer, queryPool, query);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdEndQuery(commandBuffer, queryPool, query);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdResetQueryPool(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
+    }
+    DispatchCmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdWriteTimestamp(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query);
+    }
+    DispatchCmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdCopyQueryPoolResults(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);
+    }
+    DispatchCmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdPushConstants(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineLayout                            layout,
+    VkShaderStageFlags                          stageFlags,
+    uint32_t                                    offset,
+    uint32_t                                    size,
+    const void*                                 pValues) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues);
+    }
+    DispatchCmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdBeginRenderPass(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    VkSubpassContents                           contents) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
+    }
+    DispatchCmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdNextSubpass(
+    VkCommandBuffer                             commandBuffer,
+    VkSubpassContents                           contents) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdNextSubpass(commandBuffer, contents);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdNextSubpass(commandBuffer, contents);
+    }
+    DispatchCmdNextSubpass(commandBuffer, contents);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdNextSubpass(commandBuffer, contents);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdEndRenderPass(
+    VkCommandBuffer                             commandBuffer) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdEndRenderPass(commandBuffer);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdEndRenderPass(commandBuffer);
+    }
+    DispatchCmdEndRenderPass(commandBuffer);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdEndRenderPass(commandBuffer);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdExecuteCommands(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers);
+    }
+    DispatchCmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers);
+    }
+}
+
+
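+// Vulkan 1.1 core entry points. The pattern is unchanged; entry points that
+// return a VkResult bail out with VK_ERROR_VALIDATION_FAILED_EXT when a
+// validation object asks to skip, and pass the real result through to the
+// PostCallRecord* hooks.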
+VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateBindBufferMemory2(device, bindInfoCount, pBindInfos);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordBindBufferMemory2(device, bindInfoCount, pBindInfos);
+    }
+    VkResult result = DispatchBindBufferMemory2(device, bindInfoCount, pBindInfos);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordBindBufferMemory2(device, bindInfoCount, pBindInfos, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateBindImageMemory2(device, bindInfoCount, pBindInfos);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordBindImageMemory2(device, bindInfoCount, pBindInfos);
+    }
+    VkResult result = DispatchBindImageMemory2(device, bindInfoCount, pBindInfos);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordBindImageMemory2(device, bindInfoCount, pBindInfos, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL GetDeviceGroupPeerMemoryFeatures(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetDeviceGroupPeerMemoryFeatures(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetDeviceGroupPeerMemoryFeatures(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
+    }
+    DispatchGetDeviceGroupPeerMemoryFeatures(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetDeviceGroupPeerMemoryFeatures(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdSetDeviceMask(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdSetDeviceMask(commandBuffer, deviceMask);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdSetDeviceMask(commandBuffer, deviceMask);
+    }
+    DispatchCmdSetDeviceMask(commandBuffer, deviceMask);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdSetDeviceMask(commandBuffer, deviceMask);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdDispatchBase(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdDispatchBase(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdDispatchBase(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
+    }
+    DispatchCmdDispatchBase(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdDispatchBase(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
+    }
+}
+
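+// EnumeratePhysicalDeviceGroups dispatches on the VkInstance key rather than
+// a VkDevice, but otherwise uses the identical validate/record/dispatch flow.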
+VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDeviceGroups(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateEnumeratePhysicalDeviceGroups(instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordEnumeratePhysicalDeviceGroups(instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
+    }
+    VkResult result = DispatchEnumeratePhysicalDeviceGroups(instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordEnumeratePhysicalDeviceGroups(instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL GetImageMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetImageMemoryRequirements2(device, pInfo, pMemoryRequirements);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetImageMemoryRequirements2(device, pInfo, pMemoryRequirements);
+    }
+    DispatchGetImageMemoryRequirements2(device, pInfo, pMemoryRequirements);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetImageMemoryRequirements2(device, pInfo, pMemoryRequirements);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetBufferMemoryRequirements2(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetBufferMemoryRequirements2(device, pInfo, pMemoryRequirements);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetBufferMemoryRequirements2(device, pInfo, pMemoryRequirements);
+    }
+    DispatchGetBufferMemoryRequirements2(device, pInfo, pMemoryRequirements);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetBufferMemoryRequirements2(device, pInfo, pMemoryRequirements);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetImageSparseMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetImageSparseMemoryRequirements2(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetImageSparseMemoryRequirements2(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
+    }
+    DispatchGetImageSparseMemoryRequirements2(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetImageSparseMemoryRequirements2(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFeatures2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures2*                  pFeatures) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceFeatures2(physicalDevice, pFeatures);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceFeatures2(physicalDevice, pFeatures);
+    }
+    DispatchGetPhysicalDeviceFeatures2(physicalDevice, pFeatures);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceFeatures2(physicalDevice, pFeatures);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties2*                pProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceProperties2(physicalDevice, pProperties);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceProperties2(physicalDevice, pProperties);
+    }
+    DispatchGetPhysicalDeviceProperties2(physicalDevice, pProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceProperties2(physicalDevice, pProperties);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties2*                        pFormatProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceFormatProperties2(physicalDevice, format, pFormatProperties);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceFormatProperties2(physicalDevice, format, pFormatProperties);
+    }
+    DispatchGetPhysicalDeviceFormatProperties2(physicalDevice, format, pFormatProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceFormatProperties2(physicalDevice, format, pFormatProperties);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceImageFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceImageFormatInfo2*     pImageFormatInfo,
+    VkImageFormatProperties2*                   pImageFormatProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceImageFormatProperties2(physicalDevice, pImageFormatInfo, pImageFormatProperties);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceImageFormatProperties2(physicalDevice, pImageFormatInfo, pImageFormatProperties);
+    }
+    VkResult result = DispatchGetPhysicalDeviceImageFormatProperties2(physicalDevice, pImageFormatInfo, pImageFormatProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceImageFormatProperties2(physicalDevice, pImageFormatInfo, pImageFormatProperties, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties2*                   pQueueFamilyProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceQueueFamilyProperties2(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceQueueFamilyProperties2(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
+    }
+    DispatchGetPhysicalDeviceQueueFamilyProperties2(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMemoryProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties2*          pMemoryProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceMemoryProperties2(physicalDevice, pMemoryProperties);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceMemoryProperties2(physicalDevice, pMemoryProperties);
+    }
+    DispatchGetPhysicalDeviceMemoryProperties2(physicalDevice, pMemoryProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceMemoryProperties2(physicalDevice, pMemoryProperties);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceSparseImageFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties2*             pProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceSparseImageFormatProperties2(physicalDevice, pFormatInfo, pPropertyCount, pProperties);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceSparseImageFormatProperties2(physicalDevice, pFormatInfo, pPropertyCount, pProperties);
+    }
+    DispatchGetPhysicalDeviceSparseImageFormatProperties2(physicalDevice, pFormatInfo, pPropertyCount, pProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceSparseImageFormatProperties2(physicalDevice, pFormatInfo, pPropertyCount, pProperties);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL TrimCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateTrimCommandPool(device, commandPool, flags);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordTrimCommandPool(device, commandPool, flags);
+    }
+    DispatchTrimCommandPool(device, commandPool, flags);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordTrimCommandPool(device, commandPool, flags);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetDeviceQueue2(
+    VkDevice                                    device,
+    const VkDeviceQueueInfo2*                   pQueueInfo,
+    VkQueue*                                    pQueue) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetDeviceQueue2(device, pQueueInfo, pQueue);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetDeviceQueue2(device, pQueueInfo, pQueue);
+    }
+    DispatchGetDeviceQueue2(device, pQueueInfo, pQueue);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetDeviceQueue2(device, pQueueInfo, pQueue);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateSamplerYcbcrConversion(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateSamplerYcbcrConversion(device, pCreateInfo, pAllocator, pYcbcrConversion);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateSamplerYcbcrConversion(device, pCreateInfo, pAllocator, pYcbcrConversion);
+    }
+    VkResult result = DispatchCreateSamplerYcbcrConversion(device, pCreateInfo, pAllocator, pYcbcrConversion);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateSamplerYcbcrConversion(device, pCreateInfo, pAllocator, pYcbcrConversion, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroySamplerYcbcrConversion(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroySamplerYcbcrConversion(device, ycbcrConversion, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroySamplerYcbcrConversion(device, ycbcrConversion, pAllocator);
+    }
+    DispatchDestroySamplerYcbcrConversion(device, ycbcrConversion, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroySamplerYcbcrConversion(device, ycbcrConversion, pAllocator);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateDescriptorUpdateTemplate(device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateDescriptorUpdateTemplate(device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
+    }
+    VkResult result = DispatchCreateDescriptorUpdateTemplate(device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateDescriptorUpdateTemplate(device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroyDescriptorUpdateTemplate(device, descriptorUpdateTemplate, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroyDescriptorUpdateTemplate(device, descriptorUpdateTemplate, pAllocator);
+    }
+    DispatchDestroyDescriptorUpdateTemplate(device, descriptorUpdateTemplate, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroyDescriptorUpdateTemplate(device, descriptorUpdateTemplate, pAllocator);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSetWithTemplate(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateUpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate, pData);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordUpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate, pData);
+    }
+    DispatchUpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate, pData);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordUpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate, pData);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalBufferProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalBufferInfo*   pExternalBufferInfo,
+    VkExternalBufferProperties*                 pExternalBufferProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceExternalBufferProperties(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceExternalBufferProperties(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
+    }
+    DispatchGetPhysicalDeviceExternalBufferProperties(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceExternalBufferProperties(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalFenceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalFenceInfo*    pExternalFenceInfo,
+    VkExternalFenceProperties*                  pExternalFenceProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceExternalFenceProperties(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceExternalFenceProperties(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
+    }
+    DispatchGetPhysicalDeviceExternalFenceProperties(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceExternalFenceProperties(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalSemaphoreProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
+    VkExternalSemaphoreProperties*              pExternalSemaphoreProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceExternalSemaphoreProperties(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceExternalSemaphoreProperties(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
+    }
+    DispatchGetPhysicalDeviceExternalSemaphoreProperties(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceExternalSemaphoreProperties(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetDescriptorSetLayoutSupport(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetDescriptorSetLayoutSupport(device, pCreateInfo, pSupport);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetDescriptorSetLayoutSupport(device, pCreateInfo, pSupport);
+    }
+    DispatchGetDescriptorSetLayoutSupport(device, pCreateInfo, pSupport);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetDescriptorSetLayoutSupport(device, pCreateInfo, pSupport);
+    }
+}
+
+
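+// VK_KHR_surface entry points (instance-level: they dispatch on the
+// VkInstance or VkPhysicalDevice key).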
+VKAPI_ATTR void VKAPI_CALL DestroySurfaceKHR(
+    VkInstance                                  instance,
+    VkSurfaceKHR                                surface,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroySurfaceKHR(instance, surface, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroySurfaceKHR(instance, surface, pAllocator);
+    }
+    DispatchDestroySurfaceKHR(instance, surface, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroySurfaceKHR(instance, surface, pAllocator);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    VkSurfaceKHR                                surface,
+    VkBool32*                                   pSupported) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported);
+    }
+    VkResult result = DispatchGetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilitiesKHR*                   pSurfaceCapabilities) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities);
+    }
+    VkResult result = DispatchGetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceFormatsKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormatKHR*                         pSurfaceFormats) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);
+    }
+    VkResult result = DispatchGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfacePresentModesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes);
+    }
+    VkResult result = DispatchGetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes, result);
+    }
+    return result;
+}
+
+
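+// VK_KHR_swapchain entry points: swapchain creation and destruction, image
+// query and acquisition, and queue presentation.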
+VKAPI_ATTR VkResult VKAPI_CALL CreateSwapchainKHR(
+    VkDevice                                    device,
+    const VkSwapchainCreateInfoKHR*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchain) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain);
+    }
+    VkResult result = DispatchCreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroySwapchainKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroySwapchainKHR(device, swapchain, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroySwapchainKHR(device, swapchain, pAllocator);
+    }
+    DispatchDestroySwapchainKHR(device, swapchain, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroySwapchainKHR(device, swapchain, pAllocator);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainImagesKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pSwapchainImageCount,
+    VkImage*                                    pSwapchainImages) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
+    }
+    VkResult result = DispatchGetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL AcquireNextImageKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint64_t                                    timeout,
+    VkSemaphore                                 semaphore,
+    VkFence                                     fence,
+    uint32_t*                                   pImageIndex) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateAcquireNextImageKHR(device, swapchain, timeout, semaphore, fence, pImageIndex);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordAcquireNextImageKHR(device, swapchain, timeout, semaphore, fence, pImageIndex);
+    }
+    VkResult result = DispatchAcquireNextImageKHR(device, swapchain, timeout, semaphore, fence, pImageIndex);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordAcquireNextImageKHR(device, swapchain, timeout, semaphore, fence, pImageIndex, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL QueuePresentKHR(
+    VkQueue                                     queue,
+    const VkPresentInfoKHR*                     pPresentInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateQueuePresentKHR(queue, pPresentInfo);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordQueuePresentKHR(queue, pPresentInfo);
+    }
+    VkResult result = DispatchQueuePresentKHR(queue, pPresentInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordQueuePresentKHR(queue, pPresentInfo, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetDeviceGroupPresentCapabilitiesKHR(
+    VkDevice                                    device,
+    VkDeviceGroupPresentCapabilitiesKHR*        pDeviceGroupPresentCapabilities) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetDeviceGroupPresentCapabilitiesKHR(device, pDeviceGroupPresentCapabilities);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetDeviceGroupPresentCapabilitiesKHR(device, pDeviceGroupPresentCapabilities);
+    }
+    VkResult result = DispatchGetDeviceGroupPresentCapabilitiesKHR(device, pDeviceGroupPresentCapabilities);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetDeviceGroupPresentCapabilitiesKHR(device, pDeviceGroupPresentCapabilities, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetDeviceGroupSurfacePresentModesKHR(
+    VkDevice                                    device,
+    VkSurfaceKHR                                surface,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetDeviceGroupSurfacePresentModesKHR(device, surface, pModes);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetDeviceGroupSurfacePresentModesKHR(device, surface, pModes);
+    }
+    VkResult result = DispatchGetDeviceGroupSurfacePresentModesKHR(device, surface, pModes);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetDeviceGroupSurfacePresentModesKHR(device, surface, pModes, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDevicePresentRectanglesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pRectCount,
+    VkRect2D*                                   pRects) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDevicePresentRectanglesKHR(physicalDevice, surface, pRectCount, pRects);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDevicePresentRectanglesKHR(physicalDevice, surface, pRectCount, pRects);
+    }
+    VkResult result = DispatchGetPhysicalDevicePresentRectanglesKHR(physicalDevice, surface, pRectCount, pRects);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDevicePresentRectanglesKHR(physicalDevice, surface, pRectCount, pRects, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL AcquireNextImage2KHR(
+    VkDevice                                    device,
+    const VkAcquireNextImageInfoKHR*            pAcquireInfo,
+    uint32_t*                                   pImageIndex) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateAcquireNextImage2KHR(device, pAcquireInfo, pImageIndex);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordAcquireNextImage2KHR(device, pAcquireInfo, pImageIndex);
+    }
+    VkResult result = DispatchAcquireNextImage2KHR(device, pAcquireInfo, pImageIndex);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordAcquireNextImage2KHR(device, pAcquireInfo, pImageIndex, result);
+    }
+    return result;
+}
+
+
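+// VK_KHR_display entry points: display and display-plane enumeration, display
+// mode creation, and display-plane surface creation, followed by
+// CreateSharedSwapchainsKHR from VK_KHR_display_swapchain.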
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceDisplayPropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPropertiesKHR*                     pProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceDisplayPropertiesKHR(physicalDevice, pPropertyCount, pProperties);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceDisplayPropertiesKHR(physicalDevice, pPropertyCount, pProperties);
+    }
+    VkResult result = DispatchGetPhysicalDeviceDisplayPropertiesKHR(physicalDevice, pPropertyCount, pProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceDisplayPropertiesKHR(physicalDevice, pPropertyCount, pProperties, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceDisplayPlanePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPlanePropertiesKHR*                pProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceDisplayPlanePropertiesKHR(physicalDevice, pPropertyCount, pProperties);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(physicalDevice, pPropertyCount, pProperties);
+    }
+    VkResult result = DispatchGetPhysicalDeviceDisplayPlanePropertiesKHR(physicalDevice, pPropertyCount, pProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(physicalDevice, pPropertyCount, pProperties, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetDisplayPlaneSupportedDisplaysKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    planeIndex,
+    uint32_t*                                   pDisplayCount,
+    VkDisplayKHR*                               pDisplays) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetDisplayPlaneSupportedDisplaysKHR(physicalDevice, planeIndex, pDisplayCount, pDisplays);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetDisplayPlaneSupportedDisplaysKHR(physicalDevice, planeIndex, pDisplayCount, pDisplays);
+    }
+    VkResult result = DispatchGetDisplayPlaneSupportedDisplaysKHR(physicalDevice, planeIndex, pDisplayCount, pDisplays);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetDisplayPlaneSupportedDisplaysKHR(physicalDevice, planeIndex, pDisplayCount, pDisplays, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetDisplayModePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModePropertiesKHR*                 pProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetDisplayModePropertiesKHR(physicalDevice, display, pPropertyCount, pProperties);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetDisplayModePropertiesKHR(physicalDevice, display, pPropertyCount, pProperties);
+    }
+    VkResult result = DispatchGetDisplayModePropertiesKHR(physicalDevice, display, pPropertyCount, pProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetDisplayModePropertiesKHR(physicalDevice, display, pPropertyCount, pProperties, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateDisplayModeKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    const VkDisplayModeCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDisplayModeKHR*                           pMode) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateDisplayModeKHR(physicalDevice, display, pCreateInfo, pAllocator, pMode);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateDisplayModeKHR(physicalDevice, display, pCreateInfo, pAllocator, pMode);
+    }
+    VkResult result = DispatchCreateDisplayModeKHR(physicalDevice, display, pCreateInfo, pAllocator, pMode);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateDisplayModeKHR(physicalDevice, display, pCreateInfo, pAllocator, pMode, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetDisplayPlaneCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayModeKHR                            mode,
+    uint32_t                                    planeIndex,
+    VkDisplayPlaneCapabilitiesKHR*              pCapabilities) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetDisplayPlaneCapabilitiesKHR(physicalDevice, mode, planeIndex, pCapabilities);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetDisplayPlaneCapabilitiesKHR(physicalDevice, mode, planeIndex, pCapabilities);
+    }
+    VkResult result = DispatchGetDisplayPlaneCapabilitiesKHR(physicalDevice, mode, planeIndex, pCapabilities);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetDisplayPlaneCapabilitiesKHR(physicalDevice, mode, planeIndex, pCapabilities, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateDisplayPlaneSurfaceKHR(
+    VkInstance                                  instance,
+    const VkDisplaySurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateDisplayPlaneSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateDisplayPlaneSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+    }
+    VkResult result = DispatchCreateDisplayPlaneSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateDisplayPlaneSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface, result);
+    }
+    return result;
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateSharedSwapchainsKHR(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainCreateInfoKHR*             pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchains) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateSharedSwapchainsKHR(device, swapchainCount, pCreateInfos, pAllocator, pSwapchains);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateSharedSwapchainsKHR(device, swapchainCount, pCreateInfos, pAllocator, pSwapchains);
+    }
+    VkResult result = DispatchCreateSharedSwapchainsKHR(device, swapchainCount, pCreateInfos, pAllocator, pSwapchains);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateSharedSwapchainsKHR(device, swapchainCount, pCreateInfos, pAllocator, pSwapchains, result);
+    }
+    return result;
+}
+
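+// Platform-specific WSI surface creation and presentation-support queries.
+// Each block is compiled only when the corresponding VK_USE_PLATFORM_* macro
+// is defined. Note that the VkBool32 presentation-support queries return
+// VK_FALSE, not VK_ERROR_VALIDATION_FAILED_EXT, when validation skips the
+// call, and their PostCallRecord* hooks do not take the result parameter.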
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateXlibSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXlibSurfaceCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+    }
+    VkResult result = DispatchCreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceXlibPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    Display*                                    dpy,
+    VisualID                                    visualID) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceXlibPresentationSupportKHR(physicalDevice, queueFamilyIndex, dpy, visualID);
+        if (skip) return VK_FALSE;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceXlibPresentationSupportKHR(physicalDevice, queueFamilyIndex, dpy, visualID);
+    }
+    VkBool32 result = DispatchGetPhysicalDeviceXlibPresentationSupportKHR(physicalDevice, queueFamilyIndex, dpy, visualID);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceXlibPresentationSupportKHR(physicalDevice, queueFamilyIndex, dpy, visualID);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_XLIB_KHR
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateXcbSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXcbSurfaceCreateInfoKHR*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+    }
+    VkResult result = DispatchCreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceXcbPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    xcb_connection_t*                           connection,
+    xcb_visualid_t                              visual_id) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceXcbPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection, visual_id);
+        if (skip) return VK_FALSE;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceXcbPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection, visual_id);
+    }
+    VkBool32 result = DispatchGetPhysicalDeviceXcbPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection, visual_id);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceXcbPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection, visual_id);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_XCB_KHR
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateWaylandSurfaceKHR(
+    VkInstance                                  instance,
+    const VkWaylandSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+    }
+    VkResult result = DispatchCreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceWaylandPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    struct wl_display*                          display) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceWaylandPresentationSupportKHR(physicalDevice, queueFamilyIndex, display);
+        if (skip) return VK_FALSE;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceWaylandPresentationSupportKHR(physicalDevice, queueFamilyIndex, display);
+    }
+    VkBool32 result = DispatchGetPhysicalDeviceWaylandPresentationSupportKHR(physicalDevice, queueFamilyIndex, display);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceWaylandPresentationSupportKHR(physicalDevice, queueFamilyIndex, display);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateAndroidSurfaceKHR(
+    VkInstance                                  instance,
+    const VkAndroidSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+    }
+    VkResult result = DispatchCreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface, result);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateWin32SurfaceKHR(
+    VkInstance                                  instance,
+    const VkWin32SurfaceCreateInfoKHR*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+    }
+    VkResult result = DispatchCreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceWin32PresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceWin32PresentationSupportKHR(physicalDevice, queueFamilyIndex);
+        if (skip) return VK_FALSE;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceWin32PresentationSupportKHR(physicalDevice, queueFamilyIndex);
+    }
+    VkBool32 result = DispatchGetPhysicalDeviceWin32PresentationSupportKHR(physicalDevice, queueFamilyIndex);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceWin32PresentationSupportKHR(physicalDevice, queueFamilyIndex);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+
+
+
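+// Physical-device query wrappers from VK_KHR_get_physical_device_properties2.
+// Most of these return void, so when validation signals skip the wrapper
+// simply returns without dispatching; there is no error code to propagate.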
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFeatures2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures2*                  pFeatures) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceFeatures2KHR(physicalDevice, pFeatures);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceFeatures2KHR(physicalDevice, pFeatures);
+    }
+    DispatchGetPhysicalDeviceFeatures2KHR(physicalDevice, pFeatures);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceFeatures2KHR(physicalDevice, pFeatures);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties2*                pProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceProperties2KHR(physicalDevice, pProperties);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceProperties2KHR(physicalDevice, pProperties);
+    }
+    DispatchGetPhysicalDeviceProperties2KHR(physicalDevice, pProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceProperties2KHR(physicalDevice, pProperties);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties2*                        pFormatProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceFormatProperties2KHR(physicalDevice, format, pFormatProperties);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceFormatProperties2KHR(physicalDevice, format, pFormatProperties);
+    }
+    DispatchGetPhysicalDeviceFormatProperties2KHR(physicalDevice, format, pFormatProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceFormatProperties2KHR(physicalDevice, format, pFormatProperties);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceImageFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceImageFormatInfo2*     pImageFormatInfo,
+    VkImageFormatProperties2*                   pImageFormatProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceImageFormatProperties2KHR(physicalDevice, pImageFormatInfo, pImageFormatProperties);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceImageFormatProperties2KHR(physicalDevice, pImageFormatInfo, pImageFormatProperties);
+    }
+    VkResult result = DispatchGetPhysicalDeviceImageFormatProperties2KHR(physicalDevice, pImageFormatInfo, pImageFormatProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceImageFormatProperties2KHR(physicalDevice, pImageFormatInfo, pImageFormatProperties, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties2*                   pQueueFamilyProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceQueueFamilyProperties2KHR(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
+    }
+    DispatchGetPhysicalDeviceQueueFamilyProperties2KHR(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMemoryProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties2*          pMemoryProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceMemoryProperties2KHR(physicalDevice, pMemoryProperties);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceMemoryProperties2KHR(physicalDevice, pMemoryProperties);
+    }
+    DispatchGetPhysicalDeviceMemoryProperties2KHR(physicalDevice, pMemoryProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceMemoryProperties2KHR(physicalDevice, pMemoryProperties);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceSparseImageFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties2*             pProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceSparseImageFormatProperties2KHR(physicalDevice, pFormatInfo, pPropertyCount, pProperties);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceSparseImageFormatProperties2KHR(physicalDevice, pFormatInfo, pPropertyCount, pProperties);
+    }
+    DispatchGetPhysicalDeviceSparseImageFormatProperties2KHR(physicalDevice, pFormatInfo, pPropertyCount, pProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceSparseImageFormatProperties2KHR(physicalDevice, pFormatInfo, pPropertyCount, pProperties);
+    }
+}
+
+
+VKAPI_ATTR void VKAPI_CALL GetDeviceGroupPeerMemoryFeaturesKHR(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetDeviceGroupPeerMemoryFeaturesKHR(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetDeviceGroupPeerMemoryFeaturesKHR(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
+    }
+    DispatchGetDeviceGroupPeerMemoryFeaturesKHR(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetDeviceGroupPeerMemoryFeaturesKHR(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdSetDeviceMaskKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdSetDeviceMaskKHR(commandBuffer, deviceMask);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdSetDeviceMaskKHR(commandBuffer, deviceMask);
+    }
+    DispatchCmdSetDeviceMaskKHR(commandBuffer, deviceMask);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdSetDeviceMaskKHR(commandBuffer, deviceMask);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdDispatchBaseKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdDispatchBaseKHR(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdDispatchBaseKHR(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
+    }
+    DispatchCmdDispatchBaseKHR(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdDispatchBaseKHR(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
+    }
+}
+
+
+
+VKAPI_ATTR void VKAPI_CALL TrimCommandPoolKHR(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateTrimCommandPoolKHR(device, commandPool, flags);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordTrimCommandPoolKHR(device, commandPool, flags);
+    }
+    DispatchTrimCommandPoolKHR(device, commandPool, flags);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordTrimCommandPoolKHR(device, commandPool, flags);
+    }
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDeviceGroupsKHR(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateEnumeratePhysicalDeviceGroupsKHR(instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordEnumeratePhysicalDeviceGroupsKHR(instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
+    }
+    VkResult result = DispatchEnumeratePhysicalDeviceGroupsKHR(instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordEnumeratePhysicalDeviceGroupsKHR(instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties, result);
+    }
+    return result;
+}
+
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalBufferPropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalBufferInfo*   pExternalBufferInfo,
+    VkExternalBufferProperties*                 pExternalBufferProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceExternalBufferPropertiesKHR(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceExternalBufferPropertiesKHR(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
+    }
+    DispatchGetPhysicalDeviceExternalBufferPropertiesKHR(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceExternalBufferPropertiesKHR(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
+    }
+}
+
+
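+// External memory handle export/import wrappers (VK_KHR_external_memory_win32
+// and VK_KHR_external_memory_fd); the Win32 variants are compiled only when
+// VK_USE_PLATFORM_WIN32_KHR is defined.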
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+VKAPI_ATTR VkResult VKAPI_CALL GetMemoryWin32HandleKHR(
+    VkDevice                                    device,
+    const VkMemoryGetWin32HandleInfoKHR*        pGetWin32HandleInfo,
+    HANDLE*                                     pHandle) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetMemoryWin32HandleKHR(device, pGetWin32HandleInfo, pHandle);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetMemoryWin32HandleKHR(device, pGetWin32HandleInfo, pHandle);
+    }
+    VkResult result = DispatchGetMemoryWin32HandleKHR(device, pGetWin32HandleInfo, pHandle);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetMemoryWin32HandleKHR(device, pGetWin32HandleInfo, pHandle, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetMemoryWin32HandlePropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    HANDLE                                      handle,
+    VkMemoryWin32HandlePropertiesKHR*           pMemoryWin32HandleProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetMemoryWin32HandlePropertiesKHR(device, handleType, handle, pMemoryWin32HandleProperties);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetMemoryWin32HandlePropertiesKHR(device, handleType, handle, pMemoryWin32HandleProperties);
+    }
+    VkResult result = DispatchGetMemoryWin32HandlePropertiesKHR(device, handleType, handle, pMemoryWin32HandleProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetMemoryWin32HandlePropertiesKHR(device, handleType, handle, pMemoryWin32HandleProperties, result);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetMemoryFdKHR(
+    VkDevice                                    device,
+    const VkMemoryGetFdInfoKHR*                 pGetFdInfo,
+    int*                                        pFd) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetMemoryFdKHR(device, pGetFdInfo, pFd);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetMemoryFdKHR(device, pGetFdInfo, pFd);
+    }
+    VkResult result = DispatchGetMemoryFdKHR(device, pGetFdInfo, pFd);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetMemoryFdKHR(device, pGetFdInfo, pFd, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetMemoryFdPropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    int                                         fd,
+    VkMemoryFdPropertiesKHR*                    pMemoryFdProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetMemoryFdPropertiesKHR(device, handleType, fd, pMemoryFdProperties);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetMemoryFdPropertiesKHR(device, handleType, fd, pMemoryFdProperties);
+    }
+    VkResult result = DispatchGetMemoryFdPropertiesKHR(device, handleType, fd, pMemoryFdProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetMemoryFdPropertiesKHR(device, handleType, fd, pMemoryFdProperties, result);
+    }
+    return result;
+}
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalSemaphorePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
+    VkExternalSemaphoreProperties*              pExternalSemaphoreProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceExternalSemaphorePropertiesKHR(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceExternalSemaphorePropertiesKHR(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
+    }
+    DispatchGetPhysicalDeviceExternalSemaphorePropertiesKHR(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceExternalSemaphorePropertiesKHR(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
+    }
+}
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+VKAPI_ATTR VkResult VKAPI_CALL ImportSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreWin32HandleInfoKHR*  pImportSemaphoreWin32HandleInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateImportSemaphoreWin32HandleKHR(device, pImportSemaphoreWin32HandleInfo);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordImportSemaphoreWin32HandleKHR(device, pImportSemaphoreWin32HandleInfo);
+    }
+    VkResult result = DispatchImportSemaphoreWin32HandleKHR(device, pImportSemaphoreWin32HandleInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordImportSemaphoreWin32HandleKHR(device, pImportSemaphoreWin32HandleInfo, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetWin32HandleInfoKHR*     pGetWin32HandleInfo,
+    HANDLE*                                     pHandle) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetSemaphoreWin32HandleKHR(device, pGetWin32HandleInfo, pHandle);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetSemaphoreWin32HandleKHR(device, pGetWin32HandleInfo, pHandle);
+    }
+    VkResult result = DispatchGetSemaphoreWin32HandleKHR(device, pGetWin32HandleInfo, pHandle);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetSemaphoreWin32HandleKHR(device, pGetWin32HandleInfo, pHandle, result);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+
+VKAPI_ATTR VkResult VKAPI_CALL ImportSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreFdInfoKHR*           pImportSemaphoreFdInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateImportSemaphoreFdKHR(device, pImportSemaphoreFdInfo);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordImportSemaphoreFdKHR(device, pImportSemaphoreFdInfo);
+    }
+    VkResult result = DispatchImportSemaphoreFdKHR(device, pImportSemaphoreFdInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordImportSemaphoreFdKHR(device, pImportSemaphoreFdInfo, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetFdInfoKHR*              pGetFdInfo,
+    int*                                        pFd) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetSemaphoreFdKHR(device, pGetFdInfo, pFd);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetSemaphoreFdKHR(device, pGetFdInfo, pFd);
+    }
+    VkResult result = DispatchGetSemaphoreFdKHR(device, pGetFdInfo, pFd);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetSemaphoreFdKHR(device, pGetFdInfo, pFd, result);
+    }
+    return result;
+}
+
+
+VKAPI_ATTR void VKAPI_CALL CmdPushDescriptorSetKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdPushDescriptorSetKHR(commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdPushDescriptorSetKHR(commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
+    }
+    DispatchCmdPushDescriptorSetKHR(commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdPushDescriptorSetKHR(commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdPushDescriptorSetWithTemplateKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    const void*                                 pData) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdPushDescriptorSetWithTemplateKHR(commandBuffer, descriptorUpdateTemplate, layout, set, pData);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdPushDescriptorSetWithTemplateKHR(commandBuffer, descriptorUpdateTemplate, layout, set, pData);
+    }
+    DispatchCmdPushDescriptorSetWithTemplateKHR(commandBuffer, descriptorUpdateTemplate, layout, set, pData);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdPushDescriptorSetWithTemplateKHR(commandBuffer, descriptorUpdateTemplate, layout, set, pData);
+    }
+}
+
+
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateDescriptorUpdateTemplateKHR(device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateDescriptorUpdateTemplateKHR(device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
+    }
+    VkResult result = DispatchCreateDescriptorUpdateTemplateKHR(device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateDescriptorUpdateTemplateKHR(device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroyDescriptorUpdateTemplateKHR(device, descriptorUpdateTemplate, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroyDescriptorUpdateTemplateKHR(device, descriptorUpdateTemplate, pAllocator);
+    }
+    DispatchDestroyDescriptorUpdateTemplateKHR(device, descriptorUpdateTemplate, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroyDescriptorUpdateTemplateKHR(device, descriptorUpdateTemplate, pAllocator);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSetWithTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateUpdateDescriptorSetWithTemplateKHR(device, descriptorSet, descriptorUpdateTemplate, pData);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordUpdateDescriptorSetWithTemplateKHR(device, descriptorSet, descriptorUpdateTemplate, pData);
+    }
+    DispatchUpdateDescriptorSetWithTemplateKHR(device, descriptorSet, descriptorUpdateTemplate, pData);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordUpdateDescriptorSetWithTemplateKHR(device, descriptorSet, descriptorUpdateTemplate, pData);
+    }
+}
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateRenderPass2KHR(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo2KHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateRenderPass2KHR(device, pCreateInfo, pAllocator, pRenderPass);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateRenderPass2KHR(device, pCreateInfo, pAllocator, pRenderPass);
+    }
+    VkResult result = DispatchCreateRenderPass2KHR(device, pCreateInfo, pAllocator, pRenderPass);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateRenderPass2KHR(device, pCreateInfo, pAllocator, pRenderPass, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdBeginRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdBeginRenderPass2KHR(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdBeginRenderPass2KHR(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
+    }
+    DispatchCmdBeginRenderPass2KHR(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdBeginRenderPass2KHR(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdNextSubpass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdNextSubpass2KHR(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdNextSubpass2KHR(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
+    }
+    DispatchCmdNextSubpass2KHR(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdNextSubpass2KHR(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdEndRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdEndRenderPass2KHR(commandBuffer, pSubpassEndInfo);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdEndRenderPass2KHR(commandBuffer, pSubpassEndInfo);
+    }
+    DispatchCmdEndRenderPass2KHR(commandBuffer, pSubpassEndInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdEndRenderPass2KHR(commandBuffer, pSubpassEndInfo);
+    }
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainStatusKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetSwapchainStatusKHR(device, swapchain);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetSwapchainStatusKHR(device, swapchain);
+    }
+    VkResult result = DispatchGetSwapchainStatusKHR(device, swapchain);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetSwapchainStatusKHR(device, swapchain, result);
+    }
+    return result;
+}
+
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalFencePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalFenceInfo*    pExternalFenceInfo,
+    VkExternalFenceProperties*                  pExternalFenceProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceExternalFencePropertiesKHR(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceExternalFencePropertiesKHR(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
+    }
+    DispatchGetPhysicalDeviceExternalFencePropertiesKHR(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceExternalFencePropertiesKHR(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
+    }
+}
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+VKAPI_ATTR VkResult VKAPI_CALL ImportFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportFenceWin32HandleInfoKHR*      pImportFenceWin32HandleInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateImportFenceWin32HandleKHR(device, pImportFenceWin32HandleInfo);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordImportFenceWin32HandleKHR(device, pImportFenceWin32HandleInfo);
+    }
+    VkResult result = DispatchImportFenceWin32HandleKHR(device, pImportFenceWin32HandleInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordImportFenceWin32HandleKHR(device, pImportFenceWin32HandleInfo, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkFenceGetWin32HandleInfoKHR*         pGetWin32HandleInfo,
+    HANDLE*                                     pHandle) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetFenceWin32HandleKHR(device, pGetWin32HandleInfo, pHandle);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetFenceWin32HandleKHR(device, pGetWin32HandleInfo, pHandle);
+    }
+    VkResult result = DispatchGetFenceWin32HandleKHR(device, pGetWin32HandleInfo, pHandle);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetFenceWin32HandleKHR(device, pGetWin32HandleInfo, pHandle, result);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+
+VKAPI_ATTR VkResult VKAPI_CALL ImportFenceFdKHR(
+    VkDevice                                    device,
+    const VkImportFenceFdInfoKHR*               pImportFenceFdInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateImportFenceFdKHR(device, pImportFenceFdInfo);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordImportFenceFdKHR(device, pImportFenceFdInfo);
+    }
+    VkResult result = DispatchImportFenceFdKHR(device, pImportFenceFdInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordImportFenceFdKHR(device, pImportFenceFdInfo, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetFenceFdKHR(
+    VkDevice                                    device,
+    const VkFenceGetFdInfoKHR*                  pGetFdInfo,
+    int*                                        pFd) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetFenceFdKHR(device, pGetFdInfo, pFd);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetFenceFdKHR(device, pGetFdInfo, pFd);
+    }
+    VkResult result = DispatchGetFenceFdKHR(device, pGetFdInfo, pFd);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetFenceFdKHR(device, pGetFdInfo, pFd, result);
+    }
+    return result;
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t*                                   pCounterCount,
+    VkPerformanceCounterKHR*                    pCounters,
+    VkPerformanceCounterDescriptionKHR*         pCounterDescriptions) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions);
+    }
+    VkResult result = DispatchEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkQueryPoolPerformanceCreateInfoKHR*  pPerformanceQueryCreateInfo,
+    uint32_t*                                   pNumPasses) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physicalDevice, pPerformanceQueryCreateInfo, pNumPasses);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physicalDevice, pPerformanceQueryCreateInfo, pNumPasses);
+    }
+    DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physicalDevice, pPerformanceQueryCreateInfo, pNumPasses);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physicalDevice, pPerformanceQueryCreateInfo, pNumPasses);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL AcquireProfilingLockKHR(
+    VkDevice                                    device,
+    const VkAcquireProfilingLockInfoKHR*        pInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateAcquireProfilingLockKHR(device, pInfo);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordAcquireProfilingLockKHR(device, pInfo);
+    }
+    VkResult result = DispatchAcquireProfilingLockKHR(device, pInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordAcquireProfilingLockKHR(device, pInfo, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL ReleaseProfilingLockKHR(
+    VkDevice                                    device) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateReleaseProfilingLockKHR(device);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordReleaseProfilingLockKHR(device);
+    }
+    DispatchReleaseProfilingLockKHR(device);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordReleaseProfilingLockKHR(device);
+    }
+}
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilities2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkSurfaceCapabilities2KHR*                  pSurfaceCapabilities) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceSurfaceCapabilities2KHR(physicalDevice, pSurfaceInfo, pSurfaceCapabilities);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(physicalDevice, pSurfaceInfo, pSurfaceCapabilities);
+    }
+    VkResult result = DispatchGetPhysicalDeviceSurfaceCapabilities2KHR(physicalDevice, pSurfaceInfo, pSurfaceCapabilities);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(physicalDevice, pSurfaceInfo, pSurfaceCapabilities, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceFormats2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormat2KHR*                        pSurfaceFormats) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceSurfaceFormats2KHR(physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceSurfaceFormats2KHR(physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats);
+    }
+    VkResult result = DispatchGetPhysicalDeviceSurfaceFormats2KHR(physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats, result);
+    }
+    return result;
+}
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceDisplayProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayProperties2KHR*                    pProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceDisplayProperties2KHR(physicalDevice, pPropertyCount, pProperties);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceDisplayProperties2KHR(physicalDevice, pPropertyCount, pProperties);
+    }
+    VkResult result = DispatchGetPhysicalDeviceDisplayProperties2KHR(physicalDevice, pPropertyCount, pProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceDisplayProperties2KHR(physicalDevice, pPropertyCount, pProperties, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceDisplayPlaneProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPlaneProperties2KHR*               pProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceDisplayPlaneProperties2KHR(physicalDevice, pPropertyCount, pProperties);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(physicalDevice, pPropertyCount, pProperties);
+    }
+    VkResult result = DispatchGetPhysicalDeviceDisplayPlaneProperties2KHR(physicalDevice, pPropertyCount, pProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(physicalDevice, pPropertyCount, pProperties, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetDisplayModeProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModeProperties2KHR*                pProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetDisplayModeProperties2KHR(physicalDevice, display, pPropertyCount, pProperties);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetDisplayModeProperties2KHR(physicalDevice, display, pPropertyCount, pProperties);
+    }
+    VkResult result = DispatchGetDisplayModeProperties2KHR(physicalDevice, display, pPropertyCount, pProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetDisplayModeProperties2KHR(physicalDevice, display, pPropertyCount, pProperties, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetDisplayPlaneCapabilities2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDisplayPlaneInfo2KHR*               pDisplayPlaneInfo,
+    VkDisplayPlaneCapabilities2KHR*             pCapabilities) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetDisplayPlaneCapabilities2KHR(physicalDevice, pDisplayPlaneInfo, pCapabilities);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetDisplayPlaneCapabilities2KHR(physicalDevice, pDisplayPlaneInfo, pCapabilities);
+    }
+    VkResult result = DispatchGetDisplayPlaneCapabilities2KHR(physicalDevice, pDisplayPlaneInfo, pCapabilities);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetDisplayPlaneCapabilities2KHR(physicalDevice, pDisplayPlaneInfo, pCapabilities, result);
+    }
+    return result;
+}
+
+
+
+
+
+VKAPI_ATTR void VKAPI_CALL GetImageMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetImageMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetImageMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
+    }
+    DispatchGetImageMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetImageMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetBufferMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetBufferMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetBufferMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
+    }
+    DispatchGetBufferMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetBufferMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetImageSparseMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetImageSparseMemoryRequirements2KHR(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetImageSparseMemoryRequirements2KHR(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
+    }
+    DispatchGetImageSparseMemoryRequirements2KHR(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetImageSparseMemoryRequirements2KHR(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
+    }
+}
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateSamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateSamplerYcbcrConversionKHR(device, pCreateInfo, pAllocator, pYcbcrConversion);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateSamplerYcbcrConversionKHR(device, pCreateInfo, pAllocator, pYcbcrConversion);
+    }
+    VkResult result = DispatchCreateSamplerYcbcrConversionKHR(device, pCreateInfo, pAllocator, pYcbcrConversion);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateSamplerYcbcrConversionKHR(device, pCreateInfo, pAllocator, pYcbcrConversion, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroySamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroySamplerYcbcrConversionKHR(device, ycbcrConversion, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroySamplerYcbcrConversionKHR(device, ycbcrConversion, pAllocator);
+    }
+    DispatchDestroySamplerYcbcrConversionKHR(device, ycbcrConversion, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroySamplerYcbcrConversionKHR(device, ycbcrConversion, pAllocator);
+    }
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateBindBufferMemory2KHR(device, bindInfoCount, pBindInfos);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordBindBufferMemory2KHR(device, bindInfoCount, pBindInfos);
+    }
+    VkResult result = DispatchBindBufferMemory2KHR(device, bindInfoCount, pBindInfos);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordBindBufferMemory2KHR(device, bindInfoCount, pBindInfos, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateBindImageMemory2KHR(device, bindInfoCount, pBindInfos);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordBindImageMemory2KHR(device, bindInfoCount, pBindInfos);
+    }
+    VkResult result = DispatchBindImageMemory2KHR(device, bindInfoCount, pBindInfos);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordBindImageMemory2KHR(device, bindInfoCount, pBindInfos, result);
+    }
+    return result;
+}
+
+
+VKAPI_ATTR void VKAPI_CALL GetDescriptorSetLayoutSupportKHR(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetDescriptorSetLayoutSupportKHR(device, pCreateInfo, pSupport);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetDescriptorSetLayoutSupportKHR(device, pCreateInfo, pSupport);
+    }
+    DispatchGetDescriptorSetLayoutSupportKHR(device, pCreateInfo, pSupport);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetDescriptorSetLayoutSupportKHR(device, pCreateInfo, pSupport);
+    }
+}
+
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdDrawIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdDrawIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+    }
+    DispatchCmdDrawIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdDrawIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdDrawIndexedIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdDrawIndexedIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+    }
+    DispatchCmdDrawIndexedIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdDrawIndexedIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+    }
+}
+
+
+
+
+
+
+
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetSemaphoreCounterValueKHR(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    uint64_t*                                   pValue) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetSemaphoreCounterValueKHR(device, semaphore, pValue);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetSemaphoreCounterValueKHR(device, semaphore, pValue);
+    }
+    VkResult result = DispatchGetSemaphoreCounterValueKHR(device, semaphore, pValue);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetSemaphoreCounterValueKHR(device, semaphore, pValue, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL WaitSemaphoresKHR(
+    VkDevice                                    device,
+    const VkSemaphoreWaitInfoKHR*               pWaitInfo,
+    uint64_t                                    timeout) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateWaitSemaphoresKHR(device, pWaitInfo, timeout);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordWaitSemaphoresKHR(device, pWaitInfo, timeout);
+    }
+    VkResult result = DispatchWaitSemaphoresKHR(device, pWaitInfo, timeout);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordWaitSemaphoresKHR(device, pWaitInfo, timeout, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL SignalSemaphoreKHR(
+    VkDevice                                    device,
+    const VkSemaphoreSignalInfoKHR*             pSignalInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateSignalSemaphoreKHR(device, pSignalInfo);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordSignalSemaphoreKHR(device, pSignalInfo);
+    }
+    VkResult result = DispatchSignalSemaphoreKHR(device, pSignalInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordSignalSemaphoreKHR(device, pSignalInfo, result);
+    }
+    return result;
+}
+
+
+
+
+
+
+
+VKAPI_ATTR VkDeviceAddress VKAPI_CALL GetBufferDeviceAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetBufferDeviceAddressKHR(device, pInfo);
+        if (skip) return 0;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetBufferDeviceAddressKHR(device, pInfo);
+    }
+    VkDeviceAddress result = DispatchGetBufferDeviceAddressKHR(device, pInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetBufferDeviceAddressKHR(device, pInfo, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR uint64_t VKAPI_CALL GetBufferOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetBufferOpaqueCaptureAddressKHR(device, pInfo);
+        if (skip) return 0;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetBufferOpaqueCaptureAddressKHR(device, pInfo);
+    }
+    uint64_t result = DispatchGetBufferOpaqueCaptureAddressKHR(device, pInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetBufferOpaqueCaptureAddressKHR(device, pInfo);
+    }
+    return result;
+}
+
+VKAPI_ATTR uint64_t VKAPI_CALL GetDeviceMemoryOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetDeviceMemoryOpaqueCaptureAddressKHR(device, pInfo);
+        if (skip) return 0;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetDeviceMemoryOpaqueCaptureAddressKHR(device, pInfo);
+    }
+    uint64_t result = DispatchGetDeviceMemoryOpaqueCaptureAddressKHR(device, pInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetDeviceMemoryOpaqueCaptureAddressKHR(device, pInfo);
+    }
+    return result;
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPipelineExecutablePropertiesKHR(
+    VkDevice                                    device,
+    const VkPipelineInfoKHR*                    pPipelineInfo,
+    uint32_t*                                   pExecutableCount,
+    VkPipelineExecutablePropertiesKHR*          pProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPipelineExecutablePropertiesKHR(device, pPipelineInfo, pExecutableCount, pProperties);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPipelineExecutablePropertiesKHR(device, pPipelineInfo, pExecutableCount, pProperties);
+    }
+    VkResult result = DispatchGetPipelineExecutablePropertiesKHR(device, pPipelineInfo, pExecutableCount, pProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPipelineExecutablePropertiesKHR(device, pPipelineInfo, pExecutableCount, pProperties, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPipelineExecutableStatisticsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pStatisticCount,
+    VkPipelineExecutableStatisticKHR*           pStatistics) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPipelineExecutableStatisticsKHR(device, pExecutableInfo, pStatisticCount, pStatistics);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPipelineExecutableStatisticsKHR(device, pExecutableInfo, pStatisticCount, pStatistics);
+    }
+    VkResult result = DispatchGetPipelineExecutableStatisticsKHR(device, pExecutableInfo, pStatisticCount, pStatistics);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPipelineExecutableStatisticsKHR(device, pExecutableInfo, pStatisticCount, pStatistics, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPipelineExecutableInternalRepresentationsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pInternalRepresentationCount,
+    VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPipelineExecutableInternalRepresentationsKHR(device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPipelineExecutableInternalRepresentationsKHR(device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations);
+    }
+    VkResult result = DispatchGetPipelineExecutableInternalRepresentationsKHR(device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPipelineExecutableInternalRepresentationsKHR(device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations, result);
+    }
+    return result;
+}
+
+
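+// In addition to the standard pattern, CreateDebugReportCallbackEXT and
+// DestroyDebugReportCallbackEXT register and unregister the application's callback with the
+// layer's own report_data (layer_create_report_callback / layer_destroy_callback) so that
+// validation messages can be delivered through VK_EXT_debug_report.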
+VKAPI_ATTR VkResult VKAPI_CALL CreateDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    const VkDebugReportCallbackCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugReportCallbackEXT*                   pCallback) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pCallback);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pCallback);
+    }
+    VkResult result = DispatchCreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pCallback);
+    layer_create_report_callback(layer_data->report_data, false, pCreateInfo, pAllocator, pCallback);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pCallback, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    VkDebugReportCallbackEXT                    callback,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroyDebugReportCallbackEXT(instance, callback, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroyDebugReportCallbackEXT(instance, callback, pAllocator);
+    }
+    DispatchDestroyDebugReportCallbackEXT(instance, callback, pAllocator);
+    layer_destroy_callback(layer_data->report_data, callback, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroyDebugReportCallbackEXT(instance, callback, pAllocator);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL DebugReportMessageEXT(
+    VkInstance                                  instance,
+    VkDebugReportFlagsEXT                       flags,
+    VkDebugReportObjectTypeEXT                  objectType,
+    uint64_t                                    object,
+    size_t                                      location,
+    int32_t                                     messageCode,
+    const char*                                 pLayerPrefix,
+    const char*                                 pMessage) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDebugReportMessageEXT(instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDebugReportMessageEXT(instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage);
+    }
+    DispatchDebugReportMessageEXT(instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDebugReportMessageEXT(instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage);
+    }
+}
+
+
+
+
+
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL DebugMarkerSetObjectTagEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectTagInfoEXT*        pTagInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDebugMarkerSetObjectTagEXT(device, pTagInfo);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDebugMarkerSetObjectTagEXT(device, pTagInfo);
+    }
+    VkResult result = DispatchDebugMarkerSetObjectTagEXT(device, pTagInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDebugMarkerSetObjectTagEXT(device, pTagInfo, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL DebugMarkerSetObjectNameEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectNameInfoEXT*       pNameInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDebugMarkerSetObjectNameEXT(device, pNameInfo);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDebugMarkerSetObjectNameEXT(device, pNameInfo);
+    }
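+    // Record the debug name in the layer's report data before dispatch so subsequent
+    // validation messages can refer to the object by name.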
+    layer_data->report_data->DebugReportSetMarkerObjectName(pNameInfo);
+    VkResult result = DispatchDebugMarkerSetObjectNameEXT(device, pNameInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDebugMarkerSetObjectNameEXT(device, pNameInfo, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerBeginEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdDebugMarkerBeginEXT(commandBuffer, pMarkerInfo);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdDebugMarkerBeginEXT(commandBuffer, pMarkerInfo);
+    }
+    DispatchCmdDebugMarkerBeginEXT(commandBuffer, pMarkerInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdDebugMarkerBeginEXT(commandBuffer, pMarkerInfo);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerEndEXT(
+    VkCommandBuffer                             commandBuffer) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdDebugMarkerEndEXT(commandBuffer);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdDebugMarkerEndEXT(commandBuffer);
+    }
+    DispatchCmdDebugMarkerEndEXT(commandBuffer);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdDebugMarkerEndEXT(commandBuffer);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerInsertEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdDebugMarkerInsertEXT(commandBuffer, pMarkerInfo);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdDebugMarkerInsertEXT(commandBuffer, pMarkerInfo);
+    }
+    DispatchCmdDebugMarkerInsertEXT(commandBuffer, pMarkerInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdDebugMarkerInsertEXT(commandBuffer, pMarkerInfo);
+    }
+}
+
+
+
+
+VKAPI_ATTR void VKAPI_CALL CmdBindTransformFeedbackBuffersEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets,
+    const VkDeviceSize*                         pSizes) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdBindTransformFeedbackBuffersEXT(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdBindTransformFeedbackBuffersEXT(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes);
+    }
+    DispatchCmdBindTransformFeedbackBuffersEXT(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdBindTransformFeedbackBuffersEXT(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdBeginTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdBeginTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdBeginTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets);
+    }
+    DispatchCmdBeginTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdBeginTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdEndTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdEndTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdEndTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets);
+    }
+    DispatchCmdEndTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdEndTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdBeginQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags,
+    uint32_t                                    index) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdBeginQueryIndexedEXT(commandBuffer, queryPool, query, flags, index);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdBeginQueryIndexedEXT(commandBuffer, queryPool, query, flags, index);
+    }
+    DispatchCmdBeginQueryIndexedEXT(commandBuffer, queryPool, query, flags, index);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdBeginQueryIndexedEXT(commandBuffer, queryPool, query, flags, index);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdEndQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    uint32_t                                    index) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdEndQueryIndexedEXT(commandBuffer, queryPool, query, index);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdEndQueryIndexedEXT(commandBuffer, queryPool, query, index);
+    }
+    DispatchCmdEndQueryIndexedEXT(commandBuffer, queryPool, query, index);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdEndQueryIndexedEXT(commandBuffer, queryPool, query, index);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawIndirectByteCountEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstInstance,
+    VkBuffer                                    counterBuffer,
+    VkDeviceSize                                counterBufferOffset,
+    uint32_t                                    counterOffset,
+    uint32_t                                    vertexStride) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdDrawIndirectByteCountEXT(commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdDrawIndirectByteCountEXT(commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride);
+    }
+    DispatchCmdDrawIndirectByteCountEXT(commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdDrawIndirectByteCountEXT(commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride);
+    }
+}
+
+
+VKAPI_ATTR uint32_t VKAPI_CALL GetImageViewHandleNVX(
+    VkDevice                                    device,
+    const VkImageViewHandleInfoNVX*             pInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetImageViewHandleNVX(device, pInfo);
+        if (skip) return 0;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetImageViewHandleNVX(device, pInfo);
+    }
+    uint32_t result = DispatchGetImageViewHandleNVX(device, pInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetImageViewHandleNVX(device, pInfo);
+    }
+    return result;
+}
+
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdDrawIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdDrawIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+    }
+    DispatchCmdDrawIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdDrawIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdDrawIndexedIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdDrawIndexedIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+    }
+    DispatchCmdDrawIndexedIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdDrawIndexedIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+    }
+}
+
+
+
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetShaderInfoAMD(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    VkShaderStageFlagBits                       shaderStage,
+    VkShaderInfoTypeAMD                         infoType,
+    size_t*                                     pInfoSize,
+    void*                                       pInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetShaderInfoAMD(device, pipeline, shaderStage, infoType, pInfoSize, pInfo);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetShaderInfoAMD(device, pipeline, shaderStage, infoType, pInfoSize, pInfo);
+    }
+    VkResult result = DispatchGetShaderInfoAMD(device, pipeline, shaderStage, infoType, pInfoSize, pInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetShaderInfoAMD(device, pipeline, shaderStage, infoType, pInfoSize, pInfo, result);
+    }
+    return result;
+}
+
+
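+// The platform-specific entry points below are only compiled when the corresponding
+// VK_USE_PLATFORM_* macro is defined for the target platform.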
+#ifdef VK_USE_PLATFORM_GGP
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateStreamDescriptorSurfaceGGP(
+    VkInstance                                  instance,
+    const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateStreamDescriptorSurfaceGGP(instance, pCreateInfo, pAllocator, pSurface);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateStreamDescriptorSurfaceGGP(instance, pCreateInfo, pAllocator, pSurface);
+    }
+    VkResult result = DispatchCreateStreamDescriptorSurfaceGGP(instance, pCreateInfo, pAllocator, pSurface);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateStreamDescriptorSurfaceGGP(instance, pCreateInfo, pAllocator, pSurface, result);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_GGP
+
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceExternalImageFormatPropertiesNV(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageCreateFlags                          flags,
+    VkExternalMemoryHandleTypeFlagsNV           externalHandleType,
+    VkExternalImageFormatPropertiesNV*          pExternalImageFormatProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceExternalImageFormatPropertiesNV(physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceExternalImageFormatPropertiesNV(physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties);
+    }
+    VkResult result = DispatchGetPhysicalDeviceExternalImageFormatPropertiesNV(physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceExternalImageFormatPropertiesNV(physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties, result);
+    }
+    return result;
+}
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+VKAPI_ATTR VkResult VKAPI_CALL GetMemoryWin32HandleNV(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkExternalMemoryHandleTypeFlagsNV           handleType,
+    HANDLE*                                     pHandle) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetMemoryWin32HandleNV(device, memory, handleType, pHandle);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetMemoryWin32HandleNV(device, memory, handleType, pHandle);
+    }
+    VkResult result = DispatchGetMemoryWin32HandleNV(device, memory, handleType, pHandle);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetMemoryWin32HandleNV(device, memory, handleType, pHandle, result);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+
+#ifdef VK_USE_PLATFORM_VI_NN
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateViSurfaceNN(
+    VkInstance                                  instance,
+    const VkViSurfaceCreateInfoNN*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateViSurfaceNN(instance, pCreateInfo, pAllocator, pSurface);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateViSurfaceNN(instance, pCreateInfo, pAllocator, pSurface);
+    }
+    VkResult result = DispatchCreateViSurfaceNN(instance, pCreateInfo, pAllocator, pSurface);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateViSurfaceNN(instance, pCreateInfo, pAllocator, pSurface, result);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_VI_NN
+
+
+
+
+
+
+VKAPI_ATTR void VKAPI_CALL CmdBeginConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkConditionalRenderingBeginInfoEXT*   pConditionalRenderingBegin) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdBeginConditionalRenderingEXT(commandBuffer, pConditionalRenderingBegin);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdBeginConditionalRenderingEXT(commandBuffer, pConditionalRenderingBegin);
+    }
+    DispatchCmdBeginConditionalRenderingEXT(commandBuffer, pConditionalRenderingBegin);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdBeginConditionalRenderingEXT(commandBuffer, pConditionalRenderingBegin);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdEndConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdEndConditionalRenderingEXT(commandBuffer);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdEndConditionalRenderingEXT(commandBuffer);
+    }
+    DispatchCmdEndConditionalRenderingEXT(commandBuffer);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdEndConditionalRenderingEXT(commandBuffer);
+    }
+}
+
+
+VKAPI_ATTR void VKAPI_CALL CmdProcessCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdProcessCommandsInfoNVX*          pProcessCommandsInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdProcessCommandsNVX(commandBuffer, pProcessCommandsInfo);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdProcessCommandsNVX(commandBuffer, pProcessCommandsInfo);
+    }
+    DispatchCmdProcessCommandsNVX(commandBuffer, pProcessCommandsInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdProcessCommandsNVX(commandBuffer, pProcessCommandsInfo);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdReserveSpaceForCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdReserveSpaceForCommandsInfoNVX*  pReserveSpaceInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdReserveSpaceForCommandsNVX(commandBuffer, pReserveSpaceInfo);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdReserveSpaceForCommandsNVX(commandBuffer, pReserveSpaceInfo);
+    }
+    DispatchCmdReserveSpaceForCommandsNVX(commandBuffer, pReserveSpaceInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdReserveSpaceForCommandsNVX(commandBuffer, pReserveSpaceInfo);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkIndirectCommandsLayoutNVX*                pIndirectCommandsLayout) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateIndirectCommandsLayoutNVX(device, pCreateInfo, pAllocator, pIndirectCommandsLayout);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateIndirectCommandsLayoutNVX(device, pCreateInfo, pAllocator, pIndirectCommandsLayout);
+    }
+    VkResult result = DispatchCreateIndirectCommandsLayoutNVX(device, pCreateInfo, pAllocator, pIndirectCommandsLayout);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateIndirectCommandsLayoutNVX(device, pCreateInfo, pAllocator, pIndirectCommandsLayout, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    VkIndirectCommandsLayoutNVX                 indirectCommandsLayout,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroyIndirectCommandsLayoutNVX(device, indirectCommandsLayout, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroyIndirectCommandsLayoutNVX(device, indirectCommandsLayout, pAllocator);
+    }
+    DispatchDestroyIndirectCommandsLayoutNVX(device, indirectCommandsLayout, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroyIndirectCommandsLayoutNVX(device, indirectCommandsLayout, pAllocator);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateObjectTableNVX(
+    VkDevice                                    device,
+    const VkObjectTableCreateInfoNVX*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkObjectTableNVX*                           pObjectTable) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateObjectTableNVX(device, pCreateInfo, pAllocator, pObjectTable);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateObjectTableNVX(device, pCreateInfo, pAllocator, pObjectTable);
+    }
+    VkResult result = DispatchCreateObjectTableNVX(device, pCreateInfo, pAllocator, pObjectTable);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateObjectTableNVX(device, pCreateInfo, pAllocator, pObjectTable, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyObjectTableNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroyObjectTableNVX(device, objectTable, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroyObjectTableNVX(device, objectTable, pAllocator);
+    }
+    DispatchDestroyObjectTableNVX(device, objectTable, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroyObjectTableNVX(device, objectTable, pAllocator);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL RegisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectTableEntryNVX* const*         ppObjectTableEntries,
+    const uint32_t*                             pObjectIndices) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateRegisterObjectsNVX(device, objectTable, objectCount, ppObjectTableEntries, pObjectIndices);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordRegisterObjectsNVX(device, objectTable, objectCount, ppObjectTableEntries, pObjectIndices);
+    }
+    VkResult result = DispatchRegisterObjectsNVX(device, objectTable, objectCount, ppObjectTableEntries, pObjectIndices);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordRegisterObjectsNVX(device, objectTable, objectCount, ppObjectTableEntries, pObjectIndices, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL UnregisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectEntryTypeNVX*                 pObjectEntryTypes,
+    const uint32_t*                             pObjectIndices) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateUnregisterObjectsNVX(device, objectTable, objectCount, pObjectEntryTypes, pObjectIndices);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordUnregisterObjectsNVX(device, objectTable, objectCount, pObjectEntryTypes, pObjectIndices);
+    }
+    VkResult result = DispatchUnregisterObjectsNVX(device, objectTable, objectCount, pObjectEntryTypes, pObjectIndices);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordUnregisterObjectsNVX(device, objectTable, objectCount, pObjectEntryTypes, pObjectIndices, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceGeneratedCommandsPropertiesNVX(
+    VkPhysicalDevice                            physicalDevice,
+    VkDeviceGeneratedCommandsFeaturesNVX*       pFeatures,
+    VkDeviceGeneratedCommandsLimitsNVX*         pLimits) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceGeneratedCommandsPropertiesNVX(physicalDevice, pFeatures, pLimits);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceGeneratedCommandsPropertiesNVX(physicalDevice, pFeatures, pLimits);
+    }
+    DispatchGetPhysicalDeviceGeneratedCommandsPropertiesNVX(physicalDevice, pFeatures, pLimits);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceGeneratedCommandsPropertiesNVX(physicalDevice, pFeatures, pLimits);
+    }
+}
+
+
+VKAPI_ATTR void VKAPI_CALL CmdSetViewportWScalingNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewportWScalingNV*                 pViewportWScalings) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdSetViewportWScalingNV(commandBuffer, firstViewport, viewportCount, pViewportWScalings);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdSetViewportWScalingNV(commandBuffer, firstViewport, viewportCount, pViewportWScalings);
+    }
+    DispatchCmdSetViewportWScalingNV(commandBuffer, firstViewport, viewportCount, pViewportWScalings);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdSetViewportWScalingNV(commandBuffer, firstViewport, viewportCount, pViewportWScalings);
+    }
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL ReleaseDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateReleaseDisplayEXT(physicalDevice, display);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordReleaseDisplayEXT(physicalDevice, display);
+    }
+    VkResult result = DispatchReleaseDisplayEXT(physicalDevice, display);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordReleaseDisplayEXT(physicalDevice, display, result);
+    }
+    return result;
+}
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+VKAPI_ATTR VkResult VKAPI_CALL AcquireXlibDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    VkDisplayKHR                                display) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateAcquireXlibDisplayEXT(physicalDevice, dpy, display);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordAcquireXlibDisplayEXT(physicalDevice, dpy, display);
+    }
+    VkResult result = DispatchAcquireXlibDisplayEXT(physicalDevice, dpy, display);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordAcquireXlibDisplayEXT(physicalDevice, dpy, display, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetRandROutputDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    RROutput                                    rrOutput,
+    VkDisplayKHR*                               pDisplay) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetRandROutputDisplayEXT(physicalDevice, dpy, rrOutput, pDisplay);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetRandROutputDisplayEXT(physicalDevice, dpy, rrOutput, pDisplay);
+    }
+    VkResult result = DispatchGetRandROutputDisplayEXT(physicalDevice, dpy, rrOutput, pDisplay);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetRandROutputDisplayEXT(physicalDevice, dpy, rrOutput, pDisplay, result);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilities2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilities2EXT*                  pSurfaceCapabilities) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceSurfaceCapabilities2EXT(physicalDevice, surface, pSurfaceCapabilities);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(physicalDevice, surface, pSurfaceCapabilities);
+    }
+    VkResult result = DispatchGetPhysicalDeviceSurfaceCapabilities2EXT(physicalDevice, surface, pSurfaceCapabilities);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(physicalDevice, surface, pSurfaceCapabilities, result);
+    }
+    return result;
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL DisplayPowerControlEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayPowerInfoEXT*                pDisplayPowerInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDisplayPowerControlEXT(device, display, pDisplayPowerInfo);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDisplayPowerControlEXT(device, display, pDisplayPowerInfo);
+    }
+    VkResult result = DispatchDisplayPowerControlEXT(device, display, pDisplayPowerInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDisplayPowerControlEXT(device, display, pDisplayPowerInfo, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL RegisterDeviceEventEXT(
+    VkDevice                                    device,
+    const VkDeviceEventInfoEXT*                 pDeviceEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateRegisterDeviceEventEXT(device, pDeviceEventInfo, pAllocator, pFence);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordRegisterDeviceEventEXT(device, pDeviceEventInfo, pAllocator, pFence);
+    }
+    VkResult result = DispatchRegisterDeviceEventEXT(device, pDeviceEventInfo, pAllocator, pFence);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordRegisterDeviceEventEXT(device, pDeviceEventInfo, pAllocator, pFence, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL RegisterDisplayEventEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayEventInfoEXT*                pDisplayEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateRegisterDisplayEventEXT(device, display, pDisplayEventInfo, pAllocator, pFence);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordRegisterDisplayEventEXT(device, display, pDisplayEventInfo, pAllocator, pFence);
+    }
+    VkResult result = DispatchRegisterDisplayEventEXT(device, display, pDisplayEventInfo, pAllocator, pFence);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordRegisterDisplayEventEXT(device, display, pDisplayEventInfo, pAllocator, pFence, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainCounterEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkSurfaceCounterFlagBitsEXT                 counter,
+    uint64_t*                                   pCounterValue) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetSwapchainCounterEXT(device, swapchain, counter, pCounterValue);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetSwapchainCounterEXT(device, swapchain, counter, pCounterValue);
+    }
+    VkResult result = DispatchGetSwapchainCounterEXT(device, swapchain, counter, pCounterValue);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetSwapchainCounterEXT(device, swapchain, counter, pCounterValue, result);
+    }
+    return result;
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetRefreshCycleDurationGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkRefreshCycleDurationGOOGLE*               pDisplayTimingProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetRefreshCycleDurationGOOGLE(device, swapchain, pDisplayTimingProperties);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetRefreshCycleDurationGOOGLE(device, swapchain, pDisplayTimingProperties);
+    }
+    VkResult result = DispatchGetRefreshCycleDurationGOOGLE(device, swapchain, pDisplayTimingProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetRefreshCycleDurationGOOGLE(device, swapchain, pDisplayTimingProperties, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPastPresentationTimingGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pPresentationTimingCount,
+    VkPastPresentationTimingGOOGLE*             pPresentationTimings) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPastPresentationTimingGOOGLE(device, swapchain, pPresentationTimingCount, pPresentationTimings);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPastPresentationTimingGOOGLE(device, swapchain, pPresentationTimingCount, pPresentationTimings);
+    }
+    VkResult result = DispatchGetPastPresentationTimingGOOGLE(device, swapchain, pPresentationTimingCount, pPresentationTimings);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPastPresentationTimingGOOGLE(device, swapchain, pPresentationTimingCount, pPresentationTimings, result);
+    }
+    return result;
+}
+
+
+
+
+
+
+
+VKAPI_ATTR void VKAPI_CALL CmdSetDiscardRectangleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstDiscardRectangle,
+    uint32_t                                    discardRectangleCount,
+    const VkRect2D*                             pDiscardRectangles) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdSetDiscardRectangleEXT(commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdSetDiscardRectangleEXT(commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles);
+    }
+    DispatchCmdSetDiscardRectangleEXT(commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdSetDiscardRectangleEXT(commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles);
+    }
+}
+
+
+
+
+
+VKAPI_ATTR void VKAPI_CALL SetHdrMetadataEXT(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainKHR*                       pSwapchains,
+    const VkHdrMetadataEXT*                     pMetadata) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateSetHdrMetadataEXT(device, swapchainCount, pSwapchains, pMetadata);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordSetHdrMetadataEXT(device, swapchainCount, pSwapchains, pMetadata);
+    }
+    DispatchSetHdrMetadataEXT(device, swapchainCount, pSwapchains, pMetadata);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordSetHdrMetadataEXT(device, swapchainCount, pSwapchains, pMetadata);
+    }
+}
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateIOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkIOSSurfaceCreateInfoMVK*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateIOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateIOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
+    }
+    VkResult result = DispatchCreateIOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateIOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface, result);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_IOS_MVK
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateMacOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkMacOSSurfaceCreateInfoMVK*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateMacOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateMacOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
+    }
+    VkResult result = DispatchCreateMacOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateMacOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface, result);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_MACOS_MVK
+
+
+
+
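+// In addition to the common dispatch pattern, the VK_EXT_debug_utils wrappers
+// below also update the layer's own report_data: object names
+// (DebugReportSetUtilsObjectName), queue label begin/end/insert, and debug
+// messenger create/destroy callbacks.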
+VKAPI_ATTR VkResult VKAPI_CALL SetDebugUtilsObjectNameEXT(
+    VkDevice                                    device,
+    const VkDebugUtilsObjectNameInfoEXT*        pNameInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateSetDebugUtilsObjectNameEXT(device, pNameInfo);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordSetDebugUtilsObjectNameEXT(device, pNameInfo);
+    }
+    layer_data->report_data->DebugReportSetUtilsObjectName(pNameInfo);
+    VkResult result = DispatchSetDebugUtilsObjectNameEXT(device, pNameInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordSetDebugUtilsObjectNameEXT(device, pNameInfo, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL SetDebugUtilsObjectTagEXT(
+    VkDevice                                    device,
+    const VkDebugUtilsObjectTagInfoEXT*         pTagInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateSetDebugUtilsObjectTagEXT(device, pTagInfo);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordSetDebugUtilsObjectTagEXT(device, pTagInfo);
+    }
+    VkResult result = DispatchSetDebugUtilsObjectTagEXT(device, pTagInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordSetDebugUtilsObjectTagEXT(device, pTagInfo, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL QueueBeginDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateQueueBeginDebugUtilsLabelEXT(queue, pLabelInfo);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordQueueBeginDebugUtilsLabelEXT(queue, pLabelInfo);
+    }
+    BeginQueueDebugUtilsLabel(layer_data->report_data, queue, pLabelInfo);
+    DispatchQueueBeginDebugUtilsLabelEXT(queue, pLabelInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordQueueBeginDebugUtilsLabelEXT(queue, pLabelInfo);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL QueueEndDebugUtilsLabelEXT(
+    VkQueue                                     queue) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateQueueEndDebugUtilsLabelEXT(queue);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordQueueEndDebugUtilsLabelEXT(queue);
+    }
+    DispatchQueueEndDebugUtilsLabelEXT(queue);
+    EndQueueDebugUtilsLabel(layer_data->report_data, queue);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordQueueEndDebugUtilsLabelEXT(queue);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL QueueInsertDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateQueueInsertDebugUtilsLabelEXT(queue, pLabelInfo);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordQueueInsertDebugUtilsLabelEXT(queue, pLabelInfo);
+    }
+    InsertQueueDebugUtilsLabel(layer_data->report_data, queue, pLabelInfo);
+    DispatchQueueInsertDebugUtilsLabelEXT(queue, pLabelInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordQueueInsertDebugUtilsLabelEXT(queue, pLabelInfo);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdBeginDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdBeginDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdBeginDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
+    }
+    DispatchCmdBeginDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdBeginDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdEndDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdEndDebugUtilsLabelEXT(commandBuffer);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdEndDebugUtilsLabelEXT(commandBuffer);
+    }
+    DispatchCmdEndDebugUtilsLabelEXT(commandBuffer);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdEndDebugUtilsLabelEXT(commandBuffer);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdInsertDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdInsertDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdInsertDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
+    }
+    DispatchCmdInsertDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdInsertDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    const VkDebugUtilsMessengerCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugUtilsMessengerEXT*                   pMessenger) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateDebugUtilsMessengerEXT(instance, pCreateInfo, pAllocator, pMessenger);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateDebugUtilsMessengerEXT(instance, pCreateInfo, pAllocator, pMessenger);
+    }
+    VkResult result = DispatchCreateDebugUtilsMessengerEXT(instance, pCreateInfo, pAllocator, pMessenger);
+    layer_create_messenger_callback(layer_data->report_data, false, pCreateInfo, pAllocator, pMessenger);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateDebugUtilsMessengerEXT(instance, pCreateInfo, pAllocator, pMessenger, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessengerEXT                    messenger,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroyDebugUtilsMessengerEXT(instance, messenger, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroyDebugUtilsMessengerEXT(instance, messenger, pAllocator);
+    }
+    DispatchDestroyDebugUtilsMessengerEXT(instance, messenger, pAllocator);
+    layer_destroy_callback(layer_data->report_data, messenger, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroyDebugUtilsMessengerEXT(instance, messenger, pAllocator);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL SubmitDebugUtilsMessageEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessageSeverityFlagBitsEXT      messageSeverity,
+    VkDebugUtilsMessageTypeFlagsEXT             messageTypes,
+    const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateSubmitDebugUtilsMessageEXT(instance, messageSeverity, messageTypes, pCallbackData);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordSubmitDebugUtilsMessageEXT(instance, messageSeverity, messageTypes, pCallbackData);
+    }
+    DispatchSubmitDebugUtilsMessageEXT(instance, messageSeverity, messageTypes, pCallbackData);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordSubmitDebugUtilsMessageEXT(instance, messageSeverity, messageTypes, pCallbackData);
+    }
+}
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+VKAPI_ATTR VkResult VKAPI_CALL GetAndroidHardwareBufferPropertiesANDROID(
+    VkDevice                                    device,
+    const struct AHardwareBuffer*               buffer,
+    VkAndroidHardwareBufferPropertiesANDROID*   pProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetAndroidHardwareBufferPropertiesANDROID(device, buffer, pProperties);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetAndroidHardwareBufferPropertiesANDROID(device, buffer, pProperties);
+    }
+    VkResult result = DispatchGetAndroidHardwareBufferPropertiesANDROID(device, buffer, pProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(device, buffer, pProperties, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetMemoryAndroidHardwareBufferANDROID(
+    VkDevice                                    device,
+    const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
+    struct AHardwareBuffer**                    pBuffer) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetMemoryAndroidHardwareBufferANDROID(device, pInfo, pBuffer);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetMemoryAndroidHardwareBufferANDROID(device, pInfo, pBuffer);
+    }
+    VkResult result = DispatchGetMemoryAndroidHardwareBufferANDROID(device, pInfo, pBuffer);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetMemoryAndroidHardwareBufferANDROID(device, pInfo, pBuffer, result);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+
+
+
+
+
+
+
+VKAPI_ATTR void VKAPI_CALL CmdSetSampleLocationsEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkSampleLocationsInfoEXT*             pSampleLocationsInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdSetSampleLocationsEXT(commandBuffer, pSampleLocationsInfo);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdSetSampleLocationsEXT(commandBuffer, pSampleLocationsInfo);
+    }
+    DispatchCmdSetSampleLocationsEXT(commandBuffer, pSampleLocationsInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdSetSampleLocationsEXT(commandBuffer, pSampleLocationsInfo);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMultisamplePropertiesEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSampleCountFlagBits                       samples,
+    VkMultisamplePropertiesEXT*                 pMultisampleProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceMultisamplePropertiesEXT(physicalDevice, samples, pMultisampleProperties);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceMultisamplePropertiesEXT(physicalDevice, samples, pMultisampleProperties);
+    }
+    DispatchGetPhysicalDeviceMultisamplePropertiesEXT(physicalDevice, samples, pMultisampleProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceMultisamplePropertiesEXT(physicalDevice, samples, pMultisampleProperties);
+    }
+}
+
+
+
+
+
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetImageDrmFormatModifierPropertiesEXT(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkImageDrmFormatModifierPropertiesEXT*      pProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetImageDrmFormatModifierPropertiesEXT(device, image, pProperties);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetImageDrmFormatModifierPropertiesEXT(device, image, pProperties);
+    }
+    VkResult result = DispatchGetImageDrmFormatModifierPropertiesEXT(device, image, pProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetImageDrmFormatModifierPropertiesEXT(device, image, pProperties, result);
+    }
+    return result;
+}
+
+
+
+
+
+VKAPI_ATTR void VKAPI_CALL CmdBindShadingRateImageNV(
+    VkCommandBuffer                             commandBuffer,
+    VkImageView                                 imageView,
+    VkImageLayout                               imageLayout) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdBindShadingRateImageNV(commandBuffer, imageView, imageLayout);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdBindShadingRateImageNV(commandBuffer, imageView, imageLayout);
+    }
+    DispatchCmdBindShadingRateImageNV(commandBuffer, imageView, imageLayout);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdBindShadingRateImageNV(commandBuffer, imageView, imageLayout);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdSetViewportShadingRatePaletteNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkShadingRatePaletteNV*               pShadingRatePalettes) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdSetViewportShadingRatePaletteNV(commandBuffer, firstViewport, viewportCount, pShadingRatePalettes);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdSetViewportShadingRatePaletteNV(commandBuffer, firstViewport, viewportCount, pShadingRatePalettes);
+    }
+    DispatchCmdSetViewportShadingRatePaletteNV(commandBuffer, firstViewport, viewportCount, pShadingRatePalettes);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdSetViewportShadingRatePaletteNV(commandBuffer, firstViewport, viewportCount, pShadingRatePalettes);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdSetCoarseSampleOrderNV(
+    VkCommandBuffer                             commandBuffer,
+    VkCoarseSampleOrderTypeNV                   sampleOrderType,
+    uint32_t                                    customSampleOrderCount,
+    const VkCoarseSampleOrderCustomNV*          pCustomSampleOrders) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdSetCoarseSampleOrderNV(commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdSetCoarseSampleOrderNV(commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders);
+    }
+    DispatchCmdSetCoarseSampleOrderNV(commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdSetCoarseSampleOrderNV(commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders);
+    }
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateAccelerationStructureNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureCreateInfoNV*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkAccelerationStructureNV*                  pAccelerationStructure) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateAccelerationStructureNV(device, pCreateInfo, pAllocator, pAccelerationStructure);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateAccelerationStructureNV(device, pCreateInfo, pAllocator, pAccelerationStructure);
+    }
+    VkResult result = DispatchCreateAccelerationStructureNV(device, pCreateInfo, pAllocator, pAccelerationStructure);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateAccelerationStructureNV(device, pCreateInfo, pAllocator, pAccelerationStructure, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyAccelerationStructureNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroyAccelerationStructureNV(device, accelerationStructure, pAllocator);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroyAccelerationStructureNV(device, accelerationStructure, pAllocator);
+    }
+    DispatchDestroyAccelerationStructureNV(device, accelerationStructure, pAllocator);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroyAccelerationStructureNV(device, accelerationStructure, pAllocator);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetAccelerationStructureMemoryRequirementsNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
+    VkMemoryRequirements2KHR*                   pMemoryRequirements) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetAccelerationStructureMemoryRequirementsNV(device, pInfo, pMemoryRequirements);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetAccelerationStructureMemoryRequirementsNV(device, pInfo, pMemoryRequirements);
+    }
+    DispatchGetAccelerationStructureMemoryRequirementsNV(device, pInfo, pMemoryRequirements);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetAccelerationStructureMemoryRequirementsNV(device, pInfo, pMemoryRequirements);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL BindAccelerationStructureMemoryNV(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindAccelerationStructureMemoryInfoNV* pBindInfos) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateBindAccelerationStructureMemoryNV(device, bindInfoCount, pBindInfos);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordBindAccelerationStructureMemoryNV(device, bindInfoCount, pBindInfos);
+    }
+    VkResult result = DispatchBindAccelerationStructureMemoryNV(device, bindInfoCount, pBindInfos);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordBindAccelerationStructureMemoryNV(device, bindInfoCount, pBindInfos, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdBuildAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    const VkAccelerationStructureInfoNV*        pInfo,
+    VkBuffer                                    instanceData,
+    VkDeviceSize                                instanceOffset,
+    VkBool32                                    update,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkBuffer                                    scratch,
+    VkDeviceSize                                scratchOffset) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdBuildAccelerationStructureNV(commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdBuildAccelerationStructureNV(commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset);
+    }
+    DispatchCmdBuildAccelerationStructureNV(commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdBuildAccelerationStructureNV(commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdCopyAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkCopyAccelerationStructureModeNV           mode) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdCopyAccelerationStructureNV(commandBuffer, dst, src, mode);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdCopyAccelerationStructureNV(commandBuffer, dst, src, mode);
+    }
+    DispatchCmdCopyAccelerationStructureNV(commandBuffer, dst, src, mode);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdCopyAccelerationStructureNV(commandBuffer, dst, src, mode);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdTraceRaysNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    raygenShaderBindingTableBuffer,
+    VkDeviceSize                                raygenShaderBindingOffset,
+    VkBuffer                                    missShaderBindingTableBuffer,
+    VkDeviceSize                                missShaderBindingOffset,
+    VkDeviceSize                                missShaderBindingStride,
+    VkBuffer                                    hitShaderBindingTableBuffer,
+    VkDeviceSize                                hitShaderBindingOffset,
+    VkDeviceSize                                hitShaderBindingStride,
+    VkBuffer                                    callableShaderBindingTableBuffer,
+    VkDeviceSize                                callableShaderBindingOffset,
+    VkDeviceSize                                callableShaderBindingStride,
+    uint32_t                                    width,
+    uint32_t                                    height,
+    uint32_t                                    depth) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdTraceRaysNV(commandBuffer, raygenShaderBindingTableBuffer, raygenShaderBindingOffset, missShaderBindingTableBuffer, missShaderBindingOffset, missShaderBindingStride, hitShaderBindingTableBuffer, hitShaderBindingOffset, hitShaderBindingStride, callableShaderBindingTableBuffer, callableShaderBindingOffset, callableShaderBindingStride, width, height, depth);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdTraceRaysNV(commandBuffer, raygenShaderBindingTableBuffer, raygenShaderBindingOffset, missShaderBindingTableBuffer, missShaderBindingOffset, missShaderBindingStride, hitShaderBindingTableBuffer, hitShaderBindingOffset, hitShaderBindingStride, callableShaderBindingTableBuffer, callableShaderBindingOffset, callableShaderBindingStride, width, height, depth);
+    }
+    DispatchCmdTraceRaysNV(commandBuffer, raygenShaderBindingTableBuffer, raygenShaderBindingOffset, missShaderBindingTableBuffer, missShaderBindingOffset, missShaderBindingStride, hitShaderBindingTableBuffer, hitShaderBindingOffset, hitShaderBindingStride, callableShaderBindingTableBuffer, callableShaderBindingOffset, callableShaderBindingStride, width, height, depth);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdTraceRaysNV(commandBuffer, raygenShaderBindingTableBuffer, raygenShaderBindingOffset, missShaderBindingTableBuffer, missShaderBindingOffset, missShaderBindingStride, hitShaderBindingTableBuffer, hitShaderBindingOffset, hitShaderBindingStride, callableShaderBindingTableBuffer, callableShaderBindingOffset, callableShaderBindingStride, width, height, depth);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetRayTracingShaderGroupHandlesNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    firstGroup,
+    uint32_t                                    groupCount,
+    size_t                                      dataSize,
+    void*                                       pData) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetRayTracingShaderGroupHandlesNV(device, pipeline, firstGroup, groupCount, dataSize, pData);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetRayTracingShaderGroupHandlesNV(device, pipeline, firstGroup, groupCount, dataSize, pData);
+    }
+    VkResult result = DispatchGetRayTracingShaderGroupHandlesNV(device, pipeline, firstGroup, groupCount, dataSize, pData);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetRayTracingShaderGroupHandlesNV(device, pipeline, firstGroup, groupCount, dataSize, pData, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetAccelerationStructureHandleNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    size_t                                      dataSize,
+    void*                                       pData) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetAccelerationStructureHandleNV(device, accelerationStructure, dataSize, pData);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetAccelerationStructureHandleNV(device, accelerationStructure, dataSize, pData);
+    }
+    VkResult result = DispatchGetAccelerationStructureHandleNV(device, accelerationStructure, dataSize, pData);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetAccelerationStructureHandleNV(device, accelerationStructure, dataSize, pData, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdWriteAccelerationStructuresPropertiesNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    accelerationStructureCount,
+    const VkAccelerationStructureNV*            pAccelerationStructures,
+    VkQueryType                                 queryType,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdWriteAccelerationStructuresPropertiesNV(commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdWriteAccelerationStructuresPropertiesNV(commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery);
+    }
+    DispatchCmdWriteAccelerationStructuresPropertiesNV(commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdWriteAccelerationStructuresPropertiesNV(commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CompileDeferredNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    shader) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCompileDeferredNV(device, pipeline, shader);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCompileDeferredNV(device, pipeline, shader);
+    }
+    VkResult result = DispatchCompileDeferredNV(device, pipeline, shader);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCompileDeferredNV(device, pipeline, shader, result);
+    }
+    return result;
+}
+
+
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetMemoryHostPointerPropertiesEXT(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    const void*                                 pHostPointer,
+    VkMemoryHostPointerPropertiesEXT*           pMemoryHostPointerProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetMemoryHostPointerPropertiesEXT(device, handleType, pHostPointer, pMemoryHostPointerProperties);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetMemoryHostPointerPropertiesEXT(device, handleType, pHostPointer, pMemoryHostPointerProperties);
+    }
+    VkResult result = DispatchGetMemoryHostPointerPropertiesEXT(device, handleType, pHostPointer, pMemoryHostPointerProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetMemoryHostPointerPropertiesEXT(device, handleType, pHostPointer, pMemoryHostPointerProperties, result);
+    }
+    return result;
+}
+
+
+VKAPI_ATTR void VKAPI_CALL CmdWriteBufferMarkerAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    uint32_t                                    marker) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdWriteBufferMarkerAMD(commandBuffer, pipelineStage, dstBuffer, dstOffset, marker);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdWriteBufferMarkerAMD(commandBuffer, pipelineStage, dstBuffer, dstOffset, marker);
+    }
+    DispatchCmdWriteBufferMarkerAMD(commandBuffer, pipelineStage, dstBuffer, dstOffset, marker);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdWriteBufferMarkerAMD(commandBuffer, pipelineStage, dstBuffer, dstOffset, marker);
+    }
+}
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceCalibrateableTimeDomainsEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pTimeDomainCount,
+    VkTimeDomainEXT*                            pTimeDomains) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceCalibrateableTimeDomainsEXT(physicalDevice, pTimeDomainCount, pTimeDomains);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceCalibrateableTimeDomainsEXT(physicalDevice, pTimeDomainCount, pTimeDomains);
+    }
+    VkResult result = DispatchGetPhysicalDeviceCalibrateableTimeDomainsEXT(physicalDevice, pTimeDomainCount, pTimeDomains);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceCalibrateableTimeDomainsEXT(physicalDevice, pTimeDomainCount, pTimeDomains, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetCalibratedTimestampsEXT(
+    VkDevice                                    device,
+    uint32_t                                    timestampCount,
+    const VkCalibratedTimestampInfoEXT*         pTimestampInfos,
+    uint64_t*                                   pTimestamps,
+    uint64_t*                                   pMaxDeviation) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetCalibratedTimestampsEXT(device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetCalibratedTimestampsEXT(device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation);
+    }
+    VkResult result = DispatchGetCalibratedTimestampsEXT(device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetCalibratedTimestampsEXT(device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation, result);
+    }
+    return result;
+}
+
+
+
+
+#ifdef VK_USE_PLATFORM_GGP
+#endif // VK_USE_PLATFORM_GGP
+
+
+
+
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawMeshTasksNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    taskCount,
+    uint32_t                                    firstTask) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdDrawMeshTasksNV(commandBuffer, taskCount, firstTask);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdDrawMeshTasksNV(commandBuffer, taskCount, firstTask);
+    }
+    DispatchCmdDrawMeshTasksNV(commandBuffer, taskCount, firstTask);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdDrawMeshTasksNV(commandBuffer, taskCount, firstTask);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawMeshTasksIndirectNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdDrawMeshTasksIndirectNV(commandBuffer, buffer, offset, drawCount, stride);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdDrawMeshTasksIndirectNV(commandBuffer, buffer, offset, drawCount, stride);
+    }
+    DispatchCmdDrawMeshTasksIndirectNV(commandBuffer, buffer, offset, drawCount, stride);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdDrawMeshTasksIndirectNV(commandBuffer, buffer, offset, drawCount, stride);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawMeshTasksIndirectCountNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdDrawMeshTasksIndirectCountNV(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdDrawMeshTasksIndirectCountNV(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+    }
+    DispatchCmdDrawMeshTasksIndirectCountNV(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdDrawMeshTasksIndirectCountNV(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+    }
+}
+
+
+
+
+VKAPI_ATTR void VKAPI_CALL CmdSetExclusiveScissorNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstExclusiveScissor,
+    uint32_t                                    exclusiveScissorCount,
+    const VkRect2D*                             pExclusiveScissors) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdSetExclusiveScissorNV(commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdSetExclusiveScissorNV(commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors);
+    }
+    DispatchCmdSetExclusiveScissorNV(commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdSetExclusiveScissorNV(commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors);
+    }
+}
+
+
+VKAPI_ATTR void VKAPI_CALL CmdSetCheckpointNV(
+    VkCommandBuffer                             commandBuffer,
+    const void*                                 pCheckpointMarker) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdSetCheckpointNV(commandBuffer, pCheckpointMarker);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdSetCheckpointNV(commandBuffer, pCheckpointMarker);
+    }
+    DispatchCmdSetCheckpointNV(commandBuffer, pCheckpointMarker);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdSetCheckpointNV(commandBuffer, pCheckpointMarker);
+    }
+}
+
+VKAPI_ATTR void VKAPI_CALL GetQueueCheckpointDataNV(
+    VkQueue                                     queue,
+    uint32_t*                                   pCheckpointDataCount,
+    VkCheckpointDataNV*                         pCheckpointData) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetQueueCheckpointDataNV(queue, pCheckpointDataCount, pCheckpointData);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetQueueCheckpointDataNV(queue, pCheckpointDataCount, pCheckpointData);
+    }
+    DispatchGetQueueCheckpointDataNV(queue, pCheckpointDataCount, pCheckpointData);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetQueueCheckpointDataNV(queue, pCheckpointDataCount, pCheckpointData);
+    }
+}
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL InitializePerformanceApiINTEL(
+    VkDevice                                    device,
+    const VkInitializePerformanceApiInfoINTEL*  pInitializeInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateInitializePerformanceApiINTEL(device, pInitializeInfo);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordInitializePerformanceApiINTEL(device, pInitializeInfo);
+    }
+    VkResult result = DispatchInitializePerformanceApiINTEL(device, pInitializeInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordInitializePerformanceApiINTEL(device, pInitializeInfo, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL UninitializePerformanceApiINTEL(
+    VkDevice                                    device) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateUninitializePerformanceApiINTEL(device);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordUninitializePerformanceApiINTEL(device);
+    }
+    DispatchUninitializePerformanceApiINTEL(device);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordUninitializePerformanceApiINTEL(device);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CmdSetPerformanceMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceMarkerInfoINTEL*         pMarkerInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdSetPerformanceMarkerINTEL(commandBuffer, pMarkerInfo);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdSetPerformanceMarkerINTEL(commandBuffer, pMarkerInfo);
+    }
+    VkResult result = DispatchCmdSetPerformanceMarkerINTEL(commandBuffer, pMarkerInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdSetPerformanceMarkerINTEL(commandBuffer, pMarkerInfo, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CmdSetPerformanceStreamMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceStreamMarkerInfoINTEL*   pMarkerInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdSetPerformanceStreamMarkerINTEL(commandBuffer, pMarkerInfo);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdSetPerformanceStreamMarkerINTEL(commandBuffer, pMarkerInfo);
+    }
+    VkResult result = DispatchCmdSetPerformanceStreamMarkerINTEL(commandBuffer, pMarkerInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdSetPerformanceStreamMarkerINTEL(commandBuffer, pMarkerInfo, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CmdSetPerformanceOverrideINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceOverrideInfoINTEL*       pOverrideInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdSetPerformanceOverrideINTEL(commandBuffer, pOverrideInfo);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdSetPerformanceOverrideINTEL(commandBuffer, pOverrideInfo);
+    }
+    VkResult result = DispatchCmdSetPerformanceOverrideINTEL(commandBuffer, pOverrideInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdSetPerformanceOverrideINTEL(commandBuffer, pOverrideInfo, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL AcquirePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo,
+    VkPerformanceConfigurationINTEL*            pConfiguration) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateAcquirePerformanceConfigurationINTEL(device, pAcquireInfo, pConfiguration);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordAcquirePerformanceConfigurationINTEL(device, pAcquireInfo, pConfiguration);
+    }
+    VkResult result = DispatchAcquirePerformanceConfigurationINTEL(device, pAcquireInfo, pConfiguration);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordAcquirePerformanceConfigurationINTEL(device, pAcquireInfo, pConfiguration, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL ReleasePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    VkPerformanceConfigurationINTEL             configuration) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateReleasePerformanceConfigurationINTEL(device, configuration);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordReleasePerformanceConfigurationINTEL(device, configuration);
+    }
+    VkResult result = DispatchReleasePerformanceConfigurationINTEL(device, configuration);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordReleasePerformanceConfigurationINTEL(device, configuration, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL QueueSetPerformanceConfigurationINTEL(
+    VkQueue                                     queue,
+    VkPerformanceConfigurationINTEL             configuration) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateQueueSetPerformanceConfigurationINTEL(queue, configuration);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordQueueSetPerformanceConfigurationINTEL(queue, configuration);
+    }
+    VkResult result = DispatchQueueSetPerformanceConfigurationINTEL(queue, configuration);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordQueueSetPerformanceConfigurationINTEL(queue, configuration, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPerformanceParameterINTEL(
+    VkDevice                                    device,
+    VkPerformanceParameterTypeINTEL             parameter,
+    VkPerformanceValueINTEL*                    pValue) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPerformanceParameterINTEL(device, parameter, pValue);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPerformanceParameterINTEL(device, parameter, pValue);
+    }
+    VkResult result = DispatchGetPerformanceParameterINTEL(device, parameter, pValue);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPerformanceParameterINTEL(device, parameter, pValue, result);
+    }
+    return result;
+}
+
+
+
+VKAPI_ATTR void VKAPI_CALL SetLocalDimmingAMD(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapChain,
+    VkBool32                                    localDimmingEnable) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateSetLocalDimmingAMD(device, swapChain, localDimmingEnable);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordSetLocalDimmingAMD(device, swapChain, localDimmingEnable);
+    }
+    DispatchSetLocalDimmingAMD(device, swapChain, localDimmingEnable);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordSetLocalDimmingAMD(device, swapChain, localDimmingEnable);
+    }
+}
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateImagePipeSurfaceFUCHSIA(
+    VkInstance                                  instance,
+    const VkImagePipeSurfaceCreateInfoFUCHSIA*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateImagePipeSurfaceFUCHSIA(instance, pCreateInfo, pAllocator, pSurface);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateImagePipeSurfaceFUCHSIA(instance, pCreateInfo, pAllocator, pSurface);
+    }
+    VkResult result = DispatchCreateImagePipeSurfaceFUCHSIA(instance, pCreateInfo, pAllocator, pSurface);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateImagePipeSurfaceFUCHSIA(instance, pCreateInfo, pAllocator, pSurface, result);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_FUCHSIA
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateMetalSurfaceEXT(
+    VkInstance                                  instance,
+    const VkMetalSurfaceCreateInfoEXT*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateMetalSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateMetalSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface);
+    }
+    VkResult result = DispatchCreateMetalSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateMetalSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface, result);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_METAL_EXT
+
+
+
+
+
+
+
+
+
+
+
+
+VKAPI_ATTR VkDeviceAddress VKAPI_CALL GetBufferDeviceAddressEXT(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetBufferDeviceAddressEXT(device, pInfo);
+        if (skip) return 0;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetBufferDeviceAddressEXT(device, pInfo);
+    }
+    VkDeviceAddress result = DispatchGetBufferDeviceAddressEXT(device, pInfo);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetBufferDeviceAddressEXT(device, pInfo, result);
+    }
+    return result;
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceToolPropertiesEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pToolCount,
+    VkPhysicalDeviceToolPropertiesEXT*          pToolProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceToolPropertiesEXT(physicalDevice, pToolCount, pToolProperties);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceToolPropertiesEXT(physicalDevice, pToolCount, pToolProperties);
+    }
+    VkResult result = DispatchGetPhysicalDeviceToolPropertiesEXT(physicalDevice, pToolCount, pToolProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceToolPropertiesEXT(physicalDevice, pToolCount, pToolProperties, result);
+    }
+    return result;
+}
+
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceCooperativeMatrixPropertiesNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkCooperativeMatrixPropertiesNV*            pProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceCooperativeMatrixPropertiesNV(physicalDevice, pPropertyCount, pProperties);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceCooperativeMatrixPropertiesNV(physicalDevice, pPropertyCount, pProperties);
+    }
+    VkResult result = DispatchGetPhysicalDeviceCooperativeMatrixPropertiesNV(physicalDevice, pPropertyCount, pProperties);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceCooperativeMatrixPropertiesNV(physicalDevice, pPropertyCount, pProperties, result);
+    }
+    return result;
+}
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pCombinationCount,
+    VkFramebufferMixedSamplesCombinationNV*     pCombinations) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(physicalDevice, pCombinationCount, pCombinations);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(physicalDevice, pCombinationCount, pCombinations);
+    }
+    VkResult result = DispatchGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(physicalDevice, pCombinationCount, pCombinations);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(physicalDevice, pCombinationCount, pCombinations, result);
+    }
+    return result;
+}
+
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfacePresentModes2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetPhysicalDeviceSurfacePresentModes2EXT(physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetPhysicalDeviceSurfacePresentModes2EXT(physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes);
+    }
+    VkResult result = DispatchGetPhysicalDeviceSurfacePresentModes2EXT(physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetPhysicalDeviceSurfacePresentModes2EXT(physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL AcquireFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateAcquireFullScreenExclusiveModeEXT(device, swapchain);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordAcquireFullScreenExclusiveModeEXT(device, swapchain);
+    }
+    VkResult result = DispatchAcquireFullScreenExclusiveModeEXT(device, swapchain);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordAcquireFullScreenExclusiveModeEXT(device, swapchain, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL ReleaseFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateReleaseFullScreenExclusiveModeEXT(device, swapchain);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordReleaseFullScreenExclusiveModeEXT(device, swapchain);
+    }
+    VkResult result = DispatchReleaseFullScreenExclusiveModeEXT(device, swapchain);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordReleaseFullScreenExclusiveModeEXT(device, swapchain, result);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetDeviceGroupSurfacePresentModes2EXT(
+    VkDevice                                    device,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateGetDeviceGroupSurfacePresentModes2EXT(device, pSurfaceInfo, pModes);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordGetDeviceGroupSurfacePresentModes2EXT(device, pSurfaceInfo, pModes);
+    }
+    VkResult result = DispatchGetDeviceGroupSurfacePresentModes2EXT(device, pSurfaceInfo, pModes);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordGetDeviceGroupSurfacePresentModes2EXT(device, pSurfaceInfo, pModes, result);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateHeadlessSurfaceEXT(
+    VkInstance                                  instance,
+    const VkHeadlessSurfaceCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateHeadlessSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateHeadlessSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface);
+    }
+    VkResult result = DispatchCreateHeadlessSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateHeadlessSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface, result);
+    }
+    return result;
+}
+
+
+VKAPI_ATTR void VKAPI_CALL CmdSetLineStippleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    lineStippleFactor,
+    uint16_t                                    lineStipplePattern) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCmdSetLineStippleEXT(commandBuffer, lineStippleFactor, lineStipplePattern);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCmdSetLineStippleEXT(commandBuffer, lineStippleFactor, lineStipplePattern);
+    }
+    DispatchCmdSetLineStippleEXT(commandBuffer, lineStippleFactor, lineStipplePattern);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCmdSetLineStippleEXT(commandBuffer, lineStippleFactor, lineStipplePattern);
+    }
+}
+
+
+VKAPI_ATTR void VKAPI_CALL ResetQueryPoolEXT(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateResetQueryPoolEXT(device, queryPool, firstQuery, queryCount);
+        if (skip) return;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordResetQueryPoolEXT(device, queryPool, firstQuery, queryCount);
+    }
+    DispatchResetQueryPoolEXT(device, queryPool, firstQuery, queryCount);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordResetQueryPoolEXT(device, queryPool, firstQuery, queryCount);
+    }
+}
+
+
+
+
+
+// Map of intercepted ApiName to its associated function data
+#ifdef _MSC_VER
+#pragma warning( suppress: 6262 ) // VS analysis: this uses more than 16 kiB, which is fine here at global scope
+#endif
+const std::unordered_map<std::string, function_data> name_to_funcptr_map = {
+    {"vkCreateInstance", {true, (void*)CreateInstance}},
+    {"vkDestroyInstance", {true, (void*)DestroyInstance}},
+    {"vkEnumeratePhysicalDevices", {true, (void*)EnumeratePhysicalDevices}},
+    {"vkGetPhysicalDeviceFeatures", {true, (void*)GetPhysicalDeviceFeatures}},
+    {"vkGetPhysicalDeviceFormatProperties", {true, (void*)GetPhysicalDeviceFormatProperties}},
+    {"vkGetPhysicalDeviceImageFormatProperties", {true, (void*)GetPhysicalDeviceImageFormatProperties}},
+    {"vkGetPhysicalDeviceProperties", {true, (void*)GetPhysicalDeviceProperties}},
+    {"vkGetPhysicalDeviceQueueFamilyProperties", {true, (void*)GetPhysicalDeviceQueueFamilyProperties}},
+    {"vkGetPhysicalDeviceMemoryProperties", {true, (void*)GetPhysicalDeviceMemoryProperties}},
+    {"vkGetInstanceProcAddr", {true, (void*)GetInstanceProcAddr}},
+    {"vkGetDeviceProcAddr", {false, (void*)GetDeviceProcAddr}},
+    {"vkCreateDevice", {true, (void*)CreateDevice}},
+    {"vkDestroyDevice", {false, (void*)DestroyDevice}},
+    {"vkEnumerateInstanceExtensionProperties", {false, (void*)EnumerateInstanceExtensionProperties}},
+    {"vkEnumerateDeviceExtensionProperties", {true, (void*)EnumerateDeviceExtensionProperties}},
+    {"vkEnumerateInstanceLayerProperties", {false, (void*)EnumerateInstanceLayerProperties}},
+    {"vkEnumerateDeviceLayerProperties", {true, (void*)EnumerateDeviceLayerProperties}},
+    {"vkGetDeviceQueue", {false, (void*)GetDeviceQueue}},
+    {"vkQueueSubmit", {false, (void*)QueueSubmit}},
+    {"vkQueueWaitIdle", {false, (void*)QueueWaitIdle}},
+    {"vkDeviceWaitIdle", {false, (void*)DeviceWaitIdle}},
+    {"vkAllocateMemory", {false, (void*)AllocateMemory}},
+    {"vkFreeMemory", {false, (void*)FreeMemory}},
+    {"vkMapMemory", {false, (void*)MapMemory}},
+    {"vkUnmapMemory", {false, (void*)UnmapMemory}},
+    {"vkFlushMappedMemoryRanges", {false, (void*)FlushMappedMemoryRanges}},
+    {"vkInvalidateMappedMemoryRanges", {false, (void*)InvalidateMappedMemoryRanges}},
+    {"vkGetDeviceMemoryCommitment", {false, (void*)GetDeviceMemoryCommitment}},
+    {"vkBindBufferMemory", {false, (void*)BindBufferMemory}},
+    {"vkBindImageMemory", {false, (void*)BindImageMemory}},
+    {"vkGetBufferMemoryRequirements", {false, (void*)GetBufferMemoryRequirements}},
+    {"vkGetImageMemoryRequirements", {false, (void*)GetImageMemoryRequirements}},
+    {"vkGetImageSparseMemoryRequirements", {false, (void*)GetImageSparseMemoryRequirements}},
+    {"vkGetPhysicalDeviceSparseImageFormatProperties", {true, (void*)GetPhysicalDeviceSparseImageFormatProperties}},
+    {"vkQueueBindSparse", {false, (void*)QueueBindSparse}},
+    {"vkCreateFence", {false, (void*)CreateFence}},
+    {"vkDestroyFence", {false, (void*)DestroyFence}},
+    {"vkResetFences", {false, (void*)ResetFences}},
+    {"vkGetFenceStatus", {false, (void*)GetFenceStatus}},
+    {"vkWaitForFences", {false, (void*)WaitForFences}},
+    {"vkCreateSemaphore", {false, (void*)CreateSemaphore}},
+    {"vkDestroySemaphore", {false, (void*)DestroySemaphore}},
+    {"vkCreateEvent", {false, (void*)CreateEvent}},
+    {"vkDestroyEvent", {false, (void*)DestroyEvent}},
+    {"vkGetEventStatus", {false, (void*)GetEventStatus}},
+    {"vkSetEvent", {false, (void*)SetEvent}},
+    {"vkResetEvent", {false, (void*)ResetEvent}},
+    {"vkCreateQueryPool", {false, (void*)CreateQueryPool}},
+    {"vkDestroyQueryPool", {false, (void*)DestroyQueryPool}},
+    {"vkGetQueryPoolResults", {false, (void*)GetQueryPoolResults}},
+    {"vkCreateBuffer", {false, (void*)CreateBuffer}},
+    {"vkDestroyBuffer", {false, (void*)DestroyBuffer}},
+    {"vkCreateBufferView", {false, (void*)CreateBufferView}},
+    {"vkDestroyBufferView", {false, (void*)DestroyBufferView}},
+    {"vkCreateImage", {false, (void*)CreateImage}},
+    {"vkDestroyImage", {false, (void*)DestroyImage}},
+    {"vkGetImageSubresourceLayout", {false, (void*)GetImageSubresourceLayout}},
+    {"vkCreateImageView", {false, (void*)CreateImageView}},
+    {"vkDestroyImageView", {false, (void*)DestroyImageView}},
+    {"vkCreateShaderModule", {false, (void*)CreateShaderModule}},
+    {"vkDestroyShaderModule", {false, (void*)DestroyShaderModule}},
+    {"vkCreatePipelineCache", {false, (void*)CreatePipelineCache}},
+    {"vkDestroyPipelineCache", {false, (void*)DestroyPipelineCache}},
+    {"vkGetPipelineCacheData", {false, (void*)GetPipelineCacheData}},
+    {"vkMergePipelineCaches", {false, (void*)MergePipelineCaches}},
+    {"vkCreateGraphicsPipelines", {false, (void*)CreateGraphicsPipelines}},
+    {"vkCreateComputePipelines", {false, (void*)CreateComputePipelines}},
+    {"vkDestroyPipeline", {false, (void*)DestroyPipeline}},
+    {"vkCreatePipelineLayout", {false, (void*)CreatePipelineLayout}},
+    {"vkDestroyPipelineLayout", {false, (void*)DestroyPipelineLayout}},
+    {"vkCreateSampler", {false, (void*)CreateSampler}},
+    {"vkDestroySampler", {false, (void*)DestroySampler}},
+    {"vkCreateDescriptorSetLayout", {false, (void*)CreateDescriptorSetLayout}},
+    {"vkDestroyDescriptorSetLayout", {false, (void*)DestroyDescriptorSetLayout}},
+    {"vkCreateDescriptorPool", {false, (void*)CreateDescriptorPool}},
+    {"vkDestroyDescriptorPool", {false, (void*)DestroyDescriptorPool}},
+    {"vkResetDescriptorPool", {false, (void*)ResetDescriptorPool}},
+    {"vkAllocateDescriptorSets", {false, (void*)AllocateDescriptorSets}},
+    {"vkFreeDescriptorSets", {false, (void*)FreeDescriptorSets}},
+    {"vkUpdateDescriptorSets", {false, (void*)UpdateDescriptorSets}},
+    {"vkCreateFramebuffer", {false, (void*)CreateFramebuffer}},
+    {"vkDestroyFramebuffer", {false, (void*)DestroyFramebuffer}},
+    {"vkCreateRenderPass", {false, (void*)CreateRenderPass}},
+    {"vkDestroyRenderPass", {false, (void*)DestroyRenderPass}},
+    {"vkGetRenderAreaGranularity", {false, (void*)GetRenderAreaGranularity}},
+    {"vkCreateCommandPool", {false, (void*)CreateCommandPool}},
+    {"vkDestroyCommandPool", {false, (void*)DestroyCommandPool}},
+    {"vkResetCommandPool", {false, (void*)ResetCommandPool}},
+    {"vkAllocateCommandBuffers", {false, (void*)AllocateCommandBuffers}},
+    {"vkFreeCommandBuffers", {false, (void*)FreeCommandBuffers}},
+    {"vkBeginCommandBuffer", {false, (void*)BeginCommandBuffer}},
+    {"vkEndCommandBuffer", {false, (void*)EndCommandBuffer}},
+    {"vkResetCommandBuffer", {false, (void*)ResetCommandBuffer}},
+    {"vkCmdBindPipeline", {false, (void*)CmdBindPipeline}},
+    {"vkCmdSetViewport", {false, (void*)CmdSetViewport}},
+    {"vkCmdSetScissor", {false, (void*)CmdSetScissor}},
+    {"vkCmdSetLineWidth", {false, (void*)CmdSetLineWidth}},
+    {"vkCmdSetDepthBias", {false, (void*)CmdSetDepthBias}},
+    {"vkCmdSetBlendConstants", {false, (void*)CmdSetBlendConstants}},
+    {"vkCmdSetDepthBounds", {false, (void*)CmdSetDepthBounds}},
+    {"vkCmdSetStencilCompareMask", {false, (void*)CmdSetStencilCompareMask}},
+    {"vkCmdSetStencilWriteMask", {false, (void*)CmdSetStencilWriteMask}},
+    {"vkCmdSetStencilReference", {false, (void*)CmdSetStencilReference}},
+    {"vkCmdBindDescriptorSets", {false, (void*)CmdBindDescriptorSets}},
+    {"vkCmdBindIndexBuffer", {false, (void*)CmdBindIndexBuffer}},
+    {"vkCmdBindVertexBuffers", {false, (void*)CmdBindVertexBuffers}},
+    {"vkCmdDraw", {false, (void*)CmdDraw}},
+    {"vkCmdDrawIndexed", {false, (void*)CmdDrawIndexed}},
+    {"vkCmdDrawIndirect", {false, (void*)CmdDrawIndirect}},
+    {"vkCmdDrawIndexedIndirect", {false, (void*)CmdDrawIndexedIndirect}},
+    {"vkCmdDispatch", {false, (void*)CmdDispatch}},
+    {"vkCmdDispatchIndirect", {false, (void*)CmdDispatchIndirect}},
+    {"vkCmdCopyBuffer", {false, (void*)CmdCopyBuffer}},
+    {"vkCmdCopyImage", {false, (void*)CmdCopyImage}},
+    {"vkCmdBlitImage", {false, (void*)CmdBlitImage}},
+    {"vkCmdCopyBufferToImage", {false, (void*)CmdCopyBufferToImage}},
+    {"vkCmdCopyImageToBuffer", {false, (void*)CmdCopyImageToBuffer}},
+    {"vkCmdUpdateBuffer", {false, (void*)CmdUpdateBuffer}},
+    {"vkCmdFillBuffer", {false, (void*)CmdFillBuffer}},
+    {"vkCmdClearColorImage", {false, (void*)CmdClearColorImage}},
+    {"vkCmdClearDepthStencilImage", {false, (void*)CmdClearDepthStencilImage}},
+    {"vkCmdClearAttachments", {false, (void*)CmdClearAttachments}},
+    {"vkCmdResolveImage", {false, (void*)CmdResolveImage}},
+    {"vkCmdSetEvent", {false, (void*)CmdSetEvent}},
+    {"vkCmdResetEvent", {false, (void*)CmdResetEvent}},
+    {"vkCmdWaitEvents", {false, (void*)CmdWaitEvents}},
+    {"vkCmdPipelineBarrier", {false, (void*)CmdPipelineBarrier}},
+    {"vkCmdBeginQuery", {false, (void*)CmdBeginQuery}},
+    {"vkCmdEndQuery", {false, (void*)CmdEndQuery}},
+    {"vkCmdResetQueryPool", {false, (void*)CmdResetQueryPool}},
+    {"vkCmdWriteTimestamp", {false, (void*)CmdWriteTimestamp}},
+    {"vkCmdCopyQueryPoolResults", {false, (void*)CmdCopyQueryPoolResults}},
+    {"vkCmdPushConstants", {false, (void*)CmdPushConstants}},
+    {"vkCmdBeginRenderPass", {false, (void*)CmdBeginRenderPass}},
+    {"vkCmdNextSubpass", {false, (void*)CmdNextSubpass}},
+    {"vkCmdEndRenderPass", {false, (void*)CmdEndRenderPass}},
+    {"vkCmdExecuteCommands", {false, (void*)CmdExecuteCommands}},
+    {"vkBindBufferMemory2", {false, (void*)BindBufferMemory2}},
+    {"vkBindImageMemory2", {false, (void*)BindImageMemory2}},
+    {"vkGetDeviceGroupPeerMemoryFeatures", {false, (void*)GetDeviceGroupPeerMemoryFeatures}},
+    {"vkCmdSetDeviceMask", {false, (void*)CmdSetDeviceMask}},
+    {"vkCmdDispatchBase", {false, (void*)CmdDispatchBase}},
+    {"vkEnumeratePhysicalDeviceGroups", {true, (void*)EnumeratePhysicalDeviceGroups}},
+    {"vkGetImageMemoryRequirements2", {false, (void*)GetImageMemoryRequirements2}},
+    {"vkGetBufferMemoryRequirements2", {false, (void*)GetBufferMemoryRequirements2}},
+    {"vkGetImageSparseMemoryRequirements2", {false, (void*)GetImageSparseMemoryRequirements2}},
+    {"vkGetPhysicalDeviceFeatures2", {true, (void*)GetPhysicalDeviceFeatures2}},
+    {"vkGetPhysicalDeviceProperties2", {true, (void*)GetPhysicalDeviceProperties2}},
+    {"vkGetPhysicalDeviceFormatProperties2", {true, (void*)GetPhysicalDeviceFormatProperties2}},
+    {"vkGetPhysicalDeviceImageFormatProperties2", {true, (void*)GetPhysicalDeviceImageFormatProperties2}},
+    {"vkGetPhysicalDeviceQueueFamilyProperties2", {true, (void*)GetPhysicalDeviceQueueFamilyProperties2}},
+    {"vkGetPhysicalDeviceMemoryProperties2", {true, (void*)GetPhysicalDeviceMemoryProperties2}},
+    {"vkGetPhysicalDeviceSparseImageFormatProperties2", {true, (void*)GetPhysicalDeviceSparseImageFormatProperties2}},
+    {"vkTrimCommandPool", {false, (void*)TrimCommandPool}},
+    {"vkGetDeviceQueue2", {false, (void*)GetDeviceQueue2}},
+    {"vkCreateSamplerYcbcrConversion", {false, (void*)CreateSamplerYcbcrConversion}},
+    {"vkDestroySamplerYcbcrConversion", {false, (void*)DestroySamplerYcbcrConversion}},
+    {"vkCreateDescriptorUpdateTemplate", {false, (void*)CreateDescriptorUpdateTemplate}},
+    {"vkDestroyDescriptorUpdateTemplate", {false, (void*)DestroyDescriptorUpdateTemplate}},
+    {"vkUpdateDescriptorSetWithTemplate", {false, (void*)UpdateDescriptorSetWithTemplate}},
+    {"vkGetPhysicalDeviceExternalBufferProperties", {true, (void*)GetPhysicalDeviceExternalBufferProperties}},
+    {"vkGetPhysicalDeviceExternalFenceProperties", {true, (void*)GetPhysicalDeviceExternalFenceProperties}},
+    {"vkGetPhysicalDeviceExternalSemaphoreProperties", {true, (void*)GetPhysicalDeviceExternalSemaphoreProperties}},
+    {"vkGetDescriptorSetLayoutSupport", {false, (void*)GetDescriptorSetLayoutSupport}},
+    {"vkDestroySurfaceKHR", {true, (void*)DestroySurfaceKHR}},
+    {"vkGetPhysicalDeviceSurfaceSupportKHR", {true, (void*)GetPhysicalDeviceSurfaceSupportKHR}},
+    {"vkGetPhysicalDeviceSurfaceCapabilitiesKHR", {true, (void*)GetPhysicalDeviceSurfaceCapabilitiesKHR}},
+    {"vkGetPhysicalDeviceSurfaceFormatsKHR", {true, (void*)GetPhysicalDeviceSurfaceFormatsKHR}},
+    {"vkGetPhysicalDeviceSurfacePresentModesKHR", {true, (void*)GetPhysicalDeviceSurfacePresentModesKHR}},
+    {"vkCreateSwapchainKHR", {false, (void*)CreateSwapchainKHR}},
+    {"vkDestroySwapchainKHR", {false, (void*)DestroySwapchainKHR}},
+    {"vkGetSwapchainImagesKHR", {false, (void*)GetSwapchainImagesKHR}},
+    {"vkAcquireNextImageKHR", {false, (void*)AcquireNextImageKHR}},
+    {"vkQueuePresentKHR", {false, (void*)QueuePresentKHR}},
+    {"vkGetDeviceGroupPresentCapabilitiesKHR", {false, (void*)GetDeviceGroupPresentCapabilitiesKHR}},
+    {"vkGetDeviceGroupSurfacePresentModesKHR", {false, (void*)GetDeviceGroupSurfacePresentModesKHR}},
+    {"vkGetPhysicalDevicePresentRectanglesKHR", {true, (void*)GetPhysicalDevicePresentRectanglesKHR}},
+    {"vkAcquireNextImage2KHR", {false, (void*)AcquireNextImage2KHR}},
+    {"vkGetPhysicalDeviceDisplayPropertiesKHR", {true, (void*)GetPhysicalDeviceDisplayPropertiesKHR}},
+    {"vkGetPhysicalDeviceDisplayPlanePropertiesKHR", {true, (void*)GetPhysicalDeviceDisplayPlanePropertiesKHR}},
+    {"vkGetDisplayPlaneSupportedDisplaysKHR", {true, (void*)GetDisplayPlaneSupportedDisplaysKHR}},
+    {"vkGetDisplayModePropertiesKHR", {true, (void*)GetDisplayModePropertiesKHR}},
+    {"vkCreateDisplayModeKHR", {true, (void*)CreateDisplayModeKHR}},
+    {"vkGetDisplayPlaneCapabilitiesKHR", {true, (void*)GetDisplayPlaneCapabilitiesKHR}},
+    {"vkCreateDisplayPlaneSurfaceKHR", {true, (void*)CreateDisplayPlaneSurfaceKHR}},
+    {"vkCreateSharedSwapchainsKHR", {false, (void*)CreateSharedSwapchainsKHR}},
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    {"vkCreateXlibSurfaceKHR", {true, (void*)CreateXlibSurfaceKHR}},
+#endif
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    {"vkGetPhysicalDeviceXlibPresentationSupportKHR", {true, (void*)GetPhysicalDeviceXlibPresentationSupportKHR}},
+#endif
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    {"vkCreateXcbSurfaceKHR", {true, (void*)CreateXcbSurfaceKHR}},
+#endif
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    {"vkGetPhysicalDeviceXcbPresentationSupportKHR", {true, (void*)GetPhysicalDeviceXcbPresentationSupportKHR}},
+#endif
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    {"vkCreateWaylandSurfaceKHR", {true, (void*)CreateWaylandSurfaceKHR}},
+#endif
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    {"vkGetPhysicalDeviceWaylandPresentationSupportKHR", {true, (void*)GetPhysicalDeviceWaylandPresentationSupportKHR}},
+#endif
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    {"vkCreateAndroidSurfaceKHR", {true, (void*)CreateAndroidSurfaceKHR}},
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    {"vkCreateWin32SurfaceKHR", {true, (void*)CreateWin32SurfaceKHR}},
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    {"vkGetPhysicalDeviceWin32PresentationSupportKHR", {true, (void*)GetPhysicalDeviceWin32PresentationSupportKHR}},
+#endif
+    {"vkGetPhysicalDeviceFeatures2KHR", {true, (void*)GetPhysicalDeviceFeatures2KHR}},
+    {"vkGetPhysicalDeviceProperties2KHR", {true, (void*)GetPhysicalDeviceProperties2KHR}},
+    {"vkGetPhysicalDeviceFormatProperties2KHR", {true, (void*)GetPhysicalDeviceFormatProperties2KHR}},
+    {"vkGetPhysicalDeviceImageFormatProperties2KHR", {true, (void*)GetPhysicalDeviceImageFormatProperties2KHR}},
+    {"vkGetPhysicalDeviceQueueFamilyProperties2KHR", {true, (void*)GetPhysicalDeviceQueueFamilyProperties2KHR}},
+    {"vkGetPhysicalDeviceMemoryProperties2KHR", {true, (void*)GetPhysicalDeviceMemoryProperties2KHR}},
+    {"vkGetPhysicalDeviceSparseImageFormatProperties2KHR", {true, (void*)GetPhysicalDeviceSparseImageFormatProperties2KHR}},
+    {"vkGetDeviceGroupPeerMemoryFeaturesKHR", {false, (void*)GetDeviceGroupPeerMemoryFeaturesKHR}},
+    {"vkCmdSetDeviceMaskKHR", {false, (void*)CmdSetDeviceMaskKHR}},
+    {"vkCmdDispatchBaseKHR", {false, (void*)CmdDispatchBaseKHR}},
+    {"vkTrimCommandPoolKHR", {false, (void*)TrimCommandPoolKHR}},
+    {"vkEnumeratePhysicalDeviceGroupsKHR", {true, (void*)EnumeratePhysicalDeviceGroupsKHR}},
+    {"vkGetPhysicalDeviceExternalBufferPropertiesKHR", {true, (void*)GetPhysicalDeviceExternalBufferPropertiesKHR}},
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    {"vkGetMemoryWin32HandleKHR", {false, (void*)GetMemoryWin32HandleKHR}},
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    {"vkGetMemoryWin32HandlePropertiesKHR", {false, (void*)GetMemoryWin32HandlePropertiesKHR}},
+#endif
+    {"vkGetMemoryFdKHR", {false, (void*)GetMemoryFdKHR}},
+    {"vkGetMemoryFdPropertiesKHR", {false, (void*)GetMemoryFdPropertiesKHR}},
+    {"vkGetPhysicalDeviceExternalSemaphorePropertiesKHR", {true, (void*)GetPhysicalDeviceExternalSemaphorePropertiesKHR}},
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    {"vkImportSemaphoreWin32HandleKHR", {false, (void*)ImportSemaphoreWin32HandleKHR}},
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    {"vkGetSemaphoreWin32HandleKHR", {false, (void*)GetSemaphoreWin32HandleKHR}},
+#endif
+    {"vkImportSemaphoreFdKHR", {false, (void*)ImportSemaphoreFdKHR}},
+    {"vkGetSemaphoreFdKHR", {false, (void*)GetSemaphoreFdKHR}},
+    {"vkCmdPushDescriptorSetKHR", {false, (void*)CmdPushDescriptorSetKHR}},
+    {"vkCmdPushDescriptorSetWithTemplateKHR", {false, (void*)CmdPushDescriptorSetWithTemplateKHR}},
+    {"vkCreateDescriptorUpdateTemplateKHR", {false, (void*)CreateDescriptorUpdateTemplateKHR}},
+    {"vkDestroyDescriptorUpdateTemplateKHR", {false, (void*)DestroyDescriptorUpdateTemplateKHR}},
+    {"vkUpdateDescriptorSetWithTemplateKHR", {false, (void*)UpdateDescriptorSetWithTemplateKHR}},
+    {"vkCreateRenderPass2KHR", {false, (void*)CreateRenderPass2KHR}},
+    {"vkCmdBeginRenderPass2KHR", {false, (void*)CmdBeginRenderPass2KHR}},
+    {"vkCmdNextSubpass2KHR", {false, (void*)CmdNextSubpass2KHR}},
+    {"vkCmdEndRenderPass2KHR", {false, (void*)CmdEndRenderPass2KHR}},
+    {"vkGetSwapchainStatusKHR", {false, (void*)GetSwapchainStatusKHR}},
+    {"vkGetPhysicalDeviceExternalFencePropertiesKHR", {true, (void*)GetPhysicalDeviceExternalFencePropertiesKHR}},
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    {"vkImportFenceWin32HandleKHR", {false, (void*)ImportFenceWin32HandleKHR}},
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    {"vkGetFenceWin32HandleKHR", {false, (void*)GetFenceWin32HandleKHR}},
+#endif
+    {"vkImportFenceFdKHR", {false, (void*)ImportFenceFdKHR}},
+    {"vkGetFenceFdKHR", {false, (void*)GetFenceFdKHR}},
+    {"vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR", {true, (void*)EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR}},
+    {"vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR", {true, (void*)GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR}},
+    {"vkAcquireProfilingLockKHR", {false, (void*)AcquireProfilingLockKHR}},
+    {"vkReleaseProfilingLockKHR", {false, (void*)ReleaseProfilingLockKHR}},
+    {"vkGetPhysicalDeviceSurfaceCapabilities2KHR", {true, (void*)GetPhysicalDeviceSurfaceCapabilities2KHR}},
+    {"vkGetPhysicalDeviceSurfaceFormats2KHR", {true, (void*)GetPhysicalDeviceSurfaceFormats2KHR}},
+    {"vkGetPhysicalDeviceDisplayProperties2KHR", {true, (void*)GetPhysicalDeviceDisplayProperties2KHR}},
+    {"vkGetPhysicalDeviceDisplayPlaneProperties2KHR", {true, (void*)GetPhysicalDeviceDisplayPlaneProperties2KHR}},
+    {"vkGetDisplayModeProperties2KHR", {true, (void*)GetDisplayModeProperties2KHR}},
+    {"vkGetDisplayPlaneCapabilities2KHR", {true, (void*)GetDisplayPlaneCapabilities2KHR}},
+    {"vkGetImageMemoryRequirements2KHR", {false, (void*)GetImageMemoryRequirements2KHR}},
+    {"vkGetBufferMemoryRequirements2KHR", {false, (void*)GetBufferMemoryRequirements2KHR}},
+    {"vkGetImageSparseMemoryRequirements2KHR", {false, (void*)GetImageSparseMemoryRequirements2KHR}},
+    {"vkCreateSamplerYcbcrConversionKHR", {false, (void*)CreateSamplerYcbcrConversionKHR}},
+    {"vkDestroySamplerYcbcrConversionKHR", {false, (void*)DestroySamplerYcbcrConversionKHR}},
+    {"vkBindBufferMemory2KHR", {false, (void*)BindBufferMemory2KHR}},
+    {"vkBindImageMemory2KHR", {false, (void*)BindImageMemory2KHR}},
+    {"vkGetDescriptorSetLayoutSupportKHR", {false, (void*)GetDescriptorSetLayoutSupportKHR}},
+    {"vkCmdDrawIndirectCountKHR", {false, (void*)CmdDrawIndirectCountKHR}},
+    {"vkCmdDrawIndexedIndirectCountKHR", {false, (void*)CmdDrawIndexedIndirectCountKHR}},
+    {"vkGetSemaphoreCounterValueKHR", {false, (void*)GetSemaphoreCounterValueKHR}},
+    {"vkWaitSemaphoresKHR", {false, (void*)WaitSemaphoresKHR}},
+    {"vkSignalSemaphoreKHR", {false, (void*)SignalSemaphoreKHR}},
+    {"vkGetBufferDeviceAddressKHR", {false, (void*)GetBufferDeviceAddressKHR}},
+    {"vkGetBufferOpaqueCaptureAddressKHR", {false, (void*)GetBufferOpaqueCaptureAddressKHR}},
+    {"vkGetDeviceMemoryOpaqueCaptureAddressKHR", {false, (void*)GetDeviceMemoryOpaqueCaptureAddressKHR}},
+    {"vkGetPipelineExecutablePropertiesKHR", {false, (void*)GetPipelineExecutablePropertiesKHR}},
+    {"vkGetPipelineExecutableStatisticsKHR", {false, (void*)GetPipelineExecutableStatisticsKHR}},
+    {"vkGetPipelineExecutableInternalRepresentationsKHR", {false, (void*)GetPipelineExecutableInternalRepresentationsKHR}},
+    {"vkCreateDebugReportCallbackEXT", {true, (void*)CreateDebugReportCallbackEXT}},
+    {"vkDestroyDebugReportCallbackEXT", {true, (void*)DestroyDebugReportCallbackEXT}},
+    {"vkDebugReportMessageEXT", {true, (void*)DebugReportMessageEXT}},
+    {"vkDebugMarkerSetObjectTagEXT", {false, (void*)DebugMarkerSetObjectTagEXT}},
+    {"vkDebugMarkerSetObjectNameEXT", {false, (void*)DebugMarkerSetObjectNameEXT}},
+    {"vkCmdDebugMarkerBeginEXT", {false, (void*)CmdDebugMarkerBeginEXT}},
+    {"vkCmdDebugMarkerEndEXT", {false, (void*)CmdDebugMarkerEndEXT}},
+    {"vkCmdDebugMarkerInsertEXT", {false, (void*)CmdDebugMarkerInsertEXT}},
+    {"vkCmdBindTransformFeedbackBuffersEXT", {false, (void*)CmdBindTransformFeedbackBuffersEXT}},
+    {"vkCmdBeginTransformFeedbackEXT", {false, (void*)CmdBeginTransformFeedbackEXT}},
+    {"vkCmdEndTransformFeedbackEXT", {false, (void*)CmdEndTransformFeedbackEXT}},
+    {"vkCmdBeginQueryIndexedEXT", {false, (void*)CmdBeginQueryIndexedEXT}},
+    {"vkCmdEndQueryIndexedEXT", {false, (void*)CmdEndQueryIndexedEXT}},
+    {"vkCmdDrawIndirectByteCountEXT", {false, (void*)CmdDrawIndirectByteCountEXT}},
+    {"vkGetImageViewHandleNVX", {false, (void*)GetImageViewHandleNVX}},
+    {"vkCmdDrawIndirectCountAMD", {false, (void*)CmdDrawIndirectCountAMD}},
+    {"vkCmdDrawIndexedIndirectCountAMD", {false, (void*)CmdDrawIndexedIndirectCountAMD}},
+    {"vkGetShaderInfoAMD", {false, (void*)GetShaderInfoAMD}},
+#ifdef VK_USE_PLATFORM_GGP
+    {"vkCreateStreamDescriptorSurfaceGGP", {true, (void*)CreateStreamDescriptorSurfaceGGP}},
+#endif
+    {"vkGetPhysicalDeviceExternalImageFormatPropertiesNV", {true, (void*)GetPhysicalDeviceExternalImageFormatPropertiesNV}},
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    {"vkGetMemoryWin32HandleNV", {false, (void*)GetMemoryWin32HandleNV}},
+#endif
+#ifdef VK_USE_PLATFORM_VI_NN
+    {"vkCreateViSurfaceNN", {true, (void*)CreateViSurfaceNN}},
+#endif
+    {"vkCmdBeginConditionalRenderingEXT", {false, (void*)CmdBeginConditionalRenderingEXT}},
+    {"vkCmdEndConditionalRenderingEXT", {false, (void*)CmdEndConditionalRenderingEXT}},
+    {"vkCmdProcessCommandsNVX", {false, (void*)CmdProcessCommandsNVX}},
+    {"vkCmdReserveSpaceForCommandsNVX", {false, (void*)CmdReserveSpaceForCommandsNVX}},
+    {"vkCreateIndirectCommandsLayoutNVX", {false, (void*)CreateIndirectCommandsLayoutNVX}},
+    {"vkDestroyIndirectCommandsLayoutNVX", {false, (void*)DestroyIndirectCommandsLayoutNVX}},
+    {"vkCreateObjectTableNVX", {false, (void*)CreateObjectTableNVX}},
+    {"vkDestroyObjectTableNVX", {false, (void*)DestroyObjectTableNVX}},
+    {"vkRegisterObjectsNVX", {false, (void*)RegisterObjectsNVX}},
+    {"vkUnregisterObjectsNVX", {false, (void*)UnregisterObjectsNVX}},
+    {"vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX", {true, (void*)GetPhysicalDeviceGeneratedCommandsPropertiesNVX}},
+    {"vkCmdSetViewportWScalingNV", {false, (void*)CmdSetViewportWScalingNV}},
+    {"vkReleaseDisplayEXT", {true, (void*)ReleaseDisplayEXT}},
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    {"vkAcquireXlibDisplayEXT", {true, (void*)AcquireXlibDisplayEXT}},
+#endif
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    {"vkGetRandROutputDisplayEXT", {true, (void*)GetRandROutputDisplayEXT}},
+#endif
+    {"vkGetPhysicalDeviceSurfaceCapabilities2EXT", {true, (void*)GetPhysicalDeviceSurfaceCapabilities2EXT}},
+    {"vkDisplayPowerControlEXT", {false, (void*)DisplayPowerControlEXT}},
+    {"vkRegisterDeviceEventEXT", {false, (void*)RegisterDeviceEventEXT}},
+    {"vkRegisterDisplayEventEXT", {false, (void*)RegisterDisplayEventEXT}},
+    {"vkGetSwapchainCounterEXT", {false, (void*)GetSwapchainCounterEXT}},
+    {"vkGetRefreshCycleDurationGOOGLE", {false, (void*)GetRefreshCycleDurationGOOGLE}},
+    {"vkGetPastPresentationTimingGOOGLE", {false, (void*)GetPastPresentationTimingGOOGLE}},
+    {"vkCmdSetDiscardRectangleEXT", {false, (void*)CmdSetDiscardRectangleEXT}},
+    {"vkSetHdrMetadataEXT", {false, (void*)SetHdrMetadataEXT}},
+#ifdef VK_USE_PLATFORM_IOS_MVK
+    {"vkCreateIOSSurfaceMVK", {true, (void*)CreateIOSSurfaceMVK}},
+#endif
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+    {"vkCreateMacOSSurfaceMVK", {true, (void*)CreateMacOSSurfaceMVK}},
+#endif
+    {"vkSetDebugUtilsObjectNameEXT", {false, (void*)SetDebugUtilsObjectNameEXT}},
+    {"vkSetDebugUtilsObjectTagEXT", {false, (void*)SetDebugUtilsObjectTagEXT}},
+    {"vkQueueBeginDebugUtilsLabelEXT", {false, (void*)QueueBeginDebugUtilsLabelEXT}},
+    {"vkQueueEndDebugUtilsLabelEXT", {false, (void*)QueueEndDebugUtilsLabelEXT}},
+    {"vkQueueInsertDebugUtilsLabelEXT", {false, (void*)QueueInsertDebugUtilsLabelEXT}},
+    {"vkCmdBeginDebugUtilsLabelEXT", {false, (void*)CmdBeginDebugUtilsLabelEXT}},
+    {"vkCmdEndDebugUtilsLabelEXT", {false, (void*)CmdEndDebugUtilsLabelEXT}},
+    {"vkCmdInsertDebugUtilsLabelEXT", {false, (void*)CmdInsertDebugUtilsLabelEXT}},
+    {"vkCreateDebugUtilsMessengerEXT", {true, (void*)CreateDebugUtilsMessengerEXT}},
+    {"vkDestroyDebugUtilsMessengerEXT", {true, (void*)DestroyDebugUtilsMessengerEXT}},
+    {"vkSubmitDebugUtilsMessageEXT", {true, (void*)SubmitDebugUtilsMessageEXT}},
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    {"vkGetAndroidHardwareBufferPropertiesANDROID", {false, (void*)GetAndroidHardwareBufferPropertiesANDROID}},
+#endif
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    {"vkGetMemoryAndroidHardwareBufferANDROID", {false, (void*)GetMemoryAndroidHardwareBufferANDROID}},
+#endif
+    {"vkCmdSetSampleLocationsEXT", {false, (void*)CmdSetSampleLocationsEXT}},
+    {"vkGetPhysicalDeviceMultisamplePropertiesEXT", {true, (void*)GetPhysicalDeviceMultisamplePropertiesEXT}},
+    {"vkGetImageDrmFormatModifierPropertiesEXT", {false, (void*)GetImageDrmFormatModifierPropertiesEXT}},
+    {"vkCreateValidationCacheEXT", {false, (void*)CreateValidationCacheEXT}},
+    {"vkDestroyValidationCacheEXT", {false, (void*)DestroyValidationCacheEXT}},
+    {"vkMergeValidationCachesEXT", {false, (void*)MergeValidationCachesEXT}},
+    {"vkGetValidationCacheDataEXT", {false, (void*)GetValidationCacheDataEXT}},
+    {"vkCmdBindShadingRateImageNV", {false, (void*)CmdBindShadingRateImageNV}},
+    {"vkCmdSetViewportShadingRatePaletteNV", {false, (void*)CmdSetViewportShadingRatePaletteNV}},
+    {"vkCmdSetCoarseSampleOrderNV", {false, (void*)CmdSetCoarseSampleOrderNV}},
+    {"vkCreateAccelerationStructureNV", {false, (void*)CreateAccelerationStructureNV}},
+    {"vkDestroyAccelerationStructureNV", {false, (void*)DestroyAccelerationStructureNV}},
+    {"vkGetAccelerationStructureMemoryRequirementsNV", {false, (void*)GetAccelerationStructureMemoryRequirementsNV}},
+    {"vkBindAccelerationStructureMemoryNV", {false, (void*)BindAccelerationStructureMemoryNV}},
+    {"vkCmdBuildAccelerationStructureNV", {false, (void*)CmdBuildAccelerationStructureNV}},
+    {"vkCmdCopyAccelerationStructureNV", {false, (void*)CmdCopyAccelerationStructureNV}},
+    {"vkCmdTraceRaysNV", {false, (void*)CmdTraceRaysNV}},
+    {"vkCreateRayTracingPipelinesNV", {false, (void*)CreateRayTracingPipelinesNV}},
+    {"vkGetRayTracingShaderGroupHandlesNV", {false, (void*)GetRayTracingShaderGroupHandlesNV}},
+    {"vkGetAccelerationStructureHandleNV", {false, (void*)GetAccelerationStructureHandleNV}},
+    {"vkCmdWriteAccelerationStructuresPropertiesNV", {false, (void*)CmdWriteAccelerationStructuresPropertiesNV}},
+    {"vkCompileDeferredNV", {false, (void*)CompileDeferredNV}},
+    {"vkGetMemoryHostPointerPropertiesEXT", {false, (void*)GetMemoryHostPointerPropertiesEXT}},
+    {"vkCmdWriteBufferMarkerAMD", {false, (void*)CmdWriteBufferMarkerAMD}},
+    {"vkGetPhysicalDeviceCalibrateableTimeDomainsEXT", {true, (void*)GetPhysicalDeviceCalibrateableTimeDomainsEXT}},
+    {"vkGetCalibratedTimestampsEXT", {false, (void*)GetCalibratedTimestampsEXT}},
+    {"vkCmdDrawMeshTasksNV", {false, (void*)CmdDrawMeshTasksNV}},
+    {"vkCmdDrawMeshTasksIndirectNV", {false, (void*)CmdDrawMeshTasksIndirectNV}},
+    {"vkCmdDrawMeshTasksIndirectCountNV", {false, (void*)CmdDrawMeshTasksIndirectCountNV}},
+    {"vkCmdSetExclusiveScissorNV", {false, (void*)CmdSetExclusiveScissorNV}},
+    {"vkCmdSetCheckpointNV", {false, (void*)CmdSetCheckpointNV}},
+    {"vkGetQueueCheckpointDataNV", {false, (void*)GetQueueCheckpointDataNV}},
+    {"vkInitializePerformanceApiINTEL", {false, (void*)InitializePerformanceApiINTEL}},
+    {"vkUninitializePerformanceApiINTEL", {false, (void*)UninitializePerformanceApiINTEL}},
+    {"vkCmdSetPerformanceMarkerINTEL", {false, (void*)CmdSetPerformanceMarkerINTEL}},
+    {"vkCmdSetPerformanceStreamMarkerINTEL", {false, (void*)CmdSetPerformanceStreamMarkerINTEL}},
+    {"vkCmdSetPerformanceOverrideINTEL", {false, (void*)CmdSetPerformanceOverrideINTEL}},
+    {"vkAcquirePerformanceConfigurationINTEL", {false, (void*)AcquirePerformanceConfigurationINTEL}},
+    {"vkReleasePerformanceConfigurationINTEL", {false, (void*)ReleasePerformanceConfigurationINTEL}},
+    {"vkQueueSetPerformanceConfigurationINTEL", {false, (void*)QueueSetPerformanceConfigurationINTEL}},
+    {"vkGetPerformanceParameterINTEL", {false, (void*)GetPerformanceParameterINTEL}},
+    {"vkSetLocalDimmingAMD", {false, (void*)SetLocalDimmingAMD}},
+#ifdef VK_USE_PLATFORM_FUCHSIA
+    {"vkCreateImagePipeSurfaceFUCHSIA", {true, (void*)CreateImagePipeSurfaceFUCHSIA}},
+#endif
+#ifdef VK_USE_PLATFORM_METAL_EXT
+    {"vkCreateMetalSurfaceEXT", {true, (void*)CreateMetalSurfaceEXT}},
+#endif
+    {"vkGetBufferDeviceAddressEXT", {false, (void*)GetBufferDeviceAddressEXT}},
+    {"vkGetPhysicalDeviceToolPropertiesEXT", {true, (void*)GetPhysicalDeviceToolPropertiesEXT}},
+    {"vkGetPhysicalDeviceCooperativeMatrixPropertiesNV", {true, (void*)GetPhysicalDeviceCooperativeMatrixPropertiesNV}},
+    {"vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV", {true, (void*)GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV}},
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    {"vkGetPhysicalDeviceSurfacePresentModes2EXT", {true, (void*)GetPhysicalDeviceSurfacePresentModes2EXT}},
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    {"vkAcquireFullScreenExclusiveModeEXT", {false, (void*)AcquireFullScreenExclusiveModeEXT}},
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    {"vkReleaseFullScreenExclusiveModeEXT", {false, (void*)ReleaseFullScreenExclusiveModeEXT}},
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    {"vkGetDeviceGroupSurfacePresentModes2EXT", {false, (void*)GetDeviceGroupSurfacePresentModes2EXT}},
+#endif
+    {"vkCreateHeadlessSurfaceEXT", {true, (void*)CreateHeadlessSurfaceEXT}},
+    {"vkCmdSetLineStippleEXT", {false, (void*)CmdSetLineStippleEXT}},
+    {"vkResetQueryPoolEXT", {false, (void*)ResetQueryPoolEXT}},
+};
+
+
+} // namespace vulkan_layer_chassis
+
+// loader-layer interface v0, just wrappers since there is only a layer
+
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
+                                                                                      VkExtensionProperties *pProperties) {
+    return vulkan_layer_chassis::EnumerateInstanceExtensionProperties(pLayerName, pCount, pProperties);
+}
+
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(uint32_t *pCount,
+                                                                                  VkLayerProperties *pProperties) {
+    return vulkan_layer_chassis::EnumerateInstanceLayerProperties(pCount, pProperties);
+}
+
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
+                                                                                VkLayerProperties *pProperties) {
+    // The layer command handles VK_NULL_HANDLE just fine internally.
+    assert(physicalDevice == VK_NULL_HANDLE);
+    return vulkan_layer_chassis::EnumerateDeviceLayerProperties(VK_NULL_HANDLE, pCount, pProperties);
+}
+
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
+                                                                                    const char *pLayerName, uint32_t *pCount,
+                                                                                    VkExtensionProperties *pProperties) {
+    // The layer command handles VK_NULL_HANDLE just fine internally.
+    assert(physicalDevice == VK_NULL_HANDLE);
+    return vulkan_layer_chassis::EnumerateDeviceExtensionProperties(VK_NULL_HANDLE, pLayerName, pCount, pProperties);
+}
+
+VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice dev, const char *funcName) {
+    return vulkan_layer_chassis::GetDeviceProcAddr(dev, funcName);
+}
+
+VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char *funcName) {
+    return vulkan_layer_chassis::GetInstanceProcAddr(instance, funcName);
+}
+
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkNegotiateLoaderLayerInterfaceVersion(VkNegotiateLayerInterface *pVersionStruct) {
+    assert(pVersionStruct != NULL);
+    assert(pVersionStruct->sType == LAYER_NEGOTIATE_INTERFACE_STRUCT);
+
+    // Fill in the function pointers if the negotiated interface version is new enough for the structure to contain them.
+    if (pVersionStruct->loaderLayerInterfaceVersion >= 2) {
+        pVersionStruct->pfnGetInstanceProcAddr = vkGetInstanceProcAddr;
+        pVersionStruct->pfnGetDeviceProcAddr = vkGetDeviceProcAddr;
+        pVersionStruct->pfnGetPhysicalDeviceProcAddr = nullptr;
+    }
+
+    return VK_SUCCESS;
+}
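
The exported vkNegotiateLoaderLayerInterfaceVersion above is the entry point a Vulkan loader calls before any instance exists. Below is a minimal sketch of the calling side of that v2 handshake, assuming the layer library has already been loaded and its negotiation symbol resolved into the hypothetical negotiate_fn parameter; the struct fields and constants used are the ones referenced in the wrapper above.

#include "vulkan/vulkan.h"
#include "vulkan/vk_layer.h"

// Sketch of how a loader might drive the v2 layer-interface negotiation.
// `negotiate_fn` is an illustrative parameter standing in for the resolved
// vkNegotiateLoaderLayerInterfaceVersion export.
static VkResult ExampleNegotiateWithLayer(
        VkResult (VKAPI_PTR *negotiate_fn)(VkNegotiateLayerInterface *)) {
    VkNegotiateLayerInterface negotiation = {};
    negotiation.sType = LAYER_NEGOTIATE_INTERFACE_STRUCT;
    negotiation.loaderLayerInterfaceVersion = 2;  // highest version this caller speaks
    VkResult result = negotiate_fn(&negotiation);
    if (result == VK_SUCCESS && negotiation.loaderLayerInterfaceVersion >= 2) {
        // With v2, instance and device call chains are built through these entry points.
        PFN_vkGetInstanceProcAddr layer_gipa = negotiation.pfnGetInstanceProcAddr;
        PFN_vkCreateInstance create_instance = reinterpret_cast<PFN_vkCreateInstance>(
            layer_gipa(VK_NULL_HANDLE, "vkCreateInstance"));
        (void)create_instance;  // a real loader would record this in its dispatch chain
    }
    return result;
}
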
diff --git a/src/third_party/vulkan-validation-layers/src/layers/generated/chassis.h b/src/third_party/vulkan-validation-layers/src/layers/generated/chassis.h
new file mode 100644
index 0000000..fe64ec5
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/generated/chassis.h
@@ -0,0 +1,3851 @@
+
+// This file is ***GENERATED***.  Do Not Edit.
+// See layer_chassis_generator.py for modifications.
+
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ */
+#pragma once
+
+
+#define NOMINMAX
+#include <atomic>
+#include <mutex>
+#include <cinttypes>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <unordered_map>
+#include <unordered_set>
+#include <algorithm>
+#include <memory>
+
+#include "vk_loader_platform.h"
+#include "vulkan/vulkan.h"
+#include "vk_layer_config.h"
+#include "vk_layer_data.h"
+#include "vk_layer_logging.h"
+#include "vk_object_types.h"
+#include "vulkan/vk_layer.h"
+#include "vk_enum_string_helper.h"
+#include "vk_layer_extension_utils.h"
+#include "vk_layer_utils.h"
+#include "vulkan/vk_layer.h"
+#include "vk_dispatch_table_helper.h"
+#include "vk_extension_helper.h"
+#include "vk_safe_struct.h"
+#include "vk_typemap_helper.h"
+
+
+extern std::atomic<uint64_t> global_unique_id;
+
+// To avoid re-hashing unique ids on each use, we precompute the hash and store the
+// hash's LSBs in the high 24 bits.
+struct HashedUint64 {
+    static const int HASHED_UINT64_SHIFT = 40;
+    size_t operator()(const uint64_t &t) const { return t >> HASHED_UINT64_SHIFT; }
+
+    static uint64_t hash(uint64_t id) {
+        uint64_t h = (uint64_t)std::hash<uint64_t>()(id);
+        id |= h << HASHED_UINT64_SHIFT;
+        return id;
+    }
+};
+
+extern vl_concurrent_unordered_map<uint64_t, uint64_t, 4, HashedUint64> unique_id_mapping;
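
As a sketch of how the declarations above fit together (illustrative only; the concrete insertion into unique_id_mapping is omitted rather than assume the map's exact interface): an id issued from global_unique_id is folded together with its hash by HashedUint64::hash(), so the map's hasher can later recover the precomputed hash with a single shift.

// Illustrative sketch only; relies solely on HashedUint64 and global_unique_id
// as declared above.
inline uint64_t ExampleWrapUniqueId() {
    uint64_t raw_id = global_unique_id++;           // freshly issued unique id
    uint64_t wrapped = HashedUint64::hash(raw_id);  // hash LSBs folded into bits 40..63
    size_t precomputed = HashedUint64()(wrapped);   // map hasher is just a shift, no re-hash
    (void)precomputed;
    return wrapped;
}
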
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateInstance(
+    const VkInstanceCreateInfo*                 pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkInstance*                                 pInstance);
+
+VKAPI_ATTR void VKAPI_CALL DestroyInstance(
+    VkInstance                                  instance,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDevices(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceCount,
+    VkPhysicalDevice*                           pPhysicalDevices);
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFeatures(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures*                   pFeatures);
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties*                         pFormatProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageCreateFlags                          flags,
+    VkImageFormatProperties*                    pImageFormatProperties);
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties*                 pProperties);
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyProperties(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties*                    pQueueFamilyProperties);
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMemoryProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties*           pMemoryProperties);
+
+VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(
+    VkInstance                                  instance,
+    const char*                                 pName);
+
+VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(
+    VkDevice                                    device,
+    const char*                                 pName);
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateDevice(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDeviceCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDevice*                                   pDevice);
+
+VKAPI_ATTR void VKAPI_CALL DestroyDevice(
+    VkDevice                                    device,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(
+    const char*                                 pLayerName,
+    uint32_t*                                   pPropertyCount,
+    VkExtensionProperties*                      pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceExtensionProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const char*                                 pLayerName,
+    uint32_t*                                   pPropertyCount,
+    VkExtensionProperties*                      pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(
+    uint32_t*                                   pPropertyCount,
+    VkLayerProperties*                          pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkLayerProperties*                          pProperties);
+
+VKAPI_ATTR void VKAPI_CALL GetDeviceQueue(
+    VkDevice                                    device,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t                                    queueIndex,
+    VkQueue*                                    pQueue);
+
+VKAPI_ATTR VkResult VKAPI_CALL QueueSubmit(
+    VkQueue                                     queue,
+    uint32_t                                    submitCount,
+    const VkSubmitInfo*                         pSubmits,
+    VkFence                                     fence);
+
+VKAPI_ATTR VkResult VKAPI_CALL QueueWaitIdle(
+    VkQueue                                     queue);
+
+VKAPI_ATTR VkResult VKAPI_CALL DeviceWaitIdle(
+    VkDevice                                    device);
+
+VKAPI_ATTR VkResult VKAPI_CALL AllocateMemory(
+    VkDevice                                    device,
+    const VkMemoryAllocateInfo*                 pAllocateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDeviceMemory*                             pMemory);
+
+VKAPI_ATTR void VKAPI_CALL FreeMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL MapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                offset,
+    VkDeviceSize                                size,
+    VkMemoryMapFlags                            flags,
+    void**                                      ppData);
+
+VKAPI_ATTR void VKAPI_CALL UnmapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory);
+
+VKAPI_ATTR VkResult VKAPI_CALL FlushMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges);
+
+VKAPI_ATTR VkResult VKAPI_CALL InvalidateMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges);
+
+VKAPI_ATTR void VKAPI_CALL GetDeviceMemoryCommitment(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize*                               pCommittedMemoryInBytes);
+
+VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset);
+
+VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset);
+
+VKAPI_ATTR void VKAPI_CALL GetBufferMemoryRequirements(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkMemoryRequirements*                       pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL GetImageMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkMemoryRequirements*                       pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL GetImageSparseMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements*            pSparseMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceSparseImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkSampleCountFlagBits                       samples,
+    VkImageUsageFlags                           usage,
+    VkImageTiling                               tiling,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties*              pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL QueueBindSparse(
+    VkQueue                                     queue,
+    uint32_t                                    bindInfoCount,
+    const VkBindSparseInfo*                     pBindInfo,
+    VkFence                                     fence);
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateFence(
+    VkDevice                                    device,
+    const VkFenceCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence);
+
+VKAPI_ATTR void VKAPI_CALL DestroyFence(
+    VkDevice                                    device,
+    VkFence                                     fence,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL ResetFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetFenceStatus(
+    VkDevice                                    device,
+    VkFence                                     fence);
+
+VKAPI_ATTR VkResult VKAPI_CALL WaitForFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences,
+    VkBool32                                    waitAll,
+    uint64_t                                    timeout);
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateSemaphore(
+    VkDevice                                    device,
+    const VkSemaphoreCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSemaphore*                                pSemaphore);
+
+VKAPI_ATTR void VKAPI_CALL DestroySemaphore(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateEvent(
+    VkDevice                                    device,
+    const VkEventCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkEvent*                                    pEvent);
+
+VKAPI_ATTR void VKAPI_CALL DestroyEvent(
+    VkDevice                                    device,
+    VkEvent                                     event,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetEventStatus(
+    VkDevice                                    device,
+    VkEvent                                     event);
+
+VKAPI_ATTR VkResult VKAPI_CALL SetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event);
+
+VKAPI_ATTR VkResult VKAPI_CALL ResetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event);
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateQueryPool(
+    VkDevice                                    device,
+    const VkQueryPoolCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkQueryPool*                                pQueryPool);
+
+VKAPI_ATTR void VKAPI_CALL DestroyQueryPool(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetQueryPoolResults(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    size_t                                      dataSize,
+    void*                                       pData,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags);
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateBuffer(
+    VkDevice                                    device,
+    const VkBufferCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBuffer*                                   pBuffer);
+
+VKAPI_ATTR void VKAPI_CALL DestroyBuffer(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateBufferView(
+    VkDevice                                    device,
+    const VkBufferViewCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBufferView*                               pView);
+
+VKAPI_ATTR void VKAPI_CALL DestroyBufferView(
+    VkDevice                                    device,
+    VkBufferView                                bufferView,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateImage(
+    VkDevice                                    device,
+    const VkImageCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImage*                                    pImage);
+
+VKAPI_ATTR void VKAPI_CALL DestroyImage(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL GetImageSubresourceLayout(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkImageSubresource*                   pSubresource,
+    VkSubresourceLayout*                        pLayout);
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateImageView(
+    VkDevice                                    device,
+    const VkImageViewCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImageView*                                pView);
+
+VKAPI_ATTR void VKAPI_CALL DestroyImageView(
+    VkDevice                                    device,
+    VkImageView                                 imageView,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateShaderModule(
+    VkDevice                                    device,
+    const VkShaderModuleCreateInfo*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkShaderModule*                             pShaderModule);
+
+VKAPI_ATTR void VKAPI_CALL DestroyShaderModule(
+    VkDevice                                    device,
+    VkShaderModule                              shaderModule,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineCache(
+    VkDevice                                    device,
+    const VkPipelineCacheCreateInfo*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineCache*                            pPipelineCache);
+
+VKAPI_ATTR void VKAPI_CALL DestroyPipelineCache(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPipelineCacheData(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    size_t*                                     pDataSize,
+    void*                                       pData);
+
+VKAPI_ATTR VkResult VKAPI_CALL MergePipelineCaches(
+    VkDevice                                    device,
+    VkPipelineCache                             dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkPipelineCache*                      pSrcCaches);
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateGraphicsPipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkGraphicsPipelineCreateInfo*         pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines);
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateComputePipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkComputePipelineCreateInfo*          pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines);
+
+VKAPI_ATTR void VKAPI_CALL DestroyPipeline(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineLayout(
+    VkDevice                                    device,
+    const VkPipelineLayoutCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineLayout*                           pPipelineLayout);
+
+VKAPI_ATTR void VKAPI_CALL DestroyPipelineLayout(
+    VkDevice                                    device,
+    VkPipelineLayout                            pipelineLayout,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateSampler(
+    VkDevice                                    device,
+    const VkSamplerCreateInfo*                  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSampler*                                  pSampler);
+
+VKAPI_ATTR void VKAPI_CALL DestroySampler(
+    VkDevice                                    device,
+    VkSampler                                   sampler,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorSetLayout(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorSetLayout*                      pSetLayout);
+
+VKAPI_ATTR void VKAPI_CALL DestroyDescriptorSetLayout(
+    VkDevice                                    device,
+    VkDescriptorSetLayout                       descriptorSetLayout,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorPool(
+    VkDevice                                    device,
+    const VkDescriptorPoolCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorPool*                           pDescriptorPool);
+
+VKAPI_ATTR void VKAPI_CALL DestroyDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL ResetDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    VkDescriptorPoolResetFlags                  flags);
+
+VKAPI_ATTR VkResult VKAPI_CALL AllocateDescriptorSets(
+    VkDevice                                    device,
+    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
+    VkDescriptorSet*                            pDescriptorSets);
+
+VKAPI_ATTR VkResult VKAPI_CALL FreeDescriptorSets(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets);
+
+VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSets(
+    VkDevice                                    device,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites,
+    uint32_t                                    descriptorCopyCount,
+    const VkCopyDescriptorSet*                  pDescriptorCopies);
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateFramebuffer(
+    VkDevice                                    device,
+    const VkFramebufferCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFramebuffer*                              pFramebuffer);
+
+VKAPI_ATTR void VKAPI_CALL DestroyFramebuffer(
+    VkDevice                                    device,
+    VkFramebuffer                               framebuffer,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateRenderPass(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass);
+
+VKAPI_ATTR void VKAPI_CALL DestroyRenderPass(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL GetRenderAreaGranularity(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    VkExtent2D*                                 pGranularity);
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateCommandPool(
+    VkDevice                                    device,
+    const VkCommandPoolCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkCommandPool*                              pCommandPool);
+
+VKAPI_ATTR void VKAPI_CALL DestroyCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL ResetCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolResetFlags                     flags);
+
+VKAPI_ATTR VkResult VKAPI_CALL AllocateCommandBuffers(
+    VkDevice                                    device,
+    const VkCommandBufferAllocateInfo*          pAllocateInfo,
+    VkCommandBuffer*                            pCommandBuffers);
+
+VKAPI_ATTR void VKAPI_CALL FreeCommandBuffers(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers);
+
+VKAPI_ATTR VkResult VKAPI_CALL BeginCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    const VkCommandBufferBeginInfo*             pBeginInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL EndCommandBuffer(
+    VkCommandBuffer                             commandBuffer);
+
+VKAPI_ATTR VkResult VKAPI_CALL ResetCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkCommandBufferResetFlags                   flags);
+
+VKAPI_ATTR void VKAPI_CALL CmdBindPipeline(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipeline                                  pipeline);
+
+VKAPI_ATTR void VKAPI_CALL CmdSetViewport(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewport*                           pViewports);
+
+VKAPI_ATTR void VKAPI_CALL CmdSetScissor(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstScissor,
+    uint32_t                                    scissorCount,
+    const VkRect2D*                             pScissors);
+
+VKAPI_ATTR void VKAPI_CALL CmdSetLineWidth(
+    VkCommandBuffer                             commandBuffer,
+    float                                       lineWidth);
+
+VKAPI_ATTR void VKAPI_CALL CmdSetDepthBias(
+    VkCommandBuffer                             commandBuffer,
+    float                                       depthBiasConstantFactor,
+    float                                       depthBiasClamp,
+    float                                       depthBiasSlopeFactor);
+
+VKAPI_ATTR void VKAPI_CALL CmdSetBlendConstants(
+    VkCommandBuffer                             commandBuffer,
+    const float                                 blendConstants[4]);
+
+VKAPI_ATTR void VKAPI_CALL CmdSetDepthBounds(
+    VkCommandBuffer                             commandBuffer,
+    float                                       minDepthBounds,
+    float                                       maxDepthBounds);
+
+VKAPI_ATTR void VKAPI_CALL CmdSetStencilCompareMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    compareMask);
+
+VKAPI_ATTR void VKAPI_CALL CmdSetStencilWriteMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    writeMask);
+
+VKAPI_ATTR void VKAPI_CALL CmdSetStencilReference(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    reference);
+
+VKAPI_ATTR void VKAPI_CALL CmdBindDescriptorSets(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    firstSet,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets,
+    uint32_t                                    dynamicOffsetCount,
+    const uint32_t*                             pDynamicOffsets);
+
+VKAPI_ATTR void VKAPI_CALL CmdBindIndexBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkIndexType                                 indexType);
+
+VKAPI_ATTR void VKAPI_CALL CmdBindVertexBuffers(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets);
+
+VKAPI_ATTR void VKAPI_CALL CmdDraw(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    vertexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstVertex,
+    uint32_t                                    firstInstance);
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawIndexed(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    indexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstIndex,
+    int32_t                                     vertexOffset,
+    uint32_t                                    firstInstance);
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL CmdDispatch(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ);
+
+VKAPI_ATTR void VKAPI_CALL CmdDispatchIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset);
+
+VKAPI_ATTR void VKAPI_CALL CmdCopyBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferCopy*                         pRegions);
+
+VKAPI_ATTR void VKAPI_CALL CmdCopyImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageCopy*                          pRegions);
+
+VKAPI_ATTR void VKAPI_CALL CmdBlitImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageBlit*                          pRegions,
+    VkFilter                                    filter);
+
+VKAPI_ATTR void VKAPI_CALL CmdCopyBufferToImage(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions);
+
+VKAPI_ATTR void VKAPI_CALL CmdCopyImageToBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions);
+
+VKAPI_ATTR void VKAPI_CALL CmdUpdateBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                dataSize,
+    const void*                                 pData);
+
+VKAPI_ATTR void VKAPI_CALL CmdFillBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                size,
+    uint32_t                                    data);
+
+VKAPI_ATTR void VKAPI_CALL CmdClearColorImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearColorValue*                    pColor,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges);
+
+VKAPI_ATTR void VKAPI_CALL CmdClearDepthStencilImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearDepthStencilValue*             pDepthStencil,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges);
+
+VKAPI_ATTR void VKAPI_CALL CmdClearAttachments(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    attachmentCount,
+    const VkClearAttachment*                    pAttachments,
+    uint32_t                                    rectCount,
+    const VkClearRect*                          pRects);
+
+VKAPI_ATTR void VKAPI_CALL CmdResolveImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageResolve*                       pRegions);
+
+VKAPI_ATTR void VKAPI_CALL CmdSetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask);
+
+VKAPI_ATTR void VKAPI_CALL CmdResetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask);
+
+VKAPI_ATTR void VKAPI_CALL CmdWaitEvents(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    eventCount,
+    const VkEvent*                              pEvents,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers);
+
+VKAPI_ATTR void VKAPI_CALL CmdPipelineBarrier(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    VkDependencyFlags                           dependencyFlags,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers);
+
+VKAPI_ATTR void VKAPI_CALL CmdBeginQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags);
+
+VKAPI_ATTR void VKAPI_CALL CmdEndQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query);
+
+VKAPI_ATTR void VKAPI_CALL CmdResetQueryPool(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount);
+
+VKAPI_ATTR void VKAPI_CALL CmdWriteTimestamp(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query);
+
+VKAPI_ATTR void VKAPI_CALL CmdCopyQueryPoolResults(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags);
+
+VKAPI_ATTR void VKAPI_CALL CmdPushConstants(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineLayout                            layout,
+    VkShaderStageFlags                          stageFlags,
+    uint32_t                                    offset,
+    uint32_t                                    size,
+    const void*                                 pValues);
+
+VKAPI_ATTR void VKAPI_CALL CmdBeginRenderPass(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    VkSubpassContents                           contents);
+
+VKAPI_ATTR void VKAPI_CALL CmdNextSubpass(
+    VkCommandBuffer                             commandBuffer,
+    VkSubpassContents                           contents);
+
+VKAPI_ATTR void VKAPI_CALL CmdEndRenderPass(
+    VkCommandBuffer                             commandBuffer);
+
+VKAPI_ATTR void VKAPI_CALL CmdExecuteCommands(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers);
+
+
+VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos);
+
+VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos);
+
+VKAPI_ATTR void VKAPI_CALL GetDeviceGroupPeerMemoryFeatures(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures);
+
+VKAPI_ATTR void VKAPI_CALL CmdSetDeviceMask(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask);
+
+VKAPI_ATTR void VKAPI_CALL CmdDispatchBase(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ);
+
+VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDeviceGroups(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties);
+
+VKAPI_ATTR void VKAPI_CALL GetImageMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL GetBufferMemoryRequirements2(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL GetImageSparseMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFeatures2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures2*                  pFeatures);
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties2*                pProperties);
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties2*                        pFormatProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceImageFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceImageFormatInfo2*     pImageFormatInfo,
+    VkImageFormatProperties2*                   pImageFormatProperties);
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties2*                   pQueueFamilyProperties);
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMemoryProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties2*          pMemoryProperties);
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceSparseImageFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties2*             pProperties);
+
+VKAPI_ATTR void VKAPI_CALL TrimCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags);
+
+VKAPI_ATTR void VKAPI_CALL GetDeviceQueue2(
+    VkDevice                                    device,
+    const VkDeviceQueueInfo2*                   pQueueInfo,
+    VkQueue*                                    pQueue);
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateSamplerYcbcrConversion(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion);
+
+VKAPI_ATTR void VKAPI_CALL DestroySamplerYcbcrConversion(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate);
+
+VKAPI_ATTR void VKAPI_CALL DestroyDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSetWithTemplate(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData);
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalBufferProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalBufferInfo*   pExternalBufferInfo,
+    VkExternalBufferProperties*                 pExternalBufferProperties);
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalFenceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalFenceInfo*    pExternalFenceInfo,
+    VkExternalFenceProperties*                  pExternalFenceProperties);
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalSemaphoreProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
+    VkExternalSemaphoreProperties*              pExternalSemaphoreProperties);
+
+VKAPI_ATTR void VKAPI_CALL GetDescriptorSetLayoutSupport(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport);
+
+
+VKAPI_ATTR void VKAPI_CALL DestroySurfaceKHR(
+    VkInstance                                  instance,
+    VkSurfaceKHR                                surface,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    VkSurfaceKHR                                surface,
+    VkBool32*                                   pSupported);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilitiesKHR*                   pSurfaceCapabilities);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceFormatsKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormatKHR*                         pSurfaceFormats);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfacePresentModesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes);
+
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateSwapchainKHR(
+    VkDevice                                    device,
+    const VkSwapchainCreateInfoKHR*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchain);
+
+VKAPI_ATTR void VKAPI_CALL DestroySwapchainKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainImagesKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pSwapchainImageCount,
+    VkImage*                                    pSwapchainImages);
+
+VKAPI_ATTR VkResult VKAPI_CALL AcquireNextImageKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint64_t                                    timeout,
+    VkSemaphore                                 semaphore,
+    VkFence                                     fence,
+    uint32_t*                                   pImageIndex);
+
+VKAPI_ATTR VkResult VKAPI_CALL QueuePresentKHR(
+    VkQueue                                     queue,
+    const VkPresentInfoKHR*                     pPresentInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetDeviceGroupPresentCapabilitiesKHR(
+    VkDevice                                    device,
+    VkDeviceGroupPresentCapabilitiesKHR*        pDeviceGroupPresentCapabilities);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetDeviceGroupSurfacePresentModesKHR(
+    VkDevice                                    device,
+    VkSurfaceKHR                                surface,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDevicePresentRectanglesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pRectCount,
+    VkRect2D*                                   pRects);
+
+VKAPI_ATTR VkResult VKAPI_CALL AcquireNextImage2KHR(
+    VkDevice                                    device,
+    const VkAcquireNextImageInfoKHR*            pAcquireInfo,
+    uint32_t*                                   pImageIndex);
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceDisplayPropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPropertiesKHR*                     pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceDisplayPlanePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPlanePropertiesKHR*                pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetDisplayPlaneSupportedDisplaysKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    planeIndex,
+    uint32_t*                                   pDisplayCount,
+    VkDisplayKHR*                               pDisplays);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetDisplayModePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModePropertiesKHR*                 pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateDisplayModeKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    const VkDisplayModeCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDisplayModeKHR*                           pMode);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetDisplayPlaneCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayModeKHR                            mode,
+    uint32_t                                    planeIndex,
+    VkDisplayPlaneCapabilitiesKHR*              pCapabilities);
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateDisplayPlaneSurfaceKHR(
+    VkInstance                                  instance,
+    const VkDisplaySurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateSharedSwapchainsKHR(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainCreateInfoKHR*             pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchains);
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateXlibSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXlibSurfaceCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceXlibPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    Display*                                    dpy,
+    VisualID                                    visualID);
+#endif // VK_USE_PLATFORM_XLIB_KHR
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateXcbSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXcbSurfaceCreateInfoKHR*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceXcbPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    xcb_connection_t*                           connection,
+    xcb_visualid_t                              visual_id);
+#endif // VK_USE_PLATFORM_XCB_KHR
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateWaylandSurfaceKHR(
+    VkInstance                                  instance,
+    const VkWaylandSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceWaylandPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    struct wl_display*                          display);
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateAndroidSurfaceKHR(
+    VkInstance                                  instance,
+    const VkAndroidSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateWin32SurfaceKHR(
+    VkInstance                                  instance,
+    const VkWin32SurfaceCreateInfoKHR*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceWin32PresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+
+
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFeatures2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures2*                  pFeatures);
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties2*                pProperties);
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties2*                        pFormatProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceImageFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceImageFormatInfo2*     pImageFormatInfo,
+    VkImageFormatProperties2*                   pImageFormatProperties);
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties2*                   pQueueFamilyProperties);
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMemoryProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties2*          pMemoryProperties);
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceSparseImageFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties2*             pProperties);
+
+
+VKAPI_ATTR void VKAPI_CALL GetDeviceGroupPeerMemoryFeaturesKHR(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures);
+
+VKAPI_ATTR void VKAPI_CALL CmdSetDeviceMaskKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask);
+
+VKAPI_ATTR void VKAPI_CALL CmdDispatchBaseKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ);
+
+
+
+VKAPI_ATTR void VKAPI_CALL TrimCommandPoolKHR(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags);
+
+
+VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDeviceGroupsKHR(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties);
+
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalBufferPropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalBufferInfo*   pExternalBufferInfo,
+    VkExternalBufferProperties*                 pExternalBufferProperties);
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+VKAPI_ATTR VkResult VKAPI_CALL GetMemoryWin32HandleKHR(
+    VkDevice                                    device,
+    const VkMemoryGetWin32HandleInfoKHR*        pGetWin32HandleInfo,
+    HANDLE*                                     pHandle);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetMemoryWin32HandlePropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    HANDLE                                      handle,
+    VkMemoryWin32HandlePropertiesKHR*           pMemoryWin32HandleProperties);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetMemoryFdKHR(
+    VkDevice                                    device,
+    const VkMemoryGetFdInfoKHR*                 pGetFdInfo,
+    int*                                        pFd);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetMemoryFdPropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    int                                         fd,
+    VkMemoryFdPropertiesKHR*                    pMemoryFdProperties);
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalSemaphorePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
+    VkExternalSemaphoreProperties*              pExternalSemaphoreProperties);
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+VKAPI_ATTR VkResult VKAPI_CALL ImportSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreWin32HandleInfoKHR*  pImportSemaphoreWin32HandleInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetWin32HandleInfoKHR*     pGetWin32HandleInfo,
+    HANDLE*                                     pHandle);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+
+VKAPI_ATTR VkResult VKAPI_CALL ImportSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreFdInfoKHR*           pImportSemaphoreFdInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetFdInfoKHR*              pGetFdInfo,
+    int*                                        pFd);
+
+
+VKAPI_ATTR void VKAPI_CALL CmdPushDescriptorSetKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites);
+
+VKAPI_ATTR void VKAPI_CALL CmdPushDescriptorSetWithTemplateKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    const void*                                 pData);
+
+
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate);
+
+VKAPI_ATTR void VKAPI_CALL DestroyDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSetWithTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData);
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateRenderPass2KHR(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo2KHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass);
+
+VKAPI_ATTR void VKAPI_CALL CmdBeginRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo);
+
+VKAPI_ATTR void VKAPI_CALL CmdNextSubpass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo);
+
+VKAPI_ATTR void VKAPI_CALL CmdEndRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo);
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainStatusKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain);
+
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceExternalFencePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalFenceInfo*    pExternalFenceInfo,
+    VkExternalFenceProperties*                  pExternalFenceProperties);
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+VKAPI_ATTR VkResult VKAPI_CALL ImportFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportFenceWin32HandleInfoKHR*      pImportFenceWin32HandleInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkFenceGetWin32HandleInfoKHR*         pGetWin32HandleInfo,
+    HANDLE*                                     pHandle);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+
+VKAPI_ATTR VkResult VKAPI_CALL ImportFenceFdKHR(
+    VkDevice                                    device,
+    const VkImportFenceFdInfoKHR*               pImportFenceFdInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetFenceFdKHR(
+    VkDevice                                    device,
+    const VkFenceGetFdInfoKHR*                  pGetFdInfo,
+    int*                                        pFd);
+
+
+VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t*                                   pCounterCount,
+    VkPerformanceCounterKHR*                    pCounters,
+    VkPerformanceCounterDescriptionKHR*         pCounterDescriptions);
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkQueryPoolPerformanceCreateInfoKHR*  pPerformanceQueryCreateInfo,
+    uint32_t*                                   pNumPasses);
+
+VKAPI_ATTR VkResult VKAPI_CALL AcquireProfilingLockKHR(
+    VkDevice                                    device,
+    const VkAcquireProfilingLockInfoKHR*        pInfo);
+
+VKAPI_ATTR void VKAPI_CALL ReleaseProfilingLockKHR(
+    VkDevice                                    device);
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilities2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkSurfaceCapabilities2KHR*                  pSurfaceCapabilities);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceFormats2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormat2KHR*                        pSurfaceFormats);
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceDisplayProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayProperties2KHR*                    pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceDisplayPlaneProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPlaneProperties2KHR*               pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetDisplayModeProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModeProperties2KHR*                pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetDisplayPlaneCapabilities2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDisplayPlaneInfo2KHR*               pDisplayPlaneInfo,
+    VkDisplayPlaneCapabilities2KHR*             pCapabilities);
+
+
+
+
+
+VKAPI_ATTR void VKAPI_CALL GetImageMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL GetBufferMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL GetImageSparseMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements);
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateSamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion);
+
+VKAPI_ATTR void VKAPI_CALL DestroySamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator);
+
+
+VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos);
+
+VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos);
+
+
+VKAPI_ATTR void VKAPI_CALL GetDescriptorSetLayoutSupportKHR(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport);
+
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+
+
+
+
+
+
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetSemaphoreCounterValueKHR(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    uint64_t*                                   pValue);
+
+VKAPI_ATTR VkResult VKAPI_CALL WaitSemaphoresKHR(
+    VkDevice                                    device,
+    const VkSemaphoreWaitInfoKHR*               pWaitInfo,
+    uint64_t                                    timeout);
+
+VKAPI_ATTR VkResult VKAPI_CALL SignalSemaphoreKHR(
+    VkDevice                                    device,
+    const VkSemaphoreSignalInfoKHR*             pSignalInfo);
+
+
+
+
+
+
+
+VKAPI_ATTR VkDeviceAddress VKAPI_CALL GetBufferDeviceAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo);
+
+VKAPI_ATTR uint64_t VKAPI_CALL GetBufferOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo);
+
+VKAPI_ATTR uint64_t VKAPI_CALL GetDeviceMemoryOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo);
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPipelineExecutablePropertiesKHR(
+    VkDevice                                    device,
+    const VkPipelineInfoKHR*                    pPipelineInfo,
+    uint32_t*                                   pExecutableCount,
+    VkPipelineExecutablePropertiesKHR*          pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPipelineExecutableStatisticsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pStatisticCount,
+    VkPipelineExecutableStatisticKHR*           pStatistics);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPipelineExecutableInternalRepresentationsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pInternalRepresentationCount,
+    VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations);
+
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    const VkDebugReportCallbackCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugReportCallbackEXT*                   pCallback);
+
+VKAPI_ATTR void VKAPI_CALL DestroyDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    VkDebugReportCallbackEXT                    callback,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL DebugReportMessageEXT(
+    VkInstance                                  instance,
+    VkDebugReportFlagsEXT                       flags,
+    VkDebugReportObjectTypeEXT                  objectType,
+    uint64_t                                    object,
+    size_t                                      location,
+    int32_t                                     messageCode,
+    const char*                                 pLayerPrefix,
+    const char*                                 pMessage);
+
+
+
+
+
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL DebugMarkerSetObjectTagEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectTagInfoEXT*        pTagInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL DebugMarkerSetObjectNameEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectNameInfoEXT*       pNameInfo);
+
+VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerBeginEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo);
+
+VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerEndEXT(
+    VkCommandBuffer                             commandBuffer);
+
+VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerInsertEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo);
+
+
+
+
+VKAPI_ATTR void VKAPI_CALL CmdBindTransformFeedbackBuffersEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets,
+    const VkDeviceSize*                         pSizes);
+
+VKAPI_ATTR void VKAPI_CALL CmdBeginTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets);
+
+VKAPI_ATTR void VKAPI_CALL CmdEndTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets);
+
+VKAPI_ATTR void VKAPI_CALL CmdBeginQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags,
+    uint32_t                                    index);
+
+VKAPI_ATTR void VKAPI_CALL CmdEndQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    uint32_t                                    index);
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawIndirectByteCountEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstInstance,
+    VkBuffer                                    counterBuffer,
+    VkDeviceSize                                counterBufferOffset,
+    uint32_t                                    counterOffset,
+    uint32_t                                    vertexStride);
+
+
+VKAPI_ATTR uint32_t VKAPI_CALL GetImageViewHandleNVX(
+    VkDevice                                    device,
+    const VkImageViewHandleInfoNVX*             pInfo);
+
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+
+
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetShaderInfoAMD(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    VkShaderStageFlagBits                       shaderStage,
+    VkShaderInfoTypeAMD                         infoType,
+    size_t*                                     pInfoSize,
+    void*                                       pInfo);
+
+
+#ifdef VK_USE_PLATFORM_GGP
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateStreamDescriptorSurfaceGGP(
+    VkInstance                                  instance,
+    const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif // VK_USE_PLATFORM_GGP
+
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceExternalImageFormatPropertiesNV(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageCreateFlags                          flags,
+    VkExternalMemoryHandleTypeFlagsNV           externalHandleType,
+    VkExternalImageFormatPropertiesNV*          pExternalImageFormatProperties);
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+VKAPI_ATTR VkResult VKAPI_CALL GetMemoryWin32HandleNV(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkExternalMemoryHandleTypeFlagsNV           handleType,
+    HANDLE*                                     pHandle);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+
+#ifdef VK_USE_PLATFORM_VI_NN
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateViSurfaceNN(
+    VkInstance                                  instance,
+    const VkViSurfaceCreateInfoNN*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif // VK_USE_PLATFORM_VI_NN
+
+
+
+
+
+
+VKAPI_ATTR void VKAPI_CALL CmdBeginConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkConditionalRenderingBeginInfoEXT*   pConditionalRenderingBegin);
+
+VKAPI_ATTR void VKAPI_CALL CmdEndConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer);
+
+
+VKAPI_ATTR void VKAPI_CALL CmdProcessCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdProcessCommandsInfoNVX*          pProcessCommandsInfo);
+
+VKAPI_ATTR void VKAPI_CALL CmdReserveSpaceForCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdReserveSpaceForCommandsInfoNVX*  pReserveSpaceInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkIndirectCommandsLayoutNVX*                pIndirectCommandsLayout);
+
+VKAPI_ATTR void VKAPI_CALL DestroyIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    VkIndirectCommandsLayoutNVX                 indirectCommandsLayout,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateObjectTableNVX(
+    VkDevice                                    device,
+    const VkObjectTableCreateInfoNVX*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkObjectTableNVX*                           pObjectTable);
+
+VKAPI_ATTR void VKAPI_CALL DestroyObjectTableNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL RegisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectTableEntryNVX* const*         ppObjectTableEntries,
+    const uint32_t*                             pObjectIndices);
+
+VKAPI_ATTR VkResult VKAPI_CALL UnregisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectEntryTypeNVX*                 pObjectEntryTypes,
+    const uint32_t*                             pObjectIndices);
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceGeneratedCommandsPropertiesNVX(
+    VkPhysicalDevice                            physicalDevice,
+    VkDeviceGeneratedCommandsFeaturesNVX*       pFeatures,
+    VkDeviceGeneratedCommandsLimitsNVX*         pLimits);
+
+
+VKAPI_ATTR void VKAPI_CALL CmdSetViewportWScalingNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewportWScalingNV*                 pViewportWScalings);
+
+
+VKAPI_ATTR VkResult VKAPI_CALL ReleaseDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display);
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+VKAPI_ATTR VkResult VKAPI_CALL AcquireXlibDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    VkDisplayKHR                                display);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetRandROutputDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    RROutput                                    rrOutput,
+    VkDisplayKHR*                               pDisplay);
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilities2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilities2EXT*                  pSurfaceCapabilities);
+
+
+VKAPI_ATTR VkResult VKAPI_CALL DisplayPowerControlEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayPowerInfoEXT*                pDisplayPowerInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL RegisterDeviceEventEXT(
+    VkDevice                                    device,
+    const VkDeviceEventInfoEXT*                 pDeviceEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence);
+
+VKAPI_ATTR VkResult VKAPI_CALL RegisterDisplayEventEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayEventInfoEXT*                pDisplayEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainCounterEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkSurfaceCounterFlagBitsEXT                 counter,
+    uint64_t*                                   pCounterValue);
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetRefreshCycleDurationGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkRefreshCycleDurationGOOGLE*               pDisplayTimingProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPastPresentationTimingGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pPresentationTimingCount,
+    VkPastPresentationTimingGOOGLE*             pPresentationTimings);
+
+
+
+
+
+
+
+VKAPI_ATTR void VKAPI_CALL CmdSetDiscardRectangleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstDiscardRectangle,
+    uint32_t                                    discardRectangleCount,
+    const VkRect2D*                             pDiscardRectangles);
+
+
+
+
+
+VKAPI_ATTR void VKAPI_CALL SetHdrMetadataEXT(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainKHR*                       pSwapchains,
+    const VkHdrMetadataEXT*                     pMetadata);
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateIOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkIOSSurfaceCreateInfoMVK*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif // VK_USE_PLATFORM_IOS_MVK
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateMacOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkMacOSSurfaceCreateInfoMVK*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif // VK_USE_PLATFORM_MACOS_MVK
+
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL SetDebugUtilsObjectNameEXT(
+    VkDevice                                    device,
+    const VkDebugUtilsObjectNameInfoEXT*        pNameInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL SetDebugUtilsObjectTagEXT(
+    VkDevice                                    device,
+    const VkDebugUtilsObjectTagInfoEXT*         pTagInfo);
+
+VKAPI_ATTR void VKAPI_CALL QueueBeginDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+
+VKAPI_ATTR void VKAPI_CALL QueueEndDebugUtilsLabelEXT(
+    VkQueue                                     queue);
+
+VKAPI_ATTR void VKAPI_CALL QueueInsertDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+
+VKAPI_ATTR void VKAPI_CALL CmdBeginDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+
+VKAPI_ATTR void VKAPI_CALL CmdEndDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer);
+
+VKAPI_ATTR void VKAPI_CALL CmdInsertDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    const VkDebugUtilsMessengerCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugUtilsMessengerEXT*                   pMessenger);
+
+VKAPI_ATTR void VKAPI_CALL DestroyDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessengerEXT                    messenger,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL SubmitDebugUtilsMessageEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessageSeverityFlagBitsEXT      messageSeverity,
+    VkDebugUtilsMessageTypeFlagsEXT             messageTypes,
+    const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData);
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+VKAPI_ATTR VkResult VKAPI_CALL GetAndroidHardwareBufferPropertiesANDROID(
+    VkDevice                                    device,
+    const struct AHardwareBuffer*               buffer,
+    VkAndroidHardwareBufferPropertiesANDROID*   pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetMemoryAndroidHardwareBufferANDROID(
+    VkDevice                                    device,
+    const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
+    struct AHardwareBuffer**                    pBuffer);
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+
+
+
+
+
+
+
+VKAPI_ATTR void VKAPI_CALL CmdSetSampleLocationsEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkSampleLocationsInfoEXT*             pSampleLocationsInfo);
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMultisamplePropertiesEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSampleCountFlagBits                       samples,
+    VkMultisamplePropertiesEXT*                 pMultisampleProperties);
+
+
+
+
+
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetImageDrmFormatModifierPropertiesEXT(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkImageDrmFormatModifierPropertiesEXT*      pProperties);
+
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateValidationCacheEXT(
+    VkDevice                                    device,
+    const VkValidationCacheCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkValidationCacheEXT*                       pValidationCache);
+
+VKAPI_ATTR void VKAPI_CALL DestroyValidationCacheEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL MergeValidationCachesEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkValidationCacheEXT*                 pSrcCaches);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetValidationCacheDataEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    size_t*                                     pDataSize,
+    void*                                       pData);
+
+
+
+
+VKAPI_ATTR void VKAPI_CALL CmdBindShadingRateImageNV(
+    VkCommandBuffer                             commandBuffer,
+    VkImageView                                 imageView,
+    VkImageLayout                               imageLayout);
+
+VKAPI_ATTR void VKAPI_CALL CmdSetViewportShadingRatePaletteNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkShadingRatePaletteNV*               pShadingRatePalettes);
+
+VKAPI_ATTR void VKAPI_CALL CmdSetCoarseSampleOrderNV(
+    VkCommandBuffer                             commandBuffer,
+    VkCoarseSampleOrderTypeNV                   sampleOrderType,
+    uint32_t                                    customSampleOrderCount,
+    const VkCoarseSampleOrderCustomNV*          pCustomSampleOrders);
+
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateAccelerationStructureNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureCreateInfoNV*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkAccelerationStructureNV*                  pAccelerationStructure);
+
+VKAPI_ATTR void VKAPI_CALL DestroyAccelerationStructureNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL GetAccelerationStructureMemoryRequirementsNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
+    VkMemoryRequirements2KHR*                   pMemoryRequirements);
+
+VKAPI_ATTR VkResult VKAPI_CALL BindAccelerationStructureMemoryNV(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindAccelerationStructureMemoryInfoNV* pBindInfos);
+
+VKAPI_ATTR void VKAPI_CALL CmdBuildAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    const VkAccelerationStructureInfoNV*        pInfo,
+    VkBuffer                                    instanceData,
+    VkDeviceSize                                instanceOffset,
+    VkBool32                                    update,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkBuffer                                    scratch,
+    VkDeviceSize                                scratchOffset);
+
+VKAPI_ATTR void VKAPI_CALL CmdCopyAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkCopyAccelerationStructureModeNV           mode);
+
+VKAPI_ATTR void VKAPI_CALL CmdTraceRaysNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    raygenShaderBindingTableBuffer,
+    VkDeviceSize                                raygenShaderBindingOffset,
+    VkBuffer                                    missShaderBindingTableBuffer,
+    VkDeviceSize                                missShaderBindingOffset,
+    VkDeviceSize                                missShaderBindingStride,
+    VkBuffer                                    hitShaderBindingTableBuffer,
+    VkDeviceSize                                hitShaderBindingOffset,
+    VkDeviceSize                                hitShaderBindingStride,
+    VkBuffer                                    callableShaderBindingTableBuffer,
+    VkDeviceSize                                callableShaderBindingOffset,
+    VkDeviceSize                                callableShaderBindingStride,
+    uint32_t                                    width,
+    uint32_t                                    height,
+    uint32_t                                    depth);
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateRayTracingPipelinesNV(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkRayTracingPipelineCreateInfoNV*     pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetRayTracingShaderGroupHandlesNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    firstGroup,
+    uint32_t                                    groupCount,
+    size_t                                      dataSize,
+    void*                                       pData);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetAccelerationStructureHandleNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    size_t                                      dataSize,
+    void*                                       pData);
+
+VKAPI_ATTR void VKAPI_CALL CmdWriteAccelerationStructuresPropertiesNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    accelerationStructureCount,
+    const VkAccelerationStructureNV*            pAccelerationStructures,
+    VkQueryType                                 queryType,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery);
+
+VKAPI_ATTR VkResult VKAPI_CALL CompileDeferredNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    shader);
+
+
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetMemoryHostPointerPropertiesEXT(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    const void*                                 pHostPointer,
+    VkMemoryHostPointerPropertiesEXT*           pMemoryHostPointerProperties);
+
+
+VKAPI_ATTR void VKAPI_CALL CmdWriteBufferMarkerAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    uint32_t                                    marker);
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceCalibrateableTimeDomainsEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pTimeDomainCount,
+    VkTimeDomainEXT*                            pTimeDomains);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetCalibratedTimestampsEXT(
+    VkDevice                                    device,
+    uint32_t                                    timestampCount,
+    const VkCalibratedTimestampInfoEXT*         pTimestampInfos,
+    uint64_t*                                   pTimestamps,
+    uint64_t*                                   pMaxDeviation);
+
+
+
+
+#ifdef VK_USE_PLATFORM_GGP
+#endif // VK_USE_PLATFORM_GGP
+
+
+
+
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawMeshTasksNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    taskCount,
+    uint32_t                                    firstTask);
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawMeshTasksIndirectNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL CmdDrawMeshTasksIndirectCountNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+
+
+
+VKAPI_ATTR void VKAPI_CALL CmdSetExclusiveScissorNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstExclusiveScissor,
+    uint32_t                                    exclusiveScissorCount,
+    const VkRect2D*                             pExclusiveScissors);
+
+
+VKAPI_ATTR void VKAPI_CALL CmdSetCheckpointNV(
+    VkCommandBuffer                             commandBuffer,
+    const void*                                 pCheckpointMarker);
+
+VKAPI_ATTR void VKAPI_CALL GetQueueCheckpointDataNV(
+    VkQueue                                     queue,
+    uint32_t*                                   pCheckpointDataCount,
+    VkCheckpointDataNV*                         pCheckpointData);
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL InitializePerformanceApiINTEL(
+    VkDevice                                    device,
+    const VkInitializePerformanceApiInfoINTEL*  pInitializeInfo);
+
+VKAPI_ATTR void VKAPI_CALL UninitializePerformanceApiINTEL(
+    VkDevice                                    device);
+
+VKAPI_ATTR VkResult VKAPI_CALL CmdSetPerformanceMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceMarkerInfoINTEL*         pMarkerInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL CmdSetPerformanceStreamMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceStreamMarkerInfoINTEL*   pMarkerInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL CmdSetPerformanceOverrideINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceOverrideInfoINTEL*       pOverrideInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL AcquirePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo,
+    VkPerformanceConfigurationINTEL*            pConfiguration);
+
+VKAPI_ATTR VkResult VKAPI_CALL ReleasePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    VkPerformanceConfigurationINTEL             configuration);
+
+VKAPI_ATTR VkResult VKAPI_CALL QueueSetPerformanceConfigurationINTEL(
+    VkQueue                                     queue,
+    VkPerformanceConfigurationINTEL             configuration);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPerformanceParameterINTEL(
+    VkDevice                                    device,
+    VkPerformanceParameterTypeINTEL             parameter,
+    VkPerformanceValueINTEL*                    pValue);
+
+
+
+VKAPI_ATTR void VKAPI_CALL SetLocalDimmingAMD(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapChain,
+    VkBool32                                    localDimmingEnable);
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateImagePipeSurfaceFUCHSIA(
+    VkInstance                                  instance,
+    const VkImagePipeSurfaceCreateInfoFUCHSIA*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif // VK_USE_PLATFORM_FUCHSIA
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateMetalSurfaceEXT(
+    VkInstance                                  instance,
+    const VkMetalSurfaceCreateInfoEXT*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif // VK_USE_PLATFORM_METAL_EXT
+
+
+
+
+
+
+
+
+
+
+
+
+VKAPI_ATTR VkDeviceAddress VKAPI_CALL GetBufferDeviceAddressEXT(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo);
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceToolPropertiesEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pToolCount,
+    VkPhysicalDeviceToolPropertiesEXT*          pToolProperties);
+
+
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceCooperativeMatrixPropertiesNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkCooperativeMatrixPropertiesNV*            pProperties);
+
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pCombinationCount,
+    VkFramebufferMixedSamplesCombinationNV*     pCombinations);
+
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfacePresentModes2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes);
+
+VKAPI_ATTR VkResult VKAPI_CALL AcquireFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain);
+
+VKAPI_ATTR VkResult VKAPI_CALL ReleaseFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain);
+
+VKAPI_ATTR VkResult VKAPI_CALL GetDeviceGroupSurfacePresentModes2EXT(
+    VkDevice                                    device,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateHeadlessSurfaceEXT(
+    VkInstance                                  instance,
+    const VkHeadlessSurfaceCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+
+VKAPI_ATTR void VKAPI_CALL CmdSetLineStippleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    lineStippleFactor,
+    uint16_t                                    lineStipplePattern);
+
+
+VKAPI_ATTR void VKAPI_CALL ResetQueryPoolEXT(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount);
+
+
+
+
+
+
+
+
+// Layer object type identifiers
+enum LayerObjectTypeId {
+    LayerObjectTypeInstance,                    // Container for an instance dispatch object
+    LayerObjectTypeDevice,                      // Container for a device dispatch object
+    LayerObjectTypeThreading,                   // Instance or device threading layer object
+    LayerObjectTypeParameterValidation,         // Instance or device parameter validation layer object
+    LayerObjectTypeObjectTracker,               // Instance or device object tracker layer object
+    LayerObjectTypeCoreValidation,              // Instance or device core validation layer object
+    LayerObjectTypeBestPractices,               // Instance or device best practices layer object
+    LayerObjectTypeGpuAssisted,                 // Instance or device gpu assisted validation layer object
+    LayerObjectTypeMaxEnum,                     // Max enum count
+};
+
+struct TEMPLATE_STATE {
+    VkDescriptorUpdateTemplateKHR desc_update_template;
+    safe_VkDescriptorUpdateTemplateCreateInfo create_info;
+    bool destroyed;
+
+    TEMPLATE_STATE(VkDescriptorUpdateTemplateKHR update_template, safe_VkDescriptorUpdateTemplateCreateInfo *pCreateInfo)
+        : desc_update_template(update_template), create_info(*pCreateInfo), destroyed(false) {}
+};
+
+class LAYER_PHYS_DEV_PROPERTIES {
+public:
+    VkPhysicalDeviceProperties properties;
+    std::vector<VkQueueFamilyProperties> queue_family_properties;
+};
+
+typedef enum ValidationCheckDisables {
+    VALIDATION_CHECK_DISABLE_COMMAND_BUFFER_STATE,
+    VALIDATION_CHECK_DISABLE_OBJECT_IN_USE,
+    VALIDATION_CHECK_DISABLE_IDLE_DESCRIPTOR_SET,
+    VALIDATION_CHECK_DISABLE_PUSH_CONSTANT_RANGE,
+    VALIDATION_CHECK_DISABLE_QUERY_VALIDATION,
+    VALIDATION_CHECK_DISABLE_IMAGE_LAYOUT_VALIDATION,
+} ValidationCheckDisables;
+
+// The CHECK_DISABLED struct is a container for bools that can block validation checks from being performed.
+// These bools are all "false" by default, meaning that all checks are enabled. They can be set via the
+// vk_layer_settings.txt config file or at CreateInstance time via the VK_EXT_validation_features extension,
+// which can selectively disable checks (see the illustrative sketch after this struct).
+struct CHECK_DISABLED {
+    bool command_buffer_state;                      // Skip command buffer state validation
+    bool object_in_use;                             // Skip all object in_use checking
+    bool idle_descriptor_set;                       // Skip check to verify that descriptor set is not in-use
+    bool push_constant_range;                       // Skip push constant range checks
+    bool query_validation;                          // Disable all core validation query-related checks
+    bool image_layout_validation;                   // Disable image layout validation
+    bool object_tracking;                           // Disable object lifetime validation
+    bool core_checks;                               // Disable core validation checks
+    bool thread_safety;                             // Disable thread safety validation
+    bool stateless_checks;                          // Disable stateless validation checks
+    bool handle_wrapping;                           // Disable unique handles/handle wrapping
+    bool shader_validation;                         // Skip validation for shaders
+
+    void SetAll(bool value) { std::fill(&command_buffer_state, &shader_validation + 1, value); }
+};
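+
+// Editor's illustrative sketch (not chassis code): the comment above notes that checks can be disabled at
+// CreateInstance time through VK_EXT_validation_features. A minimal application-side example follows; the
+// function name, the layer-enable call, and the particular checks chosen are assumptions for illustration.
+inline VkInstance CreateInstanceWithSomeChecksDisabled() {
+    const char* layers[] = { "VK_LAYER_KHRONOS_validation" };
+
+    const VkValidationFeatureDisableEXT disables[] = {
+        VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT,  // roughly CHECK_DISABLED::thread_safety above
+        VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT,    // roughly CHECK_DISABLED::core_checks above
+    };
+
+    VkValidationFeaturesEXT validation_features = {};
+    validation_features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
+    validation_features.disabledValidationFeatureCount = 2;
+    validation_features.pDisabledValidationFeatures = disables;
+
+    VkApplicationInfo app_info = {};
+    app_info.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
+    app_info.apiVersion = VK_API_VERSION_1_1;
+
+    VkInstanceCreateInfo create_info = {};
+    create_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
+    create_info.pNext = &validation_features;  // chain the disable list into instance creation
+    create_info.pApplicationInfo = &app_info;
+    create_info.enabledLayerCount = 1;
+    create_info.ppEnabledLayerNames = layers;
+
+    VkInstance instance = VK_NULL_HANDLE;
+    vkCreateInstance(&create_info, nullptr, &instance);  // error handling omitted in this sketch
+    return instance;
+}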
+
+struct CHECK_ENABLED {
+    bool gpu_validation;
+    bool gpu_validation_reserve_binding_slot;
+    bool best_practices;
+
+    void SetAll(bool value) { std::fill(&gpu_validation, &gpu_validation_reserve_binding_slot + 1, value); }
+};
+
+// Layer chassis validation object base class definition
+class ValidationObject {
+    public:
+        uint32_t api_version;
+        debug_report_data* report_data = nullptr;
+
+        VkLayerInstanceDispatchTable instance_dispatch_table;
+        VkLayerDispatchTable device_dispatch_table;
+
+        InstanceExtensions instance_extensions;
+        DeviceExtensions device_extensions = {};
+        CHECK_DISABLED disabled = {};
+        CHECK_ENABLED enabled = {};
+
+        VkInstance instance = VK_NULL_HANDLE;
+        VkPhysicalDevice physical_device = VK_NULL_HANDLE;
+        VkDevice device = VK_NULL_HANDLE;
+        LAYER_PHYS_DEV_PROPERTIES phys_dev_properties = {};
+
+        std::vector<ValidationObject*> object_dispatch;
+        LayerObjectTypeId container_type;
+
+        std::string layer_name = "CHASSIS";
+
+        // Constructor
+        ValidationObject(){};
+        // Destructor
+        virtual ~ValidationObject() {};
+
+        ReadWriteLock validation_object_mutex;
+        virtual read_lock_guard_t read_lock() {
+            return read_lock_guard_t(validation_object_mutex);
+        }
+        virtual write_lock_guard_t write_lock() {
+            return write_lock_guard_t(validation_object_mutex);
+        }
+
+        ValidationObject* GetValidationObject(std::vector<ValidationObject*>& object_dispatch, LayerObjectTypeId object_type) {
+            for (auto validation_object : object_dispatch) {
+                if (validation_object->container_type == object_type) {
+                    return validation_object;
+                }
+            }
+            return nullptr;
+        };
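+
+        // Illustrative usage (editor's sketch, not generated code): pick a specific layer object out of a
+        // dispatch list by its container type, e.g.
+        //     ValidationObject* core = GetValidationObject(layer_data->object_dispatch, LayerObjectTypeCoreValidation);
+        //     if (core) { /* core-validation-specific work */ }
+        // "layer_data" is an assumption standing in for whatever object owns the object_dispatch vector.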
+
+        // Handle Wrapping Data
+        // Reverse map display handles
+        vl_concurrent_unordered_map<VkDisplayKHR, uint64_t, 0> display_id_reverse_mapping;
+        // Wrapping Descriptor Template Update structures requires access to the template createinfo structs
+        std::unordered_map<uint64_t, std::unique_ptr<TEMPLATE_STATE>> desc_template_createinfo_map;
+        struct SubpassesUsageStates {
+            std::unordered_set<uint32_t> subpasses_using_color_attachment;
+            std::unordered_set<uint32_t> subpasses_using_depthstencil_attachment;
+        };
+        // Uses unwrapped handles
+        std::unordered_map<VkRenderPass, SubpassesUsageStates> renderpasses_states;
+        // Map of wrapped swapchain handles to arrays of wrapped swapchain image IDs
+        // Each swapchain has an immutable list of wrapped swapchain image IDs -- always return these IDs if they exist
+        std::unordered_map<VkSwapchainKHR, std::vector<VkImage>> swapchain_wrapped_image_handle_map;
+        // Map of wrapped descriptor pools to set of wrapped descriptor sets allocated from each pool
+        std::unordered_map<VkDescriptorPool, std::unordered_set<VkDescriptorSet>> pool_descriptor_sets_map;
+
+
+        // Unwrap a handle.
+        template <typename HandleType>
+        HandleType Unwrap(HandleType wrappedHandle) {
+            auto iter = unique_id_mapping.find(reinterpret_cast<uint64_t const &>(wrappedHandle));
+            if (iter == unique_id_mapping.end())
+                return (HandleType)0;
+            return (HandleType)iter->second;
+        }
+
+        // Wrap a newly created handle with a new unique ID, and return the new ID.
+        template <typename HandleType>
+        HandleType WrapNew(HandleType newlyCreatedHandle) {
+            auto unique_id = global_unique_id++;
+            unique_id = HashedUint64::hash(unique_id);
+            unique_id_mapping.insert_or_assign(unique_id, reinterpret_cast<uint64_t const &>(newlyCreatedHandle));
+            return (HandleType)unique_id;
+        }
+
+        // Specialized handling for VkDisplayKHR. Adds an entry to enable reverse-lookup.
+        VkDisplayKHR WrapDisplay(VkDisplayKHR newlyCreatedHandle, ValidationObject *map_data) {
+            auto unique_id = global_unique_id++;
+            unique_id = HashedUint64::hash(unique_id);
+            unique_id_mapping.insert_or_assign(unique_id, reinterpret_cast<uint64_t const &>(newlyCreatedHandle));
+            map_data->display_id_reverse_mapping.insert_or_assign(newlyCreatedHandle, unique_id);
+            return (VkDisplayKHR)unique_id;
+        }
+
+        // VkDisplayKHR objects don't have a single point of creation, so we need to see if one already exists in the map before
+        // creating another.
+        VkDisplayKHR MaybeWrapDisplay(VkDisplayKHR handle, ValidationObject *map_data) {
+            // See if this display is already known
+            auto it = map_data->display_id_reverse_mapping.find(handle);
+            if (it != map_data->display_id_reverse_mapping.end()) return (VkDisplayKHR)it->second;
+            // Unknown, so wrap
+            return WrapDisplay(handle, map_data);
+        }
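+
+        // Editor's illustrative sketch (hypothetical helpers, not part of the generated chassis): the usual
+        // handle-wrapping pattern around a driver call is to wrap freshly created non-dispatchable handles
+        // with WrapNew() before returning them to the application, and to Unwrap() application handles before
+        // passing them back down the chain. The Example* names below are assumptions for illustration only.
+        VkResult ExampleWrappedCreateSampler(VkDevice dev, const VkSamplerCreateInfo* pCreateInfo,
+                                             const VkAllocationCallbacks* pAllocator, VkSampler* pSampler) {
+            VkResult result = device_dispatch_table.CreateSampler(dev, pCreateInfo, pAllocator, pSampler);
+            if (result == VK_SUCCESS) *pSampler = WrapNew(*pSampler);  // hand a unique ID back to the app
+            return result;
+        }
+        void ExampleWrappedDestroySampler(VkDevice dev, VkSampler sampler, const VkAllocationCallbacks* pAllocator) {
+            VkSampler unwrapped = Unwrap(sampler);  // translate the unique ID back into the driver handle
+            device_dispatch_table.DestroySampler(dev, unwrapped, pAllocator);
+            // A real dispatch routine would also erase the ID from unique_id_mapping at this point.
+        }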
+
+        // Pre/post hook point declarations
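+        //
+        // Illustrative call flow (editor's sketch, not generated code): for an API entry point vkFoo the
+        // chassis-generated intercept typically runs every registered validation object through the hooks
+        // declared below, roughly:
+        //
+        //     bool skip = false;
+        //     for (auto* obj : layer_data->object_dispatch) {
+        //         auto lock = obj->read_lock();
+        //         skip |= obj->PreCallValidateFoo(args...);          // const: report problems, never mutate
+        //     }
+        //     if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+        //     for (auto* obj : layer_data->object_dispatch) { auto lock = obj->write_lock(); obj->PreCallRecordFoo(args...); }
+        //     VkResult result = DispatchFoo(args...);                // call down the chain
+        //     for (auto* obj : layer_data->object_dispatch) { auto lock = obj->write_lock(); obj->PostCallRecordFoo(args..., result); }
+        //     return result;
+        //
+        // "layer_data", "DispatchFoo", and "Foo" are placeholders, not identifiers from this header.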
+        virtual bool PreCallValidateCreateInstance(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance) const { return false; };
+        virtual void PreCallRecordCreateInstance(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance) {};
+        virtual void PostCallRecordCreateInstance(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance, VkResult result) {};
+        virtual bool PreCallValidateDestroyInstance(VkInstance instance, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroyInstance(VkInstance instance, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroyInstance(VkInstance instance, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateEnumeratePhysicalDevices(VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices) const { return false; };
+        virtual void PreCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices) {};
+        virtual void PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices, VkResult result) {};
+        virtual bool PreCallValidateGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures) {};
+        virtual void PostCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures) {};
+        virtual bool PreCallValidateGetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties) {};
+        virtual bool PreCallValidateGetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties, VkResult result) {};
+        virtual bool PreCallValidateGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties) {};
+        virtual bool PreCallValidateGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties) {};
+        virtual bool PreCallValidateGetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties) {};
+        virtual bool PreCallValidateGetInstanceProcAddr(VkInstance instance, const char* pName) const { return false; };
+        virtual void PreCallRecordGetInstanceProcAddr(VkInstance instance, const char* pName) {};
+        virtual void PostCallRecordGetInstanceProcAddr(VkInstance instance, const char* pName) {};
+        virtual bool PreCallValidateGetDeviceProcAddr(VkDevice device, const char* pName) const { return false; };
+        virtual void PreCallRecordGetDeviceProcAddr(VkDevice device, const char* pName) {};
+        virtual void PostCallRecordGetDeviceProcAddr(VkDevice device, const char* pName) {};
+        virtual bool PreCallValidateCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice) const { return false; };
+        virtual void PreCallRecordCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice) {};
+        virtual void PostCallRecordCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice, VkResult result) {};
+        virtual bool PreCallValidateDestroyDevice(VkDevice device, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateEnumerateInstanceExtensionProperties(const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties) const { return false; };
+        virtual void PreCallRecordEnumerateInstanceExtensionProperties(const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties) {};
+        virtual void PostCallRecordEnumerateInstanceExtensionProperties(const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties, VkResult result) {};
+        virtual bool PreCallValidateEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties) const { return false; };
+        virtual void PreCallRecordEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties) {};
+        virtual void PostCallRecordEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties, VkResult result) {};
+        virtual bool PreCallValidateEnumerateInstanceLayerProperties(uint32_t* pPropertyCount, VkLayerProperties* pProperties) const { return false; };
+        virtual void PreCallRecordEnumerateInstanceLayerProperties(uint32_t* pPropertyCount, VkLayerProperties* pProperties) {};
+        virtual void PostCallRecordEnumerateInstanceLayerProperties(uint32_t* pPropertyCount, VkLayerProperties* pProperties, VkResult result) {};
+        virtual bool PreCallValidateEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties) const { return false; };
+        virtual void PreCallRecordEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties) {};
+        virtual void PostCallRecordEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties, VkResult result) {};
+        virtual bool PreCallValidateGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue) const { return false; };
+        virtual void PreCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue) {};
+        virtual void PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue) {};
+        virtual bool PreCallValidateQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence) const { return false; };
+        virtual void PreCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence) {};
+        virtual void PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence, VkResult result) {};
+        virtual bool PreCallValidateQueueWaitIdle(VkQueue queue) const { return false; };
+        virtual void PreCallRecordQueueWaitIdle(VkQueue queue) {};
+        virtual void PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {};
+        virtual bool PreCallValidateDeviceWaitIdle(VkDevice device) const { return false; };
+        virtual void PreCallRecordDeviceWaitIdle(VkDevice device) {};
+        virtual void PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {};
+        virtual bool PreCallValidateAllocateMemory(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory) const { return false; };
+        virtual void PreCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory) {};
+        virtual void PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory, VkResult result) {};
+        virtual bool PreCallValidateFreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordFreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateMapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData) const { return false; };
+        virtual void PreCallRecordMapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData) {};
+        virtual void PostCallRecordMapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData, VkResult result) {};
+        virtual bool PreCallValidateUnmapMemory(VkDevice device, VkDeviceMemory memory) const { return false; };
+        virtual void PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory memory) {};
+        virtual void PostCallRecordUnmapMemory(VkDevice device, VkDeviceMemory memory) {};
+        virtual bool PreCallValidateFlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges) const { return false; };
+        virtual void PreCallRecordFlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges) {};
+        virtual void PostCallRecordFlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges, VkResult result) {};
+        virtual bool PreCallValidateInvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges) const { return false; };
+        virtual void PreCallRecordInvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges) {};
+        virtual void PostCallRecordInvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges, VkResult result) {};
+        virtual bool PreCallValidateGetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) const { return false; };
+        virtual void PreCallRecordGetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) {};
+        virtual void PostCallRecordGetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) {};
+        virtual bool PreCallValidateBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset) const { return false; };
+        virtual void PreCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset) {};
+        virtual void PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset, VkResult result) {};
+        virtual bool PreCallValidateBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) const { return false; };
+        virtual void PreCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) {};
+        virtual void PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset, VkResult result) {};
+        virtual bool PreCallValidateGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements) const { return false; };
+        virtual void PreCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements) {};
+        virtual void PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements) {};
+        virtual bool PreCallValidateGetImageMemoryRequirements(VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements) const { return false; };
+        virtual void PreCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements) {};
+        virtual void PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements) {};
+        virtual bool PreCallValidateGetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements) const { return false; };
+        virtual void PreCallRecordGetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements) {};
+        virtual void PostCallRecordGetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements) {};
+        virtual bool PreCallValidateGetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties) {};
+        virtual bool PreCallValidateQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence) const { return false; };
+        virtual void PreCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence) {};
+        virtual void PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence, VkResult result) {};
+        virtual bool PreCallValidateCreateFence(VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence) const { return false; };
+        virtual void PreCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence) {};
+        virtual void PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence, VkResult result) {};
+        virtual bool PreCallValidateDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateResetFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences) const { return false; };
+        virtual void PreCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences) {};
+        virtual void PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkResult result) {};
+        virtual bool PreCallValidateGetFenceStatus(VkDevice device, VkFence fence) const { return false; };
+        virtual void PreCallRecordGetFenceStatus(VkDevice device, VkFence fence) {};
+        virtual void PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {};
+        virtual bool PreCallValidateWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout) const { return false; };
+        virtual void PreCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout) {};
+        virtual void PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout, VkResult result) {};
+        virtual bool PreCallValidateCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore) const { return false; };
+        virtual void PreCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore) {};
+        virtual void PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore, VkResult result) {};
+        virtual bool PreCallValidateDestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateCreateEvent(VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent) const { return false; };
+        virtual void PreCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent) {};
+        virtual void PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent, VkResult result) {};
+        virtual bool PreCallValidateDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateGetEventStatus(VkDevice device, VkEvent event) const { return false; };
+        virtual void PreCallRecordGetEventStatus(VkDevice device, VkEvent event) {};
+        virtual void PostCallRecordGetEventStatus(VkDevice device, VkEvent event, VkResult result) {};
+        virtual bool PreCallValidateSetEvent(VkDevice device, VkEvent event) const { return false; };
+        virtual void PreCallRecordSetEvent(VkDevice device, VkEvent event) {};
+        virtual void PostCallRecordSetEvent(VkDevice device, VkEvent event, VkResult result) {};
+        virtual bool PreCallValidateResetEvent(VkDevice device, VkEvent event) const { return false; };
+        virtual void PreCallRecordResetEvent(VkDevice device, VkEvent event) {};
+        virtual void PostCallRecordResetEvent(VkDevice device, VkEvent event, VkResult result) {};
+        virtual bool PreCallValidateCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool) const { return false; };
+        virtual void PreCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool) {};
+        virtual void PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool, VkResult result) {};
+        virtual bool PreCallValidateDestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateGetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags) const { return false; };
+        virtual void PreCallRecordGetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags) {};
+        virtual void PostCallRecordGetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags, VkResult result) {};
+        virtual bool PreCallValidateCreateBuffer(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer) const { return false; };
+        virtual void PreCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer) {};
+        virtual void PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer, VkResult result) {};
+        virtual bool PreCallValidateDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateCreateBufferView(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView) const { return false; };
+        virtual void PreCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView) {};
+        virtual void PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView, VkResult result) {};
+        virtual bool PreCallValidateDestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateCreateImage(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage) const { return false; };
+        virtual void PreCallRecordCreateImage(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage) {};
+        virtual void PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage, VkResult result) {};
+        virtual bool PreCallValidateDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateGetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout) const { return false; };
+        virtual void PreCallRecordGetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout) {};
+        virtual void PostCallRecordGetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout) {};
+        virtual bool PreCallValidateCreateImageView(VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView) const { return false; };
+        virtual void PreCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView) {};
+        virtual void PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView, VkResult result) {};
+        virtual bool PreCallValidateDestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule) const { return false; };
+        virtual void PreCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule) {};
+        virtual void PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule, VkResult result) {};
+        virtual bool PreCallValidateDestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateCreatePipelineCache(VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache) const { return false; };
+        virtual void PreCallRecordCreatePipelineCache(VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache) {};
+        virtual void PostCallRecordCreatePipelineCache(VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache, VkResult result) {};
+        virtual bool PreCallValidateDestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateGetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData) const { return false; };
+        virtual void PreCallRecordGetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData) {};
+        virtual void PostCallRecordGetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData, VkResult result) {};
+        virtual bool PreCallValidateMergePipelineCaches(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches) const { return false; };
+        virtual void PreCallRecordMergePipelineCaches(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches) {};
+        virtual void PostCallRecordMergePipelineCaches(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches, VkResult result) {};
+        virtual bool PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) const { return false; };
+        virtual void PreCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) {};
+        virtual void PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result) {};
+        virtual bool PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) const { return false; };
+        virtual void PreCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) {};
+        virtual void PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result) {};
+        virtual bool PreCallValidateDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout) const { return false; };
+        virtual void PreCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout) {};
+        virtual void PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout, VkResult result) {};
+        virtual bool PreCallValidateDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateCreateSampler(VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler) const { return false; };
+        virtual void PreCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler) {};
+        virtual void PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler, VkResult result) {};
+        virtual bool PreCallValidateDestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateCreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout) const { return false; };
+        virtual void PreCallRecordCreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout) {};
+        virtual void PostCallRecordCreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout, VkResult result) {};
+        virtual bool PreCallValidateDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool) const { return false; };
+        virtual void PreCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool) {};
+        virtual void PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool, VkResult result) {};
+        virtual bool PreCallValidateDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) const { return false; };
+        virtual void PreCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) {};
+        virtual void PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags, VkResult result) {};
+        virtual bool PreCallValidateAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets) const { return false; };
+        virtual void PreCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets) {};
+        virtual void PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets, VkResult result) {};
+        virtual bool PreCallValidateFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets) const { return false; };
+        virtual void PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets) {};
+        virtual void PostCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, VkResult result) {};
+        virtual bool PreCallValidateUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies) const { return false; };
+        virtual void PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies) {};
+        virtual void PostCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies) {};
+        virtual bool PreCallValidateCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer) const { return false; };
+        virtual void PreCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer) {};
+        virtual void PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer, VkResult result) {};
+        virtual bool PreCallValidateDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass) const { return false; };
+        virtual void PreCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass) {};
+        virtual void PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass, VkResult result) {};
+        virtual bool PreCallValidateDestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateGetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity) const { return false; };
+        virtual void PreCallRecordGetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity) {};
+        virtual void PostCallRecordGetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity) {};
+        virtual bool PreCallValidateCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool) const { return false; };
+        virtual void PreCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool) {};
+        virtual void PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool, VkResult result) {};
+        virtual bool PreCallValidateDestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags) const { return false; };
+        virtual void PreCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags) {};
+        virtual void PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags, VkResult result) {};
+        virtual bool PreCallValidateAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers) const { return false; };
+        virtual void PreCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers) {};
+        virtual void PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers, VkResult result) {};
+        virtual bool PreCallValidateFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers) const { return false; };
+        virtual void PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers) {};
+        virtual void PostCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers) {};
+        virtual bool PreCallValidateBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo) const { return false; };
+        virtual void PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo) {};
+        virtual void PostCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo, VkResult result) {};
+        virtual bool PreCallValidateEndCommandBuffer(VkCommandBuffer commandBuffer) const { return false; };
+        virtual void PreCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer) {};
+        virtual void PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {};
+        virtual bool PreCallValidateResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags) const { return false; };
+        virtual void PreCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags) {};
+        virtual void PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags, VkResult result) {};
+        virtual bool PreCallValidateCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) const { return false; };
+        virtual void PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) {};
+        virtual void PostCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) {};
+        virtual bool PreCallValidateCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports) const { return false; };
+        virtual void PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports) {};
+        virtual void PostCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports) {};
+        virtual bool PreCallValidateCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors) const { return false; };
+        virtual void PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors) {};
+        virtual void PostCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors) {};
+        virtual bool PreCallValidateCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) const { return false; };
+        virtual void PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {};
+        virtual void PostCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {};
+        virtual bool PreCallValidateCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor) const { return false; };
+        virtual void PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor) {};
+        virtual void PostCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor) {};
+        virtual bool PreCallValidateCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) const { return false; };
+        virtual void PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {};
+        virtual void PostCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {};
+        virtual bool PreCallValidateCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds) const { return false; };
+        virtual void PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds) {};
+        virtual void PostCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds) {};
+        virtual bool PreCallValidateCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask) const { return false; };
+        virtual void PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask) {};
+        virtual void PostCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask) {};
+        virtual bool PreCallValidateCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask) const { return false; };
+        virtual void PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask) {};
+        virtual void PostCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask) {};
+        virtual bool PreCallValidateCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference) const { return false; };
+        virtual void PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference) {};
+        virtual void PostCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference) {};
+        virtual bool PreCallValidateCmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) const { return false; };
+        virtual void PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {};
+        virtual void PostCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {};
+        virtual bool PreCallValidateCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) const { return false; };
+        virtual void PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) {};
+        virtual void PostCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) {};
+        virtual bool PreCallValidateCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets) const { return false; };
+        virtual void PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets) {};
+        virtual void PostCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets) {};
+        virtual bool PreCallValidateCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) const { return false; };
+        virtual void PreCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) {};
+        virtual void PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) {};
+        virtual bool PreCallValidateCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) const { return false; };
+        virtual void PreCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {};
+        virtual void PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {};
+        virtual bool PreCallValidateCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) const { return false; };
+        virtual void PreCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {};
+        virtual void PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {};
+        virtual bool PreCallValidateCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) const { return false; };
+        virtual void PreCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {};
+        virtual void PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {};
+        virtual bool PreCallValidateCmdDispatch(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) const { return false; };
+        virtual void PreCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) {};
+        virtual void PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) {};
+        virtual bool PreCallValidateCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) const { return false; };
+        virtual void PreCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) {};
+        virtual void PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) {};
+        virtual bool PreCallValidateCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions) const { return false; };
+        virtual void PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions) {};
+        virtual void PostCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions) {};
+        virtual bool PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions) const { return false; };
+        virtual void PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions) {};
+        virtual void PostCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions) {};
+        virtual bool PreCallValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter) const { return false; };
+        virtual void PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter) {};
+        virtual void PostCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter) {};
+        virtual bool PreCallValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions) const { return false; };
+        virtual void PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions) {};
+        virtual void PostCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions) {};
+        virtual bool PreCallValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions) const { return false; };
+        virtual void PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions) {};
+        virtual void PostCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions) {};
+        virtual bool PreCallValidateCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData) const { return false; };
+        virtual void PreCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData) {};
+        virtual void PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData) {};
+        virtual bool PreCallValidateCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data) const { return false; };
+        virtual void PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data) {};
+        virtual void PostCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data) {};
+        virtual bool PreCallValidateCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) const { return false; };
+        virtual void PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {};
+        virtual void PostCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {};
+        virtual bool PreCallValidateCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) const { return false; };
+        virtual void PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {};
+        virtual void PostCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {};
+        virtual bool PreCallValidateCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects) const { return false; };
+        virtual void PreCallRecordCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects) {};
+        virtual void PostCallRecordCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects) {};
+        virtual bool PreCallValidateCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions) const { return false; };
+        virtual void PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions) {};
+        virtual void PostCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions) {};
+        virtual bool PreCallValidateCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const { return false; };
+        virtual void PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {};
+        virtual void PostCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {};
+        virtual bool PreCallValidateCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const { return false; };
+        virtual void PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {};
+        virtual void PostCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {};
+        virtual bool PreCallValidateCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) const { return false; };
+        virtual void PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {};
+        virtual void PostCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {};
+        virtual bool PreCallValidateCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) const { return false; };
+        virtual void PreCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {};
+        virtual void PostCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {};
+        virtual bool PreCallValidateCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags) const { return false; };
+        virtual void PreCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags) {};
+        virtual void PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags) {};
+        virtual bool PreCallValidateCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query) const { return false; };
+        virtual void PreCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query) {};
+        virtual void PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query) {};
+        virtual bool PreCallValidateCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) const { return false; };
+        virtual void PreCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) {};
+        virtual void PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) {};
+        virtual bool PreCallValidateCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query) const { return false; };
+        virtual void PreCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query) {};
+        virtual void PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query) {};
+        virtual bool PreCallValidateCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags) const { return false; };
+        virtual void PreCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags) {};
+        virtual void PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags) {};
+        virtual bool PreCallValidateCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues) const { return false; };
+        virtual void PreCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues) {};
+        virtual void PostCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues) {};
+        virtual bool PreCallValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents) const { return false; };
+        virtual void PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents) {};
+        virtual void PostCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents) {};
+        virtual bool PreCallValidateCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) const { return false; };
+        virtual void PreCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {};
+        virtual void PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {};
+        virtual bool PreCallValidateCmdEndRenderPass(VkCommandBuffer commandBuffer) const { return false; };
+        virtual void PreCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {};
+        virtual void PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {};
+        virtual bool PreCallValidateCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers) const { return false; };
+        virtual void PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers) {};
+        virtual void PostCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers) {};
+        virtual bool PreCallValidateBindBufferMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos) const { return false; };
+        virtual void PreCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos) {};
+        virtual void PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos, VkResult result) {};
+        virtual bool PreCallValidateBindImageMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos) const { return false; };
+        virtual void PreCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos) {};
+        virtual void PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos, VkResult result) {};
+        virtual bool PreCallValidateGetDeviceGroupPeerMemoryFeatures(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) const { return false; };
+        virtual void PreCallRecordGetDeviceGroupPeerMemoryFeatures(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) {};
+        virtual void PostCallRecordGetDeviceGroupPeerMemoryFeatures(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) {};
+        virtual bool PreCallValidateCmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask) const { return false; };
+        virtual void PreCallRecordCmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask) {};
+        virtual void PostCallRecordCmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask) {};
+        virtual bool PreCallValidateCmdDispatchBase(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) const { return false; };
+        virtual void PreCallRecordCmdDispatchBase(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) {};
+        virtual void PostCallRecordCmdDispatchBase(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) {};
+        virtual bool PreCallValidateEnumeratePhysicalDeviceGroups(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties) const { return false; };
+        virtual void PreCallRecordEnumeratePhysicalDeviceGroups(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties) {};
+        virtual void PostCallRecordEnumeratePhysicalDeviceGroups(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, VkResult result) {};
+        virtual bool PreCallValidateGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) const { return false; };
+        virtual void PreCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) {};
+        virtual void PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) {};
+        virtual bool PreCallValidateGetBufferMemoryRequirements2(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) const { return false; };
+        virtual void PreCallRecordGetBufferMemoryRequirements2(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) {};
+        virtual void PostCallRecordGetBufferMemoryRequirements2(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) {};
+        virtual bool PreCallValidateGetImageSparseMemoryRequirements2(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) const { return false; };
+        virtual void PreCallRecordGetImageSparseMemoryRequirements2(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) {};
+        virtual void PostCallRecordGetImageSparseMemoryRequirements2(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) {};
+        virtual bool PreCallValidateGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures) {};
+        virtual void PostCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures) {};
+        virtual bool PreCallValidateGetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties) {};
+        virtual bool PreCallValidateGetPhysicalDeviceFormatProperties2(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceFormatProperties2(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceFormatProperties2(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties) {};
+        virtual bool PreCallValidateGetPhysicalDeviceImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties, VkResult result) {};
+        virtual bool PreCallValidateGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties) {};
+        virtual bool PreCallValidateGetPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {};
+        virtual bool PreCallValidateGetPhysicalDeviceSparseImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceSparseImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceSparseImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties) {};
+        virtual bool PreCallValidateTrimCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags) const { return false; };
+        virtual void PreCallRecordTrimCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags) {};
+        virtual void PostCallRecordTrimCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags) {};
+        virtual bool PreCallValidateGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue) const { return false; };
+        virtual void PreCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue) {};
+        virtual void PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue) {};
+        virtual bool PreCallValidateCreateSamplerYcbcrConversion(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion) const { return false; };
+        virtual void PreCallRecordCreateSamplerYcbcrConversion(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion) {};
+        virtual void PostCallRecordCreateSamplerYcbcrConversion(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion, VkResult result) {};
+        virtual bool PreCallValidateDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateCreateDescriptorUpdateTemplate(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) const { return false; };
+        virtual void PreCallRecordCreateDescriptorUpdateTemplate(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {};
+        virtual void PostCallRecordCreateDescriptorUpdateTemplate(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate, VkResult result) {};
+        virtual bool PreCallValidateDestroyDescriptorUpdateTemplate(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroyDescriptorUpdateTemplate(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData) const { return false; };
+        virtual void PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData) {};
+        virtual void PostCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData) {};
+        virtual bool PreCallValidateGetPhysicalDeviceExternalBufferProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceExternalBufferProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceExternalBufferProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties) {};
+        virtual bool PreCallValidateGetPhysicalDeviceExternalFenceProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceExternalFenceProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceExternalFenceProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties) {};
+        virtual bool PreCallValidateGetPhysicalDeviceExternalSemaphoreProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceExternalSemaphoreProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceExternalSemaphoreProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {};
+        virtual bool PreCallValidateGetDescriptorSetLayoutSupport(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport) const { return false; };
+        virtual void PreCallRecordGetDescriptorSetLayoutSupport(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport) {};
+        virtual void PostCallRecordGetDescriptorSetLayoutSupport(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport) {};
+        virtual bool PreCallValidateDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported) {};
+        virtual void PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported, VkResult result) {};
+        virtual bool PreCallValidateGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities) {};
+        virtual void PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities, VkResult result) {};
+        virtual bool PreCallValidateGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats) {};
+        virtual void PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats, VkResult result) {};
+        virtual bool PreCallValidateGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes) {};
+        virtual void PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes, VkResult result) {};
+        virtual bool PreCallValidateCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain) const { return false; };
+        virtual void PreCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain) {};
+        virtual void PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain, VkResult result) {};
+        virtual bool PreCallValidateDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages) const { return false; };
+        virtual void PreCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages) {};
+        virtual void PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages, VkResult result) {};
+        virtual bool PreCallValidateAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex) const { return false; };
+        virtual void PreCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex) {};
+        virtual void PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex, VkResult result) {};
+        virtual bool PreCallValidateQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR* pPresentInfo) const { return false; };
+        virtual void PreCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR* pPresentInfo) {};
+        virtual void PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR* pPresentInfo, VkResult result) {};
+        virtual bool PreCallValidateGetDeviceGroupPresentCapabilitiesKHR(VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities) const { return false; };
+        virtual void PreCallRecordGetDeviceGroupPresentCapabilitiesKHR(VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities) {};
+        virtual void PostCallRecordGetDeviceGroupPresentCapabilitiesKHR(VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, VkResult result) {};
+        virtual bool PreCallValidateGetDeviceGroupSurfacePresentModesKHR(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes) const { return false; };
+        virtual void PreCallRecordGetDeviceGroupSurfacePresentModesKHR(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes) {};
+        virtual void PostCallRecordGetDeviceGroupSurfacePresentModesKHR(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes, VkResult result) {};
+        virtual bool PreCallValidateGetPhysicalDevicePresentRectanglesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects) const { return false; };
+        virtual void PreCallRecordGetPhysicalDevicePresentRectanglesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects) {};
+        virtual void PostCallRecordGetPhysicalDevicePresentRectanglesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects, VkResult result) {};
+        virtual bool PreCallValidateAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex) const { return false; };
+        virtual void PreCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex) {};
+        virtual void PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, VkResult result) {};
+        virtual bool PreCallValidateGetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties, VkResult result) {};
+        virtual bool PreCallValidateGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties, VkResult result) {};
+        virtual bool PreCallValidateGetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays) const { return false; };
+        virtual void PreCallRecordGetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays) {};
+        virtual void PostCallRecordGetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays, VkResult result) {};
+        virtual bool PreCallValidateGetDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties) const { return false; };
+        virtual void PreCallRecordGetDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties) {};
+        virtual void PostCallRecordGetDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties, VkResult result) {};
+        virtual bool PreCallValidateCreateDisplayModeKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode) const { return false; };
+        virtual void PreCallRecordCreateDisplayModeKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode) {};
+        virtual void PostCallRecordCreateDisplayModeKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode, VkResult result) {};
+        virtual bool PreCallValidateGetDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities) const { return false; };
+        virtual void PreCallRecordGetDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities) {};
+        virtual void PostCallRecordGetDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities, VkResult result) {};
+        virtual bool PreCallValidateCreateDisplayPlaneSurfaceKHR(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const { return false; };
+        virtual void PreCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) {};
+        virtual void PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result) {};
+        virtual bool PreCallValidateCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains) const { return false; };
+        virtual void PreCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains) {};
+        virtual void PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains, VkResult result) {};
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+        virtual bool PreCallValidateCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const { return false; };
+        virtual void PreCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) {};
+        virtual void PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result) {};
+#endif
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+        virtual bool PreCallValidateGetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID) {};
+        virtual void PostCallRecordGetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID) {};
+#endif
+#ifdef VK_USE_PLATFORM_XCB_KHR
+        virtual bool PreCallValidateCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const { return false; };
+        virtual void PreCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) {};
+        virtual void PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result) {};
+#endif
+#ifdef VK_USE_PLATFORM_XCB_KHR
+        virtual bool PreCallValidateGetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id) {};
+        virtual void PostCallRecordGetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id) {};
+#endif
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+        virtual bool PreCallValidateCreateWaylandSurfaceKHR(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const { return false; };
+        virtual void PreCallRecordCreateWaylandSurfaceKHR(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) {};
+        virtual void PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result) {};
+#endif
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+        virtual bool PreCallValidateGetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display) {};
+        virtual void PostCallRecordGetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display) {};
+#endif
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+        virtual bool PreCallValidateCreateAndroidSurfaceKHR(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const { return false; };
+        virtual void PreCallRecordCreateAndroidSurfaceKHR(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) {};
+        virtual void PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result) {};
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+        virtual bool PreCallValidateCreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const { return false; };
+        virtual void PreCallRecordCreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) {};
+        virtual void PostCallRecordCreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result) {};
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+        virtual bool PreCallValidateGetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex) {};
+        virtual void PostCallRecordGetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex) {};
+#endif
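+        // Illustrative sketch only (not part of the imported generated header); it
+        // assumes these virtuals are members of the chassis ValidationObject class.
+        // A concrete layer object would override one of the no-op intercept points
+        // declared above, where returning true from a PreCallValidate* hook asks the
+        // chassis to skip the wrapped API call:
+        //
+        //   bool PreCallValidateAcquireNextImage2KHR(VkDevice device,
+        //                                            const VkAcquireNextImageInfoKHR* pAcquireInfo,
+        //                                            uint32_t* pImageIndex) const override {
+        //       // Hypothetical check for illustration: skip the call when pImageIndex is null.
+        //       return pImageIndex == nullptr;
+        //   }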
+        virtual bool PreCallValidateGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures) {};
+        virtual void PostCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures) {};
+        virtual bool PreCallValidateGetPhysicalDeviceProperties2KHR(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceProperties2KHR(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceProperties2KHR(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties) {};
+        virtual bool PreCallValidateGetPhysicalDeviceFormatProperties2KHR(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceFormatProperties2KHR(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceFormatProperties2KHR(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties) {};
+        virtual bool PreCallValidateGetPhysicalDeviceImageFormatProperties2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceImageFormatProperties2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceImageFormatProperties2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties, VkResult result) {};
+        virtual bool PreCallValidateGetPhysicalDeviceQueueFamilyProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties) {};
+        virtual bool PreCallValidateGetPhysicalDeviceMemoryProperties2KHR(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceMemoryProperties2KHR(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceMemoryProperties2KHR(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {};
+        virtual bool PreCallValidateGetPhysicalDeviceSparseImageFormatProperties2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceSparseImageFormatProperties2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceSparseImageFormatProperties2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties) {};
+        virtual bool PreCallValidateGetDeviceGroupPeerMemoryFeaturesKHR(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) const { return false; };
+        virtual void PreCallRecordGetDeviceGroupPeerMemoryFeaturesKHR(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) {};
+        virtual void PostCallRecordGetDeviceGroupPeerMemoryFeaturesKHR(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) {};
+        virtual bool PreCallValidateCmdSetDeviceMaskKHR(VkCommandBuffer commandBuffer, uint32_t deviceMask) const { return false; };
+        virtual void PreCallRecordCmdSetDeviceMaskKHR(VkCommandBuffer commandBuffer, uint32_t deviceMask) {};
+        virtual void PostCallRecordCmdSetDeviceMaskKHR(VkCommandBuffer commandBuffer, uint32_t deviceMask) {};
+        virtual bool PreCallValidateCmdDispatchBaseKHR(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) const { return false; };
+        virtual void PreCallRecordCmdDispatchBaseKHR(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) {};
+        virtual void PostCallRecordCmdDispatchBaseKHR(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) {};
+        virtual bool PreCallValidateTrimCommandPoolKHR(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags) const { return false; };
+        virtual void PreCallRecordTrimCommandPoolKHR(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags) {};
+        virtual void PostCallRecordTrimCommandPoolKHR(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags) {};
+        virtual bool PreCallValidateEnumeratePhysicalDeviceGroupsKHR(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties) const { return false; };
+        virtual void PreCallRecordEnumeratePhysicalDeviceGroupsKHR(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties) {};
+        virtual void PostCallRecordEnumeratePhysicalDeviceGroupsKHR(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, VkResult result) {};
+        virtual bool PreCallValidateGetPhysicalDeviceExternalBufferPropertiesKHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceExternalBufferPropertiesKHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceExternalBufferPropertiesKHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties) {};
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+        virtual bool PreCallValidateGetMemoryWin32HandleKHR(VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle) const { return false; };
+        virtual void PreCallRecordGetMemoryWin32HandleKHR(VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle) {};
+        virtual void PostCallRecordGetMemoryWin32HandleKHR(VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, VkResult result) {};
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+        virtual bool PreCallValidateGetMemoryWin32HandlePropertiesKHR(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties) const { return false; };
+        virtual void PreCallRecordGetMemoryWin32HandlePropertiesKHR(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties) {};
+        virtual void PostCallRecordGetMemoryWin32HandlePropertiesKHR(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, VkResult result) {};
+#endif
+        virtual bool PreCallValidateGetMemoryFdKHR(VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd) const { return false; };
+        virtual void PreCallRecordGetMemoryFdKHR(VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd) {};
+        virtual void PostCallRecordGetMemoryFdKHR(VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd, VkResult result) {};
+        virtual bool PreCallValidateGetMemoryFdPropertiesKHR(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties) const { return false; };
+        virtual void PreCallRecordGetMemoryFdPropertiesKHR(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties) {};
+        virtual void PostCallRecordGetMemoryFdPropertiesKHR(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties, VkResult result) {};
+        virtual bool PreCallValidateGetPhysicalDeviceExternalSemaphorePropertiesKHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceExternalSemaphorePropertiesKHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceExternalSemaphorePropertiesKHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {};
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+        virtual bool PreCallValidateImportSemaphoreWin32HandleKHR(VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo) const { return false; };
+        virtual void PreCallRecordImportSemaphoreWin32HandleKHR(VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo) {};
+        virtual void PostCallRecordImportSemaphoreWin32HandleKHR(VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, VkResult result) {};
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+        virtual bool PreCallValidateGetSemaphoreWin32HandleKHR(VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle) const { return false; };
+        virtual void PreCallRecordGetSemaphoreWin32HandleKHR(VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle) {};
+        virtual void PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, VkResult result) {};
+#endif
+        virtual bool PreCallValidateImportSemaphoreFdKHR(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) const { return false; };
+        virtual void PreCallRecordImportSemaphoreFdKHR(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) {};
+        virtual void PostCallRecordImportSemaphoreFdKHR(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, VkResult result) {};
+        virtual bool PreCallValidateGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd) const { return false; };
+        virtual void PreCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd) {};
+        virtual void PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, VkResult result) {};
+        virtual bool PreCallValidateCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites) const { return false; };
+        virtual void PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites) {};
+        virtual void PostCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites) {};
+        virtual bool PreCallValidateCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData) const { return false; };
+        virtual void PreCallRecordCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData) {};
+        virtual void PostCallRecordCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData) {};
+        virtual bool PreCallValidateCreateDescriptorUpdateTemplateKHR(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) const { return false; };
+        virtual void PreCallRecordCreateDescriptorUpdateTemplateKHR(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {};
+        virtual void PostCallRecordCreateDescriptorUpdateTemplateKHR(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate, VkResult result) {};
+        virtual bool PreCallValidateDestroyDescriptorUpdateTemplateKHR(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData) const { return false; };
+        virtual void PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData) {};
+        virtual void PostCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData) {};
+        virtual bool PreCallValidateCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass) const { return false; };
+        virtual void PreCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass) {};
+        virtual void PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass, VkResult result) {};
+        virtual bool PreCallValidateCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo*      pRenderPassBegin, const VkSubpassBeginInfoKHR*      pSubpassBeginInfo) const { return false; };
+        virtual void PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo*      pRenderPassBegin, const VkSubpassBeginInfoKHR*      pSubpassBeginInfo) {};
+        virtual void PostCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo*      pRenderPassBegin, const VkSubpassBeginInfoKHR*      pSubpassBeginInfo) {};
+        virtual bool PreCallValidateCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR*      pSubpassBeginInfo, const VkSubpassEndInfoKHR*        pSubpassEndInfo) const { return false; };
+        virtual void PreCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR*      pSubpassBeginInfo, const VkSubpassEndInfoKHR*        pSubpassEndInfo) {};
+        virtual void PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR*      pSubpassBeginInfo, const VkSubpassEndInfoKHR*        pSubpassEndInfo) {};
+        virtual bool PreCallValidateCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR*        pSubpassEndInfo) const { return false; };
+        virtual void PreCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR*        pSubpassEndInfo) {};
+        virtual void PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR*        pSubpassEndInfo) {};
+        virtual bool PreCallValidateGetSwapchainStatusKHR(VkDevice device, VkSwapchainKHR swapchain) const { return false; };
+        virtual void PreCallRecordGetSwapchainStatusKHR(VkDevice device, VkSwapchainKHR swapchain) {};
+        virtual void PostCallRecordGetSwapchainStatusKHR(VkDevice device, VkSwapchainKHR swapchain, VkResult result) {};
+        virtual bool PreCallValidateGetPhysicalDeviceExternalFencePropertiesKHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceExternalFencePropertiesKHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceExternalFencePropertiesKHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties) {};
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+        virtual bool PreCallValidateImportFenceWin32HandleKHR(VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo) const { return false; };
+        virtual void PreCallRecordImportFenceWin32HandleKHR(VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo) {};
+        virtual void PostCallRecordImportFenceWin32HandleKHR(VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, VkResult result) {};
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+        virtual bool PreCallValidateGetFenceWin32HandleKHR(VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle) const { return false; };
+        virtual void PreCallRecordGetFenceWin32HandleKHR(VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle) {};
+        virtual void PostCallRecordGetFenceWin32HandleKHR(VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, VkResult result) {};
+#endif
+        virtual bool PreCallValidateImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo) const { return false; };
+        virtual void PreCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo) {};
+        virtual void PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo, VkResult result) {};
+        virtual bool PreCallValidateGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd) const { return false; };
+        virtual void PreCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd) {};
+        virtual void PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd, VkResult result) {};
+        virtual bool PreCallValidateEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters, VkPerformanceCounterDescriptionKHR* pCounterDescriptions) const { return false; };
+        virtual void PreCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters, VkPerformanceCounterDescriptionKHR* pCounterDescriptions) {};
+        virtual void PostCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters, VkPerformanceCounterDescriptionKHR* pCounterDescriptions, VkResult result) {};
+        virtual bool PreCallValidateGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses) {};
+        virtual void PostCallRecordGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses) {};
+        virtual bool PreCallValidateAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo) const { return false; };
+        virtual void PreCallRecordAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo) {};
+        virtual void PostCallRecordAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo, VkResult result) {};
+        virtual bool PreCallValidateReleaseProfilingLockKHR(VkDevice device) const { return false; };
+        virtual void PreCallRecordReleaseProfilingLockKHR(VkDevice device) {};
+        virtual void PostCallRecordReleaseProfilingLockKHR(VkDevice device) {};
+        virtual bool PreCallValidateGetPhysicalDeviceSurfaceCapabilities2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities) {};
+        virtual void PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities, VkResult result) {};
+        virtual bool PreCallValidateGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats) {};
+        virtual void PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats, VkResult result) {};
+        virtual bool PreCallValidateGetPhysicalDeviceDisplayProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceDisplayProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceDisplayProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties, VkResult result) {};
+        virtual bool PreCallValidateGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties, VkResult result) {};
+        virtual bool PreCallValidateGetDisplayModeProperties2KHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties) const { return false; };
+        virtual void PreCallRecordGetDisplayModeProperties2KHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties) {};
+        virtual void PostCallRecordGetDisplayModeProperties2KHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties, VkResult result) {};
+        virtual bool PreCallValidateGetDisplayPlaneCapabilities2KHR(VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities) const { return false; };
+        virtual void PreCallRecordGetDisplayPlaneCapabilities2KHR(VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities) {};
+        virtual void PostCallRecordGetDisplayPlaneCapabilities2KHR(VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities, VkResult result) {};
+        virtual bool PreCallValidateGetImageMemoryRequirements2KHR(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) const { return false; };
+        virtual void PreCallRecordGetImageMemoryRequirements2KHR(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) {};
+        virtual void PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) {};
+        virtual bool PreCallValidateGetBufferMemoryRequirements2KHR(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) const { return false; };
+        virtual void PreCallRecordGetBufferMemoryRequirements2KHR(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) {};
+        virtual void PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) {};
+        virtual bool PreCallValidateGetImageSparseMemoryRequirements2KHR(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) const { return false; };
+        virtual void PreCallRecordGetImageSparseMemoryRequirements2KHR(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) {};
+        virtual void PostCallRecordGetImageSparseMemoryRequirements2KHR(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) {};
+        virtual bool PreCallValidateCreateSamplerYcbcrConversionKHR(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion) const { return false; };
+        virtual void PreCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion) {};
+        virtual void PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion, VkResult result) {};
+        virtual bool PreCallValidateDestroySamplerYcbcrConversionKHR(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos) const { return false; };
+        virtual void PreCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos) {};
+        virtual void PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos, VkResult result) {};
+        virtual bool PreCallValidateBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos) const { return false; };
+        virtual void PreCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos) {};
+        virtual void PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos, VkResult result) {};
+        virtual bool PreCallValidateGetDescriptorSetLayoutSupportKHR(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport) const { return false; };
+        virtual void PreCallRecordGetDescriptorSetLayoutSupportKHR(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport) {};
+        virtual void PostCallRecordGetDescriptorSetLayoutSupportKHR(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport) {};
+        virtual bool PreCallValidateCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) const { return false; };
+        virtual void PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {};
+        virtual void PostCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {};
+        virtual bool PreCallValidateCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) const { return false; };
+        virtual void PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {};
+        virtual void PostCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {};
+        virtual bool PreCallValidateGetSemaphoreCounterValueKHR(VkDevice device, VkSemaphore semaphore, uint64_t* pValue) const { return false; };
+        virtual void PreCallRecordGetSemaphoreCounterValueKHR(VkDevice device, VkSemaphore semaphore, uint64_t* pValue) {};
+        virtual void PostCallRecordGetSemaphoreCounterValueKHR(VkDevice device, VkSemaphore semaphore, uint64_t* pValue, VkResult result) {};
+        virtual bool PreCallValidateWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfoKHR* pWaitInfo, uint64_t timeout) const { return false; };
+        virtual void PreCallRecordWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfoKHR* pWaitInfo, uint64_t timeout) {};
+        virtual void PostCallRecordWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfoKHR* pWaitInfo, uint64_t timeout, VkResult result) {};
+        virtual bool PreCallValidateSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfoKHR* pSignalInfo) const { return false; };
+        virtual void PreCallRecordSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfoKHR* pSignalInfo) {};
+        virtual void PostCallRecordSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfoKHR* pSignalInfo, VkResult result) {};
+        virtual bool PreCallValidateGetBufferDeviceAddressKHR(VkDevice device, const VkBufferDeviceAddressInfoKHR* pInfo) const { return false; };
+        virtual void PreCallRecordGetBufferDeviceAddressKHR(VkDevice device, const VkBufferDeviceAddressInfoKHR* pInfo) {};
+        virtual void PostCallRecordGetBufferDeviceAddressKHR(VkDevice device, const VkBufferDeviceAddressInfoKHR* pInfo, VkDeviceAddress result) {};
+        virtual bool PreCallValidateGetBufferOpaqueCaptureAddressKHR(VkDevice device, const VkBufferDeviceAddressInfoKHR* pInfo) const { return false; };
+        virtual void PreCallRecordGetBufferOpaqueCaptureAddressKHR(VkDevice device, const VkBufferDeviceAddressInfoKHR* pInfo) {};
+        virtual void PostCallRecordGetBufferOpaqueCaptureAddressKHR(VkDevice device, const VkBufferDeviceAddressInfoKHR* pInfo) {};
+        virtual bool PreCallValidateGetDeviceMemoryOpaqueCaptureAddressKHR(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo) const { return false; };
+        virtual void PreCallRecordGetDeviceMemoryOpaqueCaptureAddressKHR(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo) {};
+        virtual void PostCallRecordGetDeviceMemoryOpaqueCaptureAddressKHR(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo) {};
+        virtual bool PreCallValidateGetPipelineExecutablePropertiesKHR(VkDevice                        device, const VkPipelineInfoKHR*        pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties) const { return false; };
+        virtual void PreCallRecordGetPipelineExecutablePropertiesKHR(VkDevice                        device, const VkPipelineInfoKHR*        pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties) {};
+        virtual void PostCallRecordGetPipelineExecutablePropertiesKHR(VkDevice                        device, const VkPipelineInfoKHR*        pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties, VkResult result) {};
+        virtual bool PreCallValidateGetPipelineExecutableStatisticsKHR(VkDevice                        device, const VkPipelineExecutableInfoKHR*  pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics) const { return false; };
+        virtual void PreCallRecordGetPipelineExecutableStatisticsKHR(VkDevice                        device, const VkPipelineExecutableInfoKHR*  pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics) {};
+        virtual void PostCallRecordGetPipelineExecutableStatisticsKHR(VkDevice                        device, const VkPipelineExecutableInfoKHR*  pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics, VkResult result) {};
+        virtual bool PreCallValidateGetPipelineExecutableInternalRepresentationsKHR(VkDevice                        device, const VkPipelineExecutableInfoKHR*  pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations) const { return false; };
+        virtual void PreCallRecordGetPipelineExecutableInternalRepresentationsKHR(VkDevice                        device, const VkPipelineExecutableInfoKHR*  pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations) {};
+        virtual void PostCallRecordGetPipelineExecutableInternalRepresentationsKHR(VkDevice                        device, const VkPipelineExecutableInfoKHR*  pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations, VkResult result) {};
+        virtual bool PreCallValidateCreateDebugReportCallbackEXT(VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback) const { return false; };
+        virtual void PreCallRecordCreateDebugReportCallbackEXT(VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback) {};
+        virtual void PostCallRecordCreateDebugReportCallbackEXT(VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback, VkResult result) {};
+        virtual bool PreCallValidateDestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateDebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage) const { return false; };
+        virtual void PreCallRecordDebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage) {};
+        virtual void PostCallRecordDebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage) {};
+        virtual bool PreCallValidateDebugMarkerSetObjectTagEXT(VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo) const { return false; };
+        virtual void PreCallRecordDebugMarkerSetObjectTagEXT(VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo) {};
+        virtual void PostCallRecordDebugMarkerSetObjectTagEXT(VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo, VkResult result) {};
+        virtual bool PreCallValidateDebugMarkerSetObjectNameEXT(VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo) const { return false; };
+        virtual void PreCallRecordDebugMarkerSetObjectNameEXT(VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo) {};
+        virtual void PostCallRecordDebugMarkerSetObjectNameEXT(VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo, VkResult result) {};
+        virtual bool PreCallValidateCmdDebugMarkerBeginEXT(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) const { return false; };
+        virtual void PreCallRecordCmdDebugMarkerBeginEXT(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) {};
+        virtual void PostCallRecordCmdDebugMarkerBeginEXT(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) {};
+        virtual bool PreCallValidateCmdDebugMarkerEndEXT(VkCommandBuffer commandBuffer) const { return false; };
+        virtual void PreCallRecordCmdDebugMarkerEndEXT(VkCommandBuffer commandBuffer) {};
+        virtual void PostCallRecordCmdDebugMarkerEndEXT(VkCommandBuffer commandBuffer) {};
+        virtual bool PreCallValidateCmdDebugMarkerInsertEXT(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) const { return false; };
+        virtual void PreCallRecordCmdDebugMarkerInsertEXT(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) {};
+        virtual void PostCallRecordCmdDebugMarkerInsertEXT(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) {};
+        virtual bool PreCallValidateCmdBindTransformFeedbackBuffersEXT(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes) const { return false; };
+        virtual void PreCallRecordCmdBindTransformFeedbackBuffersEXT(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes) {};
+        virtual void PostCallRecordCmdBindTransformFeedbackBuffersEXT(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes) {};
+        virtual bool PreCallValidateCmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets) const { return false; };
+        virtual void PreCallRecordCmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets) {};
+        virtual void PostCallRecordCmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets) {};
+        virtual bool PreCallValidateCmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets) const { return false; };
+        virtual void PreCallRecordCmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets) {};
+        virtual void PostCallRecordCmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets) {};
+        virtual bool PreCallValidateCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index) const { return false; };
+        virtual void PreCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index) {};
+        virtual void PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index) {};
+        virtual bool PreCallValidateCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index) const { return false; };
+        virtual void PreCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index) {};
+        virtual void PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index) {};
+        virtual bool PreCallValidateCmdDrawIndirectByteCountEXT(VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride) const { return false; };
+        virtual void PreCallRecordCmdDrawIndirectByteCountEXT(VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride) {};
+        virtual void PostCallRecordCmdDrawIndirectByteCountEXT(VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride) {};
+        virtual bool PreCallValidateGetImageViewHandleNVX(VkDevice device, const VkImageViewHandleInfoNVX* pInfo) const { return false; };
+        virtual void PreCallRecordGetImageViewHandleNVX(VkDevice device, const VkImageViewHandleInfoNVX* pInfo) {};
+        virtual void PostCallRecordGetImageViewHandleNVX(VkDevice device, const VkImageViewHandleInfoNVX* pInfo) {};
+        virtual bool PreCallValidateCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) const { return false; };
+        virtual void PreCallRecordCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {};
+        virtual void PostCallRecordCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {};
+        virtual bool PreCallValidateCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) const { return false; };
+        virtual void PreCallRecordCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {};
+        virtual void PostCallRecordCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {};
+        virtual bool PreCallValidateGetShaderInfoAMD(VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo) const { return false; };
+        virtual void PreCallRecordGetShaderInfoAMD(VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo) {};
+        virtual void PostCallRecordGetShaderInfoAMD(VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, VkResult result) {};
+#ifdef VK_USE_PLATFORM_GGP
+        virtual bool PreCallValidateCreateStreamDescriptorSurfaceGGP(VkInstance instance, const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const { return false; };
+        virtual void PreCallRecordCreateStreamDescriptorSurfaceGGP(VkInstance instance, const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) {};
+        virtual void PostCallRecordCreateStreamDescriptorSurfaceGGP(VkInstance instance, const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result) {};
+#endif
+        virtual bool PreCallValidateGetPhysicalDeviceExternalImageFormatPropertiesNV(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceExternalImageFormatPropertiesNV(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceExternalImageFormatPropertiesNV(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties, VkResult result) {};
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+        virtual bool PreCallValidateGetMemoryWin32HandleNV(VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle) const { return false; };
+        virtual void PreCallRecordGetMemoryWin32HandleNV(VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle) {};
+        virtual void PostCallRecordGetMemoryWin32HandleNV(VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, VkResult result) {};
+#endif
+#ifdef VK_USE_PLATFORM_VI_NN
+        virtual bool PreCallValidateCreateViSurfaceNN(VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const { return false; };
+        virtual void PreCallRecordCreateViSurfaceNN(VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) {};
+        virtual void PostCallRecordCreateViSurfaceNN(VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result) {};
+#endif
+        virtual bool PreCallValidateCmdBeginConditionalRenderingEXT(VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin) const { return false; };
+        virtual void PreCallRecordCmdBeginConditionalRenderingEXT(VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin) {};
+        virtual void PostCallRecordCmdBeginConditionalRenderingEXT(VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin) {};
+        virtual bool PreCallValidateCmdEndConditionalRenderingEXT(VkCommandBuffer commandBuffer) const { return false; };
+        virtual void PreCallRecordCmdEndConditionalRenderingEXT(VkCommandBuffer commandBuffer) {};
+        virtual void PostCallRecordCmdEndConditionalRenderingEXT(VkCommandBuffer commandBuffer) {};
+        virtual bool PreCallValidateCmdProcessCommandsNVX(VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo) const { return false; };
+        virtual void PreCallRecordCmdProcessCommandsNVX(VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo) {};
+        virtual void PostCallRecordCmdProcessCommandsNVX(VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo) {};
+        virtual bool PreCallValidateCmdReserveSpaceForCommandsNVX(VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo) const { return false; };
+        virtual void PreCallRecordCmdReserveSpaceForCommandsNVX(VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo) {};
+        virtual void PostCallRecordCmdReserveSpaceForCommandsNVX(VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo) {};
+        virtual bool PreCallValidateCreateIndirectCommandsLayoutNVX(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout) const { return false; };
+        virtual void PreCallRecordCreateIndirectCommandsLayoutNVX(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout) {};
+        virtual void PostCallRecordCreateIndirectCommandsLayoutNVX(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout, VkResult result) {};
+        virtual bool PreCallValidateDestroyIndirectCommandsLayoutNVX(VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroyIndirectCommandsLayoutNVX(VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroyIndirectCommandsLayoutNVX(VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateCreateObjectTableNVX(VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable) const { return false; };
+        virtual void PreCallRecordCreateObjectTableNVX(VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable) {};
+        virtual void PostCallRecordCreateObjectTableNVX(VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable, VkResult result) {};
+        virtual bool PreCallValidateDestroyObjectTableNVX(VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroyObjectTableNVX(VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroyObjectTableNVX(VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateRegisterObjectsNVX(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const*    ppObjectTableEntries, const uint32_t* pObjectIndices) const { return false; };
+        virtual void PreCallRecordRegisterObjectsNVX(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const*    ppObjectTableEntries, const uint32_t* pObjectIndices) {};
+        virtual void PostCallRecordRegisterObjectsNVX(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const*    ppObjectTableEntries, const uint32_t* pObjectIndices, VkResult result) {};
+        virtual bool PreCallValidateUnregisterObjectsNVX(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices) const { return false; };
+        virtual void PreCallRecordUnregisterObjectsNVX(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices) {};
+        virtual void PostCallRecordUnregisterObjectsNVX(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices, VkResult result) {};
+        virtual bool PreCallValidateGetPhysicalDeviceGeneratedCommandsPropertiesNVX(VkPhysicalDevice physicalDevice, VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, VkDeviceGeneratedCommandsLimitsNVX* pLimits) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceGeneratedCommandsPropertiesNVX(VkPhysicalDevice physicalDevice, VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, VkDeviceGeneratedCommandsLimitsNVX* pLimits) {};
+        virtual void PostCallRecordGetPhysicalDeviceGeneratedCommandsPropertiesNVX(VkPhysicalDevice physicalDevice, VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, VkDeviceGeneratedCommandsLimitsNVX* pLimits) {};
+        virtual bool PreCallValidateCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings) const { return false; };
+        virtual void PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings) {};
+        virtual void PostCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings) {};
+        virtual bool PreCallValidateReleaseDisplayEXT(VkPhysicalDevice physicalDevice, VkDisplayKHR display) const { return false; };
+        virtual void PreCallRecordReleaseDisplayEXT(VkPhysicalDevice physicalDevice, VkDisplayKHR display) {};
+        virtual void PostCallRecordReleaseDisplayEXT(VkPhysicalDevice physicalDevice, VkDisplayKHR display, VkResult result) {};
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+        virtual bool PreCallValidateAcquireXlibDisplayEXT(VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display) const { return false; };
+        virtual void PreCallRecordAcquireXlibDisplayEXT(VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display) {};
+        virtual void PostCallRecordAcquireXlibDisplayEXT(VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display, VkResult result) {};
+#endif
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+        virtual bool PreCallValidateGetRandROutputDisplayEXT(VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay) const { return false; };
+        virtual void PreCallRecordGetRandROutputDisplayEXT(VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay) {};
+        virtual void PostCallRecordGetRandROutputDisplayEXT(VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay, VkResult result) {};
+#endif
+        virtual bool PreCallValidateGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities) {};
+        virtual void PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities, VkResult result) {};
+        virtual bool PreCallValidateDisplayPowerControlEXT(VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo) const { return false; };
+        virtual void PreCallRecordDisplayPowerControlEXT(VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo) {};
+        virtual void PostCallRecordDisplayPowerControlEXT(VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo, VkResult result) {};
+        virtual bool PreCallValidateRegisterDeviceEventEXT(VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence) const { return false; };
+        virtual void PreCallRecordRegisterDeviceEventEXT(VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence) {};
+        virtual void PostCallRecordRegisterDeviceEventEXT(VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence, VkResult result) {};
+        virtual bool PreCallValidateRegisterDisplayEventEXT(VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence) const { return false; };
+        virtual void PreCallRecordRegisterDisplayEventEXT(VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence) {};
+        virtual void PostCallRecordRegisterDisplayEventEXT(VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence, VkResult result) {};
+        virtual bool PreCallValidateGetSwapchainCounterEXT(VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue) const { return false; };
+        virtual void PreCallRecordGetSwapchainCounterEXT(VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue) {};
+        virtual void PostCallRecordGetSwapchainCounterEXT(VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, VkResult result) {};
+        virtual bool PreCallValidateGetRefreshCycleDurationGOOGLE(VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties) const { return false; };
+        virtual void PreCallRecordGetRefreshCycleDurationGOOGLE(VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties) {};
+        virtual void PostCallRecordGetRefreshCycleDurationGOOGLE(VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties, VkResult result) {};
+        virtual bool PreCallValidateGetPastPresentationTimingGOOGLE(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings) const { return false; };
+        virtual void PreCallRecordGetPastPresentationTimingGOOGLE(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings) {};
+        virtual void PostCallRecordGetPastPresentationTimingGOOGLE(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings, VkResult result) {};
+        virtual bool PreCallValidateCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles) const { return false; };
+        virtual void PreCallRecordCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles) {};
+        virtual void PostCallRecordCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles) {};
+        virtual bool PreCallValidateSetHdrMetadataEXT(VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata) const { return false; };
+        virtual void PreCallRecordSetHdrMetadataEXT(VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata) {};
+        virtual void PostCallRecordSetHdrMetadataEXT(VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata) {};
+#ifdef VK_USE_PLATFORM_IOS_MVK
+        virtual bool PreCallValidateCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const { return false; };
+        virtual void PreCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) {};
+        virtual void PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result) {};
+#endif
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+        virtual bool PreCallValidateCreateMacOSSurfaceMVK(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const { return false; };
+        virtual void PreCallRecordCreateMacOSSurfaceMVK(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) {};
+        virtual void PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result) {};
+#endif
+        virtual bool PreCallValidateSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo) const { return false; };
+        virtual void PreCallRecordSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo) {};
+        virtual void PostCallRecordSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo, VkResult result) {};
+        virtual bool PreCallValidateSetDebugUtilsObjectTagEXT(VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo) const { return false; };
+        virtual void PreCallRecordSetDebugUtilsObjectTagEXT(VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo) {};
+        virtual void PostCallRecordSetDebugUtilsObjectTagEXT(VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo, VkResult result) {};
+        virtual bool PreCallValidateQueueBeginDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo) const { return false; };
+        virtual void PreCallRecordQueueBeginDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo) {};
+        virtual void PostCallRecordQueueBeginDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo) {};
+        virtual bool PreCallValidateQueueEndDebugUtilsLabelEXT(VkQueue queue) const { return false; };
+        virtual void PreCallRecordQueueEndDebugUtilsLabelEXT(VkQueue queue) {};
+        virtual void PostCallRecordQueueEndDebugUtilsLabelEXT(VkQueue queue) {};
+        virtual bool PreCallValidateQueueInsertDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo) const { return false; };
+        virtual void PreCallRecordQueueInsertDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo) {};
+        virtual void PostCallRecordQueueInsertDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo) {};
+        virtual bool PreCallValidateCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo) const { return false; };
+        virtual void PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo) {};
+        virtual void PostCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo) {};
+        virtual bool PreCallValidateCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) const { return false; };
+        virtual void PreCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {};
+        virtual void PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {};
+        virtual bool PreCallValidateCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo) const { return false; };
+        virtual void PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo) {};
+        virtual void PostCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo) {};
+        virtual bool PreCallValidateCreateDebugUtilsMessengerEXT(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger) const { return false; };
+        virtual void PreCallRecordCreateDebugUtilsMessengerEXT(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger) {};
+        virtual void PostCallRecordCreateDebugUtilsMessengerEXT(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger, VkResult result) {};
+        virtual bool PreCallValidateDestroyDebugUtilsMessengerEXT(VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroyDebugUtilsMessengerEXT(VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroyDebugUtilsMessengerEXT(VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateSubmitDebugUtilsMessageEXT(VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData) const { return false; };
+        virtual void PreCallRecordSubmitDebugUtilsMessageEXT(VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData) {};
+        virtual void PostCallRecordSubmitDebugUtilsMessageEXT(VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData) {};
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+        virtual bool PreCallValidateGetAndroidHardwareBufferPropertiesANDROID(VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties) const { return false; };
+        virtual void PreCallRecordGetAndroidHardwareBufferPropertiesANDROID(VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties) {};
+        virtual void PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties, VkResult result) {};
+#endif
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+        virtual bool PreCallValidateGetMemoryAndroidHardwareBufferANDROID(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer) const { return false; };
+        virtual void PreCallRecordGetMemoryAndroidHardwareBufferANDROID(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer) {};
+        virtual void PostCallRecordGetMemoryAndroidHardwareBufferANDROID(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, VkResult result) {};
+#endif
+        virtual bool PreCallValidateCmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo) const { return false; };
+        virtual void PreCallRecordCmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo) {};
+        virtual void PostCallRecordCmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo) {};
+        virtual bool PreCallValidateGetPhysicalDeviceMultisamplePropertiesEXT(VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceMultisamplePropertiesEXT(VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceMultisamplePropertiesEXT(VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties) {};
+        virtual bool PreCallValidateGetImageDrmFormatModifierPropertiesEXT(VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties) const { return false; };
+        virtual void PreCallRecordGetImageDrmFormatModifierPropertiesEXT(VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties) {};
+        virtual void PostCallRecordGetImageDrmFormatModifierPropertiesEXT(VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties, VkResult result) {};
+        virtual bool PreCallValidateCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout) const { return false; };
+        virtual void PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout) {};
+        virtual void PostCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout) {};
+        virtual bool PreCallValidateCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes) const { return false; };
+        virtual void PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes) {};
+        virtual void PostCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes) {};
+        virtual bool PreCallValidateCmdSetCoarseSampleOrderNV(VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders) const { return false; };
+        virtual void PreCallRecordCmdSetCoarseSampleOrderNV(VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders) {};
+        virtual void PostCallRecordCmdSetCoarseSampleOrderNV(VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders) {};
+        virtual bool PreCallValidateCreateAccelerationStructureNV(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure) const { return false; };
+        virtual void PreCallRecordCreateAccelerationStructureNV(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure) {};
+        virtual void PostCallRecordCreateAccelerationStructureNV(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure, VkResult result) {};
+        virtual bool PreCallValidateDestroyAccelerationStructureNV(VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator) const { return false; };
+        virtual void PreCallRecordDestroyAccelerationStructureNV(VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator) {};
+        virtual void PostCallRecordDestroyAccelerationStructureNV(VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator) {};
+        virtual bool PreCallValidateGetAccelerationStructureMemoryRequirementsNV(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) const { return false; };
+        virtual void PreCallRecordGetAccelerationStructureMemoryRequirementsNV(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) {};
+        virtual void PostCallRecordGetAccelerationStructureMemoryRequirementsNV(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) {};
+        virtual bool PreCallValidateBindAccelerationStructureMemoryNV(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos) const { return false; };
+        virtual void PreCallRecordBindAccelerationStructureMemoryNV(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos) {};
+        virtual void PostCallRecordBindAccelerationStructureMemoryNV(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos, VkResult result) {};
+        virtual bool PreCallValidateCmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) const { return false; };
+        virtual void PreCallRecordCmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {};
+        virtual void PostCallRecordCmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {};
+        virtual bool PreCallValidateCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode) const { return false; };
+        virtual void PreCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode) {};
+        virtual void PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode) {};
+        virtual bool PreCallValidateCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth) const { return false; };
+        virtual void PreCallRecordCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth) {};
+        virtual void PostCallRecordCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth) {};
+        virtual bool PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) const { return false; };
+        virtual void PreCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) {};
+        virtual void PostCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result) {};
+        virtual bool PreCallValidateGetRayTracingShaderGroupHandlesNV(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData) const { return false; };
+        virtual void PreCallRecordGetRayTracingShaderGroupHandlesNV(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData) {};
+        virtual void PostCallRecordGetRayTracingShaderGroupHandlesNV(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, VkResult result) {};
+        virtual bool PreCallValidateGetAccelerationStructureHandleNV(VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData) const { return false; };
+        virtual void PreCallRecordGetAccelerationStructureHandleNV(VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData) {};
+        virtual void PostCallRecordGetAccelerationStructureHandleNV(VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData, VkResult result) {};
+        virtual bool PreCallValidateCmdWriteAccelerationStructuresPropertiesNV(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery) const { return false; };
+        virtual void PreCallRecordCmdWriteAccelerationStructuresPropertiesNV(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery) {};
+        virtual void PostCallRecordCmdWriteAccelerationStructuresPropertiesNV(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery) {};
+        virtual bool PreCallValidateCompileDeferredNV(VkDevice device, VkPipeline pipeline, uint32_t shader) const { return false; };
+        virtual void PreCallRecordCompileDeferredNV(VkDevice device, VkPipeline pipeline, uint32_t shader) {};
+        virtual void PostCallRecordCompileDeferredNV(VkDevice device, VkPipeline pipeline, uint32_t shader, VkResult result) {};
+        virtual bool PreCallValidateGetMemoryHostPointerPropertiesEXT(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties) const { return false; };
+        virtual void PreCallRecordGetMemoryHostPointerPropertiesEXT(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties) {};
+        virtual void PostCallRecordGetMemoryHostPointerPropertiesEXT(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, VkResult result) {};
+        virtual bool PreCallValidateCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) const { return false; };
+        virtual void PreCallRecordCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) {};
+        virtual void PostCallRecordCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) {};
+        virtual bool PreCallValidateGetPhysicalDeviceCalibrateableTimeDomainsEXT(VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceCalibrateableTimeDomainsEXT(VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains) {};
+        virtual void PostCallRecordGetPhysicalDeviceCalibrateableTimeDomainsEXT(VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains, VkResult result) {};
+        virtual bool PreCallValidateGetCalibratedTimestampsEXT(VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation) const { return false; };
+        virtual void PreCallRecordGetCalibratedTimestampsEXT(VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation) {};
+        virtual void PostCallRecordGetCalibratedTimestampsEXT(VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, VkResult result) {};
+        virtual bool PreCallValidateCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask) const { return false; };
+        virtual void PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask) {};
+        virtual void PostCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask) {};
+        virtual bool PreCallValidateCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) const { return false; };
+        virtual void PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {};
+        virtual void PostCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {};
+        virtual bool PreCallValidateCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) const { return false; };
+        virtual void PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {};
+        virtual void PostCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {};
+        virtual bool PreCallValidateCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors) const { return false; };
+        virtual void PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors) {};
+        virtual void PostCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors) {};
+        virtual bool PreCallValidateCmdSetCheckpointNV(VkCommandBuffer commandBuffer, const void* pCheckpointMarker) const { return false; };
+        virtual void PreCallRecordCmdSetCheckpointNV(VkCommandBuffer commandBuffer, const void* pCheckpointMarker) {};
+        virtual void PostCallRecordCmdSetCheckpointNV(VkCommandBuffer commandBuffer, const void* pCheckpointMarker) {};
+        virtual bool PreCallValidateGetQueueCheckpointDataNV(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData) const { return false; };
+        virtual void PreCallRecordGetQueueCheckpointDataNV(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData) {};
+        virtual void PostCallRecordGetQueueCheckpointDataNV(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData) {};
+        virtual bool PreCallValidateInitializePerformanceApiINTEL(VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo) const { return false; };
+        virtual void PreCallRecordInitializePerformanceApiINTEL(VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo) {};
+        virtual void PostCallRecordInitializePerformanceApiINTEL(VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo, VkResult result) {};
+        virtual bool PreCallValidateUninitializePerformanceApiINTEL(VkDevice device) const { return false; };
+        virtual void PreCallRecordUninitializePerformanceApiINTEL(VkDevice device) {};
+        virtual void PostCallRecordUninitializePerformanceApiINTEL(VkDevice device) {};
+        virtual bool PreCallValidateCmdSetPerformanceMarkerINTEL(VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo) const { return false; };
+        virtual void PreCallRecordCmdSetPerformanceMarkerINTEL(VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo) {};
+        virtual void PostCallRecordCmdSetPerformanceMarkerINTEL(VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo, VkResult result) {};
+        virtual bool PreCallValidateCmdSetPerformanceStreamMarkerINTEL(VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo) const { return false; };
+        virtual void PreCallRecordCmdSetPerformanceStreamMarkerINTEL(VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo) {};
+        virtual void PostCallRecordCmdSetPerformanceStreamMarkerINTEL(VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo, VkResult result) {};
+        virtual bool PreCallValidateCmdSetPerformanceOverrideINTEL(VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo) const { return false; };
+        virtual void PreCallRecordCmdSetPerformanceOverrideINTEL(VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo) {};
+        virtual void PostCallRecordCmdSetPerformanceOverrideINTEL(VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo, VkResult result) {};
+        virtual bool PreCallValidateAcquirePerformanceConfigurationINTEL(VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration) const { return false; };
+        virtual void PreCallRecordAcquirePerformanceConfigurationINTEL(VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration) {};
+        virtual void PostCallRecordAcquirePerformanceConfigurationINTEL(VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration, VkResult result) {};
+        virtual bool PreCallValidateReleasePerformanceConfigurationINTEL(VkDevice device, VkPerformanceConfigurationINTEL configuration) const { return false; };
+        virtual void PreCallRecordReleasePerformanceConfigurationINTEL(VkDevice device, VkPerformanceConfigurationINTEL configuration) {};
+        virtual void PostCallRecordReleasePerformanceConfigurationINTEL(VkDevice device, VkPerformanceConfigurationINTEL configuration, VkResult result) {};
+        virtual bool PreCallValidateQueueSetPerformanceConfigurationINTEL(VkQueue queue, VkPerformanceConfigurationINTEL configuration) const { return false; };
+        virtual void PreCallRecordQueueSetPerformanceConfigurationINTEL(VkQueue queue, VkPerformanceConfigurationINTEL configuration) {};
+        virtual void PostCallRecordQueueSetPerformanceConfigurationINTEL(VkQueue queue, VkPerformanceConfigurationINTEL configuration, VkResult result) {};
+        virtual bool PreCallValidateGetPerformanceParameterINTEL(VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue) const { return false; };
+        virtual void PreCallRecordGetPerformanceParameterINTEL(VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue) {};
+        virtual void PostCallRecordGetPerformanceParameterINTEL(VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue, VkResult result) {};
+        virtual bool PreCallValidateSetLocalDimmingAMD(VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable) const { return false; };
+        virtual void PreCallRecordSetLocalDimmingAMD(VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable) {};
+        virtual void PostCallRecordSetLocalDimmingAMD(VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable) {};
+#ifdef VK_USE_PLATFORM_FUCHSIA
+        virtual bool PreCallValidateCreateImagePipeSurfaceFUCHSIA(VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const { return false; };
+        virtual void PreCallRecordCreateImagePipeSurfaceFUCHSIA(VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) {};
+        virtual void PostCallRecordCreateImagePipeSurfaceFUCHSIA(VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result) {};
+#endif
+#ifdef VK_USE_PLATFORM_METAL_EXT
+        virtual bool PreCallValidateCreateMetalSurfaceEXT(VkInstance instance, const VkMetalSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const { return false; };
+        virtual void PreCallRecordCreateMetalSurfaceEXT(VkInstance instance, const VkMetalSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) {};
+        virtual void PostCallRecordCreateMetalSurfaceEXT(VkInstance instance, const VkMetalSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result) {};
+#endif
+        virtual bool PreCallValidateGetBufferDeviceAddressEXT(VkDevice device, const VkBufferDeviceAddressInfoKHR* pInfo) const { return false; };
+        virtual void PreCallRecordGetBufferDeviceAddressEXT(VkDevice device, const VkBufferDeviceAddressInfoKHR* pInfo) {};
+        virtual void PostCallRecordGetBufferDeviceAddressEXT(VkDevice device, const VkBufferDeviceAddressInfoKHR* pInfo, VkDeviceAddress result) {};
+        virtual bool PreCallValidateGetPhysicalDeviceToolPropertiesEXT(VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolPropertiesEXT* pToolProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceToolPropertiesEXT(VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolPropertiesEXT* pToolProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceToolPropertiesEXT(VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolPropertiesEXT* pToolProperties, VkResult result) {};
+        virtual bool PreCallValidateGetPhysicalDeviceCooperativeMatrixPropertiesNV(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesNV* pProperties) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceCooperativeMatrixPropertiesNV(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesNV* pProperties) {};
+        virtual void PostCallRecordGetPhysicalDeviceCooperativeMatrixPropertiesNV(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesNV* pProperties, VkResult result) {};
+        virtual bool PreCallValidateGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(VkPhysicalDevice physicalDevice, uint32_t* pCombinationCount, VkFramebufferMixedSamplesCombinationNV* pCombinations) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(VkPhysicalDevice physicalDevice, uint32_t* pCombinationCount, VkFramebufferMixedSamplesCombinationNV* pCombinations) {};
+        virtual void PostCallRecordGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(VkPhysicalDevice physicalDevice, uint32_t* pCombinationCount, VkFramebufferMixedSamplesCombinationNV* pCombinations, VkResult result) {};
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+        virtual bool PreCallValidateGetPhysicalDeviceSurfacePresentModes2EXT(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes) const { return false; };
+        virtual void PreCallRecordGetPhysicalDeviceSurfacePresentModes2EXT(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes) {};
+        virtual void PostCallRecordGetPhysicalDeviceSurfacePresentModes2EXT(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes, VkResult result) {};
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+        virtual bool PreCallValidateAcquireFullScreenExclusiveModeEXT(VkDevice device, VkSwapchainKHR swapchain) const { return false; };
+        virtual void PreCallRecordAcquireFullScreenExclusiveModeEXT(VkDevice device, VkSwapchainKHR swapchain) {};
+        virtual void PostCallRecordAcquireFullScreenExclusiveModeEXT(VkDevice device, VkSwapchainKHR swapchain, VkResult result) {};
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+        virtual bool PreCallValidateReleaseFullScreenExclusiveModeEXT(VkDevice device, VkSwapchainKHR swapchain) const { return false; };
+        virtual void PreCallRecordReleaseFullScreenExclusiveModeEXT(VkDevice device, VkSwapchainKHR swapchain) {};
+        virtual void PostCallRecordReleaseFullScreenExclusiveModeEXT(VkDevice device, VkSwapchainKHR swapchain, VkResult result) {};
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+        virtual bool PreCallValidateGetDeviceGroupSurfacePresentModes2EXT(VkDevice device, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkDeviceGroupPresentModeFlagsKHR* pModes) const { return false; };
+        virtual void PreCallRecordGetDeviceGroupSurfacePresentModes2EXT(VkDevice device, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkDeviceGroupPresentModeFlagsKHR* pModes) {};
+        virtual void PostCallRecordGetDeviceGroupSurfacePresentModes2EXT(VkDevice device, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkDeviceGroupPresentModeFlagsKHR* pModes, VkResult result) {};
+#endif
+        virtual bool PreCallValidateCreateHeadlessSurfaceEXT(VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) const { return false; };
+        virtual void PreCallRecordCreateHeadlessSurfaceEXT(VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface) {};
+        virtual void PostCallRecordCreateHeadlessSurfaceEXT(VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result) {};
+        virtual bool PreCallValidateCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern) const { return false; };
+        virtual void PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern) {};
+        virtual void PostCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern) {};
+        virtual bool PreCallValidateResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) const { return false; };
+        virtual void PreCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) {};
+        virtual void PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) {};
+
+        virtual VkResult CoreLayerCreateValidationCacheEXT(VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache) { return VK_SUCCESS; };
+        virtual void CoreLayerDestroyValidationCacheEXT(VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator) {};
+        virtual VkResult CoreLayerMergeValidationCachesEXT(VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches)  { return VK_SUCCESS; };
+        virtual VkResult CoreLayerGetValidationCacheDataEXT(VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData)  { return VK_SUCCESS; };
+
+        // Allow additional state parameter for CreateGraphicsPipelines
+        virtual bool PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* cgpl_state) const {
+            return PreCallValidateCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+        };
+        virtual void PreCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* cgpl_state) {
+            PreCallRecordCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+        };
+        virtual void PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result, void* cgpl_state) {
+            PostCallRecordCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result);
+        };
+
+        // Allow additional state parameter for CreateComputePipelines
+        virtual bool PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* pipe_state) const {
+            return PreCallValidateCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+        };
+        virtual void PreCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* ccpl_state) {
+            PreCallRecordCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+        };
+        virtual void PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result, void* pipe_state) {
+            PostCallRecordCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result);
+        };
+
+        // Allow additional state parameter for CreateRayTracingPipelinesNV
+        virtual bool PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* pipe_state) const {
+            return PreCallValidateCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+        };
+        virtual void PreCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* ccpl_state) {
+            PreCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+        };
+        virtual void PostCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result, void* pipe_state) {
+            PostCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result);
+        };
+
+        // Allow modification of a down-chain parameter for CreatePipelineLayout
+        virtual void PreCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout, void *cpl_state) {
+            PreCallRecordCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout);
+        };
+
+        // Enable the CreateShaderModule API to take an extra argument for state preservation and parameter modification
+        virtual bool PreCallValidateCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule, void* csm_state) const {
+            return PreCallValidateCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
+        };
+        virtual void PreCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule, void* csm_state) {
+            PreCallRecordCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
+        };
+        virtual void PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule, VkResult result, void* csm_state) {
+            PostCallRecordCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule, result);
+        };
+
+        // Allow AllocateDescriptorSets to use some local stack storage for performance purposes
+        virtual bool PreCallValidateAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets, void* ads_state) const {
+            return PreCallValidateAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
+        };
+        virtual void PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets, VkResult result, void* ads_state)  {
+            PostCallRecordAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets, result);
+        };
+
+        // Allow modification of a down-chain parameter for CreateBuffer
+        virtual void PreCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer, void *cb_state) {
+            PreCallRecordCreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
+        };
+
+        // Modify a parameter to CreateDevice
+        virtual void PreCallRecordCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice, safe_VkDeviceCreateInfo *modified_create_info) {
+            PreCallRecordCreateDevice(physicalDevice, pCreateInfo, pAllocator, pDevice);
+        };
+};
+
+extern small_unordered_map<void*, ValidationObject*, 2> layer_data_map;
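
// Editorial note, illustrative sketch only (not part of the imported patch): a validation layer
// object derives from ValidationObject and overrides just the chassis hooks it needs; returning
// true from a PreCallValidate* hook asks the chassis to skip the down-chain call. The class name
// ExampleStippleCheck is hypothetical; it assumes chassis.h (which declares ValidationObject) and
// the Vulkan headers are available, and a real check would also report through the layer's
// logging machinery rather than silently returning.
class ExampleStippleCheck : public ValidationObject {
  public:
    bool PreCallValidateCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
                                             uint16_t lineStipplePattern) const override {
        // Flag an out-of-range stipple factor (zero is never valid) and skip the down-chain call.
        return lineStippleFactor == 0;
    }
};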
diff --git a/src/third_party/vulkan-validation-layers/src/layers/generated/command_counter_helper.cpp b/src/third_party/vulkan-validation-layers/src/layers/generated/command_counter_helper.cpp
new file mode 100644
index 0000000..1864e28
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/generated/command_counter_helper.cpp
@@ -0,0 +1,308 @@
+// *** THIS FILE IS GENERATED - DO NOT EDIT ***
+// See command_counter_generator.py for modifications
+
+/*
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2019      Intel Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Lionel Landwerlin <lionel.g.landwerlin@intel.com>
+ */
+
+#include "chassis.h"
+#include "state_tracker.h"
+#include "command_counter.h"
+
+void CommandCounter::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdDispatchBase(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdSetDeviceMaskKHR(VkCommandBuffer commandBuffer, uint32_t deviceMask) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdDispatchBaseKHR(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo*      pRenderPassBegin, const VkSubpassBeginInfoKHR*      pSubpassBeginInfo) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR*      pSubpassBeginInfo, const VkSubpassEndInfoKHR*        pSubpassEndInfo) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR*        pSubpassEndInfo) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdDebugMarkerBeginEXT(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdDebugMarkerEndEXT(VkCommandBuffer commandBuffer) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdDebugMarkerInsertEXT(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdBindTransformFeedbackBuffersEXT(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdDrawIndirectByteCountEXT(VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdBeginConditionalRenderingEXT(VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdEndConditionalRenderingEXT(VkCommandBuffer commandBuffer) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdProcessCommandsNVX(VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdReserveSpaceForCommandsNVX(VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdSetCoarseSampleOrderNV(VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdWriteAccelerationStructuresPropertiesNV(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdSetCheckpointNV(VkCommandBuffer commandBuffer, const void* pCheckpointMarker) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdSetPerformanceMarkerINTEL(VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdSetPerformanceStreamMarkerINTEL(VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdSetPerformanceOverrideINTEL(VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
+void CommandCounter::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern) {
+    coreChecks->IncrementCommandCount(commandBuffer);
+}
diff --git a/src/third_party/vulkan-validation-layers/src/layers/generated/command_counter_helper.h b/src/third_party/vulkan-validation-layers/src/layers/generated/command_counter_helper.h
new file mode 100644
index 0000000..92bd268
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/generated/command_counter_helper.h
@@ -0,0 +1,119 @@
+#pragma once
+// *** THIS FILE IS GENERATED - DO NOT EDIT ***
+// See command_counter_generator.py for modifications
+
+/*
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2019      Intel Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Lionel Landwerlin <lionel.g.landwerlin@intel.com>
+ */
+
+void PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline);
+void PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports);
+void PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors);
+void PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth);
+void PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor);
+void PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]);
+void PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds);
+void PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask);
+void PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask);
+void PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference);
+void PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets);
+void PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType);
+void PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets);
+void PreCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance);
+void PreCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance);
+void PreCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride);
+void PreCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride);
+void PreCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ);
+void PreCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset);
+void PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions);
+void PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions);
+void PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter);
+void PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions);
+void PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions);
+void PreCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData);
+void PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data);
+void PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges);
+void PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges);
+void PreCallRecordCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects);
+void PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions);
+void PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask);
+void PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask);
+void PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers);
+void PreCallRecordCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers);
+void PreCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags);
+void PreCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query);
+void PreCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount);
+void PreCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query);
+void PreCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags);
+void PreCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues);
+void PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents);
+void PreCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents);
+void PreCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer);
+void PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers);
+void PreCallRecordCmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask);
+void PreCallRecordCmdDispatchBase(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ);
+void PreCallRecordCmdSetDeviceMaskKHR(VkCommandBuffer commandBuffer, uint32_t deviceMask);
+void PreCallRecordCmdDispatchBaseKHR(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ);
+void PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites);
+void PreCallRecordCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData);
+void PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo*      pRenderPassBegin, const VkSubpassBeginInfoKHR*      pSubpassBeginInfo);
+void PreCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR*      pSubpassBeginInfo, const VkSubpassEndInfoKHR*        pSubpassEndInfo);
+void PreCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR*        pSubpassEndInfo);
+void PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+void PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+void PreCallRecordCmdDebugMarkerBeginEXT(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo);
+void PreCallRecordCmdDebugMarkerEndEXT(VkCommandBuffer commandBuffer);
+void PreCallRecordCmdDebugMarkerInsertEXT(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo);
+void PreCallRecordCmdBindTransformFeedbackBuffersEXT(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes);
+void PreCallRecordCmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets);
+void PreCallRecordCmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets);
+void PreCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index);
+void PreCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index);
+void PreCallRecordCmdDrawIndirectByteCountEXT(VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride);
+void PreCallRecordCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+void PreCallRecordCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+void PreCallRecordCmdBeginConditionalRenderingEXT(VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin);
+void PreCallRecordCmdEndConditionalRenderingEXT(VkCommandBuffer commandBuffer);
+void PreCallRecordCmdProcessCommandsNVX(VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo);
+void PreCallRecordCmdReserveSpaceForCommandsNVX(VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo);
+void PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings);
+void PreCallRecordCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles);
+void PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo);
+void PreCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer);
+void PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo);
+void PreCallRecordCmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo);
+void PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout);
+void PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes);
+void PreCallRecordCmdSetCoarseSampleOrderNV(VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders);
+void PreCallRecordCmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset);
+void PreCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode);
+void PreCallRecordCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth);
+void PreCallRecordCmdWriteAccelerationStructuresPropertiesNV(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery);
+void PreCallRecordCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker);
+void PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask);
+void PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride);
+void PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+void PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors);
+void PreCallRecordCmdSetCheckpointNV(VkCommandBuffer commandBuffer, const void* pCheckpointMarker);
+void PreCallRecordCmdSetPerformanceMarkerINTEL(VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo);
+void PreCallRecordCmdSetPerformanceStreamMarkerINTEL(VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo);
+void PreCallRecordCmdSetPerformanceOverrideINTEL(VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo);
+void PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern);
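
// Editorial note, illustrative sketch only (not part of the imported patch): the declarations
// above carry no class qualifier, so this helper header is meant to be textually included inside
// the body of the CommandCounter class (defined in command_counter.h, which is not shown in this
// section). A hypothetical reconstruction of how it is consumed; the constructor shape is an
// assumption, while the coreChecks member and CoreChecks::IncrementCommandCount follow from the
// generated .cpp above, where every hook calls coreChecks->IncrementCommandCount(commandBuffer).
class CommandCounter : public ValidationObject {
  public:
    explicit CommandCounter(CoreChecks *checks) : coreChecks(checks) {}
    CoreChecks *coreChecks;  // target object whose per-command-buffer counter the hooks increment
#include "command_counter_helper.h"
};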
diff --git a/src/third_party/vulkan-validation-layers/src/layers/generated/layer_chassis_dispatch.cpp b/src/third_party/vulkan-validation-layers/src/layers/generated/layer_chassis_dispatch.cpp
new file mode 100644
index 0000000..833e23f
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/generated/layer_chassis_dispatch.cpp
@@ -0,0 +1,6987 @@
+
+// This file is ***GENERATED***.  Do Not Edit.
+// See layer_chassis_dispatch_generator.py for modifications.
+
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ */
+
+#include <mutex>
+#include "chassis.h"
+#include "layer_chassis_dispatch.h"
+#include "vk_layer_utils.h"
+
+// This intentionally includes a cpp file
+#include "vk_safe_struct.cpp"
+
+ReadWriteLock dispatch_lock;
+
+// Unique Objects pNext extension handling function
+void WrapPnextChainHandles(ValidationObject *layer_data, const void *pNext) {
+    void *cur_pnext = const_cast<void *>(pNext);
+    while (cur_pnext != NULL) {
+        VkBaseOutStructure *header = reinterpret_cast<VkBaseOutStructure *>(cur_pnext);
+
+        switch (header->sType) {
+#ifdef VK_USE_PLATFORM_WIN32_KHR 
+            case VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR: {
+                    safe_VkWin32KeyedMutexAcquireReleaseInfoKHR *safe_struct = reinterpret_cast<safe_VkWin32KeyedMutexAcquireReleaseInfoKHR *>(cur_pnext);
+                    if (safe_struct->pAcquireSyncs) {
+                        for (uint32_t index0 = 0; index0 < safe_struct->acquireCount; ++index0) {
+                            safe_struct->pAcquireSyncs[index0] = layer_data->Unwrap(safe_struct->pAcquireSyncs[index0]);
+                        }
+                    }
+                    if (safe_struct->pReleaseSyncs) {
+                        for (uint32_t index0 = 0; index0 < safe_struct->releaseCount; ++index0) {
+                            safe_struct->pReleaseSyncs[index0] = layer_data->Unwrap(safe_struct->pReleaseSyncs[index0]);
+                        }
+                    }
+                } break;
+#endif // VK_USE_PLATFORM_WIN32_KHR 
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR 
+            case VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV: {
+                    safe_VkWin32KeyedMutexAcquireReleaseInfoNV *safe_struct = reinterpret_cast<safe_VkWin32KeyedMutexAcquireReleaseInfoNV *>(cur_pnext);
+                    if (safe_struct->pAcquireSyncs) {
+                        for (uint32_t index0 = 0; index0 < safe_struct->acquireCount; ++index0) {
+                            safe_struct->pAcquireSyncs[index0] = layer_data->Unwrap(safe_struct->pAcquireSyncs[index0]);
+                        }
+                    }
+                    if (safe_struct->pReleaseSyncs) {
+                        for (uint32_t index0 = 0; index0 < safe_struct->releaseCount; ++index0) {
+                            safe_struct->pReleaseSyncs[index0] = layer_data->Unwrap(safe_struct->pReleaseSyncs[index0]);
+                        }
+                    }
+                } break;
+#endif // VK_USE_PLATFORM_WIN32_KHR 
+
+            case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV: {
+                    safe_VkDedicatedAllocationMemoryAllocateInfoNV *safe_struct = reinterpret_cast<safe_VkDedicatedAllocationMemoryAllocateInfoNV *>(cur_pnext);
+                    if (safe_struct->image) {
+                        safe_struct->image = layer_data->Unwrap(safe_struct->image);
+                    }
+                    if (safe_struct->buffer) {
+                        safe_struct->buffer = layer_data->Unwrap(safe_struct->buffer);
+                    }
+                } break;
+
+            case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO: {
+                    safe_VkMemoryDedicatedAllocateInfo *safe_struct = reinterpret_cast<safe_VkMemoryDedicatedAllocateInfo *>(cur_pnext);
+                    if (safe_struct->image) {
+                        safe_struct->image = layer_data->Unwrap(safe_struct->image);
+                    }
+                    if (safe_struct->buffer) {
+                        safe_struct->buffer = layer_data->Unwrap(safe_struct->buffer);
+                    }
+                } break;
+
+            case VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR: {
+                    safe_VkImageSwapchainCreateInfoKHR *safe_struct = reinterpret_cast<safe_VkImageSwapchainCreateInfoKHR *>(cur_pnext);
+                    if (safe_struct->swapchain) {
+                        safe_struct->swapchain = layer_data->Unwrap(safe_struct->swapchain);
+                    }
+                } break;
+
+            case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO: {
+                    safe_VkSamplerYcbcrConversionInfo *safe_struct = reinterpret_cast<safe_VkSamplerYcbcrConversionInfo *>(cur_pnext);
+                    if (safe_struct->conversion) {
+                        safe_struct->conversion = layer_data->Unwrap(safe_struct->conversion);
+                    }
+                } break;
+
+            case VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT: {
+                    safe_VkShaderModuleValidationCacheCreateInfoEXT *safe_struct = reinterpret_cast<safe_VkShaderModuleValidationCacheCreateInfoEXT *>(cur_pnext);
+                    if (safe_struct->validationCache) {
+                        safe_struct->validationCache = layer_data->Unwrap(safe_struct->validationCache);
+                    }
+                } break;
+
+            case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV: {
+                    safe_VkWriteDescriptorSetAccelerationStructureNV *safe_struct = reinterpret_cast<safe_VkWriteDescriptorSetAccelerationStructureNV *>(cur_pnext);
+                    if (safe_struct->pAccelerationStructures) {
+                        for (uint32_t index0 = 0; index0 < safe_struct->accelerationStructureCount; ++index0) {
+                            safe_struct->pAccelerationStructures[index0] = layer_data->Unwrap(safe_struct->pAccelerationStructures[index0]);
+                        }
+                    }
+                } break;
+
+            case VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR: {
+                    safe_VkRenderPassAttachmentBeginInfoKHR *safe_struct = reinterpret_cast<safe_VkRenderPassAttachmentBeginInfoKHR *>(cur_pnext);
+                    if (safe_struct->pAttachments) {
+                        for (uint32_t index0 = 0; index0 < safe_struct->attachmentCount; ++index0) {
+                            safe_struct->pAttachments[index0] = layer_data->Unwrap(safe_struct->pAttachments[index0]);
+                        }
+                    }
+                } break;
+
+            case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR: {
+                    safe_VkBindImageMemorySwapchainInfoKHR *safe_struct = reinterpret_cast<safe_VkBindImageMemorySwapchainInfoKHR *>(cur_pnext);
+                    if (safe_struct->swapchain) {
+                        safe_struct->swapchain = layer_data->Unwrap(safe_struct->swapchain);
+                    }
+                } break;
+
+            default:
+                break;
+        }
+
+        // Process the next structure in the chain
+        cur_pnext = header->pNext;
+    }
+}
+
+
+// Manually written Dispatch routines
+
+
+#define DISPATCH_MAX_STACK_ALLOCATIONS 32
+
+// The VK_EXT_pipeline_creation_feedback extension returns data from the driver -- we've created a copy of the pnext chain, so
+// copy the returned data to the caller before freeing the copy's data.
+void CopyCreatePipelineFeedbackData(const void *src_chain, const void *dst_chain) {
+    auto src_feedback_struct = lvl_find_in_chain<VkPipelineCreationFeedbackCreateInfoEXT>(src_chain);
+    if (!src_feedback_struct) return;
+    auto dst_feedback_struct = const_cast<VkPipelineCreationFeedbackCreateInfoEXT *>(
+        lvl_find_in_chain<VkPipelineCreationFeedbackCreateInfoEXT>(dst_chain));
+    *dst_feedback_struct->pPipelineCreationFeedback = *src_feedback_struct->pPipelineCreationFeedback;
+    for (uint32_t i = 0; i < src_feedback_struct->pipelineStageCreationFeedbackCount; i++) {
+        dst_feedback_struct->pPipelineStageCreationFeedbacks[i] = src_feedback_struct->pPipelineStageCreationFeedbacks[i];
+    }
+}
+
+VkResult DispatchCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
+                                         const VkGraphicsPipelineCreateInfo *pCreateInfos,
+                                         const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CreateGraphicsPipelines(device, pipelineCache, createInfoCount,
+                                                                                           pCreateInfos, pAllocator, pPipelines);
+    safe_VkGraphicsPipelineCreateInfo *local_pCreateInfos = nullptr;
+    if (pCreateInfos) {
+        local_pCreateInfos = new safe_VkGraphicsPipelineCreateInfo[createInfoCount];
+        read_lock_guard_t lock(dispatch_lock);
+        for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
+            bool uses_color_attachment = false;
+            bool uses_depthstencil_attachment = false;
+            {
+                const auto subpasses_uses_it = layer_data->renderpasses_states.find(layer_data->Unwrap(pCreateInfos[idx0].renderPass));
+                if (subpasses_uses_it != layer_data->renderpasses_states.end()) {
+                    const auto &subpasses_uses = subpasses_uses_it->second;
+                    if (subpasses_uses.subpasses_using_color_attachment.count(pCreateInfos[idx0].subpass))
+                        uses_color_attachment = true;
+                    if (subpasses_uses.subpasses_using_depthstencil_attachment.count(pCreateInfos[idx0].subpass))
+                        uses_depthstencil_attachment = true;
+                }
+            }
+
+            local_pCreateInfos[idx0].initialize(&pCreateInfos[idx0], uses_color_attachment, uses_depthstencil_attachment);
+
+            if (pCreateInfos[idx0].basePipelineHandle) {
+                local_pCreateInfos[idx0].basePipelineHandle = layer_data->Unwrap(pCreateInfos[idx0].basePipelineHandle);
+            }
+            if (pCreateInfos[idx0].layout) {
+                local_pCreateInfos[idx0].layout = layer_data->Unwrap(pCreateInfos[idx0].layout);
+            }
+            if (pCreateInfos[idx0].pStages) {
+                for (uint32_t idx1 = 0; idx1 < pCreateInfos[idx0].stageCount; ++idx1) {
+                    if (pCreateInfos[idx0].pStages[idx1].module) {
+                        local_pCreateInfos[idx0].pStages[idx1].module = layer_data->Unwrap(pCreateInfos[idx0].pStages[idx1].module);
+                    }
+                }
+            }
+            if (pCreateInfos[idx0].renderPass) {
+                local_pCreateInfos[idx0].renderPass = layer_data->Unwrap(pCreateInfos[idx0].renderPass);
+            }
+        }
+    }
+    if (pipelineCache) {
+        pipelineCache = layer_data->Unwrap(pipelineCache);
+    }
+
+    VkResult result = layer_data->device_dispatch_table.CreateGraphicsPipelines(device, pipelineCache, createInfoCount,
+                                                                                local_pCreateInfos->ptr(), pAllocator, pPipelines);
+    for (uint32_t i = 0; i < createInfoCount; ++i) {
+        if (pCreateInfos[i].pNext != VK_NULL_HANDLE) {
+            CopyCreatePipelineFeedbackData(local_pCreateInfos[i].pNext, pCreateInfos[i].pNext);
+        }
+    }
+
+    delete[] local_pCreateInfos;
+    {
+        for (uint32_t i = 0; i < createInfoCount; ++i) {
+            if (pPipelines[i] != VK_NULL_HANDLE) {
+                pPipelines[i] = layer_data->WrapNew(pPipelines[i]);
+            }
+        }
+    }
+    return result;
+}
+
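+// Shared by the VkRenderPassCreateInfo and VkRenderPassCreateInfo2KHR paths: records which subpasses use color and
+// depth/stencil attachments, keyed by the unwrapped render pass handle, for later use by DispatchCreateGraphicsPipelines.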
+template <typename T>
+static void UpdateCreateRenderPassState(ValidationObject *layer_data, const T *pCreateInfo, VkRenderPass renderPass) {
+    auto &renderpass_state = layer_data->renderpasses_states[renderPass];
+
+    for (uint32_t subpass = 0; subpass < pCreateInfo->subpassCount; ++subpass) {
+        bool uses_color = false;
+        for (uint32_t i = 0; i < pCreateInfo->pSubpasses[subpass].colorAttachmentCount && !uses_color; ++i)
+            if (pCreateInfo->pSubpasses[subpass].pColorAttachments[i].attachment != VK_ATTACHMENT_UNUSED) uses_color = true;
+
+        bool uses_depthstencil = false;
+        if (pCreateInfo->pSubpasses[subpass].pDepthStencilAttachment)
+            if (pCreateInfo->pSubpasses[subpass].pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED)
+                uses_depthstencil = true;
+
+        if (uses_color) renderpass_state.subpasses_using_color_attachment.insert(subpass);
+        if (uses_depthstencil) renderpass_state.subpasses_using_depthstencil_attachment.insert(subpass);
+    }
+}
+
+VkResult DispatchCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
+                                  const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    VkResult result = layer_data->device_dispatch_table.CreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
+    if (!wrap_handles) return result;
+    if (VK_SUCCESS == result) {
+        write_lock_guard_t lock(dispatch_lock);
+        UpdateCreateRenderPassState(layer_data, pCreateInfo, *pRenderPass);
+        *pRenderPass = layer_data->WrapNew(*pRenderPass);
+    }
+    return result;
+}
+
+VkResult DispatchCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
+                                      const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    VkResult result = layer_data->device_dispatch_table.CreateRenderPass2KHR(device, pCreateInfo, pAllocator, pRenderPass);
+    if (!wrap_handles) return result;
+    if (VK_SUCCESS == result) {
+        write_lock_guard_t lock(dispatch_lock);
+        UpdateCreateRenderPassState(layer_data, pCreateInfo, *pRenderPass);
+        *pRenderPass = layer_data->WrapNew(*pRenderPass);
+    }
+    return result;
+}
+
+void DispatchDestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks *pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DestroyRenderPass(device, renderPass, pAllocator);
+    uint64_t renderPass_id = reinterpret_cast<uint64_t &>(renderPass);
+
+    auto iter = unique_id_mapping.pop(renderPass_id);
+    if (iter != unique_id_mapping.end()) {
+        renderPass = (VkRenderPass)iter->second;
+    } else {
+        renderPass = (VkRenderPass)0;
+    }
+
+    layer_data->device_dispatch_table.DestroyRenderPass(device, renderPass, pAllocator);
+
+    write_lock_guard_t lock(dispatch_lock);
+    layer_data->renderpasses_states.erase(renderPass);
+}
+
+VkResult DispatchCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
+                                    const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain);
+    safe_VkSwapchainCreateInfoKHR *local_pCreateInfo = NULL;
+    if (pCreateInfo) {
+        local_pCreateInfo = new safe_VkSwapchainCreateInfoKHR(pCreateInfo);
+        local_pCreateInfo->oldSwapchain = layer_data->Unwrap(pCreateInfo->oldSwapchain);
+        // Surface is an instance-level object
+        local_pCreateInfo->surface = layer_data->Unwrap(pCreateInfo->surface);
+    }
+
+    VkResult result = layer_data->device_dispatch_table.CreateSwapchainKHR(device, local_pCreateInfo->ptr(), pAllocator, pSwapchain);
+    delete local_pCreateInfo;
+
+    if (VK_SUCCESS == result) {
+        *pSwapchain = layer_data->WrapNew(*pSwapchain);
+    }
+    return result;
+}
+
+VkResult DispatchCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR *pCreateInfos,
+                                           const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchains) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles)
+        return layer_data->device_dispatch_table.CreateSharedSwapchainsKHR(device, swapchainCount, pCreateInfos, pAllocator,
+                                                                           pSwapchains);
+    safe_VkSwapchainCreateInfoKHR *local_pCreateInfos = NULL;
+    {
+        if (pCreateInfos) {
+            local_pCreateInfos = new safe_VkSwapchainCreateInfoKHR[swapchainCount];
+            for (uint32_t i = 0; i < swapchainCount; ++i) {
+                local_pCreateInfos[i].initialize(&pCreateInfos[i]);
+                if (pCreateInfos[i].surface) {
+                    // Surface is an instance-level object
+                    local_pCreateInfos[i].surface = layer_data->Unwrap(pCreateInfos[i].surface);
+                }
+                if (pCreateInfos[i].oldSwapchain) {
+                    local_pCreateInfos[i].oldSwapchain = layer_data->Unwrap(pCreateInfos[i].oldSwapchain);
+                }
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.CreateSharedSwapchainsKHR(device, swapchainCount, local_pCreateInfos->ptr(),
+                                                                                  pAllocator, pSwapchains);
+    delete[] local_pCreateInfos;
+    if (VK_SUCCESS == result) {
+        for (uint32_t i = 0; i < swapchainCount; i++) {
+            pSwapchains[i] = layer_data->WrapNew(pSwapchains[i]);
+        }
+    }
+    return result;
+}
+
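+// Wrapped handles for swapchain images are cached per swapchain so that repeated calls return the same wrapped
+// handles instead of wrapping the driver handles a second time.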
+VkResult DispatchGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
+                                       VkImage *pSwapchainImages) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles)
+        return layer_data->device_dispatch_table.GetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
+    VkSwapchainKHR wrapped_swapchain_handle = swapchain;
+    if (VK_NULL_HANDLE != swapchain) {
+        swapchain = layer_data->Unwrap(swapchain);
+    }
+    VkResult result =
+        layer_data->device_dispatch_table.GetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
+    if ((VK_SUCCESS == result) || (VK_INCOMPLETE == result)) {
+        if ((*pSwapchainImageCount > 0) && pSwapchainImages) {
+            write_lock_guard_t lock(dispatch_lock);
+            auto &wrapped_swapchain_image_handles = layer_data->swapchain_wrapped_image_handle_map[wrapped_swapchain_handle];
+            for (uint32_t i = static_cast<uint32_t>(wrapped_swapchain_image_handles.size()); i < *pSwapchainImageCount; i++) {
+                wrapped_swapchain_image_handles.emplace_back(layer_data->WrapNew(pSwapchainImages[i]));
+            }
+            for (uint32_t i = 0; i < *pSwapchainImageCount; i++) {
+                pSwapchainImages[i] = wrapped_swapchain_image_handles[i];
+            }
+        }
+    }
+    return result;
+}
+
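+// Destroying a swapchain implicitly destroys its images, so their wrapped-handle mappings are removed before the
+// swapchain handle itself is unwrapped and destroyed.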
+void DispatchDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks *pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DestroySwapchainKHR(device, swapchain, pAllocator);
+    write_lock_guard_t lock(dispatch_lock);
+
+    auto &image_array = layer_data->swapchain_wrapped_image_handle_map[swapchain];
+    for (auto &image_handle : image_array) {
+        unique_id_mapping.erase(HandleToUint64(image_handle));
+    }
+    layer_data->swapchain_wrapped_image_handle_map.erase(swapchain);
+    lock.unlock();
+
+    uint64_t swapchain_id = HandleToUint64(swapchain);
+
+    auto iter = unique_id_mapping.pop(swapchain_id);
+    if (iter != unique_id_mapping.end()) {
+        swapchain = (VkSwapchainKHR)iter->second;
+    } else {
+        swapchain = (VkSwapchainKHR)0;
+    }
+
+    layer_data->device_dispatch_table.DestroySwapchainKHR(device, swapchain, pAllocator);
+}
+
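+// The wait semaphores and swapchains referenced by VkPresentInfoKHR are unwrapped in a deep copy of the structure
+// before the call is forwarded.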
+VkResult DispatchQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.QueuePresentKHR(queue, pPresentInfo);
+    safe_VkPresentInfoKHR *local_pPresentInfo = NULL;
+    {
+        if (pPresentInfo) {
+            local_pPresentInfo = new safe_VkPresentInfoKHR(pPresentInfo);
+            if (local_pPresentInfo->pWaitSemaphores) {
+                for (uint32_t index1 = 0; index1 < local_pPresentInfo->waitSemaphoreCount; ++index1) {
+                    local_pPresentInfo->pWaitSemaphores[index1] = layer_data->Unwrap(pPresentInfo->pWaitSemaphores[index1]);
+                }
+            }
+            if (local_pPresentInfo->pSwapchains) {
+                for (uint32_t index1 = 0; index1 < local_pPresentInfo->swapchainCount; ++index1) {
+                    local_pPresentInfo->pSwapchains[index1] = layer_data->Unwrap(pPresentInfo->pSwapchains[index1]);
+                }
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.QueuePresentKHR(queue, local_pPresentInfo->ptr());
+
+    // pResults is an output array embedded in a structure. The code generator neglects to copy back from the safe_* version,
+    // so handle it as a special case here:
+    if (pPresentInfo && pPresentInfo->pResults) {
+        for (uint32_t i = 0; i < pPresentInfo->swapchainCount; i++) {
+            pPresentInfo->pResults[i] = local_pPresentInfo->pResults[i];
+        }
+    }
+    delete local_pPresentInfo;
+    return result;
+}
+
+void DispatchDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks *pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DestroyDescriptorPool(device, descriptorPool, pAllocator);
+    write_lock_guard_t lock(dispatch_lock);
+
+    // remove references to implicitly freed descriptor sets
+    for (auto descriptor_set : layer_data->pool_descriptor_sets_map[descriptorPool]) {
+        unique_id_mapping.erase(reinterpret_cast<uint64_t &>(descriptor_set));
+    }
+    layer_data->pool_descriptor_sets_map.erase(descriptorPool);
+    lock.unlock();
+
+    uint64_t descriptorPool_id = reinterpret_cast<uint64_t &>(descriptorPool);
+
+    auto iter = unique_id_mapping.pop(descriptorPool_id);
+    if (iter != unique_id_mapping.end()) {
+        descriptorPool = (VkDescriptorPool)iter->second;
+    } else {
+        descriptorPool = (VkDescriptorPool)0;
+    }
+
+    layer_data->device_dispatch_table.DestroyDescriptorPool(device, descriptorPool, pAllocator);
+}
+
+VkResult DispatchResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.ResetDescriptorPool(device, descriptorPool, flags);
+    VkDescriptorPool local_descriptor_pool = VK_NULL_HANDLE;
+    {
+        local_descriptor_pool = layer_data->Unwrap(descriptorPool);
+    }
+    VkResult result = layer_data->device_dispatch_table.ResetDescriptorPool(device, local_descriptor_pool, flags);
+    if (VK_SUCCESS == result) {
+        write_lock_guard_t lock(dispatch_lock);
+        // remove references to implicitly freed descriptor sets
+        for (auto descriptor_set : layer_data->pool_descriptor_sets_map[descriptorPool]) {
+            unique_id_mapping.erase(reinterpret_cast<uint64_t &>(descriptor_set));
+        }
+        layer_data->pool_descriptor_sets_map[descriptorPool].clear();
+    }
+
+    return result;
+}
+
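+// Newly allocated descriptor sets are wrapped and recorded in pool_descriptor_sets_map so their mappings can be
+// released when the pool is reset or destroyed.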
+VkResult DispatchAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
+                                        VkDescriptorSet *pDescriptorSets) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.AllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
+    safe_VkDescriptorSetAllocateInfo *local_pAllocateInfo = NULL;
+    {
+        if (pAllocateInfo) {
+            local_pAllocateInfo = new safe_VkDescriptorSetAllocateInfo(pAllocateInfo);
+            if (pAllocateInfo->descriptorPool) {
+                local_pAllocateInfo->descriptorPool = layer_data->Unwrap(pAllocateInfo->descriptorPool);
+            }
+            if (local_pAllocateInfo->pSetLayouts) {
+                for (uint32_t index1 = 0; index1 < local_pAllocateInfo->descriptorSetCount; ++index1) {
+                    local_pAllocateInfo->pSetLayouts[index1] = layer_data->Unwrap(local_pAllocateInfo->pSetLayouts[index1]);
+                }
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.AllocateDescriptorSets(
+        device, (const VkDescriptorSetAllocateInfo *)local_pAllocateInfo, pDescriptorSets);
+    if (local_pAllocateInfo) {
+        delete local_pAllocateInfo;
+    }
+    if (VK_SUCCESS == result) {
+        write_lock_guard_t lock(dispatch_lock);
+        auto &pool_descriptor_sets = layer_data->pool_descriptor_sets_map[pAllocateInfo->descriptorPool];
+        for (uint32_t index0 = 0; index0 < pAllocateInfo->descriptorSetCount; index0++) {
+            pDescriptorSets[index0] = layer_data->WrapNew(pDescriptorSets[index0]);
+            pool_descriptor_sets.insert(pDescriptorSets[index0]);
+        }
+    }
+    return result;
+}
+
+VkResult DispatchFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount,
+                                    const VkDescriptorSet *pDescriptorSets) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles)
+        return layer_data->device_dispatch_table.FreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);
+    VkDescriptorSet *local_pDescriptorSets = NULL;
+    VkDescriptorPool local_descriptor_pool = VK_NULL_HANDLE;
+    {
+        local_descriptor_pool = layer_data->Unwrap(descriptorPool);
+        if (pDescriptorSets) {
+            local_pDescriptorSets = new VkDescriptorSet[descriptorSetCount];
+            for (uint32_t index0 = 0; index0 < descriptorSetCount; ++index0) {
+                local_pDescriptorSets[index0] = layer_data->Unwrap(pDescriptorSets[index0]);
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.FreeDescriptorSets(device, local_descriptor_pool, descriptorSetCount,
+                                                                           (const VkDescriptorSet *)local_pDescriptorSets);
+    if (local_pDescriptorSets) delete[] local_pDescriptorSets;
+    if ((VK_SUCCESS == result) && (pDescriptorSets)) {
+        write_lock_guard_t lock(dispatch_lock);
+        auto &pool_descriptor_sets = layer_data->pool_descriptor_sets_map[descriptorPool];
+        for (uint32_t index0 = 0; index0 < descriptorSetCount; index0++) {
+            VkDescriptorSet handle = pDescriptorSets[index0];
+            pool_descriptor_sets.erase(handle);
+            uint64_t unique_id = reinterpret_cast<uint64_t &>(handle);
+            unique_id_mapping.erase(unique_id);
+        }
+    }
+    return result;
+}
+
+// This is the core version of this routine.  The extension version is below.
+VkResult DispatchCreateDescriptorUpdateTemplate(VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
+                                                const VkAllocationCallbacks *pAllocator,
+                                                VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles)
+        return layer_data->device_dispatch_table.CreateDescriptorUpdateTemplate(device, pCreateInfo, pAllocator,
+                                                                                pDescriptorUpdateTemplate);
+    safe_VkDescriptorUpdateTemplateCreateInfo var_local_pCreateInfo;
+    safe_VkDescriptorUpdateTemplateCreateInfo *local_pCreateInfo = NULL;
+    if (pCreateInfo) {
+        local_pCreateInfo = &var_local_pCreateInfo;
+        local_pCreateInfo->initialize(pCreateInfo);
+        if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
+            local_pCreateInfo->descriptorSetLayout = layer_data->Unwrap(pCreateInfo->descriptorSetLayout);
+        }
+        if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR) {
+            local_pCreateInfo->pipelineLayout = layer_data->Unwrap(pCreateInfo->pipelineLayout);
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.CreateDescriptorUpdateTemplate(device, local_pCreateInfo->ptr(), pAllocator,
+                                                                                       pDescriptorUpdateTemplate);
+    if (VK_SUCCESS == result) {
+        *pDescriptorUpdateTemplate = layer_data->WrapNew(*pDescriptorUpdateTemplate);
+
+        // Shadow template createInfo for later updates
+        if (local_pCreateInfo) {
+            write_lock_guard_t lock(dispatch_lock);
+            std::unique_ptr<TEMPLATE_STATE> template_state(new TEMPLATE_STATE(*pDescriptorUpdateTemplate, local_pCreateInfo));
+            layer_data->desc_template_createinfo_map[(uint64_t)*pDescriptorUpdateTemplate] = std::move(template_state);
+        }
+    }
+    return result;
+}
+
+// This is the extension version of this routine.  The core version is above.
+VkResult DispatchCreateDescriptorUpdateTemplateKHR(VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
+                                                   const VkAllocationCallbacks *pAllocator,
+                                                   VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles)
+        return layer_data->device_dispatch_table.CreateDescriptorUpdateTemplateKHR(device, pCreateInfo, pAllocator,
+                                                                                   pDescriptorUpdateTemplate);
+    safe_VkDescriptorUpdateTemplateCreateInfo var_local_pCreateInfo;
+    safe_VkDescriptorUpdateTemplateCreateInfo *local_pCreateInfo = NULL;
+    if (pCreateInfo) {
+        local_pCreateInfo = &var_local_pCreateInfo;
+        local_pCreateInfo->initialize(pCreateInfo);
+        if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
+            local_pCreateInfo->descriptorSetLayout = layer_data->Unwrap(pCreateInfo->descriptorSetLayout);
+        }
+        if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR) {
+            local_pCreateInfo->pipelineLayout = layer_data->Unwrap(pCreateInfo->pipelineLayout);
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.CreateDescriptorUpdateTemplateKHR(device, local_pCreateInfo->ptr(),
+                                                                                          pAllocator, pDescriptorUpdateTemplate);
+
+    if (VK_SUCCESS == result) {
+        *pDescriptorUpdateTemplate = layer_data->WrapNew(*pDescriptorUpdateTemplate);
+
+        // Shadow template createInfo for later updates
+        if (local_pCreateInfo) {
+            write_lock_guard_t lock(dispatch_lock);
+            std::unique_ptr<TEMPLATE_STATE> template_state(new TEMPLATE_STATE(*pDescriptorUpdateTemplate, local_pCreateInfo));
+            layer_data->desc_template_createinfo_map[(uint64_t)*pDescriptorUpdateTemplate] = std::move(template_state);
+        }
+    }
+    return result;
+}
+
+// This is the core version of this routine.  The extension version is below.
+void DispatchDestroyDescriptorUpdateTemplate(VkDevice device, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+                                             const VkAllocationCallbacks *pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles)
+        return layer_data->device_dispatch_table.DestroyDescriptorUpdateTemplate(device, descriptorUpdateTemplate, pAllocator);
+    write_lock_guard_t lock(dispatch_lock);
+    uint64_t descriptor_update_template_id = reinterpret_cast<uint64_t &>(descriptorUpdateTemplate);
+    layer_data->desc_template_createinfo_map.erase(descriptor_update_template_id);
+    lock.unlock();
+
+    auto iter = unique_id_mapping.pop(descriptor_update_template_id);
+    if (iter != unique_id_mapping.end()) {
+        descriptorUpdateTemplate = (VkDescriptorUpdateTemplate)iter->second;
+    } else {
+        descriptorUpdateTemplate = (VkDescriptorUpdateTemplate)0;
+    }
+
+    layer_data->device_dispatch_table.DestroyDescriptorUpdateTemplate(device, descriptorUpdateTemplate, pAllocator);
+}
+
+// This is the extension version of this routine.  The core version is above.
+void DispatchDestroyDescriptorUpdateTemplateKHR(VkDevice device, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+                                                const VkAllocationCallbacks *pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles)
+        return layer_data->device_dispatch_table.DestroyDescriptorUpdateTemplateKHR(device, descriptorUpdateTemplate, pAllocator);
+    write_lock_guard_t lock(dispatch_lock);
+    uint64_t descriptor_update_template_id = reinterpret_cast<uint64_t &>(descriptorUpdateTemplate);
+    layer_data->desc_template_createinfo_map.erase(descriptor_update_template_id);
+    lock.unlock();
+
+    auto iter = unique_id_mapping.pop(descriptor_update_template_id);
+    if (iter != unique_id_mapping.end()) {
+        descriptorUpdateTemplate = (VkDescriptorUpdateTemplate)iter->second;
+    } else {
+        descriptorUpdateTemplate = (VkDescriptorUpdateTemplate)0;
+    }
+
+    layer_data->device_dispatch_table.DestroyDescriptorUpdateTemplateKHR(device, descriptorUpdateTemplate, pAllocator);
+}
+
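+// Walks the shadowed descriptor update template create info to find every handle embedded in the raw pData blob,
+// unwraps those handles, and returns a malloc'd copy laid out as the driver expects. The caller owns the returned
+// buffer and must free() it after forwarding the call.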
+void *BuildUnwrappedUpdateTemplateBuffer(ValidationObject *layer_data, uint64_t descriptorUpdateTemplate, const void *pData) {
+    auto const template_map_entry = layer_data->desc_template_createinfo_map.find(descriptorUpdateTemplate);
+    auto const &create_info = template_map_entry->second->create_info;
+    size_t allocation_size = 0;
+    std::vector<std::tuple<size_t, VulkanObjectType, uint64_t, size_t>> template_entries;
+
+    for (uint32_t i = 0; i < create_info.descriptorUpdateEntryCount; i++) {
+        for (uint32_t j = 0; j < create_info.pDescriptorUpdateEntries[i].descriptorCount; j++) {
+            size_t offset = create_info.pDescriptorUpdateEntries[i].offset + j * create_info.pDescriptorUpdateEntries[i].stride;
+            char *update_entry = (char *)(pData) + offset;
+
+            switch (create_info.pDescriptorUpdateEntries[i].descriptorType) {
+                case VK_DESCRIPTOR_TYPE_SAMPLER:
+                case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+                case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+                case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
+                case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
+                    auto image_entry = reinterpret_cast<VkDescriptorImageInfo *>(update_entry);
+                    allocation_size = std::max(allocation_size, offset + sizeof(VkDescriptorImageInfo));
+
+                    VkDescriptorImageInfo *wrapped_entry = new VkDescriptorImageInfo(*image_entry);
+                    wrapped_entry->sampler = layer_data->Unwrap(image_entry->sampler);
+                    wrapped_entry->imageView = layer_data->Unwrap(image_entry->imageView);
+                    template_entries.emplace_back(offset, kVulkanObjectTypeImage, CastToUint64(wrapped_entry), 0);
+                } break;
+
+                case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+                case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+                case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+                case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
+                    auto buffer_entry = reinterpret_cast<VkDescriptorBufferInfo *>(update_entry);
+                    allocation_size = std::max(allocation_size, offset + sizeof(VkDescriptorBufferInfo));
+
+                    VkDescriptorBufferInfo *wrapped_entry = new VkDescriptorBufferInfo(*buffer_entry);
+                    wrapped_entry->buffer = layer_data->Unwrap(buffer_entry->buffer);
+                    template_entries.emplace_back(offset, kVulkanObjectTypeBuffer, CastToUint64(wrapped_entry), 0);
+                } break;
+
+                case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+                case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
+                    auto buffer_view_handle = reinterpret_cast<VkBufferView *>(update_entry);
+                    allocation_size = std::max(allocation_size, offset + sizeof(VkBufferView));
+
+                    VkBufferView wrapped_entry = layer_data->Unwrap(*buffer_view_handle);
+                    template_entries.emplace_back(offset, kVulkanObjectTypeBufferView, CastToUint64(wrapped_entry), 0);
+                } break;
+                case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: {
+                    size_t numBytes = create_info.pDescriptorUpdateEntries[i].descriptorCount;
+                    allocation_size = std::max(allocation_size, offset + numBytes);
+                    // nothing to unwrap, just plain data
+                    template_entries.emplace_back(offset, kVulkanObjectTypeUnknown, CastToUint64(update_entry),
+                                                  numBytes);
+                    // Advance j past descriptorCount to exit the inner loop; the entire inline block was handled above.
+                    j = create_info.pDescriptorUpdateEntries[i].descriptorCount;
+                } break;
+                default:
+                    assert(0);
+                    break;
+            }
+        }
+    }
+    // Allocate required buffer size and populate with source/unwrapped data
+    void *unwrapped_data = malloc(allocation_size);
+    for (auto &this_entry : template_entries) {
+        VulkanObjectType type = std::get<1>(this_entry);
+        void *destination = (char *)unwrapped_data + std::get<0>(this_entry);
+        uint64_t source = std::get<2>(this_entry);
+        size_t size = std::get<3>(this_entry);
+
+        if (size != 0) {
+            assert(type == kVulkanObjectTypeUnknown);
+            memcpy(destination, CastFromUint64<void *>(source), size);
+        } else {
+            switch (type) {
+                case kVulkanObjectTypeImage:
+                    *(reinterpret_cast<VkDescriptorImageInfo *>(destination)) =
+                        *(reinterpret_cast<VkDescriptorImageInfo *>(source));
+                    delete CastFromUint64<VkDescriptorImageInfo *>(source);
+                    break;
+                case kVulkanObjectTypeBuffer:
+                    *(reinterpret_cast<VkDescriptorBufferInfo *>(destination)) =
+                        *(CastFromUint64<VkDescriptorBufferInfo *>(source));
+                    delete CastFromUint64<VkDescriptorBufferInfo *>(source);
+                    break;
+                case kVulkanObjectTypeBufferView:
+                    *(reinterpret_cast<VkBufferView *>(destination)) = CastFromUint64<VkBufferView>(source);
+                    break;
+                default:
+                    assert(0);
+                    break;
+            }
+        }
+    }
+    return (void *)unwrapped_data;
+}
+
+void DispatchUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
+                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const void *pData) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles)
+        return layer_data->device_dispatch_table.UpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate,
+                                                                                 pData);
+    uint64_t template_handle = reinterpret_cast<uint64_t &>(descriptorUpdateTemplate);
+    void *unwrapped_buffer = nullptr;
+    {
+        read_lock_guard_t lock(dispatch_lock);
+        descriptorSet = layer_data->Unwrap(descriptorSet);
+        descriptorUpdateTemplate = (VkDescriptorUpdateTemplate)layer_data->Unwrap(descriptorUpdateTemplate);
+        unwrapped_buffer = BuildUnwrappedUpdateTemplateBuffer(layer_data, template_handle, pData);
+    }
+    layer_data->device_dispatch_table.UpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate, unwrapped_buffer);
+    free(unwrapped_buffer);
+}
+
+void DispatchUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
+                                                VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const void *pData) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles)
+        return layer_data->device_dispatch_table.UpdateDescriptorSetWithTemplateKHR(device, descriptorSet, descriptorUpdateTemplate,
+                                                                                    pData);
+    uint64_t template_handle = reinterpret_cast<uint64_t &>(descriptorUpdateTemplate);
+    void *unwrapped_buffer = nullptr;
+    {
+        read_lock_guard_t lock(dispatch_lock);
+        descriptorSet = layer_data->Unwrap(descriptorSet);
+        descriptorUpdateTemplate = layer_data->Unwrap(descriptorUpdateTemplate);
+        unwrapped_buffer = BuildUnwrappedUpdateTemplateBuffer(layer_data, template_handle, pData);
+    }
+    layer_data->device_dispatch_table.UpdateDescriptorSetWithTemplateKHR(device, descriptorSet, descriptorUpdateTemplate, unwrapped_buffer);
+    free(unwrapped_buffer);
+}
+
+void DispatchCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer,
+                                                 VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout,
+                                                 uint32_t set, const void *pData) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles)
+        return layer_data->device_dispatch_table.CmdPushDescriptorSetWithTemplateKHR(commandBuffer, descriptorUpdateTemplate,
+                                                                                     layout, set, pData);
+    uint64_t template_handle = reinterpret_cast<uint64_t &>(descriptorUpdateTemplate);
+    void *unwrapped_buffer = nullptr;
+    {
+        read_lock_guard_t lock(dispatch_lock);
+        descriptorUpdateTemplate = layer_data->Unwrap(descriptorUpdateTemplate);
+        layout = layer_data->Unwrap(layout);
+        unwrapped_buffer = BuildUnwrappedUpdateTemplateBuffer(layer_data, template_handle, pData);
+    }
+    layer_data->device_dispatch_table.CmdPushDescriptorSetWithTemplateKHR(commandBuffer, descriptorUpdateTemplate, layout, set,
+                                                                          unwrapped_buffer);
+    free(unwrapped_buffer);
+}
+
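+// VkDisplayKHR handles are reported by the ICD rather than created by the application, so they are wrapped lazily:
+// MaybeWrapDisplay returns the existing wrapped handle for a display that has already been seen, or creates one.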
+VkResult DispatchGetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
+                                                       VkDisplayPropertiesKHR *pProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    VkResult result =
+        layer_data->instance_dispatch_table.GetPhysicalDeviceDisplayPropertiesKHR(physicalDevice, pPropertyCount, pProperties);
+    if (!wrap_handles) return result;
+    if ((result == VK_SUCCESS || result == VK_INCOMPLETE) && pProperties) {
+        for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
+            pProperties[idx0].display = layer_data->MaybeWrapDisplay(pProperties[idx0].display, layer_data);
+        }
+    }
+    return result;
+}
+
+VkResult DispatchGetPhysicalDeviceDisplayProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
+                                                        VkDisplayProperties2KHR *pProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    VkResult result =
+        layer_data->instance_dispatch_table.GetPhysicalDeviceDisplayProperties2KHR(physicalDevice, pPropertyCount, pProperties);
+    if (!wrap_handles) return result;
+    if ((result == VK_SUCCESS || result == VK_INCOMPLETE) && pProperties) {
+        for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
+            pProperties[idx0].displayProperties.display =
+                layer_data->MaybeWrapDisplay(pProperties[idx0].displayProperties.display, layer_data);
+        }
+    }
+    return result;
+}
+
+VkResult DispatchGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
+                                                            VkDisplayPlanePropertiesKHR *pProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    VkResult result =
+        layer_data->instance_dispatch_table.GetPhysicalDeviceDisplayPlanePropertiesKHR(physicalDevice, pPropertyCount, pProperties);
+    if (!wrap_handles) return result;
+    if ((result == VK_SUCCESS || result == VK_INCOMPLETE) && pProperties) {
+        for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
+            VkDisplayKHR &opt_display = pProperties[idx0].currentDisplay;
+            if (opt_display) opt_display = layer_data->MaybeWrapDisplay(opt_display, layer_data);
+        }
+    }
+    return result;
+}
+
+VkResult DispatchGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
+                                                             VkDisplayPlaneProperties2KHR *pProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceDisplayPlaneProperties2KHR(physicalDevice,
+                                                                                                      pPropertyCount, pProperties);
+    if (!wrap_handles) return result;
+    if ((result == VK_SUCCESS || result == VK_INCOMPLETE) && pProperties) {
+        for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
+            VkDisplayKHR &opt_display = pProperties[idx0].displayPlaneProperties.currentDisplay;
+            if (opt_display) opt_display = layer_data->MaybeWrapDisplay(opt_display, layer_data);
+        }
+    }
+    return result;
+}
+
+VkResult DispatchGetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t *pDisplayCount,
+                                                     VkDisplayKHR *pDisplays) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    VkResult result = layer_data->instance_dispatch_table.GetDisplayPlaneSupportedDisplaysKHR(physicalDevice, planeIndex,
+                                                                                              pDisplayCount, pDisplays);
+    if (!wrap_handles) return result;
+    if ((result == VK_SUCCESS || result == VK_INCOMPLETE) && pDisplays) {
+        for (uint32_t i = 0; i < *pDisplayCount; ++i) {
+            if (pDisplays[i]) pDisplays[i] = layer_data->MaybeWrapDisplay(pDisplays[i], layer_data);
+        }
+    }
+    return result;
+}
+
+VkResult DispatchGetDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t *pPropertyCount,
+                                             VkDisplayModePropertiesKHR *pProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    if (!wrap_handles)
+        return layer_data->instance_dispatch_table.GetDisplayModePropertiesKHR(physicalDevice, display, pPropertyCount,
+                                                                               pProperties);
+    {
+        display = layer_data->Unwrap(display);
+    }
+
+    VkResult result = layer_data->instance_dispatch_table.GetDisplayModePropertiesKHR(physicalDevice, display, pPropertyCount, pProperties);
+    if ((result == VK_SUCCESS || result == VK_INCOMPLETE) && pProperties) {
+        for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
+            pProperties[idx0].displayMode = layer_data->WrapNew(pProperties[idx0].displayMode);
+        }
+    }
+    return result;
+}
+
+VkResult DispatchGetDisplayModeProperties2KHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t *pPropertyCount,
+                                              VkDisplayModeProperties2KHR *pProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    if (!wrap_handles)
+        return layer_data->instance_dispatch_table.GetDisplayModeProperties2KHR(physicalDevice, display, pPropertyCount,
+                                                                                pProperties);
+    {
+        display = layer_data->Unwrap(display);
+    }
+
+    VkResult result =
+        layer_data->instance_dispatch_table.GetDisplayModeProperties2KHR(physicalDevice, display, pPropertyCount, pProperties);
+    if ((result == VK_SUCCESS || result == VK_INCOMPLETE) && pProperties) {
+        for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
+            pProperties[idx0].displayModeProperties.displayMode = layer_data->WrapNew(pProperties[idx0].displayModeProperties.displayMode);
+        }
+    }
+    return result;
+}
+
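+// The object handle embedded in the tag info is a wrapped handle; translate it back to the driver handle (when a
+// mapping exists) before calling down the chain.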
+VkResult DispatchDebugMarkerSetObjectTagEXT(VkDevice device, const VkDebugMarkerObjectTagInfoEXT *pTagInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DebugMarkerSetObjectTagEXT(device, pTagInfo);
+    safe_VkDebugMarkerObjectTagInfoEXT local_tag_info(pTagInfo);
+    {
+        auto it = unique_id_mapping.find(reinterpret_cast<uint64_t &>(local_tag_info.object));
+        if (it != unique_id_mapping.end()) {
+            local_tag_info.object = it->second;
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.DebugMarkerSetObjectTagEXT(
+        device, reinterpret_cast<VkDebugMarkerObjectTagInfoEXT *>(&local_tag_info));
+    return result;
+}
+
+VkResult DispatchDebugMarkerSetObjectNameEXT(VkDevice device, const VkDebugMarkerObjectNameInfoEXT *pNameInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DebugMarkerSetObjectNameEXT(device, pNameInfo);
+    safe_VkDebugMarkerObjectNameInfoEXT local_name_info(pNameInfo);
+    {
+        auto it = unique_id_mapping.find(reinterpret_cast<uint64_t &>(local_name_info.object));
+        if (it != unique_id_mapping.end()) {
+            local_name_info.object = it->second;
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.DebugMarkerSetObjectNameEXT(
+        device, reinterpret_cast<VkDebugMarkerObjectNameInfoEXT *>(&local_name_info));
+    return result;
+}
+
+// VK_EXT_debug_utils
+VkResult DispatchSetDebugUtilsObjectTagEXT(VkDevice device, const VkDebugUtilsObjectTagInfoEXT *pTagInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.SetDebugUtilsObjectTagEXT(device, pTagInfo);
+    safe_VkDebugUtilsObjectTagInfoEXT local_tag_info(pTagInfo);
+    {
+        auto it = unique_id_mapping.find(reinterpret_cast<uint64_t &>(local_tag_info.objectHandle));
+        if (it != unique_id_mapping.end()) {
+            local_tag_info.objectHandle = it->second;
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.SetDebugUtilsObjectTagEXT(
+        device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>(&local_tag_info));
+    return result;
+}
+
+VkResult DispatchSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsObjectNameInfoEXT *pNameInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.SetDebugUtilsObjectNameEXT(device, pNameInfo);
+    safe_VkDebugUtilsObjectNameInfoEXT local_name_info(pNameInfo);
+    {
+        auto it = unique_id_mapping.find(reinterpret_cast<uint64_t &>(local_name_info.objectHandle));
+        if (it != unique_id_mapping.end()) {
+            local_name_info.objectHandle = it->second;
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.SetDebugUtilsObjectNameEXT(
+        device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>(&local_name_info));
+    return result;
+}
+
+
+
+
+// Skip vkCreateInstance dispatch, manually generated
+
+// Skip vkDestroyInstance dispatch, manually generated
+
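+// The instance- and physical-device-level entry points below are plain pass-throughs: dispatchable handles
+// (instance, physical device, device, queue) are not wrapped by this layer, so no unwrapping is required.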
+VkResult DispatchEnumeratePhysicalDevices(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceCount,
+    VkPhysicalDevice*                           pPhysicalDevices)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    VkResult result = layer_data->instance_dispatch_table.EnumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices);
+
+    return result;
+}
+
+void DispatchGetPhysicalDeviceFeatures(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures*                   pFeatures)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    layer_data->instance_dispatch_table.GetPhysicalDeviceFeatures(physicalDevice, pFeatures);
+
+}
+
+void DispatchGetPhysicalDeviceFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties*                         pFormatProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    layer_data->instance_dispatch_table.GetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties);
+
+}
+
+VkResult DispatchGetPhysicalDeviceImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageCreateFlags                          flags,
+    VkImageFormatProperties*                    pImageFormatProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceImageFormatProperties(physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
+
+    return result;
+}
+
+void DispatchGetPhysicalDeviceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties*                 pProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    layer_data->instance_dispatch_table.GetPhysicalDeviceProperties(physicalDevice, pProperties);
+
+}
+
+void DispatchGetPhysicalDeviceQueueFamilyProperties(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties*                    pQueueFamilyProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    layer_data->instance_dispatch_table.GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
+
+}
+
+void DispatchGetPhysicalDeviceMemoryProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties*           pMemoryProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    layer_data->instance_dispatch_table.GetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
+
+}
+
+PFN_vkVoidFunction DispatchGetInstanceProcAddr(
+    VkInstance                                  instance,
+    const char*                                 pName)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    PFN_vkVoidFunction result = layer_data->instance_dispatch_table.GetInstanceProcAddr(instance, pName);
+
+    return result;
+}
+
+PFN_vkVoidFunction DispatchGetDeviceProcAddr(
+    VkDevice                                    device,
+    const char*                                 pName)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    PFN_vkVoidFunction result = layer_data->device_dispatch_table.GetDeviceProcAddr(device, pName);
+
+    return result;
+}
+
+// Skip vkCreateDevice dispatch, manually generated
+
+// Skip vkDestroyDevice dispatch, manually generated
+
+// Skip vkEnumerateInstanceExtensionProperties dispatch, manually generated
+
+// Skip vkEnumerateDeviceExtensionProperties dispatch, manually generated
+
+// Skip vkEnumerateInstanceLayerProperties dispatch, manually generated
+
+// Skip vkEnumerateDeviceLayerProperties dispatch, manually generated
+
+void DispatchGetDeviceQueue(
+    VkDevice                                    device,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t                                    queueIndex,
+    VkQueue*                                    pQueue)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    layer_data->device_dispatch_table.GetDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue);
+
+}
+
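+// Semaphores, the fence, and any handles carried in each submit's pNext chain are unwrapped in a deep copy of the
+// submit array before the call is forwarded.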
+VkResult DispatchQueueSubmit(
+    VkQueue                                     queue,
+    uint32_t                                    submitCount,
+    const VkSubmitInfo*                         pSubmits,
+    VkFence                                     fence)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.QueueSubmit(queue, submitCount, pSubmits, fence);
+    safe_VkSubmitInfo *local_pSubmits = NULL;
+    {
+        if (pSubmits) {
+            local_pSubmits = new safe_VkSubmitInfo[submitCount];
+            for (uint32_t index0 = 0; index0 < submitCount; ++index0) {
+                local_pSubmits[index0].initialize(&pSubmits[index0]);
+                WrapPnextChainHandles(layer_data, local_pSubmits[index0].pNext);
+                if (local_pSubmits[index0].pWaitSemaphores) {
+                    for (uint32_t index1 = 0; index1 < local_pSubmits[index0].waitSemaphoreCount; ++index1) {
+                        local_pSubmits[index0].pWaitSemaphores[index1] = layer_data->Unwrap(local_pSubmits[index0].pWaitSemaphores[index1]);
+                    }
+                }
+                if (local_pSubmits[index0].pSignalSemaphores) {
+                    for (uint32_t index1 = 0; index1 < local_pSubmits[index0].signalSemaphoreCount; ++index1) {
+                        local_pSubmits[index0].pSignalSemaphores[index1] = layer_data->Unwrap(local_pSubmits[index0].pSignalSemaphores[index1]);
+                    }
+                }
+            }
+        }
+        fence = layer_data->Unwrap(fence);
+    }
+    VkResult result = layer_data->device_dispatch_table.QueueSubmit(queue, submitCount, (const VkSubmitInfo*)local_pSubmits, fence);
+    if (local_pSubmits) {
+        delete[] local_pSubmits;
+    }
+    return result;
+}
+
+VkResult DispatchQueueWaitIdle(
+    VkQueue                                     queue)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
+    VkResult result = layer_data->device_dispatch_table.QueueWaitIdle(queue);
+
+    return result;
+}
+
+VkResult DispatchDeviceWaitIdle(
+    VkDevice                                    device)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    VkResult result = layer_data->device_dispatch_table.DeviceWaitIdle(device);
+
+    return result;
+}
+
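+// The allocate info's pNext chain can carry handles (for example, a dedicated-allocation image or buffer), so it is
+// unwrapped via a stack-local safe copy; the returned VkDeviceMemory handle is then wrapped.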
+VkResult DispatchAllocateMemory(
+    VkDevice                                    device,
+    const VkMemoryAllocateInfo*                 pAllocateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDeviceMemory*                             pMemory)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.AllocateMemory(device, pAllocateInfo, pAllocator, pMemory);
+    safe_VkMemoryAllocateInfo var_local_pAllocateInfo;
+    safe_VkMemoryAllocateInfo *local_pAllocateInfo = NULL;
+    {
+        if (pAllocateInfo) {
+            local_pAllocateInfo = &var_local_pAllocateInfo;
+            local_pAllocateInfo->initialize(pAllocateInfo);
+            WrapPnextChainHandles(layer_data, local_pAllocateInfo->pNext);
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.AllocateMemory(device, (const VkMemoryAllocateInfo*)local_pAllocateInfo, pAllocator, pMemory);
+    if (VK_SUCCESS == result) {
+        *pMemory = layer_data->WrapNew(*pMemory);
+    }
+    return result;
+}
+
+void DispatchFreeMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    const VkAllocationCallbacks*                pAllocator)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.FreeMemory(device, memory, pAllocator);
+    uint64_t memory_id = reinterpret_cast<uint64_t &>(memory);
+    auto iter = unique_id_mapping.pop(memory_id);
+    if (iter != unique_id_mapping.end()) {
+        memory = (VkDeviceMemory)iter->second;
+    } else {
+        memory = (VkDeviceMemory)0;
+    }
+    layer_data->device_dispatch_table.FreeMemory(device, memory, pAllocator);
+
+}
+
+VkResult DispatchMapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                offset,
+    VkDeviceSize                                size,
+    VkMemoryMapFlags                            flags,
+    void**                                      ppData)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.MapMemory(device, memory, offset, size, flags, ppData);
+    {
+        memory = layer_data->Unwrap(memory);
+    }
+    VkResult result = layer_data->device_dispatch_table.MapMemory(device, memory, offset, size, flags, ppData);
+
+    return result;
+}
+
+void DispatchUnmapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.UnmapMemory(device, memory);
+    {
+        memory = layer_data->Unwrap(memory);
+    }
+    layer_data->device_dispatch_table.UnmapMemory(device, memory);
+
+}
+
+VkResult DispatchFlushMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.FlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
+    safe_VkMappedMemoryRange *local_pMemoryRanges = NULL;
+    {
+        if (pMemoryRanges) {
+            local_pMemoryRanges = new safe_VkMappedMemoryRange[memoryRangeCount];
+            for (uint32_t index0 = 0; index0 < memoryRangeCount; ++index0) {
+                local_pMemoryRanges[index0].initialize(&pMemoryRanges[index0]);
+                if (pMemoryRanges[index0].memory) {
+                    local_pMemoryRanges[index0].memory = layer_data->Unwrap(pMemoryRanges[index0].memory);
+                }
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.FlushMappedMemoryRanges(device, memoryRangeCount, (const VkMappedMemoryRange*)local_pMemoryRanges);
+    if (local_pMemoryRanges) {
+        delete[] local_pMemoryRanges;
+    }
+    return result;
+}
+
+VkResult DispatchInvalidateMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.InvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
+    safe_VkMappedMemoryRange *local_pMemoryRanges = NULL;
+    {
+        if (pMemoryRanges) {
+            local_pMemoryRanges = new safe_VkMappedMemoryRange[memoryRangeCount];
+            for (uint32_t index0 = 0; index0 < memoryRangeCount; ++index0) {
+                local_pMemoryRanges[index0].initialize(&pMemoryRanges[index0]);
+                if (pMemoryRanges[index0].memory) {
+                    local_pMemoryRanges[index0].memory = layer_data->Unwrap(pMemoryRanges[index0].memory);
+                }
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.InvalidateMappedMemoryRanges(device, memoryRangeCount, (const VkMappedMemoryRange*)local_pMemoryRanges);
+    if (local_pMemoryRanges) {
+        delete[] local_pMemoryRanges;
+    }
+    return result;
+}
+
+void DispatchGetDeviceMemoryCommitment(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize*                               pCommittedMemoryInBytes)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes);
+    {
+        memory = layer_data->Unwrap(memory);
+    }
+    layer_data->device_dispatch_table.GetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes);
+
+}
+
+VkResult DispatchBindBufferMemory(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.BindBufferMemory(device, buffer, memory, memoryOffset);
+    {
+        buffer = layer_data->Unwrap(buffer);
+        memory = layer_data->Unwrap(memory);
+    }
+    VkResult result = layer_data->device_dispatch_table.BindBufferMemory(device, buffer, memory, memoryOffset);
+
+    return result;
+}
+
+VkResult DispatchBindImageMemory(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.BindImageMemory(device, image, memory, memoryOffset);
+    {
+        image = layer_data->Unwrap(image);
+        memory = layer_data->Unwrap(memory);
+    }
+    VkResult result = layer_data->device_dispatch_table.BindImageMemory(device, image, memory, memoryOffset);
+
+    return result;
+}
+
+void DispatchGetBufferMemoryRequirements(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkMemoryRequirements*                       pMemoryRequirements)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
+    {
+        buffer = layer_data->Unwrap(buffer);
+    }
+    layer_data->device_dispatch_table.GetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
+
+}
+
+void DispatchGetImageMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkMemoryRequirements*                       pMemoryRequirements)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetImageMemoryRequirements(device, image, pMemoryRequirements);
+    {
+        image = layer_data->Unwrap(image);
+    }
+    layer_data->device_dispatch_table.GetImageMemoryRequirements(device, image, pMemoryRequirements);
+
+}
+
+void DispatchGetImageSparseMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements*            pSparseMemoryRequirements)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
+    {
+        image = layer_data->Unwrap(image);
+    }
+    layer_data->device_dispatch_table.GetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
+
+}
+
+void DispatchGetPhysicalDeviceSparseImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkSampleCountFlagBits                       samples,
+    VkImageUsageFlags                           usage,
+    VkImageTiling                               tiling,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties*              pProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    layer_data->instance_dispatch_table.GetPhysicalDeviceSparseImageFormatProperties(physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties);
+
+}
+
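+// QueueBindSparse deep-copies pBindInfo into safe_VkBindSparseInfo structures
+// so that the semaphore, buffer, image, and memory handles nested inside each
+// bind entry can be unwrapped without modifying the caller's input.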
+VkResult DispatchQueueBindSparse(
+    VkQueue                                     queue,
+    uint32_t                                    bindInfoCount,
+    const VkBindSparseInfo*                     pBindInfo,
+    VkFence                                     fence)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.QueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
+    safe_VkBindSparseInfo *local_pBindInfo = NULL;
+    {
+        if (pBindInfo) {
+            local_pBindInfo = new safe_VkBindSparseInfo[bindInfoCount];
+            for (uint32_t index0 = 0; index0 < bindInfoCount; ++index0) {
+                local_pBindInfo[index0].initialize(&pBindInfo[index0]);
+                WrapPnextChainHandles(layer_data, local_pBindInfo[index0].pNext);
+                if (local_pBindInfo[index0].pWaitSemaphores) {
+                    for (uint32_t index1 = 0; index1 < local_pBindInfo[index0].waitSemaphoreCount; ++index1) {
+                        local_pBindInfo[index0].pWaitSemaphores[index1] = layer_data->Unwrap(local_pBindInfo[index0].pWaitSemaphores[index1]);
+                    }
+                }
+                if (local_pBindInfo[index0].pBufferBinds) {
+                    for (uint32_t index1 = 0; index1 < local_pBindInfo[index0].bufferBindCount; ++index1) {
+                        if (pBindInfo[index0].pBufferBinds[index1].buffer) {
+                            local_pBindInfo[index0].pBufferBinds[index1].buffer = layer_data->Unwrap(pBindInfo[index0].pBufferBinds[index1].buffer);
+                        }
+                        if (local_pBindInfo[index0].pBufferBinds[index1].pBinds) {
+                            for (uint32_t index2 = 0; index2 < local_pBindInfo[index0].pBufferBinds[index1].bindCount; ++index2) {
+                                if (pBindInfo[index0].pBufferBinds[index1].pBinds[index2].memory) {
+                                    local_pBindInfo[index0].pBufferBinds[index1].pBinds[index2].memory = layer_data->Unwrap(pBindInfo[index0].pBufferBinds[index1].pBinds[index2].memory);
+                                }
+                            }
+                        }
+                    }
+                }
+                if (local_pBindInfo[index0].pImageOpaqueBinds) {
+                    for (uint32_t index1 = 0; index1 < local_pBindInfo[index0].imageOpaqueBindCount; ++index1) {
+                        if (pBindInfo[index0].pImageOpaqueBinds[index1].image) {
+                            local_pBindInfo[index0].pImageOpaqueBinds[index1].image = layer_data->Unwrap(pBindInfo[index0].pImageOpaqueBinds[index1].image);
+                        }
+                        if (local_pBindInfo[index0].pImageOpaqueBinds[index1].pBinds) {
+                            for (uint32_t index2 = 0; index2 < local_pBindInfo[index0].pImageOpaqueBinds[index1].bindCount; ++index2) {
+                                if (pBindInfo[index0].pImageOpaqueBinds[index1].pBinds[index2].memory) {
+                                    local_pBindInfo[index0].pImageOpaqueBinds[index1].pBinds[index2].memory = layer_data->Unwrap(pBindInfo[index0].pImageOpaqueBinds[index1].pBinds[index2].memory);
+                                }
+                            }
+                        }
+                    }
+                }
+                if (local_pBindInfo[index0].pImageBinds) {
+                    for (uint32_t index1 = 0; index1 < local_pBindInfo[index0].imageBindCount; ++index1) {
+                        if (pBindInfo[index0].pImageBinds[index1].image) {
+                            local_pBindInfo[index0].pImageBinds[index1].image = layer_data->Unwrap(pBindInfo[index0].pImageBinds[index1].image);
+                        }
+                        if (local_pBindInfo[index0].pImageBinds[index1].pBinds) {
+                            for (uint32_t index2 = 0; index2 < local_pBindInfo[index0].pImageBinds[index1].bindCount; ++index2) {
+                                if (pBindInfo[index0].pImageBinds[index1].pBinds[index2].memory) {
+                                    local_pBindInfo[index0].pImageBinds[index1].pBinds[index2].memory = layer_data->Unwrap(pBindInfo[index0].pImageBinds[index1].pBinds[index2].memory);
+                                }
+                            }
+                        }
+                    }
+                }
+                if (local_pBindInfo[index0].pSignalSemaphores) {
+                    for (uint32_t index1 = 0; index1 < local_pBindInfo[index0].signalSemaphoreCount; ++index1) {
+                        local_pBindInfo[index0].pSignalSemaphores[index1] = layer_data->Unwrap(local_pBindInfo[index0].pSignalSemaphores[index1]);
+                    }
+                }
+            }
+        }
+        fence = layer_data->Unwrap(fence);
+    }
+    VkResult result = layer_data->device_dispatch_table.QueueBindSparse(queue, bindInfoCount, (const VkBindSparseInfo*)local_pBindInfo, fence);
+    if (local_pBindInfo) {
+        delete[] local_pBindInfo;
+    }
+    return result;
+}
+
+VkResult DispatchCreateFence(
+    VkDevice                                    device,
+    const VkFenceCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CreateFence(device, pCreateInfo, pAllocator, pFence);
+    VkResult result = layer_data->device_dispatch_table.CreateFence(device, pCreateInfo, pAllocator, pFence);
+    if (VK_SUCCESS == result) {
+        *pFence = layer_data->WrapNew(*pFence);
+    }
+    return result;
+}
+
+void DispatchDestroyFence(
+    VkDevice                                    device,
+    VkFence                                     fence,
+    const VkAllocationCallbacks*                pAllocator)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DestroyFence(device, fence, pAllocator);
+    uint64_t fence_id = reinterpret_cast<uint64_t &>(fence);
+    auto iter = unique_id_mapping.pop(fence_id);
+    if (iter != unique_id_mapping.end()) {
+        fence = (VkFence)iter->second;
+    } else {
+        fence = (VkFence)0;
+    }
+    layer_data->device_dispatch_table.DestroyFence(device, fence, pAllocator);
+
+}
+
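+// Handle arrays (fences, pipeline caches, ...) are unwrapped into a small
+// stack buffer of DISPATCH_MAX_STACK_ALLOCATIONS entries; a heap array is
+// allocated only when the count exceeds that limit and is freed after the
+// downcall.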
+VkResult DispatchResetFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.ResetFences(device, fenceCount, pFences);
+    VkFence var_local_pFences[DISPATCH_MAX_STACK_ALLOCATIONS];
+    VkFence *local_pFences = NULL;
+    {
+        if (pFences) {
+            local_pFences = fenceCount > DISPATCH_MAX_STACK_ALLOCATIONS ? new VkFence[fenceCount] : var_local_pFences;
+            for (uint32_t index0 = 0; index0 < fenceCount; ++index0) {
+                local_pFences[index0] = layer_data->Unwrap(pFences[index0]);
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.ResetFences(device, fenceCount, (const VkFence*)local_pFences);
+    if (local_pFences != var_local_pFences)
+        delete[] local_pFences;
+    return result;
+}
+
+VkResult DispatchGetFenceStatus(
+    VkDevice                                    device,
+    VkFence                                     fence)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetFenceStatus(device, fence);
+    {
+        fence = layer_data->Unwrap(fence);
+    }
+    VkResult result = layer_data->device_dispatch_table.GetFenceStatus(device, fence);
+
+    return result;
+}
+
+VkResult DispatchWaitForFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences,
+    VkBool32                                    waitAll,
+    uint64_t                                    timeout)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.WaitForFences(device, fenceCount, pFences, waitAll, timeout);
+    VkFence var_local_pFences[DISPATCH_MAX_STACK_ALLOCATIONS];
+    VkFence *local_pFences = NULL;
+    {
+        if (pFences) {
+            local_pFences = fenceCount > DISPATCH_MAX_STACK_ALLOCATIONS ? new VkFence[fenceCount] : var_local_pFences;
+            for (uint32_t index0 = 0; index0 < fenceCount; ++index0) {
+                local_pFences[index0] = layer_data->Unwrap(pFences[index0]);
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.WaitForFences(device, fenceCount, (const VkFence*)local_pFences, waitAll, timeout);
+    if (local_pFences != var_local_pFences)
+        delete[] local_pFences;
+    return result;
+}
+
+VkResult DispatchCreateSemaphore(
+    VkDevice                                    device,
+    const VkSemaphoreCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSemaphore*                                pSemaphore)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore);
+    VkResult result = layer_data->device_dispatch_table.CreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore);
+    if (VK_SUCCESS == result) {
+        *pSemaphore = layer_data->WrapNew(*pSemaphore);
+    }
+    return result;
+}
+
+void DispatchDestroySemaphore(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    const VkAllocationCallbacks*                pAllocator)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DestroySemaphore(device, semaphore, pAllocator);
+    uint64_t semaphore_id = reinterpret_cast<uint64_t &>(semaphore);
+    auto iter = unique_id_mapping.pop(semaphore_id);
+    if (iter != unique_id_mapping.end()) {
+        semaphore = (VkSemaphore)iter->second;
+    } else {
+        semaphore = (VkSemaphore)0;
+    }
+    layer_data->device_dispatch_table.DestroySemaphore(device, semaphore, pAllocator);
+
+}
+
+VkResult DispatchCreateEvent(
+    VkDevice                                    device,
+    const VkEventCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkEvent*                                    pEvent)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CreateEvent(device, pCreateInfo, pAllocator, pEvent);
+    VkResult result = layer_data->device_dispatch_table.CreateEvent(device, pCreateInfo, pAllocator, pEvent);
+    if (VK_SUCCESS == result) {
+        *pEvent = layer_data->WrapNew(*pEvent);
+    }
+    return result;
+}
+
+void DispatchDestroyEvent(
+    VkDevice                                    device,
+    VkEvent                                     event,
+    const VkAllocationCallbacks*                pAllocator)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DestroyEvent(device, event, pAllocator);
+    uint64_t event_id = reinterpret_cast<uint64_t &>(event);
+    auto iter = unique_id_mapping.pop(event_id);
+    if (iter != unique_id_mapping.end()) {
+        event = (VkEvent)iter->second;
+    } else {
+        event = (VkEvent)0;
+    }
+    layer_data->device_dispatch_table.DestroyEvent(device, event, pAllocator);
+
+}
+
+VkResult DispatchGetEventStatus(
+    VkDevice                                    device,
+    VkEvent                                     event)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetEventStatus(device, event);
+    {
+        event = layer_data->Unwrap(event);
+    }
+    VkResult result = layer_data->device_dispatch_table.GetEventStatus(device, event);
+
+    return result;
+}
+
+VkResult DispatchSetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.SetEvent(device, event);
+    {
+        event = layer_data->Unwrap(event);
+    }
+    VkResult result = layer_data->device_dispatch_table.SetEvent(device, event);
+
+    return result;
+}
+
+VkResult DispatchResetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.ResetEvent(device, event);
+    {
+        event = layer_data->Unwrap(event);
+    }
+    VkResult result = layer_data->device_dispatch_table.ResetEvent(device, event);
+
+    return result;
+}
+
+VkResult DispatchCreateQueryPool(
+    VkDevice                                    device,
+    const VkQueryPoolCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkQueryPool*                                pQueryPool)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool);
+    VkResult result = layer_data->device_dispatch_table.CreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool);
+    if (VK_SUCCESS == result) {
+        *pQueryPool = layer_data->WrapNew(*pQueryPool);
+    }
+    return result;
+}
+
+void DispatchDestroyQueryPool(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    const VkAllocationCallbacks*                pAllocator)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DestroyQueryPool(device, queryPool, pAllocator);
+    uint64_t queryPool_id = reinterpret_cast<uint64_t &>(queryPool);
+    auto iter = unique_id_mapping.pop(queryPool_id);
+    if (iter != unique_id_mapping.end()) {
+        queryPool = (VkQueryPool)iter->second;
+    } else {
+        queryPool = (VkQueryPool)0;
+    }
+    layer_data->device_dispatch_table.DestroyQueryPool(device, queryPool, pAllocator);
+
+}
+
+VkResult DispatchGetQueryPoolResults(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    size_t                                      dataSize,
+    void*                                       pData,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags);
+    {
+        queryPool = layer_data->Unwrap(queryPool);
+    }
+    VkResult result = layer_data->device_dispatch_table.GetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags);
+
+    return result;
+}
+
+VkResult DispatchCreateBuffer(
+    VkDevice                                    device,
+    const VkBufferCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBuffer*                                   pBuffer)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
+    VkResult result = layer_data->device_dispatch_table.CreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
+    if (VK_SUCCESS == result) {
+        *pBuffer = layer_data->WrapNew(*pBuffer);
+    }
+    return result;
+}
+
+void DispatchDestroyBuffer(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    const VkAllocationCallbacks*                pAllocator)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DestroyBuffer(device, buffer, pAllocator);
+    uint64_t buffer_id = reinterpret_cast<uint64_t &>(buffer);
+    auto iter = unique_id_mapping.pop(buffer_id);
+    if (iter != unique_id_mapping.end()) {
+        buffer = (VkBuffer)iter->second;
+    } else {
+        buffer = (VkBuffer)0;
+    }
+    layer_data->device_dispatch_table.DestroyBuffer(device, buffer, pAllocator);
+
+}
+
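+// Create calls whose pCreateInfo embeds other handles unwrap them in a
+// stack-allocated safe_* copy (var_local_pCreateInfo) so the caller's struct
+// is left untouched, then wrap the newly created handle on success.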
+VkResult DispatchCreateBufferView(
+    VkDevice                                    device,
+    const VkBufferViewCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBufferView*                               pView)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CreateBufferView(device, pCreateInfo, pAllocator, pView);
+    safe_VkBufferViewCreateInfo var_local_pCreateInfo;
+    safe_VkBufferViewCreateInfo *local_pCreateInfo = NULL;
+    {
+        if (pCreateInfo) {
+            local_pCreateInfo = &var_local_pCreateInfo;
+            local_pCreateInfo->initialize(pCreateInfo);
+            if (pCreateInfo->buffer) {
+                local_pCreateInfo->buffer = layer_data->Unwrap(pCreateInfo->buffer);
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.CreateBufferView(device, (const VkBufferViewCreateInfo*)local_pCreateInfo, pAllocator, pView);
+    if (VK_SUCCESS == result) {
+        *pView = layer_data->WrapNew(*pView);
+    }
+    return result;
+}
+
+void DispatchDestroyBufferView(
+    VkDevice                                    device,
+    VkBufferView                                bufferView,
+    const VkAllocationCallbacks*                pAllocator)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DestroyBufferView(device, bufferView, pAllocator);
+    uint64_t bufferView_id = reinterpret_cast<uint64_t &>(bufferView);
+    auto iter = unique_id_mapping.pop(bufferView_id);
+    if (iter != unique_id_mapping.end()) {
+        bufferView = (VkBufferView)iter->second;
+    } else {
+        bufferView = (VkBufferView)0;
+    }
+    layer_data->device_dispatch_table.DestroyBufferView(device, bufferView, pAllocator);
+
+}
+
+VkResult DispatchCreateImage(
+    VkDevice                                    device,
+    const VkImageCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImage*                                    pImage)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CreateImage(device, pCreateInfo, pAllocator, pImage);
+    safe_VkImageCreateInfo var_local_pCreateInfo;
+    safe_VkImageCreateInfo *local_pCreateInfo = NULL;
+    {
+        if (pCreateInfo) {
+            local_pCreateInfo = &var_local_pCreateInfo;
+            local_pCreateInfo->initialize(pCreateInfo);
+            WrapPnextChainHandles(layer_data, local_pCreateInfo->pNext);
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.CreateImage(device, (const VkImageCreateInfo*)local_pCreateInfo, pAllocator, pImage);
+    if (VK_SUCCESS == result) {
+        *pImage = layer_data->WrapNew(*pImage);
+    }
+    return result;
+}
+
+void DispatchDestroyImage(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkAllocationCallbacks*                pAllocator)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DestroyImage(device, image, pAllocator);
+    uint64_t image_id = reinterpret_cast<uint64_t &>(image);
+    auto iter = unique_id_mapping.pop(image_id);
+    if (iter != unique_id_mapping.end()) {
+        image = (VkImage)iter->second;
+    } else {
+        image = (VkImage)0;
+    }
+    layer_data->device_dispatch_table.DestroyImage(device, image, pAllocator);
+
+}
+
+void DispatchGetImageSubresourceLayout(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkImageSubresource*                   pSubresource,
+    VkSubresourceLayout*                        pLayout)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetImageSubresourceLayout(device, image, pSubresource, pLayout);
+    {
+        image = layer_data->Unwrap(image);
+    }
+    layer_data->device_dispatch_table.GetImageSubresourceLayout(device, image, pSubresource, pLayout);
+
+}
+
+VkResult DispatchCreateImageView(
+    VkDevice                                    device,
+    const VkImageViewCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImageView*                                pView)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CreateImageView(device, pCreateInfo, pAllocator, pView);
+    safe_VkImageViewCreateInfo var_local_pCreateInfo;
+    safe_VkImageViewCreateInfo *local_pCreateInfo = NULL;
+    {
+        if (pCreateInfo) {
+            local_pCreateInfo = &var_local_pCreateInfo;
+            local_pCreateInfo->initialize(pCreateInfo);
+            if (pCreateInfo->image) {
+                local_pCreateInfo->image = layer_data->Unwrap(pCreateInfo->image);
+            }
+            WrapPnextChainHandles(layer_data, local_pCreateInfo->pNext);
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.CreateImageView(device, (const VkImageViewCreateInfo*)local_pCreateInfo, pAllocator, pView);
+    if (VK_SUCCESS == result) {
+        *pView = layer_data->WrapNew(*pView);
+    }
+    return result;
+}
+
+void DispatchDestroyImageView(
+    VkDevice                                    device,
+    VkImageView                                 imageView,
+    const VkAllocationCallbacks*                pAllocator)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DestroyImageView(device, imageView, pAllocator);
+    uint64_t imageView_id = reinterpret_cast<uint64_t &>(imageView);
+    auto iter = unique_id_mapping.pop(imageView_id);
+    if (iter != unique_id_mapping.end()) {
+        imageView = (VkImageView)iter->second;
+    } else {
+        imageView = (VkImageView)0;
+    }
+    layer_data->device_dispatch_table.DestroyImageView(device, imageView, pAllocator);
+
+}
+
+VkResult DispatchCreateShaderModule(
+    VkDevice                                    device,
+    const VkShaderModuleCreateInfo*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkShaderModule*                             pShaderModule)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
+    safe_VkShaderModuleCreateInfo var_local_pCreateInfo;
+    safe_VkShaderModuleCreateInfo *local_pCreateInfo = NULL;
+    {
+        if (pCreateInfo) {
+            local_pCreateInfo = &var_local_pCreateInfo;
+            local_pCreateInfo->initialize(pCreateInfo);
+            WrapPnextChainHandles(layer_data, local_pCreateInfo->pNext);
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.CreateShaderModule(device, (const VkShaderModuleCreateInfo*)local_pCreateInfo, pAllocator, pShaderModule);
+    if (VK_SUCCESS == result) {
+        *pShaderModule = layer_data->WrapNew(*pShaderModule);
+    }
+    return result;
+}
+
+void DispatchDestroyShaderModule(
+    VkDevice                                    device,
+    VkShaderModule                              shaderModule,
+    const VkAllocationCallbacks*                pAllocator)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DestroyShaderModule(device, shaderModule, pAllocator);
+    uint64_t shaderModule_id = reinterpret_cast<uint64_t &>(shaderModule);
+    auto iter = unique_id_mapping.pop(shaderModule_id);
+    if (iter != unique_id_mapping.end()) {
+        shaderModule = (VkShaderModule)iter->second;
+    } else {
+        shaderModule = (VkShaderModule)0;
+    }
+    layer_data->device_dispatch_table.DestroyShaderModule(device, shaderModule, pAllocator);
+
+}
+
+VkResult DispatchCreatePipelineCache(
+    VkDevice                                    device,
+    const VkPipelineCacheCreateInfo*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineCache*                            pPipelineCache)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache);
+    VkResult result = layer_data->device_dispatch_table.CreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache);
+    if (VK_SUCCESS == result) {
+        *pPipelineCache = layer_data->WrapNew(*pPipelineCache);
+    }
+    return result;
+}
+
+void DispatchDestroyPipelineCache(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    const VkAllocationCallbacks*                pAllocator)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DestroyPipelineCache(device, pipelineCache, pAllocator);
+    uint64_t pipelineCache_id = reinterpret_cast<uint64_t &>(pipelineCache);
+    auto iter = unique_id_mapping.pop(pipelineCache_id);
+    if (iter != unique_id_mapping.end()) {
+        pipelineCache = (VkPipelineCache)iter->second;
+    } else {
+        pipelineCache = (VkPipelineCache)0;
+    }
+    layer_data->device_dispatch_table.DestroyPipelineCache(device, pipelineCache, pAllocator);
+
+}
+
+VkResult DispatchGetPipelineCacheData(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    size_t*                                     pDataSize,
+    void*                                       pData)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetPipelineCacheData(device, pipelineCache, pDataSize, pData);
+    {
+        pipelineCache = layer_data->Unwrap(pipelineCache);
+    }
+    VkResult result = layer_data->device_dispatch_table.GetPipelineCacheData(device, pipelineCache, pDataSize, pData);
+
+    return result;
+}
+
+VkResult DispatchMergePipelineCaches(
+    VkDevice                                    device,
+    VkPipelineCache                             dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkPipelineCache*                      pSrcCaches)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.MergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches);
+    VkPipelineCache var_local_pSrcCaches[DISPATCH_MAX_STACK_ALLOCATIONS];
+    VkPipelineCache *local_pSrcCaches = NULL;
+    {
+        dstCache = layer_data->Unwrap(dstCache);
+        if (pSrcCaches) {
+            local_pSrcCaches = srcCacheCount > DISPATCH_MAX_STACK_ALLOCATIONS ? new VkPipelineCache[srcCacheCount] : var_local_pSrcCaches;
+            for (uint32_t index0 = 0; index0 < srcCacheCount; ++index0) {
+                local_pSrcCaches[index0] = layer_data->Unwrap(pSrcCaches[index0]);
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.MergePipelineCaches(device, dstCache, srcCacheCount, (const VkPipelineCache*)local_pSrcCaches);
+    if (local_pSrcCaches != var_local_pSrcCaches)
+        delete[] local_pSrcCaches;
+    return result;
+}
+
+// Skip vkCreateGraphicsPipelines dispatch, manually generated
+
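+// CreateComputePipelines unwraps the shader module, pipeline layout, and base
+// pipeline handle in each create info, copies pipeline creation feedback from
+// the local pNext chain back to the caller's chain after the call, and wraps
+// only the pipelines that were actually created (non-null entries).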
+VkResult DispatchCreateComputePipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkComputePipelineCreateInfo*          pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+    safe_VkComputePipelineCreateInfo *local_pCreateInfos = NULL;
+    {
+        pipelineCache = layer_data->Unwrap(pipelineCache);
+        if (pCreateInfos) {
+            local_pCreateInfos = new safe_VkComputePipelineCreateInfo[createInfoCount];
+            for (uint32_t index0 = 0; index0 < createInfoCount; ++index0) {
+                local_pCreateInfos[index0].initialize(&pCreateInfos[index0]);
+                if (pCreateInfos[index0].stage.module) {
+                    local_pCreateInfos[index0].stage.module = layer_data->Unwrap(pCreateInfos[index0].stage.module);
+                }
+                if (pCreateInfos[index0].layout) {
+                    local_pCreateInfos[index0].layout = layer_data->Unwrap(pCreateInfos[index0].layout);
+                }
+                if (pCreateInfos[index0].basePipelineHandle) {
+                    local_pCreateInfos[index0].basePipelineHandle = layer_data->Unwrap(pCreateInfos[index0].basePipelineHandle);
+                }
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.CreateComputePipelines(device, pipelineCache, createInfoCount, (const VkComputePipelineCreateInfo*)local_pCreateInfos, pAllocator, pPipelines);
+    for (uint32_t i = 0; i < createInfoCount; ++i) {
+        if (pCreateInfos[i].pNext != VK_NULL_HANDLE) {
+            CopyCreatePipelineFeedbackData(local_pCreateInfos[i].pNext, pCreateInfos[i].pNext);
+        }
+    }
+
+    if (local_pCreateInfos) {
+        delete[] local_pCreateInfos;
+    }
+    {
+        for (uint32_t index0 = 0; index0 < createInfoCount; index0++) {
+            if (pPipelines[index0] != VK_NULL_HANDLE) {
+                pPipelines[index0] = layer_data->WrapNew(pPipelines[index0]);
+            }
+        }
+    }
+    return result;
+}
+
+void DispatchDestroyPipeline(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    const VkAllocationCallbacks*                pAllocator)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DestroyPipeline(device, pipeline, pAllocator);
+    uint64_t pipeline_id = reinterpret_cast<uint64_t &>(pipeline);
+    auto iter = unique_id_mapping.pop(pipeline_id);
+    if (iter != unique_id_mapping.end()) {
+        pipeline = (VkPipeline)iter->second;
+    } else {
+        pipeline = (VkPipeline)0;
+    }
+    layer_data->device_dispatch_table.DestroyPipeline(device, pipeline, pAllocator);
+
+}
+
+VkResult DispatchCreatePipelineLayout(
+    VkDevice                                    device,
+    const VkPipelineLayoutCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineLayout*                           pPipelineLayout)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout);
+    safe_VkPipelineLayoutCreateInfo var_local_pCreateInfo;
+    safe_VkPipelineLayoutCreateInfo *local_pCreateInfo = NULL;
+    {
+        if (pCreateInfo) {
+            local_pCreateInfo = &var_local_pCreateInfo;
+            local_pCreateInfo->initialize(pCreateInfo);
+            if (local_pCreateInfo->pSetLayouts) {
+                for (uint32_t index1 = 0; index1 < local_pCreateInfo->setLayoutCount; ++index1) {
+                    local_pCreateInfo->pSetLayouts[index1] = layer_data->Unwrap(local_pCreateInfo->pSetLayouts[index1]);
+                }
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.CreatePipelineLayout(device, (const VkPipelineLayoutCreateInfo*)local_pCreateInfo, pAllocator, pPipelineLayout);
+    if (VK_SUCCESS == result) {
+        *pPipelineLayout = layer_data->WrapNew(*pPipelineLayout);
+    }
+    return result;
+}
+
+void DispatchDestroyPipelineLayout(
+    VkDevice                                    device,
+    VkPipelineLayout                            pipelineLayout,
+    const VkAllocationCallbacks*                pAllocator)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DestroyPipelineLayout(device, pipelineLayout, pAllocator);
+    uint64_t pipelineLayout_id = reinterpret_cast<uint64_t &>(pipelineLayout);
+    auto iter = unique_id_mapping.pop(pipelineLayout_id);
+    if (iter != unique_id_mapping.end()) {
+        pipelineLayout = (VkPipelineLayout)iter->second;
+    } else {
+        pipelineLayout = (VkPipelineLayout)0;
+    }
+    layer_data->device_dispatch_table.DestroyPipelineLayout(device, pipelineLayout, pAllocator);
+
+}
+
+VkResult DispatchCreateSampler(
+    VkDevice                                    device,
+    const VkSamplerCreateInfo*                  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSampler*                                  pSampler)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CreateSampler(device, pCreateInfo, pAllocator, pSampler);
+    safe_VkSamplerCreateInfo var_local_pCreateInfo;
+    safe_VkSamplerCreateInfo *local_pCreateInfo = NULL;
+    {
+        if (pCreateInfo) {
+            local_pCreateInfo = &var_local_pCreateInfo;
+            local_pCreateInfo->initialize(pCreateInfo);
+            WrapPnextChainHandles(layer_data, local_pCreateInfo->pNext);
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.CreateSampler(device, (const VkSamplerCreateInfo*)local_pCreateInfo, pAllocator, pSampler);
+    if (VK_SUCCESS == result) {
+        *pSampler = layer_data->WrapNew(*pSampler);
+    }
+    return result;
+}
+
+void DispatchDestroySampler(
+    VkDevice                                    device,
+    VkSampler                                   sampler,
+    const VkAllocationCallbacks*                pAllocator)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DestroySampler(device, sampler, pAllocator);
+    uint64_t sampler_id = reinterpret_cast<uint64_t &>(sampler);
+    auto iter = unique_id_mapping.pop(sampler_id);
+    if (iter != unique_id_mapping.end()) {
+        sampler = (VkSampler)iter->second;
+    } else {
+        sampler = (VkSampler)0;
+    }
+    layer_data->device_dispatch_table.DestroySampler(device, sampler, pAllocator);
+
+}
+
+VkResult DispatchCreateDescriptorSetLayout(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorSetLayout*                      pSetLayout)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);
+    safe_VkDescriptorSetLayoutCreateInfo var_local_pCreateInfo;
+    safe_VkDescriptorSetLayoutCreateInfo *local_pCreateInfo = NULL;
+    {
+        if (pCreateInfo) {
+            local_pCreateInfo = &var_local_pCreateInfo;
+            local_pCreateInfo->initialize(pCreateInfo);
+            if (local_pCreateInfo->pBindings) {
+                for (uint32_t index1 = 0; index1 < local_pCreateInfo->bindingCount; ++index1) {
+                    if (local_pCreateInfo->pBindings[index1].pImmutableSamplers) {
+                        for (uint32_t index2 = 0; index2 < local_pCreateInfo->pBindings[index1].descriptorCount; ++index2) {
+                            local_pCreateInfo->pBindings[index1].pImmutableSamplers[index2] = layer_data->Unwrap(local_pCreateInfo->pBindings[index1].pImmutableSamplers[index2]);
+                        }
+                    }
+                }
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.CreateDescriptorSetLayout(device, (const VkDescriptorSetLayoutCreateInfo*)local_pCreateInfo, pAllocator, pSetLayout);
+    if (VK_SUCCESS == result) {
+        *pSetLayout = layer_data->WrapNew(*pSetLayout);
+    }
+    return result;
+}
+
+void DispatchDestroyDescriptorSetLayout(
+    VkDevice                                    device,
+    VkDescriptorSetLayout                       descriptorSetLayout,
+    const VkAllocationCallbacks*                pAllocator)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
+    uint64_t descriptorSetLayout_id = reinterpret_cast<uint64_t &>(descriptorSetLayout);
+    auto iter = unique_id_mapping.pop(descriptorSetLayout_id);
+    if (iter != unique_id_mapping.end()) {
+        descriptorSetLayout = (VkDescriptorSetLayout)iter->second;
+    } else {
+        descriptorSetLayout = (VkDescriptorSetLayout)0;
+    }
+    layer_data->device_dispatch_table.DestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
+
+}
+
+VkResult DispatchCreateDescriptorPool(
+    VkDevice                                    device,
+    const VkDescriptorPoolCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorPool*                           pDescriptorPool)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
+    VkResult result = layer_data->device_dispatch_table.CreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
+    if (VK_SUCCESS == result) {
+        *pDescriptorPool = layer_data->WrapNew(*pDescriptorPool);
+    }
+    return result;
+}
+
+// Skip vkDestroyDescriptorPool dispatch, manually generated
+
+// Skip vkResetDescriptorPool dispatch, manually generated
+
+// Skip vkAllocateDescriptorSets dispatch, manually generated
+
+// Skip vkFreeDescriptorSets dispatch, manually generated
+
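+// UpdateDescriptorSets unwraps every handle reachable from the write and copy
+// arrays: destination/source sets plus the samplers, image views, buffers, and
+// texel buffer views referenced by each descriptor, using heap-allocated
+// safe_* copies that are deleted after the downcall.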
+void DispatchUpdateDescriptorSets(
+    VkDevice                                    device,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites,
+    uint32_t                                    descriptorCopyCount,
+    const VkCopyDescriptorSet*                  pDescriptorCopies)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.UpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
+    safe_VkWriteDescriptorSet *local_pDescriptorWrites = NULL;
+    safe_VkCopyDescriptorSet *local_pDescriptorCopies = NULL;
+    {
+        if (pDescriptorWrites) {
+            local_pDescriptorWrites = new safe_VkWriteDescriptorSet[descriptorWriteCount];
+            for (uint32_t index0 = 0; index0 < descriptorWriteCount; ++index0) {
+                local_pDescriptorWrites[index0].initialize(&pDescriptorWrites[index0]);
+                WrapPnextChainHandles(layer_data, local_pDescriptorWrites[index0].pNext);
+                if (pDescriptorWrites[index0].dstSet) {
+                    local_pDescriptorWrites[index0].dstSet = layer_data->Unwrap(pDescriptorWrites[index0].dstSet);
+                }
+                if (local_pDescriptorWrites[index0].pImageInfo) {
+                    for (uint32_t index1 = 0; index1 < local_pDescriptorWrites[index0].descriptorCount; ++index1) {
+                        if (pDescriptorWrites[index0].pImageInfo[index1].sampler) {
+                            local_pDescriptorWrites[index0].pImageInfo[index1].sampler = layer_data->Unwrap(pDescriptorWrites[index0].pImageInfo[index1].sampler);
+                        }
+                        if (pDescriptorWrites[index0].pImageInfo[index1].imageView) {
+                            local_pDescriptorWrites[index0].pImageInfo[index1].imageView = layer_data->Unwrap(pDescriptorWrites[index0].pImageInfo[index1].imageView);
+                        }
+                    }
+                }
+                if (local_pDescriptorWrites[index0].pBufferInfo) {
+                    for (uint32_t index1 = 0; index1 < local_pDescriptorWrites[index0].descriptorCount; ++index1) {
+                        if (pDescriptorWrites[index0].pBufferInfo[index1].buffer) {
+                            local_pDescriptorWrites[index0].pBufferInfo[index1].buffer = layer_data->Unwrap(pDescriptorWrites[index0].pBufferInfo[index1].buffer);
+                        }
+                    }
+                }
+                if (local_pDescriptorWrites[index0].pTexelBufferView) {
+                    for (uint32_t index1 = 0; index1 < local_pDescriptorWrites[index0].descriptorCount; ++index1) {
+                        local_pDescriptorWrites[index0].pTexelBufferView[index1] = layer_data->Unwrap(local_pDescriptorWrites[index0].pTexelBufferView[index1]);
+                    }
+                }
+            }
+        }
+        if (pDescriptorCopies) {
+            local_pDescriptorCopies = new safe_VkCopyDescriptorSet[descriptorCopyCount];
+            for (uint32_t index0 = 0; index0 < descriptorCopyCount; ++index0) {
+                local_pDescriptorCopies[index0].initialize(&pDescriptorCopies[index0]);
+                if (pDescriptorCopies[index0].srcSet) {
+                    local_pDescriptorCopies[index0].srcSet = layer_data->Unwrap(pDescriptorCopies[index0].srcSet);
+                }
+                if (pDescriptorCopies[index0].dstSet) {
+                    local_pDescriptorCopies[index0].dstSet = layer_data->Unwrap(pDescriptorCopies[index0].dstSet);
+                }
+            }
+        }
+    }
+    layer_data->device_dispatch_table.UpdateDescriptorSets(device, descriptorWriteCount, (const VkWriteDescriptorSet*)local_pDescriptorWrites, descriptorCopyCount, (const VkCopyDescriptorSet*)local_pDescriptorCopies);
+    if (local_pDescriptorWrites) {
+        delete[] local_pDescriptorWrites;
+    }
+    if (local_pDescriptorCopies) {
+        delete[] local_pDescriptorCopies;
+    }
+}
+
+VkResult DispatchCreateFramebuffer(
+    VkDevice                                    device,
+    const VkFramebufferCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFramebuffer*                              pFramebuffer)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
+    safe_VkFramebufferCreateInfo var_local_pCreateInfo;
+    safe_VkFramebufferCreateInfo *local_pCreateInfo = NULL;
+    {
+        if (pCreateInfo) {
+            local_pCreateInfo = &var_local_pCreateInfo;
+            local_pCreateInfo->initialize(pCreateInfo);
+            if (pCreateInfo->renderPass) {
+                local_pCreateInfo->renderPass = layer_data->Unwrap(pCreateInfo->renderPass);
+            }
+            if (local_pCreateInfo->pAttachments) {
+                for (uint32_t index1 = 0; index1 < local_pCreateInfo->attachmentCount; ++index1) {
+                    local_pCreateInfo->pAttachments[index1] = layer_data->Unwrap(local_pCreateInfo->pAttachments[index1]);
+                }
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.CreateFramebuffer(device, (const VkFramebufferCreateInfo*)local_pCreateInfo, pAllocator, pFramebuffer);
+    if (VK_SUCCESS == result) {
+        *pFramebuffer = layer_data->WrapNew(*pFramebuffer);
+    }
+    return result;
+}
+
+void DispatchDestroyFramebuffer(
+    VkDevice                                    device,
+    VkFramebuffer                               framebuffer,
+    const VkAllocationCallbacks*                pAllocator)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DestroyFramebuffer(device, framebuffer, pAllocator);
+    uint64_t framebuffer_id = reinterpret_cast<uint64_t &>(framebuffer);
+    auto iter = unique_id_mapping.pop(framebuffer_id);
+    if (iter != unique_id_mapping.end()) {
+        framebuffer = (VkFramebuffer)iter->second;
+    } else {
+        framebuffer = (VkFramebuffer)0;
+    }
+    layer_data->device_dispatch_table.DestroyFramebuffer(device, framebuffer, pAllocator);
+
+}
+
+// Skip vkCreateRenderPass dispatch, manually generated
+
+// Skip vkDestroyRenderPass dispatch, manually generated
+
+void DispatchGetRenderAreaGranularity(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    VkExtent2D*                                 pGranularity)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetRenderAreaGranularity(device, renderPass, pGranularity);
+    {
+        renderPass = layer_data->Unwrap(renderPass);
+    }
+    layer_data->device_dispatch_table.GetRenderAreaGranularity(device, renderPass, pGranularity);
+
+}
+
+VkResult DispatchCreateCommandPool(
+    VkDevice                                    device,
+    const VkCommandPoolCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkCommandPool*                              pCommandPool)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
+    VkResult result = layer_data->device_dispatch_table.CreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
+    if (VK_SUCCESS == result) {
+        *pCommandPool = layer_data->WrapNew(*pCommandPool);
+    }
+    return result;
+}
+
+void DispatchDestroyCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    const VkAllocationCallbacks*                pAllocator)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DestroyCommandPool(device, commandPool, pAllocator);
+    uint64_t commandPool_id = reinterpret_cast<uint64_t &>(commandPool);
+    auto iter = unique_id_mapping.pop(commandPool_id);
+    if (iter != unique_id_mapping.end()) {
+        commandPool = (VkCommandPool)iter->second;
+    } else {
+        commandPool = (VkCommandPool)0;
+    }
+    layer_data->device_dispatch_table.DestroyCommandPool(device, commandPool, pAllocator);
+
+}
+
+VkResult DispatchResetCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolResetFlags                     flags)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.ResetCommandPool(device, commandPool, flags);
+    {
+        commandPool = layer_data->Unwrap(commandPool);
+    }
+    VkResult result = layer_data->device_dispatch_table.ResetCommandPool(device, commandPool, flags);
+
+    return result;
+}
+
+VkResult DispatchAllocateCommandBuffers(
+    VkDevice                                    device,
+    const VkCommandBufferAllocateInfo*          pAllocateInfo,
+    VkCommandBuffer*                            pCommandBuffers)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.AllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers);
+    safe_VkCommandBufferAllocateInfo var_local_pAllocateInfo;
+    safe_VkCommandBufferAllocateInfo *local_pAllocateInfo = NULL;
+    {
+        if (pAllocateInfo) {
+            local_pAllocateInfo = &var_local_pAllocateInfo;
+            local_pAllocateInfo->initialize(pAllocateInfo);
+            if (pAllocateInfo->commandPool) {
+                local_pAllocateInfo->commandPool = layer_data->Unwrap(pAllocateInfo->commandPool);
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.AllocateCommandBuffers(device, (const VkCommandBufferAllocateInfo*)local_pAllocateInfo, pCommandBuffers);
+
+    return result;
+}
+
+void DispatchFreeCommandBuffers(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.FreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
+    {
+        commandPool = layer_data->Unwrap(commandPool);
+    }
+    layer_data->device_dispatch_table.FreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
+
+}
+
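+// pBeginInfo is deep-copied into a safe_VkCommandBufferBeginInfo so the
+// renderPass/framebuffer handles inside pInheritanceInfo can be unwrapped
+// without modifying the caller's const input.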
+VkResult DispatchBeginCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    const VkCommandBufferBeginInfo*             pBeginInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.BeginCommandBuffer(commandBuffer, pBeginInfo);
+    safe_VkCommandBufferBeginInfo var_local_pBeginInfo;
+    safe_VkCommandBufferBeginInfo *local_pBeginInfo = NULL;
+    {
+        if (pBeginInfo) {
+            local_pBeginInfo = &var_local_pBeginInfo;
+            local_pBeginInfo->initialize(pBeginInfo);
+            if (local_pBeginInfo->pInheritanceInfo) {
+                if (pBeginInfo->pInheritanceInfo->renderPass) {
+                    local_pBeginInfo->pInheritanceInfo->renderPass = layer_data->Unwrap(pBeginInfo->pInheritanceInfo->renderPass);
+                }
+                if (pBeginInfo->pInheritanceInfo->framebuffer) {
+                    local_pBeginInfo->pInheritanceInfo->framebuffer = layer_data->Unwrap(pBeginInfo->pInheritanceInfo->framebuffer);
+                }
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.BeginCommandBuffer(commandBuffer, (const VkCommandBufferBeginInfo*)local_pBeginInfo);
+
+    return result;
+}
+
+VkResult DispatchEndCommandBuffer(
+    VkCommandBuffer                             commandBuffer)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    VkResult result = layer_data->device_dispatch_table.EndCommandBuffer(commandBuffer);
+
+    return result;
+}
+
+VkResult DispatchResetCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkCommandBufferResetFlags                   flags)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    VkResult result = layer_data->device_dispatch_table.ResetCommandBuffer(commandBuffer, flags);
+
+    return result;
+}
+
+void DispatchCmdBindPipeline(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipeline                                  pipeline)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
+    {
+        pipeline = layer_data->Unwrap(pipeline);
+    }
+    layer_data->device_dispatch_table.CmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
+
+}
+
+void DispatchCmdSetViewport(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewport*                           pViewports)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
+
+}
+
+void DispatchCmdSetScissor(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstScissor,
+    uint32_t                                    scissorCount,
+    const VkRect2D*                             pScissors)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
+
+}
+
+void DispatchCmdSetLineWidth(
+    VkCommandBuffer                             commandBuffer,
+    float                                       lineWidth)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdSetLineWidth(commandBuffer, lineWidth);
+
+}
+
+void DispatchCmdSetDepthBias(
+    VkCommandBuffer                             commandBuffer,
+    float                                       depthBiasConstantFactor,
+    float                                       depthBiasClamp,
+    float                                       depthBiasSlopeFactor)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdSetDepthBias(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
+
+}
+
+void DispatchCmdSetBlendConstants(
+    VkCommandBuffer                             commandBuffer,
+    const float                                 blendConstants[4])
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdSetBlendConstants(commandBuffer, blendConstants);
+
+}
+
+void DispatchCmdSetDepthBounds(
+    VkCommandBuffer                             commandBuffer,
+    float                                       minDepthBounds,
+    float                                       maxDepthBounds)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds);
+
+}
+
+void DispatchCmdSetStencilCompareMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    compareMask)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdSetStencilCompareMask(commandBuffer, faceMask, compareMask);
+
+}
+
+void DispatchCmdSetStencilWriteMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    writeMask)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
+
+}
+
+void DispatchCmdSetStencilReference(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    reference)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdSetStencilReference(commandBuffer, faceMask, reference);
+
+}
+
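+// Handle arrays are unwrapped into a stack buffer when the count is at most
+// DISPATCH_MAX_STACK_ALLOCATIONS; larger counts fall back to a heap allocation
+// that is freed after the call is forwarded.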
+void DispatchCmdBindDescriptorSets(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    firstSet,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets,
+    uint32_t                                    dynamicOffsetCount,
+    const uint32_t*                             pDynamicOffsets)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
+    VkDescriptorSet var_local_pDescriptorSets[DISPATCH_MAX_STACK_ALLOCATIONS];
+    VkDescriptorSet *local_pDescriptorSets = NULL;
+    {
+        layout = layer_data->Unwrap(layout);
+        if (pDescriptorSets) {
+            local_pDescriptorSets = descriptorSetCount > DISPATCH_MAX_STACK_ALLOCATIONS ? new VkDescriptorSet[descriptorSetCount] : var_local_pDescriptorSets;
+            for (uint32_t index0 = 0; index0 < descriptorSetCount; ++index0) {
+                local_pDescriptorSets[index0] = layer_data->Unwrap(pDescriptorSets[index0]);
+            }
+        }
+    }
+    layer_data->device_dispatch_table.CmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, (const VkDescriptorSet*)local_pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
+    if (local_pDescriptorSets != var_local_pDescriptorSets)
+        delete[] local_pDescriptorSets;
+}
+
+void DispatchCmdBindIndexBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkIndexType                                 indexType)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
+    {
+        buffer = layer_data->Unwrap(buffer);
+    }
+    layer_data->device_dispatch_table.CmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
+
+}
+
+void DispatchCmdBindVertexBuffers(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
+    VkBuffer var_local_pBuffers[DISPATCH_MAX_STACK_ALLOCATIONS];
+    VkBuffer *local_pBuffers = NULL;
+    {
+        if (pBuffers) {
+            local_pBuffers = bindingCount > DISPATCH_MAX_STACK_ALLOCATIONS ? new VkBuffer[bindingCount] : var_local_pBuffers;
+            for (uint32_t index0 = 0; index0 < bindingCount; ++index0) {
+                local_pBuffers[index0] = layer_data->Unwrap(pBuffers[index0]);
+            }
+        }
+    }
+    layer_data->device_dispatch_table.CmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, (const VkBuffer*)local_pBuffers, pOffsets);
+    if (local_pBuffers != var_local_pBuffers)
+        delete[] local_pBuffers;
+}
+
+void DispatchCmdDraw(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    vertexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstVertex,
+    uint32_t                                    firstInstance)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
+
+}
+
+void DispatchCmdDrawIndexed(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    indexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstIndex,
+    int32_t                                     vertexOffset,
+    uint32_t                                    firstInstance)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
+
+}
+
+void DispatchCmdDrawIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
+    {
+        buffer = layer_data->Unwrap(buffer);
+    }
+    layer_data->device_dispatch_table.CmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
+
+}
+
+void DispatchCmdDrawIndexedIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);
+    {
+        buffer = layer_data->Unwrap(buffer);
+    }
+    layer_data->device_dispatch_table.CmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);
+
+}
+
+void DispatchCmdDispatch(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdDispatch(commandBuffer, groupCountX, groupCountY, groupCountZ);
+
+}
+
+void DispatchCmdDispatchIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdDispatchIndirect(commandBuffer, buffer, offset);
+    {
+        buffer = layer_data->Unwrap(buffer);
+    }
+    layer_data->device_dispatch_table.CmdDispatchIndirect(commandBuffer, buffer, offset);
+
+}
+
+void DispatchCmdCopyBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferCopy*                         pRegions)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
+    {
+        srcBuffer = layer_data->Unwrap(srcBuffer);
+        dstBuffer = layer_data->Unwrap(dstBuffer);
+    }
+    layer_data->device_dispatch_table.CmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
+
+}
+
+void DispatchCmdCopyImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageCopy*                          pRegions)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+    {
+        srcImage = layer_data->Unwrap(srcImage);
+        dstImage = layer_data->Unwrap(dstImage);
+    }
+    layer_data->device_dispatch_table.CmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+
+}
+
+void DispatchCmdBlitImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageBlit*                          pRegions,
+    VkFilter                                    filter)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
+    {
+        srcImage = layer_data->Unwrap(srcImage);
+        dstImage = layer_data->Unwrap(dstImage);
+    }
+    layer_data->device_dispatch_table.CmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
+
+}
+
+void DispatchCmdCopyBufferToImage(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
+    {
+        srcBuffer = layer_data->Unwrap(srcBuffer);
+        dstImage = layer_data->Unwrap(dstImage);
+    }
+    layer_data->device_dispatch_table.CmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
+
+}
+
+void DispatchCmdCopyImageToBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
+    {
+        srcImage = layer_data->Unwrap(srcImage);
+        dstBuffer = layer_data->Unwrap(dstBuffer);
+    }
+    layer_data->device_dispatch_table.CmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
+
+}
+
+void DispatchCmdUpdateBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                dataSize,
+    const void*                                 pData)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
+    {
+        dstBuffer = layer_data->Unwrap(dstBuffer);
+    }
+    layer_data->device_dispatch_table.CmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
+
+}
+
+void DispatchCmdFillBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                size,
+    uint32_t                                    data)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
+    {
+        dstBuffer = layer_data->Unwrap(dstBuffer);
+    }
+    layer_data->device_dispatch_table.CmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
+
+}
+
+void DispatchCmdClearColorImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearColorValue*                    pColor,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
+    {
+        image = layer_data->Unwrap(image);
+    }
+    layer_data->device_dispatch_table.CmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
+
+}
+
+void DispatchCmdClearDepthStencilImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearDepthStencilValue*             pDepthStencil,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
+    {
+        image = layer_data->Unwrap(image);
+    }
+    layer_data->device_dispatch_table.CmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
+
+}
+
+void DispatchCmdClearAttachments(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    attachmentCount,
+    const VkClearAttachment*                    pAttachments,
+    uint32_t                                    rectCount,
+    const VkClearRect*                          pRects)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
+
+}
+
+void DispatchCmdResolveImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageResolve*                       pRegions)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+    {
+        srcImage = layer_data->Unwrap(srcImage);
+        dstImage = layer_data->Unwrap(dstImage);
+    }
+    layer_data->device_dispatch_table.CmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+
+}
+
+void DispatchCmdSetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdSetEvent(commandBuffer, event, stageMask);
+    {
+        event = layer_data->Unwrap(event);
+    }
+    layer_data->device_dispatch_table.CmdSetEvent(commandBuffer, event, stageMask);
+
+}
+
+void DispatchCmdResetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdResetEvent(commandBuffer, event, stageMask);
+    {
+        event = layer_data->Unwrap(event);
+    }
+    layer_data->device_dispatch_table.CmdResetEvent(commandBuffer, event, stageMask);
+
+}
+
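+// Events are unwrapped like other handle arrays, and the buffer/image memory
+// barriers are deep-copied into safe_Vk* structs so their embedded buffer and
+// image handles can be unwrapped; the copies are deleted after dispatch.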
+void DispatchCmdWaitEvents(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    eventCount,
+    const VkEvent*                              pEvents,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdWaitEvents(commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+    VkEvent var_local_pEvents[DISPATCH_MAX_STACK_ALLOCATIONS];
+    VkEvent *local_pEvents = NULL;
+    safe_VkBufferMemoryBarrier *local_pBufferMemoryBarriers = NULL;
+    safe_VkImageMemoryBarrier *local_pImageMemoryBarriers = NULL;
+    {
+        if (pEvents) {
+            local_pEvents = eventCount > DISPATCH_MAX_STACK_ALLOCATIONS ? new VkEvent[eventCount] : var_local_pEvents;
+            for (uint32_t index0 = 0; index0 < eventCount; ++index0) {
+                local_pEvents[index0] = layer_data->Unwrap(pEvents[index0]);
+            }
+        }
+        if (pBufferMemoryBarriers) {
+            local_pBufferMemoryBarriers = new safe_VkBufferMemoryBarrier[bufferMemoryBarrierCount];
+            for (uint32_t index0 = 0; index0 < bufferMemoryBarrierCount; ++index0) {
+                local_pBufferMemoryBarriers[index0].initialize(&pBufferMemoryBarriers[index0]);
+                if (pBufferMemoryBarriers[index0].buffer) {
+                    local_pBufferMemoryBarriers[index0].buffer = layer_data->Unwrap(pBufferMemoryBarriers[index0].buffer);
+                }
+            }
+        }
+        if (pImageMemoryBarriers) {
+            local_pImageMemoryBarriers = new safe_VkImageMemoryBarrier[imageMemoryBarrierCount];
+            for (uint32_t index0 = 0; index0 < imageMemoryBarrierCount; ++index0) {
+                local_pImageMemoryBarriers[index0].initialize(&pImageMemoryBarriers[index0]);
+                if (pImageMemoryBarriers[index0].image) {
+                    local_pImageMemoryBarriers[index0].image = layer_data->Unwrap(pImageMemoryBarriers[index0].image);
+                }
+            }
+        }
+    }
+    layer_data->device_dispatch_table.CmdWaitEvents(commandBuffer, eventCount, (const VkEvent*)local_pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, (const VkBufferMemoryBarrier*)local_pBufferMemoryBarriers, imageMemoryBarrierCount, (const VkImageMemoryBarrier*)local_pImageMemoryBarriers);
+    if (local_pEvents != var_local_pEvents)
+        delete[] local_pEvents;
+    if (local_pBufferMemoryBarriers) {
+        delete[] local_pBufferMemoryBarriers;
+    }
+    if (local_pImageMemoryBarriers) {
+        delete[] local_pImageMemoryBarriers;
+    }
+}
+
+void DispatchCmdPipelineBarrier(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    VkDependencyFlags                           dependencyFlags,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+    safe_VkBufferMemoryBarrier *local_pBufferMemoryBarriers = NULL;
+    safe_VkImageMemoryBarrier *local_pImageMemoryBarriers = NULL;
+    {
+        if (pBufferMemoryBarriers) {
+            local_pBufferMemoryBarriers = new safe_VkBufferMemoryBarrier[bufferMemoryBarrierCount];
+            for (uint32_t index0 = 0; index0 < bufferMemoryBarrierCount; ++index0) {
+                local_pBufferMemoryBarriers[index0].initialize(&pBufferMemoryBarriers[index0]);
+                if (pBufferMemoryBarriers[index0].buffer) {
+                    local_pBufferMemoryBarriers[index0].buffer = layer_data->Unwrap(pBufferMemoryBarriers[index0].buffer);
+                }
+            }
+        }
+        if (pImageMemoryBarriers) {
+            local_pImageMemoryBarriers = new safe_VkImageMemoryBarrier[imageMemoryBarrierCount];
+            for (uint32_t index0 = 0; index0 < imageMemoryBarrierCount; ++index0) {
+                local_pImageMemoryBarriers[index0].initialize(&pImageMemoryBarriers[index0]);
+                if (pImageMemoryBarriers[index0].image) {
+                    local_pImageMemoryBarriers[index0].image = layer_data->Unwrap(pImageMemoryBarriers[index0].image);
+                }
+            }
+        }
+    }
+    layer_data->device_dispatch_table.CmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, (const VkBufferMemoryBarrier*)local_pBufferMemoryBarriers, imageMemoryBarrierCount, (const VkImageMemoryBarrier*)local_pImageMemoryBarriers);
+    if (local_pBufferMemoryBarriers) {
+        delete[] local_pBufferMemoryBarriers;
+    }
+    if (local_pImageMemoryBarriers) {
+        delete[] local_pImageMemoryBarriers;
+    }
+}
+
+void DispatchCmdBeginQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdBeginQuery(commandBuffer, queryPool, query, flags);
+    {
+        queryPool = layer_data->Unwrap(queryPool);
+    }
+    layer_data->device_dispatch_table.CmdBeginQuery(commandBuffer, queryPool, query, flags);
+
+}
+
+void DispatchCmdEndQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdEndQuery(commandBuffer, queryPool, query);
+    {
+        queryPool = layer_data->Unwrap(queryPool);
+    }
+    layer_data->device_dispatch_table.CmdEndQuery(commandBuffer, queryPool, query);
+
+}
+
+void DispatchCmdResetQueryPool(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
+    {
+        queryPool = layer_data->Unwrap(queryPool);
+    }
+    layer_data->device_dispatch_table.CmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
+
+}
+
+void DispatchCmdWriteTimestamp(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query);
+    {
+        queryPool = layer_data->Unwrap(queryPool);
+    }
+    layer_data->device_dispatch_table.CmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query);
+
+}
+
+void DispatchCmdCopyQueryPoolResults(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);
+    {
+        queryPool = layer_data->Unwrap(queryPool);
+        dstBuffer = layer_data->Unwrap(dstBuffer);
+    }
+    layer_data->device_dispatch_table.CmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);
+
+}
+
+void DispatchCmdPushConstants(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineLayout                            layout,
+    VkShaderStageFlags                          stageFlags,
+    uint32_t                                    offset,
+    uint32_t                                    size,
+    const void*                                 pValues)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues);
+    {
+        layout = layer_data->Unwrap(layout);
+    }
+    layer_data->device_dispatch_table.CmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues);
+
+}
+
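+// The render pass begin info is deep-copied so its renderPass and framebuffer
+// handles, plus any handles in its pNext chain (via WrapPnextChainHandles),
+// can be unwrapped before dispatch.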
+void DispatchCmdBeginRenderPass(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    VkSubpassContents                           contents)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
+    safe_VkRenderPassBeginInfo var_local_pRenderPassBegin;
+    safe_VkRenderPassBeginInfo *local_pRenderPassBegin = NULL;
+    {
+        if (pRenderPassBegin) {
+            local_pRenderPassBegin = &var_local_pRenderPassBegin;
+            local_pRenderPassBegin->initialize(pRenderPassBegin);
+            if (pRenderPassBegin->renderPass) {
+                local_pRenderPassBegin->renderPass = layer_data->Unwrap(pRenderPassBegin->renderPass);
+            }
+            if (pRenderPassBegin->framebuffer) {
+                local_pRenderPassBegin->framebuffer = layer_data->Unwrap(pRenderPassBegin->framebuffer);
+            }
+            WrapPnextChainHandles(layer_data, local_pRenderPassBegin->pNext);
+        }
+    }
+    layer_data->device_dispatch_table.CmdBeginRenderPass(commandBuffer, (const VkRenderPassBeginInfo*)local_pRenderPassBegin, contents);
+
+}
+
+void DispatchCmdNextSubpass(
+    VkCommandBuffer                             commandBuffer,
+    VkSubpassContents                           contents)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdNextSubpass(commandBuffer, contents);
+
+}
+
+void DispatchCmdEndRenderPass(
+    VkCommandBuffer                             commandBuffer)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdEndRenderPass(commandBuffer);
+
+}
+
+void DispatchCmdExecuteCommands(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers);
+
+}
+
+// Skip vkEnumerateInstanceVersion dispatch, manually generated
+
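+// Each bind info is deep-copied so the buffer and memory handles it contains
+// can be unwrapped; the temporary array is deleted once the call returns.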
+VkResult DispatchBindBufferMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.BindBufferMemory2(device, bindInfoCount, pBindInfos);
+    safe_VkBindBufferMemoryInfo *local_pBindInfos = NULL;
+    {
+        if (pBindInfos) {
+            local_pBindInfos = new safe_VkBindBufferMemoryInfo[bindInfoCount];
+            for (uint32_t index0 = 0; index0 < bindInfoCount; ++index0) {
+                local_pBindInfos[index0].initialize(&pBindInfos[index0]);
+                if (pBindInfos[index0].buffer) {
+                    local_pBindInfos[index0].buffer = layer_data->Unwrap(pBindInfos[index0].buffer);
+                }
+                if (pBindInfos[index0].memory) {
+                    local_pBindInfos[index0].memory = layer_data->Unwrap(pBindInfos[index0].memory);
+                }
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.BindBufferMemory2(device, bindInfoCount, (const VkBindBufferMemoryInfo*)local_pBindInfos);
+    if (local_pBindInfos) {
+        delete[] local_pBindInfos;
+    }
+    return result;
+}
+
+VkResult DispatchBindImageMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.BindImageMemory2(device, bindInfoCount, pBindInfos);
+    safe_VkBindImageMemoryInfo *local_pBindInfos = NULL;
+    {
+        if (pBindInfos) {
+            local_pBindInfos = new safe_VkBindImageMemoryInfo[bindInfoCount];
+            for (uint32_t index0 = 0; index0 < bindInfoCount; ++index0) {
+                local_pBindInfos[index0].initialize(&pBindInfos[index0]);
+                WrapPnextChainHandles(layer_data, local_pBindInfos[index0].pNext);
+                if (pBindInfos[index0].image) {
+                    local_pBindInfos[index0].image = layer_data->Unwrap(pBindInfos[index0].image);
+                }
+                if (pBindInfos[index0].memory) {
+                    local_pBindInfos[index0].memory = layer_data->Unwrap(pBindInfos[index0].memory);
+                }
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.BindImageMemory2(device, bindInfoCount, (const VkBindImageMemoryInfo*)local_pBindInfos);
+    if (local_pBindInfos) {
+        delete[] local_pBindInfos;
+    }
+    return result;
+}
+
+void DispatchGetDeviceGroupPeerMemoryFeatures(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    layer_data->device_dispatch_table.GetDeviceGroupPeerMemoryFeatures(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
+
+}
+
+void DispatchCmdSetDeviceMask(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdSetDeviceMask(commandBuffer, deviceMask);
+
+}
+
+void DispatchCmdDispatchBase(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdDispatchBase(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
+
+}
+
+VkResult DispatchEnumeratePhysicalDeviceGroups(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    VkResult result = layer_data->instance_dispatch_table.EnumeratePhysicalDeviceGroups(instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
+
+    return result;
+}
+
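+// Query structs that carry a handle (pInfo->image here) are copied into a
+// stack-local safe struct so the handle can be unwrapped without touching the
+// caller's input.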
+void DispatchGetImageMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetImageMemoryRequirements2(device, pInfo, pMemoryRequirements);
+    safe_VkImageMemoryRequirementsInfo2 var_local_pInfo;
+    safe_VkImageMemoryRequirementsInfo2 *local_pInfo = NULL;
+    {
+        if (pInfo) {
+            local_pInfo = &var_local_pInfo;
+            local_pInfo->initialize(pInfo);
+            if (pInfo->image) {
+                local_pInfo->image = layer_data->Unwrap(pInfo->image);
+            }
+        }
+    }
+    layer_data->device_dispatch_table.GetImageMemoryRequirements2(device, (const VkImageMemoryRequirementsInfo2*)local_pInfo, pMemoryRequirements);
+
+}
+
+void DispatchGetBufferMemoryRequirements2(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetBufferMemoryRequirements2(device, pInfo, pMemoryRequirements);
+    safe_VkBufferMemoryRequirementsInfo2 var_local_pInfo;
+    safe_VkBufferMemoryRequirementsInfo2 *local_pInfo = NULL;
+    {
+        if (pInfo) {
+            local_pInfo = &var_local_pInfo;
+            local_pInfo->initialize(pInfo);
+            if (pInfo->buffer) {
+                local_pInfo->buffer = layer_data->Unwrap(pInfo->buffer);
+            }
+        }
+    }
+    layer_data->device_dispatch_table.GetBufferMemoryRequirements2(device, (const VkBufferMemoryRequirementsInfo2*)local_pInfo, pMemoryRequirements);
+
+}
+
+void DispatchGetImageSparseMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetImageSparseMemoryRequirements2(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
+    safe_VkImageSparseMemoryRequirementsInfo2 var_local_pInfo;
+    safe_VkImageSparseMemoryRequirementsInfo2 *local_pInfo = NULL;
+    {
+        if (pInfo) {
+            local_pInfo = &var_local_pInfo;
+            local_pInfo->initialize(pInfo);
+            if (pInfo->image) {
+                local_pInfo->image = layer_data->Unwrap(pInfo->image);
+            }
+        }
+    }
+    layer_data->device_dispatch_table.GetImageSparseMemoryRequirements2(device, (const VkImageSparseMemoryRequirementsInfo2*)local_pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
+
+}
+
+void DispatchGetPhysicalDeviceFeatures2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures2*                  pFeatures)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    layer_data->instance_dispatch_table.GetPhysicalDeviceFeatures2(physicalDevice, pFeatures);
+
+}
+
+void DispatchGetPhysicalDeviceProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties2*                pProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    layer_data->instance_dispatch_table.GetPhysicalDeviceProperties2(physicalDevice, pProperties);
+
+}
+
+void DispatchGetPhysicalDeviceFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties2*                        pFormatProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    layer_data->instance_dispatch_table.GetPhysicalDeviceFormatProperties2(physicalDevice, format, pFormatProperties);
+
+}
+
+VkResult DispatchGetPhysicalDeviceImageFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceImageFormatInfo2*     pImageFormatInfo,
+    VkImageFormatProperties2*                   pImageFormatProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.GetPhysicalDeviceImageFormatProperties2(physicalDevice, pImageFormatInfo, pImageFormatProperties);
+    safe_VkPhysicalDeviceImageFormatInfo2 var_local_pImageFormatInfo;
+    safe_VkPhysicalDeviceImageFormatInfo2 *local_pImageFormatInfo = NULL;
+    {
+        if (pImageFormatInfo) {
+            local_pImageFormatInfo = &var_local_pImageFormatInfo;
+            local_pImageFormatInfo->initialize(pImageFormatInfo);
+            WrapPnextChainHandles(layer_data, local_pImageFormatInfo->pNext);
+        }
+    }
+    VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceImageFormatProperties2(physicalDevice, (const VkPhysicalDeviceImageFormatInfo2*)local_pImageFormatInfo, pImageFormatProperties);
+
+    return result;
+}
+
+void DispatchGetPhysicalDeviceQueueFamilyProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties2*                   pQueueFamilyProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    layer_data->instance_dispatch_table.GetPhysicalDeviceQueueFamilyProperties2(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
+
+}
+
+void DispatchGetPhysicalDeviceMemoryProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties2*          pMemoryProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    layer_data->instance_dispatch_table.GetPhysicalDeviceMemoryProperties2(physicalDevice, pMemoryProperties);
+
+}
+
+void DispatchGetPhysicalDeviceSparseImageFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties2*             pProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    layer_data->instance_dispatch_table.GetPhysicalDeviceSparseImageFormatProperties2(physicalDevice, pFormatInfo, pPropertyCount, pProperties);
+
+}
+
+void DispatchTrimCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.TrimCommandPool(device, commandPool, flags);
+    {
+        commandPool = layer_data->Unwrap(commandPool);
+    }
+    layer_data->device_dispatch_table.TrimCommandPool(device, commandPool, flags);
+
+}
+
+void DispatchGetDeviceQueue2(
+    VkDevice                                    device,
+    const VkDeviceQueueInfo2*                   pQueueInfo,
+    VkQueue*                                    pQueue)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    layer_data->device_dispatch_table.GetDeviceQueue2(device, pQueueInfo, pQueue);
+
+}
+
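+// The create info's pNext chain is unwrapped before dispatch, and the new
+// conversion handle is wrapped with WrapNew() on success.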
+VkResult DispatchCreateSamplerYcbcrConversion(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CreateSamplerYcbcrConversion(device, pCreateInfo, pAllocator, pYcbcrConversion);
+    safe_VkSamplerYcbcrConversionCreateInfo var_local_pCreateInfo;
+    safe_VkSamplerYcbcrConversionCreateInfo *local_pCreateInfo = NULL;
+    {
+        if (pCreateInfo) {
+            local_pCreateInfo = &var_local_pCreateInfo;
+            local_pCreateInfo->initialize(pCreateInfo);
+            WrapPnextChainHandles(layer_data, local_pCreateInfo->pNext);
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.CreateSamplerYcbcrConversion(device, (const VkSamplerYcbcrConversionCreateInfo*)local_pCreateInfo, pAllocator, pYcbcrConversion);
+    if (VK_SUCCESS == result) {
+        *pYcbcrConversion = layer_data->WrapNew(*pYcbcrConversion);
+    }
+    return result;
+}
+
+void DispatchDestroySamplerYcbcrConversion(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DestroySamplerYcbcrConversion(device, ycbcrConversion, pAllocator);
+    uint64_t ycbcrConversion_id = reinterpret_cast<uint64_t &>(ycbcrConversion);
+    auto iter = unique_id_mapping.pop(ycbcrConversion_id);
+    if (iter != unique_id_mapping.end()) {
+        ycbcrConversion = (VkSamplerYcbcrConversion)iter->second;
+    } else {
+        ycbcrConversion = (VkSamplerYcbcrConversion)0;
+    }
+    layer_data->device_dispatch_table.DestroySamplerYcbcrConversion(device, ycbcrConversion, pAllocator);
+
+}
+
+// Skip vkCreateDescriptorUpdateTemplate dispatch, manually generated
+
+// Skip vkDestroyDescriptorUpdateTemplate dispatch, manually generated
+
+// Skip vkUpdateDescriptorSetWithTemplate dispatch, manually generated
+
+void DispatchGetPhysicalDeviceExternalBufferProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalBufferInfo*   pExternalBufferInfo,
+    VkExternalBufferProperties*                 pExternalBufferProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    layer_data->instance_dispatch_table.GetPhysicalDeviceExternalBufferProperties(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
+
+}
+
+void DispatchGetPhysicalDeviceExternalFenceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalFenceInfo*    pExternalFenceInfo,
+    VkExternalFenceProperties*                  pExternalFenceProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    layer_data->instance_dispatch_table.GetPhysicalDeviceExternalFenceProperties(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
+
+}
+
+void DispatchGetPhysicalDeviceExternalSemaphoreProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
+    VkExternalSemaphoreProperties*              pExternalSemaphoreProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    layer_data->instance_dispatch_table.GetPhysicalDeviceExternalSemaphoreProperties(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
+
+}
+
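+// Immutable sampler handles nested inside each binding of the copied create
+// info are unwrapped in place before the query is forwarded.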
+void DispatchGetDescriptorSetLayoutSupport(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetDescriptorSetLayoutSupport(device, pCreateInfo, pSupport);
+    safe_VkDescriptorSetLayoutCreateInfo var_local_pCreateInfo;
+    safe_VkDescriptorSetLayoutCreateInfo *local_pCreateInfo = NULL;
+    {
+        if (pCreateInfo) {
+            local_pCreateInfo = &var_local_pCreateInfo;
+            local_pCreateInfo->initialize(pCreateInfo);
+            if (local_pCreateInfo->pBindings) {
+                for (uint32_t index1 = 0; index1 < local_pCreateInfo->bindingCount; ++index1) {
+                    if (local_pCreateInfo->pBindings[index1].pImmutableSamplers) {
+                        for (uint32_t index2 = 0; index2 < local_pCreateInfo->pBindings[index1].descriptorCount; ++index2) {
+                            local_pCreateInfo->pBindings[index1].pImmutableSamplers[index2] = layer_data->Unwrap(local_pCreateInfo->pBindings[index1].pImmutableSamplers[index2]);
+                        }
+                    }
+                }
+            }
+        }
+    }
+    layer_data->device_dispatch_table.GetDescriptorSetLayoutSupport(device, (const VkDescriptorSetLayoutCreateInfo*)local_pCreateInfo, pSupport);
+
+}
+
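+// Instance-level handles such as VkSurfaceKHR use the same destroy pattern:
+// pop the wrapped ID from unique_id_mapping and forward the underlying handle
+// through the instance dispatch table.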
+void DispatchDestroySurfaceKHR(
+    VkInstance                                  instance,
+    VkSurfaceKHR                                surface,
+    const VkAllocationCallbacks*                pAllocator)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.DestroySurfaceKHR(instance, surface, pAllocator);
+    uint64_t surface_id = reinterpret_cast<uint64_t &>(surface);
+    auto iter = unique_id_mapping.pop(surface_id);
+    if (iter != unique_id_mapping.end()) {
+        surface = (VkSurfaceKHR)iter->second;
+    } else {
+        surface = (VkSurfaceKHR)0;
+    }
+    layer_data->instance_dispatch_table.DestroySurfaceKHR(instance, surface, pAllocator);
+
+}
+
+VkResult DispatchGetPhysicalDeviceSurfaceSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    VkSurfaceKHR                                surface,
+    VkBool32*                                   pSupported)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.GetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported);
+    {
+        surface = layer_data->Unwrap(surface);
+    }
+    VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported);
+
+    return result;
+}
+
+VkResult DispatchGetPhysicalDeviceSurfaceCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilitiesKHR*                   pSurfaceCapabilities)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities);
+    {
+        surface = layer_data->Unwrap(surface);
+    }
+    VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities);
+
+    return result;
+}
+
+VkResult DispatchGetPhysicalDeviceSurfaceFormatsKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormatKHR*                         pSurfaceFormats)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.GetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);
+    {
+        surface = layer_data->Unwrap(surface);
+    }
+    VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);
+
+    return result;
+}
+
+VkResult DispatchGetPhysicalDeviceSurfacePresentModesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.GetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes);
+    {
+        surface = layer_data->Unwrap(surface);
+    }
+    VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes);
+
+    return result;
+}
+
+// Skip vkCreateSwapchainKHR dispatch, manually generated
+
+// Skip vkDestroySwapchainKHR dispatch, manually generated
+
+// Skip vkGetSwapchainImagesKHR dispatch, manually generated
+
+VkResult DispatchAcquireNextImageKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint64_t                                    timeout,
+    VkSemaphore                                 semaphore,
+    VkFence                                     fence,
+    uint32_t*                                   pImageIndex)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.AcquireNextImageKHR(device, swapchain, timeout, semaphore, fence, pImageIndex);
+    {
+        swapchain = layer_data->Unwrap(swapchain);
+        semaphore = layer_data->Unwrap(semaphore);
+        fence = layer_data->Unwrap(fence);
+    }
+    VkResult result = layer_data->device_dispatch_table.AcquireNextImageKHR(device, swapchain, timeout, semaphore, fence, pImageIndex);
+
+    return result;
+}
+
+// Skip vkQueuePresentKHR dispatch, manually generated
+
+VkResult DispatchGetDeviceGroupPresentCapabilitiesKHR(
+    VkDevice                                    device,
+    VkDeviceGroupPresentCapabilitiesKHR*        pDeviceGroupPresentCapabilities)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    VkResult result = layer_data->device_dispatch_table.GetDeviceGroupPresentCapabilitiesKHR(device, pDeviceGroupPresentCapabilities);
+
+    return result;
+}
+
+VkResult DispatchGetDeviceGroupSurfacePresentModesKHR(
+    VkDevice                                    device,
+    VkSurfaceKHR                                surface,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetDeviceGroupSurfacePresentModesKHR(device, surface, pModes);
+    {
+        surface = layer_data->Unwrap(surface);
+    }
+    VkResult result = layer_data->device_dispatch_table.GetDeviceGroupSurfacePresentModesKHR(device, surface, pModes);
+
+    return result;
+}
+
+VkResult DispatchGetPhysicalDevicePresentRectanglesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pRectCount,
+    VkRect2D*                                   pRects)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.GetPhysicalDevicePresentRectanglesKHR(physicalDevice, surface, pRectCount, pRects);
+    {
+        surface = layer_data->Unwrap(surface);
+    }
+    VkResult result = layer_data->instance_dispatch_table.GetPhysicalDevicePresentRectanglesKHR(physicalDevice, surface, pRectCount, pRects);
+
+    return result;
+}
+
+VkResult DispatchAcquireNextImage2KHR(
+    VkDevice                                    device,
+    const VkAcquireNextImageInfoKHR*            pAcquireInfo,
+    uint32_t*                                   pImageIndex)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.AcquireNextImage2KHR(device, pAcquireInfo, pImageIndex);
+    safe_VkAcquireNextImageInfoKHR var_local_pAcquireInfo;
+    safe_VkAcquireNextImageInfoKHR *local_pAcquireInfo = NULL;
+    {
+        if (pAcquireInfo) {
+            local_pAcquireInfo = &var_local_pAcquireInfo;
+            local_pAcquireInfo->initialize(pAcquireInfo);
+            if (pAcquireInfo->swapchain) {
+                local_pAcquireInfo->swapchain = layer_data->Unwrap(pAcquireInfo->swapchain);
+            }
+            if (pAcquireInfo->semaphore) {
+                local_pAcquireInfo->semaphore = layer_data->Unwrap(pAcquireInfo->semaphore);
+            }
+            if (pAcquireInfo->fence) {
+                local_pAcquireInfo->fence = layer_data->Unwrap(pAcquireInfo->fence);
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.AcquireNextImage2KHR(device, (const VkAcquireNextImageInfoKHR*)local_pAcquireInfo, pImageIndex);
+
+    return result;
+}
+
+// Skip vkGetPhysicalDeviceDisplayPropertiesKHR dispatch, manually generated
+
+// Skip vkGetPhysicalDeviceDisplayPlanePropertiesKHR dispatch, manually generated
+
+// Skip vkGetDisplayPlaneSupportedDisplaysKHR dispatch, manually generated
+
+// Skip vkGetDisplayModePropertiesKHR dispatch, manually generated
+
+VkResult DispatchCreateDisplayModeKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    const VkDisplayModeCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDisplayModeKHR*                           pMode)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.CreateDisplayModeKHR(physicalDevice, display, pCreateInfo, pAllocator, pMode);
+    {
+        display = layer_data->Unwrap(display);
+    }
+    VkResult result = layer_data->instance_dispatch_table.CreateDisplayModeKHR(physicalDevice, display, pCreateInfo, pAllocator, pMode);
+    if (VK_SUCCESS == result) {
+        *pMode = layer_data->WrapNew(*pMode);
+    }
+    return result;
+}
+
+VkResult DispatchGetDisplayPlaneCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayModeKHR                            mode,
+    uint32_t                                    planeIndex,
+    VkDisplayPlaneCapabilitiesKHR*              pCapabilities)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.GetDisplayPlaneCapabilitiesKHR(physicalDevice, mode, planeIndex, pCapabilities);
+    {
+        mode = layer_data->Unwrap(mode);
+    }
+    VkResult result = layer_data->instance_dispatch_table.GetDisplayPlaneCapabilitiesKHR(physicalDevice, mode, planeIndex, pCapabilities);
+
+    return result;
+}
+
+VkResult DispatchCreateDisplayPlaneSurfaceKHR(
+    VkInstance                                  instance,
+    const VkDisplaySurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.CreateDisplayPlaneSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+    safe_VkDisplaySurfaceCreateInfoKHR var_local_pCreateInfo;
+    safe_VkDisplaySurfaceCreateInfoKHR *local_pCreateInfo = NULL;
+    {
+        if (pCreateInfo) {
+            local_pCreateInfo = &var_local_pCreateInfo;
+            local_pCreateInfo->initialize(pCreateInfo);
+            if (pCreateInfo->displayMode) {
+                local_pCreateInfo->displayMode = layer_data->Unwrap(pCreateInfo->displayMode);
+            }
+        }
+    }
+    VkResult result = layer_data->instance_dispatch_table.CreateDisplayPlaneSurfaceKHR(instance, (const VkDisplaySurfaceCreateInfoKHR*)local_pCreateInfo, pAllocator, pSurface);
+    if (VK_SUCCESS == result) {
+        *pSurface = layer_data->WrapNew(*pSurface);
+    }
+    return result;
+}
+
+// Skip vkCreateSharedSwapchainsKHR dispatch, manually generated
+
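+// Platform-specific surface creation follows: each Create*SurfaceKHR passes
+// pCreateInfo through untouched and, on VK_SUCCESS, replaces *pSurface with a
+// wrapped unique handle via WrapNew. The *PresentationSupportKHR queries are
+// plain pass-throughs. Each block is guarded by its VK_USE_PLATFORM_* macro.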
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+
+VkResult DispatchCreateXlibSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXlibSurfaceCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.CreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+    VkResult result = layer_data->instance_dispatch_table.CreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+    if (VK_SUCCESS == result) {
+        *pSurface = layer_data->WrapNew(*pSurface);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_XLIB_KHR
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+
+VkBool32 DispatchGetPhysicalDeviceXlibPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    Display*                                    dpy,
+    VisualID                                    visualID)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    VkBool32 result = layer_data->instance_dispatch_table.GetPhysicalDeviceXlibPresentationSupportKHR(physicalDevice, queueFamilyIndex, dpy, visualID);
+
+    return result;
+}
+#endif // VK_USE_PLATFORM_XLIB_KHR
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+
+VkResult DispatchCreateXcbSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXcbSurfaceCreateInfoKHR*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.CreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+    VkResult result = layer_data->instance_dispatch_table.CreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+    if (VK_SUCCESS == result) {
+        *pSurface = layer_data->WrapNew(*pSurface);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_XCB_KHR
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+
+VkBool32 DispatchGetPhysicalDeviceXcbPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    xcb_connection_t*                           connection,
+    xcb_visualid_t                              visual_id)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    VkBool32 result = layer_data->instance_dispatch_table.GetPhysicalDeviceXcbPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection, visual_id);
+
+    return result;
+}
+#endif // VK_USE_PLATFORM_XCB_KHR
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+
+VkResult DispatchCreateWaylandSurfaceKHR(
+    VkInstance                                  instance,
+    const VkWaylandSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.CreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+    VkResult result = layer_data->instance_dispatch_table.CreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+    if (VK_SUCCESS == result) {
+        *pSurface = layer_data->WrapNew(*pSurface);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+
+VkBool32 DispatchGetPhysicalDeviceWaylandPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    struct wl_display*                          display)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    VkBool32 result = layer_data->instance_dispatch_table.GetPhysicalDeviceWaylandPresentationSupportKHR(physicalDevice, queueFamilyIndex, display);
+
+    return result;
+}
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+VkResult DispatchCreateAndroidSurfaceKHR(
+    VkInstance                                  instance,
+    const VkAndroidSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.CreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+    VkResult result = layer_data->instance_dispatch_table.CreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+    if (VK_SUCCESS == result) {
+        *pSurface = layer_data->WrapNew(*pSurface);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+VkResult DispatchCreateWin32SurfaceKHR(
+    VkInstance                                  instance,
+    const VkWin32SurfaceCreateInfoKHR*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.CreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+    VkResult result = layer_data->instance_dispatch_table.CreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
+    if (VK_SUCCESS == result) {
+        *pSurface = layer_data->WrapNew(*pSurface);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+VkBool32 DispatchGetPhysicalDeviceWin32PresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    VkBool32 result = layer_data->instance_dispatch_table.GetPhysicalDeviceWin32PresentationSupportKHR(physicalDevice, queueFamilyIndex);
+
+    return result;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+void DispatchGetPhysicalDeviceFeatures2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures2*                  pFeatures)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    layer_data->instance_dispatch_table.GetPhysicalDeviceFeatures2KHR(physicalDevice, pFeatures);
+
+}
+
+void DispatchGetPhysicalDeviceProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties2*                pProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    layer_data->instance_dispatch_table.GetPhysicalDeviceProperties2KHR(physicalDevice, pProperties);
+
+}
+
+void DispatchGetPhysicalDeviceFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties2*                        pFormatProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    layer_data->instance_dispatch_table.GetPhysicalDeviceFormatProperties2KHR(physicalDevice, format, pFormatProperties);
+
+}
+
+VkResult DispatchGetPhysicalDeviceImageFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceImageFormatInfo2*     pImageFormatInfo,
+    VkImageFormatProperties2*                   pImageFormatProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.GetPhysicalDeviceImageFormatProperties2KHR(physicalDevice, pImageFormatInfo, pImageFormatProperties);
+    safe_VkPhysicalDeviceImageFormatInfo2 var_local_pImageFormatInfo;
+    safe_VkPhysicalDeviceImageFormatInfo2 *local_pImageFormatInfo = NULL;
+    {
+        if (pImageFormatInfo) {
+            local_pImageFormatInfo = &var_local_pImageFormatInfo;
+            local_pImageFormatInfo->initialize(pImageFormatInfo);
+            WrapPnextChainHandles(layer_data, local_pImageFormatInfo->pNext);
+        }
+    }
+    VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceImageFormatProperties2KHR(physicalDevice, (const VkPhysicalDeviceImageFormatInfo2*)local_pImageFormatInfo, pImageFormatProperties);
+
+    return result;
+}
+
+void DispatchGetPhysicalDeviceQueueFamilyProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties2*                   pQueueFamilyProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    layer_data->instance_dispatch_table.GetPhysicalDeviceQueueFamilyProperties2KHR(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
+
+}
+
+void DispatchGetPhysicalDeviceMemoryProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties2*          pMemoryProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    layer_data->instance_dispatch_table.GetPhysicalDeviceMemoryProperties2KHR(physicalDevice, pMemoryProperties);
+
+}
+
+void DispatchGetPhysicalDeviceSparseImageFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties2*             pProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    layer_data->instance_dispatch_table.GetPhysicalDeviceSparseImageFormatProperties2KHR(physicalDevice, pFormatInfo, pPropertyCount, pProperties);
+
+}
+
+void DispatchGetDeviceGroupPeerMemoryFeaturesKHR(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    layer_data->device_dispatch_table.GetDeviceGroupPeerMemoryFeaturesKHR(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
+
+}
+
+void DispatchCmdSetDeviceMaskKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdSetDeviceMaskKHR(commandBuffer, deviceMask);
+
+}
+
+void DispatchCmdDispatchBaseKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdDispatchBaseKHR(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
+
+}
+
+void DispatchTrimCommandPoolKHR(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.TrimCommandPoolKHR(device, commandPool, flags);
+    {
+        commandPool = layer_data->Unwrap(commandPool);
+    }
+    layer_data->device_dispatch_table.TrimCommandPoolKHR(device, commandPool, flags);
+
+}
+
+VkResult DispatchEnumeratePhysicalDeviceGroupsKHR(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    VkResult result = layer_data->instance_dispatch_table.EnumeratePhysicalDeviceGroupsKHR(instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
+
+    return result;
+}
+
+void DispatchGetPhysicalDeviceExternalBufferPropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalBufferInfo*   pExternalBufferInfo,
+    VkExternalBufferProperties*                 pExternalBufferProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    layer_data->instance_dispatch_table.GetPhysicalDeviceExternalBufferPropertiesKHR(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
+
+}
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+VkResult DispatchGetMemoryWin32HandleKHR(
+    VkDevice                                    device,
+    const VkMemoryGetWin32HandleInfoKHR*        pGetWin32HandleInfo,
+    HANDLE*                                     pHandle)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetMemoryWin32HandleKHR(device, pGetWin32HandleInfo, pHandle);
+    safe_VkMemoryGetWin32HandleInfoKHR var_local_pGetWin32HandleInfo;
+    safe_VkMemoryGetWin32HandleInfoKHR *local_pGetWin32HandleInfo = NULL;
+    {
+        if (pGetWin32HandleInfo) {
+            local_pGetWin32HandleInfo = &var_local_pGetWin32HandleInfo;
+            local_pGetWin32HandleInfo->initialize(pGetWin32HandleInfo);
+            if (pGetWin32HandleInfo->memory) {
+                local_pGetWin32HandleInfo->memory = layer_data->Unwrap(pGetWin32HandleInfo->memory);
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.GetMemoryWin32HandleKHR(device, (const VkMemoryGetWin32HandleInfoKHR*)local_pGetWin32HandleInfo, pHandle);
+
+    return result;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+VkResult DispatchGetMemoryWin32HandlePropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    HANDLE                                      handle,
+    VkMemoryWin32HandlePropertiesKHR*           pMemoryWin32HandleProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    VkResult result = layer_data->device_dispatch_table.GetMemoryWin32HandlePropertiesKHR(device, handleType, handle, pMemoryWin32HandleProperties);
+
+    return result;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+VkResult DispatchGetMemoryFdKHR(
+    VkDevice                                    device,
+    const VkMemoryGetFdInfoKHR*                 pGetFdInfo,
+    int*                                        pFd)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetMemoryFdKHR(device, pGetFdInfo, pFd);
+    safe_VkMemoryGetFdInfoKHR var_local_pGetFdInfo;
+    safe_VkMemoryGetFdInfoKHR *local_pGetFdInfo = NULL;
+    {
+        if (pGetFdInfo) {
+            local_pGetFdInfo = &var_local_pGetFdInfo;
+            local_pGetFdInfo->initialize(pGetFdInfo);
+            if (pGetFdInfo->memory) {
+                local_pGetFdInfo->memory = layer_data->Unwrap(pGetFdInfo->memory);
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.GetMemoryFdKHR(device, (const VkMemoryGetFdInfoKHR*)local_pGetFdInfo, pFd);
+
+    return result;
+}
+
+VkResult DispatchGetMemoryFdPropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    int                                         fd,
+    VkMemoryFdPropertiesKHR*                    pMemoryFdProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    VkResult result = layer_data->device_dispatch_table.GetMemoryFdPropertiesKHR(device, handleType, fd, pMemoryFdProperties);
+
+    return result;
+}
+
+void DispatchGetPhysicalDeviceExternalSemaphorePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
+    VkExternalSemaphoreProperties*              pExternalSemaphoreProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    layer_data->instance_dispatch_table.GetPhysicalDeviceExternalSemaphorePropertiesKHR(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
+
+}
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+VkResult DispatchImportSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreWin32HandleInfoKHR*  pImportSemaphoreWin32HandleInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.ImportSemaphoreWin32HandleKHR(device, pImportSemaphoreWin32HandleInfo);
+    safe_VkImportSemaphoreWin32HandleInfoKHR var_local_pImportSemaphoreWin32HandleInfo;
+    safe_VkImportSemaphoreWin32HandleInfoKHR *local_pImportSemaphoreWin32HandleInfo = NULL;
+    {
+        if (pImportSemaphoreWin32HandleInfo) {
+            local_pImportSemaphoreWin32HandleInfo = &var_local_pImportSemaphoreWin32HandleInfo;
+            local_pImportSemaphoreWin32HandleInfo->initialize(pImportSemaphoreWin32HandleInfo);
+            if (pImportSemaphoreWin32HandleInfo->semaphore) {
+                local_pImportSemaphoreWin32HandleInfo->semaphore = layer_data->Unwrap(pImportSemaphoreWin32HandleInfo->semaphore);
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.ImportSemaphoreWin32HandleKHR(device, (const VkImportSemaphoreWin32HandleInfoKHR*)local_pImportSemaphoreWin32HandleInfo);
+
+    return result;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+VkResult DispatchGetSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetWin32HandleInfoKHR*     pGetWin32HandleInfo,
+    HANDLE*                                     pHandle)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetSemaphoreWin32HandleKHR(device, pGetWin32HandleInfo, pHandle);
+    safe_VkSemaphoreGetWin32HandleInfoKHR var_local_pGetWin32HandleInfo;
+    safe_VkSemaphoreGetWin32HandleInfoKHR *local_pGetWin32HandleInfo = NULL;
+    {
+        if (pGetWin32HandleInfo) {
+            local_pGetWin32HandleInfo = &var_local_pGetWin32HandleInfo;
+            local_pGetWin32HandleInfo->initialize(pGetWin32HandleInfo);
+            if (pGetWin32HandleInfo->semaphore) {
+                local_pGetWin32HandleInfo->semaphore = layer_data->Unwrap(pGetWin32HandleInfo->semaphore);
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.GetSemaphoreWin32HandleKHR(device, (const VkSemaphoreGetWin32HandleInfoKHR*)local_pGetWin32HandleInfo, pHandle);
+
+    return result;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+VkResult DispatchImportSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreFdInfoKHR*           pImportSemaphoreFdInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.ImportSemaphoreFdKHR(device, pImportSemaphoreFdInfo);
+    safe_VkImportSemaphoreFdInfoKHR var_local_pImportSemaphoreFdInfo;
+    safe_VkImportSemaphoreFdInfoKHR *local_pImportSemaphoreFdInfo = NULL;
+    {
+        if (pImportSemaphoreFdInfo) {
+            local_pImportSemaphoreFdInfo = &var_local_pImportSemaphoreFdInfo;
+            local_pImportSemaphoreFdInfo->initialize(pImportSemaphoreFdInfo);
+            if (pImportSemaphoreFdInfo->semaphore) {
+                local_pImportSemaphoreFdInfo->semaphore = layer_data->Unwrap(pImportSemaphoreFdInfo->semaphore);
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.ImportSemaphoreFdKHR(device, (const VkImportSemaphoreFdInfoKHR*)local_pImportSemaphoreFdInfo);
+
+    return result;
+}
+
+VkResult DispatchGetSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetFdInfoKHR*              pGetFdInfo,
+    int*                                        pFd)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetSemaphoreFdKHR(device, pGetFdInfo, pFd);
+    safe_VkSemaphoreGetFdInfoKHR var_local_pGetFdInfo;
+    safe_VkSemaphoreGetFdInfoKHR *local_pGetFdInfo = NULL;
+    {
+        if (pGetFdInfo) {
+            local_pGetFdInfo = &var_local_pGetFdInfo;
+            local_pGetFdInfo->initialize(pGetFdInfo);
+            if (pGetFdInfo->semaphore) {
+                local_pGetFdInfo->semaphore = layer_data->Unwrap(pGetFdInfo->semaphore);
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.GetSemaphoreFdKHR(device, (const VkSemaphoreGetFdInfoKHR*)local_pGetFdInfo, pFd);
+
+    return result;
+}
+
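+// CmdPushDescriptorSetKHR deep-copies the caller's descriptor-write array into
+// heap-allocated safe_VkWriteDescriptorSet objects (freed after the call) so
+// that dstSet, sampler, image-view, buffer, and texel-buffer-view handles can
+// be unwrapped without mutating the application's input structures.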
+void DispatchCmdPushDescriptorSetKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdPushDescriptorSetKHR(commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
+    safe_VkWriteDescriptorSet *local_pDescriptorWrites = NULL;
+    {
+        layout = layer_data->Unwrap(layout);
+        if (pDescriptorWrites) {
+            local_pDescriptorWrites = new safe_VkWriteDescriptorSet[descriptorWriteCount];
+            for (uint32_t index0 = 0; index0 < descriptorWriteCount; ++index0) {
+                local_pDescriptorWrites[index0].initialize(&pDescriptorWrites[index0]);
+                WrapPnextChainHandles(layer_data, local_pDescriptorWrites[index0].pNext);
+                if (pDescriptorWrites[index0].dstSet) {
+                    local_pDescriptorWrites[index0].dstSet = layer_data->Unwrap(pDescriptorWrites[index0].dstSet);
+                }
+                if (local_pDescriptorWrites[index0].pImageInfo) {
+                    for (uint32_t index1 = 0; index1 < local_pDescriptorWrites[index0].descriptorCount; ++index1) {
+                        if (pDescriptorWrites[index0].pImageInfo[index1].sampler) {
+                            local_pDescriptorWrites[index0].pImageInfo[index1].sampler = layer_data->Unwrap(pDescriptorWrites[index0].pImageInfo[index1].sampler);
+                        }
+                        if (pDescriptorWrites[index0].pImageInfo[index1].imageView) {
+                            local_pDescriptorWrites[index0].pImageInfo[index1].imageView = layer_data->Unwrap(pDescriptorWrites[index0].pImageInfo[index1].imageView);
+                        }
+                    }
+                }
+                if (local_pDescriptorWrites[index0].pBufferInfo) {
+                    for (uint32_t index1 = 0; index1 < local_pDescriptorWrites[index0].descriptorCount; ++index1) {
+                        if (pDescriptorWrites[index0].pBufferInfo[index1].buffer) {
+                            local_pDescriptorWrites[index0].pBufferInfo[index1].buffer = layer_data->Unwrap(pDescriptorWrites[index0].pBufferInfo[index1].buffer);
+                        }
+                    }
+                }
+                if (local_pDescriptorWrites[index0].pTexelBufferView) {
+                    for (uint32_t index1 = 0; index1 < local_pDescriptorWrites[index0].descriptorCount; ++index1) {
+                        local_pDescriptorWrites[index0].pTexelBufferView[index1] = layer_data->Unwrap(local_pDescriptorWrites[index0].pTexelBufferView[index1]);
+                    }
+                }
+            }
+        }
+    }
+    layer_data->device_dispatch_table.CmdPushDescriptorSetKHR(commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, (const VkWriteDescriptorSet*)local_pDescriptorWrites);
+    if (local_pDescriptorWrites) {
+        delete[] local_pDescriptorWrites;
+    }
+}
+
+// Skip vkCmdPushDescriptorSetWithTemplateKHR dispatch, manually generated
+
+// Skip vkCreateDescriptorUpdateTemplateKHR dispatch, manually generated
+
+// Skip vkDestroyDescriptorUpdateTemplateKHR dispatch, manually generated
+
+// Skip vkUpdateDescriptorSetWithTemplateKHR dispatch, manually generated
+
+// Skip vkCreateRenderPass2KHR dispatch, manually generated
+
+void DispatchCmdBeginRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdBeginRenderPass2KHR(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
+    safe_VkRenderPassBeginInfo var_local_pRenderPassBegin;
+    safe_VkRenderPassBeginInfo *local_pRenderPassBegin = NULL;
+    {
+        if (pRenderPassBegin) {
+            local_pRenderPassBegin = &var_local_pRenderPassBegin;
+            local_pRenderPassBegin->initialize(pRenderPassBegin);
+            if (pRenderPassBegin->renderPass) {
+                local_pRenderPassBegin->renderPass = layer_data->Unwrap(pRenderPassBegin->renderPass);
+            }
+            if (pRenderPassBegin->framebuffer) {
+                local_pRenderPassBegin->framebuffer = layer_data->Unwrap(pRenderPassBegin->framebuffer);
+            }
+            WrapPnextChainHandles(layer_data, local_pRenderPassBegin->pNext);
+        }
+    }
+    layer_data->device_dispatch_table.CmdBeginRenderPass2KHR(commandBuffer, (const VkRenderPassBeginInfo*)local_pRenderPassBegin, pSubpassBeginInfo);
+
+}
+
+void DispatchCmdNextSubpass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdNextSubpass2KHR(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
+
+}
+
+void DispatchCmdEndRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdEndRenderPass2KHR(commandBuffer, pSubpassEndInfo);
+
+}
+
+VkResult DispatchGetSwapchainStatusKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetSwapchainStatusKHR(device, swapchain);
+    {
+        swapchain = layer_data->Unwrap(swapchain);
+    }
+    VkResult result = layer_data->device_dispatch_table.GetSwapchainStatusKHR(device, swapchain);
+
+    return result;
+}
+
+void DispatchGetPhysicalDeviceExternalFencePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalFenceInfo*    pExternalFenceInfo,
+    VkExternalFenceProperties*                  pExternalFenceProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    layer_data->instance_dispatch_table.GetPhysicalDeviceExternalFencePropertiesKHR(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
+
+}
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+VkResult DispatchImportFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportFenceWin32HandleInfoKHR*      pImportFenceWin32HandleInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.ImportFenceWin32HandleKHR(device, pImportFenceWin32HandleInfo);
+    safe_VkImportFenceWin32HandleInfoKHR var_local_pImportFenceWin32HandleInfo;
+    safe_VkImportFenceWin32HandleInfoKHR *local_pImportFenceWin32HandleInfo = NULL;
+    {
+        if (pImportFenceWin32HandleInfo) {
+            local_pImportFenceWin32HandleInfo = &var_local_pImportFenceWin32HandleInfo;
+            local_pImportFenceWin32HandleInfo->initialize(pImportFenceWin32HandleInfo);
+            if (pImportFenceWin32HandleInfo->fence) {
+                local_pImportFenceWin32HandleInfo->fence = layer_data->Unwrap(pImportFenceWin32HandleInfo->fence);
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.ImportFenceWin32HandleKHR(device, (const VkImportFenceWin32HandleInfoKHR*)local_pImportFenceWin32HandleInfo);
+
+    return result;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+VkResult DispatchGetFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkFenceGetWin32HandleInfoKHR*         pGetWin32HandleInfo,
+    HANDLE*                                     pHandle)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetFenceWin32HandleKHR(device, pGetWin32HandleInfo, pHandle);
+    safe_VkFenceGetWin32HandleInfoKHR var_local_pGetWin32HandleInfo;
+    safe_VkFenceGetWin32HandleInfoKHR *local_pGetWin32HandleInfo = NULL;
+    {
+        if (pGetWin32HandleInfo) {
+            local_pGetWin32HandleInfo = &var_local_pGetWin32HandleInfo;
+            local_pGetWin32HandleInfo->initialize(pGetWin32HandleInfo);
+            if (pGetWin32HandleInfo->fence) {
+                local_pGetWin32HandleInfo->fence = layer_data->Unwrap(pGetWin32HandleInfo->fence);
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.GetFenceWin32HandleKHR(device, (const VkFenceGetWin32HandleInfoKHR*)local_pGetWin32HandleInfo, pHandle);
+
+    return result;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+VkResult DispatchImportFenceFdKHR(
+    VkDevice                                    device,
+    const VkImportFenceFdInfoKHR*               pImportFenceFdInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.ImportFenceFdKHR(device, pImportFenceFdInfo);
+    safe_VkImportFenceFdInfoKHR var_local_pImportFenceFdInfo;
+    safe_VkImportFenceFdInfoKHR *local_pImportFenceFdInfo = NULL;
+    {
+        if (pImportFenceFdInfo) {
+            local_pImportFenceFdInfo = &var_local_pImportFenceFdInfo;
+            local_pImportFenceFdInfo->initialize(pImportFenceFdInfo);
+            if (pImportFenceFdInfo->fence) {
+                local_pImportFenceFdInfo->fence = layer_data->Unwrap(pImportFenceFdInfo->fence);
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.ImportFenceFdKHR(device, (const VkImportFenceFdInfoKHR*)local_pImportFenceFdInfo);
+
+    return result;
+}
+
+VkResult DispatchGetFenceFdKHR(
+    VkDevice                                    device,
+    const VkFenceGetFdInfoKHR*                  pGetFdInfo,
+    int*                                        pFd)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetFenceFdKHR(device, pGetFdInfo, pFd);
+    safe_VkFenceGetFdInfoKHR var_local_pGetFdInfo;
+    safe_VkFenceGetFdInfoKHR *local_pGetFdInfo = NULL;
+    {
+        if (pGetFdInfo) {
+            local_pGetFdInfo = &var_local_pGetFdInfo;
+            local_pGetFdInfo->initialize(pGetFdInfo);
+            if (pGetFdInfo->fence) {
+                local_pGetFdInfo->fence = layer_data->Unwrap(pGetFdInfo->fence);
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.GetFenceFdKHR(device, (const VkFenceGetFdInfoKHR*)local_pGetFdInfo, pFd);
+
+    return result;
+}
+
+VkResult DispatchEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t*                                   pCounterCount,
+    VkPerformanceCounterKHR*                    pCounters,
+    VkPerformanceCounterDescriptionKHR*         pCounterDescriptions)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    VkResult result = layer_data->instance_dispatch_table.EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions);
+
+    return result;
+}
+
+void DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkQueryPoolPerformanceCreateInfoKHR*  pPerformanceQueryCreateInfo,
+    uint32_t*                                   pNumPasses)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    layer_data->instance_dispatch_table.GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physicalDevice, pPerformanceQueryCreateInfo, pNumPasses);
+
+}
+
+VkResult DispatchAcquireProfilingLockKHR(
+    VkDevice                                    device,
+    const VkAcquireProfilingLockInfoKHR*        pInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    VkResult result = layer_data->device_dispatch_table.AcquireProfilingLockKHR(device, pInfo);
+
+    return result;
+}
+
+void DispatchReleaseProfilingLockKHR(
+    VkDevice                                    device)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    layer_data->device_dispatch_table.ReleaseProfilingLockKHR(device);
+
+}
+
+VkResult DispatchGetPhysicalDeviceSurfaceCapabilities2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkSurfaceCapabilities2KHR*                  pSurfaceCapabilities)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.GetPhysicalDeviceSurfaceCapabilities2KHR(physicalDevice, pSurfaceInfo, pSurfaceCapabilities);
+    safe_VkPhysicalDeviceSurfaceInfo2KHR var_local_pSurfaceInfo;
+    safe_VkPhysicalDeviceSurfaceInfo2KHR *local_pSurfaceInfo = NULL;
+    {
+        if (pSurfaceInfo) {
+            local_pSurfaceInfo = &var_local_pSurfaceInfo;
+            local_pSurfaceInfo->initialize(pSurfaceInfo);
+            if (pSurfaceInfo->surface) {
+                local_pSurfaceInfo->surface = layer_data->Unwrap(pSurfaceInfo->surface);
+            }
+        }
+    }
+    VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceSurfaceCapabilities2KHR(physicalDevice, (const VkPhysicalDeviceSurfaceInfo2KHR*)local_pSurfaceInfo, pSurfaceCapabilities);
+
+    return result;
+}
+
+VkResult DispatchGetPhysicalDeviceSurfaceFormats2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormat2KHR*                        pSurfaceFormats)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.GetPhysicalDeviceSurfaceFormats2KHR(physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats);
+    safe_VkPhysicalDeviceSurfaceInfo2KHR var_local_pSurfaceInfo;
+    safe_VkPhysicalDeviceSurfaceInfo2KHR *local_pSurfaceInfo = NULL;
+    {
+        if (pSurfaceInfo) {
+            local_pSurfaceInfo = &var_local_pSurfaceInfo;
+            local_pSurfaceInfo->initialize(pSurfaceInfo);
+            if (pSurfaceInfo->surface) {
+                local_pSurfaceInfo->surface = layer_data->Unwrap(pSurfaceInfo->surface);
+            }
+        }
+    }
+    VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceSurfaceFormats2KHR(physicalDevice, (const VkPhysicalDeviceSurfaceInfo2KHR*)local_pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats);
+
+    return result;
+}
+
+// Skip vkGetPhysicalDeviceDisplayProperties2KHR dispatch, manually generated
+
+// Skip vkGetPhysicalDeviceDisplayPlaneProperties2KHR dispatch, manually generated
+
+// Skip vkGetDisplayModeProperties2KHR dispatch, manually generated
+
+VkResult DispatchGetDisplayPlaneCapabilities2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDisplayPlaneInfo2KHR*               pDisplayPlaneInfo,
+    VkDisplayPlaneCapabilities2KHR*             pCapabilities)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.GetDisplayPlaneCapabilities2KHR(physicalDevice, pDisplayPlaneInfo, pCapabilities);
+    safe_VkDisplayPlaneInfo2KHR var_local_pDisplayPlaneInfo;
+    safe_VkDisplayPlaneInfo2KHR *local_pDisplayPlaneInfo = NULL;
+    {
+        if (pDisplayPlaneInfo) {
+            local_pDisplayPlaneInfo = &var_local_pDisplayPlaneInfo;
+            local_pDisplayPlaneInfo->initialize(pDisplayPlaneInfo);
+            if (pDisplayPlaneInfo->mode) {
+                local_pDisplayPlaneInfo->mode = layer_data->Unwrap(pDisplayPlaneInfo->mode);
+            }
+        }
+    }
+    VkResult result = layer_data->instance_dispatch_table.GetDisplayPlaneCapabilities2KHR(physicalDevice, (const VkDisplayPlaneInfo2KHR*)local_pDisplayPlaneInfo, pCapabilities);
+
+    return result;
+}
+
+void DispatchGetImageMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetImageMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
+    safe_VkImageMemoryRequirementsInfo2 var_local_pInfo;
+    safe_VkImageMemoryRequirementsInfo2 *local_pInfo = NULL;
+    {
+        if (pInfo) {
+            local_pInfo = &var_local_pInfo;
+            local_pInfo->initialize(pInfo);
+            if (pInfo->image) {
+                local_pInfo->image = layer_data->Unwrap(pInfo->image);
+            }
+        }
+    }
+    layer_data->device_dispatch_table.GetImageMemoryRequirements2KHR(device, (const VkImageMemoryRequirementsInfo2*)local_pInfo, pMemoryRequirements);
+
+}
+
+void DispatchGetBufferMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetBufferMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
+    safe_VkBufferMemoryRequirementsInfo2 var_local_pInfo;
+    safe_VkBufferMemoryRequirementsInfo2 *local_pInfo = NULL;
+    {
+        if (pInfo) {
+            local_pInfo = &var_local_pInfo;
+            local_pInfo->initialize(pInfo);
+            if (pInfo->buffer) {
+                local_pInfo->buffer = layer_data->Unwrap(pInfo->buffer);
+            }
+        }
+    }
+    layer_data->device_dispatch_table.GetBufferMemoryRequirements2KHR(device, (const VkBufferMemoryRequirementsInfo2*)local_pInfo, pMemoryRequirements);
+
+}
+
+void DispatchGetImageSparseMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetImageSparseMemoryRequirements2KHR(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
+    safe_VkImageSparseMemoryRequirementsInfo2 var_local_pInfo;
+    safe_VkImageSparseMemoryRequirementsInfo2 *local_pInfo = NULL;
+    {
+        if (pInfo) {
+            local_pInfo = &var_local_pInfo;
+            local_pInfo->initialize(pInfo);
+            if (pInfo->image) {
+                local_pInfo->image = layer_data->Unwrap(pInfo->image);
+            }
+        }
+    }
+    layer_data->device_dispatch_table.GetImageSparseMemoryRequirements2KHR(device, (const VkImageSparseMemoryRequirementsInfo2*)local_pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
+
+}
+
+VkResult DispatchCreateSamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CreateSamplerYcbcrConversionKHR(device, pCreateInfo, pAllocator, pYcbcrConversion);
+    safe_VkSamplerYcbcrConversionCreateInfo var_local_pCreateInfo;
+    safe_VkSamplerYcbcrConversionCreateInfo *local_pCreateInfo = NULL;
+    {
+        if (pCreateInfo) {
+            local_pCreateInfo = &var_local_pCreateInfo;
+            local_pCreateInfo->initialize(pCreateInfo);
+            WrapPnextChainHandles(layer_data, local_pCreateInfo->pNext);
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.CreateSamplerYcbcrConversionKHR(device, (const VkSamplerYcbcrConversionCreateInfo*)local_pCreateInfo, pAllocator, pYcbcrConversion);
+    if (VK_SUCCESS == result) {
+        *pYcbcrConversion = layer_data->WrapNew(*pYcbcrConversion);
+    }
+    return result;
+}
+
+void DispatchDestroySamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DestroySamplerYcbcrConversionKHR(device, ycbcrConversion, pAllocator);
+    uint64_t ycbcrConversion_id = reinterpret_cast<uint64_t &>(ycbcrConversion);
+    auto iter = unique_id_mapping.pop(ycbcrConversion_id);
+    if (iter != unique_id_mapping.end()) {
+        ycbcrConversion = (VkSamplerYcbcrConversion)iter->second;
+    } else {
+        ycbcrConversion = (VkSamplerYcbcrConversion)0;
+    }
+    layer_data->device_dispatch_table.DestroySamplerYcbcrConversionKHR(device, ycbcrConversion, pAllocator);
+
+}
+
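+// BindBufferMemory2KHR / BindImageMemory2KHR copy each VkBind*MemoryInfo into
+// a safe_* array, unwrap the buffer/image and memory handles (and, for images,
+// any handles in the pNext chain), call down, then free the temporary array.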
+VkResult DispatchBindBufferMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.BindBufferMemory2KHR(device, bindInfoCount, pBindInfos);
+    safe_VkBindBufferMemoryInfo *local_pBindInfos = NULL;
+    {
+        if (pBindInfos) {
+            local_pBindInfos = new safe_VkBindBufferMemoryInfo[bindInfoCount];
+            for (uint32_t index0 = 0; index0 < bindInfoCount; ++index0) {
+                local_pBindInfos[index0].initialize(&pBindInfos[index0]);
+                if (pBindInfos[index0].buffer) {
+                    local_pBindInfos[index0].buffer = layer_data->Unwrap(pBindInfos[index0].buffer);
+                }
+                if (pBindInfos[index0].memory) {
+                    local_pBindInfos[index0].memory = layer_data->Unwrap(pBindInfos[index0].memory);
+                }
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.BindBufferMemory2KHR(device, bindInfoCount, (const VkBindBufferMemoryInfo*)local_pBindInfos);
+    if (local_pBindInfos) {
+        delete[] local_pBindInfos;
+    }
+    return result;
+}
+
+VkResult DispatchBindImageMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.BindImageMemory2KHR(device, bindInfoCount, pBindInfos);
+    safe_VkBindImageMemoryInfo *local_pBindInfos = NULL;
+    {
+        if (pBindInfos) {
+            local_pBindInfos = new safe_VkBindImageMemoryInfo[bindInfoCount];
+            for (uint32_t index0 = 0; index0 < bindInfoCount; ++index0) {
+                local_pBindInfos[index0].initialize(&pBindInfos[index0]);
+                WrapPnextChainHandles(layer_data, local_pBindInfos[index0].pNext);
+                if (pBindInfos[index0].image) {
+                    local_pBindInfos[index0].image = layer_data->Unwrap(pBindInfos[index0].image);
+                }
+                if (pBindInfos[index0].memory) {
+                    local_pBindInfos[index0].memory = layer_data->Unwrap(pBindInfos[index0].memory);
+                }
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.BindImageMemory2KHR(device, bindInfoCount, (const VkBindImageMemoryInfo*)local_pBindInfos);
+    if (local_pBindInfos) {
+        delete[] local_pBindInfos;
+    }
+    return result;
+}
+
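+// Unwrap any immutable sampler handles nested in the descriptor set layout bindings before querying support.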
+void DispatchGetDescriptorSetLayoutSupportKHR(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetDescriptorSetLayoutSupportKHR(device, pCreateInfo, pSupport);
+    safe_VkDescriptorSetLayoutCreateInfo var_local_pCreateInfo;
+    safe_VkDescriptorSetLayoutCreateInfo *local_pCreateInfo = NULL;
+    {
+        if (pCreateInfo) {
+            local_pCreateInfo = &var_local_pCreateInfo;
+            local_pCreateInfo->initialize(pCreateInfo);
+            if (local_pCreateInfo->pBindings) {
+                for (uint32_t index1 = 0; index1 < local_pCreateInfo->bindingCount; ++index1) {
+                    if (local_pCreateInfo->pBindings[index1].pImmutableSamplers) {
+                        for (uint32_t index2 = 0; index2 < local_pCreateInfo->pBindings[index1].descriptorCount; ++index2) {
+                            local_pCreateInfo->pBindings[index1].pImmutableSamplers[index2] = layer_data->Unwrap(local_pCreateInfo->pBindings[index1].pImmutableSamplers[index2]);
+                        }
+                    }
+                }
+            }
+        }
+    }
+    layer_data->device_dispatch_table.GetDescriptorSetLayoutSupportKHR(device, (const VkDescriptorSetLayoutCreateInfo*)local_pCreateInfo, pSupport);
+
+}
+
+void DispatchCmdDrawIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdDrawIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+    {
+        buffer = layer_data->Unwrap(buffer);
+        countBuffer = layer_data->Unwrap(countBuffer);
+    }
+    layer_data->device_dispatch_table.CmdDrawIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+
+}
+
+void DispatchCmdDrawIndexedIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdDrawIndexedIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+    {
+        buffer = layer_data->Unwrap(buffer);
+        countBuffer = layer_data->Unwrap(countBuffer);
+    }
+    layer_data->device_dispatch_table.CmdDrawIndexedIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+
+}
+
+VkResult DispatchGetSemaphoreCounterValueKHR(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    uint64_t*                                   pValue)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetSemaphoreCounterValueKHR(device, semaphore, pValue);
+    {
+        semaphore = layer_data->Unwrap(semaphore);
+    }
+    VkResult result = layer_data->device_dispatch_table.GetSemaphoreCounterValueKHR(device, semaphore, pValue);
+
+    return result;
+}
+
+VkResult DispatchWaitSemaphoresKHR(
+    VkDevice                                    device,
+    const VkSemaphoreWaitInfoKHR*               pWaitInfo,
+    uint64_t                                    timeout)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.WaitSemaphoresKHR(device, pWaitInfo, timeout);
+    safe_VkSemaphoreWaitInfoKHR var_local_pWaitInfo;
+    safe_VkSemaphoreWaitInfoKHR *local_pWaitInfo = NULL;
+    {
+        if (pWaitInfo) {
+            local_pWaitInfo = &var_local_pWaitInfo;
+            local_pWaitInfo->initialize(pWaitInfo);
+            if (local_pWaitInfo->pSemaphores) {
+                for (uint32_t index1 = 0; index1 < local_pWaitInfo->semaphoreCount; ++index1) {
+                    local_pWaitInfo->pSemaphores[index1] = layer_data->Unwrap(local_pWaitInfo->pSemaphores[index1]);
+                }
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.WaitSemaphoresKHR(device, (const VkSemaphoreWaitInfoKHR*)local_pWaitInfo, timeout);
+
+    return result;
+}
+
+VkResult DispatchSignalSemaphoreKHR(
+    VkDevice                                    device,
+    const VkSemaphoreSignalInfoKHR*             pSignalInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.SignalSemaphoreKHR(device, pSignalInfo);
+    safe_VkSemaphoreSignalInfoKHR var_local_pSignalInfo;
+    safe_VkSemaphoreSignalInfoKHR *local_pSignalInfo = NULL;
+    {
+        if (pSignalInfo) {
+            local_pSignalInfo = &var_local_pSignalInfo;
+            local_pSignalInfo->initialize(pSignalInfo);
+            if (pSignalInfo->semaphore) {
+                local_pSignalInfo->semaphore = layer_data->Unwrap(pSignalInfo->semaphore);
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.SignalSemaphoreKHR(device, (const VkSemaphoreSignalInfoKHR*)local_pSignalInfo);
+
+    return result;
+}
+
+VkDeviceAddress DispatchGetBufferDeviceAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetBufferDeviceAddressKHR(device, pInfo);
+    safe_VkBufferDeviceAddressInfoKHR var_local_pInfo;
+    safe_VkBufferDeviceAddressInfoKHR *local_pInfo = NULL;
+    {
+        if (pInfo) {
+            local_pInfo = &var_local_pInfo;
+            local_pInfo->initialize(pInfo);
+            if (pInfo->buffer) {
+                local_pInfo->buffer = layer_data->Unwrap(pInfo->buffer);
+            }
+        }
+    }
+    VkDeviceAddress result = layer_data->device_dispatch_table.GetBufferDeviceAddressKHR(device, (const VkBufferDeviceAddressInfoKHR*)local_pInfo);
+
+    return result;
+}
+
+uint64_t DispatchGetBufferOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetBufferOpaqueCaptureAddressKHR(device, pInfo);
+    safe_VkBufferDeviceAddressInfoKHR var_local_pInfo;
+    safe_VkBufferDeviceAddressInfoKHR *local_pInfo = NULL;
+    {
+        if (pInfo) {
+            local_pInfo = &var_local_pInfo;
+            local_pInfo->initialize(pInfo);
+            if (pInfo->buffer) {
+                local_pInfo->buffer = layer_data->Unwrap(pInfo->buffer);
+            }
+        }
+    }
+    uint64_t result = layer_data->device_dispatch_table.GetBufferOpaqueCaptureAddressKHR(device, (const VkBufferDeviceAddressInfoKHR*)local_pInfo);
+
+    return result;
+}
+
+uint64_t DispatchGetDeviceMemoryOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetDeviceMemoryOpaqueCaptureAddressKHR(device, pInfo);
+    safe_VkDeviceMemoryOpaqueCaptureAddressInfoKHR var_local_pInfo;
+    safe_VkDeviceMemoryOpaqueCaptureAddressInfoKHR *local_pInfo = NULL;
+    {
+        if (pInfo) {
+            local_pInfo = &var_local_pInfo;
+            local_pInfo->initialize(pInfo);
+            if (pInfo->memory) {
+                local_pInfo->memory = layer_data->Unwrap(pInfo->memory);
+            }
+        }
+    }
+    uint64_t result = layer_data->device_dispatch_table.GetDeviceMemoryOpaqueCaptureAddressKHR(device, (const VkDeviceMemoryOpaqueCaptureAddressInfoKHR*)local_pInfo);
+
+    return result;
+}
+
+VkResult DispatchGetPipelineExecutablePropertiesKHR(
+    VkDevice                                    device,
+    const VkPipelineInfoKHR*                    pPipelineInfo,
+    uint32_t*                                   pExecutableCount,
+    VkPipelineExecutablePropertiesKHR*          pProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetPipelineExecutablePropertiesKHR(device, pPipelineInfo, pExecutableCount, pProperties);
+    safe_VkPipelineInfoKHR var_local_pPipelineInfo;
+    safe_VkPipelineInfoKHR *local_pPipelineInfo = NULL;
+    {
+        if (pPipelineInfo) {
+            local_pPipelineInfo = &var_local_pPipelineInfo;
+            local_pPipelineInfo->initialize(pPipelineInfo);
+            if (pPipelineInfo->pipeline) {
+                local_pPipelineInfo->pipeline = layer_data->Unwrap(pPipelineInfo->pipeline);
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.GetPipelineExecutablePropertiesKHR(device, (const VkPipelineInfoKHR*)local_pPipelineInfo, pExecutableCount, pProperties);
+
+    return result;
+}
+
+VkResult DispatchGetPipelineExecutableStatisticsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pStatisticCount,
+    VkPipelineExecutableStatisticKHR*           pStatistics)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetPipelineExecutableStatisticsKHR(device, pExecutableInfo, pStatisticCount, pStatistics);
+    safe_VkPipelineExecutableInfoKHR var_local_pExecutableInfo;
+    safe_VkPipelineExecutableInfoKHR *local_pExecutableInfo = NULL;
+    {
+        if (pExecutableInfo) {
+            local_pExecutableInfo = &var_local_pExecutableInfo;
+            local_pExecutableInfo->initialize(pExecutableInfo);
+            if (pExecutableInfo->pipeline) {
+                local_pExecutableInfo->pipeline = layer_data->Unwrap(pExecutableInfo->pipeline);
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.GetPipelineExecutableStatisticsKHR(device, (const VkPipelineExecutableInfoKHR*)local_pExecutableInfo, pStatisticCount, pStatistics);
+
+    return result;
+}
+
+VkResult DispatchGetPipelineExecutableInternalRepresentationsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pInternalRepresentationCount,
+    VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetPipelineExecutableInternalRepresentationsKHR(device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations);
+    safe_VkPipelineExecutableInfoKHR var_local_pExecutableInfo;
+    safe_VkPipelineExecutableInfoKHR *local_pExecutableInfo = NULL;
+    {
+        if (pExecutableInfo) {
+            local_pExecutableInfo = &var_local_pExecutableInfo;
+            local_pExecutableInfo->initialize(pExecutableInfo);
+            if (pExecutableInfo->pipeline) {
+                local_pExecutableInfo->pipeline = layer_data->Unwrap(pExecutableInfo->pipeline);
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.GetPipelineExecutableInternalRepresentationsKHR(device, (const VkPipelineExecutableInfoKHR*)local_pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations);
+
+    return result;
+}
+
+VkResult DispatchCreateDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    const VkDebugReportCallbackCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugReportCallbackEXT*                   pCallback)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pCallback);
+    VkResult result = layer_data->instance_dispatch_table.CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pCallback);
+    if (VK_SUCCESS == result) {
+        *pCallback = layer_data->WrapNew(*pCallback);
+    }
+    return result;
+}
+
+void DispatchDestroyDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    VkDebugReportCallbackEXT                    callback,
+    const VkAllocationCallbacks*                pAllocator)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.DestroyDebugReportCallbackEXT(instance, callback, pAllocator);
+    uint64_t callback_id = reinterpret_cast<uint64_t &>(callback);
+    auto iter = unique_id_mapping.pop(callback_id);
+    if (iter != unique_id_mapping.end()) {
+        callback = (VkDebugReportCallbackEXT)iter->second;
+    } else {
+        callback = (VkDebugReportCallbackEXT)0;
+    }
+    layer_data->instance_dispatch_table.DestroyDebugReportCallbackEXT(instance, callback, pAllocator);
+
+}
+
+void DispatchDebugReportMessageEXT(
+    VkInstance                                  instance,
+    VkDebugReportFlagsEXT                       flags,
+    VkDebugReportObjectTypeEXT                  objectType,
+    uint64_t                                    object,
+    size_t                                      location,
+    int32_t                                     messageCode,
+    const char*                                 pLayerPrefix,
+    const char*                                 pMessage)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    layer_data->instance_dispatch_table.DebugReportMessageEXT(instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage);
+
+}
+
+// Skip vkDebugMarkerSetObjectTagEXT dispatch, manually generated
+
+// Skip vkDebugMarkerSetObjectNameEXT dispatch, manually generated
+
+void DispatchCmdDebugMarkerBeginEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdDebugMarkerBeginEXT(commandBuffer, pMarkerInfo);
+
+}
+
+void DispatchCmdDebugMarkerEndEXT(
+    VkCommandBuffer                             commandBuffer)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdDebugMarkerEndEXT(commandBuffer);
+
+}
+
+void DispatchCmdDebugMarkerInsertEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdDebugMarkerInsertEXT(commandBuffer, pMarkerInfo);
+
+}
+
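+// Handle-array wrapper: unwrap the buffers into a local array, heap-allocated only when bindingCount exceeds DISPATCH_MAX_STACK_ALLOCATIONS.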
+void DispatchCmdBindTransformFeedbackBuffersEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets,
+    const VkDeviceSize*                         pSizes)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdBindTransformFeedbackBuffersEXT(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes);
+    VkBuffer var_local_pBuffers[DISPATCH_MAX_STACK_ALLOCATIONS];
+    VkBuffer *local_pBuffers = NULL;
+    {
+        if (pBuffers) {
+            local_pBuffers = bindingCount > DISPATCH_MAX_STACK_ALLOCATIONS ? new VkBuffer[bindingCount] : var_local_pBuffers;
+            for (uint32_t index0 = 0; index0 < bindingCount; ++index0) {
+                local_pBuffers[index0] = layer_data->Unwrap(pBuffers[index0]);
+            }
+        }
+    }
+    layer_data->device_dispatch_table.CmdBindTransformFeedbackBuffersEXT(commandBuffer, firstBinding, bindingCount, (const VkBuffer*)local_pBuffers, pOffsets, pSizes);
+    if (local_pBuffers != var_local_pBuffers)
+        delete[] local_pBuffers;
+}
+
+void DispatchCmdBeginTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdBeginTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets);
+    VkBuffer var_local_pCounterBuffers[DISPATCH_MAX_STACK_ALLOCATIONS];
+    VkBuffer *local_pCounterBuffers = NULL;
+    {
+        if (pCounterBuffers) {
+            local_pCounterBuffers = counterBufferCount > DISPATCH_MAX_STACK_ALLOCATIONS ? new VkBuffer[counterBufferCount] : var_local_pCounterBuffers;
+            for (uint32_t index0 = 0; index0 < counterBufferCount; ++index0) {
+                local_pCounterBuffers[index0] = layer_data->Unwrap(pCounterBuffers[index0]);
+            }
+        }
+    }
+    layer_data->device_dispatch_table.CmdBeginTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, (const VkBuffer*)local_pCounterBuffers, pCounterBufferOffsets);
+    if (local_pCounterBuffers != var_local_pCounterBuffers)
+        delete[] local_pCounterBuffers;
+}
+
+void DispatchCmdEndTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdEndTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets);
+    VkBuffer var_local_pCounterBuffers[DISPATCH_MAX_STACK_ALLOCATIONS];
+    VkBuffer *local_pCounterBuffers = NULL;
+    {
+        if (pCounterBuffers) {
+            local_pCounterBuffers = counterBufferCount > DISPATCH_MAX_STACK_ALLOCATIONS ? new VkBuffer[counterBufferCount] : var_local_pCounterBuffers;
+            for (uint32_t index0 = 0; index0 < counterBufferCount; ++index0) {
+                local_pCounterBuffers[index0] = layer_data->Unwrap(pCounterBuffers[index0]);
+            }
+        }
+    }
+    layer_data->device_dispatch_table.CmdEndTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, (const VkBuffer*)local_pCounterBuffers, pCounterBufferOffsets);
+    if (local_pCounterBuffers != var_local_pCounterBuffers)
+        delete[] local_pCounterBuffers;
+}
+
+void DispatchCmdBeginQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags,
+    uint32_t                                    index)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdBeginQueryIndexedEXT(commandBuffer, queryPool, query, flags, index);
+    {
+        queryPool = layer_data->Unwrap(queryPool);
+    }
+    layer_data->device_dispatch_table.CmdBeginQueryIndexedEXT(commandBuffer, queryPool, query, flags, index);
+
+}
+
+void DispatchCmdEndQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    uint32_t                                    index)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdEndQueryIndexedEXT(commandBuffer, queryPool, query, index);
+    {
+        queryPool = layer_data->Unwrap(queryPool);
+    }
+    layer_data->device_dispatch_table.CmdEndQueryIndexedEXT(commandBuffer, queryPool, query, index);
+
+}
+
+void DispatchCmdDrawIndirectByteCountEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstInstance,
+    VkBuffer                                    counterBuffer,
+    VkDeviceSize                                counterBufferOffset,
+    uint32_t                                    counterOffset,
+    uint32_t                                    vertexStride)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdDrawIndirectByteCountEXT(commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride);
+    {
+        counterBuffer = layer_data->Unwrap(counterBuffer);
+    }
+    layer_data->device_dispatch_table.CmdDrawIndirectByteCountEXT(commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride);
+
+}
+
+uint32_t DispatchGetImageViewHandleNVX(
+    VkDevice                                    device,
+    const VkImageViewHandleInfoNVX*             pInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetImageViewHandleNVX(device, pInfo);
+    safe_VkImageViewHandleInfoNVX var_local_pInfo;
+    safe_VkImageViewHandleInfoNVX *local_pInfo = NULL;
+    {
+        if (pInfo) {
+            local_pInfo = &var_local_pInfo;
+            local_pInfo->initialize(pInfo);
+            if (pInfo->imageView) {
+                local_pInfo->imageView = layer_data->Unwrap(pInfo->imageView);
+            }
+            if (pInfo->sampler) {
+                local_pInfo->sampler = layer_data->Unwrap(pInfo->sampler);
+            }
+        }
+    }
+    uint32_t result = layer_data->device_dispatch_table.GetImageViewHandleNVX(device, (const VkImageViewHandleInfoNVX*)local_pInfo);
+
+    return result;
+}
+
+void DispatchCmdDrawIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdDrawIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+    {
+        buffer = layer_data->Unwrap(buffer);
+        countBuffer = layer_data->Unwrap(countBuffer);
+    }
+    layer_data->device_dispatch_table.CmdDrawIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+
+}
+
+void DispatchCmdDrawIndexedIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdDrawIndexedIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+    {
+        buffer = layer_data->Unwrap(buffer);
+        countBuffer = layer_data->Unwrap(countBuffer);
+    }
+    layer_data->device_dispatch_table.CmdDrawIndexedIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+
+}
+
+VkResult DispatchGetShaderInfoAMD(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    VkShaderStageFlagBits                       shaderStage,
+    VkShaderInfoTypeAMD                         infoType,
+    size_t*                                     pInfoSize,
+    void*                                       pInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetShaderInfoAMD(device, pipeline, shaderStage, infoType, pInfoSize, pInfo);
+    {
+        pipeline = layer_data->Unwrap(pipeline);
+    }
+    VkResult result = layer_data->device_dispatch_table.GetShaderInfoAMD(device, pipeline, shaderStage, infoType, pInfoSize, pInfo);
+
+    return result;
+}
+
+#ifdef VK_USE_PLATFORM_GGP
+
+VkResult DispatchCreateStreamDescriptorSurfaceGGP(
+    VkInstance                                  instance,
+    const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.CreateStreamDescriptorSurfaceGGP(instance, pCreateInfo, pAllocator, pSurface);
+    VkResult result = layer_data->instance_dispatch_table.CreateStreamDescriptorSurfaceGGP(instance, pCreateInfo, pAllocator, pSurface);
+    if (VK_SUCCESS == result) {
+        *pSurface = layer_data->WrapNew(*pSurface);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_GGP
+
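+// No wrappable handles in this call; forward directly to the instance dispatch table.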
+VkResult DispatchGetPhysicalDeviceExternalImageFormatPropertiesNV(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageCreateFlags                          flags,
+    VkExternalMemoryHandleTypeFlagsNV           externalHandleType,
+    VkExternalImageFormatPropertiesNV*          pExternalImageFormatProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceExternalImageFormatPropertiesNV(physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties);
+
+    return result;
+}
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+VkResult DispatchGetMemoryWin32HandleNV(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkExternalMemoryHandleTypeFlagsNV           handleType,
+    HANDLE*                                     pHandle)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetMemoryWin32HandleNV(device, memory, handleType, pHandle);
+    {
+        memory = layer_data->Unwrap(memory);
+    }
+    VkResult result = layer_data->device_dispatch_table.GetMemoryWin32HandleNV(device, memory, handleType, pHandle);
+
+    return result;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_VI_NN
+
+VkResult DispatchCreateViSurfaceNN(
+    VkInstance                                  instance,
+    const VkViSurfaceCreateInfoNN*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.CreateViSurfaceNN(instance, pCreateInfo, pAllocator, pSurface);
+    VkResult result = layer_data->instance_dispatch_table.CreateViSurfaceNN(instance, pCreateInfo, pAllocator, pSurface);
+    if (VK_SUCCESS == result) {
+        *pSurface = layer_data->WrapNew(*pSurface);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_VI_NN
+
+void DispatchCmdBeginConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkConditionalRenderingBeginInfoEXT*   pConditionalRenderingBegin)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdBeginConditionalRenderingEXT(commandBuffer, pConditionalRenderingBegin);
+    safe_VkConditionalRenderingBeginInfoEXT var_local_pConditionalRenderingBegin;
+    safe_VkConditionalRenderingBeginInfoEXT *local_pConditionalRenderingBegin = NULL;
+    {
+        if (pConditionalRenderingBegin) {
+            local_pConditionalRenderingBegin = &var_local_pConditionalRenderingBegin;
+            local_pConditionalRenderingBegin->initialize(pConditionalRenderingBegin);
+            if (pConditionalRenderingBegin->buffer) {
+                local_pConditionalRenderingBegin->buffer = layer_data->Unwrap(pConditionalRenderingBegin->buffer);
+            }
+        }
+    }
+    layer_data->device_dispatch_table.CmdBeginConditionalRenderingEXT(commandBuffer, (const VkConditionalRenderingBeginInfoEXT*)local_pConditionalRenderingBegin);
+
+}
+
+void DispatchCmdEndConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdEndConditionalRenderingEXT(commandBuffer);
+
+}
+
+void DispatchCmdProcessCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdProcessCommandsInfoNVX*          pProcessCommandsInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdProcessCommandsNVX(commandBuffer, pProcessCommandsInfo);
+    safe_VkCmdProcessCommandsInfoNVX var_local_pProcessCommandsInfo;
+    safe_VkCmdProcessCommandsInfoNVX *local_pProcessCommandsInfo = NULL;
+    {
+        if (pProcessCommandsInfo) {
+            local_pProcessCommandsInfo = &var_local_pProcessCommandsInfo;
+            local_pProcessCommandsInfo->initialize(pProcessCommandsInfo);
+            if (pProcessCommandsInfo->objectTable) {
+                local_pProcessCommandsInfo->objectTable = layer_data->Unwrap(pProcessCommandsInfo->objectTable);
+            }
+            if (pProcessCommandsInfo->indirectCommandsLayout) {
+                local_pProcessCommandsInfo->indirectCommandsLayout = layer_data->Unwrap(pProcessCommandsInfo->indirectCommandsLayout);
+            }
+            if (local_pProcessCommandsInfo->pIndirectCommandsTokens) {
+                for (uint32_t index1 = 0; index1 < local_pProcessCommandsInfo->indirectCommandsTokenCount; ++index1) {
+                    if (pProcessCommandsInfo->pIndirectCommandsTokens[index1].buffer) {
+                        local_pProcessCommandsInfo->pIndirectCommandsTokens[index1].buffer = layer_data->Unwrap(pProcessCommandsInfo->pIndirectCommandsTokens[index1].buffer);
+                    }
+                }
+            }
+            if (pProcessCommandsInfo->sequencesCountBuffer) {
+                local_pProcessCommandsInfo->sequencesCountBuffer = layer_data->Unwrap(pProcessCommandsInfo->sequencesCountBuffer);
+            }
+            if (pProcessCommandsInfo->sequencesIndexBuffer) {
+                local_pProcessCommandsInfo->sequencesIndexBuffer = layer_data->Unwrap(pProcessCommandsInfo->sequencesIndexBuffer);
+            }
+        }
+    }
+    layer_data->device_dispatch_table.CmdProcessCommandsNVX(commandBuffer, (const VkCmdProcessCommandsInfoNVX*)local_pProcessCommandsInfo);
+
+}
+
+void DispatchCmdReserveSpaceForCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdReserveSpaceForCommandsInfoNVX*  pReserveSpaceInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdReserveSpaceForCommandsNVX(commandBuffer, pReserveSpaceInfo);
+    safe_VkCmdReserveSpaceForCommandsInfoNVX var_local_pReserveSpaceInfo;
+    safe_VkCmdReserveSpaceForCommandsInfoNVX *local_pReserveSpaceInfo = NULL;
+    {
+        if (pReserveSpaceInfo) {
+            local_pReserveSpaceInfo = &var_local_pReserveSpaceInfo;
+            local_pReserveSpaceInfo->initialize(pReserveSpaceInfo);
+            if (pReserveSpaceInfo->objectTable) {
+                local_pReserveSpaceInfo->objectTable = layer_data->Unwrap(pReserveSpaceInfo->objectTable);
+            }
+            if (pReserveSpaceInfo->indirectCommandsLayout) {
+                local_pReserveSpaceInfo->indirectCommandsLayout = layer_data->Unwrap(pReserveSpaceInfo->indirectCommandsLayout);
+            }
+        }
+    }
+    layer_data->device_dispatch_table.CmdReserveSpaceForCommandsNVX(commandBuffer, (const VkCmdReserveSpaceForCommandsInfoNVX*)local_pReserveSpaceInfo);
+
+}
+
+VkResult DispatchCreateIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkIndirectCommandsLayoutNVX*                pIndirectCommandsLayout)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CreateIndirectCommandsLayoutNVX(device, pCreateInfo, pAllocator, pIndirectCommandsLayout);
+    VkResult result = layer_data->device_dispatch_table.CreateIndirectCommandsLayoutNVX(device, pCreateInfo, pAllocator, pIndirectCommandsLayout);
+    if (VK_SUCCESS == result) {
+        *pIndirectCommandsLayout = layer_data->WrapNew(*pIndirectCommandsLayout);
+    }
+    return result;
+}
+
+void DispatchDestroyIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    VkIndirectCommandsLayoutNVX                 indirectCommandsLayout,
+    const VkAllocationCallbacks*                pAllocator)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DestroyIndirectCommandsLayoutNVX(device, indirectCommandsLayout, pAllocator);
+    uint64_t indirectCommandsLayout_id = reinterpret_cast<uint64_t &>(indirectCommandsLayout);
+    auto iter = unique_id_mapping.pop(indirectCommandsLayout_id);
+    if (iter != unique_id_mapping.end()) {
+        indirectCommandsLayout = (VkIndirectCommandsLayoutNVX)iter->second;
+    } else {
+        indirectCommandsLayout = (VkIndirectCommandsLayoutNVX)0;
+    }
+    layer_data->device_dispatch_table.DestroyIndirectCommandsLayoutNVX(device, indirectCommandsLayout, pAllocator);
+
+}
+
+VkResult DispatchCreateObjectTableNVX(
+    VkDevice                                    device,
+    const VkObjectTableCreateInfoNVX*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkObjectTableNVX*                           pObjectTable)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CreateObjectTableNVX(device, pCreateInfo, pAllocator, pObjectTable);
+    VkResult result = layer_data->device_dispatch_table.CreateObjectTableNVX(device, pCreateInfo, pAllocator, pObjectTable);
+    if (VK_SUCCESS == result) {
+        *pObjectTable = layer_data->WrapNew(*pObjectTable);
+    }
+    return result;
+}
+
+void DispatchDestroyObjectTableNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    const VkAllocationCallbacks*                pAllocator)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DestroyObjectTableNVX(device, objectTable, pAllocator);
+    uint64_t objectTable_id = reinterpret_cast<uint64_t &>(objectTable);
+    auto iter = unique_id_mapping.pop(objectTable_id);
+    if (iter != unique_id_mapping.end()) {
+        objectTable = (VkObjectTableNVX)iter->second;
+    } else {
+        objectTable = (VkObjectTableNVX)0;
+    }
+    layer_data->device_dispatch_table.DestroyObjectTableNVX(device, objectTable, pAllocator);
+
+}
+
+VkResult DispatchRegisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectTableEntryNVX* const*         ppObjectTableEntries,
+    const uint32_t*                             pObjectIndices)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.RegisterObjectsNVX(device, objectTable, objectCount, ppObjectTableEntries, pObjectIndices);
+    {
+        objectTable = layer_data->Unwrap(objectTable);
+    }
+    VkResult result = layer_data->device_dispatch_table.RegisterObjectsNVX(device, objectTable, objectCount, ppObjectTableEntries, pObjectIndices);
+
+    return result;
+}
+
+VkResult DispatchUnregisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectEntryTypeNVX*                 pObjectEntryTypes,
+    const uint32_t*                             pObjectIndices)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.UnregisterObjectsNVX(device, objectTable, objectCount, pObjectEntryTypes, pObjectIndices);
+    {
+        objectTable = layer_data->Unwrap(objectTable);
+    }
+    VkResult result = layer_data->device_dispatch_table.UnregisterObjectsNVX(device, objectTable, objectCount, pObjectEntryTypes, pObjectIndices);
+
+    return result;
+}
+
+void DispatchGetPhysicalDeviceGeneratedCommandsPropertiesNVX(
+    VkPhysicalDevice                            physicalDevice,
+    VkDeviceGeneratedCommandsFeaturesNVX*       pFeatures,
+    VkDeviceGeneratedCommandsLimitsNVX*         pLimits)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    layer_data->instance_dispatch_table.GetPhysicalDeviceGeneratedCommandsPropertiesNVX(physicalDevice, pFeatures, pLimits);
+
+}
+
+void DispatchCmdSetViewportWScalingNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewportWScalingNV*                 pViewportWScalings)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdSetViewportWScalingNV(commandBuffer, firstViewport, viewportCount, pViewportWScalings);
+
+}
+
+VkResult DispatchReleaseDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.ReleaseDisplayEXT(physicalDevice, display);
+    {
+        display = layer_data->Unwrap(display);
+    }
+    VkResult result = layer_data->instance_dispatch_table.ReleaseDisplayEXT(physicalDevice, display);
+
+    return result;
+}
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+VkResult DispatchAcquireXlibDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    VkDisplayKHR                                display)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.AcquireXlibDisplayEXT(physicalDevice, dpy, display);
+    {
+        display = layer_data->Unwrap(display);
+    }
+    VkResult result = layer_data->instance_dispatch_table.AcquireXlibDisplayEXT(physicalDevice, dpy, display);
+
+    return result;
+}
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+VkResult DispatchGetRandROutputDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    RROutput                                    rrOutput,
+    VkDisplayKHR*                               pDisplay)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.GetRandROutputDisplayEXT(physicalDevice, dpy, rrOutput, pDisplay);
+    VkResult result = layer_data->instance_dispatch_table.GetRandROutputDisplayEXT(physicalDevice, dpy, rrOutput, pDisplay);
+    if (VK_SUCCESS == result) {
+        *pDisplay = layer_data->WrapNew(*pDisplay);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+VkResult DispatchGetPhysicalDeviceSurfaceCapabilities2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilities2EXT*                  pSurfaceCapabilities)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.GetPhysicalDeviceSurfaceCapabilities2EXT(physicalDevice, surface, pSurfaceCapabilities);
+    {
+        surface = layer_data->Unwrap(surface);
+    }
+    VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceSurfaceCapabilities2EXT(physicalDevice, surface, pSurfaceCapabilities);
+
+    return result;
+}
+
+VkResult DispatchDisplayPowerControlEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayPowerInfoEXT*                pDisplayPowerInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DisplayPowerControlEXT(device, display, pDisplayPowerInfo);
+    {
+        display = layer_data->Unwrap(display);
+    }
+    VkResult result = layer_data->device_dispatch_table.DisplayPowerControlEXT(device, display, pDisplayPowerInfo);
+
+    return result;
+}
+
+VkResult DispatchRegisterDeviceEventEXT(
+    VkDevice                                    device,
+    const VkDeviceEventInfoEXT*                 pDeviceEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.RegisterDeviceEventEXT(device, pDeviceEventInfo, pAllocator, pFence);
+    VkResult result = layer_data->device_dispatch_table.RegisterDeviceEventEXT(device, pDeviceEventInfo, pAllocator, pFence);
+    if (VK_SUCCESS == result) {
+        *pFence = layer_data->WrapNew(*pFence);
+    }
+    return result;
+}
+
+VkResult DispatchRegisterDisplayEventEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayEventInfoEXT*                pDisplayEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.RegisterDisplayEventEXT(device, display, pDisplayEventInfo, pAllocator, pFence);
+    {
+        display = layer_data->Unwrap(display);
+    }
+    VkResult result = layer_data->device_dispatch_table.RegisterDisplayEventEXT(device, display, pDisplayEventInfo, pAllocator, pFence);
+    if (VK_SUCCESS == result) {
+        *pFence = layer_data->WrapNew(*pFence);
+    }
+    return result;
+}
+
+VkResult DispatchGetSwapchainCounterEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkSurfaceCounterFlagBitsEXT                 counter,
+    uint64_t*                                   pCounterValue)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetSwapchainCounterEXT(device, swapchain, counter, pCounterValue);
+    {
+        swapchain = layer_data->Unwrap(swapchain);
+    }
+    VkResult result = layer_data->device_dispatch_table.GetSwapchainCounterEXT(device, swapchain, counter, pCounterValue);
+
+    return result;
+}
+
+VkResult DispatchGetRefreshCycleDurationGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkRefreshCycleDurationGOOGLE*               pDisplayTimingProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetRefreshCycleDurationGOOGLE(device, swapchain, pDisplayTimingProperties);
+    {
+        swapchain = layer_data->Unwrap(swapchain);
+    }
+    VkResult result = layer_data->device_dispatch_table.GetRefreshCycleDurationGOOGLE(device, swapchain, pDisplayTimingProperties);
+
+    return result;
+}
+
+VkResult DispatchGetPastPresentationTimingGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pPresentationTimingCount,
+    VkPastPresentationTimingGOOGLE*             pPresentationTimings)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetPastPresentationTimingGOOGLE(device, swapchain, pPresentationTimingCount, pPresentationTimings);
+    {
+        swapchain = layer_data->Unwrap(swapchain);
+    }
+    VkResult result = layer_data->device_dispatch_table.GetPastPresentationTimingGOOGLE(device, swapchain, pPresentationTimingCount, pPresentationTimings);
+
+    return result;
+}
+
+void DispatchCmdSetDiscardRectangleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstDiscardRectangle,
+    uint32_t                                    discardRectangleCount,
+    const VkRect2D*                             pDiscardRectangles)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdSetDiscardRectangleEXT(commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles);
+
+}
+
+void DispatchSetHdrMetadataEXT(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainKHR*                       pSwapchains,
+    const VkHdrMetadataEXT*                     pMetadata)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.SetHdrMetadataEXT(device, swapchainCount, pSwapchains, pMetadata);
+    VkSwapchainKHR var_local_pSwapchains[DISPATCH_MAX_STACK_ALLOCATIONS];
+    VkSwapchainKHR *local_pSwapchains = NULL;
+    {
+        if (pSwapchains) {
+            local_pSwapchains = swapchainCount > DISPATCH_MAX_STACK_ALLOCATIONS ? new VkSwapchainKHR[swapchainCount] : var_local_pSwapchains;
+            for (uint32_t index0 = 0; index0 < swapchainCount; ++index0) {
+                local_pSwapchains[index0] = layer_data->Unwrap(pSwapchains[index0]);
+            }
+        }
+    }
+    layer_data->device_dispatch_table.SetHdrMetadataEXT(device, swapchainCount, (const VkSwapchainKHR*)local_pSwapchains, pMetadata);
+    if (local_pSwapchains != var_local_pSwapchains)
+        delete[] local_pSwapchains;
+}
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+
+VkResult DispatchCreateIOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkIOSSurfaceCreateInfoMVK*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.CreateIOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
+    VkResult result = layer_data->instance_dispatch_table.CreateIOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
+    if (VK_SUCCESS == result) {
+        *pSurface = layer_data->WrapNew(*pSurface);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_IOS_MVK
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+
+VkResult DispatchCreateMacOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkMacOSSurfaceCreateInfoMVK*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.CreateMacOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
+    VkResult result = layer_data->instance_dispatch_table.CreateMacOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
+    if (VK_SUCCESS == result) {
+        *pSurface = layer_data->WrapNew(*pSurface);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_MACOS_MVK
+
+// Skip vkSetDebugUtilsObjectNameEXT dispatch, manually generated
+
+// Skip vkSetDebugUtilsObjectTagEXT dispatch, manually generated
+
+void DispatchQueueBeginDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
+    layer_data->device_dispatch_table.QueueBeginDebugUtilsLabelEXT(queue, pLabelInfo);
+
+}
+
+void DispatchQueueEndDebugUtilsLabelEXT(
+    VkQueue                                     queue)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
+    layer_data->device_dispatch_table.QueueEndDebugUtilsLabelEXT(queue);
+
+}
+
+void DispatchQueueInsertDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
+    layer_data->device_dispatch_table.QueueInsertDebugUtilsLabelEXT(queue, pLabelInfo);
+
+}
+
+void DispatchCmdBeginDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdBeginDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
+
+}
+
+void DispatchCmdEndDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdEndDebugUtilsLabelEXT(commandBuffer);
+
+}
+
+void DispatchCmdInsertDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdInsertDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
+
+}
+
+VkResult DispatchCreateDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    const VkDebugUtilsMessengerCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugUtilsMessengerEXT*                   pMessenger)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.CreateDebugUtilsMessengerEXT(instance, pCreateInfo, pAllocator, pMessenger);
+    VkResult result = layer_data->instance_dispatch_table.CreateDebugUtilsMessengerEXT(instance, pCreateInfo, pAllocator, pMessenger);
+    if (VK_SUCCESS == result) {
+        *pMessenger = layer_data->WrapNew(*pMessenger);
+    }
+    return result;
+}
+
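+// Destroy-style entry points below pop the wrapped handle's id from
+// unique_id_mapping and call down with the underlying driver handle, or a null
+// handle if the id is not found.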
+void DispatchDestroyDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessengerEXT                    messenger,
+    const VkAllocationCallbacks*                pAllocator)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.DestroyDebugUtilsMessengerEXT(instance, messenger, pAllocator);
+    uint64_t messenger_id = reinterpret_cast<uint64_t &>(messenger);
+    auto iter = unique_id_mapping.pop(messenger_id);
+    if (iter != unique_id_mapping.end()) {
+        messenger = (VkDebugUtilsMessengerEXT)iter->second;
+    } else {
+        messenger = (VkDebugUtilsMessengerEXT)0;
+    }
+    layer_data->instance_dispatch_table.DestroyDebugUtilsMessengerEXT(instance, messenger, pAllocator);
+
+}
+
+void DispatchSubmitDebugUtilsMessageEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessageSeverityFlagBitsEXT      messageSeverity,
+    VkDebugUtilsMessageTypeFlagsEXT             messageTypes,
+    const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    layer_data->instance_dispatch_table.SubmitDebugUtilsMessageEXT(instance, messageSeverity, messageTypes, pCallbackData);
+
+}
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+VkResult DispatchGetAndroidHardwareBufferPropertiesANDROID(
+    VkDevice                                    device,
+    const struct AHardwareBuffer*               buffer,
+    VkAndroidHardwareBufferPropertiesANDROID*   pProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    VkResult result = layer_data->device_dispatch_table.GetAndroidHardwareBufferPropertiesANDROID(device, buffer, pProperties);
+
+    return result;
+}
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+VkResult DispatchGetMemoryAndroidHardwareBufferANDROID(
+    VkDevice                                    device,
+    const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
+    struct AHardwareBuffer**                    pBuffer)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetMemoryAndroidHardwareBufferANDROID(device, pInfo, pBuffer);
+    safe_VkMemoryGetAndroidHardwareBufferInfoANDROID var_local_pInfo;
+    safe_VkMemoryGetAndroidHardwareBufferInfoANDROID *local_pInfo = NULL;
+    {
+        if (pInfo) {
+            local_pInfo = &var_local_pInfo;
+            local_pInfo->initialize(pInfo);
+            if (pInfo->memory) {
+                local_pInfo->memory = layer_data->Unwrap(pInfo->memory);
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.GetMemoryAndroidHardwareBufferANDROID(device, (const VkMemoryGetAndroidHardwareBufferInfoANDROID*)local_pInfo, pBuffer);
+
+    return result;
+}
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+void DispatchCmdSetSampleLocationsEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkSampleLocationsInfoEXT*             pSampleLocationsInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdSetSampleLocationsEXT(commandBuffer, pSampleLocationsInfo);
+
+}
+
+void DispatchGetPhysicalDeviceMultisamplePropertiesEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSampleCountFlagBits                       samples,
+    VkMultisamplePropertiesEXT*                 pMultisampleProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    layer_data->instance_dispatch_table.GetPhysicalDeviceMultisamplePropertiesEXT(physicalDevice, samples, pMultisampleProperties);
+
+}
+
+VkResult DispatchGetImageDrmFormatModifierPropertiesEXT(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkImageDrmFormatModifierPropertiesEXT*      pProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetImageDrmFormatModifierPropertiesEXT(device, image, pProperties);
+    {
+        image = layer_data->Unwrap(image);
+    }
+    VkResult result = layer_data->device_dispatch_table.GetImageDrmFormatModifierPropertiesEXT(device, image, pProperties);
+
+    return result;
+}
+
+VkResult DispatchCreateValidationCacheEXT(
+    VkDevice                                    device,
+    const VkValidationCacheCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkValidationCacheEXT*                       pValidationCache)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CreateValidationCacheEXT(device, pCreateInfo, pAllocator, pValidationCache);
+    VkResult result = layer_data->device_dispatch_table.CreateValidationCacheEXT(device, pCreateInfo, pAllocator, pValidationCache);
+    if (VK_SUCCESS == result) {
+        *pValidationCache = layer_data->WrapNew(*pValidationCache);
+    }
+    return result;
+}
+
+void DispatchDestroyValidationCacheEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    const VkAllocationCallbacks*                pAllocator)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DestroyValidationCacheEXT(device, validationCache, pAllocator);
+    uint64_t validationCache_id = reinterpret_cast<uint64_t &>(validationCache);
+    auto iter = unique_id_mapping.pop(validationCache_id);
+    if (iter != unique_id_mapping.end()) {
+        validationCache = (VkValidationCacheEXT)iter->second;
+    } else {
+        validationCache = (VkValidationCacheEXT)0;
+    }
+    layer_data->device_dispatch_table.DestroyValidationCacheEXT(device, validationCache, pAllocator);
+
+}
+
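+// Handle arrays are unwrapped into a temporary copy before calling down: small
+// arrays reuse a DISPATCH_MAX_STACK_ALLOCATIONS-sized stack buffer, while larger
+// ones fall back to a heap allocation that is freed after the call.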
+VkResult DispatchMergeValidationCachesEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkValidationCacheEXT*                 pSrcCaches)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.MergeValidationCachesEXT(device, dstCache, srcCacheCount, pSrcCaches);
+    VkValidationCacheEXT var_local_pSrcCaches[DISPATCH_MAX_STACK_ALLOCATIONS];
+    VkValidationCacheEXT *local_pSrcCaches = NULL;
+    {
+        dstCache = layer_data->Unwrap(dstCache);
+        if (pSrcCaches) {
+            local_pSrcCaches = srcCacheCount > DISPATCH_MAX_STACK_ALLOCATIONS ? new VkValidationCacheEXT[srcCacheCount] : var_local_pSrcCaches;
+            for (uint32_t index0 = 0; index0 < srcCacheCount; ++index0) {
+                local_pSrcCaches[index0] = layer_data->Unwrap(pSrcCaches[index0]);
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.MergeValidationCachesEXT(device, dstCache, srcCacheCount, (const VkValidationCacheEXT*)local_pSrcCaches);
+    if (local_pSrcCaches != var_local_pSrcCaches)
+        delete[] local_pSrcCaches;
+    return result;
+}
+
+VkResult DispatchGetValidationCacheDataEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    size_t*                                     pDataSize,
+    void*                                       pData)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetValidationCacheDataEXT(device, validationCache, pDataSize, pData);
+    {
+        validationCache = layer_data->Unwrap(validationCache);
+    }
+    VkResult result = layer_data->device_dispatch_table.GetValidationCacheDataEXT(device, validationCache, pDataSize, pData);
+
+    return result;
+}
+
+void DispatchCmdBindShadingRateImageNV(
+    VkCommandBuffer                             commandBuffer,
+    VkImageView                                 imageView,
+    VkImageLayout                               imageLayout)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdBindShadingRateImageNV(commandBuffer, imageView, imageLayout);
+    {
+        imageView = layer_data->Unwrap(imageView);
+    }
+    layer_data->device_dispatch_table.CmdBindShadingRateImageNV(commandBuffer, imageView, imageLayout);
+
+}
+
+void DispatchCmdSetViewportShadingRatePaletteNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkShadingRatePaletteNV*               pShadingRatePalettes)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdSetViewportShadingRatePaletteNV(commandBuffer, firstViewport, viewportCount, pShadingRatePalettes);
+
+}
+
+void DispatchCmdSetCoarseSampleOrderNV(
+    VkCommandBuffer                             commandBuffer,
+    VkCoarseSampleOrderTypeNV                   sampleOrderType,
+    uint32_t                                    customSampleOrderCount,
+    const VkCoarseSampleOrderCustomNV*          pCustomSampleOrders)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdSetCoarseSampleOrderNV(commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders);
+
+}
+
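+// Acceleration structure creation deep-copies the create info into its safe_*
+// equivalent and unwraps the nested geometry buffer handles (vertex, index,
+// transform, and AABB data) before calling down; the resulting handle is wrapped
+// with WrapNew on success.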
+VkResult DispatchCreateAccelerationStructureNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureCreateInfoNV*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkAccelerationStructureNV*                  pAccelerationStructure)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CreateAccelerationStructureNV(device, pCreateInfo, pAllocator, pAccelerationStructure);
+    safe_VkAccelerationStructureCreateInfoNV var_local_pCreateInfo;
+    safe_VkAccelerationStructureCreateInfoNV *local_pCreateInfo = NULL;
+    {
+        if (pCreateInfo) {
+            local_pCreateInfo = &var_local_pCreateInfo;
+            local_pCreateInfo->initialize(pCreateInfo);
+            if (local_pCreateInfo->info.pGeometries) {
+                for (uint32_t index2 = 0; index2 < local_pCreateInfo->info.geometryCount; ++index2) {
+                    if (pCreateInfo->info.pGeometries[index2].geometry.triangles.vertexData) {
+                        local_pCreateInfo->info.pGeometries[index2].geometry.triangles.vertexData = layer_data->Unwrap(pCreateInfo->info.pGeometries[index2].geometry.triangles.vertexData);
+                    }
+                    if (pCreateInfo->info.pGeometries[index2].geometry.triangles.indexData) {
+                        local_pCreateInfo->info.pGeometries[index2].geometry.triangles.indexData = layer_data->Unwrap(pCreateInfo->info.pGeometries[index2].geometry.triangles.indexData);
+                    }
+                    if (pCreateInfo->info.pGeometries[index2].geometry.triangles.transformData) {
+                        local_pCreateInfo->info.pGeometries[index2].geometry.triangles.transformData = layer_data->Unwrap(pCreateInfo->info.pGeometries[index2].geometry.triangles.transformData);
+                    }
+                    if (pCreateInfo->info.pGeometries[index2].geometry.aabbs.aabbData) {
+                        local_pCreateInfo->info.pGeometries[index2].geometry.aabbs.aabbData = layer_data->Unwrap(pCreateInfo->info.pGeometries[index2].geometry.aabbs.aabbData);
+                    }
+                }
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.CreateAccelerationStructureNV(device, (const VkAccelerationStructureCreateInfoNV*)local_pCreateInfo, pAllocator, pAccelerationStructure);
+    if (VK_SUCCESS == result) {
+        *pAccelerationStructure = layer_data->WrapNew(*pAccelerationStructure);
+    }
+    return result;
+}
+
+void DispatchDestroyAccelerationStructureNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    const VkAllocationCallbacks*                pAllocator)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DestroyAccelerationStructureNV(device, accelerationStructure, pAllocator);
+    uint64_t accelerationStructure_id = reinterpret_cast<uint64_t &>(accelerationStructure);
+    auto iter = unique_id_mapping.pop(accelerationStructure_id);
+    if (iter != unique_id_mapping.end()) {
+        accelerationStructure = (VkAccelerationStructureNV)iter->second;
+    } else {
+        accelerationStructure = (VkAccelerationStructureNV)0;
+    }
+    layer_data->device_dispatch_table.DestroyAccelerationStructureNV(device, accelerationStructure, pAllocator);
+
+}
+
+void DispatchGetAccelerationStructureMemoryRequirementsNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
+    VkMemoryRequirements2KHR*                   pMemoryRequirements)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetAccelerationStructureMemoryRequirementsNV(device, pInfo, pMemoryRequirements);
+    safe_VkAccelerationStructureMemoryRequirementsInfoNV var_local_pInfo;
+    safe_VkAccelerationStructureMemoryRequirementsInfoNV *local_pInfo = NULL;
+    {
+        if (pInfo) {
+            local_pInfo = &var_local_pInfo;
+            local_pInfo->initialize(pInfo);
+            if (pInfo->accelerationStructure) {
+                local_pInfo->accelerationStructure = layer_data->Unwrap(pInfo->accelerationStructure);
+            }
+        }
+    }
+    layer_data->device_dispatch_table.GetAccelerationStructureMemoryRequirementsNV(device, (const VkAccelerationStructureMemoryRequirementsInfoNV*)local_pInfo, pMemoryRequirements);
+
+}
+
+VkResult DispatchBindAccelerationStructureMemoryNV(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindAccelerationStructureMemoryInfoNV* pBindInfos)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.BindAccelerationStructureMemoryNV(device, bindInfoCount, pBindInfos);
+    safe_VkBindAccelerationStructureMemoryInfoNV *local_pBindInfos = NULL;
+    {
+        if (pBindInfos) {
+            local_pBindInfos = new safe_VkBindAccelerationStructureMemoryInfoNV[bindInfoCount];
+            for (uint32_t index0 = 0; index0 < bindInfoCount; ++index0) {
+                local_pBindInfos[index0].initialize(&pBindInfos[index0]);
+                if (pBindInfos[index0].accelerationStructure) {
+                    local_pBindInfos[index0].accelerationStructure = layer_data->Unwrap(pBindInfos[index0].accelerationStructure);
+                }
+                if (pBindInfos[index0].memory) {
+                    local_pBindInfos[index0].memory = layer_data->Unwrap(pBindInfos[index0].memory);
+                }
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.BindAccelerationStructureMemoryNV(device, bindInfoCount, (const VkBindAccelerationStructureMemoryInfoNV*)local_pBindInfos);
+    if (local_pBindInfos) {
+        delete[] local_pBindInfos;
+    }
+    return result;
+}
+
+void DispatchCmdBuildAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    const VkAccelerationStructureInfoNV*        pInfo,
+    VkBuffer                                    instanceData,
+    VkDeviceSize                                instanceOffset,
+    VkBool32                                    update,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkBuffer                                    scratch,
+    VkDeviceSize                                scratchOffset)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdBuildAccelerationStructureNV(commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset);
+    safe_VkAccelerationStructureInfoNV var_local_pInfo;
+    safe_VkAccelerationStructureInfoNV *local_pInfo = NULL;
+    {
+        if (pInfo) {
+            local_pInfo = &var_local_pInfo;
+            local_pInfo->initialize(pInfo);
+            if (local_pInfo->pGeometries) {
+                for (uint32_t index1 = 0; index1 < local_pInfo->geometryCount; ++index1) {
+                    if (pInfo->pGeometries[index1].geometry.triangles.vertexData) {
+                        local_pInfo->pGeometries[index1].geometry.triangles.vertexData = layer_data->Unwrap(pInfo->pGeometries[index1].geometry.triangles.vertexData);
+                    }
+                    if (pInfo->pGeometries[index1].geometry.triangles.indexData) {
+                        local_pInfo->pGeometries[index1].geometry.triangles.indexData = layer_data->Unwrap(pInfo->pGeometries[index1].geometry.triangles.indexData);
+                    }
+                    if (pInfo->pGeometries[index1].geometry.triangles.transformData) {
+                        local_pInfo->pGeometries[index1].geometry.triangles.transformData = layer_data->Unwrap(pInfo->pGeometries[index1].geometry.triangles.transformData);
+                    }
+                    if (pInfo->pGeometries[index1].geometry.aabbs.aabbData) {
+                        local_pInfo->pGeometries[index1].geometry.aabbs.aabbData = layer_data->Unwrap(pInfo->pGeometries[index1].geometry.aabbs.aabbData);
+                    }
+                }
+            }
+        }
+        instanceData = layer_data->Unwrap(instanceData);
+        dst = layer_data->Unwrap(dst);
+        src = layer_data->Unwrap(src);
+        scratch = layer_data->Unwrap(scratch);
+    }
+    layer_data->device_dispatch_table.CmdBuildAccelerationStructureNV(commandBuffer, (const VkAccelerationStructureInfoNV*)local_pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset);
+
+}
+
+void DispatchCmdCopyAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkCopyAccelerationStructureModeNV           mode)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdCopyAccelerationStructureNV(commandBuffer, dst, src, mode);
+    {
+        dst = layer_data->Unwrap(dst);
+        src = layer_data->Unwrap(src);
+    }
+    layer_data->device_dispatch_table.CmdCopyAccelerationStructureNV(commandBuffer, dst, src, mode);
+
+}
+
+void DispatchCmdTraceRaysNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    raygenShaderBindingTableBuffer,
+    VkDeviceSize                                raygenShaderBindingOffset,
+    VkBuffer                                    missShaderBindingTableBuffer,
+    VkDeviceSize                                missShaderBindingOffset,
+    VkDeviceSize                                missShaderBindingStride,
+    VkBuffer                                    hitShaderBindingTableBuffer,
+    VkDeviceSize                                hitShaderBindingOffset,
+    VkDeviceSize                                hitShaderBindingStride,
+    VkBuffer                                    callableShaderBindingTableBuffer,
+    VkDeviceSize                                callableShaderBindingOffset,
+    VkDeviceSize                                callableShaderBindingStride,
+    uint32_t                                    width,
+    uint32_t                                    height,
+    uint32_t                                    depth)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdTraceRaysNV(commandBuffer, raygenShaderBindingTableBuffer, raygenShaderBindingOffset, missShaderBindingTableBuffer, missShaderBindingOffset, missShaderBindingStride, hitShaderBindingTableBuffer, hitShaderBindingOffset, hitShaderBindingStride, callableShaderBindingTableBuffer, callableShaderBindingOffset, callableShaderBindingStride, width, height, depth);
+    {
+        raygenShaderBindingTableBuffer = layer_data->Unwrap(raygenShaderBindingTableBuffer);
+        missShaderBindingTableBuffer = layer_data->Unwrap(missShaderBindingTableBuffer);
+        hitShaderBindingTableBuffer = layer_data->Unwrap(hitShaderBindingTableBuffer);
+        callableShaderBindingTableBuffer = layer_data->Unwrap(callableShaderBindingTableBuffer);
+    }
+    layer_data->device_dispatch_table.CmdTraceRaysNV(commandBuffer, raygenShaderBindingTableBuffer, raygenShaderBindingOffset, missShaderBindingTableBuffer, missShaderBindingOffset, missShaderBindingStride, hitShaderBindingTableBuffer, hitShaderBindingOffset, hitShaderBindingStride, callableShaderBindingTableBuffer, callableShaderBindingOffset, callableShaderBindingStride, width, height, depth);
+
+}
+
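+// Ray tracing pipeline creation unwraps the pipeline cache, shader modules,
+// pipeline layouts, and base pipeline handles in a deep copy of the create infos,
+// copies any pipeline-creation-feedback data back to the caller's structures, and
+// wraps the returned pipelines.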
+VkResult DispatchCreateRayTracingPipelinesNV(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkRayTracingPipelineCreateInfoNV*     pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+    safe_VkRayTracingPipelineCreateInfoNV *local_pCreateInfos = NULL;
+    {
+        pipelineCache = layer_data->Unwrap(pipelineCache);
+        if (pCreateInfos) {
+            local_pCreateInfos = new safe_VkRayTracingPipelineCreateInfoNV[createInfoCount];
+            for (uint32_t index0 = 0; index0 < createInfoCount; ++index0) {
+                local_pCreateInfos[index0].initialize(&pCreateInfos[index0]);
+                if (local_pCreateInfos[index0].pStages) {
+                    for (uint32_t index1 = 0; index1 < local_pCreateInfos[index0].stageCount; ++index1) {
+                        if (pCreateInfos[index0].pStages[index1].module) {
+                            local_pCreateInfos[index0].pStages[index1].module = layer_data->Unwrap(pCreateInfos[index0].pStages[index1].module);
+                        }
+                    }
+                }
+                if (pCreateInfos[index0].layout) {
+                    local_pCreateInfos[index0].layout = layer_data->Unwrap(pCreateInfos[index0].layout);
+                }
+                if (pCreateInfos[index0].basePipelineHandle) {
+                    local_pCreateInfos[index0].basePipelineHandle = layer_data->Unwrap(pCreateInfos[index0].basePipelineHandle);
+                }
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.CreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, (const VkRayTracingPipelineCreateInfoNV*)local_pCreateInfos, pAllocator, pPipelines);
+    for (uint32_t i = 0; i < createInfoCount; ++i) {
+        if (pCreateInfos[i].pNext != VK_NULL_HANDLE) {
+            CopyCreatePipelineFeedbackData(local_pCreateInfos[i].pNext, pCreateInfos[i].pNext);
+        }
+    }
+
+    if (local_pCreateInfos) {
+        delete[] local_pCreateInfos;
+    }
+    {
+        for (uint32_t index0 = 0; index0 < createInfoCount; index0++) {
+            if (pPipelines[index0] != VK_NULL_HANDLE) {
+                pPipelines[index0] = layer_data->WrapNew(pPipelines[index0]);
+            }
+        }
+    }
+    return result;
+}
+
+VkResult DispatchGetRayTracingShaderGroupHandlesNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    firstGroup,
+    uint32_t                                    groupCount,
+    size_t                                      dataSize,
+    void*                                       pData)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetRayTracingShaderGroupHandlesNV(device, pipeline, firstGroup, groupCount, dataSize, pData);
+    {
+        pipeline = layer_data->Unwrap(pipeline);
+    }
+    VkResult result = layer_data->device_dispatch_table.GetRayTracingShaderGroupHandlesNV(device, pipeline, firstGroup, groupCount, dataSize, pData);
+
+    return result;
+}
+
+VkResult DispatchGetAccelerationStructureHandleNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    size_t                                      dataSize,
+    void*                                       pData)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetAccelerationStructureHandleNV(device, accelerationStructure, dataSize, pData);
+    {
+        accelerationStructure = layer_data->Unwrap(accelerationStructure);
+    }
+    VkResult result = layer_data->device_dispatch_table.GetAccelerationStructureHandleNV(device, accelerationStructure, dataSize, pData);
+
+    return result;
+}
+
+void DispatchCmdWriteAccelerationStructuresPropertiesNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    accelerationStructureCount,
+    const VkAccelerationStructureNV*            pAccelerationStructures,
+    VkQueryType                                 queryType,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdWriteAccelerationStructuresPropertiesNV(commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery);
+    VkAccelerationStructureNV var_local_pAccelerationStructures[DISPATCH_MAX_STACK_ALLOCATIONS];
+    VkAccelerationStructureNV *local_pAccelerationStructures = NULL;
+    {
+        if (pAccelerationStructures) {
+            local_pAccelerationStructures = accelerationStructureCount > DISPATCH_MAX_STACK_ALLOCATIONS ? new VkAccelerationStructureNV[accelerationStructureCount] : var_local_pAccelerationStructures;
+            for (uint32_t index0 = 0; index0 < accelerationStructureCount; ++index0) {
+                local_pAccelerationStructures[index0] = layer_data->Unwrap(pAccelerationStructures[index0]);
+            }
+        }
+        queryPool = layer_data->Unwrap(queryPool);
+    }
+    layer_data->device_dispatch_table.CmdWriteAccelerationStructuresPropertiesNV(commandBuffer, accelerationStructureCount, (const VkAccelerationStructureNV*)local_pAccelerationStructures, queryType, queryPool, firstQuery);
+    if (local_pAccelerationStructures != var_local_pAccelerationStructures)
+        delete[] local_pAccelerationStructures;
+}
+
+VkResult DispatchCompileDeferredNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    shader)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CompileDeferredNV(device, pipeline, shader);
+    {
+        pipeline = layer_data->Unwrap(pipeline);
+    }
+    VkResult result = layer_data->device_dispatch_table.CompileDeferredNV(device, pipeline, shader);
+
+    return result;
+}
+
+VkResult DispatchGetMemoryHostPointerPropertiesEXT(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    const void*                                 pHostPointer,
+    VkMemoryHostPointerPropertiesEXT*           pMemoryHostPointerProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    VkResult result = layer_data->device_dispatch_table.GetMemoryHostPointerPropertiesEXT(device, handleType, pHostPointer, pMemoryHostPointerProperties);
+
+    return result;
+}
+
+void DispatchCmdWriteBufferMarkerAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    uint32_t                                    marker)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdWriteBufferMarkerAMD(commandBuffer, pipelineStage, dstBuffer, dstOffset, marker);
+    {
+        dstBuffer = layer_data->Unwrap(dstBuffer);
+    }
+    layer_data->device_dispatch_table.CmdWriteBufferMarkerAMD(commandBuffer, pipelineStage, dstBuffer, dstOffset, marker);
+
+}
+
+VkResult DispatchGetPhysicalDeviceCalibrateableTimeDomainsEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pTimeDomainCount,
+    VkTimeDomainEXT*                            pTimeDomains)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceCalibrateableTimeDomainsEXT(physicalDevice, pTimeDomainCount, pTimeDomains);
+
+    return result;
+}
+
+VkResult DispatchGetCalibratedTimestampsEXT(
+    VkDevice                                    device,
+    uint32_t                                    timestampCount,
+    const VkCalibratedTimestampInfoEXT*         pTimestampInfos,
+    uint64_t*                                   pTimestamps,
+    uint64_t*                                   pMaxDeviation)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    VkResult result = layer_data->device_dispatch_table.GetCalibratedTimestampsEXT(device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation);
+
+    return result;
+}
+
+void DispatchCmdDrawMeshTasksNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    taskCount,
+    uint32_t                                    firstTask)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdDrawMeshTasksNV(commandBuffer, taskCount, firstTask);
+
+}
+
+void DispatchCmdDrawMeshTasksIndirectNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdDrawMeshTasksIndirectNV(commandBuffer, buffer, offset, drawCount, stride);
+    {
+        buffer = layer_data->Unwrap(buffer);
+    }
+    layer_data->device_dispatch_table.CmdDrawMeshTasksIndirectNV(commandBuffer, buffer, offset, drawCount, stride);
+
+}
+
+void DispatchCmdDrawMeshTasksIndirectCountNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CmdDrawMeshTasksIndirectCountNV(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+    {
+        buffer = layer_data->Unwrap(buffer);
+        countBuffer = layer_data->Unwrap(countBuffer);
+    }
+    layer_data->device_dispatch_table.CmdDrawMeshTasksIndirectCountNV(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+
+}
+
+void DispatchCmdSetExclusiveScissorNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstExclusiveScissor,
+    uint32_t                                    exclusiveScissorCount,
+    const VkRect2D*                             pExclusiveScissors)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdSetExclusiveScissorNV(commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors);
+
+}
+
+void DispatchCmdSetCheckpointNV(
+    VkCommandBuffer                             commandBuffer,
+    const void*                                 pCheckpointMarker)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdSetCheckpointNV(commandBuffer, pCheckpointMarker);
+
+}
+
+void DispatchGetQueueCheckpointDataNV(
+    VkQueue                                     queue,
+    uint32_t*                                   pCheckpointDataCount,
+    VkCheckpointDataNV*                         pCheckpointData)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
+    layer_data->device_dispatch_table.GetQueueCheckpointDataNV(queue, pCheckpointDataCount, pCheckpointData);
+
+}
+
+VkResult DispatchInitializePerformanceApiINTEL(
+    VkDevice                                    device,
+    const VkInitializePerformanceApiInfoINTEL*  pInitializeInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    VkResult result = layer_data->device_dispatch_table.InitializePerformanceApiINTEL(device, pInitializeInfo);
+
+    return result;
+}
+
+void DispatchUninitializePerformanceApiINTEL(
+    VkDevice                                    device)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    layer_data->device_dispatch_table.UninitializePerformanceApiINTEL(device);
+
+}
+
+VkResult DispatchCmdSetPerformanceMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceMarkerInfoINTEL*         pMarkerInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    VkResult result = layer_data->device_dispatch_table.CmdSetPerformanceMarkerINTEL(commandBuffer, pMarkerInfo);
+
+    return result;
+}
+
+VkResult DispatchCmdSetPerformanceStreamMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceStreamMarkerInfoINTEL*   pMarkerInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    VkResult result = layer_data->device_dispatch_table.CmdSetPerformanceStreamMarkerINTEL(commandBuffer, pMarkerInfo);
+
+    return result;
+}
+
+VkResult DispatchCmdSetPerformanceOverrideINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceOverrideInfoINTEL*       pOverrideInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    VkResult result = layer_data->device_dispatch_table.CmdSetPerformanceOverrideINTEL(commandBuffer, pOverrideInfo);
+
+    return result;
+}
+
+VkResult DispatchAcquirePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo,
+    VkPerformanceConfigurationINTEL*            pConfiguration)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.AcquirePerformanceConfigurationINTEL(device, pAcquireInfo, pConfiguration);
+    {
+        pConfiguration = layer_data->Unwrap(pConfiguration);
+    }
+    VkResult result = layer_data->device_dispatch_table.AcquirePerformanceConfigurationINTEL(device, pAcquireInfo, pConfiguration);
+
+    return result;
+}
+
+VkResult DispatchReleasePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    VkPerformanceConfigurationINTEL             configuration)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.ReleasePerformanceConfigurationINTEL(device, configuration);
+    {
+        configuration = layer_data->Unwrap(configuration);
+    }
+    VkResult result = layer_data->device_dispatch_table.ReleasePerformanceConfigurationINTEL(device, configuration);
+
+    return result;
+}
+
+VkResult DispatchQueueSetPerformanceConfigurationINTEL(
+    VkQueue                                     queue,
+    VkPerformanceConfigurationINTEL             configuration)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.QueueSetPerformanceConfigurationINTEL(queue, configuration);
+    {
+        configuration = layer_data->Unwrap(configuration);
+    }
+    VkResult result = layer_data->device_dispatch_table.QueueSetPerformanceConfigurationINTEL(queue, configuration);
+
+    return result;
+}
+
+VkResult DispatchGetPerformanceParameterINTEL(
+    VkDevice                                    device,
+    VkPerformanceParameterTypeINTEL             parameter,
+    VkPerformanceValueINTEL*                    pValue)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    VkResult result = layer_data->device_dispatch_table.GetPerformanceParameterINTEL(device, parameter, pValue);
+
+    return result;
+}
+
+void DispatchSetLocalDimmingAMD(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapChain,
+    VkBool32                                    localDimmingEnable)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.SetLocalDimmingAMD(device, swapChain, localDimmingEnable);
+    {
+        swapChain = layer_data->Unwrap(swapChain);
+    }
+    layer_data->device_dispatch_table.SetLocalDimmingAMD(device, swapChain, localDimmingEnable);
+
+}
+
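+// The platform-specific entry points below (Fuchsia and Metal surface creation,
+// the Win32 full-screen-exclusive and present-mode queries) wrap newly created
+// VkSurfaceKHR handles on success and unwrap surface/swapchain handles that are
+// passed back in.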
+#ifdef VK_USE_PLATFORM_FUCHSIA
+
+VkResult DispatchCreateImagePipeSurfaceFUCHSIA(
+    VkInstance                                  instance,
+    const VkImagePipeSurfaceCreateInfoFUCHSIA*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.CreateImagePipeSurfaceFUCHSIA(instance, pCreateInfo, pAllocator, pSurface);
+    VkResult result = layer_data->instance_dispatch_table.CreateImagePipeSurfaceFUCHSIA(instance, pCreateInfo, pAllocator, pSurface);
+    if (VK_SUCCESS == result) {
+        *pSurface = layer_data->WrapNew(*pSurface);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_FUCHSIA
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+
+VkResult DispatchCreateMetalSurfaceEXT(
+    VkInstance                                  instance,
+    const VkMetalSurfaceCreateInfoEXT*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.CreateMetalSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface);
+    VkResult result = layer_data->instance_dispatch_table.CreateMetalSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface);
+    if (VK_SUCCESS == result) {
+        *pSurface = layer_data->WrapNew(*pSurface);
+    }
+    return result;
+}
+#endif // VK_USE_PLATFORM_METAL_EXT
+
+VkDeviceAddress DispatchGetBufferDeviceAddressEXT(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetBufferDeviceAddressEXT(device, pInfo);
+    safe_VkBufferDeviceAddressInfoKHR var_local_pInfo;
+    safe_VkBufferDeviceAddressInfoKHR *local_pInfo = NULL;
+    {
+        if (pInfo) {
+            local_pInfo = &var_local_pInfo;
+            local_pInfo->initialize(pInfo);
+            if (pInfo->buffer) {
+                local_pInfo->buffer = layer_data->Unwrap(pInfo->buffer);
+            }
+        }
+    }
+    VkDeviceAddress result = layer_data->device_dispatch_table.GetBufferDeviceAddressEXT(device, (const VkBufferDeviceAddressInfoKHR*)local_pInfo);
+
+    return result;
+}
+
+VkResult DispatchGetPhysicalDeviceToolPropertiesEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pToolCount,
+    VkPhysicalDeviceToolPropertiesEXT*          pToolProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceToolPropertiesEXT(physicalDevice, pToolCount, pToolProperties);
+
+    return result;
+}
+
+VkResult DispatchGetPhysicalDeviceCooperativeMatrixPropertiesNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkCooperativeMatrixPropertiesNV*            pProperties)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(physicalDevice, pPropertyCount, pProperties);
+
+    return result;
+}
+
+VkResult DispatchGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pCombinationCount,
+    VkFramebufferMixedSamplesCombinationNV*     pCombinations)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(physicalDevice, pCombinationCount, pCombinations);
+
+    return result;
+}
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+VkResult DispatchGetPhysicalDeviceSurfacePresentModes2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.GetPhysicalDeviceSurfacePresentModes2EXT(physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes);
+    safe_VkPhysicalDeviceSurfaceInfo2KHR var_local_pSurfaceInfo;
+    safe_VkPhysicalDeviceSurfaceInfo2KHR *local_pSurfaceInfo = NULL;
+    {
+        if (pSurfaceInfo) {
+            local_pSurfaceInfo = &var_local_pSurfaceInfo;
+            local_pSurfaceInfo->initialize(pSurfaceInfo);
+            if (pSurfaceInfo->surface) {
+                local_pSurfaceInfo->surface = layer_data->Unwrap(pSurfaceInfo->surface);
+            }
+        }
+    }
+    VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceSurfacePresentModes2EXT(physicalDevice, (const VkPhysicalDeviceSurfaceInfo2KHR*)local_pSurfaceInfo, pPresentModeCount, pPresentModes);
+
+    return result;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+VkResult DispatchAcquireFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.AcquireFullScreenExclusiveModeEXT(device, swapchain);
+    {
+        swapchain = layer_data->Unwrap(swapchain);
+    }
+    VkResult result = layer_data->device_dispatch_table.AcquireFullScreenExclusiveModeEXT(device, swapchain);
+
+    return result;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+VkResult DispatchReleaseFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.ReleaseFullScreenExclusiveModeEXT(device, swapchain);
+    {
+        swapchain = layer_data->Unwrap(swapchain);
+    }
+    VkResult result = layer_data->device_dispatch_table.ReleaseFullScreenExclusiveModeEXT(device, swapchain);
+
+    return result;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+VkResult DispatchGetDeviceGroupSurfacePresentModes2EXT(
+    VkDevice                                    device,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.GetDeviceGroupSurfacePresentModes2EXT(device, pSurfaceInfo, pModes);
+    safe_VkPhysicalDeviceSurfaceInfo2KHR var_local_pSurfaceInfo;
+    safe_VkPhysicalDeviceSurfaceInfo2KHR *local_pSurfaceInfo = NULL;
+    {
+        if (pSurfaceInfo) {
+            local_pSurfaceInfo = &var_local_pSurfaceInfo;
+            local_pSurfaceInfo->initialize(pSurfaceInfo);
+            if (pSurfaceInfo->surface) {
+                local_pSurfaceInfo->surface = layer_data->Unwrap(pSurfaceInfo->surface);
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.GetDeviceGroupSurfacePresentModes2EXT(device, (const VkPhysicalDeviceSurfaceInfo2KHR*)local_pSurfaceInfo, pModes);
+
+    return result;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+VkResult DispatchCreateHeadlessSurfaceEXT(
+    VkInstance                                  instance,
+    const VkHeadlessSurfaceCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    if (!wrap_handles) return layer_data->instance_dispatch_table.CreateHeadlessSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface);
+    VkResult result = layer_data->instance_dispatch_table.CreateHeadlessSurfaceEXT(instance, pCreateInfo, pAllocator, pSurface);
+    if (VK_SUCCESS == result) {
+        *pSurface = layer_data->WrapNew(*pSurface);
+    }
+    return result;
+}
+
+void DispatchCmdSetLineStippleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    lineStippleFactor,
+    uint16_t                                    lineStipplePattern)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    layer_data->device_dispatch_table.CmdSetLineStippleEXT(commandBuffer, lineStippleFactor, lineStipplePattern);
+
+}
+
+void DispatchResetQueryPoolEXT(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount)
+{
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.ResetQueryPoolEXT(device, queryPool, firstQuery, queryCount);
+    {
+        queryPool = layer_data->Unwrap(queryPool);
+    }
+    layer_data->device_dispatch_table.ResetQueryPoolEXT(device, queryPool, firstQuery, queryCount);
+
+}
\ No newline at end of file
diff --git a/src/third_party/vulkan-validation-layers/src/layers/generated/layer_chassis_dispatch.h b/src/third_party/vulkan-validation-layers/src/layers/generated/layer_chassis_dispatch.h
new file mode 100644
index 0000000..cd4e984
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/generated/layer_chassis_dispatch.h
@@ -0,0 +1,1832 @@
+
+
+// This file is ***GENERATED***.  Do Not Edit.
+// See layer_chassis_dispatch_generator.py for modifications.
+
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ */
+#pragma once
+
+#if defined(LAYER_CHASSIS_CAN_WRAP_HANDLES)
+extern bool wrap_handles;
+#else
+extern bool wrap_handles;
+#endif
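+// One Dispatch* prototype is declared per Vulkan entry point; the definitions in
+// layer_chassis_dispatch.cc unwrap and rewrap handles around the call into the
+// next layer when wrap_handles is set.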
+VkResult DispatchCreateInstance(
+    const VkInstanceCreateInfo*                 pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkInstance*                                 pInstance);
+void DispatchDestroyInstance(
+    VkInstance                                  instance,
+    const VkAllocationCallbacks*                pAllocator);
+VkResult DispatchEnumeratePhysicalDevices(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceCount,
+    VkPhysicalDevice*                           pPhysicalDevices);
+void DispatchGetPhysicalDeviceFeatures(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures*                   pFeatures);
+void DispatchGetPhysicalDeviceFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties*                         pFormatProperties);
+VkResult DispatchGetPhysicalDeviceImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageCreateFlags                          flags,
+    VkImageFormatProperties*                    pImageFormatProperties);
+void DispatchGetPhysicalDeviceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties*                 pProperties);
+void DispatchGetPhysicalDeviceQueueFamilyProperties(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties*                    pQueueFamilyProperties);
+void DispatchGetPhysicalDeviceMemoryProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties*           pMemoryProperties);
+PFN_vkVoidFunction DispatchGetInstanceProcAddr(
+    VkInstance                                  instance,
+    const char*                                 pName);
+PFN_vkVoidFunction DispatchGetDeviceProcAddr(
+    VkDevice                                    device,
+    const char*                                 pName);
+VkResult DispatchCreateDevice(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDeviceCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDevice*                                   pDevice);
+void DispatchDestroyDevice(
+    VkDevice                                    device,
+    const VkAllocationCallbacks*                pAllocator);
+VkResult DispatchEnumerateInstanceExtensionProperties(
+    const char*                                 pLayerName,
+    uint32_t*                                   pPropertyCount,
+    VkExtensionProperties*                      pProperties);
+VkResult DispatchEnumerateDeviceExtensionProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const char*                                 pLayerName,
+    uint32_t*                                   pPropertyCount,
+    VkExtensionProperties*                      pProperties);
+VkResult DispatchEnumerateInstanceLayerProperties(
+    uint32_t*                                   pPropertyCount,
+    VkLayerProperties*                          pProperties);
+VkResult DispatchEnumerateDeviceLayerProperties(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkLayerProperties*                          pProperties);
+void DispatchGetDeviceQueue(
+    VkDevice                                    device,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t                                    queueIndex,
+    VkQueue*                                    pQueue);
+VkResult DispatchQueueSubmit(
+    VkQueue                                     queue,
+    uint32_t                                    submitCount,
+    const VkSubmitInfo*                         pSubmits,
+    VkFence                                     fence);
+VkResult DispatchQueueWaitIdle(
+    VkQueue                                     queue);
+VkResult DispatchDeviceWaitIdle(
+    VkDevice                                    device);
+VkResult DispatchAllocateMemory(
+    VkDevice                                    device,
+    const VkMemoryAllocateInfo*                 pAllocateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDeviceMemory*                             pMemory);
+void DispatchFreeMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    const VkAllocationCallbacks*                pAllocator);
+VkResult DispatchMapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                offset,
+    VkDeviceSize                                size,
+    VkMemoryMapFlags                            flags,
+    void**                                      ppData);
+void DispatchUnmapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory);
+VkResult DispatchFlushMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges);
+VkResult DispatchInvalidateMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges);
+void DispatchGetDeviceMemoryCommitment(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize*                               pCommittedMemoryInBytes);
+VkResult DispatchBindBufferMemory(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset);
+VkResult DispatchBindImageMemory(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset);
+void DispatchGetBufferMemoryRequirements(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkMemoryRequirements*                       pMemoryRequirements);
+void DispatchGetImageMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkMemoryRequirements*                       pMemoryRequirements);
+void DispatchGetImageSparseMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements*            pSparseMemoryRequirements);
+void DispatchGetPhysicalDeviceSparseImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkSampleCountFlagBits                       samples,
+    VkImageUsageFlags                           usage,
+    VkImageTiling                               tiling,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties*              pProperties);
+VkResult DispatchQueueBindSparse(
+    VkQueue                                     queue,
+    uint32_t                                    bindInfoCount,
+    const VkBindSparseInfo*                     pBindInfo,
+    VkFence                                     fence);
+VkResult DispatchCreateFence(
+    VkDevice                                    device,
+    const VkFenceCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence);
+void DispatchDestroyFence(
+    VkDevice                                    device,
+    VkFence                                     fence,
+    const VkAllocationCallbacks*                pAllocator);
+VkResult DispatchResetFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences);
+VkResult DispatchGetFenceStatus(
+    VkDevice                                    device,
+    VkFence                                     fence);
+VkResult DispatchWaitForFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences,
+    VkBool32                                    waitAll,
+    uint64_t                                    timeout);
+VkResult DispatchCreateSemaphore(
+    VkDevice                                    device,
+    const VkSemaphoreCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSemaphore*                                pSemaphore);
+void DispatchDestroySemaphore(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    const VkAllocationCallbacks*                pAllocator);
+VkResult DispatchCreateEvent(
+    VkDevice                                    device,
+    const VkEventCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkEvent*                                    pEvent);
+void DispatchDestroyEvent(
+    VkDevice                                    device,
+    VkEvent                                     event,
+    const VkAllocationCallbacks*                pAllocator);
+VkResult DispatchGetEventStatus(
+    VkDevice                                    device,
+    VkEvent                                     event);
+VkResult DispatchSetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event);
+VkResult DispatchResetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event);
+VkResult DispatchCreateQueryPool(
+    VkDevice                                    device,
+    const VkQueryPoolCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkQueryPool*                                pQueryPool);
+void DispatchDestroyQueryPool(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    const VkAllocationCallbacks*                pAllocator);
+VkResult DispatchGetQueryPoolResults(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    size_t                                      dataSize,
+    void*                                       pData,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags);
+VkResult DispatchCreateBuffer(
+    VkDevice                                    device,
+    const VkBufferCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBuffer*                                   pBuffer);
+void DispatchDestroyBuffer(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    const VkAllocationCallbacks*                pAllocator);
+VkResult DispatchCreateBufferView(
+    VkDevice                                    device,
+    const VkBufferViewCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBufferView*                               pView);
+void DispatchDestroyBufferView(
+    VkDevice                                    device,
+    VkBufferView                                bufferView,
+    const VkAllocationCallbacks*                pAllocator);
+VkResult DispatchCreateImage(
+    VkDevice                                    device,
+    const VkImageCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImage*                                    pImage);
+void DispatchDestroyImage(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkAllocationCallbacks*                pAllocator);
+void DispatchGetImageSubresourceLayout(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkImageSubresource*                   pSubresource,
+    VkSubresourceLayout*                        pLayout);
+VkResult DispatchCreateImageView(
+    VkDevice                                    device,
+    const VkImageViewCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImageView*                                pView);
+void DispatchDestroyImageView(
+    VkDevice                                    device,
+    VkImageView                                 imageView,
+    const VkAllocationCallbacks*                pAllocator);
+VkResult DispatchCreateShaderModule(
+    VkDevice                                    device,
+    const VkShaderModuleCreateInfo*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkShaderModule*                             pShaderModule);
+void DispatchDestroyShaderModule(
+    VkDevice                                    device,
+    VkShaderModule                              shaderModule,
+    const VkAllocationCallbacks*                pAllocator);
+VkResult DispatchCreatePipelineCache(
+    VkDevice                                    device,
+    const VkPipelineCacheCreateInfo*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineCache*                            pPipelineCache);
+void DispatchDestroyPipelineCache(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    const VkAllocationCallbacks*                pAllocator);
+VkResult DispatchGetPipelineCacheData(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    size_t*                                     pDataSize,
+    void*                                       pData);
+VkResult DispatchMergePipelineCaches(
+    VkDevice                                    device,
+    VkPipelineCache                             dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkPipelineCache*                      pSrcCaches);
+VkResult DispatchCreateGraphicsPipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkGraphicsPipelineCreateInfo*         pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines);
+VkResult DispatchCreateComputePipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkComputePipelineCreateInfo*          pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines);
+void DispatchDestroyPipeline(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    const VkAllocationCallbacks*                pAllocator);
+VkResult DispatchCreatePipelineLayout(
+    VkDevice                                    device,
+    const VkPipelineLayoutCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineLayout*                           pPipelineLayout);
+void DispatchDestroyPipelineLayout(
+    VkDevice                                    device,
+    VkPipelineLayout                            pipelineLayout,
+    const VkAllocationCallbacks*                pAllocator);
+VkResult DispatchCreateSampler(
+    VkDevice                                    device,
+    const VkSamplerCreateInfo*                  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSampler*                                  pSampler);
+void DispatchDestroySampler(
+    VkDevice                                    device,
+    VkSampler                                   sampler,
+    const VkAllocationCallbacks*                pAllocator);
+VkResult DispatchCreateDescriptorSetLayout(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorSetLayout*                      pSetLayout);
+void DispatchDestroyDescriptorSetLayout(
+    VkDevice                                    device,
+    VkDescriptorSetLayout                       descriptorSetLayout,
+    const VkAllocationCallbacks*                pAllocator);
+VkResult DispatchCreateDescriptorPool(
+    VkDevice                                    device,
+    const VkDescriptorPoolCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorPool*                           pDescriptorPool);
+void DispatchDestroyDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    const VkAllocationCallbacks*                pAllocator);
+VkResult DispatchResetDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    VkDescriptorPoolResetFlags                  flags);
+VkResult DispatchAllocateDescriptorSets(
+    VkDevice                                    device,
+    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
+    VkDescriptorSet*                            pDescriptorSets);
+VkResult DispatchFreeDescriptorSets(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets);
+void DispatchUpdateDescriptorSets(
+    VkDevice                                    device,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites,
+    uint32_t                                    descriptorCopyCount,
+    const VkCopyDescriptorSet*                  pDescriptorCopies);
+VkResult DispatchCreateFramebuffer(
+    VkDevice                                    device,
+    const VkFramebufferCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFramebuffer*                              pFramebuffer);
+void DispatchDestroyFramebuffer(
+    VkDevice                                    device,
+    VkFramebuffer                               framebuffer,
+    const VkAllocationCallbacks*                pAllocator);
+VkResult DispatchCreateRenderPass(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass);
+void DispatchDestroyRenderPass(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    const VkAllocationCallbacks*                pAllocator);
+void DispatchGetRenderAreaGranularity(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    VkExtent2D*                                 pGranularity);
+VkResult DispatchCreateCommandPool(
+    VkDevice                                    device,
+    const VkCommandPoolCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkCommandPool*                              pCommandPool);
+void DispatchDestroyCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    const VkAllocationCallbacks*                pAllocator);
+VkResult DispatchResetCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolResetFlags                     flags);
+VkResult DispatchAllocateCommandBuffers(
+    VkDevice                                    device,
+    const VkCommandBufferAllocateInfo*          pAllocateInfo,
+    VkCommandBuffer*                            pCommandBuffers);
+void DispatchFreeCommandBuffers(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers);
+VkResult DispatchBeginCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    const VkCommandBufferBeginInfo*             pBeginInfo);
+VkResult DispatchEndCommandBuffer(
+    VkCommandBuffer                             commandBuffer);
+VkResult DispatchResetCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkCommandBufferResetFlags                   flags);
+void DispatchCmdBindPipeline(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipeline                                  pipeline);
+void DispatchCmdSetViewport(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewport*                           pViewports);
+void DispatchCmdSetScissor(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstScissor,
+    uint32_t                                    scissorCount,
+    const VkRect2D*                             pScissors);
+void DispatchCmdSetLineWidth(
+    VkCommandBuffer                             commandBuffer,
+    float                                       lineWidth);
+void DispatchCmdSetDepthBias(
+    VkCommandBuffer                             commandBuffer,
+    float                                       depthBiasConstantFactor,
+    float                                       depthBiasClamp,
+    float                                       depthBiasSlopeFactor);
+void DispatchCmdSetBlendConstants(
+    VkCommandBuffer                             commandBuffer,
+    const float                                 blendConstants[4]);
+void DispatchCmdSetDepthBounds(
+    VkCommandBuffer                             commandBuffer,
+    float                                       minDepthBounds,
+    float                                       maxDepthBounds);
+void DispatchCmdSetStencilCompareMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    compareMask);
+void DispatchCmdSetStencilWriteMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    writeMask);
+void DispatchCmdSetStencilReference(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    reference);
+void DispatchCmdBindDescriptorSets(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    firstSet,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets,
+    uint32_t                                    dynamicOffsetCount,
+    const uint32_t*                             pDynamicOffsets);
+void DispatchCmdBindIndexBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkIndexType                                 indexType);
+void DispatchCmdBindVertexBuffers(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets);
+void DispatchCmdDraw(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    vertexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstVertex,
+    uint32_t                                    firstInstance);
+void DispatchCmdDrawIndexed(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    indexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstIndex,
+    int32_t                                     vertexOffset,
+    uint32_t                                    firstInstance);
+void DispatchCmdDrawIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride);
+void DispatchCmdDrawIndexedIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride);
+void DispatchCmdDispatch(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ);
+void DispatchCmdDispatchIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset);
+void DispatchCmdCopyBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferCopy*                         pRegions);
+void DispatchCmdCopyImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageCopy*                          pRegions);
+void DispatchCmdBlitImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageBlit*                          pRegions,
+    VkFilter                                    filter);
+void DispatchCmdCopyBufferToImage(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions);
+void DispatchCmdCopyImageToBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions);
+void DispatchCmdUpdateBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                dataSize,
+    const void*                                 pData);
+void DispatchCmdFillBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                size,
+    uint32_t                                    data);
+void DispatchCmdClearColorImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearColorValue*                    pColor,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges);
+void DispatchCmdClearDepthStencilImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearDepthStencilValue*             pDepthStencil,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges);
+void DispatchCmdClearAttachments(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    attachmentCount,
+    const VkClearAttachment*                    pAttachments,
+    uint32_t                                    rectCount,
+    const VkClearRect*                          pRects);
+void DispatchCmdResolveImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageResolve*                       pRegions);
+void DispatchCmdSetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask);
+void DispatchCmdResetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask);
+void DispatchCmdWaitEvents(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    eventCount,
+    const VkEvent*                              pEvents,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers);
+void DispatchCmdPipelineBarrier(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    VkDependencyFlags                           dependencyFlags,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers);
+void DispatchCmdBeginQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags);
+void DispatchCmdEndQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query);
+void DispatchCmdResetQueryPool(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount);
+void DispatchCmdWriteTimestamp(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query);
+void DispatchCmdCopyQueryPoolResults(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags);
+void DispatchCmdPushConstants(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineLayout                            layout,
+    VkShaderStageFlags                          stageFlags,
+    uint32_t                                    offset,
+    uint32_t                                    size,
+    const void*                                 pValues);
+void DispatchCmdBeginRenderPass(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    VkSubpassContents                           contents);
+void DispatchCmdNextSubpass(
+    VkCommandBuffer                             commandBuffer,
+    VkSubpassContents                           contents);
+void DispatchCmdEndRenderPass(
+    VkCommandBuffer                             commandBuffer);
+void DispatchCmdExecuteCommands(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers);
+VkResult DispatchEnumerateInstanceVersion(
+    uint32_t*                                   pApiVersion);
+VkResult DispatchBindBufferMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos);
+VkResult DispatchBindImageMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos);
+void DispatchGetDeviceGroupPeerMemoryFeatures(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures);
+void DispatchCmdSetDeviceMask(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask);
+void DispatchCmdDispatchBase(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ);
+VkResult DispatchEnumeratePhysicalDeviceGroups(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties);
+void DispatchGetImageMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+void DispatchGetBufferMemoryRequirements2(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+void DispatchGetImageSparseMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements);
+void DispatchGetPhysicalDeviceFeatures2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures2*                  pFeatures);
+void DispatchGetPhysicalDeviceProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties2*                pProperties);
+void DispatchGetPhysicalDeviceFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties2*                        pFormatProperties);
+VkResult DispatchGetPhysicalDeviceImageFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceImageFormatInfo2*     pImageFormatInfo,
+    VkImageFormatProperties2*                   pImageFormatProperties);
+void DispatchGetPhysicalDeviceQueueFamilyProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties2*                   pQueueFamilyProperties);
+void DispatchGetPhysicalDeviceMemoryProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties2*          pMemoryProperties);
+void DispatchGetPhysicalDeviceSparseImageFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties2*             pProperties);
+void DispatchTrimCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags);
+void DispatchGetDeviceQueue2(
+    VkDevice                                    device,
+    const VkDeviceQueueInfo2*                   pQueueInfo,
+    VkQueue*                                    pQueue);
+VkResult DispatchCreateSamplerYcbcrConversion(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion);
+void DispatchDestroySamplerYcbcrConversion(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator);
+VkResult DispatchCreateDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate);
+void DispatchDestroyDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator);
+void DispatchUpdateDescriptorSetWithTemplate(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData);
+void DispatchGetPhysicalDeviceExternalBufferProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalBufferInfo*   pExternalBufferInfo,
+    VkExternalBufferProperties*                 pExternalBufferProperties);
+void DispatchGetPhysicalDeviceExternalFenceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalFenceInfo*    pExternalFenceInfo,
+    VkExternalFenceProperties*                  pExternalFenceProperties);
+void DispatchGetPhysicalDeviceExternalSemaphoreProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
+    VkExternalSemaphoreProperties*              pExternalSemaphoreProperties);
+void DispatchGetDescriptorSetLayoutSupport(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport);
+void DispatchDestroySurfaceKHR(
+    VkInstance                                  instance,
+    VkSurfaceKHR                                surface,
+    const VkAllocationCallbacks*                pAllocator);
+VkResult DispatchGetPhysicalDeviceSurfaceSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    VkSurfaceKHR                                surface,
+    VkBool32*                                   pSupported);
+VkResult DispatchGetPhysicalDeviceSurfaceCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilitiesKHR*                   pSurfaceCapabilities);
+VkResult DispatchGetPhysicalDeviceSurfaceFormatsKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormatKHR*                         pSurfaceFormats);
+VkResult DispatchGetPhysicalDeviceSurfacePresentModesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes);
+VkResult DispatchCreateSwapchainKHR(
+    VkDevice                                    device,
+    const VkSwapchainCreateInfoKHR*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchain);
+void DispatchDestroySwapchainKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    const VkAllocationCallbacks*                pAllocator);
+VkResult DispatchGetSwapchainImagesKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pSwapchainImageCount,
+    VkImage*                                    pSwapchainImages);
+VkResult DispatchAcquireNextImageKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint64_t                                    timeout,
+    VkSemaphore                                 semaphore,
+    VkFence                                     fence,
+    uint32_t*                                   pImageIndex);
+VkResult DispatchQueuePresentKHR(
+    VkQueue                                     queue,
+    const VkPresentInfoKHR*                     pPresentInfo);
+VkResult DispatchGetDeviceGroupPresentCapabilitiesKHR(
+    VkDevice                                    device,
+    VkDeviceGroupPresentCapabilitiesKHR*        pDeviceGroupPresentCapabilities);
+VkResult DispatchGetDeviceGroupSurfacePresentModesKHR(
+    VkDevice                                    device,
+    VkSurfaceKHR                                surface,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes);
+VkResult DispatchGetPhysicalDevicePresentRectanglesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pRectCount,
+    VkRect2D*                                   pRects);
+VkResult DispatchAcquireNextImage2KHR(
+    VkDevice                                    device,
+    const VkAcquireNextImageInfoKHR*            pAcquireInfo,
+    uint32_t*                                   pImageIndex);
+VkResult DispatchGetPhysicalDeviceDisplayPropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPropertiesKHR*                     pProperties);
+VkResult DispatchGetPhysicalDeviceDisplayPlanePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPlanePropertiesKHR*                pProperties);
+VkResult DispatchGetDisplayPlaneSupportedDisplaysKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    planeIndex,
+    uint32_t*                                   pDisplayCount,
+    VkDisplayKHR*                               pDisplays);
+VkResult DispatchGetDisplayModePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModePropertiesKHR*                 pProperties);
+VkResult DispatchCreateDisplayModeKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    const VkDisplayModeCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDisplayModeKHR*                           pMode);
+VkResult DispatchGetDisplayPlaneCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayModeKHR                            mode,
+    uint32_t                                    planeIndex,
+    VkDisplayPlaneCapabilitiesKHR*              pCapabilities);
+VkResult DispatchCreateDisplayPlaneSurfaceKHR(
+    VkInstance                                  instance,
+    const VkDisplaySurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+VkResult DispatchCreateSharedSwapchainsKHR(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainCreateInfoKHR*             pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchains);
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+VkResult DispatchCreateXlibSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXlibSurfaceCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif // VK_USE_PLATFORM_XLIB_KHR
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+VkBool32 DispatchGetPhysicalDeviceXlibPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    Display*                                    dpy,
+    VisualID                                    visualID);
+#endif // VK_USE_PLATFORM_XLIB_KHR
+#ifdef VK_USE_PLATFORM_XCB_KHR
+VkResult DispatchCreateXcbSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXcbSurfaceCreateInfoKHR*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif // VK_USE_PLATFORM_XCB_KHR
+#ifdef VK_USE_PLATFORM_XCB_KHR
+VkBool32 DispatchGetPhysicalDeviceXcbPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    xcb_connection_t*                           connection,
+    xcb_visualid_t                              visual_id);
+#endif // VK_USE_PLATFORM_XCB_KHR
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+VkResult DispatchCreateWaylandSurfaceKHR(
+    VkInstance                                  instance,
+    const VkWaylandSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+VkBool32 DispatchGetPhysicalDeviceWaylandPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    struct wl_display*                          display);
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+VkResult DispatchCreateAndroidSurfaceKHR(
+    VkInstance                                  instance,
+    const VkAndroidSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+VkResult DispatchCreateWin32SurfaceKHR(
+    VkInstance                                  instance,
+    const VkWin32SurfaceCreateInfoKHR*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+VkBool32 DispatchGetPhysicalDeviceWin32PresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+void DispatchGetPhysicalDeviceFeatures2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures2*                  pFeatures);
+void DispatchGetPhysicalDeviceProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties2*                pProperties);
+void DispatchGetPhysicalDeviceFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties2*                        pFormatProperties);
+VkResult DispatchGetPhysicalDeviceImageFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceImageFormatInfo2*     pImageFormatInfo,
+    VkImageFormatProperties2*                   pImageFormatProperties);
+void DispatchGetPhysicalDeviceQueueFamilyProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties2*                   pQueueFamilyProperties);
+void DispatchGetPhysicalDeviceMemoryProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties2*          pMemoryProperties);
+void DispatchGetPhysicalDeviceSparseImageFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties2*             pProperties);
+void DispatchGetDeviceGroupPeerMemoryFeaturesKHR(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures);
+void DispatchCmdSetDeviceMaskKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask);
+void DispatchCmdDispatchBaseKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ);
+void DispatchTrimCommandPoolKHR(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags);
+VkResult DispatchEnumeratePhysicalDeviceGroupsKHR(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties);
+void DispatchGetPhysicalDeviceExternalBufferPropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalBufferInfo*   pExternalBufferInfo,
+    VkExternalBufferProperties*                 pExternalBufferProperties);
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+VkResult DispatchGetMemoryWin32HandleKHR(
+    VkDevice                                    device,
+    const VkMemoryGetWin32HandleInfoKHR*        pGetWin32HandleInfo,
+    HANDLE*                                     pHandle);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+VkResult DispatchGetMemoryWin32HandlePropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    HANDLE                                      handle,
+    VkMemoryWin32HandlePropertiesKHR*           pMemoryWin32HandleProperties);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+VkResult DispatchGetMemoryFdKHR(
+    VkDevice                                    device,
+    const VkMemoryGetFdInfoKHR*                 pGetFdInfo,
+    int*                                        pFd);
+VkResult DispatchGetMemoryFdPropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    int                                         fd,
+    VkMemoryFdPropertiesKHR*                    pMemoryFdProperties);
+void DispatchGetPhysicalDeviceExternalSemaphorePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
+    VkExternalSemaphoreProperties*              pExternalSemaphoreProperties);
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+VkResult DispatchImportSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreWin32HandleInfoKHR*  pImportSemaphoreWin32HandleInfo);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+VkResult DispatchGetSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetWin32HandleInfoKHR*     pGetWin32HandleInfo,
+    HANDLE*                                     pHandle);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+VkResult DispatchImportSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreFdInfoKHR*           pImportSemaphoreFdInfo);
+VkResult DispatchGetSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetFdInfoKHR*              pGetFdInfo,
+    int*                                        pFd);
+void DispatchCmdPushDescriptorSetKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites);
+void DispatchCmdPushDescriptorSetWithTemplateKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    const void*                                 pData);
+VkResult DispatchCreateDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate);
+void DispatchDestroyDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator);
+void DispatchUpdateDescriptorSetWithTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData);
+VkResult DispatchCreateRenderPass2KHR(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo2KHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass);
+void DispatchCmdBeginRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo);
+void DispatchCmdNextSubpass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo);
+void DispatchCmdEndRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo);
+VkResult DispatchGetSwapchainStatusKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain);
+void DispatchGetPhysicalDeviceExternalFencePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalFenceInfo*    pExternalFenceInfo,
+    VkExternalFenceProperties*                  pExternalFenceProperties);
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+VkResult DispatchImportFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportFenceWin32HandleInfoKHR*      pImportFenceWin32HandleInfo);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+VkResult DispatchGetFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkFenceGetWin32HandleInfoKHR*         pGetWin32HandleInfo,
+    HANDLE*                                     pHandle);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+VkResult DispatchImportFenceFdKHR(
+    VkDevice                                    device,
+    const VkImportFenceFdInfoKHR*               pImportFenceFdInfo);
+VkResult DispatchGetFenceFdKHR(
+    VkDevice                                    device,
+    const VkFenceGetFdInfoKHR*                  pGetFdInfo,
+    int*                                        pFd);
+VkResult DispatchEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t*                                   pCounterCount,
+    VkPerformanceCounterKHR*                    pCounters,
+    VkPerformanceCounterDescriptionKHR*         pCounterDescriptions);
+void DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkQueryPoolPerformanceCreateInfoKHR*  pPerformanceQueryCreateInfo,
+    uint32_t*                                   pNumPasses);
+VkResult DispatchAcquireProfilingLockKHR(
+    VkDevice                                    device,
+    const VkAcquireProfilingLockInfoKHR*        pInfo);
+void DispatchReleaseProfilingLockKHR(
+    VkDevice                                    device);
+VkResult DispatchGetPhysicalDeviceSurfaceCapabilities2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkSurfaceCapabilities2KHR*                  pSurfaceCapabilities);
+VkResult DispatchGetPhysicalDeviceSurfaceFormats2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormat2KHR*                        pSurfaceFormats);
+VkResult DispatchGetPhysicalDeviceDisplayProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayProperties2KHR*                    pProperties);
+VkResult DispatchGetPhysicalDeviceDisplayPlaneProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPlaneProperties2KHR*               pProperties);
+VkResult DispatchGetDisplayModeProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModeProperties2KHR*                pProperties);
+VkResult DispatchGetDisplayPlaneCapabilities2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDisplayPlaneInfo2KHR*               pDisplayPlaneInfo,
+    VkDisplayPlaneCapabilities2KHR*             pCapabilities);
+void DispatchGetImageMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+void DispatchGetBufferMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+void DispatchGetImageSparseMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements);
+VkResult DispatchCreateSamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion);
+void DispatchDestroySamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator);
+VkResult DispatchBindBufferMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos);
+VkResult DispatchBindImageMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos);
+void DispatchGetDescriptorSetLayoutSupportKHR(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport);
+void DispatchCmdDrawIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+void DispatchCmdDrawIndexedIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+VkResult DispatchGetSemaphoreCounterValueKHR(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    uint64_t*                                   pValue);
+VkResult DispatchWaitSemaphoresKHR(
+    VkDevice                                    device,
+    const VkSemaphoreWaitInfoKHR*               pWaitInfo,
+    uint64_t                                    timeout);
+VkResult DispatchSignalSemaphoreKHR(
+    VkDevice                                    device,
+    const VkSemaphoreSignalInfoKHR*             pSignalInfo);
+VkDeviceAddress DispatchGetBufferDeviceAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo);
+uint64_t DispatchGetBufferOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo);
+uint64_t DispatchGetDeviceMemoryOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo);
+VkResult DispatchGetPipelineExecutablePropertiesKHR(
+    VkDevice                                    device,
+    const VkPipelineInfoKHR*                    pPipelineInfo,
+    uint32_t*                                   pExecutableCount,
+    VkPipelineExecutablePropertiesKHR*          pProperties);
+VkResult DispatchGetPipelineExecutableStatisticsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pStatisticCount,
+    VkPipelineExecutableStatisticKHR*           pStatistics);
+VkResult DispatchGetPipelineExecutableInternalRepresentationsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pInternalRepresentationCount,
+    VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations);
+VkResult DispatchCreateDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    const VkDebugReportCallbackCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugReportCallbackEXT*                   pCallback);
+void DispatchDestroyDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    VkDebugReportCallbackEXT                    callback,
+    const VkAllocationCallbacks*                pAllocator);
+void DispatchDebugReportMessageEXT(
+    VkInstance                                  instance,
+    VkDebugReportFlagsEXT                       flags,
+    VkDebugReportObjectTypeEXT                  objectType,
+    uint64_t                                    object,
+    size_t                                      location,
+    int32_t                                     messageCode,
+    const char*                                 pLayerPrefix,
+    const char*                                 pMessage);
+VkResult DispatchDebugMarkerSetObjectTagEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectTagInfoEXT*        pTagInfo);
+VkResult DispatchDebugMarkerSetObjectNameEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectNameInfoEXT*       pNameInfo);
+void DispatchCmdDebugMarkerBeginEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo);
+void DispatchCmdDebugMarkerEndEXT(
+    VkCommandBuffer                             commandBuffer);
+void DispatchCmdDebugMarkerInsertEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo);
+void DispatchCmdBindTransformFeedbackBuffersEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets,
+    const VkDeviceSize*                         pSizes);
+void DispatchCmdBeginTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets);
+void DispatchCmdEndTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets);
+void DispatchCmdBeginQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags,
+    uint32_t                                    index);
+void DispatchCmdEndQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    uint32_t                                    index);
+void DispatchCmdDrawIndirectByteCountEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstInstance,
+    VkBuffer                                    counterBuffer,
+    VkDeviceSize                                counterBufferOffset,
+    uint32_t                                    counterOffset,
+    uint32_t                                    vertexStride);
+uint32_t DispatchGetImageViewHandleNVX(
+    VkDevice                                    device,
+    const VkImageViewHandleInfoNVX*             pInfo);
+void DispatchCmdDrawIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+void DispatchCmdDrawIndexedIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+VkResult DispatchGetShaderInfoAMD(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    VkShaderStageFlagBits                       shaderStage,
+    VkShaderInfoTypeAMD                         infoType,
+    size_t*                                     pInfoSize,
+    void*                                       pInfo);
+#ifdef VK_USE_PLATFORM_GGP
+VkResult DispatchCreateStreamDescriptorSurfaceGGP(
+    VkInstance                                  instance,
+    const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif // VK_USE_PLATFORM_GGP
+VkResult DispatchGetPhysicalDeviceExternalImageFormatPropertiesNV(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageCreateFlags                          flags,
+    VkExternalMemoryHandleTypeFlagsNV           externalHandleType,
+    VkExternalImageFormatPropertiesNV*          pExternalImageFormatProperties);
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+VkResult DispatchGetMemoryWin32HandleNV(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkExternalMemoryHandleTypeFlagsNV           handleType,
+    HANDLE*                                     pHandle);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_VI_NN
+VkResult DispatchCreateViSurfaceNN(
+    VkInstance                                  instance,
+    const VkViSurfaceCreateInfoNN*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif // VK_USE_PLATFORM_VI_NN
+void DispatchCmdBeginConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkConditionalRenderingBeginInfoEXT*   pConditionalRenderingBegin);
+void DispatchCmdEndConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer);
+void DispatchCmdProcessCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdProcessCommandsInfoNVX*          pProcessCommandsInfo);
+void DispatchCmdReserveSpaceForCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdReserveSpaceForCommandsInfoNVX*  pReserveSpaceInfo);
+VkResult DispatchCreateIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkIndirectCommandsLayoutNVX*                pIndirectCommandsLayout);
+void DispatchDestroyIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    VkIndirectCommandsLayoutNVX                 indirectCommandsLayout,
+    const VkAllocationCallbacks*                pAllocator);
+VkResult DispatchCreateObjectTableNVX(
+    VkDevice                                    device,
+    const VkObjectTableCreateInfoNVX*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkObjectTableNVX*                           pObjectTable);
+void DispatchDestroyObjectTableNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    const VkAllocationCallbacks*                pAllocator);
+VkResult DispatchRegisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectTableEntryNVX* const*         ppObjectTableEntries,
+    const uint32_t*                             pObjectIndices);
+VkResult DispatchUnregisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectEntryTypeNVX*                 pObjectEntryTypes,
+    const uint32_t*                             pObjectIndices);
+void DispatchGetPhysicalDeviceGeneratedCommandsPropertiesNVX(
+    VkPhysicalDevice                            physicalDevice,
+    VkDeviceGeneratedCommandsFeaturesNVX*       pFeatures,
+    VkDeviceGeneratedCommandsLimitsNVX*         pLimits);
+void DispatchCmdSetViewportWScalingNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewportWScalingNV*                 pViewportWScalings);
+VkResult DispatchReleaseDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display);
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+VkResult DispatchAcquireXlibDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    VkDisplayKHR                                display);
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+VkResult DispatchGetRandROutputDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    RROutput                                    rrOutput,
+    VkDisplayKHR*                               pDisplay);
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+VkResult DispatchGetPhysicalDeviceSurfaceCapabilities2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilities2EXT*                  pSurfaceCapabilities);
+VkResult DispatchDisplayPowerControlEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayPowerInfoEXT*                pDisplayPowerInfo);
+VkResult DispatchRegisterDeviceEventEXT(
+    VkDevice                                    device,
+    const VkDeviceEventInfoEXT*                 pDeviceEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence);
+VkResult DispatchRegisterDisplayEventEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayEventInfoEXT*                pDisplayEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence);
+VkResult DispatchGetSwapchainCounterEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkSurfaceCounterFlagBitsEXT                 counter,
+    uint64_t*                                   pCounterValue);
+VkResult DispatchGetRefreshCycleDurationGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkRefreshCycleDurationGOOGLE*               pDisplayTimingProperties);
+VkResult DispatchGetPastPresentationTimingGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pPresentationTimingCount,
+    VkPastPresentationTimingGOOGLE*             pPresentationTimings);
+void DispatchCmdSetDiscardRectangleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstDiscardRectangle,
+    uint32_t                                    discardRectangleCount,
+    const VkRect2D*                             pDiscardRectangles);
+void DispatchSetHdrMetadataEXT(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainKHR*                       pSwapchains,
+    const VkHdrMetadataEXT*                     pMetadata);
+#ifdef VK_USE_PLATFORM_IOS_MVK
+VkResult DispatchCreateIOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkIOSSurfaceCreateInfoMVK*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif // VK_USE_PLATFORM_IOS_MVK
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+VkResult DispatchCreateMacOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkMacOSSurfaceCreateInfoMVK*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif // VK_USE_PLATFORM_MACOS_MVK
+VkResult DispatchSetDebugUtilsObjectNameEXT(
+    VkDevice                                    device,
+    const VkDebugUtilsObjectNameInfoEXT*        pNameInfo);
+VkResult DispatchSetDebugUtilsObjectTagEXT(
+    VkDevice                                    device,
+    const VkDebugUtilsObjectTagInfoEXT*         pTagInfo);
+void DispatchQueueBeginDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+void DispatchQueueEndDebugUtilsLabelEXT(
+    VkQueue                                     queue);
+void DispatchQueueInsertDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+void DispatchCmdBeginDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+void DispatchCmdEndDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer);
+void DispatchCmdInsertDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+VkResult DispatchCreateDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    const VkDebugUtilsMessengerCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugUtilsMessengerEXT*                   pMessenger);
+void DispatchDestroyDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessengerEXT                    messenger,
+    const VkAllocationCallbacks*                pAllocator);
+void DispatchSubmitDebugUtilsMessageEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessageSeverityFlagBitsEXT      messageSeverity,
+    VkDebugUtilsMessageTypeFlagsEXT             messageTypes,
+    const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData);
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+VkResult DispatchGetAndroidHardwareBufferPropertiesANDROID(
+    VkDevice                                    device,
+    const struct AHardwareBuffer*               buffer,
+    VkAndroidHardwareBufferPropertiesANDROID*   pProperties);
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+VkResult DispatchGetMemoryAndroidHardwareBufferANDROID(
+    VkDevice                                    device,
+    const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
+    struct AHardwareBuffer**                    pBuffer);
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+void DispatchCmdSetSampleLocationsEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkSampleLocationsInfoEXT*             pSampleLocationsInfo);
+void DispatchGetPhysicalDeviceMultisamplePropertiesEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSampleCountFlagBits                       samples,
+    VkMultisamplePropertiesEXT*                 pMultisampleProperties);
+VkResult DispatchGetImageDrmFormatModifierPropertiesEXT(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkImageDrmFormatModifierPropertiesEXT*      pProperties);
+VkResult DispatchCreateValidationCacheEXT(
+    VkDevice                                    device,
+    const VkValidationCacheCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkValidationCacheEXT*                       pValidationCache);
+void DispatchDestroyValidationCacheEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    const VkAllocationCallbacks*                pAllocator);
+VkResult DispatchMergeValidationCachesEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkValidationCacheEXT*                 pSrcCaches);
+VkResult DispatchGetValidationCacheDataEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    size_t*                                     pDataSize,
+    void*                                       pData);
+void DispatchCmdBindShadingRateImageNV(
+    VkCommandBuffer                             commandBuffer,
+    VkImageView                                 imageView,
+    VkImageLayout                               imageLayout);
+void DispatchCmdSetViewportShadingRatePaletteNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkShadingRatePaletteNV*               pShadingRatePalettes);
+void DispatchCmdSetCoarseSampleOrderNV(
+    VkCommandBuffer                             commandBuffer,
+    VkCoarseSampleOrderTypeNV                   sampleOrderType,
+    uint32_t                                    customSampleOrderCount,
+    const VkCoarseSampleOrderCustomNV*          pCustomSampleOrders);
+VkResult DispatchCreateAccelerationStructureNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureCreateInfoNV*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkAccelerationStructureNV*                  pAccelerationStructure);
+void DispatchDestroyAccelerationStructureNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    const VkAllocationCallbacks*                pAllocator);
+void DispatchGetAccelerationStructureMemoryRequirementsNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
+    VkMemoryRequirements2KHR*                   pMemoryRequirements);
+VkResult DispatchBindAccelerationStructureMemoryNV(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindAccelerationStructureMemoryInfoNV* pBindInfos);
+void DispatchCmdBuildAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    const VkAccelerationStructureInfoNV*        pInfo,
+    VkBuffer                                    instanceData,
+    VkDeviceSize                                instanceOffset,
+    VkBool32                                    update,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkBuffer                                    scratch,
+    VkDeviceSize                                scratchOffset);
+void DispatchCmdCopyAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkCopyAccelerationStructureModeNV           mode);
+void DispatchCmdTraceRaysNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    raygenShaderBindingTableBuffer,
+    VkDeviceSize                                raygenShaderBindingOffset,
+    VkBuffer                                    missShaderBindingTableBuffer,
+    VkDeviceSize                                missShaderBindingOffset,
+    VkDeviceSize                                missShaderBindingStride,
+    VkBuffer                                    hitShaderBindingTableBuffer,
+    VkDeviceSize                                hitShaderBindingOffset,
+    VkDeviceSize                                hitShaderBindingStride,
+    VkBuffer                                    callableShaderBindingTableBuffer,
+    VkDeviceSize                                callableShaderBindingOffset,
+    VkDeviceSize                                callableShaderBindingStride,
+    uint32_t                                    width,
+    uint32_t                                    height,
+    uint32_t                                    depth);
+VkResult DispatchCreateRayTracingPipelinesNV(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkRayTracingPipelineCreateInfoNV*     pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines);
+VkResult DispatchGetRayTracingShaderGroupHandlesNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    firstGroup,
+    uint32_t                                    groupCount,
+    size_t                                      dataSize,
+    void*                                       pData);
+VkResult DispatchGetAccelerationStructureHandleNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    size_t                                      dataSize,
+    void*                                       pData);
+void DispatchCmdWriteAccelerationStructuresPropertiesNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    accelerationStructureCount,
+    const VkAccelerationStructureNV*            pAccelerationStructures,
+    VkQueryType                                 queryType,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery);
+VkResult DispatchCompileDeferredNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    shader);
+VkResult DispatchGetMemoryHostPointerPropertiesEXT(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    const void*                                 pHostPointer,
+    VkMemoryHostPointerPropertiesEXT*           pMemoryHostPointerProperties);
+void DispatchCmdWriteBufferMarkerAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    uint32_t                                    marker);
+VkResult DispatchGetPhysicalDeviceCalibrateableTimeDomainsEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pTimeDomainCount,
+    VkTimeDomainEXT*                            pTimeDomains);
+VkResult DispatchGetCalibratedTimestampsEXT(
+    VkDevice                                    device,
+    uint32_t                                    timestampCount,
+    const VkCalibratedTimestampInfoEXT*         pTimestampInfos,
+    uint64_t*                                   pTimestamps,
+    uint64_t*                                   pMaxDeviation);
+void DispatchCmdDrawMeshTasksNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    taskCount,
+    uint32_t                                    firstTask);
+void DispatchCmdDrawMeshTasksIndirectNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride);
+void DispatchCmdDrawMeshTasksIndirectCountNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+void DispatchCmdSetExclusiveScissorNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstExclusiveScissor,
+    uint32_t                                    exclusiveScissorCount,
+    const VkRect2D*                             pExclusiveScissors);
+void DispatchCmdSetCheckpointNV(
+    VkCommandBuffer                             commandBuffer,
+    const void*                                 pCheckpointMarker);
+void DispatchGetQueueCheckpointDataNV(
+    VkQueue                                     queue,
+    uint32_t*                                   pCheckpointDataCount,
+    VkCheckpointDataNV*                         pCheckpointData);
+VkResult DispatchInitializePerformanceApiINTEL(
+    VkDevice                                    device,
+    const VkInitializePerformanceApiInfoINTEL*  pInitializeInfo);
+void DispatchUninitializePerformanceApiINTEL(
+    VkDevice                                    device);
+VkResult DispatchCmdSetPerformanceMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceMarkerInfoINTEL*         pMarkerInfo);
+VkResult DispatchCmdSetPerformanceStreamMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceStreamMarkerInfoINTEL*   pMarkerInfo);
+VkResult DispatchCmdSetPerformanceOverrideINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceOverrideInfoINTEL*       pOverrideInfo);
+VkResult DispatchAcquirePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo,
+    VkPerformanceConfigurationINTEL*            pConfiguration);
+VkResult DispatchReleasePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    VkPerformanceConfigurationINTEL             configuration);
+VkResult DispatchQueueSetPerformanceConfigurationINTEL(
+    VkQueue                                     queue,
+    VkPerformanceConfigurationINTEL             configuration);
+VkResult DispatchGetPerformanceParameterINTEL(
+    VkDevice                                    device,
+    VkPerformanceParameterTypeINTEL             parameter,
+    VkPerformanceValueINTEL*                    pValue);
+void DispatchSetLocalDimmingAMD(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapChain,
+    VkBool32                                    localDimmingEnable);
+#ifdef VK_USE_PLATFORM_FUCHSIA
+VkResult DispatchCreateImagePipeSurfaceFUCHSIA(
+    VkInstance                                  instance,
+    const VkImagePipeSurfaceCreateInfoFUCHSIA*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif // VK_USE_PLATFORM_FUCHSIA
+#ifdef VK_USE_PLATFORM_METAL_EXT
+VkResult DispatchCreateMetalSurfaceEXT(
+    VkInstance                                  instance,
+    const VkMetalSurfaceCreateInfoEXT*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif // VK_USE_PLATFORM_METAL_EXT
+VkDeviceAddress DispatchGetBufferDeviceAddressEXT(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo);
+VkResult DispatchGetPhysicalDeviceToolPropertiesEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pToolCount,
+    VkPhysicalDeviceToolPropertiesEXT*          pToolProperties);
+VkResult DispatchGetPhysicalDeviceCooperativeMatrixPropertiesNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkCooperativeMatrixPropertiesNV*            pProperties);
+VkResult DispatchGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pCombinationCount,
+    VkFramebufferMixedSamplesCombinationNV*     pCombinations);
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+VkResult DispatchGetPhysicalDeviceSurfacePresentModes2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+VkResult DispatchAcquireFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+VkResult DispatchReleaseFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+VkResult DispatchGetDeviceGroupSurfacePresentModes2EXT(
+    VkDevice                                    device,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+VkResult DispatchCreateHeadlessSurfaceEXT(
+    VkInstance                                  instance,
+    const VkHeadlessSurfaceCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+void DispatchCmdSetLineStippleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    lineStippleFactor,
+    uint16_t                                    lineStipplePattern);
+void DispatchResetQueryPoolEXT(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount);
\ No newline at end of file
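(Editor's note, outside the patch hunks above: the Dispatch* prototypes in the preceding generated header are the forwarding wrappers the validation layers call into. A minimal, illustrative C++ sketch of invoking one of them follows; only the DispatchGetSwapchainStatusKHR signature is taken from the declarations above, while the helper name and comments are assumptions added purely for the example.)

    // Hypothetical helper, for illustration only: forwards a swapchain
    // status query through the generated dispatch wrapper declared above.
    VkResult ExampleQuerySwapchainStatus(VkDevice device, VkSwapchainKHR swapchain) {
        // A validation layer would typically run its pre-call checks here.
        VkResult result = DispatchGetSwapchainStatusKHR(device, swapchain);
        // Post-call bookkeeping (state tracking, logging) would typically run here.
        return result;
    }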
diff --git a/src/third_party/vulkan-validation-layers/src/layers/generated/lvt_function_pointers.cpp b/src/third_party/vulkan-validation-layers/src/layers/generated/lvt_function_pointers.cpp
new file mode 100644
index 0000000..5175a9d
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/generated/lvt_function_pointers.cpp
@@ -0,0 +1,483 @@
+// *** THIS FILE IS GENERATED - DO NOT EDIT ***
+// See lvt_file_generator.py for modifications
+
+/*
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ */
+
+
+#include "lvt_function_pointers.h"
+#include <stdio.h>
+
+namespace vk {
+
+PFN_vkCreateInstance CreateInstance;
+PFN_vkDestroyInstance DestroyInstance;
+PFN_vkEnumeratePhysicalDevices EnumeratePhysicalDevices;
+PFN_vkGetPhysicalDeviceFeatures GetPhysicalDeviceFeatures;
+PFN_vkGetPhysicalDeviceFormatProperties GetPhysicalDeviceFormatProperties;
+PFN_vkGetPhysicalDeviceImageFormatProperties GetPhysicalDeviceImageFormatProperties;
+PFN_vkGetPhysicalDeviceProperties GetPhysicalDeviceProperties;
+PFN_vkGetPhysicalDeviceQueueFamilyProperties GetPhysicalDeviceQueueFamilyProperties;
+PFN_vkGetPhysicalDeviceMemoryProperties GetPhysicalDeviceMemoryProperties;
+PFN_vkGetInstanceProcAddr GetInstanceProcAddr;
+PFN_vkGetDeviceProcAddr GetDeviceProcAddr;
+PFN_vkCreateDevice CreateDevice;
+PFN_vkDestroyDevice DestroyDevice;
+PFN_vkEnumerateInstanceExtensionProperties EnumerateInstanceExtensionProperties;
+PFN_vkEnumerateDeviceExtensionProperties EnumerateDeviceExtensionProperties;
+PFN_vkEnumerateInstanceLayerProperties EnumerateInstanceLayerProperties;
+PFN_vkEnumerateDeviceLayerProperties EnumerateDeviceLayerProperties;
+PFN_vkGetDeviceQueue GetDeviceQueue;
+PFN_vkQueueSubmit QueueSubmit;
+PFN_vkQueueWaitIdle QueueWaitIdle;
+PFN_vkDeviceWaitIdle DeviceWaitIdle;
+PFN_vkAllocateMemory AllocateMemory;
+PFN_vkFreeMemory FreeMemory;
+PFN_vkMapMemory MapMemory;
+PFN_vkUnmapMemory UnmapMemory;
+PFN_vkFlushMappedMemoryRanges FlushMappedMemoryRanges;
+PFN_vkInvalidateMappedMemoryRanges InvalidateMappedMemoryRanges;
+PFN_vkGetDeviceMemoryCommitment GetDeviceMemoryCommitment;
+PFN_vkBindBufferMemory BindBufferMemory;
+PFN_vkBindImageMemory BindImageMemory;
+PFN_vkGetBufferMemoryRequirements GetBufferMemoryRequirements;
+PFN_vkGetImageMemoryRequirements GetImageMemoryRequirements;
+PFN_vkGetImageSparseMemoryRequirements GetImageSparseMemoryRequirements;
+PFN_vkGetPhysicalDeviceSparseImageFormatProperties GetPhysicalDeviceSparseImageFormatProperties;
+PFN_vkQueueBindSparse QueueBindSparse;
+PFN_vkCreateFence CreateFence;
+PFN_vkDestroyFence DestroyFence;
+PFN_vkResetFences ResetFences;
+PFN_vkGetFenceStatus GetFenceStatus;
+PFN_vkWaitForFences WaitForFences;
+PFN_vkCreateSemaphore CreateSemaphore;
+PFN_vkDestroySemaphore DestroySemaphore;
+PFN_vkCreateEvent CreateEvent;
+PFN_vkDestroyEvent DestroyEvent;
+PFN_vkGetEventStatus GetEventStatus;
+PFN_vkSetEvent SetEvent;
+PFN_vkResetEvent ResetEvent;
+PFN_vkCreateQueryPool CreateQueryPool;
+PFN_vkDestroyQueryPool DestroyQueryPool;
+PFN_vkGetQueryPoolResults GetQueryPoolResults;
+PFN_vkCreateBuffer CreateBuffer;
+PFN_vkDestroyBuffer DestroyBuffer;
+PFN_vkCreateBufferView CreateBufferView;
+PFN_vkDestroyBufferView DestroyBufferView;
+PFN_vkCreateImage CreateImage;
+PFN_vkDestroyImage DestroyImage;
+PFN_vkGetImageSubresourceLayout GetImageSubresourceLayout;
+PFN_vkCreateImageView CreateImageView;
+PFN_vkDestroyImageView DestroyImageView;
+PFN_vkCreateShaderModule CreateShaderModule;
+PFN_vkDestroyShaderModule DestroyShaderModule;
+PFN_vkCreatePipelineCache CreatePipelineCache;
+PFN_vkDestroyPipelineCache DestroyPipelineCache;
+PFN_vkGetPipelineCacheData GetPipelineCacheData;
+PFN_vkMergePipelineCaches MergePipelineCaches;
+PFN_vkCreateGraphicsPipelines CreateGraphicsPipelines;
+PFN_vkCreateComputePipelines CreateComputePipelines;
+PFN_vkDestroyPipeline DestroyPipeline;
+PFN_vkCreatePipelineLayout CreatePipelineLayout;
+PFN_vkDestroyPipelineLayout DestroyPipelineLayout;
+PFN_vkCreateSampler CreateSampler;
+PFN_vkDestroySampler DestroySampler;
+PFN_vkCreateDescriptorSetLayout CreateDescriptorSetLayout;
+PFN_vkDestroyDescriptorSetLayout DestroyDescriptorSetLayout;
+PFN_vkCreateDescriptorPool CreateDescriptorPool;
+PFN_vkDestroyDescriptorPool DestroyDescriptorPool;
+PFN_vkResetDescriptorPool ResetDescriptorPool;
+PFN_vkAllocateDescriptorSets AllocateDescriptorSets;
+PFN_vkFreeDescriptorSets FreeDescriptorSets;
+PFN_vkUpdateDescriptorSets UpdateDescriptorSets;
+PFN_vkCreateFramebuffer CreateFramebuffer;
+PFN_vkDestroyFramebuffer DestroyFramebuffer;
+PFN_vkCreateRenderPass CreateRenderPass;
+PFN_vkDestroyRenderPass DestroyRenderPass;
+PFN_vkGetRenderAreaGranularity GetRenderAreaGranularity;
+PFN_vkCreateCommandPool CreateCommandPool;
+PFN_vkDestroyCommandPool DestroyCommandPool;
+PFN_vkResetCommandPool ResetCommandPool;
+PFN_vkAllocateCommandBuffers AllocateCommandBuffers;
+PFN_vkFreeCommandBuffers FreeCommandBuffers;
+PFN_vkBeginCommandBuffer BeginCommandBuffer;
+PFN_vkEndCommandBuffer EndCommandBuffer;
+PFN_vkResetCommandBuffer ResetCommandBuffer;
+PFN_vkCmdBindPipeline CmdBindPipeline;
+PFN_vkCmdSetViewport CmdSetViewport;
+PFN_vkCmdSetScissor CmdSetScissor;
+PFN_vkCmdSetLineWidth CmdSetLineWidth;
+PFN_vkCmdSetDepthBias CmdSetDepthBias;
+PFN_vkCmdSetBlendConstants CmdSetBlendConstants;
+PFN_vkCmdSetDepthBounds CmdSetDepthBounds;
+PFN_vkCmdSetStencilCompareMask CmdSetStencilCompareMask;
+PFN_vkCmdSetStencilWriteMask CmdSetStencilWriteMask;
+PFN_vkCmdSetStencilReference CmdSetStencilReference;
+PFN_vkCmdBindDescriptorSets CmdBindDescriptorSets;
+PFN_vkCmdBindIndexBuffer CmdBindIndexBuffer;
+PFN_vkCmdBindVertexBuffers CmdBindVertexBuffers;
+PFN_vkCmdDraw CmdDraw;
+PFN_vkCmdDrawIndexed CmdDrawIndexed;
+PFN_vkCmdDrawIndirect CmdDrawIndirect;
+PFN_vkCmdDrawIndexedIndirect CmdDrawIndexedIndirect;
+PFN_vkCmdDispatch CmdDispatch;
+PFN_vkCmdDispatchIndirect CmdDispatchIndirect;
+PFN_vkCmdCopyBuffer CmdCopyBuffer;
+PFN_vkCmdCopyImage CmdCopyImage;
+PFN_vkCmdBlitImage CmdBlitImage;
+PFN_vkCmdCopyBufferToImage CmdCopyBufferToImage;
+PFN_vkCmdCopyImageToBuffer CmdCopyImageToBuffer;
+PFN_vkCmdUpdateBuffer CmdUpdateBuffer;
+PFN_vkCmdFillBuffer CmdFillBuffer;
+PFN_vkCmdClearColorImage CmdClearColorImage;
+PFN_vkCmdClearDepthStencilImage CmdClearDepthStencilImage;
+PFN_vkCmdClearAttachments CmdClearAttachments;
+PFN_vkCmdResolveImage CmdResolveImage;
+PFN_vkCmdSetEvent CmdSetEvent;
+PFN_vkCmdResetEvent CmdResetEvent;
+PFN_vkCmdWaitEvents CmdWaitEvents;
+PFN_vkCmdPipelineBarrier CmdPipelineBarrier;
+PFN_vkCmdBeginQuery CmdBeginQuery;
+PFN_vkCmdEndQuery CmdEndQuery;
+PFN_vkCmdResetQueryPool CmdResetQueryPool;
+PFN_vkCmdWriteTimestamp CmdWriteTimestamp;
+PFN_vkCmdCopyQueryPoolResults CmdCopyQueryPoolResults;
+PFN_vkCmdPushConstants CmdPushConstants;
+PFN_vkCmdBeginRenderPass CmdBeginRenderPass;
+PFN_vkCmdNextSubpass CmdNextSubpass;
+PFN_vkCmdEndRenderPass CmdEndRenderPass;
+PFN_vkCmdExecuteCommands CmdExecuteCommands;
+PFN_vkEnumerateInstanceVersion EnumerateInstanceVersion;
+PFN_vkBindBufferMemory2 BindBufferMemory2;
+PFN_vkBindImageMemory2 BindImageMemory2;
+PFN_vkGetDeviceGroupPeerMemoryFeatures GetDeviceGroupPeerMemoryFeatures;
+PFN_vkCmdSetDeviceMask CmdSetDeviceMask;
+PFN_vkCmdDispatchBase CmdDispatchBase;
+PFN_vkEnumeratePhysicalDeviceGroups EnumeratePhysicalDeviceGroups;
+PFN_vkGetImageMemoryRequirements2 GetImageMemoryRequirements2;
+PFN_vkGetBufferMemoryRequirements2 GetBufferMemoryRequirements2;
+PFN_vkGetImageSparseMemoryRequirements2 GetImageSparseMemoryRequirements2;
+PFN_vkGetPhysicalDeviceFeatures2 GetPhysicalDeviceFeatures2;
+PFN_vkGetPhysicalDeviceProperties2 GetPhysicalDeviceProperties2;
+PFN_vkGetPhysicalDeviceFormatProperties2 GetPhysicalDeviceFormatProperties2;
+PFN_vkGetPhysicalDeviceImageFormatProperties2 GetPhysicalDeviceImageFormatProperties2;
+PFN_vkGetPhysicalDeviceQueueFamilyProperties2 GetPhysicalDeviceQueueFamilyProperties2;
+PFN_vkGetPhysicalDeviceMemoryProperties2 GetPhysicalDeviceMemoryProperties2;
+PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 GetPhysicalDeviceSparseImageFormatProperties2;
+PFN_vkTrimCommandPool TrimCommandPool;
+PFN_vkGetDeviceQueue2 GetDeviceQueue2;
+PFN_vkCreateSamplerYcbcrConversion CreateSamplerYcbcrConversion;
+PFN_vkDestroySamplerYcbcrConversion DestroySamplerYcbcrConversion;
+PFN_vkCreateDescriptorUpdateTemplate CreateDescriptorUpdateTemplate;
+PFN_vkDestroyDescriptorUpdateTemplate DestroyDescriptorUpdateTemplate;
+PFN_vkUpdateDescriptorSetWithTemplate UpdateDescriptorSetWithTemplate;
+PFN_vkGetPhysicalDeviceExternalBufferProperties GetPhysicalDeviceExternalBufferProperties;
+PFN_vkGetPhysicalDeviceExternalFenceProperties GetPhysicalDeviceExternalFenceProperties;
+PFN_vkGetPhysicalDeviceExternalSemaphoreProperties GetPhysicalDeviceExternalSemaphoreProperties;
+PFN_vkGetDescriptorSetLayoutSupport GetDescriptorSetLayoutSupport;
+PFN_vkDestroySurfaceKHR DestroySurfaceKHR;
+PFN_vkGetPhysicalDeviceSurfaceSupportKHR GetPhysicalDeviceSurfaceSupportKHR;
+PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR GetPhysicalDeviceSurfaceCapabilitiesKHR;
+PFN_vkGetPhysicalDeviceSurfaceFormatsKHR GetPhysicalDeviceSurfaceFormatsKHR;
+PFN_vkGetPhysicalDeviceSurfacePresentModesKHR GetPhysicalDeviceSurfacePresentModesKHR;
+PFN_vkCreateSwapchainKHR CreateSwapchainKHR;
+PFN_vkDestroySwapchainKHR DestroySwapchainKHR;
+PFN_vkGetSwapchainImagesKHR GetSwapchainImagesKHR;
+PFN_vkAcquireNextImageKHR AcquireNextImageKHR;
+PFN_vkQueuePresentKHR QueuePresentKHR;
+PFN_vkGetDeviceGroupPresentCapabilitiesKHR GetDeviceGroupPresentCapabilitiesKHR;
+PFN_vkGetDeviceGroupSurfacePresentModesKHR GetDeviceGroupSurfacePresentModesKHR;
+PFN_vkGetPhysicalDevicePresentRectanglesKHR GetPhysicalDevicePresentRectanglesKHR;
+PFN_vkAcquireNextImage2KHR AcquireNextImage2KHR;
+PFN_vkGetPhysicalDeviceDisplayPropertiesKHR GetPhysicalDeviceDisplayPropertiesKHR;
+PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR GetPhysicalDeviceDisplayPlanePropertiesKHR;
+PFN_vkGetDisplayPlaneSupportedDisplaysKHR GetDisplayPlaneSupportedDisplaysKHR;
+PFN_vkGetDisplayModePropertiesKHR GetDisplayModePropertiesKHR;
+PFN_vkCreateDisplayModeKHR CreateDisplayModeKHR;
+PFN_vkGetDisplayPlaneCapabilitiesKHR GetDisplayPlaneCapabilitiesKHR;
+PFN_vkCreateDisplayPlaneSurfaceKHR CreateDisplayPlaneSurfaceKHR;
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+PFN_vkCreateXlibSurfaceKHR CreateXlibSurfaceKHR;
+#endif // VK_USE_PLATFORM_XLIB_KHR
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR GetPhysicalDeviceXlibPresentationSupportKHR;
+#endif // VK_USE_PLATFORM_XLIB_KHR
+#ifdef VK_USE_PLATFORM_XCB_KHR
+PFN_vkCreateXcbSurfaceKHR CreateXcbSurfaceKHR;
+#endif // VK_USE_PLATFORM_XCB_KHR
+#ifdef VK_USE_PLATFORM_XCB_KHR
+PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR GetPhysicalDeviceXcbPresentationSupportKHR;
+#endif // VK_USE_PLATFORM_XCB_KHR
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+PFN_vkCreateWaylandSurfaceKHR CreateWaylandSurfaceKHR;
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR GetPhysicalDeviceWaylandPresentationSupportKHR;
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+PFN_vkCreateAndroidSurfaceKHR CreateAndroidSurfaceKHR;
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+PFN_vkCreateWin32SurfaceKHR CreateWin32SurfaceKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR GetPhysicalDeviceWin32PresentationSupportKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+PFN_vkCreateMacOSSurfaceMVK CreateMacOSSurfaceMVK;
+#endif // VK_USE_PLATFORM_MACOS_MVK
+
+
+void InitDispatchTable() {
+
+#if(WIN32)
+    const char filename[] = "vulkan-1.dll";
+#elif(__APPLE__)
+    const char filename[] = "libvulkan.dylib";
+#else
+    const char filename[] = "libvulkan.so";
+#endif
+
+    auto loader_handle = loader_platform_open_library(filename);
+
+    if (loader_handle == nullptr) {
+        printf("%s\n", loader_platform_open_library_error(filename));
+        exit(1);
+    }
+
+    CreateInstance = reinterpret_cast<PFN_vkCreateInstance>(loader_platform_get_proc_address(loader_handle, "vkCreateInstance"));
+    DestroyInstance = reinterpret_cast<PFN_vkDestroyInstance>(loader_platform_get_proc_address(loader_handle, "vkDestroyInstance"));
+    EnumeratePhysicalDevices = reinterpret_cast<PFN_vkEnumeratePhysicalDevices>(loader_platform_get_proc_address(loader_handle, "vkEnumeratePhysicalDevices"));
+    GetPhysicalDeviceFeatures = reinterpret_cast<PFN_vkGetPhysicalDeviceFeatures>(loader_platform_get_proc_address(loader_handle, "vkGetPhysicalDeviceFeatures"));
+    GetPhysicalDeviceFormatProperties = reinterpret_cast<PFN_vkGetPhysicalDeviceFormatProperties>(loader_platform_get_proc_address(loader_handle, "vkGetPhysicalDeviceFormatProperties"));
+    GetPhysicalDeviceImageFormatProperties = reinterpret_cast<PFN_vkGetPhysicalDeviceImageFormatProperties>(loader_platform_get_proc_address(loader_handle, "vkGetPhysicalDeviceImageFormatProperties"));
+    GetPhysicalDeviceProperties = reinterpret_cast<PFN_vkGetPhysicalDeviceProperties>(loader_platform_get_proc_address(loader_handle, "vkGetPhysicalDeviceProperties"));
+    GetPhysicalDeviceQueueFamilyProperties = reinterpret_cast<PFN_vkGetPhysicalDeviceQueueFamilyProperties>(loader_platform_get_proc_address(loader_handle, "vkGetPhysicalDeviceQueueFamilyProperties"));
+    GetPhysicalDeviceMemoryProperties = reinterpret_cast<PFN_vkGetPhysicalDeviceMemoryProperties>(loader_platform_get_proc_address(loader_handle, "vkGetPhysicalDeviceMemoryProperties"));
+    GetInstanceProcAddr = reinterpret_cast<PFN_vkGetInstanceProcAddr>(loader_platform_get_proc_address(loader_handle, "vkGetInstanceProcAddr"));
+    GetDeviceProcAddr = reinterpret_cast<PFN_vkGetDeviceProcAddr>(loader_platform_get_proc_address(loader_handle, "vkGetDeviceProcAddr"));
+    CreateDevice = reinterpret_cast<PFN_vkCreateDevice>(loader_platform_get_proc_address(loader_handle, "vkCreateDevice"));
+    DestroyDevice = reinterpret_cast<PFN_vkDestroyDevice>(loader_platform_get_proc_address(loader_handle, "vkDestroyDevice"));
+    EnumerateInstanceExtensionProperties = reinterpret_cast<PFN_vkEnumerateInstanceExtensionProperties>(loader_platform_get_proc_address(loader_handle, "vkEnumerateInstanceExtensionProperties"));
+    EnumerateDeviceExtensionProperties = reinterpret_cast<PFN_vkEnumerateDeviceExtensionProperties>(loader_platform_get_proc_address(loader_handle, "vkEnumerateDeviceExtensionProperties"));
+    EnumerateInstanceLayerProperties = reinterpret_cast<PFN_vkEnumerateInstanceLayerProperties>(loader_platform_get_proc_address(loader_handle, "vkEnumerateInstanceLayerProperties"));
+    EnumerateDeviceLayerProperties = reinterpret_cast<PFN_vkEnumerateDeviceLayerProperties>(loader_platform_get_proc_address(loader_handle, "vkEnumerateDeviceLayerProperties"));
+    GetDeviceQueue = reinterpret_cast<PFN_vkGetDeviceQueue>(loader_platform_get_proc_address(loader_handle, "vkGetDeviceQueue"));
+    QueueSubmit = reinterpret_cast<PFN_vkQueueSubmit>(loader_platform_get_proc_address(loader_handle, "vkQueueSubmit"));
+    QueueWaitIdle = reinterpret_cast<PFN_vkQueueWaitIdle>(loader_platform_get_proc_address(loader_handle, "vkQueueWaitIdle"));
+    DeviceWaitIdle = reinterpret_cast<PFN_vkDeviceWaitIdle>(loader_platform_get_proc_address(loader_handle, "vkDeviceWaitIdle"));
+    AllocateMemory = reinterpret_cast<PFN_vkAllocateMemory>(loader_platform_get_proc_address(loader_handle, "vkAllocateMemory"));
+    FreeMemory = reinterpret_cast<PFN_vkFreeMemory>(loader_platform_get_proc_address(loader_handle, "vkFreeMemory"));
+    MapMemory = reinterpret_cast<PFN_vkMapMemory>(loader_platform_get_proc_address(loader_handle, "vkMapMemory"));
+    UnmapMemory = reinterpret_cast<PFN_vkUnmapMemory>(loader_platform_get_proc_address(loader_handle, "vkUnmapMemory"));
+    FlushMappedMemoryRanges = reinterpret_cast<PFN_vkFlushMappedMemoryRanges>(loader_platform_get_proc_address(loader_handle, "vkFlushMappedMemoryRanges"));
+    InvalidateMappedMemoryRanges = reinterpret_cast<PFN_vkInvalidateMappedMemoryRanges>(loader_platform_get_proc_address(loader_handle, "vkInvalidateMappedMemoryRanges"));
+    GetDeviceMemoryCommitment = reinterpret_cast<PFN_vkGetDeviceMemoryCommitment>(loader_platform_get_proc_address(loader_handle, "vkGetDeviceMemoryCommitment"));
+    BindBufferMemory = reinterpret_cast<PFN_vkBindBufferMemory>(loader_platform_get_proc_address(loader_handle, "vkBindBufferMemory"));
+    BindImageMemory = reinterpret_cast<PFN_vkBindImageMemory>(loader_platform_get_proc_address(loader_handle, "vkBindImageMemory"));
+    GetBufferMemoryRequirements = reinterpret_cast<PFN_vkGetBufferMemoryRequirements>(loader_platform_get_proc_address(loader_handle, "vkGetBufferMemoryRequirements"));
+    GetImageMemoryRequirements = reinterpret_cast<PFN_vkGetImageMemoryRequirements>(loader_platform_get_proc_address(loader_handle, "vkGetImageMemoryRequirements"));
+    GetImageSparseMemoryRequirements = reinterpret_cast<PFN_vkGetImageSparseMemoryRequirements>(loader_platform_get_proc_address(loader_handle, "vkGetImageSparseMemoryRequirements"));
+    GetPhysicalDeviceSparseImageFormatProperties = reinterpret_cast<PFN_vkGetPhysicalDeviceSparseImageFormatProperties>(loader_platform_get_proc_address(loader_handle, "vkGetPhysicalDeviceSparseImageFormatProperties"));
+    QueueBindSparse = reinterpret_cast<PFN_vkQueueBindSparse>(loader_platform_get_proc_address(loader_handle, "vkQueueBindSparse"));
+    CreateFence = reinterpret_cast<PFN_vkCreateFence>(loader_platform_get_proc_address(loader_handle, "vkCreateFence"));
+    DestroyFence = reinterpret_cast<PFN_vkDestroyFence>(loader_platform_get_proc_address(loader_handle, "vkDestroyFence"));
+    ResetFences = reinterpret_cast<PFN_vkResetFences>(loader_platform_get_proc_address(loader_handle, "vkResetFences"));
+    GetFenceStatus = reinterpret_cast<PFN_vkGetFenceStatus>(loader_platform_get_proc_address(loader_handle, "vkGetFenceStatus"));
+    WaitForFences = reinterpret_cast<PFN_vkWaitForFences>(loader_platform_get_proc_address(loader_handle, "vkWaitForFences"));
+    CreateSemaphore = reinterpret_cast<PFN_vkCreateSemaphore>(loader_platform_get_proc_address(loader_handle, "vkCreateSemaphore"));
+    DestroySemaphore = reinterpret_cast<PFN_vkDestroySemaphore>(loader_platform_get_proc_address(loader_handle, "vkDestroySemaphore"));
+    CreateEvent = reinterpret_cast<PFN_vkCreateEvent>(loader_platform_get_proc_address(loader_handle, "vkCreateEvent"));
+    DestroyEvent = reinterpret_cast<PFN_vkDestroyEvent>(loader_platform_get_proc_address(loader_handle, "vkDestroyEvent"));
+    GetEventStatus = reinterpret_cast<PFN_vkGetEventStatus>(loader_platform_get_proc_address(loader_handle, "vkGetEventStatus"));
+    SetEvent = reinterpret_cast<PFN_vkSetEvent>(loader_platform_get_proc_address(loader_handle, "vkSetEvent"));
+    ResetEvent = reinterpret_cast<PFN_vkResetEvent>(loader_platform_get_proc_address(loader_handle, "vkResetEvent"));
+    CreateQueryPool = reinterpret_cast<PFN_vkCreateQueryPool>(loader_platform_get_proc_address(loader_handle, "vkCreateQueryPool"));
+    DestroyQueryPool = reinterpret_cast<PFN_vkDestroyQueryPool>(loader_platform_get_proc_address(loader_handle, "vkDestroyQueryPool"));
+    GetQueryPoolResults = reinterpret_cast<PFN_vkGetQueryPoolResults>(loader_platform_get_proc_address(loader_handle, "vkGetQueryPoolResults"));
+    CreateBuffer = reinterpret_cast<PFN_vkCreateBuffer>(loader_platform_get_proc_address(loader_handle, "vkCreateBuffer"));
+    DestroyBuffer = reinterpret_cast<PFN_vkDestroyBuffer>(loader_platform_get_proc_address(loader_handle, "vkDestroyBuffer"));
+    CreateBufferView = reinterpret_cast<PFN_vkCreateBufferView>(loader_platform_get_proc_address(loader_handle, "vkCreateBufferView"));
+    DestroyBufferView = reinterpret_cast<PFN_vkDestroyBufferView>(loader_platform_get_proc_address(loader_handle, "vkDestroyBufferView"));
+    CreateImage = reinterpret_cast<PFN_vkCreateImage>(loader_platform_get_proc_address(loader_handle, "vkCreateImage"));
+    DestroyImage = reinterpret_cast<PFN_vkDestroyImage>(loader_platform_get_proc_address(loader_handle, "vkDestroyImage"));
+    GetImageSubresourceLayout = reinterpret_cast<PFN_vkGetImageSubresourceLayout>(loader_platform_get_proc_address(loader_handle, "vkGetImageSubresourceLayout"));
+    CreateImageView = reinterpret_cast<PFN_vkCreateImageView>(loader_platform_get_proc_address(loader_handle, "vkCreateImageView"));
+    DestroyImageView = reinterpret_cast<PFN_vkDestroyImageView>(loader_platform_get_proc_address(loader_handle, "vkDestroyImageView"));
+    CreateShaderModule = reinterpret_cast<PFN_vkCreateShaderModule>(loader_platform_get_proc_address(loader_handle, "vkCreateShaderModule"));
+    DestroyShaderModule = reinterpret_cast<PFN_vkDestroyShaderModule>(loader_platform_get_proc_address(loader_handle, "vkDestroyShaderModule"));
+    CreatePipelineCache = reinterpret_cast<PFN_vkCreatePipelineCache>(loader_platform_get_proc_address(loader_handle, "vkCreatePipelineCache"));
+    DestroyPipelineCache = reinterpret_cast<PFN_vkDestroyPipelineCache>(loader_platform_get_proc_address(loader_handle, "vkDestroyPipelineCache"));
+    GetPipelineCacheData = reinterpret_cast<PFN_vkGetPipelineCacheData>(loader_platform_get_proc_address(loader_handle, "vkGetPipelineCacheData"));
+    MergePipelineCaches = reinterpret_cast<PFN_vkMergePipelineCaches>(loader_platform_get_proc_address(loader_handle, "vkMergePipelineCaches"));
+    CreateGraphicsPipelines = reinterpret_cast<PFN_vkCreateGraphicsPipelines>(loader_platform_get_proc_address(loader_handle, "vkCreateGraphicsPipelines"));
+    CreateComputePipelines = reinterpret_cast<PFN_vkCreateComputePipelines>(loader_platform_get_proc_address(loader_handle, "vkCreateComputePipelines"));
+    DestroyPipeline = reinterpret_cast<PFN_vkDestroyPipeline>(loader_platform_get_proc_address(loader_handle, "vkDestroyPipeline"));
+    CreatePipelineLayout = reinterpret_cast<PFN_vkCreatePipelineLayout>(loader_platform_get_proc_address(loader_handle, "vkCreatePipelineLayout"));
+    DestroyPipelineLayout = reinterpret_cast<PFN_vkDestroyPipelineLayout>(loader_platform_get_proc_address(loader_handle, "vkDestroyPipelineLayout"));
+    CreateSampler = reinterpret_cast<PFN_vkCreateSampler>(loader_platform_get_proc_address(loader_handle, "vkCreateSampler"));
+    DestroySampler = reinterpret_cast<PFN_vkDestroySampler>(loader_platform_get_proc_address(loader_handle, "vkDestroySampler"));
+    CreateDescriptorSetLayout = reinterpret_cast<PFN_vkCreateDescriptorSetLayout>(loader_platform_get_proc_address(loader_handle, "vkCreateDescriptorSetLayout"));
+    DestroyDescriptorSetLayout = reinterpret_cast<PFN_vkDestroyDescriptorSetLayout>(loader_platform_get_proc_address(loader_handle, "vkDestroyDescriptorSetLayout"));
+    CreateDescriptorPool = reinterpret_cast<PFN_vkCreateDescriptorPool>(loader_platform_get_proc_address(loader_handle, "vkCreateDescriptorPool"));
+    DestroyDescriptorPool = reinterpret_cast<PFN_vkDestroyDescriptorPool>(loader_platform_get_proc_address(loader_handle, "vkDestroyDescriptorPool"));
+    ResetDescriptorPool = reinterpret_cast<PFN_vkResetDescriptorPool>(loader_platform_get_proc_address(loader_handle, "vkResetDescriptorPool"));
+    AllocateDescriptorSets = reinterpret_cast<PFN_vkAllocateDescriptorSets>(loader_platform_get_proc_address(loader_handle, "vkAllocateDescriptorSets"));
+    FreeDescriptorSets = reinterpret_cast<PFN_vkFreeDescriptorSets>(loader_platform_get_proc_address(loader_handle, "vkFreeDescriptorSets"));
+    UpdateDescriptorSets = reinterpret_cast<PFN_vkUpdateDescriptorSets>(loader_platform_get_proc_address(loader_handle, "vkUpdateDescriptorSets"));
+    CreateFramebuffer = reinterpret_cast<PFN_vkCreateFramebuffer>(loader_platform_get_proc_address(loader_handle, "vkCreateFramebuffer"));
+    DestroyFramebuffer = reinterpret_cast<PFN_vkDestroyFramebuffer>(loader_platform_get_proc_address(loader_handle, "vkDestroyFramebuffer"));
+    CreateRenderPass = reinterpret_cast<PFN_vkCreateRenderPass>(loader_platform_get_proc_address(loader_handle, "vkCreateRenderPass"));
+    DestroyRenderPass = reinterpret_cast<PFN_vkDestroyRenderPass>(loader_platform_get_proc_address(loader_handle, "vkDestroyRenderPass"));
+    GetRenderAreaGranularity = reinterpret_cast<PFN_vkGetRenderAreaGranularity>(loader_platform_get_proc_address(loader_handle, "vkGetRenderAreaGranularity"));
+    CreateCommandPool = reinterpret_cast<PFN_vkCreateCommandPool>(loader_platform_get_proc_address(loader_handle, "vkCreateCommandPool"));
+    DestroyCommandPool = reinterpret_cast<PFN_vkDestroyCommandPool>(loader_platform_get_proc_address(loader_handle, "vkDestroyCommandPool"));
+    ResetCommandPool = reinterpret_cast<PFN_vkResetCommandPool>(loader_platform_get_proc_address(loader_handle, "vkResetCommandPool"));
+    AllocateCommandBuffers = reinterpret_cast<PFN_vkAllocateCommandBuffers>(loader_platform_get_proc_address(loader_handle, "vkAllocateCommandBuffers"));
+    FreeCommandBuffers = reinterpret_cast<PFN_vkFreeCommandBuffers>(loader_platform_get_proc_address(loader_handle, "vkFreeCommandBuffers"));
+    BeginCommandBuffer = reinterpret_cast<PFN_vkBeginCommandBuffer>(loader_platform_get_proc_address(loader_handle, "vkBeginCommandBuffer"));
+    EndCommandBuffer = reinterpret_cast<PFN_vkEndCommandBuffer>(loader_platform_get_proc_address(loader_handle, "vkEndCommandBuffer"));
+    ResetCommandBuffer = reinterpret_cast<PFN_vkResetCommandBuffer>(loader_platform_get_proc_address(loader_handle, "vkResetCommandBuffer"));
+    CmdBindPipeline = reinterpret_cast<PFN_vkCmdBindPipeline>(loader_platform_get_proc_address(loader_handle, "vkCmdBindPipeline"));
+    CmdSetViewport = reinterpret_cast<PFN_vkCmdSetViewport>(loader_platform_get_proc_address(loader_handle, "vkCmdSetViewport"));
+    CmdSetScissor = reinterpret_cast<PFN_vkCmdSetScissor>(loader_platform_get_proc_address(loader_handle, "vkCmdSetScissor"));
+    CmdSetLineWidth = reinterpret_cast<PFN_vkCmdSetLineWidth>(loader_platform_get_proc_address(loader_handle, "vkCmdSetLineWidth"));
+    CmdSetDepthBias = reinterpret_cast<PFN_vkCmdSetDepthBias>(loader_platform_get_proc_address(loader_handle, "vkCmdSetDepthBias"));
+    CmdSetBlendConstants = reinterpret_cast<PFN_vkCmdSetBlendConstants>(loader_platform_get_proc_address(loader_handle, "vkCmdSetBlendConstants"));
+    CmdSetDepthBounds = reinterpret_cast<PFN_vkCmdSetDepthBounds>(loader_platform_get_proc_address(loader_handle, "vkCmdSetDepthBounds"));
+    CmdSetStencilCompareMask = reinterpret_cast<PFN_vkCmdSetStencilCompareMask>(loader_platform_get_proc_address(loader_handle, "vkCmdSetStencilCompareMask"));
+    CmdSetStencilWriteMask = reinterpret_cast<PFN_vkCmdSetStencilWriteMask>(loader_platform_get_proc_address(loader_handle, "vkCmdSetStencilWriteMask"));
+    CmdSetStencilReference = reinterpret_cast<PFN_vkCmdSetStencilReference>(loader_platform_get_proc_address(loader_handle, "vkCmdSetStencilReference"));
+    CmdBindDescriptorSets = reinterpret_cast<PFN_vkCmdBindDescriptorSets>(loader_platform_get_proc_address(loader_handle, "vkCmdBindDescriptorSets"));
+    CmdBindIndexBuffer = reinterpret_cast<PFN_vkCmdBindIndexBuffer>(loader_platform_get_proc_address(loader_handle, "vkCmdBindIndexBuffer"));
+    CmdBindVertexBuffers = reinterpret_cast<PFN_vkCmdBindVertexBuffers>(loader_platform_get_proc_address(loader_handle, "vkCmdBindVertexBuffers"));
+    CmdDraw = reinterpret_cast<PFN_vkCmdDraw>(loader_platform_get_proc_address(loader_handle, "vkCmdDraw"));
+    CmdDrawIndexed = reinterpret_cast<PFN_vkCmdDrawIndexed>(loader_platform_get_proc_address(loader_handle, "vkCmdDrawIndexed"));
+    CmdDrawIndirect = reinterpret_cast<PFN_vkCmdDrawIndirect>(loader_platform_get_proc_address(loader_handle, "vkCmdDrawIndirect"));
+    CmdDrawIndexedIndirect = reinterpret_cast<PFN_vkCmdDrawIndexedIndirect>(loader_platform_get_proc_address(loader_handle, "vkCmdDrawIndexedIndirect"));
+    CmdDispatch = reinterpret_cast<PFN_vkCmdDispatch>(loader_platform_get_proc_address(loader_handle, "vkCmdDispatch"));
+    CmdDispatchIndirect = reinterpret_cast<PFN_vkCmdDispatchIndirect>(loader_platform_get_proc_address(loader_handle, "vkCmdDispatchIndirect"));
+    CmdCopyBuffer = reinterpret_cast<PFN_vkCmdCopyBuffer>(loader_platform_get_proc_address(loader_handle, "vkCmdCopyBuffer"));
+    CmdCopyImage = reinterpret_cast<PFN_vkCmdCopyImage>(loader_platform_get_proc_address(loader_handle, "vkCmdCopyImage"));
+    CmdBlitImage = reinterpret_cast<PFN_vkCmdBlitImage>(loader_platform_get_proc_address(loader_handle, "vkCmdBlitImage"));
+    CmdCopyBufferToImage = reinterpret_cast<PFN_vkCmdCopyBufferToImage>(loader_platform_get_proc_address(loader_handle, "vkCmdCopyBufferToImage"));
+    CmdCopyImageToBuffer = reinterpret_cast<PFN_vkCmdCopyImageToBuffer>(loader_platform_get_proc_address(loader_handle, "vkCmdCopyImageToBuffer"));
+    CmdUpdateBuffer = reinterpret_cast<PFN_vkCmdUpdateBuffer>(loader_platform_get_proc_address(loader_handle, "vkCmdUpdateBuffer"));
+    CmdFillBuffer = reinterpret_cast<PFN_vkCmdFillBuffer>(loader_platform_get_proc_address(loader_handle, "vkCmdFillBuffer"));
+    CmdClearColorImage = reinterpret_cast<PFN_vkCmdClearColorImage>(loader_platform_get_proc_address(loader_handle, "vkCmdClearColorImage"));
+    CmdClearDepthStencilImage = reinterpret_cast<PFN_vkCmdClearDepthStencilImage>(loader_platform_get_proc_address(loader_handle, "vkCmdClearDepthStencilImage"));
+    CmdClearAttachments = reinterpret_cast<PFN_vkCmdClearAttachments>(loader_platform_get_proc_address(loader_handle, "vkCmdClearAttachments"));
+    CmdResolveImage = reinterpret_cast<PFN_vkCmdResolveImage>(loader_platform_get_proc_address(loader_handle, "vkCmdResolveImage"));
+    CmdSetEvent = reinterpret_cast<PFN_vkCmdSetEvent>(loader_platform_get_proc_address(loader_handle, "vkCmdSetEvent"));
+    CmdResetEvent = reinterpret_cast<PFN_vkCmdResetEvent>(loader_platform_get_proc_address(loader_handle, "vkCmdResetEvent"));
+    CmdWaitEvents = reinterpret_cast<PFN_vkCmdWaitEvents>(loader_platform_get_proc_address(loader_handle, "vkCmdWaitEvents"));
+    CmdPipelineBarrier = reinterpret_cast<PFN_vkCmdPipelineBarrier>(loader_platform_get_proc_address(loader_handle, "vkCmdPipelineBarrier"));
+    CmdBeginQuery = reinterpret_cast<PFN_vkCmdBeginQuery>(loader_platform_get_proc_address(loader_handle, "vkCmdBeginQuery"));
+    CmdEndQuery = reinterpret_cast<PFN_vkCmdEndQuery>(loader_platform_get_proc_address(loader_handle, "vkCmdEndQuery"));
+    CmdResetQueryPool = reinterpret_cast<PFN_vkCmdResetQueryPool>(loader_platform_get_proc_address(loader_handle, "vkCmdResetQueryPool"));
+    CmdWriteTimestamp = reinterpret_cast<PFN_vkCmdWriteTimestamp>(loader_platform_get_proc_address(loader_handle, "vkCmdWriteTimestamp"));
+    CmdCopyQueryPoolResults = reinterpret_cast<PFN_vkCmdCopyQueryPoolResults>(loader_platform_get_proc_address(loader_handle, "vkCmdCopyQueryPoolResults"));
+    CmdPushConstants = reinterpret_cast<PFN_vkCmdPushConstants>(loader_platform_get_proc_address(loader_handle, "vkCmdPushConstants"));
+    CmdBeginRenderPass = reinterpret_cast<PFN_vkCmdBeginRenderPass>(loader_platform_get_proc_address(loader_handle, "vkCmdBeginRenderPass"));
+    CmdNextSubpass = reinterpret_cast<PFN_vkCmdNextSubpass>(loader_platform_get_proc_address(loader_handle, "vkCmdNextSubpass"));
+    CmdEndRenderPass = reinterpret_cast<PFN_vkCmdEndRenderPass>(loader_platform_get_proc_address(loader_handle, "vkCmdEndRenderPass"));
+    CmdExecuteCommands = reinterpret_cast<PFN_vkCmdExecuteCommands>(loader_platform_get_proc_address(loader_handle, "vkCmdExecuteCommands"));
+    EnumerateInstanceVersion = reinterpret_cast<PFN_vkEnumerateInstanceVersion>(loader_platform_get_proc_address(loader_handle, "vkEnumerateInstanceVersion"));
+    BindBufferMemory2 = reinterpret_cast<PFN_vkBindBufferMemory2>(loader_platform_get_proc_address(loader_handle, "vkBindBufferMemory2"));
+    BindImageMemory2 = reinterpret_cast<PFN_vkBindImageMemory2>(loader_platform_get_proc_address(loader_handle, "vkBindImageMemory2"));
+    GetDeviceGroupPeerMemoryFeatures = reinterpret_cast<PFN_vkGetDeviceGroupPeerMemoryFeatures>(loader_platform_get_proc_address(loader_handle, "vkGetDeviceGroupPeerMemoryFeatures"));
+    CmdSetDeviceMask = reinterpret_cast<PFN_vkCmdSetDeviceMask>(loader_platform_get_proc_address(loader_handle, "vkCmdSetDeviceMask"));
+    CmdDispatchBase = reinterpret_cast<PFN_vkCmdDispatchBase>(loader_platform_get_proc_address(loader_handle, "vkCmdDispatchBase"));
+    EnumeratePhysicalDeviceGroups = reinterpret_cast<PFN_vkEnumeratePhysicalDeviceGroups>(loader_platform_get_proc_address(loader_handle, "vkEnumeratePhysicalDeviceGroups"));
+    GetImageMemoryRequirements2 = reinterpret_cast<PFN_vkGetImageMemoryRequirements2>(loader_platform_get_proc_address(loader_handle, "vkGetImageMemoryRequirements2"));
+    GetBufferMemoryRequirements2 = reinterpret_cast<PFN_vkGetBufferMemoryRequirements2>(loader_platform_get_proc_address(loader_handle, "vkGetBufferMemoryRequirements2"));
+    GetImageSparseMemoryRequirements2 = reinterpret_cast<PFN_vkGetImageSparseMemoryRequirements2>(loader_platform_get_proc_address(loader_handle, "vkGetImageSparseMemoryRequirements2"));
+    GetPhysicalDeviceFeatures2 = reinterpret_cast<PFN_vkGetPhysicalDeviceFeatures2>(loader_platform_get_proc_address(loader_handle, "vkGetPhysicalDeviceFeatures2"));
+    GetPhysicalDeviceProperties2 = reinterpret_cast<PFN_vkGetPhysicalDeviceProperties2>(loader_platform_get_proc_address(loader_handle, "vkGetPhysicalDeviceProperties2"));
+    GetPhysicalDeviceFormatProperties2 = reinterpret_cast<PFN_vkGetPhysicalDeviceFormatProperties2>(loader_platform_get_proc_address(loader_handle, "vkGetPhysicalDeviceFormatProperties2"));
+    GetPhysicalDeviceImageFormatProperties2 = reinterpret_cast<PFN_vkGetPhysicalDeviceImageFormatProperties2>(loader_platform_get_proc_address(loader_handle, "vkGetPhysicalDeviceImageFormatProperties2"));
+    GetPhysicalDeviceQueueFamilyProperties2 = reinterpret_cast<PFN_vkGetPhysicalDeviceQueueFamilyProperties2>(loader_platform_get_proc_address(loader_handle, "vkGetPhysicalDeviceQueueFamilyProperties2"));
+    GetPhysicalDeviceMemoryProperties2 = reinterpret_cast<PFN_vkGetPhysicalDeviceMemoryProperties2>(loader_platform_get_proc_address(loader_handle, "vkGetPhysicalDeviceMemoryProperties2"));
+    GetPhysicalDeviceSparseImageFormatProperties2 = reinterpret_cast<PFN_vkGetPhysicalDeviceSparseImageFormatProperties2>(loader_platform_get_proc_address(loader_handle, "vkGetPhysicalDeviceSparseImageFormatProperties2"));
+    TrimCommandPool = reinterpret_cast<PFN_vkTrimCommandPool>(loader_platform_get_proc_address(loader_handle, "vkTrimCommandPool"));
+    GetDeviceQueue2 = reinterpret_cast<PFN_vkGetDeviceQueue2>(loader_platform_get_proc_address(loader_handle, "vkGetDeviceQueue2"));
+    CreateSamplerYcbcrConversion = reinterpret_cast<PFN_vkCreateSamplerYcbcrConversion>(loader_platform_get_proc_address(loader_handle, "vkCreateSamplerYcbcrConversion"));
+    DestroySamplerYcbcrConversion = reinterpret_cast<PFN_vkDestroySamplerYcbcrConversion>(loader_platform_get_proc_address(loader_handle, "vkDestroySamplerYcbcrConversion"));
+    CreateDescriptorUpdateTemplate = reinterpret_cast<PFN_vkCreateDescriptorUpdateTemplate>(loader_platform_get_proc_address(loader_handle, "vkCreateDescriptorUpdateTemplate"));
+    DestroyDescriptorUpdateTemplate = reinterpret_cast<PFN_vkDestroyDescriptorUpdateTemplate>(loader_platform_get_proc_address(loader_handle, "vkDestroyDescriptorUpdateTemplate"));
+    UpdateDescriptorSetWithTemplate = reinterpret_cast<PFN_vkUpdateDescriptorSetWithTemplate>(loader_platform_get_proc_address(loader_handle, "vkUpdateDescriptorSetWithTemplate"));
+    GetPhysicalDeviceExternalBufferProperties = reinterpret_cast<PFN_vkGetPhysicalDeviceExternalBufferProperties>(loader_platform_get_proc_address(loader_handle, "vkGetPhysicalDeviceExternalBufferProperties"));
+    GetPhysicalDeviceExternalFenceProperties = reinterpret_cast<PFN_vkGetPhysicalDeviceExternalFenceProperties>(loader_platform_get_proc_address(loader_handle, "vkGetPhysicalDeviceExternalFenceProperties"));
+    GetPhysicalDeviceExternalSemaphoreProperties = reinterpret_cast<PFN_vkGetPhysicalDeviceExternalSemaphoreProperties>(loader_platform_get_proc_address(loader_handle, "vkGetPhysicalDeviceExternalSemaphoreProperties"));
+    GetDescriptorSetLayoutSupport = reinterpret_cast<PFN_vkGetDescriptorSetLayoutSupport>(loader_platform_get_proc_address(loader_handle, "vkGetDescriptorSetLayoutSupport"));
+    DestroySurfaceKHR = reinterpret_cast<PFN_vkDestroySurfaceKHR>(loader_platform_get_proc_address(loader_handle, "vkDestroySurfaceKHR"));
+    GetPhysicalDeviceSurfaceSupportKHR = reinterpret_cast<PFN_vkGetPhysicalDeviceSurfaceSupportKHR>(loader_platform_get_proc_address(loader_handle, "vkGetPhysicalDeviceSurfaceSupportKHR"));
+    GetPhysicalDeviceSurfaceCapabilitiesKHR = reinterpret_cast<PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR>(loader_platform_get_proc_address(loader_handle, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR"));
+    GetPhysicalDeviceSurfaceFormatsKHR = reinterpret_cast<PFN_vkGetPhysicalDeviceSurfaceFormatsKHR>(loader_platform_get_proc_address(loader_handle, "vkGetPhysicalDeviceSurfaceFormatsKHR"));
+    GetPhysicalDeviceSurfacePresentModesKHR = reinterpret_cast<PFN_vkGetPhysicalDeviceSurfacePresentModesKHR>(loader_platform_get_proc_address(loader_handle, "vkGetPhysicalDeviceSurfacePresentModesKHR"));
+    CreateSwapchainKHR = reinterpret_cast<PFN_vkCreateSwapchainKHR>(loader_platform_get_proc_address(loader_handle, "vkCreateSwapchainKHR"));
+    DestroySwapchainKHR = reinterpret_cast<PFN_vkDestroySwapchainKHR>(loader_platform_get_proc_address(loader_handle, "vkDestroySwapchainKHR"));
+    GetSwapchainImagesKHR = reinterpret_cast<PFN_vkGetSwapchainImagesKHR>(loader_platform_get_proc_address(loader_handle, "vkGetSwapchainImagesKHR"));
+    AcquireNextImageKHR = reinterpret_cast<PFN_vkAcquireNextImageKHR>(loader_platform_get_proc_address(loader_handle, "vkAcquireNextImageKHR"));
+    QueuePresentKHR = reinterpret_cast<PFN_vkQueuePresentKHR>(loader_platform_get_proc_address(loader_handle, "vkQueuePresentKHR"));
+    GetDeviceGroupPresentCapabilitiesKHR = reinterpret_cast<PFN_vkGetDeviceGroupPresentCapabilitiesKHR>(loader_platform_get_proc_address(loader_handle, "vkGetDeviceGroupPresentCapabilitiesKHR"));
+    GetDeviceGroupSurfacePresentModesKHR = reinterpret_cast<PFN_vkGetDeviceGroupSurfacePresentModesKHR>(loader_platform_get_proc_address(loader_handle, "vkGetDeviceGroupSurfacePresentModesKHR"));
+    GetPhysicalDevicePresentRectanglesKHR = reinterpret_cast<PFN_vkGetPhysicalDevicePresentRectanglesKHR>(loader_platform_get_proc_address(loader_handle, "vkGetPhysicalDevicePresentRectanglesKHR"));
+    AcquireNextImage2KHR = reinterpret_cast<PFN_vkAcquireNextImage2KHR>(loader_platform_get_proc_address(loader_handle, "vkAcquireNextImage2KHR"));
+    GetPhysicalDeviceDisplayPropertiesKHR = reinterpret_cast<PFN_vkGetPhysicalDeviceDisplayPropertiesKHR>(loader_platform_get_proc_address(loader_handle, "vkGetPhysicalDeviceDisplayPropertiesKHR"));
+    GetPhysicalDeviceDisplayPlanePropertiesKHR = reinterpret_cast<PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR>(loader_platform_get_proc_address(loader_handle, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR"));
+    GetDisplayPlaneSupportedDisplaysKHR = reinterpret_cast<PFN_vkGetDisplayPlaneSupportedDisplaysKHR>(loader_platform_get_proc_address(loader_handle, "vkGetDisplayPlaneSupportedDisplaysKHR"));
+    GetDisplayModePropertiesKHR = reinterpret_cast<PFN_vkGetDisplayModePropertiesKHR>(loader_platform_get_proc_address(loader_handle, "vkGetDisplayModePropertiesKHR"));
+    CreateDisplayModeKHR = reinterpret_cast<PFN_vkCreateDisplayModeKHR>(loader_platform_get_proc_address(loader_handle, "vkCreateDisplayModeKHR"));
+    GetDisplayPlaneCapabilitiesKHR = reinterpret_cast<PFN_vkGetDisplayPlaneCapabilitiesKHR>(loader_platform_get_proc_address(loader_handle, "vkGetDisplayPlaneCapabilitiesKHR"));
+    CreateDisplayPlaneSurfaceKHR = reinterpret_cast<PFN_vkCreateDisplayPlaneSurfaceKHR>(loader_platform_get_proc_address(loader_handle, "vkCreateDisplayPlaneSurfaceKHR"));
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    CreateXlibSurfaceKHR = reinterpret_cast<PFN_vkCreateXlibSurfaceKHR>(loader_platform_get_proc_address(loader_handle, "vkCreateXlibSurfaceKHR"));
+#endif // VK_USE_PLATFORM_XLIB_KHR
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    GetPhysicalDeviceXlibPresentationSupportKHR = reinterpret_cast<PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR>(loader_platform_get_proc_address(loader_handle, "vkGetPhysicalDeviceXlibPresentationSupportKHR"));
+#endif // VK_USE_PLATFORM_XLIB_KHR
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    CreateXcbSurfaceKHR = reinterpret_cast<PFN_vkCreateXcbSurfaceKHR>(loader_platform_get_proc_address(loader_handle, "vkCreateXcbSurfaceKHR"));
+#endif // VK_USE_PLATFORM_XCB_KHR
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    GetPhysicalDeviceXcbPresentationSupportKHR = reinterpret_cast<PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR>(loader_platform_get_proc_address(loader_handle, "vkGetPhysicalDeviceXcbPresentationSupportKHR"));
+#endif // VK_USE_PLATFORM_XCB_KHR
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    CreateWaylandSurfaceKHR = reinterpret_cast<PFN_vkCreateWaylandSurfaceKHR>(loader_platform_get_proc_address(loader_handle, "vkCreateWaylandSurfaceKHR"));
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    GetPhysicalDeviceWaylandPresentationSupportKHR = reinterpret_cast<PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR>(loader_platform_get_proc_address(loader_handle, "vkGetPhysicalDeviceWaylandPresentationSupportKHR"));
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    CreateAndroidSurfaceKHR = reinterpret_cast<PFN_vkCreateAndroidSurfaceKHR>(loader_platform_get_proc_address(loader_handle, "vkCreateAndroidSurfaceKHR"));
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    CreateWin32SurfaceKHR = reinterpret_cast<PFN_vkCreateWin32SurfaceKHR>(loader_platform_get_proc_address(loader_handle, "vkCreateWin32SurfaceKHR"));
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    GetPhysicalDeviceWin32PresentationSupportKHR = reinterpret_cast<PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR>(loader_platform_get_proc_address(loader_handle, "vkGetPhysicalDeviceWin32PresentationSupportKHR"));
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+    CreateMacOSSurfaceMVK = reinterpret_cast<PFN_vkCreateMacOSSurfaceMVK>(loader_platform_get_proc_address(loader_handle, "vkCreateMacOSSurfaceMVK"));
+#endif // VK_USE_PLATFORM_MACOS_MVK
+}
+
+} // namespace vk
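For orientation, this is how the dispatch table defined above would typically be exercised by a test or tool that links these generated sources: call vk::InitDispatchTable() once to load the platform's Vulkan library and resolve the core entry points, then call through the vk:: function pointers. The snippet below is a minimal sketch under that assumption, not part of the imported patch, and uses only entry points declared in lvt_function_pointers.h.

#include "lvt_function_pointers.h"

int main() {
    // Loads vulkan-1.dll / libvulkan.dylib / libvulkan.so and resolves the
    // core Vulkan entry points into the vk:: pointers; exits on failure.
    vk::InitDispatchTable();

    VkInstanceCreateInfo create_info = {};
    create_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;

    VkInstance instance = VK_NULL_HANDLE;
    if (vk::CreateInstance(&create_info, nullptr, &instance) != VK_SUCCESS) {
        return 1;
    }

    // Query only the number of physical devices exposed by the loader/ICDs.
    uint32_t device_count = 0;
    vk::EnumeratePhysicalDevices(instance, &device_count, nullptr);

    vk::DestroyInstance(instance, nullptr);
    return 0;
}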
diff --git a/src/third_party/vulkan-validation-layers/src/layers/generated/lvt_function_pointers.h b/src/third_party/vulkan-validation-layers/src/layers/generated/lvt_function_pointers.h
new file mode 100644
index 0000000..e2ef7ef
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/generated/lvt_function_pointers.h
@@ -0,0 +1,250 @@
+#pragma once
+// *** THIS FILE IS GENERATED - DO NOT EDIT ***
+// See lvt_file_generator.py for modifications
+
+/*
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ */
+
+
+#include <vulkan/vulkan.h>
+#include "vk_loader_platform.h"
+
+namespace vk {
+
+extern PFN_vkCreateInstance CreateInstance;
+extern PFN_vkDestroyInstance DestroyInstance;
+extern PFN_vkEnumeratePhysicalDevices EnumeratePhysicalDevices;
+extern PFN_vkGetPhysicalDeviceFeatures GetPhysicalDeviceFeatures;
+extern PFN_vkGetPhysicalDeviceFormatProperties GetPhysicalDeviceFormatProperties;
+extern PFN_vkGetPhysicalDeviceImageFormatProperties GetPhysicalDeviceImageFormatProperties;
+extern PFN_vkGetPhysicalDeviceProperties GetPhysicalDeviceProperties;
+extern PFN_vkGetPhysicalDeviceQueueFamilyProperties GetPhysicalDeviceQueueFamilyProperties;
+extern PFN_vkGetPhysicalDeviceMemoryProperties GetPhysicalDeviceMemoryProperties;
+extern PFN_vkGetInstanceProcAddr GetInstanceProcAddr;
+extern PFN_vkGetDeviceProcAddr GetDeviceProcAddr;
+extern PFN_vkCreateDevice CreateDevice;
+extern PFN_vkDestroyDevice DestroyDevice;
+extern PFN_vkEnumerateInstanceExtensionProperties EnumerateInstanceExtensionProperties;
+extern PFN_vkEnumerateDeviceExtensionProperties EnumerateDeviceExtensionProperties;
+extern PFN_vkEnumerateInstanceLayerProperties EnumerateInstanceLayerProperties;
+extern PFN_vkEnumerateDeviceLayerProperties EnumerateDeviceLayerProperties;
+extern PFN_vkGetDeviceQueue GetDeviceQueue;
+extern PFN_vkQueueSubmit QueueSubmit;
+extern PFN_vkQueueWaitIdle QueueWaitIdle;
+extern PFN_vkDeviceWaitIdle DeviceWaitIdle;
+extern PFN_vkAllocateMemory AllocateMemory;
+extern PFN_vkFreeMemory FreeMemory;
+extern PFN_vkMapMemory MapMemory;
+extern PFN_vkUnmapMemory UnmapMemory;
+extern PFN_vkFlushMappedMemoryRanges FlushMappedMemoryRanges;
+extern PFN_vkInvalidateMappedMemoryRanges InvalidateMappedMemoryRanges;
+extern PFN_vkGetDeviceMemoryCommitment GetDeviceMemoryCommitment;
+extern PFN_vkBindBufferMemory BindBufferMemory;
+extern PFN_vkBindImageMemory BindImageMemory;
+extern PFN_vkGetBufferMemoryRequirements GetBufferMemoryRequirements;
+extern PFN_vkGetImageMemoryRequirements GetImageMemoryRequirements;
+extern PFN_vkGetImageSparseMemoryRequirements GetImageSparseMemoryRequirements;
+extern PFN_vkGetPhysicalDeviceSparseImageFormatProperties GetPhysicalDeviceSparseImageFormatProperties;
+extern PFN_vkQueueBindSparse QueueBindSparse;
+extern PFN_vkCreateFence CreateFence;
+extern PFN_vkDestroyFence DestroyFence;
+extern PFN_vkResetFences ResetFences;
+extern PFN_vkGetFenceStatus GetFenceStatus;
+extern PFN_vkWaitForFences WaitForFences;
+extern PFN_vkCreateSemaphore CreateSemaphore;
+extern PFN_vkDestroySemaphore DestroySemaphore;
+extern PFN_vkCreateEvent CreateEvent;
+extern PFN_vkDestroyEvent DestroyEvent;
+extern PFN_vkGetEventStatus GetEventStatus;
+extern PFN_vkSetEvent SetEvent;
+extern PFN_vkResetEvent ResetEvent;
+extern PFN_vkCreateQueryPool CreateQueryPool;
+extern PFN_vkDestroyQueryPool DestroyQueryPool;
+extern PFN_vkGetQueryPoolResults GetQueryPoolResults;
+extern PFN_vkCreateBuffer CreateBuffer;
+extern PFN_vkDestroyBuffer DestroyBuffer;
+extern PFN_vkCreateBufferView CreateBufferView;
+extern PFN_vkDestroyBufferView DestroyBufferView;
+extern PFN_vkCreateImage CreateImage;
+extern PFN_vkDestroyImage DestroyImage;
+extern PFN_vkGetImageSubresourceLayout GetImageSubresourceLayout;
+extern PFN_vkCreateImageView CreateImageView;
+extern PFN_vkDestroyImageView DestroyImageView;
+extern PFN_vkCreateShaderModule CreateShaderModule;
+extern PFN_vkDestroyShaderModule DestroyShaderModule;
+extern PFN_vkCreatePipelineCache CreatePipelineCache;
+extern PFN_vkDestroyPipelineCache DestroyPipelineCache;
+extern PFN_vkGetPipelineCacheData GetPipelineCacheData;
+extern PFN_vkMergePipelineCaches MergePipelineCaches;
+extern PFN_vkCreateGraphicsPipelines CreateGraphicsPipelines;
+extern PFN_vkCreateComputePipelines CreateComputePipelines;
+extern PFN_vkDestroyPipeline DestroyPipeline;
+extern PFN_vkCreatePipelineLayout CreatePipelineLayout;
+extern PFN_vkDestroyPipelineLayout DestroyPipelineLayout;
+extern PFN_vkCreateSampler CreateSampler;
+extern PFN_vkDestroySampler DestroySampler;
+extern PFN_vkCreateDescriptorSetLayout CreateDescriptorSetLayout;
+extern PFN_vkDestroyDescriptorSetLayout DestroyDescriptorSetLayout;
+extern PFN_vkCreateDescriptorPool CreateDescriptorPool;
+extern PFN_vkDestroyDescriptorPool DestroyDescriptorPool;
+extern PFN_vkResetDescriptorPool ResetDescriptorPool;
+extern PFN_vkAllocateDescriptorSets AllocateDescriptorSets;
+extern PFN_vkFreeDescriptorSets FreeDescriptorSets;
+extern PFN_vkUpdateDescriptorSets UpdateDescriptorSets;
+extern PFN_vkCreateFramebuffer CreateFramebuffer;
+extern PFN_vkDestroyFramebuffer DestroyFramebuffer;
+extern PFN_vkCreateRenderPass CreateRenderPass;
+extern PFN_vkDestroyRenderPass DestroyRenderPass;
+extern PFN_vkGetRenderAreaGranularity GetRenderAreaGranularity;
+extern PFN_vkCreateCommandPool CreateCommandPool;
+extern PFN_vkDestroyCommandPool DestroyCommandPool;
+extern PFN_vkResetCommandPool ResetCommandPool;
+extern PFN_vkAllocateCommandBuffers AllocateCommandBuffers;
+extern PFN_vkFreeCommandBuffers FreeCommandBuffers;
+extern PFN_vkBeginCommandBuffer BeginCommandBuffer;
+extern PFN_vkEndCommandBuffer EndCommandBuffer;
+extern PFN_vkResetCommandBuffer ResetCommandBuffer;
+extern PFN_vkCmdBindPipeline CmdBindPipeline;
+extern PFN_vkCmdSetViewport CmdSetViewport;
+extern PFN_vkCmdSetScissor CmdSetScissor;
+extern PFN_vkCmdSetLineWidth CmdSetLineWidth;
+extern PFN_vkCmdSetDepthBias CmdSetDepthBias;
+extern PFN_vkCmdSetBlendConstants CmdSetBlendConstants;
+extern PFN_vkCmdSetDepthBounds CmdSetDepthBounds;
+extern PFN_vkCmdSetStencilCompareMask CmdSetStencilCompareMask;
+extern PFN_vkCmdSetStencilWriteMask CmdSetStencilWriteMask;
+extern PFN_vkCmdSetStencilReference CmdSetStencilReference;
+extern PFN_vkCmdBindDescriptorSets CmdBindDescriptorSets;
+extern PFN_vkCmdBindIndexBuffer CmdBindIndexBuffer;
+extern PFN_vkCmdBindVertexBuffers CmdBindVertexBuffers;
+extern PFN_vkCmdDraw CmdDraw;
+extern PFN_vkCmdDrawIndexed CmdDrawIndexed;
+extern PFN_vkCmdDrawIndirect CmdDrawIndirect;
+extern PFN_vkCmdDrawIndexedIndirect CmdDrawIndexedIndirect;
+extern PFN_vkCmdDispatch CmdDispatch;
+extern PFN_vkCmdDispatchIndirect CmdDispatchIndirect;
+extern PFN_vkCmdCopyBuffer CmdCopyBuffer;
+extern PFN_vkCmdCopyImage CmdCopyImage;
+extern PFN_vkCmdBlitImage CmdBlitImage;
+extern PFN_vkCmdCopyBufferToImage CmdCopyBufferToImage;
+extern PFN_vkCmdCopyImageToBuffer CmdCopyImageToBuffer;
+extern PFN_vkCmdUpdateBuffer CmdUpdateBuffer;
+extern PFN_vkCmdFillBuffer CmdFillBuffer;
+extern PFN_vkCmdClearColorImage CmdClearColorImage;
+extern PFN_vkCmdClearDepthStencilImage CmdClearDepthStencilImage;
+extern PFN_vkCmdClearAttachments CmdClearAttachments;
+extern PFN_vkCmdResolveImage CmdResolveImage;
+extern PFN_vkCmdSetEvent CmdSetEvent;
+extern PFN_vkCmdResetEvent CmdResetEvent;
+extern PFN_vkCmdWaitEvents CmdWaitEvents;
+extern PFN_vkCmdPipelineBarrier CmdPipelineBarrier;
+extern PFN_vkCmdBeginQuery CmdBeginQuery;
+extern PFN_vkCmdEndQuery CmdEndQuery;
+extern PFN_vkCmdResetQueryPool CmdResetQueryPool;
+extern PFN_vkCmdWriteTimestamp CmdWriteTimestamp;
+extern PFN_vkCmdCopyQueryPoolResults CmdCopyQueryPoolResults;
+extern PFN_vkCmdPushConstants CmdPushConstants;
+extern PFN_vkCmdBeginRenderPass CmdBeginRenderPass;
+extern PFN_vkCmdNextSubpass CmdNextSubpass;
+extern PFN_vkCmdEndRenderPass CmdEndRenderPass;
+extern PFN_vkCmdExecuteCommands CmdExecuteCommands;
+extern PFN_vkEnumerateInstanceVersion EnumerateInstanceVersion;
+extern PFN_vkBindBufferMemory2 BindBufferMemory2;
+extern PFN_vkBindImageMemory2 BindImageMemory2;
+extern PFN_vkGetDeviceGroupPeerMemoryFeatures GetDeviceGroupPeerMemoryFeatures;
+extern PFN_vkCmdSetDeviceMask CmdSetDeviceMask;
+extern PFN_vkCmdDispatchBase CmdDispatchBase;
+extern PFN_vkEnumeratePhysicalDeviceGroups EnumeratePhysicalDeviceGroups;
+extern PFN_vkGetImageMemoryRequirements2 GetImageMemoryRequirements2;
+extern PFN_vkGetBufferMemoryRequirements2 GetBufferMemoryRequirements2;
+extern PFN_vkGetImageSparseMemoryRequirements2 GetImageSparseMemoryRequirements2;
+extern PFN_vkGetPhysicalDeviceFeatures2 GetPhysicalDeviceFeatures2;
+extern PFN_vkGetPhysicalDeviceProperties2 GetPhysicalDeviceProperties2;
+extern PFN_vkGetPhysicalDeviceFormatProperties2 GetPhysicalDeviceFormatProperties2;
+extern PFN_vkGetPhysicalDeviceImageFormatProperties2 GetPhysicalDeviceImageFormatProperties2;
+extern PFN_vkGetPhysicalDeviceQueueFamilyProperties2 GetPhysicalDeviceQueueFamilyProperties2;
+extern PFN_vkGetPhysicalDeviceMemoryProperties2 GetPhysicalDeviceMemoryProperties2;
+extern PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 GetPhysicalDeviceSparseImageFormatProperties2;
+extern PFN_vkTrimCommandPool TrimCommandPool;
+extern PFN_vkGetDeviceQueue2 GetDeviceQueue2;
+extern PFN_vkCreateSamplerYcbcrConversion CreateSamplerYcbcrConversion;
+extern PFN_vkDestroySamplerYcbcrConversion DestroySamplerYcbcrConversion;
+extern PFN_vkCreateDescriptorUpdateTemplate CreateDescriptorUpdateTemplate;
+extern PFN_vkDestroyDescriptorUpdateTemplate DestroyDescriptorUpdateTemplate;
+extern PFN_vkUpdateDescriptorSetWithTemplate UpdateDescriptorSetWithTemplate;
+extern PFN_vkGetPhysicalDeviceExternalBufferProperties GetPhysicalDeviceExternalBufferProperties;
+extern PFN_vkGetPhysicalDeviceExternalFenceProperties GetPhysicalDeviceExternalFenceProperties;
+extern PFN_vkGetPhysicalDeviceExternalSemaphoreProperties GetPhysicalDeviceExternalSemaphoreProperties;
+extern PFN_vkGetDescriptorSetLayoutSupport GetDescriptorSetLayoutSupport;
+extern PFN_vkDestroySurfaceKHR DestroySurfaceKHR;
+extern PFN_vkGetPhysicalDeviceSurfaceSupportKHR GetPhysicalDeviceSurfaceSupportKHR;
+extern PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR GetPhysicalDeviceSurfaceCapabilitiesKHR;
+extern PFN_vkGetPhysicalDeviceSurfaceFormatsKHR GetPhysicalDeviceSurfaceFormatsKHR;
+extern PFN_vkGetPhysicalDeviceSurfacePresentModesKHR GetPhysicalDeviceSurfacePresentModesKHR;
+extern PFN_vkCreateSwapchainKHR CreateSwapchainKHR;
+extern PFN_vkDestroySwapchainKHR DestroySwapchainKHR;
+extern PFN_vkGetSwapchainImagesKHR GetSwapchainImagesKHR;
+extern PFN_vkAcquireNextImageKHR AcquireNextImageKHR;
+extern PFN_vkQueuePresentKHR QueuePresentKHR;
+extern PFN_vkGetDeviceGroupPresentCapabilitiesKHR GetDeviceGroupPresentCapabilitiesKHR;
+extern PFN_vkGetDeviceGroupSurfacePresentModesKHR GetDeviceGroupSurfacePresentModesKHR;
+extern PFN_vkGetPhysicalDevicePresentRectanglesKHR GetPhysicalDevicePresentRectanglesKHR;
+extern PFN_vkAcquireNextImage2KHR AcquireNextImage2KHR;
+extern PFN_vkGetPhysicalDeviceDisplayPropertiesKHR GetPhysicalDeviceDisplayPropertiesKHR;
+extern PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR GetPhysicalDeviceDisplayPlanePropertiesKHR;
+extern PFN_vkGetDisplayPlaneSupportedDisplaysKHR GetDisplayPlaneSupportedDisplaysKHR;
+extern PFN_vkGetDisplayModePropertiesKHR GetDisplayModePropertiesKHR;
+extern PFN_vkCreateDisplayModeKHR CreateDisplayModeKHR;
+extern PFN_vkGetDisplayPlaneCapabilitiesKHR GetDisplayPlaneCapabilitiesKHR;
+extern PFN_vkCreateDisplayPlaneSurfaceKHR CreateDisplayPlaneSurfaceKHR;
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+extern PFN_vkCreateXlibSurfaceKHR CreateXlibSurfaceKHR;
+#endif // VK_USE_PLATFORM_XLIB_KHR
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+extern PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR GetPhysicalDeviceXlibPresentationSupportKHR;
+#endif // VK_USE_PLATFORM_XLIB_KHR
+#ifdef VK_USE_PLATFORM_XCB_KHR
+extern PFN_vkCreateXcbSurfaceKHR CreateXcbSurfaceKHR;
+#endif // VK_USE_PLATFORM_XCB_KHR
+#ifdef VK_USE_PLATFORM_XCB_KHR
+extern PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR GetPhysicalDeviceXcbPresentationSupportKHR;
+#endif // VK_USE_PLATFORM_XCB_KHR
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+extern PFN_vkCreateWaylandSurfaceKHR CreateWaylandSurfaceKHR;
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+extern PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR GetPhysicalDeviceWaylandPresentationSupportKHR;
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+extern PFN_vkCreateAndroidSurfaceKHR CreateAndroidSurfaceKHR;
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+extern PFN_vkCreateWin32SurfaceKHR CreateWin32SurfaceKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+extern PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR GetPhysicalDeviceWin32PresentationSupportKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+extern PFN_vkCreateMacOSSurfaceMVK CreateMacOSSurfaceMVK;
+#endif // VK_USE_PLATFORM_MACOS_MVK
+
+void InitDispatchTable();
+
+} // namespace vk
diff --git a/src/third_party/vulkan-validation-layers/src/layers/generated/object_tracker.cpp b/src/third_party/vulkan-validation-layers/src/layers/generated/object_tracker.cpp
new file mode 100644
index 0000000..da0076d
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/generated/object_tracker.cpp
@@ -0,0 +1,5252 @@
+// *** THIS FILE IS GENERATED - DO NOT EDIT ***
+// See object_tracker_generator.py for modifications
+
+
+/***************************************************************************
+ *
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ *
+ ****************************************************************************/
+
+
+#include "chassis.h"
+#include "object_lifetime_validation.h"
+
+
+
+// ObjectTracker: validation functions that report undestroyed (leaked) objects
+bool ObjectLifetimes::ReportUndestroyedInstanceObjects(VkInstance instance, const std::string& error_code) const {
+    bool skip = false;
+    skip |= ReportLeakedInstanceObjects(instance, kVulkanObjectTypeSurfaceKHR, error_code);
+    skip |= ReportLeakedInstanceObjects(instance, kVulkanObjectTypeSwapchainKHR, error_code);
+    skip |= ReportLeakedInstanceObjects(instance, kVulkanObjectTypeDisplayKHR, error_code);
+    skip |= ReportLeakedInstanceObjects(instance, kVulkanObjectTypeDisplayModeKHR, error_code);
+    skip |= ReportLeakedInstanceObjects(instance, kVulkanObjectTypeDebugReportCallbackEXT, error_code);
+    skip |= ReportLeakedInstanceObjects(instance, kVulkanObjectTypeDebugUtilsMessengerEXT, error_code);
+    return skip;
+}
+bool ObjectLifetimes::ReportUndestroyedDeviceObjects(VkDevice device, const std::string& error_code) const {
+    bool skip = false;
+    skip |= ReportLeakedDeviceObjects(device, kVulkanObjectTypeCommandBuffer, error_code);
+    skip |= ReportLeakedDeviceObjects(device, kVulkanObjectTypeSemaphore, error_code);
+    skip |= ReportLeakedDeviceObjects(device, kVulkanObjectTypeFence, error_code);
+    skip |= ReportLeakedDeviceObjects(device, kVulkanObjectTypeDeviceMemory, error_code);
+    skip |= ReportLeakedDeviceObjects(device, kVulkanObjectTypeBuffer, error_code);
+    skip |= ReportLeakedDeviceObjects(device, kVulkanObjectTypeImage, error_code);
+    skip |= ReportLeakedDeviceObjects(device, kVulkanObjectTypeEvent, error_code);
+    skip |= ReportLeakedDeviceObjects(device, kVulkanObjectTypeQueryPool, error_code);
+    skip |= ReportLeakedDeviceObjects(device, kVulkanObjectTypeBufferView, error_code);
+    skip |= ReportLeakedDeviceObjects(device, kVulkanObjectTypeImageView, error_code);
+    skip |= ReportLeakedDeviceObjects(device, kVulkanObjectTypeShaderModule, error_code);
+    skip |= ReportLeakedDeviceObjects(device, kVulkanObjectTypePipelineCache, error_code);
+    skip |= ReportLeakedDeviceObjects(device, kVulkanObjectTypePipelineLayout, error_code);
+    skip |= ReportLeakedDeviceObjects(device, kVulkanObjectTypeRenderPass, error_code);
+    skip |= ReportLeakedDeviceObjects(device, kVulkanObjectTypePipeline, error_code);
+    skip |= ReportLeakedDeviceObjects(device, kVulkanObjectTypeDescriptorSetLayout, error_code);
+    skip |= ReportLeakedDeviceObjects(device, kVulkanObjectTypeSampler, error_code);
+    skip |= ReportLeakedDeviceObjects(device, kVulkanObjectTypeDescriptorPool, error_code);
+    skip |= ReportLeakedDeviceObjects(device, kVulkanObjectTypeDescriptorSet, error_code);
+    skip |= ReportLeakedDeviceObjects(device, kVulkanObjectTypeFramebuffer, error_code);
+    skip |= ReportLeakedDeviceObjects(device, kVulkanObjectTypeCommandPool, error_code);
+    skip |= ReportLeakedDeviceObjects(device, kVulkanObjectTypeSamplerYcbcrConversion, error_code);
+    skip |= ReportLeakedDeviceObjects(device, kVulkanObjectTypeDescriptorUpdateTemplate, error_code);
+    skip |= ReportLeakedDeviceObjects(device, kVulkanObjectTypeObjectTableNVX, error_code);
+    skip |= ReportLeakedDeviceObjects(device, kVulkanObjectTypeIndirectCommandsLayoutNVX, error_code);
+    skip |= ReportLeakedDeviceObjects(device, kVulkanObjectTypeValidationCacheEXT, error_code);
+    skip |= ReportLeakedDeviceObjects(device, kVulkanObjectTypeAccelerationStructureNV, error_code);
+    skip |= ReportLeakedDeviceObjects(device, kVulkanObjectTypePerformanceConfigurationINTEL, error_code);
+    return skip;
+}
+
+void ObjectLifetimes::DestroyLeakedInstanceObjects() {
+    DestroyUndestroyedObjects(kVulkanObjectTypeSurfaceKHR);
+    DestroyUndestroyedObjects(kVulkanObjectTypeSwapchainKHR);
+    DestroyUndestroyedObjects(kVulkanObjectTypeDisplayKHR);
+    DestroyUndestroyedObjects(kVulkanObjectTypeDisplayModeKHR);
+    DestroyUndestroyedObjects(kVulkanObjectTypeDebugReportCallbackEXT);
+    DestroyUndestroyedObjects(kVulkanObjectTypeDebugUtilsMessengerEXT);
+}
+void ObjectLifetimes::DestroyLeakedDeviceObjects() {
+    DestroyUndestroyedObjects(kVulkanObjectTypeCommandBuffer);
+    DestroyUndestroyedObjects(kVulkanObjectTypeSemaphore);
+    DestroyUndestroyedObjects(kVulkanObjectTypeFence);
+    DestroyUndestroyedObjects(kVulkanObjectTypeDeviceMemory);
+    DestroyUndestroyedObjects(kVulkanObjectTypeBuffer);
+    DestroyUndestroyedObjects(kVulkanObjectTypeImage);
+    DestroyUndestroyedObjects(kVulkanObjectTypeEvent);
+    DestroyUndestroyedObjects(kVulkanObjectTypeQueryPool);
+    DestroyUndestroyedObjects(kVulkanObjectTypeBufferView);
+    DestroyUndestroyedObjects(kVulkanObjectTypeImageView);
+    DestroyUndestroyedObjects(kVulkanObjectTypeShaderModule);
+    DestroyUndestroyedObjects(kVulkanObjectTypePipelineCache);
+    DestroyUndestroyedObjects(kVulkanObjectTypePipelineLayout);
+    DestroyUndestroyedObjects(kVulkanObjectTypeRenderPass);
+    DestroyUndestroyedObjects(kVulkanObjectTypePipeline);
+    DestroyUndestroyedObjects(kVulkanObjectTypeDescriptorSetLayout);
+    DestroyUndestroyedObjects(kVulkanObjectTypeSampler);
+    DestroyUndestroyedObjects(kVulkanObjectTypeDescriptorPool);
+    DestroyUndestroyedObjects(kVulkanObjectTypeDescriptorSet);
+    DestroyUndestroyedObjects(kVulkanObjectTypeFramebuffer);
+    DestroyUndestroyedObjects(kVulkanObjectTypeCommandPool);
+    DestroyUndestroyedObjects(kVulkanObjectTypeSamplerYcbcrConversion);
+    DestroyUndestroyedObjects(kVulkanObjectTypeDescriptorUpdateTemplate);
+    DestroyUndestroyedObjects(kVulkanObjectTypeObjectTableNVX);
+    DestroyUndestroyedObjects(kVulkanObjectTypeIndirectCommandsLayoutNVX);
+    DestroyUndestroyedObjects(kVulkanObjectTypeValidationCacheEXT);
+    DestroyUndestroyedObjects(kVulkanObjectTypeAccelerationStructureNV);
+    DestroyUndestroyedObjects(kVulkanObjectTypePerformanceConfigurationINTEL);
+}
+
+
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceFeatures(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures*                   pFeatures) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceFeatures-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties*                         pFormatProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceFormatProperties-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageCreateFlags                          flags,
+    VkImageFormatProperties*                    pImageFormatProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceImageFormatProperties-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties*                 pProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceProperties-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceMemoryProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties*           pMemoryProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceMemoryProperties-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetInstanceProcAddr(
+    VkInstance                                  instance,
+    const char*                                 pName) const {
+    bool skip = false;
+    skip |= ValidateObject(instance, kVulkanObjectTypeInstance, true, "VUID-vkGetInstanceProcAddr-instance-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetDeviceProcAddr(
+    VkDevice                                    device,
+    const char*                                 pName) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetDeviceProcAddr-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCreateDevice(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDeviceCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDevice*                                   pDevice) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkCreateDevice-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateDevice(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDeviceCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDevice*                                   pDevice,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pDevice, kVulkanObjectTypeDevice, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateEnumerateDeviceExtensionProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const char*                                 pLayerName,
+    uint32_t*                                   pPropertyCount,
+    VkExtensionProperties*                      pProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkEnumerateDeviceExtensionProperties-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateEnumerateDeviceLayerProperties(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkLayerProperties*                          pProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkEnumerateDeviceLayerProperties-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateQueueSubmit(
+    VkQueue                                     queue,
+    uint32_t                                    submitCount,
+    const VkSubmitInfo*                         pSubmits,
+    VkFence                                     fence) const {
+    bool skip = false;
+    skip |= ValidateObject(queue, kVulkanObjectTypeQueue, false, "VUID-vkQueueSubmit-queue-parameter", "VUID-vkQueueSubmit-commonparent");
+    if (pSubmits) {
+        for (uint32_t index0 = 0; index0 < submitCount; ++index0) {
+            if (pSubmits[index0].pWaitSemaphores) {
+                for (uint32_t index1 = 0; index1 < pSubmits[index0].waitSemaphoreCount; ++index1) {
+                    skip |= ValidateObject(pSubmits[index0].pWaitSemaphores[index1], kVulkanObjectTypeSemaphore, false, "VUID-VkSubmitInfo-pWaitSemaphores-parameter", "VUID-VkSubmitInfo-commonparent");
+                }
+            }
+            if (pSubmits[index0].pCommandBuffers) {
+                for (uint32_t index1 = 0; index1 < pSubmits[index0].commandBufferCount; ++index1) {
+                    skip |= ValidateObject(pSubmits[index0].pCommandBuffers[index1], kVulkanObjectTypeCommandBuffer, false, "VUID-VkSubmitInfo-pCommandBuffers-parameter", "VUID-VkSubmitInfo-commonparent");
+                }
+            }
+            if (pSubmits[index0].pSignalSemaphores) {
+                for (uint32_t index1 = 0; index1 < pSubmits[index0].signalSemaphoreCount; ++index1) {
+                    skip |= ValidateObject(pSubmits[index0].pSignalSemaphores[index1], kVulkanObjectTypeSemaphore, false, "VUID-VkSubmitInfo-pSignalSemaphores-parameter", "VUID-VkSubmitInfo-commonparent");
+                }
+            }
+        }
+    }
+    skip |= ValidateObject(fence, kVulkanObjectTypeFence, true, "VUID-vkQueueSubmit-fence-parameter", "VUID-vkQueueSubmit-commonparent");
+
+    return skip;
+}
+
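A pattern worth noting in the generated hooks above: every handle check is OR-accumulated into a single skip flag rather than short-circuiting, so one PreCallValidate* pass reports all invalid handles before the layer decides whether to suppress the wrapped call. The self-contained sketch below illustrates only that idiom; the helper name and printf-based reporting are placeholders and are not part of the layer's API.

#include <cstdio>

// Illustrative stand-in for ValidateObject(): logs a problem and returns
// true when the wrapped API call should be skipped.
static bool CheckHandleNonNull(const void* handle, const char* what) {
    if (handle != nullptr) return false;
    std::printf("validation error: %s is VK_NULL_HANDLE\n", what);
    return true;
}

// Mirrors the shape of the generated PreCallValidate* functions: every
// check runs, and their results are OR-ed into a single flag.
static bool PreCallValidateExample(const void* device, const void* memory) {
    bool skip = false;
    skip |= CheckHandleNonNull(device, "device");
    skip |= CheckHandleNonNull(memory, "memory");  // Still evaluated even if the first check failed.
    return skip;
}

int main() {
    return PreCallValidateExample(nullptr, nullptr) ? 1 : 0;
}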
+bool ObjectLifetimes::PreCallValidateQueueWaitIdle(
+    VkQueue                                     queue) const {
+    bool skip = false;
+    skip |= ValidateObject(queue, kVulkanObjectTypeQueue, false, "VUID-vkQueueWaitIdle-queue-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateDeviceWaitIdle(
+    VkDevice                                    device) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDeviceWaitIdle-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateAllocateMemory(
+    VkDevice                                    device,
+    const VkMemoryAllocateInfo*                 pAllocateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDeviceMemory*                             pMemory) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkAllocateMemory-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
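+// PostCallRecord* hooks register newly created handles with the tracker, but
+// only when the driver reported VK_SUCCESS, so failed creations never enter
+// the object map.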
+void ObjectLifetimes::PostCallRecordAllocateMemory(
+    VkDevice                                    device,
+    const VkMemoryAllocateInfo*                 pAllocateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDeviceMemory*                             pMemory,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pMemory, kVulkanObjectTypeDeviceMemory, pAllocator);
+
+}
+
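+// Destroy-style entry points validate the owning device and the (optional)
+// handle, then call ValidateDestroyObject; in the fence/semaphore/event hooks
+// further below the trailing VUID pair appears to name the allocation-callback
+// mismatch errors, while kVUIDUndefined is passed when no specific ID applies.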
+bool ObjectLifetimes::PreCallValidateFreeMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkFreeMemory-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(memory, kVulkanObjectTypeDeviceMemory, true, "VUID-vkFreeMemory-memory-parameter", "VUID-vkFreeMemory-memory-parent");
+    skip |= ValidateDestroyObject(memory, kVulkanObjectTypeDeviceMemory, pAllocator, kVUIDUndefined, kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordFreeMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    const VkAllocationCallbacks*                pAllocator) {
+    RecordDestroyObject(memory, kVulkanObjectTypeDeviceMemory);
+
+}
+
+bool ObjectLifetimes::PreCallValidateMapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                offset,
+    VkDeviceSize                                size,
+    VkMemoryMapFlags                            flags,
+    void**                                      ppData) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkMapMemory-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(memory, kVulkanObjectTypeDeviceMemory, false, "VUID-vkMapMemory-memory-parameter", "VUID-vkMapMemory-memory-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateUnmapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkUnmapMemory-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(memory, kVulkanObjectTypeDeviceMemory, false, "VUID-vkUnmapMemory-memory-parameter", "VUID-vkUnmapMemory-memory-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateFlushMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkFlushMappedMemoryRanges-device-parameter", kVUIDUndefined);
+    if (pMemoryRanges) {
+        for (uint32_t index0 = 0; index0 < memoryRangeCount; ++index0) {
+            skip |= ValidateObject(pMemoryRanges[index0].memory, kVulkanObjectTypeDeviceMemory, false, "VUID-VkMappedMemoryRange-memory-parameter", kVUIDUndefined);
+        }
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateInvalidateMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkInvalidateMappedMemoryRanges-device-parameter", kVUIDUndefined);
+    if (pMemoryRanges) {
+        for (uint32_t index0 = 0; index0 < memoryRangeCount; ++index0) {
+            skip |= ValidateObject(pMemoryRanges[index0].memory, kVulkanObjectTypeDeviceMemory, false, "VUID-VkMappedMemoryRange-memory-parameter", kVUIDUndefined);
+        }
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetDeviceMemoryCommitment(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize*                               pCommittedMemoryInBytes) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetDeviceMemoryCommitment-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(memory, kVulkanObjectTypeDeviceMemory, false, "VUID-vkGetDeviceMemoryCommitment-memory-parameter", "VUID-vkGetDeviceMemoryCommitment-memory-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateBindBufferMemory(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkBindBufferMemory-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(buffer, kVulkanObjectTypeBuffer, false, "VUID-vkBindBufferMemory-buffer-parameter", "VUID-vkBindBufferMemory-buffer-parent");
+    skip |= ValidateObject(memory, kVulkanObjectTypeDeviceMemory, false, "VUID-vkBindBufferMemory-memory-parameter", "VUID-vkBindBufferMemory-memory-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateBindImageMemory(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkBindImageMemory-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(image, kVulkanObjectTypeImage, false, "VUID-vkBindImageMemory-image-parameter", "VUID-vkBindImageMemory-image-parent");
+    skip |= ValidateObject(memory, kVulkanObjectTypeDeviceMemory, false, "VUID-vkBindImageMemory-memory-parameter", "VUID-vkBindImageMemory-memory-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetBufferMemoryRequirements(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkMemoryRequirements*                       pMemoryRequirements) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetBufferMemoryRequirements-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(buffer, kVulkanObjectTypeBuffer, false, "VUID-vkGetBufferMemoryRequirements-buffer-parameter", "VUID-vkGetBufferMemoryRequirements-buffer-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetImageMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkMemoryRequirements*                       pMemoryRequirements) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetImageMemoryRequirements-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(image, kVulkanObjectTypeImage, false, "VUID-vkGetImageMemoryRequirements-image-parameter", "VUID-vkGetImageMemoryRequirements-image-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetImageSparseMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements*            pSparseMemoryRequirements) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetImageSparseMemoryRequirements-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(image, kVulkanObjectTypeImage, false, "VUID-vkGetImageSparseMemoryRequirements-image-parameter", "VUID-vkGetImageSparseMemoryRequirements-image-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceSparseImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkSampleCountFlagBits                       samples,
+    VkImageUsageFlags                           usage,
+    VkImageTiling                               tiling,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties*              pProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
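+// Sparse binding nests three levels deep: per VkBindSparseInfo the wait/signal
+// semaphores, then each buffer, opaque-image and image bind array, then every
+// memory bind inside those arrays. The bound memory handles are allowed to be
+// VK_NULL_HANDLE, which unbinds the range.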
+bool ObjectLifetimes::PreCallValidateQueueBindSparse(
+    VkQueue                                     queue,
+    uint32_t                                    bindInfoCount,
+    const VkBindSparseInfo*                     pBindInfo,
+    VkFence                                     fence) const {
+    bool skip = false;
+    skip |= ValidateObject(queue, kVulkanObjectTypeQueue, false, "VUID-vkQueueBindSparse-queue-parameter", "VUID-vkQueueBindSparse-commonparent");
+    if (pBindInfo) {
+        for (uint32_t index0 = 0; index0 < bindInfoCount; ++index0) {
+            if (pBindInfo[index0].pWaitSemaphores) {
+                for (uint32_t index1 = 0; index1 < pBindInfo[index0].waitSemaphoreCount; ++index1) {
+                    skip |= ValidateObject(pBindInfo[index0].pWaitSemaphores[index1], kVulkanObjectTypeSemaphore, false, "VUID-VkBindSparseInfo-pWaitSemaphores-parameter", "VUID-VkBindSparseInfo-commonparent");
+                }
+            }
+            if (pBindInfo[index0].pBufferBinds) {
+                for (uint32_t index1 = 0; index1 < pBindInfo[index0].bufferBindCount; ++index1) {
+                    skip |= ValidateObject(pBindInfo[index0].pBufferBinds[index1].buffer, kVulkanObjectTypeBuffer, false, "VUID-VkSparseBufferMemoryBindInfo-buffer-parameter", kVUIDUndefined);
+                    if (pBindInfo[index0].pBufferBinds[index1].pBinds) {
+                        for (uint32_t index2 = 0; index2 < pBindInfo[index0].pBufferBinds[index1].bindCount; ++index2) {
+                            skip |= ValidateObject(pBindInfo[index0].pBufferBinds[index1].pBinds[index2].memory, kVulkanObjectTypeDeviceMemory, true, "VUID-VkSparseMemoryBind-memory-parameter", kVUIDUndefined);
+                        }
+                    }
+                }
+            }
+            if (pBindInfo[index0].pImageOpaqueBinds) {
+                for (uint32_t index1 = 0; index1 < pBindInfo[index0].imageOpaqueBindCount; ++index1) {
+                    skip |= ValidateObject(pBindInfo[index0].pImageOpaqueBinds[index1].image, kVulkanObjectTypeImage, false, "VUID-VkSparseImageOpaqueMemoryBindInfo-image-parameter", kVUIDUndefined);
+                    if (pBindInfo[index0].pImageOpaqueBinds[index1].pBinds) {
+                        for (uint32_t index2 = 0; index2 < pBindInfo[index0].pImageOpaqueBinds[index1].bindCount; ++index2) {
+                            skip |= ValidateObject(pBindInfo[index0].pImageOpaqueBinds[index1].pBinds[index2].memory, kVulkanObjectTypeDeviceMemory, true, "VUID-VkSparseMemoryBind-memory-parameter", kVUIDUndefined);
+                        }
+                    }
+                }
+            }
+            if (pBindInfo[index0].pImageBinds) {
+                for (uint32_t index1 = 0; index1 < pBindInfo[index0].imageBindCount; ++index1) {
+                    skip |= ValidateObject(pBindInfo[index0].pImageBinds[index1].image, kVulkanObjectTypeImage, false, "VUID-VkSparseImageMemoryBindInfo-image-parameter", kVUIDUndefined);
+                    if (pBindInfo[index0].pImageBinds[index1].pBinds) {
+                        for (uint32_t index2 = 0; index2 < pBindInfo[index0].pImageBinds[index1].bindCount; ++index2) {
+                            skip |= ValidateObject(pBindInfo[index0].pImageBinds[index1].pBinds[index2].memory, kVulkanObjectTypeDeviceMemory, true, "VUID-VkSparseImageMemoryBind-memory-parameter", kVUIDUndefined);
+                        }
+                    }
+                }
+            }
+            if (pBindInfo[index0].pSignalSemaphores) {
+                for (uint32_t index1 = 0; index1 < pBindInfo[index0].signalSemaphoreCount; ++index1) {
+                    skip |= ValidateObject(pBindInfo[index0].pSignalSemaphores[index1], kVulkanObjectTypeSemaphore, false, "VUID-VkBindSparseInfo-pSignalSemaphores-parameter", "VUID-VkBindSparseInfo-commonparent");
+                }
+            }
+        }
+    }
+    skip |= ValidateObject(fence, kVulkanObjectTypeFence, true, "VUID-vkQueueBindSparse-fence-parameter", "VUID-vkQueueBindSparse-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCreateFence(
+    VkDevice                                    device,
+    const VkFenceCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreateFence-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateFence(
+    VkDevice                                    device,
+    const VkFenceCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pFence, kVulkanObjectTypeFence, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateDestroyFence(
+    VkDevice                                    device,
+    VkFence                                     fence,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyFence-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(fence, kVulkanObjectTypeFence, true, "VUID-vkDestroyFence-fence-parameter", "VUID-vkDestroyFence-fence-parent");
+    skip |= ValidateDestroyObject(fence, kVulkanObjectTypeFence, pAllocator, "VUID-vkDestroyFence-fence-01121", "VUID-vkDestroyFence-fence-01122");
+
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordDestroyFence(
+    VkDevice                                    device,
+    VkFence                                     fence,
+    const VkAllocationCallbacks*                pAllocator) {
+    RecordDestroyObject(fence, kVulkanObjectTypeFence);
+
+}
+
+bool ObjectLifetimes::PreCallValidateResetFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkResetFences-device-parameter", kVUIDUndefined);
+    if (pFences) {
+        for (uint32_t index0 = 0; index0 < fenceCount; ++index0) {
+            skip |= ValidateObject(pFences[index0], kVulkanObjectTypeFence, false, "VUID-vkResetFences-pFences-parameter", "VUID-vkResetFences-pFences-parent");
+        }
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetFenceStatus(
+    VkDevice                                    device,
+    VkFence                                     fence) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetFenceStatus-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(fence, kVulkanObjectTypeFence, false, "VUID-vkGetFenceStatus-fence-parameter", "VUID-vkGetFenceStatus-fence-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateWaitForFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences,
+    VkBool32                                    waitAll,
+    uint64_t                                    timeout) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkWaitForFences-device-parameter", kVUIDUndefined);
+    if (pFences) {
+        for (uint32_t index0 = 0; index0 < fenceCount; ++index0) {
+            skip |= ValidateObject(pFences[index0], kVulkanObjectTypeFence, false, "VUID-vkWaitForFences-pFences-parameter", "VUID-vkWaitForFences-pFences-parent");
+        }
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCreateSemaphore(
+    VkDevice                                    device,
+    const VkSemaphoreCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSemaphore*                                pSemaphore) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreateSemaphore-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateSemaphore(
+    VkDevice                                    device,
+    const VkSemaphoreCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSemaphore*                                pSemaphore,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pSemaphore, kVulkanObjectTypeSemaphore, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateDestroySemaphore(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDestroySemaphore-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(semaphore, kVulkanObjectTypeSemaphore, true, "VUID-vkDestroySemaphore-semaphore-parameter", "VUID-vkDestroySemaphore-semaphore-parent");
+    skip |= ValidateDestroyObject(semaphore, kVulkanObjectTypeSemaphore, pAllocator, "VUID-vkDestroySemaphore-semaphore-01138", "VUID-vkDestroySemaphore-semaphore-01139");
+
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordDestroySemaphore(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    const VkAllocationCallbacks*                pAllocator) {
+    RecordDestroyObject(semaphore, kVulkanObjectTypeSemaphore);
+
+}
+
+bool ObjectLifetimes::PreCallValidateCreateEvent(
+    VkDevice                                    device,
+    const VkEventCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkEvent*                                    pEvent) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreateEvent-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateEvent(
+    VkDevice                                    device,
+    const VkEventCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkEvent*                                    pEvent,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pEvent, kVulkanObjectTypeEvent, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateDestroyEvent(
+    VkDevice                                    device,
+    VkEvent                                     event,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyEvent-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(event, kVulkanObjectTypeEvent, true, "VUID-vkDestroyEvent-event-parameter", "VUID-vkDestroyEvent-event-parent");
+    skip |= ValidateDestroyObject(event, kVulkanObjectTypeEvent, pAllocator, "VUID-vkDestroyEvent-event-01146", "VUID-vkDestroyEvent-event-01147");
+
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordDestroyEvent(
+    VkDevice                                    device,
+    VkEvent                                     event,
+    const VkAllocationCallbacks*                pAllocator) {
+    RecordDestroyObject(event, kVulkanObjectTypeEvent);
+
+}
+
+bool ObjectLifetimes::PreCallValidateGetEventStatus(
+    VkDevice                                    device,
+    VkEvent                                     event) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetEventStatus-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(event, kVulkanObjectTypeEvent, false, "VUID-vkGetEventStatus-event-parameter", "VUID-vkGetEventStatus-event-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateSetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkSetEvent-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(event, kVulkanObjectTypeEvent, false, "VUID-vkSetEvent-event-parameter", "VUID-vkSetEvent-event-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateResetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkResetEvent-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(event, kVulkanObjectTypeEvent, false, "VUID-vkResetEvent-event-parameter", "VUID-vkResetEvent-event-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCreateQueryPool(
+    VkDevice                                    device,
+    const VkQueryPoolCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkQueryPool*                                pQueryPool) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreateQueryPool-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateQueryPool(
+    VkDevice                                    device,
+    const VkQueryPoolCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkQueryPool*                                pQueryPool,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pQueryPool, kVulkanObjectTypeQueryPool, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateDestroyQueryPool(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyQueryPool-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(queryPool, kVulkanObjectTypeQueryPool, true, "VUID-vkDestroyQueryPool-queryPool-parameter", "VUID-vkDestroyQueryPool-queryPool-parent");
+    skip |= ValidateDestroyObject(queryPool, kVulkanObjectTypeQueryPool, pAllocator, "VUID-vkDestroyQueryPool-queryPool-00794", "VUID-vkDestroyQueryPool-queryPool-00795");
+
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordDestroyQueryPool(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    const VkAllocationCallbacks*                pAllocator) {
+    RecordDestroyObject(queryPool, kVulkanObjectTypeQueryPool);
+
+}
+
+bool ObjectLifetimes::PreCallValidateGetQueryPoolResults(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    size_t                                      dataSize,
+    void*                                       pData,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetQueryPoolResults-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(queryPool, kVulkanObjectTypeQueryPool, false, "VUID-vkGetQueryPoolResults-queryPool-parameter", "VUID-vkGetQueryPoolResults-queryPool-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCreateBuffer(
+    VkDevice                                    device,
+    const VkBufferCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBuffer*                                   pBuffer) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreateBuffer-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateBuffer(
+    VkDevice                                    device,
+    const VkBufferCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBuffer*                                   pBuffer,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pBuffer, kVulkanObjectTypeBuffer, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateDestroyBuffer(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyBuffer-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(buffer, kVulkanObjectTypeBuffer, true, "VUID-vkDestroyBuffer-buffer-parameter", "VUID-vkDestroyBuffer-buffer-parent");
+    skip |= ValidateDestroyObject(buffer, kVulkanObjectTypeBuffer, pAllocator, "VUID-vkDestroyBuffer-buffer-00923", "VUID-vkDestroyBuffer-buffer-00924");
+
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordDestroyBuffer(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    const VkAllocationCallbacks*                pAllocator) {
+    RecordDestroyObject(buffer, kVulkanObjectTypeBuffer);
+
+}
+
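+// Create-info structures that embed handles are validated as well: a buffer
+// view is only as valid as the VkBuffer named in its create info.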
+bool ObjectLifetimes::PreCallValidateCreateBufferView(
+    VkDevice                                    device,
+    const VkBufferViewCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBufferView*                               pView) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreateBufferView-device-parameter", kVUIDUndefined);
+    if (pCreateInfo) {
+        skip |= ValidateObject(pCreateInfo->buffer, kVulkanObjectTypeBuffer, false, "VUID-VkBufferViewCreateInfo-buffer-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateBufferView(
+    VkDevice                                    device,
+    const VkBufferViewCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBufferView*                               pView,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pView, kVulkanObjectTypeBufferView, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateDestroyBufferView(
+    VkDevice                                    device,
+    VkBufferView                                bufferView,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyBufferView-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(bufferView, kVulkanObjectTypeBufferView, true, "VUID-vkDestroyBufferView-bufferView-parameter", "VUID-vkDestroyBufferView-bufferView-parent");
+    skip |= ValidateDestroyObject(bufferView, kVulkanObjectTypeBufferView, pAllocator, "VUID-vkDestroyBufferView-bufferView-00937", "VUID-vkDestroyBufferView-bufferView-00938");
+
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordDestroyBufferView(
+    VkDevice                                    device,
+    VkBufferView                                bufferView,
+    const VkAllocationCallbacks*                pAllocator) {
+    RecordDestroyObject(bufferView, kVulkanObjectTypeBufferView);
+
+}
+
+bool ObjectLifetimes::PreCallValidateCreateImage(
+    VkDevice                                    device,
+    const VkImageCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImage*                                    pImage) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreateImage-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateImage(
+    VkDevice                                    device,
+    const VkImageCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImage*                                    pImage,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pImage, kVulkanObjectTypeImage, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateDestroyImage(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyImage-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(image, kVulkanObjectTypeImage, true, "VUID-vkDestroyImage-image-parameter", "VUID-vkDestroyImage-image-parent");
+    skip |= ValidateDestroyObject(image, kVulkanObjectTypeImage, pAllocator, "VUID-vkDestroyImage-image-01001", "VUID-vkDestroyImage-image-01002");
+
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordDestroyImage(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkAllocationCallbacks*                pAllocator) {
+    RecordDestroyObject(image, kVulkanObjectTypeImage);
+
+}
+
+bool ObjectLifetimes::PreCallValidateGetImageSubresourceLayout(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkImageSubresource*                   pSubresource,
+    VkSubresourceLayout*                        pLayout) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetImageSubresourceLayout-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(image, kVulkanObjectTypeImage, false, "VUID-vkGetImageSubresourceLayout-image-parameter", "VUID-vkGetImageSubresourceLayout-image-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCreateImageView(
+    VkDevice                                    device,
+    const VkImageViewCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImageView*                                pView) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreateImageView-device-parameter", kVUIDUndefined);
+    if (pCreateInfo) {
+        skip |= ValidateObject(pCreateInfo->image, kVulkanObjectTypeImage, false, "VUID-VkImageViewCreateInfo-image-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateImageView(
+    VkDevice                                    device,
+    const VkImageViewCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImageView*                                pView,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pView, kVulkanObjectTypeImageView, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateDestroyImageView(
+    VkDevice                                    device,
+    VkImageView                                 imageView,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyImageView-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(imageView, kVulkanObjectTypeImageView, true, "VUID-vkDestroyImageView-imageView-parameter", "VUID-vkDestroyImageView-imageView-parent");
+    skip |= ValidateDestroyObject(imageView, kVulkanObjectTypeImageView, pAllocator, "VUID-vkDestroyImageView-imageView-01027", "VUID-vkDestroyImageView-imageView-01028");
+
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordDestroyImageView(
+    VkDevice                                    device,
+    VkImageView                                 imageView,
+    const VkAllocationCallbacks*                pAllocator) {
+    RecordDestroyObject(imageView, kVulkanObjectTypeImageView);
+
+}
+
+bool ObjectLifetimes::PreCallValidateCreateShaderModule(
+    VkDevice                                    device,
+    const VkShaderModuleCreateInfo*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkShaderModule*                             pShaderModule) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreateShaderModule-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateShaderModule(
+    VkDevice                                    device,
+    const VkShaderModuleCreateInfo*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkShaderModule*                             pShaderModule,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pShaderModule, kVulkanObjectTypeShaderModule, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateDestroyShaderModule(
+    VkDevice                                    device,
+    VkShaderModule                              shaderModule,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyShaderModule-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(shaderModule, kVulkanObjectTypeShaderModule, true, "VUID-vkDestroyShaderModule-shaderModule-parameter", "VUID-vkDestroyShaderModule-shaderModule-parent");
+    skip |= ValidateDestroyObject(shaderModule, kVulkanObjectTypeShaderModule, pAllocator, "VUID-vkDestroyShaderModule-shaderModule-01092", "VUID-vkDestroyShaderModule-shaderModule-01093");
+
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordDestroyShaderModule(
+    VkDevice                                    device,
+    VkShaderModule                              shaderModule,
+    const VkAllocationCallbacks*                pAllocator) {
+    RecordDestroyObject(shaderModule, kVulkanObjectTypeShaderModule);
+
+}
+
+bool ObjectLifetimes::PreCallValidateCreatePipelineCache(
+    VkDevice                                    device,
+    const VkPipelineCacheCreateInfo*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineCache*                            pPipelineCache) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreatePipelineCache-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreatePipelineCache(
+    VkDevice                                    device,
+    const VkPipelineCacheCreateInfo*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineCache*                            pPipelineCache,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pPipelineCache, kVulkanObjectTypePipelineCache, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateDestroyPipelineCache(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyPipelineCache-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(pipelineCache, kVulkanObjectTypePipelineCache, true, "VUID-vkDestroyPipelineCache-pipelineCache-parameter", "VUID-vkDestroyPipelineCache-pipelineCache-parent");
+    skip |= ValidateDestroyObject(pipelineCache, kVulkanObjectTypePipelineCache, pAllocator, "VUID-vkDestroyPipelineCache-pipelineCache-00771", "VUID-vkDestroyPipelineCache-pipelineCache-00772");
+
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordDestroyPipelineCache(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    const VkAllocationCallbacks*                pAllocator) {
+    RecordDestroyObject(pipelineCache, kVulkanObjectTypePipelineCache);
+
+}
+
+bool ObjectLifetimes::PreCallValidateGetPipelineCacheData(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    size_t*                                     pDataSize,
+    void*                                       pData) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetPipelineCacheData-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(pipelineCache, kVulkanObjectTypePipelineCache, false, "VUID-vkGetPipelineCacheData-pipelineCache-parameter", "VUID-vkGetPipelineCacheData-pipelineCache-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateMergePipelineCaches(
+    VkDevice                                    device,
+    VkPipelineCache                             dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkPipelineCache*                      pSrcCaches) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkMergePipelineCaches-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(dstCache, kVulkanObjectTypePipelineCache, false, "VUID-vkMergePipelineCaches-dstCache-parameter", "VUID-vkMergePipelineCaches-dstCache-parent");
+    if (pSrcCaches) {
+        for (uint32_t index0 = 0; index0 < srcCacheCount; ++index0) {
+            skip |= ValidateObject(pSrcCaches[index0], kVulkanObjectTypePipelineCache, false, "VUID-vkMergePipelineCaches-pSrcCaches-parameter", "VUID-vkMergePipelineCaches-pSrcCaches-parent");
+        }
+    }
+
+    return skip;
+}
+
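+// Graphics pipeline creation validates every handle reachable from each create
+// info: the shader module of each stage, the pipeline layout and render pass,
+// and, for derivative pipelines (VK_PIPELINE_CREATE_DERIVATIVE_BIT with
+// basePipelineIndex == -1), the base pipeline handle.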
+bool ObjectLifetimes::PreCallValidateCreateGraphicsPipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkGraphicsPipelineCreateInfo*         pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreateGraphicsPipelines-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(pipelineCache, kVulkanObjectTypePipelineCache, true, "VUID-vkCreateGraphicsPipelines-pipelineCache-parameter", "VUID-vkCreateGraphicsPipelines-pipelineCache-parent");
+    if (pCreateInfos) {
+        for (uint32_t index0 = 0; index0 < createInfoCount; ++index0) {
+            if (pCreateInfos[index0].pStages) {
+                for (uint32_t index1 = 0; index1 < pCreateInfos[index0].stageCount; ++index1) {
+                    skip |= ValidateObject(pCreateInfos[index0].pStages[index1].module, kVulkanObjectTypeShaderModule, false, "VUID-VkPipelineShaderStageCreateInfo-module-parameter", kVUIDUndefined);
+                }
+            }
+            skip |= ValidateObject(pCreateInfos[index0].layout, kVulkanObjectTypePipelineLayout, false, "VUID-VkGraphicsPipelineCreateInfo-layout-parameter", "VUID-VkGraphicsPipelineCreateInfo-commonparent");
+            skip |= ValidateObject(pCreateInfos[index0].renderPass, kVulkanObjectTypeRenderPass, false, "VUID-VkGraphicsPipelineCreateInfo-renderPass-parameter", "VUID-VkGraphicsPipelineCreateInfo-commonparent");
+            if ((pCreateInfos[index0].flags & VK_PIPELINE_CREATE_DERIVATIVE_BIT) && (pCreateInfos[index0].basePipelineIndex == -1))
+                skip |= ValidateObject(pCreateInfos[index0].basePipelineHandle, kVulkanObjectTypePipeline, false, "VUID-VkGraphicsPipelineCreateInfo-flags-00722", "VUID-VkGraphicsPipelineCreateInfo-commonparent");
+        }
+    }
+
+    return skip;
+}
+
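+// Pipeline creation can partially succeed, so the record hook registers each
+// non-null element of pPipelines individually and bails out only for
+// VK_ERROR_VALIDATION_FAILED_EXT rather than for every non-success result.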
+void ObjectLifetimes::PostCallRecordCreateGraphicsPipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkGraphicsPipelineCreateInfo*         pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines,
+    VkResult                                    result) {
+    if (VK_ERROR_VALIDATION_FAILED_EXT == result) return;
+    if (pPipelines) {
+        for (uint32_t index = 0; index < createInfoCount; index++) {
+            if (!pPipelines[index]) continue;
+            CreateObject(pPipelines[index], kVulkanObjectTypePipeline, pAllocator);
+        }
+    }
+
+}
+
+bool ObjectLifetimes::PreCallValidateCreateComputePipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkComputePipelineCreateInfo*          pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreateComputePipelines-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(pipelineCache, kVulkanObjectTypePipelineCache, true, "VUID-vkCreateComputePipelines-pipelineCache-parameter", "VUID-vkCreateComputePipelines-pipelineCache-parent");
+    if (pCreateInfos) {
+        for (uint32_t index0 = 0; index0 < createInfoCount; ++index0) {
+            skip |= ValidateObject(pCreateInfos[index0].stage.module, kVulkanObjectTypeShaderModule, false, "VUID-VkPipelineShaderStageCreateInfo-module-parameter", kVUIDUndefined);
+            skip |= ValidateObject(pCreateInfos[index0].layout, kVulkanObjectTypePipelineLayout, false, "VUID-VkComputePipelineCreateInfo-layout-parameter", "VUID-VkComputePipelineCreateInfo-commonparent");
+            if ((pCreateInfos[index0].flags & VK_PIPELINE_CREATE_DERIVATIVE_BIT) && (pCreateInfos[index0].basePipelineIndex == -1))
+                skip |= ValidateObject(pCreateInfos[index0].basePipelineHandle, kVulkanObjectTypePipeline, false, "VUID-VkComputePipelineCreateInfo-flags-00697", "VUID-VkComputePipelineCreateInfo-commonparent");
+        }
+    }
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateComputePipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkComputePipelineCreateInfo*          pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines,
+    VkResult                                    result) {
+    if (VK_ERROR_VALIDATION_FAILED_EXT == result) return;
+    if (pPipelines) {
+        for (uint32_t index = 0; index < createInfoCount; index++) {
+            if (!pPipelines[index]) continue;
+            CreateObject(pPipelines[index], kVulkanObjectTypePipeline, pAllocator);
+        }
+    }
+
+}
+
+bool ObjectLifetimes::PreCallValidateDestroyPipeline(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyPipeline-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(pipeline, kVulkanObjectTypePipeline, true, "VUID-vkDestroyPipeline-pipeline-parameter", "VUID-vkDestroyPipeline-pipeline-parent");
+    skip |= ValidateDestroyObject(pipeline, kVulkanObjectTypePipeline, pAllocator, "VUID-vkDestroyPipeline-pipeline-00766", "VUID-vkDestroyPipeline-pipeline-00767");
+
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordDestroyPipeline(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    const VkAllocationCallbacks*                pAllocator) {
+    RecordDestroyObject(pipeline, kVulkanObjectTypePipeline);
+
+}
+
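+// The pipeline layout create info carries an array of descriptor set layout
+// handles; each entry is validated before the layout is created.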
+bool ObjectLifetimes::PreCallValidateCreatePipelineLayout(
+    VkDevice                                    device,
+    const VkPipelineLayoutCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineLayout*                           pPipelineLayout) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreatePipelineLayout-device-parameter", kVUIDUndefined);
+    if (pCreateInfo) {
+        if (pCreateInfo->pSetLayouts) {
+            for (uint32_t index1 = 0; index1 < pCreateInfo->setLayoutCount; ++index1) {
+                skip |= ValidateObject(pCreateInfo->pSetLayouts[index1], kVulkanObjectTypeDescriptorSetLayout, false, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-parameter", kVUIDUndefined);
+            }
+        }
+    }
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreatePipelineLayout(
+    VkDevice                                    device,
+    const VkPipelineLayoutCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineLayout*                           pPipelineLayout,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pPipelineLayout, kVulkanObjectTypePipelineLayout, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateDestroyPipelineLayout(
+    VkDevice                                    device,
+    VkPipelineLayout                            pipelineLayout,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyPipelineLayout-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(pipelineLayout, kVulkanObjectTypePipelineLayout, true, "VUID-vkDestroyPipelineLayout-pipelineLayout-parameter", "VUID-vkDestroyPipelineLayout-pipelineLayout-parent");
+    skip |= ValidateDestroyObject(pipelineLayout, kVulkanObjectTypePipelineLayout, pAllocator, "VUID-vkDestroyPipelineLayout-pipelineLayout-00299", "VUID-vkDestroyPipelineLayout-pipelineLayout-00300");
+
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordDestroyPipelineLayout(
+    VkDevice                                    device,
+    VkPipelineLayout                            pipelineLayout,
+    const VkAllocationCallbacks*                pAllocator) {
+    RecordDestroyObject(pipelineLayout, kVulkanObjectTypePipelineLayout);
+
+}
+
+bool ObjectLifetimes::PreCallValidateCreateSampler(
+    VkDevice                                    device,
+    const VkSamplerCreateInfo*                  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSampler*                                  pSampler) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreateSampler-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateSampler(
+    VkDevice                                    device,
+    const VkSamplerCreateInfo*                  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSampler*                                  pSampler,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pSampler, kVulkanObjectTypeSampler, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateDestroySampler(
+    VkDevice                                    device,
+    VkSampler                                   sampler,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDestroySampler-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(sampler, kVulkanObjectTypeSampler, true, "VUID-vkDestroySampler-sampler-parameter", "VUID-vkDestroySampler-sampler-parent");
+    skip |= ValidateDestroyObject(sampler, kVulkanObjectTypeSampler, pAllocator, "VUID-vkDestroySampler-sampler-01083", "VUID-vkDestroySampler-sampler-01084");
+
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordDestroySampler(
+    VkDevice                                    device,
+    VkSampler                                   sampler,
+    const VkAllocationCallbacks*                pAllocator) {
+    RecordDestroyObject(sampler, kVulkanObjectTypeSampler);
+
+}
+
+bool ObjectLifetimes::PreCallValidateDestroyDescriptorSetLayout(
+    VkDevice                                    device,
+    VkDescriptorSetLayout                       descriptorSetLayout,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyDescriptorSetLayout-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(descriptorSetLayout, kVulkanObjectTypeDescriptorSetLayout, true, "VUID-vkDestroyDescriptorSetLayout-descriptorSetLayout-parameter", "VUID-vkDestroyDescriptorSetLayout-descriptorSetLayout-parent");
+    skip |= ValidateDestroyObject(descriptorSetLayout, kVulkanObjectTypeDescriptorSetLayout, pAllocator, "VUID-vkDestroyDescriptorSetLayout-descriptorSetLayout-00284", "VUID-vkDestroyDescriptorSetLayout-descriptorSetLayout-00285");
+
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordDestroyDescriptorSetLayout(
+    VkDevice                                    device,
+    VkDescriptorSetLayout                       descriptorSetLayout,
+    const VkAllocationCallbacks*                pAllocator) {
+    RecordDestroyObject(descriptorSetLayout, kVulkanObjectTypeDescriptorSetLayout);
+
+}
+
+bool ObjectLifetimes::PreCallValidateCreateDescriptorPool(
+    VkDevice                                    device,
+    const VkDescriptorPoolCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorPool*                           pDescriptorPool) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreateDescriptorPool-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateDescriptorPool(
+    VkDevice                                    device,
+    const VkDescriptorPoolCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorPool*                           pDescriptorPool,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pDescriptorPool, kVulkanObjectTypeDescriptorPool, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateDestroyFramebuffer(
+    VkDevice                                    device,
+    VkFramebuffer                               framebuffer,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyFramebuffer-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(framebuffer, kVulkanObjectTypeFramebuffer, true, "VUID-vkDestroyFramebuffer-framebuffer-parameter", "VUID-vkDestroyFramebuffer-framebuffer-parent");
+    skip |= ValidateDestroyObject(framebuffer, kVulkanObjectTypeFramebuffer, pAllocator, "VUID-vkDestroyFramebuffer-framebuffer-00893", "VUID-vkDestroyFramebuffer-framebuffer-00894");
+
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordDestroyFramebuffer(
+    VkDevice                                    device,
+    VkFramebuffer                               framebuffer,
+    const VkAllocationCallbacks*                pAllocator) {
+    RecordDestroyObject(framebuffer, kVulkanObjectTypeFramebuffer);
+
+}
+
+bool ObjectLifetimes::PreCallValidateCreateRenderPass(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreateRenderPass-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateRenderPass(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pRenderPass, kVulkanObjectTypeRenderPass, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateDestroyRenderPass(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyRenderPass-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(renderPass, kVulkanObjectTypeRenderPass, true, "VUID-vkDestroyRenderPass-renderPass-parameter", "VUID-vkDestroyRenderPass-renderPass-parent");
+    skip |= ValidateDestroyObject(renderPass, kVulkanObjectTypeRenderPass, pAllocator, "VUID-vkDestroyRenderPass-renderPass-00874", "VUID-vkDestroyRenderPass-renderPass-00875");
+
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordDestroyRenderPass(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    const VkAllocationCallbacks*                pAllocator) {
+    RecordDestroyObject(renderPass, kVulkanObjectTypeRenderPass);
+
+}
+
+bool ObjectLifetimes::PreCallValidateGetRenderAreaGranularity(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    VkExtent2D*                                 pGranularity) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetRenderAreaGranularity-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(renderPass, kVulkanObjectTypeRenderPass, false, "VUID-vkGetRenderAreaGranularity-renderPass-parameter", "VUID-vkGetRenderAreaGranularity-renderPass-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCreateCommandPool(
+    VkDevice                                    device,
+    const VkCommandPoolCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkCommandPool*                              pCommandPool) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreateCommandPool-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateCommandPool(
+    VkDevice                                    device,
+    const VkCommandPoolCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkCommandPool*                              pCommandPool,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pCommandPool, kVulkanObjectTypeCommandPool, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateResetCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolResetFlags                     flags) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkResetCommandPool-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(commandPool, kVulkanObjectTypeCommandPool, false, "VUID-vkResetCommandPool-commandPool-parameter", "VUID-vkResetCommandPool-commandPool-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateEndCommandBuffer(
+    VkCommandBuffer                             commandBuffer) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkEndCommandBuffer-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateResetCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkCommandBufferResetFlags                   flags) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkResetCommandBuffer-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdBindPipeline(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipeline                                  pipeline) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBindPipeline-commandBuffer-parameter", "VUID-vkCmdBindPipeline-commonparent");
+    skip |= ValidateObject(pipeline, kVulkanObjectTypePipeline, false, "VUID-vkCmdBindPipeline-pipeline-parameter", "VUID-vkCmdBindPipeline-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdSetViewport(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewport*                           pViewports) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetViewport-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdSetScissor(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstScissor,
+    uint32_t                                    scissorCount,
+    const VkRect2D*                             pScissors) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetScissor-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdSetLineWidth(
+    VkCommandBuffer                             commandBuffer,
+    float                                       lineWidth) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetLineWidth-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdSetDepthBias(
+    VkCommandBuffer                             commandBuffer,
+    float                                       depthBiasConstantFactor,
+    float                                       depthBiasClamp,
+    float                                       depthBiasSlopeFactor) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetDepthBias-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdSetBlendConstants(
+    VkCommandBuffer                             commandBuffer,
+    const float                                 blendConstants[4]) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetBlendConstants-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdSetDepthBounds(
+    VkCommandBuffer                             commandBuffer,
+    float                                       minDepthBounds,
+    float                                       maxDepthBounds) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetDepthBounds-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdSetStencilCompareMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    compareMask) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetStencilCompareMask-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdSetStencilWriteMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    writeMask) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetStencilWriteMask-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdSetStencilReference(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    reference) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetStencilReference-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdBindDescriptorSets(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    firstSet,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets,
+    uint32_t                                    dynamicOffsetCount,
+    const uint32_t*                             pDynamicOffsets) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBindDescriptorSets-commandBuffer-parameter", "VUID-vkCmdBindDescriptorSets-commonparent");
+    skip |= ValidateObject(layout, kVulkanObjectTypePipelineLayout, false, "VUID-vkCmdBindDescriptorSets-layout-parameter", "VUID-vkCmdBindDescriptorSets-commonparent");
+    if (pDescriptorSets) {
+        for (uint32_t index0 = 0; index0 < descriptorSetCount; ++index0) {
+            skip |= ValidateObject(pDescriptorSets[index0], kVulkanObjectTypeDescriptorSet, false, "VUID-vkCmdBindDescriptorSets-pDescriptorSets-parameter", "VUID-vkCmdBindDescriptorSets-commonparent");
+        }
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdBindIndexBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkIndexType                                 indexType) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBindIndexBuffer-commandBuffer-parameter", "VUID-vkCmdBindIndexBuffer-commonparent");
+    skip |= ValidateObject(buffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdBindIndexBuffer-buffer-parameter", "VUID-vkCmdBindIndexBuffer-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdBindVertexBuffers(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBindVertexBuffers-commandBuffer-parameter", "VUID-vkCmdBindVertexBuffers-commonparent");
+    if (pBuffers) {
+        for (uint32_t index0 = 0; index0 < bindingCount; ++index0) {
+            skip |= ValidateObject(pBuffers[index0], kVulkanObjectTypeBuffer, false, "VUID-vkCmdBindVertexBuffers-pBuffers-parameter", "VUID-vkCmdBindVertexBuffers-commonparent");
+        }
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdDraw(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    vertexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstVertex,
+    uint32_t                                    firstInstance) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDraw-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdDrawIndexed(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    indexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstIndex,
+    int32_t                                     vertexOffset,
+    uint32_t                                    firstInstance) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDrawIndexed-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdDrawIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDrawIndirect-commandBuffer-parameter", "VUID-vkCmdDrawIndirect-commonparent");
+    skip |= ValidateObject(buffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDrawIndirect-buffer-parameter", "VUID-vkCmdDrawIndirect-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdDrawIndexedIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDrawIndexedIndirect-commandBuffer-parameter", "VUID-vkCmdDrawIndexedIndirect-commonparent");
+    skip |= ValidateObject(buffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDrawIndexedIndirect-buffer-parameter", "VUID-vkCmdDrawIndexedIndirect-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdDispatch(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDispatch-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdDispatchIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDispatchIndirect-commandBuffer-parameter", "VUID-vkCmdDispatchIndirect-commonparent");
+    skip |= ValidateObject(buffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDispatchIndirect-buffer-parameter", "VUID-vkCmdDispatchIndirect-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdCopyBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferCopy*                         pRegions) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdCopyBuffer-commandBuffer-parameter", "VUID-vkCmdCopyBuffer-commonparent");
+    skip |= ValidateObject(srcBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdCopyBuffer-srcBuffer-parameter", "VUID-vkCmdCopyBuffer-commonparent");
+    skip |= ValidateObject(dstBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdCopyBuffer-dstBuffer-parameter", "VUID-vkCmdCopyBuffer-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdCopyImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageCopy*                          pRegions) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdCopyImage-commandBuffer-parameter", "VUID-vkCmdCopyImage-commonparent");
+    skip |= ValidateObject(srcImage, kVulkanObjectTypeImage, false, "VUID-vkCmdCopyImage-srcImage-parameter", "VUID-vkCmdCopyImage-commonparent");
+    skip |= ValidateObject(dstImage, kVulkanObjectTypeImage, false, "VUID-vkCmdCopyImage-dstImage-parameter", "VUID-vkCmdCopyImage-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdBlitImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageBlit*                          pRegions,
+    VkFilter                                    filter) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBlitImage-commandBuffer-parameter", "VUID-vkCmdBlitImage-commonparent");
+    skip |= ValidateObject(srcImage, kVulkanObjectTypeImage, false, "VUID-vkCmdBlitImage-srcImage-parameter", "VUID-vkCmdBlitImage-commonparent");
+    skip |= ValidateObject(dstImage, kVulkanObjectTypeImage, false, "VUID-vkCmdBlitImage-dstImage-parameter", "VUID-vkCmdBlitImage-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdCopyBufferToImage(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdCopyBufferToImage-commandBuffer-parameter", "VUID-vkCmdCopyBufferToImage-commonparent");
+    skip |= ValidateObject(srcBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdCopyBufferToImage-srcBuffer-parameter", "VUID-vkCmdCopyBufferToImage-commonparent");
+    skip |= ValidateObject(dstImage, kVulkanObjectTypeImage, false, "VUID-vkCmdCopyBufferToImage-dstImage-parameter", "VUID-vkCmdCopyBufferToImage-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdCopyImageToBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdCopyImageToBuffer-commandBuffer-parameter", "VUID-vkCmdCopyImageToBuffer-commonparent");
+    skip |= ValidateObject(srcImage, kVulkanObjectTypeImage, false, "VUID-vkCmdCopyImageToBuffer-srcImage-parameter", "VUID-vkCmdCopyImageToBuffer-commonparent");
+    skip |= ValidateObject(dstBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdCopyImageToBuffer-dstBuffer-parameter", "VUID-vkCmdCopyImageToBuffer-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdUpdateBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                dataSize,
+    const void*                                 pData) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdUpdateBuffer-commandBuffer-parameter", "VUID-vkCmdUpdateBuffer-commonparent");
+    skip |= ValidateObject(dstBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdUpdateBuffer-dstBuffer-parameter", "VUID-vkCmdUpdateBuffer-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdFillBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                size,
+    uint32_t                                    data) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdFillBuffer-commandBuffer-parameter", "VUID-vkCmdFillBuffer-commonparent");
+    skip |= ValidateObject(dstBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdFillBuffer-dstBuffer-parameter", "VUID-vkCmdFillBuffer-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdClearColorImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearColorValue*                    pColor,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdClearColorImage-commandBuffer-parameter", "VUID-vkCmdClearColorImage-commonparent");
+    skip |= ValidateObject(image, kVulkanObjectTypeImage, false, "VUID-vkCmdClearColorImage-image-parameter", "VUID-vkCmdClearColorImage-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdClearDepthStencilImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearDepthStencilValue*             pDepthStencil,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdClearDepthStencilImage-commandBuffer-parameter", "VUID-vkCmdClearDepthStencilImage-commonparent");
+    skip |= ValidateObject(image, kVulkanObjectTypeImage, false, "VUID-vkCmdClearDepthStencilImage-image-parameter", "VUID-vkCmdClearDepthStencilImage-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdClearAttachments(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    attachmentCount,
+    const VkClearAttachment*                    pAttachments,
+    uint32_t                                    rectCount,
+    const VkClearRect*                          pRects) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdClearAttachments-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdResolveImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageResolve*                       pRegions) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdResolveImage-commandBuffer-parameter", "VUID-vkCmdResolveImage-commonparent");
+    skip |= ValidateObject(srcImage, kVulkanObjectTypeImage, false, "VUID-vkCmdResolveImage-srcImage-parameter", "VUID-vkCmdResolveImage-commonparent");
+    skip |= ValidateObject(dstImage, kVulkanObjectTypeImage, false, "VUID-vkCmdResolveImage-dstImage-parameter", "VUID-vkCmdResolveImage-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdSetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetEvent-commandBuffer-parameter", "VUID-vkCmdSetEvent-commonparent");
+    skip |= ValidateObject(event, kVulkanObjectTypeEvent, false, "VUID-vkCmdSetEvent-event-parameter", "VUID-vkCmdSetEvent-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdResetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdResetEvent-commandBuffer-parameter", "VUID-vkCmdResetEvent-commonparent");
+    skip |= ValidateObject(event, kVulkanObjectTypeEvent, false, "VUID-vkCmdResetEvent-event-parameter", "VUID-vkCmdResetEvent-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdWaitEvents(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    eventCount,
+    const VkEvent*                              pEvents,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdWaitEvents-commandBuffer-parameter", "VUID-vkCmdWaitEvents-commonparent");
+    if (pEvents) {
+        for (uint32_t index0 = 0; index0 < eventCount; ++index0) {
+            skip |= ValidateObject(pEvents[index0], kVulkanObjectTypeEvent, false, "VUID-vkCmdWaitEvents-pEvents-parameter", "VUID-vkCmdWaitEvents-commonparent");
+        }
+    }
+    if (pBufferMemoryBarriers) {
+        for (uint32_t index0 = 0; index0 < bufferMemoryBarrierCount; ++index0) {
+            skip |= ValidateObject(pBufferMemoryBarriers[index0].buffer, kVulkanObjectTypeBuffer, false, "VUID-VkBufferMemoryBarrier-buffer-parameter", kVUIDUndefined);
+        }
+    }
+    if (pImageMemoryBarriers) {
+        for (uint32_t index0 = 0; index0 < imageMemoryBarrierCount; ++index0) {
+            skip |= ValidateObject(pImageMemoryBarriers[index0].image, kVulkanObjectTypeImage, false, "VUID-VkImageMemoryBarrier-image-parameter", kVUIDUndefined);
+        }
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdPipelineBarrier(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    VkDependencyFlags                           dependencyFlags,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdPipelineBarrier-commandBuffer-parameter", kVUIDUndefined);
+    if (pBufferMemoryBarriers) {
+        for (uint32_t index0 = 0; index0 < bufferMemoryBarrierCount; ++index0) {
+            skip |= ValidateObject(pBufferMemoryBarriers[index0].buffer, kVulkanObjectTypeBuffer, false, "VUID-VkBufferMemoryBarrier-buffer-parameter", kVUIDUndefined);
+        }
+    }
+    if (pImageMemoryBarriers) {
+        for (uint32_t index0 = 0; index0 < imageMemoryBarrierCount; ++index0) {
+            skip |= ValidateObject(pImageMemoryBarriers[index0].image, kVulkanObjectTypeImage, false, "VUID-VkImageMemoryBarrier-image-parameter", kVUIDUndefined);
+        }
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdBeginQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBeginQuery-commandBuffer-parameter", "VUID-vkCmdBeginQuery-commonparent");
+    skip |= ValidateObject(queryPool, kVulkanObjectTypeQueryPool, false, "VUID-vkCmdBeginQuery-queryPool-parameter", "VUID-vkCmdBeginQuery-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdEndQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdEndQuery-commandBuffer-parameter", "VUID-vkCmdEndQuery-commonparent");
+    skip |= ValidateObject(queryPool, kVulkanObjectTypeQueryPool, false, "VUID-vkCmdEndQuery-queryPool-parameter", "VUID-vkCmdEndQuery-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdResetQueryPool(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdResetQueryPool-commandBuffer-parameter", "VUID-vkCmdResetQueryPool-commonparent");
+    skip |= ValidateObject(queryPool, kVulkanObjectTypeQueryPool, false, "VUID-vkCmdResetQueryPool-queryPool-parameter", "VUID-vkCmdResetQueryPool-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdWriteTimestamp(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdWriteTimestamp-commandBuffer-parameter", "VUID-vkCmdWriteTimestamp-commonparent");
+    skip |= ValidateObject(queryPool, kVulkanObjectTypeQueryPool, false, "VUID-vkCmdWriteTimestamp-queryPool-parameter", "VUID-vkCmdWriteTimestamp-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdCopyQueryPoolResults(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdCopyQueryPoolResults-commandBuffer-parameter", "VUID-vkCmdCopyQueryPoolResults-commonparent");
+    skip |= ValidateObject(queryPool, kVulkanObjectTypeQueryPool, false, "VUID-vkCmdCopyQueryPoolResults-queryPool-parameter", "VUID-vkCmdCopyQueryPoolResults-commonparent");
+    skip |= ValidateObject(dstBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdCopyQueryPoolResults-dstBuffer-parameter", "VUID-vkCmdCopyQueryPoolResults-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdPushConstants(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineLayout                            layout,
+    VkShaderStageFlags                          stageFlags,
+    uint32_t                                    offset,
+    uint32_t                                    size,
+    const void*                                 pValues) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdPushConstants-commandBuffer-parameter", "VUID-vkCmdPushConstants-commonparent");
+    skip |= ValidateObject(layout, kVulkanObjectTypePipelineLayout, false, "VUID-vkCmdPushConstants-layout-parameter", "VUID-vkCmdPushConstants-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdBeginRenderPass(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    VkSubpassContents                           contents) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBeginRenderPass-commandBuffer-parameter", kVUIDUndefined);
+    if (pRenderPassBegin) {
+        skip |= ValidateObject(pRenderPassBegin->renderPass, kVulkanObjectTypeRenderPass, false, "VUID-VkRenderPassBeginInfo-renderPass-parameter", "VUID-VkRenderPassBeginInfo-commonparent");
+        skip |= ValidateObject(pRenderPassBegin->framebuffer, kVulkanObjectTypeFramebuffer, false, "VUID-VkRenderPassBeginInfo-framebuffer-parameter", "VUID-VkRenderPassBeginInfo-commonparent");
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdNextSubpass(
+    VkCommandBuffer                             commandBuffer,
+    VkSubpassContents                           contents) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdNextSubpass-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdEndRenderPass(
+    VkCommandBuffer                             commandBuffer) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdEndRenderPass-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdExecuteCommands(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdExecuteCommands-commandBuffer-parameter", "VUID-vkCmdExecuteCommands-commonparent");
+    if (pCommandBuffers) {
+        for (uint32_t index0 = 0; index0 < commandBufferCount; ++index0) {
+            skip |= ValidateObject(pCommandBuffers[index0], kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdExecuteCommands-pCommandBuffers-parameter", "VUID-vkCmdExecuteCommands-commonparent");
+        }
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateBindBufferMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkBindBufferMemory2-device-parameter", kVUIDUndefined);
+    if (pBindInfos) {
+        for (uint32_t index0 = 0; index0 < bindInfoCount; ++index0) {
+            skip |= ValidateObject(pBindInfos[index0].buffer, kVulkanObjectTypeBuffer, false, "VUID-VkBindBufferMemoryInfo-buffer-parameter", "VUID-VkBindBufferMemoryInfo-commonparent");
+            skip |= ValidateObject(pBindInfos[index0].memory, kVulkanObjectTypeDeviceMemory, false, "VUID-VkBindBufferMemoryInfo-memory-parameter", "VUID-VkBindBufferMemoryInfo-commonparent");
+        }
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateBindImageMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkBindImageMemory2-device-parameter", kVUIDUndefined);
+    if (pBindInfos) {
+        for (uint32_t index0 = 0; index0 < bindInfoCount; ++index0) {
+            skip |= ValidateObject(pBindInfos[index0].image, kVulkanObjectTypeImage, false, "VUID-VkBindImageMemoryInfo-image-parameter", "VUID-VkBindImageMemoryInfo-commonparent");
+            skip |= ValidateObject(pBindInfos[index0].memory, kVulkanObjectTypeDeviceMemory, true, kVUIDUndefined, "VUID-VkBindImageMemoryInfo-commonparent");
+        }
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetDeviceGroupPeerMemoryFeatures(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetDeviceGroupPeerMemoryFeatures-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdSetDeviceMask(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetDeviceMask-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdDispatchBase(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDispatchBase-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateEnumeratePhysicalDeviceGroups(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(instance, kVulkanObjectTypeInstance, false, "VUID-vkEnumeratePhysicalDeviceGroups-instance-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetImageMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetImageMemoryRequirements2-device-parameter", kVUIDUndefined);
+    if (pInfo) {
+        skip |= ValidateObject(pInfo->image, kVulkanObjectTypeImage, false, "VUID-VkImageMemoryRequirementsInfo2-image-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetBufferMemoryRequirements2(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetBufferMemoryRequirements2-device-parameter", kVUIDUndefined);
+    if (pInfo) {
+        skip |= ValidateObject(pInfo->buffer, kVulkanObjectTypeBuffer, false, "VUID-VkBufferMemoryRequirementsInfo2-buffer-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetImageSparseMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetImageSparseMemoryRequirements2-device-parameter", kVUIDUndefined);
+    if (pInfo) {
+        skip |= ValidateObject(pInfo->image, kVulkanObjectTypeImage, false, "VUID-VkImageSparseMemoryRequirementsInfo2-image-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceFeatures2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures2*                  pFeatures) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceFeatures2-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties2*                pProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceProperties2-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties2*                        pFormatProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceFormatProperties2-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceImageFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceImageFormatInfo2*     pImageFormatInfo,
+    VkImageFormatProperties2*                   pImageFormatProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceImageFormatProperties2-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceMemoryProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties2*          pMemoryProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceMemoryProperties2-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceSparseImageFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties2*             pProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateTrimCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkTrimCommandPool-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(commandPool, kVulkanObjectTypeCommandPool, false, "VUID-vkTrimCommandPool-commandPool-parameter", "VUID-vkTrimCommandPool-commandPool-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCreateSamplerYcbcrConversion(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreateSamplerYcbcrConversion-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateSamplerYcbcrConversion(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pYcbcrConversion, kVulkanObjectTypeSamplerYcbcrConversion, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateDestroySamplerYcbcrConversion(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDestroySamplerYcbcrConversion-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(ycbcrConversion, kVulkanObjectTypeSamplerYcbcrConversion, true, "VUID-vkDestroySamplerYcbcrConversion-ycbcrConversion-parameter", "VUID-vkDestroySamplerYcbcrConversion-ycbcrConversion-parent");
+    skip |= ValidateDestroyObject(ycbcrConversion, kVulkanObjectTypeSamplerYcbcrConversion, pAllocator, kVUIDUndefined, kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordDestroySamplerYcbcrConversion(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator) {
+    RecordDestroyObject(ycbcrConversion, kVulkanObjectTypeSamplerYcbcrConversion);
+
+}
+
+bool ObjectLifetimes::PreCallValidateCreateDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreateDescriptorUpdateTemplate-device-parameter", kVUIDUndefined);
+    if (pCreateInfo) {
+        skip |= ValidateObject(pCreateInfo->descriptorSetLayout, kVulkanObjectTypeDescriptorSetLayout, true, kVUIDUndefined, "VUID-VkDescriptorUpdateTemplateCreateInfo-commonparent");
+        skip |= ValidateObject(pCreateInfo->pipelineLayout, kVulkanObjectTypePipelineLayout, true, kVUIDUndefined, "VUID-VkDescriptorUpdateTemplateCreateInfo-commonparent");
+    }
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pDescriptorUpdateTemplate, kVulkanObjectTypeDescriptorUpdateTemplate, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateDestroyDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyDescriptorUpdateTemplate-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(descriptorUpdateTemplate, kVulkanObjectTypeDescriptorUpdateTemplate, true, "VUID-vkDestroyDescriptorUpdateTemplate-descriptorUpdateTemplate-parameter", "VUID-vkDestroyDescriptorUpdateTemplate-descriptorUpdateTemplate-parent");
+    skip |= ValidateDestroyObject(descriptorUpdateTemplate, kVulkanObjectTypeDescriptorUpdateTemplate, pAllocator, "VUID-vkDestroyDescriptorUpdateTemplate-descriptorSetLayout-00356", "VUID-vkDestroyDescriptorUpdateTemplate-descriptorSetLayout-00357");
+
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordDestroyDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator) {
+    RecordDestroyObject(descriptorUpdateTemplate, kVulkanObjectTypeDescriptorUpdateTemplate);
+
+}
+
+bool ObjectLifetimes::PreCallValidateUpdateDescriptorSetWithTemplate(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkUpdateDescriptorSetWithTemplate-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(descriptorSet, kVulkanObjectTypeDescriptorSet, false, "VUID-vkUpdateDescriptorSetWithTemplate-descriptorSet-parameter", kVUIDUndefined);
+    skip |= ValidateObject(descriptorUpdateTemplate, kVulkanObjectTypeDescriptorUpdateTemplate, false, "VUID-vkUpdateDescriptorSetWithTemplate-descriptorUpdateTemplate-parameter", "VUID-vkUpdateDescriptorSetWithTemplate-descriptorUpdateTemplate-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceExternalBufferProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalBufferInfo*   pExternalBufferInfo,
+    VkExternalBufferProperties*                 pExternalBufferProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceExternalBufferProperties-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceExternalFenceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalFenceInfo*    pExternalFenceInfo,
+    VkExternalFenceProperties*                  pExternalFenceProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceExternalFenceProperties-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceExternalSemaphoreProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
+    VkExternalSemaphoreProperties*              pExternalSemaphoreProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceExternalSemaphoreProperties-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateDestroySurfaceKHR(
+    VkInstance                                  instance,
+    VkSurfaceKHR                                surface,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(instance, kVulkanObjectTypeInstance, false, "VUID-vkDestroySurfaceKHR-instance-parameter", kVUIDUndefined);
+    skip |= ValidateObject(surface, kVulkanObjectTypeSurfaceKHR, true, "VUID-vkDestroySurfaceKHR-surface-parameter", "VUID-vkDestroySurfaceKHR-surface-parent");
+    skip |= ValidateDestroyObject(surface, kVulkanObjectTypeSurfaceKHR, pAllocator, "VUID-vkDestroySurfaceKHR-surface-01267", "VUID-vkDestroySurfaceKHR-surface-01268");
+
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordDestroySurfaceKHR(
+    VkInstance                                  instance,
+    VkSurfaceKHR                                surface,
+    const VkAllocationCallbacks*                pAllocator) {
+    RecordDestroyObject(surface, kVulkanObjectTypeSurfaceKHR);
+
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceSurfaceSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    VkSurfaceKHR                                surface,
+    VkBool32*                                   pSupported) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceSurfaceSupportKHR-physicalDevice-parameter", "VUID-vkGetPhysicalDeviceSurfaceSupportKHR-commonparent");
+    skip |= ValidateObject(surface, kVulkanObjectTypeSurfaceKHR, false, "VUID-vkGetPhysicalDeviceSurfaceSupportKHR-surface-parameter", "VUID-vkGetPhysicalDeviceSurfaceSupportKHR-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceSurfaceCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilitiesKHR*                   pSurfaceCapabilities) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceSurfaceCapabilitiesKHR-physicalDevice-parameter", "VUID-vkGetPhysicalDeviceSurfaceCapabilitiesKHR-commonparent");
+    skip |= ValidateObject(surface, kVulkanObjectTypeSurfaceKHR, false, "VUID-vkGetPhysicalDeviceSurfaceCapabilitiesKHR-surface-parameter", "VUID-vkGetPhysicalDeviceSurfaceCapabilitiesKHR-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceSurfaceFormatsKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormatKHR*                         pSurfaceFormats) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-physicalDevice-parameter", "VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-commonparent");
+    skip |= ValidateObject(surface, kVulkanObjectTypeSurfaceKHR, false, "VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-surface-parameter", "VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceSurfacePresentModesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-physicalDevice-parameter", "VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-commonparent");
+    skip |= ValidateObject(surface, kVulkanObjectTypeSurfaceKHR, false, "VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-surface-parameter", "VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCreateSwapchainKHR(
+    VkDevice                                    device,
+    const VkSwapchainCreateInfoKHR*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchain) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreateSwapchainKHR-device-parameter", kVUIDUndefined);
+    if (pCreateInfo) {
+        skip |= ValidateObject(pCreateInfo->surface, kVulkanObjectTypeSurfaceKHR, false, "VUID-VkSwapchainCreateInfoKHR-surface-parameter", "VUID-VkSwapchainCreateInfoKHR-commonparent");
+        skip |= ValidateObject(pCreateInfo->oldSwapchain, kVulkanObjectTypeSwapchainKHR, true, "VUID-VkSwapchainCreateInfoKHR-oldSwapchain-parameter", "VUID-VkSwapchainCreateInfoKHR-oldSwapchain-parent");
+    }
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateSwapchainKHR(
+    VkDevice                                    device,
+    const VkSwapchainCreateInfoKHR*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchain,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pSwapchain, kVulkanObjectTypeSwapchainKHR, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateAcquireNextImageKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint64_t                                    timeout,
+    VkSemaphore                                 semaphore,
+    VkFence                                     fence,
+    uint32_t*                                   pImageIndex) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkAcquireNextImageKHR-device-parameter", "VUID-vkAcquireNextImageKHR-commonparent");
+    skip |= ValidateObject(swapchain, kVulkanObjectTypeSwapchainKHR, false, "VUID-vkAcquireNextImageKHR-swapchain-parameter", "VUID-vkAcquireNextImageKHR-commonparent");
+    skip |= ValidateObject(semaphore, kVulkanObjectTypeSemaphore, true, "VUID-vkAcquireNextImageKHR-semaphore-parameter", "VUID-vkAcquireNextImageKHR-semaphore-parent");
+    skip |= ValidateObject(fence, kVulkanObjectTypeFence, true, "VUID-vkAcquireNextImageKHR-fence-parameter", "VUID-vkAcquireNextImageKHR-fence-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateQueuePresentKHR(
+    VkQueue                                     queue,
+    const VkPresentInfoKHR*                     pPresentInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(queue, kVulkanObjectTypeQueue, false, "VUID-vkQueuePresentKHR-queue-parameter", kVUIDUndefined);
+    if (pPresentInfo) {
+        if (pPresentInfo->pWaitSemaphores) {
+            for (uint32_t index1 = 0; index1 < pPresentInfo->waitSemaphoreCount; ++index1) {
+                skip |= ValidateObject(pPresentInfo->pWaitSemaphores[index1], kVulkanObjectTypeSemaphore, false, "VUID-VkPresentInfoKHR-pWaitSemaphores-parameter", "VUID-VkPresentInfoKHR-commonparent");
+            }
+        }
+        if (pPresentInfo->pSwapchains) {
+            for (uint32_t index1 = 0; index1 < pPresentInfo->swapchainCount; ++index1) {
+                skip |= ValidateObject(pPresentInfo->pSwapchains[index1], kVulkanObjectTypeSwapchainKHR, false, "VUID-VkPresentInfoKHR-pSwapchains-parameter", "VUID-VkPresentInfoKHR-commonparent");
+            }
+        }
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetDeviceGroupPresentCapabilitiesKHR(
+    VkDevice                                    device,
+    VkDeviceGroupPresentCapabilitiesKHR*        pDeviceGroupPresentCapabilities) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetDeviceGroupPresentCapabilitiesKHR-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetDeviceGroupSurfacePresentModesKHR(
+    VkDevice                                    device,
+    VkSurfaceKHR                                surface,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetDeviceGroupSurfacePresentModesKHR-device-parameter", "VUID-vkGetDeviceGroupSurfacePresentModesKHR-commonparent");
+    skip |= ValidateObject(surface, kVulkanObjectTypeSurfaceKHR, false, "VUID-vkGetDeviceGroupSurfacePresentModesKHR-surface-parameter", "VUID-vkGetDeviceGroupSurfacePresentModesKHR-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDevicePresentRectanglesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pRectCount,
+    VkRect2D*                                   pRects) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDevicePresentRectanglesKHR-physicalDevice-parameter", "VUID-vkGetPhysicalDevicePresentRectanglesKHR-commonparent");
+    skip |= ValidateObject(surface, kVulkanObjectTypeSurfaceKHR, false, "VUID-vkGetPhysicalDevicePresentRectanglesKHR-surface-parameter", "VUID-vkGetPhysicalDevicePresentRectanglesKHR-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateAcquireNextImage2KHR(
+    VkDevice                                    device,
+    const VkAcquireNextImageInfoKHR*            pAcquireInfo,
+    uint32_t*                                   pImageIndex) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkAcquireNextImage2KHR-device-parameter", kVUIDUndefined);
+    if (pAcquireInfo) {
+        skip |= ValidateObject(pAcquireInfo->swapchain, kVulkanObjectTypeSwapchainKHR, false, "VUID-VkAcquireNextImageInfoKHR-swapchain-parameter", "VUID-VkAcquireNextImageInfoKHR-commonparent");
+        skip |= ValidateObject(pAcquireInfo->semaphore, kVulkanObjectTypeSemaphore, true, "VUID-VkAcquireNextImageInfoKHR-semaphore-parameter", "VUID-VkAcquireNextImageInfoKHR-commonparent");
+        skip |= ValidateObject(pAcquireInfo->fence, kVulkanObjectTypeFence, true, "VUID-VkAcquireNextImageInfoKHR-fence-parameter", "VUID-VkAcquireNextImageInfoKHR-commonparent");
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceDisplayPlanePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPlanePropertiesKHR*                pProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceDisplayPlanePropertiesKHR-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetDisplayPlaneSupportedDisplaysKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    planeIndex,
+    uint32_t*                                   pDisplayCount,
+    VkDisplayKHR*                               pDisplays) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetDisplayPlaneSupportedDisplaysKHR-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
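+// Displays discovered through enumeration are registered with a null
+// allocator, since these objects are reported by the implementation rather
+// than created through an application-supplied allocation callback.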
+void ObjectLifetimes::PostCallRecordGetDisplayPlaneSupportedDisplaysKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    planeIndex,
+    uint32_t*                                   pDisplayCount,
+    VkDisplayKHR*                               pDisplays,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    if (pDisplays) {
+        for (uint32_t index = 0; index < *pDisplayCount; index++) {
+            CreateObject(pDisplays[index], kVulkanObjectTypeDisplayKHR, nullptr);
+        }
+    }
+
+}
+
+bool ObjectLifetimes::PreCallValidateCreateDisplayModeKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    const VkDisplayModeCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDisplayModeKHR*                           pMode) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkCreateDisplayModeKHR-physicalDevice-parameter", kVUIDUndefined);
+    skip |= ValidateObject(display, kVulkanObjectTypeDisplayKHR, false, "VUID-vkCreateDisplayModeKHR-display-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateDisplayModeKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    const VkDisplayModeCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDisplayModeKHR*                           pMode,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pMode, kVulkanObjectTypeDisplayModeKHR, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateGetDisplayPlaneCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayModeKHR                            mode,
+    uint32_t                                    planeIndex,
+    VkDisplayPlaneCapabilitiesKHR*              pCapabilities) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetDisplayPlaneCapabilitiesKHR-physicalDevice-parameter", kVUIDUndefined);
+    skip |= ValidateObject(mode, kVulkanObjectTypeDisplayModeKHR, false, "VUID-vkGetDisplayPlaneCapabilitiesKHR-mode-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCreateDisplayPlaneSurfaceKHR(
+    VkInstance                                  instance,
+    const VkDisplaySurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const {
+    bool skip = false;
+    skip |= ValidateObject(instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateDisplayPlaneSurfaceKHR-instance-parameter", kVUIDUndefined);
+    if (pCreateInfo) {
+        skip |= ValidateObject(pCreateInfo->displayMode, kVulkanObjectTypeDisplayModeKHR, false, "VUID-VkDisplaySurfaceCreateInfoKHR-displayMode-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateDisplayPlaneSurfaceKHR(
+    VkInstance                                  instance,
+    const VkDisplaySurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pSurface, kVulkanObjectTypeSurfaceKHR, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateCreateSharedSwapchainsKHR(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainCreateInfoKHR*             pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchains) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreateSharedSwapchainsKHR-device-parameter", kVUIDUndefined);
+    if (pCreateInfos) {
+        for (uint32_t index0 = 0; index0 < swapchainCount; ++index0) {
+            skip |= ValidateObject(pCreateInfos[index0].surface, kVulkanObjectTypeSurfaceKHR, false, "VUID-VkSwapchainCreateInfoKHR-surface-parameter", "VUID-VkSwapchainCreateInfoKHR-commonparent");
+            skip |= ValidateObject(pCreateInfos[index0].oldSwapchain, kVulkanObjectTypeSwapchainKHR, true, "VUID-VkSwapchainCreateInfoKHR-oldSwapchain-parameter", "VUID-VkSwapchainCreateInfoKHR-oldSwapchain-parent");
+        }
+    }
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateSharedSwapchainsKHR(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainCreateInfoKHR*             pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchains,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    if (pSwapchains) {
+        for (uint32_t index = 0; index < swapchainCount; index++) {
+            CreateObject(pSwapchains[index], kVulkanObjectTypeSwapchainKHR, pAllocator);
+        }
+    }
+
+}
+
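+// The platform-specific surface hooks (Xlib, XCB, Wayland, Android, Win32)
+// share one pattern: PreCallValidate* checks only the VkInstance handle, and
+// PostCallRecordCreate*SurfaceKHR registers the new VkSurfaceKHR only when
+// the call returned VK_SUCCESS. Native window/display handles are not
+// tracked by this layer.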
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+
+bool ObjectLifetimes::PreCallValidateCreateXlibSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXlibSurfaceCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const {
+    bool skip = false;
+    skip |= ValidateObject(instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateXlibSurfaceKHR-instance-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateXlibSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXlibSurfaceCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pSurface, kVulkanObjectTypeSurfaceKHR, pAllocator);
+
+}
+#endif // VK_USE_PLATFORM_XLIB_KHR
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceXlibPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    Display*                                    dpy,
+    VisualID                                    visualID) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceXlibPresentationSupportKHR-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+#endif // VK_USE_PLATFORM_XLIB_KHR
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+
+bool ObjectLifetimes::PreCallValidateCreateXcbSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXcbSurfaceCreateInfoKHR*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const {
+    bool skip = false;
+    skip |= ValidateObject(instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateXcbSurfaceKHR-instance-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateXcbSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXcbSurfaceCreateInfoKHR*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pSurface, kVulkanObjectTypeSurfaceKHR, pAllocator);
+
+}
+#endif // VK_USE_PLATFORM_XCB_KHR
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceXcbPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    xcb_connection_t*                           connection,
+    xcb_visualid_t                              visual_id) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceXcbPresentationSupportKHR-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+#endif // VK_USE_PLATFORM_XCB_KHR
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+
+bool ObjectLifetimes::PreCallValidateCreateWaylandSurfaceKHR(
+    VkInstance                                  instance,
+    const VkWaylandSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const {
+    bool skip = false;
+    skip |= ValidateObject(instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateWaylandSurfaceKHR-instance-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateWaylandSurfaceKHR(
+    VkInstance                                  instance,
+    const VkWaylandSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pSurface, kVulkanObjectTypeSurfaceKHR, pAllocator);
+
+}
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceWaylandPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    struct wl_display*                          display) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceWaylandPresentationSupportKHR-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+bool ObjectLifetimes::PreCallValidateCreateAndroidSurfaceKHR(
+    VkInstance                                  instance,
+    const VkAndroidSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const {
+    bool skip = false;
+    skip |= ValidateObject(instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateAndroidSurfaceKHR-instance-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateAndroidSurfaceKHR(
+    VkInstance                                  instance,
+    const VkAndroidSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pSurface, kVulkanObjectTypeSurfaceKHR, pAllocator);
+
+}
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+bool ObjectLifetimes::PreCallValidateCreateWin32SurfaceKHR(
+    VkInstance                                  instance,
+    const VkWin32SurfaceCreateInfoKHR*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const {
+    bool skip = false;
+    skip |= ValidateObject(instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateWin32SurfaceKHR-instance-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateWin32SurfaceKHR(
+    VkInstance                                  instance,
+    const VkWin32SurfaceCreateInfoKHR*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pSurface, kVulkanObjectTypeSurfaceKHR, pAllocator);
+
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceWin32PresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceWin32PresentationSupportKHR-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
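+// The GetPhysicalDevice*2KHR aliases below were promoted to core in Vulkan
+// 1.1, so the generated checks reference the core (non-KHR) VUID strings.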
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceFeatures2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures2*                  pFeatures) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceFeatures2-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties2*                pProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceProperties2-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties2*                        pFormatProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceFormatProperties2-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceImageFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceImageFormatInfo2*     pImageFormatInfo,
+    VkImageFormatProperties2*                   pImageFormatProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceImageFormatProperties2-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceMemoryProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties2*          pMemoryProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceMemoryProperties2-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceSparseImageFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties2*             pProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetDeviceGroupPeerMemoryFeaturesKHR(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetDeviceGroupPeerMemoryFeatures-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdSetDeviceMaskKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetDeviceMask-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdDispatchBaseKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDispatchBase-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateTrimCommandPoolKHR(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkTrimCommandPool-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(commandPool, kVulkanObjectTypeCommandPool, false, "VUID-vkTrimCommandPool-commandPool-parameter", "VUID-vkTrimCommandPool-commandPool-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateEnumeratePhysicalDeviceGroupsKHR(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(instance, kVulkanObjectTypeInstance, false, "VUID-vkEnumeratePhysicalDeviceGroups-instance-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceExternalBufferPropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalBufferInfo*   pExternalBufferInfo,
+    VkExternalBufferProperties*                 pExternalBufferProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceExternalBufferProperties-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+bool ObjectLifetimes::PreCallValidateGetMemoryWin32HandleKHR(
+    VkDevice                                    device,
+    const VkMemoryGetWin32HandleInfoKHR*        pGetWin32HandleInfo,
+    HANDLE*                                     pHandle) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetMemoryWin32HandleKHR-device-parameter", kVUIDUndefined);
+    if (pGetWin32HandleInfo) {
+        skip |= ValidateObject(pGetWin32HandleInfo->memory, kVulkanObjectTypeDeviceMemory, false, "VUID-VkMemoryGetWin32HandleInfoKHR-memory-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+bool ObjectLifetimes::PreCallValidateGetMemoryWin32HandlePropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    HANDLE                                      handle,
+    VkMemoryWin32HandlePropertiesKHR*           pMemoryWin32HandleProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetMemoryWin32HandlePropertiesKHR-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+bool ObjectLifetimes::PreCallValidateGetMemoryFdKHR(
+    VkDevice                                    device,
+    const VkMemoryGetFdInfoKHR*                 pGetFdInfo,
+    int*                                        pFd) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetMemoryFdKHR-device-parameter", kVUIDUndefined);
+    if (pGetFdInfo) {
+        skip |= ValidateObject(pGetFdInfo->memory, kVulkanObjectTypeDeviceMemory, false, "VUID-VkMemoryGetFdInfoKHR-memory-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetMemoryFdPropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    int                                         fd,
+    VkMemoryFdPropertiesKHR*                    pMemoryFdProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetMemoryFdPropertiesKHR-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceExternalSemaphorePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
+    VkExternalSemaphoreProperties*              pExternalSemaphoreProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceExternalSemaphoreProperties-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+bool ObjectLifetimes::PreCallValidateImportSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreWin32HandleInfoKHR*  pImportSemaphoreWin32HandleInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkImportSemaphoreWin32HandleKHR-device-parameter", kVUIDUndefined);
+    if (pImportSemaphoreWin32HandleInfo) {
+        skip |= ValidateObject(pImportSemaphoreWin32HandleInfo->semaphore, kVulkanObjectTypeSemaphore, false, "VUID-VkImportSemaphoreWin32HandleInfoKHR-semaphore-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+bool ObjectLifetimes::PreCallValidateGetSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetWin32HandleInfoKHR*     pGetWin32HandleInfo,
+    HANDLE*                                     pHandle) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetSemaphoreWin32HandleKHR-device-parameter", kVUIDUndefined);
+    if (pGetWin32HandleInfo) {
+        skip |= ValidateObject(pGetWin32HandleInfo->semaphore, kVulkanObjectTypeSemaphore, false, "VUID-VkSemaphoreGetWin32HandleInfoKHR-semaphore-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+bool ObjectLifetimes::PreCallValidateImportSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreFdInfoKHR*           pImportSemaphoreFdInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkImportSemaphoreFdKHR-device-parameter", kVUIDUndefined);
+    if (pImportSemaphoreFdInfo) {
+        skip |= ValidateObject(pImportSemaphoreFdInfo->semaphore, kVulkanObjectTypeSemaphore, false, "VUID-VkImportSemaphoreFdInfoKHR-semaphore-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetFdInfoKHR*              pGetFdInfo,
+    int*                                        pFd) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetSemaphoreFdKHR-device-parameter", kVUIDUndefined);
+    if (pGetFdInfo) {
+        skip |= ValidateObject(pGetFdInfo->semaphore, kVulkanObjectTypeSemaphore, false, "VUID-VkSemaphoreGetFdInfoKHR-semaphore-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdPushDescriptorSetWithTemplateKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    const void*                                 pData) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-parameter", "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commonparent");
+    skip |= ValidateObject(descriptorUpdateTemplate, kVulkanObjectTypeDescriptorUpdateTemplate, false, "VUID-vkCmdPushDescriptorSetWithTemplateKHR-descriptorUpdateTemplate-parameter", "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commonparent");
+    skip |= ValidateObject(layout, kVulkanObjectTypePipelineLayout, false, "VUID-vkCmdPushDescriptorSetWithTemplateKHR-layout-parameter", "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCreateDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreateDescriptorUpdateTemplate-device-parameter", kVUIDUndefined);
+    if (pCreateInfo) {
+        skip |= ValidateObject(pCreateInfo->descriptorSetLayout, kVulkanObjectTypeDescriptorSetLayout, true, kVUIDUndefined, "VUID-VkDescriptorUpdateTemplateCreateInfo-commonparent");
+        skip |= ValidateObject(pCreateInfo->pipelineLayout, kVulkanObjectTypePipelineLayout, true, kVUIDUndefined, "VUID-VkDescriptorUpdateTemplateCreateInfo-commonparent");
+    }
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pDescriptorUpdateTemplate, kVulkanObjectTypeDescriptorUpdateTemplate, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateDestroyDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyDescriptorUpdateTemplate-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(descriptorUpdateTemplate, kVulkanObjectTypeDescriptorUpdateTemplate, true, "VUID-vkDestroyDescriptorUpdateTemplate-descriptorUpdateTemplate-parameter", "VUID-vkDestroyDescriptorUpdateTemplate-descriptorUpdateTemplate-parent");
+    skip |= ValidateDestroyObject(descriptorUpdateTemplate, kVulkanObjectTypeDescriptorUpdateTemplate, pAllocator, "VUID-vkDestroyDescriptorUpdateTemplate-descriptorSetLayout-00356", "VUID-vkDestroyDescriptorUpdateTemplate-descriptorSetLayout-00357");
+
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordDestroyDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator) {
+    RecordDestroyObject(descriptorUpdateTemplate, kVulkanObjectTypeDescriptorUpdateTemplate);
+
+}
+
+bool ObjectLifetimes::PreCallValidateUpdateDescriptorSetWithTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkUpdateDescriptorSetWithTemplate-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(descriptorSet, kVulkanObjectTypeDescriptorSet, false, "VUID-vkUpdateDescriptorSetWithTemplate-descriptorSet-parameter", kVUIDUndefined);
+    skip |= ValidateObject(descriptorUpdateTemplate, kVulkanObjectTypeDescriptorUpdateTemplate, false, "VUID-vkUpdateDescriptorSetWithTemplate-descriptorUpdateTemplate-parameter", "VUID-vkUpdateDescriptorSetWithTemplate-descriptorUpdateTemplate-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCreateRenderPass2KHR(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo2KHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreateRenderPass2KHR-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateRenderPass2KHR(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo2KHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pRenderPass, kVulkanObjectTypeRenderPass, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateCmdBeginRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBeginRenderPass2KHR-commandBuffer-parameter", kVUIDUndefined);
+    if (pRenderPassBegin) {
+        skip |= ValidateObject(pRenderPassBegin->renderPass, kVulkanObjectTypeRenderPass, false, "VUID-VkRenderPassBeginInfo-renderPass-parameter", "VUID-VkRenderPassBeginInfo-commonparent");
+        skip |= ValidateObject(pRenderPassBegin->framebuffer, kVulkanObjectTypeFramebuffer, false, "VUID-VkRenderPassBeginInfo-framebuffer-parameter", "VUID-VkRenderPassBeginInfo-commonparent");
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdNextSubpass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdNextSubpass2KHR-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdEndRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdEndRenderPass2KHR-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetSwapchainStatusKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetSwapchainStatusKHR-device-parameter", "VUID-vkGetSwapchainStatusKHR-commonparent");
+    skip |= ValidateObject(swapchain, kVulkanObjectTypeSwapchainKHR, false, "VUID-vkGetSwapchainStatusKHR-swapchain-parameter", "VUID-vkGetSwapchainStatusKHR-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceExternalFencePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalFenceInfo*    pExternalFenceInfo,
+    VkExternalFenceProperties*                  pExternalFenceProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceExternalFenceProperties-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+bool ObjectLifetimes::PreCallValidateImportFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportFenceWin32HandleInfoKHR*      pImportFenceWin32HandleInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkImportFenceWin32HandleKHR-device-parameter", kVUIDUndefined);
+    if (pImportFenceWin32HandleInfo) {
+        skip |= ValidateObject(pImportFenceWin32HandleInfo->fence, kVulkanObjectTypeFence, false, "VUID-VkImportFenceWin32HandleInfoKHR-fence-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+bool ObjectLifetimes::PreCallValidateGetFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkFenceGetWin32HandleInfoKHR*         pGetWin32HandleInfo,
+    HANDLE*                                     pHandle) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetFenceWin32HandleKHR-device-parameter", kVUIDUndefined);
+    if (pGetWin32HandleInfo) {
+        skip |= ValidateObject(pGetWin32HandleInfo->fence, kVulkanObjectTypeFence, false, "VUID-VkFenceGetWin32HandleInfoKHR-fence-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+bool ObjectLifetimes::PreCallValidateImportFenceFdKHR(
+    VkDevice                                    device,
+    const VkImportFenceFdInfoKHR*               pImportFenceFdInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkImportFenceFdKHR-device-parameter", kVUIDUndefined);
+    if (pImportFenceFdInfo) {
+        skip |= ValidateObject(pImportFenceFdInfo->fence, kVulkanObjectTypeFence, false, "VUID-VkImportFenceFdInfoKHR-fence-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetFenceFdKHR(
+    VkDevice                                    device,
+    const VkFenceGetFdInfoKHR*                  pGetFdInfo,
+    int*                                        pFd) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetFenceFdKHR-device-parameter", kVUIDUndefined);
+    if (pGetFdInfo) {
+        skip |= ValidateObject(pGetFdInfo->fence, kVulkanObjectTypeFence, false, "VUID-VkFenceGetFdInfoKHR-fence-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t*                                   pCounterCount,
+    VkPerformanceCounterKHR*                    pCounters,
+    VkPerformanceCounterDescriptionKHR*         pCounterDescriptions) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkQueryPoolPerformanceCreateInfoKHR*  pPerformanceQueryCreateInfo,
+    uint32_t*                                   pNumPasses) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateAcquireProfilingLockKHR(
+    VkDevice                                    device,
+    const VkAcquireProfilingLockInfoKHR*        pInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkAcquireProfilingLockKHR-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateReleaseProfilingLockKHR(
+    VkDevice                                    device) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkReleaseProfilingLockKHR-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceSurfaceCapabilities2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkSurfaceCapabilities2KHR*                  pSurfaceCapabilities) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceSurfaceCapabilities2KHR-physicalDevice-parameter", kVUIDUndefined);
+    if (pSurfaceInfo) {
+        skip |= ValidateObject(pSurfaceInfo->surface, kVulkanObjectTypeSurfaceKHR, false, "VUID-VkPhysicalDeviceSurfaceInfo2KHR-surface-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceSurfaceFormats2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormat2KHR*                        pSurfaceFormats) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-physicalDevice-parameter", kVUIDUndefined);
+    if (pSurfaceInfo) {
+        skip |= ValidateObject(pSurfaceInfo->surface, kVulkanObjectTypeSurfaceKHR, false, "VUID-VkPhysicalDeviceSurfaceInfo2KHR-surface-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceDisplayPlaneProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPlaneProperties2KHR*               pProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceDisplayPlaneProperties2KHR-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetDisplayPlaneCapabilities2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDisplayPlaneInfo2KHR*               pDisplayPlaneInfo,
+    VkDisplayPlaneCapabilities2KHR*             pCapabilities) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetDisplayPlaneCapabilities2KHR-physicalDevice-parameter", kVUIDUndefined);
+    if (pDisplayPlaneInfo) {
+        skip |= ValidateObject(pDisplayPlaneInfo->mode, kVulkanObjectTypeDisplayModeKHR, false, "VUID-VkDisplayPlaneInfo2KHR-mode-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetImageMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetImageMemoryRequirements2-device-parameter", kVUIDUndefined);
+    if (pInfo) {
+        skip |= ValidateObject(pInfo->image, kVulkanObjectTypeImage, false, "VUID-VkImageMemoryRequirementsInfo2-image-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetBufferMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetBufferMemoryRequirements2-device-parameter", kVUIDUndefined);
+    if (pInfo) {
+        skip |= ValidateObject(pInfo->buffer, kVulkanObjectTypeBuffer, false, "VUID-VkBufferMemoryRequirementsInfo2-buffer-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetImageSparseMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetImageSparseMemoryRequirements2-device-parameter", kVUIDUndefined);
+    if (pInfo) {
+        skip |= ValidateObject(pInfo->image, kVulkanObjectTypeImage, false, "VUID-VkImageSparseMemoryRequirementsInfo2-image-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCreateSamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreateSamplerYcbcrConversion-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateSamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pYcbcrConversion, kVulkanObjectTypeSamplerYcbcrConversion, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateDestroySamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDestroySamplerYcbcrConversion-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(ycbcrConversion, kVulkanObjectTypeSamplerYcbcrConversion, true, "VUID-vkDestroySamplerYcbcrConversion-ycbcrConversion-parameter", "VUID-vkDestroySamplerYcbcrConversion-ycbcrConversion-parent");
+    skip |= ValidateDestroyObject(ycbcrConversion, kVulkanObjectTypeSamplerYcbcrConversion, pAllocator, kVUIDUndefined, kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordDestroySamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator) {
+    RecordDestroyObject(ycbcrConversion, kVulkanObjectTypeSamplerYcbcrConversion);
+
+}
+
+bool ObjectLifetimes::PreCallValidateBindBufferMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkBindBufferMemory2-device-parameter", kVUIDUndefined);
+    if (pBindInfos) {
+        for (uint32_t index0 = 0; index0 < bindInfoCount; ++index0) {
+            skip |= ValidateObject(pBindInfos[index0].buffer, kVulkanObjectTypeBuffer, false, "VUID-VkBindBufferMemoryInfo-buffer-parameter", "VUID-VkBindBufferMemoryInfo-commonparent");
+            skip |= ValidateObject(pBindInfos[index0].memory, kVulkanObjectTypeDeviceMemory, false, "VUID-VkBindBufferMemoryInfo-memory-parameter", "VUID-VkBindBufferMemoryInfo-commonparent");
+        }
+    }
+
+    return skip;
+}
+
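+// For vkBindImageMemory2KHR the memory handle is null-allowed and carries no
+// parameter VUID (kVUIDUndefined), because the memory binding may instead be
+// supplied through a structure in the pNext chain (e.g. a swapchain image
+// binding).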
+bool ObjectLifetimes::PreCallValidateBindImageMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkBindImageMemory2-device-parameter", kVUIDUndefined);
+    if (pBindInfos) {
+        for (uint32_t index0 = 0; index0 < bindInfoCount; ++index0) {
+            skip |= ValidateObject(pBindInfos[index0].image, kVulkanObjectTypeImage, false, "VUID-VkBindImageMemoryInfo-image-parameter", "VUID-VkBindImageMemoryInfo-commonparent");
+            skip |= ValidateObject(pBindInfos[index0].memory, kVulkanObjectTypeDeviceMemory, true, kVUIDUndefined, "VUID-VkBindImageMemoryInfo-commonparent");
+        }
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdDrawIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDrawIndirectCountKHR-commandBuffer-parameter", "VUID-vkCmdDrawIndirectCountKHR-commonparent");
+    skip |= ValidateObject(buffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDrawIndirectCountKHR-buffer-parameter", "VUID-vkCmdDrawIndirectCountKHR-commonparent");
+    skip |= ValidateObject(countBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDrawIndirectCountKHR-countBuffer-parameter", "VUID-vkCmdDrawIndirectCountKHR-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdDrawIndexedIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-parameter", "VUID-vkCmdDrawIndexedIndirectCountKHR-commonparent");
+    skip |= ValidateObject(buffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-parameter", "VUID-vkCmdDrawIndexedIndirectCountKHR-commonparent");
+    skip |= ValidateObject(countBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-parameter", "VUID-vkCmdDrawIndexedIndirectCountKHR-commonparent");
+
+    return skip;
+}
+
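+// Timeline semaphore entry points (VK_KHR_timeline_semaphore): only the
+// device and semaphore handles are of interest to the object tracker here;
+// counter values and timeouts are outside this layer's scope.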
+bool ObjectLifetimes::PreCallValidateGetSemaphoreCounterValueKHR(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    uint64_t*                                   pValue) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetSemaphoreCounterValueKHR-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(semaphore, kVulkanObjectTypeSemaphore, false, "VUID-vkGetSemaphoreCounterValueKHR-semaphore-parameter", "VUID-vkGetSemaphoreCounterValueKHR-semaphore-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateWaitSemaphoresKHR(
+    VkDevice                                    device,
+    const VkSemaphoreWaitInfoKHR*               pWaitInfo,
+    uint64_t                                    timeout) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkWaitSemaphoresKHR-device-parameter", kVUIDUndefined);
+    if (pWaitInfo) {
+        if (pWaitInfo->pSemaphores) {
+            for (uint32_t index1 = 0; index1 < pWaitInfo->semaphoreCount; ++index1) {
+                skip |= ValidateObject(pWaitInfo->pSemaphores[index1], kVulkanObjectTypeSemaphore, false, "VUID-VkSemaphoreWaitInfoKHR-pSemaphores-parameter", kVUIDUndefined);
+            }
+        }
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateSignalSemaphoreKHR(
+    VkDevice                                    device,
+    const VkSemaphoreSignalInfoKHR*             pSignalInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkSignalSemaphoreKHR-device-parameter", kVUIDUndefined);
+    if (pSignalInfo) {
+        skip |= ValidateObject(pSignalInfo->semaphore, kVulkanObjectTypeSemaphore, false, "VUID-VkSemaphoreSignalInfoKHR-semaphore-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetBufferDeviceAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetBufferDeviceAddressKHR-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetBufferOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetBufferOpaqueCaptureAddressKHR-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetDeviceMemoryOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetDeviceMemoryOpaqueCaptureAddressKHR-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPipelineExecutablePropertiesKHR(
+    VkDevice                                    device,
+    const VkPipelineInfoKHR*                    pPipelineInfo,
+    uint32_t*                                   pExecutableCount,
+    VkPipelineExecutablePropertiesKHR*          pProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetPipelineExecutablePropertiesKHR-device-parameter", kVUIDUndefined);
+    if (pPipelineInfo) {
+        skip |= ValidateObject(pPipelineInfo->pipeline, kVulkanObjectTypePipeline, false, "VUID-VkPipelineInfoKHR-pipeline-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPipelineExecutableStatisticsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pStatisticCount,
+    VkPipelineExecutableStatisticKHR*           pStatistics) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetPipelineExecutableStatisticsKHR-device-parameter", kVUIDUndefined);
+    if (pExecutableInfo) {
+        skip |= ValidateObject(pExecutableInfo->pipeline, kVulkanObjectTypePipeline, false, "VUID-VkPipelineExecutableInfoKHR-pipeline-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPipelineExecutableInternalRepresentationsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pInternalRepresentationCount,
+    VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetPipelineExecutableInternalRepresentationsKHR-device-parameter", kVUIDUndefined);
+    if (pExecutableInfo) {
+        skip |= ValidateObject(pExecutableInfo->pipeline, kVulkanObjectTypePipeline, false, "VUID-VkPipelineExecutableInfoKHR-pipeline-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCreateDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    const VkDebugReportCallbackCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugReportCallbackEXT*                   pCallback) const {
+    bool skip = false;
+    skip |= ValidateObject(instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateDebugReportCallbackEXT-instance-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    const VkDebugReportCallbackCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugReportCallbackEXT*                   pCallback,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pCallback, kVulkanObjectTypeDebugReportCallbackEXT, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateDestroyDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    VkDebugReportCallbackEXT                    callback,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(instance, kVulkanObjectTypeInstance, false, "VUID-vkDestroyDebugReportCallbackEXT-instance-parameter", kVUIDUndefined);
+    skip |= ValidateObject(callback, kVulkanObjectTypeDebugReportCallbackEXT, false, "VUID-vkDestroyDebugReportCallbackEXT-callback-parameter", "VUID-vkDestroyDebugReportCallbackEXT-callback-parent");
+    skip |= ValidateDestroyObject(callback, kVulkanObjectTypeDebugReportCallbackEXT, pAllocator, kVUIDUndefined, kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordDestroyDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    VkDebugReportCallbackEXT                    callback,
+    const VkAllocationCallbacks*                pAllocator) {
+    RecordDestroyObject(callback, kVulkanObjectTypeDebugReportCallbackEXT);
+
+}
+
+bool ObjectLifetimes::PreCallValidateDebugReportMessageEXT(
+    VkInstance                                  instance,
+    VkDebugReportFlagsEXT                       flags,
+    VkDebugReportObjectTypeEXT                  objectType,
+    uint64_t                                    object,
+    size_t                                      location,
+    int32_t                                     messageCode,
+    const char*                                 pLayerPrefix,
+    const char*                                 pMessage) const {
+    bool skip = false;
+    skip |= ValidateObject(instance, kVulkanObjectTypeInstance, false, "VUID-vkDebugReportMessageEXT-instance-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateDebugMarkerSetObjectTagEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectTagInfoEXT*        pTagInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDebugMarkerSetObjectTagEXT-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateDebugMarkerSetObjectNameEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectNameInfoEXT*       pNameInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDebugMarkerSetObjectNameEXT-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdDebugMarkerBeginEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDebugMarkerBeginEXT-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdDebugMarkerEndEXT(
+    VkCommandBuffer                             commandBuffer) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDebugMarkerEndEXT-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdDebugMarkerInsertEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDebugMarkerInsertEXT-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdBindTransformFeedbackBuffersEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets,
+    const VkDeviceSize*                         pSizes) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBindTransformFeedbackBuffersEXT-commandBuffer-parameter", "VUID-vkCmdBindTransformFeedbackBuffersEXT-commonparent");
+    if (pBuffers) {
+        for (uint32_t index0 = 0; index0 < bindingCount; ++index0) {
+            skip |= ValidateObject(pBuffers[index0], kVulkanObjectTypeBuffer, false, "VUID-vkCmdBindTransformFeedbackBuffersEXT-pBuffers-parameter", "VUID-vkCmdBindTransformFeedbackBuffersEXT-commonparent");
+        }
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdBeginTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBeginTransformFeedbackEXT-commandBuffer-parameter", "VUID-vkCmdBeginTransformFeedbackEXT-commonparent");
+    if (pCounterBuffers) {
+        for (uint32_t index0 = 0; index0 < counterBufferCount; ++index0) {
+            skip |= ValidateObject(pCounterBuffers[index0], kVulkanObjectTypeBuffer, true, kVUIDUndefined, "VUID-vkCmdBeginTransformFeedbackEXT-commonparent");
+        }
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdEndTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdEndTransformFeedbackEXT-commandBuffer-parameter", "VUID-vkCmdEndTransformFeedbackEXT-commonparent");
+    if (pCounterBuffers) {
+        for (uint32_t index0 = 0; index0 < counterBufferCount; ++index0) {
+            skip |= ValidateObject(pCounterBuffers[index0], kVulkanObjectTypeBuffer, true, kVUIDUndefined, "VUID-vkCmdEndTransformFeedbackEXT-commonparent");
+        }
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdBeginQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags,
+    uint32_t                                    index) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-parameter", "VUID-vkCmdBeginQueryIndexedEXT-commonparent");
+    skip |= ValidateObject(queryPool, kVulkanObjectTypeQueryPool, false, "VUID-vkCmdBeginQueryIndexedEXT-queryPool-parameter", "VUID-vkCmdBeginQueryIndexedEXT-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdEndQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    uint32_t                                    index) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdEndQueryIndexedEXT-commandBuffer-parameter", "VUID-vkCmdEndQueryIndexedEXT-commonparent");
+    skip |= ValidateObject(queryPool, kVulkanObjectTypeQueryPool, false, "VUID-vkCmdEndQueryIndexedEXT-queryPool-parameter", "VUID-vkCmdEndQueryIndexedEXT-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdDrawIndirectByteCountEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstInstance,
+    VkBuffer                                    counterBuffer,
+    VkDeviceSize                                counterBufferOffset,
+    uint32_t                                    counterOffset,
+    uint32_t                                    vertexStride) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-parameter", "VUID-vkCmdDrawIndirectByteCountEXT-commonparent");
+    skip |= ValidateObject(counterBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDrawIndirectByteCountEXT-counterBuffer-parameter", "VUID-vkCmdDrawIndirectByteCountEXT-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetImageViewHandleNVX(
+    VkDevice                                    device,
+    const VkImageViewHandleInfoNVX*             pInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetImageViewHandleNVX-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdDrawIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDrawIndirectCountKHR-commandBuffer-parameter", "VUID-vkCmdDrawIndirectCountKHR-commonparent");
+    skip |= ValidateObject(buffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDrawIndirectCountKHR-buffer-parameter", "VUID-vkCmdDrawIndirectCountKHR-commonparent");
+    skip |= ValidateObject(countBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDrawIndirectCountKHR-countBuffer-parameter", "VUID-vkCmdDrawIndirectCountKHR-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdDrawIndexedIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-parameter", "VUID-vkCmdDrawIndexedIndirectCountKHR-commonparent");
+    skip |= ValidateObject(buffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-parameter", "VUID-vkCmdDrawIndexedIndirectCountKHR-commonparent");
+    skip |= ValidateObject(countBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-parameter", "VUID-vkCmdDrawIndexedIndirectCountKHR-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetShaderInfoAMD(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    VkShaderStageFlagBits                       shaderStage,
+    VkShaderInfoTypeAMD                         infoType,
+    size_t*                                     pInfoSize,
+    void*                                       pInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetShaderInfoAMD-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(pipeline, kVulkanObjectTypePipeline, false, "VUID-vkGetShaderInfoAMD-pipeline-parameter", "VUID-vkGetShaderInfoAMD-pipeline-parent");
+
+    return skip;
+}
+
+#ifdef VK_USE_PLATFORM_GGP
+
+bool ObjectLifetimes::PreCallValidateCreateStreamDescriptorSurfaceGGP(
+    VkInstance                                  instance,
+    const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const {
+    bool skip = false;
+    skip |= ValidateObject(instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateStreamDescriptorSurfaceGGP-instance-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateStreamDescriptorSurfaceGGP(
+    VkInstance                                  instance,
+    const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pSurface, kVulkanObjectTypeSurfaceKHR, pAllocator);
+
+}
+#endif // VK_USE_PLATFORM_GGP
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceExternalImageFormatPropertiesNV(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageCreateFlags                          flags,
+    VkExternalMemoryHandleTypeFlagsNV           externalHandleType,
+    VkExternalImageFormatPropertiesNV*          pExternalImageFormatProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+bool ObjectLifetimes::PreCallValidateGetMemoryWin32HandleNV(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkExternalMemoryHandleTypeFlagsNV           handleType,
+    HANDLE*                                     pHandle) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetMemoryWin32HandleNV-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(memory, kVulkanObjectTypeDeviceMemory, false, "VUID-vkGetMemoryWin32HandleNV-memory-parameter", "VUID-vkGetMemoryWin32HandleNV-memory-parent");
+
+    return skip;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_VI_NN
+
+bool ObjectLifetimes::PreCallValidateCreateViSurfaceNN(
+    VkInstance                                  instance,
+    const VkViSurfaceCreateInfoNN*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const {
+    bool skip = false;
+    skip |= ValidateObject(instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateViSurfaceNN-instance-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateViSurfaceNN(
+    VkInstance                                  instance,
+    const VkViSurfaceCreateInfoNN*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pSurface, kVulkanObjectTypeSurfaceKHR, pAllocator);
+
+}
+#endif // VK_USE_PLATFORM_VI_NN
+
+bool ObjectLifetimes::PreCallValidateCmdBeginConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkConditionalRenderingBeginInfoEXT*   pConditionalRenderingBegin) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBeginConditionalRenderingEXT-commandBuffer-parameter", kVUIDUndefined);
+    if (pConditionalRenderingBegin) {
+        skip |= ValidateObject(pConditionalRenderingBegin->buffer, kVulkanObjectTypeBuffer, false, "VUID-VkConditionalRenderingBeginInfoEXT-buffer-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdEndConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdEndConditionalRenderingEXT-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdProcessCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdProcessCommandsInfoNVX*          pProcessCommandsInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdProcessCommandsNVX-commandBuffer-parameter", kVUIDUndefined);
+    if (pProcessCommandsInfo) {
+        skip |= ValidateObject(pProcessCommandsInfo->objectTable, kVulkanObjectTypeObjectTableNVX, false, "VUID-VkCmdProcessCommandsInfoNVX-objectTable-parameter", "VUID-VkCmdProcessCommandsInfoNVX-commonparent");
+        skip |= ValidateObject(pProcessCommandsInfo->indirectCommandsLayout, kVulkanObjectTypeIndirectCommandsLayoutNVX, false, "VUID-VkCmdProcessCommandsInfoNVX-indirectCommandsLayout-parameter", "VUID-VkCmdProcessCommandsInfoNVX-commonparent");
+        if (pProcessCommandsInfo->pIndirectCommandsTokens) {
+            for (uint32_t index1 = 0; index1 < pProcessCommandsInfo->indirectCommandsTokenCount; ++index1) {
+                skip |= ValidateObject(pProcessCommandsInfo->pIndirectCommandsTokens[index1].buffer, kVulkanObjectTypeBuffer, false, "VUID-VkIndirectCommandsTokenNVX-buffer-parameter", kVUIDUndefined);
+            }
+        }
+        skip |= ValidateObject(pProcessCommandsInfo->targetCommandBuffer, kVulkanObjectTypeCommandBuffer, true, "VUID-VkCmdProcessCommandsInfoNVX-targetCommandBuffer-parameter", "VUID-VkCmdProcessCommandsInfoNVX-commonparent");
+        skip |= ValidateObject(pProcessCommandsInfo->sequencesCountBuffer, kVulkanObjectTypeBuffer, true, "VUID-VkCmdProcessCommandsInfoNVX-sequencesCountBuffer-parameter", "VUID-VkCmdProcessCommandsInfoNVX-commonparent");
+        skip |= ValidateObject(pProcessCommandsInfo->sequencesIndexBuffer, kVulkanObjectTypeBuffer, true, "VUID-VkCmdProcessCommandsInfoNVX-sequencesIndexBuffer-parameter", "VUID-VkCmdProcessCommandsInfoNVX-commonparent");
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdReserveSpaceForCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdReserveSpaceForCommandsInfoNVX*  pReserveSpaceInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-parameter", kVUIDUndefined);
+    if (pReserveSpaceInfo) {
+        skip |= ValidateObject(pReserveSpaceInfo->objectTable, kVulkanObjectTypeObjectTableNVX, false, "VUID-VkCmdReserveSpaceForCommandsInfoNVX-objectTable-parameter", "VUID-VkCmdReserveSpaceForCommandsInfoNVX-commonparent");
+        skip |= ValidateObject(pReserveSpaceInfo->indirectCommandsLayout, kVulkanObjectTypeIndirectCommandsLayoutNVX, false, "VUID-VkCmdReserveSpaceForCommandsInfoNVX-indirectCommandsLayout-parameter", "VUID-VkCmdReserveSpaceForCommandsInfoNVX-commonparent");
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCreateIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkIndirectCommandsLayoutNVX*                pIndirectCommandsLayout) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreateIndirectCommandsLayoutNVX-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkIndirectCommandsLayoutNVX*                pIndirectCommandsLayout,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pIndirectCommandsLayout, kVulkanObjectTypeIndirectCommandsLayoutNVX, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateDestroyIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    VkIndirectCommandsLayoutNVX                 indirectCommandsLayout,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyIndirectCommandsLayoutNVX-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(indirectCommandsLayout, kVulkanObjectTypeIndirectCommandsLayoutNVX, false, "VUID-vkDestroyIndirectCommandsLayoutNVX-indirectCommandsLayout-parameter", "VUID-vkDestroyIndirectCommandsLayoutNVX-indirectCommandsLayout-parent");
+    skip |= ValidateDestroyObject(indirectCommandsLayout, kVulkanObjectTypeIndirectCommandsLayoutNVX, pAllocator, kVUIDUndefined, kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordDestroyIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    VkIndirectCommandsLayoutNVX                 indirectCommandsLayout,
+    const VkAllocationCallbacks*                pAllocator) {
+    RecordDestroyObject(indirectCommandsLayout, kVulkanObjectTypeIndirectCommandsLayoutNVX);
+
+}
+
+bool ObjectLifetimes::PreCallValidateCreateObjectTableNVX(
+    VkDevice                                    device,
+    const VkObjectTableCreateInfoNVX*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkObjectTableNVX*                           pObjectTable) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreateObjectTableNVX-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateObjectTableNVX(
+    VkDevice                                    device,
+    const VkObjectTableCreateInfoNVX*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkObjectTableNVX*                           pObjectTable,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pObjectTable, kVulkanObjectTypeObjectTableNVX, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateDestroyObjectTableNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyObjectTableNVX-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(objectTable, kVulkanObjectTypeObjectTableNVX, false, "VUID-vkDestroyObjectTableNVX-objectTable-parameter", "VUID-vkDestroyObjectTableNVX-objectTable-parent");
+    skip |= ValidateDestroyObject(objectTable, kVulkanObjectTypeObjectTableNVX, pAllocator, kVUIDUndefined, kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordDestroyObjectTableNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    const VkAllocationCallbacks*                pAllocator) {
+    RecordDestroyObject(objectTable, kVulkanObjectTypeObjectTableNVX);
+
+}
+
+bool ObjectLifetimes::PreCallValidateRegisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectTableEntryNVX* const*         ppObjectTableEntries,
+    const uint32_t*                             pObjectIndices) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkRegisterObjectsNVX-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(objectTable, kVulkanObjectTypeObjectTableNVX, false, "VUID-vkRegisterObjectsNVX-objectTable-parameter", "VUID-vkRegisterObjectsNVX-objectTable-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateUnregisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectEntryTypeNVX*                 pObjectEntryTypes,
+    const uint32_t*                             pObjectIndices) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkUnregisterObjectsNVX-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(objectTable, kVulkanObjectTypeObjectTableNVX, false, "VUID-vkUnregisterObjectsNVX-objectTable-parameter", "VUID-vkUnregisterObjectsNVX-objectTable-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceGeneratedCommandsPropertiesNVX(
+    VkPhysicalDevice                            physicalDevice,
+    VkDeviceGeneratedCommandsFeaturesNVX*       pFeatures,
+    VkDeviceGeneratedCommandsLimitsNVX*         pLimits) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdSetViewportWScalingNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewportWScalingNV*                 pViewportWScalings) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetViewportWScalingNV-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateReleaseDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkReleaseDisplayEXT-physicalDevice-parameter", kVUIDUndefined);
+    skip |= ValidateObject(display, kVulkanObjectTypeDisplayKHR, false, "VUID-vkReleaseDisplayEXT-display-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+bool ObjectLifetimes::PreCallValidateAcquireXlibDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    VkDisplayKHR                                display) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkAcquireXlibDisplayEXT-physicalDevice-parameter", kVUIDUndefined);
+    skip |= ValidateObject(display, kVulkanObjectTypeDisplayKHR, false, "VUID-vkAcquireXlibDisplayEXT-display-parameter", kVUIDUndefined);
+
+    return skip;
+}
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+bool ObjectLifetimes::PreCallValidateGetRandROutputDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    RROutput                                    rrOutput,
+    VkDisplayKHR*                               pDisplay) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetRandROutputDisplayEXT-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordGetRandROutputDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    RROutput                                    rrOutput,
+    VkDisplayKHR*                               pDisplay,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pDisplay, kVulkanObjectTypeDisplayKHR, nullptr);
+
+}
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceSurfaceCapabilities2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilities2EXT*                  pSurfaceCapabilities) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-physicalDevice-parameter", "VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-commonparent");
+    skip |= ValidateObject(surface, kVulkanObjectTypeSurfaceKHR, false, "VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-surface-parameter", "VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateDisplayPowerControlEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayPowerInfoEXT*                pDisplayPowerInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDisplayPowerControlEXT-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(display, kVulkanObjectTypeDisplayKHR, false, "VUID-vkDisplayPowerControlEXT-display-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateRegisterDeviceEventEXT(
+    VkDevice                                    device,
+    const VkDeviceEventInfoEXT*                 pDeviceEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkRegisterDeviceEventEXT-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordRegisterDeviceEventEXT(
+    VkDevice                                    device,
+    const VkDeviceEventInfoEXT*                 pDeviceEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pFence, kVulkanObjectTypeFence, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateRegisterDisplayEventEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayEventInfoEXT*                pDisplayEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkRegisterDisplayEventEXT-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(display, kVulkanObjectTypeDisplayKHR, false, "VUID-vkRegisterDisplayEventEXT-display-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordRegisterDisplayEventEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayEventInfoEXT*                pDisplayEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pFence, kVulkanObjectTypeFence, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateGetSwapchainCounterEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkSurfaceCounterFlagBitsEXT                 counter,
+    uint64_t*                                   pCounterValue) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetSwapchainCounterEXT-device-parameter", "VUID-vkGetSwapchainCounterEXT-commonparent");
+    skip |= ValidateObject(swapchain, kVulkanObjectTypeSwapchainKHR, false, "VUID-vkGetSwapchainCounterEXT-swapchain-parameter", "VUID-vkGetSwapchainCounterEXT-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetRefreshCycleDurationGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkRefreshCycleDurationGOOGLE*               pDisplayTimingProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetRefreshCycleDurationGOOGLE-device-parameter", "VUID-vkGetRefreshCycleDurationGOOGLE-commonparent");
+    skip |= ValidateObject(swapchain, kVulkanObjectTypeSwapchainKHR, false, "VUID-vkGetRefreshCycleDurationGOOGLE-swapchain-parameter", "VUID-vkGetRefreshCycleDurationGOOGLE-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPastPresentationTimingGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pPresentationTimingCount,
+    VkPastPresentationTimingGOOGLE*             pPresentationTimings) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetPastPresentationTimingGOOGLE-device-parameter", "VUID-vkGetPastPresentationTimingGOOGLE-commonparent");
+    skip |= ValidateObject(swapchain, kVulkanObjectTypeSwapchainKHR, false, "VUID-vkGetPastPresentationTimingGOOGLE-swapchain-parameter", "VUID-vkGetPastPresentationTimingGOOGLE-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdSetDiscardRectangleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstDiscardRectangle,
+    uint32_t                                    discardRectangleCount,
+    const VkRect2D*                             pDiscardRectangles) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetDiscardRectangleEXT-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateSetHdrMetadataEXT(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainKHR*                       pSwapchains,
+    const VkHdrMetadataEXT*                     pMetadata) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkSetHdrMetadataEXT-device-parameter", "VUID-vkSetHdrMetadataEXT-commonparent");
+    if (pSwapchains) {
+        for (uint32_t index0 = 0; index0 < swapchainCount; ++index0) {
+            skip |= ValidateObject(pSwapchains[index0], kVulkanObjectTypeSwapchainKHR, false, "VUID-vkSetHdrMetadataEXT-pSwapchains-parameter", "VUID-vkSetHdrMetadataEXT-commonparent");
+        }
+    }
+
+    return skip;
+}
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+
+bool ObjectLifetimes::PreCallValidateCreateIOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkIOSSurfaceCreateInfoMVK*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const {
+    bool skip = false;
+    skip |= ValidateObject(instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateIOSSurfaceMVK-instance-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateIOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkIOSSurfaceCreateInfoMVK*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pSurface, kVulkanObjectTypeSurfaceKHR, pAllocator);
+
+}
+#endif // VK_USE_PLATFORM_IOS_MVK
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+
+bool ObjectLifetimes::PreCallValidateCreateMacOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkMacOSSurfaceCreateInfoMVK*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const {
+    bool skip = false;
+    skip |= ValidateObject(instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateMacOSSurfaceMVK-instance-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateMacOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkMacOSSurfaceCreateInfoMVK*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pSurface, kVulkanObjectTypeSurfaceKHR, pAllocator);
+
+}
+#endif // VK_USE_PLATFORM_MACOS_MVK
+
+bool ObjectLifetimes::PreCallValidateQueueBeginDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(queue, kVulkanObjectTypeQueue, false, "VUID-vkQueueBeginDebugUtilsLabelEXT-queue-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateQueueEndDebugUtilsLabelEXT(
+    VkQueue                                     queue) const {
+    bool skip = false;
+    skip |= ValidateObject(queue, kVulkanObjectTypeQueue, false, "VUID-vkQueueEndDebugUtilsLabelEXT-queue-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateQueueInsertDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(queue, kVulkanObjectTypeQueue, false, "VUID-vkQueueInsertDebugUtilsLabelEXT-queue-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdBeginDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBeginDebugUtilsLabelEXT-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdEndDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdEndDebugUtilsLabelEXT-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdInsertDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdInsertDebugUtilsLabelEXT-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCreateDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    const VkDebugUtilsMessengerCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugUtilsMessengerEXT*                   pMessenger) const {
+    bool skip = false;
+    skip |= ValidateObject(instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateDebugUtilsMessengerEXT-instance-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    const VkDebugUtilsMessengerCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugUtilsMessengerEXT*                   pMessenger,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pMessenger, kVulkanObjectTypeDebugUtilsMessengerEXT, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateDestroyDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessengerEXT                    messenger,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(instance, kVulkanObjectTypeInstance, false, "VUID-vkDestroyDebugUtilsMessengerEXT-instance-parameter", kVUIDUndefined);
+    skip |= ValidateObject(messenger, kVulkanObjectTypeDebugUtilsMessengerEXT, false, "VUID-vkDestroyDebugUtilsMessengerEXT-messenger-parameter", "VUID-vkDestroyDebugUtilsMessengerEXT-messenger-parent");
+    skip |= ValidateDestroyObject(messenger, kVulkanObjectTypeDebugUtilsMessengerEXT, pAllocator, kVUIDUndefined, kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordDestroyDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessengerEXT                    messenger,
+    const VkAllocationCallbacks*                pAllocator) {
+    RecordDestroyObject(messenger, kVulkanObjectTypeDebugUtilsMessengerEXT);
+
+}
+
+bool ObjectLifetimes::PreCallValidateSubmitDebugUtilsMessageEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessageSeverityFlagBitsEXT      messageSeverity,
+    VkDebugUtilsMessageTypeFlagsEXT             messageTypes,
+    const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData) const {
+    bool skip = false;
+    skip |= ValidateObject(instance, kVulkanObjectTypeInstance, false, "VUID-vkSubmitDebugUtilsMessageEXT-instance-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+bool ObjectLifetimes::PreCallValidateGetAndroidHardwareBufferPropertiesANDROID(
+    VkDevice                                    device,
+    const struct AHardwareBuffer*               buffer,
+    VkAndroidHardwareBufferPropertiesANDROID*   pProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetAndroidHardwareBufferPropertiesANDROID-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+bool ObjectLifetimes::PreCallValidateGetMemoryAndroidHardwareBufferANDROID(
+    VkDevice                                    device,
+    const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
+    struct AHardwareBuffer**                    pBuffer) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetMemoryAndroidHardwareBufferANDROID-device-parameter", kVUIDUndefined);
+    if (pInfo) {
+        skip |= ValidateObject(pInfo->memory, kVulkanObjectTypeDeviceMemory, false, "VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-memory-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+bool ObjectLifetimes::PreCallValidateCmdSetSampleLocationsEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkSampleLocationsInfoEXT*             pSampleLocationsInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetSampleLocationsEXT-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceMultisamplePropertiesEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSampleCountFlagBits                       samples,
+    VkMultisamplePropertiesEXT*                 pMultisampleProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceMultisamplePropertiesEXT-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetImageDrmFormatModifierPropertiesEXT(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkImageDrmFormatModifierPropertiesEXT*      pProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetImageDrmFormatModifierPropertiesEXT-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(image, kVulkanObjectTypeImage, false, "VUID-vkGetImageDrmFormatModifierPropertiesEXT-image-parameter", "VUID-vkGetImageDrmFormatModifierPropertiesEXT-image-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCreateValidationCacheEXT(
+    VkDevice                                    device,
+    const VkValidationCacheCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkValidationCacheEXT*                       pValidationCache) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreateValidationCacheEXT-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateValidationCacheEXT(
+    VkDevice                                    device,
+    const VkValidationCacheCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkValidationCacheEXT*                       pValidationCache,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pValidationCache, kVulkanObjectTypeValidationCacheEXT, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateDestroyValidationCacheEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyValidationCacheEXT-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(validationCache, kVulkanObjectTypeValidationCacheEXT, true, "VUID-vkDestroyValidationCacheEXT-validationCache-parameter", "VUID-vkDestroyValidationCacheEXT-validationCache-parent");
+    skip |= ValidateDestroyObject(validationCache, kVulkanObjectTypeValidationCacheEXT, pAllocator, kVUIDUndefined, kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordDestroyValidationCacheEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    const VkAllocationCallbacks*                pAllocator) {
+    RecordDestroyObject(validationCache, kVulkanObjectTypeValidationCacheEXT);
+
+}
+
+bool ObjectLifetimes::PreCallValidateMergeValidationCachesEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkValidationCacheEXT*                 pSrcCaches) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkMergeValidationCachesEXT-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(dstCache, kVulkanObjectTypeValidationCacheEXT, false, "VUID-vkMergeValidationCachesEXT-dstCache-parameter", "VUID-vkMergeValidationCachesEXT-dstCache-parent");
+    if (pSrcCaches) {
+        for (uint32_t index0 = 0; index0 < srcCacheCount; ++index0) {
+            skip |= ValidateObject(pSrcCaches[index0], kVulkanObjectTypeValidationCacheEXT, false, "VUID-vkMergeValidationCachesEXT-pSrcCaches-parameter", "VUID-vkMergeValidationCachesEXT-pSrcCaches-parent");
+        }
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetValidationCacheDataEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    size_t*                                     pDataSize,
+    void*                                       pData) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetValidationCacheDataEXT-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(validationCache, kVulkanObjectTypeValidationCacheEXT, false, "VUID-vkGetValidationCacheDataEXT-validationCache-parameter", "VUID-vkGetValidationCacheDataEXT-validationCache-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdBindShadingRateImageNV(
+    VkCommandBuffer                             commandBuffer,
+    VkImageView                                 imageView,
+    VkImageLayout                               imageLayout) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBindShadingRateImageNV-commandBuffer-parameter", "VUID-vkCmdBindShadingRateImageNV-commonparent");
+    skip |= ValidateObject(imageView, kVulkanObjectTypeImageView, true, "VUID-vkCmdBindShadingRateImageNV-imageView-parameter", "VUID-vkCmdBindShadingRateImageNV-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdSetViewportShadingRatePaletteNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkShadingRatePaletteNV*               pShadingRatePalettes) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetViewportShadingRatePaletteNV-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdSetCoarseSampleOrderNV(
+    VkCommandBuffer                             commandBuffer,
+    VkCoarseSampleOrderTypeNV                   sampleOrderType,
+    uint32_t                                    customSampleOrderCount,
+    const VkCoarseSampleOrderCustomNV*          pCustomSampleOrders) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetCoarseSampleOrderNV-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCreateAccelerationStructureNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureCreateInfoNV*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkAccelerationStructureNV*                  pAccelerationStructure) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreateAccelerationStructureNV-device-parameter", kVUIDUndefined);
+    if (pCreateInfo) {
+        if (pCreateInfo->info.pGeometries) {
+            for (uint32_t index2 = 0; index2 < pCreateInfo->info.geometryCount; ++index2) {
+                skip |= ValidateObject(pCreateInfo->info.pGeometries[index2].geometry.triangles.vertexData, kVulkanObjectTypeBuffer, true, "VUID-VkGeometryTrianglesNV-vertexData-parameter", "VUID-VkGeometryTrianglesNV-commonparent");
+                skip |= ValidateObject(pCreateInfo->info.pGeometries[index2].geometry.triangles.indexData, kVulkanObjectTypeBuffer, true, "VUID-VkGeometryTrianglesNV-indexData-parameter", "VUID-VkGeometryTrianglesNV-commonparent");
+                skip |= ValidateObject(pCreateInfo->info.pGeometries[index2].geometry.triangles.transformData, kVulkanObjectTypeBuffer, true, "VUID-VkGeometryTrianglesNV-transformData-parameter", "VUID-VkGeometryTrianglesNV-commonparent");
+                skip |= ValidateObject(pCreateInfo->info.pGeometries[index2].geometry.aabbs.aabbData, kVulkanObjectTypeBuffer, true, "VUID-VkGeometryAABBNV-aabbData-parameter", kVUIDUndefined);
+            }
+        }
+    }
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateAccelerationStructureNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureCreateInfoNV*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkAccelerationStructureNV*                  pAccelerationStructure,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pAccelerationStructure, kVulkanObjectTypeAccelerationStructureNV, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateDestroyAccelerationStructureNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyAccelerationStructureNV-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(accelerationStructure, kVulkanObjectTypeAccelerationStructureNV, false, "VUID-vkDestroyAccelerationStructureNV-accelerationStructure-parameter", "VUID-vkDestroyAccelerationStructureNV-accelerationStructure-parent");
+    skip |= ValidateDestroyObject(accelerationStructure, kVulkanObjectTypeAccelerationStructureNV, pAllocator, kVUIDUndefined, kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordDestroyAccelerationStructureNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    const VkAllocationCallbacks*                pAllocator) {
+    RecordDestroyObject(accelerationStructure, kVulkanObjectTypeAccelerationStructureNV);
+
+}
+
+bool ObjectLifetimes::PreCallValidateGetAccelerationStructureMemoryRequirementsNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
+    VkMemoryRequirements2KHR*                   pMemoryRequirements) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetAccelerationStructureMemoryRequirementsNV-device-parameter", kVUIDUndefined);
+    if (pInfo) {
+        skip |= ValidateObject(pInfo->accelerationStructure, kVulkanObjectTypeAccelerationStructureNV, false, "VUID-VkAccelerationStructureMemoryRequirementsInfoNV-accelerationStructure-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateBindAccelerationStructureMemoryNV(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindAccelerationStructureMemoryInfoNV* pBindInfos) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkBindAccelerationStructureMemoryNV-device-parameter", kVUIDUndefined);
+    if (pBindInfos) {
+        for (uint32_t index0 = 0; index0 < bindInfoCount; ++index0) {
+            skip |= ValidateObject(pBindInfos[index0].accelerationStructure, kVulkanObjectTypeAccelerationStructureNV, false, "VUID-VkBindAccelerationStructureMemoryInfoNV-accelerationStructure-parameter", "VUID-VkBindAccelerationStructureMemoryInfoNV-commonparent");
+            skip |= ValidateObject(pBindInfos[index0].memory, kVulkanObjectTypeDeviceMemory, false, "VUID-VkBindAccelerationStructureMemoryInfoNV-memory-parameter", "VUID-VkBindAccelerationStructureMemoryInfoNV-commonparent");
+        }
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdBuildAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    const VkAccelerationStructureInfoNV*        pInfo,
+    VkBuffer                                    instanceData,
+    VkDeviceSize                                instanceOffset,
+    VkBool32                                    update,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkBuffer                                    scratch,
+    VkDeviceSize                                scratchOffset) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-parameter", "VUID-vkCmdBuildAccelerationStructureNV-commonparent");
+    if (pInfo) {
+        if (pInfo->pGeometries) {
+            for (uint32_t index1 = 0; index1 < pInfo->geometryCount; ++index1) {
+                skip |= ValidateObject(pInfo->pGeometries[index1].geometry.triangles.vertexData, kVulkanObjectTypeBuffer, true, "VUID-VkGeometryTrianglesNV-vertexData-parameter", "VUID-VkGeometryTrianglesNV-commonparent");
+                skip |= ValidateObject(pInfo->pGeometries[index1].geometry.triangles.indexData, kVulkanObjectTypeBuffer, true, "VUID-VkGeometryTrianglesNV-indexData-parameter", "VUID-VkGeometryTrianglesNV-commonparent");
+                skip |= ValidateObject(pInfo->pGeometries[index1].geometry.triangles.transformData, kVulkanObjectTypeBuffer, true, "VUID-VkGeometryTrianglesNV-transformData-parameter", "VUID-VkGeometryTrianglesNV-commonparent");
+                skip |= ValidateObject(pInfo->pGeometries[index1].geometry.aabbs.aabbData, kVulkanObjectTypeBuffer, true, "VUID-VkGeometryAABBNV-aabbData-parameter", kVUIDUndefined);
+            }
+        }
+    }
+    skip |= ValidateObject(instanceData, kVulkanObjectTypeBuffer, true, "VUID-vkCmdBuildAccelerationStructureNV-instanceData-parameter", "VUID-vkCmdBuildAccelerationStructureNV-commonparent");
+    skip |= ValidateObject(dst, kVulkanObjectTypeAccelerationStructureNV, false, "VUID-vkCmdBuildAccelerationStructureNV-dst-parameter", "VUID-vkCmdBuildAccelerationStructureNV-commonparent");
+    skip |= ValidateObject(src, kVulkanObjectTypeAccelerationStructureNV, true, "VUID-vkCmdBuildAccelerationStructureNV-src-parameter", "VUID-vkCmdBuildAccelerationStructureNV-commonparent");
+    skip |= ValidateObject(scratch, kVulkanObjectTypeBuffer, false, "VUID-vkCmdBuildAccelerationStructureNV-scratch-parameter", "VUID-vkCmdBuildAccelerationStructureNV-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdCopyAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkCopyAccelerationStructureModeNV           mode) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-parameter", "VUID-vkCmdCopyAccelerationStructureNV-commonparent");
+    skip |= ValidateObject(dst, kVulkanObjectTypeAccelerationStructureNV, false, "VUID-vkCmdCopyAccelerationStructureNV-dst-parameter", "VUID-vkCmdCopyAccelerationStructureNV-commonparent");
+    skip |= ValidateObject(src, kVulkanObjectTypeAccelerationStructureNV, false, "VUID-vkCmdCopyAccelerationStructureNV-src-parameter", "VUID-vkCmdCopyAccelerationStructureNV-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdTraceRaysNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    raygenShaderBindingTableBuffer,
+    VkDeviceSize                                raygenShaderBindingOffset,
+    VkBuffer                                    missShaderBindingTableBuffer,
+    VkDeviceSize                                missShaderBindingOffset,
+    VkDeviceSize                                missShaderBindingStride,
+    VkBuffer                                    hitShaderBindingTableBuffer,
+    VkDeviceSize                                hitShaderBindingOffset,
+    VkDeviceSize                                hitShaderBindingStride,
+    VkBuffer                                    callableShaderBindingTableBuffer,
+    VkDeviceSize                                callableShaderBindingOffset,
+    VkDeviceSize                                callableShaderBindingStride,
+    uint32_t                                    width,
+    uint32_t                                    height,
+    uint32_t                                    depth) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdTraceRaysNV-commandBuffer-parameter", "VUID-vkCmdTraceRaysNV-commonparent");
+    skip |= ValidateObject(raygenShaderBindingTableBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdTraceRaysNV-raygenShaderBindingTableBuffer-parameter", "VUID-vkCmdTraceRaysNV-commonparent");
+    skip |= ValidateObject(missShaderBindingTableBuffer, kVulkanObjectTypeBuffer, true, "VUID-vkCmdTraceRaysNV-missShaderBindingTableBuffer-parameter", "VUID-vkCmdTraceRaysNV-commonparent");
+    skip |= ValidateObject(hitShaderBindingTableBuffer, kVulkanObjectTypeBuffer, true, "VUID-vkCmdTraceRaysNV-hitShaderBindingTableBuffer-parameter", "VUID-vkCmdTraceRaysNV-commonparent");
+    skip |= ValidateObject(callableShaderBindingTableBuffer, kVulkanObjectTypeBuffer, true, "VUID-vkCmdTraceRaysNV-callableShaderBindingTableBuffer-parameter", "VUID-vkCmdTraceRaysNV-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCreateRayTracingPipelinesNV(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkRayTracingPipelineCreateInfoNV*     pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreateRayTracingPipelinesNV-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(pipelineCache, kVulkanObjectTypePipelineCache, true, "VUID-vkCreateRayTracingPipelinesNV-pipelineCache-parameter", "VUID-vkCreateRayTracingPipelinesNV-pipelineCache-parent");
+    if (pCreateInfos) {
+        for (uint32_t index0 = 0; index0 < createInfoCount; ++index0) {
+            if (pCreateInfos[index0].pStages) {
+                for (uint32_t index1 = 0; index1 < pCreateInfos[index0].stageCount; ++index1) {
+                    skip |= ValidateObject(pCreateInfos[index0].pStages[index1].module, kVulkanObjectTypeShaderModule, false, "VUID-VkPipelineShaderStageCreateInfo-module-parameter", kVUIDUndefined);
+                }
+            }
+            skip |= ValidateObject(pCreateInfos[index0].layout, kVulkanObjectTypePipelineLayout, false, "VUID-VkRayTracingPipelineCreateInfoNV-layout-parameter", "VUID-VkRayTracingPipelineCreateInfoNV-commonparent");
+            if ((pCreateInfos[index0].flags & VK_PIPELINE_CREATE_DERIVATIVE_BIT) && (pCreateInfos[index0].basePipelineIndex == -1))
+                skip |= ValidateObject(pCreateInfos[index0].basePipelineHandle, kVulkanObjectTypePipeline, false, "VUID-VkRayTracingPipelineCreateInfoNV-flags-02404", "VUID-VkRayTracingPipelineCreateInfoNV-commonparent");
+        }
+    }
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateRayTracingPipelinesNV(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkRayTracingPipelineCreateInfoNV*     pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines,
+    VkResult                                    result) {
+    if (VK_ERROR_VALIDATION_FAILED_EXT == result) return;
+    if (pPipelines) {
+        for (uint32_t index = 0; index < createInfoCount; index++) {
+            if (!pPipelines[index]) continue;
+            CreateObject(pPipelines[index], kVulkanObjectTypePipeline, pAllocator);
+        }
+    }
+
+}
+
+bool ObjectLifetimes::PreCallValidateGetRayTracingShaderGroupHandlesNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    firstGroup,
+    uint32_t                                    groupCount,
+    size_t                                      dataSize,
+    void*                                       pData) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetRayTracingShaderGroupHandlesNV-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(pipeline, kVulkanObjectTypePipeline, false, "VUID-vkGetRayTracingShaderGroupHandlesNV-pipeline-parameter", "VUID-vkGetRayTracingShaderGroupHandlesNV-pipeline-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetAccelerationStructureHandleNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    size_t                                      dataSize,
+    void*                                       pData) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetAccelerationStructureHandleNV-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(accelerationStructure, kVulkanObjectTypeAccelerationStructureNV, false, "VUID-vkGetAccelerationStructureHandleNV-accelerationStructure-parameter", "VUID-vkGetAccelerationStructureHandleNV-accelerationStructure-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdWriteAccelerationStructuresPropertiesNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    accelerationStructureCount,
+    const VkAccelerationStructureNV*            pAccelerationStructures,
+    VkQueryType                                 queryType,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commandBuffer-parameter", "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commonparent");
+    if (pAccelerationStructures) {
+        for (uint32_t index0 = 0; index0 < accelerationStructureCount; ++index0) {
+            skip |= ValidateObject(pAccelerationStructures[index0], kVulkanObjectTypeAccelerationStructureNV, false, "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-pAccelerationStructures-parameter", "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commonparent");
+        }
+    }
+    skip |= ValidateObject(queryPool, kVulkanObjectTypeQueryPool, false, "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryPool-parameter", "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCompileDeferredNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    shader) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCompileDeferredNV-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(pipeline, kVulkanObjectTypePipeline, false, "VUID-vkCompileDeferredNV-pipeline-parameter", "VUID-vkCompileDeferredNV-pipeline-parent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetMemoryHostPointerPropertiesEXT(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    const void*                                 pHostPointer,
+    VkMemoryHostPointerPropertiesEXT*           pMemoryHostPointerProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetMemoryHostPointerPropertiesEXT-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdWriteBufferMarkerAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    uint32_t                                    marker) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdWriteBufferMarkerAMD-commandBuffer-parameter", "VUID-vkCmdWriteBufferMarkerAMD-commonparent");
+    skip |= ValidateObject(dstBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdWriteBufferMarkerAMD-dstBuffer-parameter", "VUID-vkCmdWriteBufferMarkerAMD-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceCalibrateableTimeDomainsEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pTimeDomainCount,
+    VkTimeDomainEXT*                            pTimeDomains) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceCalibrateableTimeDomainsEXT-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetCalibratedTimestampsEXT(
+    VkDevice                                    device,
+    uint32_t                                    timestampCount,
+    const VkCalibratedTimestampInfoEXT*         pTimestampInfos,
+    uint64_t*                                   pTimestamps,
+    uint64_t*                                   pMaxDeviation) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetCalibratedTimestampsEXT-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdDrawMeshTasksNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    taskCount,
+    uint32_t                                    firstTask) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDrawMeshTasksNV-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdDrawMeshTasksIndirectNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-parameter", "VUID-vkCmdDrawMeshTasksIndirectNV-commonparent");
+    skip |= ValidateObject(buffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDrawMeshTasksIndirectNV-buffer-parameter", "VUID-vkCmdDrawMeshTasksIndirectNV-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdDrawMeshTasksIndirectCountNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-parameter", "VUID-vkCmdDrawMeshTasksIndirectCountNV-commonparent");
+    skip |= ValidateObject(buffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDrawMeshTasksIndirectCountNV-buffer-parameter", "VUID-vkCmdDrawMeshTasksIndirectCountNV-commonparent");
+    skip |= ValidateObject(countBuffer, kVulkanObjectTypeBuffer, false, "VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-parameter", "VUID-vkCmdDrawMeshTasksIndirectCountNV-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdSetExclusiveScissorNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstExclusiveScissor,
+    uint32_t                                    exclusiveScissorCount,
+    const VkRect2D*                             pExclusiveScissors) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetExclusiveScissorNV-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdSetCheckpointNV(
+    VkCommandBuffer                             commandBuffer,
+    const void*                                 pCheckpointMarker) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetCheckpointNV-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetQueueCheckpointDataNV(
+    VkQueue                                     queue,
+    uint32_t*                                   pCheckpointDataCount,
+    VkCheckpointDataNV*                         pCheckpointData) const {
+    bool skip = false;
+    skip |= ValidateObject(queue, kVulkanObjectTypeQueue, false, "VUID-vkGetQueueCheckpointDataNV-queue-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateInitializePerformanceApiINTEL(
+    VkDevice                                    device,
+    const VkInitializePerformanceApiInfoINTEL*  pInitializeInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkInitializePerformanceApiINTEL-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateUninitializePerformanceApiINTEL(
+    VkDevice                                    device) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkUninitializePerformanceApiINTEL-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdSetPerformanceMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceMarkerInfoINTEL*         pMarkerInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetPerformanceMarkerINTEL-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdSetPerformanceStreamMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceStreamMarkerInfoINTEL*   pMarkerInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetPerformanceStreamMarkerINTEL-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdSetPerformanceOverrideINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceOverrideInfoINTEL*       pOverrideInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetPerformanceOverrideINTEL-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPerformanceParameterINTEL(
+    VkDevice                                    device,
+    VkPerformanceParameterTypeINTEL             parameter,
+    VkPerformanceValueINTEL*                    pValue) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetPerformanceParameterINTEL-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateSetLocalDimmingAMD(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapChain,
+    VkBool32                                    localDimmingEnable) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkSetLocalDimmingAMD-device-parameter", "VUID-vkSetLocalDimmingAMD-commonparent");
+    skip |= ValidateObject(swapChain, kVulkanObjectTypeSwapchainKHR, false, "VUID-vkSetLocalDimmingAMD-swapChain-parameter", "VUID-vkSetLocalDimmingAMD-commonparent");
+
+    return skip;
+}
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+
+bool ObjectLifetimes::PreCallValidateCreateImagePipeSurfaceFUCHSIA(
+    VkInstance                                  instance,
+    const VkImagePipeSurfaceCreateInfoFUCHSIA*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const {
+    bool skip = false;
+    skip |= ValidateObject(instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateImagePipeSurfaceFUCHSIA-instance-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateImagePipeSurfaceFUCHSIA(
+    VkInstance                                  instance,
+    const VkImagePipeSurfaceCreateInfoFUCHSIA*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pSurface, kVulkanObjectTypeSurfaceKHR, pAllocator);
+
+}
+#endif // VK_USE_PLATFORM_FUCHSIA
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+
+bool ObjectLifetimes::PreCallValidateCreateMetalSurfaceEXT(
+    VkInstance                                  instance,
+    const VkMetalSurfaceCreateInfoEXT*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const {
+    bool skip = false;
+    skip |= ValidateObject(instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateMetalSurfaceEXT-instance-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateMetalSurfaceEXT(
+    VkInstance                                  instance,
+    const VkMetalSurfaceCreateInfoEXT*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pSurface, kVulkanObjectTypeSurfaceKHR, pAllocator);
+
+}
+#endif // VK_USE_PLATFORM_METAL_EXT
+
+bool ObjectLifetimes::PreCallValidateGetBufferDeviceAddressEXT(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetBufferDeviceAddressKHR-device-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceToolPropertiesEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pToolCount,
+    VkPhysicalDeviceToolPropertiesEXT*          pToolProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceToolPropertiesEXT-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceCooperativeMatrixPropertiesNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkCooperativeMatrixPropertiesNV*            pProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceCooperativeMatrixPropertiesNV-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pCombinationCount,
+    VkFramebufferMixedSamplesCombinationNV*     pCombinations) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV-physicalDevice-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceSurfacePresentModes2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false, "VUID-vkGetPhysicalDeviceSurfacePresentModes2EXT-physicalDevice-parameter", kVUIDUndefined);
+    if (pSurfaceInfo) {
+        skip |= ValidateObject(pSurfaceInfo->surface, kVulkanObjectTypeSurfaceKHR, false, "VUID-VkPhysicalDeviceSurfaceInfo2KHR-surface-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+bool ObjectLifetimes::PreCallValidateAcquireFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkAcquireFullScreenExclusiveModeEXT-device-parameter", "VUID-vkAcquireFullScreenExclusiveModeEXT-commonparent");
+    skip |= ValidateObject(swapchain, kVulkanObjectTypeSwapchainKHR, false, "VUID-vkAcquireFullScreenExclusiveModeEXT-swapchain-parameter", "VUID-vkAcquireFullScreenExclusiveModeEXT-commonparent");
+
+    return skip;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+bool ObjectLifetimes::PreCallValidateReleaseFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, kVUIDUndefined, kVUIDUndefined);
+    skip |= ValidateObject(swapchain, kVulkanObjectTypeSwapchainKHR, false, kVUIDUndefined, kVUIDUndefined);
+
+    return skip;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+bool ObjectLifetimes::PreCallValidateGetDeviceGroupSurfacePresentModes2EXT(
+    VkDevice                                    device,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetDeviceGroupSurfacePresentModes2EXT-device-parameter", kVUIDUndefined);
+    if (pSurfaceInfo) {
+        skip |= ValidateObject(pSurfaceInfo->surface, kVulkanObjectTypeSurfaceKHR, false, "VUID-VkPhysicalDeviceSurfaceInfo2KHR-surface-parameter", kVUIDUndefined);
+    }
+
+    return skip;
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+bool ObjectLifetimes::PreCallValidateCreateHeadlessSurfaceEXT(
+    VkInstance                                  instance,
+    const VkHeadlessSurfaceCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const {
+    bool skip = false;
+    skip |= ValidateObject(instance, kVulkanObjectTypeInstance, false, "VUID-vkCreateHeadlessSurfaceEXT-instance-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateHeadlessSurfaceEXT(
+    VkInstance                                  instance,
+    const VkHeadlessSurfaceCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pSurface, kVulkanObjectTypeSurfaceKHR, pAllocator);
+
+}
+
+bool ObjectLifetimes::PreCallValidateCmdSetLineStippleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    lineStippleFactor,
+    uint16_t                                    lineStipplePattern) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false, "VUID-vkCmdSetLineStippleEXT-commandBuffer-parameter", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateResetQueryPoolEXT(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkResetQueryPoolEXT-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(queryPool, kVulkanObjectTypeQueryPool, false, "VUID-vkResetQueryPoolEXT-queryPool-parameter", "VUID-vkResetQueryPoolEXT-queryPool-parent");
+
+    return skip;
+}
+
+
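Aside on the generated hooks above: every function in this hunk follows one two-phase shape. A const PreCallValidate* pass ORs per-parameter ValidateObject checks into a single skip flag (with kVUIDUndefined where no VUID applies), while PreCallRecord*/PostCallRecord* passes update the layer's handle table via CreateObject/RecordDestroyObject, and the PostCallRecord* create hooks bail out on a failed VkResult. The standalone sketch below only illustrates that shape under stated assumptions; FakeDevice, FakeValidationCache, and FakeObjectLifetimes are hypothetical stand-ins, not the real Vulkan or validation-layer types, and the real generated code above remains the authoritative implementation.

// Illustrative sketch of the validate/record pattern used by the generated
// ObjectLifetimes hooks. All types here are hypothetical stand-ins so the
// example is self-contained; it is not part of the imported diff.
#include <cstdint>
#include <iostream>
#include <unordered_set>

using FakeDevice = std::uint64_t;           // stand-in for a dispatchable handle
using FakeValidationCache = std::uint64_t;  // stand-in for a non-dispatchable handle

class FakeObjectLifetimes {
 public:
  // Mirrors the role of ValidateObject(handle, type, optional, ...):
  // returning true means "skip" (an error was detected).
  bool ValidateObject(std::uint64_t handle, bool optional_allowed) const {
    if (handle == 0) return !optional_allowed;        // required handle may not be null
    return live_objects_.count(handle) == 0;          // unknown handle -> error
  }

  void RecordCreate(std::uint64_t handle) { live_objects_.insert(handle); }   // PostCallRecordCreate*
  void RecordDestroy(std::uint64_t handle) { live_objects_.erase(handle); }   // PreCallRecordDestroy*

  // Shaped like PreCallValidateDestroyValidationCacheEXT above: accumulate
  // per-parameter checks into one skip flag.
  bool PreCallValidateDestroy(FakeDevice device, FakeValidationCache cache) const {
    bool skip = false;
    skip |= ValidateObject(device, /*optional_allowed=*/false);
    skip |= ValidateObject(cache, /*optional_allowed=*/true);
    return skip;
  }

  // Shaped like PreCallRecordDestroyValidationCacheEXT above.
  void PreCallRecordDestroy(FakeValidationCache cache) { RecordDestroy(cache); }

 private:
  std::unordered_set<std::uint64_t> live_objects_;
};

int main() {
  FakeObjectLifetimes tracker;
  const FakeDevice device = 0x1;
  const FakeValidationCache cache = 0x2;

  tracker.RecordCreate(device);
  tracker.RecordCreate(cache);  // as a PostCallRecordCreate* hook would

  // First destroy: both handles are known, so nothing is flagged (prints skip=0).
  std::cout << "skip=" << tracker.PreCallValidateDestroy(device, cache) << "\n";
  tracker.PreCallRecordDestroy(cache);

  // Destroying the same cache handle again is flagged (prints skip=1).
  std::cout << "skip=" << tracker.PreCallValidateDestroy(device, cache) << "\n";
  return 0;
}

The split between the const validate pass and the non-const record pass is what lets the layer report use-after-destroy and unknown-handle errors before the call is dispatched, while keeping the bookkeeping writes out of the validation path.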
diff --git a/src/third_party/vulkan-validation-layers/src/layers/generated/object_tracker.h b/src/third_party/vulkan-validation-layers/src/layers/generated/object_tracker.h
new file mode 100644
index 0000000..8b86d05
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/generated/object_tracker.h
@@ -0,0 +1,2354 @@
+// *** THIS FILE IS GENERATED - DO NOT EDIT ***
+// See object_tracker_generator.py for modifications
+
+
+/***************************************************************************
+ *
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ *
+ ****************************************************************************/
+
+
+
+
+void PostCallRecordCreateInstance(
+    const VkInstanceCreateInfo*                 pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkInstance*                                 pInstance,
+    VkResult                                    result);
+bool PreCallValidateDestroyInstance(
+    VkInstance                                  instance,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroyInstance(
+    VkInstance                                  instance,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateEnumeratePhysicalDevices(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceCount,
+    VkPhysicalDevice*                           pPhysicalDevices) const;
+void PostCallRecordEnumeratePhysicalDevices(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceCount,
+    VkPhysicalDevice*                           pPhysicalDevices,
+    VkResult                                    result);
+bool PreCallValidateGetPhysicalDeviceFeatures(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures*                   pFeatures) const;
+bool PreCallValidateGetPhysicalDeviceFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties*                         pFormatProperties) const;
+bool PreCallValidateGetPhysicalDeviceImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageCreateFlags                          flags,
+    VkImageFormatProperties*                    pImageFormatProperties) const;
+bool PreCallValidateGetPhysicalDeviceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties*                 pProperties) const;
+bool PreCallValidateGetPhysicalDeviceQueueFamilyProperties(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties*                    pQueueFamilyProperties) const;
+bool PreCallValidateGetPhysicalDeviceMemoryProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties*           pMemoryProperties) const;
+bool PreCallValidateGetInstanceProcAddr(
+    VkInstance                                  instance,
+    const char*                                 pName) const;
+bool PreCallValidateGetDeviceProcAddr(
+    VkDevice                                    device,
+    const char*                                 pName) const;
+bool PreCallValidateCreateDevice(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDeviceCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDevice*                                   pDevice) const;
+void PostCallRecordCreateDevice(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDeviceCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDevice*                                   pDevice,
+    VkResult                                    result);
+bool PreCallValidateDestroyDevice(
+    VkDevice                                    device,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroyDevice(
+    VkDevice                                    device,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateEnumerateDeviceExtensionProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const char*                                 pLayerName,
+    uint32_t*                                   pPropertyCount,
+    VkExtensionProperties*                      pProperties) const;
+bool PreCallValidateEnumerateDeviceLayerProperties(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkLayerProperties*                          pProperties) const;
+bool PreCallValidateGetDeviceQueue(
+    VkDevice                                    device,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t                                    queueIndex,
+    VkQueue*                                    pQueue) const;
+void PostCallRecordGetDeviceQueue(
+    VkDevice                                    device,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t                                    queueIndex,
+    VkQueue*                                    pQueue);
+bool PreCallValidateQueueSubmit(
+    VkQueue                                     queue,
+    uint32_t                                    submitCount,
+    const VkSubmitInfo*                         pSubmits,
+    VkFence                                     fence) const;
+bool PreCallValidateQueueWaitIdle(
+    VkQueue                                     queue) const;
+bool PreCallValidateDeviceWaitIdle(
+    VkDevice                                    device) const;
+bool PreCallValidateAllocateMemory(
+    VkDevice                                    device,
+    const VkMemoryAllocateInfo*                 pAllocateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDeviceMemory*                             pMemory) const;
+void PostCallRecordAllocateMemory(
+    VkDevice                                    device,
+    const VkMemoryAllocateInfo*                 pAllocateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDeviceMemory*                             pMemory,
+    VkResult                                    result);
+bool PreCallValidateFreeMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordFreeMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateMapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                offset,
+    VkDeviceSize                                size,
+    VkMemoryMapFlags                            flags,
+    void**                                      ppData) const;
+bool PreCallValidateUnmapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory) const;
+bool PreCallValidateFlushMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges) const;
+bool PreCallValidateInvalidateMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges) const;
+bool PreCallValidateGetDeviceMemoryCommitment(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize*                               pCommittedMemoryInBytes) const;
+bool PreCallValidateBindBufferMemory(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset) const;
+bool PreCallValidateBindImageMemory(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset) const;
+bool PreCallValidateGetBufferMemoryRequirements(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkMemoryRequirements*                       pMemoryRequirements) const;
+bool PreCallValidateGetImageMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkMemoryRequirements*                       pMemoryRequirements) const;
+bool PreCallValidateGetImageSparseMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements*            pSparseMemoryRequirements) const;
+bool PreCallValidateGetPhysicalDeviceSparseImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkSampleCountFlagBits                       samples,
+    VkImageUsageFlags                           usage,
+    VkImageTiling                               tiling,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties*              pProperties) const;
+bool PreCallValidateQueueBindSparse(
+    VkQueue                                     queue,
+    uint32_t                                    bindInfoCount,
+    const VkBindSparseInfo*                     pBindInfo,
+    VkFence                                     fence) const;
+bool PreCallValidateCreateFence(
+    VkDevice                                    device,
+    const VkFenceCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence) const;
+void PostCallRecordCreateFence(
+    VkDevice                                    device,
+    const VkFenceCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence,
+    VkResult                                    result);
+bool PreCallValidateDestroyFence(
+    VkDevice                                    device,
+    VkFence                                     fence,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroyFence(
+    VkDevice                                    device,
+    VkFence                                     fence,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateResetFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences) const;
+bool PreCallValidateGetFenceStatus(
+    VkDevice                                    device,
+    VkFence                                     fence) const;
+bool PreCallValidateWaitForFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences,
+    VkBool32                                    waitAll,
+    uint64_t                                    timeout) const;
+bool PreCallValidateCreateSemaphore(
+    VkDevice                                    device,
+    const VkSemaphoreCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSemaphore*                                pSemaphore) const;
+void PostCallRecordCreateSemaphore(
+    VkDevice                                    device,
+    const VkSemaphoreCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSemaphore*                                pSemaphore,
+    VkResult                                    result);
+bool PreCallValidateDestroySemaphore(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroySemaphore(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateCreateEvent(
+    VkDevice                                    device,
+    const VkEventCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkEvent*                                    pEvent) const;
+void PostCallRecordCreateEvent(
+    VkDevice                                    device,
+    const VkEventCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkEvent*                                    pEvent,
+    VkResult                                    result);
+bool PreCallValidateDestroyEvent(
+    VkDevice                                    device,
+    VkEvent                                     event,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroyEvent(
+    VkDevice                                    device,
+    VkEvent                                     event,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateGetEventStatus(
+    VkDevice                                    device,
+    VkEvent                                     event) const;
+bool PreCallValidateSetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event) const;
+bool PreCallValidateResetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event) const;
+bool PreCallValidateCreateQueryPool(
+    VkDevice                                    device,
+    const VkQueryPoolCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkQueryPool*                                pQueryPool) const;
+void PostCallRecordCreateQueryPool(
+    VkDevice                                    device,
+    const VkQueryPoolCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkQueryPool*                                pQueryPool,
+    VkResult                                    result);
+bool PreCallValidateDestroyQueryPool(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroyQueryPool(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateGetQueryPoolResults(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    size_t                                      dataSize,
+    void*                                       pData,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags) const;
+bool PreCallValidateCreateBuffer(
+    VkDevice                                    device,
+    const VkBufferCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBuffer*                                   pBuffer) const;
+void PostCallRecordCreateBuffer(
+    VkDevice                                    device,
+    const VkBufferCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBuffer*                                   pBuffer,
+    VkResult                                    result);
+bool PreCallValidateDestroyBuffer(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroyBuffer(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateCreateBufferView(
+    VkDevice                                    device,
+    const VkBufferViewCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBufferView*                               pView) const;
+void PostCallRecordCreateBufferView(
+    VkDevice                                    device,
+    const VkBufferViewCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBufferView*                               pView,
+    VkResult                                    result);
+bool PreCallValidateDestroyBufferView(
+    VkDevice                                    device,
+    VkBufferView                                bufferView,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroyBufferView(
+    VkDevice                                    device,
+    VkBufferView                                bufferView,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateCreateImage(
+    VkDevice                                    device,
+    const VkImageCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImage*                                    pImage) const;
+void PostCallRecordCreateImage(
+    VkDevice                                    device,
+    const VkImageCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImage*                                    pImage,
+    VkResult                                    result);
+bool PreCallValidateDestroyImage(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroyImage(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateGetImageSubresourceLayout(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkImageSubresource*                   pSubresource,
+    VkSubresourceLayout*                        pLayout) const;
+bool PreCallValidateCreateImageView(
+    VkDevice                                    device,
+    const VkImageViewCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImageView*                                pView) const;
+void PostCallRecordCreateImageView(
+    VkDevice                                    device,
+    const VkImageViewCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImageView*                                pView,
+    VkResult                                    result);
+bool PreCallValidateDestroyImageView(
+    VkDevice                                    device,
+    VkImageView                                 imageView,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroyImageView(
+    VkDevice                                    device,
+    VkImageView                                 imageView,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateCreateShaderModule(
+    VkDevice                                    device,
+    const VkShaderModuleCreateInfo*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkShaderModule*                             pShaderModule) const;
+void PostCallRecordCreateShaderModule(
+    VkDevice                                    device,
+    const VkShaderModuleCreateInfo*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkShaderModule*                             pShaderModule,
+    VkResult                                    result);
+bool PreCallValidateDestroyShaderModule(
+    VkDevice                                    device,
+    VkShaderModule                              shaderModule,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroyShaderModule(
+    VkDevice                                    device,
+    VkShaderModule                              shaderModule,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateCreatePipelineCache(
+    VkDevice                                    device,
+    const VkPipelineCacheCreateInfo*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineCache*                            pPipelineCache) const;
+void PostCallRecordCreatePipelineCache(
+    VkDevice                                    device,
+    const VkPipelineCacheCreateInfo*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineCache*                            pPipelineCache,
+    VkResult                                    result);
+bool PreCallValidateDestroyPipelineCache(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroyPipelineCache(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateGetPipelineCacheData(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    size_t*                                     pDataSize,
+    void*                                       pData) const;
+bool PreCallValidateMergePipelineCaches(
+    VkDevice                                    device,
+    VkPipelineCache                             dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkPipelineCache*                      pSrcCaches) const;
+bool PreCallValidateCreateGraphicsPipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkGraphicsPipelineCreateInfo*         pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines) const;
+void PostCallRecordCreateGraphicsPipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkGraphicsPipelineCreateInfo*         pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines,
+    VkResult                                    result);
+bool PreCallValidateCreateComputePipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkComputePipelineCreateInfo*          pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines) const;
+void PostCallRecordCreateComputePipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkComputePipelineCreateInfo*          pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines,
+    VkResult                                    result);
+bool PreCallValidateDestroyPipeline(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroyPipeline(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateCreatePipelineLayout(
+    VkDevice                                    device,
+    const VkPipelineLayoutCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineLayout*                           pPipelineLayout) const;
+void PostCallRecordCreatePipelineLayout(
+    VkDevice                                    device,
+    const VkPipelineLayoutCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineLayout*                           pPipelineLayout,
+    VkResult                                    result);
+bool PreCallValidateDestroyPipelineLayout(
+    VkDevice                                    device,
+    VkPipelineLayout                            pipelineLayout,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroyPipelineLayout(
+    VkDevice                                    device,
+    VkPipelineLayout                            pipelineLayout,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateCreateSampler(
+    VkDevice                                    device,
+    const VkSamplerCreateInfo*                  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSampler*                                  pSampler) const;
+void PostCallRecordCreateSampler(
+    VkDevice                                    device,
+    const VkSamplerCreateInfo*                  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSampler*                                  pSampler,
+    VkResult                                    result);
+bool PreCallValidateDestroySampler(
+    VkDevice                                    device,
+    VkSampler                                   sampler,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroySampler(
+    VkDevice                                    device,
+    VkSampler                                   sampler,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateCreateDescriptorSetLayout(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorSetLayout*                      pSetLayout) const;
+void PostCallRecordCreateDescriptorSetLayout(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorSetLayout*                      pSetLayout,
+    VkResult                                    result);
+bool PreCallValidateDestroyDescriptorSetLayout(
+    VkDevice                                    device,
+    VkDescriptorSetLayout                       descriptorSetLayout,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroyDescriptorSetLayout(
+    VkDevice                                    device,
+    VkDescriptorSetLayout                       descriptorSetLayout,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateCreateDescriptorPool(
+    VkDevice                                    device,
+    const VkDescriptorPoolCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorPool*                           pDescriptorPool) const;
+void PostCallRecordCreateDescriptorPool(
+    VkDevice                                    device,
+    const VkDescriptorPoolCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorPool*                           pDescriptorPool,
+    VkResult                                    result);
+bool PreCallValidateDestroyDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroyDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateResetDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    VkDescriptorPoolResetFlags                  flags) const;
+bool PreCallValidateAllocateDescriptorSets(
+    VkDevice                                    device,
+    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
+    VkDescriptorSet*                            pDescriptorSets) const;
+void PostCallRecordAllocateDescriptorSets(
+    VkDevice                                    device,
+    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
+    VkDescriptorSet*                            pDescriptorSets,
+    VkResult                                    result);
+bool PreCallValidateFreeDescriptorSets(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets) const;
+bool PreCallValidateUpdateDescriptorSets(
+    VkDevice                                    device,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites,
+    uint32_t                                    descriptorCopyCount,
+    const VkCopyDescriptorSet*                  pDescriptorCopies) const;
+bool PreCallValidateCreateFramebuffer(
+    VkDevice                                    device,
+    const VkFramebufferCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFramebuffer*                              pFramebuffer) const;
+void PostCallRecordCreateFramebuffer(
+    VkDevice                                    device,
+    const VkFramebufferCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFramebuffer*                              pFramebuffer,
+    VkResult                                    result);
+bool PreCallValidateDestroyFramebuffer(
+    VkDevice                                    device,
+    VkFramebuffer                               framebuffer,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroyFramebuffer(
+    VkDevice                                    device,
+    VkFramebuffer                               framebuffer,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateCreateRenderPass(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass) const;
+void PostCallRecordCreateRenderPass(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass,
+    VkResult                                    result);
+bool PreCallValidateDestroyRenderPass(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroyRenderPass(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateGetRenderAreaGranularity(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    VkExtent2D*                                 pGranularity) const;
+bool PreCallValidateCreateCommandPool(
+    VkDevice                                    device,
+    const VkCommandPoolCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkCommandPool*                              pCommandPool) const;
+void PostCallRecordCreateCommandPool(
+    VkDevice                                    device,
+    const VkCommandPoolCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkCommandPool*                              pCommandPool,
+    VkResult                                    result);
+bool PreCallValidateDestroyCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroyCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateResetCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolResetFlags                     flags) const;
+bool PreCallValidateAllocateCommandBuffers(
+    VkDevice                                    device,
+    const VkCommandBufferAllocateInfo*          pAllocateInfo,
+    VkCommandBuffer*                            pCommandBuffers) const;
+void PostCallRecordAllocateCommandBuffers(
+    VkDevice                                    device,
+    const VkCommandBufferAllocateInfo*          pAllocateInfo,
+    VkCommandBuffer*                            pCommandBuffers,
+    VkResult                                    result);
+bool PreCallValidateFreeCommandBuffers(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers) const;
+bool PreCallValidateBeginCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    const VkCommandBufferBeginInfo*             pBeginInfo) const;
+bool PreCallValidateEndCommandBuffer(
+    VkCommandBuffer                             commandBuffer) const;
+bool PreCallValidateResetCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkCommandBufferResetFlags                   flags) const;
+bool PreCallValidateCmdBindPipeline(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipeline                                  pipeline) const;
+bool PreCallValidateCmdSetViewport(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewport*                           pViewports) const;
+bool PreCallValidateCmdSetScissor(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstScissor,
+    uint32_t                                    scissorCount,
+    const VkRect2D*                             pScissors) const;
+bool PreCallValidateCmdSetLineWidth(
+    VkCommandBuffer                             commandBuffer,
+    float                                       lineWidth) const;
+bool PreCallValidateCmdSetDepthBias(
+    VkCommandBuffer                             commandBuffer,
+    float                                       depthBiasConstantFactor,
+    float                                       depthBiasClamp,
+    float                                       depthBiasSlopeFactor) const;
+bool PreCallValidateCmdSetBlendConstants(
+    VkCommandBuffer                             commandBuffer,
+    const float                                 blendConstants[4]) const;
+bool PreCallValidateCmdSetDepthBounds(
+    VkCommandBuffer                             commandBuffer,
+    float                                       minDepthBounds,
+    float                                       maxDepthBounds) const;
+bool PreCallValidateCmdSetStencilCompareMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    compareMask) const;
+bool PreCallValidateCmdSetStencilWriteMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    writeMask) const;
+bool PreCallValidateCmdSetStencilReference(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    reference) const;
+bool PreCallValidateCmdBindDescriptorSets(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    firstSet,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets,
+    uint32_t                                    dynamicOffsetCount,
+    const uint32_t*                             pDynamicOffsets) const;
+bool PreCallValidateCmdBindIndexBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkIndexType                                 indexType) const;
+bool PreCallValidateCmdBindVertexBuffers(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets) const;
+bool PreCallValidateCmdDraw(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    vertexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstVertex,
+    uint32_t                                    firstInstance) const;
+bool PreCallValidateCmdDrawIndexed(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    indexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstIndex,
+    int32_t                                     vertexOffset,
+    uint32_t                                    firstInstance) const;
+bool PreCallValidateCmdDrawIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride) const;
+bool PreCallValidateCmdDrawIndexedIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride) const;
+bool PreCallValidateCmdDispatch(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ) const;
+bool PreCallValidateCmdDispatchIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset) const;
+bool PreCallValidateCmdCopyBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferCopy*                         pRegions) const;
+bool PreCallValidateCmdCopyImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageCopy*                          pRegions) const;
+bool PreCallValidateCmdBlitImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageBlit*                          pRegions,
+    VkFilter                                    filter) const;
+bool PreCallValidateCmdCopyBufferToImage(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions) const;
+bool PreCallValidateCmdCopyImageToBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions) const;
+bool PreCallValidateCmdUpdateBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                dataSize,
+    const void*                                 pData) const;
+bool PreCallValidateCmdFillBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                size,
+    uint32_t                                    data) const;
+bool PreCallValidateCmdClearColorImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearColorValue*                    pColor,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges) const;
+bool PreCallValidateCmdClearDepthStencilImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearDepthStencilValue*             pDepthStencil,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges) const;
+bool PreCallValidateCmdClearAttachments(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    attachmentCount,
+    const VkClearAttachment*                    pAttachments,
+    uint32_t                                    rectCount,
+    const VkClearRect*                          pRects) const;
+bool PreCallValidateCmdResolveImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageResolve*                       pRegions) const;
+bool PreCallValidateCmdSetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask) const;
+bool PreCallValidateCmdResetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask) const;
+bool PreCallValidateCmdWaitEvents(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    eventCount,
+    const VkEvent*                              pEvents,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers) const;
+bool PreCallValidateCmdPipelineBarrier(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    VkDependencyFlags                           dependencyFlags,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers) const;
+bool PreCallValidateCmdBeginQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags) const;
+bool PreCallValidateCmdEndQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query) const;
+bool PreCallValidateCmdResetQueryPool(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount) const;
+bool PreCallValidateCmdWriteTimestamp(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query) const;
+bool PreCallValidateCmdCopyQueryPoolResults(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags) const;
+bool PreCallValidateCmdPushConstants(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineLayout                            layout,
+    VkShaderStageFlags                          stageFlags,
+    uint32_t                                    offset,
+    uint32_t                                    size,
+    const void*                                 pValues) const;
+bool PreCallValidateCmdBeginRenderPass(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    VkSubpassContents                           contents) const;
+bool PreCallValidateCmdNextSubpass(
+    VkCommandBuffer                             commandBuffer,
+    VkSubpassContents                           contents) const;
+bool PreCallValidateCmdEndRenderPass(
+    VkCommandBuffer                             commandBuffer) const;
+bool PreCallValidateCmdExecuteCommands(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers) const;
+bool PreCallValidateBindBufferMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos) const;
+bool PreCallValidateBindImageMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos) const;
+bool PreCallValidateGetDeviceGroupPeerMemoryFeatures(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures) const;
+bool PreCallValidateCmdSetDeviceMask(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask) const;
+bool PreCallValidateCmdDispatchBase(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ) const;
+bool PreCallValidateEnumeratePhysicalDeviceGroups(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties) const;
+bool PreCallValidateGetImageMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) const;
+bool PreCallValidateGetBufferMemoryRequirements2(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) const;
+bool PreCallValidateGetImageSparseMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements) const;
+bool PreCallValidateGetPhysicalDeviceFeatures2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures2*                  pFeatures) const;
+bool PreCallValidateGetPhysicalDeviceProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties2*                pProperties) const;
+bool PreCallValidateGetPhysicalDeviceFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties2*                        pFormatProperties) const;
+bool PreCallValidateGetPhysicalDeviceImageFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceImageFormatInfo2*     pImageFormatInfo,
+    VkImageFormatProperties2*                   pImageFormatProperties) const;
+bool PreCallValidateGetPhysicalDeviceQueueFamilyProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties2*                   pQueueFamilyProperties) const;
+bool PreCallValidateGetPhysicalDeviceMemoryProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties2*          pMemoryProperties) const;
+bool PreCallValidateGetPhysicalDeviceSparseImageFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties2*             pProperties) const;
+bool PreCallValidateTrimCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags) const;
+bool PreCallValidateGetDeviceQueue2(
+    VkDevice                                    device,
+    const VkDeviceQueueInfo2*                   pQueueInfo,
+    VkQueue*                                    pQueue) const;
+void PostCallRecordGetDeviceQueue2(
+    VkDevice                                    device,
+    const VkDeviceQueueInfo2*                   pQueueInfo,
+    VkQueue*                                    pQueue);
+bool PreCallValidateCreateSamplerYcbcrConversion(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion) const;
+void PostCallRecordCreateSamplerYcbcrConversion(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion,
+    VkResult                                    result);
+bool PreCallValidateDestroySamplerYcbcrConversion(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroySamplerYcbcrConversion(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateCreateDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate) const;
+void PostCallRecordCreateDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate,
+    VkResult                                    result);
+bool PreCallValidateDestroyDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroyDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateUpdateDescriptorSetWithTemplate(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData) const;
+bool PreCallValidateGetPhysicalDeviceExternalBufferProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalBufferInfo*   pExternalBufferInfo,
+    VkExternalBufferProperties*                 pExternalBufferProperties) const;
+bool PreCallValidateGetPhysicalDeviceExternalFenceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalFenceInfo*    pExternalFenceInfo,
+    VkExternalFenceProperties*                  pExternalFenceProperties) const;
+bool PreCallValidateGetPhysicalDeviceExternalSemaphoreProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
+    VkExternalSemaphoreProperties*              pExternalSemaphoreProperties) const;
+bool PreCallValidateGetDescriptorSetLayoutSupport(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport) const;
+bool PreCallValidateDestroySurfaceKHR(
+    VkInstance                                  instance,
+    VkSurfaceKHR                                surface,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroySurfaceKHR(
+    VkInstance                                  instance,
+    VkSurfaceKHR                                surface,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateGetPhysicalDeviceSurfaceSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    VkSurfaceKHR                                surface,
+    VkBool32*                                   pSupported) const;
+bool PreCallValidateGetPhysicalDeviceSurfaceCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilitiesKHR*                   pSurfaceCapabilities) const;
+bool PreCallValidateGetPhysicalDeviceSurfaceFormatsKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormatKHR*                         pSurfaceFormats) const;
+bool PreCallValidateGetPhysicalDeviceSurfacePresentModesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes) const;
+bool PreCallValidateCreateSwapchainKHR(
+    VkDevice                                    device,
+    const VkSwapchainCreateInfoKHR*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchain) const;
+void PostCallRecordCreateSwapchainKHR(
+    VkDevice                                    device,
+    const VkSwapchainCreateInfoKHR*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchain,
+    VkResult                                    result);
+bool PreCallValidateDestroySwapchainKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroySwapchainKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateGetSwapchainImagesKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pSwapchainImageCount,
+    VkImage*                                    pSwapchainImages) const;
+void PostCallRecordGetSwapchainImagesKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pSwapchainImageCount,
+    VkImage*                                    pSwapchainImages,
+    VkResult                                    result);
+bool PreCallValidateAcquireNextImageKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint64_t                                    timeout,
+    VkSemaphore                                 semaphore,
+    VkFence                                     fence,
+    uint32_t*                                   pImageIndex) const;
+bool PreCallValidateQueuePresentKHR(
+    VkQueue                                     queue,
+    const VkPresentInfoKHR*                     pPresentInfo) const;
+bool PreCallValidateGetDeviceGroupPresentCapabilitiesKHR(
+    VkDevice                                    device,
+    VkDeviceGroupPresentCapabilitiesKHR*        pDeviceGroupPresentCapabilities) const;
+bool PreCallValidateGetDeviceGroupSurfacePresentModesKHR(
+    VkDevice                                    device,
+    VkSurfaceKHR                                surface,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes) const;
+bool PreCallValidateGetPhysicalDevicePresentRectanglesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pRectCount,
+    VkRect2D*                                   pRects) const;
+bool PreCallValidateAcquireNextImage2KHR(
+    VkDevice                                    device,
+    const VkAcquireNextImageInfoKHR*            pAcquireInfo,
+    uint32_t*                                   pImageIndex) const;
+bool PreCallValidateGetPhysicalDeviceDisplayPropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPropertiesKHR*                     pProperties) const;
+bool PreCallValidateGetPhysicalDeviceDisplayPlanePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPlanePropertiesKHR*                pProperties) const;
+bool PreCallValidateGetDisplayPlaneSupportedDisplaysKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    planeIndex,
+    uint32_t*                                   pDisplayCount,
+    VkDisplayKHR*                               pDisplays) const;
+void PostCallRecordGetDisplayPlaneSupportedDisplaysKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    planeIndex,
+    uint32_t*                                   pDisplayCount,
+    VkDisplayKHR*                               pDisplays,
+    VkResult                                    result);
+bool PreCallValidateGetDisplayModePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModePropertiesKHR*                 pProperties) const;
+bool PreCallValidateCreateDisplayModeKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    const VkDisplayModeCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDisplayModeKHR*                           pMode) const;
+void PostCallRecordCreateDisplayModeKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    const VkDisplayModeCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDisplayModeKHR*                           pMode,
+    VkResult                                    result);
+bool PreCallValidateGetDisplayPlaneCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayModeKHR                            mode,
+    uint32_t                                    planeIndex,
+    VkDisplayPlaneCapabilitiesKHR*              pCapabilities) const;
+bool PreCallValidateCreateDisplayPlaneSurfaceKHR(
+    VkInstance                                  instance,
+    const VkDisplaySurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const;
+void PostCallRecordCreateDisplayPlaneSurfaceKHR(
+    VkInstance                                  instance,
+    const VkDisplaySurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result);
+bool PreCallValidateCreateSharedSwapchainsKHR(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainCreateInfoKHR*             pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchains) const;
+void PostCallRecordCreateSharedSwapchainsKHR(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainCreateInfoKHR*             pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchains,
+    VkResult                                    result);
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+bool PreCallValidateCreateXlibSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXlibSurfaceCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const;
+void PostCallRecordCreateXlibSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXlibSurfaceCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_XLIB_KHR
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+bool PreCallValidateGetPhysicalDeviceXlibPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    Display*                                    dpy,
+    VisualID                                    visualID) const;
+#endif // VK_USE_PLATFORM_XLIB_KHR
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+bool PreCallValidateCreateXcbSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXcbSurfaceCreateInfoKHR*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const;
+void PostCallRecordCreateXcbSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXcbSurfaceCreateInfoKHR*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_XCB_KHR
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+bool PreCallValidateGetPhysicalDeviceXcbPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    xcb_connection_t*                           connection,
+    xcb_visualid_t                              visual_id) const;
+#endif // VK_USE_PLATFORM_XCB_KHR
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+bool PreCallValidateCreateWaylandSurfaceKHR(
+    VkInstance                                  instance,
+    const VkWaylandSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const;
+void PostCallRecordCreateWaylandSurfaceKHR(
+    VkInstance                                  instance,
+    const VkWaylandSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+bool PreCallValidateGetPhysicalDeviceWaylandPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    struct wl_display*                          display) const;
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+bool PreCallValidateCreateAndroidSurfaceKHR(
+    VkInstance                                  instance,
+    const VkAndroidSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const;
+void PostCallRecordCreateAndroidSurfaceKHR(
+    VkInstance                                  instance,
+    const VkAndroidSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateCreateWin32SurfaceKHR(
+    VkInstance                                  instance,
+    const VkWin32SurfaceCreateInfoKHR*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const;
+void PostCallRecordCreateWin32SurfaceKHR(
+    VkInstance                                  instance,
+    const VkWin32SurfaceCreateInfoKHR*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateGetPhysicalDeviceWin32PresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex) const;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateGetPhysicalDeviceFeatures2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures2*                  pFeatures) const;
+bool PreCallValidateGetPhysicalDeviceProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties2*                pProperties) const;
+bool PreCallValidateGetPhysicalDeviceFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties2*                        pFormatProperties) const;
+bool PreCallValidateGetPhysicalDeviceImageFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceImageFormatInfo2*     pImageFormatInfo,
+    VkImageFormatProperties2*                   pImageFormatProperties) const;
+bool PreCallValidateGetPhysicalDeviceQueueFamilyProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties2*                   pQueueFamilyProperties) const;
+bool PreCallValidateGetPhysicalDeviceMemoryProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties2*          pMemoryProperties) const;
+bool PreCallValidateGetPhysicalDeviceSparseImageFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties2*             pProperties) const;
+bool PreCallValidateGetDeviceGroupPeerMemoryFeaturesKHR(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures) const;
+bool PreCallValidateCmdSetDeviceMaskKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask) const;
+bool PreCallValidateCmdDispatchBaseKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ) const;
+bool PreCallValidateTrimCommandPoolKHR(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags) const;
+bool PreCallValidateEnumeratePhysicalDeviceGroupsKHR(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties) const;
+bool PreCallValidateGetPhysicalDeviceExternalBufferPropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalBufferInfo*   pExternalBufferInfo,
+    VkExternalBufferProperties*                 pExternalBufferProperties) const;
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateGetMemoryWin32HandleKHR(
+    VkDevice                                    device,
+    const VkMemoryGetWin32HandleInfoKHR*        pGetWin32HandleInfo,
+    HANDLE*                                     pHandle) const;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateGetMemoryWin32HandlePropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    HANDLE                                      handle,
+    VkMemoryWin32HandlePropertiesKHR*           pMemoryWin32HandleProperties) const;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateGetMemoryFdKHR(
+    VkDevice                                    device,
+    const VkMemoryGetFdInfoKHR*                 pGetFdInfo,
+    int*                                        pFd) const;
+bool PreCallValidateGetMemoryFdPropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    int                                         fd,
+    VkMemoryFdPropertiesKHR*                    pMemoryFdProperties) const;
+bool PreCallValidateGetPhysicalDeviceExternalSemaphorePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
+    VkExternalSemaphoreProperties*              pExternalSemaphoreProperties) const;
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateImportSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreWin32HandleInfoKHR*  pImportSemaphoreWin32HandleInfo) const;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateGetSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetWin32HandleInfoKHR*     pGetWin32HandleInfo,
+    HANDLE*                                     pHandle) const;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateImportSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreFdInfoKHR*           pImportSemaphoreFdInfo) const;
+bool PreCallValidateGetSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetFdInfoKHR*              pGetFdInfo,
+    int*                                        pFd) const;
+bool PreCallValidateCmdPushDescriptorSetKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites) const;
+bool PreCallValidateCmdPushDescriptorSetWithTemplateKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    const void*                                 pData) const;
+bool PreCallValidateCreateDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate) const;
+void PostCallRecordCreateDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate,
+    VkResult                                    result);
+bool PreCallValidateDestroyDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroyDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateUpdateDescriptorSetWithTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData) const;
+bool PreCallValidateCreateRenderPass2KHR(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo2KHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass) const;
+void PostCallRecordCreateRenderPass2KHR(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo2KHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass,
+    VkResult                                    result);
+bool PreCallValidateCmdBeginRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo) const;
+bool PreCallValidateCmdNextSubpass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo) const;
+bool PreCallValidateCmdEndRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo) const;
+bool PreCallValidateGetSwapchainStatusKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain) const;
+bool PreCallValidateGetPhysicalDeviceExternalFencePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalFenceInfo*    pExternalFenceInfo,
+    VkExternalFenceProperties*                  pExternalFenceProperties) const;
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateImportFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportFenceWin32HandleInfoKHR*      pImportFenceWin32HandleInfo) const;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateGetFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkFenceGetWin32HandleInfoKHR*         pGetWin32HandleInfo,
+    HANDLE*                                     pHandle) const;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateImportFenceFdKHR(
+    VkDevice                                    device,
+    const VkImportFenceFdInfoKHR*               pImportFenceFdInfo) const;
+bool PreCallValidateGetFenceFdKHR(
+    VkDevice                                    device,
+    const VkFenceGetFdInfoKHR*                  pGetFdInfo,
+    int*                                        pFd) const;
+bool PreCallValidateEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t*                                   pCounterCount,
+    VkPerformanceCounterKHR*                    pCounters,
+    VkPerformanceCounterDescriptionKHR*         pCounterDescriptions) const;
+bool PreCallValidateGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkQueryPoolPerformanceCreateInfoKHR*  pPerformanceQueryCreateInfo,
+    uint32_t*                                   pNumPasses) const;
+bool PreCallValidateAcquireProfilingLockKHR(
+    VkDevice                                    device,
+    const VkAcquireProfilingLockInfoKHR*        pInfo) const;
+bool PreCallValidateReleaseProfilingLockKHR(
+    VkDevice                                    device) const;
+bool PreCallValidateGetPhysicalDeviceSurfaceCapabilities2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkSurfaceCapabilities2KHR*                  pSurfaceCapabilities) const;
+bool PreCallValidateGetPhysicalDeviceSurfaceFormats2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormat2KHR*                        pSurfaceFormats) const;
+bool PreCallValidateGetPhysicalDeviceDisplayProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayProperties2KHR*                    pProperties) const;
+bool PreCallValidateGetPhysicalDeviceDisplayPlaneProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPlaneProperties2KHR*               pProperties) const;
+bool PreCallValidateGetDisplayModeProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModeProperties2KHR*                pProperties) const;
+bool PreCallValidateGetDisplayPlaneCapabilities2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDisplayPlaneInfo2KHR*               pDisplayPlaneInfo,
+    VkDisplayPlaneCapabilities2KHR*             pCapabilities) const;
+bool PreCallValidateGetImageMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) const;
+bool PreCallValidateGetBufferMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) const;
+bool PreCallValidateGetImageSparseMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements) const;
+bool PreCallValidateCreateSamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion) const;
+void PostCallRecordCreateSamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion,
+    VkResult                                    result);
+bool PreCallValidateDestroySamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroySamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateBindBufferMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos) const;
+bool PreCallValidateBindImageMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos) const;
+bool PreCallValidateGetDescriptorSetLayoutSupportKHR(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport) const;
+bool PreCallValidateCmdDrawIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) const;
+bool PreCallValidateCmdDrawIndexedIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) const;
+bool PreCallValidateGetSemaphoreCounterValueKHR(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    uint64_t*                                   pValue) const;
+bool PreCallValidateWaitSemaphoresKHR(
+    VkDevice                                    device,
+    const VkSemaphoreWaitInfoKHR*               pWaitInfo,
+    uint64_t                                    timeout) const;
+bool PreCallValidateSignalSemaphoreKHR(
+    VkDevice                                    device,
+    const VkSemaphoreSignalInfoKHR*             pSignalInfo) const;
+bool PreCallValidateGetBufferDeviceAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo) const;
+bool PreCallValidateGetBufferOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo) const;
+bool PreCallValidateGetDeviceMemoryOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo) const;
+bool PreCallValidateGetPipelineExecutablePropertiesKHR(
+    VkDevice                                    device,
+    const VkPipelineInfoKHR*                    pPipelineInfo,
+    uint32_t*                                   pExecutableCount,
+    VkPipelineExecutablePropertiesKHR*          pProperties) const;
+bool PreCallValidateGetPipelineExecutableStatisticsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pStatisticCount,
+    VkPipelineExecutableStatisticKHR*           pStatistics) const;
+bool PreCallValidateGetPipelineExecutableInternalRepresentationsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pInternalRepresentationCount,
+    VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations) const;
+bool PreCallValidateCreateDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    const VkDebugReportCallbackCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugReportCallbackEXT*                   pCallback) const;
+void PostCallRecordCreateDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    const VkDebugReportCallbackCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugReportCallbackEXT*                   pCallback,
+    VkResult                                    result);
+bool PreCallValidateDestroyDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    VkDebugReportCallbackEXT                    callback,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroyDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    VkDebugReportCallbackEXT                    callback,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateDebugReportMessageEXT(
+    VkInstance                                  instance,
+    VkDebugReportFlagsEXT                       flags,
+    VkDebugReportObjectTypeEXT                  objectType,
+    uint64_t                                    object,
+    size_t                                      location,
+    int32_t                                     messageCode,
+    const char*                                 pLayerPrefix,
+    const char*                                 pMessage) const;
+bool PreCallValidateDebugMarkerSetObjectTagEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectTagInfoEXT*        pTagInfo) const;
+bool PreCallValidateDebugMarkerSetObjectNameEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectNameInfoEXT*       pNameInfo) const;
+bool PreCallValidateCmdDebugMarkerBeginEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo) const;
+bool PreCallValidateCmdDebugMarkerEndEXT(
+    VkCommandBuffer                             commandBuffer) const;
+bool PreCallValidateCmdDebugMarkerInsertEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo) const;
+bool PreCallValidateCmdBindTransformFeedbackBuffersEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets,
+    const VkDeviceSize*                         pSizes) const;
+bool PreCallValidateCmdBeginTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets) const;
+bool PreCallValidateCmdEndTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets) const;
+bool PreCallValidateCmdBeginQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags,
+    uint32_t                                    index) const;
+bool PreCallValidateCmdEndQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    uint32_t                                    index) const;
+bool PreCallValidateCmdDrawIndirectByteCountEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstInstance,
+    VkBuffer                                    counterBuffer,
+    VkDeviceSize                                counterBufferOffset,
+    uint32_t                                    counterOffset,
+    uint32_t                                    vertexStride) const;
+bool PreCallValidateGetImageViewHandleNVX(
+    VkDevice                                    device,
+    const VkImageViewHandleInfoNVX*             pInfo) const;
+bool PreCallValidateCmdDrawIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) const;
+bool PreCallValidateCmdDrawIndexedIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) const;
+bool PreCallValidateGetShaderInfoAMD(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    VkShaderStageFlagBits                       shaderStage,
+    VkShaderInfoTypeAMD                         infoType,
+    size_t*                                     pInfoSize,
+    void*                                       pInfo) const;
+
+#ifdef VK_USE_PLATFORM_GGP
+bool PreCallValidateCreateStreamDescriptorSurfaceGGP(
+    VkInstance                                  instance,
+    const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const;
+void PostCallRecordCreateStreamDescriptorSurfaceGGP(
+    VkInstance                                  instance,
+    const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_GGP
+bool PreCallValidateGetPhysicalDeviceExternalImageFormatPropertiesNV(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageCreateFlags                          flags,
+    VkExternalMemoryHandleTypeFlagsNV           externalHandleType,
+    VkExternalImageFormatPropertiesNV*          pExternalImageFormatProperties) const;
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateGetMemoryWin32HandleNV(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkExternalMemoryHandleTypeFlagsNV           handleType,
+    HANDLE*                                     pHandle) const;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_VI_NN
+bool PreCallValidateCreateViSurfaceNN(
+    VkInstance                                  instance,
+    const VkViSurfaceCreateInfoNN*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const;
+void PostCallRecordCreateViSurfaceNN(
+    VkInstance                                  instance,
+    const VkViSurfaceCreateInfoNN*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_VI_NN
+bool PreCallValidateCmdBeginConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkConditionalRenderingBeginInfoEXT*   pConditionalRenderingBegin) const;
+bool PreCallValidateCmdEndConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer) const;
+bool PreCallValidateCmdProcessCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdProcessCommandsInfoNVX*          pProcessCommandsInfo) const;
+bool PreCallValidateCmdReserveSpaceForCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdReserveSpaceForCommandsInfoNVX*  pReserveSpaceInfo) const;
+bool PreCallValidateCreateIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkIndirectCommandsLayoutNVX*                pIndirectCommandsLayout) const;
+void PostCallRecordCreateIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkIndirectCommandsLayoutNVX*                pIndirectCommandsLayout,
+    VkResult                                    result);
+bool PreCallValidateDestroyIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    VkIndirectCommandsLayoutNVX                 indirectCommandsLayout,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroyIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    VkIndirectCommandsLayoutNVX                 indirectCommandsLayout,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateCreateObjectTableNVX(
+    VkDevice                                    device,
+    const VkObjectTableCreateInfoNVX*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkObjectTableNVX*                           pObjectTable) const;
+void PostCallRecordCreateObjectTableNVX(
+    VkDevice                                    device,
+    const VkObjectTableCreateInfoNVX*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkObjectTableNVX*                           pObjectTable,
+    VkResult                                    result);
+bool PreCallValidateDestroyObjectTableNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroyObjectTableNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateRegisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectTableEntryNVX* const*         ppObjectTableEntries,
+    const uint32_t*                             pObjectIndices) const;
+bool PreCallValidateUnregisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectEntryTypeNVX*                 pObjectEntryTypes,
+    const uint32_t*                             pObjectIndices) const;
+bool PreCallValidateGetPhysicalDeviceGeneratedCommandsPropertiesNVX(
+    VkPhysicalDevice                            physicalDevice,
+    VkDeviceGeneratedCommandsFeaturesNVX*       pFeatures,
+    VkDeviceGeneratedCommandsLimitsNVX*         pLimits) const;
+bool PreCallValidateCmdSetViewportWScalingNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewportWScalingNV*                 pViewportWScalings) const;
+bool PreCallValidateReleaseDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display) const;
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+bool PreCallValidateAcquireXlibDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    VkDisplayKHR                                display) const;
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+bool PreCallValidateGetRandROutputDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    RROutput                                    rrOutput,
+    VkDisplayKHR*                               pDisplay) const;
+void PostCallRecordGetRandROutputDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    RROutput                                    rrOutput,
+    VkDisplayKHR*                               pDisplay,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+bool PreCallValidateGetPhysicalDeviceSurfaceCapabilities2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilities2EXT*                  pSurfaceCapabilities) const;
+bool PreCallValidateDisplayPowerControlEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayPowerInfoEXT*                pDisplayPowerInfo) const;
+bool PreCallValidateRegisterDeviceEventEXT(
+    VkDevice                                    device,
+    const VkDeviceEventInfoEXT*                 pDeviceEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence) const;
+void PostCallRecordRegisterDeviceEventEXT(
+    VkDevice                                    device,
+    const VkDeviceEventInfoEXT*                 pDeviceEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence,
+    VkResult                                    result);
+bool PreCallValidateRegisterDisplayEventEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayEventInfoEXT*                pDisplayEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence) const;
+void PostCallRecordRegisterDisplayEventEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayEventInfoEXT*                pDisplayEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence,
+    VkResult                                    result);
+bool PreCallValidateGetSwapchainCounterEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkSurfaceCounterFlagBitsEXT                 counter,
+    uint64_t*                                   pCounterValue) const;
+bool PreCallValidateGetRefreshCycleDurationGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkRefreshCycleDurationGOOGLE*               pDisplayTimingProperties) const;
+bool PreCallValidateGetPastPresentationTimingGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pPresentationTimingCount,
+    VkPastPresentationTimingGOOGLE*             pPresentationTimings) const;
+bool PreCallValidateCmdSetDiscardRectangleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstDiscardRectangle,
+    uint32_t                                    discardRectangleCount,
+    const VkRect2D*                             pDiscardRectangles) const;
+bool PreCallValidateSetHdrMetadataEXT(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainKHR*                       pSwapchains,
+    const VkHdrMetadataEXT*                     pMetadata) const;
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+bool PreCallValidateCreateIOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkIOSSurfaceCreateInfoMVK*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const;
+void PostCallRecordCreateIOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkIOSSurfaceCreateInfoMVK*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_IOS_MVK
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+bool PreCallValidateCreateMacOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkMacOSSurfaceCreateInfoMVK*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const;
+void PostCallRecordCreateMacOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkMacOSSurfaceCreateInfoMVK*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_MACOS_MVK
+bool PreCallValidateSetDebugUtilsObjectNameEXT(
+    VkDevice                                    device,
+    const VkDebugUtilsObjectNameInfoEXT*        pNameInfo) const;
+bool PreCallValidateSetDebugUtilsObjectTagEXT(
+    VkDevice                                    device,
+    const VkDebugUtilsObjectTagInfoEXT*         pTagInfo) const;
+bool PreCallValidateQueueBeginDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) const;
+bool PreCallValidateQueueEndDebugUtilsLabelEXT(
+    VkQueue                                     queue) const;
+bool PreCallValidateQueueInsertDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) const;
+bool PreCallValidateCmdBeginDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) const;
+bool PreCallValidateCmdEndDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer) const;
+bool PreCallValidateCmdInsertDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) const;
+bool PreCallValidateCreateDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    const VkDebugUtilsMessengerCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugUtilsMessengerEXT*                   pMessenger) const;
+void PostCallRecordCreateDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    const VkDebugUtilsMessengerCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugUtilsMessengerEXT*                   pMessenger,
+    VkResult                                    result);
+bool PreCallValidateDestroyDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessengerEXT                    messenger,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroyDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessengerEXT                    messenger,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateSubmitDebugUtilsMessageEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessageSeverityFlagBitsEXT      messageSeverity,
+    VkDebugUtilsMessageTypeFlagsEXT             messageTypes,
+    const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData) const;
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+bool PreCallValidateGetAndroidHardwareBufferPropertiesANDROID(
+    VkDevice                                    device,
+    const struct AHardwareBuffer*               buffer,
+    VkAndroidHardwareBufferPropertiesANDROID*   pProperties) const;
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+bool PreCallValidateGetMemoryAndroidHardwareBufferANDROID(
+    VkDevice                                    device,
+    const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
+    struct AHardwareBuffer**                    pBuffer) const;
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+bool PreCallValidateCmdSetSampleLocationsEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkSampleLocationsInfoEXT*             pSampleLocationsInfo) const;
+bool PreCallValidateGetPhysicalDeviceMultisamplePropertiesEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSampleCountFlagBits                       samples,
+    VkMultisamplePropertiesEXT*                 pMultisampleProperties) const;
+bool PreCallValidateGetImageDrmFormatModifierPropertiesEXT(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkImageDrmFormatModifierPropertiesEXT*      pProperties) const;
+bool PreCallValidateCreateValidationCacheEXT(
+    VkDevice                                    device,
+    const VkValidationCacheCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkValidationCacheEXT*                       pValidationCache) const;
+void PostCallRecordCreateValidationCacheEXT(
+    VkDevice                                    device,
+    const VkValidationCacheCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkValidationCacheEXT*                       pValidationCache,
+    VkResult                                    result);
+bool PreCallValidateDestroyValidationCacheEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroyValidationCacheEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateMergeValidationCachesEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkValidationCacheEXT*                 pSrcCaches) const;
+bool PreCallValidateGetValidationCacheDataEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    size_t*                                     pDataSize,
+    void*                                       pData) const;
+bool PreCallValidateCmdBindShadingRateImageNV(
+    VkCommandBuffer                             commandBuffer,
+    VkImageView                                 imageView,
+    VkImageLayout                               imageLayout) const;
+bool PreCallValidateCmdSetViewportShadingRatePaletteNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkShadingRatePaletteNV*               pShadingRatePalettes) const;
+bool PreCallValidateCmdSetCoarseSampleOrderNV(
+    VkCommandBuffer                             commandBuffer,
+    VkCoarseSampleOrderTypeNV                   sampleOrderType,
+    uint32_t                                    customSampleOrderCount,
+    const VkCoarseSampleOrderCustomNV*          pCustomSampleOrders) const;
+bool PreCallValidateCreateAccelerationStructureNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureCreateInfoNV*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkAccelerationStructureNV*                  pAccelerationStructure) const;
+void PostCallRecordCreateAccelerationStructureNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureCreateInfoNV*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkAccelerationStructureNV*                  pAccelerationStructure,
+    VkResult                                    result);
+bool PreCallValidateDestroyAccelerationStructureNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    const VkAllocationCallbacks*                pAllocator) const;
+void PreCallRecordDestroyAccelerationStructureNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    const VkAllocationCallbacks*                pAllocator);
+bool PreCallValidateGetAccelerationStructureMemoryRequirementsNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
+    VkMemoryRequirements2KHR*                   pMemoryRequirements) const;
+bool PreCallValidateBindAccelerationStructureMemoryNV(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindAccelerationStructureMemoryInfoNV* pBindInfos) const;
+bool PreCallValidateCmdBuildAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    const VkAccelerationStructureInfoNV*        pInfo,
+    VkBuffer                                    instanceData,
+    VkDeviceSize                                instanceOffset,
+    VkBool32                                    update,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkBuffer                                    scratch,
+    VkDeviceSize                                scratchOffset) const;
+bool PreCallValidateCmdCopyAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkCopyAccelerationStructureModeNV           mode) const;
+bool PreCallValidateCmdTraceRaysNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    raygenShaderBindingTableBuffer,
+    VkDeviceSize                                raygenShaderBindingOffset,
+    VkBuffer                                    missShaderBindingTableBuffer,
+    VkDeviceSize                                missShaderBindingOffset,
+    VkDeviceSize                                missShaderBindingStride,
+    VkBuffer                                    hitShaderBindingTableBuffer,
+    VkDeviceSize                                hitShaderBindingOffset,
+    VkDeviceSize                                hitShaderBindingStride,
+    VkBuffer                                    callableShaderBindingTableBuffer,
+    VkDeviceSize                                callableShaderBindingOffset,
+    VkDeviceSize                                callableShaderBindingStride,
+    uint32_t                                    width,
+    uint32_t                                    height,
+    uint32_t                                    depth) const;
+bool PreCallValidateCreateRayTracingPipelinesNV(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkRayTracingPipelineCreateInfoNV*     pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines) const;
+void PostCallRecordCreateRayTracingPipelinesNV(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkRayTracingPipelineCreateInfoNV*     pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines,
+    VkResult                                    result);
+bool PreCallValidateGetRayTracingShaderGroupHandlesNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    firstGroup,
+    uint32_t                                    groupCount,
+    size_t                                      dataSize,
+    void*                                       pData) const;
+bool PreCallValidateGetAccelerationStructureHandleNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    size_t                                      dataSize,
+    void*                                       pData) const;
+bool PreCallValidateCmdWriteAccelerationStructuresPropertiesNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    accelerationStructureCount,
+    const VkAccelerationStructureNV*            pAccelerationStructures,
+    VkQueryType                                 queryType,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery) const;
+bool PreCallValidateCompileDeferredNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    shader) const;
+bool PreCallValidateGetMemoryHostPointerPropertiesEXT(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    const void*                                 pHostPointer,
+    VkMemoryHostPointerPropertiesEXT*           pMemoryHostPointerProperties) const;
+bool PreCallValidateCmdWriteBufferMarkerAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    uint32_t                                    marker) const;
+bool PreCallValidateGetPhysicalDeviceCalibrateableTimeDomainsEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pTimeDomainCount,
+    VkTimeDomainEXT*                            pTimeDomains) const;
+bool PreCallValidateGetCalibratedTimestampsEXT(
+    VkDevice                                    device,
+    uint32_t                                    timestampCount,
+    const VkCalibratedTimestampInfoEXT*         pTimestampInfos,
+    uint64_t*                                   pTimestamps,
+    uint64_t*                                   pMaxDeviation) const;
+bool PreCallValidateCmdDrawMeshTasksNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    taskCount,
+    uint32_t                                    firstTask) const;
+bool PreCallValidateCmdDrawMeshTasksIndirectNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride) const;
+bool PreCallValidateCmdDrawMeshTasksIndirectCountNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) const;
+bool PreCallValidateCmdSetExclusiveScissorNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstExclusiveScissor,
+    uint32_t                                    exclusiveScissorCount,
+    const VkRect2D*                             pExclusiveScissors) const;
+bool PreCallValidateCmdSetCheckpointNV(
+    VkCommandBuffer                             commandBuffer,
+    const void*                                 pCheckpointMarker) const;
+bool PreCallValidateGetQueueCheckpointDataNV(
+    VkQueue                                     queue,
+    uint32_t*                                   pCheckpointDataCount,
+    VkCheckpointDataNV*                         pCheckpointData) const;
+bool PreCallValidateInitializePerformanceApiINTEL(
+    VkDevice                                    device,
+    const VkInitializePerformanceApiInfoINTEL*  pInitializeInfo) const;
+bool PreCallValidateUninitializePerformanceApiINTEL(
+    VkDevice                                    device) const;
+bool PreCallValidateCmdSetPerformanceMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceMarkerInfoINTEL*         pMarkerInfo) const;
+bool PreCallValidateCmdSetPerformanceStreamMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceStreamMarkerInfoINTEL*   pMarkerInfo) const;
+bool PreCallValidateCmdSetPerformanceOverrideINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceOverrideInfoINTEL*       pOverrideInfo) const;
+bool PreCallValidateAcquirePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo,
+    VkPerformanceConfigurationINTEL*            pConfiguration) const;
+bool PreCallValidateReleasePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    VkPerformanceConfigurationINTEL             configuration) const;
+bool PreCallValidateQueueSetPerformanceConfigurationINTEL(
+    VkQueue                                     queue,
+    VkPerformanceConfigurationINTEL             configuration) const;
+bool PreCallValidateGetPerformanceParameterINTEL(
+    VkDevice                                    device,
+    VkPerformanceParameterTypeINTEL             parameter,
+    VkPerformanceValueINTEL*                    pValue) const;
+bool PreCallValidateSetLocalDimmingAMD(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapChain,
+    VkBool32                                    localDimmingEnable) const;
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+bool PreCallValidateCreateImagePipeSurfaceFUCHSIA(
+    VkInstance                                  instance,
+    const VkImagePipeSurfaceCreateInfoFUCHSIA*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const;
+void PostCallRecordCreateImagePipeSurfaceFUCHSIA(
+    VkInstance                                  instance,
+    const VkImagePipeSurfaceCreateInfoFUCHSIA*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_FUCHSIA
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+bool PreCallValidateCreateMetalSurfaceEXT(
+    VkInstance                                  instance,
+    const VkMetalSurfaceCreateInfoEXT*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const;
+void PostCallRecordCreateMetalSurfaceEXT(
+    VkInstance                                  instance,
+    const VkMetalSurfaceCreateInfoEXT*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_METAL_EXT
+bool PreCallValidateGetBufferDeviceAddressEXT(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo) const;
+bool PreCallValidateGetPhysicalDeviceToolPropertiesEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pToolCount,
+    VkPhysicalDeviceToolPropertiesEXT*          pToolProperties) const;
+bool PreCallValidateGetPhysicalDeviceCooperativeMatrixPropertiesNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkCooperativeMatrixPropertiesNV*            pProperties) const;
+bool PreCallValidateGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pCombinationCount,
+    VkFramebufferMixedSamplesCombinationNV*     pCombinations) const;
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateGetPhysicalDeviceSurfacePresentModes2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes) const;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateAcquireFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain) const;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateReleaseFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain) const;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateGetDeviceGroupSurfacePresentModes2EXT(
+    VkDevice                                    device,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes) const;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateCreateHeadlessSurfaceEXT(
+    VkInstance                                  instance,
+    const VkHeadlessSurfaceCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const;
+void PostCallRecordCreateHeadlessSurfaceEXT(
+    VkInstance                                  instance,
+    const VkHeadlessSurfaceCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result);
+bool PreCallValidateCmdSetLineStippleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    lineStippleFactor,
+    uint16_t                                    lineStipplePattern) const;
+bool PreCallValidateResetQueryPoolEXT(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount) const;
+
+
+void PostCallRecordDestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator);
+void PreCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags);
+void PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties *pQueueFamilyProperties);
+void PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers);
+void PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet *pDescriptorSets);
+void PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties);
+void PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties);
+void PostCallRecordGetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount, VkDisplayPropertiesKHR *pProperties, VkResult result);
+void PostCallRecordGetDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t *pPropertyCount, VkDisplayModePropertiesKHR *pProperties, VkResult result);
+void PostCallRecordGetPhysicalDeviceDisplayProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount, VkDisplayProperties2KHR *pProperties, VkResult result);
+void PostCallRecordGetDisplayModeProperties2KHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t *pPropertyCount, VkDisplayModeProperties2KHR *pProperties, VkResult result);
diff --git a/src/third_party/vulkan-validation-layers/src/layers/generated/parameter_validation.cpp b/src/third_party/vulkan-validation-layers/src/layers/generated/parameter_validation.cpp
new file mode 100644
index 0000000..55ff31c
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/generated/parameter_validation.cpp
@@ -0,0 +1,10670 @@
+/* *** THIS FILE IS GENERATED - DO NOT EDIT! ***
+ * See parameter_validation_generator.py for modifications
+ *
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (C) 2015-2019 Google Inc.
+ * Copyright (c) 2015-2017 Valve Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@LunarG.com>
+ * Author: Dave Houlton <daveh@LunarG.com>
+ */
+
+
+#include "chassis.h"
+
+#include "stateless_validation.h"
+
+const uint32_t GeneratedVulkanHeaderVersion = 130;
+
+const VkAccessFlags AllVkAccessFlagBits = VK_ACCESS_INDIRECT_COMMAND_READ_BIT|VK_ACCESS_INDEX_READ_BIT|VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT|VK_ACCESS_UNIFORM_READ_BIT|VK_ACCESS_INPUT_ATTACHMENT_READ_BIT|VK_ACCESS_SHADER_READ_BIT|VK_ACCESS_SHADER_WRITE_BIT|VK_ACCESS_COLOR_ATTACHMENT_READ_BIT|VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT|VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT|VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT|VK_ACCESS_TRANSFER_READ_BIT|VK_ACCESS_TRANSFER_WRITE_BIT|VK_ACCESS_HOST_READ_BIT|VK_ACCESS_HOST_WRITE_BIT|VK_ACCESS_MEMORY_READ_BIT|VK_ACCESS_MEMORY_WRITE_BIT|VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT|VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT|VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT|VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT|VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX|VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX|VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT|VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV|VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV|VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV|VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT;
+const VkAttachmentDescriptionFlags AllVkAttachmentDescriptionFlagBits = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT;
+const VkBufferCreateFlags AllVkBufferCreateFlagBits = VK_BUFFER_CREATE_SPARSE_BINDING_BIT|VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT|VK_BUFFER_CREATE_SPARSE_ALIASED_BIT|VK_BUFFER_CREATE_PROTECTED_BIT|VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT|VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR;
+const VkBufferUsageFlags AllVkBufferUsageFlagBits = VK_BUFFER_USAGE_TRANSFER_SRC_BIT|VK_BUFFER_USAGE_TRANSFER_DST_BIT|VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT|VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT|VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT|VK_BUFFER_USAGE_STORAGE_BUFFER_BIT|VK_BUFFER_USAGE_INDEX_BUFFER_BIT|VK_BUFFER_USAGE_VERTEX_BUFFER_BIT|VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT|VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT|VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT|VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT|VK_BUFFER_USAGE_RAY_TRACING_BIT_NV|VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT|VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR;
+const VkColorComponentFlags AllVkColorComponentFlagBits = VK_COLOR_COMPONENT_R_BIT|VK_COLOR_COMPONENT_G_BIT|VK_COLOR_COMPONENT_B_BIT|VK_COLOR_COMPONENT_A_BIT;
+const VkCommandBufferResetFlags AllVkCommandBufferResetFlagBits = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT;
+const VkCommandBufferUsageFlags AllVkCommandBufferUsageFlagBits = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT|VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT|VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
+const VkCommandPoolCreateFlags AllVkCommandPoolCreateFlagBits = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT|VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT|VK_COMMAND_POOL_CREATE_PROTECTED_BIT;
+const VkCommandPoolResetFlags AllVkCommandPoolResetFlagBits = VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT;
+const VkCullModeFlags AllVkCullModeFlagBits = VK_CULL_MODE_NONE|VK_CULL_MODE_FRONT_BIT|VK_CULL_MODE_BACK_BIT|VK_CULL_MODE_FRONT_AND_BACK;
+const VkDependencyFlags AllVkDependencyFlagBits = VK_DEPENDENCY_BY_REGION_BIT|VK_DEPENDENCY_DEVICE_GROUP_BIT|VK_DEPENDENCY_VIEW_LOCAL_BIT|VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR|VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR;
+const VkDescriptorPoolCreateFlags AllVkDescriptorPoolCreateFlagBits = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT|VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT;
+const VkDescriptorSetLayoutCreateFlags AllVkDescriptorSetLayoutCreateFlagBits = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR|VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT;
+const VkDeviceQueueCreateFlags AllVkDeviceQueueCreateFlagBits = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT;
+const VkFenceCreateFlags AllVkFenceCreateFlagBits = VK_FENCE_CREATE_SIGNALED_BIT;
+const VkFormatFeatureFlags AllVkFormatFeatureFlagBits = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT|VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT|VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT|VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT|VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT|VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT|VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT|VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT|VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT|VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT|VK_FORMAT_FEATURE_BLIT_SRC_BIT|VK_FORMAT_FEATURE_BLIT_DST_BIT|VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT|VK_FORMAT_FEATURE_TRANSFER_SRC_BIT|VK_FORMAT_FEATURE_TRANSFER_DST_BIT|VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT|VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT|VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT|VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT|VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT|VK_FORMAT_FEATURE_DISJOINT_BIT|VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT|VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG|VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR|VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR|VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT|VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR|VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR|VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR|VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR|VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR|VK_FORMAT_FEATURE_DISJOINT_BIT_KHR|VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR|VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT|VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT;
+const VkFramebufferCreateFlags AllVkFramebufferCreateFlagBits = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR;
+const VkImageAspectFlags AllVkImageAspectFlagBits = VK_IMAGE_ASPECT_COLOR_BIT|VK_IMAGE_ASPECT_DEPTH_BIT|VK_IMAGE_ASPECT_STENCIL_BIT|VK_IMAGE_ASPECT_METADATA_BIT|VK_IMAGE_ASPECT_PLANE_0_BIT|VK_IMAGE_ASPECT_PLANE_1_BIT|VK_IMAGE_ASPECT_PLANE_2_BIT|VK_IMAGE_ASPECT_PLANE_0_BIT_KHR|VK_IMAGE_ASPECT_PLANE_1_BIT_KHR|VK_IMAGE_ASPECT_PLANE_2_BIT_KHR|VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT|VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT|VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT|VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT;
+const VkImageCreateFlags AllVkImageCreateFlagBits = VK_IMAGE_CREATE_SPARSE_BINDING_BIT|VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT|VK_IMAGE_CREATE_SPARSE_ALIASED_BIT|VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT|VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT|VK_IMAGE_CREATE_ALIAS_BIT|VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT|VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT|VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT|VK_IMAGE_CREATE_EXTENDED_USAGE_BIT|VK_IMAGE_CREATE_PROTECTED_BIT|VK_IMAGE_CREATE_DISJOINT_BIT|VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV|VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR|VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR|VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR|VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR|VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT|VK_IMAGE_CREATE_DISJOINT_BIT_KHR|VK_IMAGE_CREATE_ALIAS_BIT_KHR|VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT;
+const VkImageUsageFlags AllVkImageUsageFlagBits = VK_IMAGE_USAGE_TRANSFER_SRC_BIT|VK_IMAGE_USAGE_TRANSFER_DST_BIT|VK_IMAGE_USAGE_SAMPLED_BIT|VK_IMAGE_USAGE_STORAGE_BIT|VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT|VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT|VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT|VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT|VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV|VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT;
+const VkImageViewCreateFlags AllVkImageViewCreateFlagBits = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT;
+const VkMemoryHeapFlags AllVkMemoryHeapFlagBits = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT|VK_MEMORY_HEAP_MULTI_INSTANCE_BIT|VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR;
+const VkMemoryPropertyFlags AllVkMemoryPropertyFlagBits = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT|VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT|VK_MEMORY_PROPERTY_HOST_COHERENT_BIT|VK_MEMORY_PROPERTY_HOST_CACHED_BIT|VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT|VK_MEMORY_PROPERTY_PROTECTED_BIT|VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD|VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD;
+const VkPipelineCreateFlags AllVkPipelineCreateFlagBits = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT|VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT|VK_PIPELINE_CREATE_DERIVATIVE_BIT|VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT|VK_PIPELINE_CREATE_DISPATCH_BASE_BIT|VK_PIPELINE_CREATE_DISPATCH_BASE|VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR|VK_PIPELINE_CREATE_DISPATCH_BASE_KHR|VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV|VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR|VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR;
+const VkPipelineShaderStageCreateFlags AllVkPipelineShaderStageCreateFlagBits = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT|VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT;
+const VkPipelineStageFlags AllVkPipelineStageFlagBits = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT|VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT|VK_PIPELINE_STAGE_VERTEX_INPUT_BIT|VK_PIPELINE_STAGE_VERTEX_SHADER_BIT|VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT|VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT|VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT|VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT|VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT|VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT|VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT|VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT|VK_PIPELINE_STAGE_TRANSFER_BIT|VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT|VK_PIPELINE_STAGE_HOST_BIT|VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT|VK_PIPELINE_STAGE_ALL_COMMANDS_BIT|VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT|VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT|VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX|VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV|VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV|VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV|VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV|VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV|VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT;
+const VkQueryControlFlags AllVkQueryControlFlagBits = VK_QUERY_CONTROL_PRECISE_BIT;
+const VkQueryPipelineStatisticFlags AllVkQueryPipelineStatisticFlagBits = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT|VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT|VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT|VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT|VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT|VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT|VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT|VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT|VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT|VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT|VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT;
+const VkQueryResultFlags AllVkQueryResultFlagBits = VK_QUERY_RESULT_64_BIT|VK_QUERY_RESULT_WAIT_BIT|VK_QUERY_RESULT_WITH_AVAILABILITY_BIT|VK_QUERY_RESULT_PARTIAL_BIT;
+const VkQueueFlags AllVkQueueFlagBits = VK_QUEUE_GRAPHICS_BIT|VK_QUEUE_COMPUTE_BIT|VK_QUEUE_TRANSFER_BIT|VK_QUEUE_SPARSE_BINDING_BIT|VK_QUEUE_PROTECTED_BIT;
+const VkSampleCountFlags AllVkSampleCountFlagBits = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_2_BIT|VK_SAMPLE_COUNT_4_BIT|VK_SAMPLE_COUNT_8_BIT|VK_SAMPLE_COUNT_16_BIT|VK_SAMPLE_COUNT_32_BIT|VK_SAMPLE_COUNT_64_BIT;
+const VkSamplerCreateFlags AllVkSamplerCreateFlagBits = VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT|VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT;
+const VkShaderStageFlags AllVkShaderStageFlagBits = VK_SHADER_STAGE_VERTEX_BIT|VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT|VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT|VK_SHADER_STAGE_GEOMETRY_BIT|VK_SHADER_STAGE_FRAGMENT_BIT|VK_SHADER_STAGE_COMPUTE_BIT|VK_SHADER_STAGE_ALL_GRAPHICS|VK_SHADER_STAGE_ALL|VK_SHADER_STAGE_RAYGEN_BIT_NV|VK_SHADER_STAGE_ANY_HIT_BIT_NV|VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV|VK_SHADER_STAGE_MISS_BIT_NV|VK_SHADER_STAGE_INTERSECTION_BIT_NV|VK_SHADER_STAGE_CALLABLE_BIT_NV|VK_SHADER_STAGE_TASK_BIT_NV|VK_SHADER_STAGE_MESH_BIT_NV;
+const VkSparseImageFormatFlags AllVkSparseImageFormatFlagBits = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT|VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT|VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT;
+const VkSparseMemoryBindFlags AllVkSparseMemoryBindFlagBits = VK_SPARSE_MEMORY_BIND_METADATA_BIT;
+const VkStencilFaceFlags AllVkStencilFaceFlagBits = VK_STENCIL_FACE_FRONT_BIT|VK_STENCIL_FACE_BACK_BIT|VK_STENCIL_FACE_FRONT_AND_BACK|VK_STENCIL_FRONT_AND_BACK;
+const VkSubpassDescriptionFlags AllVkSubpassDescriptionFlagBits = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX|VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX;
+const VkExternalFenceFeatureFlags AllVkExternalFenceFeatureFlagBits = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT|VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT|VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR|VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR;
+const VkExternalFenceHandleTypeFlags AllVkExternalFenceHandleTypeFlagBits = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT|VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT|VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT|VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT|VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR|VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR|VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR|VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR;
+const VkExternalMemoryFeatureFlags AllVkExternalMemoryFeatureFlagBits = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT|VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT|VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT|VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_KHR|VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR|VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR;
+const VkExternalMemoryHandleTypeFlags AllVkExternalMemoryHandleTypeFlagBits = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR|VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR|VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT_KHR|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT_KHR|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT_KHR|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT_KHR|VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID|VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT;
+const VkExternalSemaphoreFeatureFlags AllVkExternalSemaphoreFeatureFlagBits = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT|VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT|VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR|VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR;
+const VkExternalSemaphoreHandleTypeFlags AllVkExternalSemaphoreHandleTypeFlagBits = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT_KHR|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR;
+const VkFenceImportFlags AllVkFenceImportFlagBits = VK_FENCE_IMPORT_TEMPORARY_BIT|VK_FENCE_IMPORT_TEMPORARY_BIT_KHR;
+const VkMemoryAllocateFlags AllVkMemoryAllocateFlagBits = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT|VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT_KHR|VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR|VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR;
+const VkPeerMemoryFeatureFlags AllVkPeerMemoryFeatureFlagBits = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT|VK_PEER_MEMORY_FEATURE_COPY_DST_BIT|VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT|VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT|VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT_KHR|VK_PEER_MEMORY_FEATURE_COPY_DST_BIT_KHR|VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT_KHR|VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT_KHR;
+const VkSemaphoreImportFlags AllVkSemaphoreImportFlagBits = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT|VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR;
+const VkSubgroupFeatureFlags AllVkSubgroupFeatureFlagBits = VK_SUBGROUP_FEATURE_BASIC_BIT|VK_SUBGROUP_FEATURE_VOTE_BIT|VK_SUBGROUP_FEATURE_ARITHMETIC_BIT|VK_SUBGROUP_FEATURE_BALLOT_BIT|VK_SUBGROUP_FEATURE_SHUFFLE_BIT|VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT|VK_SUBGROUP_FEATURE_CLUSTERED_BIT|VK_SUBGROUP_FEATURE_QUAD_BIT|VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV;
+const VkCompositeAlphaFlagsKHR AllVkCompositeAlphaFlagBitsKHR = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR|VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR|VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR|VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR;
+const VkSurfaceTransformFlagsKHR AllVkSurfaceTransformFlagBitsKHR = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR|VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR|VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR|VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR|VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR|VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR|VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR|VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR|VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR;
+const VkDeviceGroupPresentModeFlagsKHR AllVkDeviceGroupPresentModeFlagBitsKHR = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR|VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR|VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR|VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR;
+const VkSwapchainCreateFlagsKHR AllVkSwapchainCreateFlagBitsKHR = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR|VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR|VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR|VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR;
+const VkDisplayPlaneAlphaFlagsKHR AllVkDisplayPlaneAlphaFlagBitsKHR = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR|VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR|VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR|VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR;
+const VkMemoryAllocateFlagsKHR AllVkMemoryAllocateFlagBitsKHR = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT|VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT_KHR|VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR|VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR;
+const VkPeerMemoryFeatureFlagsKHR AllVkPeerMemoryFeatureFlagBitsKHR = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT|VK_PEER_MEMORY_FEATURE_COPY_DST_BIT|VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT|VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT|VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT_KHR|VK_PEER_MEMORY_FEATURE_COPY_DST_BIT_KHR|VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT_KHR|VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT_KHR;
+const VkExternalMemoryFeatureFlagsKHR AllVkExternalMemoryFeatureFlagBitsKHR = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT|VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT|VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT|VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_KHR|VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR|VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR;
+const VkExternalMemoryHandleTypeFlagsKHR AllVkExternalMemoryHandleTypeFlagBitsKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR|VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR|VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT_KHR|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT_KHR|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT_KHR|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT_KHR|VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID|VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT|VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT;
+const VkExternalSemaphoreFeatureFlagsKHR AllVkExternalSemaphoreFeatureFlagBitsKHR = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT|VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT|VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR|VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR;
+const VkExternalSemaphoreHandleTypeFlagsKHR AllVkExternalSemaphoreHandleTypeFlagBitsKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT_KHR|VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR;
+const VkSemaphoreImportFlagsKHR AllVkSemaphoreImportFlagBitsKHR = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT|VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR;
+const VkExternalFenceFeatureFlagsKHR AllVkExternalFenceFeatureFlagBitsKHR = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT|VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT|VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR|VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR;
+const VkExternalFenceHandleTypeFlagsKHR AllVkExternalFenceHandleTypeFlagBitsKHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT|VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT|VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT|VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT|VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR|VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR|VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR|VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR;
+const VkFenceImportFlagsKHR AllVkFenceImportFlagBitsKHR = VK_FENCE_IMPORT_TEMPORARY_BIT|VK_FENCE_IMPORT_TEMPORARY_BIT_KHR;
+const VkPerformanceCounterDescriptionFlagsKHR AllVkPerformanceCounterDescriptionFlagBitsKHR = VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR|VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR;
+const VkResolveModeFlagsKHR AllVkResolveModeFlagBitsKHR = VK_RESOLVE_MODE_NONE_KHR|VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR|VK_RESOLVE_MODE_AVERAGE_BIT_KHR|VK_RESOLVE_MODE_MIN_BIT_KHR|VK_RESOLVE_MODE_MAX_BIT_KHR;
+const VkSemaphoreWaitFlagsKHR AllVkSemaphoreWaitFlagBitsKHR = VK_SEMAPHORE_WAIT_ANY_BIT_KHR;
+const VkDebugReportFlagsEXT AllVkDebugReportFlagBitsEXT = VK_DEBUG_REPORT_INFORMATION_BIT_EXT|VK_DEBUG_REPORT_WARNING_BIT_EXT|VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT|VK_DEBUG_REPORT_ERROR_BIT_EXT|VK_DEBUG_REPORT_DEBUG_BIT_EXT;
+const VkExternalMemoryFeatureFlagsNV AllVkExternalMemoryFeatureFlagBitsNV = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV|VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV|VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV;
+const VkExternalMemoryHandleTypeFlagsNV AllVkExternalMemoryHandleTypeFlagBitsNV = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV|VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV|VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV;
+const VkConditionalRenderingFlagsEXT AllVkConditionalRenderingFlagBitsEXT = VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT;
+const VkIndirectCommandsLayoutUsageFlagsNVX AllVkIndirectCommandsLayoutUsageFlagBitsNVX = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX|VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX|VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX|VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX;
+const VkObjectEntryUsageFlagsNVX AllVkObjectEntryUsageFlagBitsNVX = VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX|VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX;
+const VkSurfaceCounterFlagsEXT AllVkSurfaceCounterFlagBitsEXT = VK_SURFACE_COUNTER_VBLANK_EXT;
+const VkDebugUtilsMessageSeverityFlagsEXT AllVkDebugUtilsMessageSeverityFlagBitsEXT = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT|VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT|VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT|VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
+const VkDebugUtilsMessageTypeFlagsEXT AllVkDebugUtilsMessageTypeFlagBitsEXT = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT|VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT|VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;
+const VkDescriptorBindingFlagsEXT AllVkDescriptorBindingFlagBitsEXT = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT|VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT|VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT|VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT;
+const VkBuildAccelerationStructureFlagsNV AllVkBuildAccelerationStructureFlagBitsNV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV|VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV|VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV|VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV|VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV;
+const VkGeometryFlagsNV AllVkGeometryFlagBitsNV = VK_GEOMETRY_OPAQUE_BIT_NV|VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV;
+const VkGeometryInstanceFlagsNV AllVkGeometryInstanceFlagBitsNV = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV|VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV|VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV|VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV;
+const VkPipelineCreationFeedbackFlagsEXT AllVkPipelineCreationFeedbackFlagBitsEXT = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT|VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT|VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT;
+const VkToolPurposeFlagsEXT AllVkToolPurposeFlagBitsEXT = VK_TOOL_PURPOSE_VALIDATION_BIT_EXT|VK_TOOL_PURPOSE_PROFILING_BIT_EXT|VK_TOOL_PURPOSE_TRACING_BIT_EXT|VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT_EXT|VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT_EXT|VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT|VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT|VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT|VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT;
+
+const std::vector<VkPipelineCacheHeaderVersion> AllVkPipelineCacheHeaderVersionEnums = {VK_PIPELINE_CACHE_HEADER_VERSION_ONE, };
+const std::vector<VkResult> AllVkResultEnums = {VK_SUCCESS, VK_NOT_READY, VK_TIMEOUT, VK_EVENT_SET, VK_EVENT_RESET, VK_INCOMPLETE, VK_ERROR_OUT_OF_HOST_MEMORY, VK_ERROR_OUT_OF_DEVICE_MEMORY, VK_ERROR_INITIALIZATION_FAILED, VK_ERROR_DEVICE_LOST, VK_ERROR_MEMORY_MAP_FAILED, VK_ERROR_LAYER_NOT_PRESENT, VK_ERROR_EXTENSION_NOT_PRESENT, VK_ERROR_FEATURE_NOT_PRESENT, VK_ERROR_INCOMPATIBLE_DRIVER, VK_ERROR_TOO_MANY_OBJECTS, VK_ERROR_FORMAT_NOT_SUPPORTED, VK_ERROR_FRAGMENTED_POOL, VK_ERROR_OUT_OF_POOL_MEMORY, VK_ERROR_INVALID_EXTERNAL_HANDLE, VK_ERROR_SURFACE_LOST_KHR, VK_ERROR_NATIVE_WINDOW_IN_USE_KHR, VK_SUBOPTIMAL_KHR, VK_ERROR_OUT_OF_DATE_KHR, VK_ERROR_INCOMPATIBLE_DISPLAY_KHR, VK_ERROR_VALIDATION_FAILED_EXT, VK_ERROR_INVALID_SHADER_NV, VK_ERROR_OUT_OF_POOL_MEMORY_KHR, VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR, VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT, VK_ERROR_FRAGMENTATION_EXT, VK_ERROR_NOT_PERMITTED_EXT, VK_ERROR_INVALID_DEVICE_ADDRESS_EXT, VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT, VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR, };
+const std::vector<VkSystemAllocationScope> AllVkSystemAllocationScopeEnums = {VK_SYSTEM_ALLOCATION_SCOPE_COMMAND, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT, VK_SYSTEM_ALLOCATION_SCOPE_CACHE, VK_SYSTEM_ALLOCATION_SCOPE_DEVICE, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE, };
+const std::vector<VkInternalAllocationType> AllVkInternalAllocationTypeEnums = {VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE, };
+const std::vector<VkFormat> AllVkFormatEnums = {VK_FORMAT_UNDEFINED, VK_FORMAT_R4G4_UNORM_PACK8, VK_FORMAT_R4G4B4A4_UNORM_PACK16, VK_FORMAT_B4G4R4A4_UNORM_PACK16, VK_FORMAT_R5G6B5_UNORM_PACK16, VK_FORMAT_B5G6R5_UNORM_PACK16, VK_FORMAT_R5G5B5A1_UNORM_PACK16, VK_FORMAT_B5G5R5A1_UNORM_PACK16, VK_FORMAT_A1R5G5B5_UNORM_PACK16, VK_FORMAT_R8_UNORM, VK_FORMAT_R8_SNORM, VK_FORMAT_R8_USCALED, VK_FORMAT_R8_SSCALED, VK_FORMAT_R8_UINT, VK_FORMAT_R8_SINT, VK_FORMAT_R8_SRGB, VK_FORMAT_R8G8_UNORM, VK_FORMAT_R8G8_SNORM, VK_FORMAT_R8G8_USCALED, VK_FORMAT_R8G8_SSCALED, VK_FORMAT_R8G8_UINT, VK_FORMAT_R8G8_SINT, VK_FORMAT_R8G8_SRGB, VK_FORMAT_R8G8B8_UNORM, VK_FORMAT_R8G8B8_SNORM, VK_FORMAT_R8G8B8_USCALED, VK_FORMAT_R8G8B8_SSCALED, VK_FORMAT_R8G8B8_UINT, VK_FORMAT_R8G8B8_SINT, VK_FORMAT_R8G8B8_SRGB, VK_FORMAT_B8G8R8_UNORM, VK_FORMAT_B8G8R8_SNORM, VK_FORMAT_B8G8R8_USCALED, VK_FORMAT_B8G8R8_SSCALED, VK_FORMAT_B8G8R8_UINT, VK_FORMAT_B8G8R8_SINT, VK_FORMAT_B8G8R8_SRGB, VK_FORMAT_R8G8B8A8_UNORM, VK_FORMAT_R8G8B8A8_SNORM, VK_FORMAT_R8G8B8A8_USCALED, VK_FORMAT_R8G8B8A8_SSCALED, VK_FORMAT_R8G8B8A8_UINT, VK_FORMAT_R8G8B8A8_SINT, VK_FORMAT_R8G8B8A8_SRGB, VK_FORMAT_B8G8R8A8_UNORM, VK_FORMAT_B8G8R8A8_SNORM, VK_FORMAT_B8G8R8A8_USCALED, VK_FORMAT_B8G8R8A8_SSCALED, VK_FORMAT_B8G8R8A8_UINT, VK_FORMAT_B8G8R8A8_SINT, VK_FORMAT_B8G8R8A8_SRGB, VK_FORMAT_A8B8G8R8_UNORM_PACK32, VK_FORMAT_A8B8G8R8_SNORM_PACK32, VK_FORMAT_A8B8G8R8_USCALED_PACK32, VK_FORMAT_A8B8G8R8_SSCALED_PACK32, VK_FORMAT_A8B8G8R8_UINT_PACK32, VK_FORMAT_A8B8G8R8_SINT_PACK32, VK_FORMAT_A8B8G8R8_SRGB_PACK32, VK_FORMAT_A2R10G10B10_UNORM_PACK32, VK_FORMAT_A2R10G10B10_SNORM_PACK32, VK_FORMAT_A2R10G10B10_USCALED_PACK32, VK_FORMAT_A2R10G10B10_SSCALED_PACK32, VK_FORMAT_A2R10G10B10_UINT_PACK32, VK_FORMAT_A2R10G10B10_SINT_PACK32, VK_FORMAT_A2B10G10R10_UNORM_PACK32, VK_FORMAT_A2B10G10R10_SNORM_PACK32, VK_FORMAT_A2B10G10R10_USCALED_PACK32, VK_FORMAT_A2B10G10R10_SSCALED_PACK32, VK_FORMAT_A2B10G10R10_UINT_PACK32, VK_FORMAT_A2B10G10R10_SINT_PACK32, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_SNORM, VK_FORMAT_R16_USCALED, VK_FORMAT_R16_SSCALED, VK_FORMAT_R16_UINT, VK_FORMAT_R16_SINT, VK_FORMAT_R16_SFLOAT, VK_FORMAT_R16G16_UNORM, VK_FORMAT_R16G16_SNORM, VK_FORMAT_R16G16_USCALED, VK_FORMAT_R16G16_SSCALED, VK_FORMAT_R16G16_UINT, VK_FORMAT_R16G16_SINT, VK_FORMAT_R16G16_SFLOAT, VK_FORMAT_R16G16B16_UNORM, VK_FORMAT_R16G16B16_SNORM, VK_FORMAT_R16G16B16_USCALED, VK_FORMAT_R16G16B16_SSCALED, VK_FORMAT_R16G16B16_UINT, VK_FORMAT_R16G16B16_SINT, VK_FORMAT_R16G16B16_SFLOAT, VK_FORMAT_R16G16B16A16_UNORM, VK_FORMAT_R16G16B16A16_SNORM, VK_FORMAT_R16G16B16A16_USCALED, VK_FORMAT_R16G16B16A16_SSCALED, VK_FORMAT_R16G16B16A16_UINT, VK_FORMAT_R16G16B16A16_SINT, VK_FORMAT_R16G16B16A16_SFLOAT, VK_FORMAT_R32_UINT, VK_FORMAT_R32_SINT, VK_FORMAT_R32_SFLOAT, VK_FORMAT_R32G32_UINT, VK_FORMAT_R32G32_SINT, VK_FORMAT_R32G32_SFLOAT, VK_FORMAT_R32G32B32_UINT, VK_FORMAT_R32G32B32_SINT, VK_FORMAT_R32G32B32_SFLOAT, VK_FORMAT_R32G32B32A32_UINT, VK_FORMAT_R32G32B32A32_SINT, VK_FORMAT_R32G32B32A32_SFLOAT, VK_FORMAT_R64_UINT, VK_FORMAT_R64_SINT, VK_FORMAT_R64_SFLOAT, VK_FORMAT_R64G64_UINT, VK_FORMAT_R64G64_SINT, VK_FORMAT_R64G64_SFLOAT, VK_FORMAT_R64G64B64_UINT, VK_FORMAT_R64G64B64_SINT, VK_FORMAT_R64G64B64_SFLOAT, VK_FORMAT_R64G64B64A64_UINT, VK_FORMAT_R64G64B64A64_SINT, VK_FORMAT_R64G64B64A64_SFLOAT, VK_FORMAT_B10G11R11_UFLOAT_PACK32, VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, VK_FORMAT_D16_UNORM, VK_FORMAT_X8_D24_UNORM_PACK32, VK_FORMAT_D32_SFLOAT, VK_FORMAT_S8_UINT, VK_FORMAT_D16_UNORM_S8_UINT, VK_FORMAT_D24_UNORM_S8_UINT, VK_FORMAT_D32_SFLOAT_S8_UINT, VK_FORMAT_BC1_RGB_UNORM_BLOCK, VK_FORMAT_BC1_RGB_SRGB_BLOCK, VK_FORMAT_BC1_RGBA_UNORM_BLOCK, VK_FORMAT_BC1_RGBA_SRGB_BLOCK, VK_FORMAT_BC2_UNORM_BLOCK, VK_FORMAT_BC2_SRGB_BLOCK, VK_FORMAT_BC3_UNORM_BLOCK, VK_FORMAT_BC3_SRGB_BLOCK, VK_FORMAT_BC4_UNORM_BLOCK, VK_FORMAT_BC4_SNORM_BLOCK, VK_FORMAT_BC5_UNORM_BLOCK, VK_FORMAT_BC5_SNORM_BLOCK, VK_FORMAT_BC6H_UFLOAT_BLOCK, VK_FORMAT_BC6H_SFLOAT_BLOCK, VK_FORMAT_BC7_UNORM_BLOCK, VK_FORMAT_BC7_SRGB_BLOCK, VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK, VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK, VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK, VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK, VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK, VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK, VK_FORMAT_EAC_R11_UNORM_BLOCK, VK_FORMAT_EAC_R11_SNORM_BLOCK, VK_FORMAT_EAC_R11G11_UNORM_BLOCK, VK_FORMAT_EAC_R11G11_SNORM_BLOCK, VK_FORMAT_ASTC_4x4_UNORM_BLOCK, VK_FORMAT_ASTC_4x4_SRGB_BLOCK, VK_FORMAT_ASTC_5x4_UNORM_BLOCK, VK_FORMAT_ASTC_5x4_SRGB_BLOCK, VK_FORMAT_ASTC_5x5_UNORM_BLOCK, VK_FORMAT_ASTC_5x5_SRGB_BLOCK, VK_FORMAT_ASTC_6x5_UNORM_BLOCK, VK_FORMAT_ASTC_6x5_SRGB_BLOCK, VK_FORMAT_ASTC_6x6_UNORM_BLOCK, VK_FORMAT_ASTC_6x6_SRGB_BLOCK, VK_FORMAT_ASTC_8x5_UNORM_BLOCK, VK_FORMAT_ASTC_8x5_SRGB_BLOCK, VK_FORMAT_ASTC_8x6_UNORM_BLOCK, VK_FORMAT_ASTC_8x6_SRGB_BLOCK, VK_FORMAT_ASTC_8x8_UNORM_BLOCK, VK_FORMAT_ASTC_8x8_SRGB_BLOCK, VK_FORMAT_ASTC_10x5_UNORM_BLOCK, VK_FORMAT_ASTC_10x5_SRGB_BLOCK, VK_FORMAT_ASTC_10x6_UNORM_BLOCK, VK_FORMAT_ASTC_10x6_SRGB_BLOCK, VK_FORMAT_ASTC_10x8_UNORM_BLOCK, VK_FORMAT_ASTC_10x8_SRGB_BLOCK, VK_FORMAT_ASTC_10x10_UNORM_BLOCK, VK_FORMAT_ASTC_10x10_SRGB_BLOCK, VK_FORMAT_ASTC_12x10_UNORM_BLOCK, VK_FORMAT_ASTC_12x10_SRGB_BLOCK, VK_FORMAT_ASTC_12x12_UNORM_BLOCK, VK_FORMAT_ASTC_12x12_SRGB_BLOCK, VK_FORMAT_G8B8G8R8_422_UNORM, VK_FORMAT_B8G8R8G8_422_UNORM, VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM, VK_FORMAT_G8_B8R8_2PLANE_420_UNORM, VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM, VK_FORMAT_G8_B8R8_2PLANE_422_UNORM, VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM, VK_FORMAT_R10X6_UNORM_PACK16, VK_FORMAT_R10X6G10X6_UNORM_2PACK16, VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16, VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16, VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16, VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16, VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16, VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16, VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16, VK_FORMAT_R12X4_UNORM_PACK16, VK_FORMAT_R12X4G12X4_UNORM_2PACK16, VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16, VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16, VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16, VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16, VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16, VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16, VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16, VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16, VK_FORMAT_G16B16G16R16_422_UNORM, VK_FORMAT_B16G16R16G16_422_UNORM, VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, VK_FORMAT_G16_B16R16_2PLANE_420_UNORM, VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM, VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM, VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG, VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG, VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG, VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG, VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG, VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG, VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG, VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG, VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT, VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT, VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT, VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT, VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT, VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT, VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT, VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT, VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT, VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT, VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT, VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT, VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT, VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT, VK_FORMAT_G8B8G8R8_422_UNORM_KHR, VK_FORMAT_B8G8R8G8_422_UNORM_KHR, VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR, VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR, VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR, VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR, VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR, VK_FORMAT_R10X6_UNORM_PACK16_KHR, VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR, VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR, VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR, VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR, VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR, VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR, VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR, VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR, VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR, VK_FORMAT_R12X4_UNORM_PACK16_KHR, VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR, VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR, VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR, VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR, VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR, VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR, VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR, VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR, VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR, VK_FORMAT_G16B16G16R16_422_UNORM_KHR, VK_FORMAT_B16G16R16G16_422_UNORM_KHR, VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR, VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR, VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR, VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR, VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR, };
+const std::vector<VkImageType> AllVkImageTypeEnums = {VK_IMAGE_TYPE_1D, VK_IMAGE_TYPE_2D, VK_IMAGE_TYPE_3D, };
+const std::vector<VkImageTiling> AllVkImageTilingEnums = {VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_TILING_LINEAR, VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT, };
+const std::vector<VkPhysicalDeviceType> AllVkPhysicalDeviceTypeEnums = {VK_PHYSICAL_DEVICE_TYPE_OTHER, VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU, VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU, VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU, VK_PHYSICAL_DEVICE_TYPE_CPU, };
+const std::vector<VkQueryType> AllVkQueryTypeEnums = {VK_QUERY_TYPE_OCCLUSION, VK_QUERY_TYPE_PIPELINE_STATISTICS, VK_QUERY_TYPE_TIMESTAMP, VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT, VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV, VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL, };
+const std::vector<VkSharingMode> AllVkSharingModeEnums = {VK_SHARING_MODE_EXCLUSIVE, VK_SHARING_MODE_CONCURRENT, };
+const std::vector<VkImageLayout> AllVkImageLayoutEnums = {VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_PREINITIALIZED, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR, VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV, VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR, };
+const std::vector<VkImageViewType> AllVkImageViewTypeEnums = {VK_IMAGE_VIEW_TYPE_1D, VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY, VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, };
+const std::vector<VkComponentSwizzle> AllVkComponentSwizzleEnums = {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_ZERO, VK_COMPONENT_SWIZZLE_ONE, VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A, };
+const std::vector<VkVertexInputRate> AllVkVertexInputRateEnums = {VK_VERTEX_INPUT_RATE_VERTEX, VK_VERTEX_INPUT_RATE_INSTANCE, };
+const std::vector<VkPrimitiveTopology> AllVkPrimitiveTopologyEnums = {VK_PRIMITIVE_TOPOLOGY_POINT_LIST, VK_PRIMITIVE_TOPOLOGY_LINE_LIST, VK_PRIMITIVE_TOPOLOGY_LINE_STRIP, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN, VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY, VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY, VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, };
+const std::vector<VkPolygonMode> AllVkPolygonModeEnums = {VK_POLYGON_MODE_FILL, VK_POLYGON_MODE_LINE, VK_POLYGON_MODE_POINT, VK_POLYGON_MODE_FILL_RECTANGLE_NV, };
+const std::vector<VkFrontFace> AllVkFrontFaceEnums = {VK_FRONT_FACE_COUNTER_CLOCKWISE, VK_FRONT_FACE_CLOCKWISE, };
+const std::vector<VkCompareOp> AllVkCompareOpEnums = {VK_COMPARE_OP_NEVER, VK_COMPARE_OP_LESS, VK_COMPARE_OP_EQUAL, VK_COMPARE_OP_LESS_OR_EQUAL, VK_COMPARE_OP_GREATER, VK_COMPARE_OP_NOT_EQUAL, VK_COMPARE_OP_GREATER_OR_EQUAL, VK_COMPARE_OP_ALWAYS, };
+const std::vector<VkStencilOp> AllVkStencilOpEnums = {VK_STENCIL_OP_KEEP, VK_STENCIL_OP_ZERO, VK_STENCIL_OP_REPLACE, VK_STENCIL_OP_INCREMENT_AND_CLAMP, VK_STENCIL_OP_DECREMENT_AND_CLAMP, VK_STENCIL_OP_INVERT, VK_STENCIL_OP_INCREMENT_AND_WRAP, VK_STENCIL_OP_DECREMENT_AND_WRAP, };
+const std::vector<VkLogicOp> AllVkLogicOpEnums = {VK_LOGIC_OP_CLEAR, VK_LOGIC_OP_AND, VK_LOGIC_OP_AND_REVERSE, VK_LOGIC_OP_COPY, VK_LOGIC_OP_AND_INVERTED, VK_LOGIC_OP_NO_OP, VK_LOGIC_OP_XOR, VK_LOGIC_OP_OR, VK_LOGIC_OP_NOR, VK_LOGIC_OP_EQUIVALENT, VK_LOGIC_OP_INVERT, VK_LOGIC_OP_OR_REVERSE, VK_LOGIC_OP_COPY_INVERTED, VK_LOGIC_OP_OR_INVERTED, VK_LOGIC_OP_NAND, VK_LOGIC_OP_SET, };
+const std::vector<VkBlendFactor> AllVkBlendFactorEnums = {VK_BLEND_FACTOR_ZERO, VK_BLEND_FACTOR_ONE, VK_BLEND_FACTOR_SRC_COLOR, VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR, VK_BLEND_FACTOR_DST_COLOR, VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR, VK_BLEND_FACTOR_SRC_ALPHA, VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA, VK_BLEND_FACTOR_DST_ALPHA, VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA, VK_BLEND_FACTOR_CONSTANT_COLOR, VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR, VK_BLEND_FACTOR_CONSTANT_ALPHA, VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA, VK_BLEND_FACTOR_SRC_ALPHA_SATURATE, VK_BLEND_FACTOR_SRC1_COLOR, VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR, VK_BLEND_FACTOR_SRC1_ALPHA, VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA, };
+const std::vector<VkBlendOp> AllVkBlendOpEnums = {VK_BLEND_OP_ADD, VK_BLEND_OP_SUBTRACT, VK_BLEND_OP_REVERSE_SUBTRACT, VK_BLEND_OP_MIN, VK_BLEND_OP_MAX, VK_BLEND_OP_ZERO_EXT, VK_BLEND_OP_SRC_EXT, VK_BLEND_OP_DST_EXT, VK_BLEND_OP_SRC_OVER_EXT, VK_BLEND_OP_DST_OVER_EXT, VK_BLEND_OP_SRC_IN_EXT, VK_BLEND_OP_DST_IN_EXT, VK_BLEND_OP_SRC_OUT_EXT, VK_BLEND_OP_DST_OUT_EXT, VK_BLEND_OP_SRC_ATOP_EXT, VK_BLEND_OP_DST_ATOP_EXT, VK_BLEND_OP_XOR_EXT, VK_BLEND_OP_MULTIPLY_EXT, VK_BLEND_OP_SCREEN_EXT, VK_BLEND_OP_OVERLAY_EXT, VK_BLEND_OP_DARKEN_EXT, VK_BLEND_OP_LIGHTEN_EXT, VK_BLEND_OP_COLORDODGE_EXT, VK_BLEND_OP_COLORBURN_EXT, VK_BLEND_OP_HARDLIGHT_EXT, VK_BLEND_OP_SOFTLIGHT_EXT, VK_BLEND_OP_DIFFERENCE_EXT, VK_BLEND_OP_EXCLUSION_EXT, VK_BLEND_OP_INVERT_EXT, VK_BLEND_OP_INVERT_RGB_EXT, VK_BLEND_OP_LINEARDODGE_EXT, VK_BLEND_OP_LINEARBURN_EXT, VK_BLEND_OP_VIVIDLIGHT_EXT, VK_BLEND_OP_LINEARLIGHT_EXT, VK_BLEND_OP_PINLIGHT_EXT, VK_BLEND_OP_HARDMIX_EXT, VK_BLEND_OP_HSL_HUE_EXT, VK_BLEND_OP_HSL_SATURATION_EXT, VK_BLEND_OP_HSL_COLOR_EXT, VK_BLEND_OP_HSL_LUMINOSITY_EXT, VK_BLEND_OP_PLUS_EXT, VK_BLEND_OP_PLUS_CLAMPED_EXT, VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT, VK_BLEND_OP_PLUS_DARKER_EXT, VK_BLEND_OP_MINUS_EXT, VK_BLEND_OP_MINUS_CLAMPED_EXT, VK_BLEND_OP_CONTRAST_EXT, VK_BLEND_OP_INVERT_OVG_EXT, VK_BLEND_OP_RED_EXT, VK_BLEND_OP_GREEN_EXT, VK_BLEND_OP_BLUE_EXT, };
+const std::vector<VkDynamicState> AllVkDynamicStateEnums = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR, VK_DYNAMIC_STATE_LINE_WIDTH, VK_DYNAMIC_STATE_DEPTH_BIAS, VK_DYNAMIC_STATE_BLEND_CONSTANTS, VK_DYNAMIC_STATE_DEPTH_BOUNDS, VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK, VK_DYNAMIC_STATE_STENCIL_WRITE_MASK, VK_DYNAMIC_STATE_STENCIL_REFERENCE, VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV, VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT, VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV, VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV, VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV, VK_DYNAMIC_STATE_LINE_STIPPLE_EXT, };
+const std::vector<VkFilter> AllVkFilterEnums = {VK_FILTER_NEAREST, VK_FILTER_LINEAR, VK_FILTER_CUBIC_IMG, VK_FILTER_CUBIC_EXT, };
+const std::vector<VkSamplerMipmapMode> AllVkSamplerMipmapModeEnums = {VK_SAMPLER_MIPMAP_MODE_NEAREST, VK_SAMPLER_MIPMAP_MODE_LINEAR, };
+const std::vector<VkSamplerAddressMode> AllVkSamplerAddressModeEnums = {VK_SAMPLER_ADDRESS_MODE_REPEAT, VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT, VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER, VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE, VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR, };
+const std::vector<VkBorderColor> AllVkBorderColorEnums = {VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, VK_BORDER_COLOR_INT_TRANSPARENT_BLACK, VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK, VK_BORDER_COLOR_INT_OPAQUE_BLACK, VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE, VK_BORDER_COLOR_INT_OPAQUE_WHITE, };
+const std::vector<VkDescriptorType> AllVkDescriptorTypeEnums = {VK_DESCRIPTOR_TYPE_SAMPLER, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV, };
+const std::vector<VkAttachmentLoadOp> AllVkAttachmentLoadOpEnums = {VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_LOAD_OP_CLEAR, VK_ATTACHMENT_LOAD_OP_DONT_CARE, };
+const std::vector<VkAttachmentStoreOp> AllVkAttachmentStoreOpEnums = {VK_ATTACHMENT_STORE_OP_STORE, VK_ATTACHMENT_STORE_OP_DONT_CARE, };
+const std::vector<VkPipelineBindPoint> AllVkPipelineBindPointEnums = {VK_PIPELINE_BIND_POINT_GRAPHICS, VK_PIPELINE_BIND_POINT_COMPUTE, VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, };
+const std::vector<VkCommandBufferLevel> AllVkCommandBufferLevelEnums = {VK_COMMAND_BUFFER_LEVEL_PRIMARY, VK_COMMAND_BUFFER_LEVEL_SECONDARY, };
+const std::vector<VkIndexType> AllVkIndexTypeEnums = {VK_INDEX_TYPE_UINT16, VK_INDEX_TYPE_UINT32, VK_INDEX_TYPE_NONE_NV, VK_INDEX_TYPE_UINT8_EXT, };
+const std::vector<VkSubpassContents> AllVkSubpassContentsEnums = {VK_SUBPASS_CONTENTS_INLINE, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS, };
+const std::vector<VkObjectType> AllVkObjectTypeEnums = {VK_OBJECT_TYPE_UNKNOWN, VK_OBJECT_TYPE_INSTANCE, VK_OBJECT_TYPE_PHYSICAL_DEVICE, VK_OBJECT_TYPE_DEVICE, VK_OBJECT_TYPE_QUEUE, VK_OBJECT_TYPE_SEMAPHORE, VK_OBJECT_TYPE_COMMAND_BUFFER, VK_OBJECT_TYPE_FENCE, VK_OBJECT_TYPE_DEVICE_MEMORY, VK_OBJECT_TYPE_BUFFER, VK_OBJECT_TYPE_IMAGE, VK_OBJECT_TYPE_EVENT, VK_OBJECT_TYPE_QUERY_POOL, VK_OBJECT_TYPE_BUFFER_VIEW, VK_OBJECT_TYPE_IMAGE_VIEW, VK_OBJECT_TYPE_SHADER_MODULE, VK_OBJECT_TYPE_PIPELINE_CACHE, VK_OBJECT_TYPE_PIPELINE_LAYOUT, VK_OBJECT_TYPE_RENDER_PASS, VK_OBJECT_TYPE_PIPELINE, VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT, VK_OBJECT_TYPE_SAMPLER, VK_OBJECT_TYPE_DESCRIPTOR_POOL, VK_OBJECT_TYPE_DESCRIPTOR_SET, VK_OBJECT_TYPE_FRAMEBUFFER, VK_OBJECT_TYPE_COMMAND_POOL, VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, VK_OBJECT_TYPE_SURFACE_KHR, VK_OBJECT_TYPE_SWAPCHAIN_KHR, VK_OBJECT_TYPE_DISPLAY_KHR, VK_OBJECT_TYPE_DISPLAY_MODE_KHR, VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT, VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR, VK_OBJECT_TYPE_OBJECT_TABLE_NVX, VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX, VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT, VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR, VK_OBJECT_TYPE_VALIDATION_CACHE_EXT, VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV, VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL, };
+const std::vector<VkVendorId> AllVkVendorIdEnums = {VK_VENDOR_ID_VIV, VK_VENDOR_ID_VSI, VK_VENDOR_ID_KAZAN, };
+const std::vector<VkPointClippingBehavior> AllVkPointClippingBehaviorEnums = {VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY, VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES_KHR, VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY_KHR, };
+const std::vector<VkTessellationDomainOrigin> AllVkTessellationDomainOriginEnums = {VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT, VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT, VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT_KHR, VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT_KHR, };
+const std::vector<VkSamplerYcbcrModelConversion> AllVkSamplerYcbcrModelConversionEnums = {VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020, VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY_KHR, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY_KHR, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709_KHR, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601_KHR, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020_KHR, };
+const std::vector<VkSamplerYcbcrRange> AllVkSamplerYcbcrRangeEnums = {VK_SAMPLER_YCBCR_RANGE_ITU_FULL, VK_SAMPLER_YCBCR_RANGE_ITU_NARROW, VK_SAMPLER_YCBCR_RANGE_ITU_FULL_KHR, VK_SAMPLER_YCBCR_RANGE_ITU_NARROW_KHR, };
+const std::vector<VkChromaLocation> AllVkChromaLocationEnums = {VK_CHROMA_LOCATION_COSITED_EVEN, VK_CHROMA_LOCATION_MIDPOINT, VK_CHROMA_LOCATION_COSITED_EVEN_KHR, VK_CHROMA_LOCATION_MIDPOINT_KHR, };
+const std::vector<VkDescriptorUpdateTemplateType> AllVkDescriptorUpdateTemplateTypeEnums = {VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR, VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR, VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR, };
+const std::vector<VkColorSpaceKHR> AllVkColorSpaceKHREnums = {VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, VK_COLORSPACE_SRGB_NONLINEAR_KHR, VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT, VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT, VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT, VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT, VK_COLOR_SPACE_BT709_LINEAR_EXT, VK_COLOR_SPACE_BT709_NONLINEAR_EXT, VK_COLOR_SPACE_BT2020_LINEAR_EXT, VK_COLOR_SPACE_HDR10_ST2084_EXT, VK_COLOR_SPACE_DOLBYVISION_EXT, VK_COLOR_SPACE_HDR10_HLG_EXT, VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT, VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT, VK_COLOR_SPACE_PASS_THROUGH_EXT, VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT, VK_COLOR_SPACE_DCI_P3_LINEAR_EXT, VK_COLOR_SPACE_DISPLAY_NATIVE_AMD, };
+const std::vector<VkPresentModeKHR> AllVkPresentModeKHREnums = {VK_PRESENT_MODE_IMMEDIATE_KHR, VK_PRESENT_MODE_MAILBOX_KHR, VK_PRESENT_MODE_FIFO_KHR, VK_PRESENT_MODE_FIFO_RELAXED_KHR, VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR, VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR, };
+const std::vector<VkDescriptorUpdateTemplateTypeKHR> AllVkDescriptorUpdateTemplateTypeKHREnums = {VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR, VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR, VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR, };
+const std::vector<VkPerformanceCounterUnitKHR> AllVkPerformanceCounterUnitKHREnums = {VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR, VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR, VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR, VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR, VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR, VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR, VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR, VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR, VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR, VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR, VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR, };
+const std::vector<VkPerformanceCounterScopeKHR> AllVkPerformanceCounterScopeKHREnums = {VK_QUERY_SCOPE_COMMAND_BUFFER_KHR, VK_QUERY_SCOPE_RENDER_PASS_KHR, VK_QUERY_SCOPE_COMMAND_KHR, };
+const std::vector<VkPerformanceCounterStorageKHR> AllVkPerformanceCounterStorageKHREnums = {VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR, VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR, VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR, VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR, VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR, VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR, };
+const std::vector<VkPointClippingBehaviorKHR> AllVkPointClippingBehaviorKHREnums = {VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY, VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES_KHR, VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY_KHR, };
+const std::vector<VkTessellationDomainOriginKHR> AllVkTessellationDomainOriginKHREnums = {VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT, VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT, VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT_KHR, VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT_KHR, };
+const std::vector<VkSamplerYcbcrModelConversionKHR> AllVkSamplerYcbcrModelConversionKHREnums = {VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020, VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY_KHR, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY_KHR, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709_KHR, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601_KHR, VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020_KHR, };
+const std::vector<VkSamplerYcbcrRangeKHR> AllVkSamplerYcbcrRangeKHREnums = {VK_SAMPLER_YCBCR_RANGE_ITU_FULL, VK_SAMPLER_YCBCR_RANGE_ITU_NARROW, VK_SAMPLER_YCBCR_RANGE_ITU_FULL_KHR, VK_SAMPLER_YCBCR_RANGE_ITU_NARROW_KHR, };
+const std::vector<VkChromaLocationKHR> AllVkChromaLocationKHREnums = {VK_CHROMA_LOCATION_COSITED_EVEN, VK_CHROMA_LOCATION_MIDPOINT, VK_CHROMA_LOCATION_COSITED_EVEN_KHR, VK_CHROMA_LOCATION_MIDPOINT_KHR, };
+const std::vector<VkDriverIdKHR> AllVkDriverIdKHREnums = {VK_DRIVER_ID_AMD_PROPRIETARY_KHR, VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR, VK_DRIVER_ID_MESA_RADV_KHR, VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR, VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR, VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR, VK_DRIVER_ID_IMAGINATION_PROPRIETARY_KHR, VK_DRIVER_ID_QUALCOMM_PROPRIETARY_KHR, VK_DRIVER_ID_ARM_PROPRIETARY_KHR, VK_DRIVER_ID_GOOGLE_SWIFTSHADER_KHR, VK_DRIVER_ID_GGP_PROPRIETARY_KHR, VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR, };
+const std::vector<VkShaderFloatControlsIndependenceKHR> AllVkShaderFloatControlsIndependenceKHREnums = {VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR, VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR, VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR, };
+const std::vector<VkSemaphoreTypeKHR> AllVkSemaphoreTypeKHREnums = {VK_SEMAPHORE_TYPE_BINARY_KHR, VK_SEMAPHORE_TYPE_TIMELINE_KHR, };
+const std::vector<VkPipelineExecutableStatisticFormatKHR> AllVkPipelineExecutableStatisticFormatKHREnums = {VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR, VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR, VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR, VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR, };
+const std::vector<VkDebugReportObjectTypeEXT> AllVkDebugReportObjectTypeEXTEnums = {VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT, };
+const std::vector<VkRasterizationOrderAMD> AllVkRasterizationOrderAMDEnums = {VK_RASTERIZATION_ORDER_STRICT_AMD, VK_RASTERIZATION_ORDER_RELAXED_AMD, };
+const std::vector<VkShaderInfoTypeAMD> AllVkShaderInfoTypeAMDEnums = {VK_SHADER_INFO_TYPE_STATISTICS_AMD, VK_SHADER_INFO_TYPE_BINARY_AMD, VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD, };
+const std::vector<VkValidationCheckEXT> AllVkValidationCheckEXTEnums = {VK_VALIDATION_CHECK_ALL_EXT, VK_VALIDATION_CHECK_SHADERS_EXT, };
+const std::vector<VkIndirectCommandsTokenTypeNVX> AllVkIndirectCommandsTokenTypeNVXEnums = {VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX, VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX, VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX, VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX, };
+const std::vector<VkObjectEntryTypeNVX> AllVkObjectEntryTypeNVXEnums = {VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX, VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX, VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX, VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX, VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX, };
+const std::vector<VkDisplayPowerStateEXT> AllVkDisplayPowerStateEXTEnums = {VK_DISPLAY_POWER_STATE_OFF_EXT, VK_DISPLAY_POWER_STATE_SUSPEND_EXT, VK_DISPLAY_POWER_STATE_ON_EXT, };
+const std::vector<VkDeviceEventTypeEXT> AllVkDeviceEventTypeEXTEnums = {VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT, };
+const std::vector<VkDisplayEventTypeEXT> AllVkDisplayEventTypeEXTEnums = {VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT, };
+const std::vector<VkViewportCoordinateSwizzleNV> AllVkViewportCoordinateSwizzleNVEnums = {VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV, VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV, VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV, VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV, VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV, VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV, VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV, VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV, };
+const std::vector<VkDiscardRectangleModeEXT> AllVkDiscardRectangleModeEXTEnums = {VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT, VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT, };
+const std::vector<VkConservativeRasterizationModeEXT> AllVkConservativeRasterizationModeEXTEnums = {VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT, VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT, VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT, };
+const std::vector<VkSamplerReductionModeEXT> AllVkSamplerReductionModeEXTEnums = {VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT, VK_SAMPLER_REDUCTION_MODE_MIN_EXT, VK_SAMPLER_REDUCTION_MODE_MAX_EXT, };
+const std::vector<VkBlendOverlapEXT> AllVkBlendOverlapEXTEnums = {VK_BLEND_OVERLAP_UNCORRELATED_EXT, VK_BLEND_OVERLAP_DISJOINT_EXT, VK_BLEND_OVERLAP_CONJOINT_EXT, };
+const std::vector<VkCoverageModulationModeNV> AllVkCoverageModulationModeNVEnums = {VK_COVERAGE_MODULATION_MODE_NONE_NV, VK_COVERAGE_MODULATION_MODE_RGB_NV, VK_COVERAGE_MODULATION_MODE_ALPHA_NV, VK_COVERAGE_MODULATION_MODE_RGBA_NV, };
+const std::vector<VkValidationCacheHeaderVersionEXT> AllVkValidationCacheHeaderVersionEXTEnums = {VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT, };
+const std::vector<VkShadingRatePaletteEntryNV> AllVkShadingRatePaletteEntryNVEnums = {VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV, VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV, VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV, VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV, VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV, VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV, VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV, VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV, VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV, VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV, VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV, VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV, };
+const std::vector<VkCoarseSampleOrderTypeNV> AllVkCoarseSampleOrderTypeNVEnums = {VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV, VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV, VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV, VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV, };
+const std::vector<VkAccelerationStructureTypeNV> AllVkAccelerationStructureTypeNVEnums = {VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV, VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV, };
+const std::vector<VkRayTracingShaderGroupTypeNV> AllVkRayTracingShaderGroupTypeNVEnums = {VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV, VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV, VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV, };
+const std::vector<VkGeometryTypeNV> AllVkGeometryTypeNVEnums = {VK_GEOMETRY_TYPE_TRIANGLES_NV, VK_GEOMETRY_TYPE_AABBS_NV, };
+const std::vector<VkCopyAccelerationStructureModeNV> AllVkCopyAccelerationStructureModeNVEnums = {VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV, VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV, };
+const std::vector<VkAccelerationStructureMemoryRequirementsTypeNV> AllVkAccelerationStructureMemoryRequirementsTypeNVEnums = {VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV, VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV, VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV, };
+const std::vector<VkQueueGlobalPriorityEXT> AllVkQueueGlobalPriorityEXTEnums = {VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT, VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT, VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT, VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT, };
+const std::vector<VkTimeDomainEXT> AllVkTimeDomainEXTEnums = {VK_TIME_DOMAIN_DEVICE_EXT, VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT, VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT, VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT, };
+const std::vector<VkMemoryOverallocationBehaviorAMD> AllVkMemoryOverallocationBehaviorAMDEnums = {VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD, VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD, VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD, };
+const std::vector<VkPerformanceConfigurationTypeINTEL> AllVkPerformanceConfigurationTypeINTELEnums = {VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL, };
+const std::vector<VkQueryPoolSamplingModeINTEL> AllVkQueryPoolSamplingModeINTELEnums = {VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL, };
+const std::vector<VkPerformanceOverrideTypeINTEL> AllVkPerformanceOverrideTypeINTELEnums = {VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL, VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL, };
+const std::vector<VkPerformanceParameterTypeINTEL> AllVkPerformanceParameterTypeINTELEnums = {VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL, VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL, };
+const std::vector<VkPerformanceValueTypeINTEL> AllVkPerformanceValueTypeINTELEnums = {VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL, VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL, VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL, VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL, VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL, };
+const std::vector<VkValidationFeatureEnableEXT> AllVkValidationFeatureEnableEXTEnums = {VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT, VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT, VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT, };
+const std::vector<VkValidationFeatureDisableEXT> AllVkValidationFeatureDisableEXTEnums = {VK_VALIDATION_FEATURE_DISABLE_ALL_EXT, VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT, VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT, VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT, VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT, VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT, VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT, };
+const std::vector<VkComponentTypeNV> AllVkComponentTypeNVEnums = {VK_COMPONENT_TYPE_FLOAT16_NV, VK_COMPONENT_TYPE_FLOAT32_NV, VK_COMPONENT_TYPE_FLOAT64_NV, VK_COMPONENT_TYPE_SINT8_NV, VK_COMPONENT_TYPE_SINT16_NV, VK_COMPONENT_TYPE_SINT32_NV, VK_COMPONENT_TYPE_SINT64_NV, VK_COMPONENT_TYPE_UINT8_NV, VK_COMPONENT_TYPE_UINT16_NV, VK_COMPONENT_TYPE_UINT32_NV, VK_COMPONENT_TYPE_UINT64_NV, };
+const std::vector<VkScopeNV> AllVkScopeNVEnums = {VK_SCOPE_DEVICE_NV, VK_SCOPE_WORKGROUP_NV, VK_SCOPE_SUBGROUP_NV, VK_SCOPE_QUEUE_FAMILY_NV, };
+const std::vector<VkCoverageReductionModeNV> AllVkCoverageReductionModeNVEnums = {VK_COVERAGE_REDUCTION_MODE_MERGE_NV, VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV, };
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+const std::vector<VkFullScreenExclusiveEXT> AllVkFullScreenExclusiveEXTEnums = {VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT, VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT, VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT, VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT, };
+#endif // VK_USE_PLATFORM_WIN32_KHR
+const std::vector<VkLineRasterizationModeEXT> AllVkLineRasterizationModeEXTEnums = {VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT, VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT, VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT, VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT, };
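+
+// The AllVk*Enums vectors above list every accepted value of the corresponding
+// Vulkan enum (including the promoted KHR/EXT alias names); they are handed to
+// validate_ranged_enum() below as the set of legal values for each
+// ranged-enum parameter.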
+
+
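+// ValidatePnextStructContents() performs stateless validation for a single
+// extension structure in a pNext chain: it switches on header->sType and
+// checks that structure's Bool32 members, flag bitmasks, ranged enums, and
+// count/array pairs via the validate_* helpers.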
+bool StatelessValidation::ValidatePnextStructContents(const char *api_name, const ParameterName &parameter_name, const VkBaseOutStructure* header) const {
+    bool skip = false;
+    switch(header->sType) {
+
+        // Validation code for VkPhysicalDevice16BitStorageFeatures structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES: {
+            VkPhysicalDevice16BitStorageFeatures *structure = (VkPhysicalDevice16BitStorageFeatures *) header;
+            skip |= validate_bool32("VkPhysicalDevice16BitStorageFeatures", "storageBuffer16BitAccess", structure->storageBuffer16BitAccess);
+
+            skip |= validate_bool32("VkPhysicalDevice16BitStorageFeatures", "uniformAndStorageBuffer16BitAccess", structure->uniformAndStorageBuffer16BitAccess);
+
+            skip |= validate_bool32("VkPhysicalDevice16BitStorageFeatures", "storagePushConstant16", structure->storagePushConstant16);
+
+            skip |= validate_bool32("VkPhysicalDevice16BitStorageFeatures", "storageInputOutput16", structure->storageInputOutput16);
+        } break;
+
+        // Validation code for VkMemoryAllocateFlagsInfo structure members
+        case VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO: {
+            VkMemoryAllocateFlagsInfo *structure = (VkMemoryAllocateFlagsInfo *) header;
+            skip |= validate_flags("VkMemoryAllocateFlagsInfo", "flags", "VkMemoryAllocateFlagBits", AllVkMemoryAllocateFlagBits, structure->flags, kOptionalFlags, "VUID-VkMemoryAllocateFlagsInfo-flags-parameter");
+        } break;
+
+        // Validation code for VkDeviceGroupRenderPassBeginInfo structure members
+        case VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO: {
+            VkDeviceGroupRenderPassBeginInfo *structure = (VkDeviceGroupRenderPassBeginInfo *) header;
+            skip |= validate_array("VkDeviceGroupRenderPassBeginInfo", "deviceRenderAreaCount", "pDeviceRenderAreas", structure->deviceRenderAreaCount, &structure->pDeviceRenderAreas, false, true, kVUIDUndefined, "VUID-VkDeviceGroupRenderPassBeginInfo-pDeviceRenderAreas-parameter");
+
+            if (structure->pDeviceRenderAreas != NULL)
+            {
+                for (uint32_t deviceRenderAreaIndex = 0; deviceRenderAreaIndex < structure->deviceRenderAreaCount; ++deviceRenderAreaIndex)
+                {
+                }
+            }
+        } break;
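+        // The per-element loop over pDeviceRenderAreas above is empty because
+        // VkRect2D carries no members that need stateless checks; the array
+        // pointer/count pair itself is validated by validate_array().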
+
+        // Validation code for VkDeviceGroupSubmitInfo structure members
+        case VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO: {
+            VkDeviceGroupSubmitInfo *structure = (VkDeviceGroupSubmitInfo *) header;
+            skip |= validate_array("VkDeviceGroupSubmitInfo", "waitSemaphoreCount", "pWaitSemaphoreDeviceIndices", structure->waitSemaphoreCount, &structure->pWaitSemaphoreDeviceIndices, false, true, kVUIDUndefined, "VUID-VkDeviceGroupSubmitInfo-pWaitSemaphoreDeviceIndices-parameter");
+
+            skip |= validate_array("VkDeviceGroupSubmitInfo", "commandBufferCount", "pCommandBufferDeviceMasks", structure->commandBufferCount, &structure->pCommandBufferDeviceMasks, false, true, kVUIDUndefined, "VUID-VkDeviceGroupSubmitInfo-pCommandBufferDeviceMasks-parameter");
+
+            skip |= validate_array("VkDeviceGroupSubmitInfo", "signalSemaphoreCount", "pSignalSemaphoreDeviceIndices", structure->signalSemaphoreCount, &structure->pSignalSemaphoreDeviceIndices, false, true, kVUIDUndefined, "VUID-VkDeviceGroupSubmitInfo-pSignalSemaphoreDeviceIndices-parameter");
+        } break;
+
+        // Validation code for VkBindBufferMemoryDeviceGroupInfo structure members
+        case VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO: {
+            VkBindBufferMemoryDeviceGroupInfo *structure = (VkBindBufferMemoryDeviceGroupInfo *) header;
+            skip |= validate_array("VkBindBufferMemoryDeviceGroupInfo", "deviceIndexCount", "pDeviceIndices", structure->deviceIndexCount, &structure->pDeviceIndices, false, true, kVUIDUndefined, "VUID-VkBindBufferMemoryDeviceGroupInfo-pDeviceIndices-parameter");
+        } break;
+
+        // Validation code for VkBindImageMemoryDeviceGroupInfo structure members
+        case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO: {
+            VkBindImageMemoryDeviceGroupInfo *structure = (VkBindImageMemoryDeviceGroupInfo *) header;
+            skip |= validate_array("VkBindImageMemoryDeviceGroupInfo", "deviceIndexCount", "pDeviceIndices", structure->deviceIndexCount, &structure->pDeviceIndices, false, true, kVUIDUndefined, "VUID-VkBindImageMemoryDeviceGroupInfo-pDeviceIndices-parameter");
+
+            skip |= validate_array("VkBindImageMemoryDeviceGroupInfo", "splitInstanceBindRegionCount", "pSplitInstanceBindRegions", structure->splitInstanceBindRegionCount, &structure->pSplitInstanceBindRegions, false, true, kVUIDUndefined, "VUID-VkBindImageMemoryDeviceGroupInfo-pSplitInstanceBindRegions-parameter");
+
+            if (structure->pSplitInstanceBindRegions != NULL)
+            {
+                for (uint32_t splitInstanceBindRegionIndex = 0; splitInstanceBindRegionIndex < structure->splitInstanceBindRegionCount; ++splitInstanceBindRegionIndex)
+                {
+                }
+            }
+        } break;
+
+        // Validation code for VkDeviceGroupDeviceCreateInfo structure members
+        case VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO: {
+            VkDeviceGroupDeviceCreateInfo *structure = (VkDeviceGroupDeviceCreateInfo *) header;
+            skip |= validate_array("VkDeviceGroupDeviceCreateInfo", "physicalDeviceCount", "pPhysicalDevices", structure->physicalDeviceCount, &structure->pPhysicalDevices, false, true, kVUIDUndefined, "VUID-VkDeviceGroupDeviceCreateInfo-pPhysicalDevices-parameter");
+        } break;
+
+        // Validation code for VkPhysicalDeviceFeatures2 structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2: {
+            VkPhysicalDeviceFeatures2 *structure = (VkPhysicalDeviceFeatures2 *) header;
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.robustBufferAccess", structure->features.robustBufferAccess);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.fullDrawIndexUint32", structure->features.fullDrawIndexUint32);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.imageCubeArray", structure->features.imageCubeArray);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.independentBlend", structure->features.independentBlend);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.geometryShader", structure->features.geometryShader);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.tessellationShader", structure->features.tessellationShader);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.sampleRateShading", structure->features.sampleRateShading);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.dualSrcBlend", structure->features.dualSrcBlend);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.logicOp", structure->features.logicOp);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.multiDrawIndirect", structure->features.multiDrawIndirect);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.drawIndirectFirstInstance", structure->features.drawIndirectFirstInstance);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.depthClamp", structure->features.depthClamp);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.depthBiasClamp", structure->features.depthBiasClamp);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.fillModeNonSolid", structure->features.fillModeNonSolid);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.depthBounds", structure->features.depthBounds);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.wideLines", structure->features.wideLines);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.largePoints", structure->features.largePoints);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.alphaToOne", structure->features.alphaToOne);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.multiViewport", structure->features.multiViewport);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.samplerAnisotropy", structure->features.samplerAnisotropy);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.textureCompressionETC2", structure->features.textureCompressionETC2);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.textureCompressionASTC_LDR", structure->features.textureCompressionASTC_LDR);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.textureCompressionBC", structure->features.textureCompressionBC);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.occlusionQueryPrecise", structure->features.occlusionQueryPrecise);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.pipelineStatisticsQuery", structure->features.pipelineStatisticsQuery);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.vertexPipelineStoresAndAtomics", structure->features.vertexPipelineStoresAndAtomics);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.fragmentStoresAndAtomics", structure->features.fragmentStoresAndAtomics);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderTessellationAndGeometryPointSize", structure->features.shaderTessellationAndGeometryPointSize);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderImageGatherExtended", structure->features.shaderImageGatherExtended);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderStorageImageExtendedFormats", structure->features.shaderStorageImageExtendedFormats);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderStorageImageMultisample", structure->features.shaderStorageImageMultisample);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderStorageImageReadWithoutFormat", structure->features.shaderStorageImageReadWithoutFormat);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderStorageImageWriteWithoutFormat", structure->features.shaderStorageImageWriteWithoutFormat);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderUniformBufferArrayDynamicIndexing", structure->features.shaderUniformBufferArrayDynamicIndexing);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderSampledImageArrayDynamicIndexing", structure->features.shaderSampledImageArrayDynamicIndexing);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderStorageBufferArrayDynamicIndexing", structure->features.shaderStorageBufferArrayDynamicIndexing);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderStorageImageArrayDynamicIndexing", structure->features.shaderStorageImageArrayDynamicIndexing);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderClipDistance", structure->features.shaderClipDistance);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderCullDistance", structure->features.shaderCullDistance);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderFloat64", structure->features.shaderFloat64);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderInt64", structure->features.shaderInt64);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderInt16", structure->features.shaderInt16);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderResourceResidency", structure->features.shaderResourceResidency);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.shaderResourceMinLod", structure->features.shaderResourceMinLod);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.sparseBinding", structure->features.sparseBinding);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.sparseResidencyBuffer", structure->features.sparseResidencyBuffer);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.sparseResidencyImage2D", structure->features.sparseResidencyImage2D);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.sparseResidencyImage3D", structure->features.sparseResidencyImage3D);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.sparseResidency2Samples", structure->features.sparseResidency2Samples);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.sparseResidency4Samples", structure->features.sparseResidency4Samples);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.sparseResidency8Samples", structure->features.sparseResidency8Samples);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.sparseResidency16Samples", structure->features.sparseResidency16Samples);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.sparseResidencyAliased", structure->features.sparseResidencyAliased);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.variableMultisampleRate", structure->features.variableMultisampleRate);
+
+            skip |= validate_bool32("VkPhysicalDeviceFeatures2", "features.inheritedQueries", structure->features.inheritedQueries);
+        } break;
+
+        // Validation code for VkRenderPassInputAttachmentAspectCreateInfo structure members
+        case VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO: {
+            VkRenderPassInputAttachmentAspectCreateInfo *structure = (VkRenderPassInputAttachmentAspectCreateInfo *) header;
+            skip |= validate_array("VkRenderPassInputAttachmentAspectCreateInfo", "aspectReferenceCount", "pAspectReferences", structure->aspectReferenceCount, &structure->pAspectReferences, true, true, "VUID-VkRenderPassInputAttachmentAspectCreateInfo-aspectReferenceCount-arraylength", "VUID-VkRenderPassInputAttachmentAspectCreateInfo-pAspectReferences-parameter");
+
+            if (structure->pAspectReferences != NULL)
+            {
+                for (uint32_t aspectReferenceIndex = 0; aspectReferenceIndex < structure->aspectReferenceCount; ++aspectReferenceIndex)
+                {
+                    skip |= validate_flags("VkRenderPassInputAttachmentAspectCreateInfo", ParameterName("pAspectReferences[%i].aspectMask", ParameterName::IndexVector{ aspectReferenceIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, structure->pAspectReferences[aspectReferenceIndex].aspectMask, kRequiredFlags, "VUID-VkInputAttachmentAspectReference-aspectMask-parameter", "VUID-VkInputAttachmentAspectReference-aspectMask-requiredbitmask");
+                }
+            }
+        } break;
+
+        // Validation code for VkImageViewUsageCreateInfo structure members
+        case VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO: {
+            VkImageViewUsageCreateInfo *structure = (VkImageViewUsageCreateInfo *) header;
+            skip |= validate_flags("VkImageViewUsageCreateInfo", "usage", "VkImageUsageFlagBits", AllVkImageUsageFlagBits, structure->usage, kRequiredFlags, "VUID-VkImageViewUsageCreateInfo-usage-parameter", "VUID-VkImageViewUsageCreateInfo-usage-requiredbitmask");
+        } break;
+
+        // Validation code for VkPipelineTessellationDomainOriginStateCreateInfo structure members
+        case VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO: {
+            VkPipelineTessellationDomainOriginStateCreateInfo *structure = (VkPipelineTessellationDomainOriginStateCreateInfo *) header;
+            skip |= validate_ranged_enum("VkPipelineTessellationDomainOriginStateCreateInfo", "domainOrigin", "VkTessellationDomainOrigin", AllVkTessellationDomainOriginEnums, structure->domainOrigin, "VUID-VkPipelineTessellationDomainOriginStateCreateInfo-domainOrigin-parameter");
+        } break;
+
+        // Validation code for VkRenderPassMultiviewCreateInfo structure members
+        case VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO: {
+            VkRenderPassMultiviewCreateInfo *structure = (VkRenderPassMultiviewCreateInfo *) header;
+            skip |= validate_array("VkRenderPassMultiviewCreateInfo", "subpassCount", "pViewMasks", structure->subpassCount, &structure->pViewMasks, false, true, kVUIDUndefined, "VUID-VkRenderPassMultiviewCreateInfo-pViewMasks-parameter");
+
+            skip |= validate_array("VkRenderPassMultiviewCreateInfo", "dependencyCount", "pViewOffsets", structure->dependencyCount, &structure->pViewOffsets, false, true, kVUIDUndefined, "VUID-VkRenderPassMultiviewCreateInfo-pViewOffsets-parameter");
+
+            skip |= validate_array("VkRenderPassMultiviewCreateInfo", "correlationMaskCount", "pCorrelationMasks", structure->correlationMaskCount, &structure->pCorrelationMasks, false, true, kVUIDUndefined, "VUID-VkRenderPassMultiviewCreateInfo-pCorrelationMasks-parameter");
+        } break;
+
+        // Validation code for VkPhysicalDeviceMultiviewFeatures structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES: {
+            VkPhysicalDeviceMultiviewFeatures *structure = (VkPhysicalDeviceMultiviewFeatures *) header;
+            skip |= validate_bool32("VkPhysicalDeviceMultiviewFeatures", "multiview", structure->multiview);
+
+            skip |= validate_bool32("VkPhysicalDeviceMultiviewFeatures", "multiviewGeometryShader", structure->multiviewGeometryShader);
+
+            skip |= validate_bool32("VkPhysicalDeviceMultiviewFeatures", "multiviewTessellationShader", structure->multiviewTessellationShader);
+        } break;
+
+        // Validation code for VkPhysicalDeviceVariablePointersFeatures structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES: {
+            VkPhysicalDeviceVariablePointersFeatures *structure = (VkPhysicalDeviceVariablePointersFeatures *) header;
+            skip |= validate_bool32("VkPhysicalDeviceVariablePointersFeatures", "variablePointersStorageBuffer", structure->variablePointersStorageBuffer);
+
+            skip |= validate_bool32("VkPhysicalDeviceVariablePointersFeatures", "variablePointers", structure->variablePointers);
+        } break;
+
+        // Validation code for VkPhysicalDeviceProtectedMemoryFeatures structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES: {
+            VkPhysicalDeviceProtectedMemoryFeatures *structure = (VkPhysicalDeviceProtectedMemoryFeatures *) header;
+            skip |= validate_bool32("VkPhysicalDeviceProtectedMemoryFeatures", "protectedMemory", structure->protectedMemory);
+        } break;
+
+        // Validation code for VkProtectedSubmitInfo structure members
+        case VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO: {
+            VkProtectedSubmitInfo *structure = (VkProtectedSubmitInfo *) header;
+            skip |= validate_bool32("VkProtectedSubmitInfo", "protectedSubmit", structure->protectedSubmit);
+        } break;
+
+        // Validation code for VkSamplerYcbcrConversionInfo structure members
+        case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO: {
+            VkSamplerYcbcrConversionInfo *structure = (VkSamplerYcbcrConversionInfo *) header;
+            skip |= validate_required_handle("VkSamplerYcbcrConversionInfo", "conversion", structure->conversion);
+        } break;
+
+        // Validation code for VkBindImagePlaneMemoryInfo structure members
+        case VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO: {
+            VkBindImagePlaneMemoryInfo *structure = (VkBindImagePlaneMemoryInfo *) header;
+            skip |= validate_flags("VkBindImagePlaneMemoryInfo", "planeAspect", "VkImageAspectFlagBits", AllVkImageAspectFlagBits, structure->planeAspect, kRequiredSingleBit, "VUID-VkBindImagePlaneMemoryInfo-planeAspect-parameter", "VUID-VkBindImagePlaneMemoryInfo-planeAspect-parameter");
+        } break;
+
+        // Validation code for VkImagePlaneMemoryRequirementsInfo structure members
+        case VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO: {
+            VkImagePlaneMemoryRequirementsInfo *structure = (VkImagePlaneMemoryRequirementsInfo *) header;
+            skip |= validate_flags("VkImagePlaneMemoryRequirementsInfo", "planeAspect", "VkImageAspectFlagBits", AllVkImageAspectFlagBits, structure->planeAspect, kRequiredSingleBit, "VUID-VkImagePlaneMemoryRequirementsInfo-planeAspect-parameter", "VUID-VkImagePlaneMemoryRequirementsInfo-planeAspect-parameter");
+        } break;
+
+        // Validation code for VkPhysicalDeviceSamplerYcbcrConversionFeatures structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES: {
+            VkPhysicalDeviceSamplerYcbcrConversionFeatures *structure = (VkPhysicalDeviceSamplerYcbcrConversionFeatures *) header;
+            skip |= validate_bool32("VkPhysicalDeviceSamplerYcbcrConversionFeatures", "samplerYcbcrConversion", structure->samplerYcbcrConversion);
+        } break;
+
+        // Validation code for VkPhysicalDeviceExternalImageFormatInfo structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO: {
+            VkPhysicalDeviceExternalImageFormatInfo *structure = (VkPhysicalDeviceExternalImageFormatInfo *) header;
+            skip |= validate_flags("VkPhysicalDeviceExternalImageFormatInfo", "handleType", "VkExternalMemoryHandleTypeFlagBits", AllVkExternalMemoryHandleTypeFlagBits, structure->handleType, kOptionalSingleBit, "VUID-VkPhysicalDeviceExternalImageFormatInfo-handleType-parameter");
+        } break;
+
+        // Validation code for VkExternalMemoryImageCreateInfo structure members
+        case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO: {
+            VkExternalMemoryImageCreateInfo *structure = (VkExternalMemoryImageCreateInfo *) header;
+            skip |= validate_flags("VkExternalMemoryImageCreateInfo", "handleTypes", "VkExternalMemoryHandleTypeFlagBits", AllVkExternalMemoryHandleTypeFlagBits, structure->handleTypes, kRequiredFlags, "VUID-VkExternalMemoryImageCreateInfo-handleTypes-parameter", "VUID-VkExternalMemoryImageCreateInfo-handleTypes-requiredbitmask");
+        } break;
+
+        // Validation code for VkExternalMemoryBufferCreateInfo structure members
+        case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO: {
+            VkExternalMemoryBufferCreateInfo *structure = (VkExternalMemoryBufferCreateInfo *) header;
+            skip |= validate_flags("VkExternalMemoryBufferCreateInfo", "handleTypes", "VkExternalMemoryHandleTypeFlagBits", AllVkExternalMemoryHandleTypeFlagBits, structure->handleTypes, kOptionalFlags, "VUID-VkExternalMemoryBufferCreateInfo-handleTypes-parameter");
+        } break;
+
+        // Validation code for VkExportMemoryAllocateInfo structure members
+        case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO: {
+            VkExportMemoryAllocateInfo *structure = (VkExportMemoryAllocateInfo *) header;
+            skip |= validate_flags("VkExportMemoryAllocateInfo", "handleTypes", "VkExternalMemoryHandleTypeFlagBits", AllVkExternalMemoryHandleTypeFlagBits, structure->handleTypes, kOptionalFlags, "VUID-VkExportMemoryAllocateInfo-handleTypes-parameter");
+        } break;
+
+        // Validation code for VkExportFenceCreateInfo structure members
+        case VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO: {
+            VkExportFenceCreateInfo *structure = (VkExportFenceCreateInfo *) header;
+            skip |= validate_flags("VkExportFenceCreateInfo", "handleTypes", "VkExternalFenceHandleTypeFlagBits", AllVkExternalFenceHandleTypeFlagBits, structure->handleTypes, kOptionalFlags, "VUID-VkExportFenceCreateInfo-handleTypes-parameter");
+        } break;
+
+        // Validation code for VkExportSemaphoreCreateInfo structure members
+        case VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO: {
+            VkExportSemaphoreCreateInfo *structure = (VkExportSemaphoreCreateInfo *) header;
+            skip |= validate_flags("VkExportSemaphoreCreateInfo", "handleTypes", "VkExternalSemaphoreHandleTypeFlagBits", AllVkExternalSemaphoreHandleTypeFlagBits, structure->handleTypes, kOptionalFlags, "VUID-VkExportSemaphoreCreateInfo-handleTypes-parameter");
+        } break;
+
+        // Validation code for VkPhysicalDeviceShaderDrawParametersFeatures structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES: {
+            VkPhysicalDeviceShaderDrawParametersFeatures *structure = (VkPhysicalDeviceShaderDrawParametersFeatures *) header;
+            skip |= validate_bool32("VkPhysicalDeviceShaderDrawParametersFeatures", "shaderDrawParameters", structure->shaderDrawParameters);
+        } break;
+
+        // Validation code for VkBindImageMemorySwapchainInfoKHR structure members
+        case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR: {
+            VkBindImageMemorySwapchainInfoKHR *structure = (VkBindImageMemorySwapchainInfoKHR *) header;
+            skip |= validate_required_handle("VkBindImageMemorySwapchainInfoKHR", "swapchain", structure->swapchain);
+        } break;
+
+        // Validation code for VkDeviceGroupPresentInfoKHR structure members
+        case VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR: {
+            VkDeviceGroupPresentInfoKHR *structure = (VkDeviceGroupPresentInfoKHR *) header;
+            skip |= validate_array("VkDeviceGroupPresentInfoKHR", "swapchainCount", "pDeviceMasks", structure->swapchainCount, &structure->pDeviceMasks, false, true, kVUIDUndefined, "VUID-VkDeviceGroupPresentInfoKHR-pDeviceMasks-parameter");
+
+            skip |= validate_flags("VkDeviceGroupPresentInfoKHR", "mode", "VkDeviceGroupPresentModeFlagBitsKHR", AllVkDeviceGroupPresentModeFlagBitsKHR, structure->mode, kRequiredSingleBit, "VUID-VkDeviceGroupPresentInfoKHR-mode-parameter", "VUID-VkDeviceGroupPresentInfoKHR-mode-parameter");
+        } break;
+
+        // Validation code for VkDeviceGroupSwapchainCreateInfoKHR structure members
+        case VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR: {
+            VkDeviceGroupSwapchainCreateInfoKHR *structure = (VkDeviceGroupSwapchainCreateInfoKHR *) header;
+            skip |= validate_flags("VkDeviceGroupSwapchainCreateInfoKHR", "modes", "VkDeviceGroupPresentModeFlagBitsKHR", AllVkDeviceGroupPresentModeFlagBitsKHR, structure->modes, kRequiredFlags, "VUID-VkDeviceGroupSwapchainCreateInfoKHR-modes-parameter", "VUID-VkDeviceGroupSwapchainCreateInfoKHR-modes-requiredbitmask");
+        } break;
+
+        // Validation code for VkDisplayPresentInfoKHR structure members
+        case VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR: {
+            VkDisplayPresentInfoKHR *structure = (VkDisplayPresentInfoKHR *) header;
+            skip |= validate_bool32("VkDisplayPresentInfoKHR", "persistent", structure->persistent);
+        } break;
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+        // Validation code for VkImportMemoryWin32HandleInfoKHR structure members
+        case VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR: {
+            VkImportMemoryWin32HandleInfoKHR *structure = (VkImportMemoryWin32HandleInfoKHR *) header;
+            skip |= validate_flags("VkImportMemoryWin32HandleInfoKHR", "handleType", "VkExternalMemoryHandleTypeFlagBits", AllVkExternalMemoryHandleTypeFlagBits, structure->handleType, kOptionalSingleBit, "VUID-VkImportMemoryWin32HandleInfoKHR-handleType-parameter");
+        } break;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+        // Validation code for VkImportMemoryFdInfoKHR structure members
+        case VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR: {
+            VkImportMemoryFdInfoKHR *structure = (VkImportMemoryFdInfoKHR *) header;
+            skip |= validate_flags("VkImportMemoryFdInfoKHR", "handleType", "VkExternalMemoryHandleTypeFlagBits", AllVkExternalMemoryHandleTypeFlagBits, structure->handleType, kOptionalSingleBit, "VUID-VkImportMemoryFdInfoKHR-handleType-parameter");
+        } break;
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+        // Validation code for VkWin32KeyedMutexAcquireReleaseInfoKHR structure members
+        case VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR: {
+            VkWin32KeyedMutexAcquireReleaseInfoKHR *structure = (VkWin32KeyedMutexAcquireReleaseInfoKHR *) header;
+            skip |= validate_array("VkWin32KeyedMutexAcquireReleaseInfoKHR", "acquireCount", "pAcquireSyncs", structure->acquireCount, &structure->pAcquireSyncs, false, true, kVUIDUndefined, "VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pAcquireSyncs-parameter");
+
+            skip |= validate_array("VkWin32KeyedMutexAcquireReleaseInfoKHR", "acquireCount", "pAcquireKeys", structure->acquireCount, &structure->pAcquireKeys, false, true, kVUIDUndefined, "VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pAcquireKeys-parameter");
+
+            skip |= validate_array("VkWin32KeyedMutexAcquireReleaseInfoKHR", "acquireCount", "pAcquireTimeouts", structure->acquireCount, &structure->pAcquireTimeouts, false, true, kVUIDUndefined, "VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pAcquireTimeouts-parameter");
+
+            skip |= validate_array("VkWin32KeyedMutexAcquireReleaseInfoKHR", "releaseCount", "pReleaseSyncs", structure->releaseCount, &structure->pReleaseSyncs, false, true, kVUIDUndefined, "VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pReleaseSyncs-parameter");
+
+            skip |= validate_array("VkWin32KeyedMutexAcquireReleaseInfoKHR", "releaseCount", "pReleaseKeys", structure->releaseCount, &structure->pReleaseKeys, false, true, kVUIDUndefined, "VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pReleaseKeys-parameter");
+        } break;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+        // Validation code for VkPhysicalDeviceShaderFloat16Int8FeaturesKHR structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR: {
+            VkPhysicalDeviceShaderFloat16Int8FeaturesKHR *structure = (VkPhysicalDeviceShaderFloat16Int8FeaturesKHR *) header;
+            skip |= validate_bool32("VkPhysicalDeviceShaderFloat16Int8FeaturesKHR", "shaderFloat16", structure->shaderFloat16);
+
+            skip |= validate_bool32("VkPhysicalDeviceShaderFloat16Int8FeaturesKHR", "shaderInt8", structure->shaderInt8);
+        } break;
+
+        // Validation code for VkPresentRegionsKHR structure members
+        case VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR: {
+            VkPresentRegionsKHR *structure = (VkPresentRegionsKHR *) header;
+            skip |= validate_array("VkPresentRegionsKHR", "swapchainCount", "pRegions", structure->swapchainCount, &structure->pRegions, true, false, "VUID-VkPresentRegionsKHR-swapchainCount-arraylength", "VUID-VkPresentRegionsKHR-pRegions-parameter");
+
+            if (structure->pRegions != NULL)
+            {
+                for (uint32_t swapchainIndex = 0; swapchainIndex < structure->swapchainCount; ++swapchainIndex)
+                {
+                    if (structure->pRegions[swapchainIndex].pRectangles != NULL)
+                    {
+                        for (uint32_t rectangleIndex = 0; rectangleIndex < structure->pRegions[swapchainIndex].rectangleCount; ++rectangleIndex)
+                        {
+                        }
+                    }
+                }
+            }
+        } break;
+
+        // Validation code for VkPhysicalDeviceImagelessFramebufferFeaturesKHR structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR: {
+            VkPhysicalDeviceImagelessFramebufferFeaturesKHR *structure = (VkPhysicalDeviceImagelessFramebufferFeaturesKHR *) header;
+            skip |= validate_bool32("VkPhysicalDeviceImagelessFramebufferFeaturesKHR", "imagelessFramebuffer", structure->imagelessFramebuffer);
+        } break;
+
+        // Validation code for VkFramebufferAttachmentsCreateInfoKHR structure members
+        case VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR: {
+            VkFramebufferAttachmentsCreateInfoKHR *structure = (VkFramebufferAttachmentsCreateInfoKHR *) header;
+            skip |= validate_struct_type_array("VkFramebufferAttachmentsCreateInfoKHR", "attachmentImageInfoCount", "pAttachmentImageInfos", "VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR", structure->attachmentImageInfoCount, structure->pAttachmentImageInfos, VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR, false, true, "VUID-VkFramebufferAttachmentImageInfoKHR-sType-sType", "VUID-VkFramebufferAttachmentsCreateInfoKHR-pAttachmentImageInfos-parameter", kVUIDUndefined);
+
+            if (structure->pAttachmentImageInfos != NULL)
+            {
+                for (uint32_t attachmentImageInfoIndex = 0; attachmentImageInfoIndex < structure->attachmentImageInfoCount; ++attachmentImageInfoIndex)
+                {
+                    skip |= validate_flags("VkFramebufferAttachmentsCreateInfoKHR", ParameterName("pAttachmentImageInfos[%i].flags", ParameterName::IndexVector{ attachmentImageInfoIndex }), "VkImageCreateFlagBits", AllVkImageCreateFlagBits, structure->pAttachmentImageInfos[attachmentImageInfoIndex].flags, kOptionalFlags, "VUID-VkFramebufferAttachmentImageInfoKHR-flags-parameter");
+
+                    skip |= validate_flags("VkFramebufferAttachmentsCreateInfoKHR", ParameterName("pAttachmentImageInfos[%i].usage", ParameterName::IndexVector{ attachmentImageInfoIndex }), "VkImageUsageFlagBits", AllVkImageUsageFlagBits, structure->pAttachmentImageInfos[attachmentImageInfoIndex].usage, kRequiredFlags, "VUID-VkFramebufferAttachmentImageInfoKHR-usage-parameter", "VUID-VkFramebufferAttachmentImageInfoKHR-usage-requiredbitmask");
+
+                    skip |= validate_ranged_enum_array("VkFramebufferAttachmentsCreateInfoKHR", ParameterName("pAttachmentImageInfos[%i].viewFormatCount", ParameterName::IndexVector{ attachmentImageInfoIndex }), ParameterName("pAttachmentImageInfos[%i].pViewFormats", ParameterName::IndexVector{ attachmentImageInfoIndex }), "VkFormat", AllVkFormatEnums, structure->pAttachmentImageInfos[attachmentImageInfoIndex].viewFormatCount, structure->pAttachmentImageInfos[attachmentImageInfoIndex].pViewFormats, false, true);
+                }
+            }
+        } break;
+
+        // Validation code for VkRenderPassAttachmentBeginInfoKHR structure members
+        case VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR: {
+            VkRenderPassAttachmentBeginInfoKHR *structure = (VkRenderPassAttachmentBeginInfoKHR *) header;
+            skip |= validate_array("VkRenderPassAttachmentBeginInfoKHR", "attachmentCount", "pAttachments", structure->attachmentCount, &structure->pAttachments, false, true, kVUIDUndefined, "VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-parameter");
+        } break;
+
+        // Validation code for VkPhysicalDevicePerformanceQueryFeaturesKHR structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR: {
+            VkPhysicalDevicePerformanceQueryFeaturesKHR *structure = (VkPhysicalDevicePerformanceQueryFeaturesKHR *) header;
+            skip |= validate_bool32("VkPhysicalDevicePerformanceQueryFeaturesKHR", "performanceCounterQueryPools", structure->performanceCounterQueryPools);
+
+            skip |= validate_bool32("VkPhysicalDevicePerformanceQueryFeaturesKHR", "performanceCounterMultipleQueryPools", structure->performanceCounterMultipleQueryPools);
+        } break;
+
+        // Validation code for VkQueryPoolPerformanceCreateInfoKHR structure members
+        case VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR: {
+            VkQueryPoolPerformanceCreateInfoKHR *structure = (VkQueryPoolPerformanceCreateInfoKHR *) header;
+            skip |= validate_array("VkQueryPoolPerformanceCreateInfoKHR", "counterIndexCount", "pCounterIndices", structure->counterIndexCount, &structure->pCounterIndices, true, true, "VUID-VkQueryPoolPerformanceCreateInfoKHR-counterIndexCount-arraylength", "VUID-VkQueryPoolPerformanceCreateInfoKHR-pCounterIndices-parameter");
+        } break;
+
+        // Validation code for VkImageFormatListCreateInfoKHR structure members
+        case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR: {
+            VkImageFormatListCreateInfoKHR *structure = (VkImageFormatListCreateInfoKHR *) header;
+            skip |= validate_ranged_enum_array("VkImageFormatListCreateInfoKHR", "viewFormatCount", "pViewFormats", "VkFormat", AllVkFormatEnums, structure->viewFormatCount, structure->pViewFormats, false, true);
+        } break;
+
+        // Validation code for VkPhysicalDevice8BitStorageFeaturesKHR structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR: {
+            VkPhysicalDevice8BitStorageFeaturesKHR *structure = (VkPhysicalDevice8BitStorageFeaturesKHR *) header;
+            skip |= validate_bool32("VkPhysicalDevice8BitStorageFeaturesKHR", "storageBuffer8BitAccess", structure->storageBuffer8BitAccess);
+
+            skip |= validate_bool32("VkPhysicalDevice8BitStorageFeaturesKHR", "uniformAndStorageBuffer8BitAccess", structure->uniformAndStorageBuffer8BitAccess);
+
+            skip |= validate_bool32("VkPhysicalDevice8BitStorageFeaturesKHR", "storagePushConstant8", structure->storagePushConstant8);
+        } break;
+
+        // Validation code for VkPhysicalDeviceShaderAtomicInt64FeaturesKHR structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR: {
+            VkPhysicalDeviceShaderAtomicInt64FeaturesKHR *structure = (VkPhysicalDeviceShaderAtomicInt64FeaturesKHR *) header;
+            skip |= validate_bool32("VkPhysicalDeviceShaderAtomicInt64FeaturesKHR", "shaderBufferInt64Atomics", structure->shaderBufferInt64Atomics);
+
+            skip |= validate_bool32("VkPhysicalDeviceShaderAtomicInt64FeaturesKHR", "shaderSharedInt64Atomics", structure->shaderSharedInt64Atomics);
+        } break;
+
+        // Validation code for VkPhysicalDeviceShaderClockFeaturesKHR structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR: {
+            VkPhysicalDeviceShaderClockFeaturesKHR *structure = (VkPhysicalDeviceShaderClockFeaturesKHR *) header;
+            skip |= validate_bool32("VkPhysicalDeviceShaderClockFeaturesKHR", "shaderSubgroupClock", structure->shaderSubgroupClock);
+
+            skip |= validate_bool32("VkPhysicalDeviceShaderClockFeaturesKHR", "shaderDeviceClock", structure->shaderDeviceClock);
+        } break;
+
+        // Validation code for VkSubpassDescriptionDepthStencilResolveKHR structure members
+        case VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR: {
+            VkSubpassDescriptionDepthStencilResolveKHR *structure = (VkSubpassDescriptionDepthStencilResolveKHR *) header;
+            skip |= validate_flags("VkSubpassDescriptionDepthStencilResolveKHR", "depthResolveMode", "VkResolveModeFlagBitsKHR", AllVkResolveModeFlagBitsKHR, structure->depthResolveMode, kRequiredSingleBit, "VUID-VkSubpassDescriptionDepthStencilResolveKHR-depthResolveMode-parameter", "VUID-VkSubpassDescriptionDepthStencilResolveKHR-depthResolveMode-parameter");
+
+            skip |= validate_flags("VkSubpassDescriptionDepthStencilResolveKHR", "stencilResolveMode", "VkResolveModeFlagBitsKHR", AllVkResolveModeFlagBitsKHR, structure->stencilResolveMode, kRequiredSingleBit, "VUID-VkSubpassDescriptionDepthStencilResolveKHR-stencilResolveMode-parameter", "VUID-VkSubpassDescriptionDepthStencilResolveKHR-stencilResolveMode-parameter");
+
+            skip |= validate_struct_type("VkSubpassDescriptionDepthStencilResolveKHR", "pDepthStencilResolveAttachment", "VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR", structure->pDepthStencilResolveAttachment, VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR, false, "VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-parameter", "VUID-VkAttachmentReference2KHR-sType-sType");
+
+            if (structure->pDepthStencilResolveAttachment != NULL)
+            {
+                skip |= validate_ranged_enum("VkSubpassDescriptionDepthStencilResolveKHR", "pDepthStencilResolveAttachment->layout", "VkImageLayout", AllVkImageLayoutEnums, structure->pDepthStencilResolveAttachment->layout, "VUID-VkAttachmentReference2KHR-layout-parameter");
+            }
+        } break;
+
+        // Validation code for VkPhysicalDeviceTimelineSemaphoreFeaturesKHR structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR: {
+            VkPhysicalDeviceTimelineSemaphoreFeaturesKHR *structure = (VkPhysicalDeviceTimelineSemaphoreFeaturesKHR *) header;
+            skip |= validate_bool32("VkPhysicalDeviceTimelineSemaphoreFeaturesKHR", "timelineSemaphore", structure->timelineSemaphore);
+        } break;
+
+        // Validation code for VkSemaphoreTypeCreateInfoKHR structure members
+        case VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR: {
+            VkSemaphoreTypeCreateInfoKHR *structure = (VkSemaphoreTypeCreateInfoKHR *) header;
+            skip |= validate_ranged_enum("VkSemaphoreTypeCreateInfoKHR", "semaphoreType", "VkSemaphoreTypeKHR", AllVkSemaphoreTypeKHREnums, structure->semaphoreType, "VUID-VkSemaphoreTypeCreateInfoKHR-semaphoreType-parameter");
+        } break;
+
+        // Validation code for VkPhysicalDeviceVulkanMemoryModelFeaturesKHR structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR: {
+            VkPhysicalDeviceVulkanMemoryModelFeaturesKHR *structure = (VkPhysicalDeviceVulkanMemoryModelFeaturesKHR *) header;
+            skip |= validate_bool32("VkPhysicalDeviceVulkanMemoryModelFeaturesKHR", "vulkanMemoryModel", structure->vulkanMemoryModel);
+
+            skip |= validate_bool32("VkPhysicalDeviceVulkanMemoryModelFeaturesKHR", "vulkanMemoryModelDeviceScope", structure->vulkanMemoryModelDeviceScope);
+
+            skip |= validate_bool32("VkPhysicalDeviceVulkanMemoryModelFeaturesKHR", "vulkanMemoryModelAvailabilityVisibilityChains", structure->vulkanMemoryModelAvailabilityVisibilityChains);
+        } break;
+
+        // Validation code for VkSurfaceProtectedCapabilitiesKHR structure members
+        case VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR: {
+            VkSurfaceProtectedCapabilitiesKHR *structure = (VkSurfaceProtectedCapabilitiesKHR *) header;
+            skip |= validate_bool32("VkSurfaceProtectedCapabilitiesKHR", "supportsProtected", structure->supportsProtected);
+        } break;
+
+        // Validation code for VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR: {
+            VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR *structure = (VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR *) header;
+            skip |= validate_bool32("VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR", "separateDepthStencilLayouts", structure->separateDepthStencilLayouts);
+        } break;
+
+        // Validation code for VkAttachmentReferenceStencilLayoutKHR structure members
+        case VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR: {
+            VkAttachmentReferenceStencilLayoutKHR *structure = (VkAttachmentReferenceStencilLayoutKHR *) header;
+            skip |= validate_ranged_enum("VkAttachmentReferenceStencilLayoutKHR", "stencilLayout", "VkImageLayout", AllVkImageLayoutEnums, structure->stencilLayout, "VUID-VkAttachmentReferenceStencilLayoutKHR-stencilLayout-parameter");
+        } break;
+
+        // Validation code for VkAttachmentDescriptionStencilLayoutKHR structure members
+        case VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR: {
+            VkAttachmentDescriptionStencilLayoutKHR *structure = (VkAttachmentDescriptionStencilLayoutKHR *) header;
+            skip |= validate_ranged_enum("VkAttachmentDescriptionStencilLayoutKHR", "stencilInitialLayout", "VkImageLayout", AllVkImageLayoutEnums, structure->stencilInitialLayout, "VUID-VkAttachmentDescriptionStencilLayoutKHR-stencilInitialLayout-parameter");
+
+            skip |= validate_ranged_enum("VkAttachmentDescriptionStencilLayoutKHR", "stencilFinalLayout", "VkImageLayout", AllVkImageLayoutEnums, structure->stencilFinalLayout, "VUID-VkAttachmentDescriptionStencilLayoutKHR-stencilFinalLayout-parameter");
+        } break;
+
+        // Validation code for VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR: {
+            VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR *structure = (VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR *) header;
+            skip |= validate_bool32("VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR", "uniformBufferStandardLayout", structure->uniformBufferStandardLayout);
+        } break;
+
+        // Validation code for VkPhysicalDeviceBufferDeviceAddressFeaturesKHR structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR: {
+            VkPhysicalDeviceBufferDeviceAddressFeaturesKHR *structure = (VkPhysicalDeviceBufferDeviceAddressFeaturesKHR *) header;
+            skip |= validate_bool32("VkPhysicalDeviceBufferDeviceAddressFeaturesKHR", "bufferDeviceAddress", structure->bufferDeviceAddress);
+
+            skip |= validate_bool32("VkPhysicalDeviceBufferDeviceAddressFeaturesKHR", "bufferDeviceAddressCaptureReplay", structure->bufferDeviceAddressCaptureReplay);
+
+            skip |= validate_bool32("VkPhysicalDeviceBufferDeviceAddressFeaturesKHR", "bufferDeviceAddressMultiDevice", structure->bufferDeviceAddressMultiDevice);
+        } break;
+
+        // Validation code for VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR: {
+            VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR *structure = (VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR *) header;
+            skip |= validate_bool32("VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR", "pipelineExecutableInfo", structure->pipelineExecutableInfo);
+        } break;
+
+        // Validation code for VkDebugReportCallbackCreateInfoEXT structure members
+        case VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT: {
+            VkDebugReportCallbackCreateInfoEXT *structure = (VkDebugReportCallbackCreateInfoEXT *) header;
+            skip |= validate_flags("VkDebugReportCallbackCreateInfoEXT", "flags", "VkDebugReportFlagBitsEXT", AllVkDebugReportFlagBitsEXT, structure->flags, kOptionalFlags, "VUID-VkDebugReportCallbackCreateInfoEXT-flags-parameter");
+
+            skip |= validate_required_pointer("VkDebugReportCallbackCreateInfoEXT", "pfnCallback", reinterpret_cast<const void*>(structure->pfnCallback), "VUID-VkDebugReportCallbackCreateInfoEXT-pfnCallback-parameter");
+        } break;
+
+        // Validation code for VkPipelineRasterizationStateRasterizationOrderAMD structure members
+        case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD: {
+            VkPipelineRasterizationStateRasterizationOrderAMD *structure = (VkPipelineRasterizationStateRasterizationOrderAMD *) header;
+            skip |= validate_ranged_enum("VkPipelineRasterizationStateRasterizationOrderAMD", "rasterizationOrder", "VkRasterizationOrderAMD", AllVkRasterizationOrderAMDEnums, structure->rasterizationOrder, "VUID-VkPipelineRasterizationStateRasterizationOrderAMD-rasterizationOrder-parameter");
+        } break;
+
+        // Validation code for VkDedicatedAllocationImageCreateInfoNV structure members
+        case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV: {
+            VkDedicatedAllocationImageCreateInfoNV *structure = (VkDedicatedAllocationImageCreateInfoNV *) header;
+            skip |= validate_bool32("VkDedicatedAllocationImageCreateInfoNV", "dedicatedAllocation", structure->dedicatedAllocation);
+        } break;
+
+        // Validation code for VkDedicatedAllocationBufferCreateInfoNV structure members
+        case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV: {
+            VkDedicatedAllocationBufferCreateInfoNV *structure = (VkDedicatedAllocationBufferCreateInfoNV *) header;
+            skip |= validate_bool32("VkDedicatedAllocationBufferCreateInfoNV", "dedicatedAllocation", structure->dedicatedAllocation);
+        } break;
+
+        // Validation code for VkPhysicalDeviceTransformFeedbackFeaturesEXT structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT: {
+            VkPhysicalDeviceTransformFeedbackFeaturesEXT *structure = (VkPhysicalDeviceTransformFeedbackFeaturesEXT *) header;
+            skip |= validate_bool32("VkPhysicalDeviceTransformFeedbackFeaturesEXT", "transformFeedback", structure->transformFeedback);
+
+            skip |= validate_bool32("VkPhysicalDeviceTransformFeedbackFeaturesEXT", "geometryStreams", structure->geometryStreams);
+        } break;
+
+        // Validation code for VkPipelineRasterizationStateStreamCreateInfoEXT structure members
+        case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT: {
+            VkPipelineRasterizationStateStreamCreateInfoEXT *structure = (VkPipelineRasterizationStateStreamCreateInfoEXT *) header;
+            skip |= validate_reserved_flags("VkPipelineRasterizationStateStreamCreateInfoEXT", "flags", structure->flags, "VUID-VkPipelineRasterizationStateStreamCreateInfoEXT-flags-zerobitmask");
+        } break;
+
+        // Validation code for VkPhysicalDeviceCornerSampledImageFeaturesNV structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV: {
+            VkPhysicalDeviceCornerSampledImageFeaturesNV *structure = (VkPhysicalDeviceCornerSampledImageFeaturesNV *) header;
+            skip |= validate_bool32("VkPhysicalDeviceCornerSampledImageFeaturesNV", "cornerSampledImage", structure->cornerSampledImage);
+        } break;
+
+        // Validation code for VkExternalMemoryImageCreateInfoNV structure members
+        case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV: {
+            VkExternalMemoryImageCreateInfoNV *structure = (VkExternalMemoryImageCreateInfoNV *) header;
+            skip |= validate_flags("VkExternalMemoryImageCreateInfoNV", "handleTypes", "VkExternalMemoryHandleTypeFlagBitsNV", AllVkExternalMemoryHandleTypeFlagBitsNV, structure->handleTypes, kOptionalFlags, "VUID-VkExternalMemoryImageCreateInfoNV-handleTypes-parameter");
+        } break;
+
+        // Validation code for VkExportMemoryAllocateInfoNV structure members
+        case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV: {
+            VkExportMemoryAllocateInfoNV *structure = (VkExportMemoryAllocateInfoNV *) header;
+            skip |= validate_flags("VkExportMemoryAllocateInfoNV", "handleTypes", "VkExternalMemoryHandleTypeFlagBitsNV", AllVkExternalMemoryHandleTypeFlagBitsNV, structure->handleTypes, kOptionalFlags, "VUID-VkExportMemoryAllocateInfoNV-handleTypes-parameter");
+        } break;
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+        // Validation code for VkImportMemoryWin32HandleInfoNV structure members
+        case VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV: {
+            VkImportMemoryWin32HandleInfoNV *structure = (VkImportMemoryWin32HandleInfoNV *) header;
+            skip |= validate_flags("VkImportMemoryWin32HandleInfoNV", "handleType", "VkExternalMemoryHandleTypeFlagBitsNV", AllVkExternalMemoryHandleTypeFlagBitsNV, structure->handleType, kOptionalFlags, "VUID-VkImportMemoryWin32HandleInfoNV-handleType-parameter");
+        } break;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+        // Validation code for VkWin32KeyedMutexAcquireReleaseInfoNV structure members
+        case VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV: {
+            VkWin32KeyedMutexAcquireReleaseInfoNV *structure = (VkWin32KeyedMutexAcquireReleaseInfoNV *) header;
+            skip |= validate_array("VkWin32KeyedMutexAcquireReleaseInfoNV", "acquireCount", "pAcquireSyncs", structure->acquireCount, &structure->pAcquireSyncs, false, true, kVUIDUndefined, "VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pAcquireSyncs-parameter");
+
+            skip |= validate_array("VkWin32KeyedMutexAcquireReleaseInfoNV", "acquireCount", "pAcquireKeys", structure->acquireCount, &structure->pAcquireKeys, false, true, kVUIDUndefined, "VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pAcquireKeys-parameter");
+
+            skip |= validate_array("VkWin32KeyedMutexAcquireReleaseInfoNV", "acquireCount", "pAcquireTimeoutMilliseconds", structure->acquireCount, &structure->pAcquireTimeoutMilliseconds, false, true, kVUIDUndefined, "VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pAcquireTimeoutMilliseconds-parameter");
+
+            skip |= validate_array("VkWin32KeyedMutexAcquireReleaseInfoNV", "releaseCount", "pReleaseSyncs", structure->releaseCount, &structure->pReleaseSyncs, false, true, kVUIDUndefined, "VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pReleaseSyncs-parameter");
+
+            skip |= validate_array("VkWin32KeyedMutexAcquireReleaseInfoNV", "releaseCount", "pReleaseKeys", structure->releaseCount, &structure->pReleaseKeys, false, true, kVUIDUndefined, "VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pReleaseKeys-parameter");
+        } break;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+        // Validation code for VkValidationFlagsEXT structure members
+        case VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT: {
+            VkValidationFlagsEXT *structure = (VkValidationFlagsEXT *) header;
+            skip |= validate_ranged_enum_array("VkValidationFlagsEXT", "disabledValidationCheckCount", "pDisabledValidationChecks", "VkValidationCheckEXT", AllVkValidationCheckEXTEnums, structure->disabledValidationCheckCount, structure->pDisabledValidationChecks, true, true);
+        } break;
+
+        // Validation code for VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT: {
+            VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT *structure = (VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT *) header;
+            skip |= validate_bool32("VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT", "textureCompressionASTC_HDR", structure->textureCompressionASTC_HDR);
+        } break;
+
+        // Validation code for VkImageViewASTCDecodeModeEXT structure members
+        case VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT: {
+            VkImageViewASTCDecodeModeEXT *structure = (VkImageViewASTCDecodeModeEXT *) header;
+            skip |= validate_ranged_enum("VkImageViewASTCDecodeModeEXT", "decodeMode", "VkFormat", AllVkFormatEnums, structure->decodeMode, "VUID-VkImageViewASTCDecodeModeEXT-decodeMode-parameter");
+        } break;
+
+        // Validation code for VkPhysicalDeviceASTCDecodeFeaturesEXT structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT: {
+            VkPhysicalDeviceASTCDecodeFeaturesEXT *structure = (VkPhysicalDeviceASTCDecodeFeaturesEXT *) header;
+            skip |= validate_bool32("VkPhysicalDeviceASTCDecodeFeaturesEXT", "decodeModeSharedExponent", structure->decodeModeSharedExponent);
+        } break;
+
+        // Validation code for VkPhysicalDeviceConditionalRenderingFeaturesEXT structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT: {
+            VkPhysicalDeviceConditionalRenderingFeaturesEXT *structure = (VkPhysicalDeviceConditionalRenderingFeaturesEXT *) header;
+            skip |= validate_bool32("VkPhysicalDeviceConditionalRenderingFeaturesEXT", "conditionalRendering", structure->conditionalRendering);
+
+            skip |= validate_bool32("VkPhysicalDeviceConditionalRenderingFeaturesEXT", "inheritedConditionalRendering", structure->inheritedConditionalRendering);
+        } break;
+
+        // Validation code for VkCommandBufferInheritanceConditionalRenderingInfoEXT structure members
+        case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT: {
+            VkCommandBufferInheritanceConditionalRenderingInfoEXT *structure = (VkCommandBufferInheritanceConditionalRenderingInfoEXT *) header;
+            skip |= validate_bool32("VkCommandBufferInheritanceConditionalRenderingInfoEXT", "conditionalRenderingEnable", structure->conditionalRenderingEnable);
+        } break;
+
+        // Validation code for VkPipelineViewportWScalingStateCreateInfoNV structure members
+        case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV: {
+            VkPipelineViewportWScalingStateCreateInfoNV *structure = (VkPipelineViewportWScalingStateCreateInfoNV *) header;
+            skip |= validate_bool32("VkPipelineViewportWScalingStateCreateInfoNV", "viewportWScalingEnable", structure->viewportWScalingEnable);
+        } break;
+
+        // Validation code for VkSwapchainCounterCreateInfoEXT structure members
+        case VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT: {
+            VkSwapchainCounterCreateInfoEXT *structure = (VkSwapchainCounterCreateInfoEXT *) header;
+            skip |= validate_flags("VkSwapchainCounterCreateInfoEXT", "surfaceCounters", "VkSurfaceCounterFlagBitsEXT", AllVkSurfaceCounterFlagBitsEXT, structure->surfaceCounters, kOptionalFlags, "VUID-VkSwapchainCounterCreateInfoEXT-surfaceCounters-parameter");
+        } break;
+
+        // Validation code for VkPresentTimesInfoGOOGLE structure members
+        case VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE: {
+            VkPresentTimesInfoGOOGLE *structure = (VkPresentTimesInfoGOOGLE *) header;
+            skip |= validate_array("VkPresentTimesInfoGOOGLE", "swapchainCount", "pTimes", structure->swapchainCount, &structure->pTimes, true, false, "VUID-VkPresentTimesInfoGOOGLE-swapchainCount-arraylength", "VUID-VkPresentTimesInfoGOOGLE-pTimes-parameter");
+
+            if (structure->pTimes != NULL)
+            {
+                for (uint32_t swapchainIndex = 0; swapchainIndex < structure->swapchainCount; ++swapchainIndex)
+                {
+                }
+            }
+        } break;
+
+        // Validation code for VkPipelineViewportSwizzleStateCreateInfoNV structure members
+        case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV: {
+            VkPipelineViewportSwizzleStateCreateInfoNV *structure = (VkPipelineViewportSwizzleStateCreateInfoNV *) header;
+            skip |= validate_reserved_flags("VkPipelineViewportSwizzleStateCreateInfoNV", "flags", structure->flags, "VUID-VkPipelineViewportSwizzleStateCreateInfoNV-flags-zerobitmask");
+
+            skip |= validate_array("VkPipelineViewportSwizzleStateCreateInfoNV", "viewportCount", "pViewportSwizzles", structure->viewportCount, &structure->pViewportSwizzles, true, true, "VUID-VkPipelineViewportSwizzleStateCreateInfoNV-viewportCount-arraylength", "VUID-VkPipelineViewportSwizzleStateCreateInfoNV-pViewportSwizzles-parameter");
+
+            if (structure->pViewportSwizzles != NULL)
+            {
+                for (uint32_t viewportIndex = 0; viewportIndex < structure->viewportCount; ++viewportIndex)
+                {
+                    skip |= validate_ranged_enum("VkPipelineViewportSwizzleStateCreateInfoNV", ParameterName("pViewportSwizzles[%i].x", ParameterName::IndexVector{ viewportIndex }), "VkViewportCoordinateSwizzleNV", AllVkViewportCoordinateSwizzleNVEnums, structure->pViewportSwizzles[viewportIndex].x, "VUID-VkViewportSwizzleNV-x-parameter");
+
+                    skip |= validate_ranged_enum("VkPipelineViewportSwizzleStateCreateInfoNV", ParameterName("pViewportSwizzles[%i].y", ParameterName::IndexVector{ viewportIndex }), "VkViewportCoordinateSwizzleNV", AllVkViewportCoordinateSwizzleNVEnums, structure->pViewportSwizzles[viewportIndex].y, "VUID-VkViewportSwizzleNV-y-parameter");
+
+                    skip |= validate_ranged_enum("VkPipelineViewportSwizzleStateCreateInfoNV", ParameterName("pViewportSwizzles[%i].z", ParameterName::IndexVector{ viewportIndex }), "VkViewportCoordinateSwizzleNV", AllVkViewportCoordinateSwizzleNVEnums, structure->pViewportSwizzles[viewportIndex].z, "VUID-VkViewportSwizzleNV-z-parameter");
+
+                    skip |= validate_ranged_enum("VkPipelineViewportSwizzleStateCreateInfoNV", ParameterName("pViewportSwizzles[%i].w", ParameterName::IndexVector{ viewportIndex }), "VkViewportCoordinateSwizzleNV", AllVkViewportCoordinateSwizzleNVEnums, structure->pViewportSwizzles[viewportIndex].w, "VUID-VkViewportSwizzleNV-w-parameter");
+                }
+            }
+        } break;
+
+        // Validation code for VkPipelineDiscardRectangleStateCreateInfoEXT structure members
+        case VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT: {
+            VkPipelineDiscardRectangleStateCreateInfoEXT *structure = (VkPipelineDiscardRectangleStateCreateInfoEXT *) header;
+            skip |= validate_reserved_flags("VkPipelineDiscardRectangleStateCreateInfoEXT", "flags", structure->flags, "VUID-VkPipelineDiscardRectangleStateCreateInfoEXT-flags-zerobitmask");
+
+            skip |= validate_ranged_enum("VkPipelineDiscardRectangleStateCreateInfoEXT", "discardRectangleMode", "VkDiscardRectangleModeEXT", AllVkDiscardRectangleModeEXTEnums, structure->discardRectangleMode, "VUID-VkPipelineDiscardRectangleStateCreateInfoEXT-discardRectangleMode-parameter");
+        } break;
+
+        // Validation code for VkPipelineRasterizationConservativeStateCreateInfoEXT structure members
+        case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT: {
+            VkPipelineRasterizationConservativeStateCreateInfoEXT *structure = (VkPipelineRasterizationConservativeStateCreateInfoEXT *) header;
+            skip |= validate_reserved_flags("VkPipelineRasterizationConservativeStateCreateInfoEXT", "flags", structure->flags, "VUID-VkPipelineRasterizationConservativeStateCreateInfoEXT-flags-zerobitmask");
+
+            skip |= validate_ranged_enum("VkPipelineRasterizationConservativeStateCreateInfoEXT", "conservativeRasterizationMode", "VkConservativeRasterizationModeEXT", AllVkConservativeRasterizationModeEXTEnums, structure->conservativeRasterizationMode, "VUID-VkPipelineRasterizationConservativeStateCreateInfoEXT-conservativeRasterizationMode-parameter");
+        } break;
+
+        // Validation code for VkPhysicalDeviceDepthClipEnableFeaturesEXT structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT: {
+            VkPhysicalDeviceDepthClipEnableFeaturesEXT *structure = (VkPhysicalDeviceDepthClipEnableFeaturesEXT *) header;
+            skip |= validate_bool32("VkPhysicalDeviceDepthClipEnableFeaturesEXT", "depthClipEnable", structure->depthClipEnable);
+        } break;
+
+        // Validation code for VkPipelineRasterizationDepthClipStateCreateInfoEXT structure members
+        case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT: {
+            VkPipelineRasterizationDepthClipStateCreateInfoEXT *structure = (VkPipelineRasterizationDepthClipStateCreateInfoEXT *) header;
+            skip |= validate_reserved_flags("VkPipelineRasterizationDepthClipStateCreateInfoEXT", "flags", structure->flags, "VUID-VkPipelineRasterizationDepthClipStateCreateInfoEXT-flags-zerobitmask");
+
+            skip |= validate_bool32("VkPipelineRasterizationDepthClipStateCreateInfoEXT", "depthClipEnable", structure->depthClipEnable);
+        } break;
+
+        // Validation code for VkDebugUtilsMessengerCreateInfoEXT structure members
+        case VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT: {
+            VkDebugUtilsMessengerCreateInfoEXT *structure = (VkDebugUtilsMessengerCreateInfoEXT *) header;
+            skip |= validate_reserved_flags("VkDebugUtilsMessengerCreateInfoEXT", "flags", structure->flags, "VUID-VkDebugUtilsMessengerCreateInfoEXT-flags-zerobitmask");
+
+            skip |= validate_flags("VkDebugUtilsMessengerCreateInfoEXT", "messageSeverity", "VkDebugUtilsMessageSeverityFlagBitsEXT", AllVkDebugUtilsMessageSeverityFlagBitsEXT, structure->messageSeverity, kRequiredFlags, "VUID-VkDebugUtilsMessengerCreateInfoEXT-messageSeverity-parameter", "VUID-VkDebugUtilsMessengerCreateInfoEXT-messageSeverity-requiredbitmask");
+
+            skip |= validate_flags("VkDebugUtilsMessengerCreateInfoEXT", "messageType", "VkDebugUtilsMessageTypeFlagBitsEXT", AllVkDebugUtilsMessageTypeFlagBitsEXT, structure->messageType, kRequiredFlags, "VUID-VkDebugUtilsMessengerCreateInfoEXT-messageType-parameter", "VUID-VkDebugUtilsMessengerCreateInfoEXT-messageType-requiredbitmask");
+
+            skip |= validate_required_pointer("VkDebugUtilsMessengerCreateInfoEXT", "pfnUserCallback", reinterpret_cast<const void*>(structure->pfnUserCallback), "VUID-VkDebugUtilsMessengerCreateInfoEXT-pfnUserCallback-parameter");
+        } break;
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+        // Validation code for VkImportAndroidHardwareBufferInfoANDROID structure members
+        case VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID: {
+            VkImportAndroidHardwareBufferInfoANDROID *structure = (VkImportAndroidHardwareBufferInfoANDROID *) header;
+            skip |= validate_required_pointer("VkImportAndroidHardwareBufferInfoANDROID", "buffer", structure->buffer, "VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-parameter");
+        } break;
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+        // Validation code for VkSamplerReductionModeCreateInfoEXT structure members
+        case VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT: {
+            VkSamplerReductionModeCreateInfoEXT *structure = (VkSamplerReductionModeCreateInfoEXT *) header;
+            skip |= validate_ranged_enum("VkSamplerReductionModeCreateInfoEXT", "reductionMode", "VkSamplerReductionModeEXT", AllVkSamplerReductionModeEXTEnums, structure->reductionMode, "VUID-VkSamplerReductionModeCreateInfoEXT-reductionMode-parameter");
+        } break;
+
+        // Validation code for VkPhysicalDeviceInlineUniformBlockFeaturesEXT structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT: {
+            VkPhysicalDeviceInlineUniformBlockFeaturesEXT *structure = (VkPhysicalDeviceInlineUniformBlockFeaturesEXT *) header;
+            skip |= validate_bool32("VkPhysicalDeviceInlineUniformBlockFeaturesEXT", "inlineUniformBlock", structure->inlineUniformBlock);
+
+            skip |= validate_bool32("VkPhysicalDeviceInlineUniformBlockFeaturesEXT", "descriptorBindingInlineUniformBlockUpdateAfterBind", structure->descriptorBindingInlineUniformBlockUpdateAfterBind);
+        } break;
+
+        // Validation code for VkWriteDescriptorSetInlineUniformBlockEXT structure members
+        case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT: {
+            VkWriteDescriptorSetInlineUniformBlockEXT *structure = (VkWriteDescriptorSetInlineUniformBlockEXT *) header;
+            skip |= validate_array("VkWriteDescriptorSetInlineUniformBlockEXT", "dataSize", "pData", structure->dataSize, &structure->pData, true, true, "VUID-VkWriteDescriptorSetInlineUniformBlockEXT-dataSize-arraylength", "VUID-VkWriteDescriptorSetInlineUniformBlockEXT-pData-parameter");
+        } break;
+
+        // Validation code for VkSampleLocationsInfoEXT structure members
+        case VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT: {
+            VkSampleLocationsInfoEXT *structure = (VkSampleLocationsInfoEXT *) header;
+            skip |= validate_flags("VkSampleLocationsInfoEXT", "sampleLocationsPerPixel", "VkSampleCountFlagBits", AllVkSampleCountFlagBits, structure->sampleLocationsPerPixel, kOptionalSingleBit, "VUID-VkSampleLocationsInfoEXT-sampleLocationsPerPixel-parameter");
+
+            skip |= validate_array("VkSampleLocationsInfoEXT", "sampleLocationsCount", "pSampleLocations", structure->sampleLocationsCount, &structure->pSampleLocations, false, true, kVUIDUndefined, "VUID-VkSampleLocationsInfoEXT-pSampleLocations-parameter");
+
+            if (structure->pSampleLocations != NULL)
+            {
+                for (uint32_t sampleLocationsIndex = 0; sampleLocationsIndex < structure->sampleLocationsCount; ++sampleLocationsIndex)
+                {
+                }
+            }
+        } break;
+
+        // Validation code for VkRenderPassSampleLocationsBeginInfoEXT structure members
+        case VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT: {
+            VkRenderPassSampleLocationsBeginInfoEXT *structure = (VkRenderPassSampleLocationsBeginInfoEXT *) header;
+            skip |= validate_array("VkRenderPassSampleLocationsBeginInfoEXT", "attachmentInitialSampleLocationsCount", "pAttachmentInitialSampleLocations", structure->attachmentInitialSampleLocationsCount, &structure->pAttachmentInitialSampleLocations, false, true, kVUIDUndefined, "VUID-VkRenderPassSampleLocationsBeginInfoEXT-pAttachmentInitialSampleLocations-parameter");
+
+            if (structure->pAttachmentInitialSampleLocations != NULL)
+            {
+                for (uint32_t attachmentInitialSampleLocationsIndex = 0; attachmentInitialSampleLocationsIndex < structure->attachmentInitialSampleLocationsCount; ++attachmentInitialSampleLocationsIndex)
+                {
+                    skip |= validate_struct_type("VkRenderPassSampleLocationsBeginInfoEXT", ParameterName("pAttachmentInitialSampleLocations[%i].sampleLocationsInfo", ParameterName::IndexVector{ attachmentInitialSampleLocationsIndex }), "VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT", &(structure->pAttachmentInitialSampleLocations[attachmentInitialSampleLocationsIndex].sampleLocationsInfo), VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT, false, kVUIDUndefined, "VUID-VkSampleLocationsInfoEXT-sType-sType");
+
+                    skip |= validate_flags("VkRenderPassSampleLocationsBeginInfoEXT", ParameterName("pAttachmentInitialSampleLocations[%i].sampleLocationsInfo.sampleLocationsPerPixel", ParameterName::IndexVector{ attachmentInitialSampleLocationsIndex }), "VkSampleCountFlagBits", AllVkSampleCountFlagBits, structure->pAttachmentInitialSampleLocations[attachmentInitialSampleLocationsIndex].sampleLocationsInfo.sampleLocationsPerPixel, kOptionalSingleBit, "VUID-VkSampleLocationsInfoEXT-sampleLocationsPerPixel-parameter");
+
+                    skip |= validate_array("VkRenderPassSampleLocationsBeginInfoEXT", ParameterName("pAttachmentInitialSampleLocations[%i].sampleLocationsInfo.sampleLocationsCount", ParameterName::IndexVector{ attachmentInitialSampleLocationsIndex }), ParameterName("pAttachmentInitialSampleLocations[%i].sampleLocationsInfo.pSampleLocations", ParameterName::IndexVector{ attachmentInitialSampleLocationsIndex }), structure->pAttachmentInitialSampleLocations[attachmentInitialSampleLocationsIndex].sampleLocationsInfo.sampleLocationsCount, &structure->pAttachmentInitialSampleLocations[attachmentInitialSampleLocationsIndex].sampleLocationsInfo.pSampleLocations, false, true, kVUIDUndefined, "VUID-VkSampleLocationsInfoEXT-pSampleLocations-parameter");
+
+                    if (structure->pAttachmentInitialSampleLocations[attachmentInitialSampleLocationsIndex].sampleLocationsInfo.pSampleLocations != NULL)
+                    {
+                        for (uint32_t sampleLocationsIndex = 0; sampleLocationsIndex < structure->pAttachmentInitialSampleLocations[attachmentInitialSampleLocationsIndex].sampleLocationsInfo.sampleLocationsCount; ++sampleLocationsIndex)
+                        {
+                        }
+                    }
+                }
+            }
+
+            skip |= validate_array("VkRenderPassSampleLocationsBeginInfoEXT", "postSubpassSampleLocationsCount", "pPostSubpassSampleLocations", structure->postSubpassSampleLocationsCount, &structure->pPostSubpassSampleLocations, false, true, kVUIDUndefined, "VUID-VkRenderPassSampleLocationsBeginInfoEXT-pPostSubpassSampleLocations-parameter");
+
+            if (structure->pPostSubpassSampleLocations != NULL)
+            {
+                for (uint32_t postSubpassSampleLocationsIndex = 0; postSubpassSampleLocationsIndex < structure->postSubpassSampleLocationsCount; ++postSubpassSampleLocationsIndex)
+                {
+                    skip |= validate_struct_type("VkRenderPassSampleLocationsBeginInfoEXT", ParameterName("pPostSubpassSampleLocations[%i].sampleLocationsInfo", ParameterName::IndexVector{ postSubpassSampleLocationsIndex }), "VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT", &(structure->pPostSubpassSampleLocations[postSubpassSampleLocationsIndex].sampleLocationsInfo), VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT, false, kVUIDUndefined, "VUID-VkSampleLocationsInfoEXT-sType-sType");
+
+                    skip |= validate_flags("VkRenderPassSampleLocationsBeginInfoEXT", ParameterName("pPostSubpassSampleLocations[%i].sampleLocationsInfo.sampleLocationsPerPixel", ParameterName::IndexVector{ postSubpassSampleLocationsIndex }), "VkSampleCountFlagBits", AllVkSampleCountFlagBits, structure->pPostSubpassSampleLocations[postSubpassSampleLocationsIndex].sampleLocationsInfo.sampleLocationsPerPixel, kOptionalSingleBit, "VUID-VkSampleLocationsInfoEXT-sampleLocationsPerPixel-parameter");
+
+                    skip |= validate_array("VkRenderPassSampleLocationsBeginInfoEXT", ParameterName("pPostSubpassSampleLocations[%i].sampleLocationsInfo.sampleLocationsCount", ParameterName::IndexVector{ postSubpassSampleLocationsIndex }), ParameterName("pPostSubpassSampleLocations[%i].sampleLocationsInfo.pSampleLocations", ParameterName::IndexVector{ postSubpassSampleLocationsIndex }), structure->pPostSubpassSampleLocations[postSubpassSampleLocationsIndex].sampleLocationsInfo.sampleLocationsCount, &structure->pPostSubpassSampleLocations[postSubpassSampleLocationsIndex].sampleLocationsInfo.pSampleLocations, false, true, kVUIDUndefined, "VUID-VkSampleLocationsInfoEXT-pSampleLocations-parameter");
+
+                    if (structure->pPostSubpassSampleLocations[postSubpassSampleLocationsIndex].sampleLocationsInfo.pSampleLocations != NULL)
+                    {
+                        for (uint32_t sampleLocationsIndex = 0; sampleLocationsIndex < structure->pPostSubpassSampleLocations[postSubpassSampleLocationsIndex].sampleLocationsInfo.sampleLocationsCount; ++sampleLocationsIndex)
+                        {
+                        }
+                    }
+                }
+            }
+        } break;
+
+        // Validation code for VkPipelineSampleLocationsStateCreateInfoEXT structure members
+        case VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT: {
+            VkPipelineSampleLocationsStateCreateInfoEXT *structure = (VkPipelineSampleLocationsStateCreateInfoEXT *) header;
+            skip |= validate_bool32("VkPipelineSampleLocationsStateCreateInfoEXT", "sampleLocationsEnable", structure->sampleLocationsEnable);
+
+            skip |= validate_struct_type("VkPipelineSampleLocationsStateCreateInfoEXT", "sampleLocationsInfo", "VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT", &(structure->sampleLocationsInfo), VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT, false, kVUIDUndefined, "VUID-VkSampleLocationsInfoEXT-sType-sType");
+
+            skip |= validate_flags("VkPipelineSampleLocationsStateCreateInfoEXT", "sampleLocationsInfo.sampleLocationsPerPixel", "VkSampleCountFlagBits", AllVkSampleCountFlagBits, structure->sampleLocationsInfo.sampleLocationsPerPixel, kOptionalSingleBit, "VUID-VkSampleLocationsInfoEXT-sampleLocationsPerPixel-parameter");
+
+            skip |= validate_array("VkPipelineSampleLocationsStateCreateInfoEXT", "sampleLocationsInfo.sampleLocationsCount", "sampleLocationsInfo.pSampleLocations", structure->sampleLocationsInfo.sampleLocationsCount, &structure->sampleLocationsInfo.pSampleLocations, false, true, kVUIDUndefined, "VUID-VkSampleLocationsInfoEXT-pSampleLocations-parameter");
+
+            if (structure->sampleLocationsInfo.pSampleLocations != NULL)
+            {
+                for (uint32_t sampleLocationsIndex = 0; sampleLocationsIndex < structure->sampleLocationsInfo.sampleLocationsCount; ++sampleLocationsIndex)
+                {
+                }
+            }
+        } break;
+
+        // Validation code for VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT: {
+            VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *structure = (VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *) header;
+            skip |= validate_bool32("VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT", "advancedBlendCoherentOperations", structure->advancedBlendCoherentOperations);
+        } break;
+
+        // Validation code for VkPipelineColorBlendAdvancedStateCreateInfoEXT structure members
+        case VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT: {
+            VkPipelineColorBlendAdvancedStateCreateInfoEXT *structure = (VkPipelineColorBlendAdvancedStateCreateInfoEXT *) header;
+            skip |= validate_bool32("VkPipelineColorBlendAdvancedStateCreateInfoEXT", "srcPremultiplied", structure->srcPremultiplied);
+
+            skip |= validate_bool32("VkPipelineColorBlendAdvancedStateCreateInfoEXT", "dstPremultiplied", structure->dstPremultiplied);
+
+            skip |= validate_ranged_enum("VkPipelineColorBlendAdvancedStateCreateInfoEXT", "blendOverlap", "VkBlendOverlapEXT", AllVkBlendOverlapEXTEnums, structure->blendOverlap, "VUID-VkPipelineColorBlendAdvancedStateCreateInfoEXT-blendOverlap-parameter");
+        } break;
+
+        // Validation code for VkPipelineCoverageToColorStateCreateInfoNV structure members
+        case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV: {
+            VkPipelineCoverageToColorStateCreateInfoNV *structure = (VkPipelineCoverageToColorStateCreateInfoNV *) header;
+            skip |= validate_reserved_flags("VkPipelineCoverageToColorStateCreateInfoNV", "flags", structure->flags, "VUID-VkPipelineCoverageToColorStateCreateInfoNV-flags-zerobitmask");
+
+            skip |= validate_bool32("VkPipelineCoverageToColorStateCreateInfoNV", "coverageToColorEnable", structure->coverageToColorEnable);
+        } break;
+
+        // Validation code for VkPipelineCoverageModulationStateCreateInfoNV structure members
+        case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV: {
+            VkPipelineCoverageModulationStateCreateInfoNV *structure = (VkPipelineCoverageModulationStateCreateInfoNV *) header;
+            skip |= validate_reserved_flags("VkPipelineCoverageModulationStateCreateInfoNV", "flags", structure->flags, "VUID-VkPipelineCoverageModulationStateCreateInfoNV-flags-zerobitmask");
+
+            skip |= validate_ranged_enum("VkPipelineCoverageModulationStateCreateInfoNV", "coverageModulationMode", "VkCoverageModulationModeNV", AllVkCoverageModulationModeNVEnums, structure->coverageModulationMode, "VUID-VkPipelineCoverageModulationStateCreateInfoNV-coverageModulationMode-parameter");
+
+            skip |= validate_bool32("VkPipelineCoverageModulationStateCreateInfoNV", "coverageModulationTableEnable", structure->coverageModulationTableEnable);
+        } break;
+
+        // Validation code for VkPhysicalDeviceShaderSMBuiltinsFeaturesNV structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV: {
+            VkPhysicalDeviceShaderSMBuiltinsFeaturesNV *structure = (VkPhysicalDeviceShaderSMBuiltinsFeaturesNV *) header;
+            skip |= validate_bool32("VkPhysicalDeviceShaderSMBuiltinsFeaturesNV", "shaderSMBuiltins", structure->shaderSMBuiltins);
+        } break;
+
+        // Validation code for VkPhysicalDeviceImageDrmFormatModifierInfoEXT structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT: {
+            VkPhysicalDeviceImageDrmFormatModifierInfoEXT *structure = (VkPhysicalDeviceImageDrmFormatModifierInfoEXT *) header;
+            skip |= validate_ranged_enum("VkPhysicalDeviceImageDrmFormatModifierInfoEXT", "sharingMode", "VkSharingMode", AllVkSharingModeEnums, structure->sharingMode, "VUID-VkPhysicalDeviceImageDrmFormatModifierInfoEXT-sharingMode-parameter");
+        } break;
+
+        // Validation code for VkImageDrmFormatModifierListCreateInfoEXT structure members
+        case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT: {
+            VkImageDrmFormatModifierListCreateInfoEXT *structure = (VkImageDrmFormatModifierListCreateInfoEXT *) header;
+            skip |= validate_array("VkImageDrmFormatModifierListCreateInfoEXT", "drmFormatModifierCount", "pDrmFormatModifiers", structure->drmFormatModifierCount, &structure->pDrmFormatModifiers, true, true, "VUID-VkImageDrmFormatModifierListCreateInfoEXT-drmFormatModifierCount-arraylength", "VUID-VkImageDrmFormatModifierListCreateInfoEXT-pDrmFormatModifiers-parameter");
+        } break;
+
+        // Validation code for VkImageDrmFormatModifierExplicitCreateInfoEXT structure members
+        case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT: {
+            VkImageDrmFormatModifierExplicitCreateInfoEXT *structure = (VkImageDrmFormatModifierExplicitCreateInfoEXT *) header;
+            skip |= validate_array("VkImageDrmFormatModifierExplicitCreateInfoEXT", "drmFormatModifierPlaneCount", "pPlaneLayouts", structure->drmFormatModifierPlaneCount, &structure->pPlaneLayouts, true, true, kVUIDUndefined, "VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-pPlaneLayouts-parameter");
+
+            if (structure->pPlaneLayouts != NULL)
+            {
+                for (uint32_t drmFormatModifierPlaneIndex = 0; drmFormatModifierPlaneIndex < structure->drmFormatModifierPlaneCount; ++drmFormatModifierPlaneIndex)
+                {
+                }
+            }
+        } break;
+
+        // Validation code for VkShaderModuleValidationCacheCreateInfoEXT structure members
+        case VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT: {
+            VkShaderModuleValidationCacheCreateInfoEXT *structure = (VkShaderModuleValidationCacheCreateInfoEXT *) header;
+            skip |= validate_required_handle("VkShaderModuleValidationCacheCreateInfoEXT", "validationCache", structure->validationCache);
+        } break;
+
+        // Validation code for VkDescriptorSetLayoutBindingFlagsCreateInfoEXT structure members
+        case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT: {
+            VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *structure = (VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *) header;
+            skip |= validate_flags_array("VkDescriptorSetLayoutBindingFlagsCreateInfoEXT", "bindingCount", "pBindingFlags", "VkDescriptorBindingFlagBitsEXT", AllVkDescriptorBindingFlagBitsEXT, structure->bindingCount, structure->pBindingFlags, false, false);
+        } break;
+
+        // Validation code for VkPhysicalDeviceDescriptorIndexingFeaturesEXT structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT: {
+            VkPhysicalDeviceDescriptorIndexingFeaturesEXT *structure = (VkPhysicalDeviceDescriptorIndexingFeaturesEXT *) header;
+            skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "shaderInputAttachmentArrayDynamicIndexing", structure->shaderInputAttachmentArrayDynamicIndexing);
+
+            skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "shaderUniformTexelBufferArrayDynamicIndexing", structure->shaderUniformTexelBufferArrayDynamicIndexing);
+
+            skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "shaderStorageTexelBufferArrayDynamicIndexing", structure->shaderStorageTexelBufferArrayDynamicIndexing);
+
+            skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "shaderUniformBufferArrayNonUniformIndexing", structure->shaderUniformBufferArrayNonUniformIndexing);
+
+            skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "shaderSampledImageArrayNonUniformIndexing", structure->shaderSampledImageArrayNonUniformIndexing);
+
+            skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "shaderStorageBufferArrayNonUniformIndexing", structure->shaderStorageBufferArrayNonUniformIndexing);
+
+            skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "shaderStorageImageArrayNonUniformIndexing", structure->shaderStorageImageArrayNonUniformIndexing);
+
+            skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "shaderInputAttachmentArrayNonUniformIndexing", structure->shaderInputAttachmentArrayNonUniformIndexing);
+
+            skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "shaderUniformTexelBufferArrayNonUniformIndexing", structure->shaderUniformTexelBufferArrayNonUniformIndexing);
+
+            skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "shaderStorageTexelBufferArrayNonUniformIndexing", structure->shaderStorageTexelBufferArrayNonUniformIndexing);
+
+            skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "descriptorBindingUniformBufferUpdateAfterBind", structure->descriptorBindingUniformBufferUpdateAfterBind);
+
+            skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "descriptorBindingSampledImageUpdateAfterBind", structure->descriptorBindingSampledImageUpdateAfterBind);
+
+            skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "descriptorBindingStorageImageUpdateAfterBind", structure->descriptorBindingStorageImageUpdateAfterBind);
+
+            skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "descriptorBindingStorageBufferUpdateAfterBind", structure->descriptorBindingStorageBufferUpdateAfterBind);
+
+            skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "descriptorBindingUniformTexelBufferUpdateAfterBind", structure->descriptorBindingUniformTexelBufferUpdateAfterBind);
+
+            skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "descriptorBindingStorageTexelBufferUpdateAfterBind", structure->descriptorBindingStorageTexelBufferUpdateAfterBind);
+
+            skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "descriptorBindingUpdateUnusedWhilePending", structure->descriptorBindingUpdateUnusedWhilePending);
+
+            skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "descriptorBindingPartiallyBound", structure->descriptorBindingPartiallyBound);
+
+            skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "descriptorBindingVariableDescriptorCount", structure->descriptorBindingVariableDescriptorCount);
+
+            skip |= validate_bool32("VkPhysicalDeviceDescriptorIndexingFeaturesEXT", "runtimeDescriptorArray", structure->runtimeDescriptorArray);
+        } break;
+
+        // Validation code for VkDescriptorSetVariableDescriptorCountAllocateInfoEXT structure members
+        case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT: {
+            VkDescriptorSetVariableDescriptorCountAllocateInfoEXT *structure = (VkDescriptorSetVariableDescriptorCountAllocateInfoEXT *) header;
+            skip |= validate_array("VkDescriptorSetVariableDescriptorCountAllocateInfoEXT", "descriptorSetCount", "pDescriptorCounts", structure->descriptorSetCount, &structure->pDescriptorCounts, false, true, kVUIDUndefined, "VUID-VkDescriptorSetVariableDescriptorCountAllocateInfoEXT-pDescriptorCounts-parameter");
+        } break;
+
+        // Validation code for VkPipelineViewportShadingRateImageStateCreateInfoNV structure members
+        case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV: {
+            VkPipelineViewportShadingRateImageStateCreateInfoNV *structure = (VkPipelineViewportShadingRateImageStateCreateInfoNV *) header;
+            skip |= validate_bool32("VkPipelineViewportShadingRateImageStateCreateInfoNV", "shadingRateImageEnable", structure->shadingRateImageEnable);
+
+            if (structure->pShadingRatePalettes != NULL)
+            {
+                for (uint32_t viewportIndex = 0; viewportIndex < structure->viewportCount; ++viewportIndex)
+                {
+                    skip |= validate_ranged_enum_array("VkPipelineViewportShadingRateImageStateCreateInfoNV", ParameterName("pShadingRatePalettes[%i].shadingRatePaletteEntryCount", ParameterName::IndexVector{ viewportIndex }), ParameterName("pShadingRatePalettes[%i].pShadingRatePaletteEntries", ParameterName::IndexVector{ viewportIndex }), "VkShadingRatePaletteEntryNV", AllVkShadingRatePaletteEntryNVEnums, structure->pShadingRatePalettes[viewportIndex].shadingRatePaletteEntryCount, structure->pShadingRatePalettes[viewportIndex].pShadingRatePaletteEntries, true, true);
+                }
+            }
+        } break;
+
+        // Validation code for VkPhysicalDeviceShadingRateImageFeaturesNV structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV: {
+            VkPhysicalDeviceShadingRateImageFeaturesNV *structure = (VkPhysicalDeviceShadingRateImageFeaturesNV *) header;
+            skip |= validate_bool32("VkPhysicalDeviceShadingRateImageFeaturesNV", "shadingRateImage", structure->shadingRateImage);
+
+            skip |= validate_bool32("VkPhysicalDeviceShadingRateImageFeaturesNV", "shadingRateCoarseSampleOrder", structure->shadingRateCoarseSampleOrder);
+        } break;
+
+        // Validation code for VkPipelineViewportCoarseSampleOrderStateCreateInfoNV structure members
+        case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV: {
+            VkPipelineViewportCoarseSampleOrderStateCreateInfoNV *structure = (VkPipelineViewportCoarseSampleOrderStateCreateInfoNV *) header;
+            skip |= validate_ranged_enum("VkPipelineViewportCoarseSampleOrderStateCreateInfoNV", "sampleOrderType", "VkCoarseSampleOrderTypeNV", AllVkCoarseSampleOrderTypeNVEnums, structure->sampleOrderType, "VUID-VkPipelineViewportCoarseSampleOrderStateCreateInfoNV-sampleOrderType-parameter");
+
+            skip |= validate_array("VkPipelineViewportCoarseSampleOrderStateCreateInfoNV", "customSampleOrderCount", "pCustomSampleOrders", structure->customSampleOrderCount, &structure->pCustomSampleOrders, false, true, kVUIDUndefined, "VUID-VkPipelineViewportCoarseSampleOrderStateCreateInfoNV-pCustomSampleOrders-parameter");
+
+            if (structure->pCustomSampleOrders != NULL)
+            {
+                for (uint32_t customSampleOrderIndex = 0; customSampleOrderIndex < structure->customSampleOrderCount; ++customSampleOrderIndex)
+                {
+                    skip |= validate_ranged_enum("VkPipelineViewportCoarseSampleOrderStateCreateInfoNV", ParameterName("pCustomSampleOrders[%i].shadingRate", ParameterName::IndexVector{ customSampleOrderIndex }), "VkShadingRatePaletteEntryNV", AllVkShadingRatePaletteEntryNVEnums, structure->pCustomSampleOrders[customSampleOrderIndex].shadingRate, "VUID-VkCoarseSampleOrderCustomNV-shadingRate-parameter");
+
+                    skip |= validate_array("VkPipelineViewportCoarseSampleOrderStateCreateInfoNV", ParameterName("pCustomSampleOrders[%i].sampleLocationCount", ParameterName::IndexVector{ customSampleOrderIndex }), ParameterName("pCustomSampleOrders[%i].pSampleLocations", ParameterName::IndexVector{ customSampleOrderIndex }), structure->pCustomSampleOrders[customSampleOrderIndex].sampleLocationCount, &structure->pCustomSampleOrders[customSampleOrderIndex].pSampleLocations, true, true, "VUID-VkCoarseSampleOrderCustomNV-sampleLocationCount-arraylength", "VUID-VkCoarseSampleOrderCustomNV-pSampleLocations-parameter");
+
+                    if (structure->pCustomSampleOrders[customSampleOrderIndex].pSampleLocations != NULL)
+                    {
+                        for (uint32_t sampleLocationIndex = 0; sampleLocationIndex < structure->pCustomSampleOrders[customSampleOrderIndex].sampleLocationCount; ++sampleLocationIndex)
+                        {
+                        }
+                    }
+                }
+            }
+        } break;
+
+        // Validation code for VkWriteDescriptorSetAccelerationStructureNV structure members
+        case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV: {
+            VkWriteDescriptorSetAccelerationStructureNV *structure = (VkWriteDescriptorSetAccelerationStructureNV *) header;
+            skip |= validate_handle_array("VkWriteDescriptorSetAccelerationStructureNV", "accelerationStructureCount", "pAccelerationStructures", structure->accelerationStructureCount, structure->pAccelerationStructures, true, true);
+        } break;
+
+        // Validation code for VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV: {
+            VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV *structure = (VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV *) header;
+            skip |= validate_bool32("VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV", "representativeFragmentTest", structure->representativeFragmentTest);
+        } break;
+
+        // Validation code for VkPipelineRepresentativeFragmentTestStateCreateInfoNV structure members
+        case VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV: {
+            VkPipelineRepresentativeFragmentTestStateCreateInfoNV *structure = (VkPipelineRepresentativeFragmentTestStateCreateInfoNV *) header;
+            skip |= validate_bool32("VkPipelineRepresentativeFragmentTestStateCreateInfoNV", "representativeFragmentTestEnable", structure->representativeFragmentTestEnable);
+        } break;
+
+        // Validation code for VkPhysicalDeviceImageViewImageFormatInfoEXT structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT: {
+            VkPhysicalDeviceImageViewImageFormatInfoEXT *structure = (VkPhysicalDeviceImageViewImageFormatInfoEXT *) header;
+            skip |= validate_ranged_enum("VkPhysicalDeviceImageViewImageFormatInfoEXT", "imageViewType", "VkImageViewType", AllVkImageViewTypeEnums, structure->imageViewType, "VUID-VkPhysicalDeviceImageViewImageFormatInfoEXT-imageViewType-parameter");
+        } break;
+
+        // Validation code for VkDeviceQueueGlobalPriorityCreateInfoEXT structure members
+        case VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT: {
+            VkDeviceQueueGlobalPriorityCreateInfoEXT *structure = (VkDeviceQueueGlobalPriorityCreateInfoEXT *) header;
+            skip |= validate_ranged_enum("VkDeviceQueueGlobalPriorityCreateInfoEXT", "globalPriority", "VkQueueGlobalPriorityEXT", AllVkQueueGlobalPriorityEXTEnums, structure->globalPriority, "VUID-VkDeviceQueueGlobalPriorityCreateInfoEXT-globalPriority-parameter");
+        } break;
+
+        // Validation code for VkImportMemoryHostPointerInfoEXT structure members
+        case VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT: {
+            VkImportMemoryHostPointerInfoEXT *structure = (VkImportMemoryHostPointerInfoEXT *) header;
+            skip |= validate_flags("VkImportMemoryHostPointerInfoEXT", "handleType", "VkExternalMemoryHandleTypeFlagBits", AllVkExternalMemoryHandleTypeFlagBits, structure->handleType, kRequiredSingleBit, "VUID-VkImportMemoryHostPointerInfoEXT-handleType-parameter", "VUID-VkImportMemoryHostPointerInfoEXT-handleType-parameter");
+
+            skip |= validate_required_pointer("VkImportMemoryHostPointerInfoEXT", "pHostPointer", structure->pHostPointer, kVUIDUndefined);
+        } break;
+
+        // Validation code for VkPipelineCompilerControlCreateInfoAMD structure members
+        case VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD: {
+            VkPipelineCompilerControlCreateInfoAMD *structure = (VkPipelineCompilerControlCreateInfoAMD *) header;
+            skip |= validate_reserved_flags("VkPipelineCompilerControlCreateInfoAMD", "compilerControlFlags", structure->compilerControlFlags, "VUID-VkPipelineCompilerControlCreateInfoAMD-compilerControlFlags-zerobitmask");
+        } break;
+
+        // Validation code for VkDeviceMemoryOverallocationCreateInfoAMD structure members
+        case VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD: {
+            VkDeviceMemoryOverallocationCreateInfoAMD *structure = (VkDeviceMemoryOverallocationCreateInfoAMD *) header;
+            skip |= validate_ranged_enum("VkDeviceMemoryOverallocationCreateInfoAMD", "overallocationBehavior", "VkMemoryOverallocationBehaviorAMD", AllVkMemoryOverallocationBehaviorAMDEnums, structure->overallocationBehavior, "VUID-VkDeviceMemoryOverallocationCreateInfoAMD-overallocationBehavior-parameter");
+        } break;
+
+        // Validation code for VkPipelineVertexInputDivisorStateCreateInfoEXT structure members
+        case VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT: {
+            VkPipelineVertexInputDivisorStateCreateInfoEXT *structure = (VkPipelineVertexInputDivisorStateCreateInfoEXT *) header;
+            skip |= validate_array("VkPipelineVertexInputDivisorStateCreateInfoEXT", "vertexBindingDivisorCount", "pVertexBindingDivisors", structure->vertexBindingDivisorCount, &structure->pVertexBindingDivisors, true, true, "VUID-VkPipelineVertexInputDivisorStateCreateInfoEXT-vertexBindingDivisorCount-arraylength", "VUID-VkPipelineVertexInputDivisorStateCreateInfoEXT-pVertexBindingDivisors-parameter");
+
+            if (structure->pVertexBindingDivisors != NULL)
+            {
+                for (uint32_t vertexBindingDivisorIndex = 0; vertexBindingDivisorIndex < structure->vertexBindingDivisorCount; ++vertexBindingDivisorIndex)
+                {
+                }
+            }
+        } break;
+
+        // Validation code for VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT: {
+            VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *structure = (VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *) header;
+            skip |= validate_bool32("VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT", "vertexAttributeInstanceRateDivisor", structure->vertexAttributeInstanceRateDivisor);
+
+            skip |= validate_bool32("VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT", "vertexAttributeInstanceRateZeroDivisor", structure->vertexAttributeInstanceRateZeroDivisor);
+        } break;
+
+        // Validation code for VkPipelineCreationFeedbackCreateInfoEXT structure members
+        case VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT: {
+            VkPipelineCreationFeedbackCreateInfoEXT *structure = (VkPipelineCreationFeedbackCreateInfoEXT *) header;
+            skip |= validate_required_pointer("VkPipelineCreationFeedbackCreateInfoEXT", "pPipelineCreationFeedback", structure->pPipelineCreationFeedback, "VUID-VkPipelineCreationFeedbackCreateInfoEXT-pPipelineCreationFeedback-parameter");
+
+            if (structure->pPipelineCreationFeedback != NULL)
+            {
+            }
+
+            skip |= validate_array("VkPipelineCreationFeedbackCreateInfoEXT", "pipelineStageCreationFeedbackCount", "pPipelineStageCreationFeedbacks", structure->pipelineStageCreationFeedbackCount, &structure->pPipelineStageCreationFeedbacks, true, true, "VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-arraylength", "VUID-VkPipelineCreationFeedbackCreateInfoEXT-pPipelineStageCreationFeedbacks-parameter");
+
+            if (structure->pPipelineStageCreationFeedbacks != NULL)
+            {
+                for (uint32_t pipelineStageCreationFeedbackIndex = 0; pipelineStageCreationFeedbackIndex < structure->pipelineStageCreationFeedbackCount; ++pipelineStageCreationFeedbackIndex)
+                {
+                }
+            }
+        } break;
+
+        // Validation code for VkPhysicalDeviceComputeShaderDerivativesFeaturesNV structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV: {
+            VkPhysicalDeviceComputeShaderDerivativesFeaturesNV *structure = (VkPhysicalDeviceComputeShaderDerivativesFeaturesNV *) header;
+            skip |= validate_bool32("VkPhysicalDeviceComputeShaderDerivativesFeaturesNV", "computeDerivativeGroupQuads", structure->computeDerivativeGroupQuads);
+
+            skip |= validate_bool32("VkPhysicalDeviceComputeShaderDerivativesFeaturesNV", "computeDerivativeGroupLinear", structure->computeDerivativeGroupLinear);
+        } break;
+
+        // Validation code for VkPhysicalDeviceMeshShaderFeaturesNV structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV: {
+            VkPhysicalDeviceMeshShaderFeaturesNV *structure = (VkPhysicalDeviceMeshShaderFeaturesNV *) header;
+            skip |= validate_bool32("VkPhysicalDeviceMeshShaderFeaturesNV", "taskShader", structure->taskShader);
+
+            skip |= validate_bool32("VkPhysicalDeviceMeshShaderFeaturesNV", "meshShader", structure->meshShader);
+        } break;
+
+        // Validation code for VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV: {
+            VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV *structure = (VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV *) header;
+            skip |= validate_bool32("VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV", "fragmentShaderBarycentric", structure->fragmentShaderBarycentric);
+        } break;
+
+        // Validation code for VkPhysicalDeviceShaderImageFootprintFeaturesNV structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV: {
+            VkPhysicalDeviceShaderImageFootprintFeaturesNV *structure = (VkPhysicalDeviceShaderImageFootprintFeaturesNV *) header;
+            skip |= validate_bool32("VkPhysicalDeviceShaderImageFootprintFeaturesNV", "imageFootprint", structure->imageFootprint);
+        } break;
+
+        // Validation code for VkPipelineViewportExclusiveScissorStateCreateInfoNV structure members
+        case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV: {
+            VkPipelineViewportExclusiveScissorStateCreateInfoNV *structure = (VkPipelineViewportExclusiveScissorStateCreateInfoNV *) header;
+            if (structure->pExclusiveScissors != NULL)
+            {
+                for (uint32_t exclusiveScissorIndex = 0; exclusiveScissorIndex < structure->exclusiveScissorCount; ++exclusiveScissorIndex)
+                {
+                }
+            }
+        } break;
+
+        // Validation code for VkPhysicalDeviceExclusiveScissorFeaturesNV structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV: {
+            VkPhysicalDeviceExclusiveScissorFeaturesNV *structure = (VkPhysicalDeviceExclusiveScissorFeaturesNV *) header;
+            skip |= validate_bool32("VkPhysicalDeviceExclusiveScissorFeaturesNV", "exclusiveScissor", structure->exclusiveScissor);
+        } break;
+
+        // Validation code for VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL: {
+            VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL *structure = (VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL *) header;
+            skip |= validate_bool32("VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL", "shaderIntegerFunctions2", structure->shaderIntegerFunctions2);
+        } break;
+
+        // Validation code for VkSwapchainDisplayNativeHdrCreateInfoAMD structure members
+        case VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD: {
+            VkSwapchainDisplayNativeHdrCreateInfoAMD *structure = (VkSwapchainDisplayNativeHdrCreateInfoAMD *) header;
+            skip |= validate_bool32("VkSwapchainDisplayNativeHdrCreateInfoAMD", "localDimmingEnable", structure->localDimmingEnable);
+        } break;
+
+        // Validation code for VkRenderPassFragmentDensityMapCreateInfoEXT structure members
+        case VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT: {
+            VkRenderPassFragmentDensityMapCreateInfoEXT *structure = (VkRenderPassFragmentDensityMapCreateInfoEXT *) header;
+            skip |= validate_ranged_enum("VkRenderPassFragmentDensityMapCreateInfoEXT", "fragmentDensityMapAttachment.layout", "VkImageLayout", AllVkImageLayoutEnums, structure->fragmentDensityMapAttachment.layout, "VUID-VkAttachmentReference-layout-parameter");
+        } break;
+
+        // Validation code for VkPhysicalDeviceScalarBlockLayoutFeaturesEXT structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT: {
+            VkPhysicalDeviceScalarBlockLayoutFeaturesEXT *structure = (VkPhysicalDeviceScalarBlockLayoutFeaturesEXT *) header;
+            skip |= validate_bool32("VkPhysicalDeviceScalarBlockLayoutFeaturesEXT", "scalarBlockLayout", structure->scalarBlockLayout);
+        } break;
+
+        // Validation code for VkPhysicalDeviceSubgroupSizeControlFeaturesEXT structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT: {
+            VkPhysicalDeviceSubgroupSizeControlFeaturesEXT *structure = (VkPhysicalDeviceSubgroupSizeControlFeaturesEXT *) header;
+            skip |= validate_bool32("VkPhysicalDeviceSubgroupSizeControlFeaturesEXT", "subgroupSizeControl", structure->subgroupSizeControl);
+
+            skip |= validate_bool32("VkPhysicalDeviceSubgroupSizeControlFeaturesEXT", "computeFullSubgroups", structure->computeFullSubgroups);
+        } break;
+
+        // Validation code for VkPhysicalDeviceCoherentMemoryFeaturesAMD structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD: {
+            VkPhysicalDeviceCoherentMemoryFeaturesAMD *structure = (VkPhysicalDeviceCoherentMemoryFeaturesAMD *) header;
+            skip |= validate_bool32("VkPhysicalDeviceCoherentMemoryFeaturesAMD", "deviceCoherentMemory", structure->deviceCoherentMemory);
+        } break;
+
+        // Validation code for VkPhysicalDeviceMemoryPriorityFeaturesEXT structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT: {
+            VkPhysicalDeviceMemoryPriorityFeaturesEXT *structure = (VkPhysicalDeviceMemoryPriorityFeaturesEXT *) header;
+            skip |= validate_bool32("VkPhysicalDeviceMemoryPriorityFeaturesEXT", "memoryPriority", structure->memoryPriority);
+        } break;
+
+        // Validation code for VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV: {
+            VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV *structure = (VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV *) header;
+            skip |= validate_bool32("VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV", "dedicatedAllocationImageAliasing", structure->dedicatedAllocationImageAliasing);
+        } break;
+
+        // Validation code for VkPhysicalDeviceBufferDeviceAddressFeaturesEXT structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT: {
+            VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *structure = (VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *) header;
+            skip |= validate_bool32("VkPhysicalDeviceBufferDeviceAddressFeaturesEXT", "bufferDeviceAddress", structure->bufferDeviceAddress);
+
+            skip |= validate_bool32("VkPhysicalDeviceBufferDeviceAddressFeaturesEXT", "bufferDeviceAddressCaptureReplay", structure->bufferDeviceAddressCaptureReplay);
+
+            skip |= validate_bool32("VkPhysicalDeviceBufferDeviceAddressFeaturesEXT", "bufferDeviceAddressMultiDevice", structure->bufferDeviceAddressMultiDevice);
+        } break;
+
+        // Validation code for VkImageStencilUsageCreateInfoEXT structure members
+        case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT: {
+            VkImageStencilUsageCreateInfoEXT *structure = (VkImageStencilUsageCreateInfoEXT *) header;
+            skip |= validate_flags("VkImageStencilUsageCreateInfoEXT", "stencilUsage", "VkImageUsageFlagBits", AllVkImageUsageFlagBits, structure->stencilUsage, kRequiredFlags, "VUID-VkImageStencilUsageCreateInfoEXT-stencilUsage-parameter", "VUID-VkImageStencilUsageCreateInfoEXT-stencilUsage-requiredbitmask");
+        } break;
+
+        // Validation code for VkValidationFeaturesEXT structure members
+        case VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT: {
+            VkValidationFeaturesEXT *structure = (VkValidationFeaturesEXT *) header;
+            skip |= validate_ranged_enum_array("VkValidationFeaturesEXT", "enabledValidationFeatureCount", "pEnabledValidationFeatures", "VkValidationFeatureEnableEXT", AllVkValidationFeatureEnableEXTEnums, structure->enabledValidationFeatureCount, structure->pEnabledValidationFeatures, false, true);
+
+            skip |= validate_ranged_enum_array("VkValidationFeaturesEXT", "disabledValidationFeatureCount", "pDisabledValidationFeatures", "VkValidationFeatureDisableEXT", AllVkValidationFeatureDisableEXTEnums, structure->disabledValidationFeatureCount, structure->pDisabledValidationFeatures, false, true);
+        } break;
+
+        // Validation code for VkPhysicalDeviceCooperativeMatrixFeaturesNV structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV: {
+            VkPhysicalDeviceCooperativeMatrixFeaturesNV *structure = (VkPhysicalDeviceCooperativeMatrixFeaturesNV *) header;
+            skip |= validate_bool32("VkPhysicalDeviceCooperativeMatrixFeaturesNV", "cooperativeMatrix", structure->cooperativeMatrix);
+
+            skip |= validate_bool32("VkPhysicalDeviceCooperativeMatrixFeaturesNV", "cooperativeMatrixRobustBufferAccess", structure->cooperativeMatrixRobustBufferAccess);
+        } break;
+
+        // Validation code for VkPhysicalDeviceCoverageReductionModeFeaturesNV structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV: {
+            VkPhysicalDeviceCoverageReductionModeFeaturesNV *structure = (VkPhysicalDeviceCoverageReductionModeFeaturesNV *) header;
+            skip |= validate_bool32("VkPhysicalDeviceCoverageReductionModeFeaturesNV", "coverageReductionMode", structure->coverageReductionMode);
+        } break;
+
+        // Validation code for VkPipelineCoverageReductionStateCreateInfoNV structure members
+        case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV: {
+            VkPipelineCoverageReductionStateCreateInfoNV *structure = (VkPipelineCoverageReductionStateCreateInfoNV *) header;
+            skip |= validate_reserved_flags("VkPipelineCoverageReductionStateCreateInfoNV", "flags", structure->flags, "VUID-VkPipelineCoverageReductionStateCreateInfoNV-flags-zerobitmask");
+
+            skip |= validate_ranged_enum("VkPipelineCoverageReductionStateCreateInfoNV", "coverageReductionMode", "VkCoverageReductionModeNV", AllVkCoverageReductionModeNVEnums, structure->coverageReductionMode, "VUID-VkPipelineCoverageReductionStateCreateInfoNV-coverageReductionMode-parameter");
+        } break;
+
+        // Validation code for VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT: {
+            VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT *structure = (VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT *) header;
+            skip |= validate_bool32("VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT", "fragmentShaderSampleInterlock", structure->fragmentShaderSampleInterlock);
+
+            skip |= validate_bool32("VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT", "fragmentShaderPixelInterlock", structure->fragmentShaderPixelInterlock);
+
+            skip |= validate_bool32("VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT", "fragmentShaderShadingRateInterlock", structure->fragmentShaderShadingRateInterlock);
+        } break;
+
+        // Validation code for VkPhysicalDeviceYcbcrImageArraysFeaturesEXT structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT: {
+            VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *structure = (VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *) header;
+            skip |= validate_bool32("VkPhysicalDeviceYcbcrImageArraysFeaturesEXT", "ycbcrImageArrays", structure->ycbcrImageArrays);
+        } break;
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+        // Validation code for VkSurfaceFullScreenExclusiveInfoEXT structure members
+        case VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT: {
+            VkSurfaceFullScreenExclusiveInfoEXT *structure = (VkSurfaceFullScreenExclusiveInfoEXT *) header;
+            skip |= validate_ranged_enum("VkSurfaceFullScreenExclusiveInfoEXT", "fullScreenExclusive", "VkFullScreenExclusiveEXT", AllVkFullScreenExclusiveEXTEnums, structure->fullScreenExclusive, "VUID-VkSurfaceFullScreenExclusiveInfoEXT-fullScreenExclusive-parameter");
+        } break;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+        // Validation code for VkSurfaceCapabilitiesFullScreenExclusiveEXT structure members
+        case VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT: {
+            VkSurfaceCapabilitiesFullScreenExclusiveEXT *structure = (VkSurfaceCapabilitiesFullScreenExclusiveEXT *) header;
+            skip |= validate_bool32("VkSurfaceCapabilitiesFullScreenExclusiveEXT", "fullScreenExclusiveSupported", structure->fullScreenExclusiveSupported);
+        } break;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+        // Validation code for VkPhysicalDeviceLineRasterizationFeaturesEXT structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT: {
+            VkPhysicalDeviceLineRasterizationFeaturesEXT *structure = (VkPhysicalDeviceLineRasterizationFeaturesEXT *) header;
+            skip |= validate_bool32("VkPhysicalDeviceLineRasterizationFeaturesEXT", "rectangularLines", structure->rectangularLines);
+
+            skip |= validate_bool32("VkPhysicalDeviceLineRasterizationFeaturesEXT", "bresenhamLines", structure->bresenhamLines);
+
+            skip |= validate_bool32("VkPhysicalDeviceLineRasterizationFeaturesEXT", "smoothLines", structure->smoothLines);
+
+            skip |= validate_bool32("VkPhysicalDeviceLineRasterizationFeaturesEXT", "stippledRectangularLines", structure->stippledRectangularLines);
+
+            skip |= validate_bool32("VkPhysicalDeviceLineRasterizationFeaturesEXT", "stippledBresenhamLines", structure->stippledBresenhamLines);
+
+            skip |= validate_bool32("VkPhysicalDeviceLineRasterizationFeaturesEXT", "stippledSmoothLines", structure->stippledSmoothLines);
+        } break;
+
+        // Validation code for VkPipelineRasterizationLineStateCreateInfoEXT structure members
+        case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT: {
+            VkPipelineRasterizationLineStateCreateInfoEXT *structure = (VkPipelineRasterizationLineStateCreateInfoEXT *) header;
+            skip |= validate_ranged_enum("VkPipelineRasterizationLineStateCreateInfoEXT", "lineRasterizationMode", "VkLineRasterizationModeEXT", AllVkLineRasterizationModeEXTEnums, structure->lineRasterizationMode, "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-parameter");
+
+            skip |= validate_bool32("VkPipelineRasterizationLineStateCreateInfoEXT", "stippledLineEnable", structure->stippledLineEnable);
+        } break;
+
+        // Validation code for VkPhysicalDeviceHostQueryResetFeaturesEXT structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT: {
+            VkPhysicalDeviceHostQueryResetFeaturesEXT *structure = (VkPhysicalDeviceHostQueryResetFeaturesEXT *) header;
+            skip |= validate_bool32("VkPhysicalDeviceHostQueryResetFeaturesEXT", "hostQueryReset", structure->hostQueryReset);
+        } break;
+
+        // Validation code for VkPhysicalDeviceIndexTypeUint8FeaturesEXT structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT: {
+            VkPhysicalDeviceIndexTypeUint8FeaturesEXT *structure = (VkPhysicalDeviceIndexTypeUint8FeaturesEXT *) header;
+            skip |= validate_bool32("VkPhysicalDeviceIndexTypeUint8FeaturesEXT", "indexTypeUint8", structure->indexTypeUint8);
+        } break;
+
+        // Validation code for VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT: {
+            VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT *structure = (VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT *) header;
+            skip |= validate_bool32("VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT", "shaderDemoteToHelperInvocation", structure->shaderDemoteToHelperInvocation);
+        } break;
+
+        // Validation code for VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT structure members
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT: {
+            VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *structure = (VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *) header;
+            skip |= validate_bool32("VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT", "texelBufferAlignment", structure->texelBufferAlignment);
+        } break;
+        default:
+            skip = false;
+    }
+    return skip;
+}
+
+
+bool StatelessValidation::OutputExtensionError(const std::string &api_name, const std::string &extension_name) const {
+    return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                   kVUID_PVError_ExtensionNotEnabled, "Attempted to call %s() but its required extension %s has not been enabled\n",
+                   api_name.c_str(), extension_name.c_str());
+}
+
+
+bool StatelessValidation::PreCallValidateCreateInstance(
+    const VkInstanceCreateInfo*                 pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkInstance*                                 pInstance) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkCreateInstance", "pCreateInfo", "VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO, true, "VUID-vkCreateInstance-pCreateInfo-parameter", "VUID-VkInstanceCreateInfo-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkInstanceCreateInfo[] = { VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT, VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT };
+
+        skip |= validate_struct_pnext("vkCreateInstance", "pCreateInfo->pNext", "VkDebugReportCallbackCreateInfoEXT, VkDebugUtilsMessengerCreateInfoEXT, VkValidationFeaturesEXT, VkValidationFlagsEXT", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkInstanceCreateInfo), allowed_structs_VkInstanceCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkInstanceCreateInfo-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkCreateInstance", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkInstanceCreateInfo-flags-zerobitmask");
+
+        skip |= validate_struct_type("vkCreateInstance", "pCreateInfo->pApplicationInfo", "VK_STRUCTURE_TYPE_APPLICATION_INFO", pCreateInfo->pApplicationInfo, VK_STRUCTURE_TYPE_APPLICATION_INFO, false, "VUID-VkInstanceCreateInfo-pApplicationInfo-parameter", "VUID-VkApplicationInfo-sType-sType");
+
+        if (pCreateInfo->pApplicationInfo != NULL)
+        {
+            skip |= validate_struct_pnext("vkCreateInstance", "pCreateInfo->pApplicationInfo->pNext", NULL, pCreateInfo->pApplicationInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkApplicationInfo-pNext-pNext");
+        }
+
+        skip |= validate_string_array("vkCreateInstance", "pCreateInfo->enabledLayerCount", "pCreateInfo->ppEnabledLayerNames", pCreateInfo->enabledLayerCount, pCreateInfo->ppEnabledLayerNames, false, true, kVUIDUndefined, "VUID-VkInstanceCreateInfo-ppEnabledLayerNames-parameter");
+
+        skip |= validate_string_array("vkCreateInstance", "pCreateInfo->enabledExtensionCount", "pCreateInfo->ppEnabledExtensionNames", pCreateInfo->enabledExtensionCount, pCreateInfo->ppEnabledExtensionNames, false, true, kVUIDUndefined, "VUID-VkInstanceCreateInfo-ppEnabledExtensionNames-parameter");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateInstance", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateInstance", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateInstance", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateInstance", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateInstance", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateInstance", "pInstance", pInstance, "VUID-vkCreateInstance-pInstance-parameter");
+    if (!skip) skip |= manual_PreCallValidateCreateInstance(pCreateInfo, pAllocator, pInstance);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroyInstance(
+    VkInstance                                  instance,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroyInstance", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroyInstance", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroyInstance", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyInstance", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyInstance", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateEnumeratePhysicalDevices(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceCount,
+    VkPhysicalDevice*                           pPhysicalDevices) const {
+    bool skip = false;
+    skip |= validate_array("vkEnumeratePhysicalDevices", "pPhysicalDeviceCount", "pPhysicalDevices", pPhysicalDeviceCount, &pPhysicalDevices, true, false, false, kVUIDUndefined, "VUID-vkEnumeratePhysicalDevices-pPhysicalDevices-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceFeatures(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures*                   pFeatures) const {
+    bool skip = false;
+    skip |= validate_required_pointer("vkGetPhysicalDeviceFeatures", "pFeatures", pFeatures, "VUID-vkGetPhysicalDeviceFeatures-pFeatures-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties*                         pFormatProperties) const {
+    bool skip = false;
+    skip |= validate_ranged_enum("vkGetPhysicalDeviceFormatProperties", "format", "VkFormat", AllVkFormatEnums, format, "VUID-vkGetPhysicalDeviceFormatProperties-format-parameter");
+    skip |= validate_required_pointer("vkGetPhysicalDeviceFormatProperties", "pFormatProperties", pFormatProperties, "VUID-vkGetPhysicalDeviceFormatProperties-pFormatProperties-parameter");
+    if (pFormatProperties != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageCreateFlags                          flags,
+    VkImageFormatProperties*                    pImageFormatProperties) const {
+    bool skip = false;
+    skip |= validate_ranged_enum("vkGetPhysicalDeviceImageFormatProperties", "format", "VkFormat", AllVkFormatEnums, format, "VUID-vkGetPhysicalDeviceImageFormatProperties-format-parameter");
+    skip |= validate_ranged_enum("vkGetPhysicalDeviceImageFormatProperties", "type", "VkImageType", AllVkImageTypeEnums, type, "VUID-vkGetPhysicalDeviceImageFormatProperties-type-parameter");
+    skip |= validate_ranged_enum("vkGetPhysicalDeviceImageFormatProperties", "tiling", "VkImageTiling", AllVkImageTilingEnums, tiling, "VUID-vkGetPhysicalDeviceImageFormatProperties-tiling-parameter");
+    skip |= validate_flags("vkGetPhysicalDeviceImageFormatProperties", "usage", "VkImageUsageFlagBits", AllVkImageUsageFlagBits, usage, kRequiredFlags, "VUID-vkGetPhysicalDeviceImageFormatProperties-usage-parameter", "VUID-vkGetPhysicalDeviceImageFormatProperties-usage-requiredbitmask");
+    skip |= validate_flags("vkGetPhysicalDeviceImageFormatProperties", "flags", "VkImageCreateFlagBits", AllVkImageCreateFlagBits, flags, kOptionalFlags, "VUID-vkGetPhysicalDeviceImageFormatProperties-flags-parameter");
+    skip |= validate_required_pointer("vkGetPhysicalDeviceImageFormatProperties", "pImageFormatProperties", pImageFormatProperties, "VUID-vkGetPhysicalDeviceImageFormatProperties-pImageFormatProperties-parameter");
+    if (pImageFormatProperties != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties*                 pProperties) const {
+    bool skip = false;
+    skip |= validate_required_pointer("vkGetPhysicalDeviceProperties", "pProperties", pProperties, "VUID-vkGetPhysicalDeviceProperties-pProperties-parameter");
+    if (pProperties != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceQueueFamilyProperties(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties*                    pQueueFamilyProperties) const {
+    bool skip = false;
+    skip |= validate_array("vkGetPhysicalDeviceQueueFamilyProperties", "pQueueFamilyPropertyCount", "pQueueFamilyProperties", pQueueFamilyPropertyCount, &pQueueFamilyProperties, true, false, false, kVUIDUndefined, "VUID-vkGetPhysicalDeviceQueueFamilyProperties-pQueueFamilyProperties-parameter");
+    if (pQueueFamilyProperties != NULL)
+    {
+        for (uint32_t pQueueFamilyPropertyIndex = 0; pQueueFamilyPropertyIndex < *pQueueFamilyPropertyCount; ++pQueueFamilyPropertyIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceMemoryProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties*           pMemoryProperties) const {
+    bool skip = false;
+    skip |= validate_required_pointer("vkGetPhysicalDeviceMemoryProperties", "pMemoryProperties", pMemoryProperties, "VUID-vkGetPhysicalDeviceMemoryProperties-pMemoryProperties-parameter");
+    if (pMemoryProperties != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCreateDevice(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDeviceCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDevice*                                   pDevice) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkCreateDevice", "pCreateInfo", "VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, true, "VUID-vkCreateDevice-pCreateInfo-parameter", "VUID-VkDeviceCreateInfo-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkDeviceCreateInfo[] = { VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO, VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT };
+
+        skip |= validate_struct_pnext("vkCreateDevice", "pCreateInfo->pNext", "VkDeviceGroupDeviceCreateInfo, VkDeviceMemoryOverallocationCreateInfoAMD, VkPhysicalDevice16BitStorageFeatures, VkPhysicalDevice8BitStorageFeaturesKHR, VkPhysicalDeviceASTCDecodeFeaturesEXT, VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT, VkPhysicalDeviceBufferDeviceAddressFeaturesEXT, VkPhysicalDeviceBufferDeviceAddressFeaturesKHR, VkPhysicalDeviceCoherentMemoryFeaturesAMD, VkPhysicalDeviceComputeShaderDerivativesFeaturesNV, VkPhysicalDeviceConditionalRenderingFeaturesEXT, VkPhysicalDeviceCooperativeMatrixFeaturesNV, VkPhysicalDeviceCornerSampledImageFeaturesNV, VkPhysicalDeviceCoverageReductionModeFeaturesNV, VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, VkPhysicalDeviceDepthClipEnableFeaturesEXT, VkPhysicalDeviceDescriptorIndexingFeaturesEXT, VkPhysicalDeviceExclusiveScissorFeaturesNV, VkPhysicalDeviceFeatures2, VkPhysicalDeviceFragmentDensityMapFeaturesEXT, VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV, VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT, VkPhysicalDeviceHostQueryResetFeaturesEXT, VkPhysicalDeviceImagelessFramebufferFeaturesKHR, VkPhysicalDeviceIndexTypeUint8FeaturesEXT, VkPhysicalDeviceInlineUniformBlockFeaturesEXT, VkPhysicalDeviceLineRasterizationFeaturesEXT, VkPhysicalDeviceMemoryPriorityFeaturesEXT, VkPhysicalDeviceMeshShaderFeaturesNV, VkPhysicalDeviceMultiviewFeatures, VkPhysicalDevicePerformanceQueryFeaturesKHR, VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR, VkPhysicalDeviceProtectedMemoryFeatures, VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV, VkPhysicalDeviceSamplerYcbcrConversionFeatures, VkPhysicalDeviceScalarBlockLayoutFeaturesEXT, VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR, VkPhysicalDeviceShaderAtomicInt64FeaturesKHR, VkPhysicalDeviceShaderClockFeaturesKHR, VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT, VkPhysicalDeviceShaderDrawParametersFeatures, VkPhysicalDeviceShaderFloat16Int8FeaturesKHR, VkPhysicalDeviceShaderImageFootprintFeaturesNV, VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, VkPhysicalDeviceShaderSMBuiltinsFeaturesNV, VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR, VkPhysicalDeviceShadingRateImageFeaturesNV, VkPhysicalDeviceSubgroupSizeControlFeaturesEXT, VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT, VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT, VkPhysicalDeviceTimelineSemaphoreFeaturesKHR, VkPhysicalDeviceTransformFeedbackFeaturesEXT, VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR, VkPhysicalDeviceVariablePointersFeatures, VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT, VkPhysicalDeviceVulkanMemoryModelFeaturesKHR, VkPhysicalDeviceYcbcrImageArraysFeaturesEXT", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkDeviceCreateInfo), allowed_structs_VkDeviceCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkDeviceCreateInfo-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkCreateDevice", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkDeviceCreateInfo-flags-zerobitmask");
+
+        skip |= validate_struct_type_array("vkCreateDevice", "pCreateInfo->queueCreateInfoCount", "pCreateInfo->pQueueCreateInfos", "VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO", pCreateInfo->queueCreateInfoCount, pCreateInfo->pQueueCreateInfos, VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, true, true, "VUID-VkDeviceQueueCreateInfo-sType-sType", "VUID-VkDeviceCreateInfo-pQueueCreateInfos-parameter", "VUID-VkDeviceCreateInfo-queueCreateInfoCount-arraylength");
+
+        if (pCreateInfo->pQueueCreateInfos != NULL)
+        {
+            for (uint32_t queueCreateInfoIndex = 0; queueCreateInfoIndex < pCreateInfo->queueCreateInfoCount; ++queueCreateInfoIndex)
+            {
+                const VkStructureType allowed_structs_VkDeviceQueueCreateInfo[] = { VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT };
+
+                skip |= validate_struct_pnext("vkCreateDevice", ParameterName("pCreateInfo->pQueueCreateInfos[%i].pNext", ParameterName::IndexVector{ queueCreateInfoIndex }), "VkDeviceQueueGlobalPriorityCreateInfoEXT", pCreateInfo->pQueueCreateInfos[queueCreateInfoIndex].pNext, ARRAY_SIZE(allowed_structs_VkDeviceQueueCreateInfo), allowed_structs_VkDeviceQueueCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkDeviceQueueCreateInfo-pNext-pNext");
+
+                skip |= validate_flags("vkCreateDevice", ParameterName("pCreateInfo->pQueueCreateInfos[%i].flags", ParameterName::IndexVector{ queueCreateInfoIndex }), "VkDeviceQueueCreateFlagBits", AllVkDeviceQueueCreateFlagBits, pCreateInfo->pQueueCreateInfos[queueCreateInfoIndex].flags, kOptionalFlags, "VUID-VkDeviceQueueCreateInfo-flags-parameter");
+
+                skip |= validate_array("vkCreateDevice", ParameterName("pCreateInfo->pQueueCreateInfos[%i].queueCount", ParameterName::IndexVector{ queueCreateInfoIndex }), ParameterName("pCreateInfo->pQueueCreateInfos[%i].pQueuePriorities", ParameterName::IndexVector{ queueCreateInfoIndex }), pCreateInfo->pQueueCreateInfos[queueCreateInfoIndex].queueCount, &pCreateInfo->pQueueCreateInfos[queueCreateInfoIndex].pQueuePriorities, true, true, "VUID-VkDeviceQueueCreateInfo-queueCount-arraylength", "VUID-VkDeviceQueueCreateInfo-pQueuePriorities-parameter");
+            }
+        }
+
+        skip |= validate_string_array("vkCreateDevice", "pCreateInfo->enabledLayerCount", "pCreateInfo->ppEnabledLayerNames", pCreateInfo->enabledLayerCount, pCreateInfo->ppEnabledLayerNames, false, true, kVUIDUndefined, "VUID-VkDeviceCreateInfo-ppEnabledLayerNames-parameter");
+
+        skip |= validate_string_array("vkCreateDevice", "pCreateInfo->enabledExtensionCount", "pCreateInfo->ppEnabledExtensionNames", pCreateInfo->enabledExtensionCount, pCreateInfo->ppEnabledExtensionNames, false, true, kVUIDUndefined, "VUID-VkDeviceCreateInfo-ppEnabledExtensionNames-parameter");
+
+        if (pCreateInfo->pEnabledFeatures != NULL)
+        {
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->robustBufferAccess", pCreateInfo->pEnabledFeatures->robustBufferAccess);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->fullDrawIndexUint32", pCreateInfo->pEnabledFeatures->fullDrawIndexUint32);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->imageCubeArray", pCreateInfo->pEnabledFeatures->imageCubeArray);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->independentBlend", pCreateInfo->pEnabledFeatures->independentBlend);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->geometryShader", pCreateInfo->pEnabledFeatures->geometryShader);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->tessellationShader", pCreateInfo->pEnabledFeatures->tessellationShader);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->sampleRateShading", pCreateInfo->pEnabledFeatures->sampleRateShading);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->dualSrcBlend", pCreateInfo->pEnabledFeatures->dualSrcBlend);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->logicOp", pCreateInfo->pEnabledFeatures->logicOp);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->multiDrawIndirect", pCreateInfo->pEnabledFeatures->multiDrawIndirect);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->drawIndirectFirstInstance", pCreateInfo->pEnabledFeatures->drawIndirectFirstInstance);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->depthClamp", pCreateInfo->pEnabledFeatures->depthClamp);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->depthBiasClamp", pCreateInfo->pEnabledFeatures->depthBiasClamp);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->fillModeNonSolid", pCreateInfo->pEnabledFeatures->fillModeNonSolid);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->depthBounds", pCreateInfo->pEnabledFeatures->depthBounds);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->wideLines", pCreateInfo->pEnabledFeatures->wideLines);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->largePoints", pCreateInfo->pEnabledFeatures->largePoints);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->alphaToOne", pCreateInfo->pEnabledFeatures->alphaToOne);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->multiViewport", pCreateInfo->pEnabledFeatures->multiViewport);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->samplerAnisotropy", pCreateInfo->pEnabledFeatures->samplerAnisotropy);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->textureCompressionETC2", pCreateInfo->pEnabledFeatures->textureCompressionETC2);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->textureCompressionASTC_LDR", pCreateInfo->pEnabledFeatures->textureCompressionASTC_LDR);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->textureCompressionBC", pCreateInfo->pEnabledFeatures->textureCompressionBC);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->occlusionQueryPrecise", pCreateInfo->pEnabledFeatures->occlusionQueryPrecise);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->pipelineStatisticsQuery", pCreateInfo->pEnabledFeatures->pipelineStatisticsQuery);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->vertexPipelineStoresAndAtomics", pCreateInfo->pEnabledFeatures->vertexPipelineStoresAndAtomics);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->fragmentStoresAndAtomics", pCreateInfo->pEnabledFeatures->fragmentStoresAndAtomics);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderTessellationAndGeometryPointSize", pCreateInfo->pEnabledFeatures->shaderTessellationAndGeometryPointSize);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderImageGatherExtended", pCreateInfo->pEnabledFeatures->shaderImageGatherExtended);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderStorageImageExtendedFormats", pCreateInfo->pEnabledFeatures->shaderStorageImageExtendedFormats);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderStorageImageMultisample", pCreateInfo->pEnabledFeatures->shaderStorageImageMultisample);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderStorageImageReadWithoutFormat", pCreateInfo->pEnabledFeatures->shaderStorageImageReadWithoutFormat);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderStorageImageWriteWithoutFormat", pCreateInfo->pEnabledFeatures->shaderStorageImageWriteWithoutFormat);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderUniformBufferArrayDynamicIndexing", pCreateInfo->pEnabledFeatures->shaderUniformBufferArrayDynamicIndexing);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderSampledImageArrayDynamicIndexing", pCreateInfo->pEnabledFeatures->shaderSampledImageArrayDynamicIndexing);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderStorageBufferArrayDynamicIndexing", pCreateInfo->pEnabledFeatures->shaderStorageBufferArrayDynamicIndexing);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderStorageImageArrayDynamicIndexing", pCreateInfo->pEnabledFeatures->shaderStorageImageArrayDynamicIndexing);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderClipDistance", pCreateInfo->pEnabledFeatures->shaderClipDistance);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderCullDistance", pCreateInfo->pEnabledFeatures->shaderCullDistance);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderFloat64", pCreateInfo->pEnabledFeatures->shaderFloat64);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderInt64", pCreateInfo->pEnabledFeatures->shaderInt64);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderInt16", pCreateInfo->pEnabledFeatures->shaderInt16);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderResourceResidency", pCreateInfo->pEnabledFeatures->shaderResourceResidency);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->shaderResourceMinLod", pCreateInfo->pEnabledFeatures->shaderResourceMinLod);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->sparseBinding", pCreateInfo->pEnabledFeatures->sparseBinding);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->sparseResidencyBuffer", pCreateInfo->pEnabledFeatures->sparseResidencyBuffer);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->sparseResidencyImage2D", pCreateInfo->pEnabledFeatures->sparseResidencyImage2D);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->sparseResidencyImage3D", pCreateInfo->pEnabledFeatures->sparseResidencyImage3D);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->sparseResidency2Samples", pCreateInfo->pEnabledFeatures->sparseResidency2Samples);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->sparseResidency4Samples", pCreateInfo->pEnabledFeatures->sparseResidency4Samples);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->sparseResidency8Samples", pCreateInfo->pEnabledFeatures->sparseResidency8Samples);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->sparseResidency16Samples", pCreateInfo->pEnabledFeatures->sparseResidency16Samples);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->sparseResidencyAliased", pCreateInfo->pEnabledFeatures->sparseResidencyAliased);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->variableMultisampleRate", pCreateInfo->pEnabledFeatures->variableMultisampleRate);
+
+            skip |= validate_bool32("vkCreateDevice", "pCreateInfo->pEnabledFeatures->inheritedQueries", pCreateInfo->pEnabledFeatures->inheritedQueries);
+        }
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateDevice", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateDevice", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateDevice", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateDevice", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateDevice", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateDevice", "pDevice", pDevice, "VUID-vkCreateDevice-pDevice-parameter");
+    if (!skip) skip |= manual_PreCallValidateCreateDevice(physicalDevice, pCreateInfo, pAllocator, pDevice);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroyDevice(
+    VkDevice                                    device,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroyDevice", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroyDevice", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroyDevice", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyDevice", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyDevice", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetDeviceQueue(
+    VkDevice                                    device,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t                                    queueIndex,
+    VkQueue*                                    pQueue) const {
+    bool skip = false;
+    skip |= validate_required_pointer("vkGetDeviceQueue", "pQueue", pQueue, "VUID-vkGetDeviceQueue-pQueue-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateQueueSubmit(
+    VkQueue                                     queue,
+    uint32_t                                    submitCount,
+    const VkSubmitInfo*                         pSubmits,
+    VkFence                                     fence) const {
+    bool skip = false;
+    skip |= validate_struct_type_array("vkQueueSubmit", "submitCount", "pSubmits", "VK_STRUCTURE_TYPE_SUBMIT_INFO", submitCount, pSubmits, VK_STRUCTURE_TYPE_SUBMIT_INFO, false, true, "VUID-VkSubmitInfo-sType-sType", "VUID-vkQueueSubmit-pSubmits-parameter", kVUIDUndefined);
+    if (pSubmits != NULL)
+    {
+        for (uint32_t submitIndex = 0; submitIndex < submitCount; ++submitIndex)
+        {
+            const VkStructureType allowed_structs_VkSubmitInfo[] = { VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR, VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO, VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR, VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO, VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR, VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR, VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV };
+
+            skip |= validate_struct_pnext("vkQueueSubmit", ParameterName("pSubmits[%i].pNext", ParameterName::IndexVector{ submitIndex }), "VkD3D12FenceSubmitInfoKHR, VkDeviceGroupSubmitInfo, VkPerformanceQuerySubmitInfoKHR, VkProtectedSubmitInfo, VkTimelineSemaphoreSubmitInfoKHR, VkWin32KeyedMutexAcquireReleaseInfoKHR, VkWin32KeyedMutexAcquireReleaseInfoNV", pSubmits[submitIndex].pNext, ARRAY_SIZE(allowed_structs_VkSubmitInfo), allowed_structs_VkSubmitInfo, GeneratedVulkanHeaderVersion, "VUID-VkSubmitInfo-pNext-pNext");
+
+            skip |= validate_array("vkQueueSubmit", ParameterName("pSubmits[%i].waitSemaphoreCount", ParameterName::IndexVector{ submitIndex }), ParameterName("pSubmits[%i].pWaitSemaphores", ParameterName::IndexVector{ submitIndex }), pSubmits[submitIndex].waitSemaphoreCount, &pSubmits[submitIndex].pWaitSemaphores, false, true, kVUIDUndefined, "VUID-VkSubmitInfo-pWaitSemaphores-parameter");
+
+            skip |= validate_flags_array("vkQueueSubmit", ParameterName("pSubmits[%i].waitSemaphoreCount", ParameterName::IndexVector{ submitIndex }), ParameterName("pSubmits[%i].pWaitDstStageMask", ParameterName::IndexVector{ submitIndex }), "VkPipelineStageFlagBits", AllVkPipelineStageFlagBits, pSubmits[submitIndex].waitSemaphoreCount, pSubmits[submitIndex].pWaitDstStageMask, false, true);
+
+            skip |= validate_array("vkQueueSubmit", ParameterName("pSubmits[%i].commandBufferCount", ParameterName::IndexVector{ submitIndex }), ParameterName("pSubmits[%i].pCommandBuffers", ParameterName::IndexVector{ submitIndex }), pSubmits[submitIndex].commandBufferCount, &pSubmits[submitIndex].pCommandBuffers, false, true, kVUIDUndefined, "VUID-VkSubmitInfo-pCommandBuffers-parameter");
+
+            skip |= validate_array("vkQueueSubmit", ParameterName("pSubmits[%i].signalSemaphoreCount", ParameterName::IndexVector{ submitIndex }), ParameterName("pSubmits[%i].pSignalSemaphores", ParameterName::IndexVector{ submitIndex }), pSubmits[submitIndex].signalSemaphoreCount, &pSubmits[submitIndex].pSignalSemaphores, false, true, kVUIDUndefined, "VUID-VkSubmitInfo-pSignalSemaphores-parameter");
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateQueueWaitIdle(
+    VkQueue                                     queue) const {
+    bool skip = false;
+    // No xml-driven validation
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDeviceWaitIdle(
+    VkDevice                                    device) const {
+    bool skip = false;
+    // No xml-driven validation
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateAllocateMemory(
+    VkDevice                                    device,
+    const VkMemoryAllocateInfo*                 pAllocateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDeviceMemory*                             pMemory) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkAllocateMemory", "pAllocateInfo", "VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO", pAllocateInfo, VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, true, "VUID-vkAllocateMemory-pAllocateInfo-parameter", "VUID-VkMemoryAllocateInfo-sType-sType");
+    if (pAllocateInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkMemoryAllocateInfo[] = { VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV, VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO, VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV, VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR, VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV, VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID, VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR, VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT, VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR, VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV, VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO, VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR, VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT };
+
+        skip |= validate_struct_pnext("vkAllocateMemory", "pAllocateInfo->pNext", "VkDedicatedAllocationMemoryAllocateInfoNV, VkExportMemoryAllocateInfo, VkExportMemoryAllocateInfoNV, VkExportMemoryWin32HandleInfoKHR, VkExportMemoryWin32HandleInfoNV, VkImportAndroidHardwareBufferInfoANDROID, VkImportMemoryFdInfoKHR, VkImportMemoryHostPointerInfoEXT, VkImportMemoryWin32HandleInfoKHR, VkImportMemoryWin32HandleInfoNV, VkMemoryAllocateFlagsInfo, VkMemoryDedicatedAllocateInfo, VkMemoryOpaqueCaptureAddressAllocateInfoKHR, VkMemoryPriorityAllocateInfoEXT", pAllocateInfo->pNext, ARRAY_SIZE(allowed_structs_VkMemoryAllocateInfo), allowed_structs_VkMemoryAllocateInfo, GeneratedVulkanHeaderVersion, "VUID-VkMemoryAllocateInfo-pNext-pNext");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkAllocateMemory", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkAllocateMemory", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkAllocateMemory", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
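+        // VUID-VkAllocationCallbacks-pfnInternalAllocation-00635: pfnInternalAllocation and
+        // pfnInternalFree must either both be NULL or both be non-NULL, so providing one
+        // callback requires the other.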
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkAllocateMemory", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkAllocateMemory", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkAllocateMemory", "pMemory", pMemory, "VUID-vkAllocateMemory-pMemory-parameter");
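+    // Checks that cannot be expressed in the XML registry (manual_PreCallValidateAllocateMemory)
+    // run only when none of the generated checks above failed.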
+    if (!skip) skip |= manual_PreCallValidateAllocateMemory(device, pAllocateInfo, pAllocator, pMemory);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateFreeMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkFreeMemory", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkFreeMemory", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkFreeMemory", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkFreeMemory", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkFreeMemory", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateMapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                offset,
+    VkDeviceSize                                size,
+    VkMemoryMapFlags                            flags,
+    void**                                      ppData) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkMapMemory", "memory", memory);
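+    // VkMemoryMapFlags defines no valid bits at this header version, so flags must be zero.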
+    skip |= validate_reserved_flags("vkMapMemory", "flags", flags, "VUID-vkMapMemory-flags-zerobitmask");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateUnmapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkUnmapMemory", "memory", memory);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateFlushMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges) const {
+    bool skip = false;
+    skip |= validate_struct_type_array("vkFlushMappedMemoryRanges", "memoryRangeCount", "pMemoryRanges", "VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE", memoryRangeCount, pMemoryRanges, VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, true, true, "VUID-VkMappedMemoryRange-sType-sType", "VUID-vkFlushMappedMemoryRanges-pMemoryRanges-parameter", "VUID-vkFlushMappedMemoryRanges-memoryRangeCount-arraylength");
+    if (pMemoryRanges != NULL)
+    {
+        for (uint32_t memoryRangeIndex = 0; memoryRangeIndex < memoryRangeCount; ++memoryRangeIndex)
+        {
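+            // ParameterName expands the %i placeholders with the current loop index so error
+            // messages can identify the exact pMemoryRanges element.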
+            skip |= validate_struct_pnext("vkFlushMappedMemoryRanges", ParameterName("pMemoryRanges[%i].pNext", ParameterName::IndexVector{ memoryRangeIndex }), NULL, pMemoryRanges[memoryRangeIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkMappedMemoryRange-pNext-pNext");
+
+            skip |= validate_required_handle("vkFlushMappedMemoryRanges", ParameterName("pMemoryRanges[%i].memory", ParameterName::IndexVector{ memoryRangeIndex }), pMemoryRanges[memoryRangeIndex].memory);
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateInvalidateMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges) const {
+    bool skip = false;
+    skip |= validate_struct_type_array("vkInvalidateMappedMemoryRanges", "memoryRangeCount", "pMemoryRanges", "VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE", memoryRangeCount, pMemoryRanges, VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, true, true, "VUID-VkMappedMemoryRange-sType-sType", "VUID-vkInvalidateMappedMemoryRanges-pMemoryRanges-parameter", "VUID-vkInvalidateMappedMemoryRanges-memoryRangeCount-arraylength");
+    if (pMemoryRanges != NULL)
+    {
+        for (uint32_t memoryRangeIndex = 0; memoryRangeIndex < memoryRangeCount; ++memoryRangeIndex)
+        {
+            skip |= validate_struct_pnext("vkInvalidateMappedMemoryRanges", ParameterName("pMemoryRanges[%i].pNext", ParameterName::IndexVector{ memoryRangeIndex }), NULL, pMemoryRanges[memoryRangeIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkMappedMemoryRange-pNext-pNext");
+
+            skip |= validate_required_handle("vkInvalidateMappedMemoryRanges", ParameterName("pMemoryRanges[%i].memory", ParameterName::IndexVector{ memoryRangeIndex }), pMemoryRanges[memoryRangeIndex].memory);
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetDeviceMemoryCommitment(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize*                               pCommittedMemoryInBytes) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkGetDeviceMemoryCommitment", "memory", memory);
+    skip |= validate_required_pointer("vkGetDeviceMemoryCommitment", "pCommittedMemoryInBytes", pCommittedMemoryInBytes, "VUID-vkGetDeviceMemoryCommitment-pCommittedMemoryInBytes-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateBindBufferMemory(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkBindBufferMemory", "buffer", buffer);
+    skip |= validate_required_handle("vkBindBufferMemory", "memory", memory);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateBindImageMemory(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkBindImageMemory", "image", image);
+    skip |= validate_required_handle("vkBindImageMemory", "memory", memory);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetBufferMemoryRequirements(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkMemoryRequirements*                       pMemoryRequirements) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkGetBufferMemoryRequirements", "buffer", buffer);
+    skip |= validate_required_pointer("vkGetBufferMemoryRequirements", "pMemoryRequirements", pMemoryRequirements, "VUID-vkGetBufferMemoryRequirements-pMemoryRequirements-parameter");
+    if (pMemoryRequirements != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetImageMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkMemoryRequirements*                       pMemoryRequirements) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkGetImageMemoryRequirements", "image", image);
+    skip |= validate_required_pointer("vkGetImageMemoryRequirements", "pMemoryRequirements", pMemoryRequirements, "VUID-vkGetImageMemoryRequirements-pMemoryRequirements-parameter");
+    if (pMemoryRequirements != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetImageSparseMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements*            pSparseMemoryRequirements) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkGetImageSparseMemoryRequirements", "image", image);
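+    // pSparseMemoryRequirementCount is required, but pSparseMemoryRequirements may be NULL for
+    // the count-only query of the usual two-call enumeration pattern.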
+    skip |= validate_array("vkGetImageSparseMemoryRequirements", "pSparseMemoryRequirementCount", "pSparseMemoryRequirements", pSparseMemoryRequirementCount, &pSparseMemoryRequirements, true, false, false, kVUIDUndefined, "VUID-vkGetImageSparseMemoryRequirements-pSparseMemoryRequirements-parameter");
+    if (pSparseMemoryRequirements != NULL)
+    {
+        for (uint32_t pSparseMemoryRequirementIndex = 0; pSparseMemoryRequirementIndex < *pSparseMemoryRequirementCount; ++pSparseMemoryRequirementIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceSparseImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkSampleCountFlagBits                       samples,
+    VkImageUsageFlags                           usage,
+    VkImageTiling                               tiling,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties*              pProperties) const {
+    bool skip = false;
+    skip |= validate_ranged_enum("vkGetPhysicalDeviceSparseImageFormatProperties", "format", "VkFormat", AllVkFormatEnums, format, "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-format-parameter");
+    skip |= validate_ranged_enum("vkGetPhysicalDeviceSparseImageFormatProperties", "type", "VkImageType", AllVkImageTypeEnums, type, "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-type-parameter");
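+    // samples must contain exactly one bit from VkSampleCountFlagBits.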
+    skip |= validate_flags("vkGetPhysicalDeviceSparseImageFormatProperties", "samples", "VkSampleCountFlagBits", AllVkSampleCountFlagBits, samples, kRequiredSingleBit, "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-samples-parameter", "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-samples-parameter");
+    skip |= validate_flags("vkGetPhysicalDeviceSparseImageFormatProperties", "usage", "VkImageUsageFlagBits", AllVkImageUsageFlagBits, usage, kRequiredFlags, "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-usage-parameter", "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-usage-requiredbitmask");
+    skip |= validate_ranged_enum("vkGetPhysicalDeviceSparseImageFormatProperties", "tiling", "VkImageTiling", AllVkImageTilingEnums, tiling, "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-tiling-parameter");
+    skip |= validate_array("vkGetPhysicalDeviceSparseImageFormatProperties", "pPropertyCount", "pProperties", pPropertyCount, &pProperties, true, false, false, kVUIDUndefined, "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-pProperties-parameter");
+    if (pProperties != NULL)
+    {
+        for (uint32_t pPropertyIndex = 0; pPropertyIndex < *pPropertyCount; ++pPropertyIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateQueueBindSparse(
+    VkQueue                                     queue,
+    uint32_t                                    bindInfoCount,
+    const VkBindSparseInfo*                     pBindInfo,
+    VkFence                                     fence) const {
+    bool skip = false;
+    skip |= validate_struct_type_array("vkQueueBindSparse", "bindInfoCount", "pBindInfo", "VK_STRUCTURE_TYPE_BIND_SPARSE_INFO", bindInfoCount, pBindInfo, VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, false, true, "VUID-VkBindSparseInfo-sType-sType", "VUID-vkQueueBindSparse-pBindInfo-parameter", kVUIDUndefined);
+    if (pBindInfo != NULL)
+    {
+        for (uint32_t bindInfoIndex = 0; bindInfoIndex < bindInfoCount; ++bindInfoIndex)
+        {
+            const VkStructureType allowed_structs_VkBindSparseInfo[] = { VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO, VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR };
+
+            skip |= validate_struct_pnext("vkQueueBindSparse", ParameterName("pBindInfo[%i].pNext", ParameterName::IndexVector{ bindInfoIndex }), "VkDeviceGroupBindSparseInfo, VkTimelineSemaphoreSubmitInfoKHR", pBindInfo[bindInfoIndex].pNext, ARRAY_SIZE(allowed_structs_VkBindSparseInfo), allowed_structs_VkBindSparseInfo, GeneratedVulkanHeaderVersion, "VUID-VkBindSparseInfo-pNext-pNext");
+
+            skip |= validate_array("vkQueueBindSparse", ParameterName("pBindInfo[%i].waitSemaphoreCount", ParameterName::IndexVector{ bindInfoIndex }), ParameterName("pBindInfo[%i].pWaitSemaphores", ParameterName::IndexVector{ bindInfoIndex }), pBindInfo[bindInfoIndex].waitSemaphoreCount, &pBindInfo[bindInfoIndex].pWaitSemaphores, false, true, kVUIDUndefined, "VUID-VkBindSparseInfo-pWaitSemaphores-parameter");
+
+            skip |= validate_array("vkQueueBindSparse", ParameterName("pBindInfo[%i].bufferBindCount", ParameterName::IndexVector{ bindInfoIndex }), ParameterName("pBindInfo[%i].pBufferBinds", ParameterName::IndexVector{ bindInfoIndex }), pBindInfo[bindInfoIndex].bufferBindCount, &pBindInfo[bindInfoIndex].pBufferBinds, false, true, kVUIDUndefined, "VUID-VkBindSparseInfo-pBufferBinds-parameter");
+
+            if (pBindInfo[bindInfoIndex].pBufferBinds != NULL)
+            {
+                for (uint32_t bufferBindIndex = 0; bufferBindIndex < pBindInfo[bindInfoIndex].bufferBindCount; ++bufferBindIndex)
+                {
+                    skip |= validate_required_handle("vkQueueBindSparse", ParameterName("pBindInfo[%i].pBufferBinds[%i].buffer", ParameterName::IndexVector{ bindInfoIndex, bufferBindIndex }), pBindInfo[bindInfoIndex].pBufferBinds[bufferBindIndex].buffer);
+
+                    skip |= validate_array("vkQueueBindSparse", ParameterName("pBindInfo[%i].pBufferBinds[%i].bindCount", ParameterName::IndexVector{ bindInfoIndex, bufferBindIndex }), ParameterName("pBindInfo[%i].pBufferBinds[%i].pBinds", ParameterName::IndexVector{ bindInfoIndex, bufferBindIndex }), pBindInfo[bindInfoIndex].pBufferBinds[bufferBindIndex].bindCount, &pBindInfo[bindInfoIndex].pBufferBinds[bufferBindIndex].pBinds, true, true, "VUID-VkSparseBufferMemoryBindInfo-bindCount-arraylength", "VUID-VkSparseBufferMemoryBindInfo-pBinds-parameter");
+
+                    if (pBindInfo[bindInfoIndex].pBufferBinds[bufferBindIndex].pBinds != NULL)
+                    {
+                        for (uint32_t bindIndex = 0; bindIndex < pBindInfo[bindInfoIndex].pBufferBinds[bufferBindIndex].bindCount; ++bindIndex)
+                        {
+                            skip |= validate_flags("vkQueueBindSparse", ParameterName("pBindInfo[%i].pBufferBinds[%i].pBinds[%i].flags", ParameterName::IndexVector{ bindInfoIndex, bufferBindIndex, bindIndex }), "VkSparseMemoryBindFlagBits", AllVkSparseMemoryBindFlagBits, pBindInfo[bindInfoIndex].pBufferBinds[bufferBindIndex].pBinds[bindIndex].flags, kOptionalFlags, "VUID-VkSparseMemoryBind-flags-parameter");
+                        }
+                    }
+                }
+            }
+
+            skip |= validate_array("vkQueueBindSparse", ParameterName("pBindInfo[%i].imageOpaqueBindCount", ParameterName::IndexVector{ bindInfoIndex }), ParameterName("pBindInfo[%i].pImageOpaqueBinds", ParameterName::IndexVector{ bindInfoIndex }), pBindInfo[bindInfoIndex].imageOpaqueBindCount, &pBindInfo[bindInfoIndex].pImageOpaqueBinds, false, true, kVUIDUndefined, "VUID-VkBindSparseInfo-pImageOpaqueBinds-parameter");
+
+            if (pBindInfo[bindInfoIndex].pImageOpaqueBinds != NULL)
+            {
+                for (uint32_t imageOpaqueBindIndex = 0; imageOpaqueBindIndex < pBindInfo[bindInfoIndex].imageOpaqueBindCount; ++imageOpaqueBindIndex)
+                {
+                    skip |= validate_required_handle("vkQueueBindSparse", ParameterName("pBindInfo[%i].pImageOpaqueBinds[%i].image", ParameterName::IndexVector{ bindInfoIndex, imageOpaqueBindIndex }), pBindInfo[bindInfoIndex].pImageOpaqueBinds[imageOpaqueBindIndex].image);
+
+                    skip |= validate_array("vkQueueBindSparse", ParameterName("pBindInfo[%i].pImageOpaqueBinds[%i].bindCount", ParameterName::IndexVector{ bindInfoIndex, imageOpaqueBindIndex }), ParameterName("pBindInfo[%i].pImageOpaqueBinds[%i].pBinds", ParameterName::IndexVector{ bindInfoIndex, imageOpaqueBindIndex }), pBindInfo[bindInfoIndex].pImageOpaqueBinds[imageOpaqueBindIndex].bindCount, &pBindInfo[bindInfoIndex].pImageOpaqueBinds[imageOpaqueBindIndex].pBinds, true, true, "VUID-VkSparseImageOpaqueMemoryBindInfo-bindCount-arraylength", "VUID-VkSparseImageOpaqueMemoryBindInfo-pBinds-parameter");
+
+                    if (pBindInfo[bindInfoIndex].pImageOpaqueBinds[imageOpaqueBindIndex].pBinds != NULL)
+                    {
+                        for (uint32_t bindIndex = 0; bindIndex < pBindInfo[bindInfoIndex].pImageOpaqueBinds[imageOpaqueBindIndex].bindCount; ++bindIndex)
+                        {
+                            skip |= validate_flags("vkQueueBindSparse", ParameterName("pBindInfo[%i].pImageOpaqueBinds[%i].pBinds[%i].flags", ParameterName::IndexVector{ bindInfoIndex, imageOpaqueBindIndex, bindIndex }), "VkSparseMemoryBindFlagBits", AllVkSparseMemoryBindFlagBits, pBindInfo[bindInfoIndex].pImageOpaqueBinds[imageOpaqueBindIndex].pBinds[bindIndex].flags, kOptionalFlags, "VUID-VkSparseMemoryBind-flags-parameter");
+                        }
+                    }
+                }
+            }
+
+            skip |= validate_array("vkQueueBindSparse", ParameterName("pBindInfo[%i].imageBindCount", ParameterName::IndexVector{ bindInfoIndex }), ParameterName("pBindInfo[%i].pImageBinds", ParameterName::IndexVector{ bindInfoIndex }), pBindInfo[bindInfoIndex].imageBindCount, &pBindInfo[bindInfoIndex].pImageBinds, false, true, kVUIDUndefined, "VUID-VkBindSparseInfo-pImageBinds-parameter");
+
+            if (pBindInfo[bindInfoIndex].pImageBinds != NULL)
+            {
+                for (uint32_t imageBindIndex = 0; imageBindIndex < pBindInfo[bindInfoIndex].imageBindCount; ++imageBindIndex)
+                {
+                    skip |= validate_required_handle("vkQueueBindSparse", ParameterName("pBindInfo[%i].pImageBinds[%i].image", ParameterName::IndexVector{ bindInfoIndex, imageBindIndex }), pBindInfo[bindInfoIndex].pImageBinds[imageBindIndex].image);
+
+                    skip |= validate_array("vkQueueBindSparse", ParameterName("pBindInfo[%i].pImageBinds[%i].bindCount", ParameterName::IndexVector{ bindInfoIndex, imageBindIndex }), ParameterName("pBindInfo[%i].pImageBinds[%i].pBinds", ParameterName::IndexVector{ bindInfoIndex, imageBindIndex }), pBindInfo[bindInfoIndex].pImageBinds[imageBindIndex].bindCount, &pBindInfo[bindInfoIndex].pImageBinds[imageBindIndex].pBinds, true, true, "VUID-VkSparseImageMemoryBindInfo-bindCount-arraylength", "VUID-VkSparseImageMemoryBindInfo-pBinds-parameter");
+
+                    if (pBindInfo[bindInfoIndex].pImageBinds[imageBindIndex].pBinds != NULL)
+                    {
+                        for (uint32_t bindIndex = 0; bindIndex < pBindInfo[bindInfoIndex].pImageBinds[imageBindIndex].bindCount; ++bindIndex)
+                        {
+                            skip |= validate_flags("vkQueueBindSparse", ParameterName("pBindInfo[%i].pImageBinds[%i].pBinds[%i].subresource.aspectMask", ParameterName::IndexVector{ bindInfoIndex, imageBindIndex, bindIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pBindInfo[bindInfoIndex].pImageBinds[imageBindIndex].pBinds[bindIndex].subresource.aspectMask, kRequiredFlags, "VUID-VkImageSubresource-aspectMask-parameter", "VUID-VkImageSubresource-aspectMask-requiredbitmask");
+
+                            // No xml-driven validation
+
+                            // No xml-driven validation
+
+                            skip |= validate_flags("vkQueueBindSparse", ParameterName("pBindInfo[%i].pImageBinds[%i].pBinds[%i].flags", ParameterName::IndexVector{ bindInfoIndex, imageBindIndex, bindIndex }), "VkSparseMemoryBindFlagBits", AllVkSparseMemoryBindFlagBits, pBindInfo[bindInfoIndex].pImageBinds[imageBindIndex].pBinds[bindIndex].flags, kOptionalFlags, "VUID-VkSparseImageMemoryBind-flags-parameter");
+                        }
+                    }
+                }
+            }
+
+            skip |= validate_array("vkQueueBindSparse", ParameterName("pBindInfo[%i].signalSemaphoreCount", ParameterName::IndexVector{ bindInfoIndex }), ParameterName("pBindInfo[%i].pSignalSemaphores", ParameterName::IndexVector{ bindInfoIndex }), pBindInfo[bindInfoIndex].signalSemaphoreCount, &pBindInfo[bindInfoIndex].pSignalSemaphores, false, true, kVUIDUndefined, "VUID-VkBindSparseInfo-pSignalSemaphores-parameter");
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCreateFence(
+    VkDevice                                    device,
+    const VkFenceCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkCreateFence", "pCreateInfo", "VK_STRUCTURE_TYPE_FENCE_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, true, "VUID-vkCreateFence-pCreateInfo-parameter", "VUID-VkFenceCreateInfo-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkFenceCreateInfo[] = { VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO, VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR };
+
+        skip |= validate_struct_pnext("vkCreateFence", "pCreateInfo->pNext", "VkExportFenceCreateInfo, VkExportFenceWin32HandleInfoKHR", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkFenceCreateInfo), allowed_structs_VkFenceCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkFenceCreateInfo-pNext-pNext");
+
+        skip |= validate_flags("vkCreateFence", "pCreateInfo->flags", "VkFenceCreateFlagBits", AllVkFenceCreateFlagBits, pCreateInfo->flags, kOptionalFlags, "VUID-VkFenceCreateInfo-flags-parameter");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateFence", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateFence", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateFence", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateFence", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateFence", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateFence", "pFence", pFence, "VUID-vkCreateFence-pFence-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroyFence(
+    VkDevice                                    device,
+    VkFence                                     fence,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroyFence", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroyFence", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroyFence", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyFence", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyFence", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateResetFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences) const {
+    bool skip = false;
+    skip |= validate_handle_array("vkResetFences", "fenceCount", "pFences", fenceCount, pFences, true, true);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetFenceStatus(
+    VkDevice                                    device,
+    VkFence                                     fence) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkGetFenceStatus", "fence", fence);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateWaitForFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences,
+    VkBool32                                    waitAll,
+    uint64_t                                    timeout) const {
+    bool skip = false;
+    skip |= validate_handle_array("vkWaitForFences", "fenceCount", "pFences", fenceCount, pFences, true, true);
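+    // VkBool32 parameters must be exactly VK_TRUE or VK_FALSE.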
+    skip |= validate_bool32("vkWaitForFences", "waitAll", waitAll);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCreateSemaphore(
+    VkDevice                                    device,
+    const VkSemaphoreCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSemaphore*                                pSemaphore) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkCreateSemaphore", "pCreateInfo", "VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, true, "VUID-vkCreateSemaphore-pCreateInfo-parameter", "VUID-VkSemaphoreCreateInfo-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkSemaphoreCreateInfo[] = { VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO, VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR, VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR };
+
+        skip |= validate_struct_pnext("vkCreateSemaphore", "pCreateInfo->pNext", "VkExportSemaphoreCreateInfo, VkExportSemaphoreWin32HandleInfoKHR, VkSemaphoreTypeCreateInfoKHR", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkSemaphoreCreateInfo), allowed_structs_VkSemaphoreCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkSemaphoreCreateInfo-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkCreateSemaphore", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkSemaphoreCreateInfo-flags-zerobitmask");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateSemaphore", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateSemaphore", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateSemaphore", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateSemaphore", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateSemaphore", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateSemaphore", "pSemaphore", pSemaphore, "VUID-vkCreateSemaphore-pSemaphore-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroySemaphore(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroySemaphore", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroySemaphore", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroySemaphore", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroySemaphore", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroySemaphore", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCreateEvent(
+    VkDevice                                    device,
+    const VkEventCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkEvent*                                    pEvent) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkCreateEvent", "pCreateInfo", "VK_STRUCTURE_TYPE_EVENT_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_EVENT_CREATE_INFO, true, "VUID-vkCreateEvent-pCreateInfo-parameter", "VUID-VkEventCreateInfo-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCreateEvent", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkEventCreateInfo-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkCreateEvent", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkEventCreateInfo-flags-zerobitmask");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateEvent", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateEvent", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateEvent", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateEvent", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateEvent", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateEvent", "pEvent", pEvent, "VUID-vkCreateEvent-pEvent-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroyEvent(
+    VkDevice                                    device,
+    VkEvent                                     event,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroyEvent", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroyEvent", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroyEvent", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyEvent", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyEvent", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetEventStatus(
+    VkDevice                                    device,
+    VkEvent                                     event) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkGetEventStatus", "event", event);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateSetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkSetEvent", "event", event);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateResetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkResetEvent", "event", event);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCreateQueryPool(
+    VkDevice                                    device,
+    const VkQueryPoolCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkQueryPool*                                pQueryPool) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkCreateQueryPool", "pCreateInfo", "VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO, true, "VUID-vkCreateQueryPool-pCreateInfo-parameter", "VUID-VkQueryPoolCreateInfo-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkQueryPoolCreateInfo[] = { VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR };
+
+        skip |= validate_struct_pnext("vkCreateQueryPool", "pCreateInfo->pNext", "VkQueryPoolPerformanceCreateInfoKHR", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkQueryPoolCreateInfo), allowed_structs_VkQueryPoolCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkQueryPoolCreateInfo-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkCreateQueryPool", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkQueryPoolCreateInfo-flags-zerobitmask");
+
+        skip |= validate_ranged_enum("vkCreateQueryPool", "pCreateInfo->queryType", "VkQueryType", AllVkQueryTypeEnums, pCreateInfo->queryType, "VUID-VkQueryPoolCreateInfo-queryType-parameter");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateQueryPool", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateQueryPool", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateQueryPool", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateQueryPool", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateQueryPool", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateQueryPool", "pQueryPool", pQueryPool, "VUID-vkCreateQueryPool-pQueryPool-parameter");
+    if (!skip) skip |= manual_PreCallValidateCreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroyQueryPool(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroyQueryPool", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroyQueryPool", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroyQueryPool", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyQueryPool", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyQueryPool", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetQueryPoolResults(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    size_t                                      dataSize,
+    void*                                       pData,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkGetQueryPoolResults", "queryPool", queryPool);
+    skip |= validate_array("vkGetQueryPoolResults", "dataSize", "pData", dataSize, &pData, true, true, "VUID-vkGetQueryPoolResults-dataSize-arraylength", "VUID-vkGetQueryPoolResults-pData-parameter");
+    skip |= validate_flags("vkGetQueryPoolResults", "flags", "VkQueryResultFlagBits", AllVkQueryResultFlagBits, flags, kOptionalFlags, "VUID-vkGetQueryPoolResults-flags-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCreateBuffer(
+    VkDevice                                    device,
+    const VkBufferCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBuffer*                                   pBuffer) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkCreateBuffer", "pCreateInfo", "VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, true, "VUID-vkCreateBuffer-pCreateInfo-parameter", "VUID-VkBufferCreateInfo-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkBufferCreateInfo[] = { VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR, VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV, VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO };
+
+        skip |= validate_struct_pnext("vkCreateBuffer", "pCreateInfo->pNext", "VkBufferDeviceAddressCreateInfoEXT, VkBufferOpaqueCaptureAddressCreateInfoKHR, VkDedicatedAllocationBufferCreateInfoNV, VkExternalMemoryBufferCreateInfo", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkBufferCreateInfo), allowed_structs_VkBufferCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkBufferCreateInfo-pNext-pNext");
+
+        skip |= validate_flags("vkCreateBuffer", "pCreateInfo->flags", "VkBufferCreateFlagBits", AllVkBufferCreateFlagBits, pCreateInfo->flags, kOptionalFlags, "VUID-VkBufferCreateInfo-flags-parameter");
+
+        skip |= validate_flags("vkCreateBuffer", "pCreateInfo->usage", "VkBufferUsageFlagBits", AllVkBufferUsageFlagBits, pCreateInfo->usage, kRequiredFlags, "VUID-VkBufferCreateInfo-usage-parameter", "VUID-VkBufferCreateInfo-usage-requiredbitmask");
+
+        skip |= validate_ranged_enum("vkCreateBuffer", "pCreateInfo->sharingMode", "VkSharingMode", AllVkSharingModeEnums, pCreateInfo->sharingMode, "VUID-VkBufferCreateInfo-sharingMode-parameter");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateBuffer", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateBuffer", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateBuffer", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateBuffer", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateBuffer", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateBuffer", "pBuffer", pBuffer, "VUID-vkCreateBuffer-pBuffer-parameter");
+    if (!skip) skip |= manual_PreCallValidateCreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroyBuffer(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroyBuffer", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroyBuffer", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroyBuffer", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyBuffer", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyBuffer", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCreateBufferView(
+    VkDevice                                    device,
+    const VkBufferViewCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBufferView*                               pView) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkCreateBufferView", "pCreateInfo", "VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO, true, "VUID-vkCreateBufferView-pCreateInfo-parameter", "VUID-VkBufferViewCreateInfo-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCreateBufferView", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkBufferViewCreateInfo-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkCreateBufferView", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkBufferViewCreateInfo-flags-zerobitmask");
+
+        skip |= validate_required_handle("vkCreateBufferView", "pCreateInfo->buffer", pCreateInfo->buffer);
+
+        skip |= validate_ranged_enum("vkCreateBufferView", "pCreateInfo->format", "VkFormat", AllVkFormatEnums, pCreateInfo->format, "VUID-VkBufferViewCreateInfo-format-parameter");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateBufferView", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateBufferView", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateBufferView", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateBufferView", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateBufferView", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateBufferView", "pView", pView, "VUID-vkCreateBufferView-pView-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroyBufferView(
+    VkDevice                                    device,
+    VkBufferView                                bufferView,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroyBufferView", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroyBufferView", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroyBufferView", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyBufferView", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyBufferView", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCreateImage(
+    VkDevice                                    device,
+    const VkImageCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImage*                                    pImage) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkCreateImage", "pCreateInfo", "VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, true, "VUID-vkCreateImage-pCreateInfo-parameter", "VUID-VkImageCreateInfo-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkImageCreateInfo[] = { VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV, VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID, VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV, VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR, VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR };
+
+        skip |= validate_struct_pnext("vkCreateImage", "pCreateInfo->pNext", "VkDedicatedAllocationImageCreateInfoNV, VkExternalFormatANDROID, VkExternalMemoryImageCreateInfo, VkExternalMemoryImageCreateInfoNV, VkImageDrmFormatModifierExplicitCreateInfoEXT, VkImageDrmFormatModifierListCreateInfoEXT, VkImageFormatListCreateInfoKHR, VkImageStencilUsageCreateInfoEXT, VkImageSwapchainCreateInfoKHR", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkImageCreateInfo), allowed_structs_VkImageCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkImageCreateInfo-pNext-pNext");
+
+        skip |= validate_flags("vkCreateImage", "pCreateInfo->flags", "VkImageCreateFlagBits", AllVkImageCreateFlagBits, pCreateInfo->flags, kOptionalFlags, "VUID-VkImageCreateInfo-flags-parameter");
+
+        skip |= validate_ranged_enum("vkCreateImage", "pCreateInfo->imageType", "VkImageType", AllVkImageTypeEnums, pCreateInfo->imageType, "VUID-VkImageCreateInfo-imageType-parameter");
+
+        skip |= validate_ranged_enum("vkCreateImage", "pCreateInfo->format", "VkFormat", AllVkFormatEnums, pCreateInfo->format, "VUID-VkImageCreateInfo-format-parameter");
+
+        // No xml-driven validation
+
+        skip |= validate_flags("vkCreateImage", "pCreateInfo->samples", "VkSampleCountFlagBits", AllVkSampleCountFlagBits, pCreateInfo->samples, kRequiredSingleBit, "VUID-VkImageCreateInfo-samples-parameter", "VUID-VkImageCreateInfo-samples-parameter");
+
+        skip |= validate_ranged_enum("vkCreateImage", "pCreateInfo->tiling", "VkImageTiling", AllVkImageTilingEnums, pCreateInfo->tiling, "VUID-VkImageCreateInfo-tiling-parameter");
+
+        skip |= validate_flags("vkCreateImage", "pCreateInfo->usage", "VkImageUsageFlagBits", AllVkImageUsageFlagBits, pCreateInfo->usage, kRequiredFlags, "VUID-VkImageCreateInfo-usage-parameter", "VUID-VkImageCreateInfo-usage-requiredbitmask");
+
+        skip |= validate_ranged_enum("vkCreateImage", "pCreateInfo->sharingMode", "VkSharingMode", AllVkSharingModeEnums, pCreateInfo->sharingMode, "VUID-VkImageCreateInfo-sharingMode-parameter");
+
+        skip |= validate_ranged_enum("vkCreateImage", "pCreateInfo->initialLayout", "VkImageLayout", AllVkImageLayoutEnums, pCreateInfo->initialLayout, "VUID-VkImageCreateInfo-initialLayout-parameter");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateImage", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateImage", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateImage", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateImage", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateImage", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateImage", "pImage", pImage, "VUID-vkCreateImage-pImage-parameter");
+    if (!skip) skip |= manual_PreCallValidateCreateImage(device, pCreateInfo, pAllocator, pImage);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroyImage(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroyImage", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroyImage", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroyImage", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyImage", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyImage", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetImageSubresourceLayout(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkImageSubresource*                   pSubresource,
+    VkSubresourceLayout*                        pLayout) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkGetImageSubresourceLayout", "image", image);
+    skip |= validate_required_pointer("vkGetImageSubresourceLayout", "pSubresource", pSubresource, "VUID-vkGetImageSubresourceLayout-pSubresource-parameter");
+    if (pSubresource != NULL)
+    {
+        skip |= validate_flags("vkGetImageSubresourceLayout", "pSubresource->aspectMask", "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pSubresource->aspectMask, kRequiredFlags, "VUID-VkImageSubresource-aspectMask-parameter", "VUID-VkImageSubresource-aspectMask-requiredbitmask");
+    }
+    skip |= validate_required_pointer("vkGetImageSubresourceLayout", "pLayout", pLayout, "VUID-vkGetImageSubresourceLayout-pLayout-parameter");
+    if (pLayout != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
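+// vkCreateImageView additionally validates each channel of the component
+// swizzle (VkComponentMapping r/g/b/a), requires a non-empty
+// subresourceRange.aspectMask, and requires pCreateInfo->image to be a
+// valid handle.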
+bool StatelessValidation::PreCallValidateCreateImageView(
+    VkDevice                                    device,
+    const VkImageViewCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImageView*                                pView) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkCreateImageView", "pCreateInfo", "VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO, true, "VUID-vkCreateImageView-pCreateInfo-parameter", "VUID-VkImageViewCreateInfo-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkImageViewCreateInfo[] = { VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT, VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO };
+
+        skip |= validate_struct_pnext("vkCreateImageView", "pCreateInfo->pNext", "VkImageViewASTCDecodeModeEXT, VkImageViewUsageCreateInfo, VkSamplerYcbcrConversionInfo", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkImageViewCreateInfo), allowed_structs_VkImageViewCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkImageViewCreateInfo-pNext-pNext");
+
+        skip |= validate_flags("vkCreateImageView", "pCreateInfo->flags", "VkImageViewCreateFlagBits", AllVkImageViewCreateFlagBits, pCreateInfo->flags, kOptionalFlags, "VUID-VkImageViewCreateInfo-flags-parameter");
+
+        skip |= validate_required_handle("vkCreateImageView", "pCreateInfo->image", pCreateInfo->image);
+
+        skip |= validate_ranged_enum("vkCreateImageView", "pCreateInfo->viewType", "VkImageViewType", AllVkImageViewTypeEnums, pCreateInfo->viewType, "VUID-VkImageViewCreateInfo-viewType-parameter");
+
+        skip |= validate_ranged_enum("vkCreateImageView", "pCreateInfo->format", "VkFormat", AllVkFormatEnums, pCreateInfo->format, "VUID-VkImageViewCreateInfo-format-parameter");
+
+        skip |= validate_ranged_enum("vkCreateImageView", "pCreateInfo->components.r", "VkComponentSwizzle", AllVkComponentSwizzleEnums, pCreateInfo->components.r, "VUID-VkComponentMapping-r-parameter");
+
+        skip |= validate_ranged_enum("vkCreateImageView", "pCreateInfo->components.g", "VkComponentSwizzle", AllVkComponentSwizzleEnums, pCreateInfo->components.g, "VUID-VkComponentMapping-g-parameter");
+
+        skip |= validate_ranged_enum("vkCreateImageView", "pCreateInfo->components.b", "VkComponentSwizzle", AllVkComponentSwizzleEnums, pCreateInfo->components.b, "VUID-VkComponentMapping-b-parameter");
+
+        skip |= validate_ranged_enum("vkCreateImageView", "pCreateInfo->components.a", "VkComponentSwizzle", AllVkComponentSwizzleEnums, pCreateInfo->components.a, "VUID-VkComponentMapping-a-parameter");
+
+        skip |= validate_flags("vkCreateImageView", "pCreateInfo->subresourceRange.aspectMask", "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pCreateInfo->subresourceRange.aspectMask, kRequiredFlags, "VUID-VkImageSubresourceRange-aspectMask-parameter", "VUID-VkImageSubresourceRange-aspectMask-requiredbitmask");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateImageView", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateImageView", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateImageView", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateImageView", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateImageView", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateImageView", "pView", pView, "VUID-vkCreateImageView-pView-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroyImageView(
+    VkDevice                                    device,
+    VkImageView                                 imageView,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroyImageView", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroyImageView", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroyImageView", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyImageView", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyImageView", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
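+// vkCreateShaderModule: codeSize/pCode are validated as a required array, so
+// codeSize must be non-zero and pCode non-NULL; the SPIR-V contents are not
+// inspected in this generated routine.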
+bool StatelessValidation::PreCallValidateCreateShaderModule(
+    VkDevice                                    device,
+    const VkShaderModuleCreateInfo*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkShaderModule*                             pShaderModule) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkCreateShaderModule", "pCreateInfo", "VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO, true, "VUID-vkCreateShaderModule-pCreateInfo-parameter", "VUID-VkShaderModuleCreateInfo-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkShaderModuleCreateInfo[] = { VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT };
+
+        skip |= validate_struct_pnext("vkCreateShaderModule", "pCreateInfo->pNext", "VkShaderModuleValidationCacheCreateInfoEXT", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkShaderModuleCreateInfo), allowed_structs_VkShaderModuleCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkShaderModuleCreateInfo-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkCreateShaderModule", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkShaderModuleCreateInfo-flags-zerobitmask");
+
+        skip |= validate_array("vkCreateShaderModule", "pCreateInfo->codeSize", "pCreateInfo->pCode", pCreateInfo->codeSize, &pCreateInfo->pCode, true, true, kVUIDUndefined, "VUID-VkShaderModuleCreateInfo-pCode-parameter");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateShaderModule", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateShaderModule", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateShaderModule", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateShaderModule", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateShaderModule", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateShaderModule", "pShaderModule", pShaderModule, "VUID-vkCreateShaderModule-pShaderModule-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroyShaderModule(
+    VkDevice                                    device,
+    VkShaderModule                              shaderModule,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroyShaderModule", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroyShaderModule", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroyShaderModule", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyShaderModule", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyShaderModule", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
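+// vkCreatePipelineCache: pInitialData is optional. initialDataSize may be
+// zero, but when it is non-zero the data pointer must be valid; the
+// count-not-required / array-required arguments to validate_array express
+// exactly that.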
+bool StatelessValidation::PreCallValidateCreatePipelineCache(
+    VkDevice                                    device,
+    const VkPipelineCacheCreateInfo*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineCache*                            pPipelineCache) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkCreatePipelineCache", "pCreateInfo", "VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO, true, "VUID-vkCreatePipelineCache-pCreateInfo-parameter", "VUID-VkPipelineCacheCreateInfo-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCreatePipelineCache", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPipelineCacheCreateInfo-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkCreatePipelineCache", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkPipelineCacheCreateInfo-flags-zerobitmask");
+
+        skip |= validate_array("vkCreatePipelineCache", "pCreateInfo->initialDataSize", "pCreateInfo->pInitialData", pCreateInfo->initialDataSize, &pCreateInfo->pInitialData, false, true, kVUIDUndefined, "VUID-VkPipelineCacheCreateInfo-pInitialData-parameter");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreatePipelineCache", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreatePipelineCache", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreatePipelineCache", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreatePipelineCache", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreatePipelineCache", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreatePipelineCache", "pPipelineCache", pPipelineCache, "VUID-vkCreatePipelineCache-pPipelineCache-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroyPipelineCache(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroyPipelineCache", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroyPipelineCache", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroyPipelineCache", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyPipelineCache", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyPipelineCache", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
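+// vkGetPipelineCacheData uses the count-pointer form of validate_array:
+// pDataSize itself must be non-NULL, while a NULL pData is legal and simply
+// queries the required size.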
+bool StatelessValidation::PreCallValidateGetPipelineCacheData(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    size_t*                                     pDataSize,
+    void*                                       pData) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkGetPipelineCacheData", "pipelineCache", pipelineCache);
+    skip |= validate_array("vkGetPipelineCacheData", "pDataSize", "pData", pDataSize, &pData, true, false, false, kVUIDUndefined, "VUID-vkGetPipelineCacheData-pData-parameter");
+    return skip;
+}
+
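+// vkMergePipelineCaches: dstCache and every element of pSrcCaches must be
+// valid handles, and srcCacheCount must be non-zero.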
+bool StatelessValidation::PreCallValidateMergePipelineCaches(
+    VkDevice                                    device,
+    VkPipelineCache                             dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkPipelineCache*                      pSrcCaches) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkMergePipelineCaches", "dstCache", dstCache);
+    skip |= validate_handle_array("vkMergePipelineCaches", "srcCacheCount", "pSrcCaches", srcCacheCount, pSrcCaches, true, true);
+    return skip;
+}
+
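+// vkCreateGraphicsPipelines iterates over each element of pCreateInfos and
+// over each shader stage within it. ParameterName with an IndexVector fills
+// the "%i" placeholders so that a failure names the exact element, e.g.
+// pCreateInfos[2].pStages[0].module. Rules that relate members to one another
+// or to device state are deferred to
+// manual_PreCallValidateCreateGraphicsPipelines.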
+bool StatelessValidation::PreCallValidateCreateGraphicsPipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkGraphicsPipelineCreateInfo*         pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines) const {
+    bool skip = false;
+    skip |= validate_struct_type_array("vkCreateGraphicsPipelines", "createInfoCount", "pCreateInfos", "VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO", createInfoCount, pCreateInfos, VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO, true, true, "VUID-VkGraphicsPipelineCreateInfo-sType-sType", "VUID-vkCreateGraphicsPipelines-pCreateInfos-parameter", "VUID-vkCreateGraphicsPipelines-createInfoCount-arraylength");
+    if (pCreateInfos != NULL)
+    {
+        for (uint32_t createInfoIndex = 0; createInfoIndex < createInfoCount; ++createInfoIndex)
+        {
+            const VkStructureType allowed_structs_VkGraphicsPipelineCreateInfo[] = { VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD, VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV };
+
+            skip |= validate_struct_pnext("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pNext", ParameterName::IndexVector{ createInfoIndex }), "VkPipelineCompilerControlCreateInfoAMD, VkPipelineCreationFeedbackCreateInfoEXT, VkPipelineDiscardRectangleStateCreateInfoEXT, VkPipelineRepresentativeFragmentTestStateCreateInfoNV", pCreateInfos[createInfoIndex].pNext, ARRAY_SIZE(allowed_structs_VkGraphicsPipelineCreateInfo), allowed_structs_VkGraphicsPipelineCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkGraphicsPipelineCreateInfo-pNext-pNext");
+
+            skip |= validate_flags("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].flags", ParameterName::IndexVector{ createInfoIndex }), "VkPipelineCreateFlagBits", AllVkPipelineCreateFlagBits, pCreateInfos[createInfoIndex].flags, kOptionalFlags, "VUID-VkGraphicsPipelineCreateInfo-flags-parameter");
+
+            skip |= validate_struct_type_array("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].stageCount", ParameterName::IndexVector{ createInfoIndex }), ParameterName("pCreateInfos[%i].pStages", ParameterName::IndexVector{ createInfoIndex }), "VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO", pCreateInfos[createInfoIndex].stageCount, pCreateInfos[createInfoIndex].pStages, VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, true, true, "VUID-VkPipelineShaderStageCreateInfo-sType-sType", "VUID-VkGraphicsPipelineCreateInfo-pStages-parameter", "VUID-VkGraphicsPipelineCreateInfo-stageCount-arraylength");
+
+            if (pCreateInfos[createInfoIndex].pStages != NULL)
+            {
+                for (uint32_t stageIndex = 0; stageIndex < pCreateInfos[createInfoIndex].stageCount; ++stageIndex)
+                {
+                    const VkStructureType allowed_structs_VkPipelineShaderStageCreateInfo[] = { VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT };
+
+                    skip |= validate_struct_pnext("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pStages[%i].pNext", ParameterName::IndexVector{ createInfoIndex, stageIndex }), "VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT", pCreateInfos[createInfoIndex].pStages[stageIndex].pNext, ARRAY_SIZE(allowed_structs_VkPipelineShaderStageCreateInfo), allowed_structs_VkPipelineShaderStageCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkPipelineShaderStageCreateInfo-pNext-pNext");
+
+                    skip |= validate_flags("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pStages[%i].flags", ParameterName::IndexVector{ createInfoIndex, stageIndex }), "VkPipelineShaderStageCreateFlagBits", AllVkPipelineShaderStageCreateFlagBits, pCreateInfos[createInfoIndex].pStages[stageIndex].flags, kOptionalFlags, "VUID-VkPipelineShaderStageCreateInfo-flags-parameter");
+
+                    skip |= validate_flags("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pStages[%i].stage", ParameterName::IndexVector{ createInfoIndex, stageIndex }), "VkShaderStageFlagBits", AllVkShaderStageFlagBits, pCreateInfos[createInfoIndex].pStages[stageIndex].stage, kRequiredSingleBit, "VUID-VkPipelineShaderStageCreateInfo-stage-parameter", "VUID-VkPipelineShaderStageCreateInfo-stage-parameter");
+
+                    skip |= validate_required_handle("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pStages[%i].module", ParameterName::IndexVector{ createInfoIndex, stageIndex }), pCreateInfos[createInfoIndex].pStages[stageIndex].module);
+
+                    skip |= validate_required_pointer("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pStages[%i].pName", ParameterName::IndexVector{ createInfoIndex, stageIndex }), pCreateInfos[createInfoIndex].pStages[stageIndex].pName, "VUID-VkPipelineShaderStageCreateInfo-pName-parameter");
+
+                    if (pCreateInfos[createInfoIndex].pStages[stageIndex].pSpecializationInfo != NULL)
+                    {
+                        skip |= validate_array("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pStages[%i].pSpecializationInfo->mapEntryCount", ParameterName::IndexVector{ createInfoIndex, stageIndex }), ParameterName("pCreateInfos[%i].pStages[%i].pSpecializationInfo->pMapEntries", ParameterName::IndexVector{ createInfoIndex, stageIndex }), pCreateInfos[createInfoIndex].pStages[stageIndex].pSpecializationInfo->mapEntryCount, &pCreateInfos[createInfoIndex].pStages[stageIndex].pSpecializationInfo->pMapEntries, false, true, kVUIDUndefined, "VUID-VkSpecializationInfo-pMapEntries-parameter");
+
+                        if (pCreateInfos[createInfoIndex].pStages[stageIndex].pSpecializationInfo->pMapEntries != NULL)
+                        {
+                            for (uint32_t mapEntryIndex = 0; mapEntryIndex < pCreateInfos[createInfoIndex].pStages[stageIndex].pSpecializationInfo->mapEntryCount; ++mapEntryIndex)
+                            {
+                                // No xml-driven validation
+                            }
+                        }
+
+                        skip |= validate_array("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pStages[%i].pSpecializationInfo->dataSize", ParameterName::IndexVector{ createInfoIndex, stageIndex }), ParameterName("pCreateInfos[%i].pStages[%i].pSpecializationInfo->pData", ParameterName::IndexVector{ createInfoIndex, stageIndex }), pCreateInfos[createInfoIndex].pStages[stageIndex].pSpecializationInfo->dataSize, &pCreateInfos[createInfoIndex].pStages[stageIndex].pSpecializationInfo->pData, false, true, kVUIDUndefined, "VUID-VkSpecializationInfo-pData-parameter");
+                    }
+                }
+            }
+
+            skip |= validate_struct_type("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pRasterizationState", ParameterName::IndexVector{ createInfoIndex }), "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO", pCreateInfos[createInfoIndex].pRasterizationState, VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO, true, "VUID-VkGraphicsPipelineCreateInfo-pRasterizationState-parameter", "VUID-VkPipelineRasterizationStateCreateInfo-sType-sType");
+
+            if (pCreateInfos[createInfoIndex].pRasterizationState != NULL)
+            {
+                const VkStructureType allowed_structs_VkPipelineRasterizationStateCreateInfo[] = { VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD, VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT };
+
+                skip |= validate_struct_pnext("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pRasterizationState->pNext", ParameterName::IndexVector{ createInfoIndex }), "VkPipelineRasterizationConservativeStateCreateInfoEXT, VkPipelineRasterizationDepthClipStateCreateInfoEXT, VkPipelineRasterizationLineStateCreateInfoEXT, VkPipelineRasterizationStateRasterizationOrderAMD, VkPipelineRasterizationStateStreamCreateInfoEXT", pCreateInfos[createInfoIndex].pRasterizationState->pNext, ARRAY_SIZE(allowed_structs_VkPipelineRasterizationStateCreateInfo), allowed_structs_VkPipelineRasterizationStateCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkPipelineRasterizationStateCreateInfo-pNext-pNext");
+
+                skip |= validate_reserved_flags("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pRasterizationState->flags", ParameterName::IndexVector{ createInfoIndex }), pCreateInfos[createInfoIndex].pRasterizationState->flags, "VUID-VkPipelineRasterizationStateCreateInfo-flags-zerobitmask");
+
+                skip |= validate_bool32("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pRasterizationState->depthClampEnable", ParameterName::IndexVector{ createInfoIndex }), pCreateInfos[createInfoIndex].pRasterizationState->depthClampEnable);
+
+                skip |= validate_bool32("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pRasterizationState->rasterizerDiscardEnable", ParameterName::IndexVector{ createInfoIndex }), pCreateInfos[createInfoIndex].pRasterizationState->rasterizerDiscardEnable);
+
+                skip |= validate_ranged_enum("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pRasterizationState->polygonMode", ParameterName::IndexVector{ createInfoIndex }), "VkPolygonMode", AllVkPolygonModeEnums, pCreateInfos[createInfoIndex].pRasterizationState->polygonMode, "VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-parameter");
+
+                skip |= validate_flags("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pRasterizationState->cullMode", ParameterName::IndexVector{ createInfoIndex }), "VkCullModeFlagBits", AllVkCullModeFlagBits, pCreateInfos[createInfoIndex].pRasterizationState->cullMode, kOptionalFlags, "VUID-VkPipelineRasterizationStateCreateInfo-cullMode-parameter");
+
+                skip |= validate_ranged_enum("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pRasterizationState->frontFace", ParameterName::IndexVector{ createInfoIndex }), "VkFrontFace", AllVkFrontFaceEnums, pCreateInfos[createInfoIndex].pRasterizationState->frontFace, "VUID-VkPipelineRasterizationStateCreateInfo-frontFace-parameter");
+
+                skip |= validate_bool32("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pRasterizationState->depthBiasEnable", ParameterName::IndexVector{ createInfoIndex }), pCreateInfos[createInfoIndex].pRasterizationState->depthBiasEnable);
+            }
+
+            skip |= validate_struct_type("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pDynamicState", ParameterName::IndexVector{ createInfoIndex }), "VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO", pCreateInfos[createInfoIndex].pDynamicState, VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO, false, "VUID-VkGraphicsPipelineCreateInfo-pDynamicState-parameter", "VUID-VkPipelineDynamicStateCreateInfo-sType-sType");
+
+            if (pCreateInfos[createInfoIndex].pDynamicState != NULL)
+            {
+                skip |= validate_struct_pnext("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pDynamicState->pNext", ParameterName::IndexVector{ createInfoIndex }), NULL, pCreateInfos[createInfoIndex].pDynamicState->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPipelineDynamicStateCreateInfo-pNext-pNext");
+
+                skip |= validate_reserved_flags("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pDynamicState->flags", ParameterName::IndexVector{ createInfoIndex }), pCreateInfos[createInfoIndex].pDynamicState->flags, "VUID-VkPipelineDynamicStateCreateInfo-flags-zerobitmask");
+
+                skip |= validate_ranged_enum_array("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].pDynamicState->dynamicStateCount", ParameterName::IndexVector{ createInfoIndex }), ParameterName("pCreateInfos[%i].pDynamicState->pDynamicStates", ParameterName::IndexVector{ createInfoIndex }), "VkDynamicState", AllVkDynamicStateEnums, pCreateInfos[createInfoIndex].pDynamicState->dynamicStateCount, pCreateInfos[createInfoIndex].pDynamicState->pDynamicStates, false, true);
+            }
+
+            skip |= validate_required_handle("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].layout", ParameterName::IndexVector{ createInfoIndex }), pCreateInfos[createInfoIndex].layout);
+
+            skip |= validate_required_handle("vkCreateGraphicsPipelines", ParameterName("pCreateInfos[%i].renderPass", ParameterName::IndexVector{ createInfoIndex }), pCreateInfos[createInfoIndex].renderPass);
+        }
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateGraphicsPipelines", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateGraphicsPipelines", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateGraphicsPipelines", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateGraphicsPipelines", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateGraphicsPipelines", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_array("vkCreateGraphicsPipelines", "createInfoCount", "pPipelines", createInfoCount, &pPipelines, true, true, "VUID-vkCreateGraphicsPipelines-createInfoCount-arraylength", "VUID-vkCreateGraphicsPipelines-pPipelines-parameter");
+    if (!skip) skip |= manual_PreCallValidateCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+    return skip;
+}
+
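+// vkCreateComputePipelines: VkComputePipelineCreateInfo embeds a single
+// VkPipelineShaderStageCreateInfo by value, so its sType, pNext chain, flags,
+// stage bit, module handle and pName are validated directly rather than
+// through a pStages array loop.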
+bool StatelessValidation::PreCallValidateCreateComputePipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkComputePipelineCreateInfo*          pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines) const {
+    bool skip = false;
+    skip |= validate_struct_type_array("vkCreateComputePipelines", "createInfoCount", "pCreateInfos", "VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO", createInfoCount, pCreateInfos, VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO, true, true, "VUID-VkComputePipelineCreateInfo-sType-sType", "VUID-vkCreateComputePipelines-pCreateInfos-parameter", "VUID-vkCreateComputePipelines-createInfoCount-arraylength");
+    if (pCreateInfos != NULL)
+    {
+        for (uint32_t createInfoIndex = 0; createInfoIndex < createInfoCount; ++createInfoIndex)
+        {
+            const VkStructureType allowed_structs_VkComputePipelineCreateInfo[] = { VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD, VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT };
+
+            skip |= validate_struct_pnext("vkCreateComputePipelines", ParameterName("pCreateInfos[%i].pNext", ParameterName::IndexVector{ createInfoIndex }), "VkPipelineCompilerControlCreateInfoAMD, VkPipelineCreationFeedbackCreateInfoEXT", pCreateInfos[createInfoIndex].pNext, ARRAY_SIZE(allowed_structs_VkComputePipelineCreateInfo), allowed_structs_VkComputePipelineCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkComputePipelineCreateInfo-pNext-pNext");
+
+            skip |= validate_flags("vkCreateComputePipelines", ParameterName("pCreateInfos[%i].flags", ParameterName::IndexVector{ createInfoIndex }), "VkPipelineCreateFlagBits", AllVkPipelineCreateFlagBits, pCreateInfos[createInfoIndex].flags, kOptionalFlags, "VUID-VkComputePipelineCreateInfo-flags-parameter");
+
+            skip |= validate_struct_type("vkCreateComputePipelines", ParameterName("pCreateInfos[%i].stage", ParameterName::IndexVector{ createInfoIndex }), "VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO", &(pCreateInfos[createInfoIndex].stage), VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, false, kVUIDUndefined, "VUID-VkPipelineShaderStageCreateInfo-sType-sType");
+
+            const VkStructureType allowed_structs_VkPipelineShaderStageCreateInfo[] = { VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT };
+
+            skip |= validate_struct_pnext("vkCreateComputePipelines", ParameterName("pCreateInfos[%i].stage.pNext", ParameterName::IndexVector{ createInfoIndex }), "VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT", pCreateInfos[createInfoIndex].stage.pNext, ARRAY_SIZE(allowed_structs_VkPipelineShaderStageCreateInfo), allowed_structs_VkPipelineShaderStageCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkPipelineShaderStageCreateInfo-pNext-pNext");
+
+            skip |= validate_flags("vkCreateComputePipelines", ParameterName("pCreateInfos[%i].stage.flags", ParameterName::IndexVector{ createInfoIndex }), "VkPipelineShaderStageCreateFlagBits", AllVkPipelineShaderStageCreateFlagBits, pCreateInfos[createInfoIndex].stage.flags, kOptionalFlags, "VUID-VkPipelineShaderStageCreateInfo-flags-parameter");
+
+            skip |= validate_flags("vkCreateComputePipelines", ParameterName("pCreateInfos[%i].stage.stage", ParameterName::IndexVector{ createInfoIndex }), "VkShaderStageFlagBits", AllVkShaderStageFlagBits, pCreateInfos[createInfoIndex].stage.stage, kRequiredSingleBit, "VUID-VkPipelineShaderStageCreateInfo-stage-parameter", "VUID-VkPipelineShaderStageCreateInfo-stage-parameter");
+
+            skip |= validate_required_handle("vkCreateComputePipelines", ParameterName("pCreateInfos[%i].stage.module", ParameterName::IndexVector{ createInfoIndex }), pCreateInfos[createInfoIndex].stage.module);
+
+            skip |= validate_required_pointer("vkCreateComputePipelines", ParameterName("pCreateInfos[%i].stage.pName", ParameterName::IndexVector{ createInfoIndex }), pCreateInfos[createInfoIndex].stage.pName, "VUID-VkPipelineShaderStageCreateInfo-pName-parameter");
+
+            if (pCreateInfos[createInfoIndex].stage.pSpecializationInfo != NULL)
+            {
+                skip |= validate_array("vkCreateComputePipelines", ParameterName("pCreateInfos[%i].stage.pSpecializationInfo->mapEntryCount", ParameterName::IndexVector{ createInfoIndex }), ParameterName("pCreateInfos[%i].stage.pSpecializationInfo->pMapEntries", ParameterName::IndexVector{ createInfoIndex }), pCreateInfos[createInfoIndex].stage.pSpecializationInfo->mapEntryCount, &pCreateInfos[createInfoIndex].stage.pSpecializationInfo->pMapEntries, false, true, kVUIDUndefined, "VUID-VkSpecializationInfo-pMapEntries-parameter");
+
+                if (pCreateInfos[createInfoIndex].stage.pSpecializationInfo->pMapEntries != NULL)
+                {
+                    for (uint32_t mapEntryIndex = 0; mapEntryIndex < pCreateInfos[createInfoIndex].stage.pSpecializationInfo->mapEntryCount; ++mapEntryIndex)
+                    {
+                        // No xml-driven validation
+                    }
+                }
+
+                skip |= validate_array("vkCreateComputePipelines", ParameterName("pCreateInfos[%i].stage.pSpecializationInfo->dataSize", ParameterName::IndexVector{ createInfoIndex }), ParameterName("pCreateInfos[%i].stage.pSpecializationInfo->pData", ParameterName::IndexVector{ createInfoIndex }), pCreateInfos[createInfoIndex].stage.pSpecializationInfo->dataSize, &pCreateInfos[createInfoIndex].stage.pSpecializationInfo->pData, false, true, kVUIDUndefined, "VUID-VkSpecializationInfo-pData-parameter");
+            }
+
+            skip |= validate_required_handle("vkCreateComputePipelines", ParameterName("pCreateInfos[%i].layout", ParameterName::IndexVector{ createInfoIndex }), pCreateInfos[createInfoIndex].layout);
+        }
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateComputePipelines", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateComputePipelines", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateComputePipelines", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateComputePipelines", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateComputePipelines", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_array("vkCreateComputePipelines", "createInfoCount", "pPipelines", createInfoCount, &pPipelines, true, true, "VUID-vkCreateComputePipelines-createInfoCount-arraylength", "VUID-vkCreateComputePipelines-pPipelines-parameter");
+    if (!skip) skip |= manual_PreCallValidateCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroyPipeline(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroyPipeline", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroyPipeline", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroyPipeline", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyPipeline", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyPipeline", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
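+// vkCreatePipelineLayout: only stageFlags of each VkPushConstantRange has an
+// xml-derivable rule (a required, non-empty VkShaderStageFlagBits mask);
+// offset and size depend on device limits and are not checked in this
+// generated routine.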
+bool StatelessValidation::PreCallValidateCreatePipelineLayout(
+    VkDevice                                    device,
+    const VkPipelineLayoutCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineLayout*                           pPipelineLayout) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkCreatePipelineLayout", "pCreateInfo", "VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, true, "VUID-vkCreatePipelineLayout-pCreateInfo-parameter", "VUID-VkPipelineLayoutCreateInfo-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCreatePipelineLayout", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPipelineLayoutCreateInfo-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkCreatePipelineLayout", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkPipelineLayoutCreateInfo-flags-zerobitmask");
+
+        skip |= validate_array("vkCreatePipelineLayout", "pCreateInfo->setLayoutCount", "pCreateInfo->pSetLayouts", pCreateInfo->setLayoutCount, &pCreateInfo->pSetLayouts, false, true, kVUIDUndefined, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-parameter");
+
+        skip |= validate_array("vkCreatePipelineLayout", "pCreateInfo->pushConstantRangeCount", "pCreateInfo->pPushConstantRanges", pCreateInfo->pushConstantRangeCount, &pCreateInfo->pPushConstantRanges, false, true, kVUIDUndefined, "VUID-VkPipelineLayoutCreateInfo-pPushConstantRanges-parameter");
+
+        if (pCreateInfo->pPushConstantRanges != NULL)
+        {
+            for (uint32_t pushConstantRangeIndex = 0; pushConstantRangeIndex < pCreateInfo->pushConstantRangeCount; ++pushConstantRangeIndex)
+            {
+                skip |= validate_flags("vkCreatePipelineLayout", ParameterName("pCreateInfo->pPushConstantRanges[%i].stageFlags", ParameterName::IndexVector{ pushConstantRangeIndex }), "VkShaderStageFlagBits", AllVkShaderStageFlagBits, pCreateInfo->pPushConstantRanges[pushConstantRangeIndex].stageFlags, kRequiredFlags, "VUID-VkPushConstantRange-stageFlags-parameter", "VUID-VkPushConstantRange-stageFlags-requiredbitmask");
+            }
+        }
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreatePipelineLayout", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreatePipelineLayout", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreatePipelineLayout", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreatePipelineLayout", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreatePipelineLayout", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreatePipelineLayout", "pPipelineLayout", pPipelineLayout, "VUID-vkCreatePipelineLayout-pPipelineLayout-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroyPipelineLayout(
+    VkDevice                                    device,
+    VkPipelineLayout                            pipelineLayout,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroyPipelineLayout", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroyPipelineLayout", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroyPipelineLayout", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyPipelineLayout", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyPipelineLayout", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
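+// vkCreateSampler validates the filter, mipmap-mode and address-mode enums
+// plus the three VkBool32 members; numeric fields such as maxAnisotropy have
+// no xml-driven rule here, and the remaining checks are deferred to
+// manual_PreCallValidateCreateSampler.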
+bool StatelessValidation::PreCallValidateCreateSampler(
+    VkDevice                                    device,
+    const VkSamplerCreateInfo*                  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSampler*                                  pSampler) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkCreateSampler", "pCreateInfo", "VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, true, "VUID-vkCreateSampler-pCreateInfo-parameter", "VUID-VkSamplerCreateInfo-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkSamplerCreateInfo[] = { VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO };
+
+        skip |= validate_struct_pnext("vkCreateSampler", "pCreateInfo->pNext", "VkSamplerReductionModeCreateInfoEXT, VkSamplerYcbcrConversionInfo", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkSamplerCreateInfo), allowed_structs_VkSamplerCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkSamplerCreateInfo-pNext-pNext");
+
+        skip |= validate_flags("vkCreateSampler", "pCreateInfo->flags", "VkSamplerCreateFlagBits", AllVkSamplerCreateFlagBits, pCreateInfo->flags, kOptionalFlags, "VUID-VkSamplerCreateInfo-flags-parameter");
+
+        skip |= validate_ranged_enum("vkCreateSampler", "pCreateInfo->magFilter", "VkFilter", AllVkFilterEnums, pCreateInfo->magFilter, "VUID-VkSamplerCreateInfo-magFilter-parameter");
+
+        skip |= validate_ranged_enum("vkCreateSampler", "pCreateInfo->minFilter", "VkFilter", AllVkFilterEnums, pCreateInfo->minFilter, "VUID-VkSamplerCreateInfo-minFilter-parameter");
+
+        skip |= validate_ranged_enum("vkCreateSampler", "pCreateInfo->mipmapMode", "VkSamplerMipmapMode", AllVkSamplerMipmapModeEnums, pCreateInfo->mipmapMode, "VUID-VkSamplerCreateInfo-mipmapMode-parameter");
+
+        skip |= validate_ranged_enum("vkCreateSampler", "pCreateInfo->addressModeU", "VkSamplerAddressMode", AllVkSamplerAddressModeEnums, pCreateInfo->addressModeU, "VUID-VkSamplerCreateInfo-addressModeU-parameter");
+
+        skip |= validate_ranged_enum("vkCreateSampler", "pCreateInfo->addressModeV", "VkSamplerAddressMode", AllVkSamplerAddressModeEnums, pCreateInfo->addressModeV, "VUID-VkSamplerCreateInfo-addressModeV-parameter");
+
+        skip |= validate_ranged_enum("vkCreateSampler", "pCreateInfo->addressModeW", "VkSamplerAddressMode", AllVkSamplerAddressModeEnums, pCreateInfo->addressModeW, "VUID-VkSamplerCreateInfo-addressModeW-parameter");
+
+        skip |= validate_bool32("vkCreateSampler", "pCreateInfo->anisotropyEnable", pCreateInfo->anisotropyEnable);
+
+        skip |= validate_bool32("vkCreateSampler", "pCreateInfo->compareEnable", pCreateInfo->compareEnable);
+
+        skip |= validate_bool32("vkCreateSampler", "pCreateInfo->unnormalizedCoordinates", pCreateInfo->unnormalizedCoordinates);
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateSampler", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateSampler", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateSampler", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateSampler", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateSampler", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateSampler", "pSampler", pSampler, "VUID-vkCreateSampler-pSampler-parameter");
+    if (!skip) skip |= manual_PreCallValidateCreateSampler(device, pCreateInfo, pAllocator, pSampler);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroySampler(
+    VkDevice                                    device,
+    VkSampler                                   sampler,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroySampler", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroySampler", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroySampler", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroySampler", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroySampler", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCreateDescriptorSetLayout(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorSetLayout*                      pSetLayout) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkCreateDescriptorSetLayout", "pCreateInfo", "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, true, "VUID-vkCreateDescriptorSetLayout-pCreateInfo-parameter", "VUID-VkDescriptorSetLayoutCreateInfo-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkDescriptorSetLayoutCreateInfo[] = { VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT };
+
+        skip |= validate_struct_pnext("vkCreateDescriptorSetLayout", "pCreateInfo->pNext", "VkDescriptorSetLayoutBindingFlagsCreateInfoEXT", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkDescriptorSetLayoutCreateInfo), allowed_structs_VkDescriptorSetLayoutCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkDescriptorSetLayoutCreateInfo-pNext-pNext");
+
+        skip |= validate_flags("vkCreateDescriptorSetLayout", "pCreateInfo->flags", "VkDescriptorSetLayoutCreateFlagBits", AllVkDescriptorSetLayoutCreateFlagBits, pCreateInfo->flags, kOptionalFlags, "VUID-VkDescriptorSetLayoutCreateInfo-flags-parameter");
+
+        skip |= validate_array("vkCreateDescriptorSetLayout", "pCreateInfo->bindingCount", "pCreateInfo->pBindings", pCreateInfo->bindingCount, &pCreateInfo->pBindings, false, true, kVUIDUndefined, "VUID-VkDescriptorSetLayoutCreateInfo-pBindings-parameter");
+
+        if (pCreateInfo->pBindings != NULL)
+        {
+            for (uint32_t bindingIndex = 0; bindingIndex < pCreateInfo->bindingCount; ++bindingIndex)
+            {
+                skip |= validate_ranged_enum("vkCreateDescriptorSetLayout", ParameterName("pCreateInfo->pBindings[%i].descriptorType", ParameterName::IndexVector{ bindingIndex }), "VkDescriptorType", AllVkDescriptorTypeEnums, pCreateInfo->pBindings[bindingIndex].descriptorType, "VUID-VkDescriptorSetLayoutBinding-descriptorType-parameter");
+            }
+        }
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateDescriptorSetLayout", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateDescriptorSetLayout", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateDescriptorSetLayout", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateDescriptorSetLayout", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateDescriptorSetLayout", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateDescriptorSetLayout", "pSetLayout", pSetLayout, "VUID-vkCreateDescriptorSetLayout-pSetLayout-parameter");
+    if (!skip) skip |= manual_PreCallValidateCreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroyDescriptorSetLayout(
+    VkDevice                                    device,
+    VkDescriptorSetLayout                       descriptorSetLayout,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroyDescriptorSetLayout", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroyDescriptorSetLayout", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroyDescriptorSetLayout", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyDescriptorSetLayout", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyDescriptorSetLayout", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCreateDescriptorPool(
+    VkDevice                                    device,
+    const VkDescriptorPoolCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorPool*                           pDescriptorPool) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkCreateDescriptorPool", "pCreateInfo", "VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO, true, "VUID-vkCreateDescriptorPool-pCreateInfo-parameter", "VUID-VkDescriptorPoolCreateInfo-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkDescriptorPoolCreateInfo[] = { VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT };
+
+        skip |= validate_struct_pnext("vkCreateDescriptorPool", "pCreateInfo->pNext", "VkDescriptorPoolInlineUniformBlockCreateInfoEXT", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkDescriptorPoolCreateInfo), allowed_structs_VkDescriptorPoolCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkDescriptorPoolCreateInfo-pNext-pNext");
+
+        skip |= validate_flags("vkCreateDescriptorPool", "pCreateInfo->flags", "VkDescriptorPoolCreateFlagBits", AllVkDescriptorPoolCreateFlagBits, pCreateInfo->flags, kOptionalFlags, "VUID-VkDescriptorPoolCreateInfo-flags-parameter");
+
+        skip |= validate_array("vkCreateDescriptorPool", "pCreateInfo->poolSizeCount", "pCreateInfo->pPoolSizes", pCreateInfo->poolSizeCount, &pCreateInfo->pPoolSizes, true, true, "VUID-VkDescriptorPoolCreateInfo-poolSizeCount-arraylength", "VUID-VkDescriptorPoolCreateInfo-pPoolSizes-parameter");
+
+        if (pCreateInfo->pPoolSizes != NULL)
+        {
+            for (uint32_t poolSizeIndex = 0; poolSizeIndex < pCreateInfo->poolSizeCount; ++poolSizeIndex)
+            {
+                skip |= validate_ranged_enum("vkCreateDescriptorPool", ParameterName("pCreateInfo->pPoolSizes[%i].type", ParameterName::IndexVector{ poolSizeIndex }), "VkDescriptorType", AllVkDescriptorTypeEnums, pCreateInfo->pPoolSizes[poolSizeIndex].type, "VUID-VkDescriptorPoolSize-type-parameter");
+            }
+        }
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateDescriptorPool", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateDescriptorPool", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateDescriptorPool", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateDescriptorPool", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateDescriptorPool", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateDescriptorPool", "pDescriptorPool", pDescriptorPool, "VUID-vkCreateDescriptorPool-pDescriptorPool-parameter");
+    if (!skip) skip |= manual_PreCallValidateCreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroyDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroyDescriptorPool", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroyDescriptorPool", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroyDescriptorPool", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyDescriptorPool", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyDescriptorPool", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateResetDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    VkDescriptorPoolResetFlags                  flags) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkResetDescriptorPool", "descriptorPool", descriptorPool);
+    skip |= validate_reserved_flags("vkResetDescriptorPool", "flags", flags, "VUID-vkResetDescriptorPool-flags-zerobitmask");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateAllocateDescriptorSets(
+    VkDevice                                    device,
+    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
+    VkDescriptorSet*                            pDescriptorSets) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkAllocateDescriptorSets", "pAllocateInfo", "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO", pAllocateInfo, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, true, "VUID-vkAllocateDescriptorSets-pAllocateInfo-parameter", "VUID-VkDescriptorSetAllocateInfo-sType-sType");
+    if (pAllocateInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkDescriptorSetAllocateInfo[] = { VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT };
+
+        skip |= validate_struct_pnext("vkAllocateDescriptorSets", "pAllocateInfo->pNext", "VkDescriptorSetVariableDescriptorCountAllocateInfoEXT", pAllocateInfo->pNext, ARRAY_SIZE(allowed_structs_VkDescriptorSetAllocateInfo), allowed_structs_VkDescriptorSetAllocateInfo, GeneratedVulkanHeaderVersion, "VUID-VkDescriptorSetAllocateInfo-pNext-pNext");
+
+        skip |= validate_required_handle("vkAllocateDescriptorSets", "pAllocateInfo->descriptorPool", pAllocateInfo->descriptorPool);
+
+        skip |= validate_handle_array("vkAllocateDescriptorSets", "pAllocateInfo->descriptorSetCount", "pAllocateInfo->pSetLayouts", pAllocateInfo->descriptorSetCount, pAllocateInfo->pSetLayouts, true, true);
+    }
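+    // descriptorSetCount also gives the length of the output pDescriptorSets array.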
+    if (pAllocateInfo != NULL) {
+        skip |= validate_array("vkAllocateDescriptorSets", "pAllocateInfo->descriptorSetCount", "pDescriptorSets", pAllocateInfo->descriptorSetCount, &pDescriptorSets, true, true, kVUIDUndefined, "VUID-vkAllocateDescriptorSets-pDescriptorSets-parameter");
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateFreeDescriptorSets(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkFreeDescriptorSets", "descriptorPool", descriptorPool);
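+    // The remaining parameters are left to the hand-written routine invoked below.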
+    if (!skip) skip |= manual_PreCallValidateFreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateUpdateDescriptorSets(
+    VkDevice                                    device,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites,
+    uint32_t                                    descriptorCopyCount,
+    const VkCopyDescriptorSet*                  pDescriptorCopies) const {
+    bool skip = false;
+    skip |= validate_struct_type_array("vkUpdateDescriptorSets", "descriptorWriteCount", "pDescriptorWrites", "VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET", descriptorWriteCount, pDescriptorWrites, VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, false, true, "VUID-VkWriteDescriptorSet-sType-sType", "VUID-vkUpdateDescriptorSets-pDescriptorWrites-parameter", kVUIDUndefined);
+    if (pDescriptorWrites != NULL)
+    {
+        for (uint32_t descriptorWriteIndex = 0; descriptorWriteIndex < descriptorWriteCount; ++descriptorWriteIndex)
+        {
+            const VkStructureType allowed_structs_VkWriteDescriptorSet[] = { VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV, VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT };
+
+            skip |= validate_struct_pnext("vkUpdateDescriptorSets", ParameterName("pDescriptorWrites[%i].pNext", ParameterName::IndexVector{ descriptorWriteIndex }), "VkWriteDescriptorSetAccelerationStructureNV, VkWriteDescriptorSetInlineUniformBlockEXT", pDescriptorWrites[descriptorWriteIndex].pNext, ARRAY_SIZE(allowed_structs_VkWriteDescriptorSet), allowed_structs_VkWriteDescriptorSet, GeneratedVulkanHeaderVersion, "VUID-VkWriteDescriptorSet-pNext-pNext");
+
+            skip |= validate_ranged_enum("vkUpdateDescriptorSets", ParameterName("pDescriptorWrites[%i].descriptorType", ParameterName::IndexVector{ descriptorWriteIndex }), "VkDescriptorType", AllVkDescriptorTypeEnums, pDescriptorWrites[descriptorWriteIndex].descriptorType, "VUID-VkWriteDescriptorSet-descriptorType-parameter");
+        }
+    }
+    skip |= validate_struct_type_array("vkUpdateDescriptorSets", "descriptorCopyCount", "pDescriptorCopies", "VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET", descriptorCopyCount, pDescriptorCopies, VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET, false, true, "VUID-VkCopyDescriptorSet-sType-sType", "VUID-vkUpdateDescriptorSets-pDescriptorCopies-parameter", kVUIDUndefined);
+    if (pDescriptorCopies != NULL)
+    {
+        for (uint32_t descriptorCopyIndex = 0; descriptorCopyIndex < descriptorCopyCount; ++descriptorCopyIndex)
+        {
+            skip |= validate_struct_pnext("vkUpdateDescriptorSets", ParameterName("pDescriptorCopies[%i].pNext", ParameterName::IndexVector{ descriptorCopyIndex }), NULL, pDescriptorCopies[descriptorCopyIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkCopyDescriptorSet-pNext-pNext");
+
+            skip |= validate_required_handle("vkUpdateDescriptorSets", ParameterName("pDescriptorCopies[%i].srcSet", ParameterName::IndexVector{ descriptorCopyIndex }), pDescriptorCopies[descriptorCopyIndex].srcSet);
+
+            skip |= validate_required_handle("vkUpdateDescriptorSets", ParameterName("pDescriptorCopies[%i].dstSet", ParameterName::IndexVector{ descriptorCopyIndex }), pDescriptorCopies[descriptorCopyIndex].dstSet);
+        }
+    }
+    if (!skip) skip |= manual_PreCallValidateUpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCreateFramebuffer(
+    VkDevice                                    device,
+    const VkFramebufferCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFramebuffer*                              pFramebuffer) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkCreateFramebuffer", "pCreateInfo", "VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, true, "VUID-vkCreateFramebuffer-pCreateInfo-parameter", "VUID-VkFramebufferCreateInfo-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkFramebufferCreateInfo[] = { VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR };
+
+        skip |= validate_struct_pnext("vkCreateFramebuffer", "pCreateInfo->pNext", "VkFramebufferAttachmentsCreateInfoKHR", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkFramebufferCreateInfo), allowed_structs_VkFramebufferCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkFramebufferCreateInfo-pNext-pNext");
+
+        skip |= validate_flags("vkCreateFramebuffer", "pCreateInfo->flags", "VkFramebufferCreateFlagBits", AllVkFramebufferCreateFlagBits, pCreateInfo->flags, kOptionalFlags, "VUID-VkFramebufferCreateInfo-flags-parameter");
+
+        skip |= validate_required_handle("vkCreateFramebuffer", "pCreateInfo->renderPass", pCreateInfo->renderPass);
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateFramebuffer", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateFramebuffer", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateFramebuffer", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateFramebuffer", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateFramebuffer", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateFramebuffer", "pFramebuffer", pFramebuffer, "VUID-vkCreateFramebuffer-pFramebuffer-parameter");
+    if (!skip) skip |= manual_PreCallValidateCreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroyFramebuffer(
+    VkDevice                                    device,
+    VkFramebuffer                               framebuffer,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroyFramebuffer", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroyFramebuffer", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroyFramebuffer", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyFramebuffer", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyFramebuffer", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCreateRenderPass(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkCreateRenderPass", "pCreateInfo", "VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, true, "VUID-vkCreateRenderPass-pCreateInfo-parameter", "VUID-VkRenderPassCreateInfo-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkRenderPassCreateInfo[] = { VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO, VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO };
+
+        skip |= validate_struct_pnext("vkCreateRenderPass", "pCreateInfo->pNext", "VkRenderPassFragmentDensityMapCreateInfoEXT, VkRenderPassInputAttachmentAspectCreateInfo, VkRenderPassMultiviewCreateInfo", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkRenderPassCreateInfo), allowed_structs_VkRenderPassCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkRenderPassCreateInfo-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkCreateRenderPass", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkRenderPassCreateInfo-flags-zerobitmask");
+
+        skip |= validate_array("vkCreateRenderPass", "pCreateInfo->attachmentCount", "pCreateInfo->pAttachments", pCreateInfo->attachmentCount, &pCreateInfo->pAttachments, false, true, kVUIDUndefined, "VUID-VkRenderPassCreateInfo-pAttachments-parameter");
+
+        if (pCreateInfo->pAttachments != NULL)
+        {
+            for (uint32_t attachmentIndex = 0; attachmentIndex < pCreateInfo->attachmentCount; ++attachmentIndex)
+            {
+                skip |= validate_flags("vkCreateRenderPass", ParameterName("pCreateInfo->pAttachments[%i].flags", ParameterName::IndexVector{ attachmentIndex }), "VkAttachmentDescriptionFlagBits", AllVkAttachmentDescriptionFlagBits, pCreateInfo->pAttachments[attachmentIndex].flags, kOptionalFlags, "VUID-VkAttachmentDescription-flags-parameter");
+
+                skip |= validate_ranged_enum("vkCreateRenderPass", ParameterName("pCreateInfo->pAttachments[%i].format", ParameterName::IndexVector{ attachmentIndex }), "VkFormat", AllVkFormatEnums, pCreateInfo->pAttachments[attachmentIndex].format, "VUID-VkAttachmentDescription-format-parameter");
+
+                skip |= validate_flags("vkCreateRenderPass", ParameterName("pCreateInfo->pAttachments[%i].samples", ParameterName::IndexVector{ attachmentIndex }), "VkSampleCountFlagBits", AllVkSampleCountFlagBits, pCreateInfo->pAttachments[attachmentIndex].samples, kRequiredSingleBit, "VUID-VkAttachmentDescription-samples-parameter", "VUID-VkAttachmentDescription-samples-parameter");
+
+                skip |= validate_ranged_enum("vkCreateRenderPass", ParameterName("pCreateInfo->pAttachments[%i].loadOp", ParameterName::IndexVector{ attachmentIndex }), "VkAttachmentLoadOp", AllVkAttachmentLoadOpEnums, pCreateInfo->pAttachments[attachmentIndex].loadOp, "VUID-VkAttachmentDescription-loadOp-parameter");
+
+                skip |= validate_ranged_enum("vkCreateRenderPass", ParameterName("pCreateInfo->pAttachments[%i].storeOp", ParameterName::IndexVector{ attachmentIndex }), "VkAttachmentStoreOp", AllVkAttachmentStoreOpEnums, pCreateInfo->pAttachments[attachmentIndex].storeOp, "VUID-VkAttachmentDescription-storeOp-parameter");
+
+                skip |= validate_ranged_enum("vkCreateRenderPass", ParameterName("pCreateInfo->pAttachments[%i].stencilLoadOp", ParameterName::IndexVector{ attachmentIndex }), "VkAttachmentLoadOp", AllVkAttachmentLoadOpEnums, pCreateInfo->pAttachments[attachmentIndex].stencilLoadOp, "VUID-VkAttachmentDescription-stencilLoadOp-parameter");
+
+                skip |= validate_ranged_enum("vkCreateRenderPass", ParameterName("pCreateInfo->pAttachments[%i].stencilStoreOp", ParameterName::IndexVector{ attachmentIndex }), "VkAttachmentStoreOp", AllVkAttachmentStoreOpEnums, pCreateInfo->pAttachments[attachmentIndex].stencilStoreOp, "VUID-VkAttachmentDescription-stencilStoreOp-parameter");
+
+                skip |= validate_ranged_enum("vkCreateRenderPass", ParameterName("pCreateInfo->pAttachments[%i].initialLayout", ParameterName::IndexVector{ attachmentIndex }), "VkImageLayout", AllVkImageLayoutEnums, pCreateInfo->pAttachments[attachmentIndex].initialLayout, "VUID-VkAttachmentDescription-initialLayout-parameter");
+
+                skip |= validate_ranged_enum("vkCreateRenderPass", ParameterName("pCreateInfo->pAttachments[%i].finalLayout", ParameterName::IndexVector{ attachmentIndex }), "VkImageLayout", AllVkImageLayoutEnums, pCreateInfo->pAttachments[attachmentIndex].finalLayout, "VUID-VkAttachmentDescription-finalLayout-parameter");
+            }
+        }
+
+        skip |= validate_array("vkCreateRenderPass", "pCreateInfo->subpassCount", "pCreateInfo->pSubpasses", pCreateInfo->subpassCount, &pCreateInfo->pSubpasses, true, true, "VUID-VkRenderPassCreateInfo-subpassCount-arraylength", "VUID-VkRenderPassCreateInfo-pSubpasses-parameter");
+
+        if (pCreateInfo->pSubpasses != NULL)
+        {
+            for (uint32_t subpassIndex = 0; subpassIndex < pCreateInfo->subpassCount; ++subpassIndex)
+            {
+                skip |= validate_flags("vkCreateRenderPass", ParameterName("pCreateInfo->pSubpasses[%i].flags", ParameterName::IndexVector{ subpassIndex }), "VkSubpassDescriptionFlagBits", AllVkSubpassDescriptionFlagBits, pCreateInfo->pSubpasses[subpassIndex].flags, kOptionalFlags, "VUID-VkSubpassDescription-flags-parameter");
+
+                skip |= validate_ranged_enum("vkCreateRenderPass", ParameterName("pCreateInfo->pSubpasses[%i].pipelineBindPoint", ParameterName::IndexVector{ subpassIndex }), "VkPipelineBindPoint", AllVkPipelineBindPointEnums, pCreateInfo->pSubpasses[subpassIndex].pipelineBindPoint, "VUID-VkSubpassDescription-pipelineBindPoint-parameter");
+
+                skip |= validate_array("vkCreateRenderPass", ParameterName("pCreateInfo->pSubpasses[%i].inputAttachmentCount", ParameterName::IndexVector{ subpassIndex }), ParameterName("pCreateInfo->pSubpasses[%i].pInputAttachments", ParameterName::IndexVector{ subpassIndex }), pCreateInfo->pSubpasses[subpassIndex].inputAttachmentCount, &pCreateInfo->pSubpasses[subpassIndex].pInputAttachments, false, true, kVUIDUndefined, "VUID-VkSubpassDescription-pInputAttachments-parameter");
+
+                if (pCreateInfo->pSubpasses[subpassIndex].pInputAttachments != NULL)
+                {
+                    for (uint32_t inputAttachmentIndex = 0; inputAttachmentIndex < pCreateInfo->pSubpasses[subpassIndex].inputAttachmentCount; ++inputAttachmentIndex)
+                    {
+                        skip |= validate_ranged_enum("vkCreateRenderPass", ParameterName("pCreateInfo->pSubpasses[%i].pInputAttachments[%i].layout", ParameterName::IndexVector{ subpassIndex, inputAttachmentIndex }), "VkImageLayout", AllVkImageLayoutEnums, pCreateInfo->pSubpasses[subpassIndex].pInputAttachments[inputAttachmentIndex].layout, "VUID-VkAttachmentReference-layout-parameter");
+                    }
+                }
+
+                skip |= validate_array("vkCreateRenderPass", ParameterName("pCreateInfo->pSubpasses[%i].colorAttachmentCount", ParameterName::IndexVector{ subpassIndex }), ParameterName("pCreateInfo->pSubpasses[%i].pColorAttachments", ParameterName::IndexVector{ subpassIndex }), pCreateInfo->pSubpasses[subpassIndex].colorAttachmentCount, &pCreateInfo->pSubpasses[subpassIndex].pColorAttachments, false, true, kVUIDUndefined, "VUID-VkSubpassDescription-pColorAttachments-parameter");
+
+                if (pCreateInfo->pSubpasses[subpassIndex].pColorAttachments != NULL)
+                {
+                    for (uint32_t colorAttachmentIndex = 0; colorAttachmentIndex < pCreateInfo->pSubpasses[subpassIndex].colorAttachmentCount; ++colorAttachmentIndex)
+                    {
+                        skip |= validate_ranged_enum("vkCreateRenderPass", ParameterName("pCreateInfo->pSubpasses[%i].pColorAttachments[%i].layout", ParameterName::IndexVector{ subpassIndex, colorAttachmentIndex }), "VkImageLayout", AllVkImageLayoutEnums, pCreateInfo->pSubpasses[subpassIndex].pColorAttachments[colorAttachmentIndex].layout, "VUID-VkAttachmentReference-layout-parameter");
+                    }
+                }
+
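+                // pResolveAttachments, when present, is sized by colorAttachmentCount.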
+                if (pCreateInfo->pSubpasses[subpassIndex].pResolveAttachments != NULL)
+                {
+                    for (uint32_t colorAttachmentIndex = 0; colorAttachmentIndex < pCreateInfo->pSubpasses[subpassIndex].colorAttachmentCount; ++colorAttachmentIndex)
+                    {
+                        skip |= validate_ranged_enum("vkCreateRenderPass", ParameterName("pCreateInfo->pSubpasses[%i].pResolveAttachments[%i].layout", ParameterName::IndexVector{ subpassIndex, colorAttachmentIndex }), "VkImageLayout", AllVkImageLayoutEnums, pCreateInfo->pSubpasses[subpassIndex].pResolveAttachments[colorAttachmentIndex].layout, "VUID-VkAttachmentReference-layout-parameter");
+                    }
+                }
+
+                if (pCreateInfo->pSubpasses[subpassIndex].pDepthStencilAttachment != NULL)
+                {
+                    skip |= validate_ranged_enum("vkCreateRenderPass", ParameterName("pCreateInfo->pSubpasses[%i].pDepthStencilAttachment->layout", ParameterName::IndexVector{ subpassIndex }), "VkImageLayout", AllVkImageLayoutEnums, pCreateInfo->pSubpasses[subpassIndex].pDepthStencilAttachment->layout, "VUID-VkAttachmentReference-layout-parameter");
+                }
+
+                skip |= validate_array("vkCreateRenderPass", ParameterName("pCreateInfo->pSubpasses[%i].preserveAttachmentCount", ParameterName::IndexVector{ subpassIndex }), ParameterName("pCreateInfo->pSubpasses[%i].pPreserveAttachments", ParameterName::IndexVector{ subpassIndex }), pCreateInfo->pSubpasses[subpassIndex].preserveAttachmentCount, &pCreateInfo->pSubpasses[subpassIndex].pPreserveAttachments, false, true, kVUIDUndefined, "VUID-VkSubpassDescription-pPreserveAttachments-parameter");
+            }
+        }
+
+        skip |= validate_array("vkCreateRenderPass", "pCreateInfo->dependencyCount", "pCreateInfo->pDependencies", pCreateInfo->dependencyCount, &pCreateInfo->pDependencies, false, true, kVUIDUndefined, "VUID-VkRenderPassCreateInfo-pDependencies-parameter");
+
+        if (pCreateInfo->pDependencies != NULL)
+        {
+            for (uint32_t dependencyIndex = 0; dependencyIndex < pCreateInfo->dependencyCount; ++dependencyIndex)
+            {
+                skip |= validate_flags("vkCreateRenderPass", ParameterName("pCreateInfo->pDependencies[%i].srcStageMask", ParameterName::IndexVector{ dependencyIndex }), "VkPipelineStageFlagBits", AllVkPipelineStageFlagBits, pCreateInfo->pDependencies[dependencyIndex].srcStageMask, kRequiredFlags, "VUID-VkSubpassDependency-srcStageMask-parameter", "VUID-VkSubpassDependency-srcStageMask-requiredbitmask");
+
+                skip |= validate_flags("vkCreateRenderPass", ParameterName("pCreateInfo->pDependencies[%i].dstStageMask", ParameterName::IndexVector{ dependencyIndex }), "VkPipelineStageFlagBits", AllVkPipelineStageFlagBits, pCreateInfo->pDependencies[dependencyIndex].dstStageMask, kRequiredFlags, "VUID-VkSubpassDependency-dstStageMask-parameter", "VUID-VkSubpassDependency-dstStageMask-requiredbitmask");
+
+                skip |= validate_flags("vkCreateRenderPass", ParameterName("pCreateInfo->pDependencies[%i].srcAccessMask", ParameterName::IndexVector{ dependencyIndex }), "VkAccessFlagBits", AllVkAccessFlagBits, pCreateInfo->pDependencies[dependencyIndex].srcAccessMask, kOptionalFlags, "VUID-VkSubpassDependency-srcAccessMask-parameter");
+
+                skip |= validate_flags("vkCreateRenderPass", ParameterName("pCreateInfo->pDependencies[%i].dstAccessMask", ParameterName::IndexVector{ dependencyIndex }), "VkAccessFlagBits", AllVkAccessFlagBits, pCreateInfo->pDependencies[dependencyIndex].dstAccessMask, kOptionalFlags, "VUID-VkSubpassDependency-dstAccessMask-parameter");
+
+                skip |= validate_flags("vkCreateRenderPass", ParameterName("pCreateInfo->pDependencies[%i].dependencyFlags", ParameterName::IndexVector{ dependencyIndex }), "VkDependencyFlagBits", AllVkDependencyFlagBits, pCreateInfo->pDependencies[dependencyIndex].dependencyFlags, kOptionalFlags, "VUID-VkSubpassDependency-dependencyFlags-parameter");
+            }
+        }
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateRenderPass", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateRenderPass", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateRenderPass", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateRenderPass", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateRenderPass", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateRenderPass", "pRenderPass", pRenderPass, "VUID-vkCreateRenderPass-pRenderPass-parameter");
+    if (!skip) skip |= manual_PreCallValidateCreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroyRenderPass(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroyRenderPass", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroyRenderPass", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroyRenderPass", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyRenderPass", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyRenderPass", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetRenderAreaGranularity(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    VkExtent2D*                                 pGranularity) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkGetRenderAreaGranularity", "renderPass", renderPass);
+    skip |= validate_required_pointer("vkGetRenderAreaGranularity", "pGranularity", pGranularity, "VUID-vkGetRenderAreaGranularity-pGranularity-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCreateCommandPool(
+    VkDevice                                    device,
+    const VkCommandPoolCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkCommandPool*                              pCommandPool) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkCreateCommandPool", "pCreateInfo", "VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, true, "VUID-vkCreateCommandPool-pCreateInfo-parameter", "VUID-VkCommandPoolCreateInfo-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCreateCommandPool", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkCommandPoolCreateInfo-pNext-pNext");
+
+        skip |= validate_flags("vkCreateCommandPool", "pCreateInfo->flags", "VkCommandPoolCreateFlagBits", AllVkCommandPoolCreateFlagBits, pCreateInfo->flags, kOptionalFlags, "VUID-VkCommandPoolCreateInfo-flags-parameter");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateCommandPool", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateCommandPool", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateCommandPool", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateCommandPool", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateCommandPool", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateCommandPool", "pCommandPool", pCommandPool, "VUID-vkCreateCommandPool-pCommandPool-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroyCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroyCommandPool", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroyCommandPool", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroyCommandPool", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyCommandPool", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyCommandPool", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateResetCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolResetFlags                     flags) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkResetCommandPool", "commandPool", commandPool);
+    skip |= validate_flags("vkResetCommandPool", "flags", "VkCommandPoolResetFlagBits", AllVkCommandPoolResetFlagBits, flags, kOptionalFlags, "VUID-vkResetCommandPool-flags-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateAllocateCommandBuffers(
+    VkDevice                                    device,
+    const VkCommandBufferAllocateInfo*          pAllocateInfo,
+    VkCommandBuffer*                            pCommandBuffers) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkAllocateCommandBuffers", "pAllocateInfo", "VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO", pAllocateInfo, VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, true, "VUID-vkAllocateCommandBuffers-pAllocateInfo-parameter", "VUID-VkCommandBufferAllocateInfo-sType-sType");
+    if (pAllocateInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkAllocateCommandBuffers", "pAllocateInfo->pNext", NULL, pAllocateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkCommandBufferAllocateInfo-pNext-pNext");
+
+        skip |= validate_required_handle("vkAllocateCommandBuffers", "pAllocateInfo->commandPool", pAllocateInfo->commandPool);
+
+        skip |= validate_ranged_enum("vkAllocateCommandBuffers", "pAllocateInfo->level", "VkCommandBufferLevel", AllVkCommandBufferLevelEnums, pAllocateInfo->level, "VUID-VkCommandBufferAllocateInfo-level-parameter");
+    }
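+    // commandBufferCount also gives the length of the output pCommandBuffers array.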
+    if (pAllocateInfo != NULL) {
+        skip |= validate_array("vkAllocateCommandBuffers", "pAllocateInfo->commandBufferCount", "pCommandBuffers", pAllocateInfo->commandBufferCount, &pCommandBuffers, true, true, kVUIDUndefined, "VUID-vkAllocateCommandBuffers-pCommandBuffers-parameter");
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateFreeCommandBuffers(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkFreeCommandBuffers", "commandPool", commandPool);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateBeginCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    const VkCommandBufferBeginInfo*             pBeginInfo) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkBeginCommandBuffer", "pBeginInfo", "VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO", pBeginInfo, VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, true, "VUID-vkBeginCommandBuffer-pBeginInfo-parameter", "VUID-VkCommandBufferBeginInfo-sType-sType");
+    if (pBeginInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkCommandBufferBeginInfo[] = { VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO };
+
+        skip |= validate_struct_pnext("vkBeginCommandBuffer", "pBeginInfo->pNext", "VkDeviceGroupCommandBufferBeginInfo", pBeginInfo->pNext, ARRAY_SIZE(allowed_structs_VkCommandBufferBeginInfo), allowed_structs_VkCommandBufferBeginInfo, GeneratedVulkanHeaderVersion, "VUID-VkCommandBufferBeginInfo-pNext-pNext");
+
+        skip |= validate_flags("vkBeginCommandBuffer", "pBeginInfo->flags", "VkCommandBufferUsageFlagBits", AllVkCommandBufferUsageFlagBits, pBeginInfo->flags, kOptionalFlags, "VUID-VkCommandBufferBeginInfo-flags-parameter");
+    }
+    if (!skip) skip |= manual_PreCallValidateBeginCommandBuffer(commandBuffer, pBeginInfo);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateEndCommandBuffer(
+    VkCommandBuffer                             commandBuffer) const {
+    bool skip = false;
+    // No xml-driven validation
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateResetCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkCommandBufferResetFlags                   flags) const {
+    bool skip = false;
+    skip |= validate_flags("vkResetCommandBuffer", "flags", "VkCommandBufferResetFlagBits", AllVkCommandBufferResetFlagBits, flags, kOptionalFlags, "VUID-vkResetCommandBuffer-flags-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdBindPipeline(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipeline                                  pipeline) const {
+    bool skip = false;
+    skip |= validate_ranged_enum("vkCmdBindPipeline", "pipelineBindPoint", "VkPipelineBindPoint", AllVkPipelineBindPointEnums, pipelineBindPoint, "VUID-vkCmdBindPipeline-pipelineBindPoint-parameter");
+    skip |= validate_required_handle("vkCmdBindPipeline", "pipeline", pipeline);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdSetViewport(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewport*                           pViewports) const {
+    bool skip = false;
+    skip |= validate_array("vkCmdSetViewport", "viewportCount", "pViewports", viewportCount, &pViewports, true, true, "VUID-vkCmdSetViewport-viewportCount-arraylength", "VUID-vkCmdSetViewport-pViewports-parameter");
+    if (pViewports != NULL)
+    {
+        for (uint32_t viewportIndex = 0; viewportIndex < viewportCount; ++viewportIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    if (!skip) skip |= manual_PreCallValidateCmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdSetScissor(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstScissor,
+    uint32_t                                    scissorCount,
+    const VkRect2D*                             pScissors) const {
+    bool skip = false;
+    skip |= validate_array("vkCmdSetScissor", "scissorCount", "pScissors", scissorCount, &pScissors, true, true, "VUID-vkCmdSetScissor-scissorCount-arraylength", "VUID-vkCmdSetScissor-pScissors-parameter");
+    if (pScissors != NULL)
+    {
+        for (uint32_t scissorIndex = 0; scissorIndex < scissorCount; ++scissorIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    if (!skip) skip |= manual_PreCallValidateCmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdSetLineWidth(
+    VkCommandBuffer                             commandBuffer,
+    float                                       lineWidth) const {
+    bool skip = false;
+    // No xml-driven validation
+    if (!skip) skip |= manual_PreCallValidateCmdSetLineWidth(commandBuffer, lineWidth);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdSetDepthBias(
+    VkCommandBuffer                             commandBuffer,
+    float                                       depthBiasConstantFactor,
+    float                                       depthBiasClamp,
+    float                                       depthBiasSlopeFactor) const {
+    bool skip = false;
+    // No xml-driven validation
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdSetBlendConstants(
+    VkCommandBuffer                             commandBuffer,
+    const float                                 blendConstants[4]) const {
+    bool skip = false;
+    skip |= validate_required_pointer("vkCmdSetBlendConstants", "blendConstants", blendConstants, kVUIDUndefined);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdSetDepthBounds(
+    VkCommandBuffer                             commandBuffer,
+    float                                       minDepthBounds,
+    float                                       maxDepthBounds) const {
+    bool skip = false;
+    // No xml-driven validation
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdSetStencilCompareMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    compareMask) const {
+    bool skip = false;
+    skip |= validate_flags("vkCmdSetStencilCompareMask", "faceMask", "VkStencilFaceFlagBits", AllVkStencilFaceFlagBits, faceMask, kRequiredFlags, "VUID-vkCmdSetStencilCompareMask-faceMask-parameter", "VUID-vkCmdSetStencilCompareMask-faceMask-requiredbitmask");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdSetStencilWriteMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    writeMask) const {
+    bool skip = false;
+    skip |= validate_flags("vkCmdSetStencilWriteMask", "faceMask", "VkStencilFaceFlagBits", AllVkStencilFaceFlagBits, faceMask, kRequiredFlags, "VUID-vkCmdSetStencilWriteMask-faceMask-parameter", "VUID-vkCmdSetStencilWriteMask-faceMask-requiredbitmask");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdSetStencilReference(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    reference) const {
+    bool skip = false;
+    skip |= validate_flags("vkCmdSetStencilReference", "faceMask", "VkStencilFaceFlagBits", AllVkStencilFaceFlagBits, faceMask, kRequiredFlags, "VUID-vkCmdSetStencilReference-faceMask-parameter", "VUID-vkCmdSetStencilReference-faceMask-requiredbitmask");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdBindDescriptorSets(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    firstSet,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets,
+    uint32_t                                    dynamicOffsetCount,
+    const uint32_t*                             pDynamicOffsets) const {
+    bool skip = false;
+    skip |= validate_ranged_enum("vkCmdBindDescriptorSets", "pipelineBindPoint", "VkPipelineBindPoint", AllVkPipelineBindPointEnums, pipelineBindPoint, "VUID-vkCmdBindDescriptorSets-pipelineBindPoint-parameter");
+    skip |= validate_required_handle("vkCmdBindDescriptorSets", "layout", layout);
+    skip |= validate_handle_array("vkCmdBindDescriptorSets", "descriptorSetCount", "pDescriptorSets", descriptorSetCount, pDescriptorSets, true, true);
+    skip |= validate_array("vkCmdBindDescriptorSets", "dynamicOffsetCount", "pDynamicOffsets", dynamicOffsetCount, &pDynamicOffsets, false, true, kVUIDUndefined, "VUID-vkCmdBindDescriptorSets-pDynamicOffsets-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdBindIndexBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkIndexType                                 indexType) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkCmdBindIndexBuffer", "buffer", buffer);
+    skip |= validate_ranged_enum("vkCmdBindIndexBuffer", "indexType", "VkIndexType", AllVkIndexTypeEnums, indexType, "VUID-vkCmdBindIndexBuffer-indexType-parameter");
+    if (!skip) skip |= manual_PreCallValidateCmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdBindVertexBuffers(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets) const {
+    bool skip = false;
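+    // pBuffers and pOffsets are parallel arrays, both of length bindingCount.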
+    skip |= validate_handle_array("vkCmdBindVertexBuffers", "bindingCount", "pBuffers", bindingCount, pBuffers, true, true);
+    skip |= validate_array("vkCmdBindVertexBuffers", "bindingCount", "pOffsets", bindingCount, &pOffsets, true, true, "VUID-vkCmdBindVertexBuffers-bindingCount-arraylength", "VUID-vkCmdBindVertexBuffers-pOffsets-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdDraw(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    vertexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstVertex,
+    uint32_t                                    firstInstance) const {
+    bool skip = false;
+    // No xml-driven validation
+    if (!skip) skip |= manual_PreCallValidateCmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdDrawIndexed(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    indexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstIndex,
+    int32_t                                     vertexOffset,
+    uint32_t                                    firstInstance) const {
+    bool skip = false;
+    // No xml-driven validation
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdDrawIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkCmdDrawIndirect", "buffer", buffer);
+    if (!skip) skip |= manual_PreCallValidateCmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdDrawIndexedIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkCmdDrawIndexedIndirect", "buffer", buffer);
+    if (!skip) skip |= manual_PreCallValidateCmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdDispatch(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ) const {
+    bool skip = false;
+    // No xml-driven validation
+    if (!skip) skip |= manual_PreCallValidateCmdDispatch(commandBuffer, groupCountX, groupCountY, groupCountZ);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdDispatchIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkCmdDispatchIndirect", "buffer", buffer);
+    if (!skip) skip |= manual_PreCallValidateCmdDispatchIndirect(commandBuffer, buffer, offset);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdCopyBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferCopy*                         pRegions) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkCmdCopyBuffer", "srcBuffer", srcBuffer);
+    skip |= validate_required_handle("vkCmdCopyBuffer", "dstBuffer", dstBuffer);
+    skip |= validate_array("vkCmdCopyBuffer", "regionCount", "pRegions", regionCount, &pRegions, true, true, "VUID-vkCmdCopyBuffer-regionCount-arraylength", "VUID-vkCmdCopyBuffer-pRegions-parameter");
+    if (pRegions != NULL)
+    {
+        for (uint32_t regionIndex = 0; regionIndex < regionCount; ++regionIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdCopyImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageCopy*                          pRegions) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkCmdCopyImage", "srcImage", srcImage);
+    skip |= validate_ranged_enum("vkCmdCopyImage", "srcImageLayout", "VkImageLayout", AllVkImageLayoutEnums, srcImageLayout, "VUID-vkCmdCopyImage-srcImageLayout-parameter");
+    skip |= validate_required_handle("vkCmdCopyImage", "dstImage", dstImage);
+    skip |= validate_ranged_enum("vkCmdCopyImage", "dstImageLayout", "VkImageLayout", AllVkImageLayoutEnums, dstImageLayout, "VUID-vkCmdCopyImage-dstImageLayout-parameter");
+    skip |= validate_array("vkCmdCopyImage", "regionCount", "pRegions", regionCount, &pRegions, true, true, "VUID-vkCmdCopyImage-regionCount-arraylength", "VUID-vkCmdCopyImage-pRegions-parameter");
+    if (pRegions != NULL)
+    {
+        for (uint32_t regionIndex = 0; regionIndex < regionCount; ++regionIndex)
+        {
+            skip |= validate_flags("vkCmdCopyImage", ParameterName("pRegions[%i].srcSubresource.aspectMask", ParameterName::IndexVector{ regionIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pRegions[regionIndex].srcSubresource.aspectMask, kRequiredFlags, "VUID-VkImageSubresourceLayers-aspectMask-parameter", "VUID-VkImageSubresourceLayers-aspectMask-requiredbitmask");
+
+            // No xml-driven validation
+
+            skip |= validate_flags("vkCmdCopyImage", ParameterName("pRegions[%i].dstSubresource.aspectMask", ParameterName::IndexVector{ regionIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pRegions[regionIndex].dstSubresource.aspectMask, kRequiredFlags, "VUID-VkImageSubresourceLayers-aspectMask-parameter", "VUID-VkImageSubresourceLayers-aspectMask-requiredbitmask");
+
+            // No xml-driven validation
+
+            // No xml-driven validation
+        }
+    }
+    if (!skip) skip |= manual_PreCallValidateCmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdBlitImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageBlit*                          pRegions,
+    VkFilter                                    filter) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkCmdBlitImage", "srcImage", srcImage);
+    skip |= validate_ranged_enum("vkCmdBlitImage", "srcImageLayout", "VkImageLayout", AllVkImageLayoutEnums, srcImageLayout, "VUID-vkCmdBlitImage-srcImageLayout-parameter");
+    skip |= validate_required_handle("vkCmdBlitImage", "dstImage", dstImage);
+    skip |= validate_ranged_enum("vkCmdBlitImage", "dstImageLayout", "VkImageLayout", AllVkImageLayoutEnums, dstImageLayout, "VUID-vkCmdBlitImage-dstImageLayout-parameter");
+    skip |= validate_array("vkCmdBlitImage", "regionCount", "pRegions", regionCount, &pRegions, true, true, "VUID-vkCmdBlitImage-regionCount-arraylength", "VUID-vkCmdBlitImage-pRegions-parameter");
+    if (pRegions != NULL)
+    {
+        for (uint32_t regionIndex = 0; regionIndex < regionCount; ++regionIndex)
+        {
+            skip |= validate_flags("vkCmdBlitImage", ParameterName("pRegions[%i].srcSubresource.aspectMask", ParameterName::IndexVector{ regionIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pRegions[regionIndex].srcSubresource.aspectMask, kRequiredFlags, "VUID-VkImageSubresourceLayers-aspectMask-parameter", "VUID-VkImageSubresourceLayers-aspectMask-requiredbitmask");
+
+            skip |= validate_flags("vkCmdBlitImage", ParameterName("pRegions[%i].dstSubresource.aspectMask", ParameterName::IndexVector{ regionIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pRegions[regionIndex].dstSubresource.aspectMask, kRequiredFlags, "VUID-VkImageSubresourceLayers-aspectMask-parameter", "VUID-VkImageSubresourceLayers-aspectMask-requiredbitmask");
+        }
+    }
+    skip |= validate_ranged_enum("vkCmdBlitImage", "filter", "VkFilter", AllVkFilterEnums, filter, "VUID-vkCmdBlitImage-filter-parameter");
+    if (!skip) skip |= manual_PreCallValidateCmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdCopyBufferToImage(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkCmdCopyBufferToImage", "srcBuffer", srcBuffer);
+    skip |= validate_required_handle("vkCmdCopyBufferToImage", "dstImage", dstImage);
+    skip |= validate_ranged_enum("vkCmdCopyBufferToImage", "dstImageLayout", "VkImageLayout", AllVkImageLayoutEnums, dstImageLayout, "VUID-vkCmdCopyBufferToImage-dstImageLayout-parameter");
+    skip |= validate_array("vkCmdCopyBufferToImage", "regionCount", "pRegions", regionCount, &pRegions, true, true, "VUID-vkCmdCopyBufferToImage-regionCount-arraylength", "VUID-vkCmdCopyBufferToImage-pRegions-parameter");
+    if (pRegions != NULL)
+    {
+        for (uint32_t regionIndex = 0; regionIndex < regionCount; ++regionIndex)
+        {
+            skip |= validate_flags("vkCmdCopyBufferToImage", ParameterName("pRegions[%i].imageSubresource.aspectMask", ParameterName::IndexVector{ regionIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pRegions[regionIndex].imageSubresource.aspectMask, kRequiredFlags, "VUID-VkImageSubresourceLayers-aspectMask-parameter", "VUID-VkImageSubresourceLayers-aspectMask-requiredbitmask");
+
+            // No xml-driven validation
+
+            // No xml-driven validation
+        }
+    }
+    if (!skip) skip |= manual_PreCallValidateCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdCopyImageToBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkCmdCopyImageToBuffer", "srcImage", srcImage);
+    skip |= validate_ranged_enum("vkCmdCopyImageToBuffer", "srcImageLayout", "VkImageLayout", AllVkImageLayoutEnums, srcImageLayout, "VUID-vkCmdCopyImageToBuffer-srcImageLayout-parameter");
+    skip |= validate_required_handle("vkCmdCopyImageToBuffer", "dstBuffer", dstBuffer);
+    skip |= validate_array("vkCmdCopyImageToBuffer", "regionCount", "pRegions", regionCount, &pRegions, true, true, "VUID-vkCmdCopyImageToBuffer-regionCount-arraylength", "VUID-vkCmdCopyImageToBuffer-pRegions-parameter");
+    if (pRegions != NULL)
+    {
+        for (uint32_t regionIndex = 0; regionIndex < regionCount; ++regionIndex)
+        {
+            skip |= validate_flags("vkCmdCopyImageToBuffer", ParameterName("pRegions[%i].imageSubresource.aspectMask", ParameterName::IndexVector{ regionIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pRegions[regionIndex].imageSubresource.aspectMask, kRequiredFlags, "VUID-VkImageSubresourceLayers-aspectMask-parameter", "VUID-VkImageSubresourceLayers-aspectMask-requiredbitmask");
+
+            // No xml-driven validation
+
+            // No xml-driven validation
+        }
+    }
+    if (!skip) skip |= manual_PreCallValidateCmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdUpdateBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                dataSize,
+    const void*                                 pData) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkCmdUpdateBuffer", "dstBuffer", dstBuffer);
+    skip |= validate_array("vkCmdUpdateBuffer", "dataSize", "pData", dataSize, &pData, true, true, "VUID-vkCmdUpdateBuffer-dataSize-arraylength", "VUID-vkCmdUpdateBuffer-pData-parameter");
+    if (!skip) skip |= manual_PreCallValidateCmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdFillBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                size,
+    uint32_t                                    data) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkCmdFillBuffer", "dstBuffer", dstBuffer);
+    if (!skip) skip |= manual_PreCallValidateCmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdClearColorImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearColorValue*                    pColor,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkCmdClearColorImage", "image", image);
+    skip |= validate_ranged_enum("vkCmdClearColorImage", "imageLayout", "VkImageLayout", AllVkImageLayoutEnums, imageLayout, "VUID-vkCmdClearColorImage-imageLayout-parameter");
+    skip |= validate_required_pointer("vkCmdClearColorImage", "pColor", pColor, "VUID-vkCmdClearColorImage-pColor-parameter");
+    if (pColor != NULL)
+    {
+        // No xml-driven validation
+    }
+    skip |= validate_array("vkCmdClearColorImage", "rangeCount", "pRanges", rangeCount, &pRanges, true, true, "VUID-vkCmdClearColorImage-rangeCount-arraylength", "VUID-vkCmdClearColorImage-pRanges-parameter");
+    if (pRanges != NULL)
+    {
+        for (uint32_t rangeIndex = 0; rangeIndex < rangeCount; ++rangeIndex)
+        {
+            skip |= validate_flags("vkCmdClearColorImage", ParameterName("pRanges[%i].aspectMask", ParameterName::IndexVector{ rangeIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pRanges[rangeIndex].aspectMask, kRequiredFlags, "VUID-VkImageSubresourceRange-aspectMask-parameter", "VUID-VkImageSubresourceRange-aspectMask-requiredbitmask");
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdClearDepthStencilImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearDepthStencilValue*             pDepthStencil,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkCmdClearDepthStencilImage", "image", image);
+    skip |= validate_ranged_enum("vkCmdClearDepthStencilImage", "imageLayout", "VkImageLayout", AllVkImageLayoutEnums, imageLayout, "VUID-vkCmdClearDepthStencilImage-imageLayout-parameter");
+    skip |= validate_required_pointer("vkCmdClearDepthStencilImage", "pDepthStencil", pDepthStencil, "VUID-vkCmdClearDepthStencilImage-pDepthStencil-parameter");
+    if (pDepthStencil != NULL)
+    {
+        // No xml-driven validation
+    }
+    skip |= validate_array("vkCmdClearDepthStencilImage", "rangeCount", "pRanges", rangeCount, &pRanges, true, true, "VUID-vkCmdClearDepthStencilImage-rangeCount-arraylength", "VUID-vkCmdClearDepthStencilImage-pRanges-parameter");
+    if (pRanges != NULL)
+    {
+        for (uint32_t rangeIndex = 0; rangeIndex < rangeCount; ++rangeIndex)
+        {
+            skip |= validate_flags("vkCmdClearDepthStencilImage", ParameterName("pRanges[%i].aspectMask", ParameterName::IndexVector{ rangeIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pRanges[rangeIndex].aspectMask, kRequiredFlags, "VUID-VkImageSubresourceRange-aspectMask-parameter", "VUID-VkImageSubresourceRange-aspectMask-requiredbitmask");
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdClearAttachments(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    attachmentCount,
+    const VkClearAttachment*                    pAttachments,
+    uint32_t                                    rectCount,
+    const VkClearRect*                          pRects) const {
+    bool skip = false;
+    skip |= validate_array("vkCmdClearAttachments", "attachmentCount", "pAttachments", attachmentCount, &pAttachments, true, true, "VUID-vkCmdClearAttachments-attachmentCount-arraylength", "VUID-vkCmdClearAttachments-pAttachments-parameter");
+    if (pAttachments != NULL)
+    {
+        for (uint32_t attachmentIndex = 0; attachmentIndex < attachmentCount; ++attachmentIndex)
+        {
+            skip |= validate_flags("vkCmdClearAttachments", ParameterName("pAttachments[%i].aspectMask", ParameterName::IndexVector{ attachmentIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pAttachments[attachmentIndex].aspectMask, kRequiredFlags, "VUID-VkClearAttachment-aspectMask-parameter", "VUID-VkClearAttachment-aspectMask-requiredbitmask");
+
+            // No xml-driven validation
+
+            // No xml-driven validation
+        }
+    }
+    skip |= validate_array("vkCmdClearAttachments", "rectCount", "pRects", rectCount, &pRects, true, true, "VUID-vkCmdClearAttachments-rectCount-arraylength", "VUID-vkCmdClearAttachments-pRects-parameter");
+    if (pRects != NULL)
+    {
+        for (uint32_t rectIndex = 0; rectIndex < rectCount; ++rectIndex)
+        {
+            // No xml-driven validation
+
+            // No xml-driven validation
+        }
+    }
+    if (!skip) skip |= manual_PreCallValidateCmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdResolveImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageResolve*                       pRegions) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkCmdResolveImage", "srcImage", srcImage);
+    skip |= validate_ranged_enum("vkCmdResolveImage", "srcImageLayout", "VkImageLayout", AllVkImageLayoutEnums, srcImageLayout, "VUID-vkCmdResolveImage-srcImageLayout-parameter");
+    skip |= validate_required_handle("vkCmdResolveImage", "dstImage", dstImage);
+    skip |= validate_ranged_enum("vkCmdResolveImage", "dstImageLayout", "VkImageLayout", AllVkImageLayoutEnums, dstImageLayout, "VUID-vkCmdResolveImage-dstImageLayout-parameter");
+    skip |= validate_array("vkCmdResolveImage", "regionCount", "pRegions", regionCount, &pRegions, true, true, "VUID-vkCmdResolveImage-regionCount-arraylength", "VUID-vkCmdResolveImage-pRegions-parameter");
+    if (pRegions != NULL)
+    {
+        for (uint32_t regionIndex = 0; regionIndex < regionCount; ++regionIndex)
+        {
+            skip |= validate_flags("vkCmdResolveImage", ParameterName("pRegions[%i].srcSubresource.aspectMask", ParameterName::IndexVector{ regionIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pRegions[regionIndex].srcSubresource.aspectMask, kRequiredFlags, "VUID-VkImageSubresourceLayers-aspectMask-parameter", "VUID-VkImageSubresourceLayers-aspectMask-requiredbitmask");
+
+            // No xml-driven validation
+
+            skip |= validate_flags("vkCmdResolveImage", ParameterName("pRegions[%i].dstSubresource.aspectMask", ParameterName::IndexVector{ regionIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pRegions[regionIndex].dstSubresource.aspectMask, kRequiredFlags, "VUID-VkImageSubresourceLayers-aspectMask-parameter", "VUID-VkImageSubresourceLayers-aspectMask-requiredbitmask");
+
+            // No xml-driven validation
+
+            // No xml-driven validation
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdSetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkCmdSetEvent", "event", event);
+    skip |= validate_flags("vkCmdSetEvent", "stageMask", "VkPipelineStageFlagBits", AllVkPipelineStageFlagBits, stageMask, kRequiredFlags, "VUID-vkCmdSetEvent-stageMask-parameter", "VUID-vkCmdSetEvent-stageMask-requiredbitmask");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdResetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkCmdResetEvent", "event", event);
+    skip |= validate_flags("vkCmdResetEvent", "stageMask", "VkPipelineStageFlagBits", AllVkPipelineStageFlagBits, stageMask, kRequiredFlags, "VUID-vkCmdResetEvent-stageMask-parameter", "VUID-vkCmdResetEvent-stageMask-requiredbitmask");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdWaitEvents(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    eventCount,
+    const VkEvent*                              pEvents,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers) const {
+    bool skip = false;
+    skip |= validate_handle_array("vkCmdWaitEvents", "eventCount", "pEvents", eventCount, pEvents, true, true);
+    skip |= validate_flags("vkCmdWaitEvents", "srcStageMask", "VkPipelineStageFlagBits", AllVkPipelineStageFlagBits, srcStageMask, kRequiredFlags, "VUID-vkCmdWaitEvents-srcStageMask-parameter", "VUID-vkCmdWaitEvents-srcStageMask-requiredbitmask");
+    skip |= validate_flags("vkCmdWaitEvents", "dstStageMask", "VkPipelineStageFlagBits", AllVkPipelineStageFlagBits, dstStageMask, kRequiredFlags, "VUID-vkCmdWaitEvents-dstStageMask-parameter", "VUID-vkCmdWaitEvents-dstStageMask-requiredbitmask");
+    skip |= validate_struct_type_array("vkCmdWaitEvents", "memoryBarrierCount", "pMemoryBarriers", "VK_STRUCTURE_TYPE_MEMORY_BARRIER", memoryBarrierCount, pMemoryBarriers, VK_STRUCTURE_TYPE_MEMORY_BARRIER, false, true, "VUID-VkMemoryBarrier-sType-sType", "VUID-vkCmdWaitEvents-pMemoryBarriers-parameter", kVUIDUndefined);
+    if (pMemoryBarriers != NULL)
+    {
+        for (uint32_t memoryBarrierIndex = 0; memoryBarrierIndex < memoryBarrierCount; ++memoryBarrierIndex)
+        {
+            skip |= validate_struct_pnext("vkCmdWaitEvents", ParameterName("pMemoryBarriers[%i].pNext", ParameterName::IndexVector{ memoryBarrierIndex }), NULL, pMemoryBarriers[memoryBarrierIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkMemoryBarrier-pNext-pNext");
+
+            skip |= validate_flags("vkCmdWaitEvents", ParameterName("pMemoryBarriers[%i].srcAccessMask", ParameterName::IndexVector{ memoryBarrierIndex }), "VkAccessFlagBits", AllVkAccessFlagBits, pMemoryBarriers[memoryBarrierIndex].srcAccessMask, kOptionalFlags, "VUID-VkMemoryBarrier-srcAccessMask-parameter");
+
+            skip |= validate_flags("vkCmdWaitEvents", ParameterName("pMemoryBarriers[%i].dstAccessMask", ParameterName::IndexVector{ memoryBarrierIndex }), "VkAccessFlagBits", AllVkAccessFlagBits, pMemoryBarriers[memoryBarrierIndex].dstAccessMask, kOptionalFlags, "VUID-VkMemoryBarrier-dstAccessMask-parameter");
+        }
+    }
+    skip |= validate_struct_type_array("vkCmdWaitEvents", "bufferMemoryBarrierCount", "pBufferMemoryBarriers", "VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER", bufferMemoryBarrierCount, pBufferMemoryBarriers, VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, false, true, "VUID-VkBufferMemoryBarrier-sType-sType", "VUID-vkCmdWaitEvents-pBufferMemoryBarriers-parameter", kVUIDUndefined);
+    if (pBufferMemoryBarriers != NULL)
+    {
+        for (uint32_t bufferMemoryBarrierIndex = 0; bufferMemoryBarrierIndex < bufferMemoryBarrierCount; ++bufferMemoryBarrierIndex)
+        {
+            skip |= validate_struct_pnext("vkCmdWaitEvents", ParameterName("pBufferMemoryBarriers[%i].pNext", ParameterName::IndexVector{ bufferMemoryBarrierIndex }), NULL, pBufferMemoryBarriers[bufferMemoryBarrierIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkBufferMemoryBarrier-pNext-pNext");
+
+            skip |= validate_required_handle("vkCmdWaitEvents", ParameterName("pBufferMemoryBarriers[%i].buffer", ParameterName::IndexVector{ bufferMemoryBarrierIndex }), pBufferMemoryBarriers[bufferMemoryBarrierIndex].buffer);
+        }
+    }
+    skip |= validate_struct_type_array("vkCmdWaitEvents", "imageMemoryBarrierCount", "pImageMemoryBarriers", "VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER", imageMemoryBarrierCount, pImageMemoryBarriers, VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, false, true, "VUID-VkImageMemoryBarrier-sType-sType", "VUID-vkCmdWaitEvents-pImageMemoryBarriers-parameter", kVUIDUndefined);
+    if (pImageMemoryBarriers != NULL)
+    {
+        for (uint32_t imageMemoryBarrierIndex = 0; imageMemoryBarrierIndex < imageMemoryBarrierCount; ++imageMemoryBarrierIndex)
+        {
+            const VkStructureType allowed_structs_VkImageMemoryBarrier[] = { VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT };
+
+            skip |= validate_struct_pnext("vkCmdWaitEvents", ParameterName("pImageMemoryBarriers[%i].pNext", ParameterName::IndexVector{ imageMemoryBarrierIndex }), "VkSampleLocationsInfoEXT", pImageMemoryBarriers[imageMemoryBarrierIndex].pNext, ARRAY_SIZE(allowed_structs_VkImageMemoryBarrier), allowed_structs_VkImageMemoryBarrier, GeneratedVulkanHeaderVersion, "VUID-VkImageMemoryBarrier-pNext-pNext");
+
+            skip |= validate_ranged_enum("vkCmdWaitEvents", ParameterName("pImageMemoryBarriers[%i].oldLayout", ParameterName::IndexVector{ imageMemoryBarrierIndex }), "VkImageLayout", AllVkImageLayoutEnums, pImageMemoryBarriers[imageMemoryBarrierIndex].oldLayout, "VUID-VkImageMemoryBarrier-oldLayout-parameter");
+
+            skip |= validate_ranged_enum("vkCmdWaitEvents", ParameterName("pImageMemoryBarriers[%i].newLayout", ParameterName::IndexVector{ imageMemoryBarrierIndex }), "VkImageLayout", AllVkImageLayoutEnums, pImageMemoryBarriers[imageMemoryBarrierIndex].newLayout, "VUID-VkImageMemoryBarrier-newLayout-parameter");
+
+            skip |= validate_required_handle("vkCmdWaitEvents", ParameterName("pImageMemoryBarriers[%i].image", ParameterName::IndexVector{ imageMemoryBarrierIndex }), pImageMemoryBarriers[imageMemoryBarrierIndex].image);
+
+            skip |= validate_flags("vkCmdWaitEvents", ParameterName("pImageMemoryBarriers[%i].subresourceRange.aspectMask", ParameterName::IndexVector{ imageMemoryBarrierIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pImageMemoryBarriers[imageMemoryBarrierIndex].subresourceRange.aspectMask, kRequiredFlags, "VUID-VkImageSubresourceRange-aspectMask-parameter", "VUID-VkImageSubresourceRange-aspectMask-requiredbitmask");
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdPipelineBarrier(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    VkDependencyFlags                           dependencyFlags,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers) const {
+    bool skip = false;
+    skip |= validate_flags("vkCmdPipelineBarrier", "srcStageMask", "VkPipelineStageFlagBits", AllVkPipelineStageFlagBits, srcStageMask, kRequiredFlags, "VUID-vkCmdPipelineBarrier-srcStageMask-parameter", "VUID-vkCmdPipelineBarrier-srcStageMask-requiredbitmask");
+    skip |= validate_flags("vkCmdPipelineBarrier", "dstStageMask", "VkPipelineStageFlagBits", AllVkPipelineStageFlagBits, dstStageMask, kRequiredFlags, "VUID-vkCmdPipelineBarrier-dstStageMask-parameter", "VUID-vkCmdPipelineBarrier-dstStageMask-requiredbitmask");
+    skip |= validate_flags("vkCmdPipelineBarrier", "dependencyFlags", "VkDependencyFlagBits", AllVkDependencyFlagBits, dependencyFlags, kOptionalFlags, "VUID-vkCmdPipelineBarrier-dependencyFlags-parameter");
+    skip |= validate_struct_type_array("vkCmdPipelineBarrier", "memoryBarrierCount", "pMemoryBarriers", "VK_STRUCTURE_TYPE_MEMORY_BARRIER", memoryBarrierCount, pMemoryBarriers, VK_STRUCTURE_TYPE_MEMORY_BARRIER, false, true, "VUID-VkMemoryBarrier-sType-sType", "VUID-vkCmdPipelineBarrier-pMemoryBarriers-parameter", kVUIDUndefined);
+    if (pMemoryBarriers != NULL)
+    {
+        for (uint32_t memoryBarrierIndex = 0; memoryBarrierIndex < memoryBarrierCount; ++memoryBarrierIndex)
+        {
+            skip |= validate_struct_pnext("vkCmdPipelineBarrier", ParameterName("pMemoryBarriers[%i].pNext", ParameterName::IndexVector{ memoryBarrierIndex }), NULL, pMemoryBarriers[memoryBarrierIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkMemoryBarrier-pNext-pNext");
+
+            skip |= validate_flags("vkCmdPipelineBarrier", ParameterName("pMemoryBarriers[%i].srcAccessMask", ParameterName::IndexVector{ memoryBarrierIndex }), "VkAccessFlagBits", AllVkAccessFlagBits, pMemoryBarriers[memoryBarrierIndex].srcAccessMask, kOptionalFlags, "VUID-VkMemoryBarrier-srcAccessMask-parameter");
+
+            skip |= validate_flags("vkCmdPipelineBarrier", ParameterName("pMemoryBarriers[%i].dstAccessMask", ParameterName::IndexVector{ memoryBarrierIndex }), "VkAccessFlagBits", AllVkAccessFlagBits, pMemoryBarriers[memoryBarrierIndex].dstAccessMask, kOptionalFlags, "VUID-VkMemoryBarrier-dstAccessMask-parameter");
+        }
+    }
+    skip |= validate_struct_type_array("vkCmdPipelineBarrier", "bufferMemoryBarrierCount", "pBufferMemoryBarriers", "VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER", bufferMemoryBarrierCount, pBufferMemoryBarriers, VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, false, true, "VUID-VkBufferMemoryBarrier-sType-sType", "VUID-vkCmdPipelineBarrier-pBufferMemoryBarriers-parameter", kVUIDUndefined);
+    if (pBufferMemoryBarriers != NULL)
+    {
+        for (uint32_t bufferMemoryBarrierIndex = 0; bufferMemoryBarrierIndex < bufferMemoryBarrierCount; ++bufferMemoryBarrierIndex)
+        {
+            skip |= validate_struct_pnext("vkCmdPipelineBarrier", ParameterName("pBufferMemoryBarriers[%i].pNext", ParameterName::IndexVector{ bufferMemoryBarrierIndex }), NULL, pBufferMemoryBarriers[bufferMemoryBarrierIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkBufferMemoryBarrier-pNext-pNext");
+
+            skip |= validate_required_handle("vkCmdPipelineBarrier", ParameterName("pBufferMemoryBarriers[%i].buffer", ParameterName::IndexVector{ bufferMemoryBarrierIndex }), pBufferMemoryBarriers[bufferMemoryBarrierIndex].buffer);
+        }
+    }
+    skip |= validate_struct_type_array("vkCmdPipelineBarrier", "imageMemoryBarrierCount", "pImageMemoryBarriers", "VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER", imageMemoryBarrierCount, pImageMemoryBarriers, VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, false, true, "VUID-VkImageMemoryBarrier-sType-sType", "VUID-vkCmdPipelineBarrier-pImageMemoryBarriers-parameter", kVUIDUndefined);
+    if (pImageMemoryBarriers != NULL)
+    {
+        for (uint32_t imageMemoryBarrierIndex = 0; imageMemoryBarrierIndex < imageMemoryBarrierCount; ++imageMemoryBarrierIndex)
+        {
+            const VkStructureType allowed_structs_VkImageMemoryBarrier[] = { VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT };
+
+            skip |= validate_struct_pnext("vkCmdPipelineBarrier", ParameterName("pImageMemoryBarriers[%i].pNext", ParameterName::IndexVector{ imageMemoryBarrierIndex }), "VkSampleLocationsInfoEXT", pImageMemoryBarriers[imageMemoryBarrierIndex].pNext, ARRAY_SIZE(allowed_structs_VkImageMemoryBarrier), allowed_structs_VkImageMemoryBarrier, GeneratedVulkanHeaderVersion, "VUID-VkImageMemoryBarrier-pNext-pNext");
+
+            skip |= validate_ranged_enum("vkCmdPipelineBarrier", ParameterName("pImageMemoryBarriers[%i].oldLayout", ParameterName::IndexVector{ imageMemoryBarrierIndex }), "VkImageLayout", AllVkImageLayoutEnums, pImageMemoryBarriers[imageMemoryBarrierIndex].oldLayout, "VUID-VkImageMemoryBarrier-oldLayout-parameter");
+
+            skip |= validate_ranged_enum("vkCmdPipelineBarrier", ParameterName("pImageMemoryBarriers[%i].newLayout", ParameterName::IndexVector{ imageMemoryBarrierIndex }), "VkImageLayout", AllVkImageLayoutEnums, pImageMemoryBarriers[imageMemoryBarrierIndex].newLayout, "VUID-VkImageMemoryBarrier-newLayout-parameter");
+
+            skip |= validate_required_handle("vkCmdPipelineBarrier", ParameterName("pImageMemoryBarriers[%i].image", ParameterName::IndexVector{ imageMemoryBarrierIndex }), pImageMemoryBarriers[imageMemoryBarrierIndex].image);
+
+            skip |= validate_flags("vkCmdPipelineBarrier", ParameterName("pImageMemoryBarriers[%i].subresourceRange.aspectMask", ParameterName::IndexVector{ imageMemoryBarrierIndex }), "VkImageAspectFlagBits", AllVkImageAspectFlagBits, pImageMemoryBarriers[imageMemoryBarrierIndex].subresourceRange.aspectMask, kRequiredFlags, "VUID-VkImageSubresourceRange-aspectMask-parameter", "VUID-VkImageSubresourceRange-aspectMask-requiredbitmask");
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdBeginQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkCmdBeginQuery", "queryPool", queryPool);
+    skip |= validate_flags("vkCmdBeginQuery", "flags", "VkQueryControlFlagBits", AllVkQueryControlFlagBits, flags, kOptionalFlags, "VUID-vkCmdBeginQuery-flags-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdEndQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkCmdEndQuery", "queryPool", queryPool);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdResetQueryPool(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkCmdResetQueryPool", "queryPool", queryPool);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdWriteTimestamp(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query) const {
+    bool skip = false;
+    skip |= validate_flags("vkCmdWriteTimestamp", "pipelineStage", "VkPipelineStageFlagBits", AllVkPipelineStageFlagBits, pipelineStage, kRequiredSingleBit, "VUID-vkCmdWriteTimestamp-pipelineStage-parameter", "VUID-vkCmdWriteTimestamp-pipelineStage-parameter");
+    skip |= validate_required_handle("vkCmdWriteTimestamp", "queryPool", queryPool);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdCopyQueryPoolResults(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkCmdCopyQueryPoolResults", "queryPool", queryPool);
+    skip |= validate_required_handle("vkCmdCopyQueryPoolResults", "dstBuffer", dstBuffer);
+    skip |= validate_flags("vkCmdCopyQueryPoolResults", "flags", "VkQueryResultFlagBits", AllVkQueryResultFlagBits, flags, kOptionalFlags, "VUID-vkCmdCopyQueryPoolResults-flags-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdPushConstants(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineLayout                            layout,
+    VkShaderStageFlags                          stageFlags,
+    uint32_t                                    offset,
+    uint32_t                                    size,
+    const void*                                 pValues) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkCmdPushConstants", "layout", layout);
+    skip |= validate_flags("vkCmdPushConstants", "stageFlags", "VkShaderStageFlagBits", AllVkShaderStageFlagBits, stageFlags, kRequiredFlags, "VUID-vkCmdPushConstants-stageFlags-parameter", "VUID-vkCmdPushConstants-stageFlags-requiredbitmask");
+    skip |= validate_array("vkCmdPushConstants", "size", "pValues", size, &pValues, true, true, "VUID-vkCmdPushConstants-size-arraylength", "VUID-vkCmdPushConstants-pValues-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdBeginRenderPass(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    VkSubpassContents                           contents) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkCmdBeginRenderPass", "pRenderPassBegin", "VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO", pRenderPassBegin, VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, true, "VUID-vkCmdBeginRenderPass-pRenderPassBegin-parameter", "VUID-VkRenderPassBeginInfo-sType-sType");
+    if (pRenderPassBegin != NULL)
+    {
+        const VkStructureType allowed_structs_VkRenderPassBeginInfo[] = { VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO, VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR, VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT };
+
+        skip |= validate_struct_pnext("vkCmdBeginRenderPass", "pRenderPassBegin->pNext", "VkDeviceGroupRenderPassBeginInfo, VkRenderPassAttachmentBeginInfoKHR, VkRenderPassSampleLocationsBeginInfoEXT", pRenderPassBegin->pNext, ARRAY_SIZE(allowed_structs_VkRenderPassBeginInfo), allowed_structs_VkRenderPassBeginInfo, GeneratedVulkanHeaderVersion, "VUID-VkRenderPassBeginInfo-pNext-pNext");
+
+        skip |= validate_required_handle("vkCmdBeginRenderPass", "pRenderPassBegin->renderPass", pRenderPassBegin->renderPass);
+
+        skip |= validate_required_handle("vkCmdBeginRenderPass", "pRenderPassBegin->framebuffer", pRenderPassBegin->framebuffer);
+
+        // No xml-driven validation
+
+        // No xml-driven validation
+
+        skip |= validate_array("vkCmdBeginRenderPass", "pRenderPassBegin->clearValueCount", "pRenderPassBegin->pClearValues", pRenderPassBegin->clearValueCount, &pRenderPassBegin->pClearValues, false, true, kVUIDUndefined, "VUID-VkRenderPassBeginInfo-pClearValues-parameter");
+
+        if (pRenderPassBegin->pClearValues != NULL)
+        {
+            for (uint32_t clearValueIndex = 0; clearValueIndex < pRenderPassBegin->clearValueCount; ++clearValueIndex)
+            {
+                // No xml-driven validation
+
+                // No xml-driven validation
+            }
+        }
+    }
+    skip |= validate_ranged_enum("vkCmdBeginRenderPass", "contents", "VkSubpassContents", AllVkSubpassContentsEnums, contents, "VUID-vkCmdBeginRenderPass-contents-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdNextSubpass(
+    VkCommandBuffer                             commandBuffer,
+    VkSubpassContents                           contents) const {
+    bool skip = false;
+    skip |= validate_ranged_enum("vkCmdNextSubpass", "contents", "VkSubpassContents", AllVkSubpassContentsEnums, contents, "VUID-vkCmdNextSubpass-contents-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdEndRenderPass(
+    VkCommandBuffer                             commandBuffer) const {
+    bool skip = false;
+    // No xml-driven validation
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdExecuteCommands(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers) const {
+    bool skip = false;
+    skip |= validate_handle_array("vkCmdExecuteCommands", "commandBufferCount", "pCommandBuffers", commandBufferCount, pCommandBuffers, true, true);
+    return skip;
+}
+
+
+
+bool StatelessValidation::PreCallValidateBindBufferMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos) const {
+    bool skip = false;
+    skip |= validate_struct_type_array("vkBindBufferMemory2", "bindInfoCount", "pBindInfos", "VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO", bindInfoCount, pBindInfos, VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO, true, true, "VUID-VkBindBufferMemoryInfo-sType-sType", "VUID-vkBindBufferMemory2-pBindInfos-parameter", "VUID-vkBindBufferMemory2-bindInfoCount-arraylength");
+    if (pBindInfos != NULL)
+    {
+        for (uint32_t bindInfoIndex = 0; bindInfoIndex < bindInfoCount; ++bindInfoIndex)
+        {
+            const VkStructureType allowed_structs_VkBindBufferMemoryInfo[] = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO };
+
+            skip |= validate_struct_pnext("vkBindBufferMemory2", ParameterName("pBindInfos[%i].pNext", ParameterName::IndexVector{ bindInfoIndex }), "VkBindBufferMemoryDeviceGroupInfo", pBindInfos[bindInfoIndex].pNext, ARRAY_SIZE(allowed_structs_VkBindBufferMemoryInfo), allowed_structs_VkBindBufferMemoryInfo, GeneratedVulkanHeaderVersion, "VUID-VkBindBufferMemoryInfo-pNext-pNext");
+
+            skip |= validate_required_handle("vkBindBufferMemory2", ParameterName("pBindInfos[%i].buffer", ParameterName::IndexVector{ bindInfoIndex }), pBindInfos[bindInfoIndex].buffer);
+
+            skip |= validate_required_handle("vkBindBufferMemory2", ParameterName("pBindInfos[%i].memory", ParameterName::IndexVector{ bindInfoIndex }), pBindInfos[bindInfoIndex].memory);
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateBindImageMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos) const {
+    bool skip = false;
+    skip |= validate_struct_type_array("vkBindImageMemory2", "bindInfoCount", "pBindInfos", "VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO", bindInfoCount, pBindInfos, VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, true, true, "VUID-VkBindImageMemoryInfo-sType-sType", "VUID-vkBindImageMemory2-pBindInfos-parameter", "VUID-vkBindImageMemory2-bindInfoCount-arraylength");
+    if (pBindInfos != NULL)
+    {
+        for (uint32_t bindInfoIndex = 0; bindInfoIndex < bindInfoCount; ++bindInfoIndex)
+        {
+            const VkStructureType allowed_structs_VkBindImageMemoryInfo[] = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO, VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR, VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO };
+
+            skip |= validate_struct_pnext("vkBindImageMemory2", ParameterName("pBindInfos[%i].pNext", ParameterName::IndexVector{ bindInfoIndex }), "VkBindImageMemoryDeviceGroupInfo, VkBindImageMemorySwapchainInfoKHR, VkBindImagePlaneMemoryInfo", pBindInfos[bindInfoIndex].pNext, ARRAY_SIZE(allowed_structs_VkBindImageMemoryInfo), allowed_structs_VkBindImageMemoryInfo, GeneratedVulkanHeaderVersion, "VUID-VkBindImageMemoryInfo-pNext-pNext");
+
+            skip |= validate_required_handle("vkBindImageMemory2", ParameterName("pBindInfos[%i].image", ParameterName::IndexVector{ bindInfoIndex }), pBindInfos[bindInfoIndex].image);
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetDeviceGroupPeerMemoryFeatures(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures) const {
+    bool skip = false;
+    skip |= validate_required_pointer("vkGetDeviceGroupPeerMemoryFeatures", "pPeerMemoryFeatures", pPeerMemoryFeatures, "VUID-vkGetDeviceGroupPeerMemoryFeatures-pPeerMemoryFeatures-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdSetDeviceMask(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask) const {
+    bool skip = false;
+    // No xml-driven validation
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdDispatchBase(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ) const {
+    bool skip = false;
+    // No xml-driven validation
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateEnumeratePhysicalDeviceGroups(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties) const {
+    bool skip = false;
+    skip |= validate_struct_type_array("vkEnumeratePhysicalDeviceGroups", "pPhysicalDeviceGroupCount", "pPhysicalDeviceGroupProperties", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES", pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES, true, false, false, "VUID-VkPhysicalDeviceGroupProperties-sType-sType", "VUID-vkEnumeratePhysicalDeviceGroups-pPhysicalDeviceGroupProperties-parameter", kVUIDUndefined);
+    if (pPhysicalDeviceGroupProperties != NULL)
+    {
+        for (uint32_t pPhysicalDeviceGroupIndex = 0; pPhysicalDeviceGroupIndex < *pPhysicalDeviceGroupCount; ++pPhysicalDeviceGroupIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetImageMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkGetImageMemoryRequirements2", "pInfo", "VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2", pInfo, VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, true, "VUID-vkGetImageMemoryRequirements2-pInfo-parameter", "VUID-VkImageMemoryRequirementsInfo2-sType-sType");
+    if (pInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkImageMemoryRequirementsInfo2[] = { VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO };
+
+        skip |= validate_struct_pnext("vkGetImageMemoryRequirements2", "pInfo->pNext", "VkImagePlaneMemoryRequirementsInfo", pInfo->pNext, ARRAY_SIZE(allowed_structs_VkImageMemoryRequirementsInfo2), allowed_structs_VkImageMemoryRequirementsInfo2, GeneratedVulkanHeaderVersion, "VUID-VkImageMemoryRequirementsInfo2-pNext-pNext");
+
+        skip |= validate_required_handle("vkGetImageMemoryRequirements2", "pInfo->image", pInfo->image);
+    }
+    skip |= validate_struct_type("vkGetImageMemoryRequirements2", "pMemoryRequirements", "VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2", pMemoryRequirements, VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, true, "VUID-vkGetImageMemoryRequirements2-pMemoryRequirements-parameter", "VUID-VkMemoryRequirements2-sType-sType");
+    if (pMemoryRequirements != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetBufferMemoryRequirements2(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkGetBufferMemoryRequirements2", "pInfo", "VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2", pInfo, VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2, true, "VUID-vkGetBufferMemoryRequirements2-pInfo-parameter", "VUID-VkBufferMemoryRequirementsInfo2-sType-sType");
+    if (pInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetBufferMemoryRequirements2", "pInfo->pNext", NULL, pInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkBufferMemoryRequirementsInfo2-pNext-pNext");
+
+        skip |= validate_required_handle("vkGetBufferMemoryRequirements2", "pInfo->buffer", pInfo->buffer);
+    }
+    skip |= validate_struct_type("vkGetBufferMemoryRequirements2", "pMemoryRequirements", "VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2", pMemoryRequirements, VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, true, "VUID-vkGetBufferMemoryRequirements2-pMemoryRequirements-parameter", "VUID-VkMemoryRequirements2-sType-sType");
+    if (pMemoryRequirements != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetImageSparseMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkGetImageSparseMemoryRequirements2", "pInfo", "VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2", pInfo, VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2, true, "VUID-vkGetImageSparseMemoryRequirements2-pInfo-parameter", "VUID-VkImageSparseMemoryRequirementsInfo2-sType-sType");
+    if (pInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetImageSparseMemoryRequirements2", "pInfo->pNext", NULL, pInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkImageSparseMemoryRequirementsInfo2-pNext-pNext");
+
+        skip |= validate_required_handle("vkGetImageSparseMemoryRequirements2", "pInfo->image", pInfo->image);
+    }
+    skip |= validate_struct_type_array("vkGetImageSparseMemoryRequirements2", "pSparseMemoryRequirementCount", "pSparseMemoryRequirements", "VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2", pSparseMemoryRequirementCount, pSparseMemoryRequirements, VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2, true, false, false, "VUID-VkSparseImageMemoryRequirements2-sType-sType", "VUID-vkGetImageSparseMemoryRequirements2-pSparseMemoryRequirements-parameter", kVUIDUndefined);
+    if (pSparseMemoryRequirements != NULL)
+    {
+        for (uint32_t pSparseMemoryRequirementIndex = 0; pSparseMemoryRequirementIndex < *pSparseMemoryRequirementCount; ++pSparseMemoryRequirementIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceFeatures2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures2*                  pFeatures) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkGetPhysicalDeviceFeatures2", "pFeatures", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2", pFeatures, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, true, "VUID-vkGetPhysicalDeviceFeatures2-pFeatures-parameter", "VUID-VkPhysicalDeviceFeatures2-sType-sType");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties2*                pProperties) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkGetPhysicalDeviceProperties2", "pProperties", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2", pProperties, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, true, "VUID-vkGetPhysicalDeviceProperties2-pProperties-parameter", "VUID-VkPhysicalDeviceProperties2-sType-sType");
+    if (pProperties != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties2*                        pFormatProperties) const {
+    bool skip = false;
+    skip |= validate_ranged_enum("vkGetPhysicalDeviceFormatProperties2", "format", "VkFormat", AllVkFormatEnums, format, "VUID-vkGetPhysicalDeviceFormatProperties2-format-parameter");
+    skip |= validate_struct_type("vkGetPhysicalDeviceFormatProperties2", "pFormatProperties", "VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2", pFormatProperties, VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, true, "VUID-vkGetPhysicalDeviceFormatProperties2-pFormatProperties-parameter", "VUID-VkFormatProperties2-sType-sType");
+    if (pFormatProperties != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceImageFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceImageFormatInfo2*     pImageFormatInfo,
+    VkImageFormatProperties2*                   pImageFormatProperties) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkGetPhysicalDeviceImageFormatProperties2", "pImageFormatInfo", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2", pImageFormatInfo, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2, true, "VUID-vkGetPhysicalDeviceImageFormatProperties2-pImageFormatInfo-parameter", "VUID-VkPhysicalDeviceImageFormatInfo2-sType-sType");
+    if (pImageFormatInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkPhysicalDeviceImageFormatInfo2[] = { VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR, VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT };
+
+        skip |= validate_struct_pnext("vkGetPhysicalDeviceImageFormatProperties2", "pImageFormatInfo->pNext", "VkImageFormatListCreateInfoKHR, VkImageStencilUsageCreateInfoEXT, VkPhysicalDeviceExternalImageFormatInfo, VkPhysicalDeviceImageDrmFormatModifierInfoEXT, VkPhysicalDeviceImageViewImageFormatInfoEXT", pImageFormatInfo->pNext, ARRAY_SIZE(allowed_structs_VkPhysicalDeviceImageFormatInfo2), allowed_structs_VkPhysicalDeviceImageFormatInfo2, GeneratedVulkanHeaderVersion, "VUID-VkPhysicalDeviceImageFormatInfo2-pNext-pNext");
+
+        skip |= validate_ranged_enum("vkGetPhysicalDeviceImageFormatProperties2", "pImageFormatInfo->format", "VkFormat", AllVkFormatEnums, pImageFormatInfo->format, "VUID-VkPhysicalDeviceImageFormatInfo2-format-parameter");
+
+        skip |= validate_ranged_enum("vkGetPhysicalDeviceImageFormatProperties2", "pImageFormatInfo->type", "VkImageType", AllVkImageTypeEnums, pImageFormatInfo->type, "VUID-VkPhysicalDeviceImageFormatInfo2-type-parameter");
+
+        skip |= validate_ranged_enum("vkGetPhysicalDeviceImageFormatProperties2", "pImageFormatInfo->tiling", "VkImageTiling", AllVkImageTilingEnums, pImageFormatInfo->tiling, "VUID-VkPhysicalDeviceImageFormatInfo2-tiling-parameter");
+
+        skip |= validate_flags("vkGetPhysicalDeviceImageFormatProperties2", "pImageFormatInfo->usage", "VkImageUsageFlagBits", AllVkImageUsageFlagBits, pImageFormatInfo->usage, kRequiredFlags, "VUID-VkPhysicalDeviceImageFormatInfo2-usage-parameter", "VUID-VkPhysicalDeviceImageFormatInfo2-usage-requiredbitmask");
+
+        skip |= validate_flags("vkGetPhysicalDeviceImageFormatProperties2", "pImageFormatInfo->flags", "VkImageCreateFlagBits", AllVkImageCreateFlagBits, pImageFormatInfo->flags, kOptionalFlags, "VUID-VkPhysicalDeviceImageFormatInfo2-flags-parameter");
+    }
+    skip |= validate_struct_type("vkGetPhysicalDeviceImageFormatProperties2", "pImageFormatProperties", "VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2", pImageFormatProperties, VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, true, "VUID-vkGetPhysicalDeviceImageFormatProperties2-pImageFormatProperties-parameter", "VUID-VkImageFormatProperties2-sType-sType");
+    if (pImageFormatProperties != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceQueueFamilyProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties2*                   pQueueFamilyProperties) const {
+    bool skip = false;
+    skip |= validate_struct_type_array("vkGetPhysicalDeviceQueueFamilyProperties2", "pQueueFamilyPropertyCount", "pQueueFamilyProperties", "VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2", pQueueFamilyPropertyCount, pQueueFamilyProperties, VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2, true, false, false, "VUID-VkQueueFamilyProperties2-sType-sType", "VUID-vkGetPhysicalDeviceQueueFamilyProperties2-pQueueFamilyProperties-parameter", kVUIDUndefined);
+    if (pQueueFamilyProperties != NULL)
+    {
+        for (uint32_t pQueueFamilyPropertyIndex = 0; pQueueFamilyPropertyIndex < *pQueueFamilyPropertyCount; ++pQueueFamilyPropertyIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceMemoryProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties2*          pMemoryProperties) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkGetPhysicalDeviceMemoryProperties2", "pMemoryProperties", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2", pMemoryProperties, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2, true, "VUID-vkGetPhysicalDeviceMemoryProperties2-pMemoryProperties-parameter", "VUID-VkPhysicalDeviceMemoryProperties2-sType-sType");
+    if (pMemoryProperties != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceSparseImageFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties2*             pProperties) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkGetPhysicalDeviceSparseImageFormatProperties2", "pFormatInfo", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2", pFormatInfo, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2, true, "VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-pFormatInfo-parameter", "VUID-VkPhysicalDeviceSparseImageFormatInfo2-sType-sType");
+    if (pFormatInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetPhysicalDeviceSparseImageFormatProperties2", "pFormatInfo->pNext", NULL, pFormatInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPhysicalDeviceSparseImageFormatInfo2-pNext-pNext");
+
+        skip |= validate_ranged_enum("vkGetPhysicalDeviceSparseImageFormatProperties2", "pFormatInfo->format", "VkFormat", AllVkFormatEnums, pFormatInfo->format, "VUID-VkPhysicalDeviceSparseImageFormatInfo2-format-parameter");
+
+        skip |= validate_ranged_enum("vkGetPhysicalDeviceSparseImageFormatProperties2", "pFormatInfo->type", "VkImageType", AllVkImageTypeEnums, pFormatInfo->type, "VUID-VkPhysicalDeviceSparseImageFormatInfo2-type-parameter");
+
+        skip |= validate_flags("vkGetPhysicalDeviceSparseImageFormatProperties2", "pFormatInfo->samples", "VkSampleCountFlagBits", AllVkSampleCountFlagBits, pFormatInfo->samples, kRequiredSingleBit, "VUID-VkPhysicalDeviceSparseImageFormatInfo2-samples-parameter", "VUID-VkPhysicalDeviceSparseImageFormatInfo2-samples-parameter");
+
+        skip |= validate_flags("vkGetPhysicalDeviceSparseImageFormatProperties2", "pFormatInfo->usage", "VkImageUsageFlagBits", AllVkImageUsageFlagBits, pFormatInfo->usage, kRequiredFlags, "VUID-VkPhysicalDeviceSparseImageFormatInfo2-usage-parameter", "VUID-VkPhysicalDeviceSparseImageFormatInfo2-usage-requiredbitmask");
+
+        skip |= validate_ranged_enum("vkGetPhysicalDeviceSparseImageFormatProperties2", "pFormatInfo->tiling", "VkImageTiling", AllVkImageTilingEnums, pFormatInfo->tiling, "VUID-VkPhysicalDeviceSparseImageFormatInfo2-tiling-parameter");
+    }
+    skip |= validate_struct_type_array("vkGetPhysicalDeviceSparseImageFormatProperties2", "pPropertyCount", "pProperties", "VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2", pPropertyCount, pProperties, VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2, true, false, false, "VUID-VkSparseImageFormatProperties2-sType-sType", "VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-pProperties-parameter", kVUIDUndefined);
+    if (pProperties != NULL)
+    {
+        for (uint32_t pPropertyIndex = 0; pPropertyIndex < *pPropertyCount; ++pPropertyIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateTrimCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkTrimCommandPool", "commandPool", commandPool);
+    skip |= validate_reserved_flags("vkTrimCommandPool", "flags", flags, "VUID-vkTrimCommandPool-flags-zerobitmask");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetDeviceQueue2(
+    VkDevice                                    device,
+    const VkDeviceQueueInfo2*                   pQueueInfo,
+    VkQueue*                                    pQueue) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkGetDeviceQueue2", "pQueueInfo", "VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2", pQueueInfo, VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2, true, "VUID-vkGetDeviceQueue2-pQueueInfo-parameter", "VUID-VkDeviceQueueInfo2-sType-sType");
+    if (pQueueInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetDeviceQueue2", "pQueueInfo->pNext", NULL, pQueueInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDeviceQueueInfo2-pNext-pNext");
+
+        skip |= validate_flags("vkGetDeviceQueue2", "pQueueInfo->flags", "VkDeviceQueueCreateFlagBits", AllVkDeviceQueueCreateFlagBits, pQueueInfo->flags, kOptionalFlags, "VUID-VkDeviceQueueInfo2-flags-parameter");
+    }
+    skip |= validate_required_pointer("vkGetDeviceQueue2", "pQueue", pQueue, "VUID-vkGetDeviceQueue2-pQueue-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCreateSamplerYcbcrConversion(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkCreateSamplerYcbcrConversion", "pCreateInfo", "VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, true, "VUID-vkCreateSamplerYcbcrConversion-pCreateInfo-parameter", "VUID-VkSamplerYcbcrConversionCreateInfo-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkSamplerYcbcrConversionCreateInfo[] = { VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID };
+
+        skip |= validate_struct_pnext("vkCreateSamplerYcbcrConversion", "pCreateInfo->pNext", "VkExternalFormatANDROID", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkSamplerYcbcrConversionCreateInfo), allowed_structs_VkSamplerYcbcrConversionCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkSamplerYcbcrConversionCreateInfo-pNext-pNext");
+
+        skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversion", "pCreateInfo->format", "VkFormat", AllVkFormatEnums, pCreateInfo->format, "VUID-VkSamplerYcbcrConversionCreateInfo-format-parameter");
+
+        skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversion", "pCreateInfo->ycbcrModel", "VkSamplerYcbcrModelConversion", AllVkSamplerYcbcrModelConversionEnums, pCreateInfo->ycbcrModel, "VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrModel-parameter");
+
+        skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversion", "pCreateInfo->ycbcrRange", "VkSamplerYcbcrRange", AllVkSamplerYcbcrRangeEnums, pCreateInfo->ycbcrRange, "VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrRange-parameter");
+
+        skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversion", "pCreateInfo->components.r", "VkComponentSwizzle", AllVkComponentSwizzleEnums, pCreateInfo->components.r, "VUID-VkComponentMapping-r-parameter");
+
+        skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversion", "pCreateInfo->components.g", "VkComponentSwizzle", AllVkComponentSwizzleEnums, pCreateInfo->components.g, "VUID-VkComponentMapping-g-parameter");
+
+        skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversion", "pCreateInfo->components.b", "VkComponentSwizzle", AllVkComponentSwizzleEnums, pCreateInfo->components.b, "VUID-VkComponentMapping-b-parameter");
+
+        skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversion", "pCreateInfo->components.a", "VkComponentSwizzle", AllVkComponentSwizzleEnums, pCreateInfo->components.a, "VUID-VkComponentMapping-a-parameter");
+
+        skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversion", "pCreateInfo->xChromaOffset", "VkChromaLocation", AllVkChromaLocationEnums, pCreateInfo->xChromaOffset, "VUID-VkSamplerYcbcrConversionCreateInfo-xChromaOffset-parameter");
+
+        skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversion", "pCreateInfo->yChromaOffset", "VkChromaLocation", AllVkChromaLocationEnums, pCreateInfo->yChromaOffset, "VUID-VkSamplerYcbcrConversionCreateInfo-yChromaOffset-parameter");
+
+        skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversion", "pCreateInfo->chromaFilter", "VkFilter", AllVkFilterEnums, pCreateInfo->chromaFilter, "VUID-VkSamplerYcbcrConversionCreateInfo-chromaFilter-parameter");
+
+        skip |= validate_bool32("vkCreateSamplerYcbcrConversion", "pCreateInfo->forceExplicitReconstruction", pCreateInfo->forceExplicitReconstruction);
+    }
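+    // If allocation callbacks are supplied, pfnAllocation/pfnReallocation/pfnFree are required,
+    // and the internal allocation/free notification callbacks must be provided as a pair.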
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateSamplerYcbcrConversion", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateSamplerYcbcrConversion", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateSamplerYcbcrConversion", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateSamplerYcbcrConversion", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateSamplerYcbcrConversion", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateSamplerYcbcrConversion", "pYcbcrConversion", pYcbcrConversion, "VUID-vkCreateSamplerYcbcrConversion-pYcbcrConversion-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroySamplerYcbcrConversion(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroySamplerYcbcrConversion", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroySamplerYcbcrConversion", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroySamplerYcbcrConversion", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroySamplerYcbcrConversion", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroySamplerYcbcrConversion", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCreateDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkCreateDescriptorUpdateTemplate", "pCreateInfo", "VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, true, "VUID-vkCreateDescriptorUpdateTemplate-pCreateInfo-parameter", "VUID-VkDescriptorUpdateTemplateCreateInfo-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCreateDescriptorUpdateTemplate", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDescriptorUpdateTemplateCreateInfo-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkCreateDescriptorUpdateTemplate", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkDescriptorUpdateTemplateCreateInfo-flags-zerobitmask");
+
+        skip |= validate_array("vkCreateDescriptorUpdateTemplate", "pCreateInfo->descriptorUpdateEntryCount", "pCreateInfo->pDescriptorUpdateEntries", pCreateInfo->descriptorUpdateEntryCount, &pCreateInfo->pDescriptorUpdateEntries, true, true, "VUID-VkDescriptorUpdateTemplateCreateInfo-descriptorUpdateEntryCount-arraylength", "VUID-VkDescriptorUpdateTemplateCreateInfo-pDescriptorUpdateEntries-parameter");
+
+        if (pCreateInfo->pDescriptorUpdateEntries != NULL)
+        {
+            for (uint32_t descriptorUpdateEntryIndex = 0; descriptorUpdateEntryIndex < pCreateInfo->descriptorUpdateEntryCount; ++descriptorUpdateEntryIndex)
+            {
+                skip |= validate_ranged_enum("vkCreateDescriptorUpdateTemplate", ParameterName("pCreateInfo->pDescriptorUpdateEntries[%i].descriptorType", ParameterName::IndexVector{ descriptorUpdateEntryIndex }), "VkDescriptorType", AllVkDescriptorTypeEnums, pCreateInfo->pDescriptorUpdateEntries[descriptorUpdateEntryIndex].descriptorType, "VUID-VkDescriptorUpdateTemplateEntry-descriptorType-parameter");
+            }
+        }
+
+        skip |= validate_ranged_enum("vkCreateDescriptorUpdateTemplate", "pCreateInfo->templateType", "VkDescriptorUpdateTemplateType", AllVkDescriptorUpdateTemplateTypeEnums, pCreateInfo->templateType, "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-parameter");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateDescriptorUpdateTemplate", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateDescriptorUpdateTemplate", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateDescriptorUpdateTemplate", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateDescriptorUpdateTemplate", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateDescriptorUpdateTemplate", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateDescriptorUpdateTemplate", "pDescriptorUpdateTemplate", pDescriptorUpdateTemplate, "VUID-vkCreateDescriptorUpdateTemplate-pDescriptorUpdateTemplate-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroyDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroyDescriptorUpdateTemplate", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroyDescriptorUpdateTemplate", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroyDescriptorUpdateTemplate", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyDescriptorUpdateTemplate", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyDescriptorUpdateTemplate", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateUpdateDescriptorSetWithTemplate(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkUpdateDescriptorSetWithTemplate", "descriptorSet", descriptorSet);
+    skip |= validate_required_handle("vkUpdateDescriptorSetWithTemplate", "descriptorUpdateTemplate", descriptorUpdateTemplate);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceExternalBufferProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalBufferInfo*   pExternalBufferInfo,
+    VkExternalBufferProperties*                 pExternalBufferProperties) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkGetPhysicalDeviceExternalBufferProperties", "pExternalBufferInfo", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO", pExternalBufferInfo, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO, true, "VUID-vkGetPhysicalDeviceExternalBufferProperties-pExternalBufferInfo-parameter", "VUID-VkPhysicalDeviceExternalBufferInfo-sType-sType");
+    if (pExternalBufferInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetPhysicalDeviceExternalBufferProperties", "pExternalBufferInfo->pNext", NULL, pExternalBufferInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPhysicalDeviceExternalBufferInfo-pNext-pNext");
+
+        skip |= validate_flags("vkGetPhysicalDeviceExternalBufferProperties", "pExternalBufferInfo->flags", "VkBufferCreateFlagBits", AllVkBufferCreateFlagBits, pExternalBufferInfo->flags, kOptionalFlags, "VUID-VkPhysicalDeviceExternalBufferInfo-flags-parameter");
+
+        skip |= validate_flags("vkGetPhysicalDeviceExternalBufferProperties", "pExternalBufferInfo->usage", "VkBufferUsageFlagBits", AllVkBufferUsageFlagBits, pExternalBufferInfo->usage, kRequiredFlags, "VUID-VkPhysicalDeviceExternalBufferInfo-usage-parameter", "VUID-VkPhysicalDeviceExternalBufferInfo-usage-requiredbitmask");
+
+        skip |= validate_flags("vkGetPhysicalDeviceExternalBufferProperties", "pExternalBufferInfo->handleType", "VkExternalMemoryHandleTypeFlagBits", AllVkExternalMemoryHandleTypeFlagBits, pExternalBufferInfo->handleType, kRequiredSingleBit, "VUID-VkPhysicalDeviceExternalBufferInfo-handleType-parameter", "VUID-VkPhysicalDeviceExternalBufferInfo-handleType-parameter");
+    }
+    skip |= validate_struct_type("vkGetPhysicalDeviceExternalBufferProperties", "pExternalBufferProperties", "VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES", pExternalBufferProperties, VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES, true, "VUID-vkGetPhysicalDeviceExternalBufferProperties-pExternalBufferProperties-parameter", "VUID-VkExternalBufferProperties-sType-sType");
+    if (pExternalBufferProperties != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceExternalFenceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalFenceInfo*    pExternalFenceInfo,
+    VkExternalFenceProperties*                  pExternalFenceProperties) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkGetPhysicalDeviceExternalFenceProperties", "pExternalFenceInfo", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO", pExternalFenceInfo, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO, true, "VUID-vkGetPhysicalDeviceExternalFenceProperties-pExternalFenceInfo-parameter", "VUID-VkPhysicalDeviceExternalFenceInfo-sType-sType");
+    if (pExternalFenceInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetPhysicalDeviceExternalFenceProperties", "pExternalFenceInfo->pNext", NULL, pExternalFenceInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPhysicalDeviceExternalFenceInfo-pNext-pNext");
+
+        skip |= validate_flags("vkGetPhysicalDeviceExternalFenceProperties", "pExternalFenceInfo->handleType", "VkExternalFenceHandleTypeFlagBits", AllVkExternalFenceHandleTypeFlagBits, pExternalFenceInfo->handleType, kRequiredSingleBit, "VUID-VkPhysicalDeviceExternalFenceInfo-handleType-parameter", "VUID-VkPhysicalDeviceExternalFenceInfo-handleType-parameter");
+    }
+    skip |= validate_struct_type("vkGetPhysicalDeviceExternalFenceProperties", "pExternalFenceProperties", "VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES", pExternalFenceProperties, VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES, true, "VUID-vkGetPhysicalDeviceExternalFenceProperties-pExternalFenceProperties-parameter", "VUID-VkExternalFenceProperties-sType-sType");
+    if (pExternalFenceProperties != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceExternalSemaphoreProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
+    VkExternalSemaphoreProperties*              pExternalSemaphoreProperties) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkGetPhysicalDeviceExternalSemaphoreProperties", "pExternalSemaphoreInfo", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO", pExternalSemaphoreInfo, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO, true, "VUID-vkGetPhysicalDeviceExternalSemaphoreProperties-pExternalSemaphoreInfo-parameter", "VUID-VkPhysicalDeviceExternalSemaphoreInfo-sType-sType");
+    if (pExternalSemaphoreInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkPhysicalDeviceExternalSemaphoreInfo[] = { VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR };
+
+        skip |= validate_struct_pnext("vkGetPhysicalDeviceExternalSemaphoreProperties", "pExternalSemaphoreInfo->pNext", "VkSemaphoreTypeCreateInfoKHR", pExternalSemaphoreInfo->pNext, ARRAY_SIZE(allowed_structs_VkPhysicalDeviceExternalSemaphoreInfo), allowed_structs_VkPhysicalDeviceExternalSemaphoreInfo, GeneratedVulkanHeaderVersion, "VUID-VkPhysicalDeviceExternalSemaphoreInfo-pNext-pNext");
+
+        skip |= validate_flags("vkGetPhysicalDeviceExternalSemaphoreProperties", "pExternalSemaphoreInfo->handleType", "VkExternalSemaphoreHandleTypeFlagBits", AllVkExternalSemaphoreHandleTypeFlagBits, pExternalSemaphoreInfo->handleType, kRequiredSingleBit, "VUID-VkPhysicalDeviceExternalSemaphoreInfo-handleType-parameter", "VUID-VkPhysicalDeviceExternalSemaphoreInfo-handleType-parameter");
+    }
+    skip |= validate_struct_type("vkGetPhysicalDeviceExternalSemaphoreProperties", "pExternalSemaphoreProperties", "VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES", pExternalSemaphoreProperties, VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES, true, "VUID-vkGetPhysicalDeviceExternalSemaphoreProperties-pExternalSemaphoreProperties-parameter", "VUID-VkExternalSemaphoreProperties-sType-sType");
+    if (pExternalSemaphoreProperties != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetDescriptorSetLayoutSupport(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkGetDescriptorSetLayoutSupport", "pCreateInfo", "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, true, "VUID-vkGetDescriptorSetLayoutSupport-pCreateInfo-parameter", "VUID-VkDescriptorSetLayoutCreateInfo-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkDescriptorSetLayoutCreateInfo[] = { VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT };
+
+        skip |= validate_struct_pnext("vkGetDescriptorSetLayoutSupport", "pCreateInfo->pNext", "VkDescriptorSetLayoutBindingFlagsCreateInfoEXT", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkDescriptorSetLayoutCreateInfo), allowed_structs_VkDescriptorSetLayoutCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkDescriptorSetLayoutCreateInfo-pNext-pNext");
+
+        skip |= validate_flags("vkGetDescriptorSetLayoutSupport", "pCreateInfo->flags", "VkDescriptorSetLayoutCreateFlagBits", AllVkDescriptorSetLayoutCreateFlagBits, pCreateInfo->flags, kOptionalFlags, "VUID-VkDescriptorSetLayoutCreateInfo-flags-parameter");
+
+        skip |= validate_array("vkGetDescriptorSetLayoutSupport", "pCreateInfo->bindingCount", "pCreateInfo->pBindings", pCreateInfo->bindingCount, &pCreateInfo->pBindings, false, true, kVUIDUndefined, "VUID-VkDescriptorSetLayoutCreateInfo-pBindings-parameter");
+
+        if (pCreateInfo->pBindings != NULL)
+        {
+            for (uint32_t bindingIndex = 0; bindingIndex < pCreateInfo->bindingCount; ++bindingIndex)
+            {
+                skip |= validate_ranged_enum("vkGetDescriptorSetLayoutSupport", ParameterName("pCreateInfo->pBindings[%i].descriptorType", ParameterName::IndexVector{ bindingIndex }), "VkDescriptorType", AllVkDescriptorTypeEnums, pCreateInfo->pBindings[bindingIndex].descriptorType, "VUID-VkDescriptorSetLayoutBinding-descriptorType-parameter");
+            }
+        }
+    }
+    skip |= validate_struct_type("vkGetDescriptorSetLayoutSupport", "pSupport", "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT", pSupport, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, true, "VUID-vkGetDescriptorSetLayoutSupport-pSupport-parameter", "VUID-VkDescriptorSetLayoutSupport-sType-sType");
+    if (pSupport != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+
+
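+// VK_KHR_surface entry points: each validator below first reports a missing-extension error via
+// OutputExtensionError when VK_KHR_surface was not enabled on the instance, then checks parameters.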
+bool StatelessValidation::PreCallValidateDestroySurfaceKHR(
+    VkInstance                                  instance,
+    VkSurfaceKHR                                surface,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkDestroySurfaceKHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroySurfaceKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroySurfaceKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroySurfaceKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroySurfaceKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroySurfaceKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceSurfaceSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    VkSurfaceKHR                                surface,
+    VkBool32*                                   pSupported) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceSurfaceSupportKHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    skip |= validate_required_handle("vkGetPhysicalDeviceSurfaceSupportKHR", "surface", surface);
+    skip |= validate_required_pointer("vkGetPhysicalDeviceSurfaceSupportKHR", "pSupported", pSupported, "VUID-vkGetPhysicalDeviceSurfaceSupportKHR-pSupported-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceSurfaceCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilitiesKHR*                   pSurfaceCapabilities) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceSurfaceCapabilitiesKHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    skip |= validate_required_handle("vkGetPhysicalDeviceSurfaceCapabilitiesKHR", "surface", surface);
+    skip |= validate_required_pointer("vkGetPhysicalDeviceSurfaceCapabilitiesKHR", "pSurfaceCapabilities", pSurfaceCapabilities, "VUID-vkGetPhysicalDeviceSurfaceCapabilitiesKHR-pSurfaceCapabilities-parameter");
+    if (pSurfaceCapabilities != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceSurfaceFormatsKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormatKHR*                         pSurfaceFormats) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceSurfaceFormatsKHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    skip |= validate_required_handle("vkGetPhysicalDeviceSurfaceFormatsKHR", "surface", surface);
+    skip |= validate_array("vkGetPhysicalDeviceSurfaceFormatsKHR", "pSurfaceFormatCount", "pSurfaceFormats", pSurfaceFormatCount, &pSurfaceFormats, true, false, false, kVUIDUndefined, "VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-pSurfaceFormats-parameter");
+    if (pSurfaceFormats != NULL)
+    {
+        for (uint32_t pSurfaceFormatIndex = 0; pSurfaceFormatIndex < *pSurfaceFormatCount; ++pSurfaceFormatIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceSurfacePresentModesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceSurfacePresentModesKHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    skip |= validate_required_handle("vkGetPhysicalDeviceSurfacePresentModesKHR", "surface", surface);
+    skip |= validate_array("vkGetPhysicalDeviceSurfacePresentModesKHR", "pPresentModeCount", "pPresentModes", pPresentModeCount, &pPresentModes, true, false, false, kVUIDUndefined, "VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-pPresentModes-parameter");
+    return skip;
+}
+
+
+
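+// VK_KHR_swapchain entry points: these device-level validators require both VK_KHR_surface and
+// VK_KHR_swapchain to be enabled before any parameter checks are made.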
+bool StatelessValidation::PreCallValidateCreateSwapchainKHR(
+    VkDevice                                    device,
+    const VkSwapchainCreateInfoKHR*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchain) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateSwapchainKHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkCreateSwapchainKHR", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCreateSwapchainKHR", "pCreateInfo", "VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR", pCreateInfo, VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR, true, "VUID-vkCreateSwapchainKHR-pCreateInfo-parameter", "VUID-VkSwapchainCreateInfoKHR-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkSwapchainCreateInfoKHR[] = { VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR, VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR, VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT, VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT, VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD };
+
+        skip |= validate_struct_pnext("vkCreateSwapchainKHR", "pCreateInfo->pNext", "VkDeviceGroupSwapchainCreateInfoKHR, VkImageFormatListCreateInfoKHR, VkSurfaceFullScreenExclusiveInfoEXT, VkSurfaceFullScreenExclusiveWin32InfoEXT, VkSwapchainCounterCreateInfoEXT, VkSwapchainDisplayNativeHdrCreateInfoAMD", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkSwapchainCreateInfoKHR), allowed_structs_VkSwapchainCreateInfoKHR, GeneratedVulkanHeaderVersion, "VUID-VkSwapchainCreateInfoKHR-pNext-pNext");
+
+        skip |= validate_flags("vkCreateSwapchainKHR", "pCreateInfo->flags", "VkSwapchainCreateFlagBitsKHR", AllVkSwapchainCreateFlagBitsKHR, pCreateInfo->flags, kOptionalFlags, "VUID-VkSwapchainCreateInfoKHR-flags-parameter");
+
+        skip |= validate_required_handle("vkCreateSwapchainKHR", "pCreateInfo->surface", pCreateInfo->surface);
+
+        skip |= validate_ranged_enum("vkCreateSwapchainKHR", "pCreateInfo->imageFormat", "VkFormat", AllVkFormatEnums, pCreateInfo->imageFormat, "VUID-VkSwapchainCreateInfoKHR-imageFormat-parameter");
+
+        skip |= validate_ranged_enum("vkCreateSwapchainKHR", "pCreateInfo->imageColorSpace", "VkColorSpaceKHR", AllVkColorSpaceKHREnums, pCreateInfo->imageColorSpace, "VUID-VkSwapchainCreateInfoKHR-imageColorSpace-parameter");
+
+        // No xml-driven validation
+
+        skip |= validate_flags("vkCreateSwapchainKHR", "pCreateInfo->imageUsage", "VkImageUsageFlagBits", AllVkImageUsageFlagBits, pCreateInfo->imageUsage, kRequiredFlags, "VUID-VkSwapchainCreateInfoKHR-imageUsage-parameter", "VUID-VkSwapchainCreateInfoKHR-imageUsage-requiredbitmask");
+
+        skip |= validate_ranged_enum("vkCreateSwapchainKHR", "pCreateInfo->imageSharingMode", "VkSharingMode", AllVkSharingModeEnums, pCreateInfo->imageSharingMode, "VUID-VkSwapchainCreateInfoKHR-imageSharingMode-parameter");
+
+        skip |= validate_flags("vkCreateSwapchainKHR", "pCreateInfo->preTransform", "VkSurfaceTransformFlagBitsKHR", AllVkSurfaceTransformFlagBitsKHR, pCreateInfo->preTransform, kRequiredSingleBit, "VUID-VkSwapchainCreateInfoKHR-preTransform-parameter", "VUID-VkSwapchainCreateInfoKHR-preTransform-parameter");
+
+        skip |= validate_flags("vkCreateSwapchainKHR", "pCreateInfo->compositeAlpha", "VkCompositeAlphaFlagBitsKHR", AllVkCompositeAlphaFlagBitsKHR, pCreateInfo->compositeAlpha, kRequiredSingleBit, "VUID-VkSwapchainCreateInfoKHR-compositeAlpha-parameter", "VUID-VkSwapchainCreateInfoKHR-compositeAlpha-parameter");
+
+        skip |= validate_ranged_enum("vkCreateSwapchainKHR", "pCreateInfo->presentMode", "VkPresentModeKHR", AllVkPresentModeKHREnums, pCreateInfo->presentMode, "VUID-VkSwapchainCreateInfoKHR-presentMode-parameter");
+
+        skip |= validate_bool32("vkCreateSwapchainKHR", "pCreateInfo->clipped", pCreateInfo->clipped);
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateSwapchainKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateSwapchainKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateSwapchainKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateSwapchainKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateSwapchainKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateSwapchainKHR", "pSwapchain", pSwapchain, "VUID-vkCreateSwapchainKHR-pSwapchain-parameter");
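+    // Additional hand-written swapchain checks run only when all generated parameter checks above passed.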
+    if (!skip) skip |= manual_PreCallValidateCreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroySwapchainKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_surface) skip |= OutputExtensionError("vkDestroySwapchainKHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkDestroySwapchainKHR", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroySwapchainKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroySwapchainKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroySwapchainKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroySwapchainKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroySwapchainKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetSwapchainImagesKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pSwapchainImageCount,
+    VkImage*                                    pSwapchainImages) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetSwapchainImagesKHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkGetSwapchainImagesKHR", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+    skip |= validate_required_handle("vkGetSwapchainImagesKHR", "swapchain", swapchain);
+    skip |= validate_array("vkGetSwapchainImagesKHR", "pSwapchainImageCount", "pSwapchainImages", pSwapchainImageCount, &pSwapchainImages, true, false, false, kVUIDUndefined, "VUID-vkGetSwapchainImagesKHR-pSwapchainImages-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateAcquireNextImageKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint64_t                                    timeout,
+    VkSemaphore                                 semaphore,
+    VkFence                                     fence,
+    uint32_t*                                   pImageIndex) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_surface) skip |= OutputExtensionError("vkAcquireNextImageKHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkAcquireNextImageKHR", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+    skip |= validate_required_handle("vkAcquireNextImageKHR", "swapchain", swapchain);
+    skip |= validate_required_pointer("vkAcquireNextImageKHR", "pImageIndex", pImageIndex, "VUID-vkAcquireNextImageKHR-pImageIndex-parameter");
+    if (!skip) skip |= manual_PreCallValidateAcquireNextImageKHR(device, swapchain, timeout, semaphore, fence, pImageIndex);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateQueuePresentKHR(
+    VkQueue                                     queue,
+    const VkPresentInfoKHR*                     pPresentInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_surface) skip |= OutputExtensionError("vkQueuePresentKHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkQueuePresentKHR", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+    skip |= validate_struct_type("vkQueuePresentKHR", "pPresentInfo", "VK_STRUCTURE_TYPE_PRESENT_INFO_KHR", pPresentInfo, VK_STRUCTURE_TYPE_PRESENT_INFO_KHR, true, "VUID-vkQueuePresentKHR-pPresentInfo-parameter", "VUID-VkPresentInfoKHR-sType-sType");
+    if (pPresentInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkPresentInfoKHR[] = { VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR, VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR, VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP, VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR, VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE };
+
+        skip |= validate_struct_pnext("vkQueuePresentKHR", "pPresentInfo->pNext", "VkDeviceGroupPresentInfoKHR, VkDisplayPresentInfoKHR, VkPresentFrameTokenGGP, VkPresentRegionsKHR, VkPresentTimesInfoGOOGLE", pPresentInfo->pNext, ARRAY_SIZE(allowed_structs_VkPresentInfoKHR), allowed_structs_VkPresentInfoKHR, GeneratedVulkanHeaderVersion, "VUID-VkPresentInfoKHR-pNext-pNext");
+
+        skip |= validate_array("vkQueuePresentKHR", "pPresentInfo->waitSemaphoreCount", "pPresentInfo->pWaitSemaphores", pPresentInfo->waitSemaphoreCount, &pPresentInfo->pWaitSemaphores, false, true, kVUIDUndefined, "VUID-VkPresentInfoKHR-pWaitSemaphores-parameter");
+
+        skip |= validate_handle_array("vkQueuePresentKHR", "pPresentInfo->swapchainCount", "pPresentInfo->pSwapchains", pPresentInfo->swapchainCount, pPresentInfo->pSwapchains, true, true);
+
+        skip |= validate_array("vkQueuePresentKHR", "pPresentInfo->swapchainCount", "pPresentInfo->pImageIndices", pPresentInfo->swapchainCount, &pPresentInfo->pImageIndices, true, true, "VUID-VkPresentInfoKHR-swapchainCount-arraylength", "VUID-VkPresentInfoKHR-pImageIndices-parameter");
+
+        skip |= validate_array("vkQueuePresentKHR", "pPresentInfo->swapchainCount", "pPresentInfo->pResults", pPresentInfo->swapchainCount, &pPresentInfo->pResults, true, false, "VUID-VkPresentInfoKHR-swapchainCount-arraylength", "VUID-VkPresentInfoKHR-pResults-parameter");
+    }
+    if (!skip) skip |= manual_PreCallValidateQueuePresentKHR(queue, pPresentInfo);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetDeviceGroupPresentCapabilitiesKHR(
+    VkDevice                                    device,
+    VkDeviceGroupPresentCapabilitiesKHR*        pDeviceGroupPresentCapabilities) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetDeviceGroupPresentCapabilitiesKHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkGetDeviceGroupPresentCapabilitiesKHR", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetDeviceGroupPresentCapabilitiesKHR", "pDeviceGroupPresentCapabilities", "VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR", pDeviceGroupPresentCapabilities, VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR, true, "VUID-vkGetDeviceGroupPresentCapabilitiesKHR-pDeviceGroupPresentCapabilities-parameter", "VUID-VkDeviceGroupPresentCapabilitiesKHR-sType-sType");
+    if (pDeviceGroupPresentCapabilities != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetDeviceGroupPresentCapabilitiesKHR", "pDeviceGroupPresentCapabilities->pNext", NULL, pDeviceGroupPresentCapabilities->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDeviceGroupPresentCapabilitiesKHR-pNext-pNext");
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetDeviceGroupSurfacePresentModesKHR(
+    VkDevice                                    device,
+    VkSurfaceKHR                                surface,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetDeviceGroupSurfacePresentModesKHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkGetDeviceGroupSurfacePresentModesKHR", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+    skip |= validate_required_handle("vkGetDeviceGroupSurfacePresentModesKHR", "surface", surface);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDevicePresentRectanglesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pRectCount,
+    VkRect2D*                                   pRects) const {
+    bool skip = false;
+    skip |= validate_required_handle("vkGetPhysicalDevicePresentRectanglesKHR", "surface", surface);
+    skip |= validate_array("vkGetPhysicalDevicePresentRectanglesKHR", "pRectCount", "pRects", pRectCount, &pRects, true, false, false, kVUIDUndefined, "VUID-vkGetPhysicalDevicePresentRectanglesKHR-pRects-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateAcquireNextImage2KHR(
+    VkDevice                                    device,
+    const VkAcquireNextImageInfoKHR*            pAcquireInfo,
+    uint32_t*                                   pImageIndex) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_surface) skip |= OutputExtensionError("vkAcquireNextImage2KHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkAcquireNextImage2KHR", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+    skip |= validate_struct_type("vkAcquireNextImage2KHR", "pAcquireInfo", "VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR", pAcquireInfo, VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR, true, "VUID-vkAcquireNextImage2KHR-pAcquireInfo-parameter", "VUID-VkAcquireNextImageInfoKHR-sType-sType");
+    if (pAcquireInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkAcquireNextImage2KHR", "pAcquireInfo->pNext", NULL, pAcquireInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkAcquireNextImageInfoKHR-pNext-pNext");
+
+        skip |= validate_required_handle("vkAcquireNextImage2KHR", "pAcquireInfo->swapchain", pAcquireInfo->swapchain);
+    }
+    skip |= validate_required_pointer("vkAcquireNextImage2KHR", "pImageIndex", pImageIndex, "VUID-vkAcquireNextImage2KHR-pImageIndex-parameter");
+    if (!skip) skip |= manual_PreCallValidateAcquireNextImage2KHR(device, pAcquireInfo, pImageIndex);
+    return skip;
+}
+
+
+
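+// VK_KHR_display entry points: each validator requires both VK_KHR_surface and VK_KHR_display
+// to be enabled on the instance.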
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceDisplayPropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPropertiesKHR*                     pProperties) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceDisplayPropertiesKHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!instance_extensions.vk_khr_display) skip |= OutputExtensionError("vkGetPhysicalDeviceDisplayPropertiesKHR", VK_KHR_DISPLAY_EXTENSION_NAME);
+    skip |= validate_array("vkGetPhysicalDeviceDisplayPropertiesKHR", "pPropertyCount", "pProperties", pPropertyCount, &pProperties, true, false, false, kVUIDUndefined, "VUID-vkGetPhysicalDeviceDisplayPropertiesKHR-pProperties-parameter");
+    if (pProperties != NULL)
+    {
+        for (uint32_t pPropertyIndex = 0; pPropertyIndex < *pPropertyCount; ++pPropertyIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceDisplayPlanePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPlanePropertiesKHR*                pProperties) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceDisplayPlanePropertiesKHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!instance_extensions.vk_khr_display) skip |= OutputExtensionError("vkGetPhysicalDeviceDisplayPlanePropertiesKHR", VK_KHR_DISPLAY_EXTENSION_NAME);
+    skip |= validate_array("vkGetPhysicalDeviceDisplayPlanePropertiesKHR", "pPropertyCount", "pProperties", pPropertyCount, &pProperties, true, false, false, kVUIDUndefined, "VUID-vkGetPhysicalDeviceDisplayPlanePropertiesKHR-pProperties-parameter");
+    if (pProperties != NULL)
+    {
+        for (uint32_t pPropertyIndex = 0; pPropertyIndex < *pPropertyCount; ++pPropertyIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetDisplayPlaneSupportedDisplaysKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    planeIndex,
+    uint32_t*                                   pDisplayCount,
+    VkDisplayKHR*                               pDisplays) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetDisplayPlaneSupportedDisplaysKHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!instance_extensions.vk_khr_display) skip |= OutputExtensionError("vkGetDisplayPlaneSupportedDisplaysKHR", VK_KHR_DISPLAY_EXTENSION_NAME);
+    skip |= validate_array("vkGetDisplayPlaneSupportedDisplaysKHR", "pDisplayCount", "pDisplays", pDisplayCount, &pDisplays, true, false, false, kVUIDUndefined, "VUID-vkGetDisplayPlaneSupportedDisplaysKHR-pDisplays-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetDisplayModePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModePropertiesKHR*                 pProperties) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetDisplayModePropertiesKHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!instance_extensions.vk_khr_display) skip |= OutputExtensionError("vkGetDisplayModePropertiesKHR", VK_KHR_DISPLAY_EXTENSION_NAME);
+    skip |= validate_required_handle("vkGetDisplayModePropertiesKHR", "display", display);
+    skip |= validate_array("vkGetDisplayModePropertiesKHR", "pPropertyCount", "pProperties", pPropertyCount, &pProperties, true, false, false, kVUIDUndefined, "VUID-vkGetDisplayModePropertiesKHR-pProperties-parameter");
+    if (pProperties != NULL)
+    {
+        for (uint32_t pPropertyIndex = 0; pPropertyIndex < *pPropertyCount; ++pPropertyIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCreateDisplayModeKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    const VkDisplayModeCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDisplayModeKHR*                           pMode) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateDisplayModeKHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!instance_extensions.vk_khr_display) skip |= OutputExtensionError("vkCreateDisplayModeKHR", VK_KHR_DISPLAY_EXTENSION_NAME);
+    skip |= validate_required_handle("vkCreateDisplayModeKHR", "display", display);
+    skip |= validate_struct_type("vkCreateDisplayModeKHR", "pCreateInfo", "VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR", pCreateInfo, VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR, true, "VUID-vkCreateDisplayModeKHR-pCreateInfo-parameter", "VUID-VkDisplayModeCreateInfoKHR-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCreateDisplayModeKHR", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDisplayModeCreateInfoKHR-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkCreateDisplayModeKHR", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkDisplayModeCreateInfoKHR-flags-zerobitmask");
+
+        // No xml-driven validation
+    }
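+    // A custom allocator must supply pfnAllocation, pfnReallocation and pfnFree; the
+    // internal allocation/free notification callbacks are optional but must be set as a
+    // pair (VUID-VkAllocationCallbacks-pfnInternalAllocation-00635), which the checks
+    // below enforce.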
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateDisplayModeKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateDisplayModeKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateDisplayModeKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateDisplayModeKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateDisplayModeKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateDisplayModeKHR", "pMode", pMode, "VUID-vkCreateDisplayModeKHR-pMode-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetDisplayPlaneCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayModeKHR                            mode,
+    uint32_t                                    planeIndex,
+    VkDisplayPlaneCapabilitiesKHR*              pCapabilities) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetDisplayPlaneCapabilitiesKHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!instance_extensions.vk_khr_display) skip |= OutputExtensionError("vkGetDisplayPlaneCapabilitiesKHR", VK_KHR_DISPLAY_EXTENSION_NAME);
+    skip |= validate_required_handle("vkGetDisplayPlaneCapabilitiesKHR", "mode", mode);
+    skip |= validate_required_pointer("vkGetDisplayPlaneCapabilitiesKHR", "pCapabilities", pCapabilities, "VUID-vkGetDisplayPlaneCapabilitiesKHR-pCapabilities-parameter");
+    if (pCapabilities != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCreateDisplayPlaneSurfaceKHR(
+    VkInstance                                  instance,
+    const VkDisplaySurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateDisplayPlaneSurfaceKHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!instance_extensions.vk_khr_display) skip |= OutputExtensionError("vkCreateDisplayPlaneSurfaceKHR", VK_KHR_DISPLAY_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCreateDisplayPlaneSurfaceKHR", "pCreateInfo", "VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR", pCreateInfo, VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR, true, "VUID-vkCreateDisplayPlaneSurfaceKHR-pCreateInfo-parameter", "VUID-VkDisplaySurfaceCreateInfoKHR-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCreateDisplayPlaneSurfaceKHR", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDisplaySurfaceCreateInfoKHR-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkCreateDisplayPlaneSurfaceKHR", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkDisplaySurfaceCreateInfoKHR-flags-zerobitmask");
+
+        skip |= validate_required_handle("vkCreateDisplayPlaneSurfaceKHR", "pCreateInfo->displayMode", pCreateInfo->displayMode);
+
+        skip |= validate_flags("vkCreateDisplayPlaneSurfaceKHR", "pCreateInfo->transform", "VkSurfaceTransformFlagBitsKHR", AllVkSurfaceTransformFlagBitsKHR, pCreateInfo->transform, kRequiredSingleBit, "VUID-VkDisplaySurfaceCreateInfoKHR-transform-parameter", "VUID-VkDisplaySurfaceCreateInfoKHR-transform-parameter");
+
+        skip |= validate_flags("vkCreateDisplayPlaneSurfaceKHR", "pCreateInfo->alphaMode", "VkDisplayPlaneAlphaFlagBitsKHR", AllVkDisplayPlaneAlphaFlagBitsKHR, pCreateInfo->alphaMode, kRequiredSingleBit, "VUID-VkDisplaySurfaceCreateInfoKHR-alphaMode-parameter", "VUID-VkDisplaySurfaceCreateInfoKHR-alphaMode-parameter");
+
+        // No xml-driven validation
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateDisplayPlaneSurfaceKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateDisplayPlaneSurfaceKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateDisplayPlaneSurfaceKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateDisplayPlaneSurfaceKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateDisplayPlaneSurfaceKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateDisplayPlaneSurfaceKHR", "pSurface", pSurface, "VUID-vkCreateDisplayPlaneSurfaceKHR-pSurface-parameter");
+    return skip;
+}
+
+
+
+bool StatelessValidation::PreCallValidateCreateSharedSwapchainsKHR(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainCreateInfoKHR*             pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchains) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_display) skip |= OutputExtensionError("vkCreateSharedSwapchainsKHR", VK_KHR_DISPLAY_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkCreateSharedSwapchainsKHR", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_display_swapchain) skip |= OutputExtensionError("vkCreateSharedSwapchainsKHR", VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME);
+    skip |= validate_struct_type_array("vkCreateSharedSwapchainsKHR", "swapchainCount", "pCreateInfos", "VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR", swapchainCount, pCreateInfos, VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR, true, true, "VUID-VkSwapchainCreateInfoKHR-sType-sType", "VUID-vkCreateSharedSwapchainsKHR-pCreateInfos-parameter", "VUID-vkCreateSharedSwapchainsKHR-swapchainCount-arraylength");
+    if (pCreateInfos != NULL)
+    {
+        for (uint32_t swapchainIndex = 0; swapchainIndex < swapchainCount; ++swapchainIndex)
+        {
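+            // Validate each swapchain create info in the array: restrict pNext to the
+            // allowed extension structs listed below and check each flag/enum member
+            // against its valid range.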
+            const VkStructureType allowed_structs_VkSwapchainCreateInfoKHR[] = { VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR, VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR, VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT, VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT, VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD };
+
+            skip |= validate_struct_pnext("vkCreateSharedSwapchainsKHR", ParameterName("pCreateInfos[%i].pNext", ParameterName::IndexVector{ swapchainIndex }), "VkDeviceGroupSwapchainCreateInfoKHR, VkImageFormatListCreateInfoKHR, VkSurfaceFullScreenExclusiveInfoEXT, VkSurfaceFullScreenExclusiveWin32InfoEXT, VkSwapchainCounterCreateInfoEXT, VkSwapchainDisplayNativeHdrCreateInfoAMD", pCreateInfos[swapchainIndex].pNext, ARRAY_SIZE(allowed_structs_VkSwapchainCreateInfoKHR), allowed_structs_VkSwapchainCreateInfoKHR, GeneratedVulkanHeaderVersion, "VUID-VkSwapchainCreateInfoKHR-pNext-pNext");
+
+            skip |= validate_flags("vkCreateSharedSwapchainsKHR", ParameterName("pCreateInfos[%i].flags", ParameterName::IndexVector{ swapchainIndex }), "VkSwapchainCreateFlagBitsKHR", AllVkSwapchainCreateFlagBitsKHR, pCreateInfos[swapchainIndex].flags, kOptionalFlags, "VUID-VkSwapchainCreateInfoKHR-flags-parameter");
+
+            skip |= validate_required_handle("vkCreateSharedSwapchainsKHR", ParameterName("pCreateInfos[%i].surface", ParameterName::IndexVector{ swapchainIndex }), pCreateInfos[swapchainIndex].surface);
+
+            skip |= validate_ranged_enum("vkCreateSharedSwapchainsKHR", ParameterName("pCreateInfos[%i].imageFormat", ParameterName::IndexVector{ swapchainIndex }), "VkFormat", AllVkFormatEnums, pCreateInfos[swapchainIndex].imageFormat, "VUID-VkSwapchainCreateInfoKHR-imageFormat-parameter");
+
+            skip |= validate_ranged_enum("vkCreateSharedSwapchainsKHR", ParameterName("pCreateInfos[%i].imageColorSpace", ParameterName::IndexVector{ swapchainIndex }), "VkColorSpaceKHR", AllVkColorSpaceKHREnums, pCreateInfos[swapchainIndex].imageColorSpace, "VUID-VkSwapchainCreateInfoKHR-imageColorSpace-parameter");
+
+            // No xml-driven validation
+
+            skip |= validate_flags("vkCreateSharedSwapchainsKHR", ParameterName("pCreateInfos[%i].imageUsage", ParameterName::IndexVector{ swapchainIndex }), "VkImageUsageFlagBits", AllVkImageUsageFlagBits, pCreateInfos[swapchainIndex].imageUsage, kRequiredFlags, "VUID-VkSwapchainCreateInfoKHR-imageUsage-parameter", "VUID-VkSwapchainCreateInfoKHR-imageUsage-requiredbitmask");
+
+            skip |= validate_ranged_enum("vkCreateSharedSwapchainsKHR", ParameterName("pCreateInfos[%i].imageSharingMode", ParameterName::IndexVector{ swapchainIndex }), "VkSharingMode", AllVkSharingModeEnums, pCreateInfos[swapchainIndex].imageSharingMode, "VUID-VkSwapchainCreateInfoKHR-imageSharingMode-parameter");
+
+            skip |= validate_flags("vkCreateSharedSwapchainsKHR", ParameterName("pCreateInfos[%i].preTransform", ParameterName::IndexVector{ swapchainIndex }), "VkSurfaceTransformFlagBitsKHR", AllVkSurfaceTransformFlagBitsKHR, pCreateInfos[swapchainIndex].preTransform, kRequiredSingleBit, "VUID-VkSwapchainCreateInfoKHR-preTransform-parameter", "VUID-VkSwapchainCreateInfoKHR-preTransform-parameter");
+
+            skip |= validate_flags("vkCreateSharedSwapchainsKHR", ParameterName("pCreateInfos[%i].compositeAlpha", ParameterName::IndexVector{ swapchainIndex }), "VkCompositeAlphaFlagBitsKHR", AllVkCompositeAlphaFlagBitsKHR, pCreateInfos[swapchainIndex].compositeAlpha, kRequiredSingleBit, "VUID-VkSwapchainCreateInfoKHR-compositeAlpha-parameter", "VUID-VkSwapchainCreateInfoKHR-compositeAlpha-parameter");
+
+            skip |= validate_ranged_enum("vkCreateSharedSwapchainsKHR", ParameterName("pCreateInfos[%i].presentMode", ParameterName::IndexVector{ swapchainIndex }), "VkPresentModeKHR", AllVkPresentModeKHREnums, pCreateInfos[swapchainIndex].presentMode, "VUID-VkSwapchainCreateInfoKHR-presentMode-parameter");
+
+            skip |= validate_bool32("vkCreateSharedSwapchainsKHR", ParameterName("pCreateInfos[%i].clipped", ParameterName::IndexVector{ swapchainIndex }), pCreateInfos[swapchainIndex].clipped);
+        }
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateSharedSwapchainsKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateSharedSwapchainsKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateSharedSwapchainsKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateSharedSwapchainsKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateSharedSwapchainsKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_array("vkCreateSharedSwapchainsKHR", "swapchainCount", "pSwapchains", swapchainCount, &pSwapchains, true, true, "VUID-vkCreateSharedSwapchainsKHR-swapchainCount-arraylength", "VUID-vkCreateSharedSwapchainsKHR-pSwapchains-parameter");
+    return skip;
+}
+
+
+
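+// The WSI surface-creation validators below are compiled only when the corresponding
+// VK_USE_PLATFORM_* macro is defined for the target platform.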
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+
+bool StatelessValidation::PreCallValidateCreateXlibSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXlibSurfaceCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateXlibSurfaceKHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!instance_extensions.vk_khr_xlib_surface) skip |= OutputExtensionError("vkCreateXlibSurfaceKHR", VK_KHR_XLIB_SURFACE_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCreateXlibSurfaceKHR", "pCreateInfo", "VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR", pCreateInfo, VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR, true, "VUID-vkCreateXlibSurfaceKHR-pCreateInfo-parameter", "VUID-VkXlibSurfaceCreateInfoKHR-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCreateXlibSurfaceKHR", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkXlibSurfaceCreateInfoKHR-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkCreateXlibSurfaceKHR", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkXlibSurfaceCreateInfoKHR-flags-zerobitmask");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateXlibSurfaceKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateXlibSurfaceKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateXlibSurfaceKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateXlibSurfaceKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateXlibSurfaceKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateXlibSurfaceKHR", "pSurface", pSurface, "VUID-vkCreateXlibSurfaceKHR-pSurface-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceXlibPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    Display*                                    dpy,
+    VisualID                                    visualID) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceXlibPresentationSupportKHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!instance_extensions.vk_khr_xlib_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceXlibPresentationSupportKHR", VK_KHR_XLIB_SURFACE_EXTENSION_NAME);
+    skip |= validate_required_pointer("vkGetPhysicalDeviceXlibPresentationSupportKHR", "dpy", dpy, "VUID-vkGetPhysicalDeviceXlibPresentationSupportKHR-dpy-parameter");
+    return skip;
+}
+
+#endif // VK_USE_PLATFORM_XLIB_KHR
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+
+bool StatelessValidation::PreCallValidateCreateXcbSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXcbSurfaceCreateInfoKHR*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateXcbSurfaceKHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!instance_extensions.vk_khr_xcb_surface) skip |= OutputExtensionError("vkCreateXcbSurfaceKHR", VK_KHR_XCB_SURFACE_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCreateXcbSurfaceKHR", "pCreateInfo", "VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR", pCreateInfo, VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR, true, "VUID-vkCreateXcbSurfaceKHR-pCreateInfo-parameter", "VUID-VkXcbSurfaceCreateInfoKHR-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCreateXcbSurfaceKHR", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkXcbSurfaceCreateInfoKHR-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkCreateXcbSurfaceKHR", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkXcbSurfaceCreateInfoKHR-flags-zerobitmask");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateXcbSurfaceKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateXcbSurfaceKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateXcbSurfaceKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateXcbSurfaceKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateXcbSurfaceKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateXcbSurfaceKHR", "pSurface", pSurface, "VUID-vkCreateXcbSurfaceKHR-pSurface-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceXcbPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    xcb_connection_t*                           connection,
+    xcb_visualid_t                              visual_id) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceXcbPresentationSupportKHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!instance_extensions.vk_khr_xcb_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceXcbPresentationSupportKHR", VK_KHR_XCB_SURFACE_EXTENSION_NAME);
+    skip |= validate_required_pointer("vkGetPhysicalDeviceXcbPresentationSupportKHR", "connection", connection, "VUID-vkGetPhysicalDeviceXcbPresentationSupportKHR-connection-parameter");
+    return skip;
+}
+
+#endif // VK_USE_PLATFORM_XCB_KHR
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+
+bool StatelessValidation::PreCallValidateCreateWaylandSurfaceKHR(
+    VkInstance                                  instance,
+    const VkWaylandSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateWaylandSurfaceKHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!instance_extensions.vk_khr_wayland_surface) skip |= OutputExtensionError("vkCreateWaylandSurfaceKHR", VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCreateWaylandSurfaceKHR", "pCreateInfo", "VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR", pCreateInfo, VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR, true, "VUID-vkCreateWaylandSurfaceKHR-pCreateInfo-parameter", "VUID-VkWaylandSurfaceCreateInfoKHR-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCreateWaylandSurfaceKHR", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkWaylandSurfaceCreateInfoKHR-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkCreateWaylandSurfaceKHR", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkWaylandSurfaceCreateInfoKHR-flags-zerobitmask");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateWaylandSurfaceKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateWaylandSurfaceKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateWaylandSurfaceKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateWaylandSurfaceKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateWaylandSurfaceKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateWaylandSurfaceKHR", "pSurface", pSurface, "VUID-vkCreateWaylandSurfaceKHR-pSurface-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceWaylandPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    struct wl_display*                          display) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceWaylandPresentationSupportKHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!instance_extensions.vk_khr_wayland_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceWaylandPresentationSupportKHR", VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME);
+    skip |= validate_required_pointer("vkGetPhysicalDeviceWaylandPresentationSupportKHR", "display", display, "VUID-vkGetPhysicalDeviceWaylandPresentationSupportKHR-display-parameter");
+    return skip;
+}
+
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+bool StatelessValidation::PreCallValidateCreateAndroidSurfaceKHR(
+    VkInstance                                  instance,
+    const VkAndroidSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateAndroidSurfaceKHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!instance_extensions.vk_khr_android_surface) skip |= OutputExtensionError("vkCreateAndroidSurfaceKHR", VK_KHR_ANDROID_SURFACE_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCreateAndroidSurfaceKHR", "pCreateInfo", "VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR", pCreateInfo, VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR, true, "VUID-vkCreateAndroidSurfaceKHR-pCreateInfo-parameter", "VUID-VkAndroidSurfaceCreateInfoKHR-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCreateAndroidSurfaceKHR", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkAndroidSurfaceCreateInfoKHR-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkCreateAndroidSurfaceKHR", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkAndroidSurfaceCreateInfoKHR-flags-zerobitmask");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateAndroidSurfaceKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateAndroidSurfaceKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateAndroidSurfaceKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateAndroidSurfaceKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateAndroidSurfaceKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateAndroidSurfaceKHR", "pSurface", pSurface, "VUID-vkCreateAndroidSurfaceKHR-pSurface-parameter");
+    return skip;
+}
+
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+bool StatelessValidation::PreCallValidateCreateWin32SurfaceKHR(
+    VkInstance                                  instance,
+    const VkWin32SurfaceCreateInfoKHR*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateWin32SurfaceKHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!instance_extensions.vk_khr_win32_surface) skip |= OutputExtensionError("vkCreateWin32SurfaceKHR", VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCreateWin32SurfaceKHR", "pCreateInfo", "VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR", pCreateInfo, VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR, true, "VUID-vkCreateWin32SurfaceKHR-pCreateInfo-parameter", "VUID-VkWin32SurfaceCreateInfoKHR-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCreateWin32SurfaceKHR", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkWin32SurfaceCreateInfoKHR-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkCreateWin32SurfaceKHR", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkWin32SurfaceCreateInfoKHR-flags-zerobitmask");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateWin32SurfaceKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateWin32SurfaceKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateWin32SurfaceKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateWin32SurfaceKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateWin32SurfaceKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateWin32SurfaceKHR", "pSurface", pSurface, "VUID-vkCreateWin32SurfaceKHR-pSurface-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceWin32PresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceWin32PresentationSupportKHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!instance_extensions.vk_khr_win32_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceWin32PresentationSupportKHR", VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
+    // No xml-driven validation
+    return skip;
+}
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+
+
+
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceFeatures2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures2*                  pFeatures) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetPhysicalDeviceFeatures2KHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetPhysicalDeviceFeatures2KHR", "pFeatures", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2", pFeatures, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, true, "VUID-vkGetPhysicalDeviceFeatures2-pFeatures-parameter", "VUID-VkPhysicalDeviceFeatures2-sType-sType");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties2*                pProperties) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetPhysicalDeviceProperties2KHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetPhysicalDeviceProperties2KHR", "pProperties", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2", pProperties, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, true, "VUID-vkGetPhysicalDeviceProperties2-pProperties-parameter", "VUID-VkPhysicalDeviceProperties2-sType-sType");
+    if (pProperties != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties2*                        pFormatProperties) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetPhysicalDeviceFormatProperties2KHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    skip |= validate_ranged_enum("vkGetPhysicalDeviceFormatProperties2KHR", "format", "VkFormat", AllVkFormatEnums, format, "VUID-vkGetPhysicalDeviceFormatProperties2-format-parameter");
+    skip |= validate_struct_type("vkGetPhysicalDeviceFormatProperties2KHR", "pFormatProperties", "VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2", pFormatProperties, VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, true, "VUID-vkGetPhysicalDeviceFormatProperties2-pFormatProperties-parameter", "VUID-VkFormatProperties2-sType-sType");
+    if (pFormatProperties != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceImageFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceImageFormatInfo2*     pImageFormatInfo,
+    VkImageFormatProperties2*                   pImageFormatProperties) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetPhysicalDeviceImageFormatProperties2KHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetPhysicalDeviceImageFormatProperties2KHR", "pImageFormatInfo", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2", pImageFormatInfo, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2, true, "VUID-vkGetPhysicalDeviceImageFormatProperties2-pImageFormatInfo-parameter", "VUID-VkPhysicalDeviceImageFormatInfo2-sType-sType");
+    if (pImageFormatInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkPhysicalDeviceImageFormatInfo2[] = { VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR, VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT };
+
+        skip |= validate_struct_pnext("vkGetPhysicalDeviceImageFormatProperties2KHR", "pImageFormatInfo->pNext", "VkImageFormatListCreateInfoKHR, VkImageStencilUsageCreateInfoEXT, VkPhysicalDeviceExternalImageFormatInfo, VkPhysicalDeviceImageDrmFormatModifierInfoEXT, VkPhysicalDeviceImageViewImageFormatInfoEXT", pImageFormatInfo->pNext, ARRAY_SIZE(allowed_structs_VkPhysicalDeviceImageFormatInfo2), allowed_structs_VkPhysicalDeviceImageFormatInfo2, GeneratedVulkanHeaderVersion, "VUID-VkPhysicalDeviceImageFormatInfo2-pNext-pNext");
+
+        skip |= validate_ranged_enum("vkGetPhysicalDeviceImageFormatProperties2KHR", "pImageFormatInfo->format", "VkFormat", AllVkFormatEnums, pImageFormatInfo->format, "VUID-VkPhysicalDeviceImageFormatInfo2-format-parameter");
+
+        skip |= validate_ranged_enum("vkGetPhysicalDeviceImageFormatProperties2KHR", "pImageFormatInfo->type", "VkImageType", AllVkImageTypeEnums, pImageFormatInfo->type, "VUID-VkPhysicalDeviceImageFormatInfo2-type-parameter");
+
+        skip |= validate_ranged_enum("vkGetPhysicalDeviceImageFormatProperties2KHR", "pImageFormatInfo->tiling", "VkImageTiling", AllVkImageTilingEnums, pImageFormatInfo->tiling, "VUID-VkPhysicalDeviceImageFormatInfo2-tiling-parameter");
+
+        skip |= validate_flags("vkGetPhysicalDeviceImageFormatProperties2KHR", "pImageFormatInfo->usage", "VkImageUsageFlagBits", AllVkImageUsageFlagBits, pImageFormatInfo->usage, kRequiredFlags, "VUID-VkPhysicalDeviceImageFormatInfo2-usage-parameter", "VUID-VkPhysicalDeviceImageFormatInfo2-usage-requiredbitmask");
+
+        skip |= validate_flags("vkGetPhysicalDeviceImageFormatProperties2KHR", "pImageFormatInfo->flags", "VkImageCreateFlagBits", AllVkImageCreateFlagBits, pImageFormatInfo->flags, kOptionalFlags, "VUID-VkPhysicalDeviceImageFormatInfo2-flags-parameter");
+    }
+    skip |= validate_struct_type("vkGetPhysicalDeviceImageFormatProperties2KHR", "pImageFormatProperties", "VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2", pImageFormatProperties, VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, true, "VUID-vkGetPhysicalDeviceImageFormatProperties2-pImageFormatProperties-parameter", "VUID-VkImageFormatProperties2-sType-sType");
+    if (pImageFormatProperties != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceQueueFamilyProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties2*                   pQueueFamilyProperties) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetPhysicalDeviceQueueFamilyProperties2KHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    skip |= validate_struct_type_array("vkGetPhysicalDeviceQueueFamilyProperties2KHR", "pQueueFamilyPropertyCount", "pQueueFamilyProperties", "VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2", pQueueFamilyPropertyCount, pQueueFamilyProperties, VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2, true, false, false, "VUID-VkQueueFamilyProperties2-sType-sType", "VUID-vkGetPhysicalDeviceQueueFamilyProperties2-pQueueFamilyProperties-parameter", kVUIDUndefined);
+    if (pQueueFamilyProperties != NULL)
+    {
+        for (uint32_t pQueueFamilyPropertyIndex = 0; pQueueFamilyPropertyIndex < *pQueueFamilyPropertyCount; ++pQueueFamilyPropertyIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceMemoryProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties2*          pMemoryProperties) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetPhysicalDeviceMemoryProperties2KHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetPhysicalDeviceMemoryProperties2KHR", "pMemoryProperties", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2", pMemoryProperties, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2, true, "VUID-vkGetPhysicalDeviceMemoryProperties2-pMemoryProperties-parameter", "VUID-VkPhysicalDeviceMemoryProperties2-sType-sType");
+    if (pMemoryProperties != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceSparseImageFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties2*             pProperties) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetPhysicalDeviceSparseImageFormatProperties2KHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetPhysicalDeviceSparseImageFormatProperties2KHR", "pFormatInfo", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2", pFormatInfo, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2, true, "VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-pFormatInfo-parameter", "VUID-VkPhysicalDeviceSparseImageFormatInfo2-sType-sType");
+    if (pFormatInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetPhysicalDeviceSparseImageFormatProperties2KHR", "pFormatInfo->pNext", NULL, pFormatInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPhysicalDeviceSparseImageFormatInfo2-pNext-pNext");
+
+        skip |= validate_ranged_enum("vkGetPhysicalDeviceSparseImageFormatProperties2KHR", "pFormatInfo->format", "VkFormat", AllVkFormatEnums, pFormatInfo->format, "VUID-VkPhysicalDeviceSparseImageFormatInfo2-format-parameter");
+
+        skip |= validate_ranged_enum("vkGetPhysicalDeviceSparseImageFormatProperties2KHR", "pFormatInfo->type", "VkImageType", AllVkImageTypeEnums, pFormatInfo->type, "VUID-VkPhysicalDeviceSparseImageFormatInfo2-type-parameter");
+
+        skip |= validate_flags("vkGetPhysicalDeviceSparseImageFormatProperties2KHR", "pFormatInfo->samples", "VkSampleCountFlagBits", AllVkSampleCountFlagBits, pFormatInfo->samples, kRequiredSingleBit, "VUID-VkPhysicalDeviceSparseImageFormatInfo2-samples-parameter", "VUID-VkPhysicalDeviceSparseImageFormatInfo2-samples-parameter");
+
+        skip |= validate_flags("vkGetPhysicalDeviceSparseImageFormatProperties2KHR", "pFormatInfo->usage", "VkImageUsageFlagBits", AllVkImageUsageFlagBits, pFormatInfo->usage, kRequiredFlags, "VUID-VkPhysicalDeviceSparseImageFormatInfo2-usage-parameter", "VUID-VkPhysicalDeviceSparseImageFormatInfo2-usage-requiredbitmask");
+
+        skip |= validate_ranged_enum("vkGetPhysicalDeviceSparseImageFormatProperties2KHR", "pFormatInfo->tiling", "VkImageTiling", AllVkImageTilingEnums, pFormatInfo->tiling, "VUID-VkPhysicalDeviceSparseImageFormatInfo2-tiling-parameter");
+    }
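+    // pPropertyCount/pProperties follow the usual two-call enumeration pattern: the count
+    // pointer is required, while pProperties may be NULL when only querying the count.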
+    skip |= validate_struct_type_array("vkGetPhysicalDeviceSparseImageFormatProperties2KHR", "pPropertyCount", "pProperties", "VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2", pPropertyCount, pProperties, VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2, true, false, false, "VUID-VkSparseImageFormatProperties2-sType-sType", "VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-pProperties-parameter", kVUIDUndefined);
+    if (pProperties != NULL)
+    {
+        for (uint32_t pPropertyIndex = 0; pPropertyIndex < *pPropertyCount; ++pPropertyIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    return skip;
+}
+
+
+
+bool StatelessValidation::PreCallValidateGetDeviceGroupPeerMemoryFeaturesKHR(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_device_group_creation) skip |= OutputExtensionError("vkGetDeviceGroupPeerMemoryFeaturesKHR", VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_device_group) skip |= OutputExtensionError("vkGetDeviceGroupPeerMemoryFeaturesKHR", VK_KHR_DEVICE_GROUP_EXTENSION_NAME);
+    skip |= validate_required_pointer("vkGetDeviceGroupPeerMemoryFeaturesKHR", "pPeerMemoryFeatures", pPeerMemoryFeatures, "VUID-vkGetDeviceGroupPeerMemoryFeatures-pPeerMemoryFeatures-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdSetDeviceMaskKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_device_group_creation) skip |= OutputExtensionError("vkCmdSetDeviceMaskKHR", VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_device_group) skip |= OutputExtensionError("vkCmdSetDeviceMaskKHR", VK_KHR_DEVICE_GROUP_EXTENSION_NAME);
+    // No xml-driven validation
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdDispatchBaseKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_device_group_creation) skip |= OutputExtensionError("vkCmdDispatchBaseKHR", VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_device_group) skip |= OutputExtensionError("vkCmdDispatchBaseKHR", VK_KHR_DEVICE_GROUP_EXTENSION_NAME);
+    // No xml-driven validation
+    if (!skip) skip |= manual_PreCallValidateCmdDispatchBaseKHR(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
+    return skip;
+}
+
+
+
+
+
+bool StatelessValidation::PreCallValidateTrimCommandPoolKHR(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_maintenance1) skip |= OutputExtensionError("vkTrimCommandPoolKHR", VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+    skip |= validate_required_handle("vkTrimCommandPoolKHR", "commandPool", commandPool);
+    skip |= validate_reserved_flags("vkTrimCommandPoolKHR", "flags", flags, "VUID-vkTrimCommandPool-flags-zerobitmask");
+    return skip;
+}
+
+
+
+bool StatelessValidation::PreCallValidateEnumeratePhysicalDeviceGroupsKHR(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_device_group_creation) skip |= OutputExtensionError("vkEnumeratePhysicalDeviceGroupsKHR", VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME);
+    skip |= validate_struct_type_array("vkEnumeratePhysicalDeviceGroupsKHR", "pPhysicalDeviceGroupCount", "pPhysicalDeviceGroupProperties", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES", pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES, true, false, false, "VUID-VkPhysicalDeviceGroupProperties-sType-sType", "VUID-vkEnumeratePhysicalDeviceGroups-pPhysicalDeviceGroupProperties-parameter", kVUIDUndefined);
+    if (pPhysicalDeviceGroupProperties != NULL)
+    {
+        for (uint32_t pPhysicalDeviceGroupIndex = 0; pPhysicalDeviceGroupIndex < *pPhysicalDeviceGroupCount; ++pPhysicalDeviceGroupIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    return skip;
+}
+
+
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceExternalBufferPropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalBufferInfo*   pExternalBufferInfo,
+    VkExternalBufferProperties*                 pExternalBufferProperties) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetPhysicalDeviceExternalBufferPropertiesKHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!instance_extensions.vk_khr_external_memory_capabilities) skip |= OutputExtensionError("vkGetPhysicalDeviceExternalBufferPropertiesKHR", VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetPhysicalDeviceExternalBufferPropertiesKHR", "pExternalBufferInfo", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO", pExternalBufferInfo, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO, true, "VUID-vkGetPhysicalDeviceExternalBufferProperties-pExternalBufferInfo-parameter", "VUID-VkPhysicalDeviceExternalBufferInfo-sType-sType");
+    if (pExternalBufferInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetPhysicalDeviceExternalBufferPropertiesKHR", "pExternalBufferInfo->pNext", NULL, pExternalBufferInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPhysicalDeviceExternalBufferInfo-pNext-pNext");
+
+        skip |= validate_flags("vkGetPhysicalDeviceExternalBufferPropertiesKHR", "pExternalBufferInfo->flags", "VkBufferCreateFlagBits", AllVkBufferCreateFlagBits, pExternalBufferInfo->flags, kOptionalFlags, "VUID-VkPhysicalDeviceExternalBufferInfo-flags-parameter");
+
+        skip |= validate_flags("vkGetPhysicalDeviceExternalBufferPropertiesKHR", "pExternalBufferInfo->usage", "VkBufferUsageFlagBits", AllVkBufferUsageFlagBits, pExternalBufferInfo->usage, kRequiredFlags, "VUID-VkPhysicalDeviceExternalBufferInfo-usage-parameter", "VUID-VkPhysicalDeviceExternalBufferInfo-usage-requiredbitmask");
+
+        skip |= validate_flags("vkGetPhysicalDeviceExternalBufferPropertiesKHR", "pExternalBufferInfo->handleType", "VkExternalMemoryHandleTypeFlagBits", AllVkExternalMemoryHandleTypeFlagBits, pExternalBufferInfo->handleType, kRequiredSingleBit, "VUID-VkPhysicalDeviceExternalBufferInfo-handleType-parameter", "VUID-VkPhysicalDeviceExternalBufferInfo-handleType-parameter");
+    }
+    skip |= validate_struct_type("vkGetPhysicalDeviceExternalBufferPropertiesKHR", "pExternalBufferProperties", "VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES", pExternalBufferProperties, VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES, true, "VUID-vkGetPhysicalDeviceExternalBufferProperties-pExternalBufferProperties-parameter", "VUID-VkExternalBufferProperties-sType-sType");
+    if (pExternalBufferProperties != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+
+
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+bool StatelessValidation::PreCallValidateGetMemoryWin32HandleKHR(
+    VkDevice                                    device,
+    const VkMemoryGetWin32HandleInfoKHR*        pGetWin32HandleInfo,
+    HANDLE*                                     pHandle) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_external_memory) skip |= OutputExtensionError("vkGetMemoryWin32HandleKHR", VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_external_memory_win32) skip |= OutputExtensionError("vkGetMemoryWin32HandleKHR", VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetMemoryWin32HandleKHR", "pGetWin32HandleInfo", "VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR", pGetWin32HandleInfo, VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR, true, "VUID-vkGetMemoryWin32HandleKHR-pGetWin32HandleInfo-parameter", "VUID-VkMemoryGetWin32HandleInfoKHR-sType-sType");
+    if (pGetWin32HandleInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetMemoryWin32HandleKHR", "pGetWin32HandleInfo->pNext", NULL, pGetWin32HandleInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkMemoryGetWin32HandleInfoKHR-pNext-pNext");
+
+        skip |= validate_required_handle("vkGetMemoryWin32HandleKHR", "pGetWin32HandleInfo->memory", pGetWin32HandleInfo->memory);
+
+        skip |= validate_flags("vkGetMemoryWin32HandleKHR", "pGetWin32HandleInfo->handleType", "VkExternalMemoryHandleTypeFlagBits", AllVkExternalMemoryHandleTypeFlagBits, pGetWin32HandleInfo->handleType, kRequiredSingleBit, "VUID-VkMemoryGetWin32HandleInfoKHR-handleType-parameter", "VUID-VkMemoryGetWin32HandleInfoKHR-handleType-parameter");
+    }
+    skip |= validate_required_pointer("vkGetMemoryWin32HandleKHR", "pHandle", pHandle, "VUID-vkGetMemoryWin32HandleKHR-pHandle-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetMemoryWin32HandlePropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    HANDLE                                      handle,
+    VkMemoryWin32HandlePropertiesKHR*           pMemoryWin32HandleProperties) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_external_memory) skip |= OutputExtensionError("vkGetMemoryWin32HandlePropertiesKHR", VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_external_memory_win32) skip |= OutputExtensionError("vkGetMemoryWin32HandlePropertiesKHR", VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME);
+    skip |= validate_flags("vkGetMemoryWin32HandlePropertiesKHR", "handleType", "VkExternalMemoryHandleTypeFlagBits", AllVkExternalMemoryHandleTypeFlagBits, handleType, kRequiredSingleBit, "VUID-vkGetMemoryWin32HandlePropertiesKHR-handleType-parameter", "VUID-vkGetMemoryWin32HandlePropertiesKHR-handleType-parameter");
+    skip |= validate_struct_type("vkGetMemoryWin32HandlePropertiesKHR", "pMemoryWin32HandleProperties", "VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR", pMemoryWin32HandleProperties, VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR, true, "VUID-vkGetMemoryWin32HandlePropertiesKHR-pMemoryWin32HandleProperties-parameter", "VUID-VkMemoryWin32HandlePropertiesKHR-sType-sType");
+    if (pMemoryWin32HandleProperties != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+bool StatelessValidation::PreCallValidateGetMemoryFdKHR(
+    VkDevice                                    device,
+    const VkMemoryGetFdInfoKHR*                 pGetFdInfo,
+    int*                                        pFd) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_external_memory) skip |= OutputExtensionError("vkGetMemoryFdKHR", VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_external_memory_fd) skip |= OutputExtensionError("vkGetMemoryFdKHR", VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetMemoryFdKHR", "pGetFdInfo", "VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR", pGetFdInfo, VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR, true, "VUID-vkGetMemoryFdKHR-pGetFdInfo-parameter", "VUID-VkMemoryGetFdInfoKHR-sType-sType");
+    if (pGetFdInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetMemoryFdKHR", "pGetFdInfo->pNext", NULL, pGetFdInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkMemoryGetFdInfoKHR-pNext-pNext");
+
+        skip |= validate_required_handle("vkGetMemoryFdKHR", "pGetFdInfo->memory", pGetFdInfo->memory);
+
+        skip |= validate_flags("vkGetMemoryFdKHR", "pGetFdInfo->handleType", "VkExternalMemoryHandleTypeFlagBits", AllVkExternalMemoryHandleTypeFlagBits, pGetFdInfo->handleType, kRequiredSingleBit, "VUID-VkMemoryGetFdInfoKHR-handleType-parameter", "VUID-VkMemoryGetFdInfoKHR-handleType-parameter");
+    }
+    skip |= validate_required_pointer("vkGetMemoryFdKHR", "pFd", pFd, "VUID-vkGetMemoryFdKHR-pFd-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetMemoryFdPropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    int                                         fd,
+    VkMemoryFdPropertiesKHR*                    pMemoryFdProperties) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_external_memory) skip |= OutputExtensionError("vkGetMemoryFdPropertiesKHR", VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_external_memory_fd) skip |= OutputExtensionError("vkGetMemoryFdPropertiesKHR", VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME);
+    skip |= validate_flags("vkGetMemoryFdPropertiesKHR", "handleType", "VkExternalMemoryHandleTypeFlagBits", AllVkExternalMemoryHandleTypeFlagBits, handleType, kRequiredSingleBit, "VUID-vkGetMemoryFdPropertiesKHR-handleType-parameter", "VUID-vkGetMemoryFdPropertiesKHR-handleType-parameter");
+    skip |= validate_struct_type("vkGetMemoryFdPropertiesKHR", "pMemoryFdProperties", "VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR", pMemoryFdProperties, VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR, true, "VUID-vkGetMemoryFdPropertiesKHR-pMemoryFdProperties-parameter", "VUID-VkMemoryFdPropertiesKHR-sType-sType");
+    if (pMemoryFdProperties != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceExternalSemaphorePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
+    VkExternalSemaphoreProperties*              pExternalSemaphoreProperties) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetPhysicalDeviceExternalSemaphorePropertiesKHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!instance_extensions.vk_khr_external_semaphore_capabilities) skip |= OutputExtensionError("vkGetPhysicalDeviceExternalSemaphorePropertiesKHR", VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetPhysicalDeviceExternalSemaphorePropertiesKHR", "pExternalSemaphoreInfo", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO", pExternalSemaphoreInfo, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO, true, "VUID-vkGetPhysicalDeviceExternalSemaphoreProperties-pExternalSemaphoreInfo-parameter", "VUID-VkPhysicalDeviceExternalSemaphoreInfo-sType-sType");
+    if (pExternalSemaphoreInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkPhysicalDeviceExternalSemaphoreInfo[] = { VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR };
+
+        skip |= validate_struct_pnext("vkGetPhysicalDeviceExternalSemaphorePropertiesKHR", "pExternalSemaphoreInfo->pNext", "VkSemaphoreTypeCreateInfoKHR", pExternalSemaphoreInfo->pNext, ARRAY_SIZE(allowed_structs_VkPhysicalDeviceExternalSemaphoreInfo), allowed_structs_VkPhysicalDeviceExternalSemaphoreInfo, GeneratedVulkanHeaderVersion, "VUID-VkPhysicalDeviceExternalSemaphoreInfo-pNext-pNext");
+
+        skip |= validate_flags("vkGetPhysicalDeviceExternalSemaphorePropertiesKHR", "pExternalSemaphoreInfo->handleType", "VkExternalSemaphoreHandleTypeFlagBits", AllVkExternalSemaphoreHandleTypeFlagBits, pExternalSemaphoreInfo->handleType, kRequiredSingleBit, "VUID-VkPhysicalDeviceExternalSemaphoreInfo-handleType-parameter", "VUID-VkPhysicalDeviceExternalSemaphoreInfo-handleType-parameter");
+    }
+    skip |= validate_struct_type("vkGetPhysicalDeviceExternalSemaphorePropertiesKHR", "pExternalSemaphoreProperties", "VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES", pExternalSemaphoreProperties, VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES, true, "VUID-vkGetPhysicalDeviceExternalSemaphoreProperties-pExternalSemaphoreProperties-parameter", "VUID-VkExternalSemaphoreProperties-sType-sType");
+    if (pExternalSemaphoreProperties != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+
+
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+bool StatelessValidation::PreCallValidateImportSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreWin32HandleInfoKHR*  pImportSemaphoreWin32HandleInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_external_semaphore) skip |= OutputExtensionError("vkImportSemaphoreWin32HandleKHR", VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_external_semaphore_win32) skip |= OutputExtensionError("vkImportSemaphoreWin32HandleKHR", VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME);
+    skip |= validate_struct_type("vkImportSemaphoreWin32HandleKHR", "pImportSemaphoreWin32HandleInfo", "VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR", pImportSemaphoreWin32HandleInfo, VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR, true, "VUID-vkImportSemaphoreWin32HandleKHR-pImportSemaphoreWin32HandleInfo-parameter", "VUID-VkImportSemaphoreWin32HandleInfoKHR-sType-sType");
+    if (pImportSemaphoreWin32HandleInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkImportSemaphoreWin32HandleKHR", "pImportSemaphoreWin32HandleInfo->pNext", NULL, pImportSemaphoreWin32HandleInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkImportSemaphoreWin32HandleInfoKHR-pNext-pNext");
+
+        skip |= validate_required_handle("vkImportSemaphoreWin32HandleKHR", "pImportSemaphoreWin32HandleInfo->semaphore", pImportSemaphoreWin32HandleInfo->semaphore);
+
+        skip |= validate_flags("vkImportSemaphoreWin32HandleKHR", "pImportSemaphoreWin32HandleInfo->flags", "VkSemaphoreImportFlagBits", AllVkSemaphoreImportFlagBits, pImportSemaphoreWin32HandleInfo->flags, kOptionalFlags, "VUID-VkImportSemaphoreWin32HandleInfoKHR-flags-parameter");
+
+        skip |= validate_flags("vkImportSemaphoreWin32HandleKHR", "pImportSemaphoreWin32HandleInfo->handleType", "VkExternalSemaphoreHandleTypeFlagBits", AllVkExternalSemaphoreHandleTypeFlagBits, pImportSemaphoreWin32HandleInfo->handleType, kOptionalSingleBit, "VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-parameter");
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetWin32HandleInfoKHR*     pGetWin32HandleInfo,
+    HANDLE*                                     pHandle) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_external_semaphore) skip |= OutputExtensionError("vkGetSemaphoreWin32HandleKHR", VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_external_semaphore_win32) skip |= OutputExtensionError("vkGetSemaphoreWin32HandleKHR", VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetSemaphoreWin32HandleKHR", "pGetWin32HandleInfo", "VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR", pGetWin32HandleInfo, VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR, true, "VUID-vkGetSemaphoreWin32HandleKHR-pGetWin32HandleInfo-parameter", "VUID-VkSemaphoreGetWin32HandleInfoKHR-sType-sType");
+    if (pGetWin32HandleInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetSemaphoreWin32HandleKHR", "pGetWin32HandleInfo->pNext", NULL, pGetWin32HandleInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkSemaphoreGetWin32HandleInfoKHR-pNext-pNext");
+
+        skip |= validate_required_handle("vkGetSemaphoreWin32HandleKHR", "pGetWin32HandleInfo->semaphore", pGetWin32HandleInfo->semaphore);
+
+        skip |= validate_flags("vkGetSemaphoreWin32HandleKHR", "pGetWin32HandleInfo->handleType", "VkExternalSemaphoreHandleTypeFlagBits", AllVkExternalSemaphoreHandleTypeFlagBits, pGetWin32HandleInfo->handleType, kRequiredSingleBit, "VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-parameter", "VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-parameter");
+    }
+    skip |= validate_required_pointer("vkGetSemaphoreWin32HandleKHR", "pHandle", pHandle, "VUID-vkGetSemaphoreWin32HandleKHR-pHandle-parameter");
+    return skip;
+}
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+bool StatelessValidation::PreCallValidateImportSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreFdInfoKHR*           pImportSemaphoreFdInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_external_semaphore) skip |= OutputExtensionError("vkImportSemaphoreFdKHR", VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_external_semaphore_fd) skip |= OutputExtensionError("vkImportSemaphoreFdKHR", VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME);
+    skip |= validate_struct_type("vkImportSemaphoreFdKHR", "pImportSemaphoreFdInfo", "VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR", pImportSemaphoreFdInfo, VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR, true, "VUID-vkImportSemaphoreFdKHR-pImportSemaphoreFdInfo-parameter", "VUID-VkImportSemaphoreFdInfoKHR-sType-sType");
+    if (pImportSemaphoreFdInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkImportSemaphoreFdKHR", "pImportSemaphoreFdInfo->pNext", NULL, pImportSemaphoreFdInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkImportSemaphoreFdInfoKHR-pNext-pNext");
+
+        skip |= validate_required_handle("vkImportSemaphoreFdKHR", "pImportSemaphoreFdInfo->semaphore", pImportSemaphoreFdInfo->semaphore);
+
+        skip |= validate_flags("vkImportSemaphoreFdKHR", "pImportSemaphoreFdInfo->flags", "VkSemaphoreImportFlagBits", AllVkSemaphoreImportFlagBits, pImportSemaphoreFdInfo->flags, kOptionalFlags, "VUID-VkImportSemaphoreFdInfoKHR-flags-parameter");
+
+        skip |= validate_flags("vkImportSemaphoreFdKHR", "pImportSemaphoreFdInfo->handleType", "VkExternalSemaphoreHandleTypeFlagBits", AllVkExternalSemaphoreHandleTypeFlagBits, pImportSemaphoreFdInfo->handleType, kRequiredSingleBit, "VUID-VkImportSemaphoreFdInfoKHR-handleType-parameter", "VUID-VkImportSemaphoreFdInfoKHR-handleType-parameter");
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetFdInfoKHR*              pGetFdInfo,
+    int*                                        pFd) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_external_semaphore) skip |= OutputExtensionError("vkGetSemaphoreFdKHR", VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_external_semaphore_fd) skip |= OutputExtensionError("vkGetSemaphoreFdKHR", VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetSemaphoreFdKHR", "pGetFdInfo", "VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR", pGetFdInfo, VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR, true, "VUID-vkGetSemaphoreFdKHR-pGetFdInfo-parameter", "VUID-VkSemaphoreGetFdInfoKHR-sType-sType");
+    if (pGetFdInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetSemaphoreFdKHR", "pGetFdInfo->pNext", NULL, pGetFdInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkSemaphoreGetFdInfoKHR-pNext-pNext");
+
+        skip |= validate_required_handle("vkGetSemaphoreFdKHR", "pGetFdInfo->semaphore", pGetFdInfo->semaphore);
+
+        skip |= validate_flags("vkGetSemaphoreFdKHR", "pGetFdInfo->handleType", "VkExternalSemaphoreHandleTypeFlagBits", AllVkExternalSemaphoreHandleTypeFlagBits, pGetFdInfo->handleType, kRequiredSingleBit, "VUID-VkSemaphoreGetFdInfoKHR-handleType-parameter", "VUID-VkSemaphoreGetFdInfoKHR-handleType-parameter");
+    }
+    skip |= validate_required_pointer("vkGetSemaphoreFdKHR", "pFd", pFd, "VUID-vkGetSemaphoreFdKHR-pFd-parameter");
+    return skip;
+}
+
+
+
+bool StatelessValidation::PreCallValidateCmdPushDescriptorSetKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdPushDescriptorSetKHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_push_descriptor) skip |= OutputExtensionError("vkCmdPushDescriptorSetKHR", VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+    skip |= validate_ranged_enum("vkCmdPushDescriptorSetKHR", "pipelineBindPoint", "VkPipelineBindPoint", AllVkPipelineBindPointEnums, pipelineBindPoint, "VUID-vkCmdPushDescriptorSetKHR-pipelineBindPoint-parameter");
+    skip |= validate_required_handle("vkCmdPushDescriptorSetKHR", "layout", layout);
+    skip |= validate_struct_type_array("vkCmdPushDescriptorSetKHR", "descriptorWriteCount", "pDescriptorWrites", "VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET", descriptorWriteCount, pDescriptorWrites, VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, true, true, "VUID-VkWriteDescriptorSet-sType-sType", "VUID-vkCmdPushDescriptorSetKHR-pDescriptorWrites-parameter", "VUID-vkCmdPushDescriptorSetKHR-descriptorWriteCount-arraylength");
+    if (pDescriptorWrites != NULL)
+    {
+        for (uint32_t descriptorWriteIndex = 0; descriptorWriteIndex < descriptorWriteCount; ++descriptorWriteIndex)
+        {
+            const VkStructureType allowed_structs_VkWriteDescriptorSet[] = { VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV, VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT };
+
+            skip |= validate_struct_pnext("vkCmdPushDescriptorSetKHR", ParameterName("pDescriptorWrites[%i].pNext", ParameterName::IndexVector{ descriptorWriteIndex }), "VkWriteDescriptorSetAccelerationStructureNV, VkWriteDescriptorSetInlineUniformBlockEXT", pDescriptorWrites[descriptorWriteIndex].pNext, ARRAY_SIZE(allowed_structs_VkWriteDescriptorSet), allowed_structs_VkWriteDescriptorSet, GeneratedVulkanHeaderVersion, "VUID-VkWriteDescriptorSet-pNext-pNext");
+
+            skip |= validate_ranged_enum("vkCmdPushDescriptorSetKHR", ParameterName("pDescriptorWrites[%i].descriptorType", ParameterName::IndexVector{ descriptorWriteIndex }), "VkDescriptorType", AllVkDescriptorTypeEnums, pDescriptorWrites[descriptorWriteIndex].descriptorType, "VUID-VkWriteDescriptorSet-descriptorType-parameter");
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdPushDescriptorSetWithTemplateKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    const void*                                 pData) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdPushDescriptorSetWithTemplateKHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_push_descriptor) skip |= OutputExtensionError("vkCmdPushDescriptorSetWithTemplateKHR", VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+    skip |= validate_required_handle("vkCmdPushDescriptorSetWithTemplateKHR", "descriptorUpdateTemplate", descriptorUpdateTemplate);
+    skip |= validate_required_handle("vkCmdPushDescriptorSetWithTemplateKHR", "layout", layout);
+    return skip;
+}
+
+
+
+
+
+
+
+
+
+bool StatelessValidation::PreCallValidateCreateDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_descriptor_update_template) skip |= OutputExtensionError("vkCreateDescriptorUpdateTemplateKHR", VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCreateDescriptorUpdateTemplateKHR", "pCreateInfo", "VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, true, "VUID-vkCreateDescriptorUpdateTemplate-pCreateInfo-parameter", "VUID-VkDescriptorUpdateTemplateCreateInfo-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCreateDescriptorUpdateTemplateKHR", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDescriptorUpdateTemplateCreateInfo-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkCreateDescriptorUpdateTemplateKHR", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkDescriptorUpdateTemplateCreateInfo-flags-zerobitmask");
+
+        skip |= validate_array("vkCreateDescriptorUpdateTemplateKHR", "pCreateInfo->descriptorUpdateEntryCount", "pCreateInfo->pDescriptorUpdateEntries", pCreateInfo->descriptorUpdateEntryCount, &pCreateInfo->pDescriptorUpdateEntries, true, true, "VUID-VkDescriptorUpdateTemplateCreateInfo-descriptorUpdateEntryCount-arraylength", "VUID-VkDescriptorUpdateTemplateCreateInfo-pDescriptorUpdateEntries-parameter");
+
+        if (pCreateInfo->pDescriptorUpdateEntries != NULL)
+        {
+            for (uint32_t descriptorUpdateEntryIndex = 0; descriptorUpdateEntryIndex < pCreateInfo->descriptorUpdateEntryCount; ++descriptorUpdateEntryIndex)
+            {
+                skip |= validate_ranged_enum("vkCreateDescriptorUpdateTemplateKHR", ParameterName("pCreateInfo->pDescriptorUpdateEntries[%i].descriptorType", ParameterName::IndexVector{ descriptorUpdateEntryIndex }), "VkDescriptorType", AllVkDescriptorTypeEnums, pCreateInfo->pDescriptorUpdateEntries[descriptorUpdateEntryIndex].descriptorType, "VUID-VkDescriptorUpdateTemplateEntry-descriptorType-parameter");
+            }
+        }
+
+        skip |= validate_ranged_enum("vkCreateDescriptorUpdateTemplateKHR", "pCreateInfo->templateType", "VkDescriptorUpdateTemplateType", AllVkDescriptorUpdateTemplateTypeEnums, pCreateInfo->templateType, "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-parameter");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateDescriptorUpdateTemplateKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateDescriptorUpdateTemplateKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateDescriptorUpdateTemplateKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateDescriptorUpdateTemplateKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateDescriptorUpdateTemplateKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateDescriptorUpdateTemplateKHR", "pDescriptorUpdateTemplate", pDescriptorUpdateTemplate, "VUID-vkCreateDescriptorUpdateTemplate-pDescriptorUpdateTemplate-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroyDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_descriptor_update_template) skip |= OutputExtensionError("vkDestroyDescriptorUpdateTemplateKHR", VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME);
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroyDescriptorUpdateTemplateKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroyDescriptorUpdateTemplateKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroyDescriptorUpdateTemplateKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyDescriptorUpdateTemplateKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyDescriptorUpdateTemplateKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateUpdateDescriptorSetWithTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_descriptor_update_template) skip |= OutputExtensionError("vkUpdateDescriptorSetWithTemplateKHR", VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME);
+    skip |= validate_required_handle("vkUpdateDescriptorSetWithTemplateKHR", "descriptorSet", descriptorSet);
+    skip |= validate_required_handle("vkUpdateDescriptorSetWithTemplateKHR", "descriptorUpdateTemplate", descriptorUpdateTemplate);
+    return skip;
+}
+
+
+
+
+
+bool StatelessValidation::PreCallValidateCreateRenderPass2KHR(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo2KHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_maintenance2) skip |= OutputExtensionError("vkCreateRenderPass2KHR", VK_KHR_MAINTENANCE2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_multiview) skip |= OutputExtensionError("vkCreateRenderPass2KHR", VK_KHR_MULTIVIEW_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_create_renderpass_2) skip |= OutputExtensionError("vkCreateRenderPass2KHR", VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCreateRenderPass2KHR", "pCreateInfo", "VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR", pCreateInfo, VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR, true, "VUID-vkCreateRenderPass2KHR-pCreateInfo-parameter", "VUID-VkRenderPassCreateInfo2KHR-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkRenderPassCreateInfo2KHR[] = { VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT };
+
+        skip |= validate_struct_pnext("vkCreateRenderPass2KHR", "pCreateInfo->pNext", "VkRenderPassFragmentDensityMapCreateInfoEXT", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkRenderPassCreateInfo2KHR), allowed_structs_VkRenderPassCreateInfo2KHR, GeneratedVulkanHeaderVersion, "VUID-VkRenderPassCreateInfo2KHR-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkCreateRenderPass2KHR", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkRenderPassCreateInfo2KHR-flags-zerobitmask");
+
+        skip |= validate_struct_type_array("vkCreateRenderPass2KHR", "pCreateInfo->attachmentCount", "pCreateInfo->pAttachments", "VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR", pCreateInfo->attachmentCount, pCreateInfo->pAttachments, VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR, false, true, "VUID-VkAttachmentDescription2KHR-sType-sType", "VUID-VkRenderPassCreateInfo2KHR-pAttachments-parameter", kVUIDUndefined);
+
+        if (pCreateInfo->pAttachments != NULL)
+        {
+            for (uint32_t attachmentIndex = 0; attachmentIndex < pCreateInfo->attachmentCount; ++attachmentIndex)
+            {
+                skip |= validate_flags("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pAttachments[%i].flags", ParameterName::IndexVector{ attachmentIndex }), "VkAttachmentDescriptionFlagBits", AllVkAttachmentDescriptionFlagBits, pCreateInfo->pAttachments[attachmentIndex].flags, kOptionalFlags, "VUID-VkAttachmentDescription2KHR-flags-parameter");
+
+                skip |= validate_ranged_enum("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pAttachments[%i].format", ParameterName::IndexVector{ attachmentIndex }), "VkFormat", AllVkFormatEnums, pCreateInfo->pAttachments[attachmentIndex].format, "VUID-VkAttachmentDescription2KHR-format-parameter");
+
+                skip |= validate_flags("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pAttachments[%i].samples", ParameterName::IndexVector{ attachmentIndex }), "VkSampleCountFlagBits", AllVkSampleCountFlagBits, pCreateInfo->pAttachments[attachmentIndex].samples, kRequiredSingleBit, "VUID-VkAttachmentDescription2KHR-samples-parameter", "VUID-VkAttachmentDescription2KHR-samples-parameter");
+
+                skip |= validate_ranged_enum("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pAttachments[%i].loadOp", ParameterName::IndexVector{ attachmentIndex }), "VkAttachmentLoadOp", AllVkAttachmentLoadOpEnums, pCreateInfo->pAttachments[attachmentIndex].loadOp, "VUID-VkAttachmentDescription2KHR-loadOp-parameter");
+
+                skip |= validate_ranged_enum("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pAttachments[%i].storeOp", ParameterName::IndexVector{ attachmentIndex }), "VkAttachmentStoreOp", AllVkAttachmentStoreOpEnums, pCreateInfo->pAttachments[attachmentIndex].storeOp, "VUID-VkAttachmentDescription2KHR-storeOp-parameter");
+
+                skip |= validate_ranged_enum("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pAttachments[%i].stencilLoadOp", ParameterName::IndexVector{ attachmentIndex }), "VkAttachmentLoadOp", AllVkAttachmentLoadOpEnums, pCreateInfo->pAttachments[attachmentIndex].stencilLoadOp, "VUID-VkAttachmentDescription2KHR-stencilLoadOp-parameter");
+
+                skip |= validate_ranged_enum("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pAttachments[%i].stencilStoreOp", ParameterName::IndexVector{ attachmentIndex }), "VkAttachmentStoreOp", AllVkAttachmentStoreOpEnums, pCreateInfo->pAttachments[attachmentIndex].stencilStoreOp, "VUID-VkAttachmentDescription2KHR-stencilStoreOp-parameter");
+
+                skip |= validate_ranged_enum("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pAttachments[%i].initialLayout", ParameterName::IndexVector{ attachmentIndex }), "VkImageLayout", AllVkImageLayoutEnums, pCreateInfo->pAttachments[attachmentIndex].initialLayout, "VUID-VkAttachmentDescription2KHR-initialLayout-parameter");
+
+                skip |= validate_ranged_enum("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pAttachments[%i].finalLayout", ParameterName::IndexVector{ attachmentIndex }), "VkImageLayout", AllVkImageLayoutEnums, pCreateInfo->pAttachments[attachmentIndex].finalLayout, "VUID-VkAttachmentDescription2KHR-finalLayout-parameter");
+            }
+        }
+
+        skip |= validate_struct_type_array("vkCreateRenderPass2KHR", "pCreateInfo->subpassCount", "pCreateInfo->pSubpasses", "VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR", pCreateInfo->subpassCount, pCreateInfo->pSubpasses, VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR, true, true, "VUID-VkSubpassDescription2KHR-sType-sType", "VUID-VkRenderPassCreateInfo2KHR-pSubpasses-parameter", "VUID-VkRenderPassCreateInfo2KHR-subpassCount-arraylength");
+
+        if (pCreateInfo->pSubpasses != NULL)
+        {
+            for (uint32_t subpassIndex = 0; subpassIndex < pCreateInfo->subpassCount; ++subpassIndex)
+            {
+                skip |= validate_flags("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pSubpasses[%i].flags", ParameterName::IndexVector{ subpassIndex }), "VkSubpassDescriptionFlagBits", AllVkSubpassDescriptionFlagBits, pCreateInfo->pSubpasses[subpassIndex].flags, kOptionalFlags, "VUID-VkSubpassDescription2KHR-flags-parameter");
+
+                skip |= validate_ranged_enum("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pSubpasses[%i].pipelineBindPoint", ParameterName::IndexVector{ subpassIndex }), "VkPipelineBindPoint", AllVkPipelineBindPointEnums, pCreateInfo->pSubpasses[subpassIndex].pipelineBindPoint, "VUID-VkSubpassDescription2KHR-pipelineBindPoint-parameter");
+
+                skip |= validate_struct_type_array("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pSubpasses[%i].inputAttachmentCount", ParameterName::IndexVector{ subpassIndex }), ParameterName("pCreateInfo->pSubpasses[%i].pInputAttachments", ParameterName::IndexVector{ subpassIndex }), "VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR", pCreateInfo->pSubpasses[subpassIndex].inputAttachmentCount, pCreateInfo->pSubpasses[subpassIndex].pInputAttachments, VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR, false, true, "VUID-VkAttachmentReference2KHR-sType-sType", "VUID-VkSubpassDescription2KHR-pInputAttachments-parameter", kVUIDUndefined);
+
+                if (pCreateInfo->pSubpasses[subpassIndex].pInputAttachments != NULL)
+                {
+                    for (uint32_t inputAttachmentIndex = 0; inputAttachmentIndex < pCreateInfo->pSubpasses[subpassIndex].inputAttachmentCount; ++inputAttachmentIndex)
+                    {
+                        skip |= validate_ranged_enum("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pSubpasses[%i].pInputAttachments[%i].layout", ParameterName::IndexVector{ subpassIndex, inputAttachmentIndex }), "VkImageLayout", AllVkImageLayoutEnums, pCreateInfo->pSubpasses[subpassIndex].pInputAttachments[inputAttachmentIndex].layout, "VUID-VkAttachmentReference2KHR-layout-parameter");
+                    }
+                }
+
+                skip |= validate_struct_type_array("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pSubpasses[%i].colorAttachmentCount", ParameterName::IndexVector{ subpassIndex }), ParameterName("pCreateInfo->pSubpasses[%i].pColorAttachments", ParameterName::IndexVector{ subpassIndex }), "VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR", pCreateInfo->pSubpasses[subpassIndex].colorAttachmentCount, pCreateInfo->pSubpasses[subpassIndex].pColorAttachments, VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR, false, true, "VUID-VkAttachmentReference2KHR-sType-sType", "VUID-VkSubpassDescription2KHR-pColorAttachments-parameter", kVUIDUndefined);
+
+                if (pCreateInfo->pSubpasses[subpassIndex].pColorAttachments != NULL)
+                {
+                    for (uint32_t colorAttachmentIndex = 0; colorAttachmentIndex < pCreateInfo->pSubpasses[subpassIndex].colorAttachmentCount; ++colorAttachmentIndex)
+                    {
+                        skip |= validate_ranged_enum("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pSubpasses[%i].pColorAttachments[%i].layout", ParameterName::IndexVector{ subpassIndex, colorAttachmentIndex }), "VkImageLayout", AllVkImageLayoutEnums, pCreateInfo->pSubpasses[subpassIndex].pColorAttachments[colorAttachmentIndex].layout, "VUID-VkAttachmentReference2KHR-layout-parameter");
+                    }
+                }
+
+                skip |= validate_struct_type_array("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pSubpasses[%i].colorAttachmentCount", ParameterName::IndexVector{ subpassIndex }), ParameterName("pCreateInfo->pSubpasses[%i].pResolveAttachments", ParameterName::IndexVector{ subpassIndex }), "VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR", pCreateInfo->pSubpasses[subpassIndex].colorAttachmentCount, pCreateInfo->pSubpasses[subpassIndex].pResolveAttachments, VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR, false, false, "VUID-VkAttachmentReference2KHR-sType-sType", "VUID-VkSubpassDescription2KHR-pResolveAttachments-parameter", kVUIDUndefined);
+
+                if (pCreateInfo->pSubpasses[subpassIndex].pResolveAttachments != NULL)
+                {
+                    for (uint32_t colorAttachmentIndex = 0; colorAttachmentIndex < pCreateInfo->pSubpasses[subpassIndex].colorAttachmentCount; ++colorAttachmentIndex)
+                    {
+                        skip |= validate_ranged_enum("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pSubpasses[%i].pResolveAttachments[%i].layout", ParameterName::IndexVector{ subpassIndex, colorAttachmentIndex }), "VkImageLayout", AllVkImageLayoutEnums, pCreateInfo->pSubpasses[subpassIndex].pResolveAttachments[colorAttachmentIndex].layout, "VUID-VkAttachmentReference2KHR-layout-parameter");
+                    }
+                }
+
+                skip |= validate_struct_type("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pSubpasses[%i].pDepthStencilAttachment", ParameterName::IndexVector{ subpassIndex }), "VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR", pCreateInfo->pSubpasses[subpassIndex].pDepthStencilAttachment, VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR, false, "VUID-VkSubpassDescription2KHR-pDepthStencilAttachment-parameter", "VUID-VkAttachmentReference2KHR-sType-sType");
+
+                if (pCreateInfo->pSubpasses[subpassIndex].pDepthStencilAttachment != NULL)
+                {
+                    skip |= validate_ranged_enum("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pSubpasses[%i].pDepthStencilAttachment->layout", ParameterName::IndexVector{ subpassIndex }), "VkImageLayout", AllVkImageLayoutEnums, pCreateInfo->pSubpasses[subpassIndex].pDepthStencilAttachment->layout, "VUID-VkAttachmentReference2KHR-layout-parameter");
+                }
+
+                skip |= validate_array("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pSubpasses[%i].preserveAttachmentCount", ParameterName::IndexVector{ subpassIndex }), ParameterName("pCreateInfo->pSubpasses[%i].pPreserveAttachments", ParameterName::IndexVector{ subpassIndex }), pCreateInfo->pSubpasses[subpassIndex].preserveAttachmentCount, &pCreateInfo->pSubpasses[subpassIndex].pPreserveAttachments, false, true, kVUIDUndefined, "VUID-VkSubpassDescription2KHR-pPreserveAttachments-parameter");
+            }
+        }
+
+        skip |= validate_struct_type_array("vkCreateRenderPass2KHR", "pCreateInfo->dependencyCount", "pCreateInfo->pDependencies", "VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR", pCreateInfo->dependencyCount, pCreateInfo->pDependencies, VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR, false, true, "VUID-VkSubpassDependency2KHR-sType-sType", "VUID-VkRenderPassCreateInfo2KHR-pDependencies-parameter", kVUIDUndefined);
+
+        if (pCreateInfo->pDependencies != NULL)
+        {
+            for (uint32_t dependencyIndex = 0; dependencyIndex < pCreateInfo->dependencyCount; ++dependencyIndex)
+            {
+                skip |= validate_flags("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pDependencies[%i].srcStageMask", ParameterName::IndexVector{ dependencyIndex }), "VkPipelineStageFlagBits", AllVkPipelineStageFlagBits, pCreateInfo->pDependencies[dependencyIndex].srcStageMask, kRequiredFlags, "VUID-VkSubpassDependency2KHR-srcStageMask-parameter", "VUID-VkSubpassDependency2KHR-srcStageMask-requiredbitmask");
+
+                skip |= validate_flags("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pDependencies[%i].dstStageMask", ParameterName::IndexVector{ dependencyIndex }), "VkPipelineStageFlagBits", AllVkPipelineStageFlagBits, pCreateInfo->pDependencies[dependencyIndex].dstStageMask, kRequiredFlags, "VUID-VkSubpassDependency2KHR-dstStageMask-parameter", "VUID-VkSubpassDependency2KHR-dstStageMask-requiredbitmask");
+
+                skip |= validate_flags("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pDependencies[%i].srcAccessMask", ParameterName::IndexVector{ dependencyIndex }), "VkAccessFlagBits", AllVkAccessFlagBits, pCreateInfo->pDependencies[dependencyIndex].srcAccessMask, kOptionalFlags, "VUID-VkSubpassDependency2KHR-srcAccessMask-parameter");
+
+                skip |= validate_flags("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pDependencies[%i].dstAccessMask", ParameterName::IndexVector{ dependencyIndex }), "VkAccessFlagBits", AllVkAccessFlagBits, pCreateInfo->pDependencies[dependencyIndex].dstAccessMask, kOptionalFlags, "VUID-VkSubpassDependency2KHR-dstAccessMask-parameter");
+
+                skip |= validate_flags("vkCreateRenderPass2KHR", ParameterName("pCreateInfo->pDependencies[%i].dependencyFlags", ParameterName::IndexVector{ dependencyIndex }), "VkDependencyFlagBits", AllVkDependencyFlagBits, pCreateInfo->pDependencies[dependencyIndex].dependencyFlags, kOptionalFlags, "VUID-VkSubpassDependency2KHR-dependencyFlags-parameter");
+            }
+        }
+
+        skip |= validate_array("vkCreateRenderPass2KHR", "pCreateInfo->correlatedViewMaskCount", "pCreateInfo->pCorrelatedViewMasks", pCreateInfo->correlatedViewMaskCount, &pCreateInfo->pCorrelatedViewMasks, false, true, kVUIDUndefined, "VUID-VkRenderPassCreateInfo2KHR-pCorrelatedViewMasks-parameter");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateRenderPass2KHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateRenderPass2KHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateRenderPass2KHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateRenderPass2KHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateRenderPass2KHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateRenderPass2KHR", "pRenderPass", pRenderPass, "VUID-vkCreateRenderPass2KHR-pRenderPass-parameter");
+    if (!skip) skip |= manual_PreCallValidateCreateRenderPass2KHR(device, pCreateInfo, pAllocator, pRenderPass);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdBeginRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_maintenance2) skip |= OutputExtensionError("vkCmdBeginRenderPass2KHR", VK_KHR_MAINTENANCE2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_multiview) skip |= OutputExtensionError("vkCmdBeginRenderPass2KHR", VK_KHR_MULTIVIEW_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_create_renderpass_2) skip |= OutputExtensionError("vkCmdBeginRenderPass2KHR", VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCmdBeginRenderPass2KHR", "pRenderPassBegin", "VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO", pRenderPassBegin, VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, true, "VUID-vkCmdBeginRenderPass2KHR-pRenderPassBegin-parameter", "VUID-VkRenderPassBeginInfo-sType-sType");
+    if (pRenderPassBegin != NULL)
+    {
+        const VkStructureType allowed_structs_VkRenderPassBeginInfo[] = { VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO, VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR, VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT };
+
+        skip |= validate_struct_pnext("vkCmdBeginRenderPass2KHR", "pRenderPassBegin->pNext", "VkDeviceGroupRenderPassBeginInfo, VkRenderPassAttachmentBeginInfoKHR, VkRenderPassSampleLocationsBeginInfoEXT", pRenderPassBegin->pNext, ARRAY_SIZE(allowed_structs_VkRenderPassBeginInfo), allowed_structs_VkRenderPassBeginInfo, GeneratedVulkanHeaderVersion, "VUID-VkRenderPassBeginInfo-pNext-pNext");
+
+        skip |= validate_required_handle("vkCmdBeginRenderPass2KHR", "pRenderPassBegin->renderPass", pRenderPassBegin->renderPass);
+
+        skip |= validate_required_handle("vkCmdBeginRenderPass2KHR", "pRenderPassBegin->framebuffer", pRenderPassBegin->framebuffer);
+
+        // No xml-driven validation
+
+        // No xml-driven validation
+
+        skip |= validate_array("vkCmdBeginRenderPass2KHR", "pRenderPassBegin->clearValueCount", "pRenderPassBegin->pClearValues", pRenderPassBegin->clearValueCount, &pRenderPassBegin->pClearValues, false, true, kVUIDUndefined, "VUID-VkRenderPassBeginInfo-pClearValues-parameter");
+
+        if (pRenderPassBegin->pClearValues != NULL)
+        {
+            for (uint32_t clearValueIndex = 0; clearValueIndex < pRenderPassBegin->clearValueCount; ++clearValueIndex)
+            {
+                // No xml-driven validation
+
+                // No xml-driven validation
+            }
+        }
+    }
+    skip |= validate_struct_type("vkCmdBeginRenderPass2KHR", "pSubpassBeginInfo", "VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR", pSubpassBeginInfo, VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR, true, "VUID-vkCmdBeginRenderPass2KHR-pSubpassBeginInfo-parameter", "VUID-VkSubpassBeginInfoKHR-sType-sType");
+    if (pSubpassBeginInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCmdBeginRenderPass2KHR", "pSubpassBeginInfo->pNext", NULL, pSubpassBeginInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkSubpassBeginInfoKHR-pNext-pNext");
+
+        skip |= validate_ranged_enum("vkCmdBeginRenderPass2KHR", "pSubpassBeginInfo->contents", "VkSubpassContents", AllVkSubpassContentsEnums, pSubpassBeginInfo->contents, "VUID-VkSubpassBeginInfoKHR-contents-parameter");
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdNextSubpass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_maintenance2) skip |= OutputExtensionError("vkCmdNextSubpass2KHR", VK_KHR_MAINTENANCE2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_multiview) skip |= OutputExtensionError("vkCmdNextSubpass2KHR", VK_KHR_MULTIVIEW_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_create_renderpass_2) skip |= OutputExtensionError("vkCmdNextSubpass2KHR", VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCmdNextSubpass2KHR", "pSubpassBeginInfo", "VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR", pSubpassBeginInfo, VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR, true, "VUID-vkCmdNextSubpass2KHR-pSubpassBeginInfo-parameter", "VUID-VkSubpassBeginInfoKHR-sType-sType");
+    if (pSubpassBeginInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCmdNextSubpass2KHR", "pSubpassBeginInfo->pNext", NULL, pSubpassBeginInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkSubpassBeginInfoKHR-pNext-pNext");
+
+        skip |= validate_ranged_enum("vkCmdNextSubpass2KHR", "pSubpassBeginInfo->contents", "VkSubpassContents", AllVkSubpassContentsEnums, pSubpassBeginInfo->contents, "VUID-VkSubpassBeginInfoKHR-contents-parameter");
+    }
+    skip |= validate_struct_type("vkCmdNextSubpass2KHR", "pSubpassEndInfo", "VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR", pSubpassEndInfo, VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR, true, "VUID-vkCmdNextSubpass2KHR-pSubpassEndInfo-parameter", "VUID-VkSubpassEndInfoKHR-sType-sType");
+    if (pSubpassEndInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCmdNextSubpass2KHR", "pSubpassEndInfo->pNext", NULL, pSubpassEndInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkSubpassEndInfoKHR-pNext-pNext");
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdEndRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_maintenance2) skip |= OutputExtensionError("vkCmdEndRenderPass2KHR", VK_KHR_MAINTENANCE2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_multiview) skip |= OutputExtensionError("vkCmdEndRenderPass2KHR", VK_KHR_MULTIVIEW_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_create_renderpass_2) skip |= OutputExtensionError("vkCmdEndRenderPass2KHR", VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCmdEndRenderPass2KHR", "pSubpassEndInfo", "VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR", pSubpassEndInfo, VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR, true, "VUID-vkCmdEndRenderPass2KHR-pSubpassEndInfo-parameter", "VUID-VkSubpassEndInfoKHR-sType-sType");
+    if (pSubpassEndInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCmdEndRenderPass2KHR", "pSubpassEndInfo->pNext", NULL, pSubpassEndInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkSubpassEndInfoKHR-pNext-pNext");
+    }
+    return skip;
+}
+
+
+
+bool StatelessValidation::PreCallValidateGetSwapchainStatusKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_surface_capabilities_2) skip |= OutputExtensionError("vkGetSwapchainStatusKHR", VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetSwapchainStatusKHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkGetSwapchainStatusKHR", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_shared_presentable_image) skip |= OutputExtensionError("vkGetSwapchainStatusKHR", VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME);
+    skip |= validate_required_handle("vkGetSwapchainStatusKHR", "swapchain", swapchain);
+    return skip;
+}
+
+
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceExternalFencePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalFenceInfo*    pExternalFenceInfo,
+    VkExternalFenceProperties*                  pExternalFenceProperties) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetPhysicalDeviceExternalFencePropertiesKHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!instance_extensions.vk_khr_external_fence_capabilities) skip |= OutputExtensionError("vkGetPhysicalDeviceExternalFencePropertiesKHR", VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetPhysicalDeviceExternalFencePropertiesKHR", "pExternalFenceInfo", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO", pExternalFenceInfo, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO, true, "VUID-vkGetPhysicalDeviceExternalFenceProperties-pExternalFenceInfo-parameter", "VUID-VkPhysicalDeviceExternalFenceInfo-sType-sType");
+    if (pExternalFenceInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetPhysicalDeviceExternalFencePropertiesKHR", "pExternalFenceInfo->pNext", NULL, pExternalFenceInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPhysicalDeviceExternalFenceInfo-pNext-pNext");
+
+        skip |= validate_flags("vkGetPhysicalDeviceExternalFencePropertiesKHR", "pExternalFenceInfo->handleType", "VkExternalFenceHandleTypeFlagBits", AllVkExternalFenceHandleTypeFlagBits, pExternalFenceInfo->handleType, kRequiredSingleBit, "VUID-VkPhysicalDeviceExternalFenceInfo-handleType-parameter", "VUID-VkPhysicalDeviceExternalFenceInfo-handleType-parameter");
+    }
+    skip |= validate_struct_type("vkGetPhysicalDeviceExternalFencePropertiesKHR", "pExternalFenceProperties", "VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES", pExternalFenceProperties, VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES, true, "VUID-vkGetPhysicalDeviceExternalFenceProperties-pExternalFenceProperties-parameter", "VUID-VkExternalFenceProperties-sType-sType");
+    if (pExternalFenceProperties != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+
+
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+bool StatelessValidation::PreCallValidateImportFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportFenceWin32HandleInfoKHR*      pImportFenceWin32HandleInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_external_fence) skip |= OutputExtensionError("vkImportFenceWin32HandleKHR", VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_external_fence_win32) skip |= OutputExtensionError("vkImportFenceWin32HandleKHR", VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME);
+    skip |= validate_struct_type("vkImportFenceWin32HandleKHR", "pImportFenceWin32HandleInfo", "VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR", pImportFenceWin32HandleInfo, VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR, true, "VUID-vkImportFenceWin32HandleKHR-pImportFenceWin32HandleInfo-parameter", "VUID-VkImportFenceWin32HandleInfoKHR-sType-sType");
+    if (pImportFenceWin32HandleInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkImportFenceWin32HandleKHR", "pImportFenceWin32HandleInfo->pNext", NULL, pImportFenceWin32HandleInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkImportFenceWin32HandleInfoKHR-pNext-pNext");
+
+        skip |= validate_required_handle("vkImportFenceWin32HandleKHR", "pImportFenceWin32HandleInfo->fence", pImportFenceWin32HandleInfo->fence);
+
+        skip |= validate_flags("vkImportFenceWin32HandleKHR", "pImportFenceWin32HandleInfo->flags", "VkFenceImportFlagBits", AllVkFenceImportFlagBits, pImportFenceWin32HandleInfo->flags, kOptionalFlags, "VUID-VkImportFenceWin32HandleInfoKHR-flags-parameter");
+
+        skip |= validate_flags("vkImportFenceWin32HandleKHR", "pImportFenceWin32HandleInfo->handleType", "VkExternalFenceHandleTypeFlagBits", AllVkExternalFenceHandleTypeFlagBits, pImportFenceWin32HandleInfo->handleType, kOptionalSingleBit, "VUID-VkImportFenceWin32HandleInfoKHR-handleType-parameter");
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkFenceGetWin32HandleInfoKHR*         pGetWin32HandleInfo,
+    HANDLE*                                     pHandle) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_external_fence) skip |= OutputExtensionError("vkGetFenceWin32HandleKHR", VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_external_fence_win32) skip |= OutputExtensionError("vkGetFenceWin32HandleKHR", VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetFenceWin32HandleKHR", "pGetWin32HandleInfo", "VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR", pGetWin32HandleInfo, VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR, true, "VUID-vkGetFenceWin32HandleKHR-pGetWin32HandleInfo-parameter", "VUID-VkFenceGetWin32HandleInfoKHR-sType-sType");
+    if (pGetWin32HandleInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetFenceWin32HandleKHR", "pGetWin32HandleInfo->pNext", NULL, pGetWin32HandleInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkFenceGetWin32HandleInfoKHR-pNext-pNext");
+
+        skip |= validate_required_handle("vkGetFenceWin32HandleKHR", "pGetWin32HandleInfo->fence", pGetWin32HandleInfo->fence);
+
+        skip |= validate_flags("vkGetFenceWin32HandleKHR", "pGetWin32HandleInfo->handleType", "VkExternalFenceHandleTypeFlagBits", AllVkExternalFenceHandleTypeFlagBits, pGetWin32HandleInfo->handleType, kRequiredSingleBit, "VUID-VkFenceGetWin32HandleInfoKHR-handleType-parameter", "VUID-VkFenceGetWin32HandleInfoKHR-handleType-parameter");
+    }
+    skip |= validate_required_pointer("vkGetFenceWin32HandleKHR", "pHandle", pHandle, "VUID-vkGetFenceWin32HandleKHR-pHandle-parameter");
+    return skip;
+}
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+bool StatelessValidation::PreCallValidateImportFenceFdKHR(
+    VkDevice                                    device,
+    const VkImportFenceFdInfoKHR*               pImportFenceFdInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_external_fence) skip |= OutputExtensionError("vkImportFenceFdKHR", VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_external_fence_fd) skip |= OutputExtensionError("vkImportFenceFdKHR", VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME);
+    skip |= validate_struct_type("vkImportFenceFdKHR", "pImportFenceFdInfo", "VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR", pImportFenceFdInfo, VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR, true, "VUID-vkImportFenceFdKHR-pImportFenceFdInfo-parameter", "VUID-VkImportFenceFdInfoKHR-sType-sType");
+    if (pImportFenceFdInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkImportFenceFdKHR", "pImportFenceFdInfo->pNext", NULL, pImportFenceFdInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkImportFenceFdInfoKHR-pNext-pNext");
+
+        skip |= validate_required_handle("vkImportFenceFdKHR", "pImportFenceFdInfo->fence", pImportFenceFdInfo->fence);
+
+        skip |= validate_flags("vkImportFenceFdKHR", "pImportFenceFdInfo->flags", "VkFenceImportFlagBits", AllVkFenceImportFlagBits, pImportFenceFdInfo->flags, kOptionalFlags, "VUID-VkImportFenceFdInfoKHR-flags-parameter");
+
+        skip |= validate_flags("vkImportFenceFdKHR", "pImportFenceFdInfo->handleType", "VkExternalFenceHandleTypeFlagBits", AllVkExternalFenceHandleTypeFlagBits, pImportFenceFdInfo->handleType, kRequiredSingleBit, "VUID-VkImportFenceFdInfoKHR-handleType-parameter", "VUID-VkImportFenceFdInfoKHR-handleType-parameter");
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetFenceFdKHR(
+    VkDevice                                    device,
+    const VkFenceGetFdInfoKHR*                  pGetFdInfo,
+    int*                                        pFd) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_external_fence) skip |= OutputExtensionError("vkGetFenceFdKHR", VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_external_fence_fd) skip |= OutputExtensionError("vkGetFenceFdKHR", VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetFenceFdKHR", "pGetFdInfo", "VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR", pGetFdInfo, VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR, true, "VUID-vkGetFenceFdKHR-pGetFdInfo-parameter", "VUID-VkFenceGetFdInfoKHR-sType-sType");
+    if (pGetFdInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetFenceFdKHR", "pGetFdInfo->pNext", NULL, pGetFdInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkFenceGetFdInfoKHR-pNext-pNext");
+
+        skip |= validate_required_handle("vkGetFenceFdKHR", "pGetFdInfo->fence", pGetFdInfo->fence);
+
+        skip |= validate_flags("vkGetFenceFdKHR", "pGetFdInfo->handleType", "VkExternalFenceHandleTypeFlagBits", AllVkExternalFenceHandleTypeFlagBits, pGetFdInfo->handleType, kRequiredSingleBit, "VUID-VkFenceGetFdInfoKHR-handleType-parameter", "VUID-VkFenceGetFdInfoKHR-handleType-parameter");
+    }
+    skip |= validate_required_pointer("vkGetFenceFdKHR", "pFd", pFd, "VUID-vkGetFenceFdKHR-pFd-parameter");
+    return skip;
+}
+
+
+
+bool StatelessValidation::PreCallValidateEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t*                                   pCounterCount,
+    VkPerformanceCounterKHR*                    pCounters,
+    VkPerformanceCounterDescriptionKHR*         pCounterDescriptions) const {
+    bool skip = false;
+    skip |= validate_struct_type_array("vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR", "pCounterCount", "pCounters", "VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR", pCounterCount, pCounters, VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR, true, false, false, "VUID-VkPerformanceCounterKHR-sType-sType", "VUID-vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR-pCounters-parameter", kVUIDUndefined);
+    if (pCounters != NULL)
+    {
+        for (uint32_t pIndexerIndex = 0; pIndexerIndex < *pCounterCount; ++pIndexerIndex)
+        {
+            skip |= validate_struct_pnext("vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR", ParameterName("pCounters[%i].pNext", ParameterName::IndexVector{ pIndexerIndex }), NULL, pCounters[pIndexerIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPerformanceCounterKHR-pNext-pNext");
+        }
+    }
+    skip |= validate_struct_type_array("vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR", "pCounterCount", "pCounterDescriptions", "VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR", pCounterCount, pCounterDescriptions, VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR, true, false, false, "VUID-VkPerformanceCounterDescriptionKHR-sType-sType", "VUID-vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR-pCounterDescriptions-parameter", kVUIDUndefined);
+    if (pCounterDescriptions != NULL)
+    {
+        for (uint32_t pIndexerIndex = 0; pIndexerIndex < *pCounterCount; ++pIndexerIndex)
+        {
+            skip |= validate_struct_pnext("vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR", ParameterName("pCounterDescriptions[%i].pNext", ParameterName::IndexVector{ pIndexerIndex }), NULL, pCounterDescriptions[pIndexerIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPerformanceCounterDescriptionKHR-pNext-pNext");
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkQueryPoolPerformanceCreateInfoKHR*  pPerformanceQueryCreateInfo,
+    uint32_t*                                   pNumPasses) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR", "pPerformanceQueryCreateInfo", "VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR", pPerformanceQueryCreateInfo, VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR, true, "VUID-vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR-pPerformanceQueryCreateInfo-parameter", "VUID-VkQueryPoolPerformanceCreateInfoKHR-sType-sType");
+    if (pPerformanceQueryCreateInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR", "pPerformanceQueryCreateInfo->pNext", NULL, pPerformanceQueryCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, kVUIDUndefined);
+
+        skip |= validate_array("vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR", "pPerformanceQueryCreateInfo->counterIndexCount", "pPerformanceQueryCreateInfo->pCounterIndices", pPerformanceQueryCreateInfo->counterIndexCount, &pPerformanceQueryCreateInfo->pCounterIndices, true, true, "VUID-VkQueryPoolPerformanceCreateInfoKHR-counterIndexCount-arraylength", "VUID-VkQueryPoolPerformanceCreateInfoKHR-pCounterIndices-parameter");
+    }
+    skip |= validate_required_pointer("vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR", "pNumPasses", pNumPasses, "VUID-vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR-pNumPasses-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateAcquireProfilingLockKHR(
+    VkDevice                                    device,
+    const VkAcquireProfilingLockInfoKHR*        pInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkAcquireProfilingLockKHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_performance_query) skip |= OutputExtensionError("vkAcquireProfilingLockKHR", VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME);
+    skip |= validate_struct_type("vkAcquireProfilingLockKHR", "pInfo", "VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR", pInfo, VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR, true, "VUID-vkAcquireProfilingLockKHR-pInfo-parameter", "VUID-VkAcquireProfilingLockInfoKHR-sType-sType");
+    if (pInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkAcquireProfilingLockKHR", "pInfo->pNext", NULL, pInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkAcquireProfilingLockInfoKHR-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkAcquireProfilingLockKHR", "pInfo->flags", pInfo->flags, "VUID-VkAcquireProfilingLockInfoKHR-flags-zerobitmask");
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateReleaseProfilingLockKHR(
+    VkDevice                                    device) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkReleaseProfilingLockKHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_performance_query) skip |= OutputExtensionError("vkReleaseProfilingLockKHR", VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME);
+    // No xml-driven validation
+    return skip;
+}
+
+
+
+
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceSurfaceCapabilities2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkSurfaceCapabilities2KHR*                  pSurfaceCapabilities) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceSurfaceCapabilities2KHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!instance_extensions.vk_khr_get_surface_capabilities_2) skip |= OutputExtensionError("vkGetPhysicalDeviceSurfaceCapabilities2KHR", VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetPhysicalDeviceSurfaceCapabilities2KHR", "pSurfaceInfo", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR", pSurfaceInfo, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR, true, "VUID-vkGetPhysicalDeviceSurfaceCapabilities2KHR-pSurfaceInfo-parameter", "VUID-VkPhysicalDeviceSurfaceInfo2KHR-sType-sType");
+    if (pSurfaceInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkPhysicalDeviceSurfaceInfo2KHR[] = { VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT, VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT };
+
+        skip |= validate_struct_pnext("vkGetPhysicalDeviceSurfaceCapabilities2KHR", "pSurfaceInfo->pNext", "VkSurfaceFullScreenExclusiveInfoEXT, VkSurfaceFullScreenExclusiveWin32InfoEXT", pSurfaceInfo->pNext, ARRAY_SIZE(allowed_structs_VkPhysicalDeviceSurfaceInfo2KHR), allowed_structs_VkPhysicalDeviceSurfaceInfo2KHR, GeneratedVulkanHeaderVersion, "VUID-VkPhysicalDeviceSurfaceInfo2KHR-pNext-pNext");
+
+        skip |= validate_required_handle("vkGetPhysicalDeviceSurfaceCapabilities2KHR", "pSurfaceInfo->surface", pSurfaceInfo->surface);
+    }
+    skip |= validate_struct_type("vkGetPhysicalDeviceSurfaceCapabilities2KHR", "pSurfaceCapabilities", "VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR", pSurfaceCapabilities, VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR, true, "VUID-vkGetPhysicalDeviceSurfaceCapabilities2KHR-pSurfaceCapabilities-parameter", "VUID-VkSurfaceCapabilities2KHR-sType-sType");
+    if (pSurfaceCapabilities != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceSurfaceFormats2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormat2KHR*                        pSurfaceFormats) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkGetPhysicalDeviceSurfaceFormats2KHR", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!instance_extensions.vk_khr_get_surface_capabilities_2) skip |= OutputExtensionError("vkGetPhysicalDeviceSurfaceFormats2KHR", VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetPhysicalDeviceSurfaceFormats2KHR", "pSurfaceInfo", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR", pSurfaceInfo, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR, true, "VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-pSurfaceInfo-parameter", "VUID-VkPhysicalDeviceSurfaceInfo2KHR-sType-sType");
+    if (pSurfaceInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkPhysicalDeviceSurfaceInfo2KHR[] = { VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT, VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT };
+
+        skip |= validate_struct_pnext("vkGetPhysicalDeviceSurfaceFormats2KHR", "pSurfaceInfo->pNext", "VkSurfaceFullScreenExclusiveInfoEXT, VkSurfaceFullScreenExclusiveWin32InfoEXT", pSurfaceInfo->pNext, ARRAY_SIZE(allowed_structs_VkPhysicalDeviceSurfaceInfo2KHR), allowed_structs_VkPhysicalDeviceSurfaceInfo2KHR, GeneratedVulkanHeaderVersion, "VUID-VkPhysicalDeviceSurfaceInfo2KHR-pNext-pNext");
+
+        skip |= validate_required_handle("vkGetPhysicalDeviceSurfaceFormats2KHR", "pSurfaceInfo->surface", pSurfaceInfo->surface);
+    }
+    skip |= validate_struct_type_array("vkGetPhysicalDeviceSurfaceFormats2KHR", "pSurfaceFormatCount", "pSurfaceFormats", "VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR", pSurfaceFormatCount, pSurfaceFormats, VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR, true, false, false, "VUID-VkSurfaceFormat2KHR-sType-sType", "VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-pSurfaceFormats-parameter", kVUIDUndefined);
+    if (pSurfaceFormats != NULL)
+    {
+        for (uint32_t pSurfaceFormatIndex = 0; pSurfaceFormatIndex < *pSurfaceFormatCount; ++pSurfaceFormatIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    return skip;
+}
+
+
+
+
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceDisplayProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayProperties2KHR*                    pProperties) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_display) skip |= OutputExtensionError("vkGetPhysicalDeviceDisplayProperties2KHR", VK_KHR_DISPLAY_EXTENSION_NAME);
+    if (!instance_extensions.vk_khr_get_display_properties_2) skip |= OutputExtensionError("vkGetPhysicalDeviceDisplayProperties2KHR", VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME);
+    skip |= validate_struct_type_array("vkGetPhysicalDeviceDisplayProperties2KHR", "pPropertyCount", "pProperties", "VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR", pPropertyCount, pProperties, VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR, true, false, false, "VUID-VkDisplayProperties2KHR-sType-sType", "VUID-vkGetPhysicalDeviceDisplayProperties2KHR-pProperties-parameter", kVUIDUndefined);
+    if (pProperties != NULL)
+    {
+        for (uint32_t pPropertyIndex = 0; pPropertyIndex < *pPropertyCount; ++pPropertyIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceDisplayPlaneProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPlaneProperties2KHR*               pProperties) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_display) skip |= OutputExtensionError("vkGetPhysicalDeviceDisplayPlaneProperties2KHR", VK_KHR_DISPLAY_EXTENSION_NAME);
+    if (!instance_extensions.vk_khr_get_display_properties_2) skip |= OutputExtensionError("vkGetPhysicalDeviceDisplayPlaneProperties2KHR", VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME);
+    skip |= validate_struct_type_array("vkGetPhysicalDeviceDisplayPlaneProperties2KHR", "pPropertyCount", "pProperties", "VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR", pPropertyCount, pProperties, VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR, true, false, false, "VUID-VkDisplayPlaneProperties2KHR-sType-sType", "VUID-vkGetPhysicalDeviceDisplayPlaneProperties2KHR-pProperties-parameter", kVUIDUndefined);
+    if (pProperties != NULL)
+    {
+        for (uint32_t pPropertyIndex = 0; pPropertyIndex < *pPropertyCount; ++pPropertyIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetDisplayModeProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModeProperties2KHR*                pProperties) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_display) skip |= OutputExtensionError("vkGetDisplayModeProperties2KHR", VK_KHR_DISPLAY_EXTENSION_NAME);
+    if (!instance_extensions.vk_khr_get_display_properties_2) skip |= OutputExtensionError("vkGetDisplayModeProperties2KHR", VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME);
+    skip |= validate_required_handle("vkGetDisplayModeProperties2KHR", "display", display);
+    skip |= validate_struct_type_array("vkGetDisplayModeProperties2KHR", "pPropertyCount", "pProperties", "VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR", pPropertyCount, pProperties, VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR, true, false, false, "VUID-VkDisplayModeProperties2KHR-sType-sType", "VUID-vkGetDisplayModeProperties2KHR-pProperties-parameter", kVUIDUndefined);
+    if (pProperties != NULL)
+    {
+        for (uint32_t pPropertyIndex = 0; pPropertyIndex < *pPropertyCount; ++pPropertyIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetDisplayPlaneCapabilities2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDisplayPlaneInfo2KHR*               pDisplayPlaneInfo,
+    VkDisplayPlaneCapabilities2KHR*             pCapabilities) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_display) skip |= OutputExtensionError("vkGetDisplayPlaneCapabilities2KHR", VK_KHR_DISPLAY_EXTENSION_NAME);
+    if (!instance_extensions.vk_khr_get_display_properties_2) skip |= OutputExtensionError("vkGetDisplayPlaneCapabilities2KHR", VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetDisplayPlaneCapabilities2KHR", "pDisplayPlaneInfo", "VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR", pDisplayPlaneInfo, VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR, true, "VUID-vkGetDisplayPlaneCapabilities2KHR-pDisplayPlaneInfo-parameter", "VUID-VkDisplayPlaneInfo2KHR-sType-sType");
+    if (pDisplayPlaneInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetDisplayPlaneCapabilities2KHR", "pDisplayPlaneInfo->pNext", NULL, pDisplayPlaneInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDisplayPlaneInfo2KHR-pNext-pNext");
+
+        skip |= validate_required_handle("vkGetDisplayPlaneCapabilities2KHR", "pDisplayPlaneInfo->mode", pDisplayPlaneInfo->mode);
+    }
+    skip |= validate_struct_type("vkGetDisplayPlaneCapabilities2KHR", "pCapabilities", "VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR", pCapabilities, VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR, true, "VUID-vkGetDisplayPlaneCapabilities2KHR-pCapabilities-parameter", "VUID-VkDisplayPlaneCapabilities2KHR-sType-sType");
+    if (pCapabilities != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+
+
+
+
+
+
+
+
+bool StatelessValidation::PreCallValidateGetImageMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkGetImageMemoryRequirements2KHR", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetImageMemoryRequirements2KHR", "pInfo", "VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2", pInfo, VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, true, "VUID-vkGetImageMemoryRequirements2-pInfo-parameter", "VUID-VkImageMemoryRequirementsInfo2-sType-sType");
+    if (pInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkImageMemoryRequirementsInfo2[] = { VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO };
+
+        skip |= validate_struct_pnext("vkGetImageMemoryRequirements2KHR", "pInfo->pNext", "VkImagePlaneMemoryRequirementsInfo", pInfo->pNext, ARRAY_SIZE(allowed_structs_VkImageMemoryRequirementsInfo2), allowed_structs_VkImageMemoryRequirementsInfo2, GeneratedVulkanHeaderVersion, "VUID-VkImageMemoryRequirementsInfo2-pNext-pNext");
+
+        skip |= validate_required_handle("vkGetImageMemoryRequirements2KHR", "pInfo->image", pInfo->image);
+    }
+    skip |= validate_struct_type("vkGetImageMemoryRequirements2KHR", "pMemoryRequirements", "VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2", pMemoryRequirements, VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, true, "VUID-vkGetImageMemoryRequirements2-pMemoryRequirements-parameter", "VUID-VkMemoryRequirements2-sType-sType");
+    if (pMemoryRequirements != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetBufferMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkGetBufferMemoryRequirements2KHR", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetBufferMemoryRequirements2KHR", "pInfo", "VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2", pInfo, VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2, true, "VUID-vkGetBufferMemoryRequirements2-pInfo-parameter", "VUID-VkBufferMemoryRequirementsInfo2-sType-sType");
+    if (pInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetBufferMemoryRequirements2KHR", "pInfo->pNext", NULL, pInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkBufferMemoryRequirementsInfo2-pNext-pNext");
+
+        skip |= validate_required_handle("vkGetBufferMemoryRequirements2KHR", "pInfo->buffer", pInfo->buffer);
+    }
+    skip |= validate_struct_type("vkGetBufferMemoryRequirements2KHR", "pMemoryRequirements", "VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2", pMemoryRequirements, VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, true, "VUID-vkGetBufferMemoryRequirements2-pMemoryRequirements-parameter", "VUID-VkMemoryRequirements2-sType-sType");
+    if (pMemoryRequirements != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetImageSparseMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkGetImageSparseMemoryRequirements2KHR", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetImageSparseMemoryRequirements2KHR", "pInfo", "VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2", pInfo, VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2, true, "VUID-vkGetImageSparseMemoryRequirements2-pInfo-parameter", "VUID-VkImageSparseMemoryRequirementsInfo2-sType-sType");
+    if (pInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetImageSparseMemoryRequirements2KHR", "pInfo->pNext", NULL, pInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkImageSparseMemoryRequirementsInfo2-pNext-pNext");
+
+        skip |= validate_required_handle("vkGetImageSparseMemoryRequirements2KHR", "pInfo->image", pInfo->image);
+    }
+    skip |= validate_struct_type_array("vkGetImageSparseMemoryRequirements2KHR", "pSparseMemoryRequirementCount", "pSparseMemoryRequirements", "VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2", pSparseMemoryRequirementCount, pSparseMemoryRequirements, VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2, true, false, false, "VUID-VkSparseImageMemoryRequirements2-sType-sType", "VUID-vkGetImageSparseMemoryRequirements2-pSparseMemoryRequirements-parameter", kVUIDUndefined);
+    if (pSparseMemoryRequirements != NULL)
+    {
+        for (uint32_t pSparseMemoryRequirementIndex = 0; pSparseMemoryRequirementIndex < *pSparseMemoryRequirementCount; ++pSparseMemoryRequirementIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    return skip;
+}
+
+
+
+
+
+bool StatelessValidation::PreCallValidateCreateSamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCreateSamplerYcbcrConversionKHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkCreateSamplerYcbcrConversionKHR", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_bind_memory_2) skip |= OutputExtensionError("vkCreateSamplerYcbcrConversionKHR", VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_maintenance1) skip |= OutputExtensionError("vkCreateSamplerYcbcrConversionKHR", VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_sampler_ycbcr_conversion) skip |= OutputExtensionError("vkCreateSamplerYcbcrConversionKHR", VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCreateSamplerYcbcrConversionKHR", "pCreateInfo", "VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, true, "VUID-vkCreateSamplerYcbcrConversion-pCreateInfo-parameter", "VUID-VkSamplerYcbcrConversionCreateInfo-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkSamplerYcbcrConversionCreateInfo[] = { VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID };
+
+        skip |= validate_struct_pnext("vkCreateSamplerYcbcrConversionKHR", "pCreateInfo->pNext", "VkExternalFormatANDROID", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkSamplerYcbcrConversionCreateInfo), allowed_structs_VkSamplerYcbcrConversionCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkSamplerYcbcrConversionCreateInfo-pNext-pNext");
+
+        skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversionKHR", "pCreateInfo->format", "VkFormat", AllVkFormatEnums, pCreateInfo->format, "VUID-VkSamplerYcbcrConversionCreateInfo-format-parameter");
+
+        skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversionKHR", "pCreateInfo->ycbcrModel", "VkSamplerYcbcrModelConversion", AllVkSamplerYcbcrModelConversionEnums, pCreateInfo->ycbcrModel, "VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrModel-parameter");
+
+        skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversionKHR", "pCreateInfo->ycbcrRange", "VkSamplerYcbcrRange", AllVkSamplerYcbcrRangeEnums, pCreateInfo->ycbcrRange, "VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrRange-parameter");
+
+        skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversionKHR", "pCreateInfo->components.r", "VkComponentSwizzle", AllVkComponentSwizzleEnums, pCreateInfo->components.r, "VUID-VkComponentMapping-r-parameter");
+
+        skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversionKHR", "pCreateInfo->components.g", "VkComponentSwizzle", AllVkComponentSwizzleEnums, pCreateInfo->components.g, "VUID-VkComponentMapping-g-parameter");
+
+        skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversionKHR", "pCreateInfo->components.b", "VkComponentSwizzle", AllVkComponentSwizzleEnums, pCreateInfo->components.b, "VUID-VkComponentMapping-b-parameter");
+
+        skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversionKHR", "pCreateInfo->components.a", "VkComponentSwizzle", AllVkComponentSwizzleEnums, pCreateInfo->components.a, "VUID-VkComponentMapping-a-parameter");
+
+        skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversionKHR", "pCreateInfo->xChromaOffset", "VkChromaLocation", AllVkChromaLocationEnums, pCreateInfo->xChromaOffset, "VUID-VkSamplerYcbcrConversionCreateInfo-xChromaOffset-parameter");
+
+        skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversionKHR", "pCreateInfo->yChromaOffset", "VkChromaLocation", AllVkChromaLocationEnums, pCreateInfo->yChromaOffset, "VUID-VkSamplerYcbcrConversionCreateInfo-yChromaOffset-parameter");
+
+        skip |= validate_ranged_enum("vkCreateSamplerYcbcrConversionKHR", "pCreateInfo->chromaFilter", "VkFilter", AllVkFilterEnums, pCreateInfo->chromaFilter, "VUID-VkSamplerYcbcrConversionCreateInfo-chromaFilter-parameter");
+
+        skip |= validate_bool32("vkCreateSamplerYcbcrConversionKHR", "pCreateInfo->forceExplicitReconstruction", pCreateInfo->forceExplicitReconstruction);
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateSamplerYcbcrConversionKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateSamplerYcbcrConversionKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateSamplerYcbcrConversionKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateSamplerYcbcrConversionKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateSamplerYcbcrConversionKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateSamplerYcbcrConversionKHR", "pYcbcrConversion", pYcbcrConversion, "VUID-vkCreateSamplerYcbcrConversion-pYcbcrConversion-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroySamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkDestroySamplerYcbcrConversionKHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkDestroySamplerYcbcrConversionKHR", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_bind_memory_2) skip |= OutputExtensionError("vkDestroySamplerYcbcrConversionKHR", VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_maintenance1) skip |= OutputExtensionError("vkDestroySamplerYcbcrConversionKHR", VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_sampler_ycbcr_conversion) skip |= OutputExtensionError("vkDestroySamplerYcbcrConversionKHR", VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroySamplerYcbcrConversionKHR", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroySamplerYcbcrConversionKHR", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroySamplerYcbcrConversionKHR", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroySamplerYcbcrConversionKHR", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroySamplerYcbcrConversionKHR", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
+
+
+bool StatelessValidation::PreCallValidateBindBufferMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_bind_memory_2) skip |= OutputExtensionError("vkBindBufferMemory2KHR", VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+    skip |= validate_struct_type_array("vkBindBufferMemory2KHR", "bindInfoCount", "pBindInfos", "VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO", bindInfoCount, pBindInfos, VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO, true, true, "VUID-VkBindBufferMemoryInfo-sType-sType", "VUID-vkBindBufferMemory2-pBindInfos-parameter", "VUID-vkBindBufferMemory2-bindInfoCount-arraylength");
+    if (pBindInfos != NULL)
+    {
+        for (uint32_t bindInfoIndex = 0; bindInfoIndex < bindInfoCount; ++bindInfoIndex)
+        {
+            const VkStructureType allowed_structs_VkBindBufferMemoryInfo[] = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO };
+
+            skip |= validate_struct_pnext("vkBindBufferMemory2KHR", ParameterName("pBindInfos[%i].pNext", ParameterName::IndexVector{ bindInfoIndex }), "VkBindBufferMemoryDeviceGroupInfo", pBindInfos[bindInfoIndex].pNext, ARRAY_SIZE(allowed_structs_VkBindBufferMemoryInfo), allowed_structs_VkBindBufferMemoryInfo, GeneratedVulkanHeaderVersion, "VUID-VkBindBufferMemoryInfo-pNext-pNext");
+
+            skip |= validate_required_handle("vkBindBufferMemory2KHR", ParameterName("pBindInfos[%i].buffer", ParameterName::IndexVector{ bindInfoIndex }), pBindInfos[bindInfoIndex].buffer);
+
+            skip |= validate_required_handle("vkBindBufferMemory2KHR", ParameterName("pBindInfos[%i].memory", ParameterName::IndexVector{ bindInfoIndex }), pBindInfos[bindInfoIndex].memory);
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateBindImageMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_bind_memory_2) skip |= OutputExtensionError("vkBindImageMemory2KHR", VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+    skip |= validate_struct_type_array("vkBindImageMemory2KHR", "bindInfoCount", "pBindInfos", "VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO", bindInfoCount, pBindInfos, VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, true, true, "VUID-VkBindImageMemoryInfo-sType-sType", "VUID-vkBindImageMemory2-pBindInfos-parameter", "VUID-vkBindImageMemory2-bindInfoCount-arraylength");
+    if (pBindInfos != NULL)
+    {
+        for (uint32_t bindInfoIndex = 0; bindInfoIndex < bindInfoCount; ++bindInfoIndex)
+        {
+            const VkStructureType allowed_structs_VkBindImageMemoryInfo[] = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO, VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR, VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO };
+
+            skip |= validate_struct_pnext("vkBindImageMemory2KHR", ParameterName("pBindInfos[%i].pNext", ParameterName::IndexVector{ bindInfoIndex }), "VkBindImageMemoryDeviceGroupInfo, VkBindImageMemorySwapchainInfoKHR, VkBindImagePlaneMemoryInfo", pBindInfos[bindInfoIndex].pNext, ARRAY_SIZE(allowed_structs_VkBindImageMemoryInfo), allowed_structs_VkBindImageMemoryInfo, GeneratedVulkanHeaderVersion, "VUID-VkBindImageMemoryInfo-pNext-pNext");
+
+            skip |= validate_required_handle("vkBindImageMemory2KHR", ParameterName("pBindInfos[%i].image", ParameterName::IndexVector{ bindInfoIndex }), pBindInfos[bindInfoIndex].image);
+        }
+    }
+    return skip;
+}
+
+
+
+bool StatelessValidation::PreCallValidateGetDescriptorSetLayoutSupportKHR(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetDescriptorSetLayoutSupportKHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_maintenance3) skip |= OutputExtensionError("vkGetDescriptorSetLayoutSupportKHR", VK_KHR_MAINTENANCE3_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetDescriptorSetLayoutSupportKHR", "pCreateInfo", "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO", pCreateInfo, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, true, "VUID-vkGetDescriptorSetLayoutSupport-pCreateInfo-parameter", "VUID-VkDescriptorSetLayoutCreateInfo-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkDescriptorSetLayoutCreateInfo[] = { VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT };
+
+        skip |= validate_struct_pnext("vkGetDescriptorSetLayoutSupportKHR", "pCreateInfo->pNext", "VkDescriptorSetLayoutBindingFlagsCreateInfoEXT", pCreateInfo->pNext, ARRAY_SIZE(allowed_structs_VkDescriptorSetLayoutCreateInfo), allowed_structs_VkDescriptorSetLayoutCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkDescriptorSetLayoutCreateInfo-pNext-pNext");
+
+        skip |= validate_flags("vkGetDescriptorSetLayoutSupportKHR", "pCreateInfo->flags", "VkDescriptorSetLayoutCreateFlagBits", AllVkDescriptorSetLayoutCreateFlagBits, pCreateInfo->flags, kOptionalFlags, "VUID-VkDescriptorSetLayoutCreateInfo-flags-parameter");
+
+        skip |= validate_array("vkGetDescriptorSetLayoutSupportKHR", "pCreateInfo->bindingCount", "pCreateInfo->pBindings", pCreateInfo->bindingCount, &pCreateInfo->pBindings, false, true, kVUIDUndefined, "VUID-VkDescriptorSetLayoutCreateInfo-pBindings-parameter");
+
+        if (pCreateInfo->pBindings != NULL)
+        {
+            for (uint32_t bindingIndex = 0; bindingIndex < pCreateInfo->bindingCount; ++bindingIndex)
+            {
+                skip |= validate_ranged_enum("vkGetDescriptorSetLayoutSupportKHR", ParameterName("pCreateInfo->pBindings[%i].descriptorType", ParameterName::IndexVector{ bindingIndex }), "VkDescriptorType", AllVkDescriptorTypeEnums, pCreateInfo->pBindings[bindingIndex].descriptorType, "VUID-VkDescriptorSetLayoutBinding-descriptorType-parameter");
+            }
+        }
+    }
+    skip |= validate_struct_type("vkGetDescriptorSetLayoutSupportKHR", "pSupport", "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT", pSupport, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, true, "VUID-vkGetDescriptorSetLayoutSupport-pSupport-parameter", "VUID-VkDescriptorSetLayoutSupport-sType-sType");
+    if (pSupport != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+
+
+bool StatelessValidation::PreCallValidateCmdDrawIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_draw_indirect_count) skip |= OutputExtensionError("vkCmdDrawIndirectCountKHR", VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME);
+    skip |= validate_required_handle("vkCmdDrawIndirectCountKHR", "buffer", buffer);
+    skip |= validate_required_handle("vkCmdDrawIndirectCountKHR", "countBuffer", countBuffer);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdDrawIndexedIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_draw_indirect_count) skip |= OutputExtensionError("vkCmdDrawIndexedIndirectCountKHR", VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME);
+    skip |= validate_required_handle("vkCmdDrawIndexedIndirectCountKHR", "buffer", buffer);
+    skip |= validate_required_handle("vkCmdDrawIndexedIndirectCountKHR", "countBuffer", countBuffer);
+    return skip;
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+bool StatelessValidation::PreCallValidateGetSemaphoreCounterValueKHR(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    uint64_t*                                   pValue) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetSemaphoreCounterValueKHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_timeline_semaphore) skip |= OutputExtensionError("vkGetSemaphoreCounterValueKHR", VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME);
+    skip |= validate_required_handle("vkGetSemaphoreCounterValueKHR", "semaphore", semaphore);
+    skip |= validate_required_pointer("vkGetSemaphoreCounterValueKHR", "pValue", pValue, "VUID-vkGetSemaphoreCounterValueKHR-pValue-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateWaitSemaphoresKHR(
+    VkDevice                                    device,
+    const VkSemaphoreWaitInfoKHR*               pWaitInfo,
+    uint64_t                                    timeout) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkWaitSemaphoresKHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_timeline_semaphore) skip |= OutputExtensionError("vkWaitSemaphoresKHR", VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME);
+    skip |= validate_struct_type("vkWaitSemaphoresKHR", "pWaitInfo", "VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR", pWaitInfo, VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR, true, "VUID-vkWaitSemaphoresKHR-pWaitInfo-parameter", "VUID-VkSemaphoreWaitInfoKHR-sType-sType");
+    if (pWaitInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkWaitSemaphoresKHR", "pWaitInfo->pNext", NULL, pWaitInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkSemaphoreWaitInfoKHR-pNext-pNext");
+
+        skip |= validate_flags("vkWaitSemaphoresKHR", "pWaitInfo->flags", "VkSemaphoreWaitFlagBitsKHR", AllVkSemaphoreWaitFlagBitsKHR, pWaitInfo->flags, kOptionalFlags, "VUID-VkSemaphoreWaitInfoKHR-flags-parameter");
+
+        skip |= validate_handle_array("vkWaitSemaphoresKHR", "pWaitInfo->semaphoreCount", "pWaitInfo->pSemaphores", pWaitInfo->semaphoreCount, pWaitInfo->pSemaphores, true, true);
+
+        skip |= validate_array("vkWaitSemaphoresKHR", "pWaitInfo->semaphoreCount", "pWaitInfo->pValues", pWaitInfo->semaphoreCount, &pWaitInfo->pValues, true, true, "VUID-VkSemaphoreWaitInfoKHR-semaphoreCount-arraylength", "VUID-VkSemaphoreWaitInfoKHR-pValues-parameter");
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateSignalSemaphoreKHR(
+    VkDevice                                    device,
+    const VkSemaphoreSignalInfoKHR*             pSignalInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkSignalSemaphoreKHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_timeline_semaphore) skip |= OutputExtensionError("vkSignalSemaphoreKHR", VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME);
+    skip |= validate_struct_type("vkSignalSemaphoreKHR", "pSignalInfo", "VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR", pSignalInfo, VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR, true, "VUID-vkSignalSemaphoreKHR-pSignalInfo-parameter", "VUID-VkSemaphoreSignalInfoKHR-sType-sType");
+    if (pSignalInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkSignalSemaphoreKHR", "pSignalInfo->pNext", NULL, pSignalInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkSemaphoreSignalInfoKHR-pNext-pNext");
+
+        skip |= validate_required_handle("vkSignalSemaphoreKHR", "pSignalInfo->semaphore", pSignalInfo->semaphore);
+    }
+    return skip;
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+bool StatelessValidation::PreCallValidateGetBufferDeviceAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetBufferDeviceAddressKHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_buffer_device_address) skip |= OutputExtensionError("vkGetBufferDeviceAddressKHR", VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetBufferDeviceAddressKHR", "pInfo", "VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR", pInfo, VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR, true, "VUID-vkGetBufferDeviceAddressKHR-pInfo-parameter", "VUID-VkBufferDeviceAddressInfoKHR-sType-sType");
+    if (pInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetBufferDeviceAddressKHR", "pInfo->pNext", NULL, pInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkBufferDeviceAddressInfoKHR-pNext-pNext");
+
+        skip |= validate_required_handle("vkGetBufferDeviceAddressKHR", "pInfo->buffer", pInfo->buffer);
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetBufferOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetBufferOpaqueCaptureAddressKHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_buffer_device_address) skip |= OutputExtensionError("vkGetBufferOpaqueCaptureAddressKHR", VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetBufferOpaqueCaptureAddressKHR", "pInfo", "VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR", pInfo, VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR, true, "VUID-vkGetBufferOpaqueCaptureAddressKHR-pInfo-parameter", "VUID-VkBufferDeviceAddressInfoKHR-sType-sType");
+    if (pInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetBufferOpaqueCaptureAddressKHR", "pInfo->pNext", NULL, pInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkBufferDeviceAddressInfoKHR-pNext-pNext");
+
+        skip |= validate_required_handle("vkGetBufferOpaqueCaptureAddressKHR", "pInfo->buffer", pInfo->buffer);
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetDeviceMemoryOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetDeviceMemoryOpaqueCaptureAddressKHR", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_buffer_device_address) skip |= OutputExtensionError("vkGetDeviceMemoryOpaqueCaptureAddressKHR", VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetDeviceMemoryOpaqueCaptureAddressKHR", "pInfo", "VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR", pInfo, VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR, true, "VUID-vkGetDeviceMemoryOpaqueCaptureAddressKHR-pInfo-parameter", "VUID-VkDeviceMemoryOpaqueCaptureAddressInfoKHR-sType-sType");
+    if (pInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetDeviceMemoryOpaqueCaptureAddressKHR", "pInfo->pNext", NULL, pInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDeviceMemoryOpaqueCaptureAddressInfoKHR-pNext-pNext");
+
+        skip |= validate_required_handle("vkGetDeviceMemoryOpaqueCaptureAddressKHR", "pInfo->memory", pInfo->memory);
+    }
+    return skip;
+}
+
+
+
+bool StatelessValidation::PreCallValidateGetPipelineExecutablePropertiesKHR(
+    VkDevice                                    device,
+    const VkPipelineInfoKHR*                    pPipelineInfo,
+    uint32_t*                                   pExecutableCount,
+    VkPipelineExecutablePropertiesKHR*          pProperties) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_pipeline_executable_properties) skip |= OutputExtensionError("vkGetPipelineExecutablePropertiesKHR", VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetPipelineExecutablePropertiesKHR", "pPipelineInfo", "VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR", pPipelineInfo, VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR, true, "VUID-vkGetPipelineExecutablePropertiesKHR-pPipelineInfo-parameter", "VUID-VkPipelineInfoKHR-sType-sType");
+    if (pPipelineInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetPipelineExecutablePropertiesKHR", "pPipelineInfo->pNext", NULL, pPipelineInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPipelineInfoKHR-pNext-pNext");
+
+        skip |= validate_required_handle("vkGetPipelineExecutablePropertiesKHR", "pPipelineInfo->pipeline", pPipelineInfo->pipeline);
+    }
+    skip |= validate_struct_type_array("vkGetPipelineExecutablePropertiesKHR", "pExecutableCount", "pProperties", "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR", pExecutableCount, pProperties, VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR, true, false, false, "VUID-VkPipelineExecutablePropertiesKHR-sType-sType", "VUID-vkGetPipelineExecutablePropertiesKHR-pProperties-parameter", kVUIDUndefined);
+    if (pProperties != NULL)
+    {
+        for (uint32_t pExecutableIndex = 0; pExecutableIndex < *pExecutableCount; ++pExecutableIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPipelineExecutableStatisticsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pStatisticCount,
+    VkPipelineExecutableStatisticKHR*           pStatistics) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_pipeline_executable_properties) skip |= OutputExtensionError("vkGetPipelineExecutableStatisticsKHR", VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetPipelineExecutableStatisticsKHR", "pExecutableInfo", "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR", pExecutableInfo, VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR, true, "VUID-vkGetPipelineExecutableStatisticsKHR-pExecutableInfo-parameter", "VUID-VkPipelineExecutableInfoKHR-sType-sType");
+    if (pExecutableInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetPipelineExecutableStatisticsKHR", "pExecutableInfo->pNext", NULL, pExecutableInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPipelineExecutableInfoKHR-pNext-pNext");
+
+        skip |= validate_required_handle("vkGetPipelineExecutableStatisticsKHR", "pExecutableInfo->pipeline", pExecutableInfo->pipeline);
+    }
+    skip |= validate_struct_type_array("vkGetPipelineExecutableStatisticsKHR", "pStatisticCount", "pStatistics", "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR", pStatisticCount, pStatistics, VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR, true, false, false, "VUID-VkPipelineExecutableStatisticKHR-sType-sType", "VUID-vkGetPipelineExecutableStatisticsKHR-pStatistics-parameter", kVUIDUndefined);
+    if (pStatistics != NULL)
+    {
+        for (uint32_t pStatisticIndex = 0; pStatisticIndex < *pStatisticCount; ++pStatisticIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPipelineExecutableInternalRepresentationsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pInternalRepresentationCount,
+    VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_pipeline_executable_properties) skip |= OutputExtensionError("vkGetPipelineExecutableInternalRepresentationsKHR", VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetPipelineExecutableInternalRepresentationsKHR", "pExecutableInfo", "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR", pExecutableInfo, VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR, true, "VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pExecutableInfo-parameter", "VUID-VkPipelineExecutableInfoKHR-sType-sType");
+    if (pExecutableInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetPipelineExecutableInternalRepresentationsKHR", "pExecutableInfo->pNext", NULL, pExecutableInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPipelineExecutableInfoKHR-pNext-pNext");
+
+        skip |= validate_required_handle("vkGetPipelineExecutableInternalRepresentationsKHR", "pExecutableInfo->pipeline", pExecutableInfo->pipeline);
+    }
+    skip |= validate_struct_type_array("vkGetPipelineExecutableInternalRepresentationsKHR", "pInternalRepresentationCount", "pInternalRepresentations", "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR", pInternalRepresentationCount, pInternalRepresentations, VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR, true, false, false, "VUID-VkPipelineExecutableInternalRepresentationKHR-sType-sType", "VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pInternalRepresentations-parameter", kVUIDUndefined);
+    if (pInternalRepresentations != NULL)
+    {
+        for (uint32_t pInternalRepresentationIndex = 0; pInternalRepresentationIndex < *pInternalRepresentationCount; ++pInternalRepresentationIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    return skip;
+}
+
+
+
+bool StatelessValidation::PreCallValidateCreateDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    const VkDebugReportCallbackCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugReportCallbackEXT*                   pCallback) const {
+    bool skip = false;
+    if (!instance_extensions.vk_ext_debug_report) skip |= OutputExtensionError("vkCreateDebugReportCallbackEXT", VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCreateDebugReportCallbackEXT", "pCreateInfo", "VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT", pCreateInfo, VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, true, "VUID-vkCreateDebugReportCallbackEXT-pCreateInfo-parameter", "VUID-VkDebugReportCallbackCreateInfoEXT-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCreateDebugReportCallbackEXT", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, kVUIDUndefined);
+
+        skip |= validate_flags("vkCreateDebugReportCallbackEXT", "pCreateInfo->flags", "VkDebugReportFlagBitsEXT", AllVkDebugReportFlagBitsEXT, pCreateInfo->flags, kOptionalFlags, "VUID-VkDebugReportCallbackCreateInfoEXT-flags-parameter");
+
+        skip |= validate_required_pointer("vkCreateDebugReportCallbackEXT", "pCreateInfo->pfnCallback", reinterpret_cast<const void*>(pCreateInfo->pfnCallback), "VUID-VkDebugReportCallbackCreateInfoEXT-pfnCallback-parameter");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateDebugReportCallbackEXT", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateDebugReportCallbackEXT", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateDebugReportCallbackEXT", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateDebugReportCallbackEXT", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateDebugReportCallbackEXT", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateDebugReportCallbackEXT", "pCallback", pCallback, "VUID-vkCreateDebugReportCallbackEXT-pCallback-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroyDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    VkDebugReportCallbackEXT                    callback,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (!instance_extensions.vk_ext_debug_report) skip |= OutputExtensionError("vkDestroyDebugReportCallbackEXT", VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
+    skip |= validate_required_handle("vkDestroyDebugReportCallbackEXT", "callback", callback);
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroyDebugReportCallbackEXT", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroyDebugReportCallbackEXT", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroyDebugReportCallbackEXT", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyDebugReportCallbackEXT", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyDebugReportCallbackEXT", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDebugReportMessageEXT(
+    VkInstance                                  instance,
+    VkDebugReportFlagsEXT                       flags,
+    VkDebugReportObjectTypeEXT                  objectType,
+    uint64_t                                    object,
+    size_t                                      location,
+    int32_t                                     messageCode,
+    const char*                                 pLayerPrefix,
+    const char*                                 pMessage) const {
+    bool skip = false;
+    if (!instance_extensions.vk_ext_debug_report) skip |= OutputExtensionError("vkDebugReportMessageEXT", VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
+    skip |= validate_flags("vkDebugReportMessageEXT", "flags", "VkDebugReportFlagBitsEXT", AllVkDebugReportFlagBitsEXT, flags, kRequiredFlags, "VUID-vkDebugReportMessageEXT-flags-parameter", "VUID-vkDebugReportMessageEXT-flags-requiredbitmask");
+    skip |= validate_ranged_enum("vkDebugReportMessageEXT", "objectType", "VkDebugReportObjectTypeEXT", AllVkDebugReportObjectTypeEXTEnums, objectType, "VUID-vkDebugReportMessageEXT-objectType-parameter");
+    skip |= validate_required_pointer("vkDebugReportMessageEXT", "pLayerPrefix", pLayerPrefix, "VUID-vkDebugReportMessageEXT-pLayerPrefix-parameter");
+    skip |= validate_required_pointer("vkDebugReportMessageEXT", "pMessage", pMessage, "VUID-vkDebugReportMessageEXT-pMessage-parameter");
+    return skip;
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+bool StatelessValidation::PreCallValidateDebugMarkerSetObjectTagEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectTagInfoEXT*        pTagInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_ext_debug_report) skip |= OutputExtensionError("vkDebugMarkerSetObjectTagEXT", VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_debug_marker) skip |= OutputExtensionError("vkDebugMarkerSetObjectTagEXT", VK_EXT_DEBUG_MARKER_EXTENSION_NAME);
+    skip |= validate_struct_type("vkDebugMarkerSetObjectTagEXT", "pTagInfo", "VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT", pTagInfo, VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT, true, "VUID-vkDebugMarkerSetObjectTagEXT-pTagInfo-parameter", "VUID-VkDebugMarkerObjectTagInfoEXT-sType-sType");
+    if (pTagInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkDebugMarkerSetObjectTagEXT", "pTagInfo->pNext", NULL, pTagInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDebugMarkerObjectTagInfoEXT-pNext-pNext");
+
+        skip |= validate_ranged_enum("vkDebugMarkerSetObjectTagEXT", "pTagInfo->objectType", "VkDebugReportObjectTypeEXT", AllVkDebugReportObjectTypeEXTEnums, pTagInfo->objectType, "VUID-VkDebugMarkerObjectTagInfoEXT-objectType-parameter");
+
+        skip |= validate_array("vkDebugMarkerSetObjectTagEXT", "pTagInfo->tagSize", "pTagInfo->pTag", pTagInfo->tagSize, &pTagInfo->pTag, true, true, "VUID-VkDebugMarkerObjectTagInfoEXT-tagSize-arraylength", "VUID-VkDebugMarkerObjectTagInfoEXT-pTag-parameter");
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDebugMarkerSetObjectNameEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectNameInfoEXT*       pNameInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_ext_debug_report) skip |= OutputExtensionError("vkDebugMarkerSetObjectNameEXT", VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_debug_marker) skip |= OutputExtensionError("vkDebugMarkerSetObjectNameEXT", VK_EXT_DEBUG_MARKER_EXTENSION_NAME);
+    skip |= validate_struct_type("vkDebugMarkerSetObjectNameEXT", "pNameInfo", "VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT", pNameInfo, VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT, true, "VUID-vkDebugMarkerSetObjectNameEXT-pNameInfo-parameter", "VUID-VkDebugMarkerObjectNameInfoEXT-sType-sType");
+    if (pNameInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkDebugMarkerSetObjectNameEXT", "pNameInfo->pNext", NULL, pNameInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDebugMarkerObjectNameInfoEXT-pNext-pNext");
+
+        skip |= validate_ranged_enum("vkDebugMarkerSetObjectNameEXT", "pNameInfo->objectType", "VkDebugReportObjectTypeEXT", AllVkDebugReportObjectTypeEXTEnums, pNameInfo->objectType, "VUID-VkDebugMarkerObjectNameInfoEXT-objectType-parameter");
+
+        skip |= validate_required_pointer("vkDebugMarkerSetObjectNameEXT", "pNameInfo->pObjectName", pNameInfo->pObjectName, "VUID-VkDebugMarkerObjectNameInfoEXT-pObjectName-parameter");
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdDebugMarkerBeginEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_ext_debug_report) skip |= OutputExtensionError("vkCmdDebugMarkerBeginEXT", VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_debug_marker) skip |= OutputExtensionError("vkCmdDebugMarkerBeginEXT", VK_EXT_DEBUG_MARKER_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCmdDebugMarkerBeginEXT", "pMarkerInfo", "VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT", pMarkerInfo, VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT, true, "VUID-vkCmdDebugMarkerBeginEXT-pMarkerInfo-parameter", "VUID-VkDebugMarkerMarkerInfoEXT-sType-sType");
+    if (pMarkerInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCmdDebugMarkerBeginEXT", "pMarkerInfo->pNext", NULL, pMarkerInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDebugMarkerMarkerInfoEXT-pNext-pNext");
+
+        skip |= validate_required_pointer("vkCmdDebugMarkerBeginEXT", "pMarkerInfo->pMarkerName", pMarkerInfo->pMarkerName, "VUID-VkDebugMarkerMarkerInfoEXT-pMarkerName-parameter");
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdDebugMarkerEndEXT(
+    VkCommandBuffer                             commandBuffer) const {
+    bool skip = false;
+    if (!device_extensions.vk_ext_debug_report) skip |= OutputExtensionError("vkCmdDebugMarkerEndEXT", VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_debug_marker) skip |= OutputExtensionError("vkCmdDebugMarkerEndEXT", VK_EXT_DEBUG_MARKER_EXTENSION_NAME);
+    // No xml-driven validation
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdDebugMarkerInsertEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_ext_debug_report) skip |= OutputExtensionError("vkCmdDebugMarkerInsertEXT", VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_debug_marker) skip |= OutputExtensionError("vkCmdDebugMarkerInsertEXT", VK_EXT_DEBUG_MARKER_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCmdDebugMarkerInsertEXT", "pMarkerInfo", "VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT", pMarkerInfo, VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT, true, "VUID-vkCmdDebugMarkerInsertEXT-pMarkerInfo-parameter", "VUID-VkDebugMarkerMarkerInfoEXT-sType-sType");
+    if (pMarkerInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCmdDebugMarkerInsertEXT", "pMarkerInfo->pNext", NULL, pMarkerInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDebugMarkerMarkerInfoEXT-pNext-pNext");
+
+        skip |= validate_required_pointer("vkCmdDebugMarkerInsertEXT", "pMarkerInfo->pMarkerName", pMarkerInfo->pMarkerName, "VUID-VkDebugMarkerMarkerInfoEXT-pMarkerName-parameter");
+    }
+    return skip;
+}
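+
+// For reference, a minimal caller-side sketch of the structures validated above
+// (handle values are placeholders; real code would use live Vulkan objects):
+//
+//   VkDebugMarkerObjectNameInfoEXT name_info = {};
+//   name_info.sType = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT;
+//   name_info.objectType = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT;
+//   name_info.object = (uint64_t)buffer;        // placeholder handle
+//   name_info.pObjectName = "vertex buffer";    // must be non-NULL
+//   vkDebugMarkerSetObjectNameEXT(device, &name_info);
+//
+//   VkDebugMarkerMarkerInfoEXT marker = {};
+//   marker.sType = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT;
+//   marker.pMarkerName = "shadow pass";         // must be non-NULL
+//   vkCmdDebugMarkerBeginEXT(command_buffer, &marker);
+//   // ... marked commands ...
+//   vkCmdDebugMarkerEndEXT(command_buffer);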
+
+
+
+
+
+
+
+bool StatelessValidation::PreCallValidateCmdBindTransformFeedbackBuffersEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets,
+    const VkDeviceSize*                         pSizes) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdBindTransformFeedbackBuffersEXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_transform_feedback) skip |= OutputExtensionError("vkCmdBindTransformFeedbackBuffersEXT", VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME);
+    skip |= validate_handle_array("vkCmdBindTransformFeedbackBuffersEXT", "bindingCount", "pBuffers", bindingCount, pBuffers, true, true);
+    skip |= validate_array("vkCmdBindTransformFeedbackBuffersEXT", "bindingCount", "pOffsets", bindingCount, &pOffsets, true, true, "VUID-vkCmdBindTransformFeedbackBuffersEXT-bindingCount-arraylength", "VUID-vkCmdBindTransformFeedbackBuffersEXT-pOffsets-parameter");
+    skip |= validate_array("vkCmdBindTransformFeedbackBuffersEXT", "bindingCount", "pSizes", bindingCount, &pSizes, true, false, "VUID-vkCmdBindTransformFeedbackBuffersEXT-bindingCount-arraylength", "VUID-vkCmdBindTransformFeedbackBuffersEXT-pSizes-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdBeginTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdBeginTransformFeedbackEXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_transform_feedback) skip |= OutputExtensionError("vkCmdBeginTransformFeedbackEXT", VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME);
+    // No xml-driven validation
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdEndTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdEndTransformFeedbackEXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_transform_feedback) skip |= OutputExtensionError("vkCmdEndTransformFeedbackEXT", VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME);
+    // No xml-driven validation
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdBeginQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags,
+    uint32_t                                    index) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdBeginQueryIndexedEXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_transform_feedback) skip |= OutputExtensionError("vkCmdBeginQueryIndexedEXT", VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME);
+    skip |= validate_required_handle("vkCmdBeginQueryIndexedEXT", "queryPool", queryPool);
+    skip |= validate_flags("vkCmdBeginQueryIndexedEXT", "flags", "VkQueryControlFlagBits", AllVkQueryControlFlagBits, flags, kOptionalFlags, "VUID-vkCmdBeginQueryIndexedEXT-flags-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdEndQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    uint32_t                                    index) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdEndQueryIndexedEXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_transform_feedback) skip |= OutputExtensionError("vkCmdEndQueryIndexedEXT", VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME);
+    skip |= validate_required_handle("vkCmdEndQueryIndexedEXT", "queryPool", queryPool);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdDrawIndirectByteCountEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstInstance,
+    VkBuffer                                    counterBuffer,
+    VkDeviceSize                                counterBufferOffset,
+    uint32_t                                    counterOffset,
+    uint32_t                                    vertexStride) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdDrawIndirectByteCountEXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_transform_feedback) skip |= OutputExtensionError("vkCmdDrawIndirectByteCountEXT", VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME);
+    skip |= validate_required_handle("vkCmdDrawIndirectByteCountEXT", "counterBuffer", counterBuffer);
+    return skip;
+}
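+
+// Caller-side sketch of the array rules checked above: pBuffers and pOffsets
+// must both be non-NULL arrays of bindingCount elements, while pSizes may be
+// NULL to bind the remaining range of each buffer (handles are placeholders):
+//
+//   VkBuffer xfb_buffers[1] = { xfb_buffer };   // placeholder handle
+//   VkDeviceSize xfb_offsets[1] = { 0 };
+//   vkCmdBindTransformFeedbackBuffersEXT(command_buffer,
+//                                        0 /*firstBinding*/, 1 /*bindingCount*/,
+//                                        xfb_buffers, xfb_offsets,
+//                                        NULL /*pSizes: optional*/);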
+
+
+
+bool StatelessValidation::PreCallValidateGetImageViewHandleNVX(
+    VkDevice                                    device,
+    const VkImageViewHandleInfoNVX*             pInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_nvx_image_view_handle) skip |= OutputExtensionError("vkGetImageViewHandleNVX", VK_NVX_IMAGE_VIEW_HANDLE_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetImageViewHandleNVX", "pInfo", "VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX", pInfo, VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX, true, "VUID-vkGetImageViewHandleNVX-pInfo-parameter", "VUID-VkImageViewHandleInfoNVX-sType-sType");
+    if (pInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetImageViewHandleNVX", "pInfo->pNext", NULL, pInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkImageViewHandleInfoNVX-pNext-pNext");
+
+        skip |= validate_required_handle("vkGetImageViewHandleNVX", "pInfo->imageView", pInfo->imageView);
+
+        skip |= validate_ranged_enum("vkGetImageViewHandleNVX", "pInfo->descriptorType", "VkDescriptorType", AllVkDescriptorTypeEnums, pInfo->descriptorType, "VUID-VkImageViewHandleInfoNVX-descriptorType-parameter");
+    }
+    return skip;
+}
+
+
+
+bool StatelessValidation::PreCallValidateCmdDrawIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) const {
+    bool skip = false;
+    if (!device_extensions.vk_amd_draw_indirect_count) skip |= OutputExtensionError("vkCmdDrawIndirectCountAMD", VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME);
+    skip |= validate_required_handle("vkCmdDrawIndirectCountAMD", "buffer", buffer);
+    skip |= validate_required_handle("vkCmdDrawIndirectCountAMD", "countBuffer", countBuffer);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdDrawIndexedIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) const {
+    bool skip = false;
+    if (!device_extensions.vk_amd_draw_indirect_count) skip |= OutputExtensionError("vkCmdDrawIndexedIndirectCountAMD", VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME);
+    skip |= validate_required_handle("vkCmdDrawIndexedIndirectCountAMD", "buffer", buffer);
+    skip |= validate_required_handle("vkCmdDrawIndexedIndirectCountAMD", "countBuffer", countBuffer);
+    return skip;
+}
+
+
+
+
+
+
+
+
+
+
+
+bool StatelessValidation::PreCallValidateGetShaderInfoAMD(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    VkShaderStageFlagBits                       shaderStage,
+    VkShaderInfoTypeAMD                         infoType,
+    size_t*                                     pInfoSize,
+    void*                                       pInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_amd_shader_info) skip |= OutputExtensionError("vkGetShaderInfoAMD", VK_AMD_SHADER_INFO_EXTENSION_NAME);
+    skip |= validate_required_handle("vkGetShaderInfoAMD", "pipeline", pipeline);
+    skip |= validate_flags("vkGetShaderInfoAMD", "shaderStage", "VkShaderStageFlagBits", AllVkShaderStageFlagBits, shaderStage, kRequiredSingleBit, "VUID-vkGetShaderInfoAMD-shaderStage-parameter", "VUID-vkGetShaderInfoAMD-shaderStage-parameter");
+    skip |= validate_ranged_enum("vkGetShaderInfoAMD", "infoType", "VkShaderInfoTypeAMD", AllVkShaderInfoTypeAMDEnums, infoType, "VUID-vkGetShaderInfoAMD-infoType-parameter");
+    skip |= validate_array("vkGetShaderInfoAMD", "pInfoSize", "pInfo", pInfoSize, &pInfo, true, false, false, kVUIDUndefined, "VUID-vkGetShaderInfoAMD-pInfo-parameter");
+    return skip;
+}
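+
+// pInfo is intentionally optional above (array_required = false): like other
+// Vulkan "get" queries, vkGetShaderInfoAMD is normally called twice - once with
+// pInfo == NULL to retrieve the required size in *pInfoSize, and again with a
+// buffer of that size to fetch the data.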
+
+
+
+
+
+#ifdef VK_USE_PLATFORM_GGP
+
+bool StatelessValidation::PreCallValidateCreateStreamDescriptorSurfaceGGP(
+    VkInstance                                  instance,
+    const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateStreamDescriptorSurfaceGGP", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!instance_extensions.vk_ggp_stream_descriptor_surface) skip |= OutputExtensionError("vkCreateStreamDescriptorSurfaceGGP", VK_GGP_STREAM_DESCRIPTOR_SURFACE_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCreateStreamDescriptorSurfaceGGP", "pCreateInfo", "VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP", pCreateInfo, VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP, true, "VUID-vkCreateStreamDescriptorSurfaceGGP-pCreateInfo-parameter", "VUID-VkStreamDescriptorSurfaceCreateInfoGGP-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCreateStreamDescriptorSurfaceGGP", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkStreamDescriptorSurfaceCreateInfoGGP-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkCreateStreamDescriptorSurfaceGGP", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkStreamDescriptorSurfaceCreateInfoGGP-flags-zerobitmask");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateStreamDescriptorSurfaceGGP", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateStreamDescriptorSurfaceGGP", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateStreamDescriptorSurfaceGGP", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateStreamDescriptorSurfaceGGP", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateStreamDescriptorSurfaceGGP", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateStreamDescriptorSurfaceGGP", "pSurface", pSurface, "VUID-vkCreateStreamDescriptorSurfaceGGP-pSurface-parameter");
+    return skip;
+}
+
+#endif // VK_USE_PLATFORM_GGP
+
+
+
+
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceExternalImageFormatPropertiesNV(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageCreateFlags                          flags,
+    VkExternalMemoryHandleTypeFlagsNV           externalHandleType,
+    VkExternalImageFormatPropertiesNV*          pExternalImageFormatProperties) const {
+    bool skip = false;
+    if (!instance_extensions.vk_nv_external_memory_capabilities) skip |= OutputExtensionError("vkGetPhysicalDeviceExternalImageFormatPropertiesNV", VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME);
+    skip |= validate_ranged_enum("vkGetPhysicalDeviceExternalImageFormatPropertiesNV", "format", "VkFormat", AllVkFormatEnums, format, "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-format-parameter");
+    skip |= validate_ranged_enum("vkGetPhysicalDeviceExternalImageFormatPropertiesNV", "type", "VkImageType", AllVkImageTypeEnums, type, "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-type-parameter");
+    skip |= validate_ranged_enum("vkGetPhysicalDeviceExternalImageFormatPropertiesNV", "tiling", "VkImageTiling", AllVkImageTilingEnums, tiling, "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-tiling-parameter");
+    skip |= validate_flags("vkGetPhysicalDeviceExternalImageFormatPropertiesNV", "usage", "VkImageUsageFlagBits", AllVkImageUsageFlagBits, usage, kRequiredFlags, "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-usage-parameter", "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-usage-requiredbitmask");
+    skip |= validate_flags("vkGetPhysicalDeviceExternalImageFormatPropertiesNV", "flags", "VkImageCreateFlagBits", AllVkImageCreateFlagBits, flags, kOptionalFlags, "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-flags-parameter");
+    skip |= validate_flags("vkGetPhysicalDeviceExternalImageFormatPropertiesNV", "externalHandleType", "VkExternalMemoryHandleTypeFlagBitsNV", AllVkExternalMemoryHandleTypeFlagBitsNV, externalHandleType, kOptionalFlags, "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-externalHandleType-parameter");
+    skip |= validate_required_pointer("vkGetPhysicalDeviceExternalImageFormatPropertiesNV", "pExternalImageFormatProperties", pExternalImageFormatProperties, "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-pExternalImageFormatProperties-parameter");
+    if (pExternalImageFormatProperties != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+
+
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+bool StatelessValidation::PreCallValidateGetMemoryWin32HandleNV(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkExternalMemoryHandleTypeFlagsNV           handleType,
+    HANDLE*                                     pHandle) const {
+    bool skip = false;
+    if (!device_extensions.vk_nv_external_memory) skip |= OutputExtensionError("vkGetMemoryWin32HandleNV", VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME);
+    if (!device_extensions.vk_nv_external_memory_win32) skip |= OutputExtensionError("vkGetMemoryWin32HandleNV", VK_NV_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME);
+    skip |= validate_required_handle("vkGetMemoryWin32HandleNV", "memory", memory);
+    skip |= validate_flags("vkGetMemoryWin32HandleNV", "handleType", "VkExternalMemoryHandleTypeFlagBitsNV", AllVkExternalMemoryHandleTypeFlagBitsNV, handleType, kRequiredFlags, "VUID-vkGetMemoryWin32HandleNV-handleType-parameter", "VUID-vkGetMemoryWin32HandleNV-handleType-requiredbitmask");
+    skip |= validate_required_pointer("vkGetMemoryWin32HandleNV", "pHandle", pHandle, "VUID-vkGetMemoryWin32HandleNV-pHandle-parameter");
+    return skip;
+}
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+
+
+#ifdef VK_USE_PLATFORM_VI_NN
+
+bool StatelessValidation::PreCallValidateCreateViSurfaceNN(
+    VkInstance                                  instance,
+    const VkViSurfaceCreateInfoNN*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateViSurfaceNN", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!instance_extensions.vk_nn_vi_surface) skip |= OutputExtensionError("vkCreateViSurfaceNN", VK_NN_VI_SURFACE_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCreateViSurfaceNN", "pCreateInfo", "VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN", pCreateInfo, VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN, true, "VUID-vkCreateViSurfaceNN-pCreateInfo-parameter", "VUID-VkViSurfaceCreateInfoNN-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCreateViSurfaceNN", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkViSurfaceCreateInfoNN-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkCreateViSurfaceNN", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkViSurfaceCreateInfoNN-flags-zerobitmask");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateViSurfaceNN", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateViSurfaceNN", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateViSurfaceNN", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateViSurfaceNN", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateViSurfaceNN", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateViSurfaceNN", "pSurface", pSurface, "VUID-vkCreateViSurfaceNN-pSurface-parameter");
+    return skip;
+}
+
+#endif // VK_USE_PLATFORM_VI_NN
+
+
+
+
+
+
+
+
+
+bool StatelessValidation::PreCallValidateCmdBeginConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkConditionalRenderingBeginInfoEXT*   pConditionalRenderingBegin) const {
+    bool skip = false;
+    if (!device_extensions.vk_ext_conditional_rendering) skip |= OutputExtensionError("vkCmdBeginConditionalRenderingEXT", VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCmdBeginConditionalRenderingEXT", "pConditionalRenderingBegin", "VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT", pConditionalRenderingBegin, VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT, true, "VUID-vkCmdBeginConditionalRenderingEXT-pConditionalRenderingBegin-parameter", "VUID-VkConditionalRenderingBeginInfoEXT-sType-sType");
+    if (pConditionalRenderingBegin != NULL)
+    {
+        skip |= validate_struct_pnext("vkCmdBeginConditionalRenderingEXT", "pConditionalRenderingBegin->pNext", NULL, pConditionalRenderingBegin->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkConditionalRenderingBeginInfoEXT-pNext-pNext");
+
+        skip |= validate_required_handle("vkCmdBeginConditionalRenderingEXT", "pConditionalRenderingBegin->buffer", pConditionalRenderingBegin->buffer);
+
+        skip |= validate_flags("vkCmdBeginConditionalRenderingEXT", "pConditionalRenderingBegin->flags", "VkConditionalRenderingFlagBitsEXT", AllVkConditionalRenderingFlagBitsEXT, pConditionalRenderingBegin->flags, kOptionalFlags, "VUID-VkConditionalRenderingBeginInfoEXT-flags-parameter");
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdEndConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer) const {
+    bool skip = false;
+    if (!device_extensions.vk_ext_conditional_rendering) skip |= OutputExtensionError("vkCmdEndConditionalRenderingEXT", VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME);
+    // No xml-driven validation
+    return skip;
+}
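+
+// Caller-side sketch of the begin info validated above (buffer is a placeholder
+// handle; it must contain the 32-bit predicate value at the given offset):
+//
+//   VkConditionalRenderingBeginInfoEXT cond_info = {};
+//   cond_info.sType = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT;
+//   cond_info.buffer = predicate_buffer;   // required handle
+//   cond_info.offset = 0;
+//   cond_info.flags = 0;                   // or VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT
+//   vkCmdBeginConditionalRenderingEXT(command_buffer, &cond_info);
+//   // ... conditionally executed commands ...
+//   vkCmdEndConditionalRenderingEXT(command_buffer);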
+
+
+
+bool StatelessValidation::PreCallValidateCmdProcessCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdProcessCommandsInfoNVX*          pProcessCommandsInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_nvx_device_generated_commands) skip |= OutputExtensionError("vkCmdProcessCommandsNVX", VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCmdProcessCommandsNVX", "pProcessCommandsInfo", "VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX", pProcessCommandsInfo, VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX, true, "VUID-vkCmdProcessCommandsNVX-pProcessCommandsInfo-parameter", "VUID-VkCmdProcessCommandsInfoNVX-sType-sType");
+    if (pProcessCommandsInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCmdProcessCommandsNVX", "pProcessCommandsInfo->pNext", NULL, pProcessCommandsInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkCmdProcessCommandsInfoNVX-pNext-pNext");
+
+        skip |= validate_required_handle("vkCmdProcessCommandsNVX", "pProcessCommandsInfo->objectTable", pProcessCommandsInfo->objectTable);
+
+        skip |= validate_required_handle("vkCmdProcessCommandsNVX", "pProcessCommandsInfo->indirectCommandsLayout", pProcessCommandsInfo->indirectCommandsLayout);
+
+        skip |= validate_array("vkCmdProcessCommandsNVX", "pProcessCommandsInfo->indirectCommandsTokenCount", "pProcessCommandsInfo->pIndirectCommandsTokens", pProcessCommandsInfo->indirectCommandsTokenCount, &pProcessCommandsInfo->pIndirectCommandsTokens, true, true, "VUID-VkCmdProcessCommandsInfoNVX-indirectCommandsTokenCount-arraylength", "VUID-VkCmdProcessCommandsInfoNVX-pIndirectCommandsTokens-parameter");
+
+        if (pProcessCommandsInfo->pIndirectCommandsTokens != NULL)
+        {
+            for (uint32_t indirectCommandsTokenIndex = 0; indirectCommandsTokenIndex < pProcessCommandsInfo->indirectCommandsTokenCount; ++indirectCommandsTokenIndex)
+            {
+                skip |= validate_ranged_enum("vkCmdProcessCommandsNVX", ParameterName("pProcessCommandsInfo->pIndirectCommandsTokens[%i].tokenType", ParameterName::IndexVector{ indirectCommandsTokenIndex }), "VkIndirectCommandsTokenTypeNVX", AllVkIndirectCommandsTokenTypeNVXEnums, pProcessCommandsInfo->pIndirectCommandsTokens[indirectCommandsTokenIndex].tokenType, "VUID-VkIndirectCommandsTokenNVX-tokenType-parameter");
+
+                skip |= validate_required_handle("vkCmdProcessCommandsNVX", ParameterName("pProcessCommandsInfo->pIndirectCommandsTokens[%i].buffer", ParameterName::IndexVector{ indirectCommandsTokenIndex }), pProcessCommandsInfo->pIndirectCommandsTokens[indirectCommandsTokenIndex].buffer);
+            }
+        }
+    }
+    return skip;
+}
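+
+// Note on the loop above: ParameterName with an IndexVector substitutes the
+// element index into the "%i" placeholder, so any error reported for an array
+// member names the exact offending element, e.g.
+// "pProcessCommandsInfo->pIndirectCommandsTokens[2].buffer".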
+
+bool StatelessValidation::PreCallValidateCmdReserveSpaceForCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdReserveSpaceForCommandsInfoNVX*  pReserveSpaceInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_nvx_device_generated_commands) skip |= OutputExtensionError("vkCmdReserveSpaceForCommandsNVX", VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCmdReserveSpaceForCommandsNVX", "pReserveSpaceInfo", "VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX", pReserveSpaceInfo, VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX, true, "VUID-vkCmdReserveSpaceForCommandsNVX-pReserveSpaceInfo-parameter", "VUID-VkCmdReserveSpaceForCommandsInfoNVX-sType-sType");
+    if (pReserveSpaceInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCmdReserveSpaceForCommandsNVX", "pReserveSpaceInfo->pNext", NULL, pReserveSpaceInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkCmdReserveSpaceForCommandsInfoNVX-pNext-pNext");
+
+        skip |= validate_required_handle("vkCmdReserveSpaceForCommandsNVX", "pReserveSpaceInfo->objectTable", pReserveSpaceInfo->objectTable);
+
+        skip |= validate_required_handle("vkCmdReserveSpaceForCommandsNVX", "pReserveSpaceInfo->indirectCommandsLayout", pReserveSpaceInfo->indirectCommandsLayout);
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCreateIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkIndirectCommandsLayoutNVX*                pIndirectCommandsLayout) const {
+    bool skip = false;
+    if (!device_extensions.vk_nvx_device_generated_commands) skip |= OutputExtensionError("vkCreateIndirectCommandsLayoutNVX", VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCreateIndirectCommandsLayoutNVX", "pCreateInfo", "VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX", pCreateInfo, VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX, true, "VUID-vkCreateIndirectCommandsLayoutNVX-pCreateInfo-parameter", "VUID-VkIndirectCommandsLayoutCreateInfoNVX-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCreateIndirectCommandsLayoutNVX", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkIndirectCommandsLayoutCreateInfoNVX-pNext-pNext");
+
+        skip |= validate_ranged_enum("vkCreateIndirectCommandsLayoutNVX", "pCreateInfo->pipelineBindPoint", "VkPipelineBindPoint", AllVkPipelineBindPointEnums, pCreateInfo->pipelineBindPoint, "VUID-VkIndirectCommandsLayoutCreateInfoNVX-pipelineBindPoint-parameter");
+
+        skip |= validate_flags("vkCreateIndirectCommandsLayoutNVX", "pCreateInfo->flags", "VkIndirectCommandsLayoutUsageFlagBitsNVX", AllVkIndirectCommandsLayoutUsageFlagBitsNVX, pCreateInfo->flags, kRequiredFlags, "VUID-VkIndirectCommandsLayoutCreateInfoNVX-flags-parameter", "VUID-VkIndirectCommandsLayoutCreateInfoNVX-flags-requiredbitmask");
+
+        skip |= validate_array("vkCreateIndirectCommandsLayoutNVX", "pCreateInfo->tokenCount", "pCreateInfo->pTokens", pCreateInfo->tokenCount, &pCreateInfo->pTokens, true, true, "VUID-VkIndirectCommandsLayoutCreateInfoNVX-tokenCount-arraylength", "VUID-VkIndirectCommandsLayoutCreateInfoNVX-pTokens-parameter");
+
+        if (pCreateInfo->pTokens != NULL)
+        {
+            for (uint32_t tokenIndex = 0; tokenIndex < pCreateInfo->tokenCount; ++tokenIndex)
+            {
+                skip |= validate_ranged_enum("vkCreateIndirectCommandsLayoutNVX", ParameterName("pCreateInfo->pTokens[%i].tokenType", ParameterName::IndexVector{ tokenIndex }), "VkIndirectCommandsTokenTypeNVX", AllVkIndirectCommandsTokenTypeNVXEnums, pCreateInfo->pTokens[tokenIndex].tokenType, "VUID-VkIndirectCommandsLayoutTokenNVX-tokenType-parameter");
+            }
+        }
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateIndirectCommandsLayoutNVX", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateIndirectCommandsLayoutNVX", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateIndirectCommandsLayoutNVX", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateIndirectCommandsLayoutNVX", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateIndirectCommandsLayoutNVX", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateIndirectCommandsLayoutNVX", "pIndirectCommandsLayout", pIndirectCommandsLayout, "VUID-vkCreateIndirectCommandsLayoutNVX-pIndirectCommandsLayout-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroyIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    VkIndirectCommandsLayoutNVX                 indirectCommandsLayout,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (!device_extensions.vk_nvx_device_generated_commands) skip |= OutputExtensionError("vkDestroyIndirectCommandsLayoutNVX", VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME);
+    skip |= validate_required_handle("vkDestroyIndirectCommandsLayoutNVX", "indirectCommandsLayout", indirectCommandsLayout);
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroyIndirectCommandsLayoutNVX", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroyIndirectCommandsLayoutNVX", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroyIndirectCommandsLayoutNVX", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyIndirectCommandsLayoutNVX", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyIndirectCommandsLayoutNVX", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCreateObjectTableNVX(
+    VkDevice                                    device,
+    const VkObjectTableCreateInfoNVX*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkObjectTableNVX*                           pObjectTable) const {
+    bool skip = false;
+    if (!device_extensions.vk_nvx_device_generated_commands) skip |= OutputExtensionError("vkCreateObjectTableNVX", VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCreateObjectTableNVX", "pCreateInfo", "VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX", pCreateInfo, VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX, true, "VUID-vkCreateObjectTableNVX-pCreateInfo-parameter", "VUID-VkObjectTableCreateInfoNVX-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCreateObjectTableNVX", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkObjectTableCreateInfoNVX-pNext-pNext");
+
+        skip |= validate_ranged_enum_array("vkCreateObjectTableNVX", "pCreateInfo->objectCount", "pCreateInfo->pObjectEntryTypes", "VkObjectEntryTypeNVX", AllVkObjectEntryTypeNVXEnums, pCreateInfo->objectCount, pCreateInfo->pObjectEntryTypes, true, true);
+
+        skip |= validate_array("vkCreateObjectTableNVX", "pCreateInfo->objectCount", "pCreateInfo->pObjectEntryCounts", pCreateInfo->objectCount, &pCreateInfo->pObjectEntryCounts, true, true, "VUID-VkObjectTableCreateInfoNVX-objectCount-arraylength", "VUID-VkObjectTableCreateInfoNVX-pObjectEntryCounts-parameter");
+
+        skip |= validate_flags_array("vkCreateObjectTableNVX", "pCreateInfo->objectCount", "pCreateInfo->pObjectEntryUsageFlags", "VkObjectEntryUsageFlagBitsNVX", AllVkObjectEntryUsageFlagBitsNVX, pCreateInfo->objectCount, pCreateInfo->pObjectEntryUsageFlags, true, true);
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateObjectTableNVX", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateObjectTableNVX", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateObjectTableNVX", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateObjectTableNVX", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateObjectTableNVX", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateObjectTableNVX", "pObjectTable", pObjectTable, "VUID-vkCreateObjectTableNVX-pObjectTable-parameter");
+    return skip;
+}
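+
+// VkObjectTableCreateInfoNVX uses three parallel arrays - pObjectEntryTypes,
+// pObjectEntryCounts and pObjectEntryUsageFlags - each objectCount elements
+// long, which is why the three array checks above share the same count.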
+
+bool StatelessValidation::PreCallValidateDestroyObjectTableNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (!device_extensions.vk_nvx_device_generated_commands) skip |= OutputExtensionError("vkDestroyObjectTableNVX", VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME);
+    skip |= validate_required_handle("vkDestroyObjectTableNVX", "objectTable", objectTable);
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroyObjectTableNVX", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroyObjectTableNVX", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroyObjectTableNVX", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyObjectTableNVX", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyObjectTableNVX", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateRegisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectTableEntryNVX* const*         ppObjectTableEntries,
+    const uint32_t*                             pObjectIndices) const {
+    bool skip = false;
+    if (!device_extensions.vk_nvx_device_generated_commands) skip |= OutputExtensionError("vkRegisterObjectsNVX", VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME);
+    skip |= validate_required_handle("vkRegisterObjectsNVX", "objectTable", objectTable);
+    skip |= validate_array("vkRegisterObjectsNVX", "objectCount", "ppObjectTableEntries", objectCount, &ppObjectTableEntries, true, true, "VUID-vkRegisterObjectsNVX-objectCount-arraylength", "VUID-vkRegisterObjectsNVX-ppObjectTableEntries-parameter");
+    if (ppObjectTableEntries != NULL)
+    {
+        for (uint32_t objectIndex = 0; objectIndex < objectCount; ++objectIndex)
+        {
+            skip |= validate_ranged_enum("vkRegisterObjectsNVX", ParameterName("ppObjectTableEntries[%i]->type", ParameterName::IndexVector{ objectIndex }), "VkObjectEntryTypeNVX", AllVkObjectEntryTypeNVXEnums, ppObjectTableEntries[objectIndex]->type, "VUID-VkObjectTableEntryNVX-type-parameter");
+
+            skip |= validate_flags("vkRegisterObjectsNVX", ParameterName("ppObjectTableEntries[%i]->flags", ParameterName::IndexVector{ objectIndex }), "VkObjectEntryUsageFlagBitsNVX", AllVkObjectEntryUsageFlagBitsNVX, ppObjectTableEntries[objectIndex]->flags, kRequiredFlags, "VUID-VkObjectTableEntryNVX-flags-parameter", "VUID-VkObjectTableEntryNVX-flags-requiredbitmask");
+        }
+    }
+    skip |= validate_array("vkRegisterObjectsNVX", "objectCount", "pObjectIndices", objectCount, &pObjectIndices, true, true, "VUID-vkRegisterObjectsNVX-objectCount-arraylength", "VUID-vkRegisterObjectsNVX-pObjectIndices-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateUnregisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectEntryTypeNVX*                 pObjectEntryTypes,
+    const uint32_t*                             pObjectIndices) const {
+    bool skip = false;
+    if (!device_extensions.vk_nvx_device_generated_commands) skip |= OutputExtensionError("vkUnregisterObjectsNVX", VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME);
+    skip |= validate_required_handle("vkUnregisterObjectsNVX", "objectTable", objectTable);
+    skip |= validate_ranged_enum_array("vkUnregisterObjectsNVX", "objectCount", "pObjectEntryTypes", "VkObjectEntryTypeNVX", AllVkObjectEntryTypeNVXEnums, objectCount, pObjectEntryTypes, true, true);
+    skip |= validate_array("vkUnregisterObjectsNVX", "objectCount", "pObjectIndices", objectCount, &pObjectIndices, true, true, "VUID-vkUnregisterObjectsNVX-objectCount-arraylength", "VUID-vkUnregisterObjectsNVX-pObjectIndices-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceGeneratedCommandsPropertiesNVX(
+    VkPhysicalDevice                            physicalDevice,
+    VkDeviceGeneratedCommandsFeaturesNVX*       pFeatures,
+    VkDeviceGeneratedCommandsLimitsNVX*         pLimits) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX", "pFeatures", "VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX", pFeatures, VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX, true, "VUID-vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX-pFeatures-parameter", "VUID-VkDeviceGeneratedCommandsFeaturesNVX-sType-sType");
+    skip |= validate_struct_type("vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX", "pLimits", "VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX", pLimits, VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX, true, "VUID-vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX-pLimits-parameter", "VUID-VkDeviceGeneratedCommandsLimitsNVX-sType-sType");
+    return skip;
+}
+
+
+
+bool StatelessValidation::PreCallValidateCmdSetViewportWScalingNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewportWScalingNV*                 pViewportWScalings) const {
+    bool skip = false;
+    if (!device_extensions.vk_nv_clip_space_w_scaling) skip |= OutputExtensionError("vkCmdSetViewportWScalingNV", VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME);
+    skip |= validate_array("vkCmdSetViewportWScalingNV", "viewportCount", "pViewportWScalings", viewportCount, &pViewportWScalings, true, true, "VUID-vkCmdSetViewportWScalingNV-viewportCount-arraylength", "VUID-vkCmdSetViewportWScalingNV-pViewportWScalings-parameter");
+    if (pViewportWScalings != NULL)
+    {
+        for (uint32_t viewportIndex = 0; viewportIndex < viewportCount; ++viewportIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    if (!skip) skip |= manual_PreCallValidateCmdSetViewportWScalingNV(commandBuffer, firstViewport, viewportCount, pViewportWScalings);
+    return skip;
+}
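+
+// The trailing "if (!skip) ..." call hands off to a hand-written
+// manual_PreCallValidateCmdSetViewportWScalingNV for checks the XML-driven
+// codegen cannot express (typically limit checks such as the device's
+// maxViewports); the manual routines live alongside this generated file in the
+// stateless validation sources.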
+
+
+
+bool StatelessValidation::PreCallValidateReleaseDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_display) skip |= OutputExtensionError("vkReleaseDisplayEXT", VK_KHR_DISPLAY_EXTENSION_NAME);
+    if (!instance_extensions.vk_ext_direct_mode_display) skip |= OutputExtensionError("vkReleaseDisplayEXT", VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME);
+    skip |= validate_required_handle("vkReleaseDisplayEXT", "display", display);
+    return skip;
+}
+
+
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+bool StatelessValidation::PreCallValidateAcquireXlibDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    VkDisplayKHR                                display) const {
+    bool skip = false;
+    if (!instance_extensions.vk_ext_direct_mode_display) skip |= OutputExtensionError("vkAcquireXlibDisplayEXT", VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME);
+    if (!instance_extensions.vk_ext_acquire_xlib_display) skip |= OutputExtensionError("vkAcquireXlibDisplayEXT", VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME);
+    skip |= validate_required_pointer("vkAcquireXlibDisplayEXT", "dpy", dpy, "VUID-vkAcquireXlibDisplayEXT-dpy-parameter");
+    skip |= validate_required_handle("vkAcquireXlibDisplayEXT", "display", display);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetRandROutputDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    RROutput                                    rrOutput,
+    VkDisplayKHR*                               pDisplay) const {
+    bool skip = false;
+    if (!instance_extensions.vk_ext_direct_mode_display) skip |= OutputExtensionError("vkGetRandROutputDisplayEXT", VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME);
+    if (!instance_extensions.vk_ext_acquire_xlib_display) skip |= OutputExtensionError("vkGetRandROutputDisplayEXT", VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME);
+    skip |= validate_required_pointer("vkGetRandROutputDisplayEXT", "dpy", dpy, "VUID-vkGetRandROutputDisplayEXT-dpy-parameter");
+    skip |= validate_required_pointer("vkGetRandROutputDisplayEXT", "pDisplay", pDisplay, "VUID-vkGetRandROutputDisplayEXT-pDisplay-parameter");
+    return skip;
+}
+
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceSurfaceCapabilities2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilities2EXT*                  pSurfaceCapabilities) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_display) skip |= OutputExtensionError("vkGetPhysicalDeviceSurfaceCapabilities2EXT", VK_KHR_DISPLAY_EXTENSION_NAME);
+    if (!instance_extensions.vk_ext_display_surface_counter) skip |= OutputExtensionError("vkGetPhysicalDeviceSurfaceCapabilities2EXT", VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME);
+    skip |= validate_required_handle("vkGetPhysicalDeviceSurfaceCapabilities2EXT", "surface", surface);
+    skip |= validate_struct_type("vkGetPhysicalDeviceSurfaceCapabilities2EXT", "pSurfaceCapabilities", "VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT", pSurfaceCapabilities, VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT, true, "VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-pSurfaceCapabilities-parameter", "VUID-VkSurfaceCapabilities2EXT-sType-sType");
+    if (pSurfaceCapabilities != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+
+
+bool StatelessValidation::PreCallValidateDisplayPowerControlEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayPowerInfoEXT*                pDisplayPowerInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkDisplayPowerControlEXT", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_display_surface_counter) skip |= OutputExtensionError("vkDisplayPowerControlEXT", VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_display_control) skip |= OutputExtensionError("vkDisplayPowerControlEXT", VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME);
+    skip |= validate_required_handle("vkDisplayPowerControlEXT", "display", display);
+    skip |= validate_struct_type("vkDisplayPowerControlEXT", "pDisplayPowerInfo", "VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT", pDisplayPowerInfo, VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT, true, "VUID-vkDisplayPowerControlEXT-pDisplayPowerInfo-parameter", "VUID-VkDisplayPowerInfoEXT-sType-sType");
+    if (pDisplayPowerInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkDisplayPowerControlEXT", "pDisplayPowerInfo->pNext", NULL, pDisplayPowerInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDisplayPowerInfoEXT-pNext-pNext");
+
+        skip |= validate_ranged_enum("vkDisplayPowerControlEXT", "pDisplayPowerInfo->powerState", "VkDisplayPowerStateEXT", AllVkDisplayPowerStateEXTEnums, pDisplayPowerInfo->powerState, "VUID-VkDisplayPowerInfoEXT-powerState-parameter");
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateRegisterDeviceEventEXT(
+    VkDevice                                    device,
+    const VkDeviceEventInfoEXT*                 pDeviceEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkRegisterDeviceEventEXT", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_display_surface_counter) skip |= OutputExtensionError("vkRegisterDeviceEventEXT", VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_display_control) skip |= OutputExtensionError("vkRegisterDeviceEventEXT", VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME);
+    skip |= validate_struct_type("vkRegisterDeviceEventEXT", "pDeviceEventInfo", "VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT", pDeviceEventInfo, VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT, true, "VUID-vkRegisterDeviceEventEXT-pDeviceEventInfo-parameter", "VUID-VkDeviceEventInfoEXT-sType-sType");
+    if (pDeviceEventInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkRegisterDeviceEventEXT", "pDeviceEventInfo->pNext", NULL, pDeviceEventInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDeviceEventInfoEXT-pNext-pNext");
+
+        skip |= validate_ranged_enum("vkRegisterDeviceEventEXT", "pDeviceEventInfo->deviceEvent", "VkDeviceEventTypeEXT", AllVkDeviceEventTypeEXTEnums, pDeviceEventInfo->deviceEvent, "VUID-VkDeviceEventInfoEXT-deviceEvent-parameter");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkRegisterDeviceEventEXT", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkRegisterDeviceEventEXT", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkRegisterDeviceEventEXT", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkRegisterDeviceEventEXT", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkRegisterDeviceEventEXT", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkRegisterDeviceEventEXT", "pFence", pFence, "VUID-vkRegisterDeviceEventEXT-pFence-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateRegisterDisplayEventEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayEventInfoEXT*                pDisplayEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkRegisterDisplayEventEXT", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_display_surface_counter) skip |= OutputExtensionError("vkRegisterDisplayEventEXT", VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_display_control) skip |= OutputExtensionError("vkRegisterDisplayEventEXT", VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME);
+    skip |= validate_required_handle("vkRegisterDisplayEventEXT", "display", display);
+    skip |= validate_struct_type("vkRegisterDisplayEventEXT", "pDisplayEventInfo", "VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT", pDisplayEventInfo, VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT, true, "VUID-vkRegisterDisplayEventEXT-pDisplayEventInfo-parameter", "VUID-VkDisplayEventInfoEXT-sType-sType");
+    if (pDisplayEventInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkRegisterDisplayEventEXT", "pDisplayEventInfo->pNext", NULL, pDisplayEventInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDisplayEventInfoEXT-pNext-pNext");
+
+        skip |= validate_ranged_enum("vkRegisterDisplayEventEXT", "pDisplayEventInfo->displayEvent", "VkDisplayEventTypeEXT", AllVkDisplayEventTypeEXTEnums, pDisplayEventInfo->displayEvent, "VUID-VkDisplayEventInfoEXT-displayEvent-parameter");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkRegisterDisplayEventEXT", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkRegisterDisplayEventEXT", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkRegisterDisplayEventEXT", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkRegisterDisplayEventEXT", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkRegisterDisplayEventEXT", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkRegisterDisplayEventEXT", "pFence", pFence, "VUID-vkRegisterDisplayEventEXT-pFence-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetSwapchainCounterEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkSurfaceCounterFlagBitsEXT                 counter,
+    uint64_t*                                   pCounterValue) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkGetSwapchainCounterEXT", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_display_surface_counter) skip |= OutputExtensionError("vkGetSwapchainCounterEXT", VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_display_control) skip |= OutputExtensionError("vkGetSwapchainCounterEXT", VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME);
+    skip |= validate_required_handle("vkGetSwapchainCounterEXT", "swapchain", swapchain);
+    skip |= validate_flags("vkGetSwapchainCounterEXT", "counter", "VkSurfaceCounterFlagBitsEXT", AllVkSurfaceCounterFlagBitsEXT, counter, kRequiredSingleBit, "VUID-vkGetSwapchainCounterEXT-counter-parameter", "VUID-vkGetSwapchainCounterEXT-counter-parameter");
+    skip |= validate_required_pointer("vkGetSwapchainCounterEXT", "pCounterValue", pCounterValue, "VUID-vkGetSwapchainCounterEXT-pCounterValue-parameter");
+    return skip;
+}
+
+
+
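+// VK_GOOGLE_display_timing: parameter validation for refresh cycle duration and past presentation timing queries.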
+bool StatelessValidation::PreCallValidateGetRefreshCycleDurationGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkRefreshCycleDurationGOOGLE*               pDisplayTimingProperties) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkGetRefreshCycleDurationGOOGLE", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+    if (!device_extensions.vk_google_display_timing) skip |= OutputExtensionError("vkGetRefreshCycleDurationGOOGLE", VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME);
+    skip |= validate_required_handle("vkGetRefreshCycleDurationGOOGLE", "swapchain", swapchain);
+    skip |= validate_required_pointer("vkGetRefreshCycleDurationGOOGLE", "pDisplayTimingProperties", pDisplayTimingProperties, "VUID-vkGetRefreshCycleDurationGOOGLE-pDisplayTimingProperties-parameter");
+    if (pDisplayTimingProperties != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPastPresentationTimingGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pPresentationTimingCount,
+    VkPastPresentationTimingGOOGLE*             pPresentationTimings) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkGetPastPresentationTimingGOOGLE", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+    if (!device_extensions.vk_google_display_timing) skip |= OutputExtensionError("vkGetPastPresentationTimingGOOGLE", VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME);
+    skip |= validate_required_handle("vkGetPastPresentationTimingGOOGLE", "swapchain", swapchain);
+    skip |= validate_array("vkGetPastPresentationTimingGOOGLE", "pPresentationTimingCount", "pPresentationTimings", pPresentationTimingCount, &pPresentationTimings, true, false, false, kVUIDUndefined, "VUID-vkGetPastPresentationTimingGOOGLE-pPresentationTimings-parameter");
+    if (pPresentationTimings != NULL)
+    {
+        for (uint32_t pPresentationTimingIndex = 0; pPresentationTimingIndex < *pPresentationTimingCount; ++pPresentationTimingIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    return skip;
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
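+// VK_EXT_discard_rectangles: parameter validation for vkCmdSetDiscardRectangleEXT.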
+bool StatelessValidation::PreCallValidateCmdSetDiscardRectangleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstDiscardRectangle,
+    uint32_t                                    discardRectangleCount,
+    const VkRect2D*                             pDiscardRectangles) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdSetDiscardRectangleEXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_discard_rectangles) skip |= OutputExtensionError("vkCmdSetDiscardRectangleEXT", VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME);
+    skip |= validate_array("vkCmdSetDiscardRectangleEXT", "discardRectangleCount", "pDiscardRectangles", discardRectangleCount, &pDiscardRectangles, true, true, "VUID-vkCmdSetDiscardRectangleEXT-discardRectangleCount-arraylength", "VUID-vkCmdSetDiscardRectangleEXT-pDiscardRectangles-parameter");
+    if (pDiscardRectangles != NULL)
+    {
+        for (uint32_t discardRectangleIndex = 0; discardRectangleIndex < discardRectangleCount; ++discardRectangleIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    return skip;
+}
+
+
+
+
+
+
+
+
+
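+// VK_EXT_hdr_metadata: parameter validation for vkSetHdrMetadataEXT (swapchain handle array and VkHdrMetadataEXT struct array).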
+bool StatelessValidation::PreCallValidateSetHdrMetadataEXT(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainKHR*                       pSwapchains,
+    const VkHdrMetadataEXT*                     pMetadata) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkSetHdrMetadataEXT", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_hdr_metadata) skip |= OutputExtensionError("vkSetHdrMetadataEXT", VK_EXT_HDR_METADATA_EXTENSION_NAME);
+    skip |= validate_handle_array("vkSetHdrMetadataEXT", "swapchainCount", "pSwapchains", swapchainCount, pSwapchains, true, true);
+    skip |= validate_struct_type_array("vkSetHdrMetadataEXT", "swapchainCount", "pMetadata", "VK_STRUCTURE_TYPE_HDR_METADATA_EXT", swapchainCount, pMetadata, VK_STRUCTURE_TYPE_HDR_METADATA_EXT, true, true, "VUID-VkHdrMetadataEXT-sType-sType", "VUID-vkSetHdrMetadataEXT-pMetadata-parameter", "VUID-vkSetHdrMetadataEXT-swapchainCount-arraylength");
+    if (pMetadata != NULL)
+    {
+        for (uint32_t swapchainIndex = 0; swapchainIndex < swapchainCount; ++swapchainIndex)
+        {
+            skip |= validate_struct_pnext("vkSetHdrMetadataEXT", ParameterName("pMetadata[%i].pNext", ParameterName::IndexVector{ swapchainIndex }), NULL, pMetadata[swapchainIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkHdrMetadataEXT-pNext-pNext");
+        }
+    }
+    return skip;
+}
+
+
+
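+// MoltenVK surface creation (VK_MVK_ios_surface / VK_MVK_macos_surface), compiled only for the matching platform.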
+#ifdef VK_USE_PLATFORM_IOS_MVK
+
+bool StatelessValidation::PreCallValidateCreateIOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkIOSSurfaceCreateInfoMVK*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateIOSSurfaceMVK", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!instance_extensions.vk_mvk_ios_surface) skip |= OutputExtensionError("vkCreateIOSSurfaceMVK", VK_MVK_IOS_SURFACE_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCreateIOSSurfaceMVK", "pCreateInfo", "VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK", pCreateInfo, VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK, true, "VUID-vkCreateIOSSurfaceMVK-pCreateInfo-parameter", "VUID-VkIOSSurfaceCreateInfoMVK-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCreateIOSSurfaceMVK", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkIOSSurfaceCreateInfoMVK-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkCreateIOSSurfaceMVK", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkIOSSurfaceCreateInfoMVK-flags-zerobitmask");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateIOSSurfaceMVK", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateIOSSurfaceMVK", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateIOSSurfaceMVK", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateIOSSurfaceMVK", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateIOSSurfaceMVK", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateIOSSurfaceMVK", "pSurface", pSurface, "VUID-vkCreateIOSSurfaceMVK-pSurface-parameter");
+    return skip;
+}
+
+#endif // VK_USE_PLATFORM_IOS_MVK
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+
+bool StatelessValidation::PreCallValidateCreateMacOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkMacOSSurfaceCreateInfoMVK*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateMacOSSurfaceMVK", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!instance_extensions.vk_mvk_macos_surface) skip |= OutputExtensionError("vkCreateMacOSSurfaceMVK", VK_MVK_MACOS_SURFACE_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCreateMacOSSurfaceMVK", "pCreateInfo", "VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK", pCreateInfo, VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK, true, "VUID-vkCreateMacOSSurfaceMVK-pCreateInfo-parameter", "VUID-VkMacOSSurfaceCreateInfoMVK-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCreateMacOSSurfaceMVK", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkMacOSSurfaceCreateInfoMVK-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkCreateMacOSSurfaceMVK", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkMacOSSurfaceCreateInfoMVK-flags-zerobitmask");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateMacOSSurfaceMVK", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateMacOSSurfaceMVK", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateMacOSSurfaceMVK", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateMacOSSurfaceMVK", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateMacOSSurfaceMVK", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateMacOSSurfaceMVK", "pSurface", pSurface, "VUID-vkCreateMacOSSurfaceMVK-pSurface-parameter");
+    return skip;
+}
+
+#endif // VK_USE_PLATFORM_MACOS_MVK
+
+
+
+
+
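+// VK_EXT_debug_utils: parameter validation for object naming/tagging, queue and command buffer labels, messenger creation/destruction, and message submission.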
+bool StatelessValidation::PreCallValidateSetDebugUtilsObjectNameEXT(
+    VkDevice                                    device,
+    const VkDebugUtilsObjectNameInfoEXT*        pNameInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_ext_debug_utils) skip |= OutputExtensionError("vkSetDebugUtilsObjectNameEXT", VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
+    skip |= validate_struct_type("vkSetDebugUtilsObjectNameEXT", "pNameInfo", "VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT", pNameInfo, VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT, true, "VUID-vkSetDebugUtilsObjectNameEXT-pNameInfo-parameter", "VUID-VkDebugUtilsObjectNameInfoEXT-sType-sType");
+    if (pNameInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkSetDebugUtilsObjectNameEXT", "pNameInfo->pNext", NULL, pNameInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDebugUtilsObjectNameInfoEXT-pNext-pNext");
+
+        skip |= validate_ranged_enum("vkSetDebugUtilsObjectNameEXT", "pNameInfo->objectType", "VkObjectType", AllVkObjectTypeEnums, pNameInfo->objectType, "VUID-VkDebugUtilsObjectNameInfoEXT-objectType-parameter");
+    }
+    if (!skip) skip |= manual_PreCallValidateSetDebugUtilsObjectNameEXT(device, pNameInfo);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateSetDebugUtilsObjectTagEXT(
+    VkDevice                                    device,
+    const VkDebugUtilsObjectTagInfoEXT*         pTagInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_ext_debug_utils) skip |= OutputExtensionError("vkSetDebugUtilsObjectTagEXT", VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
+    skip |= validate_struct_type("vkSetDebugUtilsObjectTagEXT", "pTagInfo", "VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT", pTagInfo, VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT, true, "VUID-vkSetDebugUtilsObjectTagEXT-pTagInfo-parameter", "VUID-VkDebugUtilsObjectTagInfoEXT-sType-sType");
+    if (pTagInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkSetDebugUtilsObjectTagEXT", "pTagInfo->pNext", NULL, pTagInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDebugUtilsObjectTagInfoEXT-pNext-pNext");
+
+        skip |= validate_ranged_enum("vkSetDebugUtilsObjectTagEXT", "pTagInfo->objectType", "VkObjectType", AllVkObjectTypeEnums, pTagInfo->objectType, "VUID-VkDebugUtilsObjectTagInfoEXT-objectType-parameter");
+
+        skip |= validate_array("vkSetDebugUtilsObjectTagEXT", "pTagInfo->tagSize", "pTagInfo->pTag", pTagInfo->tagSize, &pTagInfo->pTag, true, true, "VUID-VkDebugUtilsObjectTagInfoEXT-tagSize-arraylength", "VUID-VkDebugUtilsObjectTagInfoEXT-pTag-parameter");
+    }
+    if (!skip) skip |= manual_PreCallValidateSetDebugUtilsObjectTagEXT(device, pTagInfo);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateQueueBeginDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_ext_debug_utils) skip |= OutputExtensionError("vkQueueBeginDebugUtilsLabelEXT", VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
+    skip |= validate_struct_type("vkQueueBeginDebugUtilsLabelEXT", "pLabelInfo", "VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT", pLabelInfo, VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT, true, "VUID-vkQueueBeginDebugUtilsLabelEXT-pLabelInfo-parameter", "VUID-VkDebugUtilsLabelEXT-sType-sType");
+    if (pLabelInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkQueueBeginDebugUtilsLabelEXT", "pLabelInfo->pNext", NULL, pLabelInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDebugUtilsLabelEXT-pNext-pNext");
+
+        skip |= validate_required_pointer("vkQueueBeginDebugUtilsLabelEXT", "pLabelInfo->pLabelName", pLabelInfo->pLabelName, "VUID-VkDebugUtilsLabelEXT-pLabelName-parameter");
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateQueueEndDebugUtilsLabelEXT(
+    VkQueue                                     queue) const {
+    bool skip = false;
+    if (!device_extensions.vk_ext_debug_utils) skip |= OutputExtensionError("vkQueueEndDebugUtilsLabelEXT", VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
+    // No xml-driven validation
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateQueueInsertDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_ext_debug_utils) skip |= OutputExtensionError("vkQueueInsertDebugUtilsLabelEXT", VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
+    skip |= validate_struct_type("vkQueueInsertDebugUtilsLabelEXT", "pLabelInfo", "VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT", pLabelInfo, VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT, true, "VUID-vkQueueInsertDebugUtilsLabelEXT-pLabelInfo-parameter", "VUID-VkDebugUtilsLabelEXT-sType-sType");
+    if (pLabelInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkQueueInsertDebugUtilsLabelEXT", "pLabelInfo->pNext", NULL, pLabelInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDebugUtilsLabelEXT-pNext-pNext");
+
+        skip |= validate_required_pointer("vkQueueInsertDebugUtilsLabelEXT", "pLabelInfo->pLabelName", pLabelInfo->pLabelName, "VUID-VkDebugUtilsLabelEXT-pLabelName-parameter");
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdBeginDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_ext_debug_utils) skip |= OutputExtensionError("vkCmdBeginDebugUtilsLabelEXT", VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCmdBeginDebugUtilsLabelEXT", "pLabelInfo", "VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT", pLabelInfo, VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT, true, "VUID-vkCmdBeginDebugUtilsLabelEXT-pLabelInfo-parameter", "VUID-VkDebugUtilsLabelEXT-sType-sType");
+    if (pLabelInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCmdBeginDebugUtilsLabelEXT", "pLabelInfo->pNext", NULL, pLabelInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDebugUtilsLabelEXT-pNext-pNext");
+
+        skip |= validate_required_pointer("vkCmdBeginDebugUtilsLabelEXT", "pLabelInfo->pLabelName", pLabelInfo->pLabelName, "VUID-VkDebugUtilsLabelEXT-pLabelName-parameter");
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdEndDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer) const {
+    bool skip = false;
+    if (!device_extensions.vk_ext_debug_utils) skip |= OutputExtensionError("vkCmdEndDebugUtilsLabelEXT", VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
+    // No xml-driven validation
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdInsertDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_ext_debug_utils) skip |= OutputExtensionError("vkCmdInsertDebugUtilsLabelEXT", VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCmdInsertDebugUtilsLabelEXT", "pLabelInfo", "VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT", pLabelInfo, VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT, true, "VUID-vkCmdInsertDebugUtilsLabelEXT-pLabelInfo-parameter", "VUID-VkDebugUtilsLabelEXT-sType-sType");
+    if (pLabelInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCmdInsertDebugUtilsLabelEXT", "pLabelInfo->pNext", NULL, pLabelInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDebugUtilsLabelEXT-pNext-pNext");
+
+        skip |= validate_required_pointer("vkCmdInsertDebugUtilsLabelEXT", "pLabelInfo->pLabelName", pLabelInfo->pLabelName, "VUID-VkDebugUtilsLabelEXT-pLabelName-parameter");
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCreateDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    const VkDebugUtilsMessengerCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugUtilsMessengerEXT*                   pMessenger) const {
+    bool skip = false;
+    if (!instance_extensions.vk_ext_debug_utils) skip |= OutputExtensionError("vkCreateDebugUtilsMessengerEXT", VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCreateDebugUtilsMessengerEXT", "pCreateInfo", "VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT", pCreateInfo, VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT, true, "VUID-vkCreateDebugUtilsMessengerEXT-pCreateInfo-parameter", "VUID-VkDebugUtilsMessengerCreateInfoEXT-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCreateDebugUtilsMessengerEXT", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, kVUIDUndefined);
+
+        skip |= validate_reserved_flags("vkCreateDebugUtilsMessengerEXT", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkDebugUtilsMessengerCreateInfoEXT-flags-zerobitmask");
+
+        skip |= validate_flags("vkCreateDebugUtilsMessengerEXT", "pCreateInfo->messageSeverity", "VkDebugUtilsMessageSeverityFlagBitsEXT", AllVkDebugUtilsMessageSeverityFlagBitsEXT, pCreateInfo->messageSeverity, kRequiredFlags, "VUID-VkDebugUtilsMessengerCreateInfoEXT-messageSeverity-parameter", "VUID-VkDebugUtilsMessengerCreateInfoEXT-messageSeverity-requiredbitmask");
+
+        skip |= validate_flags("vkCreateDebugUtilsMessengerEXT", "pCreateInfo->messageType", "VkDebugUtilsMessageTypeFlagBitsEXT", AllVkDebugUtilsMessageTypeFlagBitsEXT, pCreateInfo->messageType, kRequiredFlags, "VUID-VkDebugUtilsMessengerCreateInfoEXT-messageType-parameter", "VUID-VkDebugUtilsMessengerCreateInfoEXT-messageType-requiredbitmask");
+
+        skip |= validate_required_pointer("vkCreateDebugUtilsMessengerEXT", "pCreateInfo->pfnUserCallback", reinterpret_cast<const void*>(pCreateInfo->pfnUserCallback), "VUID-VkDebugUtilsMessengerCreateInfoEXT-pfnUserCallback-parameter");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateDebugUtilsMessengerEXT", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateDebugUtilsMessengerEXT", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateDebugUtilsMessengerEXT", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateDebugUtilsMessengerEXT", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateDebugUtilsMessengerEXT", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateDebugUtilsMessengerEXT", "pMessenger", pMessenger, "VUID-vkCreateDebugUtilsMessengerEXT-pMessenger-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroyDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessengerEXT                    messenger,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (!instance_extensions.vk_ext_debug_utils) skip |= OutputExtensionError("vkDestroyDebugUtilsMessengerEXT", VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
+    skip |= validate_required_handle("vkDestroyDebugUtilsMessengerEXT", "messenger", messenger);
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroyDebugUtilsMessengerEXT", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroyDebugUtilsMessengerEXT", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroyDebugUtilsMessengerEXT", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyDebugUtilsMessengerEXT", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyDebugUtilsMessengerEXT", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateSubmitDebugUtilsMessageEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessageSeverityFlagBitsEXT      messageSeverity,
+    VkDebugUtilsMessageTypeFlagsEXT             messageTypes,
+    const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData) const {
+    bool skip = false;
+    if (!instance_extensions.vk_ext_debug_utils) skip |= OutputExtensionError("vkSubmitDebugUtilsMessageEXT", VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
+    skip |= validate_flags("vkSubmitDebugUtilsMessageEXT", "messageSeverity", "VkDebugUtilsMessageSeverityFlagBitsEXT", AllVkDebugUtilsMessageSeverityFlagBitsEXT, messageSeverity, kRequiredSingleBit, "VUID-vkSubmitDebugUtilsMessageEXT-messageSeverity-parameter", "VUID-vkSubmitDebugUtilsMessageEXT-messageSeverity-parameter");
+    skip |= validate_flags("vkSubmitDebugUtilsMessageEXT", "messageTypes", "VkDebugUtilsMessageTypeFlagBitsEXT", AllVkDebugUtilsMessageTypeFlagBitsEXT, messageTypes, kRequiredFlags, "VUID-vkSubmitDebugUtilsMessageEXT-messageTypes-parameter", "VUID-vkSubmitDebugUtilsMessageEXT-messageTypes-requiredbitmask");
+    skip |= validate_struct_type("vkSubmitDebugUtilsMessageEXT", "pCallbackData", "VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT", pCallbackData, VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT, true, "VUID-vkSubmitDebugUtilsMessageEXT-pCallbackData-parameter", "VUID-VkDebugUtilsMessengerCallbackDataEXT-sType-sType");
+    if (pCallbackData != NULL)
+    {
+        skip |= validate_struct_pnext("vkSubmitDebugUtilsMessageEXT", "pCallbackData->pNext", NULL, pCallbackData->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDebugUtilsMessengerCallbackDataEXT-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkSubmitDebugUtilsMessageEXT", "pCallbackData->flags", pCallbackData->flags, "VUID-VkDebugUtilsMessengerCallbackDataEXT-flags-zerobitmask");
+
+        skip |= validate_required_pointer("vkSubmitDebugUtilsMessageEXT", "pCallbackData->pMessage", pCallbackData->pMessage, "VUID-VkDebugUtilsMessengerCallbackDataEXT-pMessage-parameter");
+
+        skip |= validate_struct_type_array("vkSubmitDebugUtilsMessageEXT", "pCallbackData->queueLabelCount", "pCallbackData->pQueueLabels", "VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT", pCallbackData->queueLabelCount, pCallbackData->pQueueLabels, VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT, false, true, "VUID-VkDebugUtilsLabelEXT-sType-sType", "VUID-VkDebugUtilsMessengerCallbackDataEXT-pQueueLabels-parameter", kVUIDUndefined);
+
+        if (pCallbackData->pQueueLabels != NULL)
+        {
+            for (uint32_t queueLabelIndex = 0; queueLabelIndex < pCallbackData->queueLabelCount; ++queueLabelIndex)
+            {
+                skip |= validate_struct_pnext("vkSubmitDebugUtilsMessageEXT", ParameterName("pCallbackData->pQueueLabels[%i].pNext", ParameterName::IndexVector{ queueLabelIndex }), NULL, pCallbackData->pQueueLabels[queueLabelIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDebugUtilsLabelEXT-pNext-pNext");
+
+                skip |= validate_required_pointer("vkSubmitDebugUtilsMessageEXT", ParameterName("pCallbackData->pQueueLabels[%i].pLabelName", ParameterName::IndexVector{ queueLabelIndex }), pCallbackData->pQueueLabels[queueLabelIndex].pLabelName, "VUID-VkDebugUtilsLabelEXT-pLabelName-parameter");
+            }
+        }
+
+        skip |= validate_struct_type_array("vkSubmitDebugUtilsMessageEXT", "pCallbackData->cmdBufLabelCount", "pCallbackData->pCmdBufLabels", "VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT", pCallbackData->cmdBufLabelCount, pCallbackData->pCmdBufLabels, VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT, false, true, "VUID-VkDebugUtilsLabelEXT-sType-sType", "VUID-VkDebugUtilsMessengerCallbackDataEXT-pCmdBufLabels-parameter", kVUIDUndefined);
+
+        if (pCallbackData->pCmdBufLabels != NULL)
+        {
+            for (uint32_t cmdBufLabelIndex = 0; cmdBufLabelIndex < pCallbackData->cmdBufLabelCount; ++cmdBufLabelIndex)
+            {
+                skip |= validate_struct_pnext("vkSubmitDebugUtilsMessageEXT", ParameterName("pCallbackData->pCmdBufLabels[%i].pNext", ParameterName::IndexVector{ cmdBufLabelIndex }), NULL, pCallbackData->pCmdBufLabels[cmdBufLabelIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDebugUtilsLabelEXT-pNext-pNext");
+
+                skip |= validate_required_pointer("vkSubmitDebugUtilsMessageEXT", ParameterName("pCallbackData->pCmdBufLabels[%i].pLabelName", ParameterName::IndexVector{ cmdBufLabelIndex }), pCallbackData->pCmdBufLabels[cmdBufLabelIndex].pLabelName, "VUID-VkDebugUtilsLabelEXT-pLabelName-parameter");
+            }
+        }
+
+        skip |= validate_struct_type_array("vkSubmitDebugUtilsMessageEXT", "pCallbackData->objectCount", "pCallbackData->pObjects", "VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT", pCallbackData->objectCount, pCallbackData->pObjects, VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT, false, true, "VUID-VkDebugUtilsObjectNameInfoEXT-sType-sType", "VUID-VkDebugUtilsMessengerCallbackDataEXT-pObjects-parameter", kVUIDUndefined);
+
+        if (pCallbackData->pObjects != NULL)
+        {
+            for (uint32_t objectIndex = 0; objectIndex < pCallbackData->objectCount; ++objectIndex)
+            {
+                skip |= validate_struct_pnext("vkSubmitDebugUtilsMessageEXT", ParameterName("pCallbackData->pObjects[%i].pNext", ParameterName::IndexVector{ objectIndex }), NULL, pCallbackData->pObjects[objectIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkDebugUtilsObjectNameInfoEXT-pNext-pNext");
+
+                skip |= validate_ranged_enum("vkSubmitDebugUtilsMessageEXT", ParameterName("pCallbackData->pObjects[%i].objectType", ParameterName::IndexVector{ objectIndex }), "VkObjectType", AllVkObjectTypeEnums, pCallbackData->pObjects[objectIndex].objectType, "VUID-VkDebugUtilsObjectNameInfoEXT-objectType-parameter");
+            }
+        }
+    }
+    return skip;
+}
+
+
+
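+// VK_ANDROID_external_memory_android_hardware_buffer: parameter validation for AHardwareBuffer property and memory export queries (Android builds only).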
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+bool StatelessValidation::PreCallValidateGetAndroidHardwareBufferPropertiesANDROID(
+    VkDevice                                    device,
+    const struct AHardwareBuffer*               buffer,
+    VkAndroidHardwareBufferPropertiesANDROID*   pProperties) const {
+    bool skip = false;
+    if (!device_extensions.vk_ext_queue_family_foreign) skip |= OutputExtensionError("vkGetAndroidHardwareBufferPropertiesANDROID", VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_external_memory) skip |= OutputExtensionError("vkGetAndroidHardwareBufferPropertiesANDROID", VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_sampler_ycbcr_conversion) skip |= OutputExtensionError("vkGetAndroidHardwareBufferPropertiesANDROID", VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+    if (!device_extensions.vk_android_external_memory_android_hardware_buffer) skip |= OutputExtensionError("vkGetAndroidHardwareBufferPropertiesANDROID", VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+    skip |= validate_required_pointer("vkGetAndroidHardwareBufferPropertiesANDROID", "buffer", buffer, "VUID-vkGetAndroidHardwareBufferPropertiesANDROID-buffer-parameter");
+    skip |= validate_struct_type("vkGetAndroidHardwareBufferPropertiesANDROID", "pProperties", "VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID", pProperties, VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID, true, "VUID-vkGetAndroidHardwareBufferPropertiesANDROID-pProperties-parameter", "VUID-VkAndroidHardwareBufferPropertiesANDROID-sType-sType");
+    if (pProperties != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetMemoryAndroidHardwareBufferANDROID(
+    VkDevice                                    device,
+    const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
+    struct AHardwareBuffer**                    pBuffer) const {
+    bool skip = false;
+    if (!device_extensions.vk_ext_queue_family_foreign) skip |= OutputExtensionError("vkGetMemoryAndroidHardwareBufferANDROID", VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_external_memory) skip |= OutputExtensionError("vkGetMemoryAndroidHardwareBufferANDROID", VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_sampler_ycbcr_conversion) skip |= OutputExtensionError("vkGetMemoryAndroidHardwareBufferANDROID", VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+    if (!device_extensions.vk_android_external_memory_android_hardware_buffer) skip |= OutputExtensionError("vkGetMemoryAndroidHardwareBufferANDROID", VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetMemoryAndroidHardwareBufferANDROID", "pInfo", "VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID", pInfo, VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID, true, "VUID-vkGetMemoryAndroidHardwareBufferANDROID-pInfo-parameter", "VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-sType-sType");
+    if (pInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetMemoryAndroidHardwareBufferANDROID", "pInfo->pNext", NULL, pInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-pNext-pNext");
+
+        skip |= validate_required_handle("vkGetMemoryAndroidHardwareBufferANDROID", "pInfo->memory", pInfo->memory);
+    }
+    skip |= validate_required_pointer("vkGetMemoryAndroidHardwareBufferANDROID", "pBuffer", pBuffer, "VUID-vkGetMemoryAndroidHardwareBufferANDROID-pBuffer-parameter");
+    return skip;
+}
+
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+
+
+
+
+
+
+
+
+
+
+
+
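+// VK_EXT_sample_locations: parameter validation for vkCmdSetSampleLocationsEXT and vkGetPhysicalDeviceMultisamplePropertiesEXT.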
+bool StatelessValidation::PreCallValidateCmdSetSampleLocationsEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkSampleLocationsInfoEXT*             pSampleLocationsInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdSetSampleLocationsEXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_sample_locations) skip |= OutputExtensionError("vkCmdSetSampleLocationsEXT", VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCmdSetSampleLocationsEXT", "pSampleLocationsInfo", "VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT", pSampleLocationsInfo, VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT, true, "VUID-vkCmdSetSampleLocationsEXT-pSampleLocationsInfo-parameter", "VUID-VkSampleLocationsInfoEXT-sType-sType");
+    if (pSampleLocationsInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCmdSetSampleLocationsEXT", "pSampleLocationsInfo->pNext", NULL, pSampleLocationsInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, kVUIDUndefined);
+
+        skip |= validate_flags("vkCmdSetSampleLocationsEXT", "pSampleLocationsInfo->sampleLocationsPerPixel", "VkSampleCountFlagBits", AllVkSampleCountFlagBits, pSampleLocationsInfo->sampleLocationsPerPixel, kOptionalSingleBit, "VUID-VkSampleLocationsInfoEXT-sampleLocationsPerPixel-parameter");
+
+        // No xml-driven validation
+
+        skip |= validate_array("vkCmdSetSampleLocationsEXT", "pSampleLocationsInfo->sampleLocationsCount", "pSampleLocationsInfo->pSampleLocations", pSampleLocationsInfo->sampleLocationsCount, &pSampleLocationsInfo->pSampleLocations, false, true, kVUIDUndefined, "VUID-VkSampleLocationsInfoEXT-pSampleLocations-parameter");
+
+        if (pSampleLocationsInfo->pSampleLocations != NULL)
+        {
+            for (uint32_t sampleLocationsIndex = 0; sampleLocationsIndex < pSampleLocationsInfo->sampleLocationsCount; ++sampleLocationsIndex)
+            {
+                // No xml-driven validation
+            }
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceMultisamplePropertiesEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSampleCountFlagBits                       samples,
+    VkMultisamplePropertiesEXT*                 pMultisampleProperties) const {
+    bool skip = false;
+    skip |= validate_flags("vkGetPhysicalDeviceMultisamplePropertiesEXT", "samples", "VkSampleCountFlagBits", AllVkSampleCountFlagBits, samples, kRequiredSingleBit, "VUID-vkGetPhysicalDeviceMultisamplePropertiesEXT-samples-parameter", "VUID-vkGetPhysicalDeviceMultisamplePropertiesEXT-samples-parameter");
+    skip |= validate_struct_type("vkGetPhysicalDeviceMultisamplePropertiesEXT", "pMultisampleProperties", "VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT", pMultisampleProperties, VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT, true, "VUID-vkGetPhysicalDeviceMultisamplePropertiesEXT-pMultisampleProperties-parameter", "VUID-VkMultisamplePropertiesEXT-sType-sType");
+    if (pMultisampleProperties != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
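+// VK_EXT_image_drm_format_modifier: parameter validation for vkGetImageDrmFormatModifierPropertiesEXT.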
+bool StatelessValidation::PreCallValidateGetImageDrmFormatModifierPropertiesEXT(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkImageDrmFormatModifierPropertiesEXT*      pProperties) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_sampler_ycbcr_conversion) skip |= OutputExtensionError("vkGetImageDrmFormatModifierPropertiesEXT", VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_image_format_list) skip |= OutputExtensionError("vkGetImageDrmFormatModifierPropertiesEXT", VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetImageDrmFormatModifierPropertiesEXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_bind_memory_2) skip |= OutputExtensionError("vkGetImageDrmFormatModifierPropertiesEXT", VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_image_drm_format_modifier) skip |= OutputExtensionError("vkGetImageDrmFormatModifierPropertiesEXT", VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME);
+    skip |= validate_required_handle("vkGetImageDrmFormatModifierPropertiesEXT", "image", image);
+    skip |= validate_struct_type("vkGetImageDrmFormatModifierPropertiesEXT", "pProperties", "VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT", pProperties, VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, true, "VUID-vkGetImageDrmFormatModifierPropertiesEXT-pProperties-parameter", "VUID-VkImageDrmFormatModifierPropertiesEXT-sType-sType");
+    if (pProperties != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+
+
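+// VK_EXT_validation_cache: parameter validation for creating, destroying, merging, and reading validation caches.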
+bool StatelessValidation::PreCallValidateCreateValidationCacheEXT(
+    VkDevice                                    device,
+    const VkValidationCacheCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkValidationCacheEXT*                       pValidationCache) const {
+    bool skip = false;
+    if (!device_extensions.vk_ext_validation_cache) skip |= OutputExtensionError("vkCreateValidationCacheEXT", VK_EXT_VALIDATION_CACHE_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCreateValidationCacheEXT", "pCreateInfo", "VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT", pCreateInfo, VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT, true, "VUID-vkCreateValidationCacheEXT-pCreateInfo-parameter", "VUID-VkValidationCacheCreateInfoEXT-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCreateValidationCacheEXT", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkValidationCacheCreateInfoEXT-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkCreateValidationCacheEXT", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkValidationCacheCreateInfoEXT-flags-zerobitmask");
+
+        skip |= validate_array("vkCreateValidationCacheEXT", "pCreateInfo->initialDataSize", "pCreateInfo->pInitialData", pCreateInfo->initialDataSize, &pCreateInfo->pInitialData, false, true, kVUIDUndefined, "VUID-VkValidationCacheCreateInfoEXT-pInitialData-parameter");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateValidationCacheEXT", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateValidationCacheEXT", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateValidationCacheEXT", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateValidationCacheEXT", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateValidationCacheEXT", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateValidationCacheEXT", "pValidationCache", pValidationCache, "VUID-vkCreateValidationCacheEXT-pValidationCache-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroyValidationCacheEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (!device_extensions.vk_ext_validation_cache) skip |= OutputExtensionError("vkDestroyValidationCacheEXT", VK_EXT_VALIDATION_CACHE_EXTENSION_NAME);
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroyValidationCacheEXT", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroyValidationCacheEXT", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroyValidationCacheEXT", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyValidationCacheEXT", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyValidationCacheEXT", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateMergeValidationCachesEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkValidationCacheEXT*                 pSrcCaches) const {
+    bool skip = false;
+    if (!device_extensions.vk_ext_validation_cache) skip |= OutputExtensionError("vkMergeValidationCachesEXT", VK_EXT_VALIDATION_CACHE_EXTENSION_NAME);
+    skip |= validate_required_handle("vkMergeValidationCachesEXT", "dstCache", dstCache);
+    skip |= validate_handle_array("vkMergeValidationCachesEXT", "srcCacheCount", "pSrcCaches", srcCacheCount, pSrcCaches, true, true);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetValidationCacheDataEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    size_t*                                     pDataSize,
+    void*                                       pData) const {
+    bool skip = false;
+    if (!device_extensions.vk_ext_validation_cache) skip |= OutputExtensionError("vkGetValidationCacheDataEXT", VK_EXT_VALIDATION_CACHE_EXTENSION_NAME);
+    skip |= validate_required_handle("vkGetValidationCacheDataEXT", "validationCache", validationCache);
+    skip |= validate_array("vkGetValidationCacheDataEXT", "pDataSize", "pData", pDataSize, &pData, true, false, false, kVUIDUndefined, "VUID-vkGetValidationCacheDataEXT-pData-parameter");
+    return skip;
+}
+
+
+
+
+
+
+
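+// VK_NV_shading_rate_image: parameter validation for shading rate image binding, viewport shading rate palettes, and coarse sample ordering.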
+bool StatelessValidation::PreCallValidateCmdBindShadingRateImageNV(
+    VkCommandBuffer                             commandBuffer,
+    VkImageView                                 imageView,
+    VkImageLayout                               imageLayout) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdBindShadingRateImageNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_nv_shading_rate_image) skip |= OutputExtensionError("vkCmdBindShadingRateImageNV", VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME);
+    skip |= validate_ranged_enum("vkCmdBindShadingRateImageNV", "imageLayout", "VkImageLayout", AllVkImageLayoutEnums, imageLayout, "VUID-vkCmdBindShadingRateImageNV-imageLayout-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdSetViewportShadingRatePaletteNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkShadingRatePaletteNV*               pShadingRatePalettes) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdSetViewportShadingRatePaletteNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_nv_shading_rate_image) skip |= OutputExtensionError("vkCmdSetViewportShadingRatePaletteNV", VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME);
+    skip |= validate_array("vkCmdSetViewportShadingRatePaletteNV", "viewportCount", "pShadingRatePalettes", viewportCount, &pShadingRatePalettes, true, true, "VUID-vkCmdSetViewportShadingRatePaletteNV-viewportCount-arraylength", "VUID-vkCmdSetViewportShadingRatePaletteNV-pShadingRatePalettes-parameter");
+    if (pShadingRatePalettes != NULL)
+    {
+        for (uint32_t viewportIndex = 0; viewportIndex < viewportCount; ++viewportIndex)
+        {
+            skip |= validate_ranged_enum_array("vkCmdSetViewportShadingRatePaletteNV", ParameterName("pShadingRatePalettes[%i].shadingRatePaletteEntryCount", ParameterName::IndexVector{ viewportIndex }), ParameterName("pShadingRatePalettes[%i].pShadingRatePaletteEntries", ParameterName::IndexVector{ viewportIndex }), "VkShadingRatePaletteEntryNV", AllVkShadingRatePaletteEntryNVEnums, pShadingRatePalettes[viewportIndex].shadingRatePaletteEntryCount, pShadingRatePalettes[viewportIndex].pShadingRatePaletteEntries, true, true);
+        }
+    }
+    if (!skip) skip |= manual_PreCallValidateCmdSetViewportShadingRatePaletteNV(commandBuffer, firstViewport, viewportCount, pShadingRatePalettes);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdSetCoarseSampleOrderNV(
+    VkCommandBuffer                             commandBuffer,
+    VkCoarseSampleOrderTypeNV                   sampleOrderType,
+    uint32_t                                    customSampleOrderCount,
+    const VkCoarseSampleOrderCustomNV*          pCustomSampleOrders) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdSetCoarseSampleOrderNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_nv_shading_rate_image) skip |= OutputExtensionError("vkCmdSetCoarseSampleOrderNV", VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME);
+    skip |= validate_ranged_enum("vkCmdSetCoarseSampleOrderNV", "sampleOrderType", "VkCoarseSampleOrderTypeNV", AllVkCoarseSampleOrderTypeNVEnums, sampleOrderType, "VUID-vkCmdSetCoarseSampleOrderNV-sampleOrderType-parameter");
+    skip |= validate_array("vkCmdSetCoarseSampleOrderNV", "customSampleOrderCount", "pCustomSampleOrders", customSampleOrderCount, &pCustomSampleOrders, false, true, kVUIDUndefined, "VUID-vkCmdSetCoarseSampleOrderNV-pCustomSampleOrders-parameter");
+    if (pCustomSampleOrders != NULL)
+    {
+        for (uint32_t customSampleOrderIndex = 0; customSampleOrderIndex < customSampleOrderCount; ++customSampleOrderIndex)
+        {
+            skip |= validate_ranged_enum("vkCmdSetCoarseSampleOrderNV", ParameterName("pCustomSampleOrders[%i].shadingRate", ParameterName::IndexVector{ customSampleOrderIndex }), "VkShadingRatePaletteEntryNV", AllVkShadingRatePaletteEntryNVEnums, pCustomSampleOrders[customSampleOrderIndex].shadingRate, "VUID-VkCoarseSampleOrderCustomNV-shadingRate-parameter");
+
+            skip |= validate_array("vkCmdSetCoarseSampleOrderNV", ParameterName("pCustomSampleOrders[%i].sampleLocationCount", ParameterName::IndexVector{ customSampleOrderIndex }), ParameterName("pCustomSampleOrders[%i].pSampleLocations", ParameterName::IndexVector{ customSampleOrderIndex }), pCustomSampleOrders[customSampleOrderIndex].sampleLocationCount, &pCustomSampleOrders[customSampleOrderIndex].pSampleLocations, true, true, "VUID-VkCoarseSampleOrderCustomNV-sampleLocationCount-arraylength", "VUID-VkCoarseSampleOrderCustomNV-pSampleLocations-parameter");
+
+            if (pCustomSampleOrders[customSampleOrderIndex].pSampleLocations != NULL)
+            {
+                for (uint32_t sampleLocationIndex = 0; sampleLocationIndex < pCustomSampleOrders[customSampleOrderIndex].sampleLocationCount; ++sampleLocationIndex)
+                {
+                    // No xml-driven validation
+                }
+            }
+        }
+    }
+    if (!skip) skip |= manual_PreCallValidateCmdSetCoarseSampleOrderNV(commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders);
+    return skip;
+}
+
+
+
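+// VK_NV_ray_tracing: parameter validation for acceleration structure creation.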
+bool StatelessValidation::PreCallValidateCreateAccelerationStructureNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureCreateInfoNV*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkAccelerationStructureNV*                  pAccelerationStructure) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkCreateAccelerationStructureNV", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCreateAccelerationStructureNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_nv_ray_tracing) skip |= OutputExtensionError("vkCreateAccelerationStructureNV", VK_NV_RAY_TRACING_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCreateAccelerationStructureNV", "pCreateInfo", "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV", pCreateInfo, VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV, true, "VUID-vkCreateAccelerationStructureNV-pCreateInfo-parameter", "VUID-VkAccelerationStructureCreateInfoNV-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCreateAccelerationStructureNV", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkAccelerationStructureCreateInfoNV-pNext-pNext");
+
+        skip |= validate_struct_type("vkCreateAccelerationStructureNV", "pCreateInfo->info", "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV", &(pCreateInfo->info), VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV, false, kVUIDUndefined, "VUID-VkAccelerationStructureInfoNV-sType-sType");
+
+        skip |= validate_struct_pnext("vkCreateAccelerationStructureNV", "pCreateInfo->info.pNext", NULL, pCreateInfo->info.pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkAccelerationStructureInfoNV-pNext-pNext");
+
+        skip |= validate_ranged_enum("vkCreateAccelerationStructureNV", "pCreateInfo->info.type", "VkAccelerationStructureTypeNV", AllVkAccelerationStructureTypeNVEnums, pCreateInfo->info.type, "VUID-VkAccelerationStructureInfoNV-type-parameter");
+
+        skip |= validate_flags("vkCreateAccelerationStructureNV", "pCreateInfo->info.flags", "VkBuildAccelerationStructureFlagBitsNV", AllVkBuildAccelerationStructureFlagBitsNV, pCreateInfo->info.flags, kOptionalFlags, "VUID-VkAccelerationStructureInfoNV-flags-parameter");
+
+        skip |= validate_struct_type_array("vkCreateAccelerationStructureNV", "pCreateInfo->info.geometryCount", "pCreateInfo->info.pGeometries", "VK_STRUCTURE_TYPE_GEOMETRY_NV", pCreateInfo->info.geometryCount, pCreateInfo->info.pGeometries, VK_STRUCTURE_TYPE_GEOMETRY_NV, false, true, "VUID-VkGeometryNV-sType-sType", "VUID-VkAccelerationStructureInfoNV-pGeometries-parameter", kVUIDUndefined);
+
+        if (pCreateInfo->info.pGeometries != NULL)
+        {
+            for (uint32_t geometryIndex = 0; geometryIndex < pCreateInfo->info.geometryCount; ++geometryIndex)
+            {
+                skip |= validate_struct_pnext("vkCreateAccelerationStructureNV", ParameterName("pCreateInfo->info.pGeometries[%i].pNext", ParameterName::IndexVector{ geometryIndex }), NULL, pCreateInfo->info.pGeometries[geometryIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkGeometryNV-pNext-pNext");
+
+                skip |= validate_ranged_enum("vkCreateAccelerationStructureNV", ParameterName("pCreateInfo->info.pGeometries[%i].geometryType", ParameterName::IndexVector{ geometryIndex }), "VkGeometryTypeNV", AllVkGeometryTypeNVEnums, pCreateInfo->info.pGeometries[geometryIndex].geometryType, "VUID-VkGeometryNV-geometryType-parameter");
+
+                skip |= validate_struct_type("vkCreateAccelerationStructureNV", ParameterName("pCreateInfo->info.pGeometries[%i].geometry.triangles", ParameterName::IndexVector{ geometryIndex }), "VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV", &(pCreateInfo->info.pGeometries[geometryIndex].geometry.triangles), VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV, false, kVUIDUndefined, "VUID-VkGeometryTrianglesNV-sType-sType");
+
+                skip |= validate_struct_pnext("vkCreateAccelerationStructureNV", ParameterName("pCreateInfo->info.pGeometries[%i].geometry.triangles.pNext", ParameterName::IndexVector{ geometryIndex }), NULL, pCreateInfo->info.pGeometries[geometryIndex].geometry.triangles.pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkGeometryTrianglesNV-pNext-pNext");
+
+                skip |= validate_ranged_enum("vkCreateAccelerationStructureNV", ParameterName("pCreateInfo->info.pGeometries[%i].geometry.triangles.vertexFormat", ParameterName::IndexVector{ geometryIndex }), "VkFormat", AllVkFormatEnums, pCreateInfo->info.pGeometries[geometryIndex].geometry.triangles.vertexFormat, "VUID-VkGeometryTrianglesNV-vertexFormat-parameter");
+
+                skip |= validate_ranged_enum("vkCreateAccelerationStructureNV", ParameterName("pCreateInfo->info.pGeometries[%i].geometry.triangles.indexType", ParameterName::IndexVector{ geometryIndex }), "VkIndexType", AllVkIndexTypeEnums, pCreateInfo->info.pGeometries[geometryIndex].geometry.triangles.indexType, "VUID-VkGeometryTrianglesNV-indexType-parameter");
+
+                skip |= validate_struct_type("vkCreateAccelerationStructureNV", ParameterName("pCreateInfo->info.pGeometries[%i].geometry.aabbs", ParameterName::IndexVector{ geometryIndex }), "VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV", &(pCreateInfo->info.pGeometries[geometryIndex].geometry.aabbs), VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV, false, kVUIDUndefined, "VUID-VkGeometryAABBNV-sType-sType");
+
+                skip |= validate_struct_pnext("vkCreateAccelerationStructureNV", ParameterName("pCreateInfo->info.pGeometries[%i].geometry.aabbs.pNext", ParameterName::IndexVector{ geometryIndex }), NULL, pCreateInfo->info.pGeometries[geometryIndex].geometry.aabbs.pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkGeometryAABBNV-pNext-pNext");
+
+                skip |= validate_flags("vkCreateAccelerationStructureNV", ParameterName("pCreateInfo->info.pGeometries[%i].flags", ParameterName::IndexVector{ geometryIndex }), "VkGeometryFlagBitsNV", AllVkGeometryFlagBitsNV, pCreateInfo->info.pGeometries[geometryIndex].flags, kOptionalFlags, "VUID-VkGeometryNV-flags-parameter");
+            }
+        }
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateAccelerationStructureNV", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateAccelerationStructureNV", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateAccelerationStructureNV", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateAccelerationStructureNV", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateAccelerationStructureNV", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateAccelerationStructureNV", "pAccelerationStructure", pAccelerationStructure, "VUID-vkCreateAccelerationStructureNV-pAccelerationStructure-parameter");
+    if (!skip) skip |= manual_PreCallValidateCreateAccelerationStructureNV(device, pCreateInfo, pAllocator, pAccelerationStructure);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateDestroyAccelerationStructureNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    const VkAllocationCallbacks*                pAllocator) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkDestroyAccelerationStructureNV", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkDestroyAccelerationStructureNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_nv_ray_tracing) skip |= OutputExtensionError("vkDestroyAccelerationStructureNV", VK_NV_RAY_TRACING_EXTENSION_NAME);
+    skip |= validate_required_handle("vkDestroyAccelerationStructureNV", "accelerationStructure", accelerationStructure);
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkDestroyAccelerationStructureNV", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkDestroyAccelerationStructureNV", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkDestroyAccelerationStructureNV", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyAccelerationStructureNV", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkDestroyAccelerationStructureNV", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetAccelerationStructureMemoryRequirementsNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
+    VkMemoryRequirements2KHR*                   pMemoryRequirements) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkGetAccelerationStructureMemoryRequirementsNV", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetAccelerationStructureMemoryRequirementsNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_nv_ray_tracing) skip |= OutputExtensionError("vkGetAccelerationStructureMemoryRequirementsNV", VK_NV_RAY_TRACING_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetAccelerationStructureMemoryRequirementsNV", "pInfo", "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV", pInfo, VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV, true, "VUID-vkGetAccelerationStructureMemoryRequirementsNV-pInfo-parameter", "VUID-VkAccelerationStructureMemoryRequirementsInfoNV-sType-sType");
+    if (pInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetAccelerationStructureMemoryRequirementsNV", "pInfo->pNext", NULL, pInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkAccelerationStructureMemoryRequirementsInfoNV-pNext-pNext");
+
+        skip |= validate_ranged_enum("vkGetAccelerationStructureMemoryRequirementsNV", "pInfo->type", "VkAccelerationStructureMemoryRequirementsTypeNV", AllVkAccelerationStructureMemoryRequirementsTypeNVEnums, pInfo->type, "VUID-VkAccelerationStructureMemoryRequirementsInfoNV-type-parameter");
+
+        skip |= validate_required_handle("vkGetAccelerationStructureMemoryRequirementsNV", "pInfo->accelerationStructure", pInfo->accelerationStructure);
+    }
+    skip |= validate_required_pointer("vkGetAccelerationStructureMemoryRequirementsNV", "pMemoryRequirements", pMemoryRequirements, "VUID-vkGetAccelerationStructureMemoryRequirementsNV-pMemoryRequirements-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateBindAccelerationStructureMemoryNV(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindAccelerationStructureMemoryInfoNV* pBindInfos) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkBindAccelerationStructureMemoryNV", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkBindAccelerationStructureMemoryNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_nv_ray_tracing) skip |= OutputExtensionError("vkBindAccelerationStructureMemoryNV", VK_NV_RAY_TRACING_EXTENSION_NAME);
+    skip |= validate_struct_type_array("vkBindAccelerationStructureMemoryNV", "bindInfoCount", "pBindInfos", "VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV", bindInfoCount, pBindInfos, VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV, true, true, "VUID-VkBindAccelerationStructureMemoryInfoNV-sType-sType", "VUID-vkBindAccelerationStructureMemoryNV-pBindInfos-parameter", "VUID-vkBindAccelerationStructureMemoryNV-bindInfoCount-arraylength");
+    if (pBindInfos != NULL)
+    {
+        for (uint32_t bindInfoIndex = 0; bindInfoIndex < bindInfoCount; ++bindInfoIndex)
+        {
+            skip |= validate_struct_pnext("vkBindAccelerationStructureMemoryNV", ParameterName("pBindInfos[%i].pNext", ParameterName::IndexVector{ bindInfoIndex }), NULL, pBindInfos[bindInfoIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkBindAccelerationStructureMemoryInfoNV-pNext-pNext");
+
+            skip |= validate_required_handle("vkBindAccelerationStructureMemoryNV", ParameterName("pBindInfos[%i].accelerationStructure", ParameterName::IndexVector{ bindInfoIndex }), pBindInfos[bindInfoIndex].accelerationStructure);
+
+            skip |= validate_required_handle("vkBindAccelerationStructureMemoryNV", ParameterName("pBindInfos[%i].memory", ParameterName::IndexVector{ bindInfoIndex }), pBindInfos[bindInfoIndex].memory);
+
+            skip |= validate_array("vkBindAccelerationStructureMemoryNV", ParameterName("pBindInfos[%i].deviceIndexCount", ParameterName::IndexVector{ bindInfoIndex }), ParameterName("pBindInfos[%i].pDeviceIndices", ParameterName::IndexVector{ bindInfoIndex }), pBindInfos[bindInfoIndex].deviceIndexCount, &pBindInfos[bindInfoIndex].pDeviceIndices, false, true, kVUIDUndefined, "VUID-VkBindAccelerationStructureMemoryInfoNV-pDeviceIndices-parameter");
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdBuildAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    const VkAccelerationStructureInfoNV*        pInfo,
+    VkBuffer                                    instanceData,
+    VkDeviceSize                                instanceOffset,
+    VkBool32                                    update,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkBuffer                                    scratch,
+    VkDeviceSize                                scratchOffset) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkCmdBuildAccelerationStructureNV", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdBuildAccelerationStructureNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_nv_ray_tracing) skip |= OutputExtensionError("vkCmdBuildAccelerationStructureNV", VK_NV_RAY_TRACING_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCmdBuildAccelerationStructureNV", "pInfo", "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV", pInfo, VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV, true, "VUID-vkCmdBuildAccelerationStructureNV-pInfo-parameter", "VUID-VkAccelerationStructureInfoNV-sType-sType");
+    if (pInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCmdBuildAccelerationStructureNV", "pInfo->pNext", NULL, pInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkAccelerationStructureInfoNV-pNext-pNext");
+
+        skip |= validate_ranged_enum("vkCmdBuildAccelerationStructureNV", "pInfo->type", "VkAccelerationStructureTypeNV", AllVkAccelerationStructureTypeNVEnums, pInfo->type, "VUID-VkAccelerationStructureInfoNV-type-parameter");
+
+        skip |= validate_flags("vkCmdBuildAccelerationStructureNV", "pInfo->flags", "VkBuildAccelerationStructureFlagBitsNV", AllVkBuildAccelerationStructureFlagBitsNV, pInfo->flags, kOptionalFlags, "VUID-VkAccelerationStructureInfoNV-flags-parameter");
+
+        skip |= validate_struct_type_array("vkCmdBuildAccelerationStructureNV", "pInfo->geometryCount", "pInfo->pGeometries", "VK_STRUCTURE_TYPE_GEOMETRY_NV", pInfo->geometryCount, pInfo->pGeometries, VK_STRUCTURE_TYPE_GEOMETRY_NV, false, true, "VUID-VkGeometryNV-sType-sType", "VUID-VkAccelerationStructureInfoNV-pGeometries-parameter", kVUIDUndefined);
+
+        if (pInfo->pGeometries != NULL)
+        {
+            for (uint32_t geometryIndex = 0; geometryIndex < pInfo->geometryCount; ++geometryIndex)
+            {
+                skip |= validate_struct_pnext("vkCmdBuildAccelerationStructureNV", ParameterName("pInfo->pGeometries[%i].pNext", ParameterName::IndexVector{ geometryIndex }), NULL, pInfo->pGeometries[geometryIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkGeometryNV-pNext-pNext");
+
+                skip |= validate_ranged_enum("vkCmdBuildAccelerationStructureNV", ParameterName("pInfo->pGeometries[%i].geometryType", ParameterName::IndexVector{ geometryIndex }), "VkGeometryTypeNV", AllVkGeometryTypeNVEnums, pInfo->pGeometries[geometryIndex].geometryType, "VUID-VkGeometryNV-geometryType-parameter");
+
+                skip |= validate_struct_type("vkCmdBuildAccelerationStructureNV", ParameterName("pInfo->pGeometries[%i].geometry.triangles", ParameterName::IndexVector{ geometryIndex }), "VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV", &(pInfo->pGeometries[geometryIndex].geometry.triangles), VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV, false, kVUIDUndefined, "VUID-VkGeometryTrianglesNV-sType-sType");
+
+                skip |= validate_struct_pnext("vkCmdBuildAccelerationStructureNV", ParameterName("pInfo->pGeometries[%i].geometry.triangles.pNext", ParameterName::IndexVector{ geometryIndex }), NULL, pInfo->pGeometries[geometryIndex].geometry.triangles.pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkGeometryTrianglesNV-pNext-pNext");
+
+                skip |= validate_ranged_enum("vkCmdBuildAccelerationStructureNV", ParameterName("pInfo->pGeometries[%i].geometry.triangles.vertexFormat", ParameterName::IndexVector{ geometryIndex }), "VkFormat", AllVkFormatEnums, pInfo->pGeometries[geometryIndex].geometry.triangles.vertexFormat, "VUID-VkGeometryTrianglesNV-vertexFormat-parameter");
+
+                skip |= validate_ranged_enum("vkCmdBuildAccelerationStructureNV", ParameterName("pInfo->pGeometries[%i].geometry.triangles.indexType", ParameterName::IndexVector{ geometryIndex }), "VkIndexType", AllVkIndexTypeEnums, pInfo->pGeometries[geometryIndex].geometry.triangles.indexType, "VUID-VkGeometryTrianglesNV-indexType-parameter");
+
+                skip |= validate_struct_type("vkCmdBuildAccelerationStructureNV", ParameterName("pInfo->pGeometries[%i].geometry.aabbs", ParameterName::IndexVector{ geometryIndex }), "VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV", &(pInfo->pGeometries[geometryIndex].geometry.aabbs), VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV, false, kVUIDUndefined, "VUID-VkGeometryAABBNV-sType-sType");
+
+                skip |= validate_struct_pnext("vkCmdBuildAccelerationStructureNV", ParameterName("pInfo->pGeometries[%i].geometry.aabbs.pNext", ParameterName::IndexVector{ geometryIndex }), NULL, pInfo->pGeometries[geometryIndex].geometry.aabbs.pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkGeometryAABBNV-pNext-pNext");
+
+                skip |= validate_flags("vkCmdBuildAccelerationStructureNV", ParameterName("pInfo->pGeometries[%i].flags", ParameterName::IndexVector{ geometryIndex }), "VkGeometryFlagBitsNV", AllVkGeometryFlagBitsNV, pInfo->pGeometries[geometryIndex].flags, kOptionalFlags, "VUID-VkGeometryNV-flags-parameter");
+            }
+        }
+    }
+    skip |= validate_bool32("vkCmdBuildAccelerationStructureNV", "update", update);
+    skip |= validate_required_handle("vkCmdBuildAccelerationStructureNV", "dst", dst);
+    skip |= validate_required_handle("vkCmdBuildAccelerationStructureNV", "scratch", scratch);
+    if (!skip) skip |= manual_PreCallValidateCmdBuildAccelerationStructureNV(commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdCopyAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkCopyAccelerationStructureModeNV           mode) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkCmdCopyAccelerationStructureNV", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdCopyAccelerationStructureNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_nv_ray_tracing) skip |= OutputExtensionError("vkCmdCopyAccelerationStructureNV", VK_NV_RAY_TRACING_EXTENSION_NAME);
+    skip |= validate_required_handle("vkCmdCopyAccelerationStructureNV", "dst", dst);
+    skip |= validate_required_handle("vkCmdCopyAccelerationStructureNV", "src", src);
+    skip |= validate_ranged_enum("vkCmdCopyAccelerationStructureNV", "mode", "VkCopyAccelerationStructureModeNV", AllVkCopyAccelerationStructureModeNVEnums, mode, "VUID-vkCmdCopyAccelerationStructureNV-mode-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdTraceRaysNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    raygenShaderBindingTableBuffer,
+    VkDeviceSize                                raygenShaderBindingOffset,
+    VkBuffer                                    missShaderBindingTableBuffer,
+    VkDeviceSize                                missShaderBindingOffset,
+    VkDeviceSize                                missShaderBindingStride,
+    VkBuffer                                    hitShaderBindingTableBuffer,
+    VkDeviceSize                                hitShaderBindingOffset,
+    VkDeviceSize                                hitShaderBindingStride,
+    VkBuffer                                    callableShaderBindingTableBuffer,
+    VkDeviceSize                                callableShaderBindingOffset,
+    VkDeviceSize                                callableShaderBindingStride,
+    uint32_t                                    width,
+    uint32_t                                    height,
+    uint32_t                                    depth) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkCmdTraceRaysNV", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdTraceRaysNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_nv_ray_tracing) skip |= OutputExtensionError("vkCmdTraceRaysNV", VK_NV_RAY_TRACING_EXTENSION_NAME);
+    skip |= validate_required_handle("vkCmdTraceRaysNV", "raygenShaderBindingTableBuffer", raygenShaderBindingTableBuffer);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCreateRayTracingPipelinesNV(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkRayTracingPipelineCreateInfoNV*     pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkCreateRayTracingPipelinesNV", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCreateRayTracingPipelinesNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_nv_ray_tracing) skip |= OutputExtensionError("vkCreateRayTracingPipelinesNV", VK_NV_RAY_TRACING_EXTENSION_NAME);
+    skip |= validate_struct_type_array("vkCreateRayTracingPipelinesNV", "createInfoCount", "pCreateInfos", "VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV", createInfoCount, pCreateInfos, VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV, true, true, "VUID-VkRayTracingPipelineCreateInfoNV-sType-sType", "VUID-vkCreateRayTracingPipelinesNV-pCreateInfos-parameter", "VUID-vkCreateRayTracingPipelinesNV-createInfoCount-arraylength");
+    if (pCreateInfos != NULL)
+    {
+        for (uint32_t createInfoIndex = 0; createInfoIndex < createInfoCount; ++createInfoIndex)
+        {
+            const VkStructureType allowed_structs_VkRayTracingPipelineCreateInfoNV[] = { VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT };
+
+            skip |= validate_struct_pnext("vkCreateRayTracingPipelinesNV", ParameterName("pCreateInfos[%i].pNext", ParameterName::IndexVector{ createInfoIndex }), "VkPipelineCreationFeedbackCreateInfoEXT", pCreateInfos[createInfoIndex].pNext, ARRAY_SIZE(allowed_structs_VkRayTracingPipelineCreateInfoNV), allowed_structs_VkRayTracingPipelineCreateInfoNV, GeneratedVulkanHeaderVersion, "VUID-VkRayTracingPipelineCreateInfoNV-pNext-pNext");
+
+            skip |= validate_flags("vkCreateRayTracingPipelinesNV", ParameterName("pCreateInfos[%i].flags", ParameterName::IndexVector{ createInfoIndex }), "VkPipelineCreateFlagBits", AllVkPipelineCreateFlagBits, pCreateInfos[createInfoIndex].flags, kOptionalFlags, "VUID-VkRayTracingPipelineCreateInfoNV-flags-parameter");
+
+            skip |= validate_struct_type_array("vkCreateRayTracingPipelinesNV", ParameterName("pCreateInfos[%i].stageCount", ParameterName::IndexVector{ createInfoIndex }), ParameterName("pCreateInfos[%i].pStages", ParameterName::IndexVector{ createInfoIndex }), "VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO", pCreateInfos[createInfoIndex].stageCount, pCreateInfos[createInfoIndex].pStages, VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, true, true, "VUID-VkPipelineShaderStageCreateInfo-sType-sType", "VUID-VkRayTracingPipelineCreateInfoNV-pStages-parameter", "VUID-VkRayTracingPipelineCreateInfoNV-stageCount-arraylength");
+
+            if (pCreateInfos[createInfoIndex].pStages != NULL)
+            {
+                for (uint32_t stageIndex = 0; stageIndex < pCreateInfos[createInfoIndex].stageCount; ++stageIndex)
+                {
+                    const VkStructureType allowed_structs_VkPipelineShaderStageCreateInfo[] = { VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT };
+
+                    skip |= validate_struct_pnext("vkCreateRayTracingPipelinesNV", ParameterName("pCreateInfos[%i].pStages[%i].pNext", ParameterName::IndexVector{ createInfoIndex, stageIndex }), "VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT", pCreateInfos[createInfoIndex].pStages[stageIndex].pNext, ARRAY_SIZE(allowed_structs_VkPipelineShaderStageCreateInfo), allowed_structs_VkPipelineShaderStageCreateInfo, GeneratedVulkanHeaderVersion, "VUID-VkPipelineShaderStageCreateInfo-pNext-pNext");
+
+                    skip |= validate_flags("vkCreateRayTracingPipelinesNV", ParameterName("pCreateInfos[%i].pStages[%i].flags", ParameterName::IndexVector{ createInfoIndex, stageIndex }), "VkPipelineShaderStageCreateFlagBits", AllVkPipelineShaderStageCreateFlagBits, pCreateInfos[createInfoIndex].pStages[stageIndex].flags, kOptionalFlags, "VUID-VkPipelineShaderStageCreateInfo-flags-parameter");
+
+                    skip |= validate_flags("vkCreateRayTracingPipelinesNV", ParameterName("pCreateInfos[%i].pStages[%i].stage", ParameterName::IndexVector{ createInfoIndex, stageIndex }), "VkShaderStageFlagBits", AllVkShaderStageFlagBits, pCreateInfos[createInfoIndex].pStages[stageIndex].stage, kRequiredSingleBit, "VUID-VkPipelineShaderStageCreateInfo-stage-parameter", "VUID-VkPipelineShaderStageCreateInfo-stage-parameter");
+
+                    skip |= validate_required_handle("vkCreateRayTracingPipelinesNV", ParameterName("pCreateInfos[%i].pStages[%i].module", ParameterName::IndexVector{ createInfoIndex, stageIndex }), pCreateInfos[createInfoIndex].pStages[stageIndex].module);
+
+                    skip |= validate_required_pointer("vkCreateRayTracingPipelinesNV", ParameterName("pCreateInfos[%i].pStages[%i].pName", ParameterName::IndexVector{ createInfoIndex, stageIndex }), pCreateInfos[createInfoIndex].pStages[stageIndex].pName, "VUID-VkPipelineShaderStageCreateInfo-pName-parameter");
+
+                    if (pCreateInfos[createInfoIndex].pStages[stageIndex].pSpecializationInfo != NULL)
+                    {
+                        skip |= validate_array("vkCreateRayTracingPipelinesNV", ParameterName("pCreateInfos[%i].pStages[%i].pSpecializationInfo->mapEntryCount", ParameterName::IndexVector{ createInfoIndex, stageIndex }), ParameterName("pCreateInfos[%i].pStages[%i].pSpecializationInfo->pMapEntries", ParameterName::IndexVector{ createInfoIndex, stageIndex }), pCreateInfos[createInfoIndex].pStages[stageIndex].pSpecializationInfo->mapEntryCount, &pCreateInfos[createInfoIndex].pStages[stageIndex].pSpecializationInfo->pMapEntries, false, true, kVUIDUndefined, "VUID-VkSpecializationInfo-pMapEntries-parameter");
+
+                        if (pCreateInfos[createInfoIndex].pStages[stageIndex].pSpecializationInfo->pMapEntries != NULL)
+                        {
+                            for (uint32_t mapEntryIndex = 0; mapEntryIndex < pCreateInfos[createInfoIndex].pStages[stageIndex].pSpecializationInfo->mapEntryCount; ++mapEntryIndex)
+                            {
+                                // No xml-driven validation
+                            }
+                        }
+
+                        skip |= validate_array("vkCreateRayTracingPipelinesNV", ParameterName("pCreateInfos[%i].pStages[%i].pSpecializationInfo->dataSize", ParameterName::IndexVector{ createInfoIndex, stageIndex }), ParameterName("pCreateInfos[%i].pStages[%i].pSpecializationInfo->pData", ParameterName::IndexVector{ createInfoIndex, stageIndex }), pCreateInfos[createInfoIndex].pStages[stageIndex].pSpecializationInfo->dataSize, &pCreateInfos[createInfoIndex].pStages[stageIndex].pSpecializationInfo->pData, false, true, kVUIDUndefined, "VUID-VkSpecializationInfo-pData-parameter");
+                    }
+                }
+            }
+
+            skip |= validate_struct_type_array("vkCreateRayTracingPipelinesNV", ParameterName("pCreateInfos[%i].groupCount", ParameterName::IndexVector{ createInfoIndex }), ParameterName("pCreateInfos[%i].pGroups", ParameterName::IndexVector{ createInfoIndex }), "VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV", pCreateInfos[createInfoIndex].groupCount, pCreateInfos[createInfoIndex].pGroups, VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV, true, true, "VUID-VkRayTracingShaderGroupCreateInfoNV-sType-sType", "VUID-VkRayTracingPipelineCreateInfoNV-pGroups-parameter", "VUID-VkRayTracingPipelineCreateInfoNV-groupCount-arraylength");
+
+            if (pCreateInfos[createInfoIndex].pGroups != NULL)
+            {
+                for (uint32_t groupIndex = 0; groupIndex < pCreateInfos[createInfoIndex].groupCount; ++groupIndex)
+                {
+                    skip |= validate_struct_pnext("vkCreateRayTracingPipelinesNV", ParameterName("pCreateInfos[%i].pGroups[%i].pNext", ParameterName::IndexVector{ createInfoIndex, groupIndex }), NULL, pCreateInfos[createInfoIndex].pGroups[groupIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkRayTracingShaderGroupCreateInfoNV-pNext-pNext");
+
+                    skip |= validate_ranged_enum("vkCreateRayTracingPipelinesNV", ParameterName("pCreateInfos[%i].pGroups[%i].type", ParameterName::IndexVector{ createInfoIndex, groupIndex }), "VkRayTracingShaderGroupTypeNV", AllVkRayTracingShaderGroupTypeNVEnums, pCreateInfos[createInfoIndex].pGroups[groupIndex].type, "VUID-VkRayTracingShaderGroupCreateInfoNV-type-parameter");
+                }
+            }
+
+            skip |= validate_required_handle("vkCreateRayTracingPipelinesNV", ParameterName("pCreateInfos[%i].layout", ParameterName::IndexVector{ createInfoIndex }), pCreateInfos[createInfoIndex].layout);
+        }
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateRayTracingPipelinesNV", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateRayTracingPipelinesNV", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateRayTracingPipelinesNV", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateRayTracingPipelinesNV", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateRayTracingPipelinesNV", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_array("vkCreateRayTracingPipelinesNV", "createInfoCount", "pPipelines", createInfoCount, &pPipelines, true, true, "VUID-vkCreateRayTracingPipelinesNV-createInfoCount-arraylength", "VUID-vkCreateRayTracingPipelinesNV-pPipelines-parameter");
+    if (!skip) skip |= manual_PreCallValidateCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetRayTracingShaderGroupHandlesNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    firstGroup,
+    uint32_t                                    groupCount,
+    size_t                                      dataSize,
+    void*                                       pData) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkGetRayTracingShaderGroupHandlesNV", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetRayTracingShaderGroupHandlesNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_nv_ray_tracing) skip |= OutputExtensionError("vkGetRayTracingShaderGroupHandlesNV", VK_NV_RAY_TRACING_EXTENSION_NAME);
+    skip |= validate_required_handle("vkGetRayTracingShaderGroupHandlesNV", "pipeline", pipeline);
+    skip |= validate_array("vkGetRayTracingShaderGroupHandlesNV", "dataSize", "pData", dataSize, &pData, true, true, "VUID-vkGetRayTracingShaderGroupHandlesNV-dataSize-arraylength", "VUID-vkGetRayTracingShaderGroupHandlesNV-pData-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetAccelerationStructureHandleNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    size_t                                      dataSize,
+    void*                                       pData) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkGetAccelerationStructureHandleNV", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetAccelerationStructureHandleNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_nv_ray_tracing) skip |= OutputExtensionError("vkGetAccelerationStructureHandleNV", VK_NV_RAY_TRACING_EXTENSION_NAME);
+    skip |= validate_required_handle("vkGetAccelerationStructureHandleNV", "accelerationStructure", accelerationStructure);
+    skip |= validate_array("vkGetAccelerationStructureHandleNV", "dataSize", "pData", dataSize, &pData, true, true, "VUID-vkGetAccelerationStructureHandleNV-dataSize-arraylength", "VUID-vkGetAccelerationStructureHandleNV-pData-parameter");
+    if (!skip) skip |= manual_PreCallValidateGetAccelerationStructureHandleNV(device, accelerationStructure, dataSize, pData);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdWriteAccelerationStructuresPropertiesNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    accelerationStructureCount,
+    const VkAccelerationStructureNV*            pAccelerationStructures,
+    VkQueryType                                 queryType,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkCmdWriteAccelerationStructuresPropertiesNV", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdWriteAccelerationStructuresPropertiesNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_nv_ray_tracing) skip |= OutputExtensionError("vkCmdWriteAccelerationStructuresPropertiesNV", VK_NV_RAY_TRACING_EXTENSION_NAME);
+    skip |= validate_handle_array("vkCmdWriteAccelerationStructuresPropertiesNV", "accelerationStructureCount", "pAccelerationStructures", accelerationStructureCount, pAccelerationStructures, true, true);
+    skip |= validate_ranged_enum("vkCmdWriteAccelerationStructuresPropertiesNV", "queryType", "VkQueryType", AllVkQueryTypeEnums, queryType, "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryType-parameter");
+    skip |= validate_required_handle("vkCmdWriteAccelerationStructuresPropertiesNV", "queryPool", queryPool);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCompileDeferredNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    shader) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_memory_requirements_2) skip |= OutputExtensionError("vkCompileDeferredNV", VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCompileDeferredNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_nv_ray_tracing) skip |= OutputExtensionError("vkCompileDeferredNV", VK_NV_RAY_TRACING_EXTENSION_NAME);
+    skip |= validate_required_handle("vkCompileDeferredNV", "pipeline", pipeline);
+    return skip;
+}
+
+
+
+
+
+
+
+
+
+bool StatelessValidation::PreCallValidateGetMemoryHostPointerPropertiesEXT(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    const void*                                 pHostPointer,
+    VkMemoryHostPointerPropertiesEXT*           pMemoryHostPointerProperties) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_external_memory) skip |= OutputExtensionError("vkGetMemoryHostPointerPropertiesEXT", VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_external_memory_host) skip |= OutputExtensionError("vkGetMemoryHostPointerPropertiesEXT", VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME);
+    skip |= validate_flags("vkGetMemoryHostPointerPropertiesEXT", "handleType", "VkExternalMemoryHandleTypeFlagBits", AllVkExternalMemoryHandleTypeFlagBits, handleType, kRequiredSingleBit, "VUID-vkGetMemoryHostPointerPropertiesEXT-handleType-parameter", "VUID-vkGetMemoryHostPointerPropertiesEXT-handleType-parameter");
+    skip |= validate_required_pointer("vkGetMemoryHostPointerPropertiesEXT", "pHostPointer", pHostPointer, kVUIDUndefined);
+    skip |= validate_struct_type("vkGetMemoryHostPointerPropertiesEXT", "pMemoryHostPointerProperties", "VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT", pMemoryHostPointerProperties, VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT, true, "VUID-vkGetMemoryHostPointerPropertiesEXT-pMemoryHostPointerProperties-parameter", "VUID-VkMemoryHostPointerPropertiesEXT-sType-sType");
+    if (pMemoryHostPointerProperties != NULL)
+    {
+        // No xml-driven validation
+    }
+    return skip;
+}
+
+
+
+bool StatelessValidation::PreCallValidateCmdWriteBufferMarkerAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    uint32_t                                    marker) const {
+    bool skip = false;
+    if (!device_extensions.vk_amd_buffer_marker) skip |= OutputExtensionError("vkCmdWriteBufferMarkerAMD", VK_AMD_BUFFER_MARKER_EXTENSION_NAME);
+    skip |= validate_flags("vkCmdWriteBufferMarkerAMD", "pipelineStage", "VkPipelineStageFlagBits", AllVkPipelineStageFlagBits, pipelineStage, kRequiredSingleBit, "VUID-vkCmdWriteBufferMarkerAMD-pipelineStage-parameter", "VUID-vkCmdWriteBufferMarkerAMD-pipelineStage-parameter");
+    skip |= validate_required_handle("vkCmdWriteBufferMarkerAMD", "dstBuffer", dstBuffer);
+    return skip;
+}
+
+
+
+
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceCalibrateableTimeDomainsEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pTimeDomainCount,
+    VkTimeDomainEXT*                            pTimeDomains) const {
+    bool skip = false;
+    skip |= validate_array("vkGetPhysicalDeviceCalibrateableTimeDomainsEXT", "pTimeDomainCount", "pTimeDomains", pTimeDomainCount, &pTimeDomains, true, false, false, kVUIDUndefined, "VUID-vkGetPhysicalDeviceCalibrateableTimeDomainsEXT-pTimeDomains-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetCalibratedTimestampsEXT(
+    VkDevice                                    device,
+    uint32_t                                    timestampCount,
+    const VkCalibratedTimestampInfoEXT*         pTimestampInfos,
+    uint64_t*                                   pTimestamps,
+    uint64_t*                                   pMaxDeviation) const {
+    bool skip = false;
+    if (!device_extensions.vk_ext_calibrated_timestamps) skip |= OutputExtensionError("vkGetCalibratedTimestampsEXT", VK_EXT_CALIBRATED_TIMESTAMPS_EXTENSION_NAME);
+    skip |= validate_struct_type_array("vkGetCalibratedTimestampsEXT", "timestampCount", "pTimestampInfos", "VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT", timestampCount, pTimestampInfos, VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT, true, true, "VUID-VkCalibratedTimestampInfoEXT-sType-sType", "VUID-vkGetCalibratedTimestampsEXT-pTimestampInfos-parameter", "VUID-vkGetCalibratedTimestampsEXT-timestampCount-arraylength");
+    if (pTimestampInfos != NULL)
+    {
+        for (uint32_t timestampIndex = 0; timestampIndex < timestampCount; ++timestampIndex)
+        {
+            skip |= validate_struct_pnext("vkGetCalibratedTimestampsEXT", ParameterName("pTimestampInfos[%i].pNext", ParameterName::IndexVector{ timestampIndex }), NULL, pTimestampInfos[timestampIndex].pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkCalibratedTimestampInfoEXT-pNext-pNext");
+
+            skip |= validate_ranged_enum("vkGetCalibratedTimestampsEXT", ParameterName("pTimestampInfos[%i].timeDomain", ParameterName::IndexVector{ timestampIndex }), "VkTimeDomainEXT", AllVkTimeDomainEXTEnums, pTimestampInfos[timestampIndex].timeDomain, "VUID-VkCalibratedTimestampInfoEXT-timeDomain-parameter");
+        }
+    }
+    skip |= validate_array("vkGetCalibratedTimestampsEXT", "timestampCount", "pTimestamps", timestampCount, &pTimestamps, true, true, "VUID-vkGetCalibratedTimestampsEXT-timestampCount-arraylength", "VUID-vkGetCalibratedTimestampsEXT-pTimestamps-parameter");
+    skip |= validate_required_pointer("vkGetCalibratedTimestampsEXT", "pMaxDeviation", pMaxDeviation, "VUID-vkGetCalibratedTimestampsEXT-pMaxDeviation-parameter");
+    return skip;
+}
+
+
+
+
+
+
+
+
+
+#ifdef VK_USE_PLATFORM_GGP
+
+#endif // VK_USE_PLATFORM_GGP
+
+
+
+
+
+
+
+bool StatelessValidation::PreCallValidateCmdDrawMeshTasksNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    taskCount,
+    uint32_t                                    firstTask) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdDrawMeshTasksNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_nv_mesh_shader) skip |= OutputExtensionError("vkCmdDrawMeshTasksNV", VK_NV_MESH_SHADER_EXTENSION_NAME);
+    // No xml-driven validation
+    if (!skip) skip |= manual_PreCallValidateCmdDrawMeshTasksNV(commandBuffer, taskCount, firstTask);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdDrawMeshTasksIndirectNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdDrawMeshTasksIndirectNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_nv_mesh_shader) skip |= OutputExtensionError("vkCmdDrawMeshTasksIndirectNV", VK_NV_MESH_SHADER_EXTENSION_NAME);
+    skip |= validate_required_handle("vkCmdDrawMeshTasksIndirectNV", "buffer", buffer);
+    if (!skip) skip |= manual_PreCallValidateCmdDrawMeshTasksIndirectNV(commandBuffer, buffer, offset, drawCount, stride);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdDrawMeshTasksIndirectCountNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdDrawMeshTasksIndirectCountNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_nv_mesh_shader) skip |= OutputExtensionError("vkCmdDrawMeshTasksIndirectCountNV", VK_NV_MESH_SHADER_EXTENSION_NAME);
+    skip |= validate_required_handle("vkCmdDrawMeshTasksIndirectCountNV", "buffer", buffer);
+    skip |= validate_required_handle("vkCmdDrawMeshTasksIndirectCountNV", "countBuffer", countBuffer);
+    if (!skip) skip |= manual_PreCallValidateCmdDrawMeshTasksIndirectCountNV(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+    return skip;
+}
+
+
+
+
+
+
+
+bool StatelessValidation::PreCallValidateCmdSetExclusiveScissorNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstExclusiveScissor,
+    uint32_t                                    exclusiveScissorCount,
+    const VkRect2D*                             pExclusiveScissors) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdSetExclusiveScissorNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_nv_scissor_exclusive) skip |= OutputExtensionError("vkCmdSetExclusiveScissorNV", VK_NV_SCISSOR_EXCLUSIVE_EXTENSION_NAME);
+    skip |= validate_array("vkCmdSetExclusiveScissorNV", "exclusiveScissorCount", "pExclusiveScissors", exclusiveScissorCount, &pExclusiveScissors, true, true, "VUID-vkCmdSetExclusiveScissorNV-exclusiveScissorCount-arraylength", "VUID-vkCmdSetExclusiveScissorNV-pExclusiveScissors-parameter");
+    if (pExclusiveScissors != NULL)
+    {
+        for (uint32_t exclusiveScissorIndex = 0; exclusiveScissorIndex < exclusiveScissorCount; ++exclusiveScissorIndex)
+        {
+            // No xml-driven validation
+
+            // No xml-driven validation
+        }
+    }
+    if (!skip) skip |= manual_PreCallValidateCmdSetExclusiveScissorNV(commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors);
+    return skip;
+}
+
+
+
+bool StatelessValidation::PreCallValidateCmdSetCheckpointNV(
+    VkCommandBuffer                             commandBuffer,
+    const void*                                 pCheckpointMarker) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdSetCheckpointNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_nv_device_diagnostic_checkpoints) skip |= OutputExtensionError("vkCmdSetCheckpointNV", VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_EXTENSION_NAME);
+    // No xml-driven validation
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetQueueCheckpointDataNV(
+    VkQueue                                     queue,
+    uint32_t*                                   pCheckpointDataCount,
+    VkCheckpointDataNV*                         pCheckpointData) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetQueueCheckpointDataNV", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_nv_device_diagnostic_checkpoints) skip |= OutputExtensionError("vkGetQueueCheckpointDataNV", VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_EXTENSION_NAME);
+    skip |= validate_struct_type_array("vkGetQueueCheckpointDataNV", "pCheckpointDataCount", "pCheckpointData", "VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV", pCheckpointDataCount, pCheckpointData, VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV, true, false, false, "VUID-VkCheckpointDataNV-sType-sType", "VUID-vkGetQueueCheckpointDataNV-pCheckpointData-parameter", kVUIDUndefined);
+    if (pCheckpointData != NULL)
+    {
+        for (uint32_t pCheckpointDataIndex = 0; pCheckpointDataIndex < *pCheckpointDataCount; ++pCheckpointDataIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    return skip;
+}
+
+
+
+
+
+bool StatelessValidation::PreCallValidateInitializePerformanceApiINTEL(
+    VkDevice                                    device,
+    const VkInitializePerformanceApiInfoINTEL*  pInitializeInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_intel_performance_query) skip |= OutputExtensionError("vkInitializePerformanceApiINTEL", VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME);
+    skip |= validate_struct_type("vkInitializePerformanceApiINTEL", "pInitializeInfo", "VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL", pInitializeInfo, VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL, true, "VUID-vkInitializePerformanceApiINTEL-pInitializeInfo-parameter", "VUID-VkInitializePerformanceApiInfoINTEL-sType-sType");
+    if (pInitializeInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkInitializePerformanceApiINTEL", "pInitializeInfo->pNext", NULL, pInitializeInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkInitializePerformanceApiInfoINTEL-pNext-pNext");
+
+        skip |= validate_required_pointer("vkInitializePerformanceApiINTEL", "pInitializeInfo->pUserData", pInitializeInfo->pUserData, kVUIDUndefined);
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateUninitializePerformanceApiINTEL(
+    VkDevice                                    device) const {
+    bool skip = false;
+    if (!device_extensions.vk_intel_performance_query) skip |= OutputExtensionError("vkUninitializePerformanceApiINTEL", VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME);
+    // No xml-driven validation
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdSetPerformanceMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceMarkerInfoINTEL*         pMarkerInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_intel_performance_query) skip |= OutputExtensionError("vkCmdSetPerformanceMarkerINTEL", VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCmdSetPerformanceMarkerINTEL", "pMarkerInfo", "VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL", pMarkerInfo, VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL, true, "VUID-vkCmdSetPerformanceMarkerINTEL-pMarkerInfo-parameter", "VUID-VkPerformanceMarkerInfoINTEL-sType-sType");
+    if (pMarkerInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCmdSetPerformanceMarkerINTEL", "pMarkerInfo->pNext", NULL, pMarkerInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPerformanceMarkerInfoINTEL-pNext-pNext");
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdSetPerformanceStreamMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceStreamMarkerInfoINTEL*   pMarkerInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_intel_performance_query) skip |= OutputExtensionError("vkCmdSetPerformanceStreamMarkerINTEL", VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCmdSetPerformanceStreamMarkerINTEL", "pMarkerInfo", "VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL", pMarkerInfo, VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL, true, "VUID-vkCmdSetPerformanceStreamMarkerINTEL-pMarkerInfo-parameter", "VUID-VkPerformanceStreamMarkerInfoINTEL-sType-sType");
+    if (pMarkerInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCmdSetPerformanceStreamMarkerINTEL", "pMarkerInfo->pNext", NULL, pMarkerInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPerformanceStreamMarkerInfoINTEL-pNext-pNext");
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateCmdSetPerformanceOverrideINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceOverrideInfoINTEL*       pOverrideInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_intel_performance_query) skip |= OutputExtensionError("vkCmdSetPerformanceOverrideINTEL", VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCmdSetPerformanceOverrideINTEL", "pOverrideInfo", "VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL", pOverrideInfo, VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL, true, "VUID-vkCmdSetPerformanceOverrideINTEL-pOverrideInfo-parameter", "VUID-VkPerformanceOverrideInfoINTEL-sType-sType");
+    if (pOverrideInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCmdSetPerformanceOverrideINTEL", "pOverrideInfo->pNext", NULL, pOverrideInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPerformanceOverrideInfoINTEL-pNext-pNext");
+
+        skip |= validate_ranged_enum("vkCmdSetPerformanceOverrideINTEL", "pOverrideInfo->type", "VkPerformanceOverrideTypeINTEL", AllVkPerformanceOverrideTypeINTELEnums, pOverrideInfo->type, "VUID-VkPerformanceOverrideInfoINTEL-type-parameter");
+
+        skip |= validate_bool32("vkCmdSetPerformanceOverrideINTEL", "pOverrideInfo->enable", pOverrideInfo->enable);
+    }
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateAcquirePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo,
+    VkPerformanceConfigurationINTEL*            pConfiguration) const {
+    bool skip = false;
+    if (!device_extensions.vk_intel_performance_query) skip |= OutputExtensionError("vkAcquirePerformanceConfigurationINTEL", VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME);
+    skip |= validate_struct_type("vkAcquirePerformanceConfigurationINTEL", "pAcquireInfo", "VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL", pAcquireInfo, VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL, true, "VUID-vkAcquirePerformanceConfigurationINTEL-pAcquireInfo-parameter", "VUID-VkPerformanceConfigurationAcquireInfoINTEL-sType-sType");
+    if (pAcquireInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkAcquirePerformanceConfigurationINTEL", "pAcquireInfo->pNext", NULL, pAcquireInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkPerformanceConfigurationAcquireInfoINTEL-pNext-pNext");
+
+        skip |= validate_ranged_enum("vkAcquirePerformanceConfigurationINTEL", "pAcquireInfo->type", "VkPerformanceConfigurationTypeINTEL", AllVkPerformanceConfigurationTypeINTELEnums, pAcquireInfo->type, "VUID-VkPerformanceConfigurationAcquireInfoINTEL-type-parameter");
+    }
+    skip |= validate_required_pointer("vkAcquirePerformanceConfigurationINTEL", "pConfiguration", pConfiguration, "VUID-vkAcquirePerformanceConfigurationINTEL-pConfiguration-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateReleasePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    VkPerformanceConfigurationINTEL             configuration) const {
+    bool skip = false;
+    if (!device_extensions.vk_intel_performance_query) skip |= OutputExtensionError("vkReleasePerformanceConfigurationINTEL", VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME);
+    skip |= validate_required_handle("vkReleasePerformanceConfigurationINTEL", "configuration", configuration);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateQueueSetPerformanceConfigurationINTEL(
+    VkQueue                                     queue,
+    VkPerformanceConfigurationINTEL             configuration) const {
+    bool skip = false;
+    if (!device_extensions.vk_intel_performance_query) skip |= OutputExtensionError("vkQueueSetPerformanceConfigurationINTEL", VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME);
+    skip |= validate_required_handle("vkQueueSetPerformanceConfigurationINTEL", "configuration", configuration);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateGetPerformanceParameterINTEL(
+    VkDevice                                    device,
+    VkPerformanceParameterTypeINTEL             parameter,
+    VkPerformanceValueINTEL*                    pValue) const {
+    bool skip = false;
+    if (!device_extensions.vk_intel_performance_query) skip |= OutputExtensionError("vkGetPerformanceParameterINTEL", VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME);
+    skip |= validate_ranged_enum("vkGetPerformanceParameterINTEL", "parameter", "VkPerformanceParameterTypeINTEL", AllVkPerformanceParameterTypeINTELEnums, parameter, "VUID-vkGetPerformanceParameterINTEL-parameter-parameter");
+    skip |= validate_required_pointer("vkGetPerformanceParameterINTEL", "pValue", pValue, "VUID-vkGetPerformanceParameterINTEL-pValue-parameter");
+    return skip;
+}
+
+
+
+
+
+bool StatelessValidation::PreCallValidateSetLocalDimmingAMD(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapChain,
+    VkBool32                                    localDimmingEnable) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkSetLocalDimmingAMD", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_get_surface_capabilities_2) skip |= OutputExtensionError("vkSetLocalDimmingAMD", VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkSetLocalDimmingAMD", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_amd_display_native_hdr) skip |= OutputExtensionError("vkSetLocalDimmingAMD", VK_AMD_DISPLAY_NATIVE_HDR_EXTENSION_NAME);
+    skip |= validate_required_handle("vkSetLocalDimmingAMD", "swapChain", swapChain);
+    skip |= validate_bool32("vkSetLocalDimmingAMD", "localDimmingEnable", localDimmingEnable);
+    return skip;
+}
+
+
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+
+bool StatelessValidation::PreCallValidateCreateImagePipeSurfaceFUCHSIA(
+    VkInstance                                  instance,
+    const VkImagePipeSurfaceCreateInfoFUCHSIA*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateImagePipeSurfaceFUCHSIA", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!instance_extensions.vk_fuchsia_imagepipe_surface) skip |= OutputExtensionError("vkCreateImagePipeSurfaceFUCHSIA", VK_FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCreateImagePipeSurfaceFUCHSIA", "pCreateInfo", "VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA", pCreateInfo, VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA, true, "VUID-vkCreateImagePipeSurfaceFUCHSIA-pCreateInfo-parameter", "VUID-VkImagePipeSurfaceCreateInfoFUCHSIA-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCreateImagePipeSurfaceFUCHSIA", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkImagePipeSurfaceCreateInfoFUCHSIA-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkCreateImagePipeSurfaceFUCHSIA", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkImagePipeSurfaceCreateInfoFUCHSIA-flags-zerobitmask");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateImagePipeSurfaceFUCHSIA", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateImagePipeSurfaceFUCHSIA", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateImagePipeSurfaceFUCHSIA", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateImagePipeSurfaceFUCHSIA", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateImagePipeSurfaceFUCHSIA", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateImagePipeSurfaceFUCHSIA", "pSurface", pSurface, "VUID-vkCreateImagePipeSurfaceFUCHSIA-pSurface-parameter");
+    return skip;
+}
+
+#endif // VK_USE_PLATFORM_FUCHSIA
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+
+bool StatelessValidation::PreCallValidateCreateMetalSurfaceEXT(
+    VkInstance                                  instance,
+    const VkMetalSurfaceCreateInfoEXT*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateMetalSurfaceEXT", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!instance_extensions.vk_ext_metal_surface) skip |= OutputExtensionError("vkCreateMetalSurfaceEXT", VK_EXT_METAL_SURFACE_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCreateMetalSurfaceEXT", "pCreateInfo", "VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT", pCreateInfo, VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT, true, "VUID-vkCreateMetalSurfaceEXT-pCreateInfo-parameter", "VUID-VkMetalSurfaceCreateInfoEXT-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCreateMetalSurfaceEXT", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkMetalSurfaceCreateInfoEXT-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkCreateMetalSurfaceEXT", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkMetalSurfaceCreateInfoEXT-flags-zerobitmask");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateMetalSurfaceEXT", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateMetalSurfaceEXT", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateMetalSurfaceEXT", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateMetalSurfaceEXT", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateMetalSurfaceEXT", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateMetalSurfaceEXT", "pSurface", pSurface, "VUID-vkCreateMetalSurfaceEXT-pSurface-parameter");
+    return skip;
+}
+
+#endif // VK_USE_PLATFORM_METAL_EXT
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+bool StatelessValidation::PreCallValidateGetBufferDeviceAddressEXT(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkGetBufferDeviceAddressEXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_buffer_device_address) skip |= OutputExtensionError("vkGetBufferDeviceAddressEXT", VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME);
+    skip |= validate_struct_type("vkGetBufferDeviceAddressEXT", "pInfo", "VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR", pInfo, VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR, true, "VUID-vkGetBufferDeviceAddressKHR-pInfo-parameter", "VUID-VkBufferDeviceAddressInfoKHR-sType-sType");
+    if (pInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkGetBufferDeviceAddressEXT", "pInfo->pNext", NULL, pInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkBufferDeviceAddressInfoKHR-pNext-pNext");
+
+        skip |= validate_required_handle("vkGetBufferDeviceAddressEXT", "pInfo->buffer", pInfo->buffer);
+    }
+    return skip;
+}
+
+
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceToolPropertiesEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pToolCount,
+    VkPhysicalDeviceToolPropertiesEXT*          pToolProperties) const {
+    bool skip = false;
+    skip |= validate_struct_type_array("vkGetPhysicalDeviceToolPropertiesEXT", "pToolCount", "pToolProperties", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT", pToolCount, pToolProperties, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT, true, true, false, "VUID-VkPhysicalDeviceToolPropertiesEXT-sType-sType", "VUID-vkGetPhysicalDeviceToolPropertiesEXT-pToolProperties-parameter", "VUID-vkGetPhysicalDeviceToolPropertiesEXT-pToolCount-arraylength");
+    if (pToolProperties != NULL)
+    {
+        for (uint32_t pToolIndex = 0; pToolIndex < *pToolCount; ++pToolIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    return skip;
+}
+
+
+
+
+
+
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceCooperativeMatrixPropertiesNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkCooperativeMatrixPropertiesNV*            pProperties) const {
+    bool skip = false;
+    skip |= validate_struct_type_array("vkGetPhysicalDeviceCooperativeMatrixPropertiesNV", "pPropertyCount", "pProperties", "VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV", pPropertyCount, pProperties, VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV, true, false, false, "VUID-VkCooperativeMatrixPropertiesNV-sType-sType", "VUID-vkGetPhysicalDeviceCooperativeMatrixPropertiesNV-pProperties-parameter", kVUIDUndefined);
+    return skip;
+}
+
+
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pCombinationCount,
+    VkFramebufferMixedSamplesCombinationNV*     pCombinations) const {
+    bool skip = false;
+    skip |= validate_struct_type_array("vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV", "pCombinationCount", "pCombinations", "VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV", pCombinationCount, pCombinations, VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV, true, false, false, "VUID-VkFramebufferMixedSamplesCombinationNV-sType-sType", "VUID-vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV-pCombinations-parameter", kVUIDUndefined);
+    if (pCombinations != NULL)
+    {
+        for (uint32_t pCombinationIndex = 0; pCombinationIndex < *pCombinationCount; ++pCombinationIndex)
+        {
+            // No xml-driven validation
+        }
+    }
+    return skip;
+}
+
+
+
+
+
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+bool StatelessValidation::PreCallValidateGetPhysicalDeviceSurfacePresentModes2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes) const {
+    bool skip = false;
+    skip |= validate_struct_type("vkGetPhysicalDeviceSurfacePresentModes2EXT", "pSurfaceInfo", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR", pSurfaceInfo, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR, true, "VUID-vkGetPhysicalDeviceSurfacePresentModes2EXT-pSurfaceInfo-parameter", "VUID-VkPhysicalDeviceSurfaceInfo2KHR-sType-sType");
+    if (pSurfaceInfo != NULL)
+    {
+        const VkStructureType allowed_structs_VkPhysicalDeviceSurfaceInfo2KHR[] = { VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT, VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT };
+
+        skip |= validate_struct_pnext("vkGetPhysicalDeviceSurfacePresentModes2EXT", "pSurfaceInfo->pNext", "VkSurfaceFullScreenExclusiveInfoEXT, VkSurfaceFullScreenExclusiveWin32InfoEXT", pSurfaceInfo->pNext, ARRAY_SIZE(allowed_structs_VkPhysicalDeviceSurfaceInfo2KHR), allowed_structs_VkPhysicalDeviceSurfaceInfo2KHR, GeneratedVulkanHeaderVersion, "VUID-VkPhysicalDeviceSurfaceInfo2KHR-pNext-pNext");
+
+        skip |= validate_required_handle("vkGetPhysicalDeviceSurfacePresentModes2EXT", "pSurfaceInfo->surface", pSurfaceInfo->surface);
+    }
+    skip |= validate_array("vkGetPhysicalDeviceSurfacePresentModes2EXT", "pPresentModeCount", "pPresentModes", pPresentModeCount, &pPresentModes, true, false, false, kVUIDUndefined, "VUID-vkGetPhysicalDeviceSurfacePresentModes2EXT-pPresentModes-parameter");
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateAcquireFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkAcquireFullScreenExclusiveModeEXT", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_get_surface_capabilities_2) skip |= OutputExtensionError("vkAcquireFullScreenExclusiveModeEXT", VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_surface) skip |= OutputExtensionError("vkAcquireFullScreenExclusiveModeEXT", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkAcquireFullScreenExclusiveModeEXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_full_screen_exclusive) skip |= OutputExtensionError("vkAcquireFullScreenExclusiveModeEXT", VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME);
+    skip |= validate_required_handle("vkAcquireFullScreenExclusiveModeEXT", "swapchain", swapchain);
+    return skip;
+}
+
+bool StatelessValidation::PreCallValidateReleaseFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_swapchain) skip |= OutputExtensionError("vkReleaseFullScreenExclusiveModeEXT", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_get_surface_capabilities_2) skip |= OutputExtensionError("vkReleaseFullScreenExclusiveModeEXT", VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_surface) skip |= OutputExtensionError("vkReleaseFullScreenExclusiveModeEXT", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkReleaseFullScreenExclusiveModeEXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_full_screen_exclusive) skip |= OutputExtensionError("vkReleaseFullScreenExclusiveModeEXT", VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME);
+    skip |= validate_required_handle("vkReleaseFullScreenExclusiveModeEXT", "swapchain", swapchain);
+    return skip;
+}
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+bool StatelessValidation::PreCallValidateCreateHeadlessSurfaceEXT(
+    VkInstance                                  instance,
+    const VkHeadlessSurfaceCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const {
+    bool skip = false;
+    if (!instance_extensions.vk_khr_surface) skip |= OutputExtensionError("vkCreateHeadlessSurfaceEXT", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!instance_extensions.vk_ext_headless_surface) skip |= OutputExtensionError("vkCreateHeadlessSurfaceEXT", VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME);
+    skip |= validate_struct_type("vkCreateHeadlessSurfaceEXT", "pCreateInfo", "VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT", pCreateInfo, VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT, true, "VUID-vkCreateHeadlessSurfaceEXT-pCreateInfo-parameter", "VUID-VkHeadlessSurfaceCreateInfoEXT-sType-sType");
+    if (pCreateInfo != NULL)
+    {
+        skip |= validate_struct_pnext("vkCreateHeadlessSurfaceEXT", "pCreateInfo->pNext", NULL, pCreateInfo->pNext, 0, NULL, GeneratedVulkanHeaderVersion, "VUID-VkHeadlessSurfaceCreateInfoEXT-pNext-pNext");
+
+        skip |= validate_reserved_flags("vkCreateHeadlessSurfaceEXT", "pCreateInfo->flags", pCreateInfo->flags, "VUID-VkHeadlessSurfaceCreateInfoEXT-flags-zerobitmask");
+    }
+    if (pAllocator != NULL)
+    {
+        skip |= validate_required_pointer("vkCreateHeadlessSurfaceEXT", "pAllocator->pfnAllocation", reinterpret_cast<const void*>(pAllocator->pfnAllocation), "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+
+        skip |= validate_required_pointer("vkCreateHeadlessSurfaceEXT", "pAllocator->pfnReallocation", reinterpret_cast<const void*>(pAllocator->pfnReallocation), "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+
+        skip |= validate_required_pointer("vkCreateHeadlessSurfaceEXT", "pAllocator->pfnFree", reinterpret_cast<const void*>(pAllocator->pfnFree), "VUID-VkAllocationCallbacks-pfnFree-00634");
+
+        if (pAllocator->pfnInternalAllocation != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateHeadlessSurfaceEXT", "pAllocator->pfnInternalFree", reinterpret_cast<const void*>(pAllocator->pfnInternalFree), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+
+        if (pAllocator->pfnInternalFree != NULL)
+        {
+            skip |= validate_required_pointer("vkCreateHeadlessSurfaceEXT", "pAllocator->pfnInternalAllocation", reinterpret_cast<const void*>(pAllocator->pfnInternalAllocation), "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+
+        }
+    }
+    skip |= validate_required_pointer("vkCreateHeadlessSurfaceEXT", "pSurface", pSurface, "VUID-vkCreateHeadlessSurfaceEXT-pSurface-parameter");
+    return skip;
+}
+
+
+
+bool StatelessValidation::PreCallValidateCmdSetLineStippleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    lineStippleFactor,
+    uint16_t                                    lineStipplePattern) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkCmdSetLineStippleEXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_line_rasterization) skip |= OutputExtensionError("vkCmdSetLineStippleEXT", VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME);
+    // No xml-driven validation
+    if (!skip) skip |= manual_PreCallValidateCmdSetLineStippleEXT(commandBuffer, lineStippleFactor, lineStipplePattern);
+    return skip;
+}
+
+
+
+bool StatelessValidation::PreCallValidateResetQueryPoolEXT(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_get_physical_device_properties_2) skip |= OutputExtensionError("vkResetQueryPoolEXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_host_query_reset) skip |= OutputExtensionError("vkResetQueryPoolEXT", VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
+    skip |= validate_required_handle("vkResetQueryPoolEXT", "queryPool", queryPool);
+    return skip;
+}
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/third_party/vulkan-validation-layers/src/layers/generated/parameter_validation.h b/src/third_party/vulkan-validation-layers/src/layers/generated/parameter_validation.h
new file mode 100644
index 0000000..c7b8da4
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/generated/parameter_validation.h
@@ -0,0 +1,1796 @@
+/* *** THIS FILE IS GENERATED - DO NOT EDIT! ***
+ * See parameter_validation_generator.py for modifications
+ *
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (C) 2015-2019 Google Inc.
+ * Copyright (c) 2015-2017 Valve Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@LunarG.com>
+ * Author: Dave Houlton <daveh@LunarG.com>
+ */
+
+
+bool PreCallValidateCreateInstance(
+    const VkInstanceCreateInfo*                 pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkInstance*                                 pInstance) const;
+bool PreCallValidateDestroyInstance(
+    VkInstance                                  instance,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateEnumeratePhysicalDevices(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceCount,
+    VkPhysicalDevice*                           pPhysicalDevices) const;
+bool PreCallValidateGetPhysicalDeviceFeatures(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures*                   pFeatures) const;
+bool PreCallValidateGetPhysicalDeviceFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties*                         pFormatProperties) const;
+bool PreCallValidateGetPhysicalDeviceImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageCreateFlags                          flags,
+    VkImageFormatProperties*                    pImageFormatProperties) const;
+bool PreCallValidateGetPhysicalDeviceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties*                 pProperties) const;
+bool PreCallValidateGetPhysicalDeviceQueueFamilyProperties(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties*                    pQueueFamilyProperties) const;
+bool PreCallValidateGetPhysicalDeviceMemoryProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties*           pMemoryProperties) const;
+bool PreCallValidateCreateDevice(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDeviceCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDevice*                                   pDevice) const;
+bool PreCallValidateDestroyDevice(
+    VkDevice                                    device,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateGetDeviceQueue(
+    VkDevice                                    device,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t                                    queueIndex,
+    VkQueue*                                    pQueue) const;
+bool PreCallValidateQueueSubmit(
+    VkQueue                                     queue,
+    uint32_t                                    submitCount,
+    const VkSubmitInfo*                         pSubmits,
+    VkFence                                     fence) const;
+bool PreCallValidateQueueWaitIdle(
+    VkQueue                                     queue) const;
+bool PreCallValidateDeviceWaitIdle(
+    VkDevice                                    device) const;
+bool PreCallValidateAllocateMemory(
+    VkDevice                                    device,
+    const VkMemoryAllocateInfo*                 pAllocateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDeviceMemory*                             pMemory) const;
+bool PreCallValidateFreeMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateMapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                offset,
+    VkDeviceSize                                size,
+    VkMemoryMapFlags                            flags,
+    void**                                      ppData) const;
+bool PreCallValidateUnmapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory) const;
+bool PreCallValidateFlushMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges) const;
+bool PreCallValidateInvalidateMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges) const;
+bool PreCallValidateGetDeviceMemoryCommitment(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize*                               pCommittedMemoryInBytes) const;
+bool PreCallValidateBindBufferMemory(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset) const;
+bool PreCallValidateBindImageMemory(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset) const;
+bool PreCallValidateGetBufferMemoryRequirements(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkMemoryRequirements*                       pMemoryRequirements) const;
+bool PreCallValidateGetImageMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkMemoryRequirements*                       pMemoryRequirements) const;
+bool PreCallValidateGetImageSparseMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements*            pSparseMemoryRequirements) const;
+bool PreCallValidateGetPhysicalDeviceSparseImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkSampleCountFlagBits                       samples,
+    VkImageUsageFlags                           usage,
+    VkImageTiling                               tiling,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties*              pProperties) const;
+bool PreCallValidateQueueBindSparse(
+    VkQueue                                     queue,
+    uint32_t                                    bindInfoCount,
+    const VkBindSparseInfo*                     pBindInfo,
+    VkFence                                     fence) const;
+bool PreCallValidateCreateFence(
+    VkDevice                                    device,
+    const VkFenceCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence) const;
+bool PreCallValidateDestroyFence(
+    VkDevice                                    device,
+    VkFence                                     fence,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateResetFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences) const;
+bool PreCallValidateGetFenceStatus(
+    VkDevice                                    device,
+    VkFence                                     fence) const;
+bool PreCallValidateWaitForFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences,
+    VkBool32                                    waitAll,
+    uint64_t                                    timeout) const;
+bool PreCallValidateCreateSemaphore(
+    VkDevice                                    device,
+    const VkSemaphoreCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSemaphore*                                pSemaphore) const;
+bool PreCallValidateDestroySemaphore(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateCreateEvent(
+    VkDevice                                    device,
+    const VkEventCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkEvent*                                    pEvent) const;
+bool PreCallValidateDestroyEvent(
+    VkDevice                                    device,
+    VkEvent                                     event,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateGetEventStatus(
+    VkDevice                                    device,
+    VkEvent                                     event) const;
+bool PreCallValidateSetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event) const;
+bool PreCallValidateResetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event) const;
+bool PreCallValidateCreateQueryPool(
+    VkDevice                                    device,
+    const VkQueryPoolCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkQueryPool*                                pQueryPool) const;
+bool PreCallValidateDestroyQueryPool(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateGetQueryPoolResults(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    size_t                                      dataSize,
+    void*                                       pData,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags) const;
+bool PreCallValidateCreateBuffer(
+    VkDevice                                    device,
+    const VkBufferCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBuffer*                                   pBuffer) const;
+bool PreCallValidateDestroyBuffer(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateCreateBufferView(
+    VkDevice                                    device,
+    const VkBufferViewCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBufferView*                               pView) const;
+bool PreCallValidateDestroyBufferView(
+    VkDevice                                    device,
+    VkBufferView                                bufferView,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateCreateImage(
+    VkDevice                                    device,
+    const VkImageCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImage*                                    pImage) const;
+bool PreCallValidateDestroyImage(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateGetImageSubresourceLayout(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkImageSubresource*                   pSubresource,
+    VkSubresourceLayout*                        pLayout) const;
+bool PreCallValidateCreateImageView(
+    VkDevice                                    device,
+    const VkImageViewCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImageView*                                pView) const;
+bool PreCallValidateDestroyImageView(
+    VkDevice                                    device,
+    VkImageView                                 imageView,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateCreateShaderModule(
+    VkDevice                                    device,
+    const VkShaderModuleCreateInfo*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkShaderModule*                             pShaderModule) const;
+bool PreCallValidateDestroyShaderModule(
+    VkDevice                                    device,
+    VkShaderModule                              shaderModule,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateCreatePipelineCache(
+    VkDevice                                    device,
+    const VkPipelineCacheCreateInfo*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineCache*                            pPipelineCache) const;
+bool PreCallValidateDestroyPipelineCache(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateGetPipelineCacheData(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    size_t*                                     pDataSize,
+    void*                                       pData) const;
+bool PreCallValidateMergePipelineCaches(
+    VkDevice                                    device,
+    VkPipelineCache                             dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkPipelineCache*                      pSrcCaches) const;
+bool PreCallValidateCreateGraphicsPipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkGraphicsPipelineCreateInfo*         pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines) const;
+bool PreCallValidateCreateComputePipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkComputePipelineCreateInfo*          pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines) const;
+bool PreCallValidateDestroyPipeline(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateCreatePipelineLayout(
+    VkDevice                                    device,
+    const VkPipelineLayoutCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineLayout*                           pPipelineLayout) const;
+bool PreCallValidateDestroyPipelineLayout(
+    VkDevice                                    device,
+    VkPipelineLayout                            pipelineLayout,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateCreateSampler(
+    VkDevice                                    device,
+    const VkSamplerCreateInfo*                  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSampler*                                  pSampler) const;
+bool PreCallValidateDestroySampler(
+    VkDevice                                    device,
+    VkSampler                                   sampler,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateCreateDescriptorSetLayout(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorSetLayout*                      pSetLayout) const;
+bool PreCallValidateDestroyDescriptorSetLayout(
+    VkDevice                                    device,
+    VkDescriptorSetLayout                       descriptorSetLayout,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateCreateDescriptorPool(
+    VkDevice                                    device,
+    const VkDescriptorPoolCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorPool*                           pDescriptorPool) const;
+bool PreCallValidateDestroyDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateResetDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    VkDescriptorPoolResetFlags                  flags) const;
+bool PreCallValidateAllocateDescriptorSets(
+    VkDevice                                    device,
+    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
+    VkDescriptorSet*                            pDescriptorSets) const;
+bool PreCallValidateFreeDescriptorSets(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets) const;
+bool PreCallValidateUpdateDescriptorSets(
+    VkDevice                                    device,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites,
+    uint32_t                                    descriptorCopyCount,
+    const VkCopyDescriptorSet*                  pDescriptorCopies) const;
+bool PreCallValidateCreateFramebuffer(
+    VkDevice                                    device,
+    const VkFramebufferCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFramebuffer*                              pFramebuffer) const;
+bool PreCallValidateDestroyFramebuffer(
+    VkDevice                                    device,
+    VkFramebuffer                               framebuffer,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateCreateRenderPass(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass) const;
+bool PreCallValidateDestroyRenderPass(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateGetRenderAreaGranularity(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    VkExtent2D*                                 pGranularity) const;
+bool PreCallValidateCreateCommandPool(
+    VkDevice                                    device,
+    const VkCommandPoolCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkCommandPool*                              pCommandPool) const;
+bool PreCallValidateDestroyCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateResetCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolResetFlags                     flags) const;
+bool PreCallValidateAllocateCommandBuffers(
+    VkDevice                                    device,
+    const VkCommandBufferAllocateInfo*          pAllocateInfo,
+    VkCommandBuffer*                            pCommandBuffers) const;
+bool PreCallValidateFreeCommandBuffers(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers) const;
+bool PreCallValidateBeginCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    const VkCommandBufferBeginInfo*             pBeginInfo) const;
+bool PreCallValidateEndCommandBuffer(
+    VkCommandBuffer                             commandBuffer) const;
+bool PreCallValidateResetCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkCommandBufferResetFlags                   flags) const;
+bool PreCallValidateCmdBindPipeline(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipeline                                  pipeline) const;
+bool PreCallValidateCmdSetViewport(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewport*                           pViewports) const;
+bool PreCallValidateCmdSetScissor(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstScissor,
+    uint32_t                                    scissorCount,
+    const VkRect2D*                             pScissors) const;
+bool PreCallValidateCmdSetLineWidth(
+    VkCommandBuffer                             commandBuffer,
+    float                                       lineWidth) const;
+bool PreCallValidateCmdSetDepthBias(
+    VkCommandBuffer                             commandBuffer,
+    float                                       depthBiasConstantFactor,
+    float                                       depthBiasClamp,
+    float                                       depthBiasSlopeFactor) const;
+bool PreCallValidateCmdSetBlendConstants(
+    VkCommandBuffer                             commandBuffer,
+    const float                                 blendConstants[4]) const;
+bool PreCallValidateCmdSetDepthBounds(
+    VkCommandBuffer                             commandBuffer,
+    float                                       minDepthBounds,
+    float                                       maxDepthBounds) const;
+bool PreCallValidateCmdSetStencilCompareMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    compareMask) const;
+bool PreCallValidateCmdSetStencilWriteMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    writeMask) const;
+bool PreCallValidateCmdSetStencilReference(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    reference) const;
+bool PreCallValidateCmdBindDescriptorSets(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    firstSet,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets,
+    uint32_t                                    dynamicOffsetCount,
+    const uint32_t*                             pDynamicOffsets) const;
+bool PreCallValidateCmdBindIndexBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkIndexType                                 indexType) const;
+bool PreCallValidateCmdBindVertexBuffers(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets) const;
+bool PreCallValidateCmdDraw(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    vertexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstVertex,
+    uint32_t                                    firstInstance) const;
+bool PreCallValidateCmdDrawIndexed(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    indexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstIndex,
+    int32_t                                     vertexOffset,
+    uint32_t                                    firstInstance) const;
+bool PreCallValidateCmdDrawIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride) const;
+bool PreCallValidateCmdDrawIndexedIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride) const;
+bool PreCallValidateCmdDispatch(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ) const;
+bool PreCallValidateCmdDispatchIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset) const;
+bool PreCallValidateCmdCopyBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferCopy*                         pRegions) const;
+bool PreCallValidateCmdCopyImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageCopy*                          pRegions) const;
+bool PreCallValidateCmdBlitImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageBlit*                          pRegions,
+    VkFilter                                    filter) const;
+bool PreCallValidateCmdCopyBufferToImage(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions) const;
+bool PreCallValidateCmdCopyImageToBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions) const;
+bool PreCallValidateCmdUpdateBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                dataSize,
+    const void*                                 pData) const;
+bool PreCallValidateCmdFillBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                size,
+    uint32_t                                    data) const;
+bool PreCallValidateCmdClearColorImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearColorValue*                    pColor,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges) const;
+bool PreCallValidateCmdClearDepthStencilImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearDepthStencilValue*             pDepthStencil,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges) const;
+bool PreCallValidateCmdClearAttachments(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    attachmentCount,
+    const VkClearAttachment*                    pAttachments,
+    uint32_t                                    rectCount,
+    const VkClearRect*                          pRects) const;
+bool PreCallValidateCmdResolveImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageResolve*                       pRegions) const;
+bool PreCallValidateCmdSetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask) const;
+bool PreCallValidateCmdResetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask) const;
+bool PreCallValidateCmdWaitEvents(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    eventCount,
+    const VkEvent*                              pEvents,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers) const;
+bool PreCallValidateCmdPipelineBarrier(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    VkDependencyFlags                           dependencyFlags,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers) const;
+bool PreCallValidateCmdBeginQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags) const;
+bool PreCallValidateCmdEndQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query) const;
+bool PreCallValidateCmdResetQueryPool(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount) const;
+bool PreCallValidateCmdWriteTimestamp(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query) const;
+bool PreCallValidateCmdCopyQueryPoolResults(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags) const;
+bool PreCallValidateCmdPushConstants(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineLayout                            layout,
+    VkShaderStageFlags                          stageFlags,
+    uint32_t                                    offset,
+    uint32_t                                    size,
+    const void*                                 pValues) const;
+bool PreCallValidateCmdBeginRenderPass(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    VkSubpassContents                           contents) const;
+bool PreCallValidateCmdNextSubpass(
+    VkCommandBuffer                             commandBuffer,
+    VkSubpassContents                           contents) const;
+bool PreCallValidateCmdEndRenderPass(
+    VkCommandBuffer                             commandBuffer) const;
+bool PreCallValidateCmdExecuteCommands(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers) const;
+bool PreCallValidateBindBufferMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos) const;
+bool PreCallValidateBindImageMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos) const;
+bool PreCallValidateGetDeviceGroupPeerMemoryFeatures(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures) const;
+bool PreCallValidateCmdSetDeviceMask(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask) const;
+bool PreCallValidateCmdDispatchBase(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ) const;
+bool PreCallValidateEnumeratePhysicalDeviceGroups(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties) const;
+bool PreCallValidateGetImageMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) const;
+bool PreCallValidateGetBufferMemoryRequirements2(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) const;
+bool PreCallValidateGetImageSparseMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements) const;
+bool PreCallValidateGetPhysicalDeviceFeatures2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures2*                  pFeatures) const;
+bool PreCallValidateGetPhysicalDeviceProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties2*                pProperties) const;
+bool PreCallValidateGetPhysicalDeviceFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties2*                        pFormatProperties) const;
+bool PreCallValidateGetPhysicalDeviceImageFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceImageFormatInfo2*     pImageFormatInfo,
+    VkImageFormatProperties2*                   pImageFormatProperties) const;
+bool PreCallValidateGetPhysicalDeviceQueueFamilyProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties2*                   pQueueFamilyProperties) const;
+bool PreCallValidateGetPhysicalDeviceMemoryProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties2*          pMemoryProperties) const;
+bool PreCallValidateGetPhysicalDeviceSparseImageFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties2*             pProperties) const;
+bool PreCallValidateTrimCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags) const;
+bool PreCallValidateGetDeviceQueue2(
+    VkDevice                                    device,
+    const VkDeviceQueueInfo2*                   pQueueInfo,
+    VkQueue*                                    pQueue) const;
+bool PreCallValidateCreateSamplerYcbcrConversion(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion) const;
+bool PreCallValidateDestroySamplerYcbcrConversion(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateCreateDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate) const;
+bool PreCallValidateDestroyDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateUpdateDescriptorSetWithTemplate(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData) const;
+bool PreCallValidateGetPhysicalDeviceExternalBufferProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalBufferInfo*   pExternalBufferInfo,
+    VkExternalBufferProperties*                 pExternalBufferProperties) const;
+bool PreCallValidateGetPhysicalDeviceExternalFenceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalFenceInfo*    pExternalFenceInfo,
+    VkExternalFenceProperties*                  pExternalFenceProperties) const;
+bool PreCallValidateGetPhysicalDeviceExternalSemaphoreProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
+    VkExternalSemaphoreProperties*              pExternalSemaphoreProperties) const;
+bool PreCallValidateGetDescriptorSetLayoutSupport(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport) const;
+bool PreCallValidateDestroySurfaceKHR(
+    VkInstance                                  instance,
+    VkSurfaceKHR                                surface,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateGetPhysicalDeviceSurfaceSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    VkSurfaceKHR                                surface,
+    VkBool32*                                   pSupported) const;
+bool PreCallValidateGetPhysicalDeviceSurfaceCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilitiesKHR*                   pSurfaceCapabilities) const;
+bool PreCallValidateGetPhysicalDeviceSurfaceFormatsKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormatKHR*                         pSurfaceFormats) const;
+bool PreCallValidateGetPhysicalDeviceSurfacePresentModesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes) const;
+bool PreCallValidateCreateSwapchainKHR(
+    VkDevice                                    device,
+    const VkSwapchainCreateInfoKHR*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchain) const;
+bool PreCallValidateDestroySwapchainKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateGetSwapchainImagesKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pSwapchainImageCount,
+    VkImage*                                    pSwapchainImages) const;
+bool PreCallValidateAcquireNextImageKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint64_t                                    timeout,
+    VkSemaphore                                 semaphore,
+    VkFence                                     fence,
+    uint32_t*                                   pImageIndex) const;
+bool PreCallValidateQueuePresentKHR(
+    VkQueue                                     queue,
+    const VkPresentInfoKHR*                     pPresentInfo) const;
+bool PreCallValidateGetDeviceGroupPresentCapabilitiesKHR(
+    VkDevice                                    device,
+    VkDeviceGroupPresentCapabilitiesKHR*        pDeviceGroupPresentCapabilities) const;
+bool PreCallValidateGetDeviceGroupSurfacePresentModesKHR(
+    VkDevice                                    device,
+    VkSurfaceKHR                                surface,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes) const;
+bool PreCallValidateGetPhysicalDevicePresentRectanglesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pRectCount,
+    VkRect2D*                                   pRects) const;
+bool PreCallValidateAcquireNextImage2KHR(
+    VkDevice                                    device,
+    const VkAcquireNextImageInfoKHR*            pAcquireInfo,
+    uint32_t*                                   pImageIndex) const;
+bool PreCallValidateGetPhysicalDeviceDisplayPropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPropertiesKHR*                     pProperties) const;
+bool PreCallValidateGetPhysicalDeviceDisplayPlanePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPlanePropertiesKHR*                pProperties) const;
+bool PreCallValidateGetDisplayPlaneSupportedDisplaysKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    planeIndex,
+    uint32_t*                                   pDisplayCount,
+    VkDisplayKHR*                               pDisplays) const;
+bool PreCallValidateGetDisplayModePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModePropertiesKHR*                 pProperties) const;
+bool PreCallValidateCreateDisplayModeKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    const VkDisplayModeCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDisplayModeKHR*                           pMode) const;
+bool PreCallValidateGetDisplayPlaneCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayModeKHR                            mode,
+    uint32_t                                    planeIndex,
+    VkDisplayPlaneCapabilitiesKHR*              pCapabilities) const;
+bool PreCallValidateCreateDisplayPlaneSurfaceKHR(
+    VkInstance                                  instance,
+    const VkDisplaySurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const;
+bool PreCallValidateCreateSharedSwapchainsKHR(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainCreateInfoKHR*             pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchains) const;
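+// The declarations below validate the window-system-integration (WSI) surface
+// creation and presentation-support queries; each group is guarded by its
+// VK_USE_PLATFORM_* macro so the header builds on platforms without that backend.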
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+bool PreCallValidateCreateXlibSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXlibSurfaceCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const;
+#endif
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+bool PreCallValidateGetPhysicalDeviceXlibPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    Display*                                    dpy,
+    VisualID                                    visualID) const;
+#endif
+#ifdef VK_USE_PLATFORM_XCB_KHR
+bool PreCallValidateCreateXcbSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXcbSurfaceCreateInfoKHR*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const;
+#endif
+#ifdef VK_USE_PLATFORM_XCB_KHR
+bool PreCallValidateGetPhysicalDeviceXcbPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    xcb_connection_t*                           connection,
+    xcb_visualid_t                              visual_id) const;
+#endif
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+bool PreCallValidateCreateWaylandSurfaceKHR(
+    VkInstance                                  instance,
+    const VkWaylandSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const;
+#endif
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+bool PreCallValidateGetPhysicalDeviceWaylandPresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    struct wl_display*                          display) const;
+#endif
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+bool PreCallValidateCreateAndroidSurfaceKHR(
+    VkInstance                                  instance,
+    const VkAndroidSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const;
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateCreateWin32SurfaceKHR(
+    VkInstance                                  instance,
+    const VkWin32SurfaceCreateInfoKHR*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const;
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateGetPhysicalDeviceWin32PresentationSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex) const;
+#endif
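+// Validation declarations for KHR extension entry points follow; several of them
+// mirror the core Vulkan 1.1 declarations above (features2/properties2, device
+// groups, external memory/semaphore/fence property queries).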
+bool PreCallValidateGetPhysicalDeviceFeatures2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures2*                  pFeatures) const;
+bool PreCallValidateGetPhysicalDeviceProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties2*                pProperties) const;
+bool PreCallValidateGetPhysicalDeviceFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties2*                        pFormatProperties) const;
+bool PreCallValidateGetPhysicalDeviceImageFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceImageFormatInfo2*     pImageFormatInfo,
+    VkImageFormatProperties2*                   pImageFormatProperties) const;
+bool PreCallValidateGetPhysicalDeviceQueueFamilyProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties2*                   pQueueFamilyProperties) const;
+bool PreCallValidateGetPhysicalDeviceMemoryProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties2*          pMemoryProperties) const;
+bool PreCallValidateGetPhysicalDeviceSparseImageFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties2*             pProperties) const;
+bool PreCallValidateGetDeviceGroupPeerMemoryFeaturesKHR(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures) const;
+bool PreCallValidateCmdSetDeviceMaskKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask) const;
+bool PreCallValidateCmdDispatchBaseKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ) const;
+bool PreCallValidateTrimCommandPoolKHR(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags) const;
+bool PreCallValidateEnumeratePhysicalDeviceGroupsKHR(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties) const;
+bool PreCallValidateGetPhysicalDeviceExternalBufferPropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalBufferInfo*   pExternalBufferInfo,
+    VkExternalBufferProperties*                 pExternalBufferProperties) const;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateGetMemoryWin32HandleKHR(
+    VkDevice                                    device,
+    const VkMemoryGetWin32HandleInfoKHR*        pGetWin32HandleInfo,
+    HANDLE*                                     pHandle) const;
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateGetMemoryWin32HandlePropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    HANDLE                                      handle,
+    VkMemoryWin32HandlePropertiesKHR*           pMemoryWin32HandleProperties) const;
+#endif
+bool PreCallValidateGetMemoryFdKHR(
+    VkDevice                                    device,
+    const VkMemoryGetFdInfoKHR*                 pGetFdInfo,
+    int*                                        pFd) const;
+bool PreCallValidateGetMemoryFdPropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    int                                         fd,
+    VkMemoryFdPropertiesKHR*                    pMemoryFdProperties) const;
+bool PreCallValidateGetPhysicalDeviceExternalSemaphorePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
+    VkExternalSemaphoreProperties*              pExternalSemaphoreProperties) const;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateImportSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreWin32HandleInfoKHR*  pImportSemaphoreWin32HandleInfo) const;
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateGetSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetWin32HandleInfoKHR*     pGetWin32HandleInfo,
+    HANDLE*                                     pHandle) const;
+#endif
+bool PreCallValidateImportSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreFdInfoKHR*           pImportSemaphoreFdInfo) const;
+bool PreCallValidateGetSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetFdInfoKHR*              pGetFdInfo,
+    int*                                        pFd) const;
+bool PreCallValidateCmdPushDescriptorSetKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites) const;
+bool PreCallValidateCmdPushDescriptorSetWithTemplateKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    const void*                                 pData) const;
+bool PreCallValidateCreateDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate) const;
+bool PreCallValidateDestroyDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateUpdateDescriptorSetWithTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData) const;
+bool PreCallValidateCreateRenderPass2KHR(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo2KHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass) const;
+bool PreCallValidateCmdBeginRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo) const;
+bool PreCallValidateCmdNextSubpass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo) const;
+bool PreCallValidateCmdEndRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo) const;
+bool PreCallValidateGetSwapchainStatusKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain) const;
+bool PreCallValidateGetPhysicalDeviceExternalFencePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalFenceInfo*    pExternalFenceInfo,
+    VkExternalFenceProperties*                  pExternalFenceProperties) const;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateImportFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportFenceWin32HandleInfoKHR*      pImportFenceWin32HandleInfo) const;
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateGetFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkFenceGetWin32HandleInfoKHR*         pGetWin32HandleInfo,
+    HANDLE*                                     pHandle) const;
+#endif
+bool PreCallValidateImportFenceFdKHR(
+    VkDevice                                    device,
+    const VkImportFenceFdInfoKHR*               pImportFenceFdInfo) const;
+bool PreCallValidateGetFenceFdKHR(
+    VkDevice                                    device,
+    const VkFenceGetFdInfoKHR*                  pGetFdInfo,
+    int*                                        pFd) const;
+bool PreCallValidateEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t*                                   pCounterCount,
+    VkPerformanceCounterKHR*                    pCounters,
+    VkPerformanceCounterDescriptionKHR*         pCounterDescriptions) const;
+bool PreCallValidateGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkQueryPoolPerformanceCreateInfoKHR*  pPerformanceQueryCreateInfo,
+    uint32_t*                                   pNumPasses) const;
+bool PreCallValidateAcquireProfilingLockKHR(
+    VkDevice                                    device,
+    const VkAcquireProfilingLockInfoKHR*        pInfo) const;
+bool PreCallValidateReleaseProfilingLockKHR(
+    VkDevice                                    device) const;
+bool PreCallValidateGetPhysicalDeviceSurfaceCapabilities2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkSurfaceCapabilities2KHR*                  pSurfaceCapabilities) const;
+bool PreCallValidateGetPhysicalDeviceSurfaceFormats2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormat2KHR*                        pSurfaceFormats) const;
+bool PreCallValidateGetPhysicalDeviceDisplayProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayProperties2KHR*                    pProperties) const;
+bool PreCallValidateGetPhysicalDeviceDisplayPlaneProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPlaneProperties2KHR*               pProperties) const;
+bool PreCallValidateGetDisplayModeProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModeProperties2KHR*                pProperties) const;
+bool PreCallValidateGetDisplayPlaneCapabilities2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDisplayPlaneInfo2KHR*               pDisplayPlaneInfo,
+    VkDisplayPlaneCapabilities2KHR*             pCapabilities) const;
+bool PreCallValidateGetImageMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) const;
+bool PreCallValidateGetBufferMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) const;
+bool PreCallValidateGetImageSparseMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements) const;
+bool PreCallValidateCreateSamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion) const;
+bool PreCallValidateDestroySamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateBindBufferMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos) const;
+bool PreCallValidateBindImageMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos) const;
+bool PreCallValidateGetDescriptorSetLayoutSupportKHR(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport) const;
+bool PreCallValidateCmdDrawIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) const;
+bool PreCallValidateCmdDrawIndexedIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) const;
+bool PreCallValidateGetSemaphoreCounterValueKHR(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    uint64_t*                                   pValue) const;
+bool PreCallValidateWaitSemaphoresKHR(
+    VkDevice                                    device,
+    const VkSemaphoreWaitInfoKHR*               pWaitInfo,
+    uint64_t                                    timeout) const;
+bool PreCallValidateSignalSemaphoreKHR(
+    VkDevice                                    device,
+    const VkSemaphoreSignalInfoKHR*             pSignalInfo) const;
+bool PreCallValidateGetBufferDeviceAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo) const;
+bool PreCallValidateGetBufferOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo) const;
+bool PreCallValidateGetDeviceMemoryOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo) const;
+bool PreCallValidateGetPipelineExecutablePropertiesKHR(
+    VkDevice                                    device,
+    const VkPipelineInfoKHR*                    pPipelineInfo,
+    uint32_t*                                   pExecutableCount,
+    VkPipelineExecutablePropertiesKHR*          pProperties) const;
+bool PreCallValidateGetPipelineExecutableStatisticsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pStatisticCount,
+    VkPipelineExecutableStatisticKHR*           pStatistics) const;
+bool PreCallValidateGetPipelineExecutableInternalRepresentationsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pInternalRepresentationCount,
+    VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations) const;
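+// Validation declarations for EXT and vendor extensions follow (debug report /
+// debug utils, transform feedback, AMD/NV/NVX commands, vendor WSI surfaces, etc.).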
+bool PreCallValidateCreateDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    const VkDebugReportCallbackCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugReportCallbackEXT*                   pCallback) const;
+bool PreCallValidateDestroyDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    VkDebugReportCallbackEXT                    callback,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateDebugReportMessageEXT(
+    VkInstance                                  instance,
+    VkDebugReportFlagsEXT                       flags,
+    VkDebugReportObjectTypeEXT                  objectType,
+    uint64_t                                    object,
+    size_t                                      location,
+    int32_t                                     messageCode,
+    const char*                                 pLayerPrefix,
+    const char*                                 pMessage) const;
+bool PreCallValidateDebugMarkerSetObjectTagEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectTagInfoEXT*        pTagInfo) const;
+bool PreCallValidateDebugMarkerSetObjectNameEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectNameInfoEXT*       pNameInfo) const;
+bool PreCallValidateCmdDebugMarkerBeginEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo) const;
+bool PreCallValidateCmdDebugMarkerEndEXT(
+    VkCommandBuffer                             commandBuffer) const;
+bool PreCallValidateCmdDebugMarkerInsertEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo) const;
+bool PreCallValidateCmdBindTransformFeedbackBuffersEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets,
+    const VkDeviceSize*                         pSizes) const;
+bool PreCallValidateCmdBeginTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets) const;
+bool PreCallValidateCmdEndTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets) const;
+bool PreCallValidateCmdBeginQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags,
+    uint32_t                                    index) const;
+bool PreCallValidateCmdEndQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    uint32_t                                    index) const;
+bool PreCallValidateCmdDrawIndirectByteCountEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstInstance,
+    VkBuffer                                    counterBuffer,
+    VkDeviceSize                                counterBufferOffset,
+    uint32_t                                    counterOffset,
+    uint32_t                                    vertexStride) const;
+bool PreCallValidateGetImageViewHandleNVX(
+    VkDevice                                    device,
+    const VkImageViewHandleInfoNVX*             pInfo) const;
+bool PreCallValidateCmdDrawIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) const;
+bool PreCallValidateCmdDrawIndexedIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) const;
+bool PreCallValidateGetShaderInfoAMD(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    VkShaderStageFlagBits                       shaderStage,
+    VkShaderInfoTypeAMD                         infoType,
+    size_t*                                     pInfoSize,
+    void*                                       pInfo) const;
+#ifdef VK_USE_PLATFORM_GGP
+bool PreCallValidateCreateStreamDescriptorSurfaceGGP(
+    VkInstance                                  instance,
+    const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const;
+#endif
+bool PreCallValidateGetPhysicalDeviceExternalImageFormatPropertiesNV(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageCreateFlags                          flags,
+    VkExternalMemoryHandleTypeFlagsNV           externalHandleType,
+    VkExternalImageFormatPropertiesNV*          pExternalImageFormatProperties) const;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateGetMemoryWin32HandleNV(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkExternalMemoryHandleTypeFlagsNV           handleType,
+    HANDLE*                                     pHandle) const;
+#endif
+#ifdef VK_USE_PLATFORM_VI_NN
+bool PreCallValidateCreateViSurfaceNN(
+    VkInstance                                  instance,
+    const VkViSurfaceCreateInfoNN*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const;
+#endif
+bool PreCallValidateCmdBeginConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkConditionalRenderingBeginInfoEXT*   pConditionalRenderingBegin) const;
+bool PreCallValidateCmdEndConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer) const;
+bool PreCallValidateCmdProcessCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdProcessCommandsInfoNVX*          pProcessCommandsInfo) const;
+bool PreCallValidateCmdReserveSpaceForCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdReserveSpaceForCommandsInfoNVX*  pReserveSpaceInfo) const;
+bool PreCallValidateCreateIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkIndirectCommandsLayoutNVX*                pIndirectCommandsLayout) const;
+bool PreCallValidateDestroyIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    VkIndirectCommandsLayoutNVX                 indirectCommandsLayout,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateCreateObjectTableNVX(
+    VkDevice                                    device,
+    const VkObjectTableCreateInfoNVX*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkObjectTableNVX*                           pObjectTable) const;
+bool PreCallValidateDestroyObjectTableNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateRegisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectTableEntryNVX* const*         ppObjectTableEntries,
+    const uint32_t*                             pObjectIndices) const;
+bool PreCallValidateUnregisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectEntryTypeNVX*                 pObjectEntryTypes,
+    const uint32_t*                             pObjectIndices) const;
+bool PreCallValidateGetPhysicalDeviceGeneratedCommandsPropertiesNVX(
+    VkPhysicalDevice                            physicalDevice,
+    VkDeviceGeneratedCommandsFeaturesNVX*       pFeatures,
+    VkDeviceGeneratedCommandsLimitsNVX*         pLimits) const;
+bool PreCallValidateCmdSetViewportWScalingNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewportWScalingNV*                 pViewportWScalings) const;
+bool PreCallValidateReleaseDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display) const;
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+bool PreCallValidateAcquireXlibDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    VkDisplayKHR                                display) const;
+#endif
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+bool PreCallValidateGetRandROutputDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    RROutput                                    rrOutput,
+    VkDisplayKHR*                               pDisplay) const;
+#endif
+bool PreCallValidateGetPhysicalDeviceSurfaceCapabilities2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilities2EXT*                  pSurfaceCapabilities) const;
+bool PreCallValidateDisplayPowerControlEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayPowerInfoEXT*                pDisplayPowerInfo) const;
+bool PreCallValidateRegisterDeviceEventEXT(
+    VkDevice                                    device,
+    const VkDeviceEventInfoEXT*                 pDeviceEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence) const;
+bool PreCallValidateRegisterDisplayEventEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayEventInfoEXT*                pDisplayEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence) const;
+bool PreCallValidateGetSwapchainCounterEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkSurfaceCounterFlagBitsEXT                 counter,
+    uint64_t*                                   pCounterValue) const;
+bool PreCallValidateGetRefreshCycleDurationGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkRefreshCycleDurationGOOGLE*               pDisplayTimingProperties) const;
+bool PreCallValidateGetPastPresentationTimingGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pPresentationTimingCount,
+    VkPastPresentationTimingGOOGLE*             pPresentationTimings) const;
+bool PreCallValidateCmdSetDiscardRectangleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstDiscardRectangle,
+    uint32_t                                    discardRectangleCount,
+    const VkRect2D*                             pDiscardRectangles) const;
+bool PreCallValidateSetHdrMetadataEXT(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainKHR*                       pSwapchains,
+    const VkHdrMetadataEXT*                     pMetadata) const;
+#ifdef VK_USE_PLATFORM_IOS_MVK
+bool PreCallValidateCreateIOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkIOSSurfaceCreateInfoMVK*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const;
+#endif
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+bool PreCallValidateCreateMacOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkMacOSSurfaceCreateInfoMVK*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const;
+#endif
+bool PreCallValidateSetDebugUtilsObjectNameEXT(
+    VkDevice                                    device,
+    const VkDebugUtilsObjectNameInfoEXT*        pNameInfo) const;
+bool PreCallValidateSetDebugUtilsObjectTagEXT(
+    VkDevice                                    device,
+    const VkDebugUtilsObjectTagInfoEXT*         pTagInfo) const;
+bool PreCallValidateQueueBeginDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) const;
+bool PreCallValidateQueueEndDebugUtilsLabelEXT(
+    VkQueue                                     queue) const;
+bool PreCallValidateQueueInsertDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) const;
+bool PreCallValidateCmdBeginDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) const;
+bool PreCallValidateCmdEndDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer) const;
+bool PreCallValidateCmdInsertDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) const;
+bool PreCallValidateCreateDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    const VkDebugUtilsMessengerCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugUtilsMessengerEXT*                   pMessenger) const;
+bool PreCallValidateDestroyDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessengerEXT                    messenger,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateSubmitDebugUtilsMessageEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessageSeverityFlagBitsEXT      messageSeverity,
+    VkDebugUtilsMessageTypeFlagsEXT             messageTypes,
+    const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData) const;
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+bool PreCallValidateGetAndroidHardwareBufferPropertiesANDROID(
+    VkDevice                                    device,
+    const struct AHardwareBuffer*               buffer,
+    VkAndroidHardwareBufferPropertiesANDROID*   pProperties) const;
+#endif
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+bool PreCallValidateGetMemoryAndroidHardwareBufferANDROID(
+    VkDevice                                    device,
+    const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
+    struct AHardwareBuffer**                    pBuffer) const;
+#endif
+bool PreCallValidateCmdSetSampleLocationsEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkSampleLocationsInfoEXT*             pSampleLocationsInfo) const;
+bool PreCallValidateGetPhysicalDeviceMultisamplePropertiesEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSampleCountFlagBits                       samples,
+    VkMultisamplePropertiesEXT*                 pMultisampleProperties) const;
+bool PreCallValidateGetImageDrmFormatModifierPropertiesEXT(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkImageDrmFormatModifierPropertiesEXT*      pProperties) const;
+bool PreCallValidateCreateValidationCacheEXT(
+    VkDevice                                    device,
+    const VkValidationCacheCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkValidationCacheEXT*                       pValidationCache) const;
+bool PreCallValidateDestroyValidationCacheEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateMergeValidationCachesEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkValidationCacheEXT*                 pSrcCaches) const;
+bool PreCallValidateGetValidationCacheDataEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    size_t*                                     pDataSize,
+    void*                                       pData) const;
+bool PreCallValidateCmdBindShadingRateImageNV(
+    VkCommandBuffer                             commandBuffer,
+    VkImageView                                 imageView,
+    VkImageLayout                               imageLayout) const;
+bool PreCallValidateCmdSetViewportShadingRatePaletteNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkShadingRatePaletteNV*               pShadingRatePalettes) const;
+bool PreCallValidateCmdSetCoarseSampleOrderNV(
+    VkCommandBuffer                             commandBuffer,
+    VkCoarseSampleOrderTypeNV                   sampleOrderType,
+    uint32_t                                    customSampleOrderCount,
+    const VkCoarseSampleOrderCustomNV*          pCustomSampleOrders) const;
+bool PreCallValidateCreateAccelerationStructureNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureCreateInfoNV*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkAccelerationStructureNV*                  pAccelerationStructure) const;
+bool PreCallValidateDestroyAccelerationStructureNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    const VkAllocationCallbacks*                pAllocator) const;
+bool PreCallValidateGetAccelerationStructureMemoryRequirementsNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
+    VkMemoryRequirements2KHR*                   pMemoryRequirements) const;
+bool PreCallValidateBindAccelerationStructureMemoryNV(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindAccelerationStructureMemoryInfoNV* pBindInfos) const;
+bool PreCallValidateCmdBuildAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    const VkAccelerationStructureInfoNV*        pInfo,
+    VkBuffer                                    instanceData,
+    VkDeviceSize                                instanceOffset,
+    VkBool32                                    update,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkBuffer                                    scratch,
+    VkDeviceSize                                scratchOffset) const;
+bool PreCallValidateCmdCopyAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkCopyAccelerationStructureModeNV           mode) const;
+bool PreCallValidateCmdTraceRaysNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    raygenShaderBindingTableBuffer,
+    VkDeviceSize                                raygenShaderBindingOffset,
+    VkBuffer                                    missShaderBindingTableBuffer,
+    VkDeviceSize                                missShaderBindingOffset,
+    VkDeviceSize                                missShaderBindingStride,
+    VkBuffer                                    hitShaderBindingTableBuffer,
+    VkDeviceSize                                hitShaderBindingOffset,
+    VkDeviceSize                                hitShaderBindingStride,
+    VkBuffer                                    callableShaderBindingTableBuffer,
+    VkDeviceSize                                callableShaderBindingOffset,
+    VkDeviceSize                                callableShaderBindingStride,
+    uint32_t                                    width,
+    uint32_t                                    height,
+    uint32_t                                    depth) const;
+bool PreCallValidateCreateRayTracingPipelinesNV(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkRayTracingPipelineCreateInfoNV*     pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines) const;
+bool PreCallValidateGetRayTracingShaderGroupHandlesNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    firstGroup,
+    uint32_t                                    groupCount,
+    size_t                                      dataSize,
+    void*                                       pData) const;
+bool PreCallValidateGetAccelerationStructureHandleNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    size_t                                      dataSize,
+    void*                                       pData) const;
+bool PreCallValidateCmdWriteAccelerationStructuresPropertiesNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    accelerationStructureCount,
+    const VkAccelerationStructureNV*            pAccelerationStructures,
+    VkQueryType                                 queryType,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery) const;
+bool PreCallValidateCompileDeferredNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    shader) const;
+bool PreCallValidateGetMemoryHostPointerPropertiesEXT(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    const void*                                 pHostPointer,
+    VkMemoryHostPointerPropertiesEXT*           pMemoryHostPointerProperties) const;
+bool PreCallValidateCmdWriteBufferMarkerAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    uint32_t                                    marker) const;
+bool PreCallValidateGetPhysicalDeviceCalibrateableTimeDomainsEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pTimeDomainCount,
+    VkTimeDomainEXT*                            pTimeDomains) const;
+bool PreCallValidateGetCalibratedTimestampsEXT(
+    VkDevice                                    device,
+    uint32_t                                    timestampCount,
+    const VkCalibratedTimestampInfoEXT*         pTimestampInfos,
+    uint64_t*                                   pTimestamps,
+    uint64_t*                                   pMaxDeviation) const;
+bool PreCallValidateCmdDrawMeshTasksNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    taskCount,
+    uint32_t                                    firstTask) const;
+bool PreCallValidateCmdDrawMeshTasksIndirectNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride) const;
+bool PreCallValidateCmdDrawMeshTasksIndirectCountNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) const;
+bool PreCallValidateCmdSetExclusiveScissorNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstExclusiveScissor,
+    uint32_t                                    exclusiveScissorCount,
+    const VkRect2D*                             pExclusiveScissors) const;
+bool PreCallValidateCmdSetCheckpointNV(
+    VkCommandBuffer                             commandBuffer,
+    const void*                                 pCheckpointMarker) const;
+bool PreCallValidateGetQueueCheckpointDataNV(
+    VkQueue                                     queue,
+    uint32_t*                                   pCheckpointDataCount,
+    VkCheckpointDataNV*                         pCheckpointData) const;
+bool PreCallValidateInitializePerformanceApiINTEL(
+    VkDevice                                    device,
+    const VkInitializePerformanceApiInfoINTEL*  pInitializeInfo) const;
+bool PreCallValidateUninitializePerformanceApiINTEL(
+    VkDevice                                    device) const;
+bool PreCallValidateCmdSetPerformanceMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceMarkerInfoINTEL*         pMarkerInfo) const;
+bool PreCallValidateCmdSetPerformanceStreamMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceStreamMarkerInfoINTEL*   pMarkerInfo) const;
+bool PreCallValidateCmdSetPerformanceOverrideINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceOverrideInfoINTEL*       pOverrideInfo) const;
+bool PreCallValidateAcquirePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo,
+    VkPerformanceConfigurationINTEL*            pConfiguration) const;
+bool PreCallValidateReleasePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    VkPerformanceConfigurationINTEL             configuration) const;
+bool PreCallValidateQueueSetPerformanceConfigurationINTEL(
+    VkQueue                                     queue,
+    VkPerformanceConfigurationINTEL             configuration) const;
+bool PreCallValidateGetPerformanceParameterINTEL(
+    VkDevice                                    device,
+    VkPerformanceParameterTypeINTEL             parameter,
+    VkPerformanceValueINTEL*                    pValue) const;
+bool PreCallValidateSetLocalDimmingAMD(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapChain,
+    VkBool32                                    localDimmingEnable) const;
+#ifdef VK_USE_PLATFORM_FUCHSIA
+bool PreCallValidateCreateImagePipeSurfaceFUCHSIA(
+    VkInstance                                  instance,
+    const VkImagePipeSurfaceCreateInfoFUCHSIA*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const;
+#endif
+#ifdef VK_USE_PLATFORM_METAL_EXT
+bool PreCallValidateCreateMetalSurfaceEXT(
+    VkInstance                                  instance,
+    const VkMetalSurfaceCreateInfoEXT*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const;
+#endif
+bool PreCallValidateGetBufferDeviceAddressEXT(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo) const;
+bool PreCallValidateGetPhysicalDeviceToolPropertiesEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pToolCount,
+    VkPhysicalDeviceToolPropertiesEXT*          pToolProperties) const;
+bool PreCallValidateGetPhysicalDeviceCooperativeMatrixPropertiesNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkCooperativeMatrixPropertiesNV*            pProperties) const;
+bool PreCallValidateGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pCombinationCount,
+    VkFramebufferMixedSamplesCombinationNV*     pCombinations) const;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateGetPhysicalDeviceSurfacePresentModes2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes) const;
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateAcquireFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain) const;
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool PreCallValidateReleaseFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain) const;
+#endif
+bool PreCallValidateCreateHeadlessSurfaceEXT(
+    VkInstance                                  instance,
+    const VkHeadlessSurfaceCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) const;
+bool PreCallValidateCmdSetLineStippleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    lineStippleFactor,
+    uint16_t                                    lineStipplePattern) const;
+bool PreCallValidateResetQueryPoolEXT(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount) const;
diff --git a/src/third_party/vulkan-validation-layers/src/layers/generated/spirv_tools_commit_id.h b/src/third_party/vulkan-validation-layers/src/layers/generated/spirv_tools_commit_id.h
new file mode 100644
index 0000000..3e9adc5
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/generated/spirv_tools_commit_id.h
@@ -0,0 +1,29 @@
+// *** THIS FILE IS GENERATED - DO NOT EDIT ***
+// See external_revision_generator.py for modifications
+
+/***************************************************************************
+ *
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Chris Forbes <chrisforbes@google.com>
+ * Author: Cort Stratton <cort@google.com>
+ *
+ ****************************************************************************/
+#pragma once
+
+#define SPIRV_TOOLS_COMMIT_ID "b131630e7c749a5dc19faa458024260c71fb170f"
diff --git a/src/third_party/vulkan-validation-layers/src/layers/generated/thread_safety.cpp b/src/third_party/vulkan-validation-layers/src/layers/generated/thread_safety.cpp
new file mode 100644
index 0000000..a975d02
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/generated/thread_safety.cpp
@@ -0,0 +1,6346 @@
+
+// This file is ***GENERATED***.  Do Not Edit.
+// See thread_safety_generator.py for modifications.
+
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ */
+#include "chassis.h"
+#include "thread_safety.h"
+
+
+void ThreadSafety::PreCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pAllocateInfo,
+                                                       VkCommandBuffer *pCommandBuffers) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(pAllocateInfo->commandPool);
+}
+
+void ThreadSafety::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pAllocateInfo,
+                                                        VkCommandBuffer *pCommandBuffers, VkResult result) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(pAllocateInfo->commandPool);
+
+    // Record mapping from command buffer to command pool
+    if(pCommandBuffers) {
+        auto lock = write_lock_guard_t(thread_safety_lock);
+        auto &pool_command_buffers = pool_command_buffers_map[pAllocateInfo->commandPool];
+        for (uint32_t index = 0; index < pAllocateInfo->commandBufferCount; index++) {
+            command_pool_map.insert_or_assign(pCommandBuffers[index], pAllocateInfo->commandPool);
+            CreateObject(pCommandBuffers[index]);
+            pool_command_buffers.insert(pCommandBuffers[index]);
+        }
+    }
+}
+
+
+void ThreadSafety::PreCallRecordCreateDescriptorSetLayout(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorSetLayout*                      pSetLayout) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordCreateDescriptorSetLayout(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorSetLayout*                      pSetLayout,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pSetLayout);
+
+        // Check whether any binding uses UPDATE_AFTER_BIND
+        bool update_after_bind = false;
+        const auto *flags_create_info = lvl_find_in_chain<VkDescriptorSetLayoutBindingFlagsCreateInfoEXT>(pCreateInfo->pNext);
+        if (flags_create_info) {
+            for (uint32_t i = 0; i < flags_create_info->bindingCount; ++i) {
+                if (flags_create_info->pBindingFlags[i] & VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT) {
+                    update_after_bind = true;
+                    break;
+                }
+            }
+        }
+        dsl_update_after_bind_map.insert_or_assign(*pSetLayout, update_after_bind);
+    }
+}
+
+void ThreadSafety::PreCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
+                                                       VkDescriptorSet *pDescriptorSets) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(pAllocateInfo->descriptorPool);
+    // Host access to pAllocateInfo::descriptorPool must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
+                                                        VkDescriptorSet *pDescriptorSets, VkResult result) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(pAllocateInfo->descriptorPool);
+    // Host access to pAllocateInfo::descriptorPool must be externally synchronized
+    if (VK_SUCCESS == result) {
+        auto lock = write_lock_guard_t(thread_safety_lock);
+        auto &pool_descriptor_sets = pool_descriptor_sets_map[pAllocateInfo->descriptorPool];
+        for (uint32_t index0 = 0; index0 < pAllocateInfo->descriptorSetCount; index0++) {
+            CreateObject(pDescriptorSets[index0]);
+            pool_descriptor_sets.insert(pDescriptorSets[index0]);
+
+            auto iter = dsl_update_after_bind_map.find(pAllocateInfo->pSetLayouts[index0]);
+            if (iter != dsl_update_after_bind_map.end()) {
+                ds_update_after_bind_map.insert_or_assign(pDescriptorSets[index0], iter->second);
+            } else {
+                assert(0 && "descriptor set layout not found");
+            }
+        }
+    }
+}
+
+void ThreadSafety::PreCallRecordFreeDescriptorSets(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(descriptorPool);
+    if (pDescriptorSets) {
+        for (uint32_t index=0; index < descriptorSetCount; index++) {
+            StartWriteObject(pDescriptorSets[index]);
+        }
+    }
+    // Host access to descriptorPool must be externally synchronized
+    // Host access to each member of pDescriptorSets must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordFreeDescriptorSets(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(descriptorPool);
+    if (pDescriptorSets) {
+        for (uint32_t index=0; index < descriptorSetCount; index++) {
+            FinishWriteObject(pDescriptorSets[index]);
+        }
+    }
+    // Host access to descriptorPool must be externally synchronized
+    // Host access to each member of pDescriptorSets must be externally synchronized
+    // Host access to pAllocateInfo::descriptorPool must be externally synchronized
+    if (VK_SUCCESS == result) {
+        auto lock = write_lock_guard_t(thread_safety_lock);
+        auto &pool_descriptor_sets = pool_descriptor_sets_map[descriptorPool];
+        for (uint32_t index0 = 0; index0 < descriptorSetCount; index0++) {
+            DestroyObject(pDescriptorSets[index0]);
+            pool_descriptor_sets.erase(pDescriptorSets[index0]);
+        }
+    }
+}
+
+void ThreadSafety::PreCallRecordDestroyDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(descriptorPool);
+    // Host access to descriptorPool must be externally synchronized
+    auto lock = read_lock_guard_t(thread_safety_lock);
+    auto iterator = pool_descriptor_sets_map.find(descriptorPool);
+    // Possible to have no descriptor sets allocated from pool
+    if (iterator != pool_descriptor_sets_map.end()) {
+        for(auto descriptor_set : pool_descriptor_sets_map[descriptorPool]) {
+            StartWriteObject(descriptor_set);
+        }
+    }
+}
+
+void ThreadSafety::PostCallRecordDestroyDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(descriptorPool);
+    DestroyObject(descriptorPool);
+    // Host access to descriptorPool must be externally synchronized
+    {
+        auto lock = write_lock_guard_t(thread_safety_lock);
+        // remove references to implicitly freed descriptor sets
+        for(auto descriptor_set : pool_descriptor_sets_map[descriptorPool]) {
+            FinishWriteObject(descriptor_set);
+            DestroyObject(descriptor_set);
+        }
+        pool_descriptor_sets_map[descriptorPool].clear();
+        pool_descriptor_sets_map.erase(descriptorPool);
+    }
+}
+
+void ThreadSafety::PreCallRecordResetDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    VkDescriptorPoolResetFlags                  flags) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(descriptorPool);
+    // Host access to descriptorPool must be externally synchronized
+    // any sname:VkDescriptorSet objects allocated from pname:descriptorPool must be externally synchronized between host accesses
+    auto lock = read_lock_guard_t(thread_safety_lock);
+    auto iterator = pool_descriptor_sets_map.find(descriptorPool);
+    // Possible to have no descriptor sets allocated from pool
+    if (iterator != pool_descriptor_sets_map.end()) {
+        for(auto descriptor_set : pool_descriptor_sets_map[descriptorPool]) {
+            StartWriteObject(descriptor_set);
+        }
+    }
+}
+
+void ThreadSafety::PostCallRecordResetDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    VkDescriptorPoolResetFlags                  flags,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(descriptorPool);
+    // Host access to descriptorPool must be externally synchronized
+    // any sname:VkDescriptorSet objects allocated from pname:descriptorPool must be externally synchronized between host accesses
+    if (VK_SUCCESS == result) {
+        // remove references to implicitly freed descriptor sets
+        auto lock = write_lock_guard_t(thread_safety_lock);
+        for(auto descriptor_set : pool_descriptor_sets_map[descriptorPool]) {
+            FinishWriteObject(descriptor_set);
+            DestroyObject(descriptor_set);
+        }
+        pool_descriptor_sets_map[descriptorPool].clear();
+    }
+}
+
+bool ThreadSafety::DsUpdateAfterBind(VkDescriptorSet set) const
+{
+    auto iter = ds_update_after_bind_map.find(set);
+    if (iter != ds_update_after_bind_map.end()) {
+        return iter->second;
+    }
+    return false;
+}
+
+void ThreadSafety::PreCallRecordUpdateDescriptorSets(
+    VkDevice                                    device,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites,
+    uint32_t                                    descriptorCopyCount,
+    const VkCopyDescriptorSet*                  pDescriptorCopies) {
+    StartReadObjectParentInstance(device);
+    if (pDescriptorWrites) {
+        for (uint32_t index=0; index < descriptorWriteCount; index++) {
+            auto dstSet = pDescriptorWrites[index].dstSet;
+            bool update_after_bind = DsUpdateAfterBind(dstSet);
+            if (update_after_bind) {
+                StartReadObject(dstSet);
+            } else {
+                StartWriteObject(dstSet);
+            }
+        }
+    }
+    if (pDescriptorCopies) {
+        for (uint32_t index=0; index < descriptorCopyCount; index++) {
+            auto dstSet = pDescriptorCopies[index].dstSet;
+            bool update_after_bind = DsUpdateAfterBind(dstSet);
+            if (update_after_bind) {
+                StartReadObject(dstSet);
+            } else {
+                StartWriteObject(dstSet);
+            }
+            StartReadObject(pDescriptorCopies[index].srcSet);
+        }
+    }
+    // Host access to pDescriptorWrites[].dstSet must be externally synchronized
+    // Host access to pDescriptorCopies[].dstSet must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordUpdateDescriptorSets(
+    VkDevice                                    device,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites,
+    uint32_t                                    descriptorCopyCount,
+    const VkCopyDescriptorSet*                  pDescriptorCopies) {
+    FinishReadObjectParentInstance(device);
+    if (pDescriptorWrites) {
+        for (uint32_t index=0; index < descriptorWriteCount; index++) {
+            auto dstSet = pDescriptorWrites[index].dstSet;
+            bool update_after_bind = DsUpdateAfterBind(dstSet);
+            if (update_after_bind) {
+                FinishReadObject(dstSet);
+            } else {
+                FinishWriteObject(dstSet);
+            }
+        }
+    }
+    if (pDescriptorCopies) {
+        for (uint32_t index=0; index < descriptorCopyCount; index++) {
+            auto dstSet = pDescriptorCopies[index].dstSet;
+            bool update_after_bind = DsUpdateAfterBind(dstSet);
+            if (update_after_bind) {
+                FinishReadObject(dstSet);
+            } else {
+                FinishWriteObject(dstSet);
+            }
+            FinishReadObject(pDescriptorCopies[index].srcSet);
+        }
+    }
+    // Host access to pDescriptorWrites[].dstSet must be externally synchronized
+    // Host access to pDescriptorCopies[].dstSet must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordUpdateDescriptorSetWithTemplate(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(descriptorUpdateTemplate);
+
+    bool update_after_bind = DsUpdateAfterBind(descriptorSet);
+    if (update_after_bind) {
+        StartReadObject(descriptorSet);
+    } else {
+        StartWriteObject(descriptorSet);
+    }
+    // Host access to descriptorSet must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordUpdateDescriptorSetWithTemplate(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(descriptorUpdateTemplate);
+
+    bool update_after_bind = DsUpdateAfterBind(descriptorSet);
+    if (update_after_bind) {
+        FinishReadObject(descriptorSet);
+    } else {
+        FinishWriteObject(descriptorSet);
+    }
+    // Host access to descriptorSet must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordUpdateDescriptorSetWithTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(descriptorUpdateTemplate);
+
+    bool update_after_bind = DsUpdateAfterBind(descriptorSet);
+    if (update_after_bind) {
+        StartReadObject(descriptorSet);
+    } else {
+        StartWriteObject(descriptorSet);
+    }
+    // Host access to descriptorSet must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordUpdateDescriptorSetWithTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(descriptorUpdateTemplate);
+
+    bool update_after_bind = DsUpdateAfterBind(descriptorSet);
+    if (update_after_bind) {
+        FinishReadObject(descriptorSet);
+    } else {
+        FinishWriteObject(descriptorSet);
+    }
+    // Host access to descriptorSet must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount,
+                                                   const VkCommandBuffer *pCommandBuffers) {
+    const bool lockCommandPool = false;  // pool is already directly locked
+    StartReadObjectParentInstance(device);
+    StartWriteObject(commandPool);
+    if(pCommandBuffers) {
+        // Even though we're immediately "finishing" below, we still are testing for concurrency with any call in process
+        // so this isn't a no-op
+        // The driver may immediately reuse command buffers in another thread.
+        // These updates need to be done before calling down to the driver.
+        auto lock = write_lock_guard_t(thread_safety_lock);
+        auto &pool_command_buffers = pool_command_buffers_map[commandPool];
+        for (uint32_t index = 0; index < commandBufferCount; index++) {
+            StartWriteObject(pCommandBuffers[index], lockCommandPool);
+            FinishWriteObject(pCommandBuffers[index], lockCommandPool);
+            DestroyObject(pCommandBuffers[index]);
+            pool_command_buffers.erase(pCommandBuffers[index]);
+            command_pool_map.erase(pCommandBuffers[index]);
+        }
+    }
+}
+
+void ThreadSafety::PostCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount,
+                                                    const VkCommandBuffer *pCommandBuffers) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(commandPool);
+}
+
+void ThreadSafety::PreCallRecordCreateCommandPool(
+    VkDevice                                    device,
+    const VkCommandPoolCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkCommandPool*                              pCommandPool) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordCreateCommandPool(
+    VkDevice                                    device,
+    const VkCommandPoolCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkCommandPool*                              pCommandPool,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pCommandPool);
+        c_VkCommandPoolContents.CreateObject(*pCommandPool);
+    }
+}
+
+void ThreadSafety::PreCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(commandPool);
+    // Check for any uses of non-externally sync'd command buffers (for example from vkCmdExecuteCommands)
+    c_VkCommandPoolContents.StartWrite(commandPool);
+    // Host access to commandPool must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags, VkResult result) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(commandPool);
+    c_VkCommandPoolContents.FinishWrite(commandPool);
+    // Host access to commandPool must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks *pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(commandPool);
+    // Check for any uses of non-externally sync'd command buffers (for example from vkCmdExecuteCommands)
+    c_VkCommandPoolContents.StartWrite(commandPool);
+    // Host access to commandPool must be externally synchronized
+
+    auto lock = write_lock_guard_t(thread_safety_lock);
+    // The driver may immediately reuse command buffers in another thread.
+    // These updates need to be done before calling down to the driver.
+    // remove references to implicitly freed command pools
+    for(auto command_buffer : pool_command_buffers_map[commandPool]) {
+        DestroyObject(command_buffer);
+    }
+    pool_command_buffers_map[commandPool].clear();
+    pool_command_buffers_map.erase(commandPool);
+}
+
+void ThreadSafety::PostCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks *pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(commandPool);
+    DestroyObject(commandPool);
+    c_VkCommandPoolContents.FinishWrite(commandPool);
+    c_VkCommandPoolContents.DestroyObject(commandPool);
+}
+
+// GetSwapchainImages can return a non-zero count with a NULL pSwapchainImages pointer.  Let's avoid crashes by ignoring
+// pSwapchainImages.
+void ThreadSafety::PreCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
+                                                      VkImage *pSwapchainImages) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(swapchain);
+}
+
+void ThreadSafety::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
+                                                       VkImage *pSwapchainImages, VkResult result) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(swapchain);
+    if (pSwapchainImages != NULL) {
+        auto lock = write_lock_guard_t(thread_safety_lock);
+        auto &wrapped_swapchain_image_handles = swapchain_wrapped_image_handle_map[swapchain];
+        for (uint32_t i = static_cast<uint32_t>(wrapped_swapchain_image_handles.size()); i < *pSwapchainImageCount; i++) {
+            CreateObject(pSwapchainImages[i]);
+            wrapped_swapchain_image_handles.emplace_back(pSwapchainImages[i]);
+        }
+    }
+}
+
+void ThreadSafety::PreCallRecordDestroySwapchainKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(swapchain);
+    // Host access to swapchain must be externally synchronized
+    auto lock = read_lock_guard_t(thread_safety_lock);
+    for (auto &image_handle : swapchain_wrapped_image_handle_map[swapchain]) {
+        StartWriteObject(image_handle);
+    }
+}
+
+void ThreadSafety::PostCallRecordDestroySwapchainKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(swapchain);
+    DestroyObject(swapchain);
+    // Host access to swapchain must be externally synchronized
+    auto lock = write_lock_guard_t(thread_safety_lock);
+    for (auto &image_handle : swapchain_wrapped_image_handle_map[swapchain]) {
+        FinishWriteObject(image_handle);
+        DestroyObject(image_handle);
+    }
+    swapchain_wrapped_image_handle_map.erase(swapchain);
+}
+
+void ThreadSafety::PreCallRecordDestroyDevice(
+    VkDevice                                    device,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartWriteObjectParentInstance(device);
+    // Host access to device must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordDestroyDevice(
+    VkDevice                                    device,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishWriteObjectParentInstance(device);
+    DestroyObjectParentInstance(device);
+    // Host access to device must be externally synchronized
+    auto lock = write_lock_guard_t(thread_safety_lock);
+    for (auto &queue : device_queues_map[device]) {
+        DestroyObject(queue);
+    }
+    device_queues_map[device].clear();
+}
+
+void ThreadSafety::PreCallRecordGetDeviceQueue(
+    VkDevice                                    device,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t                                    queueIndex,
+    VkQueue*                                    pQueue) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetDeviceQueue(
+    VkDevice                                    device,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t                                    queueIndex,
+    VkQueue*                                    pQueue) {
+    FinishReadObjectParentInstance(device);
+    CreateObject(*pQueue);
+    auto lock = write_lock_guard_t(thread_safety_lock);
+    device_queues_map[device].insert(*pQueue);
+}
+
+void ThreadSafety::PreCallRecordGetDeviceQueue2(
+    VkDevice                                    device,
+    const VkDeviceQueueInfo2*                   pQueueInfo,
+    VkQueue*                                    pQueue) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetDeviceQueue2(
+    VkDevice                                    device,
+    const VkDeviceQueueInfo2*                   pQueueInfo,
+    VkQueue*                                    pQueue) {
+    FinishReadObjectParentInstance(device);
+    CreateObject(*pQueue);
+    auto lock = write_lock_guard_t(thread_safety_lock);
+    device_queues_map[device].insert(*pQueue);
+}
+
+
+
+void ThreadSafety::PreCallRecordCreateInstance(
+    const VkInstanceCreateInfo*                 pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkInstance*                                 pInstance) {
+    
+}
+
+void ThreadSafety::PostCallRecordCreateInstance(
+    const VkInstanceCreateInfo*                 pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkInstance*                                 pInstance,
+    VkResult                                    result) {
+    if (result == VK_SUCCESS) {
+        CreateObjectParentInstance(*pInstance);
+    }
+}
+
+void ThreadSafety::PreCallRecordDestroyInstance(
+    VkInstance                                  instance,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartWriteObjectParentInstance(instance);
+    // Host access to instance must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordDestroyInstance(
+    VkInstance                                  instance,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishWriteObjectParentInstance(instance);
+    DestroyObjectParentInstance(instance);
+    // Host access to instance must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordEnumeratePhysicalDevices(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceCount,
+    VkPhysicalDevice*                           pPhysicalDevices) {
+    StartReadObjectParentInstance(instance);
+}
+
+void ThreadSafety::PostCallRecordEnumeratePhysicalDevices(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceCount,
+    VkPhysicalDevice*                           pPhysicalDevices,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(instance);
+}
+
+void ThreadSafety::PreCallRecordGetInstanceProcAddr(
+    VkInstance                                  instance,
+    const char*                                 pName) {
+    StartReadObjectParentInstance(instance);
+}
+
+void ThreadSafety::PostCallRecordGetInstanceProcAddr(
+    VkInstance                                  instance,
+    const char*                                 pName) {
+    FinishReadObjectParentInstance(instance);
+}
+
+void ThreadSafety::PreCallRecordGetDeviceProcAddr(
+    VkDevice                                    device,
+    const char*                                 pName) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetDeviceProcAddr(
+    VkDevice                                    device,
+    const char*                                 pName) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordCreateDevice(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDeviceCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDevice*                                   pDevice) {
+    
+}
+
+void ThreadSafety::PostCallRecordCreateDevice(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDeviceCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDevice*                                   pDevice,
+    VkResult                                    result) {
+    if (result == VK_SUCCESS) {
+        CreateObjectParentInstance(*pDevice);
+    }
+}
+
+void ThreadSafety::PreCallRecordQueueSubmit(
+    VkQueue                                     queue,
+    uint32_t                                    submitCount,
+    const VkSubmitInfo*                         pSubmits,
+    VkFence                                     fence) {
+    StartWriteObject(queue);
+    StartWriteObject(fence);
+    // Host access to queue must be externally synchronized
+    // Host access to fence must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordQueueSubmit(
+    VkQueue                                     queue,
+    uint32_t                                    submitCount,
+    const VkSubmitInfo*                         pSubmits,
+    VkFence                                     fence,
+    VkResult                                    result) {
+    FinishWriteObject(queue);
+    FinishWriteObject(fence);
+    // Host access to queue must be externally synchronized
+    // Host access to fence must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordQueueWaitIdle(
+    VkQueue                                     queue) {
+    StartWriteObject(queue);
+    // Host access to queue must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordQueueWaitIdle(
+    VkQueue                                     queue,
+    VkResult                                    result) {
+    FinishWriteObject(queue);
+    // Host access to queue must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordDeviceWaitIdle(
+    VkDevice                                    device) {
+    StartReadObjectParentInstance(device);
+    // all sname:VkQueue objects created from pname:device must be externally synchronized between host accesses
+}
+
+void ThreadSafety::PostCallRecordDeviceWaitIdle(
+    VkDevice                                    device,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    // all sname:VkQueue objects created from pname:device must be externally synchronized between host accesses
+}
+
+void ThreadSafety::PreCallRecordAllocateMemory(
+    VkDevice                                    device,
+    const VkMemoryAllocateInfo*                 pAllocateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDeviceMemory*                             pMemory) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordAllocateMemory(
+    VkDevice                                    device,
+    const VkMemoryAllocateInfo*                 pAllocateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDeviceMemory*                             pMemory,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pMemory);
+    }
+}
+
+void ThreadSafety::PreCallRecordFreeMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(memory);
+    // Host access to memory must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordFreeMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(memory);
+    DestroyObject(memory);
+    // Host access to memory must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordMapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                offset,
+    VkDeviceSize                                size,
+    VkMemoryMapFlags                            flags,
+    void**                                      ppData) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(memory);
+    // Host access to memory must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordMapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                offset,
+    VkDeviceSize                                size,
+    VkMemoryMapFlags                            flags,
+    void**                                      ppData,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(memory);
+    // Host access to memory must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordUnmapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(memory);
+    // Host access to memory must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordUnmapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(memory);
+    // Host access to memory must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordFlushMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordFlushMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordInvalidateMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordInvalidateMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordGetDeviceMemoryCommitment(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize*                               pCommittedMemoryInBytes) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(memory);
+}
+
+void ThreadSafety::PostCallRecordGetDeviceMemoryCommitment(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize*                               pCommittedMemoryInBytes) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(memory);
+}
+
+void ThreadSafety::PreCallRecordBindBufferMemory(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(buffer);
+    StartReadObject(memory);
+    // Host access to buffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordBindBufferMemory(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(buffer);
+    FinishReadObject(memory);
+    // Host access to buffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordBindImageMemory(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(image);
+    StartReadObject(memory);
+    // Host access to image must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordBindImageMemory(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(image);
+    FinishReadObject(memory);
+    // Host access to image must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordGetBufferMemoryRequirements(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkMemoryRequirements*                       pMemoryRequirements) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(buffer);
+}
+
+void ThreadSafety::PostCallRecordGetBufferMemoryRequirements(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkMemoryRequirements*                       pMemoryRequirements) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(buffer);
+}
+
+void ThreadSafety::PreCallRecordGetImageMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkMemoryRequirements*                       pMemoryRequirements) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(image);
+}
+
+void ThreadSafety::PostCallRecordGetImageMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkMemoryRequirements*                       pMemoryRequirements) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(image);
+}
+
+void ThreadSafety::PreCallRecordGetImageSparseMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements*            pSparseMemoryRequirements) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(image);
+}
+
+void ThreadSafety::PostCallRecordGetImageSparseMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements*            pSparseMemoryRequirements) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(image);
+}
+
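+// vkQueueBindSparse touches several handles at once: the two hooks below
+// write-track the queue, the fence, and every buffer/image referenced through
+// pBindInfo, which mirrors the external-synchronization requirements repeated
+// in the trailing comments of each hook.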
+void ThreadSafety::PreCallRecordQueueBindSparse(
+    VkQueue                                     queue,
+    uint32_t                                    bindInfoCount,
+    const VkBindSparseInfo*                     pBindInfo,
+    VkFence                                     fence) {
+    StartWriteObject(queue);
+    if (pBindInfo) {
+        for (uint32_t index = 0; index < bindInfoCount; index++) {
+            if (pBindInfo[index].pBufferBinds) {
+                for (uint32_t index2 = 0; index2 < pBindInfo[index].bufferBindCount; index2++) {
+                    StartWriteObject(pBindInfo[index].pBufferBinds[index2].buffer);
+                }
+            }
+            if (pBindInfo[index].pImageOpaqueBinds) {
+                for (uint32_t index2 = 0; index2 < pBindInfo[index].imageOpaqueBindCount; index2++) {
+                    StartWriteObject(pBindInfo[index].pImageOpaqueBinds[index2].image);
+                }
+            }
+            if (pBindInfo[index].pImageBinds) {
+                for (uint32_t index2 = 0; index2 < pBindInfo[index].imageBindCount; index2++) {
+                    StartWriteObject(pBindInfo[index].pImageBinds[index2].image);
+                }
+            }
+        }
+    }
+    StartWriteObject(fence);
+    // Host access to queue must be externally synchronized
+    // Host access to pBindInfo[].pBufferBinds[].buffer, pBindInfo[].pImageOpaqueBinds[].image, pBindInfo[].pImageBinds[].image must be externally synchronized
+    // Host access to fence must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordQueueBindSparse(
+    VkQueue                                     queue,
+    uint32_t                                    bindInfoCount,
+    const VkBindSparseInfo*                     pBindInfo,
+    VkFence                                     fence,
+    VkResult                                    result) {
+    FinishWriteObject(queue);
+    if (pBindInfo) {
+        for (uint32_t index = 0; index < bindInfoCount; index++) {
+            if (pBindInfo[index].pBufferBinds) {
+                for (uint32_t index2 = 0; index2 < pBindInfo[index].bufferBindCount; index2++) {
+                    FinishWriteObject(pBindInfo[index].pBufferBinds[index2].buffer);
+                }
+            }
+            if (pBindInfo[index].pImageOpaqueBinds) {
+                for (uint32_t index2 = 0; index2 < pBindInfo[index].imageOpaqueBindCount; index2++) {
+                    FinishWriteObject(pBindInfo[index].pImageOpaqueBinds[index2].image);
+                }
+            }
+            if (pBindInfo[index].pImageBinds) {
+                for (uint32_t index2 = 0; index2 < pBindInfo[index].imageBindCount; index2++) {
+                    FinishWriteObject(pBindInfo[index].pImageBinds[index2].image);
+                }
+            }
+        }
+    }
+    FinishWriteObject(fence);
+    // Host access to queue must be externally synchronized
+    // Host access to pBindInfo[].pBufferBinds[].buffer, pBindInfo[].pImageOpaqueBinds[].image, pBindInfo[].pImageBinds[].image must be externally synchronized
+    // Host access to fence must be externally synchronized
+}
+
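+// The object create/destroy hooks below follow one pattern: PostCallRecordCreate*
+// registers the new handle with the tracker only when result == VK_SUCCESS, and
+// the Destroy* hooks write-track the handle (Start/FinishWriteObject) before
+// removing it from tracking via DestroyObject().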
+void ThreadSafety::PreCallRecordCreateFence(
+    VkDevice                                    device,
+    const VkFenceCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordCreateFence(
+    VkDevice                                    device,
+    const VkFenceCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pFence);
+    }
+}
+
+void ThreadSafety::PreCallRecordDestroyFence(
+    VkDevice                                    device,
+    VkFence                                     fence,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(fence);
+    // Host access to fence must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordDestroyFence(
+    VkDevice                                    device,
+    VkFence                                     fence,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(fence);
+    DestroyObject(fence);
+    // Host access to fence must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordResetFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences) {
+    StartReadObjectParentInstance(device);
+    if (pFences) {
+        for (uint32_t index = 0; index < fenceCount; index++) {
+            StartWriteObject(pFences[index]);
+        }
+    }
+    // Host access to each member of pFences must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordResetFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (pFences) {
+        for (uint32_t index = 0; index < fenceCount; index++) {
+            FinishWriteObject(pFences[index]);
+        }
+    }
+    // Host access to each member of pFences must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordGetFenceStatus(
+    VkDevice                                    device,
+    VkFence                                     fence) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(fence);
+}
+
+void ThreadSafety::PostCallRecordGetFenceStatus(
+    VkDevice                                    device,
+    VkFence                                     fence,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(fence);
+}
+
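+// Unlike vkResetFences above, which write-tracks each fence (host access must be
+// externally synchronized), vkWaitForFences only read-tracks the fences,
+// reflecting that multiple threads may wait on the same fence concurrently.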
+void ThreadSafety::PreCallRecordWaitForFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences,
+    VkBool32                                    waitAll,
+    uint64_t                                    timeout) {
+    StartReadObjectParentInstance(device);
+    if (pFences) {
+        for (uint32_t index = 0; index < fenceCount; index++) {
+            StartReadObject(pFences[index]);
+        }
+    }
+}
+
+void ThreadSafety::PostCallRecordWaitForFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences,
+    VkBool32                                    waitAll,
+    uint64_t                                    timeout,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (pFences) {
+        for (uint32_t index = 0; index < fenceCount; index++) {
+            FinishReadObject(pFences[index]);
+        }
+    }
+}
+
+void ThreadSafety::PreCallRecordCreateSemaphore(
+    VkDevice                                    device,
+    const VkSemaphoreCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSemaphore*                                pSemaphore) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordCreateSemaphore(
+    VkDevice                                    device,
+    const VkSemaphoreCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSemaphore*                                pSemaphore,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pSemaphore);
+    }
+}
+
+void ThreadSafety::PreCallRecordDestroySemaphore(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(semaphore);
+    // Host access to semaphore must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordDestroySemaphore(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(semaphore);
+    DestroyObject(semaphore);
+    // Host access to semaphore must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCreateEvent(
+    VkDevice                                    device,
+    const VkEventCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkEvent*                                    pEvent) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordCreateEvent(
+    VkDevice                                    device,
+    const VkEventCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkEvent*                                    pEvent,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pEvent);
+    }
+}
+
+void ThreadSafety::PreCallRecordDestroyEvent(
+    VkDevice                                    device,
+    VkEvent                                     event,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(event);
+    // Host access to event must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordDestroyEvent(
+    VkDevice                                    device,
+    VkEvent                                     event,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(event);
+    DestroyObject(event);
+    // Host access to event must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordGetEventStatus(
+    VkDevice                                    device,
+    VkEvent                                     event) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(event);
+}
+
+void ThreadSafety::PostCallRecordGetEventStatus(
+    VkDevice                                    device,
+    VkEvent                                     event,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(event);
+}
+
+void ThreadSafety::PreCallRecordSetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(event);
+    // Host access to event must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordSetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(event);
+    // Host access to event must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordResetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(event);
+    // Host access to event must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordResetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(event);
+    // Host access to event must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCreateQueryPool(
+    VkDevice                                    device,
+    const VkQueryPoolCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkQueryPool*                                pQueryPool) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordCreateQueryPool(
+    VkDevice                                    device,
+    const VkQueryPoolCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkQueryPool*                                pQueryPool,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pQueryPool);
+    }
+}
+
+void ThreadSafety::PreCallRecordDestroyQueryPool(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(queryPool);
+    // Host access to queryPool must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordDestroyQueryPool(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(queryPool);
+    DestroyObject(queryPool);
+    // Host access to queryPool must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordGetQueryPoolResults(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    size_t                                      dataSize,
+    void*                                       pData,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(queryPool);
+}
+
+void ThreadSafety::PostCallRecordGetQueryPoolResults(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    size_t                                      dataSize,
+    void*                                       pData,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(queryPool);
+}
+
+void ThreadSafety::PreCallRecordCreateBuffer(
+    VkDevice                                    device,
+    const VkBufferCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBuffer*                                   pBuffer) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordCreateBuffer(
+    VkDevice                                    device,
+    const VkBufferCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBuffer*                                   pBuffer,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pBuffer);
+    }
+}
+
+void ThreadSafety::PreCallRecordDestroyBuffer(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(buffer);
+    // Host access to buffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordDestroyBuffer(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(buffer);
+    DestroyObject(buffer);
+    // Host access to buffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCreateBufferView(
+    VkDevice                                    device,
+    const VkBufferViewCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBufferView*                               pView) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordCreateBufferView(
+    VkDevice                                    device,
+    const VkBufferViewCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBufferView*                               pView,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pView);
+    }
+}
+
+void ThreadSafety::PreCallRecordDestroyBufferView(
+    VkDevice                                    device,
+    VkBufferView                                bufferView,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(bufferView);
+    // Host access to bufferView must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordDestroyBufferView(
+    VkDevice                                    device,
+    VkBufferView                                bufferView,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(bufferView);
+    DestroyObject(bufferView);
+    // Host access to bufferView must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCreateImage(
+    VkDevice                                    device,
+    const VkImageCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImage*                                    pImage) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordCreateImage(
+    VkDevice                                    device,
+    const VkImageCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImage*                                    pImage,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pImage);
+    }
+}
+
+void ThreadSafety::PreCallRecordDestroyImage(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(image);
+    // Host access to image must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordDestroyImage(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(image);
+    DestroyObject(image);
+    // Host access to image must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordGetImageSubresourceLayout(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkImageSubresource*                   pSubresource,
+    VkSubresourceLayout*                        pLayout) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(image);
+}
+
+void ThreadSafety::PostCallRecordGetImageSubresourceLayout(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkImageSubresource*                   pSubresource,
+    VkSubresourceLayout*                        pLayout) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(image);
+}
+
+void ThreadSafety::PreCallRecordCreateImageView(
+    VkDevice                                    device,
+    const VkImageViewCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImageView*                                pView) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordCreateImageView(
+    VkDevice                                    device,
+    const VkImageViewCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImageView*                                pView,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pView);
+    }
+}
+
+void ThreadSafety::PreCallRecordDestroyImageView(
+    VkDevice                                    device,
+    VkImageView                                 imageView,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(imageView);
+    // Host access to imageView must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordDestroyImageView(
+    VkDevice                                    device,
+    VkImageView                                 imageView,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(imageView);
+    DestroyObject(imageView);
+    // Host access to imageView must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCreateShaderModule(
+    VkDevice                                    device,
+    const VkShaderModuleCreateInfo*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkShaderModule*                             pShaderModule) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordCreateShaderModule(
+    VkDevice                                    device,
+    const VkShaderModuleCreateInfo*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkShaderModule*                             pShaderModule,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pShaderModule);
+    }
+}
+
+void ThreadSafety::PreCallRecordDestroyShaderModule(
+    VkDevice                                    device,
+    VkShaderModule                              shaderModule,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(shaderModule);
+    // Host access to shaderModule must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordDestroyShaderModule(
+    VkDevice                                    device,
+    VkShaderModule                              shaderModule,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(shaderModule);
+    DestroyObject(shaderModule);
+    // Host access to shaderModule must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCreatePipelineCache(
+    VkDevice                                    device,
+    const VkPipelineCacheCreateInfo*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineCache*                            pPipelineCache) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordCreatePipelineCache(
+    VkDevice                                    device,
+    const VkPipelineCacheCreateInfo*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineCache*                            pPipelineCache,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pPipelineCache);
+    }
+}
+
+void ThreadSafety::PreCallRecordDestroyPipelineCache(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(pipelineCache);
+    // Host access to pipelineCache must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordDestroyPipelineCache(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(pipelineCache);
+    DestroyObject(pipelineCache);
+    // Host access to pipelineCache must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordGetPipelineCacheData(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    size_t*                                     pDataSize,
+    void*                                       pData) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(pipelineCache);
+}
+
+void ThreadSafety::PostCallRecordGetPipelineCacheData(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    size_t*                                     pDataSize,
+    void*                                       pData,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(pipelineCache);
+}
+
+void ThreadSafety::PreCallRecordMergePipelineCaches(
+    VkDevice                                    device,
+    VkPipelineCache                             dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkPipelineCache*                      pSrcCaches) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(dstCache);
+    if (pSrcCaches) {
+        for (uint32_t index = 0; index < srcCacheCount; index++) {
+            StartReadObject(pSrcCaches[index]);
+        }
+    }
+    // Host access to dstCache must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordMergePipelineCaches(
+    VkDevice                                    device,
+    VkPipelineCache                             dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkPipelineCache*                      pSrcCaches,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(dstCache);
+    if (pSrcCaches) {
+        for (uint32_t index = 0; index < srcCacheCount; index++) {
+            FinishReadObject(pSrcCaches[index]);
+        }
+    }
+    // Host access to dstCache must be externally synchronized
+}
+
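+// For the batched pipeline-creation calls below, the Post hooks register each
+// returned handle individually and skip VK_NULL_HANDLE entries
+// (if (!pPipelines[index]) continue;), so a partially failed
+// vkCreateGraphicsPipelines/vkCreateComputePipelines call does not register
+// handles that were never created.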
+void ThreadSafety::PreCallRecordCreateGraphicsPipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkGraphicsPipelineCreateInfo*         pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(pipelineCache);
+}
+
+void ThreadSafety::PostCallRecordCreateGraphicsPipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkGraphicsPipelineCreateInfo*         pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(pipelineCache);
+    if (pPipelines) {
+        for (uint32_t index = 0; index < createInfoCount; index++) {
+            if (!pPipelines[index]) continue;
+            CreateObject(pPipelines[index]);
+        }
+    }
+}
+
+void ThreadSafety::PreCallRecordCreateComputePipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkComputePipelineCreateInfo*          pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(pipelineCache);
+}
+
+void ThreadSafety::PostCallRecordCreateComputePipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkComputePipelineCreateInfo*          pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(pipelineCache);
+    if (pPipelines) {
+        for (uint32_t index = 0; index < createInfoCount; index++) {
+            if (!pPipelines[index]) continue;
+            CreateObject(pPipelines[index]);
+        }
+    }
+}
+
+void ThreadSafety::PreCallRecordDestroyPipeline(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(pipeline);
+    // Host access to pipeline must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordDestroyPipeline(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(pipeline);
+    DestroyObject(pipeline);
+    // Host access to pipeline must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCreatePipelineLayout(
+    VkDevice                                    device,
+    const VkPipelineLayoutCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineLayout*                           pPipelineLayout) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordCreatePipelineLayout(
+    VkDevice                                    device,
+    const VkPipelineLayoutCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineLayout*                           pPipelineLayout,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pPipelineLayout);
+    }
+}
+
+void ThreadSafety::PreCallRecordDestroyPipelineLayout(
+    VkDevice                                    device,
+    VkPipelineLayout                            pipelineLayout,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(pipelineLayout);
+    // Host access to pipelineLayout must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordDestroyPipelineLayout(
+    VkDevice                                    device,
+    VkPipelineLayout                            pipelineLayout,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(pipelineLayout);
+    DestroyObject(pipelineLayout);
+    // Host access to pipelineLayout must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCreateSampler(
+    VkDevice                                    device,
+    const VkSamplerCreateInfo*                  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSampler*                                  pSampler) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordCreateSampler(
+    VkDevice                                    device,
+    const VkSamplerCreateInfo*                  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSampler*                                  pSampler,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pSampler);
+    }
+}
+
+void ThreadSafety::PreCallRecordDestroySampler(
+    VkDevice                                    device,
+    VkSampler                                   sampler,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(sampler);
+    // Host access to sampler must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordDestroySampler(
+    VkDevice                                    device,
+    VkSampler                                   sampler,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(sampler);
+    DestroyObject(sampler);
+    // Host access to sampler must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordDestroyDescriptorSetLayout(
+    VkDevice                                    device,
+    VkDescriptorSetLayout                       descriptorSetLayout,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(descriptorSetLayout);
+    // Host access to descriptorSetLayout must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordDestroyDescriptorSetLayout(
+    VkDevice                                    device,
+    VkDescriptorSetLayout                       descriptorSetLayout,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(descriptorSetLayout);
+    DestroyObject(descriptorSetLayout);
+    // Host access to descriptorSetLayout must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCreateDescriptorPool(
+    VkDevice                                    device,
+    const VkDescriptorPoolCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorPool*                           pDescriptorPool) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordCreateDescriptorPool(
+    VkDevice                                    device,
+    const VkDescriptorPoolCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorPool*                           pDescriptorPool,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pDescriptorPool);
+    }
+}
+
+void ThreadSafety::PreCallRecordCreateFramebuffer(
+    VkDevice                                    device,
+    const VkFramebufferCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFramebuffer*                              pFramebuffer) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordCreateFramebuffer(
+    VkDevice                                    device,
+    const VkFramebufferCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFramebuffer*                              pFramebuffer,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pFramebuffer);
+    }
+}
+
+void ThreadSafety::PreCallRecordDestroyFramebuffer(
+    VkDevice                                    device,
+    VkFramebuffer                               framebuffer,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(framebuffer);
+    // Host access to framebuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordDestroyFramebuffer(
+    VkDevice                                    device,
+    VkFramebuffer                               framebuffer,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(framebuffer);
+    DestroyObject(framebuffer);
+    // Host access to framebuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCreateRenderPass(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordCreateRenderPass(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pRenderPass);
+    }
+}
+
+void ThreadSafety::PreCallRecordDestroyRenderPass(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(renderPass);
+    // Host access to renderPass must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordDestroyRenderPass(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(renderPass);
+    DestroyObject(renderPass);
+    // Host access to renderPass must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordGetRenderAreaGranularity(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    VkExtent2D*                                 pGranularity) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(renderPass);
+}
+
+void ThreadSafety::PostCallRecordGetRenderAreaGranularity(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    VkExtent2D*                                 pGranularity) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(renderPass);
+}
+
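+// The command-buffer recording hooks below write-track only the command buffer
+// itself; objects bound or referenced while recording (pipelines, buffers,
+// descriptor sets) are read-tracked, since the same object may be recorded into
+// multiple command buffers from different threads.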
+void ThreadSafety::PreCallRecordBeginCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    const VkCommandBufferBeginInfo*             pBeginInfo) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+    // the VkCommandPool that commandBuffer was allocated from must be externally synchronized between host accesses
+}
+
+void ThreadSafety::PostCallRecordBeginCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    const VkCommandBufferBeginInfo*             pBeginInfo,
+    VkResult                                    result) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+    // the VkCommandPool that commandBuffer was allocated from must be externally synchronized between host accesses
+}
+
+void ThreadSafety::PreCallRecordEndCommandBuffer(
+    VkCommandBuffer                             commandBuffer) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+    // the VkCommandPool that commandBuffer was allocated from must be externally synchronized between host accesses
+}
+
+void ThreadSafety::PostCallRecordEndCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkResult                                    result) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+    // the VkCommandPool that commandBuffer was allocated from must be externally synchronized between host accesses
+}
+
+void ThreadSafety::PreCallRecordResetCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkCommandBufferResetFlags                   flags) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordResetCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkCommandBufferResetFlags                   flags,
+    VkResult                                    result) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdBindPipeline(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipeline                                  pipeline) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(pipeline);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdBindPipeline(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipeline                                  pipeline) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(pipeline);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdSetViewport(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewport*                           pViewports) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdSetViewport(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewport*                           pViewports) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdSetScissor(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstScissor,
+    uint32_t                                    scissorCount,
+    const VkRect2D*                             pScissors) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdSetScissor(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstScissor,
+    uint32_t                                    scissorCount,
+    const VkRect2D*                             pScissors) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdSetLineWidth(
+    VkCommandBuffer                             commandBuffer,
+    float                                       lineWidth) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdSetLineWidth(
+    VkCommandBuffer                             commandBuffer,
+    float                                       lineWidth) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdSetDepthBias(
+    VkCommandBuffer                             commandBuffer,
+    float                                       depthBiasConstantFactor,
+    float                                       depthBiasClamp,
+    float                                       depthBiasSlopeFactor) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdSetDepthBias(
+    VkCommandBuffer                             commandBuffer,
+    float                                       depthBiasConstantFactor,
+    float                                       depthBiasClamp,
+    float                                       depthBiasSlopeFactor) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdSetBlendConstants(
+    VkCommandBuffer                             commandBuffer,
+    const float                                 blendConstants[4]) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdSetBlendConstants(
+    VkCommandBuffer                             commandBuffer,
+    const float                                 blendConstants[4]) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdSetDepthBounds(
+    VkCommandBuffer                             commandBuffer,
+    float                                       minDepthBounds,
+    float                                       maxDepthBounds) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdSetDepthBounds(
+    VkCommandBuffer                             commandBuffer,
+    float                                       minDepthBounds,
+    float                                       maxDepthBounds) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdSetStencilCompareMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    compareMask) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdSetStencilCompareMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    compareMask) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdSetStencilWriteMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    writeMask) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdSetStencilWriteMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    writeMask) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdSetStencilReference(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    reference) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdSetStencilReference(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    reference) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdBindDescriptorSets(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    firstSet,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets,
+    uint32_t                                    dynamicOffsetCount,
+    const uint32_t*                             pDynamicOffsets) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(layout);
+    if (pDescriptorSets) {
+        for (uint32_t index = 0; index < descriptorSetCount; index++) {
+            StartReadObject(pDescriptorSets[index]);
+        }
+    }
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdBindDescriptorSets(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    firstSet,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets,
+    uint32_t                                    dynamicOffsetCount,
+    const uint32_t*                             pDynamicOffsets) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(layout);
+    if (pDescriptorSets) {
+        for (uint32_t index = 0; index < descriptorSetCount; index++) {
+            FinishReadObject(pDescriptorSets[index]);
+        }
+    }
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdBindIndexBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkIndexType                                 indexType) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(buffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdBindIndexBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkIndexType                                 indexType) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(buffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdBindVertexBuffers(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets) {
+    StartWriteObject(commandBuffer);
+    if (pBuffers) {
+        for (uint32_t index = 0; index < bindingCount; index++) {
+            StartReadObject(pBuffers[index]);
+        }
+    }
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdBindVertexBuffers(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets) {
+    FinishWriteObject(commandBuffer);
+    if (pBuffers) {
+        for (uint32_t index = 0; index < bindingCount; index++) {
+            FinishReadObject(pBuffers[index]);
+        }
+    }
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdDraw(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    vertexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstVertex,
+    uint32_t                                    firstInstance) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdDraw(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    vertexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstVertex,
+    uint32_t                                    firstInstance) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdDrawIndexed(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    indexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstIndex,
+    int32_t                                     vertexOffset,
+    uint32_t                                    firstInstance) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdDrawIndexed(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    indexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstIndex,
+    int32_t                                     vertexOffset,
+    uint32_t                                    firstInstance) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdDrawIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(buffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdDrawIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(buffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdDrawIndexedIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(buffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdDrawIndexedIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(buffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdDispatch(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdDispatch(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdDispatchIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(buffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdDispatchIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(buffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdCopyBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferCopy*                         pRegions) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(srcBuffer);
+    StartReadObject(dstBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdCopyBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferCopy*                         pRegions) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(srcBuffer);
+    FinishReadObject(dstBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdCopyImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageCopy*                          pRegions) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(srcImage);
+    StartReadObject(dstImage);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdCopyImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageCopy*                          pRegions) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(srcImage);
+    FinishReadObject(dstImage);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdBlitImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageBlit*                          pRegions,
+    VkFilter                                    filter) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(srcImage);
+    StartReadObject(dstImage);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdBlitImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageBlit*                          pRegions,
+    VkFilter                                    filter) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(srcImage);
+    FinishReadObject(dstImage);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdCopyBufferToImage(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(srcBuffer);
+    StartReadObject(dstImage);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdCopyBufferToImage(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(srcBuffer);
+    FinishReadObject(dstImage);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdCopyImageToBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(srcImage);
+    StartReadObject(dstBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdCopyImageToBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(srcImage);
+    FinishReadObject(dstBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdUpdateBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                dataSize,
+    const void*                                 pData) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(dstBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdUpdateBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                dataSize,
+    const void*                                 pData) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(dstBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdFillBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                size,
+    uint32_t                                    data) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(dstBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdFillBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                size,
+    uint32_t                                    data) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(dstBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdClearColorImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearColorValue*                    pColor,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(image);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdClearColorImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearColorValue*                    pColor,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(image);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdClearDepthStencilImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearDepthStencilValue*             pDepthStencil,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(image);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdClearDepthStencilImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearDepthStencilValue*             pDepthStencil,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(image);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdClearAttachments(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    attachmentCount,
+    const VkClearAttachment*                    pAttachments,
+    uint32_t                                    rectCount,
+    const VkClearRect*                          pRects) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdClearAttachments(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    attachmentCount,
+    const VkClearAttachment*                    pAttachments,
+    uint32_t                                    rectCount,
+    const VkClearRect*                          pRects) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdResolveImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageResolve*                       pRegions) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(srcImage);
+    StartReadObject(dstImage);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdResolveImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageResolve*                       pRegions) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(srcImage);
+    FinishReadObject(dstImage);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdSetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(event);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdSetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(event);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdResetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(event);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdResetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(event);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdWaitEvents(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    eventCount,
+    const VkEvent*                              pEvents,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers) {
+    StartWriteObject(commandBuffer);
+    if (pEvents) {
+        for (uint32_t index = 0; index < eventCount; index++) {
+            StartReadObject(pEvents[index]);
+        }
+    }
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdWaitEvents(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    eventCount,
+    const VkEvent*                              pEvents,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers) {
+    FinishWriteObject(commandBuffer);
+    if (pEvents) {
+        for (uint32_t index = 0; index < eventCount; index++) {
+            FinishReadObject(pEvents[index]);
+        }
+    }
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdPipelineBarrier(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    VkDependencyFlags                           dependencyFlags,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdPipelineBarrier(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    VkDependencyFlags                           dependencyFlags,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdBeginQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(queryPool);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdBeginQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(queryPool);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdEndQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(queryPool);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdEndQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(queryPool);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdResetQueryPool(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(queryPool);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdResetQueryPool(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(queryPool);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdWriteTimestamp(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(queryPool);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdWriteTimestamp(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(queryPool);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdCopyQueryPoolResults(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(queryPool);
+    StartReadObject(dstBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdCopyQueryPoolResults(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(queryPool);
+    FinishReadObject(dstBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdPushConstants(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineLayout                            layout,
+    VkShaderStageFlags                          stageFlags,
+    uint32_t                                    offset,
+    uint32_t                                    size,
+    const void*                                 pValues) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(layout);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdPushConstants(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineLayout                            layout,
+    VkShaderStageFlags                          stageFlags,
+    uint32_t                                    offset,
+    uint32_t                                    size,
+    const void*                                 pValues) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(layout);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdBeginRenderPass(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    VkSubpassContents                           contents) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdBeginRenderPass(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    VkSubpassContents                           contents) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdNextSubpass(
+    VkCommandBuffer                             commandBuffer,
+    VkSubpassContents                           contents) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdNextSubpass(
+    VkCommandBuffer                             commandBuffer,
+    VkSubpassContents                           contents) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdEndRenderPass(
+    VkCommandBuffer                             commandBuffer) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdEndRenderPass(
+    VkCommandBuffer                             commandBuffer) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdExecuteCommands(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers) {
+    StartWriteObject(commandBuffer);
+    if (pCommandBuffers) {
+        for (uint32_t index = 0; index < commandBufferCount; index++) {
+            StartReadObject(pCommandBuffers[index]);
+        }
+    }
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdExecuteCommands(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers) {
+    FinishWriteObject(commandBuffer);
+    if (pCommandBuffers) {
+        for (uint32_t index = 0; index < commandBufferCount; index++) {
+            FinishReadObject(pCommandBuffers[index]);
+        }
+    }
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordBindBufferMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordBindBufferMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordBindImageMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordBindImageMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordGetDeviceGroupPeerMemoryFeatures(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetDeviceGroupPeerMemoryFeatures(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordCmdSetDeviceMask(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdSetDeviceMask(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdDispatchBase(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdDispatchBase(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordEnumeratePhysicalDeviceGroups(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties) {
+    StartReadObjectParentInstance(instance);
+}
+
+void ThreadSafety::PostCallRecordEnumeratePhysicalDeviceGroups(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(instance);
+}
+
+void ThreadSafety::PreCallRecordGetImageMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetImageMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordGetBufferMemoryRequirements2(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetBufferMemoryRequirements2(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordGetImageSparseMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetImageSparseMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordTrimCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(commandPool);
+    // Host access to commandPool must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordTrimCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(commandPool);
+    // Host access to commandPool must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCreateSamplerYcbcrConversion(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordCreateSamplerYcbcrConversion(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pYcbcrConversion);
+    }
+}
+
+void ThreadSafety::PreCallRecordDestroySamplerYcbcrConversion(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(ycbcrConversion);
+    // Host access to ycbcrConversion must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordDestroySamplerYcbcrConversion(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(ycbcrConversion);
+    DestroyObject(ycbcrConversion);
+    // Host access to ycbcrConversion must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCreateDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordCreateDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pDescriptorUpdateTemplate);
+    }
+}
+
+void ThreadSafety::PreCallRecordDestroyDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(descriptorUpdateTemplate);
+    // Host access to descriptorUpdateTemplate must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordDestroyDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(descriptorUpdateTemplate);
+    DestroyObject(descriptorUpdateTemplate);
+    // Host access to descriptorUpdateTemplate must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordGetDescriptorSetLayoutSupport(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetDescriptorSetLayoutSupport(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordDestroySurfaceKHR(
+    VkInstance                                  instance,
+    VkSurfaceKHR                                surface,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(instance);
+    StartWriteObjectParentInstance(surface);
+    // Host access to surface must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordDestroySurfaceKHR(
+    VkInstance                                  instance,
+    VkSurfaceKHR                                surface,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(instance);
+    FinishWriteObjectParentInstance(surface);
+    DestroyObjectParentInstance(surface);
+    // Host access to surface must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordGetPhysicalDeviceSurfaceSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    VkSurfaceKHR                                surface,
+    VkBool32*                                   pSupported) {
+    StartReadObjectParentInstance(surface);
+}
+
+void ThreadSafety::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    VkSurfaceKHR                                surface,
+    VkBool32*                                   pSupported,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(surface);
+}
+
+void ThreadSafety::PreCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilitiesKHR*                   pSurfaceCapabilities) {
+    StartReadObjectParentInstance(surface);
+}
+
+void ThreadSafety::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilitiesKHR*                   pSurfaceCapabilities,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(surface);
+}
+
+void ThreadSafety::PreCallRecordGetPhysicalDeviceSurfaceFormatsKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormatKHR*                         pSurfaceFormats) {
+    StartReadObjectParentInstance(surface);
+}
+
+void ThreadSafety::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormatKHR*                         pSurfaceFormats,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(surface);
+}
+
+void ThreadSafety::PreCallRecordGetPhysicalDeviceSurfacePresentModesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes) {
+    StartReadObjectParentInstance(surface);
+}
+
+void ThreadSafety::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(surface);
+}
+
+void ThreadSafety::PreCallRecordCreateSwapchainKHR(
+    VkDevice                                    device,
+    const VkSwapchainCreateInfoKHR*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchain) {
+    StartReadObjectParentInstance(device);
+    StartWriteObjectParentInstance(pCreateInfo->surface);
+    StartWriteObject(pCreateInfo->oldSwapchain);
+    // Host access to pCreateInfo.surface, pCreateInfo.oldSwapchain must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCreateSwapchainKHR(
+    VkDevice                                    device,
+    const VkSwapchainCreateInfoKHR*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchain,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObjectParentInstance(pCreateInfo->surface);
+    FinishWriteObject(pCreateInfo->oldSwapchain);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pSwapchain);
+    }
+    // Host access to pCreateInfo.surface, pCreateInfo.oldSwapchain must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordAcquireNextImageKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint64_t                                    timeout,
+    VkSemaphore                                 semaphore,
+    VkFence                                     fence,
+    uint32_t*                                   pImageIndex) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(swapchain);
+    StartWriteObject(semaphore);
+    StartWriteObject(fence);
+    // Host access to swapchain must be externally synchronized
+    // Host access to semaphore must be externally synchronized
+    // Host access to fence must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordAcquireNextImageKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint64_t                                    timeout,
+    VkSemaphore                                 semaphore,
+    VkFence                                     fence,
+    uint32_t*                                   pImageIndex,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(swapchain);
+    FinishWriteObject(semaphore);
+    FinishWriteObject(fence);
+    // Host access to swapchain must be externally synchronized
+    // Host access to semaphore must be externally synchronized
+    // Host access to fence must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordGetDeviceGroupPresentCapabilitiesKHR(
+    VkDevice                                    device,
+    VkDeviceGroupPresentCapabilitiesKHR*        pDeviceGroupPresentCapabilities) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetDeviceGroupPresentCapabilitiesKHR(
+    VkDevice                                    device,
+    VkDeviceGroupPresentCapabilitiesKHR*        pDeviceGroupPresentCapabilities,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordGetDeviceGroupSurfacePresentModesKHR(
+    VkDevice                                    device,
+    VkSurfaceKHR                                surface,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes) {
+    StartReadObjectParentInstance(device);
+    StartWriteObjectParentInstance(surface);
+    // Host access to surface must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordGetDeviceGroupSurfacePresentModesKHR(
+    VkDevice                                    device,
+    VkSurfaceKHR                                surface,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObjectParentInstance(surface);
+    // Host access to surface must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordGetPhysicalDevicePresentRectanglesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pRectCount,
+    VkRect2D*                                   pRects) {
+    StartWriteObjectParentInstance(surface);
+    // Host access to surface must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordGetPhysicalDevicePresentRectanglesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pRectCount,
+    VkRect2D*                                   pRects,
+    VkResult                                    result) {
+    FinishWriteObjectParentInstance(surface);
+    // Host access to surface must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordAcquireNextImage2KHR(
+    VkDevice                                    device,
+    const VkAcquireNextImageInfoKHR*            pAcquireInfo,
+    uint32_t*                                   pImageIndex) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordAcquireNextImage2KHR(
+    VkDevice                                    device,
+    const VkAcquireNextImageInfoKHR*            pAcquireInfo,
+    uint32_t*                                   pImageIndex,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordGetDisplayPlaneSupportedDisplaysKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    planeIndex,
+    uint32_t*                                   pDisplayCount,
+    VkDisplayKHR*                               pDisplays) {
+    if (pDisplays) {
+        for (uint32_t index = 0; index < *pDisplayCount; index++) {
+            StartReadObject(pDisplays[index]);
+        }
+    }
+}
+
+void ThreadSafety::PostCallRecordGetDisplayPlaneSupportedDisplaysKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    planeIndex,
+    uint32_t*                                   pDisplayCount,
+    VkDisplayKHR*                               pDisplays,
+    VkResult                                    result) {
+    if (pDisplays) {
+        for (uint32_t index = 0; index < *pDisplayCount; index++) {
+            FinishReadObject(pDisplays[index]);
+        }
+    }
+}
+
+void ThreadSafety::PreCallRecordGetDisplayModePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModePropertiesKHR*                 pProperties) {
+    StartReadObject(display);
+}
+
+void ThreadSafety::PostCallRecordGetDisplayModePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModePropertiesKHR*                 pProperties,
+    VkResult                                    result) {
+    FinishReadObject(display);
+}
+
+void ThreadSafety::PreCallRecordCreateDisplayModeKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    const VkDisplayModeCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDisplayModeKHR*                           pMode) {
+    StartWriteObject(display);
+    // Host access to display must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCreateDisplayModeKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    const VkDisplayModeCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDisplayModeKHR*                           pMode,
+    VkResult                                    result) {
+    FinishWriteObject(display);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pMode);
+    }
+    // Host access to display must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordGetDisplayPlaneCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayModeKHR                            mode,
+    uint32_t                                    planeIndex,
+    VkDisplayPlaneCapabilitiesKHR*              pCapabilities) {
+    StartWriteObject(mode);
+    // Host access to mode must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordGetDisplayPlaneCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayModeKHR                            mode,
+    uint32_t                                    planeIndex,
+    VkDisplayPlaneCapabilitiesKHR*              pCapabilities,
+    VkResult                                    result) {
+    FinishWriteObject(mode);
+    // Host access to mode must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCreateDisplayPlaneSurfaceKHR(
+    VkInstance                                  instance,
+    const VkDisplaySurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+    StartReadObjectParentInstance(instance);
+}
+
+void ThreadSafety::PostCallRecordCreateDisplayPlaneSurfaceKHR(
+    VkInstance                                  instance,
+    const VkDisplaySurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(instance);
+    if (result == VK_SUCCESS) {
+        CreateObjectParentInstance(*pSurface);
+    }
+}
+
+void ThreadSafety::PreCallRecordCreateSharedSwapchainsKHR(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainCreateInfoKHR*             pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchains) {
+    StartReadObjectParentInstance(device);
+    if (pCreateInfos) {
+        for (uint32_t index = 0; index < swapchainCount; index++) {
+            StartWriteObjectParentInstance(pCreateInfos[index].surface);
+            StartWriteObject(pCreateInfos[index].oldSwapchain);
+        }
+    }
+    if (pSwapchains) {
+        for (uint32_t index = 0; index < swapchainCount; index++) {
+            StartReadObject(pSwapchains[index]);
+        }
+    }
+    // Host access to pCreateInfos[].surface, pCreateInfos[].oldSwapchain must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCreateSharedSwapchainsKHR(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainCreateInfoKHR*             pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchains,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (pCreateInfos) {
+        for (uint32_t index = 0; index < swapchainCount; index++) {
+            FinishWriteObjectParentInstance(pCreateInfos[index].surface);
+            FinishWriteObject(pCreateInfos[index].oldSwapchain);
+        }
+    }
+    if (result == VK_SUCCESS) {
+        if (pSwapchains) {
+            for (uint32_t index = 0; index < swapchainCount; index++) {
+                CreateObject(pSwapchains[index]);
+            }
+        }
+    }
+    // Host access to pCreateInfos[].surface, pCreateInfos[].oldSwapchain must be externally synchronized
+}
+
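+// Note (editorial annotation): for array parameters the generated hooks expand
+// their scopes per element. Above, each pCreateInfos[i].surface and
+// pCreateInfos[i].oldSwapchain gets its own write scope (both are externally
+// synchronized for vkCreateSharedSwapchainsKHR), the output pSwapchains[i]
+// handles are held under a read scope while the call is in flight, and they are
+// registered with CreateObject() only if the whole call returns VK_SUCCESS.
+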
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+
+void ThreadSafety::PreCallRecordCreateXlibSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXlibSurfaceCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+    StartReadObjectParentInstance(instance);
+}
+
+void ThreadSafety::PostCallRecordCreateXlibSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXlibSurfaceCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(instance);
+    if (result == VK_SUCCESS) {
+        CreateObjectParentInstance(*pSurface);
+    }
+}
+#endif // VK_USE_PLATFORM_XLIB_KHR
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+
+void ThreadSafety::PreCallRecordCreateXcbSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXcbSurfaceCreateInfoKHR*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+    StartReadObjectParentInstance(instance);
+}
+
+void ThreadSafety::PostCallRecordCreateXcbSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXcbSurfaceCreateInfoKHR*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(instance);
+    if (result == VK_SUCCESS) {
+        CreateObjectParentInstance(*pSurface);
+    }
+}
+#endif // VK_USE_PLATFORM_XCB_KHR
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+
+void ThreadSafety::PreCallRecordCreateWaylandSurfaceKHR(
+    VkInstance                                  instance,
+    const VkWaylandSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+    StartReadObjectParentInstance(instance);
+}
+
+void ThreadSafety::PostCallRecordCreateWaylandSurfaceKHR(
+    VkInstance                                  instance,
+    const VkWaylandSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(instance);
+    if (result == VK_SUCCESS) {
+        CreateObjectParentInstance(*pSurface);
+    }
+}
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+void ThreadSafety::PreCallRecordCreateAndroidSurfaceKHR(
+    VkInstance                                  instance,
+    const VkAndroidSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+    StartReadObjectParentInstance(instance);
+}
+
+void ThreadSafety::PostCallRecordCreateAndroidSurfaceKHR(
+    VkInstance                                  instance,
+    const VkAndroidSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(instance);
+    if (result == VK_SUCCESS) {
+        CreateObjectParentInstance(*pSurface);
+    }
+}
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+void ThreadSafety::PreCallRecordCreateWin32SurfaceKHR(
+    VkInstance                                  instance,
+    const VkWin32SurfaceCreateInfoKHR*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+    StartReadObjectParentInstance(instance);
+}
+
+void ThreadSafety::PostCallRecordCreateWin32SurfaceKHR(
+    VkInstance                                  instance,
+    const VkWin32SurfaceCreateInfoKHR*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(instance);
+    if (result == VK_SUCCESS) {
+        CreateObjectParentInstance(*pSurface);
+    }
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
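+// Note (editorial annotation): the platform surface constructors above differ
+// only in their VK_USE_PLATFORM_* guard and create-info type. VkSurfaceKHR is an
+// instance-level object, so these hooks use the *ParentInstance variants (read
+// scope on the VkInstance, CreateObjectParentInstance on VK_SUCCESS) instead of
+// the device-level tracking used for most other handles in this file.
+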
+void ThreadSafety::PreCallRecordGetDeviceGroupPeerMemoryFeaturesKHR(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetDeviceGroupPeerMemoryFeaturesKHR(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordCmdSetDeviceMaskKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdSetDeviceMaskKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdDispatchBaseKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdDispatchBaseKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
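+// Note (editorial annotation): every vkCmd* recording hook in this file takes a
+// write scope on the VkCommandBuffer, since command buffers must be externally
+// synchronized while commands are recorded into them. A hypothetical violation
+// (cb is an illustrative, already-begun command buffer):
+//
+//   std::thread t1([&] { vkCmdSetDeviceMaskKHR(cb, 0x1); });
+//   std::thread t2([&] { vkCmdDispatchBaseKHR(cb, 0, 0, 0, 1, 1, 1); });
+//
+// Concurrent write scopes on the same handle are exactly what the
+// Start/FinishWriteObject pair is designed to detect and report.
+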
+void ThreadSafety::PreCallRecordTrimCommandPoolKHR(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(commandPool);
+    // Host access to commandPool must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordTrimCommandPoolKHR(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(commandPool);
+    // Host access to commandPool must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordEnumeratePhysicalDeviceGroupsKHR(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties) {
+    StartReadObjectParentInstance(instance);
+}
+
+void ThreadSafety::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(instance);
+}
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+void ThreadSafety::PreCallRecordGetMemoryWin32HandleKHR(
+    VkDevice                                    device,
+    const VkMemoryGetWin32HandleInfoKHR*        pGetWin32HandleInfo,
+    HANDLE*                                     pHandle) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetMemoryWin32HandleKHR(
+    VkDevice                                    device,
+    const VkMemoryGetWin32HandleInfoKHR*        pGetWin32HandleInfo,
+    HANDLE*                                     pHandle,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordGetMemoryWin32HandlePropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    HANDLE                                      handle,
+    VkMemoryWin32HandlePropertiesKHR*           pMemoryWin32HandleProperties) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetMemoryWin32HandlePropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    HANDLE                                      handle,
+    VkMemoryWin32HandlePropertiesKHR*           pMemoryWin32HandleProperties,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+void ThreadSafety::PreCallRecordGetMemoryFdKHR(
+    VkDevice                                    device,
+    const VkMemoryGetFdInfoKHR*                 pGetFdInfo,
+    int*                                        pFd) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetMemoryFdKHR(
+    VkDevice                                    device,
+    const VkMemoryGetFdInfoKHR*                 pGetFdInfo,
+    int*                                        pFd,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordGetMemoryFdPropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    int                                         fd,
+    VkMemoryFdPropertiesKHR*                    pMemoryFdProperties) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetMemoryFdPropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    int                                         fd,
+    VkMemoryFdPropertiesKHR*                    pMemoryFdProperties,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+void ThreadSafety::PreCallRecordImportSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreWin32HandleInfoKHR*  pImportSemaphoreWin32HandleInfo) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordImportSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreWin32HandleInfoKHR*  pImportSemaphoreWin32HandleInfo,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordGetSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetWin32HandleInfoKHR*     pGetWin32HandleInfo,
+    HANDLE*                                     pHandle) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetWin32HandleInfoKHR*     pGetWin32HandleInfo,
+    HANDLE*                                     pHandle,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+void ThreadSafety::PreCallRecordImportSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreFdInfoKHR*           pImportSemaphoreFdInfo) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordImportSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreFdInfoKHR*           pImportSemaphoreFdInfo,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordGetSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetFdInfoKHR*              pGetFdInfo,
+    int*                                        pFd) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetFdInfoKHR*              pGetFdInfo,
+    int*                                        pFd,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordCmdPushDescriptorSetKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(layout);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdPushDescriptorSetKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(layout);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    const void*                                 pData) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(descriptorUpdateTemplate);
+    StartReadObject(layout);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdPushDescriptorSetWithTemplateKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    const void*                                 pData) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(descriptorUpdateTemplate);
+    FinishReadObject(layout);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCreateDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordCreateDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pDescriptorUpdateTemplate);
+    }
+}
+
+void ThreadSafety::PreCallRecordDestroyDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(descriptorUpdateTemplate);
+    // Host access to descriptorUpdateTemplate must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordDestroyDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(descriptorUpdateTemplate);
+    DestroyObject(descriptorUpdateTemplate);
+    // Host access to descriptorUpdateTemplate must be externally synchronized
+}
+
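+// Note (editorial annotation): Destroy* hooks mirror the Create* hooks. PostCall
+// finishes the write scope and then calls DestroyObject() to drop the handle
+// from the tracking map, which also lets a later Create call re-register a
+// recycled handle value without a stale entry getting in the way.
+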
+void ThreadSafety::PreCallRecordCreateRenderPass2KHR(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo2KHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordCreateRenderPass2KHR(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo2KHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pRenderPass);
+    }
+}
+
+void ThreadSafety::PreCallRecordCmdBeginRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdBeginRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdNextSubpass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdNextSubpass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdEndRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdEndRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordGetSwapchainStatusKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(swapchain);
+    // Host access to swapchain must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordGetSwapchainStatusKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(swapchain);
+    // Host access to swapchain must be externally synchronized
+}
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+void ThreadSafety::PreCallRecordImportFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportFenceWin32HandleInfoKHR*      pImportFenceWin32HandleInfo) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordImportFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportFenceWin32HandleInfoKHR*      pImportFenceWin32HandleInfo,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordGetFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkFenceGetWin32HandleInfoKHR*         pGetWin32HandleInfo,
+    HANDLE*                                     pHandle) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkFenceGetWin32HandleInfoKHR*         pGetWin32HandleInfo,
+    HANDLE*                                     pHandle,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+void ThreadSafety::PreCallRecordImportFenceFdKHR(
+    VkDevice                                    device,
+    const VkImportFenceFdInfoKHR*               pImportFenceFdInfo) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordImportFenceFdKHR(
+    VkDevice                                    device,
+    const VkImportFenceFdInfoKHR*               pImportFenceFdInfo,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordGetFenceFdKHR(
+    VkDevice                                    device,
+    const VkFenceGetFdInfoKHR*                  pGetFdInfo,
+    int*                                        pFd) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetFenceFdKHR(
+    VkDevice                                    device,
+    const VkFenceGetFdInfoKHR*                  pGetFdInfo,
+    int*                                        pFd,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
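+// Note (editorial annotation): the external fence/semaphore import and export
+// hooks above only record a read scope on the owning VkDevice; the fence or
+// semaphore referenced inside the *Info structures is not tracked here, and the
+// OS handle or file descriptor itself lies outside this layer's object model.
+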
+void ThreadSafety::PreCallRecordAcquireProfilingLockKHR(
+    VkDevice                                    device,
+    const VkAcquireProfilingLockInfoKHR*        pInfo) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordAcquireProfilingLockKHR(
+    VkDevice                                    device,
+    const VkAcquireProfilingLockInfoKHR*        pInfo,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordReleaseProfilingLockKHR(
+    VkDevice                                    device) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordReleaseProfilingLockKHR(
+    VkDevice                                    device) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordGetDisplayModeProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModeProperties2KHR*                pProperties) {
+    StartReadObject(display);
+}
+
+void ThreadSafety::PostCallRecordGetDisplayModeProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModeProperties2KHR*                pProperties,
+    VkResult                                    result) {
+    FinishReadObject(display);
+}
+
+void ThreadSafety::PreCallRecordGetImageMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetImageMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordGetBufferMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetBufferMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordGetImageSparseMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetImageSparseMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordCreateSamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordCreateSamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pYcbcrConversion);
+    }
+}
+
+void ThreadSafety::PreCallRecordDestroySamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(ycbcrConversion);
+    // Host access to ycbcrConversion must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordDestroySamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(ycbcrConversion);
+    DestroyObject(ycbcrConversion);
+    // Host access to ycbcrConversion must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordBindBufferMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordBindBufferMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordBindImageMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordBindImageMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordGetDescriptorSetLayoutSupportKHR(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetDescriptorSetLayoutSupportKHR(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordCmdDrawIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(buffer);
+    StartReadObject(countBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdDrawIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(buffer);
+    FinishReadObject(countBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdDrawIndexedIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(buffer);
+    StartReadObject(countBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdDrawIndexedIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(buffer);
+    FinishReadObject(countBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordGetSemaphoreCounterValueKHR(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    uint64_t*                                   pValue) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(semaphore);
+}
+
+void ThreadSafety::PostCallRecordGetSemaphoreCounterValueKHR(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    uint64_t*                                   pValue,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(semaphore);
+}
+
+void ThreadSafety::PreCallRecordWaitSemaphoresKHR(
+    VkDevice                                    device,
+    const VkSemaphoreWaitInfoKHR*               pWaitInfo,
+    uint64_t                                    timeout) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordWaitSemaphoresKHR(
+    VkDevice                                    device,
+    const VkSemaphoreWaitInfoKHR*               pWaitInfo,
+    uint64_t                                    timeout,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordSignalSemaphoreKHR(
+    VkDevice                                    device,
+    const VkSemaphoreSignalInfoKHR*             pSignalInfo) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordSignalSemaphoreKHR(
+    VkDevice                                    device,
+    const VkSemaphoreSignalInfoKHR*             pSignalInfo,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordGetBufferDeviceAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetBufferDeviceAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo,
+    VkDeviceAddress                             result) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordGetBufferOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetBufferOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordGetDeviceMemoryOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetDeviceMemoryOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordGetPipelineExecutablePropertiesKHR(
+    VkDevice                                    device,
+    const VkPipelineInfoKHR*                    pPipelineInfo,
+    uint32_t*                                   pExecutableCount,
+    VkPipelineExecutablePropertiesKHR*          pProperties) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetPipelineExecutablePropertiesKHR(
+    VkDevice                                    device,
+    const VkPipelineInfoKHR*                    pPipelineInfo,
+    uint32_t*                                   pExecutableCount,
+    VkPipelineExecutablePropertiesKHR*          pProperties,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordGetPipelineExecutableStatisticsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pStatisticCount,
+    VkPipelineExecutableStatisticKHR*           pStatistics) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetPipelineExecutableStatisticsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pStatisticCount,
+    VkPipelineExecutableStatisticKHR*           pStatistics,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordGetPipelineExecutableInternalRepresentationsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pInternalRepresentationCount,
+    VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetPipelineExecutableInternalRepresentationsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pInternalRepresentationCount,
+    VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordCreateDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    const VkDebugReportCallbackCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugReportCallbackEXT*                   pCallback) {
+    StartReadObjectParentInstance(instance);
+}
+
+void ThreadSafety::PostCallRecordCreateDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    const VkDebugReportCallbackCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugReportCallbackEXT*                   pCallback,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(instance);
+    if (result == VK_SUCCESS) {
+        CreateObjectParentInstance(*pCallback);
+    }
+}
+
+void ThreadSafety::PreCallRecordDestroyDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    VkDebugReportCallbackEXT                    callback,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(instance);
+    StartWriteObjectParentInstance(callback);
+    // Host access to callback must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordDestroyDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    VkDebugReportCallbackEXT                    callback,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(instance);
+    FinishWriteObjectParentInstance(callback);
+    DestroyObjectParentInstance(callback);
+    // Host access to callback must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordDebugReportMessageEXT(
+    VkInstance                                  instance,
+    VkDebugReportFlagsEXT                       flags,
+    VkDebugReportObjectTypeEXT                  objectType,
+    uint64_t                                    object,
+    size_t                                      location,
+    int32_t                                     messageCode,
+    const char*                                 pLayerPrefix,
+    const char*                                 pMessage) {
+    StartReadObjectParentInstance(instance);
+}
+
+void ThreadSafety::PostCallRecordDebugReportMessageEXT(
+    VkInstance                                  instance,
+    VkDebugReportFlagsEXT                       flags,
+    VkDebugReportObjectTypeEXT                  objectType,
+    uint64_t                                    object,
+    size_t                                      location,
+    int32_t                                     messageCode,
+    const char*                                 pLayerPrefix,
+    const char*                                 pMessage) {
+    FinishReadObjectParentInstance(instance);
+}
+// TODO - not wrapping EXT function vkDebugMarkerSetObjectTagEXT
+// TODO - not wrapping EXT function vkDebugMarkerSetObjectNameEXT
+// TODO - not wrapping EXT function vkCmdDebugMarkerBeginEXT
+// TODO - not wrapping EXT function vkCmdDebugMarkerEndEXT
+// TODO - not wrapping EXT function vkCmdDebugMarkerInsertEXT
+
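+// Note (editorial annotation): the TODO markers above are emitted for entry
+// points the generator intentionally leaves unwrapped, so the debug-marker EXT
+// calls pass through this layer without any Start/Finish bookkeeping.
+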
+void ThreadSafety::PreCallRecordCmdBindTransformFeedbackBuffersEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets,
+    const VkDeviceSize*                         pSizes) {
+    StartWriteObject(commandBuffer);
+    if (pBuffers) {
+        for (uint32_t index = 0; index < bindingCount; index++) {
+            StartReadObject(pBuffers[index]);
+        }
+    }
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdBindTransformFeedbackBuffersEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets,
+    const VkDeviceSize*                         pSizes) {
+    FinishWriteObject(commandBuffer);
+    if (pBuffers) {
+        for (uint32_t index = 0; index < bindingCount; index++) {
+            FinishReadObject(pBuffers[index]);
+        }
+    }
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdBeginTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets) {
+    StartWriteObject(commandBuffer);
+    if (pCounterBuffers) {
+        for (uint32_t index = 0; index < counterBufferCount; index++) {
+            StartReadObject(pCounterBuffers[index]);
+        }
+    }
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdBeginTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets) {
+    FinishWriteObject(commandBuffer);
+    if (pCounterBuffers) {
+        for (uint32_t index = 0; index < counterBufferCount; index++) {
+            FinishReadObject(pCounterBuffers[index]);
+        }
+    }
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdEndTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets) {
+    StartWriteObject(commandBuffer);
+    if (pCounterBuffers) {
+        for (uint32_t index = 0; index < counterBufferCount; index++) {
+            StartReadObject(pCounterBuffers[index]);
+        }
+    }
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdEndTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets) {
+    FinishWriteObject(commandBuffer);
+    if (pCounterBuffers) {
+        for (uint32_t index = 0; index < counterBufferCount; index++) {
+            FinishReadObject(pCounterBuffers[index]);
+        }
+    }
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdBeginQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags,
+    uint32_t                                    index) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(queryPool);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdBeginQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags,
+    uint32_t                                    index) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(queryPool);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdEndQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    uint32_t                                    index) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(queryPool);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdEndQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    uint32_t                                    index) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(queryPool);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdDrawIndirectByteCountEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstInstance,
+    VkBuffer                                    counterBuffer,
+    VkDeviceSize                                counterBufferOffset,
+    uint32_t                                    counterOffset,
+    uint32_t                                    vertexStride) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(counterBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdDrawIndirectByteCountEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstInstance,
+    VkBuffer                                    counterBuffer,
+    VkDeviceSize                                counterBufferOffset,
+    uint32_t                                    counterOffset,
+    uint32_t                                    vertexStride) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(counterBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordGetImageViewHandleNVX(
+    VkDevice                                    device,
+    const VkImageViewHandleInfoNVX*             pInfo) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetImageViewHandleNVX(
+    VkDevice                                    device,
+    const VkImageViewHandleInfoNVX*             pInfo) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordCmdDrawIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(buffer);
+    StartReadObject(countBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdDrawIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(buffer);
+    FinishReadObject(countBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdDrawIndexedIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(buffer);
+    StartReadObject(countBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdDrawIndexedIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(buffer);
+    FinishReadObject(countBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordGetShaderInfoAMD(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    VkShaderStageFlagBits                       shaderStage,
+    VkShaderInfoTypeAMD                         infoType,
+    size_t*                                     pInfoSize,
+    void*                                       pInfo) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(pipeline);
+}
+
+void ThreadSafety::PostCallRecordGetShaderInfoAMD(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    VkShaderStageFlagBits                       shaderStage,
+    VkShaderInfoTypeAMD                         infoType,
+    size_t*                                     pInfoSize,
+    void*                                       pInfo,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(pipeline);
+}
+
+#ifdef VK_USE_PLATFORM_GGP
+
+void ThreadSafety::PreCallRecordCreateStreamDescriptorSurfaceGGP(
+    VkInstance                                  instance,
+    const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+    StartReadObjectParentInstance(instance);
+}
+
+void ThreadSafety::PostCallRecordCreateStreamDescriptorSurfaceGGP(
+    VkInstance                                  instance,
+    const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(instance);
+    if (result == VK_SUCCESS) {
+        CreateObjectParentInstance(*pSurface);
+    }
+}
+#endif // VK_USE_PLATFORM_GGP
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+void ThreadSafety::PreCallRecordGetMemoryWin32HandleNV(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkExternalMemoryHandleTypeFlagsNV           handleType,
+    HANDLE*                                     pHandle) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(memory);
+}
+
+void ThreadSafety::PostCallRecordGetMemoryWin32HandleNV(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkExternalMemoryHandleTypeFlagsNV           handleType,
+    HANDLE*                                     pHandle,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(memory);
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_VI_NN
+
+void ThreadSafety::PreCallRecordCreateViSurfaceNN(
+    VkInstance                                  instance,
+    const VkViSurfaceCreateInfoNN*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+    StartReadObjectParentInstance(instance);
+}
+
+void ThreadSafety::PostCallRecordCreateViSurfaceNN(
+    VkInstance                                  instance,
+    const VkViSurfaceCreateInfoNN*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(instance);
+    if (result == VK_SUCCESS) {
+        CreateObjectParentInstance(*pSurface);
+    }
+}
+#endif // VK_USE_PLATFORM_VI_NN
+
+void ThreadSafety::PreCallRecordCmdBeginConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkConditionalRenderingBeginInfoEXT*   pConditionalRenderingBegin) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdBeginConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkConditionalRenderingBeginInfoEXT*   pConditionalRenderingBegin) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdEndConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdEndConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdProcessCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdProcessCommandsInfoNVX*          pProcessCommandsInfo) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdProcessCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdProcessCommandsInfoNVX*          pProcessCommandsInfo) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdReserveSpaceForCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdReserveSpaceForCommandsInfoNVX*  pReserveSpaceInfo) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdReserveSpaceForCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdReserveSpaceForCommandsInfoNVX*  pReserveSpaceInfo) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCreateIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkIndirectCommandsLayoutNVX*                pIndirectCommandsLayout) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordCreateIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkIndirectCommandsLayoutNVX*                pIndirectCommandsLayout,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pIndirectCommandsLayout);
+    }
+}
+
+void ThreadSafety::PreCallRecordDestroyIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    VkIndirectCommandsLayoutNVX                 indirectCommandsLayout,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(indirectCommandsLayout);
+}
+
+void ThreadSafety::PostCallRecordDestroyIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    VkIndirectCommandsLayoutNVX                 indirectCommandsLayout,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(indirectCommandsLayout);
+}
+
+void ThreadSafety::PreCallRecordCreateObjectTableNVX(
+    VkDevice                                    device,
+    const VkObjectTableCreateInfoNVX*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkObjectTableNVX*                           pObjectTable) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordCreateObjectTableNVX(
+    VkDevice                                    device,
+    const VkObjectTableCreateInfoNVX*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkObjectTableNVX*                           pObjectTable,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pObjectTable);
+    }
+}
+
+void ThreadSafety::PreCallRecordDestroyObjectTableNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(objectTable);
+    // Host access to objectTable must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordDestroyObjectTableNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(objectTable);
+    DestroyObject(objectTable);
+    // Host access to objectTable must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordRegisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectTableEntryNVX* const*         ppObjectTableEntries,
+    const uint32_t*                             pObjectIndices) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(objectTable);
+    // Host access to objectTable must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordRegisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectTableEntryNVX* const*         ppObjectTableEntries,
+    const uint32_t*                             pObjectIndices,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(objectTable);
+    // Host access to objectTable must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordUnregisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectEntryTypeNVX*                 pObjectEntryTypes,
+    const uint32_t*                             pObjectIndices) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(objectTable);
+    // Host access to objectTable must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordUnregisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectEntryTypeNVX*                 pObjectEntryTypes,
+    const uint32_t*                             pObjectIndices,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(objectTable);
+    // Host access to objectTable must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdSetViewportWScalingNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewportWScalingNV*                 pViewportWScalings) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdSetViewportWScalingNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewportWScalingNV*                 pViewportWScalings) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordReleaseDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display) {
+    StartReadObject(display);
+}
+
+void ThreadSafety::PostCallRecordReleaseDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    VkResult                                    result) {
+    FinishReadObject(display);
+}
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+void ThreadSafety::PreCallRecordAcquireXlibDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    VkDisplayKHR                                display) {
+    StartReadObject(display);
+}
+
+void ThreadSafety::PostCallRecordAcquireXlibDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    VkDisplayKHR                                display,
+    VkResult                                    result) {
+    FinishReadObject(display);
+}
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+void ThreadSafety::PreCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilities2EXT*                  pSurfaceCapabilities) {
+    StartReadObjectParentInstance(surface);
+}
+
+void ThreadSafety::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilities2EXT*                  pSurfaceCapabilities,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(surface);
+}
+
+void ThreadSafety::PreCallRecordDisplayPowerControlEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayPowerInfoEXT*                pDisplayPowerInfo) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(display);
+}
+
+void ThreadSafety::PostCallRecordDisplayPowerControlEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayPowerInfoEXT*                pDisplayPowerInfo,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(display);
+}
+
+void ThreadSafety::PreCallRecordRegisterDeviceEventEXT(
+    VkDevice                                    device,
+    const VkDeviceEventInfoEXT*                 pDeviceEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordRegisterDeviceEventEXT(
+    VkDevice                                    device,
+    const VkDeviceEventInfoEXT*                 pDeviceEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordRegisterDisplayEventEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayEventInfoEXT*                pDisplayEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(display);
+}
+
+void ThreadSafety::PostCallRecordRegisterDisplayEventEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayEventInfoEXT*                pDisplayEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(display);
+}
+
+void ThreadSafety::PreCallRecordGetSwapchainCounterEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkSurfaceCounterFlagBitsEXT                 counter,
+    uint64_t*                                   pCounterValue) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(swapchain);
+}
+
+void ThreadSafety::PostCallRecordGetSwapchainCounterEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkSurfaceCounterFlagBitsEXT                 counter,
+    uint64_t*                                   pCounterValue,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(swapchain);
+}
+
+void ThreadSafety::PreCallRecordGetRefreshCycleDurationGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkRefreshCycleDurationGOOGLE*               pDisplayTimingProperties) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(swapchain);
+    // Host access to swapchain must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordGetRefreshCycleDurationGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkRefreshCycleDurationGOOGLE*               pDisplayTimingProperties,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(swapchain);
+    // Host access to swapchain must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordGetPastPresentationTimingGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pPresentationTimingCount,
+    VkPastPresentationTimingGOOGLE*             pPresentationTimings) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(swapchain);
+    // Host access to swapchain must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordGetPastPresentationTimingGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pPresentationTimingCount,
+    VkPastPresentationTimingGOOGLE*             pPresentationTimings,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(swapchain);
+    // Host access to swapchain must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdSetDiscardRectangleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstDiscardRectangle,
+    uint32_t                                    discardRectangleCount,
+    const VkRect2D*                             pDiscardRectangles) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdSetDiscardRectangleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstDiscardRectangle,
+    uint32_t                                    discardRectangleCount,
+    const VkRect2D*                             pDiscardRectangles) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordSetHdrMetadataEXT(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainKHR*                       pSwapchains,
+    const VkHdrMetadataEXT*                     pMetadata) {
+    StartReadObjectParentInstance(device);
+    if (pSwapchains) {
+        for (uint32_t index = 0; index < swapchainCount; index++) {
+            StartReadObject(pSwapchains[index]);
+        }
+    }
+}
+
+void ThreadSafety::PostCallRecordSetHdrMetadataEXT(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainKHR*                       pSwapchains,
+    const VkHdrMetadataEXT*                     pMetadata) {
+    FinishReadObjectParentInstance(device);
+    if (pSwapchains) {
+        for (uint32_t index = 0; index < swapchainCount; index++) {
+            FinishReadObject(pSwapchains[index]);
+        }
+    }
+}
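+// Handle arrays such as pSwapchains above are tracked element by element: the
+// pointer is null-checked defensively and each swapchain gets its own
+// StartReadObject/FinishReadObject pair, so a concurrent write to any one of
+// them is still reported as a threading violation.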
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+
+void ThreadSafety::PreCallRecordCreateIOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkIOSSurfaceCreateInfoMVK*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+    StartReadObjectParentInstance(instance);
+}
+
+void ThreadSafety::PostCallRecordCreateIOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkIOSSurfaceCreateInfoMVK*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(instance);
+    if (result == VK_SUCCESS) {
+        CreateObjectParentInstance(*pSurface);
+    }
+}
+#endif // VK_USE_PLATFORM_IOS_MVK
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+
+void ThreadSafety::PreCallRecordCreateMacOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkMacOSSurfaceCreateInfoMVK*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+    StartReadObjectParentInstance(instance);
+}
+
+void ThreadSafety::PostCallRecordCreateMacOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkMacOSSurfaceCreateInfoMVK*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(instance);
+    if (result == VK_SUCCESS) {
+        CreateObjectParentInstance(*pSurface);
+    }
+}
+#endif // VK_USE_PLATFORM_MACOS_MVK
+// TODO - not wrapping EXT function vkSetDebugUtilsObjectNameEXT
+// TODO - not wrapping EXT function vkSetDebugUtilsObjectTagEXT
+
+void ThreadSafety::PreCallRecordQueueBeginDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) {
+    StartReadObject(queue);
+}
+
+void ThreadSafety::PostCallRecordQueueBeginDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) {
+    FinishReadObject(queue);
+}
+
+void ThreadSafety::PreCallRecordQueueEndDebugUtilsLabelEXT(
+    VkQueue                                     queue) {
+    StartReadObject(queue);
+}
+
+void ThreadSafety::PostCallRecordQueueEndDebugUtilsLabelEXT(
+    VkQueue                                     queue) {
+    FinishReadObject(queue);
+}
+
+void ThreadSafety::PreCallRecordQueueInsertDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) {
+    StartReadObject(queue);
+}
+
+void ThreadSafety::PostCallRecordQueueInsertDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) {
+    FinishReadObject(queue);
+}
+
+void ThreadSafety::PreCallRecordCmdBeginDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) {
+    StartReadObject(commandBuffer);
+}
+
+void ThreadSafety::PostCallRecordCmdBeginDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) {
+    FinishReadObject(commandBuffer);
+}
+
+void ThreadSafety::PreCallRecordCmdEndDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer) {
+    StartReadObject(commandBuffer);
+}
+
+void ThreadSafety::PostCallRecordCmdEndDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer) {
+    FinishReadObject(commandBuffer);
+}
+
+void ThreadSafety::PreCallRecordCmdInsertDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) {
+    StartReadObject(commandBuffer);
+}
+
+void ThreadSafety::PostCallRecordCmdInsertDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo) {
+    FinishReadObject(commandBuffer);
+}
+
+void ThreadSafety::PreCallRecordCreateDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    const VkDebugUtilsMessengerCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugUtilsMessengerEXT*                   pMessenger) {
+    StartReadObjectParentInstance(instance);
+}
+
+void ThreadSafety::PostCallRecordCreateDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    const VkDebugUtilsMessengerCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugUtilsMessengerEXT*                   pMessenger,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(instance);
+    if (result == VK_SUCCESS) {
+        CreateObjectParentInstance(*pMessenger);
+    }
+}
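+// As with the other create calls in this file, tracking of the new messenger
+// only begins when the call returns VK_SUCCESS, so a failed create never
+// leaves a stale entry behind.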
+
+void ThreadSafety::PreCallRecordDestroyDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessengerEXT                    messenger,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(instance);
+    StartWriteObjectParentInstance(messenger);
+    // Host access to messenger must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordDestroyDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessengerEXT                    messenger,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(instance);
+    FinishWriteObjectParentInstance(messenger);
+    DestroyObjectParentInstance(messenger);
+    // Host access to messenger must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordSubmitDebugUtilsMessageEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessageSeverityFlagBitsEXT      messageSeverity,
+    VkDebugUtilsMessageTypeFlagsEXT             messageTypes,
+    const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData) {
+    StartReadObjectParentInstance(instance);
+}
+
+void ThreadSafety::PostCallRecordSubmitDebugUtilsMessageEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessageSeverityFlagBitsEXT      messageSeverity,
+    VkDebugUtilsMessageTypeFlagsEXT             messageTypes,
+    const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData) {
+    FinishReadObjectParentInstance(instance);
+}
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+void ThreadSafety::PreCallRecordGetAndroidHardwareBufferPropertiesANDROID(
+    VkDevice                                    device,
+    const struct AHardwareBuffer*               buffer,
+    VkAndroidHardwareBufferPropertiesANDROID*   pProperties) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(
+    VkDevice                                    device,
+    const struct AHardwareBuffer*               buffer,
+    VkAndroidHardwareBufferPropertiesANDROID*   pProperties,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordGetMemoryAndroidHardwareBufferANDROID(
+    VkDevice                                    device,
+    const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
+    struct AHardwareBuffer**                    pBuffer) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetMemoryAndroidHardwareBufferANDROID(
+    VkDevice                                    device,
+    const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
+    struct AHardwareBuffer**                    pBuffer,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+void ThreadSafety::PreCallRecordCmdSetSampleLocationsEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkSampleLocationsInfoEXT*             pSampleLocationsInfo) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdSetSampleLocationsEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkSampleLocationsInfoEXT*             pSampleLocationsInfo) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordGetImageDrmFormatModifierPropertiesEXT(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkImageDrmFormatModifierPropertiesEXT*      pProperties) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(image);
+}
+
+void ThreadSafety::PostCallRecordGetImageDrmFormatModifierPropertiesEXT(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkImageDrmFormatModifierPropertiesEXT*      pProperties,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(image);
+}
+
+void ThreadSafety::PreCallRecordCreateValidationCacheEXT(
+    VkDevice                                    device,
+    const VkValidationCacheCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkValidationCacheEXT*                       pValidationCache) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordCreateValidationCacheEXT(
+    VkDevice                                    device,
+    const VkValidationCacheCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkValidationCacheEXT*                       pValidationCache,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pValidationCache);
+    }
+}
+
+void ThreadSafety::PreCallRecordDestroyValidationCacheEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(validationCache);
+    // Host access to validationCache must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordDestroyValidationCacheEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(validationCache);
+    DestroyObject(validationCache);
+    // Host access to validationCache must be externally synchronized
+}
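+// Destroy entry points whose handle must be externally synchronized follow the
+// same shape: the doomed handle is write-bracketed across the call and then
+// dropped from tracking via DestroyObject once the Finish side runs.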
+
+void ThreadSafety::PreCallRecordMergeValidationCachesEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkValidationCacheEXT*                 pSrcCaches) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(dstCache);
+    if (pSrcCaches) {
+        for (uint32_t index = 0; index < srcCacheCount; index++) {
+            StartReadObject(pSrcCaches[index]);
+        }
+    }
+    // Host access to dstCache must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordMergeValidationCachesEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkValidationCacheEXT*                 pSrcCaches,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(dstCache);
+    if (pSrcCaches) {
+        for (uint32_t index = 0; index < srcCacheCount; index++) {
+            FinishReadObject(pSrcCaches[index]);
+        }
+    }
+    // Host access to dstCache must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordGetValidationCacheDataEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    size_t*                                     pDataSize,
+    void*                                       pData) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(validationCache);
+}
+
+void ThreadSafety::PostCallRecordGetValidationCacheDataEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    size_t*                                     pDataSize,
+    void*                                       pData,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(validationCache);
+}
+
+void ThreadSafety::PreCallRecordCmdBindShadingRateImageNV(
+    VkCommandBuffer                             commandBuffer,
+    VkImageView                                 imageView,
+    VkImageLayout                               imageLayout) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(imageView);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdBindShadingRateImageNV(
+    VkCommandBuffer                             commandBuffer,
+    VkImageView                                 imageView,
+    VkImageLayout                               imageLayout) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(imageView);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdSetViewportShadingRatePaletteNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkShadingRatePaletteNV*               pShadingRatePalettes) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdSetViewportShadingRatePaletteNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkShadingRatePaletteNV*               pShadingRatePalettes) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdSetCoarseSampleOrderNV(
+    VkCommandBuffer                             commandBuffer,
+    VkCoarseSampleOrderTypeNV                   sampleOrderType,
+    uint32_t                                    customSampleOrderCount,
+    const VkCoarseSampleOrderCustomNV*          pCustomSampleOrders) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdSetCoarseSampleOrderNV(
+    VkCommandBuffer                             commandBuffer,
+    VkCoarseSampleOrderTypeNV                   sampleOrderType,
+    uint32_t                                    customSampleOrderCount,
+    const VkCoarseSampleOrderCustomNV*          pCustomSampleOrders) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCreateAccelerationStructureNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureCreateInfoNV*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkAccelerationStructureNV*                  pAccelerationStructure) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordCreateAccelerationStructureNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureCreateInfoNV*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkAccelerationStructureNV*                  pAccelerationStructure,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pAccelerationStructure);
+    }
+}
+
+void ThreadSafety::PreCallRecordDestroyAccelerationStructureNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(accelerationStructure);
+}
+
+void ThreadSafety::PostCallRecordDestroyAccelerationStructureNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(accelerationStructure);
+}
+
+void ThreadSafety::PreCallRecordGetAccelerationStructureMemoryRequirementsNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
+    VkMemoryRequirements2KHR*                   pMemoryRequirements) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
+    VkMemoryRequirements2KHR*                   pMemoryRequirements) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordBindAccelerationStructureMemoryNV(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindAccelerationStructureMemoryInfoNV* pBindInfos) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordBindAccelerationStructureMemoryNV(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindAccelerationStructureMemoryInfoNV* pBindInfos,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordCmdBuildAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    const VkAccelerationStructureInfoNV*        pInfo,
+    VkBuffer                                    instanceData,
+    VkDeviceSize                                instanceOffset,
+    VkBool32                                    update,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkBuffer                                    scratch,
+    VkDeviceSize                                scratchOffset) {
+    StartReadObject(commandBuffer);
+    StartReadObject(instanceData);
+    StartReadObject(dst);
+    StartReadObject(src);
+    StartReadObject(scratch);
+}
+
+void ThreadSafety::PostCallRecordCmdBuildAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    const VkAccelerationStructureInfoNV*        pInfo,
+    VkBuffer                                    instanceData,
+    VkDeviceSize                                instanceOffset,
+    VkBool32                                    update,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkBuffer                                    scratch,
+    VkDeviceSize                                scratchOffset) {
+    FinishReadObject(commandBuffer);
+    FinishReadObject(instanceData);
+    FinishReadObject(dst);
+    FinishReadObject(src);
+    FinishReadObject(scratch);
+}
+
+void ThreadSafety::PreCallRecordCmdCopyAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkCopyAccelerationStructureModeNV           mode) {
+    StartReadObject(commandBuffer);
+    StartReadObject(dst);
+    StartReadObject(src);
+}
+
+void ThreadSafety::PostCallRecordCmdCopyAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkCopyAccelerationStructureModeNV           mode) {
+    FinishReadObject(commandBuffer);
+    FinishReadObject(dst);
+    FinishReadObject(src);
+}
+
+void ThreadSafety::PreCallRecordCmdTraceRaysNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    raygenShaderBindingTableBuffer,
+    VkDeviceSize                                raygenShaderBindingOffset,
+    VkBuffer                                    missShaderBindingTableBuffer,
+    VkDeviceSize                                missShaderBindingOffset,
+    VkDeviceSize                                missShaderBindingStride,
+    VkBuffer                                    hitShaderBindingTableBuffer,
+    VkDeviceSize                                hitShaderBindingOffset,
+    VkDeviceSize                                hitShaderBindingStride,
+    VkBuffer                                    callableShaderBindingTableBuffer,
+    VkDeviceSize                                callableShaderBindingOffset,
+    VkDeviceSize                                callableShaderBindingStride,
+    uint32_t                                    width,
+    uint32_t                                    height,
+    uint32_t                                    depth) {
+    StartReadObject(commandBuffer);
+    StartReadObject(raygenShaderBindingTableBuffer);
+    StartReadObject(missShaderBindingTableBuffer);
+    StartReadObject(hitShaderBindingTableBuffer);
+    StartReadObject(callableShaderBindingTableBuffer);
+}
+
+void ThreadSafety::PostCallRecordCmdTraceRaysNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    raygenShaderBindingTableBuffer,
+    VkDeviceSize                                raygenShaderBindingOffset,
+    VkBuffer                                    missShaderBindingTableBuffer,
+    VkDeviceSize                                missShaderBindingOffset,
+    VkDeviceSize                                missShaderBindingStride,
+    VkBuffer                                    hitShaderBindingTableBuffer,
+    VkDeviceSize                                hitShaderBindingOffset,
+    VkDeviceSize                                hitShaderBindingStride,
+    VkBuffer                                    callableShaderBindingTableBuffer,
+    VkDeviceSize                                callableShaderBindingOffset,
+    VkDeviceSize                                callableShaderBindingStride,
+    uint32_t                                    width,
+    uint32_t                                    height,
+    uint32_t                                    depth) {
+    FinishReadObject(commandBuffer);
+    FinishReadObject(raygenShaderBindingTableBuffer);
+    FinishReadObject(missShaderBindingTableBuffer);
+    FinishReadObject(hitShaderBindingTableBuffer);
+    FinishReadObject(callableShaderBindingTableBuffer);
+}
+
+void ThreadSafety::PreCallRecordCreateRayTracingPipelinesNV(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkRayTracingPipelineCreateInfoNV*     pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(pipelineCache);
+}
+
+void ThreadSafety::PostCallRecordCreateRayTracingPipelinesNV(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkRayTracingPipelineCreateInfoNV*     pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(pipelineCache);
+    if (pPipelines) {
+        for (uint32_t index = 0; index < createInfoCount; index++) {
+            if (!pPipelines[index]) continue;
+            CreateObject(pPipelines[index]);
+        }
+    }
+}
+
+void ThreadSafety::PreCallRecordGetRayTracingShaderGroupHandlesNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    firstGroup,
+    uint32_t                                    groupCount,
+    size_t                                      dataSize,
+    void*                                       pData) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(pipeline);
+}
+
+void ThreadSafety::PostCallRecordGetRayTracingShaderGroupHandlesNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    firstGroup,
+    uint32_t                                    groupCount,
+    size_t                                      dataSize,
+    void*                                       pData,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(pipeline);
+}
+
+void ThreadSafety::PreCallRecordGetAccelerationStructureHandleNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    size_t                                      dataSize,
+    void*                                       pData) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(accelerationStructure);
+}
+
+void ThreadSafety::PostCallRecordGetAccelerationStructureHandleNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    size_t                                      dataSize,
+    void*                                       pData,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(accelerationStructure);
+}
+
+void ThreadSafety::PreCallRecordCmdWriteAccelerationStructuresPropertiesNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    accelerationStructureCount,
+    const VkAccelerationStructureNV*            pAccelerationStructures,
+    VkQueryType                                 queryType,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery) {
+    StartReadObject(commandBuffer);
+    if (pAccelerationStructures) {
+        for (uint32_t index = 0; index < accelerationStructureCount; index++) {
+            StartReadObject(pAccelerationStructures[index]);
+        }
+    }
+    StartReadObject(queryPool);
+}
+
+void ThreadSafety::PostCallRecordCmdWriteAccelerationStructuresPropertiesNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    accelerationStructureCount,
+    const VkAccelerationStructureNV*            pAccelerationStructures,
+    VkQueryType                                 queryType,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery) {
+    FinishReadObject(commandBuffer);
+    if (pAccelerationStructures) {
+        for (uint32_t index = 0; index < accelerationStructureCount; index++) {
+            FinishReadObject(pAccelerationStructures[index]);
+        }
+    }
+    FinishReadObject(queryPool);
+}
+
+void ThreadSafety::PreCallRecordCompileDeferredNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    shader) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(pipeline);
+}
+
+void ThreadSafety::PostCallRecordCompileDeferredNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    shader,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(pipeline);
+}
+
+void ThreadSafety::PreCallRecordGetMemoryHostPointerPropertiesEXT(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    const void*                                 pHostPointer,
+    VkMemoryHostPointerPropertiesEXT*           pMemoryHostPointerProperties) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetMemoryHostPointerPropertiesEXT(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    const void*                                 pHostPointer,
+    VkMemoryHostPointerPropertiesEXT*           pMemoryHostPointerProperties,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordCmdWriteBufferMarkerAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    uint32_t                                    marker) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(dstBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdWriteBufferMarkerAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    uint32_t                                    marker) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(dstBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordGetCalibratedTimestampsEXT(
+    VkDevice                                    device,
+    uint32_t                                    timestampCount,
+    const VkCalibratedTimestampInfoEXT*         pTimestampInfos,
+    uint64_t*                                   pTimestamps,
+    uint64_t*                                   pMaxDeviation) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetCalibratedTimestampsEXT(
+    VkDevice                                    device,
+    uint32_t                                    timestampCount,
+    const VkCalibratedTimestampInfoEXT*         pTimestampInfos,
+    uint64_t*                                   pTimestamps,
+    uint64_t*                                   pMaxDeviation,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+#ifdef VK_USE_PLATFORM_GGP
+#endif // VK_USE_PLATFORM_GGP
+
+void ThreadSafety::PreCallRecordCmdDrawMeshTasksNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    taskCount,
+    uint32_t                                    firstTask) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdDrawMeshTasksNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    taskCount,
+    uint32_t                                    firstTask) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdDrawMeshTasksIndirectNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(buffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdDrawMeshTasksIndirectNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(buffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdDrawMeshTasksIndirectCountNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) {
+    StartWriteObject(commandBuffer);
+    StartReadObject(buffer);
+    StartReadObject(countBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdDrawMeshTasksIndirectCountNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride) {
+    FinishWriteObject(commandBuffer);
+    FinishReadObject(buffer);
+    FinishReadObject(countBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdSetExclusiveScissorNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstExclusiveScissor,
+    uint32_t                                    exclusiveScissorCount,
+    const VkRect2D*                             pExclusiveScissors) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdSetExclusiveScissorNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstExclusiveScissor,
+    uint32_t                                    exclusiveScissorCount,
+    const VkRect2D*                             pExclusiveScissors) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordCmdSetCheckpointNV(
+    VkCommandBuffer                             commandBuffer,
+    const void*                                 pCheckpointMarker) {
+    StartReadObject(commandBuffer);
+}
+
+void ThreadSafety::PostCallRecordCmdSetCheckpointNV(
+    VkCommandBuffer                             commandBuffer,
+    const void*                                 pCheckpointMarker) {
+    FinishReadObject(commandBuffer);
+}
+
+void ThreadSafety::PreCallRecordGetQueueCheckpointDataNV(
+    VkQueue                                     queue,
+    uint32_t*                                   pCheckpointDataCount,
+    VkCheckpointDataNV*                         pCheckpointData) {
+    StartReadObject(queue);
+}
+
+void ThreadSafety::PostCallRecordGetQueueCheckpointDataNV(
+    VkQueue                                     queue,
+    uint32_t*                                   pCheckpointDataCount,
+    VkCheckpointDataNV*                         pCheckpointData) {
+    FinishReadObject(queue);
+}
+
+void ThreadSafety::PreCallRecordInitializePerformanceApiINTEL(
+    VkDevice                                    device,
+    const VkInitializePerformanceApiInfoINTEL*  pInitializeInfo) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordInitializePerformanceApiINTEL(
+    VkDevice                                    device,
+    const VkInitializePerformanceApiInfoINTEL*  pInitializeInfo,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordUninitializePerformanceApiINTEL(
+    VkDevice                                    device) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordUninitializePerformanceApiINTEL(
+    VkDevice                                    device) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordCmdSetPerformanceMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceMarkerInfoINTEL*         pMarkerInfo) {
+    StartReadObject(commandBuffer);
+}
+
+void ThreadSafety::PostCallRecordCmdSetPerformanceMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceMarkerInfoINTEL*         pMarkerInfo,
+    VkResult                                    result) {
+    FinishReadObject(commandBuffer);
+}
+
+void ThreadSafety::PreCallRecordCmdSetPerformanceStreamMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceStreamMarkerInfoINTEL*   pMarkerInfo) {
+    StartReadObject(commandBuffer);
+}
+
+void ThreadSafety::PostCallRecordCmdSetPerformanceStreamMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceStreamMarkerInfoINTEL*   pMarkerInfo,
+    VkResult                                    result) {
+    FinishReadObject(commandBuffer);
+}
+
+void ThreadSafety::PreCallRecordCmdSetPerformanceOverrideINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceOverrideInfoINTEL*       pOverrideInfo) {
+    StartReadObject(commandBuffer);
+}
+
+void ThreadSafety::PostCallRecordCmdSetPerformanceOverrideINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceOverrideInfoINTEL*       pOverrideInfo,
+    VkResult                                    result) {
+    FinishReadObject(commandBuffer);
+}
+
+void ThreadSafety::PreCallRecordAcquirePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo,
+    VkPerformanceConfigurationINTEL*            pConfiguration) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordAcquirePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo,
+    VkPerformanceConfigurationINTEL*            pConfiguration,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordReleasePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    VkPerformanceConfigurationINTEL             configuration) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(configuration);
+}
+
+void ThreadSafety::PostCallRecordReleasePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    VkPerformanceConfigurationINTEL             configuration,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(configuration);
+}
+
+void ThreadSafety::PreCallRecordQueueSetPerformanceConfigurationINTEL(
+    VkQueue                                     queue,
+    VkPerformanceConfigurationINTEL             configuration) {
+    StartReadObject(queue);
+    StartReadObject(configuration);
+}
+
+void ThreadSafety::PostCallRecordQueueSetPerformanceConfigurationINTEL(
+    VkQueue                                     queue,
+    VkPerformanceConfigurationINTEL             configuration,
+    VkResult                                    result) {
+    FinishReadObject(queue);
+    FinishReadObject(configuration);
+}
+
+void ThreadSafety::PreCallRecordGetPerformanceParameterINTEL(
+    VkDevice                                    device,
+    VkPerformanceParameterTypeINTEL             parameter,
+    VkPerformanceValueINTEL*                    pValue) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetPerformanceParameterINTEL(
+    VkDevice                                    device,
+    VkPerformanceParameterTypeINTEL             parameter,
+    VkPerformanceValueINTEL*                    pValue,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PreCallRecordSetLocalDimmingAMD(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapChain,
+    VkBool32                                    localDimmingEnable) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(swapChain);
+}
+
+void ThreadSafety::PostCallRecordSetLocalDimmingAMD(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapChain,
+    VkBool32                                    localDimmingEnable) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(swapChain);
+}
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+
+void ThreadSafety::PreCallRecordCreateImagePipeSurfaceFUCHSIA(
+    VkInstance                                  instance,
+    const VkImagePipeSurfaceCreateInfoFUCHSIA*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+    StartReadObjectParentInstance(instance);
+}
+
+void ThreadSafety::PostCallRecordCreateImagePipeSurfaceFUCHSIA(
+    VkInstance                                  instance,
+    const VkImagePipeSurfaceCreateInfoFUCHSIA*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(instance);
+    if (result == VK_SUCCESS) {
+        CreateObjectParentInstance(*pSurface);
+    }
+}
+#endif // VK_USE_PLATFORM_FUCHSIA
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+
+void ThreadSafety::PreCallRecordCreateMetalSurfaceEXT(
+    VkInstance                                  instance,
+    const VkMetalSurfaceCreateInfoEXT*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+    StartReadObjectParentInstance(instance);
+}
+
+void ThreadSafety::PostCallRecordCreateMetalSurfaceEXT(
+    VkInstance                                  instance,
+    const VkMetalSurfaceCreateInfoEXT*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(instance);
+    if (result == VK_SUCCESS) {
+        CreateObjectParentInstance(*pSurface);
+    }
+}
+#endif // VK_USE_PLATFORM_METAL_EXT
+
+void ThreadSafety::PreCallRecordGetBufferDeviceAddressEXT(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetBufferDeviceAddressEXT(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo,
+    VkDeviceAddress                             result) {
+    FinishReadObjectParentInstance(device);
+}
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+void ThreadSafety::PreCallRecordAcquireFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(swapchain);
+}
+
+void ThreadSafety::PostCallRecordAcquireFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(swapchain);
+}
+
+void ThreadSafety::PreCallRecordReleaseFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(swapchain);
+}
+
+void ThreadSafety::PostCallRecordReleaseFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(swapchain);
+}
+
+void ThreadSafety::PreCallRecordGetDeviceGroupSurfacePresentModes2EXT(
+    VkDevice                                    device,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetDeviceGroupSurfacePresentModes2EXT(
+    VkDevice                                    device,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+void ThreadSafety::PreCallRecordCreateHeadlessSurfaceEXT(
+    VkInstance                                  instance,
+    const VkHeadlessSurfaceCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface) {
+    StartReadObjectParentInstance(instance);
+}
+
+void ThreadSafety::PostCallRecordCreateHeadlessSurfaceEXT(
+    VkInstance                                  instance,
+    const VkHeadlessSurfaceCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(instance);
+    if (result == VK_SUCCESS) {
+        CreateObjectParentInstance(*pSurface);
+    }
+}
+
+void ThreadSafety::PreCallRecordCmdSetLineStippleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    lineStippleFactor,
+    uint16_t                                    lineStipplePattern) {
+    StartWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordCmdSetLineStippleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    lineStippleFactor,
+    uint16_t                                    lineStipplePattern) {
+    FinishWriteObject(commandBuffer);
+    // Host access to commandBuffer must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordResetQueryPoolEXT(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(queryPool);
+}
+
+void ThreadSafety::PostCallRecordResetQueryPoolEXT(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(queryPool);
+}
diff --git a/src/third_party/vulkan-validation-layers/src/layers/generated/thread_safety.h b/src/third_party/vulkan-validation-layers/src/layers/generated/thread_safety.h
new file mode 100644
index 0000000..f4c4afa
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/generated/thread_safety.h
@@ -0,0 +1,4390 @@
+
+// This file is ***GENERATED***.  Do Not Edit.
+// See thread_safety_generator.py for modifications.
+
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ */
+
+#pragma once
+
+#include <atomic>
+#include <chrono>
+#include <mutex>
+#include <string>
+#include <thread>
+#include <unordered_set>
+#include <vector>
+
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(DISTINCT_NONDISPATCHABLE_PHONY_HANDLE)
+// The following line must match the vulkan_core.h condition guarding VK_DEFINE_NON_DISPATCHABLE_HANDLE
+#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__)) || defined(_M_X64) || defined(__ia64) ||     defined(_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
+// If pointers are 64-bit, then there can be separate counters for each
+// NONDISPATCHABLE_HANDLE type.  Otherwise they are all typedef uint64_t.
+#define DISTINCT_NONDISPATCHABLE_HANDLES
+// Make sure we catch any disagreement between us and the vulkan definition
+static_assert(std::is_pointer<DISTINCT_NONDISPATCHABLE_PHONY_HANDLE>::value,
+              "Mismatched non-dispatchable handle, expected pointer type.");
+#else
+// Make sure we catch any disagreement between us and the vulkan definition
+static_assert(std::is_same<uint64_t, DISTINCT_NONDISPATCHABLE_PHONY_HANDLE>::value,
+              "Mismatched non-dispatchable handle, expected uint64_t.");
+#endif
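+// Editor's note (illustrative, not part of the generated code): on a 32-bit/ILP32
+// build the #else branch applies, every non-dispatchable handle is the same uint64_t
+// typedef, and the single c_uint64_t counter declared further below is shared by all
+// such handle types.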
+
+// Suppress unused warning on Linux
+#if defined(__GNUC__)
+#define DECORATE_UNUSED __attribute__((unused))
+#else
+#define DECORATE_UNUSED
+#endif
+
+// clang-format off
+static const char DECORATE_UNUSED *kVUID_Threading_Info = "UNASSIGNED-Threading-Info";
+static const char DECORATE_UNUSED *kVUID_Threading_MultipleThreads = "UNASSIGNED-Threading-MultipleThreads";
+static const char DECORATE_UNUSED *kVUID_Threading_SingleThreadReuse = "UNASSIGNED-Threading-SingleThreadReuse";
+// clang-format on
+
+#undef DECORATE_UNUSED
+
+class ObjectUseData
+{
+public:
+    class WriteReadCount
+    {
+    public:
+        WriteReadCount(int64_t v) : count(v) {}
+
+        int32_t GetReadCount() const { return (int32_t)(count & 0xFFFFFFFF); }
+        int32_t GetWriteCount() const { return (int32_t)(count >> 32); }
+
+    private:
+        int64_t count;
+    };
+
+    ObjectUseData() : thread(0), writer_reader_count(0) {
+        // silence -Wunused-private-field warning
+        padding[0] = 0;
+    }
+
+    WriteReadCount AddWriter() {
+        int64_t prev = writer_reader_count.fetch_add(1ULL << 32);
+        return WriteReadCount(prev);
+    }
+    WriteReadCount AddReader() {
+        int64_t prev = writer_reader_count.fetch_add(1ULL);
+        return WriteReadCount(prev);
+    }
+    WriteReadCount RemoveWriter() {
+        int64_t prev = writer_reader_count.fetch_add(-(1LL << 32));
+        return WriteReadCount(prev);
+    }
+    WriteReadCount RemoveReader() {
+        int64_t prev = writer_reader_count.fetch_add(-1LL);
+        return WriteReadCount(prev);
+    }
+    WriteReadCount GetCount() {
+        return WriteReadCount(writer_reader_count);
+    }
+
+    void WaitForObjectIdle(bool is_writer)  {
+        // Wait for thread-safe access to object instead of skipping call.
+        while (GetCount().GetReadCount() > (int)(!is_writer) || GetCount().GetWriteCount() > (int)is_writer) {
+            std::this_thread::sleep_for(std::chrono::microseconds(1));
+        }
+    }
+
+    std::atomic<loader_platform_thread_id> thread;
+
+private:
+    // need to update write and read counts atomically. Writer in high
+    // 32 bits, reader in low 32 bits.
+    std::atomic<int64_t> writer_reader_count;
+
+    // Put each lock on its own cache line to avoid false cache line sharing.
+    char padding[(-int(sizeof(std::atomic<loader_platform_thread_id>) + sizeof(std::atomic<int64_t>))) & 63];
+};
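+// Editor's note (illustrative, not part of the generated code): the packed counter
+// keeps the writer count in the high 32 bits and the reader count in the low 32 bits,
+// so after one AddWriter() and two AddReader() calls writer_reader_count holds
+// 0x0000000100000002 -- GetWriteCount() == 1 and GetReadCount() == 2.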
+
+
+template <typename T>
+class counter {
+public:
+    const char *typeName;
+    VkDebugReportObjectTypeEXT objectType;
+    debug_report_data **report_data;
+
+    vl_concurrent_unordered_map<T, std::shared_ptr<ObjectUseData>, 6> object_table;
+
+    void CreateObject(T object) {
+        object_table.insert_or_assign(object, std::make_shared<ObjectUseData>());
+    }
+
+    void DestroyObject(T object) {
+        if (object) {
+            object_table.erase(object);
+        }
+    }
+
+    std::shared_ptr<ObjectUseData> FindObject(T object) {
+        assert(object_table.contains(object));
+        auto iter = std::move(object_table.find(object));
+        if (iter != object_table.end()) {
+            return std::move(iter->second);
+        } else {
+            log_msg(*report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, (uint64_t)(object), kVUID_Threading_Info,
+                    "Couldn't find %s Object 0x%" PRIxLEAST64
+                    ". This should not happen and may indicate a bug in the application.",
+                    object_string[objectType], (uint64_t)(object));
+            return nullptr;
+        }
+    }
+
+    void StartWrite(T object) {
+        if (object == VK_NULL_HANDLE) {
+            return;
+        }
+        bool skip = false;
+        loader_platform_thread_id tid = loader_platform_get_thread_id();
+
+        auto use_data = FindObject(object);
+        if (!use_data) {
+            return;
+        }
+        const ObjectUseData::WriteReadCount prevCount = use_data->AddWriter();
+
+        if (prevCount.GetReadCount() == 0 && prevCount.GetWriteCount() == 0) {
+            // There is no current use of the object.  Record writer thread.
+            use_data->thread = tid;
+        } else {
+            if (prevCount.GetReadCount() == 0) {
+                assert(prevCount.GetWriteCount() != 0);
+                // There are no readers.  Two writers just collided.
+                if (use_data->thread != tid) {
+                    skip |= log_msg(*report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, (uint64_t)(object),
+                        kVUID_Threading_MultipleThreads,
+                        "THREADING ERROR : object of type %s is simultaneously used in "
+                        "thread 0x%" PRIx64 " and thread 0x%" PRIx64,
+                        typeName, (uint64_t)use_data->thread.load(std::memory_order_relaxed), (uint64_t)tid);
+                    if (skip) {
+                        // Wait for thread-safe access to object instead of skipping call.
+                        use_data->WaitForObjectIdle(true);
+                        // There is now no current use of the object.  Record writer thread.
+                        use_data->thread = tid;
+                    } else {
+                        // There is now no current use of the object.  Record writer thread.
+                        use_data->thread = tid;
+                    }
+                } else {
+                    // This is either safe multiple use in one call, or recursive use.
+                    // There is no way to make recursion safe.  Just forge ahead.
+                }
+            } else {
+                // There are readers.  This writer collided with them.
+                if (use_data->thread != tid) {
+                    skip |= log_msg(*report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, (uint64_t)(object),
+                        kVUID_Threading_MultipleThreads,
+                        "THREADING ERROR : object of type %s is simultaneously used in "
+                        "thread 0x%" PRIx64 " and thread 0x%" PRIx64,
+                        typeName, (uint64_t)use_data->thread.load(std::memory_order_relaxed), (uint64_t)tid);
+                    if (skip) {
+                        // Wait for thread-safe access to object instead of skipping call.
+                        use_data->WaitForObjectIdle(true);
+                        // There is now no current use of the object.  Record writer thread.
+                        use_data->thread = tid;
+                    } else {
+                        // Continue with an unsafe use of the object.
+                        use_data->thread = tid;
+                    }
+                } else {
+                    // This is either safe multiple use in one call, or recursive use.
+                    // There is no way to make recursion safe.  Just forge ahead.
+                }
+            }
+        }
+    }
+
+    void FinishWrite(T object) {
+        if (object == VK_NULL_HANDLE) {
+            return;
+        }
+        // Object is no longer in use
+        auto use_data = FindObject(object);
+        if (!use_data) {
+            return;
+        }
+        use_data->RemoveWriter();
+    }
+
+    void StartRead(T object) {
+        if (object == VK_NULL_HANDLE) {
+            return;
+        }
+        bool skip = false;
+        loader_platform_thread_id tid = loader_platform_get_thread_id();
+
+        auto use_data = FindObject(object);
+        if (!use_data) {
+            return;
+        }
+        const ObjectUseData::WriteReadCount prevCount = use_data->AddReader();
+
+        if (prevCount.GetReadCount() == 0 && prevCount.GetWriteCount() == 0) {
+            // There is no current use of the object.
+            use_data->thread = tid;
+        } else if (prevCount.GetWriteCount() > 0 && use_data->thread != tid) {
+            // There is a writer of the object.
+            skip |= log_msg(*report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, (uint64_t)(object),
+                kVUID_Threading_MultipleThreads,
+                "THREADING ERROR : object of type %s is simultaneously used in "
+                "thread 0x%" PRIx64 " and thread 0x%" PRIx64,
+                typeName, (uint64_t)use_data->thread.load(std::memory_order_relaxed), (uint64_t)tid);
+            if (skip) {
+                // Wait for thread-safe access to object instead of skipping call.
+                use_data->WaitForObjectIdle(false);
+                use_data->thread = tid;
+            }
+        } else {
+            // There are other readers of the object.
+        }
+    }
+    void FinishRead(T object) {
+        if (object == VK_NULL_HANDLE) {
+            return;
+        }
+
+        auto use_data = FindObject(object);
+        if (!use_data) {
+            return;
+        }
+        use_data->RemoveReader();
+    }
+    counter(const char *name = "", VkDebugReportObjectTypeEXT type = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, debug_report_data **rep_data = nullptr) {
+        typeName = name;
+        objectType = type;
+        report_data = rep_data;
+    }
+
+private:
+};
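+// Editor's note (illustrative, not part of the generated code): each counter<T> is
+// driven in matched pairs by the layer hooks defined in thread_safety.cc --
+// StartWrite()/StartRead() in a PreCallRecord* function and the corresponding
+// FinishWrite()/FinishRead() in the matching PostCallRecord* function, as in the
+// CmdDrawMeshTasksIndirectNV handlers above.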
+
+class ThreadSafety : public ValidationObject {
+public:
+
+    ReadWriteLock thread_safety_lock;
+
+    // Override chassis read/write locks for this validation object
+    // This override takes a deferred lock. i.e. it is not acquired.
+    virtual read_lock_guard_t read_lock() {
+        return read_lock_guard_t(validation_object_mutex, std::defer_lock);
+    }
+    virtual write_lock_guard_t write_lock() {
+        return write_lock_guard_t(validation_object_mutex, std::defer_lock);
+    }
+
+    // If this ThreadSafety is for a VkDevice, then parent_instance points to the
+    // ThreadSafety object of its parent VkInstance. This is used to get to the counters
+    // for objects created with the instance as parent.
+    ThreadSafety *parent_instance;
+
+    vl_concurrent_unordered_map<VkCommandBuffer, VkCommandPool, 6> command_pool_map;
+    std::unordered_map<VkCommandPool, std::unordered_set<VkCommandBuffer>> pool_command_buffers_map;
+    std::unordered_map<VkDevice, std::unordered_set<VkQueue>> device_queues_map;
+
+    // Track per-descriptorsetlayout and per-descriptorset whether UPDATE_AFTER_BIND is used.
+    // This is used to (sloppily) implement the relaxed externsync rules for UPDATE_AFTER_BIND
+    // descriptors. We model updates of UPDATE_AFTER_BIND descriptors as if they were reads
+    // rather than writes, because they only conflict with the set being freed or reset.
+    //
+    // We don't track the UPDATE_AFTER_BIND state per-binding for a couple reasons:
+    // (1) We only have one counter per object, and if we treated non-UAB as writes
+    //     and UAB as reads then they'd appear to conflict with each other.
+    // (2) Avoid additional tracking of descriptor binding state in the descriptor set
+    //     layout, and tracking of which bindings are accessed by a VkDescriptorUpdateTemplate.
+    vl_concurrent_unordered_map<VkDescriptorSetLayout, bool, 4> dsl_update_after_bind_map;
+    vl_concurrent_unordered_map<VkDescriptorSet, bool, 6> ds_update_after_bind_map;
+    bool DsUpdateAfterBind(VkDescriptorSet) const;
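+    // Editor's note (illustrative consequence of the comment above, not generated):
+    // because UPDATE_AFTER_BIND updates are recorded as reads, two threads updating
+    // the same UPDATE_AFTER_BIND descriptor set concurrently do not raise a threading
+    // error, while freeing or resetting that set (a write) still conflicts with an
+    // in-flight update.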
+
+    counter<VkCommandBuffer> c_VkCommandBuffer;
+    counter<VkDevice> c_VkDevice;
+    counter<VkInstance> c_VkInstance;
+    counter<VkQueue> c_VkQueue;
+#ifdef DISTINCT_NONDISPATCHABLE_HANDLES
+
+    // Special entry to allow tracking of command pool Reset and Destroy
+    counter<VkCommandPool> c_VkCommandPoolContents;
+    counter<VkAccelerationStructureNV> c_VkAccelerationStructureNV;
+    counter<VkBuffer> c_VkBuffer;
+    counter<VkBufferView> c_VkBufferView;
+    counter<VkCommandPool> c_VkCommandPool;
+    counter<VkDebugReportCallbackEXT> c_VkDebugReportCallbackEXT;
+    counter<VkDebugUtilsMessengerEXT> c_VkDebugUtilsMessengerEXT;
+    counter<VkDescriptorPool> c_VkDescriptorPool;
+    counter<VkDescriptorSet> c_VkDescriptorSet;
+    counter<VkDescriptorSetLayout> c_VkDescriptorSetLayout;
+    counter<VkDescriptorUpdateTemplate> c_VkDescriptorUpdateTemplate;
+    counter<VkDeviceMemory> c_VkDeviceMemory;
+    counter<VkDisplayKHR> c_VkDisplayKHR;
+    counter<VkDisplayModeKHR> c_VkDisplayModeKHR;
+    counter<VkEvent> c_VkEvent;
+    counter<VkFence> c_VkFence;
+    counter<VkFramebuffer> c_VkFramebuffer;
+    counter<VkImage> c_VkImage;
+    counter<VkImageView> c_VkImageView;
+    counter<VkIndirectCommandsLayoutNVX> c_VkIndirectCommandsLayoutNVX;
+    counter<VkObjectTableNVX> c_VkObjectTableNVX;
+    counter<VkPerformanceConfigurationINTEL> c_VkPerformanceConfigurationINTEL;
+    counter<VkPipeline> c_VkPipeline;
+    counter<VkPipelineCache> c_VkPipelineCache;
+    counter<VkPipelineLayout> c_VkPipelineLayout;
+    counter<VkQueryPool> c_VkQueryPool;
+    counter<VkRenderPass> c_VkRenderPass;
+    counter<VkSampler> c_VkSampler;
+    counter<VkSamplerYcbcrConversion> c_VkSamplerYcbcrConversion;
+    counter<VkSemaphore> c_VkSemaphore;
+    counter<VkShaderModule> c_VkShaderModule;
+    counter<VkSurfaceKHR> c_VkSurfaceKHR;
+    counter<VkSwapchainKHR> c_VkSwapchainKHR;
+    counter<VkValidationCacheEXT> c_VkValidationCacheEXT;
+
+
+#else   // DISTINCT_NONDISPATCHABLE_HANDLES
+    // Special entry to allow tracking of command pool Reset and Destroy
+    counter<uint64_t> c_VkCommandPoolContents;
+
+    counter<uint64_t> c_uint64_t;
+#endif  // DISTINCT_NONDISPATCHABLE_HANDLES
+
+    ThreadSafety(ThreadSafety *parent)
+        : parent_instance(parent),
+          c_VkCommandBuffer("VkCommandBuffer", VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, &report_data),
+          c_VkDevice("VkDevice", VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, &report_data),
+          c_VkInstance("VkInstance", VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, &report_data),
+          c_VkQueue("VkQueue", VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, &report_data),
+          c_VkCommandPoolContents("VkCommandPool", VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, &report_data),
+
+#ifdef DISTINCT_NONDISPATCHABLE_HANDLES
+          c_VkAccelerationStructureNV("VkAccelerationStructureNV", VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT, &report_data),
+          c_VkBuffer("VkBuffer", VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, &report_data),
+          c_VkBufferView("VkBufferView", VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, &report_data),
+          c_VkCommandPool("VkCommandPool", VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, &report_data),
+          c_VkDebugReportCallbackEXT("VkDebugReportCallbackEXT", VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT, &report_data),
+          c_VkDebugUtilsMessengerEXT("VkDebugUtilsMessengerEXT", VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, &report_data),
+          c_VkDescriptorPool("VkDescriptorPool", VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, &report_data),
+          c_VkDescriptorSet("VkDescriptorSet", VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, &report_data),
+          c_VkDescriptorSetLayout("VkDescriptorSetLayout", VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, &report_data),
+          c_VkDescriptorUpdateTemplate("VkDescriptorUpdateTemplate", VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, &report_data),
+          c_VkDeviceMemory("VkDeviceMemory", VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, &report_data),
+          c_VkDisplayKHR("VkDisplayKHR", VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT, &report_data),
+          c_VkDisplayModeKHR("VkDisplayModeKHR", VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT, &report_data),
+          c_VkEvent("VkEvent", VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, &report_data),
+          c_VkFence("VkFence", VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, &report_data),
+          c_VkFramebuffer("VkFramebuffer", VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, &report_data),
+          c_VkImage("VkImage", VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, &report_data),
+          c_VkImageView("VkImageView", VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, &report_data),
+          c_VkIndirectCommandsLayoutNVX("VkIndirectCommandsLayoutNVX", VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT, &report_data),
+          c_VkObjectTableNVX("VkObjectTableNVX", VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT, &report_data),
+          c_VkPerformanceConfigurationINTEL("VkPerformanceConfigurationINTEL", VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, &report_data),
+          c_VkPipeline("VkPipeline", VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, &report_data),
+          c_VkPipelineCache("VkPipelineCache", VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, &report_data),
+          c_VkPipelineLayout("VkPipelineLayout", VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, &report_data),
+          c_VkQueryPool("VkQueryPool", VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, &report_data),
+          c_VkRenderPass("VkRenderPass", VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, &report_data),
+          c_VkSampler("VkSampler", VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, &report_data),
+          c_VkSamplerYcbcrConversion("VkSamplerYcbcrConversion", VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, &report_data),
+          c_VkSemaphore("VkSemaphore", VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, &report_data),
+          c_VkShaderModule("VkShaderModule", VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, &report_data),
+          c_VkSurfaceKHR("VkSurfaceKHR", VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, &report_data),
+          c_VkSwapchainKHR("VkSwapchainKHR", VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, &report_data),
+          c_VkValidationCacheEXT("VkValidationCacheEXT", VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT, &report_data)
+
+
+#else   // DISTINCT_NONDISPATCHABLE_HANDLES
+          c_uint64_t("NON_DISPATCHABLE_HANDLE", VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, &report_data)
+#endif  // DISTINCT_NONDISPATCHABLE_HANDLES
+              {};
+
+#define WRAPPER(type)                                                \
+    void StartWriteObject(type object) {                             \
+        c_##type.StartWrite(object);                                 \
+    }                                                                \
+    void FinishWriteObject(type object) {                            \
+        c_##type.FinishWrite(object);                                \
+    }                                                                \
+    void StartReadObject(type object) {                              \
+        c_##type.StartRead(object);                                  \
+    }                                                                \
+    void FinishReadObject(type object) {                             \
+        c_##type.FinishRead(object);                                 \
+    }                                                                \
+    void CreateObject(type object) {                                 \
+        c_##type.CreateObject(object);                               \
+    }                                                                \
+    void DestroyObject(type object) {                                \
+        c_##type.DestroyObject(object);                              \
+    }
+
+#define WRAPPER_PARENT_INSTANCE(type)                                                   \
+    void StartWriteObjectParentInstance(type object) {                                  \
+        (parent_instance ? parent_instance : this)->c_##type.StartWrite(object);        \
+    }                                                                                   \
+    void FinishWriteObjectParentInstance(type object) {                                 \
+        (parent_instance ? parent_instance : this)->c_##type.FinishWrite(object);       \
+    }                                                                                   \
+    void StartReadObjectParentInstance(type object) {                                   \
+        (parent_instance ? parent_instance : this)->c_##type.StartRead(object);         \
+    }                                                                                   \
+    void FinishReadObjectParentInstance(type object) {                                  \
+        (parent_instance ? parent_instance : this)->c_##type.FinishRead(object);        \
+    }                                                                                   \
+    void CreateObjectParentInstance(type object) {                                      \
+        (parent_instance ? parent_instance : this)->c_##type.CreateObject(object);      \
+    }                                                                                   \
+    void DestroyObjectParentInstance(type object) {                                     \
+        (parent_instance ? parent_instance : this)->c_##type.DestroyObject(object);     \
+    }
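+// Editor's note (illustrative, not part of the generated code): WRAPPER(VkQueue)
+// below expands to StartWriteObject/FinishWriteObject/StartReadObject/
+// FinishReadObject/CreateObject/DestroyObject overloads that forward to c_VkQueue,
+// while WRAPPER_PARENT_INSTANCE routes the same operations for instance-owned handles
+// such as VkSurfaceKHR through parent_instance (when set), so device- and
+// instance-level calls share a single counter.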
+
+WRAPPER_PARENT_INSTANCE(VkDevice)
+WRAPPER_PARENT_INSTANCE(VkInstance)
+WRAPPER(VkQueue)
+#ifdef DISTINCT_NONDISPATCHABLE_HANDLES
+WRAPPER(VkAccelerationStructureNV)
+WRAPPER(VkBuffer)
+WRAPPER(VkBufferView)
+WRAPPER(VkCommandPool)
+WRAPPER_PARENT_INSTANCE(VkDebugReportCallbackEXT)
+WRAPPER_PARENT_INSTANCE(VkDebugUtilsMessengerEXT)
+WRAPPER(VkDescriptorPool)
+WRAPPER(VkDescriptorSet)
+WRAPPER(VkDescriptorSetLayout)
+WRAPPER(VkDescriptorUpdateTemplate)
+WRAPPER(VkDeviceMemory)
+WRAPPER(VkDisplayKHR)
+WRAPPER(VkDisplayModeKHR)
+WRAPPER(VkEvent)
+WRAPPER(VkFence)
+WRAPPER(VkFramebuffer)
+WRAPPER(VkImage)
+WRAPPER(VkImageView)
+WRAPPER(VkIndirectCommandsLayoutNVX)
+WRAPPER(VkObjectTableNVX)
+WRAPPER(VkPerformanceConfigurationINTEL)
+WRAPPER(VkPipeline)
+WRAPPER(VkPipelineCache)
+WRAPPER(VkPipelineLayout)
+WRAPPER(VkQueryPool)
+WRAPPER(VkRenderPass)
+WRAPPER(VkSampler)
+WRAPPER(VkSamplerYcbcrConversion)
+WRAPPER(VkSemaphore)
+WRAPPER(VkShaderModule)
+WRAPPER_PARENT_INSTANCE(VkSurfaceKHR)
+WRAPPER(VkSwapchainKHR)
+WRAPPER(VkValidationCacheEXT)
+
+
+#else   // DISTINCT_NONDISPATCHABLE_HANDLES
+WRAPPER(uint64_t)
+WRAPPER_PARENT_INSTANCE(uint64_t)
+#endif  // DISTINCT_NONDISPATCHABLE_HANDLES
+
+    void CreateObject(VkCommandBuffer object) {
+        c_VkCommandBuffer.CreateObject(object);
+    }
+    void DestroyObject(VkCommandBuffer object) {
+        c_VkCommandBuffer.DestroyObject(object);
+    }
+
+    // VkCommandBuffer needs check for implicit use of command pool
+    void StartWriteObject(VkCommandBuffer object, bool lockPool = true) {
+        if (lockPool) {
+            auto iter = command_pool_map.find(object);
+            if (iter != command_pool_map.end()) {
+                VkCommandPool pool = iter->second;
+                StartWriteObject(pool);
+            }
+        }
+        c_VkCommandBuffer.StartWrite(object);
+    }
+    void FinishWriteObject(VkCommandBuffer object, bool lockPool = true) {
+        c_VkCommandBuffer.FinishWrite(object);
+        if (lockPool) {
+            auto iter = command_pool_map.find(object);
+            if (iter != command_pool_map.end()) {
+                VkCommandPool pool = iter->second;
+                FinishWriteObject(pool);
+            }
+        }
+    }
+    void StartReadObject(VkCommandBuffer object) {
+        auto iter = command_pool_map.find(object);
+        if (iter != command_pool_map.end()) {
+            VkCommandPool pool = iter->second;
+            // We set up a read guard against the "Contents" counter to catch conflict vs. vkResetCommandPool and vkDestroyCommandPool
+            // while *not* establishing a read guard against the command pool counter itself to avoid false positives for
+            // non-externally sync'd command buffers
+            c_VkCommandPoolContents.StartRead(pool);
+        }
+        c_VkCommandBuffer.StartRead(object);
+    }
+    void FinishReadObject(VkCommandBuffer object) {
+        c_VkCommandBuffer.FinishRead(object);
+        auto iter = command_pool_map.find(object);
+        if (iter != command_pool_map.end()) {
+            VkCommandPool pool = iter->second;
+            c_VkCommandPoolContents.FinishRead(pool);
+        }
+    }
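+    // Editor's note (illustrative, not part of the generated code): command-buffer
+    // commands recorded with StartWriteObject/FinishWriteObject (e.g.
+    // PreCallRecordCmdSetLineStippleEXT above) therefore also write-lock the owning
+    // VkCommandPool found via command_pool_map, while read-style uses only guard the
+    // pool's "Contents" counter against vkResetCommandPool/vkDestroyCommandPool.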
+
+void PreCallRecordCreateInstance(
+    const VkInstanceCreateInfo*                 pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkInstance*                                 pInstance);
+
+void PostCallRecordCreateInstance(
+    const VkInstanceCreateInfo*                 pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkInstance*                                 pInstance,
+    VkResult                                    result);
+
+void PreCallRecordDestroyInstance(
+    VkInstance                                  instance,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroyInstance(
+    VkInstance                                  instance,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordEnumeratePhysicalDevices(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceCount,
+    VkPhysicalDevice*                           pPhysicalDevices);
+
+void PostCallRecordEnumeratePhysicalDevices(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceCount,
+    VkPhysicalDevice*                           pPhysicalDevices,
+    VkResult                                    result);
+
+void PreCallRecordGetInstanceProcAddr(
+    VkInstance                                  instance,
+    const char*                                 pName);
+
+void PostCallRecordGetInstanceProcAddr(
+    VkInstance                                  instance,
+    const char*                                 pName);
+
+void PreCallRecordGetDeviceProcAddr(
+    VkDevice                                    device,
+    const char*                                 pName);
+
+void PostCallRecordGetDeviceProcAddr(
+    VkDevice                                    device,
+    const char*                                 pName);
+
+void PreCallRecordCreateDevice(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDeviceCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDevice*                                   pDevice);
+
+void PostCallRecordCreateDevice(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDeviceCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDevice*                                   pDevice,
+    VkResult                                    result);
+
+void PreCallRecordDestroyDevice(
+    VkDevice                                    device,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroyDevice(
+    VkDevice                                    device,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordGetDeviceQueue(
+    VkDevice                                    device,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t                                    queueIndex,
+    VkQueue*                                    pQueue);
+
+void PostCallRecordGetDeviceQueue(
+    VkDevice                                    device,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t                                    queueIndex,
+    VkQueue*                                    pQueue);
+
+void PreCallRecordQueueSubmit(
+    VkQueue                                     queue,
+    uint32_t                                    submitCount,
+    const VkSubmitInfo*                         pSubmits,
+    VkFence                                     fence);
+
+void PostCallRecordQueueSubmit(
+    VkQueue                                     queue,
+    uint32_t                                    submitCount,
+    const VkSubmitInfo*                         pSubmits,
+    VkFence                                     fence,
+    VkResult                                    result);
+
+void PreCallRecordQueueWaitIdle(
+    VkQueue                                     queue);
+
+void PostCallRecordQueueWaitIdle(
+    VkQueue                                     queue,
+    VkResult                                    result);
+
+void PreCallRecordDeviceWaitIdle(
+    VkDevice                                    device);
+
+void PostCallRecordDeviceWaitIdle(
+    VkDevice                                    device,
+    VkResult                                    result);
+
+void PreCallRecordAllocateMemory(
+    VkDevice                                    device,
+    const VkMemoryAllocateInfo*                 pAllocateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDeviceMemory*                             pMemory);
+
+void PostCallRecordAllocateMemory(
+    VkDevice                                    device,
+    const VkMemoryAllocateInfo*                 pAllocateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDeviceMemory*                             pMemory,
+    VkResult                                    result);
+
+void PreCallRecordFreeMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordFreeMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordMapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                offset,
+    VkDeviceSize                                size,
+    VkMemoryMapFlags                            flags,
+    void**                                      ppData);
+
+void PostCallRecordMapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                offset,
+    VkDeviceSize                                size,
+    VkMemoryMapFlags                            flags,
+    void**                                      ppData,
+    VkResult                                    result);
+
+void PreCallRecordUnmapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory);
+
+void PostCallRecordUnmapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory);
+
+void PreCallRecordFlushMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges);
+
+void PostCallRecordFlushMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges,
+    VkResult                                    result);
+
+void PreCallRecordInvalidateMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges);
+
+void PostCallRecordInvalidateMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges,
+    VkResult                                    result);
+
+void PreCallRecordGetDeviceMemoryCommitment(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize*                               pCommittedMemoryInBytes);
+
+void PostCallRecordGetDeviceMemoryCommitment(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize*                               pCommittedMemoryInBytes);
+
+void PreCallRecordBindBufferMemory(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset);
+
+void PostCallRecordBindBufferMemory(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset,
+    VkResult                                    result);
+
+void PreCallRecordBindImageMemory(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset);
+
+void PostCallRecordBindImageMemory(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset,
+    VkResult                                    result);
+
+void PreCallRecordGetBufferMemoryRequirements(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkMemoryRequirements*                       pMemoryRequirements);
+
+void PostCallRecordGetBufferMemoryRequirements(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkMemoryRequirements*                       pMemoryRequirements);
+
+void PreCallRecordGetImageMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkMemoryRequirements*                       pMemoryRequirements);
+
+void PostCallRecordGetImageMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkMemoryRequirements*                       pMemoryRequirements);
+
+void PreCallRecordGetImageSparseMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements*            pSparseMemoryRequirements);
+
+void PostCallRecordGetImageSparseMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements*            pSparseMemoryRequirements);
+
+void PreCallRecordQueueBindSparse(
+    VkQueue                                     queue,
+    uint32_t                                    bindInfoCount,
+    const VkBindSparseInfo*                     pBindInfo,
+    VkFence                                     fence);
+
+void PostCallRecordQueueBindSparse(
+    VkQueue                                     queue,
+    uint32_t                                    bindInfoCount,
+    const VkBindSparseInfo*                     pBindInfo,
+    VkFence                                     fence,
+    VkResult                                    result);
+
+void PreCallRecordCreateFence(
+    VkDevice                                    device,
+    const VkFenceCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence);
+
+void PostCallRecordCreateFence(
+    VkDevice                                    device,
+    const VkFenceCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence,
+    VkResult                                    result);
+
+void PreCallRecordDestroyFence(
+    VkDevice                                    device,
+    VkFence                                     fence,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroyFence(
+    VkDevice                                    device,
+    VkFence                                     fence,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordResetFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences);
+
+void PostCallRecordResetFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences,
+    VkResult                                    result);
+
+void PreCallRecordGetFenceStatus(
+    VkDevice                                    device,
+    VkFence                                     fence);
+
+void PostCallRecordGetFenceStatus(
+    VkDevice                                    device,
+    VkFence                                     fence,
+    VkResult                                    result);
+
+void PreCallRecordWaitForFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences,
+    VkBool32                                    waitAll,
+    uint64_t                                    timeout);
+
+void PostCallRecordWaitForFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences,
+    VkBool32                                    waitAll,
+    uint64_t                                    timeout,
+    VkResult                                    result);
+
+void PreCallRecordCreateSemaphore(
+    VkDevice                                    device,
+    const VkSemaphoreCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSemaphore*                                pSemaphore);
+
+void PostCallRecordCreateSemaphore(
+    VkDevice                                    device,
+    const VkSemaphoreCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSemaphore*                                pSemaphore,
+    VkResult                                    result);
+
+void PreCallRecordDestroySemaphore(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroySemaphore(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordCreateEvent(
+    VkDevice                                    device,
+    const VkEventCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkEvent*                                    pEvent);
+
+void PostCallRecordCreateEvent(
+    VkDevice                                    device,
+    const VkEventCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkEvent*                                    pEvent,
+    VkResult                                    result);
+
+void PreCallRecordDestroyEvent(
+    VkDevice                                    device,
+    VkEvent                                     event,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroyEvent(
+    VkDevice                                    device,
+    VkEvent                                     event,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordGetEventStatus(
+    VkDevice                                    device,
+    VkEvent                                     event);
+
+void PostCallRecordGetEventStatus(
+    VkDevice                                    device,
+    VkEvent                                     event,
+    VkResult                                    result);
+
+void PreCallRecordSetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event);
+
+void PostCallRecordSetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event,
+    VkResult                                    result);
+
+void PreCallRecordResetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event);
+
+void PostCallRecordResetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event,
+    VkResult                                    result);
+
+void PreCallRecordCreateQueryPool(
+    VkDevice                                    device,
+    const VkQueryPoolCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkQueryPool*                                pQueryPool);
+
+void PostCallRecordCreateQueryPool(
+    VkDevice                                    device,
+    const VkQueryPoolCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkQueryPool*                                pQueryPool,
+    VkResult                                    result);
+
+void PreCallRecordDestroyQueryPool(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroyQueryPool(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordGetQueryPoolResults(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    size_t                                      dataSize,
+    void*                                       pData,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags);
+
+void PostCallRecordGetQueryPoolResults(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    size_t                                      dataSize,
+    void*                                       pData,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags,
+    VkResult                                    result);
+
+void PreCallRecordCreateBuffer(
+    VkDevice                                    device,
+    const VkBufferCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBuffer*                                   pBuffer);
+
+void PostCallRecordCreateBuffer(
+    VkDevice                                    device,
+    const VkBufferCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBuffer*                                   pBuffer,
+    VkResult                                    result);
+
+void PreCallRecordDestroyBuffer(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroyBuffer(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordCreateBufferView(
+    VkDevice                                    device,
+    const VkBufferViewCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBufferView*                               pView);
+
+void PostCallRecordCreateBufferView(
+    VkDevice                                    device,
+    const VkBufferViewCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBufferView*                               pView,
+    VkResult                                    result);
+
+void PreCallRecordDestroyBufferView(
+    VkDevice                                    device,
+    VkBufferView                                bufferView,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroyBufferView(
+    VkDevice                                    device,
+    VkBufferView                                bufferView,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordCreateImage(
+    VkDevice                                    device,
+    const VkImageCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImage*                                    pImage);
+
+void PostCallRecordCreateImage(
+    VkDevice                                    device,
+    const VkImageCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImage*                                    pImage,
+    VkResult                                    result);
+
+void PreCallRecordDestroyImage(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroyImage(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordGetImageSubresourceLayout(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkImageSubresource*                   pSubresource,
+    VkSubresourceLayout*                        pLayout);
+
+void PostCallRecordGetImageSubresourceLayout(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkImageSubresource*                   pSubresource,
+    VkSubresourceLayout*                        pLayout);
+
+void PreCallRecordCreateImageView(
+    VkDevice                                    device,
+    const VkImageViewCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImageView*                                pView);
+
+void PostCallRecordCreateImageView(
+    VkDevice                                    device,
+    const VkImageViewCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImageView*                                pView,
+    VkResult                                    result);
+
+void PreCallRecordDestroyImageView(
+    VkDevice                                    device,
+    VkImageView                                 imageView,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroyImageView(
+    VkDevice                                    device,
+    VkImageView                                 imageView,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordCreateShaderModule(
+    VkDevice                                    device,
+    const VkShaderModuleCreateInfo*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkShaderModule*                             pShaderModule);
+
+void PostCallRecordCreateShaderModule(
+    VkDevice                                    device,
+    const VkShaderModuleCreateInfo*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkShaderModule*                             pShaderModule,
+    VkResult                                    result);
+
+void PreCallRecordDestroyShaderModule(
+    VkDevice                                    device,
+    VkShaderModule                              shaderModule,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroyShaderModule(
+    VkDevice                                    device,
+    VkShaderModule                              shaderModule,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordCreatePipelineCache(
+    VkDevice                                    device,
+    const VkPipelineCacheCreateInfo*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineCache*                            pPipelineCache);
+
+void PostCallRecordCreatePipelineCache(
+    VkDevice                                    device,
+    const VkPipelineCacheCreateInfo*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineCache*                            pPipelineCache,
+    VkResult                                    result);
+
+void PreCallRecordDestroyPipelineCache(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroyPipelineCache(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordGetPipelineCacheData(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    size_t*                                     pDataSize,
+    void*                                       pData);
+
+void PostCallRecordGetPipelineCacheData(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    size_t*                                     pDataSize,
+    void*                                       pData,
+    VkResult                                    result);
+
+void PreCallRecordMergePipelineCaches(
+    VkDevice                                    device,
+    VkPipelineCache                             dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkPipelineCache*                      pSrcCaches);
+
+void PostCallRecordMergePipelineCaches(
+    VkDevice                                    device,
+    VkPipelineCache                             dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkPipelineCache*                      pSrcCaches,
+    VkResult                                    result);
+
+void PreCallRecordCreateGraphicsPipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkGraphicsPipelineCreateInfo*         pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines);
+
+void PostCallRecordCreateGraphicsPipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkGraphicsPipelineCreateInfo*         pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines,
+    VkResult                                    result);
+
+void PreCallRecordCreateComputePipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkComputePipelineCreateInfo*          pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines);
+
+void PostCallRecordCreateComputePipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkComputePipelineCreateInfo*          pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines,
+    VkResult                                    result);
+
+void PreCallRecordDestroyPipeline(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroyPipeline(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordCreatePipelineLayout(
+    VkDevice                                    device,
+    const VkPipelineLayoutCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineLayout*                           pPipelineLayout);
+
+void PostCallRecordCreatePipelineLayout(
+    VkDevice                                    device,
+    const VkPipelineLayoutCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineLayout*                           pPipelineLayout,
+    VkResult                                    result);
+
+void PreCallRecordDestroyPipelineLayout(
+    VkDevice                                    device,
+    VkPipelineLayout                            pipelineLayout,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroyPipelineLayout(
+    VkDevice                                    device,
+    VkPipelineLayout                            pipelineLayout,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordCreateSampler(
+    VkDevice                                    device,
+    const VkSamplerCreateInfo*                  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSampler*                                  pSampler);
+
+void PostCallRecordCreateSampler(
+    VkDevice                                    device,
+    const VkSamplerCreateInfo*                  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSampler*                                  pSampler,
+    VkResult                                    result);
+
+void PreCallRecordDestroySampler(
+    VkDevice                                    device,
+    VkSampler                                   sampler,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroySampler(
+    VkDevice                                    device,
+    VkSampler                                   sampler,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordCreateDescriptorSetLayout(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorSetLayout*                      pSetLayout);
+
+void PostCallRecordCreateDescriptorSetLayout(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorSetLayout*                      pSetLayout,
+    VkResult                                    result);
+
+void PreCallRecordDestroyDescriptorSetLayout(
+    VkDevice                                    device,
+    VkDescriptorSetLayout                       descriptorSetLayout,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroyDescriptorSetLayout(
+    VkDevice                                    device,
+    VkDescriptorSetLayout                       descriptorSetLayout,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordCreateDescriptorPool(
+    VkDevice                                    device,
+    const VkDescriptorPoolCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorPool*                           pDescriptorPool);
+
+void PostCallRecordCreateDescriptorPool(
+    VkDevice                                    device,
+    const VkDescriptorPoolCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorPool*                           pDescriptorPool,
+    VkResult                                    result);
+
+void PreCallRecordDestroyDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroyDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordResetDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    VkDescriptorPoolResetFlags                  flags);
+
+void PostCallRecordResetDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    VkDescriptorPoolResetFlags                  flags,
+    VkResult                                    result);
+
+void PreCallRecordAllocateDescriptorSets(
+    VkDevice                                    device,
+    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
+    VkDescriptorSet*                            pDescriptorSets);
+
+void PostCallRecordAllocateDescriptorSets(
+    VkDevice                                    device,
+    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
+    VkDescriptorSet*                            pDescriptorSets,
+    VkResult                                    result);
+
+void PreCallRecordFreeDescriptorSets(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets);
+
+void PostCallRecordFreeDescriptorSets(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets,
+    VkResult                                    result);
+
+void PreCallRecordUpdateDescriptorSets(
+    VkDevice                                    device,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites,
+    uint32_t                                    descriptorCopyCount,
+    const VkCopyDescriptorSet*                  pDescriptorCopies);
+
+void PostCallRecordUpdateDescriptorSets(
+    VkDevice                                    device,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites,
+    uint32_t                                    descriptorCopyCount,
+    const VkCopyDescriptorSet*                  pDescriptorCopies);
+
+void PreCallRecordCreateFramebuffer(
+    VkDevice                                    device,
+    const VkFramebufferCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFramebuffer*                              pFramebuffer);
+
+void PostCallRecordCreateFramebuffer(
+    VkDevice                                    device,
+    const VkFramebufferCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFramebuffer*                              pFramebuffer,
+    VkResult                                    result);
+
+void PreCallRecordDestroyFramebuffer(
+    VkDevice                                    device,
+    VkFramebuffer                               framebuffer,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroyFramebuffer(
+    VkDevice                                    device,
+    VkFramebuffer                               framebuffer,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordCreateRenderPass(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass);
+
+void PostCallRecordCreateRenderPass(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass,
+    VkResult                                    result);
+
+void PreCallRecordDestroyRenderPass(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroyRenderPass(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordGetRenderAreaGranularity(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    VkExtent2D*                                 pGranularity);
+
+void PostCallRecordGetRenderAreaGranularity(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    VkExtent2D*                                 pGranularity);
+
+void PreCallRecordCreateCommandPool(
+    VkDevice                                    device,
+    const VkCommandPoolCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkCommandPool*                              pCommandPool);
+
+void PostCallRecordCreateCommandPool(
+    VkDevice                                    device,
+    const VkCommandPoolCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkCommandPool*                              pCommandPool,
+    VkResult                                    result);
+
+void PreCallRecordDestroyCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroyCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordResetCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolResetFlags                     flags);
+
+void PostCallRecordResetCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolResetFlags                     flags,
+    VkResult                                    result);
+
+void PreCallRecordAllocateCommandBuffers(
+    VkDevice                                    device,
+    const VkCommandBufferAllocateInfo*          pAllocateInfo,
+    VkCommandBuffer*                            pCommandBuffers);
+
+void PostCallRecordAllocateCommandBuffers(
+    VkDevice                                    device,
+    const VkCommandBufferAllocateInfo*          pAllocateInfo,
+    VkCommandBuffer*                            pCommandBuffers,
+    VkResult                                    result);
+
+void PreCallRecordFreeCommandBuffers(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers);
+
+void PostCallRecordFreeCommandBuffers(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers);
+
+void PreCallRecordBeginCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    const VkCommandBufferBeginInfo*             pBeginInfo);
+
+void PostCallRecordBeginCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    const VkCommandBufferBeginInfo*             pBeginInfo,
+    VkResult                                    result);
+
+void PreCallRecordEndCommandBuffer(
+    VkCommandBuffer                             commandBuffer);
+
+void PostCallRecordEndCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkResult                                    result);
+
+void PreCallRecordResetCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkCommandBufferResetFlags                   flags);
+
+void PostCallRecordResetCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkCommandBufferResetFlags                   flags,
+    VkResult                                    result);
+
+void PreCallRecordCmdBindPipeline(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipeline                                  pipeline);
+
+void PostCallRecordCmdBindPipeline(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipeline                                  pipeline);
+
+void PreCallRecordCmdSetViewport(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewport*                           pViewports);
+
+void PostCallRecordCmdSetViewport(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewport*                           pViewports);
+
+void PreCallRecordCmdSetScissor(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstScissor,
+    uint32_t                                    scissorCount,
+    const VkRect2D*                             pScissors);
+
+void PostCallRecordCmdSetScissor(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstScissor,
+    uint32_t                                    scissorCount,
+    const VkRect2D*                             pScissors);
+
+void PreCallRecordCmdSetLineWidth(
+    VkCommandBuffer                             commandBuffer,
+    float                                       lineWidth);
+
+void PostCallRecordCmdSetLineWidth(
+    VkCommandBuffer                             commandBuffer,
+    float                                       lineWidth);
+
+void PreCallRecordCmdSetDepthBias(
+    VkCommandBuffer                             commandBuffer,
+    float                                       depthBiasConstantFactor,
+    float                                       depthBiasClamp,
+    float                                       depthBiasSlopeFactor);
+
+void PostCallRecordCmdSetDepthBias(
+    VkCommandBuffer                             commandBuffer,
+    float                                       depthBiasConstantFactor,
+    float                                       depthBiasClamp,
+    float                                       depthBiasSlopeFactor);
+
+void PreCallRecordCmdSetBlendConstants(
+    VkCommandBuffer                             commandBuffer,
+    const float                                 blendConstants[4]);
+
+void PostCallRecordCmdSetBlendConstants(
+    VkCommandBuffer                             commandBuffer,
+    const float                                 blendConstants[4]);
+
+void PreCallRecordCmdSetDepthBounds(
+    VkCommandBuffer                             commandBuffer,
+    float                                       minDepthBounds,
+    float                                       maxDepthBounds);
+
+void PostCallRecordCmdSetDepthBounds(
+    VkCommandBuffer                             commandBuffer,
+    float                                       minDepthBounds,
+    float                                       maxDepthBounds);
+
+void PreCallRecordCmdSetStencilCompareMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    compareMask);
+
+void PostCallRecordCmdSetStencilCompareMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    compareMask);
+
+void PreCallRecordCmdSetStencilWriteMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    writeMask);
+
+void PostCallRecordCmdSetStencilWriteMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    writeMask);
+
+void PreCallRecordCmdSetStencilReference(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    reference);
+
+void PostCallRecordCmdSetStencilReference(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    reference);
+
+void PreCallRecordCmdBindDescriptorSets(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    firstSet,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets,
+    uint32_t                                    dynamicOffsetCount,
+    const uint32_t*                             pDynamicOffsets);
+
+void PostCallRecordCmdBindDescriptorSets(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    firstSet,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets,
+    uint32_t                                    dynamicOffsetCount,
+    const uint32_t*                             pDynamicOffsets);
+
+void PreCallRecordCmdBindIndexBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkIndexType                                 indexType);
+
+void PostCallRecordCmdBindIndexBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkIndexType                                 indexType);
+
+void PreCallRecordCmdBindVertexBuffers(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets);
+
+void PostCallRecordCmdBindVertexBuffers(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets);
+
+void PreCallRecordCmdDraw(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    vertexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstVertex,
+    uint32_t                                    firstInstance);
+
+void PostCallRecordCmdDraw(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    vertexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstVertex,
+    uint32_t                                    firstInstance);
+
+void PreCallRecordCmdDrawIndexed(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    indexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstIndex,
+    int32_t                                     vertexOffset,
+    uint32_t                                    firstInstance);
+
+void PostCallRecordCmdDrawIndexed(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    indexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstIndex,
+    int32_t                                     vertexOffset,
+    uint32_t                                    firstInstance);
+
+void PreCallRecordCmdDrawIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride);
+
+void PostCallRecordCmdDrawIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride);
+
+void PreCallRecordCmdDrawIndexedIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride);
+
+void PostCallRecordCmdDrawIndexedIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride);
+
+void PreCallRecordCmdDispatch(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ);
+
+void PostCallRecordCmdDispatch(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ);
+
+void PreCallRecordCmdDispatchIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset);
+
+void PostCallRecordCmdDispatchIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset);
+
+void PreCallRecordCmdCopyBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferCopy*                         pRegions);
+
+void PostCallRecordCmdCopyBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferCopy*                         pRegions);
+
+void PreCallRecordCmdCopyImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageCopy*                          pRegions);
+
+void PostCallRecordCmdCopyImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageCopy*                          pRegions);
+
+void PreCallRecordCmdBlitImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageBlit*                          pRegions,
+    VkFilter                                    filter);
+
+void PostCallRecordCmdBlitImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageBlit*                          pRegions,
+    VkFilter                                    filter);
+
+void PreCallRecordCmdCopyBufferToImage(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions);
+
+void PostCallRecordCmdCopyBufferToImage(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions);
+
+void PreCallRecordCmdCopyImageToBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions);
+
+void PostCallRecordCmdCopyImageToBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions);
+
+void PreCallRecordCmdUpdateBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                dataSize,
+    const void*                                 pData);
+
+void PostCallRecordCmdUpdateBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                dataSize,
+    const void*                                 pData);
+
+void PreCallRecordCmdFillBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                size,
+    uint32_t                                    data);
+
+void PostCallRecordCmdFillBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                size,
+    uint32_t                                    data);
+
+void PreCallRecordCmdClearColorImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearColorValue*                    pColor,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges);
+
+void PostCallRecordCmdClearColorImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearColorValue*                    pColor,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges);
+
+void PreCallRecordCmdClearDepthStencilImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearDepthStencilValue*             pDepthStencil,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges);
+
+void PostCallRecordCmdClearDepthStencilImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearDepthStencilValue*             pDepthStencil,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges);
+
+void PreCallRecordCmdClearAttachments(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    attachmentCount,
+    const VkClearAttachment*                    pAttachments,
+    uint32_t                                    rectCount,
+    const VkClearRect*                          pRects);
+
+void PostCallRecordCmdClearAttachments(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    attachmentCount,
+    const VkClearAttachment*                    pAttachments,
+    uint32_t                                    rectCount,
+    const VkClearRect*                          pRects);
+
+void PreCallRecordCmdResolveImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageResolve*                       pRegions);
+
+void PostCallRecordCmdResolveImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageResolve*                       pRegions);
+
+void PreCallRecordCmdSetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask);
+
+void PostCallRecordCmdSetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask);
+
+void PreCallRecordCmdResetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask);
+
+void PostCallRecordCmdResetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask);
+
+void PreCallRecordCmdWaitEvents(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    eventCount,
+    const VkEvent*                              pEvents,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers);
+
+void PostCallRecordCmdWaitEvents(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    eventCount,
+    const VkEvent*                              pEvents,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers);
+
+void PreCallRecordCmdPipelineBarrier(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    VkDependencyFlags                           dependencyFlags,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers);
+
+void PostCallRecordCmdPipelineBarrier(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    VkDependencyFlags                           dependencyFlags,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers);
+
+void PreCallRecordCmdBeginQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags);
+
+void PostCallRecordCmdBeginQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags);
+
+void PreCallRecordCmdEndQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query);
+
+void PostCallRecordCmdEndQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query);
+
+void PreCallRecordCmdResetQueryPool(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount);
+
+void PostCallRecordCmdResetQueryPool(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount);
+
+void PreCallRecordCmdWriteTimestamp(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query);
+
+void PostCallRecordCmdWriteTimestamp(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query);
+
+void PreCallRecordCmdCopyQueryPoolResults(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags);
+
+void PostCallRecordCmdCopyQueryPoolResults(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags);
+
+void PreCallRecordCmdPushConstants(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineLayout                            layout,
+    VkShaderStageFlags                          stageFlags,
+    uint32_t                                    offset,
+    uint32_t                                    size,
+    const void*                                 pValues);
+
+void PostCallRecordCmdPushConstants(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineLayout                            layout,
+    VkShaderStageFlags                          stageFlags,
+    uint32_t                                    offset,
+    uint32_t                                    size,
+    const void*                                 pValues);
+
+void PreCallRecordCmdBeginRenderPass(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    VkSubpassContents                           contents);
+
+void PostCallRecordCmdBeginRenderPass(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    VkSubpassContents                           contents);
+
+void PreCallRecordCmdNextSubpass(
+    VkCommandBuffer                             commandBuffer,
+    VkSubpassContents                           contents);
+
+void PostCallRecordCmdNextSubpass(
+    VkCommandBuffer                             commandBuffer,
+    VkSubpassContents                           contents);
+
+void PreCallRecordCmdEndRenderPass(
+    VkCommandBuffer                             commandBuffer);
+
+void PostCallRecordCmdEndRenderPass(
+    VkCommandBuffer                             commandBuffer);
+
+void PreCallRecordCmdExecuteCommands(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers);
+
+void PostCallRecordCmdExecuteCommands(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers);
+
+void PreCallRecordBindBufferMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos);
+
+void PostCallRecordBindBufferMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos,
+    VkResult                                    result);
+
+void PreCallRecordBindImageMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos);
+
+void PostCallRecordBindImageMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos,
+    VkResult                                    result);
+
+void PreCallRecordGetDeviceGroupPeerMemoryFeatures(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures);
+
+void PostCallRecordGetDeviceGroupPeerMemoryFeatures(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures);
+
+void PreCallRecordCmdSetDeviceMask(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask);
+
+void PostCallRecordCmdSetDeviceMask(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask);
+
+void PreCallRecordCmdDispatchBase(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ);
+
+void PostCallRecordCmdDispatchBase(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ);
+
+void PreCallRecordEnumeratePhysicalDeviceGroups(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties);
+
+void PostCallRecordEnumeratePhysicalDeviceGroups(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties,
+    VkResult                                    result);
+
+void PreCallRecordGetImageMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+void PostCallRecordGetImageMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+void PreCallRecordGetBufferMemoryRequirements2(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+void PostCallRecordGetBufferMemoryRequirements2(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+void PreCallRecordGetImageSparseMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements);
+
+void PostCallRecordGetImageSparseMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements);
+
+void PreCallRecordTrimCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags);
+
+void PostCallRecordTrimCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags);
+
+void PreCallRecordGetDeviceQueue2(
+    VkDevice                                    device,
+    const VkDeviceQueueInfo2*                   pQueueInfo,
+    VkQueue*                                    pQueue);
+
+void PostCallRecordGetDeviceQueue2(
+    VkDevice                                    device,
+    const VkDeviceQueueInfo2*                   pQueueInfo,
+    VkQueue*                                    pQueue);
+
+void PreCallRecordCreateSamplerYcbcrConversion(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion);
+
+void PostCallRecordCreateSamplerYcbcrConversion(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion,
+    VkResult                                    result);
+
+void PreCallRecordDestroySamplerYcbcrConversion(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroySamplerYcbcrConversion(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordCreateDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate);
+
+void PostCallRecordCreateDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate,
+    VkResult                                    result);
+
+void PreCallRecordDestroyDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroyDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordUpdateDescriptorSetWithTemplate(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData);
+
+void PostCallRecordUpdateDescriptorSetWithTemplate(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData);
+
+void PreCallRecordGetDescriptorSetLayoutSupport(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport);
+
+void PostCallRecordGetDescriptorSetLayoutSupport(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport);
+
+void PreCallRecordDestroySurfaceKHR(
+    VkInstance                                  instance,
+    VkSurfaceKHR                                surface,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroySurfaceKHR(
+    VkInstance                                  instance,
+    VkSurfaceKHR                                surface,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordGetPhysicalDeviceSurfaceSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    VkSurfaceKHR                                surface,
+    VkBool32*                                   pSupported);
+
+void PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    VkSurfaceKHR                                surface,
+    VkBool32*                                   pSupported,
+    VkResult                                    result);
+
+void PreCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilitiesKHR*                   pSurfaceCapabilities);
+
+void PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilitiesKHR*                   pSurfaceCapabilities,
+    VkResult                                    result);
+
+void PreCallRecordGetPhysicalDeviceSurfaceFormatsKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormatKHR*                         pSurfaceFormats);
+
+void PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormatKHR*                         pSurfaceFormats,
+    VkResult                                    result);
+
+void PreCallRecordGetPhysicalDeviceSurfacePresentModesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes);
+
+void PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes,
+    VkResult                                    result);
+
+void PreCallRecordCreateSwapchainKHR(
+    VkDevice                                    device,
+    const VkSwapchainCreateInfoKHR*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchain);
+
+void PostCallRecordCreateSwapchainKHR(
+    VkDevice                                    device,
+    const VkSwapchainCreateInfoKHR*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchain,
+    VkResult                                    result);
+
+void PreCallRecordDestroySwapchainKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroySwapchainKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordGetSwapchainImagesKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pSwapchainImageCount,
+    VkImage*                                    pSwapchainImages);
+
+void PostCallRecordGetSwapchainImagesKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pSwapchainImageCount,
+    VkImage*                                    pSwapchainImages,
+    VkResult                                    result);
+
+void PreCallRecordAcquireNextImageKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint64_t                                    timeout,
+    VkSemaphore                                 semaphore,
+    VkFence                                     fence,
+    uint32_t*                                   pImageIndex);
+
+void PostCallRecordAcquireNextImageKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint64_t                                    timeout,
+    VkSemaphore                                 semaphore,
+    VkFence                                     fence,
+    uint32_t*                                   pImageIndex,
+    VkResult                                    result);
+
+void PreCallRecordGetDeviceGroupPresentCapabilitiesKHR(
+    VkDevice                                    device,
+    VkDeviceGroupPresentCapabilitiesKHR*        pDeviceGroupPresentCapabilities);
+
+void PostCallRecordGetDeviceGroupPresentCapabilitiesKHR(
+    VkDevice                                    device,
+    VkDeviceGroupPresentCapabilitiesKHR*        pDeviceGroupPresentCapabilities,
+    VkResult                                    result);
+
+void PreCallRecordGetDeviceGroupSurfacePresentModesKHR(
+    VkDevice                                    device,
+    VkSurfaceKHR                                surface,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes);
+
+void PostCallRecordGetDeviceGroupSurfacePresentModesKHR(
+    VkDevice                                    device,
+    VkSurfaceKHR                                surface,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes,
+    VkResult                                    result);
+
+void PreCallRecordGetPhysicalDevicePresentRectanglesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pRectCount,
+    VkRect2D*                                   pRects);
+
+void PostCallRecordGetPhysicalDevicePresentRectanglesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pRectCount,
+    VkRect2D*                                   pRects,
+    VkResult                                    result);
+
+void PreCallRecordAcquireNextImage2KHR(
+    VkDevice                                    device,
+    const VkAcquireNextImageInfoKHR*            pAcquireInfo,
+    uint32_t*                                   pImageIndex);
+
+void PostCallRecordAcquireNextImage2KHR(
+    VkDevice                                    device,
+    const VkAcquireNextImageInfoKHR*            pAcquireInfo,
+    uint32_t*                                   pImageIndex,
+    VkResult                                    result);
+
+void PreCallRecordGetDisplayPlaneSupportedDisplaysKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    planeIndex,
+    uint32_t*                                   pDisplayCount,
+    VkDisplayKHR*                               pDisplays);
+
+void PostCallRecordGetDisplayPlaneSupportedDisplaysKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    planeIndex,
+    uint32_t*                                   pDisplayCount,
+    VkDisplayKHR*                               pDisplays,
+    VkResult                                    result);
+
+void PreCallRecordGetDisplayModePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModePropertiesKHR*                 pProperties);
+
+void PostCallRecordGetDisplayModePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModePropertiesKHR*                 pProperties,
+    VkResult                                    result);
+
+void PreCallRecordCreateDisplayModeKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    const VkDisplayModeCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDisplayModeKHR*                           pMode);
+
+void PostCallRecordCreateDisplayModeKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    const VkDisplayModeCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDisplayModeKHR*                           pMode,
+    VkResult                                    result);
+
+void PreCallRecordGetDisplayPlaneCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayModeKHR                            mode,
+    uint32_t                                    planeIndex,
+    VkDisplayPlaneCapabilitiesKHR*              pCapabilities);
+
+void PostCallRecordGetDisplayPlaneCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayModeKHR                            mode,
+    uint32_t                                    planeIndex,
+    VkDisplayPlaneCapabilitiesKHR*              pCapabilities,
+    VkResult                                    result);
+
+void PreCallRecordCreateDisplayPlaneSurfaceKHR(
+    VkInstance                                  instance,
+    const VkDisplaySurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+void PostCallRecordCreateDisplayPlaneSurfaceKHR(
+    VkInstance                                  instance,
+    const VkDisplaySurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result);
+
+void PreCallRecordCreateSharedSwapchainsKHR(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainCreateInfoKHR*             pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchains);
+
+void PostCallRecordCreateSharedSwapchainsKHR(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainCreateInfoKHR*             pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchains,
+    VkResult                                    result);
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+
+void PreCallRecordCreateXlibSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXlibSurfaceCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+void PostCallRecordCreateXlibSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXlibSurfaceCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_XLIB_KHR
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+
+void PreCallRecordCreateXcbSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXcbSurfaceCreateInfoKHR*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+void PostCallRecordCreateXcbSurfaceKHR(
+    VkInstance                                  instance,
+    const VkXcbSurfaceCreateInfoKHR*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_XCB_KHR
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+
+void PreCallRecordCreateWaylandSurfaceKHR(
+    VkInstance                                  instance,
+    const VkWaylandSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+void PostCallRecordCreateWaylandSurfaceKHR(
+    VkInstance                                  instance,
+    const VkWaylandSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+void PreCallRecordCreateAndroidSurfaceKHR(
+    VkInstance                                  instance,
+    const VkAndroidSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+void PostCallRecordCreateAndroidSurfaceKHR(
+    VkInstance                                  instance,
+    const VkAndroidSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+void PreCallRecordCreateWin32SurfaceKHR(
+    VkInstance                                  instance,
+    const VkWin32SurfaceCreateInfoKHR*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+void PostCallRecordCreateWin32SurfaceKHR(
+    VkInstance                                  instance,
+    const VkWin32SurfaceCreateInfoKHR*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+void PreCallRecordGetDeviceGroupPeerMemoryFeaturesKHR(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures);
+
+void PostCallRecordGetDeviceGroupPeerMemoryFeaturesKHR(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures);
+
+void PreCallRecordCmdSetDeviceMaskKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask);
+
+void PostCallRecordCmdSetDeviceMaskKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask);
+
+void PreCallRecordCmdDispatchBaseKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ);
+
+void PostCallRecordCmdDispatchBaseKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ);
+
+void PreCallRecordTrimCommandPoolKHR(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags);
+
+void PostCallRecordTrimCommandPoolKHR(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags);
+
+void PreCallRecordEnumeratePhysicalDeviceGroupsKHR(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties);
+
+void PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties,
+    VkResult                                    result);
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+void PreCallRecordGetMemoryWin32HandleKHR(
+    VkDevice                                    device,
+    const VkMemoryGetWin32HandleInfoKHR*        pGetWin32HandleInfo,
+    HANDLE*                                     pHandle);
+
+void PostCallRecordGetMemoryWin32HandleKHR(
+    VkDevice                                    device,
+    const VkMemoryGetWin32HandleInfoKHR*        pGetWin32HandleInfo,
+    HANDLE*                                     pHandle,
+    VkResult                                    result);
+
+void PreCallRecordGetMemoryWin32HandlePropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    HANDLE                                      handle,
+    VkMemoryWin32HandlePropertiesKHR*           pMemoryWin32HandleProperties);
+
+void PostCallRecordGetMemoryWin32HandlePropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    HANDLE                                      handle,
+    VkMemoryWin32HandlePropertiesKHR*           pMemoryWin32HandleProperties,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+void PreCallRecordGetMemoryFdKHR(
+    VkDevice                                    device,
+    const VkMemoryGetFdInfoKHR*                 pGetFdInfo,
+    int*                                        pFd);
+
+void PostCallRecordGetMemoryFdKHR(
+    VkDevice                                    device,
+    const VkMemoryGetFdInfoKHR*                 pGetFdInfo,
+    int*                                        pFd,
+    VkResult                                    result);
+
+void PreCallRecordGetMemoryFdPropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    int                                         fd,
+    VkMemoryFdPropertiesKHR*                    pMemoryFdProperties);
+
+void PostCallRecordGetMemoryFdPropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    int                                         fd,
+    VkMemoryFdPropertiesKHR*                    pMemoryFdProperties,
+    VkResult                                    result);
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+void PreCallRecordImportSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreWin32HandleInfoKHR*  pImportSemaphoreWin32HandleInfo);
+
+void PostCallRecordImportSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreWin32HandleInfoKHR*  pImportSemaphoreWin32HandleInfo,
+    VkResult                                    result);
+
+void PreCallRecordGetSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetWin32HandleInfoKHR*     pGetWin32HandleInfo,
+    HANDLE*                                     pHandle);
+
+void PostCallRecordGetSemaphoreWin32HandleKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetWin32HandleInfoKHR*     pGetWin32HandleInfo,
+    HANDLE*                                     pHandle,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+void PreCallRecordImportSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreFdInfoKHR*           pImportSemaphoreFdInfo);
+
+void PostCallRecordImportSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreFdInfoKHR*           pImportSemaphoreFdInfo,
+    VkResult                                    result);
+
+void PreCallRecordGetSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetFdInfoKHR*              pGetFdInfo,
+    int*                                        pFd);
+
+void PostCallRecordGetSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetFdInfoKHR*              pGetFdInfo,
+    int*                                        pFd,
+    VkResult                                    result);
+
+void PreCallRecordCmdPushDescriptorSetKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites);
+
+void PostCallRecordCmdPushDescriptorSetKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites);
+
+void PreCallRecordCmdPushDescriptorSetWithTemplateKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    const void*                                 pData);
+
+void PostCallRecordCmdPushDescriptorSetWithTemplateKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    const void*                                 pData);
+
+void PreCallRecordCreateDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate);
+
+void PostCallRecordCreateDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate,
+    VkResult                                    result);
+
+void PreCallRecordDestroyDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroyDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordUpdateDescriptorSetWithTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData);
+
+void PostCallRecordUpdateDescriptorSetWithTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData);
+
+void PreCallRecordCreateRenderPass2KHR(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo2KHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass);
+
+void PostCallRecordCreateRenderPass2KHR(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo2KHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass,
+    VkResult                                    result);
+
+void PreCallRecordCmdBeginRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo);
+
+void PostCallRecordCmdBeginRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo);
+
+void PreCallRecordCmdNextSubpass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo);
+
+void PostCallRecordCmdNextSubpass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassBeginInfoKHR*                pSubpassBeginInfo,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo);
+
+void PreCallRecordCmdEndRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo);
+
+void PostCallRecordCmdEndRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassEndInfoKHR*                  pSubpassEndInfo);
+
+void PreCallRecordGetSwapchainStatusKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain);
+
+void PostCallRecordGetSwapchainStatusKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkResult                                    result);
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+void PreCallRecordImportFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportFenceWin32HandleInfoKHR*      pImportFenceWin32HandleInfo);
+
+void PostCallRecordImportFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkImportFenceWin32HandleInfoKHR*      pImportFenceWin32HandleInfo,
+    VkResult                                    result);
+
+void PreCallRecordGetFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkFenceGetWin32HandleInfoKHR*         pGetWin32HandleInfo,
+    HANDLE*                                     pHandle);
+
+void PostCallRecordGetFenceWin32HandleKHR(
+    VkDevice                                    device,
+    const VkFenceGetWin32HandleInfoKHR*         pGetWin32HandleInfo,
+    HANDLE*                                     pHandle,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+void PreCallRecordImportFenceFdKHR(
+    VkDevice                                    device,
+    const VkImportFenceFdInfoKHR*               pImportFenceFdInfo);
+
+void PostCallRecordImportFenceFdKHR(
+    VkDevice                                    device,
+    const VkImportFenceFdInfoKHR*               pImportFenceFdInfo,
+    VkResult                                    result);
+
+void PreCallRecordGetFenceFdKHR(
+    VkDevice                                    device,
+    const VkFenceGetFdInfoKHR*                  pGetFdInfo,
+    int*                                        pFd);
+
+void PostCallRecordGetFenceFdKHR(
+    VkDevice                                    device,
+    const VkFenceGetFdInfoKHR*                  pGetFdInfo,
+    int*                                        pFd,
+    VkResult                                    result);
+
+void PreCallRecordAcquireProfilingLockKHR(
+    VkDevice                                    device,
+    const VkAcquireProfilingLockInfoKHR*        pInfo);
+
+void PostCallRecordAcquireProfilingLockKHR(
+    VkDevice                                    device,
+    const VkAcquireProfilingLockInfoKHR*        pInfo,
+    VkResult                                    result);
+
+void PreCallRecordReleaseProfilingLockKHR(
+    VkDevice                                    device);
+
+void PostCallRecordReleaseProfilingLockKHR(
+    VkDevice                                    device);
+
+void PreCallRecordGetDisplayModeProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModeProperties2KHR*                pProperties);
+
+void PostCallRecordGetDisplayModeProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModeProperties2KHR*                pProperties,
+    VkResult                                    result);
+
+void PreCallRecordGetImageMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+void PostCallRecordGetImageMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+void PreCallRecordGetBufferMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+void PostCallRecordGetBufferMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+void PreCallRecordGetImageSparseMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements);
+
+void PostCallRecordGetImageSparseMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements);
+
+void PreCallRecordCreateSamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion);
+
+void PostCallRecordCreateSamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion,
+    VkResult                                    result);
+
+void PreCallRecordDestroySamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroySamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordBindBufferMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos);
+
+void PostCallRecordBindBufferMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos,
+    VkResult                                    result);
+
+void PreCallRecordBindImageMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos);
+
+void PostCallRecordBindImageMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos,
+    VkResult                                    result);
+
+void PreCallRecordGetDescriptorSetLayoutSupportKHR(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport);
+
+void PostCallRecordGetDescriptorSetLayoutSupportKHR(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport);
+
+void PreCallRecordCmdDrawIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+void PostCallRecordCmdDrawIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+void PreCallRecordCmdDrawIndexedIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+void PostCallRecordCmdDrawIndexedIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+void PreCallRecordGetSemaphoreCounterValueKHR(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    uint64_t*                                   pValue);
+
+void PostCallRecordGetSemaphoreCounterValueKHR(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    uint64_t*                                   pValue,
+    VkResult                                    result);
+
+void PreCallRecordWaitSemaphoresKHR(
+    VkDevice                                    device,
+    const VkSemaphoreWaitInfoKHR*               pWaitInfo,
+    uint64_t                                    timeout);
+
+void PostCallRecordWaitSemaphoresKHR(
+    VkDevice                                    device,
+    const VkSemaphoreWaitInfoKHR*               pWaitInfo,
+    uint64_t                                    timeout,
+    VkResult                                    result);
+
+void PreCallRecordSignalSemaphoreKHR(
+    VkDevice                                    device,
+    const VkSemaphoreSignalInfoKHR*             pSignalInfo);
+
+void PostCallRecordSignalSemaphoreKHR(
+    VkDevice                                    device,
+    const VkSemaphoreSignalInfoKHR*             pSignalInfo,
+    VkResult                                    result);
+
+void PreCallRecordGetBufferDeviceAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo);
+
+void PostCallRecordGetBufferDeviceAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo,
+    VkDeviceAddress                             result);
+
+void PreCallRecordGetBufferOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo);
+
+void PostCallRecordGetBufferOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo);
+
+void PreCallRecordGetDeviceMemoryOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo);
+
+void PostCallRecordGetDeviceMemoryOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo);
+
+void PreCallRecordGetPipelineExecutablePropertiesKHR(
+    VkDevice                                    device,
+    const VkPipelineInfoKHR*                    pPipelineInfo,
+    uint32_t*                                   pExecutableCount,
+    VkPipelineExecutablePropertiesKHR*          pProperties);
+
+void PostCallRecordGetPipelineExecutablePropertiesKHR(
+    VkDevice                                    device,
+    const VkPipelineInfoKHR*                    pPipelineInfo,
+    uint32_t*                                   pExecutableCount,
+    VkPipelineExecutablePropertiesKHR*          pProperties,
+    VkResult                                    result);
+
+void PreCallRecordGetPipelineExecutableStatisticsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pStatisticCount,
+    VkPipelineExecutableStatisticKHR*           pStatistics);
+
+void PostCallRecordGetPipelineExecutableStatisticsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pStatisticCount,
+    VkPipelineExecutableStatisticKHR*           pStatistics,
+    VkResult                                    result);
+
+void PreCallRecordGetPipelineExecutableInternalRepresentationsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pInternalRepresentationCount,
+    VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations);
+
+void PostCallRecordGetPipelineExecutableInternalRepresentationsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pInternalRepresentationCount,
+    VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations,
+    VkResult                                    result);
+
+void PreCallRecordCreateDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    const VkDebugReportCallbackCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugReportCallbackEXT*                   pCallback);
+
+void PostCallRecordCreateDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    const VkDebugReportCallbackCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugReportCallbackEXT*                   pCallback,
+    VkResult                                    result);
+
+void PreCallRecordDestroyDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    VkDebugReportCallbackEXT                    callback,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroyDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    VkDebugReportCallbackEXT                    callback,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordDebugReportMessageEXT(
+    VkInstance                                  instance,
+    VkDebugReportFlagsEXT                       flags,
+    VkDebugReportObjectTypeEXT                  objectType,
+    uint64_t                                    object,
+    size_t                                      location,
+    int32_t                                     messageCode,
+    const char*                                 pLayerPrefix,
+    const char*                                 pMessage);
+
+void PostCallRecordDebugReportMessageEXT(
+    VkInstance                                  instance,
+    VkDebugReportFlagsEXT                       flags,
+    VkDebugReportObjectTypeEXT                  objectType,
+    uint64_t                                    object,
+    size_t                                      location,
+    int32_t                                     messageCode,
+    const char*                                 pLayerPrefix,
+    const char*                                 pMessage);
+// TODO - not wrapping EXT function vkDebugMarkerSetObjectTagEXT
+// TODO - not wrapping EXT function vkDebugMarkerSetObjectNameEXT
+// TODO - not wrapping EXT function vkCmdDebugMarkerBeginEXT
+// TODO - not wrapping EXT function vkCmdDebugMarkerEndEXT
+// TODO - not wrapping EXT function vkCmdDebugMarkerInsertEXT
+
+void PreCallRecordCmdBindTransformFeedbackBuffersEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets,
+    const VkDeviceSize*                         pSizes);
+
+void PostCallRecordCmdBindTransformFeedbackBuffersEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets,
+    const VkDeviceSize*                         pSizes);
+
+void PreCallRecordCmdBeginTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets);
+
+void PostCallRecordCmdBeginTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets);
+
+void PreCallRecordCmdEndTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets);
+
+void PostCallRecordCmdEndTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets);
+
+void PreCallRecordCmdBeginQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags,
+    uint32_t                                    index);
+
+void PostCallRecordCmdBeginQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags,
+    uint32_t                                    index);
+
+void PreCallRecordCmdEndQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    uint32_t                                    index);
+
+void PostCallRecordCmdEndQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    uint32_t                                    index);
+
+void PreCallRecordCmdDrawIndirectByteCountEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstInstance,
+    VkBuffer                                    counterBuffer,
+    VkDeviceSize                                counterBufferOffset,
+    uint32_t                                    counterOffset,
+    uint32_t                                    vertexStride);
+
+void PostCallRecordCmdDrawIndirectByteCountEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstInstance,
+    VkBuffer                                    counterBuffer,
+    VkDeviceSize                                counterBufferOffset,
+    uint32_t                                    counterOffset,
+    uint32_t                                    vertexStride);
+
+void PreCallRecordGetImageViewHandleNVX(
+    VkDevice                                    device,
+    const VkImageViewHandleInfoNVX*             pInfo);
+
+void PostCallRecordGetImageViewHandleNVX(
+    VkDevice                                    device,
+    const VkImageViewHandleInfoNVX*             pInfo);
+
+void PreCallRecordCmdDrawIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+void PostCallRecordCmdDrawIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+void PreCallRecordCmdDrawIndexedIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+void PostCallRecordCmdDrawIndexedIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+void PreCallRecordGetShaderInfoAMD(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    VkShaderStageFlagBits                       shaderStage,
+    VkShaderInfoTypeAMD                         infoType,
+    size_t*                                     pInfoSize,
+    void*                                       pInfo);
+
+void PostCallRecordGetShaderInfoAMD(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    VkShaderStageFlagBits                       shaderStage,
+    VkShaderInfoTypeAMD                         infoType,
+    size_t*                                     pInfoSize,
+    void*                                       pInfo,
+    VkResult                                    result);
+
+#ifdef VK_USE_PLATFORM_GGP
+
+void PreCallRecordCreateStreamDescriptorSurfaceGGP(
+    VkInstance                                  instance,
+    const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+void PostCallRecordCreateStreamDescriptorSurfaceGGP(
+    VkInstance                                  instance,
+    const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_GGP
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+void PreCallRecordGetMemoryWin32HandleNV(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkExternalMemoryHandleTypeFlagsNV           handleType,
+    HANDLE*                                     pHandle);
+
+void PostCallRecordGetMemoryWin32HandleNV(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkExternalMemoryHandleTypeFlagsNV           handleType,
+    HANDLE*                                     pHandle,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_VI_NN
+
+void PreCallRecordCreateViSurfaceNN(
+    VkInstance                                  instance,
+    const VkViSurfaceCreateInfoNN*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+void PostCallRecordCreateViSurfaceNN(
+    VkInstance                                  instance,
+    const VkViSurfaceCreateInfoNN*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_VI_NN
+
+void PreCallRecordCmdBeginConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkConditionalRenderingBeginInfoEXT*   pConditionalRenderingBegin);
+
+void PostCallRecordCmdBeginConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkConditionalRenderingBeginInfoEXT*   pConditionalRenderingBegin);
+
+void PreCallRecordCmdEndConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer);
+
+void PostCallRecordCmdEndConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer);
+
+void PreCallRecordCmdProcessCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdProcessCommandsInfoNVX*          pProcessCommandsInfo);
+
+void PostCallRecordCmdProcessCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdProcessCommandsInfoNVX*          pProcessCommandsInfo);
+
+void PreCallRecordCmdReserveSpaceForCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdReserveSpaceForCommandsInfoNVX*  pReserveSpaceInfo);
+
+void PostCallRecordCmdReserveSpaceForCommandsNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCmdReserveSpaceForCommandsInfoNVX*  pReserveSpaceInfo);
+
+void PreCallRecordCreateIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkIndirectCommandsLayoutNVX*                pIndirectCommandsLayout);
+
+void PostCallRecordCreateIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkIndirectCommandsLayoutNVX*                pIndirectCommandsLayout,
+    VkResult                                    result);
+
+void PreCallRecordDestroyIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    VkIndirectCommandsLayoutNVX                 indirectCommandsLayout,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroyIndirectCommandsLayoutNVX(
+    VkDevice                                    device,
+    VkIndirectCommandsLayoutNVX                 indirectCommandsLayout,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordCreateObjectTableNVX(
+    VkDevice                                    device,
+    const VkObjectTableCreateInfoNVX*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkObjectTableNVX*                           pObjectTable);
+
+void PostCallRecordCreateObjectTableNVX(
+    VkDevice                                    device,
+    const VkObjectTableCreateInfoNVX*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkObjectTableNVX*                           pObjectTable,
+    VkResult                                    result);
+
+void PreCallRecordDestroyObjectTableNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroyObjectTableNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordRegisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectTableEntryNVX* const*         ppObjectTableEntries,
+    const uint32_t*                             pObjectIndices);
+
+void PostCallRecordRegisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectTableEntryNVX* const*         ppObjectTableEntries,
+    const uint32_t*                             pObjectIndices,
+    VkResult                                    result);
+
+void PreCallRecordUnregisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectEntryTypeNVX*                 pObjectEntryTypes,
+    const uint32_t*                             pObjectIndices);
+
+void PostCallRecordUnregisterObjectsNVX(
+    VkDevice                                    device,
+    VkObjectTableNVX                            objectTable,
+    uint32_t                                    objectCount,
+    const VkObjectEntryTypeNVX*                 pObjectEntryTypes,
+    const uint32_t*                             pObjectIndices,
+    VkResult                                    result);
+
+void PreCallRecordCmdSetViewportWScalingNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewportWScalingNV*                 pViewportWScalings);
+
+void PostCallRecordCmdSetViewportWScalingNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewportWScalingNV*                 pViewportWScalings);
+
+void PreCallRecordReleaseDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display);
+
+void PostCallRecordReleaseDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    VkResult                                    result);
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+void PreCallRecordAcquireXlibDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    VkDisplayKHR                                display);
+
+void PostCallRecordAcquireXlibDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    Display*                                    dpy,
+    VkDisplayKHR                                display,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+void PreCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilities2EXT*                  pSurfaceCapabilities);
+
+void PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilities2EXT*                  pSurfaceCapabilities,
+    VkResult                                    result);
+
+void PreCallRecordDisplayPowerControlEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayPowerInfoEXT*                pDisplayPowerInfo);
+
+void PostCallRecordDisplayPowerControlEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayPowerInfoEXT*                pDisplayPowerInfo,
+    VkResult                                    result);
+
+void PreCallRecordRegisterDeviceEventEXT(
+    VkDevice                                    device,
+    const VkDeviceEventInfoEXT*                 pDeviceEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence);
+
+void PostCallRecordRegisterDeviceEventEXT(
+    VkDevice                                    device,
+    const VkDeviceEventInfoEXT*                 pDeviceEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence,
+    VkResult                                    result);
+
+void PreCallRecordRegisterDisplayEventEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayEventInfoEXT*                pDisplayEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence);
+
+void PostCallRecordRegisterDisplayEventEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayEventInfoEXT*                pDisplayEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence,
+    VkResult                                    result);
+
+void PreCallRecordGetSwapchainCounterEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkSurfaceCounterFlagBitsEXT                 counter,
+    uint64_t*                                   pCounterValue);
+
+void PostCallRecordGetSwapchainCounterEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkSurfaceCounterFlagBitsEXT                 counter,
+    uint64_t*                                   pCounterValue,
+    VkResult                                    result);
+
+void PreCallRecordGetRefreshCycleDurationGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkRefreshCycleDurationGOOGLE*               pDisplayTimingProperties);
+
+void PostCallRecordGetRefreshCycleDurationGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkRefreshCycleDurationGOOGLE*               pDisplayTimingProperties,
+    VkResult                                    result);
+
+void PreCallRecordGetPastPresentationTimingGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pPresentationTimingCount,
+    VkPastPresentationTimingGOOGLE*             pPresentationTimings);
+
+void PostCallRecordGetPastPresentationTimingGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pPresentationTimingCount,
+    VkPastPresentationTimingGOOGLE*             pPresentationTimings,
+    VkResult                                    result);
+
+void PreCallRecordCmdSetDiscardRectangleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstDiscardRectangle,
+    uint32_t                                    discardRectangleCount,
+    const VkRect2D*                             pDiscardRectangles);
+
+void PostCallRecordCmdSetDiscardRectangleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstDiscardRectangle,
+    uint32_t                                    discardRectangleCount,
+    const VkRect2D*                             pDiscardRectangles);
+
+void PreCallRecordSetHdrMetadataEXT(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainKHR*                       pSwapchains,
+    const VkHdrMetadataEXT*                     pMetadata);
+
+void PostCallRecordSetHdrMetadataEXT(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainKHR*                       pSwapchains,
+    const VkHdrMetadataEXT*                     pMetadata);
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+
+void PreCallRecordCreateIOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkIOSSurfaceCreateInfoMVK*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+void PostCallRecordCreateIOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkIOSSurfaceCreateInfoMVK*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_IOS_MVK
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+
+void PreCallRecordCreateMacOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkMacOSSurfaceCreateInfoMVK*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+void PostCallRecordCreateMacOSSurfaceMVK(
+    VkInstance                                  instance,
+    const VkMacOSSurfaceCreateInfoMVK*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_MACOS_MVK
+// TODO - not wrapping EXT function vkSetDebugUtilsObjectNameEXT
+// TODO - not wrapping EXT function vkSetDebugUtilsObjectTagEXT
+
+void PreCallRecordQueueBeginDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+
+void PostCallRecordQueueBeginDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+
+void PreCallRecordQueueEndDebugUtilsLabelEXT(
+    VkQueue                                     queue);
+
+void PostCallRecordQueueEndDebugUtilsLabelEXT(
+    VkQueue                                     queue);
+
+void PreCallRecordQueueInsertDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+
+void PostCallRecordQueueInsertDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+
+void PreCallRecordCmdBeginDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+
+void PostCallRecordCmdBeginDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+
+void PreCallRecordCmdEndDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer);
+
+void PostCallRecordCmdEndDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer);
+
+void PreCallRecordCmdInsertDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+
+void PostCallRecordCmdInsertDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+
+void PreCallRecordCreateDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    const VkDebugUtilsMessengerCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugUtilsMessengerEXT*                   pMessenger);
+
+void PostCallRecordCreateDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    const VkDebugUtilsMessengerCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugUtilsMessengerEXT*                   pMessenger,
+    VkResult                                    result);
+
+void PreCallRecordDestroyDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessengerEXT                    messenger,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroyDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessengerEXT                    messenger,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordSubmitDebugUtilsMessageEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessageSeverityFlagBitsEXT      messageSeverity,
+    VkDebugUtilsMessageTypeFlagsEXT             messageTypes,
+    const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData);
+
+void PostCallRecordSubmitDebugUtilsMessageEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessageSeverityFlagBitsEXT      messageSeverity,
+    VkDebugUtilsMessageTypeFlagsEXT             messageTypes,
+    const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData);
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+void PreCallRecordGetAndroidHardwareBufferPropertiesANDROID(
+    VkDevice                                    device,
+    const struct AHardwareBuffer*               buffer,
+    VkAndroidHardwareBufferPropertiesANDROID*   pProperties);
+
+void PostCallRecordGetAndroidHardwareBufferPropertiesANDROID(
+    VkDevice                                    device,
+    const struct AHardwareBuffer*               buffer,
+    VkAndroidHardwareBufferPropertiesANDROID*   pProperties,
+    VkResult                                    result);
+
+void PreCallRecordGetMemoryAndroidHardwareBufferANDROID(
+    VkDevice                                    device,
+    const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
+    struct AHardwareBuffer**                    pBuffer);
+
+void PostCallRecordGetMemoryAndroidHardwareBufferANDROID(
+    VkDevice                                    device,
+    const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
+    struct AHardwareBuffer**                    pBuffer,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+void PreCallRecordCmdSetSampleLocationsEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkSampleLocationsInfoEXT*             pSampleLocationsInfo);
+
+void PostCallRecordCmdSetSampleLocationsEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkSampleLocationsInfoEXT*             pSampleLocationsInfo);
+
+void PreCallRecordGetImageDrmFormatModifierPropertiesEXT(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkImageDrmFormatModifierPropertiesEXT*      pProperties);
+
+void PostCallRecordGetImageDrmFormatModifierPropertiesEXT(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkImageDrmFormatModifierPropertiesEXT*      pProperties,
+    VkResult                                    result);
+
+void PreCallRecordCreateValidationCacheEXT(
+    VkDevice                                    device,
+    const VkValidationCacheCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkValidationCacheEXT*                       pValidationCache);
+
+void PostCallRecordCreateValidationCacheEXT(
+    VkDevice                                    device,
+    const VkValidationCacheCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkValidationCacheEXT*                       pValidationCache,
+    VkResult                                    result);
+
+void PreCallRecordDestroyValidationCacheEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroyValidationCacheEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordMergeValidationCachesEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkValidationCacheEXT*                 pSrcCaches);
+
+void PostCallRecordMergeValidationCachesEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkValidationCacheEXT*                 pSrcCaches,
+    VkResult                                    result);
+
+void PreCallRecordGetValidationCacheDataEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    size_t*                                     pDataSize,
+    void*                                       pData);
+
+void PostCallRecordGetValidationCacheDataEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    size_t*                                     pDataSize,
+    void*                                       pData,
+    VkResult                                    result);
+
+void PreCallRecordCmdBindShadingRateImageNV(
+    VkCommandBuffer                             commandBuffer,
+    VkImageView                                 imageView,
+    VkImageLayout                               imageLayout);
+
+void PostCallRecordCmdBindShadingRateImageNV(
+    VkCommandBuffer                             commandBuffer,
+    VkImageView                                 imageView,
+    VkImageLayout                               imageLayout);
+
+void PreCallRecordCmdSetViewportShadingRatePaletteNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkShadingRatePaletteNV*               pShadingRatePalettes);
+
+void PostCallRecordCmdSetViewportShadingRatePaletteNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkShadingRatePaletteNV*               pShadingRatePalettes);
+
+void PreCallRecordCmdSetCoarseSampleOrderNV(
+    VkCommandBuffer                             commandBuffer,
+    VkCoarseSampleOrderTypeNV                   sampleOrderType,
+    uint32_t                                    customSampleOrderCount,
+    const VkCoarseSampleOrderCustomNV*          pCustomSampleOrders);
+
+void PostCallRecordCmdSetCoarseSampleOrderNV(
+    VkCommandBuffer                             commandBuffer,
+    VkCoarseSampleOrderTypeNV                   sampleOrderType,
+    uint32_t                                    customSampleOrderCount,
+    const VkCoarseSampleOrderCustomNV*          pCustomSampleOrders);
+
+void PreCallRecordCreateAccelerationStructureNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureCreateInfoNV*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkAccelerationStructureNV*                  pAccelerationStructure);
+
+void PostCallRecordCreateAccelerationStructureNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureCreateInfoNV*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkAccelerationStructureNV*                  pAccelerationStructure,
+    VkResult                                    result);
+
+void PreCallRecordDestroyAccelerationStructureNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PostCallRecordDestroyAccelerationStructureNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    const VkAllocationCallbacks*                pAllocator);
+
+void PreCallRecordGetAccelerationStructureMemoryRequirementsNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
+    VkMemoryRequirements2KHR*                   pMemoryRequirements);
+
+void PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
+    VkMemoryRequirements2KHR*                   pMemoryRequirements);
+
+void PreCallRecordBindAccelerationStructureMemoryNV(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindAccelerationStructureMemoryInfoNV* pBindInfos);
+
+void PostCallRecordBindAccelerationStructureMemoryNV(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindAccelerationStructureMemoryInfoNV* pBindInfos,
+    VkResult                                    result);
+
+void PreCallRecordCmdBuildAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    const VkAccelerationStructureInfoNV*        pInfo,
+    VkBuffer                                    instanceData,
+    VkDeviceSize                                instanceOffset,
+    VkBool32                                    update,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkBuffer                                    scratch,
+    VkDeviceSize                                scratchOffset);
+
+void PostCallRecordCmdBuildAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    const VkAccelerationStructureInfoNV*        pInfo,
+    VkBuffer                                    instanceData,
+    VkDeviceSize                                instanceOffset,
+    VkBool32                                    update,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkBuffer                                    scratch,
+    VkDeviceSize                                scratchOffset);
+
+void PreCallRecordCmdCopyAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkCopyAccelerationStructureModeNV           mode);
+
+void PostCallRecordCmdCopyAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkCopyAccelerationStructureModeNV           mode);
+
+void PreCallRecordCmdTraceRaysNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    raygenShaderBindingTableBuffer,
+    VkDeviceSize                                raygenShaderBindingOffset,
+    VkBuffer                                    missShaderBindingTableBuffer,
+    VkDeviceSize                                missShaderBindingOffset,
+    VkDeviceSize                                missShaderBindingStride,
+    VkBuffer                                    hitShaderBindingTableBuffer,
+    VkDeviceSize                                hitShaderBindingOffset,
+    VkDeviceSize                                hitShaderBindingStride,
+    VkBuffer                                    callableShaderBindingTableBuffer,
+    VkDeviceSize                                callableShaderBindingOffset,
+    VkDeviceSize                                callableShaderBindingStride,
+    uint32_t                                    width,
+    uint32_t                                    height,
+    uint32_t                                    depth);
+
+void PostCallRecordCmdTraceRaysNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    raygenShaderBindingTableBuffer,
+    VkDeviceSize                                raygenShaderBindingOffset,
+    VkBuffer                                    missShaderBindingTableBuffer,
+    VkDeviceSize                                missShaderBindingOffset,
+    VkDeviceSize                                missShaderBindingStride,
+    VkBuffer                                    hitShaderBindingTableBuffer,
+    VkDeviceSize                                hitShaderBindingOffset,
+    VkDeviceSize                                hitShaderBindingStride,
+    VkBuffer                                    callableShaderBindingTableBuffer,
+    VkDeviceSize                                callableShaderBindingOffset,
+    VkDeviceSize                                callableShaderBindingStride,
+    uint32_t                                    width,
+    uint32_t                                    height,
+    uint32_t                                    depth);
+
+void PreCallRecordCreateRayTracingPipelinesNV(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkRayTracingPipelineCreateInfoNV*     pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines);
+
+void PostCallRecordCreateRayTracingPipelinesNV(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkRayTracingPipelineCreateInfoNV*     pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines,
+    VkResult                                    result);
+
+void PreCallRecordGetRayTracingShaderGroupHandlesNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    firstGroup,
+    uint32_t                                    groupCount,
+    size_t                                      dataSize,
+    void*                                       pData);
+
+void PostCallRecordGetRayTracingShaderGroupHandlesNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    firstGroup,
+    uint32_t                                    groupCount,
+    size_t                                      dataSize,
+    void*                                       pData,
+    VkResult                                    result);
+
+void PreCallRecordGetAccelerationStructureHandleNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    size_t                                      dataSize,
+    void*                                       pData);
+
+void PostCallRecordGetAccelerationStructureHandleNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    size_t                                      dataSize,
+    void*                                       pData,
+    VkResult                                    result);
+
+void PreCallRecordCmdWriteAccelerationStructuresPropertiesNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    accelerationStructureCount,
+    const VkAccelerationStructureNV*            pAccelerationStructures,
+    VkQueryType                                 queryType,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery);
+
+void PostCallRecordCmdWriteAccelerationStructuresPropertiesNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    accelerationStructureCount,
+    const VkAccelerationStructureNV*            pAccelerationStructures,
+    VkQueryType                                 queryType,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery);
+
+void PreCallRecordCompileDeferredNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    shader);
+
+void PostCallRecordCompileDeferredNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    shader,
+    VkResult                                    result);
+
+void PreCallRecordGetMemoryHostPointerPropertiesEXT(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    const void*                                 pHostPointer,
+    VkMemoryHostPointerPropertiesEXT*           pMemoryHostPointerProperties);
+
+void PostCallRecordGetMemoryHostPointerPropertiesEXT(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    const void*                                 pHostPointer,
+    VkMemoryHostPointerPropertiesEXT*           pMemoryHostPointerProperties,
+    VkResult                                    result);
+
+void PreCallRecordCmdWriteBufferMarkerAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    uint32_t                                    marker);
+
+void PostCallRecordCmdWriteBufferMarkerAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    uint32_t                                    marker);
+
+void PreCallRecordGetCalibratedTimestampsEXT(
+    VkDevice                                    device,
+    uint32_t                                    timestampCount,
+    const VkCalibratedTimestampInfoEXT*         pTimestampInfos,
+    uint64_t*                                   pTimestamps,
+    uint64_t*                                   pMaxDeviation);
+
+void PostCallRecordGetCalibratedTimestampsEXT(
+    VkDevice                                    device,
+    uint32_t                                    timestampCount,
+    const VkCalibratedTimestampInfoEXT*         pTimestampInfos,
+    uint64_t*                                   pTimestamps,
+    uint64_t*                                   pMaxDeviation,
+    VkResult                                    result);
+
+#ifdef VK_USE_PLATFORM_GGP
+#endif // VK_USE_PLATFORM_GGP
+
+void PreCallRecordCmdDrawMeshTasksNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    taskCount,
+    uint32_t                                    firstTask);
+
+void PostCallRecordCmdDrawMeshTasksNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    taskCount,
+    uint32_t                                    firstTask);
+
+void PreCallRecordCmdDrawMeshTasksIndirectNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride);
+
+void PostCallRecordCmdDrawMeshTasksIndirectNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride);
+
+void PreCallRecordCmdDrawMeshTasksIndirectCountNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+void PostCallRecordCmdDrawMeshTasksIndirectCountNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+void PreCallRecordCmdSetExclusiveScissorNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstExclusiveScissor,
+    uint32_t                                    exclusiveScissorCount,
+    const VkRect2D*                             pExclusiveScissors);
+
+void PostCallRecordCmdSetExclusiveScissorNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstExclusiveScissor,
+    uint32_t                                    exclusiveScissorCount,
+    const VkRect2D*                             pExclusiveScissors);
+
+void PreCallRecordCmdSetCheckpointNV(
+    VkCommandBuffer                             commandBuffer,
+    const void*                                 pCheckpointMarker);
+
+void PostCallRecordCmdSetCheckpointNV(
+    VkCommandBuffer                             commandBuffer,
+    const void*                                 pCheckpointMarker);
+
+void PreCallRecordGetQueueCheckpointDataNV(
+    VkQueue                                     queue,
+    uint32_t*                                   pCheckpointDataCount,
+    VkCheckpointDataNV*                         pCheckpointData);
+
+void PostCallRecordGetQueueCheckpointDataNV(
+    VkQueue                                     queue,
+    uint32_t*                                   pCheckpointDataCount,
+    VkCheckpointDataNV*                         pCheckpointData);
+
+void PreCallRecordInitializePerformanceApiINTEL(
+    VkDevice                                    device,
+    const VkInitializePerformanceApiInfoINTEL*  pInitializeInfo);
+
+void PostCallRecordInitializePerformanceApiINTEL(
+    VkDevice                                    device,
+    const VkInitializePerformanceApiInfoINTEL*  pInitializeInfo,
+    VkResult                                    result);
+
+void PreCallRecordUninitializePerformanceApiINTEL(
+    VkDevice                                    device);
+
+void PostCallRecordUninitializePerformanceApiINTEL(
+    VkDevice                                    device);
+
+void PreCallRecordCmdSetPerformanceMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceMarkerInfoINTEL*         pMarkerInfo);
+
+void PostCallRecordCmdSetPerformanceMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceMarkerInfoINTEL*         pMarkerInfo,
+    VkResult                                    result);
+
+void PreCallRecordCmdSetPerformanceStreamMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceStreamMarkerInfoINTEL*   pMarkerInfo);
+
+void PostCallRecordCmdSetPerformanceStreamMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceStreamMarkerInfoINTEL*   pMarkerInfo,
+    VkResult                                    result);
+
+void PreCallRecordCmdSetPerformanceOverrideINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceOverrideInfoINTEL*       pOverrideInfo);
+
+void PostCallRecordCmdSetPerformanceOverrideINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceOverrideInfoINTEL*       pOverrideInfo,
+    VkResult                                    result);
+
+void PreCallRecordAcquirePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo,
+    VkPerformanceConfigurationINTEL*            pConfiguration);
+
+void PostCallRecordAcquirePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo,
+    VkPerformanceConfigurationINTEL*            pConfiguration,
+    VkResult                                    result);
+
+void PreCallRecordReleasePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    VkPerformanceConfigurationINTEL             configuration);
+
+void PostCallRecordReleasePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    VkPerformanceConfigurationINTEL             configuration,
+    VkResult                                    result);
+
+void PreCallRecordQueueSetPerformanceConfigurationINTEL(
+    VkQueue                                     queue,
+    VkPerformanceConfigurationINTEL             configuration);
+
+void PostCallRecordQueueSetPerformanceConfigurationINTEL(
+    VkQueue                                     queue,
+    VkPerformanceConfigurationINTEL             configuration,
+    VkResult                                    result);
+
+void PreCallRecordGetPerformanceParameterINTEL(
+    VkDevice                                    device,
+    VkPerformanceParameterTypeINTEL             parameter,
+    VkPerformanceValueINTEL*                    pValue);
+
+void PostCallRecordGetPerformanceParameterINTEL(
+    VkDevice                                    device,
+    VkPerformanceParameterTypeINTEL             parameter,
+    VkPerformanceValueINTEL*                    pValue,
+    VkResult                                    result);
+
+void PreCallRecordSetLocalDimmingAMD(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapChain,
+    VkBool32                                    localDimmingEnable);
+
+void PostCallRecordSetLocalDimmingAMD(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapChain,
+    VkBool32                                    localDimmingEnable);
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+
+void PreCallRecordCreateImagePipeSurfaceFUCHSIA(
+    VkInstance                                  instance,
+    const VkImagePipeSurfaceCreateInfoFUCHSIA*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+void PostCallRecordCreateImagePipeSurfaceFUCHSIA(
+    VkInstance                                  instance,
+    const VkImagePipeSurfaceCreateInfoFUCHSIA*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_FUCHSIA
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+
+void PreCallRecordCreateMetalSurfaceEXT(
+    VkInstance                                  instance,
+    const VkMetalSurfaceCreateInfoEXT*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+void PostCallRecordCreateMetalSurfaceEXT(
+    VkInstance                                  instance,
+    const VkMetalSurfaceCreateInfoEXT*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_METAL_EXT
+
+void PreCallRecordGetBufferDeviceAddressEXT(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo);
+
+void PostCallRecordGetBufferDeviceAddressEXT(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfoKHR*         pInfo,
+    VkDeviceAddress                             result);
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+void PreCallRecordAcquireFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain);
+
+void PostCallRecordAcquireFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkResult                                    result);
+
+void PreCallRecordReleaseFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain);
+
+void PostCallRecordReleaseFullScreenExclusiveModeEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkResult                                    result);
+
+void PreCallRecordGetDeviceGroupSurfacePresentModes2EXT(
+    VkDevice                                    device,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes);
+
+void PostCallRecordGetDeviceGroupSurfacePresentModes2EXT(
+    VkDevice                                    device,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes,
+    VkResult                                    result);
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+void PreCallRecordCreateHeadlessSurfaceEXT(
+    VkInstance                                  instance,
+    const VkHeadlessSurfaceCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+void PostCallRecordCreateHeadlessSurfaceEXT(
+    VkInstance                                  instance,
+    const VkHeadlessSurfaceCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface,
+    VkResult                                    result);
+
+void PreCallRecordCmdSetLineStippleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    lineStippleFactor,
+    uint16_t                                    lineStipplePattern);
+
+void PostCallRecordCmdSetLineStippleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    lineStippleFactor,
+    uint16_t                                    lineStipplePattern);
+
+void PreCallRecordResetQueryPoolEXT(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount);
+
+void PostCallRecordResetQueryPoolEXT(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount);
+};
diff --git a/src/third_party/vulkan-validation-layers/src/layers/generated/vk_dispatch_table_helper.h b/src/third_party/vulkan-validation-layers/src/layers/generated/vk_dispatch_table_helper.h
new file mode 100644
index 0000000..d2ae832
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/generated/vk_dispatch_table_helper.h
@@ -0,0 +1,1016 @@
+#pragma once
+// *** THIS FILE IS GENERATED - DO NOT EDIT ***
+// See dispatch_helper_generator.py for modifications
+
+/*
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Jon Ashburn <jon@lunarg.com>
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ */
+
+#include <vulkan/vulkan.h>
+#include <vulkan/vk_layer.h>
+#include <cstring>
+#include <string>
+#include <unordered_set>
+#include <unordered_map>
+#include "vk_layer_dispatch_table.h"
+#include "vk_extension_helper.h"
+
+static VKAPI_ATTR VkResult VKAPI_CALL StubBindBufferMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubBindImageMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubGetDeviceGroupPeerMemoryFeatures(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdDispatchBase(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) {  };
+static VKAPI_ATTR void VKAPI_CALL StubGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) {  };
+static VKAPI_ATTR void VKAPI_CALL StubGetBufferMemoryRequirements2(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) {  };
+static VKAPI_ATTR void VKAPI_CALL StubGetImageSparseMemoryRequirements2(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) {  };
+static VKAPI_ATTR void VKAPI_CALL StubTrimCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags) {  };
+static VKAPI_ATTR void VKAPI_CALL StubGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCreateSamplerYcbcrConversion(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCreateDescriptorUpdateTemplate(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubDestroyDescriptorUpdateTemplate(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) {  };
+static VKAPI_ATTR void VKAPI_CALL StubUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData) {  };
+static VKAPI_ATTR void VKAPI_CALL StubGetDescriptorSetLayoutSupport(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR* pPresentInfo) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetDeviceGroupPresentCapabilitiesKHR(VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetDeviceGroupSurfacePresentModesKHR(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubGetDeviceGroupPeerMemoryFeaturesKHR(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdSetDeviceMaskKHR(VkCommandBuffer commandBuffer, uint32_t deviceMask) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdDispatchBaseKHR(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) {  };
+static VKAPI_ATTR void VKAPI_CALL StubTrimCommandPoolKHR(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags) {  };
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetMemoryWin32HandleKHR(VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle) { return VK_SUCCESS; };
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetMemoryWin32HandlePropertiesKHR(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties) { return VK_SUCCESS; };
+#endif // VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetMemoryFdKHR(VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetMemoryFdPropertiesKHR(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties) { return VK_SUCCESS; };
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubImportSemaphoreWin32HandleKHR(VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo) { return VK_SUCCESS; };
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetSemaphoreWin32HandleKHR(VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle) { return VK_SUCCESS; };
+#endif // VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubImportSemaphoreFdKHR(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCreateDescriptorUpdateTemplateKHR(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubDestroyDescriptorUpdateTemplateKHR(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) {  };
+static VKAPI_ATTR void VKAPI_CALL StubUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo*      pRenderPassBegin, const VkSubpassBeginInfoKHR*      pSubpassBeginInfo) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR*      pSubpassBeginInfo, const VkSubpassEndInfoKHR*        pSubpassEndInfo) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR*        pSubpassEndInfo) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetSwapchainStatusKHR(VkDevice device, VkSwapchainKHR swapchain) { return VK_SUCCESS; };
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubImportFenceWin32HandleKHR(VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo) { return VK_SUCCESS; };
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetFenceWin32HandleKHR(VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle) { return VK_SUCCESS; };
+#endif // VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubReleaseProfilingLockKHR(VkDevice device) {  };
+static VKAPI_ATTR void VKAPI_CALL StubGetImageMemoryRequirements2KHR(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) {  };
+static VKAPI_ATTR void VKAPI_CALL StubGetBufferMemoryRequirements2KHR(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) {  };
+static VKAPI_ATTR void VKAPI_CALL StubGetImageSparseMemoryRequirements2KHR(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCreateSamplerYcbcrConversionKHR(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubDestroySamplerYcbcrConversionKHR(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubGetDescriptorSetLayoutSupportKHR(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetSemaphoreCounterValueKHR(VkDevice device, VkSemaphore semaphore, uint64_t* pValue) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfoKHR* pWaitInfo, uint64_t timeout) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfoKHR* pSignalInfo) { return VK_SUCCESS; };
+static VKAPI_ATTR VkDeviceAddress VKAPI_CALL StubGetBufferDeviceAddressKHR(VkDevice device, const VkBufferDeviceAddressInfoKHR* pInfo) { return 0; };
+static VKAPI_ATTR uint64_t VKAPI_CALL StubGetBufferOpaqueCaptureAddressKHR(VkDevice device, const VkBufferDeviceAddressInfoKHR* pInfo) { return 0; };
+static VKAPI_ATTR uint64_t VKAPI_CALL StubGetDeviceMemoryOpaqueCaptureAddressKHR(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo) { return 0; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetPipelineExecutablePropertiesKHR(VkDevice                        device, const VkPipelineInfoKHR*        pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetPipelineExecutableStatisticsKHR(VkDevice                        device, const VkPipelineExecutableInfoKHR*  pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetPipelineExecutableInternalRepresentationsKHR(VkDevice                        device, const VkPipelineExecutableInfoKHR*  pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubDebugMarkerSetObjectTagEXT(VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubDebugMarkerSetObjectNameEXT(VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubCmdDebugMarkerBeginEXT(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdDebugMarkerEndEXT(VkCommandBuffer commandBuffer) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdDebugMarkerInsertEXT(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdBindTransformFeedbackBuffersEXT(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdDrawIndirectByteCountEXT(VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride) {  };
+static VKAPI_ATTR uint32_t VKAPI_CALL StubGetImageViewHandleNVX(VkDevice device, const VkImageViewHandleInfoNVX* pInfo) { return 0; };
+static VKAPI_ATTR void VKAPI_CALL StubCmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetShaderInfoAMD(VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo) { return VK_SUCCESS; };
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetMemoryWin32HandleNV(VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle) { return VK_SUCCESS; };
+#endif // VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR void VKAPI_CALL StubCmdBeginConditionalRenderingEXT(VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdEndConditionalRenderingEXT(VkCommandBuffer commandBuffer) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdProcessCommandsNVX(VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdReserveSpaceForCommandsNVX(VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCreateIndirectCommandsLayoutNVX(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubDestroyIndirectCommandsLayoutNVX(VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCreateObjectTableNVX(VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubDestroyObjectTableNVX(VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubRegisterObjectsNVX(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const*    ppObjectTableEntries, const uint32_t* pObjectIndices) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubUnregisterObjectsNVX(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubDisplayPowerControlEXT(VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubRegisterDeviceEventEXT(VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubRegisterDisplayEventEXT(VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetSwapchainCounterEXT(VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetRefreshCycleDurationGOOGLE(VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetPastPresentationTimingGOOGLE(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubCmdSetDiscardRectangleEXT(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles) {  };
+static VKAPI_ATTR void VKAPI_CALL StubSetHdrMetadataEXT(VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubSetDebugUtilsObjectTagEXT(VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubQueueBeginDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo) {  };
+static VKAPI_ATTR void VKAPI_CALL StubQueueEndDebugUtilsLabelEXT(VkQueue queue) {  };
+static VKAPI_ATTR void VKAPI_CALL StubQueueInsertDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo) {  };
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetAndroidHardwareBufferPropertiesANDROID(VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties) { return VK_SUCCESS; };
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetMemoryAndroidHardwareBufferANDROID(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer) { return VK_SUCCESS; };
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+static VKAPI_ATTR void VKAPI_CALL StubCmdSetSampleLocationsEXT(VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetImageDrmFormatModifierPropertiesEXT(VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCreateValidationCacheEXT(VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubDestroyValidationCacheEXT(VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubMergeValidationCachesEXT(VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetValidationCacheDataEXT(VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdSetCoarseSampleOrderNV(VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCreateAccelerationStructureNV(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubDestroyAccelerationStructureNV(VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator) {  };
+static VKAPI_ATTR void VKAPI_CALL StubGetAccelerationStructureMemoryRequirementsNV(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubBindAccelerationStructureMemoryNV(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubCmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetRayTracingShaderGroupHandlesNV(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetAccelerationStructureHandleNV(VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubCmdWriteAccelerationStructuresPropertiesNV(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCompileDeferredNV(VkDevice device, VkPipeline pipeline, uint32_t shader) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetMemoryHostPointerPropertiesEXT(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubCmdWriteBufferMarkerAMD(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetCalibratedTimestampsEXT(VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors) {  };
+static VKAPI_ATTR void VKAPI_CALL StubCmdSetCheckpointNV(VkCommandBuffer commandBuffer, const void* pCheckpointMarker) {  };
+static VKAPI_ATTR void VKAPI_CALL StubGetQueueCheckpointDataNV(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubInitializePerformanceApiINTEL(VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubUninitializePerformanceApiINTEL(VkDevice device) {  };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCmdSetPerformanceMarkerINTEL(VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCmdSetPerformanceStreamMarkerINTEL(VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCmdSetPerformanceOverrideINTEL(VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubAcquirePerformanceConfigurationINTEL(VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubReleasePerformanceConfigurationINTEL(VkDevice device, VkPerformanceConfigurationINTEL configuration) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubQueueSetPerformanceConfigurationINTEL(VkQueue queue, VkPerformanceConfigurationINTEL configuration) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetPerformanceParameterINTEL(VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubSetLocalDimmingAMD(VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable) {  };
+static VKAPI_ATTR VkDeviceAddress VKAPI_CALL StubGetBufferDeviceAddressEXT(VkDevice device, const VkBufferDeviceAddressInfoKHR* pInfo) { return 0; };
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubAcquireFullScreenExclusiveModeEXT(VkDevice device, VkSwapchainKHR swapchain) { return VK_SUCCESS; };
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubReleaseFullScreenExclusiveModeEXT(VkDevice device, VkSwapchainKHR swapchain) { return VK_SUCCESS; };
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR VkResult VKAPI_CALL StubGetDeviceGroupSurfacePresentModes2EXT(VkDevice device, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkDeviceGroupPresentModeFlagsKHR* pModes) { return VK_SUCCESS; };
+#endif // VK_USE_PLATFORM_WIN32_KHR
+static VKAPI_ATTR void VKAPI_CALL StubCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern) {  };
+static VKAPI_ATTR void VKAPI_CALL StubResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) {  };
+
+
+
+const std::unordered_map<std::string, std::string> api_extension_map {
+    {"vkBindBufferMemory2", "VK_VERSION_1_1"},
+    {"vkBindImageMemory2", "VK_VERSION_1_1"},
+    {"vkGetDeviceGroupPeerMemoryFeatures", "VK_VERSION_1_1"},
+    {"vkCmdSetDeviceMask", "VK_VERSION_1_1"},
+    {"vkCmdDispatchBase", "VK_VERSION_1_1"},
+    {"vkGetImageMemoryRequirements2", "VK_VERSION_1_1"},
+    {"vkGetBufferMemoryRequirements2", "VK_VERSION_1_1"},
+    {"vkGetImageSparseMemoryRequirements2", "VK_VERSION_1_1"},
+    {"vkTrimCommandPool", "VK_VERSION_1_1"},
+    {"vkGetDeviceQueue2", "VK_VERSION_1_1"},
+    {"vkCreateSamplerYcbcrConversion", "VK_VERSION_1_1"},
+    {"vkDestroySamplerYcbcrConversion", "VK_VERSION_1_1"},
+    {"vkCreateDescriptorUpdateTemplate", "VK_VERSION_1_1"},
+    {"vkDestroyDescriptorUpdateTemplate", "VK_VERSION_1_1"},
+    {"vkUpdateDescriptorSetWithTemplate", "VK_VERSION_1_1"},
+    {"vkGetDescriptorSetLayoutSupport", "VK_VERSION_1_1"},
+    {"vkCreateSwapchainKHR", "VK_KHR_swapchain"},
+    {"vkDestroySwapchainKHR", "VK_KHR_swapchain"},
+    {"vkGetSwapchainImagesKHR", "VK_KHR_swapchain"},
+    {"vkAcquireNextImageKHR", "VK_KHR_swapchain"},
+    {"vkQueuePresentKHR", "VK_KHR_swapchain"},
+    {"vkGetDeviceGroupPresentCapabilitiesKHR", "VK_KHR_swapchain"},
+    {"vkGetDeviceGroupSurfacePresentModesKHR", "VK_KHR_swapchain"},
+    {"vkAcquireNextImage2KHR", "VK_KHR_swapchain"},
+    {"vkCreateSharedSwapchainsKHR", "VK_KHR_display_swapchain"},
+    {"vkGetDeviceGroupPeerMemoryFeaturesKHR", "VK_KHR_device_group"},
+    {"vkCmdSetDeviceMaskKHR", "VK_KHR_device_group"},
+    {"vkCmdDispatchBaseKHR", "VK_KHR_device_group"},
+    {"vkTrimCommandPoolKHR", "VK_KHR_maintenance1"},
+    {"vkGetMemoryWin32HandleKHR", "VK_KHR_external_memory_win32"},
+    {"vkGetMemoryWin32HandlePropertiesKHR", "VK_KHR_external_memory_win32"},
+    {"vkGetMemoryFdKHR", "VK_KHR_external_memory_fd"},
+    {"vkGetMemoryFdPropertiesKHR", "VK_KHR_external_memory_fd"},
+    {"vkImportSemaphoreWin32HandleKHR", "VK_KHR_external_semaphore_win32"},
+    {"vkGetSemaphoreWin32HandleKHR", "VK_KHR_external_semaphore_win32"},
+    {"vkImportSemaphoreFdKHR", "VK_KHR_external_semaphore_fd"},
+    {"vkGetSemaphoreFdKHR", "VK_KHR_external_semaphore_fd"},
+    {"vkCmdPushDescriptorSetKHR", "VK_KHR_push_descriptor"},
+    {"vkCmdPushDescriptorSetWithTemplateKHR", "VK_KHR_push_descriptor"},
+    {"vkCreateDescriptorUpdateTemplateKHR", "VK_KHR_descriptor_update_template"},
+    {"vkDestroyDescriptorUpdateTemplateKHR", "VK_KHR_descriptor_update_template"},
+    {"vkUpdateDescriptorSetWithTemplateKHR", "VK_KHR_descriptor_update_template"},
+    {"vkCreateRenderPass2KHR", "VK_KHR_create_renderpass2"},
+    {"vkCmdBeginRenderPass2KHR", "VK_KHR_create_renderpass2"},
+    {"vkCmdNextSubpass2KHR", "VK_KHR_create_renderpass2"},
+    {"vkCmdEndRenderPass2KHR", "VK_KHR_create_renderpass2"},
+    {"vkGetSwapchainStatusKHR", "VK_KHR_shared_presentable_image"},
+    {"vkImportFenceWin32HandleKHR", "VK_KHR_external_fence_win32"},
+    {"vkGetFenceWin32HandleKHR", "VK_KHR_external_fence_win32"},
+    {"vkImportFenceFdKHR", "VK_KHR_external_fence_fd"},
+    {"vkGetFenceFdKHR", "VK_KHR_external_fence_fd"},
+    {"vkAcquireProfilingLockKHR", "VK_KHR_performance_query"},
+    {"vkReleaseProfilingLockKHR", "VK_KHR_performance_query"},
+    {"vkGetImageMemoryRequirements2KHR", "VK_KHR_get_memory_requirements2"},
+    {"vkGetBufferMemoryRequirements2KHR", "VK_KHR_get_memory_requirements2"},
+    {"vkGetImageSparseMemoryRequirements2KHR", "VK_KHR_get_memory_requirements2"},
+    {"vkCreateSamplerYcbcrConversionKHR", "VK_KHR_sampler_ycbcr_conversion"},
+    {"vkDestroySamplerYcbcrConversionKHR", "VK_KHR_sampler_ycbcr_conversion"},
+    {"vkBindBufferMemory2KHR", "VK_KHR_bind_memory2"},
+    {"vkBindImageMemory2KHR", "VK_KHR_bind_memory2"},
+    {"vkGetDescriptorSetLayoutSupportKHR", "VK_KHR_maintenance3"},
+    {"vkCmdDrawIndirectCountKHR", "VK_KHR_draw_indirect_count"},
+    {"vkCmdDrawIndexedIndirectCountKHR", "VK_KHR_draw_indirect_count"},
+    {"vkGetSemaphoreCounterValueKHR", "VK_KHR_timeline_semaphore"},
+    {"vkWaitSemaphoresKHR", "VK_KHR_timeline_semaphore"},
+    {"vkSignalSemaphoreKHR", "VK_KHR_timeline_semaphore"},
+    {"vkGetBufferDeviceAddressKHR", "VK_KHR_buffer_device_address"},
+    {"vkGetBufferOpaqueCaptureAddressKHR", "VK_KHR_buffer_device_address"},
+    {"vkGetDeviceMemoryOpaqueCaptureAddressKHR", "VK_KHR_buffer_device_address"},
+    {"vkGetPipelineExecutablePropertiesKHR", "VK_KHR_pipeline_executable_properties"},
+    {"vkGetPipelineExecutableStatisticsKHR", "VK_KHR_pipeline_executable_properties"},
+    {"vkGetPipelineExecutableInternalRepresentationsKHR", "VK_KHR_pipeline_executable_properties"},
+    {"vkDebugMarkerSetObjectTagEXT", "VK_EXT_debug_marker"},
+    {"vkDebugMarkerSetObjectNameEXT", "VK_EXT_debug_marker"},
+    {"vkCmdDebugMarkerBeginEXT", "VK_EXT_debug_marker"},
+    {"vkCmdDebugMarkerEndEXT", "VK_EXT_debug_marker"},
+    {"vkCmdDebugMarkerInsertEXT", "VK_EXT_debug_marker"},
+    {"vkCmdBindTransformFeedbackBuffersEXT", "VK_EXT_transform_feedback"},
+    {"vkCmdBeginTransformFeedbackEXT", "VK_EXT_transform_feedback"},
+    {"vkCmdEndTransformFeedbackEXT", "VK_EXT_transform_feedback"},
+    {"vkCmdBeginQueryIndexedEXT", "VK_EXT_transform_feedback"},
+    {"vkCmdEndQueryIndexedEXT", "VK_EXT_transform_feedback"},
+    {"vkCmdDrawIndirectByteCountEXT", "VK_EXT_transform_feedback"},
+    {"vkGetImageViewHandleNVX", "VK_NVX_image_view_handle"},
+    {"vkCmdDrawIndirectCountAMD", "VK_AMD_draw_indirect_count"},
+    {"vkCmdDrawIndexedIndirectCountAMD", "VK_AMD_draw_indirect_count"},
+    {"vkGetShaderInfoAMD", "VK_AMD_shader_info"},
+    {"vkGetMemoryWin32HandleNV", "VK_NV_external_memory_win32"},
+    {"vkCmdBeginConditionalRenderingEXT", "VK_EXT_conditional_rendering"},
+    {"vkCmdEndConditionalRenderingEXT", "VK_EXT_conditional_rendering"},
+    {"vkCmdProcessCommandsNVX", "VK_NVX_device_generated_commands"},
+    {"vkCmdReserveSpaceForCommandsNVX", "VK_NVX_device_generated_commands"},
+    {"vkCreateIndirectCommandsLayoutNVX", "VK_NVX_device_generated_commands"},
+    {"vkDestroyIndirectCommandsLayoutNVX", "VK_NVX_device_generated_commands"},
+    {"vkCreateObjectTableNVX", "VK_NVX_device_generated_commands"},
+    {"vkDestroyObjectTableNVX", "VK_NVX_device_generated_commands"},
+    {"vkRegisterObjectsNVX", "VK_NVX_device_generated_commands"},
+    {"vkUnregisterObjectsNVX", "VK_NVX_device_generated_commands"},
+    {"vkCmdSetViewportWScalingNV", "VK_NV_clip_space_w_scaling"},
+    {"vkDisplayPowerControlEXT", "VK_EXT_display_control"},
+    {"vkRegisterDeviceEventEXT", "VK_EXT_display_control"},
+    {"vkRegisterDisplayEventEXT", "VK_EXT_display_control"},
+    {"vkGetSwapchainCounterEXT", "VK_EXT_display_control"},
+    {"vkGetRefreshCycleDurationGOOGLE", "VK_GOOGLE_display_timing"},
+    {"vkGetPastPresentationTimingGOOGLE", "VK_GOOGLE_display_timing"},
+    {"vkCmdSetDiscardRectangleEXT", "VK_EXT_discard_rectangles"},
+    {"vkSetHdrMetadataEXT", "VK_EXT_hdr_metadata"},
+    {"vkSetDebugUtilsObjectNameEXT", "VK_EXT_debug_utils"},
+    {"vkSetDebugUtilsObjectTagEXT", "VK_EXT_debug_utils"},
+    {"vkQueueBeginDebugUtilsLabelEXT", "VK_EXT_debug_utils"},
+    {"vkQueueEndDebugUtilsLabelEXT", "VK_EXT_debug_utils"},
+    {"vkQueueInsertDebugUtilsLabelEXT", "VK_EXT_debug_utils"},
+    {"vkCmdBeginDebugUtilsLabelEXT", "VK_EXT_debug_utils"},
+    {"vkCmdEndDebugUtilsLabelEXT", "VK_EXT_debug_utils"},
+    {"vkCmdInsertDebugUtilsLabelEXT", "VK_EXT_debug_utils"},
+    {"vkGetAndroidHardwareBufferPropertiesANDROID", "VK_ANDROID_external_memory_android_hardware_buffer"},
+    {"vkGetMemoryAndroidHardwareBufferANDROID", "VK_ANDROID_external_memory_android_hardware_buffer"},
+    {"vkCmdSetSampleLocationsEXT", "VK_EXT_sample_locations"},
+    {"vkGetImageDrmFormatModifierPropertiesEXT", "VK_EXT_image_drm_format_modifier"},
+    {"vkCreateValidationCacheEXT", "VK_EXT_validation_cache"},
+    {"vkDestroyValidationCacheEXT", "VK_EXT_validation_cache"},
+    {"vkMergeValidationCachesEXT", "VK_EXT_validation_cache"},
+    {"vkGetValidationCacheDataEXT", "VK_EXT_validation_cache"},
+    {"vkCmdBindShadingRateImageNV", "VK_NV_shading_rate_image"},
+    {"vkCmdSetViewportShadingRatePaletteNV", "VK_NV_shading_rate_image"},
+    {"vkCmdSetCoarseSampleOrderNV", "VK_NV_shading_rate_image"},
+    {"vkCreateAccelerationStructureNV", "VK_NV_ray_tracing"},
+    {"vkDestroyAccelerationStructureNV", "VK_NV_ray_tracing"},
+    {"vkGetAccelerationStructureMemoryRequirementsNV", "VK_NV_ray_tracing"},
+    {"vkBindAccelerationStructureMemoryNV", "VK_NV_ray_tracing"},
+    {"vkCmdBuildAccelerationStructureNV", "VK_NV_ray_tracing"},
+    {"vkCmdCopyAccelerationStructureNV", "VK_NV_ray_tracing"},
+    {"vkCmdTraceRaysNV", "VK_NV_ray_tracing"},
+    {"vkCreateRayTracingPipelinesNV", "VK_NV_ray_tracing"},
+    {"vkGetRayTracingShaderGroupHandlesNV", "VK_NV_ray_tracing"},
+    {"vkGetAccelerationStructureHandleNV", "VK_NV_ray_tracing"},
+    {"vkCmdWriteAccelerationStructuresPropertiesNV", "VK_NV_ray_tracing"},
+    {"vkCompileDeferredNV", "VK_NV_ray_tracing"},
+    {"vkGetMemoryHostPointerPropertiesEXT", "VK_EXT_external_memory_host"},
+    {"vkCmdWriteBufferMarkerAMD", "VK_AMD_buffer_marker"},
+    {"vkGetCalibratedTimestampsEXT", "VK_EXT_calibrated_timestamps"},
+    {"vkCmdDrawMeshTasksNV", "VK_NV_mesh_shader"},
+    {"vkCmdDrawMeshTasksIndirectNV", "VK_NV_mesh_shader"},
+    {"vkCmdDrawMeshTasksIndirectCountNV", "VK_NV_mesh_shader"},
+    {"vkCmdSetExclusiveScissorNV", "VK_NV_scissor_exclusive"},
+    {"vkCmdSetCheckpointNV", "VK_NV_device_diagnostic_checkpoints"},
+    {"vkGetQueueCheckpointDataNV", "VK_NV_device_diagnostic_checkpoints"},
+    {"vkInitializePerformanceApiINTEL", "VK_INTEL_performance_query"},
+    {"vkUninitializePerformanceApiINTEL", "VK_INTEL_performance_query"},
+    {"vkCmdSetPerformanceMarkerINTEL", "VK_INTEL_performance_query"},
+    {"vkCmdSetPerformanceStreamMarkerINTEL", "VK_INTEL_performance_query"},
+    {"vkCmdSetPerformanceOverrideINTEL", "VK_INTEL_performance_query"},
+    {"vkAcquirePerformanceConfigurationINTEL", "VK_INTEL_performance_query"},
+    {"vkReleasePerformanceConfigurationINTEL", "VK_INTEL_performance_query"},
+    {"vkQueueSetPerformanceConfigurationINTEL", "VK_INTEL_performance_query"},
+    {"vkGetPerformanceParameterINTEL", "VK_INTEL_performance_query"},
+    {"vkSetLocalDimmingAMD", "VK_AMD_display_native_hdr"},
+    {"vkGetBufferDeviceAddressEXT", "VK_EXT_buffer_device_address"},
+    {"vkAcquireFullScreenExclusiveModeEXT", "VK_EXT_full_screen_exclusive"},
+    {"vkReleaseFullScreenExclusiveModeEXT", "VK_EXT_full_screen_exclusive"},
+    {"vkGetDeviceGroupSurfacePresentModes2EXT", "VK_EXT_full_screen_exclusive"},
+    {"vkCmdSetLineStippleEXT", "VK_EXT_line_rasterization"},
+    {"vkResetQueryPoolEXT", "VK_EXT_host_query_reset"},
+};
+
+// Using the above code-generated map of API names to parent extension names, this function will:
+//   o  Determine if the API has an associated extension
+//   o  If it does, determine whether that extension was enabled at device creation (via the passed-in DeviceExtensions info)
+//   If the API name has no parent extension, OR its parent extension is enabled, return true, else false
+static inline bool ApiParentExtensionEnabled(const std::string api_name, const DeviceExtensions *device_extension_info) {
+    auto has_ext = api_extension_map.find(api_name);
+    // Is this API part of an extension or feature group?
+    if (has_ext != api_extension_map.end()) {
+        // Was the extension for this API enabled in the CreateDevice call?
+        auto info = device_extension_info->get_info(has_ext->second.c_str());
+        if ((!info.state) || (device_extension_info->*(info.state) != kEnabledByCreateinfo)) {
+            return false;
+        }
+    }
+    return true;
+}
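+
+// A minimal usage sketch (assumptions: a DeviceExtensions instance `exts` recorded at
+// vkCreateDevice time, and a device-level entry point name taken from the map above).
+// Layer code can gate dispatch of an extension entry point like so:
+//
+//     if (ApiParentExtensionEnabled("vkCmdSetLineStippleEXT", &exts)) {
+//         // VK_EXT_line_rasterization was enabled in VkDeviceCreateInfo; safe to dispatch.
+//     } else {
+//         // Parent extension not enabled; skip or report the call instead of dispatching it.
+//     }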
+
+
+
+static inline void layer_init_device_dispatch_table(VkDevice device, VkLayerDispatchTable *table, PFN_vkGetDeviceProcAddr gpa) {
+    memset(table, 0, sizeof(*table));
+    // Device function pointers
+    table->GetDeviceProcAddr = gpa;
+    table->DestroyDevice = (PFN_vkDestroyDevice) gpa(device, "vkDestroyDevice");
+    table->GetDeviceQueue = (PFN_vkGetDeviceQueue) gpa(device, "vkGetDeviceQueue");
+    table->QueueSubmit = (PFN_vkQueueSubmit) gpa(device, "vkQueueSubmit");
+    table->QueueWaitIdle = (PFN_vkQueueWaitIdle) gpa(device, "vkQueueWaitIdle");
+    table->DeviceWaitIdle = (PFN_vkDeviceWaitIdle) gpa(device, "vkDeviceWaitIdle");
+    table->AllocateMemory = (PFN_vkAllocateMemory) gpa(device, "vkAllocateMemory");
+    table->FreeMemory = (PFN_vkFreeMemory) gpa(device, "vkFreeMemory");
+    table->MapMemory = (PFN_vkMapMemory) gpa(device, "vkMapMemory");
+    table->UnmapMemory = (PFN_vkUnmapMemory) gpa(device, "vkUnmapMemory");
+    table->FlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges) gpa(device, "vkFlushMappedMemoryRanges");
+    table->InvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges) gpa(device, "vkInvalidateMappedMemoryRanges");
+    table->GetDeviceMemoryCommitment = (PFN_vkGetDeviceMemoryCommitment) gpa(device, "vkGetDeviceMemoryCommitment");
+    table->BindBufferMemory = (PFN_vkBindBufferMemory) gpa(device, "vkBindBufferMemory");
+    table->BindImageMemory = (PFN_vkBindImageMemory) gpa(device, "vkBindImageMemory");
+    table->GetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements) gpa(device, "vkGetBufferMemoryRequirements");
+    table->GetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements) gpa(device, "vkGetImageMemoryRequirements");
+    table->GetImageSparseMemoryRequirements = (PFN_vkGetImageSparseMemoryRequirements) gpa(device, "vkGetImageSparseMemoryRequirements");
+    table->QueueBindSparse = (PFN_vkQueueBindSparse) gpa(device, "vkQueueBindSparse");
+    table->CreateFence = (PFN_vkCreateFence) gpa(device, "vkCreateFence");
+    table->DestroyFence = (PFN_vkDestroyFence) gpa(device, "vkDestroyFence");
+    table->ResetFences = (PFN_vkResetFences) gpa(device, "vkResetFences");
+    table->GetFenceStatus = (PFN_vkGetFenceStatus) gpa(device, "vkGetFenceStatus");
+    table->WaitForFences = (PFN_vkWaitForFences) gpa(device, "vkWaitForFences");
+    table->CreateSemaphore = (PFN_vkCreateSemaphore) gpa(device, "vkCreateSemaphore");
+    table->DestroySemaphore = (PFN_vkDestroySemaphore) gpa(device, "vkDestroySemaphore");
+    table->CreateEvent = (PFN_vkCreateEvent) gpa(device, "vkCreateEvent");
+    table->DestroyEvent = (PFN_vkDestroyEvent) gpa(device, "vkDestroyEvent");
+    table->GetEventStatus = (PFN_vkGetEventStatus) gpa(device, "vkGetEventStatus");
+    table->SetEvent = (PFN_vkSetEvent) gpa(device, "vkSetEvent");
+    table->ResetEvent = (PFN_vkResetEvent) gpa(device, "vkResetEvent");
+    table->CreateQueryPool = (PFN_vkCreateQueryPool) gpa(device, "vkCreateQueryPool");
+    table->DestroyQueryPool = (PFN_vkDestroyQueryPool) gpa(device, "vkDestroyQueryPool");
+    table->GetQueryPoolResults = (PFN_vkGetQueryPoolResults) gpa(device, "vkGetQueryPoolResults");
+    table->CreateBuffer = (PFN_vkCreateBuffer) gpa(device, "vkCreateBuffer");
+    table->DestroyBuffer = (PFN_vkDestroyBuffer) gpa(device, "vkDestroyBuffer");
+    table->CreateBufferView = (PFN_vkCreateBufferView) gpa(device, "vkCreateBufferView");
+    table->DestroyBufferView = (PFN_vkDestroyBufferView) gpa(device, "vkDestroyBufferView");
+    table->CreateImage = (PFN_vkCreateImage) gpa(device, "vkCreateImage");
+    table->DestroyImage = (PFN_vkDestroyImage) gpa(device, "vkDestroyImage");
+    table->GetImageSubresourceLayout = (PFN_vkGetImageSubresourceLayout) gpa(device, "vkGetImageSubresourceLayout");
+    table->CreateImageView = (PFN_vkCreateImageView) gpa(device, "vkCreateImageView");
+    table->DestroyImageView = (PFN_vkDestroyImageView) gpa(device, "vkDestroyImageView");
+    table->CreateShaderModule = (PFN_vkCreateShaderModule) gpa(device, "vkCreateShaderModule");
+    table->DestroyShaderModule = (PFN_vkDestroyShaderModule) gpa(device, "vkDestroyShaderModule");
+    table->CreatePipelineCache = (PFN_vkCreatePipelineCache) gpa(device, "vkCreatePipelineCache");
+    table->DestroyPipelineCache = (PFN_vkDestroyPipelineCache) gpa(device, "vkDestroyPipelineCache");
+    table->GetPipelineCacheData = (PFN_vkGetPipelineCacheData) gpa(device, "vkGetPipelineCacheData");
+    table->MergePipelineCaches = (PFN_vkMergePipelineCaches) gpa(device, "vkMergePipelineCaches");
+    table->CreateGraphicsPipelines = (PFN_vkCreateGraphicsPipelines) gpa(device, "vkCreateGraphicsPipelines");
+    table->CreateComputePipelines = (PFN_vkCreateComputePipelines) gpa(device, "vkCreateComputePipelines");
+    table->DestroyPipeline = (PFN_vkDestroyPipeline) gpa(device, "vkDestroyPipeline");
+    table->CreatePipelineLayout = (PFN_vkCreatePipelineLayout) gpa(device, "vkCreatePipelineLayout");
+    table->DestroyPipelineLayout = (PFN_vkDestroyPipelineLayout) gpa(device, "vkDestroyPipelineLayout");
+    table->CreateSampler = (PFN_vkCreateSampler) gpa(device, "vkCreateSampler");
+    table->DestroySampler = (PFN_vkDestroySampler) gpa(device, "vkDestroySampler");
+    table->CreateDescriptorSetLayout = (PFN_vkCreateDescriptorSetLayout) gpa(device, "vkCreateDescriptorSetLayout");
+    table->DestroyDescriptorSetLayout = (PFN_vkDestroyDescriptorSetLayout) gpa(device, "vkDestroyDescriptorSetLayout");
+    table->CreateDescriptorPool = (PFN_vkCreateDescriptorPool) gpa(device, "vkCreateDescriptorPool");
+    table->DestroyDescriptorPool = (PFN_vkDestroyDescriptorPool) gpa(device, "vkDestroyDescriptorPool");
+    table->ResetDescriptorPool = (PFN_vkResetDescriptorPool) gpa(device, "vkResetDescriptorPool");
+    table->AllocateDescriptorSets = (PFN_vkAllocateDescriptorSets) gpa(device, "vkAllocateDescriptorSets");
+    table->FreeDescriptorSets = (PFN_vkFreeDescriptorSets) gpa(device, "vkFreeDescriptorSets");
+    table->UpdateDescriptorSets = (PFN_vkUpdateDescriptorSets) gpa(device, "vkUpdateDescriptorSets");
+    table->CreateFramebuffer = (PFN_vkCreateFramebuffer) gpa(device, "vkCreateFramebuffer");
+    table->DestroyFramebuffer = (PFN_vkDestroyFramebuffer) gpa(device, "vkDestroyFramebuffer");
+    table->CreateRenderPass = (PFN_vkCreateRenderPass) gpa(device, "vkCreateRenderPass");
+    table->DestroyRenderPass = (PFN_vkDestroyRenderPass) gpa(device, "vkDestroyRenderPass");
+    table->GetRenderAreaGranularity = (PFN_vkGetRenderAreaGranularity) gpa(device, "vkGetRenderAreaGranularity");
+    table->CreateCommandPool = (PFN_vkCreateCommandPool) gpa(device, "vkCreateCommandPool");
+    table->DestroyCommandPool = (PFN_vkDestroyCommandPool) gpa(device, "vkDestroyCommandPool");
+    table->ResetCommandPool = (PFN_vkResetCommandPool) gpa(device, "vkResetCommandPool");
+    table->AllocateCommandBuffers = (PFN_vkAllocateCommandBuffers) gpa(device, "vkAllocateCommandBuffers");
+    table->FreeCommandBuffers = (PFN_vkFreeCommandBuffers) gpa(device, "vkFreeCommandBuffers");
+    table->BeginCommandBuffer = (PFN_vkBeginCommandBuffer) gpa(device, "vkBeginCommandBuffer");
+    table->EndCommandBuffer = (PFN_vkEndCommandBuffer) gpa(device, "vkEndCommandBuffer");
+    table->ResetCommandBuffer = (PFN_vkResetCommandBuffer) gpa(device, "vkResetCommandBuffer");
+    table->CmdBindPipeline = (PFN_vkCmdBindPipeline) gpa(device, "vkCmdBindPipeline");
+    table->CmdSetViewport = (PFN_vkCmdSetViewport) gpa(device, "vkCmdSetViewport");
+    table->CmdSetScissor = (PFN_vkCmdSetScissor) gpa(device, "vkCmdSetScissor");
+    table->CmdSetLineWidth = (PFN_vkCmdSetLineWidth) gpa(device, "vkCmdSetLineWidth");
+    table->CmdSetDepthBias = (PFN_vkCmdSetDepthBias) gpa(device, "vkCmdSetDepthBias");
+    table->CmdSetBlendConstants = (PFN_vkCmdSetBlendConstants) gpa(device, "vkCmdSetBlendConstants");
+    table->CmdSetDepthBounds = (PFN_vkCmdSetDepthBounds) gpa(device, "vkCmdSetDepthBounds");
+    table->CmdSetStencilCompareMask = (PFN_vkCmdSetStencilCompareMask) gpa(device, "vkCmdSetStencilCompareMask");
+    table->CmdSetStencilWriteMask = (PFN_vkCmdSetStencilWriteMask) gpa(device, "vkCmdSetStencilWriteMask");
+    table->CmdSetStencilReference = (PFN_vkCmdSetStencilReference) gpa(device, "vkCmdSetStencilReference");
+    table->CmdBindDescriptorSets = (PFN_vkCmdBindDescriptorSets) gpa(device, "vkCmdBindDescriptorSets");
+    table->CmdBindIndexBuffer = (PFN_vkCmdBindIndexBuffer) gpa(device, "vkCmdBindIndexBuffer");
+    table->CmdBindVertexBuffers = (PFN_vkCmdBindVertexBuffers) gpa(device, "vkCmdBindVertexBuffers");
+    table->CmdDraw = (PFN_vkCmdDraw) gpa(device, "vkCmdDraw");
+    table->CmdDrawIndexed = (PFN_vkCmdDrawIndexed) gpa(device, "vkCmdDrawIndexed");
+    table->CmdDrawIndirect = (PFN_vkCmdDrawIndirect) gpa(device, "vkCmdDrawIndirect");
+    table->CmdDrawIndexedIndirect = (PFN_vkCmdDrawIndexedIndirect) gpa(device, "vkCmdDrawIndexedIndirect");
+    table->CmdDispatch = (PFN_vkCmdDispatch) gpa(device, "vkCmdDispatch");
+    table->CmdDispatchIndirect = (PFN_vkCmdDispatchIndirect) gpa(device, "vkCmdDispatchIndirect");
+    table->CmdCopyBuffer = (PFN_vkCmdCopyBuffer) gpa(device, "vkCmdCopyBuffer");
+    table->CmdCopyImage = (PFN_vkCmdCopyImage) gpa(device, "vkCmdCopyImage");
+    table->CmdBlitImage = (PFN_vkCmdBlitImage) gpa(device, "vkCmdBlitImage");
+    table->CmdCopyBufferToImage = (PFN_vkCmdCopyBufferToImage) gpa(device, "vkCmdCopyBufferToImage");
+    table->CmdCopyImageToBuffer = (PFN_vkCmdCopyImageToBuffer) gpa(device, "vkCmdCopyImageToBuffer");
+    table->CmdUpdateBuffer = (PFN_vkCmdUpdateBuffer) gpa(device, "vkCmdUpdateBuffer");
+    table->CmdFillBuffer = (PFN_vkCmdFillBuffer) gpa(device, "vkCmdFillBuffer");
+    table->CmdClearColorImage = (PFN_vkCmdClearColorImage) gpa(device, "vkCmdClearColorImage");
+    table->CmdClearDepthStencilImage = (PFN_vkCmdClearDepthStencilImage) gpa(device, "vkCmdClearDepthStencilImage");
+    table->CmdClearAttachments = (PFN_vkCmdClearAttachments) gpa(device, "vkCmdClearAttachments");
+    table->CmdResolveImage = (PFN_vkCmdResolveImage) gpa(device, "vkCmdResolveImage");
+    table->CmdSetEvent = (PFN_vkCmdSetEvent) gpa(device, "vkCmdSetEvent");
+    table->CmdResetEvent = (PFN_vkCmdResetEvent) gpa(device, "vkCmdResetEvent");
+    table->CmdWaitEvents = (PFN_vkCmdWaitEvents) gpa(device, "vkCmdWaitEvents");
+    table->CmdPipelineBarrier = (PFN_vkCmdPipelineBarrier) gpa(device, "vkCmdPipelineBarrier");
+    table->CmdBeginQuery = (PFN_vkCmdBeginQuery) gpa(device, "vkCmdBeginQuery");
+    table->CmdEndQuery = (PFN_vkCmdEndQuery) gpa(device, "vkCmdEndQuery");
+    table->CmdResetQueryPool = (PFN_vkCmdResetQueryPool) gpa(device, "vkCmdResetQueryPool");
+    table->CmdWriteTimestamp = (PFN_vkCmdWriteTimestamp) gpa(device, "vkCmdWriteTimestamp");
+    table->CmdCopyQueryPoolResults = (PFN_vkCmdCopyQueryPoolResults) gpa(device, "vkCmdCopyQueryPoolResults");
+    table->CmdPushConstants = (PFN_vkCmdPushConstants) gpa(device, "vkCmdPushConstants");
+    table->CmdBeginRenderPass = (PFN_vkCmdBeginRenderPass) gpa(device, "vkCmdBeginRenderPass");
+    table->CmdNextSubpass = (PFN_vkCmdNextSubpass) gpa(device, "vkCmdNextSubpass");
+    table->CmdEndRenderPass = (PFN_vkCmdEndRenderPass) gpa(device, "vkCmdEndRenderPass");
+    table->CmdExecuteCommands = (PFN_vkCmdExecuteCommands) gpa(device, "vkCmdExecuteCommands");
+    table->BindBufferMemory2 = (PFN_vkBindBufferMemory2) gpa(device, "vkBindBufferMemory2");
+    if (table->BindBufferMemory2 == nullptr) { table->BindBufferMemory2 = (PFN_vkBindBufferMemory2)StubBindBufferMemory2; }
+    table->BindImageMemory2 = (PFN_vkBindImageMemory2) gpa(device, "vkBindImageMemory2");
+    if (table->BindImageMemory2 == nullptr) { table->BindImageMemory2 = (PFN_vkBindImageMemory2)StubBindImageMemory2; }
+    table->GetDeviceGroupPeerMemoryFeatures = (PFN_vkGetDeviceGroupPeerMemoryFeatures) gpa(device, "vkGetDeviceGroupPeerMemoryFeatures");
+    if (table->GetDeviceGroupPeerMemoryFeatures == nullptr) { table->GetDeviceGroupPeerMemoryFeatures = (PFN_vkGetDeviceGroupPeerMemoryFeatures)StubGetDeviceGroupPeerMemoryFeatures; }
+    table->CmdSetDeviceMask = (PFN_vkCmdSetDeviceMask) gpa(device, "vkCmdSetDeviceMask");
+    if (table->CmdSetDeviceMask == nullptr) { table->CmdSetDeviceMask = (PFN_vkCmdSetDeviceMask)StubCmdSetDeviceMask; }
+    table->CmdDispatchBase = (PFN_vkCmdDispatchBase) gpa(device, "vkCmdDispatchBase");
+    if (table->CmdDispatchBase == nullptr) { table->CmdDispatchBase = (PFN_vkCmdDispatchBase)StubCmdDispatchBase; }
+    table->GetImageMemoryRequirements2 = (PFN_vkGetImageMemoryRequirements2) gpa(device, "vkGetImageMemoryRequirements2");
+    if (table->GetImageMemoryRequirements2 == nullptr) { table->GetImageMemoryRequirements2 = (PFN_vkGetImageMemoryRequirements2)StubGetImageMemoryRequirements2; }
+    table->GetBufferMemoryRequirements2 = (PFN_vkGetBufferMemoryRequirements2) gpa(device, "vkGetBufferMemoryRequirements2");
+    if (table->GetBufferMemoryRequirements2 == nullptr) { table->GetBufferMemoryRequirements2 = (PFN_vkGetBufferMemoryRequirements2)StubGetBufferMemoryRequirements2; }
+    table->GetImageSparseMemoryRequirements2 = (PFN_vkGetImageSparseMemoryRequirements2) gpa(device, "vkGetImageSparseMemoryRequirements2");
+    if (table->GetImageSparseMemoryRequirements2 == nullptr) { table->GetImageSparseMemoryRequirements2 = (PFN_vkGetImageSparseMemoryRequirements2)StubGetImageSparseMemoryRequirements2; }
+    table->TrimCommandPool = (PFN_vkTrimCommandPool) gpa(device, "vkTrimCommandPool");
+    if (table->TrimCommandPool == nullptr) { table->TrimCommandPool = (PFN_vkTrimCommandPool)StubTrimCommandPool; }
+    table->GetDeviceQueue2 = (PFN_vkGetDeviceQueue2) gpa(device, "vkGetDeviceQueue2");
+    if (table->GetDeviceQueue2 == nullptr) { table->GetDeviceQueue2 = (PFN_vkGetDeviceQueue2)StubGetDeviceQueue2; }
+    table->CreateSamplerYcbcrConversion = (PFN_vkCreateSamplerYcbcrConversion) gpa(device, "vkCreateSamplerYcbcrConversion");
+    if (table->CreateSamplerYcbcrConversion == nullptr) { table->CreateSamplerYcbcrConversion = (PFN_vkCreateSamplerYcbcrConversion)StubCreateSamplerYcbcrConversion; }
+    table->DestroySamplerYcbcrConversion = (PFN_vkDestroySamplerYcbcrConversion) gpa(device, "vkDestroySamplerYcbcrConversion");
+    if (table->DestroySamplerYcbcrConversion == nullptr) { table->DestroySamplerYcbcrConversion = (PFN_vkDestroySamplerYcbcrConversion)StubDestroySamplerYcbcrConversion; }
+    table->CreateDescriptorUpdateTemplate = (PFN_vkCreateDescriptorUpdateTemplate) gpa(device, "vkCreateDescriptorUpdateTemplate");
+    if (table->CreateDescriptorUpdateTemplate == nullptr) { table->CreateDescriptorUpdateTemplate = (PFN_vkCreateDescriptorUpdateTemplate)StubCreateDescriptorUpdateTemplate; }
+    table->DestroyDescriptorUpdateTemplate = (PFN_vkDestroyDescriptorUpdateTemplate) gpa(device, "vkDestroyDescriptorUpdateTemplate");
+    if (table->DestroyDescriptorUpdateTemplate == nullptr) { table->DestroyDescriptorUpdateTemplate = (PFN_vkDestroyDescriptorUpdateTemplate)StubDestroyDescriptorUpdateTemplate; }
+    table->UpdateDescriptorSetWithTemplate = (PFN_vkUpdateDescriptorSetWithTemplate) gpa(device, "vkUpdateDescriptorSetWithTemplate");
+    if (table->UpdateDescriptorSetWithTemplate == nullptr) { table->UpdateDescriptorSetWithTemplate = (PFN_vkUpdateDescriptorSetWithTemplate)StubUpdateDescriptorSetWithTemplate; }
+    table->GetDescriptorSetLayoutSupport = (PFN_vkGetDescriptorSetLayoutSupport) gpa(device, "vkGetDescriptorSetLayoutSupport");
+    if (table->GetDescriptorSetLayoutSupport == nullptr) { table->GetDescriptorSetLayoutSupport = (PFN_vkGetDescriptorSetLayoutSupport)StubGetDescriptorSetLayoutSupport; }
+    table->CreateSwapchainKHR = (PFN_vkCreateSwapchainKHR) gpa(device, "vkCreateSwapchainKHR");
+    if (table->CreateSwapchainKHR == nullptr) { table->CreateSwapchainKHR = (PFN_vkCreateSwapchainKHR)StubCreateSwapchainKHR; }
+    table->DestroySwapchainKHR = (PFN_vkDestroySwapchainKHR) gpa(device, "vkDestroySwapchainKHR");
+    if (table->DestroySwapchainKHR == nullptr) { table->DestroySwapchainKHR = (PFN_vkDestroySwapchainKHR)StubDestroySwapchainKHR; }
+    table->GetSwapchainImagesKHR = (PFN_vkGetSwapchainImagesKHR) gpa(device, "vkGetSwapchainImagesKHR");
+    if (table->GetSwapchainImagesKHR == nullptr) { table->GetSwapchainImagesKHR = (PFN_vkGetSwapchainImagesKHR)StubGetSwapchainImagesKHR; }
+    table->AcquireNextImageKHR = (PFN_vkAcquireNextImageKHR) gpa(device, "vkAcquireNextImageKHR");
+    if (table->AcquireNextImageKHR == nullptr) { table->AcquireNextImageKHR = (PFN_vkAcquireNextImageKHR)StubAcquireNextImageKHR; }
+    table->QueuePresentKHR = (PFN_vkQueuePresentKHR) gpa(device, "vkQueuePresentKHR");
+    if (table->QueuePresentKHR == nullptr) { table->QueuePresentKHR = (PFN_vkQueuePresentKHR)StubQueuePresentKHR; }
+    table->GetDeviceGroupPresentCapabilitiesKHR = (PFN_vkGetDeviceGroupPresentCapabilitiesKHR) gpa(device, "vkGetDeviceGroupPresentCapabilitiesKHR");
+    if (table->GetDeviceGroupPresentCapabilitiesKHR == nullptr) { table->GetDeviceGroupPresentCapabilitiesKHR = (PFN_vkGetDeviceGroupPresentCapabilitiesKHR)StubGetDeviceGroupPresentCapabilitiesKHR; }
+    table->GetDeviceGroupSurfacePresentModesKHR = (PFN_vkGetDeviceGroupSurfacePresentModesKHR) gpa(device, "vkGetDeviceGroupSurfacePresentModesKHR");
+    if (table->GetDeviceGroupSurfacePresentModesKHR == nullptr) { table->GetDeviceGroupSurfacePresentModesKHR = (PFN_vkGetDeviceGroupSurfacePresentModesKHR)StubGetDeviceGroupSurfacePresentModesKHR; }
+    table->AcquireNextImage2KHR = (PFN_vkAcquireNextImage2KHR) gpa(device, "vkAcquireNextImage2KHR");
+    if (table->AcquireNextImage2KHR == nullptr) { table->AcquireNextImage2KHR = (PFN_vkAcquireNextImage2KHR)StubAcquireNextImage2KHR; }
+    table->CreateSharedSwapchainsKHR = (PFN_vkCreateSharedSwapchainsKHR) gpa(device, "vkCreateSharedSwapchainsKHR");
+    if (table->CreateSharedSwapchainsKHR == nullptr) { table->CreateSharedSwapchainsKHR = (PFN_vkCreateSharedSwapchainsKHR)StubCreateSharedSwapchainsKHR; }
+    table->GetDeviceGroupPeerMemoryFeaturesKHR = (PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR) gpa(device, "vkGetDeviceGroupPeerMemoryFeaturesKHR");
+    if (table->GetDeviceGroupPeerMemoryFeaturesKHR == nullptr) { table->GetDeviceGroupPeerMemoryFeaturesKHR = (PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR)StubGetDeviceGroupPeerMemoryFeaturesKHR; }
+    table->CmdSetDeviceMaskKHR = (PFN_vkCmdSetDeviceMaskKHR) gpa(device, "vkCmdSetDeviceMaskKHR");
+    if (table->CmdSetDeviceMaskKHR == nullptr) { table->CmdSetDeviceMaskKHR = (PFN_vkCmdSetDeviceMaskKHR)StubCmdSetDeviceMaskKHR; }
+    table->CmdDispatchBaseKHR = (PFN_vkCmdDispatchBaseKHR) gpa(device, "vkCmdDispatchBaseKHR");
+    if (table->CmdDispatchBaseKHR == nullptr) { table->CmdDispatchBaseKHR = (PFN_vkCmdDispatchBaseKHR)StubCmdDispatchBaseKHR; }
+    table->TrimCommandPoolKHR = (PFN_vkTrimCommandPoolKHR) gpa(device, "vkTrimCommandPoolKHR");
+    if (table->TrimCommandPoolKHR == nullptr) { table->TrimCommandPoolKHR = (PFN_vkTrimCommandPoolKHR)StubTrimCommandPoolKHR; }
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->GetMemoryWin32HandleKHR = (PFN_vkGetMemoryWin32HandleKHR) gpa(device, "vkGetMemoryWin32HandleKHR");
+    if (table->GetMemoryWin32HandleKHR == nullptr) { table->GetMemoryWin32HandleKHR = (PFN_vkGetMemoryWin32HandleKHR)StubGetMemoryWin32HandleKHR; }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->GetMemoryWin32HandlePropertiesKHR = (PFN_vkGetMemoryWin32HandlePropertiesKHR) gpa(device, "vkGetMemoryWin32HandlePropertiesKHR");
+    if (table->GetMemoryWin32HandlePropertiesKHR == nullptr) { table->GetMemoryWin32HandlePropertiesKHR = (PFN_vkGetMemoryWin32HandlePropertiesKHR)StubGetMemoryWin32HandlePropertiesKHR; }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+    table->GetMemoryFdKHR = (PFN_vkGetMemoryFdKHR) gpa(device, "vkGetMemoryFdKHR");
+    if (table->GetMemoryFdKHR == nullptr) { table->GetMemoryFdKHR = (PFN_vkGetMemoryFdKHR)StubGetMemoryFdKHR; }
+    table->GetMemoryFdPropertiesKHR = (PFN_vkGetMemoryFdPropertiesKHR) gpa(device, "vkGetMemoryFdPropertiesKHR");
+    if (table->GetMemoryFdPropertiesKHR == nullptr) { table->GetMemoryFdPropertiesKHR = (PFN_vkGetMemoryFdPropertiesKHR)StubGetMemoryFdPropertiesKHR; }
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->ImportSemaphoreWin32HandleKHR = (PFN_vkImportSemaphoreWin32HandleKHR) gpa(device, "vkImportSemaphoreWin32HandleKHR");
+    if (table->ImportSemaphoreWin32HandleKHR == nullptr) { table->ImportSemaphoreWin32HandleKHR = (PFN_vkImportSemaphoreWin32HandleKHR)StubImportSemaphoreWin32HandleKHR; }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->GetSemaphoreWin32HandleKHR = (PFN_vkGetSemaphoreWin32HandleKHR) gpa(device, "vkGetSemaphoreWin32HandleKHR");
+    if (table->GetSemaphoreWin32HandleKHR == nullptr) { table->GetSemaphoreWin32HandleKHR = (PFN_vkGetSemaphoreWin32HandleKHR)StubGetSemaphoreWin32HandleKHR; }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+    table->ImportSemaphoreFdKHR = (PFN_vkImportSemaphoreFdKHR) gpa(device, "vkImportSemaphoreFdKHR");
+    if (table->ImportSemaphoreFdKHR == nullptr) { table->ImportSemaphoreFdKHR = (PFN_vkImportSemaphoreFdKHR)StubImportSemaphoreFdKHR; }
+    table->GetSemaphoreFdKHR = (PFN_vkGetSemaphoreFdKHR) gpa(device, "vkGetSemaphoreFdKHR");
+    if (table->GetSemaphoreFdKHR == nullptr) { table->GetSemaphoreFdKHR = (PFN_vkGetSemaphoreFdKHR)StubGetSemaphoreFdKHR; }
+    table->CmdPushDescriptorSetKHR = (PFN_vkCmdPushDescriptorSetKHR) gpa(device, "vkCmdPushDescriptorSetKHR");
+    if (table->CmdPushDescriptorSetKHR == nullptr) { table->CmdPushDescriptorSetKHR = (PFN_vkCmdPushDescriptorSetKHR)StubCmdPushDescriptorSetKHR; }
+    table->CmdPushDescriptorSetWithTemplateKHR = (PFN_vkCmdPushDescriptorSetWithTemplateKHR) gpa(device, "vkCmdPushDescriptorSetWithTemplateKHR");
+    if (table->CmdPushDescriptorSetWithTemplateKHR == nullptr) { table->CmdPushDescriptorSetWithTemplateKHR = (PFN_vkCmdPushDescriptorSetWithTemplateKHR)StubCmdPushDescriptorSetWithTemplateKHR; }
+    table->CreateDescriptorUpdateTemplateKHR = (PFN_vkCreateDescriptorUpdateTemplateKHR) gpa(device, "vkCreateDescriptorUpdateTemplateKHR");
+    if (table->CreateDescriptorUpdateTemplateKHR == nullptr) { table->CreateDescriptorUpdateTemplateKHR = (PFN_vkCreateDescriptorUpdateTemplateKHR)StubCreateDescriptorUpdateTemplateKHR; }
+    table->DestroyDescriptorUpdateTemplateKHR = (PFN_vkDestroyDescriptorUpdateTemplateKHR) gpa(device, "vkDestroyDescriptorUpdateTemplateKHR");
+    if (table->DestroyDescriptorUpdateTemplateKHR == nullptr) { table->DestroyDescriptorUpdateTemplateKHR = (PFN_vkDestroyDescriptorUpdateTemplateKHR)StubDestroyDescriptorUpdateTemplateKHR; }
+    table->UpdateDescriptorSetWithTemplateKHR = (PFN_vkUpdateDescriptorSetWithTemplateKHR) gpa(device, "vkUpdateDescriptorSetWithTemplateKHR");
+    if (table->UpdateDescriptorSetWithTemplateKHR == nullptr) { table->UpdateDescriptorSetWithTemplateKHR = (PFN_vkUpdateDescriptorSetWithTemplateKHR)StubUpdateDescriptorSetWithTemplateKHR; }
+    table->CreateRenderPass2KHR = (PFN_vkCreateRenderPass2KHR) gpa(device, "vkCreateRenderPass2KHR");
+    if (table->CreateRenderPass2KHR == nullptr) { table->CreateRenderPass2KHR = (PFN_vkCreateRenderPass2KHR)StubCreateRenderPass2KHR; }
+    table->CmdBeginRenderPass2KHR = (PFN_vkCmdBeginRenderPass2KHR) gpa(device, "vkCmdBeginRenderPass2KHR");
+    if (table->CmdBeginRenderPass2KHR == nullptr) { table->CmdBeginRenderPass2KHR = (PFN_vkCmdBeginRenderPass2KHR)StubCmdBeginRenderPass2KHR; }
+    table->CmdNextSubpass2KHR = (PFN_vkCmdNextSubpass2KHR) gpa(device, "vkCmdNextSubpass2KHR");
+    if (table->CmdNextSubpass2KHR == nullptr) { table->CmdNextSubpass2KHR = (PFN_vkCmdNextSubpass2KHR)StubCmdNextSubpass2KHR; }
+    table->CmdEndRenderPass2KHR = (PFN_vkCmdEndRenderPass2KHR) gpa(device, "vkCmdEndRenderPass2KHR");
+    if (table->CmdEndRenderPass2KHR == nullptr) { table->CmdEndRenderPass2KHR = (PFN_vkCmdEndRenderPass2KHR)StubCmdEndRenderPass2KHR; }
+    table->GetSwapchainStatusKHR = (PFN_vkGetSwapchainStatusKHR) gpa(device, "vkGetSwapchainStatusKHR");
+    if (table->GetSwapchainStatusKHR == nullptr) { table->GetSwapchainStatusKHR = (PFN_vkGetSwapchainStatusKHR)StubGetSwapchainStatusKHR; }
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->ImportFenceWin32HandleKHR = (PFN_vkImportFenceWin32HandleKHR) gpa(device, "vkImportFenceWin32HandleKHR");
+    if (table->ImportFenceWin32HandleKHR == nullptr) { table->ImportFenceWin32HandleKHR = (PFN_vkImportFenceWin32HandleKHR)StubImportFenceWin32HandleKHR; }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->GetFenceWin32HandleKHR = (PFN_vkGetFenceWin32HandleKHR) gpa(device, "vkGetFenceWin32HandleKHR");
+    if (table->GetFenceWin32HandleKHR == nullptr) { table->GetFenceWin32HandleKHR = (PFN_vkGetFenceWin32HandleKHR)StubGetFenceWin32HandleKHR; }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+    table->ImportFenceFdKHR = (PFN_vkImportFenceFdKHR) gpa(device, "vkImportFenceFdKHR");
+    if (table->ImportFenceFdKHR == nullptr) { table->ImportFenceFdKHR = (PFN_vkImportFenceFdKHR)StubImportFenceFdKHR; }
+    table->GetFenceFdKHR = (PFN_vkGetFenceFdKHR) gpa(device, "vkGetFenceFdKHR");
+    if (table->GetFenceFdKHR == nullptr) { table->GetFenceFdKHR = (PFN_vkGetFenceFdKHR)StubGetFenceFdKHR; }
+    table->AcquireProfilingLockKHR = (PFN_vkAcquireProfilingLockKHR) gpa(device, "vkAcquireProfilingLockKHR");
+    if (table->AcquireProfilingLockKHR == nullptr) { table->AcquireProfilingLockKHR = (PFN_vkAcquireProfilingLockKHR)StubAcquireProfilingLockKHR; }
+    table->ReleaseProfilingLockKHR = (PFN_vkReleaseProfilingLockKHR) gpa(device, "vkReleaseProfilingLockKHR");
+    if (table->ReleaseProfilingLockKHR == nullptr) { table->ReleaseProfilingLockKHR = (PFN_vkReleaseProfilingLockKHR)StubReleaseProfilingLockKHR; }
+    table->GetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2KHR) gpa(device, "vkGetImageMemoryRequirements2KHR");
+    if (table->GetImageMemoryRequirements2KHR == nullptr) { table->GetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2KHR)StubGetImageMemoryRequirements2KHR; }
+    table->GetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2KHR) gpa(device, "vkGetBufferMemoryRequirements2KHR");
+    if (table->GetBufferMemoryRequirements2KHR == nullptr) { table->GetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2KHR)StubGetBufferMemoryRequirements2KHR; }
+    table->GetImageSparseMemoryRequirements2KHR = (PFN_vkGetImageSparseMemoryRequirements2KHR) gpa(device, "vkGetImageSparseMemoryRequirements2KHR");
+    if (table->GetImageSparseMemoryRequirements2KHR == nullptr) { table->GetImageSparseMemoryRequirements2KHR = (PFN_vkGetImageSparseMemoryRequirements2KHR)StubGetImageSparseMemoryRequirements2KHR; }
+    table->CreateSamplerYcbcrConversionKHR = (PFN_vkCreateSamplerYcbcrConversionKHR) gpa(device, "vkCreateSamplerYcbcrConversionKHR");
+    if (table->CreateSamplerYcbcrConversionKHR == nullptr) { table->CreateSamplerYcbcrConversionKHR = (PFN_vkCreateSamplerYcbcrConversionKHR)StubCreateSamplerYcbcrConversionKHR; }
+    table->DestroySamplerYcbcrConversionKHR = (PFN_vkDestroySamplerYcbcrConversionKHR) gpa(device, "vkDestroySamplerYcbcrConversionKHR");
+    if (table->DestroySamplerYcbcrConversionKHR == nullptr) { table->DestroySamplerYcbcrConversionKHR = (PFN_vkDestroySamplerYcbcrConversionKHR)StubDestroySamplerYcbcrConversionKHR; }
+    table->BindBufferMemory2KHR = (PFN_vkBindBufferMemory2KHR) gpa(device, "vkBindBufferMemory2KHR");
+    if (table->BindBufferMemory2KHR == nullptr) { table->BindBufferMemory2KHR = (PFN_vkBindBufferMemory2KHR)StubBindBufferMemory2KHR; }
+    table->BindImageMemory2KHR = (PFN_vkBindImageMemory2KHR) gpa(device, "vkBindImageMemory2KHR");
+    if (table->BindImageMemory2KHR == nullptr) { table->BindImageMemory2KHR = (PFN_vkBindImageMemory2KHR)StubBindImageMemory2KHR; }
+    table->GetDescriptorSetLayoutSupportKHR = (PFN_vkGetDescriptorSetLayoutSupportKHR) gpa(device, "vkGetDescriptorSetLayoutSupportKHR");
+    if (table->GetDescriptorSetLayoutSupportKHR == nullptr) { table->GetDescriptorSetLayoutSupportKHR = (PFN_vkGetDescriptorSetLayoutSupportKHR)StubGetDescriptorSetLayoutSupportKHR; }
+    table->CmdDrawIndirectCountKHR = (PFN_vkCmdDrawIndirectCountKHR) gpa(device, "vkCmdDrawIndirectCountKHR");
+    if (table->CmdDrawIndirectCountKHR == nullptr) { table->CmdDrawIndirectCountKHR = (PFN_vkCmdDrawIndirectCountKHR)StubCmdDrawIndirectCountKHR; }
+    table->CmdDrawIndexedIndirectCountKHR = (PFN_vkCmdDrawIndexedIndirectCountKHR) gpa(device, "vkCmdDrawIndexedIndirectCountKHR");
+    if (table->CmdDrawIndexedIndirectCountKHR == nullptr) { table->CmdDrawIndexedIndirectCountKHR = (PFN_vkCmdDrawIndexedIndirectCountKHR)StubCmdDrawIndexedIndirectCountKHR; }
+    table->GetSemaphoreCounterValueKHR = (PFN_vkGetSemaphoreCounterValueKHR) gpa(device, "vkGetSemaphoreCounterValueKHR");
+    if (table->GetSemaphoreCounterValueKHR == nullptr) { table->GetSemaphoreCounterValueKHR = (PFN_vkGetSemaphoreCounterValueKHR)StubGetSemaphoreCounterValueKHR; }
+    table->WaitSemaphoresKHR = (PFN_vkWaitSemaphoresKHR) gpa(device, "vkWaitSemaphoresKHR");
+    if (table->WaitSemaphoresKHR == nullptr) { table->WaitSemaphoresKHR = (PFN_vkWaitSemaphoresKHR)StubWaitSemaphoresKHR; }
+    table->SignalSemaphoreKHR = (PFN_vkSignalSemaphoreKHR) gpa(device, "vkSignalSemaphoreKHR");
+    if (table->SignalSemaphoreKHR == nullptr) { table->SignalSemaphoreKHR = (PFN_vkSignalSemaphoreKHR)StubSignalSemaphoreKHR; }
+    table->GetBufferDeviceAddressKHR = (PFN_vkGetBufferDeviceAddressKHR) gpa(device, "vkGetBufferDeviceAddressKHR");
+    if (table->GetBufferDeviceAddressKHR == nullptr) { table->GetBufferDeviceAddressKHR = (PFN_vkGetBufferDeviceAddressKHR)StubGetBufferDeviceAddressKHR; }
+    table->GetBufferOpaqueCaptureAddressKHR = (PFN_vkGetBufferOpaqueCaptureAddressKHR) gpa(device, "vkGetBufferOpaqueCaptureAddressKHR");
+    if (table->GetBufferOpaqueCaptureAddressKHR == nullptr) { table->GetBufferOpaqueCaptureAddressKHR = (PFN_vkGetBufferOpaqueCaptureAddressKHR)StubGetBufferOpaqueCaptureAddressKHR; }
+    table->GetDeviceMemoryOpaqueCaptureAddressKHR = (PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR) gpa(device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR");
+    if (table->GetDeviceMemoryOpaqueCaptureAddressKHR == nullptr) { table->GetDeviceMemoryOpaqueCaptureAddressKHR = (PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR)StubGetDeviceMemoryOpaqueCaptureAddressKHR; }
+    table->GetPipelineExecutablePropertiesKHR = (PFN_vkGetPipelineExecutablePropertiesKHR) gpa(device, "vkGetPipelineExecutablePropertiesKHR");
+    if (table->GetPipelineExecutablePropertiesKHR == nullptr) { table->GetPipelineExecutablePropertiesKHR = (PFN_vkGetPipelineExecutablePropertiesKHR)StubGetPipelineExecutablePropertiesKHR; }
+    table->GetPipelineExecutableStatisticsKHR = (PFN_vkGetPipelineExecutableStatisticsKHR) gpa(device, "vkGetPipelineExecutableStatisticsKHR");
+    if (table->GetPipelineExecutableStatisticsKHR == nullptr) { table->GetPipelineExecutableStatisticsKHR = (PFN_vkGetPipelineExecutableStatisticsKHR)StubGetPipelineExecutableStatisticsKHR; }
+    table->GetPipelineExecutableInternalRepresentationsKHR = (PFN_vkGetPipelineExecutableInternalRepresentationsKHR) gpa(device, "vkGetPipelineExecutableInternalRepresentationsKHR");
+    if (table->GetPipelineExecutableInternalRepresentationsKHR == nullptr) { table->GetPipelineExecutableInternalRepresentationsKHR = (PFN_vkGetPipelineExecutableInternalRepresentationsKHR)StubGetPipelineExecutableInternalRepresentationsKHR; }
+    table->DebugMarkerSetObjectTagEXT = (PFN_vkDebugMarkerSetObjectTagEXT) gpa(device, "vkDebugMarkerSetObjectTagEXT");
+    if (table->DebugMarkerSetObjectTagEXT == nullptr) { table->DebugMarkerSetObjectTagEXT = (PFN_vkDebugMarkerSetObjectTagEXT)StubDebugMarkerSetObjectTagEXT; }
+    table->DebugMarkerSetObjectNameEXT = (PFN_vkDebugMarkerSetObjectNameEXT) gpa(device, "vkDebugMarkerSetObjectNameEXT");
+    if (table->DebugMarkerSetObjectNameEXT == nullptr) { table->DebugMarkerSetObjectNameEXT = (PFN_vkDebugMarkerSetObjectNameEXT)StubDebugMarkerSetObjectNameEXT; }
+    table->CmdDebugMarkerBeginEXT = (PFN_vkCmdDebugMarkerBeginEXT) gpa(device, "vkCmdDebugMarkerBeginEXT");
+    if (table->CmdDebugMarkerBeginEXT == nullptr) { table->CmdDebugMarkerBeginEXT = (PFN_vkCmdDebugMarkerBeginEXT)StubCmdDebugMarkerBeginEXT; }
+    table->CmdDebugMarkerEndEXT = (PFN_vkCmdDebugMarkerEndEXT) gpa(device, "vkCmdDebugMarkerEndEXT");
+    if (table->CmdDebugMarkerEndEXT == nullptr) { table->CmdDebugMarkerEndEXT = (PFN_vkCmdDebugMarkerEndEXT)StubCmdDebugMarkerEndEXT; }
+    table->CmdDebugMarkerInsertEXT = (PFN_vkCmdDebugMarkerInsertEXT) gpa(device, "vkCmdDebugMarkerInsertEXT");
+    if (table->CmdDebugMarkerInsertEXT == nullptr) { table->CmdDebugMarkerInsertEXT = (PFN_vkCmdDebugMarkerInsertEXT)StubCmdDebugMarkerInsertEXT; }
+    table->CmdBindTransformFeedbackBuffersEXT = (PFN_vkCmdBindTransformFeedbackBuffersEXT) gpa(device, "vkCmdBindTransformFeedbackBuffersEXT");
+    if (table->CmdBindTransformFeedbackBuffersEXT == nullptr) { table->CmdBindTransformFeedbackBuffersEXT = (PFN_vkCmdBindTransformFeedbackBuffersEXT)StubCmdBindTransformFeedbackBuffersEXT; }
+    table->CmdBeginTransformFeedbackEXT = (PFN_vkCmdBeginTransformFeedbackEXT) gpa(device, "vkCmdBeginTransformFeedbackEXT");
+    if (table->CmdBeginTransformFeedbackEXT == nullptr) { table->CmdBeginTransformFeedbackEXT = (PFN_vkCmdBeginTransformFeedbackEXT)StubCmdBeginTransformFeedbackEXT; }
+    table->CmdEndTransformFeedbackEXT = (PFN_vkCmdEndTransformFeedbackEXT) gpa(device, "vkCmdEndTransformFeedbackEXT");
+    if (table->CmdEndTransformFeedbackEXT == nullptr) { table->CmdEndTransformFeedbackEXT = (PFN_vkCmdEndTransformFeedbackEXT)StubCmdEndTransformFeedbackEXT; }
+    table->CmdBeginQueryIndexedEXT = (PFN_vkCmdBeginQueryIndexedEXT) gpa(device, "vkCmdBeginQueryIndexedEXT");
+    if (table->CmdBeginQueryIndexedEXT == nullptr) { table->CmdBeginQueryIndexedEXT = (PFN_vkCmdBeginQueryIndexedEXT)StubCmdBeginQueryIndexedEXT; }
+    table->CmdEndQueryIndexedEXT = (PFN_vkCmdEndQueryIndexedEXT) gpa(device, "vkCmdEndQueryIndexedEXT");
+    if (table->CmdEndQueryIndexedEXT == nullptr) { table->CmdEndQueryIndexedEXT = (PFN_vkCmdEndQueryIndexedEXT)StubCmdEndQueryIndexedEXT; }
+    table->CmdDrawIndirectByteCountEXT = (PFN_vkCmdDrawIndirectByteCountEXT) gpa(device, "vkCmdDrawIndirectByteCountEXT");
+    if (table->CmdDrawIndirectByteCountEXT == nullptr) { table->CmdDrawIndirectByteCountEXT = (PFN_vkCmdDrawIndirectByteCountEXT)StubCmdDrawIndirectByteCountEXT; }
+    table->GetImageViewHandleNVX = (PFN_vkGetImageViewHandleNVX) gpa(device, "vkGetImageViewHandleNVX");
+    if (table->GetImageViewHandleNVX == nullptr) { table->GetImageViewHandleNVX = (PFN_vkGetImageViewHandleNVX)StubGetImageViewHandleNVX; }
+    table->CmdDrawIndirectCountAMD = (PFN_vkCmdDrawIndirectCountAMD) gpa(device, "vkCmdDrawIndirectCountAMD");
+    if (table->CmdDrawIndirectCountAMD == nullptr) { table->CmdDrawIndirectCountAMD = (PFN_vkCmdDrawIndirectCountAMD)StubCmdDrawIndirectCountAMD; }
+    table->CmdDrawIndexedIndirectCountAMD = (PFN_vkCmdDrawIndexedIndirectCountAMD) gpa(device, "vkCmdDrawIndexedIndirectCountAMD");
+    if (table->CmdDrawIndexedIndirectCountAMD == nullptr) { table->CmdDrawIndexedIndirectCountAMD = (PFN_vkCmdDrawIndexedIndirectCountAMD)StubCmdDrawIndexedIndirectCountAMD; }
+    table->GetShaderInfoAMD = (PFN_vkGetShaderInfoAMD) gpa(device, "vkGetShaderInfoAMD");
+    if (table->GetShaderInfoAMD == nullptr) { table->GetShaderInfoAMD = (PFN_vkGetShaderInfoAMD)StubGetShaderInfoAMD; }
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->GetMemoryWin32HandleNV = (PFN_vkGetMemoryWin32HandleNV) gpa(device, "vkGetMemoryWin32HandleNV");
+    if (table->GetMemoryWin32HandleNV == nullptr) { table->GetMemoryWin32HandleNV = (PFN_vkGetMemoryWin32HandleNV)StubGetMemoryWin32HandleNV; }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+    table->CmdBeginConditionalRenderingEXT = (PFN_vkCmdBeginConditionalRenderingEXT) gpa(device, "vkCmdBeginConditionalRenderingEXT");
+    if (table->CmdBeginConditionalRenderingEXT == nullptr) { table->CmdBeginConditionalRenderingEXT = (PFN_vkCmdBeginConditionalRenderingEXT)StubCmdBeginConditionalRenderingEXT; }
+    table->CmdEndConditionalRenderingEXT = (PFN_vkCmdEndConditionalRenderingEXT) gpa(device, "vkCmdEndConditionalRenderingEXT");
+    if (table->CmdEndConditionalRenderingEXT == nullptr) { table->CmdEndConditionalRenderingEXT = (PFN_vkCmdEndConditionalRenderingEXT)StubCmdEndConditionalRenderingEXT; }
+    table->CmdProcessCommandsNVX = (PFN_vkCmdProcessCommandsNVX) gpa(device, "vkCmdProcessCommandsNVX");
+    if (table->CmdProcessCommandsNVX == nullptr) { table->CmdProcessCommandsNVX = (PFN_vkCmdProcessCommandsNVX)StubCmdProcessCommandsNVX; }
+    table->CmdReserveSpaceForCommandsNVX = (PFN_vkCmdReserveSpaceForCommandsNVX) gpa(device, "vkCmdReserveSpaceForCommandsNVX");
+    if (table->CmdReserveSpaceForCommandsNVX == nullptr) { table->CmdReserveSpaceForCommandsNVX = (PFN_vkCmdReserveSpaceForCommandsNVX)StubCmdReserveSpaceForCommandsNVX; }
+    table->CreateIndirectCommandsLayoutNVX = (PFN_vkCreateIndirectCommandsLayoutNVX) gpa(device, "vkCreateIndirectCommandsLayoutNVX");
+    if (table->CreateIndirectCommandsLayoutNVX == nullptr) { table->CreateIndirectCommandsLayoutNVX = (PFN_vkCreateIndirectCommandsLayoutNVX)StubCreateIndirectCommandsLayoutNVX; }
+    table->DestroyIndirectCommandsLayoutNVX = (PFN_vkDestroyIndirectCommandsLayoutNVX) gpa(device, "vkDestroyIndirectCommandsLayoutNVX");
+    if (table->DestroyIndirectCommandsLayoutNVX == nullptr) { table->DestroyIndirectCommandsLayoutNVX = (PFN_vkDestroyIndirectCommandsLayoutNVX)StubDestroyIndirectCommandsLayoutNVX; }
+    table->CreateObjectTableNVX = (PFN_vkCreateObjectTableNVX) gpa(device, "vkCreateObjectTableNVX");
+    if (table->CreateObjectTableNVX == nullptr) { table->CreateObjectTableNVX = (PFN_vkCreateObjectTableNVX)StubCreateObjectTableNVX; }
+    table->DestroyObjectTableNVX = (PFN_vkDestroyObjectTableNVX) gpa(device, "vkDestroyObjectTableNVX");
+    if (table->DestroyObjectTableNVX == nullptr) { table->DestroyObjectTableNVX = (PFN_vkDestroyObjectTableNVX)StubDestroyObjectTableNVX; }
+    table->RegisterObjectsNVX = (PFN_vkRegisterObjectsNVX) gpa(device, "vkRegisterObjectsNVX");
+    if (table->RegisterObjectsNVX == nullptr) { table->RegisterObjectsNVX = (PFN_vkRegisterObjectsNVX)StubRegisterObjectsNVX; }
+    table->UnregisterObjectsNVX = (PFN_vkUnregisterObjectsNVX) gpa(device, "vkUnregisterObjectsNVX");
+    if (table->UnregisterObjectsNVX == nullptr) { table->UnregisterObjectsNVX = (PFN_vkUnregisterObjectsNVX)StubUnregisterObjectsNVX; }
+    table->CmdSetViewportWScalingNV = (PFN_vkCmdSetViewportWScalingNV) gpa(device, "vkCmdSetViewportWScalingNV");
+    if (table->CmdSetViewportWScalingNV == nullptr) { table->CmdSetViewportWScalingNV = (PFN_vkCmdSetViewportWScalingNV)StubCmdSetViewportWScalingNV; }
+    table->DisplayPowerControlEXT = (PFN_vkDisplayPowerControlEXT) gpa(device, "vkDisplayPowerControlEXT");
+    if (table->DisplayPowerControlEXT == nullptr) { table->DisplayPowerControlEXT = (PFN_vkDisplayPowerControlEXT)StubDisplayPowerControlEXT; }
+    table->RegisterDeviceEventEXT = (PFN_vkRegisterDeviceEventEXT) gpa(device, "vkRegisterDeviceEventEXT");
+    if (table->RegisterDeviceEventEXT == nullptr) { table->RegisterDeviceEventEXT = (PFN_vkRegisterDeviceEventEXT)StubRegisterDeviceEventEXT; }
+    table->RegisterDisplayEventEXT = (PFN_vkRegisterDisplayEventEXT) gpa(device, "vkRegisterDisplayEventEXT");
+    if (table->RegisterDisplayEventEXT == nullptr) { table->RegisterDisplayEventEXT = (PFN_vkRegisterDisplayEventEXT)StubRegisterDisplayEventEXT; }
+    table->GetSwapchainCounterEXT = (PFN_vkGetSwapchainCounterEXT) gpa(device, "vkGetSwapchainCounterEXT");
+    if (table->GetSwapchainCounterEXT == nullptr) { table->GetSwapchainCounterEXT = (PFN_vkGetSwapchainCounterEXT)StubGetSwapchainCounterEXT; }
+    table->GetRefreshCycleDurationGOOGLE = (PFN_vkGetRefreshCycleDurationGOOGLE) gpa(device, "vkGetRefreshCycleDurationGOOGLE");
+    if (table->GetRefreshCycleDurationGOOGLE == nullptr) { table->GetRefreshCycleDurationGOOGLE = (PFN_vkGetRefreshCycleDurationGOOGLE)StubGetRefreshCycleDurationGOOGLE; }
+    table->GetPastPresentationTimingGOOGLE = (PFN_vkGetPastPresentationTimingGOOGLE) gpa(device, "vkGetPastPresentationTimingGOOGLE");
+    if (table->GetPastPresentationTimingGOOGLE == nullptr) { table->GetPastPresentationTimingGOOGLE = (PFN_vkGetPastPresentationTimingGOOGLE)StubGetPastPresentationTimingGOOGLE; }
+    table->CmdSetDiscardRectangleEXT = (PFN_vkCmdSetDiscardRectangleEXT) gpa(device, "vkCmdSetDiscardRectangleEXT");
+    if (table->CmdSetDiscardRectangleEXT == nullptr) { table->CmdSetDiscardRectangleEXT = (PFN_vkCmdSetDiscardRectangleEXT)StubCmdSetDiscardRectangleEXT; }
+    table->SetHdrMetadataEXT = (PFN_vkSetHdrMetadataEXT) gpa(device, "vkSetHdrMetadataEXT");
+    if (table->SetHdrMetadataEXT == nullptr) { table->SetHdrMetadataEXT = (PFN_vkSetHdrMetadataEXT)StubSetHdrMetadataEXT; }
+    table->SetDebugUtilsObjectNameEXT = (PFN_vkSetDebugUtilsObjectNameEXT) gpa(device, "vkSetDebugUtilsObjectNameEXT");
+    if (table->SetDebugUtilsObjectNameEXT == nullptr) { table->SetDebugUtilsObjectNameEXT = (PFN_vkSetDebugUtilsObjectNameEXT)StubSetDebugUtilsObjectNameEXT; }
+    table->SetDebugUtilsObjectTagEXT = (PFN_vkSetDebugUtilsObjectTagEXT) gpa(device, "vkSetDebugUtilsObjectTagEXT");
+    if (table->SetDebugUtilsObjectTagEXT == nullptr) { table->SetDebugUtilsObjectTagEXT = (PFN_vkSetDebugUtilsObjectTagEXT)StubSetDebugUtilsObjectTagEXT; }
+    table->QueueBeginDebugUtilsLabelEXT = (PFN_vkQueueBeginDebugUtilsLabelEXT) gpa(device, "vkQueueBeginDebugUtilsLabelEXT");
+    if (table->QueueBeginDebugUtilsLabelEXT == nullptr) { table->QueueBeginDebugUtilsLabelEXT = (PFN_vkQueueBeginDebugUtilsLabelEXT)StubQueueBeginDebugUtilsLabelEXT; }
+    table->QueueEndDebugUtilsLabelEXT = (PFN_vkQueueEndDebugUtilsLabelEXT) gpa(device, "vkQueueEndDebugUtilsLabelEXT");
+    if (table->QueueEndDebugUtilsLabelEXT == nullptr) { table->QueueEndDebugUtilsLabelEXT = (PFN_vkQueueEndDebugUtilsLabelEXT)StubQueueEndDebugUtilsLabelEXT; }
+    table->QueueInsertDebugUtilsLabelEXT = (PFN_vkQueueInsertDebugUtilsLabelEXT) gpa(device, "vkQueueInsertDebugUtilsLabelEXT");
+    if (table->QueueInsertDebugUtilsLabelEXT == nullptr) { table->QueueInsertDebugUtilsLabelEXT = (PFN_vkQueueInsertDebugUtilsLabelEXT)StubQueueInsertDebugUtilsLabelEXT; }
+    table->CmdBeginDebugUtilsLabelEXT = (PFN_vkCmdBeginDebugUtilsLabelEXT) gpa(device, "vkCmdBeginDebugUtilsLabelEXT");
+    if (table->CmdBeginDebugUtilsLabelEXT == nullptr) { table->CmdBeginDebugUtilsLabelEXT = (PFN_vkCmdBeginDebugUtilsLabelEXT)StubCmdBeginDebugUtilsLabelEXT; }
+    table->CmdEndDebugUtilsLabelEXT = (PFN_vkCmdEndDebugUtilsLabelEXT) gpa(device, "vkCmdEndDebugUtilsLabelEXT");
+    if (table->CmdEndDebugUtilsLabelEXT == nullptr) { table->CmdEndDebugUtilsLabelEXT = (PFN_vkCmdEndDebugUtilsLabelEXT)StubCmdEndDebugUtilsLabelEXT; }
+    table->CmdInsertDebugUtilsLabelEXT = (PFN_vkCmdInsertDebugUtilsLabelEXT) gpa(device, "vkCmdInsertDebugUtilsLabelEXT");
+    if (table->CmdInsertDebugUtilsLabelEXT == nullptr) { table->CmdInsertDebugUtilsLabelEXT = (PFN_vkCmdInsertDebugUtilsLabelEXT)StubCmdInsertDebugUtilsLabelEXT; }
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    table->GetAndroidHardwareBufferPropertiesANDROID = (PFN_vkGetAndroidHardwareBufferPropertiesANDROID) gpa(device, "vkGetAndroidHardwareBufferPropertiesANDROID");
+    if (table->GetAndroidHardwareBufferPropertiesANDROID == nullptr) { table->GetAndroidHardwareBufferPropertiesANDROID = (PFN_vkGetAndroidHardwareBufferPropertiesANDROID)StubGetAndroidHardwareBufferPropertiesANDROID; }
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    table->GetMemoryAndroidHardwareBufferANDROID = (PFN_vkGetMemoryAndroidHardwareBufferANDROID) gpa(device, "vkGetMemoryAndroidHardwareBufferANDROID");
+    if (table->GetMemoryAndroidHardwareBufferANDROID == nullptr) { table->GetMemoryAndroidHardwareBufferANDROID = (PFN_vkGetMemoryAndroidHardwareBufferANDROID)StubGetMemoryAndroidHardwareBufferANDROID; }
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+    table->CmdSetSampleLocationsEXT = (PFN_vkCmdSetSampleLocationsEXT) gpa(device, "vkCmdSetSampleLocationsEXT");
+    if (table->CmdSetSampleLocationsEXT == nullptr) { table->CmdSetSampleLocationsEXT = (PFN_vkCmdSetSampleLocationsEXT)StubCmdSetSampleLocationsEXT; }
+    table->GetImageDrmFormatModifierPropertiesEXT = (PFN_vkGetImageDrmFormatModifierPropertiesEXT) gpa(device, "vkGetImageDrmFormatModifierPropertiesEXT");
+    if (table->GetImageDrmFormatModifierPropertiesEXT == nullptr) { table->GetImageDrmFormatModifierPropertiesEXT = (PFN_vkGetImageDrmFormatModifierPropertiesEXT)StubGetImageDrmFormatModifierPropertiesEXT; }
+    table->CreateValidationCacheEXT = (PFN_vkCreateValidationCacheEXT) gpa(device, "vkCreateValidationCacheEXT");
+    if (table->CreateValidationCacheEXT == nullptr) { table->CreateValidationCacheEXT = (PFN_vkCreateValidationCacheEXT)StubCreateValidationCacheEXT; }
+    table->DestroyValidationCacheEXT = (PFN_vkDestroyValidationCacheEXT) gpa(device, "vkDestroyValidationCacheEXT");
+    if (table->DestroyValidationCacheEXT == nullptr) { table->DestroyValidationCacheEXT = (PFN_vkDestroyValidationCacheEXT)StubDestroyValidationCacheEXT; }
+    table->MergeValidationCachesEXT = (PFN_vkMergeValidationCachesEXT) gpa(device, "vkMergeValidationCachesEXT");
+    if (table->MergeValidationCachesEXT == nullptr) { table->MergeValidationCachesEXT = (PFN_vkMergeValidationCachesEXT)StubMergeValidationCachesEXT; }
+    table->GetValidationCacheDataEXT = (PFN_vkGetValidationCacheDataEXT) gpa(device, "vkGetValidationCacheDataEXT");
+    if (table->GetValidationCacheDataEXT == nullptr) { table->GetValidationCacheDataEXT = (PFN_vkGetValidationCacheDataEXT)StubGetValidationCacheDataEXT; }
+    table->CmdBindShadingRateImageNV = (PFN_vkCmdBindShadingRateImageNV) gpa(device, "vkCmdBindShadingRateImageNV");
+    if (table->CmdBindShadingRateImageNV == nullptr) { table->CmdBindShadingRateImageNV = (PFN_vkCmdBindShadingRateImageNV)StubCmdBindShadingRateImageNV; }
+    table->CmdSetViewportShadingRatePaletteNV = (PFN_vkCmdSetViewportShadingRatePaletteNV) gpa(device, "vkCmdSetViewportShadingRatePaletteNV");
+    if (table->CmdSetViewportShadingRatePaletteNV == nullptr) { table->CmdSetViewportShadingRatePaletteNV = (PFN_vkCmdSetViewportShadingRatePaletteNV)StubCmdSetViewportShadingRatePaletteNV; }
+    table->CmdSetCoarseSampleOrderNV = (PFN_vkCmdSetCoarseSampleOrderNV) gpa(device, "vkCmdSetCoarseSampleOrderNV");
+    if (table->CmdSetCoarseSampleOrderNV == nullptr) { table->CmdSetCoarseSampleOrderNV = (PFN_vkCmdSetCoarseSampleOrderNV)StubCmdSetCoarseSampleOrderNV; }
+    table->CreateAccelerationStructureNV = (PFN_vkCreateAccelerationStructureNV) gpa(device, "vkCreateAccelerationStructureNV");
+    if (table->CreateAccelerationStructureNV == nullptr) { table->CreateAccelerationStructureNV = (PFN_vkCreateAccelerationStructureNV)StubCreateAccelerationStructureNV; }
+    table->DestroyAccelerationStructureNV = (PFN_vkDestroyAccelerationStructureNV) gpa(device, "vkDestroyAccelerationStructureNV");
+    if (table->DestroyAccelerationStructureNV == nullptr) { table->DestroyAccelerationStructureNV = (PFN_vkDestroyAccelerationStructureNV)StubDestroyAccelerationStructureNV; }
+    table->GetAccelerationStructureMemoryRequirementsNV = (PFN_vkGetAccelerationStructureMemoryRequirementsNV) gpa(device, "vkGetAccelerationStructureMemoryRequirementsNV");
+    if (table->GetAccelerationStructureMemoryRequirementsNV == nullptr) { table->GetAccelerationStructureMemoryRequirementsNV = (PFN_vkGetAccelerationStructureMemoryRequirementsNV)StubGetAccelerationStructureMemoryRequirementsNV; }
+    table->BindAccelerationStructureMemoryNV = (PFN_vkBindAccelerationStructureMemoryNV) gpa(device, "vkBindAccelerationStructureMemoryNV");
+    if (table->BindAccelerationStructureMemoryNV == nullptr) { table->BindAccelerationStructureMemoryNV = (PFN_vkBindAccelerationStructureMemoryNV)StubBindAccelerationStructureMemoryNV; }
+    table->CmdBuildAccelerationStructureNV = (PFN_vkCmdBuildAccelerationStructureNV) gpa(device, "vkCmdBuildAccelerationStructureNV");
+    if (table->CmdBuildAccelerationStructureNV == nullptr) { table->CmdBuildAccelerationStructureNV = (PFN_vkCmdBuildAccelerationStructureNV)StubCmdBuildAccelerationStructureNV; }
+    table->CmdCopyAccelerationStructureNV = (PFN_vkCmdCopyAccelerationStructureNV) gpa(device, "vkCmdCopyAccelerationStructureNV");
+    if (table->CmdCopyAccelerationStructureNV == nullptr) { table->CmdCopyAccelerationStructureNV = (PFN_vkCmdCopyAccelerationStructureNV)StubCmdCopyAccelerationStructureNV; }
+    table->CmdTraceRaysNV = (PFN_vkCmdTraceRaysNV) gpa(device, "vkCmdTraceRaysNV");
+    if (table->CmdTraceRaysNV == nullptr) { table->CmdTraceRaysNV = (PFN_vkCmdTraceRaysNV)StubCmdTraceRaysNV; }
+    table->CreateRayTracingPipelinesNV = (PFN_vkCreateRayTracingPipelinesNV) gpa(device, "vkCreateRayTracingPipelinesNV");
+    if (table->CreateRayTracingPipelinesNV == nullptr) { table->CreateRayTracingPipelinesNV = (PFN_vkCreateRayTracingPipelinesNV)StubCreateRayTracingPipelinesNV; }
+    table->GetRayTracingShaderGroupHandlesNV = (PFN_vkGetRayTracingShaderGroupHandlesNV) gpa(device, "vkGetRayTracingShaderGroupHandlesNV");
+    if (table->GetRayTracingShaderGroupHandlesNV == nullptr) { table->GetRayTracingShaderGroupHandlesNV = (PFN_vkGetRayTracingShaderGroupHandlesNV)StubGetRayTracingShaderGroupHandlesNV; }
+    table->GetAccelerationStructureHandleNV = (PFN_vkGetAccelerationStructureHandleNV) gpa(device, "vkGetAccelerationStructureHandleNV");
+    if (table->GetAccelerationStructureHandleNV == nullptr) { table->GetAccelerationStructureHandleNV = (PFN_vkGetAccelerationStructureHandleNV)StubGetAccelerationStructureHandleNV; }
+    table->CmdWriteAccelerationStructuresPropertiesNV = (PFN_vkCmdWriteAccelerationStructuresPropertiesNV) gpa(device, "vkCmdWriteAccelerationStructuresPropertiesNV");
+    if (table->CmdWriteAccelerationStructuresPropertiesNV == nullptr) { table->CmdWriteAccelerationStructuresPropertiesNV = (PFN_vkCmdWriteAccelerationStructuresPropertiesNV)StubCmdWriteAccelerationStructuresPropertiesNV; }
+    table->CompileDeferredNV = (PFN_vkCompileDeferredNV) gpa(device, "vkCompileDeferredNV");
+    if (table->CompileDeferredNV == nullptr) { table->CompileDeferredNV = (PFN_vkCompileDeferredNV)StubCompileDeferredNV; }
+    table->GetMemoryHostPointerPropertiesEXT = (PFN_vkGetMemoryHostPointerPropertiesEXT) gpa(device, "vkGetMemoryHostPointerPropertiesEXT");
+    if (table->GetMemoryHostPointerPropertiesEXT == nullptr) { table->GetMemoryHostPointerPropertiesEXT = (PFN_vkGetMemoryHostPointerPropertiesEXT)StubGetMemoryHostPointerPropertiesEXT; }
+    table->CmdWriteBufferMarkerAMD = (PFN_vkCmdWriteBufferMarkerAMD) gpa(device, "vkCmdWriteBufferMarkerAMD");
+    if (table->CmdWriteBufferMarkerAMD == nullptr) { table->CmdWriteBufferMarkerAMD = (PFN_vkCmdWriteBufferMarkerAMD)StubCmdWriteBufferMarkerAMD; }
+    table->GetCalibratedTimestampsEXT = (PFN_vkGetCalibratedTimestampsEXT) gpa(device, "vkGetCalibratedTimestampsEXT");
+    if (table->GetCalibratedTimestampsEXT == nullptr) { table->GetCalibratedTimestampsEXT = (PFN_vkGetCalibratedTimestampsEXT)StubGetCalibratedTimestampsEXT; }
+    table->CmdDrawMeshTasksNV = (PFN_vkCmdDrawMeshTasksNV) gpa(device, "vkCmdDrawMeshTasksNV");
+    if (table->CmdDrawMeshTasksNV == nullptr) { table->CmdDrawMeshTasksNV = (PFN_vkCmdDrawMeshTasksNV)StubCmdDrawMeshTasksNV; }
+    table->CmdDrawMeshTasksIndirectNV = (PFN_vkCmdDrawMeshTasksIndirectNV) gpa(device, "vkCmdDrawMeshTasksIndirectNV");
+    if (table->CmdDrawMeshTasksIndirectNV == nullptr) { table->CmdDrawMeshTasksIndirectNV = (PFN_vkCmdDrawMeshTasksIndirectNV)StubCmdDrawMeshTasksIndirectNV; }
+    table->CmdDrawMeshTasksIndirectCountNV = (PFN_vkCmdDrawMeshTasksIndirectCountNV) gpa(device, "vkCmdDrawMeshTasksIndirectCountNV");
+    if (table->CmdDrawMeshTasksIndirectCountNV == nullptr) { table->CmdDrawMeshTasksIndirectCountNV = (PFN_vkCmdDrawMeshTasksIndirectCountNV)StubCmdDrawMeshTasksIndirectCountNV; }
+    table->CmdSetExclusiveScissorNV = (PFN_vkCmdSetExclusiveScissorNV) gpa(device, "vkCmdSetExclusiveScissorNV");
+    if (table->CmdSetExclusiveScissorNV == nullptr) { table->CmdSetExclusiveScissorNV = (PFN_vkCmdSetExclusiveScissorNV)StubCmdSetExclusiveScissorNV; }
+    table->CmdSetCheckpointNV = (PFN_vkCmdSetCheckpointNV) gpa(device, "vkCmdSetCheckpointNV");
+    if (table->CmdSetCheckpointNV == nullptr) { table->CmdSetCheckpointNV = (PFN_vkCmdSetCheckpointNV)StubCmdSetCheckpointNV; }
+    table->GetQueueCheckpointDataNV = (PFN_vkGetQueueCheckpointDataNV) gpa(device, "vkGetQueueCheckpointDataNV");
+    if (table->GetQueueCheckpointDataNV == nullptr) { table->GetQueueCheckpointDataNV = (PFN_vkGetQueueCheckpointDataNV)StubGetQueueCheckpointDataNV; }
+    table->InitializePerformanceApiINTEL = (PFN_vkInitializePerformanceApiINTEL) gpa(device, "vkInitializePerformanceApiINTEL");
+    if (table->InitializePerformanceApiINTEL == nullptr) { table->InitializePerformanceApiINTEL = (PFN_vkInitializePerformanceApiINTEL)StubInitializePerformanceApiINTEL; }
+    table->UninitializePerformanceApiINTEL = (PFN_vkUninitializePerformanceApiINTEL) gpa(device, "vkUninitializePerformanceApiINTEL");
+    if (table->UninitializePerformanceApiINTEL == nullptr) { table->UninitializePerformanceApiINTEL = (PFN_vkUninitializePerformanceApiINTEL)StubUninitializePerformanceApiINTEL; }
+    table->CmdSetPerformanceMarkerINTEL = (PFN_vkCmdSetPerformanceMarkerINTEL) gpa(device, "vkCmdSetPerformanceMarkerINTEL");
+    if (table->CmdSetPerformanceMarkerINTEL == nullptr) { table->CmdSetPerformanceMarkerINTEL = (PFN_vkCmdSetPerformanceMarkerINTEL)StubCmdSetPerformanceMarkerINTEL; }
+    table->CmdSetPerformanceStreamMarkerINTEL = (PFN_vkCmdSetPerformanceStreamMarkerINTEL) gpa(device, "vkCmdSetPerformanceStreamMarkerINTEL");
+    if (table->CmdSetPerformanceStreamMarkerINTEL == nullptr) { table->CmdSetPerformanceStreamMarkerINTEL = (PFN_vkCmdSetPerformanceStreamMarkerINTEL)StubCmdSetPerformanceStreamMarkerINTEL; }
+    table->CmdSetPerformanceOverrideINTEL = (PFN_vkCmdSetPerformanceOverrideINTEL) gpa(device, "vkCmdSetPerformanceOverrideINTEL");
+    if (table->CmdSetPerformanceOverrideINTEL == nullptr) { table->CmdSetPerformanceOverrideINTEL = (PFN_vkCmdSetPerformanceOverrideINTEL)StubCmdSetPerformanceOverrideINTEL; }
+    table->AcquirePerformanceConfigurationINTEL = (PFN_vkAcquirePerformanceConfigurationINTEL) gpa(device, "vkAcquirePerformanceConfigurationINTEL");
+    if (table->AcquirePerformanceConfigurationINTEL == nullptr) { table->AcquirePerformanceConfigurationINTEL = (PFN_vkAcquirePerformanceConfigurationINTEL)StubAcquirePerformanceConfigurationINTEL; }
+    table->ReleasePerformanceConfigurationINTEL = (PFN_vkReleasePerformanceConfigurationINTEL) gpa(device, "vkReleasePerformanceConfigurationINTEL");
+    if (table->ReleasePerformanceConfigurationINTEL == nullptr) { table->ReleasePerformanceConfigurationINTEL = (PFN_vkReleasePerformanceConfigurationINTEL)StubReleasePerformanceConfigurationINTEL; }
+    table->QueueSetPerformanceConfigurationINTEL = (PFN_vkQueueSetPerformanceConfigurationINTEL) gpa(device, "vkQueueSetPerformanceConfigurationINTEL");
+    if (table->QueueSetPerformanceConfigurationINTEL == nullptr) { table->QueueSetPerformanceConfigurationINTEL = (PFN_vkQueueSetPerformanceConfigurationINTEL)StubQueueSetPerformanceConfigurationINTEL; }
+    table->GetPerformanceParameterINTEL = (PFN_vkGetPerformanceParameterINTEL) gpa(device, "vkGetPerformanceParameterINTEL");
+    if (table->GetPerformanceParameterINTEL == nullptr) { table->GetPerformanceParameterINTEL = (PFN_vkGetPerformanceParameterINTEL)StubGetPerformanceParameterINTEL; }
+    table->SetLocalDimmingAMD = (PFN_vkSetLocalDimmingAMD) gpa(device, "vkSetLocalDimmingAMD");
+    if (table->SetLocalDimmingAMD == nullptr) { table->SetLocalDimmingAMD = (PFN_vkSetLocalDimmingAMD)StubSetLocalDimmingAMD; }
+    table->GetBufferDeviceAddressEXT = (PFN_vkGetBufferDeviceAddressEXT) gpa(device, "vkGetBufferDeviceAddressEXT");
+    if (table->GetBufferDeviceAddressEXT == nullptr) { table->GetBufferDeviceAddressEXT = (PFN_vkGetBufferDeviceAddressEXT)StubGetBufferDeviceAddressEXT; }
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->AcquireFullScreenExclusiveModeEXT = (PFN_vkAcquireFullScreenExclusiveModeEXT) gpa(device, "vkAcquireFullScreenExclusiveModeEXT");
+    if (table->AcquireFullScreenExclusiveModeEXT == nullptr) { table->AcquireFullScreenExclusiveModeEXT = (PFN_vkAcquireFullScreenExclusiveModeEXT)StubAcquireFullScreenExclusiveModeEXT; }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->ReleaseFullScreenExclusiveModeEXT = (PFN_vkReleaseFullScreenExclusiveModeEXT) gpa(device, "vkReleaseFullScreenExclusiveModeEXT");
+    if (table->ReleaseFullScreenExclusiveModeEXT == nullptr) { table->ReleaseFullScreenExclusiveModeEXT = (PFN_vkReleaseFullScreenExclusiveModeEXT)StubReleaseFullScreenExclusiveModeEXT; }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->GetDeviceGroupSurfacePresentModes2EXT = (PFN_vkGetDeviceGroupSurfacePresentModes2EXT) gpa(device, "vkGetDeviceGroupSurfacePresentModes2EXT");
+    if (table->GetDeviceGroupSurfacePresentModes2EXT == nullptr) { table->GetDeviceGroupSurfacePresentModes2EXT = (PFN_vkGetDeviceGroupSurfacePresentModes2EXT)StubGetDeviceGroupSurfacePresentModes2EXT; }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+    table->CmdSetLineStippleEXT = (PFN_vkCmdSetLineStippleEXT) gpa(device, "vkCmdSetLineStippleEXT");
+    if (table->CmdSetLineStippleEXT == nullptr) { table->CmdSetLineStippleEXT = (PFN_vkCmdSetLineStippleEXT)StubCmdSetLineStippleEXT; }
+    table->ResetQueryPoolEXT = (PFN_vkResetQueryPoolEXT) gpa(device, "vkResetQueryPoolEXT");
+    if (table->ResetQueryPoolEXT == nullptr) { table->ResetQueryPoolEXT = (PFN_vkResetQueryPoolEXT)StubResetQueryPoolEXT; }
+}
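
The generated initializer above resolves every device-level entry point through the supplied vkGetDeviceProcAddr and, for commands a driver or lower layer may not expose (extension and post-1.0 core entry points), falls back to a no-op stub so callers can always dispatch through the table without null checks. The following is a minimal sketch of that resolve-or-stub pattern for a single command; the table type and helper names are illustrative stand-ins, not the generated ones, and it only shows the shape of the pattern.

    // Resolve-or-stub sketch (illustrative names; assumes the Vulkan headers).
    #include <vulkan/vulkan.h>

    static VKAPI_ATTR void VKAPI_CALL StubTrimCommandPool(VkDevice, VkCommandPool,
                                                          VkCommandPoolTrimFlags) {}

    struct MiniDeviceTable {
        PFN_vkTrimCommandPool TrimCommandPool = nullptr;
    };

    static void init_mini_table(VkDevice device, MiniDeviceTable* table,
                                PFN_vkGetDeviceProcAddr gdpa) {
        // Ask the next layer/driver for the entry point...
        table->TrimCommandPool =
            reinterpret_cast<PFN_vkTrimCommandPool>(gdpa(device, "vkTrimCommandPool"));
        // ...and install a harmless no-op if it is not exposed, so the call
        // site never has to test the pointer before dispatching.
        if (table->TrimCommandPool == nullptr) {
            table->TrimCommandPool = StubTrimCommandPool;
        }
    }
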
+
+
+static inline void layer_init_instance_dispatch_table(VkInstance instance, VkLayerInstanceDispatchTable *table, PFN_vkGetInstanceProcAddr gpa) {
+    memset(table, 0, sizeof(*table));
+    // Instance function pointers
+    table->DestroyInstance = (PFN_vkDestroyInstance) gpa(instance, "vkDestroyInstance");
+    table->EnumeratePhysicalDevices = (PFN_vkEnumeratePhysicalDevices) gpa(instance, "vkEnumeratePhysicalDevices");
+    table->GetPhysicalDeviceFeatures = (PFN_vkGetPhysicalDeviceFeatures) gpa(instance, "vkGetPhysicalDeviceFeatures");
+    table->GetPhysicalDeviceFormatProperties = (PFN_vkGetPhysicalDeviceFormatProperties) gpa(instance, "vkGetPhysicalDeviceFormatProperties");
+    table->GetPhysicalDeviceImageFormatProperties = (PFN_vkGetPhysicalDeviceImageFormatProperties) gpa(instance, "vkGetPhysicalDeviceImageFormatProperties");
+    table->GetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties) gpa(instance, "vkGetPhysicalDeviceProperties");
+    table->GetPhysicalDeviceQueueFamilyProperties = (PFN_vkGetPhysicalDeviceQueueFamilyProperties) gpa(instance, "vkGetPhysicalDeviceQueueFamilyProperties");
+    table->GetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties) gpa(instance, "vkGetPhysicalDeviceMemoryProperties");
+    table->GetInstanceProcAddr = gpa;
+    table->EnumerateDeviceExtensionProperties = (PFN_vkEnumerateDeviceExtensionProperties) gpa(instance, "vkEnumerateDeviceExtensionProperties");
+    table->EnumerateDeviceLayerProperties = (PFN_vkEnumerateDeviceLayerProperties) gpa(instance, "vkEnumerateDeviceLayerProperties");
+    table->GetPhysicalDeviceSparseImageFormatProperties = (PFN_vkGetPhysicalDeviceSparseImageFormatProperties) gpa(instance, "vkGetPhysicalDeviceSparseImageFormatProperties");
+    table->EnumeratePhysicalDeviceGroups = (PFN_vkEnumeratePhysicalDeviceGroups) gpa(instance, "vkEnumeratePhysicalDeviceGroups");
+    table->GetPhysicalDeviceFeatures2 = (PFN_vkGetPhysicalDeviceFeatures2) gpa(instance, "vkGetPhysicalDeviceFeatures2");
+    table->GetPhysicalDeviceProperties2 = (PFN_vkGetPhysicalDeviceProperties2) gpa(instance, "vkGetPhysicalDeviceProperties2");
+    table->GetPhysicalDeviceFormatProperties2 = (PFN_vkGetPhysicalDeviceFormatProperties2) gpa(instance, "vkGetPhysicalDeviceFormatProperties2");
+    table->GetPhysicalDeviceImageFormatProperties2 = (PFN_vkGetPhysicalDeviceImageFormatProperties2) gpa(instance, "vkGetPhysicalDeviceImageFormatProperties2");
+    table->GetPhysicalDeviceQueueFamilyProperties2 = (PFN_vkGetPhysicalDeviceQueueFamilyProperties2) gpa(instance, "vkGetPhysicalDeviceQueueFamilyProperties2");
+    table->GetPhysicalDeviceMemoryProperties2 = (PFN_vkGetPhysicalDeviceMemoryProperties2) gpa(instance, "vkGetPhysicalDeviceMemoryProperties2");
+    table->GetPhysicalDeviceSparseImageFormatProperties2 = (PFN_vkGetPhysicalDeviceSparseImageFormatProperties2) gpa(instance, "vkGetPhysicalDeviceSparseImageFormatProperties2");
+    table->GetPhysicalDeviceExternalBufferProperties = (PFN_vkGetPhysicalDeviceExternalBufferProperties) gpa(instance, "vkGetPhysicalDeviceExternalBufferProperties");
+    table->GetPhysicalDeviceExternalFenceProperties = (PFN_vkGetPhysicalDeviceExternalFenceProperties) gpa(instance, "vkGetPhysicalDeviceExternalFenceProperties");
+    table->GetPhysicalDeviceExternalSemaphoreProperties = (PFN_vkGetPhysicalDeviceExternalSemaphoreProperties) gpa(instance, "vkGetPhysicalDeviceExternalSemaphoreProperties");
+    table->DestroySurfaceKHR = (PFN_vkDestroySurfaceKHR) gpa(instance, "vkDestroySurfaceKHR");
+    table->GetPhysicalDeviceSurfaceSupportKHR = (PFN_vkGetPhysicalDeviceSurfaceSupportKHR) gpa(instance, "vkGetPhysicalDeviceSurfaceSupportKHR");
+    table->GetPhysicalDeviceSurfaceCapabilitiesKHR = (PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR) gpa(instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR");
+    table->GetPhysicalDeviceSurfaceFormatsKHR = (PFN_vkGetPhysicalDeviceSurfaceFormatsKHR) gpa(instance, "vkGetPhysicalDeviceSurfaceFormatsKHR");
+    table->GetPhysicalDeviceSurfacePresentModesKHR = (PFN_vkGetPhysicalDeviceSurfacePresentModesKHR) gpa(instance, "vkGetPhysicalDeviceSurfacePresentModesKHR");
+    table->GetPhysicalDevicePresentRectanglesKHR = (PFN_vkGetPhysicalDevicePresentRectanglesKHR) gpa(instance, "vkGetPhysicalDevicePresentRectanglesKHR");
+    table->GetPhysicalDeviceDisplayPropertiesKHR = (PFN_vkGetPhysicalDeviceDisplayPropertiesKHR) gpa(instance, "vkGetPhysicalDeviceDisplayPropertiesKHR");
+    table->GetPhysicalDeviceDisplayPlanePropertiesKHR = (PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR) gpa(instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR");
+    table->GetDisplayPlaneSupportedDisplaysKHR = (PFN_vkGetDisplayPlaneSupportedDisplaysKHR) gpa(instance, "vkGetDisplayPlaneSupportedDisplaysKHR");
+    table->GetDisplayModePropertiesKHR = (PFN_vkGetDisplayModePropertiesKHR) gpa(instance, "vkGetDisplayModePropertiesKHR");
+    table->CreateDisplayModeKHR = (PFN_vkCreateDisplayModeKHR) gpa(instance, "vkCreateDisplayModeKHR");
+    table->GetDisplayPlaneCapabilitiesKHR = (PFN_vkGetDisplayPlaneCapabilitiesKHR) gpa(instance, "vkGetDisplayPlaneCapabilitiesKHR");
+    table->CreateDisplayPlaneSurfaceKHR = (PFN_vkCreateDisplayPlaneSurfaceKHR) gpa(instance, "vkCreateDisplayPlaneSurfaceKHR");
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    table->CreateXlibSurfaceKHR = (PFN_vkCreateXlibSurfaceKHR) gpa(instance, "vkCreateXlibSurfaceKHR");
+#endif // VK_USE_PLATFORM_XLIB_KHR
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    table->GetPhysicalDeviceXlibPresentationSupportKHR = (PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR");
+#endif // VK_USE_PLATFORM_XLIB_KHR
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    table->CreateXcbSurfaceKHR = (PFN_vkCreateXcbSurfaceKHR) gpa(instance, "vkCreateXcbSurfaceKHR");
+#endif // VK_USE_PLATFORM_XCB_KHR
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    table->GetPhysicalDeviceXcbPresentationSupportKHR = (PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR");
+#endif // VK_USE_PLATFORM_XCB_KHR
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    table->CreateWaylandSurfaceKHR = (PFN_vkCreateWaylandSurfaceKHR) gpa(instance, "vkCreateWaylandSurfaceKHR");
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    table->GetPhysicalDeviceWaylandPresentationSupportKHR = (PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR");
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    table->CreateAndroidSurfaceKHR = (PFN_vkCreateAndroidSurfaceKHR) gpa(instance, "vkCreateAndroidSurfaceKHR");
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->CreateWin32SurfaceKHR = (PFN_vkCreateWin32SurfaceKHR) gpa(instance, "vkCreateWin32SurfaceKHR");
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->GetPhysicalDeviceWin32PresentationSupportKHR = (PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR");
+#endif // VK_USE_PLATFORM_WIN32_KHR
+    table->GetPhysicalDeviceFeatures2KHR = (PFN_vkGetPhysicalDeviceFeatures2KHR) gpa(instance, "vkGetPhysicalDeviceFeatures2KHR");
+    table->GetPhysicalDeviceProperties2KHR = (PFN_vkGetPhysicalDeviceProperties2KHR) gpa(instance, "vkGetPhysicalDeviceProperties2KHR");
+    table->GetPhysicalDeviceFormatProperties2KHR = (PFN_vkGetPhysicalDeviceFormatProperties2KHR) gpa(instance, "vkGetPhysicalDeviceFormatProperties2KHR");
+    table->GetPhysicalDeviceImageFormatProperties2KHR = (PFN_vkGetPhysicalDeviceImageFormatProperties2KHR) gpa(instance, "vkGetPhysicalDeviceImageFormatProperties2KHR");
+    table->GetPhysicalDeviceQueueFamilyProperties2KHR = (PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR) gpa(instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR");
+    table->GetPhysicalDeviceMemoryProperties2KHR = (PFN_vkGetPhysicalDeviceMemoryProperties2KHR) gpa(instance, "vkGetPhysicalDeviceMemoryProperties2KHR");
+    table->GetPhysicalDeviceSparseImageFormatProperties2KHR = (PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR) gpa(instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR");
+    table->EnumeratePhysicalDeviceGroupsKHR = (PFN_vkEnumeratePhysicalDeviceGroupsKHR) gpa(instance, "vkEnumeratePhysicalDeviceGroupsKHR");
+    table->GetPhysicalDeviceExternalBufferPropertiesKHR = (PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR) gpa(instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR");
+    table->GetPhysicalDeviceExternalSemaphorePropertiesKHR = (PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR) gpa(instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR");
+    table->GetPhysicalDeviceExternalFencePropertiesKHR = (PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR) gpa(instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR");
+    table->EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = (PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR) gpa(instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR");
+    table->GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = (PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR) gpa(instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR");
+    table->GetPhysicalDeviceSurfaceCapabilities2KHR = (PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR) gpa(instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR");
+    table->GetPhysicalDeviceSurfaceFormats2KHR = (PFN_vkGetPhysicalDeviceSurfaceFormats2KHR) gpa(instance, "vkGetPhysicalDeviceSurfaceFormats2KHR");
+    table->GetPhysicalDeviceDisplayProperties2KHR = (PFN_vkGetPhysicalDeviceDisplayProperties2KHR) gpa(instance, "vkGetPhysicalDeviceDisplayProperties2KHR");
+    table->GetPhysicalDeviceDisplayPlaneProperties2KHR = (PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR) gpa(instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR");
+    table->GetDisplayModeProperties2KHR = (PFN_vkGetDisplayModeProperties2KHR) gpa(instance, "vkGetDisplayModeProperties2KHR");
+    table->GetDisplayPlaneCapabilities2KHR = (PFN_vkGetDisplayPlaneCapabilities2KHR) gpa(instance, "vkGetDisplayPlaneCapabilities2KHR");
+    table->CreateDebugReportCallbackEXT = (PFN_vkCreateDebugReportCallbackEXT) gpa(instance, "vkCreateDebugReportCallbackEXT");
+    table->DestroyDebugReportCallbackEXT = (PFN_vkDestroyDebugReportCallbackEXT) gpa(instance, "vkDestroyDebugReportCallbackEXT");
+    table->DebugReportMessageEXT = (PFN_vkDebugReportMessageEXT) gpa(instance, "vkDebugReportMessageEXT");
+#ifdef VK_USE_PLATFORM_GGP
+    table->CreateStreamDescriptorSurfaceGGP = (PFN_vkCreateStreamDescriptorSurfaceGGP) gpa(instance, "vkCreateStreamDescriptorSurfaceGGP");
+#endif // VK_USE_PLATFORM_GGP
+    table->GetPhysicalDeviceExternalImageFormatPropertiesNV = (PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV) gpa(instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV");
+#ifdef VK_USE_PLATFORM_VI_NN
+    table->CreateViSurfaceNN = (PFN_vkCreateViSurfaceNN) gpa(instance, "vkCreateViSurfaceNN");
+#endif // VK_USE_PLATFORM_VI_NN
+    table->GetPhysicalDeviceGeneratedCommandsPropertiesNVX = (PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX) gpa(instance, "vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX");
+    table->ReleaseDisplayEXT = (PFN_vkReleaseDisplayEXT) gpa(instance, "vkReleaseDisplayEXT");
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    table->AcquireXlibDisplayEXT = (PFN_vkAcquireXlibDisplayEXT) gpa(instance, "vkAcquireXlibDisplayEXT");
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    table->GetRandROutputDisplayEXT = (PFN_vkGetRandROutputDisplayEXT) gpa(instance, "vkGetRandROutputDisplayEXT");
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    table->GetPhysicalDeviceSurfaceCapabilities2EXT = (PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT) gpa(instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT");
+#ifdef VK_USE_PLATFORM_IOS_MVK
+    table->CreateIOSSurfaceMVK = (PFN_vkCreateIOSSurfaceMVK) gpa(instance, "vkCreateIOSSurfaceMVK");
+#endif // VK_USE_PLATFORM_IOS_MVK
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+    table->CreateMacOSSurfaceMVK = (PFN_vkCreateMacOSSurfaceMVK) gpa(instance, "vkCreateMacOSSurfaceMVK");
+#endif // VK_USE_PLATFORM_MACOS_MVK
+    table->CreateDebugUtilsMessengerEXT = (PFN_vkCreateDebugUtilsMessengerEXT) gpa(instance, "vkCreateDebugUtilsMessengerEXT");
+    table->DestroyDebugUtilsMessengerEXT = (PFN_vkDestroyDebugUtilsMessengerEXT) gpa(instance, "vkDestroyDebugUtilsMessengerEXT");
+    table->SubmitDebugUtilsMessageEXT = (PFN_vkSubmitDebugUtilsMessageEXT) gpa(instance, "vkSubmitDebugUtilsMessageEXT");
+    table->GetPhysicalDeviceMultisamplePropertiesEXT = (PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT) gpa(instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT");
+    table->GetPhysicalDeviceCalibrateableTimeDomainsEXT = (PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT) gpa(instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT");
+#ifdef VK_USE_PLATFORM_FUCHSIA
+    table->CreateImagePipeSurfaceFUCHSIA = (PFN_vkCreateImagePipeSurfaceFUCHSIA) gpa(instance, "vkCreateImagePipeSurfaceFUCHSIA");
+#endif // VK_USE_PLATFORM_FUCHSIA
+#ifdef VK_USE_PLATFORM_METAL_EXT
+    table->CreateMetalSurfaceEXT = (PFN_vkCreateMetalSurfaceEXT) gpa(instance, "vkCreateMetalSurfaceEXT");
+#endif // VK_USE_PLATFORM_METAL_EXT
+    table->GetPhysicalDeviceToolPropertiesEXT = (PFN_vkGetPhysicalDeviceToolPropertiesEXT) gpa(instance, "vkGetPhysicalDeviceToolPropertiesEXT");
+    table->GetPhysicalDeviceCooperativeMatrixPropertiesNV = (PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV) gpa(instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV");
+    table->GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = (PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV) gpa(instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV");
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    table->GetPhysicalDeviceSurfacePresentModes2EXT = (PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT) gpa(instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT");
+#endif // VK_USE_PLATFORM_WIN32_KHR
+    table->CreateHeadlessSurfaceEXT = (PFN_vkCreateHeadlessSurfaceEXT) gpa(instance, "vkCreateHeadlessSurfaceEXT");
+}
diff --git a/src/third_party/vulkan-validation-layers/src/layers/generated/vk_enum_string_helper.h b/src/third_party/vulkan-validation-layers/src/layers/generated/vk_enum_string_helper.h
new file mode 100644
index 0000000..b54b9b8
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/generated/vk_enum_string_helper.h
@@ -0,0 +1,6233 @@
+// *** THIS FILE IS GENERATED - DO NOT EDIT ***
+// See helper_file_generator.py for modifications
+
+
+/***************************************************************************
+ *
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
+ * Author: Tobin Ehlis <tobine@google.com>
+ * Author: Chris Forbes <chrisforbes@google.com>
+ * Author: John Zulauf <jzulauf@lunarg.com>
+ *
+ ****************************************************************************/
+
+
+#pragma once
+#ifdef _WIN32
+#pragma warning( disable : 4065 )
+#endif
+
+#include <string>
+#include <vulkan/vulkan.h>
+
+
+static inline const char* string_VkPipelineCacheHeaderVersion(VkPipelineCacheHeaderVersion input_value)
+{
+    switch ((VkPipelineCacheHeaderVersion)input_value)
+    {
+        case VK_PIPELINE_CACHE_HEADER_VERSION_ONE:
+            return "VK_PIPELINE_CACHE_HEADER_VERSION_ONE";
+        default:
+            return "Unhandled VkPipelineCacheHeaderVersion";
+    }
+}
+
+static inline const char* string_VkResult(VkResult input_value)
+{
+    switch ((VkResult)input_value)
+    {
+        case VK_ERROR_DEVICE_LOST:
+            return "VK_ERROR_DEVICE_LOST";
+        case VK_ERROR_EXTENSION_NOT_PRESENT:
+            return "VK_ERROR_EXTENSION_NOT_PRESENT";
+        case VK_ERROR_FEATURE_NOT_PRESENT:
+            return "VK_ERROR_FEATURE_NOT_PRESENT";
+        case VK_ERROR_FORMAT_NOT_SUPPORTED:
+            return "VK_ERROR_FORMAT_NOT_SUPPORTED";
+        case VK_ERROR_FRAGMENTATION_EXT:
+            return "VK_ERROR_FRAGMENTATION_EXT";
+        case VK_ERROR_FRAGMENTED_POOL:
+            return "VK_ERROR_FRAGMENTED_POOL";
+        case VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT:
+            return "VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT";
+        case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR:
+            return "VK_ERROR_INCOMPATIBLE_DISPLAY_KHR";
+        case VK_ERROR_INCOMPATIBLE_DRIVER:
+            return "VK_ERROR_INCOMPATIBLE_DRIVER";
+        case VK_ERROR_INITIALIZATION_FAILED:
+            return "VK_ERROR_INITIALIZATION_FAILED";
+        case VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT:
+            return "VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT";
+        case VK_ERROR_INVALID_EXTERNAL_HANDLE:
+            return "VK_ERROR_INVALID_EXTERNAL_HANDLE";
+        case VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR:
+            return "VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR";
+        case VK_ERROR_INVALID_SHADER_NV:
+            return "VK_ERROR_INVALID_SHADER_NV";
+        case VK_ERROR_LAYER_NOT_PRESENT:
+            return "VK_ERROR_LAYER_NOT_PRESENT";
+        case VK_ERROR_MEMORY_MAP_FAILED:
+            return "VK_ERROR_MEMORY_MAP_FAILED";
+        case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR:
+            return "VK_ERROR_NATIVE_WINDOW_IN_USE_KHR";
+        case VK_ERROR_NOT_PERMITTED_EXT:
+            return "VK_ERROR_NOT_PERMITTED_EXT";
+        case VK_ERROR_OUT_OF_DATE_KHR:
+            return "VK_ERROR_OUT_OF_DATE_KHR";
+        case VK_ERROR_OUT_OF_DEVICE_MEMORY:
+            return "VK_ERROR_OUT_OF_DEVICE_MEMORY";
+        case VK_ERROR_OUT_OF_HOST_MEMORY:
+            return "VK_ERROR_OUT_OF_HOST_MEMORY";
+        case VK_ERROR_OUT_OF_POOL_MEMORY:
+            return "VK_ERROR_OUT_OF_POOL_MEMORY";
+        case VK_ERROR_SURFACE_LOST_KHR:
+            return "VK_ERROR_SURFACE_LOST_KHR";
+        case VK_ERROR_TOO_MANY_OBJECTS:
+            return "VK_ERROR_TOO_MANY_OBJECTS";
+        case VK_ERROR_VALIDATION_FAILED_EXT:
+            return "VK_ERROR_VALIDATION_FAILED_EXT";
+        case VK_EVENT_RESET:
+            return "VK_EVENT_RESET";
+        case VK_EVENT_SET:
+            return "VK_EVENT_SET";
+        case VK_INCOMPLETE:
+            return "VK_INCOMPLETE";
+        case VK_NOT_READY:
+            return "VK_NOT_READY";
+        case VK_SUBOPTIMAL_KHR:
+            return "VK_SUBOPTIMAL_KHR";
+        case VK_SUCCESS:
+            return "VK_SUCCESS";
+        case VK_TIMEOUT:
+            return "VK_TIMEOUT";
+        default:
+            return "Unhandled VkResult";
+    }
+}
+
+static inline const char* string_VkStructureType(VkStructureType input_value)
+{
+    switch ((VkStructureType)input_value)
+    {
+        case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV:
+            return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV";
+        case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV:
+            return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV";
+        case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV:
+            return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV";
+        case VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR";
+        case VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR";
+        case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID:
+            return "VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID";
+        case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID:
+            return "VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID";
+        case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID:
+            return "VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID";
+        case VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR";
+        case VK_STRUCTURE_TYPE_APPLICATION_INFO:
+            return "VK_STRUCTURE_TYPE_APPLICATION_INFO";
+        case VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR:
+            return "VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR";
+        case VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR:
+            return "VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR";
+        case VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR:
+            return "VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR";
+        case VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR:
+            return "VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR";
+        case VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV:
+            return "VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV";
+        case VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO:
+            return "VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO";
+        case VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO:
+            return "VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO";
+        case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO:
+            return "VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO";
+        case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO:
+            return "VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO";
+        case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR";
+        case VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO:
+            return "VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO";
+        case VK_STRUCTURE_TYPE_BIND_SPARSE_INFO:
+            return "VK_STRUCTURE_TYPE_BIND_SPARSE_INFO";
+        case VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR";
+        case VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER:
+            return "VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER";
+        case VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2:
+            return "VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2";
+        case VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR";
+        case VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT";
+        case VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV:
+            return "VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV";
+        case VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX:
+            return "VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX";
+        case VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX:
+            return "VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX";
+        case VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO:
+            return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO";
+        case VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO:
+            return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO";
+        case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT";
+        case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO:
+            return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO";
+        case VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT";
+        case VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV:
+            return "VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV";
+        case VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET:
+            return "VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET";
+        case VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR";
+        case VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT";
+        case VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT";
+        case VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT";
+        case VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT:
+            return "VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT";
+        case VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT:
+            return "VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT";
+        case VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT";
+        case VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT";
+        case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV:
+            return "VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV";
+        case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV:
+            return "VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV";
+        case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV:
+            return "VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV";
+        case VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO:
+            return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO";
+        case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT:
+            return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT";
+        case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT:
+            return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT";
+        case VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT";
+        case VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX:
+            return "VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX";
+        case VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX:
+            return "VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX";
+        case VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO:
+            return "VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO";
+        case VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO:
+            return "VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO";
+        case VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR:
+            return "VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR";
+        case VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR";
+        case VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO:
+            return "VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO";
+        case VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO:
+            return "VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO";
+        case VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR";
+        case VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR";
+        case VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD:
+            return "VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD";
+        case VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2:
+            return "VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2";
+        case VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT";
+        case VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR";
+        case VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR:
+            return "VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR";
+        case VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD:
+            return "VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD";
+        case VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR:
+            return "VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR";
+        case VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR:
+            return "VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR";
+        case VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR:
+            return "VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR";
+        case VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT";
+        case VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR";
+        case VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR:
+            return "VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR";
+        case VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR";
+        case VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT:
+            return "VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT";
+        case VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT:
+            return "VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT";
+        case VK_STRUCTURE_TYPE_EVENT_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_EVENT_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR";
+        case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO:
+            return "VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO";
+        case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV:
+            return "VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV";
+        case VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR";
+        case VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV:
+            return "VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV";
+        case VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR";
+        case VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES:
+            return "VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES";
+        case VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES:
+            return "VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES";
+        case VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID:
+            return "VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID";
+        case VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES:
+            return "VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES";
+        case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV:
+            return "VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV";
+        case VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES:
+            return "VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES";
+        case VK_STRUCTURE_TYPE_FENCE_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_FENCE_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR";
+        case VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR";
+        case VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT:
+            return "VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT";
+        case VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2:
+            return "VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2";
+        case VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR";
+        case VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR";
+        case VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV:
+            return "VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV";
+        case VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV:
+            return "VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV";
+        case VK_STRUCTURE_TYPE_GEOMETRY_NV:
+            return "VK_STRUCTURE_TYPE_GEOMETRY_NV";
+        case VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV:
+            return "VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV";
+        case VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_HDR_METADATA_EXT:
+            return "VK_STRUCTURE_TYPE_HDR_METADATA_EXT";
+        case VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA:
+            return "VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA";
+        case VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT:
+            return "VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT";
+        case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR";
+        case VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2:
+            return "VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2";
+        case VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER:
+            return "VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER";
+        case VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2:
+            return "VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2";
+        case VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO:
+            return "VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO";
+        case VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2:
+            return "VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2";
+        case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR";
+        case VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT:
+            return "VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT";
+        case VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX:
+            return "VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX";
+        case VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID:
+            return "VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID";
+        case VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR";
+        case VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR";
+        case VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR";
+        case VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT";
+        case VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR";
+        case VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV:
+            return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV";
+        case VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR";
+        case VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR";
+        case VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX:
+            return "VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX";
+        case VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL:
+            return "VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL";
+        case VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK:
+            return "VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK";
+        case VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK:
+            return "VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK";
+        case VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE:
+            return "VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE";
+        case VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO:
+            return "VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO";
+        case VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO:
+            return "VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO";
+        case VK_STRUCTURE_TYPE_MEMORY_BARRIER:
+            return "VK_STRUCTURE_TYPE_MEMORY_BARRIER";
+        case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO:
+            return "VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO";
+        case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS:
+            return "VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS";
+        case VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR:
+            return "VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR";
+        case VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID:
+            return "VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID";
+        case VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR";
+        case VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR";
+        case VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT:
+            return "VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT";
+        case VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR";
+        case VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2:
+            return "VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2";
+        case VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR:
+            return "VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR";
+        case VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT:
+            return "VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT";
+        case VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX:
+            return "VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX";
+        case VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL:
+            return "VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL";
+        case VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR:
+            return "VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR";
+        case VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR:
+            return "VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR";
+        case VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL:
+            return "VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL";
+        case VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL:
+            return "VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL";
+        case VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR";
+        case VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL:
+            return "VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR";
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT:
+            return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT";
+        case VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD:
+            return "VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD";
+        case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV:
+            return "VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV";
+        case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV:
+            return "VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV";
+        case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV:
+            return "VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV";
+        case VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR";
+        case VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR:
+            return "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR";
+        case VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR:
+            return "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR";
+        case VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR:
+            return "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR";
+        case VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR";
+        case VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD:
+            return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD";
+        case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV:
+            return "VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV";
+        case VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV:
+            return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV";
+        case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV:
+            return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV";
+        case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV:
+            return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV";
+        case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV:
+            return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV";
+        case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV:
+            return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV";
+        case VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP:
+            return "VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP";
+        case VK_STRUCTURE_TYPE_PRESENT_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_PRESENT_INFO_KHR";
+        case VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR:
+            return "VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR";
+        case VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE:
+            return "VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE";
+        case VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO:
+            return "VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO";
+        case VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL:
+            return "VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL";
+        case VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR";
+        case VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV:
+            return "VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV";
+        case VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2:
+            return "VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2";
+        case VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV:
+            return "VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV";
+        case VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV:
+            return "VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV";
+        case VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR";
+        case VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO:
+            return "VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO";
+        case VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR:
+            return "VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR";
+        case VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT";
+        case VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES:
+            return "VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES";
+        case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO:
+            return "VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO";
+        case VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT";
+        case VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR";
+        case VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR";
+        case VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR";
+        case VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR";
+        case VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR";
+        case VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO:
+            return "VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO";
+        case VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR:
+            return "VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR";
+        case VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2:
+            return "VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2";
+        case VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2:
+            return "VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2";
+        case VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP:
+            return "VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP";
+        case VK_STRUCTURE_TYPE_SUBMIT_INFO:
+            return "VK_STRUCTURE_TYPE_SUBMIT_INFO";
+        case VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR";
+        case VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR:
+            return "VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR";
+        case VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR:
+            return "VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR";
+        case VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR:
+            return "VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR";
+        case VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR";
+        case VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT:
+            return "VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT";
+        case VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR:
+            return "VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR";
+        case VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT:
+            return "VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT";
+        case VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR:
+            return "VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR";
+        case VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT";
+        case VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR:
+            return "VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR";
+        case VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR";
+        case VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD:
+            return "VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD";
+        case VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD:
+            return "VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD";
+        case VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR";
+        case VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT:
+            return "VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT";
+        case VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT:
+            return "VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT";
+        case VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT:
+            return "VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT";
+        case VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN:
+            return "VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN";
+        case VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR";
+        case VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR";
+        case VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV:
+            return "VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV";
+        case VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR";
+        case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET:
+            return "VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET";
+        case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV:
+            return "VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV";
+        case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT:
+            return "VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT";
+        case VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR";
+        case VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR:
+            return "VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR";
+        default:
+            return "Unhandled VkStructureType";
+    }
+}
+
+static inline const char* string_VkSystemAllocationScope(VkSystemAllocationScope input_value)
+{
+    switch ((VkSystemAllocationScope)input_value)
+    {
+        case VK_SYSTEM_ALLOCATION_SCOPE_CACHE:
+            return "VK_SYSTEM_ALLOCATION_SCOPE_CACHE";
+        case VK_SYSTEM_ALLOCATION_SCOPE_COMMAND:
+            return "VK_SYSTEM_ALLOCATION_SCOPE_COMMAND";
+        case VK_SYSTEM_ALLOCATION_SCOPE_DEVICE:
+            return "VK_SYSTEM_ALLOCATION_SCOPE_DEVICE";
+        case VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE:
+            return "VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE";
+        case VK_SYSTEM_ALLOCATION_SCOPE_OBJECT:
+            return "VK_SYSTEM_ALLOCATION_SCOPE_OBJECT";
+        default:
+            return "Unhandled VkSystemAllocationScope";
+    }
+}
+
+static inline const char* string_VkInternalAllocationType(VkInternalAllocationType input_value)
+{
+    switch ((VkInternalAllocationType)input_value)
+    {
+        case VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE:
+            return "VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE";
+        default:
+            return "Unhandled VkInternalAllocationType";
+    }
+}
+
+static inline const char* string_VkFormat(VkFormat input_value)
+{
+    switch ((VkFormat)input_value)
+    {
+        case VK_FORMAT_A1R5G5B5_UNORM_PACK16:
+            return "VK_FORMAT_A1R5G5B5_UNORM_PACK16";
+        case VK_FORMAT_A2B10G10R10_SINT_PACK32:
+            return "VK_FORMAT_A2B10G10R10_SINT_PACK32";
+        case VK_FORMAT_A2B10G10R10_SNORM_PACK32:
+            return "VK_FORMAT_A2B10G10R10_SNORM_PACK32";
+        case VK_FORMAT_A2B10G10R10_SSCALED_PACK32:
+            return "VK_FORMAT_A2B10G10R10_SSCALED_PACK32";
+        case VK_FORMAT_A2B10G10R10_UINT_PACK32:
+            return "VK_FORMAT_A2B10G10R10_UINT_PACK32";
+        case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
+            return "VK_FORMAT_A2B10G10R10_UNORM_PACK32";
+        case VK_FORMAT_A2B10G10R10_USCALED_PACK32:
+            return "VK_FORMAT_A2B10G10R10_USCALED_PACK32";
+        case VK_FORMAT_A2R10G10B10_SINT_PACK32:
+            return "VK_FORMAT_A2R10G10B10_SINT_PACK32";
+        case VK_FORMAT_A2R10G10B10_SNORM_PACK32:
+            return "VK_FORMAT_A2R10G10B10_SNORM_PACK32";
+        case VK_FORMAT_A2R10G10B10_SSCALED_PACK32:
+            return "VK_FORMAT_A2R10G10B10_SSCALED_PACK32";
+        case VK_FORMAT_A2R10G10B10_UINT_PACK32:
+            return "VK_FORMAT_A2R10G10B10_UINT_PACK32";
+        case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
+            return "VK_FORMAT_A2R10G10B10_UNORM_PACK32";
+        case VK_FORMAT_A2R10G10B10_USCALED_PACK32:
+            return "VK_FORMAT_A2R10G10B10_USCALED_PACK32";
+        case VK_FORMAT_A8B8G8R8_SINT_PACK32:
+            return "VK_FORMAT_A8B8G8R8_SINT_PACK32";
+        case VK_FORMAT_A8B8G8R8_SNORM_PACK32:
+            return "VK_FORMAT_A8B8G8R8_SNORM_PACK32";
+        case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
+            return "VK_FORMAT_A8B8G8R8_SRGB_PACK32";
+        case VK_FORMAT_A8B8G8R8_SSCALED_PACK32:
+            return "VK_FORMAT_A8B8G8R8_SSCALED_PACK32";
+        case VK_FORMAT_A8B8G8R8_UINT_PACK32:
+            return "VK_FORMAT_A8B8G8R8_UINT_PACK32";
+        case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
+            return "VK_FORMAT_A8B8G8R8_UNORM_PACK32";
+        case VK_FORMAT_A8B8G8R8_USCALED_PACK32:
+            return "VK_FORMAT_A8B8G8R8_USCALED_PACK32";
+        case VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT:
+            return "VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT";
+        case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
+            return "VK_FORMAT_ASTC_10x10_SRGB_BLOCK";
+        case VK_FORMAT_ASTC_10x10_UNORM_BLOCK:
+            return "VK_FORMAT_ASTC_10x10_UNORM_BLOCK";
+        case VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT:
+            return "VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT";
+        case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
+            return "VK_FORMAT_ASTC_10x5_SRGB_BLOCK";
+        case VK_FORMAT_ASTC_10x5_UNORM_BLOCK:
+            return "VK_FORMAT_ASTC_10x5_UNORM_BLOCK";
+        case VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT:
+            return "VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT";
+        case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
+            return "VK_FORMAT_ASTC_10x6_SRGB_BLOCK";
+        case VK_FORMAT_ASTC_10x6_UNORM_BLOCK:
+            return "VK_FORMAT_ASTC_10x6_UNORM_BLOCK";
+        case VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT:
+            return "VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT";
+        case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
+            return "VK_FORMAT_ASTC_10x8_SRGB_BLOCK";
+        case VK_FORMAT_ASTC_10x8_UNORM_BLOCK:
+            return "VK_FORMAT_ASTC_10x8_UNORM_BLOCK";
+        case VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT:
+            return "VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT";
+        case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
+            return "VK_FORMAT_ASTC_12x10_SRGB_BLOCK";
+        case VK_FORMAT_ASTC_12x10_UNORM_BLOCK:
+            return "VK_FORMAT_ASTC_12x10_UNORM_BLOCK";
+        case VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT:
+            return "VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT";
+        case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
+            return "VK_FORMAT_ASTC_12x12_SRGB_BLOCK";
+        case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:
+            return "VK_FORMAT_ASTC_12x12_UNORM_BLOCK";
+        case VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT:
+            return "VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT";
+        case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
+            return "VK_FORMAT_ASTC_4x4_SRGB_BLOCK";
+        case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
+            return "VK_FORMAT_ASTC_4x4_UNORM_BLOCK";
+        case VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT:
+            return "VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT";
+        case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
+            return "VK_FORMAT_ASTC_5x4_SRGB_BLOCK";
+        case VK_FORMAT_ASTC_5x4_UNORM_BLOCK:
+            return "VK_FORMAT_ASTC_5x4_UNORM_BLOCK";
+        case VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT:
+            return "VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT";
+        case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
+            return "VK_FORMAT_ASTC_5x5_SRGB_BLOCK";
+        case VK_FORMAT_ASTC_5x5_UNORM_BLOCK:
+            return "VK_FORMAT_ASTC_5x5_UNORM_BLOCK";
+        case VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT:
+            return "VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT";
+        case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
+            return "VK_FORMAT_ASTC_6x5_SRGB_BLOCK";
+        case VK_FORMAT_ASTC_6x5_UNORM_BLOCK:
+            return "VK_FORMAT_ASTC_6x5_UNORM_BLOCK";
+        case VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT:
+            return "VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT";
+        case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
+            return "VK_FORMAT_ASTC_6x6_SRGB_BLOCK";
+        case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
+            return "VK_FORMAT_ASTC_6x6_UNORM_BLOCK";
+        case VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT:
+            return "VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT";
+        case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
+            return "VK_FORMAT_ASTC_8x5_SRGB_BLOCK";
+        case VK_FORMAT_ASTC_8x5_UNORM_BLOCK:
+            return "VK_FORMAT_ASTC_8x5_UNORM_BLOCK";
+        case VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT:
+            return "VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT";
+        case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
+            return "VK_FORMAT_ASTC_8x6_SRGB_BLOCK";
+        case VK_FORMAT_ASTC_8x6_UNORM_BLOCK:
+            return "VK_FORMAT_ASTC_8x6_UNORM_BLOCK";
+        case VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT:
+            return "VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT";
+        case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
+            return "VK_FORMAT_ASTC_8x8_SRGB_BLOCK";
+        case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
+            return "VK_FORMAT_ASTC_8x8_UNORM_BLOCK";
+        case VK_FORMAT_B10G11R11_UFLOAT_PACK32:
+            return "VK_FORMAT_B10G11R11_UFLOAT_PACK32";
+        case VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16:
+            return "VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16";
+        case VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16:
+            return "VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16";
+        case VK_FORMAT_B16G16R16G16_422_UNORM:
+            return "VK_FORMAT_B16G16R16G16_422_UNORM";
+        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
+            return "VK_FORMAT_B4G4R4A4_UNORM_PACK16";
+        case VK_FORMAT_B5G5R5A1_UNORM_PACK16:
+            return "VK_FORMAT_B5G5R5A1_UNORM_PACK16";
+        case VK_FORMAT_B5G6R5_UNORM_PACK16:
+            return "VK_FORMAT_B5G6R5_UNORM_PACK16";
+        case VK_FORMAT_B8G8R8A8_SINT:
+            return "VK_FORMAT_B8G8R8A8_SINT";
+        case VK_FORMAT_B8G8R8A8_SNORM:
+            return "VK_FORMAT_B8G8R8A8_SNORM";
+        case VK_FORMAT_B8G8R8A8_SRGB:
+            return "VK_FORMAT_B8G8R8A8_SRGB";
+        case VK_FORMAT_B8G8R8A8_SSCALED:
+            return "VK_FORMAT_B8G8R8A8_SSCALED";
+        case VK_FORMAT_B8G8R8A8_UINT:
+            return "VK_FORMAT_B8G8R8A8_UINT";
+        case VK_FORMAT_B8G8R8A8_UNORM:
+            return "VK_FORMAT_B8G8R8A8_UNORM";
+        case VK_FORMAT_B8G8R8A8_USCALED:
+            return "VK_FORMAT_B8G8R8A8_USCALED";
+        case VK_FORMAT_B8G8R8G8_422_UNORM:
+            return "VK_FORMAT_B8G8R8G8_422_UNORM";
+        case VK_FORMAT_B8G8R8_SINT:
+            return "VK_FORMAT_B8G8R8_SINT";
+        case VK_FORMAT_B8G8R8_SNORM:
+            return "VK_FORMAT_B8G8R8_SNORM";
+        case VK_FORMAT_B8G8R8_SRGB:
+            return "VK_FORMAT_B8G8R8_SRGB";
+        case VK_FORMAT_B8G8R8_SSCALED:
+            return "VK_FORMAT_B8G8R8_SSCALED";
+        case VK_FORMAT_B8G8R8_UINT:
+            return "VK_FORMAT_B8G8R8_UINT";
+        case VK_FORMAT_B8G8R8_UNORM:
+            return "VK_FORMAT_B8G8R8_UNORM";
+        case VK_FORMAT_B8G8R8_USCALED:
+            return "VK_FORMAT_B8G8R8_USCALED";
+        case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:
+            return "VK_FORMAT_BC1_RGBA_SRGB_BLOCK";
+        case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
+            return "VK_FORMAT_BC1_RGBA_UNORM_BLOCK";
+        case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
+            return "VK_FORMAT_BC1_RGB_SRGB_BLOCK";
+        case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
+            return "VK_FORMAT_BC1_RGB_UNORM_BLOCK";
+        case VK_FORMAT_BC2_SRGB_BLOCK:
+            return "VK_FORMAT_BC2_SRGB_BLOCK";
+        case VK_FORMAT_BC2_UNORM_BLOCK:
+            return "VK_FORMAT_BC2_UNORM_BLOCK";
+        case VK_FORMAT_BC3_SRGB_BLOCK:
+            return "VK_FORMAT_BC3_SRGB_BLOCK";
+        case VK_FORMAT_BC3_UNORM_BLOCK:
+            return "VK_FORMAT_BC3_UNORM_BLOCK";
+        case VK_FORMAT_BC4_SNORM_BLOCK:
+            return "VK_FORMAT_BC4_SNORM_BLOCK";
+        case VK_FORMAT_BC4_UNORM_BLOCK:
+            return "VK_FORMAT_BC4_UNORM_BLOCK";
+        case VK_FORMAT_BC5_SNORM_BLOCK:
+            return "VK_FORMAT_BC5_SNORM_BLOCK";
+        case VK_FORMAT_BC5_UNORM_BLOCK:
+            return "VK_FORMAT_BC5_UNORM_BLOCK";
+        case VK_FORMAT_BC6H_SFLOAT_BLOCK:
+            return "VK_FORMAT_BC6H_SFLOAT_BLOCK";
+        case VK_FORMAT_BC6H_UFLOAT_BLOCK:
+            return "VK_FORMAT_BC6H_UFLOAT_BLOCK";
+        case VK_FORMAT_BC7_SRGB_BLOCK:
+            return "VK_FORMAT_BC7_SRGB_BLOCK";
+        case VK_FORMAT_BC7_UNORM_BLOCK:
+            return "VK_FORMAT_BC7_UNORM_BLOCK";
+        case VK_FORMAT_D16_UNORM:
+            return "VK_FORMAT_D16_UNORM";
+        case VK_FORMAT_D16_UNORM_S8_UINT:
+            return "VK_FORMAT_D16_UNORM_S8_UINT";
+        case VK_FORMAT_D24_UNORM_S8_UINT:
+            return "VK_FORMAT_D24_UNORM_S8_UINT";
+        case VK_FORMAT_D32_SFLOAT:
+            return "VK_FORMAT_D32_SFLOAT";
+        case VK_FORMAT_D32_SFLOAT_S8_UINT:
+            return "VK_FORMAT_D32_SFLOAT_S8_UINT";
+        case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32:
+            return "VK_FORMAT_E5B9G9R9_UFLOAT_PACK32";
+        case VK_FORMAT_EAC_R11G11_SNORM_BLOCK:
+            return "VK_FORMAT_EAC_R11G11_SNORM_BLOCK";
+        case VK_FORMAT_EAC_R11G11_UNORM_BLOCK:
+            return "VK_FORMAT_EAC_R11G11_UNORM_BLOCK";
+        case VK_FORMAT_EAC_R11_SNORM_BLOCK:
+            return "VK_FORMAT_EAC_R11_SNORM_BLOCK";
+        case VK_FORMAT_EAC_R11_UNORM_BLOCK:
+            return "VK_FORMAT_EAC_R11_UNORM_BLOCK";
+        case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
+            return "VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK";
+        case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
+            return "VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK";
+        case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
+            return "VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK";
+        case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
+            return "VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK";
+        case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
+            return "VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK";
+        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
+            return "VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK";
+        case VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16:
+            return "VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16";
+        case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16:
+            return "VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16";
+        case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16:
+            return "VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16";
+        case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16:
+            return "VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16";
+        case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16:
+            return "VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16";
+        case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16:
+            return "VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16";
+        case VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16:
+            return "VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16";
+        case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16:
+            return "VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16";
+        case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16:
+            return "VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16";
+        case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16:
+            return "VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16";
+        case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16:
+            return "VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16";
+        case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16:
+            return "VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16";
+        case VK_FORMAT_G16B16G16R16_422_UNORM:
+            return "VK_FORMAT_G16B16G16R16_422_UNORM";
+        case VK_FORMAT_G16_B16R16_2PLANE_420_UNORM:
+            return "VK_FORMAT_G16_B16R16_2PLANE_420_UNORM";
+        case VK_FORMAT_G16_B16R16_2PLANE_422_UNORM:
+            return "VK_FORMAT_G16_B16R16_2PLANE_422_UNORM";
+        case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM:
+            return "VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM";
+        case VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM:
+            return "VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM";
+        case VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM:
+            return "VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM";
+        case VK_FORMAT_G8B8G8R8_422_UNORM:
+            return "VK_FORMAT_G8B8G8R8_422_UNORM";
+        case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
+            return "VK_FORMAT_G8_B8R8_2PLANE_420_UNORM";
+        case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM:
+            return "VK_FORMAT_G8_B8R8_2PLANE_422_UNORM";
+        case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM:
+            return "VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM";
+        case VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM:
+            return "VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM";
+        case VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM:
+            return "VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM";
+        case VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG:
+            return "VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG";
+        case VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG:
+            return "VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG";
+        case VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG:
+            return "VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG";
+        case VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG:
+            return "VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG";
+        case VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG:
+            return "VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG";
+        case VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG:
+            return "VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG";
+        case VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG:
+            return "VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG";
+        case VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG:
+            return "VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG";
+        case VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16:
+            return "VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16";
+        case VK_FORMAT_R10X6G10X6_UNORM_2PACK16:
+            return "VK_FORMAT_R10X6G10X6_UNORM_2PACK16";
+        case VK_FORMAT_R10X6_UNORM_PACK16:
+            return "VK_FORMAT_R10X6_UNORM_PACK16";
+        case VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16:
+            return "VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16";
+        case VK_FORMAT_R12X4G12X4_UNORM_2PACK16:
+            return "VK_FORMAT_R12X4G12X4_UNORM_2PACK16";
+        case VK_FORMAT_R12X4_UNORM_PACK16:
+            return "VK_FORMAT_R12X4_UNORM_PACK16";
+        case VK_FORMAT_R16G16B16A16_SFLOAT:
+            return "VK_FORMAT_R16G16B16A16_SFLOAT";
+        case VK_FORMAT_R16G16B16A16_SINT:
+            return "VK_FORMAT_R16G16B16A16_SINT";
+        case VK_FORMAT_R16G16B16A16_SNORM:
+            return "VK_FORMAT_R16G16B16A16_SNORM";
+        case VK_FORMAT_R16G16B16A16_SSCALED:
+            return "VK_FORMAT_R16G16B16A16_SSCALED";
+        case VK_FORMAT_R16G16B16A16_UINT:
+            return "VK_FORMAT_R16G16B16A16_UINT";
+        case VK_FORMAT_R16G16B16A16_UNORM:
+            return "VK_FORMAT_R16G16B16A16_UNORM";
+        case VK_FORMAT_R16G16B16A16_USCALED:
+            return "VK_FORMAT_R16G16B16A16_USCALED";
+        case VK_FORMAT_R16G16B16_SFLOAT:
+            return "VK_FORMAT_R16G16B16_SFLOAT";
+        case VK_FORMAT_R16G16B16_SINT:
+            return "VK_FORMAT_R16G16B16_SINT";
+        case VK_FORMAT_R16G16B16_SNORM:
+            return "VK_FORMAT_R16G16B16_SNORM";
+        case VK_FORMAT_R16G16B16_SSCALED:
+            return "VK_FORMAT_R16G16B16_SSCALED";
+        case VK_FORMAT_R16G16B16_UINT:
+            return "VK_FORMAT_R16G16B16_UINT";
+        case VK_FORMAT_R16G16B16_UNORM:
+            return "VK_FORMAT_R16G16B16_UNORM";
+        case VK_FORMAT_R16G16B16_USCALED:
+            return "VK_FORMAT_R16G16B16_USCALED";
+        case VK_FORMAT_R16G16_SFLOAT:
+            return "VK_FORMAT_R16G16_SFLOAT";
+        case VK_FORMAT_R16G16_SINT:
+            return "VK_FORMAT_R16G16_SINT";
+        case VK_FORMAT_R16G16_SNORM:
+            return "VK_FORMAT_R16G16_SNORM";
+        case VK_FORMAT_R16G16_SSCALED:
+            return "VK_FORMAT_R16G16_SSCALED";
+        case VK_FORMAT_R16G16_UINT:
+            return "VK_FORMAT_R16G16_UINT";
+        case VK_FORMAT_R16G16_UNORM:
+            return "VK_FORMAT_R16G16_UNORM";
+        case VK_FORMAT_R16G16_USCALED:
+            return "VK_FORMAT_R16G16_USCALED";
+        case VK_FORMAT_R16_SFLOAT:
+            return "VK_FORMAT_R16_SFLOAT";
+        case VK_FORMAT_R16_SINT:
+            return "VK_FORMAT_R16_SINT";
+        case VK_FORMAT_R16_SNORM:
+            return "VK_FORMAT_R16_SNORM";
+        case VK_FORMAT_R16_SSCALED:
+            return "VK_FORMAT_R16_SSCALED";
+        case VK_FORMAT_R16_UINT:
+            return "VK_FORMAT_R16_UINT";
+        case VK_FORMAT_R16_UNORM:
+            return "VK_FORMAT_R16_UNORM";
+        case VK_FORMAT_R16_USCALED:
+            return "VK_FORMAT_R16_USCALED";
+        case VK_FORMAT_R32G32B32A32_SFLOAT:
+            return "VK_FORMAT_R32G32B32A32_SFLOAT";
+        case VK_FORMAT_R32G32B32A32_SINT:
+            return "VK_FORMAT_R32G32B32A32_SINT";
+        case VK_FORMAT_R32G32B32A32_UINT:
+            return "VK_FORMAT_R32G32B32A32_UINT";
+        case VK_FORMAT_R32G32B32_SFLOAT:
+            return "VK_FORMAT_R32G32B32_SFLOAT";
+        case VK_FORMAT_R32G32B32_SINT:
+            return "VK_FORMAT_R32G32B32_SINT";
+        case VK_FORMAT_R32G32B32_UINT:
+            return "VK_FORMAT_R32G32B32_UINT";
+        case VK_FORMAT_R32G32_SFLOAT:
+            return "VK_FORMAT_R32G32_SFLOAT";
+        case VK_FORMAT_R32G32_SINT:
+            return "VK_FORMAT_R32G32_SINT";
+        case VK_FORMAT_R32G32_UINT:
+            return "VK_FORMAT_R32G32_UINT";
+        case VK_FORMAT_R32_SFLOAT:
+            return "VK_FORMAT_R32_SFLOAT";
+        case VK_FORMAT_R32_SINT:
+            return "VK_FORMAT_R32_SINT";
+        case VK_FORMAT_R32_UINT:
+            return "VK_FORMAT_R32_UINT";
+        case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
+            return "VK_FORMAT_R4G4B4A4_UNORM_PACK16";
+        case VK_FORMAT_R4G4_UNORM_PACK8:
+            return "VK_FORMAT_R4G4_UNORM_PACK8";
+        case VK_FORMAT_R5G5B5A1_UNORM_PACK16:
+            return "VK_FORMAT_R5G5B5A1_UNORM_PACK16";
+        case VK_FORMAT_R5G6B5_UNORM_PACK16:
+            return "VK_FORMAT_R5G6B5_UNORM_PACK16";
+        case VK_FORMAT_R64G64B64A64_SFLOAT:
+            return "VK_FORMAT_R64G64B64A64_SFLOAT";
+        case VK_FORMAT_R64G64B64A64_SINT:
+            return "VK_FORMAT_R64G64B64A64_SINT";
+        case VK_FORMAT_R64G64B64A64_UINT:
+            return "VK_FORMAT_R64G64B64A64_UINT";
+        case VK_FORMAT_R64G64B64_SFLOAT:
+            return "VK_FORMAT_R64G64B64_SFLOAT";
+        case VK_FORMAT_R64G64B64_SINT:
+            return "VK_FORMAT_R64G64B64_SINT";
+        case VK_FORMAT_R64G64B64_UINT:
+            return "VK_FORMAT_R64G64B64_UINT";
+        case VK_FORMAT_R64G64_SFLOAT:
+            return "VK_FORMAT_R64G64_SFLOAT";
+        case VK_FORMAT_R64G64_SINT:
+            return "VK_FORMAT_R64G64_SINT";
+        case VK_FORMAT_R64G64_UINT:
+            return "VK_FORMAT_R64G64_UINT";
+        case VK_FORMAT_R64_SFLOAT:
+            return "VK_FORMAT_R64_SFLOAT";
+        case VK_FORMAT_R64_SINT:
+            return "VK_FORMAT_R64_SINT";
+        case VK_FORMAT_R64_UINT:
+            return "VK_FORMAT_R64_UINT";
+        case VK_FORMAT_R8G8B8A8_SINT:
+            return "VK_FORMAT_R8G8B8A8_SINT";
+        case VK_FORMAT_R8G8B8A8_SNORM:
+            return "VK_FORMAT_R8G8B8A8_SNORM";
+        case VK_FORMAT_R8G8B8A8_SRGB:
+            return "VK_FORMAT_R8G8B8A8_SRGB";
+        case VK_FORMAT_R8G8B8A8_SSCALED:
+            return "VK_FORMAT_R8G8B8A8_SSCALED";
+        case VK_FORMAT_R8G8B8A8_UINT:
+            return "VK_FORMAT_R8G8B8A8_UINT";
+        case VK_FORMAT_R8G8B8A8_UNORM:
+            return "VK_FORMAT_R8G8B8A8_UNORM";
+        case VK_FORMAT_R8G8B8A8_USCALED:
+            return "VK_FORMAT_R8G8B8A8_USCALED";
+        case VK_FORMAT_R8G8B8_SINT:
+            return "VK_FORMAT_R8G8B8_SINT";
+        case VK_FORMAT_R8G8B8_SNORM:
+            return "VK_FORMAT_R8G8B8_SNORM";
+        case VK_FORMAT_R8G8B8_SRGB:
+            return "VK_FORMAT_R8G8B8_SRGB";
+        case VK_FORMAT_R8G8B8_SSCALED:
+            return "VK_FORMAT_R8G8B8_SSCALED";
+        case VK_FORMAT_R8G8B8_UINT:
+            return "VK_FORMAT_R8G8B8_UINT";
+        case VK_FORMAT_R8G8B8_UNORM:
+            return "VK_FORMAT_R8G8B8_UNORM";
+        case VK_FORMAT_R8G8B8_USCALED:
+            return "VK_FORMAT_R8G8B8_USCALED";
+        case VK_FORMAT_R8G8_SINT:
+            return "VK_FORMAT_R8G8_SINT";
+        case VK_FORMAT_R8G8_SNORM:
+            return "VK_FORMAT_R8G8_SNORM";
+        case VK_FORMAT_R8G8_SRGB:
+            return "VK_FORMAT_R8G8_SRGB";
+        case VK_FORMAT_R8G8_SSCALED:
+            return "VK_FORMAT_R8G8_SSCALED";
+        case VK_FORMAT_R8G8_UINT:
+            return "VK_FORMAT_R8G8_UINT";
+        case VK_FORMAT_R8G8_UNORM:
+            return "VK_FORMAT_R8G8_UNORM";
+        case VK_FORMAT_R8G8_USCALED:
+            return "VK_FORMAT_R8G8_USCALED";
+        case VK_FORMAT_R8_SINT:
+            return "VK_FORMAT_R8_SINT";
+        case VK_FORMAT_R8_SNORM:
+            return "VK_FORMAT_R8_SNORM";
+        case VK_FORMAT_R8_SRGB:
+            return "VK_FORMAT_R8_SRGB";
+        case VK_FORMAT_R8_SSCALED:
+            return "VK_FORMAT_R8_SSCALED";
+        case VK_FORMAT_R8_UINT:
+            return "VK_FORMAT_R8_UINT";
+        case VK_FORMAT_R8_UNORM:
+            return "VK_FORMAT_R8_UNORM";
+        case VK_FORMAT_R8_USCALED:
+            return "VK_FORMAT_R8_USCALED";
+        case VK_FORMAT_S8_UINT:
+            return "VK_FORMAT_S8_UINT";
+        case VK_FORMAT_UNDEFINED:
+            return "VK_FORMAT_UNDEFINED";
+        case VK_FORMAT_X8_D24_UNORM_PACK32:
+            return "VK_FORMAT_X8_D24_UNORM_PACK32";
+        default:
+            return "Unhandled VkFormat";
+    }
+}
+
+static inline const char* string_VkFormatFeatureFlagBits(VkFormatFeatureFlagBits input_value)
+{
+    switch ((VkFormatFeatureFlagBits)input_value)
+    {
+        case VK_FORMAT_FEATURE_BLIT_DST_BIT:
+            return "VK_FORMAT_FEATURE_BLIT_DST_BIT";
+        case VK_FORMAT_FEATURE_BLIT_SRC_BIT:
+            return "VK_FORMAT_FEATURE_BLIT_SRC_BIT";
+        case VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT:
+            return "VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT";
+        case VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT:
+            return "VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT";
+        case VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT:
+            return "VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT";
+        case VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT:
+            return "VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT";
+        case VK_FORMAT_FEATURE_DISJOINT_BIT:
+            return "VK_FORMAT_FEATURE_DISJOINT_BIT";
+        case VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT:
+            return "VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT";
+        case VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT:
+            return "VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT";
+        case VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT:
+            return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT";
+        case VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG:
+            return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG";
+        case VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT:
+            return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT";
+        case VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT:
+            return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT";
+        case VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT:
+            return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT";
+        case VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT:
+            return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT";
+        case VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT:
+            return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT";
+        case VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT:
+            return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT";
+        case VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT:
+            return "VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT";
+        case VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT:
+            return "VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT";
+        case VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT:
+            return "VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT";
+        case VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT:
+            return "VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT";
+        case VK_FORMAT_FEATURE_TRANSFER_DST_BIT:
+            return "VK_FORMAT_FEATURE_TRANSFER_DST_BIT";
+        case VK_FORMAT_FEATURE_TRANSFER_SRC_BIT:
+            return "VK_FORMAT_FEATURE_TRANSFER_SRC_BIT";
+        case VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT:
+            return "VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT";
+        case VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT:
+            return "VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT";
+        default:
+            return "Unhandled VkFormatFeatureFlagBits";
+    }
+}
+
+static inline std::string string_VkFormatFeatureFlags(VkFormatFeatureFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkFormatFeatureFlagBits(static_cast<VkFormatFeatureFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkFormatFeatureFlagBits(static_cast<VkFormatFeatureFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkImageType(VkImageType input_value)
+{
+    switch ((VkImageType)input_value)
+    {
+        case VK_IMAGE_TYPE_1D:
+            return "VK_IMAGE_TYPE_1D";
+        case VK_IMAGE_TYPE_2D:
+            return "VK_IMAGE_TYPE_2D";
+        case VK_IMAGE_TYPE_3D:
+            return "VK_IMAGE_TYPE_3D";
+        default:
+            return "Unhandled VkImageType";
+    }
+}
+
+static inline const char* string_VkImageTiling(VkImageTiling input_value)
+{
+    switch ((VkImageTiling)input_value)
+    {
+        case VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT:
+            return "VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT";
+        case VK_IMAGE_TILING_LINEAR:
+            return "VK_IMAGE_TILING_LINEAR";
+        case VK_IMAGE_TILING_OPTIMAL:
+            return "VK_IMAGE_TILING_OPTIMAL";
+        default:
+            return "Unhandled VkImageTiling";
+    }
+}
+
+static inline const char* string_VkImageUsageFlagBits(VkImageUsageFlagBits input_value)
+{
+    switch ((VkImageUsageFlagBits)input_value)
+    {
+        case VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT:
+            return "VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT";
+        case VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT:
+            return "VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT";
+        case VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT:
+            return "VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT";
+        case VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT:
+            return "VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT";
+        case VK_IMAGE_USAGE_SAMPLED_BIT:
+            return "VK_IMAGE_USAGE_SAMPLED_BIT";
+        case VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV:
+            return "VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV";
+        case VK_IMAGE_USAGE_STORAGE_BIT:
+            return "VK_IMAGE_USAGE_STORAGE_BIT";
+        case VK_IMAGE_USAGE_TRANSFER_DST_BIT:
+            return "VK_IMAGE_USAGE_TRANSFER_DST_BIT";
+        case VK_IMAGE_USAGE_TRANSFER_SRC_BIT:
+            return "VK_IMAGE_USAGE_TRANSFER_SRC_BIT";
+        case VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT:
+            return "VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT";
+        default:
+            return "Unhandled VkImageUsageFlagBits";
+    }
+}
+
+static inline std::string string_VkImageUsageFlags(VkImageUsageFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkImageUsageFlagBits(static_cast<VkImageUsageFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkImageUsageFlagBits(static_cast<VkImageUsageFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkImageCreateFlagBits(VkImageCreateFlagBits input_value)
+{
+    switch ((VkImageCreateFlagBits)input_value)
+    {
+        case VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT:
+            return "VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT";
+        case VK_IMAGE_CREATE_ALIAS_BIT:
+            return "VK_IMAGE_CREATE_ALIAS_BIT";
+        case VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT:
+            return "VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT";
+        case VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV:
+            return "VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV";
+        case VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT:
+            return "VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT";
+        case VK_IMAGE_CREATE_DISJOINT_BIT:
+            return "VK_IMAGE_CREATE_DISJOINT_BIT";
+        case VK_IMAGE_CREATE_EXTENDED_USAGE_BIT:
+            return "VK_IMAGE_CREATE_EXTENDED_USAGE_BIT";
+        case VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT:
+            return "VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT";
+        case VK_IMAGE_CREATE_PROTECTED_BIT:
+            return "VK_IMAGE_CREATE_PROTECTED_BIT";
+        case VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT:
+            return "VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT";
+        case VK_IMAGE_CREATE_SPARSE_ALIASED_BIT:
+            return "VK_IMAGE_CREATE_SPARSE_ALIASED_BIT";
+        case VK_IMAGE_CREATE_SPARSE_BINDING_BIT:
+            return "VK_IMAGE_CREATE_SPARSE_BINDING_BIT";
+        case VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT:
+            return "VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT";
+        case VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT:
+            return "VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT";
+        case VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT:
+            return "VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT";
+        default:
+            return "Unhandled VkImageCreateFlagBits";
+    }
+}
+
+static inline std::string string_VkImageCreateFlags(VkImageCreateFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkImageCreateFlagBits(static_cast<VkImageCreateFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkImageCreateFlagBits(static_cast<VkImageCreateFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkSampleCountFlagBits(VkSampleCountFlagBits input_value)
+{
+    switch ((VkSampleCountFlagBits)input_value)
+    {
+        case VK_SAMPLE_COUNT_16_BIT:
+            return "VK_SAMPLE_COUNT_16_BIT";
+        case VK_SAMPLE_COUNT_1_BIT:
+            return "VK_SAMPLE_COUNT_1_BIT";
+        case VK_SAMPLE_COUNT_2_BIT:
+            return "VK_SAMPLE_COUNT_2_BIT";
+        case VK_SAMPLE_COUNT_32_BIT:
+            return "VK_SAMPLE_COUNT_32_BIT";
+        case VK_SAMPLE_COUNT_4_BIT:
+            return "VK_SAMPLE_COUNT_4_BIT";
+        case VK_SAMPLE_COUNT_64_BIT:
+            return "VK_SAMPLE_COUNT_64_BIT";
+        case VK_SAMPLE_COUNT_8_BIT:
+            return "VK_SAMPLE_COUNT_8_BIT";
+        default:
+            return "Unhandled VkSampleCountFlagBits";
+    }
+}
+
+static inline std::string string_VkSampleCountFlags(VkSampleCountFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkSampleCountFlagBits(static_cast<VkSampleCountFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkSampleCountFlagBits(static_cast<VkSampleCountFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkPhysicalDeviceType(VkPhysicalDeviceType input_value)
+{
+    switch ((VkPhysicalDeviceType)input_value)
+    {
+        case VK_PHYSICAL_DEVICE_TYPE_CPU:
+            return "VK_PHYSICAL_DEVICE_TYPE_CPU";
+        case VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU:
+            return "VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU";
+        case VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU:
+            return "VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU";
+        case VK_PHYSICAL_DEVICE_TYPE_OTHER:
+            return "VK_PHYSICAL_DEVICE_TYPE_OTHER";
+        case VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU:
+            return "VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU";
+        default:
+            return "Unhandled VkPhysicalDeviceType";
+    }
+}
+
+static inline const char* string_VkQueueFlagBits(VkQueueFlagBits input_value)
+{
+    switch ((VkQueueFlagBits)input_value)
+    {
+        case VK_QUEUE_COMPUTE_BIT:
+            return "VK_QUEUE_COMPUTE_BIT";
+        case VK_QUEUE_GRAPHICS_BIT:
+            return "VK_QUEUE_GRAPHICS_BIT";
+        case VK_QUEUE_PROTECTED_BIT:
+            return "VK_QUEUE_PROTECTED_BIT";
+        case VK_QUEUE_SPARSE_BINDING_BIT:
+            return "VK_QUEUE_SPARSE_BINDING_BIT";
+        case VK_QUEUE_TRANSFER_BIT:
+            return "VK_QUEUE_TRANSFER_BIT";
+        default:
+            return "Unhandled VkQueueFlagBits";
+    }
+}
+
+static inline std::string string_VkQueueFlags(VkQueueFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkQueueFlagBits(static_cast<VkQueueFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkQueueFlagBits(static_cast<VkQueueFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkMemoryPropertyFlagBits(VkMemoryPropertyFlagBits input_value)
+{
+    switch ((VkMemoryPropertyFlagBits)input_value)
+    {
+        case VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD:
+            return "VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD";
+        case VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT:
+            return "VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT";
+        case VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD:
+            return "VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD";
+        case VK_MEMORY_PROPERTY_HOST_CACHED_BIT:
+            return "VK_MEMORY_PROPERTY_HOST_CACHED_BIT";
+        case VK_MEMORY_PROPERTY_HOST_COHERENT_BIT:
+            return "VK_MEMORY_PROPERTY_HOST_COHERENT_BIT";
+        case VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT:
+            return "VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT";
+        case VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT:
+            return "VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT";
+        case VK_MEMORY_PROPERTY_PROTECTED_BIT:
+            return "VK_MEMORY_PROPERTY_PROTECTED_BIT";
+        default:
+            return "Unhandled VkMemoryPropertyFlagBits";
+    }
+}
+
+static inline std::string string_VkMemoryPropertyFlags(VkMemoryPropertyFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkMemoryPropertyFlagBits(static_cast<VkMemoryPropertyFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkMemoryPropertyFlagBits(static_cast<VkMemoryPropertyFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkMemoryHeapFlagBits(VkMemoryHeapFlagBits input_value)
+{
+    switch ((VkMemoryHeapFlagBits)input_value)
+    {
+        case VK_MEMORY_HEAP_DEVICE_LOCAL_BIT:
+            return "VK_MEMORY_HEAP_DEVICE_LOCAL_BIT";
+        case VK_MEMORY_HEAP_MULTI_INSTANCE_BIT:
+            return "VK_MEMORY_HEAP_MULTI_INSTANCE_BIT";
+        default:
+            return "Unhandled VkMemoryHeapFlagBits";
+    }
+}
+
+static inline std::string string_VkMemoryHeapFlags(VkMemoryHeapFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkMemoryHeapFlagBits(static_cast<VkMemoryHeapFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkMemoryHeapFlagBits(static_cast<VkMemoryHeapFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkDeviceQueueCreateFlagBits(VkDeviceQueueCreateFlagBits input_value)
+{
+    switch ((VkDeviceQueueCreateFlagBits)input_value)
+    {
+        case VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT:
+            return "VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT";
+        default:
+            return "Unhandled VkDeviceQueueCreateFlagBits";
+    }
+}
+
+static inline std::string string_VkDeviceQueueCreateFlags(VkDeviceQueueCreateFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkDeviceQueueCreateFlagBits(static_cast<VkDeviceQueueCreateFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkDeviceQueueCreateFlagBits(static_cast<VkDeviceQueueCreateFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkPipelineStageFlagBits(VkPipelineStageFlagBits input_value)
+{
+    switch ((VkPipelineStageFlagBits)input_value)
+    {
+        case VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV:
+            return "VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV";
+        case VK_PIPELINE_STAGE_ALL_COMMANDS_BIT:
+            return "VK_PIPELINE_STAGE_ALL_COMMANDS_BIT";
+        case VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT:
+            return "VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT";
+        case VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT:
+            return "VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT";
+        case VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT:
+            return "VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT";
+        case VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX:
+            return "VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX";
+        case VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT:
+            return "VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT";
+        case VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT:
+            return "VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT";
+        case VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT:
+            return "VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT";
+        case VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT:
+            return "VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT";
+        case VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT:
+            return "VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT";
+        case VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT:
+            return "VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT";
+        case VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT:
+            return "VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT";
+        case VK_PIPELINE_STAGE_HOST_BIT:
+            return "VK_PIPELINE_STAGE_HOST_BIT";
+        case VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT:
+            return "VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT";
+        case VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV:
+            return "VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV";
+        case VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV:
+            return "VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV";
+        case VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV:
+            return "VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV";
+        case VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV:
+            return "VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV";
+        case VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT:
+            return "VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT";
+        case VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT:
+            return "VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT";
+        case VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT:
+            return "VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT";
+        case VK_PIPELINE_STAGE_TRANSFER_BIT:
+            return "VK_PIPELINE_STAGE_TRANSFER_BIT";
+        case VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT:
+            return "VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT";
+        case VK_PIPELINE_STAGE_VERTEX_INPUT_BIT:
+            return "VK_PIPELINE_STAGE_VERTEX_INPUT_BIT";
+        case VK_PIPELINE_STAGE_VERTEX_SHADER_BIT:
+            return "VK_PIPELINE_STAGE_VERTEX_SHADER_BIT";
+        default:
+            return "Unhandled VkPipelineStageFlagBits";
+    }
+}
+
+static inline std::string string_VkPipelineStageFlags(VkPipelineStageFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkPipelineStageFlagBits(static_cast<VkPipelineStageFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkPipelineStageFlagBits(static_cast<VkPipelineStageFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkImageAspectFlagBits(VkImageAspectFlagBits input_value)
+{
+    switch ((VkImageAspectFlagBits)input_value)
+    {
+        case VK_IMAGE_ASPECT_COLOR_BIT:
+            return "VK_IMAGE_ASPECT_COLOR_BIT";
+        case VK_IMAGE_ASPECT_DEPTH_BIT:
+            return "VK_IMAGE_ASPECT_DEPTH_BIT";
+        case VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT:
+            return "VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT";
+        case VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT:
+            return "VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT";
+        case VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT:
+            return "VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT";
+        case VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT:
+            return "VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT";
+        case VK_IMAGE_ASPECT_METADATA_BIT:
+            return "VK_IMAGE_ASPECT_METADATA_BIT";
+        case VK_IMAGE_ASPECT_PLANE_0_BIT:
+            return "VK_IMAGE_ASPECT_PLANE_0_BIT";
+        case VK_IMAGE_ASPECT_PLANE_1_BIT:
+            return "VK_IMAGE_ASPECT_PLANE_1_BIT";
+        case VK_IMAGE_ASPECT_PLANE_2_BIT:
+            return "VK_IMAGE_ASPECT_PLANE_2_BIT";
+        case VK_IMAGE_ASPECT_STENCIL_BIT:
+            return "VK_IMAGE_ASPECT_STENCIL_BIT";
+        default:
+            return "Unhandled VkImageAspectFlagBits";
+    }
+}
+
+static inline std::string string_VkImageAspectFlags(VkImageAspectFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkImageAspectFlagBits(static_cast<VkImageAspectFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkImageAspectFlagBits(static_cast<VkImageAspectFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkSparseImageFormatFlagBits(VkSparseImageFormatFlagBits input_value)
+{
+    switch ((VkSparseImageFormatFlagBits)input_value)
+    {
+        case VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT:
+            return "VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT";
+        case VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT:
+            return "VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT";
+        case VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT:
+            return "VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT";
+        default:
+            return "Unhandled VkSparseImageFormatFlagBits";
+    }
+}
+
+static inline std::string string_VkSparseImageFormatFlags(VkSparseImageFormatFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkSparseImageFormatFlagBits(static_cast<VkSparseImageFormatFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkSparseImageFormatFlagBits(static_cast<VkSparseImageFormatFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkSparseMemoryBindFlagBits(VkSparseMemoryBindFlagBits input_value)
+{
+    switch ((VkSparseMemoryBindFlagBits)input_value)
+    {
+        case VK_SPARSE_MEMORY_BIND_METADATA_BIT:
+            return "VK_SPARSE_MEMORY_BIND_METADATA_BIT";
+        default:
+            return "Unhandled VkSparseMemoryBindFlagBits";
+    }
+}
+
+static inline std::string string_VkSparseMemoryBindFlags(VkSparseMemoryBindFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkSparseMemoryBindFlagBits(static_cast<VkSparseMemoryBindFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkSparseMemoryBindFlagBits(static_cast<VkSparseMemoryBindFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkFenceCreateFlagBits(VkFenceCreateFlagBits input_value)
+{
+    switch ((VkFenceCreateFlagBits)input_value)
+    {
+        case VK_FENCE_CREATE_SIGNALED_BIT:
+            return "VK_FENCE_CREATE_SIGNALED_BIT";
+        default:
+            return "Unhandled VkFenceCreateFlagBits";
+    }
+}
+
+static inline std::string string_VkFenceCreateFlags(VkFenceCreateFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkFenceCreateFlagBits(static_cast<VkFenceCreateFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkFenceCreateFlagBits(static_cast<VkFenceCreateFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkQueryType(VkQueryType input_value)
+{
+    switch ((VkQueryType)input_value)
+    {
+        case VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV:
+            return "VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV";
+        case VK_QUERY_TYPE_OCCLUSION:
+            return "VK_QUERY_TYPE_OCCLUSION";
+        case VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL:
+            return "VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL";
+        case VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR:
+            return "VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR";
+        case VK_QUERY_TYPE_PIPELINE_STATISTICS:
+            return "VK_QUERY_TYPE_PIPELINE_STATISTICS";
+        case VK_QUERY_TYPE_TIMESTAMP:
+            return "VK_QUERY_TYPE_TIMESTAMP";
+        case VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT:
+            return "VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT";
+        default:
+            return "Unhandled VkQueryType";
+    }
+}
+
+static inline const char* string_VkQueryPipelineStatisticFlagBits(VkQueryPipelineStatisticFlagBits input_value)
+{
+    switch ((VkQueryPipelineStatisticFlagBits)input_value)
+    {
+        case VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT:
+            return "VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT";
+        case VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT:
+            return "VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT";
+        case VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT:
+            return "VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT";
+        case VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT:
+            return "VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT";
+        case VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT:
+            return "VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT";
+        case VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT:
+            return "VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT";
+        case VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT:
+            return "VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT";
+        case VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT:
+            return "VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT";
+        case VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT:
+            return "VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT";
+        case VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT:
+            return "VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT";
+        case VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT:
+            return "VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT";
+        default:
+            return "Unhandled VkQueryPipelineStatisticFlagBits";
+    }
+}
+
+static inline std::string string_VkQueryPipelineStatisticFlags(VkQueryPipelineStatisticFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkQueryPipelineStatisticFlagBits(static_cast<VkQueryPipelineStatisticFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkQueryPipelineStatisticFlagBits(static_cast<VkQueryPipelineStatisticFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkQueryResultFlagBits(VkQueryResultFlagBits input_value)
+{
+    switch ((VkQueryResultFlagBits)input_value)
+    {
+        case VK_QUERY_RESULT_64_BIT:
+            return "VK_QUERY_RESULT_64_BIT";
+        case VK_QUERY_RESULT_PARTIAL_BIT:
+            return "VK_QUERY_RESULT_PARTIAL_BIT";
+        case VK_QUERY_RESULT_WAIT_BIT:
+            return "VK_QUERY_RESULT_WAIT_BIT";
+        case VK_QUERY_RESULT_WITH_AVAILABILITY_BIT:
+            return "VK_QUERY_RESULT_WITH_AVAILABILITY_BIT";
+        default:
+            return "Unhandled VkQueryResultFlagBits";
+    }
+}
+
+static inline std::string string_VkQueryResultFlags(VkQueryResultFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkQueryResultFlagBits(static_cast<VkQueryResultFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkQueryResultFlagBits(static_cast<VkQueryResultFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkBufferCreateFlagBits(VkBufferCreateFlagBits input_value)
+{
+    switch ((VkBufferCreateFlagBits)input_value)
+    {
+        case VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR:
+            return "VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR";
+        case VK_BUFFER_CREATE_PROTECTED_BIT:
+            return "VK_BUFFER_CREATE_PROTECTED_BIT";
+        case VK_BUFFER_CREATE_SPARSE_ALIASED_BIT:
+            return "VK_BUFFER_CREATE_SPARSE_ALIASED_BIT";
+        case VK_BUFFER_CREATE_SPARSE_BINDING_BIT:
+            return "VK_BUFFER_CREATE_SPARSE_BINDING_BIT";
+        case VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT:
+            return "VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT";
+        default:
+            return "Unhandled VkBufferCreateFlagBits";
+    }
+}
+
+static inline std::string string_VkBufferCreateFlags(VkBufferCreateFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkBufferCreateFlagBits(static_cast<VkBufferCreateFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkBufferCreateFlagBits(static_cast<VkBufferCreateFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkBufferUsageFlagBits(VkBufferUsageFlagBits input_value)
+{
+    switch ((VkBufferUsageFlagBits)input_value)
+    {
+        case VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT:
+            return "VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT";
+        case VK_BUFFER_USAGE_INDEX_BUFFER_BIT:
+            return "VK_BUFFER_USAGE_INDEX_BUFFER_BIT";
+        case VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT:
+            return "VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT";
+        case VK_BUFFER_USAGE_RAY_TRACING_BIT_NV:
+            return "VK_BUFFER_USAGE_RAY_TRACING_BIT_NV";
+        case VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR:
+            return "VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR";
+        case VK_BUFFER_USAGE_STORAGE_BUFFER_BIT:
+            return "VK_BUFFER_USAGE_STORAGE_BUFFER_BIT";
+        case VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT:
+            return "VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT";
+        case VK_BUFFER_USAGE_TRANSFER_DST_BIT:
+            return "VK_BUFFER_USAGE_TRANSFER_DST_BIT";
+        case VK_BUFFER_USAGE_TRANSFER_SRC_BIT:
+            return "VK_BUFFER_USAGE_TRANSFER_SRC_BIT";
+        case VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT:
+            return "VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT";
+        case VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT:
+            return "VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT";
+        case VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT:
+            return "VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT";
+        case VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT:
+            return "VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT";
+        case VK_BUFFER_USAGE_VERTEX_BUFFER_BIT:
+            return "VK_BUFFER_USAGE_VERTEX_BUFFER_BIT";
+        default:
+            return "Unhandled VkBufferUsageFlagBits";
+    }
+}
+
+static inline std::string string_VkBufferUsageFlags(VkBufferUsageFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkBufferUsageFlagBits(static_cast<VkBufferUsageFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkBufferUsageFlagBits(static_cast<VkBufferUsageFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkSharingMode(VkSharingMode input_value)
+{
+    switch ((VkSharingMode)input_value)
+    {
+        case VK_SHARING_MODE_CONCURRENT:
+            return "VK_SHARING_MODE_CONCURRENT";
+        case VK_SHARING_MODE_EXCLUSIVE:
+            return "VK_SHARING_MODE_EXCLUSIVE";
+        default:
+            return "Unhandled VkSharingMode";
+    }
+}
+
+static inline const char* string_VkImageLayout(VkImageLayout input_value)
+{
+    switch ((VkImageLayout)input_value)
+    {
+        case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
+            return "VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL";
+        case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR:
+            return "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR";
+        case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL:
+            return "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL";
+        case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR:
+            return "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR";
+        case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL:
+            return "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL";
+        case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
+            return "VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL";
+        case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
+            return "VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL";
+        case VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT:
+            return "VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT";
+        case VK_IMAGE_LAYOUT_GENERAL:
+            return "VK_IMAGE_LAYOUT_GENERAL";
+        case VK_IMAGE_LAYOUT_PREINITIALIZED:
+            return "VK_IMAGE_LAYOUT_PREINITIALIZED";
+        case VK_IMAGE_LAYOUT_PRESENT_SRC_KHR:
+            return "VK_IMAGE_LAYOUT_PRESENT_SRC_KHR";
+        case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
+            return "VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL";
+        case VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV:
+            return "VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV";
+        case VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR:
+            return "VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR";
+        case VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR:
+            return "VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR";
+        case VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR:
+            return "VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR";
+        case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
+            return "VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL";
+        case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
+            return "VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL";
+        case VK_IMAGE_LAYOUT_UNDEFINED:
+            return "VK_IMAGE_LAYOUT_UNDEFINED";
+        default:
+            return "Unhandled VkImageLayout";
+    }
+}
+
+static inline const char* string_VkImageViewCreateFlagBits(VkImageViewCreateFlagBits input_value)
+{
+    switch ((VkImageViewCreateFlagBits)input_value)
+    {
+        case VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT:
+            return "VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT";
+        default:
+            return "Unhandled VkImageViewCreateFlagBits";
+    }
+}
+
+static inline std::string string_VkImageViewCreateFlags(VkImageViewCreateFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkImageViewCreateFlagBits(static_cast<VkImageViewCreateFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkImageViewCreateFlagBits(static_cast<VkImageViewCreateFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkImageViewType(VkImageViewType input_value)
+{
+    switch ((VkImageViewType)input_value)
+    {
+        case VK_IMAGE_VIEW_TYPE_1D:
+            return "VK_IMAGE_VIEW_TYPE_1D";
+        case VK_IMAGE_VIEW_TYPE_1D_ARRAY:
+            return "VK_IMAGE_VIEW_TYPE_1D_ARRAY";
+        case VK_IMAGE_VIEW_TYPE_2D:
+            return "VK_IMAGE_VIEW_TYPE_2D";
+        case VK_IMAGE_VIEW_TYPE_2D_ARRAY:
+            return "VK_IMAGE_VIEW_TYPE_2D_ARRAY";
+        case VK_IMAGE_VIEW_TYPE_3D:
+            return "VK_IMAGE_VIEW_TYPE_3D";
+        case VK_IMAGE_VIEW_TYPE_CUBE:
+            return "VK_IMAGE_VIEW_TYPE_CUBE";
+        case VK_IMAGE_VIEW_TYPE_CUBE_ARRAY:
+            return "VK_IMAGE_VIEW_TYPE_CUBE_ARRAY";
+        default:
+            return "Unhandled VkImageViewType";
+    }
+}
+
+static inline const char* string_VkComponentSwizzle(VkComponentSwizzle input_value)
+{
+    switch ((VkComponentSwizzle)input_value)
+    {
+        case VK_COMPONENT_SWIZZLE_A:
+            return "VK_COMPONENT_SWIZZLE_A";
+        case VK_COMPONENT_SWIZZLE_B:
+            return "VK_COMPONENT_SWIZZLE_B";
+        case VK_COMPONENT_SWIZZLE_G:
+            return "VK_COMPONENT_SWIZZLE_G";
+        case VK_COMPONENT_SWIZZLE_IDENTITY:
+            return "VK_COMPONENT_SWIZZLE_IDENTITY";
+        case VK_COMPONENT_SWIZZLE_ONE:
+            return "VK_COMPONENT_SWIZZLE_ONE";
+        case VK_COMPONENT_SWIZZLE_R:
+            return "VK_COMPONENT_SWIZZLE_R";
+        case VK_COMPONENT_SWIZZLE_ZERO:
+            return "VK_COMPONENT_SWIZZLE_ZERO";
+        default:
+            return "Unhandled VkComponentSwizzle";
+    }
+}
+
+static inline const char* string_VkPipelineCreateFlagBits(VkPipelineCreateFlagBits input_value)
+{
+    switch ((VkPipelineCreateFlagBits)input_value)
+    {
+        case VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT:
+            return "VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT";
+        case VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR:
+            return "VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR";
+        case VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR:
+            return "VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR";
+        case VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV:
+            return "VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV";
+        case VK_PIPELINE_CREATE_DERIVATIVE_BIT:
+            return "VK_PIPELINE_CREATE_DERIVATIVE_BIT";
+        case VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT:
+            return "VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT";
+        case VK_PIPELINE_CREATE_DISPATCH_BASE_BIT:
+            return "VK_PIPELINE_CREATE_DISPATCH_BASE_BIT";
+        case VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT:
+            return "VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT";
+        default:
+            return "Unhandled VkPipelineCreateFlagBits";
+    }
+}
+
+static inline std::string string_VkPipelineCreateFlags(VkPipelineCreateFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkPipelineCreateFlagBits(static_cast<VkPipelineCreateFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkPipelineCreateFlagBits(static_cast<VkPipelineCreateFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkPipelineShaderStageCreateFlagBits(VkPipelineShaderStageCreateFlagBits input_value)
+{
+    switch ((VkPipelineShaderStageCreateFlagBits)input_value)
+    {
+        case VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT:
+            return "VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT";
+        case VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT:
+            return "VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT";
+        default:
+            return "Unhandled VkPipelineShaderStageCreateFlagBits";
+    }
+}
+
+static inline std::string string_VkPipelineShaderStageCreateFlags(VkPipelineShaderStageCreateFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkPipelineShaderStageCreateFlagBits(static_cast<VkPipelineShaderStageCreateFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkPipelineShaderStageCreateFlagBits(static_cast<VkPipelineShaderStageCreateFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkShaderStageFlagBits(VkShaderStageFlagBits input_value)
+{
+    switch ((VkShaderStageFlagBits)input_value)
+    {
+        case VK_SHADER_STAGE_ALL:
+            return "VK_SHADER_STAGE_ALL";
+        case VK_SHADER_STAGE_ALL_GRAPHICS:
+            return "VK_SHADER_STAGE_ALL_GRAPHICS";
+        case VK_SHADER_STAGE_ANY_HIT_BIT_NV:
+            return "VK_SHADER_STAGE_ANY_HIT_BIT_NV";
+        case VK_SHADER_STAGE_CALLABLE_BIT_NV:
+            return "VK_SHADER_STAGE_CALLABLE_BIT_NV";
+        case VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV:
+            return "VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV";
+        case VK_SHADER_STAGE_COMPUTE_BIT:
+            return "VK_SHADER_STAGE_COMPUTE_BIT";
+        case VK_SHADER_STAGE_FRAGMENT_BIT:
+            return "VK_SHADER_STAGE_FRAGMENT_BIT";
+        case VK_SHADER_STAGE_GEOMETRY_BIT:
+            return "VK_SHADER_STAGE_GEOMETRY_BIT";
+        case VK_SHADER_STAGE_INTERSECTION_BIT_NV:
+            return "VK_SHADER_STAGE_INTERSECTION_BIT_NV";
+        case VK_SHADER_STAGE_MESH_BIT_NV:
+            return "VK_SHADER_STAGE_MESH_BIT_NV";
+        case VK_SHADER_STAGE_MISS_BIT_NV:
+            return "VK_SHADER_STAGE_MISS_BIT_NV";
+        case VK_SHADER_STAGE_RAYGEN_BIT_NV:
+            return "VK_SHADER_STAGE_RAYGEN_BIT_NV";
+        case VK_SHADER_STAGE_TASK_BIT_NV:
+            return "VK_SHADER_STAGE_TASK_BIT_NV";
+        case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
+            return "VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT";
+        case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
+            return "VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT";
+        case VK_SHADER_STAGE_VERTEX_BIT:
+            return "VK_SHADER_STAGE_VERTEX_BIT";
+        default:
+            return "Unhandled VkShaderStageFlagBits";
+    }
+}
+
+static inline std::string string_VkShaderStageFlags(VkShaderStageFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkShaderStageFlagBits(static_cast<VkShaderStageFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkShaderStageFlagBits(static_cast<VkShaderStageFlagBits>(0)));
+    return ret;
+}
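+// VK_SHADER_STAGE_ALL_GRAPHICS and VK_SHADER_STAGE_ALL are multi-bit masks, so
+// string_VkShaderStageFlags decomposes them into the individual stage names
+// (for ALL_GRAPHICS: "VK_SHADER_STAGE_VERTEX_BIT|...|VK_SHADER_STAGE_FRAGMENT_BIT");
+// the composite names are returned only when such a value is passed directly
+// to string_VkShaderStageFlagBits.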
+
+static inline const char* string_VkVertexInputRate(VkVertexInputRate input_value)
+{
+    switch ((VkVertexInputRate)input_value)
+    {
+        case VK_VERTEX_INPUT_RATE_INSTANCE:
+            return "VK_VERTEX_INPUT_RATE_INSTANCE";
+        case VK_VERTEX_INPUT_RATE_VERTEX:
+            return "VK_VERTEX_INPUT_RATE_VERTEX";
+        default:
+            return "Unhandled VkVertexInputRate";
+    }
+}
+
+static inline const char* string_VkPrimitiveTopology(VkPrimitiveTopology input_value)
+{
+    switch ((VkPrimitiveTopology)input_value)
+    {
+        case VK_PRIMITIVE_TOPOLOGY_LINE_LIST:
+            return "VK_PRIMITIVE_TOPOLOGY_LINE_LIST";
+        case VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY:
+            return "VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY";
+        case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP:
+            return "VK_PRIMITIVE_TOPOLOGY_LINE_STRIP";
+        case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY:
+            return "VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY";
+        case VK_PRIMITIVE_TOPOLOGY_PATCH_LIST:
+            return "VK_PRIMITIVE_TOPOLOGY_PATCH_LIST";
+        case VK_PRIMITIVE_TOPOLOGY_POINT_LIST:
+            return "VK_PRIMITIVE_TOPOLOGY_POINT_LIST";
+        case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN:
+            return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN";
+        case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST:
+            return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST";
+        case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY:
+            return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY";
+        case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP:
+            return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP";
+        case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY:
+            return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY";
+        default:
+            return "Unhandled VkPrimitiveTopology";
+    }
+}
+
+static inline const char* string_VkPolygonMode(VkPolygonMode input_value)
+{
+    switch ((VkPolygonMode)input_value)
+    {
+        case VK_POLYGON_MODE_FILL:
+            return "VK_POLYGON_MODE_FILL";
+        case VK_POLYGON_MODE_FILL_RECTANGLE_NV:
+            return "VK_POLYGON_MODE_FILL_RECTANGLE_NV";
+        case VK_POLYGON_MODE_LINE:
+            return "VK_POLYGON_MODE_LINE";
+        case VK_POLYGON_MODE_POINT:
+            return "VK_POLYGON_MODE_POINT";
+        default:
+            return "Unhandled VkPolygonMode";
+    }
+}
+
+static inline const char* string_VkCullModeFlagBits(VkCullModeFlagBits input_value)
+{
+    switch ((VkCullModeFlagBits)input_value)
+    {
+        case VK_CULL_MODE_BACK_BIT:
+            return "VK_CULL_MODE_BACK_BIT";
+        case VK_CULL_MODE_FRONT_AND_BACK:
+            return "VK_CULL_MODE_FRONT_AND_BACK";
+        case VK_CULL_MODE_FRONT_BIT:
+            return "VK_CULL_MODE_FRONT_BIT";
+        case VK_CULL_MODE_NONE:
+            return "VK_CULL_MODE_NONE";
+        default:
+            return "Unhandled VkCullModeFlagBits";
+    }
+}
+
+static inline std::string string_VkCullModeFlags(VkCullModeFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkCullModeFlagBits(static_cast<VkCullModeFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkCullModeFlagBits(static_cast<VkCullModeFlagBits>(0)));
+    return ret;
+}
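+// Because VK_CULL_MODE_NONE is the zero value of VkCullModeFlagBits, the
+// empty-mask fallback above makes string_VkCullModeFlags(0) return
+// "VK_CULL_MODE_NONE" rather than the generic "Unhandled ..." string.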
+
+static inline const char* string_VkFrontFace(VkFrontFace input_value)
+{
+    switch ((VkFrontFace)input_value)
+    {
+        case VK_FRONT_FACE_CLOCKWISE:
+            return "VK_FRONT_FACE_CLOCKWISE";
+        case VK_FRONT_FACE_COUNTER_CLOCKWISE:
+            return "VK_FRONT_FACE_COUNTER_CLOCKWISE";
+        default:
+            return "Unhandled VkFrontFace";
+    }
+}
+
+static inline const char* string_VkCompareOp(VkCompareOp input_value)
+{
+    switch ((VkCompareOp)input_value)
+    {
+        case VK_COMPARE_OP_ALWAYS:
+            return "VK_COMPARE_OP_ALWAYS";
+        case VK_COMPARE_OP_EQUAL:
+            return "VK_COMPARE_OP_EQUAL";
+        case VK_COMPARE_OP_GREATER:
+            return "VK_COMPARE_OP_GREATER";
+        case VK_COMPARE_OP_GREATER_OR_EQUAL:
+            return "VK_COMPARE_OP_GREATER_OR_EQUAL";
+        case VK_COMPARE_OP_LESS:
+            return "VK_COMPARE_OP_LESS";
+        case VK_COMPARE_OP_LESS_OR_EQUAL:
+            return "VK_COMPARE_OP_LESS_OR_EQUAL";
+        case VK_COMPARE_OP_NEVER:
+            return "VK_COMPARE_OP_NEVER";
+        case VK_COMPARE_OP_NOT_EQUAL:
+            return "VK_COMPARE_OP_NOT_EQUAL";
+        default:
+            return "Unhandled VkCompareOp";
+    }
+}
+
+static inline const char* string_VkStencilOp(VkStencilOp input_value)
+{
+    switch ((VkStencilOp)input_value)
+    {
+        case VK_STENCIL_OP_DECREMENT_AND_CLAMP:
+            return "VK_STENCIL_OP_DECREMENT_AND_CLAMP";
+        case VK_STENCIL_OP_DECREMENT_AND_WRAP:
+            return "VK_STENCIL_OP_DECREMENT_AND_WRAP";
+        case VK_STENCIL_OP_INCREMENT_AND_CLAMP:
+            return "VK_STENCIL_OP_INCREMENT_AND_CLAMP";
+        case VK_STENCIL_OP_INCREMENT_AND_WRAP:
+            return "VK_STENCIL_OP_INCREMENT_AND_WRAP";
+        case VK_STENCIL_OP_INVERT:
+            return "VK_STENCIL_OP_INVERT";
+        case VK_STENCIL_OP_KEEP:
+            return "VK_STENCIL_OP_KEEP";
+        case VK_STENCIL_OP_REPLACE:
+            return "VK_STENCIL_OP_REPLACE";
+        case VK_STENCIL_OP_ZERO:
+            return "VK_STENCIL_OP_ZERO";
+        default:
+            return "Unhandled VkStencilOp";
+    }
+}
+
+static inline const char* string_VkLogicOp(VkLogicOp input_value)
+{
+    switch ((VkLogicOp)input_value)
+    {
+        case VK_LOGIC_OP_AND:
+            return "VK_LOGIC_OP_AND";
+        case VK_LOGIC_OP_AND_INVERTED:
+            return "VK_LOGIC_OP_AND_INVERTED";
+        case VK_LOGIC_OP_AND_REVERSE:
+            return "VK_LOGIC_OP_AND_REVERSE";
+        case VK_LOGIC_OP_CLEAR:
+            return "VK_LOGIC_OP_CLEAR";
+        case VK_LOGIC_OP_COPY:
+            return "VK_LOGIC_OP_COPY";
+        case VK_LOGIC_OP_COPY_INVERTED:
+            return "VK_LOGIC_OP_COPY_INVERTED";
+        case VK_LOGIC_OP_EQUIVALENT:
+            return "VK_LOGIC_OP_EQUIVALENT";
+        case VK_LOGIC_OP_INVERT:
+            return "VK_LOGIC_OP_INVERT";
+        case VK_LOGIC_OP_NAND:
+            return "VK_LOGIC_OP_NAND";
+        case VK_LOGIC_OP_NOR:
+            return "VK_LOGIC_OP_NOR";
+        case VK_LOGIC_OP_NO_OP:
+            return "VK_LOGIC_OP_NO_OP";
+        case VK_LOGIC_OP_OR:
+            return "VK_LOGIC_OP_OR";
+        case VK_LOGIC_OP_OR_INVERTED:
+            return "VK_LOGIC_OP_OR_INVERTED";
+        case VK_LOGIC_OP_OR_REVERSE:
+            return "VK_LOGIC_OP_OR_REVERSE";
+        case VK_LOGIC_OP_SET:
+            return "VK_LOGIC_OP_SET";
+        case VK_LOGIC_OP_XOR:
+            return "VK_LOGIC_OP_XOR";
+        default:
+            return "Unhandled VkLogicOp";
+    }
+}
+
+static inline const char* string_VkBlendFactor(VkBlendFactor input_value)
+{
+    switch ((VkBlendFactor)input_value)
+    {
+        case VK_BLEND_FACTOR_CONSTANT_ALPHA:
+            return "VK_BLEND_FACTOR_CONSTANT_ALPHA";
+        case VK_BLEND_FACTOR_CONSTANT_COLOR:
+            return "VK_BLEND_FACTOR_CONSTANT_COLOR";
+        case VK_BLEND_FACTOR_DST_ALPHA:
+            return "VK_BLEND_FACTOR_DST_ALPHA";
+        case VK_BLEND_FACTOR_DST_COLOR:
+            return "VK_BLEND_FACTOR_DST_COLOR";
+        case VK_BLEND_FACTOR_ONE:
+            return "VK_BLEND_FACTOR_ONE";
+        case VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA:
+            return "VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA";
+        case VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR:
+            return "VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR";
+        case VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA:
+            return "VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA";
+        case VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR:
+            return "VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR";
+        case VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA:
+            return "VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA";
+        case VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR:
+            return "VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR";
+        case VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA:
+            return "VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA";
+        case VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR:
+            return "VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR";
+        case VK_BLEND_FACTOR_SRC1_ALPHA:
+            return "VK_BLEND_FACTOR_SRC1_ALPHA";
+        case VK_BLEND_FACTOR_SRC1_COLOR:
+            return "VK_BLEND_FACTOR_SRC1_COLOR";
+        case VK_BLEND_FACTOR_SRC_ALPHA:
+            return "VK_BLEND_FACTOR_SRC_ALPHA";
+        case VK_BLEND_FACTOR_SRC_ALPHA_SATURATE:
+            return "VK_BLEND_FACTOR_SRC_ALPHA_SATURATE";
+        case VK_BLEND_FACTOR_SRC_COLOR:
+            return "VK_BLEND_FACTOR_SRC_COLOR";
+        case VK_BLEND_FACTOR_ZERO:
+            return "VK_BLEND_FACTOR_ZERO";
+        default:
+            return "Unhandled VkBlendFactor";
+    }
+}
+
+static inline const char* string_VkBlendOp(VkBlendOp input_value)
+{
+    switch ((VkBlendOp)input_value)
+    {
+        case VK_BLEND_OP_ADD:
+            return "VK_BLEND_OP_ADD";
+        case VK_BLEND_OP_BLUE_EXT:
+            return "VK_BLEND_OP_BLUE_EXT";
+        case VK_BLEND_OP_COLORBURN_EXT:
+            return "VK_BLEND_OP_COLORBURN_EXT";
+        case VK_BLEND_OP_COLORDODGE_EXT:
+            return "VK_BLEND_OP_COLORDODGE_EXT";
+        case VK_BLEND_OP_CONTRAST_EXT:
+            return "VK_BLEND_OP_CONTRAST_EXT";
+        case VK_BLEND_OP_DARKEN_EXT:
+            return "VK_BLEND_OP_DARKEN_EXT";
+        case VK_BLEND_OP_DIFFERENCE_EXT:
+            return "VK_BLEND_OP_DIFFERENCE_EXT";
+        case VK_BLEND_OP_DST_ATOP_EXT:
+            return "VK_BLEND_OP_DST_ATOP_EXT";
+        case VK_BLEND_OP_DST_EXT:
+            return "VK_BLEND_OP_DST_EXT";
+        case VK_BLEND_OP_DST_IN_EXT:
+            return "VK_BLEND_OP_DST_IN_EXT";
+        case VK_BLEND_OP_DST_OUT_EXT:
+            return "VK_BLEND_OP_DST_OUT_EXT";
+        case VK_BLEND_OP_DST_OVER_EXT:
+            return "VK_BLEND_OP_DST_OVER_EXT";
+        case VK_BLEND_OP_EXCLUSION_EXT:
+            return "VK_BLEND_OP_EXCLUSION_EXT";
+        case VK_BLEND_OP_GREEN_EXT:
+            return "VK_BLEND_OP_GREEN_EXT";
+        case VK_BLEND_OP_HARDLIGHT_EXT:
+            return "VK_BLEND_OP_HARDLIGHT_EXT";
+        case VK_BLEND_OP_HARDMIX_EXT:
+            return "VK_BLEND_OP_HARDMIX_EXT";
+        case VK_BLEND_OP_HSL_COLOR_EXT:
+            return "VK_BLEND_OP_HSL_COLOR_EXT";
+        case VK_BLEND_OP_HSL_HUE_EXT:
+            return "VK_BLEND_OP_HSL_HUE_EXT";
+        case VK_BLEND_OP_HSL_LUMINOSITY_EXT:
+            return "VK_BLEND_OP_HSL_LUMINOSITY_EXT";
+        case VK_BLEND_OP_HSL_SATURATION_EXT:
+            return "VK_BLEND_OP_HSL_SATURATION_EXT";
+        case VK_BLEND_OP_INVERT_EXT:
+            return "VK_BLEND_OP_INVERT_EXT";
+        case VK_BLEND_OP_INVERT_OVG_EXT:
+            return "VK_BLEND_OP_INVERT_OVG_EXT";
+        case VK_BLEND_OP_INVERT_RGB_EXT:
+            return "VK_BLEND_OP_INVERT_RGB_EXT";
+        case VK_BLEND_OP_LIGHTEN_EXT:
+            return "VK_BLEND_OP_LIGHTEN_EXT";
+        case VK_BLEND_OP_LINEARBURN_EXT:
+            return "VK_BLEND_OP_LINEARBURN_EXT";
+        case VK_BLEND_OP_LINEARDODGE_EXT:
+            return "VK_BLEND_OP_LINEARDODGE_EXT";
+        case VK_BLEND_OP_LINEARLIGHT_EXT:
+            return "VK_BLEND_OP_LINEARLIGHT_EXT";
+        case VK_BLEND_OP_MAX:
+            return "VK_BLEND_OP_MAX";
+        case VK_BLEND_OP_MIN:
+            return "VK_BLEND_OP_MIN";
+        case VK_BLEND_OP_MINUS_CLAMPED_EXT:
+            return "VK_BLEND_OP_MINUS_CLAMPED_EXT";
+        case VK_BLEND_OP_MINUS_EXT:
+            return "VK_BLEND_OP_MINUS_EXT";
+        case VK_BLEND_OP_MULTIPLY_EXT:
+            return "VK_BLEND_OP_MULTIPLY_EXT";
+        case VK_BLEND_OP_OVERLAY_EXT:
+            return "VK_BLEND_OP_OVERLAY_EXT";
+        case VK_BLEND_OP_PINLIGHT_EXT:
+            return "VK_BLEND_OP_PINLIGHT_EXT";
+        case VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT:
+            return "VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT";
+        case VK_BLEND_OP_PLUS_CLAMPED_EXT:
+            return "VK_BLEND_OP_PLUS_CLAMPED_EXT";
+        case VK_BLEND_OP_PLUS_DARKER_EXT:
+            return "VK_BLEND_OP_PLUS_DARKER_EXT";
+        case VK_BLEND_OP_PLUS_EXT:
+            return "VK_BLEND_OP_PLUS_EXT";
+        case VK_BLEND_OP_RED_EXT:
+            return "VK_BLEND_OP_RED_EXT";
+        case VK_BLEND_OP_REVERSE_SUBTRACT:
+            return "VK_BLEND_OP_REVERSE_SUBTRACT";
+        case VK_BLEND_OP_SCREEN_EXT:
+            return "VK_BLEND_OP_SCREEN_EXT";
+        case VK_BLEND_OP_SOFTLIGHT_EXT:
+            return "VK_BLEND_OP_SOFTLIGHT_EXT";
+        case VK_BLEND_OP_SRC_ATOP_EXT:
+            return "VK_BLEND_OP_SRC_ATOP_EXT";
+        case VK_BLEND_OP_SRC_EXT:
+            return "VK_BLEND_OP_SRC_EXT";
+        case VK_BLEND_OP_SRC_IN_EXT:
+            return "VK_BLEND_OP_SRC_IN_EXT";
+        case VK_BLEND_OP_SRC_OUT_EXT:
+            return "VK_BLEND_OP_SRC_OUT_EXT";
+        case VK_BLEND_OP_SRC_OVER_EXT:
+            return "VK_BLEND_OP_SRC_OVER_EXT";
+        case VK_BLEND_OP_SUBTRACT:
+            return "VK_BLEND_OP_SUBTRACT";
+        case VK_BLEND_OP_VIVIDLIGHT_EXT:
+            return "VK_BLEND_OP_VIVIDLIGHT_EXT";
+        case VK_BLEND_OP_XOR_EXT:
+            return "VK_BLEND_OP_XOR_EXT";
+        case VK_BLEND_OP_ZERO_EXT:
+            return "VK_BLEND_OP_ZERO_EXT";
+        default:
+            return "Unhandled VkBlendOp";
+    }
+}
+
+static inline const char* string_VkColorComponentFlagBits(VkColorComponentFlagBits input_value)
+{
+    switch ((VkColorComponentFlagBits)input_value)
+    {
+        case VK_COLOR_COMPONENT_A_BIT:
+            return "VK_COLOR_COMPONENT_A_BIT";
+        case VK_COLOR_COMPONENT_B_BIT:
+            return "VK_COLOR_COMPONENT_B_BIT";
+        case VK_COLOR_COMPONENT_G_BIT:
+            return "VK_COLOR_COMPONENT_G_BIT";
+        case VK_COLOR_COMPONENT_R_BIT:
+            return "VK_COLOR_COMPONENT_R_BIT";
+        default:
+            return "Unhandled VkColorComponentFlagBits";
+    }
+}
+
+static inline std::string string_VkColorComponentFlags(VkColorComponentFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkColorComponentFlagBits(static_cast<VkColorComponentFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkColorComponentFlagBits(static_cast<VkColorComponentFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkDynamicState(VkDynamicState input_value)
+{
+    switch ((VkDynamicState)input_value)
+    {
+        case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
+            return "VK_DYNAMIC_STATE_BLEND_CONSTANTS";
+        case VK_DYNAMIC_STATE_DEPTH_BIAS:
+            return "VK_DYNAMIC_STATE_DEPTH_BIAS";
+        case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
+            return "VK_DYNAMIC_STATE_DEPTH_BOUNDS";
+        case VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT:
+            return "VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT";
+        case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
+            return "VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV";
+        case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
+            return "VK_DYNAMIC_STATE_LINE_STIPPLE_EXT";
+        case VK_DYNAMIC_STATE_LINE_WIDTH:
+            return "VK_DYNAMIC_STATE_LINE_WIDTH";
+        case VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT:
+            return "VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT";
+        case VK_DYNAMIC_STATE_SCISSOR:
+            return "VK_DYNAMIC_STATE_SCISSOR";
+        case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
+            return "VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK";
+        case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
+            return "VK_DYNAMIC_STATE_STENCIL_REFERENCE";
+        case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
+            return "VK_DYNAMIC_STATE_STENCIL_WRITE_MASK";
+        case VK_DYNAMIC_STATE_VIEWPORT:
+            return "VK_DYNAMIC_STATE_VIEWPORT";
+        case VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV:
+            return "VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV";
+        case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
+            return "VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV";
+        case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV:
+            return "VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV";
+        default:
+            return "Unhandled VkDynamicState";
+    }
+}
+
+static inline const char* string_VkSamplerCreateFlagBits(VkSamplerCreateFlagBits input_value)
+{
+    switch ((VkSamplerCreateFlagBits)input_value)
+    {
+        case VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT:
+            return "VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT";
+        case VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT:
+            return "VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT";
+        default:
+            return "Unhandled VkSamplerCreateFlagBits";
+    }
+}
+
+static inline std::string string_VkSamplerCreateFlags(VkSamplerCreateFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkSamplerCreateFlagBits(static_cast<VkSamplerCreateFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkSamplerCreateFlagBits(static_cast<VkSamplerCreateFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkFilter(VkFilter input_value)
+{
+    switch ((VkFilter)input_value)
+    {
+        case VK_FILTER_CUBIC_IMG:
+            return "VK_FILTER_CUBIC_IMG";
+        case VK_FILTER_LINEAR:
+            return "VK_FILTER_LINEAR";
+        case VK_FILTER_NEAREST:
+            return "VK_FILTER_NEAREST";
+        default:
+            return "Unhandled VkFilter";
+    }
+}
+
+static inline const char* string_VkSamplerMipmapMode(VkSamplerMipmapMode input_value)
+{
+    switch ((VkSamplerMipmapMode)input_value)
+    {
+        case VK_SAMPLER_MIPMAP_MODE_LINEAR:
+            return "VK_SAMPLER_MIPMAP_MODE_LINEAR";
+        case VK_SAMPLER_MIPMAP_MODE_NEAREST:
+            return "VK_SAMPLER_MIPMAP_MODE_NEAREST";
+        default:
+            return "Unhandled VkSamplerMipmapMode";
+    }
+}
+
+static inline const char* string_VkSamplerAddressMode(VkSamplerAddressMode input_value)
+{
+    switch ((VkSamplerAddressMode)input_value)
+    {
+        case VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER:
+            return "VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER";
+        case VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE:
+            return "VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE";
+        case VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT:
+            return "VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT";
+        case VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE:
+            return "VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE";
+        case VK_SAMPLER_ADDRESS_MODE_REPEAT:
+            return "VK_SAMPLER_ADDRESS_MODE_REPEAT";
+        default:
+            return "Unhandled VkSamplerAddressMode";
+    }
+}
+
+static inline const char* string_VkBorderColor(VkBorderColor input_value)
+{
+    switch ((VkBorderColor)input_value)
+    {
+        case VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK:
+            return "VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK";
+        case VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE:
+            return "VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE";
+        case VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK:
+            return "VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK";
+        case VK_BORDER_COLOR_INT_OPAQUE_BLACK:
+            return "VK_BORDER_COLOR_INT_OPAQUE_BLACK";
+        case VK_BORDER_COLOR_INT_OPAQUE_WHITE:
+            return "VK_BORDER_COLOR_INT_OPAQUE_WHITE";
+        case VK_BORDER_COLOR_INT_TRANSPARENT_BLACK:
+            return "VK_BORDER_COLOR_INT_TRANSPARENT_BLACK";
+        default:
+            return "Unhandled VkBorderColor";
+    }
+}
+
+static inline const char* string_VkDescriptorSetLayoutCreateFlagBits(VkDescriptorSetLayoutCreateFlagBits input_value)
+{
+    switch ((VkDescriptorSetLayoutCreateFlagBits)input_value)
+    {
+        case VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR:
+            return "VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR";
+        case VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT:
+            return "VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT";
+        default:
+            return "Unhandled VkDescriptorSetLayoutCreateFlagBits";
+    }
+}
+
+static inline std::string string_VkDescriptorSetLayoutCreateFlags(VkDescriptorSetLayoutCreateFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkDescriptorSetLayoutCreateFlagBits(static_cast<VkDescriptorSetLayoutCreateFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkDescriptorSetLayoutCreateFlagBits(static_cast<VkDescriptorSetLayoutCreateFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkDescriptorType(VkDescriptorType input_value)
+{
+    switch ((VkDescriptorType)input_value)
+    {
+        case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV:
+            return "VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV";
+        case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+            return "VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER";
+        case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
+            return "VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT";
+        case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
+            return "VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT";
+        case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+            return "VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE";
+        case VK_DESCRIPTOR_TYPE_SAMPLER:
+            return "VK_DESCRIPTOR_TYPE_SAMPLER";
+        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+            return "VK_DESCRIPTOR_TYPE_STORAGE_BUFFER";
+        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
+            return "VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC";
+        case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
+            return "VK_DESCRIPTOR_TYPE_STORAGE_IMAGE";
+        case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
+            return "VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER";
+        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+            return "VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER";
+        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+            return "VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC";
+        case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+            return "VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER";
+        default:
+            return "Unhandled VkDescriptorType";
+    }
+}
+
+static inline const char* string_VkDescriptorPoolCreateFlagBits(VkDescriptorPoolCreateFlagBits input_value)
+{
+    switch ((VkDescriptorPoolCreateFlagBits)input_value)
+    {
+        case VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT:
+            return "VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT";
+        case VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT:
+            return "VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT";
+        default:
+            return "Unhandled VkDescriptorPoolCreateFlagBits";
+    }
+}
+
+static inline std::string string_VkDescriptorPoolCreateFlags(VkDescriptorPoolCreateFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkDescriptorPoolCreateFlagBits(static_cast<VkDescriptorPoolCreateFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkDescriptorPoolCreateFlagBits(static_cast<VkDescriptorPoolCreateFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkFramebufferCreateFlagBits(VkFramebufferCreateFlagBits input_value)
+{
+    switch ((VkFramebufferCreateFlagBits)input_value)
+    {
+        case VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR:
+            return "VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR";
+        default:
+            return "Unhandled VkFramebufferCreateFlagBits";
+    }
+}
+
+static inline std::string string_VkFramebufferCreateFlags(VkFramebufferCreateFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkFramebufferCreateFlagBits(static_cast<VkFramebufferCreateFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkFramebufferCreateFlagBits(static_cast<VkFramebufferCreateFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkAttachmentDescriptionFlagBits(VkAttachmentDescriptionFlagBits input_value)
+{
+    switch ((VkAttachmentDescriptionFlagBits)input_value)
+    {
+        case VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT:
+            return "VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT";
+        default:
+            return "Unhandled VkAttachmentDescriptionFlagBits";
+    }
+}
+
+static inline std::string string_VkAttachmentDescriptionFlags(VkAttachmentDescriptionFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkAttachmentDescriptionFlagBits(static_cast<VkAttachmentDescriptionFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkAttachmentDescriptionFlagBits(static_cast<VkAttachmentDescriptionFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkAttachmentLoadOp(VkAttachmentLoadOp input_value)
+{
+    switch ((VkAttachmentLoadOp)input_value)
+    {
+        case VK_ATTACHMENT_LOAD_OP_CLEAR:
+            return "VK_ATTACHMENT_LOAD_OP_CLEAR";
+        case VK_ATTACHMENT_LOAD_OP_DONT_CARE:
+            return "VK_ATTACHMENT_LOAD_OP_DONT_CARE";
+        case VK_ATTACHMENT_LOAD_OP_LOAD:
+            return "VK_ATTACHMENT_LOAD_OP_LOAD";
+        default:
+            return "Unhandled VkAttachmentLoadOp";
+    }
+}
+
+static inline const char* string_VkAttachmentStoreOp(VkAttachmentStoreOp input_value)
+{
+    switch ((VkAttachmentStoreOp)input_value)
+    {
+        case VK_ATTACHMENT_STORE_OP_DONT_CARE:
+            return "VK_ATTACHMENT_STORE_OP_DONT_CARE";
+        case VK_ATTACHMENT_STORE_OP_STORE:
+            return "VK_ATTACHMENT_STORE_OP_STORE";
+        default:
+            return "Unhandled VkAttachmentStoreOp";
+    }
+}
+
+static inline const char* string_VkSubpassDescriptionFlagBits(VkSubpassDescriptionFlagBits input_value)
+{
+    switch ((VkSubpassDescriptionFlagBits)input_value)
+    {
+        case VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX:
+            return "VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX";
+        case VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX:
+            return "VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX";
+        default:
+            return "Unhandled VkSubpassDescriptionFlagBits";
+    }
+}
+
+static inline std::string string_VkSubpassDescriptionFlags(VkSubpassDescriptionFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkSubpassDescriptionFlagBits(static_cast<VkSubpassDescriptionFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkSubpassDescriptionFlagBits(static_cast<VkSubpassDescriptionFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkPipelineBindPoint(VkPipelineBindPoint input_value)
+{
+    switch ((VkPipelineBindPoint)input_value)
+    {
+        case VK_PIPELINE_BIND_POINT_COMPUTE:
+            return "VK_PIPELINE_BIND_POINT_COMPUTE";
+        case VK_PIPELINE_BIND_POINT_GRAPHICS:
+            return "VK_PIPELINE_BIND_POINT_GRAPHICS";
+        case VK_PIPELINE_BIND_POINT_RAY_TRACING_NV:
+            return "VK_PIPELINE_BIND_POINT_RAY_TRACING_NV";
+        default:
+            return "Unhandled VkPipelineBindPoint";
+    }
+}
+
+static inline const char* string_VkAccessFlagBits(VkAccessFlagBits input_value)
+{
+    switch ((VkAccessFlagBits)input_value)
+    {
+        case VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV:
+            return "VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV";
+        case VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV:
+            return "VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV";
+        case VK_ACCESS_COLOR_ATTACHMENT_READ_BIT:
+            return "VK_ACCESS_COLOR_ATTACHMENT_READ_BIT";
+        case VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT:
+            return "VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT";
+        case VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT:
+            return "VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT";
+        case VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX:
+            return "VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX";
+        case VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX:
+            return "VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX";
+        case VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT:
+            return "VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT";
+        case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT:
+            return "VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT";
+        case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT:
+            return "VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT";
+        case VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT:
+            return "VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT";
+        case VK_ACCESS_HOST_READ_BIT:
+            return "VK_ACCESS_HOST_READ_BIT";
+        case VK_ACCESS_HOST_WRITE_BIT:
+            return "VK_ACCESS_HOST_WRITE_BIT";
+        case VK_ACCESS_INDEX_READ_BIT:
+            return "VK_ACCESS_INDEX_READ_BIT";
+        case VK_ACCESS_INDIRECT_COMMAND_READ_BIT:
+            return "VK_ACCESS_INDIRECT_COMMAND_READ_BIT";
+        case VK_ACCESS_INPUT_ATTACHMENT_READ_BIT:
+            return "VK_ACCESS_INPUT_ATTACHMENT_READ_BIT";
+        case VK_ACCESS_MEMORY_READ_BIT:
+            return "VK_ACCESS_MEMORY_READ_BIT";
+        case VK_ACCESS_MEMORY_WRITE_BIT:
+            return "VK_ACCESS_MEMORY_WRITE_BIT";
+        case VK_ACCESS_SHADER_READ_BIT:
+            return "VK_ACCESS_SHADER_READ_BIT";
+        case VK_ACCESS_SHADER_WRITE_BIT:
+            return "VK_ACCESS_SHADER_WRITE_BIT";
+        case VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV:
+            return "VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV";
+        case VK_ACCESS_TRANSFER_READ_BIT:
+            return "VK_ACCESS_TRANSFER_READ_BIT";
+        case VK_ACCESS_TRANSFER_WRITE_BIT:
+            return "VK_ACCESS_TRANSFER_WRITE_BIT";
+        case VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT:
+            return "VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT";
+        case VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT:
+            return "VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT";
+        case VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT:
+            return "VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT";
+        case VK_ACCESS_UNIFORM_READ_BIT:
+            return "VK_ACCESS_UNIFORM_READ_BIT";
+        case VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT:
+            return "VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT";
+        default:
+            return "Unhandled VkAccessFlagBits";
+    }
+}
+
+static inline std::string string_VkAccessFlags(VkAccessFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkAccessFlagBits(static_cast<VkAccessFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkAccessFlagBits(static_cast<VkAccessFlagBits>(0)));
+    return ret;
+}
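+// A typical use is decoding barrier masks for logging; for instance, with the
+// standard VkAccessFlagBits values,
+//   string_VkAccessFlags(VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_TRANSFER_WRITE_BIT)
+// returns "VK_ACCESS_SHADER_READ_BIT|VK_ACCESS_TRANSFER_WRITE_BIT".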
+
+static inline const char* string_VkDependencyFlagBits(VkDependencyFlagBits input_value)
+{
+    switch ((VkDependencyFlagBits)input_value)
+    {
+        case VK_DEPENDENCY_BY_REGION_BIT:
+            return "VK_DEPENDENCY_BY_REGION_BIT";
+        case VK_DEPENDENCY_DEVICE_GROUP_BIT:
+            return "VK_DEPENDENCY_DEVICE_GROUP_BIT";
+        case VK_DEPENDENCY_VIEW_LOCAL_BIT:
+            return "VK_DEPENDENCY_VIEW_LOCAL_BIT";
+        default:
+            return "Unhandled VkDependencyFlagBits";
+    }
+}
+
+static inline std::string string_VkDependencyFlags(VkDependencyFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkDependencyFlagBits(static_cast<VkDependencyFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkDependencyFlagBits(static_cast<VkDependencyFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkCommandPoolCreateFlagBits(VkCommandPoolCreateFlagBits input_value)
+{
+    switch ((VkCommandPoolCreateFlagBits)input_value)
+    {
+        case VK_COMMAND_POOL_CREATE_PROTECTED_BIT:
+            return "VK_COMMAND_POOL_CREATE_PROTECTED_BIT";
+        case VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT:
+            return "VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT";
+        case VK_COMMAND_POOL_CREATE_TRANSIENT_BIT:
+            return "VK_COMMAND_POOL_CREATE_TRANSIENT_BIT";
+        default:
+            return "Unhandled VkCommandPoolCreateFlagBits";
+    }
+}
+
+static inline std::string string_VkCommandPoolCreateFlags(VkCommandPoolCreateFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkCommandPoolCreateFlagBits(static_cast<VkCommandPoolCreateFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkCommandPoolCreateFlagBits(static_cast<VkCommandPoolCreateFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkCommandPoolResetFlagBits(VkCommandPoolResetFlagBits input_value)
+{
+    switch ((VkCommandPoolResetFlagBits)input_value)
+    {
+        case VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT:
+            return "VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT";
+        default:
+            return "Unhandled VkCommandPoolResetFlagBits";
+    }
+}
+
+static inline std::string string_VkCommandPoolResetFlags(VkCommandPoolResetFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkCommandPoolResetFlagBits(static_cast<VkCommandPoolResetFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkCommandPoolResetFlagBits(static_cast<VkCommandPoolResetFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkCommandBufferLevel(VkCommandBufferLevel input_value)
+{
+    switch ((VkCommandBufferLevel)input_value)
+    {
+        case VK_COMMAND_BUFFER_LEVEL_PRIMARY:
+            return "VK_COMMAND_BUFFER_LEVEL_PRIMARY";
+        case VK_COMMAND_BUFFER_LEVEL_SECONDARY:
+            return "VK_COMMAND_BUFFER_LEVEL_SECONDARY";
+        default:
+            return "Unhandled VkCommandBufferLevel";
+    }
+}
+
+static inline const char* string_VkCommandBufferUsageFlagBits(VkCommandBufferUsageFlagBits input_value)
+{
+    switch ((VkCommandBufferUsageFlagBits)input_value)
+    {
+        case VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT:
+            return "VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT";
+        case VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT:
+            return "VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT";
+        case VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT:
+            return "VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT";
+        default:
+            return "Unhandled VkCommandBufferUsageFlagBits";
+    }
+}
+
+static inline std::string string_VkCommandBufferUsageFlags(VkCommandBufferUsageFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkCommandBufferUsageFlagBits(static_cast<VkCommandBufferUsageFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkCommandBufferUsageFlagBits(static_cast<VkCommandBufferUsageFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkQueryControlFlagBits(VkQueryControlFlagBits input_value)
+{
+    switch ((VkQueryControlFlagBits)input_value)
+    {
+        case VK_QUERY_CONTROL_PRECISE_BIT:
+            return "VK_QUERY_CONTROL_PRECISE_BIT";
+        default:
+            return "Unhandled VkQueryControlFlagBits";
+    }
+}
+
+static inline std::string string_VkQueryControlFlags(VkQueryControlFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkQueryControlFlagBits(static_cast<VkQueryControlFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkQueryControlFlagBits(static_cast<VkQueryControlFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkCommandBufferResetFlagBits(VkCommandBufferResetFlagBits input_value)
+{
+    switch ((VkCommandBufferResetFlagBits)input_value)
+    {
+        case VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT:
+            return "VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT";
+        default:
+            return "Unhandled VkCommandBufferResetFlagBits";
+    }
+}
+
+static inline std::string string_VkCommandBufferResetFlags(VkCommandBufferResetFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkCommandBufferResetFlagBits(static_cast<VkCommandBufferResetFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkCommandBufferResetFlagBits(static_cast<VkCommandBufferResetFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkStencilFaceFlagBits(VkStencilFaceFlagBits input_value)
+{
+    switch ((VkStencilFaceFlagBits)input_value)
+    {
+        case VK_STENCIL_FACE_BACK_BIT:
+            return "VK_STENCIL_FACE_BACK_BIT";
+        case VK_STENCIL_FACE_FRONT_AND_BACK:
+            return "VK_STENCIL_FACE_FRONT_AND_BACK";
+        case VK_STENCIL_FACE_FRONT_BIT:
+            return "VK_STENCIL_FACE_FRONT_BIT";
+        default:
+            return "Unhandled VkStencilFaceFlagBits";
+    }
+}
+
+static inline std::string string_VkStencilFaceFlags(VkStencilFaceFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkStencilFaceFlagBits(static_cast<VkStencilFaceFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkStencilFaceFlagBits(static_cast<VkStencilFaceFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkIndexType(VkIndexType input_value)
+{
+    switch ((VkIndexType)input_value)
+    {
+        case VK_INDEX_TYPE_NONE_NV:
+            return "VK_INDEX_TYPE_NONE_NV";
+        case VK_INDEX_TYPE_UINT16:
+            return "VK_INDEX_TYPE_UINT16";
+        case VK_INDEX_TYPE_UINT32:
+            return "VK_INDEX_TYPE_UINT32";
+        case VK_INDEX_TYPE_UINT8_EXT:
+            return "VK_INDEX_TYPE_UINT8_EXT";
+        default:
+            return "Unhandled VkIndexType";
+    }
+}
+
+static inline const char* string_VkSubpassContents(VkSubpassContents input_value)
+{
+    switch ((VkSubpassContents)input_value)
+    {
+        case VK_SUBPASS_CONTENTS_INLINE:
+            return "VK_SUBPASS_CONTENTS_INLINE";
+        case VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS:
+            return "VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS";
+        default:
+            return "Unhandled VkSubpassContents";
+    }
+}
+
+static inline const char* string_VkObjectType(VkObjectType input_value)
+{
+    switch ((VkObjectType)input_value)
+    {
+        case VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV:
+            return "VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV";
+        case VK_OBJECT_TYPE_BUFFER:
+            return "VK_OBJECT_TYPE_BUFFER";
+        case VK_OBJECT_TYPE_BUFFER_VIEW:
+            return "VK_OBJECT_TYPE_BUFFER_VIEW";
+        case VK_OBJECT_TYPE_COMMAND_BUFFER:
+            return "VK_OBJECT_TYPE_COMMAND_BUFFER";
+        case VK_OBJECT_TYPE_COMMAND_POOL:
+            return "VK_OBJECT_TYPE_COMMAND_POOL";
+        case VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT:
+            return "VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT";
+        case VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT:
+            return "VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT";
+        case VK_OBJECT_TYPE_DESCRIPTOR_POOL:
+            return "VK_OBJECT_TYPE_DESCRIPTOR_POOL";
+        case VK_OBJECT_TYPE_DESCRIPTOR_SET:
+            return "VK_OBJECT_TYPE_DESCRIPTOR_SET";
+        case VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT:
+            return "VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT";
+        case VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE:
+            return "VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE";
+        case VK_OBJECT_TYPE_DEVICE:
+            return "VK_OBJECT_TYPE_DEVICE";
+        case VK_OBJECT_TYPE_DEVICE_MEMORY:
+            return "VK_OBJECT_TYPE_DEVICE_MEMORY";
+        case VK_OBJECT_TYPE_DISPLAY_KHR:
+            return "VK_OBJECT_TYPE_DISPLAY_KHR";
+        case VK_OBJECT_TYPE_DISPLAY_MODE_KHR:
+            return "VK_OBJECT_TYPE_DISPLAY_MODE_KHR";
+        case VK_OBJECT_TYPE_EVENT:
+            return "VK_OBJECT_TYPE_EVENT";
+        case VK_OBJECT_TYPE_FENCE:
+            return "VK_OBJECT_TYPE_FENCE";
+        case VK_OBJECT_TYPE_FRAMEBUFFER:
+            return "VK_OBJECT_TYPE_FRAMEBUFFER";
+        case VK_OBJECT_TYPE_IMAGE:
+            return "VK_OBJECT_TYPE_IMAGE";
+        case VK_OBJECT_TYPE_IMAGE_VIEW:
+            return "VK_OBJECT_TYPE_IMAGE_VIEW";
+        case VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX:
+            return "VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX";
+        case VK_OBJECT_TYPE_INSTANCE:
+            return "VK_OBJECT_TYPE_INSTANCE";
+        case VK_OBJECT_TYPE_OBJECT_TABLE_NVX:
+            return "VK_OBJECT_TYPE_OBJECT_TABLE_NVX";
+        case VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL:
+            return "VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL";
+        case VK_OBJECT_TYPE_PHYSICAL_DEVICE:
+            return "VK_OBJECT_TYPE_PHYSICAL_DEVICE";
+        case VK_OBJECT_TYPE_PIPELINE:
+            return "VK_OBJECT_TYPE_PIPELINE";
+        case VK_OBJECT_TYPE_PIPELINE_CACHE:
+            return "VK_OBJECT_TYPE_PIPELINE_CACHE";
+        case VK_OBJECT_TYPE_PIPELINE_LAYOUT:
+            return "VK_OBJECT_TYPE_PIPELINE_LAYOUT";
+        case VK_OBJECT_TYPE_QUERY_POOL:
+            return "VK_OBJECT_TYPE_QUERY_POOL";
+        case VK_OBJECT_TYPE_QUEUE:
+            return "VK_OBJECT_TYPE_QUEUE";
+        case VK_OBJECT_TYPE_RENDER_PASS:
+            return "VK_OBJECT_TYPE_RENDER_PASS";
+        case VK_OBJECT_TYPE_SAMPLER:
+            return "VK_OBJECT_TYPE_SAMPLER";
+        case VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION:
+            return "VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION";
+        case VK_OBJECT_TYPE_SEMAPHORE:
+            return "VK_OBJECT_TYPE_SEMAPHORE";
+        case VK_OBJECT_TYPE_SHADER_MODULE:
+            return "VK_OBJECT_TYPE_SHADER_MODULE";
+        case VK_OBJECT_TYPE_SURFACE_KHR:
+            return "VK_OBJECT_TYPE_SURFACE_KHR";
+        case VK_OBJECT_TYPE_SWAPCHAIN_KHR:
+            return "VK_OBJECT_TYPE_SWAPCHAIN_KHR";
+        case VK_OBJECT_TYPE_UNKNOWN:
+            return "VK_OBJECT_TYPE_UNKNOWN";
+        case VK_OBJECT_TYPE_VALIDATION_CACHE_EXT:
+            return "VK_OBJECT_TYPE_VALIDATION_CACHE_EXT";
+        default:
+            return "Unhandled VkObjectType";
+    }
+}
+
+static inline const char* string_VkVendorId(VkVendorId input_value)
+{
+    switch ((VkVendorId)input_value)
+    {
+        case VK_VENDOR_ID_KAZAN:
+            return "VK_VENDOR_ID_KAZAN";
+        case VK_VENDOR_ID_VIV:
+            return "VK_VENDOR_ID_VIV";
+        case VK_VENDOR_ID_VSI:
+            return "VK_VENDOR_ID_VSI";
+        default:
+            return "Unhandled VkVendorId";
+    }
+}
+
+static inline const char* string_VkSubgroupFeatureFlagBits(VkSubgroupFeatureFlagBits input_value)
+{
+    switch ((VkSubgroupFeatureFlagBits)input_value)
+    {
+        case VK_SUBGROUP_FEATURE_ARITHMETIC_BIT:
+            return "VK_SUBGROUP_FEATURE_ARITHMETIC_BIT";
+        case VK_SUBGROUP_FEATURE_BALLOT_BIT:
+            return "VK_SUBGROUP_FEATURE_BALLOT_BIT";
+        case VK_SUBGROUP_FEATURE_BASIC_BIT:
+            return "VK_SUBGROUP_FEATURE_BASIC_BIT";
+        case VK_SUBGROUP_FEATURE_CLUSTERED_BIT:
+            return "VK_SUBGROUP_FEATURE_CLUSTERED_BIT";
+        case VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV:
+            return "VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV";
+        case VK_SUBGROUP_FEATURE_QUAD_BIT:
+            return "VK_SUBGROUP_FEATURE_QUAD_BIT";
+        case VK_SUBGROUP_FEATURE_SHUFFLE_BIT:
+            return "VK_SUBGROUP_FEATURE_SHUFFLE_BIT";
+        case VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT:
+            return "VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT";
+        case VK_SUBGROUP_FEATURE_VOTE_BIT:
+            return "VK_SUBGROUP_FEATURE_VOTE_BIT";
+        default:
+            return "Unhandled VkSubgroupFeatureFlagBits";
+    }
+}
+
+static inline std::string string_VkSubgroupFeatureFlags(VkSubgroupFeatureFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkSubgroupFeatureFlagBits(static_cast<VkSubgroupFeatureFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkSubgroupFeatureFlagBits(static_cast<VkSubgroupFeatureFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkPeerMemoryFeatureFlagBits(VkPeerMemoryFeatureFlagBits input_value)
+{
+    switch ((VkPeerMemoryFeatureFlagBits)input_value)
+    {
+        case VK_PEER_MEMORY_FEATURE_COPY_DST_BIT:
+            return "VK_PEER_MEMORY_FEATURE_COPY_DST_BIT";
+        case VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT:
+            return "VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT";
+        case VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT:
+            return "VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT";
+        case VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT:
+            return "VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT";
+        default:
+            return "Unhandled VkPeerMemoryFeatureFlagBits";
+    }
+}
+
+static inline std::string string_VkPeerMemoryFeatureFlags(VkPeerMemoryFeatureFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkPeerMemoryFeatureFlagBits(static_cast<VkPeerMemoryFeatureFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkPeerMemoryFeatureFlagBits(static_cast<VkPeerMemoryFeatureFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkMemoryAllocateFlagBits(VkMemoryAllocateFlagBits input_value)
+{
+    switch ((VkMemoryAllocateFlagBits)input_value)
+    {
+        case VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR:
+            return "VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR";
+        case VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR:
+            return "VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR";
+        case VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT:
+            return "VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT";
+        default:
+            return "Unhandled VkMemoryAllocateFlagBits";
+    }
+}
+
+static inline std::string string_VkMemoryAllocateFlags(VkMemoryAllocateFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkMemoryAllocateFlagBits(static_cast<VkMemoryAllocateFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkMemoryAllocateFlagBits(static_cast<VkMemoryAllocateFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkPointClippingBehavior(VkPointClippingBehavior input_value)
+{
+    switch ((VkPointClippingBehavior)input_value)
+    {
+        case VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES:
+            return "VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES";
+        case VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY:
+            return "VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY";
+        default:
+            return "Unhandled VkPointClippingBehavior";
+    }
+}
+
+static inline const char* string_VkTessellationDomainOrigin(VkTessellationDomainOrigin input_value)
+{
+    switch ((VkTessellationDomainOrigin)input_value)
+    {
+        case VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT:
+            return "VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT";
+        case VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT:
+            return "VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT";
+        default:
+            return "Unhandled VkTessellationDomainOrigin";
+    }
+}
+
+static inline const char* string_VkSamplerYcbcrModelConversion(VkSamplerYcbcrModelConversion input_value)
+{
+    switch ((VkSamplerYcbcrModelConversion)input_value)
+    {
+        case VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY:
+            return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY";
+        case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020:
+            return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020";
+        case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601:
+            return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601";
+        case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709:
+            return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709";
+        case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY:
+            return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY";
+        default:
+            return "Unhandled VkSamplerYcbcrModelConversion";
+    }
+}
+
+static inline const char* string_VkSamplerYcbcrRange(VkSamplerYcbcrRange input_value)
+{
+    switch ((VkSamplerYcbcrRange)input_value)
+    {
+        case VK_SAMPLER_YCBCR_RANGE_ITU_FULL:
+            return "VK_SAMPLER_YCBCR_RANGE_ITU_FULL";
+        case VK_SAMPLER_YCBCR_RANGE_ITU_NARROW:
+            return "VK_SAMPLER_YCBCR_RANGE_ITU_NARROW";
+        default:
+            return "Unhandled VkSamplerYcbcrRange";
+    }
+}
+
+static inline const char* string_VkChromaLocation(VkChromaLocation input_value)
+{
+    switch ((VkChromaLocation)input_value)
+    {
+        case VK_CHROMA_LOCATION_COSITED_EVEN:
+            return "VK_CHROMA_LOCATION_COSITED_EVEN";
+        case VK_CHROMA_LOCATION_MIDPOINT:
+            return "VK_CHROMA_LOCATION_MIDPOINT";
+        default:
+            return "Unhandled VkChromaLocation";
+    }
+}
+
+static inline const char* string_VkDescriptorUpdateTemplateType(VkDescriptorUpdateTemplateType input_value)
+{
+    switch ((VkDescriptorUpdateTemplateType)input_value)
+    {
+        case VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET:
+            return "VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET";
+        case VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR:
+            return "VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR";
+        default:
+            return "Unhandled VkDescriptorUpdateTemplateType";
+    }
+}
+
+static inline const char* string_VkExternalMemoryHandleTypeFlagBits(VkExternalMemoryHandleTypeFlagBits input_value)
+{
+    switch ((VkExternalMemoryHandleTypeFlagBits)input_value)
+    {
+        case VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID:
+            return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID";
+        case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT:
+            return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT";
+        case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT:
+            return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT";
+        case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT:
+            return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT";
+        case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT:
+            return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT";
+        case VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT:
+            return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT";
+        case VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT:
+            return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT";
+        case VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT:
+            return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT";
+        case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT:
+            return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT";
+        case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT:
+            return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT";
+        case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT:
+            return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT";
+        default:
+            return "Unhandled VkExternalMemoryHandleTypeFlagBits";
+    }
+}
+
+static inline std::string string_VkExternalMemoryHandleTypeFlags(VkExternalMemoryHandleTypeFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkExternalMemoryHandleTypeFlagBits(static_cast<VkExternalMemoryHandleTypeFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkExternalMemoryHandleTypeFlagBits(static_cast<VkExternalMemoryHandleTypeFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkExternalMemoryFeatureFlagBits(VkExternalMemoryFeatureFlagBits input_value)
+{
+    switch ((VkExternalMemoryFeatureFlagBits)input_value)
+    {
+        case VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT:
+            return "VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT";
+        case VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT:
+            return "VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT";
+        case VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT:
+            return "VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT";
+        default:
+            return "Unhandled VkExternalMemoryFeatureFlagBits";
+    }
+}
+
+static inline std::string string_VkExternalMemoryFeatureFlags(VkExternalMemoryFeatureFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkExternalMemoryFeatureFlagBits(static_cast<VkExternalMemoryFeatureFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkExternalMemoryFeatureFlagBits(static_cast<VkExternalMemoryFeatureFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkExternalFenceHandleTypeFlagBits(VkExternalFenceHandleTypeFlagBits input_value)
+{
+    switch ((VkExternalFenceHandleTypeFlagBits)input_value)
+    {
+        case VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT:
+            return "VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT";
+        case VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT:
+            return "VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT";
+        case VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT:
+            return "VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT";
+        case VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT:
+            return "VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT";
+        default:
+            return "Unhandled VkExternalFenceHandleTypeFlagBits";
+    }
+}
+
+static inline std::string string_VkExternalFenceHandleTypeFlags(VkExternalFenceHandleTypeFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkExternalFenceHandleTypeFlagBits(static_cast<VkExternalFenceHandleTypeFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkExternalFenceHandleTypeFlagBits(static_cast<VkExternalFenceHandleTypeFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkExternalFenceFeatureFlagBits(VkExternalFenceFeatureFlagBits input_value)
+{
+    switch ((VkExternalFenceFeatureFlagBits)input_value)
+    {
+        case VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT:
+            return "VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT";
+        case VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT:
+            return "VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT";
+        default:
+            return "Unhandled VkExternalFenceFeatureFlagBits";
+    }
+}
+
+static inline std::string string_VkExternalFenceFeatureFlags(VkExternalFenceFeatureFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkExternalFenceFeatureFlagBits(static_cast<VkExternalFenceFeatureFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkExternalFenceFeatureFlagBits(static_cast<VkExternalFenceFeatureFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkFenceImportFlagBits(VkFenceImportFlagBits input_value)
+{
+    switch ((VkFenceImportFlagBits)input_value)
+    {
+        case VK_FENCE_IMPORT_TEMPORARY_BIT:
+            return "VK_FENCE_IMPORT_TEMPORARY_BIT";
+        default:
+            return "Unhandled VkFenceImportFlagBits";
+    }
+}
+
+static inline std::string string_VkFenceImportFlags(VkFenceImportFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkFenceImportFlagBits(static_cast<VkFenceImportFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkFenceImportFlagBits(static_cast<VkFenceImportFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkSemaphoreImportFlagBits(VkSemaphoreImportFlagBits input_value)
+{
+    switch ((VkSemaphoreImportFlagBits)input_value)
+    {
+        case VK_SEMAPHORE_IMPORT_TEMPORARY_BIT:
+            return "VK_SEMAPHORE_IMPORT_TEMPORARY_BIT";
+        default:
+            return "Unhandled VkSemaphoreImportFlagBits";
+    }
+}
+
+static inline std::string string_VkSemaphoreImportFlags(VkSemaphoreImportFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkSemaphoreImportFlagBits(static_cast<VkSemaphoreImportFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkSemaphoreImportFlagBits(static_cast<VkSemaphoreImportFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkExternalSemaphoreHandleTypeFlagBits(VkExternalSemaphoreHandleTypeFlagBits input_value)
+{
+    switch ((VkExternalSemaphoreHandleTypeFlagBits)input_value)
+    {
+        case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT:
+            return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT";
+        case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT:
+            return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT";
+        case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT:
+            return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT";
+        case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT:
+            return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT";
+        case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT:
+            return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT";
+        default:
+            return "Unhandled VkExternalSemaphoreHandleTypeFlagBits";
+    }
+}
+
+static inline std::string string_VkExternalSemaphoreHandleTypeFlags(VkExternalSemaphoreHandleTypeFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkExternalSemaphoreHandleTypeFlagBits(static_cast<VkExternalSemaphoreHandleTypeFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkExternalSemaphoreHandleTypeFlagBits(static_cast<VkExternalSemaphoreHandleTypeFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkExternalSemaphoreFeatureFlagBits(VkExternalSemaphoreFeatureFlagBits input_value)
+{
+    switch ((VkExternalSemaphoreFeatureFlagBits)input_value)
+    {
+        case VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT:
+            return "VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT";
+        case VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT:
+            return "VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT";
+        default:
+            return "Unhandled VkExternalSemaphoreFeatureFlagBits";
+    }
+}
+
+static inline std::string string_VkExternalSemaphoreFeatureFlags(VkExternalSemaphoreFeatureFlags input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkExternalSemaphoreFeatureFlagBits(static_cast<VkExternalSemaphoreFeatureFlagBits>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkExternalSemaphoreFeatureFlagBits(static_cast<VkExternalSemaphoreFeatureFlagBits>(0)));
+    return ret;
+}
+
+static inline const char* string_VkSurfaceTransformFlagBitsKHR(VkSurfaceTransformFlagBitsKHR input_value)
+{
+    switch ((VkSurfaceTransformFlagBitsKHR)input_value)
+    {
+        case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR:
+            return "VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR";
+        case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR:
+            return "VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR";
+        case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR:
+            return "VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR";
+        case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR:
+            return "VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR";
+        case VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR:
+            return "VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR";
+        case VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR:
+            return "VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR";
+        case VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR:
+            return "VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR";
+        case VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR:
+            return "VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR";
+        case VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR:
+            return "VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR";
+        default:
+            return "Unhandled VkSurfaceTransformFlagBitsKHR";
+    }
+}
+
+static inline std::string string_VkSurfaceTransformFlagsKHR(VkSurfaceTransformFlagsKHR input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkSurfaceTransformFlagBitsKHR(static_cast<VkSurfaceTransformFlagBitsKHR>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkSurfaceTransformFlagBitsKHR(static_cast<VkSurfaceTransformFlagBitsKHR>(0)));
+    return ret;
+}
+
+static inline const char* string_VkCompositeAlphaFlagBitsKHR(VkCompositeAlphaFlagBitsKHR input_value)
+{
+    switch ((VkCompositeAlphaFlagBitsKHR)input_value)
+    {
+        case VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR:
+            return "VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR";
+        case VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR:
+            return "VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR";
+        case VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR:
+            return "VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR";
+        case VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR:
+            return "VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR";
+        default:
+            return "Unhandled VkCompositeAlphaFlagBitsKHR";
+    }
+}
+
+static inline std::string string_VkCompositeAlphaFlagsKHR(VkCompositeAlphaFlagsKHR input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkCompositeAlphaFlagBitsKHR(static_cast<VkCompositeAlphaFlagBitsKHR>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkCompositeAlphaFlagBitsKHR(static_cast<VkCompositeAlphaFlagBitsKHR>(0)));
+    return ret;
+}
+
+static inline const char* string_VkColorSpaceKHR(VkColorSpaceKHR input_value)
+{
+    switch ((VkColorSpaceKHR)input_value)
+    {
+        case VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT:
+            return "VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT";
+        case VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT:
+            return "VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT";
+        case VK_COLOR_SPACE_BT2020_LINEAR_EXT:
+            return "VK_COLOR_SPACE_BT2020_LINEAR_EXT";
+        case VK_COLOR_SPACE_BT709_LINEAR_EXT:
+            return "VK_COLOR_SPACE_BT709_LINEAR_EXT";
+        case VK_COLOR_SPACE_BT709_NONLINEAR_EXT:
+            return "VK_COLOR_SPACE_BT709_NONLINEAR_EXT";
+        case VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT:
+            return "VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT";
+        case VK_COLOR_SPACE_DISPLAY_NATIVE_AMD:
+            return "VK_COLOR_SPACE_DISPLAY_NATIVE_AMD";
+        case VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT:
+            return "VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT";
+        case VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT:
+            return "VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT";
+        case VK_COLOR_SPACE_DOLBYVISION_EXT:
+            return "VK_COLOR_SPACE_DOLBYVISION_EXT";
+        case VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT:
+            return "VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT";
+        case VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT:
+            return "VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT";
+        case VK_COLOR_SPACE_HDR10_HLG_EXT:
+            return "VK_COLOR_SPACE_HDR10_HLG_EXT";
+        case VK_COLOR_SPACE_HDR10_ST2084_EXT:
+            return "VK_COLOR_SPACE_HDR10_ST2084_EXT";
+        case VK_COLOR_SPACE_PASS_THROUGH_EXT:
+            return "VK_COLOR_SPACE_PASS_THROUGH_EXT";
+        case VK_COLOR_SPACE_SRGB_NONLINEAR_KHR:
+            return "VK_COLOR_SPACE_SRGB_NONLINEAR_KHR";
+        default:
+            return "Unhandled VkColorSpaceKHR";
+    }
+}
+
+static inline const char* string_VkPresentModeKHR(VkPresentModeKHR input_value)
+{
+    switch ((VkPresentModeKHR)input_value)
+    {
+        case VK_PRESENT_MODE_FIFO_KHR:
+            return "VK_PRESENT_MODE_FIFO_KHR";
+        case VK_PRESENT_MODE_FIFO_RELAXED_KHR:
+            return "VK_PRESENT_MODE_FIFO_RELAXED_KHR";
+        case VK_PRESENT_MODE_IMMEDIATE_KHR:
+            return "VK_PRESENT_MODE_IMMEDIATE_KHR";
+        case VK_PRESENT_MODE_MAILBOX_KHR:
+            return "VK_PRESENT_MODE_MAILBOX_KHR";
+        case VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR:
+            return "VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR";
+        case VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR:
+            return "VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR";
+        default:
+            return "Unhandled VkPresentModeKHR";
+    }
+}
+
+static inline const char* string_VkSwapchainCreateFlagBitsKHR(VkSwapchainCreateFlagBitsKHR input_value)
+{
+    switch ((VkSwapchainCreateFlagBitsKHR)input_value)
+    {
+        case VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR:
+            return "VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR";
+        case VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR:
+            return "VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR";
+        case VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR:
+            return "VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR";
+        default:
+            return "Unhandled VkSwapchainCreateFlagBitsKHR";
+    }
+}
+
+static inline std::string string_VkSwapchainCreateFlagsKHR(VkSwapchainCreateFlagsKHR input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkSwapchainCreateFlagBitsKHR(static_cast<VkSwapchainCreateFlagBitsKHR>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkSwapchainCreateFlagBitsKHR(static_cast<VkSwapchainCreateFlagBitsKHR>(0)));
+    return ret;
+}
+
+static inline const char* string_VkDeviceGroupPresentModeFlagBitsKHR(VkDeviceGroupPresentModeFlagBitsKHR input_value)
+{
+    switch ((VkDeviceGroupPresentModeFlagBitsKHR)input_value)
+    {
+        case VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR:
+            return "VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR";
+        case VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR:
+            return "VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR";
+        case VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR:
+            return "VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR";
+        case VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR:
+            return "VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR";
+        default:
+            return "Unhandled VkDeviceGroupPresentModeFlagBitsKHR";
+    }
+}
+
+static inline std::string string_VkDeviceGroupPresentModeFlagsKHR(VkDeviceGroupPresentModeFlagsKHR input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkDeviceGroupPresentModeFlagBitsKHR(static_cast<VkDeviceGroupPresentModeFlagBitsKHR>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkDeviceGroupPresentModeFlagBitsKHR(static_cast<VkDeviceGroupPresentModeFlagBitsKHR>(0)));
+    return ret;
+}
+
+static inline const char* string_VkDisplayPlaneAlphaFlagBitsKHR(VkDisplayPlaneAlphaFlagBitsKHR input_value)
+{
+    switch ((VkDisplayPlaneAlphaFlagBitsKHR)input_value)
+    {
+        case VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR:
+            return "VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR";
+        case VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR:
+            return "VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR";
+        case VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR:
+            return "VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR";
+        case VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR:
+            return "VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR";
+        default:
+            return "Unhandled VkDisplayPlaneAlphaFlagBitsKHR";
+    }
+}
+
+static inline std::string string_VkDisplayPlaneAlphaFlagsKHR(VkDisplayPlaneAlphaFlagsKHR input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkDisplayPlaneAlphaFlagBitsKHR(static_cast<VkDisplayPlaneAlphaFlagBitsKHR>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkDisplayPlaneAlphaFlagBitsKHR(static_cast<VkDisplayPlaneAlphaFlagBitsKHR>(0)));
+    return ret;
+}
+
+static inline const char* string_VkPeerMemoryFeatureFlagBitsKHR(VkPeerMemoryFeatureFlagBitsKHR input_value)
+{
+    switch ((VkPeerMemoryFeatureFlagBitsKHR)input_value)
+    {
+        case VK_PEER_MEMORY_FEATURE_COPY_DST_BIT:
+            return "VK_PEER_MEMORY_FEATURE_COPY_DST_BIT";
+        case VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT:
+            return "VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT";
+        case VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT:
+            return "VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT";
+        case VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT:
+            return "VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT";
+        default:
+            return "Unhandled VkPeerMemoryFeatureFlagBitsKHR";
+    }
+}
+
+static inline std::string string_VkPeerMemoryFeatureFlagsKHR(VkPeerMemoryFeatureFlagsKHR input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkPeerMemoryFeatureFlagBitsKHR(static_cast<VkPeerMemoryFeatureFlagBitsKHR>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkPeerMemoryFeatureFlagBitsKHR(static_cast<VkPeerMemoryFeatureFlagBitsKHR>(0)));
+    return ret;
+}
+
+static inline const char* string_VkMemoryAllocateFlagBitsKHR(VkMemoryAllocateFlagBitsKHR input_value)
+{
+    switch ((VkMemoryAllocateFlagBitsKHR)input_value)
+    {
+        case VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR:
+            return "VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR";
+        case VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR:
+            return "VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR";
+        case VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT:
+            return "VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT";
+        default:
+            return "Unhandled VkMemoryAllocateFlagBitsKHR";
+    }
+}
+
+static inline std::string string_VkMemoryAllocateFlagsKHR(VkMemoryAllocateFlagsKHR input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkMemoryAllocateFlagBitsKHR(static_cast<VkMemoryAllocateFlagBitsKHR>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkMemoryAllocateFlagBitsKHR(static_cast<VkMemoryAllocateFlagBitsKHR>(0)));
+    return ret;
+}
+
+static inline const char* string_VkExternalMemoryHandleTypeFlagBitsKHR(VkExternalMemoryHandleTypeFlagBitsKHR input_value)
+{
+    switch ((VkExternalMemoryHandleTypeFlagBitsKHR)input_value)
+    {
+        case VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID:
+            return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID";
+        case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT:
+            return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT";
+        case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT:
+            return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT";
+        case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT:
+            return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT";
+        case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT:
+            return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT";
+        case VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT:
+            return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT";
+        case VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT:
+            return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT";
+        case VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT:
+            return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT";
+        case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT:
+            return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT";
+        case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT:
+            return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT";
+        case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT:
+            return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT";
+        default:
+            return "Unhandled VkExternalMemoryHandleTypeFlagBitsKHR";
+    }
+}
+
+static inline std::string string_VkExternalMemoryHandleTypeFlagsKHR(VkExternalMemoryHandleTypeFlagsKHR input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkExternalMemoryHandleTypeFlagBitsKHR(static_cast<VkExternalMemoryHandleTypeFlagBitsKHR>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkExternalMemoryHandleTypeFlagBitsKHR(static_cast<VkExternalMemoryHandleTypeFlagBitsKHR>(0)));
+    return ret;
+}
+
+static inline const char* string_VkExternalMemoryFeatureFlagBitsKHR(VkExternalMemoryFeatureFlagBitsKHR input_value)
+{
+    switch ((VkExternalMemoryFeatureFlagBitsKHR)input_value)
+    {
+        case VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT:
+            return "VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT";
+        case VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT:
+            return "VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT";
+        case VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT:
+            return "VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT";
+        default:
+            return "Unhandled VkExternalMemoryFeatureFlagBitsKHR";
+    }
+}
+
+static inline std::string string_VkExternalMemoryFeatureFlagsKHR(VkExternalMemoryFeatureFlagsKHR input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkExternalMemoryFeatureFlagBitsKHR(static_cast<VkExternalMemoryFeatureFlagBitsKHR>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkExternalMemoryFeatureFlagBitsKHR(static_cast<VkExternalMemoryFeatureFlagBitsKHR>(0)));
+    return ret;
+}
+
+static inline const char* string_VkExternalSemaphoreHandleTypeFlagBitsKHR(VkExternalSemaphoreHandleTypeFlagBitsKHR input_value)
+{
+    switch ((VkExternalSemaphoreHandleTypeFlagBitsKHR)input_value)
+    {
+        case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT:
+            return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT";
+        case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT:
+            return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT";
+        case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT:
+            return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT";
+        case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT:
+            return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT";
+        case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT:
+            return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT";
+        default:
+            return "Unhandled VkExternalSemaphoreHandleTypeFlagBitsKHR";
+    }
+}
+
+static inline std::string string_VkExternalSemaphoreHandleTypeFlagsKHR(VkExternalSemaphoreHandleTypeFlagsKHR input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkExternalSemaphoreHandleTypeFlagBitsKHR(static_cast<VkExternalSemaphoreHandleTypeFlagBitsKHR>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkExternalSemaphoreHandleTypeFlagBitsKHR(static_cast<VkExternalSemaphoreHandleTypeFlagBitsKHR>(0)));
+    return ret;
+}
+
+static inline const char* string_VkExternalSemaphoreFeatureFlagBitsKHR(VkExternalSemaphoreFeatureFlagBitsKHR input_value)
+{
+    switch ((VkExternalSemaphoreFeatureFlagBitsKHR)input_value)
+    {
+        case VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT:
+            return "VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT";
+        case VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT:
+            return "VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT";
+        default:
+            return "Unhandled VkExternalSemaphoreFeatureFlagBitsKHR";
+    }
+}
+
+static inline std::string string_VkExternalSemaphoreFeatureFlagsKHR(VkExternalSemaphoreFeatureFlagsKHR input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkExternalSemaphoreFeatureFlagBitsKHR(static_cast<VkExternalSemaphoreFeatureFlagBitsKHR>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkExternalSemaphoreFeatureFlagBitsKHR(static_cast<VkExternalSemaphoreFeatureFlagBitsKHR>(0)));
+    return ret;
+}
+
+static inline const char* string_VkSemaphoreImportFlagBitsKHR(VkSemaphoreImportFlagBitsKHR input_value)
+{
+    switch ((VkSemaphoreImportFlagBitsKHR)input_value)
+    {
+        case VK_SEMAPHORE_IMPORT_TEMPORARY_BIT:
+            return "VK_SEMAPHORE_IMPORT_TEMPORARY_BIT";
+        default:
+            return "Unhandled VkSemaphoreImportFlagBitsKHR";
+    }
+}
+
+static inline std::string string_VkSemaphoreImportFlagsKHR(VkSemaphoreImportFlagsKHR input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkSemaphoreImportFlagBitsKHR(static_cast<VkSemaphoreImportFlagBitsKHR>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkSemaphoreImportFlagBitsKHR(static_cast<VkSemaphoreImportFlagBitsKHR>(0)));
+    return ret;
+}
+
+static inline const char* string_VkDescriptorUpdateTemplateTypeKHR(VkDescriptorUpdateTemplateTypeKHR input_value)
+{
+    switch ((VkDescriptorUpdateTemplateTypeKHR)input_value)
+    {
+        case VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET:
+            return "VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET";
+        case VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR:
+            return "VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR";
+        default:
+            return "Unhandled VkDescriptorUpdateTemplateTypeKHR";
+    }
+}
+
+static inline const char* string_VkExternalFenceHandleTypeFlagBitsKHR(VkExternalFenceHandleTypeFlagBitsKHR input_value)
+{
+    switch ((VkExternalFenceHandleTypeFlagBitsKHR)input_value)
+    {
+        case VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT:
+            return "VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT";
+        case VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT:
+            return "VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT";
+        case VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT:
+            return "VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT";
+        case VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT:
+            return "VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT";
+        default:
+            return "Unhandled VkExternalFenceHandleTypeFlagBitsKHR";
+    }
+}
+
+static inline std::string string_VkExternalFenceHandleTypeFlagsKHR(VkExternalFenceHandleTypeFlagsKHR input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkExternalFenceHandleTypeFlagBitsKHR(static_cast<VkExternalFenceHandleTypeFlagBitsKHR>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkExternalFenceHandleTypeFlagBitsKHR(static_cast<VkExternalFenceHandleTypeFlagBitsKHR>(0)));
+    return ret;
+}
+
+static inline const char* string_VkExternalFenceFeatureFlagBitsKHR(VkExternalFenceFeatureFlagBitsKHR input_value)
+{
+    switch ((VkExternalFenceFeatureFlagBitsKHR)input_value)
+    {
+        case VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT:
+            return "VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT";
+        case VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT:
+            return "VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT";
+        default:
+            return "Unhandled VkExternalFenceFeatureFlagBitsKHR";
+    }
+}
+
+static inline std::string string_VkExternalFenceFeatureFlagsKHR(VkExternalFenceFeatureFlagsKHR input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkExternalFenceFeatureFlagBitsKHR(static_cast<VkExternalFenceFeatureFlagBitsKHR>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkExternalFenceFeatureFlagBitsKHR(static_cast<VkExternalFenceFeatureFlagBitsKHR>(0)));
+    return ret;
+}
+
+static inline const char* string_VkFenceImportFlagBitsKHR(VkFenceImportFlagBitsKHR input_value)
+{
+    switch ((VkFenceImportFlagBitsKHR)input_value)
+    {
+        case VK_FENCE_IMPORT_TEMPORARY_BIT:
+            return "VK_FENCE_IMPORT_TEMPORARY_BIT";
+        default:
+            return "Unhandled VkFenceImportFlagBitsKHR";
+    }
+}
+
+static inline std::string string_VkFenceImportFlagsKHR(VkFenceImportFlagsKHR input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkFenceImportFlagBitsKHR(static_cast<VkFenceImportFlagBitsKHR>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkFenceImportFlagBitsKHR(static_cast<VkFenceImportFlagBitsKHR>(0)));
+    return ret;
+}
+
+static inline const char* string_VkPerformanceCounterUnitKHR(VkPerformanceCounterUnitKHR input_value)
+{
+    switch ((VkPerformanceCounterUnitKHR)input_value)
+    {
+        case VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR:
+            return "VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR";
+        case VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR:
+            return "VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR";
+        case VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR:
+            return "VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR";
+        case VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR:
+            return "VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR";
+        case VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR:
+            return "VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR";
+        case VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR:
+            return "VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR";
+        case VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR:
+            return "VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR";
+        case VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR:
+            return "VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR";
+        case VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR:
+            return "VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR";
+        case VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR:
+            return "VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR";
+        case VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR:
+            return "VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR";
+        default:
+            return "Unhandled VkPerformanceCounterUnitKHR";
+    }
+}
+
+static inline const char* string_VkPerformanceCounterScopeKHR(VkPerformanceCounterScopeKHR input_value)
+{
+    switch ((VkPerformanceCounterScopeKHR)input_value)
+    {
+        case VK_QUERY_SCOPE_COMMAND_BUFFER_KHR:
+            return "VK_QUERY_SCOPE_COMMAND_BUFFER_KHR";
+        case VK_QUERY_SCOPE_COMMAND_KHR:
+            return "VK_QUERY_SCOPE_COMMAND_KHR";
+        case VK_QUERY_SCOPE_RENDER_PASS_KHR:
+            return "VK_QUERY_SCOPE_RENDER_PASS_KHR";
+        default:
+            return "Unhandled VkPerformanceCounterScopeKHR";
+    }
+}
+
+static inline const char* string_VkPerformanceCounterStorageKHR(VkPerformanceCounterStorageKHR input_value)
+{
+    switch ((VkPerformanceCounterStorageKHR)input_value)
+    {
+        case VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR:
+            return "VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR";
+        case VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR:
+            return "VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR";
+        case VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR:
+            return "VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR";
+        case VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR:
+            return "VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR";
+        case VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR:
+            return "VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR";
+        case VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR:
+            return "VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR";
+        default:
+            return "Unhandled VkPerformanceCounterStorageKHR";
+    }
+}
+
+static inline const char* string_VkPerformanceCounterDescriptionFlagBitsKHR(VkPerformanceCounterDescriptionFlagBitsKHR input_value)
+{
+    switch ((VkPerformanceCounterDescriptionFlagBitsKHR)input_value)
+    {
+        case VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR:
+            return "VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR";
+        case VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR:
+            return "VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR";
+        default:
+            return "Unhandled VkPerformanceCounterDescriptionFlagBitsKHR";
+    }
+}
+
+static inline std::string string_VkPerformanceCounterDescriptionFlagsKHR(VkPerformanceCounterDescriptionFlagsKHR input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkPerformanceCounterDescriptionFlagBitsKHR(static_cast<VkPerformanceCounterDescriptionFlagBitsKHR>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkPerformanceCounterDescriptionFlagBitsKHR(static_cast<VkPerformanceCounterDescriptionFlagBitsKHR>(0)));
+    return ret;
+}
+
+static inline const char* string_VkPointClippingBehaviorKHR(VkPointClippingBehaviorKHR input_value)
+{
+    switch ((VkPointClippingBehaviorKHR)input_value)
+    {
+        case VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES:
+            return "VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES";
+        case VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY:
+            return "VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY";
+        default:
+            return "Unhandled VkPointClippingBehaviorKHR";
+    }
+}
+
+static inline const char* string_VkTessellationDomainOriginKHR(VkTessellationDomainOriginKHR input_value)
+{
+    switch ((VkTessellationDomainOriginKHR)input_value)
+    {
+        case VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT:
+            return "VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT";
+        case VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT:
+            return "VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT";
+        default:
+            return "Unhandled VkTessellationDomainOriginKHR";
+    }
+}
+
+static inline const char* string_VkSamplerYcbcrModelConversionKHR(VkSamplerYcbcrModelConversionKHR input_value)
+{
+    switch ((VkSamplerYcbcrModelConversionKHR)input_value)
+    {
+        case VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY:
+            return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY";
+        case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020:
+            return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020";
+        case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601:
+            return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601";
+        case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709:
+            return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709";
+        case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY:
+            return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY";
+        default:
+            return "Unhandled VkSamplerYcbcrModelConversionKHR";
+    }
+}
+
+static inline const char* string_VkSamplerYcbcrRangeKHR(VkSamplerYcbcrRangeKHR input_value)
+{
+    switch ((VkSamplerYcbcrRangeKHR)input_value)
+    {
+        case VK_SAMPLER_YCBCR_RANGE_ITU_FULL:
+            return "VK_SAMPLER_YCBCR_RANGE_ITU_FULL";
+        case VK_SAMPLER_YCBCR_RANGE_ITU_NARROW:
+            return "VK_SAMPLER_YCBCR_RANGE_ITU_NARROW";
+        default:
+            return "Unhandled VkSamplerYcbcrRangeKHR";
+    }
+}
+
+static inline const char* string_VkChromaLocationKHR(VkChromaLocationKHR input_value)
+{
+    switch ((VkChromaLocationKHR)input_value)
+    {
+        case VK_CHROMA_LOCATION_COSITED_EVEN:
+            return "VK_CHROMA_LOCATION_COSITED_EVEN";
+        case VK_CHROMA_LOCATION_MIDPOINT:
+            return "VK_CHROMA_LOCATION_MIDPOINT";
+        default:
+            return "Unhandled VkChromaLocationKHR";
+    }
+}
+
+static inline const char* string_VkDriverIdKHR(VkDriverIdKHR input_value)
+{
+    switch ((VkDriverIdKHR)input_value)
+    {
+        case VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR:
+            return "VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR";
+        case VK_DRIVER_ID_AMD_PROPRIETARY_KHR:
+            return "VK_DRIVER_ID_AMD_PROPRIETARY_KHR";
+        case VK_DRIVER_ID_ARM_PROPRIETARY_KHR:
+            return "VK_DRIVER_ID_ARM_PROPRIETARY_KHR";
+        case VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR:
+            return "VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR";
+        case VK_DRIVER_ID_GGP_PROPRIETARY_KHR:
+            return "VK_DRIVER_ID_GGP_PROPRIETARY_KHR";
+        case VK_DRIVER_ID_GOOGLE_SWIFTSHADER_KHR:
+            return "VK_DRIVER_ID_GOOGLE_SWIFTSHADER_KHR";
+        case VK_DRIVER_ID_IMAGINATION_PROPRIETARY_KHR:
+            return "VK_DRIVER_ID_IMAGINATION_PROPRIETARY_KHR";
+        case VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR:
+            return "VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR";
+        case VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR:
+            return "VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR";
+        case VK_DRIVER_ID_MESA_RADV_KHR:
+            return "VK_DRIVER_ID_MESA_RADV_KHR";
+        case VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR:
+            return "VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR";
+        case VK_DRIVER_ID_QUALCOMM_PROPRIETARY_KHR:
+            return "VK_DRIVER_ID_QUALCOMM_PROPRIETARY_KHR";
+        default:
+            return "Unhandled VkDriverIdKHR";
+    }
+}
+
+static inline const char* string_VkShaderFloatControlsIndependenceKHR(VkShaderFloatControlsIndependenceKHR input_value)
+{
+    switch ((VkShaderFloatControlsIndependenceKHR)input_value)
+    {
+        case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR:
+            return "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR";
+        case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR:
+            return "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR";
+        case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR:
+            return "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR";
+        default:
+            return "Unhandled VkShaderFloatControlsIndependenceKHR";
+    }
+}
+
+static inline const char* string_VkResolveModeFlagBitsKHR(VkResolveModeFlagBitsKHR input_value)
+{
+    switch ((VkResolveModeFlagBitsKHR)input_value)
+    {
+        case VK_RESOLVE_MODE_AVERAGE_BIT_KHR:
+            return "VK_RESOLVE_MODE_AVERAGE_BIT_KHR";
+        case VK_RESOLVE_MODE_MAX_BIT_KHR:
+            return "VK_RESOLVE_MODE_MAX_BIT_KHR";
+        case VK_RESOLVE_MODE_MIN_BIT_KHR:
+            return "VK_RESOLVE_MODE_MIN_BIT_KHR";
+        case VK_RESOLVE_MODE_NONE_KHR:
+            return "VK_RESOLVE_MODE_NONE_KHR";
+        case VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR:
+            return "VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR";
+        default:
+            return "Unhandled VkResolveModeFlagBitsKHR";
+    }
+}
+
+static inline std::string string_VkResolveModeFlagsKHR(VkResolveModeFlagsKHR input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkResolveModeFlagBitsKHR(static_cast<VkResolveModeFlagBitsKHR>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkResolveModeFlagBitsKHR(static_cast<VkResolveModeFlagBitsKHR>(0)));
+    return ret;
+}
+
+static inline const char* string_VkSemaphoreTypeKHR(VkSemaphoreTypeKHR input_value)
+{
+    switch ((VkSemaphoreTypeKHR)input_value)
+    {
+        case VK_SEMAPHORE_TYPE_BINARY_KHR:
+            return "VK_SEMAPHORE_TYPE_BINARY_KHR";
+        case VK_SEMAPHORE_TYPE_TIMELINE_KHR:
+            return "VK_SEMAPHORE_TYPE_TIMELINE_KHR";
+        default:
+            return "Unhandled VkSemaphoreTypeKHR";
+    }
+}
+
+static inline const char* string_VkSemaphoreWaitFlagBitsKHR(VkSemaphoreWaitFlagBitsKHR input_value)
+{
+    switch ((VkSemaphoreWaitFlagBitsKHR)input_value)
+    {
+        case VK_SEMAPHORE_WAIT_ANY_BIT_KHR:
+            return "VK_SEMAPHORE_WAIT_ANY_BIT_KHR";
+        default:
+            return "Unhandled VkSemaphoreWaitFlagBitsKHR";
+    }
+}
+
+static inline std::string string_VkSemaphoreWaitFlagsKHR(VkSemaphoreWaitFlagsKHR input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkSemaphoreWaitFlagBitsKHR(static_cast<VkSemaphoreWaitFlagBitsKHR>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkSemaphoreWaitFlagBitsKHR(static_cast<VkSemaphoreWaitFlagBitsKHR>(0)));
+    return ret;
+}
+
+static inline const char* string_VkPipelineExecutableStatisticFormatKHR(VkPipelineExecutableStatisticFormatKHR input_value)
+{
+    switch ((VkPipelineExecutableStatisticFormatKHR)input_value)
+    {
+        case VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR:
+            return "VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR";
+        case VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR:
+            return "VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR";
+        case VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR:
+            return "VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR";
+        case VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR:
+            return "VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR";
+        default:
+            return "Unhandled VkPipelineExecutableStatisticFormatKHR";
+    }
+}
+
+static inline const char* string_VkDebugReportObjectTypeEXT(VkDebugReportObjectTypeEXT input_value)
+{
+    switch ((VkDebugReportObjectTypeEXT)input_value)
+    {
+        case VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT";
+        case VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT:
+            return "VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT";
+        default:
+            return "Unhandled VkDebugReportObjectTypeEXT";
+    }
+}
+
+static inline const char* string_VkDebugReportFlagBitsEXT(VkDebugReportFlagBitsEXT input_value)
+{
+    switch ((VkDebugReportFlagBitsEXT)input_value)
+    {
+        case VK_DEBUG_REPORT_DEBUG_BIT_EXT:
+            return "VK_DEBUG_REPORT_DEBUG_BIT_EXT";
+        case VK_DEBUG_REPORT_ERROR_BIT_EXT:
+            return "VK_DEBUG_REPORT_ERROR_BIT_EXT";
+        case VK_DEBUG_REPORT_INFORMATION_BIT_EXT:
+            return "VK_DEBUG_REPORT_INFORMATION_BIT_EXT";
+        case VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT:
+            return "VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT";
+        case VK_DEBUG_REPORT_WARNING_BIT_EXT:
+            return "VK_DEBUG_REPORT_WARNING_BIT_EXT";
+        default:
+            return "Unhandled VkDebugReportFlagBitsEXT";
+    }
+}
+
+static inline std::string string_VkDebugReportFlagsEXT(VkDebugReportFlagsEXT input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkDebugReportFlagBitsEXT(static_cast<VkDebugReportFlagBitsEXT>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkDebugReportFlagBitsEXT(static_cast<VkDebugReportFlagBitsEXT>(0)));
+    return ret;
+}
+
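+// Illustrative usage of the flags helper above (flag values from VK_EXT_debug_report):
+//   std::string s = string_VkDebugReportFlagsEXT(
+//       VK_DEBUG_REPORT_WARNING_BIT_EXT | VK_DEBUG_REPORT_ERROR_BIT_EXT);
+//   // s == "VK_DEBUG_REPORT_WARNING_BIT_EXT|VK_DEBUG_REPORT_ERROR_BIT_EXT"
+//   // (names are emitted from the lowest set bit upward)
+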
+static inline const char* string_VkRasterizationOrderAMD(VkRasterizationOrderAMD input_value)
+{
+    switch ((VkRasterizationOrderAMD)input_value)
+    {
+        case VK_RASTERIZATION_ORDER_RELAXED_AMD:
+            return "VK_RASTERIZATION_ORDER_RELAXED_AMD";
+        case VK_RASTERIZATION_ORDER_STRICT_AMD:
+            return "VK_RASTERIZATION_ORDER_STRICT_AMD";
+        default:
+            return "Unhandled VkRasterizationOrderAMD";
+    }
+}
+
+static inline const char* string_VkShaderInfoTypeAMD(VkShaderInfoTypeAMD input_value)
+{
+    switch ((VkShaderInfoTypeAMD)input_value)
+    {
+        case VK_SHADER_INFO_TYPE_BINARY_AMD:
+            return "VK_SHADER_INFO_TYPE_BINARY_AMD";
+        case VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD:
+            return "VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD";
+        case VK_SHADER_INFO_TYPE_STATISTICS_AMD:
+            return "VK_SHADER_INFO_TYPE_STATISTICS_AMD";
+        default:
+            return "Unhandled VkShaderInfoTypeAMD";
+    }
+}
+
+static inline const char* string_VkExternalMemoryHandleTypeFlagBitsNV(VkExternalMemoryHandleTypeFlagBitsNV input_value)
+{
+    switch ((VkExternalMemoryHandleTypeFlagBitsNV)input_value)
+    {
+        case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV:
+            return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV";
+        case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV:
+            return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV";
+        case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV:
+            return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV";
+        case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV:
+            return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV";
+        default:
+            return "Unhandled VkExternalMemoryHandleTypeFlagBitsNV";
+    }
+}
+
+static inline std::string string_VkExternalMemoryHandleTypeFlagsNV(VkExternalMemoryHandleTypeFlagsNV input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkExternalMemoryHandleTypeFlagBitsNV(static_cast<VkExternalMemoryHandleTypeFlagBitsNV>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkExternalMemoryHandleTypeFlagBitsNV(static_cast<VkExternalMemoryHandleTypeFlagBitsNV>(0)));
+    return ret;
+}
+
+static inline const char* string_VkExternalMemoryFeatureFlagBitsNV(VkExternalMemoryFeatureFlagBitsNV input_value)
+{
+    switch ((VkExternalMemoryFeatureFlagBitsNV)input_value)
+    {
+        case VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV:
+            return "VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV";
+        case VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV:
+            return "VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV";
+        case VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV:
+            return "VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV";
+        default:
+            return "Unhandled VkExternalMemoryFeatureFlagBitsNV";
+    }
+}
+
+static inline std::string string_VkExternalMemoryFeatureFlagsNV(VkExternalMemoryFeatureFlagsNV input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkExternalMemoryFeatureFlagBitsNV(static_cast<VkExternalMemoryFeatureFlagBitsNV>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkExternalMemoryFeatureFlagBitsNV(static_cast<VkExternalMemoryFeatureFlagBitsNV>(0)));
+    return ret;
+}
+
+static inline const char* string_VkValidationCheckEXT(VkValidationCheckEXT input_value)
+{
+    switch ((VkValidationCheckEXT)input_value)
+    {
+        case VK_VALIDATION_CHECK_ALL_EXT:
+            return "VK_VALIDATION_CHECK_ALL_EXT";
+        case VK_VALIDATION_CHECK_SHADERS_EXT:
+            return "VK_VALIDATION_CHECK_SHADERS_EXT";
+        default:
+            return "Unhandled VkValidationCheckEXT";
+    }
+}
+
+static inline const char* string_VkConditionalRenderingFlagBitsEXT(VkConditionalRenderingFlagBitsEXT input_value)
+{
+    switch ((VkConditionalRenderingFlagBitsEXT)input_value)
+    {
+        case VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT:
+            return "VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT";
+        default:
+            return "Unhandled VkConditionalRenderingFlagBitsEXT";
+    }
+}
+
+static inline std::string string_VkConditionalRenderingFlagsEXT(VkConditionalRenderingFlagsEXT input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkConditionalRenderingFlagBitsEXT(static_cast<VkConditionalRenderingFlagBitsEXT>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkConditionalRenderingFlagBitsEXT(static_cast<VkConditionalRenderingFlagBitsEXT>(0)));
+    return ret;
+}
+
+static inline const char* string_VkIndirectCommandsLayoutUsageFlagBitsNVX(VkIndirectCommandsLayoutUsageFlagBitsNVX input_value)
+{
+    switch ((VkIndirectCommandsLayoutUsageFlagBitsNVX)input_value)
+    {
+        case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX:
+            return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX";
+        case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX:
+            return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX";
+        case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX:
+            return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX";
+        case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX:
+            return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX";
+        default:
+            return "Unhandled VkIndirectCommandsLayoutUsageFlagBitsNVX";
+    }
+}
+
+static inline std::string string_VkIndirectCommandsLayoutUsageFlagsNVX(VkIndirectCommandsLayoutUsageFlagsNVX input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkIndirectCommandsLayoutUsageFlagBitsNVX(static_cast<VkIndirectCommandsLayoutUsageFlagBitsNVX>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkIndirectCommandsLayoutUsageFlagBitsNVX(static_cast<VkIndirectCommandsLayoutUsageFlagBitsNVX>(0)));
+    return ret;
+}
+
+static inline const char* string_VkObjectEntryUsageFlagBitsNVX(VkObjectEntryUsageFlagBitsNVX input_value)
+{
+    switch ((VkObjectEntryUsageFlagBitsNVX)input_value)
+    {
+        case VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX:
+            return "VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX";
+        case VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX:
+            return "VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX";
+        default:
+            return "Unhandled VkObjectEntryUsageFlagBitsNVX";
+    }
+}
+
+static inline std::string string_VkObjectEntryUsageFlagsNVX(VkObjectEntryUsageFlagsNVX input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkObjectEntryUsageFlagBitsNVX(static_cast<VkObjectEntryUsageFlagBitsNVX>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkObjectEntryUsageFlagBitsNVX(static_cast<VkObjectEntryUsageFlagBitsNVX>(0)));
+    return ret;
+}
+
+static inline const char* string_VkIndirectCommandsTokenTypeNVX(VkIndirectCommandsTokenTypeNVX input_value)
+{
+    switch ((VkIndirectCommandsTokenTypeNVX)input_value)
+    {
+        case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX:
+            return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX";
+        case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX:
+            return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX";
+        case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX:
+            return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX";
+        case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX:
+            return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX";
+        case VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX:
+            return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX";
+        case VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX:
+            return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX";
+        case VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX:
+            return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX";
+        case VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX:
+            return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX";
+        default:
+            return "Unhandled VkIndirectCommandsTokenTypeNVX";
+    }
+}
+
+static inline const char* string_VkObjectEntryTypeNVX(VkObjectEntryTypeNVX input_value)
+{
+    switch ((VkObjectEntryTypeNVX)input_value)
+    {
+        case VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX:
+            return "VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX";
+        case VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX:
+            return "VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX";
+        case VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX:
+            return "VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX";
+        case VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX:
+            return "VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX";
+        case VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX:
+            return "VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX";
+        default:
+            return "Unhandled VkObjectEntryTypeNVX";
+    }
+}
+
+static inline const char* string_VkSurfaceCounterFlagBitsEXT(VkSurfaceCounterFlagBitsEXT input_value)
+{
+    switch ((VkSurfaceCounterFlagBitsEXT)input_value)
+    {
+        case VK_SURFACE_COUNTER_VBLANK_EXT:
+            return "VK_SURFACE_COUNTER_VBLANK_EXT";
+        default:
+            return "Unhandled VkSurfaceCounterFlagBitsEXT";
+    }
+}
+
+static inline std::string string_VkSurfaceCounterFlagsEXT(VkSurfaceCounterFlagsEXT input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkSurfaceCounterFlagBitsEXT(static_cast<VkSurfaceCounterFlagBitsEXT>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkSurfaceCounterFlagBitsEXT(static_cast<VkSurfaceCounterFlagBitsEXT>(0)));
+    return ret;
+}
+
+static inline const char* string_VkDisplayPowerStateEXT(VkDisplayPowerStateEXT input_value)
+{
+    switch ((VkDisplayPowerStateEXT)input_value)
+    {
+        case VK_DISPLAY_POWER_STATE_OFF_EXT:
+            return "VK_DISPLAY_POWER_STATE_OFF_EXT";
+        case VK_DISPLAY_POWER_STATE_ON_EXT:
+            return "VK_DISPLAY_POWER_STATE_ON_EXT";
+        case VK_DISPLAY_POWER_STATE_SUSPEND_EXT:
+            return "VK_DISPLAY_POWER_STATE_SUSPEND_EXT";
+        default:
+            return "Unhandled VkDisplayPowerStateEXT";
+    }
+}
+
+static inline const char* string_VkDeviceEventTypeEXT(VkDeviceEventTypeEXT input_value)
+{
+    switch ((VkDeviceEventTypeEXT)input_value)
+    {
+        case VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT:
+            return "VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT";
+        default:
+            return "Unhandled VkDeviceEventTypeEXT";
+    }
+}
+
+static inline const char* string_VkDisplayEventTypeEXT(VkDisplayEventTypeEXT input_value)
+{
+    switch ((VkDisplayEventTypeEXT)input_value)
+    {
+        case VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT:
+            return "VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT";
+        default:
+            return "Unhandled VkDisplayEventTypeEXT";
+    }
+}
+
+static inline const char* string_VkViewportCoordinateSwizzleNV(VkViewportCoordinateSwizzleNV input_value)
+{
+    switch ((VkViewportCoordinateSwizzleNV)input_value)
+    {
+        case VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV:
+            return "VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV";
+        case VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV:
+            return "VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV";
+        case VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV:
+            return "VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV";
+        case VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV:
+            return "VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV";
+        case VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV:
+            return "VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV";
+        case VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV:
+            return "VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV";
+        case VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV:
+            return "VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV";
+        case VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV:
+            return "VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV";
+        default:
+            return "Unhandled VkViewportCoordinateSwizzleNV";
+    }
+}
+
+static inline const char* string_VkDiscardRectangleModeEXT(VkDiscardRectangleModeEXT input_value)
+{
+    switch ((VkDiscardRectangleModeEXT)input_value)
+    {
+        case VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT:
+            return "VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT";
+        case VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT:
+            return "VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT";
+        default:
+            return "Unhandled VkDiscardRectangleModeEXT";
+    }
+}
+
+static inline const char* string_VkConservativeRasterizationModeEXT(VkConservativeRasterizationModeEXT input_value)
+{
+    switch ((VkConservativeRasterizationModeEXT)input_value)
+    {
+        case VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT:
+            return "VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT";
+        case VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT:
+            return "VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT";
+        case VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT:
+            return "VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT";
+        default:
+            return "Unhandled VkConservativeRasterizationModeEXT";
+    }
+}
+
+static inline const char* string_VkDebugUtilsMessageSeverityFlagBitsEXT(VkDebugUtilsMessageSeverityFlagBitsEXT input_value)
+{
+    switch ((VkDebugUtilsMessageSeverityFlagBitsEXT)input_value)
+    {
+        case VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT:
+            return "VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT";
+        case VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT:
+            return "VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT";
+        case VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT:
+            return "VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT";
+        case VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT:
+            return "VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT";
+        default:
+            return "Unhandled VkDebugUtilsMessageSeverityFlagBitsEXT";
+    }
+}
+
+static inline std::string string_VkDebugUtilsMessageSeverityFlagsEXT(VkDebugUtilsMessageSeverityFlagsEXT input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkDebugUtilsMessageSeverityFlagBitsEXT(static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkDebugUtilsMessageSeverityFlagBitsEXT(static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>(0)));
+    return ret;
+}
+
+static inline const char* string_VkDebugUtilsMessageTypeFlagBitsEXT(VkDebugUtilsMessageTypeFlagBitsEXT input_value)
+{
+    switch ((VkDebugUtilsMessageTypeFlagBitsEXT)input_value)
+    {
+        case VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT:
+            return "VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT";
+        case VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT:
+            return "VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT";
+        case VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT:
+            return "VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT";
+        default:
+            return "Unhandled VkDebugUtilsMessageTypeFlagBitsEXT";
+    }
+}
+
+static inline std::string string_VkDebugUtilsMessageTypeFlagsEXT(VkDebugUtilsMessageTypeFlagsEXT input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkDebugUtilsMessageTypeFlagBitsEXT(static_cast<VkDebugUtilsMessageTypeFlagBitsEXT>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkDebugUtilsMessageTypeFlagBitsEXT(static_cast<VkDebugUtilsMessageTypeFlagBitsEXT>(0)));
+    return ret;
+}
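+// Illustrative example (hypothetical application-side callback, names chosen for the
+// sketch): a VK_EXT_debug_utils messenger can use the two helpers above to format
+// readable log lines:
+//   VKAPI_ATTR VkBool32 VKAPI_CALL DebugCallback(
+//       VkDebugUtilsMessageSeverityFlagBitsEXT severity,
+//       VkDebugUtilsMessageTypeFlagsEXT types,
+//       const VkDebugUtilsMessengerCallbackDataEXT* data, void* /*user_data*/) {
+//       printf("[%s][%s] %s\n",
+//              string_VkDebugUtilsMessageSeverityFlagBitsEXT(severity),
+//              string_VkDebugUtilsMessageTypeFlagsEXT(types).c_str(),
+//              data->pMessage);
+//       return VK_FALSE;  // do not abort the Vulkan call that triggered the message
+//   }
+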
+
+static inline const char* string_VkSamplerReductionModeEXT(VkSamplerReductionModeEXT input_value)
+{
+    switch ((VkSamplerReductionModeEXT)input_value)
+    {
+        case VK_SAMPLER_REDUCTION_MODE_MAX_EXT:
+            return "VK_SAMPLER_REDUCTION_MODE_MAX_EXT";
+        case VK_SAMPLER_REDUCTION_MODE_MIN_EXT:
+            return "VK_SAMPLER_REDUCTION_MODE_MIN_EXT";
+        case VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT:
+            return "VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT";
+        default:
+            return "Unhandled VkSamplerReductionModeEXT";
+    }
+}
+
+static inline const char* string_VkBlendOverlapEXT(VkBlendOverlapEXT input_value)
+{
+    switch ((VkBlendOverlapEXT)input_value)
+    {
+        case VK_BLEND_OVERLAP_CONJOINT_EXT:
+            return "VK_BLEND_OVERLAP_CONJOINT_EXT";
+        case VK_BLEND_OVERLAP_DISJOINT_EXT:
+            return "VK_BLEND_OVERLAP_DISJOINT_EXT";
+        case VK_BLEND_OVERLAP_UNCORRELATED_EXT:
+            return "VK_BLEND_OVERLAP_UNCORRELATED_EXT";
+        default:
+            return "Unhandled VkBlendOverlapEXT";
+    }
+}
+
+static inline const char* string_VkCoverageModulationModeNV(VkCoverageModulationModeNV input_value)
+{
+    switch ((VkCoverageModulationModeNV)input_value)
+    {
+        case VK_COVERAGE_MODULATION_MODE_ALPHA_NV:
+            return "VK_COVERAGE_MODULATION_MODE_ALPHA_NV";
+        case VK_COVERAGE_MODULATION_MODE_NONE_NV:
+            return "VK_COVERAGE_MODULATION_MODE_NONE_NV";
+        case VK_COVERAGE_MODULATION_MODE_RGBA_NV:
+            return "VK_COVERAGE_MODULATION_MODE_RGBA_NV";
+        case VK_COVERAGE_MODULATION_MODE_RGB_NV:
+            return "VK_COVERAGE_MODULATION_MODE_RGB_NV";
+        default:
+            return "Unhandled VkCoverageModulationModeNV";
+    }
+}
+
+static inline const char* string_VkValidationCacheHeaderVersionEXT(VkValidationCacheHeaderVersionEXT input_value)
+{
+    switch ((VkValidationCacheHeaderVersionEXT)input_value)
+    {
+        case VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT:
+            return "VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT";
+        default:
+            return "Unhandled VkValidationCacheHeaderVersionEXT";
+    }
+}
+
+static inline const char* string_VkDescriptorBindingFlagBitsEXT(VkDescriptorBindingFlagBitsEXT input_value)
+{
+    switch ((VkDescriptorBindingFlagBitsEXT)input_value)
+    {
+        case VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT:
+            return "VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT";
+        case VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT:
+            return "VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT";
+        case VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT:
+            return "VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT";
+        case VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT:
+            return "VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT";
+        default:
+            return "Unhandled VkDescriptorBindingFlagBitsEXT";
+    }
+}
+
+static inline std::string string_VkDescriptorBindingFlagsEXT(VkDescriptorBindingFlagsEXT input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkDescriptorBindingFlagBitsEXT(static_cast<VkDescriptorBindingFlagBitsEXT>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkDescriptorBindingFlagBitsEXT(static_cast<VkDescriptorBindingFlagBitsEXT>(0)));
+    return ret;
+}
+
+static inline const char* string_VkShadingRatePaletteEntryNV(VkShadingRatePaletteEntryNV input_value)
+{
+    switch ((VkShadingRatePaletteEntryNV)input_value)
+    {
+        case VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV:
+            return "VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV";
+        case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV:
+            return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV";
+        case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV:
+            return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV";
+        case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV:
+            return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV";
+        case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV:
+            return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV";
+        case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV:
+            return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV";
+        case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV:
+            return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV";
+        case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV:
+            return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV";
+        case VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV:
+            return "VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV";
+        case VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV:
+            return "VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV";
+        case VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV:
+            return "VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV";
+        case VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV:
+            return "VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV";
+        default:
+            return "Unhandled VkShadingRatePaletteEntryNV";
+    }
+}
+
+static inline const char* string_VkCoarseSampleOrderTypeNV(VkCoarseSampleOrderTypeNV input_value)
+{
+    switch ((VkCoarseSampleOrderTypeNV)input_value)
+    {
+        case VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV:
+            return "VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV";
+        case VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV:
+            return "VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV";
+        case VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV:
+            return "VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV";
+        case VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV:
+            return "VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV";
+        default:
+            return "Unhandled VkCoarseSampleOrderTypeNV";
+    }
+}
+
+static inline const char* string_VkAccelerationStructureTypeNV(VkAccelerationStructureTypeNV input_value)
+{
+    switch ((VkAccelerationStructureTypeNV)input_value)
+    {
+        case VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV:
+            return "VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV";
+        case VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV:
+            return "VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV";
+        default:
+            return "Unhandled VkAccelerationStructureTypeNV";
+    }
+}
+
+static inline const char* string_VkRayTracingShaderGroupTypeNV(VkRayTracingShaderGroupTypeNV input_value)
+{
+    switch ((VkRayTracingShaderGroupTypeNV)input_value)
+    {
+        case VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV:
+            return "VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV";
+        case VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV:
+            return "VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV";
+        case VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV:
+            return "VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV";
+        default:
+            return "Unhandled VkRayTracingShaderGroupTypeNV";
+    }
+}
+
+static inline const char* string_VkGeometryTypeNV(VkGeometryTypeNV input_value)
+{
+    switch ((VkGeometryTypeNV)input_value)
+    {
+        case VK_GEOMETRY_TYPE_AABBS_NV:
+            return "VK_GEOMETRY_TYPE_AABBS_NV";
+        case VK_GEOMETRY_TYPE_TRIANGLES_NV:
+            return "VK_GEOMETRY_TYPE_TRIANGLES_NV";
+        default:
+            return "Unhandled VkGeometryTypeNV";
+    }
+}
+
+static inline const char* string_VkGeometryFlagBitsNV(VkGeometryFlagBitsNV input_value)
+{
+    switch ((VkGeometryFlagBitsNV)input_value)
+    {
+        case VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV:
+            return "VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV";
+        case VK_GEOMETRY_OPAQUE_BIT_NV:
+            return "VK_GEOMETRY_OPAQUE_BIT_NV";
+        default:
+            return "Unhandled VkGeometryFlagBitsNV";
+    }
+}
+
+static inline std::string string_VkGeometryFlagsNV(VkGeometryFlagsNV input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkGeometryFlagBitsNV(static_cast<VkGeometryFlagBitsNV>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkGeometryFlagBitsNV(static_cast<VkGeometryFlagBitsNV>(0)));
+    return ret;
+}
+
+static inline const char* string_VkGeometryInstanceFlagBitsNV(VkGeometryInstanceFlagBitsNV input_value)
+{
+    switch ((VkGeometryInstanceFlagBitsNV)input_value)
+    {
+        case VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV:
+            return "VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV";
+        case VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV:
+            return "VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV";
+        case VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV:
+            return "VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV";
+        case VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV:
+            return "VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV";
+        default:
+            return "Unhandled VkGeometryInstanceFlagBitsNV";
+    }
+}
+
+static inline std::string string_VkGeometryInstanceFlagsNV(VkGeometryInstanceFlagsNV input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkGeometryInstanceFlagBitsNV(static_cast<VkGeometryInstanceFlagBitsNV>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkGeometryInstanceFlagBitsNV(static_cast<VkGeometryInstanceFlagBitsNV>(0)));
+    return ret;
+}
+
+static inline const char* string_VkBuildAccelerationStructureFlagBitsNV(VkBuildAccelerationStructureFlagBitsNV input_value)
+{
+    switch ((VkBuildAccelerationStructureFlagBitsNV)input_value)
+    {
+        case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV:
+            return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV";
+        case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV:
+            return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV";
+        case VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV:
+            return "VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV";
+        case VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV:
+            return "VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV";
+        case VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV:
+            return "VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV";
+        default:
+            return "Unhandled VkBuildAccelerationStructureFlagBitsNV";
+    }
+}
+
+static inline std::string string_VkBuildAccelerationStructureFlagsNV(VkBuildAccelerationStructureFlagsNV input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkBuildAccelerationStructureFlagBitsNV(static_cast<VkBuildAccelerationStructureFlagBitsNV>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkBuildAccelerationStructureFlagBitsNV(static_cast<VkBuildAccelerationStructureFlagBitsNV>(0)));
+    return ret;
+}
+
+static inline const char* string_VkCopyAccelerationStructureModeNV(VkCopyAccelerationStructureModeNV input_value)
+{
+    switch ((VkCopyAccelerationStructureModeNV)input_value)
+    {
+        case VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV:
+            return "VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV";
+        case VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV:
+            return "VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV";
+        default:
+            return "Unhandled VkCopyAccelerationStructureModeNV";
+    }
+}
+
+static inline const char* string_VkAccelerationStructureMemoryRequirementsTypeNV(VkAccelerationStructureMemoryRequirementsTypeNV input_value)
+{
+    switch ((VkAccelerationStructureMemoryRequirementsTypeNV)input_value)
+    {
+        case VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV:
+            return "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV";
+        case VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV:
+            return "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV";
+        case VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV:
+            return "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV";
+        default:
+            return "Unhandled VkAccelerationStructureMemoryRequirementsTypeNV";
+    }
+}
+
+static inline const char* string_VkQueueGlobalPriorityEXT(VkQueueGlobalPriorityEXT input_value)
+{
+    switch ((VkQueueGlobalPriorityEXT)input_value)
+    {
+        case VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT:
+            return "VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT";
+        case VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT:
+            return "VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT";
+        case VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT:
+            return "VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT";
+        case VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT:
+            return "VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT";
+        default:
+            return "Unhandled VkQueueGlobalPriorityEXT";
+    }
+}
+
+static inline const char* string_VkTimeDomainEXT(VkTimeDomainEXT input_value)
+{
+    switch ((VkTimeDomainEXT)input_value)
+    {
+        case VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT:
+            return "VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT";
+        case VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT:
+            return "VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT";
+        case VK_TIME_DOMAIN_DEVICE_EXT:
+            return "VK_TIME_DOMAIN_DEVICE_EXT";
+        case VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT:
+            return "VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT";
+        default:
+            return "Unhandled VkTimeDomainEXT";
+    }
+}
+
+static inline const char* string_VkMemoryOverallocationBehaviorAMD(VkMemoryOverallocationBehaviorAMD input_value)
+{
+    switch ((VkMemoryOverallocationBehaviorAMD)input_value)
+    {
+        case VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD:
+            return "VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD";
+        case VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD:
+            return "VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD";
+        case VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD:
+            return "VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD";
+        default:
+            return "Unhandled VkMemoryOverallocationBehaviorAMD";
+    }
+}
+
+static inline const char* string_VkPipelineCreationFeedbackFlagBitsEXT(VkPipelineCreationFeedbackFlagBitsEXT input_value)
+{
+    switch ((VkPipelineCreationFeedbackFlagBitsEXT)input_value)
+    {
+        case VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT:
+            return "VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT";
+        case VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT:
+            return "VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT";
+        case VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT:
+            return "VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT";
+        default:
+            return "Unhandled VkPipelineCreationFeedbackFlagBitsEXT";
+    }
+}
+
+static inline std::string string_VkPipelineCreationFeedbackFlagsEXT(VkPipelineCreationFeedbackFlagsEXT input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkPipelineCreationFeedbackFlagBitsEXT(static_cast<VkPipelineCreationFeedbackFlagBitsEXT>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkPipelineCreationFeedbackFlagBitsEXT(static_cast<VkPipelineCreationFeedbackFlagBitsEXT>(0)));
+    return ret;
+}
+
+static inline const char* string_VkPerformanceConfigurationTypeINTEL(VkPerformanceConfigurationTypeINTEL input_value)
+{
+    switch ((VkPerformanceConfigurationTypeINTEL)input_value)
+    {
+        case VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL:
+            return "VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL";
+        default:
+            return "Unhandled VkPerformanceConfigurationTypeINTEL";
+    }
+}
+
+static inline const char* string_VkQueryPoolSamplingModeINTEL(VkQueryPoolSamplingModeINTEL input_value)
+{
+    switch ((VkQueryPoolSamplingModeINTEL)input_value)
+    {
+        case VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL:
+            return "VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL";
+        default:
+            return "Unhandled VkQueryPoolSamplingModeINTEL";
+    }
+}
+
+static inline const char* string_VkPerformanceOverrideTypeINTEL(VkPerformanceOverrideTypeINTEL input_value)
+{
+    switch ((VkPerformanceOverrideTypeINTEL)input_value)
+    {
+        case VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL:
+            return "VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL";
+        case VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL:
+            return "VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL";
+        default:
+            return "Unhandled VkPerformanceOverrideTypeINTEL";
+    }
+}
+
+static inline const char* string_VkPerformanceParameterTypeINTEL(VkPerformanceParameterTypeINTEL input_value)
+{
+    switch ((VkPerformanceParameterTypeINTEL)input_value)
+    {
+        case VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL:
+            return "VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL";
+        case VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL:
+            return "VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL";
+        default:
+            return "Unhandled VkPerformanceParameterTypeINTEL";
+    }
+}
+
+static inline const char* string_VkPerformanceValueTypeINTEL(VkPerformanceValueTypeINTEL input_value)
+{
+    switch ((VkPerformanceValueTypeINTEL)input_value)
+    {
+        case VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL:
+            return "VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL";
+        case VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL:
+            return "VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL";
+        case VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL:
+            return "VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL";
+        case VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL:
+            return "VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL";
+        case VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL:
+            return "VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL";
+        default:
+            return "Unhandled VkPerformanceValueTypeINTEL";
+    }
+}
+
+static inline const char* string_VkToolPurposeFlagBitsEXT(VkToolPurposeFlagBitsEXT input_value)
+{
+    switch ((VkToolPurposeFlagBitsEXT)input_value)
+    {
+        case VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT_EXT:
+            return "VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT_EXT";
+        case VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT:
+            return "VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT";
+        case VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT:
+            return "VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT";
+        case VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT_EXT:
+            return "VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT_EXT";
+        case VK_TOOL_PURPOSE_PROFILING_BIT_EXT:
+            return "VK_TOOL_PURPOSE_PROFILING_BIT_EXT";
+        case VK_TOOL_PURPOSE_TRACING_BIT_EXT:
+            return "VK_TOOL_PURPOSE_TRACING_BIT_EXT";
+        case VK_TOOL_PURPOSE_VALIDATION_BIT_EXT:
+            return "VK_TOOL_PURPOSE_VALIDATION_BIT_EXT";
+        default:
+            return "Unhandled VkToolPurposeFlagBitsEXT";
+    }
+}
+
+static inline std::string string_VkToolPurposeFlagsEXT(VkToolPurposeFlagsEXT input_value)
+{
+    std::string ret;
+    int index = 0;
+    while(input_value) {
+        if (input_value & 1) {
+            if( !ret.empty()) ret.append("|");
+            ret.append(string_VkToolPurposeFlagBitsEXT(static_cast<VkToolPurposeFlagBitsEXT>(1 << index)));
+        }
+        ++index;
+        input_value >>= 1;
+    }
+    if( ret.empty()) ret.append(string_VkToolPurposeFlagBitsEXT(static_cast<VkToolPurposeFlagBitsEXT>(0)));
+    return ret;
+}
+
+static inline const char* string_VkValidationFeatureEnableEXT(VkValidationFeatureEnableEXT input_value)
+{
+    switch ((VkValidationFeatureEnableEXT)input_value)
+    {
+        case VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT:
+            return "VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT";
+        case VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT:
+            return "VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT";
+        case VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT:
+            return "VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT";
+        default:
+            return "Unhandled VkValidationFeatureEnableEXT";
+    }
+}
+
+static inline const char* string_VkValidationFeatureDisableEXT(VkValidationFeatureDisableEXT input_value)
+{
+    switch ((VkValidationFeatureDisableEXT)input_value)
+    {
+        case VK_VALIDATION_FEATURE_DISABLE_ALL_EXT:
+            return "VK_VALIDATION_FEATURE_DISABLE_ALL_EXT";
+        case VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT:
+            return "VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT";
+        case VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT:
+            return "VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT";
+        case VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT:
+            return "VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT";
+        case VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT:
+            return "VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT";
+        case VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT:
+            return "VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT";
+        case VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT:
+            return "VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT";
+        default:
+            return "Unhandled VkValidationFeatureDisableEXT";
+    }
+}
+
+static inline const char* string_VkComponentTypeNV(VkComponentTypeNV input_value)
+{
+    switch ((VkComponentTypeNV)input_value)
+    {
+        case VK_COMPONENT_TYPE_FLOAT16_NV:
+            return "VK_COMPONENT_TYPE_FLOAT16_NV";
+        case VK_COMPONENT_TYPE_FLOAT32_NV:
+            return "VK_COMPONENT_TYPE_FLOAT32_NV";
+        case VK_COMPONENT_TYPE_FLOAT64_NV:
+            return "VK_COMPONENT_TYPE_FLOAT64_NV";
+        case VK_COMPONENT_TYPE_SINT16_NV:
+            return "VK_COMPONENT_TYPE_SINT16_NV";
+        case VK_COMPONENT_TYPE_SINT32_NV:
+            return "VK_COMPONENT_TYPE_SINT32_NV";
+        case VK_COMPONENT_TYPE_SINT64_NV:
+            return "VK_COMPONENT_TYPE_SINT64_NV";
+        case VK_COMPONENT_TYPE_SINT8_NV:
+            return "VK_COMPONENT_TYPE_SINT8_NV";
+        case VK_COMPONENT_TYPE_UINT16_NV:
+            return "VK_COMPONENT_TYPE_UINT16_NV";
+        case VK_COMPONENT_TYPE_UINT32_NV:
+            return "VK_COMPONENT_TYPE_UINT32_NV";
+        case VK_COMPONENT_TYPE_UINT64_NV:
+            return "VK_COMPONENT_TYPE_UINT64_NV";
+        case VK_COMPONENT_TYPE_UINT8_NV:
+            return "VK_COMPONENT_TYPE_UINT8_NV";
+        default:
+            return "Unhandled VkComponentTypeNV";
+    }
+}
+
+static inline const char* string_VkScopeNV(VkScopeNV input_value)
+{
+    switch ((VkScopeNV)input_value)
+    {
+        case VK_SCOPE_DEVICE_NV:
+            return "VK_SCOPE_DEVICE_NV";
+        case VK_SCOPE_QUEUE_FAMILY_NV:
+            return "VK_SCOPE_QUEUE_FAMILY_NV";
+        case VK_SCOPE_SUBGROUP_NV:
+            return "VK_SCOPE_SUBGROUP_NV";
+        case VK_SCOPE_WORKGROUP_NV:
+            return "VK_SCOPE_WORKGROUP_NV";
+        default:
+            return "Unhandled VkScopeNV";
+    }
+}
+
+static inline const char* string_VkCoverageReductionModeNV(VkCoverageReductionModeNV input_value)
+{
+    switch ((VkCoverageReductionModeNV)input_value)
+    {
+        case VK_COVERAGE_REDUCTION_MODE_MERGE_NV:
+            return "VK_COVERAGE_REDUCTION_MODE_MERGE_NV";
+        case VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV:
+            return "VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV";
+        default:
+            return "Unhandled VkCoverageReductionModeNV";
+    }
+}
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+static inline const char* string_VkFullScreenExclusiveEXT(VkFullScreenExclusiveEXT input_value)
+{
+    switch ((VkFullScreenExclusiveEXT)input_value)
+    {
+        case VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT:
+            return "VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT";
+        case VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT:
+            return "VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT";
+        case VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT:
+            return "VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT";
+        case VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT:
+            return "VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT";
+        default:
+            return "Unhandled VkFullScreenExclusiveEXT";
+    }
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+static inline const char* string_VkLineRasterizationModeEXT(VkLineRasterizationModeEXT input_value)
+{
+    switch ((VkLineRasterizationModeEXT)input_value)
+    {
+        case VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT:
+            return "VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT";
+        case VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT:
+            return "VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT";
+        case VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT:
+            return "VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT";
+        case VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT:
+            return "VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT";
+        default:
+            return "Unhandled VkLineRasterizationModeEXT";
+    }
+}
+
+static inline const char * GetPhysDevFeatureString(uint32_t index) {
+    const char * IndexToPhysDevFeatureString[] = {
+        "robustBufferAccess",
+        "fullDrawIndexUint32",
+        "imageCubeArray",
+        "independentBlend",
+        "geometryShader",
+        "tessellationShader",
+        "sampleRateShading",
+        "dualSrcBlend",
+        "logicOp",
+        "multiDrawIndirect",
+        "drawIndirectFirstInstance",
+        "depthClamp",
+        "depthBiasClamp",
+        "fillModeNonSolid",
+        "depthBounds",
+        "wideLines",
+        "largePoints",
+        "alphaToOne",
+        "multiViewport",
+        "samplerAnisotropy",
+        "textureCompressionETC2",
+        "textureCompressionASTC_LDR",
+        "textureCompressionBC",
+        "occlusionQueryPrecise",
+        "pipelineStatisticsQuery",
+        "vertexPipelineStoresAndAtomics",
+        "fragmentStoresAndAtomics",
+        "shaderTessellationAndGeometryPointSize",
+        "shaderImageGatherExtended",
+        "shaderStorageImageExtendedFormats",
+        "shaderStorageImageMultisample",
+        "shaderStorageImageReadWithoutFormat",
+        "shaderStorageImageWriteWithoutFormat",
+        "shaderUniformBufferArrayDynamicIndexing",
+        "shaderSampledImageArrayDynamicIndexing",
+        "shaderStorageBufferArrayDynamicIndexing",
+        "shaderStorageImageArrayDynamicIndexing",
+        "shaderClipDistance",
+        "shaderCullDistance",
+        "shaderFloat64",
+        "shaderInt64",
+        "shaderInt16",
+        "shaderResourceResidency",
+        "shaderResourceMinLod",
+        "sparseBinding",
+        "sparseResidencyBuffer",
+        "sparseResidencyImage2D",
+        "sparseResidencyImage3D",
+        "sparseResidency2Samples",
+        "sparseResidency4Samples",
+        "sparseResidency8Samples",
+        "sparseResidency16Samples",
+        "sparseResidencyAliased",
+        "variableMultisampleRate",
+        "inheritedQueries",
+    };
+
+    return IndexToPhysDevFeatureString[index];
+}
diff --git a/src/third_party/vulkan-validation-layers/src/layers/generated/vk_extension_helper.h b/src/third_party/vulkan-validation-layers/src/layers/generated/vk_extension_helper.h
new file mode 100644
index 0000000..fc28669
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/generated/vk_extension_helper.h
@@ -0,0 +1,1027 @@
+// *** THIS FILE IS GENERATED - DO NOT EDIT ***
+// See helper_file_generator.py for modifications
+
+
+/***************************************************************************
+ *
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
+ * Author: Tobin Ehlis <tobine@google.com>
+ * Author: Chris Forbes <chrisforbes@google.com>
+ * Author: John Zulauf<jzulauf@lunarg.com>
+ *
+ ****************************************************************************/
+
+
+#ifndef VK_EXTENSION_HELPER_H_
+#define VK_EXTENSION_HELPER_H_
+#include <unordered_set>
+#include <string>
+#include <unordered_map>
+#include <utility>
+#include <set>
+#include <vector>
+#include <cassert>
+
+#include <vulkan/vulkan.h>
+
+#define VK_VERSION_1_1_NAME "VK_VERSION_1_1"
+
+enum ExtEnabled : unsigned char {
+    kNotEnabled,
+    kEnabledByCreateinfo,
+    kEnabledByApiLevel,
+};
+
+static bool IsExtEnabled(ExtEnabled feature) {
+    if (feature == kNotEnabled) return false;
+    return true;
+};
+
+struct InstanceExtensions {
+    ExtEnabled vk_feature_version_1_1{kNotEnabled};
+    ExtEnabled vk_ext_acquire_xlib_display{kNotEnabled};
+    ExtEnabled vk_ext_debug_report{kNotEnabled};
+    ExtEnabled vk_ext_debug_utils{kNotEnabled};
+    ExtEnabled vk_ext_direct_mode_display{kNotEnabled};
+    ExtEnabled vk_ext_display_surface_counter{kNotEnabled};
+    ExtEnabled vk_ext_headless_surface{kNotEnabled};
+    ExtEnabled vk_ext_metal_surface{kNotEnabled};
+    ExtEnabled vk_ext_swapchain_color_space{kNotEnabled};
+    ExtEnabled vk_ext_validation_features{kNotEnabled};
+    ExtEnabled vk_ext_validation_flags{kNotEnabled};
+    ExtEnabled vk_fuchsia_imagepipe_surface{kNotEnabled};
+    ExtEnabled vk_ggp_stream_descriptor_surface{kNotEnabled};
+    ExtEnabled vk_khr_android_surface{kNotEnabled};
+    ExtEnabled vk_khr_device_group_creation{kNotEnabled};
+    ExtEnabled vk_khr_display{kNotEnabled};
+    ExtEnabled vk_khr_external_fence_capabilities{kNotEnabled};
+    ExtEnabled vk_khr_external_memory_capabilities{kNotEnabled};
+    ExtEnabled vk_khr_external_semaphore_capabilities{kNotEnabled};
+    ExtEnabled vk_khr_get_display_properties_2{kNotEnabled};
+    ExtEnabled vk_khr_get_physical_device_properties_2{kNotEnabled};
+    ExtEnabled vk_khr_get_surface_capabilities_2{kNotEnabled};
+    ExtEnabled vk_khr_surface{kNotEnabled};
+    ExtEnabled vk_khr_surface_protected_capabilities{kNotEnabled};
+    ExtEnabled vk_khr_wayland_surface{kNotEnabled};
+    ExtEnabled vk_khr_win32_surface{kNotEnabled};
+    ExtEnabled vk_khr_xcb_surface{kNotEnabled};
+    ExtEnabled vk_khr_xlib_surface{kNotEnabled};
+    ExtEnabled vk_mvk_ios_surface{kNotEnabled};
+    ExtEnabled vk_mvk_macos_surface{kNotEnabled};
+    ExtEnabled vk_nn_vi_surface{kNotEnabled};
+    ExtEnabled vk_nv_external_memory_capabilities{kNotEnabled};
+
+    struct InstanceReq {
+        const ExtEnabled InstanceExtensions::* enabled;
+        const char *name;
+    };
+    typedef std::vector<InstanceReq> InstanceReqVec;
+    struct InstanceInfo {
+       InstanceInfo(ExtEnabled InstanceExtensions::* state_, const InstanceReqVec requires_): state(state_), requires(requires_) {}
+       ExtEnabled InstanceExtensions::* state;
+       InstanceReqVec requires;
+    };
+
+    typedef std::unordered_map<std::string,InstanceInfo> InstanceInfoMap;
+    static const InstanceInfo &get_info(const char *name) {
+        static const InstanceInfoMap info_map = {
+            std::make_pair("VK_VERSION_1_1", InstanceInfo(&InstanceExtensions::vk_feature_version_1_1, {})),
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+            std::make_pair(VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_ext_acquire_xlib_display, {{
+                           {&InstanceExtensions::vk_ext_direct_mode_display, VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME}}})),
+#endif
+            std::make_pair(VK_EXT_DEBUG_REPORT_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_ext_debug_report, {})),
+            std::make_pair(VK_EXT_DEBUG_UTILS_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_ext_debug_utils, {})),
+            std::make_pair(VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_ext_direct_mode_display, {{
+                           {&InstanceExtensions::vk_khr_display, VK_KHR_DISPLAY_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_ext_display_surface_counter, {{
+                           {&InstanceExtensions::vk_khr_display, VK_KHR_DISPLAY_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_ext_headless_surface, {{
+                           {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
+#ifdef VK_USE_PLATFORM_METAL_EXT
+            std::make_pair(VK_EXT_METAL_SURFACE_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_ext_metal_surface, {{
+                           {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
+#endif
+            std::make_pair(VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_ext_swapchain_color_space, {{
+                           {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_ext_validation_features, {})),
+            std::make_pair(VK_EXT_VALIDATION_FLAGS_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_ext_validation_flags, {})),
+#ifdef VK_USE_PLATFORM_FUCHSIA
+            std::make_pair(VK_FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_fuchsia_imagepipe_surface, {{
+                           {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
+#endif
+#ifdef VK_USE_PLATFORM_GGP
+            std::make_pair(VK_GGP_STREAM_DESCRIPTOR_SURFACE_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_ggp_stream_descriptor_surface, {{
+                           {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
+#endif
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+            std::make_pair(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_android_surface, {{
+                           {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
+#endif
+            std::make_pair(VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_device_group_creation, {})),
+            std::make_pair(VK_KHR_DISPLAY_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_display, {{
+                           {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_external_fence_capabilities, {{
+                           {&InstanceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_external_memory_capabilities, {{
+                           {&InstanceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_external_semaphore_capabilities, {{
+                           {&InstanceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_get_display_properties_2, {{
+                           {&InstanceExtensions::vk_khr_display, VK_KHR_DISPLAY_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_get_physical_device_properties_2, {})),
+            std::make_pair(VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_get_surface_capabilities_2, {{
+                           {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_SURFACE_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_surface, {})),
+            std::make_pair(VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_surface_protected_capabilities, {{
+                           {&InstanceExtensions::vk_khr_get_surface_capabilities_2, VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME},
+                           {&InstanceExtensions::vk_feature_version_1_1, VK_VERSION_1_1_NAME}}})),
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+            std::make_pair(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_wayland_surface, {{
+                           {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+            std::make_pair(VK_KHR_WIN32_SURFACE_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_win32_surface, {{
+                           {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
+#endif
+#ifdef VK_USE_PLATFORM_XCB_KHR
+            std::make_pair(VK_KHR_XCB_SURFACE_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_xcb_surface, {{
+                           {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
+#endif
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+            std::make_pair(VK_KHR_XLIB_SURFACE_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_khr_xlib_surface, {{
+                           {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
+#endif
+#ifdef VK_USE_PLATFORM_IOS_MVK
+            std::make_pair(VK_MVK_IOS_SURFACE_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_mvk_ios_surface, {{
+                           {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
+#endif
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+            std::make_pair(VK_MVK_MACOS_SURFACE_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_mvk_macos_surface, {{
+                           {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
+#endif
+#ifdef VK_USE_PLATFORM_VI_NN
+            std::make_pair(VK_NN_VI_SURFACE_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_nn_vi_surface, {{
+                           {&InstanceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
+#endif
+            std::make_pair(VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME, InstanceInfo(&InstanceExtensions::vk_nv_external_memory_capabilities, {})),
+        };
+
+        static const InstanceInfo empty_info {nullptr, InstanceReqVec()};
+        InstanceInfoMap::const_iterator info = info_map.find(name);
+        if ( info != info_map.cend()) {
+            return info->second;
+        }
+        return empty_info;
+    }
+
+    uint32_t NormalizeApiVersion(uint32_t specified_version) {
+        uint32_t api_version = (specified_version < VK_API_VERSION_1_1) ? VK_API_VERSION_1_0 : VK_API_VERSION_1_1;
+        return api_version;
+    }
+
+    uint32_t InitFromInstanceCreateInfo(uint32_t requested_api_version, const VkInstanceCreateInfo *pCreateInfo) {
+
+        static const std::vector<const char *> V_1_1_promoted_instance_apis = {
+            VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME,
+            VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME,
+            VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME,
+            VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME,
+            VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
+        };
+
+        // Initialize struct data, robust to invalid pCreateInfo
+        uint32_t api_version = NormalizeApiVersion(requested_api_version);
+        if (api_version >= VK_API_VERSION_1_1) {
+            auto info = get_info("VK_VERSION_1_1");
+            if (info.state) this->*(info.state) = kEnabledByCreateinfo;
+            for (auto promoted_ext : V_1_1_promoted_instance_apis) {
+                info = get_info(promoted_ext);
+                assert(info.state);
+                if (info.state) this->*(info.state) = kEnabledByApiLevel;
+            }
+        }
+        // CreateInfo takes precedence over promoted
+        if (pCreateInfo->ppEnabledExtensionNames) {
+            for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
+                if (!pCreateInfo->ppEnabledExtensionNames[i]) continue;
+                auto info = get_info(pCreateInfo->ppEnabledExtensionNames[i]);
+                if (info.state) this->*(info.state) = kEnabledByCreateinfo;
+            }
+        }
+        return api_version;
+    }
+};
+
+static const std::set<std::string> kInstanceExtensionNames = {
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME,
+#endif
+    VK_EXT_DEBUG_REPORT_EXTENSION_NAME,
+    VK_EXT_DEBUG_UTILS_EXTENSION_NAME,
+    VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME,
+    VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME,
+    VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME,
+#ifdef VK_USE_PLATFORM_METAL_EXT
+    VK_EXT_METAL_SURFACE_EXTENSION_NAME,
+#endif
+    VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME,
+    VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME,
+    VK_EXT_VALIDATION_FLAGS_EXTENSION_NAME,
+#ifdef VK_USE_PLATFORM_FUCHSIA
+    VK_FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME,
+#endif
+#ifdef VK_USE_PLATFORM_GGP
+    VK_GGP_STREAM_DESCRIPTOR_SURFACE_EXTENSION_NAME,
+#endif
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    VK_KHR_ANDROID_SURFACE_EXTENSION_NAME,
+#endif
+    VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME,
+    VK_KHR_DISPLAY_EXTENSION_NAME,
+    VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME,
+    VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME,
+    VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME,
+    VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME,
+    VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
+    VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME,
+    VK_KHR_SURFACE_EXTENSION_NAME,
+    VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME,
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME,
+#endif
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VK_KHR_WIN32_SURFACE_EXTENSION_NAME,
+#endif
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    VK_KHR_XCB_SURFACE_EXTENSION_NAME,
+#endif
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    VK_KHR_XLIB_SURFACE_EXTENSION_NAME,
+#endif
+#ifdef VK_USE_PLATFORM_IOS_MVK
+    VK_MVK_IOS_SURFACE_EXTENSION_NAME,
+#endif
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+    VK_MVK_MACOS_SURFACE_EXTENSION_NAME,
+#endif
+#ifdef VK_USE_PLATFORM_VI_NN
+    VK_NN_VI_SURFACE_EXTENSION_NAME,
+#endif
+    VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME,
+};
+
+struct DeviceExtensions : public InstanceExtensions {
+    ExtEnabled vk_feature_version_1_1{kNotEnabled};
+    ExtEnabled vk_amd_buffer_marker{kNotEnabled};
+    ExtEnabled vk_amd_device_coherent_memory{kNotEnabled};
+    ExtEnabled vk_amd_display_native_hdr{kNotEnabled};
+    ExtEnabled vk_amd_draw_indirect_count{kNotEnabled};
+    ExtEnabled vk_amd_gcn_shader{kNotEnabled};
+    ExtEnabled vk_amd_gpu_shader_half_float{kNotEnabled};
+    ExtEnabled vk_amd_gpu_shader_int16{kNotEnabled};
+    ExtEnabled vk_amd_memory_overallocation_behavior{kNotEnabled};
+    ExtEnabled vk_amd_mixed_attachment_samples{kNotEnabled};
+    ExtEnabled vk_amd_negative_viewport_height{kNotEnabled};
+    ExtEnabled vk_amd_pipeline_compiler_control{kNotEnabled};
+    ExtEnabled vk_amd_rasterization_order{kNotEnabled};
+    ExtEnabled vk_amd_shader_ballot{kNotEnabled};
+    ExtEnabled vk_amd_shader_core_properties{kNotEnabled};
+    ExtEnabled vk_amd_shader_core_properties_2{kNotEnabled};
+    ExtEnabled vk_amd_shader_explicit_vertex_parameter{kNotEnabled};
+    ExtEnabled vk_amd_shader_fragment_mask{kNotEnabled};
+    ExtEnabled vk_amd_shader_image_load_store_lod{kNotEnabled};
+    ExtEnabled vk_amd_shader_info{kNotEnabled};
+    ExtEnabled vk_amd_shader_trinary_minmax{kNotEnabled};
+    ExtEnabled vk_amd_texture_gather_bias_lod{kNotEnabled};
+    ExtEnabled vk_android_external_memory_android_hardware_buffer{kNotEnabled};
+    ExtEnabled vk_ext_astc_decode_mode{kNotEnabled};
+    ExtEnabled vk_ext_blend_operation_advanced{kNotEnabled};
+    ExtEnabled vk_ext_buffer_device_address{kNotEnabled};
+    ExtEnabled vk_ext_calibrated_timestamps{kNotEnabled};
+    ExtEnabled vk_ext_conditional_rendering{kNotEnabled};
+    ExtEnabled vk_ext_conservative_rasterization{kNotEnabled};
+    ExtEnabled vk_ext_debug_marker{kNotEnabled};
+    ExtEnabled vk_ext_depth_clip_enable{kNotEnabled};
+    ExtEnabled vk_ext_depth_range_unrestricted{kNotEnabled};
+    ExtEnabled vk_ext_descriptor_indexing{kNotEnabled};
+    ExtEnabled vk_ext_discard_rectangles{kNotEnabled};
+    ExtEnabled vk_ext_display_control{kNotEnabled};
+    ExtEnabled vk_ext_external_memory_dma_buf{kNotEnabled};
+    ExtEnabled vk_ext_external_memory_host{kNotEnabled};
+    ExtEnabled vk_ext_filter_cubic{kNotEnabled};
+    ExtEnabled vk_ext_fragment_density_map{kNotEnabled};
+    ExtEnabled vk_ext_fragment_shader_interlock{kNotEnabled};
+    ExtEnabled vk_ext_full_screen_exclusive{kNotEnabled};
+    ExtEnabled vk_ext_global_priority{kNotEnabled};
+    ExtEnabled vk_ext_hdr_metadata{kNotEnabled};
+    ExtEnabled vk_ext_host_query_reset{kNotEnabled};
+    ExtEnabled vk_ext_image_drm_format_modifier{kNotEnabled};
+    ExtEnabled vk_ext_index_type_uint8{kNotEnabled};
+    ExtEnabled vk_ext_inline_uniform_block{kNotEnabled};
+    ExtEnabled vk_ext_line_rasterization{kNotEnabled};
+    ExtEnabled vk_ext_memory_budget{kNotEnabled};
+    ExtEnabled vk_ext_memory_priority{kNotEnabled};
+    ExtEnabled vk_ext_pci_bus_info{kNotEnabled};
+    ExtEnabled vk_ext_pipeline_creation_feedback{kNotEnabled};
+    ExtEnabled vk_ext_post_depth_coverage{kNotEnabled};
+    ExtEnabled vk_ext_queue_family_foreign{kNotEnabled};
+    ExtEnabled vk_ext_sample_locations{kNotEnabled};
+    ExtEnabled vk_ext_sampler_filter_minmax{kNotEnabled};
+    ExtEnabled vk_ext_scalar_block_layout{kNotEnabled};
+    ExtEnabled vk_ext_separate_stencil_usage{kNotEnabled};
+    ExtEnabled vk_ext_shader_demote_to_helper_invocation{kNotEnabled};
+    ExtEnabled vk_ext_shader_stencil_export{kNotEnabled};
+    ExtEnabled vk_ext_shader_subgroup_ballot{kNotEnabled};
+    ExtEnabled vk_ext_shader_subgroup_vote{kNotEnabled};
+    ExtEnabled vk_ext_shader_viewport_index_layer{kNotEnabled};
+    ExtEnabled vk_ext_subgroup_size_control{kNotEnabled};
+    ExtEnabled vk_ext_texel_buffer_alignment{kNotEnabled};
+    ExtEnabled vk_ext_texture_compression_astc_hdr{kNotEnabled};
+    ExtEnabled vk_ext_tooling_info{kNotEnabled};
+    ExtEnabled vk_ext_transform_feedback{kNotEnabled};
+    ExtEnabled vk_ext_validation_cache{kNotEnabled};
+    ExtEnabled vk_ext_vertex_attribute_divisor{kNotEnabled};
+    ExtEnabled vk_ext_ycbcr_image_arrays{kNotEnabled};
+    ExtEnabled vk_ggp_frame_token{kNotEnabled};
+    ExtEnabled vk_google_decorate_string{kNotEnabled};
+    ExtEnabled vk_google_display_timing{kNotEnabled};
+    ExtEnabled vk_google_hlsl_functionality1{kNotEnabled};
+    ExtEnabled vk_google_user_type{kNotEnabled};
+    ExtEnabled vk_img_filter_cubic{kNotEnabled};
+    ExtEnabled vk_img_format_pvrtc{kNotEnabled};
+    ExtEnabled vk_intel_performance_query{kNotEnabled};
+    ExtEnabled vk_intel_shader_integer_functions_2{kNotEnabled};
+    ExtEnabled vk_khr_16bit_storage{kNotEnabled};
+    ExtEnabled vk_khr_8bit_storage{kNotEnabled};
+    ExtEnabled vk_khr_bind_memory_2{kNotEnabled};
+    ExtEnabled vk_khr_buffer_device_address{kNotEnabled};
+    ExtEnabled vk_khr_create_renderpass_2{kNotEnabled};
+    ExtEnabled vk_khr_dedicated_allocation{kNotEnabled};
+    ExtEnabled vk_khr_depth_stencil_resolve{kNotEnabled};
+    ExtEnabled vk_khr_descriptor_update_template{kNotEnabled};
+    ExtEnabled vk_khr_device_group{kNotEnabled};
+    ExtEnabled vk_khr_display_swapchain{kNotEnabled};
+    ExtEnabled vk_khr_draw_indirect_count{kNotEnabled};
+    ExtEnabled vk_khr_driver_properties{kNotEnabled};
+    ExtEnabled vk_khr_external_fence{kNotEnabled};
+    ExtEnabled vk_khr_external_fence_fd{kNotEnabled};
+    ExtEnabled vk_khr_external_fence_win32{kNotEnabled};
+    ExtEnabled vk_khr_external_memory{kNotEnabled};
+    ExtEnabled vk_khr_external_memory_fd{kNotEnabled};
+    ExtEnabled vk_khr_external_memory_win32{kNotEnabled};
+    ExtEnabled vk_khr_external_semaphore{kNotEnabled};
+    ExtEnabled vk_khr_external_semaphore_fd{kNotEnabled};
+    ExtEnabled vk_khr_external_semaphore_win32{kNotEnabled};
+    ExtEnabled vk_khr_get_memory_requirements_2{kNotEnabled};
+    ExtEnabled vk_khr_image_format_list{kNotEnabled};
+    ExtEnabled vk_khr_imageless_framebuffer{kNotEnabled};
+    ExtEnabled vk_khr_incremental_present{kNotEnabled};
+    ExtEnabled vk_khr_maintenance1{kNotEnabled};
+    ExtEnabled vk_khr_maintenance2{kNotEnabled};
+    ExtEnabled vk_khr_maintenance3{kNotEnabled};
+    ExtEnabled vk_khr_multiview{kNotEnabled};
+    ExtEnabled vk_khr_performance_query{kNotEnabled};
+    ExtEnabled vk_khr_pipeline_executable_properties{kNotEnabled};
+    ExtEnabled vk_khr_push_descriptor{kNotEnabled};
+    ExtEnabled vk_khr_relaxed_block_layout{kNotEnabled};
+    ExtEnabled vk_khr_sampler_mirror_clamp_to_edge{kNotEnabled};
+    ExtEnabled vk_khr_sampler_ycbcr_conversion{kNotEnabled};
+    ExtEnabled vk_khr_separate_depth_stencil_layouts{kNotEnabled};
+    ExtEnabled vk_khr_shader_atomic_int64{kNotEnabled};
+    ExtEnabled vk_khr_shader_clock{kNotEnabled};
+    ExtEnabled vk_khr_shader_draw_parameters{kNotEnabled};
+    ExtEnabled vk_khr_shader_float16_int8{kNotEnabled};
+    ExtEnabled vk_khr_shader_float_controls{kNotEnabled};
+    ExtEnabled vk_khr_shader_subgroup_extended_types{kNotEnabled};
+    ExtEnabled vk_khr_shared_presentable_image{kNotEnabled};
+    ExtEnabled vk_khr_spirv_1_4{kNotEnabled};
+    ExtEnabled vk_khr_storage_buffer_storage_class{kNotEnabled};
+    ExtEnabled vk_khr_swapchain{kNotEnabled};
+    ExtEnabled vk_khr_swapchain_mutable_format{kNotEnabled};
+    ExtEnabled vk_khr_timeline_semaphore{kNotEnabled};
+    ExtEnabled vk_khr_uniform_buffer_standard_layout{kNotEnabled};
+    ExtEnabled vk_khr_variable_pointers{kNotEnabled};
+    ExtEnabled vk_khr_vulkan_memory_model{kNotEnabled};
+    ExtEnabled vk_khr_win32_keyed_mutex{kNotEnabled};
+    ExtEnabled vk_nvx_device_generated_commands{kNotEnabled};
+    ExtEnabled vk_nvx_image_view_handle{kNotEnabled};
+    ExtEnabled vk_nvx_multiview_per_view_attributes{kNotEnabled};
+    ExtEnabled vk_nv_clip_space_w_scaling{kNotEnabled};
+    ExtEnabled vk_nv_compute_shader_derivatives{kNotEnabled};
+    ExtEnabled vk_nv_cooperative_matrix{kNotEnabled};
+    ExtEnabled vk_nv_corner_sampled_image{kNotEnabled};
+    ExtEnabled vk_nv_coverage_reduction_mode{kNotEnabled};
+    ExtEnabled vk_nv_dedicated_allocation{kNotEnabled};
+    ExtEnabled vk_nv_dedicated_allocation_image_aliasing{kNotEnabled};
+    ExtEnabled vk_nv_device_diagnostic_checkpoints{kNotEnabled};
+    ExtEnabled vk_nv_external_memory{kNotEnabled};
+    ExtEnabled vk_nv_external_memory_win32{kNotEnabled};
+    ExtEnabled vk_nv_fill_rectangle{kNotEnabled};
+    ExtEnabled vk_nv_fragment_coverage_to_color{kNotEnabled};
+    ExtEnabled vk_nv_fragment_shader_barycentric{kNotEnabled};
+    ExtEnabled vk_nv_framebuffer_mixed_samples{kNotEnabled};
+    ExtEnabled vk_nv_geometry_shader_passthrough{kNotEnabled};
+    ExtEnabled vk_nv_glsl_shader{kNotEnabled};
+    ExtEnabled vk_nv_mesh_shader{kNotEnabled};
+    ExtEnabled vk_nv_ray_tracing{kNotEnabled};
+    ExtEnabled vk_nv_representative_fragment_test{kNotEnabled};
+    ExtEnabled vk_nv_sample_mask_override_coverage{kNotEnabled};
+    ExtEnabled vk_nv_scissor_exclusive{kNotEnabled};
+    ExtEnabled vk_nv_shader_image_footprint{kNotEnabled};
+    ExtEnabled vk_nv_shader_sm_builtins{kNotEnabled};
+    ExtEnabled vk_nv_shader_subgroup_partitioned{kNotEnabled};
+    ExtEnabled vk_nv_shading_rate_image{kNotEnabled};
+    ExtEnabled vk_nv_viewport_array2{kNotEnabled};
+    ExtEnabled vk_nv_viewport_swizzle{kNotEnabled};
+    ExtEnabled vk_nv_win32_keyed_mutex{kNotEnabled};
+
+    struct DeviceReq {
+        const ExtEnabled DeviceExtensions::* enabled;
+        const char *name;
+    };
+    typedef std::vector<DeviceReq> DeviceReqVec;
+    struct DeviceInfo {
+       DeviceInfo(ExtEnabled DeviceExtensions::* state_, const DeviceReqVec requires_): state(state_), requires(requires_) {}
+       ExtEnabled DeviceExtensions::* state;
+       DeviceReqVec requires;
+    };
+
+    typedef std::unordered_map<std::string,DeviceInfo> DeviceInfoMap;
+    static const DeviceInfo &get_info(const char *name) {
+        static const DeviceInfoMap info_map = {
+            std::make_pair("VK_VERSION_1_1", DeviceInfo(&DeviceExtensions::vk_feature_version_1_1, {})),
+            std::make_pair(VK_AMD_BUFFER_MARKER_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_buffer_marker, {})),
+            std::make_pair(VK_AMD_DEVICE_COHERENT_MEMORY_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_device_coherent_memory, {})),
+            std::make_pair(VK_AMD_DISPLAY_NATIVE_HDR_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_display_native_hdr, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_khr_get_surface_capabilities_2, VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_khr_swapchain, VK_KHR_SWAPCHAIN_EXTENSION_NAME}}})),
+            std::make_pair(VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_draw_indirect_count, {})),
+            std::make_pair(VK_AMD_GCN_SHADER_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_gcn_shader, {})),
+            std::make_pair(VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_gpu_shader_half_float, {})),
+            std::make_pair(VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_gpu_shader_int16, {})),
+            std::make_pair(VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_memory_overallocation_behavior, {})),
+            std::make_pair(VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_mixed_attachment_samples, {})),
+            std::make_pair(VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_negative_viewport_height, {})),
+            std::make_pair(VK_AMD_PIPELINE_COMPILER_CONTROL_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_pipeline_compiler_control, {})),
+            std::make_pair(VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_rasterization_order, {})),
+            std::make_pair(VK_AMD_SHADER_BALLOT_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_shader_ballot, {})),
+            std::make_pair(VK_AMD_SHADER_CORE_PROPERTIES_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_shader_core_properties, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_AMD_SHADER_CORE_PROPERTIES_2_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_shader_core_properties_2, {{
+                           {&DeviceExtensions::vk_amd_shader_core_properties, VK_AMD_SHADER_CORE_PROPERTIES_EXTENSION_NAME}}})),
+            std::make_pair(VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_shader_explicit_vertex_parameter, {})),
+            std::make_pair(VK_AMD_SHADER_FRAGMENT_MASK_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_shader_fragment_mask, {})),
+            std::make_pair(VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_shader_image_load_store_lod, {})),
+            std::make_pair(VK_AMD_SHADER_INFO_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_shader_info, {})),
+            std::make_pair(VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_shader_trinary_minmax, {})),
+            std::make_pair(VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_amd_texture_gather_bias_lod, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+            std::make_pair(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_android_external_memory_android_hardware_buffer, {{
+                           {&DeviceExtensions::vk_khr_sampler_ycbcr_conversion, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_khr_external_memory, VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_ext_queue_family_foreign, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME}}})),
+#endif
+            std::make_pair(VK_EXT_ASTC_DECODE_MODE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_astc_decode_mode, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_blend_operation_advanced, {})),
+            std::make_pair(VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_buffer_device_address, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_CALIBRATED_TIMESTAMPS_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_calibrated_timestamps, {})),
+            std::make_pair(VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_conditional_rendering, {})),
+            std::make_pair(VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_conservative_rasterization, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_DEBUG_MARKER_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_debug_marker, {{
+                           {&DeviceExtensions::vk_ext_debug_report, VK_EXT_DEBUG_REPORT_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_DEPTH_CLIP_ENABLE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_depth_clip_enable, {})),
+            std::make_pair(VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_depth_range_unrestricted, {})),
+            std::make_pair(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_descriptor_indexing, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_khr_maintenance3, VK_KHR_MAINTENANCE3_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_discard_rectangles, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_display_control, {{
+                           {&DeviceExtensions::vk_ext_display_surface_counter, VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_khr_swapchain, VK_KHR_SWAPCHAIN_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_external_memory_dma_buf, {{
+                           {&DeviceExtensions::vk_khr_external_memory_fd, VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_external_memory_host, {{
+                           {&DeviceExtensions::vk_khr_external_memory, VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_FILTER_CUBIC_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_filter_cubic, {{
+                           {&DeviceExtensions::vk_img_filter_cubic, VK_IMG_FILTER_CUBIC_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_fragment_density_map, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_fragment_shader_interlock, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+            std::make_pair(VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_full_screen_exclusive, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_khr_get_surface_capabilities_2, VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_khr_swapchain, VK_KHR_SWAPCHAIN_EXTENSION_NAME}}})),
+#endif
+            std::make_pair(VK_EXT_GLOBAL_PRIORITY_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_global_priority, {})),
+            std::make_pair(VK_EXT_HDR_METADATA_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_hdr_metadata, {{
+                           {&DeviceExtensions::vk_khr_swapchain, VK_KHR_SWAPCHAIN_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_host_query_reset, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_image_drm_format_modifier, {{
+                           {&DeviceExtensions::vk_khr_bind_memory_2, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_khr_image_format_list, VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_khr_sampler_ycbcr_conversion, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_index_type_uint8, {})),
+            std::make_pair(VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_inline_uniform_block, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_khr_maintenance1, VK_KHR_MAINTENANCE1_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_line_rasterization, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_MEMORY_BUDGET_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_memory_budget, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_memory_priority, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_PCI_BUS_INFO_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_pci_bus_info, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_pipeline_creation_feedback, {})),
+            std::make_pair(VK_EXT_POST_DEPTH_COVERAGE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_post_depth_coverage, {})),
+            std::make_pair(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_queue_family_foreign, {{
+                           {&DeviceExtensions::vk_khr_external_memory, VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_sample_locations, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_sampler_filter_minmax, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_scalar_block_layout, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_separate_stencil_usage, {})),
+            std::make_pair(VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_shader_demote_to_helper_invocation, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_shader_stencil_export, {})),
+            std::make_pair(VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_shader_subgroup_ballot, {})),
+            std::make_pair(VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_shader_subgroup_vote, {})),
+            std::make_pair(VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_shader_viewport_index_layer, {})),
+            std::make_pair(VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_subgroup_size_control, {{
+                           {&DeviceExtensions::vk_feature_version_1_1, VK_VERSION_1_1_NAME}}})),
+            std::make_pair(VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_texel_buffer_alignment, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_texture_compression_astc_hdr, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_TOOLING_INFO_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_tooling_info, {})),
+            std::make_pair(VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_transform_feedback, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_VALIDATION_CACHE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_validation_cache, {})),
+            std::make_pair(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_vertex_attribute_divisor, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_EXT_YCBCR_IMAGE_ARRAYS_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ext_ycbcr_image_arrays, {{
+                           {&DeviceExtensions::vk_khr_sampler_ycbcr_conversion, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME}}})),
+#ifdef VK_USE_PLATFORM_GGP
+            std::make_pair(VK_GGP_FRAME_TOKEN_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_ggp_frame_token, {{
+                           {&DeviceExtensions::vk_khr_swapchain, VK_KHR_SWAPCHAIN_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_ggp_stream_descriptor_surface, VK_GGP_STREAM_DESCRIPTOR_SURFACE_EXTENSION_NAME}}})),
+#endif
+            std::make_pair(VK_GOOGLE_DECORATE_STRING_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_google_decorate_string, {})),
+            std::make_pair(VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_google_display_timing, {{
+                           {&DeviceExtensions::vk_khr_swapchain, VK_KHR_SWAPCHAIN_EXTENSION_NAME}}})),
+            std::make_pair(VK_GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_google_hlsl_functionality1, {})),
+            std::make_pair(VK_GOOGLE_USER_TYPE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_google_user_type, {})),
+            std::make_pair(VK_IMG_FILTER_CUBIC_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_img_filter_cubic, {})),
+            std::make_pair(VK_IMG_FORMAT_PVRTC_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_img_format_pvrtc, {})),
+            std::make_pair(VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_intel_performance_query, {})),
+            std::make_pair(VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_intel_shader_integer_functions_2, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_16BIT_STORAGE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_16bit_storage, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_khr_storage_buffer_storage_class, VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_8BIT_STORAGE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_8bit_storage, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_khr_storage_buffer_storage_class, VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_bind_memory_2, {})),
+            std::make_pair(VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_buffer_device_address, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_create_renderpass_2, {{
+                           {&DeviceExtensions::vk_khr_multiview, VK_KHR_MULTIVIEW_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_khr_maintenance2, VK_KHR_MAINTENANCE2_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_dedicated_allocation, {{
+                           {&DeviceExtensions::vk_khr_get_memory_requirements_2, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_depth_stencil_resolve, {{
+                           {&DeviceExtensions::vk_khr_create_renderpass_2, VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_descriptor_update_template, {})),
+            std::make_pair(VK_KHR_DEVICE_GROUP_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_device_group, {{
+                           {&DeviceExtensions::vk_khr_device_group_creation, VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_display_swapchain, {{
+                           {&DeviceExtensions::vk_khr_swapchain, VK_KHR_SWAPCHAIN_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_khr_display, VK_KHR_DISPLAY_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_draw_indirect_count, {})),
+            std::make_pair(VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_driver_properties, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_external_fence, {{
+                           {&DeviceExtensions::vk_khr_external_fence_capabilities, VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_external_fence_fd, {{
+                           {&DeviceExtensions::vk_khr_external_fence, VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME}}})),
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+            std::make_pair(VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_external_fence_win32, {{
+                           {&DeviceExtensions::vk_khr_external_fence, VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME}}})),
+#endif
+            std::make_pair(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_external_memory, {{
+                           {&DeviceExtensions::vk_khr_external_memory_capabilities, VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_external_memory_fd, {{
+                           {&DeviceExtensions::vk_khr_external_memory, VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME}}})),
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+            std::make_pair(VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_external_memory_win32, {{
+                           {&DeviceExtensions::vk_khr_external_memory, VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME}}})),
+#endif
+            std::make_pair(VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_external_semaphore, {{
+                           {&DeviceExtensions::vk_khr_external_semaphore_capabilities, VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_external_semaphore_fd, {{
+                           {&DeviceExtensions::vk_khr_external_semaphore, VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME}}})),
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+            std::make_pair(VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_external_semaphore_win32, {{
+                           {&DeviceExtensions::vk_khr_external_semaphore, VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME}}})),
+#endif
+            std::make_pair(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_get_memory_requirements_2, {})),
+            std::make_pair(VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_image_format_list, {})),
+            std::make_pair(VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_imageless_framebuffer, {{
+                           {&DeviceExtensions::vk_khr_maintenance2, VK_KHR_MAINTENANCE2_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_khr_image_format_list, VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_incremental_present, {{
+                           {&DeviceExtensions::vk_khr_swapchain, VK_KHR_SWAPCHAIN_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_MAINTENANCE1_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_maintenance1, {})),
+            std::make_pair(VK_KHR_MAINTENANCE2_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_maintenance2, {})),
+            std::make_pair(VK_KHR_MAINTENANCE3_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_maintenance3, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_MULTIVIEW_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_multiview, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_performance_query, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_pipeline_executable_properties, {})),
+            std::make_pair(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_push_descriptor, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_relaxed_block_layout, {})),
+            std::make_pair(VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_sampler_mirror_clamp_to_edge, {})),
+            std::make_pair(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_sampler_ycbcr_conversion, {{
+                           {&DeviceExtensions::vk_khr_maintenance1, VK_KHR_MAINTENANCE1_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_khr_bind_memory_2, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_khr_get_memory_requirements_2, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_separate_depth_stencil_layouts, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_khr_create_renderpass_2, VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_shader_atomic_int64, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_SHADER_CLOCK_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_shader_clock, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_shader_draw_parameters, {})),
+            std::make_pair(VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_shader_float16_int8, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_shader_float_controls, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_shader_subgroup_extended_types, {{
+                           {&DeviceExtensions::vk_feature_version_1_1, VK_VERSION_1_1_NAME}}})),
+            std::make_pair(VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_shared_presentable_image, {{
+                           {&DeviceExtensions::vk_khr_swapchain, VK_KHR_SWAPCHAIN_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_khr_get_surface_capabilities_2, VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_SPIRV_1_4_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_spirv_1_4, {{
+                           {&DeviceExtensions::vk_khr_shader_float_controls, VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_feature_version_1_1, VK_VERSION_1_1_NAME}}})),
+            std::make_pair(VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_storage_buffer_storage_class, {})),
+            std::make_pair(VK_KHR_SWAPCHAIN_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_swapchain, {{
+                           {&DeviceExtensions::vk_khr_surface, VK_KHR_SURFACE_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_swapchain_mutable_format, {{
+                           {&DeviceExtensions::vk_khr_swapchain, VK_KHR_SWAPCHAIN_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_khr_maintenance2, VK_KHR_MAINTENANCE2_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_khr_image_format_list, VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_timeline_semaphore, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_uniform_buffer_standard_layout, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_variable_pointers, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_khr_storage_buffer_storage_class, VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME}}})),
+            std::make_pair(VK_KHR_VULKAN_MEMORY_MODEL_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_vulkan_memory_model, {})),
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+            std::make_pair(VK_KHR_WIN32_KEYED_MUTEX_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_khr_win32_keyed_mutex, {{
+                           {&DeviceExtensions::vk_khr_external_memory_win32, VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME}}})),
+#endif
+            std::make_pair(VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nvx_device_generated_commands, {})),
+            std::make_pair(VK_NVX_IMAGE_VIEW_HANDLE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nvx_image_view_handle, {})),
+            std::make_pair(VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nvx_multiview_per_view_attributes, {{
+                           {&DeviceExtensions::vk_khr_multiview, VK_KHR_MULTIVIEW_EXTENSION_NAME}}})),
+            std::make_pair(VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_clip_space_w_scaling, {})),
+            std::make_pair(VK_NV_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_compute_shader_derivatives, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_NV_COOPERATIVE_MATRIX_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_cooperative_matrix, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_NV_CORNER_SAMPLED_IMAGE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_corner_sampled_image, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_NV_COVERAGE_REDUCTION_MODE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_coverage_reduction_mode, {{
+                           {&DeviceExtensions::vk_nv_framebuffer_mixed_samples, VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME}}})),
+            std::make_pair(VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_dedicated_allocation, {})),
+            std::make_pair(VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_dedicated_allocation_image_aliasing, {{
+                           {&DeviceExtensions::vk_khr_dedicated_allocation, VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME}}})),
+            std::make_pair(VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_device_diagnostic_checkpoints, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_external_memory, {{
+                           {&DeviceExtensions::vk_nv_external_memory_capabilities, VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME}}})),
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+            std::make_pair(VK_NV_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_external_memory_win32, {{
+                           {&DeviceExtensions::vk_nv_external_memory, VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME}}})),
+#endif
+            std::make_pair(VK_NV_FILL_RECTANGLE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_fill_rectangle, {})),
+            std::make_pair(VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_fragment_coverage_to_color, {})),
+            std::make_pair(VK_NV_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_fragment_shader_barycentric, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_framebuffer_mixed_samples, {})),
+            std::make_pair(VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_geometry_shader_passthrough, {})),
+            std::make_pair(VK_NV_GLSL_SHADER_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_glsl_shader, {})),
+            std::make_pair(VK_NV_MESH_SHADER_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_mesh_shader, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_NV_RAY_TRACING_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_ray_tracing, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME},
+                           {&DeviceExtensions::vk_khr_get_memory_requirements_2, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_NV_REPRESENTATIVE_FRAGMENT_TEST_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_representative_fragment_test, {})),
+            std::make_pair(VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_sample_mask_override_coverage, {})),
+            std::make_pair(VK_NV_SCISSOR_EXCLUSIVE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_scissor_exclusive, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_NV_SHADER_IMAGE_FOOTPRINT_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_shader_image_footprint, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_NV_SHADER_SM_BUILTINS_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_shader_sm_builtins, {{
+                           {&DeviceExtensions::vk_feature_version_1_1, VK_VERSION_1_1_NAME}}})),
+            std::make_pair(VK_NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_shader_subgroup_partitioned, {{
+                           {&DeviceExtensions::vk_feature_version_1_1, VK_VERSION_1_1_NAME}}})),
+            std::make_pair(VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_shading_rate_image, {{
+                           {&DeviceExtensions::vk_khr_get_physical_device_properties_2, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}}})),
+            std::make_pair(VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_viewport_array2, {})),
+            std::make_pair(VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_viewport_swizzle, {})),
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+            std::make_pair(VK_NV_WIN32_KEYED_MUTEX_EXTENSION_NAME, DeviceInfo(&DeviceExtensions::vk_nv_win32_keyed_mutex, {{
+                           {&DeviceExtensions::vk_nv_external_memory_win32, VK_NV_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME}}})),
+#endif
+        };
+
+        static const DeviceInfo empty_info {nullptr, DeviceReqVec()};
+        DeviceInfoMap::const_iterator info = info_map.find(name);
+        if (info != info_map.cend()) {
+            return info->second;
+        }
+        return empty_info;
+    }
+
+    DeviceExtensions() = default;
+    DeviceExtensions(const InstanceExtensions& instance_ext) : InstanceExtensions(instance_ext) {}
+
+    uint32_t InitFromDeviceCreateInfo(const InstanceExtensions *instance_extensions, uint32_t requested_api_version,
+                                      const VkDeviceCreateInfo *pCreateInfo) {
+        // Initialize this to defaults; base class fields come from the input instance extensions.
+        assert(instance_extensions);
+        *this = DeviceExtensions(*instance_extensions);
+
+        static const std::vector<const char *> V_1_1_promoted_device_apis = {
+            VK_KHR_16BIT_STORAGE_EXTENSION_NAME,
+            VK_KHR_BIND_MEMORY_2_EXTENSION_NAME,
+            VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME,
+            VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME,
+            VK_KHR_DEVICE_GROUP_EXTENSION_NAME,
+            VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME,
+            VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME,
+            VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME,
+            VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME,
+            VK_KHR_MAINTENANCE1_EXTENSION_NAME,
+            VK_KHR_MAINTENANCE2_EXTENSION_NAME,
+            VK_KHR_MAINTENANCE3_EXTENSION_NAME,
+            VK_KHR_MULTIVIEW_EXTENSION_NAME,
+            VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME,
+            VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME,
+            VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME,
+            VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME,
+            VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME,
+        };
+
+        // Initialize struct data, robust to invalid pCreateInfo
+        uint32_t api_version = NormalizeApiVersion(requested_api_version);
+        if (api_version >= VK_API_VERSION_1_1) {
+            auto info = get_info("VK_VERSION_1_1");
+            if (info.state) this->*(info.state) = kEnabledByCreateinfo;
+            for (auto promoted_ext : V_1_1_promoted_device_apis) {
+                info = get_info(promoted_ext);
+                assert(info.state);
+                if (info.state) this->*(info.state) = kEnabledByApiLevel;
+            }
+        }
+        // CreateInfo takes precedence over promoted
+        if (pCreateInfo->ppEnabledExtensionNames) {
+            for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
+                if (!pCreateInfo->ppEnabledExtensionNames[i]) continue;
+                auto info = get_info(pCreateInfo->ppEnabledExtensionNames[i]);
+                if (info.state) this->*(info.state) = kEnabledByCreateinfo;
+            }
+        }
+        return api_version;
+    }
+};
+
+static const std::set<std::string> kDeviceExtensionNames = {
+    VK_AMD_BUFFER_MARKER_EXTENSION_NAME,
+    VK_AMD_DEVICE_COHERENT_MEMORY_EXTENSION_NAME,
+    VK_AMD_DISPLAY_NATIVE_HDR_EXTENSION_NAME,
+    VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME,
+    VK_AMD_GCN_SHADER_EXTENSION_NAME,
+    VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME,
+    VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME,
+    VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_EXTENSION_NAME,
+    VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME,
+    VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME,
+    VK_AMD_PIPELINE_COMPILER_CONTROL_EXTENSION_NAME,
+    VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME,
+    VK_AMD_SHADER_BALLOT_EXTENSION_NAME,
+    VK_AMD_SHADER_CORE_PROPERTIES_EXTENSION_NAME,
+    VK_AMD_SHADER_CORE_PROPERTIES_2_EXTENSION_NAME,
+    VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME,
+    VK_AMD_SHADER_FRAGMENT_MASK_EXTENSION_NAME,
+    VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_EXTENSION_NAME,
+    VK_AMD_SHADER_INFO_EXTENSION_NAME,
+    VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME,
+    VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME,
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME,
+#endif
+    VK_EXT_ASTC_DECODE_MODE_EXTENSION_NAME,
+    VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME,
+    VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME,
+    VK_EXT_CALIBRATED_TIMESTAMPS_EXTENSION_NAME,
+    VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME,
+    VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME,
+    VK_EXT_DEBUG_MARKER_EXTENSION_NAME,
+    VK_EXT_DEPTH_CLIP_ENABLE_EXTENSION_NAME,
+    VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME,
+    VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME,
+    VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME,
+    VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME,
+    VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME,
+    VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME,
+    VK_EXT_FILTER_CUBIC_EXTENSION_NAME,
+    VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME,
+    VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME,
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME,
+#endif
+    VK_EXT_GLOBAL_PRIORITY_EXTENSION_NAME,
+    VK_EXT_HDR_METADATA_EXTENSION_NAME,
+    VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME,
+    VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME,
+    VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME,
+    VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME,
+    VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME,
+    VK_EXT_MEMORY_BUDGET_EXTENSION_NAME,
+    VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME,
+    VK_EXT_PCI_BUS_INFO_EXTENSION_NAME,
+    VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME,
+    VK_EXT_POST_DEPTH_COVERAGE_EXTENSION_NAME,
+    VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME,
+    VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME,
+    VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME,
+    VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME,
+    VK_EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME,
+    VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME,
+    VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME,
+    VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME,
+    VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME,
+    VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME,
+    VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME,
+    VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME,
+    VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_EXTENSION_NAME,
+    VK_EXT_TOOLING_INFO_EXTENSION_NAME,
+    VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME,
+    VK_EXT_VALIDATION_CACHE_EXTENSION_NAME,
+    VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME,
+    VK_EXT_YCBCR_IMAGE_ARRAYS_EXTENSION_NAME,
+#ifdef VK_USE_PLATFORM_GGP
+    VK_GGP_FRAME_TOKEN_EXTENSION_NAME,
+#endif
+    VK_GOOGLE_DECORATE_STRING_EXTENSION_NAME,
+    VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME,
+    VK_GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME,
+    VK_GOOGLE_USER_TYPE_EXTENSION_NAME,
+    VK_IMG_FILTER_CUBIC_EXTENSION_NAME,
+    VK_IMG_FORMAT_PVRTC_EXTENSION_NAME,
+    VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME,
+    VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_EXTENSION_NAME,
+    VK_KHR_16BIT_STORAGE_EXTENSION_NAME,
+    VK_KHR_8BIT_STORAGE_EXTENSION_NAME,
+    VK_KHR_BIND_MEMORY_2_EXTENSION_NAME,
+    VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME,
+    VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME,
+    VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME,
+    VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME,
+    VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME,
+    VK_KHR_DEVICE_GROUP_EXTENSION_NAME,
+    VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME,
+    VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME,
+    VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME,
+    VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME,
+    VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME,
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME,
+#endif
+    VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME,
+    VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME,
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME,
+#endif
+    VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME,
+    VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME,
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME,
+#endif
+    VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME,
+    VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME,
+    VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME,
+    VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME,
+    VK_KHR_MAINTENANCE1_EXTENSION_NAME,
+    VK_KHR_MAINTENANCE2_EXTENSION_NAME,
+    VK_KHR_MAINTENANCE3_EXTENSION_NAME,
+    VK_KHR_MULTIVIEW_EXTENSION_NAME,
+    VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME,
+    VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME,
+    VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME,
+    VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME,
+    VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME,
+    VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME,
+    VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME,
+    VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME,
+    VK_KHR_SHADER_CLOCK_EXTENSION_NAME,
+    VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME,
+    VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME,
+    VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME,
+    VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME,
+    VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME,
+    VK_KHR_SPIRV_1_4_EXTENSION_NAME,
+    VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME,
+    VK_KHR_SWAPCHAIN_EXTENSION_NAME,
+    VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME,
+    VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME,
+    VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME,
+    VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME,
+    VK_KHR_VULKAN_MEMORY_MODEL_EXTENSION_NAME,
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VK_KHR_WIN32_KEYED_MUTEX_EXTENSION_NAME,
+#endif
+    VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME,
+    VK_NVX_IMAGE_VIEW_HANDLE_EXTENSION_NAME,
+    VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME,
+    VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME,
+    VK_NV_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME,
+    VK_NV_COOPERATIVE_MATRIX_EXTENSION_NAME,
+    VK_NV_CORNER_SAMPLED_IMAGE_EXTENSION_NAME,
+    VK_NV_COVERAGE_REDUCTION_MODE_EXTENSION_NAME,
+    VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME,
+    VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_EXTENSION_NAME,
+    VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_EXTENSION_NAME,
+    VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME,
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VK_NV_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME,
+#endif
+    VK_NV_FILL_RECTANGLE_EXTENSION_NAME,
+    VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME,
+    VK_NV_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME,
+    VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME,
+    VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME,
+    VK_NV_GLSL_SHADER_EXTENSION_NAME,
+    VK_NV_MESH_SHADER_EXTENSION_NAME,
+    VK_NV_RAY_TRACING_EXTENSION_NAME,
+    VK_NV_REPRESENTATIVE_FRAGMENT_TEST_EXTENSION_NAME,
+    VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME,
+    VK_NV_SCISSOR_EXCLUSIVE_EXTENSION_NAME,
+    VK_NV_SHADER_IMAGE_FOOTPRINT_EXTENSION_NAME,
+    VK_NV_SHADER_SM_BUILTINS_EXTENSION_NAME,
+    VK_NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME,
+    VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME,
+    VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME,
+    VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME,
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VK_NV_WIN32_KEYED_MUTEX_EXTENSION_NAME,
+#endif
+};
+
+
+#endif // VK_EXTENSION_HELPER_H_
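For orientation, the generated DeviceExtensions helper above is consumed by the validation layers at vkCreateDevice time: the layer resolves promoted Vulkan 1.1 extensions, applies ppEnabledExtensionNames, and then branches on the per-extension state fields. The snippet below is a minimal sketch only and is not part of the imported patch; the variable names and the implicit boolean test of the extension field are assumptions based on the generated code shown above.

    // Sketch (assumes the vk_extension_helper.h above is included and that
    // instance_extensions, requested_api_version, and pCreateInfo come from the
    // layer's vkCreateDevice hook -- hypothetical names for this illustration).
    DeviceExtensions device_extensions;
    uint32_t effective_api_version = device_extensions.InitFromDeviceCreateInfo(
        &instance_extensions, requested_api_version, pCreateInfo);

    // Each named field now records whether the extension was requested in the
    // create info (kEnabledByCreateinfo) or promoted by the API level
    // (kEnabledByApiLevel); a default value means not enabled (assumed here).
    if (device_extensions.vk_khr_swapchain) {
        // swapchain-aware validation state can be set up here
    }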
diff --git a/src/third_party/vulkan-validation-layers/src/layers/generated/vk_layer_dispatch_table.h b/src/third_party/vulkan-validation-layers/src/layers/generated/vk_layer_dispatch_table.h
new file mode 100644
index 0000000..ad30909
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/generated/vk_layer_dispatch_table.h
@@ -0,0 +1,667 @@
+// *** THIS FILE IS GENERATED - DO NOT EDIT ***
+// See layer_dispatch_table_generator.py for modifications
+
+/*
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Mark Young <marky@lunarg.com>
+ */
+
+#pragma once
+
+typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_GetPhysicalDeviceProcAddr)(VkInstance instance, const char* pName);
+
+// Instance function pointer dispatch table
+typedef struct VkLayerInstanceDispatchTable_ {
+    // Manually add in GetPhysicalDeviceProcAddr entry
+    PFN_GetPhysicalDeviceProcAddr GetPhysicalDeviceProcAddr;
+
+    // ---- Core 1_0 commands
+    PFN_vkCreateInstance CreateInstance;
+    PFN_vkDestroyInstance DestroyInstance;
+    PFN_vkEnumeratePhysicalDevices EnumeratePhysicalDevices;
+    PFN_vkGetPhysicalDeviceFeatures GetPhysicalDeviceFeatures;
+    PFN_vkGetPhysicalDeviceFormatProperties GetPhysicalDeviceFormatProperties;
+    PFN_vkGetPhysicalDeviceImageFormatProperties GetPhysicalDeviceImageFormatProperties;
+    PFN_vkGetPhysicalDeviceProperties GetPhysicalDeviceProperties;
+    PFN_vkGetPhysicalDeviceQueueFamilyProperties GetPhysicalDeviceQueueFamilyProperties;
+    PFN_vkGetPhysicalDeviceMemoryProperties GetPhysicalDeviceMemoryProperties;
+    PFN_vkGetInstanceProcAddr GetInstanceProcAddr;
+    PFN_vkCreateDevice CreateDevice;
+    PFN_vkEnumerateInstanceExtensionProperties EnumerateInstanceExtensionProperties;
+    PFN_vkEnumerateDeviceExtensionProperties EnumerateDeviceExtensionProperties;
+    PFN_vkEnumerateInstanceLayerProperties EnumerateInstanceLayerProperties;
+    PFN_vkEnumerateDeviceLayerProperties EnumerateDeviceLayerProperties;
+    PFN_vkGetPhysicalDeviceSparseImageFormatProperties GetPhysicalDeviceSparseImageFormatProperties;
+
+    // ---- Core 1_1 commands
+    PFN_vkEnumerateInstanceVersion EnumerateInstanceVersion;
+    PFN_vkEnumeratePhysicalDeviceGroups EnumeratePhysicalDeviceGroups;
+    PFN_vkGetPhysicalDeviceFeatures2 GetPhysicalDeviceFeatures2;
+    PFN_vkGetPhysicalDeviceProperties2 GetPhysicalDeviceProperties2;
+    PFN_vkGetPhysicalDeviceFormatProperties2 GetPhysicalDeviceFormatProperties2;
+    PFN_vkGetPhysicalDeviceImageFormatProperties2 GetPhysicalDeviceImageFormatProperties2;
+    PFN_vkGetPhysicalDeviceQueueFamilyProperties2 GetPhysicalDeviceQueueFamilyProperties2;
+    PFN_vkGetPhysicalDeviceMemoryProperties2 GetPhysicalDeviceMemoryProperties2;
+    PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 GetPhysicalDeviceSparseImageFormatProperties2;
+    PFN_vkGetPhysicalDeviceExternalBufferProperties GetPhysicalDeviceExternalBufferProperties;
+    PFN_vkGetPhysicalDeviceExternalFenceProperties GetPhysicalDeviceExternalFenceProperties;
+    PFN_vkGetPhysicalDeviceExternalSemaphoreProperties GetPhysicalDeviceExternalSemaphoreProperties;
+
+    // ---- VK_KHR_surface extension commands
+    PFN_vkDestroySurfaceKHR DestroySurfaceKHR;
+    PFN_vkGetPhysicalDeviceSurfaceSupportKHR GetPhysicalDeviceSurfaceSupportKHR;
+    PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR GetPhysicalDeviceSurfaceCapabilitiesKHR;
+    PFN_vkGetPhysicalDeviceSurfaceFormatsKHR GetPhysicalDeviceSurfaceFormatsKHR;
+    PFN_vkGetPhysicalDeviceSurfacePresentModesKHR GetPhysicalDeviceSurfacePresentModesKHR;
+
+    // ---- VK_KHR_swapchain extension commands
+    PFN_vkGetPhysicalDevicePresentRectanglesKHR GetPhysicalDevicePresentRectanglesKHR;
+
+    // ---- VK_KHR_display extension commands
+    PFN_vkGetPhysicalDeviceDisplayPropertiesKHR GetPhysicalDeviceDisplayPropertiesKHR;
+    PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR GetPhysicalDeviceDisplayPlanePropertiesKHR;
+    PFN_vkGetDisplayPlaneSupportedDisplaysKHR GetDisplayPlaneSupportedDisplaysKHR;
+    PFN_vkGetDisplayModePropertiesKHR GetDisplayModePropertiesKHR;
+    PFN_vkCreateDisplayModeKHR CreateDisplayModeKHR;
+    PFN_vkGetDisplayPlaneCapabilitiesKHR GetDisplayPlaneCapabilitiesKHR;
+    PFN_vkCreateDisplayPlaneSurfaceKHR CreateDisplayPlaneSurfaceKHR;
+
+    // ---- VK_KHR_xlib_surface extension commands
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    PFN_vkCreateXlibSurfaceKHR CreateXlibSurfaceKHR;
+#endif // VK_USE_PLATFORM_XLIB_KHR
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR GetPhysicalDeviceXlibPresentationSupportKHR;
+#endif // VK_USE_PLATFORM_XLIB_KHR
+
+    // ---- VK_KHR_xcb_surface extension commands
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    PFN_vkCreateXcbSurfaceKHR CreateXcbSurfaceKHR;
+#endif // VK_USE_PLATFORM_XCB_KHR
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR GetPhysicalDeviceXcbPresentationSupportKHR;
+#endif // VK_USE_PLATFORM_XCB_KHR
+
+    // ---- VK_KHR_wayland_surface extension commands
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    PFN_vkCreateWaylandSurfaceKHR CreateWaylandSurfaceKHR;
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR GetPhysicalDeviceWaylandPresentationSupportKHR;
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+
+    // ---- VK_KHR_android_surface extension commands
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    PFN_vkCreateAndroidSurfaceKHR CreateAndroidSurfaceKHR;
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+    // ---- VK_KHR_win32_surface extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkCreateWin32SurfaceKHR CreateWin32SurfaceKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR GetPhysicalDeviceWin32PresentationSupportKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_KHR_get_physical_device_properties2 extension commands
+    PFN_vkGetPhysicalDeviceFeatures2KHR GetPhysicalDeviceFeatures2KHR;
+    PFN_vkGetPhysicalDeviceProperties2KHR GetPhysicalDeviceProperties2KHR;
+    PFN_vkGetPhysicalDeviceFormatProperties2KHR GetPhysicalDeviceFormatProperties2KHR;
+    PFN_vkGetPhysicalDeviceImageFormatProperties2KHR GetPhysicalDeviceImageFormatProperties2KHR;
+    PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR GetPhysicalDeviceQueueFamilyProperties2KHR;
+    PFN_vkGetPhysicalDeviceMemoryProperties2KHR GetPhysicalDeviceMemoryProperties2KHR;
+    PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR GetPhysicalDeviceSparseImageFormatProperties2KHR;
+
+    // ---- VK_KHR_device_group_creation extension commands
+    PFN_vkEnumeratePhysicalDeviceGroupsKHR EnumeratePhysicalDeviceGroupsKHR;
+
+    // ---- VK_KHR_external_memory_capabilities extension commands
+    PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR GetPhysicalDeviceExternalBufferPropertiesKHR;
+
+    // ---- VK_KHR_external_semaphore_capabilities extension commands
+    PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR GetPhysicalDeviceExternalSemaphorePropertiesKHR;
+
+    // ---- VK_KHR_external_fence_capabilities extension commands
+    PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR GetPhysicalDeviceExternalFencePropertiesKHR;
+
+    // ---- VK_KHR_performance_query extension commands
+    PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR;
+    PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR;
+
+    // ---- VK_KHR_get_surface_capabilities2 extension commands
+    PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR GetPhysicalDeviceSurfaceCapabilities2KHR;
+    PFN_vkGetPhysicalDeviceSurfaceFormats2KHR GetPhysicalDeviceSurfaceFormats2KHR;
+
+    // ---- VK_KHR_get_display_properties2 extension commands
+    PFN_vkGetPhysicalDeviceDisplayProperties2KHR GetPhysicalDeviceDisplayProperties2KHR;
+    PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR GetPhysicalDeviceDisplayPlaneProperties2KHR;
+    PFN_vkGetDisplayModeProperties2KHR GetDisplayModeProperties2KHR;
+    PFN_vkGetDisplayPlaneCapabilities2KHR GetDisplayPlaneCapabilities2KHR;
+
+    // ---- VK_EXT_debug_report extension commands
+    PFN_vkCreateDebugReportCallbackEXT CreateDebugReportCallbackEXT;
+    PFN_vkDestroyDebugReportCallbackEXT DestroyDebugReportCallbackEXT;
+    PFN_vkDebugReportMessageEXT DebugReportMessageEXT;
+
+    // ---- VK_GGP_stream_descriptor_surface extension commands
+#ifdef VK_USE_PLATFORM_GGP
+    PFN_vkCreateStreamDescriptorSurfaceGGP CreateStreamDescriptorSurfaceGGP;
+#endif // VK_USE_PLATFORM_GGP
+
+    // ---- VK_NV_external_memory_capabilities extension commands
+    PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV GetPhysicalDeviceExternalImageFormatPropertiesNV;
+
+    // ---- VK_NN_vi_surface extension commands
+#ifdef VK_USE_PLATFORM_VI_NN
+    PFN_vkCreateViSurfaceNN CreateViSurfaceNN;
+#endif // VK_USE_PLATFORM_VI_NN
+
+    // ---- VK_NVX_device_generated_commands extension commands
+    PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX GetPhysicalDeviceGeneratedCommandsPropertiesNVX;
+
+    // ---- VK_EXT_direct_mode_display extension commands
+    PFN_vkReleaseDisplayEXT ReleaseDisplayEXT;
+
+    // ---- VK_EXT_acquire_xlib_display extension commands
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    PFN_vkAcquireXlibDisplayEXT AcquireXlibDisplayEXT;
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    PFN_vkGetRandROutputDisplayEXT GetRandROutputDisplayEXT;
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+
+    // ---- VK_EXT_display_surface_counter extension commands
+    PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT GetPhysicalDeviceSurfaceCapabilities2EXT;
+
+    // ---- VK_MVK_ios_surface extension commands
+#ifdef VK_USE_PLATFORM_IOS_MVK
+    PFN_vkCreateIOSSurfaceMVK CreateIOSSurfaceMVK;
+#endif // VK_USE_PLATFORM_IOS_MVK
+
+    // ---- VK_MVK_macos_surface extension commands
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+    PFN_vkCreateMacOSSurfaceMVK CreateMacOSSurfaceMVK;
+#endif // VK_USE_PLATFORM_MACOS_MVK
+
+    // ---- VK_EXT_debug_utils extension commands
+    PFN_vkCreateDebugUtilsMessengerEXT CreateDebugUtilsMessengerEXT;
+    PFN_vkDestroyDebugUtilsMessengerEXT DestroyDebugUtilsMessengerEXT;
+    PFN_vkSubmitDebugUtilsMessageEXT SubmitDebugUtilsMessageEXT;
+
+    // ---- VK_EXT_sample_locations extension commands
+    PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT GetPhysicalDeviceMultisamplePropertiesEXT;
+
+    // ---- VK_EXT_calibrated_timestamps extension commands
+    PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT GetPhysicalDeviceCalibrateableTimeDomainsEXT;
+
+    // ---- VK_FUCHSIA_imagepipe_surface extension commands
+#ifdef VK_USE_PLATFORM_FUCHSIA
+    PFN_vkCreateImagePipeSurfaceFUCHSIA CreateImagePipeSurfaceFUCHSIA;
+#endif // VK_USE_PLATFORM_FUCHSIA
+
+    // ---- VK_EXT_metal_surface extension commands
+#ifdef VK_USE_PLATFORM_METAL_EXT
+    PFN_vkCreateMetalSurfaceEXT CreateMetalSurfaceEXT;
+#endif // VK_USE_PLATFORM_METAL_EXT
+
+    // ---- VK_EXT_tooling_info extension commands
+    PFN_vkGetPhysicalDeviceToolPropertiesEXT GetPhysicalDeviceToolPropertiesEXT;
+
+    // ---- VK_NV_cooperative_matrix extension commands
+    PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV GetPhysicalDeviceCooperativeMatrixPropertiesNV;
+
+    // ---- VK_NV_coverage_reduction_mode extension commands
+    PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV;
+
+    // ---- VK_EXT_full_screen_exclusive extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT GetPhysicalDeviceSurfacePresentModes2EXT;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_EXT_headless_surface extension commands
+    PFN_vkCreateHeadlessSurfaceEXT CreateHeadlessSurfaceEXT;
+} VkLayerInstanceDispatchTable;
+
+// Device function pointer dispatch table
+typedef struct VkLayerDispatchTable_ {
+
+    // ---- Core 1_0 commands
+    PFN_vkGetDeviceProcAddr GetDeviceProcAddr;
+    PFN_vkDestroyDevice DestroyDevice;
+    PFN_vkGetDeviceQueue GetDeviceQueue;
+    PFN_vkQueueSubmit QueueSubmit;
+    PFN_vkQueueWaitIdle QueueWaitIdle;
+    PFN_vkDeviceWaitIdle DeviceWaitIdle;
+    PFN_vkAllocateMemory AllocateMemory;
+    PFN_vkFreeMemory FreeMemory;
+    PFN_vkMapMemory MapMemory;
+    PFN_vkUnmapMemory UnmapMemory;
+    PFN_vkFlushMappedMemoryRanges FlushMappedMemoryRanges;
+    PFN_vkInvalidateMappedMemoryRanges InvalidateMappedMemoryRanges;
+    PFN_vkGetDeviceMemoryCommitment GetDeviceMemoryCommitment;
+    PFN_vkBindBufferMemory BindBufferMemory;
+    PFN_vkBindImageMemory BindImageMemory;
+    PFN_vkGetBufferMemoryRequirements GetBufferMemoryRequirements;
+    PFN_vkGetImageMemoryRequirements GetImageMemoryRequirements;
+    PFN_vkGetImageSparseMemoryRequirements GetImageSparseMemoryRequirements;
+    PFN_vkQueueBindSparse QueueBindSparse;
+    PFN_vkCreateFence CreateFence;
+    PFN_vkDestroyFence DestroyFence;
+    PFN_vkResetFences ResetFences;
+    PFN_vkGetFenceStatus GetFenceStatus;
+    PFN_vkWaitForFences WaitForFences;
+    PFN_vkCreateSemaphore CreateSemaphore;
+    PFN_vkDestroySemaphore DestroySemaphore;
+    PFN_vkCreateEvent CreateEvent;
+    PFN_vkDestroyEvent DestroyEvent;
+    PFN_vkGetEventStatus GetEventStatus;
+    PFN_vkSetEvent SetEvent;
+    PFN_vkResetEvent ResetEvent;
+    PFN_vkCreateQueryPool CreateQueryPool;
+    PFN_vkDestroyQueryPool DestroyQueryPool;
+    PFN_vkGetQueryPoolResults GetQueryPoolResults;
+    PFN_vkCreateBuffer CreateBuffer;
+    PFN_vkDestroyBuffer DestroyBuffer;
+    PFN_vkCreateBufferView CreateBufferView;
+    PFN_vkDestroyBufferView DestroyBufferView;
+    PFN_vkCreateImage CreateImage;
+    PFN_vkDestroyImage DestroyImage;
+    PFN_vkGetImageSubresourceLayout GetImageSubresourceLayout;
+    PFN_vkCreateImageView CreateImageView;
+    PFN_vkDestroyImageView DestroyImageView;
+    PFN_vkCreateShaderModule CreateShaderModule;
+    PFN_vkDestroyShaderModule DestroyShaderModule;
+    PFN_vkCreatePipelineCache CreatePipelineCache;
+    PFN_vkDestroyPipelineCache DestroyPipelineCache;
+    PFN_vkGetPipelineCacheData GetPipelineCacheData;
+    PFN_vkMergePipelineCaches MergePipelineCaches;
+    PFN_vkCreateGraphicsPipelines CreateGraphicsPipelines;
+    PFN_vkCreateComputePipelines CreateComputePipelines;
+    PFN_vkDestroyPipeline DestroyPipeline;
+    PFN_vkCreatePipelineLayout CreatePipelineLayout;
+    PFN_vkDestroyPipelineLayout DestroyPipelineLayout;
+    PFN_vkCreateSampler CreateSampler;
+    PFN_vkDestroySampler DestroySampler;
+    PFN_vkCreateDescriptorSetLayout CreateDescriptorSetLayout;
+    PFN_vkDestroyDescriptorSetLayout DestroyDescriptorSetLayout;
+    PFN_vkCreateDescriptorPool CreateDescriptorPool;
+    PFN_vkDestroyDescriptorPool DestroyDescriptorPool;
+    PFN_vkResetDescriptorPool ResetDescriptorPool;
+    PFN_vkAllocateDescriptorSets AllocateDescriptorSets;
+    PFN_vkFreeDescriptorSets FreeDescriptorSets;
+    PFN_vkUpdateDescriptorSets UpdateDescriptorSets;
+    PFN_vkCreateFramebuffer CreateFramebuffer;
+    PFN_vkDestroyFramebuffer DestroyFramebuffer;
+    PFN_vkCreateRenderPass CreateRenderPass;
+    PFN_vkDestroyRenderPass DestroyRenderPass;
+    PFN_vkGetRenderAreaGranularity GetRenderAreaGranularity;
+    PFN_vkCreateCommandPool CreateCommandPool;
+    PFN_vkDestroyCommandPool DestroyCommandPool;
+    PFN_vkResetCommandPool ResetCommandPool;
+    PFN_vkAllocateCommandBuffers AllocateCommandBuffers;
+    PFN_vkFreeCommandBuffers FreeCommandBuffers;
+    PFN_vkBeginCommandBuffer BeginCommandBuffer;
+    PFN_vkEndCommandBuffer EndCommandBuffer;
+    PFN_vkResetCommandBuffer ResetCommandBuffer;
+    PFN_vkCmdBindPipeline CmdBindPipeline;
+    PFN_vkCmdSetViewport CmdSetViewport;
+    PFN_vkCmdSetScissor CmdSetScissor;
+    PFN_vkCmdSetLineWidth CmdSetLineWidth;
+    PFN_vkCmdSetDepthBias CmdSetDepthBias;
+    PFN_vkCmdSetBlendConstants CmdSetBlendConstants;
+    PFN_vkCmdSetDepthBounds CmdSetDepthBounds;
+    PFN_vkCmdSetStencilCompareMask CmdSetStencilCompareMask;
+    PFN_vkCmdSetStencilWriteMask CmdSetStencilWriteMask;
+    PFN_vkCmdSetStencilReference CmdSetStencilReference;
+    PFN_vkCmdBindDescriptorSets CmdBindDescriptorSets;
+    PFN_vkCmdBindIndexBuffer CmdBindIndexBuffer;
+    PFN_vkCmdBindVertexBuffers CmdBindVertexBuffers;
+    PFN_vkCmdDraw CmdDraw;
+    PFN_vkCmdDrawIndexed CmdDrawIndexed;
+    PFN_vkCmdDrawIndirect CmdDrawIndirect;
+    PFN_vkCmdDrawIndexedIndirect CmdDrawIndexedIndirect;
+    PFN_vkCmdDispatch CmdDispatch;
+    PFN_vkCmdDispatchIndirect CmdDispatchIndirect;
+    PFN_vkCmdCopyBuffer CmdCopyBuffer;
+    PFN_vkCmdCopyImage CmdCopyImage;
+    PFN_vkCmdBlitImage CmdBlitImage;
+    PFN_vkCmdCopyBufferToImage CmdCopyBufferToImage;
+    PFN_vkCmdCopyImageToBuffer CmdCopyImageToBuffer;
+    PFN_vkCmdUpdateBuffer CmdUpdateBuffer;
+    PFN_vkCmdFillBuffer CmdFillBuffer;
+    PFN_vkCmdClearColorImage CmdClearColorImage;
+    PFN_vkCmdClearDepthStencilImage CmdClearDepthStencilImage;
+    PFN_vkCmdClearAttachments CmdClearAttachments;
+    PFN_vkCmdResolveImage CmdResolveImage;
+    PFN_vkCmdSetEvent CmdSetEvent;
+    PFN_vkCmdResetEvent CmdResetEvent;
+    PFN_vkCmdWaitEvents CmdWaitEvents;
+    PFN_vkCmdPipelineBarrier CmdPipelineBarrier;
+    PFN_vkCmdBeginQuery CmdBeginQuery;
+    PFN_vkCmdEndQuery CmdEndQuery;
+    PFN_vkCmdResetQueryPool CmdResetQueryPool;
+    PFN_vkCmdWriteTimestamp CmdWriteTimestamp;
+    PFN_vkCmdCopyQueryPoolResults CmdCopyQueryPoolResults;
+    PFN_vkCmdPushConstants CmdPushConstants;
+    PFN_vkCmdBeginRenderPass CmdBeginRenderPass;
+    PFN_vkCmdNextSubpass CmdNextSubpass;
+    PFN_vkCmdEndRenderPass CmdEndRenderPass;
+    PFN_vkCmdExecuteCommands CmdExecuteCommands;
+
+    // ---- Core 1_1 commands
+    PFN_vkBindBufferMemory2 BindBufferMemory2;
+    PFN_vkBindImageMemory2 BindImageMemory2;
+    PFN_vkGetDeviceGroupPeerMemoryFeatures GetDeviceGroupPeerMemoryFeatures;
+    PFN_vkCmdSetDeviceMask CmdSetDeviceMask;
+    PFN_vkCmdDispatchBase CmdDispatchBase;
+    PFN_vkGetImageMemoryRequirements2 GetImageMemoryRequirements2;
+    PFN_vkGetBufferMemoryRequirements2 GetBufferMemoryRequirements2;
+    PFN_vkGetImageSparseMemoryRequirements2 GetImageSparseMemoryRequirements2;
+    PFN_vkTrimCommandPool TrimCommandPool;
+    PFN_vkGetDeviceQueue2 GetDeviceQueue2;
+    PFN_vkCreateSamplerYcbcrConversion CreateSamplerYcbcrConversion;
+    PFN_vkDestroySamplerYcbcrConversion DestroySamplerYcbcrConversion;
+    PFN_vkCreateDescriptorUpdateTemplate CreateDescriptorUpdateTemplate;
+    PFN_vkDestroyDescriptorUpdateTemplate DestroyDescriptorUpdateTemplate;
+    PFN_vkUpdateDescriptorSetWithTemplate UpdateDescriptorSetWithTemplate;
+    PFN_vkGetDescriptorSetLayoutSupport GetDescriptorSetLayoutSupport;
+
+    // ---- VK_KHR_swapchain extension commands
+    PFN_vkCreateSwapchainKHR CreateSwapchainKHR;
+    PFN_vkDestroySwapchainKHR DestroySwapchainKHR;
+    PFN_vkGetSwapchainImagesKHR GetSwapchainImagesKHR;
+    PFN_vkAcquireNextImageKHR AcquireNextImageKHR;
+    PFN_vkQueuePresentKHR QueuePresentKHR;
+    PFN_vkGetDeviceGroupPresentCapabilitiesKHR GetDeviceGroupPresentCapabilitiesKHR;
+    PFN_vkGetDeviceGroupSurfacePresentModesKHR GetDeviceGroupSurfacePresentModesKHR;
+    PFN_vkAcquireNextImage2KHR AcquireNextImage2KHR;
+
+    // ---- VK_KHR_display_swapchain extension commands
+    PFN_vkCreateSharedSwapchainsKHR CreateSharedSwapchainsKHR;
+
+    // ---- VK_KHR_device_group extension commands
+    PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR GetDeviceGroupPeerMemoryFeaturesKHR;
+    PFN_vkCmdSetDeviceMaskKHR CmdSetDeviceMaskKHR;
+    PFN_vkCmdDispatchBaseKHR CmdDispatchBaseKHR;
+
+    // ---- VK_KHR_maintenance1 extension commands
+    PFN_vkTrimCommandPoolKHR TrimCommandPoolKHR;
+
+    // ---- VK_KHR_external_memory_win32 extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetMemoryWin32HandleKHR GetMemoryWin32HandleKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetMemoryWin32HandlePropertiesKHR GetMemoryWin32HandlePropertiesKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_KHR_external_memory_fd extension commands
+    PFN_vkGetMemoryFdKHR GetMemoryFdKHR;
+    PFN_vkGetMemoryFdPropertiesKHR GetMemoryFdPropertiesKHR;
+
+    // ---- VK_KHR_external_semaphore_win32 extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkImportSemaphoreWin32HandleKHR ImportSemaphoreWin32HandleKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetSemaphoreWin32HandleKHR GetSemaphoreWin32HandleKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_KHR_external_semaphore_fd extension commands
+    PFN_vkImportSemaphoreFdKHR ImportSemaphoreFdKHR;
+    PFN_vkGetSemaphoreFdKHR GetSemaphoreFdKHR;
+
+    // ---- VK_KHR_push_descriptor extension commands
+    PFN_vkCmdPushDescriptorSetKHR CmdPushDescriptorSetKHR;
+    PFN_vkCmdPushDescriptorSetWithTemplateKHR CmdPushDescriptorSetWithTemplateKHR;
+
+    // ---- VK_KHR_descriptor_update_template extension commands
+    PFN_vkCreateDescriptorUpdateTemplateKHR CreateDescriptorUpdateTemplateKHR;
+    PFN_vkDestroyDescriptorUpdateTemplateKHR DestroyDescriptorUpdateTemplateKHR;
+    PFN_vkUpdateDescriptorSetWithTemplateKHR UpdateDescriptorSetWithTemplateKHR;
+
+    // ---- VK_KHR_create_renderpass2 extension commands
+    PFN_vkCreateRenderPass2KHR CreateRenderPass2KHR;
+    PFN_vkCmdBeginRenderPass2KHR CmdBeginRenderPass2KHR;
+    PFN_vkCmdNextSubpass2KHR CmdNextSubpass2KHR;
+    PFN_vkCmdEndRenderPass2KHR CmdEndRenderPass2KHR;
+
+    // ---- VK_KHR_shared_presentable_image extension commands
+    PFN_vkGetSwapchainStatusKHR GetSwapchainStatusKHR;
+
+    // ---- VK_KHR_external_fence_win32 extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkImportFenceWin32HandleKHR ImportFenceWin32HandleKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetFenceWin32HandleKHR GetFenceWin32HandleKHR;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_KHR_external_fence_fd extension commands
+    PFN_vkImportFenceFdKHR ImportFenceFdKHR;
+    PFN_vkGetFenceFdKHR GetFenceFdKHR;
+
+    // ---- VK_KHR_performance_query extension commands
+    PFN_vkAcquireProfilingLockKHR AcquireProfilingLockKHR;
+    PFN_vkReleaseProfilingLockKHR ReleaseProfilingLockKHR;
+
+    // ---- VK_KHR_get_memory_requirements2 extension commands
+    PFN_vkGetImageMemoryRequirements2KHR GetImageMemoryRequirements2KHR;
+    PFN_vkGetBufferMemoryRequirements2KHR GetBufferMemoryRequirements2KHR;
+    PFN_vkGetImageSparseMemoryRequirements2KHR GetImageSparseMemoryRequirements2KHR;
+
+    // ---- VK_KHR_sampler_ycbcr_conversion extension commands
+    PFN_vkCreateSamplerYcbcrConversionKHR CreateSamplerYcbcrConversionKHR;
+    PFN_vkDestroySamplerYcbcrConversionKHR DestroySamplerYcbcrConversionKHR;
+
+    // ---- VK_KHR_bind_memory2 extension commands
+    PFN_vkBindBufferMemory2KHR BindBufferMemory2KHR;
+    PFN_vkBindImageMemory2KHR BindImageMemory2KHR;
+
+    // ---- VK_KHR_maintenance3 extension commands
+    PFN_vkGetDescriptorSetLayoutSupportKHR GetDescriptorSetLayoutSupportKHR;
+
+    // ---- VK_KHR_draw_indirect_count extension commands
+    PFN_vkCmdDrawIndirectCountKHR CmdDrawIndirectCountKHR;
+    PFN_vkCmdDrawIndexedIndirectCountKHR CmdDrawIndexedIndirectCountKHR;
+
+    // ---- VK_KHR_timeline_semaphore extension commands
+    PFN_vkGetSemaphoreCounterValueKHR GetSemaphoreCounterValueKHR;
+    PFN_vkWaitSemaphoresKHR WaitSemaphoresKHR;
+    PFN_vkSignalSemaphoreKHR SignalSemaphoreKHR;
+
+    // ---- VK_KHR_buffer_device_address extension commands
+    PFN_vkGetBufferDeviceAddressKHR GetBufferDeviceAddressKHR;
+    PFN_vkGetBufferOpaqueCaptureAddressKHR GetBufferOpaqueCaptureAddressKHR;
+    PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR GetDeviceMemoryOpaqueCaptureAddressKHR;
+
+    // ---- VK_KHR_pipeline_executable_properties extension commands
+    PFN_vkGetPipelineExecutablePropertiesKHR GetPipelineExecutablePropertiesKHR;
+    PFN_vkGetPipelineExecutableStatisticsKHR GetPipelineExecutableStatisticsKHR;
+    PFN_vkGetPipelineExecutableInternalRepresentationsKHR GetPipelineExecutableInternalRepresentationsKHR;
+
+    // ---- VK_EXT_debug_marker extension commands
+    PFN_vkDebugMarkerSetObjectTagEXT DebugMarkerSetObjectTagEXT;
+    PFN_vkDebugMarkerSetObjectNameEXT DebugMarkerSetObjectNameEXT;
+    PFN_vkCmdDebugMarkerBeginEXT CmdDebugMarkerBeginEXT;
+    PFN_vkCmdDebugMarkerEndEXT CmdDebugMarkerEndEXT;
+    PFN_vkCmdDebugMarkerInsertEXT CmdDebugMarkerInsertEXT;
+
+    // ---- VK_EXT_transform_feedback extension commands
+    PFN_vkCmdBindTransformFeedbackBuffersEXT CmdBindTransformFeedbackBuffersEXT;
+    PFN_vkCmdBeginTransformFeedbackEXT CmdBeginTransformFeedbackEXT;
+    PFN_vkCmdEndTransformFeedbackEXT CmdEndTransformFeedbackEXT;
+    PFN_vkCmdBeginQueryIndexedEXT CmdBeginQueryIndexedEXT;
+    PFN_vkCmdEndQueryIndexedEXT CmdEndQueryIndexedEXT;
+    PFN_vkCmdDrawIndirectByteCountEXT CmdDrawIndirectByteCountEXT;
+
+    // ---- VK_NVX_image_view_handle extension commands
+    PFN_vkGetImageViewHandleNVX GetImageViewHandleNVX;
+
+    // ---- VK_AMD_draw_indirect_count extension commands
+    PFN_vkCmdDrawIndirectCountAMD CmdDrawIndirectCountAMD;
+    PFN_vkCmdDrawIndexedIndirectCountAMD CmdDrawIndexedIndirectCountAMD;
+
+    // ---- VK_AMD_shader_info extension commands
+    PFN_vkGetShaderInfoAMD GetShaderInfoAMD;
+
+    // ---- VK_NV_external_memory_win32 extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetMemoryWin32HandleNV GetMemoryWin32HandleNV;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_EXT_conditional_rendering extension commands
+    PFN_vkCmdBeginConditionalRenderingEXT CmdBeginConditionalRenderingEXT;
+    PFN_vkCmdEndConditionalRenderingEXT CmdEndConditionalRenderingEXT;
+
+    // ---- VK_NVX_device_generated_commands extension commands
+    PFN_vkCmdProcessCommandsNVX CmdProcessCommandsNVX;
+    PFN_vkCmdReserveSpaceForCommandsNVX CmdReserveSpaceForCommandsNVX;
+    PFN_vkCreateIndirectCommandsLayoutNVX CreateIndirectCommandsLayoutNVX;
+    PFN_vkDestroyIndirectCommandsLayoutNVX DestroyIndirectCommandsLayoutNVX;
+    PFN_vkCreateObjectTableNVX CreateObjectTableNVX;
+    PFN_vkDestroyObjectTableNVX DestroyObjectTableNVX;
+    PFN_vkRegisterObjectsNVX RegisterObjectsNVX;
+    PFN_vkUnregisterObjectsNVX UnregisterObjectsNVX;
+
+    // ---- VK_NV_clip_space_w_scaling extension commands
+    PFN_vkCmdSetViewportWScalingNV CmdSetViewportWScalingNV;
+
+    // ---- VK_EXT_display_control extension commands
+    PFN_vkDisplayPowerControlEXT DisplayPowerControlEXT;
+    PFN_vkRegisterDeviceEventEXT RegisterDeviceEventEXT;
+    PFN_vkRegisterDisplayEventEXT RegisterDisplayEventEXT;
+    PFN_vkGetSwapchainCounterEXT GetSwapchainCounterEXT;
+
+    // ---- VK_GOOGLE_display_timing extension commands
+    PFN_vkGetRefreshCycleDurationGOOGLE GetRefreshCycleDurationGOOGLE;
+    PFN_vkGetPastPresentationTimingGOOGLE GetPastPresentationTimingGOOGLE;
+
+    // ---- VK_EXT_discard_rectangles extension commands
+    PFN_vkCmdSetDiscardRectangleEXT CmdSetDiscardRectangleEXT;
+
+    // ---- VK_EXT_hdr_metadata extension commands
+    PFN_vkSetHdrMetadataEXT SetHdrMetadataEXT;
+
+    // ---- VK_EXT_debug_utils extension commands
+    PFN_vkSetDebugUtilsObjectNameEXT SetDebugUtilsObjectNameEXT;
+    PFN_vkSetDebugUtilsObjectTagEXT SetDebugUtilsObjectTagEXT;
+    PFN_vkQueueBeginDebugUtilsLabelEXT QueueBeginDebugUtilsLabelEXT;
+    PFN_vkQueueEndDebugUtilsLabelEXT QueueEndDebugUtilsLabelEXT;
+    PFN_vkQueueInsertDebugUtilsLabelEXT QueueInsertDebugUtilsLabelEXT;
+    PFN_vkCmdBeginDebugUtilsLabelEXT CmdBeginDebugUtilsLabelEXT;
+    PFN_vkCmdEndDebugUtilsLabelEXT CmdEndDebugUtilsLabelEXT;
+    PFN_vkCmdInsertDebugUtilsLabelEXT CmdInsertDebugUtilsLabelEXT;
+
+    // ---- VK_ANDROID_external_memory_android_hardware_buffer extension commands
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    PFN_vkGetAndroidHardwareBufferPropertiesANDROID GetAndroidHardwareBufferPropertiesANDROID;
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    PFN_vkGetMemoryAndroidHardwareBufferANDROID GetMemoryAndroidHardwareBufferANDROID;
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+    // ---- VK_EXT_sample_locations extension commands
+    PFN_vkCmdSetSampleLocationsEXT CmdSetSampleLocationsEXT;
+
+    // ---- VK_EXT_image_drm_format_modifier extension commands
+    PFN_vkGetImageDrmFormatModifierPropertiesEXT GetImageDrmFormatModifierPropertiesEXT;
+
+    // ---- VK_EXT_validation_cache extension commands
+    PFN_vkCreateValidationCacheEXT CreateValidationCacheEXT;
+    PFN_vkDestroyValidationCacheEXT DestroyValidationCacheEXT;
+    PFN_vkMergeValidationCachesEXT MergeValidationCachesEXT;
+    PFN_vkGetValidationCacheDataEXT GetValidationCacheDataEXT;
+
+    // ---- VK_NV_shading_rate_image extension commands
+    PFN_vkCmdBindShadingRateImageNV CmdBindShadingRateImageNV;
+    PFN_vkCmdSetViewportShadingRatePaletteNV CmdSetViewportShadingRatePaletteNV;
+    PFN_vkCmdSetCoarseSampleOrderNV CmdSetCoarseSampleOrderNV;
+
+    // ---- VK_NV_ray_tracing extension commands
+    PFN_vkCreateAccelerationStructureNV CreateAccelerationStructureNV;
+    PFN_vkDestroyAccelerationStructureNV DestroyAccelerationStructureNV;
+    PFN_vkGetAccelerationStructureMemoryRequirementsNV GetAccelerationStructureMemoryRequirementsNV;
+    PFN_vkBindAccelerationStructureMemoryNV BindAccelerationStructureMemoryNV;
+    PFN_vkCmdBuildAccelerationStructureNV CmdBuildAccelerationStructureNV;
+    PFN_vkCmdCopyAccelerationStructureNV CmdCopyAccelerationStructureNV;
+    PFN_vkCmdTraceRaysNV CmdTraceRaysNV;
+    PFN_vkCreateRayTracingPipelinesNV CreateRayTracingPipelinesNV;
+    PFN_vkGetRayTracingShaderGroupHandlesNV GetRayTracingShaderGroupHandlesNV;
+    PFN_vkGetAccelerationStructureHandleNV GetAccelerationStructureHandleNV;
+    PFN_vkCmdWriteAccelerationStructuresPropertiesNV CmdWriteAccelerationStructuresPropertiesNV;
+    PFN_vkCompileDeferredNV CompileDeferredNV;
+
+    // ---- VK_EXT_external_memory_host extension commands
+    PFN_vkGetMemoryHostPointerPropertiesEXT GetMemoryHostPointerPropertiesEXT;
+
+    // ---- VK_AMD_buffer_marker extension commands
+    PFN_vkCmdWriteBufferMarkerAMD CmdWriteBufferMarkerAMD;
+
+    // ---- VK_EXT_calibrated_timestamps extension commands
+    PFN_vkGetCalibratedTimestampsEXT GetCalibratedTimestampsEXT;
+
+    // ---- VK_NV_mesh_shader extension commands
+    PFN_vkCmdDrawMeshTasksNV CmdDrawMeshTasksNV;
+    PFN_vkCmdDrawMeshTasksIndirectNV CmdDrawMeshTasksIndirectNV;
+    PFN_vkCmdDrawMeshTasksIndirectCountNV CmdDrawMeshTasksIndirectCountNV;
+
+    // ---- VK_NV_scissor_exclusive extension commands
+    PFN_vkCmdSetExclusiveScissorNV CmdSetExclusiveScissorNV;
+
+    // ---- VK_NV_device_diagnostic_checkpoints extension commands
+    PFN_vkCmdSetCheckpointNV CmdSetCheckpointNV;
+    PFN_vkGetQueueCheckpointDataNV GetQueueCheckpointDataNV;
+
+    // ---- VK_INTEL_performance_query extension commands
+    PFN_vkInitializePerformanceApiINTEL InitializePerformanceApiINTEL;
+    PFN_vkUninitializePerformanceApiINTEL UninitializePerformanceApiINTEL;
+    PFN_vkCmdSetPerformanceMarkerINTEL CmdSetPerformanceMarkerINTEL;
+    PFN_vkCmdSetPerformanceStreamMarkerINTEL CmdSetPerformanceStreamMarkerINTEL;
+    PFN_vkCmdSetPerformanceOverrideINTEL CmdSetPerformanceOverrideINTEL;
+    PFN_vkAcquirePerformanceConfigurationINTEL AcquirePerformanceConfigurationINTEL;
+    PFN_vkReleasePerformanceConfigurationINTEL ReleasePerformanceConfigurationINTEL;
+    PFN_vkQueueSetPerformanceConfigurationINTEL QueueSetPerformanceConfigurationINTEL;
+    PFN_vkGetPerformanceParameterINTEL GetPerformanceParameterINTEL;
+
+    // ---- VK_AMD_display_native_hdr extension commands
+    PFN_vkSetLocalDimmingAMD SetLocalDimmingAMD;
+
+    // ---- VK_EXT_buffer_device_address extension commands
+    PFN_vkGetBufferDeviceAddressEXT GetBufferDeviceAddressEXT;
+
+    // ---- VK_EXT_full_screen_exclusive extension commands
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkAcquireFullScreenExclusiveModeEXT AcquireFullScreenExclusiveModeEXT;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkReleaseFullScreenExclusiveModeEXT ReleaseFullScreenExclusiveModeEXT;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetDeviceGroupSurfacePresentModes2EXT GetDeviceGroupSurfacePresentModes2EXT;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+    // ---- VK_EXT_line_rasterization extension commands
+    PFN_vkCmdSetLineStippleEXT CmdSetLineStippleEXT;
+
+    // ---- VK_EXT_host_query_reset extension commands
+    PFN_vkResetQueryPoolEXT ResetQueryPoolEXT;
+} VkLayerDispatchTable;
+
+
diff --git a/src/third_party/vulkan-validation-layers/src/layers/generated/vk_object_types.h b/src/third_party/vulkan-validation-layers/src/layers/generated/vk_object_types.h
new file mode 100644
index 0000000..bbb6aac
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/generated/vk_object_types.h
@@ -0,0 +1,816 @@
+// *** THIS FILE IS GENERATED - DO NOT EDIT ***
+// See helper_file_generator.py for modifications
+
+
+/***************************************************************************
+ *
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
+ * Author: Tobin Ehlis <tobine@google.com>
+ * Author: Chris Forbes <chrisforbes@google.com>
+ * Author: John Zulauf <jzulauf@lunarg.com>
+ *
+ ****************************************************************************/
+
+
+#pragma once
+
+#include "cast_utils.h"
+
+// Object Type enum for validation layer internal object handling
+typedef enum VulkanObjectType {
+    kVulkanObjectTypeUnknown = 0,
+    kVulkanObjectTypeInstance = 1,
+    kVulkanObjectTypePhysicalDevice = 2,
+    kVulkanObjectTypeDevice = 3,
+    kVulkanObjectTypeQueue = 4,
+    kVulkanObjectTypeSemaphore = 5,
+    kVulkanObjectTypeCommandBuffer = 6,
+    kVulkanObjectTypeFence = 7,
+    kVulkanObjectTypeDeviceMemory = 8,
+    kVulkanObjectTypeBuffer = 9,
+    kVulkanObjectTypeImage = 10,
+    kVulkanObjectTypeEvent = 11,
+    kVulkanObjectTypeQueryPool = 12,
+    kVulkanObjectTypeBufferView = 13,
+    kVulkanObjectTypeImageView = 14,
+    kVulkanObjectTypeShaderModule = 15,
+    kVulkanObjectTypePipelineCache = 16,
+    kVulkanObjectTypePipelineLayout = 17,
+    kVulkanObjectTypeRenderPass = 18,
+    kVulkanObjectTypePipeline = 19,
+    kVulkanObjectTypeDescriptorSetLayout = 20,
+    kVulkanObjectTypeSampler = 21,
+    kVulkanObjectTypeDescriptorPool = 22,
+    kVulkanObjectTypeDescriptorSet = 23,
+    kVulkanObjectTypeFramebuffer = 24,
+    kVulkanObjectTypeCommandPool = 25,
+    kVulkanObjectTypeSamplerYcbcrConversion = 26,
+    kVulkanObjectTypeDescriptorUpdateTemplate = 27,
+    kVulkanObjectTypeSurfaceKHR = 28,
+    kVulkanObjectTypeSwapchainKHR = 29,
+    kVulkanObjectTypeDisplayKHR = 30,
+    kVulkanObjectTypeDisplayModeKHR = 31,
+    kVulkanObjectTypeDebugReportCallbackEXT = 32,
+    kVulkanObjectTypeObjectTableNVX = 33,
+    kVulkanObjectTypeIndirectCommandsLayoutNVX = 34,
+    kVulkanObjectTypeDebugUtilsMessengerEXT = 35,
+    kVulkanObjectTypeValidationCacheEXT = 36,
+    kVulkanObjectTypeAccelerationStructureNV = 37,
+    kVulkanObjectTypePerformanceConfigurationINTEL = 38,
+    kVulkanObjectTypeMax = 39,
+    // Aliases for backwards compatibility of "promoted" types
+    kVulkanObjectTypeDescriptorUpdateTemplateKHR = kVulkanObjectTypeDescriptorUpdateTemplate,
+    kVulkanObjectTypeSamplerYcbcrConversionKHR = kVulkanObjectTypeSamplerYcbcrConversion,
+} VulkanObjectType;
+
+// Array of object name strings for OBJECT_TYPE enum conversion
+static const char * const object_string[kVulkanObjectTypeMax] = {
+    "VkNonDispatchableHandle",
+    "VkInstance",
+    "VkPhysicalDevice",
+    "VkDevice",
+    "VkQueue",
+    "VkSemaphore",
+    "VkCommandBuffer",
+    "VkFence",
+    "VkDeviceMemory",
+    "VkBuffer",
+    "VkImage",
+    "VkEvent",
+    "VkQueryPool",
+    "VkBufferView",
+    "VkImageView",
+    "VkShaderModule",
+    "VkPipelineCache",
+    "VkPipelineLayout",
+    "VkRenderPass",
+    "VkPipeline",
+    "VkDescriptorSetLayout",
+    "VkSampler",
+    "VkDescriptorPool",
+    "VkDescriptorSet",
+    "VkFramebuffer",
+    "VkCommandPool",
+    "VkSamplerYcbcrConversion",
+    "VkDescriptorUpdateTemplate",
+    "VkSurfaceKHR",
+    "VkSwapchainKHR",
+    "VkDisplayKHR",
+    "VkDisplayModeKHR",
+    "VkDebugReportCallbackEXT",
+    "VkObjectTableNVX",
+    "VkIndirectCommandsLayoutNVX",
+    "VkDebugUtilsMessengerEXT",
+    "VkValidationCacheEXT",
+    "VkAccelerationStructureNV",
+    "VkPerformanceConfigurationINTEL",
+};
+
+// Helper array to get Vulkan VK_EXT_debug_report object type enum from the internal layers version
+const VkDebugReportObjectTypeEXT get_debug_report_enum[] = {
+    VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, // kVulkanObjectTypeUnknown
+    VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,   // kVulkanObjectTypeInstance
+    VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,   // kVulkanObjectTypePhysicalDevice
+    VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,   // kVulkanObjectTypeDevice
+    VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,   // kVulkanObjectTypeQueue
+    VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,   // kVulkanObjectTypeSemaphore
+    VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,   // kVulkanObjectTypeCommandBuffer
+    VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,   // kVulkanObjectTypeFence
+    VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,   // kVulkanObjectTypeDeviceMemory
+    VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,   // kVulkanObjectTypeBuffer
+    VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,   // kVulkanObjectTypeImage
+    VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT,   // kVulkanObjectTypeEvent
+    VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT,   // kVulkanObjectTypeQueryPool
+    VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT,   // kVulkanObjectTypeBufferView
+    VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,   // kVulkanObjectTypeImageView
+    VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,   // kVulkanObjectTypeShaderModule
+    VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT,   // kVulkanObjectTypePipelineCache
+    VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,   // kVulkanObjectTypePipelineLayout
+    VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,   // kVulkanObjectTypeRenderPass
+    VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,   // kVulkanObjectTypePipeline
+    VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,   // kVulkanObjectTypeDescriptorSetLayout
+    VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT,   // kVulkanObjectTypeSampler
+    VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,   // kVulkanObjectTypeDescriptorPool
+    VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,   // kVulkanObjectTypeDescriptorSet
+    VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT,   // kVulkanObjectTypeFramebuffer
+    VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT,   // kVulkanObjectTypeCommandPool
+    VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT,   // kVulkanObjectTypeSamplerYcbcrConversion
+    VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT,   // kVulkanObjectTypeDescriptorUpdateTemplate
+    VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT,   // kVulkanObjectTypeSurfaceKHR
+    VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,   // kVulkanObjectTypeSwapchainKHR
+    VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT,   // kVulkanObjectTypeDisplayKHR
+    VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT,   // kVulkanObjectTypeDisplayModeKHR
+    VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT,   // kVulkanObjectTypeDebugReportCallbackEXT
+    VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT,   // kVulkanObjectTypeObjectTableNVX
+    VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT,   // kVulkanObjectTypeIndirectCommandsLayoutNVX
+    VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,   // kVulkanObjectTypeDebugUtilsMessengerEXT
+    VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT,   // kVulkanObjectTypeValidationCacheEXT
+    VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,   // kVulkanObjectTypeAccelerationStructureNV
+    VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,   // kVulkanObjectTypePerformanceConfigurationINTEL
+};
+
+// Helper array to get Official Vulkan VkObjectType enum from the internal layers version
+const VkObjectType get_object_type_enum[] = {
+    VK_OBJECT_TYPE_UNKNOWN, // kVulkanObjectTypeUnknown
+    VK_OBJECT_TYPE_INSTANCE,   // kVulkanObjectTypeInstance
+    VK_OBJECT_TYPE_PHYSICAL_DEVICE,   // kVulkanObjectTypePhysicalDevice
+    VK_OBJECT_TYPE_DEVICE,   // kVulkanObjectTypeDevice
+    VK_OBJECT_TYPE_QUEUE,   // kVulkanObjectTypeQueue
+    VK_OBJECT_TYPE_SEMAPHORE,   // kVulkanObjectTypeSemaphore
+    VK_OBJECT_TYPE_COMMAND_BUFFER,   // kVulkanObjectTypeCommandBuffer
+    VK_OBJECT_TYPE_FENCE,   // kVulkanObjectTypeFence
+    VK_OBJECT_TYPE_DEVICE_MEMORY,   // kVulkanObjectTypeDeviceMemory
+    VK_OBJECT_TYPE_BUFFER,   // kVulkanObjectTypeBuffer
+    VK_OBJECT_TYPE_IMAGE,   // kVulkanObjectTypeImage
+    VK_OBJECT_TYPE_EVENT,   // kVulkanObjectTypeEvent
+    VK_OBJECT_TYPE_QUERY_POOL,   // kVulkanObjectTypeQueryPool
+    VK_OBJECT_TYPE_BUFFER_VIEW,   // kVulkanObjectTypeBufferView
+    VK_OBJECT_TYPE_IMAGE_VIEW,   // kVulkanObjectTypeImageView
+    VK_OBJECT_TYPE_SHADER_MODULE,   // kVulkanObjectTypeShaderModule
+    VK_OBJECT_TYPE_PIPELINE_CACHE,   // kVulkanObjectTypePipelineCache
+    VK_OBJECT_TYPE_PIPELINE_LAYOUT,   // kVulkanObjectTypePipelineLayout
+    VK_OBJECT_TYPE_RENDER_PASS,   // kVulkanObjectTypeRenderPass
+    VK_OBJECT_TYPE_PIPELINE,   // kVulkanObjectTypePipeline
+    VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT,   // kVulkanObjectTypeDescriptorSetLayout
+    VK_OBJECT_TYPE_SAMPLER,   // kVulkanObjectTypeSampler
+    VK_OBJECT_TYPE_DESCRIPTOR_POOL,   // kVulkanObjectTypeDescriptorPool
+    VK_OBJECT_TYPE_DESCRIPTOR_SET,   // kVulkanObjectTypeDescriptorSet
+    VK_OBJECT_TYPE_FRAMEBUFFER,   // kVulkanObjectTypeFramebuffer
+    VK_OBJECT_TYPE_COMMAND_POOL,   // kVulkanObjectTypeCommandPool
+    VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,   // kVulkanObjectTypeSamplerYcbcrConversion
+    VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE,   // kVulkanObjectTypeDescriptorUpdateTemplate
+    VK_OBJECT_TYPE_SURFACE_KHR,   // kVulkanObjectTypeSurfaceKHR
+    VK_OBJECT_TYPE_SWAPCHAIN_KHR,   // kVulkanObjectTypeSwapchainKHR
+    VK_OBJECT_TYPE_DISPLAY_KHR,   // kVulkanObjectTypeDisplayKHR
+    VK_OBJECT_TYPE_DISPLAY_MODE_KHR,   // kVulkanObjectTypeDisplayModeKHR
+    VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT,   // kVulkanObjectTypeDebugReportCallbackEXT
+    VK_OBJECT_TYPE_OBJECT_TABLE_NVX,   // kVulkanObjectTypeObjectTableNVX
+    VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX,   // kVulkanObjectTypeIndirectCommandsLayoutNVX
+    VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT,   // kVulkanObjectTypeDebugUtilsMessengerEXT
+    VK_OBJECT_TYPE_VALIDATION_CACHE_EXT,   // kVulkanObjectTypeValidationCacheEXT
+    VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV,   // kVulkanObjectTypeAccelerationStructureNV
+    VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL,   // kVulkanObjectTypePerformanceConfigurationINTEL
+};
+
+// Helper function to get internal layers object ids from the official Vulkan VkObjectType enum
+static inline VulkanObjectType ConvertCoreObjectToVulkanObject(VkObjectType vulkan_object_type) {
+    switch (vulkan_object_type) {
+        case VK_OBJECT_TYPE_INSTANCE: return kVulkanObjectTypeInstance;
+        case VK_OBJECT_TYPE_PHYSICAL_DEVICE: return kVulkanObjectTypePhysicalDevice;
+        case VK_OBJECT_TYPE_DEVICE: return kVulkanObjectTypeDevice;
+        case VK_OBJECT_TYPE_QUEUE: return kVulkanObjectTypeQueue;
+        case VK_OBJECT_TYPE_SEMAPHORE: return kVulkanObjectTypeSemaphore;
+        case VK_OBJECT_TYPE_COMMAND_BUFFER: return kVulkanObjectTypeCommandBuffer;
+        case VK_OBJECT_TYPE_FENCE: return kVulkanObjectTypeFence;
+        case VK_OBJECT_TYPE_DEVICE_MEMORY: return kVulkanObjectTypeDeviceMemory;
+        case VK_OBJECT_TYPE_BUFFER: return kVulkanObjectTypeBuffer;
+        case VK_OBJECT_TYPE_IMAGE: return kVulkanObjectTypeImage;
+        case VK_OBJECT_TYPE_EVENT: return kVulkanObjectTypeEvent;
+        case VK_OBJECT_TYPE_QUERY_POOL: return kVulkanObjectTypeQueryPool;
+        case VK_OBJECT_TYPE_BUFFER_VIEW: return kVulkanObjectTypeBufferView;
+        case VK_OBJECT_TYPE_IMAGE_VIEW: return kVulkanObjectTypeImageView;
+        case VK_OBJECT_TYPE_SHADER_MODULE: return kVulkanObjectTypeShaderModule;
+        case VK_OBJECT_TYPE_PIPELINE_CACHE: return kVulkanObjectTypePipelineCache;
+        case VK_OBJECT_TYPE_PIPELINE_LAYOUT: return kVulkanObjectTypePipelineLayout;
+        case VK_OBJECT_TYPE_RENDER_PASS: return kVulkanObjectTypeRenderPass;
+        case VK_OBJECT_TYPE_PIPELINE: return kVulkanObjectTypePipeline;
+        case VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT: return kVulkanObjectTypeDescriptorSetLayout;
+        case VK_OBJECT_TYPE_SAMPLER: return kVulkanObjectTypeSampler;
+        case VK_OBJECT_TYPE_DESCRIPTOR_POOL: return kVulkanObjectTypeDescriptorPool;
+        case VK_OBJECT_TYPE_DESCRIPTOR_SET: return kVulkanObjectTypeDescriptorSet;
+        case VK_OBJECT_TYPE_FRAMEBUFFER: return kVulkanObjectTypeFramebuffer;
+        case VK_OBJECT_TYPE_COMMAND_POOL: return kVulkanObjectTypeCommandPool;
+        case VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION: return kVulkanObjectTypeSamplerYcbcrConversion;
+        case VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE: return kVulkanObjectTypeDescriptorUpdateTemplate;
+        case VK_OBJECT_TYPE_SURFACE_KHR: return kVulkanObjectTypeSurfaceKHR;
+        case VK_OBJECT_TYPE_SWAPCHAIN_KHR: return kVulkanObjectTypeSwapchainKHR;
+        case VK_OBJECT_TYPE_DISPLAY_KHR: return kVulkanObjectTypeDisplayKHR;
+        case VK_OBJECT_TYPE_DISPLAY_MODE_KHR: return kVulkanObjectTypeDisplayModeKHR;
+        case VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT: return kVulkanObjectTypeDebugReportCallbackEXT;
+        case VK_OBJECT_TYPE_OBJECT_TABLE_NVX: return kVulkanObjectTypeObjectTableNVX;
+        case VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX: return kVulkanObjectTypeIndirectCommandsLayoutNVX;
+        case VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT: return kVulkanObjectTypeDebugUtilsMessengerEXT;
+        case VK_OBJECT_TYPE_VALIDATION_CACHE_EXT: return kVulkanObjectTypeValidationCacheEXT;
+        case VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV: return kVulkanObjectTypeAccelerationStructureNV;
+        case VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL: return kVulkanObjectTypePerformanceConfigurationINTEL;
+        default: return kVulkanObjectTypeUnknown;
+    }
+};
+
+static inline VkObjectType convertDebugReportObjectToCoreObject(VkDebugReportObjectTypeEXT debug_report_obj) {
+    switch (debug_report_obj) {
+        case VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT: return VK_OBJECT_TYPE_UNKNOWN;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT: return VK_OBJECT_TYPE_INSTANCE;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT: return VK_OBJECT_TYPE_PHYSICAL_DEVICE;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT: return VK_OBJECT_TYPE_DEVICE;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT: return VK_OBJECT_TYPE_QUEUE;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT: return VK_OBJECT_TYPE_SEMAPHORE;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT: return VK_OBJECT_TYPE_COMMAND_BUFFER;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT: return VK_OBJECT_TYPE_FENCE;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT: return VK_OBJECT_TYPE_DEVICE_MEMORY;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT: return VK_OBJECT_TYPE_BUFFER;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT: return VK_OBJECT_TYPE_IMAGE;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT: return VK_OBJECT_TYPE_EVENT;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT: return VK_OBJECT_TYPE_QUERY_POOL;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT: return VK_OBJECT_TYPE_BUFFER_VIEW;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT: return VK_OBJECT_TYPE_IMAGE_VIEW;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT: return VK_OBJECT_TYPE_SHADER_MODULE;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT: return VK_OBJECT_TYPE_PIPELINE_CACHE;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT: return VK_OBJECT_TYPE_PIPELINE_LAYOUT;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT: return VK_OBJECT_TYPE_RENDER_PASS;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT: return VK_OBJECT_TYPE_PIPELINE;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT: return VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT: return VK_OBJECT_TYPE_SAMPLER;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT: return VK_OBJECT_TYPE_DESCRIPTOR_POOL;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT: return VK_OBJECT_TYPE_DESCRIPTOR_SET;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT: return VK_OBJECT_TYPE_FRAMEBUFFER;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT: return VK_OBJECT_TYPE_COMMAND_POOL;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT: return VK_OBJECT_TYPE_SURFACE_KHR;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT: return VK_OBJECT_TYPE_SWAPCHAIN_KHR;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT: return VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT: return VK_OBJECT_TYPE_DISPLAY_KHR;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT: return VK_OBJECT_TYPE_DISPLAY_MODE_KHR;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT: return VK_OBJECT_TYPE_OBJECT_TABLE_NVX;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT: return VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT: return VK_OBJECT_TYPE_VALIDATION_CACHE_EXT;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT: return VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT: return VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE;
+        case VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT: return VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV;
+        default: return VK_OBJECT_TYPE_UNKNOWN;
+    }
+}
+
+static inline VkDebugReportObjectTypeEXT convertCoreObjectToDebugReportObject(VkObjectType core_report_obj) {
+    switch (core_report_obj) {
+        case VK_OBJECT_TYPE_UNKNOWN: return VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT;
+        case VK_OBJECT_TYPE_INSTANCE: return VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT;
+        case VK_OBJECT_TYPE_PHYSICAL_DEVICE: return VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT;
+        case VK_OBJECT_TYPE_DEVICE: return VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT;
+        case VK_OBJECT_TYPE_QUEUE: return VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT;
+        case VK_OBJECT_TYPE_SEMAPHORE: return VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT;
+        case VK_OBJECT_TYPE_COMMAND_BUFFER: return VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT;
+        case VK_OBJECT_TYPE_FENCE: return VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT;
+        case VK_OBJECT_TYPE_DEVICE_MEMORY: return VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT;
+        case VK_OBJECT_TYPE_BUFFER: return VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT;
+        case VK_OBJECT_TYPE_IMAGE: return VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT;
+        case VK_OBJECT_TYPE_EVENT: return VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT;
+        case VK_OBJECT_TYPE_QUERY_POOL: return VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT;
+        case VK_OBJECT_TYPE_BUFFER_VIEW: return VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT;
+        case VK_OBJECT_TYPE_IMAGE_VIEW: return VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT;
+        case VK_OBJECT_TYPE_SHADER_MODULE: return VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT;
+        case VK_OBJECT_TYPE_PIPELINE_CACHE: return VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT;
+        case VK_OBJECT_TYPE_PIPELINE_LAYOUT: return VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT;
+        case VK_OBJECT_TYPE_RENDER_PASS: return VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT;
+        case VK_OBJECT_TYPE_PIPELINE: return VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT;
+        case VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT: return VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT;
+        case VK_OBJECT_TYPE_SAMPLER: return VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT;
+        case VK_OBJECT_TYPE_DESCRIPTOR_POOL: return VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT;
+        case VK_OBJECT_TYPE_DESCRIPTOR_SET: return VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT;
+        case VK_OBJECT_TYPE_FRAMEBUFFER: return VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT;
+        case VK_OBJECT_TYPE_COMMAND_POOL: return VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT;
+        case VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION: return VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT;
+        case VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE: return VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT;
+        case VK_OBJECT_TYPE_SURFACE_KHR: return VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT;
+        case VK_OBJECT_TYPE_SWAPCHAIN_KHR: return VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT;
+        case VK_OBJECT_TYPE_DISPLAY_KHR: return VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT;
+        case VK_OBJECT_TYPE_DISPLAY_MODE_KHR: return VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT;
+        case VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT: return VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT;
+        case VK_OBJECT_TYPE_OBJECT_TABLE_NVX: return VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT;
+        case VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX: return VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT;
+        case VK_OBJECT_TYPE_VALIDATION_CACHE_EXT: return VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT;
+        case VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV: return VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT;
+        default: return VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT;
+    }
+}
+
+// Traits objects from each type statically map from Vk<handleType> to the various enums
+template <typename VkType> struct VkHandleInfo {};
+template <VulkanObjectType id> struct VulkanObjectTypeInfo {};
+
+// The following line must match the vulkan_core.h condition guarding VK_DEFINE_NON_DISPATCHABLE_HANDLE
+#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__)) || defined(_M_X64) || defined(__ia64) || \
+    defined(_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
+#define TYPESAFE_NONDISPATCHABLE_HANDLES
+#else
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkNonDispatchableHandle)
+
+template <> struct VkHandleInfo<VkNonDispatchableHandle> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeUnknown;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_UNKNOWN;
+    static const char* Typename() {
+        return "VkNonDispatchableHandle";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeUnknown> {
+    typedef VkNonDispatchableHandle Type;
+};
+
+#endif //  VK_DEFINE_HANDLE logic duplication
+template <> struct VkHandleInfo<VkCommandBuffer> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeCommandBuffer;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_COMMAND_BUFFER;
+    static const char* Typename() {
+        return "VkCommandBuffer";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeCommandBuffer> {
+    typedef VkCommandBuffer Type;
+};
+template <> struct VkHandleInfo<VkDevice> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeDevice;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_DEVICE;
+    static const char* Typename() {
+        return "VkDevice";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeDevice> {
+    typedef VkDevice Type;
+};
+template <> struct VkHandleInfo<VkInstance> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeInstance;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_INSTANCE;
+    static const char* Typename() {
+        return "VkInstance";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeInstance> {
+    typedef VkInstance Type;
+};
+template <> struct VkHandleInfo<VkPhysicalDevice> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypePhysicalDevice;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_PHYSICAL_DEVICE;
+    static const char* Typename() {
+        return "VkPhysicalDevice";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypePhysicalDevice> {
+    typedef VkPhysicalDevice Type;
+};
+template <> struct VkHandleInfo<VkQueue> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeQueue;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_QUEUE;
+    static const char* Typename() {
+        return "VkQueue";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeQueue> {
+    typedef VkQueue Type;
+};
+#ifdef TYPESAFE_NONDISPATCHABLE_HANDLES
+template <> struct VkHandleInfo<VkAccelerationStructureNV> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeAccelerationStructureNV;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV;
+    static const char* Typename() {
+        return "VkAccelerationStructureNV";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeAccelerationStructureNV> {
+    typedef VkAccelerationStructureNV Type;
+};
+template <> struct VkHandleInfo<VkBuffer> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeBuffer;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_BUFFER;
+    static const char* Typename() {
+        return "VkBuffer";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeBuffer> {
+    typedef VkBuffer Type;
+};
+template <> struct VkHandleInfo<VkBufferView> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeBufferView;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_BUFFER_VIEW;
+    static const char* Typename() {
+        return "VkBufferView";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeBufferView> {
+    typedef VkBufferView Type;
+};
+template <> struct VkHandleInfo<VkCommandPool> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeCommandPool;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_COMMAND_POOL;
+    static const char* Typename() {
+        return "VkCommandPool";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeCommandPool> {
+    typedef VkCommandPool Type;
+};
+template <> struct VkHandleInfo<VkDebugReportCallbackEXT> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeDebugReportCallbackEXT;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT;
+    static const char* Typename() {
+        return "VkDebugReportCallbackEXT";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeDebugReportCallbackEXT> {
+    typedef VkDebugReportCallbackEXT Type;
+};
+template <> struct VkHandleInfo<VkDebugUtilsMessengerEXT> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeDebugUtilsMessengerEXT;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT;
+    static const char* Typename() {
+        return "VkDebugUtilsMessengerEXT";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeDebugUtilsMessengerEXT> {
+    typedef VkDebugUtilsMessengerEXT Type;
+};
+template <> struct VkHandleInfo<VkDescriptorPool> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeDescriptorPool;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_DESCRIPTOR_POOL;
+    static const char* Typename() {
+        return "VkDescriptorPool";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeDescriptorPool> {
+    typedef VkDescriptorPool Type;
+};
+template <> struct VkHandleInfo<VkDescriptorSet> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeDescriptorSet;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_DESCRIPTOR_SET;
+    static const char* Typename() {
+        return "VkDescriptorSet";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeDescriptorSet> {
+    typedef VkDescriptorSet Type;
+};
+template <> struct VkHandleInfo<VkDescriptorSetLayout> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeDescriptorSetLayout;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT;
+    static const char* Typename() {
+        return "VkDescriptorSetLayout";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeDescriptorSetLayout> {
+    typedef VkDescriptorSetLayout Type;
+};
+template <> struct VkHandleInfo<VkDescriptorUpdateTemplate> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeDescriptorUpdateTemplate;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE;
+    static const char* Typename() {
+        return "VkDescriptorUpdateTemplate";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeDescriptorUpdateTemplate> {
+    typedef VkDescriptorUpdateTemplate Type;
+};
+template <> struct VkHandleInfo<VkDeviceMemory> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeDeviceMemory;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_DEVICE_MEMORY;
+    static const char* Typename() {
+        return "VkDeviceMemory";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeDeviceMemory> {
+    typedef VkDeviceMemory Type;
+};
+template <> struct VkHandleInfo<VkDisplayKHR> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeDisplayKHR;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_DISPLAY_KHR;
+    static const char* Typename() {
+        return "VkDisplayKHR";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeDisplayKHR> {
+    typedef VkDisplayKHR Type;
+};
+template <> struct VkHandleInfo<VkDisplayModeKHR> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeDisplayModeKHR;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_DISPLAY_MODE_KHR;
+    static const char* Typename() {
+        return "VkDisplayModeKHR";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeDisplayModeKHR> {
+    typedef VkDisplayModeKHR Type;
+};
+template <> struct VkHandleInfo<VkEvent> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeEvent;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_EVENT;
+    static const char* Typename() {
+        return "VkEvent";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeEvent> {
+    typedef VkEvent Type;
+};
+template <> struct VkHandleInfo<VkFence> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeFence;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_FENCE;
+    static const char* Typename() {
+        return "VkFence";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeFence> {
+    typedef VkFence Type;
+};
+template <> struct VkHandleInfo<VkFramebuffer> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeFramebuffer;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_FRAMEBUFFER;
+    static const char* Typename() {
+        return "VkFramebuffer";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeFramebuffer> {
+    typedef VkFramebuffer Type;
+};
+template <> struct VkHandleInfo<VkImage> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeImage;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_IMAGE;
+    static const char* Typename() {
+        return "VkImage";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeImage> {
+    typedef VkImage Type;
+};
+template <> struct VkHandleInfo<VkImageView> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeImageView;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_IMAGE_VIEW;
+    static const char* Typename() {
+        return "VkImageView";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeImageView> {
+    typedef VkImageView Type;
+};
+template <> struct VkHandleInfo<VkIndirectCommandsLayoutNVX> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeIndirectCommandsLayoutNVX;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX;
+    static const char* Typename() {
+        return "VkIndirectCommandsLayoutNVX";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeIndirectCommandsLayoutNVX> {
+    typedef VkIndirectCommandsLayoutNVX Type;
+};
+template <> struct VkHandleInfo<VkObjectTableNVX> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeObjectTableNVX;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_OBJECT_TABLE_NVX;
+    static const char* Typename() {
+        return "VkObjectTableNVX";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeObjectTableNVX> {
+    typedef VkObjectTableNVX Type;
+};
+template <> struct VkHandleInfo<VkPerformanceConfigurationINTEL> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypePerformanceConfigurationINTEL;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL;
+    static const char* Typename() {
+        return "VkPerformanceConfigurationINTEL";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypePerformanceConfigurationINTEL> {
+    typedef VkPerformanceConfigurationINTEL Type;
+};
+template <> struct VkHandleInfo<VkPipeline> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypePipeline;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_PIPELINE;
+    static const char* Typename() {
+        return "VkPipeline";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypePipeline> {
+    typedef VkPipeline Type;
+};
+template <> struct VkHandleInfo<VkPipelineCache> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypePipelineCache;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_PIPELINE_CACHE;
+    static const char* Typename() {
+        return "VkPipelineCache";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypePipelineCache> {
+    typedef VkPipelineCache Type;
+};
+template <> struct VkHandleInfo<VkPipelineLayout> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypePipelineLayout;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_PIPELINE_LAYOUT;
+    static const char* Typename() {
+        return "VkPipelineLayout";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypePipelineLayout> {
+    typedef VkPipelineLayout Type;
+};
+template <> struct VkHandleInfo<VkQueryPool> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeQueryPool;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_QUERY_POOL;
+    static const char* Typename() {
+        return "VkQueryPool";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeQueryPool> {
+    typedef VkQueryPool Type;
+};
+template <> struct VkHandleInfo<VkRenderPass> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeRenderPass;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_RENDER_PASS;
+    static const char* Typename() {
+        return "VkRenderPass";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeRenderPass> {
+    typedef VkRenderPass Type;
+};
+template <> struct VkHandleInfo<VkSampler> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeSampler;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_SAMPLER;
+    static const char* Typename() {
+        return "VkSampler";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeSampler> {
+    typedef VkSampler Type;
+};
+template <> struct VkHandleInfo<VkSamplerYcbcrConversion> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeSamplerYcbcrConversion;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION;
+    static const char* Typename() {
+        return "VkSamplerYcbcrConversion";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeSamplerYcbcrConversion> {
+    typedef VkSamplerYcbcrConversion Type;
+};
+template <> struct VkHandleInfo<VkSemaphore> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeSemaphore;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_SEMAPHORE;
+    static const char* Typename() {
+        return "VkSemaphore";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeSemaphore> {
+    typedef VkSemaphore Type;
+};
+template <> struct VkHandleInfo<VkShaderModule> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeShaderModule;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_SHADER_MODULE;
+    static const char* Typename() {
+        return "VkShaderModule";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeShaderModule> {
+    typedef VkShaderModule Type;
+};
+template <> struct VkHandleInfo<VkSurfaceKHR> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeSurfaceKHR;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_SURFACE_KHR;
+    static const char* Typename() {
+        return "VkSurfaceKHR";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeSurfaceKHR> {
+    typedef VkSurfaceKHR Type;
+};
+template <> struct VkHandleInfo<VkSwapchainKHR> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeSwapchainKHR;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_SWAPCHAIN_KHR;
+    static const char* Typename() {
+        return "VkSwapchainKHR";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeSwapchainKHR> {
+    typedef VkSwapchainKHR Type;
+};
+template <> struct VkHandleInfo<VkValidationCacheEXT> {
+    static const VulkanObjectType kVulkanObjectType = kVulkanObjectTypeValidationCacheEXT;
+    static const VkDebugReportObjectTypeEXT kDebugReportObjectType = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT;
+    static const VkObjectType kVkObjectType = VK_OBJECT_TYPE_VALIDATION_CACHE_EXT;
+    static const char* Typename() {
+        return "VkValidationCacheEXT";
+    }
+};
+template <> struct VulkanObjectTypeInfo<kVulkanObjectTypeValidationCacheEXT> {
+    typedef VkValidationCacheEXT Type;
+};
+#endif // TYPESAFE_NONDISPATCHABLE_HANDLES
+struct VulkanTypedHandle {
+    uint64_t handle;
+    VulkanObjectType type;
+    // node is optional, and if non-NULL is used to avoid a hash table lookup
+    class BASE_NODE *node;
+    template <typename Handle>
+    VulkanTypedHandle(Handle handle_, VulkanObjectType type_, class BASE_NODE *node_ = nullptr) :
+        handle(CastToUint64(handle_)),
+        type(type_),
+        node(node_) {
+#ifdef TYPESAFE_NONDISPATCHABLE_HANDLES
+        // For 32 bit it's not always safe to check for traits <-> type
+        // as all non-dispatchable handles have the same type-id and thus traits,
+        // but on 64 bit we can validate the passed type matches the passed handle
+        assert(type == VkHandleInfo<Handle>::kVulkanObjectType);
+#endif // TYPESAFE_NONDISPATCHABLE_HANDLES
+    }
+    template <typename Handle>
+    Handle Cast() const {
+#ifdef TYPESAFE_NONDISPATCHABLE_HANDLES
+        assert(type == VkHandleInfo<Handle>::kVulkanObjectType);
+#endif // TYPESAFE_NONDISPATCHABLE_HANDLES
+        return CastFromUint64<Handle>(handle);
+    }
+    VulkanTypedHandle() :
+        handle(VK_NULL_HANDLE),
+        type(kVulkanObjectTypeUnknown),
+        node(nullptr) {}
+};
+
diff --git a/src/third_party/vulkan-validation-layers/src/layers/generated/vk_safe_struct.cpp b/src/third_party/vulkan-validation-layers/src/layers/generated/vk_safe_struct.cpp
new file mode 100644
index 0000000..26ee9fe
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/generated/vk_safe_struct.cpp
@@ -0,0 +1,31321 @@
+// *** THIS FILE IS GENERATED - DO NOT EDIT ***
+// See helper_file_generator.py for modifications
+
+
+/***************************************************************************
+ *
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
+ * Author: Tobin Ehlis <tobine@google.com>
+ * Author: Chris Forbes <chrisforbes@google.com>
+ * Author: John Zulauf <jzulauf@lunarg.com>
+ *
+ ****************************************************************************/
+
+
+#include "vk_safe_struct.h"
+
+#include <string.h>
+#include <cassert>
+#include <cstring>
+
+#include <vulkan/vk_layer.h>
+
+
+safe_VkApplicationInfo::safe_VkApplicationInfo(const VkApplicationInfo* in_struct) :
+    sType(in_struct->sType),
+    applicationVersion(in_struct->applicationVersion),
+    engineVersion(in_struct->engineVersion),
+    apiVersion(in_struct->apiVersion)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    pApplicationName = SafeStringCopy(in_struct->pApplicationName);
+    pEngineName = SafeStringCopy(in_struct->pEngineName);
+}
+
+safe_VkApplicationInfo::safe_VkApplicationInfo() :
+    pNext(nullptr),
+    pApplicationName(nullptr),
+    pEngineName(nullptr)
+{}
+
+safe_VkApplicationInfo::safe_VkApplicationInfo(const safe_VkApplicationInfo& src)
+{
+    sType = src.sType;
+    applicationVersion = src.applicationVersion;
+    engineVersion = src.engineVersion;
+    apiVersion = src.apiVersion;
+    pNext = SafePnextCopy(src.pNext);
+    pApplicationName = SafeStringCopy(src.pApplicationName);
+    pEngineName = SafeStringCopy(src.pEngineName);
+}
+
+safe_VkApplicationInfo& safe_VkApplicationInfo::operator=(const safe_VkApplicationInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pApplicationName) delete [] pApplicationName;
+    if (pEngineName) delete [] pEngineName;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    applicationVersion = src.applicationVersion;
+    engineVersion = src.engineVersion;
+    apiVersion = src.apiVersion;
+    pNext = SafePnextCopy(src.pNext);
+    pApplicationName = SafeStringCopy(src.pApplicationName);
+    pEngineName = SafeStringCopy(src.pEngineName);
+
+    return *this;
+}
+
+safe_VkApplicationInfo::~safe_VkApplicationInfo()
+{
+    if (pApplicationName) delete [] pApplicationName;
+    if (pEngineName) delete [] pEngineName;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkApplicationInfo::initialize(const VkApplicationInfo* in_struct)
+{
+    sType = in_struct->sType;
+    applicationVersion = in_struct->applicationVersion;
+    engineVersion = in_struct->engineVersion;
+    apiVersion = in_struct->apiVersion;
+    pNext = SafePnextCopy(in_struct->pNext);
+    pApplicationName = SafeStringCopy(in_struct->pApplicationName);
+    pEngineName = SafeStringCopy(in_struct->pEngineName);
+}
+
+void safe_VkApplicationInfo::initialize(const safe_VkApplicationInfo* src)
+{
+    sType = src->sType;
+    applicationVersion = src->applicationVersion;
+    engineVersion = src->engineVersion;
+    apiVersion = src->apiVersion;
+    pNext = SafePnextCopy(src->pNext);
+    pApplicationName = SafeStringCopy(src->pApplicationName);
+    pEngineName = SafeStringCopy(src->pEngineName);
+}
+
+safe_VkInstanceCreateInfo::safe_VkInstanceCreateInfo(const VkInstanceCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    pApplicationInfo(nullptr),
+    enabledLayerCount(in_struct->enabledLayerCount),
+    enabledExtensionCount(in_struct->enabledExtensionCount)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    char **tmp_ppEnabledLayerNames = new char *[in_struct->enabledLayerCount];
+    for (uint32_t i = 0; i < enabledLayerCount; ++i) {
+        tmp_ppEnabledLayerNames[i] = SafeStringCopy(in_struct->ppEnabledLayerNames[i]);
+    }
+    ppEnabledLayerNames = tmp_ppEnabledLayerNames;
+    char **tmp_ppEnabledExtensionNames = new char *[in_struct->enabledExtensionCount];
+    for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
+        tmp_ppEnabledExtensionNames[i] = SafeStringCopy(in_struct->ppEnabledExtensionNames[i]);
+    }
+    ppEnabledExtensionNames = tmp_ppEnabledExtensionNames;
+    if (in_struct->pApplicationInfo)
+        pApplicationInfo = new safe_VkApplicationInfo(in_struct->pApplicationInfo);
+}
+
+safe_VkInstanceCreateInfo::safe_VkInstanceCreateInfo() :
+    pNext(nullptr),
+    pApplicationInfo(nullptr),
+    ppEnabledLayerNames(nullptr),
+    ppEnabledExtensionNames(nullptr)
+{}
+
+safe_VkInstanceCreateInfo::safe_VkInstanceCreateInfo(const safe_VkInstanceCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    pApplicationInfo = nullptr;
+    enabledLayerCount = src.enabledLayerCount;
+    enabledExtensionCount = src.enabledExtensionCount;
+    pNext = SafePnextCopy(src.pNext);
+    char **tmp_ppEnabledLayerNames = new char *[src.enabledLayerCount];
+    for (uint32_t i = 0; i < enabledLayerCount; ++i) {
+        tmp_ppEnabledLayerNames[i] = SafeStringCopy(src.ppEnabledLayerNames[i]);
+    }
+    ppEnabledLayerNames = tmp_ppEnabledLayerNames;
+    char **tmp_ppEnabledExtensionNames = new char *[src.enabledExtensionCount];
+    for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
+        tmp_ppEnabledExtensionNames[i] = SafeStringCopy(src.ppEnabledExtensionNames[i]);
+    }
+    ppEnabledExtensionNames = tmp_ppEnabledExtensionNames;
+    if (src.pApplicationInfo)
+        pApplicationInfo = new safe_VkApplicationInfo(*src.pApplicationInfo);
+}
+
+safe_VkInstanceCreateInfo& safe_VkInstanceCreateInfo::operator=(const safe_VkInstanceCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pApplicationInfo)
+        delete pApplicationInfo;
+    if (ppEnabledLayerNames) {
+        for (uint32_t i = 0; i < enabledLayerCount; ++i) {
+            delete [] ppEnabledLayerNames[i];
+        }
+        delete [] ppEnabledLayerNames;
+    }
+    if (ppEnabledExtensionNames) {
+        for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
+            delete [] ppEnabledExtensionNames[i];
+        }
+        delete [] ppEnabledExtensionNames;
+    }
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    pApplicationInfo = nullptr;
+    enabledLayerCount = src.enabledLayerCount;
+    enabledExtensionCount = src.enabledExtensionCount;
+    pNext = SafePnextCopy(src.pNext);
+    char **tmp_ppEnabledLayerNames = new char *[src.enabledLayerCount];
+    for (uint32_t i = 0; i < enabledLayerCount; ++i) {
+        tmp_ppEnabledLayerNames[i] = SafeStringCopy(src.ppEnabledLayerNames[i]);
+    }
+    ppEnabledLayerNames = tmp_ppEnabledLayerNames;
+    char **tmp_ppEnabledExtensionNames = new char *[src.enabledExtensionCount];
+    for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
+        tmp_ppEnabledExtensionNames[i] = SafeStringCopy(src.ppEnabledExtensionNames[i]);
+    }
+    ppEnabledExtensionNames = tmp_ppEnabledExtensionNames;
+    if (src.pApplicationInfo)
+        pApplicationInfo = new safe_VkApplicationInfo(*src.pApplicationInfo);
+
+    return *this;
+}
+
+safe_VkInstanceCreateInfo::~safe_VkInstanceCreateInfo()
+{
+    if (pApplicationInfo)
+        delete pApplicationInfo;
+    if (ppEnabledLayerNames) {
+        for (uint32_t i = 0; i < enabledLayerCount; ++i) {
+            delete [] ppEnabledLayerNames[i];
+        }
+        delete [] ppEnabledLayerNames;
+    }
+    if (ppEnabledExtensionNames) {
+        for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
+            delete [] ppEnabledExtensionNames[i];
+        }
+        delete [] ppEnabledExtensionNames;
+    }
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkInstanceCreateInfo::initialize(const VkInstanceCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    pApplicationInfo = nullptr;
+    enabledLayerCount = in_struct->enabledLayerCount;
+    enabledExtensionCount = in_struct->enabledExtensionCount;
+    pNext = SafePnextCopy(in_struct->pNext);
+    char **tmp_ppEnabledLayerNames = new char *[in_struct->enabledLayerCount];
+    for (uint32_t i = 0; i < enabledLayerCount; ++i) {
+        tmp_ppEnabledLayerNames[i] = SafeStringCopy(in_struct->ppEnabledLayerNames[i]);
+    }
+    ppEnabledLayerNames = tmp_ppEnabledLayerNames;
+    char **tmp_ppEnabledExtensionNames = new char *[in_struct->enabledExtensionCount];
+    for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
+        tmp_ppEnabledExtensionNames[i] = SafeStringCopy(in_struct->ppEnabledExtensionNames[i]);
+    }
+    ppEnabledExtensionNames = tmp_ppEnabledExtensionNames;
+    if (in_struct->pApplicationInfo)
+        pApplicationInfo = new safe_VkApplicationInfo(in_struct->pApplicationInfo);
+}
+
+void safe_VkInstanceCreateInfo::initialize(const safe_VkInstanceCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    pApplicationInfo = nullptr;
+    enabledLayerCount = src->enabledLayerCount;
+    enabledExtensionCount = src->enabledExtensionCount;
+    pNext = SafePnextCopy(src->pNext);
+    char **tmp_ppEnabledLayerNames = new char *[src->enabledLayerCount];
+    for (uint32_t i = 0; i < enabledLayerCount; ++i) {
+        tmp_ppEnabledLayerNames[i] = SafeStringCopy(src->ppEnabledLayerNames[i]);
+    }
+    ppEnabledLayerNames = tmp_ppEnabledLayerNames;
+    char **tmp_ppEnabledExtensionNames = new char *[src->enabledExtensionCount];
+    for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
+        tmp_ppEnabledExtensionNames[i] = SafeStringCopy(src->ppEnabledExtensionNames[i]);
+    }
+    ppEnabledExtensionNames = tmp_ppEnabledExtensionNames;
+    if (src->pApplicationInfo)
+        pApplicationInfo = new safe_VkApplicationInfo(*src->pApplicationInfo);
+}
+
+safe_VkAllocationCallbacks::safe_VkAllocationCallbacks(const VkAllocationCallbacks* in_struct) :
+    pUserData(in_struct->pUserData),
+    pfnAllocation(in_struct->pfnAllocation),
+    pfnReallocation(in_struct->pfnReallocation),
+    pfnFree(in_struct->pfnFree),
+    pfnInternalAllocation(in_struct->pfnInternalAllocation),
+    pfnInternalFree(in_struct->pfnInternalFree)
+{
+}
+
+safe_VkAllocationCallbacks::safe_VkAllocationCallbacks() :
+    pUserData(nullptr)
+{}
+
+safe_VkAllocationCallbacks::safe_VkAllocationCallbacks(const safe_VkAllocationCallbacks& src)
+{
+    pUserData = src.pUserData;
+    pfnAllocation = src.pfnAllocation;
+    pfnReallocation = src.pfnReallocation;
+    pfnFree = src.pfnFree;
+    pfnInternalAllocation = src.pfnInternalAllocation;
+    pfnInternalFree = src.pfnInternalFree;
+}
+
+safe_VkAllocationCallbacks& safe_VkAllocationCallbacks::operator=(const safe_VkAllocationCallbacks& src)
+{
+    if (&src == this) return *this;
+
+    pUserData = src.pUserData;
+    pfnAllocation = src.pfnAllocation;
+    pfnReallocation = src.pfnReallocation;
+    pfnFree = src.pfnFree;
+    pfnInternalAllocation = src.pfnInternalAllocation;
+    pfnInternalFree = src.pfnInternalFree;
+
+    return *this;
+}
+
+safe_VkAllocationCallbacks::~safe_VkAllocationCallbacks()
+{
+}
+
+void safe_VkAllocationCallbacks::initialize(const VkAllocationCallbacks* in_struct)
+{
+    pUserData = in_struct->pUserData;
+    pfnAllocation = in_struct->pfnAllocation;
+    pfnReallocation = in_struct->pfnReallocation;
+    pfnFree = in_struct->pfnFree;
+    pfnInternalAllocation = in_struct->pfnInternalAllocation;
+    pfnInternalFree = in_struct->pfnInternalFree;
+}
+
+void safe_VkAllocationCallbacks::initialize(const safe_VkAllocationCallbacks* src)
+{
+    pUserData = src->pUserData;
+    pfnAllocation = src->pfnAllocation;
+    pfnReallocation = src->pfnReallocation;
+    pfnFree = src->pfnFree;
+    pfnInternalAllocation = src->pfnInternalAllocation;
+    pfnInternalFree = src->pfnInternalFree;
+}
+
+safe_VkDeviceQueueCreateInfo::safe_VkDeviceQueueCreateInfo(const VkDeviceQueueCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    queueFamilyIndex(in_struct->queueFamilyIndex),
+    queueCount(in_struct->queueCount),
+    pQueuePriorities(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pQueuePriorities) {
+        pQueuePriorities = new float[in_struct->queueCount];
+        memcpy ((void *)pQueuePriorities, (void *)in_struct->pQueuePriorities, sizeof(float)*in_struct->queueCount);
+    }
+}
+
+safe_VkDeviceQueueCreateInfo::safe_VkDeviceQueueCreateInfo() :
+    pNext(nullptr),
+    pQueuePriorities(nullptr)
+{}
+
+safe_VkDeviceQueueCreateInfo::safe_VkDeviceQueueCreateInfo(const safe_VkDeviceQueueCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    queueFamilyIndex = src.queueFamilyIndex;
+    queueCount = src.queueCount;
+    pQueuePriorities = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pQueuePriorities) {
+        pQueuePriorities = new float[src.queueCount];
+        memcpy ((void *)pQueuePriorities, (void *)src.pQueuePriorities, sizeof(float)*src.queueCount);
+    }
+}
+
+safe_VkDeviceQueueCreateInfo& safe_VkDeviceQueueCreateInfo::operator=(const safe_VkDeviceQueueCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pQueuePriorities)
+        delete[] pQueuePriorities;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    queueFamilyIndex = src.queueFamilyIndex;
+    queueCount = src.queueCount;
+    pQueuePriorities = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pQueuePriorities) {
+        pQueuePriorities = new float[src.queueCount];
+        memcpy ((void *)pQueuePriorities, (void *)src.pQueuePriorities, sizeof(float)*src.queueCount);
+    }
+
+    return *this;
+}
+
+safe_VkDeviceQueueCreateInfo::~safe_VkDeviceQueueCreateInfo()
+{
+    if (pQueuePriorities)
+        delete[] pQueuePriorities;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDeviceQueueCreateInfo::initialize(const VkDeviceQueueCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    queueFamilyIndex = in_struct->queueFamilyIndex;
+    queueCount = in_struct->queueCount;
+    pQueuePriorities = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pQueuePriorities) {
+        pQueuePriorities = new float[in_struct->queueCount];
+        memcpy ((void *)pQueuePriorities, (void *)in_struct->pQueuePriorities, sizeof(float)*in_struct->queueCount);
+    }
+}
+
+void safe_VkDeviceQueueCreateInfo::initialize(const safe_VkDeviceQueueCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    queueFamilyIndex = src->queueFamilyIndex;
+    queueCount = src->queueCount;
+    pQueuePriorities = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pQueuePriorities) {
+        pQueuePriorities = new float[src->queueCount];
+        memcpy ((void *)pQueuePriorities, (void *)src->pQueuePriorities, sizeof(float)*src->queueCount);
+    }
+}
+
+safe_VkDeviceCreateInfo::safe_VkDeviceCreateInfo(const VkDeviceCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    queueCreateInfoCount(in_struct->queueCreateInfoCount),
+    pQueueCreateInfos(nullptr),
+    enabledLayerCount(in_struct->enabledLayerCount),
+    enabledExtensionCount(in_struct->enabledExtensionCount),
+    pEnabledFeatures(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    char **tmp_ppEnabledLayerNames = new char *[in_struct->enabledLayerCount];
+    for (uint32_t i = 0; i < enabledLayerCount; ++i) {
+        tmp_ppEnabledLayerNames[i] = SafeStringCopy(in_struct->ppEnabledLayerNames[i]);
+    }
+    ppEnabledLayerNames = tmp_ppEnabledLayerNames;
+    char **tmp_ppEnabledExtensionNames = new char *[in_struct->enabledExtensionCount];
+    for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
+        tmp_ppEnabledExtensionNames[i] = SafeStringCopy(in_struct->ppEnabledExtensionNames[i]);
+    }
+    ppEnabledExtensionNames = tmp_ppEnabledExtensionNames;
+    if (queueCreateInfoCount && in_struct->pQueueCreateInfos) {
+        pQueueCreateInfos = new safe_VkDeviceQueueCreateInfo[queueCreateInfoCount];
+        for (uint32_t i = 0; i < queueCreateInfoCount; ++i) {
+            pQueueCreateInfos[i].initialize(&in_struct->pQueueCreateInfos[i]);
+        }
+    }
+    if (in_struct->pEnabledFeatures) {
+        pEnabledFeatures = new VkPhysicalDeviceFeatures(*in_struct->pEnabledFeatures);
+    }
+}
+
+safe_VkDeviceCreateInfo::safe_VkDeviceCreateInfo() :
+    pNext(nullptr),
+    pQueueCreateInfos(nullptr),
+    ppEnabledLayerNames(nullptr),
+    ppEnabledExtensionNames(nullptr),
+    pEnabledFeatures(nullptr)
+{}
+
+safe_VkDeviceCreateInfo::safe_VkDeviceCreateInfo(const safe_VkDeviceCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    queueCreateInfoCount = src.queueCreateInfoCount;
+    pQueueCreateInfos = nullptr;
+    enabledLayerCount = src.enabledLayerCount;
+    enabledExtensionCount = src.enabledExtensionCount;
+    pEnabledFeatures = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    char **tmp_ppEnabledLayerNames = new char *[src.enabledLayerCount];
+    for (uint32_t i = 0; i < enabledLayerCount; ++i) {
+        tmp_ppEnabledLayerNames[i] = SafeStringCopy(src.ppEnabledLayerNames[i]);
+    }
+    ppEnabledLayerNames = tmp_ppEnabledLayerNames;
+    char **tmp_ppEnabledExtensionNames = new char *[src.enabledExtensionCount];
+    for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
+        tmp_ppEnabledExtensionNames[i] = SafeStringCopy(src.ppEnabledExtensionNames[i]);
+    }
+    ppEnabledExtensionNames = tmp_ppEnabledExtensionNames;
+    if (queueCreateInfoCount && src.pQueueCreateInfos) {
+        pQueueCreateInfos = new safe_VkDeviceQueueCreateInfo[queueCreateInfoCount];
+        for (uint32_t i = 0; i < queueCreateInfoCount; ++i) {
+            pQueueCreateInfos[i].initialize(&src.pQueueCreateInfos[i]);
+        }
+    }
+    if (src.pEnabledFeatures) {
+        pEnabledFeatures = new VkPhysicalDeviceFeatures(*src.pEnabledFeatures);
+    }
+}
+
+safe_VkDeviceCreateInfo& safe_VkDeviceCreateInfo::operator=(const safe_VkDeviceCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pQueueCreateInfos)
+        delete[] pQueueCreateInfos;
+    if (ppEnabledLayerNames) {
+        for (uint32_t i = 0; i < enabledLayerCount; ++i) {
+            delete [] ppEnabledLayerNames[i];
+        }
+        delete [] ppEnabledLayerNames;
+    }
+    if (ppEnabledExtensionNames) {
+        for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
+            delete [] ppEnabledExtensionNames[i];
+        }
+        delete [] ppEnabledExtensionNames;
+    }
+    if (pEnabledFeatures)
+        delete pEnabledFeatures;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    queueCreateInfoCount = src.queueCreateInfoCount;
+    pQueueCreateInfos = nullptr;
+    enabledLayerCount = src.enabledLayerCount;
+    enabledExtensionCount = src.enabledExtensionCount;
+    pEnabledFeatures = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    char **tmp_ppEnabledLayerNames = new char *[src.enabledLayerCount];
+    for (uint32_t i = 0; i < enabledLayerCount; ++i) {
+        tmp_ppEnabledLayerNames[i] = SafeStringCopy(src.ppEnabledLayerNames[i]);
+    }
+    ppEnabledLayerNames = tmp_ppEnabledLayerNames;
+    char **tmp_ppEnabledExtensionNames = new char *[src.enabledExtensionCount];
+    for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
+        tmp_ppEnabledExtensionNames[i] = SafeStringCopy(src.ppEnabledExtensionNames[i]);
+    }
+    ppEnabledExtensionNames = tmp_ppEnabledExtensionNames;
+    if (queueCreateInfoCount && src.pQueueCreateInfos) {
+        pQueueCreateInfos = new safe_VkDeviceQueueCreateInfo[queueCreateInfoCount];
+        for (uint32_t i = 0; i < queueCreateInfoCount; ++i) {
+            pQueueCreateInfos[i].initialize(&src.pQueueCreateInfos[i]);
+        }
+    }
+    if (src.pEnabledFeatures) {
+        pEnabledFeatures = new VkPhysicalDeviceFeatures(*src.pEnabledFeatures);
+    }
+
+    return *this;
+}
+
+safe_VkDeviceCreateInfo::~safe_VkDeviceCreateInfo()
+{
+    if (pQueueCreateInfos)
+        delete[] pQueueCreateInfos;
+    if (ppEnabledLayerNames) {
+        for (uint32_t i = 0; i < enabledLayerCount; ++i) {
+            delete [] ppEnabledLayerNames[i];
+        }
+        delete [] ppEnabledLayerNames;
+    }
+    if (ppEnabledExtensionNames) {
+        for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
+            delete [] ppEnabledExtensionNames[i];
+        }
+        delete [] ppEnabledExtensionNames;
+    }
+    if (pEnabledFeatures)
+        delete pEnabledFeatures;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDeviceCreateInfo::initialize(const VkDeviceCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    queueCreateInfoCount = in_struct->queueCreateInfoCount;
+    pQueueCreateInfos = nullptr;
+    enabledLayerCount = in_struct->enabledLayerCount;
+    enabledExtensionCount = in_struct->enabledExtensionCount;
+    pEnabledFeatures = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    char **tmp_ppEnabledLayerNames = new char *[in_struct->enabledLayerCount];
+    for (uint32_t i = 0; i < enabledLayerCount; ++i) {
+        tmp_ppEnabledLayerNames[i] = SafeStringCopy(in_struct->ppEnabledLayerNames[i]);
+    }
+    ppEnabledLayerNames = tmp_ppEnabledLayerNames;
+    char **tmp_ppEnabledExtensionNames = new char *[in_struct->enabledExtensionCount];
+    for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
+        tmp_ppEnabledExtensionNames[i] = SafeStringCopy(in_struct->ppEnabledExtensionNames[i]);
+    }
+    ppEnabledExtensionNames = tmp_ppEnabledExtensionNames;
+    if (queueCreateInfoCount && in_struct->pQueueCreateInfos) {
+        pQueueCreateInfos = new safe_VkDeviceQueueCreateInfo[queueCreateInfoCount];
+        for (uint32_t i = 0; i < queueCreateInfoCount; ++i) {
+            pQueueCreateInfos[i].initialize(&in_struct->pQueueCreateInfos[i]);
+        }
+    }
+    if (in_struct->pEnabledFeatures) {
+        pEnabledFeatures = new VkPhysicalDeviceFeatures(*in_struct->pEnabledFeatures);
+    }
+}
+
+void safe_VkDeviceCreateInfo::initialize(const safe_VkDeviceCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    queueCreateInfoCount = src->queueCreateInfoCount;
+    pQueueCreateInfos = nullptr;
+    enabledLayerCount = src->enabledLayerCount;
+    enabledExtensionCount = src->enabledExtensionCount;
+    pEnabledFeatures = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    char **tmp_ppEnabledLayerNames = new char *[src->enabledLayerCount];
+    for (uint32_t i = 0; i < enabledLayerCount; ++i) {
+        tmp_ppEnabledLayerNames[i] = SafeStringCopy(src->ppEnabledLayerNames[i]);
+    }
+    ppEnabledLayerNames = tmp_ppEnabledLayerNames;
+    char **tmp_ppEnabledExtensionNames = new char *[src->enabledExtensionCount];
+    for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
+        tmp_ppEnabledExtensionNames[i] = SafeStringCopy(src->ppEnabledExtensionNames[i]);
+    }
+    ppEnabledExtensionNames = tmp_ppEnabledExtensionNames;
+    if (queueCreateInfoCount && src->pQueueCreateInfos) {
+        pQueueCreateInfos = new safe_VkDeviceQueueCreateInfo[queueCreateInfoCount];
+        for (uint32_t i = 0; i < queueCreateInfoCount; ++i) {
+            pQueueCreateInfos[i].initialize(&src->pQueueCreateInfos[i]);
+        }
+    }
+    if (src->pEnabledFeatures) {
+        pEnabledFeatures = new VkPhysicalDeviceFeatures(*src->pEnabledFeatures);
+    }
+}
+
+safe_VkSubmitInfo::safe_VkSubmitInfo(const VkSubmitInfo* in_struct) :
+    sType(in_struct->sType),
+    waitSemaphoreCount(in_struct->waitSemaphoreCount),
+    pWaitSemaphores(nullptr),
+    pWaitDstStageMask(nullptr),
+    commandBufferCount(in_struct->commandBufferCount),
+    pCommandBuffers(nullptr),
+    signalSemaphoreCount(in_struct->signalSemaphoreCount),
+    pSignalSemaphores(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (waitSemaphoreCount && in_struct->pWaitSemaphores) {
+        pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
+        for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
+            pWaitSemaphores[i] = in_struct->pWaitSemaphores[i];
+        }
+    }
+    if (in_struct->pWaitDstStageMask) {
+        pWaitDstStageMask = new VkPipelineStageFlags[in_struct->waitSemaphoreCount];
+        memcpy ((void *)pWaitDstStageMask, (void *)in_struct->pWaitDstStageMask, sizeof(VkPipelineStageFlags)*in_struct->waitSemaphoreCount);
+    }
+    if (in_struct->pCommandBuffers) {
+        pCommandBuffers = new VkCommandBuffer[in_struct->commandBufferCount];
+        memcpy ((void *)pCommandBuffers, (void *)in_struct->pCommandBuffers, sizeof(VkCommandBuffer)*in_struct->commandBufferCount);
+    }
+    if (signalSemaphoreCount && in_struct->pSignalSemaphores) {
+        pSignalSemaphores = new VkSemaphore[signalSemaphoreCount];
+        for (uint32_t i = 0; i < signalSemaphoreCount; ++i) {
+            pSignalSemaphores[i] = in_struct->pSignalSemaphores[i];
+        }
+    }
+}
+
+safe_VkSubmitInfo::safe_VkSubmitInfo() :
+    pNext(nullptr),
+    pWaitSemaphores(nullptr),
+    pWaitDstStageMask(nullptr),
+    pCommandBuffers(nullptr),
+    pSignalSemaphores(nullptr)
+{}
+
+safe_VkSubmitInfo::safe_VkSubmitInfo(const safe_VkSubmitInfo& src)
+{
+    sType = src.sType;
+    waitSemaphoreCount = src.waitSemaphoreCount;
+    pWaitSemaphores = nullptr;
+    pWaitDstStageMask = nullptr;
+    commandBufferCount = src.commandBufferCount;
+    pCommandBuffers = nullptr;
+    signalSemaphoreCount = src.signalSemaphoreCount;
+    pSignalSemaphores = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (waitSemaphoreCount && src.pWaitSemaphores) {
+        pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
+        for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
+            pWaitSemaphores[i] = src.pWaitSemaphores[i];
+        }
+    }
+    if (src.pWaitDstStageMask) {
+        pWaitDstStageMask = new VkPipelineStageFlags[src.waitSemaphoreCount];
+        memcpy ((void *)pWaitDstStageMask, (void *)src.pWaitDstStageMask, sizeof(VkPipelineStageFlags)*src.waitSemaphoreCount);
+    }
+    if (src.pCommandBuffers) {
+        pCommandBuffers = new VkCommandBuffer[src.commandBufferCount];
+        memcpy ((void *)pCommandBuffers, (void *)src.pCommandBuffers, sizeof(VkCommandBuffer)*src.commandBufferCount);
+    }
+    if (signalSemaphoreCount && src.pSignalSemaphores) {
+        pSignalSemaphores = new VkSemaphore[signalSemaphoreCount];
+        for (uint32_t i = 0; i < signalSemaphoreCount; ++i) {
+            pSignalSemaphores[i] = src.pSignalSemaphores[i];
+        }
+    }
+}
+
+safe_VkSubmitInfo& safe_VkSubmitInfo::operator=(const safe_VkSubmitInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pWaitSemaphores)
+        delete[] pWaitSemaphores;
+    if (pWaitDstStageMask)
+        delete[] pWaitDstStageMask;
+    if (pCommandBuffers)
+        delete[] pCommandBuffers;
+    if (pSignalSemaphores)
+        delete[] pSignalSemaphores;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    waitSemaphoreCount = src.waitSemaphoreCount;
+    pWaitSemaphores = nullptr;
+    pWaitDstStageMask = nullptr;
+    commandBufferCount = src.commandBufferCount;
+    pCommandBuffers = nullptr;
+    signalSemaphoreCount = src.signalSemaphoreCount;
+    pSignalSemaphores = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (waitSemaphoreCount && src.pWaitSemaphores) {
+        pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
+        for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
+            pWaitSemaphores[i] = src.pWaitSemaphores[i];
+        }
+    }
+    if (src.pWaitDstStageMask) {
+        pWaitDstStageMask = new VkPipelineStageFlags[src.waitSemaphoreCount];
+        memcpy ((void *)pWaitDstStageMask, (void *)src.pWaitDstStageMask, sizeof(VkPipelineStageFlags)*src.waitSemaphoreCount);
+    }
+    if (src.pCommandBuffers) {
+        pCommandBuffers = new VkCommandBuffer[src.commandBufferCount];
+        memcpy ((void *)pCommandBuffers, (void *)src.pCommandBuffers, sizeof(VkCommandBuffer)*src.commandBufferCount);
+    }
+    if (signalSemaphoreCount && src.pSignalSemaphores) {
+        pSignalSemaphores = new VkSemaphore[signalSemaphoreCount];
+        for (uint32_t i = 0; i < signalSemaphoreCount; ++i) {
+            pSignalSemaphores[i] = src.pSignalSemaphores[i];
+        }
+    }
+
+    return *this;
+}
+
+safe_VkSubmitInfo::~safe_VkSubmitInfo()
+{
+    if (pWaitSemaphores)
+        delete[] pWaitSemaphores;
+    if (pWaitDstStageMask)
+        delete[] pWaitDstStageMask;
+    if (pCommandBuffers)
+        delete[] pCommandBuffers;
+    if (pSignalSemaphores)
+        delete[] pSignalSemaphores;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSubmitInfo::initialize(const VkSubmitInfo* in_struct)
+{
+    sType = in_struct->sType;
+    waitSemaphoreCount = in_struct->waitSemaphoreCount;
+    pWaitSemaphores = nullptr;
+    pWaitDstStageMask = nullptr;
+    commandBufferCount = in_struct->commandBufferCount;
+    pCommandBuffers = nullptr;
+    signalSemaphoreCount = in_struct->signalSemaphoreCount;
+    pSignalSemaphores = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (waitSemaphoreCount && in_struct->pWaitSemaphores) {
+        pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
+        for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
+            pWaitSemaphores[i] = in_struct->pWaitSemaphores[i];
+        }
+    }
+    if (in_struct->pWaitDstStageMask) {
+        pWaitDstStageMask = new VkPipelineStageFlags[in_struct->waitSemaphoreCount];
+        memcpy ((void *)pWaitDstStageMask, (void *)in_struct->pWaitDstStageMask, sizeof(VkPipelineStageFlags)*in_struct->waitSemaphoreCount);
+    }
+    if (in_struct->pCommandBuffers) {
+        pCommandBuffers = new VkCommandBuffer[in_struct->commandBufferCount];
+        memcpy ((void *)pCommandBuffers, (void *)in_struct->pCommandBuffers, sizeof(VkCommandBuffer)*in_struct->commandBufferCount);
+    }
+    if (signalSemaphoreCount && in_struct->pSignalSemaphores) {
+        pSignalSemaphores = new VkSemaphore[signalSemaphoreCount];
+        for (uint32_t i = 0; i < signalSemaphoreCount; ++i) {
+            pSignalSemaphores[i] = in_struct->pSignalSemaphores[i];
+        }
+    }
+}
+
+void safe_VkSubmitInfo::initialize(const safe_VkSubmitInfo* src)
+{
+    sType = src->sType;
+    waitSemaphoreCount = src->waitSemaphoreCount;
+    pWaitSemaphores = nullptr;
+    pWaitDstStageMask = nullptr;
+    commandBufferCount = src->commandBufferCount;
+    pCommandBuffers = nullptr;
+    signalSemaphoreCount = src->signalSemaphoreCount;
+    pSignalSemaphores = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (waitSemaphoreCount && src->pWaitSemaphores) {
+        pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
+        for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
+            pWaitSemaphores[i] = src->pWaitSemaphores[i];
+        }
+    }
+    if (src->pWaitDstStageMask) {
+        pWaitDstStageMask = new VkPipelineStageFlags[src->waitSemaphoreCount];
+        memcpy ((void *)pWaitDstStageMask, (void *)src->pWaitDstStageMask, sizeof(VkPipelineStageFlags)*src->waitSemaphoreCount);
+    }
+    if (src->pCommandBuffers) {
+        pCommandBuffers = new VkCommandBuffer[src->commandBufferCount];
+        memcpy ((void *)pCommandBuffers, (void *)src->pCommandBuffers, sizeof(VkCommandBuffer)*src->commandBufferCount);
+    }
+    if (signalSemaphoreCount && src->pSignalSemaphores) {
+        pSignalSemaphores = new VkSemaphore[signalSemaphoreCount];
+        for (uint32_t i = 0; i < signalSemaphoreCount; ++i) {
+            pSignalSemaphores[i] = src->pSignalSemaphores[i];
+        }
+    }
+}
+
+safe_VkMemoryAllocateInfo::safe_VkMemoryAllocateInfo(const VkMemoryAllocateInfo* in_struct) :
+    sType(in_struct->sType),
+    allocationSize(in_struct->allocationSize),
+    memoryTypeIndex(in_struct->memoryTypeIndex)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkMemoryAllocateInfo::safe_VkMemoryAllocateInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkMemoryAllocateInfo::safe_VkMemoryAllocateInfo(const safe_VkMemoryAllocateInfo& src)
+{
+    sType = src.sType;
+    allocationSize = src.allocationSize;
+    memoryTypeIndex = src.memoryTypeIndex;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkMemoryAllocateInfo& safe_VkMemoryAllocateInfo::operator=(const safe_VkMemoryAllocateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    allocationSize = src.allocationSize;
+    memoryTypeIndex = src.memoryTypeIndex;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkMemoryAllocateInfo::~safe_VkMemoryAllocateInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkMemoryAllocateInfo::initialize(const VkMemoryAllocateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    allocationSize = in_struct->allocationSize;
+    memoryTypeIndex = in_struct->memoryTypeIndex;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkMemoryAllocateInfo::initialize(const safe_VkMemoryAllocateInfo* src)
+{
+    sType = src->sType;
+    allocationSize = src->allocationSize;
+    memoryTypeIndex = src->memoryTypeIndex;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkMappedMemoryRange::safe_VkMappedMemoryRange(const VkMappedMemoryRange* in_struct) :
+    sType(in_struct->sType),
+    memory(in_struct->memory),
+    offset(in_struct->offset),
+    size(in_struct->size)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkMappedMemoryRange::safe_VkMappedMemoryRange() :
+    pNext(nullptr)
+{}
+
+safe_VkMappedMemoryRange::safe_VkMappedMemoryRange(const safe_VkMappedMemoryRange& src)
+{
+    sType = src.sType;
+    memory = src.memory;
+    offset = src.offset;
+    size = src.size;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkMappedMemoryRange& safe_VkMappedMemoryRange::operator=(const safe_VkMappedMemoryRange& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    memory = src.memory;
+    offset = src.offset;
+    size = src.size;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkMappedMemoryRange::~safe_VkMappedMemoryRange()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkMappedMemoryRange::initialize(const VkMappedMemoryRange* in_struct)
+{
+    sType = in_struct->sType;
+    memory = in_struct->memory;
+    offset = in_struct->offset;
+    size = in_struct->size;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkMappedMemoryRange::initialize(const safe_VkMappedMemoryRange* src)
+{
+    sType = src->sType;
+    memory = src->memory;
+    offset = src->offset;
+    size = src->size;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkSparseBufferMemoryBindInfo::safe_VkSparseBufferMemoryBindInfo(const VkSparseBufferMemoryBindInfo* in_struct) :
+    buffer(in_struct->buffer),
+    bindCount(in_struct->bindCount),
+    pBinds(nullptr)
+{
+    if (bindCount && in_struct->pBinds) {
+        pBinds = new VkSparseMemoryBind[bindCount];
+        for (uint32_t i = 0; i < bindCount; ++i) {
+            pBinds[i] = in_struct->pBinds[i];
+        }
+    }
+}
+
+safe_VkSparseBufferMemoryBindInfo::safe_VkSparseBufferMemoryBindInfo() :
+    pBinds(nullptr)
+{}
+
+safe_VkSparseBufferMemoryBindInfo::safe_VkSparseBufferMemoryBindInfo(const safe_VkSparseBufferMemoryBindInfo& src)
+{
+    buffer = src.buffer;
+    bindCount = src.bindCount;
+    pBinds = nullptr;
+    if (bindCount && src.pBinds) {
+        pBinds = new VkSparseMemoryBind[bindCount];
+        for (uint32_t i = 0; i < bindCount; ++i) {
+            pBinds[i] = src.pBinds[i];
+        }
+    }
+}
+
+safe_VkSparseBufferMemoryBindInfo& safe_VkSparseBufferMemoryBindInfo::operator=(const safe_VkSparseBufferMemoryBindInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pBinds)
+        delete[] pBinds;
+
+    buffer = src.buffer;
+    bindCount = src.bindCount;
+    pBinds = nullptr;
+    if (bindCount && src.pBinds) {
+        pBinds = new VkSparseMemoryBind[bindCount];
+        for (uint32_t i = 0; i < bindCount; ++i) {
+            pBinds[i] = src.pBinds[i];
+        }
+    }
+
+    return *this;
+}
+
+safe_VkSparseBufferMemoryBindInfo::~safe_VkSparseBufferMemoryBindInfo()
+{
+    if (pBinds)
+        delete[] pBinds;
+}
+
+void safe_VkSparseBufferMemoryBindInfo::initialize(const VkSparseBufferMemoryBindInfo* in_struct)
+{
+    buffer = in_struct->buffer;
+    bindCount = in_struct->bindCount;
+    pBinds = nullptr;
+    if (bindCount && in_struct->pBinds) {
+        pBinds = new VkSparseMemoryBind[bindCount];
+        for (uint32_t i = 0; i < bindCount; ++i) {
+            pBinds[i] = in_struct->pBinds[i];
+        }
+    }
+}
+
+void safe_VkSparseBufferMemoryBindInfo::initialize(const safe_VkSparseBufferMemoryBindInfo* src)
+{
+    buffer = src->buffer;
+    bindCount = src->bindCount;
+    pBinds = nullptr;
+    if (bindCount && src->pBinds) {
+        pBinds = new VkSparseMemoryBind[bindCount];
+        for (uint32_t i = 0; i < bindCount; ++i) {
+            pBinds[i] = src->pBinds[i];
+        }
+    }
+}
+
+safe_VkSparseImageOpaqueMemoryBindInfo::safe_VkSparseImageOpaqueMemoryBindInfo(const VkSparseImageOpaqueMemoryBindInfo* in_struct) :
+    image(in_struct->image),
+    bindCount(in_struct->bindCount),
+    pBinds(nullptr)
+{
+    if (bindCount && in_struct->pBinds) {
+        pBinds = new VkSparseMemoryBind[bindCount];
+        for (uint32_t i = 0; i < bindCount; ++i) {
+            pBinds[i] = in_struct->pBinds[i];
+        }
+    }
+}
+
+safe_VkSparseImageOpaqueMemoryBindInfo::safe_VkSparseImageOpaqueMemoryBindInfo() :
+    pBinds(nullptr)
+{}
+
+safe_VkSparseImageOpaqueMemoryBindInfo::safe_VkSparseImageOpaqueMemoryBindInfo(const safe_VkSparseImageOpaqueMemoryBindInfo& src)
+{
+    image = src.image;
+    bindCount = src.bindCount;
+    pBinds = nullptr;
+    if (bindCount && src.pBinds) {
+        pBinds = new VkSparseMemoryBind[bindCount];
+        for (uint32_t i = 0; i < bindCount; ++i) {
+            pBinds[i] = src.pBinds[i];
+        }
+    }
+}
+
+safe_VkSparseImageOpaqueMemoryBindInfo& safe_VkSparseImageOpaqueMemoryBindInfo::operator=(const safe_VkSparseImageOpaqueMemoryBindInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pBinds)
+        delete[] pBinds;
+
+    image = src.image;
+    bindCount = src.bindCount;
+    pBinds = nullptr;
+    if (bindCount && src.pBinds) {
+        pBinds = new VkSparseMemoryBind[bindCount];
+        for (uint32_t i = 0; i < bindCount; ++i) {
+            pBinds[i] = src.pBinds[i];
+        }
+    }
+
+    return *this;
+}
+
+safe_VkSparseImageOpaqueMemoryBindInfo::~safe_VkSparseImageOpaqueMemoryBindInfo()
+{
+    if (pBinds)
+        delete[] pBinds;
+}
+
+void safe_VkSparseImageOpaqueMemoryBindInfo::initialize(const VkSparseImageOpaqueMemoryBindInfo* in_struct)
+{
+    image = in_struct->image;
+    bindCount = in_struct->bindCount;
+    pBinds = nullptr;
+    if (bindCount && in_struct->pBinds) {
+        pBinds = new VkSparseMemoryBind[bindCount];
+        for (uint32_t i = 0; i < bindCount; ++i) {
+            pBinds[i] = in_struct->pBinds[i];
+        }
+    }
+}
+
+void safe_VkSparseImageOpaqueMemoryBindInfo::initialize(const safe_VkSparseImageOpaqueMemoryBindInfo* src)
+{
+    image = src->image;
+    bindCount = src->bindCount;
+    pBinds = nullptr;
+    if (bindCount && src->pBinds) {
+        pBinds = new VkSparseMemoryBind[bindCount];
+        for (uint32_t i = 0; i < bindCount; ++i) {
+            pBinds[i] = src->pBinds[i];
+        }
+    }
+}
+
+safe_VkSparseImageMemoryBindInfo::safe_VkSparseImageMemoryBindInfo(const VkSparseImageMemoryBindInfo* in_struct) :
+    image(in_struct->image),
+    bindCount(in_struct->bindCount),
+    pBinds(nullptr)
+{
+    if (bindCount && in_struct->pBinds) {
+        pBinds = new VkSparseImageMemoryBind[bindCount];
+        for (uint32_t i = 0; i < bindCount; ++i) {
+            pBinds[i] = in_struct->pBinds[i];
+        }
+    }
+}
+
+safe_VkSparseImageMemoryBindInfo::safe_VkSparseImageMemoryBindInfo() :
+    pBinds(nullptr)
+{}
+
+safe_VkSparseImageMemoryBindInfo::safe_VkSparseImageMemoryBindInfo(const safe_VkSparseImageMemoryBindInfo& src)
+{
+    image = src.image;
+    bindCount = src.bindCount;
+    pBinds = nullptr;
+    if (bindCount && src.pBinds) {
+        pBinds = new VkSparseImageMemoryBind[bindCount];
+        for (uint32_t i = 0; i < bindCount; ++i) {
+            pBinds[i] = src.pBinds[i];
+        }
+    }
+}
+
+safe_VkSparseImageMemoryBindInfo& safe_VkSparseImageMemoryBindInfo::operator=(const safe_VkSparseImageMemoryBindInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pBinds)
+        delete[] pBinds;
+
+    image = src.image;
+    bindCount = src.bindCount;
+    pBinds = nullptr;
+    if (bindCount && src.pBinds) {
+        pBinds = new VkSparseImageMemoryBind[bindCount];
+        for (uint32_t i = 0; i < bindCount; ++i) {
+            pBinds[i] = src.pBinds[i];
+        }
+    }
+
+    return *this;
+}
+
+safe_VkSparseImageMemoryBindInfo::~safe_VkSparseImageMemoryBindInfo()
+{
+    if (pBinds)
+        delete[] pBinds;
+}
+
+void safe_VkSparseImageMemoryBindInfo::initialize(const VkSparseImageMemoryBindInfo* in_struct)
+{
+    image = in_struct->image;
+    bindCount = in_struct->bindCount;
+    pBinds = nullptr;
+    if (bindCount && in_struct->pBinds) {
+        pBinds = new VkSparseImageMemoryBind[bindCount];
+        for (uint32_t i = 0; i < bindCount; ++i) {
+            pBinds[i] = in_struct->pBinds[i];
+        }
+    }
+}
+
+void safe_VkSparseImageMemoryBindInfo::initialize(const safe_VkSparseImageMemoryBindInfo* src)
+{
+    image = src->image;
+    bindCount = src->bindCount;
+    pBinds = nullptr;
+    if (bindCount && src->pBinds) {
+        pBinds = new VkSparseImageMemoryBind[bindCount];
+        for (uint32_t i = 0; i < bindCount; ++i) {
+            pBinds[i] = src->pBinds[i];
+        }
+    }
+}
+
+safe_VkBindSparseInfo::safe_VkBindSparseInfo(const VkBindSparseInfo* in_struct) :
+    sType(in_struct->sType),
+    waitSemaphoreCount(in_struct->waitSemaphoreCount),
+    pWaitSemaphores(nullptr),
+    bufferBindCount(in_struct->bufferBindCount),
+    pBufferBinds(nullptr),
+    imageOpaqueBindCount(in_struct->imageOpaqueBindCount),
+    pImageOpaqueBinds(nullptr),
+    imageBindCount(in_struct->imageBindCount),
+    pImageBinds(nullptr),
+    signalSemaphoreCount(in_struct->signalSemaphoreCount),
+    pSignalSemaphores(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (waitSemaphoreCount && in_struct->pWaitSemaphores) {
+        pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
+        for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
+            pWaitSemaphores[i] = in_struct->pWaitSemaphores[i];
+        }
+    }
+    if (bufferBindCount && in_struct->pBufferBinds) {
+        pBufferBinds = new safe_VkSparseBufferMemoryBindInfo[bufferBindCount];
+        for (uint32_t i = 0; i < bufferBindCount; ++i) {
+            pBufferBinds[i].initialize(&in_struct->pBufferBinds[i]);
+        }
+    }
+    if (imageOpaqueBindCount && in_struct->pImageOpaqueBinds) {
+        pImageOpaqueBinds = new safe_VkSparseImageOpaqueMemoryBindInfo[imageOpaqueBindCount];
+        for (uint32_t i = 0; i < imageOpaqueBindCount; ++i) {
+            pImageOpaqueBinds[i].initialize(&in_struct->pImageOpaqueBinds[i]);
+        }
+    }
+    if (imageBindCount && in_struct->pImageBinds) {
+        pImageBinds = new safe_VkSparseImageMemoryBindInfo[imageBindCount];
+        for (uint32_t i = 0; i < imageBindCount; ++i) {
+            pImageBinds[i].initialize(&in_struct->pImageBinds[i]);
+        }
+    }
+    if (signalSemaphoreCount && in_struct->pSignalSemaphores) {
+        pSignalSemaphores = new VkSemaphore[signalSemaphoreCount];
+        for (uint32_t i = 0; i < signalSemaphoreCount; ++i) {
+            pSignalSemaphores[i] = in_struct->pSignalSemaphores[i];
+        }
+    }
+}
+
+safe_VkBindSparseInfo::safe_VkBindSparseInfo() :
+    pNext(nullptr),
+    pWaitSemaphores(nullptr),
+    pBufferBinds(nullptr),
+    pImageOpaqueBinds(nullptr),
+    pImageBinds(nullptr),
+    pSignalSemaphores(nullptr)
+{}
+
+safe_VkBindSparseInfo::safe_VkBindSparseInfo(const safe_VkBindSparseInfo& src)
+{
+    sType = src.sType;
+    waitSemaphoreCount = src.waitSemaphoreCount;
+    pWaitSemaphores = nullptr;
+    bufferBindCount = src.bufferBindCount;
+    pBufferBinds = nullptr;
+    imageOpaqueBindCount = src.imageOpaqueBindCount;
+    pImageOpaqueBinds = nullptr;
+    imageBindCount = src.imageBindCount;
+    pImageBinds = nullptr;
+    signalSemaphoreCount = src.signalSemaphoreCount;
+    pSignalSemaphores = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (waitSemaphoreCount && src.pWaitSemaphores) {
+        pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
+        for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
+            pWaitSemaphores[i] = src.pWaitSemaphores[i];
+        }
+    }
+    if (bufferBindCount && src.pBufferBinds) {
+        pBufferBinds = new safe_VkSparseBufferMemoryBindInfo[bufferBindCount];
+        for (uint32_t i = 0; i < bufferBindCount; ++i) {
+            pBufferBinds[i].initialize(&src.pBufferBinds[i]);
+        }
+    }
+    if (imageOpaqueBindCount && src.pImageOpaqueBinds) {
+        pImageOpaqueBinds = new safe_VkSparseImageOpaqueMemoryBindInfo[imageOpaqueBindCount];
+        for (uint32_t i = 0; i < imageOpaqueBindCount; ++i) {
+            pImageOpaqueBinds[i].initialize(&src.pImageOpaqueBinds[i]);
+        }
+    }
+    if (imageBindCount && src.pImageBinds) {
+        pImageBinds = new safe_VkSparseImageMemoryBindInfo[imageBindCount];
+        for (uint32_t i = 0; i < imageBindCount; ++i) {
+            pImageBinds[i].initialize(&src.pImageBinds[i]);
+        }
+    }
+    if (signalSemaphoreCount && src.pSignalSemaphores) {
+        pSignalSemaphores = new VkSemaphore[signalSemaphoreCount];
+        for (uint32_t i = 0; i < signalSemaphoreCount; ++i) {
+            pSignalSemaphores[i] = src.pSignalSemaphores[i];
+        }
+    }
+}
+
+safe_VkBindSparseInfo& safe_VkBindSparseInfo::operator=(const safe_VkBindSparseInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pWaitSemaphores)
+        delete[] pWaitSemaphores;
+    if (pBufferBinds)
+        delete[] pBufferBinds;
+    if (pImageOpaqueBinds)
+        delete[] pImageOpaqueBinds;
+    if (pImageBinds)
+        delete[] pImageBinds;
+    if (pSignalSemaphores)
+        delete[] pSignalSemaphores;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    waitSemaphoreCount = src.waitSemaphoreCount;
+    pWaitSemaphores = nullptr;
+    bufferBindCount = src.bufferBindCount;
+    pBufferBinds = nullptr;
+    imageOpaqueBindCount = src.imageOpaqueBindCount;
+    pImageOpaqueBinds = nullptr;
+    imageBindCount = src.imageBindCount;
+    pImageBinds = nullptr;
+    signalSemaphoreCount = src.signalSemaphoreCount;
+    pSignalSemaphores = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (waitSemaphoreCount && src.pWaitSemaphores) {
+        pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
+        for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
+            pWaitSemaphores[i] = src.pWaitSemaphores[i];
+        }
+    }
+    if (bufferBindCount && src.pBufferBinds) {
+        pBufferBinds = new safe_VkSparseBufferMemoryBindInfo[bufferBindCount];
+        for (uint32_t i = 0; i < bufferBindCount; ++i) {
+            pBufferBinds[i].initialize(&src.pBufferBinds[i]);
+        }
+    }
+    if (imageOpaqueBindCount && src.pImageOpaqueBinds) {
+        pImageOpaqueBinds = new safe_VkSparseImageOpaqueMemoryBindInfo[imageOpaqueBindCount];
+        for (uint32_t i = 0; i < imageOpaqueBindCount; ++i) {
+            pImageOpaqueBinds[i].initialize(&src.pImageOpaqueBinds[i]);
+        }
+    }
+    if (imageBindCount && src.pImageBinds) {
+        pImageBinds = new safe_VkSparseImageMemoryBindInfo[imageBindCount];
+        for (uint32_t i = 0; i < imageBindCount; ++i) {
+            pImageBinds[i].initialize(&src.pImageBinds[i]);
+        }
+    }
+    if (signalSemaphoreCount && src.pSignalSemaphores) {
+        pSignalSemaphores = new VkSemaphore[signalSemaphoreCount];
+        for (uint32_t i = 0; i < signalSemaphoreCount; ++i) {
+            pSignalSemaphores[i] = src.pSignalSemaphores[i];
+        }
+    }
+
+    return *this;
+}
+
+safe_VkBindSparseInfo::~safe_VkBindSparseInfo()
+{
+    if (pWaitSemaphores)
+        delete[] pWaitSemaphores;
+    if (pBufferBinds)
+        delete[] pBufferBinds;
+    if (pImageOpaqueBinds)
+        delete[] pImageOpaqueBinds;
+    if (pImageBinds)
+        delete[] pImageBinds;
+    if (pSignalSemaphores)
+        delete[] pSignalSemaphores;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkBindSparseInfo::initialize(const VkBindSparseInfo* in_struct)
+{
+    sType = in_struct->sType;
+    waitSemaphoreCount = in_struct->waitSemaphoreCount;
+    pWaitSemaphores = nullptr;
+    bufferBindCount = in_struct->bufferBindCount;
+    pBufferBinds = nullptr;
+    imageOpaqueBindCount = in_struct->imageOpaqueBindCount;
+    pImageOpaqueBinds = nullptr;
+    imageBindCount = in_struct->imageBindCount;
+    pImageBinds = nullptr;
+    signalSemaphoreCount = in_struct->signalSemaphoreCount;
+    pSignalSemaphores = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (waitSemaphoreCount && in_struct->pWaitSemaphores) {
+        pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
+        for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
+            pWaitSemaphores[i] = in_struct->pWaitSemaphores[i];
+        }
+    }
+    if (bufferBindCount && in_struct->pBufferBinds) {
+        pBufferBinds = new safe_VkSparseBufferMemoryBindInfo[bufferBindCount];
+        for (uint32_t i = 0; i < bufferBindCount; ++i) {
+            pBufferBinds[i].initialize(&in_struct->pBufferBinds[i]);
+        }
+    }
+    if (imageOpaqueBindCount && in_struct->pImageOpaqueBinds) {
+        pImageOpaqueBinds = new safe_VkSparseImageOpaqueMemoryBindInfo[imageOpaqueBindCount];
+        for (uint32_t i = 0; i < imageOpaqueBindCount; ++i) {
+            pImageOpaqueBinds[i].initialize(&in_struct->pImageOpaqueBinds[i]);
+        }
+    }
+    if (imageBindCount && in_struct->pImageBinds) {
+        pImageBinds = new safe_VkSparseImageMemoryBindInfo[imageBindCount];
+        for (uint32_t i = 0; i < imageBindCount; ++i) {
+            pImageBinds[i].initialize(&in_struct->pImageBinds[i]);
+        }
+    }
+    if (signalSemaphoreCount && in_struct->pSignalSemaphores) {
+        pSignalSemaphores = new VkSemaphore[signalSemaphoreCount];
+        for (uint32_t i = 0; i < signalSemaphoreCount; ++i) {
+            pSignalSemaphores[i] = in_struct->pSignalSemaphores[i];
+        }
+    }
+}
+
+void safe_VkBindSparseInfo::initialize(const safe_VkBindSparseInfo* src)
+{
+    sType = src->sType;
+    waitSemaphoreCount = src->waitSemaphoreCount;
+    pWaitSemaphores = nullptr;
+    bufferBindCount = src->bufferBindCount;
+    pBufferBinds = nullptr;
+    imageOpaqueBindCount = src->imageOpaqueBindCount;
+    pImageOpaqueBinds = nullptr;
+    imageBindCount = src->imageBindCount;
+    pImageBinds = nullptr;
+    signalSemaphoreCount = src->signalSemaphoreCount;
+    pSignalSemaphores = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (waitSemaphoreCount && src->pWaitSemaphores) {
+        pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
+        for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
+            pWaitSemaphores[i] = src->pWaitSemaphores[i];
+        }
+    }
+    if (bufferBindCount && src->pBufferBinds) {
+        pBufferBinds = new safe_VkSparseBufferMemoryBindInfo[bufferBindCount];
+        for (uint32_t i = 0; i < bufferBindCount; ++i) {
+            pBufferBinds[i].initialize(&src->pBufferBinds[i]);
+        }
+    }
+    if (imageOpaqueBindCount && src->pImageOpaqueBinds) {
+        pImageOpaqueBinds = new safe_VkSparseImageOpaqueMemoryBindInfo[imageOpaqueBindCount];
+        for (uint32_t i = 0; i < imageOpaqueBindCount; ++i) {
+            pImageOpaqueBinds[i].initialize(&src->pImageOpaqueBinds[i]);
+        }
+    }
+    if (imageBindCount && src->pImageBinds) {
+        pImageBinds = new safe_VkSparseImageMemoryBindInfo[imageBindCount];
+        for (uint32_t i = 0; i < imageBindCount; ++i) {
+            pImageBinds[i].initialize(&src->pImageBinds[i]);
+        }
+    }
+    if (signalSemaphoreCount && src->pSignalSemaphores) {
+        pSignalSemaphores = new VkSemaphore[signalSemaphoreCount];
+        for (uint32_t i = 0; i < signalSemaphoreCount; ++i) {
+            pSignalSemaphores[i] = src->pSignalSemaphores[i];
+        }
+    }
+}
+
+safe_VkFenceCreateInfo::safe_VkFenceCreateInfo(const VkFenceCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkFenceCreateInfo::safe_VkFenceCreateInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkFenceCreateInfo::safe_VkFenceCreateInfo(const safe_VkFenceCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkFenceCreateInfo& safe_VkFenceCreateInfo::operator=(const safe_VkFenceCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkFenceCreateInfo::~safe_VkFenceCreateInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkFenceCreateInfo::initialize(const VkFenceCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkFenceCreateInfo::initialize(const safe_VkFenceCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkSemaphoreCreateInfo::safe_VkSemaphoreCreateInfo(const VkSemaphoreCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkSemaphoreCreateInfo::safe_VkSemaphoreCreateInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkSemaphoreCreateInfo::safe_VkSemaphoreCreateInfo(const safe_VkSemaphoreCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkSemaphoreCreateInfo& safe_VkSemaphoreCreateInfo::operator=(const safe_VkSemaphoreCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkSemaphoreCreateInfo::~safe_VkSemaphoreCreateInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSemaphoreCreateInfo::initialize(const VkSemaphoreCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkSemaphoreCreateInfo::initialize(const safe_VkSemaphoreCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkEventCreateInfo::safe_VkEventCreateInfo(const VkEventCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkEventCreateInfo::safe_VkEventCreateInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkEventCreateInfo::safe_VkEventCreateInfo(const safe_VkEventCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkEventCreateInfo& safe_VkEventCreateInfo::operator=(const safe_VkEventCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkEventCreateInfo::~safe_VkEventCreateInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkEventCreateInfo::initialize(const VkEventCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkEventCreateInfo::initialize(const safe_VkEventCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkQueryPoolCreateInfo::safe_VkQueryPoolCreateInfo(const VkQueryPoolCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    queryType(in_struct->queryType),
+    queryCount(in_struct->queryCount),
+    pipelineStatistics(in_struct->pipelineStatistics)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkQueryPoolCreateInfo::safe_VkQueryPoolCreateInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkQueryPoolCreateInfo::safe_VkQueryPoolCreateInfo(const safe_VkQueryPoolCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    queryType = src.queryType;
+    queryCount = src.queryCount;
+    pipelineStatistics = src.pipelineStatistics;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkQueryPoolCreateInfo& safe_VkQueryPoolCreateInfo::operator=(const safe_VkQueryPoolCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    queryType = src.queryType;
+    queryCount = src.queryCount;
+    pipelineStatistics = src.pipelineStatistics;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkQueryPoolCreateInfo::~safe_VkQueryPoolCreateInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkQueryPoolCreateInfo::initialize(const VkQueryPoolCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    queryType = in_struct->queryType;
+    queryCount = in_struct->queryCount;
+    pipelineStatistics = in_struct->pipelineStatistics;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkQueryPoolCreateInfo::initialize(const safe_VkQueryPoolCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    queryType = src->queryType;
+    queryCount = src->queryCount;
+    pipelineStatistics = src->pipelineStatistics;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkBufferCreateInfo::safe_VkBufferCreateInfo(const VkBufferCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    size(in_struct->size),
+    usage(in_struct->usage),
+    sharingMode(in_struct->sharingMode),
+    queueFamilyIndexCount(in_struct->queueFamilyIndexCount),
+    pQueueFamilyIndices(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pQueueFamilyIndices) {
+        pQueueFamilyIndices = new uint32_t[in_struct->queueFamilyIndexCount];
+        memcpy ((void *)pQueueFamilyIndices, (void *)in_struct->pQueueFamilyIndices, sizeof(uint32_t)*in_struct->queueFamilyIndexCount);
+    }
+}
+
+safe_VkBufferCreateInfo::safe_VkBufferCreateInfo() :
+    pNext(nullptr),
+    pQueueFamilyIndices(nullptr)
+{}
+
+safe_VkBufferCreateInfo::safe_VkBufferCreateInfo(const safe_VkBufferCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    size = src.size;
+    usage = src.usage;
+    sharingMode = src.sharingMode;
+    queueFamilyIndexCount = src.queueFamilyIndexCount;
+    pQueueFamilyIndices = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pQueueFamilyIndices) {
+        pQueueFamilyIndices = new uint32_t[src.queueFamilyIndexCount];
+        memcpy ((void *)pQueueFamilyIndices, (void *)src.pQueueFamilyIndices, sizeof(uint32_t)*src.queueFamilyIndexCount);
+    }
+}
+
+safe_VkBufferCreateInfo& safe_VkBufferCreateInfo::operator=(const safe_VkBufferCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pQueueFamilyIndices)
+        delete[] pQueueFamilyIndices;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    size = src.size;
+    usage = src.usage;
+    sharingMode = src.sharingMode;
+    queueFamilyIndexCount = src.queueFamilyIndexCount;
+    pQueueFamilyIndices = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pQueueFamilyIndices) {
+        pQueueFamilyIndices = new uint32_t[src.queueFamilyIndexCount];
+        memcpy ((void *)pQueueFamilyIndices, (void *)src.pQueueFamilyIndices, sizeof(uint32_t)*src.queueFamilyIndexCount);
+    }
+
+    return *this;
+}
+
+safe_VkBufferCreateInfo::~safe_VkBufferCreateInfo()
+{
+    if (pQueueFamilyIndices)
+        delete[] pQueueFamilyIndices;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkBufferCreateInfo::initialize(const VkBufferCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    size = in_struct->size;
+    usage = in_struct->usage;
+    sharingMode = in_struct->sharingMode;
+    queueFamilyIndexCount = in_struct->queueFamilyIndexCount;
+    pQueueFamilyIndices = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pQueueFamilyIndices) {
+        pQueueFamilyIndices = new uint32_t[in_struct->queueFamilyIndexCount];
+        memcpy ((void *)pQueueFamilyIndices, (void *)in_struct->pQueueFamilyIndices, sizeof(uint32_t)*in_struct->queueFamilyIndexCount);
+    }
+}
+
+void safe_VkBufferCreateInfo::initialize(const safe_VkBufferCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    size = src->size;
+    usage = src->usage;
+    sharingMode = src->sharingMode;
+    queueFamilyIndexCount = src->queueFamilyIndexCount;
+    pQueueFamilyIndices = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pQueueFamilyIndices) {
+        pQueueFamilyIndices = new uint32_t[src->queueFamilyIndexCount];
+        memcpy ((void *)pQueueFamilyIndices, (void *)src->pQueueFamilyIndices, sizeof(uint32_t)*src->queueFamilyIndexCount);
+    }
+}
+
+safe_VkBufferViewCreateInfo::safe_VkBufferViewCreateInfo(const VkBufferViewCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    buffer(in_struct->buffer),
+    format(in_struct->format),
+    offset(in_struct->offset),
+    range(in_struct->range)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkBufferViewCreateInfo::safe_VkBufferViewCreateInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkBufferViewCreateInfo::safe_VkBufferViewCreateInfo(const safe_VkBufferViewCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    buffer = src.buffer;
+    format = src.format;
+    offset = src.offset;
+    range = src.range;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkBufferViewCreateInfo& safe_VkBufferViewCreateInfo::operator=(const safe_VkBufferViewCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    buffer = src.buffer;
+    format = src.format;
+    offset = src.offset;
+    range = src.range;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkBufferViewCreateInfo::~safe_VkBufferViewCreateInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkBufferViewCreateInfo::initialize(const VkBufferViewCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    buffer = in_struct->buffer;
+    format = in_struct->format;
+    offset = in_struct->offset;
+    range = in_struct->range;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkBufferViewCreateInfo::initialize(const safe_VkBufferViewCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    buffer = src->buffer;
+    format = src->format;
+    offset = src->offset;
+    range = src->range;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkImageCreateInfo::safe_VkImageCreateInfo(const VkImageCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    imageType(in_struct->imageType),
+    format(in_struct->format),
+    extent(in_struct->extent),
+    mipLevels(in_struct->mipLevels),
+    arrayLayers(in_struct->arrayLayers),
+    samples(in_struct->samples),
+    tiling(in_struct->tiling),
+    usage(in_struct->usage),
+    sharingMode(in_struct->sharingMode),
+    queueFamilyIndexCount(in_struct->queueFamilyIndexCount),
+    pQueueFamilyIndices(nullptr),
+    initialLayout(in_struct->initialLayout)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pQueueFamilyIndices) {
+        pQueueFamilyIndices = new uint32_t[in_struct->queueFamilyIndexCount];
+        memcpy ((void *)pQueueFamilyIndices, (void *)in_struct->pQueueFamilyIndices, sizeof(uint32_t)*in_struct->queueFamilyIndexCount);
+    }
+}
+
+safe_VkImageCreateInfo::safe_VkImageCreateInfo() :
+    pNext(nullptr),
+    pQueueFamilyIndices(nullptr)
+{}
+
+safe_VkImageCreateInfo::safe_VkImageCreateInfo(const safe_VkImageCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    imageType = src.imageType;
+    format = src.format;
+    extent = src.extent;
+    mipLevels = src.mipLevels;
+    arrayLayers = src.arrayLayers;
+    samples = src.samples;
+    tiling = src.tiling;
+    usage = src.usage;
+    sharingMode = src.sharingMode;
+    queueFamilyIndexCount = src.queueFamilyIndexCount;
+    pQueueFamilyIndices = nullptr;
+    initialLayout = src.initialLayout;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pQueueFamilyIndices) {
+        pQueueFamilyIndices = new uint32_t[src.queueFamilyIndexCount];
+        memcpy ((void *)pQueueFamilyIndices, (void *)src.pQueueFamilyIndices, sizeof(uint32_t)*src.queueFamilyIndexCount);
+    }
+}
+
+safe_VkImageCreateInfo& safe_VkImageCreateInfo::operator=(const safe_VkImageCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pQueueFamilyIndices)
+        delete[] pQueueFamilyIndices;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    imageType = src.imageType;
+    format = src.format;
+    extent = src.extent;
+    mipLevels = src.mipLevels;
+    arrayLayers = src.arrayLayers;
+    samples = src.samples;
+    tiling = src.tiling;
+    usage = src.usage;
+    sharingMode = src.sharingMode;
+    queueFamilyIndexCount = src.queueFamilyIndexCount;
+    pQueueFamilyIndices = nullptr;
+    initialLayout = src.initialLayout;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pQueueFamilyIndices) {
+        pQueueFamilyIndices = new uint32_t[src.queueFamilyIndexCount];
+        memcpy ((void *)pQueueFamilyIndices, (void *)src.pQueueFamilyIndices, sizeof(uint32_t)*src.queueFamilyIndexCount);
+    }
+
+    return *this;
+}
+
+safe_VkImageCreateInfo::~safe_VkImageCreateInfo()
+{
+    if (pQueueFamilyIndices)
+        delete[] pQueueFamilyIndices;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkImageCreateInfo::initialize(const VkImageCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    imageType = in_struct->imageType;
+    format = in_struct->format;
+    extent = in_struct->extent;
+    mipLevels = in_struct->mipLevels;
+    arrayLayers = in_struct->arrayLayers;
+    samples = in_struct->samples;
+    tiling = in_struct->tiling;
+    usage = in_struct->usage;
+    sharingMode = in_struct->sharingMode;
+    queueFamilyIndexCount = in_struct->queueFamilyIndexCount;
+    pQueueFamilyIndices = nullptr;
+    initialLayout = in_struct->initialLayout;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pQueueFamilyIndices) {
+        pQueueFamilyIndices = new uint32_t[in_struct->queueFamilyIndexCount];
+        memcpy ((void *)pQueueFamilyIndices, (void *)in_struct->pQueueFamilyIndices, sizeof(uint32_t)*in_struct->queueFamilyIndexCount);
+    }
+}
+
+void safe_VkImageCreateInfo::initialize(const safe_VkImageCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    imageType = src->imageType;
+    format = src->format;
+    extent = src->extent;
+    mipLevels = src->mipLevels;
+    arrayLayers = src->arrayLayers;
+    samples = src->samples;
+    tiling = src->tiling;
+    usage = src->usage;
+    sharingMode = src->sharingMode;
+    queueFamilyIndexCount = src->queueFamilyIndexCount;
+    pQueueFamilyIndices = nullptr;
+    initialLayout = src->initialLayout;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pQueueFamilyIndices) {
+        pQueueFamilyIndices = new uint32_t[src->queueFamilyIndexCount];
+        memcpy ((void *)pQueueFamilyIndices, (void *)src->pQueueFamilyIndices, sizeof(uint32_t)*src->queueFamilyIndexCount);
+    }
+}
+
+safe_VkImageViewCreateInfo::safe_VkImageViewCreateInfo(const VkImageViewCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    image(in_struct->image),
+    viewType(in_struct->viewType),
+    format(in_struct->format),
+    components(in_struct->components),
+    subresourceRange(in_struct->subresourceRange)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkImageViewCreateInfo::safe_VkImageViewCreateInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkImageViewCreateInfo::safe_VkImageViewCreateInfo(const safe_VkImageViewCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    image = src.image;
+    viewType = src.viewType;
+    format = src.format;
+    components = src.components;
+    subresourceRange = src.subresourceRange;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkImageViewCreateInfo& safe_VkImageViewCreateInfo::operator=(const safe_VkImageViewCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    image = src.image;
+    viewType = src.viewType;
+    format = src.format;
+    components = src.components;
+    subresourceRange = src.subresourceRange;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkImageViewCreateInfo::~safe_VkImageViewCreateInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkImageViewCreateInfo::initialize(const VkImageViewCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    image = in_struct->image;
+    viewType = in_struct->viewType;
+    format = in_struct->format;
+    components = in_struct->components;
+    subresourceRange = in_struct->subresourceRange;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkImageViewCreateInfo::initialize(const safe_VkImageViewCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    image = src->image;
+    viewType = src->viewType;
+    format = src->format;
+    components = src->components;
+    subresourceRange = src->subresourceRange;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkShaderModuleCreateInfo::safe_VkShaderModuleCreateInfo(const VkShaderModuleCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    codeSize(in_struct->codeSize),
+    pCode(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pCode) {
+        pCode = reinterpret_cast<uint32_t *>(new uint8_t[codeSize]);
+        memcpy((void *)pCode, (void *)in_struct->pCode, codeSize);
+    }
+}
+
+safe_VkShaderModuleCreateInfo::safe_VkShaderModuleCreateInfo() :
+    pNext(nullptr),
+    pCode(nullptr)
+{}
+
+safe_VkShaderModuleCreateInfo::safe_VkShaderModuleCreateInfo(const safe_VkShaderModuleCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    codeSize = src.codeSize;
+    pCode = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pCode) {
+        pCode = reinterpret_cast<uint32_t *>(new uint8_t[codeSize]);
+        memcpy((void *)pCode, (void *)src.pCode, codeSize);
+    }
+}
+
+safe_VkShaderModuleCreateInfo& safe_VkShaderModuleCreateInfo::operator=(const safe_VkShaderModuleCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pCode)
+        delete[] reinterpret_cast<const uint8_t *>(pCode);
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    codeSize = src.codeSize;
+    pCode = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pCode) {
+        pCode = reinterpret_cast<uint32_t *>(new uint8_t[codeSize]);
+        memcpy((void *)pCode, (void *)src.pCode, codeSize);
+    }
+
+    return *this;
+}
+
+safe_VkShaderModuleCreateInfo::~safe_VkShaderModuleCreateInfo()
+{
+    if (pCode)
+        delete[] reinterpret_cast<const uint8_t *>(pCode);
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkShaderModuleCreateInfo::initialize(const VkShaderModuleCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    codeSize = in_struct->codeSize;
+    pCode = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pCode) {
+        pCode = reinterpret_cast<uint32_t *>(new uint8_t[codeSize]);
+        memcpy((void *)pCode, (void *)in_struct->pCode, codeSize);
+    }
+}
+
+void safe_VkShaderModuleCreateInfo::initialize(const safe_VkShaderModuleCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    codeSize = src->codeSize;
+    pCode = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pCode) {
+        pCode = reinterpret_cast<uint32_t *>(new uint8_t[codeSize]);
+        memcpy((void *)pCode, (void *)src->pCode, codeSize);
+    }
+}
+
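+// Note: unlike the byte-for-byte copy of pCode above, pInitialData is kept as
+// a shallow pointer copy; only the pNext chain is duplicated for this struct.
+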
+safe_VkPipelineCacheCreateInfo::safe_VkPipelineCacheCreateInfo(const VkPipelineCacheCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    initialDataSize(in_struct->initialDataSize),
+    pInitialData(in_struct->pInitialData)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPipelineCacheCreateInfo::safe_VkPipelineCacheCreateInfo() :
+    pNext(nullptr),
+    pInitialData(nullptr)
+{}
+
+safe_VkPipelineCacheCreateInfo::safe_VkPipelineCacheCreateInfo(const safe_VkPipelineCacheCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    initialDataSize = src.initialDataSize;
+    pInitialData = src.pInitialData;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPipelineCacheCreateInfo& safe_VkPipelineCacheCreateInfo::operator=(const safe_VkPipelineCacheCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    initialDataSize = src.initialDataSize;
+    pInitialData = src.pInitialData;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPipelineCacheCreateInfo::~safe_VkPipelineCacheCreateInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineCacheCreateInfo::initialize(const VkPipelineCacheCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    initialDataSize = in_struct->initialDataSize;
+    pInitialData = in_struct->pInitialData;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPipelineCacheCreateInfo::initialize(const safe_VkPipelineCacheCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    initialDataSize = src->initialDataSize;
+    pInitialData = src->pInitialData;
+    pNext = SafePnextCopy(src->pNext);
+}
+
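+// pMapEntries is deep-copied as a POD array; pData, like pInitialData above,
+// remains a shallow pointer into caller-provided memory.
+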
+safe_VkSpecializationInfo::safe_VkSpecializationInfo(const VkSpecializationInfo* in_struct) :
+    mapEntryCount(in_struct->mapEntryCount),
+    pMapEntries(nullptr),
+    dataSize(in_struct->dataSize),
+    pData(in_struct->pData)
+{
+    if (in_struct->pMapEntries) {
+        pMapEntries = new VkSpecializationMapEntry[in_struct->mapEntryCount];
+        memcpy ((void *)pMapEntries, (void *)in_struct->pMapEntries, sizeof(VkSpecializationMapEntry)*in_struct->mapEntryCount);
+    }
+}
+
+safe_VkSpecializationInfo::safe_VkSpecializationInfo() :
+    pMapEntries(nullptr),
+    pData(nullptr)
+{}
+
+safe_VkSpecializationInfo::safe_VkSpecializationInfo(const safe_VkSpecializationInfo& src)
+{
+    mapEntryCount = src.mapEntryCount;
+    pMapEntries = nullptr;
+    dataSize = src.dataSize;
+    pData = src.pData;
+    if (src.pMapEntries) {
+        pMapEntries = new VkSpecializationMapEntry[src.mapEntryCount];
+        memcpy ((void *)pMapEntries, (void *)src.pMapEntries, sizeof(VkSpecializationMapEntry)*src.mapEntryCount);
+    }
+}
+
+safe_VkSpecializationInfo& safe_VkSpecializationInfo::operator=(const safe_VkSpecializationInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pMapEntries)
+        delete[] pMapEntries;
+
+    mapEntryCount = src.mapEntryCount;
+    pMapEntries = nullptr;
+    dataSize = src.dataSize;
+    pData = src.pData;
+    if (src.pMapEntries) {
+        pMapEntries = new VkSpecializationMapEntry[src.mapEntryCount];
+        memcpy ((void *)pMapEntries, (void *)src.pMapEntries, sizeof(VkSpecializationMapEntry)*src.mapEntryCount);
+    }
+
+    return *this;
+}
+
+safe_VkSpecializationInfo::~safe_VkSpecializationInfo()
+{
+    if (pMapEntries)
+        delete[] pMapEntries;
+}
+
+void safe_VkSpecializationInfo::initialize(const VkSpecializationInfo* in_struct)
+{
+    mapEntryCount = in_struct->mapEntryCount;
+    pMapEntries = nullptr;
+    dataSize = in_struct->dataSize;
+    pData = in_struct->pData;
+    if (in_struct->pMapEntries) {
+        pMapEntries = new VkSpecializationMapEntry[in_struct->mapEntryCount];
+        memcpy ((void *)pMapEntries, (void *)in_struct->pMapEntries, sizeof(VkSpecializationMapEntry)*in_struct->mapEntryCount);
+    }
+}
+
+void safe_VkSpecializationInfo::initialize(const safe_VkSpecializationInfo* src)
+{
+    mapEntryCount = src->mapEntryCount;
+    pMapEntries = nullptr;
+    dataSize = src->dataSize;
+    pData = src->pData;
+    if (src->pMapEntries) {
+        pMapEntries = new VkSpecializationMapEntry[src->mapEntryCount];
+        memcpy ((void *)pMapEntries, (void *)src->pMapEntries, sizeof(VkSpecializationMapEntry)*src->mapEntryCount);
+    }
+}
+
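+// pName is duplicated with SafeStringCopy() and freed with delete[]; the
+// optional pSpecializationInfo is wrapped in its own nested safe struct.
+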
+safe_VkPipelineShaderStageCreateInfo::safe_VkPipelineShaderStageCreateInfo(const VkPipelineShaderStageCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    stage(in_struct->stage),
+    module(in_struct->module),
+    pSpecializationInfo(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    pName = SafeStringCopy(in_struct->pName);
+    if (in_struct->pSpecializationInfo)
+        pSpecializationInfo = new safe_VkSpecializationInfo(in_struct->pSpecializationInfo);
+}
+
+safe_VkPipelineShaderStageCreateInfo::safe_VkPipelineShaderStageCreateInfo() :
+    pNext(nullptr),
+    pName(nullptr),
+    pSpecializationInfo(nullptr)
+{}
+
+safe_VkPipelineShaderStageCreateInfo::safe_VkPipelineShaderStageCreateInfo(const safe_VkPipelineShaderStageCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    stage = src.stage;
+    module = src.module;
+    pSpecializationInfo = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    pName = SafeStringCopy(src.pName);
+    if (src.pSpecializationInfo)
+        pSpecializationInfo = new safe_VkSpecializationInfo(*src.pSpecializationInfo);
+}
+
+safe_VkPipelineShaderStageCreateInfo& safe_VkPipelineShaderStageCreateInfo::operator=(const safe_VkPipelineShaderStageCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pName) delete [] pName;
+    if (pSpecializationInfo)
+        delete pSpecializationInfo;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    stage = src.stage;
+    module = src.module;
+    pSpecializationInfo = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    pName = SafeStringCopy(src.pName);
+    if (src.pSpecializationInfo)
+        pSpecializationInfo = new safe_VkSpecializationInfo(*src.pSpecializationInfo);
+
+    return *this;
+}
+
+safe_VkPipelineShaderStageCreateInfo::~safe_VkPipelineShaderStageCreateInfo()
+{
+    if (pName) delete [] pName;
+    if (pSpecializationInfo)
+        delete pSpecializationInfo;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineShaderStageCreateInfo::initialize(const VkPipelineShaderStageCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    stage = in_struct->stage;
+    module = in_struct->module;
+    pSpecializationInfo = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    pName = SafeStringCopy(in_struct->pName);
+    if (in_struct->pSpecializationInfo)
+        pSpecializationInfo = new safe_VkSpecializationInfo(in_struct->pSpecializationInfo);
+}
+
+void safe_VkPipelineShaderStageCreateInfo::initialize(const safe_VkPipelineShaderStageCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    stage = src->stage;
+    module = src->module;
+    pSpecializationInfo = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    pName = SafeStringCopy(src->pName);
+    if (src->pSpecializationInfo)
+        pSpecializationInfo = new safe_VkSpecializationInfo(*src->pSpecializationInfo);
+}
+
+safe_VkPipelineVertexInputStateCreateInfo::safe_VkPipelineVertexInputStateCreateInfo(const VkPipelineVertexInputStateCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    vertexBindingDescriptionCount(in_struct->vertexBindingDescriptionCount),
+    pVertexBindingDescriptions(nullptr),
+    vertexAttributeDescriptionCount(in_struct->vertexAttributeDescriptionCount),
+    pVertexAttributeDescriptions(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pVertexBindingDescriptions) {
+        pVertexBindingDescriptions = new VkVertexInputBindingDescription[in_struct->vertexBindingDescriptionCount];
+        memcpy ((void *)pVertexBindingDescriptions, (void *)in_struct->pVertexBindingDescriptions, sizeof(VkVertexInputBindingDescription)*in_struct->vertexBindingDescriptionCount);
+    }
+    if (in_struct->pVertexAttributeDescriptions) {
+        pVertexAttributeDescriptions = new VkVertexInputAttributeDescription[in_struct->vertexAttributeDescriptionCount];
+        memcpy ((void *)pVertexAttributeDescriptions, (void *)in_struct->pVertexAttributeDescriptions, sizeof(VkVertexInputAttributeDescription)*in_struct->vertexAttributeDescriptionCount);
+    }
+}
+
+safe_VkPipelineVertexInputStateCreateInfo::safe_VkPipelineVertexInputStateCreateInfo() :
+    pNext(nullptr),
+    pVertexBindingDescriptions(nullptr),
+    pVertexAttributeDescriptions(nullptr)
+{}
+
+safe_VkPipelineVertexInputStateCreateInfo::safe_VkPipelineVertexInputStateCreateInfo(const safe_VkPipelineVertexInputStateCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    vertexBindingDescriptionCount = src.vertexBindingDescriptionCount;
+    pVertexBindingDescriptions = nullptr;
+    vertexAttributeDescriptionCount = src.vertexAttributeDescriptionCount;
+    pVertexAttributeDescriptions = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pVertexBindingDescriptions) {
+        pVertexBindingDescriptions = new VkVertexInputBindingDescription[src.vertexBindingDescriptionCount];
+        memcpy ((void *)pVertexBindingDescriptions, (void *)src.pVertexBindingDescriptions, sizeof(VkVertexInputBindingDescription)*src.vertexBindingDescriptionCount);
+    }
+    if (src.pVertexAttributeDescriptions) {
+        pVertexAttributeDescriptions = new VkVertexInputAttributeDescription[src.vertexAttributeDescriptionCount];
+        memcpy ((void *)pVertexAttributeDescriptions, (void *)src.pVertexAttributeDescriptions, sizeof(VkVertexInputAttributeDescription)*src.vertexAttributeDescriptionCount);
+    }
+}
+
+safe_VkPipelineVertexInputStateCreateInfo& safe_VkPipelineVertexInputStateCreateInfo::operator=(const safe_VkPipelineVertexInputStateCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pVertexBindingDescriptions)
+        delete[] pVertexBindingDescriptions;
+    if (pVertexAttributeDescriptions)
+        delete[] pVertexAttributeDescriptions;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    vertexBindingDescriptionCount = src.vertexBindingDescriptionCount;
+    pVertexBindingDescriptions = nullptr;
+    vertexAttributeDescriptionCount = src.vertexAttributeDescriptionCount;
+    pVertexAttributeDescriptions = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pVertexBindingDescriptions) {
+        pVertexBindingDescriptions = new VkVertexInputBindingDescription[src.vertexBindingDescriptionCount];
+        memcpy ((void *)pVertexBindingDescriptions, (void *)src.pVertexBindingDescriptions, sizeof(VkVertexInputBindingDescription)*src.vertexBindingDescriptionCount);
+    }
+    if (src.pVertexAttributeDescriptions) {
+        pVertexAttributeDescriptions = new VkVertexInputAttributeDescription[src.vertexAttributeDescriptionCount];
+        memcpy ((void *)pVertexAttributeDescriptions, (void *)src.pVertexAttributeDescriptions, sizeof(VkVertexInputAttributeDescription)*src.vertexAttributeDescriptionCount);
+    }
+
+    return *this;
+}
+
+safe_VkPipelineVertexInputStateCreateInfo::~safe_VkPipelineVertexInputStateCreateInfo()
+{
+    if (pVertexBindingDescriptions)
+        delete[] pVertexBindingDescriptions;
+    if (pVertexAttributeDescriptions)
+        delete[] pVertexAttributeDescriptions;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineVertexInputStateCreateInfo::initialize(const VkPipelineVertexInputStateCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    vertexBindingDescriptionCount = in_struct->vertexBindingDescriptionCount;
+    pVertexBindingDescriptions = nullptr;
+    vertexAttributeDescriptionCount = in_struct->vertexAttributeDescriptionCount;
+    pVertexAttributeDescriptions = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pVertexBindingDescriptions) {
+        pVertexBindingDescriptions = new VkVertexInputBindingDescription[in_struct->vertexBindingDescriptionCount];
+        memcpy ((void *)pVertexBindingDescriptions, (void *)in_struct->pVertexBindingDescriptions, sizeof(VkVertexInputBindingDescription)*in_struct->vertexBindingDescriptionCount);
+    }
+    if (in_struct->pVertexAttributeDescriptions) {
+        pVertexAttributeDescriptions = new VkVertexInputAttributeDescription[in_struct->vertexAttributeDescriptionCount];
+        memcpy ((void *)pVertexAttributeDescriptions, (void *)in_struct->pVertexAttributeDescriptions, sizeof(VkVertexInputAttributeDescription)*in_struct->vertexAttributeDescriptionCount);
+    }
+}
+
+void safe_VkPipelineVertexInputStateCreateInfo::initialize(const safe_VkPipelineVertexInputStateCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    vertexBindingDescriptionCount = src->vertexBindingDescriptionCount;
+    pVertexBindingDescriptions = nullptr;
+    vertexAttributeDescriptionCount = src->vertexAttributeDescriptionCount;
+    pVertexAttributeDescriptions = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pVertexBindingDescriptions) {
+        pVertexBindingDescriptions = new VkVertexInputBindingDescription[src->vertexBindingDescriptionCount];
+        memcpy ((void *)pVertexBindingDescriptions, (void *)src->pVertexBindingDescriptions, sizeof(VkVertexInputBindingDescription)*src->vertexBindingDescriptionCount);
+    }
+    if (src->pVertexAttributeDescriptions) {
+        pVertexAttributeDescriptions = new VkVertexInputAttributeDescription[src->vertexAttributeDescriptionCount];
+        memcpy ((void *)pVertexAttributeDescriptions, (void *)src->pVertexAttributeDescriptions, sizeof(VkVertexInputAttributeDescription)*src->vertexAttributeDescriptionCount);
+    }
+}
+
+safe_VkPipelineInputAssemblyStateCreateInfo::safe_VkPipelineInputAssemblyStateCreateInfo(const VkPipelineInputAssemblyStateCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    topology(in_struct->topology),
+    primitiveRestartEnable(in_struct->primitiveRestartEnable)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPipelineInputAssemblyStateCreateInfo::safe_VkPipelineInputAssemblyStateCreateInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkPipelineInputAssemblyStateCreateInfo::safe_VkPipelineInputAssemblyStateCreateInfo(const safe_VkPipelineInputAssemblyStateCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    topology = src.topology;
+    primitiveRestartEnable = src.primitiveRestartEnable;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPipelineInputAssemblyStateCreateInfo& safe_VkPipelineInputAssemblyStateCreateInfo::operator=(const safe_VkPipelineInputAssemblyStateCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    topology = src.topology;
+    primitiveRestartEnable = src.primitiveRestartEnable;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPipelineInputAssemblyStateCreateInfo::~safe_VkPipelineInputAssemblyStateCreateInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineInputAssemblyStateCreateInfo::initialize(const VkPipelineInputAssemblyStateCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    topology = in_struct->topology;
+    primitiveRestartEnable = in_struct->primitiveRestartEnable;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPipelineInputAssemblyStateCreateInfo::initialize(const safe_VkPipelineInputAssemblyStateCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    topology = src->topology;
+    primitiveRestartEnable = src->primitiveRestartEnable;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPipelineTessellationStateCreateInfo::safe_VkPipelineTessellationStateCreateInfo(const VkPipelineTessellationStateCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    patchControlPoints(in_struct->patchControlPoints)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPipelineTessellationStateCreateInfo::safe_VkPipelineTessellationStateCreateInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkPipelineTessellationStateCreateInfo::safe_VkPipelineTessellationStateCreateInfo(const safe_VkPipelineTessellationStateCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    patchControlPoints = src.patchControlPoints;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPipelineTessellationStateCreateInfo& safe_VkPipelineTessellationStateCreateInfo::operator=(const safe_VkPipelineTessellationStateCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    patchControlPoints = src.patchControlPoints;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPipelineTessellationStateCreateInfo::~safe_VkPipelineTessellationStateCreateInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineTessellationStateCreateInfo::initialize(const VkPipelineTessellationStateCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    patchControlPoints = in_struct->patchControlPoints;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPipelineTessellationStateCreateInfo::initialize(const safe_VkPipelineTessellationStateCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    patchControlPoints = src->patchControlPoints;
+    pNext = SafePnextCopy(src->pNext);
+}
+
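+// Viewports and scissors are copied only when they are not dynamic state:
+// with VK_DYNAMIC_STATE_VIEWPORT / VK_DYNAMIC_STATE_SCISSOR the spec ignores
+// those pointers, so the is_dynamic_* flags suppress the copy.
+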
+safe_VkPipelineViewportStateCreateInfo::safe_VkPipelineViewportStateCreateInfo(const VkPipelineViewportStateCreateInfo* in_struct, const bool is_dynamic_viewports, const bool is_dynamic_scissors) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    viewportCount(in_struct->viewportCount),
+    pViewports(nullptr),
+    scissorCount(in_struct->scissorCount),
+    pScissors(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pViewports && !is_dynamic_viewports) {
+        pViewports = new VkViewport[in_struct->viewportCount];
+        memcpy ((void *)pViewports, (void *)in_struct->pViewports, sizeof(VkViewport)*in_struct->viewportCount);
+    }
+    else
+        pViewports = NULL;
+    if (in_struct->pScissors && !is_dynamic_scissors) {
+        pScissors = new VkRect2D[in_struct->scissorCount];
+        memcpy ((void *)pScissors, (void *)in_struct->pScissors, sizeof(VkRect2D)*in_struct->scissorCount);
+    }
+    else
+        pScissors = NULL;
+}
+
+safe_VkPipelineViewportStateCreateInfo::safe_VkPipelineViewportStateCreateInfo() :
+    pNext(nullptr),
+    pViewports(nullptr),
+    pScissors(nullptr)
+{}
+
+safe_VkPipelineViewportStateCreateInfo::safe_VkPipelineViewportStateCreateInfo(const safe_VkPipelineViewportStateCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    viewportCount = src.viewportCount;
+    pViewports = nullptr;
+    scissorCount = src.scissorCount;
+    pScissors = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pViewports) {
+        pViewports = new VkViewport[src.viewportCount];
+        memcpy ((void *)pViewports, (void *)src.pViewports, sizeof(VkViewport)*src.viewportCount);
+    }
+    else
+        pViewports = NULL;
+    if (src.pScissors) {
+        pScissors = new VkRect2D[src.scissorCount];
+        memcpy ((void *)pScissors, (void *)src.pScissors, sizeof(VkRect2D)*src.scissorCount);
+    }
+    else
+        pScissors = NULL;
+}
+
+safe_VkPipelineViewportStateCreateInfo& safe_VkPipelineViewportStateCreateInfo::operator=(const safe_VkPipelineViewportStateCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pViewports)
+        delete[] pViewports;
+    if (pScissors)
+        delete[] pScissors;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    viewportCount = src.viewportCount;
+    pViewports = nullptr;
+    scissorCount = src.scissorCount;
+    pScissors = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pViewports) {
+        pViewports = new VkViewport[src.viewportCount];
+        memcpy ((void *)pViewports, (void *)src.pViewports, sizeof(VkViewport)*src.viewportCount);
+    }
+    else
+        pViewports = NULL;
+    if (src.pScissors) {
+        pScissors = new VkRect2D[src.scissorCount];
+        memcpy ((void *)pScissors, (void *)src.pScissors, sizeof(VkRect2D)*src.scissorCount);
+    }
+    else
+        pScissors = NULL;
+
+    return *this;
+}
+
+safe_VkPipelineViewportStateCreateInfo::~safe_VkPipelineViewportStateCreateInfo()
+{
+    if (pViewports)
+        delete[] pViewports;
+    if (pScissors)
+        delete[] pScissors;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineViewportStateCreateInfo::initialize(const VkPipelineViewportStateCreateInfo* in_struct, const bool is_dynamic_viewports, const bool is_dynamic_scissors)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    viewportCount = in_struct->viewportCount;
+    pViewports = nullptr;
+    scissorCount = in_struct->scissorCount;
+    pScissors = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pViewports && !is_dynamic_viewports) {
+        pViewports = new VkViewport[in_struct->viewportCount];
+        memcpy ((void *)pViewports, (void *)in_struct->pViewports, sizeof(VkViewport)*in_struct->viewportCount);
+    }
+    else
+        pViewports = NULL;
+    if (in_struct->pScissors && !is_dynamic_scissors) {
+        pScissors = new VkRect2D[in_struct->scissorCount];
+        memcpy ((void *)pScissors, (void *)in_struct->pScissors, sizeof(VkRect2D)*in_struct->scissorCount);
+    }
+    else
+        pScissors = NULL;
+}
+
+void safe_VkPipelineViewportStateCreateInfo::initialize(const safe_VkPipelineViewportStateCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    viewportCount = src->viewportCount;
+    pViewports = nullptr;
+    scissorCount = src->scissorCount;
+    pScissors = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pViewports) {
+        pViewports = new VkViewport[src->viewportCount];
+        memcpy ((void *)pViewports, (void *)src->pViewports, sizeof(VkViewport)*src->viewportCount);
+    }
+    else
+        pViewports = NULL;
+    if (src->pScissors) {
+        pScissors = new VkRect2D[src->scissorCount];
+        memcpy ((void *)pScissors, (void *)src->pScissors, sizeof(VkRect2D)*src->scissorCount);
+    }
+    else
+        pScissors = NULL;
+}
+
+safe_VkPipelineRasterizationStateCreateInfo::safe_VkPipelineRasterizationStateCreateInfo(const VkPipelineRasterizationStateCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    depthClampEnable(in_struct->depthClampEnable),
+    rasterizerDiscardEnable(in_struct->rasterizerDiscardEnable),
+    polygonMode(in_struct->polygonMode),
+    cullMode(in_struct->cullMode),
+    frontFace(in_struct->frontFace),
+    depthBiasEnable(in_struct->depthBiasEnable),
+    depthBiasConstantFactor(in_struct->depthBiasConstantFactor),
+    depthBiasClamp(in_struct->depthBiasClamp),
+    depthBiasSlopeFactor(in_struct->depthBiasSlopeFactor),
+    lineWidth(in_struct->lineWidth)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPipelineRasterizationStateCreateInfo::safe_VkPipelineRasterizationStateCreateInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkPipelineRasterizationStateCreateInfo::safe_VkPipelineRasterizationStateCreateInfo(const safe_VkPipelineRasterizationStateCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    depthClampEnable = src.depthClampEnable;
+    rasterizerDiscardEnable = src.rasterizerDiscardEnable;
+    polygonMode = src.polygonMode;
+    cullMode = src.cullMode;
+    frontFace = src.frontFace;
+    depthBiasEnable = src.depthBiasEnable;
+    depthBiasConstantFactor = src.depthBiasConstantFactor;
+    depthBiasClamp = src.depthBiasClamp;
+    depthBiasSlopeFactor = src.depthBiasSlopeFactor;
+    lineWidth = src.lineWidth;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPipelineRasterizationStateCreateInfo& safe_VkPipelineRasterizationStateCreateInfo::operator=(const safe_VkPipelineRasterizationStateCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    depthClampEnable = src.depthClampEnable;
+    rasterizerDiscardEnable = src.rasterizerDiscardEnable;
+    polygonMode = src.polygonMode;
+    cullMode = src.cullMode;
+    frontFace = src.frontFace;
+    depthBiasEnable = src.depthBiasEnable;
+    depthBiasConstantFactor = src.depthBiasConstantFactor;
+    depthBiasClamp = src.depthBiasClamp;
+    depthBiasSlopeFactor = src.depthBiasSlopeFactor;
+    lineWidth = src.lineWidth;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPipelineRasterizationStateCreateInfo::~safe_VkPipelineRasterizationStateCreateInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineRasterizationStateCreateInfo::initialize(const VkPipelineRasterizationStateCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    depthClampEnable = in_struct->depthClampEnable;
+    rasterizerDiscardEnable = in_struct->rasterizerDiscardEnable;
+    polygonMode = in_struct->polygonMode;
+    cullMode = in_struct->cullMode;
+    frontFace = in_struct->frontFace;
+    depthBiasEnable = in_struct->depthBiasEnable;
+    depthBiasConstantFactor = in_struct->depthBiasConstantFactor;
+    depthBiasClamp = in_struct->depthBiasClamp;
+    depthBiasSlopeFactor = in_struct->depthBiasSlopeFactor;
+    lineWidth = in_struct->lineWidth;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPipelineRasterizationStateCreateInfo::initialize(const safe_VkPipelineRasterizationStateCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    depthClampEnable = src->depthClampEnable;
+    rasterizerDiscardEnable = src->rasterizerDiscardEnable;
+    polygonMode = src->polygonMode;
+    cullMode = src->cullMode;
+    frontFace = src->frontFace;
+    depthBiasEnable = src->depthBiasEnable;
+    depthBiasConstantFactor = src->depthBiasConstantFactor;
+    depthBiasClamp = src->depthBiasClamp;
+    depthBiasSlopeFactor = src->depthBiasSlopeFactor;
+    lineWidth = src->lineWidth;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPipelineMultisampleStateCreateInfo::safe_VkPipelineMultisampleStateCreateInfo(const VkPipelineMultisampleStateCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    rasterizationSamples(in_struct->rasterizationSamples),
+    sampleShadingEnable(in_struct->sampleShadingEnable),
+    minSampleShading(in_struct->minSampleShading),
+    pSampleMask(nullptr),
+    alphaToCoverageEnable(in_struct->alphaToCoverageEnable),
+    alphaToOneEnable(in_struct->alphaToOneEnable)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pSampleMask) {
+        pSampleMask = new VkSampleMask(*in_struct->pSampleMask);
+    }
+}
+
+safe_VkPipelineMultisampleStateCreateInfo::safe_VkPipelineMultisampleStateCreateInfo() :
+    pNext(nullptr),
+    pSampleMask(nullptr)
+{}
+
+safe_VkPipelineMultisampleStateCreateInfo::safe_VkPipelineMultisampleStateCreateInfo(const safe_VkPipelineMultisampleStateCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    rasterizationSamples = src.rasterizationSamples;
+    sampleShadingEnable = src.sampleShadingEnable;
+    minSampleShading = src.minSampleShading;
+    pSampleMask = nullptr;
+    alphaToCoverageEnable = src.alphaToCoverageEnable;
+    alphaToOneEnable = src.alphaToOneEnable;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pSampleMask) {
+        pSampleMask = new VkSampleMask(*src.pSampleMask);
+    }
+}
+
+safe_VkPipelineMultisampleStateCreateInfo& safe_VkPipelineMultisampleStateCreateInfo::operator=(const safe_VkPipelineMultisampleStateCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pSampleMask)
+        delete pSampleMask;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    rasterizationSamples = src.rasterizationSamples;
+    sampleShadingEnable = src.sampleShadingEnable;
+    minSampleShading = src.minSampleShading;
+    pSampleMask = nullptr;
+    alphaToCoverageEnable = src.alphaToCoverageEnable;
+    alphaToOneEnable = src.alphaToOneEnable;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pSampleMask) {
+        pSampleMask = new VkSampleMask(*src.pSampleMask);
+    }
+
+    return *this;
+}
+
+safe_VkPipelineMultisampleStateCreateInfo::~safe_VkPipelineMultisampleStateCreateInfo()
+{
+    if (pSampleMask)
+        delete pSampleMask;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineMultisampleStateCreateInfo::initialize(const VkPipelineMultisampleStateCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    rasterizationSamples = in_struct->rasterizationSamples;
+    sampleShadingEnable = in_struct->sampleShadingEnable;
+    minSampleShading = in_struct->minSampleShading;
+    pSampleMask = nullptr;
+    alphaToCoverageEnable = in_struct->alphaToCoverageEnable;
+    alphaToOneEnable = in_struct->alphaToOneEnable;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pSampleMask) {
+        pSampleMask = new VkSampleMask(*in_struct->pSampleMask);
+    }
+}
+
+void safe_VkPipelineMultisampleStateCreateInfo::initialize(const safe_VkPipelineMultisampleStateCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    rasterizationSamples = src->rasterizationSamples;
+    sampleShadingEnable = src->sampleShadingEnable;
+    minSampleShading = src->minSampleShading;
+    pSampleMask = nullptr;
+    alphaToCoverageEnable = src->alphaToCoverageEnable;
+    alphaToOneEnable = src->alphaToOneEnable;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pSampleMask) {
+        pSampleMask = new VkSampleMask(*src->pSampleMask);
+    }
+}
+
+safe_VkPipelineDepthStencilStateCreateInfo::safe_VkPipelineDepthStencilStateCreateInfo(const VkPipelineDepthStencilStateCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    depthTestEnable(in_struct->depthTestEnable),
+    depthWriteEnable(in_struct->depthWriteEnable),
+    depthCompareOp(in_struct->depthCompareOp),
+    depthBoundsTestEnable(in_struct->depthBoundsTestEnable),
+    stencilTestEnable(in_struct->stencilTestEnable),
+    front(in_struct->front),
+    back(in_struct->back),
+    minDepthBounds(in_struct->minDepthBounds),
+    maxDepthBounds(in_struct->maxDepthBounds)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPipelineDepthStencilStateCreateInfo::safe_VkPipelineDepthStencilStateCreateInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkPipelineDepthStencilStateCreateInfo::safe_VkPipelineDepthStencilStateCreateInfo(const safe_VkPipelineDepthStencilStateCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    depthTestEnable = src.depthTestEnable;
+    depthWriteEnable = src.depthWriteEnable;
+    depthCompareOp = src.depthCompareOp;
+    depthBoundsTestEnable = src.depthBoundsTestEnable;
+    stencilTestEnable = src.stencilTestEnable;
+    front = src.front;
+    back = src.back;
+    minDepthBounds = src.minDepthBounds;
+    maxDepthBounds = src.maxDepthBounds;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPipelineDepthStencilStateCreateInfo& safe_VkPipelineDepthStencilStateCreateInfo::operator=(const safe_VkPipelineDepthStencilStateCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    depthTestEnable = src.depthTestEnable;
+    depthWriteEnable = src.depthWriteEnable;
+    depthCompareOp = src.depthCompareOp;
+    depthBoundsTestEnable = src.depthBoundsTestEnable;
+    stencilTestEnable = src.stencilTestEnable;
+    front = src.front;
+    back = src.back;
+    minDepthBounds = src.minDepthBounds;
+    maxDepthBounds = src.maxDepthBounds;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPipelineDepthStencilStateCreateInfo::~safe_VkPipelineDepthStencilStateCreateInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineDepthStencilStateCreateInfo::initialize(const VkPipelineDepthStencilStateCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    depthTestEnable = in_struct->depthTestEnable;
+    depthWriteEnable = in_struct->depthWriteEnable;
+    depthCompareOp = in_struct->depthCompareOp;
+    depthBoundsTestEnable = in_struct->depthBoundsTestEnable;
+    stencilTestEnable = in_struct->stencilTestEnable;
+    front = in_struct->front;
+    back = in_struct->back;
+    minDepthBounds = in_struct->minDepthBounds;
+    maxDepthBounds = in_struct->maxDepthBounds;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPipelineDepthStencilStateCreateInfo::initialize(const safe_VkPipelineDepthStencilStateCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    depthTestEnable = src->depthTestEnable;
+    depthWriteEnable = src->depthWriteEnable;
+    depthCompareOp = src->depthCompareOp;
+    depthBoundsTestEnable = src->depthBoundsTestEnable;
+    stencilTestEnable = src->stencilTestEnable;
+    front = src->front;
+    back = src->back;
+    minDepthBounds = src->minDepthBounds;
+    maxDepthBounds = src->maxDepthBounds;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPipelineColorBlendStateCreateInfo::safe_VkPipelineColorBlendStateCreateInfo(const VkPipelineColorBlendStateCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    logicOpEnable(in_struct->logicOpEnable),
+    logicOp(in_struct->logicOp),
+    attachmentCount(in_struct->attachmentCount),
+    pAttachments(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pAttachments) {
+        pAttachments = new VkPipelineColorBlendAttachmentState[in_struct->attachmentCount];
+        memcpy ((void *)pAttachments, (void *)in_struct->pAttachments, sizeof(VkPipelineColorBlendAttachmentState)*in_struct->attachmentCount);
+    }
+    for (uint32_t i = 0; i < 4; ++i) {
+        blendConstants[i] = in_struct->blendConstants[i];
+    }
+}
+
+safe_VkPipelineColorBlendStateCreateInfo::safe_VkPipelineColorBlendStateCreateInfo() :
+    pNext(nullptr),
+    pAttachments(nullptr)
+{}
+
+safe_VkPipelineColorBlendStateCreateInfo::safe_VkPipelineColorBlendStateCreateInfo(const safe_VkPipelineColorBlendStateCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    logicOpEnable = src.logicOpEnable;
+    logicOp = src.logicOp;
+    attachmentCount = src.attachmentCount;
+    pAttachments = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pAttachments) {
+        pAttachments = new VkPipelineColorBlendAttachmentState[src.attachmentCount];
+        memcpy ((void *)pAttachments, (void *)src.pAttachments, sizeof(VkPipelineColorBlendAttachmentState)*src.attachmentCount);
+    }
+    for (uint32_t i = 0; i < 4; ++i) {
+        blendConstants[i] = src.blendConstants[i];
+    }
+}
+
+safe_VkPipelineColorBlendStateCreateInfo& safe_VkPipelineColorBlendStateCreateInfo::operator=(const safe_VkPipelineColorBlendStateCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pAttachments)
+        delete[] pAttachments;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    logicOpEnable = src.logicOpEnable;
+    logicOp = src.logicOp;
+    attachmentCount = src.attachmentCount;
+    pAttachments = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pAttachments) {
+        pAttachments = new VkPipelineColorBlendAttachmentState[src.attachmentCount];
+        memcpy ((void *)pAttachments, (void *)src.pAttachments, sizeof(VkPipelineColorBlendAttachmentState)*src.attachmentCount);
+    }
+    for (uint32_t i = 0; i < 4; ++i) {
+        blendConstants[i] = src.blendConstants[i];
+    }
+
+    return *this;
+}
+
+safe_VkPipelineColorBlendStateCreateInfo::~safe_VkPipelineColorBlendStateCreateInfo()
+{
+    if (pAttachments)
+        delete[] pAttachments;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineColorBlendStateCreateInfo::initialize(const VkPipelineColorBlendStateCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    logicOpEnable = in_struct->logicOpEnable;
+    logicOp = in_struct->logicOp;
+    attachmentCount = in_struct->attachmentCount;
+    pAttachments = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pAttachments) {
+        pAttachments = new VkPipelineColorBlendAttachmentState[in_struct->attachmentCount];
+        memcpy ((void *)pAttachments, (void *)in_struct->pAttachments, sizeof(VkPipelineColorBlendAttachmentState)*in_struct->attachmentCount);
+    }
+    for (uint32_t i = 0; i < 4; ++i) {
+        blendConstants[i] = in_struct->blendConstants[i];
+    }
+}
+
+void safe_VkPipelineColorBlendStateCreateInfo::initialize(const safe_VkPipelineColorBlendStateCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    logicOpEnable = src->logicOpEnable;
+    logicOp = src->logicOp;
+    attachmentCount = src->attachmentCount;
+    pAttachments = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pAttachments) {
+        pAttachments = new VkPipelineColorBlendAttachmentState[src->attachmentCount];
+        memcpy ((void *)pAttachments, (void *)src->pAttachments, sizeof(VkPipelineColorBlendAttachmentState)*src->attachmentCount);
+    }
+    for (uint32_t i = 0; i < 4; ++i) {
+        blendConstants[i] = src->blendConstants[i];
+    }
+}
+
+safe_VkPipelineDynamicStateCreateInfo::safe_VkPipelineDynamicStateCreateInfo(const VkPipelineDynamicStateCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    dynamicStateCount(in_struct->dynamicStateCount),
+    pDynamicStates(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pDynamicStates) {
+        pDynamicStates = new VkDynamicState[in_struct->dynamicStateCount];
+        memcpy ((void *)pDynamicStates, (void *)in_struct->pDynamicStates, sizeof(VkDynamicState)*in_struct->dynamicStateCount);
+    }
+}
+
+safe_VkPipelineDynamicStateCreateInfo::safe_VkPipelineDynamicStateCreateInfo() :
+    pNext(nullptr),
+    pDynamicStates(nullptr)
+{}
+
+safe_VkPipelineDynamicStateCreateInfo::safe_VkPipelineDynamicStateCreateInfo(const safe_VkPipelineDynamicStateCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    dynamicStateCount = src.dynamicStateCount;
+    pDynamicStates = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pDynamicStates) {
+        pDynamicStates = new VkDynamicState[src.dynamicStateCount];
+        memcpy ((void *)pDynamicStates, (void *)src.pDynamicStates, sizeof(VkDynamicState)*src.dynamicStateCount);
+    }
+}
+
+safe_VkPipelineDynamicStateCreateInfo& safe_VkPipelineDynamicStateCreateInfo::operator=(const safe_VkPipelineDynamicStateCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pDynamicStates)
+        delete[] pDynamicStates;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    dynamicStateCount = src.dynamicStateCount;
+    pDynamicStates = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pDynamicStates) {
+        pDynamicStates = new VkDynamicState[src.dynamicStateCount];
+        memcpy ((void *)pDynamicStates, (void *)src.pDynamicStates, sizeof(VkDynamicState)*src.dynamicStateCount);
+    }
+
+    return *this;
+}
+
+safe_VkPipelineDynamicStateCreateInfo::~safe_VkPipelineDynamicStateCreateInfo()
+{
+    if (pDynamicStates)
+        delete[] pDynamicStates;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineDynamicStateCreateInfo::initialize(const VkPipelineDynamicStateCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    dynamicStateCount = in_struct->dynamicStateCount;
+    pDynamicStates = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pDynamicStates) {
+        pDynamicStates = new VkDynamicState[in_struct->dynamicStateCount];
+        memcpy ((void *)pDynamicStates, (void *)in_struct->pDynamicStates, sizeof(VkDynamicState)*in_struct->dynamicStateCount);
+    }
+}
+
+void safe_VkPipelineDynamicStateCreateInfo::initialize(const safe_VkPipelineDynamicStateCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    dynamicStateCount = src->dynamicStateCount;
+    pDynamicStates = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pDynamicStates) {
+        pDynamicStates = new VkDynamicState[src->dynamicStateCount];
+        memcpy ((void *)pDynamicStates, (void *)src->pDynamicStates, sizeof(VkDynamicState)*src->dynamicStateCount);
+    }
+}
+
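+// The graphics pipeline wrapper copies its sub-states conditionally, mirroring
+// the cases where the spec ignores them: tessellation state only when a
+// tessellation stage is present, and viewport/multisample/depth-stencil/
+// color-blend state only when rasterization is enabled (and, for the last two,
+// when the subpass actually uses the corresponding attachments).
+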
+safe_VkGraphicsPipelineCreateInfo::safe_VkGraphicsPipelineCreateInfo(const VkGraphicsPipelineCreateInfo* in_struct, const bool uses_color_attachment, const bool uses_depthstencil_attachment) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    stageCount(in_struct->stageCount),
+    pStages(nullptr),
+    pVertexInputState(nullptr),
+    pInputAssemblyState(nullptr),
+    pTessellationState(nullptr),
+    pViewportState(nullptr),
+    pRasterizationState(nullptr),
+    pMultisampleState(nullptr),
+    pDepthStencilState(nullptr),
+    pColorBlendState(nullptr),
+    pDynamicState(nullptr),
+    layout(in_struct->layout),
+    renderPass(in_struct->renderPass),
+    subpass(in_struct->subpass),
+    basePipelineHandle(in_struct->basePipelineHandle),
+    basePipelineIndex(in_struct->basePipelineIndex)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (stageCount && in_struct->pStages) {
+        pStages = new safe_VkPipelineShaderStageCreateInfo[stageCount];
+        for (uint32_t i = 0; i < stageCount; ++i) {
+            pStages[i].initialize(&in_struct->pStages[i]);
+        }
+    }
+    if (in_struct->pVertexInputState)
+        pVertexInputState = new safe_VkPipelineVertexInputStateCreateInfo(in_struct->pVertexInputState);
+    else
+        pVertexInputState = NULL;
+    if (in_struct->pInputAssemblyState)
+        pInputAssemblyState = new safe_VkPipelineInputAssemblyStateCreateInfo(in_struct->pInputAssemblyState);
+    else
+        pInputAssemblyState = NULL;
+    bool has_tessellation_stage = false;
+    if (stageCount && pStages)
+        for (uint32_t i = 0; i < stageCount && !has_tessellation_stage; ++i)
+            if (pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT || pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)
+                has_tessellation_stage = true;
+    if (in_struct->pTessellationState && has_tessellation_stage)
+        pTessellationState = new safe_VkPipelineTessellationStateCreateInfo(in_struct->pTessellationState);
+    else
+        pTessellationState = NULL; // original pTessellationState pointer ignored
+    bool has_rasterization = in_struct->pRasterizationState ? !in_struct->pRasterizationState->rasterizerDiscardEnable : false;
+    if (in_struct->pViewportState && has_rasterization) {
+        bool is_dynamic_viewports = false;
+        bool is_dynamic_scissors = false;
+        if (in_struct->pDynamicState && in_struct->pDynamicState->pDynamicStates) {
+            for (uint32_t i = 0; i < in_struct->pDynamicState->dynamicStateCount && !is_dynamic_viewports; ++i)
+                if (in_struct->pDynamicState->pDynamicStates[i] == VK_DYNAMIC_STATE_VIEWPORT)
+                    is_dynamic_viewports = true;
+            for (uint32_t i = 0; i < in_struct->pDynamicState->dynamicStateCount && !is_dynamic_scissors; ++i)
+                if (in_struct->pDynamicState->pDynamicStates[i] == VK_DYNAMIC_STATE_SCISSOR)
+                    is_dynamic_scissors = true;
+        }
+        pViewportState = new safe_VkPipelineViewportStateCreateInfo(in_struct->pViewportState, is_dynamic_viewports, is_dynamic_scissors);
+    } else
+        pViewportState = NULL; // original pViewportState pointer ignored
+    if (in_struct->pRasterizationState)
+        pRasterizationState = new safe_VkPipelineRasterizationStateCreateInfo(in_struct->pRasterizationState);
+    else
+        pRasterizationState = NULL;
+    if (in_struct->pMultisampleState && has_rasterization)
+        pMultisampleState = new safe_VkPipelineMultisampleStateCreateInfo(in_struct->pMultisampleState);
+    else
+        pMultisampleState = NULL; // original pMultisampleState pointer ignored
+    // needs a tracked subpass state uses_depthstencil_attachment
+    if (in_struct->pDepthStencilState && has_rasterization && uses_depthstencil_attachment)
+        pDepthStencilState = new safe_VkPipelineDepthStencilStateCreateInfo(in_struct->pDepthStencilState);
+    else
+        pDepthStencilState = NULL; // original pDepthStencilState pointer ignored
+    // needs a tracked subpass state uses_color_attachment
+    if (in_struct->pColorBlendState && has_rasterization && uses_color_attachment)
+        pColorBlendState = new safe_VkPipelineColorBlendStateCreateInfo(in_struct->pColorBlendState);
+    else
+        pColorBlendState = NULL; // original pColorBlendState pointer ignored
+    if (in_struct->pDynamicState)
+        pDynamicState = new safe_VkPipelineDynamicStateCreateInfo(in_struct->pDynamicState);
+    else
+        pDynamicState = NULL;
+}
+
+safe_VkGraphicsPipelineCreateInfo::safe_VkGraphicsPipelineCreateInfo() :
+    pNext(nullptr),
+    pStages(nullptr),
+    pVertexInputState(nullptr),
+    pInputAssemblyState(nullptr),
+    pTessellationState(nullptr),
+    pViewportState(nullptr),
+    pRasterizationState(nullptr),
+    pMultisampleState(nullptr),
+    pDepthStencilState(nullptr),
+    pColorBlendState(nullptr),
+    pDynamicState(nullptr)
+{}
+
+safe_VkGraphicsPipelineCreateInfo::safe_VkGraphicsPipelineCreateInfo(const safe_VkGraphicsPipelineCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    stageCount = src.stageCount;
+    pStages = nullptr;
+    pVertexInputState = nullptr;
+    pInputAssemblyState = nullptr;
+    pTessellationState = nullptr;
+    pViewportState = nullptr;
+    pRasterizationState = nullptr;
+    pMultisampleState = nullptr;
+    pDepthStencilState = nullptr;
+    pColorBlendState = nullptr;
+    pDynamicState = nullptr;
+    layout = src.layout;
+    renderPass = src.renderPass;
+    subpass = src.subpass;
+    basePipelineHandle = src.basePipelineHandle;
+    basePipelineIndex = src.basePipelineIndex;
+    pNext = SafePnextCopy(src.pNext);
+    if (stageCount && src.pStages) {
+        pStages = new safe_VkPipelineShaderStageCreateInfo[stageCount];
+        for (uint32_t i = 0; i < stageCount; ++i) {
+            pStages[i].initialize(&src.pStages[i]);
+        }
+    }
+    if (src.pVertexInputState)
+        pVertexInputState = new safe_VkPipelineVertexInputStateCreateInfo(*src.pVertexInputState);
+    else
+        pVertexInputState = NULL;
+    if (src.pInputAssemblyState)
+        pInputAssemblyState = new safe_VkPipelineInputAssemblyStateCreateInfo(*src.pInputAssemblyState);
+    else
+        pInputAssemblyState = NULL;
+    bool has_tessellation_stage = false;
+    if (stageCount && pStages)
+        for (uint32_t i = 0; i < stageCount && !has_tessellation_stage; ++i)
+            if (pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT || pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)
+                has_tessellation_stage = true;
+    if (src.pTessellationState && has_tessellation_stage)
+        pTessellationState = new safe_VkPipelineTessellationStateCreateInfo(*src.pTessellationState);
+    else
+        pTessellationState = NULL; // original pTessellationState pointer ignored
+    bool has_rasterization = src.pRasterizationState ? !src.pRasterizationState->rasterizerDiscardEnable : false;
+    if (src.pViewportState && has_rasterization) {
+        pViewportState = new safe_VkPipelineViewportStateCreateInfo(*src.pViewportState);
+    } else
+        pViewportState = NULL; // original pViewportState pointer ignored
+    if (src.pRasterizationState)
+        pRasterizationState = new safe_VkPipelineRasterizationStateCreateInfo(*src.pRasterizationState);
+    else
+        pRasterizationState = NULL;
+    if (src.pMultisampleState && has_rasterization)
+        pMultisampleState = new safe_VkPipelineMultisampleStateCreateInfo(*src.pMultisampleState);
+    else
+        pMultisampleState = NULL; // original pMultisampleState pointer ignored
+    if (src.pDepthStencilState && has_rasterization)
+        pDepthStencilState = new safe_VkPipelineDepthStencilStateCreateInfo(*src.pDepthStencilState);
+    else
+        pDepthStencilState = NULL; // original pDepthStencilState pointer ignored
+    if (src.pColorBlendState && has_rasterization)
+        pColorBlendState = new safe_VkPipelineColorBlendStateCreateInfo(*src.pColorBlendState);
+    else
+        pColorBlendState = NULL; // original pColorBlendState pointer ignored
+    if (src.pDynamicState)
+        pDynamicState = new safe_VkPipelineDynamicStateCreateInfo(*src.pDynamicState);
+    else
+        pDynamicState = NULL;
+}
+
+safe_VkGraphicsPipelineCreateInfo& safe_VkGraphicsPipelineCreateInfo::operator=(const safe_VkGraphicsPipelineCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pStages)
+        delete[] pStages;
+    if (pVertexInputState)
+        delete pVertexInputState;
+    if (pInputAssemblyState)
+        delete pInputAssemblyState;
+    if (pTessellationState)
+        delete pTessellationState;
+    if (pViewportState)
+        delete pViewportState;
+    if (pRasterizationState)
+        delete pRasterizationState;
+    if (pMultisampleState)
+        delete pMultisampleState;
+    if (pDepthStencilState)
+        delete pDepthStencilState;
+    if (pColorBlendState)
+        delete pColorBlendState;
+    if (pDynamicState)
+        delete pDynamicState;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    stageCount = src.stageCount;
+    pStages = nullptr;
+    pVertexInputState = nullptr;
+    pInputAssemblyState = nullptr;
+    pTessellationState = nullptr;
+    pViewportState = nullptr;
+    pRasterizationState = nullptr;
+    pMultisampleState = nullptr;
+    pDepthStencilState = nullptr;
+    pColorBlendState = nullptr;
+    pDynamicState = nullptr;
+    layout = src.layout;
+    renderPass = src.renderPass;
+    subpass = src.subpass;
+    basePipelineHandle = src.basePipelineHandle;
+    basePipelineIndex = src.basePipelineIndex;
+    pNext = SafePnextCopy(src.pNext);
+    if (stageCount && src.pStages) {
+        pStages = new safe_VkPipelineShaderStageCreateInfo[stageCount];
+        for (uint32_t i = 0; i < stageCount; ++i) {
+            pStages[i].initialize(&src.pStages[i]);
+        }
+    }
+    if (src.pVertexInputState)
+        pVertexInputState = new safe_VkPipelineVertexInputStateCreateInfo(*src.pVertexInputState);
+    else
+        pVertexInputState = NULL;
+    if (src.pInputAssemblyState)
+        pInputAssemblyState = new safe_VkPipelineInputAssemblyStateCreateInfo(*src.pInputAssemblyState);
+    else
+        pInputAssemblyState = NULL;
+    bool has_tessellation_stage = false;
+    if (stageCount && pStages)
+        for (uint32_t i = 0; i < stageCount && !has_tessellation_stage; ++i)
+            if (pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT || pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)
+                has_tessellation_stage = true;
+    if (src.pTessellationState && has_tessellation_stage)
+        pTessellationState = new safe_VkPipelineTessellationStateCreateInfo(*src.pTessellationState);
+    else
+        pTessellationState = NULL; // original pTessellationState pointer ignored
+    bool has_rasterization = src.pRasterizationState ? !src.pRasterizationState->rasterizerDiscardEnable : false;
+    if (src.pViewportState && has_rasterization) {
+        pViewportState = new safe_VkPipelineViewportStateCreateInfo(*src.pViewportState);
+    } else
+        pViewportState = NULL; // original pViewportState pointer ignored
+    if (src.pRasterizationState)
+        pRasterizationState = new safe_VkPipelineRasterizationStateCreateInfo(*src.pRasterizationState);
+    else
+        pRasterizationState = NULL;
+    if (src.pMultisampleState && has_rasterization)
+        pMultisampleState = new safe_VkPipelineMultisampleStateCreateInfo(*src.pMultisampleState);
+    else
+        pMultisampleState = NULL; // original pMultisampleState pointer ignored
+    if (src.pDepthStencilState && has_rasterization)
+        pDepthStencilState = new safe_VkPipelineDepthStencilStateCreateInfo(*src.pDepthStencilState);
+    else
+        pDepthStencilState = NULL; // original pDepthStencilState pointer ignored
+    if (src.pColorBlendState && has_rasterization)
+        pColorBlendState = new safe_VkPipelineColorBlendStateCreateInfo(*src.pColorBlendState);
+    else
+        pColorBlendState = NULL; // original pColorBlendState pointer ignored
+    if (src.pDynamicState)
+        pDynamicState = new safe_VkPipelineDynamicStateCreateInfo(*src.pDynamicState);
+    else
+        pDynamicState = NULL;
+
+    return *this;
+}
+
+safe_VkGraphicsPipelineCreateInfo::~safe_VkGraphicsPipelineCreateInfo()
+{
+    if (pStages)
+        delete[] pStages;
+    if (pVertexInputState)
+        delete pVertexInputState;
+    if (pInputAssemblyState)
+        delete pInputAssemblyState;
+    if (pTessellationState)
+        delete pTessellationState;
+    if (pViewportState)
+        delete pViewportState;
+    if (pRasterizationState)
+        delete pRasterizationState;
+    if (pMultisampleState)
+        delete pMultisampleState;
+    if (pDepthStencilState)
+        delete pDepthStencilState;
+    if (pColorBlendState)
+        delete pColorBlendState;
+    if (pDynamicState)
+        delete pDynamicState;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkGraphicsPipelineCreateInfo::initialize(const VkGraphicsPipelineCreateInfo* in_struct, const bool uses_color_attachment, const bool uses_depthstencil_attachment)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    stageCount = in_struct->stageCount;
+    pStages = nullptr;
+    pVertexInputState = nullptr;
+    pInputAssemblyState = nullptr;
+    pTessellationState = nullptr;
+    pViewportState = nullptr;
+    pRasterizationState = nullptr;
+    pMultisampleState = nullptr;
+    pDepthStencilState = nullptr;
+    pColorBlendState = nullptr;
+    pDynamicState = nullptr;
+    layout = in_struct->layout;
+    renderPass = in_struct->renderPass;
+    subpass = in_struct->subpass;
+    basePipelineHandle = in_struct->basePipelineHandle;
+    basePipelineIndex = in_struct->basePipelineIndex;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (stageCount && in_struct->pStages) {
+        pStages = new safe_VkPipelineShaderStageCreateInfo[stageCount];
+        for (uint32_t i = 0; i < stageCount; ++i) {
+            pStages[i].initialize(&in_struct->pStages[i]);
+        }
+    }
+    if (in_struct->pVertexInputState)
+        pVertexInputState = new safe_VkPipelineVertexInputStateCreateInfo(in_struct->pVertexInputState);
+    else
+        pVertexInputState = NULL;
+    if (in_struct->pInputAssemblyState)
+        pInputAssemblyState = new safe_VkPipelineInputAssemblyStateCreateInfo(in_struct->pInputAssemblyState);
+    else
+        pInputAssemblyState = NULL;
+    bool has_tessellation_stage = false;
+    if (stageCount && pStages)
+        for (uint32_t i = 0; i < stageCount && !has_tessellation_stage; ++i)
+            if (pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT || pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)
+                has_tessellation_stage = true;
+    if (in_struct->pTessellationState && has_tessellation_stage)
+        pTessellationState = new safe_VkPipelineTessellationStateCreateInfo(in_struct->pTessellationState);
+    else
+        pTessellationState = NULL; // original pTessellationState pointer ignored
+    bool has_rasterization = in_struct->pRasterizationState ? !in_struct->pRasterizationState->rasterizerDiscardEnable : false;
+    if (in_struct->pViewportState && has_rasterization) {
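+        // Detect dynamic viewport/scissor state: when those states are dynamic, the corresponding arrays in pViewportState are ignored by the spec and may be invalid, so the flags are passed to the safe viewport copy.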
+        bool is_dynamic_viewports = false;
+        bool is_dynamic_scissors = false;
+        if (in_struct->pDynamicState && in_struct->pDynamicState->pDynamicStates) {
+            for (uint32_t i = 0; i < in_struct->pDynamicState->dynamicStateCount && !is_dynamic_viewports; ++i)
+                if (in_struct->pDynamicState->pDynamicStates[i] == VK_DYNAMIC_STATE_VIEWPORT)
+                    is_dynamic_viewports = true;
+            for (uint32_t i = 0; i < in_struct->pDynamicState->dynamicStateCount && !is_dynamic_scissors; ++i)
+                if (in_struct->pDynamicState->pDynamicStates[i] == VK_DYNAMIC_STATE_SCISSOR)
+                    is_dynamic_scissors = true;
+        }
+        pViewportState = new safe_VkPipelineViewportStateCreateInfo(in_struct->pViewportState, is_dynamic_viewports, is_dynamic_scissors);
+    } else
+        pViewportState = NULL; // original pViewportState pointer ignored
+    if (in_struct->pRasterizationState)
+        pRasterizationState = new safe_VkPipelineRasterizationStateCreateInfo(in_struct->pRasterizationState);
+    else
+        pRasterizationState = NULL;
+    if (in_struct->pMultisampleState && has_rasterization)
+        pMultisampleState = new safe_VkPipelineMultisampleStateCreateInfo(in_struct->pMultisampleState);
+    else
+        pMultisampleState = NULL; // original pMultisampleState pointer ignored
+    // pDepthStencilState is only copied when the tracked subpass state reports a depth/stencil attachment (uses_depthstencil_attachment)
+    if (in_struct->pDepthStencilState && has_rasterization && uses_depthstencil_attachment)
+        pDepthStencilState = new safe_VkPipelineDepthStencilStateCreateInfo(in_struct->pDepthStencilState);
+    else
+        pDepthStencilState = NULL; // original pDepthStencilState pointer ignored
+    // pColorBlendState is only copied when the tracked subpass state reports a color attachment (uses_color_attachment)
+    if (in_struct->pColorBlendState && has_rasterization && uses_color_attachment)
+        pColorBlendState = new safe_VkPipelineColorBlendStateCreateInfo(in_struct->pColorBlendState);
+    else
+        pColorBlendState = NULL; // original pColorBlendState pointer ignored
+    if (in_struct->pDynamicState)
+        pDynamicState = new safe_VkPipelineDynamicStateCreateInfo(in_struct->pDynamicState);
+    else
+        pDynamicState = NULL;
+}
+
+void safe_VkGraphicsPipelineCreateInfo::initialize(const safe_VkGraphicsPipelineCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    stageCount = src->stageCount;
+    pStages = nullptr;
+    pVertexInputState = nullptr;
+    pInputAssemblyState = nullptr;
+    pTessellationState = nullptr;
+    pViewportState = nullptr;
+    pRasterizationState = nullptr;
+    pMultisampleState = nullptr;
+    pDepthStencilState = nullptr;
+    pColorBlendState = nullptr;
+    pDynamicState = nullptr;
+    layout = src->layout;
+    renderPass = src->renderPass;
+    subpass = src->subpass;
+    basePipelineHandle = src->basePipelineHandle;
+    basePipelineIndex = src->basePipelineIndex;
+    pNext = SafePnextCopy(src->pNext);
+    if (stageCount && src->pStages) {
+        pStages = new safe_VkPipelineShaderStageCreateInfo[stageCount];
+        for (uint32_t i = 0; i < stageCount; ++i) {
+            pStages[i].initialize(&src->pStages[i]);
+        }
+    }
+    if (src->pVertexInputState)
+        pVertexInputState = new safe_VkPipelineVertexInputStateCreateInfo(*src->pVertexInputState);
+    else
+        pVertexInputState = NULL;
+    if (src->pInputAssemblyState)
+        pInputAssemblyState = new safe_VkPipelineInputAssemblyStateCreateInfo(*src->pInputAssemblyState);
+    else
+        pInputAssemblyState = NULL;
+    bool has_tessellation_stage = false;
+    if (stageCount && pStages)
+        for (uint32_t i = 0; i < stageCount && !has_tessellation_stage; ++i)
+            if (pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT || pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)
+                has_tessellation_stage = true;
+    if (src->pTessellationState && has_tessellation_stage)
+        pTessellationState = new safe_VkPipelineTessellationStateCreateInfo(*src->pTessellationState);
+    else
+        pTessellationState = NULL; // original pTessellationState pointer ignored
+    bool has_rasterization = src->pRasterizationState ? !src->pRasterizationState->rasterizerDiscardEnable : false;
+    if (src->pViewportState && has_rasterization) {
+        pViewportState = new safe_VkPipelineViewportStateCreateInfo(*src->pViewportState);
+    } else
+        pViewportState = NULL; // original pViewportState pointer ignored
+    if (src->pRasterizationState)
+        pRasterizationState = new safe_VkPipelineRasterizationStateCreateInfo(*src->pRasterizationState);
+    else
+        pRasterizationState = NULL;
+    if (src->pMultisampleState && has_rasterization)
+        pMultisampleState = new safe_VkPipelineMultisampleStateCreateInfo(*src->pMultisampleState);
+    else
+        pMultisampleState = NULL; // original pMultisampleState pointer ignored
+    if (src->pDepthStencilState && has_rasterization)
+        pDepthStencilState = new safe_VkPipelineDepthStencilStateCreateInfo(*src->pDepthStencilState);
+    else
+        pDepthStencilState = NULL; // original pDepthStencilState pointer ignored
+    if (src->pColorBlendState && has_rasterization)
+        pColorBlendState = new safe_VkPipelineColorBlendStateCreateInfo(*src->pColorBlendState);
+    else
+        pColorBlendState = NULL; // original pColorBlendState pointer ignored
+    if (src->pDynamicState)
+        pDynamicState = new safe_VkPipelineDynamicStateCreateInfo(*src->pDynamicState);
+    else
+        pDynamicState = NULL;
+}
+
+safe_VkComputePipelineCreateInfo::safe_VkComputePipelineCreateInfo(const VkComputePipelineCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    stage(&in_struct->stage),
+    layout(in_struct->layout),
+    basePipelineHandle(in_struct->basePipelineHandle),
+    basePipelineIndex(in_struct->basePipelineIndex)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkComputePipelineCreateInfo::safe_VkComputePipelineCreateInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkComputePipelineCreateInfo::safe_VkComputePipelineCreateInfo(const safe_VkComputePipelineCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    stage.initialize(&src.stage);
+    layout = src.layout;
+    basePipelineHandle = src.basePipelineHandle;
+    basePipelineIndex = src.basePipelineIndex;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkComputePipelineCreateInfo& safe_VkComputePipelineCreateInfo::operator=(const safe_VkComputePipelineCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    stage.initialize(&src.stage);
+    layout = src.layout;
+    basePipelineHandle = src.basePipelineHandle;
+    basePipelineIndex = src.basePipelineIndex;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkComputePipelineCreateInfo::~safe_VkComputePipelineCreateInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkComputePipelineCreateInfo::initialize(const VkComputePipelineCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    stage.initialize(&in_struct->stage);
+    layout = in_struct->layout;
+    basePipelineHandle = in_struct->basePipelineHandle;
+    basePipelineIndex = in_struct->basePipelineIndex;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkComputePipelineCreateInfo::initialize(const safe_VkComputePipelineCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    stage.initialize(&src->stage);
+    layout = src->layout;
+    basePipelineHandle = src->basePipelineHandle;
+    basePipelineIndex = src->basePipelineIndex;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPipelineLayoutCreateInfo::safe_VkPipelineLayoutCreateInfo(const VkPipelineLayoutCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    setLayoutCount(in_struct->setLayoutCount),
+    pSetLayouts(nullptr),
+    pushConstantRangeCount(in_struct->pushConstantRangeCount),
+    pPushConstantRanges(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
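+    // Descriptor set layout handles are copied element-wise; push constant ranges are trivially copyable and duplicated with memcpy.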
+    if (setLayoutCount && in_struct->pSetLayouts) {
+        pSetLayouts = new VkDescriptorSetLayout[setLayoutCount];
+        for (uint32_t i = 0; i < setLayoutCount; ++i) {
+            pSetLayouts[i] = in_struct->pSetLayouts[i];
+        }
+    }
+    if (in_struct->pPushConstantRanges) {
+        pPushConstantRanges = new VkPushConstantRange[in_struct->pushConstantRangeCount];
+        memcpy ((void *)pPushConstantRanges, (void *)in_struct->pPushConstantRanges, sizeof(VkPushConstantRange)*in_struct->pushConstantRangeCount);
+    }
+}
+
+safe_VkPipelineLayoutCreateInfo::safe_VkPipelineLayoutCreateInfo() :
+    pNext(nullptr),
+    pSetLayouts(nullptr),
+    pPushConstantRanges(nullptr)
+{}
+
+safe_VkPipelineLayoutCreateInfo::safe_VkPipelineLayoutCreateInfo(const safe_VkPipelineLayoutCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    setLayoutCount = src.setLayoutCount;
+    pSetLayouts = nullptr;
+    pushConstantRangeCount = src.pushConstantRangeCount;
+    pPushConstantRanges = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (setLayoutCount && src.pSetLayouts) {
+        pSetLayouts = new VkDescriptorSetLayout[setLayoutCount];
+        for (uint32_t i = 0; i < setLayoutCount; ++i) {
+            pSetLayouts[i] = src.pSetLayouts[i];
+        }
+    }
+    if (src.pPushConstantRanges) {
+        pPushConstantRanges = new VkPushConstantRange[src.pushConstantRangeCount];
+        memcpy ((void *)pPushConstantRanges, (void *)src.pPushConstantRanges, sizeof(VkPushConstantRange)*src.pushConstantRangeCount);
+    }
+}
+
+safe_VkPipelineLayoutCreateInfo& safe_VkPipelineLayoutCreateInfo::operator=(const safe_VkPipelineLayoutCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pSetLayouts)
+        delete[] pSetLayouts;
+    if (pPushConstantRanges)
+        delete[] pPushConstantRanges;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    setLayoutCount = src.setLayoutCount;
+    pSetLayouts = nullptr;
+    pushConstantRangeCount = src.pushConstantRangeCount;
+    pPushConstantRanges = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (setLayoutCount && src.pSetLayouts) {
+        pSetLayouts = new VkDescriptorSetLayout[setLayoutCount];
+        for (uint32_t i = 0; i < setLayoutCount; ++i) {
+            pSetLayouts[i] = src.pSetLayouts[i];
+        }
+    }
+    if (src.pPushConstantRanges) {
+        pPushConstantRanges = new VkPushConstantRange[src.pushConstantRangeCount];
+        memcpy ((void *)pPushConstantRanges, (void *)src.pPushConstantRanges, sizeof(VkPushConstantRange)*src.pushConstantRangeCount);
+    }
+
+    return *this;
+}
+
+safe_VkPipelineLayoutCreateInfo::~safe_VkPipelineLayoutCreateInfo()
+{
+    if (pSetLayouts)
+        delete[] pSetLayouts;
+    if (pPushConstantRanges)
+        delete[] pPushConstantRanges;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineLayoutCreateInfo::initialize(const VkPipelineLayoutCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    setLayoutCount = in_struct->setLayoutCount;
+    pSetLayouts = nullptr;
+    pushConstantRangeCount = in_struct->pushConstantRangeCount;
+    pPushConstantRanges = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (setLayoutCount && in_struct->pSetLayouts) {
+        pSetLayouts = new VkDescriptorSetLayout[setLayoutCount];
+        for (uint32_t i = 0; i < setLayoutCount; ++i) {
+            pSetLayouts[i] = in_struct->pSetLayouts[i];
+        }
+    }
+    if (in_struct->pPushConstantRanges) {
+        pPushConstantRanges = new VkPushConstantRange[in_struct->pushConstantRangeCount];
+        memcpy ((void *)pPushConstantRanges, (void *)in_struct->pPushConstantRanges, sizeof(VkPushConstantRange)*in_struct->pushConstantRangeCount);
+    }
+}
+
+void safe_VkPipelineLayoutCreateInfo::initialize(const safe_VkPipelineLayoutCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    setLayoutCount = src->setLayoutCount;
+    pSetLayouts = nullptr;
+    pushConstantRangeCount = src->pushConstantRangeCount;
+    pPushConstantRanges = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (setLayoutCount && src->pSetLayouts) {
+        pSetLayouts = new VkDescriptorSetLayout[setLayoutCount];
+        for (uint32_t i = 0; i < setLayoutCount; ++i) {
+            pSetLayouts[i] = src->pSetLayouts[i];
+        }
+    }
+    if (src->pPushConstantRanges) {
+        pPushConstantRanges = new VkPushConstantRange[src->pushConstantRangeCount];
+        memcpy ((void *)pPushConstantRanges, (void *)src->pPushConstantRanges, sizeof(VkPushConstantRange)*src->pushConstantRangeCount);
+    }
+}
+
+safe_VkSamplerCreateInfo::safe_VkSamplerCreateInfo(const VkSamplerCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    magFilter(in_struct->magFilter),
+    minFilter(in_struct->minFilter),
+    mipmapMode(in_struct->mipmapMode),
+    addressModeU(in_struct->addressModeU),
+    addressModeV(in_struct->addressModeV),
+    addressModeW(in_struct->addressModeW),
+    mipLodBias(in_struct->mipLodBias),
+    anisotropyEnable(in_struct->anisotropyEnable),
+    maxAnisotropy(in_struct->maxAnisotropy),
+    compareEnable(in_struct->compareEnable),
+    compareOp(in_struct->compareOp),
+    minLod(in_struct->minLod),
+    maxLod(in_struct->maxLod),
+    borderColor(in_struct->borderColor),
+    unnormalizedCoordinates(in_struct->unnormalizedCoordinates)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkSamplerCreateInfo::safe_VkSamplerCreateInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkSamplerCreateInfo::safe_VkSamplerCreateInfo(const safe_VkSamplerCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    magFilter = src.magFilter;
+    minFilter = src.minFilter;
+    mipmapMode = src.mipmapMode;
+    addressModeU = src.addressModeU;
+    addressModeV = src.addressModeV;
+    addressModeW = src.addressModeW;
+    mipLodBias = src.mipLodBias;
+    anisotropyEnable = src.anisotropyEnable;
+    maxAnisotropy = src.maxAnisotropy;
+    compareEnable = src.compareEnable;
+    compareOp = src.compareOp;
+    minLod = src.minLod;
+    maxLod = src.maxLod;
+    borderColor = src.borderColor;
+    unnormalizedCoordinates = src.unnormalizedCoordinates;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkSamplerCreateInfo& safe_VkSamplerCreateInfo::operator=(const safe_VkSamplerCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    magFilter = src.magFilter;
+    minFilter = src.minFilter;
+    mipmapMode = src.mipmapMode;
+    addressModeU = src.addressModeU;
+    addressModeV = src.addressModeV;
+    addressModeW = src.addressModeW;
+    mipLodBias = src.mipLodBias;
+    anisotropyEnable = src.anisotropyEnable;
+    maxAnisotropy = src.maxAnisotropy;
+    compareEnable = src.compareEnable;
+    compareOp = src.compareOp;
+    minLod = src.minLod;
+    maxLod = src.maxLod;
+    borderColor = src.borderColor;
+    unnormalizedCoordinates = src.unnormalizedCoordinates;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkSamplerCreateInfo::~safe_VkSamplerCreateInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSamplerCreateInfo::initialize(const VkSamplerCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    magFilter = in_struct->magFilter;
+    minFilter = in_struct->minFilter;
+    mipmapMode = in_struct->mipmapMode;
+    addressModeU = in_struct->addressModeU;
+    addressModeV = in_struct->addressModeV;
+    addressModeW = in_struct->addressModeW;
+    mipLodBias = in_struct->mipLodBias;
+    anisotropyEnable = in_struct->anisotropyEnable;
+    maxAnisotropy = in_struct->maxAnisotropy;
+    compareEnable = in_struct->compareEnable;
+    compareOp = in_struct->compareOp;
+    minLod = in_struct->minLod;
+    maxLod = in_struct->maxLod;
+    borderColor = in_struct->borderColor;
+    unnormalizedCoordinates = in_struct->unnormalizedCoordinates;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkSamplerCreateInfo::initialize(const safe_VkSamplerCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    magFilter = src->magFilter;
+    minFilter = src->minFilter;
+    mipmapMode = src->mipmapMode;
+    addressModeU = src->addressModeU;
+    addressModeV = src->addressModeV;
+    addressModeW = src->addressModeW;
+    mipLodBias = src->mipLodBias;
+    anisotropyEnable = src->anisotropyEnable;
+    maxAnisotropy = src->maxAnisotropy;
+    compareEnable = src->compareEnable;
+    compareOp = src->compareOp;
+    minLod = src->minLod;
+    maxLod = src->maxLod;
+    borderColor = src->borderColor;
+    unnormalizedCoordinates = src->unnormalizedCoordinates;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDescriptorSetLayoutBinding::safe_VkDescriptorSetLayoutBinding(const VkDescriptorSetLayoutBinding* in_struct) :
+    binding(in_struct->binding),
+    descriptorType(in_struct->descriptorType),
+    descriptorCount(in_struct->descriptorCount),
+    stageFlags(in_struct->stageFlags),
+    pImmutableSamplers(nullptr)
+{
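+    // pImmutableSamplers is only defined for SAMPLER and COMBINED_IMAGE_SAMPLER descriptors, so it is only copied for those types.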
+    const bool sampler_type = in_struct->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER || in_struct->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+    if (descriptorCount && in_struct->pImmutableSamplers && sampler_type) {
+        pImmutableSamplers = new VkSampler[descriptorCount];
+        for (uint32_t i = 0; i < descriptorCount; ++i) {
+            pImmutableSamplers[i] = in_struct->pImmutableSamplers[i];
+        }
+    }
+}
+
+safe_VkDescriptorSetLayoutBinding::safe_VkDescriptorSetLayoutBinding() :
+    pImmutableSamplers(nullptr)
+{}
+
+safe_VkDescriptorSetLayoutBinding::safe_VkDescriptorSetLayoutBinding(const safe_VkDescriptorSetLayoutBinding& src)
+{
+    binding = src.binding;
+    descriptorType = src.descriptorType;
+    descriptorCount = src.descriptorCount;
+    stageFlags = src.stageFlags;
+    pImmutableSamplers = nullptr;
+    const bool sampler_type = src.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER || src.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+    if (descriptorCount && src.pImmutableSamplers && sampler_type) {
+        pImmutableSamplers = new VkSampler[descriptorCount];
+        for (uint32_t i = 0; i < descriptorCount; ++i) {
+            pImmutableSamplers[i] = src.pImmutableSamplers[i];
+        }
+    }
+}
+
+safe_VkDescriptorSetLayoutBinding& safe_VkDescriptorSetLayoutBinding::operator=(const safe_VkDescriptorSetLayoutBinding& src)
+{
+    if (&src == this) return *this;
+
+    if (pImmutableSamplers)
+        delete[] pImmutableSamplers;
+
+    binding = src.binding;
+    descriptorType = src.descriptorType;
+    descriptorCount = src.descriptorCount;
+    stageFlags = src.stageFlags;
+    pImmutableSamplers = nullptr;
+    const bool sampler_type = src.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER || src.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+    if (descriptorCount && src.pImmutableSamplers && sampler_type) {
+        pImmutableSamplers = new VkSampler[descriptorCount];
+        for (uint32_t i = 0; i < descriptorCount; ++i) {
+            pImmutableSamplers[i] = src.pImmutableSamplers[i];
+        }
+    }
+
+    return *this;
+}
+
+safe_VkDescriptorSetLayoutBinding::~safe_VkDescriptorSetLayoutBinding()
+{
+    if (pImmutableSamplers)
+        delete[] pImmutableSamplers;
+}
+
+void safe_VkDescriptorSetLayoutBinding::initialize(const VkDescriptorSetLayoutBinding* in_struct)
+{
+    binding = in_struct->binding;
+    descriptorType = in_struct->descriptorType;
+    descriptorCount = in_struct->descriptorCount;
+    stageFlags = in_struct->stageFlags;
+    pImmutableSamplers = nullptr;
+    const bool sampler_type = in_struct->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER || in_struct->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+    if (descriptorCount && in_struct->pImmutableSamplers && sampler_type) {
+        pImmutableSamplers = new VkSampler[descriptorCount];
+        for (uint32_t i = 0; i < descriptorCount; ++i) {
+            pImmutableSamplers[i] = in_struct->pImmutableSamplers[i];
+        }
+    }
+}
+
+void safe_VkDescriptorSetLayoutBinding::initialize(const safe_VkDescriptorSetLayoutBinding* src)
+{
+    binding = src->binding;
+    descriptorType = src->descriptorType;
+    descriptorCount = src->descriptorCount;
+    stageFlags = src->stageFlags;
+    pImmutableSamplers = nullptr;
+    const bool sampler_type = src->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER || src->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+    if (descriptorCount && src->pImmutableSamplers && sampler_type) {
+        pImmutableSamplers = new VkSampler[descriptorCount];
+        for (uint32_t i = 0; i < descriptorCount; ++i) {
+            pImmutableSamplers[i] = src->pImmutableSamplers[i];
+        }
+    }
+}
+
+safe_VkDescriptorSetLayoutCreateInfo::safe_VkDescriptorSetLayoutCreateInfo(const VkDescriptorSetLayoutCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    bindingCount(in_struct->bindingCount),
+    pBindings(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (bindingCount && in_struct->pBindings) {
+        pBindings = new safe_VkDescriptorSetLayoutBinding[bindingCount];
+        for (uint32_t i = 0; i < bindingCount; ++i) {
+            pBindings[i].initialize(&in_struct->pBindings[i]);
+        }
+    }
+}
+
+safe_VkDescriptorSetLayoutCreateInfo::safe_VkDescriptorSetLayoutCreateInfo() :
+    pNext(nullptr),
+    pBindings(nullptr)
+{}
+
+safe_VkDescriptorSetLayoutCreateInfo::safe_VkDescriptorSetLayoutCreateInfo(const safe_VkDescriptorSetLayoutCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    bindingCount = src.bindingCount;
+    pBindings = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (bindingCount && src.pBindings) {
+        pBindings = new safe_VkDescriptorSetLayoutBinding[bindingCount];
+        for (uint32_t i = 0; i < bindingCount; ++i) {
+            pBindings[i].initialize(&src.pBindings[i]);
+        }
+    }
+}
+
+safe_VkDescriptorSetLayoutCreateInfo& safe_VkDescriptorSetLayoutCreateInfo::operator=(const safe_VkDescriptorSetLayoutCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pBindings)
+        delete[] pBindings;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    bindingCount = src.bindingCount;
+    pBindings = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (bindingCount && src.pBindings) {
+        pBindings = new safe_VkDescriptorSetLayoutBinding[bindingCount];
+        for (uint32_t i = 0; i < bindingCount; ++i) {
+            pBindings[i].initialize(&src.pBindings[i]);
+        }
+    }
+
+    return *this;
+}
+
+safe_VkDescriptorSetLayoutCreateInfo::~safe_VkDescriptorSetLayoutCreateInfo()
+{
+    if (pBindings)
+        delete[] pBindings;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDescriptorSetLayoutCreateInfo::initialize(const VkDescriptorSetLayoutCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    bindingCount = in_struct->bindingCount;
+    pBindings = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (bindingCount && in_struct->pBindings) {
+        pBindings = new safe_VkDescriptorSetLayoutBinding[bindingCount];
+        for (uint32_t i = 0; i < bindingCount; ++i) {
+            pBindings[i].initialize(&in_struct->pBindings[i]);
+        }
+    }
+}
+
+void safe_VkDescriptorSetLayoutCreateInfo::initialize(const safe_VkDescriptorSetLayoutCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    bindingCount = src->bindingCount;
+    pBindings = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (bindingCount && src->pBindings) {
+        pBindings = new safe_VkDescriptorSetLayoutBinding[bindingCount];
+        for (uint32_t i = 0; i < bindingCount; ++i) {
+            pBindings[i].initialize(&src->pBindings[i]);
+        }
+    }
+}
+
+safe_VkDescriptorPoolCreateInfo::safe_VkDescriptorPoolCreateInfo(const VkDescriptorPoolCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    maxSets(in_struct->maxSets),
+    poolSizeCount(in_struct->poolSizeCount),
+    pPoolSizes(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pPoolSizes) {
+        pPoolSizes = new VkDescriptorPoolSize[in_struct->poolSizeCount];
+        memcpy ((void *)pPoolSizes, (void *)in_struct->pPoolSizes, sizeof(VkDescriptorPoolSize)*in_struct->poolSizeCount);
+    }
+}
+
+safe_VkDescriptorPoolCreateInfo::safe_VkDescriptorPoolCreateInfo() :
+    pNext(nullptr),
+    pPoolSizes(nullptr)
+{}
+
+safe_VkDescriptorPoolCreateInfo::safe_VkDescriptorPoolCreateInfo(const safe_VkDescriptorPoolCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    maxSets = src.maxSets;
+    poolSizeCount = src.poolSizeCount;
+    pPoolSizes = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pPoolSizes) {
+        pPoolSizes = new VkDescriptorPoolSize[src.poolSizeCount];
+        memcpy ((void *)pPoolSizes, (void *)src.pPoolSizes, sizeof(VkDescriptorPoolSize)*src.poolSizeCount);
+    }
+}
+
+safe_VkDescriptorPoolCreateInfo& safe_VkDescriptorPoolCreateInfo::operator=(const safe_VkDescriptorPoolCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pPoolSizes)
+        delete[] pPoolSizes;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    maxSets = src.maxSets;
+    poolSizeCount = src.poolSizeCount;
+    pPoolSizes = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pPoolSizes) {
+        pPoolSizes = new VkDescriptorPoolSize[src.poolSizeCount];
+        memcpy ((void *)pPoolSizes, (void *)src.pPoolSizes, sizeof(VkDescriptorPoolSize)*src.poolSizeCount);
+    }
+
+    return *this;
+}
+
+safe_VkDescriptorPoolCreateInfo::~safe_VkDescriptorPoolCreateInfo()
+{
+    if (pPoolSizes)
+        delete[] pPoolSizes;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDescriptorPoolCreateInfo::initialize(const VkDescriptorPoolCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    maxSets = in_struct->maxSets;
+    poolSizeCount = in_struct->poolSizeCount;
+    pPoolSizes = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pPoolSizes) {
+        pPoolSizes = new VkDescriptorPoolSize[in_struct->poolSizeCount];
+        memcpy ((void *)pPoolSizes, (void *)in_struct->pPoolSizes, sizeof(VkDescriptorPoolSize)*in_struct->poolSizeCount);
+    }
+}
+
+void safe_VkDescriptorPoolCreateInfo::initialize(const safe_VkDescriptorPoolCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    maxSets = src->maxSets;
+    poolSizeCount = src->poolSizeCount;
+    pPoolSizes = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pPoolSizes) {
+        pPoolSizes = new VkDescriptorPoolSize[src->poolSizeCount];
+        memcpy ((void *)pPoolSizes, (void *)src->pPoolSizes, sizeof(VkDescriptorPoolSize)*src->poolSizeCount);
+    }
+}
+
+safe_VkDescriptorSetAllocateInfo::safe_VkDescriptorSetAllocateInfo(const VkDescriptorSetAllocateInfo* in_struct) :
+    sType(in_struct->sType),
+    descriptorPool(in_struct->descriptorPool),
+    descriptorSetCount(in_struct->descriptorSetCount),
+    pSetLayouts(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (descriptorSetCount && in_struct->pSetLayouts) {
+        pSetLayouts = new VkDescriptorSetLayout[descriptorSetCount];
+        for (uint32_t i = 0; i < descriptorSetCount; ++i) {
+            pSetLayouts[i] = in_struct->pSetLayouts[i];
+        }
+    }
+}
+
+safe_VkDescriptorSetAllocateInfo::safe_VkDescriptorSetAllocateInfo() :
+    pNext(nullptr),
+    pSetLayouts(nullptr)
+{}
+
+safe_VkDescriptorSetAllocateInfo::safe_VkDescriptorSetAllocateInfo(const safe_VkDescriptorSetAllocateInfo& src)
+{
+    sType = src.sType;
+    descriptorPool = src.descriptorPool;
+    descriptorSetCount = src.descriptorSetCount;
+    pSetLayouts = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (descriptorSetCount && src.pSetLayouts) {
+        pSetLayouts = new VkDescriptorSetLayout[descriptorSetCount];
+        for (uint32_t i = 0; i < descriptorSetCount; ++i) {
+            pSetLayouts[i] = src.pSetLayouts[i];
+        }
+    }
+}
+
+safe_VkDescriptorSetAllocateInfo& safe_VkDescriptorSetAllocateInfo::operator=(const safe_VkDescriptorSetAllocateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pSetLayouts)
+        delete[] pSetLayouts;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    descriptorPool = src.descriptorPool;
+    descriptorSetCount = src.descriptorSetCount;
+    pSetLayouts = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (descriptorSetCount && src.pSetLayouts) {
+        pSetLayouts = new VkDescriptorSetLayout[descriptorSetCount];
+        for (uint32_t i = 0; i < descriptorSetCount; ++i) {
+            pSetLayouts[i] = src.pSetLayouts[i];
+        }
+    }
+
+    return *this;
+}
+
+safe_VkDescriptorSetAllocateInfo::~safe_VkDescriptorSetAllocateInfo()
+{
+    if (pSetLayouts)
+        delete[] pSetLayouts;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDescriptorSetAllocateInfo::initialize(const VkDescriptorSetAllocateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    descriptorPool = in_struct->descriptorPool;
+    descriptorSetCount = in_struct->descriptorSetCount;
+    pSetLayouts = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (descriptorSetCount && in_struct->pSetLayouts) {
+        pSetLayouts = new VkDescriptorSetLayout[descriptorSetCount];
+        for (uint32_t i = 0; i < descriptorSetCount; ++i) {
+            pSetLayouts[i] = in_struct->pSetLayouts[i];
+        }
+    }
+}
+
+void safe_VkDescriptorSetAllocateInfo::initialize(const safe_VkDescriptorSetAllocateInfo* src)
+{
+    sType = src->sType;
+    descriptorPool = src->descriptorPool;
+    descriptorSetCount = src->descriptorSetCount;
+    pSetLayouts = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (descriptorSetCount && src->pSetLayouts) {
+        pSetLayouts = new VkDescriptorSetLayout[descriptorSetCount];
+        for (uint32_t i = 0; i < descriptorSetCount; ++i) {
+            pSetLayouts[i] = src->pSetLayouts[i];
+        }
+    }
+}
+
+safe_VkWriteDescriptorSet::safe_VkWriteDescriptorSet(const VkWriteDescriptorSet* in_struct) :
+    sType(in_struct->sType),
+    dstSet(in_struct->dstSet),
+    dstBinding(in_struct->dstBinding),
+    dstArrayElement(in_struct->dstArrayElement),
+    descriptorCount(in_struct->descriptorCount),
+    descriptorType(in_struct->descriptorType),
+    pImageInfo(nullptr),
+    pBufferInfo(nullptr),
+    pTexelBufferView(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
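+    // Only the update array that matches descriptorType is valid, so deep-copy just that one and leave the others null.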
+    switch (descriptorType) {
+        case VK_DESCRIPTOR_TYPE_SAMPLER:
+        case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+        case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+        case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
+        case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
+        if (descriptorCount && in_struct->pImageInfo) {
+            pImageInfo = new VkDescriptorImageInfo[descriptorCount];
+            for (uint32_t i = 0; i < descriptorCount; ++i) {
+                pImageInfo[i] = in_struct->pImageInfo[i];
+            }
+        }
+        break;
+        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
+        if (descriptorCount && in_struct->pBufferInfo) {
+            pBufferInfo = new VkDescriptorBufferInfo[descriptorCount];
+            for (uint32_t i = 0; i < descriptorCount; ++i) {
+                pBufferInfo[i] = in_struct->pBufferInfo[i];
+            }
+        }
+        break;
+        case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+        case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
+        if (descriptorCount && in_struct->pTexelBufferView) {
+            pTexelBufferView = new VkBufferView[descriptorCount];
+            for (uint32_t i = 0; i < descriptorCount; ++i) {
+                pTexelBufferView[i] = in_struct->pTexelBufferView[i];
+            }
+        }
+        break;
+        default:
+        break;
+    }
+}
+
+safe_VkWriteDescriptorSet::safe_VkWriteDescriptorSet() :
+    pNext(nullptr),
+    pImageInfo(nullptr),
+    pBufferInfo(nullptr),
+    pTexelBufferView(nullptr)
+{}
+
+safe_VkWriteDescriptorSet::safe_VkWriteDescriptorSet(const safe_VkWriteDescriptorSet& src)
+{
+    sType = src.sType;
+    dstSet = src.dstSet;
+    dstBinding = src.dstBinding;
+    dstArrayElement = src.dstArrayElement;
+    descriptorCount = src.descriptorCount;
+    descriptorType = src.descriptorType;
+    pImageInfo = nullptr;
+    pBufferInfo = nullptr;
+    pTexelBufferView = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    switch (descriptorType) {
+        case VK_DESCRIPTOR_TYPE_SAMPLER:
+        case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+        case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+        case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
+        case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
+        if (descriptorCount && src.pImageInfo) {
+            pImageInfo = new VkDescriptorImageInfo[descriptorCount];
+            for (uint32_t i = 0; i < descriptorCount; ++i) {
+                pImageInfo[i] = src.pImageInfo[i];
+            }
+        }
+        break;
+        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
+        if (descriptorCount && src.pBufferInfo) {
+            pBufferInfo = new VkDescriptorBufferInfo[descriptorCount];
+            for (uint32_t i = 0; i < descriptorCount; ++i) {
+                pBufferInfo[i] = src.pBufferInfo[i];
+            }
+        }
+        break;
+        case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+        case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
+        if (descriptorCount && src.pTexelBufferView) {
+            pTexelBufferView = new VkBufferView[descriptorCount];
+            for (uint32_t i = 0; i < descriptorCount; ++i) {
+                pTexelBufferView[i] = src.pTexelBufferView[i];
+            }
+        }
+        break;
+        default:
+        break;
+    }
+}
+
+safe_VkWriteDescriptorSet& safe_VkWriteDescriptorSet::operator=(const safe_VkWriteDescriptorSet& src)
+{
+    if (&src == this) return *this;
+
+    if (pImageInfo)
+        delete[] pImageInfo;
+    if (pBufferInfo)
+        delete[] pBufferInfo;
+    if (pTexelBufferView)
+        delete[] pTexelBufferView;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    dstSet = src.dstSet;
+    dstBinding = src.dstBinding;
+    dstArrayElement = src.dstArrayElement;
+    descriptorCount = src.descriptorCount;
+    descriptorType = src.descriptorType;
+    pImageInfo = nullptr;
+    pBufferInfo = nullptr;
+    pTexelBufferView = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    switch (descriptorType) {
+        case VK_DESCRIPTOR_TYPE_SAMPLER:
+        case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+        case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+        case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
+        case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
+        if (descriptorCount && src.pImageInfo) {
+            pImageInfo = new VkDescriptorImageInfo[descriptorCount];
+            for (uint32_t i = 0; i < descriptorCount; ++i) {
+                pImageInfo[i] = src.pImageInfo[i];
+            }
+        }
+        break;
+        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
+        if (descriptorCount && src.pBufferInfo) {
+            pBufferInfo = new VkDescriptorBufferInfo[descriptorCount];
+            for (uint32_t i = 0; i < descriptorCount; ++i) {
+                pBufferInfo[i] = src.pBufferInfo[i];
+            }
+        }
+        break;
+        case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+        case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
+        if (descriptorCount && src.pTexelBufferView) {
+            pTexelBufferView = new VkBufferView[descriptorCount];
+            for (uint32_t i = 0; i < descriptorCount; ++i) {
+                pTexelBufferView[i] = src.pTexelBufferView[i];
+            }
+        }
+        break;
+        default:
+        break;
+    }
+
+    return *this;
+}
+
+safe_VkWriteDescriptorSet::~safe_VkWriteDescriptorSet()
+{
+    if (pImageInfo)
+        delete[] pImageInfo;
+    if (pBufferInfo)
+        delete[] pBufferInfo;
+    if (pTexelBufferView)
+        delete[] pTexelBufferView;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkWriteDescriptorSet::initialize(const VkWriteDescriptorSet* in_struct)
+{
+    sType = in_struct->sType;
+    dstSet = in_struct->dstSet;
+    dstBinding = in_struct->dstBinding;
+    dstArrayElement = in_struct->dstArrayElement;
+    descriptorCount = in_struct->descriptorCount;
+    descriptorType = in_struct->descriptorType;
+    pImageInfo = nullptr;
+    pBufferInfo = nullptr;
+    pTexelBufferView = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    switch (descriptorType) {
+        case VK_DESCRIPTOR_TYPE_SAMPLER:
+        case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+        case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+        case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
+        case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
+        if (descriptorCount && in_struct->pImageInfo) {
+            pImageInfo = new VkDescriptorImageInfo[descriptorCount];
+            for (uint32_t i = 0; i < descriptorCount; ++i) {
+                pImageInfo[i] = in_struct->pImageInfo[i];
+            }
+        }
+        break;
+        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
+        if (descriptorCount && in_struct->pBufferInfo) {
+            pBufferInfo = new VkDescriptorBufferInfo[descriptorCount];
+            for (uint32_t i = 0; i < descriptorCount; ++i) {
+                pBufferInfo[i] = in_struct->pBufferInfo[i];
+            }
+        }
+        break;
+        case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+        case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
+        if (descriptorCount && in_struct->pTexelBufferView) {
+            pTexelBufferView = new VkBufferView[descriptorCount];
+            for (uint32_t i = 0; i < descriptorCount; ++i) {
+                pTexelBufferView[i] = in_struct->pTexelBufferView[i];
+            }
+        }
+        break;
+        default:
+        break;
+    }
+}
+
+void safe_VkWriteDescriptorSet::initialize(const safe_VkWriteDescriptorSet* src)
+{
+    sType = src->sType;
+    dstSet = src->dstSet;
+    dstBinding = src->dstBinding;
+    dstArrayElement = src->dstArrayElement;
+    descriptorCount = src->descriptorCount;
+    descriptorType = src->descriptorType;
+    pImageInfo = nullptr;
+    pBufferInfo = nullptr;
+    pTexelBufferView = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    switch (descriptorType) {
+        case VK_DESCRIPTOR_TYPE_SAMPLER:
+        case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+        case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+        case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
+        case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
+        if (descriptorCount && src->pImageInfo) {
+            pImageInfo = new VkDescriptorImageInfo[descriptorCount];
+            for (uint32_t i = 0; i < descriptorCount; ++i) {
+                pImageInfo[i] = src->pImageInfo[i];
+            }
+        }
+        break;
+        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
+        if (descriptorCount && src->pBufferInfo) {
+            pBufferInfo = new VkDescriptorBufferInfo[descriptorCount];
+            for (uint32_t i = 0; i < descriptorCount; ++i) {
+                pBufferInfo[i] = src->pBufferInfo[i];
+            }
+        }
+        break;
+        case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+        case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
+        if (descriptorCount && src->pTexelBufferView) {
+            pTexelBufferView = new VkBufferView[descriptorCount];
+            for (uint32_t i = 0; i < descriptorCount; ++i) {
+                pTexelBufferView[i] = src->pTexelBufferView[i];
+            }
+        }
+        break;
+        default:
+        break;
+    }
+}
+
+safe_VkCopyDescriptorSet::safe_VkCopyDescriptorSet(const VkCopyDescriptorSet* in_struct) :
+    sType(in_struct->sType),
+    srcSet(in_struct->srcSet),
+    srcBinding(in_struct->srcBinding),
+    srcArrayElement(in_struct->srcArrayElement),
+    dstSet(in_struct->dstSet),
+    dstBinding(in_struct->dstBinding),
+    dstArrayElement(in_struct->dstArrayElement),
+    descriptorCount(in_struct->descriptorCount)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkCopyDescriptorSet::safe_VkCopyDescriptorSet() :
+    pNext(nullptr)
+{}
+
+safe_VkCopyDescriptorSet::safe_VkCopyDescriptorSet(const safe_VkCopyDescriptorSet& src)
+{
+    sType = src.sType;
+    srcSet = src.srcSet;
+    srcBinding = src.srcBinding;
+    srcArrayElement = src.srcArrayElement;
+    dstSet = src.dstSet;
+    dstBinding = src.dstBinding;
+    dstArrayElement = src.dstArrayElement;
+    descriptorCount = src.descriptorCount;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkCopyDescriptorSet& safe_VkCopyDescriptorSet::operator=(const safe_VkCopyDescriptorSet& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    srcSet = src.srcSet;
+    srcBinding = src.srcBinding;
+    srcArrayElement = src.srcArrayElement;
+    dstSet = src.dstSet;
+    dstBinding = src.dstBinding;
+    dstArrayElement = src.dstArrayElement;
+    descriptorCount = src.descriptorCount;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkCopyDescriptorSet::~safe_VkCopyDescriptorSet()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkCopyDescriptorSet::initialize(const VkCopyDescriptorSet* in_struct)
+{
+    sType = in_struct->sType;
+    srcSet = in_struct->srcSet;
+    srcBinding = in_struct->srcBinding;
+    srcArrayElement = in_struct->srcArrayElement;
+    dstSet = in_struct->dstSet;
+    dstBinding = in_struct->dstBinding;
+    dstArrayElement = in_struct->dstArrayElement;
+    descriptorCount = in_struct->descriptorCount;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkCopyDescriptorSet::initialize(const safe_VkCopyDescriptorSet* src)
+{
+    sType = src->sType;
+    srcSet = src->srcSet;
+    srcBinding = src->srcBinding;
+    srcArrayElement = src->srcArrayElement;
+    dstSet = src->dstSet;
+    dstBinding = src->dstBinding;
+    dstArrayElement = src->dstArrayElement;
+    descriptorCount = src->descriptorCount;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkFramebufferCreateInfo::safe_VkFramebufferCreateInfo(const VkFramebufferCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    renderPass(in_struct->renderPass),
+    attachmentCount(in_struct->attachmentCount),
+    pAttachments(nullptr),
+    width(in_struct->width),
+    height(in_struct->height),
+    layers(in_struct->layers)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (attachmentCount && in_struct->pAttachments) {
+        pAttachments = new VkImageView[attachmentCount];
+        for (uint32_t i = 0; i < attachmentCount; ++i) {
+            pAttachments[i] = in_struct->pAttachments[i];
+        }
+    }
+}
+
+safe_VkFramebufferCreateInfo::safe_VkFramebufferCreateInfo() :
+    pNext(nullptr),
+    pAttachments(nullptr)
+{}
+
+safe_VkFramebufferCreateInfo::safe_VkFramebufferCreateInfo(const safe_VkFramebufferCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    renderPass = src.renderPass;
+    attachmentCount = src.attachmentCount;
+    pAttachments = nullptr;
+    width = src.width;
+    height = src.height;
+    layers = src.layers;
+    pNext = SafePnextCopy(src.pNext);
+    if (attachmentCount && src.pAttachments) {
+        pAttachments = new VkImageView[attachmentCount];
+        for (uint32_t i = 0; i < attachmentCount; ++i) {
+            pAttachments[i] = src.pAttachments[i];
+        }
+    }
+}
+
+safe_VkFramebufferCreateInfo& safe_VkFramebufferCreateInfo::operator=(const safe_VkFramebufferCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pAttachments)
+        delete[] pAttachments;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    renderPass = src.renderPass;
+    attachmentCount = src.attachmentCount;
+    pAttachments = nullptr;
+    width = src.width;
+    height = src.height;
+    layers = src.layers;
+    pNext = SafePnextCopy(src.pNext);
+    if (attachmentCount && src.pAttachments) {
+        pAttachments = new VkImageView[attachmentCount];
+        for (uint32_t i = 0; i < attachmentCount; ++i) {
+            pAttachments[i] = src.pAttachments[i];
+        }
+    }
+
+    return *this;
+}
+
+safe_VkFramebufferCreateInfo::~safe_VkFramebufferCreateInfo()
+{
+    if (pAttachments)
+        delete[] pAttachments;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkFramebufferCreateInfo::initialize(const VkFramebufferCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    renderPass = in_struct->renderPass;
+    attachmentCount = in_struct->attachmentCount;
+    pAttachments = nullptr;
+    width = in_struct->width;
+    height = in_struct->height;
+    layers = in_struct->layers;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (attachmentCount && in_struct->pAttachments) {
+        pAttachments = new VkImageView[attachmentCount];
+        for (uint32_t i = 0; i < attachmentCount; ++i) {
+            pAttachments[i] = in_struct->pAttachments[i];
+        }
+    }
+}
+
+void safe_VkFramebufferCreateInfo::initialize(const safe_VkFramebufferCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    renderPass = src->renderPass;
+    attachmentCount = src->attachmentCount;
+    pAttachments = nullptr;
+    width = src->width;
+    height = src->height;
+    layers = src->layers;
+    pNext = SafePnextCopy(src->pNext);
+    if (attachmentCount && src->pAttachments) {
+        pAttachments = new VkImageView[attachmentCount];
+        for (uint32_t i = 0; i < attachmentCount; ++i) {
+            pAttachments[i] = src->pAttachments[i];
+        }
+    }
+}
+
+safe_VkSubpassDescription::safe_VkSubpassDescription(const VkSubpassDescription* in_struct) :
+    flags(in_struct->flags),
+    pipelineBindPoint(in_struct->pipelineBindPoint),
+    inputAttachmentCount(in_struct->inputAttachmentCount),
+    pInputAttachments(nullptr),
+    colorAttachmentCount(in_struct->colorAttachmentCount),
+    pColorAttachments(nullptr),
+    pResolveAttachments(nullptr),
+    pDepthStencilAttachment(nullptr),
+    preserveAttachmentCount(in_struct->preserveAttachmentCount),
+    pPreserveAttachments(nullptr)
+{
+    if (in_struct->pInputAttachments) {
+        pInputAttachments = new VkAttachmentReference[in_struct->inputAttachmentCount];
+        memcpy ((void *)pInputAttachments, (void *)in_struct->pInputAttachments, sizeof(VkAttachmentReference)*in_struct->inputAttachmentCount);
+    }
+    if (in_struct->pColorAttachments) {
+        pColorAttachments = new VkAttachmentReference[in_struct->colorAttachmentCount];
+        memcpy ((void *)pColorAttachments, (void *)in_struct->pColorAttachments, sizeof(VkAttachmentReference)*in_struct->colorAttachmentCount);
+    }
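+    // NOTE: when pResolveAttachments is present, the Vulkan spec defines it to contain
+    // colorAttachmentCount elements, so the copy below is intentionally sized by that count.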
+    if (in_struct->pResolveAttachments) {
+        pResolveAttachments = new VkAttachmentReference[in_struct->colorAttachmentCount];
+        memcpy ((void *)pResolveAttachments, (void *)in_struct->pResolveAttachments, sizeof(VkAttachmentReference)*in_struct->colorAttachmentCount);
+    }
+    if (in_struct->pDepthStencilAttachment) {
+        pDepthStencilAttachment = new VkAttachmentReference(*in_struct->pDepthStencilAttachment);
+    }
+    if (in_struct->pPreserveAttachments) {
+        pPreserveAttachments = new uint32_t[in_struct->preserveAttachmentCount];
+        memcpy ((void *)pPreserveAttachments, (void *)in_struct->pPreserveAttachments, sizeof(uint32_t)*in_struct->preserveAttachmentCount);
+    }
+}
+
+safe_VkSubpassDescription::safe_VkSubpassDescription() :
+    pInputAttachments(nullptr),
+    pColorAttachments(nullptr),
+    pResolveAttachments(nullptr),
+    pDepthStencilAttachment(nullptr),
+    pPreserveAttachments(nullptr)
+{}
+
+safe_VkSubpassDescription::safe_VkSubpassDescription(const safe_VkSubpassDescription& src)
+{
+    flags = src.flags;
+    pipelineBindPoint = src.pipelineBindPoint;
+    inputAttachmentCount = src.inputAttachmentCount;
+    pInputAttachments = nullptr;
+    colorAttachmentCount = src.colorAttachmentCount;
+    pColorAttachments = nullptr;
+    pResolveAttachments = nullptr;
+    pDepthStencilAttachment = nullptr;
+    preserveAttachmentCount = src.preserveAttachmentCount;
+    pPreserveAttachments = nullptr;
+    if (src.pInputAttachments) {
+        pInputAttachments = new VkAttachmentReference[src.inputAttachmentCount];
+        memcpy ((void *)pInputAttachments, (void *)src.pInputAttachments, sizeof(VkAttachmentReference)*src.inputAttachmentCount);
+    }
+    if (src.pColorAttachments) {
+        pColorAttachments = new VkAttachmentReference[src.colorAttachmentCount];
+        memcpy ((void *)pColorAttachments, (void *)src.pColorAttachments, sizeof(VkAttachmentReference)*src.colorAttachmentCount);
+    }
+    if (src.pResolveAttachments) {
+        pResolveAttachments = new VkAttachmentReference[src.colorAttachmentCount];
+        memcpy ((void *)pResolveAttachments, (void *)src.pResolveAttachments, sizeof(VkAttachmentReference)*src.colorAttachmentCount);
+    }
+    if (src.pDepthStencilAttachment) {
+        pDepthStencilAttachment = new VkAttachmentReference(*src.pDepthStencilAttachment);
+    }
+    if (src.pPreserveAttachments) {
+        pPreserveAttachments = new uint32_t[src.preserveAttachmentCount];
+        memcpy ((void *)pPreserveAttachments, (void *)src.pPreserveAttachments, sizeof(uint32_t)*src.preserveAttachmentCount);
+    }
+}
+
+safe_VkSubpassDescription& safe_VkSubpassDescription::operator=(const safe_VkSubpassDescription& src)
+{
+    if (&src == this) return *this;
+
+    if (pInputAttachments)
+        delete[] pInputAttachments;
+    if (pColorAttachments)
+        delete[] pColorAttachments;
+    if (pResolveAttachments)
+        delete[] pResolveAttachments;
+    if (pDepthStencilAttachment)
+        delete pDepthStencilAttachment;
+    if (pPreserveAttachments)
+        delete[] pPreserveAttachments;
+
+    flags = src.flags;
+    pipelineBindPoint = src.pipelineBindPoint;
+    inputAttachmentCount = src.inputAttachmentCount;
+    pInputAttachments = nullptr;
+    colorAttachmentCount = src.colorAttachmentCount;
+    pColorAttachments = nullptr;
+    pResolveAttachments = nullptr;
+    pDepthStencilAttachment = nullptr;
+    preserveAttachmentCount = src.preserveAttachmentCount;
+    pPreserveAttachments = nullptr;
+    if (src.pInputAttachments) {
+        pInputAttachments = new VkAttachmentReference[src.inputAttachmentCount];
+        memcpy ((void *)pInputAttachments, (void *)src.pInputAttachments, sizeof(VkAttachmentReference)*src.inputAttachmentCount);
+    }
+    if (src.pColorAttachments) {
+        pColorAttachments = new VkAttachmentReference[src.colorAttachmentCount];
+        memcpy ((void *)pColorAttachments, (void *)src.pColorAttachments, sizeof(VkAttachmentReference)*src.colorAttachmentCount);
+    }
+    if (src.pResolveAttachments) {
+        pResolveAttachments = new VkAttachmentReference[src.colorAttachmentCount];
+        memcpy ((void *)pResolveAttachments, (void *)src.pResolveAttachments, sizeof(VkAttachmentReference)*src.colorAttachmentCount);
+    }
+    if (src.pDepthStencilAttachment) {
+        pDepthStencilAttachment = new VkAttachmentReference(*src.pDepthStencilAttachment);
+    }
+    if (src.pPreserveAttachments) {
+        pPreserveAttachments = new uint32_t[src.preserveAttachmentCount];
+        memcpy ((void *)pPreserveAttachments, (void *)src.pPreserveAttachments, sizeof(uint32_t)*src.preserveAttachmentCount);
+    }
+
+    return *this;
+}
+
+safe_VkSubpassDescription::~safe_VkSubpassDescription()
+{
+    if (pInputAttachments)
+        delete[] pInputAttachments;
+    if (pColorAttachments)
+        delete[] pColorAttachments;
+    if (pResolveAttachments)
+        delete[] pResolveAttachments;
+    if (pDepthStencilAttachment)
+        delete pDepthStencilAttachment;
+    if (pPreserveAttachments)
+        delete[] pPreserveAttachments;
+}
+
+void safe_VkSubpassDescription::initialize(const VkSubpassDescription* in_struct)
+{
+    flags = in_struct->flags;
+    pipelineBindPoint = in_struct->pipelineBindPoint;
+    inputAttachmentCount = in_struct->inputAttachmentCount;
+    pInputAttachments = nullptr;
+    colorAttachmentCount = in_struct->colorAttachmentCount;
+    pColorAttachments = nullptr;
+    pResolveAttachments = nullptr;
+    pDepthStencilAttachment = nullptr;
+    preserveAttachmentCount = in_struct->preserveAttachmentCount;
+    pPreserveAttachments = nullptr;
+    if (in_struct->pInputAttachments) {
+        pInputAttachments = new VkAttachmentReference[in_struct->inputAttachmentCount];
+        memcpy ((void *)pInputAttachments, (void *)in_struct->pInputAttachments, sizeof(VkAttachmentReference)*in_struct->inputAttachmentCount);
+    }
+    if (in_struct->pColorAttachments) {
+        pColorAttachments = new VkAttachmentReference[in_struct->colorAttachmentCount];
+        memcpy ((void *)pColorAttachments, (void *)in_struct->pColorAttachments, sizeof(VkAttachmentReference)*in_struct->colorAttachmentCount);
+    }
+    if (in_struct->pResolveAttachments) {
+        pResolveAttachments = new VkAttachmentReference[in_struct->colorAttachmentCount];
+        memcpy ((void *)pResolveAttachments, (void *)in_struct->pResolveAttachments, sizeof(VkAttachmentReference)*in_struct->colorAttachmentCount);
+    }
+    if (in_struct->pDepthStencilAttachment) {
+        pDepthStencilAttachment = new VkAttachmentReference(*in_struct->pDepthStencilAttachment);
+    }
+    if (in_struct->pPreserveAttachments) {
+        pPreserveAttachments = new uint32_t[in_struct->preserveAttachmentCount];
+        memcpy ((void *)pPreserveAttachments, (void *)in_struct->pPreserveAttachments, sizeof(uint32_t)*in_struct->preserveAttachmentCount);
+    }
+}
+
+void safe_VkSubpassDescription::initialize(const safe_VkSubpassDescription* src)
+{
+    flags = src->flags;
+    pipelineBindPoint = src->pipelineBindPoint;
+    inputAttachmentCount = src->inputAttachmentCount;
+    pInputAttachments = nullptr;
+    colorAttachmentCount = src->colorAttachmentCount;
+    pColorAttachments = nullptr;
+    pResolveAttachments = nullptr;
+    pDepthStencilAttachment = nullptr;
+    preserveAttachmentCount = src->preserveAttachmentCount;
+    pPreserveAttachments = nullptr;
+    if (src->pInputAttachments) {
+        pInputAttachments = new VkAttachmentReference[src->inputAttachmentCount];
+        memcpy ((void *)pInputAttachments, (void *)src->pInputAttachments, sizeof(VkAttachmentReference)*src->inputAttachmentCount);
+    }
+    if (src->pColorAttachments) {
+        pColorAttachments = new VkAttachmentReference[src->colorAttachmentCount];
+        memcpy ((void *)pColorAttachments, (void *)src->pColorAttachments, sizeof(VkAttachmentReference)*src->colorAttachmentCount);
+    }
+    if (src->pResolveAttachments) {
+        pResolveAttachments = new VkAttachmentReference[src->colorAttachmentCount];
+        memcpy ((void *)pResolveAttachments, (void *)src->pResolveAttachments, sizeof(VkAttachmentReference)*src->colorAttachmentCount);
+    }
+    if (src->pDepthStencilAttachment) {
+        pDepthStencilAttachment = new VkAttachmentReference(*src->pDepthStencilAttachment);
+    }
+    if (src->pPreserveAttachments) {
+        pPreserveAttachments = new uint32_t[src->preserveAttachmentCount];
+        memcpy ((void *)pPreserveAttachments, (void *)src->pPreserveAttachments, sizeof(uint32_t)*src->preserveAttachmentCount);
+    }
+}
+
+safe_VkRenderPassCreateInfo::safe_VkRenderPassCreateInfo(const VkRenderPassCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    attachmentCount(in_struct->attachmentCount),
+    pAttachments(nullptr),
+    subpassCount(in_struct->subpassCount),
+    pSubpasses(nullptr),
+    dependencyCount(in_struct->dependencyCount),
+    pDependencies(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pAttachments) {
+        pAttachments = new VkAttachmentDescription[in_struct->attachmentCount];
+        memcpy ((void *)pAttachments, (void *)in_struct->pAttachments, sizeof(VkAttachmentDescription)*in_struct->attachmentCount);
+    }
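+    // Subpass descriptions own nested attachment arrays, so each element is deep-copied via
+    // safe_VkSubpassDescription::initialize() instead of the plain memcpy used for the POD arrays.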
+    if (subpassCount && in_struct->pSubpasses) {
+        pSubpasses = new safe_VkSubpassDescription[subpassCount];
+        for (uint32_t i = 0; i < subpassCount; ++i) {
+            pSubpasses[i].initialize(&in_struct->pSubpasses[i]);
+        }
+    }
+    if (in_struct->pDependencies) {
+        pDependencies = new VkSubpassDependency[in_struct->dependencyCount];
+        memcpy ((void *)pDependencies, (void *)in_struct->pDependencies, sizeof(VkSubpassDependency)*in_struct->dependencyCount);
+    }
+}
+
+safe_VkRenderPassCreateInfo::safe_VkRenderPassCreateInfo() :
+    pNext(nullptr),
+    pAttachments(nullptr),
+    pSubpasses(nullptr),
+    pDependencies(nullptr)
+{}
+
+safe_VkRenderPassCreateInfo::safe_VkRenderPassCreateInfo(const safe_VkRenderPassCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    attachmentCount = src.attachmentCount;
+    pAttachments = nullptr;
+    subpassCount = src.subpassCount;
+    pSubpasses = nullptr;
+    dependencyCount = src.dependencyCount;
+    pDependencies = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pAttachments) {
+        pAttachments = new VkAttachmentDescription[src.attachmentCount];
+        memcpy ((void *)pAttachments, (void *)src.pAttachments, sizeof(VkAttachmentDescription)*src.attachmentCount);
+    }
+    if (subpassCount && src.pSubpasses) {
+        pSubpasses = new safe_VkSubpassDescription[subpassCount];
+        for (uint32_t i = 0; i < subpassCount; ++i) {
+            pSubpasses[i].initialize(&src.pSubpasses[i]);
+        }
+    }
+    if (src.pDependencies) {
+        pDependencies = new VkSubpassDependency[src.dependencyCount];
+        memcpy ((void *)pDependencies, (void *)src.pDependencies, sizeof(VkSubpassDependency)*src.dependencyCount);
+    }
+}
+
+safe_VkRenderPassCreateInfo& safe_VkRenderPassCreateInfo::operator=(const safe_VkRenderPassCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pAttachments)
+        delete[] pAttachments;
+    if (pSubpasses)
+        delete[] pSubpasses;
+    if (pDependencies)
+        delete[] pDependencies;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    attachmentCount = src.attachmentCount;
+    pAttachments = nullptr;
+    subpassCount = src.subpassCount;
+    pSubpasses = nullptr;
+    dependencyCount = src.dependencyCount;
+    pDependencies = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pAttachments) {
+        pAttachments = new VkAttachmentDescription[src.attachmentCount];
+        memcpy ((void *)pAttachments, (void *)src.pAttachments, sizeof(VkAttachmentDescription)*src.attachmentCount);
+    }
+    if (subpassCount && src.pSubpasses) {
+        pSubpasses = new safe_VkSubpassDescription[subpassCount];
+        for (uint32_t i = 0; i < subpassCount; ++i) {
+            pSubpasses[i].initialize(&src.pSubpasses[i]);
+        }
+    }
+    if (src.pDependencies) {
+        pDependencies = new VkSubpassDependency[src.dependencyCount];
+        memcpy ((void *)pDependencies, (void *)src.pDependencies, sizeof(VkSubpassDependency)*src.dependencyCount);
+    }
+
+    return *this;
+}
+
+safe_VkRenderPassCreateInfo::~safe_VkRenderPassCreateInfo()
+{
+    if (pAttachments)
+        delete[] pAttachments;
+    if (pSubpasses)
+        delete[] pSubpasses;
+    if (pDependencies)
+        delete[] pDependencies;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkRenderPassCreateInfo::initialize(const VkRenderPassCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    attachmentCount = in_struct->attachmentCount;
+    pAttachments = nullptr;
+    subpassCount = in_struct->subpassCount;
+    pSubpasses = nullptr;
+    dependencyCount = in_struct->dependencyCount;
+    pDependencies = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pAttachments) {
+        pAttachments = new VkAttachmentDescription[in_struct->attachmentCount];
+        memcpy ((void *)pAttachments, (void *)in_struct->pAttachments, sizeof(VkAttachmentDescription)*in_struct->attachmentCount);
+    }
+    if (subpassCount && in_struct->pSubpasses) {
+        pSubpasses = new safe_VkSubpassDescription[subpassCount];
+        for (uint32_t i = 0; i < subpassCount; ++i) {
+            pSubpasses[i].initialize(&in_struct->pSubpasses[i]);
+        }
+    }
+    if (in_struct->pDependencies) {
+        pDependencies = new VkSubpassDependency[in_struct->dependencyCount];
+        memcpy ((void *)pDependencies, (void *)in_struct->pDependencies, sizeof(VkSubpassDependency)*in_struct->dependencyCount);
+    }
+}
+
+void safe_VkRenderPassCreateInfo::initialize(const safe_VkRenderPassCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    attachmentCount = src->attachmentCount;
+    pAttachments = nullptr;
+    subpassCount = src->subpassCount;
+    pSubpasses = nullptr;
+    dependencyCount = src->dependencyCount;
+    pDependencies = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pAttachments) {
+        pAttachments = new VkAttachmentDescription[src->attachmentCount];
+        memcpy ((void *)pAttachments, (void *)src->pAttachments, sizeof(VkAttachmentDescription)*src->attachmentCount);
+    }
+    if (subpassCount && src->pSubpasses) {
+        pSubpasses = new safe_VkSubpassDescription[subpassCount];
+        for (uint32_t i = 0; i < subpassCount; ++i) {
+            pSubpasses[i].initialize(&src->pSubpasses[i]);
+        }
+    }
+    if (src->pDependencies) {
+        pDependencies = new VkSubpassDependency[src->dependencyCount];
+        memcpy ((void *)pDependencies, (void *)src->pDependencies, sizeof(VkSubpassDependency)*src->dependencyCount);
+    }
+}
+
+safe_VkCommandPoolCreateInfo::safe_VkCommandPoolCreateInfo(const VkCommandPoolCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    queueFamilyIndex(in_struct->queueFamilyIndex)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkCommandPoolCreateInfo::safe_VkCommandPoolCreateInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkCommandPoolCreateInfo::safe_VkCommandPoolCreateInfo(const safe_VkCommandPoolCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    queueFamilyIndex = src.queueFamilyIndex;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkCommandPoolCreateInfo& safe_VkCommandPoolCreateInfo::operator=(const safe_VkCommandPoolCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    queueFamilyIndex = src.queueFamilyIndex;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkCommandPoolCreateInfo::~safe_VkCommandPoolCreateInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkCommandPoolCreateInfo::initialize(const VkCommandPoolCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    queueFamilyIndex = in_struct->queueFamilyIndex;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkCommandPoolCreateInfo::initialize(const safe_VkCommandPoolCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    queueFamilyIndex = src->queueFamilyIndex;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkCommandBufferAllocateInfo::safe_VkCommandBufferAllocateInfo(const VkCommandBufferAllocateInfo* in_struct) :
+    sType(in_struct->sType),
+    commandPool(in_struct->commandPool),
+    level(in_struct->level),
+    commandBufferCount(in_struct->commandBufferCount)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkCommandBufferAllocateInfo::safe_VkCommandBufferAllocateInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkCommandBufferAllocateInfo::safe_VkCommandBufferAllocateInfo(const safe_VkCommandBufferAllocateInfo& src)
+{
+    sType = src.sType;
+    commandPool = src.commandPool;
+    level = src.level;
+    commandBufferCount = src.commandBufferCount;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkCommandBufferAllocateInfo& safe_VkCommandBufferAllocateInfo::operator=(const safe_VkCommandBufferAllocateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    commandPool = src.commandPool;
+    level = src.level;
+    commandBufferCount = src.commandBufferCount;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkCommandBufferAllocateInfo::~safe_VkCommandBufferAllocateInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkCommandBufferAllocateInfo::initialize(const VkCommandBufferAllocateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    commandPool = in_struct->commandPool;
+    level = in_struct->level;
+    commandBufferCount = in_struct->commandBufferCount;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkCommandBufferAllocateInfo::initialize(const safe_VkCommandBufferAllocateInfo* src)
+{
+    sType = src->sType;
+    commandPool = src->commandPool;
+    level = src->level;
+    commandBufferCount = src->commandBufferCount;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkCommandBufferInheritanceInfo::safe_VkCommandBufferInheritanceInfo(const VkCommandBufferInheritanceInfo* in_struct) :
+    sType(in_struct->sType),
+    renderPass(in_struct->renderPass),
+    subpass(in_struct->subpass),
+    framebuffer(in_struct->framebuffer),
+    occlusionQueryEnable(in_struct->occlusionQueryEnable),
+    queryFlags(in_struct->queryFlags),
+    pipelineStatistics(in_struct->pipelineStatistics)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkCommandBufferInheritanceInfo::safe_VkCommandBufferInheritanceInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkCommandBufferInheritanceInfo::safe_VkCommandBufferInheritanceInfo(const safe_VkCommandBufferInheritanceInfo& src)
+{
+    sType = src.sType;
+    renderPass = src.renderPass;
+    subpass = src.subpass;
+    framebuffer = src.framebuffer;
+    occlusionQueryEnable = src.occlusionQueryEnable;
+    queryFlags = src.queryFlags;
+    pipelineStatistics = src.pipelineStatistics;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkCommandBufferInheritanceInfo& safe_VkCommandBufferInheritanceInfo::operator=(const safe_VkCommandBufferInheritanceInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    renderPass = src.renderPass;
+    subpass = src.subpass;
+    framebuffer = src.framebuffer;
+    occlusionQueryEnable = src.occlusionQueryEnable;
+    queryFlags = src.queryFlags;
+    pipelineStatistics = src.pipelineStatistics;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkCommandBufferInheritanceInfo::~safe_VkCommandBufferInheritanceInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkCommandBufferInheritanceInfo::initialize(const VkCommandBufferInheritanceInfo* in_struct)
+{
+    sType = in_struct->sType;
+    renderPass = in_struct->renderPass;
+    subpass = in_struct->subpass;
+    framebuffer = in_struct->framebuffer;
+    occlusionQueryEnable = in_struct->occlusionQueryEnable;
+    queryFlags = in_struct->queryFlags;
+    pipelineStatistics = in_struct->pipelineStatistics;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkCommandBufferInheritanceInfo::initialize(const safe_VkCommandBufferInheritanceInfo* src)
+{
+    sType = src->sType;
+    renderPass = src->renderPass;
+    subpass = src->subpass;
+    framebuffer = src->framebuffer;
+    occlusionQueryEnable = src->occlusionQueryEnable;
+    queryFlags = src->queryFlags;
+    pipelineStatistics = src->pipelineStatistics;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkCommandBufferBeginInfo::safe_VkCommandBufferBeginInfo(const VkCommandBufferBeginInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    pInheritanceInfo(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
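+    // pInheritanceInfo is wrapped in its safe_* counterpart so its own pNext chain is deep-copied as well.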
+    if (in_struct->pInheritanceInfo)
+        pInheritanceInfo = new safe_VkCommandBufferInheritanceInfo(in_struct->pInheritanceInfo);
+}
+
+safe_VkCommandBufferBeginInfo::safe_VkCommandBufferBeginInfo() :
+    pNext(nullptr),
+    pInheritanceInfo(nullptr)
+{}
+
+safe_VkCommandBufferBeginInfo::safe_VkCommandBufferBeginInfo(const safe_VkCommandBufferBeginInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    pInheritanceInfo = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pInheritanceInfo)
+        pInheritanceInfo = new safe_VkCommandBufferInheritanceInfo(*src.pInheritanceInfo);
+}
+
+safe_VkCommandBufferBeginInfo& safe_VkCommandBufferBeginInfo::operator=(const safe_VkCommandBufferBeginInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pInheritanceInfo)
+        delete pInheritanceInfo;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    pInheritanceInfo = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pInheritanceInfo)
+        pInheritanceInfo = new safe_VkCommandBufferInheritanceInfo(*src.pInheritanceInfo);
+
+    return *this;
+}
+
+safe_VkCommandBufferBeginInfo::~safe_VkCommandBufferBeginInfo()
+{
+    if (pInheritanceInfo)
+        delete pInheritanceInfo;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkCommandBufferBeginInfo::initialize(const VkCommandBufferBeginInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    pInheritanceInfo = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pInheritanceInfo)
+        pInheritanceInfo = new safe_VkCommandBufferInheritanceInfo(in_struct->pInheritanceInfo);
+}
+
+void safe_VkCommandBufferBeginInfo::initialize(const safe_VkCommandBufferBeginInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    pInheritanceInfo = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pInheritanceInfo)
+        pInheritanceInfo = new safe_VkCommandBufferInheritanceInfo(*src->pInheritanceInfo);
+}
+
+safe_VkMemoryBarrier::safe_VkMemoryBarrier(const VkMemoryBarrier* in_struct) :
+    sType(in_struct->sType),
+    srcAccessMask(in_struct->srcAccessMask),
+    dstAccessMask(in_struct->dstAccessMask)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkMemoryBarrier::safe_VkMemoryBarrier() :
+    pNext(nullptr)
+{}
+
+safe_VkMemoryBarrier::safe_VkMemoryBarrier(const safe_VkMemoryBarrier& src)
+{
+    sType = src.sType;
+    srcAccessMask = src.srcAccessMask;
+    dstAccessMask = src.dstAccessMask;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkMemoryBarrier& safe_VkMemoryBarrier::operator=(const safe_VkMemoryBarrier& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    srcAccessMask = src.srcAccessMask;
+    dstAccessMask = src.dstAccessMask;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkMemoryBarrier::~safe_VkMemoryBarrier()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkMemoryBarrier::initialize(const VkMemoryBarrier* in_struct)
+{
+    sType = in_struct->sType;
+    srcAccessMask = in_struct->srcAccessMask;
+    dstAccessMask = in_struct->dstAccessMask;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkMemoryBarrier::initialize(const safe_VkMemoryBarrier* src)
+{
+    sType = src->sType;
+    srcAccessMask = src->srcAccessMask;
+    dstAccessMask = src->dstAccessMask;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkBufferMemoryBarrier::safe_VkBufferMemoryBarrier(const VkBufferMemoryBarrier* in_struct) :
+    sType(in_struct->sType),
+    srcAccessMask(in_struct->srcAccessMask),
+    dstAccessMask(in_struct->dstAccessMask),
+    srcQueueFamilyIndex(in_struct->srcQueueFamilyIndex),
+    dstQueueFamilyIndex(in_struct->dstQueueFamilyIndex),
+    buffer(in_struct->buffer),
+    offset(in_struct->offset),
+    size(in_struct->size)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkBufferMemoryBarrier::safe_VkBufferMemoryBarrier() :
+    pNext(nullptr)
+{}
+
+safe_VkBufferMemoryBarrier::safe_VkBufferMemoryBarrier(const safe_VkBufferMemoryBarrier& src)
+{
+    sType = src.sType;
+    srcAccessMask = src.srcAccessMask;
+    dstAccessMask = src.dstAccessMask;
+    srcQueueFamilyIndex = src.srcQueueFamilyIndex;
+    dstQueueFamilyIndex = src.dstQueueFamilyIndex;
+    buffer = src.buffer;
+    offset = src.offset;
+    size = src.size;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkBufferMemoryBarrier& safe_VkBufferMemoryBarrier::operator=(const safe_VkBufferMemoryBarrier& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    srcAccessMask = src.srcAccessMask;
+    dstAccessMask = src.dstAccessMask;
+    srcQueueFamilyIndex = src.srcQueueFamilyIndex;
+    dstQueueFamilyIndex = src.dstQueueFamilyIndex;
+    buffer = src.buffer;
+    offset = src.offset;
+    size = src.size;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkBufferMemoryBarrier::~safe_VkBufferMemoryBarrier()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkBufferMemoryBarrier::initialize(const VkBufferMemoryBarrier* in_struct)
+{
+    sType = in_struct->sType;
+    srcAccessMask = in_struct->srcAccessMask;
+    dstAccessMask = in_struct->dstAccessMask;
+    srcQueueFamilyIndex = in_struct->srcQueueFamilyIndex;
+    dstQueueFamilyIndex = in_struct->dstQueueFamilyIndex;
+    buffer = in_struct->buffer;
+    offset = in_struct->offset;
+    size = in_struct->size;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkBufferMemoryBarrier::initialize(const safe_VkBufferMemoryBarrier* src)
+{
+    sType = src->sType;
+    srcAccessMask = src->srcAccessMask;
+    dstAccessMask = src->dstAccessMask;
+    srcQueueFamilyIndex = src->srcQueueFamilyIndex;
+    dstQueueFamilyIndex = src->dstQueueFamilyIndex;
+    buffer = src->buffer;
+    offset = src->offset;
+    size = src->size;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkImageMemoryBarrier::safe_VkImageMemoryBarrier(const VkImageMemoryBarrier* in_struct) :
+    sType(in_struct->sType),
+    srcAccessMask(in_struct->srcAccessMask),
+    dstAccessMask(in_struct->dstAccessMask),
+    oldLayout(in_struct->oldLayout),
+    newLayout(in_struct->newLayout),
+    srcQueueFamilyIndex(in_struct->srcQueueFamilyIndex),
+    dstQueueFamilyIndex(in_struct->dstQueueFamilyIndex),
+    image(in_struct->image),
+    subresourceRange(in_struct->subresourceRange)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkImageMemoryBarrier::safe_VkImageMemoryBarrier() :
+    pNext(nullptr)
+{}
+
+safe_VkImageMemoryBarrier::safe_VkImageMemoryBarrier(const safe_VkImageMemoryBarrier& src)
+{
+    sType = src.sType;
+    srcAccessMask = src.srcAccessMask;
+    dstAccessMask = src.dstAccessMask;
+    oldLayout = src.oldLayout;
+    newLayout = src.newLayout;
+    srcQueueFamilyIndex = src.srcQueueFamilyIndex;
+    dstQueueFamilyIndex = src.dstQueueFamilyIndex;
+    image = src.image;
+    subresourceRange = src.subresourceRange;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkImageMemoryBarrier& safe_VkImageMemoryBarrier::operator=(const safe_VkImageMemoryBarrier& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    srcAccessMask = src.srcAccessMask;
+    dstAccessMask = src.dstAccessMask;
+    oldLayout = src.oldLayout;
+    newLayout = src.newLayout;
+    srcQueueFamilyIndex = src.srcQueueFamilyIndex;
+    dstQueueFamilyIndex = src.dstQueueFamilyIndex;
+    image = src.image;
+    subresourceRange = src.subresourceRange;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkImageMemoryBarrier::~safe_VkImageMemoryBarrier()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkImageMemoryBarrier::initialize(const VkImageMemoryBarrier* in_struct)
+{
+    sType = in_struct->sType;
+    srcAccessMask = in_struct->srcAccessMask;
+    dstAccessMask = in_struct->dstAccessMask;
+    oldLayout = in_struct->oldLayout;
+    newLayout = in_struct->newLayout;
+    srcQueueFamilyIndex = in_struct->srcQueueFamilyIndex;
+    dstQueueFamilyIndex = in_struct->dstQueueFamilyIndex;
+    image = in_struct->image;
+    subresourceRange = in_struct->subresourceRange;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkImageMemoryBarrier::initialize(const safe_VkImageMemoryBarrier* src)
+{
+    sType = src->sType;
+    srcAccessMask = src->srcAccessMask;
+    dstAccessMask = src->dstAccessMask;
+    oldLayout = src->oldLayout;
+    newLayout = src->newLayout;
+    srcQueueFamilyIndex = src->srcQueueFamilyIndex;
+    dstQueueFamilyIndex = src->dstQueueFamilyIndex;
+    image = src->image;
+    subresourceRange = src->subresourceRange;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkRenderPassBeginInfo::safe_VkRenderPassBeginInfo(const VkRenderPassBeginInfo* in_struct) :
+    sType(in_struct->sType),
+    renderPass(in_struct->renderPass),
+    framebuffer(in_struct->framebuffer),
+    renderArea(in_struct->renderArea),
+    clearValueCount(in_struct->clearValueCount),
+    pClearValues(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pClearValues) {
+        pClearValues = new VkClearValue[in_struct->clearValueCount];
+        memcpy ((void *)pClearValues, (void *)in_struct->pClearValues, sizeof(VkClearValue)*in_struct->clearValueCount);
+    }
+}
+
+safe_VkRenderPassBeginInfo::safe_VkRenderPassBeginInfo() :
+    pNext(nullptr),
+    pClearValues(nullptr)
+{}
+
+safe_VkRenderPassBeginInfo::safe_VkRenderPassBeginInfo(const safe_VkRenderPassBeginInfo& src)
+{
+    sType = src.sType;
+    renderPass = src.renderPass;
+    framebuffer = src.framebuffer;
+    renderArea = src.renderArea;
+    clearValueCount = src.clearValueCount;
+    pClearValues = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pClearValues) {
+        pClearValues = new VkClearValue[src.clearValueCount];
+        memcpy ((void *)pClearValues, (void *)src.pClearValues, sizeof(VkClearValue)*src.clearValueCount);
+    }
+}
+
+safe_VkRenderPassBeginInfo& safe_VkRenderPassBeginInfo::operator=(const safe_VkRenderPassBeginInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pClearValues)
+        delete[] pClearValues;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    renderPass = src.renderPass;
+    framebuffer = src.framebuffer;
+    renderArea = src.renderArea;
+    clearValueCount = src.clearValueCount;
+    pClearValues = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pClearValues) {
+        pClearValues = new VkClearValue[src.clearValueCount];
+        memcpy ((void *)pClearValues, (void *)src.pClearValues, sizeof(VkClearValue)*src.clearValueCount);
+    }
+
+    return *this;
+}
+
+safe_VkRenderPassBeginInfo::~safe_VkRenderPassBeginInfo()
+{
+    if (pClearValues)
+        delete[] pClearValues;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkRenderPassBeginInfo::initialize(const VkRenderPassBeginInfo* in_struct)
+{
+    sType = in_struct->sType;
+    renderPass = in_struct->renderPass;
+    framebuffer = in_struct->framebuffer;
+    renderArea = in_struct->renderArea;
+    clearValueCount = in_struct->clearValueCount;
+    pClearValues = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pClearValues) {
+        pClearValues = new VkClearValue[in_struct->clearValueCount];
+        memcpy ((void *)pClearValues, (void *)in_struct->pClearValues, sizeof(VkClearValue)*in_struct->clearValueCount);
+    }
+}
+
+void safe_VkRenderPassBeginInfo::initialize(const safe_VkRenderPassBeginInfo* src)
+{
+    sType = src->sType;
+    renderPass = src->renderPass;
+    framebuffer = src->framebuffer;
+    renderArea = src->renderArea;
+    clearValueCount = src->clearValueCount;
+    pClearValues = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pClearValues) {
+        pClearValues = new VkClearValue[src->clearValueCount];
+        memcpy ((void *)pClearValues, (void *)src->pClearValues, sizeof(VkClearValue)*src->clearValueCount);
+    }
+}
+
+safe_VkPhysicalDeviceSubgroupProperties::safe_VkPhysicalDeviceSubgroupProperties(const VkPhysicalDeviceSubgroupProperties* in_struct) :
+    sType(in_struct->sType),
+    subgroupSize(in_struct->subgroupSize),
+    supportedStages(in_struct->supportedStages),
+    supportedOperations(in_struct->supportedOperations),
+    quadOperationsInAllStages(in_struct->quadOperationsInAllStages)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceSubgroupProperties::safe_VkPhysicalDeviceSubgroupProperties() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceSubgroupProperties::safe_VkPhysicalDeviceSubgroupProperties(const safe_VkPhysicalDeviceSubgroupProperties& src)
+{
+    sType = src.sType;
+    subgroupSize = src.subgroupSize;
+    supportedStages = src.supportedStages;
+    supportedOperations = src.supportedOperations;
+    quadOperationsInAllStages = src.quadOperationsInAllStages;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceSubgroupProperties& safe_VkPhysicalDeviceSubgroupProperties::operator=(const safe_VkPhysicalDeviceSubgroupProperties& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    subgroupSize = src.subgroupSize;
+    supportedStages = src.supportedStages;
+    supportedOperations = src.supportedOperations;
+    quadOperationsInAllStages = src.quadOperationsInAllStages;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceSubgroupProperties::~safe_VkPhysicalDeviceSubgroupProperties()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceSubgroupProperties::initialize(const VkPhysicalDeviceSubgroupProperties* in_struct)
+{
+    sType = in_struct->sType;
+    subgroupSize = in_struct->subgroupSize;
+    supportedStages = in_struct->supportedStages;
+    supportedOperations = in_struct->supportedOperations;
+    quadOperationsInAllStages = in_struct->quadOperationsInAllStages;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceSubgroupProperties::initialize(const safe_VkPhysicalDeviceSubgroupProperties* src)
+{
+    sType = src->sType;
+    subgroupSize = src->subgroupSize;
+    supportedStages = src->supportedStages;
+    supportedOperations = src->supportedOperations;
+    quadOperationsInAllStages = src->quadOperationsInAllStages;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkBindBufferMemoryInfo::safe_VkBindBufferMemoryInfo(const VkBindBufferMemoryInfo* in_struct) :
+    sType(in_struct->sType),
+    buffer(in_struct->buffer),
+    memory(in_struct->memory),
+    memoryOffset(in_struct->memoryOffset)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkBindBufferMemoryInfo::safe_VkBindBufferMemoryInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkBindBufferMemoryInfo::safe_VkBindBufferMemoryInfo(const safe_VkBindBufferMemoryInfo& src)
+{
+    sType = src.sType;
+    buffer = src.buffer;
+    memory = src.memory;
+    memoryOffset = src.memoryOffset;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkBindBufferMemoryInfo& safe_VkBindBufferMemoryInfo::operator=(const safe_VkBindBufferMemoryInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    buffer = src.buffer;
+    memory = src.memory;
+    memoryOffset = src.memoryOffset;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkBindBufferMemoryInfo::~safe_VkBindBufferMemoryInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkBindBufferMemoryInfo::initialize(const VkBindBufferMemoryInfo* in_struct)
+{
+    sType = in_struct->sType;
+    buffer = in_struct->buffer;
+    memory = in_struct->memory;
+    memoryOffset = in_struct->memoryOffset;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkBindBufferMemoryInfo::initialize(const safe_VkBindBufferMemoryInfo* src)
+{
+    sType = src->sType;
+    buffer = src->buffer;
+    memory = src->memory;
+    memoryOffset = src->memoryOffset;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkBindImageMemoryInfo::safe_VkBindImageMemoryInfo(const VkBindImageMemoryInfo* in_struct) :
+    sType(in_struct->sType),
+    image(in_struct->image),
+    memory(in_struct->memory),
+    memoryOffset(in_struct->memoryOffset)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkBindImageMemoryInfo::safe_VkBindImageMemoryInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkBindImageMemoryInfo::safe_VkBindImageMemoryInfo(const safe_VkBindImageMemoryInfo& src)
+{
+    sType = src.sType;
+    image = src.image;
+    memory = src.memory;
+    memoryOffset = src.memoryOffset;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkBindImageMemoryInfo& safe_VkBindImageMemoryInfo::operator=(const safe_VkBindImageMemoryInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    image = src.image;
+    memory = src.memory;
+    memoryOffset = src.memoryOffset;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkBindImageMemoryInfo::~safe_VkBindImageMemoryInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkBindImageMemoryInfo::initialize(const VkBindImageMemoryInfo* in_struct)
+{
+    sType = in_struct->sType;
+    image = in_struct->image;
+    memory = in_struct->memory;
+    memoryOffset = in_struct->memoryOffset;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkBindImageMemoryInfo::initialize(const safe_VkBindImageMemoryInfo* src)
+{
+    sType = src->sType;
+    image = src->image;
+    memory = src->memory;
+    memoryOffset = src->memoryOffset;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDevice16BitStorageFeatures::safe_VkPhysicalDevice16BitStorageFeatures(const VkPhysicalDevice16BitStorageFeatures* in_struct) :
+    sType(in_struct->sType),
+    storageBuffer16BitAccess(in_struct->storageBuffer16BitAccess),
+    uniformAndStorageBuffer16BitAccess(in_struct->uniformAndStorageBuffer16BitAccess),
+    storagePushConstant16(in_struct->storagePushConstant16),
+    storageInputOutput16(in_struct->storageInputOutput16)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDevice16BitStorageFeatures::safe_VkPhysicalDevice16BitStorageFeatures() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDevice16BitStorageFeatures::safe_VkPhysicalDevice16BitStorageFeatures(const safe_VkPhysicalDevice16BitStorageFeatures& src)
+{
+    sType = src.sType;
+    storageBuffer16BitAccess = src.storageBuffer16BitAccess;
+    uniformAndStorageBuffer16BitAccess = src.uniformAndStorageBuffer16BitAccess;
+    storagePushConstant16 = src.storagePushConstant16;
+    storageInputOutput16 = src.storageInputOutput16;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDevice16BitStorageFeatures& safe_VkPhysicalDevice16BitStorageFeatures::operator=(const safe_VkPhysicalDevice16BitStorageFeatures& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    storageBuffer16BitAccess = src.storageBuffer16BitAccess;
+    uniformAndStorageBuffer16BitAccess = src.uniformAndStorageBuffer16BitAccess;
+    storagePushConstant16 = src.storagePushConstant16;
+    storageInputOutput16 = src.storageInputOutput16;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDevice16BitStorageFeatures::~safe_VkPhysicalDevice16BitStorageFeatures()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDevice16BitStorageFeatures::initialize(const VkPhysicalDevice16BitStorageFeatures* in_struct)
+{
+    sType = in_struct->sType;
+    storageBuffer16BitAccess = in_struct->storageBuffer16BitAccess;
+    uniformAndStorageBuffer16BitAccess = in_struct->uniformAndStorageBuffer16BitAccess;
+    storagePushConstant16 = in_struct->storagePushConstant16;
+    storageInputOutput16 = in_struct->storageInputOutput16;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDevice16BitStorageFeatures::initialize(const safe_VkPhysicalDevice16BitStorageFeatures* src)
+{
+    sType = src->sType;
+    storageBuffer16BitAccess = src->storageBuffer16BitAccess;
+    uniformAndStorageBuffer16BitAccess = src->uniformAndStorageBuffer16BitAccess;
+    storagePushConstant16 = src->storagePushConstant16;
+    storageInputOutput16 = src->storageInputOutput16;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkMemoryDedicatedRequirements::safe_VkMemoryDedicatedRequirements(const VkMemoryDedicatedRequirements* in_struct) :
+    sType(in_struct->sType),
+    prefersDedicatedAllocation(in_struct->prefersDedicatedAllocation),
+    requiresDedicatedAllocation(in_struct->requiresDedicatedAllocation)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkMemoryDedicatedRequirements::safe_VkMemoryDedicatedRequirements() :
+    pNext(nullptr)
+{}
+
+safe_VkMemoryDedicatedRequirements::safe_VkMemoryDedicatedRequirements(const safe_VkMemoryDedicatedRequirements& src)
+{
+    sType = src.sType;
+    prefersDedicatedAllocation = src.prefersDedicatedAllocation;
+    requiresDedicatedAllocation = src.requiresDedicatedAllocation;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkMemoryDedicatedRequirements& safe_VkMemoryDedicatedRequirements::operator=(const safe_VkMemoryDedicatedRequirements& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    prefersDedicatedAllocation = src.prefersDedicatedAllocation;
+    requiresDedicatedAllocation = src.requiresDedicatedAllocation;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkMemoryDedicatedRequirements::~safe_VkMemoryDedicatedRequirements()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkMemoryDedicatedRequirements::initialize(const VkMemoryDedicatedRequirements* in_struct)
+{
+    sType = in_struct->sType;
+    prefersDedicatedAllocation = in_struct->prefersDedicatedAllocation;
+    requiresDedicatedAllocation = in_struct->requiresDedicatedAllocation;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkMemoryDedicatedRequirements::initialize(const safe_VkMemoryDedicatedRequirements* src)
+{
+    sType = src->sType;
+    prefersDedicatedAllocation = src->prefersDedicatedAllocation;
+    requiresDedicatedAllocation = src->requiresDedicatedAllocation;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkMemoryDedicatedAllocateInfo::safe_VkMemoryDedicatedAllocateInfo(const VkMemoryDedicatedAllocateInfo* in_struct) :
+    sType(in_struct->sType),
+    image(in_struct->image),
+    buffer(in_struct->buffer)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkMemoryDedicatedAllocateInfo::safe_VkMemoryDedicatedAllocateInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkMemoryDedicatedAllocateInfo::safe_VkMemoryDedicatedAllocateInfo(const safe_VkMemoryDedicatedAllocateInfo& src)
+{
+    sType = src.sType;
+    image = src.image;
+    buffer = src.buffer;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkMemoryDedicatedAllocateInfo& safe_VkMemoryDedicatedAllocateInfo::operator=(const safe_VkMemoryDedicatedAllocateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    image = src.image;
+    buffer = src.buffer;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkMemoryDedicatedAllocateInfo::~safe_VkMemoryDedicatedAllocateInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkMemoryDedicatedAllocateInfo::initialize(const VkMemoryDedicatedAllocateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    image = in_struct->image;
+    buffer = in_struct->buffer;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkMemoryDedicatedAllocateInfo::initialize(const safe_VkMemoryDedicatedAllocateInfo* src)
+{
+    sType = src->sType;
+    image = src->image;
+    buffer = src->buffer;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkMemoryAllocateFlagsInfo::safe_VkMemoryAllocateFlagsInfo(const VkMemoryAllocateFlagsInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    deviceMask(in_struct->deviceMask)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkMemoryAllocateFlagsInfo::safe_VkMemoryAllocateFlagsInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkMemoryAllocateFlagsInfo::safe_VkMemoryAllocateFlagsInfo(const safe_VkMemoryAllocateFlagsInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    deviceMask = src.deviceMask;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkMemoryAllocateFlagsInfo& safe_VkMemoryAllocateFlagsInfo::operator=(const safe_VkMemoryAllocateFlagsInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    deviceMask = src.deviceMask;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkMemoryAllocateFlagsInfo::~safe_VkMemoryAllocateFlagsInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkMemoryAllocateFlagsInfo::initialize(const VkMemoryAllocateFlagsInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    deviceMask = in_struct->deviceMask;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkMemoryAllocateFlagsInfo::initialize(const safe_VkMemoryAllocateFlagsInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    deviceMask = src->deviceMask;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDeviceGroupRenderPassBeginInfo::safe_VkDeviceGroupRenderPassBeginInfo(const VkDeviceGroupRenderPassBeginInfo* in_struct) :
+    sType(in_struct->sType),
+    deviceMask(in_struct->deviceMask),
+    deviceRenderAreaCount(in_struct->deviceRenderAreaCount),
+    pDeviceRenderAreas(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pDeviceRenderAreas) {
+        pDeviceRenderAreas = new VkRect2D[in_struct->deviceRenderAreaCount];
+        memcpy ((void *)pDeviceRenderAreas, (void *)in_struct->pDeviceRenderAreas, sizeof(VkRect2D)*in_struct->deviceRenderAreaCount);
+    }
+}
+
+safe_VkDeviceGroupRenderPassBeginInfo::safe_VkDeviceGroupRenderPassBeginInfo() :
+    pNext(nullptr),
+    pDeviceRenderAreas(nullptr)
+{}
+
+safe_VkDeviceGroupRenderPassBeginInfo::safe_VkDeviceGroupRenderPassBeginInfo(const safe_VkDeviceGroupRenderPassBeginInfo& src)
+{
+    sType = src.sType;
+    deviceMask = src.deviceMask;
+    deviceRenderAreaCount = src.deviceRenderAreaCount;
+    pDeviceRenderAreas = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pDeviceRenderAreas) {
+        pDeviceRenderAreas = new VkRect2D[src.deviceRenderAreaCount];
+        memcpy ((void *)pDeviceRenderAreas, (void *)src.pDeviceRenderAreas, sizeof(VkRect2D)*src.deviceRenderAreaCount);
+    }
+}
+
+safe_VkDeviceGroupRenderPassBeginInfo& safe_VkDeviceGroupRenderPassBeginInfo::operator=(const safe_VkDeviceGroupRenderPassBeginInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pDeviceRenderAreas)
+        delete[] pDeviceRenderAreas;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    deviceMask = src.deviceMask;
+    deviceRenderAreaCount = src.deviceRenderAreaCount;
+    pDeviceRenderAreas = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pDeviceRenderAreas) {
+        pDeviceRenderAreas = new VkRect2D[src.deviceRenderAreaCount];
+        memcpy ((void *)pDeviceRenderAreas, (void *)src.pDeviceRenderAreas, sizeof(VkRect2D)*src.deviceRenderAreaCount);
+    }
+
+    return *this;
+}
+
+safe_VkDeviceGroupRenderPassBeginInfo::~safe_VkDeviceGroupRenderPassBeginInfo()
+{
+    if (pDeviceRenderAreas)
+        delete[] pDeviceRenderAreas;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDeviceGroupRenderPassBeginInfo::initialize(const VkDeviceGroupRenderPassBeginInfo* in_struct)
+{
+    sType = in_struct->sType;
+    deviceMask = in_struct->deviceMask;
+    deviceRenderAreaCount = in_struct->deviceRenderAreaCount;
+    pDeviceRenderAreas = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pDeviceRenderAreas) {
+        pDeviceRenderAreas = new VkRect2D[in_struct->deviceRenderAreaCount];
+        memcpy ((void *)pDeviceRenderAreas, (void *)in_struct->pDeviceRenderAreas, sizeof(VkRect2D)*in_struct->deviceRenderAreaCount);
+    }
+}
+
+void safe_VkDeviceGroupRenderPassBeginInfo::initialize(const safe_VkDeviceGroupRenderPassBeginInfo* src)
+{
+    sType = src->sType;
+    deviceMask = src->deviceMask;
+    deviceRenderAreaCount = src->deviceRenderAreaCount;
+    pDeviceRenderAreas = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pDeviceRenderAreas) {
+        pDeviceRenderAreas = new VkRect2D[src->deviceRenderAreaCount];
+        memcpy ((void *)pDeviceRenderAreas, (void *)src->pDeviceRenderAreas, sizeof(VkRect2D)*src->deviceRenderAreaCount);
+    }
+}
+
+safe_VkDeviceGroupCommandBufferBeginInfo::safe_VkDeviceGroupCommandBufferBeginInfo(const VkDeviceGroupCommandBufferBeginInfo* in_struct) :
+    sType(in_struct->sType),
+    deviceMask(in_struct->deviceMask)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDeviceGroupCommandBufferBeginInfo::safe_VkDeviceGroupCommandBufferBeginInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkDeviceGroupCommandBufferBeginInfo::safe_VkDeviceGroupCommandBufferBeginInfo(const safe_VkDeviceGroupCommandBufferBeginInfo& src)
+{
+    sType = src.sType;
+    deviceMask = src.deviceMask;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDeviceGroupCommandBufferBeginInfo& safe_VkDeviceGroupCommandBufferBeginInfo::operator=(const safe_VkDeviceGroupCommandBufferBeginInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    deviceMask = src.deviceMask;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDeviceGroupCommandBufferBeginInfo::~safe_VkDeviceGroupCommandBufferBeginInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDeviceGroupCommandBufferBeginInfo::initialize(const VkDeviceGroupCommandBufferBeginInfo* in_struct)
+{
+    sType = in_struct->sType;
+    deviceMask = in_struct->deviceMask;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDeviceGroupCommandBufferBeginInfo::initialize(const safe_VkDeviceGroupCommandBufferBeginInfo* src)
+{
+    sType = src->sType;
+    deviceMask = src->deviceMask;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDeviceGroupSubmitInfo::safe_VkDeviceGroupSubmitInfo(const VkDeviceGroupSubmitInfo* in_struct) :
+    sType(in_struct->sType),
+    waitSemaphoreCount(in_struct->waitSemaphoreCount),
+    pWaitSemaphoreDeviceIndices(nullptr),
+    commandBufferCount(in_struct->commandBufferCount),
+    pCommandBufferDeviceMasks(nullptr),
+    signalSemaphoreCount(in_struct->signalSemaphoreCount),
+    pSignalSemaphoreDeviceIndices(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pWaitSemaphoreDeviceIndices) {
+        pWaitSemaphoreDeviceIndices = new uint32_t[in_struct->waitSemaphoreCount];
+        memcpy ((void *)pWaitSemaphoreDeviceIndices, (void *)in_struct->pWaitSemaphoreDeviceIndices, sizeof(uint32_t)*in_struct->waitSemaphoreCount);
+    }
+    if (in_struct->pCommandBufferDeviceMasks) {
+        pCommandBufferDeviceMasks = new uint32_t[in_struct->commandBufferCount];
+        memcpy ((void *)pCommandBufferDeviceMasks, (void *)in_struct->pCommandBufferDeviceMasks, sizeof(uint32_t)*in_struct->commandBufferCount);
+    }
+    if (in_struct->pSignalSemaphoreDeviceIndices) {
+        pSignalSemaphoreDeviceIndices = new uint32_t[in_struct->signalSemaphoreCount];
+        memcpy ((void *)pSignalSemaphoreDeviceIndices, (void *)in_struct->pSignalSemaphoreDeviceIndices, sizeof(uint32_t)*in_struct->signalSemaphoreCount);
+    }
+}
+
+safe_VkDeviceGroupSubmitInfo::safe_VkDeviceGroupSubmitInfo() :
+    pNext(nullptr),
+    pWaitSemaphoreDeviceIndices(nullptr),
+    pCommandBufferDeviceMasks(nullptr),
+    pSignalSemaphoreDeviceIndices(nullptr)
+{}
+
+safe_VkDeviceGroupSubmitInfo::safe_VkDeviceGroupSubmitInfo(const safe_VkDeviceGroupSubmitInfo& src)
+{
+    sType = src.sType;
+    waitSemaphoreCount = src.waitSemaphoreCount;
+    pWaitSemaphoreDeviceIndices = nullptr;
+    commandBufferCount = src.commandBufferCount;
+    pCommandBufferDeviceMasks = nullptr;
+    signalSemaphoreCount = src.signalSemaphoreCount;
+    pSignalSemaphoreDeviceIndices = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pWaitSemaphoreDeviceIndices) {
+        pWaitSemaphoreDeviceIndices = new uint32_t[src.waitSemaphoreCount];
+        memcpy ((void *)pWaitSemaphoreDeviceIndices, (void *)src.pWaitSemaphoreDeviceIndices, sizeof(uint32_t)*src.waitSemaphoreCount);
+    }
+    if (src.pCommandBufferDeviceMasks) {
+        pCommandBufferDeviceMasks = new uint32_t[src.commandBufferCount];
+        memcpy ((void *)pCommandBufferDeviceMasks, (void *)src.pCommandBufferDeviceMasks, sizeof(uint32_t)*src.commandBufferCount);
+    }
+    if (src.pSignalSemaphoreDeviceIndices) {
+        pSignalSemaphoreDeviceIndices = new uint32_t[src.signalSemaphoreCount];
+        memcpy ((void *)pSignalSemaphoreDeviceIndices, (void *)src.pSignalSemaphoreDeviceIndices, sizeof(uint32_t)*src.signalSemaphoreCount);
+    }
+}
+
+safe_VkDeviceGroupSubmitInfo& safe_VkDeviceGroupSubmitInfo::operator=(const safe_VkDeviceGroupSubmitInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pWaitSemaphoreDeviceIndices)
+        delete[] pWaitSemaphoreDeviceIndices;
+    if (pCommandBufferDeviceMasks)
+        delete[] pCommandBufferDeviceMasks;
+    if (pSignalSemaphoreDeviceIndices)
+        delete[] pSignalSemaphoreDeviceIndices;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    waitSemaphoreCount = src.waitSemaphoreCount;
+    pWaitSemaphoreDeviceIndices = nullptr;
+    commandBufferCount = src.commandBufferCount;
+    pCommandBufferDeviceMasks = nullptr;
+    signalSemaphoreCount = src.signalSemaphoreCount;
+    pSignalSemaphoreDeviceIndices = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pWaitSemaphoreDeviceIndices) {
+        pWaitSemaphoreDeviceIndices = new uint32_t[src.waitSemaphoreCount];
+        memcpy ((void *)pWaitSemaphoreDeviceIndices, (void *)src.pWaitSemaphoreDeviceIndices, sizeof(uint32_t)*src.waitSemaphoreCount);
+    }
+    if (src.pCommandBufferDeviceMasks) {
+        pCommandBufferDeviceMasks = new uint32_t[src.commandBufferCount];
+        memcpy ((void *)pCommandBufferDeviceMasks, (void *)src.pCommandBufferDeviceMasks, sizeof(uint32_t)*src.commandBufferCount);
+    }
+    if (src.pSignalSemaphoreDeviceIndices) {
+        pSignalSemaphoreDeviceIndices = new uint32_t[src.signalSemaphoreCount];
+        memcpy ((void *)pSignalSemaphoreDeviceIndices, (void *)src.pSignalSemaphoreDeviceIndices, sizeof(uint32_t)*src.signalSemaphoreCount);
+    }
+
+    return *this;
+}
+
+safe_VkDeviceGroupSubmitInfo::~safe_VkDeviceGroupSubmitInfo()
+{
+    if (pWaitSemaphoreDeviceIndices)
+        delete[] pWaitSemaphoreDeviceIndices;
+    if (pCommandBufferDeviceMasks)
+        delete[] pCommandBufferDeviceMasks;
+    if (pSignalSemaphoreDeviceIndices)
+        delete[] pSignalSemaphoreDeviceIndices;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDeviceGroupSubmitInfo::initialize(const VkDeviceGroupSubmitInfo* in_struct)
+{
+    sType = in_struct->sType;
+    waitSemaphoreCount = in_struct->waitSemaphoreCount;
+    pWaitSemaphoreDeviceIndices = nullptr;
+    commandBufferCount = in_struct->commandBufferCount;
+    pCommandBufferDeviceMasks = nullptr;
+    signalSemaphoreCount = in_struct->signalSemaphoreCount;
+    pSignalSemaphoreDeviceIndices = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pWaitSemaphoreDeviceIndices) {
+        pWaitSemaphoreDeviceIndices = new uint32_t[in_struct->waitSemaphoreCount];
+        memcpy ((void *)pWaitSemaphoreDeviceIndices, (void *)in_struct->pWaitSemaphoreDeviceIndices, sizeof(uint32_t)*in_struct->waitSemaphoreCount);
+    }
+    if (in_struct->pCommandBufferDeviceMasks) {
+        pCommandBufferDeviceMasks = new uint32_t[in_struct->commandBufferCount];
+        memcpy ((void *)pCommandBufferDeviceMasks, (void *)in_struct->pCommandBufferDeviceMasks, sizeof(uint32_t)*in_struct->commandBufferCount);
+    }
+    if (in_struct->pSignalSemaphoreDeviceIndices) {
+        pSignalSemaphoreDeviceIndices = new uint32_t[in_struct->signalSemaphoreCount];
+        memcpy ((void *)pSignalSemaphoreDeviceIndices, (void *)in_struct->pSignalSemaphoreDeviceIndices, sizeof(uint32_t)*in_struct->signalSemaphoreCount);
+    }
+}
+
+void safe_VkDeviceGroupSubmitInfo::initialize(const safe_VkDeviceGroupSubmitInfo* src)
+{
+    sType = src->sType;
+    waitSemaphoreCount = src->waitSemaphoreCount;
+    pWaitSemaphoreDeviceIndices = nullptr;
+    commandBufferCount = src->commandBufferCount;
+    pCommandBufferDeviceMasks = nullptr;
+    signalSemaphoreCount = src->signalSemaphoreCount;
+    pSignalSemaphoreDeviceIndices = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pWaitSemaphoreDeviceIndices) {
+        pWaitSemaphoreDeviceIndices = new uint32_t[src->waitSemaphoreCount];
+        memcpy ((void *)pWaitSemaphoreDeviceIndices, (void *)src->pWaitSemaphoreDeviceIndices, sizeof(uint32_t)*src->waitSemaphoreCount);
+    }
+    if (src->pCommandBufferDeviceMasks) {
+        pCommandBufferDeviceMasks = new uint32_t[src->commandBufferCount];
+        memcpy ((void *)pCommandBufferDeviceMasks, (void *)src->pCommandBufferDeviceMasks, sizeof(uint32_t)*src->commandBufferCount);
+    }
+    if (src->pSignalSemaphoreDeviceIndices) {
+        pSignalSemaphoreDeviceIndices = new uint32_t[src->signalSemaphoreCount];
+        memcpy ((void *)pSignalSemaphoreDeviceIndices, (void *)src->pSignalSemaphoreDeviceIndices, sizeof(uint32_t)*src->signalSemaphoreCount);
+    }
+}
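+
+// The pattern above repeats throughout this file: counted arrays are
+// deep-copied with new[]/memcpy, the pNext chain is duplicated with
+// SafePnextCopy(), and ownership is released again with delete[] and
+// FreePnextChain() in the destructor and on re-assignment.
+// Illustrative usage sketch only (names are hypothetical):
+//   VkDeviceGroupSubmitInfo info = {};              // caller-owned input
+//   safe_VkDeviceGroupSubmitInfo safe_info(&info);  // deep copy; 'info' may now be released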
+
+safe_VkDeviceGroupBindSparseInfo::safe_VkDeviceGroupBindSparseInfo(const VkDeviceGroupBindSparseInfo* in_struct) :
+    sType(in_struct->sType),
+    resourceDeviceIndex(in_struct->resourceDeviceIndex),
+    memoryDeviceIndex(in_struct->memoryDeviceIndex)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDeviceGroupBindSparseInfo::safe_VkDeviceGroupBindSparseInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkDeviceGroupBindSparseInfo::safe_VkDeviceGroupBindSparseInfo(const safe_VkDeviceGroupBindSparseInfo& src)
+{
+    sType = src.sType;
+    resourceDeviceIndex = src.resourceDeviceIndex;
+    memoryDeviceIndex = src.memoryDeviceIndex;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDeviceGroupBindSparseInfo& safe_VkDeviceGroupBindSparseInfo::operator=(const safe_VkDeviceGroupBindSparseInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    resourceDeviceIndex = src.resourceDeviceIndex;
+    memoryDeviceIndex = src.memoryDeviceIndex;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDeviceGroupBindSparseInfo::~safe_VkDeviceGroupBindSparseInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDeviceGroupBindSparseInfo::initialize(const VkDeviceGroupBindSparseInfo* in_struct)
+{
+    sType = in_struct->sType;
+    resourceDeviceIndex = in_struct->resourceDeviceIndex;
+    memoryDeviceIndex = in_struct->memoryDeviceIndex;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDeviceGroupBindSparseInfo::initialize(const safe_VkDeviceGroupBindSparseInfo* src)
+{
+    sType = src->sType;
+    resourceDeviceIndex = src->resourceDeviceIndex;
+    memoryDeviceIndex = src->memoryDeviceIndex;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkBindBufferMemoryDeviceGroupInfo::safe_VkBindBufferMemoryDeviceGroupInfo(const VkBindBufferMemoryDeviceGroupInfo* in_struct) :
+    sType(in_struct->sType),
+    deviceIndexCount(in_struct->deviceIndexCount),
+    pDeviceIndices(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pDeviceIndices) {
+        pDeviceIndices = new uint32_t[in_struct->deviceIndexCount];
+        memcpy ((void *)pDeviceIndices, (void *)in_struct->pDeviceIndices, sizeof(uint32_t)*in_struct->deviceIndexCount);
+    }
+}
+
+safe_VkBindBufferMemoryDeviceGroupInfo::safe_VkBindBufferMemoryDeviceGroupInfo() :
+    pNext(nullptr),
+    pDeviceIndices(nullptr)
+{}
+
+safe_VkBindBufferMemoryDeviceGroupInfo::safe_VkBindBufferMemoryDeviceGroupInfo(const safe_VkBindBufferMemoryDeviceGroupInfo& src)
+{
+    sType = src.sType;
+    deviceIndexCount = src.deviceIndexCount;
+    pDeviceIndices = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pDeviceIndices) {
+        pDeviceIndices = new uint32_t[src.deviceIndexCount];
+        memcpy ((void *)pDeviceIndices, (void *)src.pDeviceIndices, sizeof(uint32_t)*src.deviceIndexCount);
+    }
+}
+
+safe_VkBindBufferMemoryDeviceGroupInfo& safe_VkBindBufferMemoryDeviceGroupInfo::operator=(const safe_VkBindBufferMemoryDeviceGroupInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pDeviceIndices)
+        delete[] pDeviceIndices;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    deviceIndexCount = src.deviceIndexCount;
+    pDeviceIndices = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pDeviceIndices) {
+        pDeviceIndices = new uint32_t[src.deviceIndexCount];
+        memcpy ((void *)pDeviceIndices, (void *)src.pDeviceIndices, sizeof(uint32_t)*src.deviceIndexCount);
+    }
+
+    return *this;
+}
+
+safe_VkBindBufferMemoryDeviceGroupInfo::~safe_VkBindBufferMemoryDeviceGroupInfo()
+{
+    if (pDeviceIndices)
+        delete[] pDeviceIndices;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkBindBufferMemoryDeviceGroupInfo::initialize(const VkBindBufferMemoryDeviceGroupInfo* in_struct)
+{
+    sType = in_struct->sType;
+    deviceIndexCount = in_struct->deviceIndexCount;
+    pDeviceIndices = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pDeviceIndices) {
+        pDeviceIndices = new uint32_t[in_struct->deviceIndexCount];
+        memcpy ((void *)pDeviceIndices, (void *)in_struct->pDeviceIndices, sizeof(uint32_t)*in_struct->deviceIndexCount);
+    }
+}
+
+void safe_VkBindBufferMemoryDeviceGroupInfo::initialize(const safe_VkBindBufferMemoryDeviceGroupInfo* src)
+{
+    sType = src->sType;
+    deviceIndexCount = src->deviceIndexCount;
+    pDeviceIndices = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pDeviceIndices) {
+        pDeviceIndices = new uint32_t[src->deviceIndexCount];
+        memcpy ((void *)pDeviceIndices, (void *)src->pDeviceIndices, sizeof(uint32_t)*src->deviceIndexCount);
+    }
+}
+
+safe_VkBindImageMemoryDeviceGroupInfo::safe_VkBindImageMemoryDeviceGroupInfo(const VkBindImageMemoryDeviceGroupInfo* in_struct) :
+    sType(in_struct->sType),
+    deviceIndexCount(in_struct->deviceIndexCount),
+    pDeviceIndices(nullptr),
+    splitInstanceBindRegionCount(in_struct->splitInstanceBindRegionCount),
+    pSplitInstanceBindRegions(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pDeviceIndices) {
+        pDeviceIndices = new uint32_t[in_struct->deviceIndexCount];
+        memcpy ((void *)pDeviceIndices, (void *)in_struct->pDeviceIndices, sizeof(uint32_t)*in_struct->deviceIndexCount);
+    }
+    if (in_struct->pSplitInstanceBindRegions) {
+        pSplitInstanceBindRegions = new VkRect2D[in_struct->splitInstanceBindRegionCount];
+        memcpy ((void *)pSplitInstanceBindRegions, (void *)in_struct->pSplitInstanceBindRegions, sizeof(VkRect2D)*in_struct->splitInstanceBindRegionCount);
+    }
+}
+
+safe_VkBindImageMemoryDeviceGroupInfo::safe_VkBindImageMemoryDeviceGroupInfo() :
+    pNext(nullptr),
+    pDeviceIndices(nullptr),
+    pSplitInstanceBindRegions(nullptr)
+{}
+
+safe_VkBindImageMemoryDeviceGroupInfo::safe_VkBindImageMemoryDeviceGroupInfo(const safe_VkBindImageMemoryDeviceGroupInfo& src)
+{
+    sType = src.sType;
+    deviceIndexCount = src.deviceIndexCount;
+    pDeviceIndices = nullptr;
+    splitInstanceBindRegionCount = src.splitInstanceBindRegionCount;
+    pSplitInstanceBindRegions = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pDeviceIndices) {
+        pDeviceIndices = new uint32_t[src.deviceIndexCount];
+        memcpy ((void *)pDeviceIndices, (void *)src.pDeviceIndices, sizeof(uint32_t)*src.deviceIndexCount);
+    }
+    if (src.pSplitInstanceBindRegions) {
+        pSplitInstanceBindRegions = new VkRect2D[src.splitInstanceBindRegionCount];
+        memcpy ((void *)pSplitInstanceBindRegions, (void *)src.pSplitInstanceBindRegions, sizeof(VkRect2D)*src.splitInstanceBindRegionCount);
+    }
+}
+
+safe_VkBindImageMemoryDeviceGroupInfo& safe_VkBindImageMemoryDeviceGroupInfo::operator=(const safe_VkBindImageMemoryDeviceGroupInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pDeviceIndices)
+        delete[] pDeviceIndices;
+    if (pSplitInstanceBindRegions)
+        delete[] pSplitInstanceBindRegions;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    deviceIndexCount = src.deviceIndexCount;
+    pDeviceIndices = nullptr;
+    splitInstanceBindRegionCount = src.splitInstanceBindRegionCount;
+    pSplitInstanceBindRegions = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pDeviceIndices) {
+        pDeviceIndices = new uint32_t[src.deviceIndexCount];
+        memcpy ((void *)pDeviceIndices, (void *)src.pDeviceIndices, sizeof(uint32_t)*src.deviceIndexCount);
+    }
+    if (src.pSplitInstanceBindRegions) {
+        pSplitInstanceBindRegions = new VkRect2D[src.splitInstanceBindRegionCount];
+        memcpy ((void *)pSplitInstanceBindRegions, (void *)src.pSplitInstanceBindRegions, sizeof(VkRect2D)*src.splitInstanceBindRegionCount);
+    }
+
+    return *this;
+}
+
+safe_VkBindImageMemoryDeviceGroupInfo::~safe_VkBindImageMemoryDeviceGroupInfo()
+{
+    if (pDeviceIndices)
+        delete[] pDeviceIndices;
+    if (pSplitInstanceBindRegions)
+        delete[] pSplitInstanceBindRegions;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkBindImageMemoryDeviceGroupInfo::initialize(const VkBindImageMemoryDeviceGroupInfo* in_struct)
+{
+    sType = in_struct->sType;
+    deviceIndexCount = in_struct->deviceIndexCount;
+    pDeviceIndices = nullptr;
+    splitInstanceBindRegionCount = in_struct->splitInstanceBindRegionCount;
+    pSplitInstanceBindRegions = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pDeviceIndices) {
+        pDeviceIndices = new uint32_t[in_struct->deviceIndexCount];
+        memcpy ((void *)pDeviceIndices, (void *)in_struct->pDeviceIndices, sizeof(uint32_t)*in_struct->deviceIndexCount);
+    }
+    if (in_struct->pSplitInstanceBindRegions) {
+        pSplitInstanceBindRegions = new VkRect2D[in_struct->splitInstanceBindRegionCount];
+        memcpy ((void *)pSplitInstanceBindRegions, (void *)in_struct->pSplitInstanceBindRegions, sizeof(VkRect2D)*in_struct->splitInstanceBindRegionCount);
+    }
+}
+
+void safe_VkBindImageMemoryDeviceGroupInfo::initialize(const safe_VkBindImageMemoryDeviceGroupInfo* src)
+{
+    sType = src->sType;
+    deviceIndexCount = src->deviceIndexCount;
+    pDeviceIndices = nullptr;
+    splitInstanceBindRegionCount = src->splitInstanceBindRegionCount;
+    pSplitInstanceBindRegions = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pDeviceIndices) {
+        pDeviceIndices = new uint32_t[src->deviceIndexCount];
+        memcpy ((void *)pDeviceIndices, (void *)src->pDeviceIndices, sizeof(uint32_t)*src->deviceIndexCount);
+    }
+    if (src->pSplitInstanceBindRegions) {
+        pSplitInstanceBindRegions = new VkRect2D[src->splitInstanceBindRegionCount];
+        memcpy ((void *)pSplitInstanceBindRegions, (void *)src->pSplitInstanceBindRegions, sizeof(VkRect2D)*src->splitInstanceBindRegionCount);
+    }
+}
+
+safe_VkPhysicalDeviceGroupProperties::safe_VkPhysicalDeviceGroupProperties(const VkPhysicalDeviceGroupProperties* in_struct) :
+    sType(in_struct->sType),
+    physicalDeviceCount(in_struct->physicalDeviceCount),
+    subsetAllocation(in_struct->subsetAllocation)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    for (uint32_t i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; ++i) {
+        physicalDevices[i] = in_struct->physicalDevices[i];
+    }
+}
+
+safe_VkPhysicalDeviceGroupProperties::safe_VkPhysicalDeviceGroupProperties() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceGroupProperties::safe_VkPhysicalDeviceGroupProperties(const safe_VkPhysicalDeviceGroupProperties& src)
+{
+    sType = src.sType;
+    physicalDeviceCount = src.physicalDeviceCount;
+    subsetAllocation = src.subsetAllocation;
+    pNext = SafePnextCopy(src.pNext);
+    for (uint32_t i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; ++i) {
+        physicalDevices[i] = src.physicalDevices[i];
+    }
+}
+
+safe_VkPhysicalDeviceGroupProperties& safe_VkPhysicalDeviceGroupProperties::operator=(const safe_VkPhysicalDeviceGroupProperties& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    physicalDeviceCount = src.physicalDeviceCount;
+    subsetAllocation = src.subsetAllocation;
+    pNext = SafePnextCopy(src.pNext);
+    for (uint32_t i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; ++i) {
+        physicalDevices[i] = src.physicalDevices[i];
+    }
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceGroupProperties::~safe_VkPhysicalDeviceGroupProperties()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceGroupProperties::initialize(const VkPhysicalDeviceGroupProperties* in_struct)
+{
+    sType = in_struct->sType;
+    physicalDeviceCount = in_struct->physicalDeviceCount;
+    subsetAllocation = in_struct->subsetAllocation;
+    pNext = SafePnextCopy(in_struct->pNext);
+    for (uint32_t i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; ++i) {
+        physicalDevices[i] = in_struct->physicalDevices[i];
+    }
+}
+
+void safe_VkPhysicalDeviceGroupProperties::initialize(const safe_VkPhysicalDeviceGroupProperties* src)
+{
+    sType = src->sType;
+    physicalDeviceCount = src->physicalDeviceCount;
+    subsetAllocation = src->subsetAllocation;
+    pNext = SafePnextCopy(src->pNext);
+    for (uint32_t i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; ++i) {
+        physicalDevices[i] = src->physicalDevices[i];
+    }
+}
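+
+// Unlike the heap-allocated arrays above, the fixed-size physicalDevices[]
+// member (VK_MAX_DEVICE_GROUP_SIZE entries) is copied element by element, so
+// no allocation or matching delete[] is needed for it.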
+
+safe_VkDeviceGroupDeviceCreateInfo::safe_VkDeviceGroupDeviceCreateInfo(const VkDeviceGroupDeviceCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    physicalDeviceCount(in_struct->physicalDeviceCount),
+    pPhysicalDevices(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pPhysicalDevices) {
+        pPhysicalDevices = new VkPhysicalDevice[in_struct->physicalDeviceCount];
+        memcpy ((void *)pPhysicalDevices, (void *)in_struct->pPhysicalDevices, sizeof(VkPhysicalDevice)*in_struct->physicalDeviceCount);
+    }
+}
+
+safe_VkDeviceGroupDeviceCreateInfo::safe_VkDeviceGroupDeviceCreateInfo() :
+    pNext(nullptr),
+    pPhysicalDevices(nullptr)
+{}
+
+safe_VkDeviceGroupDeviceCreateInfo::safe_VkDeviceGroupDeviceCreateInfo(const safe_VkDeviceGroupDeviceCreateInfo& src)
+{
+    sType = src.sType;
+    physicalDeviceCount = src.physicalDeviceCount;
+    pPhysicalDevices = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pPhysicalDevices) {
+        pPhysicalDevices = new VkPhysicalDevice[src.physicalDeviceCount];
+        memcpy ((void *)pPhysicalDevices, (void *)src.pPhysicalDevices, sizeof(VkPhysicalDevice)*src.physicalDeviceCount);
+    }
+}
+
+safe_VkDeviceGroupDeviceCreateInfo& safe_VkDeviceGroupDeviceCreateInfo::operator=(const safe_VkDeviceGroupDeviceCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pPhysicalDevices)
+        delete[] pPhysicalDevices;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    physicalDeviceCount = src.physicalDeviceCount;
+    pPhysicalDevices = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pPhysicalDevices) {
+        pPhysicalDevices = new VkPhysicalDevice[src.physicalDeviceCount];
+        memcpy ((void *)pPhysicalDevices, (void *)src.pPhysicalDevices, sizeof(VkPhysicalDevice)*src.physicalDeviceCount);
+    }
+
+    return *this;
+}
+
+safe_VkDeviceGroupDeviceCreateInfo::~safe_VkDeviceGroupDeviceCreateInfo()
+{
+    if (pPhysicalDevices)
+        delete[] pPhysicalDevices;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDeviceGroupDeviceCreateInfo::initialize(const VkDeviceGroupDeviceCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    physicalDeviceCount = in_struct->physicalDeviceCount;
+    pPhysicalDevices = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pPhysicalDevices) {
+        pPhysicalDevices = new VkPhysicalDevice[in_struct->physicalDeviceCount];
+        memcpy ((void *)pPhysicalDevices, (void *)in_struct->pPhysicalDevices, sizeof(VkPhysicalDevice)*in_struct->physicalDeviceCount);
+    }
+}
+
+void safe_VkDeviceGroupDeviceCreateInfo::initialize(const safe_VkDeviceGroupDeviceCreateInfo* src)
+{
+    sType = src->sType;
+    physicalDeviceCount = src->physicalDeviceCount;
+    pPhysicalDevices = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pPhysicalDevices) {
+        pPhysicalDevices = new VkPhysicalDevice[src->physicalDeviceCount];
+        memcpy ((void *)pPhysicalDevices, (void *)src->pPhysicalDevices, sizeof(VkPhysicalDevice)*src->physicalDeviceCount);
+    }
+}
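+
+// pPhysicalDevices holds opaque VkPhysicalDevice handles, so memcpy of the
+// handle values is sufficient; only the array storage is owned by the wrapper.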
+
+safe_VkBufferMemoryRequirementsInfo2::safe_VkBufferMemoryRequirementsInfo2(const VkBufferMemoryRequirementsInfo2* in_struct) :
+    sType(in_struct->sType),
+    buffer(in_struct->buffer)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkBufferMemoryRequirementsInfo2::safe_VkBufferMemoryRequirementsInfo2() :
+    pNext(nullptr)
+{}
+
+safe_VkBufferMemoryRequirementsInfo2::safe_VkBufferMemoryRequirementsInfo2(const safe_VkBufferMemoryRequirementsInfo2& src)
+{
+    sType = src.sType;
+    buffer = src.buffer;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkBufferMemoryRequirementsInfo2& safe_VkBufferMemoryRequirementsInfo2::operator=(const safe_VkBufferMemoryRequirementsInfo2& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    buffer = src.buffer;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkBufferMemoryRequirementsInfo2::~safe_VkBufferMemoryRequirementsInfo2()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkBufferMemoryRequirementsInfo2::initialize(const VkBufferMemoryRequirementsInfo2* in_struct)
+{
+    sType = in_struct->sType;
+    buffer = in_struct->buffer;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkBufferMemoryRequirementsInfo2::initialize(const safe_VkBufferMemoryRequirementsInfo2* src)
+{
+    sType = src->sType;
+    buffer = src->buffer;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkImageMemoryRequirementsInfo2::safe_VkImageMemoryRequirementsInfo2(const VkImageMemoryRequirementsInfo2* in_struct) :
+    sType(in_struct->sType),
+    image(in_struct->image)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkImageMemoryRequirementsInfo2::safe_VkImageMemoryRequirementsInfo2() :
+    pNext(nullptr)
+{}
+
+safe_VkImageMemoryRequirementsInfo2::safe_VkImageMemoryRequirementsInfo2(const safe_VkImageMemoryRequirementsInfo2& src)
+{
+    sType = src.sType;
+    image = src.image;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkImageMemoryRequirementsInfo2& safe_VkImageMemoryRequirementsInfo2::operator=(const safe_VkImageMemoryRequirementsInfo2& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    image = src.image;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkImageMemoryRequirementsInfo2::~safe_VkImageMemoryRequirementsInfo2()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkImageMemoryRequirementsInfo2::initialize(const VkImageMemoryRequirementsInfo2* in_struct)
+{
+    sType = in_struct->sType;
+    image = in_struct->image;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkImageMemoryRequirementsInfo2::initialize(const safe_VkImageMemoryRequirementsInfo2* src)
+{
+    sType = src->sType;
+    image = src->image;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkImageSparseMemoryRequirementsInfo2::safe_VkImageSparseMemoryRequirementsInfo2(const VkImageSparseMemoryRequirementsInfo2* in_struct) :
+    sType(in_struct->sType),
+    image(in_struct->image)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkImageSparseMemoryRequirementsInfo2::safe_VkImageSparseMemoryRequirementsInfo2() :
+    pNext(nullptr)
+{}
+
+safe_VkImageSparseMemoryRequirementsInfo2::safe_VkImageSparseMemoryRequirementsInfo2(const safe_VkImageSparseMemoryRequirementsInfo2& src)
+{
+    sType = src.sType;
+    image = src.image;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkImageSparseMemoryRequirementsInfo2& safe_VkImageSparseMemoryRequirementsInfo2::operator=(const safe_VkImageSparseMemoryRequirementsInfo2& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    image = src.image;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkImageSparseMemoryRequirementsInfo2::~safe_VkImageSparseMemoryRequirementsInfo2()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkImageSparseMemoryRequirementsInfo2::initialize(const VkImageSparseMemoryRequirementsInfo2* in_struct)
+{
+    sType = in_struct->sType;
+    image = in_struct->image;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkImageSparseMemoryRequirementsInfo2::initialize(const safe_VkImageSparseMemoryRequirementsInfo2* src)
+{
+    sType = src->sType;
+    image = src->image;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkMemoryRequirements2::safe_VkMemoryRequirements2(const VkMemoryRequirements2* in_struct) :
+    sType(in_struct->sType),
+    memoryRequirements(in_struct->memoryRequirements)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkMemoryRequirements2::safe_VkMemoryRequirements2() :
+    pNext(nullptr)
+{}
+
+safe_VkMemoryRequirements2::safe_VkMemoryRequirements2(const safe_VkMemoryRequirements2& src)
+{
+    sType = src.sType;
+    memoryRequirements = src.memoryRequirements;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkMemoryRequirements2& safe_VkMemoryRequirements2::operator=(const safe_VkMemoryRequirements2& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    memoryRequirements = src.memoryRequirements;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkMemoryRequirements2::~safe_VkMemoryRequirements2()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkMemoryRequirements2::initialize(const VkMemoryRequirements2* in_struct)
+{
+    sType = in_struct->sType;
+    memoryRequirements = in_struct->memoryRequirements;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkMemoryRequirements2::initialize(const safe_VkMemoryRequirements2* src)
+{
+    sType = src->sType;
+    memoryRequirements = src->memoryRequirements;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkSparseImageMemoryRequirements2::safe_VkSparseImageMemoryRequirements2(const VkSparseImageMemoryRequirements2* in_struct) :
+    sType(in_struct->sType),
+    memoryRequirements(in_struct->memoryRequirements)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkSparseImageMemoryRequirements2::safe_VkSparseImageMemoryRequirements2() :
+    pNext(nullptr)
+{}
+
+safe_VkSparseImageMemoryRequirements2::safe_VkSparseImageMemoryRequirements2(const safe_VkSparseImageMemoryRequirements2& src)
+{
+    sType = src.sType;
+    memoryRequirements = src.memoryRequirements;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkSparseImageMemoryRequirements2& safe_VkSparseImageMemoryRequirements2::operator=(const safe_VkSparseImageMemoryRequirements2& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    memoryRequirements = src.memoryRequirements;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkSparseImageMemoryRequirements2::~safe_VkSparseImageMemoryRequirements2()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSparseImageMemoryRequirements2::initialize(const VkSparseImageMemoryRequirements2* in_struct)
+{
+    sType = in_struct->sType;
+    memoryRequirements = in_struct->memoryRequirements;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkSparseImageMemoryRequirements2::initialize(const safe_VkSparseImageMemoryRequirements2* src)
+{
+    sType = src->sType;
+    memoryRequirements = src->memoryRequirements;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceFeatures2::safe_VkPhysicalDeviceFeatures2(const VkPhysicalDeviceFeatures2* in_struct) :
+    sType(in_struct->sType),
+    features(in_struct->features)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceFeatures2::safe_VkPhysicalDeviceFeatures2() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceFeatures2::safe_VkPhysicalDeviceFeatures2(const safe_VkPhysicalDeviceFeatures2& src)
+{
+    sType = src.sType;
+    features = src.features;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceFeatures2& safe_VkPhysicalDeviceFeatures2::operator=(const safe_VkPhysicalDeviceFeatures2& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    features = src.features;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceFeatures2::~safe_VkPhysicalDeviceFeatures2()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceFeatures2::initialize(const VkPhysicalDeviceFeatures2* in_struct)
+{
+    sType = in_struct->sType;
+    features = in_struct->features;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceFeatures2::initialize(const safe_VkPhysicalDeviceFeatures2* src)
+{
+    sType = src->sType;
+    features = src->features;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceProperties2::safe_VkPhysicalDeviceProperties2(const VkPhysicalDeviceProperties2* in_struct) :
+    sType(in_struct->sType),
+    properties(in_struct->properties)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceProperties2::safe_VkPhysicalDeviceProperties2() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceProperties2::safe_VkPhysicalDeviceProperties2(const safe_VkPhysicalDeviceProperties2& src)
+{
+    sType = src.sType;
+    properties = src.properties;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceProperties2& safe_VkPhysicalDeviceProperties2::operator=(const safe_VkPhysicalDeviceProperties2& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    properties = src.properties;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceProperties2::~safe_VkPhysicalDeviceProperties2()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceProperties2::initialize(const VkPhysicalDeviceProperties2* in_struct)
+{
+    sType = in_struct->sType;
+    properties = in_struct->properties;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceProperties2::initialize(const safe_VkPhysicalDeviceProperties2* src)
+{
+    sType = src->sType;
+    properties = src->properties;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkFormatProperties2::safe_VkFormatProperties2(const VkFormatProperties2* in_struct) :
+    sType(in_struct->sType),
+    formatProperties(in_struct->formatProperties)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkFormatProperties2::safe_VkFormatProperties2() :
+    pNext(nullptr)
+{}
+
+safe_VkFormatProperties2::safe_VkFormatProperties2(const safe_VkFormatProperties2& src)
+{
+    sType = src.sType;
+    formatProperties = src.formatProperties;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkFormatProperties2& safe_VkFormatProperties2::operator=(const safe_VkFormatProperties2& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    formatProperties = src.formatProperties;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkFormatProperties2::~safe_VkFormatProperties2()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkFormatProperties2::initialize(const VkFormatProperties2* in_struct)
+{
+    sType = in_struct->sType;
+    formatProperties = in_struct->formatProperties;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkFormatProperties2::initialize(const safe_VkFormatProperties2* src)
+{
+    sType = src->sType;
+    formatProperties = src->formatProperties;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkImageFormatProperties2::safe_VkImageFormatProperties2(const VkImageFormatProperties2* in_struct) :
+    sType(in_struct->sType),
+    imageFormatProperties(in_struct->imageFormatProperties)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkImageFormatProperties2::safe_VkImageFormatProperties2() :
+    pNext(nullptr)
+{}
+
+safe_VkImageFormatProperties2::safe_VkImageFormatProperties2(const safe_VkImageFormatProperties2& src)
+{
+    sType = src.sType;
+    imageFormatProperties = src.imageFormatProperties;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkImageFormatProperties2& safe_VkImageFormatProperties2::operator=(const safe_VkImageFormatProperties2& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    imageFormatProperties = src.imageFormatProperties;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkImageFormatProperties2::~safe_VkImageFormatProperties2()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkImageFormatProperties2::initialize(const VkImageFormatProperties2* in_struct)
+{
+    sType = in_struct->sType;
+    imageFormatProperties = in_struct->imageFormatProperties;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkImageFormatProperties2::initialize(const safe_VkImageFormatProperties2* src)
+{
+    sType = src->sType;
+    imageFormatProperties = src->imageFormatProperties;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceImageFormatInfo2::safe_VkPhysicalDeviceImageFormatInfo2(const VkPhysicalDeviceImageFormatInfo2* in_struct) :
+    sType(in_struct->sType),
+    format(in_struct->format),
+    type(in_struct->type),
+    tiling(in_struct->tiling),
+    usage(in_struct->usage),
+    flags(in_struct->flags)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceImageFormatInfo2::safe_VkPhysicalDeviceImageFormatInfo2() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceImageFormatInfo2::safe_VkPhysicalDeviceImageFormatInfo2(const safe_VkPhysicalDeviceImageFormatInfo2& src)
+{
+    sType = src.sType;
+    format = src.format;
+    type = src.type;
+    tiling = src.tiling;
+    usage = src.usage;
+    flags = src.flags;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceImageFormatInfo2& safe_VkPhysicalDeviceImageFormatInfo2::operator=(const safe_VkPhysicalDeviceImageFormatInfo2& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    format = src.format;
+    type = src.type;
+    tiling = src.tiling;
+    usage = src.usage;
+    flags = src.flags;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceImageFormatInfo2::~safe_VkPhysicalDeviceImageFormatInfo2()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceImageFormatInfo2::initialize(const VkPhysicalDeviceImageFormatInfo2* in_struct)
+{
+    sType = in_struct->sType;
+    format = in_struct->format;
+    type = in_struct->type;
+    tiling = in_struct->tiling;
+    usage = in_struct->usage;
+    flags = in_struct->flags;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceImageFormatInfo2::initialize(const safe_VkPhysicalDeviceImageFormatInfo2* src)
+{
+    sType = src->sType;
+    format = src->format;
+    type = src->type;
+    tiling = src->tiling;
+    usage = src->usage;
+    flags = src->flags;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkQueueFamilyProperties2::safe_VkQueueFamilyProperties2(const VkQueueFamilyProperties2* in_struct) :
+    sType(in_struct->sType),
+    queueFamilyProperties(in_struct->queueFamilyProperties)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkQueueFamilyProperties2::safe_VkQueueFamilyProperties2() :
+    pNext(nullptr)
+{}
+
+safe_VkQueueFamilyProperties2::safe_VkQueueFamilyProperties2(const safe_VkQueueFamilyProperties2& src)
+{
+    sType = src.sType;
+    queueFamilyProperties = src.queueFamilyProperties;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkQueueFamilyProperties2& safe_VkQueueFamilyProperties2::operator=(const safe_VkQueueFamilyProperties2& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    queueFamilyProperties = src.queueFamilyProperties;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkQueueFamilyProperties2::~safe_VkQueueFamilyProperties2()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkQueueFamilyProperties2::initialize(const VkQueueFamilyProperties2* in_struct)
+{
+    sType = in_struct->sType;
+    queueFamilyProperties = in_struct->queueFamilyProperties;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkQueueFamilyProperties2::initialize(const safe_VkQueueFamilyProperties2* src)
+{
+    sType = src->sType;
+    queueFamilyProperties = src->queueFamilyProperties;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceMemoryProperties2::safe_VkPhysicalDeviceMemoryProperties2(const VkPhysicalDeviceMemoryProperties2* in_struct) :
+    sType(in_struct->sType),
+    memoryProperties(in_struct->memoryProperties)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceMemoryProperties2::safe_VkPhysicalDeviceMemoryProperties2() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceMemoryProperties2::safe_VkPhysicalDeviceMemoryProperties2(const safe_VkPhysicalDeviceMemoryProperties2& src)
+{
+    sType = src.sType;
+    memoryProperties = src.memoryProperties;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceMemoryProperties2& safe_VkPhysicalDeviceMemoryProperties2::operator=(const safe_VkPhysicalDeviceMemoryProperties2& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    memoryProperties = src.memoryProperties;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceMemoryProperties2::~safe_VkPhysicalDeviceMemoryProperties2()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceMemoryProperties2::initialize(const VkPhysicalDeviceMemoryProperties2* in_struct)
+{
+    sType = in_struct->sType;
+    memoryProperties = in_struct->memoryProperties;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceMemoryProperties2::initialize(const safe_VkPhysicalDeviceMemoryProperties2* src)
+{
+    sType = src->sType;
+    memoryProperties = src->memoryProperties;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkSparseImageFormatProperties2::safe_VkSparseImageFormatProperties2(const VkSparseImageFormatProperties2* in_struct) :
+    sType(in_struct->sType),
+    properties(in_struct->properties)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkSparseImageFormatProperties2::safe_VkSparseImageFormatProperties2() :
+    pNext(nullptr)
+{}
+
+safe_VkSparseImageFormatProperties2::safe_VkSparseImageFormatProperties2(const safe_VkSparseImageFormatProperties2& src)
+{
+    sType = src.sType;
+    properties = src.properties;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkSparseImageFormatProperties2& safe_VkSparseImageFormatProperties2::operator=(const safe_VkSparseImageFormatProperties2& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    properties = src.properties;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkSparseImageFormatProperties2::~safe_VkSparseImageFormatProperties2()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSparseImageFormatProperties2::initialize(const VkSparseImageFormatProperties2* in_struct)
+{
+    sType = in_struct->sType;
+    properties = in_struct->properties;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkSparseImageFormatProperties2::initialize(const safe_VkSparseImageFormatProperties2* src)
+{
+    sType = src->sType;
+    properties = src->properties;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceSparseImageFormatInfo2::safe_VkPhysicalDeviceSparseImageFormatInfo2(const VkPhysicalDeviceSparseImageFormatInfo2* in_struct) :
+    sType(in_struct->sType),
+    format(in_struct->format),
+    type(in_struct->type),
+    samples(in_struct->samples),
+    usage(in_struct->usage),
+    tiling(in_struct->tiling)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceSparseImageFormatInfo2::safe_VkPhysicalDeviceSparseImageFormatInfo2() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceSparseImageFormatInfo2::safe_VkPhysicalDeviceSparseImageFormatInfo2(const safe_VkPhysicalDeviceSparseImageFormatInfo2& src)
+{
+    sType = src.sType;
+    format = src.format;
+    type = src.type;
+    samples = src.samples;
+    usage = src.usage;
+    tiling = src.tiling;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceSparseImageFormatInfo2& safe_VkPhysicalDeviceSparseImageFormatInfo2::operator=(const safe_VkPhysicalDeviceSparseImageFormatInfo2& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    format = src.format;
+    type = src.type;
+    samples = src.samples;
+    usage = src.usage;
+    tiling = src.tiling;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceSparseImageFormatInfo2::~safe_VkPhysicalDeviceSparseImageFormatInfo2()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceSparseImageFormatInfo2::initialize(const VkPhysicalDeviceSparseImageFormatInfo2* in_struct)
+{
+    sType = in_struct->sType;
+    format = in_struct->format;
+    type = in_struct->type;
+    samples = in_struct->samples;
+    usage = in_struct->usage;
+    tiling = in_struct->tiling;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceSparseImageFormatInfo2::initialize(const safe_VkPhysicalDeviceSparseImageFormatInfo2* src)
+{
+    sType = src->sType;
+    format = src->format;
+    type = src->type;
+    samples = src->samples;
+    usage = src->usage;
+    tiling = src->tiling;
+    pNext = SafePnextCopy(src->pNext);
+}
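+
+// Structs in this section with no pointer-typed members besides pNext (the
+// *Info2 / *Properties2 family above) reduce to plain member copies plus
+// SafePnextCopy()/FreePnextChain() for the extension chain.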
+
+safe_VkPhysicalDevicePointClippingProperties::safe_VkPhysicalDevicePointClippingProperties(const VkPhysicalDevicePointClippingProperties* in_struct) :
+    sType(in_struct->sType),
+    pointClippingBehavior(in_struct->pointClippingBehavior)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDevicePointClippingProperties::safe_VkPhysicalDevicePointClippingProperties() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDevicePointClippingProperties::safe_VkPhysicalDevicePointClippingProperties(const safe_VkPhysicalDevicePointClippingProperties& src)
+{
+    sType = src.sType;
+    pointClippingBehavior = src.pointClippingBehavior;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDevicePointClippingProperties& safe_VkPhysicalDevicePointClippingProperties::operator=(const safe_VkPhysicalDevicePointClippingProperties& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    pointClippingBehavior = src.pointClippingBehavior;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDevicePointClippingProperties::~safe_VkPhysicalDevicePointClippingProperties()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDevicePointClippingProperties::initialize(const VkPhysicalDevicePointClippingProperties* in_struct)
+{
+    sType = in_struct->sType;
+    pointClippingBehavior = in_struct->pointClippingBehavior;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDevicePointClippingProperties::initialize(const safe_VkPhysicalDevicePointClippingProperties* src)
+{
+    sType = src->sType;
+    pointClippingBehavior = src->pointClippingBehavior;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkRenderPassInputAttachmentAspectCreateInfo::safe_VkRenderPassInputAttachmentAspectCreateInfo(const VkRenderPassInputAttachmentAspectCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    aspectReferenceCount(in_struct->aspectReferenceCount),
+    pAspectReferences(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pAspectReferences) {
+        pAspectReferences = new VkInputAttachmentAspectReference[in_struct->aspectReferenceCount];
+        memcpy ((void *)pAspectReferences, (void *)in_struct->pAspectReferences, sizeof(VkInputAttachmentAspectReference)*in_struct->aspectReferenceCount);
+    }
+}
+
+safe_VkRenderPassInputAttachmentAspectCreateInfo::safe_VkRenderPassInputAttachmentAspectCreateInfo() :
+    pNext(nullptr),
+    pAspectReferences(nullptr)
+{}
+
+safe_VkRenderPassInputAttachmentAspectCreateInfo::safe_VkRenderPassInputAttachmentAspectCreateInfo(const safe_VkRenderPassInputAttachmentAspectCreateInfo& src)
+{
+    sType = src.sType;
+    aspectReferenceCount = src.aspectReferenceCount;
+    pAspectReferences = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pAspectReferences) {
+        pAspectReferences = new VkInputAttachmentAspectReference[src.aspectReferenceCount];
+        memcpy ((void *)pAspectReferences, (void *)src.pAspectReferences, sizeof(VkInputAttachmentAspectReference)*src.aspectReferenceCount);
+    }
+}
+
+safe_VkRenderPassInputAttachmentAspectCreateInfo& safe_VkRenderPassInputAttachmentAspectCreateInfo::operator=(const safe_VkRenderPassInputAttachmentAspectCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pAspectReferences)
+        delete[] pAspectReferences;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    aspectReferenceCount = src.aspectReferenceCount;
+    pAspectReferences = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pAspectReferences) {
+        pAspectReferences = new VkInputAttachmentAspectReference[src.aspectReferenceCount];
+        memcpy ((void *)pAspectReferences, (void *)src.pAspectReferences, sizeof(VkInputAttachmentAspectReference)*src.aspectReferenceCount);
+    }
+
+    return *this;
+}
+
+safe_VkRenderPassInputAttachmentAspectCreateInfo::~safe_VkRenderPassInputAttachmentAspectCreateInfo()
+{
+    if (pAspectReferences)
+        delete[] pAspectReferences;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkRenderPassInputAttachmentAspectCreateInfo::initialize(const VkRenderPassInputAttachmentAspectCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    aspectReferenceCount = in_struct->aspectReferenceCount;
+    pAspectReferences = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pAspectReferences) {
+        pAspectReferences = new VkInputAttachmentAspectReference[in_struct->aspectReferenceCount];
+        memcpy ((void *)pAspectReferences, (void *)in_struct->pAspectReferences, sizeof(VkInputAttachmentAspectReference)*in_struct->aspectReferenceCount);
+    }
+}
+
+void safe_VkRenderPassInputAttachmentAspectCreateInfo::initialize(const safe_VkRenderPassInputAttachmentAspectCreateInfo* src)
+{
+    sType = src->sType;
+    aspectReferenceCount = src->aspectReferenceCount;
+    pAspectReferences = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pAspectReferences) {
+        pAspectReferences = new VkInputAttachmentAspectReference[src->aspectReferenceCount];
+        memcpy ((void *)pAspectReferences, (void *)src->pAspectReferences, sizeof(VkInputAttachmentAspectReference)*src->aspectReferenceCount);
+    }
+}
+
+safe_VkImageViewUsageCreateInfo::safe_VkImageViewUsageCreateInfo(const VkImageViewUsageCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    usage(in_struct->usage)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkImageViewUsageCreateInfo::safe_VkImageViewUsageCreateInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkImageViewUsageCreateInfo::safe_VkImageViewUsageCreateInfo(const safe_VkImageViewUsageCreateInfo& src)
+{
+    sType = src.sType;
+    usage = src.usage;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkImageViewUsageCreateInfo& safe_VkImageViewUsageCreateInfo::operator=(const safe_VkImageViewUsageCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    usage = src.usage;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkImageViewUsageCreateInfo::~safe_VkImageViewUsageCreateInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkImageViewUsageCreateInfo::initialize(const VkImageViewUsageCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    usage = in_struct->usage;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkImageViewUsageCreateInfo::initialize(const safe_VkImageViewUsageCreateInfo* src)
+{
+    sType = src->sType;
+    usage = src->usage;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPipelineTessellationDomainOriginStateCreateInfo::safe_VkPipelineTessellationDomainOriginStateCreateInfo(const VkPipelineTessellationDomainOriginStateCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    domainOrigin(in_struct->domainOrigin)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPipelineTessellationDomainOriginStateCreateInfo::safe_VkPipelineTessellationDomainOriginStateCreateInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkPipelineTessellationDomainOriginStateCreateInfo::safe_VkPipelineTessellationDomainOriginStateCreateInfo(const safe_VkPipelineTessellationDomainOriginStateCreateInfo& src)
+{
+    sType = src.sType;
+    domainOrigin = src.domainOrigin;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPipelineTessellationDomainOriginStateCreateInfo& safe_VkPipelineTessellationDomainOriginStateCreateInfo::operator=(const safe_VkPipelineTessellationDomainOriginStateCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    domainOrigin = src.domainOrigin;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPipelineTessellationDomainOriginStateCreateInfo::~safe_VkPipelineTessellationDomainOriginStateCreateInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineTessellationDomainOriginStateCreateInfo::initialize(const VkPipelineTessellationDomainOriginStateCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    domainOrigin = in_struct->domainOrigin;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPipelineTessellationDomainOriginStateCreateInfo::initialize(const safe_VkPipelineTessellationDomainOriginStateCreateInfo* src)
+{
+    sType = src->sType;
+    domainOrigin = src->domainOrigin;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkRenderPassMultiviewCreateInfo::safe_VkRenderPassMultiviewCreateInfo(const VkRenderPassMultiviewCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    subpassCount(in_struct->subpassCount),
+    pViewMasks(nullptr),
+    dependencyCount(in_struct->dependencyCount),
+    pViewOffsets(nullptr),
+    correlationMaskCount(in_struct->correlationMaskCount),
+    pCorrelationMasks(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pViewMasks) {
+        pViewMasks = new uint32_t[in_struct->subpassCount];
+        memcpy ((void *)pViewMasks, (void *)in_struct->pViewMasks, sizeof(uint32_t)*in_struct->subpassCount);
+    }
+    if (in_struct->pViewOffsets) {
+        pViewOffsets = new int32_t[in_struct->dependencyCount];
+        memcpy ((void *)pViewOffsets, (void *)in_struct->pViewOffsets, sizeof(int32_t)*in_struct->dependencyCount);
+    }
+    if (in_struct->pCorrelationMasks) {
+        pCorrelationMasks = new uint32_t[in_struct->correlationMaskCount];
+        memcpy ((void *)pCorrelationMasks, (void *)in_struct->pCorrelationMasks, sizeof(uint32_t)*in_struct->correlationMaskCount);
+    }
+}
+
+safe_VkRenderPassMultiviewCreateInfo::safe_VkRenderPassMultiviewCreateInfo() :
+    pNext(nullptr),
+    pViewMasks(nullptr),
+    pViewOffsets(nullptr),
+    pCorrelationMasks(nullptr)
+{}
+
+safe_VkRenderPassMultiviewCreateInfo::safe_VkRenderPassMultiviewCreateInfo(const safe_VkRenderPassMultiviewCreateInfo& src)
+{
+    sType = src.sType;
+    subpassCount = src.subpassCount;
+    pViewMasks = nullptr;
+    dependencyCount = src.dependencyCount;
+    pViewOffsets = nullptr;
+    correlationMaskCount = src.correlationMaskCount;
+    pCorrelationMasks = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pViewMasks) {
+        pViewMasks = new uint32_t[src.subpassCount];
+        memcpy ((void *)pViewMasks, (void *)src.pViewMasks, sizeof(uint32_t)*src.subpassCount);
+    }
+    if (src.pViewOffsets) {
+        pViewOffsets = new int32_t[src.dependencyCount];
+        memcpy ((void *)pViewOffsets, (void *)src.pViewOffsets, sizeof(int32_t)*src.dependencyCount);
+    }
+    if (src.pCorrelationMasks) {
+        pCorrelationMasks = new uint32_t[src.correlationMaskCount];
+        memcpy ((void *)pCorrelationMasks, (void *)src.pCorrelationMasks, sizeof(uint32_t)*src.correlationMaskCount);
+    }
+}
+
+safe_VkRenderPassMultiviewCreateInfo& safe_VkRenderPassMultiviewCreateInfo::operator=(const safe_VkRenderPassMultiviewCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pViewMasks)
+        delete[] pViewMasks;
+    if (pViewOffsets)
+        delete[] pViewOffsets;
+    if (pCorrelationMasks)
+        delete[] pCorrelationMasks;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    subpassCount = src.subpassCount;
+    pViewMasks = nullptr;
+    dependencyCount = src.dependencyCount;
+    pViewOffsets = nullptr;
+    correlationMaskCount = src.correlationMaskCount;
+    pCorrelationMasks = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pViewMasks) {
+        pViewMasks = new uint32_t[src.subpassCount];
+        memcpy ((void *)pViewMasks, (void *)src.pViewMasks, sizeof(uint32_t)*src.subpassCount);
+    }
+    if (src.pViewOffsets) {
+        pViewOffsets = new int32_t[src.dependencyCount];
+        memcpy ((void *)pViewOffsets, (void *)src.pViewOffsets, sizeof(int32_t)*src.dependencyCount);
+    }
+    if (src.pCorrelationMasks) {
+        pCorrelationMasks = new uint32_t[src.correlationMaskCount];
+        memcpy ((void *)pCorrelationMasks, (void *)src.pCorrelationMasks, sizeof(uint32_t)*src.correlationMaskCount);
+    }
+
+    return *this;
+}
+
+safe_VkRenderPassMultiviewCreateInfo::~safe_VkRenderPassMultiviewCreateInfo()
+{
+    if (pViewMasks)
+        delete[] pViewMasks;
+    if (pViewOffsets)
+        delete[] pViewOffsets;
+    if (pCorrelationMasks)
+        delete[] pCorrelationMasks;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkRenderPassMultiviewCreateInfo::initialize(const VkRenderPassMultiviewCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    subpassCount = in_struct->subpassCount;
+    pViewMasks = nullptr;
+    dependencyCount = in_struct->dependencyCount;
+    pViewOffsets = nullptr;
+    correlationMaskCount = in_struct->correlationMaskCount;
+    pCorrelationMasks = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pViewMasks) {
+        pViewMasks = new uint32_t[in_struct->subpassCount];
+        memcpy ((void *)pViewMasks, (void *)in_struct->pViewMasks, sizeof(uint32_t)*in_struct->subpassCount);
+    }
+    if (in_struct->pViewOffsets) {
+        pViewOffsets = new int32_t[in_struct->dependencyCount];
+        memcpy ((void *)pViewOffsets, (void *)in_struct->pViewOffsets, sizeof(int32_t)*in_struct->dependencyCount);
+    }
+    if (in_struct->pCorrelationMasks) {
+        pCorrelationMasks = new uint32_t[in_struct->correlationMaskCount];
+        memcpy ((void *)pCorrelationMasks, (void *)in_struct->pCorrelationMasks, sizeof(uint32_t)*in_struct->correlationMaskCount);
+    }
+}
+
+void safe_VkRenderPassMultiviewCreateInfo::initialize(const safe_VkRenderPassMultiviewCreateInfo* src)
+{
+    sType = src->sType;
+    subpassCount = src->subpassCount;
+    pViewMasks = nullptr;
+    dependencyCount = src->dependencyCount;
+    pViewOffsets = nullptr;
+    correlationMaskCount = src->correlationMaskCount;
+    pCorrelationMasks = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pViewMasks) {
+        pViewMasks = new uint32_t[src->subpassCount];
+        memcpy ((void *)pViewMasks, (void *)src->pViewMasks, sizeof(uint32_t)*src->subpassCount);
+    }
+    if (src->pViewOffsets) {
+        pViewOffsets = new int32_t[src->dependencyCount];
+        memcpy ((void *)pViewOffsets, (void *)src->pViewOffsets, sizeof(int32_t)*src->dependencyCount);
+    }
+    if (src->pCorrelationMasks) {
+        pCorrelationMasks = new uint32_t[src->correlationMaskCount];
+        memcpy ((void *)pCorrelationMasks, (void *)src->pCorrelationMasks, sizeof(uint32_t)*src->correlationMaskCount);
+    }
+}
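+
+// Each array here is paired with its own count and element type: pViewMasks
+// with subpassCount (uint32_t), pViewOffsets with dependencyCount (int32_t),
+// and pCorrelationMasks with correlationMaskCount (uint32_t).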
+
+safe_VkPhysicalDeviceMultiviewFeatures::safe_VkPhysicalDeviceMultiviewFeatures(const VkPhysicalDeviceMultiviewFeatures* in_struct) :
+    sType(in_struct->sType),
+    multiview(in_struct->multiview),
+    multiviewGeometryShader(in_struct->multiviewGeometryShader),
+    multiviewTessellationShader(in_struct->multiviewTessellationShader)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceMultiviewFeatures::safe_VkPhysicalDeviceMultiviewFeatures() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceMultiviewFeatures::safe_VkPhysicalDeviceMultiviewFeatures(const safe_VkPhysicalDeviceMultiviewFeatures& src)
+{
+    sType = src.sType;
+    multiview = src.multiview;
+    multiviewGeometryShader = src.multiviewGeometryShader;
+    multiviewTessellationShader = src.multiviewTessellationShader;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceMultiviewFeatures& safe_VkPhysicalDeviceMultiviewFeatures::operator=(const safe_VkPhysicalDeviceMultiviewFeatures& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    multiview = src.multiview;
+    multiviewGeometryShader = src.multiviewGeometryShader;
+    multiviewTessellationShader = src.multiviewTessellationShader;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceMultiviewFeatures::~safe_VkPhysicalDeviceMultiviewFeatures()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceMultiviewFeatures::initialize(const VkPhysicalDeviceMultiviewFeatures* in_struct)
+{
+    sType = in_struct->sType;
+    multiview = in_struct->multiview;
+    multiviewGeometryShader = in_struct->multiviewGeometryShader;
+    multiviewTessellationShader = in_struct->multiviewTessellationShader;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceMultiviewFeatures::initialize(const safe_VkPhysicalDeviceMultiviewFeatures* src)
+{
+    sType = src->sType;
+    multiview = src->multiview;
+    multiviewGeometryShader = src->multiviewGeometryShader;
+    multiviewTessellationShader = src->multiviewTessellationShader;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceMultiviewProperties::safe_VkPhysicalDeviceMultiviewProperties(const VkPhysicalDeviceMultiviewProperties* in_struct) :
+    sType(in_struct->sType),
+    maxMultiviewViewCount(in_struct->maxMultiviewViewCount),
+    maxMultiviewInstanceIndex(in_struct->maxMultiviewInstanceIndex)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceMultiviewProperties::safe_VkPhysicalDeviceMultiviewProperties() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceMultiviewProperties::safe_VkPhysicalDeviceMultiviewProperties(const safe_VkPhysicalDeviceMultiviewProperties& src)
+{
+    sType = src.sType;
+    maxMultiviewViewCount = src.maxMultiviewViewCount;
+    maxMultiviewInstanceIndex = src.maxMultiviewInstanceIndex;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceMultiviewProperties& safe_VkPhysicalDeviceMultiviewProperties::operator=(const safe_VkPhysicalDeviceMultiviewProperties& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    maxMultiviewViewCount = src.maxMultiviewViewCount;
+    maxMultiviewInstanceIndex = src.maxMultiviewInstanceIndex;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceMultiviewProperties::~safe_VkPhysicalDeviceMultiviewProperties()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceMultiviewProperties::initialize(const VkPhysicalDeviceMultiviewProperties* in_struct)
+{
+    sType = in_struct->sType;
+    maxMultiviewViewCount = in_struct->maxMultiviewViewCount;
+    maxMultiviewInstanceIndex = in_struct->maxMultiviewInstanceIndex;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceMultiviewProperties::initialize(const safe_VkPhysicalDeviceMultiviewProperties* src)
+{
+    sType = src->sType;
+    maxMultiviewViewCount = src->maxMultiviewViewCount;
+    maxMultiviewInstanceIndex = src->maxMultiviewInstanceIndex;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceVariablePointersFeatures::safe_VkPhysicalDeviceVariablePointersFeatures(const VkPhysicalDeviceVariablePointersFeatures* in_struct) :
+    sType(in_struct->sType),
+    variablePointersStorageBuffer(in_struct->variablePointersStorageBuffer),
+    variablePointers(in_struct->variablePointers)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceVariablePointersFeatures::safe_VkPhysicalDeviceVariablePointersFeatures() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceVariablePointersFeatures::safe_VkPhysicalDeviceVariablePointersFeatures(const safe_VkPhysicalDeviceVariablePointersFeatures& src)
+{
+    sType = src.sType;
+    variablePointersStorageBuffer = src.variablePointersStorageBuffer;
+    variablePointers = src.variablePointers;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceVariablePointersFeatures& safe_VkPhysicalDeviceVariablePointersFeatures::operator=(const safe_VkPhysicalDeviceVariablePointersFeatures& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    variablePointersStorageBuffer = src.variablePointersStorageBuffer;
+    variablePointers = src.variablePointers;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceVariablePointersFeatures::~safe_VkPhysicalDeviceVariablePointersFeatures()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceVariablePointersFeatures::initialize(const VkPhysicalDeviceVariablePointersFeatures* in_struct)
+{
+    sType = in_struct->sType;
+    variablePointersStorageBuffer = in_struct->variablePointersStorageBuffer;
+    variablePointers = in_struct->variablePointers;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceVariablePointersFeatures::initialize(const safe_VkPhysicalDeviceVariablePointersFeatures* src)
+{
+    sType = src->sType;
+    variablePointersStorageBuffer = src->variablePointersStorageBuffer;
+    variablePointers = src->variablePointers;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceProtectedMemoryFeatures::safe_VkPhysicalDeviceProtectedMemoryFeatures(const VkPhysicalDeviceProtectedMemoryFeatures* in_struct) :
+    sType(in_struct->sType),
+    protectedMemory(in_struct->protectedMemory)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceProtectedMemoryFeatures::safe_VkPhysicalDeviceProtectedMemoryFeatures() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceProtectedMemoryFeatures::safe_VkPhysicalDeviceProtectedMemoryFeatures(const safe_VkPhysicalDeviceProtectedMemoryFeatures& src)
+{
+    sType = src.sType;
+    protectedMemory = src.protectedMemory;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceProtectedMemoryFeatures& safe_VkPhysicalDeviceProtectedMemoryFeatures::operator=(const safe_VkPhysicalDeviceProtectedMemoryFeatures& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    protectedMemory = src.protectedMemory;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceProtectedMemoryFeatures::~safe_VkPhysicalDeviceProtectedMemoryFeatures()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceProtectedMemoryFeatures::initialize(const VkPhysicalDeviceProtectedMemoryFeatures* in_struct)
+{
+    sType = in_struct->sType;
+    protectedMemory = in_struct->protectedMemory;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceProtectedMemoryFeatures::initialize(const safe_VkPhysicalDeviceProtectedMemoryFeatures* src)
+{
+    sType = src->sType;
+    protectedMemory = src->protectedMemory;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceProtectedMemoryProperties::safe_VkPhysicalDeviceProtectedMemoryProperties(const VkPhysicalDeviceProtectedMemoryProperties* in_struct) :
+    sType(in_struct->sType),
+    protectedNoFault(in_struct->protectedNoFault)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceProtectedMemoryProperties::safe_VkPhysicalDeviceProtectedMemoryProperties() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceProtectedMemoryProperties::safe_VkPhysicalDeviceProtectedMemoryProperties(const safe_VkPhysicalDeviceProtectedMemoryProperties& src)
+{
+    sType = src.sType;
+    protectedNoFault = src.protectedNoFault;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceProtectedMemoryProperties& safe_VkPhysicalDeviceProtectedMemoryProperties::operator=(const safe_VkPhysicalDeviceProtectedMemoryProperties& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    protectedNoFault = src.protectedNoFault;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceProtectedMemoryProperties::~safe_VkPhysicalDeviceProtectedMemoryProperties()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceProtectedMemoryProperties::initialize(const VkPhysicalDeviceProtectedMemoryProperties* in_struct)
+{
+    sType = in_struct->sType;
+    protectedNoFault = in_struct->protectedNoFault;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceProtectedMemoryProperties::initialize(const safe_VkPhysicalDeviceProtectedMemoryProperties* src)
+{
+    sType = src->sType;
+    protectedNoFault = src->protectedNoFault;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDeviceQueueInfo2::safe_VkDeviceQueueInfo2(const VkDeviceQueueInfo2* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    queueFamilyIndex(in_struct->queueFamilyIndex),
+    queueIndex(in_struct->queueIndex)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDeviceQueueInfo2::safe_VkDeviceQueueInfo2() :
+    pNext(nullptr)
+{}
+
+safe_VkDeviceQueueInfo2::safe_VkDeviceQueueInfo2(const safe_VkDeviceQueueInfo2& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    queueFamilyIndex = src.queueFamilyIndex;
+    queueIndex = src.queueIndex;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDeviceQueueInfo2& safe_VkDeviceQueueInfo2::operator=(const safe_VkDeviceQueueInfo2& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    queueFamilyIndex = src.queueFamilyIndex;
+    queueIndex = src.queueIndex;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDeviceQueueInfo2::~safe_VkDeviceQueueInfo2()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDeviceQueueInfo2::initialize(const VkDeviceQueueInfo2* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    queueFamilyIndex = in_struct->queueFamilyIndex;
+    queueIndex = in_struct->queueIndex;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDeviceQueueInfo2::initialize(const safe_VkDeviceQueueInfo2* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    queueFamilyIndex = src->queueFamilyIndex;
+    queueIndex = src->queueIndex;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkProtectedSubmitInfo::safe_VkProtectedSubmitInfo(const VkProtectedSubmitInfo* in_struct) :
+    sType(in_struct->sType),
+    protectedSubmit(in_struct->protectedSubmit)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkProtectedSubmitInfo::safe_VkProtectedSubmitInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkProtectedSubmitInfo::safe_VkProtectedSubmitInfo(const safe_VkProtectedSubmitInfo& src)
+{
+    sType = src.sType;
+    protectedSubmit = src.protectedSubmit;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkProtectedSubmitInfo& safe_VkProtectedSubmitInfo::operator=(const safe_VkProtectedSubmitInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    protectedSubmit = src.protectedSubmit;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkProtectedSubmitInfo::~safe_VkProtectedSubmitInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkProtectedSubmitInfo::initialize(const VkProtectedSubmitInfo* in_struct)
+{
+    sType = in_struct->sType;
+    protectedSubmit = in_struct->protectedSubmit;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkProtectedSubmitInfo::initialize(const safe_VkProtectedSubmitInfo* src)
+{
+    sType = src->sType;
+    protectedSubmit = src->protectedSubmit;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkSamplerYcbcrConversionCreateInfo::safe_VkSamplerYcbcrConversionCreateInfo(const VkSamplerYcbcrConversionCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    format(in_struct->format),
+    ycbcrModel(in_struct->ycbcrModel),
+    ycbcrRange(in_struct->ycbcrRange),
+    components(in_struct->components),
+    xChromaOffset(in_struct->xChromaOffset),
+    yChromaOffset(in_struct->yChromaOffset),
+    chromaFilter(in_struct->chromaFilter),
+    forceExplicitReconstruction(in_struct->forceExplicitReconstruction)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkSamplerYcbcrConversionCreateInfo::safe_VkSamplerYcbcrConversionCreateInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkSamplerYcbcrConversionCreateInfo::safe_VkSamplerYcbcrConversionCreateInfo(const safe_VkSamplerYcbcrConversionCreateInfo& src)
+{
+    sType = src.sType;
+    format = src.format;
+    ycbcrModel = src.ycbcrModel;
+    ycbcrRange = src.ycbcrRange;
+    components = src.components;
+    xChromaOffset = src.xChromaOffset;
+    yChromaOffset = src.yChromaOffset;
+    chromaFilter = src.chromaFilter;
+    forceExplicitReconstruction = src.forceExplicitReconstruction;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkSamplerYcbcrConversionCreateInfo& safe_VkSamplerYcbcrConversionCreateInfo::operator=(const safe_VkSamplerYcbcrConversionCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    format = src.format;
+    ycbcrModel = src.ycbcrModel;
+    ycbcrRange = src.ycbcrRange;
+    components = src.components;
+    xChromaOffset = src.xChromaOffset;
+    yChromaOffset = src.yChromaOffset;
+    chromaFilter = src.chromaFilter;
+    forceExplicitReconstruction = src.forceExplicitReconstruction;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkSamplerYcbcrConversionCreateInfo::~safe_VkSamplerYcbcrConversionCreateInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSamplerYcbcrConversionCreateInfo::initialize(const VkSamplerYcbcrConversionCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    format = in_struct->format;
+    ycbcrModel = in_struct->ycbcrModel;
+    ycbcrRange = in_struct->ycbcrRange;
+    components = in_struct->components;
+    xChromaOffset = in_struct->xChromaOffset;
+    yChromaOffset = in_struct->yChromaOffset;
+    chromaFilter = in_struct->chromaFilter;
+    forceExplicitReconstruction = in_struct->forceExplicitReconstruction;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkSamplerYcbcrConversionCreateInfo::initialize(const safe_VkSamplerYcbcrConversionCreateInfo* src)
+{
+    sType = src->sType;
+    format = src->format;
+    ycbcrModel = src->ycbcrModel;
+    ycbcrRange = src->ycbcrRange;
+    components = src->components;
+    xChromaOffset = src->xChromaOffset;
+    yChromaOffset = src->yChromaOffset;
+    chromaFilter = src->chromaFilter;
+    forceExplicitReconstruction = src->forceExplicitReconstruction;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkSamplerYcbcrConversionInfo::safe_VkSamplerYcbcrConversionInfo(const VkSamplerYcbcrConversionInfo* in_struct) :
+    sType(in_struct->sType),
+    conversion(in_struct->conversion)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkSamplerYcbcrConversionInfo::safe_VkSamplerYcbcrConversionInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkSamplerYcbcrConversionInfo::safe_VkSamplerYcbcrConversionInfo(const safe_VkSamplerYcbcrConversionInfo& src)
+{
+    sType = src.sType;
+    conversion = src.conversion;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkSamplerYcbcrConversionInfo& safe_VkSamplerYcbcrConversionInfo::operator=(const safe_VkSamplerYcbcrConversionInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    conversion = src.conversion;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkSamplerYcbcrConversionInfo::~safe_VkSamplerYcbcrConversionInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSamplerYcbcrConversionInfo::initialize(const VkSamplerYcbcrConversionInfo* in_struct)
+{
+    sType = in_struct->sType;
+    conversion = in_struct->conversion;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkSamplerYcbcrConversionInfo::initialize(const safe_VkSamplerYcbcrConversionInfo* src)
+{
+    sType = src->sType;
+    conversion = src->conversion;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkBindImagePlaneMemoryInfo::safe_VkBindImagePlaneMemoryInfo(const VkBindImagePlaneMemoryInfo* in_struct) :
+    sType(in_struct->sType),
+    planeAspect(in_struct->planeAspect)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkBindImagePlaneMemoryInfo::safe_VkBindImagePlaneMemoryInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkBindImagePlaneMemoryInfo::safe_VkBindImagePlaneMemoryInfo(const safe_VkBindImagePlaneMemoryInfo& src)
+{
+    sType = src.sType;
+    planeAspect = src.planeAspect;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkBindImagePlaneMemoryInfo& safe_VkBindImagePlaneMemoryInfo::operator=(const safe_VkBindImagePlaneMemoryInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    planeAspect = src.planeAspect;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkBindImagePlaneMemoryInfo::~safe_VkBindImagePlaneMemoryInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkBindImagePlaneMemoryInfo::initialize(const VkBindImagePlaneMemoryInfo* in_struct)
+{
+    sType = in_struct->sType;
+    planeAspect = in_struct->planeAspect;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkBindImagePlaneMemoryInfo::initialize(const safe_VkBindImagePlaneMemoryInfo* src)
+{
+    sType = src->sType;
+    planeAspect = src->planeAspect;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkImagePlaneMemoryRequirementsInfo::safe_VkImagePlaneMemoryRequirementsInfo(const VkImagePlaneMemoryRequirementsInfo* in_struct) :
+    sType(in_struct->sType),
+    planeAspect(in_struct->planeAspect)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkImagePlaneMemoryRequirementsInfo::safe_VkImagePlaneMemoryRequirementsInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkImagePlaneMemoryRequirementsInfo::safe_VkImagePlaneMemoryRequirementsInfo(const safe_VkImagePlaneMemoryRequirementsInfo& src)
+{
+    sType = src.sType;
+    planeAspect = src.planeAspect;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkImagePlaneMemoryRequirementsInfo& safe_VkImagePlaneMemoryRequirementsInfo::operator=(const safe_VkImagePlaneMemoryRequirementsInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    planeAspect = src.planeAspect;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkImagePlaneMemoryRequirementsInfo::~safe_VkImagePlaneMemoryRequirementsInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkImagePlaneMemoryRequirementsInfo::initialize(const VkImagePlaneMemoryRequirementsInfo* in_struct)
+{
+    sType = in_struct->sType;
+    planeAspect = in_struct->planeAspect;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkImagePlaneMemoryRequirementsInfo::initialize(const safe_VkImagePlaneMemoryRequirementsInfo* src)
+{
+    sType = src->sType;
+    planeAspect = src->planeAspect;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures::safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures(const VkPhysicalDeviceSamplerYcbcrConversionFeatures* in_struct) :
+    sType(in_struct->sType),
+    samplerYcbcrConversion(in_struct->samplerYcbcrConversion)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures::safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures::safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures(const safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures& src)
+{
+    sType = src.sType;
+    samplerYcbcrConversion = src.samplerYcbcrConversion;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures& safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures::operator=(const safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    samplerYcbcrConversion = src.samplerYcbcrConversion;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures::~safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures::initialize(const VkPhysicalDeviceSamplerYcbcrConversionFeatures* in_struct)
+{
+    sType = in_struct->sType;
+    samplerYcbcrConversion = in_struct->samplerYcbcrConversion;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures::initialize(const safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures* src)
+{
+    sType = src->sType;
+    samplerYcbcrConversion = src->samplerYcbcrConversion;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkSamplerYcbcrConversionImageFormatProperties::safe_VkSamplerYcbcrConversionImageFormatProperties(const VkSamplerYcbcrConversionImageFormatProperties* in_struct) :
+    sType(in_struct->sType),
+    combinedImageSamplerDescriptorCount(in_struct->combinedImageSamplerDescriptorCount)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkSamplerYcbcrConversionImageFormatProperties::safe_VkSamplerYcbcrConversionImageFormatProperties() :
+    pNext(nullptr)
+{}
+
+safe_VkSamplerYcbcrConversionImageFormatProperties::safe_VkSamplerYcbcrConversionImageFormatProperties(const safe_VkSamplerYcbcrConversionImageFormatProperties& src)
+{
+    sType = src.sType;
+    combinedImageSamplerDescriptorCount = src.combinedImageSamplerDescriptorCount;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkSamplerYcbcrConversionImageFormatProperties& safe_VkSamplerYcbcrConversionImageFormatProperties::operator=(const safe_VkSamplerYcbcrConversionImageFormatProperties& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    combinedImageSamplerDescriptorCount = src.combinedImageSamplerDescriptorCount;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkSamplerYcbcrConversionImageFormatProperties::~safe_VkSamplerYcbcrConversionImageFormatProperties()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSamplerYcbcrConversionImageFormatProperties::initialize(const VkSamplerYcbcrConversionImageFormatProperties* in_struct)
+{
+    sType = in_struct->sType;
+    combinedImageSamplerDescriptorCount = in_struct->combinedImageSamplerDescriptorCount;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkSamplerYcbcrConversionImageFormatProperties::initialize(const safe_VkSamplerYcbcrConversionImageFormatProperties* src)
+{
+    sType = src->sType;
+    combinedImageSamplerDescriptorCount = src->combinedImageSamplerDescriptorCount;
+    pNext = SafePnextCopy(src->pNext);
+}
+
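+// VkDescriptorUpdateTemplateCreateInfo owns an array of update entries:
+// pDescriptorUpdateEntries (descriptorUpdateEntryCount elements) is
+// deep-copied in the constructors, operator= and initialize(), and
+// released with delete[] in the destructor.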
+safe_VkDescriptorUpdateTemplateCreateInfo::safe_VkDescriptorUpdateTemplateCreateInfo(const VkDescriptorUpdateTemplateCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    descriptorUpdateEntryCount(in_struct->descriptorUpdateEntryCount),
+    pDescriptorUpdateEntries(nullptr),
+    templateType(in_struct->templateType),
+    descriptorSetLayout(in_struct->descriptorSetLayout),
+    pipelineBindPoint(in_struct->pipelineBindPoint),
+    pipelineLayout(in_struct->pipelineLayout),
+    set(in_struct->set)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pDescriptorUpdateEntries) {
+        pDescriptorUpdateEntries = new VkDescriptorUpdateTemplateEntry[in_struct->descriptorUpdateEntryCount];
+        memcpy ((void *)pDescriptorUpdateEntries, (void *)in_struct->pDescriptorUpdateEntries, sizeof(VkDescriptorUpdateTemplateEntry)*in_struct->descriptorUpdateEntryCount);
+    }
+}
+
+safe_VkDescriptorUpdateTemplateCreateInfo::safe_VkDescriptorUpdateTemplateCreateInfo() :
+    pNext(nullptr),
+    pDescriptorUpdateEntries(nullptr)
+{}
+
+safe_VkDescriptorUpdateTemplateCreateInfo::safe_VkDescriptorUpdateTemplateCreateInfo(const safe_VkDescriptorUpdateTemplateCreateInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    descriptorUpdateEntryCount = src.descriptorUpdateEntryCount;
+    pDescriptorUpdateEntries = nullptr;
+    templateType = src.templateType;
+    descriptorSetLayout = src.descriptorSetLayout;
+    pipelineBindPoint = src.pipelineBindPoint;
+    pipelineLayout = src.pipelineLayout;
+    set = src.set;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pDescriptorUpdateEntries) {
+        pDescriptorUpdateEntries = new VkDescriptorUpdateTemplateEntry[src.descriptorUpdateEntryCount];
+        memcpy ((void *)pDescriptorUpdateEntries, (void *)src.pDescriptorUpdateEntries, sizeof(VkDescriptorUpdateTemplateEntry)*src.descriptorUpdateEntryCount);
+    }
+}
+
+safe_VkDescriptorUpdateTemplateCreateInfo& safe_VkDescriptorUpdateTemplateCreateInfo::operator=(const safe_VkDescriptorUpdateTemplateCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pDescriptorUpdateEntries)
+        delete[] pDescriptorUpdateEntries;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    descriptorUpdateEntryCount = src.descriptorUpdateEntryCount;
+    pDescriptorUpdateEntries = nullptr;
+    templateType = src.templateType;
+    descriptorSetLayout = src.descriptorSetLayout;
+    pipelineBindPoint = src.pipelineBindPoint;
+    pipelineLayout = src.pipelineLayout;
+    set = src.set;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pDescriptorUpdateEntries) {
+        pDescriptorUpdateEntries = new VkDescriptorUpdateTemplateEntry[src.descriptorUpdateEntryCount];
+        memcpy ((void *)pDescriptorUpdateEntries, (void *)src.pDescriptorUpdateEntries, sizeof(VkDescriptorUpdateTemplateEntry)*src.descriptorUpdateEntryCount);
+    }
+
+    return *this;
+}
+
+safe_VkDescriptorUpdateTemplateCreateInfo::~safe_VkDescriptorUpdateTemplateCreateInfo()
+{
+    if (pDescriptorUpdateEntries)
+        delete[] pDescriptorUpdateEntries;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDescriptorUpdateTemplateCreateInfo::initialize(const VkDescriptorUpdateTemplateCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    descriptorUpdateEntryCount = in_struct->descriptorUpdateEntryCount;
+    pDescriptorUpdateEntries = nullptr;
+    templateType = in_struct->templateType;
+    descriptorSetLayout = in_struct->descriptorSetLayout;
+    pipelineBindPoint = in_struct->pipelineBindPoint;
+    pipelineLayout = in_struct->pipelineLayout;
+    set = in_struct->set;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pDescriptorUpdateEntries) {
+        pDescriptorUpdateEntries = new VkDescriptorUpdateTemplateEntry[in_struct->descriptorUpdateEntryCount];
+        memcpy ((void *)pDescriptorUpdateEntries, (void *)in_struct->pDescriptorUpdateEntries, sizeof(VkDescriptorUpdateTemplateEntry)*in_struct->descriptorUpdateEntryCount);
+    }
+}
+
+void safe_VkDescriptorUpdateTemplateCreateInfo::initialize(const safe_VkDescriptorUpdateTemplateCreateInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    descriptorUpdateEntryCount = src->descriptorUpdateEntryCount;
+    pDescriptorUpdateEntries = nullptr;
+    templateType = src->templateType;
+    descriptorSetLayout = src->descriptorSetLayout;
+    pipelineBindPoint = src->pipelineBindPoint;
+    pipelineLayout = src->pipelineLayout;
+    set = src->set;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pDescriptorUpdateEntries) {
+        pDescriptorUpdateEntries = new VkDescriptorUpdateTemplateEntry[src->descriptorUpdateEntryCount];
+        memcpy ((void *)pDescriptorUpdateEntries, (void *)src->pDescriptorUpdateEntries, sizeof(VkDescriptorUpdateTemplateEntry)*src->descriptorUpdateEntryCount);
+    }
+}
+
+safe_VkPhysicalDeviceExternalImageFormatInfo::safe_VkPhysicalDeviceExternalImageFormatInfo(const VkPhysicalDeviceExternalImageFormatInfo* in_struct) :
+    sType(in_struct->sType),
+    handleType(in_struct->handleType)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceExternalImageFormatInfo::safe_VkPhysicalDeviceExternalImageFormatInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceExternalImageFormatInfo::safe_VkPhysicalDeviceExternalImageFormatInfo(const safe_VkPhysicalDeviceExternalImageFormatInfo& src)
+{
+    sType = src.sType;
+    handleType = src.handleType;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceExternalImageFormatInfo& safe_VkPhysicalDeviceExternalImageFormatInfo::operator=(const safe_VkPhysicalDeviceExternalImageFormatInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    handleType = src.handleType;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceExternalImageFormatInfo::~safe_VkPhysicalDeviceExternalImageFormatInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceExternalImageFormatInfo::initialize(const VkPhysicalDeviceExternalImageFormatInfo* in_struct)
+{
+    sType = in_struct->sType;
+    handleType = in_struct->handleType;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceExternalImageFormatInfo::initialize(const safe_VkPhysicalDeviceExternalImageFormatInfo* src)
+{
+    sType = src->sType;
+    handleType = src->handleType;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkExternalImageFormatProperties::safe_VkExternalImageFormatProperties(const VkExternalImageFormatProperties* in_struct) :
+    sType(in_struct->sType),
+    externalMemoryProperties(in_struct->externalMemoryProperties)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkExternalImageFormatProperties::safe_VkExternalImageFormatProperties() :
+    pNext(nullptr)
+{}
+
+safe_VkExternalImageFormatProperties::safe_VkExternalImageFormatProperties(const safe_VkExternalImageFormatProperties& src)
+{
+    sType = src.sType;
+    externalMemoryProperties = src.externalMemoryProperties;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkExternalImageFormatProperties& safe_VkExternalImageFormatProperties::operator=(const safe_VkExternalImageFormatProperties& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    externalMemoryProperties = src.externalMemoryProperties;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkExternalImageFormatProperties::~safe_VkExternalImageFormatProperties()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkExternalImageFormatProperties::initialize(const VkExternalImageFormatProperties* in_struct)
+{
+    sType = in_struct->sType;
+    externalMemoryProperties = in_struct->externalMemoryProperties;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkExternalImageFormatProperties::initialize(const safe_VkExternalImageFormatProperties* src)
+{
+    sType = src->sType;
+    externalMemoryProperties = src->externalMemoryProperties;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceExternalBufferInfo::safe_VkPhysicalDeviceExternalBufferInfo(const VkPhysicalDeviceExternalBufferInfo* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    usage(in_struct->usage),
+    handleType(in_struct->handleType)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceExternalBufferInfo::safe_VkPhysicalDeviceExternalBufferInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceExternalBufferInfo::safe_VkPhysicalDeviceExternalBufferInfo(const safe_VkPhysicalDeviceExternalBufferInfo& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    usage = src.usage;
+    handleType = src.handleType;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceExternalBufferInfo& safe_VkPhysicalDeviceExternalBufferInfo::operator=(const safe_VkPhysicalDeviceExternalBufferInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    usage = src.usage;
+    handleType = src.handleType;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceExternalBufferInfo::~safe_VkPhysicalDeviceExternalBufferInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceExternalBufferInfo::initialize(const VkPhysicalDeviceExternalBufferInfo* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    usage = in_struct->usage;
+    handleType = in_struct->handleType;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceExternalBufferInfo::initialize(const safe_VkPhysicalDeviceExternalBufferInfo* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    usage = src->usage;
+    handleType = src->handleType;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkExternalBufferProperties::safe_VkExternalBufferProperties(const VkExternalBufferProperties* in_struct) :
+    sType(in_struct->sType),
+    externalMemoryProperties(in_struct->externalMemoryProperties)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkExternalBufferProperties::safe_VkExternalBufferProperties() :
+    pNext(nullptr)
+{}
+
+safe_VkExternalBufferProperties::safe_VkExternalBufferProperties(const safe_VkExternalBufferProperties& src)
+{
+    sType = src.sType;
+    externalMemoryProperties = src.externalMemoryProperties;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkExternalBufferProperties& safe_VkExternalBufferProperties::operator=(const safe_VkExternalBufferProperties& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    externalMemoryProperties = src.externalMemoryProperties;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkExternalBufferProperties::~safe_VkExternalBufferProperties()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkExternalBufferProperties::initialize(const VkExternalBufferProperties* in_struct)
+{
+    sType = in_struct->sType;
+    externalMemoryProperties = in_struct->externalMemoryProperties;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkExternalBufferProperties::initialize(const safe_VkExternalBufferProperties* src)
+{
+    sType = src->sType;
+    externalMemoryProperties = src->externalMemoryProperties;
+    pNext = SafePnextCopy(src->pNext);
+}
+
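+// VkPhysicalDeviceIDProperties embeds fixed-size byte arrays
+// (deviceUUID/driverUUID of VK_UUID_SIZE, deviceLUID of VK_LUID_SIZE),
+// which are copied element by element rather than heap-allocated.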
+safe_VkPhysicalDeviceIDProperties::safe_VkPhysicalDeviceIDProperties(const VkPhysicalDeviceIDProperties* in_struct) :
+    sType(in_struct->sType),
+    deviceNodeMask(in_struct->deviceNodeMask),
+    deviceLUIDValid(in_struct->deviceLUIDValid)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    for (uint32_t i = 0; i < VK_UUID_SIZE; ++i) {
+        deviceUUID[i] = in_struct->deviceUUID[i];
+    }
+    for (uint32_t i = 0; i < VK_UUID_SIZE; ++i) {
+        driverUUID[i] = in_struct->driverUUID[i];
+    }
+    for (uint32_t i = 0; i < VK_LUID_SIZE; ++i) {
+        deviceLUID[i] = in_struct->deviceLUID[i];
+    }
+}
+
+safe_VkPhysicalDeviceIDProperties::safe_VkPhysicalDeviceIDProperties() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceIDProperties::safe_VkPhysicalDeviceIDProperties(const safe_VkPhysicalDeviceIDProperties& src)
+{
+    sType = src.sType;
+    deviceNodeMask = src.deviceNodeMask;
+    deviceLUIDValid = src.deviceLUIDValid;
+    pNext = SafePnextCopy(src.pNext);
+    for (uint32_t i = 0; i < VK_UUID_SIZE; ++i) {
+        deviceUUID[i] = src.deviceUUID[i];
+    }
+    for (uint32_t i = 0; i < VK_UUID_SIZE; ++i) {
+        driverUUID[i] = src.driverUUID[i];
+    }
+    for (uint32_t i = 0; i < VK_LUID_SIZE; ++i) {
+        deviceLUID[i] = src.deviceLUID[i];
+    }
+}
+
+safe_VkPhysicalDeviceIDProperties& safe_VkPhysicalDeviceIDProperties::operator=(const safe_VkPhysicalDeviceIDProperties& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    deviceNodeMask = src.deviceNodeMask;
+    deviceLUIDValid = src.deviceLUIDValid;
+    pNext = SafePnextCopy(src.pNext);
+    for (uint32_t i = 0; i < VK_UUID_SIZE; ++i) {
+        deviceUUID[i] = src.deviceUUID[i];
+    }
+    for (uint32_t i = 0; i < VK_UUID_SIZE; ++i) {
+        driverUUID[i] = src.driverUUID[i];
+    }
+    for (uint32_t i = 0; i < VK_LUID_SIZE; ++i) {
+        deviceLUID[i] = src.deviceLUID[i];
+    }
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceIDProperties::~safe_VkPhysicalDeviceIDProperties()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceIDProperties::initialize(const VkPhysicalDeviceIDProperties* in_struct)
+{
+    sType = in_struct->sType;
+    deviceNodeMask = in_struct->deviceNodeMask;
+    deviceLUIDValid = in_struct->deviceLUIDValid;
+    pNext = SafePnextCopy(in_struct->pNext);
+    for (uint32_t i = 0; i < VK_UUID_SIZE; ++i) {
+        deviceUUID[i] = in_struct->deviceUUID[i];
+    }
+    for (uint32_t i = 0; i < VK_UUID_SIZE; ++i) {
+        driverUUID[i] = in_struct->driverUUID[i];
+    }
+    for (uint32_t i = 0; i < VK_LUID_SIZE; ++i) {
+        deviceLUID[i] = in_struct->deviceLUID[i];
+    }
+}
+
+void safe_VkPhysicalDeviceIDProperties::initialize(const safe_VkPhysicalDeviceIDProperties* src)
+{
+    sType = src->sType;
+    deviceNodeMask = src->deviceNodeMask;
+    deviceLUIDValid = src->deviceLUIDValid;
+    pNext = SafePnextCopy(src->pNext);
+    for (uint32_t i = 0; i < VK_UUID_SIZE; ++i) {
+        deviceUUID[i] = src->deviceUUID[i];
+    }
+    for (uint32_t i = 0; i < VK_UUID_SIZE; ++i) {
+        driverUUID[i] = src->driverUUID[i];
+    }
+    for (uint32_t i = 0; i < VK_LUID_SIZE; ++i) {
+        deviceLUID[i] = src->deviceLUID[i];
+    }
+}
+
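+// The external memory/fence/semaphore info and properties structs below,
+// together with Maintenance3Properties, DescriptorSetLayoutSupport and
+// ShaderDrawParametersFeatures, are again pNext-only wrappers with no
+// heap-allocated members of their own.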
+safe_VkExternalMemoryImageCreateInfo::safe_VkExternalMemoryImageCreateInfo(const VkExternalMemoryImageCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    handleTypes(in_struct->handleTypes)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkExternalMemoryImageCreateInfo::safe_VkExternalMemoryImageCreateInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkExternalMemoryImageCreateInfo::safe_VkExternalMemoryImageCreateInfo(const safe_VkExternalMemoryImageCreateInfo& src)
+{
+    sType = src.sType;
+    handleTypes = src.handleTypes;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkExternalMemoryImageCreateInfo& safe_VkExternalMemoryImageCreateInfo::operator=(const safe_VkExternalMemoryImageCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    handleTypes = src.handleTypes;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkExternalMemoryImageCreateInfo::~safe_VkExternalMemoryImageCreateInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkExternalMemoryImageCreateInfo::initialize(const VkExternalMemoryImageCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    handleTypes = in_struct->handleTypes;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkExternalMemoryImageCreateInfo::initialize(const safe_VkExternalMemoryImageCreateInfo* src)
+{
+    sType = src->sType;
+    handleTypes = src->handleTypes;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkExternalMemoryBufferCreateInfo::safe_VkExternalMemoryBufferCreateInfo(const VkExternalMemoryBufferCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    handleTypes(in_struct->handleTypes)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkExternalMemoryBufferCreateInfo::safe_VkExternalMemoryBufferCreateInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkExternalMemoryBufferCreateInfo::safe_VkExternalMemoryBufferCreateInfo(const safe_VkExternalMemoryBufferCreateInfo& src)
+{
+    sType = src.sType;
+    handleTypes = src.handleTypes;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkExternalMemoryBufferCreateInfo& safe_VkExternalMemoryBufferCreateInfo::operator=(const safe_VkExternalMemoryBufferCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    handleTypes = src.handleTypes;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkExternalMemoryBufferCreateInfo::~safe_VkExternalMemoryBufferCreateInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkExternalMemoryBufferCreateInfo::initialize(const VkExternalMemoryBufferCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    handleTypes = in_struct->handleTypes;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkExternalMemoryBufferCreateInfo::initialize(const safe_VkExternalMemoryBufferCreateInfo* src)
+{
+    sType = src->sType;
+    handleTypes = src->handleTypes;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkExportMemoryAllocateInfo::safe_VkExportMemoryAllocateInfo(const VkExportMemoryAllocateInfo* in_struct) :
+    sType(in_struct->sType),
+    handleTypes(in_struct->handleTypes)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkExportMemoryAllocateInfo::safe_VkExportMemoryAllocateInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkExportMemoryAllocateInfo::safe_VkExportMemoryAllocateInfo(const safe_VkExportMemoryAllocateInfo& src)
+{
+    sType = src.sType;
+    handleTypes = src.handleTypes;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkExportMemoryAllocateInfo& safe_VkExportMemoryAllocateInfo::operator=(const safe_VkExportMemoryAllocateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    handleTypes = src.handleTypes;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkExportMemoryAllocateInfo::~safe_VkExportMemoryAllocateInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkExportMemoryAllocateInfo::initialize(const VkExportMemoryAllocateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    handleTypes = in_struct->handleTypes;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkExportMemoryAllocateInfo::initialize(const safe_VkExportMemoryAllocateInfo* src)
+{
+    sType = src->sType;
+    handleTypes = src->handleTypes;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceExternalFenceInfo::safe_VkPhysicalDeviceExternalFenceInfo(const VkPhysicalDeviceExternalFenceInfo* in_struct) :
+    sType(in_struct->sType),
+    handleType(in_struct->handleType)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceExternalFenceInfo::safe_VkPhysicalDeviceExternalFenceInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceExternalFenceInfo::safe_VkPhysicalDeviceExternalFenceInfo(const safe_VkPhysicalDeviceExternalFenceInfo& src)
+{
+    sType = src.sType;
+    handleType = src.handleType;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceExternalFenceInfo& safe_VkPhysicalDeviceExternalFenceInfo::operator=(const safe_VkPhysicalDeviceExternalFenceInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    handleType = src.handleType;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceExternalFenceInfo::~safe_VkPhysicalDeviceExternalFenceInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceExternalFenceInfo::initialize(const VkPhysicalDeviceExternalFenceInfo* in_struct)
+{
+    sType = in_struct->sType;
+    handleType = in_struct->handleType;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceExternalFenceInfo::initialize(const safe_VkPhysicalDeviceExternalFenceInfo* src)
+{
+    sType = src->sType;
+    handleType = src->handleType;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkExternalFenceProperties::safe_VkExternalFenceProperties(const VkExternalFenceProperties* in_struct) :
+    sType(in_struct->sType),
+    exportFromImportedHandleTypes(in_struct->exportFromImportedHandleTypes),
+    compatibleHandleTypes(in_struct->compatibleHandleTypes),
+    externalFenceFeatures(in_struct->externalFenceFeatures)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkExternalFenceProperties::safe_VkExternalFenceProperties() :
+    pNext(nullptr)
+{}
+
+safe_VkExternalFenceProperties::safe_VkExternalFenceProperties(const safe_VkExternalFenceProperties& src)
+{
+    sType = src.sType;
+    exportFromImportedHandleTypes = src.exportFromImportedHandleTypes;
+    compatibleHandleTypes = src.compatibleHandleTypes;
+    externalFenceFeatures = src.externalFenceFeatures;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkExternalFenceProperties& safe_VkExternalFenceProperties::operator=(const safe_VkExternalFenceProperties& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    exportFromImportedHandleTypes = src.exportFromImportedHandleTypes;
+    compatibleHandleTypes = src.compatibleHandleTypes;
+    externalFenceFeatures = src.externalFenceFeatures;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkExternalFenceProperties::~safe_VkExternalFenceProperties()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkExternalFenceProperties::initialize(const VkExternalFenceProperties* in_struct)
+{
+    sType = in_struct->sType;
+    exportFromImportedHandleTypes = in_struct->exportFromImportedHandleTypes;
+    compatibleHandleTypes = in_struct->compatibleHandleTypes;
+    externalFenceFeatures = in_struct->externalFenceFeatures;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkExternalFenceProperties::initialize(const safe_VkExternalFenceProperties* src)
+{
+    sType = src->sType;
+    exportFromImportedHandleTypes = src->exportFromImportedHandleTypes;
+    compatibleHandleTypes = src->compatibleHandleTypes;
+    externalFenceFeatures = src->externalFenceFeatures;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkExportFenceCreateInfo::safe_VkExportFenceCreateInfo(const VkExportFenceCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    handleTypes(in_struct->handleTypes)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkExportFenceCreateInfo::safe_VkExportFenceCreateInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkExportFenceCreateInfo::safe_VkExportFenceCreateInfo(const safe_VkExportFenceCreateInfo& src)
+{
+    sType = src.sType;
+    handleTypes = src.handleTypes;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkExportFenceCreateInfo& safe_VkExportFenceCreateInfo::operator=(const safe_VkExportFenceCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    handleTypes = src.handleTypes;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkExportFenceCreateInfo::~safe_VkExportFenceCreateInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkExportFenceCreateInfo::initialize(const VkExportFenceCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    handleTypes = in_struct->handleTypes;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkExportFenceCreateInfo::initialize(const safe_VkExportFenceCreateInfo* src)
+{
+    sType = src->sType;
+    handleTypes = src->handleTypes;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkExportSemaphoreCreateInfo::safe_VkExportSemaphoreCreateInfo(const VkExportSemaphoreCreateInfo* in_struct) :
+    sType(in_struct->sType),
+    handleTypes(in_struct->handleTypes)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkExportSemaphoreCreateInfo::safe_VkExportSemaphoreCreateInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkExportSemaphoreCreateInfo::safe_VkExportSemaphoreCreateInfo(const safe_VkExportSemaphoreCreateInfo& src)
+{
+    sType = src.sType;
+    handleTypes = src.handleTypes;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkExportSemaphoreCreateInfo& safe_VkExportSemaphoreCreateInfo::operator=(const safe_VkExportSemaphoreCreateInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    handleTypes = src.handleTypes;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkExportSemaphoreCreateInfo::~safe_VkExportSemaphoreCreateInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkExportSemaphoreCreateInfo::initialize(const VkExportSemaphoreCreateInfo* in_struct)
+{
+    sType = in_struct->sType;
+    handleTypes = in_struct->handleTypes;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkExportSemaphoreCreateInfo::initialize(const safe_VkExportSemaphoreCreateInfo* src)
+{
+    sType = src->sType;
+    handleTypes = src->handleTypes;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceExternalSemaphoreInfo::safe_VkPhysicalDeviceExternalSemaphoreInfo(const VkPhysicalDeviceExternalSemaphoreInfo* in_struct) :
+    sType(in_struct->sType),
+    handleType(in_struct->handleType)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceExternalSemaphoreInfo::safe_VkPhysicalDeviceExternalSemaphoreInfo() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceExternalSemaphoreInfo::safe_VkPhysicalDeviceExternalSemaphoreInfo(const safe_VkPhysicalDeviceExternalSemaphoreInfo& src)
+{
+    sType = src.sType;
+    handleType = src.handleType;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceExternalSemaphoreInfo& safe_VkPhysicalDeviceExternalSemaphoreInfo::operator=(const safe_VkPhysicalDeviceExternalSemaphoreInfo& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    handleType = src.handleType;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceExternalSemaphoreInfo::~safe_VkPhysicalDeviceExternalSemaphoreInfo()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceExternalSemaphoreInfo::initialize(const VkPhysicalDeviceExternalSemaphoreInfo* in_struct)
+{
+    sType = in_struct->sType;
+    handleType = in_struct->handleType;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceExternalSemaphoreInfo::initialize(const safe_VkPhysicalDeviceExternalSemaphoreInfo* src)
+{
+    sType = src->sType;
+    handleType = src->handleType;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkExternalSemaphoreProperties::safe_VkExternalSemaphoreProperties(const VkExternalSemaphoreProperties* in_struct) :
+    sType(in_struct->sType),
+    exportFromImportedHandleTypes(in_struct->exportFromImportedHandleTypes),
+    compatibleHandleTypes(in_struct->compatibleHandleTypes),
+    externalSemaphoreFeatures(in_struct->externalSemaphoreFeatures)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkExternalSemaphoreProperties::safe_VkExternalSemaphoreProperties() :
+    pNext(nullptr)
+{}
+
+safe_VkExternalSemaphoreProperties::safe_VkExternalSemaphoreProperties(const safe_VkExternalSemaphoreProperties& src)
+{
+    sType = src.sType;
+    exportFromImportedHandleTypes = src.exportFromImportedHandleTypes;
+    compatibleHandleTypes = src.compatibleHandleTypes;
+    externalSemaphoreFeatures = src.externalSemaphoreFeatures;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkExternalSemaphoreProperties& safe_VkExternalSemaphoreProperties::operator=(const safe_VkExternalSemaphoreProperties& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    exportFromImportedHandleTypes = src.exportFromImportedHandleTypes;
+    compatibleHandleTypes = src.compatibleHandleTypes;
+    externalSemaphoreFeatures = src.externalSemaphoreFeatures;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkExternalSemaphoreProperties::~safe_VkExternalSemaphoreProperties()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkExternalSemaphoreProperties::initialize(const VkExternalSemaphoreProperties* in_struct)
+{
+    sType = in_struct->sType;
+    exportFromImportedHandleTypes = in_struct->exportFromImportedHandleTypes;
+    compatibleHandleTypes = in_struct->compatibleHandleTypes;
+    externalSemaphoreFeatures = in_struct->externalSemaphoreFeatures;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkExternalSemaphoreProperties::initialize(const safe_VkExternalSemaphoreProperties* src)
+{
+    sType = src->sType;
+    exportFromImportedHandleTypes = src->exportFromImportedHandleTypes;
+    compatibleHandleTypes = src->compatibleHandleTypes;
+    externalSemaphoreFeatures = src->externalSemaphoreFeatures;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceMaintenance3Properties::safe_VkPhysicalDeviceMaintenance3Properties(const VkPhysicalDeviceMaintenance3Properties* in_struct) :
+    sType(in_struct->sType),
+    maxPerSetDescriptors(in_struct->maxPerSetDescriptors),
+    maxMemoryAllocationSize(in_struct->maxMemoryAllocationSize)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceMaintenance3Properties::safe_VkPhysicalDeviceMaintenance3Properties() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceMaintenance3Properties::safe_VkPhysicalDeviceMaintenance3Properties(const safe_VkPhysicalDeviceMaintenance3Properties& src)
+{
+    sType = src.sType;
+    maxPerSetDescriptors = src.maxPerSetDescriptors;
+    maxMemoryAllocationSize = src.maxMemoryAllocationSize;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceMaintenance3Properties& safe_VkPhysicalDeviceMaintenance3Properties::operator=(const safe_VkPhysicalDeviceMaintenance3Properties& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    maxPerSetDescriptors = src.maxPerSetDescriptors;
+    maxMemoryAllocationSize = src.maxMemoryAllocationSize;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceMaintenance3Properties::~safe_VkPhysicalDeviceMaintenance3Properties()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceMaintenance3Properties::initialize(const VkPhysicalDeviceMaintenance3Properties* in_struct)
+{
+    sType = in_struct->sType;
+    maxPerSetDescriptors = in_struct->maxPerSetDescriptors;
+    maxMemoryAllocationSize = in_struct->maxMemoryAllocationSize;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceMaintenance3Properties::initialize(const safe_VkPhysicalDeviceMaintenance3Properties* src)
+{
+    sType = src->sType;
+    maxPerSetDescriptors = src->maxPerSetDescriptors;
+    maxMemoryAllocationSize = src->maxMemoryAllocationSize;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDescriptorSetLayoutSupport::safe_VkDescriptorSetLayoutSupport(const VkDescriptorSetLayoutSupport* in_struct) :
+    sType(in_struct->sType),
+    supported(in_struct->supported)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDescriptorSetLayoutSupport::safe_VkDescriptorSetLayoutSupport() :
+    pNext(nullptr)
+{}
+
+safe_VkDescriptorSetLayoutSupport::safe_VkDescriptorSetLayoutSupport(const safe_VkDescriptorSetLayoutSupport& src)
+{
+    sType = src.sType;
+    supported = src.supported;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDescriptorSetLayoutSupport& safe_VkDescriptorSetLayoutSupport::operator=(const safe_VkDescriptorSetLayoutSupport& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    supported = src.supported;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDescriptorSetLayoutSupport::~safe_VkDescriptorSetLayoutSupport()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDescriptorSetLayoutSupport::initialize(const VkDescriptorSetLayoutSupport* in_struct)
+{
+    sType = in_struct->sType;
+    supported = in_struct->supported;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDescriptorSetLayoutSupport::initialize(const safe_VkDescriptorSetLayoutSupport* src)
+{
+    sType = src->sType;
+    supported = src->supported;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceShaderDrawParametersFeatures::safe_VkPhysicalDeviceShaderDrawParametersFeatures(const VkPhysicalDeviceShaderDrawParametersFeatures* in_struct) :
+    sType(in_struct->sType),
+    shaderDrawParameters(in_struct->shaderDrawParameters)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceShaderDrawParametersFeatures::safe_VkPhysicalDeviceShaderDrawParametersFeatures() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceShaderDrawParametersFeatures::safe_VkPhysicalDeviceShaderDrawParametersFeatures(const safe_VkPhysicalDeviceShaderDrawParametersFeatures& src)
+{
+    sType = src.sType;
+    shaderDrawParameters = src.shaderDrawParameters;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceShaderDrawParametersFeatures& safe_VkPhysicalDeviceShaderDrawParametersFeatures::operator=(const safe_VkPhysicalDeviceShaderDrawParametersFeatures& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    shaderDrawParameters = src.shaderDrawParameters;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceShaderDrawParametersFeatures::~safe_VkPhysicalDeviceShaderDrawParametersFeatures()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceShaderDrawParametersFeatures::initialize(const VkPhysicalDeviceShaderDrawParametersFeatures* in_struct)
+{
+    sType = in_struct->sType;
+    shaderDrawParameters = in_struct->shaderDrawParameters;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceShaderDrawParametersFeatures::initialize(const safe_VkPhysicalDeviceShaderDrawParametersFeatures* src)
+{
+    sType = src->sType;
+    shaderDrawParameters = src->shaderDrawParameters;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkSwapchainCreateInfoKHR::safe_VkSwapchainCreateInfoKHR(const VkSwapchainCreateInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    surface(in_struct->surface),
+    minImageCount(in_struct->minImageCount),
+    imageFormat(in_struct->imageFormat),
+    imageColorSpace(in_struct->imageColorSpace),
+    imageExtent(in_struct->imageExtent),
+    imageArrayLayers(in_struct->imageArrayLayers),
+    imageUsage(in_struct->imageUsage),
+    imageSharingMode(in_struct->imageSharingMode),
+    queueFamilyIndexCount(in_struct->queueFamilyIndexCount),
+    pQueueFamilyIndices(nullptr),
+    preTransform(in_struct->preTransform),
+    compositeAlpha(in_struct->compositeAlpha),
+    presentMode(in_struct->presentMode),
+    clipped(in_struct->clipped),
+    oldSwapchain(in_struct->oldSwapchain)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pQueueFamilyIndices) {
+        pQueueFamilyIndices = new uint32_t[in_struct->queueFamilyIndexCount];
+        memcpy ((void *)pQueueFamilyIndices, (void *)in_struct->pQueueFamilyIndices, sizeof(uint32_t)*in_struct->queueFamilyIndexCount);
+    }
+}
+
+safe_VkSwapchainCreateInfoKHR::safe_VkSwapchainCreateInfoKHR() :
+    pNext(nullptr),
+    pQueueFamilyIndices(nullptr)
+{}
+
+safe_VkSwapchainCreateInfoKHR::safe_VkSwapchainCreateInfoKHR(const safe_VkSwapchainCreateInfoKHR& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    surface = src.surface;
+    minImageCount = src.minImageCount;
+    imageFormat = src.imageFormat;
+    imageColorSpace = src.imageColorSpace;
+    imageExtent = src.imageExtent;
+    imageArrayLayers = src.imageArrayLayers;
+    imageUsage = src.imageUsage;
+    imageSharingMode = src.imageSharingMode;
+    queueFamilyIndexCount = src.queueFamilyIndexCount;
+    pQueueFamilyIndices = nullptr;
+    preTransform = src.preTransform;
+    compositeAlpha = src.compositeAlpha;
+    presentMode = src.presentMode;
+    clipped = src.clipped;
+    oldSwapchain = src.oldSwapchain;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pQueueFamilyIndices) {
+        pQueueFamilyIndices = new uint32_t[src.queueFamilyIndexCount];
+        memcpy ((void *)pQueueFamilyIndices, (void *)src.pQueueFamilyIndices, sizeof(uint32_t)*src.queueFamilyIndexCount);
+    }
+}
+
+safe_VkSwapchainCreateInfoKHR& safe_VkSwapchainCreateInfoKHR::operator=(const safe_VkSwapchainCreateInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pQueueFamilyIndices)
+        delete[] pQueueFamilyIndices;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    surface = src.surface;
+    minImageCount = src.minImageCount;
+    imageFormat = src.imageFormat;
+    imageColorSpace = src.imageColorSpace;
+    imageExtent = src.imageExtent;
+    imageArrayLayers = src.imageArrayLayers;
+    imageUsage = src.imageUsage;
+    imageSharingMode = src.imageSharingMode;
+    queueFamilyIndexCount = src.queueFamilyIndexCount;
+    pQueueFamilyIndices = nullptr;
+    preTransform = src.preTransform;
+    compositeAlpha = src.compositeAlpha;
+    presentMode = src.presentMode;
+    clipped = src.clipped;
+    oldSwapchain = src.oldSwapchain;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pQueueFamilyIndices) {
+        pQueueFamilyIndices = new uint32_t[src.queueFamilyIndexCount];
+        memcpy ((void *)pQueueFamilyIndices, (void *)src.pQueueFamilyIndices, sizeof(uint32_t)*src.queueFamilyIndexCount);
+    }
+
+    return *this;
+}
+
+safe_VkSwapchainCreateInfoKHR::~safe_VkSwapchainCreateInfoKHR()
+{
+    if (pQueueFamilyIndices)
+        delete[] pQueueFamilyIndices;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSwapchainCreateInfoKHR::initialize(const VkSwapchainCreateInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    surface = in_struct->surface;
+    minImageCount = in_struct->minImageCount;
+    imageFormat = in_struct->imageFormat;
+    imageColorSpace = in_struct->imageColorSpace;
+    imageExtent = in_struct->imageExtent;
+    imageArrayLayers = in_struct->imageArrayLayers;
+    imageUsage = in_struct->imageUsage;
+    imageSharingMode = in_struct->imageSharingMode;
+    queueFamilyIndexCount = in_struct->queueFamilyIndexCount;
+    pQueueFamilyIndices = nullptr;
+    preTransform = in_struct->preTransform;
+    compositeAlpha = in_struct->compositeAlpha;
+    presentMode = in_struct->presentMode;
+    clipped = in_struct->clipped;
+    oldSwapchain = in_struct->oldSwapchain;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pQueueFamilyIndices) {
+        pQueueFamilyIndices = new uint32_t[in_struct->queueFamilyIndexCount];
+        memcpy ((void *)pQueueFamilyIndices, (void *)in_struct->pQueueFamilyIndices, sizeof(uint32_t)*in_struct->queueFamilyIndexCount);
+    }
+}
+
+void safe_VkSwapchainCreateInfoKHR::initialize(const safe_VkSwapchainCreateInfoKHR* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    surface = src->surface;
+    minImageCount = src->minImageCount;
+    imageFormat = src->imageFormat;
+    imageColorSpace = src->imageColorSpace;
+    imageExtent = src->imageExtent;
+    imageArrayLayers = src->imageArrayLayers;
+    imageUsage = src->imageUsage;
+    imageSharingMode = src->imageSharingMode;
+    queueFamilyIndexCount = src->queueFamilyIndexCount;
+    pQueueFamilyIndices = nullptr;
+    preTransform = src->preTransform;
+    compositeAlpha = src->compositeAlpha;
+    presentMode = src->presentMode;
+    clipped = src->clipped;
+    oldSwapchain = src->oldSwapchain;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pQueueFamilyIndices) {
+        pQueueFamilyIndices = new uint32_t[src->queueFamilyIndexCount];
+        memcpy ((void *)pQueueFamilyIndices, (void *)src->pQueueFamilyIndices, sizeof(uint32_t)*src->queueFamilyIndexCount);
+    }
+}
+
+safe_VkPresentInfoKHR::safe_VkPresentInfoKHR(const VkPresentInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    waitSemaphoreCount(in_struct->waitSemaphoreCount),
+    pWaitSemaphores(nullptr),
+    swapchainCount(in_struct->swapchainCount),
+    pSwapchains(nullptr),
+    pImageIndices(nullptr),
+    pResults(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (waitSemaphoreCount && in_struct->pWaitSemaphores) {
+        pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
+        for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
+            pWaitSemaphores[i] = in_struct->pWaitSemaphores[i];
+        }
+    }
+    if (swapchainCount && in_struct->pSwapchains) {
+        pSwapchains = new VkSwapchainKHR[swapchainCount];
+        for (uint32_t i = 0; i < swapchainCount; ++i) {
+            pSwapchains[i] = in_struct->pSwapchains[i];
+        }
+    }
+    if (in_struct->pImageIndices) {
+        pImageIndices = new uint32_t[in_struct->swapchainCount];
+        memcpy ((void *)pImageIndices, (void *)in_struct->pImageIndices, sizeof(uint32_t)*in_struct->swapchainCount);
+    }
+    if (in_struct->pResults) {
+        pResults = new VkResult[in_struct->swapchainCount];
+        memcpy ((void *)pResults, (void *)in_struct->pResults, sizeof(VkResult)*in_struct->swapchainCount);
+    }
+}
+
+safe_VkPresentInfoKHR::safe_VkPresentInfoKHR() :
+    pNext(nullptr),
+    pWaitSemaphores(nullptr),
+    pSwapchains(nullptr),
+    pImageIndices(nullptr),
+    pResults(nullptr)
+{}
+
+safe_VkPresentInfoKHR::safe_VkPresentInfoKHR(const safe_VkPresentInfoKHR& src)
+{
+    sType = src.sType;
+    waitSemaphoreCount = src.waitSemaphoreCount;
+    pWaitSemaphores = nullptr;
+    swapchainCount = src.swapchainCount;
+    pSwapchains = nullptr;
+    pImageIndices = nullptr;
+    pResults = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (waitSemaphoreCount && src.pWaitSemaphores) {
+        pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
+        for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
+            pWaitSemaphores[i] = src.pWaitSemaphores[i];
+        }
+    }
+    if (swapchainCount && src.pSwapchains) {
+        pSwapchains = new VkSwapchainKHR[swapchainCount];
+        for (uint32_t i = 0; i < swapchainCount; ++i) {
+            pSwapchains[i] = src.pSwapchains[i];
+        }
+    }
+    if (src.pImageIndices) {
+        pImageIndices = new uint32_t[src.swapchainCount];
+        memcpy ((void *)pImageIndices, (void *)src.pImageIndices, sizeof(uint32_t)*src.swapchainCount);
+    }
+    if (src.pResults) {
+        pResults = new VkResult[src.swapchainCount];
+        memcpy ((void *)pResults, (void *)src.pResults, sizeof(VkResult)*src.swapchainCount);
+    }
+}
+
+safe_VkPresentInfoKHR& safe_VkPresentInfoKHR::operator=(const safe_VkPresentInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pWaitSemaphores)
+        delete[] pWaitSemaphores;
+    if (pSwapchains)
+        delete[] pSwapchains;
+    if (pImageIndices)
+        delete[] pImageIndices;
+    if (pResults)
+        delete[] pResults;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    waitSemaphoreCount = src.waitSemaphoreCount;
+    pWaitSemaphores = nullptr;
+    swapchainCount = src.swapchainCount;
+    pSwapchains = nullptr;
+    pImageIndices = nullptr;
+    pResults = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (waitSemaphoreCount && src.pWaitSemaphores) {
+        pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
+        for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
+            pWaitSemaphores[i] = src.pWaitSemaphores[i];
+        }
+    }
+    if (swapchainCount && src.pSwapchains) {
+        pSwapchains = new VkSwapchainKHR[swapchainCount];
+        for (uint32_t i = 0; i < swapchainCount; ++i) {
+            pSwapchains[i] = src.pSwapchains[i];
+        }
+    }
+    if (src.pImageIndices) {
+        pImageIndices = new uint32_t[src.swapchainCount];
+        memcpy ((void *)pImageIndices, (void *)src.pImageIndices, sizeof(uint32_t)*src.swapchainCount);
+    }
+    if (src.pResults) {
+        pResults = new VkResult[src.swapchainCount];
+        memcpy ((void *)pResults, (void *)src.pResults, sizeof(VkResult)*src.swapchainCount);
+    }
+
+    return *this;
+}
+
+safe_VkPresentInfoKHR::~safe_VkPresentInfoKHR()
+{
+    if (pWaitSemaphores)
+        delete[] pWaitSemaphores;
+    if (pSwapchains)
+        delete[] pSwapchains;
+    if (pImageIndices)
+        delete[] pImageIndices;
+    if (pResults)
+        delete[] pResults;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPresentInfoKHR::initialize(const VkPresentInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    waitSemaphoreCount = in_struct->waitSemaphoreCount;
+    pWaitSemaphores = nullptr;
+    swapchainCount = in_struct->swapchainCount;
+    pSwapchains = nullptr;
+    pImageIndices = nullptr;
+    pResults = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (waitSemaphoreCount && in_struct->pWaitSemaphores) {
+        pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
+        for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
+            pWaitSemaphores[i] = in_struct->pWaitSemaphores[i];
+        }
+    }
+    if (swapchainCount && in_struct->pSwapchains) {
+        pSwapchains = new VkSwapchainKHR[swapchainCount];
+        for (uint32_t i = 0; i < swapchainCount; ++i) {
+            pSwapchains[i] = in_struct->pSwapchains[i];
+        }
+    }
+    if (in_struct->pImageIndices) {
+        pImageIndices = new uint32_t[in_struct->swapchainCount];
+        memcpy ((void *)pImageIndices, (void *)in_struct->pImageIndices, sizeof(uint32_t)*in_struct->swapchainCount);
+    }
+    if (in_struct->pResults) {
+        pResults = new VkResult[in_struct->swapchainCount];
+        memcpy ((void *)pResults, (void *)in_struct->pResults, sizeof(VkResult)*in_struct->swapchainCount);
+    }
+}
+
+void safe_VkPresentInfoKHR::initialize(const safe_VkPresentInfoKHR* src)
+{
+    sType = src->sType;
+    waitSemaphoreCount = src->waitSemaphoreCount;
+    pWaitSemaphores = nullptr;
+    swapchainCount = src->swapchainCount;
+    pSwapchains = nullptr;
+    pImageIndices = nullptr;
+    pResults = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (waitSemaphoreCount && src->pWaitSemaphores) {
+        pWaitSemaphores = new VkSemaphore[waitSemaphoreCount];
+        for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
+            pWaitSemaphores[i] = src->pWaitSemaphores[i];
+        }
+    }
+    if (swapchainCount && src->pSwapchains) {
+        pSwapchains = new VkSwapchainKHR[swapchainCount];
+        for (uint32_t i = 0; i < swapchainCount; ++i) {
+            pSwapchains[i] = src->pSwapchains[i];
+        }
+    }
+    if (src->pImageIndices) {
+        pImageIndices = new uint32_t[src->swapchainCount];
+        memcpy ((void *)pImageIndices, (void *)src->pImageIndices, sizeof(uint32_t)*src->swapchainCount);
+    }
+    if (src->pResults) {
+        pResults = new VkResult[src->swapchainCount];
+        memcpy ((void *)pResults, (void *)src->pResults, sizeof(VkResult)*src->swapchainCount);
+    }
+}
+
+safe_VkImageSwapchainCreateInfoKHR::safe_VkImageSwapchainCreateInfoKHR(const VkImageSwapchainCreateInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    swapchain(in_struct->swapchain)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkImageSwapchainCreateInfoKHR::safe_VkImageSwapchainCreateInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkImageSwapchainCreateInfoKHR::safe_VkImageSwapchainCreateInfoKHR(const safe_VkImageSwapchainCreateInfoKHR& src)
+{
+    sType = src.sType;
+    swapchain = src.swapchain;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkImageSwapchainCreateInfoKHR& safe_VkImageSwapchainCreateInfoKHR::operator=(const safe_VkImageSwapchainCreateInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    swapchain = src.swapchain;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkImageSwapchainCreateInfoKHR::~safe_VkImageSwapchainCreateInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkImageSwapchainCreateInfoKHR::initialize(const VkImageSwapchainCreateInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    swapchain = in_struct->swapchain;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkImageSwapchainCreateInfoKHR::initialize(const safe_VkImageSwapchainCreateInfoKHR* src)
+{
+    sType = src->sType;
+    swapchain = src->swapchain;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkBindImageMemorySwapchainInfoKHR::safe_VkBindImageMemorySwapchainInfoKHR(const VkBindImageMemorySwapchainInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    swapchain(in_struct->swapchain),
+    imageIndex(in_struct->imageIndex)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkBindImageMemorySwapchainInfoKHR::safe_VkBindImageMemorySwapchainInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkBindImageMemorySwapchainInfoKHR::safe_VkBindImageMemorySwapchainInfoKHR(const safe_VkBindImageMemorySwapchainInfoKHR& src)
+{
+    sType = src.sType;
+    swapchain = src.swapchain;
+    imageIndex = src.imageIndex;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkBindImageMemorySwapchainInfoKHR& safe_VkBindImageMemorySwapchainInfoKHR::operator=(const safe_VkBindImageMemorySwapchainInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    swapchain = src.swapchain;
+    imageIndex = src.imageIndex;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkBindImageMemorySwapchainInfoKHR::~safe_VkBindImageMemorySwapchainInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkBindImageMemorySwapchainInfoKHR::initialize(const VkBindImageMemorySwapchainInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    swapchain = in_struct->swapchain;
+    imageIndex = in_struct->imageIndex;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkBindImageMemorySwapchainInfoKHR::initialize(const safe_VkBindImageMemorySwapchainInfoKHR* src)
+{
+    sType = src->sType;
+    swapchain = src->swapchain;
+    imageIndex = src->imageIndex;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkAcquireNextImageInfoKHR::safe_VkAcquireNextImageInfoKHR(const VkAcquireNextImageInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    swapchain(in_struct->swapchain),
+    timeout(in_struct->timeout),
+    semaphore(in_struct->semaphore),
+    fence(in_struct->fence),
+    deviceMask(in_struct->deviceMask)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkAcquireNextImageInfoKHR::safe_VkAcquireNextImageInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkAcquireNextImageInfoKHR::safe_VkAcquireNextImageInfoKHR(const safe_VkAcquireNextImageInfoKHR& src)
+{
+    sType = src.sType;
+    swapchain = src.swapchain;
+    timeout = src.timeout;
+    semaphore = src.semaphore;
+    fence = src.fence;
+    deviceMask = src.deviceMask;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkAcquireNextImageInfoKHR& safe_VkAcquireNextImageInfoKHR::operator=(const safe_VkAcquireNextImageInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    swapchain = src.swapchain;
+    timeout = src.timeout;
+    semaphore = src.semaphore;
+    fence = src.fence;
+    deviceMask = src.deviceMask;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkAcquireNextImageInfoKHR::~safe_VkAcquireNextImageInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkAcquireNextImageInfoKHR::initialize(const VkAcquireNextImageInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    swapchain = in_struct->swapchain;
+    timeout = in_struct->timeout;
+    semaphore = in_struct->semaphore;
+    fence = in_struct->fence;
+    deviceMask = in_struct->deviceMask;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkAcquireNextImageInfoKHR::initialize(const safe_VkAcquireNextImageInfoKHR* src)
+{
+    sType = src->sType;
+    swapchain = src->swapchain;
+    timeout = src->timeout;
+    semaphore = src->semaphore;
+    fence = src->fence;
+    deviceMask = src->deviceMask;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDeviceGroupPresentCapabilitiesKHR::safe_VkDeviceGroupPresentCapabilitiesKHR(const VkDeviceGroupPresentCapabilitiesKHR* in_struct) :
+    sType(in_struct->sType),
+    modes(in_struct->modes)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    for (uint32_t i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; ++i) {
+        presentMask[i] = in_struct->presentMask[i];
+    }
+}
+
+safe_VkDeviceGroupPresentCapabilitiesKHR::safe_VkDeviceGroupPresentCapabilitiesKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkDeviceGroupPresentCapabilitiesKHR::safe_VkDeviceGroupPresentCapabilitiesKHR(const safe_VkDeviceGroupPresentCapabilitiesKHR& src)
+{
+    sType = src.sType;
+    modes = src.modes;
+    pNext = SafePnextCopy(src.pNext);
+    for (uint32_t i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; ++i) {
+        presentMask[i] = src.presentMask[i];
+    }
+}
+
+safe_VkDeviceGroupPresentCapabilitiesKHR& safe_VkDeviceGroupPresentCapabilitiesKHR::operator=(const safe_VkDeviceGroupPresentCapabilitiesKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    modes = src.modes;
+    pNext = SafePnextCopy(src.pNext);
+    for (uint32_t i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; ++i) {
+        presentMask[i] = src.presentMask[i];
+    }
+
+    return *this;
+}
+
+safe_VkDeviceGroupPresentCapabilitiesKHR::~safe_VkDeviceGroupPresentCapabilitiesKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDeviceGroupPresentCapabilitiesKHR::initialize(const VkDeviceGroupPresentCapabilitiesKHR* in_struct)
+{
+    sType = in_struct->sType;
+    modes = in_struct->modes;
+    pNext = SafePnextCopy(in_struct->pNext);
+    for (uint32_t i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; ++i) {
+        presentMask[i] = in_struct->presentMask[i];
+    }
+}
+
+void safe_VkDeviceGroupPresentCapabilitiesKHR::initialize(const safe_VkDeviceGroupPresentCapabilitiesKHR* src)
+{
+    sType = src->sType;
+    modes = src->modes;
+    pNext = SafePnextCopy(src->pNext);
+    for (uint32_t i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; ++i) {
+        presentMask[i] = src->presentMask[i];
+    }
+}
+
+safe_VkDeviceGroupPresentInfoKHR::safe_VkDeviceGroupPresentInfoKHR(const VkDeviceGroupPresentInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    swapchainCount(in_struct->swapchainCount),
+    pDeviceMasks(nullptr),
+    mode(in_struct->mode)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pDeviceMasks) {
+        pDeviceMasks = new uint32_t[in_struct->swapchainCount];
+        memcpy ((void *)pDeviceMasks, (void *)in_struct->pDeviceMasks, sizeof(uint32_t)*in_struct->swapchainCount);
+    }
+}
+
+safe_VkDeviceGroupPresentInfoKHR::safe_VkDeviceGroupPresentInfoKHR() :
+    pNext(nullptr),
+    pDeviceMasks(nullptr)
+{}
+
+safe_VkDeviceGroupPresentInfoKHR::safe_VkDeviceGroupPresentInfoKHR(const safe_VkDeviceGroupPresentInfoKHR& src)
+{
+    sType = src.sType;
+    swapchainCount = src.swapchainCount;
+    pDeviceMasks = nullptr;
+    mode = src.mode;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pDeviceMasks) {
+        pDeviceMasks = new uint32_t[src.swapchainCount];
+        memcpy ((void *)pDeviceMasks, (void *)src.pDeviceMasks, sizeof(uint32_t)*src.swapchainCount);
+    }
+}
+
+safe_VkDeviceGroupPresentInfoKHR& safe_VkDeviceGroupPresentInfoKHR::operator=(const safe_VkDeviceGroupPresentInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pDeviceMasks)
+        delete[] pDeviceMasks;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    swapchainCount = src.swapchainCount;
+    pDeviceMasks = nullptr;
+    mode = src.mode;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pDeviceMasks) {
+        pDeviceMasks = new uint32_t[src.swapchainCount];
+        memcpy ((void *)pDeviceMasks, (void *)src.pDeviceMasks, sizeof(uint32_t)*src.swapchainCount);
+    }
+
+    return *this;
+}
+
+safe_VkDeviceGroupPresentInfoKHR::~safe_VkDeviceGroupPresentInfoKHR()
+{
+    if (pDeviceMasks)
+        delete[] pDeviceMasks;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDeviceGroupPresentInfoKHR::initialize(const VkDeviceGroupPresentInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    swapchainCount = in_struct->swapchainCount;
+    pDeviceMasks = nullptr;
+    mode = in_struct->mode;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pDeviceMasks) {
+        pDeviceMasks = new uint32_t[in_struct->swapchainCount];
+        memcpy ((void *)pDeviceMasks, (void *)in_struct->pDeviceMasks, sizeof(uint32_t)*in_struct->swapchainCount);
+    }
+}
+
+void safe_VkDeviceGroupPresentInfoKHR::initialize(const safe_VkDeviceGroupPresentInfoKHR* src)
+{
+    sType = src->sType;
+    swapchainCount = src->swapchainCount;
+    pDeviceMasks = nullptr;
+    mode = src->mode;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pDeviceMasks) {
+        pDeviceMasks = new uint32_t[src->swapchainCount];
+        memcpy ((void *)pDeviceMasks, (void *)src->pDeviceMasks, sizeof(uint32_t)*src->swapchainCount);
+    }
+}
+
+safe_VkDeviceGroupSwapchainCreateInfoKHR::safe_VkDeviceGroupSwapchainCreateInfoKHR(const VkDeviceGroupSwapchainCreateInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    modes(in_struct->modes)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDeviceGroupSwapchainCreateInfoKHR::safe_VkDeviceGroupSwapchainCreateInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkDeviceGroupSwapchainCreateInfoKHR::safe_VkDeviceGroupSwapchainCreateInfoKHR(const safe_VkDeviceGroupSwapchainCreateInfoKHR& src)
+{
+    sType = src.sType;
+    modes = src.modes;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDeviceGroupSwapchainCreateInfoKHR& safe_VkDeviceGroupSwapchainCreateInfoKHR::operator=(const safe_VkDeviceGroupSwapchainCreateInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    modes = src.modes;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDeviceGroupSwapchainCreateInfoKHR::~safe_VkDeviceGroupSwapchainCreateInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDeviceGroupSwapchainCreateInfoKHR::initialize(const VkDeviceGroupSwapchainCreateInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    modes = in_struct->modes;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDeviceGroupSwapchainCreateInfoKHR::initialize(const safe_VkDeviceGroupSwapchainCreateInfoKHR* src)
+{
+    sType = src->sType;
+    modes = src->modes;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDisplayPropertiesKHR::safe_VkDisplayPropertiesKHR(const VkDisplayPropertiesKHR* in_struct) :
+    display(in_struct->display),
+    physicalDimensions(in_struct->physicalDimensions),
+    physicalResolution(in_struct->physicalResolution),
+    supportedTransforms(in_struct->supportedTransforms),
+    planeReorderPossible(in_struct->planeReorderPossible),
+    persistentContent(in_struct->persistentContent)
+{
+    displayName = SafeStringCopy(in_struct->displayName);
+}
+
+safe_VkDisplayPropertiesKHR::safe_VkDisplayPropertiesKHR() :
+    displayName(nullptr)
+{}
+
+safe_VkDisplayPropertiesKHR::safe_VkDisplayPropertiesKHR(const safe_VkDisplayPropertiesKHR& src)
+{
+    display = src.display;
+    physicalDimensions = src.physicalDimensions;
+    physicalResolution = src.physicalResolution;
+    supportedTransforms = src.supportedTransforms;
+    planeReorderPossible = src.planeReorderPossible;
+    persistentContent = src.persistentContent;
+    displayName = SafeStringCopy(src.displayName);
+}
+
+safe_VkDisplayPropertiesKHR& safe_VkDisplayPropertiesKHR::operator=(const safe_VkDisplayPropertiesKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (displayName) delete [] displayName;
+
+    display = src.display;
+    physicalDimensions = src.physicalDimensions;
+    physicalResolution = src.physicalResolution;
+    supportedTransforms = src.supportedTransforms;
+    planeReorderPossible = src.planeReorderPossible;
+    persistentContent = src.persistentContent;
+    displayName = SafeStringCopy(src.displayName);
+
+    return *this;
+}
+
+safe_VkDisplayPropertiesKHR::~safe_VkDisplayPropertiesKHR()
+{
+    if (displayName) delete [] displayName;
+}
+
+void safe_VkDisplayPropertiesKHR::initialize(const VkDisplayPropertiesKHR* in_struct)
+{
+    display = in_struct->display;
+    physicalDimensions = in_struct->physicalDimensions;
+    physicalResolution = in_struct->physicalResolution;
+    supportedTransforms = in_struct->supportedTransforms;
+    planeReorderPossible = in_struct->planeReorderPossible;
+    persistentContent = in_struct->persistentContent;
+    displayName = SafeStringCopy(in_struct->displayName);
+}
+
+void safe_VkDisplayPropertiesKHR::initialize(const safe_VkDisplayPropertiesKHR* src)
+{
+    display = src->display;
+    physicalDimensions = src->physicalDimensions;
+    physicalResolution = src->physicalResolution;
+    supportedTransforms = src->supportedTransforms;
+    planeReorderPossible = src->planeReorderPossible;
+    persistentContent = src->persistentContent;
+    displayName = SafeStringCopy(src->displayName);
+}
+
+safe_VkDisplayModeCreateInfoKHR::safe_VkDisplayModeCreateInfoKHR(const VkDisplayModeCreateInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    parameters(in_struct->parameters)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDisplayModeCreateInfoKHR::safe_VkDisplayModeCreateInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkDisplayModeCreateInfoKHR::safe_VkDisplayModeCreateInfoKHR(const safe_VkDisplayModeCreateInfoKHR& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    parameters = src.parameters;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDisplayModeCreateInfoKHR& safe_VkDisplayModeCreateInfoKHR::operator=(const safe_VkDisplayModeCreateInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    parameters = src.parameters;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDisplayModeCreateInfoKHR::~safe_VkDisplayModeCreateInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDisplayModeCreateInfoKHR::initialize(const VkDisplayModeCreateInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    parameters = in_struct->parameters;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDisplayModeCreateInfoKHR::initialize(const safe_VkDisplayModeCreateInfoKHR* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    parameters = src->parameters;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDisplaySurfaceCreateInfoKHR::safe_VkDisplaySurfaceCreateInfoKHR(const VkDisplaySurfaceCreateInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    displayMode(in_struct->displayMode),
+    planeIndex(in_struct->planeIndex),
+    planeStackIndex(in_struct->planeStackIndex),
+    transform(in_struct->transform),
+    globalAlpha(in_struct->globalAlpha),
+    alphaMode(in_struct->alphaMode),
+    imageExtent(in_struct->imageExtent)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDisplaySurfaceCreateInfoKHR::safe_VkDisplaySurfaceCreateInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkDisplaySurfaceCreateInfoKHR::safe_VkDisplaySurfaceCreateInfoKHR(const safe_VkDisplaySurfaceCreateInfoKHR& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    displayMode = src.displayMode;
+    planeIndex = src.planeIndex;
+    planeStackIndex = src.planeStackIndex;
+    transform = src.transform;
+    globalAlpha = src.globalAlpha;
+    alphaMode = src.alphaMode;
+    imageExtent = src.imageExtent;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDisplaySurfaceCreateInfoKHR& safe_VkDisplaySurfaceCreateInfoKHR::operator=(const safe_VkDisplaySurfaceCreateInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    displayMode = src.displayMode;
+    planeIndex = src.planeIndex;
+    planeStackIndex = src.planeStackIndex;
+    transform = src.transform;
+    globalAlpha = src.globalAlpha;
+    alphaMode = src.alphaMode;
+    imageExtent = src.imageExtent;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDisplaySurfaceCreateInfoKHR::~safe_VkDisplaySurfaceCreateInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDisplaySurfaceCreateInfoKHR::initialize(const VkDisplaySurfaceCreateInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    displayMode = in_struct->displayMode;
+    planeIndex = in_struct->planeIndex;
+    planeStackIndex = in_struct->planeStackIndex;
+    transform = in_struct->transform;
+    globalAlpha = in_struct->globalAlpha;
+    alphaMode = in_struct->alphaMode;
+    imageExtent = in_struct->imageExtent;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDisplaySurfaceCreateInfoKHR::initialize(const safe_VkDisplaySurfaceCreateInfoKHR* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    displayMode = src->displayMode;
+    planeIndex = src->planeIndex;
+    planeStackIndex = src->planeStackIndex;
+    transform = src->transform;
+    globalAlpha = src->globalAlpha;
+    alphaMode = src->alphaMode;
+    imageExtent = src->imageExtent;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDisplayPresentInfoKHR::safe_VkDisplayPresentInfoKHR(const VkDisplayPresentInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    srcRect(in_struct->srcRect),
+    dstRect(in_struct->dstRect),
+    persistent(in_struct->persistent)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDisplayPresentInfoKHR::safe_VkDisplayPresentInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkDisplayPresentInfoKHR::safe_VkDisplayPresentInfoKHR(const safe_VkDisplayPresentInfoKHR& src)
+{
+    sType = src.sType;
+    srcRect = src.srcRect;
+    dstRect = src.dstRect;
+    persistent = src.persistent;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDisplayPresentInfoKHR& safe_VkDisplayPresentInfoKHR::operator=(const safe_VkDisplayPresentInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    srcRect = src.srcRect;
+    dstRect = src.dstRect;
+    persistent = src.persistent;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDisplayPresentInfoKHR::~safe_VkDisplayPresentInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDisplayPresentInfoKHR::initialize(const VkDisplayPresentInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    srcRect = in_struct->srcRect;
+    dstRect = in_struct->dstRect;
+    persistent = in_struct->persistent;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDisplayPresentInfoKHR::initialize(const safe_VkDisplayPresentInfoKHR* src)
+{
+    sType = src->sType;
+    srcRect = src->srcRect;
+    dstRect = src->dstRect;
+    persistent = src->persistent;
+    pNext = SafePnextCopy(src->pNext);
+}
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+
+safe_VkImportMemoryWin32HandleInfoKHR::safe_VkImportMemoryWin32HandleInfoKHR(const VkImportMemoryWin32HandleInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    handleType(in_struct->handleType),
+    handle(in_struct->handle),
+    name(in_struct->name)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkImportMemoryWin32HandleInfoKHR::safe_VkImportMemoryWin32HandleInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkImportMemoryWin32HandleInfoKHR::safe_VkImportMemoryWin32HandleInfoKHR(const safe_VkImportMemoryWin32HandleInfoKHR& src)
+{
+    sType = src.sType;
+    handleType = src.handleType;
+    handle = src.handle;
+    name = src.name;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkImportMemoryWin32HandleInfoKHR& safe_VkImportMemoryWin32HandleInfoKHR::operator=(const safe_VkImportMemoryWin32HandleInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    handleType = src.handleType;
+    handle = src.handle;
+    name = src.name;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkImportMemoryWin32HandleInfoKHR::~safe_VkImportMemoryWin32HandleInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkImportMemoryWin32HandleInfoKHR::initialize(const VkImportMemoryWin32HandleInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    handleType = in_struct->handleType;
+    handle = in_struct->handle;
+    name = in_struct->name;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkImportMemoryWin32HandleInfoKHR::initialize(const safe_VkImportMemoryWin32HandleInfoKHR* src)
+{
+    sType = src->sType;
+    handleType = src->handleType;
+    handle = src->handle;
+    name = src->name;
+    pNext = SafePnextCopy(src->pNext);
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+
+safe_VkExportMemoryWin32HandleInfoKHR::safe_VkExportMemoryWin32HandleInfoKHR(const VkExportMemoryWin32HandleInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    pAttributes(nullptr),
+    dwAccess(in_struct->dwAccess),
+    name(in_struct->name)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pAttributes) {
+        pAttributes = new SECURITY_ATTRIBUTES(*in_struct->pAttributes);
+    }
+}
+
+safe_VkExportMemoryWin32HandleInfoKHR::safe_VkExportMemoryWin32HandleInfoKHR() :
+    pNext(nullptr),
+    pAttributes(nullptr)
+{}
+
+safe_VkExportMemoryWin32HandleInfoKHR::safe_VkExportMemoryWin32HandleInfoKHR(const safe_VkExportMemoryWin32HandleInfoKHR& src)
+{
+    sType = src.sType;
+    pAttributes = nullptr;
+    dwAccess = src.dwAccess;
+    name = src.name;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pAttributes) {
+        pAttributes = new SECURITY_ATTRIBUTES(*src.pAttributes);
+    }
+}
+
+safe_VkExportMemoryWin32HandleInfoKHR& safe_VkExportMemoryWin32HandleInfoKHR::operator=(const safe_VkExportMemoryWin32HandleInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pAttributes)
+        delete pAttributes;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    pAttributes = nullptr;
+    dwAccess = src.dwAccess;
+    name = src.name;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pAttributes) {
+        pAttributes = new SECURITY_ATTRIBUTES(*src.pAttributes);
+    }
+
+    return *this;
+}
+
+safe_VkExportMemoryWin32HandleInfoKHR::~safe_VkExportMemoryWin32HandleInfoKHR()
+{
+    if (pAttributes)
+        delete pAttributes;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkExportMemoryWin32HandleInfoKHR::initialize(const VkExportMemoryWin32HandleInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    pAttributes = nullptr;
+    dwAccess = in_struct->dwAccess;
+    name = in_struct->name;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pAttributes) {
+        pAttributes = new SECURITY_ATTRIBUTES(*in_struct->pAttributes);
+    }
+}
+
+void safe_VkExportMemoryWin32HandleInfoKHR::initialize(const safe_VkExportMemoryWin32HandleInfoKHR* src)
+{
+    sType = src->sType;
+    pAttributes = nullptr;
+    dwAccess = src->dwAccess;
+    name = src->name;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pAttributes) {
+        pAttributes = new SECURITY_ATTRIBUTES(*src->pAttributes);
+    }
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+
+safe_VkMemoryWin32HandlePropertiesKHR::safe_VkMemoryWin32HandlePropertiesKHR(const VkMemoryWin32HandlePropertiesKHR* in_struct) :
+    sType(in_struct->sType),
+    memoryTypeBits(in_struct->memoryTypeBits)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkMemoryWin32HandlePropertiesKHR::safe_VkMemoryWin32HandlePropertiesKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkMemoryWin32HandlePropertiesKHR::safe_VkMemoryWin32HandlePropertiesKHR(const safe_VkMemoryWin32HandlePropertiesKHR& src)
+{
+    sType = src.sType;
+    memoryTypeBits = src.memoryTypeBits;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkMemoryWin32HandlePropertiesKHR& safe_VkMemoryWin32HandlePropertiesKHR::operator=(const safe_VkMemoryWin32HandlePropertiesKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    memoryTypeBits = src.memoryTypeBits;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkMemoryWin32HandlePropertiesKHR::~safe_VkMemoryWin32HandlePropertiesKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkMemoryWin32HandlePropertiesKHR::initialize(const VkMemoryWin32HandlePropertiesKHR* in_struct)
+{
+    sType = in_struct->sType;
+    memoryTypeBits = in_struct->memoryTypeBits;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkMemoryWin32HandlePropertiesKHR::initialize(const safe_VkMemoryWin32HandlePropertiesKHR* src)
+{
+    sType = src->sType;
+    memoryTypeBits = src->memoryTypeBits;
+    pNext = SafePnextCopy(src->pNext);
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+
+safe_VkMemoryGetWin32HandleInfoKHR::safe_VkMemoryGetWin32HandleInfoKHR(const VkMemoryGetWin32HandleInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    memory(in_struct->memory),
+    handleType(in_struct->handleType)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkMemoryGetWin32HandleInfoKHR::safe_VkMemoryGetWin32HandleInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkMemoryGetWin32HandleInfoKHR::safe_VkMemoryGetWin32HandleInfoKHR(const safe_VkMemoryGetWin32HandleInfoKHR& src)
+{
+    sType = src.sType;
+    memory = src.memory;
+    handleType = src.handleType;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkMemoryGetWin32HandleInfoKHR& safe_VkMemoryGetWin32HandleInfoKHR::operator=(const safe_VkMemoryGetWin32HandleInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    memory = src.memory;
+    handleType = src.handleType;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkMemoryGetWin32HandleInfoKHR::~safe_VkMemoryGetWin32HandleInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkMemoryGetWin32HandleInfoKHR::initialize(const VkMemoryGetWin32HandleInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    memory = in_struct->memory;
+    handleType = in_struct->handleType;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkMemoryGetWin32HandleInfoKHR::initialize(const safe_VkMemoryGetWin32HandleInfoKHR* src)
+{
+    sType = src->sType;
+    memory = src->memory;
+    handleType = src->handleType;
+    pNext = SafePnextCopy(src->pNext);
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+
+safe_VkImportMemoryFdInfoKHR::safe_VkImportMemoryFdInfoKHR(const VkImportMemoryFdInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    handleType(in_struct->handleType),
+    fd(in_struct->fd)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkImportMemoryFdInfoKHR::safe_VkImportMemoryFdInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkImportMemoryFdInfoKHR::safe_VkImportMemoryFdInfoKHR(const safe_VkImportMemoryFdInfoKHR& src)
+{
+    sType = src.sType;
+    handleType = src.handleType;
+    fd = src.fd;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkImportMemoryFdInfoKHR& safe_VkImportMemoryFdInfoKHR::operator=(const safe_VkImportMemoryFdInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    handleType = src.handleType;
+    fd = src.fd;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkImportMemoryFdInfoKHR::~safe_VkImportMemoryFdInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkImportMemoryFdInfoKHR::initialize(const VkImportMemoryFdInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    handleType = in_struct->handleType;
+    fd = in_struct->fd;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkImportMemoryFdInfoKHR::initialize(const safe_VkImportMemoryFdInfoKHR* src)
+{
+    sType = src->sType;
+    handleType = src->handleType;
+    fd = src->fd;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkMemoryFdPropertiesKHR::safe_VkMemoryFdPropertiesKHR(const VkMemoryFdPropertiesKHR* in_struct) :
+    sType(in_struct->sType),
+    memoryTypeBits(in_struct->memoryTypeBits)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkMemoryFdPropertiesKHR::safe_VkMemoryFdPropertiesKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkMemoryFdPropertiesKHR::safe_VkMemoryFdPropertiesKHR(const safe_VkMemoryFdPropertiesKHR& src)
+{
+    sType = src.sType;
+    memoryTypeBits = src.memoryTypeBits;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkMemoryFdPropertiesKHR& safe_VkMemoryFdPropertiesKHR::operator=(const safe_VkMemoryFdPropertiesKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    memoryTypeBits = src.memoryTypeBits;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkMemoryFdPropertiesKHR::~safe_VkMemoryFdPropertiesKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkMemoryFdPropertiesKHR::initialize(const VkMemoryFdPropertiesKHR* in_struct)
+{
+    sType = in_struct->sType;
+    memoryTypeBits = in_struct->memoryTypeBits;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkMemoryFdPropertiesKHR::initialize(const safe_VkMemoryFdPropertiesKHR* src)
+{
+    sType = src->sType;
+    memoryTypeBits = src->memoryTypeBits;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkMemoryGetFdInfoKHR::safe_VkMemoryGetFdInfoKHR(const VkMemoryGetFdInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    memory(in_struct->memory),
+    handleType(in_struct->handleType)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkMemoryGetFdInfoKHR::safe_VkMemoryGetFdInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkMemoryGetFdInfoKHR::safe_VkMemoryGetFdInfoKHR(const safe_VkMemoryGetFdInfoKHR& src)
+{
+    sType = src.sType;
+    memory = src.memory;
+    handleType = src.handleType;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkMemoryGetFdInfoKHR& safe_VkMemoryGetFdInfoKHR::operator=(const safe_VkMemoryGetFdInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    memory = src.memory;
+    handleType = src.handleType;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkMemoryGetFdInfoKHR::~safe_VkMemoryGetFdInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkMemoryGetFdInfoKHR::initialize(const VkMemoryGetFdInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    memory = in_struct->memory;
+    handleType = in_struct->handleType;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkMemoryGetFdInfoKHR::initialize(const safe_VkMemoryGetFdInfoKHR* src)
+{
+    sType = src->sType;
+    memory = src->memory;
+    handleType = src->handleType;
+    pNext = SafePnextCopy(src->pNext);
+}
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+
+safe_VkWin32KeyedMutexAcquireReleaseInfoKHR::safe_VkWin32KeyedMutexAcquireReleaseInfoKHR(const VkWin32KeyedMutexAcquireReleaseInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    acquireCount(in_struct->acquireCount),
+    pAcquireSyncs(nullptr),
+    pAcquireKeys(nullptr),
+    pAcquireTimeouts(nullptr),
+    releaseCount(in_struct->releaseCount),
+    pReleaseSyncs(nullptr),
+    pReleaseKeys(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (acquireCount && in_struct->pAcquireSyncs) {
+        pAcquireSyncs = new VkDeviceMemory[acquireCount];
+        for (uint32_t i = 0; i < acquireCount; ++i) {
+            pAcquireSyncs[i] = in_struct->pAcquireSyncs[i];
+        }
+    }
+    if (in_struct->pAcquireKeys) {
+        pAcquireKeys = new uint64_t[in_struct->acquireCount];
+        memcpy ((void *)pAcquireKeys, (void *)in_struct->pAcquireKeys, sizeof(uint64_t)*in_struct->acquireCount);
+    }
+    if (in_struct->pAcquireTimeouts) {
+        pAcquireTimeouts = new uint32_t[in_struct->acquireCount];
+        memcpy ((void *)pAcquireTimeouts, (void *)in_struct->pAcquireTimeouts, sizeof(uint32_t)*in_struct->acquireCount);
+    }
+    if (releaseCount && in_struct->pReleaseSyncs) {
+        pReleaseSyncs = new VkDeviceMemory[releaseCount];
+        for (uint32_t i = 0; i < releaseCount; ++i) {
+            pReleaseSyncs[i] = in_struct->pReleaseSyncs[i];
+        }
+    }
+    if (in_struct->pReleaseKeys) {
+        pReleaseKeys = new uint64_t[in_struct->releaseCount];
+        memcpy ((void *)pReleaseKeys, (void *)in_struct->pReleaseKeys, sizeof(uint64_t)*in_struct->releaseCount);
+    }
+}
+
+safe_VkWin32KeyedMutexAcquireReleaseInfoKHR::safe_VkWin32KeyedMutexAcquireReleaseInfoKHR() :
+    pNext(nullptr),
+    pAcquireSyncs(nullptr),
+    pAcquireKeys(nullptr),
+    pAcquireTimeouts(nullptr),
+    pReleaseSyncs(nullptr),
+    pReleaseKeys(nullptr)
+{}
+
+safe_VkWin32KeyedMutexAcquireReleaseInfoKHR::safe_VkWin32KeyedMutexAcquireReleaseInfoKHR(const safe_VkWin32KeyedMutexAcquireReleaseInfoKHR& src)
+{
+    sType = src.sType;
+    acquireCount = src.acquireCount;
+    pAcquireSyncs = nullptr;
+    pAcquireKeys = nullptr;
+    pAcquireTimeouts = nullptr;
+    releaseCount = src.releaseCount;
+    pReleaseSyncs = nullptr;
+    pReleaseKeys = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (acquireCount && src.pAcquireSyncs) {
+        pAcquireSyncs = new VkDeviceMemory[acquireCount];
+        for (uint32_t i = 0; i < acquireCount; ++i) {
+            pAcquireSyncs[i] = src.pAcquireSyncs[i];
+        }
+    }
+    if (src.pAcquireKeys) {
+        pAcquireKeys = new uint64_t[src.acquireCount];
+        memcpy ((void *)pAcquireKeys, (void *)src.pAcquireKeys, sizeof(uint64_t)*src.acquireCount);
+    }
+    if (src.pAcquireTimeouts) {
+        pAcquireTimeouts = new uint32_t[src.acquireCount];
+        memcpy ((void *)pAcquireTimeouts, (void *)src.pAcquireTimeouts, sizeof(uint32_t)*src.acquireCount);
+    }
+    if (releaseCount && src.pReleaseSyncs) {
+        pReleaseSyncs = new VkDeviceMemory[releaseCount];
+        for (uint32_t i = 0; i < releaseCount; ++i) {
+            pReleaseSyncs[i] = src.pReleaseSyncs[i];
+        }
+    }
+    if (src.pReleaseKeys) {
+        pReleaseKeys = new uint64_t[src.releaseCount];
+        memcpy ((void *)pReleaseKeys, (void *)src.pReleaseKeys, sizeof(uint64_t)*src.releaseCount);
+    }
+}
+
+safe_VkWin32KeyedMutexAcquireReleaseInfoKHR& safe_VkWin32KeyedMutexAcquireReleaseInfoKHR::operator=(const safe_VkWin32KeyedMutexAcquireReleaseInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pAcquireSyncs)
+        delete[] pAcquireSyncs;
+    if (pAcquireKeys)
+        delete[] pAcquireKeys;
+    if (pAcquireTimeouts)
+        delete[] pAcquireTimeouts;
+    if (pReleaseSyncs)
+        delete[] pReleaseSyncs;
+    if (pReleaseKeys)
+        delete[] pReleaseKeys;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    acquireCount = src.acquireCount;
+    pAcquireSyncs = nullptr;
+    pAcquireKeys = nullptr;
+    pAcquireTimeouts = nullptr;
+    releaseCount = src.releaseCount;
+    pReleaseSyncs = nullptr;
+    pReleaseKeys = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (acquireCount && src.pAcquireSyncs) {
+        pAcquireSyncs = new VkDeviceMemory[acquireCount];
+        for (uint32_t i = 0; i < acquireCount; ++i) {
+            pAcquireSyncs[i] = src.pAcquireSyncs[i];
+        }
+    }
+    if (src.pAcquireKeys) {
+        pAcquireKeys = new uint64_t[src.acquireCount];
+        memcpy ((void *)pAcquireKeys, (void *)src.pAcquireKeys, sizeof(uint64_t)*src.acquireCount);
+    }
+    if (src.pAcquireTimeouts) {
+        pAcquireTimeouts = new uint32_t[src.acquireCount];
+        memcpy ((void *)pAcquireTimeouts, (void *)src.pAcquireTimeouts, sizeof(uint32_t)*src.acquireCount);
+    }
+    if (releaseCount && src.pReleaseSyncs) {
+        pReleaseSyncs = new VkDeviceMemory[releaseCount];
+        for (uint32_t i = 0; i < releaseCount; ++i) {
+            pReleaseSyncs[i] = src.pReleaseSyncs[i];
+        }
+    }
+    if (src.pReleaseKeys) {
+        pReleaseKeys = new uint64_t[src.releaseCount];
+        memcpy ((void *)pReleaseKeys, (void *)src.pReleaseKeys, sizeof(uint64_t)*src.releaseCount);
+    }
+
+    return *this;
+}
+
+safe_VkWin32KeyedMutexAcquireReleaseInfoKHR::~safe_VkWin32KeyedMutexAcquireReleaseInfoKHR()
+{
+    if (pAcquireSyncs)
+        delete[] pAcquireSyncs;
+    if (pAcquireKeys)
+        delete[] pAcquireKeys;
+    if (pAcquireTimeouts)
+        delete[] pAcquireTimeouts;
+    if (pReleaseSyncs)
+        delete[] pReleaseSyncs;
+    if (pReleaseKeys)
+        delete[] pReleaseKeys;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkWin32KeyedMutexAcquireReleaseInfoKHR::initialize(const VkWin32KeyedMutexAcquireReleaseInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    acquireCount = in_struct->acquireCount;
+    pAcquireSyncs = nullptr;
+    pAcquireKeys = nullptr;
+    pAcquireTimeouts = nullptr;
+    releaseCount = in_struct->releaseCount;
+    pReleaseSyncs = nullptr;
+    pReleaseKeys = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (acquireCount && in_struct->pAcquireSyncs) {
+        pAcquireSyncs = new VkDeviceMemory[acquireCount];
+        for (uint32_t i = 0; i < acquireCount; ++i) {
+            pAcquireSyncs[i] = in_struct->pAcquireSyncs[i];
+        }
+    }
+    if (in_struct->pAcquireKeys) {
+        pAcquireKeys = new uint64_t[in_struct->acquireCount];
+        memcpy ((void *)pAcquireKeys, (void *)in_struct->pAcquireKeys, sizeof(uint64_t)*in_struct->acquireCount);
+    }
+    if (in_struct->pAcquireTimeouts) {
+        pAcquireTimeouts = new uint32_t[in_struct->acquireCount];
+        memcpy ((void *)pAcquireTimeouts, (void *)in_struct->pAcquireTimeouts, sizeof(uint32_t)*in_struct->acquireCount);
+    }
+    if (releaseCount && in_struct->pReleaseSyncs) {
+        pReleaseSyncs = new VkDeviceMemory[releaseCount];
+        for (uint32_t i = 0; i < releaseCount; ++i) {
+            pReleaseSyncs[i] = in_struct->pReleaseSyncs[i];
+        }
+    }
+    if (in_struct->pReleaseKeys) {
+        pReleaseKeys = new uint64_t[in_struct->releaseCount];
+        memcpy ((void *)pReleaseKeys, (void *)in_struct->pReleaseKeys, sizeof(uint64_t)*in_struct->releaseCount);
+    }
+}
+
+void safe_VkWin32KeyedMutexAcquireReleaseInfoKHR::initialize(const safe_VkWin32KeyedMutexAcquireReleaseInfoKHR* src)
+{
+    sType = src->sType;
+    acquireCount = src->acquireCount;
+    pAcquireSyncs = nullptr;
+    pAcquireKeys = nullptr;
+    pAcquireTimeouts = nullptr;
+    releaseCount = src->releaseCount;
+    pReleaseSyncs = nullptr;
+    pReleaseKeys = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (acquireCount && src->pAcquireSyncs) {
+        pAcquireSyncs = new VkDeviceMemory[acquireCount];
+        for (uint32_t i = 0; i < acquireCount; ++i) {
+            pAcquireSyncs[i] = src->pAcquireSyncs[i];
+        }
+    }
+    if (src->pAcquireKeys) {
+        pAcquireKeys = new uint64_t[src->acquireCount];
+        memcpy ((void *)pAcquireKeys, (void *)src->pAcquireKeys, sizeof(uint64_t)*src->acquireCount);
+    }
+    if (src->pAcquireTimeouts) {
+        pAcquireTimeouts = new uint32_t[src->acquireCount];
+        memcpy ((void *)pAcquireTimeouts, (void *)src->pAcquireTimeouts, sizeof(uint32_t)*src->acquireCount);
+    }
+    if (releaseCount && src->pReleaseSyncs) {
+        pReleaseSyncs = new VkDeviceMemory[releaseCount];
+        for (uint32_t i = 0; i < releaseCount; ++i) {
+            pReleaseSyncs[i] = src->pReleaseSyncs[i];
+        }
+    }
+    if (src->pReleaseKeys) {
+        pReleaseKeys = new uint64_t[src->releaseCount];
+        memcpy ((void *)pReleaseKeys, (void *)src->pReleaseKeys, sizeof(uint64_t)*src->releaseCount);
+    }
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+
+safe_VkImportSemaphoreWin32HandleInfoKHR::safe_VkImportSemaphoreWin32HandleInfoKHR(const VkImportSemaphoreWin32HandleInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    semaphore(in_struct->semaphore),
+    flags(in_struct->flags),
+    handleType(in_struct->handleType),
+    handle(in_struct->handle),
+    name(in_struct->name)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkImportSemaphoreWin32HandleInfoKHR::safe_VkImportSemaphoreWin32HandleInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkImportSemaphoreWin32HandleInfoKHR::safe_VkImportSemaphoreWin32HandleInfoKHR(const safe_VkImportSemaphoreWin32HandleInfoKHR& src)
+{
+    sType = src.sType;
+    semaphore = src.semaphore;
+    flags = src.flags;
+    handleType = src.handleType;
+    handle = src.handle;
+    name = src.name;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkImportSemaphoreWin32HandleInfoKHR& safe_VkImportSemaphoreWin32HandleInfoKHR::operator=(const safe_VkImportSemaphoreWin32HandleInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    semaphore = src.semaphore;
+    flags = src.flags;
+    handleType = src.handleType;
+    handle = src.handle;
+    name = src.name;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkImportSemaphoreWin32HandleInfoKHR::~safe_VkImportSemaphoreWin32HandleInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkImportSemaphoreWin32HandleInfoKHR::initialize(const VkImportSemaphoreWin32HandleInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    semaphore = in_struct->semaphore;
+    flags = in_struct->flags;
+    handleType = in_struct->handleType;
+    handle = in_struct->handle;
+    name = in_struct->name;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkImportSemaphoreWin32HandleInfoKHR::initialize(const safe_VkImportSemaphoreWin32HandleInfoKHR* src)
+{
+    sType = src->sType;
+    semaphore = src->semaphore;
+    flags = src->flags;
+    handleType = src->handleType;
+    handle = src->handle;
+    name = src->name;
+    pNext = SafePnextCopy(src->pNext);
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+
+safe_VkExportSemaphoreWin32HandleInfoKHR::safe_VkExportSemaphoreWin32HandleInfoKHR(const VkExportSemaphoreWin32HandleInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    pAttributes(nullptr),
+    dwAccess(in_struct->dwAccess),
+    name(in_struct->name)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pAttributes) {
+        pAttributes = new SECURITY_ATTRIBUTES(*in_struct->pAttributes);
+    }
+}
+
+safe_VkExportSemaphoreWin32HandleInfoKHR::safe_VkExportSemaphoreWin32HandleInfoKHR() :
+    pNext(nullptr),
+    pAttributes(nullptr)
+{}
+
+safe_VkExportSemaphoreWin32HandleInfoKHR::safe_VkExportSemaphoreWin32HandleInfoKHR(const safe_VkExportSemaphoreWin32HandleInfoKHR& src)
+{
+    sType = src.sType;
+    pAttributes = nullptr;
+    dwAccess = src.dwAccess;
+    name = src.name;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pAttributes) {
+        pAttributes = new SECURITY_ATTRIBUTES(*src.pAttributes);
+    }
+}
+
+safe_VkExportSemaphoreWin32HandleInfoKHR& safe_VkExportSemaphoreWin32HandleInfoKHR::operator=(const safe_VkExportSemaphoreWin32HandleInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pAttributes)
+        delete pAttributes;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    pAttributes = nullptr;
+    dwAccess = src.dwAccess;
+    name = src.name;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pAttributes) {
+        pAttributes = new SECURITY_ATTRIBUTES(*src.pAttributes);
+    }
+
+    return *this;
+}
+
+safe_VkExportSemaphoreWin32HandleInfoKHR::~safe_VkExportSemaphoreWin32HandleInfoKHR()
+{
+    if (pAttributes)
+        delete pAttributes;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkExportSemaphoreWin32HandleInfoKHR::initialize(const VkExportSemaphoreWin32HandleInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    pAttributes = nullptr;
+    dwAccess = in_struct->dwAccess;
+    name = in_struct->name;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pAttributes) {
+        pAttributes = new SECURITY_ATTRIBUTES(*in_struct->pAttributes);
+    }
+}
+
+void safe_VkExportSemaphoreWin32HandleInfoKHR::initialize(const safe_VkExportSemaphoreWin32HandleInfoKHR* src)
+{
+    sType = src->sType;
+    pAttributes = nullptr;
+    dwAccess = src->dwAccess;
+    name = src->name;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pAttributes) {
+        pAttributes = new SECURITY_ATTRIBUTES(*src->pAttributes);
+    }
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+
+safe_VkD3D12FenceSubmitInfoKHR::safe_VkD3D12FenceSubmitInfoKHR(const VkD3D12FenceSubmitInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    waitSemaphoreValuesCount(in_struct->waitSemaphoreValuesCount),
+    pWaitSemaphoreValues(nullptr),
+    signalSemaphoreValuesCount(in_struct->signalSemaphoreValuesCount),
+    pSignalSemaphoreValues(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pWaitSemaphoreValues) {
+        pWaitSemaphoreValues = new uint64_t[in_struct->waitSemaphoreValuesCount];
+        memcpy ((void *)pWaitSemaphoreValues, (void *)in_struct->pWaitSemaphoreValues, sizeof(uint64_t)*in_struct->waitSemaphoreValuesCount);
+    }
+    if (in_struct->pSignalSemaphoreValues) {
+        pSignalSemaphoreValues = new uint64_t[in_struct->signalSemaphoreValuesCount];
+        memcpy ((void *)pSignalSemaphoreValues, (void *)in_struct->pSignalSemaphoreValues, sizeof(uint64_t)*in_struct->signalSemaphoreValuesCount);
+    }
+}
+
+safe_VkD3D12FenceSubmitInfoKHR::safe_VkD3D12FenceSubmitInfoKHR() :
+    pNext(nullptr),
+    pWaitSemaphoreValues(nullptr),
+    pSignalSemaphoreValues(nullptr)
+{}
+
+safe_VkD3D12FenceSubmitInfoKHR::safe_VkD3D12FenceSubmitInfoKHR(const safe_VkD3D12FenceSubmitInfoKHR& src)
+{
+    sType = src.sType;
+    waitSemaphoreValuesCount = src.waitSemaphoreValuesCount;
+    pWaitSemaphoreValues = nullptr;
+    signalSemaphoreValuesCount = src.signalSemaphoreValuesCount;
+    pSignalSemaphoreValues = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pWaitSemaphoreValues) {
+        pWaitSemaphoreValues = new uint64_t[src.waitSemaphoreValuesCount];
+        memcpy ((void *)pWaitSemaphoreValues, (void *)src.pWaitSemaphoreValues, sizeof(uint64_t)*src.waitSemaphoreValuesCount);
+    }
+    if (src.pSignalSemaphoreValues) {
+        pSignalSemaphoreValues = new uint64_t[src.signalSemaphoreValuesCount];
+        memcpy ((void *)pSignalSemaphoreValues, (void *)src.pSignalSemaphoreValues, sizeof(uint64_t)*src.signalSemaphoreValuesCount);
+    }
+}
+
+safe_VkD3D12FenceSubmitInfoKHR& safe_VkD3D12FenceSubmitInfoKHR::operator=(const safe_VkD3D12FenceSubmitInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pWaitSemaphoreValues)
+        delete[] pWaitSemaphoreValues;
+    if (pSignalSemaphoreValues)
+        delete[] pSignalSemaphoreValues;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    waitSemaphoreValuesCount = src.waitSemaphoreValuesCount;
+    pWaitSemaphoreValues = nullptr;
+    signalSemaphoreValuesCount = src.signalSemaphoreValuesCount;
+    pSignalSemaphoreValues = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pWaitSemaphoreValues) {
+        pWaitSemaphoreValues = new uint64_t[src.waitSemaphoreValuesCount];
+        memcpy ((void *)pWaitSemaphoreValues, (void *)src.pWaitSemaphoreValues, sizeof(uint64_t)*src.waitSemaphoreValuesCount);
+    }
+    if (src.pSignalSemaphoreValues) {
+        pSignalSemaphoreValues = new uint64_t[src.signalSemaphoreValuesCount];
+        memcpy ((void *)pSignalSemaphoreValues, (void *)src.pSignalSemaphoreValues, sizeof(uint64_t)*src.signalSemaphoreValuesCount);
+    }
+
+    return *this;
+}
+
+safe_VkD3D12FenceSubmitInfoKHR::~safe_VkD3D12FenceSubmitInfoKHR()
+{
+    if (pWaitSemaphoreValues)
+        delete[] pWaitSemaphoreValues;
+    if (pSignalSemaphoreValues)
+        delete[] pSignalSemaphoreValues;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkD3D12FenceSubmitInfoKHR::initialize(const VkD3D12FenceSubmitInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    waitSemaphoreValuesCount = in_struct->waitSemaphoreValuesCount;
+    pWaitSemaphoreValues = nullptr;
+    signalSemaphoreValuesCount = in_struct->signalSemaphoreValuesCount;
+    pSignalSemaphoreValues = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pWaitSemaphoreValues) {
+        pWaitSemaphoreValues = new uint64_t[in_struct->waitSemaphoreValuesCount];
+        memcpy ((void *)pWaitSemaphoreValues, (void *)in_struct->pWaitSemaphoreValues, sizeof(uint64_t)*in_struct->waitSemaphoreValuesCount);
+    }
+    if (in_struct->pSignalSemaphoreValues) {
+        pSignalSemaphoreValues = new uint64_t[in_struct->signalSemaphoreValuesCount];
+        memcpy ((void *)pSignalSemaphoreValues, (void *)in_struct->pSignalSemaphoreValues, sizeof(uint64_t)*in_struct->signalSemaphoreValuesCount);
+    }
+}
+
+void safe_VkD3D12FenceSubmitInfoKHR::initialize(const safe_VkD3D12FenceSubmitInfoKHR* src)
+{
+    sType = src->sType;
+    waitSemaphoreValuesCount = src->waitSemaphoreValuesCount;
+    pWaitSemaphoreValues = nullptr;
+    signalSemaphoreValuesCount = src->signalSemaphoreValuesCount;
+    pSignalSemaphoreValues = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pWaitSemaphoreValues) {
+        pWaitSemaphoreValues = new uint64_t[src->waitSemaphoreValuesCount];
+        memcpy ((void *)pWaitSemaphoreValues, (void *)src->pWaitSemaphoreValues, sizeof(uint64_t)*src->waitSemaphoreValuesCount);
+    }
+    if (src->pSignalSemaphoreValues) {
+        pSignalSemaphoreValues = new uint64_t[src->signalSemaphoreValuesCount];
+        memcpy ((void *)pSignalSemaphoreValues, (void *)src->pSignalSemaphoreValues, sizeof(uint64_t)*src->signalSemaphoreValuesCount);
+    }
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+
+safe_VkSemaphoreGetWin32HandleInfoKHR::safe_VkSemaphoreGetWin32HandleInfoKHR(const VkSemaphoreGetWin32HandleInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    semaphore(in_struct->semaphore),
+    handleType(in_struct->handleType)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkSemaphoreGetWin32HandleInfoKHR::safe_VkSemaphoreGetWin32HandleInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkSemaphoreGetWin32HandleInfoKHR::safe_VkSemaphoreGetWin32HandleInfoKHR(const safe_VkSemaphoreGetWin32HandleInfoKHR& src)
+{
+    sType = src.sType;
+    semaphore = src.semaphore;
+    handleType = src.handleType;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkSemaphoreGetWin32HandleInfoKHR& safe_VkSemaphoreGetWin32HandleInfoKHR::operator=(const safe_VkSemaphoreGetWin32HandleInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    semaphore = src.semaphore;
+    handleType = src.handleType;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkSemaphoreGetWin32HandleInfoKHR::~safe_VkSemaphoreGetWin32HandleInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSemaphoreGetWin32HandleInfoKHR::initialize(const VkSemaphoreGetWin32HandleInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    semaphore = in_struct->semaphore;
+    handleType = in_struct->handleType;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkSemaphoreGetWin32HandleInfoKHR::initialize(const safe_VkSemaphoreGetWin32HandleInfoKHR* src)
+{
+    sType = src->sType;
+    semaphore = src->semaphore;
+    handleType = src->handleType;
+    pNext = SafePnextCopy(src->pNext);
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+
+safe_VkImportSemaphoreFdInfoKHR::safe_VkImportSemaphoreFdInfoKHR(const VkImportSemaphoreFdInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    semaphore(in_struct->semaphore),
+    flags(in_struct->flags),
+    handleType(in_struct->handleType),
+    fd(in_struct->fd)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkImportSemaphoreFdInfoKHR::safe_VkImportSemaphoreFdInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkImportSemaphoreFdInfoKHR::safe_VkImportSemaphoreFdInfoKHR(const safe_VkImportSemaphoreFdInfoKHR& src)
+{
+    sType = src.sType;
+    semaphore = src.semaphore;
+    flags = src.flags;
+    handleType = src.handleType;
+    fd = src.fd;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkImportSemaphoreFdInfoKHR& safe_VkImportSemaphoreFdInfoKHR::operator=(const safe_VkImportSemaphoreFdInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    semaphore = src.semaphore;
+    flags = src.flags;
+    handleType = src.handleType;
+    fd = src.fd;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkImportSemaphoreFdInfoKHR::~safe_VkImportSemaphoreFdInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkImportSemaphoreFdInfoKHR::initialize(const VkImportSemaphoreFdInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    semaphore = in_struct->semaphore;
+    flags = in_struct->flags;
+    handleType = in_struct->handleType;
+    fd = in_struct->fd;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkImportSemaphoreFdInfoKHR::initialize(const safe_VkImportSemaphoreFdInfoKHR* src)
+{
+    sType = src->sType;
+    semaphore = src->semaphore;
+    flags = src->flags;
+    handleType = src->handleType;
+    fd = src->fd;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkSemaphoreGetFdInfoKHR::safe_VkSemaphoreGetFdInfoKHR(const VkSemaphoreGetFdInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    semaphore(in_struct->semaphore),
+    handleType(in_struct->handleType)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkSemaphoreGetFdInfoKHR::safe_VkSemaphoreGetFdInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkSemaphoreGetFdInfoKHR::safe_VkSemaphoreGetFdInfoKHR(const safe_VkSemaphoreGetFdInfoKHR& src)
+{
+    sType = src.sType;
+    semaphore = src.semaphore;
+    handleType = src.handleType;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkSemaphoreGetFdInfoKHR& safe_VkSemaphoreGetFdInfoKHR::operator=(const safe_VkSemaphoreGetFdInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    semaphore = src.semaphore;
+    handleType = src.handleType;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkSemaphoreGetFdInfoKHR::~safe_VkSemaphoreGetFdInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSemaphoreGetFdInfoKHR::initialize(const VkSemaphoreGetFdInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    semaphore = in_struct->semaphore;
+    handleType = in_struct->handleType;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkSemaphoreGetFdInfoKHR::initialize(const safe_VkSemaphoreGetFdInfoKHR* src)
+{
+    sType = src->sType;
+    semaphore = src->semaphore;
+    handleType = src->handleType;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDevicePushDescriptorPropertiesKHR::safe_VkPhysicalDevicePushDescriptorPropertiesKHR(const VkPhysicalDevicePushDescriptorPropertiesKHR* in_struct) :
+    sType(in_struct->sType),
+    maxPushDescriptors(in_struct->maxPushDescriptors)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDevicePushDescriptorPropertiesKHR::safe_VkPhysicalDevicePushDescriptorPropertiesKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDevicePushDescriptorPropertiesKHR::safe_VkPhysicalDevicePushDescriptorPropertiesKHR(const safe_VkPhysicalDevicePushDescriptorPropertiesKHR& src)
+{
+    sType = src.sType;
+    maxPushDescriptors = src.maxPushDescriptors;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDevicePushDescriptorPropertiesKHR& safe_VkPhysicalDevicePushDescriptorPropertiesKHR::operator=(const safe_VkPhysicalDevicePushDescriptorPropertiesKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    maxPushDescriptors = src.maxPushDescriptors;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDevicePushDescriptorPropertiesKHR::~safe_VkPhysicalDevicePushDescriptorPropertiesKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDevicePushDescriptorPropertiesKHR::initialize(const VkPhysicalDevicePushDescriptorPropertiesKHR* in_struct)
+{
+    sType = in_struct->sType;
+    maxPushDescriptors = in_struct->maxPushDescriptors;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDevicePushDescriptorPropertiesKHR::initialize(const safe_VkPhysicalDevicePushDescriptorPropertiesKHR* src)
+{
+    sType = src->sType;
+    maxPushDescriptors = src->maxPushDescriptors;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR::safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR(const VkPhysicalDeviceShaderFloat16Int8FeaturesKHR* in_struct) :
+    sType(in_struct->sType),
+    shaderFloat16(in_struct->shaderFloat16),
+    shaderInt8(in_struct->shaderInt8)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR::safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR::safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR(const safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR& src)
+{
+    sType = src.sType;
+    shaderFloat16 = src.shaderFloat16;
+    shaderInt8 = src.shaderInt8;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR& safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR::operator=(const safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    shaderFloat16 = src.shaderFloat16;
+    shaderInt8 = src.shaderInt8;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR::~safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR::initialize(const VkPhysicalDeviceShaderFloat16Int8FeaturesKHR* in_struct)
+{
+    sType = in_struct->sType;
+    shaderFloat16 = in_struct->shaderFloat16;
+    shaderInt8 = in_struct->shaderInt8;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR::initialize(const safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR* src)
+{
+    sType = src->sType;
+    shaderFloat16 = src->shaderFloat16;
+    shaderInt8 = src->shaderInt8;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPresentRegionKHR::safe_VkPresentRegionKHR(const VkPresentRegionKHR* in_struct) :
+    rectangleCount(in_struct->rectangleCount),
+    pRectangles(nullptr)
+{
+    if (in_struct->pRectangles) {
+        pRectangles = new VkRectLayerKHR[in_struct->rectangleCount];
+        memcpy ((void *)pRectangles, (void *)in_struct->pRectangles, sizeof(VkRectLayerKHR)*in_struct->rectangleCount);
+    }
+}
+
+safe_VkPresentRegionKHR::safe_VkPresentRegionKHR() :
+    pRectangles(nullptr)
+{}
+
+safe_VkPresentRegionKHR::safe_VkPresentRegionKHR(const safe_VkPresentRegionKHR& src)
+{
+    rectangleCount = src.rectangleCount;
+    pRectangles = nullptr;
+    if (src.pRectangles) {
+        pRectangles = new VkRectLayerKHR[src.rectangleCount];
+        memcpy ((void *)pRectangles, (void *)src.pRectangles, sizeof(VkRectLayerKHR)*src.rectangleCount);
+    }
+}
+
+safe_VkPresentRegionKHR& safe_VkPresentRegionKHR::operator=(const safe_VkPresentRegionKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pRectangles)
+        delete[] pRectangles;
+
+    rectangleCount = src.rectangleCount;
+    pRectangles = nullptr;
+    if (src.pRectangles) {
+        pRectangles = new VkRectLayerKHR[src.rectangleCount];
+        memcpy ((void *)pRectangles, (void *)src.pRectangles, sizeof(VkRectLayerKHR)*src.rectangleCount);
+    }
+
+    return *this;
+}
+
+safe_VkPresentRegionKHR::~safe_VkPresentRegionKHR()
+{
+    if (pRectangles)
+        delete[] pRectangles;
+}
+
+void safe_VkPresentRegionKHR::initialize(const VkPresentRegionKHR* in_struct)
+{
+    rectangleCount = in_struct->rectangleCount;
+    pRectangles = nullptr;
+    if (in_struct->pRectangles) {
+        pRectangles = new VkRectLayerKHR[in_struct->rectangleCount];
+        memcpy ((void *)pRectangles, (void *)in_struct->pRectangles, sizeof(VkRectLayerKHR)*in_struct->rectangleCount);
+    }
+}
+
+void safe_VkPresentRegionKHR::initialize(const safe_VkPresentRegionKHR* src)
+{
+    rectangleCount = src->rectangleCount;
+    pRectangles = nullptr;
+    if (src->pRectangles) {
+        pRectangles = new VkRectLayerKHR[src->rectangleCount];
+        memcpy ((void *)pRectangles, (void *)src->pRectangles, sizeof(VkRectLayerKHR)*src->rectangleCount);
+    }
+}
+
+safe_VkPresentRegionsKHR::safe_VkPresentRegionsKHR(const VkPresentRegionsKHR* in_struct) :
+    sType(in_struct->sType),
+    swapchainCount(in_struct->swapchainCount),
+    pRegions(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (swapchainCount && in_struct->pRegions) {
+        pRegions = new safe_VkPresentRegionKHR[swapchainCount];
+        for (uint32_t i = 0; i < swapchainCount; ++i) {
+            pRegions[i].initialize(&in_struct->pRegions[i]);
+        }
+    }
+}
+
+safe_VkPresentRegionsKHR::safe_VkPresentRegionsKHR() :
+    pNext(nullptr),
+    pRegions(nullptr)
+{}
+
+safe_VkPresentRegionsKHR::safe_VkPresentRegionsKHR(const safe_VkPresentRegionsKHR& src)
+{
+    sType = src.sType;
+    swapchainCount = src.swapchainCount;
+    pRegions = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (swapchainCount && src.pRegions) {
+        pRegions = new safe_VkPresentRegionKHR[swapchainCount];
+        for (uint32_t i = 0; i < swapchainCount; ++i) {
+            pRegions[i].initialize(&src.pRegions[i]);
+        }
+    }
+}
+
+safe_VkPresentRegionsKHR& safe_VkPresentRegionsKHR::operator=(const safe_VkPresentRegionsKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pRegions)
+        delete[] pRegions;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    swapchainCount = src.swapchainCount;
+    pRegions = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (swapchainCount && src.pRegions) {
+        pRegions = new safe_VkPresentRegionKHR[swapchainCount];
+        for (uint32_t i = 0; i < swapchainCount; ++i) {
+            pRegions[i].initialize(&src.pRegions[i]);
+        }
+    }
+
+    return *this;
+}
+
+safe_VkPresentRegionsKHR::~safe_VkPresentRegionsKHR()
+{
+    if (pRegions)
+        delete[] pRegions;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPresentRegionsKHR::initialize(const VkPresentRegionsKHR* in_struct)
+{
+    sType = in_struct->sType;
+    swapchainCount = in_struct->swapchainCount;
+    pRegions = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (swapchainCount && in_struct->pRegions) {
+        pRegions = new safe_VkPresentRegionKHR[swapchainCount];
+        for (uint32_t i = 0; i < swapchainCount; ++i) {
+            pRegions[i].initialize(&in_struct->pRegions[i]);
+        }
+    }
+}
+
+void safe_VkPresentRegionsKHR::initialize(const safe_VkPresentRegionsKHR* src)
+{
+    sType = src->sType;
+    swapchainCount = src->swapchainCount;
+    pRegions = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (swapchainCount && src->pRegions) {
+        pRegions = new safe_VkPresentRegionKHR[swapchainCount];
+        for (uint32_t i = 0; i < swapchainCount; ++i) {
+            pRegions[i].initialize(&src->pRegions[i]);
+        }
+    }
+}
+
+safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR::safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR(const VkPhysicalDeviceImagelessFramebufferFeaturesKHR* in_struct) :
+    sType(in_struct->sType),
+    imagelessFramebuffer(in_struct->imagelessFramebuffer)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR::safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR::safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR(const safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR& src)
+{
+    sType = src.sType;
+    imagelessFramebuffer = src.imagelessFramebuffer;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR& safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR::operator=(const safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    imagelessFramebuffer = src.imagelessFramebuffer;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR::~safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR::initialize(const VkPhysicalDeviceImagelessFramebufferFeaturesKHR* in_struct)
+{
+    sType = in_struct->sType;
+    imagelessFramebuffer = in_struct->imagelessFramebuffer;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR::initialize(const safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR* src)
+{
+    sType = src->sType;
+    imagelessFramebuffer = src->imagelessFramebuffer;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkFramebufferAttachmentImageInfoKHR::safe_VkFramebufferAttachmentImageInfoKHR(const VkFramebufferAttachmentImageInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    usage(in_struct->usage),
+    width(in_struct->width),
+    height(in_struct->height),
+    layerCount(in_struct->layerCount),
+    viewFormatCount(in_struct->viewFormatCount),
+    pViewFormats(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pViewFormats) {
+        pViewFormats = new VkFormat[in_struct->viewFormatCount];
+        memcpy ((void *)pViewFormats, (void *)in_struct->pViewFormats, sizeof(VkFormat)*in_struct->viewFormatCount);
+    }
+}
+
+safe_VkFramebufferAttachmentImageInfoKHR::safe_VkFramebufferAttachmentImageInfoKHR() :
+    pNext(nullptr),
+    pViewFormats(nullptr)
+{}
+
+safe_VkFramebufferAttachmentImageInfoKHR::safe_VkFramebufferAttachmentImageInfoKHR(const safe_VkFramebufferAttachmentImageInfoKHR& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    usage = src.usage;
+    width = src.width;
+    height = src.height;
+    layerCount = src.layerCount;
+    viewFormatCount = src.viewFormatCount;
+    pViewFormats = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pViewFormats) {
+        pViewFormats = new VkFormat[src.viewFormatCount];
+        memcpy ((void *)pViewFormats, (void *)src.pViewFormats, sizeof(VkFormat)*src.viewFormatCount);
+    }
+}
+
+safe_VkFramebufferAttachmentImageInfoKHR& safe_VkFramebufferAttachmentImageInfoKHR::operator=(const safe_VkFramebufferAttachmentImageInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pViewFormats)
+        delete[] pViewFormats;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    usage = src.usage;
+    width = src.width;
+    height = src.height;
+    layerCount = src.layerCount;
+    viewFormatCount = src.viewFormatCount;
+    pViewFormats = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pViewFormats) {
+        pViewFormats = new VkFormat[src.viewFormatCount];
+        memcpy ((void *)pViewFormats, (void *)src.pViewFormats, sizeof(VkFormat)*src.viewFormatCount);
+    }
+
+    return *this;
+}
+
+safe_VkFramebufferAttachmentImageInfoKHR::~safe_VkFramebufferAttachmentImageInfoKHR()
+{
+    if (pViewFormats)
+        delete[] pViewFormats;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkFramebufferAttachmentImageInfoKHR::initialize(const VkFramebufferAttachmentImageInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    usage = in_struct->usage;
+    width = in_struct->width;
+    height = in_struct->height;
+    layerCount = in_struct->layerCount;
+    viewFormatCount = in_struct->viewFormatCount;
+    pViewFormats = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pViewFormats) {
+        pViewFormats = new VkFormat[in_struct->viewFormatCount];
+        memcpy ((void *)pViewFormats, (void *)in_struct->pViewFormats, sizeof(VkFormat)*in_struct->viewFormatCount);
+    }
+}
+
+void safe_VkFramebufferAttachmentImageInfoKHR::initialize(const safe_VkFramebufferAttachmentImageInfoKHR* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    usage = src->usage;
+    width = src->width;
+    height = src->height;
+    layerCount = src->layerCount;
+    viewFormatCount = src->viewFormatCount;
+    pViewFormats = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pViewFormats) {
+        pViewFormats = new VkFormat[src->viewFormatCount];
+        memcpy ((void *)pViewFormats, (void *)src->pViewFormats, sizeof(VkFormat)*src->viewFormatCount);
+    }
+}
+
+safe_VkFramebufferAttachmentsCreateInfoKHR::safe_VkFramebufferAttachmentsCreateInfoKHR(const VkFramebufferAttachmentsCreateInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    attachmentImageInfoCount(in_struct->attachmentImageInfoCount),
+    pAttachmentImageInfos(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (attachmentImageInfoCount && in_struct->pAttachmentImageInfos) {
+        pAttachmentImageInfos = new safe_VkFramebufferAttachmentImageInfoKHR[attachmentImageInfoCount];
+        for (uint32_t i = 0; i < attachmentImageInfoCount; ++i) {
+            pAttachmentImageInfos[i].initialize(&in_struct->pAttachmentImageInfos[i]);
+        }
+    }
+}
+
+safe_VkFramebufferAttachmentsCreateInfoKHR::safe_VkFramebufferAttachmentsCreateInfoKHR() :
+    pNext(nullptr),
+    pAttachmentImageInfos(nullptr)
+{}
+
+safe_VkFramebufferAttachmentsCreateInfoKHR::safe_VkFramebufferAttachmentsCreateInfoKHR(const safe_VkFramebufferAttachmentsCreateInfoKHR& src)
+{
+    sType = src.sType;
+    attachmentImageInfoCount = src.attachmentImageInfoCount;
+    pAttachmentImageInfos = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (attachmentImageInfoCount && src.pAttachmentImageInfos) {
+        pAttachmentImageInfos = new safe_VkFramebufferAttachmentImageInfoKHR[attachmentImageInfoCount];
+        for (uint32_t i = 0; i < attachmentImageInfoCount; ++i) {
+            pAttachmentImageInfos[i].initialize(&src.pAttachmentImageInfos[i]);
+        }
+    }
+}
+
+safe_VkFramebufferAttachmentsCreateInfoKHR& safe_VkFramebufferAttachmentsCreateInfoKHR::operator=(const safe_VkFramebufferAttachmentsCreateInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pAttachmentImageInfos)
+        delete[] pAttachmentImageInfos;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    attachmentImageInfoCount = src.attachmentImageInfoCount;
+    pAttachmentImageInfos = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (attachmentImageInfoCount && src.pAttachmentImageInfos) {
+        pAttachmentImageInfos = new safe_VkFramebufferAttachmentImageInfoKHR[attachmentImageInfoCount];
+        for (uint32_t i = 0; i < attachmentImageInfoCount; ++i) {
+            pAttachmentImageInfos[i].initialize(&src.pAttachmentImageInfos[i]);
+        }
+    }
+
+    return *this;
+}
+
+safe_VkFramebufferAttachmentsCreateInfoKHR::~safe_VkFramebufferAttachmentsCreateInfoKHR()
+{
+    if (pAttachmentImageInfos)
+        delete[] pAttachmentImageInfos;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkFramebufferAttachmentsCreateInfoKHR::initialize(const VkFramebufferAttachmentsCreateInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    attachmentImageInfoCount = in_struct->attachmentImageInfoCount;
+    pAttachmentImageInfos = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (attachmentImageInfoCount && in_struct->pAttachmentImageInfos) {
+        pAttachmentImageInfos = new safe_VkFramebufferAttachmentImageInfoKHR[attachmentImageInfoCount];
+        for (uint32_t i = 0; i < attachmentImageInfoCount; ++i) {
+            pAttachmentImageInfos[i].initialize(&in_struct->pAttachmentImageInfos[i]);
+        }
+    }
+}
+
+void safe_VkFramebufferAttachmentsCreateInfoKHR::initialize(const safe_VkFramebufferAttachmentsCreateInfoKHR* src)
+{
+    sType = src->sType;
+    attachmentImageInfoCount = src->attachmentImageInfoCount;
+    pAttachmentImageInfos = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (attachmentImageInfoCount && src->pAttachmentImageInfos) {
+        pAttachmentImageInfos = new safe_VkFramebufferAttachmentImageInfoKHR[attachmentImageInfoCount];
+        for (uint32_t i = 0; i < attachmentImageInfoCount; ++i) {
+            pAttachmentImageInfos[i].initialize(&src->pAttachmentImageInfos[i]);
+        }
+    }
+}
+
+safe_VkRenderPassAttachmentBeginInfoKHR::safe_VkRenderPassAttachmentBeginInfoKHR(const VkRenderPassAttachmentBeginInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    attachmentCount(in_struct->attachmentCount),
+    pAttachments(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (attachmentCount && in_struct->pAttachments) {
+        pAttachments = new VkImageView[attachmentCount];
+        for (uint32_t i = 0; i < attachmentCount; ++i) {
+            pAttachments[i] = in_struct->pAttachments[i];
+        }
+    }
+}
+
+safe_VkRenderPassAttachmentBeginInfoKHR::safe_VkRenderPassAttachmentBeginInfoKHR() :
+    pNext(nullptr),
+    pAttachments(nullptr)
+{}
+
+safe_VkRenderPassAttachmentBeginInfoKHR::safe_VkRenderPassAttachmentBeginInfoKHR(const safe_VkRenderPassAttachmentBeginInfoKHR& src)
+{
+    sType = src.sType;
+    attachmentCount = src.attachmentCount;
+    pAttachments = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (attachmentCount && src.pAttachments) {
+        pAttachments = new VkImageView[attachmentCount];
+        for (uint32_t i = 0; i < attachmentCount; ++i) {
+            pAttachments[i] = src.pAttachments[i];
+        }
+    }
+}
+
+safe_VkRenderPassAttachmentBeginInfoKHR& safe_VkRenderPassAttachmentBeginInfoKHR::operator=(const safe_VkRenderPassAttachmentBeginInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pAttachments)
+        delete[] pAttachments;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    attachmentCount = src.attachmentCount;
+    pAttachments = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (attachmentCount && src.pAttachments) {
+        pAttachments = new VkImageView[attachmentCount];
+        for (uint32_t i = 0; i < attachmentCount; ++i) {
+            pAttachments[i] = src.pAttachments[i];
+        }
+    }
+
+    return *this;
+}
+
+safe_VkRenderPassAttachmentBeginInfoKHR::~safe_VkRenderPassAttachmentBeginInfoKHR()
+{
+    if (pAttachments)
+        delete[] pAttachments;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkRenderPassAttachmentBeginInfoKHR::initialize(const VkRenderPassAttachmentBeginInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    attachmentCount = in_struct->attachmentCount;
+    pAttachments = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (attachmentCount && in_struct->pAttachments) {
+        pAttachments = new VkImageView[attachmentCount];
+        for (uint32_t i = 0; i < attachmentCount; ++i) {
+            pAttachments[i] = in_struct->pAttachments[i];
+        }
+    }
+}
+
+void safe_VkRenderPassAttachmentBeginInfoKHR::initialize(const safe_VkRenderPassAttachmentBeginInfoKHR* src)
+{
+    sType = src->sType;
+    attachmentCount = src->attachmentCount;
+    pAttachments = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (attachmentCount && src->pAttachments) {
+        pAttachments = new VkImageView[attachmentCount];
+        for (uint32_t i = 0; i < attachmentCount; ++i) {
+            pAttachments[i] = src->pAttachments[i];
+        }
+    }
+}
+
+safe_VkAttachmentDescription2KHR::safe_VkAttachmentDescription2KHR(const VkAttachmentDescription2KHR* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    format(in_struct->format),
+    samples(in_struct->samples),
+    loadOp(in_struct->loadOp),
+    storeOp(in_struct->storeOp),
+    stencilLoadOp(in_struct->stencilLoadOp),
+    stencilStoreOp(in_struct->stencilStoreOp),
+    initialLayout(in_struct->initialLayout),
+    finalLayout(in_struct->finalLayout)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkAttachmentDescription2KHR::safe_VkAttachmentDescription2KHR() :
+    pNext(nullptr)
+{}
+
+safe_VkAttachmentDescription2KHR::safe_VkAttachmentDescription2KHR(const safe_VkAttachmentDescription2KHR& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    format = src.format;
+    samples = src.samples;
+    loadOp = src.loadOp;
+    storeOp = src.storeOp;
+    stencilLoadOp = src.stencilLoadOp;
+    stencilStoreOp = src.stencilStoreOp;
+    initialLayout = src.initialLayout;
+    finalLayout = src.finalLayout;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkAttachmentDescription2KHR& safe_VkAttachmentDescription2KHR::operator=(const safe_VkAttachmentDescription2KHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    format = src.format;
+    samples = src.samples;
+    loadOp = src.loadOp;
+    storeOp = src.storeOp;
+    stencilLoadOp = src.stencilLoadOp;
+    stencilStoreOp = src.stencilStoreOp;
+    initialLayout = src.initialLayout;
+    finalLayout = src.finalLayout;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkAttachmentDescription2KHR::~safe_VkAttachmentDescription2KHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkAttachmentDescription2KHR::initialize(const VkAttachmentDescription2KHR* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    format = in_struct->format;
+    samples = in_struct->samples;
+    loadOp = in_struct->loadOp;
+    storeOp = in_struct->storeOp;
+    stencilLoadOp = in_struct->stencilLoadOp;
+    stencilStoreOp = in_struct->stencilStoreOp;
+    initialLayout = in_struct->initialLayout;
+    finalLayout = in_struct->finalLayout;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkAttachmentDescription2KHR::initialize(const safe_VkAttachmentDescription2KHR* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    format = src->format;
+    samples = src->samples;
+    loadOp = src->loadOp;
+    storeOp = src->storeOp;
+    stencilLoadOp = src->stencilLoadOp;
+    stencilStoreOp = src->stencilStoreOp;
+    initialLayout = src->initialLayout;
+    finalLayout = src->finalLayout;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkAttachmentReference2KHR::safe_VkAttachmentReference2KHR(const VkAttachmentReference2KHR* in_struct) :
+    sType(in_struct->sType),
+    attachment(in_struct->attachment),
+    layout(in_struct->layout),
+    aspectMask(in_struct->aspectMask)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkAttachmentReference2KHR::safe_VkAttachmentReference2KHR() :
+    pNext(nullptr)
+{}
+
+safe_VkAttachmentReference2KHR::safe_VkAttachmentReference2KHR(const safe_VkAttachmentReference2KHR& src)
+{
+    sType = src.sType;
+    attachment = src.attachment;
+    layout = src.layout;
+    aspectMask = src.aspectMask;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkAttachmentReference2KHR& safe_VkAttachmentReference2KHR::operator=(const safe_VkAttachmentReference2KHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    attachment = src.attachment;
+    layout = src.layout;
+    aspectMask = src.aspectMask;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkAttachmentReference2KHR::~safe_VkAttachmentReference2KHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkAttachmentReference2KHR::initialize(const VkAttachmentReference2KHR* in_struct)
+{
+    sType = in_struct->sType;
+    attachment = in_struct->attachment;
+    layout = in_struct->layout;
+    aspectMask = in_struct->aspectMask;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkAttachmentReference2KHR::initialize(const safe_VkAttachmentReference2KHR* src)
+{
+    sType = src->sType;
+    attachment = src->attachment;
+    layout = src->layout;
+    aspectMask = src->aspectMask;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkSubpassDescription2KHR::safe_VkSubpassDescription2KHR(const VkSubpassDescription2KHR* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    pipelineBindPoint(in_struct->pipelineBindPoint),
+    viewMask(in_struct->viewMask),
+    inputAttachmentCount(in_struct->inputAttachmentCount),
+    pInputAttachments(nullptr),
+    colorAttachmentCount(in_struct->colorAttachmentCount),
+    pColorAttachments(nullptr),
+    pResolveAttachments(nullptr),
+    pDepthStencilAttachment(nullptr),
+    preserveAttachmentCount(in_struct->preserveAttachmentCount),
+    pPreserveAttachments(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (inputAttachmentCount && in_struct->pInputAttachments) {
+        pInputAttachments = new safe_VkAttachmentReference2KHR[inputAttachmentCount];
+        for (uint32_t i = 0; i < inputAttachmentCount; ++i) {
+            pInputAttachments[i].initialize(&in_struct->pInputAttachments[i]);
+        }
+    }
+    if (colorAttachmentCount && in_struct->pColorAttachments) {
+        pColorAttachments = new safe_VkAttachmentReference2KHR[colorAttachmentCount];
+        for (uint32_t i = 0; i < colorAttachmentCount; ++i) {
+            pColorAttachments[i].initialize(&in_struct->pColorAttachments[i]);
+        }
+    }
+    if (colorAttachmentCount && in_struct->pResolveAttachments) {
+        pResolveAttachments = new safe_VkAttachmentReference2KHR[colorAttachmentCount];
+        for (uint32_t i = 0; i < colorAttachmentCount; ++i) {
+            pResolveAttachments[i].initialize(&in_struct->pResolveAttachments[i]);
+        }
+    }
+    if (in_struct->pDepthStencilAttachment)
+        pDepthStencilAttachment = new safe_VkAttachmentReference2KHR(in_struct->pDepthStencilAttachment);
+    if (in_struct->pPreserveAttachments) {
+        pPreserveAttachments = new uint32_t[in_struct->preserveAttachmentCount];
+        memcpy ((void *)pPreserveAttachments, (void *)in_struct->pPreserveAttachments, sizeof(uint32_t)*in_struct->preserveAttachmentCount);
+    }
+}
+
+safe_VkSubpassDescription2KHR::safe_VkSubpassDescription2KHR() :
+    pNext(nullptr),
+    pInputAttachments(nullptr),
+    pColorAttachments(nullptr),
+    pResolveAttachments(nullptr),
+    pDepthStencilAttachment(nullptr),
+    pPreserveAttachments(nullptr)
+{}
+
+safe_VkSubpassDescription2KHR::safe_VkSubpassDescription2KHR(const safe_VkSubpassDescription2KHR& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    pipelineBindPoint = src.pipelineBindPoint;
+    viewMask = src.viewMask;
+    inputAttachmentCount = src.inputAttachmentCount;
+    pInputAttachments = nullptr;
+    colorAttachmentCount = src.colorAttachmentCount;
+    pColorAttachments = nullptr;
+    pResolveAttachments = nullptr;
+    pDepthStencilAttachment = nullptr;
+    preserveAttachmentCount = src.preserveAttachmentCount;
+    pPreserveAttachments = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (inputAttachmentCount && src.pInputAttachments) {
+        pInputAttachments = new safe_VkAttachmentReference2KHR[inputAttachmentCount];
+        for (uint32_t i = 0; i < inputAttachmentCount; ++i) {
+            pInputAttachments[i].initialize(&src.pInputAttachments[i]);
+        }
+    }
+    if (colorAttachmentCount && src.pColorAttachments) {
+        pColorAttachments = new safe_VkAttachmentReference2KHR[colorAttachmentCount];
+        for (uint32_t i = 0; i < colorAttachmentCount; ++i) {
+            pColorAttachments[i].initialize(&src.pColorAttachments[i]);
+        }
+    }
+    if (colorAttachmentCount && src.pResolveAttachments) {
+        pResolveAttachments = new safe_VkAttachmentReference2KHR[colorAttachmentCount];
+        for (uint32_t i = 0; i < colorAttachmentCount; ++i) {
+            pResolveAttachments[i].initialize(&src.pResolveAttachments[i]);
+        }
+    }
+    if (src.pDepthStencilAttachment)
+        pDepthStencilAttachment = new safe_VkAttachmentReference2KHR(*src.pDepthStencilAttachment);
+    if (src.pPreserveAttachments) {
+        pPreserveAttachments = new uint32_t[src.preserveAttachmentCount];
+        memcpy ((void *)pPreserveAttachments, (void *)src.pPreserveAttachments, sizeof(uint32_t)*src.preserveAttachmentCount);
+    }
+}
+
+safe_VkSubpassDescription2KHR& safe_VkSubpassDescription2KHR::operator=(const safe_VkSubpassDescription2KHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pInputAttachments)
+        delete[] pInputAttachments;
+    if (pColorAttachments)
+        delete[] pColorAttachments;
+    if (pResolveAttachments)
+        delete[] pResolveAttachments;
+    if (pDepthStencilAttachment)
+        delete pDepthStencilAttachment;
+    if (pPreserveAttachments)
+        delete[] pPreserveAttachments;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    pipelineBindPoint = src.pipelineBindPoint;
+    viewMask = src.viewMask;
+    inputAttachmentCount = src.inputAttachmentCount;
+    pInputAttachments = nullptr;
+    colorAttachmentCount = src.colorAttachmentCount;
+    pColorAttachments = nullptr;
+    pResolveAttachments = nullptr;
+    pDepthStencilAttachment = nullptr;
+    preserveAttachmentCount = src.preserveAttachmentCount;
+    pPreserveAttachments = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (inputAttachmentCount && src.pInputAttachments) {
+        pInputAttachments = new safe_VkAttachmentReference2KHR[inputAttachmentCount];
+        for (uint32_t i = 0; i < inputAttachmentCount; ++i) {
+            pInputAttachments[i].initialize(&src.pInputAttachments[i]);
+        }
+    }
+    if (colorAttachmentCount && src.pColorAttachments) {
+        pColorAttachments = new safe_VkAttachmentReference2KHR[colorAttachmentCount];
+        for (uint32_t i = 0; i < colorAttachmentCount; ++i) {
+            pColorAttachments[i].initialize(&src.pColorAttachments[i]);
+        }
+    }
+    if (colorAttachmentCount && src.pResolveAttachments) {
+        pResolveAttachments = new safe_VkAttachmentReference2KHR[colorAttachmentCount];
+        for (uint32_t i = 0; i < colorAttachmentCount; ++i) {
+            pResolveAttachments[i].initialize(&src.pResolveAttachments[i]);
+        }
+    }
+    if (src.pDepthStencilAttachment)
+        pDepthStencilAttachment = new safe_VkAttachmentReference2KHR(*src.pDepthStencilAttachment);
+    if (src.pPreserveAttachments) {
+        pPreserveAttachments = new uint32_t[src.preserveAttachmentCount];
+        memcpy ((void *)pPreserveAttachments, (void *)src.pPreserveAttachments, sizeof(uint32_t)*src.preserveAttachmentCount);
+    }
+
+    return *this;
+}
+
+safe_VkSubpassDescription2KHR::~safe_VkSubpassDescription2KHR()
+{
+    if (pInputAttachments)
+        delete[] pInputAttachments;
+    if (pColorAttachments)
+        delete[] pColorAttachments;
+    if (pResolveAttachments)
+        delete[] pResolveAttachments;
+    if (pDepthStencilAttachment)
+        delete pDepthStencilAttachment;
+    if (pPreserveAttachments)
+        delete[] pPreserveAttachments;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSubpassDescription2KHR::initialize(const VkSubpassDescription2KHR* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    pipelineBindPoint = in_struct->pipelineBindPoint;
+    viewMask = in_struct->viewMask;
+    inputAttachmentCount = in_struct->inputAttachmentCount;
+    pInputAttachments = nullptr;
+    colorAttachmentCount = in_struct->colorAttachmentCount;
+    pColorAttachments = nullptr;
+    pResolveAttachments = nullptr;
+    pDepthStencilAttachment = nullptr;
+    preserveAttachmentCount = in_struct->preserveAttachmentCount;
+    pPreserveAttachments = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (inputAttachmentCount && in_struct->pInputAttachments) {
+        pInputAttachments = new safe_VkAttachmentReference2KHR[inputAttachmentCount];
+        for (uint32_t i = 0; i < inputAttachmentCount; ++i) {
+            pInputAttachments[i].initialize(&in_struct->pInputAttachments[i]);
+        }
+    }
+    if (colorAttachmentCount && in_struct->pColorAttachments) {
+        pColorAttachments = new safe_VkAttachmentReference2KHR[colorAttachmentCount];
+        for (uint32_t i = 0; i < colorAttachmentCount; ++i) {
+            pColorAttachments[i].initialize(&in_struct->pColorAttachments[i]);
+        }
+    }
+    if (colorAttachmentCount && in_struct->pResolveAttachments) {
+        pResolveAttachments = new safe_VkAttachmentReference2KHR[colorAttachmentCount];
+        for (uint32_t i = 0; i < colorAttachmentCount; ++i) {
+            pResolveAttachments[i].initialize(&in_struct->pResolveAttachments[i]);
+        }
+    }
+    if (in_struct->pDepthStencilAttachment)
+        pDepthStencilAttachment = new safe_VkAttachmentReference2KHR(in_struct->pDepthStencilAttachment);
+    if (in_struct->pPreserveAttachments) {
+        pPreserveAttachments = new uint32_t[in_struct->preserveAttachmentCount];
+        memcpy ((void *)pPreserveAttachments, (void *)in_struct->pPreserveAttachments, sizeof(uint32_t)*in_struct->preserveAttachmentCount);
+    }
+}
+
+void safe_VkSubpassDescription2KHR::initialize(const safe_VkSubpassDescription2KHR* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    pipelineBindPoint = src->pipelineBindPoint;
+    viewMask = src->viewMask;
+    inputAttachmentCount = src->inputAttachmentCount;
+    pInputAttachments = nullptr;
+    colorAttachmentCount = src->colorAttachmentCount;
+    pColorAttachments = nullptr;
+    pResolveAttachments = nullptr;
+    pDepthStencilAttachment = nullptr;
+    preserveAttachmentCount = src->preserveAttachmentCount;
+    pPreserveAttachments = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (inputAttachmentCount && src->pInputAttachments) {
+        pInputAttachments = new safe_VkAttachmentReference2KHR[inputAttachmentCount];
+        for (uint32_t i = 0; i < inputAttachmentCount; ++i) {
+            pInputAttachments[i].initialize(&src->pInputAttachments[i]);
+        }
+    }
+    if (colorAttachmentCount && src->pColorAttachments) {
+        pColorAttachments = new safe_VkAttachmentReference2KHR[colorAttachmentCount];
+        for (uint32_t i = 0; i < colorAttachmentCount; ++i) {
+            pColorAttachments[i].initialize(&src->pColorAttachments[i]);
+        }
+    }
+    if (colorAttachmentCount && src->pResolveAttachments) {
+        pResolveAttachments = new safe_VkAttachmentReference2KHR[colorAttachmentCount];
+        for (uint32_t i = 0; i < colorAttachmentCount; ++i) {
+            pResolveAttachments[i].initialize(&src->pResolveAttachments[i]);
+        }
+    }
+    if (src->pDepthStencilAttachment)
+        pDepthStencilAttachment = new safe_VkAttachmentReference2KHR(*src->pDepthStencilAttachment);
+    if (src->pPreserveAttachments) {
+        pPreserveAttachments = new uint32_t[src->preserveAttachmentCount];
+        memcpy ((void *)pPreserveAttachments, (void *)src->pPreserveAttachments, sizeof(uint32_t)*src->preserveAttachmentCount);
+    }
+}
+
+safe_VkSubpassDependency2KHR::safe_VkSubpassDependency2KHR(const VkSubpassDependency2KHR* in_struct) :
+    sType(in_struct->sType),
+    srcSubpass(in_struct->srcSubpass),
+    dstSubpass(in_struct->dstSubpass),
+    srcStageMask(in_struct->srcStageMask),
+    dstStageMask(in_struct->dstStageMask),
+    srcAccessMask(in_struct->srcAccessMask),
+    dstAccessMask(in_struct->dstAccessMask),
+    dependencyFlags(in_struct->dependencyFlags),
+    viewOffset(in_struct->viewOffset)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkSubpassDependency2KHR::safe_VkSubpassDependency2KHR() :
+    pNext(nullptr)
+{}
+
+safe_VkSubpassDependency2KHR::safe_VkSubpassDependency2KHR(const safe_VkSubpassDependency2KHR& src)
+{
+    sType = src.sType;
+    srcSubpass = src.srcSubpass;
+    dstSubpass = src.dstSubpass;
+    srcStageMask = src.srcStageMask;
+    dstStageMask = src.dstStageMask;
+    srcAccessMask = src.srcAccessMask;
+    dstAccessMask = src.dstAccessMask;
+    dependencyFlags = src.dependencyFlags;
+    viewOffset = src.viewOffset;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkSubpassDependency2KHR& safe_VkSubpassDependency2KHR::operator=(const safe_VkSubpassDependency2KHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    srcSubpass = src.srcSubpass;
+    dstSubpass = src.dstSubpass;
+    srcStageMask = src.srcStageMask;
+    dstStageMask = src.dstStageMask;
+    srcAccessMask = src.srcAccessMask;
+    dstAccessMask = src.dstAccessMask;
+    dependencyFlags = src.dependencyFlags;
+    viewOffset = src.viewOffset;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkSubpassDependency2KHR::~safe_VkSubpassDependency2KHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSubpassDependency2KHR::initialize(const VkSubpassDependency2KHR* in_struct)
+{
+    sType = in_struct->sType;
+    srcSubpass = in_struct->srcSubpass;
+    dstSubpass = in_struct->dstSubpass;
+    srcStageMask = in_struct->srcStageMask;
+    dstStageMask = in_struct->dstStageMask;
+    srcAccessMask = in_struct->srcAccessMask;
+    dstAccessMask = in_struct->dstAccessMask;
+    dependencyFlags = in_struct->dependencyFlags;
+    viewOffset = in_struct->viewOffset;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkSubpassDependency2KHR::initialize(const safe_VkSubpassDependency2KHR* src)
+{
+    sType = src->sType;
+    srcSubpass = src->srcSubpass;
+    dstSubpass = src->dstSubpass;
+    srcStageMask = src->srcStageMask;
+    dstStageMask = src->dstStageMask;
+    srcAccessMask = src->srcAccessMask;
+    dstAccessMask = src->dstAccessMask;
+    dependencyFlags = src->dependencyFlags;
+    viewOffset = src->viewOffset;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkRenderPassCreateInfo2KHR::safe_VkRenderPassCreateInfo2KHR(const VkRenderPassCreateInfo2KHR* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    attachmentCount(in_struct->attachmentCount),
+    pAttachments(nullptr),
+    subpassCount(in_struct->subpassCount),
+    pSubpasses(nullptr),
+    dependencyCount(in_struct->dependencyCount),
+    pDependencies(nullptr),
+    correlatedViewMaskCount(in_struct->correlatedViewMaskCount),
+    pCorrelatedViewMasks(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (attachmentCount && in_struct->pAttachments) {
+        pAttachments = new safe_VkAttachmentDescription2KHR[attachmentCount];
+        for (uint32_t i = 0; i < attachmentCount; ++i) {
+            pAttachments[i].initialize(&in_struct->pAttachments[i]);
+        }
+    }
+    if (subpassCount && in_struct->pSubpasses) {
+        pSubpasses = new safe_VkSubpassDescription2KHR[subpassCount];
+        for (uint32_t i = 0; i < subpassCount; ++i) {
+            pSubpasses[i].initialize(&in_struct->pSubpasses[i]);
+        }
+    }
+    if (dependencyCount && in_struct->pDependencies) {
+        pDependencies = new safe_VkSubpassDependency2KHR[dependencyCount];
+        for (uint32_t i = 0; i < dependencyCount; ++i) {
+            pDependencies[i].initialize(&in_struct->pDependencies[i]);
+        }
+    }
+    if (in_struct->pCorrelatedViewMasks) {
+        pCorrelatedViewMasks = new uint32_t[in_struct->correlatedViewMaskCount];
+        memcpy ((void *)pCorrelatedViewMasks, (void *)in_struct->pCorrelatedViewMasks, sizeof(uint32_t)*in_struct->correlatedViewMaskCount);
+    }
+}
+
+safe_VkRenderPassCreateInfo2KHR::safe_VkRenderPassCreateInfo2KHR() :
+    pNext(nullptr),
+    pAttachments(nullptr),
+    pSubpasses(nullptr),
+    pDependencies(nullptr),
+    pCorrelatedViewMasks(nullptr)
+{}
+
+safe_VkRenderPassCreateInfo2KHR::safe_VkRenderPassCreateInfo2KHR(const safe_VkRenderPassCreateInfo2KHR& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    attachmentCount = src.attachmentCount;
+    pAttachments = nullptr;
+    subpassCount = src.subpassCount;
+    pSubpasses = nullptr;
+    dependencyCount = src.dependencyCount;
+    pDependencies = nullptr;
+    correlatedViewMaskCount = src.correlatedViewMaskCount;
+    pCorrelatedViewMasks = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (attachmentCount && src.pAttachments) {
+        pAttachments = new safe_VkAttachmentDescription2KHR[attachmentCount];
+        for (uint32_t i = 0; i < attachmentCount; ++i) {
+            pAttachments[i].initialize(&src.pAttachments[i]);
+        }
+    }
+    if (subpassCount && src.pSubpasses) {
+        pSubpasses = new safe_VkSubpassDescription2KHR[subpassCount];
+        for (uint32_t i = 0; i < subpassCount; ++i) {
+            pSubpasses[i].initialize(&src.pSubpasses[i]);
+        }
+    }
+    if (dependencyCount && src.pDependencies) {
+        pDependencies = new safe_VkSubpassDependency2KHR[dependencyCount];
+        for (uint32_t i = 0; i < dependencyCount; ++i) {
+            pDependencies[i].initialize(&src.pDependencies[i]);
+        }
+    }
+    if (src.pCorrelatedViewMasks) {
+        pCorrelatedViewMasks = new uint32_t[src.correlatedViewMaskCount];
+        memcpy ((void *)pCorrelatedViewMasks, (void *)src.pCorrelatedViewMasks, sizeof(uint32_t)*src.correlatedViewMaskCount);
+    }
+}
+
+safe_VkRenderPassCreateInfo2KHR& safe_VkRenderPassCreateInfo2KHR::operator=(const safe_VkRenderPassCreateInfo2KHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pAttachments)
+        delete[] pAttachments;
+    if (pSubpasses)
+        delete[] pSubpasses;
+    if (pDependencies)
+        delete[] pDependencies;
+    if (pCorrelatedViewMasks)
+        delete[] pCorrelatedViewMasks;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    attachmentCount = src.attachmentCount;
+    pAttachments = nullptr;
+    subpassCount = src.subpassCount;
+    pSubpasses = nullptr;
+    dependencyCount = src.dependencyCount;
+    pDependencies = nullptr;
+    correlatedViewMaskCount = src.correlatedViewMaskCount;
+    pCorrelatedViewMasks = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (attachmentCount && src.pAttachments) {
+        pAttachments = new safe_VkAttachmentDescription2KHR[attachmentCount];
+        for (uint32_t i = 0; i < attachmentCount; ++i) {
+            pAttachments[i].initialize(&src.pAttachments[i]);
+        }
+    }
+    if (subpassCount && src.pSubpasses) {
+        pSubpasses = new safe_VkSubpassDescription2KHR[subpassCount];
+        for (uint32_t i = 0; i < subpassCount; ++i) {
+            pSubpasses[i].initialize(&src.pSubpasses[i]);
+        }
+    }
+    if (dependencyCount && src.pDependencies) {
+        pDependencies = new safe_VkSubpassDependency2KHR[dependencyCount];
+        for (uint32_t i = 0; i < dependencyCount; ++i) {
+            pDependencies[i].initialize(&src.pDependencies[i]);
+        }
+    }
+    if (src.pCorrelatedViewMasks) {
+        pCorrelatedViewMasks = new uint32_t[src.correlatedViewMaskCount];
+        memcpy ((void *)pCorrelatedViewMasks, (void *)src.pCorrelatedViewMasks, sizeof(uint32_t)*src.correlatedViewMaskCount);
+    }
+
+    return *this;
+}
+
+safe_VkRenderPassCreateInfo2KHR::~safe_VkRenderPassCreateInfo2KHR()
+{
+    if (pAttachments)
+        delete[] pAttachments;
+    if (pSubpasses)
+        delete[] pSubpasses;
+    if (pDependencies)
+        delete[] pDependencies;
+    if (pCorrelatedViewMasks)
+        delete[] pCorrelatedViewMasks;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkRenderPassCreateInfo2KHR::initialize(const VkRenderPassCreateInfo2KHR* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    attachmentCount = in_struct->attachmentCount;
+    pAttachments = nullptr;
+    subpassCount = in_struct->subpassCount;
+    pSubpasses = nullptr;
+    dependencyCount = in_struct->dependencyCount;
+    pDependencies = nullptr;
+    correlatedViewMaskCount = in_struct->correlatedViewMaskCount;
+    pCorrelatedViewMasks = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (attachmentCount && in_struct->pAttachments) {
+        pAttachments = new safe_VkAttachmentDescription2KHR[attachmentCount];
+        for (uint32_t i = 0; i < attachmentCount; ++i) {
+            pAttachments[i].initialize(&in_struct->pAttachments[i]);
+        }
+    }
+    if (subpassCount && in_struct->pSubpasses) {
+        pSubpasses = new safe_VkSubpassDescription2KHR[subpassCount];
+        for (uint32_t i = 0; i < subpassCount; ++i) {
+            pSubpasses[i].initialize(&in_struct->pSubpasses[i]);
+        }
+    }
+    if (dependencyCount && in_struct->pDependencies) {
+        pDependencies = new safe_VkSubpassDependency2KHR[dependencyCount];
+        for (uint32_t i = 0; i < dependencyCount; ++i) {
+            pDependencies[i].initialize(&in_struct->pDependencies[i]);
+        }
+    }
+    if (in_struct->pCorrelatedViewMasks) {
+        pCorrelatedViewMasks = new uint32_t[in_struct->correlatedViewMaskCount];
+        memcpy ((void *)pCorrelatedViewMasks, (void *)in_struct->pCorrelatedViewMasks, sizeof(uint32_t)*in_struct->correlatedViewMaskCount);
+    }
+}
+
+void safe_VkRenderPassCreateInfo2KHR::initialize(const safe_VkRenderPassCreateInfo2KHR* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    attachmentCount = src->attachmentCount;
+    pAttachments = nullptr;
+    subpassCount = src->subpassCount;
+    pSubpasses = nullptr;
+    dependencyCount = src->dependencyCount;
+    pDependencies = nullptr;
+    correlatedViewMaskCount = src->correlatedViewMaskCount;
+    pCorrelatedViewMasks = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (attachmentCount && src->pAttachments) {
+        pAttachments = new safe_VkAttachmentDescription2KHR[attachmentCount];
+        for (uint32_t i = 0; i < attachmentCount; ++i) {
+            pAttachments[i].initialize(&src->pAttachments[i]);
+        }
+    }
+    if (subpassCount && src->pSubpasses) {
+        pSubpasses = new safe_VkSubpassDescription2KHR[subpassCount];
+        for (uint32_t i = 0; i < subpassCount; ++i) {
+            pSubpasses[i].initialize(&src->pSubpasses[i]);
+        }
+    }
+    if (dependencyCount && src->pDependencies) {
+        pDependencies = new safe_VkSubpassDependency2KHR[dependencyCount];
+        for (uint32_t i = 0; i < dependencyCount; ++i) {
+            pDependencies[i].initialize(&src->pDependencies[i]);
+        }
+    }
+    if (src->pCorrelatedViewMasks) {
+        pCorrelatedViewMasks = new uint32_t[src->correlatedViewMaskCount];
+        memcpy ((void *)pCorrelatedViewMasks, (void *)src->pCorrelatedViewMasks, sizeof(uint32_t)*src->correlatedViewMaskCount);
+    }
+}
+
+safe_VkSubpassBeginInfoKHR::safe_VkSubpassBeginInfoKHR(const VkSubpassBeginInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    contents(in_struct->contents)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkSubpassBeginInfoKHR::safe_VkSubpassBeginInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkSubpassBeginInfoKHR::safe_VkSubpassBeginInfoKHR(const safe_VkSubpassBeginInfoKHR& src)
+{
+    sType = src.sType;
+    contents = src.contents;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkSubpassBeginInfoKHR& safe_VkSubpassBeginInfoKHR::operator=(const safe_VkSubpassBeginInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    contents = src.contents;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkSubpassBeginInfoKHR::~safe_VkSubpassBeginInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSubpassBeginInfoKHR::initialize(const VkSubpassBeginInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    contents = in_struct->contents;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkSubpassBeginInfoKHR::initialize(const safe_VkSubpassBeginInfoKHR* src)
+{
+    sType = src->sType;
+    contents = src->contents;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkSubpassEndInfoKHR::safe_VkSubpassEndInfoKHR(const VkSubpassEndInfoKHR* in_struct) :
+    sType(in_struct->sType)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkSubpassEndInfoKHR::safe_VkSubpassEndInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkSubpassEndInfoKHR::safe_VkSubpassEndInfoKHR(const safe_VkSubpassEndInfoKHR& src)
+{
+    sType = src.sType;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkSubpassEndInfoKHR& safe_VkSubpassEndInfoKHR::operator=(const safe_VkSubpassEndInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkSubpassEndInfoKHR::~safe_VkSubpassEndInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSubpassEndInfoKHR::initialize(const VkSubpassEndInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkSubpassEndInfoKHR::initialize(const safe_VkSubpassEndInfoKHR* src)
+{
+    sType = src->sType;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkSharedPresentSurfaceCapabilitiesKHR::safe_VkSharedPresentSurfaceCapabilitiesKHR(const VkSharedPresentSurfaceCapabilitiesKHR* in_struct) :
+    sType(in_struct->sType),
+    sharedPresentSupportedUsageFlags(in_struct->sharedPresentSupportedUsageFlags)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkSharedPresentSurfaceCapabilitiesKHR::safe_VkSharedPresentSurfaceCapabilitiesKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkSharedPresentSurfaceCapabilitiesKHR::safe_VkSharedPresentSurfaceCapabilitiesKHR(const safe_VkSharedPresentSurfaceCapabilitiesKHR& src)
+{
+    sType = src.sType;
+    sharedPresentSupportedUsageFlags = src.sharedPresentSupportedUsageFlags;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkSharedPresentSurfaceCapabilitiesKHR& safe_VkSharedPresentSurfaceCapabilitiesKHR::operator=(const safe_VkSharedPresentSurfaceCapabilitiesKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    sharedPresentSupportedUsageFlags = src.sharedPresentSupportedUsageFlags;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkSharedPresentSurfaceCapabilitiesKHR::~safe_VkSharedPresentSurfaceCapabilitiesKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSharedPresentSurfaceCapabilitiesKHR::initialize(const VkSharedPresentSurfaceCapabilitiesKHR* in_struct)
+{
+    sType = in_struct->sType;
+    sharedPresentSupportedUsageFlags = in_struct->sharedPresentSupportedUsageFlags;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkSharedPresentSurfaceCapabilitiesKHR::initialize(const safe_VkSharedPresentSurfaceCapabilitiesKHR* src)
+{
+    sType = src->sType;
+    sharedPresentSupportedUsageFlags = src->sharedPresentSupportedUsageFlags;
+    pNext = SafePnextCopy(src->pNext);
+}
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+
+safe_VkImportFenceWin32HandleInfoKHR::safe_VkImportFenceWin32HandleInfoKHR(const VkImportFenceWin32HandleInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    fence(in_struct->fence),
+    flags(in_struct->flags),
+    handleType(in_struct->handleType),
+    handle(in_struct->handle),
+    name(in_struct->name)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkImportFenceWin32HandleInfoKHR::safe_VkImportFenceWin32HandleInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkImportFenceWin32HandleInfoKHR::safe_VkImportFenceWin32HandleInfoKHR(const safe_VkImportFenceWin32HandleInfoKHR& src)
+{
+    sType = src.sType;
+    fence = src.fence;
+    flags = src.flags;
+    handleType = src.handleType;
+    handle = src.handle;
+    name = src.name;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkImportFenceWin32HandleInfoKHR& safe_VkImportFenceWin32HandleInfoKHR::operator=(const safe_VkImportFenceWin32HandleInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    fence = src.fence;
+    flags = src.flags;
+    handleType = src.handleType;
+    handle = src.handle;
+    name = src.name;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkImportFenceWin32HandleInfoKHR::~safe_VkImportFenceWin32HandleInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkImportFenceWin32HandleInfoKHR::initialize(const VkImportFenceWin32HandleInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    fence = in_struct->fence;
+    flags = in_struct->flags;
+    handleType = in_struct->handleType;
+    handle = in_struct->handle;
+    name = in_struct->name;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkImportFenceWin32HandleInfoKHR::initialize(const safe_VkImportFenceWin32HandleInfoKHR* src)
+{
+    sType = src->sType;
+    fence = src->fence;
+    flags = src->flags;
+    handleType = src->handleType;
+    handle = src->handle;
+    name = src->name;
+    pNext = SafePnextCopy(src->pNext);
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+
+safe_VkExportFenceWin32HandleInfoKHR::safe_VkExportFenceWin32HandleInfoKHR(const VkExportFenceWin32HandleInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    pAttributes(nullptr),
+    dwAccess(in_struct->dwAccess),
+    name(in_struct->name)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pAttributes) {
+        pAttributes = new SECURITY_ATTRIBUTES(*in_struct->pAttributes);
+    }
+}
+
+safe_VkExportFenceWin32HandleInfoKHR::safe_VkExportFenceWin32HandleInfoKHR() :
+    pNext(nullptr),
+    pAttributes(nullptr)
+{}
+
+safe_VkExportFenceWin32HandleInfoKHR::safe_VkExportFenceWin32HandleInfoKHR(const safe_VkExportFenceWin32HandleInfoKHR& src)
+{
+    sType = src.sType;
+    pAttributes = nullptr;
+    dwAccess = src.dwAccess;
+    name = src.name;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pAttributes) {
+        pAttributes = new SECURITY_ATTRIBUTES(*src.pAttributes);
+    }
+}
+
+safe_VkExportFenceWin32HandleInfoKHR& safe_VkExportFenceWin32HandleInfoKHR::operator=(const safe_VkExportFenceWin32HandleInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pAttributes)
+        delete pAttributes;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    pAttributes = nullptr;
+    dwAccess = src.dwAccess;
+    name = src.name;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pAttributes) {
+        pAttributes = new SECURITY_ATTRIBUTES(*src.pAttributes);
+    }
+
+    return *this;
+}
+
+safe_VkExportFenceWin32HandleInfoKHR::~safe_VkExportFenceWin32HandleInfoKHR()
+{
+    if (pAttributes)
+        delete pAttributes;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkExportFenceWin32HandleInfoKHR::initialize(const VkExportFenceWin32HandleInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    pAttributes = nullptr;
+    dwAccess = in_struct->dwAccess;
+    name = in_struct->name;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pAttributes) {
+        pAttributes = new SECURITY_ATTRIBUTES(*in_struct->pAttributes);
+    }
+}
+
+void safe_VkExportFenceWin32HandleInfoKHR::initialize(const safe_VkExportFenceWin32HandleInfoKHR* src)
+{
+    sType = src->sType;
+    pAttributes = nullptr;
+    dwAccess = src->dwAccess;
+    name = src->name;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pAttributes) {
+        pAttributes = new SECURITY_ATTRIBUTES(*src->pAttributes);
+    }
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+
+safe_VkFenceGetWin32HandleInfoKHR::safe_VkFenceGetWin32HandleInfoKHR(const VkFenceGetWin32HandleInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    fence(in_struct->fence),
+    handleType(in_struct->handleType)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkFenceGetWin32HandleInfoKHR::safe_VkFenceGetWin32HandleInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkFenceGetWin32HandleInfoKHR::safe_VkFenceGetWin32HandleInfoKHR(const safe_VkFenceGetWin32HandleInfoKHR& src)
+{
+    sType = src.sType;
+    fence = src.fence;
+    handleType = src.handleType;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkFenceGetWin32HandleInfoKHR& safe_VkFenceGetWin32HandleInfoKHR::operator=(const safe_VkFenceGetWin32HandleInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    fence = src.fence;
+    handleType = src.handleType;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkFenceGetWin32HandleInfoKHR::~safe_VkFenceGetWin32HandleInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkFenceGetWin32HandleInfoKHR::initialize(const VkFenceGetWin32HandleInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    fence = in_struct->fence;
+    handleType = in_struct->handleType;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkFenceGetWin32HandleInfoKHR::initialize(const safe_VkFenceGetWin32HandleInfoKHR* src)
+{
+    sType = src->sType;
+    fence = src->fence;
+    handleType = src->handleType;
+    pNext = SafePnextCopy(src->pNext);
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+
+safe_VkImportFenceFdInfoKHR::safe_VkImportFenceFdInfoKHR(const VkImportFenceFdInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    fence(in_struct->fence),
+    flags(in_struct->flags),
+    handleType(in_struct->handleType),
+    fd(in_struct->fd)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkImportFenceFdInfoKHR::safe_VkImportFenceFdInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkImportFenceFdInfoKHR::safe_VkImportFenceFdInfoKHR(const safe_VkImportFenceFdInfoKHR& src)
+{
+    sType = src.sType;
+    fence = src.fence;
+    flags = src.flags;
+    handleType = src.handleType;
+    fd = src.fd;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkImportFenceFdInfoKHR& safe_VkImportFenceFdInfoKHR::operator=(const safe_VkImportFenceFdInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    fence = src.fence;
+    flags = src.flags;
+    handleType = src.handleType;
+    fd = src.fd;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkImportFenceFdInfoKHR::~safe_VkImportFenceFdInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkImportFenceFdInfoKHR::initialize(const VkImportFenceFdInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    fence = in_struct->fence;
+    flags = in_struct->flags;
+    handleType = in_struct->handleType;
+    fd = in_struct->fd;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkImportFenceFdInfoKHR::initialize(const safe_VkImportFenceFdInfoKHR* src)
+{
+    sType = src->sType;
+    fence = src->fence;
+    flags = src->flags;
+    handleType = src->handleType;
+    fd = src->fd;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkFenceGetFdInfoKHR::safe_VkFenceGetFdInfoKHR(const VkFenceGetFdInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    fence(in_struct->fence),
+    handleType(in_struct->handleType)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkFenceGetFdInfoKHR::safe_VkFenceGetFdInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkFenceGetFdInfoKHR::safe_VkFenceGetFdInfoKHR(const safe_VkFenceGetFdInfoKHR& src)
+{
+    sType = src.sType;
+    fence = src.fence;
+    handleType = src.handleType;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkFenceGetFdInfoKHR& safe_VkFenceGetFdInfoKHR::operator=(const safe_VkFenceGetFdInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    fence = src.fence;
+    handleType = src.handleType;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkFenceGetFdInfoKHR::~safe_VkFenceGetFdInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkFenceGetFdInfoKHR::initialize(const VkFenceGetFdInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    fence = in_struct->fence;
+    handleType = in_struct->handleType;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkFenceGetFdInfoKHR::initialize(const safe_VkFenceGetFdInfoKHR* src)
+{
+    sType = src->sType;
+    fence = src->fence;
+    handleType = src->handleType;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDevicePerformanceQueryFeaturesKHR::safe_VkPhysicalDevicePerformanceQueryFeaturesKHR(const VkPhysicalDevicePerformanceQueryFeaturesKHR* in_struct) :
+    sType(in_struct->sType),
+    performanceCounterQueryPools(in_struct->performanceCounterQueryPools),
+    performanceCounterMultipleQueryPools(in_struct->performanceCounterMultipleQueryPools)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDevicePerformanceQueryFeaturesKHR::safe_VkPhysicalDevicePerformanceQueryFeaturesKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDevicePerformanceQueryFeaturesKHR::safe_VkPhysicalDevicePerformanceQueryFeaturesKHR(const safe_VkPhysicalDevicePerformanceQueryFeaturesKHR& src)
+{
+    sType = src.sType;
+    performanceCounterQueryPools = src.performanceCounterQueryPools;
+    performanceCounterMultipleQueryPools = src.performanceCounterMultipleQueryPools;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDevicePerformanceQueryFeaturesKHR& safe_VkPhysicalDevicePerformanceQueryFeaturesKHR::operator=(const safe_VkPhysicalDevicePerformanceQueryFeaturesKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    performanceCounterQueryPools = src.performanceCounterQueryPools;
+    performanceCounterMultipleQueryPools = src.performanceCounterMultipleQueryPools;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDevicePerformanceQueryFeaturesKHR::~safe_VkPhysicalDevicePerformanceQueryFeaturesKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDevicePerformanceQueryFeaturesKHR::initialize(const VkPhysicalDevicePerformanceQueryFeaturesKHR* in_struct)
+{
+    sType = in_struct->sType;
+    performanceCounterQueryPools = in_struct->performanceCounterQueryPools;
+    performanceCounterMultipleQueryPools = in_struct->performanceCounterMultipleQueryPools;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDevicePerformanceQueryFeaturesKHR::initialize(const safe_VkPhysicalDevicePerformanceQueryFeaturesKHR* src)
+{
+    sType = src->sType;
+    performanceCounterQueryPools = src->performanceCounterQueryPools;
+    performanceCounterMultipleQueryPools = src->performanceCounterMultipleQueryPools;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDevicePerformanceQueryPropertiesKHR::safe_VkPhysicalDevicePerformanceQueryPropertiesKHR(const VkPhysicalDevicePerformanceQueryPropertiesKHR* in_struct) :
+    sType(in_struct->sType),
+    allowCommandBufferQueryCopies(in_struct->allowCommandBufferQueryCopies)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDevicePerformanceQueryPropertiesKHR::safe_VkPhysicalDevicePerformanceQueryPropertiesKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDevicePerformanceQueryPropertiesKHR::safe_VkPhysicalDevicePerformanceQueryPropertiesKHR(const safe_VkPhysicalDevicePerformanceQueryPropertiesKHR& src)
+{
+    sType = src.sType;
+    allowCommandBufferQueryCopies = src.allowCommandBufferQueryCopies;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDevicePerformanceQueryPropertiesKHR& safe_VkPhysicalDevicePerformanceQueryPropertiesKHR::operator=(const safe_VkPhysicalDevicePerformanceQueryPropertiesKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    allowCommandBufferQueryCopies = src.allowCommandBufferQueryCopies;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDevicePerformanceQueryPropertiesKHR::~safe_VkPhysicalDevicePerformanceQueryPropertiesKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDevicePerformanceQueryPropertiesKHR::initialize(const VkPhysicalDevicePerformanceQueryPropertiesKHR* in_struct)
+{
+    sType = in_struct->sType;
+    allowCommandBufferQueryCopies = in_struct->allowCommandBufferQueryCopies;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDevicePerformanceQueryPropertiesKHR::initialize(const safe_VkPhysicalDevicePerformanceQueryPropertiesKHR* src)
+{
+    sType = src->sType;
+    allowCommandBufferQueryCopies = src->allowCommandBufferQueryCopies;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPerformanceCounterKHR::safe_VkPerformanceCounterKHR(const VkPerformanceCounterKHR* in_struct) :
+    sType(in_struct->sType),
+    unit(in_struct->unit),
+    scope(in_struct->scope),
+    storage(in_struct->storage)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    for (uint32_t i = 0; i < VK_UUID_SIZE; ++i) {
+        uuid[i] = in_struct->uuid[i];
+    }
+}
+
+safe_VkPerformanceCounterKHR::safe_VkPerformanceCounterKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkPerformanceCounterKHR::safe_VkPerformanceCounterKHR(const safe_VkPerformanceCounterKHR& src)
+{
+    sType = src.sType;
+    unit = src.unit;
+    scope = src.scope;
+    storage = src.storage;
+    pNext = SafePnextCopy(src.pNext);
+    for (uint32_t i = 0; i < VK_UUID_SIZE; ++i) {
+        uuid[i] = src.uuid[i];
+    }
+}
+
+safe_VkPerformanceCounterKHR& safe_VkPerformanceCounterKHR::operator=(const safe_VkPerformanceCounterKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    unit = src.unit;
+    scope = src.scope;
+    storage = src.storage;
+    pNext = SafePnextCopy(src.pNext);
+    for (uint32_t i = 0; i < VK_UUID_SIZE; ++i) {
+        uuid[i] = src.uuid[i];
+    }
+
+    return *this;
+}
+
+safe_VkPerformanceCounterKHR::~safe_VkPerformanceCounterKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPerformanceCounterKHR::initialize(const VkPerformanceCounterKHR* in_struct)
+{
+    sType = in_struct->sType;
+    unit = in_struct->unit;
+    scope = in_struct->scope;
+    storage = in_struct->storage;
+    pNext = SafePnextCopy(in_struct->pNext);
+    for (uint32_t i = 0; i < VK_UUID_SIZE; ++i) {
+        uuid[i] = in_struct->uuid[i];
+    }
+}
+
+void safe_VkPerformanceCounterKHR::initialize(const safe_VkPerformanceCounterKHR* src)
+{
+    sType = src->sType;
+    unit = src->unit;
+    scope = src->scope;
+    storage = src->storage;
+    pNext = SafePnextCopy(src->pNext);
+    for (uint32_t i = 0; i < VK_UUID_SIZE; ++i) {
+        uuid[i] = src->uuid[i];
+    }
+}
+
+safe_VkPerformanceCounterDescriptionKHR::safe_VkPerformanceCounterDescriptionKHR(const VkPerformanceCounterDescriptionKHR* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        name[i] = in_struct->name[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        category[i] = in_struct->category[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        description[i] = in_struct->description[i];
+    }
+}
+
+safe_VkPerformanceCounterDescriptionKHR::safe_VkPerformanceCounterDescriptionKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkPerformanceCounterDescriptionKHR::safe_VkPerformanceCounterDescriptionKHR(const safe_VkPerformanceCounterDescriptionKHR& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    pNext = SafePnextCopy(src.pNext);
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        name[i] = src.name[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        category[i] = src.category[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        description[i] = src.description[i];
+    }
+}
+
+safe_VkPerformanceCounterDescriptionKHR& safe_VkPerformanceCounterDescriptionKHR::operator=(const safe_VkPerformanceCounterDescriptionKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    pNext = SafePnextCopy(src.pNext);
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        name[i] = src.name[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        category[i] = src.category[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        description[i] = src.description[i];
+    }
+
+    return *this;
+}
+
+safe_VkPerformanceCounterDescriptionKHR::~safe_VkPerformanceCounterDescriptionKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPerformanceCounterDescriptionKHR::initialize(const VkPerformanceCounterDescriptionKHR* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    pNext = SafePnextCopy(in_struct->pNext);
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        name[i] = in_struct->name[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        category[i] = in_struct->category[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        description[i] = in_struct->description[i];
+    }
+}
+
+void safe_VkPerformanceCounterDescriptionKHR::initialize(const safe_VkPerformanceCounterDescriptionKHR* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    pNext = SafePnextCopy(src->pNext);
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        name[i] = src->name[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        category[i] = src->category[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        description[i] = src->description[i];
+    }
+}
+
+safe_VkQueryPoolPerformanceCreateInfoKHR::safe_VkQueryPoolPerformanceCreateInfoKHR(const VkQueryPoolPerformanceCreateInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    queueFamilyIndex(in_struct->queueFamilyIndex),
+    counterIndexCount(in_struct->counterIndexCount),
+    pCounterIndices(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pCounterIndices) {
+        pCounterIndices = new uint32_t[in_struct->counterIndexCount];
+        memcpy ((void *)pCounterIndices, (void *)in_struct->pCounterIndices, sizeof(uint32_t)*in_struct->counterIndexCount);
+    }
+}
+
+safe_VkQueryPoolPerformanceCreateInfoKHR::safe_VkQueryPoolPerformanceCreateInfoKHR() :
+    pNext(nullptr),
+    pCounterIndices(nullptr)
+{}
+
+safe_VkQueryPoolPerformanceCreateInfoKHR::safe_VkQueryPoolPerformanceCreateInfoKHR(const safe_VkQueryPoolPerformanceCreateInfoKHR& src)
+{
+    sType = src.sType;
+    queueFamilyIndex = src.queueFamilyIndex;
+    counterIndexCount = src.counterIndexCount;
+    pCounterIndices = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pCounterIndices) {
+        pCounterIndices = new uint32_t[src.counterIndexCount];
+        memcpy ((void *)pCounterIndices, (void *)src.pCounterIndices, sizeof(uint32_t)*src.counterIndexCount);
+    }
+}
+
+safe_VkQueryPoolPerformanceCreateInfoKHR& safe_VkQueryPoolPerformanceCreateInfoKHR::operator=(const safe_VkQueryPoolPerformanceCreateInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pCounterIndices)
+        delete[] pCounterIndices;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    queueFamilyIndex = src.queueFamilyIndex;
+    counterIndexCount = src.counterIndexCount;
+    pCounterIndices = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pCounterIndices) {
+        pCounterIndices = new uint32_t[src.counterIndexCount];
+        memcpy ((void *)pCounterIndices, (void *)src.pCounterIndices, sizeof(uint32_t)*src.counterIndexCount);
+    }
+
+    return *this;
+}
+
+safe_VkQueryPoolPerformanceCreateInfoKHR::~safe_VkQueryPoolPerformanceCreateInfoKHR()
+{
+    if (pCounterIndices)
+        delete[] pCounterIndices;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkQueryPoolPerformanceCreateInfoKHR::initialize(const VkQueryPoolPerformanceCreateInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    queueFamilyIndex = in_struct->queueFamilyIndex;
+    counterIndexCount = in_struct->counterIndexCount;
+    pCounterIndices = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pCounterIndices) {
+        pCounterIndices = new uint32_t[in_struct->counterIndexCount];
+        memcpy ((void *)pCounterIndices, (void *)in_struct->pCounterIndices, sizeof(uint32_t)*in_struct->counterIndexCount);
+    }
+}
+
+void safe_VkQueryPoolPerformanceCreateInfoKHR::initialize(const safe_VkQueryPoolPerformanceCreateInfoKHR* src)
+{
+    sType = src->sType;
+    queueFamilyIndex = src->queueFamilyIndex;
+    counterIndexCount = src->counterIndexCount;
+    pCounterIndices = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pCounterIndices) {
+        pCounterIndices = new uint32_t[src->counterIndexCount];
+        memcpy ((void *)pCounterIndices, (void *)src->pCounterIndices, sizeof(uint32_t)*src->counterIndexCount);
+    }
+}
+
+safe_VkAcquireProfilingLockInfoKHR::safe_VkAcquireProfilingLockInfoKHR(const VkAcquireProfilingLockInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    timeout(in_struct->timeout)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkAcquireProfilingLockInfoKHR::safe_VkAcquireProfilingLockInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkAcquireProfilingLockInfoKHR::safe_VkAcquireProfilingLockInfoKHR(const safe_VkAcquireProfilingLockInfoKHR& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    timeout = src.timeout;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkAcquireProfilingLockInfoKHR& safe_VkAcquireProfilingLockInfoKHR::operator=(const safe_VkAcquireProfilingLockInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    timeout = src.timeout;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkAcquireProfilingLockInfoKHR::~safe_VkAcquireProfilingLockInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkAcquireProfilingLockInfoKHR::initialize(const VkAcquireProfilingLockInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    timeout = in_struct->timeout;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkAcquireProfilingLockInfoKHR::initialize(const safe_VkAcquireProfilingLockInfoKHR* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    timeout = src->timeout;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPerformanceQuerySubmitInfoKHR::safe_VkPerformanceQuerySubmitInfoKHR(const VkPerformanceQuerySubmitInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    counterPassIndex(in_struct->counterPassIndex)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPerformanceQuerySubmitInfoKHR::safe_VkPerformanceQuerySubmitInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkPerformanceQuerySubmitInfoKHR::safe_VkPerformanceQuerySubmitInfoKHR(const safe_VkPerformanceQuerySubmitInfoKHR& src)
+{
+    sType = src.sType;
+    counterPassIndex = src.counterPassIndex;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPerformanceQuerySubmitInfoKHR& safe_VkPerformanceQuerySubmitInfoKHR::operator=(const safe_VkPerformanceQuerySubmitInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    counterPassIndex = src.counterPassIndex;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPerformanceQuerySubmitInfoKHR::~safe_VkPerformanceQuerySubmitInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPerformanceQuerySubmitInfoKHR::initialize(const VkPerformanceQuerySubmitInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    counterPassIndex = in_struct->counterPassIndex;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPerformanceQuerySubmitInfoKHR::initialize(const safe_VkPerformanceQuerySubmitInfoKHR* src)
+{
+    sType = src->sType;
+    counterPassIndex = src->counterPassIndex;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceSurfaceInfo2KHR::safe_VkPhysicalDeviceSurfaceInfo2KHR(const VkPhysicalDeviceSurfaceInfo2KHR* in_struct) :
+    sType(in_struct->sType),
+    surface(in_struct->surface)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceSurfaceInfo2KHR::safe_VkPhysicalDeviceSurfaceInfo2KHR() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceSurfaceInfo2KHR::safe_VkPhysicalDeviceSurfaceInfo2KHR(const safe_VkPhysicalDeviceSurfaceInfo2KHR& src)
+{
+    sType = src.sType;
+    surface = src.surface;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceSurfaceInfo2KHR& safe_VkPhysicalDeviceSurfaceInfo2KHR::operator=(const safe_VkPhysicalDeviceSurfaceInfo2KHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    surface = src.surface;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceSurfaceInfo2KHR::~safe_VkPhysicalDeviceSurfaceInfo2KHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceSurfaceInfo2KHR::initialize(const VkPhysicalDeviceSurfaceInfo2KHR* in_struct)
+{
+    sType = in_struct->sType;
+    surface = in_struct->surface;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceSurfaceInfo2KHR::initialize(const safe_VkPhysicalDeviceSurfaceInfo2KHR* src)
+{
+    sType = src->sType;
+    surface = src->surface;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkSurfaceCapabilities2KHR::safe_VkSurfaceCapabilities2KHR(const VkSurfaceCapabilities2KHR* in_struct) :
+    sType(in_struct->sType),
+    surfaceCapabilities(in_struct->surfaceCapabilities)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkSurfaceCapabilities2KHR::safe_VkSurfaceCapabilities2KHR() :
+    pNext(nullptr)
+{}
+
+safe_VkSurfaceCapabilities2KHR::safe_VkSurfaceCapabilities2KHR(const safe_VkSurfaceCapabilities2KHR& src)
+{
+    sType = src.sType;
+    surfaceCapabilities = src.surfaceCapabilities;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkSurfaceCapabilities2KHR& safe_VkSurfaceCapabilities2KHR::operator=(const safe_VkSurfaceCapabilities2KHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    surfaceCapabilities = src.surfaceCapabilities;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkSurfaceCapabilities2KHR::~safe_VkSurfaceCapabilities2KHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSurfaceCapabilities2KHR::initialize(const VkSurfaceCapabilities2KHR* in_struct)
+{
+    sType = in_struct->sType;
+    surfaceCapabilities = in_struct->surfaceCapabilities;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkSurfaceCapabilities2KHR::initialize(const safe_VkSurfaceCapabilities2KHR* src)
+{
+    sType = src->sType;
+    surfaceCapabilities = src->surfaceCapabilities;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkSurfaceFormat2KHR::safe_VkSurfaceFormat2KHR(const VkSurfaceFormat2KHR* in_struct) :
+    sType(in_struct->sType),
+    surfaceFormat(in_struct->surfaceFormat)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkSurfaceFormat2KHR::safe_VkSurfaceFormat2KHR() :
+    pNext(nullptr)
+{}
+
+safe_VkSurfaceFormat2KHR::safe_VkSurfaceFormat2KHR(const safe_VkSurfaceFormat2KHR& src)
+{
+    sType = src.sType;
+    surfaceFormat = src.surfaceFormat;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkSurfaceFormat2KHR& safe_VkSurfaceFormat2KHR::operator=(const safe_VkSurfaceFormat2KHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    surfaceFormat = src.surfaceFormat;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkSurfaceFormat2KHR::~safe_VkSurfaceFormat2KHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSurfaceFormat2KHR::initialize(const VkSurfaceFormat2KHR* in_struct)
+{
+    sType = in_struct->sType;
+    surfaceFormat = in_struct->surfaceFormat;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkSurfaceFormat2KHR::initialize(const safe_VkSurfaceFormat2KHR* src)
+{
+    sType = src->sType;
+    surfaceFormat = src->surfaceFormat;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDisplayProperties2KHR::safe_VkDisplayProperties2KHR(const VkDisplayProperties2KHR* in_struct) :
+    sType(in_struct->sType),
+    displayProperties(&in_struct->displayProperties)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDisplayProperties2KHR::safe_VkDisplayProperties2KHR() :
+    pNext(nullptr)
+{}
+
+safe_VkDisplayProperties2KHR::safe_VkDisplayProperties2KHR(const safe_VkDisplayProperties2KHR& src)
+{
+    sType = src.sType;
+    displayProperties.initialize(&src.displayProperties);
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDisplayProperties2KHR& safe_VkDisplayProperties2KHR::operator=(const safe_VkDisplayProperties2KHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    displayProperties.initialize(&src.displayProperties);
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDisplayProperties2KHR::~safe_VkDisplayProperties2KHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDisplayProperties2KHR::initialize(const VkDisplayProperties2KHR* in_struct)
+{
+    sType = in_struct->sType;
+    displayProperties.initialize(&in_struct->displayProperties);
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDisplayProperties2KHR::initialize(const safe_VkDisplayProperties2KHR* src)
+{
+    sType = src->sType;
+    displayProperties.initialize(&src->displayProperties);
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDisplayPlaneProperties2KHR::safe_VkDisplayPlaneProperties2KHR(const VkDisplayPlaneProperties2KHR* in_struct) :
+    sType(in_struct->sType),
+    displayPlaneProperties(in_struct->displayPlaneProperties)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDisplayPlaneProperties2KHR::safe_VkDisplayPlaneProperties2KHR() :
+    pNext(nullptr)
+{}
+
+safe_VkDisplayPlaneProperties2KHR::safe_VkDisplayPlaneProperties2KHR(const safe_VkDisplayPlaneProperties2KHR& src)
+{
+    sType = src.sType;
+    displayPlaneProperties = src.displayPlaneProperties;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDisplayPlaneProperties2KHR& safe_VkDisplayPlaneProperties2KHR::operator=(const safe_VkDisplayPlaneProperties2KHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    displayPlaneProperties = src.displayPlaneProperties;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDisplayPlaneProperties2KHR::~safe_VkDisplayPlaneProperties2KHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDisplayPlaneProperties2KHR::initialize(const VkDisplayPlaneProperties2KHR* in_struct)
+{
+    sType = in_struct->sType;
+    displayPlaneProperties = in_struct->displayPlaneProperties;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDisplayPlaneProperties2KHR::initialize(const safe_VkDisplayPlaneProperties2KHR* src)
+{
+    sType = src->sType;
+    displayPlaneProperties = src->displayPlaneProperties;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDisplayModeProperties2KHR::safe_VkDisplayModeProperties2KHR(const VkDisplayModeProperties2KHR* in_struct) :
+    sType(in_struct->sType),
+    displayModeProperties(in_struct->displayModeProperties)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDisplayModeProperties2KHR::safe_VkDisplayModeProperties2KHR() :
+    pNext(nullptr)
+{}
+
+safe_VkDisplayModeProperties2KHR::safe_VkDisplayModeProperties2KHR(const safe_VkDisplayModeProperties2KHR& src)
+{
+    sType = src.sType;
+    displayModeProperties = src.displayModeProperties;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDisplayModeProperties2KHR& safe_VkDisplayModeProperties2KHR::operator=(const safe_VkDisplayModeProperties2KHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    displayModeProperties = src.displayModeProperties;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDisplayModeProperties2KHR::~safe_VkDisplayModeProperties2KHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDisplayModeProperties2KHR::initialize(const VkDisplayModeProperties2KHR* in_struct)
+{
+    sType = in_struct->sType;
+    displayModeProperties = in_struct->displayModeProperties;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDisplayModeProperties2KHR::initialize(const safe_VkDisplayModeProperties2KHR* src)
+{
+    sType = src->sType;
+    displayModeProperties = src->displayModeProperties;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDisplayPlaneInfo2KHR::safe_VkDisplayPlaneInfo2KHR(const VkDisplayPlaneInfo2KHR* in_struct) :
+    sType(in_struct->sType),
+    mode(in_struct->mode),
+    planeIndex(in_struct->planeIndex)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDisplayPlaneInfo2KHR::safe_VkDisplayPlaneInfo2KHR() :
+    pNext(nullptr)
+{}
+
+safe_VkDisplayPlaneInfo2KHR::safe_VkDisplayPlaneInfo2KHR(const safe_VkDisplayPlaneInfo2KHR& src)
+{
+    sType = src.sType;
+    mode = src.mode;
+    planeIndex = src.planeIndex;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDisplayPlaneInfo2KHR& safe_VkDisplayPlaneInfo2KHR::operator=(const safe_VkDisplayPlaneInfo2KHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    mode = src.mode;
+    planeIndex = src.planeIndex;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDisplayPlaneInfo2KHR::~safe_VkDisplayPlaneInfo2KHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDisplayPlaneInfo2KHR::initialize(const VkDisplayPlaneInfo2KHR* in_struct)
+{
+    sType = in_struct->sType;
+    mode = in_struct->mode;
+    planeIndex = in_struct->planeIndex;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDisplayPlaneInfo2KHR::initialize(const safe_VkDisplayPlaneInfo2KHR* src)
+{
+    sType = src->sType;
+    mode = src->mode;
+    planeIndex = src->planeIndex;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDisplayPlaneCapabilities2KHR::safe_VkDisplayPlaneCapabilities2KHR(const VkDisplayPlaneCapabilities2KHR* in_struct) :
+    sType(in_struct->sType),
+    capabilities(in_struct->capabilities)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDisplayPlaneCapabilities2KHR::safe_VkDisplayPlaneCapabilities2KHR() :
+    pNext(nullptr)
+{}
+
+safe_VkDisplayPlaneCapabilities2KHR::safe_VkDisplayPlaneCapabilities2KHR(const safe_VkDisplayPlaneCapabilities2KHR& src)
+{
+    sType = src.sType;
+    capabilities = src.capabilities;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDisplayPlaneCapabilities2KHR& safe_VkDisplayPlaneCapabilities2KHR::operator=(const safe_VkDisplayPlaneCapabilities2KHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    capabilities = src.capabilities;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDisplayPlaneCapabilities2KHR::~safe_VkDisplayPlaneCapabilities2KHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDisplayPlaneCapabilities2KHR::initialize(const VkDisplayPlaneCapabilities2KHR* in_struct)
+{
+    sType = in_struct->sType;
+    capabilities = in_struct->capabilities;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDisplayPlaneCapabilities2KHR::initialize(const safe_VkDisplayPlaneCapabilities2KHR* src)
+{
+    sType = src->sType;
+    capabilities = src->capabilities;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkImageFormatListCreateInfoKHR::safe_VkImageFormatListCreateInfoKHR(const VkImageFormatListCreateInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    viewFormatCount(in_struct->viewFormatCount),
+    pViewFormats(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pViewFormats) {
+        pViewFormats = new VkFormat[in_struct->viewFormatCount];
+        memcpy ((void *)pViewFormats, (void *)in_struct->pViewFormats, sizeof(VkFormat)*in_struct->viewFormatCount);
+    }
+}
+
+safe_VkImageFormatListCreateInfoKHR::safe_VkImageFormatListCreateInfoKHR() :
+    pNext(nullptr),
+    pViewFormats(nullptr)
+{}
+
+safe_VkImageFormatListCreateInfoKHR::safe_VkImageFormatListCreateInfoKHR(const safe_VkImageFormatListCreateInfoKHR& src)
+{
+    sType = src.sType;
+    viewFormatCount = src.viewFormatCount;
+    pViewFormats = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pViewFormats) {
+        pViewFormats = new VkFormat[src.viewFormatCount];
+        memcpy ((void *)pViewFormats, (void *)src.pViewFormats, sizeof(VkFormat)*src.viewFormatCount);
+    }
+}
+
+safe_VkImageFormatListCreateInfoKHR& safe_VkImageFormatListCreateInfoKHR::operator=(const safe_VkImageFormatListCreateInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pViewFormats)
+        delete[] pViewFormats;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    viewFormatCount = src.viewFormatCount;
+    pViewFormats = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pViewFormats) {
+        pViewFormats = new VkFormat[src.viewFormatCount];
+        memcpy ((void *)pViewFormats, (void *)src.pViewFormats, sizeof(VkFormat)*src.viewFormatCount);
+    }
+
+    return *this;
+}
+
+safe_VkImageFormatListCreateInfoKHR::~safe_VkImageFormatListCreateInfoKHR()
+{
+    if (pViewFormats)
+        delete[] pViewFormats;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkImageFormatListCreateInfoKHR::initialize(const VkImageFormatListCreateInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    viewFormatCount = in_struct->viewFormatCount;
+    pViewFormats = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pViewFormats) {
+        pViewFormats = new VkFormat[in_struct->viewFormatCount];
+        memcpy ((void *)pViewFormats, (void *)in_struct->pViewFormats, sizeof(VkFormat)*in_struct->viewFormatCount);
+    }
+}
+
+void safe_VkImageFormatListCreateInfoKHR::initialize(const safe_VkImageFormatListCreateInfoKHR* src)
+{
+    sType = src->sType;
+    viewFormatCount = src->viewFormatCount;
+    pViewFormats = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pViewFormats) {
+        pViewFormats = new VkFormat[src->viewFormatCount];
+        memcpy ((void *)pViewFormats, (void *)src->pViewFormats, sizeof(VkFormat)*src->viewFormatCount);
+    }
+}
+
+safe_VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR::safe_VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR(const VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR* in_struct) :
+    sType(in_struct->sType),
+    shaderSubgroupExtendedTypes(in_struct->shaderSubgroupExtendedTypes)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR::safe_VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR::safe_VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR(const safe_VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR& src)
+{
+    sType = src.sType;
+    shaderSubgroupExtendedTypes = src.shaderSubgroupExtendedTypes;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR& safe_VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR::operator=(const safe_VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    shaderSubgroupExtendedTypes = src.shaderSubgroupExtendedTypes;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR::~safe_VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR::initialize(const VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR* in_struct)
+{
+    sType = in_struct->sType;
+    shaderSubgroupExtendedTypes = in_struct->shaderSubgroupExtendedTypes;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR::initialize(const safe_VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR* src)
+{
+    sType = src->sType;
+    shaderSubgroupExtendedTypes = src->shaderSubgroupExtendedTypes;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDevice8BitStorageFeaturesKHR::safe_VkPhysicalDevice8BitStorageFeaturesKHR(const VkPhysicalDevice8BitStorageFeaturesKHR* in_struct) :
+    sType(in_struct->sType),
+    storageBuffer8BitAccess(in_struct->storageBuffer8BitAccess),
+    uniformAndStorageBuffer8BitAccess(in_struct->uniformAndStorageBuffer8BitAccess),
+    storagePushConstant8(in_struct->storagePushConstant8)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDevice8BitStorageFeaturesKHR::safe_VkPhysicalDevice8BitStorageFeaturesKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDevice8BitStorageFeaturesKHR::safe_VkPhysicalDevice8BitStorageFeaturesKHR(const safe_VkPhysicalDevice8BitStorageFeaturesKHR& src)
+{
+    sType = src.sType;
+    storageBuffer8BitAccess = src.storageBuffer8BitAccess;
+    uniformAndStorageBuffer8BitAccess = src.uniformAndStorageBuffer8BitAccess;
+    storagePushConstant8 = src.storagePushConstant8;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDevice8BitStorageFeaturesKHR& safe_VkPhysicalDevice8BitStorageFeaturesKHR::operator=(const safe_VkPhysicalDevice8BitStorageFeaturesKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    storageBuffer8BitAccess = src.storageBuffer8BitAccess;
+    uniformAndStorageBuffer8BitAccess = src.uniformAndStorageBuffer8BitAccess;
+    storagePushConstant8 = src.storagePushConstant8;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDevice8BitStorageFeaturesKHR::~safe_VkPhysicalDevice8BitStorageFeaturesKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDevice8BitStorageFeaturesKHR::initialize(const VkPhysicalDevice8BitStorageFeaturesKHR* in_struct)
+{
+    sType = in_struct->sType;
+    storageBuffer8BitAccess = in_struct->storageBuffer8BitAccess;
+    uniformAndStorageBuffer8BitAccess = in_struct->uniformAndStorageBuffer8BitAccess;
+    storagePushConstant8 = in_struct->storagePushConstant8;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDevice8BitStorageFeaturesKHR::initialize(const safe_VkPhysicalDevice8BitStorageFeaturesKHR* src)
+{
+    sType = src->sType;
+    storageBuffer8BitAccess = src->storageBuffer8BitAccess;
+    uniformAndStorageBuffer8BitAccess = src->uniformAndStorageBuffer8BitAccess;
+    storagePushConstant8 = src->storagePushConstant8;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR::safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR(const VkPhysicalDeviceShaderAtomicInt64FeaturesKHR* in_struct) :
+    sType(in_struct->sType),
+    shaderBufferInt64Atomics(in_struct->shaderBufferInt64Atomics),
+    shaderSharedInt64Atomics(in_struct->shaderSharedInt64Atomics)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR::safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR::safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR(const safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR& src)
+{
+    sType = src.sType;
+    shaderBufferInt64Atomics = src.shaderBufferInt64Atomics;
+    shaderSharedInt64Atomics = src.shaderSharedInt64Atomics;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR& safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR::operator=(const safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    shaderBufferInt64Atomics = src.shaderBufferInt64Atomics;
+    shaderSharedInt64Atomics = src.shaderSharedInt64Atomics;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR::~safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR::initialize(const VkPhysicalDeviceShaderAtomicInt64FeaturesKHR* in_struct)
+{
+    sType = in_struct->sType;
+    shaderBufferInt64Atomics = in_struct->shaderBufferInt64Atomics;
+    shaderSharedInt64Atomics = in_struct->shaderSharedInt64Atomics;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR::initialize(const safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR* src)
+{
+    sType = src->sType;
+    shaderBufferInt64Atomics = src->shaderBufferInt64Atomics;
+    shaderSharedInt64Atomics = src->shaderSharedInt64Atomics;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceShaderClockFeaturesKHR::safe_VkPhysicalDeviceShaderClockFeaturesKHR(const VkPhysicalDeviceShaderClockFeaturesKHR* in_struct) :
+    sType(in_struct->sType),
+    shaderSubgroupClock(in_struct->shaderSubgroupClock),
+    shaderDeviceClock(in_struct->shaderDeviceClock)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceShaderClockFeaturesKHR::safe_VkPhysicalDeviceShaderClockFeaturesKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceShaderClockFeaturesKHR::safe_VkPhysicalDeviceShaderClockFeaturesKHR(const safe_VkPhysicalDeviceShaderClockFeaturesKHR& src)
+{
+    sType = src.sType;
+    shaderSubgroupClock = src.shaderSubgroupClock;
+    shaderDeviceClock = src.shaderDeviceClock;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceShaderClockFeaturesKHR& safe_VkPhysicalDeviceShaderClockFeaturesKHR::operator=(const safe_VkPhysicalDeviceShaderClockFeaturesKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    shaderSubgroupClock = src.shaderSubgroupClock;
+    shaderDeviceClock = src.shaderDeviceClock;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceShaderClockFeaturesKHR::~safe_VkPhysicalDeviceShaderClockFeaturesKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceShaderClockFeaturesKHR::initialize(const VkPhysicalDeviceShaderClockFeaturesKHR* in_struct)
+{
+    sType = in_struct->sType;
+    shaderSubgroupClock = in_struct->shaderSubgroupClock;
+    shaderDeviceClock = in_struct->shaderDeviceClock;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceShaderClockFeaturesKHR::initialize(const safe_VkPhysicalDeviceShaderClockFeaturesKHR* src)
+{
+    sType = src->sType;
+    shaderSubgroupClock = src->shaderSubgroupClock;
+    shaderDeviceClock = src->shaderDeviceClock;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceDriverPropertiesKHR::safe_VkPhysicalDeviceDriverPropertiesKHR(const VkPhysicalDeviceDriverPropertiesKHR* in_struct) :
+    sType(in_struct->sType),
+    driverID(in_struct->driverID),
+    conformanceVersion(in_struct->conformanceVersion)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    for (uint32_t i = 0; i < VK_MAX_DRIVER_NAME_SIZE_KHR; ++i) {
+        driverName[i] = in_struct->driverName[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DRIVER_INFO_SIZE_KHR; ++i) {
+        driverInfo[i] = in_struct->driverInfo[i];
+    }
+}
+
+safe_VkPhysicalDeviceDriverPropertiesKHR::safe_VkPhysicalDeviceDriverPropertiesKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceDriverPropertiesKHR::safe_VkPhysicalDeviceDriverPropertiesKHR(const safe_VkPhysicalDeviceDriverPropertiesKHR& src)
+{
+    sType = src.sType;
+    driverID = src.driverID;
+    conformanceVersion = src.conformanceVersion;
+    pNext = SafePnextCopy(src.pNext);
+    for (uint32_t i = 0; i < VK_MAX_DRIVER_NAME_SIZE_KHR; ++i) {
+        driverName[i] = src.driverName[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DRIVER_INFO_SIZE_KHR; ++i) {
+        driverInfo[i] = src.driverInfo[i];
+    }
+}
+
+safe_VkPhysicalDeviceDriverPropertiesKHR& safe_VkPhysicalDeviceDriverPropertiesKHR::operator=(const safe_VkPhysicalDeviceDriverPropertiesKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    driverID = src.driverID;
+    conformanceVersion = src.conformanceVersion;
+    pNext = SafePnextCopy(src.pNext);
+    for (uint32_t i = 0; i < VK_MAX_DRIVER_NAME_SIZE_KHR; ++i) {
+        driverName[i] = src.driverName[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DRIVER_INFO_SIZE_KHR; ++i) {
+        driverInfo[i] = src.driverInfo[i];
+    }
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceDriverPropertiesKHR::~safe_VkPhysicalDeviceDriverPropertiesKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceDriverPropertiesKHR::initialize(const VkPhysicalDeviceDriverPropertiesKHR* in_struct)
+{
+    sType = in_struct->sType;
+    driverID = in_struct->driverID;
+    conformanceVersion = in_struct->conformanceVersion;
+    pNext = SafePnextCopy(in_struct->pNext);
+    for (uint32_t i = 0; i < VK_MAX_DRIVER_NAME_SIZE_KHR; ++i) {
+        driverName[i] = in_struct->driverName[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DRIVER_INFO_SIZE_KHR; ++i) {
+        driverInfo[i] = in_struct->driverInfo[i];
+    }
+}
+
+void safe_VkPhysicalDeviceDriverPropertiesKHR::initialize(const safe_VkPhysicalDeviceDriverPropertiesKHR* src)
+{
+    sType = src->sType;
+    driverID = src->driverID;
+    conformanceVersion = src->conformanceVersion;
+    pNext = SafePnextCopy(src->pNext);
+    for (uint32_t i = 0; i < VK_MAX_DRIVER_NAME_SIZE_KHR; ++i) {
+        driverName[i] = src->driverName[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DRIVER_INFO_SIZE_KHR; ++i) {
+        driverInfo[i] = src->driverInfo[i];
+    }
+}
+
+safe_VkPhysicalDeviceFloatControlsPropertiesKHR::safe_VkPhysicalDeviceFloatControlsPropertiesKHR(const VkPhysicalDeviceFloatControlsPropertiesKHR* in_struct) :
+    sType(in_struct->sType),
+    denormBehaviorIndependence(in_struct->denormBehaviorIndependence),
+    roundingModeIndependence(in_struct->roundingModeIndependence),
+    shaderSignedZeroInfNanPreserveFloat16(in_struct->shaderSignedZeroInfNanPreserveFloat16),
+    shaderSignedZeroInfNanPreserveFloat32(in_struct->shaderSignedZeroInfNanPreserveFloat32),
+    shaderSignedZeroInfNanPreserveFloat64(in_struct->shaderSignedZeroInfNanPreserveFloat64),
+    shaderDenormPreserveFloat16(in_struct->shaderDenormPreserveFloat16),
+    shaderDenormPreserveFloat32(in_struct->shaderDenormPreserveFloat32),
+    shaderDenormPreserveFloat64(in_struct->shaderDenormPreserveFloat64),
+    shaderDenormFlushToZeroFloat16(in_struct->shaderDenormFlushToZeroFloat16),
+    shaderDenormFlushToZeroFloat32(in_struct->shaderDenormFlushToZeroFloat32),
+    shaderDenormFlushToZeroFloat64(in_struct->shaderDenormFlushToZeroFloat64),
+    shaderRoundingModeRTEFloat16(in_struct->shaderRoundingModeRTEFloat16),
+    shaderRoundingModeRTEFloat32(in_struct->shaderRoundingModeRTEFloat32),
+    shaderRoundingModeRTEFloat64(in_struct->shaderRoundingModeRTEFloat64),
+    shaderRoundingModeRTZFloat16(in_struct->shaderRoundingModeRTZFloat16),
+    shaderRoundingModeRTZFloat32(in_struct->shaderRoundingModeRTZFloat32),
+    shaderRoundingModeRTZFloat64(in_struct->shaderRoundingModeRTZFloat64)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceFloatControlsPropertiesKHR::safe_VkPhysicalDeviceFloatControlsPropertiesKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceFloatControlsPropertiesKHR::safe_VkPhysicalDeviceFloatControlsPropertiesKHR(const safe_VkPhysicalDeviceFloatControlsPropertiesKHR& src)
+{
+    sType = src.sType;
+    denormBehaviorIndependence = src.denormBehaviorIndependence;
+    roundingModeIndependence = src.roundingModeIndependence;
+    shaderSignedZeroInfNanPreserveFloat16 = src.shaderSignedZeroInfNanPreserveFloat16;
+    shaderSignedZeroInfNanPreserveFloat32 = src.shaderSignedZeroInfNanPreserveFloat32;
+    shaderSignedZeroInfNanPreserveFloat64 = src.shaderSignedZeroInfNanPreserveFloat64;
+    shaderDenormPreserveFloat16 = src.shaderDenormPreserveFloat16;
+    shaderDenormPreserveFloat32 = src.shaderDenormPreserveFloat32;
+    shaderDenormPreserveFloat64 = src.shaderDenormPreserveFloat64;
+    shaderDenormFlushToZeroFloat16 = src.shaderDenormFlushToZeroFloat16;
+    shaderDenormFlushToZeroFloat32 = src.shaderDenormFlushToZeroFloat32;
+    shaderDenormFlushToZeroFloat64 = src.shaderDenormFlushToZeroFloat64;
+    shaderRoundingModeRTEFloat16 = src.shaderRoundingModeRTEFloat16;
+    shaderRoundingModeRTEFloat32 = src.shaderRoundingModeRTEFloat32;
+    shaderRoundingModeRTEFloat64 = src.shaderRoundingModeRTEFloat64;
+    shaderRoundingModeRTZFloat16 = src.shaderRoundingModeRTZFloat16;
+    shaderRoundingModeRTZFloat32 = src.shaderRoundingModeRTZFloat32;
+    shaderRoundingModeRTZFloat64 = src.shaderRoundingModeRTZFloat64;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceFloatControlsPropertiesKHR& safe_VkPhysicalDeviceFloatControlsPropertiesKHR::operator=(const safe_VkPhysicalDeviceFloatControlsPropertiesKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    denormBehaviorIndependence = src.denormBehaviorIndependence;
+    roundingModeIndependence = src.roundingModeIndependence;
+    shaderSignedZeroInfNanPreserveFloat16 = src.shaderSignedZeroInfNanPreserveFloat16;
+    shaderSignedZeroInfNanPreserveFloat32 = src.shaderSignedZeroInfNanPreserveFloat32;
+    shaderSignedZeroInfNanPreserveFloat64 = src.shaderSignedZeroInfNanPreserveFloat64;
+    shaderDenormPreserveFloat16 = src.shaderDenormPreserveFloat16;
+    shaderDenormPreserveFloat32 = src.shaderDenormPreserveFloat32;
+    shaderDenormPreserveFloat64 = src.shaderDenormPreserveFloat64;
+    shaderDenormFlushToZeroFloat16 = src.shaderDenormFlushToZeroFloat16;
+    shaderDenormFlushToZeroFloat32 = src.shaderDenormFlushToZeroFloat32;
+    shaderDenormFlushToZeroFloat64 = src.shaderDenormFlushToZeroFloat64;
+    shaderRoundingModeRTEFloat16 = src.shaderRoundingModeRTEFloat16;
+    shaderRoundingModeRTEFloat32 = src.shaderRoundingModeRTEFloat32;
+    shaderRoundingModeRTEFloat64 = src.shaderRoundingModeRTEFloat64;
+    shaderRoundingModeRTZFloat16 = src.shaderRoundingModeRTZFloat16;
+    shaderRoundingModeRTZFloat32 = src.shaderRoundingModeRTZFloat32;
+    shaderRoundingModeRTZFloat64 = src.shaderRoundingModeRTZFloat64;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceFloatControlsPropertiesKHR::~safe_VkPhysicalDeviceFloatControlsPropertiesKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceFloatControlsPropertiesKHR::initialize(const VkPhysicalDeviceFloatControlsPropertiesKHR* in_struct)
+{
+    sType = in_struct->sType;
+    denormBehaviorIndependence = in_struct->denormBehaviorIndependence;
+    roundingModeIndependence = in_struct->roundingModeIndependence;
+    shaderSignedZeroInfNanPreserveFloat16 = in_struct->shaderSignedZeroInfNanPreserveFloat16;
+    shaderSignedZeroInfNanPreserveFloat32 = in_struct->shaderSignedZeroInfNanPreserveFloat32;
+    shaderSignedZeroInfNanPreserveFloat64 = in_struct->shaderSignedZeroInfNanPreserveFloat64;
+    shaderDenormPreserveFloat16 = in_struct->shaderDenormPreserveFloat16;
+    shaderDenormPreserveFloat32 = in_struct->shaderDenormPreserveFloat32;
+    shaderDenormPreserveFloat64 = in_struct->shaderDenormPreserveFloat64;
+    shaderDenormFlushToZeroFloat16 = in_struct->shaderDenormFlushToZeroFloat16;
+    shaderDenormFlushToZeroFloat32 = in_struct->shaderDenormFlushToZeroFloat32;
+    shaderDenormFlushToZeroFloat64 = in_struct->shaderDenormFlushToZeroFloat64;
+    shaderRoundingModeRTEFloat16 = in_struct->shaderRoundingModeRTEFloat16;
+    shaderRoundingModeRTEFloat32 = in_struct->shaderRoundingModeRTEFloat32;
+    shaderRoundingModeRTEFloat64 = in_struct->shaderRoundingModeRTEFloat64;
+    shaderRoundingModeRTZFloat16 = in_struct->shaderRoundingModeRTZFloat16;
+    shaderRoundingModeRTZFloat32 = in_struct->shaderRoundingModeRTZFloat32;
+    shaderRoundingModeRTZFloat64 = in_struct->shaderRoundingModeRTZFloat64;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceFloatControlsPropertiesKHR::initialize(const safe_VkPhysicalDeviceFloatControlsPropertiesKHR* src)
+{
+    sType = src->sType;
+    denormBehaviorIndependence = src->denormBehaviorIndependence;
+    roundingModeIndependence = src->roundingModeIndependence;
+    shaderSignedZeroInfNanPreserveFloat16 = src->shaderSignedZeroInfNanPreserveFloat16;
+    shaderSignedZeroInfNanPreserveFloat32 = src->shaderSignedZeroInfNanPreserveFloat32;
+    shaderSignedZeroInfNanPreserveFloat64 = src->shaderSignedZeroInfNanPreserveFloat64;
+    shaderDenormPreserveFloat16 = src->shaderDenormPreserveFloat16;
+    shaderDenormPreserveFloat32 = src->shaderDenormPreserveFloat32;
+    shaderDenormPreserveFloat64 = src->shaderDenormPreserveFloat64;
+    shaderDenormFlushToZeroFloat16 = src->shaderDenormFlushToZeroFloat16;
+    shaderDenormFlushToZeroFloat32 = src->shaderDenormFlushToZeroFloat32;
+    shaderDenormFlushToZeroFloat64 = src->shaderDenormFlushToZeroFloat64;
+    shaderRoundingModeRTEFloat16 = src->shaderRoundingModeRTEFloat16;
+    shaderRoundingModeRTEFloat32 = src->shaderRoundingModeRTEFloat32;
+    shaderRoundingModeRTEFloat64 = src->shaderRoundingModeRTEFloat64;
+    shaderRoundingModeRTZFloat16 = src->shaderRoundingModeRTZFloat16;
+    shaderRoundingModeRTZFloat32 = src->shaderRoundingModeRTZFloat32;
+    shaderRoundingModeRTZFloat64 = src->shaderRoundingModeRTZFloat64;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkSubpassDescriptionDepthStencilResolveKHR::safe_VkSubpassDescriptionDepthStencilResolveKHR(const VkSubpassDescriptionDepthStencilResolveKHR* in_struct) :
+    sType(in_struct->sType),
+    depthResolveMode(in_struct->depthResolveMode),
+    stencilResolveMode(in_struct->stencilResolveMode),
+    pDepthStencilResolveAttachment(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pDepthStencilResolveAttachment)
+        pDepthStencilResolveAttachment = new safe_VkAttachmentReference2KHR(in_struct->pDepthStencilResolveAttachment);
+}
+
+safe_VkSubpassDescriptionDepthStencilResolveKHR::safe_VkSubpassDescriptionDepthStencilResolveKHR() :
+    pNext(nullptr),
+    pDepthStencilResolveAttachment(nullptr)
+{}
+
+safe_VkSubpassDescriptionDepthStencilResolveKHR::safe_VkSubpassDescriptionDepthStencilResolveKHR(const safe_VkSubpassDescriptionDepthStencilResolveKHR& src)
+{
+    sType = src.sType;
+    depthResolveMode = src.depthResolveMode;
+    stencilResolveMode = src.stencilResolveMode;
+    pDepthStencilResolveAttachment = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pDepthStencilResolveAttachment)
+        pDepthStencilResolveAttachment = new safe_VkAttachmentReference2KHR(*src.pDepthStencilResolveAttachment);
+}
+
+safe_VkSubpassDescriptionDepthStencilResolveKHR& safe_VkSubpassDescriptionDepthStencilResolveKHR::operator=(const safe_VkSubpassDescriptionDepthStencilResolveKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pDepthStencilResolveAttachment)
+        delete pDepthStencilResolveAttachment;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    depthResolveMode = src.depthResolveMode;
+    stencilResolveMode = src.stencilResolveMode;
+    pDepthStencilResolveAttachment = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pDepthStencilResolveAttachment)
+        pDepthStencilResolveAttachment = new safe_VkAttachmentReference2KHR(*src.pDepthStencilResolveAttachment);
+
+    return *this;
+}
+
+safe_VkSubpassDescriptionDepthStencilResolveKHR::~safe_VkSubpassDescriptionDepthStencilResolveKHR()
+{
+    if (pDepthStencilResolveAttachment)
+        delete pDepthStencilResolveAttachment;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSubpassDescriptionDepthStencilResolveKHR::initialize(const VkSubpassDescriptionDepthStencilResolveKHR* in_struct)
+{
+    sType = in_struct->sType;
+    depthResolveMode = in_struct->depthResolveMode;
+    stencilResolveMode = in_struct->stencilResolveMode;
+    pDepthStencilResolveAttachment = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pDepthStencilResolveAttachment)
+        pDepthStencilResolveAttachment = new safe_VkAttachmentReference2KHR(in_struct->pDepthStencilResolveAttachment);
+}
+
+void safe_VkSubpassDescriptionDepthStencilResolveKHR::initialize(const safe_VkSubpassDescriptionDepthStencilResolveKHR* src)
+{
+    sType = src->sType;
+    depthResolveMode = src->depthResolveMode;
+    stencilResolveMode = src->stencilResolveMode;
+    pDepthStencilResolveAttachment = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pDepthStencilResolveAttachment)
+        pDepthStencilResolveAttachment = new safe_VkAttachmentReference2KHR(*src->pDepthStencilResolveAttachment);
+}
+
+safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR::safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR(const VkPhysicalDeviceDepthStencilResolvePropertiesKHR* in_struct) :
+    sType(in_struct->sType),
+    supportedDepthResolveModes(in_struct->supportedDepthResolveModes),
+    supportedStencilResolveModes(in_struct->supportedStencilResolveModes),
+    independentResolveNone(in_struct->independentResolveNone),
+    independentResolve(in_struct->independentResolve)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR::safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR::safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR(const safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR& src)
+{
+    sType = src.sType;
+    supportedDepthResolveModes = src.supportedDepthResolveModes;
+    supportedStencilResolveModes = src.supportedStencilResolveModes;
+    independentResolveNone = src.independentResolveNone;
+    independentResolve = src.independentResolve;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR& safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR::operator=(const safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    supportedDepthResolveModes = src.supportedDepthResolveModes;
+    supportedStencilResolveModes = src.supportedStencilResolveModes;
+    independentResolveNone = src.independentResolveNone;
+    independentResolve = src.independentResolve;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR::~safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR::initialize(const VkPhysicalDeviceDepthStencilResolvePropertiesKHR* in_struct)
+{
+    sType = in_struct->sType;
+    supportedDepthResolveModes = in_struct->supportedDepthResolveModes;
+    supportedStencilResolveModes = in_struct->supportedStencilResolveModes;
+    independentResolveNone = in_struct->independentResolveNone;
+    independentResolve = in_struct->independentResolve;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR::initialize(const safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR* src)
+{
+    sType = src->sType;
+    supportedDepthResolveModes = src->supportedDepthResolveModes;
+    supportedStencilResolveModes = src->supportedStencilResolveModes;
+    independentResolveNone = src->independentResolveNone;
+    independentResolve = src->independentResolve;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceTimelineSemaphoreFeaturesKHR::safe_VkPhysicalDeviceTimelineSemaphoreFeaturesKHR(const VkPhysicalDeviceTimelineSemaphoreFeaturesKHR* in_struct) :
+    sType(in_struct->sType),
+    timelineSemaphore(in_struct->timelineSemaphore)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceTimelineSemaphoreFeaturesKHR::safe_VkPhysicalDeviceTimelineSemaphoreFeaturesKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceTimelineSemaphoreFeaturesKHR::safe_VkPhysicalDeviceTimelineSemaphoreFeaturesKHR(const safe_VkPhysicalDeviceTimelineSemaphoreFeaturesKHR& src)
+{
+    sType = src.sType;
+    timelineSemaphore = src.timelineSemaphore;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceTimelineSemaphoreFeaturesKHR& safe_VkPhysicalDeviceTimelineSemaphoreFeaturesKHR::operator=(const safe_VkPhysicalDeviceTimelineSemaphoreFeaturesKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    timelineSemaphore = src.timelineSemaphore;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceTimelineSemaphoreFeaturesKHR::~safe_VkPhysicalDeviceTimelineSemaphoreFeaturesKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceTimelineSemaphoreFeaturesKHR::initialize(const VkPhysicalDeviceTimelineSemaphoreFeaturesKHR* in_struct)
+{
+    sType = in_struct->sType;
+    timelineSemaphore = in_struct->timelineSemaphore;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceTimelineSemaphoreFeaturesKHR::initialize(const safe_VkPhysicalDeviceTimelineSemaphoreFeaturesKHR* src)
+{
+    sType = src->sType;
+    timelineSemaphore = src->timelineSemaphore;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceTimelineSemaphorePropertiesKHR::safe_VkPhysicalDeviceTimelineSemaphorePropertiesKHR(const VkPhysicalDeviceTimelineSemaphorePropertiesKHR* in_struct) :
+    sType(in_struct->sType),
+    maxTimelineSemaphoreValueDifference(in_struct->maxTimelineSemaphoreValueDifference)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceTimelineSemaphorePropertiesKHR::safe_VkPhysicalDeviceTimelineSemaphorePropertiesKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceTimelineSemaphorePropertiesKHR::safe_VkPhysicalDeviceTimelineSemaphorePropertiesKHR(const safe_VkPhysicalDeviceTimelineSemaphorePropertiesKHR& src)
+{
+    sType = src.sType;
+    maxTimelineSemaphoreValueDifference = src.maxTimelineSemaphoreValueDifference;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceTimelineSemaphorePropertiesKHR& safe_VkPhysicalDeviceTimelineSemaphorePropertiesKHR::operator=(const safe_VkPhysicalDeviceTimelineSemaphorePropertiesKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    maxTimelineSemaphoreValueDifference = src.maxTimelineSemaphoreValueDifference;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceTimelineSemaphorePropertiesKHR::~safe_VkPhysicalDeviceTimelineSemaphorePropertiesKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceTimelineSemaphorePropertiesKHR::initialize(const VkPhysicalDeviceTimelineSemaphorePropertiesKHR* in_struct)
+{
+    sType = in_struct->sType;
+    maxTimelineSemaphoreValueDifference = in_struct->maxTimelineSemaphoreValueDifference;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceTimelineSemaphorePropertiesKHR::initialize(const safe_VkPhysicalDeviceTimelineSemaphorePropertiesKHR* src)
+{
+    sType = src->sType;
+    maxTimelineSemaphoreValueDifference = src->maxTimelineSemaphoreValueDifference;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkSemaphoreTypeCreateInfoKHR::safe_VkSemaphoreTypeCreateInfoKHR(const VkSemaphoreTypeCreateInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    semaphoreType(in_struct->semaphoreType),
+    initialValue(in_struct->initialValue)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkSemaphoreTypeCreateInfoKHR::safe_VkSemaphoreTypeCreateInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkSemaphoreTypeCreateInfoKHR::safe_VkSemaphoreTypeCreateInfoKHR(const safe_VkSemaphoreTypeCreateInfoKHR& src)
+{
+    sType = src.sType;
+    semaphoreType = src.semaphoreType;
+    initialValue = src.initialValue;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkSemaphoreTypeCreateInfoKHR& safe_VkSemaphoreTypeCreateInfoKHR::operator=(const safe_VkSemaphoreTypeCreateInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    semaphoreType = src.semaphoreType;
+    initialValue = src.initialValue;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkSemaphoreTypeCreateInfoKHR::~safe_VkSemaphoreTypeCreateInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSemaphoreTypeCreateInfoKHR::initialize(const VkSemaphoreTypeCreateInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    semaphoreType = in_struct->semaphoreType;
+    initialValue = in_struct->initialValue;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkSemaphoreTypeCreateInfoKHR::initialize(const safe_VkSemaphoreTypeCreateInfoKHR* src)
+{
+    sType = src->sType;
+    semaphoreType = src->semaphoreType;
+    initialValue = src->initialValue;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkTimelineSemaphoreSubmitInfoKHR::safe_VkTimelineSemaphoreSubmitInfoKHR(const VkTimelineSemaphoreSubmitInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    waitSemaphoreValueCount(in_struct->waitSemaphoreValueCount),
+    pWaitSemaphoreValues(nullptr),
+    signalSemaphoreValueCount(in_struct->signalSemaphoreValueCount),
+    pSignalSemaphoreValues(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pWaitSemaphoreValues) {
+        pWaitSemaphoreValues = new uint64_t[in_struct->waitSemaphoreValueCount];
+        memcpy ((void *)pWaitSemaphoreValues, (void *)in_struct->pWaitSemaphoreValues, sizeof(uint64_t)*in_struct->waitSemaphoreValueCount);
+    }
+    if (in_struct->pSignalSemaphoreValues) {
+        pSignalSemaphoreValues = new uint64_t[in_struct->signalSemaphoreValueCount];
+        memcpy ((void *)pSignalSemaphoreValues, (void *)in_struct->pSignalSemaphoreValues, sizeof(uint64_t)*in_struct->signalSemaphoreValueCount);
+    }
+}
+
+safe_VkTimelineSemaphoreSubmitInfoKHR::safe_VkTimelineSemaphoreSubmitInfoKHR() :
+    pNext(nullptr),
+    pWaitSemaphoreValues(nullptr),
+    pSignalSemaphoreValues(nullptr)
+{}
+
+safe_VkTimelineSemaphoreSubmitInfoKHR::safe_VkTimelineSemaphoreSubmitInfoKHR(const safe_VkTimelineSemaphoreSubmitInfoKHR& src)
+{
+    sType = src.sType;
+    waitSemaphoreValueCount = src.waitSemaphoreValueCount;
+    pWaitSemaphoreValues = nullptr;
+    signalSemaphoreValueCount = src.signalSemaphoreValueCount;
+    pSignalSemaphoreValues = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pWaitSemaphoreValues) {
+        pWaitSemaphoreValues = new uint64_t[src.waitSemaphoreValueCount];
+        memcpy ((void *)pWaitSemaphoreValues, (void *)src.pWaitSemaphoreValues, sizeof(uint64_t)*src.waitSemaphoreValueCount);
+    }
+    if (src.pSignalSemaphoreValues) {
+        pSignalSemaphoreValues = new uint64_t[src.signalSemaphoreValueCount];
+        memcpy ((void *)pSignalSemaphoreValues, (void *)src.pSignalSemaphoreValues, sizeof(uint64_t)*src.signalSemaphoreValueCount);
+    }
+}
+
+safe_VkTimelineSemaphoreSubmitInfoKHR& safe_VkTimelineSemaphoreSubmitInfoKHR::operator=(const safe_VkTimelineSemaphoreSubmitInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pWaitSemaphoreValues)
+        delete[] pWaitSemaphoreValues;
+    if (pSignalSemaphoreValues)
+        delete[] pSignalSemaphoreValues;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    waitSemaphoreValueCount = src.waitSemaphoreValueCount;
+    pWaitSemaphoreValues = nullptr;
+    signalSemaphoreValueCount = src.signalSemaphoreValueCount;
+    pSignalSemaphoreValues = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pWaitSemaphoreValues) {
+        pWaitSemaphoreValues = new uint64_t[src.waitSemaphoreValueCount];
+        memcpy ((void *)pWaitSemaphoreValues, (void *)src.pWaitSemaphoreValues, sizeof(uint64_t)*src.waitSemaphoreValueCount);
+    }
+    if (src.pSignalSemaphoreValues) {
+        pSignalSemaphoreValues = new uint64_t[src.signalSemaphoreValueCount];
+        memcpy ((void *)pSignalSemaphoreValues, (void *)src.pSignalSemaphoreValues, sizeof(uint64_t)*src.signalSemaphoreValueCount);
+    }
+
+    return *this;
+}
+
+safe_VkTimelineSemaphoreSubmitInfoKHR::~safe_VkTimelineSemaphoreSubmitInfoKHR()
+{
+    if (pWaitSemaphoreValues)
+        delete[] pWaitSemaphoreValues;
+    if (pSignalSemaphoreValues)
+        delete[] pSignalSemaphoreValues;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkTimelineSemaphoreSubmitInfoKHR::initialize(const VkTimelineSemaphoreSubmitInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    waitSemaphoreValueCount = in_struct->waitSemaphoreValueCount;
+    pWaitSemaphoreValues = nullptr;
+    signalSemaphoreValueCount = in_struct->signalSemaphoreValueCount;
+    pSignalSemaphoreValues = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pWaitSemaphoreValues) {
+        pWaitSemaphoreValues = new uint64_t[in_struct->waitSemaphoreValueCount];
+        memcpy ((void *)pWaitSemaphoreValues, (void *)in_struct->pWaitSemaphoreValues, sizeof(uint64_t)*in_struct->waitSemaphoreValueCount);
+    }
+    if (in_struct->pSignalSemaphoreValues) {
+        pSignalSemaphoreValues = new uint64_t[in_struct->signalSemaphoreValueCount];
+        memcpy ((void *)pSignalSemaphoreValues, (void *)in_struct->pSignalSemaphoreValues, sizeof(uint64_t)*in_struct->signalSemaphoreValueCount);
+    }
+}
+
+void safe_VkTimelineSemaphoreSubmitInfoKHR::initialize(const safe_VkTimelineSemaphoreSubmitInfoKHR* src)
+{
+    sType = src->sType;
+    waitSemaphoreValueCount = src->waitSemaphoreValueCount;
+    pWaitSemaphoreValues = nullptr;
+    signalSemaphoreValueCount = src->signalSemaphoreValueCount;
+    pSignalSemaphoreValues = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pWaitSemaphoreValues) {
+        pWaitSemaphoreValues = new uint64_t[src->waitSemaphoreValueCount];
+        memcpy ((void *)pWaitSemaphoreValues, (void *)src->pWaitSemaphoreValues, sizeof(uint64_t)*src->waitSemaphoreValueCount);
+    }
+    if (src->pSignalSemaphoreValues) {
+        pSignalSemaphoreValues = new uint64_t[src->signalSemaphoreValueCount];
+        memcpy ((void *)pSignalSemaphoreValues, (void *)src->pSignalSemaphoreValues, sizeof(uint64_t)*src->signalSemaphoreValueCount);
+    }
+}
+
+safe_VkSemaphoreWaitInfoKHR::safe_VkSemaphoreWaitInfoKHR(const VkSemaphoreWaitInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    semaphoreCount(in_struct->semaphoreCount),
+    pSemaphores(nullptr),
+    pValues(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (semaphoreCount && in_struct->pSemaphores) {
+        pSemaphores = new VkSemaphore[semaphoreCount];
+        for (uint32_t i = 0; i < semaphoreCount; ++i) {
+            pSemaphores[i] = in_struct->pSemaphores[i];
+        }
+    }
+    if (in_struct->pValues) {
+        pValues = new uint64_t[in_struct->semaphoreCount];
+        memcpy ((void *)pValues, (void *)in_struct->pValues, sizeof(uint64_t)*in_struct->semaphoreCount);
+    }
+}
+
+safe_VkSemaphoreWaitInfoKHR::safe_VkSemaphoreWaitInfoKHR() :
+    pNext(nullptr),
+    pSemaphores(nullptr),
+    pValues(nullptr)
+{}
+
+safe_VkSemaphoreWaitInfoKHR::safe_VkSemaphoreWaitInfoKHR(const safe_VkSemaphoreWaitInfoKHR& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    semaphoreCount = src.semaphoreCount;
+    pSemaphores = nullptr;
+    pValues = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (semaphoreCount && src.pSemaphores) {
+        pSemaphores = new VkSemaphore[semaphoreCount];
+        for (uint32_t i = 0; i < semaphoreCount; ++i) {
+            pSemaphores[i] = src.pSemaphores[i];
+        }
+    }
+    if (src.pValues) {
+        pValues = new uint64_t[src.semaphoreCount];
+        memcpy ((void *)pValues, (void *)src.pValues, sizeof(uint64_t)*src.semaphoreCount);
+    }
+}
+
+safe_VkSemaphoreWaitInfoKHR& safe_VkSemaphoreWaitInfoKHR::operator=(const safe_VkSemaphoreWaitInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pSemaphores)
+        delete[] pSemaphores;
+    if (pValues)
+        delete[] pValues;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    semaphoreCount = src.semaphoreCount;
+    pSemaphores = nullptr;
+    pValues = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (semaphoreCount && src.pSemaphores) {
+        pSemaphores = new VkSemaphore[semaphoreCount];
+        for (uint32_t i = 0; i < semaphoreCount; ++i) {
+            pSemaphores[i] = src.pSemaphores[i];
+        }
+    }
+    if (src.pValues) {
+        pValues = new uint64_t[src.semaphoreCount];
+        memcpy ((void *)pValues, (void *)src.pValues, sizeof(uint64_t)*src.semaphoreCount);
+    }
+
+    return *this;
+}
+
+safe_VkSemaphoreWaitInfoKHR::~safe_VkSemaphoreWaitInfoKHR()
+{
+    if (pSemaphores)
+        delete[] pSemaphores;
+    if (pValues)
+        delete[] pValues;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSemaphoreWaitInfoKHR::initialize(const VkSemaphoreWaitInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    semaphoreCount = in_struct->semaphoreCount;
+    pSemaphores = nullptr;
+    pValues = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (semaphoreCount && in_struct->pSemaphores) {
+        pSemaphores = new VkSemaphore[semaphoreCount];
+        for (uint32_t i = 0; i < semaphoreCount; ++i) {
+            pSemaphores[i] = in_struct->pSemaphores[i];
+        }
+    }
+    if (in_struct->pValues) {
+        pValues = new uint64_t[in_struct->semaphoreCount];
+        memcpy ((void *)pValues, (void *)in_struct->pValues, sizeof(uint64_t)*in_struct->semaphoreCount);
+    }
+}
+
+void safe_VkSemaphoreWaitInfoKHR::initialize(const safe_VkSemaphoreWaitInfoKHR* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    semaphoreCount = src->semaphoreCount;
+    pSemaphores = nullptr;
+    pValues = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (semaphoreCount && src->pSemaphores) {
+        pSemaphores = new VkSemaphore[semaphoreCount];
+        for (uint32_t i = 0; i < semaphoreCount; ++i) {
+            pSemaphores[i] = src->pSemaphores[i];
+        }
+    }
+    if (src->pValues) {
+        pValues = new uint64_t[src->semaphoreCount];
+        memcpy ((void *)pValues, (void *)src->pValues, sizeof(uint64_t)*src->semaphoreCount);
+    }
+}
+
+safe_VkSemaphoreSignalInfoKHR::safe_VkSemaphoreSignalInfoKHR(const VkSemaphoreSignalInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    semaphore(in_struct->semaphore),
+    value(in_struct->value)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkSemaphoreSignalInfoKHR::safe_VkSemaphoreSignalInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkSemaphoreSignalInfoKHR::safe_VkSemaphoreSignalInfoKHR(const safe_VkSemaphoreSignalInfoKHR& src)
+{
+    sType = src.sType;
+    semaphore = src.semaphore;
+    value = src.value;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkSemaphoreSignalInfoKHR& safe_VkSemaphoreSignalInfoKHR::operator=(const safe_VkSemaphoreSignalInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    semaphore = src.semaphore;
+    value = src.value;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkSemaphoreSignalInfoKHR::~safe_VkSemaphoreSignalInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSemaphoreSignalInfoKHR::initialize(const VkSemaphoreSignalInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    semaphore = in_struct->semaphore;
+    value = in_struct->value;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkSemaphoreSignalInfoKHR::initialize(const safe_VkSemaphoreSignalInfoKHR* src)
+{
+    sType = src->sType;
+    semaphore = src->semaphore;
+    value = src->value;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR::safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR(const VkPhysicalDeviceVulkanMemoryModelFeaturesKHR* in_struct) :
+    sType(in_struct->sType),
+    vulkanMemoryModel(in_struct->vulkanMemoryModel),
+    vulkanMemoryModelDeviceScope(in_struct->vulkanMemoryModelDeviceScope),
+    vulkanMemoryModelAvailabilityVisibilityChains(in_struct->vulkanMemoryModelAvailabilityVisibilityChains)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR::safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR::safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR(const safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR& src)
+{
+    sType = src.sType;
+    vulkanMemoryModel = src.vulkanMemoryModel;
+    vulkanMemoryModelDeviceScope = src.vulkanMemoryModelDeviceScope;
+    vulkanMemoryModelAvailabilityVisibilityChains = src.vulkanMemoryModelAvailabilityVisibilityChains;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR& safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR::operator=(const safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    vulkanMemoryModel = src.vulkanMemoryModel;
+    vulkanMemoryModelDeviceScope = src.vulkanMemoryModelDeviceScope;
+    vulkanMemoryModelAvailabilityVisibilityChains = src.vulkanMemoryModelAvailabilityVisibilityChains;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR::~safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR::initialize(const VkPhysicalDeviceVulkanMemoryModelFeaturesKHR* in_struct)
+{
+    sType = in_struct->sType;
+    vulkanMemoryModel = in_struct->vulkanMemoryModel;
+    vulkanMemoryModelDeviceScope = in_struct->vulkanMemoryModelDeviceScope;
+    vulkanMemoryModelAvailabilityVisibilityChains = in_struct->vulkanMemoryModelAvailabilityVisibilityChains;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR::initialize(const safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR* src)
+{
+    sType = src->sType;
+    vulkanMemoryModel = src->vulkanMemoryModel;
+    vulkanMemoryModelDeviceScope = src->vulkanMemoryModelDeviceScope;
+    vulkanMemoryModelAvailabilityVisibilityChains = src->vulkanMemoryModelAvailabilityVisibilityChains;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkSurfaceProtectedCapabilitiesKHR::safe_VkSurfaceProtectedCapabilitiesKHR(const VkSurfaceProtectedCapabilitiesKHR* in_struct) :
+    sType(in_struct->sType),
+    supportsProtected(in_struct->supportsProtected)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkSurfaceProtectedCapabilitiesKHR::safe_VkSurfaceProtectedCapabilitiesKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkSurfaceProtectedCapabilitiesKHR::safe_VkSurfaceProtectedCapabilitiesKHR(const safe_VkSurfaceProtectedCapabilitiesKHR& src)
+{
+    sType = src.sType;
+    supportsProtected = src.supportsProtected;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkSurfaceProtectedCapabilitiesKHR& safe_VkSurfaceProtectedCapabilitiesKHR::operator=(const safe_VkSurfaceProtectedCapabilitiesKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    supportsProtected = src.supportsProtected;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkSurfaceProtectedCapabilitiesKHR::~safe_VkSurfaceProtectedCapabilitiesKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSurfaceProtectedCapabilitiesKHR::initialize(const VkSurfaceProtectedCapabilitiesKHR* in_struct)
+{
+    sType = in_struct->sType;
+    supportsProtected = in_struct->supportsProtected;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkSurfaceProtectedCapabilitiesKHR::initialize(const safe_VkSurfaceProtectedCapabilitiesKHR* src)
+{
+    sType = src->sType;
+    supportsProtected = src->supportsProtected;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR::safe_VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR(const VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR* in_struct) :
+    sType(in_struct->sType),
+    separateDepthStencilLayouts(in_struct->separateDepthStencilLayouts)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR::safe_VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR::safe_VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR(const safe_VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR& src)
+{
+    sType = src.sType;
+    separateDepthStencilLayouts = src.separateDepthStencilLayouts;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR& safe_VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR::operator=(const safe_VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    separateDepthStencilLayouts = src.separateDepthStencilLayouts;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR::~safe_VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR::initialize(const VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR* in_struct)
+{
+    sType = in_struct->sType;
+    separateDepthStencilLayouts = in_struct->separateDepthStencilLayouts;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR::initialize(const safe_VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR* src)
+{
+    sType = src->sType;
+    separateDepthStencilLayouts = src->separateDepthStencilLayouts;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkAttachmentReferenceStencilLayoutKHR::safe_VkAttachmentReferenceStencilLayoutKHR(const VkAttachmentReferenceStencilLayoutKHR* in_struct) :
+    sType(in_struct->sType),
+    stencilLayout(in_struct->stencilLayout)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkAttachmentReferenceStencilLayoutKHR::safe_VkAttachmentReferenceStencilLayoutKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkAttachmentReferenceStencilLayoutKHR::safe_VkAttachmentReferenceStencilLayoutKHR(const safe_VkAttachmentReferenceStencilLayoutKHR& src)
+{
+    sType = src.sType;
+    stencilLayout = src.stencilLayout;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkAttachmentReferenceStencilLayoutKHR& safe_VkAttachmentReferenceStencilLayoutKHR::operator=(const safe_VkAttachmentReferenceStencilLayoutKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    stencilLayout = src.stencilLayout;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkAttachmentReferenceStencilLayoutKHR::~safe_VkAttachmentReferenceStencilLayoutKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkAttachmentReferenceStencilLayoutKHR::initialize(const VkAttachmentReferenceStencilLayoutKHR* in_struct)
+{
+    sType = in_struct->sType;
+    stencilLayout = in_struct->stencilLayout;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkAttachmentReferenceStencilLayoutKHR::initialize(const safe_VkAttachmentReferenceStencilLayoutKHR* src)
+{
+    sType = src->sType;
+    stencilLayout = src->stencilLayout;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkAttachmentDescriptionStencilLayoutKHR::safe_VkAttachmentDescriptionStencilLayoutKHR(const VkAttachmentDescriptionStencilLayoutKHR* in_struct) :
+    sType(in_struct->sType),
+    stencilInitialLayout(in_struct->stencilInitialLayout),
+    stencilFinalLayout(in_struct->stencilFinalLayout)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkAttachmentDescriptionStencilLayoutKHR::safe_VkAttachmentDescriptionStencilLayoutKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkAttachmentDescriptionStencilLayoutKHR::safe_VkAttachmentDescriptionStencilLayoutKHR(const safe_VkAttachmentDescriptionStencilLayoutKHR& src)
+{
+    sType = src.sType;
+    stencilInitialLayout = src.stencilInitialLayout;
+    stencilFinalLayout = src.stencilFinalLayout;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkAttachmentDescriptionStencilLayoutKHR& safe_VkAttachmentDescriptionStencilLayoutKHR::operator=(const safe_VkAttachmentDescriptionStencilLayoutKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    stencilInitialLayout = src.stencilInitialLayout;
+    stencilFinalLayout = src.stencilFinalLayout;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkAttachmentDescriptionStencilLayoutKHR::~safe_VkAttachmentDescriptionStencilLayoutKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkAttachmentDescriptionStencilLayoutKHR::initialize(const VkAttachmentDescriptionStencilLayoutKHR* in_struct)
+{
+    sType = in_struct->sType;
+    stencilInitialLayout = in_struct->stencilInitialLayout;
+    stencilFinalLayout = in_struct->stencilFinalLayout;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkAttachmentDescriptionStencilLayoutKHR::initialize(const safe_VkAttachmentDescriptionStencilLayoutKHR* src)
+{
+    sType = src->sType;
+    stencilInitialLayout = src->stencilInitialLayout;
+    stencilFinalLayout = src->stencilFinalLayout;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR::safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR(const VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR* in_struct) :
+    sType(in_struct->sType),
+    uniformBufferStandardLayout(in_struct->uniformBufferStandardLayout)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR::safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR::safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR(const safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR& src)
+{
+    sType = src.sType;
+    uniformBufferStandardLayout = src.uniformBufferStandardLayout;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR& safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR::operator=(const safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    uniformBufferStandardLayout = src.uniformBufferStandardLayout;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR::~safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR::initialize(const VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR* in_struct)
+{
+    sType = in_struct->sType;
+    uniformBufferStandardLayout = in_struct->uniformBufferStandardLayout;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR::initialize(const safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR* src)
+{
+    sType = src->sType;
+    uniformBufferStandardLayout = src->uniformBufferStandardLayout;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceBufferDeviceAddressFeaturesKHR::safe_VkPhysicalDeviceBufferDeviceAddressFeaturesKHR(const VkPhysicalDeviceBufferDeviceAddressFeaturesKHR* in_struct) :
+    sType(in_struct->sType),
+    bufferDeviceAddress(in_struct->bufferDeviceAddress),
+    bufferDeviceAddressCaptureReplay(in_struct->bufferDeviceAddressCaptureReplay),
+    bufferDeviceAddressMultiDevice(in_struct->bufferDeviceAddressMultiDevice)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceBufferDeviceAddressFeaturesKHR::safe_VkPhysicalDeviceBufferDeviceAddressFeaturesKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceBufferDeviceAddressFeaturesKHR::safe_VkPhysicalDeviceBufferDeviceAddressFeaturesKHR(const safe_VkPhysicalDeviceBufferDeviceAddressFeaturesKHR& src)
+{
+    sType = src.sType;
+    bufferDeviceAddress = src.bufferDeviceAddress;
+    bufferDeviceAddressCaptureReplay = src.bufferDeviceAddressCaptureReplay;
+    bufferDeviceAddressMultiDevice = src.bufferDeviceAddressMultiDevice;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceBufferDeviceAddressFeaturesKHR& safe_VkPhysicalDeviceBufferDeviceAddressFeaturesKHR::operator=(const safe_VkPhysicalDeviceBufferDeviceAddressFeaturesKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    bufferDeviceAddress = src.bufferDeviceAddress;
+    bufferDeviceAddressCaptureReplay = src.bufferDeviceAddressCaptureReplay;
+    bufferDeviceAddressMultiDevice = src.bufferDeviceAddressMultiDevice;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceBufferDeviceAddressFeaturesKHR::~safe_VkPhysicalDeviceBufferDeviceAddressFeaturesKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceBufferDeviceAddressFeaturesKHR::initialize(const VkPhysicalDeviceBufferDeviceAddressFeaturesKHR* in_struct)
+{
+    sType = in_struct->sType;
+    bufferDeviceAddress = in_struct->bufferDeviceAddress;
+    bufferDeviceAddressCaptureReplay = in_struct->bufferDeviceAddressCaptureReplay;
+    bufferDeviceAddressMultiDevice = in_struct->bufferDeviceAddressMultiDevice;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceBufferDeviceAddressFeaturesKHR::initialize(const safe_VkPhysicalDeviceBufferDeviceAddressFeaturesKHR* src)
+{
+    sType = src->sType;
+    bufferDeviceAddress = src->bufferDeviceAddress;
+    bufferDeviceAddressCaptureReplay = src->bufferDeviceAddressCaptureReplay;
+    bufferDeviceAddressMultiDevice = src->bufferDeviceAddressMultiDevice;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkBufferDeviceAddressInfoKHR::safe_VkBufferDeviceAddressInfoKHR(const VkBufferDeviceAddressInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    buffer(in_struct->buffer)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkBufferDeviceAddressInfoKHR::safe_VkBufferDeviceAddressInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkBufferDeviceAddressInfoKHR::safe_VkBufferDeviceAddressInfoKHR(const safe_VkBufferDeviceAddressInfoKHR& src)
+{
+    sType = src.sType;
+    buffer = src.buffer;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkBufferDeviceAddressInfoKHR& safe_VkBufferDeviceAddressInfoKHR::operator=(const safe_VkBufferDeviceAddressInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    buffer = src.buffer;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkBufferDeviceAddressInfoKHR::~safe_VkBufferDeviceAddressInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkBufferDeviceAddressInfoKHR::initialize(const VkBufferDeviceAddressInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    buffer = in_struct->buffer;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkBufferDeviceAddressInfoKHR::initialize(const safe_VkBufferDeviceAddressInfoKHR* src)
+{
+    sType = src->sType;
+    buffer = src->buffer;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkBufferOpaqueCaptureAddressCreateInfoKHR::safe_VkBufferOpaqueCaptureAddressCreateInfoKHR(const VkBufferOpaqueCaptureAddressCreateInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    opaqueCaptureAddress(in_struct->opaqueCaptureAddress)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkBufferOpaqueCaptureAddressCreateInfoKHR::safe_VkBufferOpaqueCaptureAddressCreateInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkBufferOpaqueCaptureAddressCreateInfoKHR::safe_VkBufferOpaqueCaptureAddressCreateInfoKHR(const safe_VkBufferOpaqueCaptureAddressCreateInfoKHR& src)
+{
+    sType = src.sType;
+    opaqueCaptureAddress = src.opaqueCaptureAddress;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkBufferOpaqueCaptureAddressCreateInfoKHR& safe_VkBufferOpaqueCaptureAddressCreateInfoKHR::operator=(const safe_VkBufferOpaqueCaptureAddressCreateInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    opaqueCaptureAddress = src.opaqueCaptureAddress;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkBufferOpaqueCaptureAddressCreateInfoKHR::~safe_VkBufferOpaqueCaptureAddressCreateInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkBufferOpaqueCaptureAddressCreateInfoKHR::initialize(const VkBufferOpaqueCaptureAddressCreateInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    opaqueCaptureAddress = in_struct->opaqueCaptureAddress;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkBufferOpaqueCaptureAddressCreateInfoKHR::initialize(const safe_VkBufferOpaqueCaptureAddressCreateInfoKHR* src)
+{
+    sType = src->sType;
+    opaqueCaptureAddress = src->opaqueCaptureAddress;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkMemoryOpaqueCaptureAddressAllocateInfoKHR::safe_VkMemoryOpaqueCaptureAddressAllocateInfoKHR(const VkMemoryOpaqueCaptureAddressAllocateInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    opaqueCaptureAddress(in_struct->opaqueCaptureAddress)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkMemoryOpaqueCaptureAddressAllocateInfoKHR::safe_VkMemoryOpaqueCaptureAddressAllocateInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkMemoryOpaqueCaptureAddressAllocateInfoKHR::safe_VkMemoryOpaqueCaptureAddressAllocateInfoKHR(const safe_VkMemoryOpaqueCaptureAddressAllocateInfoKHR& src)
+{
+    sType = src.sType;
+    opaqueCaptureAddress = src.opaqueCaptureAddress;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkMemoryOpaqueCaptureAddressAllocateInfoKHR& safe_VkMemoryOpaqueCaptureAddressAllocateInfoKHR::operator=(const safe_VkMemoryOpaqueCaptureAddressAllocateInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    opaqueCaptureAddress = src.opaqueCaptureAddress;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkMemoryOpaqueCaptureAddressAllocateInfoKHR::~safe_VkMemoryOpaqueCaptureAddressAllocateInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkMemoryOpaqueCaptureAddressAllocateInfoKHR::initialize(const VkMemoryOpaqueCaptureAddressAllocateInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    opaqueCaptureAddress = in_struct->opaqueCaptureAddress;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkMemoryOpaqueCaptureAddressAllocateInfoKHR::initialize(const safe_VkMemoryOpaqueCaptureAddressAllocateInfoKHR* src)
+{
+    sType = src->sType;
+    opaqueCaptureAddress = src->opaqueCaptureAddress;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDeviceMemoryOpaqueCaptureAddressInfoKHR::safe_VkDeviceMemoryOpaqueCaptureAddressInfoKHR(const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    memory(in_struct->memory)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDeviceMemoryOpaqueCaptureAddressInfoKHR::safe_VkDeviceMemoryOpaqueCaptureAddressInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkDeviceMemoryOpaqueCaptureAddressInfoKHR::safe_VkDeviceMemoryOpaqueCaptureAddressInfoKHR(const safe_VkDeviceMemoryOpaqueCaptureAddressInfoKHR& src)
+{
+    sType = src.sType;
+    memory = src.memory;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDeviceMemoryOpaqueCaptureAddressInfoKHR& safe_VkDeviceMemoryOpaqueCaptureAddressInfoKHR::operator=(const safe_VkDeviceMemoryOpaqueCaptureAddressInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    memory = src.memory;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDeviceMemoryOpaqueCaptureAddressInfoKHR::~safe_VkDeviceMemoryOpaqueCaptureAddressInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDeviceMemoryOpaqueCaptureAddressInfoKHR::initialize(const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    memory = in_struct->memory;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDeviceMemoryOpaqueCaptureAddressInfoKHR::initialize(const safe_VkDeviceMemoryOpaqueCaptureAddressInfoKHR* src)
+{
+    sType = src->sType;
+    memory = src->memory;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR::safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR(const VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR* in_struct) :
+    sType(in_struct->sType),
+    pipelineExecutableInfo(in_struct->pipelineExecutableInfo)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR::safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR::safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR(const safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR& src)
+{
+    sType = src.sType;
+    pipelineExecutableInfo = src.pipelineExecutableInfo;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR& safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR::operator=(const safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    pipelineExecutableInfo = src.pipelineExecutableInfo;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR::~safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR::initialize(const VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR* in_struct)
+{
+    sType = in_struct->sType;
+    pipelineExecutableInfo = in_struct->pipelineExecutableInfo;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR::initialize(const safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR* src)
+{
+    sType = src->sType;
+    pipelineExecutableInfo = src->pipelineExecutableInfo;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPipelineInfoKHR::safe_VkPipelineInfoKHR(const VkPipelineInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    pipeline(in_struct->pipeline)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPipelineInfoKHR::safe_VkPipelineInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkPipelineInfoKHR::safe_VkPipelineInfoKHR(const safe_VkPipelineInfoKHR& src)
+{
+    sType = src.sType;
+    pipeline = src.pipeline;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPipelineInfoKHR& safe_VkPipelineInfoKHR::operator=(const safe_VkPipelineInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    pipeline = src.pipeline;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPipelineInfoKHR::~safe_VkPipelineInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineInfoKHR::initialize(const VkPipelineInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    pipeline = in_struct->pipeline;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPipelineInfoKHR::initialize(const safe_VkPipelineInfoKHR* src)
+{
+    sType = src->sType;
+    pipeline = src->pipeline;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPipelineExecutablePropertiesKHR::safe_VkPipelineExecutablePropertiesKHR(const VkPipelineExecutablePropertiesKHR* in_struct) :
+    sType(in_struct->sType),
+    stages(in_struct->stages),
+    subgroupSize(in_struct->subgroupSize)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        name[i] = in_struct->name[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        description[i] = in_struct->description[i];
+    }
+}
+
+safe_VkPipelineExecutablePropertiesKHR::safe_VkPipelineExecutablePropertiesKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkPipelineExecutablePropertiesKHR::safe_VkPipelineExecutablePropertiesKHR(const safe_VkPipelineExecutablePropertiesKHR& src)
+{
+    sType = src.sType;
+    stages = src.stages;
+    subgroupSize = src.subgroupSize;
+    pNext = SafePnextCopy(src.pNext);
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        name[i] = src.name[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        description[i] = src.description[i];
+    }
+}
+
+safe_VkPipelineExecutablePropertiesKHR& safe_VkPipelineExecutablePropertiesKHR::operator=(const safe_VkPipelineExecutablePropertiesKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    stages = src.stages;
+    subgroupSize = src.subgroupSize;
+    pNext = SafePnextCopy(src.pNext);
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        name[i] = src.name[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        description[i] = src.description[i];
+    }
+
+    return *this;
+}
+
+safe_VkPipelineExecutablePropertiesKHR::~safe_VkPipelineExecutablePropertiesKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineExecutablePropertiesKHR::initialize(const VkPipelineExecutablePropertiesKHR* in_struct)
+{
+    sType = in_struct->sType;
+    stages = in_struct->stages;
+    subgroupSize = in_struct->subgroupSize;
+    pNext = SafePnextCopy(in_struct->pNext);
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        name[i] = in_struct->name[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        description[i] = in_struct->description[i];
+    }
+}
+
+void safe_VkPipelineExecutablePropertiesKHR::initialize(const safe_VkPipelineExecutablePropertiesKHR* src)
+{
+    sType = src->sType;
+    stages = src->stages;
+    subgroupSize = src->subgroupSize;
+    pNext = SafePnextCopy(src->pNext);
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        name[i] = src->name[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        description[i] = src->description[i];
+    }
+}
+
+safe_VkPipelineExecutableInfoKHR::safe_VkPipelineExecutableInfoKHR(const VkPipelineExecutableInfoKHR* in_struct) :
+    sType(in_struct->sType),
+    pipeline(in_struct->pipeline),
+    executableIndex(in_struct->executableIndex)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPipelineExecutableInfoKHR::safe_VkPipelineExecutableInfoKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkPipelineExecutableInfoKHR::safe_VkPipelineExecutableInfoKHR(const safe_VkPipelineExecutableInfoKHR& src)
+{
+    sType = src.sType;
+    pipeline = src.pipeline;
+    executableIndex = src.executableIndex;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPipelineExecutableInfoKHR& safe_VkPipelineExecutableInfoKHR::operator=(const safe_VkPipelineExecutableInfoKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    pipeline = src.pipeline;
+    executableIndex = src.executableIndex;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPipelineExecutableInfoKHR::~safe_VkPipelineExecutableInfoKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineExecutableInfoKHR::initialize(const VkPipelineExecutableInfoKHR* in_struct)
+{
+    sType = in_struct->sType;
+    pipeline = in_struct->pipeline;
+    executableIndex = in_struct->executableIndex;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPipelineExecutableInfoKHR::initialize(const safe_VkPipelineExecutableInfoKHR* src)
+{
+    sType = src->sType;
+    pipeline = src->pipeline;
+    executableIndex = src->executableIndex;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPipelineExecutableStatisticKHR::safe_VkPipelineExecutableStatisticKHR(const VkPipelineExecutableStatisticKHR* in_struct) :
+    sType(in_struct->sType),
+    format(in_struct->format),
+    value(in_struct->value)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        name[i] = in_struct->name[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        description[i] = in_struct->description[i];
+    }
+}
+
+safe_VkPipelineExecutableStatisticKHR::safe_VkPipelineExecutableStatisticKHR() :
+    pNext(nullptr)
+{}
+
+safe_VkPipelineExecutableStatisticKHR::safe_VkPipelineExecutableStatisticKHR(const safe_VkPipelineExecutableStatisticKHR& src)
+{
+    sType = src.sType;
+    format = src.format;
+    value = src.value;
+    pNext = SafePnextCopy(src.pNext);
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        name[i] = src.name[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        description[i] = src.description[i];
+    }
+}
+
+safe_VkPipelineExecutableStatisticKHR& safe_VkPipelineExecutableStatisticKHR::operator=(const safe_VkPipelineExecutableStatisticKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    format = src.format;
+    value = src.value;
+    pNext = SafePnextCopy(src.pNext);
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        name[i] = src.name[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        description[i] = src.description[i];
+    }
+
+    return *this;
+}
+
+safe_VkPipelineExecutableStatisticKHR::~safe_VkPipelineExecutableStatisticKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineExecutableStatisticKHR::initialize(const VkPipelineExecutableStatisticKHR* in_struct)
+{
+    sType = in_struct->sType;
+    format = in_struct->format;
+    value = in_struct->value;
+    pNext = SafePnextCopy(in_struct->pNext);
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        name[i] = in_struct->name[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        description[i] = in_struct->description[i];
+    }
+}
+
+void safe_VkPipelineExecutableStatisticKHR::initialize(const safe_VkPipelineExecutableStatisticKHR* src)
+{
+    sType = src->sType;
+    format = src->format;
+    value = src->value;
+    pNext = SafePnextCopy(src->pNext);
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        name[i] = src->name[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        description[i] = src->description[i];
+    }
+}
+
+safe_VkPipelineExecutableInternalRepresentationKHR::safe_VkPipelineExecutableInternalRepresentationKHR(const VkPipelineExecutableInternalRepresentationKHR* in_struct) :
+    sType(in_struct->sType),
+    isText(in_struct->isText),
+    dataSize(in_struct->dataSize),
+    pData(in_struct->pData)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        name[i] = in_struct->name[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        description[i] = in_struct->description[i];
+    }
+}
+
+safe_VkPipelineExecutableInternalRepresentationKHR::safe_VkPipelineExecutableInternalRepresentationKHR() :
+    pNext(nullptr),
+    pData(nullptr)
+{}
+
+safe_VkPipelineExecutableInternalRepresentationKHR::safe_VkPipelineExecutableInternalRepresentationKHR(const safe_VkPipelineExecutableInternalRepresentationKHR& src)
+{
+    sType = src.sType;
+    isText = src.isText;
+    dataSize = src.dataSize;
+    pData = src.pData;
+    pNext = SafePnextCopy(src.pNext);
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        name[i] = src.name[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        description[i] = src.description[i];
+    }
+}
+
+safe_VkPipelineExecutableInternalRepresentationKHR& safe_VkPipelineExecutableInternalRepresentationKHR::operator=(const safe_VkPipelineExecutableInternalRepresentationKHR& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    isText = src.isText;
+    dataSize = src.dataSize;
+    pData = src.pData;
+    pNext = SafePnextCopy(src.pNext);
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        name[i] = src.name[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        description[i] = src.description[i];
+    }
+
+    return *this;
+}
+
+safe_VkPipelineExecutableInternalRepresentationKHR::~safe_VkPipelineExecutableInternalRepresentationKHR()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineExecutableInternalRepresentationKHR::initialize(const VkPipelineExecutableInternalRepresentationKHR* in_struct)
+{
+    sType = in_struct->sType;
+    isText = in_struct->isText;
+    dataSize = in_struct->dataSize;
+    pData = in_struct->pData;
+    pNext = SafePnextCopy(in_struct->pNext);
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        name[i] = in_struct->name[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        description[i] = in_struct->description[i];
+    }
+}
+
+void safe_VkPipelineExecutableInternalRepresentationKHR::initialize(const safe_VkPipelineExecutableInternalRepresentationKHR* src)
+{
+    sType = src->sType;
+    isText = src->isText;
+    dataSize = src->dataSize;
+    pData = src->pData;
+    pNext = SafePnextCopy(src->pNext);
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        name[i] = src->name[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        description[i] = src->description[i];
+    }
+}
+
+safe_VkDebugReportCallbackCreateInfoEXT::safe_VkDebugReportCallbackCreateInfoEXT(const VkDebugReportCallbackCreateInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    pfnCallback(in_struct->pfnCallback),
+    pUserData(in_struct->pUserData)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDebugReportCallbackCreateInfoEXT::safe_VkDebugReportCallbackCreateInfoEXT() :
+    pNext(nullptr),
+    pUserData(nullptr)
+{}
+
+safe_VkDebugReportCallbackCreateInfoEXT::safe_VkDebugReportCallbackCreateInfoEXT(const safe_VkDebugReportCallbackCreateInfoEXT& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    pfnCallback = src.pfnCallback;
+    pUserData = src.pUserData;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDebugReportCallbackCreateInfoEXT& safe_VkDebugReportCallbackCreateInfoEXT::operator=(const safe_VkDebugReportCallbackCreateInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    pfnCallback = src.pfnCallback;
+    pUserData = src.pUserData;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDebugReportCallbackCreateInfoEXT::~safe_VkDebugReportCallbackCreateInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDebugReportCallbackCreateInfoEXT::initialize(const VkDebugReportCallbackCreateInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    pfnCallback = in_struct->pfnCallback;
+    pUserData = in_struct->pUserData;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDebugReportCallbackCreateInfoEXT::initialize(const safe_VkDebugReportCallbackCreateInfoEXT* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    pfnCallback = src->pfnCallback;
+    pUserData = src->pUserData;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPipelineRasterizationStateRasterizationOrderAMD::safe_VkPipelineRasterizationStateRasterizationOrderAMD(const VkPipelineRasterizationStateRasterizationOrderAMD* in_struct) :
+    sType(in_struct->sType),
+    rasterizationOrder(in_struct->rasterizationOrder)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPipelineRasterizationStateRasterizationOrderAMD::safe_VkPipelineRasterizationStateRasterizationOrderAMD() :
+    pNext(nullptr)
+{}
+
+safe_VkPipelineRasterizationStateRasterizationOrderAMD::safe_VkPipelineRasterizationStateRasterizationOrderAMD(const safe_VkPipelineRasterizationStateRasterizationOrderAMD& src)
+{
+    sType = src.sType;
+    rasterizationOrder = src.rasterizationOrder;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPipelineRasterizationStateRasterizationOrderAMD& safe_VkPipelineRasterizationStateRasterizationOrderAMD::operator=(const safe_VkPipelineRasterizationStateRasterizationOrderAMD& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    rasterizationOrder = src.rasterizationOrder;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPipelineRasterizationStateRasterizationOrderAMD::~safe_VkPipelineRasterizationStateRasterizationOrderAMD()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineRasterizationStateRasterizationOrderAMD::initialize(const VkPipelineRasterizationStateRasterizationOrderAMD* in_struct)
+{
+    sType = in_struct->sType;
+    rasterizationOrder = in_struct->rasterizationOrder;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPipelineRasterizationStateRasterizationOrderAMD::initialize(const safe_VkPipelineRasterizationStateRasterizationOrderAMD* src)
+{
+    sType = src->sType;
+    rasterizationOrder = src->rasterizationOrder;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDebugMarkerObjectNameInfoEXT::safe_VkDebugMarkerObjectNameInfoEXT(const VkDebugMarkerObjectNameInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    objectType(in_struct->objectType),
+    object(in_struct->object)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    pObjectName = SafeStringCopy(in_struct->pObjectName);
+}
+
+safe_VkDebugMarkerObjectNameInfoEXT::safe_VkDebugMarkerObjectNameInfoEXT() :
+    pNext(nullptr),
+    pObjectName(nullptr)
+{}
+
+safe_VkDebugMarkerObjectNameInfoEXT::safe_VkDebugMarkerObjectNameInfoEXT(const safe_VkDebugMarkerObjectNameInfoEXT& src)
+{
+    sType = src.sType;
+    objectType = src.objectType;
+    object = src.object;
+    pNext = SafePnextCopy(src.pNext);
+    pObjectName = SafeStringCopy(src.pObjectName);
+}
+
+safe_VkDebugMarkerObjectNameInfoEXT& safe_VkDebugMarkerObjectNameInfoEXT::operator=(const safe_VkDebugMarkerObjectNameInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pObjectName) delete [] pObjectName;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    objectType = src.objectType;
+    object = src.object;
+    pNext = SafePnextCopy(src.pNext);
+    pObjectName = SafeStringCopy(src.pObjectName);
+
+    return *this;
+}
+
+safe_VkDebugMarkerObjectNameInfoEXT::~safe_VkDebugMarkerObjectNameInfoEXT()
+{
+    if (pObjectName) delete [] pObjectName;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDebugMarkerObjectNameInfoEXT::initialize(const VkDebugMarkerObjectNameInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    objectType = in_struct->objectType;
+    object = in_struct->object;
+    pNext = SafePnextCopy(in_struct->pNext);
+    pObjectName = SafeStringCopy(in_struct->pObjectName);
+}
+
+void safe_VkDebugMarkerObjectNameInfoEXT::initialize(const safe_VkDebugMarkerObjectNameInfoEXT* src)
+{
+    sType = src->sType;
+    objectType = src->objectType;
+    object = src->object;
+    pNext = SafePnextCopy(src->pNext);
+    pObjectName = SafeStringCopy(src->pObjectName);
+}
+
+safe_VkDebugMarkerObjectTagInfoEXT::safe_VkDebugMarkerObjectTagInfoEXT(const VkDebugMarkerObjectTagInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    objectType(in_struct->objectType),
+    object(in_struct->object),
+    tagName(in_struct->tagName),
+    tagSize(in_struct->tagSize),
+    pTag(in_struct->pTag)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDebugMarkerObjectTagInfoEXT::safe_VkDebugMarkerObjectTagInfoEXT() :
+    pNext(nullptr),
+    pTag(nullptr)
+{}
+
+safe_VkDebugMarkerObjectTagInfoEXT::safe_VkDebugMarkerObjectTagInfoEXT(const safe_VkDebugMarkerObjectTagInfoEXT& src)
+{
+    sType = src.sType;
+    objectType = src.objectType;
+    object = src.object;
+    tagName = src.tagName;
+    tagSize = src.tagSize;
+    pTag = src.pTag;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDebugMarkerObjectTagInfoEXT& safe_VkDebugMarkerObjectTagInfoEXT::operator=(const safe_VkDebugMarkerObjectTagInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    objectType = src.objectType;
+    object = src.object;
+    tagName = src.tagName;
+    tagSize = src.tagSize;
+    pTag = src.pTag;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDebugMarkerObjectTagInfoEXT::~safe_VkDebugMarkerObjectTagInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDebugMarkerObjectTagInfoEXT::initialize(const VkDebugMarkerObjectTagInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    objectType = in_struct->objectType;
+    object = in_struct->object;
+    tagName = in_struct->tagName;
+    tagSize = in_struct->tagSize;
+    pTag = in_struct->pTag;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDebugMarkerObjectTagInfoEXT::initialize(const safe_VkDebugMarkerObjectTagInfoEXT* src)
+{
+    sType = src->sType;
+    objectType = src->objectType;
+    object = src->object;
+    tagName = src->tagName;
+    tagSize = src->tagSize;
+    pTag = src->pTag;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDebugMarkerMarkerInfoEXT::safe_VkDebugMarkerMarkerInfoEXT(const VkDebugMarkerMarkerInfoEXT* in_struct) :
+    sType(in_struct->sType)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    pMarkerName = SafeStringCopy(in_struct->pMarkerName);
+    for (uint32_t i = 0; i < 4; ++i) {
+        color[i] = in_struct->color[i];
+    }
+}
+
+safe_VkDebugMarkerMarkerInfoEXT::safe_VkDebugMarkerMarkerInfoEXT() :
+    pNext(nullptr),
+    pMarkerName(nullptr)
+{}
+
+safe_VkDebugMarkerMarkerInfoEXT::safe_VkDebugMarkerMarkerInfoEXT(const safe_VkDebugMarkerMarkerInfoEXT& src)
+{
+    sType = src.sType;
+    pNext = SafePnextCopy(src.pNext);
+    pMarkerName = SafeStringCopy(src.pMarkerName);
+    for (uint32_t i = 0; i < 4; ++i) {
+        color[i] = src.color[i];
+    }
+}
+
+safe_VkDebugMarkerMarkerInfoEXT& safe_VkDebugMarkerMarkerInfoEXT::operator=(const safe_VkDebugMarkerMarkerInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pMarkerName) delete [] pMarkerName;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    pNext = SafePnextCopy(src.pNext);
+    pMarkerName = SafeStringCopy(src.pMarkerName);
+    for (uint32_t i = 0; i < 4; ++i) {
+        color[i] = src.color[i];
+    }
+
+    return *this;
+}
+
+safe_VkDebugMarkerMarkerInfoEXT::~safe_VkDebugMarkerMarkerInfoEXT()
+{
+    if (pMarkerName) delete [] pMarkerName;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDebugMarkerMarkerInfoEXT::initialize(const VkDebugMarkerMarkerInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    pNext = SafePnextCopy(in_struct->pNext);
+    pMarkerName = SafeStringCopy(in_struct->pMarkerName);
+    for (uint32_t i = 0; i < 4; ++i) {
+        color[i] = in_struct->color[i];
+    }
+}
+
+void safe_VkDebugMarkerMarkerInfoEXT::initialize(const safe_VkDebugMarkerMarkerInfoEXT* src)
+{
+    sType = src->sType;
+    pNext = SafePnextCopy(src->pNext);
+    pMarkerName = SafeStringCopy(src->pMarkerName);
+    for (uint32_t i = 0; i < 4; ++i) {
+        color[i] = src->color[i];
+    }
+}
+
+safe_VkDedicatedAllocationImageCreateInfoNV::safe_VkDedicatedAllocationImageCreateInfoNV(const VkDedicatedAllocationImageCreateInfoNV* in_struct) :
+    sType(in_struct->sType),
+    dedicatedAllocation(in_struct->dedicatedAllocation)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDedicatedAllocationImageCreateInfoNV::safe_VkDedicatedAllocationImageCreateInfoNV() :
+    pNext(nullptr)
+{}
+
+safe_VkDedicatedAllocationImageCreateInfoNV::safe_VkDedicatedAllocationImageCreateInfoNV(const safe_VkDedicatedAllocationImageCreateInfoNV& src)
+{
+    sType = src.sType;
+    dedicatedAllocation = src.dedicatedAllocation;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDedicatedAllocationImageCreateInfoNV& safe_VkDedicatedAllocationImageCreateInfoNV::operator=(const safe_VkDedicatedAllocationImageCreateInfoNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    dedicatedAllocation = src.dedicatedAllocation;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDedicatedAllocationImageCreateInfoNV::~safe_VkDedicatedAllocationImageCreateInfoNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDedicatedAllocationImageCreateInfoNV::initialize(const VkDedicatedAllocationImageCreateInfoNV* in_struct)
+{
+    sType = in_struct->sType;
+    dedicatedAllocation = in_struct->dedicatedAllocation;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDedicatedAllocationImageCreateInfoNV::initialize(const safe_VkDedicatedAllocationImageCreateInfoNV* src)
+{
+    sType = src->sType;
+    dedicatedAllocation = src->dedicatedAllocation;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDedicatedAllocationBufferCreateInfoNV::safe_VkDedicatedAllocationBufferCreateInfoNV(const VkDedicatedAllocationBufferCreateInfoNV* in_struct) :
+    sType(in_struct->sType),
+    dedicatedAllocation(in_struct->dedicatedAllocation)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDedicatedAllocationBufferCreateInfoNV::safe_VkDedicatedAllocationBufferCreateInfoNV() :
+    pNext(nullptr)
+{}
+
+safe_VkDedicatedAllocationBufferCreateInfoNV::safe_VkDedicatedAllocationBufferCreateInfoNV(const safe_VkDedicatedAllocationBufferCreateInfoNV& src)
+{
+    sType = src.sType;
+    dedicatedAllocation = src.dedicatedAllocation;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDedicatedAllocationBufferCreateInfoNV& safe_VkDedicatedAllocationBufferCreateInfoNV::operator=(const safe_VkDedicatedAllocationBufferCreateInfoNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    dedicatedAllocation = src.dedicatedAllocation;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDedicatedAllocationBufferCreateInfoNV::~safe_VkDedicatedAllocationBufferCreateInfoNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDedicatedAllocationBufferCreateInfoNV::initialize(const VkDedicatedAllocationBufferCreateInfoNV* in_struct)
+{
+    sType = in_struct->sType;
+    dedicatedAllocation = in_struct->dedicatedAllocation;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDedicatedAllocationBufferCreateInfoNV::initialize(const safe_VkDedicatedAllocationBufferCreateInfoNV* src)
+{
+    sType = src->sType;
+    dedicatedAllocation = src->dedicatedAllocation;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDedicatedAllocationMemoryAllocateInfoNV::safe_VkDedicatedAllocationMemoryAllocateInfoNV(const VkDedicatedAllocationMemoryAllocateInfoNV* in_struct) :
+    sType(in_struct->sType),
+    image(in_struct->image),
+    buffer(in_struct->buffer)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDedicatedAllocationMemoryAllocateInfoNV::safe_VkDedicatedAllocationMemoryAllocateInfoNV() :
+    pNext(nullptr)
+{}
+
+safe_VkDedicatedAllocationMemoryAllocateInfoNV::safe_VkDedicatedAllocationMemoryAllocateInfoNV(const safe_VkDedicatedAllocationMemoryAllocateInfoNV& src)
+{
+    sType = src.sType;
+    image = src.image;
+    buffer = src.buffer;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDedicatedAllocationMemoryAllocateInfoNV& safe_VkDedicatedAllocationMemoryAllocateInfoNV::operator=(const safe_VkDedicatedAllocationMemoryAllocateInfoNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    image = src.image;
+    buffer = src.buffer;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDedicatedAllocationMemoryAllocateInfoNV::~safe_VkDedicatedAllocationMemoryAllocateInfoNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDedicatedAllocationMemoryAllocateInfoNV::initialize(const VkDedicatedAllocationMemoryAllocateInfoNV* in_struct)
+{
+    sType = in_struct->sType;
+    image = in_struct->image;
+    buffer = in_struct->buffer;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDedicatedAllocationMemoryAllocateInfoNV::initialize(const safe_VkDedicatedAllocationMemoryAllocateInfoNV* src)
+{
+    sType = src->sType;
+    image = src->image;
+    buffer = src->buffer;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT::safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT(const VkPhysicalDeviceTransformFeedbackFeaturesEXT* in_struct) :
+    sType(in_struct->sType),
+    transformFeedback(in_struct->transformFeedback),
+    geometryStreams(in_struct->geometryStreams)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT::safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT::safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT(const safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT& src)
+{
+    sType = src.sType;
+    transformFeedback = src.transformFeedback;
+    geometryStreams = src.geometryStreams;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT& safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT::operator=(const safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    transformFeedback = src.transformFeedback;
+    geometryStreams = src.geometryStreams;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT::~safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT::initialize(const VkPhysicalDeviceTransformFeedbackFeaturesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    transformFeedback = in_struct->transformFeedback;
+    geometryStreams = in_struct->geometryStreams;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT::initialize(const safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT* src)
+{
+    sType = src->sType;
+    transformFeedback = src->transformFeedback;
+    geometryStreams = src->geometryStreams;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT::safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT(const VkPhysicalDeviceTransformFeedbackPropertiesEXT* in_struct) :
+    sType(in_struct->sType),
+    maxTransformFeedbackStreams(in_struct->maxTransformFeedbackStreams),
+    maxTransformFeedbackBuffers(in_struct->maxTransformFeedbackBuffers),
+    maxTransformFeedbackBufferSize(in_struct->maxTransformFeedbackBufferSize),
+    maxTransformFeedbackStreamDataSize(in_struct->maxTransformFeedbackStreamDataSize),
+    maxTransformFeedbackBufferDataSize(in_struct->maxTransformFeedbackBufferDataSize),
+    maxTransformFeedbackBufferDataStride(in_struct->maxTransformFeedbackBufferDataStride),
+    transformFeedbackQueries(in_struct->transformFeedbackQueries),
+    transformFeedbackStreamsLinesTriangles(in_struct->transformFeedbackStreamsLinesTriangles),
+    transformFeedbackRasterizationStreamSelect(in_struct->transformFeedbackRasterizationStreamSelect),
+    transformFeedbackDraw(in_struct->transformFeedbackDraw)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT::safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT::safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT(const safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT& src)
+{
+    sType = src.sType;
+    maxTransformFeedbackStreams = src.maxTransformFeedbackStreams;
+    maxTransformFeedbackBuffers = src.maxTransformFeedbackBuffers;
+    maxTransformFeedbackBufferSize = src.maxTransformFeedbackBufferSize;
+    maxTransformFeedbackStreamDataSize = src.maxTransformFeedbackStreamDataSize;
+    maxTransformFeedbackBufferDataSize = src.maxTransformFeedbackBufferDataSize;
+    maxTransformFeedbackBufferDataStride = src.maxTransformFeedbackBufferDataStride;
+    transformFeedbackQueries = src.transformFeedbackQueries;
+    transformFeedbackStreamsLinesTriangles = src.transformFeedbackStreamsLinesTriangles;
+    transformFeedbackRasterizationStreamSelect = src.transformFeedbackRasterizationStreamSelect;
+    transformFeedbackDraw = src.transformFeedbackDraw;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT& safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT::operator=(const safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    maxTransformFeedbackStreams = src.maxTransformFeedbackStreams;
+    maxTransformFeedbackBuffers = src.maxTransformFeedbackBuffers;
+    maxTransformFeedbackBufferSize = src.maxTransformFeedbackBufferSize;
+    maxTransformFeedbackStreamDataSize = src.maxTransformFeedbackStreamDataSize;
+    maxTransformFeedbackBufferDataSize = src.maxTransformFeedbackBufferDataSize;
+    maxTransformFeedbackBufferDataStride = src.maxTransformFeedbackBufferDataStride;
+    transformFeedbackQueries = src.transformFeedbackQueries;
+    transformFeedbackStreamsLinesTriangles = src.transformFeedbackStreamsLinesTriangles;
+    transformFeedbackRasterizationStreamSelect = src.transformFeedbackRasterizationStreamSelect;
+    transformFeedbackDraw = src.transformFeedbackDraw;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT::~safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT::initialize(const VkPhysicalDeviceTransformFeedbackPropertiesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    maxTransformFeedbackStreams = in_struct->maxTransformFeedbackStreams;
+    maxTransformFeedbackBuffers = in_struct->maxTransformFeedbackBuffers;
+    maxTransformFeedbackBufferSize = in_struct->maxTransformFeedbackBufferSize;
+    maxTransformFeedbackStreamDataSize = in_struct->maxTransformFeedbackStreamDataSize;
+    maxTransformFeedbackBufferDataSize = in_struct->maxTransformFeedbackBufferDataSize;
+    maxTransformFeedbackBufferDataStride = in_struct->maxTransformFeedbackBufferDataStride;
+    transformFeedbackQueries = in_struct->transformFeedbackQueries;
+    transformFeedbackStreamsLinesTriangles = in_struct->transformFeedbackStreamsLinesTriangles;
+    transformFeedbackRasterizationStreamSelect = in_struct->transformFeedbackRasterizationStreamSelect;
+    transformFeedbackDraw = in_struct->transformFeedbackDraw;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT::initialize(const safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT* src)
+{
+    sType = src->sType;
+    maxTransformFeedbackStreams = src->maxTransformFeedbackStreams;
+    maxTransformFeedbackBuffers = src->maxTransformFeedbackBuffers;
+    maxTransformFeedbackBufferSize = src->maxTransformFeedbackBufferSize;
+    maxTransformFeedbackStreamDataSize = src->maxTransformFeedbackStreamDataSize;
+    maxTransformFeedbackBufferDataSize = src->maxTransformFeedbackBufferDataSize;
+    maxTransformFeedbackBufferDataStride = src->maxTransformFeedbackBufferDataStride;
+    transformFeedbackQueries = src->transformFeedbackQueries;
+    transformFeedbackStreamsLinesTriangles = src->transformFeedbackStreamsLinesTriangles;
+    transformFeedbackRasterizationStreamSelect = src->transformFeedbackRasterizationStreamSelect;
+    transformFeedbackDraw = src->transformFeedbackDraw;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPipelineRasterizationStateStreamCreateInfoEXT::safe_VkPipelineRasterizationStateStreamCreateInfoEXT(const VkPipelineRasterizationStateStreamCreateInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    rasterizationStream(in_struct->rasterizationStream)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPipelineRasterizationStateStreamCreateInfoEXT::safe_VkPipelineRasterizationStateStreamCreateInfoEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPipelineRasterizationStateStreamCreateInfoEXT::safe_VkPipelineRasterizationStateStreamCreateInfoEXT(const safe_VkPipelineRasterizationStateStreamCreateInfoEXT& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    rasterizationStream = src.rasterizationStream;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPipelineRasterizationStateStreamCreateInfoEXT& safe_VkPipelineRasterizationStateStreamCreateInfoEXT::operator=(const safe_VkPipelineRasterizationStateStreamCreateInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    rasterizationStream = src.rasterizationStream;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPipelineRasterizationStateStreamCreateInfoEXT::~safe_VkPipelineRasterizationStateStreamCreateInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineRasterizationStateStreamCreateInfoEXT::initialize(const VkPipelineRasterizationStateStreamCreateInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    rasterizationStream = in_struct->rasterizationStream;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPipelineRasterizationStateStreamCreateInfoEXT::initialize(const safe_VkPipelineRasterizationStateStreamCreateInfoEXT* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    rasterizationStream = src->rasterizationStream;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkImageViewHandleInfoNVX::safe_VkImageViewHandleInfoNVX(const VkImageViewHandleInfoNVX* in_struct) :
+    sType(in_struct->sType),
+    imageView(in_struct->imageView),
+    descriptorType(in_struct->descriptorType),
+    sampler(in_struct->sampler)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkImageViewHandleInfoNVX::safe_VkImageViewHandleInfoNVX() :
+    pNext(nullptr)
+{}
+
+safe_VkImageViewHandleInfoNVX::safe_VkImageViewHandleInfoNVX(const safe_VkImageViewHandleInfoNVX& src)
+{
+    sType = src.sType;
+    imageView = src.imageView;
+    descriptorType = src.descriptorType;
+    sampler = src.sampler;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkImageViewHandleInfoNVX& safe_VkImageViewHandleInfoNVX::operator=(const safe_VkImageViewHandleInfoNVX& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    imageView = src.imageView;
+    descriptorType = src.descriptorType;
+    sampler = src.sampler;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkImageViewHandleInfoNVX::~safe_VkImageViewHandleInfoNVX()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkImageViewHandleInfoNVX::initialize(const VkImageViewHandleInfoNVX* in_struct)
+{
+    sType = in_struct->sType;
+    imageView = in_struct->imageView;
+    descriptorType = in_struct->descriptorType;
+    sampler = in_struct->sampler;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkImageViewHandleInfoNVX::initialize(const safe_VkImageViewHandleInfoNVX* src)
+{
+    sType = src->sType;
+    imageView = src->imageView;
+    descriptorType = src->descriptorType;
+    sampler = src->sampler;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkTextureLODGatherFormatPropertiesAMD::safe_VkTextureLODGatherFormatPropertiesAMD(const VkTextureLODGatherFormatPropertiesAMD* in_struct) :
+    sType(in_struct->sType),
+    supportsTextureGatherLODBiasAMD(in_struct->supportsTextureGatherLODBiasAMD)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkTextureLODGatherFormatPropertiesAMD::safe_VkTextureLODGatherFormatPropertiesAMD() :
+    pNext(nullptr)
+{}
+
+safe_VkTextureLODGatherFormatPropertiesAMD::safe_VkTextureLODGatherFormatPropertiesAMD(const safe_VkTextureLODGatherFormatPropertiesAMD& src)
+{
+    sType = src.sType;
+    supportsTextureGatherLODBiasAMD = src.supportsTextureGatherLODBiasAMD;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkTextureLODGatherFormatPropertiesAMD& safe_VkTextureLODGatherFormatPropertiesAMD::operator=(const safe_VkTextureLODGatherFormatPropertiesAMD& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    supportsTextureGatherLODBiasAMD = src.supportsTextureGatherLODBiasAMD;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkTextureLODGatherFormatPropertiesAMD::~safe_VkTextureLODGatherFormatPropertiesAMD()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkTextureLODGatherFormatPropertiesAMD::initialize(const VkTextureLODGatherFormatPropertiesAMD* in_struct)
+{
+    sType = in_struct->sType;
+    supportsTextureGatherLODBiasAMD = in_struct->supportsTextureGatherLODBiasAMD;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkTextureLODGatherFormatPropertiesAMD::initialize(const safe_VkTextureLODGatherFormatPropertiesAMD* src)
+{
+    sType = src->sType;
+    supportsTextureGatherLODBiasAMD = src->supportsTextureGatherLODBiasAMD;
+    pNext = SafePnextCopy(src->pNext);
+}
+#ifdef VK_USE_PLATFORM_GGP
+
+
+safe_VkStreamDescriptorSurfaceCreateInfoGGP::safe_VkStreamDescriptorSurfaceCreateInfoGGP(const VkStreamDescriptorSurfaceCreateInfoGGP* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    streamDescriptor(in_struct->streamDescriptor)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkStreamDescriptorSurfaceCreateInfoGGP::safe_VkStreamDescriptorSurfaceCreateInfoGGP() :
+    pNext(nullptr)
+{}
+
+safe_VkStreamDescriptorSurfaceCreateInfoGGP::safe_VkStreamDescriptorSurfaceCreateInfoGGP(const safe_VkStreamDescriptorSurfaceCreateInfoGGP& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    streamDescriptor = src.streamDescriptor;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkStreamDescriptorSurfaceCreateInfoGGP& safe_VkStreamDescriptorSurfaceCreateInfoGGP::operator=(const safe_VkStreamDescriptorSurfaceCreateInfoGGP& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    streamDescriptor = src.streamDescriptor;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkStreamDescriptorSurfaceCreateInfoGGP::~safe_VkStreamDescriptorSurfaceCreateInfoGGP()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkStreamDescriptorSurfaceCreateInfoGGP::initialize(const VkStreamDescriptorSurfaceCreateInfoGGP* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    streamDescriptor = in_struct->streamDescriptor;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkStreamDescriptorSurfaceCreateInfoGGP::initialize(const safe_VkStreamDescriptorSurfaceCreateInfoGGP* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    streamDescriptor = src->streamDescriptor;
+    pNext = SafePnextCopy(src->pNext);
+}
+#endif // VK_USE_PLATFORM_GGP
+
+
+safe_VkPhysicalDeviceCornerSampledImageFeaturesNV::safe_VkPhysicalDeviceCornerSampledImageFeaturesNV(const VkPhysicalDeviceCornerSampledImageFeaturesNV* in_struct) :
+    sType(in_struct->sType),
+    cornerSampledImage(in_struct->cornerSampledImage)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceCornerSampledImageFeaturesNV::safe_VkPhysicalDeviceCornerSampledImageFeaturesNV() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceCornerSampledImageFeaturesNV::safe_VkPhysicalDeviceCornerSampledImageFeaturesNV(const safe_VkPhysicalDeviceCornerSampledImageFeaturesNV& src)
+{
+    sType = src.sType;
+    cornerSampledImage = src.cornerSampledImage;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceCornerSampledImageFeaturesNV& safe_VkPhysicalDeviceCornerSampledImageFeaturesNV::operator=(const safe_VkPhysicalDeviceCornerSampledImageFeaturesNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    cornerSampledImage = src.cornerSampledImage;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceCornerSampledImageFeaturesNV::~safe_VkPhysicalDeviceCornerSampledImageFeaturesNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceCornerSampledImageFeaturesNV::initialize(const VkPhysicalDeviceCornerSampledImageFeaturesNV* in_struct)
+{
+    sType = in_struct->sType;
+    cornerSampledImage = in_struct->cornerSampledImage;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceCornerSampledImageFeaturesNV::initialize(const safe_VkPhysicalDeviceCornerSampledImageFeaturesNV* src)
+{
+    sType = src->sType;
+    cornerSampledImage = src->cornerSampledImage;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkExternalMemoryImageCreateInfoNV::safe_VkExternalMemoryImageCreateInfoNV(const VkExternalMemoryImageCreateInfoNV* in_struct) :
+    sType(in_struct->sType),
+    handleTypes(in_struct->handleTypes)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkExternalMemoryImageCreateInfoNV::safe_VkExternalMemoryImageCreateInfoNV() :
+    pNext(nullptr)
+{}
+
+safe_VkExternalMemoryImageCreateInfoNV::safe_VkExternalMemoryImageCreateInfoNV(const safe_VkExternalMemoryImageCreateInfoNV& src)
+{
+    sType = src.sType;
+    handleTypes = src.handleTypes;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkExternalMemoryImageCreateInfoNV& safe_VkExternalMemoryImageCreateInfoNV::operator=(const safe_VkExternalMemoryImageCreateInfoNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    handleTypes = src.handleTypes;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkExternalMemoryImageCreateInfoNV::~safe_VkExternalMemoryImageCreateInfoNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkExternalMemoryImageCreateInfoNV::initialize(const VkExternalMemoryImageCreateInfoNV* in_struct)
+{
+    sType = in_struct->sType;
+    handleTypes = in_struct->handleTypes;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkExternalMemoryImageCreateInfoNV::initialize(const safe_VkExternalMemoryImageCreateInfoNV* src)
+{
+    sType = src->sType;
+    handleTypes = src->handleTypes;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkExportMemoryAllocateInfoNV::safe_VkExportMemoryAllocateInfoNV(const VkExportMemoryAllocateInfoNV* in_struct) :
+    sType(in_struct->sType),
+    handleTypes(in_struct->handleTypes)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkExportMemoryAllocateInfoNV::safe_VkExportMemoryAllocateInfoNV() :
+    pNext(nullptr)
+{}
+
+safe_VkExportMemoryAllocateInfoNV::safe_VkExportMemoryAllocateInfoNV(const safe_VkExportMemoryAllocateInfoNV& src)
+{
+    sType = src.sType;
+    handleTypes = src.handleTypes;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkExportMemoryAllocateInfoNV& safe_VkExportMemoryAllocateInfoNV::operator=(const safe_VkExportMemoryAllocateInfoNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    handleTypes = src.handleTypes;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkExportMemoryAllocateInfoNV::~safe_VkExportMemoryAllocateInfoNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkExportMemoryAllocateInfoNV::initialize(const VkExportMemoryAllocateInfoNV* in_struct)
+{
+    sType = in_struct->sType;
+    handleTypes = in_struct->handleTypes;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkExportMemoryAllocateInfoNV::initialize(const safe_VkExportMemoryAllocateInfoNV* src)
+{
+    sType = src->sType;
+    handleTypes = src->handleTypes;
+    pNext = SafePnextCopy(src->pNext);
+}
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+
+safe_VkImportMemoryWin32HandleInfoNV::safe_VkImportMemoryWin32HandleInfoNV(const VkImportMemoryWin32HandleInfoNV* in_struct) :
+    sType(in_struct->sType),
+    handleType(in_struct->handleType),
+    handle(in_struct->handle)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkImportMemoryWin32HandleInfoNV::safe_VkImportMemoryWin32HandleInfoNV() :
+    pNext(nullptr)
+{}
+
+safe_VkImportMemoryWin32HandleInfoNV::safe_VkImportMemoryWin32HandleInfoNV(const safe_VkImportMemoryWin32HandleInfoNV& src)
+{
+    sType = src.sType;
+    handleType = src.handleType;
+    handle = src.handle;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkImportMemoryWin32HandleInfoNV& safe_VkImportMemoryWin32HandleInfoNV::operator=(const safe_VkImportMemoryWin32HandleInfoNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    handleType = src.handleType;
+    handle = src.handle;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkImportMemoryWin32HandleInfoNV::~safe_VkImportMemoryWin32HandleInfoNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkImportMemoryWin32HandleInfoNV::initialize(const VkImportMemoryWin32HandleInfoNV* in_struct)
+{
+    sType = in_struct->sType;
+    handleType = in_struct->handleType;
+    handle = in_struct->handle;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkImportMemoryWin32HandleInfoNV::initialize(const safe_VkImportMemoryWin32HandleInfoNV* src)
+{
+    sType = src->sType;
+    handleType = src->handleType;
+    handle = src->handle;
+    pNext = SafePnextCopy(src->pNext);
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+
+safe_VkExportMemoryWin32HandleInfoNV::safe_VkExportMemoryWin32HandleInfoNV(const VkExportMemoryWin32HandleInfoNV* in_struct) :
+    sType(in_struct->sType),
+    pAttributes(nullptr),
+    dwAccess(in_struct->dwAccess)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pAttributes) {
+        pAttributes = new SECURITY_ATTRIBUTES(*in_struct->pAttributes);
+    }
+}
+
+safe_VkExportMemoryWin32HandleInfoNV::safe_VkExportMemoryWin32HandleInfoNV() :
+    pNext(nullptr),
+    pAttributes(nullptr)
+{}
+
+safe_VkExportMemoryWin32HandleInfoNV::safe_VkExportMemoryWin32HandleInfoNV(const safe_VkExportMemoryWin32HandleInfoNV& src)
+{
+    sType = src.sType;
+    pAttributes = nullptr;
+    dwAccess = src.dwAccess;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pAttributes) {
+        pAttributes = new SECURITY_ATTRIBUTES(*src.pAttributes);
+    }
+}
+
+safe_VkExportMemoryWin32HandleInfoNV& safe_VkExportMemoryWin32HandleInfoNV::operator=(const safe_VkExportMemoryWin32HandleInfoNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pAttributes)
+        delete pAttributes;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    pAttributes = nullptr;
+    dwAccess = src.dwAccess;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pAttributes) {
+        pAttributes = new SECURITY_ATTRIBUTES(*src.pAttributes);
+    }
+
+    return *this;
+}
+
+safe_VkExportMemoryWin32HandleInfoNV::~safe_VkExportMemoryWin32HandleInfoNV()
+{
+    if (pAttributes)
+        delete pAttributes;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkExportMemoryWin32HandleInfoNV::initialize(const VkExportMemoryWin32HandleInfoNV* in_struct)
+{
+    sType = in_struct->sType;
+    pAttributes = nullptr;
+    dwAccess = in_struct->dwAccess;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pAttributes) {
+        pAttributes = new SECURITY_ATTRIBUTES(*in_struct->pAttributes);
+    }
+}
+
+void safe_VkExportMemoryWin32HandleInfoNV::initialize(const safe_VkExportMemoryWin32HandleInfoNV* src)
+{
+    sType = src->sType;
+    pAttributes = nullptr;
+    dwAccess = src->dwAccess;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pAttributes) {
+        pAttributes = new SECURITY_ATTRIBUTES(*src->pAttributes);
+    }
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+
+safe_VkWin32KeyedMutexAcquireReleaseInfoNV::safe_VkWin32KeyedMutexAcquireReleaseInfoNV(const VkWin32KeyedMutexAcquireReleaseInfoNV* in_struct) :
+    sType(in_struct->sType),
+    acquireCount(in_struct->acquireCount),
+    pAcquireSyncs(nullptr),
+    pAcquireKeys(nullptr),
+    pAcquireTimeoutMilliseconds(nullptr),
+    releaseCount(in_struct->releaseCount),
+    pReleaseSyncs(nullptr),
+    pReleaseKeys(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (acquireCount && in_struct->pAcquireSyncs) {
+        pAcquireSyncs = new VkDeviceMemory[acquireCount];
+        for (uint32_t i = 0; i < acquireCount; ++i) {
+            pAcquireSyncs[i] = in_struct->pAcquireSyncs[i];
+        }
+    }
+    if (in_struct->pAcquireKeys) {
+        pAcquireKeys = new uint64_t[in_struct->acquireCount];
+        memcpy ((void *)pAcquireKeys, (void *)in_struct->pAcquireKeys, sizeof(uint64_t)*in_struct->acquireCount);
+    }
+    if (in_struct->pAcquireTimeoutMilliseconds) {
+        pAcquireTimeoutMilliseconds = new uint32_t[in_struct->acquireCount];
+        memcpy ((void *)pAcquireTimeoutMilliseconds, (void *)in_struct->pAcquireTimeoutMilliseconds, sizeof(uint32_t)*in_struct->acquireCount);
+    }
+    if (releaseCount && in_struct->pReleaseSyncs) {
+        pReleaseSyncs = new VkDeviceMemory[releaseCount];
+        for (uint32_t i = 0; i < releaseCount; ++i) {
+            pReleaseSyncs[i] = in_struct->pReleaseSyncs[i];
+        }
+    }
+    if (in_struct->pReleaseKeys) {
+        pReleaseKeys = new uint64_t[in_struct->releaseCount];
+        memcpy ((void *)pReleaseKeys, (void *)in_struct->pReleaseKeys, sizeof(uint64_t)*in_struct->releaseCount);
+    }
+}
+
+safe_VkWin32KeyedMutexAcquireReleaseInfoNV::safe_VkWin32KeyedMutexAcquireReleaseInfoNV() :
+    pNext(nullptr),
+    pAcquireSyncs(nullptr),
+    pAcquireKeys(nullptr),
+    pAcquireTimeoutMilliseconds(nullptr),
+    pReleaseSyncs(nullptr),
+    pReleaseKeys(nullptr)
+{}
+
+safe_VkWin32KeyedMutexAcquireReleaseInfoNV::safe_VkWin32KeyedMutexAcquireReleaseInfoNV(const safe_VkWin32KeyedMutexAcquireReleaseInfoNV& src)
+{
+    sType = src.sType;
+    acquireCount = src.acquireCount;
+    pAcquireSyncs = nullptr;
+    pAcquireKeys = nullptr;
+    pAcquireTimeoutMilliseconds = nullptr;
+    releaseCount = src.releaseCount;
+    pReleaseSyncs = nullptr;
+    pReleaseKeys = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (acquireCount && src.pAcquireSyncs) {
+        pAcquireSyncs = new VkDeviceMemory[acquireCount];
+        for (uint32_t i = 0; i < acquireCount; ++i) {
+            pAcquireSyncs[i] = src.pAcquireSyncs[i];
+        }
+    }
+    if (src.pAcquireKeys) {
+        pAcquireKeys = new uint64_t[src.acquireCount];
+        memcpy ((void *)pAcquireKeys, (void *)src.pAcquireKeys, sizeof(uint64_t)*src.acquireCount);
+    }
+    if (src.pAcquireTimeoutMilliseconds) {
+        pAcquireTimeoutMilliseconds = new uint32_t[src.acquireCount];
+        memcpy ((void *)pAcquireTimeoutMilliseconds, (void *)src.pAcquireTimeoutMilliseconds, sizeof(uint32_t)*src.acquireCount);
+    }
+    if (releaseCount && src.pReleaseSyncs) {
+        pReleaseSyncs = new VkDeviceMemory[releaseCount];
+        for (uint32_t i = 0; i < releaseCount; ++i) {
+            pReleaseSyncs[i] = src.pReleaseSyncs[i];
+        }
+    }
+    if (src.pReleaseKeys) {
+        pReleaseKeys = new uint64_t[src.releaseCount];
+        memcpy ((void *)pReleaseKeys, (void *)src.pReleaseKeys, sizeof(uint64_t)*src.releaseCount);
+    }
+}
+
+safe_VkWin32KeyedMutexAcquireReleaseInfoNV& safe_VkWin32KeyedMutexAcquireReleaseInfoNV::operator=(const safe_VkWin32KeyedMutexAcquireReleaseInfoNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pAcquireSyncs)
+        delete[] pAcquireSyncs;
+    if (pAcquireKeys)
+        delete[] pAcquireKeys;
+    if (pAcquireTimeoutMilliseconds)
+        delete[] pAcquireTimeoutMilliseconds;
+    if (pReleaseSyncs)
+        delete[] pReleaseSyncs;
+    if (pReleaseKeys)
+        delete[] pReleaseKeys;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    acquireCount = src.acquireCount;
+    pAcquireSyncs = nullptr;
+    pAcquireKeys = nullptr;
+    pAcquireTimeoutMilliseconds = nullptr;
+    releaseCount = src.releaseCount;
+    pReleaseSyncs = nullptr;
+    pReleaseKeys = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (acquireCount && src.pAcquireSyncs) {
+        pAcquireSyncs = new VkDeviceMemory[acquireCount];
+        for (uint32_t i = 0; i < acquireCount; ++i) {
+            pAcquireSyncs[i] = src.pAcquireSyncs[i];
+        }
+    }
+    if (src.pAcquireKeys) {
+        pAcquireKeys = new uint64_t[src.acquireCount];
+        memcpy ((void *)pAcquireKeys, (void *)src.pAcquireKeys, sizeof(uint64_t)*src.acquireCount);
+    }
+    if (src.pAcquireTimeoutMilliseconds) {
+        pAcquireTimeoutMilliseconds = new uint32_t[src.acquireCount];
+        memcpy ((void *)pAcquireTimeoutMilliseconds, (void *)src.pAcquireTimeoutMilliseconds, sizeof(uint32_t)*src.acquireCount);
+    }
+    if (releaseCount && src.pReleaseSyncs) {
+        pReleaseSyncs = new VkDeviceMemory[releaseCount];
+        for (uint32_t i = 0; i < releaseCount; ++i) {
+            pReleaseSyncs[i] = src.pReleaseSyncs[i];
+        }
+    }
+    if (src.pReleaseKeys) {
+        pReleaseKeys = new uint64_t[src.releaseCount];
+        memcpy ((void *)pReleaseKeys, (void *)src.pReleaseKeys, sizeof(uint64_t)*src.releaseCount);
+    }
+
+    return *this;
+}
+
+safe_VkWin32KeyedMutexAcquireReleaseInfoNV::~safe_VkWin32KeyedMutexAcquireReleaseInfoNV()
+{
+    if (pAcquireSyncs)
+        delete[] pAcquireSyncs;
+    if (pAcquireKeys)
+        delete[] pAcquireKeys;
+    if (pAcquireTimeoutMilliseconds)
+        delete[] pAcquireTimeoutMilliseconds;
+    if (pReleaseSyncs)
+        delete[] pReleaseSyncs;
+    if (pReleaseKeys)
+        delete[] pReleaseKeys;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkWin32KeyedMutexAcquireReleaseInfoNV::initialize(const VkWin32KeyedMutexAcquireReleaseInfoNV* in_struct)
+{
+    sType = in_struct->sType;
+    acquireCount = in_struct->acquireCount;
+    pAcquireSyncs = nullptr;
+    pAcquireKeys = nullptr;
+    pAcquireTimeoutMilliseconds = nullptr;
+    releaseCount = in_struct->releaseCount;
+    pReleaseSyncs = nullptr;
+    pReleaseKeys = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (acquireCount && in_struct->pAcquireSyncs) {
+        pAcquireSyncs = new VkDeviceMemory[acquireCount];
+        for (uint32_t i = 0; i < acquireCount; ++i) {
+            pAcquireSyncs[i] = in_struct->pAcquireSyncs[i];
+        }
+    }
+    if (in_struct->pAcquireKeys) {
+        pAcquireKeys = new uint64_t[in_struct->acquireCount];
+        memcpy ((void *)pAcquireKeys, (void *)in_struct->pAcquireKeys, sizeof(uint64_t)*in_struct->acquireCount);
+    }
+    if (in_struct->pAcquireTimeoutMilliseconds) {
+        pAcquireTimeoutMilliseconds = new uint32_t[in_struct->acquireCount];
+        memcpy ((void *)pAcquireTimeoutMilliseconds, (void *)in_struct->pAcquireTimeoutMilliseconds, sizeof(uint32_t)*in_struct->acquireCount);
+    }
+    if (releaseCount && in_struct->pReleaseSyncs) {
+        pReleaseSyncs = new VkDeviceMemory[releaseCount];
+        for (uint32_t i = 0; i < releaseCount; ++i) {
+            pReleaseSyncs[i] = in_struct->pReleaseSyncs[i];
+        }
+    }
+    if (in_struct->pReleaseKeys) {
+        pReleaseKeys = new uint64_t[in_struct->releaseCount];
+        memcpy ((void *)pReleaseKeys, (void *)in_struct->pReleaseKeys, sizeof(uint64_t)*in_struct->releaseCount);
+    }
+}
+
+void safe_VkWin32KeyedMutexAcquireReleaseInfoNV::initialize(const safe_VkWin32KeyedMutexAcquireReleaseInfoNV* src)
+{
+    sType = src->sType;
+    acquireCount = src->acquireCount;
+    pAcquireSyncs = nullptr;
+    pAcquireKeys = nullptr;
+    pAcquireTimeoutMilliseconds = nullptr;
+    releaseCount = src->releaseCount;
+    pReleaseSyncs = nullptr;
+    pReleaseKeys = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (acquireCount && src->pAcquireSyncs) {
+        pAcquireSyncs = new VkDeviceMemory[acquireCount];
+        for (uint32_t i = 0; i < acquireCount; ++i) {
+            pAcquireSyncs[i] = src->pAcquireSyncs[i];
+        }
+    }
+    if (src->pAcquireKeys) {
+        pAcquireKeys = new uint64_t[src->acquireCount];
+        memcpy ((void *)pAcquireKeys, (void *)src->pAcquireKeys, sizeof(uint64_t)*src->acquireCount);
+    }
+    if (src->pAcquireTimeoutMilliseconds) {
+        pAcquireTimeoutMilliseconds = new uint32_t[src->acquireCount];
+        memcpy ((void *)pAcquireTimeoutMilliseconds, (void *)src->pAcquireTimeoutMilliseconds, sizeof(uint32_t)*src->acquireCount);
+    }
+    if (releaseCount && src->pReleaseSyncs) {
+        pReleaseSyncs = new VkDeviceMemory[releaseCount];
+        for (uint32_t i = 0; i < releaseCount; ++i) {
+            pReleaseSyncs[i] = src->pReleaseSyncs[i];
+        }
+    }
+    if (src->pReleaseKeys) {
+        pReleaseKeys = new uint64_t[src->releaseCount];
+        memcpy ((void *)pReleaseKeys, (void *)src->pReleaseKeys, sizeof(uint64_t)*src->releaseCount);
+    }
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
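+
+// Note on the keyed-mutex copy helpers above: the handle arrays
+// (pAcquireSyncs, pReleaseSyncs) are duplicated element by element inside
+// count-guarded loops, while the plain-data arrays (pAcquireKeys,
+// pAcquireTimeoutMilliseconds, pReleaseKeys) are duplicated with memcpy sized
+// by the source counts. The pNext chain is always deep-copied with
+// SafePnextCopy and released with FreePnextChain, so each safe_ struct owns
+// every allocation it points to.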
+
+
+safe_VkValidationFlagsEXT::safe_VkValidationFlagsEXT(const VkValidationFlagsEXT* in_struct) :
+    sType(in_struct->sType),
+    disabledValidationCheckCount(in_struct->disabledValidationCheckCount),
+    pDisabledValidationChecks(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pDisabledValidationChecks) {
+        pDisabledValidationChecks = new VkValidationCheckEXT[in_struct->disabledValidationCheckCount];
+        memcpy ((void *)pDisabledValidationChecks, (void *)in_struct->pDisabledValidationChecks, sizeof(VkValidationCheckEXT)*in_struct->disabledValidationCheckCount);
+    }
+}
+
+safe_VkValidationFlagsEXT::safe_VkValidationFlagsEXT() :
+    pNext(nullptr),
+    pDisabledValidationChecks(nullptr)
+{}
+
+safe_VkValidationFlagsEXT::safe_VkValidationFlagsEXT(const safe_VkValidationFlagsEXT& src)
+{
+    sType = src.sType;
+    disabledValidationCheckCount = src.disabledValidationCheckCount;
+    pDisabledValidationChecks = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pDisabledValidationChecks) {
+        pDisabledValidationChecks = new VkValidationCheckEXT[src.disabledValidationCheckCount];
+        memcpy ((void *)pDisabledValidationChecks, (void *)src.pDisabledValidationChecks, sizeof(VkValidationCheckEXT)*src.disabledValidationCheckCount);
+    }
+}
+
+safe_VkValidationFlagsEXT& safe_VkValidationFlagsEXT::operator=(const safe_VkValidationFlagsEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pDisabledValidationChecks)
+        delete[] pDisabledValidationChecks;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    disabledValidationCheckCount = src.disabledValidationCheckCount;
+    pDisabledValidationChecks = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pDisabledValidationChecks) {
+        pDisabledValidationChecks = new VkValidationCheckEXT[src.disabledValidationCheckCount];
+        memcpy ((void *)pDisabledValidationChecks, (void *)src.pDisabledValidationChecks, sizeof(VkValidationCheckEXT)*src.disabledValidationCheckCount);
+    }
+
+    return *this;
+}
+
+safe_VkValidationFlagsEXT::~safe_VkValidationFlagsEXT()
+{
+    if (pDisabledValidationChecks)
+        delete[] pDisabledValidationChecks;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkValidationFlagsEXT::initialize(const VkValidationFlagsEXT* in_struct)
+{
+    sType = in_struct->sType;
+    disabledValidationCheckCount = in_struct->disabledValidationCheckCount;
+    pDisabledValidationChecks = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pDisabledValidationChecks) {
+        pDisabledValidationChecks = new VkValidationCheckEXT[in_struct->disabledValidationCheckCount];
+        memcpy ((void *)pDisabledValidationChecks, (void *)in_struct->pDisabledValidationChecks, sizeof(VkValidationCheckEXT)*in_struct->disabledValidationCheckCount);
+    }
+}
+
+void safe_VkValidationFlagsEXT::initialize(const safe_VkValidationFlagsEXT* src)
+{
+    sType = src->sType;
+    disabledValidationCheckCount = src->disabledValidationCheckCount;
+    pDisabledValidationChecks = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pDisabledValidationChecks) {
+        pDisabledValidationChecks = new VkValidationCheckEXT[src->disabledValidationCheckCount];
+        memcpy ((void *)pDisabledValidationChecks, (void *)src->pDisabledValidationChecks, sizeof(VkValidationCheckEXT)*src->disabledValidationCheckCount);
+    }
+}
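+
+// Minimal usage sketch for the safe_ wrappers above (assumes only the standard
+// Vulkan headers; the variable names are illustrative, not part of this file):
+//
+//     VkValidationCheckEXT disabled_check = VK_VALIDATION_CHECK_ALL_EXT;
+//     VkValidationFlagsEXT flags = {};
+//     flags.sType = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT;
+//     flags.disabledValidationCheckCount = 1;
+//     flags.pDisabledValidationChecks = &disabled_check;
+//
+//     safe_VkValidationFlagsEXT safe_copy(&flags);
+//     // safe_copy now owns its own pDisabledValidationChecks array and its
+//     // deep-copied pNext chain; 'flags' may safely go out of scope.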
+#ifdef VK_USE_PLATFORM_VI_NN
+
+
+safe_VkViSurfaceCreateInfoNN::safe_VkViSurfaceCreateInfoNN(const VkViSurfaceCreateInfoNN* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    window(in_struct->window)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkViSurfaceCreateInfoNN::safe_VkViSurfaceCreateInfoNN() :
+    pNext(nullptr),
+    window(nullptr)
+{}
+
+safe_VkViSurfaceCreateInfoNN::safe_VkViSurfaceCreateInfoNN(const safe_VkViSurfaceCreateInfoNN& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    window = src.window;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkViSurfaceCreateInfoNN& safe_VkViSurfaceCreateInfoNN::operator=(const safe_VkViSurfaceCreateInfoNN& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    window = src.window;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkViSurfaceCreateInfoNN::~safe_VkViSurfaceCreateInfoNN()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkViSurfaceCreateInfoNN::initialize(const VkViSurfaceCreateInfoNN* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    window = in_struct->window;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkViSurfaceCreateInfoNN::initialize(const safe_VkViSurfaceCreateInfoNN* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    window = src->window;
+    pNext = SafePnextCopy(src->pNext);
+}
+#endif // VK_USE_PLATFORM_VI_NN
+
+
+safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT::safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT(const VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT* in_struct) :
+    sType(in_struct->sType),
+    textureCompressionASTC_HDR(in_struct->textureCompressionASTC_HDR)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT::safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT::safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT(const safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT& src)
+{
+    sType = src.sType;
+    textureCompressionASTC_HDR = src.textureCompressionASTC_HDR;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT& safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT::operator=(const safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    textureCompressionASTC_HDR = src.textureCompressionASTC_HDR;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT::~safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT::initialize(const VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    textureCompressionASTC_HDR = in_struct->textureCompressionASTC_HDR;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT::initialize(const safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT* src)
+{
+    sType = src->sType;
+    textureCompressionASTC_HDR = src->textureCompressionASTC_HDR;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkImageViewASTCDecodeModeEXT::safe_VkImageViewASTCDecodeModeEXT(const VkImageViewASTCDecodeModeEXT* in_struct) :
+    sType(in_struct->sType),
+    decodeMode(in_struct->decodeMode)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkImageViewASTCDecodeModeEXT::safe_VkImageViewASTCDecodeModeEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkImageViewASTCDecodeModeEXT::safe_VkImageViewASTCDecodeModeEXT(const safe_VkImageViewASTCDecodeModeEXT& src)
+{
+    sType = src.sType;
+    decodeMode = src.decodeMode;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkImageViewASTCDecodeModeEXT& safe_VkImageViewASTCDecodeModeEXT::operator=(const safe_VkImageViewASTCDecodeModeEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    decodeMode = src.decodeMode;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkImageViewASTCDecodeModeEXT::~safe_VkImageViewASTCDecodeModeEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkImageViewASTCDecodeModeEXT::initialize(const VkImageViewASTCDecodeModeEXT* in_struct)
+{
+    sType = in_struct->sType;
+    decodeMode = in_struct->decodeMode;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkImageViewASTCDecodeModeEXT::initialize(const safe_VkImageViewASTCDecodeModeEXT* src)
+{
+    sType = src->sType;
+    decodeMode = src->decodeMode;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceASTCDecodeFeaturesEXT::safe_VkPhysicalDeviceASTCDecodeFeaturesEXT(const VkPhysicalDeviceASTCDecodeFeaturesEXT* in_struct) :
+    sType(in_struct->sType),
+    decodeModeSharedExponent(in_struct->decodeModeSharedExponent)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceASTCDecodeFeaturesEXT::safe_VkPhysicalDeviceASTCDecodeFeaturesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceASTCDecodeFeaturesEXT::safe_VkPhysicalDeviceASTCDecodeFeaturesEXT(const safe_VkPhysicalDeviceASTCDecodeFeaturesEXT& src)
+{
+    sType = src.sType;
+    decodeModeSharedExponent = src.decodeModeSharedExponent;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceASTCDecodeFeaturesEXT& safe_VkPhysicalDeviceASTCDecodeFeaturesEXT::operator=(const safe_VkPhysicalDeviceASTCDecodeFeaturesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    decodeModeSharedExponent = src.decodeModeSharedExponent;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceASTCDecodeFeaturesEXT::~safe_VkPhysicalDeviceASTCDecodeFeaturesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceASTCDecodeFeaturesEXT::initialize(const VkPhysicalDeviceASTCDecodeFeaturesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    decodeModeSharedExponent = in_struct->decodeModeSharedExponent;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceASTCDecodeFeaturesEXT::initialize(const safe_VkPhysicalDeviceASTCDecodeFeaturesEXT* src)
+{
+    sType = src->sType;
+    decodeModeSharedExponent = src->decodeModeSharedExponent;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkConditionalRenderingBeginInfoEXT::safe_VkConditionalRenderingBeginInfoEXT(const VkConditionalRenderingBeginInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    buffer(in_struct->buffer),
+    offset(in_struct->offset),
+    flags(in_struct->flags)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkConditionalRenderingBeginInfoEXT::safe_VkConditionalRenderingBeginInfoEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkConditionalRenderingBeginInfoEXT::safe_VkConditionalRenderingBeginInfoEXT(const safe_VkConditionalRenderingBeginInfoEXT& src)
+{
+    sType = src.sType;
+    buffer = src.buffer;
+    offset = src.offset;
+    flags = src.flags;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkConditionalRenderingBeginInfoEXT& safe_VkConditionalRenderingBeginInfoEXT::operator=(const safe_VkConditionalRenderingBeginInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    buffer = src.buffer;
+    offset = src.offset;
+    flags = src.flags;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkConditionalRenderingBeginInfoEXT::~safe_VkConditionalRenderingBeginInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkConditionalRenderingBeginInfoEXT::initialize(const VkConditionalRenderingBeginInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    buffer = in_struct->buffer;
+    offset = in_struct->offset;
+    flags = in_struct->flags;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkConditionalRenderingBeginInfoEXT::initialize(const safe_VkConditionalRenderingBeginInfoEXT* src)
+{
+    sType = src->sType;
+    buffer = src->buffer;
+    offset = src->offset;
+    flags = src->flags;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT::safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT(const VkPhysicalDeviceConditionalRenderingFeaturesEXT* in_struct) :
+    sType(in_struct->sType),
+    conditionalRendering(in_struct->conditionalRendering),
+    inheritedConditionalRendering(in_struct->inheritedConditionalRendering)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT::safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT::safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT(const safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT& src)
+{
+    sType = src.sType;
+    conditionalRendering = src.conditionalRendering;
+    inheritedConditionalRendering = src.inheritedConditionalRendering;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT& safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT::operator=(const safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    conditionalRendering = src.conditionalRendering;
+    inheritedConditionalRendering = src.inheritedConditionalRendering;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT::~safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT::initialize(const VkPhysicalDeviceConditionalRenderingFeaturesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    conditionalRendering = in_struct->conditionalRendering;
+    inheritedConditionalRendering = in_struct->inheritedConditionalRendering;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT::initialize(const safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT* src)
+{
+    sType = src->sType;
+    conditionalRendering = src->conditionalRendering;
+    inheritedConditionalRendering = src->inheritedConditionalRendering;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT::safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT(const VkCommandBufferInheritanceConditionalRenderingInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    conditionalRenderingEnable(in_struct->conditionalRenderingEnable)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT::safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT::safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT(const safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT& src)
+{
+    sType = src.sType;
+    conditionalRenderingEnable = src.conditionalRenderingEnable;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT& safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT::operator=(const safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    conditionalRenderingEnable = src.conditionalRenderingEnable;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT::~safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT::initialize(const VkCommandBufferInheritanceConditionalRenderingInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    conditionalRenderingEnable = in_struct->conditionalRenderingEnable;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT::initialize(const safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT* src)
+{
+    sType = src->sType;
+    conditionalRenderingEnable = src->conditionalRenderingEnable;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDeviceGeneratedCommandsFeaturesNVX::safe_VkDeviceGeneratedCommandsFeaturesNVX(const VkDeviceGeneratedCommandsFeaturesNVX* in_struct) :
+    sType(in_struct->sType),
+    computeBindingPointSupport(in_struct->computeBindingPointSupport)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDeviceGeneratedCommandsFeaturesNVX::safe_VkDeviceGeneratedCommandsFeaturesNVX() :
+    pNext(nullptr)
+{}
+
+safe_VkDeviceGeneratedCommandsFeaturesNVX::safe_VkDeviceGeneratedCommandsFeaturesNVX(const safe_VkDeviceGeneratedCommandsFeaturesNVX& src)
+{
+    sType = src.sType;
+    computeBindingPointSupport = src.computeBindingPointSupport;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDeviceGeneratedCommandsFeaturesNVX& safe_VkDeviceGeneratedCommandsFeaturesNVX::operator=(const safe_VkDeviceGeneratedCommandsFeaturesNVX& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    computeBindingPointSupport = src.computeBindingPointSupport;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDeviceGeneratedCommandsFeaturesNVX::~safe_VkDeviceGeneratedCommandsFeaturesNVX()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDeviceGeneratedCommandsFeaturesNVX::initialize(const VkDeviceGeneratedCommandsFeaturesNVX* in_struct)
+{
+    sType = in_struct->sType;
+    computeBindingPointSupport = in_struct->computeBindingPointSupport;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDeviceGeneratedCommandsFeaturesNVX::initialize(const safe_VkDeviceGeneratedCommandsFeaturesNVX* src)
+{
+    sType = src->sType;
+    computeBindingPointSupport = src->computeBindingPointSupport;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDeviceGeneratedCommandsLimitsNVX::safe_VkDeviceGeneratedCommandsLimitsNVX(const VkDeviceGeneratedCommandsLimitsNVX* in_struct) :
+    sType(in_struct->sType),
+    maxIndirectCommandsLayoutTokenCount(in_struct->maxIndirectCommandsLayoutTokenCount),
+    maxObjectEntryCounts(in_struct->maxObjectEntryCounts),
+    minSequenceCountBufferOffsetAlignment(in_struct->minSequenceCountBufferOffsetAlignment),
+    minSequenceIndexBufferOffsetAlignment(in_struct->minSequenceIndexBufferOffsetAlignment),
+    minCommandsTokenBufferOffsetAlignment(in_struct->minCommandsTokenBufferOffsetAlignment)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDeviceGeneratedCommandsLimitsNVX::safe_VkDeviceGeneratedCommandsLimitsNVX() :
+    pNext(nullptr)
+{}
+
+safe_VkDeviceGeneratedCommandsLimitsNVX::safe_VkDeviceGeneratedCommandsLimitsNVX(const safe_VkDeviceGeneratedCommandsLimitsNVX& src)
+{
+    sType = src.sType;
+    maxIndirectCommandsLayoutTokenCount = src.maxIndirectCommandsLayoutTokenCount;
+    maxObjectEntryCounts = src.maxObjectEntryCounts;
+    minSequenceCountBufferOffsetAlignment = src.minSequenceCountBufferOffsetAlignment;
+    minSequenceIndexBufferOffsetAlignment = src.minSequenceIndexBufferOffsetAlignment;
+    minCommandsTokenBufferOffsetAlignment = src.minCommandsTokenBufferOffsetAlignment;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDeviceGeneratedCommandsLimitsNVX& safe_VkDeviceGeneratedCommandsLimitsNVX::operator=(const safe_VkDeviceGeneratedCommandsLimitsNVX& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    maxIndirectCommandsLayoutTokenCount = src.maxIndirectCommandsLayoutTokenCount;
+    maxObjectEntryCounts = src.maxObjectEntryCounts;
+    minSequenceCountBufferOffsetAlignment = src.minSequenceCountBufferOffsetAlignment;
+    minSequenceIndexBufferOffsetAlignment = src.minSequenceIndexBufferOffsetAlignment;
+    minCommandsTokenBufferOffsetAlignment = src.minCommandsTokenBufferOffsetAlignment;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDeviceGeneratedCommandsLimitsNVX::~safe_VkDeviceGeneratedCommandsLimitsNVX()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDeviceGeneratedCommandsLimitsNVX::initialize(const VkDeviceGeneratedCommandsLimitsNVX* in_struct)
+{
+    sType = in_struct->sType;
+    maxIndirectCommandsLayoutTokenCount = in_struct->maxIndirectCommandsLayoutTokenCount;
+    maxObjectEntryCounts = in_struct->maxObjectEntryCounts;
+    minSequenceCountBufferOffsetAlignment = in_struct->minSequenceCountBufferOffsetAlignment;
+    minSequenceIndexBufferOffsetAlignment = in_struct->minSequenceIndexBufferOffsetAlignment;
+    minCommandsTokenBufferOffsetAlignment = in_struct->minCommandsTokenBufferOffsetAlignment;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDeviceGeneratedCommandsLimitsNVX::initialize(const safe_VkDeviceGeneratedCommandsLimitsNVX* src)
+{
+    sType = src->sType;
+    maxIndirectCommandsLayoutTokenCount = src->maxIndirectCommandsLayoutTokenCount;
+    maxObjectEntryCounts = src->maxObjectEntryCounts;
+    minSequenceCountBufferOffsetAlignment = src->minSequenceCountBufferOffsetAlignment;
+    minSequenceIndexBufferOffsetAlignment = src->minSequenceIndexBufferOffsetAlignment;
+    minCommandsTokenBufferOffsetAlignment = src->minCommandsTokenBufferOffsetAlignment;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkIndirectCommandsLayoutCreateInfoNVX::safe_VkIndirectCommandsLayoutCreateInfoNVX(const VkIndirectCommandsLayoutCreateInfoNVX* in_struct) :
+    sType(in_struct->sType),
+    pipelineBindPoint(in_struct->pipelineBindPoint),
+    flags(in_struct->flags),
+    tokenCount(in_struct->tokenCount),
+    pTokens(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pTokens) {
+        pTokens = new VkIndirectCommandsLayoutTokenNVX[in_struct->tokenCount];
+        memcpy ((void *)pTokens, (void *)in_struct->pTokens, sizeof(VkIndirectCommandsLayoutTokenNVX)*in_struct->tokenCount);
+    }
+}
+
+safe_VkIndirectCommandsLayoutCreateInfoNVX::safe_VkIndirectCommandsLayoutCreateInfoNVX() :
+    pNext(nullptr),
+    pTokens(nullptr)
+{}
+
+safe_VkIndirectCommandsLayoutCreateInfoNVX::safe_VkIndirectCommandsLayoutCreateInfoNVX(const safe_VkIndirectCommandsLayoutCreateInfoNVX& src)
+{
+    sType = src.sType;
+    pipelineBindPoint = src.pipelineBindPoint;
+    flags = src.flags;
+    tokenCount = src.tokenCount;
+    pTokens = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pTokens) {
+        pTokens = new VkIndirectCommandsLayoutTokenNVX[src.tokenCount];
+        memcpy ((void *)pTokens, (void *)src.pTokens, sizeof(VkIndirectCommandsLayoutTokenNVX)*src.tokenCount);
+    }
+}
+
+safe_VkIndirectCommandsLayoutCreateInfoNVX& safe_VkIndirectCommandsLayoutCreateInfoNVX::operator=(const safe_VkIndirectCommandsLayoutCreateInfoNVX& src)
+{
+    if (&src == this) return *this;
+
+    if (pTokens)
+        delete[] pTokens;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    pipelineBindPoint = src.pipelineBindPoint;
+    flags = src.flags;
+    tokenCount = src.tokenCount;
+    pTokens = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pTokens) {
+        pTokens = new VkIndirectCommandsLayoutTokenNVX[src.tokenCount];
+        memcpy ((void *)pTokens, (void *)src.pTokens, sizeof(VkIndirectCommandsLayoutTokenNVX)*src.tokenCount);
+    }
+
+    return *this;
+}
+
+safe_VkIndirectCommandsLayoutCreateInfoNVX::~safe_VkIndirectCommandsLayoutCreateInfoNVX()
+{
+    if (pTokens)
+        delete[] pTokens;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkIndirectCommandsLayoutCreateInfoNVX::initialize(const VkIndirectCommandsLayoutCreateInfoNVX* in_struct)
+{
+    sType = in_struct->sType;
+    pipelineBindPoint = in_struct->pipelineBindPoint;
+    flags = in_struct->flags;
+    tokenCount = in_struct->tokenCount;
+    pTokens = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pTokens) {
+        pTokens = new VkIndirectCommandsLayoutTokenNVX[in_struct->tokenCount];
+        memcpy ((void *)pTokens, (void *)in_struct->pTokens, sizeof(VkIndirectCommandsLayoutTokenNVX)*in_struct->tokenCount);
+    }
+}
+
+void safe_VkIndirectCommandsLayoutCreateInfoNVX::initialize(const safe_VkIndirectCommandsLayoutCreateInfoNVX* src)
+{
+    sType = src->sType;
+    pipelineBindPoint = src->pipelineBindPoint;
+    flags = src->flags;
+    tokenCount = src->tokenCount;
+    pTokens = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pTokens) {
+        pTokens = new VkIndirectCommandsLayoutTokenNVX[src->tokenCount];
+        memcpy ((void *)pTokens, (void *)src->pTokens, sizeof(VkIndirectCommandsLayoutTokenNVX)*src->tokenCount);
+    }
+}
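+
+// In safe_VkIndirectCommandsLayoutCreateInfoNVX above, pTokens holds plain
+// VkIndirectCommandsLayoutTokenNVX records, so the copy is a single memcpy of
+// tokenCount elements rather than a per-element loop.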
+
+safe_VkCmdProcessCommandsInfoNVX::safe_VkCmdProcessCommandsInfoNVX(const VkCmdProcessCommandsInfoNVX* in_struct) :
+    sType(in_struct->sType),
+    objectTable(in_struct->objectTable),
+    indirectCommandsLayout(in_struct->indirectCommandsLayout),
+    indirectCommandsTokenCount(in_struct->indirectCommandsTokenCount),
+    pIndirectCommandsTokens(nullptr),
+    maxSequencesCount(in_struct->maxSequencesCount),
+    targetCommandBuffer(in_struct->targetCommandBuffer),
+    sequencesCountBuffer(in_struct->sequencesCountBuffer),
+    sequencesCountOffset(in_struct->sequencesCountOffset),
+    sequencesIndexBuffer(in_struct->sequencesIndexBuffer),
+    sequencesIndexOffset(in_struct->sequencesIndexOffset)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (indirectCommandsTokenCount && in_struct->pIndirectCommandsTokens) {
+        pIndirectCommandsTokens = new VkIndirectCommandsTokenNVX[indirectCommandsTokenCount];
+        for (uint32_t i = 0; i < indirectCommandsTokenCount; ++i) {
+            pIndirectCommandsTokens[i] = in_struct->pIndirectCommandsTokens[i];
+        }
+    }
+}
+
+safe_VkCmdProcessCommandsInfoNVX::safe_VkCmdProcessCommandsInfoNVX() :
+    pNext(nullptr),
+    pIndirectCommandsTokens(nullptr)
+{}
+
+safe_VkCmdProcessCommandsInfoNVX::safe_VkCmdProcessCommandsInfoNVX(const safe_VkCmdProcessCommandsInfoNVX& src)
+{
+    sType = src.sType;
+    objectTable = src.objectTable;
+    indirectCommandsLayout = src.indirectCommandsLayout;
+    indirectCommandsTokenCount = src.indirectCommandsTokenCount;
+    pIndirectCommandsTokens = nullptr;
+    maxSequencesCount = src.maxSequencesCount;
+    targetCommandBuffer = src.targetCommandBuffer;
+    sequencesCountBuffer = src.sequencesCountBuffer;
+    sequencesCountOffset = src.sequencesCountOffset;
+    sequencesIndexBuffer = src.sequencesIndexBuffer;
+    sequencesIndexOffset = src.sequencesIndexOffset;
+    pNext = SafePnextCopy(src.pNext);
+    if (indirectCommandsTokenCount && src.pIndirectCommandsTokens) {
+        pIndirectCommandsTokens = new VkIndirectCommandsTokenNVX[indirectCommandsTokenCount];
+        for (uint32_t i = 0; i < indirectCommandsTokenCount; ++i) {
+            pIndirectCommandsTokens[i] = src.pIndirectCommandsTokens[i];
+        }
+    }
+}
+
+safe_VkCmdProcessCommandsInfoNVX& safe_VkCmdProcessCommandsInfoNVX::operator=(const safe_VkCmdProcessCommandsInfoNVX& src)
+{
+    if (&src == this) return *this;
+
+    if (pIndirectCommandsTokens)
+        delete[] pIndirectCommandsTokens;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    objectTable = src.objectTable;
+    indirectCommandsLayout = src.indirectCommandsLayout;
+    indirectCommandsTokenCount = src.indirectCommandsTokenCount;
+    pIndirectCommandsTokens = nullptr;
+    maxSequencesCount = src.maxSequencesCount;
+    targetCommandBuffer = src.targetCommandBuffer;
+    sequencesCountBuffer = src.sequencesCountBuffer;
+    sequencesCountOffset = src.sequencesCountOffset;
+    sequencesIndexBuffer = src.sequencesIndexBuffer;
+    sequencesIndexOffset = src.sequencesIndexOffset;
+    pNext = SafePnextCopy(src.pNext);
+    if (indirectCommandsTokenCount && src.pIndirectCommandsTokens) {
+        pIndirectCommandsTokens = new VkIndirectCommandsTokenNVX[indirectCommandsTokenCount];
+        for (uint32_t i = 0; i < indirectCommandsTokenCount; ++i) {
+            pIndirectCommandsTokens[i] = src.pIndirectCommandsTokens[i];
+        }
+    }
+
+    return *this;
+}
+
+safe_VkCmdProcessCommandsInfoNVX::~safe_VkCmdProcessCommandsInfoNVX()
+{
+    if (pIndirectCommandsTokens)
+        delete[] pIndirectCommandsTokens;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkCmdProcessCommandsInfoNVX::initialize(const VkCmdProcessCommandsInfoNVX* in_struct)
+{
+    sType = in_struct->sType;
+    objectTable = in_struct->objectTable;
+    indirectCommandsLayout = in_struct->indirectCommandsLayout;
+    indirectCommandsTokenCount = in_struct->indirectCommandsTokenCount;
+    pIndirectCommandsTokens = nullptr;
+    maxSequencesCount = in_struct->maxSequencesCount;
+    targetCommandBuffer = in_struct->targetCommandBuffer;
+    sequencesCountBuffer = in_struct->sequencesCountBuffer;
+    sequencesCountOffset = in_struct->sequencesCountOffset;
+    sequencesIndexBuffer = in_struct->sequencesIndexBuffer;
+    sequencesIndexOffset = in_struct->sequencesIndexOffset;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (indirectCommandsTokenCount && in_struct->pIndirectCommandsTokens) {
+        pIndirectCommandsTokens = new VkIndirectCommandsTokenNVX[indirectCommandsTokenCount];
+        for (uint32_t i = 0; i < indirectCommandsTokenCount; ++i) {
+            pIndirectCommandsTokens[i] = in_struct->pIndirectCommandsTokens[i];
+        }
+    }
+}
+
+void safe_VkCmdProcessCommandsInfoNVX::initialize(const safe_VkCmdProcessCommandsInfoNVX* src)
+{
+    sType = src->sType;
+    objectTable = src->objectTable;
+    indirectCommandsLayout = src->indirectCommandsLayout;
+    indirectCommandsTokenCount = src->indirectCommandsTokenCount;
+    pIndirectCommandsTokens = nullptr;
+    maxSequencesCount = src->maxSequencesCount;
+    targetCommandBuffer = src->targetCommandBuffer;
+    sequencesCountBuffer = src->sequencesCountBuffer;
+    sequencesCountOffset = src->sequencesCountOffset;
+    sequencesIndexBuffer = src->sequencesIndexBuffer;
+    sequencesIndexOffset = src->sequencesIndexOffset;
+    pNext = SafePnextCopy(src->pNext);
+    if (indirectCommandsTokenCount && src->pIndirectCommandsTokens) {
+        pIndirectCommandsTokens = new VkIndirectCommandsTokenNVX[indirectCommandsTokenCount];
+        for (uint32_t i = 0; i < indirectCommandsTokenCount; ++i) {
+            pIndirectCommandsTokens[i] = src->pIndirectCommandsTokens[i];
+        }
+    }
+}
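+
+// safe_VkCmdProcessCommandsInfoNVX copies pIndirectCommandsTokens element by
+// element inside a loop guarded by indirectCommandsTokenCount, matching the
+// pattern used for the other token and handle arrays in this file.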
+
+safe_VkCmdReserveSpaceForCommandsInfoNVX::safe_VkCmdReserveSpaceForCommandsInfoNVX(const VkCmdReserveSpaceForCommandsInfoNVX* in_struct) :
+    sType(in_struct->sType),
+    objectTable(in_struct->objectTable),
+    indirectCommandsLayout(in_struct->indirectCommandsLayout),
+    maxSequencesCount(in_struct->maxSequencesCount)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkCmdReserveSpaceForCommandsInfoNVX::safe_VkCmdReserveSpaceForCommandsInfoNVX() :
+    pNext(nullptr)
+{}
+
+safe_VkCmdReserveSpaceForCommandsInfoNVX::safe_VkCmdReserveSpaceForCommandsInfoNVX(const safe_VkCmdReserveSpaceForCommandsInfoNVX& src)
+{
+    sType = src.sType;
+    objectTable = src.objectTable;
+    indirectCommandsLayout = src.indirectCommandsLayout;
+    maxSequencesCount = src.maxSequencesCount;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkCmdReserveSpaceForCommandsInfoNVX& safe_VkCmdReserveSpaceForCommandsInfoNVX::operator=(const safe_VkCmdReserveSpaceForCommandsInfoNVX& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    objectTable = src.objectTable;
+    indirectCommandsLayout = src.indirectCommandsLayout;
+    maxSequencesCount = src.maxSequencesCount;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkCmdReserveSpaceForCommandsInfoNVX::~safe_VkCmdReserveSpaceForCommandsInfoNVX()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkCmdReserveSpaceForCommandsInfoNVX::initialize(const VkCmdReserveSpaceForCommandsInfoNVX* in_struct)
+{
+    sType = in_struct->sType;
+    objectTable = in_struct->objectTable;
+    indirectCommandsLayout = in_struct->indirectCommandsLayout;
+    maxSequencesCount = in_struct->maxSequencesCount;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkCmdReserveSpaceForCommandsInfoNVX::initialize(const safe_VkCmdReserveSpaceForCommandsInfoNVX* src)
+{
+    sType = src->sType;
+    objectTable = src->objectTable;
+    indirectCommandsLayout = src->indirectCommandsLayout;
+    maxSequencesCount = src->maxSequencesCount;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkObjectTableCreateInfoNVX::safe_VkObjectTableCreateInfoNVX(const VkObjectTableCreateInfoNVX* in_struct) :
+    sType(in_struct->sType),
+    objectCount(in_struct->objectCount),
+    pObjectEntryTypes(nullptr),
+    pObjectEntryCounts(nullptr),
+    pObjectEntryUsageFlags(nullptr),
+    maxUniformBuffersPerDescriptor(in_struct->maxUniformBuffersPerDescriptor),
+    maxStorageBuffersPerDescriptor(in_struct->maxStorageBuffersPerDescriptor),
+    maxStorageImagesPerDescriptor(in_struct->maxStorageImagesPerDescriptor),
+    maxSampledImagesPerDescriptor(in_struct->maxSampledImagesPerDescriptor),
+    maxPipelineLayouts(in_struct->maxPipelineLayouts)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pObjectEntryTypes) {
+        pObjectEntryTypes = new VkObjectEntryTypeNVX[in_struct->objectCount];
+        memcpy ((void *)pObjectEntryTypes, (void *)in_struct->pObjectEntryTypes, sizeof(VkObjectEntryTypeNVX)*in_struct->objectCount);
+    }
+    if (in_struct->pObjectEntryCounts) {
+        pObjectEntryCounts = new uint32_t[in_struct->objectCount];
+        memcpy ((void *)pObjectEntryCounts, (void *)in_struct->pObjectEntryCounts, sizeof(uint32_t)*in_struct->objectCount);
+    }
+    if (in_struct->pObjectEntryUsageFlags) {
+        pObjectEntryUsageFlags = new VkObjectEntryUsageFlagsNVX[in_struct->objectCount];
+        memcpy ((void *)pObjectEntryUsageFlags, (void *)in_struct->pObjectEntryUsageFlags, sizeof(VkObjectEntryUsageFlagsNVX)*in_struct->objectCount);
+    }
+}
+
+safe_VkObjectTableCreateInfoNVX::safe_VkObjectTableCreateInfoNVX() :
+    pNext(nullptr),
+    pObjectEntryTypes(nullptr),
+    pObjectEntryCounts(nullptr),
+    pObjectEntryUsageFlags(nullptr)
+{}
+
+safe_VkObjectTableCreateInfoNVX::safe_VkObjectTableCreateInfoNVX(const safe_VkObjectTableCreateInfoNVX& src)
+{
+    sType = src.sType;
+    objectCount = src.objectCount;
+    pObjectEntryTypes = nullptr;
+    pObjectEntryCounts = nullptr;
+    pObjectEntryUsageFlags = nullptr;
+    maxUniformBuffersPerDescriptor = src.maxUniformBuffersPerDescriptor;
+    maxStorageBuffersPerDescriptor = src.maxStorageBuffersPerDescriptor;
+    maxStorageImagesPerDescriptor = src.maxStorageImagesPerDescriptor;
+    maxSampledImagesPerDescriptor = src.maxSampledImagesPerDescriptor;
+    maxPipelineLayouts = src.maxPipelineLayouts;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pObjectEntryTypes) {
+        pObjectEntryTypes = new VkObjectEntryTypeNVX[src.objectCount];
+        memcpy ((void *)pObjectEntryTypes, (void *)src.pObjectEntryTypes, sizeof(VkObjectEntryTypeNVX)*src.objectCount);
+    }
+    if (src.pObjectEntryCounts) {
+        pObjectEntryCounts = new uint32_t[src.objectCount];
+        memcpy ((void *)pObjectEntryCounts, (void *)src.pObjectEntryCounts, sizeof(uint32_t)*src.objectCount);
+    }
+    if (src.pObjectEntryUsageFlags) {
+        pObjectEntryUsageFlags = new VkObjectEntryUsageFlagsNVX[src.objectCount];
+        memcpy ((void *)pObjectEntryUsageFlags, (void *)src.pObjectEntryUsageFlags, sizeof(VkObjectEntryUsageFlagsNVX)*src.objectCount);
+    }
+}
+
+safe_VkObjectTableCreateInfoNVX& safe_VkObjectTableCreateInfoNVX::operator=(const safe_VkObjectTableCreateInfoNVX& src)
+{
+    if (&src == this) return *this;
+
+    if (pObjectEntryTypes)
+        delete[] pObjectEntryTypes;
+    if (pObjectEntryCounts)
+        delete[] pObjectEntryCounts;
+    if (pObjectEntryUsageFlags)
+        delete[] pObjectEntryUsageFlags;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    objectCount = src.objectCount;
+    pObjectEntryTypes = nullptr;
+    pObjectEntryCounts = nullptr;
+    pObjectEntryUsageFlags = nullptr;
+    maxUniformBuffersPerDescriptor = src.maxUniformBuffersPerDescriptor;
+    maxStorageBuffersPerDescriptor = src.maxStorageBuffersPerDescriptor;
+    maxStorageImagesPerDescriptor = src.maxStorageImagesPerDescriptor;
+    maxSampledImagesPerDescriptor = src.maxSampledImagesPerDescriptor;
+    maxPipelineLayouts = src.maxPipelineLayouts;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pObjectEntryTypes) {
+        pObjectEntryTypes = new VkObjectEntryTypeNVX[src.objectCount];
+        memcpy ((void *)pObjectEntryTypes, (void *)src.pObjectEntryTypes, sizeof(VkObjectEntryTypeNVX)*src.objectCount);
+    }
+    if (src.pObjectEntryCounts) {
+        pObjectEntryCounts = new uint32_t[src.objectCount];
+        memcpy ((void *)pObjectEntryCounts, (void *)src.pObjectEntryCounts, sizeof(uint32_t)*src.objectCount);
+    }
+    if (src.pObjectEntryUsageFlags) {
+        pObjectEntryUsageFlags = new VkObjectEntryUsageFlagsNVX[src.objectCount];
+        memcpy ((void *)pObjectEntryUsageFlags, (void *)src.pObjectEntryUsageFlags, sizeof(VkObjectEntryUsageFlagsNVX)*src.objectCount);
+    }
+
+    return *this;
+}
+
+safe_VkObjectTableCreateInfoNVX::~safe_VkObjectTableCreateInfoNVX()
+{
+    if (pObjectEntryTypes)
+        delete[] pObjectEntryTypes;
+    if (pObjectEntryCounts)
+        delete[] pObjectEntryCounts;
+    if (pObjectEntryUsageFlags)
+        delete[] pObjectEntryUsageFlags;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkObjectTableCreateInfoNVX::initialize(const VkObjectTableCreateInfoNVX* in_struct)
+{
+    sType = in_struct->sType;
+    objectCount = in_struct->objectCount;
+    pObjectEntryTypes = nullptr;
+    pObjectEntryCounts = nullptr;
+    pObjectEntryUsageFlags = nullptr;
+    maxUniformBuffersPerDescriptor = in_struct->maxUniformBuffersPerDescriptor;
+    maxStorageBuffersPerDescriptor = in_struct->maxStorageBuffersPerDescriptor;
+    maxStorageImagesPerDescriptor = in_struct->maxStorageImagesPerDescriptor;
+    maxSampledImagesPerDescriptor = in_struct->maxSampledImagesPerDescriptor;
+    maxPipelineLayouts = in_struct->maxPipelineLayouts;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pObjectEntryTypes) {
+        pObjectEntryTypes = new VkObjectEntryTypeNVX[in_struct->objectCount];
+        memcpy ((void *)pObjectEntryTypes, (void *)in_struct->pObjectEntryTypes, sizeof(VkObjectEntryTypeNVX)*in_struct->objectCount);
+    }
+    if (in_struct->pObjectEntryCounts) {
+        pObjectEntryCounts = new uint32_t[in_struct->objectCount];
+        memcpy ((void *)pObjectEntryCounts, (void *)in_struct->pObjectEntryCounts, sizeof(uint32_t)*in_struct->objectCount);
+    }
+    if (in_struct->pObjectEntryUsageFlags) {
+        pObjectEntryUsageFlags = new VkObjectEntryUsageFlagsNVX[in_struct->objectCount];
+        memcpy ((void *)pObjectEntryUsageFlags, (void *)in_struct->pObjectEntryUsageFlags, sizeof(VkObjectEntryUsageFlagsNVX)*in_struct->objectCount);
+    }
+}
+
+void safe_VkObjectTableCreateInfoNVX::initialize(const safe_VkObjectTableCreateInfoNVX* src)
+{
+    sType = src->sType;
+    objectCount = src->objectCount;
+    pObjectEntryTypes = nullptr;
+    pObjectEntryCounts = nullptr;
+    pObjectEntryUsageFlags = nullptr;
+    maxUniformBuffersPerDescriptor = src->maxUniformBuffersPerDescriptor;
+    maxStorageBuffersPerDescriptor = src->maxStorageBuffersPerDescriptor;
+    maxStorageImagesPerDescriptor = src->maxStorageImagesPerDescriptor;
+    maxSampledImagesPerDescriptor = src->maxSampledImagesPerDescriptor;
+    maxPipelineLayouts = src->maxPipelineLayouts;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pObjectEntryTypes) {
+        pObjectEntryTypes = new VkObjectEntryTypeNVX[src->objectCount];
+        memcpy ((void *)pObjectEntryTypes, (void *)src->pObjectEntryTypes, sizeof(VkObjectEntryTypeNVX)*src->objectCount);
+    }
+    if (src->pObjectEntryCounts) {
+        pObjectEntryCounts = new uint32_t[src->objectCount];
+        memcpy ((void *)pObjectEntryCounts, (void *)src->pObjectEntryCounts, sizeof(uint32_t)*src->objectCount);
+    }
+    if (src->pObjectEntryUsageFlags) {
+        pObjectEntryUsageFlags = new VkObjectEntryUsageFlagsNVX[src->objectCount];
+        memcpy ((void *)pObjectEntryUsageFlags, (void *)src->pObjectEntryUsageFlags, sizeof(VkObjectEntryUsageFlagsNVX)*src->objectCount);
+    }
+}
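+
+// safe_VkObjectTableCreateInfoNVX manages three parallel arrays
+// (pObjectEntryTypes, pObjectEntryCounts, pObjectEntryUsageFlags), each sized
+// by objectCount and released individually in operator= and the destructor.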
+
+safe_VkPipelineViewportWScalingStateCreateInfoNV::safe_VkPipelineViewportWScalingStateCreateInfoNV(const VkPipelineViewportWScalingStateCreateInfoNV* in_struct) :
+    sType(in_struct->sType),
+    viewportWScalingEnable(in_struct->viewportWScalingEnable),
+    viewportCount(in_struct->viewportCount),
+    pViewportWScalings(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pViewportWScalings) {
+        pViewportWScalings = new VkViewportWScalingNV[in_struct->viewportCount];
+        memcpy ((void *)pViewportWScalings, (void *)in_struct->pViewportWScalings, sizeof(VkViewportWScalingNV)*in_struct->viewportCount);
+    }
+}
+
+safe_VkPipelineViewportWScalingStateCreateInfoNV::safe_VkPipelineViewportWScalingStateCreateInfoNV() :
+    pNext(nullptr),
+    pViewportWScalings(nullptr)
+{}
+
+safe_VkPipelineViewportWScalingStateCreateInfoNV::safe_VkPipelineViewportWScalingStateCreateInfoNV(const safe_VkPipelineViewportWScalingStateCreateInfoNV& src)
+{
+    sType = src.sType;
+    viewportWScalingEnable = src.viewportWScalingEnable;
+    viewportCount = src.viewportCount;
+    pViewportWScalings = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pViewportWScalings) {
+        pViewportWScalings = new VkViewportWScalingNV[src.viewportCount];
+        memcpy ((void *)pViewportWScalings, (void *)src.pViewportWScalings, sizeof(VkViewportWScalingNV)*src.viewportCount);
+    }
+}
+
+safe_VkPipelineViewportWScalingStateCreateInfoNV& safe_VkPipelineViewportWScalingStateCreateInfoNV::operator=(const safe_VkPipelineViewportWScalingStateCreateInfoNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pViewportWScalings)
+        delete[] pViewportWScalings;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    viewportWScalingEnable = src.viewportWScalingEnable;
+    viewportCount = src.viewportCount;
+    pViewportWScalings = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pViewportWScalings) {
+        pViewportWScalings = new VkViewportWScalingNV[src.viewportCount];
+        memcpy ((void *)pViewportWScalings, (void *)src.pViewportWScalings, sizeof(VkViewportWScalingNV)*src.viewportCount);
+    }
+
+    return *this;
+}
+
+safe_VkPipelineViewportWScalingStateCreateInfoNV::~safe_VkPipelineViewportWScalingStateCreateInfoNV()
+{
+    if (pViewportWScalings)
+        delete[] pViewportWScalings;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineViewportWScalingStateCreateInfoNV::initialize(const VkPipelineViewportWScalingStateCreateInfoNV* in_struct)
+{
+    sType = in_struct->sType;
+    viewportWScalingEnable = in_struct->viewportWScalingEnable;
+    viewportCount = in_struct->viewportCount;
+    pViewportWScalings = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pViewportWScalings) {
+        pViewportWScalings = new VkViewportWScalingNV[in_struct->viewportCount];
+        memcpy ((void *)pViewportWScalings, (void *)in_struct->pViewportWScalings, sizeof(VkViewportWScalingNV)*in_struct->viewportCount);
+    }
+}
+
+void safe_VkPipelineViewportWScalingStateCreateInfoNV::initialize(const safe_VkPipelineViewportWScalingStateCreateInfoNV* src)
+{
+    sType = src->sType;
+    viewportWScalingEnable = src->viewportWScalingEnable;
+    viewportCount = src->viewportCount;
+    pViewportWScalings = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pViewportWScalings) {
+        pViewportWScalings = new VkViewportWScalingNV[src->viewportCount];
+        memcpy ((void *)pViewportWScalings, (void *)src->pViewportWScalings, sizeof(VkViewportWScalingNV)*src->viewportCount);
+    }
+}
+
+safe_VkSurfaceCapabilities2EXT::safe_VkSurfaceCapabilities2EXT(const VkSurfaceCapabilities2EXT* in_struct) :
+    sType(in_struct->sType),
+    minImageCount(in_struct->minImageCount),
+    maxImageCount(in_struct->maxImageCount),
+    currentExtent(in_struct->currentExtent),
+    minImageExtent(in_struct->minImageExtent),
+    maxImageExtent(in_struct->maxImageExtent),
+    maxImageArrayLayers(in_struct->maxImageArrayLayers),
+    supportedTransforms(in_struct->supportedTransforms),
+    currentTransform(in_struct->currentTransform),
+    supportedCompositeAlpha(in_struct->supportedCompositeAlpha),
+    supportedUsageFlags(in_struct->supportedUsageFlags),
+    supportedSurfaceCounters(in_struct->supportedSurfaceCounters)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkSurfaceCapabilities2EXT::safe_VkSurfaceCapabilities2EXT() :
+    pNext(nullptr)
+{}
+
+safe_VkSurfaceCapabilities2EXT::safe_VkSurfaceCapabilities2EXT(const safe_VkSurfaceCapabilities2EXT& src)
+{
+    sType = src.sType;
+    minImageCount = src.minImageCount;
+    maxImageCount = src.maxImageCount;
+    currentExtent = src.currentExtent;
+    minImageExtent = src.minImageExtent;
+    maxImageExtent = src.maxImageExtent;
+    maxImageArrayLayers = src.maxImageArrayLayers;
+    supportedTransforms = src.supportedTransforms;
+    currentTransform = src.currentTransform;
+    supportedCompositeAlpha = src.supportedCompositeAlpha;
+    supportedUsageFlags = src.supportedUsageFlags;
+    supportedSurfaceCounters = src.supportedSurfaceCounters;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkSurfaceCapabilities2EXT& safe_VkSurfaceCapabilities2EXT::operator=(const safe_VkSurfaceCapabilities2EXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    minImageCount = src.minImageCount;
+    maxImageCount = src.maxImageCount;
+    currentExtent = src.currentExtent;
+    minImageExtent = src.minImageExtent;
+    maxImageExtent = src.maxImageExtent;
+    maxImageArrayLayers = src.maxImageArrayLayers;
+    supportedTransforms = src.supportedTransforms;
+    currentTransform = src.currentTransform;
+    supportedCompositeAlpha = src.supportedCompositeAlpha;
+    supportedUsageFlags = src.supportedUsageFlags;
+    supportedSurfaceCounters = src.supportedSurfaceCounters;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkSurfaceCapabilities2EXT::~safe_VkSurfaceCapabilities2EXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSurfaceCapabilities2EXT::initialize(const VkSurfaceCapabilities2EXT* in_struct)
+{
+    sType = in_struct->sType;
+    minImageCount = in_struct->minImageCount;
+    maxImageCount = in_struct->maxImageCount;
+    currentExtent = in_struct->currentExtent;
+    minImageExtent = in_struct->minImageExtent;
+    maxImageExtent = in_struct->maxImageExtent;
+    maxImageArrayLayers = in_struct->maxImageArrayLayers;
+    supportedTransforms = in_struct->supportedTransforms;
+    currentTransform = in_struct->currentTransform;
+    supportedCompositeAlpha = in_struct->supportedCompositeAlpha;
+    supportedUsageFlags = in_struct->supportedUsageFlags;
+    supportedSurfaceCounters = in_struct->supportedSurfaceCounters;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkSurfaceCapabilities2EXT::initialize(const safe_VkSurfaceCapabilities2EXT* src)
+{
+    sType = src->sType;
+    minImageCount = src->minImageCount;
+    maxImageCount = src->maxImageCount;
+    currentExtent = src->currentExtent;
+    minImageExtent = src->minImageExtent;
+    maxImageExtent = src->maxImageExtent;
+    maxImageArrayLayers = src->maxImageArrayLayers;
+    supportedTransforms = src->supportedTransforms;
+    currentTransform = src->currentTransform;
+    supportedCompositeAlpha = src->supportedCompositeAlpha;
+    supportedUsageFlags = src->supportedUsageFlags;
+    supportedSurfaceCounters = src->supportedSurfaceCounters;
+    pNext = SafePnextCopy(src->pNext);
+}
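+
+// safe_VkSurfaceCapabilities2EXT and the display/device event wrappers that
+// follow carry only scalar members, so the generated code only needs to own
+// the deep-copied pNext chain (SafePnextCopy in, FreePnextChain out).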
+
+safe_VkDisplayPowerInfoEXT::safe_VkDisplayPowerInfoEXT(const VkDisplayPowerInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    powerState(in_struct->powerState)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDisplayPowerInfoEXT::safe_VkDisplayPowerInfoEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkDisplayPowerInfoEXT::safe_VkDisplayPowerInfoEXT(const safe_VkDisplayPowerInfoEXT& src)
+{
+    sType = src.sType;
+    powerState = src.powerState;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDisplayPowerInfoEXT& safe_VkDisplayPowerInfoEXT::operator=(const safe_VkDisplayPowerInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    powerState = src.powerState;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDisplayPowerInfoEXT::~safe_VkDisplayPowerInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDisplayPowerInfoEXT::initialize(const VkDisplayPowerInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    powerState = in_struct->powerState;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDisplayPowerInfoEXT::initialize(const safe_VkDisplayPowerInfoEXT* src)
+{
+    sType = src->sType;
+    powerState = src->powerState;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDeviceEventInfoEXT::safe_VkDeviceEventInfoEXT(const VkDeviceEventInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    deviceEvent(in_struct->deviceEvent)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDeviceEventInfoEXT::safe_VkDeviceEventInfoEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkDeviceEventInfoEXT::safe_VkDeviceEventInfoEXT(const safe_VkDeviceEventInfoEXT& src)
+{
+    sType = src.sType;
+    deviceEvent = src.deviceEvent;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDeviceEventInfoEXT& safe_VkDeviceEventInfoEXT::operator=(const safe_VkDeviceEventInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    deviceEvent = src.deviceEvent;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDeviceEventInfoEXT::~safe_VkDeviceEventInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDeviceEventInfoEXT::initialize(const VkDeviceEventInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    deviceEvent = in_struct->deviceEvent;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDeviceEventInfoEXT::initialize(const safe_VkDeviceEventInfoEXT* src)
+{
+    sType = src->sType;
+    deviceEvent = src->deviceEvent;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDisplayEventInfoEXT::safe_VkDisplayEventInfoEXT(const VkDisplayEventInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    displayEvent(in_struct->displayEvent)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDisplayEventInfoEXT::safe_VkDisplayEventInfoEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkDisplayEventInfoEXT::safe_VkDisplayEventInfoEXT(const safe_VkDisplayEventInfoEXT& src)
+{
+    sType = src.sType;
+    displayEvent = src.displayEvent;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDisplayEventInfoEXT& safe_VkDisplayEventInfoEXT::operator=(const safe_VkDisplayEventInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    displayEvent = src.displayEvent;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDisplayEventInfoEXT::~safe_VkDisplayEventInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDisplayEventInfoEXT::initialize(const VkDisplayEventInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    displayEvent = in_struct->displayEvent;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDisplayEventInfoEXT::initialize(const safe_VkDisplayEventInfoEXT* src)
+{
+    sType = src->sType;
+    displayEvent = src->displayEvent;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkSwapchainCounterCreateInfoEXT::safe_VkSwapchainCounterCreateInfoEXT(const VkSwapchainCounterCreateInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    surfaceCounters(in_struct->surfaceCounters)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkSwapchainCounterCreateInfoEXT::safe_VkSwapchainCounterCreateInfoEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkSwapchainCounterCreateInfoEXT::safe_VkSwapchainCounterCreateInfoEXT(const safe_VkSwapchainCounterCreateInfoEXT& src)
+{
+    sType = src.sType;
+    surfaceCounters = src.surfaceCounters;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkSwapchainCounterCreateInfoEXT& safe_VkSwapchainCounterCreateInfoEXT::operator=(const safe_VkSwapchainCounterCreateInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    surfaceCounters = src.surfaceCounters;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkSwapchainCounterCreateInfoEXT::~safe_VkSwapchainCounterCreateInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSwapchainCounterCreateInfoEXT::initialize(const VkSwapchainCounterCreateInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    surfaceCounters = in_struct->surfaceCounters;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkSwapchainCounterCreateInfoEXT::initialize(const safe_VkSwapchainCounterCreateInfoEXT* src)
+{
+    sType = src->sType;
+    surfaceCounters = src->surfaceCounters;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPresentTimesInfoGOOGLE::safe_VkPresentTimesInfoGOOGLE(const VkPresentTimesInfoGOOGLE* in_struct) :
+    sType(in_struct->sType),
+    swapchainCount(in_struct->swapchainCount),
+    pTimes(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pTimes) {
+        pTimes = new VkPresentTimeGOOGLE[in_struct->swapchainCount];
+        memcpy ((void *)pTimes, (void *)in_struct->pTimes, sizeof(VkPresentTimeGOOGLE)*in_struct->swapchainCount);
+    }
+}
+
+safe_VkPresentTimesInfoGOOGLE::safe_VkPresentTimesInfoGOOGLE() :
+    pNext(nullptr),
+    pTimes(nullptr)
+{}
+
+safe_VkPresentTimesInfoGOOGLE::safe_VkPresentTimesInfoGOOGLE(const safe_VkPresentTimesInfoGOOGLE& src)
+{
+    sType = src.sType;
+    swapchainCount = src.swapchainCount;
+    pTimes = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pTimes) {
+        pTimes = new VkPresentTimeGOOGLE[src.swapchainCount];
+        memcpy ((void *)pTimes, (void *)src.pTimes, sizeof(VkPresentTimeGOOGLE)*src.swapchainCount);
+    }
+}
+
+safe_VkPresentTimesInfoGOOGLE& safe_VkPresentTimesInfoGOOGLE::operator=(const safe_VkPresentTimesInfoGOOGLE& src)
+{
+    if (&src == this) return *this;
+
+    if (pTimes)
+        delete[] pTimes;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    swapchainCount = src.swapchainCount;
+    pTimes = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pTimes) {
+        pTimes = new VkPresentTimeGOOGLE[src.swapchainCount];
+        memcpy ((void *)pTimes, (void *)src.pTimes, sizeof(VkPresentTimeGOOGLE)*src.swapchainCount);
+    }
+
+    return *this;
+}
+
+safe_VkPresentTimesInfoGOOGLE::~safe_VkPresentTimesInfoGOOGLE()
+{
+    if (pTimes)
+        delete[] pTimes;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPresentTimesInfoGOOGLE::initialize(const VkPresentTimesInfoGOOGLE* in_struct)
+{
+    sType = in_struct->sType;
+    swapchainCount = in_struct->swapchainCount;
+    pTimes = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pTimes) {
+        pTimes = new VkPresentTimeGOOGLE[in_struct->swapchainCount];
+        memcpy ((void *)pTimes, (void *)in_struct->pTimes, sizeof(VkPresentTimeGOOGLE)*in_struct->swapchainCount);
+    }
+}
+
+void safe_VkPresentTimesInfoGOOGLE::initialize(const safe_VkPresentTimesInfoGOOGLE* src)
+{
+    sType = src->sType;
+    swapchainCount = src->swapchainCount;
+    pTimes = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pTimes) {
+        pTimes = new VkPresentTimeGOOGLE[src->swapchainCount];
+        memcpy ((void *)pTimes, (void *)src->pTimes, sizeof(VkPresentTimeGOOGLE)*src->swapchainCount);
+    }
+}
+
+safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX::safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX* in_struct) :
+    sType(in_struct->sType),
+    perViewPositionAllComponents(in_struct->perViewPositionAllComponents)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX::safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX::safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(const safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX& src)
+{
+    sType = src.sType;
+    perViewPositionAllComponents = src.perViewPositionAllComponents;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX& safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX::operator=(const safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    perViewPositionAllComponents = src.perViewPositionAllComponents;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX::~safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX::initialize(const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX* in_struct)
+{
+    sType = in_struct->sType;
+    perViewPositionAllComponents = in_struct->perViewPositionAllComponents;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX::initialize(const safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX* src)
+{
+    sType = src->sType;
+    perViewPositionAllComponents = src->perViewPositionAllComponents;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPipelineViewportSwizzleStateCreateInfoNV::safe_VkPipelineViewportSwizzleStateCreateInfoNV(const VkPipelineViewportSwizzleStateCreateInfoNV* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    viewportCount(in_struct->viewportCount),
+    pViewportSwizzles(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pViewportSwizzles) {
+        pViewportSwizzles = new VkViewportSwizzleNV[in_struct->viewportCount];
+        memcpy ((void *)pViewportSwizzles, (void *)in_struct->pViewportSwizzles, sizeof(VkViewportSwizzleNV)*in_struct->viewportCount);
+    }
+}
+
+safe_VkPipelineViewportSwizzleStateCreateInfoNV::safe_VkPipelineViewportSwizzleStateCreateInfoNV() :
+    pNext(nullptr),
+    pViewportSwizzles(nullptr)
+{}
+
+safe_VkPipelineViewportSwizzleStateCreateInfoNV::safe_VkPipelineViewportSwizzleStateCreateInfoNV(const safe_VkPipelineViewportSwizzleStateCreateInfoNV& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    viewportCount = src.viewportCount;
+    pViewportSwizzles = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pViewportSwizzles) {
+        pViewportSwizzles = new VkViewportSwizzleNV[src.viewportCount];
+        memcpy ((void *)pViewportSwizzles, (void *)src.pViewportSwizzles, sizeof(VkViewportSwizzleNV)*src.viewportCount);
+    }
+}
+
+safe_VkPipelineViewportSwizzleStateCreateInfoNV& safe_VkPipelineViewportSwizzleStateCreateInfoNV::operator=(const safe_VkPipelineViewportSwizzleStateCreateInfoNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pViewportSwizzles)
+        delete[] pViewportSwizzles;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    viewportCount = src.viewportCount;
+    pViewportSwizzles = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pViewportSwizzles) {
+        pViewportSwizzles = new VkViewportSwizzleNV[src.viewportCount];
+        memcpy ((void *)pViewportSwizzles, (void *)src.pViewportSwizzles, sizeof(VkViewportSwizzleNV)*src.viewportCount);
+    }
+
+    return *this;
+}
+
+safe_VkPipelineViewportSwizzleStateCreateInfoNV::~safe_VkPipelineViewportSwizzleStateCreateInfoNV()
+{
+    if (pViewportSwizzles)
+        delete[] pViewportSwizzles;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineViewportSwizzleStateCreateInfoNV::initialize(const VkPipelineViewportSwizzleStateCreateInfoNV* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    viewportCount = in_struct->viewportCount;
+    pViewportSwizzles = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pViewportSwizzles) {
+        pViewportSwizzles = new VkViewportSwizzleNV[in_struct->viewportCount];
+        memcpy ((void *)pViewportSwizzles, (void *)in_struct->pViewportSwizzles, sizeof(VkViewportSwizzleNV)*in_struct->viewportCount);
+    }
+}
+
+void safe_VkPipelineViewportSwizzleStateCreateInfoNV::initialize(const safe_VkPipelineViewportSwizzleStateCreateInfoNV* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    viewportCount = src->viewportCount;
+    pViewportSwizzles = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pViewportSwizzles) {
+        pViewportSwizzles = new VkViewportSwizzleNV[src->viewportCount];
+        memcpy ((void *)pViewportSwizzles, (void *)src->pViewportSwizzles, sizeof(VkViewportSwizzleNV)*src->viewportCount);
+    }
+}
+
+safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT::safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT(const VkPhysicalDeviceDiscardRectanglePropertiesEXT* in_struct) :
+    sType(in_struct->sType),
+    maxDiscardRectangles(in_struct->maxDiscardRectangles)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT::safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT::safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT(const safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT& src)
+{
+    sType = src.sType;
+    maxDiscardRectangles = src.maxDiscardRectangles;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT& safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT::operator=(const safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    maxDiscardRectangles = src.maxDiscardRectangles;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT::~safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT::initialize(const VkPhysicalDeviceDiscardRectanglePropertiesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    maxDiscardRectangles = in_struct->maxDiscardRectangles;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT::initialize(const safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT* src)
+{
+    sType = src->sType;
+    maxDiscardRectangles = src->maxDiscardRectangles;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPipelineDiscardRectangleStateCreateInfoEXT::safe_VkPipelineDiscardRectangleStateCreateInfoEXT(const VkPipelineDiscardRectangleStateCreateInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    discardRectangleMode(in_struct->discardRectangleMode),
+    discardRectangleCount(in_struct->discardRectangleCount),
+    pDiscardRectangles(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pDiscardRectangles) {
+        pDiscardRectangles = new VkRect2D[in_struct->discardRectangleCount];
+        memcpy ((void *)pDiscardRectangles, (void *)in_struct->pDiscardRectangles, sizeof(VkRect2D)*in_struct->discardRectangleCount);
+    }
+}
+
+safe_VkPipelineDiscardRectangleStateCreateInfoEXT::safe_VkPipelineDiscardRectangleStateCreateInfoEXT() :
+    pNext(nullptr),
+    pDiscardRectangles(nullptr)
+{}
+
+safe_VkPipelineDiscardRectangleStateCreateInfoEXT::safe_VkPipelineDiscardRectangleStateCreateInfoEXT(const safe_VkPipelineDiscardRectangleStateCreateInfoEXT& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    discardRectangleMode = src.discardRectangleMode;
+    discardRectangleCount = src.discardRectangleCount;
+    pDiscardRectangles = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pDiscardRectangles) {
+        pDiscardRectangles = new VkRect2D[src.discardRectangleCount];
+        memcpy ((void *)pDiscardRectangles, (void *)src.pDiscardRectangles, sizeof(VkRect2D)*src.discardRectangleCount);
+    }
+}
+
+safe_VkPipelineDiscardRectangleStateCreateInfoEXT& safe_VkPipelineDiscardRectangleStateCreateInfoEXT::operator=(const safe_VkPipelineDiscardRectangleStateCreateInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pDiscardRectangles)
+        delete[] pDiscardRectangles;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    discardRectangleMode = src.discardRectangleMode;
+    discardRectangleCount = src.discardRectangleCount;
+    pDiscardRectangles = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pDiscardRectangles) {
+        pDiscardRectangles = new VkRect2D[src.discardRectangleCount];
+        memcpy ((void *)pDiscardRectangles, (void *)src.pDiscardRectangles, sizeof(VkRect2D)*src.discardRectangleCount);
+    }
+
+    return *this;
+}
+
+safe_VkPipelineDiscardRectangleStateCreateInfoEXT::~safe_VkPipelineDiscardRectangleStateCreateInfoEXT()
+{
+    if (pDiscardRectangles)
+        delete[] pDiscardRectangles;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineDiscardRectangleStateCreateInfoEXT::initialize(const VkPipelineDiscardRectangleStateCreateInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    discardRectangleMode = in_struct->discardRectangleMode;
+    discardRectangleCount = in_struct->discardRectangleCount;
+    pDiscardRectangles = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pDiscardRectangles) {
+        pDiscardRectangles = new VkRect2D[in_struct->discardRectangleCount];
+        memcpy ((void *)pDiscardRectangles, (void *)in_struct->pDiscardRectangles, sizeof(VkRect2D)*in_struct->discardRectangleCount);
+    }
+}
+
+void safe_VkPipelineDiscardRectangleStateCreateInfoEXT::initialize(const safe_VkPipelineDiscardRectangleStateCreateInfoEXT* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    discardRectangleMode = src->discardRectangleMode;
+    discardRectangleCount = src->discardRectangleCount;
+    pDiscardRectangles = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pDiscardRectangles) {
+        pDiscardRectangles = new VkRect2D[src->discardRectangleCount];
+        memcpy ((void *)pDiscardRectangles, (void *)src->pDiscardRectangles, sizeof(VkRect2D)*src->discardRectangleCount);
+    }
+}
+
+safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT::safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT(const VkPhysicalDeviceConservativeRasterizationPropertiesEXT* in_struct) :
+    sType(in_struct->sType),
+    primitiveOverestimationSize(in_struct->primitiveOverestimationSize),
+    maxExtraPrimitiveOverestimationSize(in_struct->maxExtraPrimitiveOverestimationSize),
+    extraPrimitiveOverestimationSizeGranularity(in_struct->extraPrimitiveOverestimationSizeGranularity),
+    primitiveUnderestimation(in_struct->primitiveUnderestimation),
+    conservativePointAndLineRasterization(in_struct->conservativePointAndLineRasterization),
+    degenerateTrianglesRasterized(in_struct->degenerateTrianglesRasterized),
+    degenerateLinesRasterized(in_struct->degenerateLinesRasterized),
+    fullyCoveredFragmentShaderInputVariable(in_struct->fullyCoveredFragmentShaderInputVariable),
+    conservativeRasterizationPostDepthCoverage(in_struct->conservativeRasterizationPostDepthCoverage)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT::safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT::safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT(const safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT& src)
+{
+    sType = src.sType;
+    primitiveOverestimationSize = src.primitiveOverestimationSize;
+    maxExtraPrimitiveOverestimationSize = src.maxExtraPrimitiveOverestimationSize;
+    extraPrimitiveOverestimationSizeGranularity = src.extraPrimitiveOverestimationSizeGranularity;
+    primitiveUnderestimation = src.primitiveUnderestimation;
+    conservativePointAndLineRasterization = src.conservativePointAndLineRasterization;
+    degenerateTrianglesRasterized = src.degenerateTrianglesRasterized;
+    degenerateLinesRasterized = src.degenerateLinesRasterized;
+    fullyCoveredFragmentShaderInputVariable = src.fullyCoveredFragmentShaderInputVariable;
+    conservativeRasterizationPostDepthCoverage = src.conservativeRasterizationPostDepthCoverage;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT& safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT::operator=(const safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    primitiveOverestimationSize = src.primitiveOverestimationSize;
+    maxExtraPrimitiveOverestimationSize = src.maxExtraPrimitiveOverestimationSize;
+    extraPrimitiveOverestimationSizeGranularity = src.extraPrimitiveOverestimationSizeGranularity;
+    primitiveUnderestimation = src.primitiveUnderestimation;
+    conservativePointAndLineRasterization = src.conservativePointAndLineRasterization;
+    degenerateTrianglesRasterized = src.degenerateTrianglesRasterized;
+    degenerateLinesRasterized = src.degenerateLinesRasterized;
+    fullyCoveredFragmentShaderInputVariable = src.fullyCoveredFragmentShaderInputVariable;
+    conservativeRasterizationPostDepthCoverage = src.conservativeRasterizationPostDepthCoverage;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT::~safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT::initialize(const VkPhysicalDeviceConservativeRasterizationPropertiesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    primitiveOverestimationSize = in_struct->primitiveOverestimationSize;
+    maxExtraPrimitiveOverestimationSize = in_struct->maxExtraPrimitiveOverestimationSize;
+    extraPrimitiveOverestimationSizeGranularity = in_struct->extraPrimitiveOverestimationSizeGranularity;
+    primitiveUnderestimation = in_struct->primitiveUnderestimation;
+    conservativePointAndLineRasterization = in_struct->conservativePointAndLineRasterization;
+    degenerateTrianglesRasterized = in_struct->degenerateTrianglesRasterized;
+    degenerateLinesRasterized = in_struct->degenerateLinesRasterized;
+    fullyCoveredFragmentShaderInputVariable = in_struct->fullyCoveredFragmentShaderInputVariable;
+    conservativeRasterizationPostDepthCoverage = in_struct->conservativeRasterizationPostDepthCoverage;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT::initialize(const safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT* src)
+{
+    sType = src->sType;
+    primitiveOverestimationSize = src->primitiveOverestimationSize;
+    maxExtraPrimitiveOverestimationSize = src->maxExtraPrimitiveOverestimationSize;
+    extraPrimitiveOverestimationSizeGranularity = src->extraPrimitiveOverestimationSizeGranularity;
+    primitiveUnderestimation = src->primitiveUnderestimation;
+    conservativePointAndLineRasterization = src->conservativePointAndLineRasterization;
+    degenerateTrianglesRasterized = src->degenerateTrianglesRasterized;
+    degenerateLinesRasterized = src->degenerateLinesRasterized;
+    fullyCoveredFragmentShaderInputVariable = src->fullyCoveredFragmentShaderInputVariable;
+    conservativeRasterizationPostDepthCoverage = src->conservativeRasterizationPostDepthCoverage;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPipelineRasterizationConservativeStateCreateInfoEXT::safe_VkPipelineRasterizationConservativeStateCreateInfoEXT(const VkPipelineRasterizationConservativeStateCreateInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    conservativeRasterizationMode(in_struct->conservativeRasterizationMode),
+    extraPrimitiveOverestimationSize(in_struct->extraPrimitiveOverestimationSize)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPipelineRasterizationConservativeStateCreateInfoEXT::safe_VkPipelineRasterizationConservativeStateCreateInfoEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPipelineRasterizationConservativeStateCreateInfoEXT::safe_VkPipelineRasterizationConservativeStateCreateInfoEXT(const safe_VkPipelineRasterizationConservativeStateCreateInfoEXT& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    conservativeRasterizationMode = src.conservativeRasterizationMode;
+    extraPrimitiveOverestimationSize = src.extraPrimitiveOverestimationSize;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPipelineRasterizationConservativeStateCreateInfoEXT& safe_VkPipelineRasterizationConservativeStateCreateInfoEXT::operator=(const safe_VkPipelineRasterizationConservativeStateCreateInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    conservativeRasterizationMode = src.conservativeRasterizationMode;
+    extraPrimitiveOverestimationSize = src.extraPrimitiveOverestimationSize;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPipelineRasterizationConservativeStateCreateInfoEXT::~safe_VkPipelineRasterizationConservativeStateCreateInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineRasterizationConservativeStateCreateInfoEXT::initialize(const VkPipelineRasterizationConservativeStateCreateInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    conservativeRasterizationMode = in_struct->conservativeRasterizationMode;
+    extraPrimitiveOverestimationSize = in_struct->extraPrimitiveOverestimationSize;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPipelineRasterizationConservativeStateCreateInfoEXT::initialize(const safe_VkPipelineRasterizationConservativeStateCreateInfoEXT* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    conservativeRasterizationMode = src->conservativeRasterizationMode;
+    extraPrimitiveOverestimationSize = src->extraPrimitiveOverestimationSize;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT::safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT(const VkPhysicalDeviceDepthClipEnableFeaturesEXT* in_struct) :
+    sType(in_struct->sType),
+    depthClipEnable(in_struct->depthClipEnable)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT::safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT::safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT(const safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT& src)
+{
+    sType = src.sType;
+    depthClipEnable = src.depthClipEnable;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT& safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT::operator=(const safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    depthClipEnable = src.depthClipEnable;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT::~safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT::initialize(const VkPhysicalDeviceDepthClipEnableFeaturesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    depthClipEnable = in_struct->depthClipEnable;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT::initialize(const safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT* src)
+{
+    sType = src->sType;
+    depthClipEnable = src->depthClipEnable;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT::safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT(const VkPipelineRasterizationDepthClipStateCreateInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    depthClipEnable(in_struct->depthClipEnable)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT::safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT::safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT(const safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    depthClipEnable = src.depthClipEnable;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT& safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT::operator=(const safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    depthClipEnable = src.depthClipEnable;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT::~safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT::initialize(const VkPipelineRasterizationDepthClipStateCreateInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    depthClipEnable = in_struct->depthClipEnable;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT::initialize(const safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    depthClipEnable = src->depthClipEnable;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkHdrMetadataEXT::safe_VkHdrMetadataEXT(const VkHdrMetadataEXT* in_struct) :
+    sType(in_struct->sType),
+    displayPrimaryRed(in_struct->displayPrimaryRed),
+    displayPrimaryGreen(in_struct->displayPrimaryGreen),
+    displayPrimaryBlue(in_struct->displayPrimaryBlue),
+    whitePoint(in_struct->whitePoint),
+    maxLuminance(in_struct->maxLuminance),
+    minLuminance(in_struct->minLuminance),
+    maxContentLightLevel(in_struct->maxContentLightLevel),
+    maxFrameAverageLightLevel(in_struct->maxFrameAverageLightLevel)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkHdrMetadataEXT::safe_VkHdrMetadataEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkHdrMetadataEXT::safe_VkHdrMetadataEXT(const safe_VkHdrMetadataEXT& src)
+{
+    sType = src.sType;
+    displayPrimaryRed = src.displayPrimaryRed;
+    displayPrimaryGreen = src.displayPrimaryGreen;
+    displayPrimaryBlue = src.displayPrimaryBlue;
+    whitePoint = src.whitePoint;
+    maxLuminance = src.maxLuminance;
+    minLuminance = src.minLuminance;
+    maxContentLightLevel = src.maxContentLightLevel;
+    maxFrameAverageLightLevel = src.maxFrameAverageLightLevel;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkHdrMetadataEXT& safe_VkHdrMetadataEXT::operator=(const safe_VkHdrMetadataEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    displayPrimaryRed = src.displayPrimaryRed;
+    displayPrimaryGreen = src.displayPrimaryGreen;
+    displayPrimaryBlue = src.displayPrimaryBlue;
+    whitePoint = src.whitePoint;
+    maxLuminance = src.maxLuminance;
+    minLuminance = src.minLuminance;
+    maxContentLightLevel = src.maxContentLightLevel;
+    maxFrameAverageLightLevel = src.maxFrameAverageLightLevel;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkHdrMetadataEXT::~safe_VkHdrMetadataEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkHdrMetadataEXT::initialize(const VkHdrMetadataEXT* in_struct)
+{
+    sType = in_struct->sType;
+    displayPrimaryRed = in_struct->displayPrimaryRed;
+    displayPrimaryGreen = in_struct->displayPrimaryGreen;
+    displayPrimaryBlue = in_struct->displayPrimaryBlue;
+    whitePoint = in_struct->whitePoint;
+    maxLuminance = in_struct->maxLuminance;
+    minLuminance = in_struct->minLuminance;
+    maxContentLightLevel = in_struct->maxContentLightLevel;
+    maxFrameAverageLightLevel = in_struct->maxFrameAverageLightLevel;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkHdrMetadataEXT::initialize(const safe_VkHdrMetadataEXT* src)
+{
+    sType = src->sType;
+    displayPrimaryRed = src->displayPrimaryRed;
+    displayPrimaryGreen = src->displayPrimaryGreen;
+    displayPrimaryBlue = src->displayPrimaryBlue;
+    whitePoint = src->whitePoint;
+    maxLuminance = src->maxLuminance;
+    minLuminance = src->minLuminance;
+    maxContentLightLevel = src->maxContentLightLevel;
+    maxFrameAverageLightLevel = src->maxFrameAverageLightLevel;
+    pNext = SafePnextCopy(src->pNext);
+}
+#ifdef VK_USE_PLATFORM_IOS_MVK
+
+
+safe_VkIOSSurfaceCreateInfoMVK::safe_VkIOSSurfaceCreateInfoMVK(const VkIOSSurfaceCreateInfoMVK* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    pView(in_struct->pView)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkIOSSurfaceCreateInfoMVK::safe_VkIOSSurfaceCreateInfoMVK() :
+    pNext(nullptr),
+    pView(nullptr)
+{}
+
+safe_VkIOSSurfaceCreateInfoMVK::safe_VkIOSSurfaceCreateInfoMVK(const safe_VkIOSSurfaceCreateInfoMVK& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    pView = src.pView;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkIOSSurfaceCreateInfoMVK& safe_VkIOSSurfaceCreateInfoMVK::operator=(const safe_VkIOSSurfaceCreateInfoMVK& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    pView = src.pView;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkIOSSurfaceCreateInfoMVK::~safe_VkIOSSurfaceCreateInfoMVK()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkIOSSurfaceCreateInfoMVK::initialize(const VkIOSSurfaceCreateInfoMVK* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    pView = in_struct->pView;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkIOSSurfaceCreateInfoMVK::initialize(const safe_VkIOSSurfaceCreateInfoMVK* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    pView = src->pView;
+    pNext = SafePnextCopy(src->pNext);
+}
+#endif // VK_USE_PLATFORM_IOS_MVK
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+
+
+safe_VkMacOSSurfaceCreateInfoMVK::safe_VkMacOSSurfaceCreateInfoMVK(const VkMacOSSurfaceCreateInfoMVK* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    pView(in_struct->pView)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkMacOSSurfaceCreateInfoMVK::safe_VkMacOSSurfaceCreateInfoMVK() :
+    pNext(nullptr),
+    pView(nullptr)
+{}
+
+safe_VkMacOSSurfaceCreateInfoMVK::safe_VkMacOSSurfaceCreateInfoMVK(const safe_VkMacOSSurfaceCreateInfoMVK& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    pView = src.pView;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkMacOSSurfaceCreateInfoMVK& safe_VkMacOSSurfaceCreateInfoMVK::operator=(const safe_VkMacOSSurfaceCreateInfoMVK& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    pView = src.pView;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkMacOSSurfaceCreateInfoMVK::~safe_VkMacOSSurfaceCreateInfoMVK()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkMacOSSurfaceCreateInfoMVK::initialize(const VkMacOSSurfaceCreateInfoMVK* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    pView = in_struct->pView;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkMacOSSurfaceCreateInfoMVK::initialize(const safe_VkMacOSSurfaceCreateInfoMVK* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    pView = src->pView;
+    pNext = SafePnextCopy(src->pNext);
+}
+#endif // VK_USE_PLATFORM_MACOS_MVK
+
+
+safe_VkDebugUtilsObjectNameInfoEXT::safe_VkDebugUtilsObjectNameInfoEXT(const VkDebugUtilsObjectNameInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    objectType(in_struct->objectType),
+    objectHandle(in_struct->objectHandle)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    pObjectName = SafeStringCopy(in_struct->pObjectName);
+}
+
+safe_VkDebugUtilsObjectNameInfoEXT::safe_VkDebugUtilsObjectNameInfoEXT() :
+    pNext(nullptr),
+    pObjectName(nullptr)
+{}
+
+safe_VkDebugUtilsObjectNameInfoEXT::safe_VkDebugUtilsObjectNameInfoEXT(const safe_VkDebugUtilsObjectNameInfoEXT& src)
+{
+    sType = src.sType;
+    objectType = src.objectType;
+    objectHandle = src.objectHandle;
+    pNext = SafePnextCopy(src.pNext);
+    pObjectName = SafeStringCopy(src.pObjectName);
+}
+
+safe_VkDebugUtilsObjectNameInfoEXT& safe_VkDebugUtilsObjectNameInfoEXT::operator=(const safe_VkDebugUtilsObjectNameInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pObjectName) delete [] pObjectName;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    objectType = src.objectType;
+    objectHandle = src.objectHandle;
+    pNext = SafePnextCopy(src.pNext);
+    pObjectName = SafeStringCopy(src.pObjectName);
+
+    return *this;
+}
+
+safe_VkDebugUtilsObjectNameInfoEXT::~safe_VkDebugUtilsObjectNameInfoEXT()
+{
+    if (pObjectName) delete [] pObjectName;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDebugUtilsObjectNameInfoEXT::initialize(const VkDebugUtilsObjectNameInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    objectType = in_struct->objectType;
+    objectHandle = in_struct->objectHandle;
+    pNext = SafePnextCopy(in_struct->pNext);
+    pObjectName = SafeStringCopy(in_struct->pObjectName);
+}
+
+void safe_VkDebugUtilsObjectNameInfoEXT::initialize(const safe_VkDebugUtilsObjectNameInfoEXT* src)
+{
+    sType = src->sType;
+    objectType = src->objectType;
+    objectHandle = src->objectHandle;
+    pNext = SafePnextCopy(src->pNext);
+    pObjectName = SafeStringCopy(src->pObjectName);
+}
+
+safe_VkDebugUtilsObjectTagInfoEXT::safe_VkDebugUtilsObjectTagInfoEXT(const VkDebugUtilsObjectTagInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    objectType(in_struct->objectType),
+    objectHandle(in_struct->objectHandle),
+    tagName(in_struct->tagName),
+    tagSize(in_struct->tagSize),
+    pTag(in_struct->pTag)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDebugUtilsObjectTagInfoEXT::safe_VkDebugUtilsObjectTagInfoEXT() :
+    pNext(nullptr),
+    pTag(nullptr)
+{}
+
+safe_VkDebugUtilsObjectTagInfoEXT::safe_VkDebugUtilsObjectTagInfoEXT(const safe_VkDebugUtilsObjectTagInfoEXT& src)
+{
+    sType = src.sType;
+    objectType = src.objectType;
+    objectHandle = src.objectHandle;
+    tagName = src.tagName;
+    tagSize = src.tagSize;
+    pTag = src.pTag;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDebugUtilsObjectTagInfoEXT& safe_VkDebugUtilsObjectTagInfoEXT::operator=(const safe_VkDebugUtilsObjectTagInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    objectType = src.objectType;
+    objectHandle = src.objectHandle;
+    tagName = src.tagName;
+    tagSize = src.tagSize;
+    pTag = src.pTag;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDebugUtilsObjectTagInfoEXT::~safe_VkDebugUtilsObjectTagInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDebugUtilsObjectTagInfoEXT::initialize(const VkDebugUtilsObjectTagInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    objectType = in_struct->objectType;
+    objectHandle = in_struct->objectHandle;
+    tagName = in_struct->tagName;
+    tagSize = in_struct->tagSize;
+    pTag = in_struct->pTag;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDebugUtilsObjectTagInfoEXT::initialize(const safe_VkDebugUtilsObjectTagInfoEXT* src)
+{
+    sType = src->sType;
+    objectType = src->objectType;
+    objectHandle = src->objectHandle;
+    tagName = src->tagName;
+    tagSize = src->tagSize;
+    pTag = src->pTag;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDebugUtilsLabelEXT::safe_VkDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT* in_struct) :
+    sType(in_struct->sType)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    pLabelName = SafeStringCopy(in_struct->pLabelName);
+    for (uint32_t i = 0; i < 4; ++i) {
+        color[i] = in_struct->color[i];
+    }
+}
+
+safe_VkDebugUtilsLabelEXT::safe_VkDebugUtilsLabelEXT() :
+    pNext(nullptr),
+    pLabelName(nullptr)
+{}
+
+safe_VkDebugUtilsLabelEXT::safe_VkDebugUtilsLabelEXT(const safe_VkDebugUtilsLabelEXT& src)
+{
+    sType = src.sType;
+    pNext = SafePnextCopy(src.pNext);
+    pLabelName = SafeStringCopy(src.pLabelName);
+    for (uint32_t i = 0; i < 4; ++i) {
+        color[i] = src.color[i];
+    }
+}
+
+safe_VkDebugUtilsLabelEXT& safe_VkDebugUtilsLabelEXT::operator=(const safe_VkDebugUtilsLabelEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pLabelName) delete [] pLabelName;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    pNext = SafePnextCopy(src.pNext);
+    pLabelName = SafeStringCopy(src.pLabelName);
+    for (uint32_t i = 0; i < 4; ++i) {
+        color[i] = src.color[i];
+    }
+
+    return *this;
+}
+
+safe_VkDebugUtilsLabelEXT::~safe_VkDebugUtilsLabelEXT()
+{
+    if (pLabelName) delete [] pLabelName;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDebugUtilsLabelEXT::initialize(const VkDebugUtilsLabelEXT* in_struct)
+{
+    sType = in_struct->sType;
+    pNext = SafePnextCopy(in_struct->pNext);
+    pLabelName = SafeStringCopy(in_struct->pLabelName);
+    for (uint32_t i = 0; i < 4; ++i) {
+        color[i] = in_struct->color[i];
+    }
+}
+
+void safe_VkDebugUtilsLabelEXT::initialize(const safe_VkDebugUtilsLabelEXT* src)
+{
+    sType = src->sType;
+    pNext = SafePnextCopy(src->pNext);
+    pLabelName = SafeStringCopy(src->pLabelName);
+    for (uint32_t i = 0; i < 4; ++i) {
+        color[i] = src->color[i];
+    }
+}
+
+safe_VkDebugUtilsMessengerCallbackDataEXT::safe_VkDebugUtilsMessengerCallbackDataEXT(const VkDebugUtilsMessengerCallbackDataEXT* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    messageIdNumber(in_struct->messageIdNumber),
+    queueLabelCount(in_struct->queueLabelCount),
+    pQueueLabels(nullptr),
+    cmdBufLabelCount(in_struct->cmdBufLabelCount),
+    pCmdBufLabels(nullptr),
+    objectCount(in_struct->objectCount),
+    pObjects(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    pMessageIdName = SafeStringCopy(in_struct->pMessageIdName);
+    pMessage = SafeStringCopy(in_struct->pMessage);
+    if (queueLabelCount && in_struct->pQueueLabels) {
+        pQueueLabels = new safe_VkDebugUtilsLabelEXT[queueLabelCount];
+        for (uint32_t i = 0; i < queueLabelCount; ++i) {
+            pQueueLabels[i].initialize(&in_struct->pQueueLabels[i]);
+        }
+    }
+    if (cmdBufLabelCount && in_struct->pCmdBufLabels) {
+        pCmdBufLabels = new safe_VkDebugUtilsLabelEXT[cmdBufLabelCount];
+        for (uint32_t i = 0; i < cmdBufLabelCount; ++i) {
+            pCmdBufLabels[i].initialize(&in_struct->pCmdBufLabels[i]);
+        }
+    }
+    if (objectCount && in_struct->pObjects) {
+        pObjects = new safe_VkDebugUtilsObjectNameInfoEXT[objectCount];
+        for (uint32_t i = 0; i < objectCount; ++i) {
+            pObjects[i].initialize(&in_struct->pObjects[i]);
+        }
+    }
+}
+
+safe_VkDebugUtilsMessengerCallbackDataEXT::safe_VkDebugUtilsMessengerCallbackDataEXT() :
+    pNext(nullptr),
+    pMessageIdName(nullptr),
+    pMessage(nullptr),
+    pQueueLabels(nullptr),
+    pCmdBufLabels(nullptr),
+    pObjects(nullptr)
+{}
+
+safe_VkDebugUtilsMessengerCallbackDataEXT::safe_VkDebugUtilsMessengerCallbackDataEXT(const safe_VkDebugUtilsMessengerCallbackDataEXT& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    messageIdNumber = src.messageIdNumber;
+    queueLabelCount = src.queueLabelCount;
+    pQueueLabels = nullptr;
+    cmdBufLabelCount = src.cmdBufLabelCount;
+    pCmdBufLabels = nullptr;
+    objectCount = src.objectCount;
+    pObjects = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    pMessageIdName = SafeStringCopy(src.pMessageIdName);
+    pMessage = SafeStringCopy(src.pMessage);
+    if (queueLabelCount && src.pQueueLabels) {
+        pQueueLabels = new safe_VkDebugUtilsLabelEXT[queueLabelCount];
+        for (uint32_t i = 0; i < queueLabelCount; ++i) {
+            pQueueLabels[i].initialize(&src.pQueueLabels[i]);
+        }
+    }
+    if (cmdBufLabelCount && src.pCmdBufLabels) {
+        pCmdBufLabels = new safe_VkDebugUtilsLabelEXT[cmdBufLabelCount];
+        for (uint32_t i = 0; i < cmdBufLabelCount; ++i) {
+            pCmdBufLabels[i].initialize(&src.pCmdBufLabels[i]);
+        }
+    }
+    if (objectCount && src.pObjects) {
+        pObjects = new safe_VkDebugUtilsObjectNameInfoEXT[objectCount];
+        for (uint32_t i = 0; i < objectCount; ++i) {
+            pObjects[i].initialize(&src.pObjects[i]);
+        }
+    }
+}
+
+safe_VkDebugUtilsMessengerCallbackDataEXT& safe_VkDebugUtilsMessengerCallbackDataEXT::operator=(const safe_VkDebugUtilsMessengerCallbackDataEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pMessageIdName) delete [] pMessageIdName;
+    if (pMessage) delete [] pMessage;
+    if (pQueueLabels)
+        delete[] pQueueLabels;
+    if (pCmdBufLabels)
+        delete[] pCmdBufLabels;
+    if (pObjects)
+        delete[] pObjects;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    messageIdNumber = src.messageIdNumber;
+    queueLabelCount = src.queueLabelCount;
+    pQueueLabels = nullptr;
+    cmdBufLabelCount = src.cmdBufLabelCount;
+    pCmdBufLabels = nullptr;
+    objectCount = src.objectCount;
+    pObjects = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    pMessageIdName = SafeStringCopy(src.pMessageIdName);
+    pMessage = SafeStringCopy(src.pMessage);
+    if (queueLabelCount && src.pQueueLabels) {
+        pQueueLabels = new safe_VkDebugUtilsLabelEXT[queueLabelCount];
+        for (uint32_t i = 0; i < queueLabelCount; ++i) {
+            pQueueLabels[i].initialize(&src.pQueueLabels[i]);
+        }
+    }
+    if (cmdBufLabelCount && src.pCmdBufLabels) {
+        pCmdBufLabels = new safe_VkDebugUtilsLabelEXT[cmdBufLabelCount];
+        for (uint32_t i = 0; i < cmdBufLabelCount; ++i) {
+            pCmdBufLabels[i].initialize(&src.pCmdBufLabels[i]);
+        }
+    }
+    if (objectCount && src.pObjects) {
+        pObjects = new safe_VkDebugUtilsObjectNameInfoEXT[objectCount];
+        for (uint32_t i = 0; i < objectCount; ++i) {
+            pObjects[i].initialize(&src.pObjects[i]);
+        }
+    }
+
+    return *this;
+}
+
+safe_VkDebugUtilsMessengerCallbackDataEXT::~safe_VkDebugUtilsMessengerCallbackDataEXT()
+{
+    if (pMessageIdName) delete [] pMessageIdName;
+    if (pMessage) delete [] pMessage;
+    if (pQueueLabels)
+        delete[] pQueueLabels;
+    if (pCmdBufLabels)
+        delete[] pCmdBufLabels;
+    if (pObjects)
+        delete[] pObjects;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDebugUtilsMessengerCallbackDataEXT::initialize(const VkDebugUtilsMessengerCallbackDataEXT* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    messageIdNumber = in_struct->messageIdNumber;
+    queueLabelCount = in_struct->queueLabelCount;
+    pQueueLabels = nullptr;
+    cmdBufLabelCount = in_struct->cmdBufLabelCount;
+    pCmdBufLabels = nullptr;
+    objectCount = in_struct->objectCount;
+    pObjects = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    pMessageIdName = SafeStringCopy(in_struct->pMessageIdName);
+    pMessage = SafeStringCopy(in_struct->pMessage);
+    if (queueLabelCount && in_struct->pQueueLabels) {
+        pQueueLabels = new safe_VkDebugUtilsLabelEXT[queueLabelCount];
+        for (uint32_t i = 0; i < queueLabelCount; ++i) {
+            pQueueLabels[i].initialize(&in_struct->pQueueLabels[i]);
+        }
+    }
+    if (cmdBufLabelCount && in_struct->pCmdBufLabels) {
+        pCmdBufLabels = new safe_VkDebugUtilsLabelEXT[cmdBufLabelCount];
+        for (uint32_t i = 0; i < cmdBufLabelCount; ++i) {
+            pCmdBufLabels[i].initialize(&in_struct->pCmdBufLabels[i]);
+        }
+    }
+    if (objectCount && in_struct->pObjects) {
+        pObjects = new safe_VkDebugUtilsObjectNameInfoEXT[objectCount];
+        for (uint32_t i = 0; i < objectCount; ++i) {
+            pObjects[i].initialize(&in_struct->pObjects[i]);
+        }
+    }
+}
+
+void safe_VkDebugUtilsMessengerCallbackDataEXT::initialize(const safe_VkDebugUtilsMessengerCallbackDataEXT* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    messageIdNumber = src->messageIdNumber;
+    queueLabelCount = src->queueLabelCount;
+    pQueueLabels = nullptr;
+    cmdBufLabelCount = src->cmdBufLabelCount;
+    pCmdBufLabels = nullptr;
+    objectCount = src->objectCount;
+    pObjects = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    pMessageIdName = SafeStringCopy(src->pMessageIdName);
+    pMessage = SafeStringCopy(src->pMessage);
+    if (queueLabelCount && src->pQueueLabels) {
+        pQueueLabels = new safe_VkDebugUtilsLabelEXT[queueLabelCount];
+        for (uint32_t i = 0; i < queueLabelCount; ++i) {
+            pQueueLabels[i].initialize(&src->pQueueLabels[i]);
+        }
+    }
+    if (cmdBufLabelCount && src->pCmdBufLabels) {
+        pCmdBufLabels = new safe_VkDebugUtilsLabelEXT[cmdBufLabelCount];
+        for (uint32_t i = 0; i < cmdBufLabelCount; ++i) {
+            pCmdBufLabels[i].initialize(&src->pCmdBufLabels[i]);
+        }
+    }
+    if (objectCount && src->pObjects) {
+        pObjects = new safe_VkDebugUtilsObjectNameInfoEXT[objectCount];
+        for (uint32_t i = 0; i < objectCount; ++i) {
+            pObjects[i].initialize(&src->pObjects[i]);
+        }
+    }
+}
+
+safe_VkDebugUtilsMessengerCreateInfoEXT::safe_VkDebugUtilsMessengerCreateInfoEXT(const VkDebugUtilsMessengerCreateInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    messageSeverity(in_struct->messageSeverity),
+    messageType(in_struct->messageType),
+    pfnUserCallback(in_struct->pfnUserCallback),
+    pUserData(in_struct->pUserData)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDebugUtilsMessengerCreateInfoEXT::safe_VkDebugUtilsMessengerCreateInfoEXT() :
+    pNext(nullptr),
+    pUserData(nullptr)
+{}
+
+safe_VkDebugUtilsMessengerCreateInfoEXT::safe_VkDebugUtilsMessengerCreateInfoEXT(const safe_VkDebugUtilsMessengerCreateInfoEXT& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    messageSeverity = src.messageSeverity;
+    messageType = src.messageType;
+    pfnUserCallback = src.pfnUserCallback;
+    pUserData = src.pUserData;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDebugUtilsMessengerCreateInfoEXT& safe_VkDebugUtilsMessengerCreateInfoEXT::operator=(const safe_VkDebugUtilsMessengerCreateInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    messageSeverity = src.messageSeverity;
+    messageType = src.messageType;
+    pfnUserCallback = src.pfnUserCallback;
+    pUserData = src.pUserData;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDebugUtilsMessengerCreateInfoEXT::~safe_VkDebugUtilsMessengerCreateInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDebugUtilsMessengerCreateInfoEXT::initialize(const VkDebugUtilsMessengerCreateInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    messageSeverity = in_struct->messageSeverity;
+    messageType = in_struct->messageType;
+    pfnUserCallback = in_struct->pfnUserCallback;
+    pUserData = in_struct->pUserData;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDebugUtilsMessengerCreateInfoEXT::initialize(const safe_VkDebugUtilsMessengerCreateInfoEXT* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    messageSeverity = src->messageSeverity;
+    messageType = src->messageType;
+    pfnUserCallback = src->pfnUserCallback;
+    pUserData = src->pUserData;
+    pNext = SafePnextCopy(src->pNext);
+}
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+
+safe_VkAndroidHardwareBufferUsageANDROID::safe_VkAndroidHardwareBufferUsageANDROID(const VkAndroidHardwareBufferUsageANDROID* in_struct) :
+    sType(in_struct->sType),
+    androidHardwareBufferUsage(in_struct->androidHardwareBufferUsage)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkAndroidHardwareBufferUsageANDROID::safe_VkAndroidHardwareBufferUsageANDROID() :
+    pNext(nullptr)
+{}
+
+safe_VkAndroidHardwareBufferUsageANDROID::safe_VkAndroidHardwareBufferUsageANDROID(const safe_VkAndroidHardwareBufferUsageANDROID& src)
+{
+    sType = src.sType;
+    androidHardwareBufferUsage = src.androidHardwareBufferUsage;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkAndroidHardwareBufferUsageANDROID& safe_VkAndroidHardwareBufferUsageANDROID::operator=(const safe_VkAndroidHardwareBufferUsageANDROID& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    androidHardwareBufferUsage = src.androidHardwareBufferUsage;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkAndroidHardwareBufferUsageANDROID::~safe_VkAndroidHardwareBufferUsageANDROID()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkAndroidHardwareBufferUsageANDROID::initialize(const VkAndroidHardwareBufferUsageANDROID* in_struct)
+{
+    sType = in_struct->sType;
+    androidHardwareBufferUsage = in_struct->androidHardwareBufferUsage;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkAndroidHardwareBufferUsageANDROID::initialize(const safe_VkAndroidHardwareBufferUsageANDROID* src)
+{
+    sType = src->sType;
+    androidHardwareBufferUsage = src->androidHardwareBufferUsage;
+    pNext = SafePnextCopy(src->pNext);
+}
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+
+safe_VkAndroidHardwareBufferPropertiesANDROID::safe_VkAndroidHardwareBufferPropertiesANDROID(const VkAndroidHardwareBufferPropertiesANDROID* in_struct) :
+    sType(in_struct->sType),
+    allocationSize(in_struct->allocationSize),
+    memoryTypeBits(in_struct->memoryTypeBits)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkAndroidHardwareBufferPropertiesANDROID::safe_VkAndroidHardwareBufferPropertiesANDROID() :
+    pNext(nullptr)
+{}
+
+safe_VkAndroidHardwareBufferPropertiesANDROID::safe_VkAndroidHardwareBufferPropertiesANDROID(const safe_VkAndroidHardwareBufferPropertiesANDROID& src)
+{
+    sType = src.sType;
+    allocationSize = src.allocationSize;
+    memoryTypeBits = src.memoryTypeBits;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkAndroidHardwareBufferPropertiesANDROID& safe_VkAndroidHardwareBufferPropertiesANDROID::operator=(const safe_VkAndroidHardwareBufferPropertiesANDROID& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    allocationSize = src.allocationSize;
+    memoryTypeBits = src.memoryTypeBits;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkAndroidHardwareBufferPropertiesANDROID::~safe_VkAndroidHardwareBufferPropertiesANDROID()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkAndroidHardwareBufferPropertiesANDROID::initialize(const VkAndroidHardwareBufferPropertiesANDROID* in_struct)
+{
+    sType = in_struct->sType;
+    allocationSize = in_struct->allocationSize;
+    memoryTypeBits = in_struct->memoryTypeBits;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkAndroidHardwareBufferPropertiesANDROID::initialize(const safe_VkAndroidHardwareBufferPropertiesANDROID* src)
+{
+    sType = src->sType;
+    allocationSize = src->allocationSize;
+    memoryTypeBits = src->memoryTypeBits;
+    pNext = SafePnextCopy(src->pNext);
+}
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+
+safe_VkAndroidHardwareBufferFormatPropertiesANDROID::safe_VkAndroidHardwareBufferFormatPropertiesANDROID(const VkAndroidHardwareBufferFormatPropertiesANDROID* in_struct) :
+    sType(in_struct->sType),
+    format(in_struct->format),
+    externalFormat(in_struct->externalFormat),
+    formatFeatures(in_struct->formatFeatures),
+    samplerYcbcrConversionComponents(in_struct->samplerYcbcrConversionComponents),
+    suggestedYcbcrModel(in_struct->suggestedYcbcrModel),
+    suggestedYcbcrRange(in_struct->suggestedYcbcrRange),
+    suggestedXChromaOffset(in_struct->suggestedXChromaOffset),
+    suggestedYChromaOffset(in_struct->suggestedYChromaOffset)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkAndroidHardwareBufferFormatPropertiesANDROID::safe_VkAndroidHardwareBufferFormatPropertiesANDROID() :
+    pNext(nullptr)
+{}
+
+safe_VkAndroidHardwareBufferFormatPropertiesANDROID::safe_VkAndroidHardwareBufferFormatPropertiesANDROID(const safe_VkAndroidHardwareBufferFormatPropertiesANDROID& src)
+{
+    sType = src.sType;
+    format = src.format;
+    externalFormat = src.externalFormat;
+    formatFeatures = src.formatFeatures;
+    samplerYcbcrConversionComponents = src.samplerYcbcrConversionComponents;
+    suggestedYcbcrModel = src.suggestedYcbcrModel;
+    suggestedYcbcrRange = src.suggestedYcbcrRange;
+    suggestedXChromaOffset = src.suggestedXChromaOffset;
+    suggestedYChromaOffset = src.suggestedYChromaOffset;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkAndroidHardwareBufferFormatPropertiesANDROID& safe_VkAndroidHardwareBufferFormatPropertiesANDROID::operator=(const safe_VkAndroidHardwareBufferFormatPropertiesANDROID& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    format = src.format;
+    externalFormat = src.externalFormat;
+    formatFeatures = src.formatFeatures;
+    samplerYcbcrConversionComponents = src.samplerYcbcrConversionComponents;
+    suggestedYcbcrModel = src.suggestedYcbcrModel;
+    suggestedYcbcrRange = src.suggestedYcbcrRange;
+    suggestedXChromaOffset = src.suggestedXChromaOffset;
+    suggestedYChromaOffset = src.suggestedYChromaOffset;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkAndroidHardwareBufferFormatPropertiesANDROID::~safe_VkAndroidHardwareBufferFormatPropertiesANDROID()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkAndroidHardwareBufferFormatPropertiesANDROID::initialize(const VkAndroidHardwareBufferFormatPropertiesANDROID* in_struct)
+{
+    sType = in_struct->sType;
+    format = in_struct->format;
+    externalFormat = in_struct->externalFormat;
+    formatFeatures = in_struct->formatFeatures;
+    samplerYcbcrConversionComponents = in_struct->samplerYcbcrConversionComponents;
+    suggestedYcbcrModel = in_struct->suggestedYcbcrModel;
+    suggestedYcbcrRange = in_struct->suggestedYcbcrRange;
+    suggestedXChromaOffset = in_struct->suggestedXChromaOffset;
+    suggestedYChromaOffset = in_struct->suggestedYChromaOffset;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkAndroidHardwareBufferFormatPropertiesANDROID::initialize(const safe_VkAndroidHardwareBufferFormatPropertiesANDROID* src)
+{
+    sType = src->sType;
+    format = src->format;
+    externalFormat = src->externalFormat;
+    formatFeatures = src->formatFeatures;
+    samplerYcbcrConversionComponents = src->samplerYcbcrConversionComponents;
+    suggestedYcbcrModel = src->suggestedYcbcrModel;
+    suggestedYcbcrRange = src->suggestedYcbcrRange;
+    suggestedXChromaOffset = src->suggestedXChromaOffset;
+    suggestedYChromaOffset = src->suggestedYChromaOffset;
+    pNext = SafePnextCopy(src->pNext);
+}
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+
+safe_VkImportAndroidHardwareBufferInfoANDROID::safe_VkImportAndroidHardwareBufferInfoANDROID(const VkImportAndroidHardwareBufferInfoANDROID* in_struct) :
+    sType(in_struct->sType),
+    buffer(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    buffer = in_struct->buffer;
+}
+
+safe_VkImportAndroidHardwareBufferInfoANDROID::safe_VkImportAndroidHardwareBufferInfoANDROID() :
+    pNext(nullptr),
+    buffer(nullptr)
+{}
+
+safe_VkImportAndroidHardwareBufferInfoANDROID::safe_VkImportAndroidHardwareBufferInfoANDROID(const safe_VkImportAndroidHardwareBufferInfoANDROID& src)
+{
+    sType = src.sType;
+    pNext = SafePnextCopy(src.pNext);
+    buffer = src.buffer;
+}
+
+safe_VkImportAndroidHardwareBufferInfoANDROID& safe_VkImportAndroidHardwareBufferInfoANDROID::operator=(const safe_VkImportAndroidHardwareBufferInfoANDROID& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    pNext = SafePnextCopy(src.pNext);
+    buffer = src.buffer;
+
+    return *this;
+}
+
+safe_VkImportAndroidHardwareBufferInfoANDROID::~safe_VkImportAndroidHardwareBufferInfoANDROID()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkImportAndroidHardwareBufferInfoANDROID::initialize(const VkImportAndroidHardwareBufferInfoANDROID* in_struct)
+{
+    sType = in_struct->sType;
+    pNext = SafePnextCopy(in_struct->pNext);
+    buffer = in_struct->buffer;
+}
+
+void safe_VkImportAndroidHardwareBufferInfoANDROID::initialize(const safe_VkImportAndroidHardwareBufferInfoANDROID* src)
+{
+    sType = src->sType;
+    pNext = SafePnextCopy(src->pNext);
+    buffer = src->buffer;
+}
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+
+safe_VkMemoryGetAndroidHardwareBufferInfoANDROID::safe_VkMemoryGetAndroidHardwareBufferInfoANDROID(const VkMemoryGetAndroidHardwareBufferInfoANDROID* in_struct) :
+    sType(in_struct->sType),
+    memory(in_struct->memory)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkMemoryGetAndroidHardwareBufferInfoANDROID::safe_VkMemoryGetAndroidHardwareBufferInfoANDROID() :
+    pNext(nullptr)
+{}
+
+safe_VkMemoryGetAndroidHardwareBufferInfoANDROID::safe_VkMemoryGetAndroidHardwareBufferInfoANDROID(const safe_VkMemoryGetAndroidHardwareBufferInfoANDROID& src)
+{
+    sType = src.sType;
+    memory = src.memory;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkMemoryGetAndroidHardwareBufferInfoANDROID& safe_VkMemoryGetAndroidHardwareBufferInfoANDROID::operator=(const safe_VkMemoryGetAndroidHardwareBufferInfoANDROID& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    memory = src.memory;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkMemoryGetAndroidHardwareBufferInfoANDROID::~safe_VkMemoryGetAndroidHardwareBufferInfoANDROID()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkMemoryGetAndroidHardwareBufferInfoANDROID::initialize(const VkMemoryGetAndroidHardwareBufferInfoANDROID* in_struct)
+{
+    sType = in_struct->sType;
+    memory = in_struct->memory;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkMemoryGetAndroidHardwareBufferInfoANDROID::initialize(const safe_VkMemoryGetAndroidHardwareBufferInfoANDROID* src)
+{
+    sType = src->sType;
+    memory = src->memory;
+    pNext = SafePnextCopy(src->pNext);
+}
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+
+
+safe_VkExternalFormatANDROID::safe_VkExternalFormatANDROID(const VkExternalFormatANDROID* in_struct) :
+    sType(in_struct->sType),
+    externalFormat(in_struct->externalFormat)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkExternalFormatANDROID::safe_VkExternalFormatANDROID() :
+    pNext(nullptr)
+{}
+
+safe_VkExternalFormatANDROID::safe_VkExternalFormatANDROID(const safe_VkExternalFormatANDROID& src)
+{
+    sType = src.sType;
+    externalFormat = src.externalFormat;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkExternalFormatANDROID& safe_VkExternalFormatANDROID::operator=(const safe_VkExternalFormatANDROID& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    externalFormat = src.externalFormat;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkExternalFormatANDROID::~safe_VkExternalFormatANDROID()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkExternalFormatANDROID::initialize(const VkExternalFormatANDROID* in_struct)
+{
+    sType = in_struct->sType;
+    externalFormat = in_struct->externalFormat;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkExternalFormatANDROID::initialize(const safe_VkExternalFormatANDROID* src)
+{
+    sType = src->sType;
+    externalFormat = src->externalFormat;
+    pNext = SafePnextCopy(src->pNext);
+}
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+
+safe_VkSamplerReductionModeCreateInfoEXT::safe_VkSamplerReductionModeCreateInfoEXT(const VkSamplerReductionModeCreateInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    reductionMode(in_struct->reductionMode)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkSamplerReductionModeCreateInfoEXT::safe_VkSamplerReductionModeCreateInfoEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkSamplerReductionModeCreateInfoEXT::safe_VkSamplerReductionModeCreateInfoEXT(const safe_VkSamplerReductionModeCreateInfoEXT& src)
+{
+    sType = src.sType;
+    reductionMode = src.reductionMode;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkSamplerReductionModeCreateInfoEXT& safe_VkSamplerReductionModeCreateInfoEXT::operator=(const safe_VkSamplerReductionModeCreateInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    reductionMode = src.reductionMode;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkSamplerReductionModeCreateInfoEXT::~safe_VkSamplerReductionModeCreateInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSamplerReductionModeCreateInfoEXT::initialize(const VkSamplerReductionModeCreateInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    reductionMode = in_struct->reductionMode;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkSamplerReductionModeCreateInfoEXT::initialize(const safe_VkSamplerReductionModeCreateInfoEXT* src)
+{
+    sType = src->sType;
+    reductionMode = src->reductionMode;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT::safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT(const VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT* in_struct) :
+    sType(in_struct->sType),
+    filterMinmaxSingleComponentFormats(in_struct->filterMinmaxSingleComponentFormats),
+    filterMinmaxImageComponentMapping(in_struct->filterMinmaxImageComponentMapping)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT::safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT::safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT(const safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT& src)
+{
+    sType = src.sType;
+    filterMinmaxSingleComponentFormats = src.filterMinmaxSingleComponentFormats;
+    filterMinmaxImageComponentMapping = src.filterMinmaxImageComponentMapping;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT& safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT::operator=(const safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    filterMinmaxSingleComponentFormats = src.filterMinmaxSingleComponentFormats;
+    filterMinmaxImageComponentMapping = src.filterMinmaxImageComponentMapping;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT::~safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT::initialize(const VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    filterMinmaxSingleComponentFormats = in_struct->filterMinmaxSingleComponentFormats;
+    filterMinmaxImageComponentMapping = in_struct->filterMinmaxImageComponentMapping;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT::initialize(const safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT* src)
+{
+    sType = src->sType;
+    filterMinmaxSingleComponentFormats = src->filterMinmaxSingleComponentFormats;
+    filterMinmaxImageComponentMapping = src->filterMinmaxImageComponentMapping;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT::safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT(const VkPhysicalDeviceInlineUniformBlockFeaturesEXT* in_struct) :
+    sType(in_struct->sType),
+    inlineUniformBlock(in_struct->inlineUniformBlock),
+    descriptorBindingInlineUniformBlockUpdateAfterBind(in_struct->descriptorBindingInlineUniformBlockUpdateAfterBind)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT::safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT::safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT(const safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT& src)
+{
+    sType = src.sType;
+    inlineUniformBlock = src.inlineUniformBlock;
+    descriptorBindingInlineUniformBlockUpdateAfterBind = src.descriptorBindingInlineUniformBlockUpdateAfterBind;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT& safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT::operator=(const safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    inlineUniformBlock = src.inlineUniformBlock;
+    descriptorBindingInlineUniformBlockUpdateAfterBind = src.descriptorBindingInlineUniformBlockUpdateAfterBind;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT::~safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT::initialize(const VkPhysicalDeviceInlineUniformBlockFeaturesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    inlineUniformBlock = in_struct->inlineUniformBlock;
+    descriptorBindingInlineUniformBlockUpdateAfterBind = in_struct->descriptorBindingInlineUniformBlockUpdateAfterBind;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT::initialize(const safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT* src)
+{
+    sType = src->sType;
+    inlineUniformBlock = src->inlineUniformBlock;
+    descriptorBindingInlineUniformBlockUpdateAfterBind = src->descriptorBindingInlineUniformBlockUpdateAfterBind;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT::safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT(const VkPhysicalDeviceInlineUniformBlockPropertiesEXT* in_struct) :
+    sType(in_struct->sType),
+    maxInlineUniformBlockSize(in_struct->maxInlineUniformBlockSize),
+    maxPerStageDescriptorInlineUniformBlocks(in_struct->maxPerStageDescriptorInlineUniformBlocks),
+    maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks(in_struct->maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks),
+    maxDescriptorSetInlineUniformBlocks(in_struct->maxDescriptorSetInlineUniformBlocks),
+    maxDescriptorSetUpdateAfterBindInlineUniformBlocks(in_struct->maxDescriptorSetUpdateAfterBindInlineUniformBlocks)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT::safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT::safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT(const safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT& src)
+{
+    sType = src.sType;
+    maxInlineUniformBlockSize = src.maxInlineUniformBlockSize;
+    maxPerStageDescriptorInlineUniformBlocks = src.maxPerStageDescriptorInlineUniformBlocks;
+    maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = src.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks;
+    maxDescriptorSetInlineUniformBlocks = src.maxDescriptorSetInlineUniformBlocks;
+    maxDescriptorSetUpdateAfterBindInlineUniformBlocks = src.maxDescriptorSetUpdateAfterBindInlineUniformBlocks;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT& safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT::operator=(const safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    maxInlineUniformBlockSize = src.maxInlineUniformBlockSize;
+    maxPerStageDescriptorInlineUniformBlocks = src.maxPerStageDescriptorInlineUniformBlocks;
+    maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = src.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks;
+    maxDescriptorSetInlineUniformBlocks = src.maxDescriptorSetInlineUniformBlocks;
+    maxDescriptorSetUpdateAfterBindInlineUniformBlocks = src.maxDescriptorSetUpdateAfterBindInlineUniformBlocks;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT::~safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT::initialize(const VkPhysicalDeviceInlineUniformBlockPropertiesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    maxInlineUniformBlockSize = in_struct->maxInlineUniformBlockSize;
+    maxPerStageDescriptorInlineUniformBlocks = in_struct->maxPerStageDescriptorInlineUniformBlocks;
+    maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = in_struct->maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks;
+    maxDescriptorSetInlineUniformBlocks = in_struct->maxDescriptorSetInlineUniformBlocks;
+    maxDescriptorSetUpdateAfterBindInlineUniformBlocks = in_struct->maxDescriptorSetUpdateAfterBindInlineUniformBlocks;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT::initialize(const safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT* src)
+{
+    sType = src->sType;
+    maxInlineUniformBlockSize = src->maxInlineUniformBlockSize;
+    maxPerStageDescriptorInlineUniformBlocks = src->maxPerStageDescriptorInlineUniformBlocks;
+    maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = src->maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks;
+    maxDescriptorSetInlineUniformBlocks = src->maxDescriptorSetInlineUniformBlocks;
+    maxDescriptorSetUpdateAfterBindInlineUniformBlocks = src->maxDescriptorSetUpdateAfterBindInlineUniformBlocks;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkWriteDescriptorSetInlineUniformBlockEXT::safe_VkWriteDescriptorSetInlineUniformBlockEXT(const VkWriteDescriptorSetInlineUniformBlockEXT* in_struct) :
+    sType(in_struct->sType),
+    dataSize(in_struct->dataSize),
+    pData(in_struct->pData)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkWriteDescriptorSetInlineUniformBlockEXT::safe_VkWriteDescriptorSetInlineUniformBlockEXT() :
+    pNext(nullptr),
+    pData(nullptr)
+{}
+
+safe_VkWriteDescriptorSetInlineUniformBlockEXT::safe_VkWriteDescriptorSetInlineUniformBlockEXT(const safe_VkWriteDescriptorSetInlineUniformBlockEXT& src)
+{
+    sType = src.sType;
+    dataSize = src.dataSize;
+    pData = src.pData;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkWriteDescriptorSetInlineUniformBlockEXT& safe_VkWriteDescriptorSetInlineUniformBlockEXT::operator=(const safe_VkWriteDescriptorSetInlineUniformBlockEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    dataSize = src.dataSize;
+    pData = src.pData;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkWriteDescriptorSetInlineUniformBlockEXT::~safe_VkWriteDescriptorSetInlineUniformBlockEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkWriteDescriptorSetInlineUniformBlockEXT::initialize(const VkWriteDescriptorSetInlineUniformBlockEXT* in_struct)
+{
+    sType = in_struct->sType;
+    dataSize = in_struct->dataSize;
+    pData = in_struct->pData;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkWriteDescriptorSetInlineUniformBlockEXT::initialize(const safe_VkWriteDescriptorSetInlineUniformBlockEXT* src)
+{
+    sType = src->sType;
+    dataSize = src->dataSize;
+    pData = src->pData;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT::safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT(const VkDescriptorPoolInlineUniformBlockCreateInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    maxInlineUniformBlockBindings(in_struct->maxInlineUniformBlockBindings)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT::safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT::safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT(const safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT& src)
+{
+    sType = src.sType;
+    maxInlineUniformBlockBindings = src.maxInlineUniformBlockBindings;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT& safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT::operator=(const safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    maxInlineUniformBlockBindings = src.maxInlineUniformBlockBindings;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT::~safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT::initialize(const VkDescriptorPoolInlineUniformBlockCreateInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    maxInlineUniformBlockBindings = in_struct->maxInlineUniformBlockBindings;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT::initialize(const safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT* src)
+{
+    sType = src->sType;
+    maxInlineUniformBlockBindings = src->maxInlineUniformBlockBindings;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkSampleLocationsInfoEXT::safe_VkSampleLocationsInfoEXT(const VkSampleLocationsInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    sampleLocationsPerPixel(in_struct->sampleLocationsPerPixel),
+    sampleLocationGridSize(in_struct->sampleLocationGridSize),
+    sampleLocationsCount(in_struct->sampleLocationsCount),
+    pSampleLocations(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pSampleLocations) {
+        pSampleLocations = new VkSampleLocationEXT[in_struct->sampleLocationsCount];
+        memcpy ((void *)pSampleLocations, (void *)in_struct->pSampleLocations, sizeof(VkSampleLocationEXT)*in_struct->sampleLocationsCount);
+    }
+}
+
+safe_VkSampleLocationsInfoEXT::safe_VkSampleLocationsInfoEXT() :
+    pNext(nullptr),
+    pSampleLocations(nullptr)
+{}
+
+safe_VkSampleLocationsInfoEXT::safe_VkSampleLocationsInfoEXT(const safe_VkSampleLocationsInfoEXT& src)
+{
+    sType = src.sType;
+    sampleLocationsPerPixel = src.sampleLocationsPerPixel;
+    sampleLocationGridSize = src.sampleLocationGridSize;
+    sampleLocationsCount = src.sampleLocationsCount;
+    pSampleLocations = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pSampleLocations) {
+        pSampleLocations = new VkSampleLocationEXT[src.sampleLocationsCount];
+        memcpy ((void *)pSampleLocations, (void *)src.pSampleLocations, sizeof(VkSampleLocationEXT)*src.sampleLocationsCount);
+    }
+}
+
+safe_VkSampleLocationsInfoEXT& safe_VkSampleLocationsInfoEXT::operator=(const safe_VkSampleLocationsInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pSampleLocations)
+        delete[] pSampleLocations;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    sampleLocationsPerPixel = src.sampleLocationsPerPixel;
+    sampleLocationGridSize = src.sampleLocationGridSize;
+    sampleLocationsCount = src.sampleLocationsCount;
+    pSampleLocations = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pSampleLocations) {
+        pSampleLocations = new VkSampleLocationEXT[src.sampleLocationsCount];
+        memcpy ((void *)pSampleLocations, (void *)src.pSampleLocations, sizeof(VkSampleLocationEXT)*src.sampleLocationsCount);
+    }
+
+    return *this;
+}
+
+safe_VkSampleLocationsInfoEXT::~safe_VkSampleLocationsInfoEXT()
+{
+    if (pSampleLocations)
+        delete[] pSampleLocations;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSampleLocationsInfoEXT::initialize(const VkSampleLocationsInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    sampleLocationsPerPixel = in_struct->sampleLocationsPerPixel;
+    sampleLocationGridSize = in_struct->sampleLocationGridSize;
+    sampleLocationsCount = in_struct->sampleLocationsCount;
+    pSampleLocations = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pSampleLocations) {
+        pSampleLocations = new VkSampleLocationEXT[in_struct->sampleLocationsCount];
+        memcpy ((void *)pSampleLocations, (void *)in_struct->pSampleLocations, sizeof(VkSampleLocationEXT)*in_struct->sampleLocationsCount);
+    }
+}
+
+void safe_VkSampleLocationsInfoEXT::initialize(const safe_VkSampleLocationsInfoEXT* src)
+{
+    sType = src->sType;
+    sampleLocationsPerPixel = src->sampleLocationsPerPixel;
+    sampleLocationGridSize = src->sampleLocationGridSize;
+    sampleLocationsCount = src->sampleLocationsCount;
+    pSampleLocations = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pSampleLocations) {
+        pSampleLocations = new VkSampleLocationEXT[src->sampleLocationsCount];
+        memcpy ((void *)pSampleLocations, (void *)src->pSampleLocations, sizeof(VkSampleLocationEXT)*src->sampleLocationsCount);
+    }
+}
+
+safe_VkRenderPassSampleLocationsBeginInfoEXT::safe_VkRenderPassSampleLocationsBeginInfoEXT(const VkRenderPassSampleLocationsBeginInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    attachmentInitialSampleLocationsCount(in_struct->attachmentInitialSampleLocationsCount),
+    pAttachmentInitialSampleLocations(nullptr),
+    postSubpassSampleLocationsCount(in_struct->postSubpassSampleLocationsCount),
+    pPostSubpassSampleLocations(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pAttachmentInitialSampleLocations) {
+        pAttachmentInitialSampleLocations = new VkAttachmentSampleLocationsEXT[in_struct->attachmentInitialSampleLocationsCount];
+        memcpy ((void *)pAttachmentInitialSampleLocations, (void *)in_struct->pAttachmentInitialSampleLocations, sizeof(VkAttachmentSampleLocationsEXT)*in_struct->attachmentInitialSampleLocationsCount);
+    }
+    if (in_struct->pPostSubpassSampleLocations) {
+        pPostSubpassSampleLocations = new VkSubpassSampleLocationsEXT[in_struct->postSubpassSampleLocationsCount];
+        memcpy ((void *)pPostSubpassSampleLocations, (void *)in_struct->pPostSubpassSampleLocations, sizeof(VkSubpassSampleLocationsEXT)*in_struct->postSubpassSampleLocationsCount);
+    }
+}
+
+safe_VkRenderPassSampleLocationsBeginInfoEXT::safe_VkRenderPassSampleLocationsBeginInfoEXT() :
+    pNext(nullptr),
+    pAttachmentInitialSampleLocations(nullptr),
+    pPostSubpassSampleLocations(nullptr)
+{}
+
+safe_VkRenderPassSampleLocationsBeginInfoEXT::safe_VkRenderPassSampleLocationsBeginInfoEXT(const safe_VkRenderPassSampleLocationsBeginInfoEXT& src)
+{
+    sType = src.sType;
+    attachmentInitialSampleLocationsCount = src.attachmentInitialSampleLocationsCount;
+    pAttachmentInitialSampleLocations = nullptr;
+    postSubpassSampleLocationsCount = src.postSubpassSampleLocationsCount;
+    pPostSubpassSampleLocations = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pAttachmentInitialSampleLocations) {
+        pAttachmentInitialSampleLocations = new VkAttachmentSampleLocationsEXT[src.attachmentInitialSampleLocationsCount];
+        memcpy ((void *)pAttachmentInitialSampleLocations, (void *)src.pAttachmentInitialSampleLocations, sizeof(VkAttachmentSampleLocationsEXT)*src.attachmentInitialSampleLocationsCount);
+    }
+    if (src.pPostSubpassSampleLocations) {
+        pPostSubpassSampleLocations = new VkSubpassSampleLocationsEXT[src.postSubpassSampleLocationsCount];
+        memcpy ((void *)pPostSubpassSampleLocations, (void *)src.pPostSubpassSampleLocations, sizeof(VkSubpassSampleLocationsEXT)*src.postSubpassSampleLocationsCount);
+    }
+}
+
+safe_VkRenderPassSampleLocationsBeginInfoEXT& safe_VkRenderPassSampleLocationsBeginInfoEXT::operator=(const safe_VkRenderPassSampleLocationsBeginInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pAttachmentInitialSampleLocations)
+        delete[] pAttachmentInitialSampleLocations;
+    if (pPostSubpassSampleLocations)
+        delete[] pPostSubpassSampleLocations;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    attachmentInitialSampleLocationsCount = src.attachmentInitialSampleLocationsCount;
+    pAttachmentInitialSampleLocations = nullptr;
+    postSubpassSampleLocationsCount = src.postSubpassSampleLocationsCount;
+    pPostSubpassSampleLocations = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pAttachmentInitialSampleLocations) {
+        pAttachmentInitialSampleLocations = new VkAttachmentSampleLocationsEXT[src.attachmentInitialSampleLocationsCount];
+        memcpy ((void *)pAttachmentInitialSampleLocations, (void *)src.pAttachmentInitialSampleLocations, sizeof(VkAttachmentSampleLocationsEXT)*src.attachmentInitialSampleLocationsCount);
+    }
+    if (src.pPostSubpassSampleLocations) {
+        pPostSubpassSampleLocations = new VkSubpassSampleLocationsEXT[src.postSubpassSampleLocationsCount];
+        memcpy ((void *)pPostSubpassSampleLocations, (void *)src.pPostSubpassSampleLocations, sizeof(VkSubpassSampleLocationsEXT)*src.postSubpassSampleLocationsCount);
+    }
+
+    return *this;
+}
+
+safe_VkRenderPassSampleLocationsBeginInfoEXT::~safe_VkRenderPassSampleLocationsBeginInfoEXT()
+{
+    if (pAttachmentInitialSampleLocations)
+        delete[] pAttachmentInitialSampleLocations;
+    if (pPostSubpassSampleLocations)
+        delete[] pPostSubpassSampleLocations;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkRenderPassSampleLocationsBeginInfoEXT::initialize(const VkRenderPassSampleLocationsBeginInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    attachmentInitialSampleLocationsCount = in_struct->attachmentInitialSampleLocationsCount;
+    pAttachmentInitialSampleLocations = nullptr;
+    postSubpassSampleLocationsCount = in_struct->postSubpassSampleLocationsCount;
+    pPostSubpassSampleLocations = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pAttachmentInitialSampleLocations) {
+        pAttachmentInitialSampleLocations = new VkAttachmentSampleLocationsEXT[in_struct->attachmentInitialSampleLocationsCount];
+        memcpy ((void *)pAttachmentInitialSampleLocations, (void *)in_struct->pAttachmentInitialSampleLocations, sizeof(VkAttachmentSampleLocationsEXT)*in_struct->attachmentInitialSampleLocationsCount);
+    }
+    if (in_struct->pPostSubpassSampleLocations) {
+        pPostSubpassSampleLocations = new VkSubpassSampleLocationsEXT[in_struct->postSubpassSampleLocationsCount];
+        memcpy ((void *)pPostSubpassSampleLocations, (void *)in_struct->pPostSubpassSampleLocations, sizeof(VkSubpassSampleLocationsEXT)*in_struct->postSubpassSampleLocationsCount);
+    }
+}
+
+void safe_VkRenderPassSampleLocationsBeginInfoEXT::initialize(const safe_VkRenderPassSampleLocationsBeginInfoEXT* src)
+{
+    sType = src->sType;
+    attachmentInitialSampleLocationsCount = src->attachmentInitialSampleLocationsCount;
+    pAttachmentInitialSampleLocations = nullptr;
+    postSubpassSampleLocationsCount = src->postSubpassSampleLocationsCount;
+    pPostSubpassSampleLocations = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pAttachmentInitialSampleLocations) {
+        pAttachmentInitialSampleLocations = new VkAttachmentSampleLocationsEXT[src->attachmentInitialSampleLocationsCount];
+        memcpy ((void *)pAttachmentInitialSampleLocations, (void *)src->pAttachmentInitialSampleLocations, sizeof(VkAttachmentSampleLocationsEXT)*src->attachmentInitialSampleLocationsCount);
+    }
+    if (src->pPostSubpassSampleLocations) {
+        pPostSubpassSampleLocations = new VkSubpassSampleLocationsEXT[src->postSubpassSampleLocationsCount];
+        memcpy ((void *)pPostSubpassSampleLocations, (void *)src->pPostSubpassSampleLocations, sizeof(VkSubpassSampleLocationsEXT)*src->postSubpassSampleLocationsCount);
+    }
+}
+
+safe_VkPipelineSampleLocationsStateCreateInfoEXT::safe_VkPipelineSampleLocationsStateCreateInfoEXT(const VkPipelineSampleLocationsStateCreateInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    sampleLocationsEnable(in_struct->sampleLocationsEnable),
+    sampleLocationsInfo(&in_struct->sampleLocationsInfo)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPipelineSampleLocationsStateCreateInfoEXT::safe_VkPipelineSampleLocationsStateCreateInfoEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPipelineSampleLocationsStateCreateInfoEXT::safe_VkPipelineSampleLocationsStateCreateInfoEXT(const safe_VkPipelineSampleLocationsStateCreateInfoEXT& src)
+{
+    sType = src.sType;
+    sampleLocationsEnable = src.sampleLocationsEnable;
+    sampleLocationsInfo.initialize(&src.sampleLocationsInfo);
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPipelineSampleLocationsStateCreateInfoEXT& safe_VkPipelineSampleLocationsStateCreateInfoEXT::operator=(const safe_VkPipelineSampleLocationsStateCreateInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    sampleLocationsEnable = src.sampleLocationsEnable;
+    sampleLocationsInfo.initialize(&src.sampleLocationsInfo);
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPipelineSampleLocationsStateCreateInfoEXT::~safe_VkPipelineSampleLocationsStateCreateInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineSampleLocationsStateCreateInfoEXT::initialize(const VkPipelineSampleLocationsStateCreateInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    sampleLocationsEnable = in_struct->sampleLocationsEnable;
+    sampleLocationsInfo.initialize(&in_struct->sampleLocationsInfo);
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPipelineSampleLocationsStateCreateInfoEXT::initialize(const safe_VkPipelineSampleLocationsStateCreateInfoEXT* src)
+{
+    sType = src->sType;
+    sampleLocationsEnable = src->sampleLocationsEnable;
+    sampleLocationsInfo.initialize(&src->sampleLocationsInfo);
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceSampleLocationsPropertiesEXT::safe_VkPhysicalDeviceSampleLocationsPropertiesEXT(const VkPhysicalDeviceSampleLocationsPropertiesEXT* in_struct) :
+    sType(in_struct->sType),
+    sampleLocationSampleCounts(in_struct->sampleLocationSampleCounts),
+    maxSampleLocationGridSize(in_struct->maxSampleLocationGridSize),
+    sampleLocationSubPixelBits(in_struct->sampleLocationSubPixelBits),
+    variableSampleLocations(in_struct->variableSampleLocations)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    for (uint32_t i = 0; i < 2; ++i) {
+        sampleLocationCoordinateRange[i] = in_struct->sampleLocationCoordinateRange[i];
+    }
+}
+
+safe_VkPhysicalDeviceSampleLocationsPropertiesEXT::safe_VkPhysicalDeviceSampleLocationsPropertiesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceSampleLocationsPropertiesEXT::safe_VkPhysicalDeviceSampleLocationsPropertiesEXT(const safe_VkPhysicalDeviceSampleLocationsPropertiesEXT& src)
+{
+    sType = src.sType;
+    sampleLocationSampleCounts = src.sampleLocationSampleCounts;
+    maxSampleLocationGridSize = src.maxSampleLocationGridSize;
+    sampleLocationSubPixelBits = src.sampleLocationSubPixelBits;
+    variableSampleLocations = src.variableSampleLocations;
+    pNext = SafePnextCopy(src.pNext);
+    for (uint32_t i = 0; i < 2; ++i) {
+        sampleLocationCoordinateRange[i] = src.sampleLocationCoordinateRange[i];
+    }
+}
+
+safe_VkPhysicalDeviceSampleLocationsPropertiesEXT& safe_VkPhysicalDeviceSampleLocationsPropertiesEXT::operator=(const safe_VkPhysicalDeviceSampleLocationsPropertiesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    sampleLocationSampleCounts = src.sampleLocationSampleCounts;
+    maxSampleLocationGridSize = src.maxSampleLocationGridSize;
+    sampleLocationSubPixelBits = src.sampleLocationSubPixelBits;
+    variableSampleLocations = src.variableSampleLocations;
+    pNext = SafePnextCopy(src.pNext);
+    for (uint32_t i = 0; i < 2; ++i) {
+        sampleLocationCoordinateRange[i] = src.sampleLocationCoordinateRange[i];
+    }
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceSampleLocationsPropertiesEXT::~safe_VkPhysicalDeviceSampleLocationsPropertiesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceSampleLocationsPropertiesEXT::initialize(const VkPhysicalDeviceSampleLocationsPropertiesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    sampleLocationSampleCounts = in_struct->sampleLocationSampleCounts;
+    maxSampleLocationGridSize = in_struct->maxSampleLocationGridSize;
+    sampleLocationSubPixelBits = in_struct->sampleLocationSubPixelBits;
+    variableSampleLocations = in_struct->variableSampleLocations;
+    pNext = SafePnextCopy(in_struct->pNext);
+    for (uint32_t i = 0; i < 2; ++i) {
+        sampleLocationCoordinateRange[i] = in_struct->sampleLocationCoordinateRange[i];
+    }
+}
+
+void safe_VkPhysicalDeviceSampleLocationsPropertiesEXT::initialize(const safe_VkPhysicalDeviceSampleLocationsPropertiesEXT* src)
+{
+    sType = src->sType;
+    sampleLocationSampleCounts = src->sampleLocationSampleCounts;
+    maxSampleLocationGridSize = src->maxSampleLocationGridSize;
+    sampleLocationSubPixelBits = src->sampleLocationSubPixelBits;
+    variableSampleLocations = src->variableSampleLocations;
+    pNext = SafePnextCopy(src->pNext);
+    for (uint32_t i = 0; i < 2; ++i) {
+        sampleLocationCoordinateRange[i] = src->sampleLocationCoordinateRange[i];
+    }
+}
+
+safe_VkMultisamplePropertiesEXT::safe_VkMultisamplePropertiesEXT(const VkMultisamplePropertiesEXT* in_struct) :
+    sType(in_struct->sType),
+    maxSampleLocationGridSize(in_struct->maxSampleLocationGridSize)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkMultisamplePropertiesEXT::safe_VkMultisamplePropertiesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkMultisamplePropertiesEXT::safe_VkMultisamplePropertiesEXT(const safe_VkMultisamplePropertiesEXT& src)
+{
+    sType = src.sType;
+    maxSampleLocationGridSize = src.maxSampleLocationGridSize;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkMultisamplePropertiesEXT& safe_VkMultisamplePropertiesEXT::operator=(const safe_VkMultisamplePropertiesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    maxSampleLocationGridSize = src.maxSampleLocationGridSize;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkMultisamplePropertiesEXT::~safe_VkMultisamplePropertiesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkMultisamplePropertiesEXT::initialize(const VkMultisamplePropertiesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    maxSampleLocationGridSize = in_struct->maxSampleLocationGridSize;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkMultisamplePropertiesEXT::initialize(const safe_VkMultisamplePropertiesEXT* src)
+{
+    sType = src->sType;
+    maxSampleLocationGridSize = src->maxSampleLocationGridSize;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT::safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT(const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT* in_struct) :
+    sType(in_struct->sType),
+    advancedBlendCoherentOperations(in_struct->advancedBlendCoherentOperations)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT::safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT::safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT(const safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT& src)
+{
+    sType = src.sType;
+    advancedBlendCoherentOperations = src.advancedBlendCoherentOperations;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT& safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT::operator=(const safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    advancedBlendCoherentOperations = src.advancedBlendCoherentOperations;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT::~safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT::initialize(const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    advancedBlendCoherentOperations = in_struct->advancedBlendCoherentOperations;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT::initialize(const safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT* src)
+{
+    sType = src->sType;
+    advancedBlendCoherentOperations = src->advancedBlendCoherentOperations;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT::safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT(const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT* in_struct) :
+    sType(in_struct->sType),
+    advancedBlendMaxColorAttachments(in_struct->advancedBlendMaxColorAttachments),
+    advancedBlendIndependentBlend(in_struct->advancedBlendIndependentBlend),
+    advancedBlendNonPremultipliedSrcColor(in_struct->advancedBlendNonPremultipliedSrcColor),
+    advancedBlendNonPremultipliedDstColor(in_struct->advancedBlendNonPremultipliedDstColor),
+    advancedBlendCorrelatedOverlap(in_struct->advancedBlendCorrelatedOverlap),
+    advancedBlendAllOperations(in_struct->advancedBlendAllOperations)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT::safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT::safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT(const safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT& src)
+{
+    sType = src.sType;
+    advancedBlendMaxColorAttachments = src.advancedBlendMaxColorAttachments;
+    advancedBlendIndependentBlend = src.advancedBlendIndependentBlend;
+    advancedBlendNonPremultipliedSrcColor = src.advancedBlendNonPremultipliedSrcColor;
+    advancedBlendNonPremultipliedDstColor = src.advancedBlendNonPremultipliedDstColor;
+    advancedBlendCorrelatedOverlap = src.advancedBlendCorrelatedOverlap;
+    advancedBlendAllOperations = src.advancedBlendAllOperations;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT& safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT::operator=(const safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    advancedBlendMaxColorAttachments = src.advancedBlendMaxColorAttachments;
+    advancedBlendIndependentBlend = src.advancedBlendIndependentBlend;
+    advancedBlendNonPremultipliedSrcColor = src.advancedBlendNonPremultipliedSrcColor;
+    advancedBlendNonPremultipliedDstColor = src.advancedBlendNonPremultipliedDstColor;
+    advancedBlendCorrelatedOverlap = src.advancedBlendCorrelatedOverlap;
+    advancedBlendAllOperations = src.advancedBlendAllOperations;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT::~safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT::initialize(const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    advancedBlendMaxColorAttachments = in_struct->advancedBlendMaxColorAttachments;
+    advancedBlendIndependentBlend = in_struct->advancedBlendIndependentBlend;
+    advancedBlendNonPremultipliedSrcColor = in_struct->advancedBlendNonPremultipliedSrcColor;
+    advancedBlendNonPremultipliedDstColor = in_struct->advancedBlendNonPremultipliedDstColor;
+    advancedBlendCorrelatedOverlap = in_struct->advancedBlendCorrelatedOverlap;
+    advancedBlendAllOperations = in_struct->advancedBlendAllOperations;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT::initialize(const safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT* src)
+{
+    sType = src->sType;
+    advancedBlendMaxColorAttachments = src->advancedBlendMaxColorAttachments;
+    advancedBlendIndependentBlend = src->advancedBlendIndependentBlend;
+    advancedBlendNonPremultipliedSrcColor = src->advancedBlendNonPremultipliedSrcColor;
+    advancedBlendNonPremultipliedDstColor = src->advancedBlendNonPremultipliedDstColor;
+    advancedBlendCorrelatedOverlap = src->advancedBlendCorrelatedOverlap;
+    advancedBlendAllOperations = src->advancedBlendAllOperations;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT::safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT(const VkPipelineColorBlendAdvancedStateCreateInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    srcPremultiplied(in_struct->srcPremultiplied),
+    dstPremultiplied(in_struct->dstPremultiplied),
+    blendOverlap(in_struct->blendOverlap)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT::safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT::safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT(const safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT& src)
+{
+    sType = src.sType;
+    srcPremultiplied = src.srcPremultiplied;
+    dstPremultiplied = src.dstPremultiplied;
+    blendOverlap = src.blendOverlap;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT& safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT::operator=(const safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    srcPremultiplied = src.srcPremultiplied;
+    dstPremultiplied = src.dstPremultiplied;
+    blendOverlap = src.blendOverlap;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT::~safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT::initialize(const VkPipelineColorBlendAdvancedStateCreateInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    srcPremultiplied = in_struct->srcPremultiplied;
+    dstPremultiplied = in_struct->dstPremultiplied;
+    blendOverlap = in_struct->blendOverlap;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT::initialize(const safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT* src)
+{
+    sType = src->sType;
+    srcPremultiplied = src->srcPremultiplied;
+    dstPremultiplied = src->dstPremultiplied;
+    blendOverlap = src->blendOverlap;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPipelineCoverageToColorStateCreateInfoNV::safe_VkPipelineCoverageToColorStateCreateInfoNV(const VkPipelineCoverageToColorStateCreateInfoNV* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    coverageToColorEnable(in_struct->coverageToColorEnable),
+    coverageToColorLocation(in_struct->coverageToColorLocation)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPipelineCoverageToColorStateCreateInfoNV::safe_VkPipelineCoverageToColorStateCreateInfoNV() :
+    pNext(nullptr)
+{}
+
+safe_VkPipelineCoverageToColorStateCreateInfoNV::safe_VkPipelineCoverageToColorStateCreateInfoNV(const safe_VkPipelineCoverageToColorStateCreateInfoNV& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    coverageToColorEnable = src.coverageToColorEnable;
+    coverageToColorLocation = src.coverageToColorLocation;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPipelineCoverageToColorStateCreateInfoNV& safe_VkPipelineCoverageToColorStateCreateInfoNV::operator=(const safe_VkPipelineCoverageToColorStateCreateInfoNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    coverageToColorEnable = src.coverageToColorEnable;
+    coverageToColorLocation = src.coverageToColorLocation;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPipelineCoverageToColorStateCreateInfoNV::~safe_VkPipelineCoverageToColorStateCreateInfoNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineCoverageToColorStateCreateInfoNV::initialize(const VkPipelineCoverageToColorStateCreateInfoNV* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    coverageToColorEnable = in_struct->coverageToColorEnable;
+    coverageToColorLocation = in_struct->coverageToColorLocation;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPipelineCoverageToColorStateCreateInfoNV::initialize(const safe_VkPipelineCoverageToColorStateCreateInfoNV* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    coverageToColorEnable = src->coverageToColorEnable;
+    coverageToColorLocation = src->coverageToColorLocation;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPipelineCoverageModulationStateCreateInfoNV::safe_VkPipelineCoverageModulationStateCreateInfoNV(const VkPipelineCoverageModulationStateCreateInfoNV* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    coverageModulationMode(in_struct->coverageModulationMode),
+    coverageModulationTableEnable(in_struct->coverageModulationTableEnable),
+    coverageModulationTableCount(in_struct->coverageModulationTableCount),
+    pCoverageModulationTable(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pCoverageModulationTable) {
+        pCoverageModulationTable = new float[in_struct->coverageModulationTableCount];
+        memcpy ((void *)pCoverageModulationTable, (void *)in_struct->pCoverageModulationTable, sizeof(float)*in_struct->coverageModulationTableCount);
+    }
+}
+
+safe_VkPipelineCoverageModulationStateCreateInfoNV::safe_VkPipelineCoverageModulationStateCreateInfoNV() :
+    pNext(nullptr),
+    pCoverageModulationTable(nullptr)
+{}
+
+safe_VkPipelineCoverageModulationStateCreateInfoNV::safe_VkPipelineCoverageModulationStateCreateInfoNV(const safe_VkPipelineCoverageModulationStateCreateInfoNV& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    coverageModulationMode = src.coverageModulationMode;
+    coverageModulationTableEnable = src.coverageModulationTableEnable;
+    coverageModulationTableCount = src.coverageModulationTableCount;
+    pCoverageModulationTable = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pCoverageModulationTable) {
+        pCoverageModulationTable = new float[src.coverageModulationTableCount];
+        memcpy ((void *)pCoverageModulationTable, (void *)src.pCoverageModulationTable, sizeof(float)*src.coverageModulationTableCount);
+    }
+}
+
+safe_VkPipelineCoverageModulationStateCreateInfoNV& safe_VkPipelineCoverageModulationStateCreateInfoNV::operator=(const safe_VkPipelineCoverageModulationStateCreateInfoNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pCoverageModulationTable)
+        delete[] pCoverageModulationTable;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    coverageModulationMode = src.coverageModulationMode;
+    coverageModulationTableEnable = src.coverageModulationTableEnable;
+    coverageModulationTableCount = src.coverageModulationTableCount;
+    pCoverageModulationTable = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pCoverageModulationTable) {
+        pCoverageModulationTable = new float[src.coverageModulationTableCount];
+        memcpy ((void *)pCoverageModulationTable, (void *)src.pCoverageModulationTable, sizeof(float)*src.coverageModulationTableCount);
+    }
+
+    return *this;
+}
+
+safe_VkPipelineCoverageModulationStateCreateInfoNV::~safe_VkPipelineCoverageModulationStateCreateInfoNV()
+{
+    if (pCoverageModulationTable)
+        delete[] pCoverageModulationTable;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineCoverageModulationStateCreateInfoNV::initialize(const VkPipelineCoverageModulationStateCreateInfoNV* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    coverageModulationMode = in_struct->coverageModulationMode;
+    coverageModulationTableEnable = in_struct->coverageModulationTableEnable;
+    coverageModulationTableCount = in_struct->coverageModulationTableCount;
+    pCoverageModulationTable = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pCoverageModulationTable) {
+        pCoverageModulationTable = new float[in_struct->coverageModulationTableCount];
+        memcpy ((void *)pCoverageModulationTable, (void *)in_struct->pCoverageModulationTable, sizeof(float)*in_struct->coverageModulationTableCount);
+    }
+}
+
+void safe_VkPipelineCoverageModulationStateCreateInfoNV::initialize(const safe_VkPipelineCoverageModulationStateCreateInfoNV* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    coverageModulationMode = src->coverageModulationMode;
+    coverageModulationTableEnable = src->coverageModulationTableEnable;
+    coverageModulationTableCount = src->coverageModulationTableCount;
+    pCoverageModulationTable = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pCoverageModulationTable) {
+        pCoverageModulationTable = new float[src->coverageModulationTableCount];
+        memcpy ((void *)pCoverageModulationTable, (void *)src->pCoverageModulationTable, sizeof(float)*src->coverageModulationTableCount);
+    }
+}
+
+safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV::safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV(const VkPhysicalDeviceShaderSMBuiltinsPropertiesNV* in_struct) :
+    sType(in_struct->sType),
+    shaderSMCount(in_struct->shaderSMCount),
+    shaderWarpsPerSM(in_struct->shaderWarpsPerSM)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV::safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV::safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV(const safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV& src)
+{
+    sType = src.sType;
+    shaderSMCount = src.shaderSMCount;
+    shaderWarpsPerSM = src.shaderWarpsPerSM;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV& safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV::operator=(const safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    shaderSMCount = src.shaderSMCount;
+    shaderWarpsPerSM = src.shaderWarpsPerSM;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV::~safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV::initialize(const VkPhysicalDeviceShaderSMBuiltinsPropertiesNV* in_struct)
+{
+    sType = in_struct->sType;
+    shaderSMCount = in_struct->shaderSMCount;
+    shaderWarpsPerSM = in_struct->shaderWarpsPerSM;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV::initialize(const safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV* src)
+{
+    sType = src->sType;
+    shaderSMCount = src->shaderSMCount;
+    shaderWarpsPerSM = src->shaderWarpsPerSM;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV::safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV(const VkPhysicalDeviceShaderSMBuiltinsFeaturesNV* in_struct) :
+    sType(in_struct->sType),
+    shaderSMBuiltins(in_struct->shaderSMBuiltins)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV::safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV::safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV(const safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV& src)
+{
+    sType = src.sType;
+    shaderSMBuiltins = src.shaderSMBuiltins;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV& safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV::operator=(const safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    shaderSMBuiltins = src.shaderSMBuiltins;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV::~safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV::initialize(const VkPhysicalDeviceShaderSMBuiltinsFeaturesNV* in_struct)
+{
+    sType = in_struct->sType;
+    shaderSMBuiltins = in_struct->shaderSMBuiltins;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV::initialize(const safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV* src)
+{
+    sType = src->sType;
+    shaderSMBuiltins = src->shaderSMBuiltins;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDrmFormatModifierPropertiesListEXT::safe_VkDrmFormatModifierPropertiesListEXT(const VkDrmFormatModifierPropertiesListEXT* in_struct) :
+    sType(in_struct->sType),
+    drmFormatModifierCount(in_struct->drmFormatModifierCount),
+    pDrmFormatModifierProperties(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pDrmFormatModifierProperties) {
+        pDrmFormatModifierProperties = new VkDrmFormatModifierPropertiesEXT[in_struct->drmFormatModifierCount];
+        memcpy ((void *)pDrmFormatModifierProperties, (void *)in_struct->pDrmFormatModifierProperties, sizeof(VkDrmFormatModifierPropertiesEXT)*in_struct->drmFormatModifierCount);
+    }
+}
+
+safe_VkDrmFormatModifierPropertiesListEXT::safe_VkDrmFormatModifierPropertiesListEXT() :
+    pNext(nullptr),
+    pDrmFormatModifierProperties(nullptr)
+{}
+
+safe_VkDrmFormatModifierPropertiesListEXT::safe_VkDrmFormatModifierPropertiesListEXT(const safe_VkDrmFormatModifierPropertiesListEXT& src)
+{
+    sType = src.sType;
+    drmFormatModifierCount = src.drmFormatModifierCount;
+    pDrmFormatModifierProperties = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pDrmFormatModifierProperties) {
+        pDrmFormatModifierProperties = new VkDrmFormatModifierPropertiesEXT[src.drmFormatModifierCount];
+        memcpy ((void *)pDrmFormatModifierProperties, (void *)src.pDrmFormatModifierProperties, sizeof(VkDrmFormatModifierPropertiesEXT)*src.drmFormatModifierCount);
+    }
+}
+
+safe_VkDrmFormatModifierPropertiesListEXT& safe_VkDrmFormatModifierPropertiesListEXT::operator=(const safe_VkDrmFormatModifierPropertiesListEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pDrmFormatModifierProperties)
+        delete[] pDrmFormatModifierProperties;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    drmFormatModifierCount = src.drmFormatModifierCount;
+    pDrmFormatModifierProperties = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pDrmFormatModifierProperties) {
+        pDrmFormatModifierProperties = new VkDrmFormatModifierPropertiesEXT[src.drmFormatModifierCount];
+        memcpy ((void *)pDrmFormatModifierProperties, (void *)src.pDrmFormatModifierProperties, sizeof(VkDrmFormatModifierPropertiesEXT)*src.drmFormatModifierCount);
+    }
+
+    return *this;
+}
+
+safe_VkDrmFormatModifierPropertiesListEXT::~safe_VkDrmFormatModifierPropertiesListEXT()
+{
+    if (pDrmFormatModifierProperties)
+        delete[] pDrmFormatModifierProperties;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDrmFormatModifierPropertiesListEXT::initialize(const VkDrmFormatModifierPropertiesListEXT* in_struct)
+{
+    sType = in_struct->sType;
+    drmFormatModifierCount = in_struct->drmFormatModifierCount;
+    pDrmFormatModifierProperties = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pDrmFormatModifierProperties) {
+        pDrmFormatModifierProperties = new VkDrmFormatModifierPropertiesEXT[in_struct->drmFormatModifierCount];
+        memcpy ((void *)pDrmFormatModifierProperties, (void *)in_struct->pDrmFormatModifierProperties, sizeof(VkDrmFormatModifierPropertiesEXT)*in_struct->drmFormatModifierCount);
+    }
+}
+
+void safe_VkDrmFormatModifierPropertiesListEXT::initialize(const safe_VkDrmFormatModifierPropertiesListEXT* src)
+{
+    sType = src->sType;
+    drmFormatModifierCount = src->drmFormatModifierCount;
+    pDrmFormatModifierProperties = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pDrmFormatModifierProperties) {
+        pDrmFormatModifierProperties = new VkDrmFormatModifierPropertiesEXT[src->drmFormatModifierCount];
+        memcpy ((void *)pDrmFormatModifierProperties, (void *)src->pDrmFormatModifierProperties, sizeof(VkDrmFormatModifierPropertiesEXT)*src->drmFormatModifierCount);
+    }
+}
+
+safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT::safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT(const VkPhysicalDeviceImageDrmFormatModifierInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    drmFormatModifier(in_struct->drmFormatModifier),
+    sharingMode(in_struct->sharingMode),
+    queueFamilyIndexCount(in_struct->queueFamilyIndexCount),
+    pQueueFamilyIndices(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pQueueFamilyIndices) {
+        pQueueFamilyIndices = new uint32_t[in_struct->queueFamilyIndexCount];
+        memcpy ((void *)pQueueFamilyIndices, (void *)in_struct->pQueueFamilyIndices, sizeof(uint32_t)*in_struct->queueFamilyIndexCount);
+    }
+}
+
+safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT::safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT() :
+    pNext(nullptr),
+    pQueueFamilyIndices(nullptr)
+{}
+
+safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT::safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT(const safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT& src)
+{
+    sType = src.sType;
+    drmFormatModifier = src.drmFormatModifier;
+    sharingMode = src.sharingMode;
+    queueFamilyIndexCount = src.queueFamilyIndexCount;
+    pQueueFamilyIndices = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pQueueFamilyIndices) {
+        pQueueFamilyIndices = new uint32_t[src.queueFamilyIndexCount];
+        memcpy ((void *)pQueueFamilyIndices, (void *)src.pQueueFamilyIndices, sizeof(uint32_t)*src.queueFamilyIndexCount);
+    }
+}
+
+safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT& safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT::operator=(const safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pQueueFamilyIndices)
+        delete[] pQueueFamilyIndices;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    drmFormatModifier = src.drmFormatModifier;
+    sharingMode = src.sharingMode;
+    queueFamilyIndexCount = src.queueFamilyIndexCount;
+    pQueueFamilyIndices = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pQueueFamilyIndices) {
+        pQueueFamilyIndices = new uint32_t[src.queueFamilyIndexCount];
+        memcpy ((void *)pQueueFamilyIndices, (void *)src.pQueueFamilyIndices, sizeof(uint32_t)*src.queueFamilyIndexCount);
+    }
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT::~safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT()
+{
+    if (pQueueFamilyIndices)
+        delete[] pQueueFamilyIndices;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT::initialize(const VkPhysicalDeviceImageDrmFormatModifierInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    drmFormatModifier = in_struct->drmFormatModifier;
+    sharingMode = in_struct->sharingMode;
+    queueFamilyIndexCount = in_struct->queueFamilyIndexCount;
+    pQueueFamilyIndices = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pQueueFamilyIndices) {
+        pQueueFamilyIndices = new uint32_t[in_struct->queueFamilyIndexCount];
+        memcpy ((void *)pQueueFamilyIndices, (void *)in_struct->pQueueFamilyIndices, sizeof(uint32_t)*in_struct->queueFamilyIndexCount);
+    }
+}
+
+void safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT::initialize(const safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT* src)
+{
+    sType = src->sType;
+    drmFormatModifier = src->drmFormatModifier;
+    sharingMode = src->sharingMode;
+    queueFamilyIndexCount = src->queueFamilyIndexCount;
+    pQueueFamilyIndices = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pQueueFamilyIndices) {
+        pQueueFamilyIndices = new uint32_t[src->queueFamilyIndexCount];
+        memcpy ((void *)pQueueFamilyIndices, (void *)src->pQueueFamilyIndices, sizeof(uint32_t)*src->queueFamilyIndexCount);
+    }
+}
+
+safe_VkImageDrmFormatModifierListCreateInfoEXT::safe_VkImageDrmFormatModifierListCreateInfoEXT(const VkImageDrmFormatModifierListCreateInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    drmFormatModifierCount(in_struct->drmFormatModifierCount),
+    pDrmFormatModifiers(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pDrmFormatModifiers) {
+        pDrmFormatModifiers = new uint64_t[in_struct->drmFormatModifierCount];
+        memcpy ((void *)pDrmFormatModifiers, (void *)in_struct->pDrmFormatModifiers, sizeof(uint64_t)*in_struct->drmFormatModifierCount);
+    }
+}
+
+safe_VkImageDrmFormatModifierListCreateInfoEXT::safe_VkImageDrmFormatModifierListCreateInfoEXT() :
+    pNext(nullptr),
+    pDrmFormatModifiers(nullptr)
+{}
+
+safe_VkImageDrmFormatModifierListCreateInfoEXT::safe_VkImageDrmFormatModifierListCreateInfoEXT(const safe_VkImageDrmFormatModifierListCreateInfoEXT& src)
+{
+    sType = src.sType;
+    drmFormatModifierCount = src.drmFormatModifierCount;
+    pDrmFormatModifiers = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pDrmFormatModifiers) {
+        pDrmFormatModifiers = new uint64_t[src.drmFormatModifierCount];
+        memcpy ((void *)pDrmFormatModifiers, (void *)src.pDrmFormatModifiers, sizeof(uint64_t)*src.drmFormatModifierCount);
+    }
+}
+
+safe_VkImageDrmFormatModifierListCreateInfoEXT& safe_VkImageDrmFormatModifierListCreateInfoEXT::operator=(const safe_VkImageDrmFormatModifierListCreateInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pDrmFormatModifiers)
+        delete[] pDrmFormatModifiers;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    drmFormatModifierCount = src.drmFormatModifierCount;
+    pDrmFormatModifiers = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pDrmFormatModifiers) {
+        pDrmFormatModifiers = new uint64_t[src.drmFormatModifierCount];
+        memcpy ((void *)pDrmFormatModifiers, (void *)src.pDrmFormatModifiers, sizeof(uint64_t)*src.drmFormatModifierCount);
+    }
+
+    return *this;
+}
+
+safe_VkImageDrmFormatModifierListCreateInfoEXT::~safe_VkImageDrmFormatModifierListCreateInfoEXT()
+{
+    if (pDrmFormatModifiers)
+        delete[] pDrmFormatModifiers;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkImageDrmFormatModifierListCreateInfoEXT::initialize(const VkImageDrmFormatModifierListCreateInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    drmFormatModifierCount = in_struct->drmFormatModifierCount;
+    pDrmFormatModifiers = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pDrmFormatModifiers) {
+        pDrmFormatModifiers = new uint64_t[in_struct->drmFormatModifierCount];
+        memcpy ((void *)pDrmFormatModifiers, (void *)in_struct->pDrmFormatModifiers, sizeof(uint64_t)*in_struct->drmFormatModifierCount);
+    }
+}
+
+void safe_VkImageDrmFormatModifierListCreateInfoEXT::initialize(const safe_VkImageDrmFormatModifierListCreateInfoEXT* src)
+{
+    sType = src->sType;
+    drmFormatModifierCount = src->drmFormatModifierCount;
+    pDrmFormatModifiers = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pDrmFormatModifiers) {
+        pDrmFormatModifiers = new uint64_t[src->drmFormatModifierCount];
+        memcpy ((void *)pDrmFormatModifiers, (void *)src->pDrmFormatModifiers, sizeof(uint64_t)*src->drmFormatModifierCount);
+    }
+}
+
+safe_VkImageDrmFormatModifierExplicitCreateInfoEXT::safe_VkImageDrmFormatModifierExplicitCreateInfoEXT(const VkImageDrmFormatModifierExplicitCreateInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    drmFormatModifier(in_struct->drmFormatModifier),
+    drmFormatModifierPlaneCount(in_struct->drmFormatModifierPlaneCount),
+    pPlaneLayouts(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pPlaneLayouts) {
+        pPlaneLayouts = new VkSubresourceLayout[in_struct->drmFormatModifierPlaneCount];
+        memcpy ((void *)pPlaneLayouts, (void *)in_struct->pPlaneLayouts, sizeof(VkSubresourceLayout)*in_struct->drmFormatModifierPlaneCount);
+    }
+}
+
+safe_VkImageDrmFormatModifierExplicitCreateInfoEXT::safe_VkImageDrmFormatModifierExplicitCreateInfoEXT() :
+    pNext(nullptr),
+    pPlaneLayouts(nullptr)
+{}
+
+safe_VkImageDrmFormatModifierExplicitCreateInfoEXT::safe_VkImageDrmFormatModifierExplicitCreateInfoEXT(const safe_VkImageDrmFormatModifierExplicitCreateInfoEXT& src)
+{
+    sType = src.sType;
+    drmFormatModifier = src.drmFormatModifier;
+    drmFormatModifierPlaneCount = src.drmFormatModifierPlaneCount;
+    pPlaneLayouts = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pPlaneLayouts) {
+        pPlaneLayouts = new VkSubresourceLayout[src.drmFormatModifierPlaneCount];
+        memcpy ((void *)pPlaneLayouts, (void *)src.pPlaneLayouts, sizeof(VkSubresourceLayout)*src.drmFormatModifierPlaneCount);
+    }
+}
+
+safe_VkImageDrmFormatModifierExplicitCreateInfoEXT& safe_VkImageDrmFormatModifierExplicitCreateInfoEXT::operator=(const safe_VkImageDrmFormatModifierExplicitCreateInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pPlaneLayouts)
+        delete[] pPlaneLayouts;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    drmFormatModifier = src.drmFormatModifier;
+    drmFormatModifierPlaneCount = src.drmFormatModifierPlaneCount;
+    pPlaneLayouts = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pPlaneLayouts) {
+        pPlaneLayouts = new VkSubresourceLayout[src.drmFormatModifierPlaneCount];
+        memcpy ((void *)pPlaneLayouts, (void *)src.pPlaneLayouts, sizeof(VkSubresourceLayout)*src.drmFormatModifierPlaneCount);
+    }
+
+    return *this;
+}
+
+safe_VkImageDrmFormatModifierExplicitCreateInfoEXT::~safe_VkImageDrmFormatModifierExplicitCreateInfoEXT()
+{
+    if (pPlaneLayouts)
+        delete[] pPlaneLayouts;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkImageDrmFormatModifierExplicitCreateInfoEXT::initialize(const VkImageDrmFormatModifierExplicitCreateInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    drmFormatModifier = in_struct->drmFormatModifier;
+    drmFormatModifierPlaneCount = in_struct->drmFormatModifierPlaneCount;
+    pPlaneLayouts = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pPlaneLayouts) {
+        pPlaneLayouts = new VkSubresourceLayout[in_struct->drmFormatModifierPlaneCount];
+        memcpy ((void *)pPlaneLayouts, (void *)in_struct->pPlaneLayouts, sizeof(VkSubresourceLayout)*in_struct->drmFormatModifierPlaneCount);
+    }
+}
+
+void safe_VkImageDrmFormatModifierExplicitCreateInfoEXT::initialize(const safe_VkImageDrmFormatModifierExplicitCreateInfoEXT* src)
+{
+    sType = src->sType;
+    drmFormatModifier = src->drmFormatModifier;
+    drmFormatModifierPlaneCount = src->drmFormatModifierPlaneCount;
+    pPlaneLayouts = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pPlaneLayouts) {
+        pPlaneLayouts = new VkSubresourceLayout[src->drmFormatModifierPlaneCount];
+        memcpy ((void *)pPlaneLayouts, (void *)src->pPlaneLayouts, sizeof(VkSubresourceLayout)*src->drmFormatModifierPlaneCount);
+    }
+}
+
+safe_VkImageDrmFormatModifierPropertiesEXT::safe_VkImageDrmFormatModifierPropertiesEXT(const VkImageDrmFormatModifierPropertiesEXT* in_struct) :
+    sType(in_struct->sType),
+    drmFormatModifier(in_struct->drmFormatModifier)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkImageDrmFormatModifierPropertiesEXT::safe_VkImageDrmFormatModifierPropertiesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkImageDrmFormatModifierPropertiesEXT::safe_VkImageDrmFormatModifierPropertiesEXT(const safe_VkImageDrmFormatModifierPropertiesEXT& src)
+{
+    sType = src.sType;
+    drmFormatModifier = src.drmFormatModifier;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkImageDrmFormatModifierPropertiesEXT& safe_VkImageDrmFormatModifierPropertiesEXT::operator=(const safe_VkImageDrmFormatModifierPropertiesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    drmFormatModifier = src.drmFormatModifier;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkImageDrmFormatModifierPropertiesEXT::~safe_VkImageDrmFormatModifierPropertiesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkImageDrmFormatModifierPropertiesEXT::initialize(const VkImageDrmFormatModifierPropertiesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    drmFormatModifier = in_struct->drmFormatModifier;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkImageDrmFormatModifierPropertiesEXT::initialize(const safe_VkImageDrmFormatModifierPropertiesEXT* src)
+{
+    sType = src->sType;
+    drmFormatModifier = src->drmFormatModifier;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkValidationCacheCreateInfoEXT::safe_VkValidationCacheCreateInfoEXT(const VkValidationCacheCreateInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    initialDataSize(in_struct->initialDataSize),
+    pInitialData(in_struct->pInitialData)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkValidationCacheCreateInfoEXT::safe_VkValidationCacheCreateInfoEXT() :
+    pNext(nullptr),
+    pInitialData(nullptr)
+{}
+
+safe_VkValidationCacheCreateInfoEXT::safe_VkValidationCacheCreateInfoEXT(const safe_VkValidationCacheCreateInfoEXT& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    initialDataSize = src.initialDataSize;
+    pInitialData = src.pInitialData;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkValidationCacheCreateInfoEXT& safe_VkValidationCacheCreateInfoEXT::operator=(const safe_VkValidationCacheCreateInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    initialDataSize = src.initialDataSize;
+    pInitialData = src.pInitialData;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkValidationCacheCreateInfoEXT::~safe_VkValidationCacheCreateInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkValidationCacheCreateInfoEXT::initialize(const VkValidationCacheCreateInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    initialDataSize = in_struct->initialDataSize;
+    pInitialData = in_struct->pInitialData;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkValidationCacheCreateInfoEXT::initialize(const safe_VkValidationCacheCreateInfoEXT* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    initialDataSize = src->initialDataSize;
+    pInitialData = src->pInitialData;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkShaderModuleValidationCacheCreateInfoEXT::safe_VkShaderModuleValidationCacheCreateInfoEXT(const VkShaderModuleValidationCacheCreateInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    validationCache(in_struct->validationCache)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkShaderModuleValidationCacheCreateInfoEXT::safe_VkShaderModuleValidationCacheCreateInfoEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkShaderModuleValidationCacheCreateInfoEXT::safe_VkShaderModuleValidationCacheCreateInfoEXT(const safe_VkShaderModuleValidationCacheCreateInfoEXT& src)
+{
+    sType = src.sType;
+    validationCache = src.validationCache;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkShaderModuleValidationCacheCreateInfoEXT& safe_VkShaderModuleValidationCacheCreateInfoEXT::operator=(const safe_VkShaderModuleValidationCacheCreateInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    validationCache = src.validationCache;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkShaderModuleValidationCacheCreateInfoEXT::~safe_VkShaderModuleValidationCacheCreateInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkShaderModuleValidationCacheCreateInfoEXT::initialize(const VkShaderModuleValidationCacheCreateInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    validationCache = in_struct->validationCache;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkShaderModuleValidationCacheCreateInfoEXT::initialize(const safe_VkShaderModuleValidationCacheCreateInfoEXT* src)
+{
+    sType = src->sType;
+    validationCache = src->validationCache;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT::safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT(const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    bindingCount(in_struct->bindingCount),
+    pBindingFlags(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pBindingFlags) {
+        pBindingFlags = new VkDescriptorBindingFlagsEXT[in_struct->bindingCount];
+        memcpy ((void *)pBindingFlags, (void *)in_struct->pBindingFlags, sizeof(VkDescriptorBindingFlagsEXT)*in_struct->bindingCount);
+    }
+}
+
+safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT::safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT() :
+    pNext(nullptr),
+    pBindingFlags(nullptr)
+{}
+
+safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT::safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT(const safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT& src)
+{
+    sType = src.sType;
+    bindingCount = src.bindingCount;
+    pBindingFlags = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pBindingFlags) {
+        pBindingFlags = new VkDescriptorBindingFlagsEXT[src.bindingCount];
+        memcpy ((void *)pBindingFlags, (void *)src.pBindingFlags, sizeof(VkDescriptorBindingFlagsEXT)*src.bindingCount);
+    }
+}
+
+safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT& safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT::operator=(const safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pBindingFlags)
+        delete[] pBindingFlags;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    bindingCount = src.bindingCount;
+    pBindingFlags = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pBindingFlags) {
+        pBindingFlags = new VkDescriptorBindingFlagsEXT[src.bindingCount];
+        memcpy ((void *)pBindingFlags, (void *)src.pBindingFlags, sizeof(VkDescriptorBindingFlagsEXT)*src.bindingCount);
+    }
+
+    return *this;
+}
+
+safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT::~safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT()
+{
+    if (pBindingFlags)
+        delete[] pBindingFlags;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT::initialize(const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    bindingCount = in_struct->bindingCount;
+    pBindingFlags = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pBindingFlags) {
+        pBindingFlags = new VkDescriptorBindingFlagsEXT[in_struct->bindingCount];
+        memcpy ((void *)pBindingFlags, (void *)in_struct->pBindingFlags, sizeof(VkDescriptorBindingFlagsEXT)*in_struct->bindingCount);
+    }
+}
+
+void safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT::initialize(const safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT* src)
+{
+    sType = src->sType;
+    bindingCount = src->bindingCount;
+    pBindingFlags = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pBindingFlags) {
+        pBindingFlags = new VkDescriptorBindingFlagsEXT[src->bindingCount];
+        memcpy ((void *)pBindingFlags, (void *)src->pBindingFlags, sizeof(VkDescriptorBindingFlagsEXT)*src->bindingCount);
+    }
+}
+
+safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT::safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT(const VkPhysicalDeviceDescriptorIndexingFeaturesEXT* in_struct) :
+    sType(in_struct->sType),
+    shaderInputAttachmentArrayDynamicIndexing(in_struct->shaderInputAttachmentArrayDynamicIndexing),
+    shaderUniformTexelBufferArrayDynamicIndexing(in_struct->shaderUniformTexelBufferArrayDynamicIndexing),
+    shaderStorageTexelBufferArrayDynamicIndexing(in_struct->shaderStorageTexelBufferArrayDynamicIndexing),
+    shaderUniformBufferArrayNonUniformIndexing(in_struct->shaderUniformBufferArrayNonUniformIndexing),
+    shaderSampledImageArrayNonUniformIndexing(in_struct->shaderSampledImageArrayNonUniformIndexing),
+    shaderStorageBufferArrayNonUniformIndexing(in_struct->shaderStorageBufferArrayNonUniformIndexing),
+    shaderStorageImageArrayNonUniformIndexing(in_struct->shaderStorageImageArrayNonUniformIndexing),
+    shaderInputAttachmentArrayNonUniformIndexing(in_struct->shaderInputAttachmentArrayNonUniformIndexing),
+    shaderUniformTexelBufferArrayNonUniformIndexing(in_struct->shaderUniformTexelBufferArrayNonUniformIndexing),
+    shaderStorageTexelBufferArrayNonUniformIndexing(in_struct->shaderStorageTexelBufferArrayNonUniformIndexing),
+    descriptorBindingUniformBufferUpdateAfterBind(in_struct->descriptorBindingUniformBufferUpdateAfterBind),
+    descriptorBindingSampledImageUpdateAfterBind(in_struct->descriptorBindingSampledImageUpdateAfterBind),
+    descriptorBindingStorageImageUpdateAfterBind(in_struct->descriptorBindingStorageImageUpdateAfterBind),
+    descriptorBindingStorageBufferUpdateAfterBind(in_struct->descriptorBindingStorageBufferUpdateAfterBind),
+    descriptorBindingUniformTexelBufferUpdateAfterBind(in_struct->descriptorBindingUniformTexelBufferUpdateAfterBind),
+    descriptorBindingStorageTexelBufferUpdateAfterBind(in_struct->descriptorBindingStorageTexelBufferUpdateAfterBind),
+    descriptorBindingUpdateUnusedWhilePending(in_struct->descriptorBindingUpdateUnusedWhilePending),
+    descriptorBindingPartiallyBound(in_struct->descriptorBindingPartiallyBound),
+    descriptorBindingVariableDescriptorCount(in_struct->descriptorBindingVariableDescriptorCount),
+    runtimeDescriptorArray(in_struct->runtimeDescriptorArray)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT::safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT::safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT(const safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT& src)
+{
+    sType = src.sType;
+    shaderInputAttachmentArrayDynamicIndexing = src.shaderInputAttachmentArrayDynamicIndexing;
+    shaderUniformTexelBufferArrayDynamicIndexing = src.shaderUniformTexelBufferArrayDynamicIndexing;
+    shaderStorageTexelBufferArrayDynamicIndexing = src.shaderStorageTexelBufferArrayDynamicIndexing;
+    shaderUniformBufferArrayNonUniformIndexing = src.shaderUniformBufferArrayNonUniformIndexing;
+    shaderSampledImageArrayNonUniformIndexing = src.shaderSampledImageArrayNonUniformIndexing;
+    shaderStorageBufferArrayNonUniformIndexing = src.shaderStorageBufferArrayNonUniformIndexing;
+    shaderStorageImageArrayNonUniformIndexing = src.shaderStorageImageArrayNonUniformIndexing;
+    shaderInputAttachmentArrayNonUniformIndexing = src.shaderInputAttachmentArrayNonUniformIndexing;
+    shaderUniformTexelBufferArrayNonUniformIndexing = src.shaderUniformTexelBufferArrayNonUniformIndexing;
+    shaderStorageTexelBufferArrayNonUniformIndexing = src.shaderStorageTexelBufferArrayNonUniformIndexing;
+    descriptorBindingUniformBufferUpdateAfterBind = src.descriptorBindingUniformBufferUpdateAfterBind;
+    descriptorBindingSampledImageUpdateAfterBind = src.descriptorBindingSampledImageUpdateAfterBind;
+    descriptorBindingStorageImageUpdateAfterBind = src.descriptorBindingStorageImageUpdateAfterBind;
+    descriptorBindingStorageBufferUpdateAfterBind = src.descriptorBindingStorageBufferUpdateAfterBind;
+    descriptorBindingUniformTexelBufferUpdateAfterBind = src.descriptorBindingUniformTexelBufferUpdateAfterBind;
+    descriptorBindingStorageTexelBufferUpdateAfterBind = src.descriptorBindingStorageTexelBufferUpdateAfterBind;
+    descriptorBindingUpdateUnusedWhilePending = src.descriptorBindingUpdateUnusedWhilePending;
+    descriptorBindingPartiallyBound = src.descriptorBindingPartiallyBound;
+    descriptorBindingVariableDescriptorCount = src.descriptorBindingVariableDescriptorCount;
+    runtimeDescriptorArray = src.runtimeDescriptorArray;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT& safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT::operator=(const safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    shaderInputAttachmentArrayDynamicIndexing = src.shaderInputAttachmentArrayDynamicIndexing;
+    shaderUniformTexelBufferArrayDynamicIndexing = src.shaderUniformTexelBufferArrayDynamicIndexing;
+    shaderStorageTexelBufferArrayDynamicIndexing = src.shaderStorageTexelBufferArrayDynamicIndexing;
+    shaderUniformBufferArrayNonUniformIndexing = src.shaderUniformBufferArrayNonUniformIndexing;
+    shaderSampledImageArrayNonUniformIndexing = src.shaderSampledImageArrayNonUniformIndexing;
+    shaderStorageBufferArrayNonUniformIndexing = src.shaderStorageBufferArrayNonUniformIndexing;
+    shaderStorageImageArrayNonUniformIndexing = src.shaderStorageImageArrayNonUniformIndexing;
+    shaderInputAttachmentArrayNonUniformIndexing = src.shaderInputAttachmentArrayNonUniformIndexing;
+    shaderUniformTexelBufferArrayNonUniformIndexing = src.shaderUniformTexelBufferArrayNonUniformIndexing;
+    shaderStorageTexelBufferArrayNonUniformIndexing = src.shaderStorageTexelBufferArrayNonUniformIndexing;
+    descriptorBindingUniformBufferUpdateAfterBind = src.descriptorBindingUniformBufferUpdateAfterBind;
+    descriptorBindingSampledImageUpdateAfterBind = src.descriptorBindingSampledImageUpdateAfterBind;
+    descriptorBindingStorageImageUpdateAfterBind = src.descriptorBindingStorageImageUpdateAfterBind;
+    descriptorBindingStorageBufferUpdateAfterBind = src.descriptorBindingStorageBufferUpdateAfterBind;
+    descriptorBindingUniformTexelBufferUpdateAfterBind = src.descriptorBindingUniformTexelBufferUpdateAfterBind;
+    descriptorBindingStorageTexelBufferUpdateAfterBind = src.descriptorBindingStorageTexelBufferUpdateAfterBind;
+    descriptorBindingUpdateUnusedWhilePending = src.descriptorBindingUpdateUnusedWhilePending;
+    descriptorBindingPartiallyBound = src.descriptorBindingPartiallyBound;
+    descriptorBindingVariableDescriptorCount = src.descriptorBindingVariableDescriptorCount;
+    runtimeDescriptorArray = src.runtimeDescriptorArray;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT::~safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT::initialize(const VkPhysicalDeviceDescriptorIndexingFeaturesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    shaderInputAttachmentArrayDynamicIndexing = in_struct->shaderInputAttachmentArrayDynamicIndexing;
+    shaderUniformTexelBufferArrayDynamicIndexing = in_struct->shaderUniformTexelBufferArrayDynamicIndexing;
+    shaderStorageTexelBufferArrayDynamicIndexing = in_struct->shaderStorageTexelBufferArrayDynamicIndexing;
+    shaderUniformBufferArrayNonUniformIndexing = in_struct->shaderUniformBufferArrayNonUniformIndexing;
+    shaderSampledImageArrayNonUniformIndexing = in_struct->shaderSampledImageArrayNonUniformIndexing;
+    shaderStorageBufferArrayNonUniformIndexing = in_struct->shaderStorageBufferArrayNonUniformIndexing;
+    shaderStorageImageArrayNonUniformIndexing = in_struct->shaderStorageImageArrayNonUniformIndexing;
+    shaderInputAttachmentArrayNonUniformIndexing = in_struct->shaderInputAttachmentArrayNonUniformIndexing;
+    shaderUniformTexelBufferArrayNonUniformIndexing = in_struct->shaderUniformTexelBufferArrayNonUniformIndexing;
+    shaderStorageTexelBufferArrayNonUniformIndexing = in_struct->shaderStorageTexelBufferArrayNonUniformIndexing;
+    descriptorBindingUniformBufferUpdateAfterBind = in_struct->descriptorBindingUniformBufferUpdateAfterBind;
+    descriptorBindingSampledImageUpdateAfterBind = in_struct->descriptorBindingSampledImageUpdateAfterBind;
+    descriptorBindingStorageImageUpdateAfterBind = in_struct->descriptorBindingStorageImageUpdateAfterBind;
+    descriptorBindingStorageBufferUpdateAfterBind = in_struct->descriptorBindingStorageBufferUpdateAfterBind;
+    descriptorBindingUniformTexelBufferUpdateAfterBind = in_struct->descriptorBindingUniformTexelBufferUpdateAfterBind;
+    descriptorBindingStorageTexelBufferUpdateAfterBind = in_struct->descriptorBindingStorageTexelBufferUpdateAfterBind;
+    descriptorBindingUpdateUnusedWhilePending = in_struct->descriptorBindingUpdateUnusedWhilePending;
+    descriptorBindingPartiallyBound = in_struct->descriptorBindingPartiallyBound;
+    descriptorBindingVariableDescriptorCount = in_struct->descriptorBindingVariableDescriptorCount;
+    runtimeDescriptorArray = in_struct->runtimeDescriptorArray;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT::initialize(const safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT* src)
+{
+    sType = src->sType;
+    shaderInputAttachmentArrayDynamicIndexing = src->shaderInputAttachmentArrayDynamicIndexing;
+    shaderUniformTexelBufferArrayDynamicIndexing = src->shaderUniformTexelBufferArrayDynamicIndexing;
+    shaderStorageTexelBufferArrayDynamicIndexing = src->shaderStorageTexelBufferArrayDynamicIndexing;
+    shaderUniformBufferArrayNonUniformIndexing = src->shaderUniformBufferArrayNonUniformIndexing;
+    shaderSampledImageArrayNonUniformIndexing = src->shaderSampledImageArrayNonUniformIndexing;
+    shaderStorageBufferArrayNonUniformIndexing = src->shaderStorageBufferArrayNonUniformIndexing;
+    shaderStorageImageArrayNonUniformIndexing = src->shaderStorageImageArrayNonUniformIndexing;
+    shaderInputAttachmentArrayNonUniformIndexing = src->shaderInputAttachmentArrayNonUniformIndexing;
+    shaderUniformTexelBufferArrayNonUniformIndexing = src->shaderUniformTexelBufferArrayNonUniformIndexing;
+    shaderStorageTexelBufferArrayNonUniformIndexing = src->shaderStorageTexelBufferArrayNonUniformIndexing;
+    descriptorBindingUniformBufferUpdateAfterBind = src->descriptorBindingUniformBufferUpdateAfterBind;
+    descriptorBindingSampledImageUpdateAfterBind = src->descriptorBindingSampledImageUpdateAfterBind;
+    descriptorBindingStorageImageUpdateAfterBind = src->descriptorBindingStorageImageUpdateAfterBind;
+    descriptorBindingStorageBufferUpdateAfterBind = src->descriptorBindingStorageBufferUpdateAfterBind;
+    descriptorBindingUniformTexelBufferUpdateAfterBind = src->descriptorBindingUniformTexelBufferUpdateAfterBind;
+    descriptorBindingStorageTexelBufferUpdateAfterBind = src->descriptorBindingStorageTexelBufferUpdateAfterBind;
+    descriptorBindingUpdateUnusedWhilePending = src->descriptorBindingUpdateUnusedWhilePending;
+    descriptorBindingPartiallyBound = src->descriptorBindingPartiallyBound;
+    descriptorBindingVariableDescriptorCount = src->descriptorBindingVariableDescriptorCount;
+    runtimeDescriptorArray = src->runtimeDescriptorArray;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT::safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT(const VkPhysicalDeviceDescriptorIndexingPropertiesEXT* in_struct) :
+    sType(in_struct->sType),
+    maxUpdateAfterBindDescriptorsInAllPools(in_struct->maxUpdateAfterBindDescriptorsInAllPools),
+    shaderUniformBufferArrayNonUniformIndexingNative(in_struct->shaderUniformBufferArrayNonUniformIndexingNative),
+    shaderSampledImageArrayNonUniformIndexingNative(in_struct->shaderSampledImageArrayNonUniformIndexingNative),
+    shaderStorageBufferArrayNonUniformIndexingNative(in_struct->shaderStorageBufferArrayNonUniformIndexingNative),
+    shaderStorageImageArrayNonUniformIndexingNative(in_struct->shaderStorageImageArrayNonUniformIndexingNative),
+    shaderInputAttachmentArrayNonUniformIndexingNative(in_struct->shaderInputAttachmentArrayNonUniformIndexingNative),
+    robustBufferAccessUpdateAfterBind(in_struct->robustBufferAccessUpdateAfterBind),
+    quadDivergentImplicitLod(in_struct->quadDivergentImplicitLod),
+    maxPerStageDescriptorUpdateAfterBindSamplers(in_struct->maxPerStageDescriptorUpdateAfterBindSamplers),
+    maxPerStageDescriptorUpdateAfterBindUniformBuffers(in_struct->maxPerStageDescriptorUpdateAfterBindUniformBuffers),
+    maxPerStageDescriptorUpdateAfterBindStorageBuffers(in_struct->maxPerStageDescriptorUpdateAfterBindStorageBuffers),
+    maxPerStageDescriptorUpdateAfterBindSampledImages(in_struct->maxPerStageDescriptorUpdateAfterBindSampledImages),
+    maxPerStageDescriptorUpdateAfterBindStorageImages(in_struct->maxPerStageDescriptorUpdateAfterBindStorageImages),
+    maxPerStageDescriptorUpdateAfterBindInputAttachments(in_struct->maxPerStageDescriptorUpdateAfterBindInputAttachments),
+    maxPerStageUpdateAfterBindResources(in_struct->maxPerStageUpdateAfterBindResources),
+    maxDescriptorSetUpdateAfterBindSamplers(in_struct->maxDescriptorSetUpdateAfterBindSamplers),
+    maxDescriptorSetUpdateAfterBindUniformBuffers(in_struct->maxDescriptorSetUpdateAfterBindUniformBuffers),
+    maxDescriptorSetUpdateAfterBindUniformBuffersDynamic(in_struct->maxDescriptorSetUpdateAfterBindUniformBuffersDynamic),
+    maxDescriptorSetUpdateAfterBindStorageBuffers(in_struct->maxDescriptorSetUpdateAfterBindStorageBuffers),
+    maxDescriptorSetUpdateAfterBindStorageBuffersDynamic(in_struct->maxDescriptorSetUpdateAfterBindStorageBuffersDynamic),
+    maxDescriptorSetUpdateAfterBindSampledImages(in_struct->maxDescriptorSetUpdateAfterBindSampledImages),
+    maxDescriptorSetUpdateAfterBindStorageImages(in_struct->maxDescriptorSetUpdateAfterBindStorageImages),
+    maxDescriptorSetUpdateAfterBindInputAttachments(in_struct->maxDescriptorSetUpdateAfterBindInputAttachments)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT::safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT::safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT(const safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT& src)
+{
+    sType = src.sType;
+    maxUpdateAfterBindDescriptorsInAllPools = src.maxUpdateAfterBindDescriptorsInAllPools;
+    shaderUniformBufferArrayNonUniformIndexingNative = src.shaderUniformBufferArrayNonUniformIndexingNative;
+    shaderSampledImageArrayNonUniformIndexingNative = src.shaderSampledImageArrayNonUniformIndexingNative;
+    shaderStorageBufferArrayNonUniformIndexingNative = src.shaderStorageBufferArrayNonUniformIndexingNative;
+    shaderStorageImageArrayNonUniformIndexingNative = src.shaderStorageImageArrayNonUniformIndexingNative;
+    shaderInputAttachmentArrayNonUniformIndexingNative = src.shaderInputAttachmentArrayNonUniformIndexingNative;
+    robustBufferAccessUpdateAfterBind = src.robustBufferAccessUpdateAfterBind;
+    quadDivergentImplicitLod = src.quadDivergentImplicitLod;
+    maxPerStageDescriptorUpdateAfterBindSamplers = src.maxPerStageDescriptorUpdateAfterBindSamplers;
+    maxPerStageDescriptorUpdateAfterBindUniformBuffers = src.maxPerStageDescriptorUpdateAfterBindUniformBuffers;
+    maxPerStageDescriptorUpdateAfterBindStorageBuffers = src.maxPerStageDescriptorUpdateAfterBindStorageBuffers;
+    maxPerStageDescriptorUpdateAfterBindSampledImages = src.maxPerStageDescriptorUpdateAfterBindSampledImages;
+    maxPerStageDescriptorUpdateAfterBindStorageImages = src.maxPerStageDescriptorUpdateAfterBindStorageImages;
+    maxPerStageDescriptorUpdateAfterBindInputAttachments = src.maxPerStageDescriptorUpdateAfterBindInputAttachments;
+    maxPerStageUpdateAfterBindResources = src.maxPerStageUpdateAfterBindResources;
+    maxDescriptorSetUpdateAfterBindSamplers = src.maxDescriptorSetUpdateAfterBindSamplers;
+    maxDescriptorSetUpdateAfterBindUniformBuffers = src.maxDescriptorSetUpdateAfterBindUniformBuffers;
+    maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = src.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
+    maxDescriptorSetUpdateAfterBindStorageBuffers = src.maxDescriptorSetUpdateAfterBindStorageBuffers;
+    maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = src.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
+    maxDescriptorSetUpdateAfterBindSampledImages = src.maxDescriptorSetUpdateAfterBindSampledImages;
+    maxDescriptorSetUpdateAfterBindStorageImages = src.maxDescriptorSetUpdateAfterBindStorageImages;
+    maxDescriptorSetUpdateAfterBindInputAttachments = src.maxDescriptorSetUpdateAfterBindInputAttachments;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT& safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT::operator=(const safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    maxUpdateAfterBindDescriptorsInAllPools = src.maxUpdateAfterBindDescriptorsInAllPools;
+    shaderUniformBufferArrayNonUniformIndexingNative = src.shaderUniformBufferArrayNonUniformIndexingNative;
+    shaderSampledImageArrayNonUniformIndexingNative = src.shaderSampledImageArrayNonUniformIndexingNative;
+    shaderStorageBufferArrayNonUniformIndexingNative = src.shaderStorageBufferArrayNonUniformIndexingNative;
+    shaderStorageImageArrayNonUniformIndexingNative = src.shaderStorageImageArrayNonUniformIndexingNative;
+    shaderInputAttachmentArrayNonUniformIndexingNative = src.shaderInputAttachmentArrayNonUniformIndexingNative;
+    robustBufferAccessUpdateAfterBind = src.robustBufferAccessUpdateAfterBind;
+    quadDivergentImplicitLod = src.quadDivergentImplicitLod;
+    maxPerStageDescriptorUpdateAfterBindSamplers = src.maxPerStageDescriptorUpdateAfterBindSamplers;
+    maxPerStageDescriptorUpdateAfterBindUniformBuffers = src.maxPerStageDescriptorUpdateAfterBindUniformBuffers;
+    maxPerStageDescriptorUpdateAfterBindStorageBuffers = src.maxPerStageDescriptorUpdateAfterBindStorageBuffers;
+    maxPerStageDescriptorUpdateAfterBindSampledImages = src.maxPerStageDescriptorUpdateAfterBindSampledImages;
+    maxPerStageDescriptorUpdateAfterBindStorageImages = src.maxPerStageDescriptorUpdateAfterBindStorageImages;
+    maxPerStageDescriptorUpdateAfterBindInputAttachments = src.maxPerStageDescriptorUpdateAfterBindInputAttachments;
+    maxPerStageUpdateAfterBindResources = src.maxPerStageUpdateAfterBindResources;
+    maxDescriptorSetUpdateAfterBindSamplers = src.maxDescriptorSetUpdateAfterBindSamplers;
+    maxDescriptorSetUpdateAfterBindUniformBuffers = src.maxDescriptorSetUpdateAfterBindUniformBuffers;
+    maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = src.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
+    maxDescriptorSetUpdateAfterBindStorageBuffers = src.maxDescriptorSetUpdateAfterBindStorageBuffers;
+    maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = src.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
+    maxDescriptorSetUpdateAfterBindSampledImages = src.maxDescriptorSetUpdateAfterBindSampledImages;
+    maxDescriptorSetUpdateAfterBindStorageImages = src.maxDescriptorSetUpdateAfterBindStorageImages;
+    maxDescriptorSetUpdateAfterBindInputAttachments = src.maxDescriptorSetUpdateAfterBindInputAttachments;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT::~safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT::initialize(const VkPhysicalDeviceDescriptorIndexingPropertiesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    maxUpdateAfterBindDescriptorsInAllPools = in_struct->maxUpdateAfterBindDescriptorsInAllPools;
+    shaderUniformBufferArrayNonUniformIndexingNative = in_struct->shaderUniformBufferArrayNonUniformIndexingNative;
+    shaderSampledImageArrayNonUniformIndexingNative = in_struct->shaderSampledImageArrayNonUniformIndexingNative;
+    shaderStorageBufferArrayNonUniformIndexingNative = in_struct->shaderStorageBufferArrayNonUniformIndexingNative;
+    shaderStorageImageArrayNonUniformIndexingNative = in_struct->shaderStorageImageArrayNonUniformIndexingNative;
+    shaderInputAttachmentArrayNonUniformIndexingNative = in_struct->shaderInputAttachmentArrayNonUniformIndexingNative;
+    robustBufferAccessUpdateAfterBind = in_struct->robustBufferAccessUpdateAfterBind;
+    quadDivergentImplicitLod = in_struct->quadDivergentImplicitLod;
+    maxPerStageDescriptorUpdateAfterBindSamplers = in_struct->maxPerStageDescriptorUpdateAfterBindSamplers;
+    maxPerStageDescriptorUpdateAfterBindUniformBuffers = in_struct->maxPerStageDescriptorUpdateAfterBindUniformBuffers;
+    maxPerStageDescriptorUpdateAfterBindStorageBuffers = in_struct->maxPerStageDescriptorUpdateAfterBindStorageBuffers;
+    maxPerStageDescriptorUpdateAfterBindSampledImages = in_struct->maxPerStageDescriptorUpdateAfterBindSampledImages;
+    maxPerStageDescriptorUpdateAfterBindStorageImages = in_struct->maxPerStageDescriptorUpdateAfterBindStorageImages;
+    maxPerStageDescriptorUpdateAfterBindInputAttachments = in_struct->maxPerStageDescriptorUpdateAfterBindInputAttachments;
+    maxPerStageUpdateAfterBindResources = in_struct->maxPerStageUpdateAfterBindResources;
+    maxDescriptorSetUpdateAfterBindSamplers = in_struct->maxDescriptorSetUpdateAfterBindSamplers;
+    maxDescriptorSetUpdateAfterBindUniformBuffers = in_struct->maxDescriptorSetUpdateAfterBindUniformBuffers;
+    maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = in_struct->maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
+    maxDescriptorSetUpdateAfterBindStorageBuffers = in_struct->maxDescriptorSetUpdateAfterBindStorageBuffers;
+    maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = in_struct->maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
+    maxDescriptorSetUpdateAfterBindSampledImages = in_struct->maxDescriptorSetUpdateAfterBindSampledImages;
+    maxDescriptorSetUpdateAfterBindStorageImages = in_struct->maxDescriptorSetUpdateAfterBindStorageImages;
+    maxDescriptorSetUpdateAfterBindInputAttachments = in_struct->maxDescriptorSetUpdateAfterBindInputAttachments;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT::initialize(const safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT* src)
+{
+    sType = src->sType;
+    maxUpdateAfterBindDescriptorsInAllPools = src->maxUpdateAfterBindDescriptorsInAllPools;
+    shaderUniformBufferArrayNonUniformIndexingNative = src->shaderUniformBufferArrayNonUniformIndexingNative;
+    shaderSampledImageArrayNonUniformIndexingNative = src->shaderSampledImageArrayNonUniformIndexingNative;
+    shaderStorageBufferArrayNonUniformIndexingNative = src->shaderStorageBufferArrayNonUniformIndexingNative;
+    shaderStorageImageArrayNonUniformIndexingNative = src->shaderStorageImageArrayNonUniformIndexingNative;
+    shaderInputAttachmentArrayNonUniformIndexingNative = src->shaderInputAttachmentArrayNonUniformIndexingNative;
+    robustBufferAccessUpdateAfterBind = src->robustBufferAccessUpdateAfterBind;
+    quadDivergentImplicitLod = src->quadDivergentImplicitLod;
+    maxPerStageDescriptorUpdateAfterBindSamplers = src->maxPerStageDescriptorUpdateAfterBindSamplers;
+    maxPerStageDescriptorUpdateAfterBindUniformBuffers = src->maxPerStageDescriptorUpdateAfterBindUniformBuffers;
+    maxPerStageDescriptorUpdateAfterBindStorageBuffers = src->maxPerStageDescriptorUpdateAfterBindStorageBuffers;
+    maxPerStageDescriptorUpdateAfterBindSampledImages = src->maxPerStageDescriptorUpdateAfterBindSampledImages;
+    maxPerStageDescriptorUpdateAfterBindStorageImages = src->maxPerStageDescriptorUpdateAfterBindStorageImages;
+    maxPerStageDescriptorUpdateAfterBindInputAttachments = src->maxPerStageDescriptorUpdateAfterBindInputAttachments;
+    maxPerStageUpdateAfterBindResources = src->maxPerStageUpdateAfterBindResources;
+    maxDescriptorSetUpdateAfterBindSamplers = src->maxDescriptorSetUpdateAfterBindSamplers;
+    maxDescriptorSetUpdateAfterBindUniformBuffers = src->maxDescriptorSetUpdateAfterBindUniformBuffers;
+    maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = src->maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
+    maxDescriptorSetUpdateAfterBindStorageBuffers = src->maxDescriptorSetUpdateAfterBindStorageBuffers;
+    maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = src->maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
+    maxDescriptorSetUpdateAfterBindSampledImages = src->maxDescriptorSetUpdateAfterBindSampledImages;
+    maxDescriptorSetUpdateAfterBindStorageImages = src->maxDescriptorSetUpdateAfterBindStorageImages;
+    maxDescriptorSetUpdateAfterBindInputAttachments = src->maxDescriptorSetUpdateAfterBindInputAttachments;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT::safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT(const VkDescriptorSetVariableDescriptorCountAllocateInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    descriptorSetCount(in_struct->descriptorSetCount),
+    pDescriptorCounts(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pDescriptorCounts) {
+        pDescriptorCounts = new uint32_t[in_struct->descriptorSetCount];
+        memcpy ((void *)pDescriptorCounts, (void *)in_struct->pDescriptorCounts, sizeof(uint32_t)*in_struct->descriptorSetCount);
+    }
+}
+
+safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT::safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT() :
+    pNext(nullptr),
+    pDescriptorCounts(nullptr)
+{}
+
+safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT::safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT(const safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT& src)
+{
+    sType = src.sType;
+    descriptorSetCount = src.descriptorSetCount;
+    pDescriptorCounts = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pDescriptorCounts) {
+        pDescriptorCounts = new uint32_t[src.descriptorSetCount];
+        memcpy ((void *)pDescriptorCounts, (void *)src.pDescriptorCounts, sizeof(uint32_t)*src.descriptorSetCount);
+    }
+}
+
+safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT& safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT::operator=(const safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pDescriptorCounts)
+        delete[] pDescriptorCounts;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    descriptorSetCount = src.descriptorSetCount;
+    pDescriptorCounts = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pDescriptorCounts) {
+        pDescriptorCounts = new uint32_t[src.descriptorSetCount];
+        memcpy ((void *)pDescriptorCounts, (void *)src.pDescriptorCounts, sizeof(uint32_t)*src.descriptorSetCount);
+    }
+
+    return *this;
+}
+
+safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT::~safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT()
+{
+    if (pDescriptorCounts)
+        delete[] pDescriptorCounts;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT::initialize(const VkDescriptorSetVariableDescriptorCountAllocateInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    descriptorSetCount = in_struct->descriptorSetCount;
+    pDescriptorCounts = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pDescriptorCounts) {
+        pDescriptorCounts = new uint32_t[in_struct->descriptorSetCount];
+        memcpy ((void *)pDescriptorCounts, (void *)in_struct->pDescriptorCounts, sizeof(uint32_t)*in_struct->descriptorSetCount);
+    }
+}
+
+void safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT::initialize(const safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT* src)
+{
+    sType = src->sType;
+    descriptorSetCount = src->descriptorSetCount;
+    pDescriptorCounts = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pDescriptorCounts) {
+        pDescriptorCounts = new uint32_t[src->descriptorSetCount];
+        memcpy ((void *)pDescriptorCounts, (void *)src->pDescriptorCounts, sizeof(uint32_t)*src->descriptorSetCount);
+    }
+}
+
+safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT::safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT(const VkDescriptorSetVariableDescriptorCountLayoutSupportEXT* in_struct) :
+    sType(in_struct->sType),
+    maxVariableDescriptorCount(in_struct->maxVariableDescriptorCount)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT::safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT::safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT(const safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT& src)
+{
+    sType = src.sType;
+    maxVariableDescriptorCount = src.maxVariableDescriptorCount;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT& safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT::operator=(const safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    maxVariableDescriptorCount = src.maxVariableDescriptorCount;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT::~safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT::initialize(const VkDescriptorSetVariableDescriptorCountLayoutSupportEXT* in_struct)
+{
+    sType = in_struct->sType;
+    maxVariableDescriptorCount = in_struct->maxVariableDescriptorCount;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT::initialize(const safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT* src)
+{
+    sType = src->sType;
+    maxVariableDescriptorCount = src->maxVariableDescriptorCount;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkShadingRatePaletteNV::safe_VkShadingRatePaletteNV(const VkShadingRatePaletteNV* in_struct) :
+    shadingRatePaletteEntryCount(in_struct->shadingRatePaletteEntryCount),
+    pShadingRatePaletteEntries(nullptr)
+{
+    if (in_struct->pShadingRatePaletteEntries) {
+        pShadingRatePaletteEntries = new VkShadingRatePaletteEntryNV[in_struct->shadingRatePaletteEntryCount];
+        memcpy ((void *)pShadingRatePaletteEntries, (void *)in_struct->pShadingRatePaletteEntries, sizeof(VkShadingRatePaletteEntryNV)*in_struct->shadingRatePaletteEntryCount);
+    }
+}
+
+safe_VkShadingRatePaletteNV::safe_VkShadingRatePaletteNV() :
+    pShadingRatePaletteEntries(nullptr)
+{}
+
+safe_VkShadingRatePaletteNV::safe_VkShadingRatePaletteNV(const safe_VkShadingRatePaletteNV& src)
+{
+    shadingRatePaletteEntryCount = src.shadingRatePaletteEntryCount;
+    pShadingRatePaletteEntries = nullptr;
+    if (src.pShadingRatePaletteEntries) {
+        pShadingRatePaletteEntries = new VkShadingRatePaletteEntryNV[src.shadingRatePaletteEntryCount];
+        memcpy ((void *)pShadingRatePaletteEntries, (void *)src.pShadingRatePaletteEntries, sizeof(VkShadingRatePaletteEntryNV)*src.shadingRatePaletteEntryCount);
+    }
+}
+
+safe_VkShadingRatePaletteNV& safe_VkShadingRatePaletteNV::operator=(const safe_VkShadingRatePaletteNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pShadingRatePaletteEntries)
+        delete[] pShadingRatePaletteEntries;
+
+    shadingRatePaletteEntryCount = src.shadingRatePaletteEntryCount;
+    pShadingRatePaletteEntries = nullptr;
+    if (src.pShadingRatePaletteEntries) {
+        pShadingRatePaletteEntries = new VkShadingRatePaletteEntryNV[src.shadingRatePaletteEntryCount];
+        memcpy ((void *)pShadingRatePaletteEntries, (void *)src.pShadingRatePaletteEntries, sizeof(VkShadingRatePaletteEntryNV)*src.shadingRatePaletteEntryCount);
+    }
+
+    return *this;
+}
+
+safe_VkShadingRatePaletteNV::~safe_VkShadingRatePaletteNV()
+{
+    if (pShadingRatePaletteEntries)
+        delete[] pShadingRatePaletteEntries;
+}
+
+void safe_VkShadingRatePaletteNV::initialize(const VkShadingRatePaletteNV* in_struct)
+{
+    shadingRatePaletteEntryCount = in_struct->shadingRatePaletteEntryCount;
+    pShadingRatePaletteEntries = nullptr;
+    if (in_struct->pShadingRatePaletteEntries) {
+        pShadingRatePaletteEntries = new VkShadingRatePaletteEntryNV[in_struct->shadingRatePaletteEntryCount];
+        memcpy ((void *)pShadingRatePaletteEntries, (void *)in_struct->pShadingRatePaletteEntries, sizeof(VkShadingRatePaletteEntryNV)*in_struct->shadingRatePaletteEntryCount);
+    }
+}
+
+void safe_VkShadingRatePaletteNV::initialize(const safe_VkShadingRatePaletteNV* src)
+{
+    shadingRatePaletteEntryCount = src->shadingRatePaletteEntryCount;
+    pShadingRatePaletteEntries = nullptr;
+    if (src->pShadingRatePaletteEntries) {
+        pShadingRatePaletteEntries = new VkShadingRatePaletteEntryNV[src->shadingRatePaletteEntryCount];
+        memcpy ((void *)pShadingRatePaletteEntries, (void *)src->pShadingRatePaletteEntries, sizeof(VkShadingRatePaletteEntryNV)*src->shadingRatePaletteEntryCount);
+    }
+}
+
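+// Arrays of nested safe_* structs (such as pShadingRatePalettes below) are
+// allocated with new[] and filled element-by-element via initialize(), so each
+// element's own pointer members are deep-copied rather than memcpy'd.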
+safe_VkPipelineViewportShadingRateImageStateCreateInfoNV::safe_VkPipelineViewportShadingRateImageStateCreateInfoNV(const VkPipelineViewportShadingRateImageStateCreateInfoNV* in_struct) :
+    sType(in_struct->sType),
+    shadingRateImageEnable(in_struct->shadingRateImageEnable),
+    viewportCount(in_struct->viewportCount),
+    pShadingRatePalettes(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (viewportCount && in_struct->pShadingRatePalettes) {
+        pShadingRatePalettes = new safe_VkShadingRatePaletteNV[viewportCount];
+        for (uint32_t i = 0; i < viewportCount; ++i) {
+            pShadingRatePalettes[i].initialize(&in_struct->pShadingRatePalettes[i]);
+        }
+    }
+}
+
+safe_VkPipelineViewportShadingRateImageStateCreateInfoNV::safe_VkPipelineViewportShadingRateImageStateCreateInfoNV() :
+    pNext(nullptr),
+    pShadingRatePalettes(nullptr)
+{}
+
+safe_VkPipelineViewportShadingRateImageStateCreateInfoNV::safe_VkPipelineViewportShadingRateImageStateCreateInfoNV(const safe_VkPipelineViewportShadingRateImageStateCreateInfoNV& src)
+{
+    sType = src.sType;
+    shadingRateImageEnable = src.shadingRateImageEnable;
+    viewportCount = src.viewportCount;
+    pShadingRatePalettes = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (viewportCount && src.pShadingRatePalettes) {
+        pShadingRatePalettes = new safe_VkShadingRatePaletteNV[viewportCount];
+        for (uint32_t i = 0; i < viewportCount; ++i) {
+            pShadingRatePalettes[i].initialize(&src.pShadingRatePalettes[i]);
+        }
+    }
+}
+
+safe_VkPipelineViewportShadingRateImageStateCreateInfoNV& safe_VkPipelineViewportShadingRateImageStateCreateInfoNV::operator=(const safe_VkPipelineViewportShadingRateImageStateCreateInfoNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pShadingRatePalettes)
+        delete[] pShadingRatePalettes;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    shadingRateImageEnable = src.shadingRateImageEnable;
+    viewportCount = src.viewportCount;
+    pShadingRatePalettes = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (viewportCount && src.pShadingRatePalettes) {
+        pShadingRatePalettes = new safe_VkShadingRatePaletteNV[viewportCount];
+        for (uint32_t i = 0; i < viewportCount; ++i) {
+            pShadingRatePalettes[i].initialize(&src.pShadingRatePalettes[i]);
+        }
+    }
+
+    return *this;
+}
+
+safe_VkPipelineViewportShadingRateImageStateCreateInfoNV::~safe_VkPipelineViewportShadingRateImageStateCreateInfoNV()
+{
+    if (pShadingRatePalettes)
+        delete[] pShadingRatePalettes;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineViewportShadingRateImageStateCreateInfoNV::initialize(const VkPipelineViewportShadingRateImageStateCreateInfoNV* in_struct)
+{
+    sType = in_struct->sType;
+    shadingRateImageEnable = in_struct->shadingRateImageEnable;
+    viewportCount = in_struct->viewportCount;
+    pShadingRatePalettes = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (viewportCount && in_struct->pShadingRatePalettes) {
+        pShadingRatePalettes = new safe_VkShadingRatePaletteNV[viewportCount];
+        for (uint32_t i = 0; i < viewportCount; ++i) {
+            pShadingRatePalettes[i].initialize(&in_struct->pShadingRatePalettes[i]);
+        }
+    }
+}
+
+void safe_VkPipelineViewportShadingRateImageStateCreateInfoNV::initialize(const safe_VkPipelineViewportShadingRateImageStateCreateInfoNV* src)
+{
+    sType = src->sType;
+    shadingRateImageEnable = src->shadingRateImageEnable;
+    viewportCount = src->viewportCount;
+    pShadingRatePalettes = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (viewportCount && src->pShadingRatePalettes) {
+        pShadingRatePalettes = new safe_VkShadingRatePaletteNV[viewportCount];
+        for (uint32_t i = 0; i < viewportCount; ++i) {
+            pShadingRatePalettes[i].initialize(&src->pShadingRatePalettes[i]);
+        }
+    }
+}
+
+safe_VkPhysicalDeviceShadingRateImageFeaturesNV::safe_VkPhysicalDeviceShadingRateImageFeaturesNV(const VkPhysicalDeviceShadingRateImageFeaturesNV* in_struct) :
+    sType(in_struct->sType),
+    shadingRateImage(in_struct->shadingRateImage),
+    shadingRateCoarseSampleOrder(in_struct->shadingRateCoarseSampleOrder)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceShadingRateImageFeaturesNV::safe_VkPhysicalDeviceShadingRateImageFeaturesNV() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceShadingRateImageFeaturesNV::safe_VkPhysicalDeviceShadingRateImageFeaturesNV(const safe_VkPhysicalDeviceShadingRateImageFeaturesNV& src)
+{
+    sType = src.sType;
+    shadingRateImage = src.shadingRateImage;
+    shadingRateCoarseSampleOrder = src.shadingRateCoarseSampleOrder;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceShadingRateImageFeaturesNV& safe_VkPhysicalDeviceShadingRateImageFeaturesNV::operator=(const safe_VkPhysicalDeviceShadingRateImageFeaturesNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    shadingRateImage = src.shadingRateImage;
+    shadingRateCoarseSampleOrder = src.shadingRateCoarseSampleOrder;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceShadingRateImageFeaturesNV::~safe_VkPhysicalDeviceShadingRateImageFeaturesNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceShadingRateImageFeaturesNV::initialize(const VkPhysicalDeviceShadingRateImageFeaturesNV* in_struct)
+{
+    sType = in_struct->sType;
+    shadingRateImage = in_struct->shadingRateImage;
+    shadingRateCoarseSampleOrder = in_struct->shadingRateCoarseSampleOrder;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceShadingRateImageFeaturesNV::initialize(const safe_VkPhysicalDeviceShadingRateImageFeaturesNV* src)
+{
+    sType = src->sType;
+    shadingRateImage = src->shadingRateImage;
+    shadingRateCoarseSampleOrder = src->shadingRateCoarseSampleOrder;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceShadingRateImagePropertiesNV::safe_VkPhysicalDeviceShadingRateImagePropertiesNV(const VkPhysicalDeviceShadingRateImagePropertiesNV* in_struct) :
+    sType(in_struct->sType),
+    shadingRateTexelSize(in_struct->shadingRateTexelSize),
+    shadingRatePaletteSize(in_struct->shadingRatePaletteSize),
+    shadingRateMaxCoarseSamples(in_struct->shadingRateMaxCoarseSamples)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceShadingRateImagePropertiesNV::safe_VkPhysicalDeviceShadingRateImagePropertiesNV() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceShadingRateImagePropertiesNV::safe_VkPhysicalDeviceShadingRateImagePropertiesNV(const safe_VkPhysicalDeviceShadingRateImagePropertiesNV& src)
+{
+    sType = src.sType;
+    shadingRateTexelSize = src.shadingRateTexelSize;
+    shadingRatePaletteSize = src.shadingRatePaletteSize;
+    shadingRateMaxCoarseSamples = src.shadingRateMaxCoarseSamples;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceShadingRateImagePropertiesNV& safe_VkPhysicalDeviceShadingRateImagePropertiesNV::operator=(const safe_VkPhysicalDeviceShadingRateImagePropertiesNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    shadingRateTexelSize = src.shadingRateTexelSize;
+    shadingRatePaletteSize = src.shadingRatePaletteSize;
+    shadingRateMaxCoarseSamples = src.shadingRateMaxCoarseSamples;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceShadingRateImagePropertiesNV::~safe_VkPhysicalDeviceShadingRateImagePropertiesNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceShadingRateImagePropertiesNV::initialize(const VkPhysicalDeviceShadingRateImagePropertiesNV* in_struct)
+{
+    sType = in_struct->sType;
+    shadingRateTexelSize = in_struct->shadingRateTexelSize;
+    shadingRatePaletteSize = in_struct->shadingRatePaletteSize;
+    shadingRateMaxCoarseSamples = in_struct->shadingRateMaxCoarseSamples;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceShadingRateImagePropertiesNV::initialize(const safe_VkPhysicalDeviceShadingRateImagePropertiesNV* src)
+{
+    sType = src->sType;
+    shadingRateTexelSize = src->shadingRateTexelSize;
+    shadingRatePaletteSize = src->shadingRatePaletteSize;
+    shadingRateMaxCoarseSamples = src->shadingRateMaxCoarseSamples;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkCoarseSampleOrderCustomNV::safe_VkCoarseSampleOrderCustomNV(const VkCoarseSampleOrderCustomNV* in_struct) :
+    shadingRate(in_struct->shadingRate),
+    sampleCount(in_struct->sampleCount),
+    sampleLocationCount(in_struct->sampleLocationCount),
+    pSampleLocations(nullptr)
+{
+    if (in_struct->pSampleLocations) {
+        pSampleLocations = new VkCoarseSampleLocationNV[in_struct->sampleLocationCount];
+        memcpy ((void *)pSampleLocations, (void *)in_struct->pSampleLocations, sizeof(VkCoarseSampleLocationNV)*in_struct->sampleLocationCount);
+    }
+}
+
+safe_VkCoarseSampleOrderCustomNV::safe_VkCoarseSampleOrderCustomNV() :
+    pSampleLocations(nullptr)
+{}
+
+safe_VkCoarseSampleOrderCustomNV::safe_VkCoarseSampleOrderCustomNV(const safe_VkCoarseSampleOrderCustomNV& src)
+{
+    shadingRate = src.shadingRate;
+    sampleCount = src.sampleCount;
+    sampleLocationCount = src.sampleLocationCount;
+    pSampleLocations = nullptr;
+    if (src.pSampleLocations) {
+        pSampleLocations = new VkCoarseSampleLocationNV[src.sampleLocationCount];
+        memcpy ((void *)pSampleLocations, (void *)src.pSampleLocations, sizeof(VkCoarseSampleLocationNV)*src.sampleLocationCount);
+    }
+}
+
+safe_VkCoarseSampleOrderCustomNV& safe_VkCoarseSampleOrderCustomNV::operator=(const safe_VkCoarseSampleOrderCustomNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pSampleLocations)
+        delete[] pSampleLocations;
+
+    shadingRate = src.shadingRate;
+    sampleCount = src.sampleCount;
+    sampleLocationCount = src.sampleLocationCount;
+    pSampleLocations = nullptr;
+    if (src.pSampleLocations) {
+        pSampleLocations = new VkCoarseSampleLocationNV[src.sampleLocationCount];
+        memcpy ((void *)pSampleLocations, (void *)src.pSampleLocations, sizeof(VkCoarseSampleLocationNV)*src.sampleLocationCount);
+    }
+
+    return *this;
+}
+
+safe_VkCoarseSampleOrderCustomNV::~safe_VkCoarseSampleOrderCustomNV()
+{
+    if (pSampleLocations)
+        delete[] pSampleLocations;
+}
+
+void safe_VkCoarseSampleOrderCustomNV::initialize(const VkCoarseSampleOrderCustomNV* in_struct)
+{
+    shadingRate = in_struct->shadingRate;
+    sampleCount = in_struct->sampleCount;
+    sampleLocationCount = in_struct->sampleLocationCount;
+    pSampleLocations = nullptr;
+    if (in_struct->pSampleLocations) {
+        pSampleLocations = new VkCoarseSampleLocationNV[in_struct->sampleLocationCount];
+        memcpy ((void *)pSampleLocations, (void *)in_struct->pSampleLocations, sizeof(VkCoarseSampleLocationNV)*in_struct->sampleLocationCount);
+    }
+}
+
+void safe_VkCoarseSampleOrderCustomNV::initialize(const safe_VkCoarseSampleOrderCustomNV* src)
+{
+    shadingRate = src->shadingRate;
+    sampleCount = src->sampleCount;
+    sampleLocationCount = src->sampleLocationCount;
+    pSampleLocations = nullptr;
+    if (src->pSampleLocations) {
+        pSampleLocations = new VkCoarseSampleLocationNV[src->sampleLocationCount];
+        memcpy ((void *)pSampleLocations, (void *)src->pSampleLocations, sizeof(VkCoarseSampleLocationNV)*src->sampleLocationCount);
+    }
+}
+
+safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV::safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV(const VkPipelineViewportCoarseSampleOrderStateCreateInfoNV* in_struct) :
+    sType(in_struct->sType),
+    sampleOrderType(in_struct->sampleOrderType),
+    customSampleOrderCount(in_struct->customSampleOrderCount),
+    pCustomSampleOrders(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (customSampleOrderCount && in_struct->pCustomSampleOrders) {
+        pCustomSampleOrders = new safe_VkCoarseSampleOrderCustomNV[customSampleOrderCount];
+        for (uint32_t i = 0; i < customSampleOrderCount; ++i) {
+            pCustomSampleOrders[i].initialize(&in_struct->pCustomSampleOrders[i]);
+        }
+    }
+}
+
+safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV::safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV() :
+    pNext(nullptr),
+    pCustomSampleOrders(nullptr)
+{}
+
+safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV::safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV(const safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV& src)
+{
+    sType = src.sType;
+    sampleOrderType = src.sampleOrderType;
+    customSampleOrderCount = src.customSampleOrderCount;
+    pCustomSampleOrders = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (customSampleOrderCount && src.pCustomSampleOrders) {
+        pCustomSampleOrders = new safe_VkCoarseSampleOrderCustomNV[customSampleOrderCount];
+        for (uint32_t i = 0; i < customSampleOrderCount; ++i) {
+            pCustomSampleOrders[i].initialize(&src.pCustomSampleOrders[i]);
+        }
+    }
+}
+
+safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV& safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV::operator=(const safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pCustomSampleOrders)
+        delete[] pCustomSampleOrders;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    sampleOrderType = src.sampleOrderType;
+    customSampleOrderCount = src.customSampleOrderCount;
+    pCustomSampleOrders = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (customSampleOrderCount && src.pCustomSampleOrders) {
+        pCustomSampleOrders = new safe_VkCoarseSampleOrderCustomNV[customSampleOrderCount];
+        for (uint32_t i = 0; i < customSampleOrderCount; ++i) {
+            pCustomSampleOrders[i].initialize(&src.pCustomSampleOrders[i]);
+        }
+    }
+
+    return *this;
+}
+
+safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV::~safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV()
+{
+    if (pCustomSampleOrders)
+        delete[] pCustomSampleOrders;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV::initialize(const VkPipelineViewportCoarseSampleOrderStateCreateInfoNV* in_struct)
+{
+    sType = in_struct->sType;
+    sampleOrderType = in_struct->sampleOrderType;
+    customSampleOrderCount = in_struct->customSampleOrderCount;
+    pCustomSampleOrders = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (customSampleOrderCount && in_struct->pCustomSampleOrders) {
+        pCustomSampleOrders = new safe_VkCoarseSampleOrderCustomNV[customSampleOrderCount];
+        for (uint32_t i = 0; i < customSampleOrderCount; ++i) {
+            pCustomSampleOrders[i].initialize(&in_struct->pCustomSampleOrders[i]);
+        }
+    }
+}
+
+void safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV::initialize(const safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV* src)
+{
+    sType = src->sType;
+    sampleOrderType = src->sampleOrderType;
+    customSampleOrderCount = src->customSampleOrderCount;
+    pCustomSampleOrders = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (customSampleOrderCount && src->pCustomSampleOrders) {
+        pCustomSampleOrders = new safe_VkCoarseSampleOrderCustomNV[customSampleOrderCount];
+        for (uint32_t i = 0; i < customSampleOrderCount; ++i) {
+            pCustomSampleOrders[i].initialize(&src->pCustomSampleOrders[i]);
+        }
+    }
+}
+
+safe_VkRayTracingShaderGroupCreateInfoNV::safe_VkRayTracingShaderGroupCreateInfoNV(const VkRayTracingShaderGroupCreateInfoNV* in_struct) :
+    sType(in_struct->sType),
+    type(in_struct->type),
+    generalShader(in_struct->generalShader),
+    closestHitShader(in_struct->closestHitShader),
+    anyHitShader(in_struct->anyHitShader),
+    intersectionShader(in_struct->intersectionShader)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkRayTracingShaderGroupCreateInfoNV::safe_VkRayTracingShaderGroupCreateInfoNV() :
+    pNext(nullptr)
+{}
+
+safe_VkRayTracingShaderGroupCreateInfoNV::safe_VkRayTracingShaderGroupCreateInfoNV(const safe_VkRayTracingShaderGroupCreateInfoNV& src)
+{
+    sType = src.sType;
+    type = src.type;
+    generalShader = src.generalShader;
+    closestHitShader = src.closestHitShader;
+    anyHitShader = src.anyHitShader;
+    intersectionShader = src.intersectionShader;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkRayTracingShaderGroupCreateInfoNV& safe_VkRayTracingShaderGroupCreateInfoNV::operator=(const safe_VkRayTracingShaderGroupCreateInfoNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    type = src.type;
+    generalShader = src.generalShader;
+    closestHitShader = src.closestHitShader;
+    anyHitShader = src.anyHitShader;
+    intersectionShader = src.intersectionShader;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkRayTracingShaderGroupCreateInfoNV::~safe_VkRayTracingShaderGroupCreateInfoNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkRayTracingShaderGroupCreateInfoNV::initialize(const VkRayTracingShaderGroupCreateInfoNV* in_struct)
+{
+    sType = in_struct->sType;
+    type = in_struct->type;
+    generalShader = in_struct->generalShader;
+    closestHitShader = in_struct->closestHitShader;
+    anyHitShader = in_struct->anyHitShader;
+    intersectionShader = in_struct->intersectionShader;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkRayTracingShaderGroupCreateInfoNV::initialize(const safe_VkRayTracingShaderGroupCreateInfoNV* src)
+{
+    sType = src->sType;
+    type = src->type;
+    generalShader = src->generalShader;
+    closestHitShader = src->closestHitShader;
+    anyHitShader = src->anyHitShader;
+    intersectionShader = src->intersectionShader;
+    pNext = SafePnextCopy(src->pNext);
+}
+
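+// safe_VkRayTracingPipelineCreateInfoNV below owns two nested safe_* arrays,
+// pStages and pGroups; each is allocated and populated element-wise and
+// released both in operator= and in the destructor.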
+safe_VkRayTracingPipelineCreateInfoNV::safe_VkRayTracingPipelineCreateInfoNV(const VkRayTracingPipelineCreateInfoNV* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    stageCount(in_struct->stageCount),
+    pStages(nullptr),
+    groupCount(in_struct->groupCount),
+    pGroups(nullptr),
+    maxRecursionDepth(in_struct->maxRecursionDepth),
+    layout(in_struct->layout),
+    basePipelineHandle(in_struct->basePipelineHandle),
+    basePipelineIndex(in_struct->basePipelineIndex)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (stageCount && in_struct->pStages) {
+        pStages = new safe_VkPipelineShaderStageCreateInfo[stageCount];
+        for (uint32_t i = 0; i < stageCount; ++i) {
+            pStages[i].initialize(&in_struct->pStages[i]);
+        }
+    }
+    if (groupCount && in_struct->pGroups) {
+        pGroups = new safe_VkRayTracingShaderGroupCreateInfoNV[groupCount];
+        for (uint32_t i = 0; i < groupCount; ++i) {
+            pGroups[i].initialize(&in_struct->pGroups[i]);
+        }
+    }
+}
+
+safe_VkRayTracingPipelineCreateInfoNV::safe_VkRayTracingPipelineCreateInfoNV() :
+    pNext(nullptr),
+    pStages(nullptr),
+    pGroups(nullptr)
+{}
+
+safe_VkRayTracingPipelineCreateInfoNV::safe_VkRayTracingPipelineCreateInfoNV(const safe_VkRayTracingPipelineCreateInfoNV& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    stageCount = src.stageCount;
+    pStages = nullptr;
+    groupCount = src.groupCount;
+    pGroups = nullptr;
+    maxRecursionDepth = src.maxRecursionDepth;
+    layout = src.layout;
+    basePipelineHandle = src.basePipelineHandle;
+    basePipelineIndex = src.basePipelineIndex;
+    pNext = SafePnextCopy(src.pNext);
+    if (stageCount && src.pStages) {
+        pStages = new safe_VkPipelineShaderStageCreateInfo[stageCount];
+        for (uint32_t i = 0; i < stageCount; ++i) {
+            pStages[i].initialize(&src.pStages[i]);
+        }
+    }
+    if (groupCount && src.pGroups) {
+        pGroups = new safe_VkRayTracingShaderGroupCreateInfoNV[groupCount];
+        for (uint32_t i = 0; i < groupCount; ++i) {
+            pGroups[i].initialize(&src.pGroups[i]);
+        }
+    }
+}
+
+safe_VkRayTracingPipelineCreateInfoNV& safe_VkRayTracingPipelineCreateInfoNV::operator=(const safe_VkRayTracingPipelineCreateInfoNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pStages)
+        delete[] pStages;
+    if (pGroups)
+        delete[] pGroups;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    stageCount = src.stageCount;
+    pStages = nullptr;
+    groupCount = src.groupCount;
+    pGroups = nullptr;
+    maxRecursionDepth = src.maxRecursionDepth;
+    layout = src.layout;
+    basePipelineHandle = src.basePipelineHandle;
+    basePipelineIndex = src.basePipelineIndex;
+    pNext = SafePnextCopy(src.pNext);
+    if (stageCount && src.pStages) {
+        pStages = new safe_VkPipelineShaderStageCreateInfo[stageCount];
+        for (uint32_t i = 0; i < stageCount; ++i) {
+            pStages[i].initialize(&src.pStages[i]);
+        }
+    }
+    if (groupCount && src.pGroups) {
+        pGroups = new safe_VkRayTracingShaderGroupCreateInfoNV[groupCount];
+        for (uint32_t i = 0; i < groupCount; ++i) {
+            pGroups[i].initialize(&src.pGroups[i]);
+        }
+    }
+
+    return *this;
+}
+
+safe_VkRayTracingPipelineCreateInfoNV::~safe_VkRayTracingPipelineCreateInfoNV()
+{
+    if (pStages)
+        delete[] pStages;
+    if (pGroups)
+        delete[] pGroups;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkRayTracingPipelineCreateInfoNV::initialize(const VkRayTracingPipelineCreateInfoNV* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    stageCount = in_struct->stageCount;
+    pStages = nullptr;
+    groupCount = in_struct->groupCount;
+    pGroups = nullptr;
+    maxRecursionDepth = in_struct->maxRecursionDepth;
+    layout = in_struct->layout;
+    basePipelineHandle = in_struct->basePipelineHandle;
+    basePipelineIndex = in_struct->basePipelineIndex;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (stageCount && in_struct->pStages) {
+        pStages = new safe_VkPipelineShaderStageCreateInfo[stageCount];
+        for (uint32_t i = 0; i < stageCount; ++i) {
+            pStages[i].initialize(&in_struct->pStages[i]);
+        }
+    }
+    if (groupCount && in_struct->pGroups) {
+        pGroups = new safe_VkRayTracingShaderGroupCreateInfoNV[groupCount];
+        for (uint32_t i = 0; i < groupCount; ++i) {
+            pGroups[i].initialize(&in_struct->pGroups[i]);
+        }
+    }
+}
+
+void safe_VkRayTracingPipelineCreateInfoNV::initialize(const safe_VkRayTracingPipelineCreateInfoNV* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    stageCount = src->stageCount;
+    pStages = nullptr;
+    groupCount = src->groupCount;
+    pGroups = nullptr;
+    maxRecursionDepth = src->maxRecursionDepth;
+    layout = src->layout;
+    basePipelineHandle = src->basePipelineHandle;
+    basePipelineIndex = src->basePipelineIndex;
+    pNext = SafePnextCopy(src->pNext);
+    if (stageCount && src->pStages) {
+        pStages = new safe_VkPipelineShaderStageCreateInfo[stageCount];
+        for (uint32_t i = 0; i < stageCount; ++i) {
+            pStages[i].initialize(&src->pStages[i]);
+        }
+    }
+    if (groupCount && src->pGroups) {
+        pGroups = new safe_VkRayTracingShaderGroupCreateInfoNV[groupCount];
+        for (uint32_t i = 0; i < groupCount; ++i) {
+            pGroups[i].initialize(&src->pGroups[i]);
+        }
+    }
+}
+
+safe_VkGeometryTrianglesNV::safe_VkGeometryTrianglesNV(const VkGeometryTrianglesNV* in_struct) :
+    sType(in_struct->sType),
+    vertexData(in_struct->vertexData),
+    vertexOffset(in_struct->vertexOffset),
+    vertexCount(in_struct->vertexCount),
+    vertexStride(in_struct->vertexStride),
+    vertexFormat(in_struct->vertexFormat),
+    indexData(in_struct->indexData),
+    indexOffset(in_struct->indexOffset),
+    indexCount(in_struct->indexCount),
+    indexType(in_struct->indexType),
+    transformData(in_struct->transformData),
+    transformOffset(in_struct->transformOffset)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkGeometryTrianglesNV::safe_VkGeometryTrianglesNV() :
+    pNext(nullptr)
+{}
+
+safe_VkGeometryTrianglesNV::safe_VkGeometryTrianglesNV(const safe_VkGeometryTrianglesNV& src)
+{
+    sType = src.sType;
+    vertexData = src.vertexData;
+    vertexOffset = src.vertexOffset;
+    vertexCount = src.vertexCount;
+    vertexStride = src.vertexStride;
+    vertexFormat = src.vertexFormat;
+    indexData = src.indexData;
+    indexOffset = src.indexOffset;
+    indexCount = src.indexCount;
+    indexType = src.indexType;
+    transformData = src.transformData;
+    transformOffset = src.transformOffset;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkGeometryTrianglesNV& safe_VkGeometryTrianglesNV::operator=(const safe_VkGeometryTrianglesNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    vertexData = src.vertexData;
+    vertexOffset = src.vertexOffset;
+    vertexCount = src.vertexCount;
+    vertexStride = src.vertexStride;
+    vertexFormat = src.vertexFormat;
+    indexData = src.indexData;
+    indexOffset = src.indexOffset;
+    indexCount = src.indexCount;
+    indexType = src.indexType;
+    transformData = src.transformData;
+    transformOffset = src.transformOffset;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkGeometryTrianglesNV::~safe_VkGeometryTrianglesNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkGeometryTrianglesNV::initialize(const VkGeometryTrianglesNV* in_struct)
+{
+    sType = in_struct->sType;
+    vertexData = in_struct->vertexData;
+    vertexOffset = in_struct->vertexOffset;
+    vertexCount = in_struct->vertexCount;
+    vertexStride = in_struct->vertexStride;
+    vertexFormat = in_struct->vertexFormat;
+    indexData = in_struct->indexData;
+    indexOffset = in_struct->indexOffset;
+    indexCount = in_struct->indexCount;
+    indexType = in_struct->indexType;
+    transformData = in_struct->transformData;
+    transformOffset = in_struct->transformOffset;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkGeometryTrianglesNV::initialize(const safe_VkGeometryTrianglesNV* src)
+{
+    sType = src->sType;
+    vertexData = src->vertexData;
+    vertexOffset = src->vertexOffset;
+    vertexCount = src->vertexCount;
+    vertexStride = src->vertexStride;
+    vertexFormat = src->vertexFormat;
+    indexData = src->indexData;
+    indexOffset = src->indexOffset;
+    indexCount = src->indexCount;
+    indexType = src->indexType;
+    transformData = src->transformData;
+    transformOffset = src->transformOffset;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkGeometryAABBNV::safe_VkGeometryAABBNV(const VkGeometryAABBNV* in_struct) :
+    sType(in_struct->sType),
+    aabbData(in_struct->aabbData),
+    numAABBs(in_struct->numAABBs),
+    stride(in_struct->stride),
+    offset(in_struct->offset)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkGeometryAABBNV::safe_VkGeometryAABBNV() :
+    pNext(nullptr)
+{}
+
+safe_VkGeometryAABBNV::safe_VkGeometryAABBNV(const safe_VkGeometryAABBNV& src)
+{
+    sType = src.sType;
+    aabbData = src.aabbData;
+    numAABBs = src.numAABBs;
+    stride = src.stride;
+    offset = src.offset;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkGeometryAABBNV& safe_VkGeometryAABBNV::operator=(const safe_VkGeometryAABBNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    aabbData = src.aabbData;
+    numAABBs = src.numAABBs;
+    stride = src.stride;
+    offset = src.offset;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkGeometryAABBNV::~safe_VkGeometryAABBNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkGeometryAABBNV::initialize(const VkGeometryAABBNV* in_struct)
+{
+    sType = in_struct->sType;
+    aabbData = in_struct->aabbData;
+    numAABBs = in_struct->numAABBs;
+    stride = in_struct->stride;
+    offset = in_struct->offset;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkGeometryAABBNV::initialize(const safe_VkGeometryAABBNV* src)
+{
+    sType = src->sType;
+    aabbData = src->aabbData;
+    numAABBs = src->numAABBs;
+    stride = src->stride;
+    offset = src->offset;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkGeometryNV::safe_VkGeometryNV(const VkGeometryNV* in_struct) :
+    sType(in_struct->sType),
+    geometryType(in_struct->geometryType),
+    geometry(in_struct->geometry),
+    flags(in_struct->flags)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkGeometryNV::safe_VkGeometryNV() :
+    pNext(nullptr)
+{}
+
+safe_VkGeometryNV::safe_VkGeometryNV(const safe_VkGeometryNV& src)
+{
+    sType = src.sType;
+    geometryType = src.geometryType;
+    geometry = src.geometry;
+    flags = src.flags;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkGeometryNV& safe_VkGeometryNV::operator=(const safe_VkGeometryNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    geometryType = src.geometryType;
+    geometry = src.geometry;
+    flags = src.flags;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkGeometryNV::~safe_VkGeometryNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkGeometryNV::initialize(const VkGeometryNV* in_struct)
+{
+    sType = in_struct->sType;
+    geometryType = in_struct->geometryType;
+    geometry = in_struct->geometry;
+    flags = in_struct->flags;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkGeometryNV::initialize(const safe_VkGeometryNV* src)
+{
+    sType = src->sType;
+    geometryType = src->geometryType;
+    geometry = src->geometry;
+    flags = src->flags;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkAccelerationStructureInfoNV::safe_VkAccelerationStructureInfoNV(const VkAccelerationStructureInfoNV* in_struct) :
+    sType(in_struct->sType),
+    type(in_struct->type),
+    flags(in_struct->flags),
+    instanceCount(in_struct->instanceCount),
+    geometryCount(in_struct->geometryCount),
+    pGeometries(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (geometryCount && in_struct->pGeometries) {
+        pGeometries = new safe_VkGeometryNV[geometryCount];
+        for (uint32_t i = 0; i < geometryCount; ++i) {
+            pGeometries[i].initialize(&in_struct->pGeometries[i]);
+        }
+    }
+}
+
+safe_VkAccelerationStructureInfoNV::safe_VkAccelerationStructureInfoNV() :
+    pNext(nullptr),
+    pGeometries(nullptr)
+{}
+
+safe_VkAccelerationStructureInfoNV::safe_VkAccelerationStructureInfoNV(const safe_VkAccelerationStructureInfoNV& src)
+{
+    sType = src.sType;
+    type = src.type;
+    flags = src.flags;
+    instanceCount = src.instanceCount;
+    geometryCount = src.geometryCount;
+    pGeometries = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (geometryCount && src.pGeometries) {
+        pGeometries = new safe_VkGeometryNV[geometryCount];
+        for (uint32_t i = 0; i < geometryCount; ++i) {
+            pGeometries[i].initialize(&src.pGeometries[i]);
+        }
+    }
+}
+
+safe_VkAccelerationStructureInfoNV& safe_VkAccelerationStructureInfoNV::operator=(const safe_VkAccelerationStructureInfoNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pGeometries)
+        delete[] pGeometries;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    type = src.type;
+    flags = src.flags;
+    instanceCount = src.instanceCount;
+    geometryCount = src.geometryCount;
+    pGeometries = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (geometryCount && src.pGeometries) {
+        pGeometries = new safe_VkGeometryNV[geometryCount];
+        for (uint32_t i = 0; i < geometryCount; ++i) {
+            pGeometries[i].initialize(&src.pGeometries[i]);
+        }
+    }
+
+    return *this;
+}
+
+safe_VkAccelerationStructureInfoNV::~safe_VkAccelerationStructureInfoNV()
+{
+    if (pGeometries)
+        delete[] pGeometries;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkAccelerationStructureInfoNV::initialize(const VkAccelerationStructureInfoNV* in_struct)
+{
+    sType = in_struct->sType;
+    type = in_struct->type;
+    flags = in_struct->flags;
+    instanceCount = in_struct->instanceCount;
+    geometryCount = in_struct->geometryCount;
+    pGeometries = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (geometryCount && in_struct->pGeometries) {
+        pGeometries = new safe_VkGeometryNV[geometryCount];
+        for (uint32_t i = 0; i < geometryCount; ++i) {
+            pGeometries[i].initialize(&in_struct->pGeometries[i]);
+        }
+    }
+}
+
+void safe_VkAccelerationStructureInfoNV::initialize(const safe_VkAccelerationStructureInfoNV* src)
+{
+    sType = src->sType;
+    type = src->type;
+    flags = src->flags;
+    instanceCount = src->instanceCount;
+    geometryCount = src->geometryCount;
+    pGeometries = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (geometryCount && src->pGeometries) {
+        pGeometries = new safe_VkGeometryNV[geometryCount];
+        for (uint32_t i = 0; i < geometryCount; ++i) {
+            pGeometries[i].initialize(&src->pGeometries[i]);
+        }
+    }
+}
+
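+// safe_VkAccelerationStructureCreateInfoNV below holds its info member as a
+// safe_VkAccelerationStructureInfoNV by value (constructed from
+// &in_struct->info), so the geometry array is deep-copied transitively and
+// only the pNext chain needs to be freed here.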
+safe_VkAccelerationStructureCreateInfoNV::safe_VkAccelerationStructureCreateInfoNV(const VkAccelerationStructureCreateInfoNV* in_struct) :
+    sType(in_struct->sType),
+    compactedSize(in_struct->compactedSize),
+    info(&in_struct->info)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkAccelerationStructureCreateInfoNV::safe_VkAccelerationStructureCreateInfoNV() :
+    pNext(nullptr)
+{}
+
+safe_VkAccelerationStructureCreateInfoNV::safe_VkAccelerationStructureCreateInfoNV(const safe_VkAccelerationStructureCreateInfoNV& src)
+{
+    sType = src.sType;
+    compactedSize = src.compactedSize;
+    info.initialize(&src.info);
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkAccelerationStructureCreateInfoNV& safe_VkAccelerationStructureCreateInfoNV::operator=(const safe_VkAccelerationStructureCreateInfoNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    compactedSize = src.compactedSize;
+    info.initialize(&src.info);
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkAccelerationStructureCreateInfoNV::~safe_VkAccelerationStructureCreateInfoNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkAccelerationStructureCreateInfoNV::initialize(const VkAccelerationStructureCreateInfoNV* in_struct)
+{
+    sType = in_struct->sType;
+    compactedSize = in_struct->compactedSize;
+    info.initialize(&in_struct->info);
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkAccelerationStructureCreateInfoNV::initialize(const safe_VkAccelerationStructureCreateInfoNV* src)
+{
+    sType = src->sType;
+    compactedSize = src->compactedSize;
+    info.initialize(&src->info);
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkBindAccelerationStructureMemoryInfoNV::safe_VkBindAccelerationStructureMemoryInfoNV(const VkBindAccelerationStructureMemoryInfoNV* in_struct) :
+    sType(in_struct->sType),
+    accelerationStructure(in_struct->accelerationStructure),
+    memory(in_struct->memory),
+    memoryOffset(in_struct->memoryOffset),
+    deviceIndexCount(in_struct->deviceIndexCount),
+    pDeviceIndices(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pDeviceIndices) {
+        pDeviceIndices = new uint32_t[in_struct->deviceIndexCount];
+        memcpy ((void *)pDeviceIndices, (void *)in_struct->pDeviceIndices, sizeof(uint32_t)*in_struct->deviceIndexCount);
+    }
+}
+
+safe_VkBindAccelerationStructureMemoryInfoNV::safe_VkBindAccelerationStructureMemoryInfoNV() :
+    pNext(nullptr),
+    pDeviceIndices(nullptr)
+{}
+
+safe_VkBindAccelerationStructureMemoryInfoNV::safe_VkBindAccelerationStructureMemoryInfoNV(const safe_VkBindAccelerationStructureMemoryInfoNV& src)
+{
+    sType = src.sType;
+    accelerationStructure = src.accelerationStructure;
+    memory = src.memory;
+    memoryOffset = src.memoryOffset;
+    deviceIndexCount = src.deviceIndexCount;
+    pDeviceIndices = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pDeviceIndices) {
+        pDeviceIndices = new uint32_t[src.deviceIndexCount];
+        memcpy ((void *)pDeviceIndices, (void *)src.pDeviceIndices, sizeof(uint32_t)*src.deviceIndexCount);
+    }
+}
+
+safe_VkBindAccelerationStructureMemoryInfoNV& safe_VkBindAccelerationStructureMemoryInfoNV::operator=(const safe_VkBindAccelerationStructureMemoryInfoNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pDeviceIndices)
+        delete[] pDeviceIndices;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    accelerationStructure = src.accelerationStructure;
+    memory = src.memory;
+    memoryOffset = src.memoryOffset;
+    deviceIndexCount = src.deviceIndexCount;
+    pDeviceIndices = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pDeviceIndices) {
+        pDeviceIndices = new uint32_t[src.deviceIndexCount];
+        memcpy ((void *)pDeviceIndices, (void *)src.pDeviceIndices, sizeof(uint32_t)*src.deviceIndexCount);
+    }
+
+    return *this;
+}
+
+safe_VkBindAccelerationStructureMemoryInfoNV::~safe_VkBindAccelerationStructureMemoryInfoNV()
+{
+    if (pDeviceIndices)
+        delete[] pDeviceIndices;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkBindAccelerationStructureMemoryInfoNV::initialize(const VkBindAccelerationStructureMemoryInfoNV* in_struct)
+{
+    sType = in_struct->sType;
+    accelerationStructure = in_struct->accelerationStructure;
+    memory = in_struct->memory;
+    memoryOffset = in_struct->memoryOffset;
+    deviceIndexCount = in_struct->deviceIndexCount;
+    pDeviceIndices = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pDeviceIndices) {
+        pDeviceIndices = new uint32_t[in_struct->deviceIndexCount];
+        memcpy ((void *)pDeviceIndices, (void *)in_struct->pDeviceIndices, sizeof(uint32_t)*in_struct->deviceIndexCount);
+    }
+}
+
+void safe_VkBindAccelerationStructureMemoryInfoNV::initialize(const safe_VkBindAccelerationStructureMemoryInfoNV* src)
+{
+    sType = src->sType;
+    accelerationStructure = src->accelerationStructure;
+    memory = src->memory;
+    memoryOffset = src->memoryOffset;
+    deviceIndexCount = src->deviceIndexCount;
+    pDeviceIndices = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pDeviceIndices) {
+        pDeviceIndices = new uint32_t[src->deviceIndexCount];
+        memcpy ((void *)pDeviceIndices, (void *)src->pDeviceIndices, sizeof(uint32_t)*src->deviceIndexCount);
+    }
+}
+
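+// safe_VkWriteDescriptorSetAccelerationStructureNV below copies
+// pAccelerationStructures as a plain array of VkAccelerationStructureNV
+// handles; handles are opaque values, so element-wise assignment is enough and
+// no nested safe_* type is involved.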
+safe_VkWriteDescriptorSetAccelerationStructureNV::safe_VkWriteDescriptorSetAccelerationStructureNV(const VkWriteDescriptorSetAccelerationStructureNV* in_struct) :
+    sType(in_struct->sType),
+    accelerationStructureCount(in_struct->accelerationStructureCount),
+    pAccelerationStructures(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (accelerationStructureCount && in_struct->pAccelerationStructures) {
+        pAccelerationStructures = new VkAccelerationStructureNV[accelerationStructureCount];
+        for (uint32_t i = 0; i < accelerationStructureCount; ++i) {
+            pAccelerationStructures[i] = in_struct->pAccelerationStructures[i];
+        }
+    }
+}
+
+safe_VkWriteDescriptorSetAccelerationStructureNV::safe_VkWriteDescriptorSetAccelerationStructureNV() :
+    pNext(nullptr),
+    pAccelerationStructures(nullptr)
+{}
+
+safe_VkWriteDescriptorSetAccelerationStructureNV::safe_VkWriteDescriptorSetAccelerationStructureNV(const safe_VkWriteDescriptorSetAccelerationStructureNV& src)
+{
+    sType = src.sType;
+    accelerationStructureCount = src.accelerationStructureCount;
+    pAccelerationStructures = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (accelerationStructureCount && src.pAccelerationStructures) {
+        pAccelerationStructures = new VkAccelerationStructureNV[accelerationStructureCount];
+        for (uint32_t i = 0; i < accelerationStructureCount; ++i) {
+            pAccelerationStructures[i] = src.pAccelerationStructures[i];
+        }
+    }
+}
+
+safe_VkWriteDescriptorSetAccelerationStructureNV& safe_VkWriteDescriptorSetAccelerationStructureNV::operator=(const safe_VkWriteDescriptorSetAccelerationStructureNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pAccelerationStructures)
+        delete[] pAccelerationStructures;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    accelerationStructureCount = src.accelerationStructureCount;
+    pAccelerationStructures = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (accelerationStructureCount && src.pAccelerationStructures) {
+        pAccelerationStructures = new VkAccelerationStructureNV[accelerationStructureCount];
+        for (uint32_t i = 0; i < accelerationStructureCount; ++i) {
+            pAccelerationStructures[i] = src.pAccelerationStructures[i];
+        }
+    }
+
+    return *this;
+}
+
+safe_VkWriteDescriptorSetAccelerationStructureNV::~safe_VkWriteDescriptorSetAccelerationStructureNV()
+{
+    if (pAccelerationStructures)
+        delete[] pAccelerationStructures;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkWriteDescriptorSetAccelerationStructureNV::initialize(const VkWriteDescriptorSetAccelerationStructureNV* in_struct)
+{
+    sType = in_struct->sType;
+    accelerationStructureCount = in_struct->accelerationStructureCount;
+    pAccelerationStructures = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (accelerationStructureCount && in_struct->pAccelerationStructures) {
+        pAccelerationStructures = new VkAccelerationStructureNV[accelerationStructureCount];
+        for (uint32_t i = 0; i < accelerationStructureCount; ++i) {
+            pAccelerationStructures[i] = in_struct->pAccelerationStructures[i];
+        }
+    }
+}
+
+void safe_VkWriteDescriptorSetAccelerationStructureNV::initialize(const safe_VkWriteDescriptorSetAccelerationStructureNV* src)
+{
+    sType = src->sType;
+    accelerationStructureCount = src->accelerationStructureCount;
+    pAccelerationStructures = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (accelerationStructureCount && src->pAccelerationStructures) {
+        pAccelerationStructures = new VkAccelerationStructureNV[accelerationStructureCount];
+        for (uint32_t i = 0; i < accelerationStructureCount; ++i) {
+            pAccelerationStructures[i] = src->pAccelerationStructures[i];
+        }
+    }
+}
+
+safe_VkAccelerationStructureMemoryRequirementsInfoNV::safe_VkAccelerationStructureMemoryRequirementsInfoNV(const VkAccelerationStructureMemoryRequirementsInfoNV* in_struct) :
+    sType(in_struct->sType),
+    type(in_struct->type),
+    accelerationStructure(in_struct->accelerationStructure)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkAccelerationStructureMemoryRequirementsInfoNV::safe_VkAccelerationStructureMemoryRequirementsInfoNV() :
+    pNext(nullptr)
+{}
+
+safe_VkAccelerationStructureMemoryRequirementsInfoNV::safe_VkAccelerationStructureMemoryRequirementsInfoNV(const safe_VkAccelerationStructureMemoryRequirementsInfoNV& src)
+{
+    sType = src.sType;
+    type = src.type;
+    accelerationStructure = src.accelerationStructure;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkAccelerationStructureMemoryRequirementsInfoNV& safe_VkAccelerationStructureMemoryRequirementsInfoNV::operator=(const safe_VkAccelerationStructureMemoryRequirementsInfoNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    type = src.type;
+    accelerationStructure = src.accelerationStructure;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkAccelerationStructureMemoryRequirementsInfoNV::~safe_VkAccelerationStructureMemoryRequirementsInfoNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkAccelerationStructureMemoryRequirementsInfoNV::initialize(const VkAccelerationStructureMemoryRequirementsInfoNV* in_struct)
+{
+    sType = in_struct->sType;
+    type = in_struct->type;
+    accelerationStructure = in_struct->accelerationStructure;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkAccelerationStructureMemoryRequirementsInfoNV::initialize(const safe_VkAccelerationStructureMemoryRequirementsInfoNV* src)
+{
+    sType = src->sType;
+    type = src->type;
+    accelerationStructure = src->accelerationStructure;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceRayTracingPropertiesNV::safe_VkPhysicalDeviceRayTracingPropertiesNV(const VkPhysicalDeviceRayTracingPropertiesNV* in_struct) :
+    sType(in_struct->sType),
+    shaderGroupHandleSize(in_struct->shaderGroupHandleSize),
+    maxRecursionDepth(in_struct->maxRecursionDepth),
+    maxShaderGroupStride(in_struct->maxShaderGroupStride),
+    shaderGroupBaseAlignment(in_struct->shaderGroupBaseAlignment),
+    maxGeometryCount(in_struct->maxGeometryCount),
+    maxInstanceCount(in_struct->maxInstanceCount),
+    maxTriangleCount(in_struct->maxTriangleCount),
+    maxDescriptorSetAccelerationStructures(in_struct->maxDescriptorSetAccelerationStructures)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceRayTracingPropertiesNV::safe_VkPhysicalDeviceRayTracingPropertiesNV() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceRayTracingPropertiesNV::safe_VkPhysicalDeviceRayTracingPropertiesNV(const safe_VkPhysicalDeviceRayTracingPropertiesNV& src)
+{
+    sType = src.sType;
+    shaderGroupHandleSize = src.shaderGroupHandleSize;
+    maxRecursionDepth = src.maxRecursionDepth;
+    maxShaderGroupStride = src.maxShaderGroupStride;
+    shaderGroupBaseAlignment = src.shaderGroupBaseAlignment;
+    maxGeometryCount = src.maxGeometryCount;
+    maxInstanceCount = src.maxInstanceCount;
+    maxTriangleCount = src.maxTriangleCount;
+    maxDescriptorSetAccelerationStructures = src.maxDescriptorSetAccelerationStructures;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceRayTracingPropertiesNV& safe_VkPhysicalDeviceRayTracingPropertiesNV::operator=(const safe_VkPhysicalDeviceRayTracingPropertiesNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    shaderGroupHandleSize = src.shaderGroupHandleSize;
+    maxRecursionDepth = src.maxRecursionDepth;
+    maxShaderGroupStride = src.maxShaderGroupStride;
+    shaderGroupBaseAlignment = src.shaderGroupBaseAlignment;
+    maxGeometryCount = src.maxGeometryCount;
+    maxInstanceCount = src.maxInstanceCount;
+    maxTriangleCount = src.maxTriangleCount;
+    maxDescriptorSetAccelerationStructures = src.maxDescriptorSetAccelerationStructures;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceRayTracingPropertiesNV::~safe_VkPhysicalDeviceRayTracingPropertiesNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceRayTracingPropertiesNV::initialize(const VkPhysicalDeviceRayTracingPropertiesNV* in_struct)
+{
+    sType = in_struct->sType;
+    shaderGroupHandleSize = in_struct->shaderGroupHandleSize;
+    maxRecursionDepth = in_struct->maxRecursionDepth;
+    maxShaderGroupStride = in_struct->maxShaderGroupStride;
+    shaderGroupBaseAlignment = in_struct->shaderGroupBaseAlignment;
+    maxGeometryCount = in_struct->maxGeometryCount;
+    maxInstanceCount = in_struct->maxInstanceCount;
+    maxTriangleCount = in_struct->maxTriangleCount;
+    maxDescriptorSetAccelerationStructures = in_struct->maxDescriptorSetAccelerationStructures;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceRayTracingPropertiesNV::initialize(const safe_VkPhysicalDeviceRayTracingPropertiesNV* src)
+{
+    sType = src->sType;
+    shaderGroupHandleSize = src->shaderGroupHandleSize;
+    maxRecursionDepth = src->maxRecursionDepth;
+    maxShaderGroupStride = src->maxShaderGroupStride;
+    shaderGroupBaseAlignment = src->shaderGroupBaseAlignment;
+    maxGeometryCount = src->maxGeometryCount;
+    maxInstanceCount = src->maxInstanceCount;
+    maxTriangleCount = src->maxTriangleCount;
+    maxDescriptorSetAccelerationStructures = src->maxDescriptorSetAccelerationStructures;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV::safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV(const VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV* in_struct) :
+    sType(in_struct->sType),
+    representativeFragmentTest(in_struct->representativeFragmentTest)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV::safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV::safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV(const safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV& src)
+{
+    sType = src.sType;
+    representativeFragmentTest = src.representativeFragmentTest;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV& safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV::operator=(const safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    representativeFragmentTest = src.representativeFragmentTest;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV::~safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV::initialize(const VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV* in_struct)
+{
+    sType = in_struct->sType;
+    representativeFragmentTest = in_struct->representativeFragmentTest;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV::initialize(const safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV* src)
+{
+    sType = src->sType;
+    representativeFragmentTest = src->representativeFragmentTest;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV::safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV(const VkPipelineRepresentativeFragmentTestStateCreateInfoNV* in_struct) :
+    sType(in_struct->sType),
+    representativeFragmentTestEnable(in_struct->representativeFragmentTestEnable)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV::safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV() :
+    pNext(nullptr)
+{}
+
+safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV::safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV(const safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV& src)
+{
+    sType = src.sType;
+    representativeFragmentTestEnable = src.representativeFragmentTestEnable;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV& safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV::operator=(const safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    representativeFragmentTestEnable = src.representativeFragmentTestEnable;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV::~safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV::initialize(const VkPipelineRepresentativeFragmentTestStateCreateInfoNV* in_struct)
+{
+    sType = in_struct->sType;
+    representativeFragmentTestEnable = in_struct->representativeFragmentTestEnable;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV::initialize(const safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV* src)
+{
+    sType = src->sType;
+    representativeFragmentTestEnable = src->representativeFragmentTestEnable;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceImageViewImageFormatInfoEXT::safe_VkPhysicalDeviceImageViewImageFormatInfoEXT(const VkPhysicalDeviceImageViewImageFormatInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    imageViewType(in_struct->imageViewType)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceImageViewImageFormatInfoEXT::safe_VkPhysicalDeviceImageViewImageFormatInfoEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceImageViewImageFormatInfoEXT::safe_VkPhysicalDeviceImageViewImageFormatInfoEXT(const safe_VkPhysicalDeviceImageViewImageFormatInfoEXT& src)
+{
+    sType = src.sType;
+    imageViewType = src.imageViewType;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceImageViewImageFormatInfoEXT& safe_VkPhysicalDeviceImageViewImageFormatInfoEXT::operator=(const safe_VkPhysicalDeviceImageViewImageFormatInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    imageViewType = src.imageViewType;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceImageViewImageFormatInfoEXT::~safe_VkPhysicalDeviceImageViewImageFormatInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceImageViewImageFormatInfoEXT::initialize(const VkPhysicalDeviceImageViewImageFormatInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    imageViewType = in_struct->imageViewType;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceImageViewImageFormatInfoEXT::initialize(const safe_VkPhysicalDeviceImageViewImageFormatInfoEXT* src)
+{
+    sType = src->sType;
+    imageViewType = src->imageViewType;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkFilterCubicImageViewImageFormatPropertiesEXT::safe_VkFilterCubicImageViewImageFormatPropertiesEXT(const VkFilterCubicImageViewImageFormatPropertiesEXT* in_struct) :
+    sType(in_struct->sType),
+    filterCubic(in_struct->filterCubic),
+    filterCubicMinmax(in_struct->filterCubicMinmax)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkFilterCubicImageViewImageFormatPropertiesEXT::safe_VkFilterCubicImageViewImageFormatPropertiesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkFilterCubicImageViewImageFormatPropertiesEXT::safe_VkFilterCubicImageViewImageFormatPropertiesEXT(const safe_VkFilterCubicImageViewImageFormatPropertiesEXT& src)
+{
+    sType = src.sType;
+    filterCubic = src.filterCubic;
+    filterCubicMinmax = src.filterCubicMinmax;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkFilterCubicImageViewImageFormatPropertiesEXT& safe_VkFilterCubicImageViewImageFormatPropertiesEXT::operator=(const safe_VkFilterCubicImageViewImageFormatPropertiesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    filterCubic = src.filterCubic;
+    filterCubicMinmax = src.filterCubicMinmax;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkFilterCubicImageViewImageFormatPropertiesEXT::~safe_VkFilterCubicImageViewImageFormatPropertiesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkFilterCubicImageViewImageFormatPropertiesEXT::initialize(const VkFilterCubicImageViewImageFormatPropertiesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    filterCubic = in_struct->filterCubic;
+    filterCubicMinmax = in_struct->filterCubicMinmax;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkFilterCubicImageViewImageFormatPropertiesEXT::initialize(const safe_VkFilterCubicImageViewImageFormatPropertiesEXT* src)
+{
+    sType = src->sType;
+    filterCubic = src->filterCubic;
+    filterCubicMinmax = src->filterCubicMinmax;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDeviceQueueGlobalPriorityCreateInfoEXT::safe_VkDeviceQueueGlobalPriorityCreateInfoEXT(const VkDeviceQueueGlobalPriorityCreateInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    globalPriority(in_struct->globalPriority)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDeviceQueueGlobalPriorityCreateInfoEXT::safe_VkDeviceQueueGlobalPriorityCreateInfoEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkDeviceQueueGlobalPriorityCreateInfoEXT::safe_VkDeviceQueueGlobalPriorityCreateInfoEXT(const safe_VkDeviceQueueGlobalPriorityCreateInfoEXT& src)
+{
+    sType = src.sType;
+    globalPriority = src.globalPriority;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDeviceQueueGlobalPriorityCreateInfoEXT& safe_VkDeviceQueueGlobalPriorityCreateInfoEXT::operator=(const safe_VkDeviceQueueGlobalPriorityCreateInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    globalPriority = src.globalPriority;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDeviceQueueGlobalPriorityCreateInfoEXT::~safe_VkDeviceQueueGlobalPriorityCreateInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDeviceQueueGlobalPriorityCreateInfoEXT::initialize(const VkDeviceQueueGlobalPriorityCreateInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    globalPriority = in_struct->globalPriority;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDeviceQueueGlobalPriorityCreateInfoEXT::initialize(const safe_VkDeviceQueueGlobalPriorityCreateInfoEXT* src)
+{
+    sType = src->sType;
+    globalPriority = src->globalPriority;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkImportMemoryHostPointerInfoEXT::safe_VkImportMemoryHostPointerInfoEXT(const VkImportMemoryHostPointerInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    handleType(in_struct->handleType),
+    pHostPointer(in_struct->pHostPointer)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkImportMemoryHostPointerInfoEXT::safe_VkImportMemoryHostPointerInfoEXT() :
+    pNext(nullptr),
+    pHostPointer(nullptr)
+{}
+
+safe_VkImportMemoryHostPointerInfoEXT::safe_VkImportMemoryHostPointerInfoEXT(const safe_VkImportMemoryHostPointerInfoEXT& src)
+{
+    sType = src.sType;
+    handleType = src.handleType;
+    pHostPointer = src.pHostPointer;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkImportMemoryHostPointerInfoEXT& safe_VkImportMemoryHostPointerInfoEXT::operator=(const safe_VkImportMemoryHostPointerInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    handleType = src.handleType;
+    pHostPointer = src.pHostPointer;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkImportMemoryHostPointerInfoEXT::~safe_VkImportMemoryHostPointerInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkImportMemoryHostPointerInfoEXT::initialize(const VkImportMemoryHostPointerInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    handleType = in_struct->handleType;
+    pHostPointer = in_struct->pHostPointer;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkImportMemoryHostPointerInfoEXT::initialize(const safe_VkImportMemoryHostPointerInfoEXT* src)
+{
+    sType = src->sType;
+    handleType = src->handleType;
+    pHostPointer = src->pHostPointer;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkMemoryHostPointerPropertiesEXT::safe_VkMemoryHostPointerPropertiesEXT(const VkMemoryHostPointerPropertiesEXT* in_struct) :
+    sType(in_struct->sType),
+    memoryTypeBits(in_struct->memoryTypeBits)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkMemoryHostPointerPropertiesEXT::safe_VkMemoryHostPointerPropertiesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkMemoryHostPointerPropertiesEXT::safe_VkMemoryHostPointerPropertiesEXT(const safe_VkMemoryHostPointerPropertiesEXT& src)
+{
+    sType = src.sType;
+    memoryTypeBits = src.memoryTypeBits;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkMemoryHostPointerPropertiesEXT& safe_VkMemoryHostPointerPropertiesEXT::operator=(const safe_VkMemoryHostPointerPropertiesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    memoryTypeBits = src.memoryTypeBits;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkMemoryHostPointerPropertiesEXT::~safe_VkMemoryHostPointerPropertiesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkMemoryHostPointerPropertiesEXT::initialize(const VkMemoryHostPointerPropertiesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    memoryTypeBits = in_struct->memoryTypeBits;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkMemoryHostPointerPropertiesEXT::initialize(const safe_VkMemoryHostPointerPropertiesEXT* src)
+{
+    sType = src->sType;
+    memoryTypeBits = src->memoryTypeBits;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT::safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT(const VkPhysicalDeviceExternalMemoryHostPropertiesEXT* in_struct) :
+    sType(in_struct->sType),
+    minImportedHostPointerAlignment(in_struct->minImportedHostPointerAlignment)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT::safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT::safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT(const safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT& src)
+{
+    sType = src.sType;
+    minImportedHostPointerAlignment = src.minImportedHostPointerAlignment;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT& safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT::operator=(const safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    minImportedHostPointerAlignment = src.minImportedHostPointerAlignment;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT::~safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT::initialize(const VkPhysicalDeviceExternalMemoryHostPropertiesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    minImportedHostPointerAlignment = in_struct->minImportedHostPointerAlignment;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT::initialize(const safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT* src)
+{
+    sType = src->sType;
+    minImportedHostPointerAlignment = src->minImportedHostPointerAlignment;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPipelineCompilerControlCreateInfoAMD::safe_VkPipelineCompilerControlCreateInfoAMD(const VkPipelineCompilerControlCreateInfoAMD* in_struct) :
+    sType(in_struct->sType),
+    compilerControlFlags(in_struct->compilerControlFlags)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPipelineCompilerControlCreateInfoAMD::safe_VkPipelineCompilerControlCreateInfoAMD() :
+    pNext(nullptr)
+{}
+
+safe_VkPipelineCompilerControlCreateInfoAMD::safe_VkPipelineCompilerControlCreateInfoAMD(const safe_VkPipelineCompilerControlCreateInfoAMD& src)
+{
+    sType = src.sType;
+    compilerControlFlags = src.compilerControlFlags;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPipelineCompilerControlCreateInfoAMD& safe_VkPipelineCompilerControlCreateInfoAMD::operator=(const safe_VkPipelineCompilerControlCreateInfoAMD& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    compilerControlFlags = src.compilerControlFlags;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPipelineCompilerControlCreateInfoAMD::~safe_VkPipelineCompilerControlCreateInfoAMD()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineCompilerControlCreateInfoAMD::initialize(const VkPipelineCompilerControlCreateInfoAMD* in_struct)
+{
+    sType = in_struct->sType;
+    compilerControlFlags = in_struct->compilerControlFlags;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPipelineCompilerControlCreateInfoAMD::initialize(const safe_VkPipelineCompilerControlCreateInfoAMD* src)
+{
+    sType = src->sType;
+    compilerControlFlags = src->compilerControlFlags;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkCalibratedTimestampInfoEXT::safe_VkCalibratedTimestampInfoEXT(const VkCalibratedTimestampInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    timeDomain(in_struct->timeDomain)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkCalibratedTimestampInfoEXT::safe_VkCalibratedTimestampInfoEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkCalibratedTimestampInfoEXT::safe_VkCalibratedTimestampInfoEXT(const safe_VkCalibratedTimestampInfoEXT& src)
+{
+    sType = src.sType;
+    timeDomain = src.timeDomain;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkCalibratedTimestampInfoEXT& safe_VkCalibratedTimestampInfoEXT::operator=(const safe_VkCalibratedTimestampInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    timeDomain = src.timeDomain;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkCalibratedTimestampInfoEXT::~safe_VkCalibratedTimestampInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkCalibratedTimestampInfoEXT::initialize(const VkCalibratedTimestampInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    timeDomain = in_struct->timeDomain;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkCalibratedTimestampInfoEXT::initialize(const safe_VkCalibratedTimestampInfoEXT* src)
+{
+    sType = src->sType;
+    timeDomain = src->timeDomain;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceShaderCorePropertiesAMD::safe_VkPhysicalDeviceShaderCorePropertiesAMD(const VkPhysicalDeviceShaderCorePropertiesAMD* in_struct) :
+    sType(in_struct->sType),
+    shaderEngineCount(in_struct->shaderEngineCount),
+    shaderArraysPerEngineCount(in_struct->shaderArraysPerEngineCount),
+    computeUnitsPerShaderArray(in_struct->computeUnitsPerShaderArray),
+    simdPerComputeUnit(in_struct->simdPerComputeUnit),
+    wavefrontsPerSimd(in_struct->wavefrontsPerSimd),
+    wavefrontSize(in_struct->wavefrontSize),
+    sgprsPerSimd(in_struct->sgprsPerSimd),
+    minSgprAllocation(in_struct->minSgprAllocation),
+    maxSgprAllocation(in_struct->maxSgprAllocation),
+    sgprAllocationGranularity(in_struct->sgprAllocationGranularity),
+    vgprsPerSimd(in_struct->vgprsPerSimd),
+    minVgprAllocation(in_struct->minVgprAllocation),
+    maxVgprAllocation(in_struct->maxVgprAllocation),
+    vgprAllocationGranularity(in_struct->vgprAllocationGranularity)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceShaderCorePropertiesAMD::safe_VkPhysicalDeviceShaderCorePropertiesAMD() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceShaderCorePropertiesAMD::safe_VkPhysicalDeviceShaderCorePropertiesAMD(const safe_VkPhysicalDeviceShaderCorePropertiesAMD& src)
+{
+    sType = src.sType;
+    shaderEngineCount = src.shaderEngineCount;
+    shaderArraysPerEngineCount = src.shaderArraysPerEngineCount;
+    computeUnitsPerShaderArray = src.computeUnitsPerShaderArray;
+    simdPerComputeUnit = src.simdPerComputeUnit;
+    wavefrontsPerSimd = src.wavefrontsPerSimd;
+    wavefrontSize = src.wavefrontSize;
+    sgprsPerSimd = src.sgprsPerSimd;
+    minSgprAllocation = src.minSgprAllocation;
+    maxSgprAllocation = src.maxSgprAllocation;
+    sgprAllocationGranularity = src.sgprAllocationGranularity;
+    vgprsPerSimd = src.vgprsPerSimd;
+    minVgprAllocation = src.minVgprAllocation;
+    maxVgprAllocation = src.maxVgprAllocation;
+    vgprAllocationGranularity = src.vgprAllocationGranularity;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceShaderCorePropertiesAMD& safe_VkPhysicalDeviceShaderCorePropertiesAMD::operator=(const safe_VkPhysicalDeviceShaderCorePropertiesAMD& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    shaderEngineCount = src.shaderEngineCount;
+    shaderArraysPerEngineCount = src.shaderArraysPerEngineCount;
+    computeUnitsPerShaderArray = src.computeUnitsPerShaderArray;
+    simdPerComputeUnit = src.simdPerComputeUnit;
+    wavefrontsPerSimd = src.wavefrontsPerSimd;
+    wavefrontSize = src.wavefrontSize;
+    sgprsPerSimd = src.sgprsPerSimd;
+    minSgprAllocation = src.minSgprAllocation;
+    maxSgprAllocation = src.maxSgprAllocation;
+    sgprAllocationGranularity = src.sgprAllocationGranularity;
+    vgprsPerSimd = src.vgprsPerSimd;
+    minVgprAllocation = src.minVgprAllocation;
+    maxVgprAllocation = src.maxVgprAllocation;
+    vgprAllocationGranularity = src.vgprAllocationGranularity;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceShaderCorePropertiesAMD::~safe_VkPhysicalDeviceShaderCorePropertiesAMD()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceShaderCorePropertiesAMD::initialize(const VkPhysicalDeviceShaderCorePropertiesAMD* in_struct)
+{
+    sType = in_struct->sType;
+    shaderEngineCount = in_struct->shaderEngineCount;
+    shaderArraysPerEngineCount = in_struct->shaderArraysPerEngineCount;
+    computeUnitsPerShaderArray = in_struct->computeUnitsPerShaderArray;
+    simdPerComputeUnit = in_struct->simdPerComputeUnit;
+    wavefrontsPerSimd = in_struct->wavefrontsPerSimd;
+    wavefrontSize = in_struct->wavefrontSize;
+    sgprsPerSimd = in_struct->sgprsPerSimd;
+    minSgprAllocation = in_struct->minSgprAllocation;
+    maxSgprAllocation = in_struct->maxSgprAllocation;
+    sgprAllocationGranularity = in_struct->sgprAllocationGranularity;
+    vgprsPerSimd = in_struct->vgprsPerSimd;
+    minVgprAllocation = in_struct->minVgprAllocation;
+    maxVgprAllocation = in_struct->maxVgprAllocation;
+    vgprAllocationGranularity = in_struct->vgprAllocationGranularity;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceShaderCorePropertiesAMD::initialize(const safe_VkPhysicalDeviceShaderCorePropertiesAMD* src)
+{
+    sType = src->sType;
+    shaderEngineCount = src->shaderEngineCount;
+    shaderArraysPerEngineCount = src->shaderArraysPerEngineCount;
+    computeUnitsPerShaderArray = src->computeUnitsPerShaderArray;
+    simdPerComputeUnit = src->simdPerComputeUnit;
+    wavefrontsPerSimd = src->wavefrontsPerSimd;
+    wavefrontSize = src->wavefrontSize;
+    sgprsPerSimd = src->sgprsPerSimd;
+    minSgprAllocation = src->minSgprAllocation;
+    maxSgprAllocation = src->maxSgprAllocation;
+    sgprAllocationGranularity = src->sgprAllocationGranularity;
+    vgprsPerSimd = src->vgprsPerSimd;
+    minVgprAllocation = src->minVgprAllocation;
+    maxVgprAllocation = src->maxVgprAllocation;
+    vgprAllocationGranularity = src->vgprAllocationGranularity;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDeviceMemoryOverallocationCreateInfoAMD::safe_VkDeviceMemoryOverallocationCreateInfoAMD(const VkDeviceMemoryOverallocationCreateInfoAMD* in_struct) :
+    sType(in_struct->sType),
+    overallocationBehavior(in_struct->overallocationBehavior)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDeviceMemoryOverallocationCreateInfoAMD::safe_VkDeviceMemoryOverallocationCreateInfoAMD() :
+    pNext(nullptr)
+{}
+
+safe_VkDeviceMemoryOverallocationCreateInfoAMD::safe_VkDeviceMemoryOverallocationCreateInfoAMD(const safe_VkDeviceMemoryOverallocationCreateInfoAMD& src)
+{
+    sType = src.sType;
+    overallocationBehavior = src.overallocationBehavior;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDeviceMemoryOverallocationCreateInfoAMD& safe_VkDeviceMemoryOverallocationCreateInfoAMD::operator=(const safe_VkDeviceMemoryOverallocationCreateInfoAMD& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    overallocationBehavior = src.overallocationBehavior;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDeviceMemoryOverallocationCreateInfoAMD::~safe_VkDeviceMemoryOverallocationCreateInfoAMD()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDeviceMemoryOverallocationCreateInfoAMD::initialize(const VkDeviceMemoryOverallocationCreateInfoAMD* in_struct)
+{
+    sType = in_struct->sType;
+    overallocationBehavior = in_struct->overallocationBehavior;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDeviceMemoryOverallocationCreateInfoAMD::initialize(const safe_VkDeviceMemoryOverallocationCreateInfoAMD* src)
+{
+    sType = src->sType;
+    overallocationBehavior = src->overallocationBehavior;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT::safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT(const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT* in_struct) :
+    sType(in_struct->sType),
+    maxVertexAttribDivisor(in_struct->maxVertexAttribDivisor)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT::safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT::safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT(const safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT& src)
+{
+    sType = src.sType;
+    maxVertexAttribDivisor = src.maxVertexAttribDivisor;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT& safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT::operator=(const safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    maxVertexAttribDivisor = src.maxVertexAttribDivisor;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT::~safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT::initialize(const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    maxVertexAttribDivisor = in_struct->maxVertexAttribDivisor;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT::initialize(const safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT* src)
+{
+    sType = src->sType;
+    maxVertexAttribDivisor = src->maxVertexAttribDivisor;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPipelineVertexInputDivisorStateCreateInfoEXT::safe_VkPipelineVertexInputDivisorStateCreateInfoEXT(const VkPipelineVertexInputDivisorStateCreateInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    vertexBindingDivisorCount(in_struct->vertexBindingDivisorCount),
+    pVertexBindingDivisors(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pVertexBindingDivisors) {
+        pVertexBindingDivisors = new VkVertexInputBindingDivisorDescriptionEXT[in_struct->vertexBindingDivisorCount];
+        memcpy ((void *)pVertexBindingDivisors, (void *)in_struct->pVertexBindingDivisors, sizeof(VkVertexInputBindingDivisorDescriptionEXT)*in_struct->vertexBindingDivisorCount);
+    }
+}
+
+safe_VkPipelineVertexInputDivisorStateCreateInfoEXT::safe_VkPipelineVertexInputDivisorStateCreateInfoEXT() :
+    pNext(nullptr),
+    pVertexBindingDivisors(nullptr)
+{}
+
+safe_VkPipelineVertexInputDivisorStateCreateInfoEXT::safe_VkPipelineVertexInputDivisorStateCreateInfoEXT(const safe_VkPipelineVertexInputDivisorStateCreateInfoEXT& src)
+{
+    sType = src.sType;
+    vertexBindingDivisorCount = src.vertexBindingDivisorCount;
+    pVertexBindingDivisors = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pVertexBindingDivisors) {
+        pVertexBindingDivisors = new VkVertexInputBindingDivisorDescriptionEXT[src.vertexBindingDivisorCount];
+        memcpy ((void *)pVertexBindingDivisors, (void *)src.pVertexBindingDivisors, sizeof(VkVertexInputBindingDivisorDescriptionEXT)*src.vertexBindingDivisorCount);
+    }
+}
+
+safe_VkPipelineVertexInputDivisorStateCreateInfoEXT& safe_VkPipelineVertexInputDivisorStateCreateInfoEXT::operator=(const safe_VkPipelineVertexInputDivisorStateCreateInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pVertexBindingDivisors)
+        delete[] pVertexBindingDivisors;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    vertexBindingDivisorCount = src.vertexBindingDivisorCount;
+    pVertexBindingDivisors = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pVertexBindingDivisors) {
+        pVertexBindingDivisors = new VkVertexInputBindingDivisorDescriptionEXT[src.vertexBindingDivisorCount];
+        memcpy ((void *)pVertexBindingDivisors, (void *)src.pVertexBindingDivisors, sizeof(VkVertexInputBindingDivisorDescriptionEXT)*src.vertexBindingDivisorCount);
+    }
+
+    return *this;
+}
+
+safe_VkPipelineVertexInputDivisorStateCreateInfoEXT::~safe_VkPipelineVertexInputDivisorStateCreateInfoEXT()
+{
+    if (pVertexBindingDivisors)
+        delete[] pVertexBindingDivisors;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineVertexInputDivisorStateCreateInfoEXT::initialize(const VkPipelineVertexInputDivisorStateCreateInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    vertexBindingDivisorCount = in_struct->vertexBindingDivisorCount;
+    pVertexBindingDivisors = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pVertexBindingDivisors) {
+        pVertexBindingDivisors = new VkVertexInputBindingDivisorDescriptionEXT[in_struct->vertexBindingDivisorCount];
+        memcpy ((void *)pVertexBindingDivisors, (void *)in_struct->pVertexBindingDivisors, sizeof(VkVertexInputBindingDivisorDescriptionEXT)*in_struct->vertexBindingDivisorCount);
+    }
+}
+
+void safe_VkPipelineVertexInputDivisorStateCreateInfoEXT::initialize(const safe_VkPipelineVertexInputDivisorStateCreateInfoEXT* src)
+{
+    sType = src->sType;
+    vertexBindingDivisorCount = src->vertexBindingDivisorCount;
+    pVertexBindingDivisors = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pVertexBindingDivisors) {
+        pVertexBindingDivisors = new VkVertexInputBindingDivisorDescriptionEXT[src->vertexBindingDivisorCount];
+        memcpy ((void *)pVertexBindingDivisors, (void *)src->pVertexBindingDivisors, sizeof(VkVertexInputBindingDivisorDescriptionEXT)*src->vertexBindingDivisorCount);
+    }
+}
+
+safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT::safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT(const VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT* in_struct) :
+    sType(in_struct->sType),
+    vertexAttributeInstanceRateDivisor(in_struct->vertexAttributeInstanceRateDivisor),
+    vertexAttributeInstanceRateZeroDivisor(in_struct->vertexAttributeInstanceRateZeroDivisor)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT::safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT::safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT(const safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT& src)
+{
+    sType = src.sType;
+    vertexAttributeInstanceRateDivisor = src.vertexAttributeInstanceRateDivisor;
+    vertexAttributeInstanceRateZeroDivisor = src.vertexAttributeInstanceRateZeroDivisor;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT& safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT::operator=(const safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    vertexAttributeInstanceRateDivisor = src.vertexAttributeInstanceRateDivisor;
+    vertexAttributeInstanceRateZeroDivisor = src.vertexAttributeInstanceRateZeroDivisor;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT::~safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT::initialize(const VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    vertexAttributeInstanceRateDivisor = in_struct->vertexAttributeInstanceRateDivisor;
+    vertexAttributeInstanceRateZeroDivisor = in_struct->vertexAttributeInstanceRateZeroDivisor;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT::initialize(const safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT* src)
+{
+    sType = src->sType;
+    vertexAttributeInstanceRateDivisor = src->vertexAttributeInstanceRateDivisor;
+    vertexAttributeInstanceRateZeroDivisor = src->vertexAttributeInstanceRateZeroDivisor;
+    pNext = SafePnextCopy(src->pNext);
+}
+#ifdef VK_USE_PLATFORM_GGP
+
+
+safe_VkPresentFrameTokenGGP::safe_VkPresentFrameTokenGGP(const VkPresentFrameTokenGGP* in_struct) :
+    sType(in_struct->sType),
+    frameToken(in_struct->frameToken)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPresentFrameTokenGGP::safe_VkPresentFrameTokenGGP() :
+    pNext(nullptr)
+{}
+
+safe_VkPresentFrameTokenGGP::safe_VkPresentFrameTokenGGP(const safe_VkPresentFrameTokenGGP& src)
+{
+    sType = src.sType;
+    frameToken = src.frameToken;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPresentFrameTokenGGP& safe_VkPresentFrameTokenGGP::operator=(const safe_VkPresentFrameTokenGGP& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    frameToken = src.frameToken;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPresentFrameTokenGGP::~safe_VkPresentFrameTokenGGP()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPresentFrameTokenGGP::initialize(const VkPresentFrameTokenGGP* in_struct)
+{
+    sType = in_struct->sType;
+    frameToken = in_struct->frameToken;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPresentFrameTokenGGP::initialize(const safe_VkPresentFrameTokenGGP* src)
+{
+    sType = src->sType;
+    frameToken = src->frameToken;
+    pNext = SafePnextCopy(src->pNext);
+}
+#endif // VK_USE_PLATFORM_GGP
+
+
+safe_VkPipelineCreationFeedbackCreateInfoEXT::safe_VkPipelineCreationFeedbackCreateInfoEXT(const VkPipelineCreationFeedbackCreateInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    pPipelineCreationFeedback(nullptr),
+    pipelineStageCreationFeedbackCount(in_struct->pipelineStageCreationFeedbackCount),
+    pPipelineStageCreationFeedbacks(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pPipelineCreationFeedback) {
+        pPipelineCreationFeedback = new VkPipelineCreationFeedbackEXT(*in_struct->pPipelineCreationFeedback);
+    }
+    if (in_struct->pPipelineStageCreationFeedbacks) {
+        pPipelineStageCreationFeedbacks = new VkPipelineCreationFeedbackEXT[in_struct->pipelineStageCreationFeedbackCount];
+        memcpy ((void *)pPipelineStageCreationFeedbacks, (void *)in_struct->pPipelineStageCreationFeedbacks, sizeof(VkPipelineCreationFeedbackEXT)*in_struct->pipelineStageCreationFeedbackCount);
+    }
+}
+
+safe_VkPipelineCreationFeedbackCreateInfoEXT::safe_VkPipelineCreationFeedbackCreateInfoEXT() :
+    pNext(nullptr),
+    pPipelineCreationFeedback(nullptr),
+    pPipelineStageCreationFeedbacks(nullptr)
+{}
+
+safe_VkPipelineCreationFeedbackCreateInfoEXT::safe_VkPipelineCreationFeedbackCreateInfoEXT(const safe_VkPipelineCreationFeedbackCreateInfoEXT& src)
+{
+    sType = src.sType;
+    pPipelineCreationFeedback = nullptr;
+    pipelineStageCreationFeedbackCount = src.pipelineStageCreationFeedbackCount;
+    pPipelineStageCreationFeedbacks = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pPipelineCreationFeedback) {
+        pPipelineCreationFeedback = new VkPipelineCreationFeedbackEXT(*src.pPipelineCreationFeedback);
+    }
+    if (src.pPipelineStageCreationFeedbacks) {
+        pPipelineStageCreationFeedbacks = new VkPipelineCreationFeedbackEXT[src.pipelineStageCreationFeedbackCount];
+        memcpy ((void *)pPipelineStageCreationFeedbacks, (void *)src.pPipelineStageCreationFeedbacks, sizeof(VkPipelineCreationFeedbackEXT)*src.pipelineStageCreationFeedbackCount);
+    }
+}
+
+safe_VkPipelineCreationFeedbackCreateInfoEXT& safe_VkPipelineCreationFeedbackCreateInfoEXT::operator=(const safe_VkPipelineCreationFeedbackCreateInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pPipelineCreationFeedback)
+        delete pPipelineCreationFeedback;
+    if (pPipelineStageCreationFeedbacks)
+        delete[] pPipelineStageCreationFeedbacks;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    pPipelineCreationFeedback = nullptr;
+    pipelineStageCreationFeedbackCount = src.pipelineStageCreationFeedbackCount;
+    pPipelineStageCreationFeedbacks = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pPipelineCreationFeedback) {
+        pPipelineCreationFeedback = new VkPipelineCreationFeedbackEXT(*src.pPipelineCreationFeedback);
+    }
+    if (src.pPipelineStageCreationFeedbacks) {
+        pPipelineStageCreationFeedbacks = new VkPipelineCreationFeedbackEXT[src.pipelineStageCreationFeedbackCount];
+        memcpy ((void *)pPipelineStageCreationFeedbacks, (void *)src.pPipelineStageCreationFeedbacks, sizeof(VkPipelineCreationFeedbackEXT)*src.pipelineStageCreationFeedbackCount);
+    }
+
+    return *this;
+}
+
+safe_VkPipelineCreationFeedbackCreateInfoEXT::~safe_VkPipelineCreationFeedbackCreateInfoEXT()
+{
+    if (pPipelineCreationFeedback)
+        delete pPipelineCreationFeedback;
+    if (pPipelineStageCreationFeedbacks)
+        delete[] pPipelineStageCreationFeedbacks;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineCreationFeedbackCreateInfoEXT::initialize(const VkPipelineCreationFeedbackCreateInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    pPipelineCreationFeedback = nullptr;
+    pipelineStageCreationFeedbackCount = in_struct->pipelineStageCreationFeedbackCount;
+    pPipelineStageCreationFeedbacks = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pPipelineCreationFeedback) {
+        pPipelineCreationFeedback = new VkPipelineCreationFeedbackEXT(*in_struct->pPipelineCreationFeedback);
+    }
+    if (in_struct->pPipelineStageCreationFeedbacks) {
+        pPipelineStageCreationFeedbacks = new VkPipelineCreationFeedbackEXT[in_struct->pipelineStageCreationFeedbackCount];
+        memcpy ((void *)pPipelineStageCreationFeedbacks, (void *)in_struct->pPipelineStageCreationFeedbacks, sizeof(VkPipelineCreationFeedbackEXT)*in_struct->pipelineStageCreationFeedbackCount);
+    }
+}
+
+void safe_VkPipelineCreationFeedbackCreateInfoEXT::initialize(const safe_VkPipelineCreationFeedbackCreateInfoEXT* src)
+{
+    sType = src->sType;
+    pPipelineCreationFeedback = nullptr;
+    pipelineStageCreationFeedbackCount = src->pipelineStageCreationFeedbackCount;
+    pPipelineStageCreationFeedbacks = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pPipelineCreationFeedback) {
+        pPipelineCreationFeedback = new VkPipelineCreationFeedbackEXT(*src->pPipelineCreationFeedback);
+    }
+    if (src->pPipelineStageCreationFeedbacks) {
+        pPipelineStageCreationFeedbacks = new VkPipelineCreationFeedbackEXT[src->pipelineStageCreationFeedbackCount];
+        memcpy ((void *)pPipelineStageCreationFeedbacks, (void *)src->pPipelineStageCreationFeedbacks, sizeof(VkPipelineCreationFeedbackEXT)*src->pipelineStageCreationFeedbackCount);
+    }
+}
+
+safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV(const VkPhysicalDeviceComputeShaderDerivativesFeaturesNV* in_struct) :
+    sType(in_struct->sType),
+    computeDerivativeGroupQuads(in_struct->computeDerivativeGroupQuads),
+    computeDerivativeGroupLinear(in_struct->computeDerivativeGroupLinear)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV(const safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV& src)
+{
+    sType = src.sType;
+    computeDerivativeGroupQuads = src.computeDerivativeGroupQuads;
+    computeDerivativeGroupLinear = src.computeDerivativeGroupLinear;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV& safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::operator=(const safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    computeDerivativeGroupQuads = src.computeDerivativeGroupQuads;
+    computeDerivativeGroupLinear = src.computeDerivativeGroupLinear;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::~safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::initialize(const VkPhysicalDeviceComputeShaderDerivativesFeaturesNV* in_struct)
+{
+    sType = in_struct->sType;
+    computeDerivativeGroupQuads = in_struct->computeDerivativeGroupQuads;
+    computeDerivativeGroupLinear = in_struct->computeDerivativeGroupLinear;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::initialize(const safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV* src)
+{
+    sType = src->sType;
+    computeDerivativeGroupQuads = src->computeDerivativeGroupQuads;
+    computeDerivativeGroupLinear = src->computeDerivativeGroupLinear;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceMeshShaderFeaturesNV::safe_VkPhysicalDeviceMeshShaderFeaturesNV(const VkPhysicalDeviceMeshShaderFeaturesNV* in_struct) :
+    sType(in_struct->sType),
+    taskShader(in_struct->taskShader),
+    meshShader(in_struct->meshShader)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceMeshShaderFeaturesNV::safe_VkPhysicalDeviceMeshShaderFeaturesNV() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceMeshShaderFeaturesNV::safe_VkPhysicalDeviceMeshShaderFeaturesNV(const safe_VkPhysicalDeviceMeshShaderFeaturesNV& src)
+{
+    sType = src.sType;
+    taskShader = src.taskShader;
+    meshShader = src.meshShader;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceMeshShaderFeaturesNV& safe_VkPhysicalDeviceMeshShaderFeaturesNV::operator=(const safe_VkPhysicalDeviceMeshShaderFeaturesNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    taskShader = src.taskShader;
+    meshShader = src.meshShader;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceMeshShaderFeaturesNV::~safe_VkPhysicalDeviceMeshShaderFeaturesNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceMeshShaderFeaturesNV::initialize(const VkPhysicalDeviceMeshShaderFeaturesNV* in_struct)
+{
+    sType = in_struct->sType;
+    taskShader = in_struct->taskShader;
+    meshShader = in_struct->meshShader;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceMeshShaderFeaturesNV::initialize(const safe_VkPhysicalDeviceMeshShaderFeaturesNV* src)
+{
+    sType = src->sType;
+    taskShader = src->taskShader;
+    meshShader = src->meshShader;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceMeshShaderPropertiesNV::safe_VkPhysicalDeviceMeshShaderPropertiesNV(const VkPhysicalDeviceMeshShaderPropertiesNV* in_struct) :
+    sType(in_struct->sType),
+    maxDrawMeshTasksCount(in_struct->maxDrawMeshTasksCount),
+    maxTaskWorkGroupInvocations(in_struct->maxTaskWorkGroupInvocations),
+    maxTaskTotalMemorySize(in_struct->maxTaskTotalMemorySize),
+    maxTaskOutputCount(in_struct->maxTaskOutputCount),
+    maxMeshWorkGroupInvocations(in_struct->maxMeshWorkGroupInvocations),
+    maxMeshTotalMemorySize(in_struct->maxMeshTotalMemorySize),
+    maxMeshOutputVertices(in_struct->maxMeshOutputVertices),
+    maxMeshOutputPrimitives(in_struct->maxMeshOutputPrimitives),
+    maxMeshMultiviewViewCount(in_struct->maxMeshMultiviewViewCount),
+    meshOutputPerVertexGranularity(in_struct->meshOutputPerVertexGranularity),
+    meshOutputPerPrimitiveGranularity(in_struct->meshOutputPerPrimitiveGranularity)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    for (uint32_t i = 0; i < 3; ++i) {
+        maxTaskWorkGroupSize[i] = in_struct->maxTaskWorkGroupSize[i];
+    }
+    for (uint32_t i = 0; i < 3; ++i) {
+        maxMeshWorkGroupSize[i] = in_struct->maxMeshWorkGroupSize[i];
+    }
+}
+
+safe_VkPhysicalDeviceMeshShaderPropertiesNV::safe_VkPhysicalDeviceMeshShaderPropertiesNV() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceMeshShaderPropertiesNV::safe_VkPhysicalDeviceMeshShaderPropertiesNV(const safe_VkPhysicalDeviceMeshShaderPropertiesNV& src)
+{
+    sType = src.sType;
+    maxDrawMeshTasksCount = src.maxDrawMeshTasksCount;
+    maxTaskWorkGroupInvocations = src.maxTaskWorkGroupInvocations;
+    maxTaskTotalMemorySize = src.maxTaskTotalMemorySize;
+    maxTaskOutputCount = src.maxTaskOutputCount;
+    maxMeshWorkGroupInvocations = src.maxMeshWorkGroupInvocations;
+    maxMeshTotalMemorySize = src.maxMeshTotalMemorySize;
+    maxMeshOutputVertices = src.maxMeshOutputVertices;
+    maxMeshOutputPrimitives = src.maxMeshOutputPrimitives;
+    maxMeshMultiviewViewCount = src.maxMeshMultiviewViewCount;
+    meshOutputPerVertexGranularity = src.meshOutputPerVertexGranularity;
+    meshOutputPerPrimitiveGranularity = src.meshOutputPerPrimitiveGranularity;
+    pNext = SafePnextCopy(src.pNext);
+    for (uint32_t i = 0; i < 3; ++i) {
+        maxTaskWorkGroupSize[i] = src.maxTaskWorkGroupSize[i];
+    }
+    for (uint32_t i = 0; i < 3; ++i) {
+        maxMeshWorkGroupSize[i] = src.maxMeshWorkGroupSize[i];
+    }
+}
+
+safe_VkPhysicalDeviceMeshShaderPropertiesNV& safe_VkPhysicalDeviceMeshShaderPropertiesNV::operator=(const safe_VkPhysicalDeviceMeshShaderPropertiesNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    maxDrawMeshTasksCount = src.maxDrawMeshTasksCount;
+    maxTaskWorkGroupInvocations = src.maxTaskWorkGroupInvocations;
+    maxTaskTotalMemorySize = src.maxTaskTotalMemorySize;
+    maxTaskOutputCount = src.maxTaskOutputCount;
+    maxMeshWorkGroupInvocations = src.maxMeshWorkGroupInvocations;
+    maxMeshTotalMemorySize = src.maxMeshTotalMemorySize;
+    maxMeshOutputVertices = src.maxMeshOutputVertices;
+    maxMeshOutputPrimitives = src.maxMeshOutputPrimitives;
+    maxMeshMultiviewViewCount = src.maxMeshMultiviewViewCount;
+    meshOutputPerVertexGranularity = src.meshOutputPerVertexGranularity;
+    meshOutputPerPrimitiveGranularity = src.meshOutputPerPrimitiveGranularity;
+    pNext = SafePnextCopy(src.pNext);
+    for (uint32_t i = 0; i < 3; ++i) {
+        maxTaskWorkGroupSize[i] = src.maxTaskWorkGroupSize[i];
+    }
+    for (uint32_t i = 0; i < 3; ++i) {
+        maxMeshWorkGroupSize[i] = src.maxMeshWorkGroupSize[i];
+    }
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceMeshShaderPropertiesNV::~safe_VkPhysicalDeviceMeshShaderPropertiesNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceMeshShaderPropertiesNV::initialize(const VkPhysicalDeviceMeshShaderPropertiesNV* in_struct)
+{
+    sType = in_struct->sType;
+    maxDrawMeshTasksCount = in_struct->maxDrawMeshTasksCount;
+    maxTaskWorkGroupInvocations = in_struct->maxTaskWorkGroupInvocations;
+    maxTaskTotalMemorySize = in_struct->maxTaskTotalMemorySize;
+    maxTaskOutputCount = in_struct->maxTaskOutputCount;
+    maxMeshWorkGroupInvocations = in_struct->maxMeshWorkGroupInvocations;
+    maxMeshTotalMemorySize = in_struct->maxMeshTotalMemorySize;
+    maxMeshOutputVertices = in_struct->maxMeshOutputVertices;
+    maxMeshOutputPrimitives = in_struct->maxMeshOutputPrimitives;
+    maxMeshMultiviewViewCount = in_struct->maxMeshMultiviewViewCount;
+    meshOutputPerVertexGranularity = in_struct->meshOutputPerVertexGranularity;
+    meshOutputPerPrimitiveGranularity = in_struct->meshOutputPerPrimitiveGranularity;
+    pNext = SafePnextCopy(in_struct->pNext);
+    for (uint32_t i = 0; i < 3; ++i) {
+        maxTaskWorkGroupSize[i] = in_struct->maxTaskWorkGroupSize[i];
+    }
+    for (uint32_t i = 0; i < 3; ++i) {
+        maxMeshWorkGroupSize[i] = in_struct->maxMeshWorkGroupSize[i];
+    }
+}
+
+void safe_VkPhysicalDeviceMeshShaderPropertiesNV::initialize(const safe_VkPhysicalDeviceMeshShaderPropertiesNV* src)
+{
+    sType = src->sType;
+    maxDrawMeshTasksCount = src->maxDrawMeshTasksCount;
+    maxTaskWorkGroupInvocations = src->maxTaskWorkGroupInvocations;
+    maxTaskTotalMemorySize = src->maxTaskTotalMemorySize;
+    maxTaskOutputCount = src->maxTaskOutputCount;
+    maxMeshWorkGroupInvocations = src->maxMeshWorkGroupInvocations;
+    maxMeshTotalMemorySize = src->maxMeshTotalMemorySize;
+    maxMeshOutputVertices = src->maxMeshOutputVertices;
+    maxMeshOutputPrimitives = src->maxMeshOutputPrimitives;
+    maxMeshMultiviewViewCount = src->maxMeshMultiviewViewCount;
+    meshOutputPerVertexGranularity = src->meshOutputPerVertexGranularity;
+    meshOutputPerPrimitiveGranularity = src->meshOutputPerPrimitiveGranularity;
+    pNext = SafePnextCopy(src->pNext);
+    for (uint32_t i = 0; i < 3; ++i) {
+        maxTaskWorkGroupSize[i] = src->maxTaskWorkGroupSize[i];
+    }
+    for (uint32_t i = 0; i < 3; ++i) {
+        maxMeshWorkGroupSize[i] = src->maxMeshWorkGroupSize[i];
+    }
+}
+
+safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV::safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV(const VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV* in_struct) :
+    sType(in_struct->sType),
+    fragmentShaderBarycentric(in_struct->fragmentShaderBarycentric)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV::safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV::safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV(const safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV& src)
+{
+    sType = src.sType;
+    fragmentShaderBarycentric = src.fragmentShaderBarycentric;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV& safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV::operator=(const safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    fragmentShaderBarycentric = src.fragmentShaderBarycentric;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV::~safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV::initialize(const VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV* in_struct)
+{
+    sType = in_struct->sType;
+    fragmentShaderBarycentric = in_struct->fragmentShaderBarycentric;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV::initialize(const safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV* src)
+{
+    sType = src->sType;
+    fragmentShaderBarycentric = src->fragmentShaderBarycentric;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV::safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV(const VkPhysicalDeviceShaderImageFootprintFeaturesNV* in_struct) :
+    sType(in_struct->sType),
+    imageFootprint(in_struct->imageFootprint)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV::safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV::safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV(const safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV& src)
+{
+    sType = src.sType;
+    imageFootprint = src.imageFootprint;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV& safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV::operator=(const safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    imageFootprint = src.imageFootprint;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV::~safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV::initialize(const VkPhysicalDeviceShaderImageFootprintFeaturesNV* in_struct)
+{
+    sType = in_struct->sType;
+    imageFootprint = in_struct->imageFootprint;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV::initialize(const safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV* src)
+{
+    sType = src->sType;
+    imageFootprint = src->imageFootprint;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV::safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV(const VkPipelineViewportExclusiveScissorStateCreateInfoNV* in_struct) :
+    sType(in_struct->sType),
+    exclusiveScissorCount(in_struct->exclusiveScissorCount),
+    pExclusiveScissors(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pExclusiveScissors) {
+        pExclusiveScissors = new VkRect2D[in_struct->exclusiveScissorCount];
+        memcpy ((void *)pExclusiveScissors, (void *)in_struct->pExclusiveScissors, sizeof(VkRect2D)*in_struct->exclusiveScissorCount);
+    }
+}
+
+safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV::safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV() :
+    pNext(nullptr),
+    pExclusiveScissors(nullptr)
+{}
+
+safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV::safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV(const safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV& src)
+{
+    sType = src.sType;
+    exclusiveScissorCount = src.exclusiveScissorCount;
+    pExclusiveScissors = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pExclusiveScissors) {
+        pExclusiveScissors = new VkRect2D[src.exclusiveScissorCount];
+        memcpy ((void *)pExclusiveScissors, (void *)src.pExclusiveScissors, sizeof(VkRect2D)*src.exclusiveScissorCount);
+    }
+}
+
+safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV& safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV::operator=(const safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pExclusiveScissors)
+        delete[] pExclusiveScissors;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    exclusiveScissorCount = src.exclusiveScissorCount;
+    pExclusiveScissors = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pExclusiveScissors) {
+        pExclusiveScissors = new VkRect2D[src.exclusiveScissorCount];
+        memcpy ((void *)pExclusiveScissors, (void *)src.pExclusiveScissors, sizeof(VkRect2D)*src.exclusiveScissorCount);
+    }
+
+    return *this;
+}
+
+safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV::~safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV()
+{
+    if (pExclusiveScissors)
+        delete[] pExclusiveScissors;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV::initialize(const VkPipelineViewportExclusiveScissorStateCreateInfoNV* in_struct)
+{
+    sType = in_struct->sType;
+    exclusiveScissorCount = in_struct->exclusiveScissorCount;
+    pExclusiveScissors = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pExclusiveScissors) {
+        pExclusiveScissors = new VkRect2D[in_struct->exclusiveScissorCount];
+        memcpy ((void *)pExclusiveScissors, (void *)in_struct->pExclusiveScissors, sizeof(VkRect2D)*in_struct->exclusiveScissorCount);
+    }
+}
+
+void safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV::initialize(const safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV* src)
+{
+    sType = src->sType;
+    exclusiveScissorCount = src->exclusiveScissorCount;
+    pExclusiveScissors = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pExclusiveScissors) {
+        pExclusiveScissors = new VkRect2D[src->exclusiveScissorCount];
+        memcpy ((void *)pExclusiveScissors, (void *)src->pExclusiveScissors, sizeof(VkRect2D)*src->exclusiveScissorCount);
+    }
+}
+
+safe_VkPhysicalDeviceExclusiveScissorFeaturesNV::safe_VkPhysicalDeviceExclusiveScissorFeaturesNV(const VkPhysicalDeviceExclusiveScissorFeaturesNV* in_struct) :
+    sType(in_struct->sType),
+    exclusiveScissor(in_struct->exclusiveScissor)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceExclusiveScissorFeaturesNV::safe_VkPhysicalDeviceExclusiveScissorFeaturesNV() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceExclusiveScissorFeaturesNV::safe_VkPhysicalDeviceExclusiveScissorFeaturesNV(const safe_VkPhysicalDeviceExclusiveScissorFeaturesNV& src)
+{
+    sType = src.sType;
+    exclusiveScissor = src.exclusiveScissor;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceExclusiveScissorFeaturesNV& safe_VkPhysicalDeviceExclusiveScissorFeaturesNV::operator=(const safe_VkPhysicalDeviceExclusiveScissorFeaturesNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    exclusiveScissor = src.exclusiveScissor;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceExclusiveScissorFeaturesNV::~safe_VkPhysicalDeviceExclusiveScissorFeaturesNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceExclusiveScissorFeaturesNV::initialize(const VkPhysicalDeviceExclusiveScissorFeaturesNV* in_struct)
+{
+    sType = in_struct->sType;
+    exclusiveScissor = in_struct->exclusiveScissor;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceExclusiveScissorFeaturesNV::initialize(const safe_VkPhysicalDeviceExclusiveScissorFeaturesNV* src)
+{
+    sType = src->sType;
+    exclusiveScissor = src->exclusiveScissor;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkQueueFamilyCheckpointPropertiesNV::safe_VkQueueFamilyCheckpointPropertiesNV(const VkQueueFamilyCheckpointPropertiesNV* in_struct) :
+    sType(in_struct->sType),
+    checkpointExecutionStageMask(in_struct->checkpointExecutionStageMask)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkQueueFamilyCheckpointPropertiesNV::safe_VkQueueFamilyCheckpointPropertiesNV() :
+    pNext(nullptr)
+{}
+
+safe_VkQueueFamilyCheckpointPropertiesNV::safe_VkQueueFamilyCheckpointPropertiesNV(const safe_VkQueueFamilyCheckpointPropertiesNV& src)
+{
+    sType = src.sType;
+    checkpointExecutionStageMask = src.checkpointExecutionStageMask;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkQueueFamilyCheckpointPropertiesNV& safe_VkQueueFamilyCheckpointPropertiesNV::operator=(const safe_VkQueueFamilyCheckpointPropertiesNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    checkpointExecutionStageMask = src.checkpointExecutionStageMask;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkQueueFamilyCheckpointPropertiesNV::~safe_VkQueueFamilyCheckpointPropertiesNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkQueueFamilyCheckpointPropertiesNV::initialize(const VkQueueFamilyCheckpointPropertiesNV* in_struct)
+{
+    sType = in_struct->sType;
+    checkpointExecutionStageMask = in_struct->checkpointExecutionStageMask;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkQueueFamilyCheckpointPropertiesNV::initialize(const safe_VkQueueFamilyCheckpointPropertiesNV* src)
+{
+    sType = src->sType;
+    checkpointExecutionStageMask = src->checkpointExecutionStageMask;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkCheckpointDataNV::safe_VkCheckpointDataNV(const VkCheckpointDataNV* in_struct) :
+    sType(in_struct->sType),
+    stage(in_struct->stage),
+    pCheckpointMarker(in_struct->pCheckpointMarker)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkCheckpointDataNV::safe_VkCheckpointDataNV() :
+    pNext(nullptr),
+    pCheckpointMarker(nullptr)
+{}
+
+safe_VkCheckpointDataNV::safe_VkCheckpointDataNV(const safe_VkCheckpointDataNV& src)
+{
+    sType = src.sType;
+    stage = src.stage;
+    pCheckpointMarker = src.pCheckpointMarker;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkCheckpointDataNV& safe_VkCheckpointDataNV::operator=(const safe_VkCheckpointDataNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    stage = src.stage;
+    pCheckpointMarker = src.pCheckpointMarker;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkCheckpointDataNV::~safe_VkCheckpointDataNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkCheckpointDataNV::initialize(const VkCheckpointDataNV* in_struct)
+{
+    sType = in_struct->sType;
+    stage = in_struct->stage;
+    pCheckpointMarker = in_struct->pCheckpointMarker;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkCheckpointDataNV::initialize(const safe_VkCheckpointDataNV* src)
+{
+    sType = src->sType;
+    stage = src->stage;
+    pCheckpointMarker = src->pCheckpointMarker;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL::safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL(const VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL* in_struct) :
+    sType(in_struct->sType),
+    shaderIntegerFunctions2(in_struct->shaderIntegerFunctions2)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL::safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL::safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL(const safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL& src)
+{
+    sType = src.sType;
+    shaderIntegerFunctions2 = src.shaderIntegerFunctions2;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL& safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL::operator=(const safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    shaderIntegerFunctions2 = src.shaderIntegerFunctions2;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL::~safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL::initialize(const VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL* in_struct)
+{
+    sType = in_struct->sType;
+    shaderIntegerFunctions2 = in_struct->shaderIntegerFunctions2;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL::initialize(const safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL* src)
+{
+    sType = src->sType;
+    shaderIntegerFunctions2 = src->shaderIntegerFunctions2;
+    pNext = SafePnextCopy(src->pNext);
+}
+
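+// Note: unlike the pNext-only structs above, this wrapper owns a heap copy of
+// valueString made with SafeStringCopy() and released with delete[]; the
+// scalar members (value32/value64/valueFloat/valueBool) are copied by value.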
+safe_VkPerformanceValueDataINTEL::safe_VkPerformanceValueDataINTEL(const VkPerformanceValueDataINTEL* in_struct) :
+    value32(in_struct->value32),
+    value64(in_struct->value64),
+    valueFloat(in_struct->valueFloat),
+    valueBool(in_struct->valueBool)
+{
+    valueString = SafeStringCopy(in_struct->valueString);
+}
+
+safe_VkPerformanceValueDataINTEL::safe_VkPerformanceValueDataINTEL() :
+    valueString(nullptr)
+{}
+
+safe_VkPerformanceValueDataINTEL::safe_VkPerformanceValueDataINTEL(const safe_VkPerformanceValueDataINTEL& src)
+{
+    value32 = src.value32;
+    value64 = src.value64;
+    valueFloat = src.valueFloat;
+    valueBool = src.valueBool;
+    valueString = SafeStringCopy(src.valueString);
+}
+
+safe_VkPerformanceValueDataINTEL& safe_VkPerformanceValueDataINTEL::operator=(const safe_VkPerformanceValueDataINTEL& src)
+{
+    if (&src == this) return *this;
+
+    if (valueString) delete [] valueString;
+
+    value32 = src.value32;
+    value64 = src.value64;
+    valueFloat = src.valueFloat;
+    valueBool = src.valueBool;
+    valueString = SafeStringCopy(src.valueString);
+
+    return *this;
+}
+
+safe_VkPerformanceValueDataINTEL::~safe_VkPerformanceValueDataINTEL()
+{
+    if (valueString) delete [] valueString;
+}
+
+void safe_VkPerformanceValueDataINTEL::initialize(const VkPerformanceValueDataINTEL* in_struct)
+{
+    value32 = in_struct->value32;
+    value64 = in_struct->value64;
+    valueFloat = in_struct->valueFloat;
+    valueBool = in_struct->valueBool;
+    valueString = SafeStringCopy(in_struct->valueString);
+}
+
+void safe_VkPerformanceValueDataINTEL::initialize(const safe_VkPerformanceValueDataINTEL* src)
+{
+    value32 = src->value32;
+    value64 = src->value64;
+    valueFloat = src->valueFloat;
+    valueBool = src->valueBool;
+    valueString = SafeStringCopy(src->valueString);
+}
+
+safe_VkInitializePerformanceApiInfoINTEL::safe_VkInitializePerformanceApiInfoINTEL(const VkInitializePerformanceApiInfoINTEL* in_struct) :
+    sType(in_struct->sType),
+    pUserData(in_struct->pUserData)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkInitializePerformanceApiInfoINTEL::safe_VkInitializePerformanceApiInfoINTEL() :
+    pNext(nullptr),
+    pUserData(nullptr)
+{}
+
+safe_VkInitializePerformanceApiInfoINTEL::safe_VkInitializePerformanceApiInfoINTEL(const safe_VkInitializePerformanceApiInfoINTEL& src)
+{
+    sType = src.sType;
+    pUserData = src.pUserData;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkInitializePerformanceApiInfoINTEL& safe_VkInitializePerformanceApiInfoINTEL::operator=(const safe_VkInitializePerformanceApiInfoINTEL& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    pUserData = src.pUserData;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkInitializePerformanceApiInfoINTEL::~safe_VkInitializePerformanceApiInfoINTEL()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkInitializePerformanceApiInfoINTEL::initialize(const VkInitializePerformanceApiInfoINTEL* in_struct)
+{
+    sType = in_struct->sType;
+    pUserData = in_struct->pUserData;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkInitializePerformanceApiInfoINTEL::initialize(const safe_VkInitializePerformanceApiInfoINTEL* src)
+{
+    sType = src->sType;
+    pUserData = src->pUserData;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkQueryPoolCreateInfoINTEL::safe_VkQueryPoolCreateInfoINTEL(const VkQueryPoolCreateInfoINTEL* in_struct) :
+    sType(in_struct->sType),
+    performanceCountersSampling(in_struct->performanceCountersSampling)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkQueryPoolCreateInfoINTEL::safe_VkQueryPoolCreateInfoINTEL() :
+    pNext(nullptr)
+{}
+
+safe_VkQueryPoolCreateInfoINTEL::safe_VkQueryPoolCreateInfoINTEL(const safe_VkQueryPoolCreateInfoINTEL& src)
+{
+    sType = src.sType;
+    performanceCountersSampling = src.performanceCountersSampling;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkQueryPoolCreateInfoINTEL& safe_VkQueryPoolCreateInfoINTEL::operator=(const safe_VkQueryPoolCreateInfoINTEL& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    performanceCountersSampling = src.performanceCountersSampling;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkQueryPoolCreateInfoINTEL::~safe_VkQueryPoolCreateInfoINTEL()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkQueryPoolCreateInfoINTEL::initialize(const VkQueryPoolCreateInfoINTEL* in_struct)
+{
+    sType = in_struct->sType;
+    performanceCountersSampling = in_struct->performanceCountersSampling;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkQueryPoolCreateInfoINTEL::initialize(const safe_VkQueryPoolCreateInfoINTEL* src)
+{
+    sType = src->sType;
+    performanceCountersSampling = src->performanceCountersSampling;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPerformanceMarkerInfoINTEL::safe_VkPerformanceMarkerInfoINTEL(const VkPerformanceMarkerInfoINTEL* in_struct) :
+    sType(in_struct->sType),
+    marker(in_struct->marker)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPerformanceMarkerInfoINTEL::safe_VkPerformanceMarkerInfoINTEL() :
+    pNext(nullptr)
+{}
+
+safe_VkPerformanceMarkerInfoINTEL::safe_VkPerformanceMarkerInfoINTEL(const safe_VkPerformanceMarkerInfoINTEL& src)
+{
+    sType = src.sType;
+    marker = src.marker;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPerformanceMarkerInfoINTEL& safe_VkPerformanceMarkerInfoINTEL::operator=(const safe_VkPerformanceMarkerInfoINTEL& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    marker = src.marker;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPerformanceMarkerInfoINTEL::~safe_VkPerformanceMarkerInfoINTEL()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPerformanceMarkerInfoINTEL::initialize(const VkPerformanceMarkerInfoINTEL* in_struct)
+{
+    sType = in_struct->sType;
+    marker = in_struct->marker;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPerformanceMarkerInfoINTEL::initialize(const safe_VkPerformanceMarkerInfoINTEL* src)
+{
+    sType = src->sType;
+    marker = src->marker;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPerformanceStreamMarkerInfoINTEL::safe_VkPerformanceStreamMarkerInfoINTEL(const VkPerformanceStreamMarkerInfoINTEL* in_struct) :
+    sType(in_struct->sType),
+    marker(in_struct->marker)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPerformanceStreamMarkerInfoINTEL::safe_VkPerformanceStreamMarkerInfoINTEL() :
+    pNext(nullptr)
+{}
+
+safe_VkPerformanceStreamMarkerInfoINTEL::safe_VkPerformanceStreamMarkerInfoINTEL(const safe_VkPerformanceStreamMarkerInfoINTEL& src)
+{
+    sType = src.sType;
+    marker = src.marker;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPerformanceStreamMarkerInfoINTEL& safe_VkPerformanceStreamMarkerInfoINTEL::operator=(const safe_VkPerformanceStreamMarkerInfoINTEL& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    marker = src.marker;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPerformanceStreamMarkerInfoINTEL::~safe_VkPerformanceStreamMarkerInfoINTEL()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPerformanceStreamMarkerInfoINTEL::initialize(const VkPerformanceStreamMarkerInfoINTEL* in_struct)
+{
+    sType = in_struct->sType;
+    marker = in_struct->marker;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPerformanceStreamMarkerInfoINTEL::initialize(const safe_VkPerformanceStreamMarkerInfoINTEL* src)
+{
+    sType = src->sType;
+    marker = src->marker;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPerformanceOverrideInfoINTEL::safe_VkPerformanceOverrideInfoINTEL(const VkPerformanceOverrideInfoINTEL* in_struct) :
+    sType(in_struct->sType),
+    type(in_struct->type),
+    enable(in_struct->enable),
+    parameter(in_struct->parameter)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPerformanceOverrideInfoINTEL::safe_VkPerformanceOverrideInfoINTEL() :
+    pNext(nullptr)
+{}
+
+safe_VkPerformanceOverrideInfoINTEL::safe_VkPerformanceOverrideInfoINTEL(const safe_VkPerformanceOverrideInfoINTEL& src)
+{
+    sType = src.sType;
+    type = src.type;
+    enable = src.enable;
+    parameter = src.parameter;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPerformanceOverrideInfoINTEL& safe_VkPerformanceOverrideInfoINTEL::operator=(const safe_VkPerformanceOverrideInfoINTEL& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    type = src.type;
+    enable = src.enable;
+    parameter = src.parameter;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPerformanceOverrideInfoINTEL::~safe_VkPerformanceOverrideInfoINTEL()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPerformanceOverrideInfoINTEL::initialize(const VkPerformanceOverrideInfoINTEL* in_struct)
+{
+    sType = in_struct->sType;
+    type = in_struct->type;
+    enable = in_struct->enable;
+    parameter = in_struct->parameter;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPerformanceOverrideInfoINTEL::initialize(const safe_VkPerformanceOverrideInfoINTEL* src)
+{
+    sType = src->sType;
+    type = src->type;
+    enable = src->enable;
+    parameter = src->parameter;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPerformanceConfigurationAcquireInfoINTEL::safe_VkPerformanceConfigurationAcquireInfoINTEL(const VkPerformanceConfigurationAcquireInfoINTEL* in_struct) :
+    sType(in_struct->sType),
+    type(in_struct->type)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPerformanceConfigurationAcquireInfoINTEL::safe_VkPerformanceConfigurationAcquireInfoINTEL() :
+    pNext(nullptr)
+{}
+
+safe_VkPerformanceConfigurationAcquireInfoINTEL::safe_VkPerformanceConfigurationAcquireInfoINTEL(const safe_VkPerformanceConfigurationAcquireInfoINTEL& src)
+{
+    sType = src.sType;
+    type = src.type;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPerformanceConfigurationAcquireInfoINTEL& safe_VkPerformanceConfigurationAcquireInfoINTEL::operator=(const safe_VkPerformanceConfigurationAcquireInfoINTEL& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    type = src.type;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPerformanceConfigurationAcquireInfoINTEL::~safe_VkPerformanceConfigurationAcquireInfoINTEL()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPerformanceConfigurationAcquireInfoINTEL::initialize(const VkPerformanceConfigurationAcquireInfoINTEL* in_struct)
+{
+    sType = in_struct->sType;
+    type = in_struct->type;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPerformanceConfigurationAcquireInfoINTEL::initialize(const safe_VkPerformanceConfigurationAcquireInfoINTEL* src)
+{
+    sType = src->sType;
+    type = src->type;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDevicePCIBusInfoPropertiesEXT::safe_VkPhysicalDevicePCIBusInfoPropertiesEXT(const VkPhysicalDevicePCIBusInfoPropertiesEXT* in_struct) :
+    sType(in_struct->sType),
+    pciDomain(in_struct->pciDomain),
+    pciBus(in_struct->pciBus),
+    pciDevice(in_struct->pciDevice),
+    pciFunction(in_struct->pciFunction)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDevicePCIBusInfoPropertiesEXT::safe_VkPhysicalDevicePCIBusInfoPropertiesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDevicePCIBusInfoPropertiesEXT::safe_VkPhysicalDevicePCIBusInfoPropertiesEXT(const safe_VkPhysicalDevicePCIBusInfoPropertiesEXT& src)
+{
+    sType = src.sType;
+    pciDomain = src.pciDomain;
+    pciBus = src.pciBus;
+    pciDevice = src.pciDevice;
+    pciFunction = src.pciFunction;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDevicePCIBusInfoPropertiesEXT& safe_VkPhysicalDevicePCIBusInfoPropertiesEXT::operator=(const safe_VkPhysicalDevicePCIBusInfoPropertiesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    pciDomain = src.pciDomain;
+    pciBus = src.pciBus;
+    pciDevice = src.pciDevice;
+    pciFunction = src.pciFunction;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDevicePCIBusInfoPropertiesEXT::~safe_VkPhysicalDevicePCIBusInfoPropertiesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDevicePCIBusInfoPropertiesEXT::initialize(const VkPhysicalDevicePCIBusInfoPropertiesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    pciDomain = in_struct->pciDomain;
+    pciBus = in_struct->pciBus;
+    pciDevice = in_struct->pciDevice;
+    pciFunction = in_struct->pciFunction;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDevicePCIBusInfoPropertiesEXT::initialize(const safe_VkPhysicalDevicePCIBusInfoPropertiesEXT* src)
+{
+    sType = src->sType;
+    pciDomain = src->pciDomain;
+    pciBus = src->pciBus;
+    pciDevice = src->pciDevice;
+    pciFunction = src->pciFunction;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD::safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD(const VkDisplayNativeHdrSurfaceCapabilitiesAMD* in_struct) :
+    sType(in_struct->sType),
+    localDimmingSupport(in_struct->localDimmingSupport)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD::safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD() :
+    pNext(nullptr)
+{}
+
+safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD::safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD(const safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD& src)
+{
+    sType = src.sType;
+    localDimmingSupport = src.localDimmingSupport;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD& safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD::operator=(const safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    localDimmingSupport = src.localDimmingSupport;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD::~safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD::initialize(const VkDisplayNativeHdrSurfaceCapabilitiesAMD* in_struct)
+{
+    sType = in_struct->sType;
+    localDimmingSupport = in_struct->localDimmingSupport;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD::initialize(const safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD* src)
+{
+    sType = src->sType;
+    localDimmingSupport = src->localDimmingSupport;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkSwapchainDisplayNativeHdrCreateInfoAMD::safe_VkSwapchainDisplayNativeHdrCreateInfoAMD(const VkSwapchainDisplayNativeHdrCreateInfoAMD* in_struct) :
+    sType(in_struct->sType),
+    localDimmingEnable(in_struct->localDimmingEnable)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkSwapchainDisplayNativeHdrCreateInfoAMD::safe_VkSwapchainDisplayNativeHdrCreateInfoAMD() :
+    pNext(nullptr)
+{}
+
+safe_VkSwapchainDisplayNativeHdrCreateInfoAMD::safe_VkSwapchainDisplayNativeHdrCreateInfoAMD(const safe_VkSwapchainDisplayNativeHdrCreateInfoAMD& src)
+{
+    sType = src.sType;
+    localDimmingEnable = src.localDimmingEnable;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkSwapchainDisplayNativeHdrCreateInfoAMD& safe_VkSwapchainDisplayNativeHdrCreateInfoAMD::operator=(const safe_VkSwapchainDisplayNativeHdrCreateInfoAMD& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    localDimmingEnable = src.localDimmingEnable;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkSwapchainDisplayNativeHdrCreateInfoAMD::~safe_VkSwapchainDisplayNativeHdrCreateInfoAMD()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSwapchainDisplayNativeHdrCreateInfoAMD::initialize(const VkSwapchainDisplayNativeHdrCreateInfoAMD* in_struct)
+{
+    sType = in_struct->sType;
+    localDimmingEnable = in_struct->localDimmingEnable;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkSwapchainDisplayNativeHdrCreateInfoAMD::initialize(const safe_VkSwapchainDisplayNativeHdrCreateInfoAMD* src)
+{
+    sType = src->sType;
+    localDimmingEnable = src->localDimmingEnable;
+    pNext = SafePnextCopy(src->pNext);
+}
+#ifdef VK_USE_PLATFORM_FUCHSIA
+
+
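+// Note: every member of this struct is a plain value (imagePipeHandle included),
+// so only the pNext chain needs the SafePnextCopy()/FreePnextChain() treatment.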
+safe_VkImagePipeSurfaceCreateInfoFUCHSIA::safe_VkImagePipeSurfaceCreateInfoFUCHSIA(const VkImagePipeSurfaceCreateInfoFUCHSIA* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    imagePipeHandle(in_struct->imagePipeHandle)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkImagePipeSurfaceCreateInfoFUCHSIA::safe_VkImagePipeSurfaceCreateInfoFUCHSIA() :
+    pNext(nullptr)
+{}
+
+safe_VkImagePipeSurfaceCreateInfoFUCHSIA::safe_VkImagePipeSurfaceCreateInfoFUCHSIA(const safe_VkImagePipeSurfaceCreateInfoFUCHSIA& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    imagePipeHandle = src.imagePipeHandle;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkImagePipeSurfaceCreateInfoFUCHSIA& safe_VkImagePipeSurfaceCreateInfoFUCHSIA::operator=(const safe_VkImagePipeSurfaceCreateInfoFUCHSIA& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    imagePipeHandle = src.imagePipeHandle;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkImagePipeSurfaceCreateInfoFUCHSIA::~safe_VkImagePipeSurfaceCreateInfoFUCHSIA()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkImagePipeSurfaceCreateInfoFUCHSIA::initialize(const VkImagePipeSurfaceCreateInfoFUCHSIA* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    imagePipeHandle = in_struct->imagePipeHandle;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkImagePipeSurfaceCreateInfoFUCHSIA::initialize(const safe_VkImagePipeSurfaceCreateInfoFUCHSIA* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    imagePipeHandle = src->imagePipeHandle;
+    pNext = SafePnextCopy(src->pNext);
+}
+#endif // VK_USE_PLATFORM_FUCHSIA
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+
+
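+// Note: in addition to the pNext chain, this wrapper deep-copies the layer
+// object referenced by pLayer (new CAMetalLayer on copy, delete in the
+// destructor and in operator=).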
+safe_VkMetalSurfaceCreateInfoEXT::safe_VkMetalSurfaceCreateInfoEXT(const VkMetalSurfaceCreateInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    pLayer(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pLayer) {
+        pLayer = new CAMetalLayer(*in_struct->pLayer);
+    }
+}
+
+safe_VkMetalSurfaceCreateInfoEXT::safe_VkMetalSurfaceCreateInfoEXT() :
+    pNext(nullptr),
+    pLayer(nullptr)
+{}
+
+safe_VkMetalSurfaceCreateInfoEXT::safe_VkMetalSurfaceCreateInfoEXT(const safe_VkMetalSurfaceCreateInfoEXT& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    pLayer = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pLayer) {
+        pLayer = new CAMetalLayer(*src.pLayer);
+    }
+}
+
+safe_VkMetalSurfaceCreateInfoEXT& safe_VkMetalSurfaceCreateInfoEXT::operator=(const safe_VkMetalSurfaceCreateInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pLayer)
+        delete pLayer;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    pLayer = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pLayer) {
+        pLayer = new CAMetalLayer(*src.pLayer);
+    }
+
+    return *this;
+}
+
+safe_VkMetalSurfaceCreateInfoEXT::~safe_VkMetalSurfaceCreateInfoEXT()
+{
+    if (pLayer)
+        delete pLayer;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkMetalSurfaceCreateInfoEXT::initialize(const VkMetalSurfaceCreateInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    pLayer = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pLayer) {
+        pLayer = new CAMetalLayer(*in_struct->pLayer);
+    }
+}
+
+void safe_VkMetalSurfaceCreateInfoEXT::initialize(const safe_VkMetalSurfaceCreateInfoEXT* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    pLayer = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pLayer) {
+        pLayer = new CAMetalLayer(*src->pLayer);
+    }
+}
+#endif // VK_USE_PLATFORM_METAL_EXT
+
+
+safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT::safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT(const VkPhysicalDeviceFragmentDensityMapFeaturesEXT* in_struct) :
+    sType(in_struct->sType),
+    fragmentDensityMap(in_struct->fragmentDensityMap),
+    fragmentDensityMapDynamic(in_struct->fragmentDensityMapDynamic),
+    fragmentDensityMapNonSubsampledImages(in_struct->fragmentDensityMapNonSubsampledImages)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT::safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT::safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT(const safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT& src)
+{
+    sType = src.sType;
+    fragmentDensityMap = src.fragmentDensityMap;
+    fragmentDensityMapDynamic = src.fragmentDensityMapDynamic;
+    fragmentDensityMapNonSubsampledImages = src.fragmentDensityMapNonSubsampledImages;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT& safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT::operator=(const safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    fragmentDensityMap = src.fragmentDensityMap;
+    fragmentDensityMapDynamic = src.fragmentDensityMapDynamic;
+    fragmentDensityMapNonSubsampledImages = src.fragmentDensityMapNonSubsampledImages;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT::~safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT::initialize(const VkPhysicalDeviceFragmentDensityMapFeaturesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    fragmentDensityMap = in_struct->fragmentDensityMap;
+    fragmentDensityMapDynamic = in_struct->fragmentDensityMapDynamic;
+    fragmentDensityMapNonSubsampledImages = in_struct->fragmentDensityMapNonSubsampledImages;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT::initialize(const safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT* src)
+{
+    sType = src->sType;
+    fragmentDensityMap = src->fragmentDensityMap;
+    fragmentDensityMapDynamic = src->fragmentDensityMapDynamic;
+    fragmentDensityMapNonSubsampledImages = src->fragmentDensityMapNonSubsampledImages;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT::safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT(const VkPhysicalDeviceFragmentDensityMapPropertiesEXT* in_struct) :
+    sType(in_struct->sType),
+    minFragmentDensityTexelSize(in_struct->minFragmentDensityTexelSize),
+    maxFragmentDensityTexelSize(in_struct->maxFragmentDensityTexelSize),
+    fragmentDensityInvocations(in_struct->fragmentDensityInvocations)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT::safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT::safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT(const safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT& src)
+{
+    sType = src.sType;
+    minFragmentDensityTexelSize = src.minFragmentDensityTexelSize;
+    maxFragmentDensityTexelSize = src.maxFragmentDensityTexelSize;
+    fragmentDensityInvocations = src.fragmentDensityInvocations;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT& safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT::operator=(const safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    minFragmentDensityTexelSize = src.minFragmentDensityTexelSize;
+    maxFragmentDensityTexelSize = src.maxFragmentDensityTexelSize;
+    fragmentDensityInvocations = src.fragmentDensityInvocations;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT::~safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT::initialize(const VkPhysicalDeviceFragmentDensityMapPropertiesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    minFragmentDensityTexelSize = in_struct->minFragmentDensityTexelSize;
+    maxFragmentDensityTexelSize = in_struct->maxFragmentDensityTexelSize;
+    fragmentDensityInvocations = in_struct->fragmentDensityInvocations;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT::initialize(const safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT* src)
+{
+    sType = src->sType;
+    minFragmentDensityTexelSize = src->minFragmentDensityTexelSize;
+    maxFragmentDensityTexelSize = src->maxFragmentDensityTexelSize;
+    fragmentDensityInvocations = src->fragmentDensityInvocations;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkRenderPassFragmentDensityMapCreateInfoEXT::safe_VkRenderPassFragmentDensityMapCreateInfoEXT(const VkRenderPassFragmentDensityMapCreateInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    fragmentDensityMapAttachment(in_struct->fragmentDensityMapAttachment)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkRenderPassFragmentDensityMapCreateInfoEXT::safe_VkRenderPassFragmentDensityMapCreateInfoEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkRenderPassFragmentDensityMapCreateInfoEXT::safe_VkRenderPassFragmentDensityMapCreateInfoEXT(const safe_VkRenderPassFragmentDensityMapCreateInfoEXT& src)
+{
+    sType = src.sType;
+    fragmentDensityMapAttachment = src.fragmentDensityMapAttachment;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkRenderPassFragmentDensityMapCreateInfoEXT& safe_VkRenderPassFragmentDensityMapCreateInfoEXT::operator=(const safe_VkRenderPassFragmentDensityMapCreateInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    fragmentDensityMapAttachment = src.fragmentDensityMapAttachment;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkRenderPassFragmentDensityMapCreateInfoEXT::~safe_VkRenderPassFragmentDensityMapCreateInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkRenderPassFragmentDensityMapCreateInfoEXT::initialize(const VkRenderPassFragmentDensityMapCreateInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    fragmentDensityMapAttachment = in_struct->fragmentDensityMapAttachment;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkRenderPassFragmentDensityMapCreateInfoEXT::initialize(const safe_VkRenderPassFragmentDensityMapCreateInfoEXT* src)
+{
+    sType = src->sType;
+    fragmentDensityMapAttachment = src->fragmentDensityMapAttachment;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT::safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT(const VkPhysicalDeviceScalarBlockLayoutFeaturesEXT* in_struct) :
+    sType(in_struct->sType),
+    scalarBlockLayout(in_struct->scalarBlockLayout)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT::safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT::safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT(const safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT& src)
+{
+    sType = src.sType;
+    scalarBlockLayout = src.scalarBlockLayout;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT& safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT::operator=(const safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    scalarBlockLayout = src.scalarBlockLayout;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT::~safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT::initialize(const VkPhysicalDeviceScalarBlockLayoutFeaturesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    scalarBlockLayout = in_struct->scalarBlockLayout;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT::initialize(const safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT* src)
+{
+    sType = src->sType;
+    scalarBlockLayout = src->scalarBlockLayout;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT::safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT(const VkPhysicalDeviceSubgroupSizeControlFeaturesEXT* in_struct) :
+    sType(in_struct->sType),
+    subgroupSizeControl(in_struct->subgroupSizeControl),
+    computeFullSubgroups(in_struct->computeFullSubgroups)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT::safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT::safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT(const safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT& src)
+{
+    sType = src.sType;
+    subgroupSizeControl = src.subgroupSizeControl;
+    computeFullSubgroups = src.computeFullSubgroups;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT& safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT::operator=(const safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    subgroupSizeControl = src.subgroupSizeControl;
+    computeFullSubgroups = src.computeFullSubgroups;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT::~safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT::initialize(const VkPhysicalDeviceSubgroupSizeControlFeaturesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    subgroupSizeControl = in_struct->subgroupSizeControl;
+    computeFullSubgroups = in_struct->computeFullSubgroups;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT::initialize(const safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT* src)
+{
+    sType = src->sType;
+    subgroupSizeControl = src->subgroupSizeControl;
+    computeFullSubgroups = src->computeFullSubgroups;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT::safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT(const VkPhysicalDeviceSubgroupSizeControlPropertiesEXT* in_struct) :
+    sType(in_struct->sType),
+    minSubgroupSize(in_struct->minSubgroupSize),
+    maxSubgroupSize(in_struct->maxSubgroupSize),
+    maxComputeWorkgroupSubgroups(in_struct->maxComputeWorkgroupSubgroups),
+    requiredSubgroupSizeStages(in_struct->requiredSubgroupSizeStages)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT::safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT::safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT(const safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT& src)
+{
+    sType = src.sType;
+    minSubgroupSize = src.minSubgroupSize;
+    maxSubgroupSize = src.maxSubgroupSize;
+    maxComputeWorkgroupSubgroups = src.maxComputeWorkgroupSubgroups;
+    requiredSubgroupSizeStages = src.requiredSubgroupSizeStages;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT& safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT::operator=(const safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    minSubgroupSize = src.minSubgroupSize;
+    maxSubgroupSize = src.maxSubgroupSize;
+    maxComputeWorkgroupSubgroups = src.maxComputeWorkgroupSubgroups;
+    requiredSubgroupSizeStages = src.requiredSubgroupSizeStages;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT::~safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT::initialize(const VkPhysicalDeviceSubgroupSizeControlPropertiesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    minSubgroupSize = in_struct->minSubgroupSize;
+    maxSubgroupSize = in_struct->maxSubgroupSize;
+    maxComputeWorkgroupSubgroups = in_struct->maxComputeWorkgroupSubgroups;
+    requiredSubgroupSizeStages = in_struct->requiredSubgroupSizeStages;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT::initialize(const safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT* src)
+{
+    sType = src->sType;
+    minSubgroupSize = src->minSubgroupSize;
+    maxSubgroupSize = src->maxSubgroupSize;
+    maxComputeWorkgroupSubgroups = src->maxComputeWorkgroupSubgroups;
+    requiredSubgroupSizeStages = src->requiredSubgroupSizeStages;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT::safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT(const VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    requiredSubgroupSize(in_struct->requiredSubgroupSize)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT::safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT::safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT(const safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT& src)
+{
+    sType = src.sType;
+    requiredSubgroupSize = src.requiredSubgroupSize;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT& safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT::operator=(const safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    requiredSubgroupSize = src.requiredSubgroupSize;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT::~safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT::initialize(const VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    requiredSubgroupSize = in_struct->requiredSubgroupSize;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT::initialize(const safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT* src)
+{
+    sType = src->sType;
+    requiredSubgroupSize = src->requiredSubgroupSize;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceShaderCoreProperties2AMD::safe_VkPhysicalDeviceShaderCoreProperties2AMD(const VkPhysicalDeviceShaderCoreProperties2AMD* in_struct) :
+    sType(in_struct->sType),
+    shaderCoreFeatures(in_struct->shaderCoreFeatures),
+    activeComputeUnitCount(in_struct->activeComputeUnitCount)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceShaderCoreProperties2AMD::safe_VkPhysicalDeviceShaderCoreProperties2AMD() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceShaderCoreProperties2AMD::safe_VkPhysicalDeviceShaderCoreProperties2AMD(const safe_VkPhysicalDeviceShaderCoreProperties2AMD& src)
+{
+    sType = src.sType;
+    shaderCoreFeatures = src.shaderCoreFeatures;
+    activeComputeUnitCount = src.activeComputeUnitCount;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceShaderCoreProperties2AMD& safe_VkPhysicalDeviceShaderCoreProperties2AMD::operator=(const safe_VkPhysicalDeviceShaderCoreProperties2AMD& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    shaderCoreFeatures = src.shaderCoreFeatures;
+    activeComputeUnitCount = src.activeComputeUnitCount;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceShaderCoreProperties2AMD::~safe_VkPhysicalDeviceShaderCoreProperties2AMD()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceShaderCoreProperties2AMD::initialize(const VkPhysicalDeviceShaderCoreProperties2AMD* in_struct)
+{
+    sType = in_struct->sType;
+    shaderCoreFeatures = in_struct->shaderCoreFeatures;
+    activeComputeUnitCount = in_struct->activeComputeUnitCount;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceShaderCoreProperties2AMD::initialize(const safe_VkPhysicalDeviceShaderCoreProperties2AMD* src)
+{
+    sType = src->sType;
+    shaderCoreFeatures = src->shaderCoreFeatures;
+    activeComputeUnitCount = src->activeComputeUnitCount;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD::safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD(const VkPhysicalDeviceCoherentMemoryFeaturesAMD* in_struct) :
+    sType(in_struct->sType),
+    deviceCoherentMemory(in_struct->deviceCoherentMemory)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD::safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD::safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD(const safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD& src)
+{
+    sType = src.sType;
+    deviceCoherentMemory = src.deviceCoherentMemory;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD& safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD::operator=(const safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    deviceCoherentMemory = src.deviceCoherentMemory;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD::~safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD::initialize(const VkPhysicalDeviceCoherentMemoryFeaturesAMD* in_struct)
+{
+    sType = in_struct->sType;
+    deviceCoherentMemory = in_struct->deviceCoherentMemory;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD::initialize(const safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD* src)
+{
+    sType = src->sType;
+    deviceCoherentMemory = src->deviceCoherentMemory;
+    pNext = SafePnextCopy(src->pNext);
+}
+
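+// Note: heapBudget and heapUsage are fixed VK_MAX_MEMORY_HEAPS-sized arrays and
+// are copied element by element; no heap allocation is needed beyond the pNext
+// chain.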
+safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT::safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT(const VkPhysicalDeviceMemoryBudgetPropertiesEXT* in_struct) :
+    sType(in_struct->sType)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    for (uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i) {
+        heapBudget[i] = in_struct->heapBudget[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i) {
+        heapUsage[i] = in_struct->heapUsage[i];
+    }
+}
+
+safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT::safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT::safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT(const safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT& src)
+{
+    sType = src.sType;
+    pNext = SafePnextCopy(src.pNext);
+    for (uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i) {
+        heapBudget[i] = src.heapBudget[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i) {
+        heapUsage[i] = src.heapUsage[i];
+    }
+}
+
+safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT& safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT::operator=(const safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    pNext = SafePnextCopy(src.pNext);
+    for (uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i) {
+        heapBudget[i] = src.heapBudget[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i) {
+        heapUsage[i] = src.heapUsage[i];
+    }
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT::~safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT::initialize(const VkPhysicalDeviceMemoryBudgetPropertiesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    pNext = SafePnextCopy(in_struct->pNext);
+    for (uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i) {
+        heapBudget[i] = in_struct->heapBudget[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i) {
+        heapUsage[i] = in_struct->heapUsage[i];
+    }
+}
+
+void safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT::initialize(const safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT* src)
+{
+    sType = src->sType;
+    pNext = SafePnextCopy(src->pNext);
+    for (uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i) {
+        heapBudget[i] = src->heapBudget[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i) {
+        heapUsage[i] = src->heapUsage[i];
+    }
+}
+
+safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT::safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT(const VkPhysicalDeviceMemoryPriorityFeaturesEXT* in_struct) :
+    sType(in_struct->sType),
+    memoryPriority(in_struct->memoryPriority)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT::safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT::safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT(const safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT& src)
+{
+    sType = src.sType;
+    memoryPriority = src.memoryPriority;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT& safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT::operator=(const safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    memoryPriority = src.memoryPriority;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT::~safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT::initialize(const VkPhysicalDeviceMemoryPriorityFeaturesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    memoryPriority = in_struct->memoryPriority;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT::initialize(const safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT* src)
+{
+    sType = src->sType;
+    memoryPriority = src->memoryPriority;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkMemoryPriorityAllocateInfoEXT::safe_VkMemoryPriorityAllocateInfoEXT(const VkMemoryPriorityAllocateInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    priority(in_struct->priority)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkMemoryPriorityAllocateInfoEXT::safe_VkMemoryPriorityAllocateInfoEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkMemoryPriorityAllocateInfoEXT::safe_VkMemoryPriorityAllocateInfoEXT(const safe_VkMemoryPriorityAllocateInfoEXT& src)
+{
+    sType = src.sType;
+    priority = src.priority;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkMemoryPriorityAllocateInfoEXT& safe_VkMemoryPriorityAllocateInfoEXT::operator=(const safe_VkMemoryPriorityAllocateInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    priority = src.priority;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkMemoryPriorityAllocateInfoEXT::~safe_VkMemoryPriorityAllocateInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkMemoryPriorityAllocateInfoEXT::initialize(const VkMemoryPriorityAllocateInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    priority = in_struct->priority;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkMemoryPriorityAllocateInfoEXT::initialize(const safe_VkMemoryPriorityAllocateInfoEXT* src)
+{
+    sType = src->sType;
+    priority = src->priority;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV::safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV(const VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV* in_struct) :
+    sType(in_struct->sType),
+    dedicatedAllocationImageAliasing(in_struct->dedicatedAllocationImageAliasing)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV::safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV::safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV(const safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV& src)
+{
+    sType = src.sType;
+    dedicatedAllocationImageAliasing = src.dedicatedAllocationImageAliasing;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV& safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV::operator=(const safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    dedicatedAllocationImageAliasing = src.dedicatedAllocationImageAliasing;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV::~safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV::initialize(const VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV* in_struct)
+{
+    sType = in_struct->sType;
+    dedicatedAllocationImageAliasing = in_struct->dedicatedAllocationImageAliasing;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV::initialize(const safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV* src)
+{
+    sType = src->sType;
+    dedicatedAllocationImageAliasing = src->dedicatedAllocationImageAliasing;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT::safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT(const VkPhysicalDeviceBufferDeviceAddressFeaturesEXT* in_struct) :
+    sType(in_struct->sType),
+    bufferDeviceAddress(in_struct->bufferDeviceAddress),
+    bufferDeviceAddressCaptureReplay(in_struct->bufferDeviceAddressCaptureReplay),
+    bufferDeviceAddressMultiDevice(in_struct->bufferDeviceAddressMultiDevice)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT::safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT::safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT(const safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT& src)
+{
+    sType = src.sType;
+    bufferDeviceAddress = src.bufferDeviceAddress;
+    bufferDeviceAddressCaptureReplay = src.bufferDeviceAddressCaptureReplay;
+    bufferDeviceAddressMultiDevice = src.bufferDeviceAddressMultiDevice;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT& safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT::operator=(const safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    bufferDeviceAddress = src.bufferDeviceAddress;
+    bufferDeviceAddressCaptureReplay = src.bufferDeviceAddressCaptureReplay;
+    bufferDeviceAddressMultiDevice = src.bufferDeviceAddressMultiDevice;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT::~safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT::initialize(const VkPhysicalDeviceBufferDeviceAddressFeaturesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    bufferDeviceAddress = in_struct->bufferDeviceAddress;
+    bufferDeviceAddressCaptureReplay = in_struct->bufferDeviceAddressCaptureReplay;
+    bufferDeviceAddressMultiDevice = in_struct->bufferDeviceAddressMultiDevice;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT::initialize(const safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT* src)
+{
+    sType = src->sType;
+    bufferDeviceAddress = src->bufferDeviceAddress;
+    bufferDeviceAddressCaptureReplay = src->bufferDeviceAddressCaptureReplay;
+    bufferDeviceAddressMultiDevice = src->bufferDeviceAddressMultiDevice;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkBufferDeviceAddressCreateInfoEXT::safe_VkBufferDeviceAddressCreateInfoEXT(const VkBufferDeviceAddressCreateInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    deviceAddress(in_struct->deviceAddress)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkBufferDeviceAddressCreateInfoEXT::safe_VkBufferDeviceAddressCreateInfoEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkBufferDeviceAddressCreateInfoEXT::safe_VkBufferDeviceAddressCreateInfoEXT(const safe_VkBufferDeviceAddressCreateInfoEXT& src)
+{
+    sType = src.sType;
+    deviceAddress = src.deviceAddress;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkBufferDeviceAddressCreateInfoEXT& safe_VkBufferDeviceAddressCreateInfoEXT::operator=(const safe_VkBufferDeviceAddressCreateInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    deviceAddress = src.deviceAddress;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkBufferDeviceAddressCreateInfoEXT::~safe_VkBufferDeviceAddressCreateInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkBufferDeviceAddressCreateInfoEXT::initialize(const VkBufferDeviceAddressCreateInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    deviceAddress = in_struct->deviceAddress;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkBufferDeviceAddressCreateInfoEXT::initialize(const safe_VkBufferDeviceAddressCreateInfoEXT* src)
+{
+    sType = src->sType;
+    deviceAddress = src->deviceAddress;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceToolPropertiesEXT::safe_VkPhysicalDeviceToolPropertiesEXT(const VkPhysicalDeviceToolPropertiesEXT* in_struct) :
+    sType(in_struct->sType),
+    purposes(in_struct->purposes)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    for (uint32_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i) {
+        name[i] = in_struct->name[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i) {
+        version[i] = in_struct->version[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        description[i] = in_struct->description[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i) {
+        layer[i] = in_struct->layer[i];
+    }
+}
+
+safe_VkPhysicalDeviceToolPropertiesEXT::safe_VkPhysicalDeviceToolPropertiesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceToolPropertiesEXT::safe_VkPhysicalDeviceToolPropertiesEXT(const safe_VkPhysicalDeviceToolPropertiesEXT& src)
+{
+    sType = src.sType;
+    purposes = src.purposes;
+    pNext = SafePnextCopy(src.pNext);
+    for (uint32_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i) {
+        name[i] = src.name[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i) {
+        version[i] = src.version[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        description[i] = src.description[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i) {
+        layer[i] = src.layer[i];
+    }
+}
+
+safe_VkPhysicalDeviceToolPropertiesEXT& safe_VkPhysicalDeviceToolPropertiesEXT::operator=(const safe_VkPhysicalDeviceToolPropertiesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    purposes = src.purposes;
+    pNext = SafePnextCopy(src.pNext);
+    for (uint32_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i) {
+        name[i] = src.name[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i) {
+        version[i] = src.version[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        description[i] = src.description[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i) {
+        layer[i] = src.layer[i];
+    }
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceToolPropertiesEXT::~safe_VkPhysicalDeviceToolPropertiesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceToolPropertiesEXT::initialize(const VkPhysicalDeviceToolPropertiesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    purposes = in_struct->purposes;
+    pNext = SafePnextCopy(in_struct->pNext);
+    for (uint32_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i) {
+        name[i] = in_struct->name[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i) {
+        version[i] = in_struct->version[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        description[i] = in_struct->description[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i) {
+        layer[i] = in_struct->layer[i];
+    }
+}
+
+void safe_VkPhysicalDeviceToolPropertiesEXT::initialize(const safe_VkPhysicalDeviceToolPropertiesEXT* src)
+{
+    sType = src->sType;
+    purposes = src->purposes;
+    pNext = SafePnextCopy(src->pNext);
+    for (uint32_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i) {
+        name[i] = src->name[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i) {
+        version[i] = src->version[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i) {
+        description[i] = src->description[i];
+    }
+    for (uint32_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i) {
+        layer[i] = src->layer[i];
+    }
+}
+
+safe_VkImageStencilUsageCreateInfoEXT::safe_VkImageStencilUsageCreateInfoEXT(const VkImageStencilUsageCreateInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    stencilUsage(in_struct->stencilUsage)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkImageStencilUsageCreateInfoEXT::safe_VkImageStencilUsageCreateInfoEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkImageStencilUsageCreateInfoEXT::safe_VkImageStencilUsageCreateInfoEXT(const safe_VkImageStencilUsageCreateInfoEXT& src)
+{
+    sType = src.sType;
+    stencilUsage = src.stencilUsage;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkImageStencilUsageCreateInfoEXT& safe_VkImageStencilUsageCreateInfoEXT::operator=(const safe_VkImageStencilUsageCreateInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    stencilUsage = src.stencilUsage;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkImageStencilUsageCreateInfoEXT::~safe_VkImageStencilUsageCreateInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkImageStencilUsageCreateInfoEXT::initialize(const VkImageStencilUsageCreateInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    stencilUsage = in_struct->stencilUsage;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkImageStencilUsageCreateInfoEXT::initialize(const safe_VkImageStencilUsageCreateInfoEXT* src)
+{
+    sType = src->sType;
+    stencilUsage = src->stencilUsage;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkValidationFeaturesEXT::safe_VkValidationFeaturesEXT(const VkValidationFeaturesEXT* in_struct) :
+    sType(in_struct->sType),
+    enabledValidationFeatureCount(in_struct->enabledValidationFeatureCount),
+    pEnabledValidationFeatures(nullptr),
+    disabledValidationFeatureCount(in_struct->disabledValidationFeatureCount),
+    pDisabledValidationFeatures(nullptr)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pEnabledValidationFeatures) {
+        pEnabledValidationFeatures = new VkValidationFeatureEnableEXT[in_struct->enabledValidationFeatureCount];
+        memcpy ((void *)pEnabledValidationFeatures, (void *)in_struct->pEnabledValidationFeatures, sizeof(VkValidationFeatureEnableEXT)*in_struct->enabledValidationFeatureCount);
+    }
+    if (in_struct->pDisabledValidationFeatures) {
+        pDisabledValidationFeatures = new VkValidationFeatureDisableEXT[in_struct->disabledValidationFeatureCount];
+        memcpy ((void *)pDisabledValidationFeatures, (void *)in_struct->pDisabledValidationFeatures, sizeof(VkValidationFeatureDisableEXT)*in_struct->disabledValidationFeatureCount);
+    }
+}
+
+safe_VkValidationFeaturesEXT::safe_VkValidationFeaturesEXT() :
+    pNext(nullptr),
+    pEnabledValidationFeatures(nullptr),
+    pDisabledValidationFeatures(nullptr)
+{}
+
+safe_VkValidationFeaturesEXT::safe_VkValidationFeaturesEXT(const safe_VkValidationFeaturesEXT& src)
+{
+    sType = src.sType;
+    enabledValidationFeatureCount = src.enabledValidationFeatureCount;
+    pEnabledValidationFeatures = nullptr;
+    disabledValidationFeatureCount = src.disabledValidationFeatureCount;
+    pDisabledValidationFeatures = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pEnabledValidationFeatures) {
+        pEnabledValidationFeatures = new VkValidationFeatureEnableEXT[src.enabledValidationFeatureCount];
+        memcpy ((void *)pEnabledValidationFeatures, (void *)src.pEnabledValidationFeatures, sizeof(VkValidationFeatureEnableEXT)*src.enabledValidationFeatureCount);
+    }
+    if (src.pDisabledValidationFeatures) {
+        pDisabledValidationFeatures = new VkValidationFeatureDisableEXT[src.disabledValidationFeatureCount];
+        memcpy ((void *)pDisabledValidationFeatures, (void *)src.pDisabledValidationFeatures, sizeof(VkValidationFeatureDisableEXT)*src.disabledValidationFeatureCount);
+    }
+}
+
+safe_VkValidationFeaturesEXT& safe_VkValidationFeaturesEXT::operator=(const safe_VkValidationFeaturesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pEnabledValidationFeatures)
+        delete[] pEnabledValidationFeatures;
+    if (pDisabledValidationFeatures)
+        delete[] pDisabledValidationFeatures;
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    enabledValidationFeatureCount = src.enabledValidationFeatureCount;
+    pEnabledValidationFeatures = nullptr;
+    disabledValidationFeatureCount = src.disabledValidationFeatureCount;
+    pDisabledValidationFeatures = nullptr;
+    pNext = SafePnextCopy(src.pNext);
+    if (src.pEnabledValidationFeatures) {
+        pEnabledValidationFeatures = new VkValidationFeatureEnableEXT[src.enabledValidationFeatureCount];
+        memcpy ((void *)pEnabledValidationFeatures, (void *)src.pEnabledValidationFeatures, sizeof(VkValidationFeatureEnableEXT)*src.enabledValidationFeatureCount);
+    }
+    if (src.pDisabledValidationFeatures) {
+        pDisabledValidationFeatures = new VkValidationFeatureDisableEXT[src.disabledValidationFeatureCount];
+        memcpy ((void *)pDisabledValidationFeatures, (void *)src.pDisabledValidationFeatures, sizeof(VkValidationFeatureDisableEXT)*src.disabledValidationFeatureCount);
+    }
+
+    return *this;
+}
+
+safe_VkValidationFeaturesEXT::~safe_VkValidationFeaturesEXT()
+{
+    if (pEnabledValidationFeatures)
+        delete[] pEnabledValidationFeatures;
+    if (pDisabledValidationFeatures)
+        delete[] pDisabledValidationFeatures;
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkValidationFeaturesEXT::initialize(const VkValidationFeaturesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    enabledValidationFeatureCount = in_struct->enabledValidationFeatureCount;
+    pEnabledValidationFeatures = nullptr;
+    disabledValidationFeatureCount = in_struct->disabledValidationFeatureCount;
+    pDisabledValidationFeatures = nullptr;
+    pNext = SafePnextCopy(in_struct->pNext);
+    if (in_struct->pEnabledValidationFeatures) {
+        pEnabledValidationFeatures = new VkValidationFeatureEnableEXT[in_struct->enabledValidationFeatureCount];
+        memcpy ((void *)pEnabledValidationFeatures, (void *)in_struct->pEnabledValidationFeatures, sizeof(VkValidationFeatureEnableEXT)*in_struct->enabledValidationFeatureCount);
+    }
+    if (in_struct->pDisabledValidationFeatures) {
+        pDisabledValidationFeatures = new VkValidationFeatureDisableEXT[in_struct->disabledValidationFeatureCount];
+        memcpy ((void *)pDisabledValidationFeatures, (void *)in_struct->pDisabledValidationFeatures, sizeof(VkValidationFeatureDisableEXT)*in_struct->disabledValidationFeatureCount);
+    }
+}
+
+void safe_VkValidationFeaturesEXT::initialize(const safe_VkValidationFeaturesEXT* src)
+{
+    sType = src->sType;
+    enabledValidationFeatureCount = src->enabledValidationFeatureCount;
+    pEnabledValidationFeatures = nullptr;
+    disabledValidationFeatureCount = src->disabledValidationFeatureCount;
+    pDisabledValidationFeatures = nullptr;
+    pNext = SafePnextCopy(src->pNext);
+    if (src->pEnabledValidationFeatures) {
+        pEnabledValidationFeatures = new VkValidationFeatureEnableEXT[src->enabledValidationFeatureCount];
+        memcpy ((void *)pEnabledValidationFeatures, (void *)src->pEnabledValidationFeatures, sizeof(VkValidationFeatureEnableEXT)*src->enabledValidationFeatureCount);
+    }
+    if (src->pDisabledValidationFeatures) {
+        pDisabledValidationFeatures = new VkValidationFeatureDisableEXT[src->disabledValidationFeatureCount];
+        memcpy ((void *)pDisabledValidationFeatures, (void *)src->pDisabledValidationFeatures, sizeof(VkValidationFeatureDisableEXT)*src->disabledValidationFeatureCount);
+    }
+}
+
+safe_VkCooperativeMatrixPropertiesNV::safe_VkCooperativeMatrixPropertiesNV(const VkCooperativeMatrixPropertiesNV* in_struct) :
+    sType(in_struct->sType),
+    MSize(in_struct->MSize),
+    NSize(in_struct->NSize),
+    KSize(in_struct->KSize),
+    AType(in_struct->AType),
+    BType(in_struct->BType),
+    CType(in_struct->CType),
+    DType(in_struct->DType),
+    scope(in_struct->scope)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkCooperativeMatrixPropertiesNV::safe_VkCooperativeMatrixPropertiesNV() :
+    pNext(nullptr)
+{}
+
+safe_VkCooperativeMatrixPropertiesNV::safe_VkCooperativeMatrixPropertiesNV(const safe_VkCooperativeMatrixPropertiesNV& src)
+{
+    sType = src.sType;
+    MSize = src.MSize;
+    NSize = src.NSize;
+    KSize = src.KSize;
+    AType = src.AType;
+    BType = src.BType;
+    CType = src.CType;
+    DType = src.DType;
+    scope = src.scope;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkCooperativeMatrixPropertiesNV& safe_VkCooperativeMatrixPropertiesNV::operator=(const safe_VkCooperativeMatrixPropertiesNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    MSize = src.MSize;
+    NSize = src.NSize;
+    KSize = src.KSize;
+    AType = src.AType;
+    BType = src.BType;
+    CType = src.CType;
+    DType = src.DType;
+    scope = src.scope;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkCooperativeMatrixPropertiesNV::~safe_VkCooperativeMatrixPropertiesNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkCooperativeMatrixPropertiesNV::initialize(const VkCooperativeMatrixPropertiesNV* in_struct)
+{
+    sType = in_struct->sType;
+    MSize = in_struct->MSize;
+    NSize = in_struct->NSize;
+    KSize = in_struct->KSize;
+    AType = in_struct->AType;
+    BType = in_struct->BType;
+    CType = in_struct->CType;
+    DType = in_struct->DType;
+    scope = in_struct->scope;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkCooperativeMatrixPropertiesNV::initialize(const safe_VkCooperativeMatrixPropertiesNV* src)
+{
+    sType = src->sType;
+    MSize = src->MSize;
+    NSize = src->NSize;
+    KSize = src->KSize;
+    AType = src->AType;
+    BType = src->BType;
+    CType = src->CType;
+    DType = src->DType;
+    scope = src->scope;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV::safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV(const VkPhysicalDeviceCooperativeMatrixFeaturesNV* in_struct) :
+    sType(in_struct->sType),
+    cooperativeMatrix(in_struct->cooperativeMatrix),
+    cooperativeMatrixRobustBufferAccess(in_struct->cooperativeMatrixRobustBufferAccess)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV::safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV::safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV(const safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV& src)
+{
+    sType = src.sType;
+    cooperativeMatrix = src.cooperativeMatrix;
+    cooperativeMatrixRobustBufferAccess = src.cooperativeMatrixRobustBufferAccess;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV& safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV::operator=(const safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    cooperativeMatrix = src.cooperativeMatrix;
+    cooperativeMatrixRobustBufferAccess = src.cooperativeMatrixRobustBufferAccess;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV::~safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV::initialize(const VkPhysicalDeviceCooperativeMatrixFeaturesNV* in_struct)
+{
+    sType = in_struct->sType;
+    cooperativeMatrix = in_struct->cooperativeMatrix;
+    cooperativeMatrixRobustBufferAccess = in_struct->cooperativeMatrixRobustBufferAccess;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV::initialize(const safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV* src)
+{
+    sType = src->sType;
+    cooperativeMatrix = src->cooperativeMatrix;
+    cooperativeMatrixRobustBufferAccess = src->cooperativeMatrixRobustBufferAccess;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV::safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV(const VkPhysicalDeviceCooperativeMatrixPropertiesNV* in_struct) :
+    sType(in_struct->sType),
+    cooperativeMatrixSupportedStages(in_struct->cooperativeMatrixSupportedStages)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV::safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV::safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV(const safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV& src)
+{
+    sType = src.sType;
+    cooperativeMatrixSupportedStages = src.cooperativeMatrixSupportedStages;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV& safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV::operator=(const safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    cooperativeMatrixSupportedStages = src.cooperativeMatrixSupportedStages;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV::~safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV::initialize(const VkPhysicalDeviceCooperativeMatrixPropertiesNV* in_struct)
+{
+    sType = in_struct->sType;
+    cooperativeMatrixSupportedStages = in_struct->cooperativeMatrixSupportedStages;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV::initialize(const safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV* src)
+{
+    sType = src->sType;
+    cooperativeMatrixSupportedStages = src->cooperativeMatrixSupportedStages;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV::safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV(const VkPhysicalDeviceCoverageReductionModeFeaturesNV* in_struct) :
+    sType(in_struct->sType),
+    coverageReductionMode(in_struct->coverageReductionMode)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV::safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV::safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV(const safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV& src)
+{
+    sType = src.sType;
+    coverageReductionMode = src.coverageReductionMode;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV& safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV::operator=(const safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    coverageReductionMode = src.coverageReductionMode;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV::~safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV::initialize(const VkPhysicalDeviceCoverageReductionModeFeaturesNV* in_struct)
+{
+    sType = in_struct->sType;
+    coverageReductionMode = in_struct->coverageReductionMode;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV::initialize(const safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV* src)
+{
+    sType = src->sType;
+    coverageReductionMode = src->coverageReductionMode;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPipelineCoverageReductionStateCreateInfoNV::safe_VkPipelineCoverageReductionStateCreateInfoNV(const VkPipelineCoverageReductionStateCreateInfoNV* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags),
+    coverageReductionMode(in_struct->coverageReductionMode)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPipelineCoverageReductionStateCreateInfoNV::safe_VkPipelineCoverageReductionStateCreateInfoNV() :
+    pNext(nullptr)
+{}
+
+safe_VkPipelineCoverageReductionStateCreateInfoNV::safe_VkPipelineCoverageReductionStateCreateInfoNV(const safe_VkPipelineCoverageReductionStateCreateInfoNV& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    coverageReductionMode = src.coverageReductionMode;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPipelineCoverageReductionStateCreateInfoNV& safe_VkPipelineCoverageReductionStateCreateInfoNV::operator=(const safe_VkPipelineCoverageReductionStateCreateInfoNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    coverageReductionMode = src.coverageReductionMode;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPipelineCoverageReductionStateCreateInfoNV::~safe_VkPipelineCoverageReductionStateCreateInfoNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineCoverageReductionStateCreateInfoNV::initialize(const VkPipelineCoverageReductionStateCreateInfoNV* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    coverageReductionMode = in_struct->coverageReductionMode;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPipelineCoverageReductionStateCreateInfoNV::initialize(const safe_VkPipelineCoverageReductionStateCreateInfoNV* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    coverageReductionMode = src->coverageReductionMode;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkFramebufferMixedSamplesCombinationNV::safe_VkFramebufferMixedSamplesCombinationNV(const VkFramebufferMixedSamplesCombinationNV* in_struct) :
+    sType(in_struct->sType),
+    coverageReductionMode(in_struct->coverageReductionMode),
+    rasterizationSamples(in_struct->rasterizationSamples),
+    depthStencilSamples(in_struct->depthStencilSamples),
+    colorSamples(in_struct->colorSamples)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkFramebufferMixedSamplesCombinationNV::safe_VkFramebufferMixedSamplesCombinationNV() :
+    pNext(nullptr)
+{}
+
+safe_VkFramebufferMixedSamplesCombinationNV::safe_VkFramebufferMixedSamplesCombinationNV(const safe_VkFramebufferMixedSamplesCombinationNV& src)
+{
+    sType = src.sType;
+    coverageReductionMode = src.coverageReductionMode;
+    rasterizationSamples = src.rasterizationSamples;
+    depthStencilSamples = src.depthStencilSamples;
+    colorSamples = src.colorSamples;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkFramebufferMixedSamplesCombinationNV& safe_VkFramebufferMixedSamplesCombinationNV::operator=(const safe_VkFramebufferMixedSamplesCombinationNV& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    coverageReductionMode = src.coverageReductionMode;
+    rasterizationSamples = src.rasterizationSamples;
+    depthStencilSamples = src.depthStencilSamples;
+    colorSamples = src.colorSamples;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkFramebufferMixedSamplesCombinationNV::~safe_VkFramebufferMixedSamplesCombinationNV()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkFramebufferMixedSamplesCombinationNV::initialize(const VkFramebufferMixedSamplesCombinationNV* in_struct)
+{
+    sType = in_struct->sType;
+    coverageReductionMode = in_struct->coverageReductionMode;
+    rasterizationSamples = in_struct->rasterizationSamples;
+    depthStencilSamples = in_struct->depthStencilSamples;
+    colorSamples = in_struct->colorSamples;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkFramebufferMixedSamplesCombinationNV::initialize(const safe_VkFramebufferMixedSamplesCombinationNV* src)
+{
+    sType = src->sType;
+    coverageReductionMode = src->coverageReductionMode;
+    rasterizationSamples = src->rasterizationSamples;
+    depthStencilSamples = src->depthStencilSamples;
+    colorSamples = src->colorSamples;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT(const VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT* in_struct) :
+    sType(in_struct->sType),
+    fragmentShaderSampleInterlock(in_struct->fragmentShaderSampleInterlock),
+    fragmentShaderPixelInterlock(in_struct->fragmentShaderPixelInterlock),
+    fragmentShaderShadingRateInterlock(in_struct->fragmentShaderShadingRateInterlock)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT(const safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT& src)
+{
+    sType = src.sType;
+    fragmentShaderSampleInterlock = src.fragmentShaderSampleInterlock;
+    fragmentShaderPixelInterlock = src.fragmentShaderPixelInterlock;
+    fragmentShaderShadingRateInterlock = src.fragmentShaderShadingRateInterlock;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT& safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::operator=(const safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    fragmentShaderSampleInterlock = src.fragmentShaderSampleInterlock;
+    fragmentShaderPixelInterlock = src.fragmentShaderPixelInterlock;
+    fragmentShaderShadingRateInterlock = src.fragmentShaderShadingRateInterlock;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::~safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::initialize(const VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    fragmentShaderSampleInterlock = in_struct->fragmentShaderSampleInterlock;
+    fragmentShaderPixelInterlock = in_struct->fragmentShaderPixelInterlock;
+    fragmentShaderShadingRateInterlock = in_struct->fragmentShaderShadingRateInterlock;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::initialize(const safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT* src)
+{
+    sType = src->sType;
+    fragmentShaderSampleInterlock = src->fragmentShaderSampleInterlock;
+    fragmentShaderPixelInterlock = src->fragmentShaderPixelInterlock;
+    fragmentShaderShadingRateInterlock = src->fragmentShaderShadingRateInterlock;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT::safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT(const VkPhysicalDeviceYcbcrImageArraysFeaturesEXT* in_struct) :
+    sType(in_struct->sType),
+    ycbcrImageArrays(in_struct->ycbcrImageArrays)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT::safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT::safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT(const safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT& src)
+{
+    sType = src.sType;
+    ycbcrImageArrays = src.ycbcrImageArrays;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT& safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT::operator=(const safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    ycbcrImageArrays = src.ycbcrImageArrays;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT::~safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT::initialize(const VkPhysicalDeviceYcbcrImageArraysFeaturesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    ycbcrImageArrays = in_struct->ycbcrImageArrays;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT::initialize(const safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT* src)
+{
+    sType = src->sType;
+    ycbcrImageArrays = src->ycbcrImageArrays;
+    pNext = SafePnextCopy(src->pNext);
+}
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+
+safe_VkSurfaceFullScreenExclusiveInfoEXT::safe_VkSurfaceFullScreenExclusiveInfoEXT(const VkSurfaceFullScreenExclusiveInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    fullScreenExclusive(in_struct->fullScreenExclusive)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkSurfaceFullScreenExclusiveInfoEXT::safe_VkSurfaceFullScreenExclusiveInfoEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkSurfaceFullScreenExclusiveInfoEXT::safe_VkSurfaceFullScreenExclusiveInfoEXT(const safe_VkSurfaceFullScreenExclusiveInfoEXT& src)
+{
+    sType = src.sType;
+    fullScreenExclusive = src.fullScreenExclusive;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkSurfaceFullScreenExclusiveInfoEXT& safe_VkSurfaceFullScreenExclusiveInfoEXT::operator=(const safe_VkSurfaceFullScreenExclusiveInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    fullScreenExclusive = src.fullScreenExclusive;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkSurfaceFullScreenExclusiveInfoEXT::~safe_VkSurfaceFullScreenExclusiveInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSurfaceFullScreenExclusiveInfoEXT::initialize(const VkSurfaceFullScreenExclusiveInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    fullScreenExclusive = in_struct->fullScreenExclusive;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkSurfaceFullScreenExclusiveInfoEXT::initialize(const safe_VkSurfaceFullScreenExclusiveInfoEXT* src)
+{
+    sType = src->sType;
+    fullScreenExclusive = src->fullScreenExclusive;
+    pNext = SafePnextCopy(src->pNext);
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+
+safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT::safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT(const VkSurfaceCapabilitiesFullScreenExclusiveEXT* in_struct) :
+    sType(in_struct->sType),
+    fullScreenExclusiveSupported(in_struct->fullScreenExclusiveSupported)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT::safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT::safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT(const safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT& src)
+{
+    sType = src.sType;
+    fullScreenExclusiveSupported = src.fullScreenExclusiveSupported;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT& safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT::operator=(const safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    fullScreenExclusiveSupported = src.fullScreenExclusiveSupported;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT::~safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT::initialize(const VkSurfaceCapabilitiesFullScreenExclusiveEXT* in_struct)
+{
+    sType = in_struct->sType;
+    fullScreenExclusiveSupported = in_struct->fullScreenExclusiveSupported;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT::initialize(const safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT* src)
+{
+    sType = src->sType;
+    fullScreenExclusiveSupported = src->fullScreenExclusiveSupported;
+    pNext = SafePnextCopy(src->pNext);
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+
+safe_VkSurfaceFullScreenExclusiveWin32InfoEXT::safe_VkSurfaceFullScreenExclusiveWin32InfoEXT(const VkSurfaceFullScreenExclusiveWin32InfoEXT* in_struct) :
+    sType(in_struct->sType),
+    hmonitor(in_struct->hmonitor)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkSurfaceFullScreenExclusiveWin32InfoEXT::safe_VkSurfaceFullScreenExclusiveWin32InfoEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkSurfaceFullScreenExclusiveWin32InfoEXT::safe_VkSurfaceFullScreenExclusiveWin32InfoEXT(const safe_VkSurfaceFullScreenExclusiveWin32InfoEXT& src)
+{
+    sType = src.sType;
+    hmonitor = src.hmonitor;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkSurfaceFullScreenExclusiveWin32InfoEXT& safe_VkSurfaceFullScreenExclusiveWin32InfoEXT::operator=(const safe_VkSurfaceFullScreenExclusiveWin32InfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    hmonitor = src.hmonitor;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkSurfaceFullScreenExclusiveWin32InfoEXT::~safe_VkSurfaceFullScreenExclusiveWin32InfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkSurfaceFullScreenExclusiveWin32InfoEXT::initialize(const VkSurfaceFullScreenExclusiveWin32InfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    hmonitor = in_struct->hmonitor;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkSurfaceFullScreenExclusiveWin32InfoEXT::initialize(const safe_VkSurfaceFullScreenExclusiveWin32InfoEXT* src)
+{
+    sType = src->sType;
+    hmonitor = src->hmonitor;
+    pNext = SafePnextCopy(src->pNext);
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+
+safe_VkHeadlessSurfaceCreateInfoEXT::safe_VkHeadlessSurfaceCreateInfoEXT(const VkHeadlessSurfaceCreateInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    flags(in_struct->flags)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkHeadlessSurfaceCreateInfoEXT::safe_VkHeadlessSurfaceCreateInfoEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkHeadlessSurfaceCreateInfoEXT::safe_VkHeadlessSurfaceCreateInfoEXT(const safe_VkHeadlessSurfaceCreateInfoEXT& src)
+{
+    sType = src.sType;
+    flags = src.flags;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkHeadlessSurfaceCreateInfoEXT& safe_VkHeadlessSurfaceCreateInfoEXT::operator=(const safe_VkHeadlessSurfaceCreateInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    flags = src.flags;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkHeadlessSurfaceCreateInfoEXT::~safe_VkHeadlessSurfaceCreateInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkHeadlessSurfaceCreateInfoEXT::initialize(const VkHeadlessSurfaceCreateInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    flags = in_struct->flags;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkHeadlessSurfaceCreateInfoEXT::initialize(const safe_VkHeadlessSurfaceCreateInfoEXT* src)
+{
+    sType = src->sType;
+    flags = src->flags;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceLineRasterizationFeaturesEXT::safe_VkPhysicalDeviceLineRasterizationFeaturesEXT(const VkPhysicalDeviceLineRasterizationFeaturesEXT* in_struct) :
+    sType(in_struct->sType),
+    rectangularLines(in_struct->rectangularLines),
+    bresenhamLines(in_struct->bresenhamLines),
+    smoothLines(in_struct->smoothLines),
+    stippledRectangularLines(in_struct->stippledRectangularLines),
+    stippledBresenhamLines(in_struct->stippledBresenhamLines),
+    stippledSmoothLines(in_struct->stippledSmoothLines)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceLineRasterizationFeaturesEXT::safe_VkPhysicalDeviceLineRasterizationFeaturesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceLineRasterizationFeaturesEXT::safe_VkPhysicalDeviceLineRasterizationFeaturesEXT(const safe_VkPhysicalDeviceLineRasterizationFeaturesEXT& src)
+{
+    sType = src.sType;
+    rectangularLines = src.rectangularLines;
+    bresenhamLines = src.bresenhamLines;
+    smoothLines = src.smoothLines;
+    stippledRectangularLines = src.stippledRectangularLines;
+    stippledBresenhamLines = src.stippledBresenhamLines;
+    stippledSmoothLines = src.stippledSmoothLines;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceLineRasterizationFeaturesEXT& safe_VkPhysicalDeviceLineRasterizationFeaturesEXT::operator=(const safe_VkPhysicalDeviceLineRasterizationFeaturesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    rectangularLines = src.rectangularLines;
+    bresenhamLines = src.bresenhamLines;
+    smoothLines = src.smoothLines;
+    stippledRectangularLines = src.stippledRectangularLines;
+    stippledBresenhamLines = src.stippledBresenhamLines;
+    stippledSmoothLines = src.stippledSmoothLines;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceLineRasterizationFeaturesEXT::~safe_VkPhysicalDeviceLineRasterizationFeaturesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceLineRasterizationFeaturesEXT::initialize(const VkPhysicalDeviceLineRasterizationFeaturesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    rectangularLines = in_struct->rectangularLines;
+    bresenhamLines = in_struct->bresenhamLines;
+    smoothLines = in_struct->smoothLines;
+    stippledRectangularLines = in_struct->stippledRectangularLines;
+    stippledBresenhamLines = in_struct->stippledBresenhamLines;
+    stippledSmoothLines = in_struct->stippledSmoothLines;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceLineRasterizationFeaturesEXT::initialize(const safe_VkPhysicalDeviceLineRasterizationFeaturesEXT* src)
+{
+    sType = src->sType;
+    rectangularLines = src->rectangularLines;
+    bresenhamLines = src->bresenhamLines;
+    smoothLines = src->smoothLines;
+    stippledRectangularLines = src->stippledRectangularLines;
+    stippledBresenhamLines = src->stippledBresenhamLines;
+    stippledSmoothLines = src->stippledSmoothLines;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceLineRasterizationPropertiesEXT::safe_VkPhysicalDeviceLineRasterizationPropertiesEXT(const VkPhysicalDeviceLineRasterizationPropertiesEXT* in_struct) :
+    sType(in_struct->sType),
+    lineSubPixelPrecisionBits(in_struct->lineSubPixelPrecisionBits)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceLineRasterizationPropertiesEXT::safe_VkPhysicalDeviceLineRasterizationPropertiesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceLineRasterizationPropertiesEXT::safe_VkPhysicalDeviceLineRasterizationPropertiesEXT(const safe_VkPhysicalDeviceLineRasterizationPropertiesEXT& src)
+{
+    sType = src.sType;
+    lineSubPixelPrecisionBits = src.lineSubPixelPrecisionBits;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceLineRasterizationPropertiesEXT& safe_VkPhysicalDeviceLineRasterizationPropertiesEXT::operator=(const safe_VkPhysicalDeviceLineRasterizationPropertiesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    lineSubPixelPrecisionBits = src.lineSubPixelPrecisionBits;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceLineRasterizationPropertiesEXT::~safe_VkPhysicalDeviceLineRasterizationPropertiesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceLineRasterizationPropertiesEXT::initialize(const VkPhysicalDeviceLineRasterizationPropertiesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    lineSubPixelPrecisionBits = in_struct->lineSubPixelPrecisionBits;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceLineRasterizationPropertiesEXT::initialize(const safe_VkPhysicalDeviceLineRasterizationPropertiesEXT* src)
+{
+    sType = src->sType;
+    lineSubPixelPrecisionBits = src->lineSubPixelPrecisionBits;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPipelineRasterizationLineStateCreateInfoEXT::safe_VkPipelineRasterizationLineStateCreateInfoEXT(const VkPipelineRasterizationLineStateCreateInfoEXT* in_struct) :
+    sType(in_struct->sType),
+    lineRasterizationMode(in_struct->lineRasterizationMode),
+    stippledLineEnable(in_struct->stippledLineEnable),
+    lineStippleFactor(in_struct->lineStippleFactor),
+    lineStipplePattern(in_struct->lineStipplePattern)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPipelineRasterizationLineStateCreateInfoEXT::safe_VkPipelineRasterizationLineStateCreateInfoEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPipelineRasterizationLineStateCreateInfoEXT::safe_VkPipelineRasterizationLineStateCreateInfoEXT(const safe_VkPipelineRasterizationLineStateCreateInfoEXT& src)
+{
+    sType = src.sType;
+    lineRasterizationMode = src.lineRasterizationMode;
+    stippledLineEnable = src.stippledLineEnable;
+    lineStippleFactor = src.lineStippleFactor;
+    lineStipplePattern = src.lineStipplePattern;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPipelineRasterizationLineStateCreateInfoEXT& safe_VkPipelineRasterizationLineStateCreateInfoEXT::operator=(const safe_VkPipelineRasterizationLineStateCreateInfoEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    lineRasterizationMode = src.lineRasterizationMode;
+    stippledLineEnable = src.stippledLineEnable;
+    lineStippleFactor = src.lineStippleFactor;
+    lineStipplePattern = src.lineStipplePattern;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPipelineRasterizationLineStateCreateInfoEXT::~safe_VkPipelineRasterizationLineStateCreateInfoEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPipelineRasterizationLineStateCreateInfoEXT::initialize(const VkPipelineRasterizationLineStateCreateInfoEXT* in_struct)
+{
+    sType = in_struct->sType;
+    lineRasterizationMode = in_struct->lineRasterizationMode;
+    stippledLineEnable = in_struct->stippledLineEnable;
+    lineStippleFactor = in_struct->lineStippleFactor;
+    lineStipplePattern = in_struct->lineStipplePattern;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPipelineRasterizationLineStateCreateInfoEXT::initialize(const safe_VkPipelineRasterizationLineStateCreateInfoEXT* src)
+{
+    sType = src->sType;
+    lineRasterizationMode = src->lineRasterizationMode;
+    stippledLineEnable = src->stippledLineEnable;
+    lineStippleFactor = src->lineStippleFactor;
+    lineStipplePattern = src->lineStipplePattern;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceHostQueryResetFeaturesEXT::safe_VkPhysicalDeviceHostQueryResetFeaturesEXT(const VkPhysicalDeviceHostQueryResetFeaturesEXT* in_struct) :
+    sType(in_struct->sType),
+    hostQueryReset(in_struct->hostQueryReset)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceHostQueryResetFeaturesEXT::safe_VkPhysicalDeviceHostQueryResetFeaturesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceHostQueryResetFeaturesEXT::safe_VkPhysicalDeviceHostQueryResetFeaturesEXT(const safe_VkPhysicalDeviceHostQueryResetFeaturesEXT& src)
+{
+    sType = src.sType;
+    hostQueryReset = src.hostQueryReset;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceHostQueryResetFeaturesEXT& safe_VkPhysicalDeviceHostQueryResetFeaturesEXT::operator=(const safe_VkPhysicalDeviceHostQueryResetFeaturesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    hostQueryReset = src.hostQueryReset;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceHostQueryResetFeaturesEXT::~safe_VkPhysicalDeviceHostQueryResetFeaturesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceHostQueryResetFeaturesEXT::initialize(const VkPhysicalDeviceHostQueryResetFeaturesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    hostQueryReset = in_struct->hostQueryReset;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceHostQueryResetFeaturesEXT::initialize(const safe_VkPhysicalDeviceHostQueryResetFeaturesEXT* src)
+{
+    sType = src->sType;
+    hostQueryReset = src->hostQueryReset;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT::safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT(const VkPhysicalDeviceIndexTypeUint8FeaturesEXT* in_struct) :
+    sType(in_struct->sType),
+    indexTypeUint8(in_struct->indexTypeUint8)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT::safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT::safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT(const safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT& src)
+{
+    sType = src.sType;
+    indexTypeUint8 = src.indexTypeUint8;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT& safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT::operator=(const safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    indexTypeUint8 = src.indexTypeUint8;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT::~safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT::initialize(const VkPhysicalDeviceIndexTypeUint8FeaturesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    indexTypeUint8 = in_struct->indexTypeUint8;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT::initialize(const safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT* src)
+{
+    sType = src->sType;
+    indexTypeUint8 = src->indexTypeUint8;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT::safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT(const VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT* in_struct) :
+    sType(in_struct->sType),
+    shaderDemoteToHelperInvocation(in_struct->shaderDemoteToHelperInvocation)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT::safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT::safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT(const safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT& src)
+{
+    sType = src.sType;
+    shaderDemoteToHelperInvocation = src.shaderDemoteToHelperInvocation;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT& safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT::operator=(const safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    shaderDemoteToHelperInvocation = src.shaderDemoteToHelperInvocation;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT::~safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT::initialize(const VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    shaderDemoteToHelperInvocation = in_struct->shaderDemoteToHelperInvocation;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT::initialize(const safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT* src)
+{
+    sType = src->sType;
+    shaderDemoteToHelperInvocation = src->shaderDemoteToHelperInvocation;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT::safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT(const VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT* in_struct) :
+    sType(in_struct->sType),
+    texelBufferAlignment(in_struct->texelBufferAlignment)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT::safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT::safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT(const safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT& src)
+{
+    sType = src.sType;
+    texelBufferAlignment = src.texelBufferAlignment;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT& safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT::operator=(const safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    texelBufferAlignment = src.texelBufferAlignment;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT::~safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT::initialize(const VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    texelBufferAlignment = in_struct->texelBufferAlignment;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT::initialize(const safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT* src)
+{
+    sType = src->sType;
+    texelBufferAlignment = src->texelBufferAlignment;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT(const VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT* in_struct) :
+    sType(in_struct->sType),
+    storageTexelBufferOffsetAlignmentBytes(in_struct->storageTexelBufferOffsetAlignmentBytes),
+    storageTexelBufferOffsetSingleTexelAlignment(in_struct->storageTexelBufferOffsetSingleTexelAlignment),
+    uniformTexelBufferOffsetAlignmentBytes(in_struct->uniformTexelBufferOffsetAlignmentBytes),
+    uniformTexelBufferOffsetSingleTexelAlignment(in_struct->uniformTexelBufferOffsetSingleTexelAlignment)
+{
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT() :
+    pNext(nullptr)
+{}
+
+safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT(const safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT& src)
+{
+    sType = src.sType;
+    storageTexelBufferOffsetAlignmentBytes = src.storageTexelBufferOffsetAlignmentBytes;
+    storageTexelBufferOffsetSingleTexelAlignment = src.storageTexelBufferOffsetSingleTexelAlignment;
+    uniformTexelBufferOffsetAlignmentBytes = src.uniformTexelBufferOffsetAlignmentBytes;
+    uniformTexelBufferOffsetSingleTexelAlignment = src.uniformTexelBufferOffsetSingleTexelAlignment;
+    pNext = SafePnextCopy(src.pNext);
+}
+
+safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT& safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::operator=(const safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT& src)
+{
+    if (&src == this) return *this;
+
+    if (pNext)
+        FreePnextChain(pNext);
+
+    sType = src.sType;
+    storageTexelBufferOffsetAlignmentBytes = src.storageTexelBufferOffsetAlignmentBytes;
+    storageTexelBufferOffsetSingleTexelAlignment = src.storageTexelBufferOffsetSingleTexelAlignment;
+    uniformTexelBufferOffsetAlignmentBytes = src.uniformTexelBufferOffsetAlignmentBytes;
+    uniformTexelBufferOffsetSingleTexelAlignment = src.uniformTexelBufferOffsetSingleTexelAlignment;
+    pNext = SafePnextCopy(src.pNext);
+
+    return *this;
+}
+
+safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::~safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT()
+{
+    if (pNext)
+        FreePnextChain(pNext);
+}
+
+void safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::initialize(const VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT* in_struct)
+{
+    sType = in_struct->sType;
+    storageTexelBufferOffsetAlignmentBytes = in_struct->storageTexelBufferOffsetAlignmentBytes;
+    storageTexelBufferOffsetSingleTexelAlignment = in_struct->storageTexelBufferOffsetSingleTexelAlignment;
+    uniformTexelBufferOffsetAlignmentBytes = in_struct->uniformTexelBufferOffsetAlignmentBytes;
+    uniformTexelBufferOffsetSingleTexelAlignment = in_struct->uniformTexelBufferOffsetSingleTexelAlignment;
+    pNext = SafePnextCopy(in_struct->pNext);
+}
+
+void safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::initialize(const safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT* src)
+{
+    sType = src->sType;
+    storageTexelBufferOffsetAlignmentBytes = src->storageTexelBufferOffsetAlignmentBytes;
+    storageTexelBufferOffsetSingleTexelAlignment = src->storageTexelBufferOffsetSingleTexelAlignment;
+    uniformTexelBufferOffsetAlignmentBytes = src->uniformTexelBufferOffsetAlignmentBytes;
+    uniformTexelBufferOffsetSingleTexelAlignment = src->uniformTexelBufferOffsetSingleTexelAlignment;
+    pNext = SafePnextCopy(src->pNext);
+}
+
+char *SafeStringCopy(const char *in_string) {
+    if (nullptr == in_string) return nullptr;
+    char* dest = new char[std::strlen(in_string) + 1];
+    return std::strcpy(dest, in_string);
+}
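+
+// Illustrative note (editorial sketch, not emitted by the Vulkan code generator):
+// SafeStringCopy() hands back a heap copy that the caller owns and must release
+// with delete[], e.g. (hypothetical input string):
+//
+//     char *copy = SafeStringCopy(some_c_string);
+//     // ... use copy ...
+//     delete[] copy;
+//
+// SafePnextCopy()/FreePnextChain() below follow the same ownership pairing for
+// entire pNext chains: the copy is deep, and the caller (here, the safe_Vk*
+// wrappers' destructors) is responsible for freeing it.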
+
+void *SafePnextCopy(const void *pNext) {
+    if (!pNext) return nullptr;
+
+    void *safe_pNext;
+    const VkBaseOutStructure *header = reinterpret_cast<const VkBaseOutStructure *>(pNext);
+
+    switch (header->sType) {
+        // Special-case Loader Instance Struct passed to/from layer in pNext chain
+        case VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO: {
+            VkLayerInstanceCreateInfo *struct_copy = new VkLayerInstanceCreateInfo;
+            // TODO: Uses original VkLayerInstanceLink* chain, which should be okay for our uses
+            memcpy(struct_copy, pNext, sizeof(VkLayerInstanceCreateInfo));
+            struct_copy->pNext = SafePnextCopy(header->pNext);
+            safe_pNext = struct_copy;
+            break;
+        }
+        // Special-case Loader Device Struct passed to/from layer in pNext chain
+        case VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO: {
+            VkLayerDeviceCreateInfo *struct_copy = new VkLayerDeviceCreateInfo;
+            // TODO: Uses original VkLayerDeviceLink*, which should be okay for our uses
+            memcpy(struct_copy, pNext, sizeof(VkLayerDeviceCreateInfo));
+            struct_copy->pNext = SafePnextCopy(header->pNext);
+            safe_pNext = struct_copy;
+            break;
+        }
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES:
+            safe_pNext = new safe_VkPhysicalDeviceSubgroupProperties(reinterpret_cast<const VkPhysicalDeviceSubgroupProperties *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES:
+            safe_pNext = new safe_VkPhysicalDevice16BitStorageFeatures(reinterpret_cast<const VkPhysicalDevice16BitStorageFeatures *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS:
+            safe_pNext = new safe_VkMemoryDedicatedRequirements(reinterpret_cast<const VkMemoryDedicatedRequirements *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO:
+            safe_pNext = new safe_VkMemoryDedicatedAllocateInfo(reinterpret_cast<const VkMemoryDedicatedAllocateInfo *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO:
+            safe_pNext = new safe_VkMemoryAllocateFlagsInfo(reinterpret_cast<const VkMemoryAllocateFlagsInfo *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO:
+            safe_pNext = new safe_VkDeviceGroupRenderPassBeginInfo(reinterpret_cast<const VkDeviceGroupRenderPassBeginInfo *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO:
+            safe_pNext = new safe_VkDeviceGroupCommandBufferBeginInfo(reinterpret_cast<const VkDeviceGroupCommandBufferBeginInfo *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO:
+            safe_pNext = new safe_VkDeviceGroupSubmitInfo(reinterpret_cast<const VkDeviceGroupSubmitInfo *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO:
+            safe_pNext = new safe_VkDeviceGroupBindSparseInfo(reinterpret_cast<const VkDeviceGroupBindSparseInfo *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO:
+            safe_pNext = new safe_VkBindBufferMemoryDeviceGroupInfo(reinterpret_cast<const VkBindBufferMemoryDeviceGroupInfo *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO:
+            safe_pNext = new safe_VkBindImageMemoryDeviceGroupInfo(reinterpret_cast<const VkBindImageMemoryDeviceGroupInfo *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO:
+            safe_pNext = new safe_VkDeviceGroupDeviceCreateInfo(reinterpret_cast<const VkDeviceGroupDeviceCreateInfo *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2:
+            safe_pNext = new safe_VkPhysicalDeviceFeatures2(reinterpret_cast<const VkPhysicalDeviceFeatures2 *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES:
+            safe_pNext = new safe_VkPhysicalDevicePointClippingProperties(reinterpret_cast<const VkPhysicalDevicePointClippingProperties *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO:
+            safe_pNext = new safe_VkRenderPassInputAttachmentAspectCreateInfo(reinterpret_cast<const VkRenderPassInputAttachmentAspectCreateInfo *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO:
+            safe_pNext = new safe_VkImageViewUsageCreateInfo(reinterpret_cast<const VkImageViewUsageCreateInfo *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO:
+            safe_pNext = new safe_VkPipelineTessellationDomainOriginStateCreateInfo(reinterpret_cast<const VkPipelineTessellationDomainOriginStateCreateInfo *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO:
+            safe_pNext = new safe_VkRenderPassMultiviewCreateInfo(reinterpret_cast<const VkRenderPassMultiviewCreateInfo *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES:
+            safe_pNext = new safe_VkPhysicalDeviceMultiviewFeatures(reinterpret_cast<const VkPhysicalDeviceMultiviewFeatures *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES:
+            safe_pNext = new safe_VkPhysicalDeviceMultiviewProperties(reinterpret_cast<const VkPhysicalDeviceMultiviewProperties *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES:
+            safe_pNext = new safe_VkPhysicalDeviceVariablePointersFeatures(reinterpret_cast<const VkPhysicalDeviceVariablePointersFeatures *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES:
+            safe_pNext = new safe_VkPhysicalDeviceProtectedMemoryFeatures(reinterpret_cast<const VkPhysicalDeviceProtectedMemoryFeatures *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES:
+            safe_pNext = new safe_VkPhysicalDeviceProtectedMemoryProperties(reinterpret_cast<const VkPhysicalDeviceProtectedMemoryProperties *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO:
+            safe_pNext = new safe_VkProtectedSubmitInfo(reinterpret_cast<const VkProtectedSubmitInfo *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO:
+            safe_pNext = new safe_VkSamplerYcbcrConversionInfo(reinterpret_cast<const VkSamplerYcbcrConversionInfo *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO:
+            safe_pNext = new safe_VkBindImagePlaneMemoryInfo(reinterpret_cast<const VkBindImagePlaneMemoryInfo *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO:
+            safe_pNext = new safe_VkImagePlaneMemoryRequirementsInfo(reinterpret_cast<const VkImagePlaneMemoryRequirementsInfo *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES:
+            safe_pNext = new safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures(reinterpret_cast<const VkPhysicalDeviceSamplerYcbcrConversionFeatures *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES:
+            safe_pNext = new safe_VkSamplerYcbcrConversionImageFormatProperties(reinterpret_cast<const VkSamplerYcbcrConversionImageFormatProperties *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO:
+            safe_pNext = new safe_VkPhysicalDeviceExternalImageFormatInfo(reinterpret_cast<const VkPhysicalDeviceExternalImageFormatInfo *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES:
+            safe_pNext = new safe_VkExternalImageFormatProperties(reinterpret_cast<const VkExternalImageFormatProperties *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES:
+            safe_pNext = new safe_VkPhysicalDeviceIDProperties(reinterpret_cast<const VkPhysicalDeviceIDProperties *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO:
+            safe_pNext = new safe_VkExternalMemoryImageCreateInfo(reinterpret_cast<const VkExternalMemoryImageCreateInfo *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO:
+            safe_pNext = new safe_VkExternalMemoryBufferCreateInfo(reinterpret_cast<const VkExternalMemoryBufferCreateInfo *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO:
+            safe_pNext = new safe_VkExportMemoryAllocateInfo(reinterpret_cast<const VkExportMemoryAllocateInfo *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO:
+            safe_pNext = new safe_VkExportFenceCreateInfo(reinterpret_cast<const VkExportFenceCreateInfo *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO:
+            safe_pNext = new safe_VkExportSemaphoreCreateInfo(reinterpret_cast<const VkExportSemaphoreCreateInfo *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES:
+            safe_pNext = new safe_VkPhysicalDeviceMaintenance3Properties(reinterpret_cast<const VkPhysicalDeviceMaintenance3Properties *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES:
+            safe_pNext = new safe_VkPhysicalDeviceShaderDrawParametersFeatures(reinterpret_cast<const VkPhysicalDeviceShaderDrawParametersFeatures *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR:
+            safe_pNext = new safe_VkImageSwapchainCreateInfoKHR(reinterpret_cast<const VkImageSwapchainCreateInfoKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR:
+            safe_pNext = new safe_VkBindImageMemorySwapchainInfoKHR(reinterpret_cast<const VkBindImageMemorySwapchainInfoKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR:
+            safe_pNext = new safe_VkDeviceGroupPresentInfoKHR(reinterpret_cast<const VkDeviceGroupPresentInfoKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR:
+            safe_pNext = new safe_VkDeviceGroupSwapchainCreateInfoKHR(reinterpret_cast<const VkDeviceGroupSwapchainCreateInfoKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR:
+            safe_pNext = new safe_VkDisplayPresentInfoKHR(reinterpret_cast<const VkDisplayPresentInfoKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR:
+            safe_pNext = new safe_VkImportMemoryFdInfoKHR(reinterpret_cast<const VkImportMemoryFdInfoKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR:
+            safe_pNext = new safe_VkPhysicalDevicePushDescriptorPropertiesKHR(reinterpret_cast<const VkPhysicalDevicePushDescriptorPropertiesKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR:
+            safe_pNext = new safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR(reinterpret_cast<const VkPhysicalDeviceShaderFloat16Int8FeaturesKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR:
+            safe_pNext = new safe_VkPresentRegionsKHR(reinterpret_cast<const VkPresentRegionsKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR:
+            safe_pNext = new safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR(reinterpret_cast<const VkPhysicalDeviceImagelessFramebufferFeaturesKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR:
+            safe_pNext = new safe_VkFramebufferAttachmentsCreateInfoKHR(reinterpret_cast<const VkFramebufferAttachmentsCreateInfoKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR:
+            safe_pNext = new safe_VkRenderPassAttachmentBeginInfoKHR(reinterpret_cast<const VkRenderPassAttachmentBeginInfoKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR:
+            safe_pNext = new safe_VkSharedPresentSurfaceCapabilitiesKHR(reinterpret_cast<const VkSharedPresentSurfaceCapabilitiesKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR:
+            safe_pNext = new safe_VkPhysicalDevicePerformanceQueryFeaturesKHR(reinterpret_cast<const VkPhysicalDevicePerformanceQueryFeaturesKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR:
+            safe_pNext = new safe_VkPhysicalDevicePerformanceQueryPropertiesKHR(reinterpret_cast<const VkPhysicalDevicePerformanceQueryPropertiesKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR:
+            safe_pNext = new safe_VkQueryPoolPerformanceCreateInfoKHR(reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR:
+            safe_pNext = new safe_VkPerformanceQuerySubmitInfoKHR(reinterpret_cast<const VkPerformanceQuerySubmitInfoKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR:
+            safe_pNext = new safe_VkImageFormatListCreateInfoKHR(reinterpret_cast<const VkImageFormatListCreateInfoKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR:
+            safe_pNext = new safe_VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR(reinterpret_cast<const VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR:
+            safe_pNext = new safe_VkPhysicalDevice8BitStorageFeaturesKHR(reinterpret_cast<const VkPhysicalDevice8BitStorageFeaturesKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR:
+            safe_pNext = new safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR(reinterpret_cast<const VkPhysicalDeviceShaderAtomicInt64FeaturesKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR:
+            safe_pNext = new safe_VkPhysicalDeviceShaderClockFeaturesKHR(reinterpret_cast<const VkPhysicalDeviceShaderClockFeaturesKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR:
+            safe_pNext = new safe_VkPhysicalDeviceDriverPropertiesKHR(reinterpret_cast<const VkPhysicalDeviceDriverPropertiesKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR:
+            safe_pNext = new safe_VkPhysicalDeviceFloatControlsPropertiesKHR(reinterpret_cast<const VkPhysicalDeviceFloatControlsPropertiesKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR:
+            safe_pNext = new safe_VkSubpassDescriptionDepthStencilResolveKHR(reinterpret_cast<const VkSubpassDescriptionDepthStencilResolveKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR:
+            safe_pNext = new safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR(reinterpret_cast<const VkPhysicalDeviceDepthStencilResolvePropertiesKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR:
+            safe_pNext = new safe_VkPhysicalDeviceTimelineSemaphoreFeaturesKHR(reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreFeaturesKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR:
+            safe_pNext = new safe_VkPhysicalDeviceTimelineSemaphorePropertiesKHR(reinterpret_cast<const VkPhysicalDeviceTimelineSemaphorePropertiesKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR:
+            safe_pNext = new safe_VkSemaphoreTypeCreateInfoKHR(reinterpret_cast<const VkSemaphoreTypeCreateInfoKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR:
+            safe_pNext = new safe_VkTimelineSemaphoreSubmitInfoKHR(reinterpret_cast<const VkTimelineSemaphoreSubmitInfoKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR:
+            safe_pNext = new safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR(reinterpret_cast<const VkPhysicalDeviceVulkanMemoryModelFeaturesKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR:
+            safe_pNext = new safe_VkSurfaceProtectedCapabilitiesKHR(reinterpret_cast<const VkSurfaceProtectedCapabilitiesKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR:
+            safe_pNext = new safe_VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR(reinterpret_cast<const VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR:
+            safe_pNext = new safe_VkAttachmentReferenceStencilLayoutKHR(reinterpret_cast<const VkAttachmentReferenceStencilLayoutKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR:
+            safe_pNext = new safe_VkAttachmentDescriptionStencilLayoutKHR(reinterpret_cast<const VkAttachmentDescriptionStencilLayoutKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR:
+            safe_pNext = new safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR(reinterpret_cast<const VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR:
+            safe_pNext = new safe_VkPhysicalDeviceBufferDeviceAddressFeaturesKHR(reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeaturesKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR:
+            safe_pNext = new safe_VkBufferOpaqueCaptureAddressCreateInfoKHR(reinterpret_cast<const VkBufferOpaqueCaptureAddressCreateInfoKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR:
+            safe_pNext = new safe_VkMemoryOpaqueCaptureAddressAllocateInfoKHR(reinterpret_cast<const VkMemoryOpaqueCaptureAddressAllocateInfoKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR:
+            safe_pNext = new safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR(reinterpret_cast<const VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT:
+            safe_pNext = new safe_VkDebugReportCallbackCreateInfoEXT(reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD:
+            safe_pNext = new safe_VkPipelineRasterizationStateRasterizationOrderAMD(reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV:
+            safe_pNext = new safe_VkDedicatedAllocationImageCreateInfoNV(reinterpret_cast<const VkDedicatedAllocationImageCreateInfoNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV:
+            safe_pNext = new safe_VkDedicatedAllocationBufferCreateInfoNV(reinterpret_cast<const VkDedicatedAllocationBufferCreateInfoNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV:
+            safe_pNext = new safe_VkDedicatedAllocationMemoryAllocateInfoNV(reinterpret_cast<const VkDedicatedAllocationMemoryAllocateInfoNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceTransformFeedbackFeaturesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT(reinterpret_cast<const VkPhysicalDeviceTransformFeedbackPropertiesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT:
+            safe_pNext = new safe_VkPipelineRasterizationStateStreamCreateInfoEXT(reinterpret_cast<const VkPipelineRasterizationStateStreamCreateInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD:
+            safe_pNext = new safe_VkTextureLODGatherFormatPropertiesAMD(reinterpret_cast<const VkTextureLODGatherFormatPropertiesAMD *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV:
+            safe_pNext = new safe_VkPhysicalDeviceCornerSampledImageFeaturesNV(reinterpret_cast<const VkPhysicalDeviceCornerSampledImageFeaturesNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV:
+            safe_pNext = new safe_VkExternalMemoryImageCreateInfoNV(reinterpret_cast<const VkExternalMemoryImageCreateInfoNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV:
+            safe_pNext = new safe_VkExportMemoryAllocateInfoNV(reinterpret_cast<const VkExportMemoryAllocateInfoNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT:
+            safe_pNext = new safe_VkValidationFlagsEXT(reinterpret_cast<const VkValidationFlagsEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT:
+            safe_pNext = new safe_VkImageViewASTCDecodeModeEXT(reinterpret_cast<const VkImageViewASTCDecodeModeEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceASTCDecodeFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceASTCDecodeFeaturesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceConditionalRenderingFeaturesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT:
+            safe_pNext = new safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT(reinterpret_cast<const VkCommandBufferInheritanceConditionalRenderingInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV:
+            safe_pNext = new safe_VkPipelineViewportWScalingStateCreateInfoNV(reinterpret_cast<const VkPipelineViewportWScalingStateCreateInfoNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT:
+            safe_pNext = new safe_VkSwapchainCounterCreateInfoEXT(reinterpret_cast<const VkSwapchainCounterCreateInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE:
+            safe_pNext = new safe_VkPresentTimesInfoGOOGLE(reinterpret_cast<const VkPresentTimesInfoGOOGLE *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX:
+            safe_pNext = new safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV:
+            safe_pNext = new safe_VkPipelineViewportSwizzleStateCreateInfoNV(reinterpret_cast<const VkPipelineViewportSwizzleStateCreateInfoNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT(reinterpret_cast<const VkPhysicalDeviceDiscardRectanglePropertiesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT:
+            safe_pNext = new safe_VkPipelineDiscardRectangleStateCreateInfoEXT(reinterpret_cast<const VkPipelineDiscardRectangleStateCreateInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT(reinterpret_cast<const VkPhysicalDeviceConservativeRasterizationPropertiesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT:
+            safe_pNext = new safe_VkPipelineRasterizationConservativeStateCreateInfoEXT(reinterpret_cast<const VkPipelineRasterizationConservativeStateCreateInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceDepthClipEnableFeaturesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT:
+            safe_pNext = new safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT(reinterpret_cast<const VkPipelineRasterizationDepthClipStateCreateInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT:
+            safe_pNext = new safe_VkDebugUtilsMessengerCreateInfoEXT(reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT:
+            safe_pNext = new safe_VkSamplerReductionModeCreateInfoEXT(reinterpret_cast<const VkSamplerReductionModeCreateInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT(reinterpret_cast<const VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockFeaturesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT(reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockPropertiesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT:
+            safe_pNext = new safe_VkWriteDescriptorSetInlineUniformBlockEXT(reinterpret_cast<const VkWriteDescriptorSetInlineUniformBlockEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT:
+            safe_pNext = new safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT(reinterpret_cast<const VkDescriptorPoolInlineUniformBlockCreateInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT:
+            safe_pNext = new safe_VkSampleLocationsInfoEXT(reinterpret_cast<const VkSampleLocationsInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT:
+            safe_pNext = new safe_VkRenderPassSampleLocationsBeginInfoEXT(reinterpret_cast<const VkRenderPassSampleLocationsBeginInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT:
+            safe_pNext = new safe_VkPipelineSampleLocationsStateCreateInfoEXT(reinterpret_cast<const VkPipelineSampleLocationsStateCreateInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceSampleLocationsPropertiesEXT(reinterpret_cast<const VkPhysicalDeviceSampleLocationsPropertiesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT(reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT:
+            safe_pNext = new safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT(reinterpret_cast<const VkPipelineColorBlendAdvancedStateCreateInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV:
+            safe_pNext = new safe_VkPipelineCoverageToColorStateCreateInfoNV(reinterpret_cast<const VkPipelineCoverageToColorStateCreateInfoNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV:
+            safe_pNext = new safe_VkPipelineCoverageModulationStateCreateInfoNV(reinterpret_cast<const VkPipelineCoverageModulationStateCreateInfoNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV:
+            safe_pNext = new safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV(reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsPropertiesNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV:
+            safe_pNext = new safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV(reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsFeaturesNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT:
+            safe_pNext = new safe_VkDrmFormatModifierPropertiesListEXT(reinterpret_cast<const VkDrmFormatModifierPropertiesListEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT(reinterpret_cast<const VkPhysicalDeviceImageDrmFormatModifierInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT:
+            safe_pNext = new safe_VkImageDrmFormatModifierListCreateInfoEXT(reinterpret_cast<const VkImageDrmFormatModifierListCreateInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT:
+            safe_pNext = new safe_VkImageDrmFormatModifierExplicitCreateInfoEXT(reinterpret_cast<const VkImageDrmFormatModifierExplicitCreateInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT:
+            safe_pNext = new safe_VkShaderModuleValidationCacheCreateInfoEXT(reinterpret_cast<const VkShaderModuleValidationCacheCreateInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT:
+            safe_pNext = new safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT(reinterpret_cast<const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingFeaturesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT(reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingPropertiesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT:
+            safe_pNext = new safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT(reinterpret_cast<const VkDescriptorSetVariableDescriptorCountAllocateInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT:
+            safe_pNext = new safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT(reinterpret_cast<const VkDescriptorSetVariableDescriptorCountLayoutSupportEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV:
+            safe_pNext = new safe_VkPipelineViewportShadingRateImageStateCreateInfoNV(reinterpret_cast<const VkPipelineViewportShadingRateImageStateCreateInfoNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV:
+            safe_pNext = new safe_VkPhysicalDeviceShadingRateImageFeaturesNV(reinterpret_cast<const VkPhysicalDeviceShadingRateImageFeaturesNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV:
+            safe_pNext = new safe_VkPhysicalDeviceShadingRateImagePropertiesNV(reinterpret_cast<const VkPhysicalDeviceShadingRateImagePropertiesNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV:
+            safe_pNext = new safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV(reinterpret_cast<const VkPipelineViewportCoarseSampleOrderStateCreateInfoNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV:
+            safe_pNext = new safe_VkWriteDescriptorSetAccelerationStructureNV(reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV:
+            safe_pNext = new safe_VkPhysicalDeviceRayTracingPropertiesNV(reinterpret_cast<const VkPhysicalDeviceRayTracingPropertiesNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV:
+            safe_pNext = new safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV(reinterpret_cast<const VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV:
+            safe_pNext = new safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV(reinterpret_cast<const VkPipelineRepresentativeFragmentTestStateCreateInfoNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceImageViewImageFormatInfoEXT(reinterpret_cast<const VkPhysicalDeviceImageViewImageFormatInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT:
+            safe_pNext = new safe_VkFilterCubicImageViewImageFormatPropertiesEXT(reinterpret_cast<const VkFilterCubicImageViewImageFormatPropertiesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT:
+            safe_pNext = new safe_VkDeviceQueueGlobalPriorityCreateInfoEXT(reinterpret_cast<const VkDeviceQueueGlobalPriorityCreateInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT:
+            safe_pNext = new safe_VkImportMemoryHostPointerInfoEXT(reinterpret_cast<const VkImportMemoryHostPointerInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT(reinterpret_cast<const VkPhysicalDeviceExternalMemoryHostPropertiesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD:
+            safe_pNext = new safe_VkPipelineCompilerControlCreateInfoAMD(reinterpret_cast<const VkPipelineCompilerControlCreateInfoAMD *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD:
+            safe_pNext = new safe_VkPhysicalDeviceShaderCorePropertiesAMD(reinterpret_cast<const VkPhysicalDeviceShaderCorePropertiesAMD *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD:
+            safe_pNext = new safe_VkDeviceMemoryOverallocationCreateInfoAMD(reinterpret_cast<const VkDeviceMemoryOverallocationCreateInfoAMD *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT(reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT:
+            safe_pNext = new safe_VkPipelineVertexInputDivisorStateCreateInfoEXT(reinterpret_cast<const VkPipelineVertexInputDivisorStateCreateInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT:
+            safe_pNext = new safe_VkPipelineCreationFeedbackCreateInfoEXT(reinterpret_cast<const VkPipelineCreationFeedbackCreateInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV:
+            safe_pNext = new safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV(reinterpret_cast<const VkPhysicalDeviceComputeShaderDerivativesFeaturesNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV:
+            safe_pNext = new safe_VkPhysicalDeviceMeshShaderFeaturesNV(reinterpret_cast<const VkPhysicalDeviceMeshShaderFeaturesNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV:
+            safe_pNext = new safe_VkPhysicalDeviceMeshShaderPropertiesNV(reinterpret_cast<const VkPhysicalDeviceMeshShaderPropertiesNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV:
+            safe_pNext = new safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV(reinterpret_cast<const VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV:
+            safe_pNext = new safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV(reinterpret_cast<const VkPhysicalDeviceShaderImageFootprintFeaturesNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV:
+            safe_pNext = new safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV(reinterpret_cast<const VkPipelineViewportExclusiveScissorStateCreateInfoNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV:
+            safe_pNext = new safe_VkPhysicalDeviceExclusiveScissorFeaturesNV(reinterpret_cast<const VkPhysicalDeviceExclusiveScissorFeaturesNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV:
+            safe_pNext = new safe_VkQueueFamilyCheckpointPropertiesNV(reinterpret_cast<const VkQueueFamilyCheckpointPropertiesNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL:
+            safe_pNext = new safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL(reinterpret_cast<const VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT:
+            safe_pNext = new safe_VkPhysicalDevicePCIBusInfoPropertiesEXT(reinterpret_cast<const VkPhysicalDevicePCIBusInfoPropertiesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD:
+            safe_pNext = new safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD(reinterpret_cast<const VkDisplayNativeHdrSurfaceCapabilitiesAMD *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD:
+            safe_pNext = new safe_VkSwapchainDisplayNativeHdrCreateInfoAMD(reinterpret_cast<const VkSwapchainDisplayNativeHdrCreateInfoAMD *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapFeaturesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT(reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapPropertiesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT:
+            safe_pNext = new safe_VkRenderPassFragmentDensityMapCreateInfoEXT(reinterpret_cast<const VkRenderPassFragmentDensityMapCreateInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceScalarBlockLayoutFeaturesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlFeaturesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT(reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlPropertiesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT:
+            safe_pNext = new safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT(reinterpret_cast<const VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD:
+            safe_pNext = new safe_VkPhysicalDeviceShaderCoreProperties2AMD(reinterpret_cast<const VkPhysicalDeviceShaderCoreProperties2AMD *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD:
+            safe_pNext = new safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD(reinterpret_cast<const VkPhysicalDeviceCoherentMemoryFeaturesAMD *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT(reinterpret_cast<const VkPhysicalDeviceMemoryBudgetPropertiesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceMemoryPriorityFeaturesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT:
+            safe_pNext = new safe_VkMemoryPriorityAllocateInfoEXT(reinterpret_cast<const VkMemoryPriorityAllocateInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV:
+            safe_pNext = new safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV(reinterpret_cast<const VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT:
+            safe_pNext = new safe_VkBufferDeviceAddressCreateInfoEXT(reinterpret_cast<const VkBufferDeviceAddressCreateInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT:
+            safe_pNext = new safe_VkImageStencilUsageCreateInfoEXT(reinterpret_cast<const VkImageStencilUsageCreateInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT:
+            safe_pNext = new safe_VkValidationFeaturesEXT(reinterpret_cast<const VkValidationFeaturesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV:
+            safe_pNext = new safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV(reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixFeaturesNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV:
+            safe_pNext = new safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV(reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixPropertiesNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV:
+            safe_pNext = new safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV(reinterpret_cast<const VkPhysicalDeviceCoverageReductionModeFeaturesNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV:
+            safe_pNext = new safe_VkPipelineCoverageReductionStateCreateInfoNV(reinterpret_cast<const VkPipelineCoverageReductionStateCreateInfoNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceLineRasterizationFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceLineRasterizationFeaturesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceLineRasterizationPropertiesEXT(reinterpret_cast<const VkPhysicalDeviceLineRasterizationPropertiesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT:
+            safe_pNext = new safe_VkPipelineRasterizationLineStateCreateInfoEXT(reinterpret_cast<const VkPipelineRasterizationLineStateCreateInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceHostQueryResetFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceHostQueryResetFeaturesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT(reinterpret_cast<const VkPhysicalDeviceIndexTypeUint8FeaturesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT(reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT:
+            safe_pNext = new safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT(reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT *>(pNext));
+            break;
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+        case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID:
+            safe_pNext = new safe_VkAndroidHardwareBufferUsageANDROID(reinterpret_cast<const VkAndroidHardwareBufferUsageANDROID *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID:
+            safe_pNext = new safe_VkAndroidHardwareBufferFormatPropertiesANDROID(reinterpret_cast<const VkAndroidHardwareBufferFormatPropertiesANDROID *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID:
+            safe_pNext = new safe_VkImportAndroidHardwareBufferInfoANDROID(reinterpret_cast<const VkImportAndroidHardwareBufferInfoANDROID *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID:
+            safe_pNext = new safe_VkExternalFormatANDROID(reinterpret_cast<const VkExternalFormatANDROID *>(pNext));
+            break;
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_GGP
+        case VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP:
+            safe_pNext = new safe_VkPresentFrameTokenGGP(reinterpret_cast<const VkPresentFrameTokenGGP *>(pNext));
+            break;
+#endif // VK_USE_PLATFORM_GGP
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+        case VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR:
+            safe_pNext = new safe_VkImportMemoryWin32HandleInfoKHR(reinterpret_cast<const VkImportMemoryWin32HandleInfoKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR:
+            safe_pNext = new safe_VkExportMemoryWin32HandleInfoKHR(reinterpret_cast<const VkExportMemoryWin32HandleInfoKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR:
+            safe_pNext = new safe_VkWin32KeyedMutexAcquireReleaseInfoKHR(reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR:
+            safe_pNext = new safe_VkExportSemaphoreWin32HandleInfoKHR(reinterpret_cast<const VkExportSemaphoreWin32HandleInfoKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR:
+            safe_pNext = new safe_VkD3D12FenceSubmitInfoKHR(reinterpret_cast<const VkD3D12FenceSubmitInfoKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR:
+            safe_pNext = new safe_VkExportFenceWin32HandleInfoKHR(reinterpret_cast<const VkExportFenceWin32HandleInfoKHR *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV:
+            safe_pNext = new safe_VkImportMemoryWin32HandleInfoNV(reinterpret_cast<const VkImportMemoryWin32HandleInfoNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV:
+            safe_pNext = new safe_VkExportMemoryWin32HandleInfoNV(reinterpret_cast<const VkExportMemoryWin32HandleInfoNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV:
+            safe_pNext = new safe_VkWin32KeyedMutexAcquireReleaseInfoNV(reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoNV *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT:
+            safe_pNext = new safe_VkSurfaceFullScreenExclusiveInfoEXT(reinterpret_cast<const VkSurfaceFullScreenExclusiveInfoEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT:
+            safe_pNext = new safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT(reinterpret_cast<const VkSurfaceCapabilitiesFullScreenExclusiveEXT *>(pNext));
+            break;
+        case VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT:
+            safe_pNext = new safe_VkSurfaceFullScreenExclusiveWin32InfoEXT(reinterpret_cast<const VkSurfaceFullScreenExclusiveWin32InfoEXT *>(pNext));
+            break;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+        default: // Encountered an unknown sType -- skip (do not copy) this entry in the chain
+            safe_pNext = SafePnextCopy(header->pNext);
+            break;
+    }
+
+    return safe_pNext;
+}
+
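+// Releases a chain previously produced by SafePnextCopy(). safe_* wrappers are
+// deleted through their concrete types so their destructors free the rest of
+// the chain; the loader create-info copies are freed recursively here since
+// they are plain memcpy'd structs without destructors.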
+void FreePnextChain(const void *pNext) {
+    if (!pNext) return;
+
+    auto header = reinterpret_cast<const VkBaseOutStructure *>(pNext);
+
+    switch (header->sType) {
+        // Special-case Loader Instance Struct passed to/from layer in pNext chain
+        case VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO:
+            FreePnextChain(header->pNext);
+            delete reinterpret_cast<const VkLayerInstanceCreateInfo *>(pNext);
+            break;
+        // Special-case Loader Device Struct passed to/from layer in pNext chain
+        case VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO:
+            FreePnextChain(header->pNext);
+            delete reinterpret_cast<const VkLayerDeviceCreateInfo *>(pNext);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceSubgroupProperties *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES:
+            delete reinterpret_cast<const safe_VkPhysicalDevice16BitStorageFeatures *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS:
+            delete reinterpret_cast<const safe_VkMemoryDedicatedRequirements *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO:
+            delete reinterpret_cast<const safe_VkMemoryDedicatedAllocateInfo *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO:
+            delete reinterpret_cast<const safe_VkMemoryAllocateFlagsInfo *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO:
+            delete reinterpret_cast<const safe_VkDeviceGroupRenderPassBeginInfo *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO:
+            delete reinterpret_cast<const safe_VkDeviceGroupCommandBufferBeginInfo *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO:
+            delete reinterpret_cast<const safe_VkDeviceGroupSubmitInfo *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO:
+            delete reinterpret_cast<const safe_VkDeviceGroupBindSparseInfo *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO:
+            delete reinterpret_cast<const safe_VkBindBufferMemoryDeviceGroupInfo *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO:
+            delete reinterpret_cast<const safe_VkBindImageMemoryDeviceGroupInfo *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO:
+            delete reinterpret_cast<const safe_VkDeviceGroupDeviceCreateInfo *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceFeatures2 *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES:
+            delete reinterpret_cast<const safe_VkPhysicalDevicePointClippingProperties *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO:
+            delete reinterpret_cast<const safe_VkRenderPassInputAttachmentAspectCreateInfo *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO:
+            delete reinterpret_cast<const safe_VkImageViewUsageCreateInfo *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO:
+            delete reinterpret_cast<const safe_VkPipelineTessellationDomainOriginStateCreateInfo *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO:
+            delete reinterpret_cast<const safe_VkRenderPassMultiviewCreateInfo *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceMultiviewFeatures *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceMultiviewProperties *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceVariablePointersFeatures *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceProtectedMemoryFeatures *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceProtectedMemoryProperties *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO:
+            delete reinterpret_cast<const safe_VkProtectedSubmitInfo *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO:
+            delete reinterpret_cast<const safe_VkSamplerYcbcrConversionInfo *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO:
+            delete reinterpret_cast<const safe_VkBindImagePlaneMemoryInfo *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO:
+            delete reinterpret_cast<const safe_VkImagePlaneMemoryRequirementsInfo *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES:
+            delete reinterpret_cast<const safe_VkSamplerYcbcrConversionImageFormatProperties *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceExternalImageFormatInfo *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES:
+            delete reinterpret_cast<const safe_VkExternalImageFormatProperties *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceIDProperties *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO:
+            delete reinterpret_cast<const safe_VkExternalMemoryImageCreateInfo *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO:
+            delete reinterpret_cast<const safe_VkExternalMemoryBufferCreateInfo *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO:
+            delete reinterpret_cast<const safe_VkExportMemoryAllocateInfo *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO:
+            delete reinterpret_cast<const safe_VkExportFenceCreateInfo *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO:
+            delete reinterpret_cast<const safe_VkExportSemaphoreCreateInfo *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceMaintenance3Properties *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceShaderDrawParametersFeatures *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR:
+            delete reinterpret_cast<const safe_VkImageSwapchainCreateInfoKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR:
+            delete reinterpret_cast<const safe_VkBindImageMemorySwapchainInfoKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR:
+            delete reinterpret_cast<const safe_VkDeviceGroupPresentInfoKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR:
+            delete reinterpret_cast<const safe_VkDeviceGroupSwapchainCreateInfoKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR:
+            delete reinterpret_cast<const safe_VkDisplayPresentInfoKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR:
+            delete reinterpret_cast<const safe_VkImportMemoryFdInfoKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR:
+            delete reinterpret_cast<const safe_VkPhysicalDevicePushDescriptorPropertiesKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR:
+            delete reinterpret_cast<const safe_VkPresentRegionsKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR:
+            delete reinterpret_cast<const safe_VkFramebufferAttachmentsCreateInfoKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR:
+            delete reinterpret_cast<const safe_VkRenderPassAttachmentBeginInfoKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR:
+            delete reinterpret_cast<const safe_VkSharedPresentSurfaceCapabilitiesKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR:
+            delete reinterpret_cast<const safe_VkPhysicalDevicePerformanceQueryFeaturesKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR:
+            delete reinterpret_cast<const safe_VkPhysicalDevicePerformanceQueryPropertiesKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR:
+            delete reinterpret_cast<const safe_VkQueryPoolPerformanceCreateInfoKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR:
+            delete reinterpret_cast<const safe_VkPerformanceQuerySubmitInfoKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR:
+            delete reinterpret_cast<const safe_VkImageFormatListCreateInfoKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR:
+            delete reinterpret_cast<const safe_VkPhysicalDevice8BitStorageFeaturesKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceShaderClockFeaturesKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceDriverPropertiesKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceFloatControlsPropertiesKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR:
+            delete reinterpret_cast<const safe_VkSubpassDescriptionDepthStencilResolveKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceTimelineSemaphoreFeaturesKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceTimelineSemaphorePropertiesKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR:
+            delete reinterpret_cast<const safe_VkSemaphoreTypeCreateInfoKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR:
+            delete reinterpret_cast<const safe_VkTimelineSemaphoreSubmitInfoKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR:
+            delete reinterpret_cast<const safe_VkSurfaceProtectedCapabilitiesKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR:
+            delete reinterpret_cast<const safe_VkAttachmentReferenceStencilLayoutKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR:
+            delete reinterpret_cast<const safe_VkAttachmentDescriptionStencilLayoutKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceBufferDeviceAddressFeaturesKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR:
+            delete reinterpret_cast<const safe_VkBufferOpaqueCaptureAddressCreateInfoKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR:
+            delete reinterpret_cast<const safe_VkMemoryOpaqueCaptureAddressAllocateInfoKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR:
+            delete reinterpret_cast<const safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT:
+            delete reinterpret_cast<const safe_VkDebugReportCallbackCreateInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD:
+            delete reinterpret_cast<const safe_VkPipelineRasterizationStateRasterizationOrderAMD *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV:
+            delete reinterpret_cast<const safe_VkDedicatedAllocationImageCreateInfoNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV:
+            delete reinterpret_cast<const safe_VkDedicatedAllocationBufferCreateInfoNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV:
+            delete reinterpret_cast<const safe_VkDedicatedAllocationMemoryAllocateInfoNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT:
+            delete reinterpret_cast<const safe_VkPipelineRasterizationStateStreamCreateInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD:
+            delete reinterpret_cast<const safe_VkTextureLODGatherFormatPropertiesAMD *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceCornerSampledImageFeaturesNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV:
+            delete reinterpret_cast<const safe_VkExternalMemoryImageCreateInfoNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV:
+            delete reinterpret_cast<const safe_VkExportMemoryAllocateInfoNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT:
+            delete reinterpret_cast<const safe_VkValidationFlagsEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT:
+            delete reinterpret_cast<const safe_VkImageViewASTCDecodeModeEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceASTCDecodeFeaturesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT:
+            delete reinterpret_cast<const safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV:
+            delete reinterpret_cast<const safe_VkPipelineViewportWScalingStateCreateInfoNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT:
+            delete reinterpret_cast<const safe_VkSwapchainCounterCreateInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE:
+            delete reinterpret_cast<const safe_VkPresentTimesInfoGOOGLE *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV:
+            delete reinterpret_cast<const safe_VkPipelineViewportSwizzleStateCreateInfoNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT:
+            delete reinterpret_cast<const safe_VkPipelineDiscardRectangleStateCreateInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT:
+            delete reinterpret_cast<const safe_VkPipelineRasterizationConservativeStateCreateInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT:
+            delete reinterpret_cast<const safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT:
+            delete reinterpret_cast<const safe_VkDebugUtilsMessengerCreateInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT:
+            delete reinterpret_cast<const safe_VkSamplerReductionModeCreateInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT:
+            delete reinterpret_cast<const safe_VkWriteDescriptorSetInlineUniformBlockEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT:
+            delete reinterpret_cast<const safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT:
+            delete reinterpret_cast<const safe_VkSampleLocationsInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT:
+            delete reinterpret_cast<const safe_VkRenderPassSampleLocationsBeginInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT:
+            delete reinterpret_cast<const safe_VkPipelineSampleLocationsStateCreateInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceSampleLocationsPropertiesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT:
+            delete reinterpret_cast<const safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV:
+            delete reinterpret_cast<const safe_VkPipelineCoverageToColorStateCreateInfoNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV:
+            delete reinterpret_cast<const safe_VkPipelineCoverageModulationStateCreateInfoNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT:
+            delete reinterpret_cast<const safe_VkDrmFormatModifierPropertiesListEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT:
+            delete reinterpret_cast<const safe_VkImageDrmFormatModifierListCreateInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT:
+            delete reinterpret_cast<const safe_VkImageDrmFormatModifierExplicitCreateInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT:
+            delete reinterpret_cast<const safe_VkShaderModuleValidationCacheCreateInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT:
+            delete reinterpret_cast<const safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT:
+            delete reinterpret_cast<const safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT:
+            delete reinterpret_cast<const safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV:
+            delete reinterpret_cast<const safe_VkPipelineViewportShadingRateImageStateCreateInfoNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceShadingRateImageFeaturesNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceShadingRateImagePropertiesNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV:
+            delete reinterpret_cast<const safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV:
+            delete reinterpret_cast<const safe_VkWriteDescriptorSetAccelerationStructureNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceRayTracingPropertiesNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV:
+            delete reinterpret_cast<const safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceImageViewImageFormatInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT:
+            delete reinterpret_cast<const safe_VkFilterCubicImageViewImageFormatPropertiesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT:
+            delete reinterpret_cast<const safe_VkDeviceQueueGlobalPriorityCreateInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT:
+            delete reinterpret_cast<const safe_VkImportMemoryHostPointerInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD:
+            delete reinterpret_cast<const safe_VkPipelineCompilerControlCreateInfoAMD *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceShaderCorePropertiesAMD *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD:
+            delete reinterpret_cast<const safe_VkDeviceMemoryOverallocationCreateInfoAMD *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT:
+            delete reinterpret_cast<const safe_VkPipelineVertexInputDivisorStateCreateInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT:
+            delete reinterpret_cast<const safe_VkPipelineCreationFeedbackCreateInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceMeshShaderFeaturesNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceMeshShaderPropertiesNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV:
+            delete reinterpret_cast<const safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceExclusiveScissorFeaturesNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV:
+            delete reinterpret_cast<const safe_VkQueueFamilyCheckpointPropertiesNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDevicePCIBusInfoPropertiesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD:
+            delete reinterpret_cast<const safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD:
+            delete reinterpret_cast<const safe_VkSwapchainDisplayNativeHdrCreateInfoAMD *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT:
+            delete reinterpret_cast<const safe_VkRenderPassFragmentDensityMapCreateInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT:
+            delete reinterpret_cast<const safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceShaderCoreProperties2AMD *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT:
+            delete reinterpret_cast<const safe_VkMemoryPriorityAllocateInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT:
+            delete reinterpret_cast<const safe_VkBufferDeviceAddressCreateInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT:
+            delete reinterpret_cast<const safe_VkImageStencilUsageCreateInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT:
+            delete reinterpret_cast<const safe_VkValidationFeaturesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV:
+            delete reinterpret_cast<const safe_VkPipelineCoverageReductionStateCreateInfoNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceLineRasterizationFeaturesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceLineRasterizationPropertiesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT:
+            delete reinterpret_cast<const safe_VkPipelineRasterizationLineStateCreateInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceHostQueryResetFeaturesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT:
+            delete reinterpret_cast<const safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT *>(header);
+            break;
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+        case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID:
+            delete reinterpret_cast<const safe_VkAndroidHardwareBufferUsageANDROID *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID:
+            delete reinterpret_cast<const safe_VkAndroidHardwareBufferFormatPropertiesANDROID *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID:
+            delete reinterpret_cast<const safe_VkImportAndroidHardwareBufferInfoANDROID *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID:
+            delete reinterpret_cast<const safe_VkExternalFormatANDROID *>(header);
+            break;
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_GGP
+        case VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP:
+            delete reinterpret_cast<const safe_VkPresentFrameTokenGGP *>(header);
+            break;
+#endif // VK_USE_PLATFORM_GGP
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+        case VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR:
+            delete reinterpret_cast<const safe_VkImportMemoryWin32HandleInfoKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR:
+            delete reinterpret_cast<const safe_VkExportMemoryWin32HandleInfoKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR:
+            delete reinterpret_cast<const safe_VkWin32KeyedMutexAcquireReleaseInfoKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR:
+            delete reinterpret_cast<const safe_VkExportSemaphoreWin32HandleInfoKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR:
+            delete reinterpret_cast<const safe_VkD3D12FenceSubmitInfoKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR:
+            delete reinterpret_cast<const safe_VkExportFenceWin32HandleInfoKHR *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV:
+            delete reinterpret_cast<const safe_VkImportMemoryWin32HandleInfoNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV:
+            delete reinterpret_cast<const safe_VkExportMemoryWin32HandleInfoNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV:
+            delete reinterpret_cast<const safe_VkWin32KeyedMutexAcquireReleaseInfoNV *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT:
+            delete reinterpret_cast<const safe_VkSurfaceFullScreenExclusiveInfoEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT:
+            delete reinterpret_cast<const safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT *>(header);
+            break;
+        case VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT:
+            delete reinterpret_cast<const safe_VkSurfaceFullScreenExclusiveWin32InfoEXT *>(header);
+            break;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+        default: // Encountered an unknown sType -- panic, there should be none such in safe chain
+            assert(false);
+            FreePnextChain(header->pNext);
+            break;
+    }
+}
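
For context, the two generated entry points above are meant to be used as a pair: SafePnextCopy() deep-copies every recognized extension struct in a pNext chain (skipping unknown sTypes), and FreePnextChain() walks a chain produced by SafePnextCopy() and deletes each node. The sketch below is illustrative only and is not part of the patch; the wrapper type and field names are hypothetical, and the include path for the generated header is assumed.

```cpp
// Minimal sketch, assuming vk_safe_struct.h is on the include path.
#include <vulkan/vulkan.h>
#include "vk_safe_struct.h"

struct RetainedImageCreateState {          // hypothetical bookkeeping record
    VkImageCreateInfo create_info{};       // shallow copy of the caller's struct
    void *pnext_copy = nullptr;            // deep copy of its extension chain

    explicit RetainedImageCreateState(const VkImageCreateInfo *in) {
        create_info = *in;
        // Clone every recognized extension struct so the chain outlives the call.
        pnext_copy = SafePnextCopy(in->pNext);
        create_info.pNext = pnext_copy;
    }
    ~RetainedImageCreateState() {
        // Release the cloned chain with the matching free routine.
        FreePnextChain(pnext_copy);
    }
};
```

Because FreePnextChain() asserts on sTypes it does not recognize, it should only ever be handed chains that came from SafePnextCopy(), never a caller-supplied chain directly.
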
diff --git a/src/third_party/vulkan-validation-layers/src/layers/generated/vk_safe_struct.h b/src/third_party/vulkan-validation-layers/src/layers/generated/vk_safe_struct.h
new file mode 100644
index 0000000..b92fff8
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/generated/vk_safe_struct.h
@@ -0,0 +1,7036 @@
+// *** THIS FILE IS GENERATED - DO NOT EDIT ***
+// See helper_file_generator.py for modifications
+
+
+/***************************************************************************
+ *
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
+ * Author: Tobin Ehlis <tobine@google.com>
+ * Author: Chris Forbes <chrisforbes@google.com>
+ * Author: John Zulauf<jzulauf@lunarg.com>
+ *
+ ****************************************************************************/
+
+
+#pragma once
+#include <vulkan/vulkan.h>
+
+void *SafePnextCopy(const void *pNext);
+void FreePnextChain(const void *pNext);
+char *SafeStringCopy(const char *in_string);
+
+
+struct safe_VkApplicationInfo {
+    VkStructureType sType;
+    const void* pNext;
+    const char* pApplicationName;
+    uint32_t applicationVersion;
+    const char* pEngineName;
+    uint32_t engineVersion;
+    uint32_t apiVersion;
+    safe_VkApplicationInfo(const VkApplicationInfo* in_struct);
+    safe_VkApplicationInfo(const safe_VkApplicationInfo& src);
+    safe_VkApplicationInfo& operator=(const safe_VkApplicationInfo& src);
+    safe_VkApplicationInfo();
+    ~safe_VkApplicationInfo();
+    void initialize(const VkApplicationInfo* in_struct);
+    void initialize(const safe_VkApplicationInfo* src);
+    VkApplicationInfo *ptr() { return reinterpret_cast<VkApplicationInfo *>(this); }
+    VkApplicationInfo const *ptr() const { return reinterpret_cast<VkApplicationInfo const *>(this); }
+};
+
+struct safe_VkInstanceCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkInstanceCreateFlags flags;
+    safe_VkApplicationInfo* pApplicationInfo;
+    uint32_t enabledLayerCount;
+    const char* const* ppEnabledLayerNames;
+    uint32_t enabledExtensionCount;
+    const char* const* ppEnabledExtensionNames;
+    safe_VkInstanceCreateInfo(const VkInstanceCreateInfo* in_struct);
+    safe_VkInstanceCreateInfo(const safe_VkInstanceCreateInfo& src);
+    safe_VkInstanceCreateInfo& operator=(const safe_VkInstanceCreateInfo& src);
+    safe_VkInstanceCreateInfo();
+    ~safe_VkInstanceCreateInfo();
+    void initialize(const VkInstanceCreateInfo* in_struct);
+    void initialize(const safe_VkInstanceCreateInfo* src);
+    VkInstanceCreateInfo *ptr() { return reinterpret_cast<VkInstanceCreateInfo *>(this); }
+    VkInstanceCreateInfo const *ptr() const { return reinterpret_cast<VkInstanceCreateInfo const *>(this); }
+};
+
+struct safe_VkAllocationCallbacks {
+    void* pUserData;
+    PFN_vkAllocationFunction pfnAllocation;
+    PFN_vkReallocationFunction pfnReallocation;
+    PFN_vkFreeFunction pfnFree;
+    PFN_vkInternalAllocationNotification pfnInternalAllocation;
+    PFN_vkInternalFreeNotification pfnInternalFree;
+    safe_VkAllocationCallbacks(const VkAllocationCallbacks* in_struct);
+    safe_VkAllocationCallbacks(const safe_VkAllocationCallbacks& src);
+    safe_VkAllocationCallbacks& operator=(const safe_VkAllocationCallbacks& src);
+    safe_VkAllocationCallbacks();
+    ~safe_VkAllocationCallbacks();
+    void initialize(const VkAllocationCallbacks* in_struct);
+    void initialize(const safe_VkAllocationCallbacks* src);
+    VkAllocationCallbacks *ptr() { return reinterpret_cast<VkAllocationCallbacks *>(this); }
+    VkAllocationCallbacks const *ptr() const { return reinterpret_cast<VkAllocationCallbacks const *>(this); }
+};
+
+struct safe_VkDeviceQueueCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkDeviceQueueCreateFlags flags;
+    uint32_t queueFamilyIndex;
+    uint32_t queueCount;
+    const float* pQueuePriorities;
+    safe_VkDeviceQueueCreateInfo(const VkDeviceQueueCreateInfo* in_struct);
+    safe_VkDeviceQueueCreateInfo(const safe_VkDeviceQueueCreateInfo& src);
+    safe_VkDeviceQueueCreateInfo& operator=(const safe_VkDeviceQueueCreateInfo& src);
+    safe_VkDeviceQueueCreateInfo();
+    ~safe_VkDeviceQueueCreateInfo();
+    void initialize(const VkDeviceQueueCreateInfo* in_struct);
+    void initialize(const safe_VkDeviceQueueCreateInfo* src);
+    VkDeviceQueueCreateInfo *ptr() { return reinterpret_cast<VkDeviceQueueCreateInfo *>(this); }
+    VkDeviceQueueCreateInfo const *ptr() const { return reinterpret_cast<VkDeviceQueueCreateInfo const *>(this); }
+};
+
+struct safe_VkDeviceCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkDeviceCreateFlags flags;
+    uint32_t queueCreateInfoCount;
+    safe_VkDeviceQueueCreateInfo* pQueueCreateInfos;
+    uint32_t enabledLayerCount;
+    const char* const* ppEnabledLayerNames;
+    uint32_t enabledExtensionCount;
+    const char* const* ppEnabledExtensionNames;
+    const VkPhysicalDeviceFeatures* pEnabledFeatures;
+    safe_VkDeviceCreateInfo(const VkDeviceCreateInfo* in_struct);
+    safe_VkDeviceCreateInfo(const safe_VkDeviceCreateInfo& src);
+    safe_VkDeviceCreateInfo& operator=(const safe_VkDeviceCreateInfo& src);
+    safe_VkDeviceCreateInfo();
+    ~safe_VkDeviceCreateInfo();
+    void initialize(const VkDeviceCreateInfo* in_struct);
+    void initialize(const safe_VkDeviceCreateInfo* src);
+    VkDeviceCreateInfo *ptr() { return reinterpret_cast<VkDeviceCreateInfo *>(this); }
+    VkDeviceCreateInfo const *ptr() const { return reinterpret_cast<VkDeviceCreateInfo const *>(this); }
+};
+
+struct safe_VkSubmitInfo {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t waitSemaphoreCount;
+    VkSemaphore* pWaitSemaphores;
+    const VkPipelineStageFlags* pWaitDstStageMask;
+    uint32_t commandBufferCount;
+    VkCommandBuffer* pCommandBuffers;
+    uint32_t signalSemaphoreCount;
+    VkSemaphore* pSignalSemaphores;
+    safe_VkSubmitInfo(const VkSubmitInfo* in_struct);
+    safe_VkSubmitInfo(const safe_VkSubmitInfo& src);
+    safe_VkSubmitInfo& operator=(const safe_VkSubmitInfo& src);
+    safe_VkSubmitInfo();
+    ~safe_VkSubmitInfo();
+    void initialize(const VkSubmitInfo* in_struct);
+    void initialize(const safe_VkSubmitInfo* src);
+    VkSubmitInfo *ptr() { return reinterpret_cast<VkSubmitInfo *>(this); }
+    VkSubmitInfo const *ptr() const { return reinterpret_cast<VkSubmitInfo const *>(this); }
+};
+
+struct safe_VkMemoryAllocateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkDeviceSize allocationSize;
+    uint32_t memoryTypeIndex;
+    safe_VkMemoryAllocateInfo(const VkMemoryAllocateInfo* in_struct);
+    safe_VkMemoryAllocateInfo(const safe_VkMemoryAllocateInfo& src);
+    safe_VkMemoryAllocateInfo& operator=(const safe_VkMemoryAllocateInfo& src);
+    safe_VkMemoryAllocateInfo();
+    ~safe_VkMemoryAllocateInfo();
+    void initialize(const VkMemoryAllocateInfo* in_struct);
+    void initialize(const safe_VkMemoryAllocateInfo* src);
+    VkMemoryAllocateInfo *ptr() { return reinterpret_cast<VkMemoryAllocateInfo *>(this); }
+    VkMemoryAllocateInfo const *ptr() const { return reinterpret_cast<VkMemoryAllocateInfo const *>(this); }
+};
+
+struct safe_VkMappedMemoryRange {
+    VkStructureType sType;
+    const void* pNext;
+    VkDeviceMemory memory;
+    VkDeviceSize offset;
+    VkDeviceSize size;
+    safe_VkMappedMemoryRange(const VkMappedMemoryRange* in_struct);
+    safe_VkMappedMemoryRange(const safe_VkMappedMemoryRange& src);
+    safe_VkMappedMemoryRange& operator=(const safe_VkMappedMemoryRange& src);
+    safe_VkMappedMemoryRange();
+    ~safe_VkMappedMemoryRange();
+    void initialize(const VkMappedMemoryRange* in_struct);
+    void initialize(const safe_VkMappedMemoryRange* src);
+    VkMappedMemoryRange *ptr() { return reinterpret_cast<VkMappedMemoryRange *>(this); }
+    VkMappedMemoryRange const *ptr() const { return reinterpret_cast<VkMappedMemoryRange const *>(this); }
+};
+
+struct safe_VkSparseBufferMemoryBindInfo {
+    VkBuffer buffer;
+    uint32_t bindCount;
+    VkSparseMemoryBind* pBinds;
+    safe_VkSparseBufferMemoryBindInfo(const VkSparseBufferMemoryBindInfo* in_struct);
+    safe_VkSparseBufferMemoryBindInfo(const safe_VkSparseBufferMemoryBindInfo& src);
+    safe_VkSparseBufferMemoryBindInfo& operator=(const safe_VkSparseBufferMemoryBindInfo& src);
+    safe_VkSparseBufferMemoryBindInfo();
+    ~safe_VkSparseBufferMemoryBindInfo();
+    void initialize(const VkSparseBufferMemoryBindInfo* in_struct);
+    void initialize(const safe_VkSparseBufferMemoryBindInfo* src);
+    VkSparseBufferMemoryBindInfo *ptr() { return reinterpret_cast<VkSparseBufferMemoryBindInfo *>(this); }
+    VkSparseBufferMemoryBindInfo const *ptr() const { return reinterpret_cast<VkSparseBufferMemoryBindInfo const *>(this); }
+};
+
+struct safe_VkSparseImageOpaqueMemoryBindInfo {
+    VkImage image;
+    uint32_t bindCount;
+    VkSparseMemoryBind* pBinds;
+    safe_VkSparseImageOpaqueMemoryBindInfo(const VkSparseImageOpaqueMemoryBindInfo* in_struct);
+    safe_VkSparseImageOpaqueMemoryBindInfo(const safe_VkSparseImageOpaqueMemoryBindInfo& src);
+    safe_VkSparseImageOpaqueMemoryBindInfo& operator=(const safe_VkSparseImageOpaqueMemoryBindInfo& src);
+    safe_VkSparseImageOpaqueMemoryBindInfo();
+    ~safe_VkSparseImageOpaqueMemoryBindInfo();
+    void initialize(const VkSparseImageOpaqueMemoryBindInfo* in_struct);
+    void initialize(const safe_VkSparseImageOpaqueMemoryBindInfo* src);
+    VkSparseImageOpaqueMemoryBindInfo *ptr() { return reinterpret_cast<VkSparseImageOpaqueMemoryBindInfo *>(this); }
+    VkSparseImageOpaqueMemoryBindInfo const *ptr() const { return reinterpret_cast<VkSparseImageOpaqueMemoryBindInfo const *>(this); }
+};
+
+struct safe_VkSparseImageMemoryBindInfo {
+    VkImage image;
+    uint32_t bindCount;
+    VkSparseImageMemoryBind* pBinds;
+    safe_VkSparseImageMemoryBindInfo(const VkSparseImageMemoryBindInfo* in_struct);
+    safe_VkSparseImageMemoryBindInfo(const safe_VkSparseImageMemoryBindInfo& src);
+    safe_VkSparseImageMemoryBindInfo& operator=(const safe_VkSparseImageMemoryBindInfo& src);
+    safe_VkSparseImageMemoryBindInfo();
+    ~safe_VkSparseImageMemoryBindInfo();
+    void initialize(const VkSparseImageMemoryBindInfo* in_struct);
+    void initialize(const safe_VkSparseImageMemoryBindInfo* src);
+    VkSparseImageMemoryBindInfo *ptr() { return reinterpret_cast<VkSparseImageMemoryBindInfo *>(this); }
+    VkSparseImageMemoryBindInfo const *ptr() const { return reinterpret_cast<VkSparseImageMemoryBindInfo const *>(this); }
+};
+
+struct safe_VkBindSparseInfo {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t waitSemaphoreCount;
+    VkSemaphore* pWaitSemaphores;
+    uint32_t bufferBindCount;
+    safe_VkSparseBufferMemoryBindInfo* pBufferBinds;
+    uint32_t imageOpaqueBindCount;
+    safe_VkSparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds;
+    uint32_t imageBindCount;
+    safe_VkSparseImageMemoryBindInfo* pImageBinds;
+    uint32_t signalSemaphoreCount;
+    VkSemaphore* pSignalSemaphores;
+    safe_VkBindSparseInfo(const VkBindSparseInfo* in_struct);
+    safe_VkBindSparseInfo(const safe_VkBindSparseInfo& src);
+    safe_VkBindSparseInfo& operator=(const safe_VkBindSparseInfo& src);
+    safe_VkBindSparseInfo();
+    ~safe_VkBindSparseInfo();
+    void initialize(const VkBindSparseInfo* in_struct);
+    void initialize(const safe_VkBindSparseInfo* src);
+    VkBindSparseInfo *ptr() { return reinterpret_cast<VkBindSparseInfo *>(this); }
+    VkBindSparseInfo const *ptr() const { return reinterpret_cast<VkBindSparseInfo const *>(this); }
+};
+
+struct safe_VkFenceCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkFenceCreateFlags flags;
+    safe_VkFenceCreateInfo(const VkFenceCreateInfo* in_struct);
+    safe_VkFenceCreateInfo(const safe_VkFenceCreateInfo& src);
+    safe_VkFenceCreateInfo& operator=(const safe_VkFenceCreateInfo& src);
+    safe_VkFenceCreateInfo();
+    ~safe_VkFenceCreateInfo();
+    void initialize(const VkFenceCreateInfo* in_struct);
+    void initialize(const safe_VkFenceCreateInfo* src);
+    VkFenceCreateInfo *ptr() { return reinterpret_cast<VkFenceCreateInfo *>(this); }
+    VkFenceCreateInfo const *ptr() const { return reinterpret_cast<VkFenceCreateInfo const *>(this); }
+};
+
+struct safe_VkSemaphoreCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkSemaphoreCreateFlags flags;
+    safe_VkSemaphoreCreateInfo(const VkSemaphoreCreateInfo* in_struct);
+    safe_VkSemaphoreCreateInfo(const safe_VkSemaphoreCreateInfo& src);
+    safe_VkSemaphoreCreateInfo& operator=(const safe_VkSemaphoreCreateInfo& src);
+    safe_VkSemaphoreCreateInfo();
+    ~safe_VkSemaphoreCreateInfo();
+    void initialize(const VkSemaphoreCreateInfo* in_struct);
+    void initialize(const safe_VkSemaphoreCreateInfo* src);
+    VkSemaphoreCreateInfo *ptr() { return reinterpret_cast<VkSemaphoreCreateInfo *>(this); }
+    VkSemaphoreCreateInfo const *ptr() const { return reinterpret_cast<VkSemaphoreCreateInfo const *>(this); }
+};
+
+struct safe_VkEventCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkEventCreateFlags flags;
+    safe_VkEventCreateInfo(const VkEventCreateInfo* in_struct);
+    safe_VkEventCreateInfo(const safe_VkEventCreateInfo& src);
+    safe_VkEventCreateInfo& operator=(const safe_VkEventCreateInfo& src);
+    safe_VkEventCreateInfo();
+    ~safe_VkEventCreateInfo();
+    void initialize(const VkEventCreateInfo* in_struct);
+    void initialize(const safe_VkEventCreateInfo* src);
+    VkEventCreateInfo *ptr() { return reinterpret_cast<VkEventCreateInfo *>(this); }
+    VkEventCreateInfo const *ptr() const { return reinterpret_cast<VkEventCreateInfo const *>(this); }
+};
+
+struct safe_VkQueryPoolCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkQueryPoolCreateFlags flags;
+    VkQueryType queryType;
+    uint32_t queryCount;
+    VkQueryPipelineStatisticFlags pipelineStatistics;
+    safe_VkQueryPoolCreateInfo(const VkQueryPoolCreateInfo* in_struct);
+    safe_VkQueryPoolCreateInfo(const safe_VkQueryPoolCreateInfo& src);
+    safe_VkQueryPoolCreateInfo& operator=(const safe_VkQueryPoolCreateInfo& src);
+    safe_VkQueryPoolCreateInfo();
+    ~safe_VkQueryPoolCreateInfo();
+    void initialize(const VkQueryPoolCreateInfo* in_struct);
+    void initialize(const safe_VkQueryPoolCreateInfo* src);
+    VkQueryPoolCreateInfo *ptr() { return reinterpret_cast<VkQueryPoolCreateInfo *>(this); }
+    VkQueryPoolCreateInfo const *ptr() const { return reinterpret_cast<VkQueryPoolCreateInfo const *>(this); }
+};
+
+struct safe_VkBufferCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkBufferCreateFlags flags;
+    VkDeviceSize size;
+    VkBufferUsageFlags usage;
+    VkSharingMode sharingMode;
+    uint32_t queueFamilyIndexCount;
+    const uint32_t* pQueueFamilyIndices;
+    safe_VkBufferCreateInfo(const VkBufferCreateInfo* in_struct);
+    safe_VkBufferCreateInfo(const safe_VkBufferCreateInfo& src);
+    safe_VkBufferCreateInfo& operator=(const safe_VkBufferCreateInfo& src);
+    safe_VkBufferCreateInfo();
+    ~safe_VkBufferCreateInfo();
+    void initialize(const VkBufferCreateInfo* in_struct);
+    void initialize(const safe_VkBufferCreateInfo* src);
+    VkBufferCreateInfo *ptr() { return reinterpret_cast<VkBufferCreateInfo *>(this); }
+    VkBufferCreateInfo const *ptr() const { return reinterpret_cast<VkBufferCreateInfo const *>(this); }
+};
+
+struct safe_VkBufferViewCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkBufferViewCreateFlags flags;
+    VkBuffer buffer;
+    VkFormat format;
+    VkDeviceSize offset;
+    VkDeviceSize range;
+    safe_VkBufferViewCreateInfo(const VkBufferViewCreateInfo* in_struct);
+    safe_VkBufferViewCreateInfo(const safe_VkBufferViewCreateInfo& src);
+    safe_VkBufferViewCreateInfo& operator=(const safe_VkBufferViewCreateInfo& src);
+    safe_VkBufferViewCreateInfo();
+    ~safe_VkBufferViewCreateInfo();
+    void initialize(const VkBufferViewCreateInfo* in_struct);
+    void initialize(const safe_VkBufferViewCreateInfo* src);
+    VkBufferViewCreateInfo *ptr() { return reinterpret_cast<VkBufferViewCreateInfo *>(this); }
+    VkBufferViewCreateInfo const *ptr() const { return reinterpret_cast<VkBufferViewCreateInfo const *>(this); }
+};
+
+struct safe_VkImageCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkImageCreateFlags flags;
+    VkImageType imageType;
+    VkFormat format;
+    VkExtent3D extent;
+    uint32_t mipLevels;
+    uint32_t arrayLayers;
+    VkSampleCountFlagBits samples;
+    VkImageTiling tiling;
+    VkImageUsageFlags usage;
+    VkSharingMode sharingMode;
+    uint32_t queueFamilyIndexCount;
+    const uint32_t* pQueueFamilyIndices;
+    VkImageLayout initialLayout;
+    safe_VkImageCreateInfo(const VkImageCreateInfo* in_struct);
+    safe_VkImageCreateInfo(const safe_VkImageCreateInfo& src);
+    safe_VkImageCreateInfo& operator=(const safe_VkImageCreateInfo& src);
+    safe_VkImageCreateInfo();
+    ~safe_VkImageCreateInfo();
+    void initialize(const VkImageCreateInfo* in_struct);
+    void initialize(const safe_VkImageCreateInfo* src);
+    VkImageCreateInfo *ptr() { return reinterpret_cast<VkImageCreateInfo *>(this); }
+    VkImageCreateInfo const *ptr() const { return reinterpret_cast<VkImageCreateInfo const *>(this); }
+};
+
+struct safe_VkImageViewCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkImageViewCreateFlags flags;
+    VkImage image;
+    VkImageViewType viewType;
+    VkFormat format;
+    VkComponentMapping components;
+    VkImageSubresourceRange subresourceRange;
+    safe_VkImageViewCreateInfo(const VkImageViewCreateInfo* in_struct);
+    safe_VkImageViewCreateInfo(const safe_VkImageViewCreateInfo& src);
+    safe_VkImageViewCreateInfo& operator=(const safe_VkImageViewCreateInfo& src);
+    safe_VkImageViewCreateInfo();
+    ~safe_VkImageViewCreateInfo();
+    void initialize(const VkImageViewCreateInfo* in_struct);
+    void initialize(const safe_VkImageViewCreateInfo* src);
+    VkImageViewCreateInfo *ptr() { return reinterpret_cast<VkImageViewCreateInfo *>(this); }
+    VkImageViewCreateInfo const *ptr() const { return reinterpret_cast<VkImageViewCreateInfo const *>(this); }
+};
+
+struct safe_VkShaderModuleCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkShaderModuleCreateFlags flags;
+    size_t codeSize;
+    const uint32_t* pCode;
+    safe_VkShaderModuleCreateInfo(const VkShaderModuleCreateInfo* in_struct);
+    safe_VkShaderModuleCreateInfo(const safe_VkShaderModuleCreateInfo& src);
+    safe_VkShaderModuleCreateInfo& operator=(const safe_VkShaderModuleCreateInfo& src);
+    safe_VkShaderModuleCreateInfo();
+    ~safe_VkShaderModuleCreateInfo();
+    void initialize(const VkShaderModuleCreateInfo* in_struct);
+    void initialize(const safe_VkShaderModuleCreateInfo* src);
+    VkShaderModuleCreateInfo *ptr() { return reinterpret_cast<VkShaderModuleCreateInfo *>(this); }
+    VkShaderModuleCreateInfo const *ptr() const { return reinterpret_cast<VkShaderModuleCreateInfo const *>(this); }
+};
+
+struct safe_VkPipelineCacheCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkPipelineCacheCreateFlags flags;
+    size_t initialDataSize;
+    const void* pInitialData;
+    safe_VkPipelineCacheCreateInfo(const VkPipelineCacheCreateInfo* in_struct);
+    safe_VkPipelineCacheCreateInfo(const safe_VkPipelineCacheCreateInfo& src);
+    safe_VkPipelineCacheCreateInfo& operator=(const safe_VkPipelineCacheCreateInfo& src);
+    safe_VkPipelineCacheCreateInfo();
+    ~safe_VkPipelineCacheCreateInfo();
+    void initialize(const VkPipelineCacheCreateInfo* in_struct);
+    void initialize(const safe_VkPipelineCacheCreateInfo* src);
+    VkPipelineCacheCreateInfo *ptr() { return reinterpret_cast<VkPipelineCacheCreateInfo *>(this); }
+    VkPipelineCacheCreateInfo const *ptr() const { return reinterpret_cast<VkPipelineCacheCreateInfo const *>(this); }
+};
+
+struct safe_VkSpecializationInfo {
+    uint32_t mapEntryCount;
+    const VkSpecializationMapEntry* pMapEntries;
+    size_t dataSize;
+    const void* pData;
+    safe_VkSpecializationInfo(const VkSpecializationInfo* in_struct);
+    safe_VkSpecializationInfo(const safe_VkSpecializationInfo& src);
+    safe_VkSpecializationInfo& operator=(const safe_VkSpecializationInfo& src);
+    safe_VkSpecializationInfo();
+    ~safe_VkSpecializationInfo();
+    void initialize(const VkSpecializationInfo* in_struct);
+    void initialize(const safe_VkSpecializationInfo* src);
+    VkSpecializationInfo *ptr() { return reinterpret_cast<VkSpecializationInfo *>(this); }
+    VkSpecializationInfo const *ptr() const { return reinterpret_cast<VkSpecializationInfo const *>(this); }
+};
+
+struct safe_VkPipelineShaderStageCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkPipelineShaderStageCreateFlags flags;
+    VkShaderStageFlagBits stage;
+    VkShaderModule module;
+    const char* pName;
+    safe_VkSpecializationInfo* pSpecializationInfo;
+    safe_VkPipelineShaderStageCreateInfo(const VkPipelineShaderStageCreateInfo* in_struct);
+    safe_VkPipelineShaderStageCreateInfo(const safe_VkPipelineShaderStageCreateInfo& src);
+    safe_VkPipelineShaderStageCreateInfo& operator=(const safe_VkPipelineShaderStageCreateInfo& src);
+    safe_VkPipelineShaderStageCreateInfo();
+    ~safe_VkPipelineShaderStageCreateInfo();
+    void initialize(const VkPipelineShaderStageCreateInfo* in_struct);
+    void initialize(const safe_VkPipelineShaderStageCreateInfo* src);
+    VkPipelineShaderStageCreateInfo *ptr() { return reinterpret_cast<VkPipelineShaderStageCreateInfo *>(this); }
+    VkPipelineShaderStageCreateInfo const *ptr() const { return reinterpret_cast<VkPipelineShaderStageCreateInfo const *>(this); }
+};
+
+struct safe_VkPipelineVertexInputStateCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkPipelineVertexInputStateCreateFlags flags;
+    uint32_t vertexBindingDescriptionCount;
+    const VkVertexInputBindingDescription* pVertexBindingDescriptions;
+    uint32_t vertexAttributeDescriptionCount;
+    const VkVertexInputAttributeDescription* pVertexAttributeDescriptions;
+    safe_VkPipelineVertexInputStateCreateInfo(const VkPipelineVertexInputStateCreateInfo* in_struct);
+    safe_VkPipelineVertexInputStateCreateInfo(const safe_VkPipelineVertexInputStateCreateInfo& src);
+    safe_VkPipelineVertexInputStateCreateInfo& operator=(const safe_VkPipelineVertexInputStateCreateInfo& src);
+    safe_VkPipelineVertexInputStateCreateInfo();
+    ~safe_VkPipelineVertexInputStateCreateInfo();
+    void initialize(const VkPipelineVertexInputStateCreateInfo* in_struct);
+    void initialize(const safe_VkPipelineVertexInputStateCreateInfo* src);
+    VkPipelineVertexInputStateCreateInfo *ptr() { return reinterpret_cast<VkPipelineVertexInputStateCreateInfo *>(this); }
+    VkPipelineVertexInputStateCreateInfo const *ptr() const { return reinterpret_cast<VkPipelineVertexInputStateCreateInfo const *>(this); }
+};
+
+struct safe_VkPipelineInputAssemblyStateCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkPipelineInputAssemblyStateCreateFlags flags;
+    VkPrimitiveTopology topology;
+    VkBool32 primitiveRestartEnable;
+    safe_VkPipelineInputAssemblyStateCreateInfo(const VkPipelineInputAssemblyStateCreateInfo* in_struct);
+    safe_VkPipelineInputAssemblyStateCreateInfo(const safe_VkPipelineInputAssemblyStateCreateInfo& src);
+    safe_VkPipelineInputAssemblyStateCreateInfo& operator=(const safe_VkPipelineInputAssemblyStateCreateInfo& src);
+    safe_VkPipelineInputAssemblyStateCreateInfo();
+    ~safe_VkPipelineInputAssemblyStateCreateInfo();
+    void initialize(const VkPipelineInputAssemblyStateCreateInfo* in_struct);
+    void initialize(const safe_VkPipelineInputAssemblyStateCreateInfo* src);
+    VkPipelineInputAssemblyStateCreateInfo *ptr() { return reinterpret_cast<VkPipelineInputAssemblyStateCreateInfo *>(this); }
+    VkPipelineInputAssemblyStateCreateInfo const *ptr() const { return reinterpret_cast<VkPipelineInputAssemblyStateCreateInfo const *>(this); }
+};
+
+struct safe_VkPipelineTessellationStateCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkPipelineTessellationStateCreateFlags flags;
+    uint32_t patchControlPoints;
+    safe_VkPipelineTessellationStateCreateInfo(const VkPipelineTessellationStateCreateInfo* in_struct);
+    safe_VkPipelineTessellationStateCreateInfo(const safe_VkPipelineTessellationStateCreateInfo& src);
+    safe_VkPipelineTessellationStateCreateInfo& operator=(const safe_VkPipelineTessellationStateCreateInfo& src);
+    safe_VkPipelineTessellationStateCreateInfo();
+    ~safe_VkPipelineTessellationStateCreateInfo();
+    void initialize(const VkPipelineTessellationStateCreateInfo* in_struct);
+    void initialize(const safe_VkPipelineTessellationStateCreateInfo* src);
+    VkPipelineTessellationStateCreateInfo *ptr() { return reinterpret_cast<VkPipelineTessellationStateCreateInfo *>(this); }
+    VkPipelineTessellationStateCreateInfo const *ptr() const { return reinterpret_cast<VkPipelineTessellationStateCreateInfo const *>(this); }
+};
+
+struct safe_VkPipelineViewportStateCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkPipelineViewportStateCreateFlags flags;
+    uint32_t viewportCount;
+    const VkViewport* pViewports;
+    uint32_t scissorCount;
+    const VkRect2D* pScissors;
+    safe_VkPipelineViewportStateCreateInfo(const VkPipelineViewportStateCreateInfo* in_struct, const bool is_dynamic_viewports, const bool is_dynamic_scissors);
+    safe_VkPipelineViewportStateCreateInfo(const safe_VkPipelineViewportStateCreateInfo& src);
+    safe_VkPipelineViewportStateCreateInfo& operator=(const safe_VkPipelineViewportStateCreateInfo& src);
+    safe_VkPipelineViewportStateCreateInfo();
+    ~safe_VkPipelineViewportStateCreateInfo();
+    void initialize(const VkPipelineViewportStateCreateInfo* in_struct, const bool is_dynamic_viewports, const bool is_dynamic_scissors);
+    void initialize(const safe_VkPipelineViewportStateCreateInfo* src);
+    VkPipelineViewportStateCreateInfo *ptr() { return reinterpret_cast<VkPipelineViewportStateCreateInfo *>(this); }
+    VkPipelineViewportStateCreateInfo const *ptr() const { return reinterpret_cast<VkPipelineViewportStateCreateInfo const *>(this); }
+};
+
+struct safe_VkPipelineRasterizationStateCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkPipelineRasterizationStateCreateFlags flags;
+    VkBool32 depthClampEnable;
+    VkBool32 rasterizerDiscardEnable;
+    VkPolygonMode polygonMode;
+    VkCullModeFlags cullMode;
+    VkFrontFace frontFace;
+    VkBool32 depthBiasEnable;
+    float depthBiasConstantFactor;
+    float depthBiasClamp;
+    float depthBiasSlopeFactor;
+    float lineWidth;
+    safe_VkPipelineRasterizationStateCreateInfo(const VkPipelineRasterizationStateCreateInfo* in_struct);
+    safe_VkPipelineRasterizationStateCreateInfo(const safe_VkPipelineRasterizationStateCreateInfo& src);
+    safe_VkPipelineRasterizationStateCreateInfo& operator=(const safe_VkPipelineRasterizationStateCreateInfo& src);
+    safe_VkPipelineRasterizationStateCreateInfo();
+    ~safe_VkPipelineRasterizationStateCreateInfo();
+    void initialize(const VkPipelineRasterizationStateCreateInfo* in_struct);
+    void initialize(const safe_VkPipelineRasterizationStateCreateInfo* src);
+    VkPipelineRasterizationStateCreateInfo *ptr() { return reinterpret_cast<VkPipelineRasterizationStateCreateInfo *>(this); }
+    VkPipelineRasterizationStateCreateInfo const *ptr() const { return reinterpret_cast<VkPipelineRasterizationStateCreateInfo const *>(this); }
+};
+
+struct safe_VkPipelineMultisampleStateCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkPipelineMultisampleStateCreateFlags flags;
+    VkSampleCountFlagBits rasterizationSamples;
+    VkBool32 sampleShadingEnable;
+    float minSampleShading;
+    const VkSampleMask* pSampleMask;
+    VkBool32 alphaToCoverageEnable;
+    VkBool32 alphaToOneEnable;
+    safe_VkPipelineMultisampleStateCreateInfo(const VkPipelineMultisampleStateCreateInfo* in_struct);
+    safe_VkPipelineMultisampleStateCreateInfo(const safe_VkPipelineMultisampleStateCreateInfo& src);
+    safe_VkPipelineMultisampleStateCreateInfo& operator=(const safe_VkPipelineMultisampleStateCreateInfo& src);
+    safe_VkPipelineMultisampleStateCreateInfo();
+    ~safe_VkPipelineMultisampleStateCreateInfo();
+    void initialize(const VkPipelineMultisampleStateCreateInfo* in_struct);
+    void initialize(const safe_VkPipelineMultisampleStateCreateInfo* src);
+    VkPipelineMultisampleStateCreateInfo *ptr() { return reinterpret_cast<VkPipelineMultisampleStateCreateInfo *>(this); }
+    VkPipelineMultisampleStateCreateInfo const *ptr() const { return reinterpret_cast<VkPipelineMultisampleStateCreateInfo const *>(this); }
+};
+
+struct safe_VkPipelineDepthStencilStateCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkPipelineDepthStencilStateCreateFlags flags;
+    VkBool32 depthTestEnable;
+    VkBool32 depthWriteEnable;
+    VkCompareOp depthCompareOp;
+    VkBool32 depthBoundsTestEnable;
+    VkBool32 stencilTestEnable;
+    VkStencilOpState front;
+    VkStencilOpState back;
+    float minDepthBounds;
+    float maxDepthBounds;
+    safe_VkPipelineDepthStencilStateCreateInfo(const VkPipelineDepthStencilStateCreateInfo* in_struct);
+    safe_VkPipelineDepthStencilStateCreateInfo(const safe_VkPipelineDepthStencilStateCreateInfo& src);
+    safe_VkPipelineDepthStencilStateCreateInfo& operator=(const safe_VkPipelineDepthStencilStateCreateInfo& src);
+    safe_VkPipelineDepthStencilStateCreateInfo();
+    ~safe_VkPipelineDepthStencilStateCreateInfo();
+    void initialize(const VkPipelineDepthStencilStateCreateInfo* in_struct);
+    void initialize(const safe_VkPipelineDepthStencilStateCreateInfo* src);
+    VkPipelineDepthStencilStateCreateInfo *ptr() { return reinterpret_cast<VkPipelineDepthStencilStateCreateInfo *>(this); }
+    VkPipelineDepthStencilStateCreateInfo const *ptr() const { return reinterpret_cast<VkPipelineDepthStencilStateCreateInfo const *>(this); }
+};
+
+struct safe_VkPipelineColorBlendStateCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkPipelineColorBlendStateCreateFlags flags;
+    VkBool32 logicOpEnable;
+    VkLogicOp logicOp;
+    uint32_t attachmentCount;
+    const VkPipelineColorBlendAttachmentState* pAttachments;
+    float blendConstants[4];
+    safe_VkPipelineColorBlendStateCreateInfo(const VkPipelineColorBlendStateCreateInfo* in_struct);
+    safe_VkPipelineColorBlendStateCreateInfo(const safe_VkPipelineColorBlendStateCreateInfo& src);
+    safe_VkPipelineColorBlendStateCreateInfo& operator=(const safe_VkPipelineColorBlendStateCreateInfo& src);
+    safe_VkPipelineColorBlendStateCreateInfo();
+    ~safe_VkPipelineColorBlendStateCreateInfo();
+    void initialize(const VkPipelineColorBlendStateCreateInfo* in_struct);
+    void initialize(const safe_VkPipelineColorBlendStateCreateInfo* src);
+    VkPipelineColorBlendStateCreateInfo *ptr() { return reinterpret_cast<VkPipelineColorBlendStateCreateInfo *>(this); }
+    VkPipelineColorBlendStateCreateInfo const *ptr() const { return reinterpret_cast<VkPipelineColorBlendStateCreateInfo const *>(this); }
+};
+
+struct safe_VkPipelineDynamicStateCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkPipelineDynamicStateCreateFlags flags;
+    uint32_t dynamicStateCount;
+    const VkDynamicState* pDynamicStates;
+    safe_VkPipelineDynamicStateCreateInfo(const VkPipelineDynamicStateCreateInfo* in_struct);
+    safe_VkPipelineDynamicStateCreateInfo(const safe_VkPipelineDynamicStateCreateInfo& src);
+    safe_VkPipelineDynamicStateCreateInfo& operator=(const safe_VkPipelineDynamicStateCreateInfo& src);
+    safe_VkPipelineDynamicStateCreateInfo();
+    ~safe_VkPipelineDynamicStateCreateInfo();
+    void initialize(const VkPipelineDynamicStateCreateInfo* in_struct);
+    void initialize(const safe_VkPipelineDynamicStateCreateInfo* src);
+    VkPipelineDynamicStateCreateInfo *ptr() { return reinterpret_cast<VkPipelineDynamicStateCreateInfo *>(this); }
+    VkPipelineDynamicStateCreateInfo const *ptr() const { return reinterpret_cast<VkPipelineDynamicStateCreateInfo const *>(this); }
+};
+
+struct safe_VkGraphicsPipelineCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkPipelineCreateFlags flags;
+    uint32_t stageCount;
+    safe_VkPipelineShaderStageCreateInfo* pStages;
+    safe_VkPipelineVertexInputStateCreateInfo* pVertexInputState;
+    safe_VkPipelineInputAssemblyStateCreateInfo* pInputAssemblyState;
+    safe_VkPipelineTessellationStateCreateInfo* pTessellationState;
+    safe_VkPipelineViewportStateCreateInfo* pViewportState;
+    safe_VkPipelineRasterizationStateCreateInfo* pRasterizationState;
+    safe_VkPipelineMultisampleStateCreateInfo* pMultisampleState;
+    safe_VkPipelineDepthStencilStateCreateInfo* pDepthStencilState;
+    safe_VkPipelineColorBlendStateCreateInfo* pColorBlendState;
+    safe_VkPipelineDynamicStateCreateInfo* pDynamicState;
+    VkPipelineLayout layout;
+    VkRenderPass renderPass;
+    uint32_t subpass;
+    VkPipeline basePipelineHandle;
+    int32_t basePipelineIndex;
+    safe_VkGraphicsPipelineCreateInfo(const VkGraphicsPipelineCreateInfo* in_struct, const bool uses_color_attachment, const bool uses_depthstencil_attachment);
+    safe_VkGraphicsPipelineCreateInfo(const safe_VkGraphicsPipelineCreateInfo& src);
+    safe_VkGraphicsPipelineCreateInfo& operator=(const safe_VkGraphicsPipelineCreateInfo& src);
+    safe_VkGraphicsPipelineCreateInfo();
+    ~safe_VkGraphicsPipelineCreateInfo();
+    void initialize(const VkGraphicsPipelineCreateInfo* in_struct, const bool uses_color_attachment, const bool uses_depthstencil_attachment);
+    void initialize(const safe_VkGraphicsPipelineCreateInfo* src);
+    VkGraphicsPipelineCreateInfo *ptr() { return reinterpret_cast<VkGraphicsPipelineCreateInfo *>(this); }
+    VkGraphicsPipelineCreateInfo const *ptr() const { return reinterpret_cast<VkGraphicsPipelineCreateInfo const *>(this); }
+};
+
+struct safe_VkComputePipelineCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkPipelineCreateFlags flags;
+    safe_VkPipelineShaderStageCreateInfo stage;
+    VkPipelineLayout layout;
+    VkPipeline basePipelineHandle;
+    int32_t basePipelineIndex;
+    safe_VkComputePipelineCreateInfo(const VkComputePipelineCreateInfo* in_struct);
+    safe_VkComputePipelineCreateInfo(const safe_VkComputePipelineCreateInfo& src);
+    safe_VkComputePipelineCreateInfo& operator=(const safe_VkComputePipelineCreateInfo& src);
+    safe_VkComputePipelineCreateInfo();
+    ~safe_VkComputePipelineCreateInfo();
+    void initialize(const VkComputePipelineCreateInfo* in_struct);
+    void initialize(const safe_VkComputePipelineCreateInfo* src);
+    VkComputePipelineCreateInfo *ptr() { return reinterpret_cast<VkComputePipelineCreateInfo *>(this); }
+    VkComputePipelineCreateInfo const *ptr() const { return reinterpret_cast<VkComputePipelineCreateInfo const *>(this); }
+};
+
+struct safe_VkPipelineLayoutCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkPipelineLayoutCreateFlags flags;
+    uint32_t setLayoutCount;
+    VkDescriptorSetLayout* pSetLayouts;
+    uint32_t pushConstantRangeCount;
+    const VkPushConstantRange* pPushConstantRanges;
+    safe_VkPipelineLayoutCreateInfo(const VkPipelineLayoutCreateInfo* in_struct);
+    safe_VkPipelineLayoutCreateInfo(const safe_VkPipelineLayoutCreateInfo& src);
+    safe_VkPipelineLayoutCreateInfo& operator=(const safe_VkPipelineLayoutCreateInfo& src);
+    safe_VkPipelineLayoutCreateInfo();
+    ~safe_VkPipelineLayoutCreateInfo();
+    void initialize(const VkPipelineLayoutCreateInfo* in_struct);
+    void initialize(const safe_VkPipelineLayoutCreateInfo* src);
+    VkPipelineLayoutCreateInfo *ptr() { return reinterpret_cast<VkPipelineLayoutCreateInfo *>(this); }
+    VkPipelineLayoutCreateInfo const *ptr() const { return reinterpret_cast<VkPipelineLayoutCreateInfo const *>(this); }
+};
+
+struct safe_VkSamplerCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkSamplerCreateFlags flags;
+    VkFilter magFilter;
+    VkFilter minFilter;
+    VkSamplerMipmapMode mipmapMode;
+    VkSamplerAddressMode addressModeU;
+    VkSamplerAddressMode addressModeV;
+    VkSamplerAddressMode addressModeW;
+    float mipLodBias;
+    VkBool32 anisotropyEnable;
+    float maxAnisotropy;
+    VkBool32 compareEnable;
+    VkCompareOp compareOp;
+    float minLod;
+    float maxLod;
+    VkBorderColor borderColor;
+    VkBool32 unnormalizedCoordinates;
+    safe_VkSamplerCreateInfo(const VkSamplerCreateInfo* in_struct);
+    safe_VkSamplerCreateInfo(const safe_VkSamplerCreateInfo& src);
+    safe_VkSamplerCreateInfo& operator=(const safe_VkSamplerCreateInfo& src);
+    safe_VkSamplerCreateInfo();
+    ~safe_VkSamplerCreateInfo();
+    void initialize(const VkSamplerCreateInfo* in_struct);
+    void initialize(const safe_VkSamplerCreateInfo* src);
+    VkSamplerCreateInfo *ptr() { return reinterpret_cast<VkSamplerCreateInfo *>(this); }
+    VkSamplerCreateInfo const *ptr() const { return reinterpret_cast<VkSamplerCreateInfo const *>(this); }
+};
+
+struct safe_VkDescriptorSetLayoutBinding {
+    uint32_t binding;
+    VkDescriptorType descriptorType;
+    uint32_t descriptorCount;
+    VkShaderStageFlags stageFlags;
+    VkSampler* pImmutableSamplers;
+    safe_VkDescriptorSetLayoutBinding(const VkDescriptorSetLayoutBinding* in_struct);
+    safe_VkDescriptorSetLayoutBinding(const safe_VkDescriptorSetLayoutBinding& src);
+    safe_VkDescriptorSetLayoutBinding& operator=(const safe_VkDescriptorSetLayoutBinding& src);
+    safe_VkDescriptorSetLayoutBinding();
+    ~safe_VkDescriptorSetLayoutBinding();
+    void initialize(const VkDescriptorSetLayoutBinding* in_struct);
+    void initialize(const safe_VkDescriptorSetLayoutBinding* src);
+    VkDescriptorSetLayoutBinding *ptr() { return reinterpret_cast<VkDescriptorSetLayoutBinding *>(this); }
+    VkDescriptorSetLayoutBinding const *ptr() const { return reinterpret_cast<VkDescriptorSetLayoutBinding const *>(this); }
+};
+
+struct safe_VkDescriptorSetLayoutCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkDescriptorSetLayoutCreateFlags flags;
+    uint32_t bindingCount;
+    safe_VkDescriptorSetLayoutBinding* pBindings;
+    safe_VkDescriptorSetLayoutCreateInfo(const VkDescriptorSetLayoutCreateInfo* in_struct);
+    safe_VkDescriptorSetLayoutCreateInfo(const safe_VkDescriptorSetLayoutCreateInfo& src);
+    safe_VkDescriptorSetLayoutCreateInfo& operator=(const safe_VkDescriptorSetLayoutCreateInfo& src);
+    safe_VkDescriptorSetLayoutCreateInfo();
+    ~safe_VkDescriptorSetLayoutCreateInfo();
+    void initialize(const VkDescriptorSetLayoutCreateInfo* in_struct);
+    void initialize(const safe_VkDescriptorSetLayoutCreateInfo* src);
+    VkDescriptorSetLayoutCreateInfo *ptr() { return reinterpret_cast<VkDescriptorSetLayoutCreateInfo *>(this); }
+    VkDescriptorSetLayoutCreateInfo const *ptr() const { return reinterpret_cast<VkDescriptorSetLayoutCreateInfo const *>(this); }
+};
+
+struct safe_VkDescriptorPoolCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkDescriptorPoolCreateFlags flags;
+    uint32_t maxSets;
+    uint32_t poolSizeCount;
+    const VkDescriptorPoolSize* pPoolSizes;
+    safe_VkDescriptorPoolCreateInfo(const VkDescriptorPoolCreateInfo* in_struct);
+    safe_VkDescriptorPoolCreateInfo(const safe_VkDescriptorPoolCreateInfo& src);
+    safe_VkDescriptorPoolCreateInfo& operator=(const safe_VkDescriptorPoolCreateInfo& src);
+    safe_VkDescriptorPoolCreateInfo();
+    ~safe_VkDescriptorPoolCreateInfo();
+    void initialize(const VkDescriptorPoolCreateInfo* in_struct);
+    void initialize(const safe_VkDescriptorPoolCreateInfo* src);
+    VkDescriptorPoolCreateInfo *ptr() { return reinterpret_cast<VkDescriptorPoolCreateInfo *>(this); }
+    VkDescriptorPoolCreateInfo const *ptr() const { return reinterpret_cast<VkDescriptorPoolCreateInfo const *>(this); }
+};
+
+struct safe_VkDescriptorSetAllocateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkDescriptorPool descriptorPool;
+    uint32_t descriptorSetCount;
+    VkDescriptorSetLayout* pSetLayouts;
+    safe_VkDescriptorSetAllocateInfo(const VkDescriptorSetAllocateInfo* in_struct);
+    safe_VkDescriptorSetAllocateInfo(const safe_VkDescriptorSetAllocateInfo& src);
+    safe_VkDescriptorSetAllocateInfo& operator=(const safe_VkDescriptorSetAllocateInfo& src);
+    safe_VkDescriptorSetAllocateInfo();
+    ~safe_VkDescriptorSetAllocateInfo();
+    void initialize(const VkDescriptorSetAllocateInfo* in_struct);
+    void initialize(const safe_VkDescriptorSetAllocateInfo* src);
+    VkDescriptorSetAllocateInfo *ptr() { return reinterpret_cast<VkDescriptorSetAllocateInfo *>(this); }
+    VkDescriptorSetAllocateInfo const *ptr() const { return reinterpret_cast<VkDescriptorSetAllocateInfo const *>(this); }
+};
+
+struct safe_VkWriteDescriptorSet {
+    VkStructureType sType;
+    const void* pNext;
+    VkDescriptorSet dstSet;
+    uint32_t dstBinding;
+    uint32_t dstArrayElement;
+    uint32_t descriptorCount;
+    VkDescriptorType descriptorType;
+    VkDescriptorImageInfo* pImageInfo;
+    VkDescriptorBufferInfo* pBufferInfo;
+    VkBufferView* pTexelBufferView;
+    safe_VkWriteDescriptorSet(const VkWriteDescriptorSet* in_struct);
+    safe_VkWriteDescriptorSet(const safe_VkWriteDescriptorSet& src);
+    safe_VkWriteDescriptorSet& operator=(const safe_VkWriteDescriptorSet& src);
+    safe_VkWriteDescriptorSet();
+    ~safe_VkWriteDescriptorSet();
+    void initialize(const VkWriteDescriptorSet* in_struct);
+    void initialize(const safe_VkWriteDescriptorSet* src);
+    VkWriteDescriptorSet *ptr() { return reinterpret_cast<VkWriteDescriptorSet *>(this); }
+    VkWriteDescriptorSet const *ptr() const { return reinterpret_cast<VkWriteDescriptorSet const *>(this); }
+};
+
+struct safe_VkCopyDescriptorSet {
+    VkStructureType sType;
+    const void* pNext;
+    VkDescriptorSet srcSet;
+    uint32_t srcBinding;
+    uint32_t srcArrayElement;
+    VkDescriptorSet dstSet;
+    uint32_t dstBinding;
+    uint32_t dstArrayElement;
+    uint32_t descriptorCount;
+    safe_VkCopyDescriptorSet(const VkCopyDescriptorSet* in_struct);
+    safe_VkCopyDescriptorSet(const safe_VkCopyDescriptorSet& src);
+    safe_VkCopyDescriptorSet& operator=(const safe_VkCopyDescriptorSet& src);
+    safe_VkCopyDescriptorSet();
+    ~safe_VkCopyDescriptorSet();
+    void initialize(const VkCopyDescriptorSet* in_struct);
+    void initialize(const safe_VkCopyDescriptorSet* src);
+    VkCopyDescriptorSet *ptr() { return reinterpret_cast<VkCopyDescriptorSet *>(this); }
+    VkCopyDescriptorSet const *ptr() const { return reinterpret_cast<VkCopyDescriptorSet const *>(this); }
+};
+
+struct safe_VkFramebufferCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkFramebufferCreateFlags flags;
+    VkRenderPass renderPass;
+    uint32_t attachmentCount;
+    VkImageView* pAttachments;
+    uint32_t width;
+    uint32_t height;
+    uint32_t layers;
+    safe_VkFramebufferCreateInfo(const VkFramebufferCreateInfo* in_struct);
+    safe_VkFramebufferCreateInfo(const safe_VkFramebufferCreateInfo& src);
+    safe_VkFramebufferCreateInfo& operator=(const safe_VkFramebufferCreateInfo& src);
+    safe_VkFramebufferCreateInfo();
+    ~safe_VkFramebufferCreateInfo();
+    void initialize(const VkFramebufferCreateInfo* in_struct);
+    void initialize(const safe_VkFramebufferCreateInfo* src);
+    VkFramebufferCreateInfo *ptr() { return reinterpret_cast<VkFramebufferCreateInfo *>(this); }
+    VkFramebufferCreateInfo const *ptr() const { return reinterpret_cast<VkFramebufferCreateInfo const *>(this); }
+};
+
+struct safe_VkSubpassDescription {
+    VkSubpassDescriptionFlags flags;
+    VkPipelineBindPoint pipelineBindPoint;
+    uint32_t inputAttachmentCount;
+    const VkAttachmentReference* pInputAttachments;
+    uint32_t colorAttachmentCount;
+    const VkAttachmentReference* pColorAttachments;
+    const VkAttachmentReference* pResolveAttachments;
+    const VkAttachmentReference* pDepthStencilAttachment;
+    uint32_t preserveAttachmentCount;
+    const uint32_t* pPreserveAttachments;
+    safe_VkSubpassDescription(const VkSubpassDescription* in_struct);
+    safe_VkSubpassDescription(const safe_VkSubpassDescription& src);
+    safe_VkSubpassDescription& operator=(const safe_VkSubpassDescription& src);
+    safe_VkSubpassDescription();
+    ~safe_VkSubpassDescription();
+    void initialize(const VkSubpassDescription* in_struct);
+    void initialize(const safe_VkSubpassDescription* src);
+    VkSubpassDescription *ptr() { return reinterpret_cast<VkSubpassDescription *>(this); }
+    VkSubpassDescription const *ptr() const { return reinterpret_cast<VkSubpassDescription const *>(this); }
+};
+
+struct safe_VkRenderPassCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkRenderPassCreateFlags flags;
+    uint32_t attachmentCount;
+    const VkAttachmentDescription* pAttachments;
+    uint32_t subpassCount;
+    safe_VkSubpassDescription* pSubpasses;
+    uint32_t dependencyCount;
+    const VkSubpassDependency* pDependencies;
+    safe_VkRenderPassCreateInfo(const VkRenderPassCreateInfo* in_struct);
+    safe_VkRenderPassCreateInfo(const safe_VkRenderPassCreateInfo& src);
+    safe_VkRenderPassCreateInfo& operator=(const safe_VkRenderPassCreateInfo& src);
+    safe_VkRenderPassCreateInfo();
+    ~safe_VkRenderPassCreateInfo();
+    void initialize(const VkRenderPassCreateInfo* in_struct);
+    void initialize(const safe_VkRenderPassCreateInfo* src);
+    VkRenderPassCreateInfo *ptr() { return reinterpret_cast<VkRenderPassCreateInfo *>(this); }
+    VkRenderPassCreateInfo const *ptr() const { return reinterpret_cast<VkRenderPassCreateInfo const *>(this); }
+};
+
+struct safe_VkCommandPoolCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkCommandPoolCreateFlags flags;
+    uint32_t queueFamilyIndex;
+    safe_VkCommandPoolCreateInfo(const VkCommandPoolCreateInfo* in_struct);
+    safe_VkCommandPoolCreateInfo(const safe_VkCommandPoolCreateInfo& src);
+    safe_VkCommandPoolCreateInfo& operator=(const safe_VkCommandPoolCreateInfo& src);
+    safe_VkCommandPoolCreateInfo();
+    ~safe_VkCommandPoolCreateInfo();
+    void initialize(const VkCommandPoolCreateInfo* in_struct);
+    void initialize(const safe_VkCommandPoolCreateInfo* src);
+    VkCommandPoolCreateInfo *ptr() { return reinterpret_cast<VkCommandPoolCreateInfo *>(this); }
+    VkCommandPoolCreateInfo const *ptr() const { return reinterpret_cast<VkCommandPoolCreateInfo const *>(this); }
+};
+
+struct safe_VkCommandBufferAllocateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkCommandPool commandPool;
+    VkCommandBufferLevel level;
+    uint32_t commandBufferCount;
+    safe_VkCommandBufferAllocateInfo(const VkCommandBufferAllocateInfo* in_struct);
+    safe_VkCommandBufferAllocateInfo(const safe_VkCommandBufferAllocateInfo& src);
+    safe_VkCommandBufferAllocateInfo& operator=(const safe_VkCommandBufferAllocateInfo& src);
+    safe_VkCommandBufferAllocateInfo();
+    ~safe_VkCommandBufferAllocateInfo();
+    void initialize(const VkCommandBufferAllocateInfo* in_struct);
+    void initialize(const safe_VkCommandBufferAllocateInfo* src);
+    VkCommandBufferAllocateInfo *ptr() { return reinterpret_cast<VkCommandBufferAllocateInfo *>(this); }
+    VkCommandBufferAllocateInfo const *ptr() const { return reinterpret_cast<VkCommandBufferAllocateInfo const *>(this); }
+};
+
+struct safe_VkCommandBufferInheritanceInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkRenderPass renderPass;
+    uint32_t subpass;
+    VkFramebuffer framebuffer;
+    VkBool32 occlusionQueryEnable;
+    VkQueryControlFlags queryFlags;
+    VkQueryPipelineStatisticFlags pipelineStatistics;
+    safe_VkCommandBufferInheritanceInfo(const VkCommandBufferInheritanceInfo* in_struct);
+    safe_VkCommandBufferInheritanceInfo(const safe_VkCommandBufferInheritanceInfo& src);
+    safe_VkCommandBufferInheritanceInfo& operator=(const safe_VkCommandBufferInheritanceInfo& src);
+    safe_VkCommandBufferInheritanceInfo();
+    ~safe_VkCommandBufferInheritanceInfo();
+    void initialize(const VkCommandBufferInheritanceInfo* in_struct);
+    void initialize(const safe_VkCommandBufferInheritanceInfo* src);
+    VkCommandBufferInheritanceInfo *ptr() { return reinterpret_cast<VkCommandBufferInheritanceInfo *>(this); }
+    VkCommandBufferInheritanceInfo const *ptr() const { return reinterpret_cast<VkCommandBufferInheritanceInfo const *>(this); }
+};
+
+struct safe_VkCommandBufferBeginInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkCommandBufferUsageFlags flags;
+    safe_VkCommandBufferInheritanceInfo* pInheritanceInfo;
+    safe_VkCommandBufferBeginInfo(const VkCommandBufferBeginInfo* in_struct);
+    safe_VkCommandBufferBeginInfo(const safe_VkCommandBufferBeginInfo& src);
+    safe_VkCommandBufferBeginInfo& operator=(const safe_VkCommandBufferBeginInfo& src);
+    safe_VkCommandBufferBeginInfo();
+    ~safe_VkCommandBufferBeginInfo();
+    void initialize(const VkCommandBufferBeginInfo* in_struct);
+    void initialize(const safe_VkCommandBufferBeginInfo* src);
+    VkCommandBufferBeginInfo *ptr() { return reinterpret_cast<VkCommandBufferBeginInfo *>(this); }
+    VkCommandBufferBeginInfo const *ptr() const { return reinterpret_cast<VkCommandBufferBeginInfo const *>(this); }
+};
+
+struct safe_VkMemoryBarrier {
+    VkStructureType sType;
+    const void* pNext;
+    VkAccessFlags srcAccessMask;
+    VkAccessFlags dstAccessMask;
+    safe_VkMemoryBarrier(const VkMemoryBarrier* in_struct);
+    safe_VkMemoryBarrier(const safe_VkMemoryBarrier& src);
+    safe_VkMemoryBarrier& operator=(const safe_VkMemoryBarrier& src);
+    safe_VkMemoryBarrier();
+    ~safe_VkMemoryBarrier();
+    void initialize(const VkMemoryBarrier* in_struct);
+    void initialize(const safe_VkMemoryBarrier* src);
+    VkMemoryBarrier *ptr() { return reinterpret_cast<VkMemoryBarrier *>(this); }
+    VkMemoryBarrier const *ptr() const { return reinterpret_cast<VkMemoryBarrier const *>(this); }
+};
+
+struct safe_VkBufferMemoryBarrier {
+    VkStructureType sType;
+    const void* pNext;
+    VkAccessFlags srcAccessMask;
+    VkAccessFlags dstAccessMask;
+    uint32_t srcQueueFamilyIndex;
+    uint32_t dstQueueFamilyIndex;
+    VkBuffer buffer;
+    VkDeviceSize offset;
+    VkDeviceSize size;
+    safe_VkBufferMemoryBarrier(const VkBufferMemoryBarrier* in_struct);
+    safe_VkBufferMemoryBarrier(const safe_VkBufferMemoryBarrier& src);
+    safe_VkBufferMemoryBarrier& operator=(const safe_VkBufferMemoryBarrier& src);
+    safe_VkBufferMemoryBarrier();
+    ~safe_VkBufferMemoryBarrier();
+    void initialize(const VkBufferMemoryBarrier* in_struct);
+    void initialize(const safe_VkBufferMemoryBarrier* src);
+    VkBufferMemoryBarrier *ptr() { return reinterpret_cast<VkBufferMemoryBarrier *>(this); }
+    VkBufferMemoryBarrier const *ptr() const { return reinterpret_cast<VkBufferMemoryBarrier const *>(this); }
+};
+
+struct safe_VkImageMemoryBarrier {
+    VkStructureType sType;
+    const void* pNext;
+    VkAccessFlags srcAccessMask;
+    VkAccessFlags dstAccessMask;
+    VkImageLayout oldLayout;
+    VkImageLayout newLayout;
+    uint32_t srcQueueFamilyIndex;
+    uint32_t dstQueueFamilyIndex;
+    VkImage image;
+    VkImageSubresourceRange subresourceRange;
+    safe_VkImageMemoryBarrier(const VkImageMemoryBarrier* in_struct);
+    safe_VkImageMemoryBarrier(const safe_VkImageMemoryBarrier& src);
+    safe_VkImageMemoryBarrier& operator=(const safe_VkImageMemoryBarrier& src);
+    safe_VkImageMemoryBarrier();
+    ~safe_VkImageMemoryBarrier();
+    void initialize(const VkImageMemoryBarrier* in_struct);
+    void initialize(const safe_VkImageMemoryBarrier* src);
+    VkImageMemoryBarrier *ptr() { return reinterpret_cast<VkImageMemoryBarrier *>(this); }
+    VkImageMemoryBarrier const *ptr() const { return reinterpret_cast<VkImageMemoryBarrier const *>(this); }
+};
+
+struct safe_VkRenderPassBeginInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkRenderPass renderPass;
+    VkFramebuffer framebuffer;
+    VkRect2D renderArea;
+    uint32_t clearValueCount;
+    const VkClearValue* pClearValues;
+    safe_VkRenderPassBeginInfo(const VkRenderPassBeginInfo* in_struct);
+    safe_VkRenderPassBeginInfo(const safe_VkRenderPassBeginInfo& src);
+    safe_VkRenderPassBeginInfo& operator=(const safe_VkRenderPassBeginInfo& src);
+    safe_VkRenderPassBeginInfo();
+    ~safe_VkRenderPassBeginInfo();
+    void initialize(const VkRenderPassBeginInfo* in_struct);
+    void initialize(const safe_VkRenderPassBeginInfo* src);
+    VkRenderPassBeginInfo *ptr() { return reinterpret_cast<VkRenderPassBeginInfo *>(this); }
+    VkRenderPassBeginInfo const *ptr() const { return reinterpret_cast<VkRenderPassBeginInfo const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceSubgroupProperties {
+    VkStructureType sType;
+    void* pNext;
+    uint32_t subgroupSize;
+    VkShaderStageFlags supportedStages;
+    VkSubgroupFeatureFlags supportedOperations;
+    VkBool32 quadOperationsInAllStages;
+    safe_VkPhysicalDeviceSubgroupProperties(const VkPhysicalDeviceSubgroupProperties* in_struct);
+    safe_VkPhysicalDeviceSubgroupProperties(const safe_VkPhysicalDeviceSubgroupProperties& src);
+    safe_VkPhysicalDeviceSubgroupProperties& operator=(const safe_VkPhysicalDeviceSubgroupProperties& src);
+    safe_VkPhysicalDeviceSubgroupProperties();
+    ~safe_VkPhysicalDeviceSubgroupProperties();
+    void initialize(const VkPhysicalDeviceSubgroupProperties* in_struct);
+    void initialize(const safe_VkPhysicalDeviceSubgroupProperties* src);
+    VkPhysicalDeviceSubgroupProperties *ptr() { return reinterpret_cast<VkPhysicalDeviceSubgroupProperties *>(this); }
+    VkPhysicalDeviceSubgroupProperties const *ptr() const { return reinterpret_cast<VkPhysicalDeviceSubgroupProperties const *>(this); }
+};
+
+struct safe_VkBindBufferMemoryInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkBuffer buffer;
+    VkDeviceMemory memory;
+    VkDeviceSize memoryOffset;
+    safe_VkBindBufferMemoryInfo(const VkBindBufferMemoryInfo* in_struct);
+    safe_VkBindBufferMemoryInfo(const safe_VkBindBufferMemoryInfo& src);
+    safe_VkBindBufferMemoryInfo& operator=(const safe_VkBindBufferMemoryInfo& src);
+    safe_VkBindBufferMemoryInfo();
+    ~safe_VkBindBufferMemoryInfo();
+    void initialize(const VkBindBufferMemoryInfo* in_struct);
+    void initialize(const safe_VkBindBufferMemoryInfo* src);
+    VkBindBufferMemoryInfo *ptr() { return reinterpret_cast<VkBindBufferMemoryInfo *>(this); }
+    VkBindBufferMemoryInfo const *ptr() const { return reinterpret_cast<VkBindBufferMemoryInfo const *>(this); }
+};
+
+struct safe_VkBindImageMemoryInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkImage image;
+    VkDeviceMemory memory;
+    VkDeviceSize memoryOffset;
+    safe_VkBindImageMemoryInfo(const VkBindImageMemoryInfo* in_struct);
+    safe_VkBindImageMemoryInfo(const safe_VkBindImageMemoryInfo& src);
+    safe_VkBindImageMemoryInfo& operator=(const safe_VkBindImageMemoryInfo& src);
+    safe_VkBindImageMemoryInfo();
+    ~safe_VkBindImageMemoryInfo();
+    void initialize(const VkBindImageMemoryInfo* in_struct);
+    void initialize(const safe_VkBindImageMemoryInfo* src);
+    VkBindImageMemoryInfo *ptr() { return reinterpret_cast<VkBindImageMemoryInfo *>(this); }
+    VkBindImageMemoryInfo const *ptr() const { return reinterpret_cast<VkBindImageMemoryInfo const *>(this); }
+};
+
+struct safe_VkPhysicalDevice16BitStorageFeatures {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 storageBuffer16BitAccess;
+    VkBool32 uniformAndStorageBuffer16BitAccess;
+    VkBool32 storagePushConstant16;
+    VkBool32 storageInputOutput16;
+    safe_VkPhysicalDevice16BitStorageFeatures(const VkPhysicalDevice16BitStorageFeatures* in_struct);
+    safe_VkPhysicalDevice16BitStorageFeatures(const safe_VkPhysicalDevice16BitStorageFeatures& src);
+    safe_VkPhysicalDevice16BitStorageFeatures& operator=(const safe_VkPhysicalDevice16BitStorageFeatures& src);
+    safe_VkPhysicalDevice16BitStorageFeatures();
+    ~safe_VkPhysicalDevice16BitStorageFeatures();
+    void initialize(const VkPhysicalDevice16BitStorageFeatures* in_struct);
+    void initialize(const safe_VkPhysicalDevice16BitStorageFeatures* src);
+    VkPhysicalDevice16BitStorageFeatures *ptr() { return reinterpret_cast<VkPhysicalDevice16BitStorageFeatures *>(this); }
+    VkPhysicalDevice16BitStorageFeatures const *ptr() const { return reinterpret_cast<VkPhysicalDevice16BitStorageFeatures const *>(this); }
+};
+
+struct safe_VkMemoryDedicatedRequirements {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 prefersDedicatedAllocation;
+    VkBool32 requiresDedicatedAllocation;
+    safe_VkMemoryDedicatedRequirements(const VkMemoryDedicatedRequirements* in_struct);
+    safe_VkMemoryDedicatedRequirements(const safe_VkMemoryDedicatedRequirements& src);
+    safe_VkMemoryDedicatedRequirements& operator=(const safe_VkMemoryDedicatedRequirements& src);
+    safe_VkMemoryDedicatedRequirements();
+    ~safe_VkMemoryDedicatedRequirements();
+    void initialize(const VkMemoryDedicatedRequirements* in_struct);
+    void initialize(const safe_VkMemoryDedicatedRequirements* src);
+    VkMemoryDedicatedRequirements *ptr() { return reinterpret_cast<VkMemoryDedicatedRequirements *>(this); }
+    VkMemoryDedicatedRequirements const *ptr() const { return reinterpret_cast<VkMemoryDedicatedRequirements const *>(this); }
+};
+
+struct safe_VkMemoryDedicatedAllocateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkImage image;
+    VkBuffer buffer;
+    safe_VkMemoryDedicatedAllocateInfo(const VkMemoryDedicatedAllocateInfo* in_struct);
+    safe_VkMemoryDedicatedAllocateInfo(const safe_VkMemoryDedicatedAllocateInfo& src);
+    safe_VkMemoryDedicatedAllocateInfo& operator=(const safe_VkMemoryDedicatedAllocateInfo& src);
+    safe_VkMemoryDedicatedAllocateInfo();
+    ~safe_VkMemoryDedicatedAllocateInfo();
+    void initialize(const VkMemoryDedicatedAllocateInfo* in_struct);
+    void initialize(const safe_VkMemoryDedicatedAllocateInfo* src);
+    VkMemoryDedicatedAllocateInfo *ptr() { return reinterpret_cast<VkMemoryDedicatedAllocateInfo *>(this); }
+    VkMemoryDedicatedAllocateInfo const *ptr() const { return reinterpret_cast<VkMemoryDedicatedAllocateInfo const *>(this); }
+};
+
+struct safe_VkMemoryAllocateFlagsInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkMemoryAllocateFlags flags;
+    uint32_t deviceMask;
+    safe_VkMemoryAllocateFlagsInfo(const VkMemoryAllocateFlagsInfo* in_struct);
+    safe_VkMemoryAllocateFlagsInfo(const safe_VkMemoryAllocateFlagsInfo& src);
+    safe_VkMemoryAllocateFlagsInfo& operator=(const safe_VkMemoryAllocateFlagsInfo& src);
+    safe_VkMemoryAllocateFlagsInfo();
+    ~safe_VkMemoryAllocateFlagsInfo();
+    void initialize(const VkMemoryAllocateFlagsInfo* in_struct);
+    void initialize(const safe_VkMemoryAllocateFlagsInfo* src);
+    VkMemoryAllocateFlagsInfo *ptr() { return reinterpret_cast<VkMemoryAllocateFlagsInfo *>(this); }
+    VkMemoryAllocateFlagsInfo const *ptr() const { return reinterpret_cast<VkMemoryAllocateFlagsInfo const *>(this); }
+};
+
+struct safe_VkDeviceGroupRenderPassBeginInfo {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t deviceMask;
+    uint32_t deviceRenderAreaCount;
+    const VkRect2D* pDeviceRenderAreas;
+    safe_VkDeviceGroupRenderPassBeginInfo(const VkDeviceGroupRenderPassBeginInfo* in_struct);
+    safe_VkDeviceGroupRenderPassBeginInfo(const safe_VkDeviceGroupRenderPassBeginInfo& src);
+    safe_VkDeviceGroupRenderPassBeginInfo& operator=(const safe_VkDeviceGroupRenderPassBeginInfo& src);
+    safe_VkDeviceGroupRenderPassBeginInfo();
+    ~safe_VkDeviceGroupRenderPassBeginInfo();
+    void initialize(const VkDeviceGroupRenderPassBeginInfo* in_struct);
+    void initialize(const safe_VkDeviceGroupRenderPassBeginInfo* src);
+    VkDeviceGroupRenderPassBeginInfo *ptr() { return reinterpret_cast<VkDeviceGroupRenderPassBeginInfo *>(this); }
+    VkDeviceGroupRenderPassBeginInfo const *ptr() const { return reinterpret_cast<VkDeviceGroupRenderPassBeginInfo const *>(this); }
+};
+
+struct safe_VkDeviceGroupCommandBufferBeginInfo {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t deviceMask;
+    safe_VkDeviceGroupCommandBufferBeginInfo(const VkDeviceGroupCommandBufferBeginInfo* in_struct);
+    safe_VkDeviceGroupCommandBufferBeginInfo(const safe_VkDeviceGroupCommandBufferBeginInfo& src);
+    safe_VkDeviceGroupCommandBufferBeginInfo& operator=(const safe_VkDeviceGroupCommandBufferBeginInfo& src);
+    safe_VkDeviceGroupCommandBufferBeginInfo();
+    ~safe_VkDeviceGroupCommandBufferBeginInfo();
+    void initialize(const VkDeviceGroupCommandBufferBeginInfo* in_struct);
+    void initialize(const safe_VkDeviceGroupCommandBufferBeginInfo* src);
+    VkDeviceGroupCommandBufferBeginInfo *ptr() { return reinterpret_cast<VkDeviceGroupCommandBufferBeginInfo *>(this); }
+    VkDeviceGroupCommandBufferBeginInfo const *ptr() const { return reinterpret_cast<VkDeviceGroupCommandBufferBeginInfo const *>(this); }
+};
+
+struct safe_VkDeviceGroupSubmitInfo {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t waitSemaphoreCount;
+    const uint32_t* pWaitSemaphoreDeviceIndices;
+    uint32_t commandBufferCount;
+    const uint32_t* pCommandBufferDeviceMasks;
+    uint32_t signalSemaphoreCount;
+    const uint32_t* pSignalSemaphoreDeviceIndices;
+    safe_VkDeviceGroupSubmitInfo(const VkDeviceGroupSubmitInfo* in_struct);
+    safe_VkDeviceGroupSubmitInfo(const safe_VkDeviceGroupSubmitInfo& src);
+    safe_VkDeviceGroupSubmitInfo& operator=(const safe_VkDeviceGroupSubmitInfo& src);
+    safe_VkDeviceGroupSubmitInfo();
+    ~safe_VkDeviceGroupSubmitInfo();
+    void initialize(const VkDeviceGroupSubmitInfo* in_struct);
+    void initialize(const safe_VkDeviceGroupSubmitInfo* src);
+    VkDeviceGroupSubmitInfo *ptr() { return reinterpret_cast<VkDeviceGroupSubmitInfo *>(this); }
+    VkDeviceGroupSubmitInfo const *ptr() const { return reinterpret_cast<VkDeviceGroupSubmitInfo const *>(this); }
+};
+
+struct safe_VkDeviceGroupBindSparseInfo {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t resourceDeviceIndex;
+    uint32_t memoryDeviceIndex;
+    safe_VkDeviceGroupBindSparseInfo(const VkDeviceGroupBindSparseInfo* in_struct);
+    safe_VkDeviceGroupBindSparseInfo(const safe_VkDeviceGroupBindSparseInfo& src);
+    safe_VkDeviceGroupBindSparseInfo& operator=(const safe_VkDeviceGroupBindSparseInfo& src);
+    safe_VkDeviceGroupBindSparseInfo();
+    ~safe_VkDeviceGroupBindSparseInfo();
+    void initialize(const VkDeviceGroupBindSparseInfo* in_struct);
+    void initialize(const safe_VkDeviceGroupBindSparseInfo* src);
+    VkDeviceGroupBindSparseInfo *ptr() { return reinterpret_cast<VkDeviceGroupBindSparseInfo *>(this); }
+    VkDeviceGroupBindSparseInfo const *ptr() const { return reinterpret_cast<VkDeviceGroupBindSparseInfo const *>(this); }
+};
+
+struct safe_VkBindBufferMemoryDeviceGroupInfo {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t deviceIndexCount;
+    const uint32_t* pDeviceIndices;
+    safe_VkBindBufferMemoryDeviceGroupInfo(const VkBindBufferMemoryDeviceGroupInfo* in_struct);
+    safe_VkBindBufferMemoryDeviceGroupInfo(const safe_VkBindBufferMemoryDeviceGroupInfo& src);
+    safe_VkBindBufferMemoryDeviceGroupInfo& operator=(const safe_VkBindBufferMemoryDeviceGroupInfo& src);
+    safe_VkBindBufferMemoryDeviceGroupInfo();
+    ~safe_VkBindBufferMemoryDeviceGroupInfo();
+    void initialize(const VkBindBufferMemoryDeviceGroupInfo* in_struct);
+    void initialize(const safe_VkBindBufferMemoryDeviceGroupInfo* src);
+    VkBindBufferMemoryDeviceGroupInfo *ptr() { return reinterpret_cast<VkBindBufferMemoryDeviceGroupInfo *>(this); }
+    VkBindBufferMemoryDeviceGroupInfo const *ptr() const { return reinterpret_cast<VkBindBufferMemoryDeviceGroupInfo const *>(this); }
+};
+
+struct safe_VkBindImageMemoryDeviceGroupInfo {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t deviceIndexCount;
+    const uint32_t* pDeviceIndices;
+    uint32_t splitInstanceBindRegionCount;
+    const VkRect2D* pSplitInstanceBindRegions;
+    safe_VkBindImageMemoryDeviceGroupInfo(const VkBindImageMemoryDeviceGroupInfo* in_struct);
+    safe_VkBindImageMemoryDeviceGroupInfo(const safe_VkBindImageMemoryDeviceGroupInfo& src);
+    safe_VkBindImageMemoryDeviceGroupInfo& operator=(const safe_VkBindImageMemoryDeviceGroupInfo& src);
+    safe_VkBindImageMemoryDeviceGroupInfo();
+    ~safe_VkBindImageMemoryDeviceGroupInfo();
+    void initialize(const VkBindImageMemoryDeviceGroupInfo* in_struct);
+    void initialize(const safe_VkBindImageMemoryDeviceGroupInfo* src);
+    VkBindImageMemoryDeviceGroupInfo *ptr() { return reinterpret_cast<VkBindImageMemoryDeviceGroupInfo *>(this); }
+    VkBindImageMemoryDeviceGroupInfo const *ptr() const { return reinterpret_cast<VkBindImageMemoryDeviceGroupInfo const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceGroupProperties {
+    VkStructureType sType;
+    void* pNext;
+    uint32_t physicalDeviceCount;
+    VkPhysicalDevice physicalDevices[VK_MAX_DEVICE_GROUP_SIZE];
+    VkBool32 subsetAllocation;
+    safe_VkPhysicalDeviceGroupProperties(const VkPhysicalDeviceGroupProperties* in_struct);
+    safe_VkPhysicalDeviceGroupProperties(const safe_VkPhysicalDeviceGroupProperties& src);
+    safe_VkPhysicalDeviceGroupProperties& operator=(const safe_VkPhysicalDeviceGroupProperties& src);
+    safe_VkPhysicalDeviceGroupProperties();
+    ~safe_VkPhysicalDeviceGroupProperties();
+    void initialize(const VkPhysicalDeviceGroupProperties* in_struct);
+    void initialize(const safe_VkPhysicalDeviceGroupProperties* src);
+    VkPhysicalDeviceGroupProperties *ptr() { return reinterpret_cast<VkPhysicalDeviceGroupProperties *>(this); }
+    VkPhysicalDeviceGroupProperties const *ptr() const { return reinterpret_cast<VkPhysicalDeviceGroupProperties const *>(this); }
+};
+
+struct safe_VkDeviceGroupDeviceCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t physicalDeviceCount;
+    VkPhysicalDevice* pPhysicalDevices;
+    safe_VkDeviceGroupDeviceCreateInfo(const VkDeviceGroupDeviceCreateInfo* in_struct);
+    safe_VkDeviceGroupDeviceCreateInfo(const safe_VkDeviceGroupDeviceCreateInfo& src);
+    safe_VkDeviceGroupDeviceCreateInfo& operator=(const safe_VkDeviceGroupDeviceCreateInfo& src);
+    safe_VkDeviceGroupDeviceCreateInfo();
+    ~safe_VkDeviceGroupDeviceCreateInfo();
+    void initialize(const VkDeviceGroupDeviceCreateInfo* in_struct);
+    void initialize(const safe_VkDeviceGroupDeviceCreateInfo* src);
+    VkDeviceGroupDeviceCreateInfo *ptr() { return reinterpret_cast<VkDeviceGroupDeviceCreateInfo *>(this); }
+    VkDeviceGroupDeviceCreateInfo const *ptr() const { return reinterpret_cast<VkDeviceGroupDeviceCreateInfo const *>(this); }
+};
+
+struct safe_VkBufferMemoryRequirementsInfo2 {
+    VkStructureType sType;
+    const void* pNext;
+    VkBuffer buffer;
+    safe_VkBufferMemoryRequirementsInfo2(const VkBufferMemoryRequirementsInfo2* in_struct);
+    safe_VkBufferMemoryRequirementsInfo2(const safe_VkBufferMemoryRequirementsInfo2& src);
+    safe_VkBufferMemoryRequirementsInfo2& operator=(const safe_VkBufferMemoryRequirementsInfo2& src);
+    safe_VkBufferMemoryRequirementsInfo2();
+    ~safe_VkBufferMemoryRequirementsInfo2();
+    void initialize(const VkBufferMemoryRequirementsInfo2* in_struct);
+    void initialize(const safe_VkBufferMemoryRequirementsInfo2* src);
+    VkBufferMemoryRequirementsInfo2 *ptr() { return reinterpret_cast<VkBufferMemoryRequirementsInfo2 *>(this); }
+    VkBufferMemoryRequirementsInfo2 const *ptr() const { return reinterpret_cast<VkBufferMemoryRequirementsInfo2 const *>(this); }
+};
+
+struct safe_VkImageMemoryRequirementsInfo2 {
+    VkStructureType sType;
+    const void* pNext;
+    VkImage image;
+    safe_VkImageMemoryRequirementsInfo2(const VkImageMemoryRequirementsInfo2* in_struct);
+    safe_VkImageMemoryRequirementsInfo2(const safe_VkImageMemoryRequirementsInfo2& src);
+    safe_VkImageMemoryRequirementsInfo2& operator=(const safe_VkImageMemoryRequirementsInfo2& src);
+    safe_VkImageMemoryRequirementsInfo2();
+    ~safe_VkImageMemoryRequirementsInfo2();
+    void initialize(const VkImageMemoryRequirementsInfo2* in_struct);
+    void initialize(const safe_VkImageMemoryRequirementsInfo2* src);
+    VkImageMemoryRequirementsInfo2 *ptr() { return reinterpret_cast<VkImageMemoryRequirementsInfo2 *>(this); }
+    VkImageMemoryRequirementsInfo2 const *ptr() const { return reinterpret_cast<VkImageMemoryRequirementsInfo2 const *>(this); }
+};
+
+struct safe_VkImageSparseMemoryRequirementsInfo2 {
+    VkStructureType sType;
+    const void* pNext;
+    VkImage image;
+    safe_VkImageSparseMemoryRequirementsInfo2(const VkImageSparseMemoryRequirementsInfo2* in_struct);
+    safe_VkImageSparseMemoryRequirementsInfo2(const safe_VkImageSparseMemoryRequirementsInfo2& src);
+    safe_VkImageSparseMemoryRequirementsInfo2& operator=(const safe_VkImageSparseMemoryRequirementsInfo2& src);
+    safe_VkImageSparseMemoryRequirementsInfo2();
+    ~safe_VkImageSparseMemoryRequirementsInfo2();
+    void initialize(const VkImageSparseMemoryRequirementsInfo2* in_struct);
+    void initialize(const safe_VkImageSparseMemoryRequirementsInfo2* src);
+    VkImageSparseMemoryRequirementsInfo2 *ptr() { return reinterpret_cast<VkImageSparseMemoryRequirementsInfo2 *>(this); }
+    VkImageSparseMemoryRequirementsInfo2 const *ptr() const { return reinterpret_cast<VkImageSparseMemoryRequirementsInfo2 const *>(this); }
+};
+
+struct safe_VkMemoryRequirements2 {
+    VkStructureType sType;
+    void* pNext;
+    VkMemoryRequirements memoryRequirements;
+    safe_VkMemoryRequirements2(const VkMemoryRequirements2* in_struct);
+    safe_VkMemoryRequirements2(const safe_VkMemoryRequirements2& src);
+    safe_VkMemoryRequirements2& operator=(const safe_VkMemoryRequirements2& src);
+    safe_VkMemoryRequirements2();
+    ~safe_VkMemoryRequirements2();
+    void initialize(const VkMemoryRequirements2* in_struct);
+    void initialize(const safe_VkMemoryRequirements2* src);
+    VkMemoryRequirements2 *ptr() { return reinterpret_cast<VkMemoryRequirements2 *>(this); }
+    VkMemoryRequirements2 const *ptr() const { return reinterpret_cast<VkMemoryRequirements2 const *>(this); }
+};
+
+struct safe_VkSparseImageMemoryRequirements2 {
+    VkStructureType sType;
+    void* pNext;
+    VkSparseImageMemoryRequirements memoryRequirements;
+    safe_VkSparseImageMemoryRequirements2(const VkSparseImageMemoryRequirements2* in_struct);
+    safe_VkSparseImageMemoryRequirements2(const safe_VkSparseImageMemoryRequirements2& src);
+    safe_VkSparseImageMemoryRequirements2& operator=(const safe_VkSparseImageMemoryRequirements2& src);
+    safe_VkSparseImageMemoryRequirements2();
+    ~safe_VkSparseImageMemoryRequirements2();
+    void initialize(const VkSparseImageMemoryRequirements2* in_struct);
+    void initialize(const safe_VkSparseImageMemoryRequirements2* src);
+    VkSparseImageMemoryRequirements2 *ptr() { return reinterpret_cast<VkSparseImageMemoryRequirements2 *>(this); }
+    VkSparseImageMemoryRequirements2 const *ptr() const { return reinterpret_cast<VkSparseImageMemoryRequirements2 const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceFeatures2 {
+    VkStructureType sType;
+    void* pNext;
+    VkPhysicalDeviceFeatures features;
+    safe_VkPhysicalDeviceFeatures2(const VkPhysicalDeviceFeatures2* in_struct);
+    safe_VkPhysicalDeviceFeatures2(const safe_VkPhysicalDeviceFeatures2& src);
+    safe_VkPhysicalDeviceFeatures2& operator=(const safe_VkPhysicalDeviceFeatures2& src);
+    safe_VkPhysicalDeviceFeatures2();
+    ~safe_VkPhysicalDeviceFeatures2();
+    void initialize(const VkPhysicalDeviceFeatures2* in_struct);
+    void initialize(const safe_VkPhysicalDeviceFeatures2* src);
+    VkPhysicalDeviceFeatures2 *ptr() { return reinterpret_cast<VkPhysicalDeviceFeatures2 *>(this); }
+    VkPhysicalDeviceFeatures2 const *ptr() const { return reinterpret_cast<VkPhysicalDeviceFeatures2 const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceProperties2 {
+    VkStructureType sType;
+    void* pNext;
+    VkPhysicalDeviceProperties properties;
+    safe_VkPhysicalDeviceProperties2(const VkPhysicalDeviceProperties2* in_struct);
+    safe_VkPhysicalDeviceProperties2(const safe_VkPhysicalDeviceProperties2& src);
+    safe_VkPhysicalDeviceProperties2& operator=(const safe_VkPhysicalDeviceProperties2& src);
+    safe_VkPhysicalDeviceProperties2();
+    ~safe_VkPhysicalDeviceProperties2();
+    void initialize(const VkPhysicalDeviceProperties2* in_struct);
+    void initialize(const safe_VkPhysicalDeviceProperties2* src);
+    VkPhysicalDeviceProperties2 *ptr() { return reinterpret_cast<VkPhysicalDeviceProperties2 *>(this); }
+    VkPhysicalDeviceProperties2 const *ptr() const { return reinterpret_cast<VkPhysicalDeviceProperties2 const *>(this); }
+};
+
+struct safe_VkFormatProperties2 {
+    VkStructureType sType;
+    void* pNext;
+    VkFormatProperties formatProperties;
+    safe_VkFormatProperties2(const VkFormatProperties2* in_struct);
+    safe_VkFormatProperties2(const safe_VkFormatProperties2& src);
+    safe_VkFormatProperties2& operator=(const safe_VkFormatProperties2& src);
+    safe_VkFormatProperties2();
+    ~safe_VkFormatProperties2();
+    void initialize(const VkFormatProperties2* in_struct);
+    void initialize(const safe_VkFormatProperties2* src);
+    VkFormatProperties2 *ptr() { return reinterpret_cast<VkFormatProperties2 *>(this); }
+    VkFormatProperties2 const *ptr() const { return reinterpret_cast<VkFormatProperties2 const *>(this); }
+};
+
+struct safe_VkImageFormatProperties2 {
+    VkStructureType sType;
+    void* pNext;
+    VkImageFormatProperties imageFormatProperties;
+    safe_VkImageFormatProperties2(const VkImageFormatProperties2* in_struct);
+    safe_VkImageFormatProperties2(const safe_VkImageFormatProperties2& src);
+    safe_VkImageFormatProperties2& operator=(const safe_VkImageFormatProperties2& src);
+    safe_VkImageFormatProperties2();
+    ~safe_VkImageFormatProperties2();
+    void initialize(const VkImageFormatProperties2* in_struct);
+    void initialize(const safe_VkImageFormatProperties2* src);
+    VkImageFormatProperties2 *ptr() { return reinterpret_cast<VkImageFormatProperties2 *>(this); }
+    VkImageFormatProperties2 const *ptr() const { return reinterpret_cast<VkImageFormatProperties2 const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceImageFormatInfo2 {
+    VkStructureType sType;
+    const void* pNext;
+    VkFormat format;
+    VkImageType type;
+    VkImageTiling tiling;
+    VkImageUsageFlags usage;
+    VkImageCreateFlags flags;
+    safe_VkPhysicalDeviceImageFormatInfo2(const VkPhysicalDeviceImageFormatInfo2* in_struct);
+    safe_VkPhysicalDeviceImageFormatInfo2(const safe_VkPhysicalDeviceImageFormatInfo2& src);
+    safe_VkPhysicalDeviceImageFormatInfo2& operator=(const safe_VkPhysicalDeviceImageFormatInfo2& src);
+    safe_VkPhysicalDeviceImageFormatInfo2();
+    ~safe_VkPhysicalDeviceImageFormatInfo2();
+    void initialize(const VkPhysicalDeviceImageFormatInfo2* in_struct);
+    void initialize(const safe_VkPhysicalDeviceImageFormatInfo2* src);
+    VkPhysicalDeviceImageFormatInfo2 *ptr() { return reinterpret_cast<VkPhysicalDeviceImageFormatInfo2 *>(this); }
+    VkPhysicalDeviceImageFormatInfo2 const *ptr() const { return reinterpret_cast<VkPhysicalDeviceImageFormatInfo2 const *>(this); }
+};
+
+struct safe_VkQueueFamilyProperties2 {
+    VkStructureType sType;
+    void* pNext;
+    VkQueueFamilyProperties queueFamilyProperties;
+    safe_VkQueueFamilyProperties2(const VkQueueFamilyProperties2* in_struct);
+    safe_VkQueueFamilyProperties2(const safe_VkQueueFamilyProperties2& src);
+    safe_VkQueueFamilyProperties2& operator=(const safe_VkQueueFamilyProperties2& src);
+    safe_VkQueueFamilyProperties2();
+    ~safe_VkQueueFamilyProperties2();
+    void initialize(const VkQueueFamilyProperties2* in_struct);
+    void initialize(const safe_VkQueueFamilyProperties2* src);
+    VkQueueFamilyProperties2 *ptr() { return reinterpret_cast<VkQueueFamilyProperties2 *>(this); }
+    VkQueueFamilyProperties2 const *ptr() const { return reinterpret_cast<VkQueueFamilyProperties2 const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceMemoryProperties2 {
+    VkStructureType sType;
+    void* pNext;
+    VkPhysicalDeviceMemoryProperties memoryProperties;
+    safe_VkPhysicalDeviceMemoryProperties2(const VkPhysicalDeviceMemoryProperties2* in_struct);
+    safe_VkPhysicalDeviceMemoryProperties2(const safe_VkPhysicalDeviceMemoryProperties2& src);
+    safe_VkPhysicalDeviceMemoryProperties2& operator=(const safe_VkPhysicalDeviceMemoryProperties2& src);
+    safe_VkPhysicalDeviceMemoryProperties2();
+    ~safe_VkPhysicalDeviceMemoryProperties2();
+    void initialize(const VkPhysicalDeviceMemoryProperties2* in_struct);
+    void initialize(const safe_VkPhysicalDeviceMemoryProperties2* src);
+    VkPhysicalDeviceMemoryProperties2 *ptr() { return reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>(this); }
+    VkPhysicalDeviceMemoryProperties2 const *ptr() const { return reinterpret_cast<VkPhysicalDeviceMemoryProperties2 const *>(this); }
+};
+
+struct safe_VkSparseImageFormatProperties2 {
+    VkStructureType sType;
+    void* pNext;
+    VkSparseImageFormatProperties properties;
+    safe_VkSparseImageFormatProperties2(const VkSparseImageFormatProperties2* in_struct);
+    safe_VkSparseImageFormatProperties2(const safe_VkSparseImageFormatProperties2& src);
+    safe_VkSparseImageFormatProperties2& operator=(const safe_VkSparseImageFormatProperties2& src);
+    safe_VkSparseImageFormatProperties2();
+    ~safe_VkSparseImageFormatProperties2();
+    void initialize(const VkSparseImageFormatProperties2* in_struct);
+    void initialize(const safe_VkSparseImageFormatProperties2* src);
+    VkSparseImageFormatProperties2 *ptr() { return reinterpret_cast<VkSparseImageFormatProperties2 *>(this); }
+    VkSparseImageFormatProperties2 const *ptr() const { return reinterpret_cast<VkSparseImageFormatProperties2 const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceSparseImageFormatInfo2 {
+    VkStructureType sType;
+    const void* pNext;
+    VkFormat format;
+    VkImageType type;
+    VkSampleCountFlagBits samples;
+    VkImageUsageFlags usage;
+    VkImageTiling tiling;
+    safe_VkPhysicalDeviceSparseImageFormatInfo2(const VkPhysicalDeviceSparseImageFormatInfo2* in_struct);
+    safe_VkPhysicalDeviceSparseImageFormatInfo2(const safe_VkPhysicalDeviceSparseImageFormatInfo2& src);
+    safe_VkPhysicalDeviceSparseImageFormatInfo2& operator=(const safe_VkPhysicalDeviceSparseImageFormatInfo2& src);
+    safe_VkPhysicalDeviceSparseImageFormatInfo2();
+    ~safe_VkPhysicalDeviceSparseImageFormatInfo2();
+    void initialize(const VkPhysicalDeviceSparseImageFormatInfo2* in_struct);
+    void initialize(const safe_VkPhysicalDeviceSparseImageFormatInfo2* src);
+    VkPhysicalDeviceSparseImageFormatInfo2 *ptr() { return reinterpret_cast<VkPhysicalDeviceSparseImageFormatInfo2 *>(this); }
+    VkPhysicalDeviceSparseImageFormatInfo2 const *ptr() const { return reinterpret_cast<VkPhysicalDeviceSparseImageFormatInfo2 const *>(this); }
+};
+
+struct safe_VkPhysicalDevicePointClippingProperties {
+    VkStructureType sType;
+    void* pNext;
+    VkPointClippingBehavior pointClippingBehavior;
+    safe_VkPhysicalDevicePointClippingProperties(const VkPhysicalDevicePointClippingProperties* in_struct);
+    safe_VkPhysicalDevicePointClippingProperties(const safe_VkPhysicalDevicePointClippingProperties& src);
+    safe_VkPhysicalDevicePointClippingProperties& operator=(const safe_VkPhysicalDevicePointClippingProperties& src);
+    safe_VkPhysicalDevicePointClippingProperties();
+    ~safe_VkPhysicalDevicePointClippingProperties();
+    void initialize(const VkPhysicalDevicePointClippingProperties* in_struct);
+    void initialize(const safe_VkPhysicalDevicePointClippingProperties* src);
+    VkPhysicalDevicePointClippingProperties *ptr() { return reinterpret_cast<VkPhysicalDevicePointClippingProperties *>(this); }
+    VkPhysicalDevicePointClippingProperties const *ptr() const { return reinterpret_cast<VkPhysicalDevicePointClippingProperties const *>(this); }
+};
+
+struct safe_VkRenderPassInputAttachmentAspectCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t aspectReferenceCount;
+    const VkInputAttachmentAspectReference* pAspectReferences;
+    safe_VkRenderPassInputAttachmentAspectCreateInfo(const VkRenderPassInputAttachmentAspectCreateInfo* in_struct);
+    safe_VkRenderPassInputAttachmentAspectCreateInfo(const safe_VkRenderPassInputAttachmentAspectCreateInfo& src);
+    safe_VkRenderPassInputAttachmentAspectCreateInfo& operator=(const safe_VkRenderPassInputAttachmentAspectCreateInfo& src);
+    safe_VkRenderPassInputAttachmentAspectCreateInfo();
+    ~safe_VkRenderPassInputAttachmentAspectCreateInfo();
+    void initialize(const VkRenderPassInputAttachmentAspectCreateInfo* in_struct);
+    void initialize(const safe_VkRenderPassInputAttachmentAspectCreateInfo* src);
+    VkRenderPassInputAttachmentAspectCreateInfo *ptr() { return reinterpret_cast<VkRenderPassInputAttachmentAspectCreateInfo *>(this); }
+    VkRenderPassInputAttachmentAspectCreateInfo const *ptr() const { return reinterpret_cast<VkRenderPassInputAttachmentAspectCreateInfo const *>(this); }
+};
+
+struct safe_VkImageViewUsageCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkImageUsageFlags usage;
+    safe_VkImageViewUsageCreateInfo(const VkImageViewUsageCreateInfo* in_struct);
+    safe_VkImageViewUsageCreateInfo(const safe_VkImageViewUsageCreateInfo& src);
+    safe_VkImageViewUsageCreateInfo& operator=(const safe_VkImageViewUsageCreateInfo& src);
+    safe_VkImageViewUsageCreateInfo();
+    ~safe_VkImageViewUsageCreateInfo();
+    void initialize(const VkImageViewUsageCreateInfo* in_struct);
+    void initialize(const safe_VkImageViewUsageCreateInfo* src);
+    VkImageViewUsageCreateInfo *ptr() { return reinterpret_cast<VkImageViewUsageCreateInfo *>(this); }
+    VkImageViewUsageCreateInfo const *ptr() const { return reinterpret_cast<VkImageViewUsageCreateInfo const *>(this); }
+};
+
+struct safe_VkPipelineTessellationDomainOriginStateCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkTessellationDomainOrigin domainOrigin;
+    safe_VkPipelineTessellationDomainOriginStateCreateInfo(const VkPipelineTessellationDomainOriginStateCreateInfo* in_struct);
+    safe_VkPipelineTessellationDomainOriginStateCreateInfo(const safe_VkPipelineTessellationDomainOriginStateCreateInfo& src);
+    safe_VkPipelineTessellationDomainOriginStateCreateInfo& operator=(const safe_VkPipelineTessellationDomainOriginStateCreateInfo& src);
+    safe_VkPipelineTessellationDomainOriginStateCreateInfo();
+    ~safe_VkPipelineTessellationDomainOriginStateCreateInfo();
+    void initialize(const VkPipelineTessellationDomainOriginStateCreateInfo* in_struct);
+    void initialize(const safe_VkPipelineTessellationDomainOriginStateCreateInfo* src);
+    VkPipelineTessellationDomainOriginStateCreateInfo *ptr() { return reinterpret_cast<VkPipelineTessellationDomainOriginStateCreateInfo *>(this); }
+    VkPipelineTessellationDomainOriginStateCreateInfo const *ptr() const { return reinterpret_cast<VkPipelineTessellationDomainOriginStateCreateInfo const *>(this); }
+};
+
+struct safe_VkRenderPassMultiviewCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t subpassCount;
+    const uint32_t* pViewMasks;
+    uint32_t dependencyCount;
+    const int32_t* pViewOffsets;
+    uint32_t correlationMaskCount;
+    const uint32_t* pCorrelationMasks;
+    safe_VkRenderPassMultiviewCreateInfo(const VkRenderPassMultiviewCreateInfo* in_struct);
+    safe_VkRenderPassMultiviewCreateInfo(const safe_VkRenderPassMultiviewCreateInfo& src);
+    safe_VkRenderPassMultiviewCreateInfo& operator=(const safe_VkRenderPassMultiviewCreateInfo& src);
+    safe_VkRenderPassMultiviewCreateInfo();
+    ~safe_VkRenderPassMultiviewCreateInfo();
+    void initialize(const VkRenderPassMultiviewCreateInfo* in_struct);
+    void initialize(const safe_VkRenderPassMultiviewCreateInfo* src);
+    VkRenderPassMultiviewCreateInfo *ptr() { return reinterpret_cast<VkRenderPassMultiviewCreateInfo *>(this); }
+    VkRenderPassMultiviewCreateInfo const *ptr() const { return reinterpret_cast<VkRenderPassMultiviewCreateInfo const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceMultiviewFeatures {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 multiview;
+    VkBool32 multiviewGeometryShader;
+    VkBool32 multiviewTessellationShader;
+    safe_VkPhysicalDeviceMultiviewFeatures(const VkPhysicalDeviceMultiviewFeatures* in_struct);
+    safe_VkPhysicalDeviceMultiviewFeatures(const safe_VkPhysicalDeviceMultiviewFeatures& src);
+    safe_VkPhysicalDeviceMultiviewFeatures& operator=(const safe_VkPhysicalDeviceMultiviewFeatures& src);
+    safe_VkPhysicalDeviceMultiviewFeatures();
+    ~safe_VkPhysicalDeviceMultiviewFeatures();
+    void initialize(const VkPhysicalDeviceMultiviewFeatures* in_struct);
+    void initialize(const safe_VkPhysicalDeviceMultiviewFeatures* src);
+    VkPhysicalDeviceMultiviewFeatures *ptr() { return reinterpret_cast<VkPhysicalDeviceMultiviewFeatures *>(this); }
+    VkPhysicalDeviceMultiviewFeatures const *ptr() const { return reinterpret_cast<VkPhysicalDeviceMultiviewFeatures const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceMultiviewProperties {
+    VkStructureType sType;
+    void* pNext;
+    uint32_t maxMultiviewViewCount;
+    uint32_t maxMultiviewInstanceIndex;
+    safe_VkPhysicalDeviceMultiviewProperties(const VkPhysicalDeviceMultiviewProperties* in_struct);
+    safe_VkPhysicalDeviceMultiviewProperties(const safe_VkPhysicalDeviceMultiviewProperties& src);
+    safe_VkPhysicalDeviceMultiviewProperties& operator=(const safe_VkPhysicalDeviceMultiviewProperties& src);
+    safe_VkPhysicalDeviceMultiviewProperties();
+    ~safe_VkPhysicalDeviceMultiviewProperties();
+    void initialize(const VkPhysicalDeviceMultiviewProperties* in_struct);
+    void initialize(const safe_VkPhysicalDeviceMultiviewProperties* src);
+    VkPhysicalDeviceMultiviewProperties *ptr() { return reinterpret_cast<VkPhysicalDeviceMultiviewProperties *>(this); }
+    VkPhysicalDeviceMultiviewProperties const *ptr() const { return reinterpret_cast<VkPhysicalDeviceMultiviewProperties const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceVariablePointersFeatures {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 variablePointersStorageBuffer;
+    VkBool32 variablePointers;
+    safe_VkPhysicalDeviceVariablePointersFeatures(const VkPhysicalDeviceVariablePointersFeatures* in_struct);
+    safe_VkPhysicalDeviceVariablePointersFeatures(const safe_VkPhysicalDeviceVariablePointersFeatures& src);
+    safe_VkPhysicalDeviceVariablePointersFeatures& operator=(const safe_VkPhysicalDeviceVariablePointersFeatures& src);
+    safe_VkPhysicalDeviceVariablePointersFeatures();
+    ~safe_VkPhysicalDeviceVariablePointersFeatures();
+    void initialize(const VkPhysicalDeviceVariablePointersFeatures* in_struct);
+    void initialize(const safe_VkPhysicalDeviceVariablePointersFeatures* src);
+    VkPhysicalDeviceVariablePointersFeatures *ptr() { return reinterpret_cast<VkPhysicalDeviceVariablePointersFeatures *>(this); }
+    VkPhysicalDeviceVariablePointersFeatures const *ptr() const { return reinterpret_cast<VkPhysicalDeviceVariablePointersFeatures const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceProtectedMemoryFeatures {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 protectedMemory;
+    safe_VkPhysicalDeviceProtectedMemoryFeatures(const VkPhysicalDeviceProtectedMemoryFeatures* in_struct);
+    safe_VkPhysicalDeviceProtectedMemoryFeatures(const safe_VkPhysicalDeviceProtectedMemoryFeatures& src);
+    safe_VkPhysicalDeviceProtectedMemoryFeatures& operator=(const safe_VkPhysicalDeviceProtectedMemoryFeatures& src);
+    safe_VkPhysicalDeviceProtectedMemoryFeatures();
+    ~safe_VkPhysicalDeviceProtectedMemoryFeatures();
+    void initialize(const VkPhysicalDeviceProtectedMemoryFeatures* in_struct);
+    void initialize(const safe_VkPhysicalDeviceProtectedMemoryFeatures* src);
+    VkPhysicalDeviceProtectedMemoryFeatures *ptr() { return reinterpret_cast<VkPhysicalDeviceProtectedMemoryFeatures *>(this); }
+    VkPhysicalDeviceProtectedMemoryFeatures const *ptr() const { return reinterpret_cast<VkPhysicalDeviceProtectedMemoryFeatures const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceProtectedMemoryProperties {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 protectedNoFault;
+    safe_VkPhysicalDeviceProtectedMemoryProperties(const VkPhysicalDeviceProtectedMemoryProperties* in_struct);
+    safe_VkPhysicalDeviceProtectedMemoryProperties(const safe_VkPhysicalDeviceProtectedMemoryProperties& src);
+    safe_VkPhysicalDeviceProtectedMemoryProperties& operator=(const safe_VkPhysicalDeviceProtectedMemoryProperties& src);
+    safe_VkPhysicalDeviceProtectedMemoryProperties();
+    ~safe_VkPhysicalDeviceProtectedMemoryProperties();
+    void initialize(const VkPhysicalDeviceProtectedMemoryProperties* in_struct);
+    void initialize(const safe_VkPhysicalDeviceProtectedMemoryProperties* src);
+    VkPhysicalDeviceProtectedMemoryProperties *ptr() { return reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties *>(this); }
+    VkPhysicalDeviceProtectedMemoryProperties const *ptr() const { return reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties const *>(this); }
+};
+
+struct safe_VkDeviceQueueInfo2 {
+    VkStructureType sType;
+    const void* pNext;
+    VkDeviceQueueCreateFlags flags;
+    uint32_t queueFamilyIndex;
+    uint32_t queueIndex;
+    safe_VkDeviceQueueInfo2(const VkDeviceQueueInfo2* in_struct);
+    safe_VkDeviceQueueInfo2(const safe_VkDeviceQueueInfo2& src);
+    safe_VkDeviceQueueInfo2& operator=(const safe_VkDeviceQueueInfo2& src);
+    safe_VkDeviceQueueInfo2();
+    ~safe_VkDeviceQueueInfo2();
+    void initialize(const VkDeviceQueueInfo2* in_struct);
+    void initialize(const safe_VkDeviceQueueInfo2* src);
+    VkDeviceQueueInfo2 *ptr() { return reinterpret_cast<VkDeviceQueueInfo2 *>(this); }
+    VkDeviceQueueInfo2 const *ptr() const { return reinterpret_cast<VkDeviceQueueInfo2 const *>(this); }
+};
+
+struct safe_VkProtectedSubmitInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkBool32 protectedSubmit;
+    safe_VkProtectedSubmitInfo(const VkProtectedSubmitInfo* in_struct);
+    safe_VkProtectedSubmitInfo(const safe_VkProtectedSubmitInfo& src);
+    safe_VkProtectedSubmitInfo& operator=(const safe_VkProtectedSubmitInfo& src);
+    safe_VkProtectedSubmitInfo();
+    ~safe_VkProtectedSubmitInfo();
+    void initialize(const VkProtectedSubmitInfo* in_struct);
+    void initialize(const safe_VkProtectedSubmitInfo* src);
+    VkProtectedSubmitInfo *ptr() { return reinterpret_cast<VkProtectedSubmitInfo *>(this); }
+    VkProtectedSubmitInfo const *ptr() const { return reinterpret_cast<VkProtectedSubmitInfo const *>(this); }
+};
+
+struct safe_VkSamplerYcbcrConversionCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkFormat format;
+    VkSamplerYcbcrModelConversion ycbcrModel;
+    VkSamplerYcbcrRange ycbcrRange;
+    VkComponentMapping components;
+    VkChromaLocation xChromaOffset;
+    VkChromaLocation yChromaOffset;
+    VkFilter chromaFilter;
+    VkBool32 forceExplicitReconstruction;
+    safe_VkSamplerYcbcrConversionCreateInfo(const VkSamplerYcbcrConversionCreateInfo* in_struct);
+    safe_VkSamplerYcbcrConversionCreateInfo(const safe_VkSamplerYcbcrConversionCreateInfo& src);
+    safe_VkSamplerYcbcrConversionCreateInfo& operator=(const safe_VkSamplerYcbcrConversionCreateInfo& src);
+    safe_VkSamplerYcbcrConversionCreateInfo();
+    ~safe_VkSamplerYcbcrConversionCreateInfo();
+    void initialize(const VkSamplerYcbcrConversionCreateInfo* in_struct);
+    void initialize(const safe_VkSamplerYcbcrConversionCreateInfo* src);
+    VkSamplerYcbcrConversionCreateInfo *ptr() { return reinterpret_cast<VkSamplerYcbcrConversionCreateInfo *>(this); }
+    VkSamplerYcbcrConversionCreateInfo const *ptr() const { return reinterpret_cast<VkSamplerYcbcrConversionCreateInfo const *>(this); }
+};
+
+struct safe_VkSamplerYcbcrConversionInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkSamplerYcbcrConversion conversion;
+    safe_VkSamplerYcbcrConversionInfo(const VkSamplerYcbcrConversionInfo* in_struct);
+    safe_VkSamplerYcbcrConversionInfo(const safe_VkSamplerYcbcrConversionInfo& src);
+    safe_VkSamplerYcbcrConversionInfo& operator=(const safe_VkSamplerYcbcrConversionInfo& src);
+    safe_VkSamplerYcbcrConversionInfo();
+    ~safe_VkSamplerYcbcrConversionInfo();
+    void initialize(const VkSamplerYcbcrConversionInfo* in_struct);
+    void initialize(const safe_VkSamplerYcbcrConversionInfo* src);
+    VkSamplerYcbcrConversionInfo *ptr() { return reinterpret_cast<VkSamplerYcbcrConversionInfo *>(this); }
+    VkSamplerYcbcrConversionInfo const *ptr() const { return reinterpret_cast<VkSamplerYcbcrConversionInfo const *>(this); }
+};
+
+struct safe_VkBindImagePlaneMemoryInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkImageAspectFlagBits planeAspect;
+    safe_VkBindImagePlaneMemoryInfo(const VkBindImagePlaneMemoryInfo* in_struct);
+    safe_VkBindImagePlaneMemoryInfo(const safe_VkBindImagePlaneMemoryInfo& src);
+    safe_VkBindImagePlaneMemoryInfo& operator=(const safe_VkBindImagePlaneMemoryInfo& src);
+    safe_VkBindImagePlaneMemoryInfo();
+    ~safe_VkBindImagePlaneMemoryInfo();
+    void initialize(const VkBindImagePlaneMemoryInfo* in_struct);
+    void initialize(const safe_VkBindImagePlaneMemoryInfo* src);
+    VkBindImagePlaneMemoryInfo *ptr() { return reinterpret_cast<VkBindImagePlaneMemoryInfo *>(this); }
+    VkBindImagePlaneMemoryInfo const *ptr() const { return reinterpret_cast<VkBindImagePlaneMemoryInfo const *>(this); }
+};
+
+struct safe_VkImagePlaneMemoryRequirementsInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkImageAspectFlagBits planeAspect;
+    safe_VkImagePlaneMemoryRequirementsInfo(const VkImagePlaneMemoryRequirementsInfo* in_struct);
+    safe_VkImagePlaneMemoryRequirementsInfo(const safe_VkImagePlaneMemoryRequirementsInfo& src);
+    safe_VkImagePlaneMemoryRequirementsInfo& operator=(const safe_VkImagePlaneMemoryRequirementsInfo& src);
+    safe_VkImagePlaneMemoryRequirementsInfo();
+    ~safe_VkImagePlaneMemoryRequirementsInfo();
+    void initialize(const VkImagePlaneMemoryRequirementsInfo* in_struct);
+    void initialize(const safe_VkImagePlaneMemoryRequirementsInfo* src);
+    VkImagePlaneMemoryRequirementsInfo *ptr() { return reinterpret_cast<VkImagePlaneMemoryRequirementsInfo *>(this); }
+    VkImagePlaneMemoryRequirementsInfo const *ptr() const { return reinterpret_cast<VkImagePlaneMemoryRequirementsInfo const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 samplerYcbcrConversion;
+    safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures(const VkPhysicalDeviceSamplerYcbcrConversionFeatures* in_struct);
+    safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures(const safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures& src);
+    safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures& operator=(const safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures& src);
+    safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures();
+    ~safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures();
+    void initialize(const VkPhysicalDeviceSamplerYcbcrConversionFeatures* in_struct);
+    void initialize(const safe_VkPhysicalDeviceSamplerYcbcrConversionFeatures* src);
+    VkPhysicalDeviceSamplerYcbcrConversionFeatures *ptr() { return reinterpret_cast<VkPhysicalDeviceSamplerYcbcrConversionFeatures *>(this); }
+    VkPhysicalDeviceSamplerYcbcrConversionFeatures const *ptr() const { return reinterpret_cast<VkPhysicalDeviceSamplerYcbcrConversionFeatures const *>(this); }
+};
+
+struct safe_VkSamplerYcbcrConversionImageFormatProperties {
+    VkStructureType sType;
+    void* pNext;
+    uint32_t combinedImageSamplerDescriptorCount;
+    safe_VkSamplerYcbcrConversionImageFormatProperties(const VkSamplerYcbcrConversionImageFormatProperties* in_struct);
+    safe_VkSamplerYcbcrConversionImageFormatProperties(const safe_VkSamplerYcbcrConversionImageFormatProperties& src);
+    safe_VkSamplerYcbcrConversionImageFormatProperties& operator=(const safe_VkSamplerYcbcrConversionImageFormatProperties& src);
+    safe_VkSamplerYcbcrConversionImageFormatProperties();
+    ~safe_VkSamplerYcbcrConversionImageFormatProperties();
+    void initialize(const VkSamplerYcbcrConversionImageFormatProperties* in_struct);
+    void initialize(const safe_VkSamplerYcbcrConversionImageFormatProperties* src);
+    VkSamplerYcbcrConversionImageFormatProperties *ptr() { return reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties *>(this); }
+    VkSamplerYcbcrConversionImageFormatProperties const *ptr() const { return reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties const *>(this); }
+};
+
+struct safe_VkDescriptorUpdateTemplateCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkDescriptorUpdateTemplateCreateFlags flags;
+    uint32_t descriptorUpdateEntryCount;
+    const VkDescriptorUpdateTemplateEntry* pDescriptorUpdateEntries;
+    VkDescriptorUpdateTemplateType templateType;
+    VkDescriptorSetLayout descriptorSetLayout;
+    VkPipelineBindPoint pipelineBindPoint;
+    VkPipelineLayout pipelineLayout;
+    uint32_t set;
+    safe_VkDescriptorUpdateTemplateCreateInfo(const VkDescriptorUpdateTemplateCreateInfo* in_struct);
+    safe_VkDescriptorUpdateTemplateCreateInfo(const safe_VkDescriptorUpdateTemplateCreateInfo& src);
+    safe_VkDescriptorUpdateTemplateCreateInfo& operator=(const safe_VkDescriptorUpdateTemplateCreateInfo& src);
+    safe_VkDescriptorUpdateTemplateCreateInfo();
+    ~safe_VkDescriptorUpdateTemplateCreateInfo();
+    void initialize(const VkDescriptorUpdateTemplateCreateInfo* in_struct);
+    void initialize(const safe_VkDescriptorUpdateTemplateCreateInfo* src);
+    VkDescriptorUpdateTemplateCreateInfo *ptr() { return reinterpret_cast<VkDescriptorUpdateTemplateCreateInfo *>(this); }
+    VkDescriptorUpdateTemplateCreateInfo const *ptr() const { return reinterpret_cast<VkDescriptorUpdateTemplateCreateInfo const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceExternalImageFormatInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkExternalMemoryHandleTypeFlagBits handleType;
+    safe_VkPhysicalDeviceExternalImageFormatInfo(const VkPhysicalDeviceExternalImageFormatInfo* in_struct);
+    safe_VkPhysicalDeviceExternalImageFormatInfo(const safe_VkPhysicalDeviceExternalImageFormatInfo& src);
+    safe_VkPhysicalDeviceExternalImageFormatInfo& operator=(const safe_VkPhysicalDeviceExternalImageFormatInfo& src);
+    safe_VkPhysicalDeviceExternalImageFormatInfo();
+    ~safe_VkPhysicalDeviceExternalImageFormatInfo();
+    void initialize(const VkPhysicalDeviceExternalImageFormatInfo* in_struct);
+    void initialize(const safe_VkPhysicalDeviceExternalImageFormatInfo* src);
+    VkPhysicalDeviceExternalImageFormatInfo *ptr() { return reinterpret_cast<VkPhysicalDeviceExternalImageFormatInfo *>(this); }
+    VkPhysicalDeviceExternalImageFormatInfo const *ptr() const { return reinterpret_cast<VkPhysicalDeviceExternalImageFormatInfo const *>(this); }
+};
+
+struct safe_VkExternalImageFormatProperties {
+    VkStructureType sType;
+    void* pNext;
+    VkExternalMemoryProperties externalMemoryProperties;
+    safe_VkExternalImageFormatProperties(const VkExternalImageFormatProperties* in_struct);
+    safe_VkExternalImageFormatProperties(const safe_VkExternalImageFormatProperties& src);
+    safe_VkExternalImageFormatProperties& operator=(const safe_VkExternalImageFormatProperties& src);
+    safe_VkExternalImageFormatProperties();
+    ~safe_VkExternalImageFormatProperties();
+    void initialize(const VkExternalImageFormatProperties* in_struct);
+    void initialize(const safe_VkExternalImageFormatProperties* src);
+    VkExternalImageFormatProperties *ptr() { return reinterpret_cast<VkExternalImageFormatProperties *>(this); }
+    VkExternalImageFormatProperties const *ptr() const { return reinterpret_cast<VkExternalImageFormatProperties const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceExternalBufferInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkBufferCreateFlags flags;
+    VkBufferUsageFlags usage;
+    VkExternalMemoryHandleTypeFlagBits handleType;
+    safe_VkPhysicalDeviceExternalBufferInfo(const VkPhysicalDeviceExternalBufferInfo* in_struct);
+    safe_VkPhysicalDeviceExternalBufferInfo(const safe_VkPhysicalDeviceExternalBufferInfo& src);
+    safe_VkPhysicalDeviceExternalBufferInfo& operator=(const safe_VkPhysicalDeviceExternalBufferInfo& src);
+    safe_VkPhysicalDeviceExternalBufferInfo();
+    ~safe_VkPhysicalDeviceExternalBufferInfo();
+    void initialize(const VkPhysicalDeviceExternalBufferInfo* in_struct);
+    void initialize(const safe_VkPhysicalDeviceExternalBufferInfo* src);
+    VkPhysicalDeviceExternalBufferInfo *ptr() { return reinterpret_cast<VkPhysicalDeviceExternalBufferInfo *>(this); }
+    VkPhysicalDeviceExternalBufferInfo const *ptr() const { return reinterpret_cast<VkPhysicalDeviceExternalBufferInfo const *>(this); }
+};
+
+struct safe_VkExternalBufferProperties {
+    VkStructureType sType;
+    void* pNext;
+    VkExternalMemoryProperties externalMemoryProperties;
+    safe_VkExternalBufferProperties(const VkExternalBufferProperties* in_struct);
+    safe_VkExternalBufferProperties(const safe_VkExternalBufferProperties& src);
+    safe_VkExternalBufferProperties& operator=(const safe_VkExternalBufferProperties& src);
+    safe_VkExternalBufferProperties();
+    ~safe_VkExternalBufferProperties();
+    void initialize(const VkExternalBufferProperties* in_struct);
+    void initialize(const safe_VkExternalBufferProperties* src);
+    VkExternalBufferProperties *ptr() { return reinterpret_cast<VkExternalBufferProperties *>(this); }
+    VkExternalBufferProperties const *ptr() const { return reinterpret_cast<VkExternalBufferProperties const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceIDProperties {
+    VkStructureType sType;
+    void* pNext;
+    uint8_t deviceUUID[VK_UUID_SIZE];
+    uint8_t driverUUID[VK_UUID_SIZE];
+    uint8_t deviceLUID[VK_LUID_SIZE];
+    uint32_t deviceNodeMask;
+    VkBool32 deviceLUIDValid;
+    safe_VkPhysicalDeviceIDProperties(const VkPhysicalDeviceIDProperties* in_struct);
+    safe_VkPhysicalDeviceIDProperties(const safe_VkPhysicalDeviceIDProperties& src);
+    safe_VkPhysicalDeviceIDProperties& operator=(const safe_VkPhysicalDeviceIDProperties& src);
+    safe_VkPhysicalDeviceIDProperties();
+    ~safe_VkPhysicalDeviceIDProperties();
+    void initialize(const VkPhysicalDeviceIDProperties* in_struct);
+    void initialize(const safe_VkPhysicalDeviceIDProperties* src);
+    VkPhysicalDeviceIDProperties *ptr() { return reinterpret_cast<VkPhysicalDeviceIDProperties *>(this); }
+    VkPhysicalDeviceIDProperties const *ptr() const { return reinterpret_cast<VkPhysicalDeviceIDProperties const *>(this); }
+};
+
+struct safe_VkExternalMemoryImageCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkExternalMemoryHandleTypeFlags handleTypes;
+    safe_VkExternalMemoryImageCreateInfo(const VkExternalMemoryImageCreateInfo* in_struct);
+    safe_VkExternalMemoryImageCreateInfo(const safe_VkExternalMemoryImageCreateInfo& src);
+    safe_VkExternalMemoryImageCreateInfo& operator=(const safe_VkExternalMemoryImageCreateInfo& src);
+    safe_VkExternalMemoryImageCreateInfo();
+    ~safe_VkExternalMemoryImageCreateInfo();
+    void initialize(const VkExternalMemoryImageCreateInfo* in_struct);
+    void initialize(const safe_VkExternalMemoryImageCreateInfo* src);
+    VkExternalMemoryImageCreateInfo *ptr() { return reinterpret_cast<VkExternalMemoryImageCreateInfo *>(this); }
+    VkExternalMemoryImageCreateInfo const *ptr() const { return reinterpret_cast<VkExternalMemoryImageCreateInfo const *>(this); }
+};
+
+struct safe_VkExternalMemoryBufferCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkExternalMemoryHandleTypeFlags handleTypes;
+    safe_VkExternalMemoryBufferCreateInfo(const VkExternalMemoryBufferCreateInfo* in_struct);
+    safe_VkExternalMemoryBufferCreateInfo(const safe_VkExternalMemoryBufferCreateInfo& src);
+    safe_VkExternalMemoryBufferCreateInfo& operator=(const safe_VkExternalMemoryBufferCreateInfo& src);
+    safe_VkExternalMemoryBufferCreateInfo();
+    ~safe_VkExternalMemoryBufferCreateInfo();
+    void initialize(const VkExternalMemoryBufferCreateInfo* in_struct);
+    void initialize(const safe_VkExternalMemoryBufferCreateInfo* src);
+    VkExternalMemoryBufferCreateInfo *ptr() { return reinterpret_cast<VkExternalMemoryBufferCreateInfo *>(this); }
+    VkExternalMemoryBufferCreateInfo const *ptr() const { return reinterpret_cast<VkExternalMemoryBufferCreateInfo const *>(this); }
+};
+
+struct safe_VkExportMemoryAllocateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkExternalMemoryHandleTypeFlags handleTypes;
+    safe_VkExportMemoryAllocateInfo(const VkExportMemoryAllocateInfo* in_struct);
+    safe_VkExportMemoryAllocateInfo(const safe_VkExportMemoryAllocateInfo& src);
+    safe_VkExportMemoryAllocateInfo& operator=(const safe_VkExportMemoryAllocateInfo& src);
+    safe_VkExportMemoryAllocateInfo();
+    ~safe_VkExportMemoryAllocateInfo();
+    void initialize(const VkExportMemoryAllocateInfo* in_struct);
+    void initialize(const safe_VkExportMemoryAllocateInfo* src);
+    VkExportMemoryAllocateInfo *ptr() { return reinterpret_cast<VkExportMemoryAllocateInfo *>(this); }
+    VkExportMemoryAllocateInfo const *ptr() const { return reinterpret_cast<VkExportMemoryAllocateInfo const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceExternalFenceInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkExternalFenceHandleTypeFlagBits handleType;
+    safe_VkPhysicalDeviceExternalFenceInfo(const VkPhysicalDeviceExternalFenceInfo* in_struct);
+    safe_VkPhysicalDeviceExternalFenceInfo(const safe_VkPhysicalDeviceExternalFenceInfo& src);
+    safe_VkPhysicalDeviceExternalFenceInfo& operator=(const safe_VkPhysicalDeviceExternalFenceInfo& src);
+    safe_VkPhysicalDeviceExternalFenceInfo();
+    ~safe_VkPhysicalDeviceExternalFenceInfo();
+    void initialize(const VkPhysicalDeviceExternalFenceInfo* in_struct);
+    void initialize(const safe_VkPhysicalDeviceExternalFenceInfo* src);
+    VkPhysicalDeviceExternalFenceInfo *ptr() { return reinterpret_cast<VkPhysicalDeviceExternalFenceInfo *>(this); }
+    VkPhysicalDeviceExternalFenceInfo const *ptr() const { return reinterpret_cast<VkPhysicalDeviceExternalFenceInfo const *>(this); }
+};
+
+struct safe_VkExternalFenceProperties {
+    VkStructureType sType;
+    void* pNext;
+    VkExternalFenceHandleTypeFlags exportFromImportedHandleTypes;
+    VkExternalFenceHandleTypeFlags compatibleHandleTypes;
+    VkExternalFenceFeatureFlags externalFenceFeatures;
+    safe_VkExternalFenceProperties(const VkExternalFenceProperties* in_struct);
+    safe_VkExternalFenceProperties(const safe_VkExternalFenceProperties& src);
+    safe_VkExternalFenceProperties& operator=(const safe_VkExternalFenceProperties& src);
+    safe_VkExternalFenceProperties();
+    ~safe_VkExternalFenceProperties();
+    void initialize(const VkExternalFenceProperties* in_struct);
+    void initialize(const safe_VkExternalFenceProperties* src);
+    VkExternalFenceProperties *ptr() { return reinterpret_cast<VkExternalFenceProperties *>(this); }
+    VkExternalFenceProperties const *ptr() const { return reinterpret_cast<VkExternalFenceProperties const *>(this); }
+};
+
+struct safe_VkExportFenceCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkExternalFenceHandleTypeFlags handleTypes;
+    safe_VkExportFenceCreateInfo(const VkExportFenceCreateInfo* in_struct);
+    safe_VkExportFenceCreateInfo(const safe_VkExportFenceCreateInfo& src);
+    safe_VkExportFenceCreateInfo& operator=(const safe_VkExportFenceCreateInfo& src);
+    safe_VkExportFenceCreateInfo();
+    ~safe_VkExportFenceCreateInfo();
+    void initialize(const VkExportFenceCreateInfo* in_struct);
+    void initialize(const safe_VkExportFenceCreateInfo* src);
+    VkExportFenceCreateInfo *ptr() { return reinterpret_cast<VkExportFenceCreateInfo *>(this); }
+    VkExportFenceCreateInfo const *ptr() const { return reinterpret_cast<VkExportFenceCreateInfo const *>(this); }
+};
+
+struct safe_VkExportSemaphoreCreateInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkExternalSemaphoreHandleTypeFlags handleTypes;
+    safe_VkExportSemaphoreCreateInfo(const VkExportSemaphoreCreateInfo* in_struct);
+    safe_VkExportSemaphoreCreateInfo(const safe_VkExportSemaphoreCreateInfo& src);
+    safe_VkExportSemaphoreCreateInfo& operator=(const safe_VkExportSemaphoreCreateInfo& src);
+    safe_VkExportSemaphoreCreateInfo();
+    ~safe_VkExportSemaphoreCreateInfo();
+    void initialize(const VkExportSemaphoreCreateInfo* in_struct);
+    void initialize(const safe_VkExportSemaphoreCreateInfo* src);
+    VkExportSemaphoreCreateInfo *ptr() { return reinterpret_cast<VkExportSemaphoreCreateInfo *>(this); }
+    VkExportSemaphoreCreateInfo const *ptr() const { return reinterpret_cast<VkExportSemaphoreCreateInfo const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceExternalSemaphoreInfo {
+    VkStructureType sType;
+    const void* pNext;
+    VkExternalSemaphoreHandleTypeFlagBits handleType;
+    safe_VkPhysicalDeviceExternalSemaphoreInfo(const VkPhysicalDeviceExternalSemaphoreInfo* in_struct);
+    safe_VkPhysicalDeviceExternalSemaphoreInfo(const safe_VkPhysicalDeviceExternalSemaphoreInfo& src);
+    safe_VkPhysicalDeviceExternalSemaphoreInfo& operator=(const safe_VkPhysicalDeviceExternalSemaphoreInfo& src);
+    safe_VkPhysicalDeviceExternalSemaphoreInfo();
+    ~safe_VkPhysicalDeviceExternalSemaphoreInfo();
+    void initialize(const VkPhysicalDeviceExternalSemaphoreInfo* in_struct);
+    void initialize(const safe_VkPhysicalDeviceExternalSemaphoreInfo* src);
+    VkPhysicalDeviceExternalSemaphoreInfo *ptr() { return reinterpret_cast<VkPhysicalDeviceExternalSemaphoreInfo *>(this); }
+    VkPhysicalDeviceExternalSemaphoreInfo const *ptr() const { return reinterpret_cast<VkPhysicalDeviceExternalSemaphoreInfo const *>(this); }
+};
+
+struct safe_VkExternalSemaphoreProperties {
+    VkStructureType sType;
+    void* pNext;
+    VkExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes;
+    VkExternalSemaphoreHandleTypeFlags compatibleHandleTypes;
+    VkExternalSemaphoreFeatureFlags externalSemaphoreFeatures;
+    safe_VkExternalSemaphoreProperties(const VkExternalSemaphoreProperties* in_struct);
+    safe_VkExternalSemaphoreProperties(const safe_VkExternalSemaphoreProperties& src);
+    safe_VkExternalSemaphoreProperties& operator=(const safe_VkExternalSemaphoreProperties& src);
+    safe_VkExternalSemaphoreProperties();
+    ~safe_VkExternalSemaphoreProperties();
+    void initialize(const VkExternalSemaphoreProperties* in_struct);
+    void initialize(const safe_VkExternalSemaphoreProperties* src);
+    VkExternalSemaphoreProperties *ptr() { return reinterpret_cast<VkExternalSemaphoreProperties *>(this); }
+    VkExternalSemaphoreProperties const *ptr() const { return reinterpret_cast<VkExternalSemaphoreProperties const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceMaintenance3Properties {
+    VkStructureType sType;
+    void* pNext;
+    uint32_t maxPerSetDescriptors;
+    VkDeviceSize maxMemoryAllocationSize;
+    safe_VkPhysicalDeviceMaintenance3Properties(const VkPhysicalDeviceMaintenance3Properties* in_struct);
+    safe_VkPhysicalDeviceMaintenance3Properties(const safe_VkPhysicalDeviceMaintenance3Properties& src);
+    safe_VkPhysicalDeviceMaintenance3Properties& operator=(const safe_VkPhysicalDeviceMaintenance3Properties& src);
+    safe_VkPhysicalDeviceMaintenance3Properties();
+    ~safe_VkPhysicalDeviceMaintenance3Properties();
+    void initialize(const VkPhysicalDeviceMaintenance3Properties* in_struct);
+    void initialize(const safe_VkPhysicalDeviceMaintenance3Properties* src);
+    VkPhysicalDeviceMaintenance3Properties *ptr() { return reinterpret_cast<VkPhysicalDeviceMaintenance3Properties *>(this); }
+    VkPhysicalDeviceMaintenance3Properties const *ptr() const { return reinterpret_cast<VkPhysicalDeviceMaintenance3Properties const *>(this); }
+};
+
+struct safe_VkDescriptorSetLayoutSupport {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 supported;
+    safe_VkDescriptorSetLayoutSupport(const VkDescriptorSetLayoutSupport* in_struct);
+    safe_VkDescriptorSetLayoutSupport(const safe_VkDescriptorSetLayoutSupport& src);
+    safe_VkDescriptorSetLayoutSupport& operator=(const safe_VkDescriptorSetLayoutSupport& src);
+    safe_VkDescriptorSetLayoutSupport();
+    ~safe_VkDescriptorSetLayoutSupport();
+    void initialize(const VkDescriptorSetLayoutSupport* in_struct);
+    void initialize(const safe_VkDescriptorSetLayoutSupport* src);
+    VkDescriptorSetLayoutSupport *ptr() { return reinterpret_cast<VkDescriptorSetLayoutSupport *>(this); }
+    VkDescriptorSetLayoutSupport const *ptr() const { return reinterpret_cast<VkDescriptorSetLayoutSupport const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceShaderDrawParametersFeatures {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 shaderDrawParameters;
+    safe_VkPhysicalDeviceShaderDrawParametersFeatures(const VkPhysicalDeviceShaderDrawParametersFeatures* in_struct);
+    safe_VkPhysicalDeviceShaderDrawParametersFeatures(const safe_VkPhysicalDeviceShaderDrawParametersFeatures& src);
+    safe_VkPhysicalDeviceShaderDrawParametersFeatures& operator=(const safe_VkPhysicalDeviceShaderDrawParametersFeatures& src);
+    safe_VkPhysicalDeviceShaderDrawParametersFeatures();
+    ~safe_VkPhysicalDeviceShaderDrawParametersFeatures();
+    void initialize(const VkPhysicalDeviceShaderDrawParametersFeatures* in_struct);
+    void initialize(const safe_VkPhysicalDeviceShaderDrawParametersFeatures* src);
+    VkPhysicalDeviceShaderDrawParametersFeatures *ptr() { return reinterpret_cast<VkPhysicalDeviceShaderDrawParametersFeatures *>(this); }
+    VkPhysicalDeviceShaderDrawParametersFeatures const *ptr() const { return reinterpret_cast<VkPhysicalDeviceShaderDrawParametersFeatures const *>(this); }
+};
+
+struct safe_VkSwapchainCreateInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkSwapchainCreateFlagsKHR flags;
+    VkSurfaceKHR surface;
+    uint32_t minImageCount;
+    VkFormat imageFormat;
+    VkColorSpaceKHR imageColorSpace;
+    VkExtent2D imageExtent;
+    uint32_t imageArrayLayers;
+    VkImageUsageFlags imageUsage;
+    VkSharingMode imageSharingMode;
+    uint32_t queueFamilyIndexCount;
+    const uint32_t* pQueueFamilyIndices;
+    VkSurfaceTransformFlagBitsKHR preTransform;
+    VkCompositeAlphaFlagBitsKHR compositeAlpha;
+    VkPresentModeKHR presentMode;
+    VkBool32 clipped;
+    VkSwapchainKHR oldSwapchain;
+    safe_VkSwapchainCreateInfoKHR(const VkSwapchainCreateInfoKHR* in_struct);
+    safe_VkSwapchainCreateInfoKHR(const safe_VkSwapchainCreateInfoKHR& src);
+    safe_VkSwapchainCreateInfoKHR& operator=(const safe_VkSwapchainCreateInfoKHR& src);
+    safe_VkSwapchainCreateInfoKHR();
+    ~safe_VkSwapchainCreateInfoKHR();
+    void initialize(const VkSwapchainCreateInfoKHR* in_struct);
+    void initialize(const safe_VkSwapchainCreateInfoKHR* src);
+    VkSwapchainCreateInfoKHR *ptr() { return reinterpret_cast<VkSwapchainCreateInfoKHR *>(this); }
+    VkSwapchainCreateInfoKHR const *ptr() const { return reinterpret_cast<VkSwapchainCreateInfoKHR const *>(this); }
+};
+
+struct safe_VkPresentInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t waitSemaphoreCount;
+    VkSemaphore* pWaitSemaphores;
+    uint32_t swapchainCount;
+    VkSwapchainKHR* pSwapchains;
+    const uint32_t* pImageIndices;
+    VkResult* pResults;
+    safe_VkPresentInfoKHR(const VkPresentInfoKHR* in_struct);
+    safe_VkPresentInfoKHR(const safe_VkPresentInfoKHR& src);
+    safe_VkPresentInfoKHR& operator=(const safe_VkPresentInfoKHR& src);
+    safe_VkPresentInfoKHR();
+    ~safe_VkPresentInfoKHR();
+    void initialize(const VkPresentInfoKHR* in_struct);
+    void initialize(const safe_VkPresentInfoKHR* src);
+    VkPresentInfoKHR *ptr() { return reinterpret_cast<VkPresentInfoKHR *>(this); }
+    VkPresentInfoKHR const *ptr() const { return reinterpret_cast<VkPresentInfoKHR const *>(this); }
+};
+
+struct safe_VkImageSwapchainCreateInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkSwapchainKHR swapchain;
+    safe_VkImageSwapchainCreateInfoKHR(const VkImageSwapchainCreateInfoKHR* in_struct);
+    safe_VkImageSwapchainCreateInfoKHR(const safe_VkImageSwapchainCreateInfoKHR& src);
+    safe_VkImageSwapchainCreateInfoKHR& operator=(const safe_VkImageSwapchainCreateInfoKHR& src);
+    safe_VkImageSwapchainCreateInfoKHR();
+    ~safe_VkImageSwapchainCreateInfoKHR();
+    void initialize(const VkImageSwapchainCreateInfoKHR* in_struct);
+    void initialize(const safe_VkImageSwapchainCreateInfoKHR* src);
+    VkImageSwapchainCreateInfoKHR *ptr() { return reinterpret_cast<VkImageSwapchainCreateInfoKHR *>(this); }
+    VkImageSwapchainCreateInfoKHR const *ptr() const { return reinterpret_cast<VkImageSwapchainCreateInfoKHR const *>(this); }
+};
+
+struct safe_VkBindImageMemorySwapchainInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkSwapchainKHR swapchain;
+    uint32_t imageIndex;
+    safe_VkBindImageMemorySwapchainInfoKHR(const VkBindImageMemorySwapchainInfoKHR* in_struct);
+    safe_VkBindImageMemorySwapchainInfoKHR(const safe_VkBindImageMemorySwapchainInfoKHR& src);
+    safe_VkBindImageMemorySwapchainInfoKHR& operator=(const safe_VkBindImageMemorySwapchainInfoKHR& src);
+    safe_VkBindImageMemorySwapchainInfoKHR();
+    ~safe_VkBindImageMemorySwapchainInfoKHR();
+    void initialize(const VkBindImageMemorySwapchainInfoKHR* in_struct);
+    void initialize(const safe_VkBindImageMemorySwapchainInfoKHR* src);
+    VkBindImageMemorySwapchainInfoKHR *ptr() { return reinterpret_cast<VkBindImageMemorySwapchainInfoKHR *>(this); }
+    VkBindImageMemorySwapchainInfoKHR const *ptr() const { return reinterpret_cast<VkBindImageMemorySwapchainInfoKHR const *>(this); }
+};
+
+struct safe_VkAcquireNextImageInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkSwapchainKHR swapchain;
+    uint64_t timeout;
+    VkSemaphore semaphore;
+    VkFence fence;
+    uint32_t deviceMask;
+    safe_VkAcquireNextImageInfoKHR(const VkAcquireNextImageInfoKHR* in_struct);
+    safe_VkAcquireNextImageInfoKHR(const safe_VkAcquireNextImageInfoKHR& src);
+    safe_VkAcquireNextImageInfoKHR& operator=(const safe_VkAcquireNextImageInfoKHR& src);
+    safe_VkAcquireNextImageInfoKHR();
+    ~safe_VkAcquireNextImageInfoKHR();
+    void initialize(const VkAcquireNextImageInfoKHR* in_struct);
+    void initialize(const safe_VkAcquireNextImageInfoKHR* src);
+    VkAcquireNextImageInfoKHR *ptr() { return reinterpret_cast<VkAcquireNextImageInfoKHR *>(this); }
+    VkAcquireNextImageInfoKHR const *ptr() const { return reinterpret_cast<VkAcquireNextImageInfoKHR const *>(this); }
+};
+
+struct safe_VkDeviceGroupPresentCapabilitiesKHR {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE];
+    VkDeviceGroupPresentModeFlagsKHR modes;
+    safe_VkDeviceGroupPresentCapabilitiesKHR(const VkDeviceGroupPresentCapabilitiesKHR* in_struct);
+    safe_VkDeviceGroupPresentCapabilitiesKHR(const safe_VkDeviceGroupPresentCapabilitiesKHR& src);
+    safe_VkDeviceGroupPresentCapabilitiesKHR& operator=(const safe_VkDeviceGroupPresentCapabilitiesKHR& src);
+    safe_VkDeviceGroupPresentCapabilitiesKHR();
+    ~safe_VkDeviceGroupPresentCapabilitiesKHR();
+    void initialize(const VkDeviceGroupPresentCapabilitiesKHR* in_struct);
+    void initialize(const safe_VkDeviceGroupPresentCapabilitiesKHR* src);
+    VkDeviceGroupPresentCapabilitiesKHR *ptr() { return reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>(this); }
+    VkDeviceGroupPresentCapabilitiesKHR const *ptr() const { return reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR const *>(this); }
+};
+
+struct safe_VkDeviceGroupPresentInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t swapchainCount;
+    const uint32_t* pDeviceMasks;
+    VkDeviceGroupPresentModeFlagBitsKHR mode;
+    safe_VkDeviceGroupPresentInfoKHR(const VkDeviceGroupPresentInfoKHR* in_struct);
+    safe_VkDeviceGroupPresentInfoKHR(const safe_VkDeviceGroupPresentInfoKHR& src);
+    safe_VkDeviceGroupPresentInfoKHR& operator=(const safe_VkDeviceGroupPresentInfoKHR& src);
+    safe_VkDeviceGroupPresentInfoKHR();
+    ~safe_VkDeviceGroupPresentInfoKHR();
+    void initialize(const VkDeviceGroupPresentInfoKHR* in_struct);
+    void initialize(const safe_VkDeviceGroupPresentInfoKHR* src);
+    VkDeviceGroupPresentInfoKHR *ptr() { return reinterpret_cast<VkDeviceGroupPresentInfoKHR *>(this); }
+    VkDeviceGroupPresentInfoKHR const *ptr() const { return reinterpret_cast<VkDeviceGroupPresentInfoKHR const *>(this); }
+};
+
+struct safe_VkDeviceGroupSwapchainCreateInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkDeviceGroupPresentModeFlagsKHR modes;
+    safe_VkDeviceGroupSwapchainCreateInfoKHR(const VkDeviceGroupSwapchainCreateInfoKHR* in_struct);
+    safe_VkDeviceGroupSwapchainCreateInfoKHR(const safe_VkDeviceGroupSwapchainCreateInfoKHR& src);
+    safe_VkDeviceGroupSwapchainCreateInfoKHR& operator=(const safe_VkDeviceGroupSwapchainCreateInfoKHR& src);
+    safe_VkDeviceGroupSwapchainCreateInfoKHR();
+    ~safe_VkDeviceGroupSwapchainCreateInfoKHR();
+    void initialize(const VkDeviceGroupSwapchainCreateInfoKHR* in_struct);
+    void initialize(const safe_VkDeviceGroupSwapchainCreateInfoKHR* src);
+    VkDeviceGroupSwapchainCreateInfoKHR *ptr() { return reinterpret_cast<VkDeviceGroupSwapchainCreateInfoKHR *>(this); }
+    VkDeviceGroupSwapchainCreateInfoKHR const *ptr() const { return reinterpret_cast<VkDeviceGroupSwapchainCreateInfoKHR const *>(this); }
+};
+
+struct safe_VkDisplayPropertiesKHR {
+    VkDisplayKHR display;
+    const char* displayName;
+    VkExtent2D physicalDimensions;
+    VkExtent2D physicalResolution;
+    VkSurfaceTransformFlagsKHR supportedTransforms;
+    VkBool32 planeReorderPossible;
+    VkBool32 persistentContent;
+    safe_VkDisplayPropertiesKHR(const VkDisplayPropertiesKHR* in_struct);
+    safe_VkDisplayPropertiesKHR(const safe_VkDisplayPropertiesKHR& src);
+    safe_VkDisplayPropertiesKHR& operator=(const safe_VkDisplayPropertiesKHR& src);
+    safe_VkDisplayPropertiesKHR();
+    ~safe_VkDisplayPropertiesKHR();
+    void initialize(const VkDisplayPropertiesKHR* in_struct);
+    void initialize(const safe_VkDisplayPropertiesKHR* src);
+    VkDisplayPropertiesKHR *ptr() { return reinterpret_cast<VkDisplayPropertiesKHR *>(this); }
+    VkDisplayPropertiesKHR const *ptr() const { return reinterpret_cast<VkDisplayPropertiesKHR const *>(this); }
+};
+
+struct safe_VkDisplayModeCreateInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkDisplayModeCreateFlagsKHR flags;
+    VkDisplayModeParametersKHR parameters;
+    safe_VkDisplayModeCreateInfoKHR(const VkDisplayModeCreateInfoKHR* in_struct);
+    safe_VkDisplayModeCreateInfoKHR(const safe_VkDisplayModeCreateInfoKHR& src);
+    safe_VkDisplayModeCreateInfoKHR& operator=(const safe_VkDisplayModeCreateInfoKHR& src);
+    safe_VkDisplayModeCreateInfoKHR();
+    ~safe_VkDisplayModeCreateInfoKHR();
+    void initialize(const VkDisplayModeCreateInfoKHR* in_struct);
+    void initialize(const safe_VkDisplayModeCreateInfoKHR* src);
+    VkDisplayModeCreateInfoKHR *ptr() { return reinterpret_cast<VkDisplayModeCreateInfoKHR *>(this); }
+    VkDisplayModeCreateInfoKHR const *ptr() const { return reinterpret_cast<VkDisplayModeCreateInfoKHR const *>(this); }
+};
+
+struct safe_VkDisplaySurfaceCreateInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkDisplaySurfaceCreateFlagsKHR flags;
+    VkDisplayModeKHR displayMode;
+    uint32_t planeIndex;
+    uint32_t planeStackIndex;
+    VkSurfaceTransformFlagBitsKHR transform;
+    float globalAlpha;
+    VkDisplayPlaneAlphaFlagBitsKHR alphaMode;
+    VkExtent2D imageExtent;
+    safe_VkDisplaySurfaceCreateInfoKHR(const VkDisplaySurfaceCreateInfoKHR* in_struct);
+    safe_VkDisplaySurfaceCreateInfoKHR(const safe_VkDisplaySurfaceCreateInfoKHR& src);
+    safe_VkDisplaySurfaceCreateInfoKHR& operator=(const safe_VkDisplaySurfaceCreateInfoKHR& src);
+    safe_VkDisplaySurfaceCreateInfoKHR();
+    ~safe_VkDisplaySurfaceCreateInfoKHR();
+    void initialize(const VkDisplaySurfaceCreateInfoKHR* in_struct);
+    void initialize(const safe_VkDisplaySurfaceCreateInfoKHR* src);
+    VkDisplaySurfaceCreateInfoKHR *ptr() { return reinterpret_cast<VkDisplaySurfaceCreateInfoKHR *>(this); }
+    VkDisplaySurfaceCreateInfoKHR const *ptr() const { return reinterpret_cast<VkDisplaySurfaceCreateInfoKHR const *>(this); }
+};
+
+struct safe_VkDisplayPresentInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkRect2D srcRect;
+    VkRect2D dstRect;
+    VkBool32 persistent;
+    safe_VkDisplayPresentInfoKHR(const VkDisplayPresentInfoKHR* in_struct);
+    safe_VkDisplayPresentInfoKHR(const safe_VkDisplayPresentInfoKHR& src);
+    safe_VkDisplayPresentInfoKHR& operator=(const safe_VkDisplayPresentInfoKHR& src);
+    safe_VkDisplayPresentInfoKHR();
+    ~safe_VkDisplayPresentInfoKHR();
+    void initialize(const VkDisplayPresentInfoKHR* in_struct);
+    void initialize(const safe_VkDisplayPresentInfoKHR* src);
+    VkDisplayPresentInfoKHR *ptr() { return reinterpret_cast<VkDisplayPresentInfoKHR *>(this); }
+    VkDisplayPresentInfoKHR const *ptr() const { return reinterpret_cast<VkDisplayPresentInfoKHR const *>(this); }
+};
+
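+// Note on usage: the safe_Vk* wrappers declared above mirror the raw Vulkan
+// structures field-for-field and add copy construction, assignment, and
+// initialize() helpers so that a layer or loader component can keep its own
+// copy of a caller-provided structure. A minimal, illustrative sketch only
+// (the wrapper type and ptr() come from the declarations above; the function
+// name and surrounding usage are assumptions, not part of this header):
+//
+//   void CaptureSwapchainCreateInfo(const VkSwapchainCreateInfoKHR* pCreateInfo) {
+//       safe_VkSwapchainCreateInfoKHR saved(pCreateInfo);    // copy the caller's struct
+//       const VkSwapchainCreateInfoKHR* raw = saved.ptr();   // view it as the raw Vulkan type
+//       (void)raw;                                           // pass to Vulkan entry points as needed
+//   }
+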
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+struct safe_VkXlibSurfaceCreateInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkXlibSurfaceCreateFlagsKHR flags;
+    Display* dpy;
+    Window window;
+    safe_VkXlibSurfaceCreateInfoKHR(const VkXlibSurfaceCreateInfoKHR* in_struct);
+    safe_VkXlibSurfaceCreateInfoKHR(const safe_VkXlibSurfaceCreateInfoKHR& src);
+    safe_VkXlibSurfaceCreateInfoKHR& operator=(const safe_VkXlibSurfaceCreateInfoKHR& src);
+    safe_VkXlibSurfaceCreateInfoKHR();
+    ~safe_VkXlibSurfaceCreateInfoKHR();
+    void initialize(const VkXlibSurfaceCreateInfoKHR* in_struct);
+    void initialize(const safe_VkXlibSurfaceCreateInfoKHR* src);
+    VkXlibSurfaceCreateInfoKHR *ptr() { return reinterpret_cast<VkXlibSurfaceCreateInfoKHR *>(this); }
+    VkXlibSurfaceCreateInfoKHR const *ptr() const { return reinterpret_cast<VkXlibSurfaceCreateInfoKHR const *>(this); }
+};
+#endif // VK_USE_PLATFORM_XLIB_KHR
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+struct safe_VkXcbSurfaceCreateInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkXcbSurfaceCreateFlagsKHR flags;
+    xcb_connection_t* connection;
+    xcb_window_t window;
+    safe_VkXcbSurfaceCreateInfoKHR(const VkXcbSurfaceCreateInfoKHR* in_struct);
+    safe_VkXcbSurfaceCreateInfoKHR(const safe_VkXcbSurfaceCreateInfoKHR& src);
+    safe_VkXcbSurfaceCreateInfoKHR& operator=(const safe_VkXcbSurfaceCreateInfoKHR& src);
+    safe_VkXcbSurfaceCreateInfoKHR();
+    ~safe_VkXcbSurfaceCreateInfoKHR();
+    void initialize(const VkXcbSurfaceCreateInfoKHR* in_struct);
+    void initialize(const safe_VkXcbSurfaceCreateInfoKHR* src);
+    VkXcbSurfaceCreateInfoKHR *ptr() { return reinterpret_cast<VkXcbSurfaceCreateInfoKHR *>(this); }
+    VkXcbSurfaceCreateInfoKHR const *ptr() const { return reinterpret_cast<VkXcbSurfaceCreateInfoKHR const *>(this); }
+};
+#endif // VK_USE_PLATFORM_XCB_KHR
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+struct safe_VkWaylandSurfaceCreateInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkWaylandSurfaceCreateFlagsKHR flags;
+    struct wl_display* display;
+    struct wl_surface* surface;
+    safe_VkWaylandSurfaceCreateInfoKHR(const VkWaylandSurfaceCreateInfoKHR* in_struct);
+    safe_VkWaylandSurfaceCreateInfoKHR(const safe_VkWaylandSurfaceCreateInfoKHR& src);
+    safe_VkWaylandSurfaceCreateInfoKHR& operator=(const safe_VkWaylandSurfaceCreateInfoKHR& src);
+    safe_VkWaylandSurfaceCreateInfoKHR();
+    ~safe_VkWaylandSurfaceCreateInfoKHR();
+    void initialize(const VkWaylandSurfaceCreateInfoKHR* in_struct);
+    void initialize(const safe_VkWaylandSurfaceCreateInfoKHR* src);
+    VkWaylandSurfaceCreateInfoKHR *ptr() { return reinterpret_cast<VkWaylandSurfaceCreateInfoKHR *>(this); }
+    VkWaylandSurfaceCreateInfoKHR const *ptr() const { return reinterpret_cast<VkWaylandSurfaceCreateInfoKHR const *>(this); }
+};
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+struct safe_VkAndroidSurfaceCreateInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkAndroidSurfaceCreateFlagsKHR flags;
+    struct ANativeWindow* window;
+    safe_VkAndroidSurfaceCreateInfoKHR(const VkAndroidSurfaceCreateInfoKHR* in_struct);
+    safe_VkAndroidSurfaceCreateInfoKHR(const safe_VkAndroidSurfaceCreateInfoKHR& src);
+    safe_VkAndroidSurfaceCreateInfoKHR& operator=(const safe_VkAndroidSurfaceCreateInfoKHR& src);
+    safe_VkAndroidSurfaceCreateInfoKHR();
+    ~safe_VkAndroidSurfaceCreateInfoKHR();
+    void initialize(const VkAndroidSurfaceCreateInfoKHR* in_struct);
+    void initialize(const safe_VkAndroidSurfaceCreateInfoKHR* src);
+    VkAndroidSurfaceCreateInfoKHR *ptr() { return reinterpret_cast<VkAndroidSurfaceCreateInfoKHR *>(this); }
+    VkAndroidSurfaceCreateInfoKHR const *ptr() const { return reinterpret_cast<VkAndroidSurfaceCreateInfoKHR const *>(this); }
+};
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+struct safe_VkWin32SurfaceCreateInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkWin32SurfaceCreateFlagsKHR flags;
+    HINSTANCE hinstance;
+    HWND hwnd;
+    safe_VkWin32SurfaceCreateInfoKHR(const VkWin32SurfaceCreateInfoKHR* in_struct);
+    safe_VkWin32SurfaceCreateInfoKHR(const safe_VkWin32SurfaceCreateInfoKHR& src);
+    safe_VkWin32SurfaceCreateInfoKHR& operator=(const safe_VkWin32SurfaceCreateInfoKHR& src);
+    safe_VkWin32SurfaceCreateInfoKHR();
+    ~safe_VkWin32SurfaceCreateInfoKHR();
+    void initialize(const VkWin32SurfaceCreateInfoKHR* in_struct);
+    void initialize(const safe_VkWin32SurfaceCreateInfoKHR* src);
+    VkWin32SurfaceCreateInfoKHR *ptr() { return reinterpret_cast<VkWin32SurfaceCreateInfoKHR *>(this); }
+    VkWin32SurfaceCreateInfoKHR const *ptr() const { return reinterpret_cast<VkWin32SurfaceCreateInfoKHR const *>(this); }
+};
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+struct safe_VkImportMemoryWin32HandleInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkExternalMemoryHandleTypeFlagBits handleType;
+    HANDLE handle;
+    LPCWSTR name;
+    safe_VkImportMemoryWin32HandleInfoKHR(const VkImportMemoryWin32HandleInfoKHR* in_struct);
+    safe_VkImportMemoryWin32HandleInfoKHR(const safe_VkImportMemoryWin32HandleInfoKHR& src);
+    safe_VkImportMemoryWin32HandleInfoKHR& operator=(const safe_VkImportMemoryWin32HandleInfoKHR& src);
+    safe_VkImportMemoryWin32HandleInfoKHR();
+    ~safe_VkImportMemoryWin32HandleInfoKHR();
+    void initialize(const VkImportMemoryWin32HandleInfoKHR* in_struct);
+    void initialize(const safe_VkImportMemoryWin32HandleInfoKHR* src);
+    VkImportMemoryWin32HandleInfoKHR *ptr() { return reinterpret_cast<VkImportMemoryWin32HandleInfoKHR *>(this); }
+    VkImportMemoryWin32HandleInfoKHR const *ptr() const { return reinterpret_cast<VkImportMemoryWin32HandleInfoKHR const *>(this); }
+};
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+struct safe_VkExportMemoryWin32HandleInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    const SECURITY_ATTRIBUTES* pAttributes;
+    DWORD dwAccess;
+    LPCWSTR name;
+    safe_VkExportMemoryWin32HandleInfoKHR(const VkExportMemoryWin32HandleInfoKHR* in_struct);
+    safe_VkExportMemoryWin32HandleInfoKHR(const safe_VkExportMemoryWin32HandleInfoKHR& src);
+    safe_VkExportMemoryWin32HandleInfoKHR& operator=(const safe_VkExportMemoryWin32HandleInfoKHR& src);
+    safe_VkExportMemoryWin32HandleInfoKHR();
+    ~safe_VkExportMemoryWin32HandleInfoKHR();
+    void initialize(const VkExportMemoryWin32HandleInfoKHR* in_struct);
+    void initialize(const safe_VkExportMemoryWin32HandleInfoKHR* src);
+    VkExportMemoryWin32HandleInfoKHR *ptr() { return reinterpret_cast<VkExportMemoryWin32HandleInfoKHR *>(this); }
+    VkExportMemoryWin32HandleInfoKHR const *ptr() const { return reinterpret_cast<VkExportMemoryWin32HandleInfoKHR const *>(this); }
+};
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+struct safe_VkMemoryWin32HandlePropertiesKHR {
+    VkStructureType sType;
+    void* pNext;
+    uint32_t memoryTypeBits;
+    safe_VkMemoryWin32HandlePropertiesKHR(const VkMemoryWin32HandlePropertiesKHR* in_struct);
+    safe_VkMemoryWin32HandlePropertiesKHR(const safe_VkMemoryWin32HandlePropertiesKHR& src);
+    safe_VkMemoryWin32HandlePropertiesKHR& operator=(const safe_VkMemoryWin32HandlePropertiesKHR& src);
+    safe_VkMemoryWin32HandlePropertiesKHR();
+    ~safe_VkMemoryWin32HandlePropertiesKHR();
+    void initialize(const VkMemoryWin32HandlePropertiesKHR* in_struct);
+    void initialize(const safe_VkMemoryWin32HandlePropertiesKHR* src);
+    VkMemoryWin32HandlePropertiesKHR *ptr() { return reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>(this); }
+    VkMemoryWin32HandlePropertiesKHR const *ptr() const { return reinterpret_cast<VkMemoryWin32HandlePropertiesKHR const *>(this); }
+};
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+struct safe_VkMemoryGetWin32HandleInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkDeviceMemory memory;
+    VkExternalMemoryHandleTypeFlagBits handleType;
+    safe_VkMemoryGetWin32HandleInfoKHR(const VkMemoryGetWin32HandleInfoKHR* in_struct);
+    safe_VkMemoryGetWin32HandleInfoKHR(const safe_VkMemoryGetWin32HandleInfoKHR& src);
+    safe_VkMemoryGetWin32HandleInfoKHR& operator=(const safe_VkMemoryGetWin32HandleInfoKHR& src);
+    safe_VkMemoryGetWin32HandleInfoKHR();
+    ~safe_VkMemoryGetWin32HandleInfoKHR();
+    void initialize(const VkMemoryGetWin32HandleInfoKHR* in_struct);
+    void initialize(const safe_VkMemoryGetWin32HandleInfoKHR* src);
+    VkMemoryGetWin32HandleInfoKHR *ptr() { return reinterpret_cast<VkMemoryGetWin32HandleInfoKHR *>(this); }
+    VkMemoryGetWin32HandleInfoKHR const *ptr() const { return reinterpret_cast<VkMemoryGetWin32HandleInfoKHR const *>(this); }
+};
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+struct safe_VkImportMemoryFdInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkExternalMemoryHandleTypeFlagBits handleType;
+    int fd;
+    safe_VkImportMemoryFdInfoKHR(const VkImportMemoryFdInfoKHR* in_struct);
+    safe_VkImportMemoryFdInfoKHR(const safe_VkImportMemoryFdInfoKHR& src);
+    safe_VkImportMemoryFdInfoKHR& operator=(const safe_VkImportMemoryFdInfoKHR& src);
+    safe_VkImportMemoryFdInfoKHR();
+    ~safe_VkImportMemoryFdInfoKHR();
+    void initialize(const VkImportMemoryFdInfoKHR* in_struct);
+    void initialize(const safe_VkImportMemoryFdInfoKHR* src);
+    VkImportMemoryFdInfoKHR *ptr() { return reinterpret_cast<VkImportMemoryFdInfoKHR *>(this); }
+    VkImportMemoryFdInfoKHR const *ptr() const { return reinterpret_cast<VkImportMemoryFdInfoKHR const *>(this); }
+};
+
+struct safe_VkMemoryFdPropertiesKHR {
+    VkStructureType sType;
+    void* pNext;
+    uint32_t memoryTypeBits;
+    safe_VkMemoryFdPropertiesKHR(const VkMemoryFdPropertiesKHR* in_struct);
+    safe_VkMemoryFdPropertiesKHR(const safe_VkMemoryFdPropertiesKHR& src);
+    safe_VkMemoryFdPropertiesKHR& operator=(const safe_VkMemoryFdPropertiesKHR& src);
+    safe_VkMemoryFdPropertiesKHR();
+    ~safe_VkMemoryFdPropertiesKHR();
+    void initialize(const VkMemoryFdPropertiesKHR* in_struct);
+    void initialize(const safe_VkMemoryFdPropertiesKHR* src);
+    VkMemoryFdPropertiesKHR *ptr() { return reinterpret_cast<VkMemoryFdPropertiesKHR *>(this); }
+    VkMemoryFdPropertiesKHR const *ptr() const { return reinterpret_cast<VkMemoryFdPropertiesKHR const *>(this); }
+};
+
+struct safe_VkMemoryGetFdInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkDeviceMemory memory;
+    VkExternalMemoryHandleTypeFlagBits handleType;
+    safe_VkMemoryGetFdInfoKHR(const VkMemoryGetFdInfoKHR* in_struct);
+    safe_VkMemoryGetFdInfoKHR(const safe_VkMemoryGetFdInfoKHR& src);
+    safe_VkMemoryGetFdInfoKHR& operator=(const safe_VkMemoryGetFdInfoKHR& src);
+    safe_VkMemoryGetFdInfoKHR();
+    ~safe_VkMemoryGetFdInfoKHR();
+    void initialize(const VkMemoryGetFdInfoKHR* in_struct);
+    void initialize(const safe_VkMemoryGetFdInfoKHR* src);
+    VkMemoryGetFdInfoKHR *ptr() { return reinterpret_cast<VkMemoryGetFdInfoKHR *>(this); }
+    VkMemoryGetFdInfoKHR const *ptr() const { return reinterpret_cast<VkMemoryGetFdInfoKHR const *>(this); }
+};
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+struct safe_VkWin32KeyedMutexAcquireReleaseInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t acquireCount;
+    VkDeviceMemory* pAcquireSyncs;
+    const uint64_t* pAcquireKeys;
+    const uint32_t* pAcquireTimeouts;
+    uint32_t releaseCount;
+    VkDeviceMemory* pReleaseSyncs;
+    const uint64_t* pReleaseKeys;
+    safe_VkWin32KeyedMutexAcquireReleaseInfoKHR(const VkWin32KeyedMutexAcquireReleaseInfoKHR* in_struct);
+    safe_VkWin32KeyedMutexAcquireReleaseInfoKHR(const safe_VkWin32KeyedMutexAcquireReleaseInfoKHR& src);
+    safe_VkWin32KeyedMutexAcquireReleaseInfoKHR& operator=(const safe_VkWin32KeyedMutexAcquireReleaseInfoKHR& src);
+    safe_VkWin32KeyedMutexAcquireReleaseInfoKHR();
+    ~safe_VkWin32KeyedMutexAcquireReleaseInfoKHR();
+    void initialize(const VkWin32KeyedMutexAcquireReleaseInfoKHR* in_struct);
+    void initialize(const safe_VkWin32KeyedMutexAcquireReleaseInfoKHR* src);
+    VkWin32KeyedMutexAcquireReleaseInfoKHR *ptr() { return reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoKHR *>(this); }
+    VkWin32KeyedMutexAcquireReleaseInfoKHR const *ptr() const { return reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoKHR const *>(this); }
+};
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+struct safe_VkImportSemaphoreWin32HandleInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkSemaphore semaphore;
+    VkSemaphoreImportFlags flags;
+    VkExternalSemaphoreHandleTypeFlagBits handleType;
+    HANDLE handle;
+    LPCWSTR name;
+    safe_VkImportSemaphoreWin32HandleInfoKHR(const VkImportSemaphoreWin32HandleInfoKHR* in_struct);
+    safe_VkImportSemaphoreWin32HandleInfoKHR(const safe_VkImportSemaphoreWin32HandleInfoKHR& src);
+    safe_VkImportSemaphoreWin32HandleInfoKHR& operator=(const safe_VkImportSemaphoreWin32HandleInfoKHR& src);
+    safe_VkImportSemaphoreWin32HandleInfoKHR();
+    ~safe_VkImportSemaphoreWin32HandleInfoKHR();
+    void initialize(const VkImportSemaphoreWin32HandleInfoKHR* in_struct);
+    void initialize(const safe_VkImportSemaphoreWin32HandleInfoKHR* src);
+    VkImportSemaphoreWin32HandleInfoKHR *ptr() { return reinterpret_cast<VkImportSemaphoreWin32HandleInfoKHR *>(this); }
+    VkImportSemaphoreWin32HandleInfoKHR const *ptr() const { return reinterpret_cast<VkImportSemaphoreWin32HandleInfoKHR const *>(this); }
+};
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+struct safe_VkExportSemaphoreWin32HandleInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    const SECURITY_ATTRIBUTES* pAttributes;
+    DWORD dwAccess;
+    LPCWSTR name;
+    safe_VkExportSemaphoreWin32HandleInfoKHR(const VkExportSemaphoreWin32HandleInfoKHR* in_struct);
+    safe_VkExportSemaphoreWin32HandleInfoKHR(const safe_VkExportSemaphoreWin32HandleInfoKHR& src);
+    safe_VkExportSemaphoreWin32HandleInfoKHR& operator=(const safe_VkExportSemaphoreWin32HandleInfoKHR& src);
+    safe_VkExportSemaphoreWin32HandleInfoKHR();
+    ~safe_VkExportSemaphoreWin32HandleInfoKHR();
+    void initialize(const VkExportSemaphoreWin32HandleInfoKHR* in_struct);
+    void initialize(const safe_VkExportSemaphoreWin32HandleInfoKHR* src);
+    VkExportSemaphoreWin32HandleInfoKHR *ptr() { return reinterpret_cast<VkExportSemaphoreWin32HandleInfoKHR *>(this); }
+    VkExportSemaphoreWin32HandleInfoKHR const *ptr() const { return reinterpret_cast<VkExportSemaphoreWin32HandleInfoKHR const *>(this); }
+};
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+struct safe_VkD3D12FenceSubmitInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t waitSemaphoreValuesCount;
+    const uint64_t* pWaitSemaphoreValues;
+    uint32_t signalSemaphoreValuesCount;
+    const uint64_t* pSignalSemaphoreValues;
+    safe_VkD3D12FenceSubmitInfoKHR(const VkD3D12FenceSubmitInfoKHR* in_struct);
+    safe_VkD3D12FenceSubmitInfoKHR(const safe_VkD3D12FenceSubmitInfoKHR& src);
+    safe_VkD3D12FenceSubmitInfoKHR& operator=(const safe_VkD3D12FenceSubmitInfoKHR& src);
+    safe_VkD3D12FenceSubmitInfoKHR();
+    ~safe_VkD3D12FenceSubmitInfoKHR();
+    void initialize(const VkD3D12FenceSubmitInfoKHR* in_struct);
+    void initialize(const safe_VkD3D12FenceSubmitInfoKHR* src);
+    VkD3D12FenceSubmitInfoKHR *ptr() { return reinterpret_cast<VkD3D12FenceSubmitInfoKHR *>(this); }
+    VkD3D12FenceSubmitInfoKHR const *ptr() const { return reinterpret_cast<VkD3D12FenceSubmitInfoKHR const *>(this); }
+};
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+struct safe_VkSemaphoreGetWin32HandleInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkSemaphore semaphore;
+    VkExternalSemaphoreHandleTypeFlagBits handleType;
+    safe_VkSemaphoreGetWin32HandleInfoKHR(const VkSemaphoreGetWin32HandleInfoKHR* in_struct);
+    safe_VkSemaphoreGetWin32HandleInfoKHR(const safe_VkSemaphoreGetWin32HandleInfoKHR& src);
+    safe_VkSemaphoreGetWin32HandleInfoKHR& operator=(const safe_VkSemaphoreGetWin32HandleInfoKHR& src);
+    safe_VkSemaphoreGetWin32HandleInfoKHR();
+    ~safe_VkSemaphoreGetWin32HandleInfoKHR();
+    void initialize(const VkSemaphoreGetWin32HandleInfoKHR* in_struct);
+    void initialize(const safe_VkSemaphoreGetWin32HandleInfoKHR* src);
+    VkSemaphoreGetWin32HandleInfoKHR *ptr() { return reinterpret_cast<VkSemaphoreGetWin32HandleInfoKHR *>(this); }
+    VkSemaphoreGetWin32HandleInfoKHR const *ptr() const { return reinterpret_cast<VkSemaphoreGetWin32HandleInfoKHR const *>(this); }
+};
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+struct safe_VkImportSemaphoreFdInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkSemaphore semaphore;
+    VkSemaphoreImportFlags flags;
+    VkExternalSemaphoreHandleTypeFlagBits handleType;
+    int fd;
+    safe_VkImportSemaphoreFdInfoKHR(const VkImportSemaphoreFdInfoKHR* in_struct);
+    safe_VkImportSemaphoreFdInfoKHR(const safe_VkImportSemaphoreFdInfoKHR& src);
+    safe_VkImportSemaphoreFdInfoKHR& operator=(const safe_VkImportSemaphoreFdInfoKHR& src);
+    safe_VkImportSemaphoreFdInfoKHR();
+    ~safe_VkImportSemaphoreFdInfoKHR();
+    void initialize(const VkImportSemaphoreFdInfoKHR* in_struct);
+    void initialize(const safe_VkImportSemaphoreFdInfoKHR* src);
+    VkImportSemaphoreFdInfoKHR *ptr() { return reinterpret_cast<VkImportSemaphoreFdInfoKHR *>(this); }
+    VkImportSemaphoreFdInfoKHR const *ptr() const { return reinterpret_cast<VkImportSemaphoreFdInfoKHR const *>(this); }
+};
+
+struct safe_VkSemaphoreGetFdInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkSemaphore semaphore;
+    VkExternalSemaphoreHandleTypeFlagBits handleType;
+    safe_VkSemaphoreGetFdInfoKHR(const VkSemaphoreGetFdInfoKHR* in_struct);
+    safe_VkSemaphoreGetFdInfoKHR(const safe_VkSemaphoreGetFdInfoKHR& src);
+    safe_VkSemaphoreGetFdInfoKHR& operator=(const safe_VkSemaphoreGetFdInfoKHR& src);
+    safe_VkSemaphoreGetFdInfoKHR();
+    ~safe_VkSemaphoreGetFdInfoKHR();
+    void initialize(const VkSemaphoreGetFdInfoKHR* in_struct);
+    void initialize(const safe_VkSemaphoreGetFdInfoKHR* src);
+    VkSemaphoreGetFdInfoKHR *ptr() { return reinterpret_cast<VkSemaphoreGetFdInfoKHR *>(this); }
+    VkSemaphoreGetFdInfoKHR const *ptr() const { return reinterpret_cast<VkSemaphoreGetFdInfoKHR const *>(this); }
+};
+
+struct safe_VkPhysicalDevicePushDescriptorPropertiesKHR {
+    VkStructureType sType;
+    void* pNext;
+    uint32_t maxPushDescriptors;
+    safe_VkPhysicalDevicePushDescriptorPropertiesKHR(const VkPhysicalDevicePushDescriptorPropertiesKHR* in_struct);
+    safe_VkPhysicalDevicePushDescriptorPropertiesKHR(const safe_VkPhysicalDevicePushDescriptorPropertiesKHR& src);
+    safe_VkPhysicalDevicePushDescriptorPropertiesKHR& operator=(const safe_VkPhysicalDevicePushDescriptorPropertiesKHR& src);
+    safe_VkPhysicalDevicePushDescriptorPropertiesKHR();
+    ~safe_VkPhysicalDevicePushDescriptorPropertiesKHR();
+    void initialize(const VkPhysicalDevicePushDescriptorPropertiesKHR* in_struct);
+    void initialize(const safe_VkPhysicalDevicePushDescriptorPropertiesKHR* src);
+    VkPhysicalDevicePushDescriptorPropertiesKHR *ptr() { return reinterpret_cast<VkPhysicalDevicePushDescriptorPropertiesKHR *>(this); }
+    VkPhysicalDevicePushDescriptorPropertiesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDevicePushDescriptorPropertiesKHR const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 shaderFloat16;
+    VkBool32 shaderInt8;
+    safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR(const VkPhysicalDeviceShaderFloat16Int8FeaturesKHR* in_struct);
+    safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR(const safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR& src);
+    safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR& operator=(const safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR& src);
+    safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR();
+    ~safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR();
+    void initialize(const VkPhysicalDeviceShaderFloat16Int8FeaturesKHR* in_struct);
+    void initialize(const safe_VkPhysicalDeviceShaderFloat16Int8FeaturesKHR* src);
+    VkPhysicalDeviceShaderFloat16Int8FeaturesKHR *ptr() { return reinterpret_cast<VkPhysicalDeviceShaderFloat16Int8FeaturesKHR *>(this); }
+    VkPhysicalDeviceShaderFloat16Int8FeaturesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDeviceShaderFloat16Int8FeaturesKHR const *>(this); }
+};
+
+struct safe_VkPresentRegionKHR {
+    uint32_t rectangleCount;
+    const VkRectLayerKHR* pRectangles;
+    safe_VkPresentRegionKHR(const VkPresentRegionKHR* in_struct);
+    safe_VkPresentRegionKHR(const safe_VkPresentRegionKHR& src);
+    safe_VkPresentRegionKHR& operator=(const safe_VkPresentRegionKHR& src);
+    safe_VkPresentRegionKHR();
+    ~safe_VkPresentRegionKHR();
+    void initialize(const VkPresentRegionKHR* in_struct);
+    void initialize(const safe_VkPresentRegionKHR* src);
+    VkPresentRegionKHR *ptr() { return reinterpret_cast<VkPresentRegionKHR *>(this); }
+    VkPresentRegionKHR const *ptr() const { return reinterpret_cast<VkPresentRegionKHR const *>(this); }
+};
+
+struct safe_VkPresentRegionsKHR {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t swapchainCount;
+    safe_VkPresentRegionKHR* pRegions;
+    safe_VkPresentRegionsKHR(const VkPresentRegionsKHR* in_struct);
+    safe_VkPresentRegionsKHR(const safe_VkPresentRegionsKHR& src);
+    safe_VkPresentRegionsKHR& operator=(const safe_VkPresentRegionsKHR& src);
+    safe_VkPresentRegionsKHR();
+    ~safe_VkPresentRegionsKHR();
+    void initialize(const VkPresentRegionsKHR* in_struct);
+    void initialize(const safe_VkPresentRegionsKHR* src);
+    VkPresentRegionsKHR *ptr() { return reinterpret_cast<VkPresentRegionsKHR *>(this); }
+    VkPresentRegionsKHR const *ptr() const { return reinterpret_cast<VkPresentRegionsKHR const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 imagelessFramebuffer;
+    safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR(const VkPhysicalDeviceImagelessFramebufferFeaturesKHR* in_struct);
+    safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR(const safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR& src);
+    safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR& operator=(const safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR& src);
+    safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR();
+    ~safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR();
+    void initialize(const VkPhysicalDeviceImagelessFramebufferFeaturesKHR* in_struct);
+    void initialize(const safe_VkPhysicalDeviceImagelessFramebufferFeaturesKHR* src);
+    VkPhysicalDeviceImagelessFramebufferFeaturesKHR *ptr() { return reinterpret_cast<VkPhysicalDeviceImagelessFramebufferFeaturesKHR *>(this); }
+    VkPhysicalDeviceImagelessFramebufferFeaturesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDeviceImagelessFramebufferFeaturesKHR const *>(this); }
+};
+
+struct safe_VkFramebufferAttachmentImageInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkImageCreateFlags flags;
+    VkImageUsageFlags usage;
+    uint32_t width;
+    uint32_t height;
+    uint32_t layerCount;
+    uint32_t viewFormatCount;
+    const VkFormat* pViewFormats;
+    safe_VkFramebufferAttachmentImageInfoKHR(const VkFramebufferAttachmentImageInfoKHR* in_struct);
+    safe_VkFramebufferAttachmentImageInfoKHR(const safe_VkFramebufferAttachmentImageInfoKHR& src);
+    safe_VkFramebufferAttachmentImageInfoKHR& operator=(const safe_VkFramebufferAttachmentImageInfoKHR& src);
+    safe_VkFramebufferAttachmentImageInfoKHR();
+    ~safe_VkFramebufferAttachmentImageInfoKHR();
+    void initialize(const VkFramebufferAttachmentImageInfoKHR* in_struct);
+    void initialize(const safe_VkFramebufferAttachmentImageInfoKHR* src);
+    VkFramebufferAttachmentImageInfoKHR *ptr() { return reinterpret_cast<VkFramebufferAttachmentImageInfoKHR *>(this); }
+    VkFramebufferAttachmentImageInfoKHR const *ptr() const { return reinterpret_cast<VkFramebufferAttachmentImageInfoKHR const *>(this); }
+};
+
+struct safe_VkFramebufferAttachmentsCreateInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t attachmentImageInfoCount;
+    safe_VkFramebufferAttachmentImageInfoKHR* pAttachmentImageInfos;
+    safe_VkFramebufferAttachmentsCreateInfoKHR(const VkFramebufferAttachmentsCreateInfoKHR* in_struct);
+    safe_VkFramebufferAttachmentsCreateInfoKHR(const safe_VkFramebufferAttachmentsCreateInfoKHR& src);
+    safe_VkFramebufferAttachmentsCreateInfoKHR& operator=(const safe_VkFramebufferAttachmentsCreateInfoKHR& src);
+    safe_VkFramebufferAttachmentsCreateInfoKHR();
+    ~safe_VkFramebufferAttachmentsCreateInfoKHR();
+    void initialize(const VkFramebufferAttachmentsCreateInfoKHR* in_struct);
+    void initialize(const safe_VkFramebufferAttachmentsCreateInfoKHR* src);
+    VkFramebufferAttachmentsCreateInfoKHR *ptr() { return reinterpret_cast<VkFramebufferAttachmentsCreateInfoKHR *>(this); }
+    VkFramebufferAttachmentsCreateInfoKHR const *ptr() const { return reinterpret_cast<VkFramebufferAttachmentsCreateInfoKHR const *>(this); }
+};
+
+struct safe_VkRenderPassAttachmentBeginInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t attachmentCount;
+    VkImageView* pAttachments;
+    safe_VkRenderPassAttachmentBeginInfoKHR(const VkRenderPassAttachmentBeginInfoKHR* in_struct);
+    safe_VkRenderPassAttachmentBeginInfoKHR(const safe_VkRenderPassAttachmentBeginInfoKHR& src);
+    safe_VkRenderPassAttachmentBeginInfoKHR& operator=(const safe_VkRenderPassAttachmentBeginInfoKHR& src);
+    safe_VkRenderPassAttachmentBeginInfoKHR();
+    ~safe_VkRenderPassAttachmentBeginInfoKHR();
+    void initialize(const VkRenderPassAttachmentBeginInfoKHR* in_struct);
+    void initialize(const safe_VkRenderPassAttachmentBeginInfoKHR* src);
+    VkRenderPassAttachmentBeginInfoKHR *ptr() { return reinterpret_cast<VkRenderPassAttachmentBeginInfoKHR *>(this); }
+    VkRenderPassAttachmentBeginInfoKHR const *ptr() const { return reinterpret_cast<VkRenderPassAttachmentBeginInfoKHR const *>(this); }
+};
+
+struct safe_VkAttachmentDescription2KHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkAttachmentDescriptionFlags flags;
+    VkFormat format;
+    VkSampleCountFlagBits samples;
+    VkAttachmentLoadOp loadOp;
+    VkAttachmentStoreOp storeOp;
+    VkAttachmentLoadOp stencilLoadOp;
+    VkAttachmentStoreOp stencilStoreOp;
+    VkImageLayout initialLayout;
+    VkImageLayout finalLayout;
+    safe_VkAttachmentDescription2KHR(const VkAttachmentDescription2KHR* in_struct);
+    safe_VkAttachmentDescription2KHR(const safe_VkAttachmentDescription2KHR& src);
+    safe_VkAttachmentDescription2KHR& operator=(const safe_VkAttachmentDescription2KHR& src);
+    safe_VkAttachmentDescription2KHR();
+    ~safe_VkAttachmentDescription2KHR();
+    void initialize(const VkAttachmentDescription2KHR* in_struct);
+    void initialize(const safe_VkAttachmentDescription2KHR* src);
+    VkAttachmentDescription2KHR *ptr() { return reinterpret_cast<VkAttachmentDescription2KHR *>(this); }
+    VkAttachmentDescription2KHR const *ptr() const { return reinterpret_cast<VkAttachmentDescription2KHR const *>(this); }
+};
+
+struct safe_VkAttachmentReference2KHR {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t attachment;
+    VkImageLayout layout;
+    VkImageAspectFlags aspectMask;
+    safe_VkAttachmentReference2KHR(const VkAttachmentReference2KHR* in_struct);
+    safe_VkAttachmentReference2KHR(const safe_VkAttachmentReference2KHR& src);
+    safe_VkAttachmentReference2KHR& operator=(const safe_VkAttachmentReference2KHR& src);
+    safe_VkAttachmentReference2KHR();
+    ~safe_VkAttachmentReference2KHR();
+    void initialize(const VkAttachmentReference2KHR* in_struct);
+    void initialize(const safe_VkAttachmentReference2KHR* src);
+    VkAttachmentReference2KHR *ptr() { return reinterpret_cast<VkAttachmentReference2KHR *>(this); }
+    VkAttachmentReference2KHR const *ptr() const { return reinterpret_cast<VkAttachmentReference2KHR const *>(this); }
+};
+
+struct safe_VkSubpassDescription2KHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkSubpassDescriptionFlags flags;
+    VkPipelineBindPoint pipelineBindPoint;
+    uint32_t viewMask;
+    uint32_t inputAttachmentCount;
+    safe_VkAttachmentReference2KHR* pInputAttachments;
+    uint32_t colorAttachmentCount;
+    safe_VkAttachmentReference2KHR* pColorAttachments;
+    safe_VkAttachmentReference2KHR* pResolveAttachments;
+    safe_VkAttachmentReference2KHR* pDepthStencilAttachment;
+    uint32_t preserveAttachmentCount;
+    const uint32_t* pPreserveAttachments;
+    safe_VkSubpassDescription2KHR(const VkSubpassDescription2KHR* in_struct);
+    safe_VkSubpassDescription2KHR(const safe_VkSubpassDescription2KHR& src);
+    safe_VkSubpassDescription2KHR& operator=(const safe_VkSubpassDescription2KHR& src);
+    safe_VkSubpassDescription2KHR();
+    ~safe_VkSubpassDescription2KHR();
+    void initialize(const VkSubpassDescription2KHR* in_struct);
+    void initialize(const safe_VkSubpassDescription2KHR* src);
+    VkSubpassDescription2KHR *ptr() { return reinterpret_cast<VkSubpassDescription2KHR *>(this); }
+    VkSubpassDescription2KHR const *ptr() const { return reinterpret_cast<VkSubpassDescription2KHR const *>(this); }
+};
+
+struct safe_VkSubpassDependency2KHR {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t srcSubpass;
+    uint32_t dstSubpass;
+    VkPipelineStageFlags srcStageMask;
+    VkPipelineStageFlags dstStageMask;
+    VkAccessFlags srcAccessMask;
+    VkAccessFlags dstAccessMask;
+    VkDependencyFlags dependencyFlags;
+    int32_t viewOffset;
+    safe_VkSubpassDependency2KHR(const VkSubpassDependency2KHR* in_struct);
+    safe_VkSubpassDependency2KHR(const safe_VkSubpassDependency2KHR& src);
+    safe_VkSubpassDependency2KHR& operator=(const safe_VkSubpassDependency2KHR& src);
+    safe_VkSubpassDependency2KHR();
+    ~safe_VkSubpassDependency2KHR();
+    void initialize(const VkSubpassDependency2KHR* in_struct);
+    void initialize(const safe_VkSubpassDependency2KHR* src);
+    VkSubpassDependency2KHR *ptr() { return reinterpret_cast<VkSubpassDependency2KHR *>(this); }
+    VkSubpassDependency2KHR const *ptr() const { return reinterpret_cast<VkSubpassDependency2KHR const *>(this); }
+};
+
+struct safe_VkRenderPassCreateInfo2KHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkRenderPassCreateFlags flags;
+    uint32_t attachmentCount;
+    safe_VkAttachmentDescription2KHR* pAttachments;
+    uint32_t subpassCount;
+    safe_VkSubpassDescription2KHR* pSubpasses;
+    uint32_t dependencyCount;
+    safe_VkSubpassDependency2KHR* pDependencies;
+    uint32_t correlatedViewMaskCount;
+    const uint32_t* pCorrelatedViewMasks;
+    safe_VkRenderPassCreateInfo2KHR(const VkRenderPassCreateInfo2KHR* in_struct);
+    safe_VkRenderPassCreateInfo2KHR(const safe_VkRenderPassCreateInfo2KHR& src);
+    safe_VkRenderPassCreateInfo2KHR& operator=(const safe_VkRenderPassCreateInfo2KHR& src);
+    safe_VkRenderPassCreateInfo2KHR();
+    ~safe_VkRenderPassCreateInfo2KHR();
+    void initialize(const VkRenderPassCreateInfo2KHR* in_struct);
+    void initialize(const safe_VkRenderPassCreateInfo2KHR* src);
+    VkRenderPassCreateInfo2KHR *ptr() { return reinterpret_cast<VkRenderPassCreateInfo2KHR *>(this); }
+    VkRenderPassCreateInfo2KHR const *ptr() const { return reinterpret_cast<VkRenderPassCreateInfo2KHR const *>(this); }
+};
+
+struct safe_VkSubpassBeginInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkSubpassContents contents;
+    safe_VkSubpassBeginInfoKHR(const VkSubpassBeginInfoKHR* in_struct);
+    safe_VkSubpassBeginInfoKHR(const safe_VkSubpassBeginInfoKHR& src);
+    safe_VkSubpassBeginInfoKHR& operator=(const safe_VkSubpassBeginInfoKHR& src);
+    safe_VkSubpassBeginInfoKHR();
+    ~safe_VkSubpassBeginInfoKHR();
+    void initialize(const VkSubpassBeginInfoKHR* in_struct);
+    void initialize(const safe_VkSubpassBeginInfoKHR* src);
+    VkSubpassBeginInfoKHR *ptr() { return reinterpret_cast<VkSubpassBeginInfoKHR *>(this); }
+    VkSubpassBeginInfoKHR const *ptr() const { return reinterpret_cast<VkSubpassBeginInfoKHR const *>(this); }
+};
+
+struct safe_VkSubpassEndInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    safe_VkSubpassEndInfoKHR(const VkSubpassEndInfoKHR* in_struct);
+    safe_VkSubpassEndInfoKHR(const safe_VkSubpassEndInfoKHR& src);
+    safe_VkSubpassEndInfoKHR& operator=(const safe_VkSubpassEndInfoKHR& src);
+    safe_VkSubpassEndInfoKHR();
+    ~safe_VkSubpassEndInfoKHR();
+    void initialize(const VkSubpassEndInfoKHR* in_struct);
+    void initialize(const safe_VkSubpassEndInfoKHR* src);
+    VkSubpassEndInfoKHR *ptr() { return reinterpret_cast<VkSubpassEndInfoKHR *>(this); }
+    VkSubpassEndInfoKHR const *ptr() const { return reinterpret_cast<VkSubpassEndInfoKHR const *>(this); }
+};
+
+struct safe_VkSharedPresentSurfaceCapabilitiesKHR {
+    VkStructureType sType;
+    void* pNext;
+    VkImageUsageFlags sharedPresentSupportedUsageFlags;
+    safe_VkSharedPresentSurfaceCapabilitiesKHR(const VkSharedPresentSurfaceCapabilitiesKHR* in_struct);
+    safe_VkSharedPresentSurfaceCapabilitiesKHR(const safe_VkSharedPresentSurfaceCapabilitiesKHR& src);
+    safe_VkSharedPresentSurfaceCapabilitiesKHR& operator=(const safe_VkSharedPresentSurfaceCapabilitiesKHR& src);
+    safe_VkSharedPresentSurfaceCapabilitiesKHR();
+    ~safe_VkSharedPresentSurfaceCapabilitiesKHR();
+    void initialize(const VkSharedPresentSurfaceCapabilitiesKHR* in_struct);
+    void initialize(const safe_VkSharedPresentSurfaceCapabilitiesKHR* src);
+    VkSharedPresentSurfaceCapabilitiesKHR *ptr() { return reinterpret_cast<VkSharedPresentSurfaceCapabilitiesKHR *>(this); }
+    VkSharedPresentSurfaceCapabilitiesKHR const *ptr() const { return reinterpret_cast<VkSharedPresentSurfaceCapabilitiesKHR const *>(this); }
+};
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+struct safe_VkImportFenceWin32HandleInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkFence fence;
+    VkFenceImportFlags flags;
+    VkExternalFenceHandleTypeFlagBits handleType;
+    HANDLE handle;
+    LPCWSTR name;
+    safe_VkImportFenceWin32HandleInfoKHR(const VkImportFenceWin32HandleInfoKHR* in_struct);
+    safe_VkImportFenceWin32HandleInfoKHR(const safe_VkImportFenceWin32HandleInfoKHR& src);
+    safe_VkImportFenceWin32HandleInfoKHR& operator=(const safe_VkImportFenceWin32HandleInfoKHR& src);
+    safe_VkImportFenceWin32HandleInfoKHR();
+    ~safe_VkImportFenceWin32HandleInfoKHR();
+    void initialize(const VkImportFenceWin32HandleInfoKHR* in_struct);
+    void initialize(const safe_VkImportFenceWin32HandleInfoKHR* src);
+    VkImportFenceWin32HandleInfoKHR *ptr() { return reinterpret_cast<VkImportFenceWin32HandleInfoKHR *>(this); }
+    VkImportFenceWin32HandleInfoKHR const *ptr() const { return reinterpret_cast<VkImportFenceWin32HandleInfoKHR const *>(this); }
+};
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+struct safe_VkExportFenceWin32HandleInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    const SECURITY_ATTRIBUTES* pAttributes;
+    DWORD dwAccess;
+    LPCWSTR name;
+    safe_VkExportFenceWin32HandleInfoKHR(const VkExportFenceWin32HandleInfoKHR* in_struct);
+    safe_VkExportFenceWin32HandleInfoKHR(const safe_VkExportFenceWin32HandleInfoKHR& src);
+    safe_VkExportFenceWin32HandleInfoKHR& operator=(const safe_VkExportFenceWin32HandleInfoKHR& src);
+    safe_VkExportFenceWin32HandleInfoKHR();
+    ~safe_VkExportFenceWin32HandleInfoKHR();
+    void initialize(const VkExportFenceWin32HandleInfoKHR* in_struct);
+    void initialize(const safe_VkExportFenceWin32HandleInfoKHR* src);
+    VkExportFenceWin32HandleInfoKHR *ptr() { return reinterpret_cast<VkExportFenceWin32HandleInfoKHR *>(this); }
+    VkExportFenceWin32HandleInfoKHR const *ptr() const { return reinterpret_cast<VkExportFenceWin32HandleInfoKHR const *>(this); }
+};
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+struct safe_VkFenceGetWin32HandleInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkFence fence;
+    VkExternalFenceHandleTypeFlagBits handleType;
+    safe_VkFenceGetWin32HandleInfoKHR(const VkFenceGetWin32HandleInfoKHR* in_struct);
+    safe_VkFenceGetWin32HandleInfoKHR(const safe_VkFenceGetWin32HandleInfoKHR& src);
+    safe_VkFenceGetWin32HandleInfoKHR& operator=(const safe_VkFenceGetWin32HandleInfoKHR& src);
+    safe_VkFenceGetWin32HandleInfoKHR();
+    ~safe_VkFenceGetWin32HandleInfoKHR();
+    void initialize(const VkFenceGetWin32HandleInfoKHR* in_struct);
+    void initialize(const safe_VkFenceGetWin32HandleInfoKHR* src);
+    VkFenceGetWin32HandleInfoKHR *ptr() { return reinterpret_cast<VkFenceGetWin32HandleInfoKHR *>(this); }
+    VkFenceGetWin32HandleInfoKHR const *ptr() const { return reinterpret_cast<VkFenceGetWin32HandleInfoKHR const *>(this); }
+};
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+struct safe_VkImportFenceFdInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkFence fence;
+    VkFenceImportFlags flags;
+    VkExternalFenceHandleTypeFlagBits handleType;
+    int fd;
+    safe_VkImportFenceFdInfoKHR(const VkImportFenceFdInfoKHR* in_struct);
+    safe_VkImportFenceFdInfoKHR(const safe_VkImportFenceFdInfoKHR& src);
+    safe_VkImportFenceFdInfoKHR& operator=(const safe_VkImportFenceFdInfoKHR& src);
+    safe_VkImportFenceFdInfoKHR();
+    ~safe_VkImportFenceFdInfoKHR();
+    void initialize(const VkImportFenceFdInfoKHR* in_struct);
+    void initialize(const safe_VkImportFenceFdInfoKHR* src);
+    VkImportFenceFdInfoKHR *ptr() { return reinterpret_cast<VkImportFenceFdInfoKHR *>(this); }
+    VkImportFenceFdInfoKHR const *ptr() const { return reinterpret_cast<VkImportFenceFdInfoKHR const *>(this); }
+};
+
+struct safe_VkFenceGetFdInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkFence fence;
+    VkExternalFenceHandleTypeFlagBits handleType;
+    safe_VkFenceGetFdInfoKHR(const VkFenceGetFdInfoKHR* in_struct);
+    safe_VkFenceGetFdInfoKHR(const safe_VkFenceGetFdInfoKHR& src);
+    safe_VkFenceGetFdInfoKHR& operator=(const safe_VkFenceGetFdInfoKHR& src);
+    safe_VkFenceGetFdInfoKHR();
+    ~safe_VkFenceGetFdInfoKHR();
+    void initialize(const VkFenceGetFdInfoKHR* in_struct);
+    void initialize(const safe_VkFenceGetFdInfoKHR* src);
+    VkFenceGetFdInfoKHR *ptr() { return reinterpret_cast<VkFenceGetFdInfoKHR *>(this); }
+    VkFenceGetFdInfoKHR const *ptr() const { return reinterpret_cast<VkFenceGetFdInfoKHR const *>(this); }
+};
+
+struct safe_VkPhysicalDevicePerformanceQueryFeaturesKHR {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 performanceCounterQueryPools;
+    VkBool32 performanceCounterMultipleQueryPools;
+    safe_VkPhysicalDevicePerformanceQueryFeaturesKHR(const VkPhysicalDevicePerformanceQueryFeaturesKHR* in_struct);
+    safe_VkPhysicalDevicePerformanceQueryFeaturesKHR(const safe_VkPhysicalDevicePerformanceQueryFeaturesKHR& src);
+    safe_VkPhysicalDevicePerformanceQueryFeaturesKHR& operator=(const safe_VkPhysicalDevicePerformanceQueryFeaturesKHR& src);
+    safe_VkPhysicalDevicePerformanceQueryFeaturesKHR();
+    ~safe_VkPhysicalDevicePerformanceQueryFeaturesKHR();
+    void initialize(const VkPhysicalDevicePerformanceQueryFeaturesKHR* in_struct);
+    void initialize(const safe_VkPhysicalDevicePerformanceQueryFeaturesKHR* src);
+    VkPhysicalDevicePerformanceQueryFeaturesKHR *ptr() { return reinterpret_cast<VkPhysicalDevicePerformanceQueryFeaturesKHR *>(this); }
+    VkPhysicalDevicePerformanceQueryFeaturesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDevicePerformanceQueryFeaturesKHR const *>(this); }
+};
+
+struct safe_VkPhysicalDevicePerformanceQueryPropertiesKHR {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 allowCommandBufferQueryCopies;
+    safe_VkPhysicalDevicePerformanceQueryPropertiesKHR(const VkPhysicalDevicePerformanceQueryPropertiesKHR* in_struct);
+    safe_VkPhysicalDevicePerformanceQueryPropertiesKHR(const safe_VkPhysicalDevicePerformanceQueryPropertiesKHR& src);
+    safe_VkPhysicalDevicePerformanceQueryPropertiesKHR& operator=(const safe_VkPhysicalDevicePerformanceQueryPropertiesKHR& src);
+    safe_VkPhysicalDevicePerformanceQueryPropertiesKHR();
+    ~safe_VkPhysicalDevicePerformanceQueryPropertiesKHR();
+    void initialize(const VkPhysicalDevicePerformanceQueryPropertiesKHR* in_struct);
+    void initialize(const safe_VkPhysicalDevicePerformanceQueryPropertiesKHR* src);
+    VkPhysicalDevicePerformanceQueryPropertiesKHR *ptr() { return reinterpret_cast<VkPhysicalDevicePerformanceQueryPropertiesKHR *>(this); }
+    VkPhysicalDevicePerformanceQueryPropertiesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDevicePerformanceQueryPropertiesKHR const *>(this); }
+};
+
+struct safe_VkPerformanceCounterKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkPerformanceCounterUnitKHR unit;
+    VkPerformanceCounterScopeKHR scope;
+    VkPerformanceCounterStorageKHR storage;
+    uint8_t uuid[VK_UUID_SIZE];
+    safe_VkPerformanceCounterKHR(const VkPerformanceCounterKHR* in_struct);
+    safe_VkPerformanceCounterKHR(const safe_VkPerformanceCounterKHR& src);
+    safe_VkPerformanceCounterKHR& operator=(const safe_VkPerformanceCounterKHR& src);
+    safe_VkPerformanceCounterKHR();
+    ~safe_VkPerformanceCounterKHR();
+    void initialize(const VkPerformanceCounterKHR* in_struct);
+    void initialize(const safe_VkPerformanceCounterKHR* src);
+    VkPerformanceCounterKHR *ptr() { return reinterpret_cast<VkPerformanceCounterKHR *>(this); }
+    VkPerformanceCounterKHR const *ptr() const { return reinterpret_cast<VkPerformanceCounterKHR const *>(this); }
+};
+
+struct safe_VkPerformanceCounterDescriptionKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkPerformanceCounterDescriptionFlagsKHR flags;
+    char name[VK_MAX_DESCRIPTION_SIZE];
+    char category[VK_MAX_DESCRIPTION_SIZE];
+    char description[VK_MAX_DESCRIPTION_SIZE];
+    safe_VkPerformanceCounterDescriptionKHR(const VkPerformanceCounterDescriptionKHR* in_struct);
+    safe_VkPerformanceCounterDescriptionKHR(const safe_VkPerformanceCounterDescriptionKHR& src);
+    safe_VkPerformanceCounterDescriptionKHR& operator=(const safe_VkPerformanceCounterDescriptionKHR& src);
+    safe_VkPerformanceCounterDescriptionKHR();
+    ~safe_VkPerformanceCounterDescriptionKHR();
+    void initialize(const VkPerformanceCounterDescriptionKHR* in_struct);
+    void initialize(const safe_VkPerformanceCounterDescriptionKHR* src);
+    VkPerformanceCounterDescriptionKHR *ptr() { return reinterpret_cast<VkPerformanceCounterDescriptionKHR *>(this); }
+    VkPerformanceCounterDescriptionKHR const *ptr() const { return reinterpret_cast<VkPerformanceCounterDescriptionKHR const *>(this); }
+};
+
+struct safe_VkQueryPoolPerformanceCreateInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t queueFamilyIndex;
+    uint32_t counterIndexCount;
+    const uint32_t* pCounterIndices;
+    safe_VkQueryPoolPerformanceCreateInfoKHR(const VkQueryPoolPerformanceCreateInfoKHR* in_struct);
+    safe_VkQueryPoolPerformanceCreateInfoKHR(const safe_VkQueryPoolPerformanceCreateInfoKHR& src);
+    safe_VkQueryPoolPerformanceCreateInfoKHR& operator=(const safe_VkQueryPoolPerformanceCreateInfoKHR& src);
+    safe_VkQueryPoolPerformanceCreateInfoKHR();
+    ~safe_VkQueryPoolPerformanceCreateInfoKHR();
+    void initialize(const VkQueryPoolPerformanceCreateInfoKHR* in_struct);
+    void initialize(const safe_VkQueryPoolPerformanceCreateInfoKHR* src);
+    VkQueryPoolPerformanceCreateInfoKHR *ptr() { return reinterpret_cast<VkQueryPoolPerformanceCreateInfoKHR *>(this); }
+    VkQueryPoolPerformanceCreateInfoKHR const *ptr() const { return reinterpret_cast<VkQueryPoolPerformanceCreateInfoKHR const *>(this); }
+};
+
+struct safe_VkAcquireProfilingLockInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkAcquireProfilingLockFlagsKHR flags;
+    uint64_t timeout;
+    safe_VkAcquireProfilingLockInfoKHR(const VkAcquireProfilingLockInfoKHR* in_struct);
+    safe_VkAcquireProfilingLockInfoKHR(const safe_VkAcquireProfilingLockInfoKHR& src);
+    safe_VkAcquireProfilingLockInfoKHR& operator=(const safe_VkAcquireProfilingLockInfoKHR& src);
+    safe_VkAcquireProfilingLockInfoKHR();
+    ~safe_VkAcquireProfilingLockInfoKHR();
+    void initialize(const VkAcquireProfilingLockInfoKHR* in_struct);
+    void initialize(const safe_VkAcquireProfilingLockInfoKHR* src);
+    VkAcquireProfilingLockInfoKHR *ptr() { return reinterpret_cast<VkAcquireProfilingLockInfoKHR *>(this); }
+    VkAcquireProfilingLockInfoKHR const *ptr() const { return reinterpret_cast<VkAcquireProfilingLockInfoKHR const *>(this); }
+};
+
+struct safe_VkPerformanceQuerySubmitInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t counterPassIndex;
+    safe_VkPerformanceQuerySubmitInfoKHR(const VkPerformanceQuerySubmitInfoKHR* in_struct);
+    safe_VkPerformanceQuerySubmitInfoKHR(const safe_VkPerformanceQuerySubmitInfoKHR& src);
+    safe_VkPerformanceQuerySubmitInfoKHR& operator=(const safe_VkPerformanceQuerySubmitInfoKHR& src);
+    safe_VkPerformanceQuerySubmitInfoKHR();
+    ~safe_VkPerformanceQuerySubmitInfoKHR();
+    void initialize(const VkPerformanceQuerySubmitInfoKHR* in_struct);
+    void initialize(const safe_VkPerformanceQuerySubmitInfoKHR* src);
+    VkPerformanceQuerySubmitInfoKHR *ptr() { return reinterpret_cast<VkPerformanceQuerySubmitInfoKHR *>(this); }
+    VkPerformanceQuerySubmitInfoKHR const *ptr() const { return reinterpret_cast<VkPerformanceQuerySubmitInfoKHR const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceSurfaceInfo2KHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkSurfaceKHR surface;
+    safe_VkPhysicalDeviceSurfaceInfo2KHR(const VkPhysicalDeviceSurfaceInfo2KHR* in_struct);
+    safe_VkPhysicalDeviceSurfaceInfo2KHR(const safe_VkPhysicalDeviceSurfaceInfo2KHR& src);
+    safe_VkPhysicalDeviceSurfaceInfo2KHR& operator=(const safe_VkPhysicalDeviceSurfaceInfo2KHR& src);
+    safe_VkPhysicalDeviceSurfaceInfo2KHR();
+    ~safe_VkPhysicalDeviceSurfaceInfo2KHR();
+    void initialize(const VkPhysicalDeviceSurfaceInfo2KHR* in_struct);
+    void initialize(const safe_VkPhysicalDeviceSurfaceInfo2KHR* src);
+    VkPhysicalDeviceSurfaceInfo2KHR *ptr() { return reinterpret_cast<VkPhysicalDeviceSurfaceInfo2KHR *>(this); }
+    VkPhysicalDeviceSurfaceInfo2KHR const *ptr() const { return reinterpret_cast<VkPhysicalDeviceSurfaceInfo2KHR const *>(this); }
+};
+
+struct safe_VkSurfaceCapabilities2KHR {
+    VkStructureType sType;
+    void* pNext;
+    VkSurfaceCapabilitiesKHR surfaceCapabilities;
+    safe_VkSurfaceCapabilities2KHR(const VkSurfaceCapabilities2KHR* in_struct);
+    safe_VkSurfaceCapabilities2KHR(const safe_VkSurfaceCapabilities2KHR& src);
+    safe_VkSurfaceCapabilities2KHR& operator=(const safe_VkSurfaceCapabilities2KHR& src);
+    safe_VkSurfaceCapabilities2KHR();
+    ~safe_VkSurfaceCapabilities2KHR();
+    void initialize(const VkSurfaceCapabilities2KHR* in_struct);
+    void initialize(const safe_VkSurfaceCapabilities2KHR* src);
+    VkSurfaceCapabilities2KHR *ptr() { return reinterpret_cast<VkSurfaceCapabilities2KHR *>(this); }
+    VkSurfaceCapabilities2KHR const *ptr() const { return reinterpret_cast<VkSurfaceCapabilities2KHR const *>(this); }
+};
+
+struct safe_VkSurfaceFormat2KHR {
+    VkStructureType sType;
+    void* pNext;
+    VkSurfaceFormatKHR surfaceFormat;
+    safe_VkSurfaceFormat2KHR(const VkSurfaceFormat2KHR* in_struct);
+    safe_VkSurfaceFormat2KHR(const safe_VkSurfaceFormat2KHR& src);
+    safe_VkSurfaceFormat2KHR& operator=(const safe_VkSurfaceFormat2KHR& src);
+    safe_VkSurfaceFormat2KHR();
+    ~safe_VkSurfaceFormat2KHR();
+    void initialize(const VkSurfaceFormat2KHR* in_struct);
+    void initialize(const safe_VkSurfaceFormat2KHR* src);
+    VkSurfaceFormat2KHR *ptr() { return reinterpret_cast<VkSurfaceFormat2KHR *>(this); }
+    VkSurfaceFormat2KHR const *ptr() const { return reinterpret_cast<VkSurfaceFormat2KHR const *>(this); }
+};
+
+struct safe_VkDisplayProperties2KHR {
+    VkStructureType sType;
+    void* pNext;
+    safe_VkDisplayPropertiesKHR displayProperties;
+    safe_VkDisplayProperties2KHR(const VkDisplayProperties2KHR* in_struct);
+    safe_VkDisplayProperties2KHR(const safe_VkDisplayProperties2KHR& src);
+    safe_VkDisplayProperties2KHR& operator=(const safe_VkDisplayProperties2KHR& src);
+    safe_VkDisplayProperties2KHR();
+    ~safe_VkDisplayProperties2KHR();
+    void initialize(const VkDisplayProperties2KHR* in_struct);
+    void initialize(const safe_VkDisplayProperties2KHR* src);
+    VkDisplayProperties2KHR *ptr() { return reinterpret_cast<VkDisplayProperties2KHR *>(this); }
+    VkDisplayProperties2KHR const *ptr() const { return reinterpret_cast<VkDisplayProperties2KHR const *>(this); }
+};
+
+struct safe_VkDisplayPlaneProperties2KHR {
+    VkStructureType sType;
+    void* pNext;
+    VkDisplayPlanePropertiesKHR displayPlaneProperties;
+    safe_VkDisplayPlaneProperties2KHR(const VkDisplayPlaneProperties2KHR* in_struct);
+    safe_VkDisplayPlaneProperties2KHR(const safe_VkDisplayPlaneProperties2KHR& src);
+    safe_VkDisplayPlaneProperties2KHR& operator=(const safe_VkDisplayPlaneProperties2KHR& src);
+    safe_VkDisplayPlaneProperties2KHR();
+    ~safe_VkDisplayPlaneProperties2KHR();
+    void initialize(const VkDisplayPlaneProperties2KHR* in_struct);
+    void initialize(const safe_VkDisplayPlaneProperties2KHR* src);
+    VkDisplayPlaneProperties2KHR *ptr() { return reinterpret_cast<VkDisplayPlaneProperties2KHR *>(this); }
+    VkDisplayPlaneProperties2KHR const *ptr() const { return reinterpret_cast<VkDisplayPlaneProperties2KHR const *>(this); }
+};
+
+struct safe_VkDisplayModeProperties2KHR {
+    VkStructureType sType;
+    void* pNext;
+    VkDisplayModePropertiesKHR displayModeProperties;
+    safe_VkDisplayModeProperties2KHR(const VkDisplayModeProperties2KHR* in_struct);
+    safe_VkDisplayModeProperties2KHR(const safe_VkDisplayModeProperties2KHR& src);
+    safe_VkDisplayModeProperties2KHR& operator=(const safe_VkDisplayModeProperties2KHR& src);
+    safe_VkDisplayModeProperties2KHR();
+    ~safe_VkDisplayModeProperties2KHR();
+    void initialize(const VkDisplayModeProperties2KHR* in_struct);
+    void initialize(const safe_VkDisplayModeProperties2KHR* src);
+    VkDisplayModeProperties2KHR *ptr() { return reinterpret_cast<VkDisplayModeProperties2KHR *>(this); }
+    VkDisplayModeProperties2KHR const *ptr() const { return reinterpret_cast<VkDisplayModeProperties2KHR const *>(this); }
+};
+
+struct safe_VkDisplayPlaneInfo2KHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkDisplayModeKHR mode;
+    uint32_t planeIndex;
+    safe_VkDisplayPlaneInfo2KHR(const VkDisplayPlaneInfo2KHR* in_struct);
+    safe_VkDisplayPlaneInfo2KHR(const safe_VkDisplayPlaneInfo2KHR& src);
+    safe_VkDisplayPlaneInfo2KHR& operator=(const safe_VkDisplayPlaneInfo2KHR& src);
+    safe_VkDisplayPlaneInfo2KHR();
+    ~safe_VkDisplayPlaneInfo2KHR();
+    void initialize(const VkDisplayPlaneInfo2KHR* in_struct);
+    void initialize(const safe_VkDisplayPlaneInfo2KHR* src);
+    VkDisplayPlaneInfo2KHR *ptr() { return reinterpret_cast<VkDisplayPlaneInfo2KHR *>(this); }
+    VkDisplayPlaneInfo2KHR const *ptr() const { return reinterpret_cast<VkDisplayPlaneInfo2KHR const *>(this); }
+};
+
+struct safe_VkDisplayPlaneCapabilities2KHR {
+    VkStructureType sType;
+    void* pNext;
+    VkDisplayPlaneCapabilitiesKHR capabilities;
+    safe_VkDisplayPlaneCapabilities2KHR(const VkDisplayPlaneCapabilities2KHR* in_struct);
+    safe_VkDisplayPlaneCapabilities2KHR(const safe_VkDisplayPlaneCapabilities2KHR& src);
+    safe_VkDisplayPlaneCapabilities2KHR& operator=(const safe_VkDisplayPlaneCapabilities2KHR& src);
+    safe_VkDisplayPlaneCapabilities2KHR();
+    ~safe_VkDisplayPlaneCapabilities2KHR();
+    void initialize(const VkDisplayPlaneCapabilities2KHR* in_struct);
+    void initialize(const safe_VkDisplayPlaneCapabilities2KHR* src);
+    VkDisplayPlaneCapabilities2KHR *ptr() { return reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>(this); }
+    VkDisplayPlaneCapabilities2KHR const *ptr() const { return reinterpret_cast<VkDisplayPlaneCapabilities2KHR const *>(this); }
+};
+
+struct safe_VkImageFormatListCreateInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t viewFormatCount;
+    const VkFormat* pViewFormats;
+    safe_VkImageFormatListCreateInfoKHR(const VkImageFormatListCreateInfoKHR* in_struct);
+    safe_VkImageFormatListCreateInfoKHR(const safe_VkImageFormatListCreateInfoKHR& src);
+    safe_VkImageFormatListCreateInfoKHR& operator=(const safe_VkImageFormatListCreateInfoKHR& src);
+    safe_VkImageFormatListCreateInfoKHR();
+    ~safe_VkImageFormatListCreateInfoKHR();
+    void initialize(const VkImageFormatListCreateInfoKHR* in_struct);
+    void initialize(const safe_VkImageFormatListCreateInfoKHR* src);
+    VkImageFormatListCreateInfoKHR *ptr() { return reinterpret_cast<VkImageFormatListCreateInfoKHR *>(this); }
+    VkImageFormatListCreateInfoKHR const *ptr() const { return reinterpret_cast<VkImageFormatListCreateInfoKHR const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 shaderSubgroupExtendedTypes;
+    safe_VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR(const VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR* in_struct);
+    safe_VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR(const safe_VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR& src);
+    safe_VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR& operator=(const safe_VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR& src);
+    safe_VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR();
+    ~safe_VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR();
+    void initialize(const VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR* in_struct);
+    void initialize(const safe_VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR* src);
+    VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR *ptr() { return reinterpret_cast<VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR *>(this); }
+    VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR const *>(this); }
+};
+
+struct safe_VkPhysicalDevice8BitStorageFeaturesKHR {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 storageBuffer8BitAccess;
+    VkBool32 uniformAndStorageBuffer8BitAccess;
+    VkBool32 storagePushConstant8;
+    safe_VkPhysicalDevice8BitStorageFeaturesKHR(const VkPhysicalDevice8BitStorageFeaturesKHR* in_struct);
+    safe_VkPhysicalDevice8BitStorageFeaturesKHR(const safe_VkPhysicalDevice8BitStorageFeaturesKHR& src);
+    safe_VkPhysicalDevice8BitStorageFeaturesKHR& operator=(const safe_VkPhysicalDevice8BitStorageFeaturesKHR& src);
+    safe_VkPhysicalDevice8BitStorageFeaturesKHR();
+    ~safe_VkPhysicalDevice8BitStorageFeaturesKHR();
+    void initialize(const VkPhysicalDevice8BitStorageFeaturesKHR* in_struct);
+    void initialize(const safe_VkPhysicalDevice8BitStorageFeaturesKHR* src);
+    VkPhysicalDevice8BitStorageFeaturesKHR *ptr() { return reinterpret_cast<VkPhysicalDevice8BitStorageFeaturesKHR *>(this); }
+    VkPhysicalDevice8BitStorageFeaturesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDevice8BitStorageFeaturesKHR const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 shaderBufferInt64Atomics;
+    VkBool32 shaderSharedInt64Atomics;
+    safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR(const VkPhysicalDeviceShaderAtomicInt64FeaturesKHR* in_struct);
+    safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR(const safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR& src);
+    safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR& operator=(const safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR& src);
+    safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR();
+    ~safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR();
+    void initialize(const VkPhysicalDeviceShaderAtomicInt64FeaturesKHR* in_struct);
+    void initialize(const safe_VkPhysicalDeviceShaderAtomicInt64FeaturesKHR* src);
+    VkPhysicalDeviceShaderAtomicInt64FeaturesKHR *ptr() { return reinterpret_cast<VkPhysicalDeviceShaderAtomicInt64FeaturesKHR *>(this); }
+    VkPhysicalDeviceShaderAtomicInt64FeaturesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDeviceShaderAtomicInt64FeaturesKHR const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceShaderClockFeaturesKHR {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 shaderSubgroupClock;
+    VkBool32 shaderDeviceClock;
+    safe_VkPhysicalDeviceShaderClockFeaturesKHR(const VkPhysicalDeviceShaderClockFeaturesKHR* in_struct);
+    safe_VkPhysicalDeviceShaderClockFeaturesKHR(const safe_VkPhysicalDeviceShaderClockFeaturesKHR& src);
+    safe_VkPhysicalDeviceShaderClockFeaturesKHR& operator=(const safe_VkPhysicalDeviceShaderClockFeaturesKHR& src);
+    safe_VkPhysicalDeviceShaderClockFeaturesKHR();
+    ~safe_VkPhysicalDeviceShaderClockFeaturesKHR();
+    void initialize(const VkPhysicalDeviceShaderClockFeaturesKHR* in_struct);
+    void initialize(const safe_VkPhysicalDeviceShaderClockFeaturesKHR* src);
+    VkPhysicalDeviceShaderClockFeaturesKHR *ptr() { return reinterpret_cast<VkPhysicalDeviceShaderClockFeaturesKHR *>(this); }
+    VkPhysicalDeviceShaderClockFeaturesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDeviceShaderClockFeaturesKHR const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceDriverPropertiesKHR {
+    VkStructureType sType;
+    void* pNext;
+    VkDriverIdKHR driverID;
+    char driverName[VK_MAX_DRIVER_NAME_SIZE_KHR];
+    char driverInfo[VK_MAX_DRIVER_INFO_SIZE_KHR];
+    VkConformanceVersionKHR conformanceVersion;
+    safe_VkPhysicalDeviceDriverPropertiesKHR(const VkPhysicalDeviceDriverPropertiesKHR* in_struct);
+    safe_VkPhysicalDeviceDriverPropertiesKHR(const safe_VkPhysicalDeviceDriverPropertiesKHR& src);
+    safe_VkPhysicalDeviceDriverPropertiesKHR& operator=(const safe_VkPhysicalDeviceDriverPropertiesKHR& src);
+    safe_VkPhysicalDeviceDriverPropertiesKHR();
+    ~safe_VkPhysicalDeviceDriverPropertiesKHR();
+    void initialize(const VkPhysicalDeviceDriverPropertiesKHR* in_struct);
+    void initialize(const safe_VkPhysicalDeviceDriverPropertiesKHR* src);
+    VkPhysicalDeviceDriverPropertiesKHR *ptr() { return reinterpret_cast<VkPhysicalDeviceDriverPropertiesKHR *>(this); }
+    VkPhysicalDeviceDriverPropertiesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDeviceDriverPropertiesKHR const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceFloatControlsPropertiesKHR {
+    VkStructureType sType;
+    void* pNext;
+    VkShaderFloatControlsIndependenceKHR denormBehaviorIndependence;
+    VkShaderFloatControlsIndependenceKHR roundingModeIndependence;
+    VkBool32 shaderSignedZeroInfNanPreserveFloat16;
+    VkBool32 shaderSignedZeroInfNanPreserveFloat32;
+    VkBool32 shaderSignedZeroInfNanPreserveFloat64;
+    VkBool32 shaderDenormPreserveFloat16;
+    VkBool32 shaderDenormPreserveFloat32;
+    VkBool32 shaderDenormPreserveFloat64;
+    VkBool32 shaderDenormFlushToZeroFloat16;
+    VkBool32 shaderDenormFlushToZeroFloat32;
+    VkBool32 shaderDenormFlushToZeroFloat64;
+    VkBool32 shaderRoundingModeRTEFloat16;
+    VkBool32 shaderRoundingModeRTEFloat32;
+    VkBool32 shaderRoundingModeRTEFloat64;
+    VkBool32 shaderRoundingModeRTZFloat16;
+    VkBool32 shaderRoundingModeRTZFloat32;
+    VkBool32 shaderRoundingModeRTZFloat64;
+    safe_VkPhysicalDeviceFloatControlsPropertiesKHR(const VkPhysicalDeviceFloatControlsPropertiesKHR* in_struct);
+    safe_VkPhysicalDeviceFloatControlsPropertiesKHR(const safe_VkPhysicalDeviceFloatControlsPropertiesKHR& src);
+    safe_VkPhysicalDeviceFloatControlsPropertiesKHR& operator=(const safe_VkPhysicalDeviceFloatControlsPropertiesKHR& src);
+    safe_VkPhysicalDeviceFloatControlsPropertiesKHR();
+    ~safe_VkPhysicalDeviceFloatControlsPropertiesKHR();
+    void initialize(const VkPhysicalDeviceFloatControlsPropertiesKHR* in_struct);
+    void initialize(const safe_VkPhysicalDeviceFloatControlsPropertiesKHR* src);
+    VkPhysicalDeviceFloatControlsPropertiesKHR *ptr() { return reinterpret_cast<VkPhysicalDeviceFloatControlsPropertiesKHR *>(this); }
+    VkPhysicalDeviceFloatControlsPropertiesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDeviceFloatControlsPropertiesKHR const *>(this); }
+};
+
+struct safe_VkSubpassDescriptionDepthStencilResolveKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkResolveModeFlagBitsKHR depthResolveMode;
+    VkResolveModeFlagBitsKHR stencilResolveMode;
+    safe_VkAttachmentReference2KHR* pDepthStencilResolveAttachment;
+    safe_VkSubpassDescriptionDepthStencilResolveKHR(const VkSubpassDescriptionDepthStencilResolveKHR* in_struct);
+    safe_VkSubpassDescriptionDepthStencilResolveKHR(const safe_VkSubpassDescriptionDepthStencilResolveKHR& src);
+    safe_VkSubpassDescriptionDepthStencilResolveKHR& operator=(const safe_VkSubpassDescriptionDepthStencilResolveKHR& src);
+    safe_VkSubpassDescriptionDepthStencilResolveKHR();
+    ~safe_VkSubpassDescriptionDepthStencilResolveKHR();
+    void initialize(const VkSubpassDescriptionDepthStencilResolveKHR* in_struct);
+    void initialize(const safe_VkSubpassDescriptionDepthStencilResolveKHR* src);
+    VkSubpassDescriptionDepthStencilResolveKHR *ptr() { return reinterpret_cast<VkSubpassDescriptionDepthStencilResolveKHR *>(this); }
+    VkSubpassDescriptionDepthStencilResolveKHR const *ptr() const { return reinterpret_cast<VkSubpassDescriptionDepthStencilResolveKHR const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR {
+    VkStructureType sType;
+    void* pNext;
+    VkResolveModeFlagsKHR supportedDepthResolveModes;
+    VkResolveModeFlagsKHR supportedStencilResolveModes;
+    VkBool32 independentResolveNone;
+    VkBool32 independentResolve;
+    safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR(const VkPhysicalDeviceDepthStencilResolvePropertiesKHR* in_struct);
+    safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR(const safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR& src);
+    safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR& operator=(const safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR& src);
+    safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR();
+    ~safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR();
+    void initialize(const VkPhysicalDeviceDepthStencilResolvePropertiesKHR* in_struct);
+    void initialize(const safe_VkPhysicalDeviceDepthStencilResolvePropertiesKHR* src);
+    VkPhysicalDeviceDepthStencilResolvePropertiesKHR *ptr() { return reinterpret_cast<VkPhysicalDeviceDepthStencilResolvePropertiesKHR *>(this); }
+    VkPhysicalDeviceDepthStencilResolvePropertiesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDeviceDepthStencilResolvePropertiesKHR const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceTimelineSemaphoreFeaturesKHR {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 timelineSemaphore;
+    safe_VkPhysicalDeviceTimelineSemaphoreFeaturesKHR(const VkPhysicalDeviceTimelineSemaphoreFeaturesKHR* in_struct);
+    safe_VkPhysicalDeviceTimelineSemaphoreFeaturesKHR(const safe_VkPhysicalDeviceTimelineSemaphoreFeaturesKHR& src);
+    safe_VkPhysicalDeviceTimelineSemaphoreFeaturesKHR& operator=(const safe_VkPhysicalDeviceTimelineSemaphoreFeaturesKHR& src);
+    safe_VkPhysicalDeviceTimelineSemaphoreFeaturesKHR();
+    ~safe_VkPhysicalDeviceTimelineSemaphoreFeaturesKHR();
+    void initialize(const VkPhysicalDeviceTimelineSemaphoreFeaturesKHR* in_struct);
+    void initialize(const safe_VkPhysicalDeviceTimelineSemaphoreFeaturesKHR* src);
+    VkPhysicalDeviceTimelineSemaphoreFeaturesKHR *ptr() { return reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreFeaturesKHR *>(this); }
+    VkPhysicalDeviceTimelineSemaphoreFeaturesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreFeaturesKHR const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceTimelineSemaphorePropertiesKHR {
+    VkStructureType sType;
+    void* pNext;
+    uint64_t maxTimelineSemaphoreValueDifference;
+    safe_VkPhysicalDeviceTimelineSemaphorePropertiesKHR(const VkPhysicalDeviceTimelineSemaphorePropertiesKHR* in_struct);
+    safe_VkPhysicalDeviceTimelineSemaphorePropertiesKHR(const safe_VkPhysicalDeviceTimelineSemaphorePropertiesKHR& src);
+    safe_VkPhysicalDeviceTimelineSemaphorePropertiesKHR& operator=(const safe_VkPhysicalDeviceTimelineSemaphorePropertiesKHR& src);
+    safe_VkPhysicalDeviceTimelineSemaphorePropertiesKHR();
+    ~safe_VkPhysicalDeviceTimelineSemaphorePropertiesKHR();
+    void initialize(const VkPhysicalDeviceTimelineSemaphorePropertiesKHR* in_struct);
+    void initialize(const safe_VkPhysicalDeviceTimelineSemaphorePropertiesKHR* src);
+    VkPhysicalDeviceTimelineSemaphorePropertiesKHR *ptr() { return reinterpret_cast<VkPhysicalDeviceTimelineSemaphorePropertiesKHR *>(this); }
+    VkPhysicalDeviceTimelineSemaphorePropertiesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDeviceTimelineSemaphorePropertiesKHR const *>(this); }
+};
+
+struct safe_VkSemaphoreTypeCreateInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkSemaphoreTypeKHR semaphoreType;
+    uint64_t initialValue;
+    safe_VkSemaphoreTypeCreateInfoKHR(const VkSemaphoreTypeCreateInfoKHR* in_struct);
+    safe_VkSemaphoreTypeCreateInfoKHR(const safe_VkSemaphoreTypeCreateInfoKHR& src);
+    safe_VkSemaphoreTypeCreateInfoKHR& operator=(const safe_VkSemaphoreTypeCreateInfoKHR& src);
+    safe_VkSemaphoreTypeCreateInfoKHR();
+    ~safe_VkSemaphoreTypeCreateInfoKHR();
+    void initialize(const VkSemaphoreTypeCreateInfoKHR* in_struct);
+    void initialize(const safe_VkSemaphoreTypeCreateInfoKHR* src);
+    VkSemaphoreTypeCreateInfoKHR *ptr() { return reinterpret_cast<VkSemaphoreTypeCreateInfoKHR *>(this); }
+    VkSemaphoreTypeCreateInfoKHR const *ptr() const { return reinterpret_cast<VkSemaphoreTypeCreateInfoKHR const *>(this); }
+};
+
+struct safe_VkTimelineSemaphoreSubmitInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t waitSemaphoreValueCount;
+    const uint64_t* pWaitSemaphoreValues;
+    uint32_t signalSemaphoreValueCount;
+    const uint64_t* pSignalSemaphoreValues;
+    safe_VkTimelineSemaphoreSubmitInfoKHR(const VkTimelineSemaphoreSubmitInfoKHR* in_struct);
+    safe_VkTimelineSemaphoreSubmitInfoKHR(const safe_VkTimelineSemaphoreSubmitInfoKHR& src);
+    safe_VkTimelineSemaphoreSubmitInfoKHR& operator=(const safe_VkTimelineSemaphoreSubmitInfoKHR& src);
+    safe_VkTimelineSemaphoreSubmitInfoKHR();
+    ~safe_VkTimelineSemaphoreSubmitInfoKHR();
+    void initialize(const VkTimelineSemaphoreSubmitInfoKHR* in_struct);
+    void initialize(const safe_VkTimelineSemaphoreSubmitInfoKHR* src);
+    VkTimelineSemaphoreSubmitInfoKHR *ptr() { return reinterpret_cast<VkTimelineSemaphoreSubmitInfoKHR *>(this); }
+    VkTimelineSemaphoreSubmitInfoKHR const *ptr() const { return reinterpret_cast<VkTimelineSemaphoreSubmitInfoKHR const *>(this); }
+};
+
+struct safe_VkSemaphoreWaitInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkSemaphoreWaitFlagsKHR flags;
+    uint32_t semaphoreCount;
+    VkSemaphore* pSemaphores;
+    const uint64_t* pValues;
+    safe_VkSemaphoreWaitInfoKHR(const VkSemaphoreWaitInfoKHR* in_struct);
+    safe_VkSemaphoreWaitInfoKHR(const safe_VkSemaphoreWaitInfoKHR& src);
+    safe_VkSemaphoreWaitInfoKHR& operator=(const safe_VkSemaphoreWaitInfoKHR& src);
+    safe_VkSemaphoreWaitInfoKHR();
+    ~safe_VkSemaphoreWaitInfoKHR();
+    void initialize(const VkSemaphoreWaitInfoKHR* in_struct);
+    void initialize(const safe_VkSemaphoreWaitInfoKHR* src);
+    VkSemaphoreWaitInfoKHR *ptr() { return reinterpret_cast<VkSemaphoreWaitInfoKHR *>(this); }
+    VkSemaphoreWaitInfoKHR const *ptr() const { return reinterpret_cast<VkSemaphoreWaitInfoKHR const *>(this); }
+};
+
+struct safe_VkSemaphoreSignalInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkSemaphore semaphore;
+    uint64_t value;
+    safe_VkSemaphoreSignalInfoKHR(const VkSemaphoreSignalInfoKHR* in_struct);
+    safe_VkSemaphoreSignalInfoKHR(const safe_VkSemaphoreSignalInfoKHR& src);
+    safe_VkSemaphoreSignalInfoKHR& operator=(const safe_VkSemaphoreSignalInfoKHR& src);
+    safe_VkSemaphoreSignalInfoKHR();
+    ~safe_VkSemaphoreSignalInfoKHR();
+    void initialize(const VkSemaphoreSignalInfoKHR* in_struct);
+    void initialize(const safe_VkSemaphoreSignalInfoKHR* src);
+    VkSemaphoreSignalInfoKHR *ptr() { return reinterpret_cast<VkSemaphoreSignalInfoKHR *>(this); }
+    VkSemaphoreSignalInfoKHR const *ptr() const { return reinterpret_cast<VkSemaphoreSignalInfoKHR const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 vulkanMemoryModel;
+    VkBool32 vulkanMemoryModelDeviceScope;
+    VkBool32 vulkanMemoryModelAvailabilityVisibilityChains;
+    safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR(const VkPhysicalDeviceVulkanMemoryModelFeaturesKHR* in_struct);
+    safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR(const safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR& src);
+    safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR& operator=(const safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR& src);
+    safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR();
+    ~safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR();
+    void initialize(const VkPhysicalDeviceVulkanMemoryModelFeaturesKHR* in_struct);
+    void initialize(const safe_VkPhysicalDeviceVulkanMemoryModelFeaturesKHR* src);
+    VkPhysicalDeviceVulkanMemoryModelFeaturesKHR *ptr() { return reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeaturesKHR *>(this); }
+    VkPhysicalDeviceVulkanMemoryModelFeaturesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeaturesKHR const *>(this); }
+};
+
+struct safe_VkSurfaceProtectedCapabilitiesKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkBool32 supportsProtected;
+    safe_VkSurfaceProtectedCapabilitiesKHR(const VkSurfaceProtectedCapabilitiesKHR* in_struct);
+    safe_VkSurfaceProtectedCapabilitiesKHR(const safe_VkSurfaceProtectedCapabilitiesKHR& src);
+    safe_VkSurfaceProtectedCapabilitiesKHR& operator=(const safe_VkSurfaceProtectedCapabilitiesKHR& src);
+    safe_VkSurfaceProtectedCapabilitiesKHR();
+    ~safe_VkSurfaceProtectedCapabilitiesKHR();
+    void initialize(const VkSurfaceProtectedCapabilitiesKHR* in_struct);
+    void initialize(const safe_VkSurfaceProtectedCapabilitiesKHR* src);
+    VkSurfaceProtectedCapabilitiesKHR *ptr() { return reinterpret_cast<VkSurfaceProtectedCapabilitiesKHR *>(this); }
+    VkSurfaceProtectedCapabilitiesKHR const *ptr() const { return reinterpret_cast<VkSurfaceProtectedCapabilitiesKHR const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 separateDepthStencilLayouts;
+    safe_VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR(const VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR* in_struct);
+    safe_VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR(const safe_VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR& src);
+    safe_VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR& operator=(const safe_VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR& src);
+    safe_VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR();
+    ~safe_VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR();
+    void initialize(const VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR* in_struct);
+    void initialize(const safe_VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR* src);
+    VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR *ptr() { return reinterpret_cast<VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR *>(this); }
+    VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR const *>(this); }
+};
+
+struct safe_VkAttachmentReferenceStencilLayoutKHR {
+    VkStructureType sType;
+    void* pNext;
+    VkImageLayout stencilLayout;
+    safe_VkAttachmentReferenceStencilLayoutKHR(const VkAttachmentReferenceStencilLayoutKHR* in_struct);
+    safe_VkAttachmentReferenceStencilLayoutKHR(const safe_VkAttachmentReferenceStencilLayoutKHR& src);
+    safe_VkAttachmentReferenceStencilLayoutKHR& operator=(const safe_VkAttachmentReferenceStencilLayoutKHR& src);
+    safe_VkAttachmentReferenceStencilLayoutKHR();
+    ~safe_VkAttachmentReferenceStencilLayoutKHR();
+    void initialize(const VkAttachmentReferenceStencilLayoutKHR* in_struct);
+    void initialize(const safe_VkAttachmentReferenceStencilLayoutKHR* src);
+    VkAttachmentReferenceStencilLayoutKHR *ptr() { return reinterpret_cast<VkAttachmentReferenceStencilLayoutKHR *>(this); }
+    VkAttachmentReferenceStencilLayoutKHR const *ptr() const { return reinterpret_cast<VkAttachmentReferenceStencilLayoutKHR const *>(this); }
+};
+
+struct safe_VkAttachmentDescriptionStencilLayoutKHR {
+    VkStructureType sType;
+    void* pNext;
+    VkImageLayout stencilInitialLayout;
+    VkImageLayout stencilFinalLayout;
+    safe_VkAttachmentDescriptionStencilLayoutKHR(const VkAttachmentDescriptionStencilLayoutKHR* in_struct);
+    safe_VkAttachmentDescriptionStencilLayoutKHR(const safe_VkAttachmentDescriptionStencilLayoutKHR& src);
+    safe_VkAttachmentDescriptionStencilLayoutKHR& operator=(const safe_VkAttachmentDescriptionStencilLayoutKHR& src);
+    safe_VkAttachmentDescriptionStencilLayoutKHR();
+    ~safe_VkAttachmentDescriptionStencilLayoutKHR();
+    void initialize(const VkAttachmentDescriptionStencilLayoutKHR* in_struct);
+    void initialize(const safe_VkAttachmentDescriptionStencilLayoutKHR* src);
+    VkAttachmentDescriptionStencilLayoutKHR *ptr() { return reinterpret_cast<VkAttachmentDescriptionStencilLayoutKHR *>(this); }
+    VkAttachmentDescriptionStencilLayoutKHR const *ptr() const { return reinterpret_cast<VkAttachmentDescriptionStencilLayoutKHR const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 uniformBufferStandardLayout;
+    safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR(const VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR* in_struct);
+    safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR(const safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR& src);
+    safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR& operator=(const safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR& src);
+    safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR();
+    ~safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR();
+    void initialize(const VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR* in_struct);
+    void initialize(const safe_VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR* src);
+    VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR *ptr() { return reinterpret_cast<VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR *>(this); }
+    VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceBufferDeviceAddressFeaturesKHR {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 bufferDeviceAddress;
+    VkBool32 bufferDeviceAddressCaptureReplay;
+    VkBool32 bufferDeviceAddressMultiDevice;
+    safe_VkPhysicalDeviceBufferDeviceAddressFeaturesKHR(const VkPhysicalDeviceBufferDeviceAddressFeaturesKHR* in_struct);
+    safe_VkPhysicalDeviceBufferDeviceAddressFeaturesKHR(const safe_VkPhysicalDeviceBufferDeviceAddressFeaturesKHR& src);
+    safe_VkPhysicalDeviceBufferDeviceAddressFeaturesKHR& operator=(const safe_VkPhysicalDeviceBufferDeviceAddressFeaturesKHR& src);
+    safe_VkPhysicalDeviceBufferDeviceAddressFeaturesKHR();
+    ~safe_VkPhysicalDeviceBufferDeviceAddressFeaturesKHR();
+    void initialize(const VkPhysicalDeviceBufferDeviceAddressFeaturesKHR* in_struct);
+    void initialize(const safe_VkPhysicalDeviceBufferDeviceAddressFeaturesKHR* src);
+    VkPhysicalDeviceBufferDeviceAddressFeaturesKHR *ptr() { return reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeaturesKHR *>(this); }
+    VkPhysicalDeviceBufferDeviceAddressFeaturesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeaturesKHR const *>(this); }
+};
+
+struct safe_VkBufferDeviceAddressInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkBuffer buffer;
+    safe_VkBufferDeviceAddressInfoKHR(const VkBufferDeviceAddressInfoKHR* in_struct);
+    safe_VkBufferDeviceAddressInfoKHR(const safe_VkBufferDeviceAddressInfoKHR& src);
+    safe_VkBufferDeviceAddressInfoKHR& operator=(const safe_VkBufferDeviceAddressInfoKHR& src);
+    safe_VkBufferDeviceAddressInfoKHR();
+    ~safe_VkBufferDeviceAddressInfoKHR();
+    void initialize(const VkBufferDeviceAddressInfoKHR* in_struct);
+    void initialize(const safe_VkBufferDeviceAddressInfoKHR* src);
+    VkBufferDeviceAddressInfoKHR *ptr() { return reinterpret_cast<VkBufferDeviceAddressInfoKHR *>(this); }
+    VkBufferDeviceAddressInfoKHR const *ptr() const { return reinterpret_cast<VkBufferDeviceAddressInfoKHR const *>(this); }
+};
+
+struct safe_VkBufferOpaqueCaptureAddressCreateInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    uint64_t opaqueCaptureAddress;
+    safe_VkBufferOpaqueCaptureAddressCreateInfoKHR(const VkBufferOpaqueCaptureAddressCreateInfoKHR* in_struct);
+    safe_VkBufferOpaqueCaptureAddressCreateInfoKHR(const safe_VkBufferOpaqueCaptureAddressCreateInfoKHR& src);
+    safe_VkBufferOpaqueCaptureAddressCreateInfoKHR& operator=(const safe_VkBufferOpaqueCaptureAddressCreateInfoKHR& src);
+    safe_VkBufferOpaqueCaptureAddressCreateInfoKHR();
+    ~safe_VkBufferOpaqueCaptureAddressCreateInfoKHR();
+    void initialize(const VkBufferOpaqueCaptureAddressCreateInfoKHR* in_struct);
+    void initialize(const safe_VkBufferOpaqueCaptureAddressCreateInfoKHR* src);
+    VkBufferOpaqueCaptureAddressCreateInfoKHR *ptr() { return reinterpret_cast<VkBufferOpaqueCaptureAddressCreateInfoKHR *>(this); }
+    VkBufferOpaqueCaptureAddressCreateInfoKHR const *ptr() const { return reinterpret_cast<VkBufferOpaqueCaptureAddressCreateInfoKHR const *>(this); }
+};
+
+struct safe_VkMemoryOpaqueCaptureAddressAllocateInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    uint64_t opaqueCaptureAddress;
+    safe_VkMemoryOpaqueCaptureAddressAllocateInfoKHR(const VkMemoryOpaqueCaptureAddressAllocateInfoKHR* in_struct);
+    safe_VkMemoryOpaqueCaptureAddressAllocateInfoKHR(const safe_VkMemoryOpaqueCaptureAddressAllocateInfoKHR& src);
+    safe_VkMemoryOpaqueCaptureAddressAllocateInfoKHR& operator=(const safe_VkMemoryOpaqueCaptureAddressAllocateInfoKHR& src);
+    safe_VkMemoryOpaqueCaptureAddressAllocateInfoKHR();
+    ~safe_VkMemoryOpaqueCaptureAddressAllocateInfoKHR();
+    void initialize(const VkMemoryOpaqueCaptureAddressAllocateInfoKHR* in_struct);
+    void initialize(const safe_VkMemoryOpaqueCaptureAddressAllocateInfoKHR* src);
+    VkMemoryOpaqueCaptureAddressAllocateInfoKHR *ptr() { return reinterpret_cast<VkMemoryOpaqueCaptureAddressAllocateInfoKHR *>(this); }
+    VkMemoryOpaqueCaptureAddressAllocateInfoKHR const *ptr() const { return reinterpret_cast<VkMemoryOpaqueCaptureAddressAllocateInfoKHR const *>(this); }
+};
+
+struct safe_VkDeviceMemoryOpaqueCaptureAddressInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkDeviceMemory memory;
+    safe_VkDeviceMemoryOpaqueCaptureAddressInfoKHR(const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* in_struct);
+    safe_VkDeviceMemoryOpaqueCaptureAddressInfoKHR(const safe_VkDeviceMemoryOpaqueCaptureAddressInfoKHR& src);
+    safe_VkDeviceMemoryOpaqueCaptureAddressInfoKHR& operator=(const safe_VkDeviceMemoryOpaqueCaptureAddressInfoKHR& src);
+    safe_VkDeviceMemoryOpaqueCaptureAddressInfoKHR();
+    ~safe_VkDeviceMemoryOpaqueCaptureAddressInfoKHR();
+    void initialize(const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* in_struct);
+    void initialize(const safe_VkDeviceMemoryOpaqueCaptureAddressInfoKHR* src);
+    VkDeviceMemoryOpaqueCaptureAddressInfoKHR *ptr() { return reinterpret_cast<VkDeviceMemoryOpaqueCaptureAddressInfoKHR *>(this); }
+    VkDeviceMemoryOpaqueCaptureAddressInfoKHR const *ptr() const { return reinterpret_cast<VkDeviceMemoryOpaqueCaptureAddressInfoKHR const *>(this); }
+};
+
+struct safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 pipelineExecutableInfo;
+    safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR(const VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR* in_struct);
+    safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR(const safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR& src);
+    safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR& operator=(const safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR& src);
+    safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR();
+    ~safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR();
+    void initialize(const VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR* in_struct);
+    void initialize(const safe_VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR* src);
+    VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR *ptr() { return reinterpret_cast<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR *>(this); }
+    VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *ptr() const { return reinterpret_cast<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *>(this); }
+};
+
+struct safe_VkPipelineInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkPipeline pipeline;
+    safe_VkPipelineInfoKHR(const VkPipelineInfoKHR* in_struct);
+    safe_VkPipelineInfoKHR(const safe_VkPipelineInfoKHR& src);
+    safe_VkPipelineInfoKHR& operator=(const safe_VkPipelineInfoKHR& src);
+    safe_VkPipelineInfoKHR();
+    ~safe_VkPipelineInfoKHR();
+    void initialize(const VkPipelineInfoKHR* in_struct);
+    void initialize(const safe_VkPipelineInfoKHR* src);
+    VkPipelineInfoKHR *ptr() { return reinterpret_cast<VkPipelineInfoKHR *>(this); }
+    VkPipelineInfoKHR const *ptr() const { return reinterpret_cast<VkPipelineInfoKHR const *>(this); }
+};
+
+struct safe_VkPipelineExecutablePropertiesKHR {
+    VkStructureType sType;
+    void* pNext;
+    VkShaderStageFlags stages;
+    char name[VK_MAX_DESCRIPTION_SIZE];
+    char description[VK_MAX_DESCRIPTION_SIZE];
+    uint32_t subgroupSize;
+    safe_VkPipelineExecutablePropertiesKHR(const VkPipelineExecutablePropertiesKHR* in_struct);
+    safe_VkPipelineExecutablePropertiesKHR(const safe_VkPipelineExecutablePropertiesKHR& src);
+    safe_VkPipelineExecutablePropertiesKHR& operator=(const safe_VkPipelineExecutablePropertiesKHR& src);
+    safe_VkPipelineExecutablePropertiesKHR();
+    ~safe_VkPipelineExecutablePropertiesKHR();
+    void initialize(const VkPipelineExecutablePropertiesKHR* in_struct);
+    void initialize(const safe_VkPipelineExecutablePropertiesKHR* src);
+    VkPipelineExecutablePropertiesKHR *ptr() { return reinterpret_cast<VkPipelineExecutablePropertiesKHR *>(this); }
+    VkPipelineExecutablePropertiesKHR const *ptr() const { return reinterpret_cast<VkPipelineExecutablePropertiesKHR const *>(this); }
+};
+
+struct safe_VkPipelineExecutableInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkPipeline pipeline;
+    uint32_t executableIndex;
+    safe_VkPipelineExecutableInfoKHR(const VkPipelineExecutableInfoKHR* in_struct);
+    safe_VkPipelineExecutableInfoKHR(const safe_VkPipelineExecutableInfoKHR& src);
+    safe_VkPipelineExecutableInfoKHR& operator=(const safe_VkPipelineExecutableInfoKHR& src);
+    safe_VkPipelineExecutableInfoKHR();
+    ~safe_VkPipelineExecutableInfoKHR();
+    void initialize(const VkPipelineExecutableInfoKHR* in_struct);
+    void initialize(const safe_VkPipelineExecutableInfoKHR* src);
+    VkPipelineExecutableInfoKHR *ptr() { return reinterpret_cast<VkPipelineExecutableInfoKHR *>(this); }
+    VkPipelineExecutableInfoKHR const *ptr() const { return reinterpret_cast<VkPipelineExecutableInfoKHR const *>(this); }
+};
+
+struct safe_VkPipelineExecutableStatisticKHR {
+    VkStructureType sType;
+    void* pNext;
+    char name[VK_MAX_DESCRIPTION_SIZE];
+    char description[VK_MAX_DESCRIPTION_SIZE];
+    VkPipelineExecutableStatisticFormatKHR format;
+    VkPipelineExecutableStatisticValueKHR value;
+    safe_VkPipelineExecutableStatisticKHR(const VkPipelineExecutableStatisticKHR* in_struct);
+    safe_VkPipelineExecutableStatisticKHR(const safe_VkPipelineExecutableStatisticKHR& src);
+    safe_VkPipelineExecutableStatisticKHR& operator=(const safe_VkPipelineExecutableStatisticKHR& src);
+    safe_VkPipelineExecutableStatisticKHR();
+    ~safe_VkPipelineExecutableStatisticKHR();
+    void initialize(const VkPipelineExecutableStatisticKHR* in_struct);
+    void initialize(const safe_VkPipelineExecutableStatisticKHR* src);
+    VkPipelineExecutableStatisticKHR *ptr() { return reinterpret_cast<VkPipelineExecutableStatisticKHR *>(this); }
+    VkPipelineExecutableStatisticKHR const *ptr() const { return reinterpret_cast<VkPipelineExecutableStatisticKHR const *>(this); }
+};
+
+struct safe_VkPipelineExecutableInternalRepresentationKHR {
+    VkStructureType sType;
+    void* pNext;
+    char name[VK_MAX_DESCRIPTION_SIZE];
+    char description[VK_MAX_DESCRIPTION_SIZE];
+    VkBool32 isText;
+    size_t dataSize;
+    void* pData;
+    safe_VkPipelineExecutableInternalRepresentationKHR(const VkPipelineExecutableInternalRepresentationKHR* in_struct);
+    safe_VkPipelineExecutableInternalRepresentationKHR(const safe_VkPipelineExecutableInternalRepresentationKHR& src);
+    safe_VkPipelineExecutableInternalRepresentationKHR& operator=(const safe_VkPipelineExecutableInternalRepresentationKHR& src);
+    safe_VkPipelineExecutableInternalRepresentationKHR();
+    ~safe_VkPipelineExecutableInternalRepresentationKHR();
+    void initialize(const VkPipelineExecutableInternalRepresentationKHR* in_struct);
+    void initialize(const safe_VkPipelineExecutableInternalRepresentationKHR* src);
+    VkPipelineExecutableInternalRepresentationKHR *ptr() { return reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>(this); }
+    VkPipelineExecutableInternalRepresentationKHR const *ptr() const { return reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR const *>(this); }
+};
+
+struct safe_VkDebugReportCallbackCreateInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkDebugReportFlagsEXT flags;
+    PFN_vkDebugReportCallbackEXT pfnCallback;
+    void* pUserData;
+    safe_VkDebugReportCallbackCreateInfoEXT(const VkDebugReportCallbackCreateInfoEXT* in_struct);
+    safe_VkDebugReportCallbackCreateInfoEXT(const safe_VkDebugReportCallbackCreateInfoEXT& src);
+    safe_VkDebugReportCallbackCreateInfoEXT& operator=(const safe_VkDebugReportCallbackCreateInfoEXT& src);
+    safe_VkDebugReportCallbackCreateInfoEXT();
+    ~safe_VkDebugReportCallbackCreateInfoEXT();
+    void initialize(const VkDebugReportCallbackCreateInfoEXT* in_struct);
+    void initialize(const safe_VkDebugReportCallbackCreateInfoEXT* src);
+    VkDebugReportCallbackCreateInfoEXT *ptr() { return reinterpret_cast<VkDebugReportCallbackCreateInfoEXT *>(this); }
+    VkDebugReportCallbackCreateInfoEXT const *ptr() const { return reinterpret_cast<VkDebugReportCallbackCreateInfoEXT const *>(this); }
+};
+
+struct safe_VkPipelineRasterizationStateRasterizationOrderAMD {
+    VkStructureType sType;
+    const void* pNext;
+    VkRasterizationOrderAMD rasterizationOrder;
+    safe_VkPipelineRasterizationStateRasterizationOrderAMD(const VkPipelineRasterizationStateRasterizationOrderAMD* in_struct);
+    safe_VkPipelineRasterizationStateRasterizationOrderAMD(const safe_VkPipelineRasterizationStateRasterizationOrderAMD& src);
+    safe_VkPipelineRasterizationStateRasterizationOrderAMD& operator=(const safe_VkPipelineRasterizationStateRasterizationOrderAMD& src);
+    safe_VkPipelineRasterizationStateRasterizationOrderAMD();
+    ~safe_VkPipelineRasterizationStateRasterizationOrderAMD();
+    void initialize(const VkPipelineRasterizationStateRasterizationOrderAMD* in_struct);
+    void initialize(const safe_VkPipelineRasterizationStateRasterizationOrderAMD* src);
+    VkPipelineRasterizationStateRasterizationOrderAMD *ptr() { return reinterpret_cast<VkPipelineRasterizationStateRasterizationOrderAMD *>(this); }
+    VkPipelineRasterizationStateRasterizationOrderAMD const *ptr() const { return reinterpret_cast<VkPipelineRasterizationStateRasterizationOrderAMD const *>(this); }
+};
+
+struct safe_VkDebugMarkerObjectNameInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkDebugReportObjectTypeEXT objectType;
+    uint64_t object;
+    const char* pObjectName;
+    safe_VkDebugMarkerObjectNameInfoEXT(const VkDebugMarkerObjectNameInfoEXT* in_struct);
+    safe_VkDebugMarkerObjectNameInfoEXT(const safe_VkDebugMarkerObjectNameInfoEXT& src);
+    safe_VkDebugMarkerObjectNameInfoEXT& operator=(const safe_VkDebugMarkerObjectNameInfoEXT& src);
+    safe_VkDebugMarkerObjectNameInfoEXT();
+    ~safe_VkDebugMarkerObjectNameInfoEXT();
+    void initialize(const VkDebugMarkerObjectNameInfoEXT* in_struct);
+    void initialize(const safe_VkDebugMarkerObjectNameInfoEXT* src);
+    VkDebugMarkerObjectNameInfoEXT *ptr() { return reinterpret_cast<VkDebugMarkerObjectNameInfoEXT *>(this); }
+    VkDebugMarkerObjectNameInfoEXT const *ptr() const { return reinterpret_cast<VkDebugMarkerObjectNameInfoEXT const *>(this); }
+};
+
+struct safe_VkDebugMarkerObjectTagInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkDebugReportObjectTypeEXT objectType;
+    uint64_t object;
+    uint64_t tagName;
+    size_t tagSize;
+    const void* pTag;
+    safe_VkDebugMarkerObjectTagInfoEXT(const VkDebugMarkerObjectTagInfoEXT* in_struct);
+    safe_VkDebugMarkerObjectTagInfoEXT(const safe_VkDebugMarkerObjectTagInfoEXT& src);
+    safe_VkDebugMarkerObjectTagInfoEXT& operator=(const safe_VkDebugMarkerObjectTagInfoEXT& src);
+    safe_VkDebugMarkerObjectTagInfoEXT();
+    ~safe_VkDebugMarkerObjectTagInfoEXT();
+    void initialize(const VkDebugMarkerObjectTagInfoEXT* in_struct);
+    void initialize(const safe_VkDebugMarkerObjectTagInfoEXT* src);
+    VkDebugMarkerObjectTagInfoEXT *ptr() { return reinterpret_cast<VkDebugMarkerObjectTagInfoEXT *>(this); }
+    VkDebugMarkerObjectTagInfoEXT const *ptr() const { return reinterpret_cast<VkDebugMarkerObjectTagInfoEXT const *>(this); }
+};
+
+struct safe_VkDebugMarkerMarkerInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    const char* pMarkerName;
+    float color[4];
+    safe_VkDebugMarkerMarkerInfoEXT(const VkDebugMarkerMarkerInfoEXT* in_struct);
+    safe_VkDebugMarkerMarkerInfoEXT(const safe_VkDebugMarkerMarkerInfoEXT& src);
+    safe_VkDebugMarkerMarkerInfoEXT& operator=(const safe_VkDebugMarkerMarkerInfoEXT& src);
+    safe_VkDebugMarkerMarkerInfoEXT();
+    ~safe_VkDebugMarkerMarkerInfoEXT();
+    void initialize(const VkDebugMarkerMarkerInfoEXT* in_struct);
+    void initialize(const safe_VkDebugMarkerMarkerInfoEXT* src);
+    VkDebugMarkerMarkerInfoEXT *ptr() { return reinterpret_cast<VkDebugMarkerMarkerInfoEXT *>(this); }
+    VkDebugMarkerMarkerInfoEXT const *ptr() const { return reinterpret_cast<VkDebugMarkerMarkerInfoEXT const *>(this); }
+};
+
+struct safe_VkDedicatedAllocationImageCreateInfoNV {
+    VkStructureType sType;
+    const void* pNext;
+    VkBool32 dedicatedAllocation;
+    safe_VkDedicatedAllocationImageCreateInfoNV(const VkDedicatedAllocationImageCreateInfoNV* in_struct);
+    safe_VkDedicatedAllocationImageCreateInfoNV(const safe_VkDedicatedAllocationImageCreateInfoNV& src);
+    safe_VkDedicatedAllocationImageCreateInfoNV& operator=(const safe_VkDedicatedAllocationImageCreateInfoNV& src);
+    safe_VkDedicatedAllocationImageCreateInfoNV();
+    ~safe_VkDedicatedAllocationImageCreateInfoNV();
+    void initialize(const VkDedicatedAllocationImageCreateInfoNV* in_struct);
+    void initialize(const safe_VkDedicatedAllocationImageCreateInfoNV* src);
+    VkDedicatedAllocationImageCreateInfoNV *ptr() { return reinterpret_cast<VkDedicatedAllocationImageCreateInfoNV *>(this); }
+    VkDedicatedAllocationImageCreateInfoNV const *ptr() const { return reinterpret_cast<VkDedicatedAllocationImageCreateInfoNV const *>(this); }
+};
+
+struct safe_VkDedicatedAllocationBufferCreateInfoNV {
+    VkStructureType sType;
+    const void* pNext;
+    VkBool32 dedicatedAllocation;
+    safe_VkDedicatedAllocationBufferCreateInfoNV(const VkDedicatedAllocationBufferCreateInfoNV* in_struct);
+    safe_VkDedicatedAllocationBufferCreateInfoNV(const safe_VkDedicatedAllocationBufferCreateInfoNV& src);
+    safe_VkDedicatedAllocationBufferCreateInfoNV& operator=(const safe_VkDedicatedAllocationBufferCreateInfoNV& src);
+    safe_VkDedicatedAllocationBufferCreateInfoNV();
+    ~safe_VkDedicatedAllocationBufferCreateInfoNV();
+    void initialize(const VkDedicatedAllocationBufferCreateInfoNV* in_struct);
+    void initialize(const safe_VkDedicatedAllocationBufferCreateInfoNV* src);
+    VkDedicatedAllocationBufferCreateInfoNV *ptr() { return reinterpret_cast<VkDedicatedAllocationBufferCreateInfoNV *>(this); }
+    VkDedicatedAllocationBufferCreateInfoNV const *ptr() const { return reinterpret_cast<VkDedicatedAllocationBufferCreateInfoNV const *>(this); }
+};
+
+struct safe_VkDedicatedAllocationMemoryAllocateInfoNV {
+    VkStructureType sType;
+    const void* pNext;
+    VkImage image;
+    VkBuffer buffer;
+    safe_VkDedicatedAllocationMemoryAllocateInfoNV(const VkDedicatedAllocationMemoryAllocateInfoNV* in_struct);
+    safe_VkDedicatedAllocationMemoryAllocateInfoNV(const safe_VkDedicatedAllocationMemoryAllocateInfoNV& src);
+    safe_VkDedicatedAllocationMemoryAllocateInfoNV& operator=(const safe_VkDedicatedAllocationMemoryAllocateInfoNV& src);
+    safe_VkDedicatedAllocationMemoryAllocateInfoNV();
+    ~safe_VkDedicatedAllocationMemoryAllocateInfoNV();
+    void initialize(const VkDedicatedAllocationMemoryAllocateInfoNV* in_struct);
+    void initialize(const safe_VkDedicatedAllocationMemoryAllocateInfoNV* src);
+    VkDedicatedAllocationMemoryAllocateInfoNV *ptr() { return reinterpret_cast<VkDedicatedAllocationMemoryAllocateInfoNV *>(this); }
+    VkDedicatedAllocationMemoryAllocateInfoNV const *ptr() const { return reinterpret_cast<VkDedicatedAllocationMemoryAllocateInfoNV const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 transformFeedback;
+    VkBool32 geometryStreams;
+    safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT(const VkPhysicalDeviceTransformFeedbackFeaturesEXT* in_struct);
+    safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT(const safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT& src);
+    safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT& operator=(const safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT& src);
+    safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT();
+    ~safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT();
+    void initialize(const VkPhysicalDeviceTransformFeedbackFeaturesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceTransformFeedbackFeaturesEXT* src);
+    VkPhysicalDeviceTransformFeedbackFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceTransformFeedbackFeaturesEXT *>(this); }
+    VkPhysicalDeviceTransformFeedbackFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceTransformFeedbackFeaturesEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT {
+    VkStructureType sType;
+    void* pNext;
+    uint32_t maxTransformFeedbackStreams;
+    uint32_t maxTransformFeedbackBuffers;
+    VkDeviceSize maxTransformFeedbackBufferSize;
+    uint32_t maxTransformFeedbackStreamDataSize;
+    uint32_t maxTransformFeedbackBufferDataSize;
+    uint32_t maxTransformFeedbackBufferDataStride;
+    VkBool32 transformFeedbackQueries;
+    VkBool32 transformFeedbackStreamsLinesTriangles;
+    VkBool32 transformFeedbackRasterizationStreamSelect;
+    VkBool32 transformFeedbackDraw;
+    safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT(const VkPhysicalDeviceTransformFeedbackPropertiesEXT* in_struct);
+    safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT(const safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT& src);
+    safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT& operator=(const safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT& src);
+    safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT();
+    ~safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT();
+    void initialize(const VkPhysicalDeviceTransformFeedbackPropertiesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceTransformFeedbackPropertiesEXT* src);
+    VkPhysicalDeviceTransformFeedbackPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceTransformFeedbackPropertiesEXT *>(this); }
+    VkPhysicalDeviceTransformFeedbackPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceTransformFeedbackPropertiesEXT const *>(this); }
+};
+
+struct safe_VkPipelineRasterizationStateStreamCreateInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkPipelineRasterizationStateStreamCreateFlagsEXT flags;
+    uint32_t rasterizationStream;
+    safe_VkPipelineRasterizationStateStreamCreateInfoEXT(const VkPipelineRasterizationStateStreamCreateInfoEXT* in_struct);
+    safe_VkPipelineRasterizationStateStreamCreateInfoEXT(const safe_VkPipelineRasterizationStateStreamCreateInfoEXT& src);
+    safe_VkPipelineRasterizationStateStreamCreateInfoEXT& operator=(const safe_VkPipelineRasterizationStateStreamCreateInfoEXT& src);
+    safe_VkPipelineRasterizationStateStreamCreateInfoEXT();
+    ~safe_VkPipelineRasterizationStateStreamCreateInfoEXT();
+    void initialize(const VkPipelineRasterizationStateStreamCreateInfoEXT* in_struct);
+    void initialize(const safe_VkPipelineRasterizationStateStreamCreateInfoEXT* src);
+    VkPipelineRasterizationStateStreamCreateInfoEXT *ptr() { return reinterpret_cast<VkPipelineRasterizationStateStreamCreateInfoEXT *>(this); }
+    VkPipelineRasterizationStateStreamCreateInfoEXT const *ptr() const { return reinterpret_cast<VkPipelineRasterizationStateStreamCreateInfoEXT const *>(this); }
+};
+
+struct safe_VkImageViewHandleInfoNVX {
+    VkStructureType sType;
+    const void* pNext;
+    VkImageView imageView;
+    VkDescriptorType descriptorType;
+    VkSampler sampler;
+    safe_VkImageViewHandleInfoNVX(const VkImageViewHandleInfoNVX* in_struct);
+    safe_VkImageViewHandleInfoNVX(const safe_VkImageViewHandleInfoNVX& src);
+    safe_VkImageViewHandleInfoNVX& operator=(const safe_VkImageViewHandleInfoNVX& src);
+    safe_VkImageViewHandleInfoNVX();
+    ~safe_VkImageViewHandleInfoNVX();
+    void initialize(const VkImageViewHandleInfoNVX* in_struct);
+    void initialize(const safe_VkImageViewHandleInfoNVX* src);
+    VkImageViewHandleInfoNVX *ptr() { return reinterpret_cast<VkImageViewHandleInfoNVX *>(this); }
+    VkImageViewHandleInfoNVX const *ptr() const { return reinterpret_cast<VkImageViewHandleInfoNVX const *>(this); }
+};
+
+struct safe_VkTextureLODGatherFormatPropertiesAMD {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 supportsTextureGatherLODBiasAMD;
+    safe_VkTextureLODGatherFormatPropertiesAMD(const VkTextureLODGatherFormatPropertiesAMD* in_struct);
+    safe_VkTextureLODGatherFormatPropertiesAMD(const safe_VkTextureLODGatherFormatPropertiesAMD& src);
+    safe_VkTextureLODGatherFormatPropertiesAMD& operator=(const safe_VkTextureLODGatherFormatPropertiesAMD& src);
+    safe_VkTextureLODGatherFormatPropertiesAMD();
+    ~safe_VkTextureLODGatherFormatPropertiesAMD();
+    void initialize(const VkTextureLODGatherFormatPropertiesAMD* in_struct);
+    void initialize(const safe_VkTextureLODGatherFormatPropertiesAMD* src);
+    VkTextureLODGatherFormatPropertiesAMD *ptr() { return reinterpret_cast<VkTextureLODGatherFormatPropertiesAMD *>(this); }
+    VkTextureLODGatherFormatPropertiesAMD const *ptr() const { return reinterpret_cast<VkTextureLODGatherFormatPropertiesAMD const *>(this); }
+};
+
+#ifdef VK_USE_PLATFORM_GGP
+struct safe_VkStreamDescriptorSurfaceCreateInfoGGP {
+    VkStructureType sType;
+    const void* pNext;
+    VkStreamDescriptorSurfaceCreateFlagsGGP flags;
+    GgpStreamDescriptor streamDescriptor;
+    safe_VkStreamDescriptorSurfaceCreateInfoGGP(const VkStreamDescriptorSurfaceCreateInfoGGP* in_struct);
+    safe_VkStreamDescriptorSurfaceCreateInfoGGP(const safe_VkStreamDescriptorSurfaceCreateInfoGGP& src);
+    safe_VkStreamDescriptorSurfaceCreateInfoGGP& operator=(const safe_VkStreamDescriptorSurfaceCreateInfoGGP& src);
+    safe_VkStreamDescriptorSurfaceCreateInfoGGP();
+    ~safe_VkStreamDescriptorSurfaceCreateInfoGGP();
+    void initialize(const VkStreamDescriptorSurfaceCreateInfoGGP* in_struct);
+    void initialize(const safe_VkStreamDescriptorSurfaceCreateInfoGGP* src);
+    VkStreamDescriptorSurfaceCreateInfoGGP *ptr() { return reinterpret_cast<VkStreamDescriptorSurfaceCreateInfoGGP *>(this); }
+    VkStreamDescriptorSurfaceCreateInfoGGP const *ptr() const { return reinterpret_cast<VkStreamDescriptorSurfaceCreateInfoGGP const *>(this); }
+};
+#endif // VK_USE_PLATFORM_GGP
+
+struct safe_VkPhysicalDeviceCornerSampledImageFeaturesNV {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 cornerSampledImage;
+    safe_VkPhysicalDeviceCornerSampledImageFeaturesNV(const VkPhysicalDeviceCornerSampledImageFeaturesNV* in_struct);
+    safe_VkPhysicalDeviceCornerSampledImageFeaturesNV(const safe_VkPhysicalDeviceCornerSampledImageFeaturesNV& src);
+    safe_VkPhysicalDeviceCornerSampledImageFeaturesNV& operator=(const safe_VkPhysicalDeviceCornerSampledImageFeaturesNV& src);
+    safe_VkPhysicalDeviceCornerSampledImageFeaturesNV();
+    ~safe_VkPhysicalDeviceCornerSampledImageFeaturesNV();
+    void initialize(const VkPhysicalDeviceCornerSampledImageFeaturesNV* in_struct);
+    void initialize(const safe_VkPhysicalDeviceCornerSampledImageFeaturesNV* src);
+    VkPhysicalDeviceCornerSampledImageFeaturesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceCornerSampledImageFeaturesNV *>(this); }
+    VkPhysicalDeviceCornerSampledImageFeaturesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceCornerSampledImageFeaturesNV const *>(this); }
+};
+
+struct safe_VkExternalMemoryImageCreateInfoNV {
+    VkStructureType sType;
+    const void* pNext;
+    VkExternalMemoryHandleTypeFlagsNV handleTypes;
+    safe_VkExternalMemoryImageCreateInfoNV(const VkExternalMemoryImageCreateInfoNV* in_struct);
+    safe_VkExternalMemoryImageCreateInfoNV(const safe_VkExternalMemoryImageCreateInfoNV& src);
+    safe_VkExternalMemoryImageCreateInfoNV& operator=(const safe_VkExternalMemoryImageCreateInfoNV& src);
+    safe_VkExternalMemoryImageCreateInfoNV();
+    ~safe_VkExternalMemoryImageCreateInfoNV();
+    void initialize(const VkExternalMemoryImageCreateInfoNV* in_struct);
+    void initialize(const safe_VkExternalMemoryImageCreateInfoNV* src);
+    VkExternalMemoryImageCreateInfoNV *ptr() { return reinterpret_cast<VkExternalMemoryImageCreateInfoNV *>(this); }
+    VkExternalMemoryImageCreateInfoNV const *ptr() const { return reinterpret_cast<VkExternalMemoryImageCreateInfoNV const *>(this); }
+};
+
+struct safe_VkExportMemoryAllocateInfoNV {
+    VkStructureType sType;
+    const void* pNext;
+    VkExternalMemoryHandleTypeFlagsNV handleTypes;
+    safe_VkExportMemoryAllocateInfoNV(const VkExportMemoryAllocateInfoNV* in_struct);
+    safe_VkExportMemoryAllocateInfoNV(const safe_VkExportMemoryAllocateInfoNV& src);
+    safe_VkExportMemoryAllocateInfoNV& operator=(const safe_VkExportMemoryAllocateInfoNV& src);
+    safe_VkExportMemoryAllocateInfoNV();
+    ~safe_VkExportMemoryAllocateInfoNV();
+    void initialize(const VkExportMemoryAllocateInfoNV* in_struct);
+    void initialize(const safe_VkExportMemoryAllocateInfoNV* src);
+    VkExportMemoryAllocateInfoNV *ptr() { return reinterpret_cast<VkExportMemoryAllocateInfoNV *>(this); }
+    VkExportMemoryAllocateInfoNV const *ptr() const { return reinterpret_cast<VkExportMemoryAllocateInfoNV const *>(this); }
+};
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+struct safe_VkImportMemoryWin32HandleInfoNV {
+    VkStructureType sType;
+    const void* pNext;
+    VkExternalMemoryHandleTypeFlagsNV handleType;
+    HANDLE handle;
+    safe_VkImportMemoryWin32HandleInfoNV(const VkImportMemoryWin32HandleInfoNV* in_struct);
+    safe_VkImportMemoryWin32HandleInfoNV(const safe_VkImportMemoryWin32HandleInfoNV& src);
+    safe_VkImportMemoryWin32HandleInfoNV& operator=(const safe_VkImportMemoryWin32HandleInfoNV& src);
+    safe_VkImportMemoryWin32HandleInfoNV();
+    ~safe_VkImportMemoryWin32HandleInfoNV();
+    void initialize(const VkImportMemoryWin32HandleInfoNV* in_struct);
+    void initialize(const safe_VkImportMemoryWin32HandleInfoNV* src);
+    VkImportMemoryWin32HandleInfoNV *ptr() { return reinterpret_cast<VkImportMemoryWin32HandleInfoNV *>(this); }
+    VkImportMemoryWin32HandleInfoNV const *ptr() const { return reinterpret_cast<VkImportMemoryWin32HandleInfoNV const *>(this); }
+};
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+struct safe_VkExportMemoryWin32HandleInfoNV {
+    VkStructureType sType;
+    const void* pNext;
+    const SECURITY_ATTRIBUTES* pAttributes;
+    DWORD dwAccess;
+    safe_VkExportMemoryWin32HandleInfoNV(const VkExportMemoryWin32HandleInfoNV* in_struct);
+    safe_VkExportMemoryWin32HandleInfoNV(const safe_VkExportMemoryWin32HandleInfoNV& src);
+    safe_VkExportMemoryWin32HandleInfoNV& operator=(const safe_VkExportMemoryWin32HandleInfoNV& src);
+    safe_VkExportMemoryWin32HandleInfoNV();
+    ~safe_VkExportMemoryWin32HandleInfoNV();
+    void initialize(const VkExportMemoryWin32HandleInfoNV* in_struct);
+    void initialize(const safe_VkExportMemoryWin32HandleInfoNV* src);
+    VkExportMemoryWin32HandleInfoNV *ptr() { return reinterpret_cast<VkExportMemoryWin32HandleInfoNV *>(this); }
+    VkExportMemoryWin32HandleInfoNV const *ptr() const { return reinterpret_cast<VkExportMemoryWin32HandleInfoNV const *>(this); }
+};
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+struct safe_VkWin32KeyedMutexAcquireReleaseInfoNV {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t acquireCount;
+    VkDeviceMemory* pAcquireSyncs;
+    const uint64_t* pAcquireKeys;
+    const uint32_t* pAcquireTimeoutMilliseconds;
+    uint32_t releaseCount;
+    VkDeviceMemory* pReleaseSyncs;
+    const uint64_t* pReleaseKeys;
+    safe_VkWin32KeyedMutexAcquireReleaseInfoNV(const VkWin32KeyedMutexAcquireReleaseInfoNV* in_struct);
+    safe_VkWin32KeyedMutexAcquireReleaseInfoNV(const safe_VkWin32KeyedMutexAcquireReleaseInfoNV& src);
+    safe_VkWin32KeyedMutexAcquireReleaseInfoNV& operator=(const safe_VkWin32KeyedMutexAcquireReleaseInfoNV& src);
+    safe_VkWin32KeyedMutexAcquireReleaseInfoNV();
+    ~safe_VkWin32KeyedMutexAcquireReleaseInfoNV();
+    void initialize(const VkWin32KeyedMutexAcquireReleaseInfoNV* in_struct);
+    void initialize(const safe_VkWin32KeyedMutexAcquireReleaseInfoNV* src);
+    VkWin32KeyedMutexAcquireReleaseInfoNV *ptr() { return reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoNV *>(this); }
+    VkWin32KeyedMutexAcquireReleaseInfoNV const *ptr() const { return reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoNV const *>(this); }
+};
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+struct safe_VkValidationFlagsEXT {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t disabledValidationCheckCount;
+    const VkValidationCheckEXT* pDisabledValidationChecks;
+    safe_VkValidationFlagsEXT(const VkValidationFlagsEXT* in_struct);
+    safe_VkValidationFlagsEXT(const safe_VkValidationFlagsEXT& src);
+    safe_VkValidationFlagsEXT& operator=(const safe_VkValidationFlagsEXT& src);
+    safe_VkValidationFlagsEXT();
+    ~safe_VkValidationFlagsEXT();
+    void initialize(const VkValidationFlagsEXT* in_struct);
+    void initialize(const safe_VkValidationFlagsEXT* src);
+    VkValidationFlagsEXT *ptr() { return reinterpret_cast<VkValidationFlagsEXT *>(this); }
+    VkValidationFlagsEXT const *ptr() const { return reinterpret_cast<VkValidationFlagsEXT const *>(this); }
+};
+
+#ifdef VK_USE_PLATFORM_VI_NN
+struct safe_VkViSurfaceCreateInfoNN {
+    VkStructureType sType;
+    const void* pNext;
+    VkViSurfaceCreateFlagsNN flags;
+    void* window;
+    safe_VkViSurfaceCreateInfoNN(const VkViSurfaceCreateInfoNN* in_struct);
+    safe_VkViSurfaceCreateInfoNN(const safe_VkViSurfaceCreateInfoNN& src);
+    safe_VkViSurfaceCreateInfoNN& operator=(const safe_VkViSurfaceCreateInfoNN& src);
+    safe_VkViSurfaceCreateInfoNN();
+    ~safe_VkViSurfaceCreateInfoNN();
+    void initialize(const VkViSurfaceCreateInfoNN* in_struct);
+    void initialize(const safe_VkViSurfaceCreateInfoNN* src);
+    VkViSurfaceCreateInfoNN *ptr() { return reinterpret_cast<VkViSurfaceCreateInfoNN *>(this); }
+    VkViSurfaceCreateInfoNN const *ptr() const { return reinterpret_cast<VkViSurfaceCreateInfoNN const *>(this); }
+};
+#endif // VK_USE_PLATFORM_VI_NN
+
+struct safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 textureCompressionASTC_HDR;
+    safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT(const VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT* in_struct);
+    safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT(const safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT& src);
+    safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT& operator=(const safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT& src);
+    safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT();
+    ~safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT();
+    void initialize(const VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT* src);
+    VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT *>(this); }
+    VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const *>(this); }
+};
+
+struct safe_VkImageViewASTCDecodeModeEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkFormat decodeMode;
+    safe_VkImageViewASTCDecodeModeEXT(const VkImageViewASTCDecodeModeEXT* in_struct);
+    safe_VkImageViewASTCDecodeModeEXT(const safe_VkImageViewASTCDecodeModeEXT& src);
+    safe_VkImageViewASTCDecodeModeEXT& operator=(const safe_VkImageViewASTCDecodeModeEXT& src);
+    safe_VkImageViewASTCDecodeModeEXT();
+    ~safe_VkImageViewASTCDecodeModeEXT();
+    void initialize(const VkImageViewASTCDecodeModeEXT* in_struct);
+    void initialize(const safe_VkImageViewASTCDecodeModeEXT* src);
+    VkImageViewASTCDecodeModeEXT *ptr() { return reinterpret_cast<VkImageViewASTCDecodeModeEXT *>(this); }
+    VkImageViewASTCDecodeModeEXT const *ptr() const { return reinterpret_cast<VkImageViewASTCDecodeModeEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceASTCDecodeFeaturesEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 decodeModeSharedExponent;
+    safe_VkPhysicalDeviceASTCDecodeFeaturesEXT(const VkPhysicalDeviceASTCDecodeFeaturesEXT* in_struct);
+    safe_VkPhysicalDeviceASTCDecodeFeaturesEXT(const safe_VkPhysicalDeviceASTCDecodeFeaturesEXT& src);
+    safe_VkPhysicalDeviceASTCDecodeFeaturesEXT& operator=(const safe_VkPhysicalDeviceASTCDecodeFeaturesEXT& src);
+    safe_VkPhysicalDeviceASTCDecodeFeaturesEXT();
+    ~safe_VkPhysicalDeviceASTCDecodeFeaturesEXT();
+    void initialize(const VkPhysicalDeviceASTCDecodeFeaturesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceASTCDecodeFeaturesEXT* src);
+    VkPhysicalDeviceASTCDecodeFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceASTCDecodeFeaturesEXT *>(this); }
+    VkPhysicalDeviceASTCDecodeFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceASTCDecodeFeaturesEXT const *>(this); }
+};
+
+struct safe_VkConditionalRenderingBeginInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkBuffer buffer;
+    VkDeviceSize offset;
+    VkConditionalRenderingFlagsEXT flags;
+    safe_VkConditionalRenderingBeginInfoEXT(const VkConditionalRenderingBeginInfoEXT* in_struct);
+    safe_VkConditionalRenderingBeginInfoEXT(const safe_VkConditionalRenderingBeginInfoEXT& src);
+    safe_VkConditionalRenderingBeginInfoEXT& operator=(const safe_VkConditionalRenderingBeginInfoEXT& src);
+    safe_VkConditionalRenderingBeginInfoEXT();
+    ~safe_VkConditionalRenderingBeginInfoEXT();
+    void initialize(const VkConditionalRenderingBeginInfoEXT* in_struct);
+    void initialize(const safe_VkConditionalRenderingBeginInfoEXT* src);
+    VkConditionalRenderingBeginInfoEXT *ptr() { return reinterpret_cast<VkConditionalRenderingBeginInfoEXT *>(this); }
+    VkConditionalRenderingBeginInfoEXT const *ptr() const { return reinterpret_cast<VkConditionalRenderingBeginInfoEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 conditionalRendering;
+    VkBool32 inheritedConditionalRendering;
+    safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT(const VkPhysicalDeviceConditionalRenderingFeaturesEXT* in_struct);
+    safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT(const safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT& src);
+    safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT& operator=(const safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT& src);
+    safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT();
+    ~safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT();
+    void initialize(const VkPhysicalDeviceConditionalRenderingFeaturesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceConditionalRenderingFeaturesEXT* src);
+    VkPhysicalDeviceConditionalRenderingFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceConditionalRenderingFeaturesEXT *>(this); }
+    VkPhysicalDeviceConditionalRenderingFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceConditionalRenderingFeaturesEXT const *>(this); }
+};
+
+struct safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkBool32 conditionalRenderingEnable;
+    safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT(const VkCommandBufferInheritanceConditionalRenderingInfoEXT* in_struct);
+    safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT(const safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT& src);
+    safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT& operator=(const safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT& src);
+    safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT();
+    ~safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT();
+    void initialize(const VkCommandBufferInheritanceConditionalRenderingInfoEXT* in_struct);
+    void initialize(const safe_VkCommandBufferInheritanceConditionalRenderingInfoEXT* src);
+    VkCommandBufferInheritanceConditionalRenderingInfoEXT *ptr() { return reinterpret_cast<VkCommandBufferInheritanceConditionalRenderingInfoEXT *>(this); }
+    VkCommandBufferInheritanceConditionalRenderingInfoEXT const *ptr() const { return reinterpret_cast<VkCommandBufferInheritanceConditionalRenderingInfoEXT const *>(this); }
+};
+
+struct safe_VkDeviceGeneratedCommandsFeaturesNVX {
+    VkStructureType sType;
+    const void* pNext;
+    VkBool32 computeBindingPointSupport;
+    safe_VkDeviceGeneratedCommandsFeaturesNVX(const VkDeviceGeneratedCommandsFeaturesNVX* in_struct);
+    safe_VkDeviceGeneratedCommandsFeaturesNVX(const safe_VkDeviceGeneratedCommandsFeaturesNVX& src);
+    safe_VkDeviceGeneratedCommandsFeaturesNVX& operator=(const safe_VkDeviceGeneratedCommandsFeaturesNVX& src);
+    safe_VkDeviceGeneratedCommandsFeaturesNVX();
+    ~safe_VkDeviceGeneratedCommandsFeaturesNVX();
+    void initialize(const VkDeviceGeneratedCommandsFeaturesNVX* in_struct);
+    void initialize(const safe_VkDeviceGeneratedCommandsFeaturesNVX* src);
+    VkDeviceGeneratedCommandsFeaturesNVX *ptr() { return reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX *>(this); }
+    VkDeviceGeneratedCommandsFeaturesNVX const *ptr() const { return reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX const *>(this); }
+};
+
+struct safe_VkDeviceGeneratedCommandsLimitsNVX {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t maxIndirectCommandsLayoutTokenCount;
+    uint32_t maxObjectEntryCounts;
+    uint32_t minSequenceCountBufferOffsetAlignment;
+    uint32_t minSequenceIndexBufferOffsetAlignment;
+    uint32_t minCommandsTokenBufferOffsetAlignment;
+    safe_VkDeviceGeneratedCommandsLimitsNVX(const VkDeviceGeneratedCommandsLimitsNVX* in_struct);
+    safe_VkDeviceGeneratedCommandsLimitsNVX(const safe_VkDeviceGeneratedCommandsLimitsNVX& src);
+    safe_VkDeviceGeneratedCommandsLimitsNVX& operator=(const safe_VkDeviceGeneratedCommandsLimitsNVX& src);
+    safe_VkDeviceGeneratedCommandsLimitsNVX();
+    ~safe_VkDeviceGeneratedCommandsLimitsNVX();
+    void initialize(const VkDeviceGeneratedCommandsLimitsNVX* in_struct);
+    void initialize(const safe_VkDeviceGeneratedCommandsLimitsNVX* src);
+    VkDeviceGeneratedCommandsLimitsNVX *ptr() { return reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX *>(this); }
+    VkDeviceGeneratedCommandsLimitsNVX const *ptr() const { return reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX const *>(this); }
+};
+
+struct safe_VkIndirectCommandsLayoutCreateInfoNVX {
+    VkStructureType sType;
+    const void* pNext;
+    VkPipelineBindPoint pipelineBindPoint;
+    VkIndirectCommandsLayoutUsageFlagsNVX flags;
+    uint32_t tokenCount;
+    const VkIndirectCommandsLayoutTokenNVX* pTokens;
+    safe_VkIndirectCommandsLayoutCreateInfoNVX(const VkIndirectCommandsLayoutCreateInfoNVX* in_struct);
+    safe_VkIndirectCommandsLayoutCreateInfoNVX(const safe_VkIndirectCommandsLayoutCreateInfoNVX& src);
+    safe_VkIndirectCommandsLayoutCreateInfoNVX& operator=(const safe_VkIndirectCommandsLayoutCreateInfoNVX& src);
+    safe_VkIndirectCommandsLayoutCreateInfoNVX();
+    ~safe_VkIndirectCommandsLayoutCreateInfoNVX();
+    void initialize(const VkIndirectCommandsLayoutCreateInfoNVX* in_struct);
+    void initialize(const safe_VkIndirectCommandsLayoutCreateInfoNVX* src);
+    VkIndirectCommandsLayoutCreateInfoNVX *ptr() { return reinterpret_cast<VkIndirectCommandsLayoutCreateInfoNVX *>(this); }
+    VkIndirectCommandsLayoutCreateInfoNVX const *ptr() const { return reinterpret_cast<VkIndirectCommandsLayoutCreateInfoNVX const *>(this); }
+};
+
+struct safe_VkCmdProcessCommandsInfoNVX {
+    VkStructureType sType;
+    const void* pNext;
+    VkObjectTableNVX objectTable;
+    VkIndirectCommandsLayoutNVX indirectCommandsLayout;
+    uint32_t indirectCommandsTokenCount;
+    VkIndirectCommandsTokenNVX* pIndirectCommandsTokens;
+    uint32_t maxSequencesCount;
+    VkCommandBuffer targetCommandBuffer;
+    VkBuffer sequencesCountBuffer;
+    VkDeviceSize sequencesCountOffset;
+    VkBuffer sequencesIndexBuffer;
+    VkDeviceSize sequencesIndexOffset;
+    safe_VkCmdProcessCommandsInfoNVX(const VkCmdProcessCommandsInfoNVX* in_struct);
+    safe_VkCmdProcessCommandsInfoNVX(const safe_VkCmdProcessCommandsInfoNVX& src);
+    safe_VkCmdProcessCommandsInfoNVX& operator=(const safe_VkCmdProcessCommandsInfoNVX& src);
+    safe_VkCmdProcessCommandsInfoNVX();
+    ~safe_VkCmdProcessCommandsInfoNVX();
+    void initialize(const VkCmdProcessCommandsInfoNVX* in_struct);
+    void initialize(const safe_VkCmdProcessCommandsInfoNVX* src);
+    VkCmdProcessCommandsInfoNVX *ptr() { return reinterpret_cast<VkCmdProcessCommandsInfoNVX *>(this); }
+    VkCmdProcessCommandsInfoNVX const *ptr() const { return reinterpret_cast<VkCmdProcessCommandsInfoNVX const *>(this); }
+};
+
+struct safe_VkCmdReserveSpaceForCommandsInfoNVX {
+    VkStructureType sType;
+    const void* pNext;
+    VkObjectTableNVX objectTable;
+    VkIndirectCommandsLayoutNVX indirectCommandsLayout;
+    uint32_t maxSequencesCount;
+    safe_VkCmdReserveSpaceForCommandsInfoNVX(const VkCmdReserveSpaceForCommandsInfoNVX* in_struct);
+    safe_VkCmdReserveSpaceForCommandsInfoNVX(const safe_VkCmdReserveSpaceForCommandsInfoNVX& src);
+    safe_VkCmdReserveSpaceForCommandsInfoNVX& operator=(const safe_VkCmdReserveSpaceForCommandsInfoNVX& src);
+    safe_VkCmdReserveSpaceForCommandsInfoNVX();
+    ~safe_VkCmdReserveSpaceForCommandsInfoNVX();
+    void initialize(const VkCmdReserveSpaceForCommandsInfoNVX* in_struct);
+    void initialize(const safe_VkCmdReserveSpaceForCommandsInfoNVX* src);
+    VkCmdReserveSpaceForCommandsInfoNVX *ptr() { return reinterpret_cast<VkCmdReserveSpaceForCommandsInfoNVX *>(this); }
+    VkCmdReserveSpaceForCommandsInfoNVX const *ptr() const { return reinterpret_cast<VkCmdReserveSpaceForCommandsInfoNVX const *>(this); }
+};
+
+struct safe_VkObjectTableCreateInfoNVX {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t objectCount;
+    const VkObjectEntryTypeNVX* pObjectEntryTypes;
+    const uint32_t* pObjectEntryCounts;
+    const VkObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags;
+    uint32_t maxUniformBuffersPerDescriptor;
+    uint32_t maxStorageBuffersPerDescriptor;
+    uint32_t maxStorageImagesPerDescriptor;
+    uint32_t maxSampledImagesPerDescriptor;
+    uint32_t maxPipelineLayouts;
+    safe_VkObjectTableCreateInfoNVX(const VkObjectTableCreateInfoNVX* in_struct);
+    safe_VkObjectTableCreateInfoNVX(const safe_VkObjectTableCreateInfoNVX& src);
+    safe_VkObjectTableCreateInfoNVX& operator=(const safe_VkObjectTableCreateInfoNVX& src);
+    safe_VkObjectTableCreateInfoNVX();
+    ~safe_VkObjectTableCreateInfoNVX();
+    void initialize(const VkObjectTableCreateInfoNVX* in_struct);
+    void initialize(const safe_VkObjectTableCreateInfoNVX* src);
+    VkObjectTableCreateInfoNVX *ptr() { return reinterpret_cast<VkObjectTableCreateInfoNVX *>(this); }
+    VkObjectTableCreateInfoNVX const *ptr() const { return reinterpret_cast<VkObjectTableCreateInfoNVX const *>(this); }
+};
+
+struct safe_VkPipelineViewportWScalingStateCreateInfoNV {
+    VkStructureType sType;
+    const void* pNext;
+    VkBool32 viewportWScalingEnable;
+    uint32_t viewportCount;
+    const VkViewportWScalingNV* pViewportWScalings;
+    safe_VkPipelineViewportWScalingStateCreateInfoNV(const VkPipelineViewportWScalingStateCreateInfoNV* in_struct);
+    safe_VkPipelineViewportWScalingStateCreateInfoNV(const safe_VkPipelineViewportWScalingStateCreateInfoNV& src);
+    safe_VkPipelineViewportWScalingStateCreateInfoNV& operator=(const safe_VkPipelineViewportWScalingStateCreateInfoNV& src);
+    safe_VkPipelineViewportWScalingStateCreateInfoNV();
+    ~safe_VkPipelineViewportWScalingStateCreateInfoNV();
+    void initialize(const VkPipelineViewportWScalingStateCreateInfoNV* in_struct);
+    void initialize(const safe_VkPipelineViewportWScalingStateCreateInfoNV* src);
+    VkPipelineViewportWScalingStateCreateInfoNV *ptr() { return reinterpret_cast<VkPipelineViewportWScalingStateCreateInfoNV *>(this); }
+    VkPipelineViewportWScalingStateCreateInfoNV const *ptr() const { return reinterpret_cast<VkPipelineViewportWScalingStateCreateInfoNV const *>(this); }
+};
+
+struct safe_VkSurfaceCapabilities2EXT {
+    VkStructureType sType;
+    void* pNext;
+    uint32_t minImageCount;
+    uint32_t maxImageCount;
+    VkExtent2D currentExtent;
+    VkExtent2D minImageExtent;
+    VkExtent2D maxImageExtent;
+    uint32_t maxImageArrayLayers;
+    VkSurfaceTransformFlagsKHR supportedTransforms;
+    VkSurfaceTransformFlagBitsKHR currentTransform;
+    VkCompositeAlphaFlagsKHR supportedCompositeAlpha;
+    VkImageUsageFlags supportedUsageFlags;
+    VkSurfaceCounterFlagsEXT supportedSurfaceCounters;
+    safe_VkSurfaceCapabilities2EXT(const VkSurfaceCapabilities2EXT* in_struct);
+    safe_VkSurfaceCapabilities2EXT(const safe_VkSurfaceCapabilities2EXT& src);
+    safe_VkSurfaceCapabilities2EXT& operator=(const safe_VkSurfaceCapabilities2EXT& src);
+    safe_VkSurfaceCapabilities2EXT();
+    ~safe_VkSurfaceCapabilities2EXT();
+    void initialize(const VkSurfaceCapabilities2EXT* in_struct);
+    void initialize(const safe_VkSurfaceCapabilities2EXT* src);
+    VkSurfaceCapabilities2EXT *ptr() { return reinterpret_cast<VkSurfaceCapabilities2EXT *>(this); }
+    VkSurfaceCapabilities2EXT const *ptr() const { return reinterpret_cast<VkSurfaceCapabilities2EXT const *>(this); }
+};
+
+struct safe_VkDisplayPowerInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkDisplayPowerStateEXT powerState;
+    safe_VkDisplayPowerInfoEXT(const VkDisplayPowerInfoEXT* in_struct);
+    safe_VkDisplayPowerInfoEXT(const safe_VkDisplayPowerInfoEXT& src);
+    safe_VkDisplayPowerInfoEXT& operator=(const safe_VkDisplayPowerInfoEXT& src);
+    safe_VkDisplayPowerInfoEXT();
+    ~safe_VkDisplayPowerInfoEXT();
+    void initialize(const VkDisplayPowerInfoEXT* in_struct);
+    void initialize(const safe_VkDisplayPowerInfoEXT* src);
+    VkDisplayPowerInfoEXT *ptr() { return reinterpret_cast<VkDisplayPowerInfoEXT *>(this); }
+    VkDisplayPowerInfoEXT const *ptr() const { return reinterpret_cast<VkDisplayPowerInfoEXT const *>(this); }
+};
+
+struct safe_VkDeviceEventInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkDeviceEventTypeEXT deviceEvent;
+    safe_VkDeviceEventInfoEXT(const VkDeviceEventInfoEXT* in_struct);
+    safe_VkDeviceEventInfoEXT(const safe_VkDeviceEventInfoEXT& src);
+    safe_VkDeviceEventInfoEXT& operator=(const safe_VkDeviceEventInfoEXT& src);
+    safe_VkDeviceEventInfoEXT();
+    ~safe_VkDeviceEventInfoEXT();
+    void initialize(const VkDeviceEventInfoEXT* in_struct);
+    void initialize(const safe_VkDeviceEventInfoEXT* src);
+    VkDeviceEventInfoEXT *ptr() { return reinterpret_cast<VkDeviceEventInfoEXT *>(this); }
+    VkDeviceEventInfoEXT const *ptr() const { return reinterpret_cast<VkDeviceEventInfoEXT const *>(this); }
+};
+
+struct safe_VkDisplayEventInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkDisplayEventTypeEXT displayEvent;
+    safe_VkDisplayEventInfoEXT(const VkDisplayEventInfoEXT* in_struct);
+    safe_VkDisplayEventInfoEXT(const safe_VkDisplayEventInfoEXT& src);
+    safe_VkDisplayEventInfoEXT& operator=(const safe_VkDisplayEventInfoEXT& src);
+    safe_VkDisplayEventInfoEXT();
+    ~safe_VkDisplayEventInfoEXT();
+    void initialize(const VkDisplayEventInfoEXT* in_struct);
+    void initialize(const safe_VkDisplayEventInfoEXT* src);
+    VkDisplayEventInfoEXT *ptr() { return reinterpret_cast<VkDisplayEventInfoEXT *>(this); }
+    VkDisplayEventInfoEXT const *ptr() const { return reinterpret_cast<VkDisplayEventInfoEXT const *>(this); }
+};
+
+struct safe_VkSwapchainCounterCreateInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkSurfaceCounterFlagsEXT surfaceCounters;
+    safe_VkSwapchainCounterCreateInfoEXT(const VkSwapchainCounterCreateInfoEXT* in_struct);
+    safe_VkSwapchainCounterCreateInfoEXT(const safe_VkSwapchainCounterCreateInfoEXT& src);
+    safe_VkSwapchainCounterCreateInfoEXT& operator=(const safe_VkSwapchainCounterCreateInfoEXT& src);
+    safe_VkSwapchainCounterCreateInfoEXT();
+    ~safe_VkSwapchainCounterCreateInfoEXT();
+    void initialize(const VkSwapchainCounterCreateInfoEXT* in_struct);
+    void initialize(const safe_VkSwapchainCounterCreateInfoEXT* src);
+    VkSwapchainCounterCreateInfoEXT *ptr() { return reinterpret_cast<VkSwapchainCounterCreateInfoEXT *>(this); }
+    VkSwapchainCounterCreateInfoEXT const *ptr() const { return reinterpret_cast<VkSwapchainCounterCreateInfoEXT const *>(this); }
+};
+
+struct safe_VkPresentTimesInfoGOOGLE {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t swapchainCount;
+    const VkPresentTimeGOOGLE* pTimes;
+    safe_VkPresentTimesInfoGOOGLE(const VkPresentTimesInfoGOOGLE* in_struct);
+    safe_VkPresentTimesInfoGOOGLE(const safe_VkPresentTimesInfoGOOGLE& src);
+    safe_VkPresentTimesInfoGOOGLE& operator=(const safe_VkPresentTimesInfoGOOGLE& src);
+    safe_VkPresentTimesInfoGOOGLE();
+    ~safe_VkPresentTimesInfoGOOGLE();
+    void initialize(const VkPresentTimesInfoGOOGLE* in_struct);
+    void initialize(const safe_VkPresentTimesInfoGOOGLE* src);
+    VkPresentTimesInfoGOOGLE *ptr() { return reinterpret_cast<VkPresentTimesInfoGOOGLE *>(this); }
+    VkPresentTimesInfoGOOGLE const *ptr() const { return reinterpret_cast<VkPresentTimesInfoGOOGLE const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 perViewPositionAllComponents;
+    safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX* in_struct);
+    safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(const safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX& src);
+    safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX& operator=(const safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX& src);
+    safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX();
+    ~safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX();
+    void initialize(const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX* in_struct);
+    void initialize(const safe_VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX* src);
+    VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX *ptr() { return reinterpret_cast<VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX *>(this); }
+    VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *ptr() const { return reinterpret_cast<VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *>(this); }
+};
+
+struct safe_VkPipelineViewportSwizzleStateCreateInfoNV {
+    VkStructureType sType;
+    const void* pNext;
+    VkPipelineViewportSwizzleStateCreateFlagsNV flags;
+    uint32_t viewportCount;
+    const VkViewportSwizzleNV* pViewportSwizzles;
+    safe_VkPipelineViewportSwizzleStateCreateInfoNV(const VkPipelineViewportSwizzleStateCreateInfoNV* in_struct);
+    safe_VkPipelineViewportSwizzleStateCreateInfoNV(const safe_VkPipelineViewportSwizzleStateCreateInfoNV& src);
+    safe_VkPipelineViewportSwizzleStateCreateInfoNV& operator=(const safe_VkPipelineViewportSwizzleStateCreateInfoNV& src);
+    safe_VkPipelineViewportSwizzleStateCreateInfoNV();
+    ~safe_VkPipelineViewportSwizzleStateCreateInfoNV();
+    void initialize(const VkPipelineViewportSwizzleStateCreateInfoNV* in_struct);
+    void initialize(const safe_VkPipelineViewportSwizzleStateCreateInfoNV* src);
+    VkPipelineViewportSwizzleStateCreateInfoNV *ptr() { return reinterpret_cast<VkPipelineViewportSwizzleStateCreateInfoNV *>(this); }
+    VkPipelineViewportSwizzleStateCreateInfoNV const *ptr() const { return reinterpret_cast<VkPipelineViewportSwizzleStateCreateInfoNV const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT {
+    VkStructureType sType;
+    void* pNext;
+    uint32_t maxDiscardRectangles;
+    safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT(const VkPhysicalDeviceDiscardRectanglePropertiesEXT* in_struct);
+    safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT(const safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT& src);
+    safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT& operator=(const safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT& src);
+    safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT();
+    ~safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT();
+    void initialize(const VkPhysicalDeviceDiscardRectanglePropertiesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceDiscardRectanglePropertiesEXT* src);
+    VkPhysicalDeviceDiscardRectanglePropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceDiscardRectanglePropertiesEXT *>(this); }
+    VkPhysicalDeviceDiscardRectanglePropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceDiscardRectanglePropertiesEXT const *>(this); }
+};
+
+struct safe_VkPipelineDiscardRectangleStateCreateInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkPipelineDiscardRectangleStateCreateFlagsEXT flags;
+    VkDiscardRectangleModeEXT discardRectangleMode;
+    uint32_t discardRectangleCount;
+    const VkRect2D* pDiscardRectangles;
+    safe_VkPipelineDiscardRectangleStateCreateInfoEXT(const VkPipelineDiscardRectangleStateCreateInfoEXT* in_struct);
+    safe_VkPipelineDiscardRectangleStateCreateInfoEXT(const safe_VkPipelineDiscardRectangleStateCreateInfoEXT& src);
+    safe_VkPipelineDiscardRectangleStateCreateInfoEXT& operator=(const safe_VkPipelineDiscardRectangleStateCreateInfoEXT& src);
+    safe_VkPipelineDiscardRectangleStateCreateInfoEXT();
+    ~safe_VkPipelineDiscardRectangleStateCreateInfoEXT();
+    void initialize(const VkPipelineDiscardRectangleStateCreateInfoEXT* in_struct);
+    void initialize(const safe_VkPipelineDiscardRectangleStateCreateInfoEXT* src);
+    VkPipelineDiscardRectangleStateCreateInfoEXT *ptr() { return reinterpret_cast<VkPipelineDiscardRectangleStateCreateInfoEXT *>(this); }
+    VkPipelineDiscardRectangleStateCreateInfoEXT const *ptr() const { return reinterpret_cast<VkPipelineDiscardRectangleStateCreateInfoEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT {
+    VkStructureType sType;
+    void* pNext;
+    float primitiveOverestimationSize;
+    float maxExtraPrimitiveOverestimationSize;
+    float extraPrimitiveOverestimationSizeGranularity;
+    VkBool32 primitiveUnderestimation;
+    VkBool32 conservativePointAndLineRasterization;
+    VkBool32 degenerateTrianglesRasterized;
+    VkBool32 degenerateLinesRasterized;
+    VkBool32 fullyCoveredFragmentShaderInputVariable;
+    VkBool32 conservativeRasterizationPostDepthCoverage;
+    safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT(const VkPhysicalDeviceConservativeRasterizationPropertiesEXT* in_struct);
+    safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT(const safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT& src);
+    safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT& operator=(const safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT& src);
+    safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT();
+    ~safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT();
+    void initialize(const VkPhysicalDeviceConservativeRasterizationPropertiesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceConservativeRasterizationPropertiesEXT* src);
+    VkPhysicalDeviceConservativeRasterizationPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceConservativeRasterizationPropertiesEXT *>(this); }
+    VkPhysicalDeviceConservativeRasterizationPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceConservativeRasterizationPropertiesEXT const *>(this); }
+};
+
+struct safe_VkPipelineRasterizationConservativeStateCreateInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkPipelineRasterizationConservativeStateCreateFlagsEXT flags;
+    VkConservativeRasterizationModeEXT conservativeRasterizationMode;
+    float extraPrimitiveOverestimationSize;
+    safe_VkPipelineRasterizationConservativeStateCreateInfoEXT(const VkPipelineRasterizationConservativeStateCreateInfoEXT* in_struct);
+    safe_VkPipelineRasterizationConservativeStateCreateInfoEXT(const safe_VkPipelineRasterizationConservativeStateCreateInfoEXT& src);
+    safe_VkPipelineRasterizationConservativeStateCreateInfoEXT& operator=(const safe_VkPipelineRasterizationConservativeStateCreateInfoEXT& src);
+    safe_VkPipelineRasterizationConservativeStateCreateInfoEXT();
+    ~safe_VkPipelineRasterizationConservativeStateCreateInfoEXT();
+    void initialize(const VkPipelineRasterizationConservativeStateCreateInfoEXT* in_struct);
+    void initialize(const safe_VkPipelineRasterizationConservativeStateCreateInfoEXT* src);
+    VkPipelineRasterizationConservativeStateCreateInfoEXT *ptr() { return reinterpret_cast<VkPipelineRasterizationConservativeStateCreateInfoEXT *>(this); }
+    VkPipelineRasterizationConservativeStateCreateInfoEXT const *ptr() const { return reinterpret_cast<VkPipelineRasterizationConservativeStateCreateInfoEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 depthClipEnable;
+    safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT(const VkPhysicalDeviceDepthClipEnableFeaturesEXT* in_struct);
+    safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT(const safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT& src);
+    safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT& operator=(const safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT& src);
+    safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT();
+    ~safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT();
+    void initialize(const VkPhysicalDeviceDepthClipEnableFeaturesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceDepthClipEnableFeaturesEXT* src);
+    VkPhysicalDeviceDepthClipEnableFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceDepthClipEnableFeaturesEXT *>(this); }
+    VkPhysicalDeviceDepthClipEnableFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceDepthClipEnableFeaturesEXT const *>(this); }
+};
+
+struct safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkPipelineRasterizationDepthClipStateCreateFlagsEXT flags;
+    VkBool32 depthClipEnable;
+    safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT(const VkPipelineRasterizationDepthClipStateCreateInfoEXT* in_struct);
+    safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT(const safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT& src);
+    safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT& operator=(const safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT& src);
+    safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT();
+    ~safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT();
+    void initialize(const VkPipelineRasterizationDepthClipStateCreateInfoEXT* in_struct);
+    void initialize(const safe_VkPipelineRasterizationDepthClipStateCreateInfoEXT* src);
+    VkPipelineRasterizationDepthClipStateCreateInfoEXT *ptr() { return reinterpret_cast<VkPipelineRasterizationDepthClipStateCreateInfoEXT *>(this); }
+    VkPipelineRasterizationDepthClipStateCreateInfoEXT const *ptr() const { return reinterpret_cast<VkPipelineRasterizationDepthClipStateCreateInfoEXT const *>(this); }
+};
+
+struct safe_VkHdrMetadataEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkXYColorEXT displayPrimaryRed;
+    VkXYColorEXT displayPrimaryGreen;
+    VkXYColorEXT displayPrimaryBlue;
+    VkXYColorEXT whitePoint;
+    float maxLuminance;
+    float minLuminance;
+    float maxContentLightLevel;
+    float maxFrameAverageLightLevel;
+    safe_VkHdrMetadataEXT(const VkHdrMetadataEXT* in_struct);
+    safe_VkHdrMetadataEXT(const safe_VkHdrMetadataEXT& src);
+    safe_VkHdrMetadataEXT& operator=(const safe_VkHdrMetadataEXT& src);
+    safe_VkHdrMetadataEXT();
+    ~safe_VkHdrMetadataEXT();
+    void initialize(const VkHdrMetadataEXT* in_struct);
+    void initialize(const safe_VkHdrMetadataEXT* src);
+    VkHdrMetadataEXT *ptr() { return reinterpret_cast<VkHdrMetadataEXT *>(this); }
+    VkHdrMetadataEXT const *ptr() const { return reinterpret_cast<VkHdrMetadataEXT const *>(this); }
+};
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+struct safe_VkIOSSurfaceCreateInfoMVK {
+    VkStructureType sType;
+    const void* pNext;
+    VkIOSSurfaceCreateFlagsMVK flags;
+    const void* pView;
+    safe_VkIOSSurfaceCreateInfoMVK(const VkIOSSurfaceCreateInfoMVK* in_struct);
+    safe_VkIOSSurfaceCreateInfoMVK(const safe_VkIOSSurfaceCreateInfoMVK& src);
+    safe_VkIOSSurfaceCreateInfoMVK& operator=(const safe_VkIOSSurfaceCreateInfoMVK& src);
+    safe_VkIOSSurfaceCreateInfoMVK();
+    ~safe_VkIOSSurfaceCreateInfoMVK();
+    void initialize(const VkIOSSurfaceCreateInfoMVK* in_struct);
+    void initialize(const safe_VkIOSSurfaceCreateInfoMVK* src);
+    VkIOSSurfaceCreateInfoMVK *ptr() { return reinterpret_cast<VkIOSSurfaceCreateInfoMVK *>(this); }
+    VkIOSSurfaceCreateInfoMVK const *ptr() const { return reinterpret_cast<VkIOSSurfaceCreateInfoMVK const *>(this); }
+};
+#endif // VK_USE_PLATFORM_IOS_MVK
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+struct safe_VkMacOSSurfaceCreateInfoMVK {
+    VkStructureType sType;
+    const void* pNext;
+    VkMacOSSurfaceCreateFlagsMVK flags;
+    const void* pView;
+    safe_VkMacOSSurfaceCreateInfoMVK(const VkMacOSSurfaceCreateInfoMVK* in_struct);
+    safe_VkMacOSSurfaceCreateInfoMVK(const safe_VkMacOSSurfaceCreateInfoMVK& src);
+    safe_VkMacOSSurfaceCreateInfoMVK& operator=(const safe_VkMacOSSurfaceCreateInfoMVK& src);
+    safe_VkMacOSSurfaceCreateInfoMVK();
+    ~safe_VkMacOSSurfaceCreateInfoMVK();
+    void initialize(const VkMacOSSurfaceCreateInfoMVK* in_struct);
+    void initialize(const safe_VkMacOSSurfaceCreateInfoMVK* src);
+    VkMacOSSurfaceCreateInfoMVK *ptr() { return reinterpret_cast<VkMacOSSurfaceCreateInfoMVK *>(this); }
+    VkMacOSSurfaceCreateInfoMVK const *ptr() const { return reinterpret_cast<VkMacOSSurfaceCreateInfoMVK const *>(this); }
+};
+#endif // VK_USE_PLATFORM_MACOS_MVK
+
+struct safe_VkDebugUtilsObjectNameInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkObjectType objectType;
+    uint64_t objectHandle;
+    const char* pObjectName;
+    safe_VkDebugUtilsObjectNameInfoEXT(const VkDebugUtilsObjectNameInfoEXT* in_struct);
+    safe_VkDebugUtilsObjectNameInfoEXT(const safe_VkDebugUtilsObjectNameInfoEXT& src);
+    safe_VkDebugUtilsObjectNameInfoEXT& operator=(const safe_VkDebugUtilsObjectNameInfoEXT& src);
+    safe_VkDebugUtilsObjectNameInfoEXT();
+    ~safe_VkDebugUtilsObjectNameInfoEXT();
+    void initialize(const VkDebugUtilsObjectNameInfoEXT* in_struct);
+    void initialize(const safe_VkDebugUtilsObjectNameInfoEXT* src);
+    VkDebugUtilsObjectNameInfoEXT *ptr() { return reinterpret_cast<VkDebugUtilsObjectNameInfoEXT *>(this); }
+    VkDebugUtilsObjectNameInfoEXT const *ptr() const { return reinterpret_cast<VkDebugUtilsObjectNameInfoEXT const *>(this); }
+};
+
+struct safe_VkDebugUtilsObjectTagInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkObjectType objectType;
+    uint64_t objectHandle;
+    uint64_t tagName;
+    size_t tagSize;
+    const void* pTag;
+    safe_VkDebugUtilsObjectTagInfoEXT(const VkDebugUtilsObjectTagInfoEXT* in_struct);
+    safe_VkDebugUtilsObjectTagInfoEXT(const safe_VkDebugUtilsObjectTagInfoEXT& src);
+    safe_VkDebugUtilsObjectTagInfoEXT& operator=(const safe_VkDebugUtilsObjectTagInfoEXT& src);
+    safe_VkDebugUtilsObjectTagInfoEXT();
+    ~safe_VkDebugUtilsObjectTagInfoEXT();
+    void initialize(const VkDebugUtilsObjectTagInfoEXT* in_struct);
+    void initialize(const safe_VkDebugUtilsObjectTagInfoEXT* src);
+    VkDebugUtilsObjectTagInfoEXT *ptr() { return reinterpret_cast<VkDebugUtilsObjectTagInfoEXT *>(this); }
+    VkDebugUtilsObjectTagInfoEXT const *ptr() const { return reinterpret_cast<VkDebugUtilsObjectTagInfoEXT const *>(this); }
+};
+
+struct safe_VkDebugUtilsLabelEXT {
+    VkStructureType sType;
+    const void* pNext;
+    const char* pLabelName;
+    float color[4];
+    safe_VkDebugUtilsLabelEXT(const VkDebugUtilsLabelEXT* in_struct);
+    safe_VkDebugUtilsLabelEXT(const safe_VkDebugUtilsLabelEXT& src);
+    safe_VkDebugUtilsLabelEXT& operator=(const safe_VkDebugUtilsLabelEXT& src);
+    safe_VkDebugUtilsLabelEXT();
+    ~safe_VkDebugUtilsLabelEXT();
+    void initialize(const VkDebugUtilsLabelEXT* in_struct);
+    void initialize(const safe_VkDebugUtilsLabelEXT* src);
+    VkDebugUtilsLabelEXT *ptr() { return reinterpret_cast<VkDebugUtilsLabelEXT *>(this); }
+    VkDebugUtilsLabelEXT const *ptr() const { return reinterpret_cast<VkDebugUtilsLabelEXT const *>(this); }
+};
+
+struct safe_VkDebugUtilsMessengerCallbackDataEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkDebugUtilsMessengerCallbackDataFlagsEXT flags;
+    const char* pMessageIdName;
+    int32_t messageIdNumber;
+    const char* pMessage;
+    uint32_t queueLabelCount;
+    safe_VkDebugUtilsLabelEXT* pQueueLabels;
+    uint32_t cmdBufLabelCount;
+    safe_VkDebugUtilsLabelEXT* pCmdBufLabels;
+    uint32_t objectCount;
+    safe_VkDebugUtilsObjectNameInfoEXT* pObjects;
+    safe_VkDebugUtilsMessengerCallbackDataEXT(const VkDebugUtilsMessengerCallbackDataEXT* in_struct);
+    safe_VkDebugUtilsMessengerCallbackDataEXT(const safe_VkDebugUtilsMessengerCallbackDataEXT& src);
+    safe_VkDebugUtilsMessengerCallbackDataEXT& operator=(const safe_VkDebugUtilsMessengerCallbackDataEXT& src);
+    safe_VkDebugUtilsMessengerCallbackDataEXT();
+    ~safe_VkDebugUtilsMessengerCallbackDataEXT();
+    void initialize(const VkDebugUtilsMessengerCallbackDataEXT* in_struct);
+    void initialize(const safe_VkDebugUtilsMessengerCallbackDataEXT* src);
+    VkDebugUtilsMessengerCallbackDataEXT *ptr() { return reinterpret_cast<VkDebugUtilsMessengerCallbackDataEXT *>(this); }
+    VkDebugUtilsMessengerCallbackDataEXT const *ptr() const { return reinterpret_cast<VkDebugUtilsMessengerCallbackDataEXT const *>(this); }
+};
+
+struct safe_VkDebugUtilsMessengerCreateInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkDebugUtilsMessengerCreateFlagsEXT flags;
+    VkDebugUtilsMessageSeverityFlagsEXT messageSeverity;
+    VkDebugUtilsMessageTypeFlagsEXT messageType;
+    PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback;
+    void* pUserData;
+    safe_VkDebugUtilsMessengerCreateInfoEXT(const VkDebugUtilsMessengerCreateInfoEXT* in_struct);
+    safe_VkDebugUtilsMessengerCreateInfoEXT(const safe_VkDebugUtilsMessengerCreateInfoEXT& src);
+    safe_VkDebugUtilsMessengerCreateInfoEXT& operator=(const safe_VkDebugUtilsMessengerCreateInfoEXT& src);
+    safe_VkDebugUtilsMessengerCreateInfoEXT();
+    ~safe_VkDebugUtilsMessengerCreateInfoEXT();
+    void initialize(const VkDebugUtilsMessengerCreateInfoEXT* in_struct);
+    void initialize(const safe_VkDebugUtilsMessengerCreateInfoEXT* src);
+    VkDebugUtilsMessengerCreateInfoEXT *ptr() { return reinterpret_cast<VkDebugUtilsMessengerCreateInfoEXT *>(this); }
+    VkDebugUtilsMessengerCreateInfoEXT const *ptr() const { return reinterpret_cast<VkDebugUtilsMessengerCreateInfoEXT const *>(this); }
+};
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+struct safe_VkAndroidHardwareBufferUsageANDROID {
+    VkStructureType sType;
+    void* pNext;
+    uint64_t androidHardwareBufferUsage;
+    safe_VkAndroidHardwareBufferUsageANDROID(const VkAndroidHardwareBufferUsageANDROID* in_struct);
+    safe_VkAndroidHardwareBufferUsageANDROID(const safe_VkAndroidHardwareBufferUsageANDROID& src);
+    safe_VkAndroidHardwareBufferUsageANDROID& operator=(const safe_VkAndroidHardwareBufferUsageANDROID& src);
+    safe_VkAndroidHardwareBufferUsageANDROID();
+    ~safe_VkAndroidHardwareBufferUsageANDROID();
+    void initialize(const VkAndroidHardwareBufferUsageANDROID* in_struct);
+    void initialize(const safe_VkAndroidHardwareBufferUsageANDROID* src);
+    VkAndroidHardwareBufferUsageANDROID *ptr() { return reinterpret_cast<VkAndroidHardwareBufferUsageANDROID *>(this); }
+    VkAndroidHardwareBufferUsageANDROID const *ptr() const { return reinterpret_cast<VkAndroidHardwareBufferUsageANDROID const *>(this); }
+};
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+struct safe_VkAndroidHardwareBufferPropertiesANDROID {
+    VkStructureType sType;
+    void* pNext;
+    VkDeviceSize allocationSize;
+    uint32_t memoryTypeBits;
+    safe_VkAndroidHardwareBufferPropertiesANDROID(const VkAndroidHardwareBufferPropertiesANDROID* in_struct);
+    safe_VkAndroidHardwareBufferPropertiesANDROID(const safe_VkAndroidHardwareBufferPropertiesANDROID& src);
+    safe_VkAndroidHardwareBufferPropertiesANDROID& operator=(const safe_VkAndroidHardwareBufferPropertiesANDROID& src);
+    safe_VkAndroidHardwareBufferPropertiesANDROID();
+    ~safe_VkAndroidHardwareBufferPropertiesANDROID();
+    void initialize(const VkAndroidHardwareBufferPropertiesANDROID* in_struct);
+    void initialize(const safe_VkAndroidHardwareBufferPropertiesANDROID* src);
+    VkAndroidHardwareBufferPropertiesANDROID *ptr() { return reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>(this); }
+    VkAndroidHardwareBufferPropertiesANDROID const *ptr() const { return reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID const *>(this); }
+};
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+struct safe_VkAndroidHardwareBufferFormatPropertiesANDROID {
+    VkStructureType sType;
+    void* pNext;
+    VkFormat format;
+    uint64_t externalFormat;
+    VkFormatFeatureFlags formatFeatures;
+    VkComponentMapping samplerYcbcrConversionComponents;
+    VkSamplerYcbcrModelConversion suggestedYcbcrModel;
+    VkSamplerYcbcrRange suggestedYcbcrRange;
+    VkChromaLocation suggestedXChromaOffset;
+    VkChromaLocation suggestedYChromaOffset;
+    safe_VkAndroidHardwareBufferFormatPropertiesANDROID(const VkAndroidHardwareBufferFormatPropertiesANDROID* in_struct);
+    safe_VkAndroidHardwareBufferFormatPropertiesANDROID(const safe_VkAndroidHardwareBufferFormatPropertiesANDROID& src);
+    safe_VkAndroidHardwareBufferFormatPropertiesANDROID& operator=(const safe_VkAndroidHardwareBufferFormatPropertiesANDROID& src);
+    safe_VkAndroidHardwareBufferFormatPropertiesANDROID();
+    ~safe_VkAndroidHardwareBufferFormatPropertiesANDROID();
+    void initialize(const VkAndroidHardwareBufferFormatPropertiesANDROID* in_struct);
+    void initialize(const safe_VkAndroidHardwareBufferFormatPropertiesANDROID* src);
+    VkAndroidHardwareBufferFormatPropertiesANDROID *ptr() { return reinterpret_cast<VkAndroidHardwareBufferFormatPropertiesANDROID *>(this); }
+    VkAndroidHardwareBufferFormatPropertiesANDROID const *ptr() const { return reinterpret_cast<VkAndroidHardwareBufferFormatPropertiesANDROID const *>(this); }
+};
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+struct safe_VkImportAndroidHardwareBufferInfoANDROID {
+    VkStructureType sType;
+    const void* pNext;
+    struct AHardwareBuffer* buffer;
+    safe_VkImportAndroidHardwareBufferInfoANDROID(const VkImportAndroidHardwareBufferInfoANDROID* in_struct);
+    safe_VkImportAndroidHardwareBufferInfoANDROID(const safe_VkImportAndroidHardwareBufferInfoANDROID& src);
+    safe_VkImportAndroidHardwareBufferInfoANDROID& operator=(const safe_VkImportAndroidHardwareBufferInfoANDROID& src);
+    safe_VkImportAndroidHardwareBufferInfoANDROID();
+    ~safe_VkImportAndroidHardwareBufferInfoANDROID();
+    void initialize(const VkImportAndroidHardwareBufferInfoANDROID* in_struct);
+    void initialize(const safe_VkImportAndroidHardwareBufferInfoANDROID* src);
+    VkImportAndroidHardwareBufferInfoANDROID *ptr() { return reinterpret_cast<VkImportAndroidHardwareBufferInfoANDROID *>(this); }
+    VkImportAndroidHardwareBufferInfoANDROID const *ptr() const { return reinterpret_cast<VkImportAndroidHardwareBufferInfoANDROID const *>(this); }
+};
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+struct safe_VkMemoryGetAndroidHardwareBufferInfoANDROID {
+    VkStructureType sType;
+    const void* pNext;
+    VkDeviceMemory memory;
+    safe_VkMemoryGetAndroidHardwareBufferInfoANDROID(const VkMemoryGetAndroidHardwareBufferInfoANDROID* in_struct);
+    safe_VkMemoryGetAndroidHardwareBufferInfoANDROID(const safe_VkMemoryGetAndroidHardwareBufferInfoANDROID& src);
+    safe_VkMemoryGetAndroidHardwareBufferInfoANDROID& operator=(const safe_VkMemoryGetAndroidHardwareBufferInfoANDROID& src);
+    safe_VkMemoryGetAndroidHardwareBufferInfoANDROID();
+    ~safe_VkMemoryGetAndroidHardwareBufferInfoANDROID();
+    void initialize(const VkMemoryGetAndroidHardwareBufferInfoANDROID* in_struct);
+    void initialize(const safe_VkMemoryGetAndroidHardwareBufferInfoANDROID* src);
+    VkMemoryGetAndroidHardwareBufferInfoANDROID *ptr() { return reinterpret_cast<VkMemoryGetAndroidHardwareBufferInfoANDROID *>(this); }
+    VkMemoryGetAndroidHardwareBufferInfoANDROID const *ptr() const { return reinterpret_cast<VkMemoryGetAndroidHardwareBufferInfoANDROID const *>(this); }
+};
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+struct safe_VkExternalFormatANDROID {
+    VkStructureType sType;
+    void* pNext;
+    uint64_t externalFormat;
+    safe_VkExternalFormatANDROID(const VkExternalFormatANDROID* in_struct);
+    safe_VkExternalFormatANDROID(const safe_VkExternalFormatANDROID& src);
+    safe_VkExternalFormatANDROID& operator=(const safe_VkExternalFormatANDROID& src);
+    safe_VkExternalFormatANDROID();
+    ~safe_VkExternalFormatANDROID();
+    void initialize(const VkExternalFormatANDROID* in_struct);
+    void initialize(const safe_VkExternalFormatANDROID* src);
+    VkExternalFormatANDROID *ptr() { return reinterpret_cast<VkExternalFormatANDROID *>(this); }
+    VkExternalFormatANDROID const *ptr() const { return reinterpret_cast<VkExternalFormatANDROID const *>(this); }
+};
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+struct safe_VkSamplerReductionModeCreateInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkSamplerReductionModeEXT reductionMode;
+    safe_VkSamplerReductionModeCreateInfoEXT(const VkSamplerReductionModeCreateInfoEXT* in_struct);
+    safe_VkSamplerReductionModeCreateInfoEXT(const safe_VkSamplerReductionModeCreateInfoEXT& src);
+    safe_VkSamplerReductionModeCreateInfoEXT& operator=(const safe_VkSamplerReductionModeCreateInfoEXT& src);
+    safe_VkSamplerReductionModeCreateInfoEXT();
+    ~safe_VkSamplerReductionModeCreateInfoEXT();
+    void initialize(const VkSamplerReductionModeCreateInfoEXT* in_struct);
+    void initialize(const safe_VkSamplerReductionModeCreateInfoEXT* src);
+    VkSamplerReductionModeCreateInfoEXT *ptr() { return reinterpret_cast<VkSamplerReductionModeCreateInfoEXT *>(this); }
+    VkSamplerReductionModeCreateInfoEXT const *ptr() const { return reinterpret_cast<VkSamplerReductionModeCreateInfoEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 filterMinmaxSingleComponentFormats;
+    VkBool32 filterMinmaxImageComponentMapping;
+    safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT(const VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT* in_struct);
+    safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT(const safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT& src);
+    safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT& operator=(const safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT& src);
+    safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT();
+    ~safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT();
+    void initialize(const VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT* src);
+    VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT *>(this); }
+    VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 inlineUniformBlock;
+    VkBool32 descriptorBindingInlineUniformBlockUpdateAfterBind;
+    safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT(const VkPhysicalDeviceInlineUniformBlockFeaturesEXT* in_struct);
+    safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT(const safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT& src);
+    safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT& operator=(const safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT& src);
+    safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT();
+    ~safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT();
+    void initialize(const VkPhysicalDeviceInlineUniformBlockFeaturesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceInlineUniformBlockFeaturesEXT* src);
+    VkPhysicalDeviceInlineUniformBlockFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceInlineUniformBlockFeaturesEXT *>(this); }
+    VkPhysicalDeviceInlineUniformBlockFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceInlineUniformBlockFeaturesEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT {
+    VkStructureType sType;
+    void* pNext;
+    uint32_t maxInlineUniformBlockSize;
+    uint32_t maxPerStageDescriptorInlineUniformBlocks;
+    uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks;
+    uint32_t maxDescriptorSetInlineUniformBlocks;
+    uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks;
+    safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT(const VkPhysicalDeviceInlineUniformBlockPropertiesEXT* in_struct);
+    safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT(const safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT& src);
+    safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT& operator=(const safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT& src);
+    safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT();
+    ~safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT();
+    void initialize(const VkPhysicalDeviceInlineUniformBlockPropertiesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceInlineUniformBlockPropertiesEXT* src);
+    VkPhysicalDeviceInlineUniformBlockPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceInlineUniformBlockPropertiesEXT *>(this); }
+    VkPhysicalDeviceInlineUniformBlockPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceInlineUniformBlockPropertiesEXT const *>(this); }
+};
+
+struct safe_VkWriteDescriptorSetInlineUniformBlockEXT {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t dataSize;
+    const void* pData;
+    safe_VkWriteDescriptorSetInlineUniformBlockEXT(const VkWriteDescriptorSetInlineUniformBlockEXT* in_struct);
+    safe_VkWriteDescriptorSetInlineUniformBlockEXT(const safe_VkWriteDescriptorSetInlineUniformBlockEXT& src);
+    safe_VkWriteDescriptorSetInlineUniformBlockEXT& operator=(const safe_VkWriteDescriptorSetInlineUniformBlockEXT& src);
+    safe_VkWriteDescriptorSetInlineUniformBlockEXT();
+    ~safe_VkWriteDescriptorSetInlineUniformBlockEXT();
+    void initialize(const VkWriteDescriptorSetInlineUniformBlockEXT* in_struct);
+    void initialize(const safe_VkWriteDescriptorSetInlineUniformBlockEXT* src);
+    VkWriteDescriptorSetInlineUniformBlockEXT *ptr() { return reinterpret_cast<VkWriteDescriptorSetInlineUniformBlockEXT *>(this); }
+    VkWriteDescriptorSetInlineUniformBlockEXT const *ptr() const { return reinterpret_cast<VkWriteDescriptorSetInlineUniformBlockEXT const *>(this); }
+};
+
+struct safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t maxInlineUniformBlockBindings;
+    safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT(const VkDescriptorPoolInlineUniformBlockCreateInfoEXT* in_struct);
+    safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT(const safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT& src);
+    safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT& operator=(const safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT& src);
+    safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT();
+    ~safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT();
+    void initialize(const VkDescriptorPoolInlineUniformBlockCreateInfoEXT* in_struct);
+    void initialize(const safe_VkDescriptorPoolInlineUniformBlockCreateInfoEXT* src);
+    VkDescriptorPoolInlineUniformBlockCreateInfoEXT *ptr() { return reinterpret_cast<VkDescriptorPoolInlineUniformBlockCreateInfoEXT *>(this); }
+    VkDescriptorPoolInlineUniformBlockCreateInfoEXT const *ptr() const { return reinterpret_cast<VkDescriptorPoolInlineUniformBlockCreateInfoEXT const *>(this); }
+};
+
+struct safe_VkSampleLocationsInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkSampleCountFlagBits sampleLocationsPerPixel;
+    VkExtent2D sampleLocationGridSize;
+    uint32_t sampleLocationsCount;
+    const VkSampleLocationEXT* pSampleLocations;
+    safe_VkSampleLocationsInfoEXT(const VkSampleLocationsInfoEXT* in_struct);
+    safe_VkSampleLocationsInfoEXT(const safe_VkSampleLocationsInfoEXT& src);
+    safe_VkSampleLocationsInfoEXT& operator=(const safe_VkSampleLocationsInfoEXT& src);
+    safe_VkSampleLocationsInfoEXT();
+    ~safe_VkSampleLocationsInfoEXT();
+    void initialize(const VkSampleLocationsInfoEXT* in_struct);
+    void initialize(const safe_VkSampleLocationsInfoEXT* src);
+    VkSampleLocationsInfoEXT *ptr() { return reinterpret_cast<VkSampleLocationsInfoEXT *>(this); }
+    VkSampleLocationsInfoEXT const *ptr() const { return reinterpret_cast<VkSampleLocationsInfoEXT const *>(this); }
+};
+
+struct safe_VkRenderPassSampleLocationsBeginInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t attachmentInitialSampleLocationsCount;
+    const VkAttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations;
+    uint32_t postSubpassSampleLocationsCount;
+    const VkSubpassSampleLocationsEXT* pPostSubpassSampleLocations;
+    safe_VkRenderPassSampleLocationsBeginInfoEXT(const VkRenderPassSampleLocationsBeginInfoEXT* in_struct);
+    safe_VkRenderPassSampleLocationsBeginInfoEXT(const safe_VkRenderPassSampleLocationsBeginInfoEXT& src);
+    safe_VkRenderPassSampleLocationsBeginInfoEXT& operator=(const safe_VkRenderPassSampleLocationsBeginInfoEXT& src);
+    safe_VkRenderPassSampleLocationsBeginInfoEXT();
+    ~safe_VkRenderPassSampleLocationsBeginInfoEXT();
+    void initialize(const VkRenderPassSampleLocationsBeginInfoEXT* in_struct);
+    void initialize(const safe_VkRenderPassSampleLocationsBeginInfoEXT* src);
+    VkRenderPassSampleLocationsBeginInfoEXT *ptr() { return reinterpret_cast<VkRenderPassSampleLocationsBeginInfoEXT *>(this); }
+    VkRenderPassSampleLocationsBeginInfoEXT const *ptr() const { return reinterpret_cast<VkRenderPassSampleLocationsBeginInfoEXT const *>(this); }
+};
+
+struct safe_VkPipelineSampleLocationsStateCreateInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkBool32 sampleLocationsEnable;
+    safe_VkSampleLocationsInfoEXT sampleLocationsInfo;
+    safe_VkPipelineSampleLocationsStateCreateInfoEXT(const VkPipelineSampleLocationsStateCreateInfoEXT* in_struct);
+    safe_VkPipelineSampleLocationsStateCreateInfoEXT(const safe_VkPipelineSampleLocationsStateCreateInfoEXT& src);
+    safe_VkPipelineSampleLocationsStateCreateInfoEXT& operator=(const safe_VkPipelineSampleLocationsStateCreateInfoEXT& src);
+    safe_VkPipelineSampleLocationsStateCreateInfoEXT();
+    ~safe_VkPipelineSampleLocationsStateCreateInfoEXT();
+    void initialize(const VkPipelineSampleLocationsStateCreateInfoEXT* in_struct);
+    void initialize(const safe_VkPipelineSampleLocationsStateCreateInfoEXT* src);
+    VkPipelineSampleLocationsStateCreateInfoEXT *ptr() { return reinterpret_cast<VkPipelineSampleLocationsStateCreateInfoEXT *>(this); }
+    VkPipelineSampleLocationsStateCreateInfoEXT const *ptr() const { return reinterpret_cast<VkPipelineSampleLocationsStateCreateInfoEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceSampleLocationsPropertiesEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkSampleCountFlags sampleLocationSampleCounts;
+    VkExtent2D maxSampleLocationGridSize;
+    float sampleLocationCoordinateRange[2];
+    uint32_t sampleLocationSubPixelBits;
+    VkBool32 variableSampleLocations;
+    safe_VkPhysicalDeviceSampleLocationsPropertiesEXT(const VkPhysicalDeviceSampleLocationsPropertiesEXT* in_struct);
+    safe_VkPhysicalDeviceSampleLocationsPropertiesEXT(const safe_VkPhysicalDeviceSampleLocationsPropertiesEXT& src);
+    safe_VkPhysicalDeviceSampleLocationsPropertiesEXT& operator=(const safe_VkPhysicalDeviceSampleLocationsPropertiesEXT& src);
+    safe_VkPhysicalDeviceSampleLocationsPropertiesEXT();
+    ~safe_VkPhysicalDeviceSampleLocationsPropertiesEXT();
+    void initialize(const VkPhysicalDeviceSampleLocationsPropertiesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceSampleLocationsPropertiesEXT* src);
+    VkPhysicalDeviceSampleLocationsPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceSampleLocationsPropertiesEXT *>(this); }
+    VkPhysicalDeviceSampleLocationsPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceSampleLocationsPropertiesEXT const *>(this); }
+};
+
+struct safe_VkMultisamplePropertiesEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkExtent2D maxSampleLocationGridSize;
+    safe_VkMultisamplePropertiesEXT(const VkMultisamplePropertiesEXT* in_struct);
+    safe_VkMultisamplePropertiesEXT(const safe_VkMultisamplePropertiesEXT& src);
+    safe_VkMultisamplePropertiesEXT& operator=(const safe_VkMultisamplePropertiesEXT& src);
+    safe_VkMultisamplePropertiesEXT();
+    ~safe_VkMultisamplePropertiesEXT();
+    void initialize(const VkMultisamplePropertiesEXT* in_struct);
+    void initialize(const safe_VkMultisamplePropertiesEXT* src);
+    VkMultisamplePropertiesEXT *ptr() { return reinterpret_cast<VkMultisamplePropertiesEXT *>(this); }
+    VkMultisamplePropertiesEXT const *ptr() const { return reinterpret_cast<VkMultisamplePropertiesEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 advancedBlendCoherentOperations;
+    safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT(const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT* in_struct);
+    safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT(const safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT& src);
+    safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT& operator=(const safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT& src);
+    safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT();
+    ~safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT();
+    void initialize(const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT* src);
+    VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *>(this); }
+    VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT {
+    VkStructureType sType;
+    void* pNext;
+    uint32_t advancedBlendMaxColorAttachments;
+    VkBool32 advancedBlendIndependentBlend;
+    VkBool32 advancedBlendNonPremultipliedSrcColor;
+    VkBool32 advancedBlendNonPremultipliedDstColor;
+    VkBool32 advancedBlendCorrelatedOverlap;
+    VkBool32 advancedBlendAllOperations;
+    safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT(const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT* in_struct);
+    safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT(const safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT& src);
+    safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT& operator=(const safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT& src);
+    safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT();
+    ~safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT();
+    void initialize(const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT* src);
+    VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT *>(this); }
+    VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>(this); }
+};
+
+struct safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkBool32 srcPremultiplied;
+    VkBool32 dstPremultiplied;
+    VkBlendOverlapEXT blendOverlap;
+    safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT(const VkPipelineColorBlendAdvancedStateCreateInfoEXT* in_struct);
+    safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT(const safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT& src);
+    safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT& operator=(const safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT& src);
+    safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT();
+    ~safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT();
+    void initialize(const VkPipelineColorBlendAdvancedStateCreateInfoEXT* in_struct);
+    void initialize(const safe_VkPipelineColorBlendAdvancedStateCreateInfoEXT* src);
+    VkPipelineColorBlendAdvancedStateCreateInfoEXT *ptr() { return reinterpret_cast<VkPipelineColorBlendAdvancedStateCreateInfoEXT *>(this); }
+    VkPipelineColorBlendAdvancedStateCreateInfoEXT const *ptr() const { return reinterpret_cast<VkPipelineColorBlendAdvancedStateCreateInfoEXT const *>(this); }
+};
+
+struct safe_VkPipelineCoverageToColorStateCreateInfoNV {
+    VkStructureType sType;
+    const void* pNext;
+    VkPipelineCoverageToColorStateCreateFlagsNV flags;
+    VkBool32 coverageToColorEnable;
+    uint32_t coverageToColorLocation;
+    safe_VkPipelineCoverageToColorStateCreateInfoNV(const VkPipelineCoverageToColorStateCreateInfoNV* in_struct);
+    safe_VkPipelineCoverageToColorStateCreateInfoNV(const safe_VkPipelineCoverageToColorStateCreateInfoNV& src);
+    safe_VkPipelineCoverageToColorStateCreateInfoNV& operator=(const safe_VkPipelineCoverageToColorStateCreateInfoNV& src);
+    safe_VkPipelineCoverageToColorStateCreateInfoNV();
+    ~safe_VkPipelineCoverageToColorStateCreateInfoNV();
+    void initialize(const VkPipelineCoverageToColorStateCreateInfoNV* in_struct);
+    void initialize(const safe_VkPipelineCoverageToColorStateCreateInfoNV* src);
+    VkPipelineCoverageToColorStateCreateInfoNV *ptr() { return reinterpret_cast<VkPipelineCoverageToColorStateCreateInfoNV *>(this); }
+    VkPipelineCoverageToColorStateCreateInfoNV const *ptr() const { return reinterpret_cast<VkPipelineCoverageToColorStateCreateInfoNV const *>(this); }
+};
+
+struct safe_VkPipelineCoverageModulationStateCreateInfoNV {
+    VkStructureType sType;
+    const void* pNext;
+    VkPipelineCoverageModulationStateCreateFlagsNV flags;
+    VkCoverageModulationModeNV coverageModulationMode;
+    VkBool32 coverageModulationTableEnable;
+    uint32_t coverageModulationTableCount;
+    const float* pCoverageModulationTable;
+    safe_VkPipelineCoverageModulationStateCreateInfoNV(const VkPipelineCoverageModulationStateCreateInfoNV* in_struct);
+    safe_VkPipelineCoverageModulationStateCreateInfoNV(const safe_VkPipelineCoverageModulationStateCreateInfoNV& src);
+    safe_VkPipelineCoverageModulationStateCreateInfoNV& operator=(const safe_VkPipelineCoverageModulationStateCreateInfoNV& src);
+    safe_VkPipelineCoverageModulationStateCreateInfoNV();
+    ~safe_VkPipelineCoverageModulationStateCreateInfoNV();
+    void initialize(const VkPipelineCoverageModulationStateCreateInfoNV* in_struct);
+    void initialize(const safe_VkPipelineCoverageModulationStateCreateInfoNV* src);
+    VkPipelineCoverageModulationStateCreateInfoNV *ptr() { return reinterpret_cast<VkPipelineCoverageModulationStateCreateInfoNV *>(this); }
+    VkPipelineCoverageModulationStateCreateInfoNV const *ptr() const { return reinterpret_cast<VkPipelineCoverageModulationStateCreateInfoNV const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV {
+    VkStructureType sType;
+    void* pNext;
+    uint32_t shaderSMCount;
+    uint32_t shaderWarpsPerSM;
+    safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV(const VkPhysicalDeviceShaderSMBuiltinsPropertiesNV* in_struct);
+    safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV(const safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV& src);
+    safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV& operator=(const safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV& src);
+    safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV();
+    ~safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV();
+    void initialize(const VkPhysicalDeviceShaderSMBuiltinsPropertiesNV* in_struct);
+    void initialize(const safe_VkPhysicalDeviceShaderSMBuiltinsPropertiesNV* src);
+    VkPhysicalDeviceShaderSMBuiltinsPropertiesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsPropertiesNV *>(this); }
+    VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 shaderSMBuiltins;
+    safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV(const VkPhysicalDeviceShaderSMBuiltinsFeaturesNV* in_struct);
+    safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV(const safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV& src);
+    safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV& operator=(const safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV& src);
+    safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV();
+    ~safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV();
+    void initialize(const VkPhysicalDeviceShaderSMBuiltinsFeaturesNV* in_struct);
+    void initialize(const safe_VkPhysicalDeviceShaderSMBuiltinsFeaturesNV* src);
+    VkPhysicalDeviceShaderSMBuiltinsFeaturesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV *>(this); }
+    VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const *>(this); }
+};
+
+struct safe_VkDrmFormatModifierPropertiesListEXT {
+    VkStructureType sType;
+    void* pNext;
+    uint32_t drmFormatModifierCount;
+    VkDrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties;
+    safe_VkDrmFormatModifierPropertiesListEXT(const VkDrmFormatModifierPropertiesListEXT* in_struct);
+    safe_VkDrmFormatModifierPropertiesListEXT(const safe_VkDrmFormatModifierPropertiesListEXT& src);
+    safe_VkDrmFormatModifierPropertiesListEXT& operator=(const safe_VkDrmFormatModifierPropertiesListEXT& src);
+    safe_VkDrmFormatModifierPropertiesListEXT();
+    ~safe_VkDrmFormatModifierPropertiesListEXT();
+    void initialize(const VkDrmFormatModifierPropertiesListEXT* in_struct);
+    void initialize(const safe_VkDrmFormatModifierPropertiesListEXT* src);
+    VkDrmFormatModifierPropertiesListEXT *ptr() { return reinterpret_cast<VkDrmFormatModifierPropertiesListEXT *>(this); }
+    VkDrmFormatModifierPropertiesListEXT const *ptr() const { return reinterpret_cast<VkDrmFormatModifierPropertiesListEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    uint64_t drmFormatModifier;
+    VkSharingMode sharingMode;
+    uint32_t queueFamilyIndexCount;
+    const uint32_t* pQueueFamilyIndices;
+    safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT(const VkPhysicalDeviceImageDrmFormatModifierInfoEXT* in_struct);
+    safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT(const safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT& src);
+    safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT& operator=(const safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT& src);
+    safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT();
+    ~safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT();
+    void initialize(const VkPhysicalDeviceImageDrmFormatModifierInfoEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceImageDrmFormatModifierInfoEXT* src);
+    VkPhysicalDeviceImageDrmFormatModifierInfoEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceImageDrmFormatModifierInfoEXT *>(this); }
+    VkPhysicalDeviceImageDrmFormatModifierInfoEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceImageDrmFormatModifierInfoEXT const *>(this); }
+};
+
+struct safe_VkImageDrmFormatModifierListCreateInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t drmFormatModifierCount;
+    const uint64_t* pDrmFormatModifiers;
+    safe_VkImageDrmFormatModifierListCreateInfoEXT(const VkImageDrmFormatModifierListCreateInfoEXT* in_struct);
+    safe_VkImageDrmFormatModifierListCreateInfoEXT(const safe_VkImageDrmFormatModifierListCreateInfoEXT& src);
+    safe_VkImageDrmFormatModifierListCreateInfoEXT& operator=(const safe_VkImageDrmFormatModifierListCreateInfoEXT& src);
+    safe_VkImageDrmFormatModifierListCreateInfoEXT();
+    ~safe_VkImageDrmFormatModifierListCreateInfoEXT();
+    void initialize(const VkImageDrmFormatModifierListCreateInfoEXT* in_struct);
+    void initialize(const safe_VkImageDrmFormatModifierListCreateInfoEXT* src);
+    VkImageDrmFormatModifierListCreateInfoEXT *ptr() { return reinterpret_cast<VkImageDrmFormatModifierListCreateInfoEXT *>(this); }
+    VkImageDrmFormatModifierListCreateInfoEXT const *ptr() const { return reinterpret_cast<VkImageDrmFormatModifierListCreateInfoEXT const *>(this); }
+};
+
+struct safe_VkImageDrmFormatModifierExplicitCreateInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    uint64_t drmFormatModifier;
+    uint32_t drmFormatModifierPlaneCount;
+    const VkSubresourceLayout* pPlaneLayouts;
+    safe_VkImageDrmFormatModifierExplicitCreateInfoEXT(const VkImageDrmFormatModifierExplicitCreateInfoEXT* in_struct);
+    safe_VkImageDrmFormatModifierExplicitCreateInfoEXT(const safe_VkImageDrmFormatModifierExplicitCreateInfoEXT& src);
+    safe_VkImageDrmFormatModifierExplicitCreateInfoEXT& operator=(const safe_VkImageDrmFormatModifierExplicitCreateInfoEXT& src);
+    safe_VkImageDrmFormatModifierExplicitCreateInfoEXT();
+    ~safe_VkImageDrmFormatModifierExplicitCreateInfoEXT();
+    void initialize(const VkImageDrmFormatModifierExplicitCreateInfoEXT* in_struct);
+    void initialize(const safe_VkImageDrmFormatModifierExplicitCreateInfoEXT* src);
+    VkImageDrmFormatModifierExplicitCreateInfoEXT *ptr() { return reinterpret_cast<VkImageDrmFormatModifierExplicitCreateInfoEXT *>(this); }
+    VkImageDrmFormatModifierExplicitCreateInfoEXT const *ptr() const { return reinterpret_cast<VkImageDrmFormatModifierExplicitCreateInfoEXT const *>(this); }
+};
+
+struct safe_VkImageDrmFormatModifierPropertiesEXT {
+    VkStructureType sType;
+    void* pNext;
+    uint64_t drmFormatModifier;
+    safe_VkImageDrmFormatModifierPropertiesEXT(const VkImageDrmFormatModifierPropertiesEXT* in_struct);
+    safe_VkImageDrmFormatModifierPropertiesEXT(const safe_VkImageDrmFormatModifierPropertiesEXT& src);
+    safe_VkImageDrmFormatModifierPropertiesEXT& operator=(const safe_VkImageDrmFormatModifierPropertiesEXT& src);
+    safe_VkImageDrmFormatModifierPropertiesEXT();
+    ~safe_VkImageDrmFormatModifierPropertiesEXT();
+    void initialize(const VkImageDrmFormatModifierPropertiesEXT* in_struct);
+    void initialize(const safe_VkImageDrmFormatModifierPropertiesEXT* src);
+    VkImageDrmFormatModifierPropertiesEXT *ptr() { return reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>(this); }
+    VkImageDrmFormatModifierPropertiesEXT const *ptr() const { return reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT const *>(this); }
+};
+
+struct safe_VkValidationCacheCreateInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkValidationCacheCreateFlagsEXT flags;
+    size_t initialDataSize;
+    const void* pInitialData;
+    safe_VkValidationCacheCreateInfoEXT(const VkValidationCacheCreateInfoEXT* in_struct);
+    safe_VkValidationCacheCreateInfoEXT(const safe_VkValidationCacheCreateInfoEXT& src);
+    safe_VkValidationCacheCreateInfoEXT& operator=(const safe_VkValidationCacheCreateInfoEXT& src);
+    safe_VkValidationCacheCreateInfoEXT();
+    ~safe_VkValidationCacheCreateInfoEXT();
+    void initialize(const VkValidationCacheCreateInfoEXT* in_struct);
+    void initialize(const safe_VkValidationCacheCreateInfoEXT* src);
+    VkValidationCacheCreateInfoEXT *ptr() { return reinterpret_cast<VkValidationCacheCreateInfoEXT *>(this); }
+    VkValidationCacheCreateInfoEXT const *ptr() const { return reinterpret_cast<VkValidationCacheCreateInfoEXT const *>(this); }
+};
+
+struct safe_VkShaderModuleValidationCacheCreateInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkValidationCacheEXT validationCache;
+    safe_VkShaderModuleValidationCacheCreateInfoEXT(const VkShaderModuleValidationCacheCreateInfoEXT* in_struct);
+    safe_VkShaderModuleValidationCacheCreateInfoEXT(const safe_VkShaderModuleValidationCacheCreateInfoEXT& src);
+    safe_VkShaderModuleValidationCacheCreateInfoEXT& operator=(const safe_VkShaderModuleValidationCacheCreateInfoEXT& src);
+    safe_VkShaderModuleValidationCacheCreateInfoEXT();
+    ~safe_VkShaderModuleValidationCacheCreateInfoEXT();
+    void initialize(const VkShaderModuleValidationCacheCreateInfoEXT* in_struct);
+    void initialize(const safe_VkShaderModuleValidationCacheCreateInfoEXT* src);
+    VkShaderModuleValidationCacheCreateInfoEXT *ptr() { return reinterpret_cast<VkShaderModuleValidationCacheCreateInfoEXT *>(this); }
+    VkShaderModuleValidationCacheCreateInfoEXT const *ptr() const { return reinterpret_cast<VkShaderModuleValidationCacheCreateInfoEXT const *>(this); }
+};
+
+struct safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t bindingCount;
+    const VkDescriptorBindingFlagsEXT* pBindingFlags;
+    safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT(const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT* in_struct);
+    safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT(const safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT& src);
+    safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT& operator=(const safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT& src);
+    safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT();
+    ~safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT();
+    void initialize(const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT* in_struct);
+    void initialize(const safe_VkDescriptorSetLayoutBindingFlagsCreateInfoEXT* src);
+    VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *ptr() { return reinterpret_cast<VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *>(this); }
+    VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const *ptr() const { return reinterpret_cast<VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 shaderInputAttachmentArrayDynamicIndexing;
+    VkBool32 shaderUniformTexelBufferArrayDynamicIndexing;
+    VkBool32 shaderStorageTexelBufferArrayDynamicIndexing;
+    VkBool32 shaderUniformBufferArrayNonUniformIndexing;
+    VkBool32 shaderSampledImageArrayNonUniformIndexing;
+    VkBool32 shaderStorageBufferArrayNonUniformIndexing;
+    VkBool32 shaderStorageImageArrayNonUniformIndexing;
+    VkBool32 shaderInputAttachmentArrayNonUniformIndexing;
+    VkBool32 shaderUniformTexelBufferArrayNonUniformIndexing;
+    VkBool32 shaderStorageTexelBufferArrayNonUniformIndexing;
+    VkBool32 descriptorBindingUniformBufferUpdateAfterBind;
+    VkBool32 descriptorBindingSampledImageUpdateAfterBind;
+    VkBool32 descriptorBindingStorageImageUpdateAfterBind;
+    VkBool32 descriptorBindingStorageBufferUpdateAfterBind;
+    VkBool32 descriptorBindingUniformTexelBufferUpdateAfterBind;
+    VkBool32 descriptorBindingStorageTexelBufferUpdateAfterBind;
+    VkBool32 descriptorBindingUpdateUnusedWhilePending;
+    VkBool32 descriptorBindingPartiallyBound;
+    VkBool32 descriptorBindingVariableDescriptorCount;
+    VkBool32 runtimeDescriptorArray;
+    safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT(const VkPhysicalDeviceDescriptorIndexingFeaturesEXT* in_struct);
+    safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT(const safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT& src);
+    safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT& operator=(const safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT& src);
+    safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT();
+    ~safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT();
+    void initialize(const VkPhysicalDeviceDescriptorIndexingFeaturesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceDescriptorIndexingFeaturesEXT* src);
+    VkPhysicalDeviceDescriptorIndexingFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceDescriptorIndexingFeaturesEXT *>(this); }
+    VkPhysicalDeviceDescriptorIndexingFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceDescriptorIndexingFeaturesEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT {
+    VkStructureType sType;
+    void* pNext;
+    uint32_t maxUpdateAfterBindDescriptorsInAllPools;
+    VkBool32 shaderUniformBufferArrayNonUniformIndexingNative;
+    VkBool32 shaderSampledImageArrayNonUniformIndexingNative;
+    VkBool32 shaderStorageBufferArrayNonUniformIndexingNative;
+    VkBool32 shaderStorageImageArrayNonUniformIndexingNative;
+    VkBool32 shaderInputAttachmentArrayNonUniformIndexingNative;
+    VkBool32 robustBufferAccessUpdateAfterBind;
+    VkBool32 quadDivergentImplicitLod;
+    uint32_t maxPerStageDescriptorUpdateAfterBindSamplers;
+    uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers;
+    uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers;
+    uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages;
+    uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages;
+    uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments;
+    uint32_t maxPerStageUpdateAfterBindResources;
+    uint32_t maxDescriptorSetUpdateAfterBindSamplers;
+    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers;
+    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
+    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers;
+    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
+    uint32_t maxDescriptorSetUpdateAfterBindSampledImages;
+    uint32_t maxDescriptorSetUpdateAfterBindStorageImages;
+    uint32_t maxDescriptorSetUpdateAfterBindInputAttachments;
+    safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT(const VkPhysicalDeviceDescriptorIndexingPropertiesEXT* in_struct);
+    safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT(const safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT& src);
+    safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT& operator=(const safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT& src);
+    safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT();
+    ~safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT();
+    void initialize(const VkPhysicalDeviceDescriptorIndexingPropertiesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceDescriptorIndexingPropertiesEXT* src);
+    VkPhysicalDeviceDescriptorIndexingPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceDescriptorIndexingPropertiesEXT *>(this); }
+    VkPhysicalDeviceDescriptorIndexingPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceDescriptorIndexingPropertiesEXT const *>(this); }
+};
+
+struct safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t descriptorSetCount;
+    const uint32_t* pDescriptorCounts;
+    safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT(const VkDescriptorSetVariableDescriptorCountAllocateInfoEXT* in_struct);
+    safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT(const safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT& src);
+    safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT& operator=(const safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT& src);
+    safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT();
+    ~safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT();
+    void initialize(const VkDescriptorSetVariableDescriptorCountAllocateInfoEXT* in_struct);
+    void initialize(const safe_VkDescriptorSetVariableDescriptorCountAllocateInfoEXT* src);
+    VkDescriptorSetVariableDescriptorCountAllocateInfoEXT *ptr() { return reinterpret_cast<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT *>(this); }
+    VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const *ptr() const { return reinterpret_cast<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const *>(this); }
+};
+
+struct safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT {
+    VkStructureType sType;
+    void* pNext;
+    uint32_t maxVariableDescriptorCount;
+    safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT(const VkDescriptorSetVariableDescriptorCountLayoutSupportEXT* in_struct);
+    safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT(const safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT& src);
+    safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT& operator=(const safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT& src);
+    safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT();
+    ~safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT();
+    void initialize(const VkDescriptorSetVariableDescriptorCountLayoutSupportEXT* in_struct);
+    void initialize(const safe_VkDescriptorSetVariableDescriptorCountLayoutSupportEXT* src);
+    VkDescriptorSetVariableDescriptorCountLayoutSupportEXT *ptr() { return reinterpret_cast<VkDescriptorSetVariableDescriptorCountLayoutSupportEXT *>(this); }
+    VkDescriptorSetVariableDescriptorCountLayoutSupportEXT const *ptr() const { return reinterpret_cast<VkDescriptorSetVariableDescriptorCountLayoutSupportEXT const *>(this); }
+};
+
+struct safe_VkShadingRatePaletteNV {
+    uint32_t shadingRatePaletteEntryCount;
+    const VkShadingRatePaletteEntryNV* pShadingRatePaletteEntries;
+    safe_VkShadingRatePaletteNV(const VkShadingRatePaletteNV* in_struct);
+    safe_VkShadingRatePaletteNV(const safe_VkShadingRatePaletteNV& src);
+    safe_VkShadingRatePaletteNV& operator=(const safe_VkShadingRatePaletteNV& src);
+    safe_VkShadingRatePaletteNV();
+    ~safe_VkShadingRatePaletteNV();
+    void initialize(const VkShadingRatePaletteNV* in_struct);
+    void initialize(const safe_VkShadingRatePaletteNV* src);
+    VkShadingRatePaletteNV *ptr() { return reinterpret_cast<VkShadingRatePaletteNV *>(this); }
+    VkShadingRatePaletteNV const *ptr() const { return reinterpret_cast<VkShadingRatePaletteNV const *>(this); }
+};
+
+struct safe_VkPipelineViewportShadingRateImageStateCreateInfoNV {
+    VkStructureType sType;
+    const void* pNext;
+    VkBool32 shadingRateImageEnable;
+    uint32_t viewportCount;
+    safe_VkShadingRatePaletteNV* pShadingRatePalettes;
+    safe_VkPipelineViewportShadingRateImageStateCreateInfoNV(const VkPipelineViewportShadingRateImageStateCreateInfoNV* in_struct);
+    safe_VkPipelineViewportShadingRateImageStateCreateInfoNV(const safe_VkPipelineViewportShadingRateImageStateCreateInfoNV& src);
+    safe_VkPipelineViewportShadingRateImageStateCreateInfoNV& operator=(const safe_VkPipelineViewportShadingRateImageStateCreateInfoNV& src);
+    safe_VkPipelineViewportShadingRateImageStateCreateInfoNV();
+    ~safe_VkPipelineViewportShadingRateImageStateCreateInfoNV();
+    void initialize(const VkPipelineViewportShadingRateImageStateCreateInfoNV* in_struct);
+    void initialize(const safe_VkPipelineViewportShadingRateImageStateCreateInfoNV* src);
+    VkPipelineViewportShadingRateImageStateCreateInfoNV *ptr() { return reinterpret_cast<VkPipelineViewportShadingRateImageStateCreateInfoNV *>(this); }
+    VkPipelineViewportShadingRateImageStateCreateInfoNV const *ptr() const { return reinterpret_cast<VkPipelineViewportShadingRateImageStateCreateInfoNV const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceShadingRateImageFeaturesNV {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 shadingRateImage;
+    VkBool32 shadingRateCoarseSampleOrder;
+    safe_VkPhysicalDeviceShadingRateImageFeaturesNV(const VkPhysicalDeviceShadingRateImageFeaturesNV* in_struct);
+    safe_VkPhysicalDeviceShadingRateImageFeaturesNV(const safe_VkPhysicalDeviceShadingRateImageFeaturesNV& src);
+    safe_VkPhysicalDeviceShadingRateImageFeaturesNV& operator=(const safe_VkPhysicalDeviceShadingRateImageFeaturesNV& src);
+    safe_VkPhysicalDeviceShadingRateImageFeaturesNV();
+    ~safe_VkPhysicalDeviceShadingRateImageFeaturesNV();
+    void initialize(const VkPhysicalDeviceShadingRateImageFeaturesNV* in_struct);
+    void initialize(const safe_VkPhysicalDeviceShadingRateImageFeaturesNV* src);
+    VkPhysicalDeviceShadingRateImageFeaturesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceShadingRateImageFeaturesNV *>(this); }
+    VkPhysicalDeviceShadingRateImageFeaturesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceShadingRateImageFeaturesNV const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceShadingRateImagePropertiesNV {
+    VkStructureType sType;
+    void* pNext;
+    VkExtent2D shadingRateTexelSize;
+    uint32_t shadingRatePaletteSize;
+    uint32_t shadingRateMaxCoarseSamples;
+    safe_VkPhysicalDeviceShadingRateImagePropertiesNV(const VkPhysicalDeviceShadingRateImagePropertiesNV* in_struct);
+    safe_VkPhysicalDeviceShadingRateImagePropertiesNV(const safe_VkPhysicalDeviceShadingRateImagePropertiesNV& src);
+    safe_VkPhysicalDeviceShadingRateImagePropertiesNV& operator=(const safe_VkPhysicalDeviceShadingRateImagePropertiesNV& src);
+    safe_VkPhysicalDeviceShadingRateImagePropertiesNV();
+    ~safe_VkPhysicalDeviceShadingRateImagePropertiesNV();
+    void initialize(const VkPhysicalDeviceShadingRateImagePropertiesNV* in_struct);
+    void initialize(const safe_VkPhysicalDeviceShadingRateImagePropertiesNV* src);
+    VkPhysicalDeviceShadingRateImagePropertiesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceShadingRateImagePropertiesNV *>(this); }
+    VkPhysicalDeviceShadingRateImagePropertiesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceShadingRateImagePropertiesNV const *>(this); }
+};
+
+struct safe_VkCoarseSampleOrderCustomNV {
+    VkShadingRatePaletteEntryNV shadingRate;
+    uint32_t sampleCount;
+    uint32_t sampleLocationCount;
+    const VkCoarseSampleLocationNV* pSampleLocations;
+    safe_VkCoarseSampleOrderCustomNV(const VkCoarseSampleOrderCustomNV* in_struct);
+    safe_VkCoarseSampleOrderCustomNV(const safe_VkCoarseSampleOrderCustomNV& src);
+    safe_VkCoarseSampleOrderCustomNV& operator=(const safe_VkCoarseSampleOrderCustomNV& src);
+    safe_VkCoarseSampleOrderCustomNV();
+    ~safe_VkCoarseSampleOrderCustomNV();
+    void initialize(const VkCoarseSampleOrderCustomNV* in_struct);
+    void initialize(const safe_VkCoarseSampleOrderCustomNV* src);
+    VkCoarseSampleOrderCustomNV *ptr() { return reinterpret_cast<VkCoarseSampleOrderCustomNV *>(this); }
+    VkCoarseSampleOrderCustomNV const *ptr() const { return reinterpret_cast<VkCoarseSampleOrderCustomNV const *>(this); }
+};
+
+struct safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV {
+    VkStructureType sType;
+    const void* pNext;
+    VkCoarseSampleOrderTypeNV sampleOrderType;
+    uint32_t customSampleOrderCount;
+    safe_VkCoarseSampleOrderCustomNV* pCustomSampleOrders;
+    safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV(const VkPipelineViewportCoarseSampleOrderStateCreateInfoNV* in_struct);
+    safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV(const safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV& src);
+    safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV& operator=(const safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV& src);
+    safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV();
+    ~safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV();
+    void initialize(const VkPipelineViewportCoarseSampleOrderStateCreateInfoNV* in_struct);
+    void initialize(const safe_VkPipelineViewportCoarseSampleOrderStateCreateInfoNV* src);
+    VkPipelineViewportCoarseSampleOrderStateCreateInfoNV *ptr() { return reinterpret_cast<VkPipelineViewportCoarseSampleOrderStateCreateInfoNV *>(this); }
+    VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const *ptr() const { return reinterpret_cast<VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const *>(this); }
+};
+
+struct safe_VkRayTracingShaderGroupCreateInfoNV {
+    VkStructureType sType;
+    const void* pNext;
+    VkRayTracingShaderGroupTypeNV type;
+    uint32_t generalShader;
+    uint32_t closestHitShader;
+    uint32_t anyHitShader;
+    uint32_t intersectionShader;
+    safe_VkRayTracingShaderGroupCreateInfoNV(const VkRayTracingShaderGroupCreateInfoNV* in_struct);
+    safe_VkRayTracingShaderGroupCreateInfoNV(const safe_VkRayTracingShaderGroupCreateInfoNV& src);
+    safe_VkRayTracingShaderGroupCreateInfoNV& operator=(const safe_VkRayTracingShaderGroupCreateInfoNV& src);
+    safe_VkRayTracingShaderGroupCreateInfoNV();
+    ~safe_VkRayTracingShaderGroupCreateInfoNV();
+    void initialize(const VkRayTracingShaderGroupCreateInfoNV* in_struct);
+    void initialize(const safe_VkRayTracingShaderGroupCreateInfoNV* src);
+    VkRayTracingShaderGroupCreateInfoNV *ptr() { return reinterpret_cast<VkRayTracingShaderGroupCreateInfoNV *>(this); }
+    VkRayTracingShaderGroupCreateInfoNV const *ptr() const { return reinterpret_cast<VkRayTracingShaderGroupCreateInfoNV const *>(this); }
+};
+
+struct safe_VkRayTracingPipelineCreateInfoNV {
+    VkStructureType sType;
+    const void* pNext;
+    VkPipelineCreateFlags flags;
+    uint32_t stageCount;
+    safe_VkPipelineShaderStageCreateInfo* pStages;
+    uint32_t groupCount;
+    safe_VkRayTracingShaderGroupCreateInfoNV* pGroups;
+    uint32_t maxRecursionDepth;
+    VkPipelineLayout layout;
+    VkPipeline basePipelineHandle;
+    int32_t basePipelineIndex;
+    safe_VkRayTracingPipelineCreateInfoNV(const VkRayTracingPipelineCreateInfoNV* in_struct);
+    safe_VkRayTracingPipelineCreateInfoNV(const safe_VkRayTracingPipelineCreateInfoNV& src);
+    safe_VkRayTracingPipelineCreateInfoNV& operator=(const safe_VkRayTracingPipelineCreateInfoNV& src);
+    safe_VkRayTracingPipelineCreateInfoNV();
+    ~safe_VkRayTracingPipelineCreateInfoNV();
+    void initialize(const VkRayTracingPipelineCreateInfoNV* in_struct);
+    void initialize(const safe_VkRayTracingPipelineCreateInfoNV* src);
+    VkRayTracingPipelineCreateInfoNV *ptr() { return reinterpret_cast<VkRayTracingPipelineCreateInfoNV *>(this); }
+    VkRayTracingPipelineCreateInfoNV const *ptr() const { return reinterpret_cast<VkRayTracingPipelineCreateInfoNV const *>(this); }
+};
+
+struct safe_VkGeometryTrianglesNV {
+    VkStructureType sType;
+    const void* pNext;
+    VkBuffer vertexData;
+    VkDeviceSize vertexOffset;
+    uint32_t vertexCount;
+    VkDeviceSize vertexStride;
+    VkFormat vertexFormat;
+    VkBuffer indexData;
+    VkDeviceSize indexOffset;
+    uint32_t indexCount;
+    VkIndexType indexType;
+    VkBuffer transformData;
+    VkDeviceSize transformOffset;
+    safe_VkGeometryTrianglesNV(const VkGeometryTrianglesNV* in_struct);
+    safe_VkGeometryTrianglesNV(const safe_VkGeometryTrianglesNV& src);
+    safe_VkGeometryTrianglesNV& operator=(const safe_VkGeometryTrianglesNV& src);
+    safe_VkGeometryTrianglesNV();
+    ~safe_VkGeometryTrianglesNV();
+    void initialize(const VkGeometryTrianglesNV* in_struct);
+    void initialize(const safe_VkGeometryTrianglesNV* src);
+    VkGeometryTrianglesNV *ptr() { return reinterpret_cast<VkGeometryTrianglesNV *>(this); }
+    VkGeometryTrianglesNV const *ptr() const { return reinterpret_cast<VkGeometryTrianglesNV const *>(this); }
+};
+
+struct safe_VkGeometryAABBNV {
+    VkStructureType sType;
+    const void* pNext;
+    VkBuffer aabbData;
+    uint32_t numAABBs;
+    uint32_t stride;
+    VkDeviceSize offset;
+    safe_VkGeometryAABBNV(const VkGeometryAABBNV* in_struct);
+    safe_VkGeometryAABBNV(const safe_VkGeometryAABBNV& src);
+    safe_VkGeometryAABBNV& operator=(const safe_VkGeometryAABBNV& src);
+    safe_VkGeometryAABBNV();
+    ~safe_VkGeometryAABBNV();
+    void initialize(const VkGeometryAABBNV* in_struct);
+    void initialize(const safe_VkGeometryAABBNV* src);
+    VkGeometryAABBNV *ptr() { return reinterpret_cast<VkGeometryAABBNV *>(this); }
+    VkGeometryAABBNV const *ptr() const { return reinterpret_cast<VkGeometryAABBNV const *>(this); }
+};
+
+struct safe_VkGeometryNV {
+    VkStructureType sType;
+    const void* pNext;
+    VkGeometryTypeNV geometryType;
+    VkGeometryDataNV geometry;
+    VkGeometryFlagsNV flags;
+    safe_VkGeometryNV(const VkGeometryNV* in_struct);
+    safe_VkGeometryNV(const safe_VkGeometryNV& src);
+    safe_VkGeometryNV& operator=(const safe_VkGeometryNV& src);
+    safe_VkGeometryNV();
+    ~safe_VkGeometryNV();
+    void initialize(const VkGeometryNV* in_struct);
+    void initialize(const safe_VkGeometryNV* src);
+    VkGeometryNV *ptr() { return reinterpret_cast<VkGeometryNV *>(this); }
+    VkGeometryNV const *ptr() const { return reinterpret_cast<VkGeometryNV const *>(this); }
+};
+
+struct safe_VkAccelerationStructureInfoNV {
+    VkStructureType sType;
+    const void* pNext;
+    VkAccelerationStructureTypeNV type;
+    VkBuildAccelerationStructureFlagsNV flags;
+    uint32_t instanceCount;
+    uint32_t geometryCount;
+    safe_VkGeometryNV* pGeometries;
+    safe_VkAccelerationStructureInfoNV(const VkAccelerationStructureInfoNV* in_struct);
+    safe_VkAccelerationStructureInfoNV(const safe_VkAccelerationStructureInfoNV& src);
+    safe_VkAccelerationStructureInfoNV& operator=(const safe_VkAccelerationStructureInfoNV& src);
+    safe_VkAccelerationStructureInfoNV();
+    ~safe_VkAccelerationStructureInfoNV();
+    void initialize(const VkAccelerationStructureInfoNV* in_struct);
+    void initialize(const safe_VkAccelerationStructureInfoNV* src);
+    VkAccelerationStructureInfoNV *ptr() { return reinterpret_cast<VkAccelerationStructureInfoNV *>(this); }
+    VkAccelerationStructureInfoNV const *ptr() const { return reinterpret_cast<VkAccelerationStructureInfoNV const *>(this); }
+};
+
+struct safe_VkAccelerationStructureCreateInfoNV {
+    VkStructureType sType;
+    const void* pNext;
+    VkDeviceSize compactedSize;
+    safe_VkAccelerationStructureInfoNV info;
+    safe_VkAccelerationStructureCreateInfoNV(const VkAccelerationStructureCreateInfoNV* in_struct);
+    safe_VkAccelerationStructureCreateInfoNV(const safe_VkAccelerationStructureCreateInfoNV& src);
+    safe_VkAccelerationStructureCreateInfoNV& operator=(const safe_VkAccelerationStructureCreateInfoNV& src);
+    safe_VkAccelerationStructureCreateInfoNV();
+    ~safe_VkAccelerationStructureCreateInfoNV();
+    void initialize(const VkAccelerationStructureCreateInfoNV* in_struct);
+    void initialize(const safe_VkAccelerationStructureCreateInfoNV* src);
+    VkAccelerationStructureCreateInfoNV *ptr() { return reinterpret_cast<VkAccelerationStructureCreateInfoNV *>(this); }
+    VkAccelerationStructureCreateInfoNV const *ptr() const { return reinterpret_cast<VkAccelerationStructureCreateInfoNV const *>(this); }
+};
+
+struct safe_VkBindAccelerationStructureMemoryInfoNV {
+    VkStructureType sType;
+    const void* pNext;
+    VkAccelerationStructureNV accelerationStructure;
+    VkDeviceMemory memory;
+    VkDeviceSize memoryOffset;
+    uint32_t deviceIndexCount;
+    const uint32_t* pDeviceIndices;
+    safe_VkBindAccelerationStructureMemoryInfoNV(const VkBindAccelerationStructureMemoryInfoNV* in_struct);
+    safe_VkBindAccelerationStructureMemoryInfoNV(const safe_VkBindAccelerationStructureMemoryInfoNV& src);
+    safe_VkBindAccelerationStructureMemoryInfoNV& operator=(const safe_VkBindAccelerationStructureMemoryInfoNV& src);
+    safe_VkBindAccelerationStructureMemoryInfoNV();
+    ~safe_VkBindAccelerationStructureMemoryInfoNV();
+    void initialize(const VkBindAccelerationStructureMemoryInfoNV* in_struct);
+    void initialize(const safe_VkBindAccelerationStructureMemoryInfoNV* src);
+    VkBindAccelerationStructureMemoryInfoNV *ptr() { return reinterpret_cast<VkBindAccelerationStructureMemoryInfoNV *>(this); }
+    VkBindAccelerationStructureMemoryInfoNV const *ptr() const { return reinterpret_cast<VkBindAccelerationStructureMemoryInfoNV const *>(this); }
+};
+
+struct safe_VkWriteDescriptorSetAccelerationStructureNV {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t accelerationStructureCount;
+    VkAccelerationStructureNV* pAccelerationStructures;
+    safe_VkWriteDescriptorSetAccelerationStructureNV(const VkWriteDescriptorSetAccelerationStructureNV* in_struct);
+    safe_VkWriteDescriptorSetAccelerationStructureNV(const safe_VkWriteDescriptorSetAccelerationStructureNV& src);
+    safe_VkWriteDescriptorSetAccelerationStructureNV& operator=(const safe_VkWriteDescriptorSetAccelerationStructureNV& src);
+    safe_VkWriteDescriptorSetAccelerationStructureNV();
+    ~safe_VkWriteDescriptorSetAccelerationStructureNV();
+    void initialize(const VkWriteDescriptorSetAccelerationStructureNV* in_struct);
+    void initialize(const safe_VkWriteDescriptorSetAccelerationStructureNV* src);
+    VkWriteDescriptorSetAccelerationStructureNV *ptr() { return reinterpret_cast<VkWriteDescriptorSetAccelerationStructureNV *>(this); }
+    VkWriteDescriptorSetAccelerationStructureNV const *ptr() const { return reinterpret_cast<VkWriteDescriptorSetAccelerationStructureNV const *>(this); }
+};
+
+struct safe_VkAccelerationStructureMemoryRequirementsInfoNV {
+    VkStructureType sType;
+    const void* pNext;
+    VkAccelerationStructureMemoryRequirementsTypeNV type;
+    VkAccelerationStructureNV accelerationStructure;
+    safe_VkAccelerationStructureMemoryRequirementsInfoNV(const VkAccelerationStructureMemoryRequirementsInfoNV* in_struct);
+    safe_VkAccelerationStructureMemoryRequirementsInfoNV(const safe_VkAccelerationStructureMemoryRequirementsInfoNV& src);
+    safe_VkAccelerationStructureMemoryRequirementsInfoNV& operator=(const safe_VkAccelerationStructureMemoryRequirementsInfoNV& src);
+    safe_VkAccelerationStructureMemoryRequirementsInfoNV();
+    ~safe_VkAccelerationStructureMemoryRequirementsInfoNV();
+    void initialize(const VkAccelerationStructureMemoryRequirementsInfoNV* in_struct);
+    void initialize(const safe_VkAccelerationStructureMemoryRequirementsInfoNV* src);
+    VkAccelerationStructureMemoryRequirementsInfoNV *ptr() { return reinterpret_cast<VkAccelerationStructureMemoryRequirementsInfoNV *>(this); }
+    VkAccelerationStructureMemoryRequirementsInfoNV const *ptr() const { return reinterpret_cast<VkAccelerationStructureMemoryRequirementsInfoNV const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceRayTracingPropertiesNV {
+    VkStructureType sType;
+    void* pNext;
+    uint32_t shaderGroupHandleSize;
+    uint32_t maxRecursionDepth;
+    uint32_t maxShaderGroupStride;
+    uint32_t shaderGroupBaseAlignment;
+    uint64_t maxGeometryCount;
+    uint64_t maxInstanceCount;
+    uint64_t maxTriangleCount;
+    uint32_t maxDescriptorSetAccelerationStructures;
+    safe_VkPhysicalDeviceRayTracingPropertiesNV(const VkPhysicalDeviceRayTracingPropertiesNV* in_struct);
+    safe_VkPhysicalDeviceRayTracingPropertiesNV(const safe_VkPhysicalDeviceRayTracingPropertiesNV& src);
+    safe_VkPhysicalDeviceRayTracingPropertiesNV& operator=(const safe_VkPhysicalDeviceRayTracingPropertiesNV& src);
+    safe_VkPhysicalDeviceRayTracingPropertiesNV();
+    ~safe_VkPhysicalDeviceRayTracingPropertiesNV();
+    void initialize(const VkPhysicalDeviceRayTracingPropertiesNV* in_struct);
+    void initialize(const safe_VkPhysicalDeviceRayTracingPropertiesNV* src);
+    VkPhysicalDeviceRayTracingPropertiesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceRayTracingPropertiesNV *>(this); }
+    VkPhysicalDeviceRayTracingPropertiesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceRayTracingPropertiesNV const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 representativeFragmentTest;
+    safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV(const VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV* in_struct);
+    safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV(const safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV& src);
+    safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV& operator=(const safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV& src);
+    safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV();
+    ~safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV();
+    void initialize(const VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV* in_struct);
+    void initialize(const safe_VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV* src);
+    VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV *>(this); }
+    VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const *>(this); }
+};
+
+struct safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV {
+    VkStructureType sType;
+    const void* pNext;
+    VkBool32 representativeFragmentTestEnable;
+    safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV(const VkPipelineRepresentativeFragmentTestStateCreateInfoNV* in_struct);
+    safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV(const safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV& src);
+    safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV& operator=(const safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV& src);
+    safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV();
+    ~safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV();
+    void initialize(const VkPipelineRepresentativeFragmentTestStateCreateInfoNV* in_struct);
+    void initialize(const safe_VkPipelineRepresentativeFragmentTestStateCreateInfoNV* src);
+    VkPipelineRepresentativeFragmentTestStateCreateInfoNV *ptr() { return reinterpret_cast<VkPipelineRepresentativeFragmentTestStateCreateInfoNV *>(this); }
+    VkPipelineRepresentativeFragmentTestStateCreateInfoNV const *ptr() const { return reinterpret_cast<VkPipelineRepresentativeFragmentTestStateCreateInfoNV const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceImageViewImageFormatInfoEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkImageViewType imageViewType;
+    safe_VkPhysicalDeviceImageViewImageFormatInfoEXT(const VkPhysicalDeviceImageViewImageFormatInfoEXT* in_struct);
+    safe_VkPhysicalDeviceImageViewImageFormatInfoEXT(const safe_VkPhysicalDeviceImageViewImageFormatInfoEXT& src);
+    safe_VkPhysicalDeviceImageViewImageFormatInfoEXT& operator=(const safe_VkPhysicalDeviceImageViewImageFormatInfoEXT& src);
+    safe_VkPhysicalDeviceImageViewImageFormatInfoEXT();
+    ~safe_VkPhysicalDeviceImageViewImageFormatInfoEXT();
+    void initialize(const VkPhysicalDeviceImageViewImageFormatInfoEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceImageViewImageFormatInfoEXT* src);
+    VkPhysicalDeviceImageViewImageFormatInfoEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceImageViewImageFormatInfoEXT *>(this); }
+    VkPhysicalDeviceImageViewImageFormatInfoEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceImageViewImageFormatInfoEXT const *>(this); }
+};
+
+struct safe_VkFilterCubicImageViewImageFormatPropertiesEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 filterCubic;
+    VkBool32 filterCubicMinmax;
+    safe_VkFilterCubicImageViewImageFormatPropertiesEXT(const VkFilterCubicImageViewImageFormatPropertiesEXT* in_struct);
+    safe_VkFilterCubicImageViewImageFormatPropertiesEXT(const safe_VkFilterCubicImageViewImageFormatPropertiesEXT& src);
+    safe_VkFilterCubicImageViewImageFormatPropertiesEXT& operator=(const safe_VkFilterCubicImageViewImageFormatPropertiesEXT& src);
+    safe_VkFilterCubicImageViewImageFormatPropertiesEXT();
+    ~safe_VkFilterCubicImageViewImageFormatPropertiesEXT();
+    void initialize(const VkFilterCubicImageViewImageFormatPropertiesEXT* in_struct);
+    void initialize(const safe_VkFilterCubicImageViewImageFormatPropertiesEXT* src);
+    VkFilterCubicImageViewImageFormatPropertiesEXT *ptr() { return reinterpret_cast<VkFilterCubicImageViewImageFormatPropertiesEXT *>(this); }
+    VkFilterCubicImageViewImageFormatPropertiesEXT const *ptr() const { return reinterpret_cast<VkFilterCubicImageViewImageFormatPropertiesEXT const *>(this); }
+};
+
+struct safe_VkDeviceQueueGlobalPriorityCreateInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkQueueGlobalPriorityEXT globalPriority;
+    safe_VkDeviceQueueGlobalPriorityCreateInfoEXT(const VkDeviceQueueGlobalPriorityCreateInfoEXT* in_struct);
+    safe_VkDeviceQueueGlobalPriorityCreateInfoEXT(const safe_VkDeviceQueueGlobalPriorityCreateInfoEXT& src);
+    safe_VkDeviceQueueGlobalPriorityCreateInfoEXT& operator=(const safe_VkDeviceQueueGlobalPriorityCreateInfoEXT& src);
+    safe_VkDeviceQueueGlobalPriorityCreateInfoEXT();
+    ~safe_VkDeviceQueueGlobalPriorityCreateInfoEXT();
+    void initialize(const VkDeviceQueueGlobalPriorityCreateInfoEXT* in_struct);
+    void initialize(const safe_VkDeviceQueueGlobalPriorityCreateInfoEXT* src);
+    VkDeviceQueueGlobalPriorityCreateInfoEXT *ptr() { return reinterpret_cast<VkDeviceQueueGlobalPriorityCreateInfoEXT *>(this); }
+    VkDeviceQueueGlobalPriorityCreateInfoEXT const *ptr() const { return reinterpret_cast<VkDeviceQueueGlobalPriorityCreateInfoEXT const *>(this); }
+};
+
+struct safe_VkImportMemoryHostPointerInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkExternalMemoryHandleTypeFlagBits handleType;
+    void* pHostPointer;
+    safe_VkImportMemoryHostPointerInfoEXT(const VkImportMemoryHostPointerInfoEXT* in_struct);
+    safe_VkImportMemoryHostPointerInfoEXT(const safe_VkImportMemoryHostPointerInfoEXT& src);
+    safe_VkImportMemoryHostPointerInfoEXT& operator=(const safe_VkImportMemoryHostPointerInfoEXT& src);
+    safe_VkImportMemoryHostPointerInfoEXT();
+    ~safe_VkImportMemoryHostPointerInfoEXT();
+    void initialize(const VkImportMemoryHostPointerInfoEXT* in_struct);
+    void initialize(const safe_VkImportMemoryHostPointerInfoEXT* src);
+    VkImportMemoryHostPointerInfoEXT *ptr() { return reinterpret_cast<VkImportMemoryHostPointerInfoEXT *>(this); }
+    VkImportMemoryHostPointerInfoEXT const *ptr() const { return reinterpret_cast<VkImportMemoryHostPointerInfoEXT const *>(this); }
+};
+
+struct safe_VkMemoryHostPointerPropertiesEXT {
+    VkStructureType sType;
+    void* pNext;
+    uint32_t memoryTypeBits;
+    safe_VkMemoryHostPointerPropertiesEXT(const VkMemoryHostPointerPropertiesEXT* in_struct);
+    safe_VkMemoryHostPointerPropertiesEXT(const safe_VkMemoryHostPointerPropertiesEXT& src);
+    safe_VkMemoryHostPointerPropertiesEXT& operator=(const safe_VkMemoryHostPointerPropertiesEXT& src);
+    safe_VkMemoryHostPointerPropertiesEXT();
+    ~safe_VkMemoryHostPointerPropertiesEXT();
+    void initialize(const VkMemoryHostPointerPropertiesEXT* in_struct);
+    void initialize(const safe_VkMemoryHostPointerPropertiesEXT* src);
+    VkMemoryHostPointerPropertiesEXT *ptr() { return reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>(this); }
+    VkMemoryHostPointerPropertiesEXT const *ptr() const { return reinterpret_cast<VkMemoryHostPointerPropertiesEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkDeviceSize minImportedHostPointerAlignment;
+    safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT(const VkPhysicalDeviceExternalMemoryHostPropertiesEXT* in_struct);
+    safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT(const safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT& src);
+    safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT& operator=(const safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT& src);
+    safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT();
+    ~safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT();
+    void initialize(const VkPhysicalDeviceExternalMemoryHostPropertiesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceExternalMemoryHostPropertiesEXT* src);
+    VkPhysicalDeviceExternalMemoryHostPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT *>(this); }
+    VkPhysicalDeviceExternalMemoryHostPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT const *>(this); }
+};
+
+struct safe_VkPipelineCompilerControlCreateInfoAMD {
+    VkStructureType sType;
+    const void* pNext;
+    VkPipelineCompilerControlFlagsAMD compilerControlFlags;
+    safe_VkPipelineCompilerControlCreateInfoAMD(const VkPipelineCompilerControlCreateInfoAMD* in_struct);
+    safe_VkPipelineCompilerControlCreateInfoAMD(const safe_VkPipelineCompilerControlCreateInfoAMD& src);
+    safe_VkPipelineCompilerControlCreateInfoAMD& operator=(const safe_VkPipelineCompilerControlCreateInfoAMD& src);
+    safe_VkPipelineCompilerControlCreateInfoAMD();
+    ~safe_VkPipelineCompilerControlCreateInfoAMD();
+    void initialize(const VkPipelineCompilerControlCreateInfoAMD* in_struct);
+    void initialize(const safe_VkPipelineCompilerControlCreateInfoAMD* src);
+    VkPipelineCompilerControlCreateInfoAMD *ptr() { return reinterpret_cast<VkPipelineCompilerControlCreateInfoAMD *>(this); }
+    VkPipelineCompilerControlCreateInfoAMD const *ptr() const { return reinterpret_cast<VkPipelineCompilerControlCreateInfoAMD const *>(this); }
+};
+
+struct safe_VkCalibratedTimestampInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkTimeDomainEXT timeDomain;
+    safe_VkCalibratedTimestampInfoEXT(const VkCalibratedTimestampInfoEXT* in_struct);
+    safe_VkCalibratedTimestampInfoEXT(const safe_VkCalibratedTimestampInfoEXT& src);
+    safe_VkCalibratedTimestampInfoEXT& operator=(const safe_VkCalibratedTimestampInfoEXT& src);
+    safe_VkCalibratedTimestampInfoEXT();
+    ~safe_VkCalibratedTimestampInfoEXT();
+    void initialize(const VkCalibratedTimestampInfoEXT* in_struct);
+    void initialize(const safe_VkCalibratedTimestampInfoEXT* src);
+    VkCalibratedTimestampInfoEXT *ptr() { return reinterpret_cast<VkCalibratedTimestampInfoEXT *>(this); }
+    VkCalibratedTimestampInfoEXT const *ptr() const { return reinterpret_cast<VkCalibratedTimestampInfoEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceShaderCorePropertiesAMD {
+    VkStructureType sType;
+    void* pNext;
+    uint32_t shaderEngineCount;
+    uint32_t shaderArraysPerEngineCount;
+    uint32_t computeUnitsPerShaderArray;
+    uint32_t simdPerComputeUnit;
+    uint32_t wavefrontsPerSimd;
+    uint32_t wavefrontSize;
+    uint32_t sgprsPerSimd;
+    uint32_t minSgprAllocation;
+    uint32_t maxSgprAllocation;
+    uint32_t sgprAllocationGranularity;
+    uint32_t vgprsPerSimd;
+    uint32_t minVgprAllocation;
+    uint32_t maxVgprAllocation;
+    uint32_t vgprAllocationGranularity;
+    safe_VkPhysicalDeviceShaderCorePropertiesAMD(const VkPhysicalDeviceShaderCorePropertiesAMD* in_struct);
+    safe_VkPhysicalDeviceShaderCorePropertiesAMD(const safe_VkPhysicalDeviceShaderCorePropertiesAMD& src);
+    safe_VkPhysicalDeviceShaderCorePropertiesAMD& operator=(const safe_VkPhysicalDeviceShaderCorePropertiesAMD& src);
+    safe_VkPhysicalDeviceShaderCorePropertiesAMD();
+    ~safe_VkPhysicalDeviceShaderCorePropertiesAMD();
+    void initialize(const VkPhysicalDeviceShaderCorePropertiesAMD* in_struct);
+    void initialize(const safe_VkPhysicalDeviceShaderCorePropertiesAMD* src);
+    VkPhysicalDeviceShaderCorePropertiesAMD *ptr() { return reinterpret_cast<VkPhysicalDeviceShaderCorePropertiesAMD *>(this); }
+    VkPhysicalDeviceShaderCorePropertiesAMD const *ptr() const { return reinterpret_cast<VkPhysicalDeviceShaderCorePropertiesAMD const *>(this); }
+};
+
+struct safe_VkDeviceMemoryOverallocationCreateInfoAMD {
+    VkStructureType sType;
+    const void* pNext;
+    VkMemoryOverallocationBehaviorAMD overallocationBehavior;
+    safe_VkDeviceMemoryOverallocationCreateInfoAMD(const VkDeviceMemoryOverallocationCreateInfoAMD* in_struct);
+    safe_VkDeviceMemoryOverallocationCreateInfoAMD(const safe_VkDeviceMemoryOverallocationCreateInfoAMD& src);
+    safe_VkDeviceMemoryOverallocationCreateInfoAMD& operator=(const safe_VkDeviceMemoryOverallocationCreateInfoAMD& src);
+    safe_VkDeviceMemoryOverallocationCreateInfoAMD();
+    ~safe_VkDeviceMemoryOverallocationCreateInfoAMD();
+    void initialize(const VkDeviceMemoryOverallocationCreateInfoAMD* in_struct);
+    void initialize(const safe_VkDeviceMemoryOverallocationCreateInfoAMD* src);
+    VkDeviceMemoryOverallocationCreateInfoAMD *ptr() { return reinterpret_cast<VkDeviceMemoryOverallocationCreateInfoAMD *>(this); }
+    VkDeviceMemoryOverallocationCreateInfoAMD const *ptr() const { return reinterpret_cast<VkDeviceMemoryOverallocationCreateInfoAMD const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT {
+    VkStructureType sType;
+    void* pNext;
+    uint32_t maxVertexAttribDivisor;
+    safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT(const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT* in_struct);
+    safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT(const safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT& src);
+    safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT& operator=(const safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT& src);
+    safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT();
+    ~safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT();
+    void initialize(const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT* src);
+    VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *>(this); }
+    VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const *>(this); }
+};
+
+struct safe_VkPipelineVertexInputDivisorStateCreateInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t vertexBindingDivisorCount;
+    const VkVertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors;
+    safe_VkPipelineVertexInputDivisorStateCreateInfoEXT(const VkPipelineVertexInputDivisorStateCreateInfoEXT* in_struct);
+    safe_VkPipelineVertexInputDivisorStateCreateInfoEXT(const safe_VkPipelineVertexInputDivisorStateCreateInfoEXT& src);
+    safe_VkPipelineVertexInputDivisorStateCreateInfoEXT& operator=(const safe_VkPipelineVertexInputDivisorStateCreateInfoEXT& src);
+    safe_VkPipelineVertexInputDivisorStateCreateInfoEXT();
+    ~safe_VkPipelineVertexInputDivisorStateCreateInfoEXT();
+    void initialize(const VkPipelineVertexInputDivisorStateCreateInfoEXT* in_struct);
+    void initialize(const safe_VkPipelineVertexInputDivisorStateCreateInfoEXT* src);
+    VkPipelineVertexInputDivisorStateCreateInfoEXT *ptr() { return reinterpret_cast<VkPipelineVertexInputDivisorStateCreateInfoEXT *>(this); }
+    VkPipelineVertexInputDivisorStateCreateInfoEXT const *ptr() const { return reinterpret_cast<VkPipelineVertexInputDivisorStateCreateInfoEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 vertexAttributeInstanceRateDivisor;
+    VkBool32 vertexAttributeInstanceRateZeroDivisor;
+    safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT(const VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT* in_struct);
+    safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT(const safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT& src);
+    safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT& operator=(const safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT& src);
+    safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT();
+    ~safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT();
+    void initialize(const VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT* src);
+    VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *>(this); }
+    VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const *>(this); }
+};
+
+#ifdef VK_USE_PLATFORM_GGP
+struct safe_VkPresentFrameTokenGGP {
+    VkStructureType sType;
+    const void* pNext;
+    GgpFrameToken frameToken;
+    safe_VkPresentFrameTokenGGP(const VkPresentFrameTokenGGP* in_struct);
+    safe_VkPresentFrameTokenGGP(const safe_VkPresentFrameTokenGGP& src);
+    safe_VkPresentFrameTokenGGP& operator=(const safe_VkPresentFrameTokenGGP& src);
+    safe_VkPresentFrameTokenGGP();
+    ~safe_VkPresentFrameTokenGGP();
+    void initialize(const VkPresentFrameTokenGGP* in_struct);
+    void initialize(const safe_VkPresentFrameTokenGGP* src);
+    VkPresentFrameTokenGGP *ptr() { return reinterpret_cast<VkPresentFrameTokenGGP *>(this); }
+    VkPresentFrameTokenGGP const *ptr() const { return reinterpret_cast<VkPresentFrameTokenGGP const *>(this); }
+};
+#endif // VK_USE_PLATFORM_GGP
+
+struct safe_VkPipelineCreationFeedbackCreateInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkPipelineCreationFeedbackEXT* pPipelineCreationFeedback;
+    uint32_t pipelineStageCreationFeedbackCount;
+    VkPipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks;
+    safe_VkPipelineCreationFeedbackCreateInfoEXT(const VkPipelineCreationFeedbackCreateInfoEXT* in_struct);
+    safe_VkPipelineCreationFeedbackCreateInfoEXT(const safe_VkPipelineCreationFeedbackCreateInfoEXT& src);
+    safe_VkPipelineCreationFeedbackCreateInfoEXT& operator=(const safe_VkPipelineCreationFeedbackCreateInfoEXT& src);
+    safe_VkPipelineCreationFeedbackCreateInfoEXT();
+    ~safe_VkPipelineCreationFeedbackCreateInfoEXT();
+    void initialize(const VkPipelineCreationFeedbackCreateInfoEXT* in_struct);
+    void initialize(const safe_VkPipelineCreationFeedbackCreateInfoEXT* src);
+    VkPipelineCreationFeedbackCreateInfoEXT *ptr() { return reinterpret_cast<VkPipelineCreationFeedbackCreateInfoEXT *>(this); }
+    VkPipelineCreationFeedbackCreateInfoEXT const *ptr() const { return reinterpret_cast<VkPipelineCreationFeedbackCreateInfoEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 computeDerivativeGroupQuads;
+    VkBool32 computeDerivativeGroupLinear;
+    safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV(const VkPhysicalDeviceComputeShaderDerivativesFeaturesNV* in_struct);
+    safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV(const safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV& src);
+    safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV& operator=(const safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV& src);
+    safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV();
+    ~safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV();
+    void initialize(const VkPhysicalDeviceComputeShaderDerivativesFeaturesNV* in_struct);
+    void initialize(const safe_VkPhysicalDeviceComputeShaderDerivativesFeaturesNV* src);
+    VkPhysicalDeviceComputeShaderDerivativesFeaturesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV *>(this); }
+    VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceMeshShaderFeaturesNV {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 taskShader;
+    VkBool32 meshShader;
+    safe_VkPhysicalDeviceMeshShaderFeaturesNV(const VkPhysicalDeviceMeshShaderFeaturesNV* in_struct);
+    safe_VkPhysicalDeviceMeshShaderFeaturesNV(const safe_VkPhysicalDeviceMeshShaderFeaturesNV& src);
+    safe_VkPhysicalDeviceMeshShaderFeaturesNV& operator=(const safe_VkPhysicalDeviceMeshShaderFeaturesNV& src);
+    safe_VkPhysicalDeviceMeshShaderFeaturesNV();
+    ~safe_VkPhysicalDeviceMeshShaderFeaturesNV();
+    void initialize(const VkPhysicalDeviceMeshShaderFeaturesNV* in_struct);
+    void initialize(const safe_VkPhysicalDeviceMeshShaderFeaturesNV* src);
+    VkPhysicalDeviceMeshShaderFeaturesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesNV *>(this); }
+    VkPhysicalDeviceMeshShaderFeaturesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesNV const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceMeshShaderPropertiesNV {
+    VkStructureType sType;
+    void* pNext;
+    uint32_t maxDrawMeshTasksCount;
+    uint32_t maxTaskWorkGroupInvocations;
+    uint32_t maxTaskWorkGroupSize[3];
+    uint32_t maxTaskTotalMemorySize;
+    uint32_t maxTaskOutputCount;
+    uint32_t maxMeshWorkGroupInvocations;
+    uint32_t maxMeshWorkGroupSize[3];
+    uint32_t maxMeshTotalMemorySize;
+    uint32_t maxMeshOutputVertices;
+    uint32_t maxMeshOutputPrimitives;
+    uint32_t maxMeshMultiviewViewCount;
+    uint32_t meshOutputPerVertexGranularity;
+    uint32_t meshOutputPerPrimitiveGranularity;
+    safe_VkPhysicalDeviceMeshShaderPropertiesNV(const VkPhysicalDeviceMeshShaderPropertiesNV* in_struct);
+    safe_VkPhysicalDeviceMeshShaderPropertiesNV(const safe_VkPhysicalDeviceMeshShaderPropertiesNV& src);
+    safe_VkPhysicalDeviceMeshShaderPropertiesNV& operator=(const safe_VkPhysicalDeviceMeshShaderPropertiesNV& src);
+    safe_VkPhysicalDeviceMeshShaderPropertiesNV();
+    ~safe_VkPhysicalDeviceMeshShaderPropertiesNV();
+    void initialize(const VkPhysicalDeviceMeshShaderPropertiesNV* in_struct);
+    void initialize(const safe_VkPhysicalDeviceMeshShaderPropertiesNV* src);
+    VkPhysicalDeviceMeshShaderPropertiesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceMeshShaderPropertiesNV *>(this); }
+    VkPhysicalDeviceMeshShaderPropertiesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceMeshShaderPropertiesNV const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 fragmentShaderBarycentric;
+    safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV(const VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV* in_struct);
+    safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV(const safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV& src);
+    safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV& operator=(const safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV& src);
+    safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV();
+    ~safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV();
+    void initialize(const VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV* in_struct);
+    void initialize(const safe_VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV* src);
+    VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV *>(this); }
+    VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 imageFootprint;
+    safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV(const VkPhysicalDeviceShaderImageFootprintFeaturesNV* in_struct);
+    safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV(const safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV& src);
+    safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV& operator=(const safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV& src);
+    safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV();
+    ~safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV();
+    void initialize(const VkPhysicalDeviceShaderImageFootprintFeaturesNV* in_struct);
+    void initialize(const safe_VkPhysicalDeviceShaderImageFootprintFeaturesNV* src);
+    VkPhysicalDeviceShaderImageFootprintFeaturesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceShaderImageFootprintFeaturesNV *>(this); }
+    VkPhysicalDeviceShaderImageFootprintFeaturesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceShaderImageFootprintFeaturesNV const *>(this); }
+};
+
+struct safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t exclusiveScissorCount;
+    const VkRect2D* pExclusiveScissors;
+    safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV(const VkPipelineViewportExclusiveScissorStateCreateInfoNV* in_struct);
+    safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV(const safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV& src);
+    safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV& operator=(const safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV& src);
+    safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV();
+    ~safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV();
+    void initialize(const VkPipelineViewportExclusiveScissorStateCreateInfoNV* in_struct);
+    void initialize(const safe_VkPipelineViewportExclusiveScissorStateCreateInfoNV* src);
+    VkPipelineViewportExclusiveScissorStateCreateInfoNV *ptr() { return reinterpret_cast<VkPipelineViewportExclusiveScissorStateCreateInfoNV *>(this); }
+    VkPipelineViewportExclusiveScissorStateCreateInfoNV const *ptr() const { return reinterpret_cast<VkPipelineViewportExclusiveScissorStateCreateInfoNV const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceExclusiveScissorFeaturesNV {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 exclusiveScissor;
+    safe_VkPhysicalDeviceExclusiveScissorFeaturesNV(const VkPhysicalDeviceExclusiveScissorFeaturesNV* in_struct);
+    safe_VkPhysicalDeviceExclusiveScissorFeaturesNV(const safe_VkPhysicalDeviceExclusiveScissorFeaturesNV& src);
+    safe_VkPhysicalDeviceExclusiveScissorFeaturesNV& operator=(const safe_VkPhysicalDeviceExclusiveScissorFeaturesNV& src);
+    safe_VkPhysicalDeviceExclusiveScissorFeaturesNV();
+    ~safe_VkPhysicalDeviceExclusiveScissorFeaturesNV();
+    void initialize(const VkPhysicalDeviceExclusiveScissorFeaturesNV* in_struct);
+    void initialize(const safe_VkPhysicalDeviceExclusiveScissorFeaturesNV* src);
+    VkPhysicalDeviceExclusiveScissorFeaturesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceExclusiveScissorFeaturesNV *>(this); }
+    VkPhysicalDeviceExclusiveScissorFeaturesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceExclusiveScissorFeaturesNV const *>(this); }
+};
+
+struct safe_VkQueueFamilyCheckpointPropertiesNV {
+    VkStructureType sType;
+    void* pNext;
+    VkPipelineStageFlags checkpointExecutionStageMask;
+    safe_VkQueueFamilyCheckpointPropertiesNV(const VkQueueFamilyCheckpointPropertiesNV* in_struct);
+    safe_VkQueueFamilyCheckpointPropertiesNV(const safe_VkQueueFamilyCheckpointPropertiesNV& src);
+    safe_VkQueueFamilyCheckpointPropertiesNV& operator=(const safe_VkQueueFamilyCheckpointPropertiesNV& src);
+    safe_VkQueueFamilyCheckpointPropertiesNV();
+    ~safe_VkQueueFamilyCheckpointPropertiesNV();
+    void initialize(const VkQueueFamilyCheckpointPropertiesNV* in_struct);
+    void initialize(const safe_VkQueueFamilyCheckpointPropertiesNV* src);
+    VkQueueFamilyCheckpointPropertiesNV *ptr() { return reinterpret_cast<VkQueueFamilyCheckpointPropertiesNV *>(this); }
+    VkQueueFamilyCheckpointPropertiesNV const *ptr() const { return reinterpret_cast<VkQueueFamilyCheckpointPropertiesNV const *>(this); }
+};
+
+struct safe_VkCheckpointDataNV {
+    VkStructureType sType;
+    void* pNext;
+    VkPipelineStageFlagBits stage;
+    void* pCheckpointMarker;
+    safe_VkCheckpointDataNV(const VkCheckpointDataNV* in_struct);
+    safe_VkCheckpointDataNV(const safe_VkCheckpointDataNV& src);
+    safe_VkCheckpointDataNV& operator=(const safe_VkCheckpointDataNV& src);
+    safe_VkCheckpointDataNV();
+    ~safe_VkCheckpointDataNV();
+    void initialize(const VkCheckpointDataNV* in_struct);
+    void initialize(const safe_VkCheckpointDataNV* src);
+    VkCheckpointDataNV *ptr() { return reinterpret_cast<VkCheckpointDataNV *>(this); }
+    VkCheckpointDataNV const *ptr() const { return reinterpret_cast<VkCheckpointDataNV const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 shaderIntegerFunctions2;
+    safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL(const VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL* in_struct);
+    safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL(const safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL& src);
+    safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL& operator=(const safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL& src);
+    safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL();
+    ~safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL();
+    void initialize(const VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL* in_struct);
+    void initialize(const safe_VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL* src);
+    VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL *ptr() { return reinterpret_cast<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL *>(this); }
+    VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *ptr() const { return reinterpret_cast<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *>(this); }
+};
+
+struct safe_VkPerformanceValueDataINTEL {
+    uint32_t value32;
+    uint64_t value64;
+    float valueFloat;
+    VkBool32 valueBool;
+    const char* valueString;
+    safe_VkPerformanceValueDataINTEL(const VkPerformanceValueDataINTEL* in_struct);
+    safe_VkPerformanceValueDataINTEL(const safe_VkPerformanceValueDataINTEL& src);
+    safe_VkPerformanceValueDataINTEL& operator=(const safe_VkPerformanceValueDataINTEL& src);
+    safe_VkPerformanceValueDataINTEL();
+    ~safe_VkPerformanceValueDataINTEL();
+    void initialize(const VkPerformanceValueDataINTEL* in_struct);
+    void initialize(const safe_VkPerformanceValueDataINTEL* src);
+    VkPerformanceValueDataINTEL *ptr() { return reinterpret_cast<VkPerformanceValueDataINTEL *>(this); }
+    VkPerformanceValueDataINTEL const *ptr() const { return reinterpret_cast<VkPerformanceValueDataINTEL const *>(this); }
+};
+
+struct safe_VkInitializePerformanceApiInfoINTEL {
+    VkStructureType sType;
+    const void* pNext;
+    void* pUserData;
+    safe_VkInitializePerformanceApiInfoINTEL(const VkInitializePerformanceApiInfoINTEL* in_struct);
+    safe_VkInitializePerformanceApiInfoINTEL(const safe_VkInitializePerformanceApiInfoINTEL& src);
+    safe_VkInitializePerformanceApiInfoINTEL& operator=(const safe_VkInitializePerformanceApiInfoINTEL& src);
+    safe_VkInitializePerformanceApiInfoINTEL();
+    ~safe_VkInitializePerformanceApiInfoINTEL();
+    void initialize(const VkInitializePerformanceApiInfoINTEL* in_struct);
+    void initialize(const safe_VkInitializePerformanceApiInfoINTEL* src);
+    VkInitializePerformanceApiInfoINTEL *ptr() { return reinterpret_cast<VkInitializePerformanceApiInfoINTEL *>(this); }
+    VkInitializePerformanceApiInfoINTEL const *ptr() const { return reinterpret_cast<VkInitializePerformanceApiInfoINTEL const *>(this); }
+};
+
+struct safe_VkQueryPoolCreateInfoINTEL {
+    VkStructureType sType;
+    const void* pNext;
+    VkQueryPoolSamplingModeINTEL performanceCountersSampling;
+    safe_VkQueryPoolCreateInfoINTEL(const VkQueryPoolCreateInfoINTEL* in_struct);
+    safe_VkQueryPoolCreateInfoINTEL(const safe_VkQueryPoolCreateInfoINTEL& src);
+    safe_VkQueryPoolCreateInfoINTEL& operator=(const safe_VkQueryPoolCreateInfoINTEL& src);
+    safe_VkQueryPoolCreateInfoINTEL();
+    ~safe_VkQueryPoolCreateInfoINTEL();
+    void initialize(const VkQueryPoolCreateInfoINTEL* in_struct);
+    void initialize(const safe_VkQueryPoolCreateInfoINTEL* src);
+    VkQueryPoolCreateInfoINTEL *ptr() { return reinterpret_cast<VkQueryPoolCreateInfoINTEL *>(this); }
+    VkQueryPoolCreateInfoINTEL const *ptr() const { return reinterpret_cast<VkQueryPoolCreateInfoINTEL const *>(this); }
+};
+
+struct safe_VkPerformanceMarkerInfoINTEL {
+    VkStructureType sType;
+    const void* pNext;
+    uint64_t marker;
+    safe_VkPerformanceMarkerInfoINTEL(const VkPerformanceMarkerInfoINTEL* in_struct);
+    safe_VkPerformanceMarkerInfoINTEL(const safe_VkPerformanceMarkerInfoINTEL& src);
+    safe_VkPerformanceMarkerInfoINTEL& operator=(const safe_VkPerformanceMarkerInfoINTEL& src);
+    safe_VkPerformanceMarkerInfoINTEL();
+    ~safe_VkPerformanceMarkerInfoINTEL();
+    void initialize(const VkPerformanceMarkerInfoINTEL* in_struct);
+    void initialize(const safe_VkPerformanceMarkerInfoINTEL* src);
+    VkPerformanceMarkerInfoINTEL *ptr() { return reinterpret_cast<VkPerformanceMarkerInfoINTEL *>(this); }
+    VkPerformanceMarkerInfoINTEL const *ptr() const { return reinterpret_cast<VkPerformanceMarkerInfoINTEL const *>(this); }
+};
+
+struct safe_VkPerformanceStreamMarkerInfoINTEL {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t marker;
+    safe_VkPerformanceStreamMarkerInfoINTEL(const VkPerformanceStreamMarkerInfoINTEL* in_struct);
+    safe_VkPerformanceStreamMarkerInfoINTEL(const safe_VkPerformanceStreamMarkerInfoINTEL& src);
+    safe_VkPerformanceStreamMarkerInfoINTEL& operator=(const safe_VkPerformanceStreamMarkerInfoINTEL& src);
+    safe_VkPerformanceStreamMarkerInfoINTEL();
+    ~safe_VkPerformanceStreamMarkerInfoINTEL();
+    void initialize(const VkPerformanceStreamMarkerInfoINTEL* in_struct);
+    void initialize(const safe_VkPerformanceStreamMarkerInfoINTEL* src);
+    VkPerformanceStreamMarkerInfoINTEL *ptr() { return reinterpret_cast<VkPerformanceStreamMarkerInfoINTEL *>(this); }
+    VkPerformanceStreamMarkerInfoINTEL const *ptr() const { return reinterpret_cast<VkPerformanceStreamMarkerInfoINTEL const *>(this); }
+};
+
+struct safe_VkPerformanceOverrideInfoINTEL {
+    VkStructureType sType;
+    const void* pNext;
+    VkPerformanceOverrideTypeINTEL type;
+    VkBool32 enable;
+    uint64_t parameter;
+    safe_VkPerformanceOverrideInfoINTEL(const VkPerformanceOverrideInfoINTEL* in_struct);
+    safe_VkPerformanceOverrideInfoINTEL(const safe_VkPerformanceOverrideInfoINTEL& src);
+    safe_VkPerformanceOverrideInfoINTEL& operator=(const safe_VkPerformanceOverrideInfoINTEL& src);
+    safe_VkPerformanceOverrideInfoINTEL();
+    ~safe_VkPerformanceOverrideInfoINTEL();
+    void initialize(const VkPerformanceOverrideInfoINTEL* in_struct);
+    void initialize(const safe_VkPerformanceOverrideInfoINTEL* src);
+    VkPerformanceOverrideInfoINTEL *ptr() { return reinterpret_cast<VkPerformanceOverrideInfoINTEL *>(this); }
+    VkPerformanceOverrideInfoINTEL const *ptr() const { return reinterpret_cast<VkPerformanceOverrideInfoINTEL const *>(this); }
+};
+
+struct safe_VkPerformanceConfigurationAcquireInfoINTEL {
+    VkStructureType sType;
+    const void* pNext;
+    VkPerformanceConfigurationTypeINTEL type;
+    safe_VkPerformanceConfigurationAcquireInfoINTEL(const VkPerformanceConfigurationAcquireInfoINTEL* in_struct);
+    safe_VkPerformanceConfigurationAcquireInfoINTEL(const safe_VkPerformanceConfigurationAcquireInfoINTEL& src);
+    safe_VkPerformanceConfigurationAcquireInfoINTEL& operator=(const safe_VkPerformanceConfigurationAcquireInfoINTEL& src);
+    safe_VkPerformanceConfigurationAcquireInfoINTEL();
+    ~safe_VkPerformanceConfigurationAcquireInfoINTEL();
+    void initialize(const VkPerformanceConfigurationAcquireInfoINTEL* in_struct);
+    void initialize(const safe_VkPerformanceConfigurationAcquireInfoINTEL* src);
+    VkPerformanceConfigurationAcquireInfoINTEL *ptr() { return reinterpret_cast<VkPerformanceConfigurationAcquireInfoINTEL *>(this); }
+    VkPerformanceConfigurationAcquireInfoINTEL const *ptr() const { return reinterpret_cast<VkPerformanceConfigurationAcquireInfoINTEL const *>(this); }
+};
+
+struct safe_VkPhysicalDevicePCIBusInfoPropertiesEXT {
+    VkStructureType sType;
+    void* pNext;
+    uint32_t pciDomain;
+    uint32_t pciBus;
+    uint32_t pciDevice;
+    uint32_t pciFunction;
+    safe_VkPhysicalDevicePCIBusInfoPropertiesEXT(const VkPhysicalDevicePCIBusInfoPropertiesEXT* in_struct);
+    safe_VkPhysicalDevicePCIBusInfoPropertiesEXT(const safe_VkPhysicalDevicePCIBusInfoPropertiesEXT& src);
+    safe_VkPhysicalDevicePCIBusInfoPropertiesEXT& operator=(const safe_VkPhysicalDevicePCIBusInfoPropertiesEXT& src);
+    safe_VkPhysicalDevicePCIBusInfoPropertiesEXT();
+    ~safe_VkPhysicalDevicePCIBusInfoPropertiesEXT();
+    void initialize(const VkPhysicalDevicePCIBusInfoPropertiesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDevicePCIBusInfoPropertiesEXT* src);
+    VkPhysicalDevicePCIBusInfoPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDevicePCIBusInfoPropertiesEXT *>(this); }
+    VkPhysicalDevicePCIBusInfoPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDevicePCIBusInfoPropertiesEXT const *>(this); }
+};
+
+struct safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 localDimmingSupport;
+    safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD(const VkDisplayNativeHdrSurfaceCapabilitiesAMD* in_struct);
+    safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD(const safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD& src);
+    safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD& operator=(const safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD& src);
+    safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD();
+    ~safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD();
+    void initialize(const VkDisplayNativeHdrSurfaceCapabilitiesAMD* in_struct);
+    void initialize(const safe_VkDisplayNativeHdrSurfaceCapabilitiesAMD* src);
+    VkDisplayNativeHdrSurfaceCapabilitiesAMD *ptr() { return reinterpret_cast<VkDisplayNativeHdrSurfaceCapabilitiesAMD *>(this); }
+    VkDisplayNativeHdrSurfaceCapabilitiesAMD const *ptr() const { return reinterpret_cast<VkDisplayNativeHdrSurfaceCapabilitiesAMD const *>(this); }
+};
+
+struct safe_VkSwapchainDisplayNativeHdrCreateInfoAMD {
+    VkStructureType sType;
+    const void* pNext;
+    VkBool32 localDimmingEnable;
+    safe_VkSwapchainDisplayNativeHdrCreateInfoAMD(const VkSwapchainDisplayNativeHdrCreateInfoAMD* in_struct);
+    safe_VkSwapchainDisplayNativeHdrCreateInfoAMD(const safe_VkSwapchainDisplayNativeHdrCreateInfoAMD& src);
+    safe_VkSwapchainDisplayNativeHdrCreateInfoAMD& operator=(const safe_VkSwapchainDisplayNativeHdrCreateInfoAMD& src);
+    safe_VkSwapchainDisplayNativeHdrCreateInfoAMD();
+    ~safe_VkSwapchainDisplayNativeHdrCreateInfoAMD();
+    void initialize(const VkSwapchainDisplayNativeHdrCreateInfoAMD* in_struct);
+    void initialize(const safe_VkSwapchainDisplayNativeHdrCreateInfoAMD* src);
+    VkSwapchainDisplayNativeHdrCreateInfoAMD *ptr() { return reinterpret_cast<VkSwapchainDisplayNativeHdrCreateInfoAMD *>(this); }
+    VkSwapchainDisplayNativeHdrCreateInfoAMD const *ptr() const { return reinterpret_cast<VkSwapchainDisplayNativeHdrCreateInfoAMD const *>(this); }
+};
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+struct safe_VkImagePipeSurfaceCreateInfoFUCHSIA {
+    VkStructureType sType;
+    const void* pNext;
+    VkImagePipeSurfaceCreateFlagsFUCHSIA flags;
+    zx_handle_t imagePipeHandle;
+    safe_VkImagePipeSurfaceCreateInfoFUCHSIA(const VkImagePipeSurfaceCreateInfoFUCHSIA* in_struct);
+    safe_VkImagePipeSurfaceCreateInfoFUCHSIA(const safe_VkImagePipeSurfaceCreateInfoFUCHSIA& src);
+    safe_VkImagePipeSurfaceCreateInfoFUCHSIA& operator=(const safe_VkImagePipeSurfaceCreateInfoFUCHSIA& src);
+    safe_VkImagePipeSurfaceCreateInfoFUCHSIA();
+    ~safe_VkImagePipeSurfaceCreateInfoFUCHSIA();
+    void initialize(const VkImagePipeSurfaceCreateInfoFUCHSIA* in_struct);
+    void initialize(const safe_VkImagePipeSurfaceCreateInfoFUCHSIA* src);
+    VkImagePipeSurfaceCreateInfoFUCHSIA *ptr() { return reinterpret_cast<VkImagePipeSurfaceCreateInfoFUCHSIA *>(this); }
+    VkImagePipeSurfaceCreateInfoFUCHSIA const *ptr() const { return reinterpret_cast<VkImagePipeSurfaceCreateInfoFUCHSIA const *>(this); }
+};
+#endif // VK_USE_PLATFORM_FUCHSIA
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+struct safe_VkMetalSurfaceCreateInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkMetalSurfaceCreateFlagsEXT flags;
+    const CAMetalLayer* pLayer;
+    safe_VkMetalSurfaceCreateInfoEXT(const VkMetalSurfaceCreateInfoEXT* in_struct);
+    safe_VkMetalSurfaceCreateInfoEXT(const safe_VkMetalSurfaceCreateInfoEXT& src);
+    safe_VkMetalSurfaceCreateInfoEXT& operator=(const safe_VkMetalSurfaceCreateInfoEXT& src);
+    safe_VkMetalSurfaceCreateInfoEXT();
+    ~safe_VkMetalSurfaceCreateInfoEXT();
+    void initialize(const VkMetalSurfaceCreateInfoEXT* in_struct);
+    void initialize(const safe_VkMetalSurfaceCreateInfoEXT* src);
+    VkMetalSurfaceCreateInfoEXT *ptr() { return reinterpret_cast<VkMetalSurfaceCreateInfoEXT *>(this); }
+    VkMetalSurfaceCreateInfoEXT const *ptr() const { return reinterpret_cast<VkMetalSurfaceCreateInfoEXT const *>(this); }
+};
+#endif // VK_USE_PLATFORM_METAL_EXT
+
+struct safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 fragmentDensityMap;
+    VkBool32 fragmentDensityMapDynamic;
+    VkBool32 fragmentDensityMapNonSubsampledImages;
+    safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT(const VkPhysicalDeviceFragmentDensityMapFeaturesEXT* in_struct);
+    safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT(const safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT& src);
+    safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT& operator=(const safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT& src);
+    safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT();
+    ~safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT();
+    void initialize(const VkPhysicalDeviceFragmentDensityMapFeaturesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceFragmentDensityMapFeaturesEXT* src);
+    VkPhysicalDeviceFragmentDensityMapFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceFragmentDensityMapFeaturesEXT *>(this); }
+    VkPhysicalDeviceFragmentDensityMapFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceFragmentDensityMapFeaturesEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkExtent2D minFragmentDensityTexelSize;
+    VkExtent2D maxFragmentDensityTexelSize;
+    VkBool32 fragmentDensityInvocations;
+    safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT(const VkPhysicalDeviceFragmentDensityMapPropertiesEXT* in_struct);
+    safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT(const safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT& src);
+    safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT& operator=(const safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT& src);
+    safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT();
+    ~safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT();
+    void initialize(const VkPhysicalDeviceFragmentDensityMapPropertiesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceFragmentDensityMapPropertiesEXT* src);
+    VkPhysicalDeviceFragmentDensityMapPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceFragmentDensityMapPropertiesEXT *>(this); }
+    VkPhysicalDeviceFragmentDensityMapPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceFragmentDensityMapPropertiesEXT const *>(this); }
+};
+
+struct safe_VkRenderPassFragmentDensityMapCreateInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkAttachmentReference fragmentDensityMapAttachment;
+    safe_VkRenderPassFragmentDensityMapCreateInfoEXT(const VkRenderPassFragmentDensityMapCreateInfoEXT* in_struct);
+    safe_VkRenderPassFragmentDensityMapCreateInfoEXT(const safe_VkRenderPassFragmentDensityMapCreateInfoEXT& src);
+    safe_VkRenderPassFragmentDensityMapCreateInfoEXT& operator=(const safe_VkRenderPassFragmentDensityMapCreateInfoEXT& src);
+    safe_VkRenderPassFragmentDensityMapCreateInfoEXT();
+    ~safe_VkRenderPassFragmentDensityMapCreateInfoEXT();
+    void initialize(const VkRenderPassFragmentDensityMapCreateInfoEXT* in_struct);
+    void initialize(const safe_VkRenderPassFragmentDensityMapCreateInfoEXT* src);
+    VkRenderPassFragmentDensityMapCreateInfoEXT *ptr() { return reinterpret_cast<VkRenderPassFragmentDensityMapCreateInfoEXT *>(this); }
+    VkRenderPassFragmentDensityMapCreateInfoEXT const *ptr() const { return reinterpret_cast<VkRenderPassFragmentDensityMapCreateInfoEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 scalarBlockLayout;
+    safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT(const VkPhysicalDeviceScalarBlockLayoutFeaturesEXT* in_struct);
+    safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT(const safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT& src);
+    safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT& operator=(const safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT& src);
+    safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT();
+    ~safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT();
+    void initialize(const VkPhysicalDeviceScalarBlockLayoutFeaturesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceScalarBlockLayoutFeaturesEXT* src);
+    VkPhysicalDeviceScalarBlockLayoutFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceScalarBlockLayoutFeaturesEXT *>(this); }
+    VkPhysicalDeviceScalarBlockLayoutFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceScalarBlockLayoutFeaturesEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 subgroupSizeControl;
+    VkBool32 computeFullSubgroups;
+    safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT(const VkPhysicalDeviceSubgroupSizeControlFeaturesEXT* in_struct);
+    safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT(const safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT& src);
+    safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT& operator=(const safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT& src);
+    safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT();
+    ~safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT();
+    void initialize(const VkPhysicalDeviceSubgroupSizeControlFeaturesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceSubgroupSizeControlFeaturesEXT* src);
+    VkPhysicalDeviceSubgroupSizeControlFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlFeaturesEXT *>(this); }
+    VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT {
+    VkStructureType sType;
+    void* pNext;
+    uint32_t minSubgroupSize;
+    uint32_t maxSubgroupSize;
+    uint32_t maxComputeWorkgroupSubgroups;
+    VkShaderStageFlags requiredSubgroupSizeStages;
+    safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT(const VkPhysicalDeviceSubgroupSizeControlPropertiesEXT* in_struct);
+    safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT(const safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT& src);
+    safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT& operator=(const safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT& src);
+    safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT();
+    ~safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT();
+    void initialize(const VkPhysicalDeviceSubgroupSizeControlPropertiesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceSubgroupSizeControlPropertiesEXT* src);
+    VkPhysicalDeviceSubgroupSizeControlPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlPropertiesEXT *>(this); }
+    VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const *>(this); }
+};
+
+struct safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT {
+    VkStructureType sType;
+    void* pNext;
+    uint32_t requiredSubgroupSize;
+    safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT(const VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT* in_struct);
+    safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT(const safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT& src);
+    safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT& operator=(const safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT& src);
+    safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT();
+    ~safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT();
+    void initialize(const VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT* in_struct);
+    void initialize(const safe_VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT* src);
+    VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT *ptr() { return reinterpret_cast<VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT *>(this); }
+    VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const *ptr() const { return reinterpret_cast<VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceShaderCoreProperties2AMD {
+    VkStructureType sType;
+    void* pNext;
+    VkShaderCorePropertiesFlagsAMD shaderCoreFeatures;
+    uint32_t activeComputeUnitCount;
+    safe_VkPhysicalDeviceShaderCoreProperties2AMD(const VkPhysicalDeviceShaderCoreProperties2AMD* in_struct);
+    safe_VkPhysicalDeviceShaderCoreProperties2AMD(const safe_VkPhysicalDeviceShaderCoreProperties2AMD& src);
+    safe_VkPhysicalDeviceShaderCoreProperties2AMD& operator=(const safe_VkPhysicalDeviceShaderCoreProperties2AMD& src);
+    safe_VkPhysicalDeviceShaderCoreProperties2AMD();
+    ~safe_VkPhysicalDeviceShaderCoreProperties2AMD();
+    void initialize(const VkPhysicalDeviceShaderCoreProperties2AMD* in_struct);
+    void initialize(const safe_VkPhysicalDeviceShaderCoreProperties2AMD* src);
+    VkPhysicalDeviceShaderCoreProperties2AMD *ptr() { return reinterpret_cast<VkPhysicalDeviceShaderCoreProperties2AMD *>(this); }
+    VkPhysicalDeviceShaderCoreProperties2AMD const *ptr() const { return reinterpret_cast<VkPhysicalDeviceShaderCoreProperties2AMD const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 deviceCoherentMemory;
+    safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD(const VkPhysicalDeviceCoherentMemoryFeaturesAMD* in_struct);
+    safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD(const safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD& src);
+    safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD& operator=(const safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD& src);
+    safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD();
+    ~safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD();
+    void initialize(const VkPhysicalDeviceCoherentMemoryFeaturesAMD* in_struct);
+    void initialize(const safe_VkPhysicalDeviceCoherentMemoryFeaturesAMD* src);
+    VkPhysicalDeviceCoherentMemoryFeaturesAMD *ptr() { return reinterpret_cast<VkPhysicalDeviceCoherentMemoryFeaturesAMD *>(this); }
+    VkPhysicalDeviceCoherentMemoryFeaturesAMD const *ptr() const { return reinterpret_cast<VkPhysicalDeviceCoherentMemoryFeaturesAMD const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkDeviceSize heapBudget[VK_MAX_MEMORY_HEAPS];
+    VkDeviceSize heapUsage[VK_MAX_MEMORY_HEAPS];
+    safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT(const VkPhysicalDeviceMemoryBudgetPropertiesEXT* in_struct);
+    safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT(const safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT& src);
+    safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT& operator=(const safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT& src);
+    safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT();
+    ~safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT();
+    void initialize(const VkPhysicalDeviceMemoryBudgetPropertiesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceMemoryBudgetPropertiesEXT* src);
+    VkPhysicalDeviceMemoryBudgetPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceMemoryBudgetPropertiesEXT *>(this); }
+    VkPhysicalDeviceMemoryBudgetPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceMemoryBudgetPropertiesEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 memoryPriority;
+    safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT(const VkPhysicalDeviceMemoryPriorityFeaturesEXT* in_struct);
+    safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT(const safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT& src);
+    safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT& operator=(const safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT& src);
+    safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT();
+    ~safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT();
+    void initialize(const VkPhysicalDeviceMemoryPriorityFeaturesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceMemoryPriorityFeaturesEXT* src);
+    VkPhysicalDeviceMemoryPriorityFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceMemoryPriorityFeaturesEXT *>(this); }
+    VkPhysicalDeviceMemoryPriorityFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceMemoryPriorityFeaturesEXT const *>(this); }
+};
+
+struct safe_VkMemoryPriorityAllocateInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    float priority;
+    safe_VkMemoryPriorityAllocateInfoEXT(const VkMemoryPriorityAllocateInfoEXT* in_struct);
+    safe_VkMemoryPriorityAllocateInfoEXT(const safe_VkMemoryPriorityAllocateInfoEXT& src);
+    safe_VkMemoryPriorityAllocateInfoEXT& operator=(const safe_VkMemoryPriorityAllocateInfoEXT& src);
+    safe_VkMemoryPriorityAllocateInfoEXT();
+    ~safe_VkMemoryPriorityAllocateInfoEXT();
+    void initialize(const VkMemoryPriorityAllocateInfoEXT* in_struct);
+    void initialize(const safe_VkMemoryPriorityAllocateInfoEXT* src);
+    VkMemoryPriorityAllocateInfoEXT *ptr() { return reinterpret_cast<VkMemoryPriorityAllocateInfoEXT *>(this); }
+    VkMemoryPriorityAllocateInfoEXT const *ptr() const { return reinterpret_cast<VkMemoryPriorityAllocateInfoEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 dedicatedAllocationImageAliasing;
+    safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV(const VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV* in_struct);
+    safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV(const safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV& src);
+    safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV& operator=(const safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV& src);
+    safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV();
+    ~safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV();
+    void initialize(const VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV* in_struct);
+    void initialize(const safe_VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV* src);
+    VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV *>(this); }
+    VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 bufferDeviceAddress;
+    VkBool32 bufferDeviceAddressCaptureReplay;
+    VkBool32 bufferDeviceAddressMultiDevice;
+    safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT(const VkPhysicalDeviceBufferDeviceAddressFeaturesEXT* in_struct);
+    safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT(const safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT& src);
+    safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT& operator=(const safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT& src);
+    safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT();
+    ~safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT();
+    void initialize(const VkPhysicalDeviceBufferDeviceAddressFeaturesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceBufferDeviceAddressFeaturesEXT* src);
+    VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *>(this); }
+    VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const *>(this); }
+};
+
+struct safe_VkBufferDeviceAddressCreateInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkDeviceAddress deviceAddress;
+    safe_VkBufferDeviceAddressCreateInfoEXT(const VkBufferDeviceAddressCreateInfoEXT* in_struct);
+    safe_VkBufferDeviceAddressCreateInfoEXT(const safe_VkBufferDeviceAddressCreateInfoEXT& src);
+    safe_VkBufferDeviceAddressCreateInfoEXT& operator=(const safe_VkBufferDeviceAddressCreateInfoEXT& src);
+    safe_VkBufferDeviceAddressCreateInfoEXT();
+    ~safe_VkBufferDeviceAddressCreateInfoEXT();
+    void initialize(const VkBufferDeviceAddressCreateInfoEXT* in_struct);
+    void initialize(const safe_VkBufferDeviceAddressCreateInfoEXT* src);
+    VkBufferDeviceAddressCreateInfoEXT *ptr() { return reinterpret_cast<VkBufferDeviceAddressCreateInfoEXT *>(this); }
+    VkBufferDeviceAddressCreateInfoEXT const *ptr() const { return reinterpret_cast<VkBufferDeviceAddressCreateInfoEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceToolPropertiesEXT {
+    VkStructureType sType;
+    void* pNext;
+    char name[VK_MAX_EXTENSION_NAME_SIZE];
+    char version[VK_MAX_EXTENSION_NAME_SIZE];
+    VkToolPurposeFlagsEXT purposes;
+    char description[VK_MAX_DESCRIPTION_SIZE];
+    char layer[VK_MAX_EXTENSION_NAME_SIZE];
+    safe_VkPhysicalDeviceToolPropertiesEXT(const VkPhysicalDeviceToolPropertiesEXT* in_struct);
+    safe_VkPhysicalDeviceToolPropertiesEXT(const safe_VkPhysicalDeviceToolPropertiesEXT& src);
+    safe_VkPhysicalDeviceToolPropertiesEXT& operator=(const safe_VkPhysicalDeviceToolPropertiesEXT& src);
+    safe_VkPhysicalDeviceToolPropertiesEXT();
+    ~safe_VkPhysicalDeviceToolPropertiesEXT();
+    void initialize(const VkPhysicalDeviceToolPropertiesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceToolPropertiesEXT* src);
+    VkPhysicalDeviceToolPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT *>(this); }
+    VkPhysicalDeviceToolPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT const *>(this); }
+};
+
+struct safe_VkImageStencilUsageCreateInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkImageUsageFlags stencilUsage;
+    safe_VkImageStencilUsageCreateInfoEXT(const VkImageStencilUsageCreateInfoEXT* in_struct);
+    safe_VkImageStencilUsageCreateInfoEXT(const safe_VkImageStencilUsageCreateInfoEXT& src);
+    safe_VkImageStencilUsageCreateInfoEXT& operator=(const safe_VkImageStencilUsageCreateInfoEXT& src);
+    safe_VkImageStencilUsageCreateInfoEXT();
+    ~safe_VkImageStencilUsageCreateInfoEXT();
+    void initialize(const VkImageStencilUsageCreateInfoEXT* in_struct);
+    void initialize(const safe_VkImageStencilUsageCreateInfoEXT* src);
+    VkImageStencilUsageCreateInfoEXT *ptr() { return reinterpret_cast<VkImageStencilUsageCreateInfoEXT *>(this); }
+    VkImageStencilUsageCreateInfoEXT const *ptr() const { return reinterpret_cast<VkImageStencilUsageCreateInfoEXT const *>(this); }
+};
+
+struct safe_VkValidationFeaturesEXT {
+    VkStructureType sType;
+    const void* pNext;
+    uint32_t enabledValidationFeatureCount;
+    const VkValidationFeatureEnableEXT* pEnabledValidationFeatures;
+    uint32_t disabledValidationFeatureCount;
+    const VkValidationFeatureDisableEXT* pDisabledValidationFeatures;
+    safe_VkValidationFeaturesEXT(const VkValidationFeaturesEXT* in_struct);
+    safe_VkValidationFeaturesEXT(const safe_VkValidationFeaturesEXT& src);
+    safe_VkValidationFeaturesEXT& operator=(const safe_VkValidationFeaturesEXT& src);
+    safe_VkValidationFeaturesEXT();
+    ~safe_VkValidationFeaturesEXT();
+    void initialize(const VkValidationFeaturesEXT* in_struct);
+    void initialize(const safe_VkValidationFeaturesEXT* src);
+    VkValidationFeaturesEXT *ptr() { return reinterpret_cast<VkValidationFeaturesEXT *>(this); }
+    VkValidationFeaturesEXT const *ptr() const { return reinterpret_cast<VkValidationFeaturesEXT const *>(this); }
+};
+
+struct safe_VkCooperativeMatrixPropertiesNV {
+    VkStructureType sType;
+    void* pNext;
+    uint32_t MSize;
+    uint32_t NSize;
+    uint32_t KSize;
+    VkComponentTypeNV AType;
+    VkComponentTypeNV BType;
+    VkComponentTypeNV CType;
+    VkComponentTypeNV DType;
+    VkScopeNV scope;
+    safe_VkCooperativeMatrixPropertiesNV(const VkCooperativeMatrixPropertiesNV* in_struct);
+    safe_VkCooperativeMatrixPropertiesNV(const safe_VkCooperativeMatrixPropertiesNV& src);
+    safe_VkCooperativeMatrixPropertiesNV& operator=(const safe_VkCooperativeMatrixPropertiesNV& src);
+    safe_VkCooperativeMatrixPropertiesNV();
+    ~safe_VkCooperativeMatrixPropertiesNV();
+    void initialize(const VkCooperativeMatrixPropertiesNV* in_struct);
+    void initialize(const safe_VkCooperativeMatrixPropertiesNV* src);
+    VkCooperativeMatrixPropertiesNV *ptr() { return reinterpret_cast<VkCooperativeMatrixPropertiesNV *>(this); }
+    VkCooperativeMatrixPropertiesNV const *ptr() const { return reinterpret_cast<VkCooperativeMatrixPropertiesNV const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 cooperativeMatrix;
+    VkBool32 cooperativeMatrixRobustBufferAccess;
+    safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV(const VkPhysicalDeviceCooperativeMatrixFeaturesNV* in_struct);
+    safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV(const safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV& src);
+    safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV& operator=(const safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV& src);
+    safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV();
+    ~safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV();
+    void initialize(const VkPhysicalDeviceCooperativeMatrixFeaturesNV* in_struct);
+    void initialize(const safe_VkPhysicalDeviceCooperativeMatrixFeaturesNV* src);
+    VkPhysicalDeviceCooperativeMatrixFeaturesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceCooperativeMatrixFeaturesNV *>(this); }
+    VkPhysicalDeviceCooperativeMatrixFeaturesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceCooperativeMatrixFeaturesNV const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV {
+    VkStructureType sType;
+    void* pNext;
+    VkShaderStageFlags cooperativeMatrixSupportedStages;
+    safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV(const VkPhysicalDeviceCooperativeMatrixPropertiesNV* in_struct);
+    safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV(const safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV& src);
+    safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV& operator=(const safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV& src);
+    safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV();
+    ~safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV();
+    void initialize(const VkPhysicalDeviceCooperativeMatrixPropertiesNV* in_struct);
+    void initialize(const safe_VkPhysicalDeviceCooperativeMatrixPropertiesNV* src);
+    VkPhysicalDeviceCooperativeMatrixPropertiesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceCooperativeMatrixPropertiesNV *>(this); }
+    VkPhysicalDeviceCooperativeMatrixPropertiesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceCooperativeMatrixPropertiesNV const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 coverageReductionMode;
+    safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV(const VkPhysicalDeviceCoverageReductionModeFeaturesNV* in_struct);
+    safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV(const safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV& src);
+    safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV& operator=(const safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV& src);
+    safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV();
+    ~safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV();
+    void initialize(const VkPhysicalDeviceCoverageReductionModeFeaturesNV* in_struct);
+    void initialize(const safe_VkPhysicalDeviceCoverageReductionModeFeaturesNV* src);
+    VkPhysicalDeviceCoverageReductionModeFeaturesNV *ptr() { return reinterpret_cast<VkPhysicalDeviceCoverageReductionModeFeaturesNV *>(this); }
+    VkPhysicalDeviceCoverageReductionModeFeaturesNV const *ptr() const { return reinterpret_cast<VkPhysicalDeviceCoverageReductionModeFeaturesNV const *>(this); }
+};
+
+struct safe_VkPipelineCoverageReductionStateCreateInfoNV {
+    VkStructureType sType;
+    const void* pNext;
+    VkPipelineCoverageReductionStateCreateFlagsNV flags;
+    VkCoverageReductionModeNV coverageReductionMode;
+    safe_VkPipelineCoverageReductionStateCreateInfoNV(const VkPipelineCoverageReductionStateCreateInfoNV* in_struct);
+    safe_VkPipelineCoverageReductionStateCreateInfoNV(const safe_VkPipelineCoverageReductionStateCreateInfoNV& src);
+    safe_VkPipelineCoverageReductionStateCreateInfoNV& operator=(const safe_VkPipelineCoverageReductionStateCreateInfoNV& src);
+    safe_VkPipelineCoverageReductionStateCreateInfoNV();
+    ~safe_VkPipelineCoverageReductionStateCreateInfoNV();
+    void initialize(const VkPipelineCoverageReductionStateCreateInfoNV* in_struct);
+    void initialize(const safe_VkPipelineCoverageReductionStateCreateInfoNV* src);
+    VkPipelineCoverageReductionStateCreateInfoNV *ptr() { return reinterpret_cast<VkPipelineCoverageReductionStateCreateInfoNV *>(this); }
+    VkPipelineCoverageReductionStateCreateInfoNV const *ptr() const { return reinterpret_cast<VkPipelineCoverageReductionStateCreateInfoNV const *>(this); }
+};
+
+struct safe_VkFramebufferMixedSamplesCombinationNV {
+    VkStructureType sType;
+    void* pNext;
+    VkCoverageReductionModeNV coverageReductionMode;
+    VkSampleCountFlagBits rasterizationSamples;
+    VkSampleCountFlags depthStencilSamples;
+    VkSampleCountFlags colorSamples;
+    safe_VkFramebufferMixedSamplesCombinationNV(const VkFramebufferMixedSamplesCombinationNV* in_struct);
+    safe_VkFramebufferMixedSamplesCombinationNV(const safe_VkFramebufferMixedSamplesCombinationNV& src);
+    safe_VkFramebufferMixedSamplesCombinationNV& operator=(const safe_VkFramebufferMixedSamplesCombinationNV& src);
+    safe_VkFramebufferMixedSamplesCombinationNV();
+    ~safe_VkFramebufferMixedSamplesCombinationNV();
+    void initialize(const VkFramebufferMixedSamplesCombinationNV* in_struct);
+    void initialize(const safe_VkFramebufferMixedSamplesCombinationNV* src);
+    VkFramebufferMixedSamplesCombinationNV *ptr() { return reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>(this); }
+    VkFramebufferMixedSamplesCombinationNV const *ptr() const { return reinterpret_cast<VkFramebufferMixedSamplesCombinationNV const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 fragmentShaderSampleInterlock;
+    VkBool32 fragmentShaderPixelInterlock;
+    VkBool32 fragmentShaderShadingRateInterlock;
+    safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT(const VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT* in_struct);
+    safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT(const safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT& src);
+    safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT& operator=(const safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT& src);
+    safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT();
+    ~safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT();
+    void initialize(const VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT* src);
+    VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT *>(this); }
+    VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 ycbcrImageArrays;
+    safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT(const VkPhysicalDeviceYcbcrImageArraysFeaturesEXT* in_struct);
+    safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT(const safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT& src);
+    safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT& operator=(const safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT& src);
+    safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT();
+    ~safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT();
+    void initialize(const VkPhysicalDeviceYcbcrImageArraysFeaturesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceYcbcrImageArraysFeaturesEXT* src);
+    VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *>(this); }
+    VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const *>(this); }
+};
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+struct safe_VkSurfaceFullScreenExclusiveInfoEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkFullScreenExclusiveEXT fullScreenExclusive;
+    safe_VkSurfaceFullScreenExclusiveInfoEXT(const VkSurfaceFullScreenExclusiveInfoEXT* in_struct);
+    safe_VkSurfaceFullScreenExclusiveInfoEXT(const safe_VkSurfaceFullScreenExclusiveInfoEXT& src);
+    safe_VkSurfaceFullScreenExclusiveInfoEXT& operator=(const safe_VkSurfaceFullScreenExclusiveInfoEXT& src);
+    safe_VkSurfaceFullScreenExclusiveInfoEXT();
+    ~safe_VkSurfaceFullScreenExclusiveInfoEXT();
+    void initialize(const VkSurfaceFullScreenExclusiveInfoEXT* in_struct);
+    void initialize(const safe_VkSurfaceFullScreenExclusiveInfoEXT* src);
+    VkSurfaceFullScreenExclusiveInfoEXT *ptr() { return reinterpret_cast<VkSurfaceFullScreenExclusiveInfoEXT *>(this); }
+    VkSurfaceFullScreenExclusiveInfoEXT const *ptr() const { return reinterpret_cast<VkSurfaceFullScreenExclusiveInfoEXT const *>(this); }
+};
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+struct safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 fullScreenExclusiveSupported;
+    safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT(const VkSurfaceCapabilitiesFullScreenExclusiveEXT* in_struct);
+    safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT(const safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT& src);
+    safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT& operator=(const safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT& src);
+    safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT();
+    ~safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT();
+    void initialize(const VkSurfaceCapabilitiesFullScreenExclusiveEXT* in_struct);
+    void initialize(const safe_VkSurfaceCapabilitiesFullScreenExclusiveEXT* src);
+    VkSurfaceCapabilitiesFullScreenExclusiveEXT *ptr() { return reinterpret_cast<VkSurfaceCapabilitiesFullScreenExclusiveEXT *>(this); }
+    VkSurfaceCapabilitiesFullScreenExclusiveEXT const *ptr() const { return reinterpret_cast<VkSurfaceCapabilitiesFullScreenExclusiveEXT const *>(this); }
+};
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+struct safe_VkSurfaceFullScreenExclusiveWin32InfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    HMONITOR hmonitor;
+    safe_VkSurfaceFullScreenExclusiveWin32InfoEXT(const VkSurfaceFullScreenExclusiveWin32InfoEXT* in_struct);
+    safe_VkSurfaceFullScreenExclusiveWin32InfoEXT(const safe_VkSurfaceFullScreenExclusiveWin32InfoEXT& src);
+    safe_VkSurfaceFullScreenExclusiveWin32InfoEXT& operator=(const safe_VkSurfaceFullScreenExclusiveWin32InfoEXT& src);
+    safe_VkSurfaceFullScreenExclusiveWin32InfoEXT();
+    ~safe_VkSurfaceFullScreenExclusiveWin32InfoEXT();
+    void initialize(const VkSurfaceFullScreenExclusiveWin32InfoEXT* in_struct);
+    void initialize(const safe_VkSurfaceFullScreenExclusiveWin32InfoEXT* src);
+    VkSurfaceFullScreenExclusiveWin32InfoEXT *ptr() { return reinterpret_cast<VkSurfaceFullScreenExclusiveWin32InfoEXT *>(this); }
+    VkSurfaceFullScreenExclusiveWin32InfoEXT const *ptr() const { return reinterpret_cast<VkSurfaceFullScreenExclusiveWin32InfoEXT const *>(this); }
+};
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+struct safe_VkHeadlessSurfaceCreateInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkHeadlessSurfaceCreateFlagsEXT flags;
+    safe_VkHeadlessSurfaceCreateInfoEXT(const VkHeadlessSurfaceCreateInfoEXT* in_struct);
+    safe_VkHeadlessSurfaceCreateInfoEXT(const safe_VkHeadlessSurfaceCreateInfoEXT& src);
+    safe_VkHeadlessSurfaceCreateInfoEXT& operator=(const safe_VkHeadlessSurfaceCreateInfoEXT& src);
+    safe_VkHeadlessSurfaceCreateInfoEXT();
+    ~safe_VkHeadlessSurfaceCreateInfoEXT();
+    void initialize(const VkHeadlessSurfaceCreateInfoEXT* in_struct);
+    void initialize(const safe_VkHeadlessSurfaceCreateInfoEXT* src);
+    VkHeadlessSurfaceCreateInfoEXT *ptr() { return reinterpret_cast<VkHeadlessSurfaceCreateInfoEXT *>(this); }
+    VkHeadlessSurfaceCreateInfoEXT const *ptr() const { return reinterpret_cast<VkHeadlessSurfaceCreateInfoEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceLineRasterizationFeaturesEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 rectangularLines;
+    VkBool32 bresenhamLines;
+    VkBool32 smoothLines;
+    VkBool32 stippledRectangularLines;
+    VkBool32 stippledBresenhamLines;
+    VkBool32 stippledSmoothLines;
+    safe_VkPhysicalDeviceLineRasterizationFeaturesEXT(const VkPhysicalDeviceLineRasterizationFeaturesEXT* in_struct);
+    safe_VkPhysicalDeviceLineRasterizationFeaturesEXT(const safe_VkPhysicalDeviceLineRasterizationFeaturesEXT& src);
+    safe_VkPhysicalDeviceLineRasterizationFeaturesEXT& operator=(const safe_VkPhysicalDeviceLineRasterizationFeaturesEXT& src);
+    safe_VkPhysicalDeviceLineRasterizationFeaturesEXT();
+    ~safe_VkPhysicalDeviceLineRasterizationFeaturesEXT();
+    void initialize(const VkPhysicalDeviceLineRasterizationFeaturesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceLineRasterizationFeaturesEXT* src);
+    VkPhysicalDeviceLineRasterizationFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceLineRasterizationFeaturesEXT *>(this); }
+    VkPhysicalDeviceLineRasterizationFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceLineRasterizationFeaturesEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceLineRasterizationPropertiesEXT {
+    VkStructureType sType;
+    void* pNext;
+    uint32_t lineSubPixelPrecisionBits;
+    safe_VkPhysicalDeviceLineRasterizationPropertiesEXT(const VkPhysicalDeviceLineRasterizationPropertiesEXT* in_struct);
+    safe_VkPhysicalDeviceLineRasterizationPropertiesEXT(const safe_VkPhysicalDeviceLineRasterizationPropertiesEXT& src);
+    safe_VkPhysicalDeviceLineRasterizationPropertiesEXT& operator=(const safe_VkPhysicalDeviceLineRasterizationPropertiesEXT& src);
+    safe_VkPhysicalDeviceLineRasterizationPropertiesEXT();
+    ~safe_VkPhysicalDeviceLineRasterizationPropertiesEXT();
+    void initialize(const VkPhysicalDeviceLineRasterizationPropertiesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceLineRasterizationPropertiesEXT* src);
+    VkPhysicalDeviceLineRasterizationPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceLineRasterizationPropertiesEXT *>(this); }
+    VkPhysicalDeviceLineRasterizationPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceLineRasterizationPropertiesEXT const *>(this); }
+};
+
+struct safe_VkPipelineRasterizationLineStateCreateInfoEXT {
+    VkStructureType sType;
+    const void* pNext;
+    VkLineRasterizationModeEXT lineRasterizationMode;
+    VkBool32 stippledLineEnable;
+    uint32_t lineStippleFactor;
+    uint16_t lineStipplePattern;
+    safe_VkPipelineRasterizationLineStateCreateInfoEXT(const VkPipelineRasterizationLineStateCreateInfoEXT* in_struct);
+    safe_VkPipelineRasterizationLineStateCreateInfoEXT(const safe_VkPipelineRasterizationLineStateCreateInfoEXT& src);
+    safe_VkPipelineRasterizationLineStateCreateInfoEXT& operator=(const safe_VkPipelineRasterizationLineStateCreateInfoEXT& src);
+    safe_VkPipelineRasterizationLineStateCreateInfoEXT();
+    ~safe_VkPipelineRasterizationLineStateCreateInfoEXT();
+    void initialize(const VkPipelineRasterizationLineStateCreateInfoEXT* in_struct);
+    void initialize(const safe_VkPipelineRasterizationLineStateCreateInfoEXT* src);
+    VkPipelineRasterizationLineStateCreateInfoEXT *ptr() { return reinterpret_cast<VkPipelineRasterizationLineStateCreateInfoEXT *>(this); }
+    VkPipelineRasterizationLineStateCreateInfoEXT const *ptr() const { return reinterpret_cast<VkPipelineRasterizationLineStateCreateInfoEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceHostQueryResetFeaturesEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 hostQueryReset;
+    safe_VkPhysicalDeviceHostQueryResetFeaturesEXT(const VkPhysicalDeviceHostQueryResetFeaturesEXT* in_struct);
+    safe_VkPhysicalDeviceHostQueryResetFeaturesEXT(const safe_VkPhysicalDeviceHostQueryResetFeaturesEXT& src);
+    safe_VkPhysicalDeviceHostQueryResetFeaturesEXT& operator=(const safe_VkPhysicalDeviceHostQueryResetFeaturesEXT& src);
+    safe_VkPhysicalDeviceHostQueryResetFeaturesEXT();
+    ~safe_VkPhysicalDeviceHostQueryResetFeaturesEXT();
+    void initialize(const VkPhysicalDeviceHostQueryResetFeaturesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceHostQueryResetFeaturesEXT* src);
+    VkPhysicalDeviceHostQueryResetFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceHostQueryResetFeaturesEXT *>(this); }
+    VkPhysicalDeviceHostQueryResetFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceHostQueryResetFeaturesEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 indexTypeUint8;
+    safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT(const VkPhysicalDeviceIndexTypeUint8FeaturesEXT* in_struct);
+    safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT(const safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT& src);
+    safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT& operator=(const safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT& src);
+    safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT();
+    ~safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT();
+    void initialize(const VkPhysicalDeviceIndexTypeUint8FeaturesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceIndexTypeUint8FeaturesEXT* src);
+    VkPhysicalDeviceIndexTypeUint8FeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceIndexTypeUint8FeaturesEXT *>(this); }
+    VkPhysicalDeviceIndexTypeUint8FeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceIndexTypeUint8FeaturesEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 shaderDemoteToHelperInvocation;
+    safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT(const VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT* in_struct);
+    safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT(const safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT& src);
+    safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT& operator=(const safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT& src);
+    safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT();
+    ~safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT();
+    void initialize(const VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT* src);
+    VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT *>(this); }
+    VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkBool32 texelBufferAlignment;
+    safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT(const VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT* in_struct);
+    safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT(const safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT& src);
+    safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT& operator=(const safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT& src);
+    safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT();
+    ~safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT();
+    void initialize(const VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT* src);
+    VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *>(this); }
+    VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>(this); }
+};
+
+struct safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT {
+    VkStructureType sType;
+    void* pNext;
+    VkDeviceSize storageTexelBufferOffsetAlignmentBytes;
+    VkBool32 storageTexelBufferOffsetSingleTexelAlignment;
+    VkDeviceSize uniformTexelBufferOffsetAlignmentBytes;
+    VkBool32 uniformTexelBufferOffsetSingleTexelAlignment;
+    safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT(const VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT* in_struct);
+    safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT(const safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT& src);
+    safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT& operator=(const safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT& src);
+    safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT();
+    ~safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT();
+    void initialize(const VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT* in_struct);
+    void initialize(const safe_VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT* src);
+    VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT *ptr() { return reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT *>(this); }
+    VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const *ptr() const { return reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const *>(this); }
+};
diff --git a/src/third_party/vulkan-validation-layers/src/layers/generated/vk_typemap_helper.h b/src/third_party/vulkan-validation-layers/src/layers/generated/vk_typemap_helper.h
new file mode 100644
index 0000000..f3d4781
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/generated/vk_typemap_helper.h
@@ -0,0 +1,3680 @@
+// *** THIS FILE IS GENERATED - DO NOT EDIT ***
+// See helper_file_generator.py for modifications
+
+
+/***************************************************************************
+ *
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
+ * Author: Tobin Ehlis <tobine@google.com>
+ * Author: Chris Forbes <chrisforbes@google.com>
+ * Author: John Zulauf <jzulauf@lunarg.com>
+ *
+ ****************************************************************************/
+
+#pragma once
+#include <vulkan/vulkan.h>
+
+// These empty generic templates are specialized for each type with sType
+// members and for each sType -- providing a two-way map between structure
+// types and sTypes.
+
+template <VkStructureType id> struct LvlSTypeMap {};
+template <typename T> struct LvlTypeMap {};
+
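+// --- Illustrative sketch: how these maps are typically consumed (not generator output) ---
+// The two primary templates above give a compile-time, two-way mapping between a Vulkan
+// structure and its VkStructureType value. A typical consumer walks a pNext chain and
+// recovers a typed pointer from it. The helper below is a minimal sketch with a
+// hypothetical name, assuming a Vulkan 1.1+ vulkan.h (for VkBaseInStructure); it is not
+// an API provided by this header.
+template <typename T>
+const T *FindInPNextChainSketch(const void *next) {
+    // Walk the chain, comparing each node's sType with the one registered for T.
+    const VkBaseInStructure *current = reinterpret_cast<const VkBaseInStructure *>(next);
+    while (current) {
+        if (current->sType == LvlTypeMap<T>::kSType) {
+            return reinterpret_cast<const T *>(current);
+        }
+        current = current->pNext;
+    }
+    return nullptr;  // no structure of type T was chained
+}
+// Example use:
+//   const auto *dedicated =
+//       FindInPNextChainSketch<VkMemoryDedicatedAllocateInfo>(alloc_info.pNext);
+// ------------------------------------------------------------------------------------------
+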
+// Map type VkApplicationInfo to id VK_STRUCTURE_TYPE_APPLICATION_INFO
+template <> struct LvlTypeMap<VkApplicationInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_APPLICATION_INFO> {
+    typedef VkApplicationInfo Type;
+};
+
+// Map type VkInstanceCreateInfo to id VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO
+template <> struct LvlTypeMap<VkInstanceCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO> {
+    typedef VkInstanceCreateInfo Type;
+};
+
+// Map type VkDeviceQueueCreateInfo to id VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO
+template <> struct LvlTypeMap<VkDeviceQueueCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO> {
+    typedef VkDeviceQueueCreateInfo Type;
+};
+
+// Map type VkDeviceCreateInfo to id VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO
+template <> struct LvlTypeMap<VkDeviceCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO> {
+    typedef VkDeviceCreateInfo Type;
+};
+
+// Map type VkSubmitInfo to id VK_STRUCTURE_TYPE_SUBMIT_INFO
+template <> struct LvlTypeMap<VkSubmitInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SUBMIT_INFO> {
+    typedef VkSubmitInfo Type;
+};
+
+// Map type VkMemoryAllocateInfo to id VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO
+template <> struct LvlTypeMap<VkMemoryAllocateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO> {
+    typedef VkMemoryAllocateInfo Type;
+};
+
+// Map type VkMappedMemoryRange to id VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE
+template <> struct LvlTypeMap<VkMappedMemoryRange> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE> {
+    typedef VkMappedMemoryRange Type;
+};
+
+// Map type VkBindSparseInfo to id VK_STRUCTURE_TYPE_BIND_SPARSE_INFO
+template <> struct LvlTypeMap<VkBindSparseInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BIND_SPARSE_INFO> {
+    typedef VkBindSparseInfo Type;
+};
+
+// Map type VkFenceCreateInfo to id VK_STRUCTURE_TYPE_FENCE_CREATE_INFO
+template <> struct LvlTypeMap<VkFenceCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_FENCE_CREATE_INFO> {
+    typedef VkFenceCreateInfo Type;
+};
+
+// Map type VkSemaphoreCreateInfo to id VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO
+template <> struct LvlTypeMap<VkSemaphoreCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO> {
+    typedef VkSemaphoreCreateInfo Type;
+};
+
+// Map type VkEventCreateInfo to id VK_STRUCTURE_TYPE_EVENT_CREATE_INFO
+template <> struct LvlTypeMap<VkEventCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EVENT_CREATE_INFO> {
+    typedef VkEventCreateInfo Type;
+};
+
+// Map type VkQueryPoolCreateInfo to id VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO
+template <> struct LvlTypeMap<VkQueryPoolCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO> {
+    typedef VkQueryPoolCreateInfo Type;
+};
+
+// Map type VkBufferCreateInfo to id VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO
+template <> struct LvlTypeMap<VkBufferCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO> {
+    typedef VkBufferCreateInfo Type;
+};
+
+// Map type VkBufferViewCreateInfo to id VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO
+template <> struct LvlTypeMap<VkBufferViewCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO> {
+    typedef VkBufferViewCreateInfo Type;
+};
+
+// Map type VkImageCreateInfo to id VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO
+template <> struct LvlTypeMap<VkImageCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO> {
+    typedef VkImageCreateInfo Type;
+};
+
+// Map type VkImageViewCreateInfo to id VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO
+template <> struct LvlTypeMap<VkImageViewCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO> {
+    typedef VkImageViewCreateInfo Type;
+};
+
+// Map type VkShaderModuleCreateInfo to id VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO
+template <> struct LvlTypeMap<VkShaderModuleCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO> {
+    typedef VkShaderModuleCreateInfo Type;
+};
+
+// Map type VkPipelineCacheCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO
+template <> struct LvlTypeMap<VkPipelineCacheCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO> {
+    typedef VkPipelineCacheCreateInfo Type;
+};
+
+// Map type VkPipelineShaderStageCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO
+template <> struct LvlTypeMap<VkPipelineShaderStageCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO> {
+    typedef VkPipelineShaderStageCreateInfo Type;
+};
+
+// Map type VkPipelineVertexInputStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO
+template <> struct LvlTypeMap<VkPipelineVertexInputStateCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO> {
+    typedef VkPipelineVertexInputStateCreateInfo Type;
+};
+
+// Map type VkPipelineInputAssemblyStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO
+template <> struct LvlTypeMap<VkPipelineInputAssemblyStateCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO> {
+    typedef VkPipelineInputAssemblyStateCreateInfo Type;
+};
+
+// Map type VkPipelineTessellationStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO
+template <> struct LvlTypeMap<VkPipelineTessellationStateCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO> {
+    typedef VkPipelineTessellationStateCreateInfo Type;
+};
+
+// Map type VkPipelineViewportStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO
+template <> struct LvlTypeMap<VkPipelineViewportStateCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO> {
+    typedef VkPipelineViewportStateCreateInfo Type;
+};
+
+// Map type VkPipelineRasterizationStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO
+template <> struct LvlTypeMap<VkPipelineRasterizationStateCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO> {
+    typedef VkPipelineRasterizationStateCreateInfo Type;
+};
+
+// Map type VkPipelineMultisampleStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO
+template <> struct LvlTypeMap<VkPipelineMultisampleStateCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO> {
+    typedef VkPipelineMultisampleStateCreateInfo Type;
+};
+
+// Map type VkPipelineDepthStencilStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO
+template <> struct LvlTypeMap<VkPipelineDepthStencilStateCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO> {
+    typedef VkPipelineDepthStencilStateCreateInfo Type;
+};
+
+// Map type VkPipelineColorBlendStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO
+template <> struct LvlTypeMap<VkPipelineColorBlendStateCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO> {
+    typedef VkPipelineColorBlendStateCreateInfo Type;
+};
+
+// Map type VkPipelineDynamicStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO
+template <> struct LvlTypeMap<VkPipelineDynamicStateCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO> {
+    typedef VkPipelineDynamicStateCreateInfo Type;
+};
+
+// Map type VkGraphicsPipelineCreateInfo to id VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO
+template <> struct LvlTypeMap<VkGraphicsPipelineCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO> {
+    typedef VkGraphicsPipelineCreateInfo Type;
+};
+
+// Map type VkComputePipelineCreateInfo to id VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO
+template <> struct LvlTypeMap<VkComputePipelineCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO> {
+    typedef VkComputePipelineCreateInfo Type;
+};
+
+// Map type VkPipelineLayoutCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
+template <> struct LvlTypeMap<VkPipelineLayoutCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO> {
+    typedef VkPipelineLayoutCreateInfo Type;
+};
+
+// Map type VkSamplerCreateInfo to id VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO
+template <> struct LvlTypeMap<VkSamplerCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO> {
+    typedef VkSamplerCreateInfo Type;
+};
+
+// Map type VkDescriptorSetLayoutCreateInfo to id VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
+template <> struct LvlTypeMap<VkDescriptorSetLayoutCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO> {
+    typedef VkDescriptorSetLayoutCreateInfo Type;
+};
+
+// Map type VkDescriptorPoolCreateInfo to id VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO
+template <> struct LvlTypeMap<VkDescriptorPoolCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO> {
+    typedef VkDescriptorPoolCreateInfo Type;
+};
+
+// Map type VkDescriptorSetAllocateInfo to id VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO
+template <> struct LvlTypeMap<VkDescriptorSetAllocateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO> {
+    typedef VkDescriptorSetAllocateInfo Type;
+};
+
+// Map type VkWriteDescriptorSet to id VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
+template <> struct LvlTypeMap<VkWriteDescriptorSet> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET> {
+    typedef VkWriteDescriptorSet Type;
+};
+
+// Map type VkCopyDescriptorSet to id VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET
+template <> struct LvlTypeMap<VkCopyDescriptorSet> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET> {
+    typedef VkCopyDescriptorSet Type;
+};
+
+// Map type VkFramebufferCreateInfo to id VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO
+template <> struct LvlTypeMap<VkFramebufferCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO> {
+    typedef VkFramebufferCreateInfo Type;
+};
+
+// Map type VkRenderPassCreateInfo to id VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO
+template <> struct LvlTypeMap<VkRenderPassCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO> {
+    typedef VkRenderPassCreateInfo Type;
+};
+
+// Map type VkCommandPoolCreateInfo to id VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO
+template <> struct LvlTypeMap<VkCommandPoolCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO> {
+    typedef VkCommandPoolCreateInfo Type;
+};
+
+// Map type VkCommandBufferAllocateInfo to id VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO
+template <> struct LvlTypeMap<VkCommandBufferAllocateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO> {
+    typedef VkCommandBufferAllocateInfo Type;
+};
+
+// Map type VkCommandBufferInheritanceInfo to id VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO
+template <> struct LvlTypeMap<VkCommandBufferInheritanceInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO> {
+    typedef VkCommandBufferInheritanceInfo Type;
+};
+
+// Map type VkCommandBufferBeginInfo to id VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO
+template <> struct LvlTypeMap<VkCommandBufferBeginInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO> {
+    typedef VkCommandBufferBeginInfo Type;
+};
+
+// Map type VkMemoryBarrier to id VK_STRUCTURE_TYPE_MEMORY_BARRIER
+template <> struct LvlTypeMap<VkMemoryBarrier> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_BARRIER> {
+    typedef VkMemoryBarrier Type;
+};
+
+// Map type VkBufferMemoryBarrier to id VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER
+template <> struct LvlTypeMap<VkBufferMemoryBarrier> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER> {
+    typedef VkBufferMemoryBarrier Type;
+};
+
+// Map type VkImageMemoryBarrier to id VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER
+template <> struct LvlTypeMap<VkImageMemoryBarrier> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER> {
+    typedef VkImageMemoryBarrier Type;
+};
+
+// Map type VkRenderPassBeginInfo to id VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO
+template <> struct LvlTypeMap<VkRenderPassBeginInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO> {
+    typedef VkRenderPassBeginInfo Type;
+};
+
+// Map type VkPhysicalDeviceSubgroupProperties to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES
+template <> struct LvlTypeMap<VkPhysicalDeviceSubgroupProperties> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES> {
+    typedef VkPhysicalDeviceSubgroupProperties Type;
+};
+
+// Map type VkBindBufferMemoryInfo to id VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO
+template <> struct LvlTypeMap<VkBindBufferMemoryInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO> {
+    typedef VkBindBufferMemoryInfo Type;
+};
+
+// Map type VkBindImageMemoryInfo to id VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO
+template <> struct LvlTypeMap<VkBindImageMemoryInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO> {
+    typedef VkBindImageMemoryInfo Type;
+};
+
+// Map type VkPhysicalDevice16BitStorageFeatures to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES
+template <> struct LvlTypeMap<VkPhysicalDevice16BitStorageFeatures> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES> {
+    typedef VkPhysicalDevice16BitStorageFeatures Type;
+};
+
+// Map type VkMemoryDedicatedRequirements to id VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS
+template <> struct LvlTypeMap<VkMemoryDedicatedRequirements> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS> {
+    typedef VkMemoryDedicatedRequirements Type;
+};
+
+// Map type VkMemoryDedicatedAllocateInfo to id VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO
+template <> struct LvlTypeMap<VkMemoryDedicatedAllocateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO> {
+    typedef VkMemoryDedicatedAllocateInfo Type;
+};
+
+// Map type VkMemoryAllocateFlagsInfo to id VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO
+template <> struct LvlTypeMap<VkMemoryAllocateFlagsInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO> {
+    typedef VkMemoryAllocateFlagsInfo Type;
+};
+
+// Map type VkDeviceGroupRenderPassBeginInfo to id VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO
+template <> struct LvlTypeMap<VkDeviceGroupRenderPassBeginInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO> {
+    typedef VkDeviceGroupRenderPassBeginInfo Type;
+};
+
+// Map type VkDeviceGroupCommandBufferBeginInfo to id VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO
+template <> struct LvlTypeMap<VkDeviceGroupCommandBufferBeginInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO> {
+    typedef VkDeviceGroupCommandBufferBeginInfo Type;
+};
+
+// Map type VkDeviceGroupSubmitInfo to id VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO
+template <> struct LvlTypeMap<VkDeviceGroupSubmitInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO> {
+    typedef VkDeviceGroupSubmitInfo Type;
+};
+
+// Map type VkDeviceGroupBindSparseInfo to id VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO
+template <> struct LvlTypeMap<VkDeviceGroupBindSparseInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO> {
+    typedef VkDeviceGroupBindSparseInfo Type;
+};
+
+// Map type VkBindBufferMemoryDeviceGroupInfo to id VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO
+template <> struct LvlTypeMap<VkBindBufferMemoryDeviceGroupInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO> {
+    typedef VkBindBufferMemoryDeviceGroupInfo Type;
+};
+
+// Map type VkBindImageMemoryDeviceGroupInfo to id VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO
+template <> struct LvlTypeMap<VkBindImageMemoryDeviceGroupInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO> {
+    typedef VkBindImageMemoryDeviceGroupInfo Type;
+};
+
+// Map type VkPhysicalDeviceGroupProperties to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES
+template <> struct LvlTypeMap<VkPhysicalDeviceGroupProperties> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES> {
+    typedef VkPhysicalDeviceGroupProperties Type;
+};
+
+// Map type VkDeviceGroupDeviceCreateInfo to id VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO
+template <> struct LvlTypeMap<VkDeviceGroupDeviceCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO> {
+    typedef VkDeviceGroupDeviceCreateInfo Type;
+};
+
+// Map type VkBufferMemoryRequirementsInfo2 to id VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2
+template <> struct LvlTypeMap<VkBufferMemoryRequirementsInfo2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2> {
+    typedef VkBufferMemoryRequirementsInfo2 Type;
+};
+
+// Map type VkImageMemoryRequirementsInfo2 to id VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2
+template <> struct LvlTypeMap<VkImageMemoryRequirementsInfo2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2> {
+    typedef VkImageMemoryRequirementsInfo2 Type;
+};
+
+// Map type VkImageSparseMemoryRequirementsInfo2 to id VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2
+template <> struct LvlTypeMap<VkImageSparseMemoryRequirementsInfo2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2> {
+    typedef VkImageSparseMemoryRequirementsInfo2 Type;
+};
+
+// Map type VkMemoryRequirements2 to id VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2
+template <> struct LvlTypeMap<VkMemoryRequirements2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2> {
+    typedef VkMemoryRequirements2 Type;
+};
+
+// Map type VkSparseImageMemoryRequirements2 to id VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2
+template <> struct LvlTypeMap<VkSparseImageMemoryRequirements2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2> {
+    typedef VkSparseImageMemoryRequirements2 Type;
+};
+
+// Map type VkPhysicalDeviceFeatures2 to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2
+template <> struct LvlTypeMap<VkPhysicalDeviceFeatures2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2> {
+    typedef VkPhysicalDeviceFeatures2 Type;
+};
+
+// Map type VkPhysicalDeviceProperties2 to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2
+template <> struct LvlTypeMap<VkPhysicalDeviceProperties2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2> {
+    typedef VkPhysicalDeviceProperties2 Type;
+};
+
+// Map type VkFormatProperties2 to id VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2
+template <> struct LvlTypeMap<VkFormatProperties2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2> {
+    typedef VkFormatProperties2 Type;
+};
+
+// Map type VkImageFormatProperties2 to id VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2
+template <> struct LvlTypeMap<VkImageFormatProperties2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2> {
+    typedef VkImageFormatProperties2 Type;
+};
+
+// Map type VkPhysicalDeviceImageFormatInfo2 to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2
+template <> struct LvlTypeMap<VkPhysicalDeviceImageFormatInfo2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2> {
+    typedef VkPhysicalDeviceImageFormatInfo2 Type;
+};
+
+// Map type VkQueueFamilyProperties2 to id VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2
+template <> struct LvlTypeMap<VkQueueFamilyProperties2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2> {
+    typedef VkQueueFamilyProperties2 Type;
+};
+
+// Map type VkPhysicalDeviceMemoryProperties2 to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2
+template <> struct LvlTypeMap<VkPhysicalDeviceMemoryProperties2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2> {
+    typedef VkPhysicalDeviceMemoryProperties2 Type;
+};
+
+// Map type VkSparseImageFormatProperties2 to id VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2
+template <> struct LvlTypeMap<VkSparseImageFormatProperties2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2> {
+    typedef VkSparseImageFormatProperties2 Type;
+};
+
+// Map type VkPhysicalDeviceSparseImageFormatInfo2 to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2
+template <> struct LvlTypeMap<VkPhysicalDeviceSparseImageFormatInfo2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2> {
+    typedef VkPhysicalDeviceSparseImageFormatInfo2 Type;
+};
+
+// Map type VkPhysicalDevicePointClippingProperties to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES
+template <> struct LvlTypeMap<VkPhysicalDevicePointClippingProperties> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES> {
+    typedef VkPhysicalDevicePointClippingProperties Type;
+};
+
+// Map type VkRenderPassInputAttachmentAspectCreateInfo to id VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO
+template <> struct LvlTypeMap<VkRenderPassInputAttachmentAspectCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO> {
+    typedef VkRenderPassInputAttachmentAspectCreateInfo Type;
+};
+
+// Map type VkImageViewUsageCreateInfo to id VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO
+template <> struct LvlTypeMap<VkImageViewUsageCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO> {
+    typedef VkImageViewUsageCreateInfo Type;
+};
+
+// Map type VkPipelineTessellationDomainOriginStateCreateInfo to id VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO
+template <> struct LvlTypeMap<VkPipelineTessellationDomainOriginStateCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO> {
+    typedef VkPipelineTessellationDomainOriginStateCreateInfo Type;
+};
+
+// Map type VkRenderPassMultiviewCreateInfo to id VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO
+template <> struct LvlTypeMap<VkRenderPassMultiviewCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO> {
+    typedef VkRenderPassMultiviewCreateInfo Type;
+};
+
+// Map type VkPhysicalDeviceMultiviewFeatures to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES
+template <> struct LvlTypeMap<VkPhysicalDeviceMultiviewFeatures> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES> {
+    typedef VkPhysicalDeviceMultiviewFeatures Type;
+};
+
+// Map type VkPhysicalDeviceMultiviewProperties to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES
+template <> struct LvlTypeMap<VkPhysicalDeviceMultiviewProperties> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES> {
+    typedef VkPhysicalDeviceMultiviewProperties Type;
+};
+
+// Map type VkPhysicalDeviceVariablePointersFeatures to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES
+template <> struct LvlTypeMap<VkPhysicalDeviceVariablePointersFeatures> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES> {
+    typedef VkPhysicalDeviceVariablePointersFeatures Type;
+};
+
+// Map type VkPhysicalDeviceProtectedMemoryFeatures to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES
+template <> struct LvlTypeMap<VkPhysicalDeviceProtectedMemoryFeatures> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES> {
+    typedef VkPhysicalDeviceProtectedMemoryFeatures Type;
+};
+
+// Map type VkPhysicalDeviceProtectedMemoryProperties to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES
+template <> struct LvlTypeMap<VkPhysicalDeviceProtectedMemoryProperties> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES> {
+    typedef VkPhysicalDeviceProtectedMemoryProperties Type;
+};
+
+// Map type VkDeviceQueueInfo2 to id VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2
+template <> struct LvlTypeMap<VkDeviceQueueInfo2> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2> {
+    typedef VkDeviceQueueInfo2 Type;
+};
+
+// Map type VkProtectedSubmitInfo to id VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO
+template <> struct LvlTypeMap<VkProtectedSubmitInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO> {
+    typedef VkProtectedSubmitInfo Type;
+};
+
+// Map type VkSamplerYcbcrConversionCreateInfo to id VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO
+template <> struct LvlTypeMap<VkSamplerYcbcrConversionCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO> {
+    typedef VkSamplerYcbcrConversionCreateInfo Type;
+};
+
+// Map type VkSamplerYcbcrConversionInfo to id VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO
+template <> struct LvlTypeMap<VkSamplerYcbcrConversionInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO> {
+    typedef VkSamplerYcbcrConversionInfo Type;
+};
+
+// Map type VkBindImagePlaneMemoryInfo to id VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO
+template <> struct LvlTypeMap<VkBindImagePlaneMemoryInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO> {
+    typedef VkBindImagePlaneMemoryInfo Type;
+};
+
+// Map type VkImagePlaneMemoryRequirementsInfo to id VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO
+template <> struct LvlTypeMap<VkImagePlaneMemoryRequirementsInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO> {
+    typedef VkImagePlaneMemoryRequirementsInfo Type;
+};
+
+// Map type VkPhysicalDeviceSamplerYcbcrConversionFeatures to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES
+template <> struct LvlTypeMap<VkPhysicalDeviceSamplerYcbcrConversionFeatures> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES> {
+    typedef VkPhysicalDeviceSamplerYcbcrConversionFeatures Type;
+};
+
+// Map type VkSamplerYcbcrConversionImageFormatProperties to id VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES
+template <> struct LvlTypeMap<VkSamplerYcbcrConversionImageFormatProperties> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES> {
+    typedef VkSamplerYcbcrConversionImageFormatProperties Type;
+};
+
+// Map type VkDescriptorUpdateTemplateCreateInfo to id VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO
+template <> struct LvlTypeMap<VkDescriptorUpdateTemplateCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO> {
+    typedef VkDescriptorUpdateTemplateCreateInfo Type;
+};
+
+// Map type VkPhysicalDeviceExternalImageFormatInfo to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO
+template <> struct LvlTypeMap<VkPhysicalDeviceExternalImageFormatInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO> {
+    typedef VkPhysicalDeviceExternalImageFormatInfo Type;
+};
+
+// Map type VkExternalImageFormatProperties to id VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES
+template <> struct LvlTypeMap<VkExternalImageFormatProperties> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES> {
+    typedef VkExternalImageFormatProperties Type;
+};
+
+// Map type VkPhysicalDeviceExternalBufferInfo to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO
+template <> struct LvlTypeMap<VkPhysicalDeviceExternalBufferInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO> {
+    typedef VkPhysicalDeviceExternalBufferInfo Type;
+};
+
+// Map type VkExternalBufferProperties to id VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES
+template <> struct LvlTypeMap<VkExternalBufferProperties> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES> {
+    typedef VkExternalBufferProperties Type;
+};
+
+// Map type VkPhysicalDeviceIDProperties to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES
+template <> struct LvlTypeMap<VkPhysicalDeviceIDProperties> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES> {
+    typedef VkPhysicalDeviceIDProperties Type;
+};
+
+// Map type VkExternalMemoryImageCreateInfo to id VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO
+template <> struct LvlTypeMap<VkExternalMemoryImageCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO> {
+    typedef VkExternalMemoryImageCreateInfo Type;
+};
+
+// Map type VkExternalMemoryBufferCreateInfo to id VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO
+template <> struct LvlTypeMap<VkExternalMemoryBufferCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO> {
+    typedef VkExternalMemoryBufferCreateInfo Type;
+};
+
+// Map type VkExportMemoryAllocateInfo to id VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO
+template <> struct LvlTypeMap<VkExportMemoryAllocateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO> {
+    typedef VkExportMemoryAllocateInfo Type;
+};
+
+// Map type VkPhysicalDeviceExternalFenceInfo to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO
+template <> struct LvlTypeMap<VkPhysicalDeviceExternalFenceInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO> {
+    typedef VkPhysicalDeviceExternalFenceInfo Type;
+};
+
+// Map type VkExternalFenceProperties to id VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES
+template <> struct LvlTypeMap<VkExternalFenceProperties> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES> {
+    typedef VkExternalFenceProperties Type;
+};
+
+// Map type VkExportFenceCreateInfo to id VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO
+template <> struct LvlTypeMap<VkExportFenceCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO> {
+    typedef VkExportFenceCreateInfo Type;
+};
+
+// Map type VkExportSemaphoreCreateInfo to id VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO
+template <> struct LvlTypeMap<VkExportSemaphoreCreateInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO> {
+    typedef VkExportSemaphoreCreateInfo Type;
+};
+
+// Map type VkPhysicalDeviceExternalSemaphoreInfo to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO
+template <> struct LvlTypeMap<VkPhysicalDeviceExternalSemaphoreInfo> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO> {
+    typedef VkPhysicalDeviceExternalSemaphoreInfo Type;
+};
+
+// Map type VkExternalSemaphoreProperties to id VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES
+template <> struct LvlTypeMap<VkExternalSemaphoreProperties> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES> {
+    typedef VkExternalSemaphoreProperties Type;
+};
+
+// Map type VkPhysicalDeviceMaintenance3Properties to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES
+template <> struct LvlTypeMap<VkPhysicalDeviceMaintenance3Properties> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES> {
+    typedef VkPhysicalDeviceMaintenance3Properties Type;
+};
+
+// Map type VkDescriptorSetLayoutSupport to id VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT
+template <> struct LvlTypeMap<VkDescriptorSetLayoutSupport> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT> {
+    typedef VkDescriptorSetLayoutSupport Type;
+};
+
+// Map type VkPhysicalDeviceShaderDrawParametersFeatures to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES
+template <> struct LvlTypeMap<VkPhysicalDeviceShaderDrawParametersFeatures> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES> {
+    typedef VkPhysicalDeviceShaderDrawParametersFeatures Type;
+};
+
+// Map type VkSwapchainCreateInfoKHR to id VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkSwapchainCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR> {
+    typedef VkSwapchainCreateInfoKHR Type;
+};
+
+// Map type VkPresentInfoKHR to id VK_STRUCTURE_TYPE_PRESENT_INFO_KHR
+template <> struct LvlTypeMap<VkPresentInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PRESENT_INFO_KHR> {
+    typedef VkPresentInfoKHR Type;
+};
+
+// Map type VkImageSwapchainCreateInfoKHR to id VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkImageSwapchainCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR> {
+    typedef VkImageSwapchainCreateInfoKHR Type;
+};
+
+// Map type VkBindImageMemorySwapchainInfoKHR to id VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR
+template <> struct LvlTypeMap<VkBindImageMemorySwapchainInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR> {
+    typedef VkBindImageMemorySwapchainInfoKHR Type;
+};
+
+// Map type VkAcquireNextImageInfoKHR to id VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR
+template <> struct LvlTypeMap<VkAcquireNextImageInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR> {
+    typedef VkAcquireNextImageInfoKHR Type;
+};
+
+// Map type VkDeviceGroupPresentCapabilitiesKHR to id VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR
+template <> struct LvlTypeMap<VkDeviceGroupPresentCapabilitiesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR> {
+    typedef VkDeviceGroupPresentCapabilitiesKHR Type;
+};
+
+// Map type VkDeviceGroupPresentInfoKHR to id VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR
+template <> struct LvlTypeMap<VkDeviceGroupPresentInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR> {
+    typedef VkDeviceGroupPresentInfoKHR Type;
+};
+
+// Map type VkDeviceGroupSwapchainCreateInfoKHR to id VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkDeviceGroupSwapchainCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR> {
+    typedef VkDeviceGroupSwapchainCreateInfoKHR Type;
+};
+
+// Map type VkDisplayModeCreateInfoKHR to id VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkDisplayModeCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR> {
+    typedef VkDisplayModeCreateInfoKHR Type;
+};
+
+// Map type VkDisplaySurfaceCreateInfoKHR to id VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkDisplaySurfaceCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR> {
+    typedef VkDisplaySurfaceCreateInfoKHR Type;
+};
+
+// Map type VkDisplayPresentInfoKHR to id VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR
+template <> struct LvlTypeMap<VkDisplayPresentInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR> {
+    typedef VkDisplayPresentInfoKHR Type;
+};
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+// Map type VkXlibSurfaceCreateInfoKHR to id VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkXlibSurfaceCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR> {
+    typedef VkXlibSurfaceCreateInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_XLIB_KHR
+#ifdef VK_USE_PLATFORM_XCB_KHR
+// Map type VkXcbSurfaceCreateInfoKHR to id VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkXcbSurfaceCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR> {
+    typedef VkXcbSurfaceCreateInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_XCB_KHR
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+// Map type VkWaylandSurfaceCreateInfoKHR to id VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkWaylandSurfaceCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR> {
+    typedef VkWaylandSurfaceCreateInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+// Map type VkAndroidSurfaceCreateInfoKHR to id VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkAndroidSurfaceCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR> {
+    typedef VkAndroidSurfaceCreateInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkWin32SurfaceCreateInfoKHR to id VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkWin32SurfaceCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR> {
+    typedef VkWin32SurfaceCreateInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkImportMemoryWin32HandleInfoKHR to id VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR
+template <> struct LvlTypeMap<VkImportMemoryWin32HandleInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR> {
+    typedef VkImportMemoryWin32HandleInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkExportMemoryWin32HandleInfoKHR to id VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR
+template <> struct LvlTypeMap<VkExportMemoryWin32HandleInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR> {
+    typedef VkExportMemoryWin32HandleInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkMemoryWin32HandlePropertiesKHR to id VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR
+template <> struct LvlTypeMap<VkMemoryWin32HandlePropertiesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR> {
+    typedef VkMemoryWin32HandlePropertiesKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkMemoryGetWin32HandleInfoKHR to id VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR
+template <> struct LvlTypeMap<VkMemoryGetWin32HandleInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR> {
+    typedef VkMemoryGetWin32HandleInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+// Map type VkImportMemoryFdInfoKHR to id VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR
+template <> struct LvlTypeMap<VkImportMemoryFdInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR> {
+    typedef VkImportMemoryFdInfoKHR Type;
+};
+
+// Map type VkMemoryFdPropertiesKHR to id VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR
+template <> struct LvlTypeMap<VkMemoryFdPropertiesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR> {
+    typedef VkMemoryFdPropertiesKHR Type;
+};
+
+// Map type VkMemoryGetFdInfoKHR to id VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR
+template <> struct LvlTypeMap<VkMemoryGetFdInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR> {
+    typedef VkMemoryGetFdInfoKHR Type;
+};
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkWin32KeyedMutexAcquireReleaseInfoKHR to id VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR
+template <> struct LvlTypeMap<VkWin32KeyedMutexAcquireReleaseInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR> {
+    typedef VkWin32KeyedMutexAcquireReleaseInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkImportSemaphoreWin32HandleInfoKHR to id VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR
+template <> struct LvlTypeMap<VkImportSemaphoreWin32HandleInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR> {
+    typedef VkImportSemaphoreWin32HandleInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkExportSemaphoreWin32HandleInfoKHR to id VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR
+template <> struct LvlTypeMap<VkExportSemaphoreWin32HandleInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR> {
+    typedef VkExportSemaphoreWin32HandleInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkD3D12FenceSubmitInfoKHR to id VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR
+template <> struct LvlTypeMap<VkD3D12FenceSubmitInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR> {
+    typedef VkD3D12FenceSubmitInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkSemaphoreGetWin32HandleInfoKHR to id VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR
+template <> struct LvlTypeMap<VkSemaphoreGetWin32HandleInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR> {
+    typedef VkSemaphoreGetWin32HandleInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+// Map type VkImportSemaphoreFdInfoKHR to id VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR
+template <> struct LvlTypeMap<VkImportSemaphoreFdInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR> {
+    typedef VkImportSemaphoreFdInfoKHR Type;
+};
+
+// Map type VkSemaphoreGetFdInfoKHR to id VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR
+template <> struct LvlTypeMap<VkSemaphoreGetFdInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR> {
+    typedef VkSemaphoreGetFdInfoKHR Type;
+};
+
+// Map type VkPhysicalDevicePushDescriptorPropertiesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR
+template <> struct LvlTypeMap<VkPhysicalDevicePushDescriptorPropertiesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR> {
+    typedef VkPhysicalDevicePushDescriptorPropertiesKHR Type;
+};
+
+// Map type VkPhysicalDeviceShaderFloat16Int8FeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceShaderFloat16Int8FeaturesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR> {
+    typedef VkPhysicalDeviceShaderFloat16Int8FeaturesKHR Type;
+};
+
+// Map type VkPresentRegionsKHR to id VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR
+template <> struct LvlTypeMap<VkPresentRegionsKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR> {
+    typedef VkPresentRegionsKHR Type;
+};
+
+// Map type VkPhysicalDeviceImagelessFramebufferFeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceImagelessFramebufferFeaturesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR> {
+    typedef VkPhysicalDeviceImagelessFramebufferFeaturesKHR Type;
+};
+
+// Map type VkFramebufferAttachmentImageInfoKHR to id VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR
+template <> struct LvlTypeMap<VkFramebufferAttachmentImageInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR> {
+    typedef VkFramebufferAttachmentImageInfoKHR Type;
+};
+
+// Map type VkFramebufferAttachmentsCreateInfoKHR to id VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkFramebufferAttachmentsCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR> {
+    typedef VkFramebufferAttachmentsCreateInfoKHR Type;
+};
+
+// Map type VkRenderPassAttachmentBeginInfoKHR to id VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR
+template <> struct LvlTypeMap<VkRenderPassAttachmentBeginInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR> {
+    typedef VkRenderPassAttachmentBeginInfoKHR Type;
+};
+
+// Map type VkAttachmentDescription2KHR to id VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR
+template <> struct LvlTypeMap<VkAttachmentDescription2KHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR> {
+    typedef VkAttachmentDescription2KHR Type;
+};
+
+// Map type VkAttachmentReference2KHR to id VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR
+template <> struct LvlTypeMap<VkAttachmentReference2KHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR> {
+    typedef VkAttachmentReference2KHR Type;
+};
+
+// Map type VkSubpassDescription2KHR to id VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR
+template <> struct LvlTypeMap<VkSubpassDescription2KHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR> {
+    typedef VkSubpassDescription2KHR Type;
+};
+
+// Map type VkSubpassDependency2KHR to id VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR
+template <> struct LvlTypeMap<VkSubpassDependency2KHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR> {
+    typedef VkSubpassDependency2KHR Type;
+};
+
+// Map type VkRenderPassCreateInfo2KHR to id VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR
+template <> struct LvlTypeMap<VkRenderPassCreateInfo2KHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR> {
+    typedef VkRenderPassCreateInfo2KHR Type;
+};
+
+// Map type VkSubpassBeginInfoKHR to id VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR
+template <> struct LvlTypeMap<VkSubpassBeginInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR> {
+    typedef VkSubpassBeginInfoKHR Type;
+};
+
+// Map type VkSubpassEndInfoKHR to id VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR
+template <> struct LvlTypeMap<VkSubpassEndInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR> {
+    typedef VkSubpassEndInfoKHR Type;
+};
+
+// Map type VkSharedPresentSurfaceCapabilitiesKHR to id VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR
+template <> struct LvlTypeMap<VkSharedPresentSurfaceCapabilitiesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR> {
+    typedef VkSharedPresentSurfaceCapabilitiesKHR Type;
+};
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkImportFenceWin32HandleInfoKHR to id VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR
+template <> struct LvlTypeMap<VkImportFenceWin32HandleInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR> {
+    typedef VkImportFenceWin32HandleInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkExportFenceWin32HandleInfoKHR to id VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR
+template <> struct LvlTypeMap<VkExportFenceWin32HandleInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR> {
+    typedef VkExportFenceWin32HandleInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkFenceGetWin32HandleInfoKHR to id VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR
+template <> struct LvlTypeMap<VkFenceGetWin32HandleInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR> {
+    typedef VkFenceGetWin32HandleInfoKHR Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+// Map type VkImportFenceFdInfoKHR to id VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR
+template <> struct LvlTypeMap<VkImportFenceFdInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR> {
+    typedef VkImportFenceFdInfoKHR Type;
+};
+
+// Map type VkFenceGetFdInfoKHR to id VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR
+template <> struct LvlTypeMap<VkFenceGetFdInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR> {
+    typedef VkFenceGetFdInfoKHR Type;
+};
+
+// Map type VkPhysicalDevicePerformanceQueryFeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR
+template <> struct LvlTypeMap<VkPhysicalDevicePerformanceQueryFeaturesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR> {
+    typedef VkPhysicalDevicePerformanceQueryFeaturesKHR Type;
+};
+
+// Map type VkPhysicalDevicePerformanceQueryPropertiesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR
+template <> struct LvlTypeMap<VkPhysicalDevicePerformanceQueryPropertiesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR> {
+    typedef VkPhysicalDevicePerformanceQueryPropertiesKHR Type;
+};
+
+// Map type VkPerformanceCounterKHR to id VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR
+template <> struct LvlTypeMap<VkPerformanceCounterKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR> {
+    typedef VkPerformanceCounterKHR Type;
+};
+
+// Map type VkPerformanceCounterDescriptionKHR to id VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR
+template <> struct LvlTypeMap<VkPerformanceCounterDescriptionKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR> {
+    typedef VkPerformanceCounterDescriptionKHR Type;
+};
+
+// Map type VkQueryPoolPerformanceCreateInfoKHR to id VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkQueryPoolPerformanceCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR> {
+    typedef VkQueryPoolPerformanceCreateInfoKHR Type;
+};
+
+// Map type VkAcquireProfilingLockInfoKHR to id VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR
+template <> struct LvlTypeMap<VkAcquireProfilingLockInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR> {
+    typedef VkAcquireProfilingLockInfoKHR Type;
+};
+
+// Map type VkPerformanceQuerySubmitInfoKHR to id VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR
+template <> struct LvlTypeMap<VkPerformanceQuerySubmitInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR> {
+    typedef VkPerformanceQuerySubmitInfoKHR Type;
+};
+
+// Map type VkPhysicalDeviceSurfaceInfo2KHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceSurfaceInfo2KHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR> {
+    typedef VkPhysicalDeviceSurfaceInfo2KHR Type;
+};
+
+// Map type VkSurfaceCapabilities2KHR to id VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR
+template <> struct LvlTypeMap<VkSurfaceCapabilities2KHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR> {
+    typedef VkSurfaceCapabilities2KHR Type;
+};
+
+// Map type VkSurfaceFormat2KHR to id VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR
+template <> struct LvlTypeMap<VkSurfaceFormat2KHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR> {
+    typedef VkSurfaceFormat2KHR Type;
+};
+
+// Map type VkDisplayProperties2KHR to id VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR
+template <> struct LvlTypeMap<VkDisplayProperties2KHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR> {
+    typedef VkDisplayProperties2KHR Type;
+};
+
+// Map type VkDisplayPlaneProperties2KHR to id VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR
+template <> struct LvlTypeMap<VkDisplayPlaneProperties2KHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR> {
+    typedef VkDisplayPlaneProperties2KHR Type;
+};
+
+// Map type VkDisplayModeProperties2KHR to id VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR
+template <> struct LvlTypeMap<VkDisplayModeProperties2KHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR> {
+    typedef VkDisplayModeProperties2KHR Type;
+};
+
+// Map type VkDisplayPlaneInfo2KHR to id VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR
+template <> struct LvlTypeMap<VkDisplayPlaneInfo2KHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR> {
+    typedef VkDisplayPlaneInfo2KHR Type;
+};
+
+// Map type VkDisplayPlaneCapabilities2KHR to id VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR
+template <> struct LvlTypeMap<VkDisplayPlaneCapabilities2KHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR> {
+    typedef VkDisplayPlaneCapabilities2KHR Type;
+};
+
+// Map type VkImageFormatListCreateInfoKHR to id VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkImageFormatListCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR> {
+    typedef VkImageFormatListCreateInfoKHR Type;
+};
+
+// Map type VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR> {
+    typedef VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR Type;
+};
+
+// Map type VkPhysicalDevice8BitStorageFeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR
+template <> struct LvlTypeMap<VkPhysicalDevice8BitStorageFeaturesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR> {
+    typedef VkPhysicalDevice8BitStorageFeaturesKHR Type;
+};
+
+// Map type VkPhysicalDeviceShaderAtomicInt64FeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceShaderAtomicInt64FeaturesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR> {
+    typedef VkPhysicalDeviceShaderAtomicInt64FeaturesKHR Type;
+};
+
+// Map type VkPhysicalDeviceShaderClockFeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceShaderClockFeaturesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR> {
+    typedef VkPhysicalDeviceShaderClockFeaturesKHR Type;
+};
+
+// Map type VkPhysicalDeviceDriverPropertiesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceDriverPropertiesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR> {
+    typedef VkPhysicalDeviceDriverPropertiesKHR Type;
+};
+
+// Map type VkPhysicalDeviceFloatControlsPropertiesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceFloatControlsPropertiesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR> {
+    typedef VkPhysicalDeviceFloatControlsPropertiesKHR Type;
+};
+
+// Map type VkSubpassDescriptionDepthStencilResolveKHR to id VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR
+template <> struct LvlTypeMap<VkSubpassDescriptionDepthStencilResolveKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR> {
+    typedef VkSubpassDescriptionDepthStencilResolveKHR Type;
+};
+
+// Map type VkPhysicalDeviceDepthStencilResolvePropertiesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceDepthStencilResolvePropertiesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR> {
+    typedef VkPhysicalDeviceDepthStencilResolvePropertiesKHR Type;
+};
+
+// Map type VkPhysicalDeviceTimelineSemaphoreFeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceTimelineSemaphoreFeaturesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR> {
+    typedef VkPhysicalDeviceTimelineSemaphoreFeaturesKHR Type;
+};
+
+// Map type VkPhysicalDeviceTimelineSemaphorePropertiesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceTimelineSemaphorePropertiesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR> {
+    typedef VkPhysicalDeviceTimelineSemaphorePropertiesKHR Type;
+};
+
+// Map type VkSemaphoreTypeCreateInfoKHR to id VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkSemaphoreTypeCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR> {
+    typedef VkSemaphoreTypeCreateInfoKHR Type;
+};
+
+// Map type VkTimelineSemaphoreSubmitInfoKHR to id VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR
+template <> struct LvlTypeMap<VkTimelineSemaphoreSubmitInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR> {
+    typedef VkTimelineSemaphoreSubmitInfoKHR Type;
+};
+
+// Map type VkSemaphoreWaitInfoKHR to id VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR
+template <> struct LvlTypeMap<VkSemaphoreWaitInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR> {
+    typedef VkSemaphoreWaitInfoKHR Type;
+};
+
+// Map type VkSemaphoreSignalInfoKHR to id VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR
+template <> struct LvlTypeMap<VkSemaphoreSignalInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR> {
+    typedef VkSemaphoreSignalInfoKHR Type;
+};
+
+// Map type VkPhysicalDeviceVulkanMemoryModelFeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceVulkanMemoryModelFeaturesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR> {
+    typedef VkPhysicalDeviceVulkanMemoryModelFeaturesKHR Type;
+};
+
+// Map type VkSurfaceProtectedCapabilitiesKHR to id VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR
+template <> struct LvlTypeMap<VkSurfaceProtectedCapabilitiesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR> {
+    typedef VkSurfaceProtectedCapabilitiesKHR Type;
+};
+
+// Map type VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR> {
+    typedef VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR Type;
+};
+
+// Map type VkAttachmentReferenceStencilLayoutKHR to id VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR
+template <> struct LvlTypeMap<VkAttachmentReferenceStencilLayoutKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR> {
+    typedef VkAttachmentReferenceStencilLayoutKHR Type;
+};
+
+// Map type VkAttachmentDescriptionStencilLayoutKHR to id VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR
+template <> struct LvlTypeMap<VkAttachmentDescriptionStencilLayoutKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR> {
+    typedef VkAttachmentDescriptionStencilLayoutKHR Type;
+};
+
+// Map type VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR> {
+    typedef VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR Type;
+};
+
+// Map type VkPhysicalDeviceBufferDeviceAddressFeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR
+template <> struct LvlTypeMap<VkPhysicalDeviceBufferDeviceAddressFeaturesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR> {
+    typedef VkPhysicalDeviceBufferDeviceAddressFeaturesKHR Type;
+};
+
+// Map type VkBufferDeviceAddressInfoKHR to id VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR
+template <> struct LvlTypeMap<VkBufferDeviceAddressInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR> {
+    typedef VkBufferDeviceAddressInfoKHR Type;
+};
+
+// Map type VkBufferOpaqueCaptureAddressCreateInfoKHR to id VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR
+template <> struct LvlTypeMap<VkBufferOpaqueCaptureAddressCreateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR> {
+    typedef VkBufferOpaqueCaptureAddressCreateInfoKHR Type;
+};
+
+// Map type VkMemoryOpaqueCaptureAddressAllocateInfoKHR to id VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR
+template <> struct LvlTypeMap<VkMemoryOpaqueCaptureAddressAllocateInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR> {
+    typedef VkMemoryOpaqueCaptureAddressAllocateInfoKHR Type;
+};
+
+// Map type VkDeviceMemoryOpaqueCaptureAddressInfoKHR to id VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR
+template <> struct LvlTypeMap<VkDeviceMemoryOpaqueCaptureAddressInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR> {
+    typedef VkDeviceMemoryOpaqueCaptureAddressInfoKHR Type;
+};
+
+// Map type VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR
+template <> struct LvlTypeMap<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR> {
+    typedef VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR Type;
+};
+
+// Map type VkPipelineInfoKHR to id VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR
+template <> struct LvlTypeMap<VkPipelineInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR> {
+    typedef VkPipelineInfoKHR Type;
+};
+
+// Map type VkPipelineExecutablePropertiesKHR to id VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR
+template <> struct LvlTypeMap<VkPipelineExecutablePropertiesKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR> {
+    typedef VkPipelineExecutablePropertiesKHR Type;
+};
+
+// Map type VkPipelineExecutableInfoKHR to id VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR
+template <> struct LvlTypeMap<VkPipelineExecutableInfoKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR> {
+    typedef VkPipelineExecutableInfoKHR Type;
+};
+
+// Map type VkPipelineExecutableStatisticKHR to id VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR
+template <> struct LvlTypeMap<VkPipelineExecutableStatisticKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR> {
+    typedef VkPipelineExecutableStatisticKHR Type;
+};
+
+// Map type VkPipelineExecutableInternalRepresentationKHR to id VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR
+template <> struct LvlTypeMap<VkPipelineExecutableInternalRepresentationKHR> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR> {
+    typedef VkPipelineExecutableInternalRepresentationKHR Type;
+};
+
+// Map type VkDebugReportCallbackCreateInfoEXT to id VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkDebugReportCallbackCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT> {
+    typedef VkDebugReportCallbackCreateInfoEXT Type;
+};
+
+// Map type VkPipelineRasterizationStateRasterizationOrderAMD to id VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD
+template <> struct LvlTypeMap<VkPipelineRasterizationStateRasterizationOrderAMD> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD> {
+    typedef VkPipelineRasterizationStateRasterizationOrderAMD Type;
+};
+
+// Map type VkDebugMarkerObjectNameInfoEXT to id VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT
+template <> struct LvlTypeMap<VkDebugMarkerObjectNameInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT> {
+    typedef VkDebugMarkerObjectNameInfoEXT Type;
+};
+
+// Map type VkDebugMarkerObjectTagInfoEXT to id VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT
+template <> struct LvlTypeMap<VkDebugMarkerObjectTagInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT> {
+    typedef VkDebugMarkerObjectTagInfoEXT Type;
+};
+
+// Map type VkDebugMarkerMarkerInfoEXT to id VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT
+template <> struct LvlTypeMap<VkDebugMarkerMarkerInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT> {
+    typedef VkDebugMarkerMarkerInfoEXT Type;
+};
+
+// Map type VkDedicatedAllocationImageCreateInfoNV to id VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkDedicatedAllocationImageCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV> {
+    typedef VkDedicatedAllocationImageCreateInfoNV Type;
+};
+
+// Map type VkDedicatedAllocationBufferCreateInfoNV to id VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkDedicatedAllocationBufferCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV> {
+    typedef VkDedicatedAllocationBufferCreateInfoNV Type;
+};
+
+// Map type VkDedicatedAllocationMemoryAllocateInfoNV to id VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV
+template <> struct LvlTypeMap<VkDedicatedAllocationMemoryAllocateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV> {
+    typedef VkDedicatedAllocationMemoryAllocateInfoNV Type;
+};
+
+// Map type VkPhysicalDeviceTransformFeedbackFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceTransformFeedbackFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT> {
+    typedef VkPhysicalDeviceTransformFeedbackFeaturesEXT Type;
+};
+
+// Map type VkPhysicalDeviceTransformFeedbackPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceTransformFeedbackPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceTransformFeedbackPropertiesEXT Type;
+};
+
+// Map type VkPipelineRasterizationStateStreamCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkPipelineRasterizationStateStreamCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT> {
+    typedef VkPipelineRasterizationStateStreamCreateInfoEXT Type;
+};
+
+// Map type VkImageViewHandleInfoNVX to id VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX
+template <> struct LvlTypeMap<VkImageViewHandleInfoNVX> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX> {
+    typedef VkImageViewHandleInfoNVX Type;
+};
+
+// Map type VkTextureLODGatherFormatPropertiesAMD to id VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD
+template <> struct LvlTypeMap<VkTextureLODGatherFormatPropertiesAMD> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD> {
+    typedef VkTextureLODGatherFormatPropertiesAMD Type;
+};
+
+#ifdef VK_USE_PLATFORM_GGP
+// Map type VkStreamDescriptorSurfaceCreateInfoGGP to id VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP
+template <> struct LvlTypeMap<VkStreamDescriptorSurfaceCreateInfoGGP> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP> {
+    typedef VkStreamDescriptorSurfaceCreateInfoGGP Type;
+};
+
+#endif // VK_USE_PLATFORM_GGP
+// Map type VkPhysicalDeviceCornerSampledImageFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceCornerSampledImageFeaturesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV> {
+    typedef VkPhysicalDeviceCornerSampledImageFeaturesNV Type;
+};
+
+// Map type VkExternalMemoryImageCreateInfoNV to id VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkExternalMemoryImageCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV> {
+    typedef VkExternalMemoryImageCreateInfoNV Type;
+};
+
+// Map type VkExportMemoryAllocateInfoNV to id VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV
+template <> struct LvlTypeMap<VkExportMemoryAllocateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV> {
+    typedef VkExportMemoryAllocateInfoNV Type;
+};
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkImportMemoryWin32HandleInfoNV to id VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV
+template <> struct LvlTypeMap<VkImportMemoryWin32HandleInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV> {
+    typedef VkImportMemoryWin32HandleInfoNV Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkExportMemoryWin32HandleInfoNV to id VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV
+template <> struct LvlTypeMap<VkExportMemoryWin32HandleInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV> {
+    typedef VkExportMemoryWin32HandleInfoNV Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkWin32KeyedMutexAcquireReleaseInfoNV to id VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV
+template <> struct LvlTypeMap<VkWin32KeyedMutexAcquireReleaseInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV> {
+    typedef VkWin32KeyedMutexAcquireReleaseInfoNV Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+// Map type VkValidationFlagsEXT to id VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT
+template <> struct LvlTypeMap<VkValidationFlagsEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT> {
+    typedef VkValidationFlagsEXT Type;
+};
+
+#ifdef VK_USE_PLATFORM_VI_NN
+// Map type VkViSurfaceCreateInfoNN to id VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN
+template <> struct LvlTypeMap<VkViSurfaceCreateInfoNN> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN> {
+    typedef VkViSurfaceCreateInfoNN Type;
+};
+
+#endif // VK_USE_PLATFORM_VI_NN
+// Map type VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT> {
+    typedef VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT Type;
+};
+
+// Map type VkImageViewASTCDecodeModeEXT to id VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT
+template <> struct LvlTypeMap<VkImageViewASTCDecodeModeEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT> {
+    typedef VkImageViewASTCDecodeModeEXT Type;
+};
+
+// Map type VkPhysicalDeviceASTCDecodeFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceASTCDecodeFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT> {
+    typedef VkPhysicalDeviceASTCDecodeFeaturesEXT Type;
+};
+
+// Map type VkConditionalRenderingBeginInfoEXT to id VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT
+template <> struct LvlTypeMap<VkConditionalRenderingBeginInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT> {
+    typedef VkConditionalRenderingBeginInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceConditionalRenderingFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceConditionalRenderingFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT> {
+    typedef VkPhysicalDeviceConditionalRenderingFeaturesEXT Type;
+};
+
+// Map type VkCommandBufferInheritanceConditionalRenderingInfoEXT to id VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT
+template <> struct LvlTypeMap<VkCommandBufferInheritanceConditionalRenderingInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT> {
+    typedef VkCommandBufferInheritanceConditionalRenderingInfoEXT Type;
+};
+
+// Map type VkDeviceGeneratedCommandsFeaturesNVX to id VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX
+template <> struct LvlTypeMap<VkDeviceGeneratedCommandsFeaturesNVX> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX> {
+    typedef VkDeviceGeneratedCommandsFeaturesNVX Type;
+};
+
+// Map type VkDeviceGeneratedCommandsLimitsNVX to id VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX
+template <> struct LvlTypeMap<VkDeviceGeneratedCommandsLimitsNVX> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX> {
+    typedef VkDeviceGeneratedCommandsLimitsNVX Type;
+};
+
+// Map type VkIndirectCommandsLayoutCreateInfoNVX to id VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX
+template <> struct LvlTypeMap<VkIndirectCommandsLayoutCreateInfoNVX> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX> {
+    typedef VkIndirectCommandsLayoutCreateInfoNVX Type;
+};
+
+// Map type VkCmdProcessCommandsInfoNVX to id VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX
+template <> struct LvlTypeMap<VkCmdProcessCommandsInfoNVX> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX> {
+    typedef VkCmdProcessCommandsInfoNVX Type;
+};
+
+// Map type VkCmdReserveSpaceForCommandsInfoNVX to id VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX
+template <> struct LvlTypeMap<VkCmdReserveSpaceForCommandsInfoNVX> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX> {
+    typedef VkCmdReserveSpaceForCommandsInfoNVX Type;
+};
+
+// Map type VkObjectTableCreateInfoNVX to id VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX
+template <> struct LvlTypeMap<VkObjectTableCreateInfoNVX> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX> {
+    typedef VkObjectTableCreateInfoNVX Type;
+};
+
+// Map type VkPipelineViewportWScalingStateCreateInfoNV to id VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkPipelineViewportWScalingStateCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV> {
+    typedef VkPipelineViewportWScalingStateCreateInfoNV Type;
+};
+
+// Map type VkSurfaceCapabilities2EXT to id VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT
+template <> struct LvlTypeMap<VkSurfaceCapabilities2EXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT> {
+    typedef VkSurfaceCapabilities2EXT Type;
+};
+
+// Map type VkDisplayPowerInfoEXT to id VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT
+template <> struct LvlTypeMap<VkDisplayPowerInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT> {
+    typedef VkDisplayPowerInfoEXT Type;
+};
+
+// Map type VkDeviceEventInfoEXT to id VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT
+template <> struct LvlTypeMap<VkDeviceEventInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT> {
+    typedef VkDeviceEventInfoEXT Type;
+};
+
+// Map type VkDisplayEventInfoEXT to id VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT
+template <> struct LvlTypeMap<VkDisplayEventInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT> {
+    typedef VkDisplayEventInfoEXT Type;
+};
+
+// Map type VkSwapchainCounterCreateInfoEXT to id VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkSwapchainCounterCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT> {
+    typedef VkSwapchainCounterCreateInfoEXT Type;
+};
+
+// Map type VkPresentTimesInfoGOOGLE to id VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE
+template <> struct LvlTypeMap<VkPresentTimesInfoGOOGLE> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE> {
+    typedef VkPresentTimesInfoGOOGLE Type;
+};
+
+// Map type VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX
+template <> struct LvlTypeMap<VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX> {
+    typedef VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX Type;
+};
+
+// Map type VkPipelineViewportSwizzleStateCreateInfoNV to id VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkPipelineViewportSwizzleStateCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV> {
+    typedef VkPipelineViewportSwizzleStateCreateInfoNV Type;
+};
+
+// Map type VkPhysicalDeviceDiscardRectanglePropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceDiscardRectanglePropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceDiscardRectanglePropertiesEXT Type;
+};
+
+// Map type VkPipelineDiscardRectangleStateCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkPipelineDiscardRectangleStateCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT> {
+    typedef VkPipelineDiscardRectangleStateCreateInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceConservativeRasterizationPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceConservativeRasterizationPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceConservativeRasterizationPropertiesEXT Type;
+};
+
+// Map type VkPipelineRasterizationConservativeStateCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkPipelineRasterizationConservativeStateCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT> {
+    typedef VkPipelineRasterizationConservativeStateCreateInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceDepthClipEnableFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceDepthClipEnableFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT> {
+    typedef VkPhysicalDeviceDepthClipEnableFeaturesEXT Type;
+};
+
+// Map type VkPipelineRasterizationDepthClipStateCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkPipelineRasterizationDepthClipStateCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT> {
+    typedef VkPipelineRasterizationDepthClipStateCreateInfoEXT Type;
+};
+
+// Map type VkHdrMetadataEXT to id VK_STRUCTURE_TYPE_HDR_METADATA_EXT
+template <> struct LvlTypeMap<VkHdrMetadataEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_HDR_METADATA_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_HDR_METADATA_EXT> {
+    typedef VkHdrMetadataEXT Type;
+};
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+// Map type VkIOSSurfaceCreateInfoMVK to id VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK
+template <> struct LvlTypeMap<VkIOSSurfaceCreateInfoMVK> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK> {
+    typedef VkIOSSurfaceCreateInfoMVK Type;
+};
+
+#endif // VK_USE_PLATFORM_IOS_MVK
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+// Map type VkMacOSSurfaceCreateInfoMVK to id VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK
+template <> struct LvlTypeMap<VkMacOSSurfaceCreateInfoMVK> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK> {
+    typedef VkMacOSSurfaceCreateInfoMVK Type;
+};
+
+#endif // VK_USE_PLATFORM_MACOS_MVK
+// Map type VkDebugUtilsObjectNameInfoEXT to id VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT
+template <> struct LvlTypeMap<VkDebugUtilsObjectNameInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT> {
+    typedef VkDebugUtilsObjectNameInfoEXT Type;
+};
+
+// Map type VkDebugUtilsObjectTagInfoEXT to id VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT
+template <> struct LvlTypeMap<VkDebugUtilsObjectTagInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT> {
+    typedef VkDebugUtilsObjectTagInfoEXT Type;
+};
+
+// Map type VkDebugUtilsLabelEXT to id VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT
+template <> struct LvlTypeMap<VkDebugUtilsLabelEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT> {
+    typedef VkDebugUtilsLabelEXT Type;
+};
+
+// Map type VkDebugUtilsMessengerCallbackDataEXT to id VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT
+template <> struct LvlTypeMap<VkDebugUtilsMessengerCallbackDataEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT> {
+    typedef VkDebugUtilsMessengerCallbackDataEXT Type;
+};
+
+// Map type VkDebugUtilsMessengerCreateInfoEXT to id VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkDebugUtilsMessengerCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT> {
+    typedef VkDebugUtilsMessengerCreateInfoEXT Type;
+};
+
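+// The LvlTypeMap / LvlSTypeMap specializations in this header let generic
+// code look up, at compile time, the VkStructureType id that belongs to a
+// Vulkan struct (and the struct type that belongs to an id). As a minimal,
+// illustrative sketch only (the helper name below is hypothetical and not
+// part of this generated header), such a mapping could drive a generic
+// zero-initializer that also fills in sType:
+//
+//     template <typename T>
+//     T InitVkStructSketch() {
+//         T out{};                            // zero-initialize all fields
+//         out.sType = LvlTypeMap<T>::kSType;  // matching sType from the map
+//         return out;
+//     }
+//
+//     // usage: auto ci = InitVkStructSketch<VkDebugUtilsMessengerCreateInfoEXT>();
+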
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+// Map type VkAndroidHardwareBufferUsageANDROID to id VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID
+template <> struct LvlTypeMap<VkAndroidHardwareBufferUsageANDROID> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID> {
+    typedef VkAndroidHardwareBufferUsageANDROID Type;
+};
+
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+// Map type VkAndroidHardwareBufferPropertiesANDROID to id VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID
+template <> struct LvlTypeMap<VkAndroidHardwareBufferPropertiesANDROID> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID> {
+    typedef VkAndroidHardwareBufferPropertiesANDROID Type;
+};
+
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+// Map type VkAndroidHardwareBufferFormatPropertiesANDROID to id VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID
+template <> struct LvlTypeMap<VkAndroidHardwareBufferFormatPropertiesANDROID> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID> {
+    typedef VkAndroidHardwareBufferFormatPropertiesANDROID Type;
+};
+
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+// Map type VkImportAndroidHardwareBufferInfoANDROID to id VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID
+template <> struct LvlTypeMap<VkImportAndroidHardwareBufferInfoANDROID> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID> {
+    typedef VkImportAndroidHardwareBufferInfoANDROID Type;
+};
+
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+// Map type VkMemoryGetAndroidHardwareBufferInfoANDROID to id VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID
+template <> struct LvlTypeMap<VkMemoryGetAndroidHardwareBufferInfoANDROID> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID> {
+    typedef VkMemoryGetAndroidHardwareBufferInfoANDROID Type;
+};
+
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+// Map type VkExternalFormatANDROID to id VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID
+template <> struct LvlTypeMap<VkExternalFormatANDROID> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID> {
+    typedef VkExternalFormatANDROID Type;
+};
+
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+// Map type VkSamplerReductionModeCreateInfoEXT to id VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkSamplerReductionModeCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT> {
+    typedef VkSamplerReductionModeCreateInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT Type;
+};
+
+// Map type VkPhysicalDeviceInlineUniformBlockFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceInlineUniformBlockFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT> {
+    typedef VkPhysicalDeviceInlineUniformBlockFeaturesEXT Type;
+};
+
+// Map type VkPhysicalDeviceInlineUniformBlockPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceInlineUniformBlockPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceInlineUniformBlockPropertiesEXT Type;
+};
+
+// Map type VkWriteDescriptorSetInlineUniformBlockEXT to id VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT
+template <> struct LvlTypeMap<VkWriteDescriptorSetInlineUniformBlockEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT> {
+    typedef VkWriteDescriptorSetInlineUniformBlockEXT Type;
+};
+
+// Map type VkDescriptorPoolInlineUniformBlockCreateInfoEXT to id VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkDescriptorPoolInlineUniformBlockCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT> {
+    typedef VkDescriptorPoolInlineUniformBlockCreateInfoEXT Type;
+};
+
+// Map type VkSampleLocationsInfoEXT to id VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT
+template <> struct LvlTypeMap<VkSampleLocationsInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT> {
+    typedef VkSampleLocationsInfoEXT Type;
+};
+
+// Map type VkRenderPassSampleLocationsBeginInfoEXT to id VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT
+template <> struct LvlTypeMap<VkRenderPassSampleLocationsBeginInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT> {
+    typedef VkRenderPassSampleLocationsBeginInfoEXT Type;
+};
+
+// Map type VkPipelineSampleLocationsStateCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkPipelineSampleLocationsStateCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT> {
+    typedef VkPipelineSampleLocationsStateCreateInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceSampleLocationsPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceSampleLocationsPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceSampleLocationsPropertiesEXT Type;
+};
+
+// Map type VkMultisamplePropertiesEXT to id VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkMultisamplePropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT> {
+    typedef VkMultisamplePropertiesEXT Type;
+};
+
+// Map type VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT> {
+    typedef VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT Type;
+};
+
+// Map type VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT Type;
+};
+
+// Map type VkPipelineColorBlendAdvancedStateCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkPipelineColorBlendAdvancedStateCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT> {
+    typedef VkPipelineColorBlendAdvancedStateCreateInfoEXT Type;
+};
+
+// Map type VkPipelineCoverageToColorStateCreateInfoNV to id VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkPipelineCoverageToColorStateCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV> {
+    typedef VkPipelineCoverageToColorStateCreateInfoNV Type;
+};
+
+// Map type VkPipelineCoverageModulationStateCreateInfoNV to id VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkPipelineCoverageModulationStateCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV> {
+    typedef VkPipelineCoverageModulationStateCreateInfoNV Type;
+};
+
+// Map type VkPhysicalDeviceShaderSMBuiltinsPropertiesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceShaderSMBuiltinsPropertiesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV> {
+    typedef VkPhysicalDeviceShaderSMBuiltinsPropertiesNV Type;
+};
+
+// Map type VkPhysicalDeviceShaderSMBuiltinsFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV> {
+    typedef VkPhysicalDeviceShaderSMBuiltinsFeaturesNV Type;
+};
+
+// Map type VkDrmFormatModifierPropertiesListEXT to id VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT
+template <> struct LvlTypeMap<VkDrmFormatModifierPropertiesListEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT> {
+    typedef VkDrmFormatModifierPropertiesListEXT Type;
+};
+
+// Map type VkPhysicalDeviceImageDrmFormatModifierInfoEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceImageDrmFormatModifierInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT> {
+    typedef VkPhysicalDeviceImageDrmFormatModifierInfoEXT Type;
+};
+
+// Map type VkImageDrmFormatModifierListCreateInfoEXT to id VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkImageDrmFormatModifierListCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT> {
+    typedef VkImageDrmFormatModifierListCreateInfoEXT Type;
+};
+
+// Map type VkImageDrmFormatModifierExplicitCreateInfoEXT to id VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkImageDrmFormatModifierExplicitCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT> {
+    typedef VkImageDrmFormatModifierExplicitCreateInfoEXT Type;
+};
+
+// Map type VkImageDrmFormatModifierPropertiesEXT to id VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkImageDrmFormatModifierPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT> {
+    typedef VkImageDrmFormatModifierPropertiesEXT Type;
+};
+
+// Map type VkValidationCacheCreateInfoEXT to id VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkValidationCacheCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT> {
+    typedef VkValidationCacheCreateInfoEXT Type;
+};
+
+// Map type VkShaderModuleValidationCacheCreateInfoEXT to id VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkShaderModuleValidationCacheCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT> {
+    typedef VkShaderModuleValidationCacheCreateInfoEXT Type;
+};
+
+// Map type VkDescriptorSetLayoutBindingFlagsCreateInfoEXT to id VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkDescriptorSetLayoutBindingFlagsCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT> {
+    typedef VkDescriptorSetLayoutBindingFlagsCreateInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceDescriptorIndexingFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceDescriptorIndexingFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT> {
+    typedef VkPhysicalDeviceDescriptorIndexingFeaturesEXT Type;
+};
+
+// Map type VkPhysicalDeviceDescriptorIndexingPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceDescriptorIndexingPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceDescriptorIndexingPropertiesEXT Type;
+};
+
+// Map type VkDescriptorSetVariableDescriptorCountAllocateInfoEXT to id VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT
+template <> struct LvlTypeMap<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT> {
+    typedef VkDescriptorSetVariableDescriptorCountAllocateInfoEXT Type;
+};
+
+// Map type VkDescriptorSetVariableDescriptorCountLayoutSupportEXT to id VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT
+template <> struct LvlTypeMap<VkDescriptorSetVariableDescriptorCountLayoutSupportEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT> {
+    typedef VkDescriptorSetVariableDescriptorCountLayoutSupportEXT Type;
+};
+
+// Map type VkPipelineViewportShadingRateImageStateCreateInfoNV to id VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkPipelineViewportShadingRateImageStateCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV> {
+    typedef VkPipelineViewportShadingRateImageStateCreateInfoNV Type;
+};
+
+// Map type VkPhysicalDeviceShadingRateImageFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceShadingRateImageFeaturesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV> {
+    typedef VkPhysicalDeviceShadingRateImageFeaturesNV Type;
+};
+
+// Map type VkPhysicalDeviceShadingRateImagePropertiesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceShadingRateImagePropertiesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV> {
+    typedef VkPhysicalDeviceShadingRateImagePropertiesNV Type;
+};
+
+// Map type VkPipelineViewportCoarseSampleOrderStateCreateInfoNV to id VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkPipelineViewportCoarseSampleOrderStateCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV> {
+    typedef VkPipelineViewportCoarseSampleOrderStateCreateInfoNV Type;
+};
+
+// Map type VkRayTracingShaderGroupCreateInfoNV to id VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkRayTracingShaderGroupCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV> {
+    typedef VkRayTracingShaderGroupCreateInfoNV Type;
+};
+
+// Map type VkRayTracingPipelineCreateInfoNV to id VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkRayTracingPipelineCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV> {
+    typedef VkRayTracingPipelineCreateInfoNV Type;
+};
+
+// Map type VkGeometryTrianglesNV to id VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV
+template <> struct LvlTypeMap<VkGeometryTrianglesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV> {
+    typedef VkGeometryTrianglesNV Type;
+};
+
+// Map type VkGeometryAABBNV to id VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV
+template <> struct LvlTypeMap<VkGeometryAABBNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV> {
+    typedef VkGeometryAABBNV Type;
+};
+
+// Map type VkGeometryNV to id VK_STRUCTURE_TYPE_GEOMETRY_NV
+template <> struct LvlTypeMap<VkGeometryNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_GEOMETRY_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_GEOMETRY_NV> {
+    typedef VkGeometryNV Type;
+};
+
+// Map type VkAccelerationStructureInfoNV to id VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV
+template <> struct LvlTypeMap<VkAccelerationStructureInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV> {
+    typedef VkAccelerationStructureInfoNV Type;
+};
+
+// Map type VkAccelerationStructureCreateInfoNV to id VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkAccelerationStructureCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV> {
+    typedef VkAccelerationStructureCreateInfoNV Type;
+};
+
+// Map type VkBindAccelerationStructureMemoryInfoNV to id VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV
+template <> struct LvlTypeMap<VkBindAccelerationStructureMemoryInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV> {
+    typedef VkBindAccelerationStructureMemoryInfoNV Type;
+};
+
+// Map type VkWriteDescriptorSetAccelerationStructureNV to id VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV
+template <> struct LvlTypeMap<VkWriteDescriptorSetAccelerationStructureNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV> {
+    typedef VkWriteDescriptorSetAccelerationStructureNV Type;
+};
+
+// Map type VkAccelerationStructureMemoryRequirementsInfoNV to id VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV
+template <> struct LvlTypeMap<VkAccelerationStructureMemoryRequirementsInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV> {
+    typedef VkAccelerationStructureMemoryRequirementsInfoNV Type;
+};
+
+// Map type VkPhysicalDeviceRayTracingPropertiesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceRayTracingPropertiesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV> {
+    typedef VkPhysicalDeviceRayTracingPropertiesNV Type;
+};
+
+// Map type VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV> {
+    typedef VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV Type;
+};
+
+// Map type VkPipelineRepresentativeFragmentTestStateCreateInfoNV to id VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkPipelineRepresentativeFragmentTestStateCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV> {
+    typedef VkPipelineRepresentativeFragmentTestStateCreateInfoNV Type;
+};
+
+// Map type VkPhysicalDeviceImageViewImageFormatInfoEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceImageViewImageFormatInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT> {
+    typedef VkPhysicalDeviceImageViewImageFormatInfoEXT Type;
+};
+
+// Map type VkFilterCubicImageViewImageFormatPropertiesEXT to id VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkFilterCubicImageViewImageFormatPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT> {
+    typedef VkFilterCubicImageViewImageFormatPropertiesEXT Type;
+};
+
+// Map type VkDeviceQueueGlobalPriorityCreateInfoEXT to id VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkDeviceQueueGlobalPriorityCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT> {
+    typedef VkDeviceQueueGlobalPriorityCreateInfoEXT Type;
+};
+
+// Map type VkImportMemoryHostPointerInfoEXT to id VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT
+template <> struct LvlTypeMap<VkImportMemoryHostPointerInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT> {
+    typedef VkImportMemoryHostPointerInfoEXT Type;
+};
+
+// Map type VkMemoryHostPointerPropertiesEXT to id VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkMemoryHostPointerPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT> {
+    typedef VkMemoryHostPointerPropertiesEXT Type;
+};
+
+// Map type VkPhysicalDeviceExternalMemoryHostPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceExternalMemoryHostPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceExternalMemoryHostPropertiesEXT Type;
+};
+
+// Map type VkPipelineCompilerControlCreateInfoAMD to id VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD
+template <> struct LvlTypeMap<VkPipelineCompilerControlCreateInfoAMD> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD> {
+    typedef VkPipelineCompilerControlCreateInfoAMD Type;
+};
+
+// Map type VkCalibratedTimestampInfoEXT to id VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT
+template <> struct LvlTypeMap<VkCalibratedTimestampInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT> {
+    typedef VkCalibratedTimestampInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceShaderCorePropertiesAMD to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD
+template <> struct LvlTypeMap<VkPhysicalDeviceShaderCorePropertiesAMD> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD> {
+    typedef VkPhysicalDeviceShaderCorePropertiesAMD Type;
+};
+
+// Map type VkDeviceMemoryOverallocationCreateInfoAMD to id VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD
+template <> struct LvlTypeMap<VkDeviceMemoryOverallocationCreateInfoAMD> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD> {
+    typedef VkDeviceMemoryOverallocationCreateInfoAMD Type;
+};
+
+// Map type VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT Type;
+};
+
+// Map type VkPipelineVertexInputDivisorStateCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkPipelineVertexInputDivisorStateCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT> {
+    typedef VkPipelineVertexInputDivisorStateCreateInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT> {
+    typedef VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT Type;
+};
+
+#ifdef VK_USE_PLATFORM_GGP
+// Map type VkPresentFrameTokenGGP to id VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP
+template <> struct LvlTypeMap<VkPresentFrameTokenGGP> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP> {
+    typedef VkPresentFrameTokenGGP Type;
+};
+
+#endif // VK_USE_PLATFORM_GGP
+// Map type VkPipelineCreationFeedbackCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkPipelineCreationFeedbackCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT> {
+    typedef VkPipelineCreationFeedbackCreateInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceComputeShaderDerivativesFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV> {
+    typedef VkPhysicalDeviceComputeShaderDerivativesFeaturesNV Type;
+};
+
+// Map type VkPhysicalDeviceMeshShaderFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceMeshShaderFeaturesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV> {
+    typedef VkPhysicalDeviceMeshShaderFeaturesNV Type;
+};
+
+// Map type VkPhysicalDeviceMeshShaderPropertiesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceMeshShaderPropertiesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV> {
+    typedef VkPhysicalDeviceMeshShaderPropertiesNV Type;
+};
+
+// Map type VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV> {
+    typedef VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV Type;
+};
+
+// Map type VkPhysicalDeviceShaderImageFootprintFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceShaderImageFootprintFeaturesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV> {
+    typedef VkPhysicalDeviceShaderImageFootprintFeaturesNV Type;
+};
+
+// Map type VkPipelineViewportExclusiveScissorStateCreateInfoNV to id VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkPipelineViewportExclusiveScissorStateCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV> {
+    typedef VkPipelineViewportExclusiveScissorStateCreateInfoNV Type;
+};
+
+// Map type VkPhysicalDeviceExclusiveScissorFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceExclusiveScissorFeaturesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV> {
+    typedef VkPhysicalDeviceExclusiveScissorFeaturesNV Type;
+};
+
+// Map type VkQueueFamilyCheckpointPropertiesNV to id VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV
+template <> struct LvlTypeMap<VkQueueFamilyCheckpointPropertiesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV> {
+    typedef VkQueueFamilyCheckpointPropertiesNV Type;
+};
+
+// Map type VkCheckpointDataNV to id VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV
+template <> struct LvlTypeMap<VkCheckpointDataNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV> {
+    typedef VkCheckpointDataNV Type;
+};
+
+// Map type VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL
+template <> struct LvlTypeMap<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL> {
+    typedef VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL Type;
+};
+
+// Map type VkInitializePerformanceApiInfoINTEL to id VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL
+template <> struct LvlTypeMap<VkInitializePerformanceApiInfoINTEL> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL> {
+    typedef VkInitializePerformanceApiInfoINTEL Type;
+};
+
+// Map type VkQueryPoolCreateInfoINTEL to id VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL
+template <> struct LvlTypeMap<VkQueryPoolCreateInfoINTEL> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL> {
+    typedef VkQueryPoolCreateInfoINTEL Type;
+};
+
+// Map type VkPerformanceMarkerInfoINTEL to id VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL
+template <> struct LvlTypeMap<VkPerformanceMarkerInfoINTEL> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL> {
+    typedef VkPerformanceMarkerInfoINTEL Type;
+};
+
+// Map type VkPerformanceStreamMarkerInfoINTEL to id VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL
+template <> struct LvlTypeMap<VkPerformanceStreamMarkerInfoINTEL> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL> {
+    typedef VkPerformanceStreamMarkerInfoINTEL Type;
+};
+
+// Map type VkPerformanceOverrideInfoINTEL to id VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL
+template <> struct LvlTypeMap<VkPerformanceOverrideInfoINTEL> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL> {
+    typedef VkPerformanceOverrideInfoINTEL Type;
+};
+
+// Map type VkPerformanceConfigurationAcquireInfoINTEL to id VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL
+template <> struct LvlTypeMap<VkPerformanceConfigurationAcquireInfoINTEL> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL> {
+    typedef VkPerformanceConfigurationAcquireInfoINTEL Type;
+};
+
+// Map type VkPhysicalDevicePCIBusInfoPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDevicePCIBusInfoPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT> {
+    typedef VkPhysicalDevicePCIBusInfoPropertiesEXT Type;
+};
+
+// Map type VkDisplayNativeHdrSurfaceCapabilitiesAMD to id VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD
+template <> struct LvlTypeMap<VkDisplayNativeHdrSurfaceCapabilitiesAMD> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD> {
+    typedef VkDisplayNativeHdrSurfaceCapabilitiesAMD Type;
+};
+
+// Map type VkSwapchainDisplayNativeHdrCreateInfoAMD to id VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD
+template <> struct LvlTypeMap<VkSwapchainDisplayNativeHdrCreateInfoAMD> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD> {
+    typedef VkSwapchainDisplayNativeHdrCreateInfoAMD Type;
+};
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+// Map type VkImagePipeSurfaceCreateInfoFUCHSIA to id VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA
+template <> struct LvlTypeMap<VkImagePipeSurfaceCreateInfoFUCHSIA> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA> {
+    typedef VkImagePipeSurfaceCreateInfoFUCHSIA Type;
+};
+
+#endif // VK_USE_PLATFORM_FUCHSIA
+#ifdef VK_USE_PLATFORM_METAL_EXT
+// Map type VkMetalSurfaceCreateInfoEXT to id VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkMetalSurfaceCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT> {
+    typedef VkMetalSurfaceCreateInfoEXT Type;
+};
+
+#endif // VK_USE_PLATFORM_METAL_EXT
+// Map type VkPhysicalDeviceFragmentDensityMapFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceFragmentDensityMapFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT> {
+    typedef VkPhysicalDeviceFragmentDensityMapFeaturesEXT Type;
+};
+
+// Map type VkPhysicalDeviceFragmentDensityMapPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceFragmentDensityMapPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceFragmentDensityMapPropertiesEXT Type;
+};
+
+// Map type VkRenderPassFragmentDensityMapCreateInfoEXT to id VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkRenderPassFragmentDensityMapCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT> {
+    typedef VkRenderPassFragmentDensityMapCreateInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceScalarBlockLayoutFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceScalarBlockLayoutFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT> {
+    typedef VkPhysicalDeviceScalarBlockLayoutFeaturesEXT Type;
+};
+
+// Map type VkPhysicalDeviceSubgroupSizeControlFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceSubgroupSizeControlFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT> {
+    typedef VkPhysicalDeviceSubgroupSizeControlFeaturesEXT Type;
+};
+
+// Map type VkPhysicalDeviceSubgroupSizeControlPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceSubgroupSizeControlPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceSubgroupSizeControlPropertiesEXT Type;
+};
+
+// Map type VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT> {
+    typedef VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceShaderCoreProperties2AMD to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD
+template <> struct LvlTypeMap<VkPhysicalDeviceShaderCoreProperties2AMD> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD> {
+    typedef VkPhysicalDeviceShaderCoreProperties2AMD Type;
+};
+
+// Map type VkPhysicalDeviceCoherentMemoryFeaturesAMD to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD
+template <> struct LvlTypeMap<VkPhysicalDeviceCoherentMemoryFeaturesAMD> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD> {
+    typedef VkPhysicalDeviceCoherentMemoryFeaturesAMD Type;
+};
+
+// Map type VkPhysicalDeviceMemoryBudgetPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceMemoryBudgetPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceMemoryBudgetPropertiesEXT Type;
+};
+
+// Map type VkPhysicalDeviceMemoryPriorityFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceMemoryPriorityFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT> {
+    typedef VkPhysicalDeviceMemoryPriorityFeaturesEXT Type;
+};
+
+// Map type VkMemoryPriorityAllocateInfoEXT to id VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT
+template <> struct LvlTypeMap<VkMemoryPriorityAllocateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT> {
+    typedef VkMemoryPriorityAllocateInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV> {
+    typedef VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV Type;
+};
+
+// Map type VkPhysicalDeviceBufferDeviceAddressFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT> {
+    typedef VkPhysicalDeviceBufferDeviceAddressFeaturesEXT Type;
+};
+
+// Map type VkBufferDeviceAddressCreateInfoEXT to id VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkBufferDeviceAddressCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT> {
+    typedef VkBufferDeviceAddressCreateInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceToolPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceToolPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceToolPropertiesEXT Type;
+};
+
+// Map type VkImageStencilUsageCreateInfoEXT to id VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkImageStencilUsageCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT> {
+    typedef VkImageStencilUsageCreateInfoEXT Type;
+};
+
+// Map type VkValidationFeaturesEXT to id VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT
+template <> struct LvlTypeMap<VkValidationFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT> {
+    typedef VkValidationFeaturesEXT Type;
+};
+
+// Map type VkCooperativeMatrixPropertiesNV to id VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV
+template <> struct LvlTypeMap<VkCooperativeMatrixPropertiesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV> {
+    typedef VkCooperativeMatrixPropertiesNV Type;
+};
+
+// Map type VkPhysicalDeviceCooperativeMatrixFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceCooperativeMatrixFeaturesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV> {
+    typedef VkPhysicalDeviceCooperativeMatrixFeaturesNV Type;
+};
+
+// Map type VkPhysicalDeviceCooperativeMatrixPropertiesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceCooperativeMatrixPropertiesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV> {
+    typedef VkPhysicalDeviceCooperativeMatrixPropertiesNV Type;
+};
+
+// Map type VkPhysicalDeviceCoverageReductionModeFeaturesNV to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV
+template <> struct LvlTypeMap<VkPhysicalDeviceCoverageReductionModeFeaturesNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV> {
+    typedef VkPhysicalDeviceCoverageReductionModeFeaturesNV Type;
+};
+
+// Map type VkPipelineCoverageReductionStateCreateInfoNV to id VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV
+template <> struct LvlTypeMap<VkPipelineCoverageReductionStateCreateInfoNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV> {
+    typedef VkPipelineCoverageReductionStateCreateInfoNV Type;
+};
+
+// Map type VkFramebufferMixedSamplesCombinationNV to id VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV
+template <> struct LvlTypeMap<VkFramebufferMixedSamplesCombinationNV> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV> {
+    typedef VkFramebufferMixedSamplesCombinationNV Type;
+};
+
+// Map type VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT> {
+    typedef VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT Type;
+};
+
+// Map type VkPhysicalDeviceYcbcrImageArraysFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT> {
+    typedef VkPhysicalDeviceYcbcrImageArraysFeaturesEXT Type;
+};
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkSurfaceFullScreenExclusiveInfoEXT to id VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT
+template <> struct LvlTypeMap<VkSurfaceFullScreenExclusiveInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT> {
+    typedef VkSurfaceFullScreenExclusiveInfoEXT Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkSurfaceCapabilitiesFullScreenExclusiveEXT to id VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT
+template <> struct LvlTypeMap<VkSurfaceCapabilitiesFullScreenExclusiveEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT> {
+    typedef VkSurfaceCapabilitiesFullScreenExclusiveEXT Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+// Map type VkSurfaceFullScreenExclusiveWin32InfoEXT to id VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT
+template <> struct LvlTypeMap<VkSurfaceFullScreenExclusiveWin32InfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT> {
+    typedef VkSurfaceFullScreenExclusiveWin32InfoEXT Type;
+};
+
+#endif // VK_USE_PLATFORM_WIN32_KHR
+// Map type VkHeadlessSurfaceCreateInfoEXT to id VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkHeadlessSurfaceCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT> {
+    typedef VkHeadlessSurfaceCreateInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceLineRasterizationFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceLineRasterizationFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT> {
+    typedef VkPhysicalDeviceLineRasterizationFeaturesEXT Type;
+};
+
+// Map type VkPhysicalDeviceLineRasterizationPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceLineRasterizationPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceLineRasterizationPropertiesEXT Type;
+};
+
+// Map type VkPipelineRasterizationLineStateCreateInfoEXT to id VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT
+template <> struct LvlTypeMap<VkPipelineRasterizationLineStateCreateInfoEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT> {
+    typedef VkPipelineRasterizationLineStateCreateInfoEXT Type;
+};
+
+// Map type VkPhysicalDeviceHostQueryResetFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceHostQueryResetFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT> {
+    typedef VkPhysicalDeviceHostQueryResetFeaturesEXT Type;
+};
+
+// Map type VkPhysicalDeviceIndexTypeUint8FeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceIndexTypeUint8FeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT> {
+    typedef VkPhysicalDeviceIndexTypeUint8FeaturesEXT Type;
+};
+
+// Map type VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT> {
+    typedef VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT Type;
+};
+
+// Map type VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT> {
+    typedef VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT Type;
+};
+
+// Map type VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT to id VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT
+template <> struct LvlTypeMap<VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT> {
+    static const VkStructureType kSType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT;
+};
+
+template <> struct LvlSTypeMap<VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT> {
+    typedef VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT Type;
+};
+
+// Find an entry of the given type in the pNext chain
+template <typename T> const T *lvl_find_in_chain(const void *next) {
+    const VkBaseOutStructure *current = reinterpret_cast<const VkBaseOutStructure *>(next);
+    const T *found = nullptr;
+    while (current) {
+        if (LvlTypeMap<T>::kSType == current->sType) {
+            found = reinterpret_cast<const T*>(current);
+            current = nullptr;
+        } else {
+            current = current->pNext;
+        }
+    }
+    return found;
+}
+
+// Init the header of an sType struct with pNext
+template <typename T> T lvl_init_struct(void *p_next) {
+    T out = {};
+    out.sType = LvlTypeMap<T>::kSType;
+    out.pNext = p_next;
+    return out;
+}
+
+// Init the header of an sType struct
+template <typename T> T lvl_init_struct() {
+    T out = {};
+    out.sType = LvlTypeMap<T>::kSType;
+    return out;
+}
+
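Note: the following is an illustrative sketch, not part of the patch above. It shows how the generated LvlTypeMap helpers (lvl_find_in_chain, lvl_init_struct) are typically consumed by validation-layer code; the function name InspectDeviceCreateInfo and the include paths are assumptions for the example, and the two structure types used are ones mapped earlier in this header.

    // Minimal usage sketch (assumes the generated vk_typemap_helper.h is on the include path).
    #include <vulkan/vulkan.h>
    #include "vk_typemap_helper.h"

    void InspectDeviceCreateInfo(const VkDeviceCreateInfo* create_info) {
        // lvl_find_in_chain<T>() walks the pNext chain and returns the first structure
        // whose sType equals LvlTypeMap<T>::kSType, or nullptr if none is present.
        const VkPhysicalDeviceLineRasterizationFeaturesEXT* line_raster =
            lvl_find_in_chain<VkPhysicalDeviceLineRasterizationFeaturesEXT>(create_info->pNext);
        if (line_raster != nullptr && line_raster->bresenhamLines) {
            // The application requested Bresenham line rasterization.
        }

        // lvl_init_struct<T>() zero-initializes T and fills in sType from LvlTypeMap<T>;
        // the overload taking a void* also chains that pointer via pNext.
        VkMemoryPriorityAllocateInfoEXT priority_info =
            lvl_init_struct<VkMemoryPriorityAllocateInfoEXT>();
        priority_info.priority = 0.5f;
        (void)priority_info;
    }
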
diff --git a/src/third_party/vulkan-validation-layers/src/layers/generated/vk_validation_error_messages.h b/src/third_party/vulkan-validation-layers/src/layers/generated/vk_validation_error_messages.h
new file mode 100644
index 0000000..ef03585
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/generated/vk_validation_error_messages.h
@@ -0,0 +1,5854 @@
+/* THIS FILE IS GENERATED - DO NOT EDIT (scripts/vk_validation_stats.py) */
+/* Vulkan specification version: 1.1.130 */
+/*
+ * Vulkan
+ *
+ * Copyright (c) 2016-2019 Google Inc.
+ * Copyright (c) 2016-2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Tobin Ehlis <tobine@google.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ */
+
+#pragma once
+
+// Disable auto-formatting for generated file
+// clang-format off
+
+// Mapping from VUID string to the corresponding spec text
+typedef struct _vuid_spec_text_pair {
+    const char * vuid;
+    const char * spec_text;
+} vuid_spec_text_pair;
+
+static const vuid_spec_text_pair vuid_spec_text[] = {
+    {"VUID-VkAccelerationStructureCreateInfoNV-compactedSize-02421", "If compactedSize is not 0 then both info.geometryCount and info.instanceCount must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureCreateInfoNV-compactedSize-02421)"},
+    {"VUID-VkAccelerationStructureCreateInfoNV-info-parameter", "info must be a valid VkAccelerationStructureInfoNV structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureCreateInfoNV-info-parameter)"},
+    {"VUID-VkAccelerationStructureCreateInfoNV-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureCreateInfoNV-pNext-pNext)"},
+    {"VUID-VkAccelerationStructureCreateInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureCreateInfoNV-sType-sType)"},
+    {"VUID-VkAccelerationStructureInfoNV-flags-02592", "If flags has the VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV bit set, then it must not have the VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureInfoNV-flags-02592)"},
+    {"VUID-VkAccelerationStructureInfoNV-flags-parameter", "flags must be a valid combination of VkBuildAccelerationStructureFlagBitsNV values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureInfoNV-flags-parameter)"},
+    {"VUID-VkAccelerationStructureInfoNV-geometryCount-02422", "geometryCount must be less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxGeometryCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureInfoNV-geometryCount-02422)"},
+    {"VUID-VkAccelerationStructureInfoNV-instanceCount-02423", "instanceCount must be less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxInstanceCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureInfoNV-instanceCount-02423)"},
+    {"VUID-VkAccelerationStructureInfoNV-instanceData-02782", "If instanceData is not VK_NULL_HANDLE, instanceData must have been created with VK_BUFFER_USAGE_RAY_TRACING_BIT_NV usage flag (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureInfoNV-instanceData-02782)"},
+    {"VUID-VkAccelerationStructureInfoNV-maxTriangleCount-02424", "The total number of triangles in all geometries must be less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxTriangleCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureInfoNV-maxTriangleCount-02424)"},
+    {"VUID-VkAccelerationStructureInfoNV-pGeometries-parameter", "If geometryCount is not 0, pGeometries must be a valid pointer to an array of geometryCount valid VkGeometryNV structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureInfoNV-pGeometries-parameter)"},
+    {"VUID-VkAccelerationStructureInfoNV-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureInfoNV-pNext-pNext)"},
+    {"VUID-VkAccelerationStructureInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureInfoNV-sType-sType)"},
+    {"VUID-VkAccelerationStructureInfoNV-scratch-02781", "scratch must have been created with VK_BUFFER_USAGE_RAY_TRACING_BIT_NV usage flag (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureInfoNV-scratch-02781)"},
+    {"VUID-VkAccelerationStructureInfoNV-type-02425", "If type is VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV then geometryCount must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureInfoNV-type-02425)"},
+    {"VUID-VkAccelerationStructureInfoNV-type-02426", "If type is VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV then instanceCount must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureInfoNV-type-02426)"},
+    {"VUID-VkAccelerationStructureInfoNV-type-02786", "If type is VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV then the geometryType member of each geometry in pGeometries must be the same (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureInfoNV-type-02786)"},
+    {"VUID-VkAccelerationStructureInfoNV-type-parameter", "type must be a valid VkAccelerationStructureTypeNV value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureInfoNV-type-parameter)"},
+    {"VUID-VkAccelerationStructureMemoryRequirementsInfoNV-accelerationStructure-parameter", "accelerationStructure must be a valid VkAccelerationStructureNV handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureMemoryRequirementsInfoNV-accelerationStructure-parameter)"},
+    {"VUID-VkAccelerationStructureMemoryRequirementsInfoNV-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureMemoryRequirementsInfoNV-pNext-pNext)"},
+    {"VUID-VkAccelerationStructureMemoryRequirementsInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureMemoryRequirementsInfoNV-sType-sType)"},
+    {"VUID-VkAccelerationStructureMemoryRequirementsInfoNV-type-parameter", "type must be a valid VkAccelerationStructureMemoryRequirementsTypeNV value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAccelerationStructureMemoryRequirementsInfoNV-type-parameter)"},
+    {"VUID-VkAcquireNextImageInfoKHR-commonparent", "Each of fence, semaphore, and swapchain that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkInstance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAcquireNextImageInfoKHR-commonparent)"},
+    {"VUID-VkAcquireNextImageInfoKHR-deviceMask-01290", "deviceMask must be a valid device mask (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAcquireNextImageInfoKHR-deviceMask-01290)"},
+    {"VUID-VkAcquireNextImageInfoKHR-deviceMask-01291", "deviceMask must not be zero (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAcquireNextImageInfoKHR-deviceMask-01291)"},
+    {"VUID-VkAcquireNextImageInfoKHR-fence-01289", "If fence is not VK_NULL_HANDLE it must be unsignaled and must not be associated with any other queue command that has not yet completed execution on that queue (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAcquireNextImageInfoKHR-fence-01289)"},
+    {"VUID-VkAcquireNextImageInfoKHR-fence-parameter", "If fence is not VK_NULL_HANDLE, fence must be a valid VkFence handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAcquireNextImageInfoKHR-fence-parameter)"},
+    {"VUID-VkAcquireNextImageInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAcquireNextImageInfoKHR-pNext-pNext)"},
+    {"VUID-VkAcquireNextImageInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAcquireNextImageInfoKHR-sType-sType)"},
+    {"VUID-VkAcquireNextImageInfoKHR-semaphore-01288", "If semaphore is not VK_NULL_HANDLE it must be unsignaled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAcquireNextImageInfoKHR-semaphore-01288)"},
+    {"VUID-VkAcquireNextImageInfoKHR-semaphore-01781", "If semaphore is not VK_NULL_HANDLE it must not have any uncompleted signal or wait operations pending (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAcquireNextImageInfoKHR-semaphore-01781)"},
+    {"VUID-VkAcquireNextImageInfoKHR-semaphore-01782", "semaphore and fence must not both be equal to VK_NULL_HANDLE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAcquireNextImageInfoKHR-semaphore-01782)"},
+    {"VUID-VkAcquireNextImageInfoKHR-semaphore-03266", "semaphore must have a VkSemaphoreTypeKHR of VK_SEMAPHORE_TYPE_BINARY_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAcquireNextImageInfoKHR-semaphore-03266)"},
+    {"VUID-VkAcquireNextImageInfoKHR-semaphore-parameter", "If semaphore is not VK_NULL_HANDLE, semaphore must be a valid VkSemaphore handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAcquireNextImageInfoKHR-semaphore-parameter)"},
+    {"VUID-VkAcquireNextImageInfoKHR-swapchain-01675", "swapchain must not be in the retired state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAcquireNextImageInfoKHR-swapchain-01675)"},
+    {"VUID-VkAcquireNextImageInfoKHR-swapchain-parameter", "swapchain must be a valid VkSwapchainKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAcquireNextImageInfoKHR-swapchain-parameter)"},
+    {"VUID-VkAcquireProfilingLockInfoKHR-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAcquireProfilingLockInfoKHR-flags-zerobitmask)"},
+    {"VUID-VkAcquireProfilingLockInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAcquireProfilingLockInfoKHR-pNext-pNext)"},
+    {"VUID-VkAcquireProfilingLockInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAcquireProfilingLockInfoKHR-sType-sType)"},
+    {"VUID-VkAllocationCallbacks-pfnAllocation-00632", "pfnAllocation must be a valid pointer to a valid user-defined PFN_vkAllocationFunction (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAllocationCallbacks-pfnAllocation-00632)"},
+    {"VUID-VkAllocationCallbacks-pfnFree-00634", "pfnFree must be a valid pointer to a valid user-defined PFN_vkFreeFunction (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAllocationCallbacks-pfnFree-00634)"},
+    {"VUID-VkAllocationCallbacks-pfnInternalAllocation-00635", "If either of pfnInternalAllocation or pfnInternalFree is not NULL, both must be valid callbacks (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAllocationCallbacks-pfnInternalAllocation-00635)"},
+    {"VUID-VkAllocationCallbacks-pfnReallocation-00633", "pfnReallocation must be a valid pointer to a valid user-defined PFN_vkReallocationFunction (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAllocationCallbacks-pfnReallocation-00633)"},
+    {"VUID-VkAndroidHardwareBufferFormatPropertiesANDROID-sType-sType", "sType must be VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAndroidHardwareBufferFormatPropertiesANDROID-sType-sType)"},
+    {"VUID-VkAndroidHardwareBufferPropertiesANDROID-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkAndroidHardwareBufferFormatPropertiesANDROID (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAndroidHardwareBufferPropertiesANDROID-pNext-pNext)"},
+    {"VUID-VkAndroidHardwareBufferPropertiesANDROID-sType-sType", "sType must be VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAndroidHardwareBufferPropertiesANDROID-sType-sType)"},
+    {"VUID-VkAndroidHardwareBufferUsageANDROID-sType-sType", "sType must be VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAndroidHardwareBufferUsageANDROID-sType-sType)"},
+    {"VUID-VkAndroidSurfaceCreateInfoKHR-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAndroidSurfaceCreateInfoKHR-flags-zerobitmask)"},
+    {"VUID-VkAndroidSurfaceCreateInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAndroidSurfaceCreateInfoKHR-pNext-pNext)"},
+    {"VUID-VkAndroidSurfaceCreateInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAndroidSurfaceCreateInfoKHR-sType-sType)"},
+    {"VUID-VkAndroidSurfaceCreateInfoKHR-window-01248", "window must point to a valid Android ANativeWindow. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAndroidSurfaceCreateInfoKHR-window-01248)"},
+    {"VUID-VkApplicationInfo-pApplicationName-parameter", "If pApplicationName is not NULL, pApplicationName must be a null-terminated UTF-8 string (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkApplicationInfo-pApplicationName-parameter)"},
+    {"VUID-VkApplicationInfo-pEngineName-parameter", "If pEngineName is not NULL, pEngineName must be a null-terminated UTF-8 string (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkApplicationInfo-pEngineName-parameter)"},
+    {"VUID-VkApplicationInfo-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkApplicationInfo-pNext-pNext)"},
+    {"VUID-VkApplicationInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_APPLICATION_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkApplicationInfo-sType-sType)"},
+    {"VUID-VkAttachmentDescription-finalLayout-00843", "finalLayout must not be VK_IMAGE_LAYOUT_UNDEFINED or VK_IMAGE_LAYOUT_PREINITIALIZED (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription-finalLayout-00843)"},
+    {"VUID-VkAttachmentDescription-finalLayout-parameter", "finalLayout must be a valid VkImageLayout value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription-finalLayout-parameter)"},
+    {"VUID-VkAttachmentDescription-flags-parameter", "flags must be a valid combination of VkAttachmentDescriptionFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription-flags-parameter)"},
+    {"VUID-VkAttachmentDescription-format-03280", "If format is a color format, initialLayout must not be VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, or VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription-format-03280)"},
+    {"VUID-VkAttachmentDescription-format-03281", "If format is a depth/stencil format, initialLayout must not be VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription-format-03281)"},
+    {"VUID-VkAttachmentDescription-format-03282", "If format is a color format, name:finalLayout must not be VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, or VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription-format-03282)"},
+    {"VUID-VkAttachmentDescription-format-03283", "If format is a depth/stencil format, finalLayout must not be VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription-format-03283)"},
+    {"VUID-VkAttachmentDescription-format-03286", "If format is a color format, initialLayout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription-format-03286)"},
+    {"VUID-VkAttachmentDescription-format-03287", "If format is a color format, finalLayout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription-format-03287)"},
+    {"VUID-VkAttachmentDescription-format-03288", "If format is a depth/stencil format which includes both depth and stencil aspects, initialLayout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription-format-03288)"},
+    {"VUID-VkAttachmentDescription-format-03289", "If format is a depth/stencil format which includes both depth and stencil aspects, finalLayout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription-format-03289)"},
+    {"VUID-VkAttachmentDescription-format-03290", "If format is a depth/stencil format which includes only the depth aspect, initialLayout must not be VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription-format-03290)"},
+    {"VUID-VkAttachmentDescription-format-03291", "If format is a depth/stencil format which includes only the depth aspect, finalLayout must not be VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription-format-03291)"},
+    {"VUID-VkAttachmentDescription-format-03292", "If format is a depth/stencil format which includes only the stencil aspect, initialLayout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR or VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription-format-03292)"},
+    {"VUID-VkAttachmentDescription-format-03293", "If format is a depth/stencil format which includes only the stencil aspect, finalLayout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR or VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription-format-03293)"},
+    {"VUID-VkAttachmentDescription-format-parameter", "format must be a valid VkFormat value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription-format-parameter)"},
+    {"VUID-VkAttachmentDescription-initialLayout-parameter", "initialLayout must be a valid VkImageLayout value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription-initialLayout-parameter)"},
+    {"VUID-VkAttachmentDescription-loadOp-parameter", "loadOp must be a valid VkAttachmentLoadOp value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription-loadOp-parameter)"},
+    {"VUID-VkAttachmentDescription-samples-parameter", "samples must be a valid VkSampleCountFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription-samples-parameter)"},
+    {"VUID-VkAttachmentDescription-separateDepthStencilLayouts-03284", "If the separateDepthStencilLayouts feature is not enabled, initialLayout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription-separateDepthStencilLayouts-03284)"},
+    {"VUID-VkAttachmentDescription-separateDepthStencilLayouts-03285", "If the separateDepthStencilLayouts feature is not enabled, finalLayout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription-separateDepthStencilLayouts-03285)"},
+    {"VUID-VkAttachmentDescription-stencilLoadOp-parameter", "stencilLoadOp must be a valid VkAttachmentLoadOp value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription-stencilLoadOp-parameter)"},
+    {"VUID-VkAttachmentDescription-stencilStoreOp-parameter", "stencilStoreOp must be a valid VkAttachmentStoreOp value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription-stencilStoreOp-parameter)"},
+    {"VUID-VkAttachmentDescription-storeOp-parameter", "storeOp must be a valid VkAttachmentStoreOp value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription-storeOp-parameter)"},
+    {"VUID-VkAttachmentDescription2KHR-finalLayout-03061", "finalLayout must not be VK_IMAGE_LAYOUT_UNDEFINED or VK_IMAGE_LAYOUT_PREINITIALIZED (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription2KHR-finalLayout-03061)"},
+    {"VUID-VkAttachmentDescription2KHR-finalLayout-parameter", "finalLayout must be a valid VkImageLayout value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription2KHR-finalLayout-parameter)"},
+    {"VUID-VkAttachmentDescription2KHR-flags-parameter", "flags must be a valid combination of VkAttachmentDescriptionFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription2KHR-flags-parameter)"},
+    {"VUID-VkAttachmentDescription2KHR-format-03294", "If format is a color format, name:initialLayout must not be VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, or VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription2KHR-format-03294)"},
+    {"VUID-VkAttachmentDescription2KHR-format-03295", "If format is a depth/stencil format, name:initialLayout must not be VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription2KHR-format-03295)"},
+    {"VUID-VkAttachmentDescription2KHR-format-03296", "If format is a color format, name:finalLayout must not be VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, or VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription2KHR-format-03296)"},
+    {"VUID-VkAttachmentDescription2KHR-format-03297", "If format is a depth/stencil format, name:finalLayout must not be VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription2KHR-format-03297)"},
+    {"VUID-VkAttachmentDescription2KHR-format-03300", "If format is a color format, initialLayout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription2KHR-format-03300)"},
+    {"VUID-VkAttachmentDescription2KHR-format-03301", "If format is a color format, finalLayout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription2KHR-format-03301)"},
+    {"VUID-VkAttachmentDescription2KHR-format-03302", "If format is a depth/stencil format which includes both depth and stencil aspects, and initialLayout is VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR or VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, the pNext chain must include a VkAttachmentDescriptionStencilLayoutKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription2KHR-format-03302)"},
+    {"VUID-VkAttachmentDescription2KHR-format-03303", "If format is a depth/stencil format which includes both depth and stencil aspects, and finalLayout is VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR or VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, the pNext chain must include a VkAttachmentDescriptionStencilLayoutKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription2KHR-format-03303)"},
+    {"VUID-VkAttachmentDescription2KHR-format-03304", "If format is a depth/stencil format which includes only the depth aspect, initialLayout must not be VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription2KHR-format-03304)"},
+    {"VUID-VkAttachmentDescription2KHR-format-03305", "If format is a depth/stencil format which includes only the depth aspect, finalLayout must not be VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription2KHR-format-03305)"},
+    {"VUID-VkAttachmentDescription2KHR-format-03306", "If format is a depth/stencil format which includes only the stencil aspect, initialLayout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR or VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription2KHR-format-03306)"},
+    {"VUID-VkAttachmentDescription2KHR-format-03307", "If format is a depth/stencil format which includes only the stencil aspect, finalLayout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR or VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription2KHR-format-03307)"},
+    {"VUID-VkAttachmentDescription2KHR-format-parameter", "format must be a valid VkFormat value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription2KHR-format-parameter)"},
+    {"VUID-VkAttachmentDescription2KHR-initialLayout-parameter", "initialLayout must be a valid VkImageLayout value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription2KHR-initialLayout-parameter)"},
+    {"VUID-VkAttachmentDescription2KHR-loadOp-parameter", "loadOp must be a valid VkAttachmentLoadOp value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription2KHR-loadOp-parameter)"},
+    {"VUID-VkAttachmentDescription2KHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription2KHR-sType-sType)"},
+    {"VUID-VkAttachmentDescription2KHR-samples-parameter", "samples must be a valid VkSampleCountFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription2KHR-samples-parameter)"},
+    {"VUID-VkAttachmentDescription2KHR-separateDepthStencilLayouts-03298", "If the separateDepthStencilLayouts feature is not enabled, initialLayout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription2KHR-separateDepthStencilLayouts-03298)"},
+    {"VUID-VkAttachmentDescription2KHR-separateDepthStencilLayouts-03299", "If the separateDepthStencilLayouts feature is not enabled, finalLayout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription2KHR-separateDepthStencilLayouts-03299)"},
+    {"VUID-VkAttachmentDescription2KHR-stencilLoadOp-parameter", "stencilLoadOp must be a valid VkAttachmentLoadOp value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription2KHR-stencilLoadOp-parameter)"},
+    {"VUID-VkAttachmentDescription2KHR-stencilStoreOp-parameter", "stencilStoreOp must be a valid VkAttachmentStoreOp value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription2KHR-stencilStoreOp-parameter)"},
+    {"VUID-VkAttachmentDescription2KHR-storeOp-parameter", "storeOp must be a valid VkAttachmentStoreOp value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescription2KHR-storeOp-parameter)"},
+    {"VUID-VkAttachmentDescriptionStencilLayoutKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescriptionStencilLayoutKHR-sType-sType)"},
+    {"VUID-VkAttachmentDescriptionStencilLayoutKHR-stencilFinalLayout-03309", "stencilFinalLayout must not be VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, or VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescriptionStencilLayoutKHR-stencilFinalLayout-03309)"},
+    {"VUID-VkAttachmentDescriptionStencilLayoutKHR-stencilFinalLayout-03310", "stencilFinalLayout must not be VK_IMAGE_LAYOUT_UNDEFINED or VK_IMAGE_LAYOUT_PREINITIALIZED (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescriptionStencilLayoutKHR-stencilFinalLayout-03310)"},
+    {"VUID-VkAttachmentDescriptionStencilLayoutKHR-stencilFinalLayout-parameter", "stencilFinalLayout must be a valid VkImageLayout value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescriptionStencilLayoutKHR-stencilFinalLayout-parameter)"},
+    {"VUID-VkAttachmentDescriptionStencilLayoutKHR-stencilInitialLayout-03308", "stencilInitialLayout must not be VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, or VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescriptionStencilLayoutKHR-stencilInitialLayout-03308)"},
+    {"VUID-VkAttachmentDescriptionStencilLayoutKHR-stencilInitialLayout-parameter", "stencilInitialLayout must be a valid VkImageLayout value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentDescriptionStencilLayoutKHR-stencilInitialLayout-parameter)"},
+    {"VUID-VkAttachmentReference-layout-00857", "If attachment is not VK_ATTACHMENT_UNUSED, layout must not be VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_PREINITIALIZED, VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentReference-layout-00857)"},
+    {"VUID-VkAttachmentReference-layout-parameter", "layout must be a valid VkImageLayout value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentReference-layout-parameter)"},
+    {"VUID-VkAttachmentReference2KHR-attachment-03311", "If attachment is not VK_ATTACHMENT_UNUSED, and aspectMask does not include VK_IMAGE_ASPECT_STENCIL_BIT or VK_IMAGE_ASPECT_DEPTH_BIT, layout must not be VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, or VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentReference2KHR-attachment-03311)"},
+    {"VUID-VkAttachmentReference2KHR-attachment-03312", "If attachment is not VK_ATTACHMENT_UNUSED, and aspectMask does not include VK_IMAGE_ASPECT_COLOR_BIT, layout must not be VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentReference2KHR-attachment-03312)"},
+    {"VUID-VkAttachmentReference2KHR-attachment-03314", "If attachment is not VK_ATTACHMENT_UNUSED, and aspectMask includes VK_IMAGE_ASPECT_COLOR_BIT, layout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR, (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentReference2KHR-attachment-03314)"},
+    {"VUID-VkAttachmentReference2KHR-attachment-03315", "If attachment is not VK_ATTACHMENT_UNUSED, and aspectMask includes both VK_IMAGE_ASPECT_DEPTH_BIT and VK_IMAGE_ASPECT_STENCIL_BIT, and layout is VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR or VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, the pNext chain must include a VkAttachmentReferenceStencilLayoutKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentReference2KHR-attachment-03315)"},
+    {"VUID-VkAttachmentReference2KHR-attachment-03316", "If attachment is not VK_ATTACHMENT_UNUSED, and aspectMask includes only VK_IMAGE_ASPECT_DEPTH_BIT then layout must not be VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentReference2KHR-attachment-03316)"},
+    {"VUID-VkAttachmentReference2KHR-attachment-03317", "If attachment is not VK_ATTACHMENT_UNUSED, and aspectMask includes only VK_IMAGE_ASPECT_STENCIL_BIT then layout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, or VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentReference2KHR-attachment-03317)"},
+    {"VUID-VkAttachmentReference2KHR-layout-03077", "If attachment is not VK_ATTACHMENT_UNUSED, layout must not be VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_PREINITIALIZED, or VK_IMAGE_LAYOUT_PRESENT_SRC_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentReference2KHR-layout-03077)"},
+    {"VUID-VkAttachmentReference2KHR-layout-parameter", "layout must be a valid VkImageLayout value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentReference2KHR-layout-parameter)"},
+    {"VUID-VkAttachmentReference2KHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentReference2KHR-sType-sType)"},
+    {"VUID-VkAttachmentReference2KHR-separateDepthStencilLayouts-03313", "If the separateDepthStencilLayouts feature is not enabled, and attachment is not VK_ATTACHMENT_UNUSED, layout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR, (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentReference2KHR-separateDepthStencilLayouts-03313)"},
+    {"VUID-VkAttachmentReferenceStencilLayoutKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentReferenceStencilLayoutKHR-sType-sType)"},
+    {"VUID-VkAttachmentReferenceStencilLayoutKHR-stencilLayout-03318", "stencilLayout must not be VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_PREINITIALIZED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, or VK_IMAGE_LAYOUT_PRESENT_SRC_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentReferenceStencilLayoutKHR-stencilLayout-03318)"},
+    {"VUID-VkAttachmentReferenceStencilLayoutKHR-stencilLayout-parameter", "stencilLayout must be a valid VkImageLayout value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentReferenceStencilLayoutKHR-stencilLayout-parameter)"},
+    {"VUID-VkAttachmentSampleLocationsEXT-attachmentIndex-01531", "attachmentIndex must be less than the attachmentCount specified in VkRenderPassCreateInfo the render pass specified by VkRenderPassBeginInfo::renderPass was created with (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentSampleLocationsEXT-attachmentIndex-01531)"},
+    {"VUID-VkAttachmentSampleLocationsEXT-sampleLocationsInfo-parameter", "sampleLocationsInfo must be a valid VkSampleLocationsInfoEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkAttachmentSampleLocationsEXT-sampleLocationsInfo-parameter)"},
+    {"VUID-VkBindAccelerationStructureMemoryInfoNV-accelerationStructure-02450", "accelerationStructure must not already be backed by a memory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindAccelerationStructureMemoryInfoNV-accelerationStructure-02450)"},
+    {"VUID-VkBindAccelerationStructureMemoryInfoNV-accelerationStructure-parameter", "accelerationStructure must be a valid VkAccelerationStructureNV handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindAccelerationStructureMemoryInfoNV-accelerationStructure-parameter)"},
+    {"VUID-VkBindAccelerationStructureMemoryInfoNV-commonparent", "Both of accelerationStructure, and memory must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindAccelerationStructureMemoryInfoNV-commonparent)"},
+    {"VUID-VkBindAccelerationStructureMemoryInfoNV-memory-02593", "memory must have been allocated using one of the memory types allowed in the memoryTypeBits member of the VkMemoryRequirements structure returned from a call to vkGetAccelerationStructureMemoryRequirementsNV with accelerationStructure and type of VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindAccelerationStructureMemoryInfoNV-memory-02593)"},
+    {"VUID-VkBindAccelerationStructureMemoryInfoNV-memory-parameter", "memory must be a valid VkDeviceMemory handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindAccelerationStructureMemoryInfoNV-memory-parameter)"},
+    {"VUID-VkBindAccelerationStructureMemoryInfoNV-memoryOffset-02451", "memoryOffset must be less than the size of memory (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindAccelerationStructureMemoryInfoNV-memoryOffset-02451)"},
+    {"VUID-VkBindAccelerationStructureMemoryInfoNV-memoryOffset-02594", "memoryOffset must be an integer multiple of the alignment member of the VkMemoryRequirements structure returned from a call to vkGetAccelerationStructureMemoryRequirementsNV with accelerationStructure and type of VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindAccelerationStructureMemoryInfoNV-memoryOffset-02594)"},
+    {"VUID-VkBindAccelerationStructureMemoryInfoNV-pDeviceIndices-parameter", "If deviceIndexCount is not 0, pDeviceIndices must be a valid pointer to an array of deviceIndexCount uint32_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindAccelerationStructureMemoryInfoNV-pDeviceIndices-parameter)"},
+    {"VUID-VkBindAccelerationStructureMemoryInfoNV-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindAccelerationStructureMemoryInfoNV-pNext-pNext)"},
+    {"VUID-VkBindAccelerationStructureMemoryInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindAccelerationStructureMemoryInfoNV-sType-sType)"},
+    {"VUID-VkBindAccelerationStructureMemoryInfoNV-size-02595", "The size member of the VkMemoryRequirements structure returned from a call to vkGetAccelerationStructureMemoryRequirementsNV with accelerationStructure and type of VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV must be less than or equal to the size of memory minus memoryOffset (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindAccelerationStructureMemoryInfoNV-size-02595)"},
+    {"VUID-VkBindBufferMemoryDeviceGroupInfo-deviceIndexCount-01606", "deviceIndexCount must either be zero or equal to the number of physical devices in the logical device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryDeviceGroupInfo-deviceIndexCount-01606)"},
+    {"VUID-VkBindBufferMemoryDeviceGroupInfo-pDeviceIndices-01607", "All elements of pDeviceIndices must be valid device indices (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryDeviceGroupInfo-pDeviceIndices-01607)"},
+    {"VUID-VkBindBufferMemoryDeviceGroupInfo-pDeviceIndices-parameter", "If deviceIndexCount is not 0, pDeviceIndices must be a valid pointer to an array of deviceIndexCount uint32_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryDeviceGroupInfo-pDeviceIndices-parameter)"},
+    {"VUID-VkBindBufferMemoryDeviceGroupInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryDeviceGroupInfo-sType-sType)"},
+    {"VUID-VkBindBufferMemoryInfo-buffer-01593", "buffer must not already be backed by a memory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryInfo-buffer-01593)"},
+    {"VUID-VkBindBufferMemoryInfo-buffer-01594", "buffer must not have been created with any sparse memory binding flags (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryInfo-buffer-01594)"},
+    {"VUID-VkBindBufferMemoryInfo-buffer-01602", "If buffer requires a dedicated allocation(as reported by vkGetBufferMemoryRequirements2 in VkMemoryDedicatedRequirements::requiresDedicatedAllocation for buffer), memory must have been created with VkMemoryDedicatedAllocateInfo::buffer equal to buffer and memoryOffset must be zero (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryInfo-buffer-01602)"},
+    {"VUID-VkBindBufferMemoryInfo-buffer-01603", "If buffer was created with VkDedicatedAllocationBufferCreateInfoNV::dedicatedAllocation equal to VK_TRUE, memory must have been created with VkDedicatedAllocationMemoryAllocateInfoNV::buffer equal to buffer and memoryOffset must be zero (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryInfo-buffer-01603)"},
+    {"VUID-VkBindBufferMemoryInfo-buffer-01604", "If buffer was not created with VkDedicatedAllocationBufferCreateInfoNV::dedicatedAllocation equal to VK_TRUE, memory must not have been allocated dedicated for a specific buffer or image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryInfo-buffer-01604)"},
+    {"VUID-VkBindBufferMemoryInfo-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryInfo-buffer-parameter)"},
+    {"VUID-VkBindBufferMemoryInfo-commonparent", "Both of buffer, and memory must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryInfo-commonparent)"},
+    {"VUID-VkBindBufferMemoryInfo-handleTypes-02791", "If the value of VkExportMemoryAllocateInfo::handleTypes used to allocate memory is not 0, it must include at least one of the handles set in VkExternalMemoryBufferCreateInfo::handleTypes when buffer was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryInfo-handleTypes-02791)"},
+    {"VUID-VkBindBufferMemoryInfo-memory-01599", "memory must have been allocated using one of the memory types allowed in the memoryTypeBits member of the VkMemoryRequirements structure returned from a call to vkGetBufferMemoryRequirements with buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryInfo-memory-01599)"},
+    {"VUID-VkBindBufferMemoryInfo-memory-01900", "If the VkMemoryAllocateInfo provided when memory was allocated included a VkMemoryDedicatedAllocateInfo structure in its pNext chain, and VkMemoryDedicatedAllocateInfo::buffer was not VK_NULL_HANDLE, then buffer must equal VkMemoryDedicatedAllocateInfo::buffer and memoryOffset must be zero. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryInfo-memory-01900)"},
+    {"VUID-VkBindBufferMemoryInfo-memory-02792", "If memory was created by a memory import operation, the external handle type of the imported memory must also have been set in VkExternalMemoryBufferCreateInfo::handleTypes when buffer was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryInfo-memory-02792)"},
+    {"VUID-VkBindBufferMemoryInfo-memory-parameter", "memory must be a valid VkDeviceMemory handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryInfo-memory-parameter)"},
+    {"VUID-VkBindBufferMemoryInfo-memoryOffset-01595", "memoryOffset must be less than the size of memory (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryInfo-memoryOffset-01595)"},
+    {"VUID-VkBindBufferMemoryInfo-memoryOffset-01600", "memoryOffset must be an integer multiple of the alignment member of the VkMemoryRequirements structure returned from a call to vkGetBufferMemoryRequirements with buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryInfo-memoryOffset-01600)"},
+    {"VUID-VkBindBufferMemoryInfo-pNext-01605", "If the pNext chain includes a VkBindBufferMemoryDeviceGroupInfo structure, all instances of memory specified by VkBindBufferMemoryDeviceGroupInfo::pDeviceIndices must have been allocated (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryInfo-pNext-01605)"},
+    {"VUID-VkBindBufferMemoryInfo-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkBindBufferMemoryDeviceGroupInfo (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryInfo-pNext-pNext)"},
+    {"VUID-VkBindBufferMemoryInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryInfo-sType-sType)"},
+    {"VUID-VkBindBufferMemoryInfo-size-01601", "The size member of the VkMemoryRequirements structure returned from a call to vkGetBufferMemoryRequirements with buffer must be less than or equal to the size of memory minus memoryOffset (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindBufferMemoryInfo-size-01601)"},
+    {"VUID-VkBindImageMemoryDeviceGroupInfo-deviceIndexCount-01633", "At least one of deviceIndexCount and splitInstanceBindRegionCount must be zero. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryDeviceGroupInfo-deviceIndexCount-01633)"},
+    {"VUID-VkBindImageMemoryDeviceGroupInfo-deviceIndexCount-01634", "deviceIndexCount must either be zero or equal to the number of physical devices in the logical device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryDeviceGroupInfo-deviceIndexCount-01634)"},
+    {"VUID-VkBindImageMemoryDeviceGroupInfo-extent-01640", "The extent.width member of any element of pSplitInstanceBindRegions must either be a multiple of the sparse image block width of all non-metadata aspects of the image, or else extent.width + offset.x must equal the width of the image subresource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryDeviceGroupInfo-extent-01640)"},
+    {"VUID-VkBindImageMemoryDeviceGroupInfo-extent-01641", "The extent.height member of any element of pSplitInstanceBindRegions must either be a multiple of the sparse image block height of all non-metadata aspects of the image, or else extent.height + offset.y must equal the width of the image subresource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryDeviceGroupInfo-extent-01641)"},
+    {"VUID-VkBindImageMemoryDeviceGroupInfo-offset-01638", "The offset.x member of any element of pSplitInstanceBindRegions must be a multiple of the sparse image block width (VkSparseImageFormatProperties::imageGranularity.width) of all non-metadata aspects of the image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryDeviceGroupInfo-offset-01638)"},
+    {"VUID-VkBindImageMemoryDeviceGroupInfo-offset-01639", "The offset.y member of any element of pSplitInstanceBindRegions must be a multiple of the sparse image block height (VkSparseImageFormatProperties::imageGranularity.height) of all non-metadata aspects of the image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryDeviceGroupInfo-offset-01639)"},
+    {"VUID-VkBindImageMemoryDeviceGroupInfo-pDeviceIndices-01635", "All elements of pDeviceIndices must be valid device indices. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryDeviceGroupInfo-pDeviceIndices-01635)"},
+    {"VUID-VkBindImageMemoryDeviceGroupInfo-pDeviceIndices-parameter", "If deviceIndexCount is not 0, pDeviceIndices must be a valid pointer to an array of deviceIndexCount uint32_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryDeviceGroupInfo-pDeviceIndices-parameter)"},
+    {"VUID-VkBindImageMemoryDeviceGroupInfo-pSplitInstanceBindRegions-01637", "Elements of pSplitInstanceBindRegions that correspond to the same instance of an image must not overlap. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryDeviceGroupInfo-pSplitInstanceBindRegions-01637)"},
+    {"VUID-VkBindImageMemoryDeviceGroupInfo-pSplitInstanceBindRegions-parameter", "If splitInstanceBindRegionCount is not 0, pSplitInstanceBindRegions must be a valid pointer to an array of splitInstanceBindRegionCount VkRect2D structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryDeviceGroupInfo-pSplitInstanceBindRegions-parameter)"},
+    {"VUID-VkBindImageMemoryDeviceGroupInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryDeviceGroupInfo-sType-sType)"},
+    {"VUID-VkBindImageMemoryDeviceGroupInfo-splitInstanceBindRegionCount-01636", "splitInstanceBindRegionCount must either be zero or equal to the number of physical devices in the logical device squared (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryDeviceGroupInfo-splitInstanceBindRegionCount-01636)"},
+    {"VUID-VkBindImageMemoryInfo-commonparent", "Both of image, and memory that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-commonparent)"},
+    {"VUID-VkBindImageMemoryInfo-handleTypes-02793", "If the value of VkExportMemoryAllocateInfo::handleTypes used to allocate memory is not 0, it must include at least one of the handles set in VkExternalMemoryImageCreateInfo::handleTypes when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-handleTypes-02793)"},
+    {"VUID-VkBindImageMemoryInfo-image-01609", "image must not already be backed by a memory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-image-01609)"},
+    {"VUID-VkBindImageMemoryInfo-image-01610", "image must not have been created with any sparse memory binding flags (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-image-01610)"},
+    {"VUID-VkBindImageMemoryInfo-image-01622", "If image requires a dedicated allocation (as reported by vkGetImageMemoryRequirements2 in VkMemoryDedicatedRequirements::requiresDedicatedAllocation for image), memory must have been created with VkMemoryDedicatedAllocateInfo::image equal to image and memoryOffset must be zero (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-image-01622)"},
+    {"VUID-VkBindImageMemoryInfo-image-01623", "If image was created with VkDedicatedAllocationImageCreateInfoNV::dedicatedAllocation equal to VK_TRUE, memory must have been created with VkDedicatedAllocationMemoryAllocateInfoNV::image equal to image and memoryOffset must be zero (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-image-01623)"},
+    {"VUID-VkBindImageMemoryInfo-image-01624", "If image was not created with VkDedicatedAllocationImageCreateInfoNV::dedicatedAllocation equal to VK_TRUE, memory must not have been allocated dedicated for a specific buffer or image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-image-01624)"},
+    {"VUID-VkBindImageMemoryInfo-image-01630", "If image was created with a valid swapchain handle in VkImageSwapchainCreateInfoKHR::swapchain, then the pNext chain must include a VkBindImageMemorySwapchainInfoKHR structure containing the same swapchain handle. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-image-01630)"},
+    {"VUID-VkBindImageMemoryInfo-image-parameter", "image must be a valid VkImage handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-image-parameter)"},
+    {"VUID-VkBindImageMemoryInfo-memory-01612", "memory must have been allocated using one of the memory types allowed in the memoryTypeBits member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements with image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-memory-01612)"},
+    {"VUID-VkBindImageMemoryInfo-memory-01614", "The difference of the size of memory and memoryOffset must be greater than or equal to the size member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements with the same image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-memory-01614)"},
+    {"VUID-VkBindImageMemoryInfo-memory-01625", "memory must be a valid VkDeviceMemory handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-memory-01625)"},
+    {"VUID-VkBindImageMemoryInfo-memory-01903", "If the VkMemoryAllocateInfo provided when memory was allocated included a VkMemoryDedicatedAllocateInfo structure in its pNext chain, and VkMemoryDedicatedAllocateInfo::image was not VK_NULL_HANDLE, then image must equal VkMemoryDedicatedAllocateInfo::image and memoryOffset must be zero. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-memory-01903)"},
+    {"VUID-VkBindImageMemoryInfo-memory-02630", "If the dedicated allocation image aliasing feature is not enabled, and the VkMemoryAllocateInfo provided when memory was allocated included a VkMemoryDedicatedAllocateInfo structure in its pNext chain, and VkMemoryDedicatedAllocateInfo::image was not VK_NULL_HANDLE, then image must equal VkMemoryDedicatedAllocateInfo::image and memoryOffset must be zero. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-memory-02630)"},
+    {"VUID-VkBindImageMemoryInfo-memory-02631", "If the dedicated allocation image aliasing feature is enabled, and the VkMemoryAllocateInfo provided when memory was allocated included a VkMemoryDedicatedAllocateInfo structure in its pNext chain, and VkMemoryDedicatedAllocateInfo::image was not VK_NULL_HANDLE, then memoryOffset must be zero, and image must be either equal to VkMemoryDedicatedAllocateInfo::image or an image that was created using the same parameters in VkImageCreateInfo, with the exception that extent and arrayLayers may differ subject to the following restrictions: every dimension in the extent parameter of the image being bound must be equal to or smaller than the original image for which the allocation was created; and the arrayLayers parameter of the image being bound must be equal to or smaller than the original image for which the allocation was created. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-memory-02631)"},
+    {"VUID-VkBindImageMemoryInfo-memory-02794", "If memory was created by a memory import operation, the external handle type of the imported memory must also have been set in VkExternalMemoryImageCreateInfo::handleTypes when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-memory-02794)"},
+    {"VUID-VkBindImageMemoryInfo-memoryOffset-01611", "memoryOffset must be less than the size of memory (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-memoryOffset-01611)"},
+    {"VUID-VkBindImageMemoryInfo-memoryOffset-01613", "memoryOffset must be an integer multiple of the alignment member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements with image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-memoryOffset-01613)"},
+    {"VUID-VkBindImageMemoryInfo-pNext-01615", "If the pNext chain does not include a VkBindImagePlaneMemoryInfo structure, memory must have been allocated using one of the memory types allowed in the memoryTypeBits member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements2 with image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-pNext-01615)"},
+    {"VUID-VkBindImageMemoryInfo-pNext-01616", "If the pNext chain does not include a VkBindImagePlaneMemoryInfo structure, memoryOffset must be an integer multiple of the alignment member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements2 with image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-pNext-01616)"},
+    {"VUID-VkBindImageMemoryInfo-pNext-01617", "If the pNext chain does not include a VkBindImagePlaneMemoryInfo structure, the difference of the size of memory and memoryOffset must be greater than or equal to the size member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements2 with the same image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-pNext-01617)"},
+    {"VUID-VkBindImageMemoryInfo-pNext-01618", "If the pNext chain includes a VkBindImagePlaneMemoryInfo structure, image must have been created with the VK_IMAGE_CREATE_DISJOINT_BIT bit set. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-pNext-01618)"},
+    {"VUID-VkBindImageMemoryInfo-pNext-01619", "If the pNext chain includes a VkBindImagePlaneMemoryInfo structure, memory must have been allocated using one of the memory types allowed in the memoryTypeBits member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements2 with image and the correct planeAspect for this plane in the VkImagePlaneMemoryRequirementsInfo structure included in the VkImageMemoryRequirementsInfo2 structure's pNext chain (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-pNext-01619)"},
+    {"VUID-VkBindImageMemoryInfo-pNext-01620", "If the pNext chain includes a VkBindImagePlaneMemoryInfo structure, memoryOffset must be an integer multiple of the alignment member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements2 with image and the correct planeAspect for this plane in the VkImagePlaneMemoryRequirementsInfo structure included in the VkImageMemoryRequirementsInfo2 structure's pNext chain (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-pNext-01620)"},
+    {"VUID-VkBindImageMemoryInfo-pNext-01621", "If the pNext chain includes a VkBindImagePlaneMemoryInfo structure, the difference of the size of memory and memoryOffset must be greater than or equal to the size member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements2 with the same image and the correct planeAspect for this plane in the VkImagePlaneMemoryRequirementsInfo structure included in the VkImageMemoryRequirementsInfo2 structure's pNext chain (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-pNext-01621)"},
+    {"VUID-VkBindImageMemoryInfo-pNext-01626", "If the pNext chain includes a VkBindImageMemoryDeviceGroupInfo structure, all instances of memory specified by VkBindImageMemoryDeviceGroupInfo::pDeviceIndices must have been allocated (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-pNext-01626)"},
+    {"VUID-VkBindImageMemoryInfo-pNext-01627", "If the pNext chain includes a VkBindImageMemoryDeviceGroupInfo structure, and VkBindImageMemoryDeviceGroupInfo::splitInstanceBindRegionCount is not zero, then image must have been created with the VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-pNext-01627)"},
+    {"VUID-VkBindImageMemoryInfo-pNext-01628", "If the pNext chain includes a VkBindImageMemoryDeviceGroupInfo structure, all elements of VkBindImageMemoryDeviceGroupInfo::pSplitInstanceBindRegions must be valid rectangles contained within the dimensions of image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-pNext-01628)"},
+    {"VUID-VkBindImageMemoryInfo-pNext-01629", "If the pNext chain includes a VkBindImageMemoryDeviceGroupInfo structure, the union of the areas of all elements of VkBindImageMemoryDeviceGroupInfo::pSplitInstanceBindRegions that correspond to the same instance of image must cover the entire image. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-pNext-01629)"},
+    {"VUID-VkBindImageMemoryInfo-pNext-01631", "If the pNext chain includes a VkBindImageMemorySwapchainInfoKHR structure, memory must be VK_NULL_HANDLE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-pNext-01631)"},
+    {"VUID-VkBindImageMemoryInfo-pNext-01632", "If the pNext chain does not include a VkBindImageMemorySwapchainInfoKHR structure, memory must be a valid VkDeviceMemory handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-pNext-01632)"},
+    {"VUID-VkBindImageMemoryInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkBindImageMemoryDeviceGroupInfo, VkBindImageMemorySwapchainInfoKHR, or VkBindImagePlaneMemoryInfo (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-pNext-pNext)"},
+    {"VUID-VkBindImageMemoryInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-sType-sType)"},
+    {"VUID-VkBindImageMemoryInfo-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemoryInfo-sType-unique)"},
+    {"VUID-VkBindImageMemorySwapchainInfoKHR-imageIndex-01644", "imageIndex must be less than the number of images in swapchain (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemorySwapchainInfoKHR-imageIndex-01644)"},
+    {"VUID-VkBindImageMemorySwapchainInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemorySwapchainInfoKHR-sType-sType)"},
+    {"VUID-VkBindImageMemorySwapchainInfoKHR-swapchain-parameter", "swapchain must be a valid VkSwapchainKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImageMemorySwapchainInfoKHR-swapchain-parameter)"},
+    {"VUID-VkBindImagePlaneMemoryInfo-None-01643", "A single call to vkBindImageMemory2 must bind all or none of the planes of an image (i.e. bindings to all planes of an image must be made in a single vkBindImageMemory2 call), as separate bindings (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImagePlaneMemoryInfo-None-01643)"},
+    {"VUID-VkBindImagePlaneMemoryInfo-planeAspect-02283", "If the image's tiling is VK_IMAGE_TILING_LINEAR or VK_IMAGE_TILING_OPTIMAL, then planeAspect must be a single valid format plane for the image. (That is, planeAspect must be VK_IMAGE_ASPECT_PLANE_0_BIT or VK_IMAGE_ASPECT_PLANE_1_BIT for '_2PLANE' formats and planeAspect must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT for '_3PLANE' formats.) (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImagePlaneMemoryInfo-planeAspect-02283)"},
+    {"VUID-VkBindImagePlaneMemoryInfo-planeAspect-02284", "If the image's tiling is VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT,  then planeAspect must be a single valid memory plane for the  image.  (That is, aspectMask must specify a plane index that is less than  the  drmFormatModifierPlaneCount  associated with the image's format and drmFormatModifier.) (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImagePlaneMemoryInfo-planeAspect-02284)"},
+    {"VUID-VkBindImagePlaneMemoryInfo-planeAspect-parameter", "planeAspect must be a valid VkImageAspectFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImagePlaneMemoryInfo-planeAspect-parameter)"},
+    {"VUID-VkBindImagePlaneMemoryInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindImagePlaneMemoryInfo-sType-sType)"},
+    {"VUID-VkBindSparseInfo-commonparent", "Both of the elements of pSignalSemaphores, and the elements of pWaitSemaphores that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindSparseInfo-commonparent)"},
+    {"VUID-VkBindSparseInfo-pBufferBinds-parameter", "If bufferBindCount is not 0, pBufferBinds must be a valid pointer to an array of bufferBindCount valid VkSparseBufferMemoryBindInfo structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindSparseInfo-pBufferBinds-parameter)"},
+    {"VUID-VkBindSparseInfo-pImageBinds-parameter", "If imageBindCount is not 0, pImageBinds must be a valid pointer to an array of imageBindCount valid VkSparseImageMemoryBindInfo structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindSparseInfo-pImageBinds-parameter)"},
+    {"VUID-VkBindSparseInfo-pImageOpaqueBinds-parameter", "If imageOpaqueBindCount is not 0, pImageOpaqueBinds must be a valid pointer to an array of imageOpaqueBindCount valid VkSparseImageOpaqueMemoryBindInfo structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindSparseInfo-pImageOpaqueBinds-parameter)"},
+    {"VUID-VkBindSparseInfo-pNext-03247", "If the pNext chain of this structure includes a VkTimelineSemaphoreSubmitInfoKHR structure and any element of pWaitSemaphores was created with a VkSemaphoreTypeKHR of VK_SEMAPHORE_TYPE_TIMELINE_KHR then its waitSemaphoreValueCount member must equal waitSemaphoreCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindSparseInfo-pNext-03247)"},
+    {"VUID-VkBindSparseInfo-pNext-03248", "If the pNext chain of this structure includes a VkTimelineSemaphoreSubmitInfoKHR structure and any element of pSignalSemaphores was created with a VkSemaphoreTypeKHR of VK_SEMAPHORE_TYPE_TIMELINE_KHR then its signalSemaphoreValueCount member must equal signalSemaphoreCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindSparseInfo-pNext-03248)"},
+    {"VUID-VkBindSparseInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDeviceGroupBindSparseInfo or VkTimelineSemaphoreSubmitInfoKHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindSparseInfo-pNext-pNext)"},
+    {"VUID-VkBindSparseInfo-pSignalSemaphores-03249", "For each element of pSignalSemaphores created with a VkSemaphoreTypeKHR of VK_SEMAPHORE_TYPE_TIMELINE_KHR the corresponding element of VkTimelineSemaphoreSubmitInfoKHR::pSignalSemaphoreValues must have a value greater than the current value of the semaphore when the semaphore signal operation is executed (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindSparseInfo-pSignalSemaphores-03249)"},
+    {"VUID-VkBindSparseInfo-pSignalSemaphores-03251", "For each element of pSignalSemaphores created with a VkSemaphoreTypeKHR of VK_SEMAPHORE_TYPE_TIMELINE_KHR the corresponding element of VkTimelineSemaphoreSubmitInfoKHR::pSignalSemaphoreValues must have a value which does not differ from the current value of the semaphore or from the value of any outstanding semaphore wait or signal operation on that semaphore by more than maxTimelineSemaphoreValueDifference. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindSparseInfo-pSignalSemaphores-03251)"},
+    {"VUID-VkBindSparseInfo-pSignalSemaphores-parameter", "If signalSemaphoreCount is not 0, pSignalSemaphores must be a valid pointer to an array of signalSemaphoreCount valid VkSemaphore handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindSparseInfo-pSignalSemaphores-parameter)"},
+    {"VUID-VkBindSparseInfo-pWaitSemaphores-03246", "If any element of pWaitSemaphores or pSignalSemaphores was created with a VkSemaphoreTypeKHR of VK_SEMAPHORE_TYPE_TIMELINE_KHR then the pNext chain must include a VkTimelineSemaphoreSubmitInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindSparseInfo-pWaitSemaphores-03246)"},
+    {"VUID-VkBindSparseInfo-pWaitSemaphores-03250", "For each element of pWaitSemaphores created with a VkSemaphoreTypeKHR of VK_SEMAPHORE_TYPE_TIMELINE_KHR the corresponding element of VkTimelineSemaphoreSubmitInfoKHR::pWaitSemaphoreValues must have a value which does not differ from the current value of the semaphore or from the value of any outstanding semaphore wait or signal operation on that semaphore by more than maxTimelineSemaphoreValueDifference. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindSparseInfo-pWaitSemaphores-03250)"},
+    {"VUID-VkBindSparseInfo-pWaitSemaphores-parameter", "If waitSemaphoreCount is not 0, pWaitSemaphores must be a valid pointer to an array of waitSemaphoreCount valid VkSemaphore handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindSparseInfo-pWaitSemaphores-parameter)"},
+    {"VUID-VkBindSparseInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_BIND_SPARSE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindSparseInfo-sType-sType)"},
+    {"VUID-VkBindSparseInfo-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBindSparseInfo-sType-unique)"},
+    {"VUID-VkBufferCopy-size-01988", "The size must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferCopy-size-01988)"},
+    {"VUID-VkBufferCreateInfo-None-01888", "If any of the bits VK_BUFFER_CREATE_SPARSE_BINDING_BIT, VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT, or VK_BUFFER_CREATE_SPARSE_ALIASED_BIT are set, VK_BUFFER_CREATE_PROTECTED_BIT must not also be set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferCreateInfo-None-01888)"},
+    {"VUID-VkBufferCreateInfo-deviceAddress-02604", "If VkBufferDeviceAddressCreateInfoEXT::deviceAddress is not zero, flags must include VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferCreateInfo-deviceAddress-02604)"},
+    {"VUID-VkBufferCreateInfo-flags-00915", "If the sparse bindings feature is not enabled, flags must not contain VK_BUFFER_CREATE_SPARSE_BINDING_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferCreateInfo-flags-00915)"},
+    {"VUID-VkBufferCreateInfo-flags-00916", "If the sparse buffer residency feature is not enabled, flags must not contain VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferCreateInfo-flags-00916)"},
+    {"VUID-VkBufferCreateInfo-flags-00917", "If the sparse aliased residency feature is not enabled, flags must not contain VK_BUFFER_CREATE_SPARSE_ALIASED_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferCreateInfo-flags-00917)"},
+    {"VUID-VkBufferCreateInfo-flags-00918", "If flags contains VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT or VK_BUFFER_CREATE_SPARSE_ALIASED_BIT, it must also contain VK_BUFFER_CREATE_SPARSE_BINDING_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferCreateInfo-flags-00918)"},
+    {"VUID-VkBufferCreateInfo-flags-01887", "If the protected memory feature is not enabled, flags must not contain VK_BUFFER_CREATE_PROTECTED_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferCreateInfo-flags-01887)"},
+    {"VUID-VkBufferCreateInfo-flags-03338", "If flags includes VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR, the bufferDeviceAddressCaptureReplay or VkPhysicalDeviceBufferDeviceAddressFeaturesEXT::bufferDeviceAddressCaptureReplay feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferCreateInfo-flags-03338)"},
+    {"VUID-VkBufferCreateInfo-flags-parameter", "flags must be a valid combination of VkBufferCreateFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferCreateInfo-flags-parameter)"},
+    {"VUID-VkBufferCreateInfo-opaqueCaptureAddress-03337", "If VkBufferOpaqueCaptureAddressCreateInfoKHR::opaqueCaptureAddress is not zero, flags must include VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferCreateInfo-opaqueCaptureAddress-03337)"},
+    {"VUID-VkBufferCreateInfo-pNext-00920", "If the pNext chain includes a VkExternalMemoryBufferCreateInfo structure, its handleTypes member must only contain bits that are also in VkExternalBufferProperties::externalMemoryProperties.compatibleHandleTypes, as returned by vkGetPhysicalDeviceExternalBufferProperties with pExternalBufferInfo->handleType equal to any one of the handle types specified in VkExternalMemoryBufferCreateInfo::handleTypes (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferCreateInfo-pNext-00920)"},
+    {"VUID-VkBufferCreateInfo-pNext-01571", "If the pNext chain includes a VkDedicatedAllocationBufferCreateInfoNV structure, and the dedicatedAllocation member of the chained structure is VK_TRUE, then flags must not include VK_BUFFER_CREATE_SPARSE_BINDING_BIT, VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT, or VK_BUFFER_CREATE_SPARSE_ALIASED_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferCreateInfo-pNext-01571)"},
+    {"VUID-VkBufferCreateInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkBufferDeviceAddressCreateInfoEXT, VkBufferOpaqueCaptureAddressCreateInfoKHR, VkDedicatedAllocationBufferCreateInfoNV, or VkExternalMemoryBufferCreateInfo (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferCreateInfo-pNext-pNext)"},
+    {"VUID-VkBufferCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferCreateInfo-sType-sType)"},
+    {"VUID-VkBufferCreateInfo-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferCreateInfo-sType-unique)"},
+    {"VUID-VkBufferCreateInfo-sharingMode-00913", "If sharingMode is VK_SHARING_MODE_CONCURRENT, pQueueFamilyIndices must be a valid pointer to an array of queueFamilyIndexCount uint32_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferCreateInfo-sharingMode-00913)"},
+    {"VUID-VkBufferCreateInfo-sharingMode-00914", "If sharingMode is VK_SHARING_MODE_CONCURRENT, queueFamilyIndexCount must be greater than 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferCreateInfo-sharingMode-00914)"},
+    {"VUID-VkBufferCreateInfo-sharingMode-01391", "If sharingMode is VK_SHARING_MODE_CONCURRENT, each element of pQueueFamilyIndices must be unique and must be less than pQueueFamilyPropertyCount returned by vkGetPhysicalDeviceQueueFamilyProperties for the physicalDevice that was used to create device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferCreateInfo-sharingMode-01391)"},
+    {"VUID-VkBufferCreateInfo-sharingMode-01419", "If sharingMode is VK_SHARING_MODE_CONCURRENT, each element of pQueueFamilyIndices must be unique and must be less than pQueueFamilyPropertyCount returned by either vkGetPhysicalDeviceQueueFamilyProperties or vkGetPhysicalDeviceQueueFamilyProperties2 for the physicalDevice that was used to create device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferCreateInfo-sharingMode-01419)"},
+    {"VUID-VkBufferCreateInfo-sharingMode-parameter", "sharingMode must be a valid VkSharingMode value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferCreateInfo-sharingMode-parameter)"},
+    {"VUID-VkBufferCreateInfo-size-00912", "size must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferCreateInfo-size-00912)"},
+    {"VUID-VkBufferCreateInfo-usage-parameter", "usage must be a valid combination of VkBufferUsageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferCreateInfo-usage-parameter)"},
+    {"VUID-VkBufferCreateInfo-usage-requiredbitmask", "usage must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferCreateInfo-usage-requiredbitmask)"},
+    {"VUID-VkBufferDeviceAddressCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferDeviceAddressCreateInfoEXT-sType-sType)"},
+    {"VUID-VkBufferDeviceAddressInfoKHR-buffer-02600", "If buffer is non-sparse and was not created with the VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR flag, then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferDeviceAddressInfoKHR-buffer-02600)"},
+    {"VUID-VkBufferDeviceAddressInfoKHR-buffer-02601", "buffer must have been created with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferDeviceAddressInfoKHR-buffer-02601)"},
+    {"VUID-VkBufferDeviceAddressInfoKHR-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferDeviceAddressInfoKHR-buffer-parameter)"},
+    {"VUID-VkBufferDeviceAddressInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferDeviceAddressInfoKHR-pNext-pNext)"},
+    {"VUID-VkBufferDeviceAddressInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferDeviceAddressInfoKHR-sType-sType)"},
+    {"VUID-VkBufferImageCopy-None-00214", "When copying to the depth aspect of an image subresource, the data in the source buffer must be in the range [0,1] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-None-00214)"},
+    {"VUID-VkBufferImageCopy-None-01735", "If the calling command's VkImage parameter is a compressed image, or a single-plane, '_422' image format, bufferRowLength must be a multiple of the compressed texel block width (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-None-01735)"},
+    {"VUID-VkBufferImageCopy-None-01736", "If the calling command's VkImage parameter is a compressed image, or a single-plane, '_422' image format, bufferImageHeight must be a multiple of the compressed texel block height (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-None-01736)"},
+    {"VUID-VkBufferImageCopy-None-01737", "If the calling command's VkImage parameter is a compressed image, or a single-plane, '_422' image format, all members of imageOffset must be a multiple of the corresponding dimensions of the compressed texel block (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-None-01737)"},
+    {"VUID-VkBufferImageCopy-None-01738", "If the calling command's VkImage parameter is a compressed image, or a single-plane, '_422' image format, bufferOffset must be a multiple of the compressed texel block size in bytes (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-None-01738)"},
+    {"VUID-VkBufferImageCopy-None-01739", "If the calling command's VkImage parameter is a compressed image, or a single-plane, '_422' image format, imageExtent.width must be a multiple of the compressed texel block width or (imageExtent.width + imageOffset.x) must equal the image subresource width (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-None-01739)"},
+    {"VUID-VkBufferImageCopy-None-01740", "If the calling command's VkImage parameter is a compressed image, or a single-plane, '_422' image format, imageExtent.height must be a multiple of the compressed texel block height or (imageExtent.height + imageOffset.y) must equal the image subresource height (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-None-01740)"},
+    {"VUID-VkBufferImageCopy-None-01741", "If the calling command's VkImage parameter is a compressed image, or a single-plane, '_422' image format, imageExtent.depth must be a multiple of the compressed texel block depth or (imageExtent.depth + imageOffset.z) must equal the image subresource depth (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-None-01741)"},
+    {"VUID-VkBufferImageCopy-aspectMask-00211", "The aspectMask member of imageSubresource must specify aspects present in the calling command's VkImage parameter (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-aspectMask-00211)"},
+    {"VUID-VkBufferImageCopy-aspectMask-00212", "The aspectMask member of imageSubresource must only have a single bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-aspectMask-00212)"},
+    {"VUID-VkBufferImageCopy-aspectMask-01560", "If the calling command's VkImage parameter's format is a multi-planar format, then the aspectMask member of imageSubresource must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT (with VK_IMAGE_ASPECT_PLANE_2_BIT valid only for image formats with three planes) (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-aspectMask-01560)"},
+    {"VUID-VkBufferImageCopy-baseArrayLayer-00213", "If the calling command's VkImage parameter is of VkImageType VK_IMAGE_TYPE_3D, the baseArrayLayer and layerCount members of imageSubresource must be 0 and 1, respectively (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-baseArrayLayer-00213)"},
+    {"VUID-VkBufferImageCopy-bufferImageHeight-00196", "bufferImageHeight must be 0, or greater than or equal to the height member of imageExtent (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-bufferImageHeight-00196)"},
+    {"VUID-VkBufferImageCopy-bufferImageHeight-00204", "If the calling command's VkImage parameter is a compressed image, bufferImageHeight must be a multiple of the compressed texel block height (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-bufferImageHeight-00204)"},
+    {"VUID-VkBufferImageCopy-bufferOffset-00193", "If the calling command's VkImage parameter's format is not a depth/stencil format, then bufferOffset must be a multiple of the format's texel block size. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-bufferOffset-00193)"},
+    {"VUID-VkBufferImageCopy-bufferOffset-00194", "bufferOffset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-bufferOffset-00194)"},
+    {"VUID-VkBufferImageCopy-bufferOffset-00206", "If the calling command's VkImage parameter is a compressed image, bufferOffset must be a multiple of the compressed texel block size in bytes (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-bufferOffset-00206)"},
+    {"VUID-VkBufferImageCopy-bufferOffset-01558", "If the calling command's VkImage parameter's format is not a depth/stencil format or a multi-planar format, then bufferOffset must be a multiple of the format's texel block size. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-bufferOffset-01558)"},
+    {"VUID-VkBufferImageCopy-bufferOffset-01559", "If the calling command's VkImage parameter's format is a multi-planar format, then bufferOffset must be a multiple of the element size of the compatible format for the format and the aspectMask of the imageSubresource as defined in Compatible formats of planes of multi-planar formats (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-bufferOffset-01559)"},
+    {"VUID-VkBufferImageCopy-bufferRowLength-00195", "bufferRowLength must be 0, or greater than or equal to the width member of imageExtent (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-bufferRowLength-00195)"},
+    {"VUID-VkBufferImageCopy-bufferRowLength-00203", "If the calling command's VkImage parameter is a compressed image, bufferRowLength must be a multiple of the compressed texel block width (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-bufferRowLength-00203)"},
+    {"VUID-VkBufferImageCopy-imageExtent-00207", "If the calling command's VkImage parameter is a compressed image, imageExtent.width must be a multiple of the compressed texel block width or (imageExtent.width + imageOffset.x) must equal the image subresource width (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-imageExtent-00207)"},
+    {"VUID-VkBufferImageCopy-imageExtent-00208", "If the calling command's VkImage parameter is a compressed image, imageExtent.height must be a multiple of the compressed texel block height or (imageExtent.height + imageOffset.y) must equal the image subresource height (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-imageExtent-00208)"},
+    {"VUID-VkBufferImageCopy-imageExtent-00209", "If the calling command's VkImage parameter is a compressed image, imageExtent.depth must be a multiple of the compressed texel block depth or (imageExtent.depth + imageOffset.z) must equal the image subresource depth (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-imageExtent-00209)"},
+    {"VUID-VkBufferImageCopy-imageOffset-00197", "imageOffset.x and (imageExtent.width + imageOffset.x) must both be greater than or equal to 0 and less than or equal to the image subresource width (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-imageOffset-00197)"},
+    {"VUID-VkBufferImageCopy-imageOffset-00198", "imageOffset.y and (imageExtent.height + imageOffset.y) must both be greater than or equal to 0 and less than or equal to the image subresource height (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-imageOffset-00198)"},
+    {"VUID-VkBufferImageCopy-imageOffset-00200", "imageOffset.z and (imageExtent.depth + imageOffset.z) must both be greater than or equal to 0 and less than or equal to the image subresource depth (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-imageOffset-00200)"},
+    {"VUID-VkBufferImageCopy-imageOffset-00205", "If the calling command's VkImage parameter is a compressed image, all members of imageOffset must be a multiple of the corresponding dimensions of the compressed texel block (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-imageOffset-00205)"},
+    {"VUID-VkBufferImageCopy-imageSubresource-parameter", "imageSubresource must be a valid VkImageSubresourceLayers structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-imageSubresource-parameter)"},
+    {"VUID-VkBufferImageCopy-srcImage-00199", "If the calling command's srcImage (vkCmdCopyImageToBuffer) or dstImage (vkCmdCopyBufferToImage) is of type VK_IMAGE_TYPE_1D, then imageOffset.y must be 0 and imageExtent.height must be 1. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-srcImage-00199)"},
+    {"VUID-VkBufferImageCopy-srcImage-00201", "If the calling command's srcImage (vkCmdCopyImageToBuffer) or dstImage (vkCmdCopyBufferToImage) is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then imageOffset.z must be 0 and imageExtent.depth must be 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferImageCopy-srcImage-00201)"},
+    {"VUID-VkBufferMemoryBarrier-buffer-01190", "If buffer was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, srcQueueFamilyIndex and dstQueueFamilyIndex must both be VK_QUEUE_FAMILY_IGNORED (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferMemoryBarrier-buffer-01190)"},
+    {"VUID-VkBufferMemoryBarrier-buffer-01191", "If buffer was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, at least one of srcQueueFamilyIndex and dstQueueFamilyIndex must be VK_QUEUE_FAMILY_IGNORED (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferMemoryBarrier-buffer-01191)"},
+    {"VUID-VkBufferMemoryBarrier-buffer-01192", "If buffer was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE, srcQueueFamilyIndex and dstQueueFamilyIndex must either both be VK_QUEUE_FAMILY_IGNORED, or both be a valid queue family (see Queue Family Properties) (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferMemoryBarrier-buffer-01192)"},
+    {"VUID-VkBufferMemoryBarrier-buffer-01193", "If buffer was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE and srcQueueFamilyIndex is VK_QUEUE_FAMILY_IGNORED, dstQueueFamilyIndex must also be VK_QUEUE_FAMILY_IGNORED (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferMemoryBarrier-buffer-01193)"},
+    {"VUID-VkBufferMemoryBarrier-buffer-01196", "If buffer was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE, and srcQueueFamilyIndex and dstQueueFamilyIndex are not VK_QUEUE_FAMILY_IGNORED, at least one of them must be the same as the family of the queue that will execute this barrier (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferMemoryBarrier-buffer-01196)"},
+    {"VUID-VkBufferMemoryBarrier-buffer-01763", "If buffer was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, and one of srcQueueFamilyIndex and dstQueueFamilyIndex is VK_QUEUE_FAMILY_IGNORED, the other must be VK_QUEUE_FAMILY_IGNORED or a special queue family reserved for external memory ownership transfers, as described in Queue Family Ownership Transfer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferMemoryBarrier-buffer-01763)"},
+    {"VUID-VkBufferMemoryBarrier-buffer-01764", "If buffer was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE and srcQueueFamilyIndex is not VK_QUEUE_FAMILY_IGNORED, it must be a valid queue family or a special queue family reserved for external memory transfers, as described in Queue Family Ownership Transfer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferMemoryBarrier-buffer-01764)"},
+    {"VUID-VkBufferMemoryBarrier-buffer-01765", "If buffer was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE and dstQueueFamilyIndex is not VK_QUEUE_FAMILY_IGNORED, it must be a valid queue family or a special queue family reserved for external memory transfers, as described in Queue Family Ownership Transfer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferMemoryBarrier-buffer-01765)"},
+    {"VUID-VkBufferMemoryBarrier-buffer-01931", "If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferMemoryBarrier-buffer-01931)"},
+    {"VUID-VkBufferMemoryBarrier-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferMemoryBarrier-buffer-parameter)"},
+    {"VUID-VkBufferMemoryBarrier-offset-01187", "offset must be less than the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferMemoryBarrier-offset-01187)"},
+    {"VUID-VkBufferMemoryBarrier-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferMemoryBarrier-pNext-pNext)"},
+    {"VUID-VkBufferMemoryBarrier-sType-sType", "sType must be VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferMemoryBarrier-sType-sType)"},
+    {"VUID-VkBufferMemoryBarrier-size-01188", "If size is not equal to VK_WHOLE_SIZE, size must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferMemoryBarrier-size-01188)"},
+    {"VUID-VkBufferMemoryBarrier-size-01189", "If size is not equal to VK_WHOLE_SIZE, size must be less than or equal to than the size of buffer minus offset (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferMemoryBarrier-size-01189)"},
+    {"VUID-VkBufferMemoryRequirementsInfo2-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferMemoryRequirementsInfo2-buffer-parameter)"},
+    {"VUID-VkBufferMemoryRequirementsInfo2-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferMemoryRequirementsInfo2-pNext-pNext)"},
+    {"VUID-VkBufferMemoryRequirementsInfo2-sType-sType", "sType must be VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferMemoryRequirementsInfo2-sType-sType)"},
+    {"VUID-VkBufferOpaqueCaptureAddressCreateInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferOpaqueCaptureAddressCreateInfoKHR-sType-sType)"},
+    {"VUID-VkBufferViewCreateInfo-buffer-00932", "buffer must have been created with a usage value containing at least one of VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT or VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-buffer-00932)"},
+    {"VUID-VkBufferViewCreateInfo-buffer-00933", "If buffer was created with usage containing VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, format must be supported for uniform texel buffers, as specified by the VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT flag in VkFormatProperties::bufferFeatures returned by vkGetPhysicalDeviceFormatProperties (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-buffer-00933)"},
+    {"VUID-VkBufferViewCreateInfo-buffer-00934", "If buffer was created with usage containing VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, format must be supported for storage texel buffers, as specified by the VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT flag in VkFormatProperties::bufferFeatures returned by vkGetPhysicalDeviceFormatProperties (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-buffer-00934)"},
+    {"VUID-VkBufferViewCreateInfo-buffer-00935", "If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-buffer-00935)"},
+    {"VUID-VkBufferViewCreateInfo-buffer-02750", "If the texelBufferAlignment feature is enabled and if buffer was created with usage containing VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, offset must be a multiple of the lesser of VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::storageTexelBufferOffsetAlignmentBytes or, if VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::storageTexelBufferOffsetSingleTexelAlignment is VK_TRUE, the size of a texel of the requested format. If the size of a texel is a multiple of three bytes, then the size of a single component of format is used instead (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-buffer-02750)"},
+    {"VUID-VkBufferViewCreateInfo-buffer-02751", "If the texelBufferAlignment feature is enabled and if buffer was created with usage containing VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, offset must be a multiple of the lesser of VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::uniformTexelBufferOffsetAlignmentBytes or, if VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT::uniformTexelBufferOffsetSingleTexelAlignment is VK_TRUE, the size of a texel of the requested format. If the size of a texel is a multiple of three bytes, then the size of a single component of format is used instead (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-buffer-02751)"},
+    {"VUID-VkBufferViewCreateInfo-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-buffer-parameter)"},
+    {"VUID-VkBufferViewCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-flags-zerobitmask)"},
+    {"VUID-VkBufferViewCreateInfo-format-parameter", "format must be a valid VkFormat value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-format-parameter)"},
+    {"VUID-VkBufferViewCreateInfo-offset-00925", "offset must be less than the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-offset-00925)"},
+    {"VUID-VkBufferViewCreateInfo-offset-00926", "offset must be a multiple of VkPhysicalDeviceLimits::minTexelBufferOffsetAlignment (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-offset-00926)"},
+    {"VUID-VkBufferViewCreateInfo-offset-00931", "If range is not equal to VK_WHOLE_SIZE, the sum of offset and range must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-offset-00931)"},
+    {"VUID-VkBufferViewCreateInfo-offset-02749", "If the texelBufferAlignment feature is not enabled, offset must be a multiple of VkPhysicalDeviceLimits::minTexelBufferOffsetAlignment (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-offset-02749)"},
+    {"VUID-VkBufferViewCreateInfo-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-pNext-pNext)"},
+    {"VUID-VkBufferViewCreateInfo-range-00928", "If range is not equal to VK_WHOLE_SIZE, range must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-range-00928)"},
+    {"VUID-VkBufferViewCreateInfo-range-00929", "If range is not equal to VK_WHOLE_SIZE, range must be an integer multiple of the texel block size of format (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-range-00929)"},
+    {"VUID-VkBufferViewCreateInfo-range-00930", "If range is not equal to VK_WHOLE_SIZE, range divided by the texel block size of format, multiplied by the number of texels per texel block for that format (as defined in the Compatible Formats table), must be less than or equal to VkPhysicalDeviceLimits::maxTexelBufferElements (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-range-00930)"},
+    {"VUID-VkBufferViewCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkBufferViewCreateInfo-sType-sType)"},
+    {"VUID-VkCalibratedTimestampInfoEXT-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCalibratedTimestampInfoEXT-pNext-pNext)"},
+    {"VUID-VkCalibratedTimestampInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCalibratedTimestampInfoEXT-sType-sType)"},
+    {"VUID-VkCalibratedTimestampInfoEXT-timeDomain-02354", "timeDomain must be one of the VkTimeDomainEXT values returned by vkGetPhysicalDeviceCalibrateableTimeDomainsEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCalibratedTimestampInfoEXT-timeDomain-02354)"},
+    {"VUID-VkCalibratedTimestampInfoEXT-timeDomain-parameter", "timeDomain must be a valid VkTimeDomainEXT value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCalibratedTimestampInfoEXT-timeDomain-parameter)"},
+    {"VUID-VkCheckpointDataNV-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCheckpointDataNV-pNext-pNext)"},
+    {"VUID-VkCheckpointDataNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCheckpointDataNV-sType-sType)"},
+    {"VUID-VkClearAttachment-aspectMask-00019", "If aspectMask includes VK_IMAGE_ASPECT_COLOR_BIT, it must not include VK_IMAGE_ASPECT_DEPTH_BIT or VK_IMAGE_ASPECT_STENCIL_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkClearAttachment-aspectMask-00019)"},
+    {"VUID-VkClearAttachment-aspectMask-00020", "aspectMask must not include VK_IMAGE_ASPECT_METADATA_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkClearAttachment-aspectMask-00020)"},
+    {"VUID-VkClearAttachment-aspectMask-02246", "aspectMask must not include VK_IMAGE_ASPECT_MEMORY_PLANE_i_BIT_EXT for any index i. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkClearAttachment-aspectMask-02246)"},
+    {"VUID-VkClearAttachment-aspectMask-parameter", "aspectMask must be a valid combination of VkImageAspectFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkClearAttachment-aspectMask-parameter)"},
+    {"VUID-VkClearAttachment-aspectMask-requiredbitmask", "aspectMask must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkClearAttachment-aspectMask-requiredbitmask)"},
+    {"VUID-VkClearAttachment-clearValue-00021", "clearValue must be a valid VkClearValue union (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkClearAttachment-clearValue-00021)"},
+    {"VUID-VkClearDepthStencilValue-depth-00022", "Unless the VK_EXT_depth_range_unrestricted extension is enabled depth must be between 0.0 and 1.0, inclusive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkClearDepthStencilValue-depth-00022)"},
+    {"VUID-VkClearDepthStencilValue-depth-02506", "depth must be between 0.0 and 1.0, inclusive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkClearDepthStencilValue-depth-02506)"},
+    {"VUID-VkCmdProcessCommandsInfoNVX-commonparent", "Each of indirectCommandsLayout, objectTable, sequencesCountBuffer, sequencesIndexBuffer, and targetCommandBuffer that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCmdProcessCommandsInfoNVX-commonparent)"},
+    {"VUID-VkCmdProcessCommandsInfoNVX-indirectCommandsLayout-parameter", "indirectCommandsLayout must be a valid VkIndirectCommandsLayoutNVX handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCmdProcessCommandsInfoNVX-indirectCommandsLayout-parameter)"},
+    {"VUID-VkCmdProcessCommandsInfoNVX-indirectCommandsTokenCount-01332", "indirectCommandsTokenCount must match the indirectCommandsLayout's tokenCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCmdProcessCommandsInfoNVX-indirectCommandsTokenCount-01332)"},
+    {"VUID-VkCmdProcessCommandsInfoNVX-indirectCommandsTokenCount-arraylength", "indirectCommandsTokenCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCmdProcessCommandsInfoNVX-indirectCommandsTokenCount-arraylength)"},
+    {"VUID-VkCmdProcessCommandsInfoNVX-objectTable-01331", "The provided objectTable must include all objects referenced by the generation process (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCmdProcessCommandsInfoNVX-objectTable-01331)"},
+    {"VUID-VkCmdProcessCommandsInfoNVX-objectTable-parameter", "objectTable must be a valid VkObjectTableNVX handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCmdProcessCommandsInfoNVX-objectTable-parameter)"},
+    {"VUID-VkCmdProcessCommandsInfoNVX-pIndirectCommandsTokens-parameter", "pIndirectCommandsTokens must be a valid pointer to an array of indirectCommandsTokenCount valid VkIndirectCommandsTokenNVX structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCmdProcessCommandsInfoNVX-pIndirectCommandsTokens-parameter)"},
+    {"VUID-VkCmdProcessCommandsInfoNVX-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCmdProcessCommandsInfoNVX-pNext-pNext)"},
+    {"VUID-VkCmdProcessCommandsInfoNVX-sType-sType", "sType must be VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCmdProcessCommandsInfoNVX-sType-sType)"},
+    {"VUID-VkCmdProcessCommandsInfoNVX-sequencesCountBuffer-01338", "If sequencesCountBuffer is used, its usage flag must have the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCmdProcessCommandsInfoNVX-sequencesCountBuffer-01338)"},
+    {"VUID-VkCmdProcessCommandsInfoNVX-sequencesCountBuffer-01339", "If sequencesCountBuffer is used, sequencesCountOffset must be aligned to VkDeviceGeneratedCommandsLimitsNVX::minSequenceCountBufferOffsetAlignment (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCmdProcessCommandsInfoNVX-sequencesCountBuffer-01339)"},
+    {"VUID-VkCmdProcessCommandsInfoNVX-sequencesCountBuffer-parameter", "If sequencesCountBuffer is not VK_NULL_HANDLE, sequencesCountBuffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCmdProcessCommandsInfoNVX-sequencesCountBuffer-parameter)"},
+    {"VUID-VkCmdProcessCommandsInfoNVX-sequencesIndexBuffer-01340", "If sequencesIndexBuffer is used, its usage flag must have the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCmdProcessCommandsInfoNVX-sequencesIndexBuffer-01340)"},
+    {"VUID-VkCmdProcessCommandsInfoNVX-sequencesIndexBuffer-01341", "If sequencesIndexBuffer is used, sequencesIndexOffset must be aligned to VkDeviceGeneratedCommandsLimitsNVX::minSequenceIndexBufferOffsetAlignment (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCmdProcessCommandsInfoNVX-sequencesIndexBuffer-01341)"},
+    {"VUID-VkCmdProcessCommandsInfoNVX-sequencesIndexBuffer-parameter", "If sequencesIndexBuffer is not VK_NULL_HANDLE, sequencesIndexBuffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCmdProcessCommandsInfoNVX-sequencesIndexBuffer-parameter)"},
+    {"VUID-VkCmdProcessCommandsInfoNVX-targetCommandBuffer-01334", "If targetCommandBuffer is provided, it must have reserved command space (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCmdProcessCommandsInfoNVX-targetCommandBuffer-01334)"},
+    {"VUID-VkCmdProcessCommandsInfoNVX-targetCommandBuffer-01335", "If targetCommandBuffer is provided, the objectTable must match the reservation's objectTable and must have had all referenced objects registered at reservation time (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCmdProcessCommandsInfoNVX-targetCommandBuffer-01335)"},
+    {"VUID-VkCmdProcessCommandsInfoNVX-targetCommandBuffer-01336", "If targetCommandBuffer is provided, the indirectCommandsLayout must match the reservation's indirectCommandsLayout (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCmdProcessCommandsInfoNVX-targetCommandBuffer-01336)"},
+    {"VUID-VkCmdProcessCommandsInfoNVX-targetCommandBuffer-01337", "If targetCommandBuffer is provided, the maxSequencesCount must not exceed the reservation's maxSequencesCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCmdProcessCommandsInfoNVX-targetCommandBuffer-01337)"},
+    {"VUID-VkCmdProcessCommandsInfoNVX-targetCommandBuffer-parameter", "If targetCommandBuffer is not NULL, targetCommandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCmdProcessCommandsInfoNVX-targetCommandBuffer-parameter)"},
+    {"VUID-VkCmdProcessCommandsInfoNVX-tokenType-01333", "The tokenType member of each entry in the pIndirectCommandsTokens array must match the values used at creation time of indirectCommandsLayout (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCmdProcessCommandsInfoNVX-tokenType-01333)"},
+    {"VUID-VkCmdReserveSpaceForCommandsInfoNVX-commonparent", "Both of indirectCommandsLayout, and objectTable must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCmdReserveSpaceForCommandsInfoNVX-commonparent)"},
+    {"VUID-VkCmdReserveSpaceForCommandsInfoNVX-indirectCommandsLayout-parameter", "indirectCommandsLayout must be a valid VkIndirectCommandsLayoutNVX handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCmdReserveSpaceForCommandsInfoNVX-indirectCommandsLayout-parameter)"},
+    {"VUID-VkCmdReserveSpaceForCommandsInfoNVX-objectTable-parameter", "objectTable must be a valid VkObjectTableNVX handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCmdReserveSpaceForCommandsInfoNVX-objectTable-parameter)"},
+    {"VUID-VkCmdReserveSpaceForCommandsInfoNVX-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCmdReserveSpaceForCommandsInfoNVX-pNext-pNext)"},
+    {"VUID-VkCmdReserveSpaceForCommandsInfoNVX-sType-sType", "sType must be VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCmdReserveSpaceForCommandsInfoNVX-sType-sType)"},
+    {"VUID-VkCoarseSampleLocationNV-pixelX-02078", "pixelX must be less than the width (in pixels) of the fragment. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCoarseSampleLocationNV-pixelX-02078)"},
+    {"VUID-VkCoarseSampleLocationNV-pixelY-02079", "pixelY must be less than the height (in pixels) of the fragment. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCoarseSampleLocationNV-pixelY-02079)"},
+    {"VUID-VkCoarseSampleLocationNV-sample-02080", "sample must be less than the number of coverage samples in each pixel belonging to the fragment. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCoarseSampleLocationNV-sample-02080)"},
+    {"VUID-VkCoarseSampleOrderCustomNV-pSampleLocations-02077", "The array pSampleLocations must contain exactly one entry for every combination of valid values for pixelX, pixelY, and sample in the structure VkCoarseSampleOrderCustomNV. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCoarseSampleOrderCustomNV-pSampleLocations-02077)"},
+    {"VUID-VkCoarseSampleOrderCustomNV-pSampleLocations-parameter", "pSampleLocations must be a valid pointer to an array of sampleLocationCount VkCoarseSampleLocationNV structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCoarseSampleOrderCustomNV-pSampleLocations-parameter)"},
+    {"VUID-VkCoarseSampleOrderCustomNV-sampleCount-02074", "sampleCount must correspond to a sample count enumerated in VkSampleCountFlags whose corresponding bit is set in VkPhysicalDeviceLimits::framebufferNoAttachmentsSampleCounts. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCoarseSampleOrderCustomNV-sampleCount-02074)"},
+    {"VUID-VkCoarseSampleOrderCustomNV-sampleLocationCount-02075", "sampleLocationCount must be equal to the product of sampleCount, the fragment width for shadingRate, and the fragment height for shadingRate. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCoarseSampleOrderCustomNV-sampleLocationCount-02075)"},
+    {"VUID-VkCoarseSampleOrderCustomNV-sampleLocationCount-02076", "sampleLocationCount must be less than or equal to the value of VkPhysicalDeviceShadingRateImagePropertiesNV::shadingRateMaxCoarseSamples. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCoarseSampleOrderCustomNV-sampleLocationCount-02076)"},
+    {"VUID-VkCoarseSampleOrderCustomNV-sampleLocationCount-arraylength", "sampleLocationCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCoarseSampleOrderCustomNV-sampleLocationCount-arraylength)"},
+    {"VUID-VkCoarseSampleOrderCustomNV-shadingRate-02073", "shadingRate must be a shading rate that generates fragments with more than one pixel. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCoarseSampleOrderCustomNV-shadingRate-02073)"},
+    {"VUID-VkCoarseSampleOrderCustomNV-shadingRate-parameter", "shadingRate must be a valid VkShadingRatePaletteEntryNV value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCoarseSampleOrderCustomNV-shadingRate-parameter)"},
+    {"VUID-VkCommandBufferAllocateInfo-commandBufferCount-00044", "commandBufferCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandBufferAllocateInfo-commandBufferCount-00044)"},
+    {"VUID-VkCommandBufferAllocateInfo-commandPool-parameter", "commandPool must be a valid VkCommandPool handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandBufferAllocateInfo-commandPool-parameter)"},
+    {"VUID-VkCommandBufferAllocateInfo-level-parameter", "level must be a valid VkCommandBufferLevel value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandBufferAllocateInfo-level-parameter)"},
+    {"VUID-VkCommandBufferAllocateInfo-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandBufferAllocateInfo-pNext-pNext)"},
+    {"VUID-VkCommandBufferAllocateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandBufferAllocateInfo-sType-sType)"},
+    {"VUID-VkCommandBufferBeginInfo-flags-00053", "If flags contains VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT, the renderPass member of pInheritanceInfo must be a valid VkRenderPass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandBufferBeginInfo-flags-00053)"},
+    {"VUID-VkCommandBufferBeginInfo-flags-00054", "If flags contains VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT, the subpass member of pInheritanceInfo must be a valid subpass index within the renderPass member of pInheritanceInfo (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandBufferBeginInfo-flags-00054)"},
+    {"VUID-VkCommandBufferBeginInfo-flags-00055", "If flags contains VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT, the framebuffer member of pInheritanceInfo must be either VK_NULL_HANDLE, or a valid VkFramebuffer that is compatible with the renderPass member of pInheritanceInfo (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandBufferBeginInfo-flags-00055)"},
+    {"VUID-VkCommandBufferBeginInfo-flags-parameter", "flags must be a valid combination of VkCommandBufferUsageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandBufferBeginInfo-flags-parameter)"},
+    {"VUID-VkCommandBufferBeginInfo-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkDeviceGroupCommandBufferBeginInfo (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandBufferBeginInfo-pNext-pNext)"},
+    {"VUID-VkCommandBufferBeginInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandBufferBeginInfo-sType-sType)"},
+    {"VUID-VkCommandBufferInheritanceConditionalRenderingInfoEXT-conditionalRenderingEnable-01977", "If the inherited conditional rendering feature is not enabled, conditionalRenderingEnable must be VK_FALSE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandBufferInheritanceConditionalRenderingInfoEXT-conditionalRenderingEnable-01977)"},
+    {"VUID-VkCommandBufferInheritanceConditionalRenderingInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandBufferInheritanceConditionalRenderingInfoEXT-sType-sType)"},
+    {"VUID-VkCommandBufferInheritanceInfo-commonparent", "Both of framebuffer, and renderPass that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandBufferInheritanceInfo-commonparent)"},
+    {"VUID-VkCommandBufferInheritanceInfo-occlusionQueryEnable-00056", "If the inherited queries feature is not enabled, occlusionQueryEnable must be VK_FALSE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandBufferInheritanceInfo-occlusionQueryEnable-00056)"},
+    {"VUID-VkCommandBufferInheritanceInfo-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkCommandBufferInheritanceConditionalRenderingInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandBufferInheritanceInfo-pNext-pNext)"},
+    {"VUID-VkCommandBufferInheritanceInfo-pipelineStatistics-00058", "If the pipeline statistics queries feature is not enabled, pipelineStatistics must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandBufferInheritanceInfo-pipelineStatistics-00058)"},
+    {"VUID-VkCommandBufferInheritanceInfo-pipelineStatistics-02789", "If the pipeline statistics queries feature is enabled, pipelineStatistics must be a valid combination of VkQueryPipelineStatisticFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandBufferInheritanceInfo-pipelineStatistics-02789)"},
+    {"VUID-VkCommandBufferInheritanceInfo-queryFlags-00057", "If the inherited queries feature is enabled, queryFlags must be a valid combination of VkQueryControlFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandBufferInheritanceInfo-queryFlags-00057)"},
+    {"VUID-VkCommandBufferInheritanceInfo-queryFlags-02788", "If the inherited queries feature is not enabled, queryFlags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandBufferInheritanceInfo-queryFlags-02788)"},
+    {"VUID-VkCommandBufferInheritanceInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandBufferInheritanceInfo-sType-sType)"},
+    {"VUID-VkCommandPoolCreateInfo-flags-parameter", "flags must be a valid combination of VkCommandPoolCreateFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandPoolCreateInfo-flags-parameter)"},
+    {"VUID-VkCommandPoolCreateInfo-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandPoolCreateInfo-pNext-pNext)"},
+    {"VUID-VkCommandPoolCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCommandPoolCreateInfo-sType-sType)"},
+    {"VUID-VkComponentMapping-a-parameter", "a must be a valid VkComponentSwizzle value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkComponentMapping-a-parameter)"},
+    {"VUID-VkComponentMapping-b-parameter", "b must be a valid VkComponentSwizzle value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkComponentMapping-b-parameter)"},
+    {"VUID-VkComponentMapping-g-parameter", "g must be a valid VkComponentSwizzle value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkComponentMapping-g-parameter)"},
+    {"VUID-VkComponentMapping-r-parameter", "r must be a valid VkComponentSwizzle value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkComponentMapping-r-parameter)"},
+    {"VUID-VkComputePipelineCreateInfo-commonparent", "Both of basePipelineHandle, and layout that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkComputePipelineCreateInfo-commonparent)"},
+    {"VUID-VkComputePipelineCreateInfo-flags-00697", "If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineIndex is -1, basePipelineHandle must be a valid handle to a compute VkPipeline (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkComputePipelineCreateInfo-flags-00697)"},
+    {"VUID-VkComputePipelineCreateInfo-flags-00698", "If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineHandle is VK_NULL_HANDLE, basePipelineIndex must be a valid index into the calling command's pCreateInfos parameter (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkComputePipelineCreateInfo-flags-00698)"},
+    {"VUID-VkComputePipelineCreateInfo-flags-00699", "If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineIndex is not -1, basePipelineHandle must be VK_NULL_HANDLE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkComputePipelineCreateInfo-flags-00699)"},
+    {"VUID-VkComputePipelineCreateInfo-flags-00700", "If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineHandle is not VK_NULL_HANDLE, basePipelineIndex must be -1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkComputePipelineCreateInfo-flags-00700)"},
+    {"VUID-VkComputePipelineCreateInfo-flags-parameter", "flags must be a valid combination of VkPipelineCreateFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkComputePipelineCreateInfo-flags-parameter)"},
+    {"VUID-VkComputePipelineCreateInfo-layout-00703", "layout must be consistent with the layout of the compute shader specified in stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkComputePipelineCreateInfo-layout-00703)"},
+    {"VUID-VkComputePipelineCreateInfo-layout-01687", "The number of resources in layout accessible to the compute shader stage must be less than or equal to VkPhysicalDeviceLimits::maxPerStageResources (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkComputePipelineCreateInfo-layout-01687)"},
+    {"VUID-VkComputePipelineCreateInfo-layout-parameter", "layout must be a valid VkPipelineLayout handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkComputePipelineCreateInfo-layout-parameter)"},
+    {"VUID-VkComputePipelineCreateInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPipelineCompilerControlCreateInfoAMD or VkPipelineCreationFeedbackCreateInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkComputePipelineCreateInfo-pNext-pNext)"},
+    {"VUID-VkComputePipelineCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkComputePipelineCreateInfo-sType-sType)"},
+    {"VUID-VkComputePipelineCreateInfo-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkComputePipelineCreateInfo-sType-unique)"},
+    {"VUID-VkComputePipelineCreateInfo-stage-00701", "The stage member of stage must be VK_SHADER_STAGE_COMPUTE_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkComputePipelineCreateInfo-stage-00701)"},
+    {"VUID-VkComputePipelineCreateInfo-stage-00702", "The shader code for the entry point identified by stage and the rest of the state identified by this structure must adhere to the pipeline linking rules described in the Shader Interfaces chapter (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkComputePipelineCreateInfo-stage-00702)"},
+    {"VUID-VkComputePipelineCreateInfo-stage-parameter", "stage must be a valid VkPipelineShaderStageCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkComputePipelineCreateInfo-stage-parameter)"},
+    {"VUID-VkConditionalRenderingBeginInfoEXT-buffer-01981", "If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkConditionalRenderingBeginInfoEXT-buffer-01981)"},
+    {"VUID-VkConditionalRenderingBeginInfoEXT-buffer-01982", "buffer must have been created with the VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkConditionalRenderingBeginInfoEXT-buffer-01982)"},
+    {"VUID-VkConditionalRenderingBeginInfoEXT-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkConditionalRenderingBeginInfoEXT-buffer-parameter)"},
+    {"VUID-VkConditionalRenderingBeginInfoEXT-flags-parameter", "flags must be a valid combination of VkConditionalRenderingFlagBitsEXT values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkConditionalRenderingBeginInfoEXT-flags-parameter)"},
+    {"VUID-VkConditionalRenderingBeginInfoEXT-offset-01983", "offset must be less than the size of buffer by at least 32 bits. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkConditionalRenderingBeginInfoEXT-offset-01983)"},
+    {"VUID-VkConditionalRenderingBeginInfoEXT-offset-01984", "offset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkConditionalRenderingBeginInfoEXT-offset-01984)"},
+    {"VUID-VkConditionalRenderingBeginInfoEXT-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkConditionalRenderingBeginInfoEXT-pNext-pNext)"},
+    {"VUID-VkConditionalRenderingBeginInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkConditionalRenderingBeginInfoEXT-sType-sType)"},
+    {"VUID-VkCooperativeMatrixPropertiesNV-AType-parameter", "AType must be a valid VkComponentTypeNV value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCooperativeMatrixPropertiesNV-AType-parameter)"},
+    {"VUID-VkCooperativeMatrixPropertiesNV-BType-parameter", "BType must be a valid VkComponentTypeNV value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCooperativeMatrixPropertiesNV-BType-parameter)"},
+    {"VUID-VkCooperativeMatrixPropertiesNV-CType-parameter", "CType must be a valid VkComponentTypeNV value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCooperativeMatrixPropertiesNV-CType-parameter)"},
+    {"VUID-VkCooperativeMatrixPropertiesNV-DType-parameter", "DType must be a valid VkComponentTypeNV value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCooperativeMatrixPropertiesNV-DType-parameter)"},
+    {"VUID-VkCooperativeMatrixPropertiesNV-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCooperativeMatrixPropertiesNV-pNext-pNext)"},
+    {"VUID-VkCooperativeMatrixPropertiesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCooperativeMatrixPropertiesNV-sType-sType)"},
+    {"VUID-VkCooperativeMatrixPropertiesNV-scope-parameter", "scope must be a valid VkScopeNV value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCooperativeMatrixPropertiesNV-scope-parameter)"},
+    {"VUID-VkCopyDescriptorSet-commonparent", "Both of dstSet, and srcSet must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCopyDescriptorSet-commonparent)"},
+    {"VUID-VkCopyDescriptorSet-dstArrayElement-00348", "The sum of dstArrayElement and descriptorCount must be less than or equal to the number of array elements in the descriptor set binding specified by dstBinding, and all applicable consecutive bindings, as described by consecutive binding updates (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCopyDescriptorSet-dstArrayElement-00348)"},
+    {"VUID-VkCopyDescriptorSet-dstBinding-00347", "dstBinding must be a valid binding within dstSet (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCopyDescriptorSet-dstBinding-00347)"},
+    {"VUID-VkCopyDescriptorSet-dstBinding-02224", "If the descriptor type of the descriptor set binding specified by dstBinding is VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, dstArrayElement must be an integer multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCopyDescriptorSet-dstBinding-02224)"},
+    {"VUID-VkCopyDescriptorSet-dstBinding-02632", "The type of dstBinding within dstSet must be equal to the type of srcBinding within srcSet (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCopyDescriptorSet-dstBinding-02632)"},
+    {"VUID-VkCopyDescriptorSet-dstBinding-02753", "If the descriptor type of the descriptor set binding specified by dstBinding is VK_DESCRIPTOR_TYPE_SAMPLER, then dstSet must not have been allocated with a layout that included immutable samplers for dstBinding (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCopyDescriptorSet-dstBinding-02753)"},
+    {"VUID-VkCopyDescriptorSet-dstSet-parameter", "dstSet must be a valid VkDescriptorSet handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCopyDescriptorSet-dstSet-parameter)"},
+    {"VUID-VkCopyDescriptorSet-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCopyDescriptorSet-pNext-pNext)"},
+    {"VUID-VkCopyDescriptorSet-sType-sType", "sType must be VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCopyDescriptorSet-sType-sType)"},
+    {"VUID-VkCopyDescriptorSet-srcArrayElement-00346", "The sum of srcArrayElement and descriptorCount must be less than or equal to the number of array elements in the descriptor set binding specified by srcBinding, and all applicable consecutive bindings, as described by consecutive binding updates (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCopyDescriptorSet-srcArrayElement-00346)"},
+    {"VUID-VkCopyDescriptorSet-srcBinding-00345", "srcBinding must be a valid binding within srcSet (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCopyDescriptorSet-srcBinding-00345)"},
+    {"VUID-VkCopyDescriptorSet-srcBinding-02223", "If the descriptor type of the descriptor set binding specified by srcBinding is VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, srcArrayElement must be an integer multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCopyDescriptorSet-srcBinding-02223)"},
+    {"VUID-VkCopyDescriptorSet-srcBinding-02225", "If the descriptor type of the descriptor set binding specified by either srcBinding or dstBinding is VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, descriptorCount must be an integer multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCopyDescriptorSet-srcBinding-02225)"},
+    {"VUID-VkCopyDescriptorSet-srcSet-00349", "If srcSet is equal to dstSet, then the source and destination ranges of descriptors must not overlap, where the ranges may include array elements from consecutive bindings as described by consecutive binding updates (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCopyDescriptorSet-srcSet-00349)"},
+    {"VUID-VkCopyDescriptorSet-srcSet-01918", "If srcSet's layout was created with the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT flag set, then dstSet's layout must also have been created with the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT flag set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCopyDescriptorSet-srcSet-01918)"},
+    {"VUID-VkCopyDescriptorSet-srcSet-01919", "If srcSet's layout was created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT flag set, then dstSet's layout must also have been created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT flag set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCopyDescriptorSet-srcSet-01919)"},
+    {"VUID-VkCopyDescriptorSet-srcSet-01920", "If the descriptor pool from which srcSet was allocated was created with the VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT flag set, then the descriptor pool from which dstSet was allocated must also have been created with the VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT flag set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCopyDescriptorSet-srcSet-01920)"},
+    {"VUID-VkCopyDescriptorSet-srcSet-01921", "If the descriptor pool from which srcSet was allocated was created without the VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT flag set, then the descriptor pool from which dstSet was allocated must also have been created without the VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT flag set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCopyDescriptorSet-srcSet-01921)"},
+    {"VUID-VkCopyDescriptorSet-srcSet-parameter", "srcSet must be a valid VkDescriptorSet handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkCopyDescriptorSet-srcSet-parameter)"},
+    {"VUID-VkD3D12FenceSubmitInfoKHR-pSignalSemaphoreValues-parameter", "If signalSemaphoreValuesCount is not 0, and pSignalSemaphoreValues is not NULL, pSignalSemaphoreValues must be a valid pointer to an array of signalSemaphoreValuesCount uint64_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkD3D12FenceSubmitInfoKHR-pSignalSemaphoreValues-parameter)"},
+    {"VUID-VkD3D12FenceSubmitInfoKHR-pWaitSemaphoreValues-parameter", "If waitSemaphoreValuesCount is not 0, and pWaitSemaphoreValues is not NULL, pWaitSemaphoreValues must be a valid pointer to an array of waitSemaphoreValuesCount uint64_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkD3D12FenceSubmitInfoKHR-pWaitSemaphoreValues-parameter)"},
+    {"VUID-VkD3D12FenceSubmitInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkD3D12FenceSubmitInfoKHR-sType-sType)"},
+    {"VUID-VkD3D12FenceSubmitInfoKHR-signalSemaphoreValuesCount-00080", "signalSemaphoreValuesCount must be the same value as VkSubmitInfo::signalSemaphoreCount, where VkSubmitInfo is in the pNext chain of this VkD3D12FenceSubmitInfoKHR structure. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkD3D12FenceSubmitInfoKHR-signalSemaphoreValuesCount-00080)"},
+    {"VUID-VkD3D12FenceSubmitInfoKHR-waitSemaphoreValuesCount-00079", "waitSemaphoreValuesCount must be the same value as VkSubmitInfo::waitSemaphoreCount, where VkSubmitInfo is in the pNext chain of this VkD3D12FenceSubmitInfoKHR structure. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkD3D12FenceSubmitInfoKHR-waitSemaphoreValuesCount-00079)"},
+    {"VUID-VkDebugMarkerMarkerInfoEXT-pMarkerName-parameter", "pMarkerName must be a null-terminated UTF-8 string (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugMarkerMarkerInfoEXT-pMarkerName-parameter)"},
+    {"VUID-VkDebugMarkerMarkerInfoEXT-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugMarkerMarkerInfoEXT-pNext-pNext)"},
+    {"VUID-VkDebugMarkerMarkerInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugMarkerMarkerInfoEXT-sType-sType)"},
+    {"VUID-VkDebugMarkerObjectNameInfoEXT-object-01491", "object must not be VK_NULL_HANDLE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugMarkerObjectNameInfoEXT-object-01491)"},
+    {"VUID-VkDebugMarkerObjectNameInfoEXT-object-01492", "object must be a Vulkan object of the type associated with objectType as defined in VkDebugReportObjectTypeEXT and Vulkan Handle Relationship. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugMarkerObjectNameInfoEXT-object-01492)"},
+    {"VUID-VkDebugMarkerObjectNameInfoEXT-objectType-01490", "objectType must not be VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugMarkerObjectNameInfoEXT-objectType-01490)"},
+    {"VUID-VkDebugMarkerObjectNameInfoEXT-objectType-parameter", "objectType must be a valid VkDebugReportObjectTypeEXT value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugMarkerObjectNameInfoEXT-objectType-parameter)"},
+    {"VUID-VkDebugMarkerObjectNameInfoEXT-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugMarkerObjectNameInfoEXT-pNext-pNext)"},
+    {"VUID-VkDebugMarkerObjectNameInfoEXT-pObjectName-parameter", "pObjectName must be a null-terminated UTF-8 string (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugMarkerObjectNameInfoEXT-pObjectName-parameter)"},
+    {"VUID-VkDebugMarkerObjectNameInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugMarkerObjectNameInfoEXT-sType-sType)"},
+    {"VUID-VkDebugMarkerObjectTagInfoEXT-object-01494", "object must not be VK_NULL_HANDLE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugMarkerObjectTagInfoEXT-object-01494)"},
+    {"VUID-VkDebugMarkerObjectTagInfoEXT-object-01495", "object must be a Vulkan object of the type associated with objectType as defined in VkDebugReportObjectTypeEXT and Vulkan Handle Relationship. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugMarkerObjectTagInfoEXT-object-01495)"},
+    {"VUID-VkDebugMarkerObjectTagInfoEXT-objectType-01493", "objectType must not be VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugMarkerObjectTagInfoEXT-objectType-01493)"},
+    {"VUID-VkDebugMarkerObjectTagInfoEXT-objectType-parameter", "objectType must be a valid VkDebugReportObjectTypeEXT value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugMarkerObjectTagInfoEXT-objectType-parameter)"},
+    {"VUID-VkDebugMarkerObjectTagInfoEXT-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugMarkerObjectTagInfoEXT-pNext-pNext)"},
+    {"VUID-VkDebugMarkerObjectTagInfoEXT-pTag-parameter", "pTag must be a valid pointer to an array of tagSize bytes (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugMarkerObjectTagInfoEXT-pTag-parameter)"},
+    {"VUID-VkDebugMarkerObjectTagInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugMarkerObjectTagInfoEXT-sType-sType)"},
+    {"VUID-VkDebugMarkerObjectTagInfoEXT-tagSize-arraylength", "tagSize must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugMarkerObjectTagInfoEXT-tagSize-arraylength)"},
+    {"VUID-VkDebugReportCallbackCreateInfoEXT-flags-parameter", "flags must be a valid combination of VkDebugReportFlagBitsEXT values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugReportCallbackCreateInfoEXT-flags-parameter)"},
+    {"VUID-VkDebugReportCallbackCreateInfoEXT-pfnCallback-parameter", "pfnCallback must be a valid PFN_vkDebugReportCallbackEXT value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugReportCallbackCreateInfoEXT-pfnCallback-parameter)"},
+    {"VUID-VkDebugReportCallbackCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugReportCallbackCreateInfoEXT-sType-sType)"},
+    {"VUID-VkDebugUtilsLabelEXT-pLabelName-parameter", "pLabelName must be a null-terminated UTF-8 string (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsLabelEXT-pLabelName-parameter)"},
+    {"VUID-VkDebugUtilsLabelEXT-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsLabelEXT-pNext-pNext)"},
+    {"VUID-VkDebugUtilsLabelEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsLabelEXT-sType-sType)"},
+    {"VUID-VkDebugUtilsMessengerCallbackDataEXT-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsMessengerCallbackDataEXT-flags-zerobitmask)"},
+    {"VUID-VkDebugUtilsMessengerCallbackDataEXT-pCmdBufLabels-parameter", "If cmdBufLabelCount is not 0, pCmdBufLabels must be a valid pointer to an array of cmdBufLabelCount valid VkDebugUtilsLabelEXT structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsMessengerCallbackDataEXT-pCmdBufLabels-parameter)"},
+    {"VUID-VkDebugUtilsMessengerCallbackDataEXT-pMessage-parameter", "pMessage must be a null-terminated UTF-8 string (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsMessengerCallbackDataEXT-pMessage-parameter)"},
+    {"VUID-VkDebugUtilsMessengerCallbackDataEXT-pMessageIdName-parameter", "If pMessageIdName is not NULL, pMessageIdName must be a null-terminated UTF-8 string (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsMessengerCallbackDataEXT-pMessageIdName-parameter)"},
+    {"VUID-VkDebugUtilsMessengerCallbackDataEXT-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsMessengerCallbackDataEXT-pNext-pNext)"},
+    {"VUID-VkDebugUtilsMessengerCallbackDataEXT-pObjects-parameter", "If objectCount is not 0, pObjects must be a valid pointer to an array of objectCount valid VkDebugUtilsObjectNameInfoEXT structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsMessengerCallbackDataEXT-pObjects-parameter)"},
+    {"VUID-VkDebugUtilsMessengerCallbackDataEXT-pQueueLabels-parameter", "If queueLabelCount is not 0, pQueueLabels must be a valid pointer to an array of queueLabelCount valid VkDebugUtilsLabelEXT structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsMessengerCallbackDataEXT-pQueueLabels-parameter)"},
+    {"VUID-VkDebugUtilsMessengerCallbackDataEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsMessengerCallbackDataEXT-sType-sType)"},
+    {"VUID-VkDebugUtilsMessengerCreateInfoEXT-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsMessengerCreateInfoEXT-flags-zerobitmask)"},
+    {"VUID-VkDebugUtilsMessengerCreateInfoEXT-messageSeverity-parameter", "messageSeverity must be a valid combination of VkDebugUtilsMessageSeverityFlagBitsEXT values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsMessengerCreateInfoEXT-messageSeverity-parameter)"},
+    {"VUID-VkDebugUtilsMessengerCreateInfoEXT-messageSeverity-requiredbitmask", "messageSeverity must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsMessengerCreateInfoEXT-messageSeverity-requiredbitmask)"},
+    {"VUID-VkDebugUtilsMessengerCreateInfoEXT-messageType-parameter", "messageType must be a valid combination of VkDebugUtilsMessageTypeFlagBitsEXT values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsMessengerCreateInfoEXT-messageType-parameter)"},
+    {"VUID-VkDebugUtilsMessengerCreateInfoEXT-messageType-requiredbitmask", "messageType must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsMessengerCreateInfoEXT-messageType-requiredbitmask)"},
+    {"VUID-VkDebugUtilsMessengerCreateInfoEXT-pfnUserCallback-01914", "pfnUserCallback must be a valid PFN_vkDebugUtilsMessengerCallbackEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsMessengerCreateInfoEXT-pfnUserCallback-01914)"},
+    {"VUID-VkDebugUtilsMessengerCreateInfoEXT-pfnUserCallback-parameter", "pfnUserCallback must be a valid PFN_vkDebugUtilsMessengerCallbackEXT value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsMessengerCreateInfoEXT-pfnUserCallback-parameter)"},
+    {"VUID-VkDebugUtilsMessengerCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsMessengerCreateInfoEXT-sType-sType)"},
+    {"VUID-VkDebugUtilsObjectNameInfoEXT-objectType-02589", "If objectType is VK_OBJECT_TYPE_UNKNOWN, objectHandle must not be VK_NULL_HANDLE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsObjectNameInfoEXT-objectType-02589)"},
+    {"VUID-VkDebugUtilsObjectNameInfoEXT-objectType-02590", "If objectType is not VK_OBJECT_TYPE_UNKNOWN, objectHandle must be VK_NULL_HANDLE or a valid Vulkan handle of the type associated with objectType as defined in the VkObjectType and Vulkan Handle Relationship table (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsObjectNameInfoEXT-objectType-02590)"},
+    {"VUID-VkDebugUtilsObjectNameInfoEXT-objectType-parameter", "objectType must be a valid VkObjectType value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsObjectNameInfoEXT-objectType-parameter)"},
+    {"VUID-VkDebugUtilsObjectNameInfoEXT-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsObjectNameInfoEXT-pNext-pNext)"},
+    {"VUID-VkDebugUtilsObjectNameInfoEXT-pObjectName-parameter", "If pObjectName is not NULL, pObjectName must be a null-terminated UTF-8 string (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsObjectNameInfoEXT-pObjectName-parameter)"},
+    {"VUID-VkDebugUtilsObjectNameInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsObjectNameInfoEXT-sType-sType)"},
+    {"VUID-VkDebugUtilsObjectTagInfoEXT-objectHandle-01910", "objectHandle must be a valid Vulkan handle of the type associated with objectType as defined in the VkObjectType and Vulkan Handle Relationship table (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsObjectTagInfoEXT-objectHandle-01910)"},
+    {"VUID-VkDebugUtilsObjectTagInfoEXT-objectType-01908", "objectType must not be VK_OBJECT_TYPE_UNKNOWN (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsObjectTagInfoEXT-objectType-01908)"},
+    {"VUID-VkDebugUtilsObjectTagInfoEXT-objectType-parameter", "objectType must be a valid VkObjectType value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsObjectTagInfoEXT-objectType-parameter)"},
+    {"VUID-VkDebugUtilsObjectTagInfoEXT-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsObjectTagInfoEXT-pNext-pNext)"},
+    {"VUID-VkDebugUtilsObjectTagInfoEXT-pTag-parameter", "pTag must be a valid pointer to an array of tagSize bytes (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsObjectTagInfoEXT-pTag-parameter)"},
+    {"VUID-VkDebugUtilsObjectTagInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsObjectTagInfoEXT-sType-sType)"},
+    {"VUID-VkDebugUtilsObjectTagInfoEXT-tagSize-arraylength", "tagSize must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDebugUtilsObjectTagInfoEXT-tagSize-arraylength)"},
+    {"VUID-VkDedicatedAllocationBufferCreateInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDedicatedAllocationBufferCreateInfoNV-sType-sType)"},
+    {"VUID-VkDedicatedAllocationImageCreateInfoNV-dedicatedAllocation-00994", "If dedicatedAllocation is VK_TRUE, VkImageCreateInfo::flags must not include VK_IMAGE_CREATE_SPARSE_BINDING_BIT, VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT, or VK_IMAGE_CREATE_SPARSE_ALIASED_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDedicatedAllocationImageCreateInfoNV-dedicatedAllocation-00994)"},
+    {"VUID-VkDedicatedAllocationImageCreateInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDedicatedAllocationImageCreateInfoNV-sType-sType)"},
+    {"VUID-VkDedicatedAllocationMemoryAllocateInfoNV-buffer-00651", "If buffer is not VK_NULL_HANDLE, the buffer must have been created with VkDedicatedAllocationBufferCreateInfoNV::dedicatedAllocation equal to VK_TRUE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDedicatedAllocationMemoryAllocateInfoNV-buffer-00651)"},
+    {"VUID-VkDedicatedAllocationMemoryAllocateInfoNV-buffer-00653", "If buffer is not VK_NULL_HANDLE, VkMemoryAllocateInfo::allocationSize must equal the VkMemoryRequirements::size of the buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDedicatedAllocationMemoryAllocateInfoNV-buffer-00653)"},
+    {"VUID-VkDedicatedAllocationMemoryAllocateInfoNV-buffer-00655", "If buffer is not VK_NULL_HANDLE and VkMemoryAllocateInfo defines a memory import operation, the memory being imported must also be a dedicated buffer allocation and buffer must be identical to the buffer associated with the imported memory. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDedicatedAllocationMemoryAllocateInfoNV-buffer-00655)"},
+    {"VUID-VkDedicatedAllocationMemoryAllocateInfoNV-buffer-parameter", "If buffer is not VK_NULL_HANDLE, buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDedicatedAllocationMemoryAllocateInfoNV-buffer-parameter)"},
+    {"VUID-VkDedicatedAllocationMemoryAllocateInfoNV-commonparent", "Both of buffer, and image that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDedicatedAllocationMemoryAllocateInfoNV-commonparent)"},
+    {"VUID-VkDedicatedAllocationMemoryAllocateInfoNV-image-00649", "At least one of image and buffer must be VK_NULL_HANDLE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDedicatedAllocationMemoryAllocateInfoNV-image-00649)"},
+    {"VUID-VkDedicatedAllocationMemoryAllocateInfoNV-image-00650", "If image is not VK_NULL_HANDLE, the image must have been created with VkDedicatedAllocationImageCreateInfoNV::dedicatedAllocation equal to VK_TRUE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDedicatedAllocationMemoryAllocateInfoNV-image-00650)"},
+    {"VUID-VkDedicatedAllocationMemoryAllocateInfoNV-image-00652", "If image is not VK_NULL_HANDLE, VkMemoryAllocateInfo::allocationSize must equal the VkMemoryRequirements::size of the image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDedicatedAllocationMemoryAllocateInfoNV-image-00652)"},
+    {"VUID-VkDedicatedAllocationMemoryAllocateInfoNV-image-00654", "If image is not VK_NULL_HANDLE and VkMemoryAllocateInfo defines a memory import operation, the memory being imported must also be a dedicated image allocation and image must be identical to the image associated with the imported memory. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDedicatedAllocationMemoryAllocateInfoNV-image-00654)"},
+    {"VUID-VkDedicatedAllocationMemoryAllocateInfoNV-image-parameter", "If image is not VK_NULL_HANDLE, image must be a valid VkImage handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDedicatedAllocationMemoryAllocateInfoNV-image-parameter)"},
+    {"VUID-VkDedicatedAllocationMemoryAllocateInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDedicatedAllocationMemoryAllocateInfoNV-sType-sType)"},
+    {"VUID-VkDescriptorBufferInfo-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorBufferInfo-buffer-parameter)"},
+    {"VUID-VkDescriptorBufferInfo-offset-00340", "offset must be less than the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorBufferInfo-offset-00340)"},
+    {"VUID-VkDescriptorBufferInfo-range-00341", "If range is not equal to VK_WHOLE_SIZE, range must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorBufferInfo-range-00341)"},
+    {"VUID-VkDescriptorBufferInfo-range-00342", "If range is not equal to VK_WHOLE_SIZE, range must be less than or equal to the size of buffer minus offset (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorBufferInfo-range-00342)"},
+    {"VUID-VkDescriptorImageInfo-commonparent", "Both of imageView, and sampler that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorImageInfo-commonparent)"},
+    {"VUID-VkDescriptorImageInfo-imageLayout-00344", "imageLayout must match the actual VkImageLayout of each subresource accessible from imageView at the time this descriptor is accessed as defined by the image layout matching rules (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorImageInfo-imageLayout-00344)"},
+    {"VUID-VkDescriptorImageInfo-imageView-00343", "imageView must not be 2D or 2D array image view created from a 3D image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorImageInfo-imageView-00343)"},
+    {"VUID-VkDescriptorImageInfo-imageView-01976", "If imageView is created from a depth/stencil image, the aspectMask used to create the imageView must include either VK_IMAGE_ASPECT_DEPTH_BIT or VK_IMAGE_ASPECT_STENCIL_BIT but not both. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorImageInfo-imageView-01976)"},
+    {"VUID-VkDescriptorImageInfo-sampler-01564", "If sampler is used and the VkFormat of the image is a multi-planar format, the image must have been created with VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT, and the aspectMask of the imageView must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT or (for three-plane formats only) VK_IMAGE_ASPECT_PLANE_2_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorImageInfo-sampler-01564)"},
+    {"VUID-VkDescriptorPoolCreateInfo-flags-parameter", "flags must be a valid combination of VkDescriptorPoolCreateFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorPoolCreateInfo-flags-parameter)"},
+    {"VUID-VkDescriptorPoolCreateInfo-maxSets-00301", "maxSets must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorPoolCreateInfo-maxSets-00301)"},
+    {"VUID-VkDescriptorPoolCreateInfo-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkDescriptorPoolInlineUniformBlockCreateInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorPoolCreateInfo-pNext-pNext)"},
+    {"VUID-VkDescriptorPoolCreateInfo-pPoolSizes-parameter", "pPoolSizes must be a valid pointer to an array of poolSizeCount valid VkDescriptorPoolSize structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorPoolCreateInfo-pPoolSizes-parameter)"},
+    {"VUID-VkDescriptorPoolCreateInfo-poolSizeCount-arraylength", "poolSizeCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorPoolCreateInfo-poolSizeCount-arraylength)"},
+    {"VUID-VkDescriptorPoolCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorPoolCreateInfo-sType-sType)"},
+    {"VUID-VkDescriptorPoolInlineUniformBlockCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorPoolInlineUniformBlockCreateInfoEXT-sType-sType)"},
+    {"VUID-VkDescriptorPoolSize-descriptorCount-00302", "descriptorCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorPoolSize-descriptorCount-00302)"},
+    {"VUID-VkDescriptorPoolSize-type-02218", "If type is VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT then descriptorCount must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorPoolSize-type-02218)"},
+    {"VUID-VkDescriptorPoolSize-type-parameter", "type must be a valid VkDescriptorType value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorPoolSize-type-parameter)"},
+    {"VUID-VkDescriptorSetAllocateInfo-commonparent", "Both of descriptorPool, and the elements of pSetLayouts must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetAllocateInfo-commonparent)"},
+    {"VUID-VkDescriptorSetAllocateInfo-descriptorPool-00307", "descriptorPool must have enough free descriptor capacity remaining to allocate the descriptor sets of the specified layouts (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetAllocateInfo-descriptorPool-00307)"},
+    {"VUID-VkDescriptorSetAllocateInfo-descriptorPool-parameter", "descriptorPool must be a valid VkDescriptorPool handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetAllocateInfo-descriptorPool-parameter)"},
+    {"VUID-VkDescriptorSetAllocateInfo-descriptorSetCount-00306", "descriptorSetCount must not be greater than the number of sets that are currently available for allocation in descriptorPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetAllocateInfo-descriptorSetCount-00306)"},
+    {"VUID-VkDescriptorSetAllocateInfo-descriptorSetCount-arraylength", "descriptorSetCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetAllocateInfo-descriptorSetCount-arraylength)"},
+    {"VUID-VkDescriptorSetAllocateInfo-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkDescriptorSetVariableDescriptorCountAllocateInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetAllocateInfo-pNext-pNext)"},
+    {"VUID-VkDescriptorSetAllocateInfo-pSetLayouts-00308", "Each element of pSetLayouts must not have been created with VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetAllocateInfo-pSetLayouts-00308)"},
+    {"VUID-VkDescriptorSetAllocateInfo-pSetLayouts-03044", "If any element of pSetLayouts was created with the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set, descriptorPool must have been created with the VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT flag set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetAllocateInfo-pSetLayouts-03044)"},
+    {"VUID-VkDescriptorSetAllocateInfo-pSetLayouts-parameter", "pSetLayouts must be a valid pointer to an array of descriptorSetCount valid VkDescriptorSetLayout handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetAllocateInfo-pSetLayouts-parameter)"},
+    {"VUID-VkDescriptorSetAllocateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetAllocateInfo-sType-sType)"},
+    {"VUID-VkDescriptorSetLayoutBinding-descriptorCount-00283", "If descriptorCount is not 0, stageFlags must be a valid combination of VkShaderStageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutBinding-descriptorCount-00283)"},
+    {"VUID-VkDescriptorSetLayoutBinding-descriptorType-00282", "If descriptorType is VK_DESCRIPTOR_TYPE_SAMPLER or VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, and descriptorCount is not 0 and pImmutableSamplers is not NULL, pImmutableSamplers must be a valid pointer to an array of descriptorCount valid VkSampler handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutBinding-descriptorType-00282)"},
+    {"VUID-VkDescriptorSetLayoutBinding-descriptorType-01510", "If descriptorType is VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT and descriptorCount is not 0, then stageFlags must be 0 or VK_SHADER_STAGE_FRAGMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutBinding-descriptorType-01510)"},
+    {"VUID-VkDescriptorSetLayoutBinding-descriptorType-02209", "If descriptorType is VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT then descriptorCount must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutBinding-descriptorType-02209)"},
+    {"VUID-VkDescriptorSetLayoutBinding-descriptorType-02210", "If descriptorType is VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT then descriptorCount must be less than or equal to VkPhysicalDeviceInlineUniformBlockPropertiesEXT::maxInlineUniformBlockSize (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutBinding-descriptorType-02210)"},
+    {"VUID-VkDescriptorSetLayoutBinding-descriptorType-parameter", "descriptorType must be a valid VkDescriptorType value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutBinding-descriptorType-parameter)"},
+    {"VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-None-03011", "All bindings with descriptor type VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, or VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC must not use VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-None-03011)"},
+    {"VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-bindingCount-03002", "If bindingCount is not zero, bindingCount must equal VkDescriptorSetLayoutCreateInfo::bindingCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-bindingCount-03002)"},
+    {"VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingInlineUniformBlockUpdateAfterBind-02211", "If VkPhysicalDeviceInlineUniformBlockFeaturesEXT::descriptorBindingInlineUniformBlockUpdateAfterBind is not enabled, all bindings with descriptor type VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT must not use VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingInlineUniformBlockUpdateAfterBind-02211)"},
+    {"VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingPartiallyBound-03013", "If VkPhysicalDeviceDescriptorIndexingFeaturesEXT::descriptorBindingPartiallyBound is not enabled, all elements of pBindingFlags must not include VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingPartiallyBound-03013)"},
+    {"VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingSampledImageUpdateAfterBind-03006", "If VkPhysicalDeviceDescriptorIndexingFeaturesEXT::descriptorBindingSampledImageUpdateAfterBind is not enabled, all bindings with descriptor type VK_DESCRIPTOR_TYPE_SAMPLER, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, or VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE must not use VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingSampledImageUpdateAfterBind-03006)"},
+    {"VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingStorageBufferUpdateAfterBind-03008", "If VkPhysicalDeviceDescriptorIndexingFeaturesEXT::descriptorBindingStorageBufferUpdateAfterBind is not enabled, all bindings with descriptor type VK_DESCRIPTOR_TYPE_STORAGE_BUFFER must not use VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingStorageBufferUpdateAfterBind-03008)"},
+    {"VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingStorageImageUpdateAfterBind-03007", "If VkPhysicalDeviceDescriptorIndexingFeaturesEXT::descriptorBindingStorageImageUpdateAfterBind is not enabled, all bindings with descriptor type VK_DESCRIPTOR_TYPE_STORAGE_IMAGE must not use VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingStorageImageUpdateAfterBind-03007)"},
+    {"VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingStorageTexelBufferUpdateAfterBind-03010", "If VkPhysicalDeviceDescriptorIndexingFeaturesEXT::descriptorBindingStorageTexelBufferUpdateAfterBind is not enabled, all bindings with descriptor type VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER must not use VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingStorageTexelBufferUpdateAfterBind-03010)"},
+    {"VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingUniformBufferUpdateAfterBind-03005", "If VkPhysicalDeviceDescriptorIndexingFeaturesEXT::descriptorBindingUniformBufferUpdateAfterBind is not enabled, all bindings with descriptor type VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER must not use VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingUniformBufferUpdateAfterBind-03005)"},
+    {"VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingUniformTexelBufferUpdateAfterBind-03009", "If VkPhysicalDeviceDescriptorIndexingFeaturesEXT::descriptorBindingUniformTexelBufferUpdateAfterBind is not enabled, all bindings with descriptor type VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER must not use VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingUniformTexelBufferUpdateAfterBind-03009)"},
+    {"VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingUpdateUnusedWhilePending-03012", "If VkPhysicalDeviceDescriptorIndexingFeaturesEXT::descriptorBindingUpdateUnusedWhilePending is not enabled, all elements of pBindingFlags must not include VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingUpdateUnusedWhilePending-03012)"},
+    {"VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingVariableDescriptorCount-03014", "If VkPhysicalDeviceDescriptorIndexingFeaturesEXT::descriptorBindingVariableDescriptorCount is not enabled, all elements of pBindingFlags must not include VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingVariableDescriptorCount-03014)"},
+    {"VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-flags-03003", "If VkDescriptorSetLayoutCreateInfo::flags includes VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, then all elements of pBindingFlags must not include VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT, VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT, or VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-flags-03003)"},
+    {"VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-pBindingFlags-03004", "If an element of pBindingFlags includes VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT, then all other elements of VkDescriptorSetLayoutCreateInfo::pBindings must have a smaller value of binding (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-pBindingFlags-03004)"},
+    {"VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-pBindingFlags-03015", "If an element of pBindingFlags includes VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT, that element's descriptorType must not be VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC or VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-pBindingFlags-03015)"},
+    {"VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-pBindingFlags-parameter", "If bindingCount is not 0, and pBindingFlags is not NULL, pBindingFlags must be a valid pointer to an array of bindingCount valid combinations of VkDescriptorBindingFlagBitsEXT values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-pBindingFlags-parameter)"},
+    {"VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-sType-sType)"},
+    {"VUID-VkDescriptorSetLayoutCreateInfo-binding-00279", "The VkDescriptorSetLayoutBinding::binding members of the elements of the pBindings array must each have different values. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutCreateInfo-binding-00279)"},
+    {"VUID-VkDescriptorSetLayoutCreateInfo-descriptorType-03001", "If any binding has the VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT bit set, then all bindings must not have descriptorType of VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC or VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutCreateInfo-descriptorType-03001)"},
+    {"VUID-VkDescriptorSetLayoutCreateInfo-flags-00280", "If flags contains VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, then all elements of pBindings must not have a descriptorType of VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC or VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutCreateInfo-flags-00280)"},
+    {"VUID-VkDescriptorSetLayoutCreateInfo-flags-00281", "If flags contains VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, then the total number of elements of all bindings must be less than or equal to VkPhysicalDevicePushDescriptorPropertiesKHR::maxPushDescriptors (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutCreateInfo-flags-00281)"},
+    {"VUID-VkDescriptorSetLayoutCreateInfo-flags-02208", "If flags contains VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, then all elements of pBindings must not have a descriptorType of VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutCreateInfo-flags-02208)"},
+    {"VUID-VkDescriptorSetLayoutCreateInfo-flags-03000", "If any binding has the VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT bit set, flags must include VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutCreateInfo-flags-03000)"},
+    {"VUID-VkDescriptorSetLayoutCreateInfo-flags-parameter", "flags must be a valid combination of VkDescriptorSetLayoutCreateFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutCreateInfo-flags-parameter)"},
+    {"VUID-VkDescriptorSetLayoutCreateInfo-pBindings-parameter", "If bindingCount is not 0, pBindings must be a valid pointer to an array of bindingCount valid VkDescriptorSetLayoutBinding structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutCreateInfo-pBindings-parameter)"},
+    {"VUID-VkDescriptorSetLayoutCreateInfo-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkDescriptorSetLayoutBindingFlagsCreateInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutCreateInfo-pNext-pNext)"},
+    {"VUID-VkDescriptorSetLayoutCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutCreateInfo-sType-sType)"},
+    {"VUID-VkDescriptorSetLayoutSupport-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkDescriptorSetVariableDescriptorCountLayoutSupportEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutSupport-pNext-pNext)"},
+    {"VUID-VkDescriptorSetLayoutSupport-sType-sType", "sType must be VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetLayoutSupport-sType-sType)"},
+    {"VUID-VkDescriptorSetVariableDescriptorCountAllocateInfoEXT-descriptorSetCount-03045", "If descriptorSetCount is not zero, descriptorSetCount must equal VkDescriptorSetAllocateInfo::descriptorSetCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetVariableDescriptorCountAllocateInfoEXT-descriptorSetCount-03045)"},
+    {"VUID-VkDescriptorSetVariableDescriptorCountAllocateInfoEXT-pDescriptorCounts-parameter", "If descriptorSetCount is not 0, pDescriptorCounts must be a valid pointer to an array of descriptorSetCount uint32_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetVariableDescriptorCountAllocateInfoEXT-pDescriptorCounts-parameter)"},
+    {"VUID-VkDescriptorSetVariableDescriptorCountAllocateInfoEXT-pSetLayouts-03046", "If VkDescriptorSetAllocateInfo::pSetLayouts[i] has a variable descriptor count binding, then pDescriptorCounts[i] must be less than or equal to the descriptor count specified for that binding when the descriptor set layout was created. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetVariableDescriptorCountAllocateInfoEXT-pSetLayouts-03046)"},
+    {"VUID-VkDescriptorSetVariableDescriptorCountAllocateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetVariableDescriptorCountAllocateInfoEXT-sType-sType)"},
+    {"VUID-VkDescriptorSetVariableDescriptorCountLayoutSupportEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorSetVariableDescriptorCountLayoutSupportEXT-sType-sType)"},
+    {"VUID-VkDescriptorUpdateTemplateCreateInfo-commonparent", "Both of descriptorSetLayout, and pipelineLayout that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorUpdateTemplateCreateInfo-commonparent)"},
+    {"VUID-VkDescriptorUpdateTemplateCreateInfo-descriptorUpdateEntryCount-arraylength", "descriptorUpdateEntryCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorUpdateTemplateCreateInfo-descriptorUpdateEntryCount-arraylength)"},
+    {"VUID-VkDescriptorUpdateTemplateCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorUpdateTemplateCreateInfo-flags-zerobitmask)"},
+    {"VUID-VkDescriptorUpdateTemplateCreateInfo-pDescriptorUpdateEntries-parameter", "pDescriptorUpdateEntries must be a valid pointer to an array of descriptorUpdateEntryCount valid VkDescriptorUpdateTemplateEntry structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorUpdateTemplateCreateInfo-pDescriptorUpdateEntries-parameter)"},
+    {"VUID-VkDescriptorUpdateTemplateCreateInfo-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorUpdateTemplateCreateInfo-pNext-pNext)"},
+    {"VUID-VkDescriptorUpdateTemplateCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorUpdateTemplateCreateInfo-sType-sType)"},
+    {"VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00350", "If templateType is VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, descriptorSetLayout must be a valid VkDescriptorSetLayout handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00350)"},
+    {"VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00351", "If templateType is VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR, pipelineBindPoint must be a valid VkPipelineBindPoint value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00351)"},
+    {"VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00352", "If templateType is VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR, pipelineLayout must be a valid VkPipelineLayout handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00352)"},
+    {"VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00353", "If templateType is VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR, set must be the unique set number in the pipeline layout that uses a descriptor set layout that was created with VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00353)"},
+    {"VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-parameter", "templateType must be a valid VkDescriptorUpdateTemplateType value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-parameter)"},
+    {"VUID-VkDescriptorUpdateTemplateEntry-descriptor-02226", "If descriptor type is VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, dstArrayElement must be an integer multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorUpdateTemplateEntry-descriptor-02226)"},
+    {"VUID-VkDescriptorUpdateTemplateEntry-descriptor-02227", "If descriptor type is VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, descriptorCount must be an integer multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorUpdateTemplateEntry-descriptor-02227)"},
+    {"VUID-VkDescriptorUpdateTemplateEntry-descriptorType-parameter", "descriptorType must be a valid VkDescriptorType value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorUpdateTemplateEntry-descriptorType-parameter)"},
+    {"VUID-VkDescriptorUpdateTemplateEntry-dstArrayElement-00355", "dstArrayElement and descriptorCount must be less than or equal to the number of array elements in the descriptor set binding implicitly specified when using a descriptor update template to update descriptors, and all applicable consecutive bindings, as described by consecutive binding updates (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorUpdateTemplateEntry-dstArrayElement-00355)"},
+    {"VUID-VkDescriptorUpdateTemplateEntry-dstBinding-00354", "dstBinding must be a valid binding in the descriptor set layout implicitly specified when using a descriptor update template to update descriptors. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDescriptorUpdateTemplateEntry-dstBinding-00354)"},
+    {"VUID-VkDeviceCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceCreateInfo-flags-zerobitmask)"},
+    {"VUID-VkDeviceCreateInfo-pEnabledFeatures-parameter", "If pEnabledFeatures is not NULL, pEnabledFeatures must be a valid pointer to a valid VkPhysicalDeviceFeatures structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceCreateInfo-pEnabledFeatures-parameter)"},
+    {"VUID-VkDeviceCreateInfo-pNext-00373", "If the pNext chain includes a VkPhysicalDeviceFeatures2 structure, then pEnabledFeatures must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceCreateInfo-pNext-00373)"},
+    {"VUID-VkDeviceCreateInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDeviceGroupDeviceCreateInfo, VkDeviceMemoryOverallocationCreateInfoAMD, VkPhysicalDevice16BitStorageFeatures, VkPhysicalDevice8BitStorageFeaturesKHR, VkPhysicalDeviceASTCDecodeFeaturesEXT, VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT, VkPhysicalDeviceBufferDeviceAddressFeaturesEXT, VkPhysicalDeviceBufferDeviceAddressFeaturesKHR, VkPhysicalDeviceCoherentMemoryFeaturesAMD, VkPhysicalDeviceComputeShaderDerivativesFeaturesNV, VkPhysicalDeviceConditionalRenderingFeaturesEXT, VkPhysicalDeviceCooperativeMatrixFeaturesNV, VkPhysicalDeviceCornerSampledImageFeaturesNV, VkPhysicalDeviceCoverageReductionModeFeaturesNV, VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, VkPhysicalDeviceDepthClipEnableFeaturesEXT, VkPhysicalDeviceDescriptorIndexingFeaturesEXT, VkPhysicalDeviceExclusiveScissorFeaturesNV, VkPhysicalDeviceFeatures2, VkPhysicalDeviceFragmentDensityMapFeaturesEXT, VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV, VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT, VkPhysicalDeviceHostQueryResetFeaturesEXT, VkPhysicalDeviceImagelessFramebufferFeaturesKHR, VkPhysicalDeviceIndexTypeUint8FeaturesEXT, VkPhysicalDeviceInlineUniformBlockFeaturesEXT, VkPhysicalDeviceLineRasterizationFeaturesEXT, VkPhysicalDeviceMemoryPriorityFeaturesEXT, VkPhysicalDeviceMeshShaderFeaturesNV, VkPhysicalDeviceMultiviewFeatures, VkPhysicalDevicePerformanceQueryFeaturesKHR, VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR, VkPhysicalDeviceProtectedMemoryFeatures, VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV, VkPhysicalDeviceSamplerYcbcrConversionFeatures, VkPhysicalDeviceScalarBlockLayoutFeaturesEXT, VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR, VkPhysicalDeviceShaderAtomicInt64FeaturesKHR, VkPhysicalDeviceShaderClockFeaturesKHR, VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT, VkPhysicalDeviceShaderDrawParametersFeatures, VkPhysicalDeviceShaderFloat16Int8FeaturesKHR, VkPhysicalDeviceShaderImageFootprintFeaturesNV, VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, VkPhysicalDeviceShaderSMBuiltinsFeaturesNV, VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR, VkPhysicalDeviceShadingRateImageFeaturesNV, VkPhysicalDeviceSubgroupSizeControlFeaturesEXT, VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT, VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT, VkPhysicalDeviceTimelineSemaphoreFeaturesKHR, VkPhysicalDeviceTransformFeedbackFeaturesEXT, VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR, VkPhysicalDeviceVariablePointersFeatures, VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT, VkPhysicalDeviceVulkanMemoryModelFeaturesKHR, or VkPhysicalDeviceYcbcrImageArraysFeaturesEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceCreateInfo-pNext-pNext)"},
+    {"VUID-VkDeviceCreateInfo-pQueueCreateInfos-parameter", "pQueueCreateInfos must be a valid pointer to an array of queueCreateInfoCount valid VkDeviceQueueCreateInfo structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceCreateInfo-pQueueCreateInfos-parameter)"},
+    {"VUID-VkDeviceCreateInfo-ppEnabledExtensionNames-00374", "ppEnabledExtensionNames must not contain both VK_KHR_maintenance1 and VK_AMD_negative_viewport_height (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceCreateInfo-ppEnabledExtensionNames-00374)"},
+    {"VUID-VkDeviceCreateInfo-ppEnabledExtensionNames-01840", "ppEnabledExtensionNames must not contain VK_AMD_negative_viewport_height (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceCreateInfo-ppEnabledExtensionNames-01840)"},
+    {"VUID-VkDeviceCreateInfo-ppEnabledExtensionNames-03328", "ppEnabledExtensionNames must not contain both VK_KHR_buffer_device_address and VK_EXT_buffer_device_address (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceCreateInfo-ppEnabledExtensionNames-03328)"},
+    {"VUID-VkDeviceCreateInfo-ppEnabledExtensionNames-parameter", "If enabledExtensionCount is not 0, ppEnabledExtensionNames must be a valid pointer to an array of enabledExtensionCount null-terminated UTF-8 strings (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceCreateInfo-ppEnabledExtensionNames-parameter)"},
+    {"VUID-VkDeviceCreateInfo-ppEnabledLayerNames-parameter", "If enabledLayerCount is not 0, ppEnabledLayerNames must be a valid pointer to an array of enabledLayerCount null-terminated UTF-8 strings (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceCreateInfo-ppEnabledLayerNames-parameter)"},
+    {"VUID-VkDeviceCreateInfo-queueCreateInfoCount-arraylength", "queueCreateInfoCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceCreateInfo-queueCreateInfoCount-arraylength)"},
+    {"VUID-VkDeviceCreateInfo-queueFamilyIndex-00372", "The queueFamilyIndex member of each element of pQueueCreateInfos must be unique within pQueueCreateInfos (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceCreateInfo-queueFamilyIndex-00372)"},
+    {"VUID-VkDeviceCreateInfo-queueFamilyIndex-02802", "The queueFamilyIndex member of each element of pQueueCreateInfos must be unique within pQueueCreateInfos, except that two members can share the same queueFamilyIndex if one is a protected-capable queue and one is not a protected-capable queue. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceCreateInfo-queueFamilyIndex-02802)"},
+    {"VUID-VkDeviceCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceCreateInfo-sType-sType)"},
+    {"VUID-VkDeviceCreateInfo-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceCreateInfo-sType-unique)"},
+    {"VUID-VkDeviceEventInfoEXT-deviceEvent-parameter", "deviceEvent must be a valid VkDeviceEventTypeEXT value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceEventInfoEXT-deviceEvent-parameter)"},
+    {"VUID-VkDeviceEventInfoEXT-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceEventInfoEXT-pNext-pNext)"},
+    {"VUID-VkDeviceEventInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceEventInfoEXT-sType-sType)"},
+    {"VUID-VkDeviceGeneratedCommandsFeaturesNVX-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGeneratedCommandsFeaturesNVX-pNext-pNext)"},
+    {"VUID-VkDeviceGeneratedCommandsFeaturesNVX-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGeneratedCommandsFeaturesNVX-sType-sType)"},
+    {"VUID-VkDeviceGeneratedCommandsLimitsNVX-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGeneratedCommandsLimitsNVX-pNext-pNext)"},
+    {"VUID-VkDeviceGeneratedCommandsLimitsNVX-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGeneratedCommandsLimitsNVX-sType-sType)"},
+    {"VUID-VkDeviceGroupBindSparseInfo-memoryDeviceIndex-01119", "Each memory allocation bound in this batch must have allocated an instance for memoryDeviceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupBindSparseInfo-memoryDeviceIndex-01119)"},
+    {"VUID-VkDeviceGroupBindSparseInfo-resourceDeviceIndex-01118", "resourceDeviceIndex and memoryDeviceIndex must both be valid device indices. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupBindSparseInfo-resourceDeviceIndex-01118)"},
+    {"VUID-VkDeviceGroupBindSparseInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupBindSparseInfo-sType-sType)"},
+    {"VUID-VkDeviceGroupCommandBufferBeginInfo-deviceMask-00106", "deviceMask must be a valid device mask value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupCommandBufferBeginInfo-deviceMask-00106)"},
+    {"VUID-VkDeviceGroupCommandBufferBeginInfo-deviceMask-00107", "deviceMask must not be zero (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupCommandBufferBeginInfo-deviceMask-00107)"},
+    {"VUID-VkDeviceGroupCommandBufferBeginInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupCommandBufferBeginInfo-sType-sType)"},
+    {"VUID-VkDeviceGroupDeviceCreateInfo-pPhysicalDevices-00375", "Each element of pPhysicalDevices must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupDeviceCreateInfo-pPhysicalDevices-00375)"},
+    {"VUID-VkDeviceGroupDeviceCreateInfo-pPhysicalDevices-00376", "All elements of pPhysicalDevices must be in the same device group as enumerated by vkEnumeratePhysicalDeviceGroups (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupDeviceCreateInfo-pPhysicalDevices-00376)"},
+    {"VUID-VkDeviceGroupDeviceCreateInfo-pPhysicalDevices-parameter", "If physicalDeviceCount is not 0, pPhysicalDevices must be a valid pointer to an array of physicalDeviceCount valid VkPhysicalDevice handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupDeviceCreateInfo-pPhysicalDevices-parameter)"},
+    {"VUID-VkDeviceGroupDeviceCreateInfo-physicalDeviceCount-00377", "If physicalDeviceCount is not 0, the physicalDevice parameter of vkCreateDevice must be an element of pPhysicalDevices. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupDeviceCreateInfo-physicalDeviceCount-00377)"},
+    {"VUID-VkDeviceGroupDeviceCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupDeviceCreateInfo-sType-sType)"},
+    {"VUID-VkDeviceGroupPresentCapabilitiesKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupPresentCapabilitiesKHR-pNext-pNext)"},
+    {"VUID-VkDeviceGroupPresentCapabilitiesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupPresentCapabilitiesKHR-sType-sType)"},
+    {"VUID-VkDeviceGroupPresentInfoKHR-mode-01298", "If mode is VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR, then each element of pDeviceMasks must have exactly one bit set, and the corresponding element of VkDeviceGroupPresentCapabilitiesKHR::presentMask must be non-zero (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupPresentInfoKHR-mode-01298)"},
+    {"VUID-VkDeviceGroupPresentInfoKHR-mode-01299", "If mode is VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR, then each element of pDeviceMasks must have exactly one bit set, and some physical device in the logical device must include that bit in its VkDeviceGroupPresentCapabilitiesKHR::presentMask. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupPresentInfoKHR-mode-01299)"},
+    {"VUID-VkDeviceGroupPresentInfoKHR-mode-01300", "If mode is VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR, then each element of pDeviceMasks must have a value for which all set bits are set in one of the elements of VkDeviceGroupPresentCapabilitiesKHR::presentMask (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupPresentInfoKHR-mode-01300)"},
+    {"VUID-VkDeviceGroupPresentInfoKHR-mode-01301", "If mode is VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR, then for each bit set in each element of pDeviceMasks, the corresponding element of VkDeviceGroupPresentCapabilitiesKHR::presentMask must be non-zero (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupPresentInfoKHR-mode-01301)"},
+    {"VUID-VkDeviceGroupPresentInfoKHR-mode-01303", "mode must have exactly one bit set, and that bit must have been included in VkDeviceGroupSwapchainCreateInfoKHR::modes (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupPresentInfoKHR-mode-01303)"},
+    {"VUID-VkDeviceGroupPresentInfoKHR-mode-parameter", "mode must be a valid VkDeviceGroupPresentModeFlagBitsKHR value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupPresentInfoKHR-mode-parameter)"},
+    {"VUID-VkDeviceGroupPresentInfoKHR-pDeviceMasks-01302", "The value of each element of pDeviceMasks must be equal to the device mask passed in VkAcquireNextImageInfoKHR::deviceMask when the image index was last acquired (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupPresentInfoKHR-pDeviceMasks-01302)"},
+    {"VUID-VkDeviceGroupPresentInfoKHR-pDeviceMasks-parameter", "If swapchainCount is not 0, pDeviceMasks must be a valid pointer to an array of swapchainCount uint32_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupPresentInfoKHR-pDeviceMasks-parameter)"},
+    {"VUID-VkDeviceGroupPresentInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupPresentInfoKHR-sType-sType)"},
+    {"VUID-VkDeviceGroupPresentInfoKHR-swapchainCount-01297", "swapchainCount must equal 0 or VkPresentInfoKHR::swapchainCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupPresentInfoKHR-swapchainCount-01297)"},
+    {"VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00905", "deviceMask must be a valid device mask value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00905)"},
+    {"VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00906", "deviceMask must not be zero (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00906)"},
+    {"VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00907", "deviceMask must be a subset of the command buffer's initial device mask (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00907)"},
+    {"VUID-VkDeviceGroupRenderPassBeginInfo-deviceRenderAreaCount-00908", "deviceRenderAreaCount must either be zero or equal to the number of physical devices in the logical device. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupRenderPassBeginInfo-deviceRenderAreaCount-00908)"},
+    {"VUID-VkDeviceGroupRenderPassBeginInfo-pDeviceRenderAreas-parameter", "If deviceRenderAreaCount is not 0, pDeviceRenderAreas must be a valid pointer to an array of deviceRenderAreaCount VkRect2D structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupRenderPassBeginInfo-pDeviceRenderAreas-parameter)"},
+    {"VUID-VkDeviceGroupRenderPassBeginInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupRenderPassBeginInfo-sType-sType)"},
+    {"VUID-VkDeviceGroupSubmitInfo-commandBufferCount-00083", "commandBufferCount must equal VkSubmitInfo::commandBufferCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupSubmitInfo-commandBufferCount-00083)"},
+    {"VUID-VkDeviceGroupSubmitInfo-pCommandBufferDeviceMasks-00086", "All elements of pCommandBufferDeviceMasks must be valid device masks (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupSubmitInfo-pCommandBufferDeviceMasks-00086)"},
+    {"VUID-VkDeviceGroupSubmitInfo-pCommandBufferDeviceMasks-parameter", "If commandBufferCount is not 0, pCommandBufferDeviceMasks must be a valid pointer to an array of commandBufferCount uint32_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupSubmitInfo-pCommandBufferDeviceMasks-parameter)"},
+    {"VUID-VkDeviceGroupSubmitInfo-pSignalSemaphoreDeviceIndices-parameter", "If signalSemaphoreCount is not 0, pSignalSemaphoreDeviceIndices must be a valid pointer to an array of signalSemaphoreCount uint32_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupSubmitInfo-pSignalSemaphoreDeviceIndices-parameter)"},
+    {"VUID-VkDeviceGroupSubmitInfo-pWaitSemaphoreDeviceIndices-00085", "All elements of pWaitSemaphoreDeviceIndices and pSignalSemaphoreDeviceIndices must be valid device indices (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupSubmitInfo-pWaitSemaphoreDeviceIndices-00085)"},
+    {"VUID-VkDeviceGroupSubmitInfo-pWaitSemaphoreDeviceIndices-parameter", "If waitSemaphoreCount is not 0, pWaitSemaphoreDeviceIndices must be a valid pointer to an array of waitSemaphoreCount uint32_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupSubmitInfo-pWaitSemaphoreDeviceIndices-parameter)"},
+    {"VUID-VkDeviceGroupSubmitInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupSubmitInfo-sType-sType)"},
+    {"VUID-VkDeviceGroupSubmitInfo-signalSemaphoreCount-00084", "signalSemaphoreCount must equal VkSubmitInfo::signalSemaphoreCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupSubmitInfo-signalSemaphoreCount-00084)"},
+    {"VUID-VkDeviceGroupSubmitInfo-waitSemaphoreCount-00082", "waitSemaphoreCount must equal VkSubmitInfo::waitSemaphoreCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupSubmitInfo-waitSemaphoreCount-00082)"},
+    {"VUID-VkDeviceGroupSwapchainCreateInfoKHR-modes-parameter", "modes must be a valid combination of VkDeviceGroupPresentModeFlagBitsKHR values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupSwapchainCreateInfoKHR-modes-parameter)"},
+    {"VUID-VkDeviceGroupSwapchainCreateInfoKHR-modes-requiredbitmask", "modes must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupSwapchainCreateInfoKHR-modes-requiredbitmask)"},
+    {"VUID-VkDeviceGroupSwapchainCreateInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceGroupSwapchainCreateInfoKHR-sType-sType)"},
+    {"VUID-VkDeviceMemoryOpaqueCaptureAddressInfoKHR-memory-03336", "memory must have been allocated with VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceMemoryOpaqueCaptureAddressInfoKHR-memory-03336)"},
+    {"VUID-VkDeviceMemoryOpaqueCaptureAddressInfoKHR-memory-parameter", "memory must be a valid VkDeviceMemory handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceMemoryOpaqueCaptureAddressInfoKHR-memory-parameter)"},
+    {"VUID-VkDeviceMemoryOpaqueCaptureAddressInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceMemoryOpaqueCaptureAddressInfoKHR-pNext-pNext)"},
+    {"VUID-VkDeviceMemoryOpaqueCaptureAddressInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceMemoryOpaqueCaptureAddressInfoKHR-sType-sType)"},
+    {"VUID-VkDeviceMemoryOverallocationCreateInfoAMD-overallocationBehavior-parameter", "overallocationBehavior must be a valid VkMemoryOverallocationBehaviorAMD value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceMemoryOverallocationCreateInfoAMD-overallocationBehavior-parameter)"},
+    {"VUID-VkDeviceMemoryOverallocationCreateInfoAMD-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceMemoryOverallocationCreateInfoAMD-sType-sType)"},
+    {"VUID-VkDeviceQueueCreateInfo-flags-parameter", "flags must be a valid combination of VkDeviceQueueCreateFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceQueueCreateInfo-flags-parameter)"},
+    {"VUID-VkDeviceQueueCreateInfo-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkDeviceQueueGlobalPriorityCreateInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceQueueCreateInfo-pNext-pNext)"},
+    {"VUID-VkDeviceQueueCreateInfo-pQueuePriorities-00383", "Each element of pQueuePriorities must be between 0.0 and 1.0 inclusive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceQueueCreateInfo-pQueuePriorities-00383)"},
+    {"VUID-VkDeviceQueueCreateInfo-pQueuePriorities-parameter", "pQueuePriorities must be a valid pointer to an array of queueCount float values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceQueueCreateInfo-pQueuePriorities-parameter)"},
+    {"VUID-VkDeviceQueueCreateInfo-queueCount-00382", "queueCount must be less than or equal to the queueCount member of the VkQueueFamilyProperties structure, as returned by vkGetPhysicalDeviceQueueFamilyProperties in the pQueueFamilyProperties[queueFamilyIndex] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceQueueCreateInfo-queueCount-00382)"},
+    {"VUID-VkDeviceQueueCreateInfo-queueCount-arraylength", "queueCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceQueueCreateInfo-queueCount-arraylength)"},
+    {"VUID-VkDeviceQueueCreateInfo-queueFamilyIndex-00381", "queueFamilyIndex must be less than pQueueFamilyPropertyCount returned by vkGetPhysicalDeviceQueueFamilyProperties (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceQueueCreateInfo-queueFamilyIndex-00381)"},
+    {"VUID-VkDeviceQueueCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceQueueCreateInfo-sType-sType)"},
+    {"VUID-VkDeviceQueueGlobalPriorityCreateInfoEXT-globalPriority-parameter", "globalPriority must be a valid VkQueueGlobalPriorityEXT value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceQueueGlobalPriorityCreateInfoEXT-globalPriority-parameter)"},
+    {"VUID-VkDeviceQueueGlobalPriorityCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceQueueGlobalPriorityCreateInfoEXT-sType-sType)"},
+    {"VUID-VkDeviceQueueInfo2-flags-parameter", "flags must be a valid combination of VkDeviceQueueCreateFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceQueueInfo2-flags-parameter)"},
+    {"VUID-VkDeviceQueueInfo2-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceQueueInfo2-pNext-pNext)"},
+    {"VUID-VkDeviceQueueInfo2-queueFamilyIndex-01842", "queueFamilyIndex must be one of the queue family indices specified when device was created, via the VkDeviceQueueCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceQueueInfo2-queueFamilyIndex-01842)"},
+    {"VUID-VkDeviceQueueInfo2-queueIndex-01843", "queueIndex must be less than the number of queues created for the specified queue family index and VkDeviceQueueCreateFlags member flags equal to this flags value when device was created, via the queueCount member of the VkDeviceQueueCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceQueueInfo2-queueIndex-01843)"},
+    {"VUID-VkDeviceQueueInfo2-sType-sType", "sType must be VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDeviceQueueInfo2-sType-sType)"},
+    {"VUID-VkDispatchIndirectCommand-x-00417", "x must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[0] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDispatchIndirectCommand-x-00417)"},
+    {"VUID-VkDispatchIndirectCommand-y-00418", "y must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[1] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDispatchIndirectCommand-y-00418)"},
+    {"VUID-VkDispatchIndirectCommand-z-00419", "z must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[2] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDispatchIndirectCommand-z-00419)"},
+    {"VUID-VkDisplayEventInfoEXT-displayEvent-parameter", "displayEvent must be a valid VkDisplayEventTypeEXT value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayEventInfoEXT-displayEvent-parameter)"},
+    {"VUID-VkDisplayEventInfoEXT-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayEventInfoEXT-pNext-pNext)"},
+    {"VUID-VkDisplayEventInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayEventInfoEXT-sType-sType)"},
+    {"VUID-VkDisplayModeCreateInfoKHR-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayModeCreateInfoKHR-flags-zerobitmask)"},
+    {"VUID-VkDisplayModeCreateInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayModeCreateInfoKHR-pNext-pNext)"},
+    {"VUID-VkDisplayModeCreateInfoKHR-parameters-parameter", "parameters must be a valid VkDisplayModeParametersKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayModeCreateInfoKHR-parameters-parameter)"},
+    {"VUID-VkDisplayModeCreateInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayModeCreateInfoKHR-sType-sType)"},
+    {"VUID-VkDisplayModeParametersKHR-height-01991", "The height member of visibleRegion must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayModeParametersKHR-height-01991)"},
+    {"VUID-VkDisplayModeParametersKHR-refreshRate-01992", "refreshRate must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayModeParametersKHR-refreshRate-01992)"},
+    {"VUID-VkDisplayModeParametersKHR-width-01990", "The width member of visibleRegion must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayModeParametersKHR-width-01990)"},
+    {"VUID-VkDisplayModeProperties2KHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayModeProperties2KHR-pNext-pNext)"},
+    {"VUID-VkDisplayModeProperties2KHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayModeProperties2KHR-sType-sType)"},
+    {"VUID-VkDisplayNativeHdrSurfaceCapabilitiesAMD-sType-sType", "sType must be VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayNativeHdrSurfaceCapabilitiesAMD-sType-sType)"},
+    {"VUID-VkDisplayPlaneCapabilities2KHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayPlaneCapabilities2KHR-pNext-pNext)"},
+    {"VUID-VkDisplayPlaneCapabilities2KHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayPlaneCapabilities2KHR-sType-sType)"},
+    {"VUID-VkDisplayPlaneInfo2KHR-mode-parameter", "mode must be a valid VkDisplayModeKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayPlaneInfo2KHR-mode-parameter)"},
+    {"VUID-VkDisplayPlaneInfo2KHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayPlaneInfo2KHR-pNext-pNext)"},
+    {"VUID-VkDisplayPlaneInfo2KHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayPlaneInfo2KHR-sType-sType)"},
+    {"VUID-VkDisplayPlaneProperties2KHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayPlaneProperties2KHR-pNext-pNext)"},
+    {"VUID-VkDisplayPlaneProperties2KHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayPlaneProperties2KHR-sType-sType)"},
+    {"VUID-VkDisplayPowerInfoEXT-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayPowerInfoEXT-pNext-pNext)"},
+    {"VUID-VkDisplayPowerInfoEXT-powerState-parameter", "powerState must be a valid VkDisplayPowerStateEXT value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayPowerInfoEXT-powerState-parameter)"},
+    {"VUID-VkDisplayPowerInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayPowerInfoEXT-sType-sType)"},
+    {"VUID-VkDisplayPresentInfoKHR-dstRect-01258", "dstRect must specify a rectangular region that is a subset of the visibleRegion parameter of the display mode the swapchain being presented uses (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayPresentInfoKHR-dstRect-01258)"},
+    {"VUID-VkDisplayPresentInfoKHR-persistentContent-01259", "If the persistentContent member of the VkDisplayPropertiesKHR structure returned by vkGetPhysicalDeviceDisplayPropertiesKHR for the display the present operation targets then persistent must be VK_FALSE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayPresentInfoKHR-persistentContent-01259)"},
+    {"VUID-VkDisplayPresentInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayPresentInfoKHR-sType-sType)"},
+    {"VUID-VkDisplayPresentInfoKHR-srcRect-01257", "srcRect must specify a rectangular region that is a subset of the image being presented (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayPresentInfoKHR-srcRect-01257)"},
+    {"VUID-VkDisplayProperties2KHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayProperties2KHR-pNext-pNext)"},
+    {"VUID-VkDisplayProperties2KHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplayProperties2KHR-sType-sType)"},
+    {"VUID-VkDisplaySurfaceCreateInfoKHR-alphaMode-01254", "If alphaMode is VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR then globalAlpha must be between 0 and 1, inclusive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplaySurfaceCreateInfoKHR-alphaMode-01254)"},
+    {"VUID-VkDisplaySurfaceCreateInfoKHR-alphaMode-01255", "alphaMode must be 0 or one of the bits present in the supportedAlpha member of VkDisplayPlaneCapabilitiesKHR returned by vkGetDisplayPlaneCapabilitiesKHR for the display plane corresponding to displayMode (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplaySurfaceCreateInfoKHR-alphaMode-01255)"},
+    {"VUID-VkDisplaySurfaceCreateInfoKHR-alphaMode-parameter", "alphaMode must be a valid VkDisplayPlaneAlphaFlagBitsKHR value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplaySurfaceCreateInfoKHR-alphaMode-parameter)"},
+    {"VUID-VkDisplaySurfaceCreateInfoKHR-displayMode-parameter", "displayMode must be a valid VkDisplayModeKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplaySurfaceCreateInfoKHR-displayMode-parameter)"},
+    {"VUID-VkDisplaySurfaceCreateInfoKHR-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplaySurfaceCreateInfoKHR-flags-zerobitmask)"},
+    {"VUID-VkDisplaySurfaceCreateInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplaySurfaceCreateInfoKHR-pNext-pNext)"},
+    {"VUID-VkDisplaySurfaceCreateInfoKHR-planeIndex-01252", "planeIndex must be less than the number of display planes supported by the device as determined by calling vkGetPhysicalDeviceDisplayPlanePropertiesKHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplaySurfaceCreateInfoKHR-planeIndex-01252)"},
+    {"VUID-VkDisplaySurfaceCreateInfoKHR-planeReorderPossible-01253", "If the planeReorderPossible member of the VkDisplayPropertiesKHR structure returned by vkGetPhysicalDeviceDisplayPropertiesKHR for the display corresponding to displayMode is VK_TRUE then planeStackIndex must be less than the number of display planes supported by the device as determined by calling vkGetPhysicalDeviceDisplayPlanePropertiesKHR; otherwise planeStackIndex must equal the currentStackIndex member of VkDisplayPlanePropertiesKHR returned by vkGetPhysicalDeviceDisplayPlanePropertiesKHR for the display plane corresponding to displayMode (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplaySurfaceCreateInfoKHR-planeReorderPossible-01253)"},
+    {"VUID-VkDisplaySurfaceCreateInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplaySurfaceCreateInfoKHR-sType-sType)"},
+    {"VUID-VkDisplaySurfaceCreateInfoKHR-transform-parameter", "transform must be a valid VkSurfaceTransformFlagBitsKHR value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplaySurfaceCreateInfoKHR-transform-parameter)"},
+    {"VUID-VkDisplaySurfaceCreateInfoKHR-width-01256", "The width and height members of imageExtent must be less than the maxImageDimensions2D member of VkPhysicalDeviceLimits (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDisplaySurfaceCreateInfoKHR-width-01256)"},
+    {"VUID-VkDrawIndexedIndirectCommand-None-00552", "For a given vertex buffer binding, any attribute data fetched must be entirely contained within the corresponding vertex buffer binding, as described in Vertex Input Description (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDrawIndexedIndirectCommand-None-00552)"},
+    {"VUID-VkDrawIndexedIndirectCommand-firstInstance-00554", "If the drawIndirectFirstInstance feature is not enabled, firstInstance must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDrawIndexedIndirectCommand-firstInstance-00554)"},
+    {"VUID-VkDrawIndexedIndirectCommand-indexSize-00553", "(indexSize * (firstIndex + indexCount) + offset) must be less than or equal to the size of the bound index buffer, with indexSize being based on the type specified by indexType, where the index buffer, indexType, and offset are specified via vkCmdBindIndexBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDrawIndexedIndirectCommand-indexSize-00553)"},
+    {"VUID-VkDrawIndirectCommand-None-00500", "For a given vertex buffer binding, any attribute data fetched must be entirely contained within the corresponding vertex buffer binding, as described in Vertex Input Description (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDrawIndirectCommand-None-00500)"},
+    {"VUID-VkDrawIndirectCommand-firstInstance-00501", "If the drawIndirectFirstInstance feature is not enabled, firstInstance must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDrawIndirectCommand-firstInstance-00501)"},
+    {"VUID-VkDrawMeshTasksIndirectCommandNV-taskCount-02175", "taskCount must be less than or equal to VkPhysicalDeviceMeshShaderPropertiesNV::maxDrawMeshTasksCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDrawMeshTasksIndirectCommandNV-taskCount-02175)"},
+    {"VUID-VkDrmFormatModifierPropertiesListEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkDrmFormatModifierPropertiesListEXT-sType-sType)"},
+    {"VUID-VkEventCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkEventCreateInfo-flags-zerobitmask)"},
+    {"VUID-VkEventCreateInfo-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkEventCreateInfo-pNext-pNext)"},
+    {"VUID-VkEventCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_EVENT_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkEventCreateInfo-sType-sType)"},
+    {"VUID-VkExportFenceCreateInfo-handleTypes-01446", "The bits in handleTypes must be supported and compatible, as reported by VkExternalFenceProperties. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExportFenceCreateInfo-handleTypes-01446)"},
+    {"VUID-VkExportFenceCreateInfo-handleTypes-parameter", "handleTypes must be a valid combination of VkExternalFenceHandleTypeFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExportFenceCreateInfo-handleTypes-parameter)"},
+    {"VUID-VkExportFenceCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExportFenceCreateInfo-sType-sType)"},
+    {"VUID-VkExportFenceWin32HandleInfoKHR-handleTypes-01447", "If VkExportFenceCreateInfo::handleTypes does not include VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT, a VkExportFenceWin32HandleInfoKHR structure must not be included in the pNext chain of VkFenceCreateInfo. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExportFenceWin32HandleInfoKHR-handleTypes-01447)"},
+    {"VUID-VkExportFenceWin32HandleInfoKHR-pAttributes-parameter", "If pAttributes is not NULL, pAttributes must be a valid pointer to a valid SECURITY_ATTRIBUTES value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExportFenceWin32HandleInfoKHR-pAttributes-parameter)"},
+    {"VUID-VkExportFenceWin32HandleInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExportFenceWin32HandleInfoKHR-sType-sType)"},
+    {"VUID-VkExportMemoryAllocateInfo-handleTypes-00656", "The bits in handleTypes must be supported and compatible, as reported by VkExternalImageFormatProperties or VkExternalBufferProperties. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExportMemoryAllocateInfo-handleTypes-00656)"},
+    {"VUID-VkExportMemoryAllocateInfo-handleTypes-parameter", "handleTypes must be a valid combination of VkExternalMemoryHandleTypeFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExportMemoryAllocateInfo-handleTypes-parameter)"},
+    {"VUID-VkExportMemoryAllocateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExportMemoryAllocateInfo-sType-sType)"},
+    {"VUID-VkExportMemoryAllocateInfoNV-handleTypes-parameter", "handleTypes must be a valid combination of VkExternalMemoryHandleTypeFlagBitsNV values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExportMemoryAllocateInfoNV-handleTypes-parameter)"},
+    {"VUID-VkExportMemoryAllocateInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExportMemoryAllocateInfoNV-sType-sType)"},
+    {"VUID-VkExportMemoryWin32HandleInfoKHR-handleTypes-00657", "If VkExportMemoryAllocateInfo::handleTypes does not include VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, or VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, a VkExportMemoryWin32HandleInfoKHR structure must not be included in the pNext chain of VkMemoryAllocateInfo. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExportMemoryWin32HandleInfoKHR-handleTypes-00657)"},
+    {"VUID-VkExportMemoryWin32HandleInfoKHR-pAttributes-parameter", "If pAttributes is not NULL, pAttributes must be a valid pointer to a valid SECURITY_ATTRIBUTES value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExportMemoryWin32HandleInfoKHR-pAttributes-parameter)"},
+    {"VUID-VkExportMemoryWin32HandleInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExportMemoryWin32HandleInfoKHR-sType-sType)"},
+    {"VUID-VkExportMemoryWin32HandleInfoNV-pAttributes-parameter", "If pAttributes is not NULL, pAttributes must be a valid pointer to a valid SECURITY_ATTRIBUTES value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExportMemoryWin32HandleInfoNV-pAttributes-parameter)"},
+    {"VUID-VkExportMemoryWin32HandleInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExportMemoryWin32HandleInfoNV-sType-sType)"},
+    {"VUID-VkExportSemaphoreCreateInfo-handleTypes-01124", "The bits in handleTypes must be supported and compatible, as reported by VkExternalSemaphoreProperties. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExportSemaphoreCreateInfo-handleTypes-01124)"},
+    {"VUID-VkExportSemaphoreCreateInfo-handleTypes-parameter", "handleTypes must be a valid combination of VkExternalSemaphoreHandleTypeFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExportSemaphoreCreateInfo-handleTypes-parameter)"},
+    {"VUID-VkExportSemaphoreCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExportSemaphoreCreateInfo-sType-sType)"},
+    {"VUID-VkExportSemaphoreWin32HandleInfoKHR-handleTypes-01125", "If VkExportSemaphoreCreateInfo::handleTypes does not include VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT or VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, VkExportSemaphoreWin32HandleInfoKHR must not be included in the pNext chain of VkSemaphoreCreateInfo. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExportSemaphoreWin32HandleInfoKHR-handleTypes-01125)"},
+    {"VUID-VkExportSemaphoreWin32HandleInfoKHR-pAttributes-parameter", "If pAttributes is not NULL, pAttributes must be a valid pointer to a valid SECURITY_ATTRIBUTES value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExportSemaphoreWin32HandleInfoKHR-pAttributes-parameter)"},
+    {"VUID-VkExportSemaphoreWin32HandleInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExportSemaphoreWin32HandleInfoKHR-sType-sType)"},
+    {"VUID-VkExternalBufferProperties-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExternalBufferProperties-pNext-pNext)"},
+    {"VUID-VkExternalBufferProperties-sType-sType", "sType must be VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExternalBufferProperties-sType-sType)"},
+    {"VUID-VkExternalFenceProperties-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExternalFenceProperties-pNext-pNext)"},
+    {"VUID-VkExternalFenceProperties-sType-sType", "sType must be VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExternalFenceProperties-sType-sType)"},
+    {"VUID-VkExternalFormatANDROID-externalFormat-01894", "externalFormat must be 0 or a value returned in the externalFormat member of VkAndroidHardwareBufferFormatPropertiesANDROID by an earlier call to vkGetAndroidHardwareBufferPropertiesANDROID (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExternalFormatANDROID-externalFormat-01894)"},
+    {"VUID-VkExternalFormatANDROID-sType-sType", "sType must be VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExternalFormatANDROID-sType-sType)"},
+    {"VUID-VkExternalImageFormatProperties-sType-sType", "sType must be VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExternalImageFormatProperties-sType-sType)"},
+    {"VUID-VkExternalMemoryBufferCreateInfo-handleTypes-parameter", "handleTypes must be a valid combination of VkExternalMemoryHandleTypeFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExternalMemoryBufferCreateInfo-handleTypes-parameter)"},
+    {"VUID-VkExternalMemoryBufferCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExternalMemoryBufferCreateInfo-sType-sType)"},
+    {"VUID-VkExternalMemoryImageCreateInfo-handleTypes-parameter", "handleTypes must be a valid combination of VkExternalMemoryHandleTypeFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExternalMemoryImageCreateInfo-handleTypes-parameter)"},
+    {"VUID-VkExternalMemoryImageCreateInfo-handleTypes-requiredbitmask", "handleTypes must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExternalMemoryImageCreateInfo-handleTypes-requiredbitmask)"},
+    {"VUID-VkExternalMemoryImageCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExternalMemoryImageCreateInfo-sType-sType)"},
+    {"VUID-VkExternalMemoryImageCreateInfoNV-handleTypes-parameter", "handleTypes must be a valid combination of VkExternalMemoryHandleTypeFlagBitsNV values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExternalMemoryImageCreateInfoNV-handleTypes-parameter)"},
+    {"VUID-VkExternalMemoryImageCreateInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExternalMemoryImageCreateInfoNV-sType-sType)"},
+    {"VUID-VkExternalSemaphoreProperties-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExternalSemaphoreProperties-pNext-pNext)"},
+    {"VUID-VkExternalSemaphoreProperties-sType-sType", "sType must be VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkExternalSemaphoreProperties-sType-sType)"},
+    {"VUID-VkFenceCreateInfo-flags-parameter", "flags must be a valid combination of VkFenceCreateFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFenceCreateInfo-flags-parameter)"},
+    {"VUID-VkFenceCreateInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkExportFenceCreateInfo or VkExportFenceWin32HandleInfoKHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFenceCreateInfo-pNext-pNext)"},
+    {"VUID-VkFenceCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_FENCE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFenceCreateInfo-sType-sType)"},
+    {"VUID-VkFenceCreateInfo-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFenceCreateInfo-sType-unique)"},
+    {"VUID-VkFenceGetFdInfoKHR-fence-01455", "fence must not currently have its payload replaced by an imported payload as described below in Importing Fence Payloads unless that imported payload's handle type was included in VkExternalFenceProperties::exportFromImportedHandleTypes for handleType. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFenceGetFdInfoKHR-fence-01455)"},
+    {"VUID-VkFenceGetFdInfoKHR-fence-parameter", "fence must be a valid VkFence handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFenceGetFdInfoKHR-fence-parameter)"},
+    {"VUID-VkFenceGetFdInfoKHR-handleType-01453", "handleType must have been included in VkExportFenceCreateInfo::handleTypes when fence's current payload was created. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFenceGetFdInfoKHR-handleType-01453)"},
+    {"VUID-VkFenceGetFdInfoKHR-handleType-01454", "If handleType refers to a handle type with copy payload transference semantics, fence must be signaled, or have an associated fence signal operation pending execution. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFenceGetFdInfoKHR-handleType-01454)"},
+    {"VUID-VkFenceGetFdInfoKHR-handleType-01456", "handleType must be defined as a POSIX file descriptor handle. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFenceGetFdInfoKHR-handleType-01456)"},
+    {"VUID-VkFenceGetFdInfoKHR-handleType-parameter", "handleType must be a valid VkExternalFenceHandleTypeFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFenceGetFdInfoKHR-handleType-parameter)"},
+    {"VUID-VkFenceGetFdInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFenceGetFdInfoKHR-pNext-pNext)"},
+    {"VUID-VkFenceGetFdInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFenceGetFdInfoKHR-sType-sType)"},
+    {"VUID-VkFenceGetWin32HandleInfoKHR-fence-01450", "fence must not currently have its payload replaced by an imported payload as described below in Importing Fence Payloads unless that imported payload's handle type was included in VkExternalFenceProperties::exportFromImportedHandleTypes for handleType. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFenceGetWin32HandleInfoKHR-fence-01450)"},
+    {"VUID-VkFenceGetWin32HandleInfoKHR-fence-parameter", "fence must be a valid VkFence handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFenceGetWin32HandleInfoKHR-fence-parameter)"},
+    {"VUID-VkFenceGetWin32HandleInfoKHR-handleType-01448", "handleType must have been included in VkExportFenceCreateInfo::handleTypes when the fence's current payload was created. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFenceGetWin32HandleInfoKHR-handleType-01448)"},
+    {"VUID-VkFenceGetWin32HandleInfoKHR-handleType-01449", "If handleType is defined as an NT handle, vkGetFenceWin32HandleKHR must be called no more than once for each valid unique combination of fence and handleType. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFenceGetWin32HandleInfoKHR-handleType-01449)"},
+    {"VUID-VkFenceGetWin32HandleInfoKHR-handleType-01451", "If handleType refers to a handle type with copy payload transference semantics, fence must be signaled, or have an associated fence signal operation pending execution. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFenceGetWin32HandleInfoKHR-handleType-01451)"},
+    {"VUID-VkFenceGetWin32HandleInfoKHR-handleType-01452", "handleType must be defined as an NT handle or a global share handle. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFenceGetWin32HandleInfoKHR-handleType-01452)"},
+    {"VUID-VkFenceGetWin32HandleInfoKHR-handleType-parameter", "handleType must be a valid VkExternalFenceHandleTypeFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFenceGetWin32HandleInfoKHR-handleType-parameter)"},
+    {"VUID-VkFenceGetWin32HandleInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFenceGetWin32HandleInfoKHR-pNext-pNext)"},
+    {"VUID-VkFenceGetWin32HandleInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFenceGetWin32HandleInfoKHR-sType-sType)"},
+    {"VUID-VkFilterCubicImageViewImageFormatPropertiesEXT-pNext-02627", "If the pNext chain of the VkImageFormatProperties2 structure includes a VkFilterCubicImageViewImageFormatPropertiesEXT structure, the pNext chain of the VkPhysicalDeviceImageFormatInfo2 structure must include a VkPhysicalDeviceImageViewImageFormatInfoEXT structure with an imageViewType that is compatible with imageType. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFilterCubicImageViewImageFormatPropertiesEXT-pNext-02627)"},
+    {"VUID-VkFilterCubicImageViewImageFormatPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFilterCubicImageViewImageFormatPropertiesEXT-sType-sType)"},
+    {"VUID-VkFormatProperties2-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkDrmFormatModifierPropertiesListEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFormatProperties2-pNext-pNext)"},
+    {"VUID-VkFormatProperties2-sType-sType", "sType must be VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFormatProperties2-sType-sType)"},
+    {"VUID-VkFramebufferAttachmentImageInfoKHR-flags-parameter", "flags must be a valid combination of VkImageCreateFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferAttachmentImageInfoKHR-flags-parameter)"},
+    {"VUID-VkFramebufferAttachmentImageInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferAttachmentImageInfoKHR-pNext-pNext)"},
+    {"VUID-VkFramebufferAttachmentImageInfoKHR-pViewFormats-parameter", "If viewFormatCount is not 0, pViewFormats must be a valid pointer to an array of viewFormatCount valid VkFormat values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferAttachmentImageInfoKHR-pViewFormats-parameter)"},
+    {"VUID-VkFramebufferAttachmentImageInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferAttachmentImageInfoKHR-sType-sType)"},
+    {"VUID-VkFramebufferAttachmentImageInfoKHR-usage-parameter", "usage must be a valid combination of VkImageUsageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferAttachmentImageInfoKHR-usage-parameter)"},
+    {"VUID-VkFramebufferAttachmentImageInfoKHR-usage-requiredbitmask", "usage must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferAttachmentImageInfoKHR-usage-requiredbitmask)"},
+    {"VUID-VkFramebufferAttachmentsCreateInfoKHR-pAttachmentImageInfos-parameter", "If attachmentImageInfoCount is not 0, pAttachmentImageInfos must be a valid pointer to an array of attachmentImageInfoCount valid VkFramebufferAttachmentImageInfoKHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferAttachmentsCreateInfoKHR-pAttachmentImageInfos-parameter)"},
+    {"VUID-VkFramebufferAttachmentsCreateInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferAttachmentsCreateInfoKHR-sType-sType)"},
+    {"VUID-VkFramebufferCreateInfo-attachmentCount-00876", "attachmentCount must be equal to the attachment count specified in renderPass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-attachmentCount-00876)"},
+    {"VUID-VkFramebufferCreateInfo-commonparent", "Both of renderPass, and the elements of pAttachments that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-commonparent)"},
+    {"VUID-VkFramebufferCreateInfo-flags-02778", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, and attachmentCount is not 0, pAttachments must be a valid pointer to an array of attachmentCount valid VkImageView handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-02778)"},
+    {"VUID-VkFramebufferCreateInfo-flags-03188", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, and attachmentCount is not 0, pAttachments must be a valid pointer to an array of attachmentCount valid VkImageView handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03188)"},
+    {"VUID-VkFramebufferCreateInfo-flags-03189", "If the imageless framebuffer feature is not enabled, flags must not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03189)"},
+    {"VUID-VkFramebufferCreateInfo-flags-03190", "If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the pNext chain must include a VkFramebufferAttachmentsCreateInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03190)"},
+    {"VUID-VkFramebufferCreateInfo-flags-03191", "If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the attachmentImageInfoCount member of a VkFramebufferAttachmentsCreateInfoKHR structure included in the pNext chain must be equal to either zero or attachmentCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03191)"},
+    {"VUID-VkFramebufferCreateInfo-flags-03192", "If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the width member of any element of the pAttachmentImageInfos member of a VkFramebufferAttachmentsCreateInfoKHR structure included in the pNext chain must be greater than or equal to width (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03192)"},
+    {"VUID-VkFramebufferCreateInfo-flags-03193", "If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the height member of any element of the pAttachmentImageInfos member of a VkFramebufferAttachmentsCreateInfoKHR structure included in the pNext chain must be greater than or equal to height (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03193)"},
+    {"VUID-VkFramebufferCreateInfo-flags-03194", "If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the width member of any element of the pAttachmentImageInfos member of a VkFramebufferAttachmentsCreateInfoKHR structure included in the pNext chain must be greater than or equal to width, except for any element that is referenced by VkRenderPassFragmentDensityMapCreateInfoEXT::fragmentDensityMapAttachment in renderPass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03194)"},
+    {"VUID-VkFramebufferCreateInfo-flags-03195", "If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the height member of any element of the pAttachmentImageInfos member of a VkFramebufferAttachmentsCreateInfoKHR structure included in the pNext chain must be greater than or equal to height, except for any element that is referenced by VkRenderPassFragmentDensityMapCreateInfoEXT::fragmentDensityMapAttachment in renderPass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03195)"},
+    {"VUID-VkFramebufferCreateInfo-flags-03196", "If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the width member of any element of the pAttachmentImageInfos member of a VkFramebufferAttachmentsCreateInfoKHR structure included in the pNext chain that is referenced by VkRenderPassFragmentDensityMapCreateInfoEXT::fragmentDensityMapAttachment in renderPass must be greater than or equal to the ceiling of width/maxFragmentDensityTexelSize.width (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03196)"},
+    {"VUID-VkFramebufferCreateInfo-flags-03197", "If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the height member of any element of the pAttachmentImageInfos member of a VkFramebufferAttachmentsCreateInfoKHR structure included in the pNext chain that is referenced by VkRenderPassFragmentDensityMapCreateInfoEXT::fragmentDensityMapAttachment in renderPass must be greater than or equal to the ceiling of height/maxFragmentDensityTexelSize.height (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03197)"},
+    {"VUID-VkFramebufferCreateInfo-flags-03200", "If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the layerCount member of any element of the pAttachmentImageInfos member of a VkFramebufferAttachmentsCreateInfoKHR structure included in the pNext chain must be greater than or equal to layers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03200)"},
+    {"VUID-VkFramebufferCreateInfo-flags-03201", "If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the usage member of any element of the pAttachmentImageInfos member of a VkFramebufferAttachmentsCreateInfoKHR structure included in the pNext chain that refers to an attachment used as a color attachment or resolve attachment by renderPass must include VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03201)"},
+    {"VUID-VkFramebufferCreateInfo-flags-03202", "If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the usage member of any element of the pAttachmentImageInfos member of a VkFramebufferAttachmentsCreateInfoKHR structure included in the pNext chain that refers to an attachment used as a depth/stencil attachment by renderPass must include VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03202)"},
+    {"VUID-VkFramebufferCreateInfo-flags-03203", "If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the usage member of any element of the pAttachmentImageInfos member of a VkFramebufferAttachmentsCreateInfoKHR structure included in the pNext chain that refers to an attachment used as a depth/stencil resolve attachment by renderPass must include VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03203)"},
+    {"VUID-VkFramebufferCreateInfo-flags-03204", "If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the usage member of any element of the pAttachmentImageInfos member of a VkFramebufferAttachmentsCreateInfoKHR structure included in the pNext chain that refers to an attachment used as an input attachment by renderPass must include VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03204)"},
+    {"VUID-VkFramebufferCreateInfo-flags-03205", "If flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, at least one element of the pViewFormats member of any element of the pAttachmentImageInfos member of a VkFramebufferAttachmentsCreateInfoKHR structure included in the pNext chain must be equal to the corresponding value of VkAttachmentDescription::format used to create renderPass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-03205)"},
+    {"VUID-VkFramebufferCreateInfo-flags-parameter", "flags must be a valid combination of VkFramebufferCreateFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-flags-parameter)"},
+    {"VUID-VkFramebufferCreateInfo-height-00887", "height must be greater than 0. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-height-00887)"},
+    {"VUID-VkFramebufferCreateInfo-height-00888", "height must be less than or equal to VkPhysicalDeviceLimits::maxFramebufferHeight (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-height-00888)"},
+    {"VUID-VkFramebufferCreateInfo-layers-00889", "layers must be greater than 0. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-layers-00889)"},
+    {"VUID-VkFramebufferCreateInfo-layers-00890", "layers must be less than or equal to VkPhysicalDeviceLimits::maxFramebufferLayers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-layers-00890)"},
+    {"VUID-VkFramebufferCreateInfo-pAttachments-00877", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of pAttachments that is used as a color attachment or resolve attachment by renderPass must have been created with a usage value including VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-00877)"},
+    {"VUID-VkFramebufferCreateInfo-pAttachments-00879", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of pAttachments that is used as an input attachment by renderPass must have been created with a usage value including VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-00879)"},
+    {"VUID-VkFramebufferCreateInfo-pAttachments-00880", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of pAttachments must have been created with a VkFormat value that matches the VkFormat specified by the corresponding VkAttachmentDescription in renderPass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-00880)"},
+    {"VUID-VkFramebufferCreateInfo-pAttachments-00881", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of pAttachments must have been created with a samples value that matches the samples value specified by the corresponding VkAttachmentDescription in renderPass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-00881)"},
+    {"VUID-VkFramebufferCreateInfo-pAttachments-00882", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of pAttachments must have dimensions at least as large as the corresponding framebuffer dimension (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-00882)"},
+    {"VUID-VkFramebufferCreateInfo-pAttachments-00883", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of pAttachments must only specify a single mip level (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-00883)"},
+    {"VUID-VkFramebufferCreateInfo-pAttachments-00884", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of pAttachments must have been created with the identity swizzle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-00884)"},
+    {"VUID-VkFramebufferCreateInfo-pAttachments-00891", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of pAttachments that is a 2D or 2D array image view taken from a 3D image must not be a depth/stencil format (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-00891)"},
+    {"VUID-VkFramebufferCreateInfo-pAttachments-02552", "Each element of pAttachments that is used as a fragment density map attachment by renderPass must not have been created with a flags value including VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-02552)"},
+    {"VUID-VkFramebufferCreateInfo-pAttachments-02554", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of pAttachments must have dimensions at least as large as the corresponding framebuffer dimension except for any element that is referenced by fragmentDensityMapAttachment (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-02554)"},
+    {"VUID-VkFramebufferCreateInfo-pAttachments-02555", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, an element of pAttachments that is referenced by fragmentDensityMapAttachment must have a width at least as large as the ceiling of width/maxFragmentDensityTexelSize.width (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-02555)"},
+    {"VUID-VkFramebufferCreateInfo-pAttachments-02556", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, an element of pAttachments that is referenced by fragmentDensityMapAttachment must have a height at least as large as the ceiling of height/maxFragmentDensityTexelSize.height (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-02556)"},
+    {"VUID-VkFramebufferCreateInfo-pAttachments-02633", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of pAttachments that is used as a depth/stencil attachment by renderPass must have been created with a usage value including VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-02633)"},
+    {"VUID-VkFramebufferCreateInfo-pAttachments-02634", "If flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of pAttachments that is used as a depth/stencil resolve attachment by renderPass must have been created with a usage value including VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-02634)"},
+    {"VUID-VkFramebufferCreateInfo-pAttachments-02744", "An element of pAttachments that is referenced by fragmentDensityMapAttachment must have a layerCount equal to 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pAttachments-02744)"},
+    {"VUID-VkFramebufferCreateInfo-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkFramebufferAttachmentsCreateInfoKHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-pNext-pNext)"},
+    {"VUID-VkFramebufferCreateInfo-renderPass-02531", "If renderPass was specified with non-zero view masks, layers must be 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-renderPass-02531)"},
+    {"VUID-VkFramebufferCreateInfo-renderPass-02553", "If renderPass has a fragment density map attachment and non-subsample image feature is not enabled, each element of pAttachments must have been created with a flags value including VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT unless that element is the fragment density map attachment. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-renderPass-02553)"},
+    {"VUID-VkFramebufferCreateInfo-renderPass-02743", "If renderPass was specified with non-zero view masks, each element of pAttachments must have a layerCount greater than the index of the most significant bit set in any of those view masks (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-renderPass-02743)"},
+    {"VUID-VkFramebufferCreateInfo-renderPass-02745", "If renderPass was specified with non-zero view masks, each element of pAttachments that is not referenced by fragmentDensityMapAttachment must have a layerCount greater than the index of the most significant bit set in any of those view masks (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-renderPass-02745)"},
+    {"VUID-VkFramebufferCreateInfo-renderPass-02746", "If renderPass was specified with non-zero view masks, each element of pAttachments that is referenced by fragmentDensityMapAttachment must have a layerCount equal to 1 or greater than the index of the most significant bit set in any of those view masks (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-renderPass-02746)"},
+    {"VUID-VkFramebufferCreateInfo-renderPass-02747", "If renderPass was not specified with non-zero view masks, each element of pAttachments that is referenced by fragmentDensityMapAttachment must have a layerCount equal to 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-renderPass-02747)"},
+    {"VUID-VkFramebufferCreateInfo-renderPass-03198", "If multiview is enabled for renderPass, and flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the layerCount member of any element of the pAttachmentImageInfos member of a VkFramebufferAttachmentsCreateInfoKHR structure included in the pNext chain must be greater than the maximum bit index set in the view mask in the subpasses in which it is used in renderPass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-renderPass-03198)"},
+    {"VUID-VkFramebufferCreateInfo-renderPass-03199", "If multiview is not enabled for renderPass, and flags includes VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the layerCount member of any element of the pAttachmentImageInfos member of a VkFramebufferAttachmentsCreateInfoKHR structure included in the pNext chain must be greater than or equal to layers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-renderPass-03199)"},
+    {"VUID-VkFramebufferCreateInfo-renderPass-parameter", "renderPass must be a valid VkRenderPass handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-renderPass-parameter)"},
+    {"VUID-VkFramebufferCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-sType-sType)"},
+    {"VUID-VkFramebufferCreateInfo-width-00885", "width must be greater than 0. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-width-00885)"},
+    {"VUID-VkFramebufferCreateInfo-width-00886", "width must be less than or equal to VkPhysicalDeviceLimits::maxFramebufferWidth (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferCreateInfo-width-00886)"},
+    {"VUID-VkFramebufferMixedSamplesCombinationNV-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferMixedSamplesCombinationNV-pNext-pNext)"},
+    {"VUID-VkFramebufferMixedSamplesCombinationNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkFramebufferMixedSamplesCombinationNV-sType-sType)"},
+    {"VUID-VkGeometryAABBNV-aabbData-parameter", "If aabbData is not VK_NULL_HANDLE, aabbData must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryAABBNV-aabbData-parameter)"},
+    {"VUID-VkGeometryAABBNV-offset-02439", "offset must be less than the size of aabbData (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryAABBNV-offset-02439)"},
+    {"VUID-VkGeometryAABBNV-offset-02440", "offset must be a multiple of 8 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryAABBNV-offset-02440)"},
+    {"VUID-VkGeometryAABBNV-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryAABBNV-pNext-pNext)"},
+    {"VUID-VkGeometryAABBNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryAABBNV-sType-sType)"},
+    {"VUID-VkGeometryAABBNV-stride-02441", "stride must be a multiple of 8 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryAABBNV-stride-02441)"},
+    {"VUID-VkGeometryDataNV-aabbs-parameter", "aabbs must be a valid VkGeometryAABBNV structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryDataNV-aabbs-parameter)"},
+    {"VUID-VkGeometryDataNV-triangles-parameter", "triangles must be a valid VkGeometryTrianglesNV structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryDataNV-triangles-parameter)"},
+    {"VUID-VkGeometryNV-flags-parameter", "flags must be a valid combination of VkGeometryFlagBitsNV values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryNV-flags-parameter)"},
+    {"VUID-VkGeometryNV-geometry-parameter", "geometry must be a valid VkGeometryDataNV structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryNV-geometry-parameter)"},
+    {"VUID-VkGeometryNV-geometryType-parameter", "geometryType must be a valid VkGeometryTypeNV value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryNV-geometryType-parameter)"},
+    {"VUID-VkGeometryNV-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryNV-pNext-pNext)"},
+    {"VUID-VkGeometryNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_GEOMETRY_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryNV-sType-sType)"},
+    {"VUID-VkGeometryTrianglesNV-commonparent", "Each of indexData, transformData, and vertexData that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryTrianglesNV-commonparent)"},
+    {"VUID-VkGeometryTrianglesNV-indexCount-02436", "indexCount must be 0 if indexType is VK_INDEX_TYPE_NONE_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryTrianglesNV-indexCount-02436)"},
+    {"VUID-VkGeometryTrianglesNV-indexData-02434", "indexData must be VK_NULL_HANDLE if indexType is VK_INDEX_TYPE_NONE_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryTrianglesNV-indexData-02434)"},
+    {"VUID-VkGeometryTrianglesNV-indexData-02435", "indexData must be a valid VkBuffer handle if indexType is not VK_INDEX_TYPE_NONE_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryTrianglesNV-indexData-02435)"},
+    {"VUID-VkGeometryTrianglesNV-indexData-parameter", "If indexData is not VK_NULL_HANDLE, indexData must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryTrianglesNV-indexData-parameter)"},
+    {"VUID-VkGeometryTrianglesNV-indexOffset-02431", "indexOffset must be less than the size of indexData (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryTrianglesNV-indexOffset-02431)"},
+    {"VUID-VkGeometryTrianglesNV-indexOffset-02432", "indexOffset must be a multiple of the element size of indexType (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryTrianglesNV-indexOffset-02432)"},
+    {"VUID-VkGeometryTrianglesNV-indexType-02433", "indexType must be VK_INDEX_TYPE_UINT16, VK_INDEX_TYPE_UINT32, or VK_INDEX_TYPE_NONE_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryTrianglesNV-indexType-02433)"},
+    {"VUID-VkGeometryTrianglesNV-indexType-parameter", "indexType must be a valid VkIndexType value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryTrianglesNV-indexType-parameter)"},
+    {"VUID-VkGeometryTrianglesNV-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryTrianglesNV-pNext-pNext)"},
+    {"VUID-VkGeometryTrianglesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryTrianglesNV-sType-sType)"},
+    {"VUID-VkGeometryTrianglesNV-transformData-parameter", "If transformData is not VK_NULL_HANDLE, transformData must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryTrianglesNV-transformData-parameter)"},
+    {"VUID-VkGeometryTrianglesNV-transformOffset-02437", "transformOffset must be less than the size of transformData (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryTrianglesNV-transformOffset-02437)"},
+    {"VUID-VkGeometryTrianglesNV-transformOffset-02438", "transformOffset must be a multiple of 16 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryTrianglesNV-transformOffset-02438)"},
+    {"VUID-VkGeometryTrianglesNV-vertexData-parameter", "If vertexData is not VK_NULL_HANDLE, vertexData must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryTrianglesNV-vertexData-parameter)"},
+    {"VUID-VkGeometryTrianglesNV-vertexFormat-02430", "vertexFormat must be one of VK_FORMAT_R32G32B32_SFLOAT, VK_FORMAT_R32G32_SFLOAT, VK_FORMAT_R16G16B16_SFLOAT, VK_FORMAT_R16G16_SFLOAT, VK_FORMAT_R16G16_SNORM, or VK_FORMAT_R16G16B16_SNORM (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryTrianglesNV-vertexFormat-02430)"},
+    {"VUID-VkGeometryTrianglesNV-vertexFormat-parameter", "vertexFormat must be a valid VkFormat value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryTrianglesNV-vertexFormat-parameter)"},
+    {"VUID-VkGeometryTrianglesNV-vertexOffset-02428", "vertexOffset must be less than the size of vertexData (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryTrianglesNV-vertexOffset-02428)"},
+    {"VUID-VkGeometryTrianglesNV-vertexOffset-02429", "vertexOffset must be a multiple of the component size of vertexFormat (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGeometryTrianglesNV-vertexOffset-02429)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-None-02322", "If there are any mesh shader stages in the pipeline there must not be any shader stage in the pipeline with a Xfb execution mode. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-None-02322)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-attachmentCount-00746", "If rasterization is not disabled and the subpass uses color attachments, the attachmentCount member of pColorBlendState must be equal to the colorAttachmentCount used to create subpass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-attachmentCount-00746)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-blendEnable-02023", "If rasterization is not disabled and the subpass uses color attachments, then for each color attachment in the subpass the blendEnable member of the corresponding element of the pAttachment member of pColorBlendState must be VK_FALSE if the attached image's format features does not contain VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-blendEnable-02023)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-commonparent", "Each of basePipelineHandle, layout, and renderPass that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-commonparent)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-coverageReductionMode-02722", "If the VK_NV_coverage_reduction_mode extension is enabled, the coverage reduction mode specified by VkPipelineCoverageReductionStateCreateInfoNV::coverageReductionMode, the rasterizationSamples member of pMultisampleState and the sample counts for the color and depth/stencil attachments (if the subpass has them) must be a valid combination returned by vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-coverageReductionMode-02722)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-flags-00722", "If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineIndex is -1, basePipelineHandle must be a valid handle to a graphics VkPipeline (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-flags-00722)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-flags-00723", "If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineHandle is VK_NULL_HANDLE, basePipelineIndex must be a valid index into the calling command's pCreateInfos parameter (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-flags-00723)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-flags-00724", "If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineIndex is not -1, basePipelineHandle must be VK_NULL_HANDLE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-flags-00724)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-flags-00725", "If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineHandle is not VK_NULL_HANDLE, basePipelineIndex must be -1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-flags-00725)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-flags-00764", "flags must not contain the VK_PIPELINE_CREATE_DISPATCH_BASE flag. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-flags-00764)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-flags-parameter", "flags must be a valid combination of VkPipelineCreateFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-flags-parameter)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-geometryStreams-02321", "If the last vertex processing stage is a geometry shader, and that geometry shader uses the GeometryStreams capability, then VkPhysicalDeviceTransformFeedbackFeaturesEXT::geometryStreams feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-geometryStreams-02321)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-layout-00756", "layout must be consistent with all shaders specified in pStages (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-layout-00756)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-layout-01688", "The number of resources in layout accessible to each shader stage that is used by the pipeline must be less than or equal to VkPhysicalDeviceLimits::maxPerStageResources (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-layout-01688)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-layout-parameter", "layout must be a valid VkPipelineLayout handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-layout-parameter)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-lineRasterizationMode-02766", "If the lineRasterizationMode member of a VkPipelineRasterizationLineStateCreateInfoEXT structure included in the pNext chain of pRasterizationState is VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT or VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT and if rasterization is enabled, then the alphaToCoverageEnable, alphaToOneEnable, and sampleShadingEnable members of pMultisampleState must all be VK_FALSE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-lineRasterizationMode-02766)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pDynamicState-parameter", "If pDynamicState is not NULL, pDynamicState must be a valid pointer to a valid VkPipelineDynamicStateCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pDynamicState-parameter)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747", "If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_VIEWPORT, the pViewports member of pViewportState must be a valid pointer to an array of pViewportState->viewportCount valid VkViewport structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748", "If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_SCISSOR, the pScissors member of pViewportState must be a valid pointer to an array of pViewportState->scissorCount VkRect2D structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00749", "If the wide lines feature is not enabled, and no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_LINE_WIDTH, the lineWidth member of pRasterizationState must be 1.0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00749)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00754", "If the depth bias clamping feature is not enabled, no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_DEPTH_BIAS, and the depthBiasEnable member of pRasterizationState is VK_TRUE, the depthBiasClamp member of pRasterizationState must be 0.0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00754)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00755", "If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_DEPTH_BOUNDS, and the depthBoundsTestEnable member of pDepthStencilState is VK_TRUE, the minDepthBounds and maxDepthBounds members of pDepthStencilState must be between 0.0 and 1.0, inclusive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00755)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-01521", "If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, and the sampleLocationsEnable member of a VkPipelineSampleLocationsStateCreateInfoEXT structure included in the pNext chain of pMultisampleState is VK_TRUE, sampleLocationsInfo.sampleLocationGridSize.width must evenly divide VkMultisamplePropertiesEXT::sampleLocationGridSize.width as returned by vkGetPhysicalDeviceMultisamplePropertiesEXT with a samples parameter equaling rasterizationSamples (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-01521)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-01522", "If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, and the sampleLocationsEnable member of a VkPipelineSampleLocationsStateCreateInfoEXT structure included in the pNext chain of pMultisampleState is VK_TRUE, sampleLocationsInfo.sampleLocationGridSize.height must evenly divide VkMultisamplePropertiesEXT::sampleLocationGridSize.height as returned by vkGetPhysicalDeviceMultisamplePropertiesEXT with a samples parameter equaling rasterizationSamples (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-01522)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-01523", "If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, and the sampleLocationsEnable member of a VkPipelineSampleLocationsStateCreateInfoEXT structure included in the pNext chain of pMultisampleState is VK_TRUE, sampleLocationsInfo.sampleLocationsPerPixel must equal rasterizationSamples (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-01523)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-01715", "If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV, and the viewportWScalingEnable member of a VkPipelineViewportWScalingStateCreateInfoNV structure, included in the pNext chain of pViewportState, is VK_TRUE, the pViewportWScalings member of the VkPipelineViewportWScalingStateCreateInfoNV must be a pointer to an array of VkPipelineViewportWScalingStateCreateInfoNV::viewportCount valid VkViewportWScalingNV structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-01715)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-02510", "If the VK_EXT_depth_range_unrestricted extension is not enabled and no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_DEPTH_BOUNDS, and the depthBoundsTestEnable member of pDepthStencilState is VK_TRUE, the minDepthBounds and maxDepthBounds members of pDepthStencilState must be between 0.0 and 1.0, inclusive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-02510)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPipelineCompilerControlCreateInfoAMD, VkPipelineCreationFeedbackCreateInfoEXT, VkPipelineDiscardRectangleStateCreateInfoEXT, or VkPipelineRepresentativeFragmentTestStateCreateInfoNV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pNext-pNext)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pRasterizationState-parameter", "pRasterizationState must be a valid pointer to a valid VkPipelineRasterizationStateCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pRasterizationState-parameter)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pStages-00729", "If pStages includes a tessellation control shader stage, it must include a tessellation evaluation shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pStages-00729)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pStages-00730", "If pStages includes a tessellation evaluation shader stage, it must include a tessellation control shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pStages-00730)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pStages-00731", "If pStages includes a tessellation control shader stage and a tessellation evaluation shader stage, pTessellationState must be a valid pointer to a valid VkPipelineTessellationStateCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pStages-00731)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pStages-00732", "If pStages includes tessellation shader stages, the shader code of at least one stage must contain an OpExecutionMode instruction that specifies the type of subdivision in the pipeline (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pStages-00732)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pStages-00733", "If pStages includes tessellation shader stages, and the shader code of both stages contain an OpExecutionMode instruction that specifies the type of subdivision in the pipeline, they must both specify the same subdivision mode (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pStages-00733)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pStages-00734", "If pStages includes tessellation shader stages, the shader code of at least one stage must contain an OpExecutionMode instruction that specifies the output patch size in the pipeline (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pStages-00734)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pStages-00735", "If pStages includes tessellation shader stages, and the shader code of both contain an OpExecutionMode instruction that specifies the out patch size in the pipeline, they must both specify the same patch size (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pStages-00735)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pStages-00736", "If pStages includes tessellation shader stages, the topology member of pInputAssembly must be VK_PRIMITIVE_TOPOLOGY_PATCH_LIST (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pStages-00736)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pStages-00738", "If pStages includes a geometry shader stage, and does not include any tessellation shader stages, its shader code must contain an OpExecutionMode instruction that specifies an input primitive type that is compatible with the primitive topology specified in pInputAssembly (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pStages-00738)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pStages-00739", "If pStages includes a geometry shader stage, and also includes tessellation shader stages, its shader code must contain an OpExecutionMode instruction that specifies an input primitive type that is compatible with the primitive topology that is output by the tessellation stages (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pStages-00739)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pStages-00740", "If pStages includes a fragment shader stage and a geometry shader stage, and the fragment shader code reads from an input variable that is decorated with PrimitiveID, then the geometry shader code must write to a matching output variable, decorated with PrimitiveID, in all execution paths (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pStages-00740)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pStages-00741", "If pStages includes a fragment shader stage, its shader code must not read from any input attachment that is defined as VK_ATTACHMENT_UNUSED in subpass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pStages-00741)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pStages-00742", "The shader code for the entry points identified by pStages, and the rest of the state identified by this structure must adhere to the pipeline linking rules described in the Shader Interfaces chapter (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pStages-00742)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pStages-01565", "If pStages includes a fragment shader stage and an input attachment was referenced by the VkRenderPassInputAttachmentAspectCreateInfo at renderPass create time, its shader code must not read from any aspect that was not specified in the aspectMask of the corresponding VkInputAttachmentAspectReference structure. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pStages-01565)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pStages-02095", "The geometric shader stages provided in pStages must be either from the mesh shading pipeline (stage is VK_SHADER_STAGE_TASK_BIT_NV or VK_SHADER_STAGE_MESH_BIT_NV) or from the primitive shading pipeline (stage is VK_SHADER_STAGE_VERTEX_BIT, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, or VK_SHADER_STAGE_GEOMETRY_BIT). (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pStages-02095)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pStages-02097", "If pStages includes a vertex shader stage, pVertexInputState must be a valid pointer to a valid VkPipelineVertexInputStateCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pStages-02097)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pStages-02098", "If pStages includes a vertex shader stage, pInputAssemblyState must be a valid pointer to a valid VkPipelineInputAssemblyStateCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pStages-02098)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pStages-02317", "The Xfb execution mode can be specified by only one shader stage in pStages (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pStages-02317)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pStages-02318", "If any shader stage in pStages specifies Xfb execution mode it must be the last vertex processing stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pStages-02318)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-pStages-parameter", "pStages must be a valid pointer to an array of stageCount valid VkPipelineShaderStageCreateInfo structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-pStages-parameter)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-rasterizationStream-02319", "If a VkPipelineRasterizationStateStreamCreateInfoEXT::rasterizationStream value other than zero is specified, all variables in the output interface of the entry point being compiled decorated with Position, PointSize, ClipDistance, or CullDistance must all be decorated with identical Stream values that match the rasterizationStream (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-rasterizationStream-02319)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-rasterizationStream-02320", "If VkPipelineRasterizationStateStreamCreateInfoEXT::rasterizationStream is zero, or not specified, all variables in the output interface of the entry point being compiled decorated with Position, PointSize, ClipDistance, or CullDistance must all be decorated with a Stream value of zero, or must not specify the Stream decoration (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-rasterizationStream-02320)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00750", "If the rasterizerDiscardEnable member of pRasterizationState is VK_FALSE, pViewportState must be a valid pointer to a valid VkPipelineViewportStateCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00750)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00751", "If the rasterizerDiscardEnable member of pRasterizationState is VK_FALSE, pMultisampleState must be a valid pointer to a valid VkPipelineMultisampleStateCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00751)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00752", "If the rasterizerDiscardEnable member of pRasterizationState is VK_FALSE, and subpass uses a depth/stencil attachment, pDepthStencilState must be a valid pointer to a valid VkPipelineDepthStencilStateCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00752)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00753", "If the rasterizerDiscardEnable member of pRasterizationState is VK_FALSE, and subpass uses color attachments, pColorBlendState must be a valid pointer to a valid VkPipelineColorBlendStateCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00753)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-renderPass-00760", "If the renderPass has multiview enabled and subpass has more than one bit set in the view mask and multiviewTessellationShader is not enabled, then pStages must not include tessellation shaders. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-renderPass-00760)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-renderPass-00761", "If the renderPass has multiview enabled and subpass has more than one bit set in the view mask and multiviewGeometryShader is not enabled, then pStages must not include a geometry shader. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-renderPass-00761)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-renderPass-00762", "If the renderPass has multiview enabled and subpass has more than one bit set in the view mask, shaders in the pipeline must not write to the Layer built-in output (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-renderPass-00762)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-renderPass-00763", "If the renderPass has multiview enabled, then all shaders must not include variables decorated with the Layer built-in decoration in their interfaces. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-renderPass-00763)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-renderPass-parameter", "renderPass must be a valid VkRenderPass handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-renderPass-parameter)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-sType-sType)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-sType-unique)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-sampleLocationsEnable-01524", "If the sampleLocationsEnable member of a VkPipelineSampleLocationsStateCreateInfoEXT structure included in the pNext chain of pMultisampleState is VK_TRUE, the fragment shader code must not statically use the extended instruction InterpolateAtSample (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-sampleLocationsEnable-01524)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-stage-00726", "The stage member of each element of pStages must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-stage-00726)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-stage-00727", "The stage member of one element of pStages must be VK_SHADER_STAGE_VERTEX_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-stage-00727)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-stage-00728", "The stage member of each element of pStages must not be VK_SHADER_STAGE_COMPUTE_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-stage-00728)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-stage-02096", "The stage member of one element of pStages must be either VK_SHADER_STAGE_VERTEX_BIT or VK_SHADER_STAGE_MESH_BIT_NV. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-stage-02096)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-stageCount-arraylength", "stageCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-stageCount-arraylength)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-stippledLineEnable-02767", "If the stippledLineEnable member of VkPipelineRasterizationLineStateCreateInfoEXT is VK_TRUE and no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_LINE_STIPPLE_EXT, then the lineStippleFactor member of VkPipelineRasterizationLineStateCreateInfoEXT must be in the range [1,256] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-stippledLineEnable-02767)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-subpass-00743", "If rasterization is not disabled and subpass uses a depth/stencil attachment in renderPass that has a layout of VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL in the VkAttachmentReference defined by subpass, the depthWriteEnable member of pDepthStencilState must be VK_FALSE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-subpass-00743)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-subpass-00744", "If rasterization is not disabled and subpass uses a depth/stencil attachment in renderPass that has a layout of VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL in the VkAttachmentReference defined by subpass, the failOp, passOp and depthFailOp members of each of the front and back members of pDepthStencilState must be VK_STENCIL_OP_KEEP (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-subpass-00744)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-subpass-00757", "If neither the VK_AMD_mixed_attachment_samples nor the VK_NV_framebuffer_mixed_samples extensions are enabled, and if subpass uses color and/or depth/stencil attachments, then the rasterizationSamples member of pMultisampleState must be the same as the sample count for those subpass attachments (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-subpass-00757)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-subpass-00758", "If subpass does not use any color and/or depth/stencil attachments, then the rasterizationSamples member of pMultisampleState must follow the rules for a zero-attachment subpass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-subpass-00758)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-subpass-00759", "subpass must be a valid subpass within renderPass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-subpass-00759)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-subpass-01411", "If the VK_NV_framebuffer_mixed_samples extension is enabled, and if subpass has a depth/stencil attachment and depth test, stencil test, or depth bounds test are enabled, then the rasterizationSamples member of pMultisampleState must be the same as the sample count of the depth/stencil attachment (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-subpass-01411)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-subpass-01412", "If the VK_NV_framebuffer_mixed_samples extension is enabled, and if subpass has any color attachments, then the rasterizationSamples member of pMultisampleState must be greater than or equal to the sample count for those subpass attachments (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-subpass-01412)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-subpass-01505", "If the VK_AMD_mixed_attachment_samples extension is enabled, and if subpass uses color and/or depth/stencil attachments, then the rasterizationSamples member of pMultisampleState must equal the maximum of the sample counts of those subpass attachments (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-subpass-01505)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-subpass-01756", "If rasterization is not disabled and subpass uses a depth/stencil attachment in renderPass that has a layout of VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL or VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL in the VkAttachmentReference defined by subpass, the depthWriteEnable member of pDepthStencilState must be VK_FALSE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-subpass-01756)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-subpass-01757", "If rasterization is not disabled and subpass uses a depth/stencil attachment in renderPass that has a layout of VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL or VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL in the VkAttachmentReference defined by subpass, the failOp, passOp and depthFailOp members of each of the front and back members of pDepthStencilState must be VK_STENCIL_OP_KEEP (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-subpass-01757)"},
+    {"VUID-VkGraphicsPipelineCreateInfo-topology-00737", "If the topology member of pInputAssembly is VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, pStages must include tessellation shader stages (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkGraphicsPipelineCreateInfo-topology-00737)"},
+    {"VUID-VkHdrMetadataEXT-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkHdrMetadataEXT-pNext-pNext)"},
+    {"VUID-VkHdrMetadataEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_HDR_METADATA_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkHdrMetadataEXT-sType-sType)"},
+    {"VUID-VkHeadlessSurfaceCreateInfoEXT-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkHeadlessSurfaceCreateInfoEXT-flags-zerobitmask)"},
+    {"VUID-VkHeadlessSurfaceCreateInfoEXT-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkHeadlessSurfaceCreateInfoEXT-pNext-pNext)"},
+    {"VUID-VkHeadlessSurfaceCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkHeadlessSurfaceCreateInfoEXT-sType-sType)"},
+    {"VUID-VkIOSSurfaceCreateInfoMVK-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIOSSurfaceCreateInfoMVK-flags-zerobitmask)"},
+    {"VUID-VkIOSSurfaceCreateInfoMVK-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIOSSurfaceCreateInfoMVK-pNext-pNext)"},
+    {"VUID-VkIOSSurfaceCreateInfoMVK-pView-01316", "pView must be a valid UIView and must be backed by a CALayer instance of type CAMetalLayer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIOSSurfaceCreateInfoMVK-pView-01316)"},
+    {"VUID-VkIOSSurfaceCreateInfoMVK-sType-sType", "sType must be VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIOSSurfaceCreateInfoMVK-sType-sType)"},
+    {"VUID-VkImageBlit-aspectMask-00238", "The aspectMask member of srcSubresource and dstSubresource must match (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageBlit-aspectMask-00238)"},
+    {"VUID-VkImageBlit-aspectMask-00241", "The aspectMask member of srcSubresource must specify aspects present in the calling command's srcImage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageBlit-aspectMask-00241)"},
+    {"VUID-VkImageBlit-aspectMask-00242", "The aspectMask member of dstSubresource must specify aspects present in the calling command's dstImage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageBlit-aspectMask-00242)"},
+    {"VUID-VkImageBlit-dstImage-00250", "If the calling command's dstImage is of type VK_IMAGE_TYPE_1D, then dstOffset[0].y must be 0 and dstOffset[1].y must be 1. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageBlit-dstImage-00250)"},
+    {"VUID-VkImageBlit-dstImage-00252", "If the calling command's dstImage is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then dstOffset[0].z must be 0 and dstOffset[1].z must be 1. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageBlit-dstImage-00252)"},
+    {"VUID-VkImageBlit-dstOffset-00248", "dstOffset[0].x and dstOffset[1].x must both be greater than or equal to 0 and less than or equal to the destination image subresource width (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageBlit-dstOffset-00248)"},
+    {"VUID-VkImageBlit-dstOffset-00249", "dstOffset[0].y and dstOffset[1].y must both be greater than or equal to 0 and less than or equal to the destination image subresource height (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageBlit-dstOffset-00249)"},
+    {"VUID-VkImageBlit-dstOffset-00251", "dstOffset[0].z and dstOffset[1].z must both be greater than or equal to 0 and less than or equal to the destination image subresource depth (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageBlit-dstOffset-00251)"},
+    {"VUID-VkImageBlit-dstSubresource-parameter", "dstSubresource must be a valid VkImageSubresourceLayers structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageBlit-dstSubresource-parameter)"},
+    {"VUID-VkImageBlit-layerCount-00239", "The layerCount member of srcSubresource and dstSubresource must match (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageBlit-layerCount-00239)"},
+    {"VUID-VkImageBlit-srcImage-00240", "If either of the calling command's srcImage or dstImage parameters are of VkImageType VK_IMAGE_TYPE_3D, the baseArrayLayer and layerCount members of both srcSubresource and dstSubresource must be 0 and 1, respectively (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageBlit-srcImage-00240)"},
+    {"VUID-VkImageBlit-srcImage-00245", "If the calling command's srcImage is of type VK_IMAGE_TYPE_1D, then srcOffset[0].y must be 0 and srcOffset[1].y must be 1. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageBlit-srcImage-00245)"},
+    {"VUID-VkImageBlit-srcImage-00247", "If the calling command's srcImage is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then srcOffset[0].z must be 0 and srcOffset[1].z must be 1. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageBlit-srcImage-00247)"},
+    {"VUID-VkImageBlit-srcOffset-00243", "srcOffset[0].x and srcOffset[1].x must both be greater than or equal to 0 and less than or equal to the source image subresource width (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageBlit-srcOffset-00243)"},
+    {"VUID-VkImageBlit-srcOffset-00244", "srcOffset[0].y and srcOffset[1].y must both be greater than or equal to 0 and less than or equal to the source image subresource height (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageBlit-srcOffset-00244)"},
+    {"VUID-VkImageBlit-srcOffset-00246", "srcOffset[0].z and srcOffset[1].z must both be greater than or equal to 0 and less than or equal to the source image subresource depth (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageBlit-srcOffset-00246)"},
+    {"VUID-VkImageBlit-srcSubresource-parameter", "srcSubresource must be a valid VkImageSubresourceLayers structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageBlit-srcSubresource-parameter)"},
+    {"VUID-VkImageCopy-aspectMask-00137", "The aspectMask member of srcSubresource and dstSubresource must match (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-aspectMask-00137)"},
+    {"VUID-VkImageCopy-aspectMask-00142", "The aspectMask member of srcSubresource must specify aspects present in the calling command's srcImage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-aspectMask-00142)"},
+    {"VUID-VkImageCopy-aspectMask-00143", "The aspectMask member of dstSubresource must specify aspects present in the calling command's dstImage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-aspectMask-00143)"},
+    {"VUID-VkImageCopy-dstImage-00152", "If the calling command's dstImage is of type VK_IMAGE_TYPE_1D, then dstOffset.y must be 0 and extent.height must be 1. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-dstImage-00152)"},
+    {"VUID-VkImageCopy-dstImage-01554", "If the calling command's dstImage has a VkFormat with two planes then the dstSubresource aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT or VK_IMAGE_ASPECT_PLANE_1_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-dstImage-01554)"},
+    {"VUID-VkImageCopy-dstImage-01555", "If the calling command's dstImage has a VkFormat with three planes then the dstSubresource aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-dstImage-01555)"},
+    {"VUID-VkImageCopy-dstImage-01557", "If the calling command's dstImage has a multi-planar image format and the srcImage does not have a multi-planar image format, the srcSubresource aspectMask must be VK_IMAGE_ASPECT_COLOR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-dstImage-01557)"},
+    {"VUID-VkImageCopy-dstImage-01731", "If the calling command's dstImage is a compressed format image, or a single-plane, '_422' image format, all members of dstOffset must be a multiple of the corresponding dimensions of the compressed texel block (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-dstImage-01731)"},
+    {"VUID-VkImageCopy-dstImage-01732", "If the calling command's dstImage is a compressed format image, or a single-plane, '_422' image format, extent.width must be a multiple of the compressed texel block width or (extent.width + dstOffset.x) must equal the destination image subresource width (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-dstImage-01732)"},
+    {"VUID-VkImageCopy-dstImage-01733", "If the calling command's dstImage is a compressed format image, or a single-plane, '_422' image format, extent.height must be a multiple of the compressed texel block height or (extent.height + dstOffset.y) must equal the destination image subresource height (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-dstImage-01733)"},
+    {"VUID-VkImageCopy-dstImage-01734", "If the calling command's dstImage is a compressed format image, or a single-plane, '_422' image format, extent.depth must be a multiple of the compressed texel block depth or (extent.depth + dstOffset.z) must equal the destination image subresource depth (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-dstImage-01734)"},
+    {"VUID-VkImageCopy-dstImage-01786", "If the calling command's dstImage is of type VK_IMAGE_TYPE_1D, then dstOffset.z must be 0 and extent.depth must be 1. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-dstImage-01786)"},
+    {"VUID-VkImageCopy-dstImage-01788", "If the calling command's dstImage is of type VK_IMAGE_TYPE_2D, then dstOffset.z must be 0. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-dstImage-01788)"},
+    {"VUID-VkImageCopy-dstImage-01792", "If the calling command's dstImage is of type VK_IMAGE_TYPE_2D, and the srcImage is of type VK_IMAGE_TYPE_3D, then extent.depth must equal to the layerCount member of dstSubresource. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-dstImage-01792)"},
+    {"VUID-VkImageCopy-dstOffset-00150", "dstOffset.x and (extent.width + dstOffset.x) must both be greater than or equal to 0 and less than or equal to the destination image subresource width (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-dstOffset-00150)"},
+    {"VUID-VkImageCopy-dstOffset-00151", "dstOffset.y and (extent.height + dstOffset.y) must both be greater than or equal to 0 and less than or equal to the destination image subresource height (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-dstOffset-00151)"},
+    {"VUID-VkImageCopy-dstOffset-00153", "dstOffset.z and (extent.depth + dstOffset.z) must both be greater than or equal to 0 and less than or equal to the destination image subresource depth (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-dstOffset-00153)"},
+    {"VUID-VkImageCopy-dstOffset-00162", "If the calling command's dstImage is a compressed format image, all members of dstOffset must be a multiple of the corresponding dimensions of the compressed texel block (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-dstOffset-00162)"},
+    {"VUID-VkImageCopy-dstSubresource-parameter", "dstSubresource must be a valid VkImageSubresourceLayers structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-dstSubresource-parameter)"},
+    {"VUID-VkImageCopy-extent-00140", "The number of slices of the extent (for 3D) or layers of the srcSubresource (for non-3D) must match the number of slices of the extent (for 3D) or layers of the dstSubresource (for non-3D) (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-extent-00140)"},
+    {"VUID-VkImageCopy-extent-00158", "If the calling command's srcImage is a compressed image, extent.width must be a multiple of the compressed texel block width or (extent.width + srcOffset.x) must equal the source image subresource width (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-extent-00158)"},
+    {"VUID-VkImageCopy-extent-00159", "If the calling command's srcImage is a compressed image, extent.height must be a multiple of the compressed texel block height or (extent.height + srcOffset.y) must equal the source image subresource height (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-extent-00159)"},
+    {"VUID-VkImageCopy-extent-00160", "If the calling command's srcImage is a compressed image, extent.depth must be a multiple of the compressed texel block depth or (extent.depth + srcOffset.z) must equal the source image subresource depth (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-extent-00160)"},
+    {"VUID-VkImageCopy-extent-00163", "If the calling command's dstImage is a compressed format image, extent.width must be a multiple of the compressed texel block width or (extent.width + dstOffset.x) must equal the destination image subresource width (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-extent-00163)"},
+    {"VUID-VkImageCopy-extent-00164", "If the calling command's dstImage is a compressed format image, extent.height must be a multiple of the compressed texel block height or (extent.height + dstOffset.y) must equal the destination image subresource height (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-extent-00164)"},
+    {"VUID-VkImageCopy-extent-00165", "If the calling command's dstImage is a compressed format image, extent.depth must be a multiple of the compressed texel block depth or (extent.depth + dstOffset.z) must equal the destination image subresource depth (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-extent-00165)"},
+    {"VUID-VkImageCopy-layerCount-00138", "The layerCount member of srcSubresource and dstSubresource must match (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-layerCount-00138)"},
+    {"VUID-VkImageCopy-srcImage-00139", "If either of the calling command's srcImage or dstImage parameters are of VkImageType VK_IMAGE_TYPE_3D, the baseArrayLayer and layerCount members of both srcSubresource and dstSubresource must be 0 and 1, respectively (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-srcImage-00139)"},
+    {"VUID-VkImageCopy-srcImage-00141", "If either of the calling command's srcImage or dstImage parameters are of VkImageType VK_IMAGE_TYPE_3D, the baseArrayLayer and layerCount members of the corresponding subresource must be 0 and 1, respectively (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-srcImage-00141)"},
+    {"VUID-VkImageCopy-srcImage-00146", "If the calling command's srcImage is of type VK_IMAGE_TYPE_1D, then srcOffset.y must be 0 and extent.height must be 1. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-srcImage-00146)"},
+    {"VUID-VkImageCopy-srcImage-01551", "If neither the calling command's srcImage nor the calling command's dstImage has a multi-planar image format then the aspectMask member of srcSubresource and dstSubresource must match (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-srcImage-01551)"},
+    {"VUID-VkImageCopy-srcImage-01552", "If the calling command's srcImage has a VkFormat with two planes then the srcSubresource aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT or VK_IMAGE_ASPECT_PLANE_1_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-srcImage-01552)"},
+    {"VUID-VkImageCopy-srcImage-01553", "If the calling command's srcImage has a VkFormat with three planes then the srcSubresource aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-srcImage-01553)"},
+    {"VUID-VkImageCopy-srcImage-01556", "If the calling command's srcImage has a multi-planar image format and the dstImage does not have a multi-planar image format, the dstSubresource aspectMask must be VK_IMAGE_ASPECT_COLOR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-srcImage-01556)"},
+    {"VUID-VkImageCopy-srcImage-01727", "If the calling command's srcImage is a compressed image, or a single-plane, '_422' image format, all members of srcOffset must be a multiple of the corresponding dimensions of the compressed texel block (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-srcImage-01727)"},
+    {"VUID-VkImageCopy-srcImage-01728", "If the calling command's srcImage is a compressed image, or a single-plane, '_422' image format, extent.width must be a multiple of the compressed texel block width or (extent.width + srcOffset.x) must equal the source image subresource width (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-srcImage-01728)"},
+    {"VUID-VkImageCopy-srcImage-01729", "If the calling command's srcImage is a compressed image, or a single-plane, '_422' image format, extent.height must be a multiple of the compressed texel block height or (extent.height + srcOffset.y) must equal the source image subresource height (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-srcImage-01729)"},
+    {"VUID-VkImageCopy-srcImage-01730", "If the calling command's srcImage is a compressed image, or a single-plane, '_422' image format, extent.depth must be a multiple of the compressed texel block depth or (extent.depth + srcOffset.z) must equal the source image subresource depth (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-srcImage-01730)"},
+    {"VUID-VkImageCopy-srcImage-01785", "If the calling command's srcImage is of type VK_IMAGE_TYPE_1D, then srcOffset.z must be 0 and extent.depth must be 1. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-srcImage-01785)"},
+    {"VUID-VkImageCopy-srcImage-01787", "If the calling command's srcImage is of type VK_IMAGE_TYPE_2D, then srcOffset.z must be 0. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-srcImage-01787)"},
+    {"VUID-VkImageCopy-srcImage-01789", "If the calling command's srcImage or dstImage is of type VK_IMAGE_TYPE_2D, then extent.depth must be 1. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-srcImage-01789)"},
+    {"VUID-VkImageCopy-srcImage-01790", "If both srcImage and dstImage are of type VK_IMAGE_TYPE_2D then extent.depth must be 1. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-srcImage-01790)"},
+    {"VUID-VkImageCopy-srcImage-01791", "If the calling command's srcImage is of type VK_IMAGE_TYPE_2D, and the dstImage is of type VK_IMAGE_TYPE_3D, then extent.depth must equal to the layerCount member of srcSubresource. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-srcImage-01791)"},
+    {"VUID-VkImageCopy-srcOffset-00144", "srcOffset.x and (extent.width + srcOffset.x) must both be greater than or equal to 0 and less than or equal to the source image subresource width (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-srcOffset-00144)"},
+    {"VUID-VkImageCopy-srcOffset-00145", "srcOffset.y and (extent.height + srcOffset.y) must both be greater than or equal to 0 and less than or equal to the source image subresource height (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-srcOffset-00145)"},
+    {"VUID-VkImageCopy-srcOffset-00147", "srcOffset.z and (extent.depth + srcOffset.z) must both be greater than or equal to 0 and less than or equal to the source image subresource depth (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-srcOffset-00147)"},
+    {"VUID-VkImageCopy-srcOffset-00157", "If the calling command's srcImage is a compressed image, all members of srcOffset must be a multiple of the corresponding dimensions of the compressed texel block (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-srcOffset-00157)"},
+    {"VUID-VkImageCopy-srcSubresource-parameter", "srcSubresource must be a valid VkImageSubresourceLayers structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCopy-srcSubresource-parameter)"},
+    {"VUID-VkImageCreateInfo-Format-02536", "If Format is a depth-stencil format and the pNext chain includes a VkImageStencilUsageCreateInfoEXT structure with its stencilUsage member including VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, extent.width must be less than or equal to VkPhysicalDeviceLimits::maxFramebufferWidth (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-Format-02536)"},
+    {"VUID-VkImageCreateInfo-None-01891", "If any of the bits VK_IMAGE_CREATE_SPARSE_BINDING_BIT, VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT, or VK_IMAGE_CREATE_SPARSE_ALIASED_BIT are set, VK_IMAGE_CREATE_PROTECTED_BIT must not also be set. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-None-01891)"},
+    {"VUID-VkImageCreateInfo-None-01925", "If any of the bits VK_IMAGE_CREATE_SPARSE_BINDING_BIT, VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT, or VK_IMAGE_CREATE_SPARSE_ALIASED_BIT are set, VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT must not also be set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-None-01925)"},
+    {"VUID-VkImageCreateInfo-arrayLayers-00948", "arrayLayers must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-arrayLayers-00948)"},
+    {"VUID-VkImageCreateInfo-arrayLayers-02256", "arrayLayers must be less than or equal to imageCreateMaxArrayLayers (as defined in Image Creation Limits). (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-arrayLayers-02256)"},
+    {"VUID-VkImageCreateInfo-extent-00944", "extent.width must be greater than 0. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-extent-00944)"},
+    {"VUID-VkImageCreateInfo-extent-00945", "extent.height must be greater than 0. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-extent-00945)"},
+    {"VUID-VkImageCreateInfo-extent-00946", "extent.depth must be greater than 0. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-extent-00946)"},
+    {"VUID-VkImageCreateInfo-extent-02252", "extent.width must be less than or equal to imageCreateMaxExtent.width (as defined in Image Creation Limits). (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-extent-02252)"},
+    {"VUID-VkImageCreateInfo-extent-02253", "extent.height must be less than or equal to imageCreateMaxExtent.height (as defined in Image Creation Limits). (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-extent-02253)"},
+    {"VUID-VkImageCreateInfo-extent-02254", "extent.depth must be less than or equal to imageCreateMaxExtent.depth (as defined in Image Creation Limits). (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-extent-02254)"},
+    {"VUID-VkImageCreateInfo-flags-00949", "If flags contains VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT, imageType must be VK_IMAGE_TYPE_2D (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-flags-00949)"},
+    {"VUID-VkImageCreateInfo-flags-00950", "If flags contains VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT, imageType must be VK_IMAGE_TYPE_3D (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-flags-00950)"},
+    {"VUID-VkImageCreateInfo-flags-00969", "If the sparse bindings feature is not enabled, flags must not contain VK_IMAGE_CREATE_SPARSE_BINDING_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-flags-00969)"},
+    {"VUID-VkImageCreateInfo-flags-00987", "If flags contains VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT or VK_IMAGE_CREATE_SPARSE_ALIASED_BIT, it must also contain VK_IMAGE_CREATE_SPARSE_BINDING_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-flags-00987)"},
+    {"VUID-VkImageCreateInfo-flags-01533", "If flags contains VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT format must be a depth or depth/stencil format (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-flags-01533)"},
+    {"VUID-VkImageCreateInfo-flags-01572", "If flags contains VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT, then format must be a block-compressed image format, an ETC compressed image format, or an ASTC compressed image format. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-flags-01572)"},
+    {"VUID-VkImageCreateInfo-flags-01573", "If flags contains VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT, then flags must also contain VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-flags-01573)"},
+    {"VUID-VkImageCreateInfo-flags-01890", "If the protected memory feature is not enabled, flags must not contain VK_IMAGE_CREATE_PROTECTED_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-flags-01890)"},
+    {"VUID-VkImageCreateInfo-flags-01924", "If the sparse aliased residency feature is not enabled, flags must not contain VK_IMAGE_CREATE_SPARSE_ALIASED_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-flags-01924)"},
+    {"VUID-VkImageCreateInfo-flags-02050", "If flags contains VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV, imageType must be VK_IMAGE_TYPE_2D or VK_IMAGE_TYPE_3D (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-flags-02050)"},
+    {"VUID-VkImageCreateInfo-flags-02051", "If flags contains VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV, it must not contain VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT and the format must not be a depth/stencil format (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-flags-02051)"},
+    {"VUID-VkImageCreateInfo-flags-02052", "If flags contains VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV and imageType is VK_IMAGE_TYPE_2D, extent.width and extent.height must be greater than 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-flags-02052)"},
+    {"VUID-VkImageCreateInfo-flags-02053", "If flags contains VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV and imageType is VK_IMAGE_TYPE_3D, extent.width, extent.height, and extent.depth must be greater than 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-flags-02053)"},
+    {"VUID-VkImageCreateInfo-flags-02259", "If flags contains VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT, then mipLevels must be one, arrayLayers must be one, imageType must be VK_IMAGE_TYPE_2D. and imageCreateMaybeLinear (as defined in Image Creation Limits) must be false. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-flags-02259)"},
+    {"VUID-VkImageCreateInfo-flags-02557", "If flags contains VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT, imageType must be VK_IMAGE_TYPE_2D (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-flags-02557)"},
+    {"VUID-VkImageCreateInfo-flags-02565", "If flags contains VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT, tiling must be VK_IMAGE_TILING_OPTIMAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-flags-02565)"},
+    {"VUID-VkImageCreateInfo-flags-02566", "If flags contains VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT, imageType must be VK_IMAGE_TYPE_2D (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-flags-02566)"},
+    {"VUID-VkImageCreateInfo-flags-02567", "If flags contains VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT, flags must not contain VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-flags-02567)"},
+    {"VUID-VkImageCreateInfo-flags-02568", "If flags contains VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT, mipLevels must be 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-flags-02568)"},
+    {"VUID-VkImageCreateInfo-flags-parameter", "flags must be a valid combination of VkImageCreateFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-flags-parameter)"},
+    {"VUID-VkImageCreateInfo-format-00943", "format must not be VK_FORMAT_UNDEFINED (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-format-00943)"},
+    {"VUID-VkImageCreateInfo-format-01577", "If format is not a multi-planar format, and flags does not include VK_IMAGE_CREATE_ALIAS_BIT, flags must not contain VK_IMAGE_CREATE_DISJOINT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-format-01577)"},
+    {"VUID-VkImageCreateInfo-format-02537", "If format is a depth-stencil format and the pNext chain includes a VkImageStencilUsageCreateInfoEXT structure with its stencilUsage member including VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, extent.height must be less than or equal to VkPhysicalDeviceLimits::maxFramebufferHeight (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-format-02537)"},
+    {"VUID-VkImageCreateInfo-format-02538", "If the multisampled storage images feature is not enabled, format is a depth-stencil format and the pNext chain includes a VkImageStencilUsageCreateInfoEXT structure with its stencilUsage including VK_IMAGE_USAGE_STORAGE_BIT, samples must be VK_SAMPLE_COUNT_1_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-format-02538)"},
+    {"VUID-VkImageCreateInfo-format-02561", "If the image format is one of those listed in Formats requiring sampler Y'CBCR conversion for VK_IMAGE_ASPECT_COLOR_BIT image views, then mipLevels must be 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-format-02561)"},
+    {"VUID-VkImageCreateInfo-format-02562", "If the image format is one of those listed in Formats requiring sampler Y'CBCR conversion for VK_IMAGE_ASPECT_COLOR_BIT image views, samples must be VK_SAMPLE_COUNT_1_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-format-02562)"},
+    {"VUID-VkImageCreateInfo-format-02563", "If the image format is one of those listed in Formats requiring sampler Y'CBCR conversion for VK_IMAGE_ASPECT_COLOR_BIT image views, imageType must be VK_IMAGE_TYPE_2D (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-format-02563)"},
+    {"VUID-VkImageCreateInfo-format-02564", "If the image format is one of those listed in Formats requiring sampler Y'CBCR conversion for VK_IMAGE_ASPECT_COLOR_BIT image views, arrayLayers must be 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-format-02564)"},
+    {"VUID-VkImageCreateInfo-format-02653", "If the image format is one of those listed in Formats requiring sampler Y'CBCR conversion for VK_IMAGE_ASPECT_COLOR_BIT image views, and the ycbcrImageArrays feature is not enabled, arrayLayers must be 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-format-02653)"},
+    {"VUID-VkImageCreateInfo-format-02795", "If format is a depth-stencil format, usage includes VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, and the pNext chain includes a VkImageStencilUsageCreateInfoEXT structure, then its VkImageStencilUsageCreateInfoEXT::stencilUsage member must also include VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-format-02795)"},
+    {"VUID-VkImageCreateInfo-format-02796", "If format is a depth-stencil format, usage does not include VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, and the pNext chain includes a VkImageStencilUsageCreateInfoEXT structure, then its VkImageStencilUsageCreateInfoEXT::stencilUsage member must also not include VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-format-02796)"},
+    {"VUID-VkImageCreateInfo-format-02797", "If format is a depth-stencil format, usage includes VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, and the pNext chain includes a VkImageStencilUsageCreateInfoEXT structure, then its VkImageStencilUsageCreateInfoEXT::stencilUsage member must also include VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-format-02797)"},
+    {"VUID-VkImageCreateInfo-format-02798", "If format is a depth-stencil format, usage does not include VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, and the pNext chain includes a VkImageStencilUsageCreateInfoEXT structure, then its VkImageStencilUsageCreateInfoEXT::stencilUsage member must also not include VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-format-02798)"},
+    {"VUID-VkImageCreateInfo-format-parameter", "format must be a valid VkFormat value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-format-parameter)"},
+    {"VUID-VkImageCreateInfo-imageCreateFormatFeatures-02260", "If format is a multi-planar format, and if imageCreateFormatFeatures (as defined in Image Creation Limits) does not contain VK_FORMAT_FEATURE_DISJOINT_BIT, then flags must not contain VK_IMAGE_CREATE_DISJOINT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-imageCreateFormatFeatures-02260)"},
+    {"VUID-VkImageCreateInfo-imageCreateMaxMipLevels-02251", "Each of the following values (as described in Image Creation Limits) must not be undefined imageCreateMaxMipLevels, imageCreateMaxArrayLayers, imageCreateMaxExtent, and imageCreateSampleCounts. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-imageCreateMaxMipLevels-02251)"},
+    {"VUID-VkImageCreateInfo-imageType-00954", "If imageType is VK_IMAGE_TYPE_2D and flags contains VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT, extent.width and extent.height must be equal and arrayLayers must be greater than or equal to 6 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-imageType-00954)"},
+    {"VUID-VkImageCreateInfo-imageType-00956", "If imageType is VK_IMAGE_TYPE_1D, both extent.height and extent.depth must be 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-imageType-00956)"},
+    {"VUID-VkImageCreateInfo-imageType-00957", "If imageType is VK_IMAGE_TYPE_2D, extent.depth must be 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-imageType-00957)"},
+    {"VUID-VkImageCreateInfo-imageType-00961", "If imageType is VK_IMAGE_TYPE_3D, arrayLayers must be 1. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-imageType-00961)"},
+    {"VUID-VkImageCreateInfo-imageType-00970", "If imageType is VK_IMAGE_TYPE_1D, flags must not contain VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-imageType-00970)"},
+    {"VUID-VkImageCreateInfo-imageType-00971", "If the sparse residency for 2D images feature is not enabled, and imageType is VK_IMAGE_TYPE_2D, flags must not contain VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-imageType-00971)"},
+    {"VUID-VkImageCreateInfo-imageType-00972", "If the sparse residency for 3D images feature is not enabled, and imageType is VK_IMAGE_TYPE_3D, flags must not contain VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-imageType-00972)"},
+    {"VUID-VkImageCreateInfo-imageType-00973", "If the sparse residency for images with 2 samples feature is not enabled, imageType is VK_IMAGE_TYPE_2D, and samples is VK_SAMPLE_COUNT_2_BIT, flags must not contain VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-imageType-00973)"},
+    {"VUID-VkImageCreateInfo-imageType-00974", "If the sparse residency for images with 4 samples feature is not enabled, imageType is VK_IMAGE_TYPE_2D, and samples is VK_SAMPLE_COUNT_4_BIT, flags must not contain VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-imageType-00974)"},
+    {"VUID-VkImageCreateInfo-imageType-00975", "If the sparse residency for images with 8 samples feature is not enabled, imageType is VK_IMAGE_TYPE_2D, and samples is VK_SAMPLE_COUNT_8_BIT, flags must not contain VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-imageType-00975)"},
+    {"VUID-VkImageCreateInfo-imageType-00976", "If the sparse residency for images with 16 samples feature is not enabled, imageType is VK_IMAGE_TYPE_2D, and samples is VK_SAMPLE_COUNT_16_BIT, flags must not contain VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-imageType-00976)"},
+    {"VUID-VkImageCreateInfo-imageType-02082", "If usage includes VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, imageType must be VK_IMAGE_TYPE_2D. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-imageType-02082)"},
+    {"VUID-VkImageCreateInfo-imageType-parameter", "imageType must be a valid VkImageType value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-imageType-parameter)"},
+    {"VUID-VkImageCreateInfo-initialLayout-00993", "initialLayout must be VK_IMAGE_LAYOUT_UNDEFINED or VK_IMAGE_LAYOUT_PREINITIALIZED. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-initialLayout-00993)"},
+    {"VUID-VkImageCreateInfo-initialLayout-parameter", "initialLayout must be a valid VkImageLayout value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-initialLayout-parameter)"},
+    {"VUID-VkImageCreateInfo-mipLevels-00947", "mipLevels must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-mipLevels-00947)"},
+    {"VUID-VkImageCreateInfo-mipLevels-00958", "mipLevels must be less than or equal to the number of levels in the complete mipmap chain based on extent.width, extent.height, and extent.depth. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-mipLevels-00958)"},
+    {"VUID-VkImageCreateInfo-mipLevels-02255", "mipLevels must be less than or equal to imageCreateMaxMipLevels (as defined in Image Creation Limits). (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-mipLevels-02255)"},
+    {"VUID-VkImageCreateInfo-pNext-00988", "If the pNext chain includes a VkExternalMemoryImageCreateInfoNV structure, it must not contain a VkExternalMemoryImageCreateInfo structure. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-pNext-00988)"},
+    {"VUID-VkImageCreateInfo-pNext-00990", "If the pNext chain includes a VkExternalMemoryImageCreateInfo structure, its handleTypes member must only contain bits that are also in VkExternalImageFormatProperties::externalMemoryProperties.compatibleHandleTypes, as returned by vkGetPhysicalDeviceImageFormatProperties2 with format, imageType, tiling, usage, and flags equal to those in this structure, and with a VkPhysicalDeviceExternalImageFormatInfo structure included in the pNext chain, with a handleType equal to any one of the handle types specified in VkExternalMemoryImageCreateInfo::handleTypes (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-pNext-00990)"},
+    {"VUID-VkImageCreateInfo-pNext-00991", "If the pNext chain includes a VkExternalMemoryImageCreateInfoNV structure, its handleTypes member must only contain bits that are also in VkExternalImageFormatPropertiesNV::externalMemoryProperties.compatibleHandleTypes, as returned by vkGetPhysicalDeviceExternalImageFormatPropertiesNV with format, imageType, tiling, usage, and flags equal to those in this structure, and with externalHandleType equal to any one of the handle types specified in VkExternalMemoryImageCreateInfoNV::handleTypes (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-pNext-00991)"},
+    {"VUID-VkImageCreateInfo-pNext-01443", "If the pNext chain includes a ifdef::VK_VERSION_1_1,VK_KHR_external_memory[VkExternalMemoryImageCreateInfo] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-pNext-01443)"},
+    {"VUID-VkImageCreateInfo-pNext-01974", "If the pNext chain includes a VkExternalFormatANDROID structure, and its externalFormat member is non-zero the format must be VK_FORMAT_UNDEFINED. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-pNext-01974)"},
+    {"VUID-VkImageCreateInfo-pNext-01975", "If the pNext chain does not include a VkExternalFormatANDROID structure, or does and its externalFormat member is 0, the format must not be VK_FORMAT_UNDEFINED. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-pNext-01975)"},
+    {"VUID-VkImageCreateInfo-pNext-02262", "If the pNext chain includes a VkImageDrmFormatModifierListCreateInfoEXT or VkImageDrmFormatModifierExplicitCreateInfoEXT structure, then tiling must be VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-pNext-02262)"},
+    {"VUID-VkImageCreateInfo-pNext-02393", "If the pNext chain includes a VkExternalMemoryImageCreateInfo structure whose handleTypes member includes VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID, imageType must be VK_IMAGE_TYPE_2D. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-pNext-02393)"},
+    {"VUID-VkImageCreateInfo-pNext-02394", "If the pNext chain includes a VkExternalMemoryImageCreateInfo structure whose handleTypes member includes VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID, mipLevels must either be 1 or equal to the number of levels in the complete mipmap chain based on extent.width, extent.height, and extent.depth. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-pNext-02394)"},
+    {"VUID-VkImageCreateInfo-pNext-02396", "If the pNext chain includes a VkExternalFormatANDROID structure whose externalFormat member is not 0, flags must not include VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-pNext-02396)"},
+    {"VUID-VkImageCreateInfo-pNext-02397", "If the pNext chain includes a VkExternalFormatANDROID structure whose externalFormat member is not 0, usage must not include any usages except VK_IMAGE_USAGE_SAMPLED_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-pNext-02397)"},
+    {"VUID-VkImageCreateInfo-pNext-02398", "If the pNext chain includes a VkExternalFormatANDROID structure whose externalFormat member is not 0, tiling must be VK_IMAGE_TILING_OPTIMAL. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-pNext-02398)"},
+    {"VUID-VkImageCreateInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDedicatedAllocationImageCreateInfoNV, VkExternalFormatANDROID, VkExternalMemoryImageCreateInfo, VkExternalMemoryImageCreateInfoNV, VkImageDrmFormatModifierExplicitCreateInfoEXT, VkImageDrmFormatModifierListCreateInfoEXT, VkImageFormatListCreateInfoKHR, VkImageStencilUsageCreateInfoEXT, or VkImageSwapchainCreateInfoKHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-pNext-pNext)"},
+    {"VUID-VkImageCreateInfo-physicalDeviceCount-01421", "If the logical device was created with VkDeviceGroupDeviceCreateInfo::physicalDeviceCount equal to 1, flags must not contain VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-physicalDeviceCount-01421)"},
+    {"VUID-VkImageCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-sType-sType)"},
+    {"VUID-VkImageCreateInfo-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-sType-unique)"},
+    {"VUID-VkImageCreateInfo-samples-02083", "If usage includes VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, samples must be VK_SAMPLE_COUNT_1_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-samples-02083)"},
+    {"VUID-VkImageCreateInfo-samples-02257", "If samples is not VK_SAMPLE_COUNT_1_BIT, then imageType must be VK_IMAGE_TYPE_2D, flags must not contain VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT, mipLevels must be equal to 1, and imageCreateMaybeLinear (as defined in Image Creation Limits) must be false, (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-samples-02257)"},
+    {"VUID-VkImageCreateInfo-samples-02258", "samples must be a bit value that is set in imageCreateSampleCounts (as defined in Image Creation Limits). (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-samples-02258)"},
+    {"VUID-VkImageCreateInfo-samples-02558", "If samples is not VK_SAMPLE_COUNT_1_BIT, usage must not contain VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-samples-02558)"},
+    {"VUID-VkImageCreateInfo-samples-parameter", "samples must be a valid VkSampleCountFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-samples-parameter)"},
+    {"VUID-VkImageCreateInfo-sharingMode-00941", "If sharingMode is VK_SHARING_MODE_CONCURRENT, pQueueFamilyIndices must be a valid pointer to an array of queueFamilyIndexCount uint32_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-sharingMode-00941)"},
+    {"VUID-VkImageCreateInfo-sharingMode-00942", "If sharingMode is VK_SHARING_MODE_CONCURRENT, queueFamilyIndexCount must be greater than 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-sharingMode-00942)"},
+    {"VUID-VkImageCreateInfo-sharingMode-01392", "If sharingMode is VK_SHARING_MODE_CONCURRENT, each element of pQueueFamilyIndices must be unique and must be less than pQueueFamilyPropertyCount returned by vkGetPhysicalDeviceQueueFamilyProperties for the physicalDevice that was used to create device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-sharingMode-01392)"},
+    {"VUID-VkImageCreateInfo-sharingMode-01420", "If sharingMode is VK_SHARING_MODE_CONCURRENT, each element of pQueueFamilyIndices must be unique and must be less than pQueueFamilyPropertyCount returned by either vkGetPhysicalDeviceQueueFamilyProperties or vkGetPhysicalDeviceQueueFamilyProperties2 for the physicalDevice that was used to create device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-sharingMode-01420)"},
+    {"VUID-VkImageCreateInfo-sharingMode-parameter", "sharingMode must be a valid VkSharingMode value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-sharingMode-parameter)"},
+    {"VUID-VkImageCreateInfo-tiling-02084", "If usage includes VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, tiling must be VK_IMAGE_TILING_OPTIMAL. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-tiling-02084)"},
+    {"VUID-VkImageCreateInfo-tiling-02261", "If tiling is VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT, then the pNext chain must include exactly one of VkImageDrmFormatModifierListCreateInfoEXT or VkImageDrmFormatModifierExplicitCreateInfoEXT structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-tiling-02261)"},
+    {"VUID-VkImageCreateInfo-tiling-02353", "If tiling is VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT and flags contains VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT, then the pNext chain must include an VkImageFormatListCreateInfoKHR structure with non-zero viewFormatCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-tiling-02353)"},
+    {"VUID-VkImageCreateInfo-tiling-parameter", "tiling must be a valid VkImageTiling value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-tiling-parameter)"},
+    {"VUID-VkImageCreateInfo-usage-00963", "If usage includes VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, then bits other than VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, and VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT must not be set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-usage-00963)"},
+    {"VUID-VkImageCreateInfo-usage-00964", "If usage includes VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, extent.width must be less than or equal to VkPhysicalDeviceLimits::maxFramebufferWidth (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-usage-00964)"},
+    {"VUID-VkImageCreateInfo-usage-00965", "If usage includes VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, extent.height must be less than or equal to VkPhysicalDeviceLimits::maxFramebufferHeight (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-usage-00965)"},
+    {"VUID-VkImageCreateInfo-usage-00966", "If usage includes VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, usage must also contain at least one of VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-usage-00966)"},
+    {"VUID-VkImageCreateInfo-usage-00968", "If the multisampled storage images feature is not enabled, and usage contains VK_IMAGE_USAGE_STORAGE_BIT, samples must be VK_SAMPLE_COUNT_1_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-usage-00968)"},
+    {"VUID-VkImageCreateInfo-usage-02559", "If usage includes VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT, extent.width must be less than or equal to the ceiling of maxFramebufferWidth/minFragmentDensityTexelSize.width (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-usage-02559)"},
+    {"VUID-VkImageCreateInfo-usage-02560", "If usage includes VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT, extent.height must be less than or equal to the ceiling of maxFramebufferHeight/minFragmentDensityTexelSize.height (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-usage-02560)"},
+    {"VUID-VkImageCreateInfo-usage-parameter", "usage must be a valid combination of VkImageUsageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-usage-parameter)"},
+    {"VUID-VkImageCreateInfo-usage-requiredbitmask", "usage must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageCreateInfo-usage-requiredbitmask)"},
+    {"VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-arrayPitch-02268", "For each element of pPlaneLayouts, arrayPitch must be 0 if VkImageCreateInfo::arrayLayers is 1. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-arrayPitch-02268)"},
+    {"VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-depthPitch-02269", "For each element of pPlaneLayouts, depthPitch must be 0 if VkImageCreateInfo::extent.depth is 1. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-depthPitch-02269)"},
+    {"VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-drmFormatModifier-02264", "drmFormatModifier must be compatible with the parameters in VkImageCreateInfo and its pNext chain, as determined by querying VkPhysicalDeviceImageFormatInfo2 extended with VkPhysicalDeviceImageDrmFormatModifierInfoEXT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-drmFormatModifier-02264)"},
+    {"VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-drmFormatModifierPlaneCount-02265", "drmFormatModifierPlaneCount must be equal to the VkDrmFormatModifierPropertiesEXT::drmFormatModifierPlaneCount associated with VkImageCreateInfo::format and drmFormatModifier, as found by querying VkDrmFormatModifierPropertiesListEXT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-drmFormatModifierPlaneCount-02265)"},
+    {"VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-pPlaneLayouts-parameter", "If drmFormatModifierPlaneCount is not 0, pPlaneLayouts must be a valid pointer to an array of drmFormatModifierPlaneCount VkSubresourceLayout structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-pPlaneLayouts-parameter)"},
+    {"VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-sType-sType)"},
+    {"VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-size-02267", "For each element of pPlaneLayouts, size must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-size-02267)"},
+    {"VUID-VkImageDrmFormatModifierListCreateInfoEXT-drmFormatModifierCount-arraylength", "drmFormatModifierCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageDrmFormatModifierListCreateInfoEXT-drmFormatModifierCount-arraylength)"},
+    {"VUID-VkImageDrmFormatModifierListCreateInfoEXT-pDrmFormatModifiers-02263", "Each modifier in pDrmFormatModifiers must be compatible with the parameters in VkImageCreateInfo and its pNext chain, as determined by querying VkPhysicalDeviceImageFormatInfo2 extended with VkPhysicalDeviceImageDrmFormatModifierInfoEXT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageDrmFormatModifierListCreateInfoEXT-pDrmFormatModifiers-02263)"},
+    {"VUID-VkImageDrmFormatModifierListCreateInfoEXT-pDrmFormatModifiers-parameter", "pDrmFormatModifiers must be a valid pointer to an array of drmFormatModifierCount uint64_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageDrmFormatModifierListCreateInfoEXT-pDrmFormatModifiers-parameter)"},
+    {"VUID-VkImageDrmFormatModifierListCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageDrmFormatModifierListCreateInfoEXT-sType-sType)"},
+    {"VUID-VkImageDrmFormatModifierPropertiesEXT-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageDrmFormatModifierPropertiesEXT-pNext-pNext)"},
+    {"VUID-VkImageDrmFormatModifierPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageDrmFormatModifierPropertiesEXT-sType-sType)"},
+    {"VUID-VkImageFormatListCreateInfoKHR-flags-01579", "If VkImageCreateInfo::flags does not contain VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT, viewFormatCount must be 0 or 1. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageFormatListCreateInfoKHR-flags-01579)"},
+    {"VUID-VkImageFormatListCreateInfoKHR-pViewFormats-parameter", "If viewFormatCount is not 0, pViewFormats must be a valid pointer to an array of viewFormatCount valid VkFormat values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageFormatListCreateInfoKHR-pViewFormats-parameter)"},
+    {"VUID-VkImageFormatListCreateInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageFormatListCreateInfoKHR-sType-sType)"},
+    {"VUID-VkImageFormatListCreateInfoKHR-viewFormatCount-01578", "If viewFormatCount is not 0, all of the formats in the pViewFormats array must be compatible with the format specified in the format field of VkImageCreateInfo, as described in the compatibility table. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageFormatListCreateInfoKHR-viewFormatCount-01578)"},
+    {"VUID-VkImageFormatListCreateInfoKHR-viewFormatCount-01580", "If viewFormatCount is not 0, VkImageCreateInfo::format must be in pViewFormats. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageFormatListCreateInfoKHR-viewFormatCount-01580)"},
+    {"VUID-VkImageFormatProperties2-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkAndroidHardwareBufferUsageANDROID, VkExternalImageFormatProperties, VkFilterCubicImageViewImageFormatPropertiesEXT, VkSamplerYcbcrConversionImageFormatProperties, or VkTextureLODGatherFormatPropertiesAMD (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageFormatProperties2-pNext-pNext)"},
+    {"VUID-VkImageFormatProperties2-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageFormatProperties2-sType-sType)"},
+    {"VUID-VkImageFormatProperties2-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageFormatProperties2-sType-unique)"},
+    {"VUID-VkImageMemoryBarrier-image-01199", "If image was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, srcQueueFamilyIndex and dstQueueFamilyIndex must both be VK_QUEUE_FAMILY_IGNORED (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-image-01199)"},
+    {"VUID-VkImageMemoryBarrier-image-01200", "If image was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE, srcQueueFamilyIndex and dstQueueFamilyIndex must either both be VK_QUEUE_FAMILY_IGNORED, or both be a valid queue family (see Queue Family Properties). (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-image-01200)"},
+    {"VUID-VkImageMemoryBarrier-image-01201", "If image was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE and srcQueueFamilyIndex is VK_QUEUE_FAMILY_IGNORED, dstQueueFamilyIndex must also be VK_QUEUE_FAMILY_IGNORED. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-image-01201)"},
+    {"VUID-VkImageMemoryBarrier-image-01205", "If image was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE, and srcQueueFamilyIndex and dstQueueFamilyIndex are not VK_QUEUE_FAMILY_IGNORED, at least one of them must be the same as the family of the queue that will execute this barrier (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-image-01205)"},
+    {"VUID-VkImageMemoryBarrier-image-01207", "If image has a depth/stencil format with both depth and stencil components, then the aspectMask member of subresourceRange must include both VK_IMAGE_ASPECT_DEPTH_BIT and VK_IMAGE_ASPECT_STENCIL_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-image-01207)"},
+    {"VUID-VkImageMemoryBarrier-image-01381", "If image was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, at least one of srcQueueFamilyIndex and dstQueueFamilyIndex must be VK_QUEUE_FAMILY_IGNORED (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-image-01381)"},
+    {"VUID-VkImageMemoryBarrier-image-01671", "If image has a single-plane color format or is not disjoint, then the aspectMask member of subresourceRange must be VK_IMAGE_ASPECT_COLOR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-image-01671)"},
+    {"VUID-VkImageMemoryBarrier-image-01672", "If image has a multi-planar format and the image is disjoint, then the aspectMask member of subresourceRange must include either at least one of VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, and VK_IMAGE_ASPECT_PLANE_2_BIT; or must include VK_IMAGE_ASPECT_COLOR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-image-01672)"},
+    {"VUID-VkImageMemoryBarrier-image-01673", "If image has a multi-planar format with only two planes, then the aspectMask member of subresourceRange must not include VK_IMAGE_ASPECT_PLANE_2_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-image-01673)"},
+    {"VUID-VkImageMemoryBarrier-image-01766", "If image was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, and one of srcQueueFamilyIndex and dstQueueFamilyIndex is VK_QUEUE_FAMILY_IGNORED, the other must be VK_QUEUE_FAMILY_IGNORED or a special queue family reserved for external memory transfers, as described in Queue Family Ownership Transfer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-image-01766)"},
+    {"VUID-VkImageMemoryBarrier-image-01767", "If image was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE and srcQueueFamilyIndex is not VK_QUEUE_FAMILY_IGNORED, it must be a valid queue family or a special queue family reserved for external memory transfers, as described in Queue Family Ownership Transfer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-image-01767)"},
+    {"VUID-VkImageMemoryBarrier-image-01768", "If image was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE and dstQueueFamilyIndex is not VK_QUEUE_FAMILY_IGNORED, it must be a valid queue family or a special queue family reserved for external memory transfers, as described in Queue Family Ownership Transfer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-image-01768)"},
+    {"VUID-VkImageMemoryBarrier-image-01932", "If image is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-image-01932)"},
+    {"VUID-VkImageMemoryBarrier-image-03319", "If image has a depth/stencil format with both depth and stencil and the separateDepthStencilLayouts feature is enabled, then the aspectMask member of subresourceRange must include either or both VK_IMAGE_ASPECT_DEPTH_BIT and VK_IMAGE_ASPECT_STENCIL_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-image-03319)"},
+    {"VUID-VkImageMemoryBarrier-image-03320", "If image has a depth/stencil format with both depth and stencil and the separateDepthStencilLayouts feature is not enabled, then the aspectMask member of subresourceRange must include both VK_IMAGE_ASPECT_DEPTH_BIT and VK_IMAGE_ASPECT_STENCIL_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-image-03320)"},
+    {"VUID-VkImageMemoryBarrier-image-parameter", "image must be a valid VkImage handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-image-parameter)"},
+    {"VUID-VkImageMemoryBarrier-newLayout-01198", "newLayout must not be VK_IMAGE_LAYOUT_UNDEFINED or VK_IMAGE_LAYOUT_PREINITIALIZED (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-newLayout-01198)"},
+    {"VUID-VkImageMemoryBarrier-newLayout-parameter", "newLayout must be a valid VkImageLayout value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-newLayout-parameter)"},
+    {"VUID-VkImageMemoryBarrier-oldLayout-01197", "oldLayout must be VK_IMAGE_LAYOUT_UNDEFINED or the current layout of the image subresources affected by the barrier (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-oldLayout-01197)"},
+    {"VUID-VkImageMemoryBarrier-oldLayout-01208", "If either oldLayout or newLayout is VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL then image must have been created with VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-oldLayout-01208)"},
+    {"VUID-VkImageMemoryBarrier-oldLayout-01209", "If either oldLayout or newLayout is VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL then image must have been created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-oldLayout-01209)"},
+    {"VUID-VkImageMemoryBarrier-oldLayout-01210", "If either oldLayout or newLayout is VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL then image must have been created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-oldLayout-01210)"},
+    {"VUID-VkImageMemoryBarrier-oldLayout-01211", "If either oldLayout or newLayout is VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL then image must have been created with VK_IMAGE_USAGE_SAMPLED_BIT or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-oldLayout-01211)"},
+    {"VUID-VkImageMemoryBarrier-oldLayout-01212", "If either oldLayout or newLayout is VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL then image must have been created with VK_IMAGE_USAGE_TRANSFER_SRC_BIT set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-oldLayout-01212)"},
+    {"VUID-VkImageMemoryBarrier-oldLayout-01213", "If either oldLayout or newLayout is VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL then image must have been created with VK_IMAGE_USAGE_TRANSFER_DST_BIT set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-oldLayout-01213)"},
+    {"VUID-VkImageMemoryBarrier-oldLayout-01658", "If either oldLayout or newLayout is VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL then image must have been created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-oldLayout-01658)"},
+    {"VUID-VkImageMemoryBarrier-oldLayout-01659", "If either oldLayout or newLayout is VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL then image must have been created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-oldLayout-01659)"},
+    {"VUID-VkImageMemoryBarrier-oldLayout-02088", "If either oldLayout or newLayout is VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV then image must have been created with VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-oldLayout-02088)"},
+    {"VUID-VkImageMemoryBarrier-oldLayout-parameter", "oldLayout must be a valid VkImageLayout value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-oldLayout-parameter)"},
+    {"VUID-VkImageMemoryBarrier-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkSampleLocationsInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-pNext-pNext)"},
+    {"VUID-VkImageMemoryBarrier-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-sType-sType)"},
+    {"VUID-VkImageMemoryBarrier-subresourceRange-01486", "subresourceRange.baseMipLevel must be less than the mipLevels specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-subresourceRange-01486)"},
+    {"VUID-VkImageMemoryBarrier-subresourceRange-01488", "subresourceRange.baseArrayLayer must be less than the arrayLayers specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-subresourceRange-01488)"},
+    {"VUID-VkImageMemoryBarrier-subresourceRange-01724", "If subresourceRange.levelCount is not VK_REMAINING_MIP_LEVELS, subresourceRange.baseMipLevel + subresourceRange.levelCount must be less than or equal to the mipLevels specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-subresourceRange-01724)"},
+    {"VUID-VkImageMemoryBarrier-subresourceRange-01725", "If subresourceRange.layerCount is not VK_REMAINING_ARRAY_LAYERS, subresourceRange.baseArrayLayer + subresourceRange.layerCount must be less than or equal to the arrayLayers specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-subresourceRange-01725)"},
+    {"VUID-VkImageMemoryBarrier-subresourceRange-parameter", "subresourceRange must be a valid VkImageSubresourceRange structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryBarrier-subresourceRange-parameter)"},
+    {"VUID-VkImageMemoryRequirementsInfo2-image-01589", "If image was created with a multi-planar format and the VK_IMAGE_CREATE_DISJOINT_BIT flag, there must be a VkImagePlaneMemoryRequirementsInfo included in the pNext chain of the VkImageMemoryRequirementsInfo2 structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryRequirementsInfo2-image-01589)"},
+    {"VUID-VkImageMemoryRequirementsInfo2-image-01590", "If image was not created with the VK_IMAGE_CREATE_DISJOINT_BIT flag, there must not be a VkImagePlaneMemoryRequirementsInfo included in the pNext chain of the VkImageMemoryRequirementsInfo2 structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryRequirementsInfo2-image-01590)"},
+    {"VUID-VkImageMemoryRequirementsInfo2-image-01591", "If image was created with a single-plane format, there must not be a VkImagePlaneMemoryRequirementsInfo included in the pNext chain of the VkImageMemoryRequirementsInfo2 structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryRequirementsInfo2-image-01591)"},
+    {"VUID-VkImageMemoryRequirementsInfo2-image-01897", "If image was created with the VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID external memory handle type, then image must be bound to memory. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryRequirementsInfo2-image-01897)"},
+    {"VUID-VkImageMemoryRequirementsInfo2-image-02279", "If image was created with VK_IMAGE_CREATE_DISJOINT_BIT and with VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT, then there must be a VkImagePlaneMemoryRequirementsInfo included in the pNext chain of the VkImageMemoryRequirementsInfo2 structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryRequirementsInfo2-image-02279)"},
+    {"VUID-VkImageMemoryRequirementsInfo2-image-02280", "If image was created with a single-plane format and with any tiling other than VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT, then there must not be a VkImagePlaneMemoryRequirementsInfo included in the pNext chain of the VkImageMemoryRequirementsInfo2 structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryRequirementsInfo2-image-02280)"},
+    {"VUID-VkImageMemoryRequirementsInfo2-image-parameter", "image must be a valid VkImage handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryRequirementsInfo2-image-parameter)"},
+    {"VUID-VkImageMemoryRequirementsInfo2-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkImagePlaneMemoryRequirementsInfo (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryRequirementsInfo2-pNext-pNext)"},
+    {"VUID-VkImageMemoryRequirementsInfo2-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageMemoryRequirementsInfo2-sType-sType)"},
+    {"VUID-VkImagePipeSurfaceCreateInfoFUCHSIA-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImagePipeSurfaceCreateInfoFUCHSIA-flags-zerobitmask)"},
+    {"VUID-VkImagePipeSurfaceCreateInfoFUCHSIA-imagePipeHandle-00000", "imagePipeHandle must be a valid zx_handle_t (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImagePipeSurfaceCreateInfoFUCHSIA-imagePipeHandle-00000)"},
+    {"VUID-VkImagePipeSurfaceCreateInfoFUCHSIA-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImagePipeSurfaceCreateInfoFUCHSIA-pNext-pNext)"},
+    {"VUID-VkImagePipeSurfaceCreateInfoFUCHSIA-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImagePipeSurfaceCreateInfoFUCHSIA-sType-sType)"},
+    {"VUID-VkImagePlaneMemoryRequirementsInfo-planeAspect-02281", "If the image's tiling is VK_IMAGE_TILING_LINEAR or VK_IMAGE_TILING_OPTIMAL, then planeAspect must be a single valid format plane for the image. (That is, for a two-plane image planeAspect must be VK_IMAGE_ASPECT_PLANE_0_BIT or VK_IMAGE_ASPECT_PLANE_1_BIT, and for a three-plane image planeAspect must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT or VK_IMAGE_ASPECT_PLANE_2_BIT). (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImagePlaneMemoryRequirementsInfo-planeAspect-02281)"},
+    {"VUID-VkImagePlaneMemoryRequirementsInfo-planeAspect-02282", "If the image's tiling is VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT,  then planeAspect must be a single valid memory plane for the  image.  (That is, aspectMask must specify a plane index that is less than  the  drmFormatModifierPlaneCount  associated with the image's format and drmFormatModifier.) (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImagePlaneMemoryRequirementsInfo-planeAspect-02282)"},
+    {"VUID-VkImagePlaneMemoryRequirementsInfo-planeAspect-parameter", "planeAspect must be a valid VkImageAspectFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImagePlaneMemoryRequirementsInfo-planeAspect-parameter)"},
+    {"VUID-VkImagePlaneMemoryRequirementsInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImagePlaneMemoryRequirementsInfo-sType-sType)"},
+    {"VUID-VkImageResolve-aspectMask-00266", "The aspectMask member of srcSubresource and dstSubresource must only contain VK_IMAGE_ASPECT_COLOR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageResolve-aspectMask-00266)"},
+    {"VUID-VkImageResolve-dstImage-00276", "If the calling command's dstImage is of type VK_IMAGE_TYPE_1D, then dstOffset.y must be 0 and extent.height must be 1. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageResolve-dstImage-00276)"},
+    {"VUID-VkImageResolve-dstImage-00278", "If the calling command's dstImage is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then dstOffset.z must be 0 and extent.depth must be 1. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageResolve-dstImage-00278)"},
+    {"VUID-VkImageResolve-dstOffset-00274", "dstOffset.x and (extent.width + dstOffset.x) must both be greater than or equal to 0 and less than or equal to the destination image subresource width (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageResolve-dstOffset-00274)"},
+    {"VUID-VkImageResolve-dstOffset-00275", "dstOffset.y and (extent.height + dstOffset.y) must both be greater than or equal to 0 and less than or equal to the destination image subresource height (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageResolve-dstOffset-00275)"},
+    {"VUID-VkImageResolve-dstOffset-00277", "dstOffset.z and (extent.depth + dstOffset.z) must both be greater than or equal to 0 and less than or equal to the destination image subresource depth (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageResolve-dstOffset-00277)"},
+    {"VUID-VkImageResolve-dstSubresource-parameter", "dstSubresource must be a valid VkImageSubresourceLayers structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageResolve-dstSubresource-parameter)"},
+    {"VUID-VkImageResolve-layerCount-00267", "The layerCount member of srcSubresource and dstSubresource must match (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageResolve-layerCount-00267)"},
+    {"VUID-VkImageResolve-srcImage-00268", "If either of the calling command's srcImage or dstImage parameters are of VkImageType VK_IMAGE_TYPE_3D, the baseArrayLayer and layerCount members of both srcSubresource and dstSubresource must be 0 and 1, respectively (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageResolve-srcImage-00268)"},
+    {"VUID-VkImageResolve-srcImage-00271", "If the calling command's srcImage is of type VK_IMAGE_TYPE_1D, then srcOffset.y must be 0 and extent.height must be 1. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageResolve-srcImage-00271)"},
+    {"VUID-VkImageResolve-srcImage-00273", "If the calling command's srcImage is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then srcOffset.z must be 0 and extent.depth must be 1. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageResolve-srcImage-00273)"},
+    {"VUID-VkImageResolve-srcOffset-00269", "srcOffset.x and (extent.width + srcOffset.x) must both be greater than or equal to 0 and less than or equal to the source image subresource width (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageResolve-srcOffset-00269)"},
+    {"VUID-VkImageResolve-srcOffset-00270", "srcOffset.y and (extent.height + srcOffset.y) must both be greater than or equal to 0 and less than or equal to the source image subresource height (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageResolve-srcOffset-00270)"},
+    {"VUID-VkImageResolve-srcOffset-00272", "srcOffset.z and (extent.depth + srcOffset.z) must both be greater than or equal to 0 and less than or equal to the source image subresource depth (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageResolve-srcOffset-00272)"},
+    {"VUID-VkImageResolve-srcSubresource-parameter", "srcSubresource must be a valid VkImageSubresourceLayers structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageResolve-srcSubresource-parameter)"},
+    {"VUID-VkImageSparseMemoryRequirementsInfo2-image-parameter", "image must be a valid VkImage handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageSparseMemoryRequirementsInfo2-image-parameter)"},
+    {"VUID-VkImageSparseMemoryRequirementsInfo2-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageSparseMemoryRequirementsInfo2-pNext-pNext)"},
+    {"VUID-VkImageSparseMemoryRequirementsInfo2-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageSparseMemoryRequirementsInfo2-sType-sType)"},
+    {"VUID-VkImageStencilUsageCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageStencilUsageCreateInfoEXT-sType-sType)"},
+    {"VUID-VkImageStencilUsageCreateInfoEXT-stencilUsage-02539", "If stencilUsage includes VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, it must not include bits other than VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageStencilUsageCreateInfoEXT-stencilUsage-02539)"},
+    {"VUID-VkImageStencilUsageCreateInfoEXT-stencilUsage-parameter", "stencilUsage must be a valid combination of VkImageUsageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageStencilUsageCreateInfoEXT-stencilUsage-parameter)"},
+    {"VUID-VkImageStencilUsageCreateInfoEXT-stencilUsage-requiredbitmask", "stencilUsage must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageStencilUsageCreateInfoEXT-stencilUsage-requiredbitmask)"},
+    {"VUID-VkImageSubresource-aspectMask-parameter", "aspectMask must be a valid combination of VkImageAspectFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageSubresource-aspectMask-parameter)"},
+    {"VUID-VkImageSubresource-aspectMask-requiredbitmask", "aspectMask must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageSubresource-aspectMask-requiredbitmask)"},
+    {"VUID-VkImageSubresourceLayers-aspectMask-00167", "If aspectMask contains VK_IMAGE_ASPECT_COLOR_BIT, it must not contain either of VK_IMAGE_ASPECT_DEPTH_BIT or VK_IMAGE_ASPECT_STENCIL_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageSubresourceLayers-aspectMask-00167)"},
+    {"VUID-VkImageSubresourceLayers-aspectMask-00168", "aspectMask must not contain VK_IMAGE_ASPECT_METADATA_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageSubresourceLayers-aspectMask-00168)"},
+    {"VUID-VkImageSubresourceLayers-aspectMask-02247", "aspectMask must not include VK_IMAGE_ASPECT_MEMORY_PLANE_i_BIT_EXT for any index i. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageSubresourceLayers-aspectMask-02247)"},
+    {"VUID-VkImageSubresourceLayers-aspectMask-parameter", "aspectMask must be a valid combination of VkImageAspectFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageSubresourceLayers-aspectMask-parameter)"},
+    {"VUID-VkImageSubresourceLayers-aspectMask-requiredbitmask", "aspectMask must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageSubresourceLayers-aspectMask-requiredbitmask)"},
+    {"VUID-VkImageSubresourceLayers-layerCount-01700", "layerCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageSubresourceLayers-layerCount-01700)"},
+    {"VUID-VkImageSubresourceRange-aspectMask-01670", "If aspectMask includes VK_IMAGE_ASPECT_COLOR_BIT, then it must not include any of VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageSubresourceRange-aspectMask-01670)"},
+    {"VUID-VkImageSubresourceRange-aspectMask-02278", "aspectMask must not include VK_IMAGE_ASPECT_MEMORY_PLANE_i_BIT_EXT for any index i (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageSubresourceRange-aspectMask-02278)"},
+    {"VUID-VkImageSubresourceRange-aspectMask-parameter", "aspectMask must be a valid combination of VkImageAspectFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageSubresourceRange-aspectMask-parameter)"},
+    {"VUID-VkImageSubresourceRange-aspectMask-requiredbitmask", "aspectMask must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageSubresourceRange-aspectMask-requiredbitmask)"},
+    {"VUID-VkImageSubresourceRange-layerCount-01721", "If layerCount is not VK_REMAINING_ARRAY_LAYERS, it must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageSubresourceRange-layerCount-01721)"},
+    {"VUID-VkImageSubresourceRange-levelCount-01720", "If levelCount is not VK_REMAINING_MIP_LEVELS, it must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageSubresourceRange-levelCount-01720)"},
+    {"VUID-VkImageSwapchainCreateInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageSwapchainCreateInfoKHR-sType-sType)"},
+    {"VUID-VkImageSwapchainCreateInfoKHR-swapchain-00995", "If swapchain is not VK_NULL_HANDLE, the fields of VkImageCreateInfo must match the implied image creation parameters of the swapchain (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageSwapchainCreateInfoKHR-swapchain-00995)"},
+    {"VUID-VkImageSwapchainCreateInfoKHR-swapchain-parameter", "If swapchain is not VK_NULL_HANDLE, swapchain must be a valid VkSwapchainKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageSwapchainCreateInfoKHR-swapchain-parameter)"},
+    {"VUID-VkImageViewASTCDecodeModeEXT-decodeMode-02230", "decodeMode must be one of VK_FORMAT_R16G16B16A16_SFLOAT, VK_FORMAT_R8G8B8A8_UNORM, or VK_FORMAT_E5B9G9R9_UFLOAT_PACK32 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewASTCDecodeModeEXT-decodeMode-02230)"},
+    {"VUID-VkImageViewASTCDecodeModeEXT-decodeMode-02231", "If the decodeModeSharedExponent feature is not enabled, decodeMode must not be VK_FORMAT_E5B9G9R9_UFLOAT_PACK32 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewASTCDecodeModeEXT-decodeMode-02231)"},
+    {"VUID-VkImageViewASTCDecodeModeEXT-decodeMode-02232", "If decodeMode is VK_FORMAT_R8G8B8A8_UNORM the image view must not include blocks using any of the ASTC HDR modes (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewASTCDecodeModeEXT-decodeMode-02232)"},
+    {"VUID-VkImageViewASTCDecodeModeEXT-decodeMode-parameter", "decodeMode must be a valid VkFormat value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewASTCDecodeModeEXT-decodeMode-parameter)"},
+    {"VUID-VkImageViewASTCDecodeModeEXT-format-02233", "format of the image view must be one of VK_FORMAT_ASTC_4x4_UNORM_BLOCK, VK_FORMAT_ASTC_4x4_SRGB_BLOCK, VK_FORMAT_ASTC_5x4_UNORM_BLOCK, VK_FORMAT_ASTC_5x4_SRGB_BLOCK, VK_FORMAT_ASTC_5x5_UNORM_BLOCK, VK_FORMAT_ASTC_5x5_SRGB_BLOCK, VK_FORMAT_ASTC_6x5_UNORM_BLOCK, VK_FORMAT_ASTC_6x5_SRGB_BLOCK, VK_FORMAT_ASTC_6x6_UNORM_BLOCK, VK_FORMAT_ASTC_6x6_SRGB_BLOCK, VK_FORMAT_ASTC_8x5_UNORM_BLOCK, VK_FORMAT_ASTC_8x5_SRGB_BLOCK, VK_FORMAT_ASTC_8x6_UNORM_BLOCK, VK_FORMAT_ASTC_8x6_SRGB_BLOCK, VK_FORMAT_ASTC_8x8_UNORM_BLOCK, VK_FORMAT_ASTC_8x8_SRGB_BLOCK, VK_FORMAT_ASTC_10x5_UNORM_BLOCK, VK_FORMAT_ASTC_10x5_SRGB_BLOCK, VK_FORMAT_ASTC_10x6_UNORM_BLOCK, VK_FORMAT_ASTC_10x6_SRGB_BLOCK, VK_FORMAT_ASTC_10x8_UNORM_BLOCK, VK_FORMAT_ASTC_10x8_SRGB_BLOCK, VK_FORMAT_ASTC_10x10_UNORM_BLOCK, VK_FORMAT_ASTC_10x10_SRGB_BLOCK, VK_FORMAT_ASTC_12x10_UNORM_BLOCK, VK_FORMAT_ASTC_12x10_SRGB_BLOCK, VK_FORMAT_ASTC_12x12_UNORM_BLOCK, or VK_FORMAT_ASTC_12x12_SRGB_BLOCK (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewASTCDecodeModeEXT-format-02233)"},
+    {"VUID-VkImageViewASTCDecodeModeEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewASTCDecodeModeEXT-sType-sType)"},
+    {"VUID-VkImageViewCreateInfo-None-02273", "The format features of the resultant image view must contain at least one bit. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-None-02273)"},
+    {"VUID-VkImageViewCreateInfo-components-parameter", "components must be a valid VkComponentMapping structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-components-parameter)"},
+    {"VUID-VkImageViewCreateInfo-flags-02572", "If dynamic fragment density map feature is not enabled, flags must not contain VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-flags-02572)"},
+    {"VUID-VkImageViewCreateInfo-flags-parameter", "flags must be a valid combination of VkImageViewCreateFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-flags-parameter)"},
+    {"VUID-VkImageViewCreateInfo-format-parameter", "format must be a valid VkFormat value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-format-parameter)"},
+    {"VUID-VkImageViewCreateInfo-image-01003", "If image was not created with VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT then viewType must not be VK_IMAGE_VIEW_TYPE_CUBE or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-01003)"},
+    {"VUID-VkImageViewCreateInfo-image-01005", "If image was created with VK_IMAGE_TYPE_3D but without VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT set then viewType must not be VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-01005)"},
+    {"VUID-VkImageViewCreateInfo-image-01007", "image must have been created with a usage value containing at least one of VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_USAGE_STORAGE_BIT, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-01007)"},
+    {"VUID-VkImageViewCreateInfo-image-01018", "If image was created with the VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT flag, format must be compatible with the format used to create image, as defined in Format Compatibility Classes (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-01018)"},
+    {"VUID-VkImageViewCreateInfo-image-01019", "If image was not created with the VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT flag, format must be identical to the format used to create image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-01019)"},
+    {"VUID-VkImageViewCreateInfo-image-01020", "If image is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-01020)"},
+    {"VUID-VkImageViewCreateInfo-image-01482", "If image is not a 3D image created with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT set, or viewType is not VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY, subresourceRange.baseArrayLayer must be less than the arrayLayers specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-01482)"},
+    {"VUID-VkImageViewCreateInfo-image-01583", "If image was created with the VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT flag, format must be compatible with, or must be an uncompressed format that is size-compatible with, the format used to create image. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-01583)"},
+    {"VUID-VkImageViewCreateInfo-image-01584", "If image was created with the VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT flag, the levelCount and layerCount members of subresourceRange must both be 1. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-01584)"},
+    {"VUID-VkImageViewCreateInfo-image-01586", "If image was created with the VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT flag, if the format of the image is a multi-planar format, and if subresourceRange.aspectMask is one of VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT, then format must be compatible with the VkFormat for the plane of the image format indicated by subresourceRange.aspectMask, as defined in Compatible formats of planes of multi-planar formats (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-01586)"},
+    {"VUID-VkImageViewCreateInfo-image-01759", "If image was created with the VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT flag, but without the VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT flag, format must be compatible with the format used to create image, as defined in Format Compatibility Classes (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-01759)"},
+    {"VUID-VkImageViewCreateInfo-image-01760", "If image was created with the VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT flag, and if the format of the image is not a multi-planar format, format must be compatible with the format used to create image, as defined in Format Compatibility Classes (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-01760)"},
+    {"VUID-VkImageViewCreateInfo-image-01761", "If image was created with the VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT flag, but without the VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT flag, and if the format of the image is not a multi-planar format, format must be compatible with the format used to create image, as defined in Format Compatibility Classes (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-01761)"},
+    {"VUID-VkImageViewCreateInfo-image-01762", "If image was not created with the VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT flag, or if the format of the image is a multi-planar format and if subresourceRange.aspectMask is VK_IMAGE_ASPECT_COLOR_BIT, format must be identical to the format used to create image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-01762)"},
+    {"VUID-VkImageViewCreateInfo-image-02085", "image must have been created with a usage value containing at least one of VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_USAGE_STORAGE_BIT, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, or VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-02085)"},
+    {"VUID-VkImageViewCreateInfo-image-02086", "If image was created with usage containing VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, viewType must be VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-02086)"},
+    {"VUID-VkImageViewCreateInfo-image-02087", "If image was created with usage containing VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, format must be VK_FORMAT_R8_UINT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-02087)"},
+    {"VUID-VkImageViewCreateInfo-image-02399", "If image has an external format, format must be VK_FORMAT_UNDEFINED. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-02399)"},
+    {"VUID-VkImageViewCreateInfo-image-02400", "If image has an external format, the pNext chain must include a VkSamplerYcbcrConversionInfo structure with a conversion object created with the same external format as image. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-02400)"},
+    {"VUID-VkImageViewCreateInfo-image-02401", "If image has an external format, all members of components must be VK_COMPONENT_SWIZZLE_IDENTITY. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-02401)"},
+    {"VUID-VkImageViewCreateInfo-image-02569", "image must have been created with a usage value containing at least one of VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_USAGE_STORAGE_BIT, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, or VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-02569)"},
+    {"VUID-VkImageViewCreateInfo-image-02570", "image must have been created with a usage value containing at least one of VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_USAGE_STORAGE_BIT, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, or VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-02570)"},
+    {"VUID-VkImageViewCreateInfo-image-02571", "If image was created with usage containing VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT, subresourceRange.levelCount must be 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-02571)"},
+    {"VUID-VkImageViewCreateInfo-image-02573", "If dynamic fragment density map feature is not enabled and image was created with usage containing VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT, flags must not contain any of VK_IMAGE_CREATE_PROTECTED_BIT, VK_IMAGE_CREATE_SPARSE_BINDING_BIT, VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT, or VK_IMAGE_CREATE_SPARSE_ALIASED_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-02573)"},
+    {"VUID-VkImageViewCreateInfo-image-02724", "If image is a 3D image created with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT set, and viewType is VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY, subresourceRange.baseArrayLayer must be less than the depth computed from baseMipLevel and extent.depth specified in VkImageCreateInfo when image was created, according to the formula defined in Image Miplevel Sizing. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-02724)"},
+    {"VUID-VkImageViewCreateInfo-image-parameter", "image must be a valid VkImage handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-image-parameter)"},
+    {"VUID-VkImageViewCreateInfo-pNext-01585", "If a VkImageFormatListCreateInfoKHR structure was included in the pNext chain of the VkImageCreateInfo structure used when creating image and the viewFormatCount field of VkImageFormatListCreateInfoKHR is not zero then format must be one of the formats in VkImageFormatListCreateInfoKHR::pViewFormats. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-pNext-01585)"},
+    {"VUID-VkImageViewCreateInfo-pNext-01970", "If the pNext chain includes a VkSamplerYcbcrConversionInfo structure with a conversion value other than VK_NULL_HANDLE, all members of components must have the value VK_COMPONENT_SWIZZLE_IDENTITY. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-pNext-01970)"},
+    {"VUID-VkImageViewCreateInfo-pNext-02661", "If the pNext chain includes a VkImageViewUsageCreateInfo structure, its usage member must not include any bits that were not set in the usage member of the VkImageCreateInfo structure used to create image. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-pNext-02661)"},
+    {"VUID-VkImageViewCreateInfo-pNext-02662", "If the pNext chain includes a VkImageViewUsageCreateInfo structure, and image was not created with a VkImageStencilUsageCreateInfoEXT structure included in the pNext chain of VkImageCreateInfo, its usage member must not include any bits that were not set in the usage member of the VkImageCreateInfo structure used to create image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-pNext-02662)"},
+    {"VUID-VkImageViewCreateInfo-pNext-02663", "If the pNext chain includes a VkImageViewUsageCreateInfo structure, image was created with a VkImageStencilUsageCreateInfoEXT structure included in the pNext chain of VkImageCreateInfo, and subResourceRange.aspectMask includes VK_IMAGE_ASPECT_STENCIL_BIT, the usage member of the VkImageViewUsageCreateInfo instance must not include any bits that were not set in the usage member of the VkImageStencilUsageCreateInfoEXT structure used to create image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-pNext-02663)"},
+    {"VUID-VkImageViewCreateInfo-pNext-02664", "If the pNext chain includes a VkImageViewUsageCreateInfo structure, image was created with a VkImageStencilUsageCreateInfoEXT structure included in the pNext chain of VkImageCreateInfo, and subResourceRange.aspectMask includes bits other than VK_IMAGE_ASPECT_STENCIL_BIT, the usage member of the VkImageViewUsageCreateInfo structure must not include any bits that were not set in the usage member of the VkImageCreateInfo structure used to create image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-pNext-02664)"},
+    {"VUID-VkImageViewCreateInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkImageViewASTCDecodeModeEXT, VkImageViewUsageCreateInfo, or VkSamplerYcbcrConversionInfo (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-pNext-pNext)"},
+    {"VUID-VkImageViewCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-sType-sType)"},
+    {"VUID-VkImageViewCreateInfo-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-sType-unique)"},
+    {"VUID-VkImageViewCreateInfo-subResourceRange-01021", "subresourceRange and viewType must be compatible with the image, as described in the compatibility table (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-subResourceRange-01021)"},
+    {"VUID-VkImageViewCreateInfo-subresourceRange-01478", "subresourceRange.baseMipLevel must be less than the mipLevels specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-subresourceRange-01478)"},
+    {"VUID-VkImageViewCreateInfo-subresourceRange-01480", "subresourceRange.baseArrayLayer must be less than the arrayLayers specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-subresourceRange-01480)"},
+    {"VUID-VkImageViewCreateInfo-subresourceRange-01483", "If subresourceRange.layerCount is not VK_REMAINING_ARRAY_LAYERS, image is not a 3D image created with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT set, or viewType is not VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY, subresourceRange.layerCount must be non-zero and subresourceRange.baseArrayLayer + subresourceRange.layerCount must be less than or equal to the arrayLayers specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-subresourceRange-01483)"},
+    {"VUID-VkImageViewCreateInfo-subresourceRange-01718", "If subresourceRange.levelCount is not VK_REMAINING_MIP_LEVELS, subresourceRange.baseMipLevel + subresourceRange.levelCount must be less than or equal to the mipLevels specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-subresourceRange-01718)"},
+    {"VUID-VkImageViewCreateInfo-subresourceRange-01719", "If subresourceRange.layerCount is not VK_REMAINING_ARRAY_LAYERS, subresourceRange.baseArrayLayer + subresourceRange.layerCount must be less than or equal to the arrayLayers specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-subresourceRange-01719)"},
+    {"VUID-VkImageViewCreateInfo-subresourceRange-02725", "If subresourceRange.layerCount is not VK_REMAINING_ARRAY_LAYERS, image is a 3D image created with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT set, and viewType is VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY, subresourceRange.layerCount must be non-zero and subresourceRange.baseArrayLayer + subresourceRange.layerCount must be less than or equal to the depth computed from baseMipLevel and extent.depth specified in VkImageCreateInfo when image was created, according to the formula defined in Image Miplevel Sizing. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-subresourceRange-02725)"},
+    {"VUID-VkImageViewCreateInfo-subresourceRange-parameter", "subresourceRange must be a valid VkImageSubresourceRange structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-subresourceRange-parameter)"},
+    {"VUID-VkImageViewCreateInfo-usage-02274", "If usage contains VK_IMAGE_USAGE_SAMPLED_BIT, then the format features of the resultant image view must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-usage-02274)"},
+    {"VUID-VkImageViewCreateInfo-usage-02275", "If usage contains VK_IMAGE_USAGE_STORAGE_BIT, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-usage-02275)"},
+    {"VUID-VkImageViewCreateInfo-usage-02276", "If usage contains VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, then the image view's format features must contain VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-usage-02276)"},
+    {"VUID-VkImageViewCreateInfo-usage-02277", "If usage contains VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, then the image view's format features must contain VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-usage-02277)"},
+    {"VUID-VkImageViewCreateInfo-usage-02652", "If usage contains VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, then the image view's format features must contain at least one of VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT or VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-usage-02652)"},
+    {"VUID-VkImageViewCreateInfo-viewType-01004", "If the image cubemap arrays feature is not enabled, viewType must not be VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-viewType-01004)"},
+    {"VUID-VkImageViewCreateInfo-viewType-parameter", "viewType must be a valid VkImageViewType value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewCreateInfo-viewType-parameter)"},
+    {"VUID-VkImageViewHandleInfoNVX-commonparent", "Both of imageView, and sampler that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewHandleInfoNVX-commonparent)"},
+    {"VUID-VkImageViewHandleInfoNVX-descriptorType-02654", "descriptorType must be VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, or VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewHandleInfoNVX-descriptorType-02654)"},
+    {"VUID-VkImageViewHandleInfoNVX-descriptorType-parameter", "descriptorType must be a valid VkDescriptorType value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewHandleInfoNVX-descriptorType-parameter)"},
+    {"VUID-VkImageViewHandleInfoNVX-imageView-02656", "If descriptorType is VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE or VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, the image that imageView was created from must have been created with the VK_IMAGE_USAGE_SAMPLED_BIT usage bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewHandleInfoNVX-imageView-02656)"},
+    {"VUID-VkImageViewHandleInfoNVX-imageView-02657", "If descriptorType is VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, the image that imageView was created from must have been created with the VK_IMAGE_USAGE_STORAGE_BIT usage bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewHandleInfoNVX-imageView-02657)"},
+    {"VUID-VkImageViewHandleInfoNVX-imageView-parameter", "imageView must be a valid VkImageView handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewHandleInfoNVX-imageView-parameter)"},
+    {"VUID-VkImageViewHandleInfoNVX-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewHandleInfoNVX-pNext-pNext)"},
+    {"VUID-VkImageViewHandleInfoNVX-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewHandleInfoNVX-sType-sType)"},
+    {"VUID-VkImageViewHandleInfoNVX-sampler-02655", "sampler must be a valid VkSampler if descriptorType is VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewHandleInfoNVX-sampler-02655)"},
+    {"VUID-VkImageViewHandleInfoNVX-sampler-parameter", "If sampler is not VK_NULL_HANDLE, sampler must be a valid VkSampler handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewHandleInfoNVX-sampler-parameter)"},
+    {"VUID-VkImageViewUsageCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewUsageCreateInfo-sType-sType)"},
+    {"VUID-VkImageViewUsageCreateInfo-usage-parameter", "usage must be a valid combination of VkImageUsageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewUsageCreateInfo-usage-parameter)"},
+    {"VUID-VkImageViewUsageCreateInfo-usage-requiredbitmask", "usage must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImageViewUsageCreateInfo-usage-requiredbitmask)"},
+    {"VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-01880", "If buffer is not NULL, Android hardware buffers must be supported for import, as reported by VkExternalImageFormatProperties or VkExternalBufferProperties. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-01880)"},
+    {"VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-01881", "If buffer is not NULL, it must be a valid Android hardware buffer object with AHardwareBuffer_Desc::format and AHardwareBuffer_Desc::usage compatible with Vulkan as described in Android Hardware Buffers. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-01881)"},
+    {"VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-parameter", "buffer must be a valid pointer to an AHardwareBuffer value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-parameter)"},
+    {"VUID-VkImportAndroidHardwareBufferInfoANDROID-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportAndroidHardwareBufferInfoANDROID-sType-sType)"},
+    {"VUID-VkImportFenceFdInfoKHR-fd-01541", "fd must obey any requirements listed for handleType in external fence handle types compatibility. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportFenceFdInfoKHR-fd-01541)"},
+    {"VUID-VkImportFenceFdInfoKHR-fence-parameter", "fence must be a valid VkFence handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportFenceFdInfoKHR-fence-parameter)"},
+    {"VUID-VkImportFenceFdInfoKHR-flags-parameter", "flags must be a valid combination of VkFenceImportFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportFenceFdInfoKHR-flags-parameter)"},
+    {"VUID-VkImportFenceFdInfoKHR-handleType-01464", "handleType must be a value included in the Handle Types Supported by VkImportFenceFdInfoKHR table. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportFenceFdInfoKHR-handleType-01464)"},
+    {"VUID-VkImportFenceFdInfoKHR-handleType-parameter", "handleType must be a valid VkExternalFenceHandleTypeFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportFenceFdInfoKHR-handleType-parameter)"},
+    {"VUID-VkImportFenceFdInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportFenceFdInfoKHR-pNext-pNext)"},
+    {"VUID-VkImportFenceFdInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportFenceFdInfoKHR-sType-sType)"},
+    {"VUID-VkImportFenceWin32HandleInfoKHR-fence-parameter", "fence must be a valid VkFence handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportFenceWin32HandleInfoKHR-fence-parameter)"},
+    {"VUID-VkImportFenceWin32HandleInfoKHR-flags-parameter", "flags must be a valid combination of VkFenceImportFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportFenceWin32HandleInfoKHR-flags-parameter)"},
+    {"VUID-VkImportFenceWin32HandleInfoKHR-handle-01462", "If handle is not NULL, name must be NULL. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportFenceWin32HandleInfoKHR-handle-01462)"},
+    {"VUID-VkImportFenceWin32HandleInfoKHR-handle-01539", "If handle is not NULL, it must obey any requirements listed for handleType in external fence handle types compatibility. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportFenceWin32HandleInfoKHR-handle-01539)"},
+    {"VUID-VkImportFenceWin32HandleInfoKHR-handleType-01457", "handleType must be a value included in the Handle Types Supported by VkImportFenceWin32HandleInfoKHR table. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportFenceWin32HandleInfoKHR-handleType-01457)"},
+    {"VUID-VkImportFenceWin32HandleInfoKHR-handleType-01459", "If handleType is not VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT, name must be NULL. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportFenceWin32HandleInfoKHR-handleType-01459)"},
+    {"VUID-VkImportFenceWin32HandleInfoKHR-handleType-01460", "If handleType is not 0 and handle is NULL, name must name a valid synchronization primitive of the type specified by handleType. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportFenceWin32HandleInfoKHR-handleType-01460)"},
+    {"VUID-VkImportFenceWin32HandleInfoKHR-handleType-01461", "If handleType is not 0 and name is NULL, handle must be a valid handle of the type specified by handleType. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportFenceWin32HandleInfoKHR-handleType-01461)"},
+    {"VUID-VkImportFenceWin32HandleInfoKHR-handleType-parameter", "If handleType is not 0, handleType must be a valid VkExternalFenceHandleTypeFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportFenceWin32HandleInfoKHR-handleType-parameter)"},
+    {"VUID-VkImportFenceWin32HandleInfoKHR-name-01540", "If name is not NULL, it must obey any requirements listed for handleType in external fence handle types compatibility. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportFenceWin32HandleInfoKHR-name-01540)"},
+    {"VUID-VkImportFenceWin32HandleInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportFenceWin32HandleInfoKHR-pNext-pNext)"},
+    {"VUID-VkImportFenceWin32HandleInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportFenceWin32HandleInfoKHR-sType-sType)"},
+    {"VUID-VkImportMemoryFdInfoKHR-fd-00668", "The memory from which fd was exported must have been created on the same underlying physical device as device. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryFdInfoKHR-fd-00668)"},
+    {"VUID-VkImportMemoryFdInfoKHR-fd-01520", "fd must obey any requirements listed for handleType in external memory handle types compatibility. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryFdInfoKHR-fd-01520)"},
+    {"VUID-VkImportMemoryFdInfoKHR-fd-01746", "The memory represented by fd must have been created from a physical device and driver that is compatible with device and handleType, as described in External memory handle types compatibility. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryFdInfoKHR-fd-01746)"},
+    {"VUID-VkImportMemoryFdInfoKHR-handleType-00667", "If handleType is not 0, it must be supported for import, as reported by VkExternalImageFormatProperties or VkExternalBufferProperties. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryFdInfoKHR-handleType-00667)"},
+    {"VUID-VkImportMemoryFdInfoKHR-handleType-00669", "If handleType is not 0, it must be defined as a POSIX file descriptor handle. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryFdInfoKHR-handleType-00669)"},
+    {"VUID-VkImportMemoryFdInfoKHR-handleType-00670", "If handleType is not 0, fd must be a valid handle of the type specified by handleType. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryFdInfoKHR-handleType-00670)"},
+    {"VUID-VkImportMemoryFdInfoKHR-handleType-parameter", "If handleType is not 0, handleType must be a valid VkExternalMemoryHandleTypeFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryFdInfoKHR-handleType-parameter)"},
+    {"VUID-VkImportMemoryFdInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryFdInfoKHR-sType-sType)"},
+    {"VUID-VkImportMemoryHostPointerInfoEXT-handleType-01747", "If handleType is not 0, it must be supported for import, as reported in VkExternalMemoryProperties (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryHostPointerInfoEXT-handleType-01747)"},
+    {"VUID-VkImportMemoryHostPointerInfoEXT-handleType-01748", "If handleType is not 0, it must be VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT or VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryHostPointerInfoEXT-handleType-01748)"},
+    {"VUID-VkImportMemoryHostPointerInfoEXT-handleType-01750", "If handleType is VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT, pHostPointer must be a pointer to allocationSize number of bytes of host memory, where allocationSize is the member of the VkMemoryAllocateInfo structure this structure is chained to (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryHostPointerInfoEXT-handleType-01750)"},
+    {"VUID-VkImportMemoryHostPointerInfoEXT-handleType-01751", "If handleType is VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT, pHostPointer must be a pointer to allocationSize number of bytes of host mapped foreign memory, where allocationSize is the member of the VkMemoryAllocateInfo structure this structure is chained to (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryHostPointerInfoEXT-handleType-01751)"},
+    {"VUID-VkImportMemoryHostPointerInfoEXT-handleType-parameter", "handleType must be a valid VkExternalMemoryHandleTypeFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryHostPointerInfoEXT-handleType-parameter)"},
+    {"VUID-VkImportMemoryHostPointerInfoEXT-pHostPointer-01749", "pHostPointer must be a pointer aligned to an integer multiple of VkPhysicalDeviceExternalMemoryHostPropertiesEXT::minImportedHostPointerAlignment (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryHostPointerInfoEXT-pHostPointer-01749)"},
+    {"VUID-VkImportMemoryHostPointerInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryHostPointerInfoEXT-sType-sType)"},
+    {"VUID-VkImportMemoryWin32HandleInfoKHR-handle-00659", "The memory from which handle was exported, or the memory named by name must have been created on the same underlying physical device as device. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryWin32HandleInfoKHR-handle-00659)"},
+    {"VUID-VkImportMemoryWin32HandleInfoKHR-handle-01441", "if handle is not NULL, name must be NULL. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryWin32HandleInfoKHR-handle-01441)"},
+    {"VUID-VkImportMemoryWin32HandleInfoKHR-handle-01518", "If handle is not NULL, it must obey any requirements listed for handleType in external memory handle types compatibility. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryWin32HandleInfoKHR-handle-01518)"},
+    {"VUID-VkImportMemoryWin32HandleInfoKHR-handleType-00658", "If handleType is not 0, it must be supported for import, as reported by VkExternalImageFormatProperties or VkExternalBufferProperties. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryWin32HandleInfoKHR-handleType-00658)"},
+    {"VUID-VkImportMemoryWin32HandleInfoKHR-handleType-00660", "If handleType is not 0, it must be defined as an NT handle or a global share handle. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryWin32HandleInfoKHR-handleType-00660)"},
+    {"VUID-VkImportMemoryWin32HandleInfoKHR-handleType-00661", "If handleType is not 0 and name is NULL, handle must be a valid handle of the type specified by handleType. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryWin32HandleInfoKHR-handleType-00661)"},
+    {"VUID-VkImportMemoryWin32HandleInfoKHR-handleType-01439", "If handleType is not VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, or VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, name must be NULL. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryWin32HandleInfoKHR-handleType-01439)"},
+    {"VUID-VkImportMemoryWin32HandleInfoKHR-handleType-01440", "If handleType is not 0 and handle is NULL, name must name a valid memory resource of the type specified by handleType. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryWin32HandleInfoKHR-handleType-01440)"},
+    {"VUID-VkImportMemoryWin32HandleInfoKHR-handleType-parameter", "If handleType is not 0, handleType must be a valid VkExternalMemoryHandleTypeFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryWin32HandleInfoKHR-handleType-parameter)"},
+    {"VUID-VkImportMemoryWin32HandleInfoKHR-name-01519", "If name is not NULL, it must obey any requirements listed for handleType in external memory handle types compatibility. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryWin32HandleInfoKHR-name-01519)"},
+    {"VUID-VkImportMemoryWin32HandleInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryWin32HandleInfoKHR-sType-sType)"},
+    {"VUID-VkImportMemoryWin32HandleInfoNV-handle-01328", "handle must be a valid handle to memory, obtained as specified by handleType. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryWin32HandleInfoNV-handle-01328)"},
+    {"VUID-VkImportMemoryWin32HandleInfoNV-handleType-01327", "handleType must not have more than one bit set. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryWin32HandleInfoNV-handleType-01327)"},
+    {"VUID-VkImportMemoryWin32HandleInfoNV-handleType-parameter", "handleType must be a valid combination of VkExternalMemoryHandleTypeFlagBitsNV values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryWin32HandleInfoNV-handleType-parameter)"},
+    {"VUID-VkImportMemoryWin32HandleInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportMemoryWin32HandleInfoNV-sType-sType)"},
+    {"VUID-VkImportSemaphoreFdInfoKHR-fd-01544", "fd must obey any requirements listed for handleType in external semaphore handle types compatibility. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportSemaphoreFdInfoKHR-fd-01544)"},
+    {"VUID-VkImportSemaphoreFdInfoKHR-flags-03323", "If flags contains VK_SEMAPHORE_IMPORT_TEMPORARY_BIT, the VkSemaphoreTypeCreateInfoKHR::semaphoreType field of the semaphore from which fd was exported must not be VK_SEMAPHORE_TYPE_TIMELINE_KHR. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportSemaphoreFdInfoKHR-flags-03323)"},
+    {"VUID-VkImportSemaphoreFdInfoKHR-flags-parameter", "flags must be a valid combination of VkSemaphoreImportFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportSemaphoreFdInfoKHR-flags-parameter)"},
+    {"VUID-VkImportSemaphoreFdInfoKHR-handleType-01143", "handleType must be a value included in the Handle Types Supported by VkImportSemaphoreFdInfoKHR table. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportSemaphoreFdInfoKHR-handleType-01143)"},
+    {"VUID-VkImportSemaphoreFdInfoKHR-handleType-03263", "If handleType is VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT, the VkSemaphoreCreateInfo::flags field must match that of the semaphore from which fd was exported. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportSemaphoreFdInfoKHR-handleType-03263)"},
+    {"VUID-VkImportSemaphoreFdInfoKHR-handleType-03264", "If handleType is VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT, the VkSemaphoreTypeCreateInfoKHR::semaphoreType field must match that of the semaphore from which fd was exported. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportSemaphoreFdInfoKHR-handleType-03264)"},
+    {"VUID-VkImportSemaphoreFdInfoKHR-handleType-parameter", "handleType must be a valid VkExternalSemaphoreHandleTypeFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportSemaphoreFdInfoKHR-handleType-parameter)"},
+    {"VUID-VkImportSemaphoreFdInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportSemaphoreFdInfoKHR-pNext-pNext)"},
+    {"VUID-VkImportSemaphoreFdInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportSemaphoreFdInfoKHR-sType-sType)"},
+    {"VUID-VkImportSemaphoreFdInfoKHR-semaphore-parameter", "semaphore must be a valid VkSemaphore handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportSemaphoreFdInfoKHR-semaphore-parameter)"},
+    {"VUID-VkImportSemaphoreWin32HandleInfoKHR-flags-03322", "If flags contains VK_SEMAPHORE_IMPORT_TEMPORARY_BIT, the VkSemaphoreTypeCreateInfoKHR::semaphoreType field of the semaphore from which handle or name was exported must not be VK_SEMAPHORE_TYPE_TIMELINE_KHR. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportSemaphoreWin32HandleInfoKHR-flags-03322)"},
+    {"VUID-VkImportSemaphoreWin32HandleInfoKHR-flags-parameter", "flags must be a valid combination of VkSemaphoreImportFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportSemaphoreWin32HandleInfoKHR-flags-parameter)"},
+    {"VUID-VkImportSemaphoreWin32HandleInfoKHR-handle-01469", "If handle is not NULL, name must be NULL. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportSemaphoreWin32HandleInfoKHR-handle-01469)"},
+    {"VUID-VkImportSemaphoreWin32HandleInfoKHR-handle-01542", "If handle is not NULL, it must obey any requirements listed for handleType in external semaphore handle types compatibility. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportSemaphoreWin32HandleInfoKHR-handle-01542)"},
+    {"VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-01140", "handleType must be a value included in the Handle Types Supported by VkImportSemaphoreWin32HandleInfoKHR table. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-01140)"},
+    {"VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-01466", "If handleType is not VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT or VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, name must be NULL. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-01466)"},
+    {"VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-01467", "If handleType is not 0 and handle is NULL, name must name a valid synchronization primitive of the type specified by handleType. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-01467)"},
+    {"VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-01468", "If handleType is not 0 and name is NULL, handle must be a valid handle of the type specified by handleType. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-01468)"},
+    {"VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-03261", "If handleType is VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT or VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, the VkSemaphoreCreateInfo::flags field must match that of the semaphore from which handle or name was exported. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-03261)"},
+    {"VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-03262", "If handleType is VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT or VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, the VkSemaphoreTypeCreateInfoKHR::semaphoreType field must match that of the semaphore from which handle or name was exported. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-03262)"},
+    {"VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-parameter", "If handleType is not 0, handleType must be a valid VkExternalSemaphoreHandleTypeFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-parameter)"},
+    {"VUID-VkImportSemaphoreWin32HandleInfoKHR-name-01543", "If name is not NULL, it must obey any requirements listed for handleType in external semaphore handle types compatibility. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportSemaphoreWin32HandleInfoKHR-name-01543)"},
+    {"VUID-VkImportSemaphoreWin32HandleInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportSemaphoreWin32HandleInfoKHR-pNext-pNext)"},
+    {"VUID-VkImportSemaphoreWin32HandleInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportSemaphoreWin32HandleInfoKHR-sType-sType)"},
+    {"VUID-VkImportSemaphoreWin32HandleInfoKHR-semaphore-parameter", "semaphore must be a valid VkSemaphore handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkImportSemaphoreWin32HandleInfoKHR-semaphore-parameter)"},
+    {"VUID-VkIndirectCommandsLayoutCreateInfoNVX-computeBindingPointSupport-01348", "If the VkDeviceGeneratedCommandsFeaturesNVX::computeBindingPointSupport feature is not enabled, then pipelineBindPoint must not be VK_PIPELINE_BIND_POINT_COMPUTE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIndirectCommandsLayoutCreateInfoNVX-computeBindingPointSupport-01348)"},
+    {"VUID-VkIndirectCommandsLayoutCreateInfoNVX-flags-parameter", "flags must be a valid combination of VkIndirectCommandsLayoutUsageFlagBitsNVX values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIndirectCommandsLayoutCreateInfoNVX-flags-parameter)"},
+    {"VUID-VkIndirectCommandsLayoutCreateInfoNVX-flags-requiredbitmask", "flags must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIndirectCommandsLayoutCreateInfoNVX-flags-requiredbitmask)"},
+    {"VUID-VkIndirectCommandsLayoutCreateInfoNVX-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIndirectCommandsLayoutCreateInfoNVX-pNext-pNext)"},
+    {"VUID-VkIndirectCommandsLayoutCreateInfoNVX-pTokens-01349", "If pTokens contains an entry of VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX it must be the first element of the array and there must be only a single element of such token type. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIndirectCommandsLayoutCreateInfoNVX-pTokens-01349)"},
+    {"VUID-VkIndirectCommandsLayoutCreateInfoNVX-pTokens-01350", "All state binding tokens in pTokens must occur prior work provoking tokens (VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX). (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIndirectCommandsLayoutCreateInfoNVX-pTokens-01350)"},
+    {"VUID-VkIndirectCommandsLayoutCreateInfoNVX-pTokens-01351", "The content of pTokens must include one single work provoking token that is compatible with the pipelineBindPoint. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIndirectCommandsLayoutCreateInfoNVX-pTokens-01351)"},
+    {"VUID-VkIndirectCommandsLayoutCreateInfoNVX-pTokens-parameter", "pTokens must be a valid pointer to an array of tokenCount valid VkIndirectCommandsLayoutTokenNVX structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIndirectCommandsLayoutCreateInfoNVX-pTokens-parameter)"},
+    {"VUID-VkIndirectCommandsLayoutCreateInfoNVX-pipelineBindPoint-parameter", "pipelineBindPoint must be a valid VkPipelineBindPoint value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIndirectCommandsLayoutCreateInfoNVX-pipelineBindPoint-parameter)"},
+    {"VUID-VkIndirectCommandsLayoutCreateInfoNVX-sType-sType", "sType must be VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIndirectCommandsLayoutCreateInfoNVX-sType-sType)"},
+    {"VUID-VkIndirectCommandsLayoutCreateInfoNVX-tokenCount-01347", "tokenCount must be greater than 0 and below VkDeviceGeneratedCommandsLimitsNVX::maxIndirectCommandsLayoutTokenCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIndirectCommandsLayoutCreateInfoNVX-tokenCount-01347)"},
+    {"VUID-VkIndirectCommandsLayoutCreateInfoNVX-tokenCount-arraylength", "tokenCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIndirectCommandsLayoutCreateInfoNVX-tokenCount-arraylength)"},
+    {"VUID-VkIndirectCommandsLayoutTokenNVX-bindingUnit-01342", "bindingUnit must stay within device supported limits for the appropriate commands. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIndirectCommandsLayoutTokenNVX-bindingUnit-01342)"},
+    {"VUID-VkIndirectCommandsLayoutTokenNVX-divisor-01344", "divisor must be greater than 0 and a power of two. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIndirectCommandsLayoutTokenNVX-divisor-01344)"},
+    {"VUID-VkIndirectCommandsLayoutTokenNVX-dynamicCount-01343", "dynamicCount must stay within device supported limits for the appropriate commands. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIndirectCommandsLayoutTokenNVX-dynamicCount-01343)"},
+    {"VUID-VkIndirectCommandsLayoutTokenNVX-tokenType-parameter", "tokenType must be a valid VkIndirectCommandsTokenTypeNVX value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIndirectCommandsLayoutTokenNVX-tokenType-parameter)"},
+    {"VUID-VkIndirectCommandsTokenNVX-buffer-01345", "The buffer's usage flag must have the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIndirectCommandsTokenNVX-buffer-01345)"},
+    {"VUID-VkIndirectCommandsTokenNVX-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIndirectCommandsTokenNVX-buffer-parameter)"},
+    {"VUID-VkIndirectCommandsTokenNVX-offset-01346", "The offset must be aligned to VkDeviceGeneratedCommandsLimitsNVX::minCommandsTokenBufferOffsetAlignment. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIndirectCommandsTokenNVX-offset-01346)"},
+    {"VUID-VkIndirectCommandsTokenNVX-tokenType-parameter", "tokenType must be a valid VkIndirectCommandsTokenTypeNVX value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkIndirectCommandsTokenNVX-tokenType-parameter)"},
+    {"VUID-VkInitializePerformanceApiInfoINTEL-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkInitializePerformanceApiInfoINTEL-pNext-pNext)"},
+    {"VUID-VkInitializePerformanceApiInfoINTEL-sType-sType", "sType must be VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkInitializePerformanceApiInfoINTEL-sType-sType)"},
+    {"VUID-VkInputAttachmentAspectReference-aspectMask-01964", "aspectMask must not include VK_IMAGE_ASPECT_METADATA_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkInputAttachmentAspectReference-aspectMask-01964)"},
+    {"VUID-VkInputAttachmentAspectReference-aspectMask-02250", "aspectMask must not include VK_IMAGE_ASPECT_MEMORY_PLANE_i_BIT_EXT for any index i. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkInputAttachmentAspectReference-aspectMask-02250)"},
+    {"VUID-VkInputAttachmentAspectReference-aspectMask-parameter", "aspectMask must be a valid combination of VkImageAspectFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkInputAttachmentAspectReference-aspectMask-parameter)"},
+    {"VUID-VkInputAttachmentAspectReference-aspectMask-requiredbitmask", "aspectMask must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkInputAttachmentAspectReference-aspectMask-requiredbitmask)"},
+    {"VUID-VkInstanceCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkInstanceCreateInfo-flags-zerobitmask)"},
+    {"VUID-VkInstanceCreateInfo-pApplicationInfo-parameter", "If pApplicationInfo is not NULL, pApplicationInfo must be a valid pointer to a valid VkApplicationInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkInstanceCreateInfo-pApplicationInfo-parameter)"},
+    {"VUID-VkInstanceCreateInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDebugReportCallbackCreateInfoEXT, VkDebugUtilsMessengerCreateInfoEXT, VkValidationFeaturesEXT, or VkValidationFlagsEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkInstanceCreateInfo-pNext-pNext)"},
+    {"VUID-VkInstanceCreateInfo-ppEnabledExtensionNames-parameter", "If enabledExtensionCount is not 0, ppEnabledExtensionNames must be a valid pointer to an array of enabledExtensionCount null-terminated UTF-8 strings (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkInstanceCreateInfo-ppEnabledExtensionNames-parameter)"},
+    {"VUID-VkInstanceCreateInfo-ppEnabledLayerNames-parameter", "If enabledLayerCount is not 0, ppEnabledLayerNames must be a valid pointer to an array of enabledLayerCount null-terminated UTF-8 strings (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkInstanceCreateInfo-ppEnabledLayerNames-parameter)"},
+    {"VUID-VkInstanceCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkInstanceCreateInfo-sType-sType)"},
+    {"VUID-VkInstanceCreateInfo-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkInstanceCreateInfo-sType-unique)"},
+    {"VUID-VkMacOSSurfaceCreateInfoMVK-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMacOSSurfaceCreateInfoMVK-flags-zerobitmask)"},
+    {"VUID-VkMacOSSurfaceCreateInfoMVK-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMacOSSurfaceCreateInfoMVK-pNext-pNext)"},
+    {"VUID-VkMacOSSurfaceCreateInfoMVK-pView-01317", "pView must be a valid NSView and must be backed by a CALayer instance of type CAMetalLayer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMacOSSurfaceCreateInfoMVK-pView-01317)"},
+    {"VUID-VkMacOSSurfaceCreateInfoMVK-sType-sType", "sType must be VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMacOSSurfaceCreateInfoMVK-sType-sType)"},
+    {"VUID-VkMappedMemoryRange-memory-00684", "memory must be currently host mapped (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMappedMemoryRange-memory-00684)"},
+    {"VUID-VkMappedMemoryRange-memory-parameter", "memory must be a valid VkDeviceMemory handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMappedMemoryRange-memory-parameter)"},
+    {"VUID-VkMappedMemoryRange-offset-00687", "offset must be a multiple of VkPhysicalDeviceLimits::nonCoherentAtomSize (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMappedMemoryRange-offset-00687)"},
+    {"VUID-VkMappedMemoryRange-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMappedMemoryRange-pNext-pNext)"},
+    {"VUID-VkMappedMemoryRange-sType-sType", "sType must be VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMappedMemoryRange-sType-sType)"},
+    {"VUID-VkMappedMemoryRange-size-00685", "If size is not equal to VK_WHOLE_SIZE, offset and size must specify a range contained within the currently mapped range of memory (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMappedMemoryRange-size-00685)"},
+    {"VUID-VkMappedMemoryRange-size-00686", "If size is equal to VK_WHOLE_SIZE, offset must be within the currently mapped range of memory (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMappedMemoryRange-size-00686)"},
+    {"VUID-VkMappedMemoryRange-size-01389", "If size is equal to VK_WHOLE_SIZE, the end of the current mapping of memory must be a multiple of VkPhysicalDeviceLimits::nonCoherentAtomSize bytes from the beginning of the memory object. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMappedMemoryRange-size-01389)"},
+    {"VUID-VkMappedMemoryRange-size-01390", "If size is not equal to VK_WHOLE_SIZE, size must either be a multiple of VkPhysicalDeviceLimits::nonCoherentAtomSize, or offset plus size must equal the size of memory. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMappedMemoryRange-size-01390)"},
+    {"VUID-VkMemoryAllocateFlagsInfo-deviceMask-00675", "If VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT is set, deviceMask must be a valid device mask. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateFlagsInfo-deviceMask-00675)"},
+    {"VUID-VkMemoryAllocateFlagsInfo-deviceMask-00676", "If VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT is set, deviceMask must not be zero (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateFlagsInfo-deviceMask-00676)"},
+    {"VUID-VkMemoryAllocateFlagsInfo-flags-parameter", "flags must be a valid combination of VkMemoryAllocateFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateFlagsInfo-flags-parameter)"},
+    {"VUID-VkMemoryAllocateFlagsInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateFlagsInfo-sType-sType)"},
+    {"VUID-VkMemoryAllocateInfo-None-00643", "If the parameters define an import operation and the external handle specified was created by the Vulkan API, the device mask specified by VkMemoryAllocateFlagsInfo must match that specified when the memory object being imported was allocated. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-None-00643)"},
+    {"VUID-VkMemoryAllocateInfo-None-00644", "If the parameters define an import operation and the external handle specified was created by the Vulkan API, the list of physical devices that comprise the logical device passed to vkAllocateMemory must match the list of physical devices that comprise the logical device on which the memory was originally allocated. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-None-00644)"},
+    {"VUID-VkMemoryAllocateInfo-allocationSize-00638", "allocationSize must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-allocationSize-00638)"},
+    {"VUID-VkMemoryAllocateInfo-allocationSize-00646", "If the parameters define an import operation and the external handle type is VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT, or VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, allocationSize must match the size reported in the memory requirements of the image or buffer member of the VkDedicatedAllocationMemoryAllocateInfoNV structure included in the pNext chain. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-allocationSize-00646)"},
+    {"VUID-VkMemoryAllocateInfo-allocationSize-00647", "If the parameters define an import operation and the external handle type is VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, allocationSize must match the size specified when creating the Direct3D 12 heap from which the external handle was extracted. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-allocationSize-00647)"},
+    {"VUID-VkMemoryAllocateInfo-allocationSize-01742", "If the parameters define an import operation, the external handle specified was created by the Vulkan API, and the external handle type is VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR, then the values of allocationSize and memoryTypeIndex must match those specified when the memory object being imported was created. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-allocationSize-01742)"},
+    {"VUID-VkMemoryAllocateInfo-allocationSize-01743", "If the parameters define an import operation, the external handle was created by the Vulkan API, and the external handle type is VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR or VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR, then the values of allocationSize and memoryTypeIndex must match those specified when the memory object being imported was created. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-allocationSize-01743)"},
+    {"VUID-VkMemoryAllocateInfo-allocationSize-01745", "If the parameters define an import operation and the external handle is a host pointer, allocationSize must be an integer multiple of VkPhysicalDeviceExternalMemoryHostPropertiesEXT::minImportedHostPointerAlignment (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-allocationSize-01745)"},
+    {"VUID-VkMemoryAllocateInfo-allocationSize-02383", "If the parameters define an import operation and the external handle type is VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID, allocationSize must be the size returned by vkGetAndroidHardwareBufferPropertiesANDROID for the Android hardware buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-allocationSize-02383)"},
+    {"VUID-VkMemoryAllocateInfo-flags-03330", "If VkMemoryAllocateFlagsInfo::flags includes VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR, the bufferDeviceAddressCaptureReplay feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-flags-03330)"},
+    {"VUID-VkMemoryAllocateInfo-flags-03331", "If VkMemoryAllocateFlagsInfo::flags includes VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR, the bufferDeviceAddress feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-flags-03331)"},
+    {"VUID-VkMemoryAllocateInfo-memoryTypeIndex-00645", "If the parameters define an import operation and the external handle is an NT handle or a global share handle created outside of the Vulkan API, the value of memoryTypeIndex must be one of those returned by vkGetMemoryWin32HandlePropertiesKHR. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-memoryTypeIndex-00645)"},
+    {"VUID-VkMemoryAllocateInfo-memoryTypeIndex-00648", "If the parameters define an import operation and the external handle is a POSIX file descriptor created outside of the Vulkan API, the value of memoryTypeIndex must be one of those returned by vkGetMemoryFdPropertiesKHR. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-memoryTypeIndex-00648)"},
+    {"VUID-VkMemoryAllocateInfo-memoryTypeIndex-01744", "If the parameters define an import operation and the external handle is a host pointer, the value of memoryTypeIndex must be one of those returned by vkGetMemoryHostPointerPropertiesEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-memoryTypeIndex-01744)"},
+    {"VUID-VkMemoryAllocateInfo-memoryTypeIndex-01872", "If the protected memory feature is not enabled, the VkMemoryAllocateInfo::memoryTypeIndex must not indicate a memory type that reports VK_MEMORY_PROPERTY_PROTECTED_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-memoryTypeIndex-01872)"},
+    {"VUID-VkMemoryAllocateInfo-memoryTypeIndex-02385", "If the parameters define an import operation and the external handle type is VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID, memoryTypeIndex must be one of those returned by vkGetAndroidHardwareBufferPropertiesANDROID for the Android hardware buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-memoryTypeIndex-02385)"},
+    {"VUID-VkMemoryAllocateInfo-opaqueCaptureAddress-03329", "If VkMemoryOpaqueCaptureAddressAllocateInfoKHR::opaqueCaptureAddress is not zero, VkMemoryAllocateFlagsInfo::flags must include VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-opaqueCaptureAddress-03329)"},
+    {"VUID-VkMemoryAllocateInfo-opaqueCaptureAddress-03333", "If the parameters define an import operation, VkMemoryOpaqueCaptureAddressAllocateInfoKHR::opaqueCaptureAddress must be zero (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-opaqueCaptureAddress-03333)"},
+    {"VUID-VkMemoryAllocateInfo-pNext-00639", "If the pNext chain includes a VkExportMemoryAllocateInfo     structure, and any of the handle types specified in     VkExportMemoryAllocateInfo::handleTypes require a dedicated     allocation, as reported by     vkGetPhysicalDeviceImageFormatProperties2 in     VkExternalImageFormatProperties::externalMemoryProperties.externalMemoryFeatures     or     VkExternalBufferProperties::externalMemoryProperties.externalMemoryFeatures,     the pNext chain must include a ifdef::VK_KHR_dedicated_allocation[VkMemoryDedicatedAllocateInfo] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-pNext-00639)"},
+    {"VUID-VkMemoryAllocateInfo-pNext-00640", "If the pNext chain includes a VkExportMemoryAllocateInfo structure, it must not include a VkExportMemoryAllocateInfoNV or VkExportMemoryWin32HandleInfoNV structure. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-pNext-00640)"},
+    {"VUID-VkMemoryAllocateInfo-pNext-00641", "If the pNext chain includes a VkImportMemoryWin32HandleInfoKHR structure, it must not include a VkImportMemoryWin32HandleInfoNV structure. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-pNext-00641)"},
+    {"VUID-VkMemoryAllocateInfo-pNext-01874", "If the parameters do not define an import operation, and the pNext chain includes a VkExportMemoryAllocateInfo structure with VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID included in its handleTypes member, and the pNext chain includes a VkMemoryDedicatedAllocateInfo structure with image not equal to VK_NULL_HANDLE, then allocationSize must be 0, otherwise allocationSize must be greater than 0. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-pNext-01874)"},
+    {"VUID-VkMemoryAllocateInfo-pNext-02384", "If the parameters define an import operation and the external handle type is VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID, and the pNext chain does not include a VkMemoryDedicatedAllocateInfo structure or VkMemoryDedicatedAllocateInfo::image is VK_NULL_HANDLE, the Android hardware buffer must have a AHardwareBuffer_Desc::format of AHARDWAREBUFFER_FORMAT_BLOB and a AHardwareBuffer_Desc::usage that includes AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-pNext-02384)"},
+    {"VUID-VkMemoryAllocateInfo-pNext-02386", "If the parameters define an import operation, the external handle is an Android hardware buffer, and the pNext chain includes a VkMemoryDedicatedAllocateInfo with image that is not VK_NULL_HANDLE, the Android hardware buffer's AHardwareBuffer::usage must include at least one of AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT or AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-pNext-02386)"},
+    {"VUID-VkMemoryAllocateInfo-pNext-02387", "If the parameters define an import operation, the external handle is an Android hardware buffer, and the pNext chain includes a VkMemoryDedicatedAllocateInfo with image that is not VK_NULL_HANDLE, the format of image must be VK_FORMAT_UNDEFINED or the format returned by vkGetAndroidHardwareBufferPropertiesANDROID in VkAndroidHardwareBufferFormatPropertiesANDROID::format for the Android hardware buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-pNext-02387)"},
+    {"VUID-VkMemoryAllocateInfo-pNext-02388", "If the parameters define an import operation, the external handle is an Android hardware buffer, and the pNext chain includes a VkMemoryDedicatedAllocateInfo structure with image that is not VK_NULL_HANDLE, the width, height, and array layer dimensions of image and the Android hardware buffer's AHardwareBuffer_Desc must be identical. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-pNext-02388)"},
+    {"VUID-VkMemoryAllocateInfo-pNext-02389", "If the parameters define an import operation, the external handle is an Android hardware buffer, and the pNext chain includes a VkMemoryDedicatedAllocateInfo structure with image that is not VK_NULL_HANDLE, and the Android hardware buffer's AHardwareBuffer::usage includes AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE, the image must have a complete mipmap chain. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-pNext-02389)"},
+    {"VUID-VkMemoryAllocateInfo-pNext-02390", "If the parameters define an import operation, the external handle is an Android hardware buffer, and the pNext chain includes a VkMemoryDedicatedAllocateInfo structure with image that is not VK_NULL_HANDLE, each bit set in the usage of image must be listed in AHardwareBuffer Usage Equivalence, and if there is a corresponding AHARDWAREBUFFER_USAGE bit listed that bit must be included in the Android hardware buffer's AHardwareBuffer_Desc::usage. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-pNext-02390)"},
+    {"VUID-VkMemoryAllocateInfo-pNext-02586", "If the parameters define an import operation, the external handle is an Android hardware buffer, and the pNext chain includes a VkMemoryDedicatedAllocateInfo structure with image that is not VK_NULL_HANDLE, and the Android hardware buffer's AHardwareBuffer::usage does not include AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE, the image must have exactly one mipmap level. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-pNext-02586)"},
+    {"VUID-VkMemoryAllocateInfo-pNext-02805", "If the parameters define an import operation and the external handle is a host pointer, the pNext chain must not include a VkDedicatedAllocationMemoryAllocateInfoNV structure with either its image or buffer field set to a value other than VK_NULL_HANDLE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-pNext-02805)"},
+    {"VUID-VkMemoryAllocateInfo-pNext-02806", "If the parameters define an import operation and the external handle is a host pointer, the pNext chain must not include a VkMemoryDedicatedAllocateInfo structure with either its image or buffer field set to a value other than VK_NULL_HANDLE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-pNext-02806)"},
+    {"VUID-VkMemoryAllocateInfo-pNext-03332", "If the pNext chain includes a VkImportMemoryHostPointerInfoEXT structure, VkMemoryOpaqueCaptureAddressAllocateInfoKHR::opaqueCaptureAddress must be zero (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-pNext-03332)"},
+    {"VUID-VkMemoryAllocateInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDedicatedAllocationMemoryAllocateInfoNV, VkExportMemoryAllocateInfo, VkExportMemoryAllocateInfoNV, VkExportMemoryWin32HandleInfoKHR, VkExportMemoryWin32HandleInfoNV, VkImportAndroidHardwareBufferInfoANDROID, VkImportMemoryFdInfoKHR, VkImportMemoryHostPointerInfoEXT, VkImportMemoryWin32HandleInfoKHR, VkImportMemoryWin32HandleInfoNV, VkMemoryAllocateFlagsInfo, VkMemoryDedicatedAllocateInfo, VkMemoryOpaqueCaptureAddressAllocateInfoKHR, or VkMemoryPriorityAllocateInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-pNext-pNext)"},
+    {"VUID-VkMemoryAllocateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-sType-sType)"},
+    {"VUID-VkMemoryAllocateInfo-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-sType-unique)"},
+    {"VUID-VkMemoryBarrier-dstAccessMask-parameter", "dstAccessMask must be a valid combination of VkAccessFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryBarrier-dstAccessMask-parameter)"},
+    {"VUID-VkMemoryBarrier-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryBarrier-pNext-pNext)"},
+    {"VUID-VkMemoryBarrier-sType-sType", "sType must be VK_STRUCTURE_TYPE_MEMORY_BARRIER (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryBarrier-sType-sType)"},
+    {"VUID-VkMemoryBarrier-srcAccessMask-parameter", "srcAccessMask must be a valid combination of VkAccessFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryBarrier-srcAccessMask-parameter)"},
+    {"VUID-VkMemoryDedicatedAllocateInfo-buffer-01435", "If buffer is not VK_NULL_HANDLE, VkMemoryAllocateInfo::allocationSize must equal the VkMemoryRequirements::size of the buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryDedicatedAllocateInfo-buffer-01435)"},
+    {"VUID-VkMemoryDedicatedAllocateInfo-buffer-01436", "If buffer is not VK_NULL_HANDLE, buffer must have been created without VK_BUFFER_CREATE_SPARSE_BINDING_BIT set in VkBufferCreateInfo::flags (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryDedicatedAllocateInfo-buffer-01436)"},
+    {"VUID-VkMemoryDedicatedAllocateInfo-buffer-01877", "If buffer is not VK_NULL_HANDLE and VkMemoryAllocateInfo defines a memory import operation with handle type VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, or VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, and the external handle was created by the Vulkan API, then the memory being imported must also be a dedicated buffer allocation and buffer must be identical to the buffer associated with the imported memory. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryDedicatedAllocateInfo-buffer-01877)"},
+    {"VUID-VkMemoryDedicatedAllocateInfo-buffer-01879", "If buffer is not VK_NULL_HANDLE and VkMemoryAllocateInfo defines a memory import operation with handle type VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT, the memory being imported must also be a dedicated buffer allocation and buffer must be identical to the buffer associated with the imported memory. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryDedicatedAllocateInfo-buffer-01879)"},
+    {"VUID-VkMemoryDedicatedAllocateInfo-buffer-parameter", "If buffer is not VK_NULL_HANDLE, buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryDedicatedAllocateInfo-buffer-parameter)"},
+    {"VUID-VkMemoryDedicatedAllocateInfo-commonparent", "Both of buffer, and image that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryDedicatedAllocateInfo-commonparent)"},
+    {"VUID-VkMemoryDedicatedAllocateInfo-image-01432", "At least one of image and buffer must be VK_NULL_HANDLE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryDedicatedAllocateInfo-image-01432)"},
+    {"VUID-VkMemoryDedicatedAllocateInfo-image-01433", "If image is not VK_NULL_HANDLE, VkMemoryAllocateInfo::allocationSize must equal the VkMemoryRequirements::size of the image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryDedicatedAllocateInfo-image-01433)"},
+    {"VUID-VkMemoryDedicatedAllocateInfo-image-01434", "If image is not VK_NULL_HANDLE, image must have been created without VK_IMAGE_CREATE_SPARSE_BINDING_BIT set in VkImageCreateInfo::flags (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryDedicatedAllocateInfo-image-01434)"},
+    {"VUID-VkMemoryDedicatedAllocateInfo-image-01797", "If image is not VK_NULL_HANDLE, image must not have been created with VK_IMAGE_CREATE_DISJOINT_BIT set in VkImageCreateInfo::flags (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryDedicatedAllocateInfo-image-01797)"},
+    {"VUID-VkMemoryDedicatedAllocateInfo-image-01876", "If image is not VK_NULL_HANDLE and VkMemoryAllocateInfo defines a memory import operation with handle type VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, or VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, and the external handle was created by the Vulkan API, then the memory being imported must also be a dedicated image allocation and image must be identical to the image associated with the imported memory. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryDedicatedAllocateInfo-image-01876)"},
+    {"VUID-VkMemoryDedicatedAllocateInfo-image-01878", "If image is not VK_NULL_HANDLE and VkMemoryAllocateInfo defines a memory import operation with handle type VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT, the memory being imported must also be a dedicated image allocation and image must be identical to the image associated with the imported memory. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryDedicatedAllocateInfo-image-01878)"},
+    {"VUID-VkMemoryDedicatedAllocateInfo-image-parameter", "If image is not VK_NULL_HANDLE, image must be a valid VkImage handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryDedicatedAllocateInfo-image-parameter)"},
+    {"VUID-VkMemoryDedicatedAllocateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryDedicatedAllocateInfo-sType-sType)"},
+    {"VUID-VkMemoryDedicatedRequirements-sType-sType", "sType must be VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryDedicatedRequirements-sType-sType)"},
+    {"VUID-VkMemoryFdPropertiesKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryFdPropertiesKHR-pNext-pNext)"},
+    {"VUID-VkMemoryFdPropertiesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryFdPropertiesKHR-sType-sType)"},
+    {"VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-handleTypes-01882", "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID must have been included in VkExportMemoryAllocateInfo::handleTypes when memory was created. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-handleTypes-01882)"},
+    {"VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-memory-parameter", "memory must be a valid VkDeviceMemory handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-memory-parameter)"},
+    {"VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-pNext-01883", "If the pNext chain of the VkMemoryAllocateInfo used to allocate memory included a VkMemoryDedicatedAllocateInfo with non-NULL image member, then that image must already be bound to memory. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-pNext-01883)"},
+    {"VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-pNext-pNext)"},
+    {"VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-sType-sType", "sType must be VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-sType-sType)"},
+    {"VUID-VkMemoryGetFdInfoKHR-handleType-00671", "handleType must have been included in VkExportMemoryAllocateInfo::handleTypes when memory was created. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryGetFdInfoKHR-handleType-00671)"},
+    {"VUID-VkMemoryGetFdInfoKHR-handleType-00672", "handleType must be defined as a POSIX file descriptor handle. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryGetFdInfoKHR-handleType-00672)"},
+    {"VUID-VkMemoryGetFdInfoKHR-handleType-parameter", "handleType must be a valid VkExternalMemoryHandleTypeFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryGetFdInfoKHR-handleType-parameter)"},
+    {"VUID-VkMemoryGetFdInfoKHR-memory-parameter", "memory must be a valid VkDeviceMemory handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryGetFdInfoKHR-memory-parameter)"},
+    {"VUID-VkMemoryGetFdInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryGetFdInfoKHR-pNext-pNext)"},
+    {"VUID-VkMemoryGetFdInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryGetFdInfoKHR-sType-sType)"},
+    {"VUID-VkMemoryGetWin32HandleInfoKHR-handleType-00662", "handleType must have been included in VkExportMemoryAllocateInfo::handleTypes when memory was created. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryGetWin32HandleInfoKHR-handleType-00662)"},
+    {"VUID-VkMemoryGetWin32HandleInfoKHR-handleType-00663", "If handleType is defined as an NT handle, vkGetMemoryWin32HandleKHR must be called no more than once for each valid unique combination of memory and handleType. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryGetWin32HandleInfoKHR-handleType-00663)"},
+    {"VUID-VkMemoryGetWin32HandleInfoKHR-handleType-00664", "handleType must be defined as an NT handle or a global share handle. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryGetWin32HandleInfoKHR-handleType-00664)"},
+    {"VUID-VkMemoryGetWin32HandleInfoKHR-handleType-parameter", "handleType must be a valid VkExternalMemoryHandleTypeFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryGetWin32HandleInfoKHR-handleType-parameter)"},
+    {"VUID-VkMemoryGetWin32HandleInfoKHR-memory-parameter", "memory must be a valid VkDeviceMemory handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryGetWin32HandleInfoKHR-memory-parameter)"},
+    {"VUID-VkMemoryGetWin32HandleInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryGetWin32HandleInfoKHR-pNext-pNext)"},
+    {"VUID-VkMemoryGetWin32HandleInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryGetWin32HandleInfoKHR-sType-sType)"},
+    {"VUID-VkMemoryHostPointerPropertiesEXT-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryHostPointerPropertiesEXT-pNext-pNext)"},
+    {"VUID-VkMemoryHostPointerPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryHostPointerPropertiesEXT-sType-sType)"},
+    {"VUID-VkMemoryOpaqueCaptureAddressAllocateInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryOpaqueCaptureAddressAllocateInfoKHR-sType-sType)"},
+    {"VUID-VkMemoryPriorityAllocateInfoEXT-priority-02602", "priority must be between 0 and 1, inclusive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryPriorityAllocateInfoEXT-priority-02602)"},
+    {"VUID-VkMemoryPriorityAllocateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryPriorityAllocateInfoEXT-sType-sType)"},
+    {"VUID-VkMemoryRequirements2-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkMemoryDedicatedRequirements (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryRequirements2-pNext-pNext)"},
+    {"VUID-VkMemoryRequirements2-sType-sType", "sType must be VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryRequirements2-sType-sType)"},
+    {"VUID-VkMemoryWin32HandlePropertiesKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryWin32HandlePropertiesKHR-pNext-pNext)"},
+    {"VUID-VkMemoryWin32HandlePropertiesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryWin32HandlePropertiesKHR-sType-sType)"},
+    {"VUID-VkMetalSurfaceCreateInfoEXT-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMetalSurfaceCreateInfoEXT-flags-zerobitmask)"},
+    {"VUID-VkMetalSurfaceCreateInfoEXT-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMetalSurfaceCreateInfoEXT-pNext-pNext)"},
+    {"VUID-VkMetalSurfaceCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMetalSurfaceCreateInfoEXT-sType-sType)"},
+    {"VUID-VkMultisamplePropertiesEXT-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMultisamplePropertiesEXT-pNext-pNext)"},
+    {"VUID-VkMultisamplePropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMultisamplePropertiesEXT-sType-sType)"},
+    {"VUID-VkObjectTableCreateInfoNVX-computeBindingPointSupport-01355", "If the VkDeviceGeneratedCommandsFeaturesNVX::computeBindingPointSupport feature is not enabled, pObjectEntryUsageFlags must not contain VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableCreateInfoNVX-computeBindingPointSupport-01355)"},
+    {"VUID-VkObjectTableCreateInfoNVX-maxSampledImagesPerDescriptor-01360", "maxSampledImagesPerDescriptor must be within the limits supported by the device. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableCreateInfoNVX-maxSampledImagesPerDescriptor-01360)"},
+    {"VUID-VkObjectTableCreateInfoNVX-maxStorageBuffersPerDescriptor-01358", "maxStorageBuffersPerDescriptor must be within the limits supported by the device. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableCreateInfoNVX-maxStorageBuffersPerDescriptor-01358)"},
+    {"VUID-VkObjectTableCreateInfoNVX-maxStorageImagesPerDescriptor-01359", "maxStorageImagesPerDescriptor must be within the limits supported by the device. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableCreateInfoNVX-maxStorageImagesPerDescriptor-01359)"},
+    {"VUID-VkObjectTableCreateInfoNVX-maxUniformBuffersPerDescriptor-01357", "maxUniformBuffersPerDescriptor must be within the limits supported by the device. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableCreateInfoNVX-maxUniformBuffersPerDescriptor-01357)"},
+    {"VUID-VkObjectTableCreateInfoNVX-objectCount-arraylength", "objectCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableCreateInfoNVX-objectCount-arraylength)"},
+    {"VUID-VkObjectTableCreateInfoNVX-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableCreateInfoNVX-pNext-pNext)"},
+    {"VUID-VkObjectTableCreateInfoNVX-pObjectEntryCounts-01356", "Any value within pObjectEntryCounts must not exceed VkDeviceGeneratedCommandsLimitsNVX::maxObjectEntryCounts (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableCreateInfoNVX-pObjectEntryCounts-01356)"},
+    {"VUID-VkObjectTableCreateInfoNVX-pObjectEntryCounts-parameter", "pObjectEntryCounts must be a valid pointer to an array of objectCount uint32_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableCreateInfoNVX-pObjectEntryCounts-parameter)"},
+    {"VUID-VkObjectTableCreateInfoNVX-pObjectEntryTypes-parameter", "pObjectEntryTypes must be a valid pointer to an array of objectCount valid VkObjectEntryTypeNVX values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableCreateInfoNVX-pObjectEntryTypes-parameter)"},
+    {"VUID-VkObjectTableCreateInfoNVX-pObjectEntryUsageFlags-parameter", "pObjectEntryUsageFlags must be a valid pointer to an array of objectCount valid combinations of VkObjectEntryUsageFlagBitsNVX values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableCreateInfoNVX-pObjectEntryUsageFlags-parameter)"},
+    {"VUID-VkObjectTableCreateInfoNVX-pObjectEntryUsageFlags-requiredbitmask", "Each element of pObjectEntryUsageFlags must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableCreateInfoNVX-pObjectEntryUsageFlags-requiredbitmask)"},
+    {"VUID-VkObjectTableCreateInfoNVX-sType-sType", "sType must be VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableCreateInfoNVX-sType-sType)"},
+    {"VUID-VkObjectTableDescriptorSetEntryNVX-commonparent", "Both of descriptorSet, and pipelineLayout must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableDescriptorSetEntryNVX-commonparent)"},
+    {"VUID-VkObjectTableDescriptorSetEntryNVX-descriptorSet-parameter", "descriptorSet must be a valid VkDescriptorSet handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableDescriptorSetEntryNVX-descriptorSet-parameter)"},
+    {"VUID-VkObjectTableDescriptorSetEntryNVX-flags-parameter", "flags must be a valid combination of VkObjectEntryUsageFlagBitsNVX values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableDescriptorSetEntryNVX-flags-parameter)"},
+    {"VUID-VkObjectTableDescriptorSetEntryNVX-flags-requiredbitmask", "flags must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableDescriptorSetEntryNVX-flags-requiredbitmask)"},
+    {"VUID-VkObjectTableDescriptorSetEntryNVX-pipelineLayout-parameter", "pipelineLayout must be a valid VkPipelineLayout handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableDescriptorSetEntryNVX-pipelineLayout-parameter)"},
+    {"VUID-VkObjectTableDescriptorSetEntryNVX-type-01369", "type must be VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableDescriptorSetEntryNVX-type-01369)"},
+    {"VUID-VkObjectTableDescriptorSetEntryNVX-type-parameter", "type must be a valid VkObjectEntryTypeNVX value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableDescriptorSetEntryNVX-type-parameter)"},
+    {"VUID-VkObjectTableEntryNVX-computeBindingPointSupport-01367", "If the VkDeviceGeneratedCommandsFeaturesNVX::computeBindingPointSupport feature is not enabled, flags must not contain VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableEntryNVX-computeBindingPointSupport-01367)"},
+    {"VUID-VkObjectTableEntryNVX-flags-parameter", "flags must be a valid combination of VkObjectEntryUsageFlagBitsNVX values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableEntryNVX-flags-parameter)"},
+    {"VUID-VkObjectTableEntryNVX-flags-requiredbitmask", "flags must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableEntryNVX-flags-requiredbitmask)"},
+    {"VUID-VkObjectTableEntryNVX-type-parameter", "type must be a valid VkObjectEntryTypeNVX value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableEntryNVX-type-parameter)"},
+    {"VUID-VkObjectTableIndexBufferEntryNVX-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableIndexBufferEntryNVX-buffer-parameter)"},
+    {"VUID-VkObjectTableIndexBufferEntryNVX-flags-parameter", "flags must be a valid combination of VkObjectEntryUsageFlagBitsNVX values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableIndexBufferEntryNVX-flags-parameter)"},
+    {"VUID-VkObjectTableIndexBufferEntryNVX-flags-requiredbitmask", "flags must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableIndexBufferEntryNVX-flags-requiredbitmask)"},
+    {"VUID-VkObjectTableIndexBufferEntryNVX-indexType-02783", "indexType must be VK_INDEX_TYPE_UINT16, or VK_INDEX_TYPE_UINT32 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableIndexBufferEntryNVX-indexType-02783)"},
+    {"VUID-VkObjectTableIndexBufferEntryNVX-indexType-parameter", "indexType must be a valid VkIndexType value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableIndexBufferEntryNVX-indexType-parameter)"},
+    {"VUID-VkObjectTableIndexBufferEntryNVX-type-01371", "type must be VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableIndexBufferEntryNVX-type-01371)"},
+    {"VUID-VkObjectTableIndexBufferEntryNVX-type-parameter", "type must be a valid VkObjectEntryTypeNVX value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableIndexBufferEntryNVX-type-parameter)"},
+    {"VUID-VkObjectTablePipelineEntryNVX-flags-parameter", "flags must be a valid combination of VkObjectEntryUsageFlagBitsNVX values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTablePipelineEntryNVX-flags-parameter)"},
+    {"VUID-VkObjectTablePipelineEntryNVX-flags-requiredbitmask", "flags must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTablePipelineEntryNVX-flags-requiredbitmask)"},
+    {"VUID-VkObjectTablePipelineEntryNVX-pipeline-parameter", "pipeline must be a valid VkPipeline handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTablePipelineEntryNVX-pipeline-parameter)"},
+    {"VUID-VkObjectTablePipelineEntryNVX-type-01368", "type must be VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTablePipelineEntryNVX-type-01368)"},
+    {"VUID-VkObjectTablePipelineEntryNVX-type-parameter", "type must be a valid VkObjectEntryTypeNVX value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTablePipelineEntryNVX-type-parameter)"},
+    {"VUID-VkObjectTablePushConstantEntryNVX-flags-parameter", "flags must be a valid combination of VkObjectEntryUsageFlagBitsNVX values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTablePushConstantEntryNVX-flags-parameter)"},
+    {"VUID-VkObjectTablePushConstantEntryNVX-flags-requiredbitmask", "flags must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTablePushConstantEntryNVX-flags-requiredbitmask)"},
+    {"VUID-VkObjectTablePushConstantEntryNVX-pipelineLayout-parameter", "pipelineLayout must be a valid VkPipelineLayout handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTablePushConstantEntryNVX-pipelineLayout-parameter)"},
+    {"VUID-VkObjectTablePushConstantEntryNVX-stageFlags-parameter", "stageFlags must be a valid combination of VkShaderStageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTablePushConstantEntryNVX-stageFlags-parameter)"},
+    {"VUID-VkObjectTablePushConstantEntryNVX-stageFlags-requiredbitmask", "stageFlags must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTablePushConstantEntryNVX-stageFlags-requiredbitmask)"},
+    {"VUID-VkObjectTablePushConstantEntryNVX-type-01372", "type must be VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTablePushConstantEntryNVX-type-01372)"},
+    {"VUID-VkObjectTablePushConstantEntryNVX-type-parameter", "type must be a valid VkObjectEntryTypeNVX value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTablePushConstantEntryNVX-type-parameter)"},
+    {"VUID-VkObjectTableVertexBufferEntryNVX-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableVertexBufferEntryNVX-buffer-parameter)"},
+    {"VUID-VkObjectTableVertexBufferEntryNVX-flags-parameter", "flags must be a valid combination of VkObjectEntryUsageFlagBitsNVX values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableVertexBufferEntryNVX-flags-parameter)"},
+    {"VUID-VkObjectTableVertexBufferEntryNVX-flags-requiredbitmask", "flags must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableVertexBufferEntryNVX-flags-requiredbitmask)"},
+    {"VUID-VkObjectTableVertexBufferEntryNVX-type-01370", "type must be VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableVertexBufferEntryNVX-type-01370)"},
+    {"VUID-VkObjectTableVertexBufferEntryNVX-type-parameter", "type must be a valid VkObjectEntryTypeNVX value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkObjectTableVertexBufferEntryNVX-type-parameter)"},
+    {"VUID-VkPerformanceConfigurationAcquireInfoINTEL-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceConfigurationAcquireInfoINTEL-pNext-pNext)"},
+    {"VUID-VkPerformanceConfigurationAcquireInfoINTEL-sType-sType", "sType must be VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceConfigurationAcquireInfoINTEL-sType-sType)"},
+    {"VUID-VkPerformanceConfigurationAcquireInfoINTEL-type-parameter", "type must be a valid VkPerformanceConfigurationTypeINTEL value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceConfigurationAcquireInfoINTEL-type-parameter)"},
+    {"VUID-VkPerformanceCounterDescriptionKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceCounterDescriptionKHR-pNext-pNext)"},
+    {"VUID-VkPerformanceCounterDescriptionKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceCounterDescriptionKHR-sType-sType)"},
+    {"VUID-VkPerformanceCounterKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceCounterKHR-pNext-pNext)"},
+    {"VUID-VkPerformanceCounterKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceCounterKHR-sType-sType)"},
+    {"VUID-VkPerformanceMarkerInfoINTEL-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceMarkerInfoINTEL-pNext-pNext)"},
+    {"VUID-VkPerformanceMarkerInfoINTEL-sType-sType", "sType must be VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceMarkerInfoINTEL-sType-sType)"},
+    {"VUID-VkPerformanceOverrideInfoINTEL-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceOverrideInfoINTEL-pNext-pNext)"},
+    {"VUID-VkPerformanceOverrideInfoINTEL-sType-sType", "sType must be VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceOverrideInfoINTEL-sType-sType)"},
+    {"VUID-VkPerformanceOverrideInfoINTEL-type-parameter", "type must be a valid VkPerformanceOverrideTypeINTEL value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceOverrideInfoINTEL-type-parameter)"},
+    {"VUID-VkPerformanceQuerySubmitInfoKHR-counterPassIndex-03221", "counterPassIndex must be less than the number of counter passes required by any queries within the batch. The required number of counter passes for a performance query is obtained by calling vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceQuerySubmitInfoKHR-counterPassIndex-03221)"},
+    {"VUID-VkPerformanceQuerySubmitInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceQuerySubmitInfoKHR-sType-sType)"},
+    {"VUID-VkPerformanceStreamMarkerInfoINTEL-marker-02735", "The value written by the application into marker must only used the valid bits as reported by vkGetPerformanceParameterINTEL with the VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceStreamMarkerInfoINTEL-marker-02735)"},
+    {"VUID-VkPerformanceStreamMarkerInfoINTEL-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceStreamMarkerInfoINTEL-pNext-pNext)"},
+    {"VUID-VkPerformanceStreamMarkerInfoINTEL-sType-sType", "sType must be VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceStreamMarkerInfoINTEL-sType-sType)"},
+    {"VUID-VkPerformanceValueDataINTEL-valueString-parameter", "valueString must be a valid pointer to a valid (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceValueDataINTEL-valueString-parameter)"},
+    {"VUID-VkPerformanceValueINTEL-data-parameter", "data must be a valid VkPerformanceValueDataINTEL union (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceValueINTEL-data-parameter)"},
+    {"VUID-VkPerformanceValueINTEL-type-parameter", "type must be a valid VkPerformanceValueTypeINTEL value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPerformanceValueINTEL-type-parameter)"},
+    {"VUID-VkPhysicalDevice16BitStorageFeatures-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDevice16BitStorageFeatures-sType-sType)"},
+    {"VUID-VkPhysicalDevice8BitStorageFeaturesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDevice8BitStorageFeaturesKHR-sType-sType)"},
+    {"VUID-VkPhysicalDeviceASTCDecodeFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceASTCDecodeFeaturesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceBufferDeviceAddressFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceBufferDeviceAddressFeaturesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceBufferDeviceAddressFeaturesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceBufferDeviceAddressFeaturesKHR-sType-sType)"},
+    {"VUID-VkPhysicalDeviceCoherentMemoryFeaturesAMD-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceCoherentMemoryFeaturesAMD-sType-sType)"},
+    {"VUID-VkPhysicalDeviceComputeShaderDerivativesFeaturesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceComputeShaderDerivativesFeaturesNV-sType-sType)"},
+    {"VUID-VkPhysicalDeviceConditionalRenderingFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceConditionalRenderingFeaturesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceConservativeRasterizationPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceConservativeRasterizationPropertiesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceCooperativeMatrixFeaturesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceCooperativeMatrixFeaturesNV-sType-sType)"},
+    {"VUID-VkPhysicalDeviceCooperativeMatrixPropertiesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceCooperativeMatrixPropertiesNV-sType-sType)"},
+    {"VUID-VkPhysicalDeviceCornerSampledImageFeaturesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceCornerSampledImageFeaturesNV-sType-sType)"},
+    {"VUID-VkPhysicalDeviceCoverageReductionModeFeaturesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceCoverageReductionModeFeaturesNV-sType-sType)"},
+    {"VUID-VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV-sType-sType)"},
+    {"VUID-VkPhysicalDeviceDepthClipEnableFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceDepthClipEnableFeaturesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceDepthStencilResolvePropertiesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceDepthStencilResolvePropertiesKHR-sType-sType)"},
+    {"VUID-VkPhysicalDeviceDescriptorIndexingFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceDescriptorIndexingFeaturesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceDescriptorIndexingPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceDescriptorIndexingPropertiesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceDiscardRectanglePropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceDiscardRectanglePropertiesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceDriverPropertiesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceDriverPropertiesKHR-sType-sType)"},
+    {"VUID-VkPhysicalDeviceExclusiveScissorFeaturesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceExclusiveScissorFeaturesNV-sType-sType)"},
+    {"VUID-VkPhysicalDeviceExternalBufferInfo-flags-parameter", "flags must be a valid combination of VkBufferCreateFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceExternalBufferInfo-flags-parameter)"},
+    {"VUID-VkPhysicalDeviceExternalBufferInfo-handleType-parameter", "handleType must be a valid VkExternalMemoryHandleTypeFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceExternalBufferInfo-handleType-parameter)"},
+    {"VUID-VkPhysicalDeviceExternalBufferInfo-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceExternalBufferInfo-pNext-pNext)"},
+    {"VUID-VkPhysicalDeviceExternalBufferInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceExternalBufferInfo-sType-sType)"},
+    {"VUID-VkPhysicalDeviceExternalBufferInfo-usage-parameter", "usage must be a valid combination of VkBufferUsageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceExternalBufferInfo-usage-parameter)"},
+    {"VUID-VkPhysicalDeviceExternalBufferInfo-usage-requiredbitmask", "usage must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceExternalBufferInfo-usage-requiredbitmask)"},
+    {"VUID-VkPhysicalDeviceExternalFenceInfo-handleType-parameter", "handleType must be a valid VkExternalFenceHandleTypeFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceExternalFenceInfo-handleType-parameter)"},
+    {"VUID-VkPhysicalDeviceExternalFenceInfo-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceExternalFenceInfo-pNext-pNext)"},
+    {"VUID-VkPhysicalDeviceExternalFenceInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceExternalFenceInfo-sType-sType)"},
+    {"VUID-VkPhysicalDeviceExternalImageFormatInfo-handleType-parameter", "If handleType is not 0, handleType must be a valid VkExternalMemoryHandleTypeFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceExternalImageFormatInfo-handleType-parameter)"},
+    {"VUID-VkPhysicalDeviceExternalImageFormatInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceExternalImageFormatInfo-sType-sType)"},
+    {"VUID-VkPhysicalDeviceExternalMemoryHostPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceExternalMemoryHostPropertiesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceExternalSemaphoreInfo-handleType-parameter", "handleType must be a valid VkExternalSemaphoreHandleTypeFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceExternalSemaphoreInfo-handleType-parameter)"},
+    {"VUID-VkPhysicalDeviceExternalSemaphoreInfo-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkSemaphoreTypeCreateInfoKHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceExternalSemaphoreInfo-pNext-pNext)"},
+    {"VUID-VkPhysicalDeviceExternalSemaphoreInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceExternalSemaphoreInfo-sType-sType)"},
+    {"VUID-VkPhysicalDeviceFeatures2-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceFeatures2-sType-sType)"},
+    {"VUID-VkPhysicalDeviceFloatControlsPropertiesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceFloatControlsPropertiesKHR-sType-sType)"},
+    {"VUID-VkPhysicalDeviceFragmentDensityMapFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceFragmentDensityMapFeaturesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceFragmentDensityMapPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceFragmentDensityMapPropertiesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV-sType-sType)"},
+    {"VUID-VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceGroupProperties-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceGroupProperties-pNext-pNext)"},
+    {"VUID-VkPhysicalDeviceGroupProperties-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceGroupProperties-sType-sType)"},
+    {"VUID-VkPhysicalDeviceHostQueryResetFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceHostQueryResetFeaturesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceIDProperties-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceIDProperties-sType-sType)"},
+    {"VUID-VkPhysicalDeviceImageDrmFormatModifierInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceImageDrmFormatModifierInfoEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceImageDrmFormatModifierInfoEXT-sharingMode-02314", "If sharingMode is VK_SHARING_MODE_CONCURRENT, then pQueueFamilyIndices must be a valid pointer to an array of queueFamilyIndexCount uint32_t values. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceImageDrmFormatModifierInfoEXT-sharingMode-02314)"},
+    {"VUID-VkPhysicalDeviceImageDrmFormatModifierInfoEXT-sharingMode-02315", "If sharingMode is VK_SHARING_MODE_CONCURRENT, then queueFamilyIndexCount must be greater than 1. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceImageDrmFormatModifierInfoEXT-sharingMode-02315)"},
+    {"VUID-VkPhysicalDeviceImageDrmFormatModifierInfoEXT-sharingMode-02316", "If sharingMode is VK_SHARING_MODE_CONCURRENT, each element of pQueueFamilyIndices must be unique and must be less than the pQueueFamilyPropertyCount returned by vkGetPhysicalDeviceQueueFamilyProperties2 for the physicalDevice that was used to create device. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceImageDrmFormatModifierInfoEXT-sharingMode-02316)"},
+    {"VUID-VkPhysicalDeviceImageDrmFormatModifierInfoEXT-sharingMode-parameter", "sharingMode must be a valid VkSharingMode value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceImageDrmFormatModifierInfoEXT-sharingMode-parameter)"},
+    {"VUID-VkPhysicalDeviceImageFormatInfo2-flags-parameter", "flags must be a valid combination of VkImageCreateFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceImageFormatInfo2-flags-parameter)"},
+    {"VUID-VkPhysicalDeviceImageFormatInfo2-format-parameter", "format must be a valid VkFormat value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceImageFormatInfo2-format-parameter)"},
+    {"VUID-VkPhysicalDeviceImageFormatInfo2-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkImageFormatListCreateInfoKHR, VkImageStencilUsageCreateInfoEXT, VkPhysicalDeviceExternalImageFormatInfo, VkPhysicalDeviceImageDrmFormatModifierInfoEXT, or VkPhysicalDeviceImageViewImageFormatInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceImageFormatInfo2-pNext-pNext)"},
+    {"VUID-VkPhysicalDeviceImageFormatInfo2-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceImageFormatInfo2-sType-sType)"},
+    {"VUID-VkPhysicalDeviceImageFormatInfo2-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceImageFormatInfo2-sType-unique)"},
+    {"VUID-VkPhysicalDeviceImageFormatInfo2-tiling-02249", "tiling must be VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT if and only if the pNext chain includes VkPhysicalDeviceImageDrmFormatModifierInfoEXT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceImageFormatInfo2-tiling-02249)"},
+    {"VUID-VkPhysicalDeviceImageFormatInfo2-tiling-02313", "If tiling is VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT and flags contains VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT, then the pNext chain must include VkImageFormatListCreateInfoKHR with non-zero viewFormatCount. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceImageFormatInfo2-tiling-02313)"},
+    {"VUID-VkPhysicalDeviceImageFormatInfo2-tiling-parameter", "tiling must be a valid VkImageTiling value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceImageFormatInfo2-tiling-parameter)"},
+    {"VUID-VkPhysicalDeviceImageFormatInfo2-type-parameter", "type must be a valid VkImageType value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceImageFormatInfo2-type-parameter)"},
+    {"VUID-VkPhysicalDeviceImageFormatInfo2-usage-parameter", "usage must be a valid combination of VkImageUsageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceImageFormatInfo2-usage-parameter)"},
+    {"VUID-VkPhysicalDeviceImageFormatInfo2-usage-requiredbitmask", "usage must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceImageFormatInfo2-usage-requiredbitmask)"},
+    {"VUID-VkPhysicalDeviceImageViewImageFormatInfoEXT-imageViewType-parameter", "imageViewType must be a valid VkImageViewType value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceImageViewImageFormatInfoEXT-imageViewType-parameter)"},
+    {"VUID-VkPhysicalDeviceImageViewImageFormatInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceImageViewImageFormatInfoEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceImagelessFramebufferFeaturesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceImagelessFramebufferFeaturesKHR-sType-sType)"},
+    {"VUID-VkPhysicalDeviceIndexTypeUint8FeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceIndexTypeUint8FeaturesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceInlineUniformBlockFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceInlineUniformBlockFeaturesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceInlineUniformBlockPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceInlineUniformBlockPropertiesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceLineRasterizationFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceLineRasterizationFeaturesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceLineRasterizationPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceLineRasterizationPropertiesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceMaintenance3Properties-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceMaintenance3Properties-sType-sType)"},
+    {"VUID-VkPhysicalDeviceMemoryBudgetPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceMemoryBudgetPropertiesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceMemoryPriorityFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceMemoryPriorityFeaturesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceMemoryProperties2-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkPhysicalDeviceMemoryBudgetPropertiesEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceMemoryProperties2-pNext-pNext)"},
+    {"VUID-VkPhysicalDeviceMemoryProperties2-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceMemoryProperties2-sType-sType)"},
+    {"VUID-VkPhysicalDeviceMeshShaderFeaturesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceMeshShaderFeaturesNV-sType-sType)"},
+    {"VUID-VkPhysicalDeviceMeshShaderPropertiesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceMeshShaderPropertiesNV-sType-sType)"},
+    {"VUID-VkPhysicalDeviceMultiviewFeatures-multiviewGeometryShader-00580", "If multiviewGeometryShader is enabled then multiview must also be enabled. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceMultiviewFeatures-multiviewGeometryShader-00580)"},
+    {"VUID-VkPhysicalDeviceMultiviewFeatures-multiviewTessellationShader-00581", "If multiviewTessellationShader is enabled then multiview must also be enabled. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceMultiviewFeatures-multiviewTessellationShader-00581)"},
+    {"VUID-VkPhysicalDeviceMultiviewFeatures-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceMultiviewFeatures-sType-sType)"},
+    {"VUID-VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX-sType-sType)"},
+    {"VUID-VkPhysicalDeviceMultiviewProperties-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceMultiviewProperties-sType-sType)"},
+    {"VUID-VkPhysicalDevicePCIBusInfoPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDevicePCIBusInfoPropertiesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDevicePerformanceQueryFeaturesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDevicePerformanceQueryFeaturesKHR-sType-sType)"},
+    {"VUID-VkPhysicalDevicePerformanceQueryPropertiesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDevicePerformanceQueryPropertiesKHR-sType-sType)"},
+    {"VUID-VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR-sType-sType)"},
+    {"VUID-VkPhysicalDevicePointClippingProperties-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDevicePointClippingProperties-sType-sType)"},
+    {"VUID-VkPhysicalDeviceProperties2-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT, VkPhysicalDeviceConservativeRasterizationPropertiesEXT, VkPhysicalDeviceCooperativeMatrixPropertiesNV, VkPhysicalDeviceDepthStencilResolvePropertiesKHR, VkPhysicalDeviceDescriptorIndexingPropertiesEXT, VkPhysicalDeviceDiscardRectanglePropertiesEXT, VkPhysicalDeviceDriverPropertiesKHR, VkPhysicalDeviceExternalMemoryHostPropertiesEXT, VkPhysicalDeviceFloatControlsPropertiesKHR, VkPhysicalDeviceFragmentDensityMapPropertiesEXT, VkPhysicalDeviceIDProperties, VkPhysicalDeviceInlineUniformBlockPropertiesEXT, VkPhysicalDeviceLineRasterizationPropertiesEXT, VkPhysicalDeviceMaintenance3Properties, VkPhysicalDeviceMeshShaderPropertiesNV, VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, VkPhysicalDeviceMultiviewProperties, VkPhysicalDevicePCIBusInfoPropertiesEXT, VkPhysicalDevicePerformanceQueryPropertiesKHR, VkPhysicalDevicePointClippingProperties, VkPhysicalDeviceProtectedMemoryProperties, VkPhysicalDevicePushDescriptorPropertiesKHR, VkPhysicalDeviceRayTracingPropertiesNV, VkPhysicalDeviceSampleLocationsPropertiesEXT, VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT, VkPhysicalDeviceShaderCoreProperties2AMD, VkPhysicalDeviceShaderCorePropertiesAMD, VkPhysicalDeviceShaderSMBuiltinsPropertiesNV, VkPhysicalDeviceShadingRateImagePropertiesNV, VkPhysicalDeviceSubgroupProperties, VkPhysicalDeviceSubgroupSizeControlPropertiesEXT, VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT, VkPhysicalDeviceTimelineSemaphorePropertiesKHR, VkPhysicalDeviceTransformFeedbackPropertiesEXT, or VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceProperties2-pNext-pNext)"},
+    {"VUID-VkPhysicalDeviceProperties2-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceProperties2-sType-sType)"},
+    {"VUID-VkPhysicalDeviceProperties2-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceProperties2-sType-unique)"},
+    {"VUID-VkPhysicalDeviceProtectedMemoryFeatures-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceProtectedMemoryFeatures-sType-sType)"},
+    {"VUID-VkPhysicalDeviceProtectedMemoryProperties-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceProtectedMemoryProperties-sType-sType)"},
+    {"VUID-VkPhysicalDevicePushDescriptorPropertiesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDevicePushDescriptorPropertiesKHR-sType-sType)"},
+    {"VUID-VkPhysicalDeviceRayTracingPropertiesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceRayTracingPropertiesNV-sType-sType)"},
+    {"VUID-VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV-sType-sType)"},
+    {"VUID-VkPhysicalDeviceSampleLocationsPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSampleLocationsPropertiesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceSamplerYcbcrConversionFeatures-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSamplerYcbcrConversionFeatures-sType-sType)"},
+    {"VUID-VkPhysicalDeviceScalarBlockLayoutFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceScalarBlockLayoutFeaturesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR-sType-sType)"},
+    {"VUID-VkPhysicalDeviceShaderAtomicInt64FeaturesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceShaderAtomicInt64FeaturesKHR-sType-sType)"},
+    {"VUID-VkPhysicalDeviceShaderClockFeaturesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceShaderClockFeaturesKHR-sType-sType)"},
+    {"VUID-VkPhysicalDeviceShaderCoreProperties2AMD-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceShaderCoreProperties2AMD-sType-sType)"},
+    {"VUID-VkPhysicalDeviceShaderCorePropertiesAMD-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceShaderCorePropertiesAMD-sType-sType)"},
+    {"VUID-VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceShaderDrawParametersFeatures-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceShaderDrawParametersFeatures-sType-sType)"},
+    {"VUID-VkPhysicalDeviceShaderFloat16Int8FeaturesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceShaderFloat16Int8FeaturesKHR-sType-sType)"},
+    {"VUID-VkPhysicalDeviceShaderImageFootprintFeaturesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceShaderImageFootprintFeaturesNV-sType-sType)"},
+    {"VUID-VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL-sType-sType)"},
+    {"VUID-VkPhysicalDeviceShaderSMBuiltinsFeaturesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceShaderSMBuiltinsFeaturesNV-sType-sType)"},
+    {"VUID-VkPhysicalDeviceShaderSMBuiltinsPropertiesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceShaderSMBuiltinsPropertiesNV-sType-sType)"},
+    {"VUID-VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR-sType-sType)"},
+    {"VUID-VkPhysicalDeviceShadingRateImageFeaturesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceShadingRateImageFeaturesNV-sType-sType)"},
+    {"VUID-VkPhysicalDeviceShadingRateImagePropertiesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceShadingRateImagePropertiesNV-sType-sType)"},
+    {"VUID-VkPhysicalDeviceSparseImageFormatInfo2-format-parameter", "format must be a valid VkFormat value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSparseImageFormatInfo2-format-parameter)"},
+    {"VUID-VkPhysicalDeviceSparseImageFormatInfo2-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSparseImageFormatInfo2-pNext-pNext)"},
+    {"VUID-VkPhysicalDeviceSparseImageFormatInfo2-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSparseImageFormatInfo2-sType-sType)"},
+    {"VUID-VkPhysicalDeviceSparseImageFormatInfo2-samples-01095", "samples must be a bit value that is set in VkImageFormatProperties::sampleCounts returned by vkGetPhysicalDeviceImageFormatProperties with format, type, tiling, and usage equal to those in this command and flags equal to the value that is set in VkImageCreateInfo::flags when the image is created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSparseImageFormatInfo2-samples-01095)"},
+    {"VUID-VkPhysicalDeviceSparseImageFormatInfo2-samples-parameter", "samples must be a valid VkSampleCountFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSparseImageFormatInfo2-samples-parameter)"},
+    {"VUID-VkPhysicalDeviceSparseImageFormatInfo2-tiling-parameter", "tiling must be a valid VkImageTiling value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSparseImageFormatInfo2-tiling-parameter)"},
+    {"VUID-VkPhysicalDeviceSparseImageFormatInfo2-type-parameter", "type must be a valid VkImageType value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSparseImageFormatInfo2-type-parameter)"},
+    {"VUID-VkPhysicalDeviceSparseImageFormatInfo2-usage-parameter", "usage must be a valid combination of VkImageUsageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSparseImageFormatInfo2-usage-parameter)"},
+    {"VUID-VkPhysicalDeviceSparseImageFormatInfo2-usage-requiredbitmask", "usage must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSparseImageFormatInfo2-usage-requiredbitmask)"},
+    {"VUID-VkPhysicalDeviceSubgroupProperties-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSubgroupProperties-sType-sType)"},
+    {"VUID-VkPhysicalDeviceSubgroupSizeControlFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSubgroupSizeControlFeaturesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceSubgroupSizeControlPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSubgroupSizeControlPropertiesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceSurfaceInfo2KHR-pNext-02672", "If the pNext chain includes a VkSurfaceFullScreenExclusiveInfoEXT structure with its fullScreenExclusive member set to VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT, and surface was created using vkCreateWin32SurfaceKHR, a VkSurfaceFullScreenExclusiveWin32InfoEXT structure must be included in the pNext chain (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSurfaceInfo2KHR-pNext-02672)"},
+    {"VUID-VkPhysicalDeviceSurfaceInfo2KHR-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkSurfaceFullScreenExclusiveInfoEXT or VkSurfaceFullScreenExclusiveWin32InfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSurfaceInfo2KHR-pNext-pNext)"},
+    {"VUID-VkPhysicalDeviceSurfaceInfo2KHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSurfaceInfo2KHR-sType-sType)"},
+    {"VUID-VkPhysicalDeviceSurfaceInfo2KHR-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSurfaceInfo2KHR-sType-unique)"},
+    {"VUID-VkPhysicalDeviceSurfaceInfo2KHR-surface-parameter", "surface must be a valid VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceSurfaceInfo2KHR-surface-parameter)"},
+    {"VUID-VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceTimelineSemaphoreFeaturesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceTimelineSemaphoreFeaturesKHR-sType-sType)"},
+    {"VUID-VkPhysicalDeviceTimelineSemaphorePropertiesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceTimelineSemaphorePropertiesKHR-sType-sType)"},
+    {"VUID-VkPhysicalDeviceToolPropertiesEXT-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceToolPropertiesEXT-pNext-pNext)"},
+    {"VUID-VkPhysicalDeviceToolPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceToolPropertiesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceTransformFeedbackFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceTransformFeedbackFeaturesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceTransformFeedbackPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceTransformFeedbackPropertiesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR-sType-sType)"},
+    {"VUID-VkPhysicalDeviceVariablePointersFeatures-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceVariablePointersFeatures-sType-sType)"},
+    {"VUID-VkPhysicalDeviceVariablePointersFeatures-variablePointers-01431", "If variablePointers is enabled then variablePointersStorageBuffer must also be enabled. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceVariablePointersFeatures-variablePointers-01431)"},
+    {"VUID-VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT-sType-sType)"},
+    {"VUID-VkPhysicalDeviceVulkanMemoryModelFeaturesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceVulkanMemoryModelFeaturesKHR-sType-sType)"},
+    {"VUID-VkPhysicalDeviceYcbcrImageArraysFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPhysicalDeviceYcbcrImageArraysFeaturesEXT-sType-sType)"},
+    {"VUID-VkPipelineCacheCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCacheCreateInfo-flags-zerobitmask)"},
+    {"VUID-VkPipelineCacheCreateInfo-initialDataSize-00768", "If initialDataSize is not 0, it must be equal to the size of pInitialData, as returned by vkGetPipelineCacheData when pInitialData was originally retrieved (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCacheCreateInfo-initialDataSize-00768)"},
+    {"VUID-VkPipelineCacheCreateInfo-initialDataSize-00769", "If initialDataSize is not 0, pInitialData must have been retrieved from a previous call to vkGetPipelineCacheData (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCacheCreateInfo-initialDataSize-00769)"},
+    {"VUID-VkPipelineCacheCreateInfo-pInitialData-parameter", "If initialDataSize is not 0, pInitialData must be a valid pointer to an array of initialDataSize bytes (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCacheCreateInfo-pInitialData-parameter)"},
+    {"VUID-VkPipelineCacheCreateInfo-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCacheCreateInfo-pNext-pNext)"},
+    {"VUID-VkPipelineCacheCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCacheCreateInfo-sType-sType)"},
+    {"VUID-VkPipelineColorBlendAdvancedStateCreateInfoEXT-blendOverlap-01426", "If the correlated overlap property is not supported, blendOverlap must be VK_BLEND_OVERLAP_UNCORRELATED_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendAdvancedStateCreateInfoEXT-blendOverlap-01426)"},
+    {"VUID-VkPipelineColorBlendAdvancedStateCreateInfoEXT-blendOverlap-parameter", "blendOverlap must be a valid VkBlendOverlapEXT value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendAdvancedStateCreateInfoEXT-blendOverlap-parameter)"},
+    {"VUID-VkPipelineColorBlendAdvancedStateCreateInfoEXT-dstPremultiplied-01425", "If the non-premultiplied destination color property is not supported, dstPremultiplied must be VK_TRUE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendAdvancedStateCreateInfoEXT-dstPremultiplied-01425)"},
+    {"VUID-VkPipelineColorBlendAdvancedStateCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendAdvancedStateCreateInfoEXT-sType-sType)"},
+    {"VUID-VkPipelineColorBlendAdvancedStateCreateInfoEXT-srcPremultiplied-01424", "If the non-premultiplied source color property is not supported, srcPremultiplied must be VK_TRUE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendAdvancedStateCreateInfoEXT-srcPremultiplied-01424)"},
+    {"VUID-VkPipelineColorBlendAttachmentState-advancedBlendAllOperations-01409", "If VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT::advancedBlendAllOperations is VK_FALSE, then colorBlendOp must not be VK_BLEND_OP_ZERO_EXT, VK_BLEND_OP_SRC_EXT, VK_BLEND_OP_DST_EXT, VK_BLEND_OP_SRC_OVER_EXT, VK_BLEND_OP_DST_OVER_EXT, VK_BLEND_OP_SRC_IN_EXT, VK_BLEND_OP_DST_IN_EXT, VK_BLEND_OP_SRC_OUT_EXT, VK_BLEND_OP_DST_OUT_EXT, VK_BLEND_OP_SRC_ATOP_EXT, VK_BLEND_OP_DST_ATOP_EXT, VK_BLEND_OP_XOR_EXT, VK_BLEND_OP_INVERT_EXT, VK_BLEND_OP_INVERT_RGB_EXT, VK_BLEND_OP_LINEARDODGE_EXT, VK_BLEND_OP_LINEARBURN_EXT, VK_BLEND_OP_VIVIDLIGHT_EXT, VK_BLEND_OP_LINEARLIGHT_EXT, VK_BLEND_OP_PINLIGHT_EXT, VK_BLEND_OP_HARDMIX_EXT, VK_BLEND_OP_PLUS_EXT, VK_BLEND_OP_PLUS_CLAMPED_EXT, VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT, VK_BLEND_OP_PLUS_DARKER_EXT, VK_BLEND_OP_MINUS_EXT, VK_BLEND_OP_MINUS_CLAMPED_EXT, VK_BLEND_OP_CONTRAST_EXT, VK_BLEND_OP_INVERT_OVG_EXT, VK_BLEND_OP_RED_EXT, VK_BLEND_OP_GREEN_EXT, or VK_BLEND_OP_BLUE_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendAttachmentState-advancedBlendAllOperations-01409)"},
+    {"VUID-VkPipelineColorBlendAttachmentState-advancedBlendIndependentBlend-01407", "If VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT::advancedBlendIndependentBlend is VK_FALSE and colorBlendOp is an advanced blend operation, then colorBlendOp must be the same for all attachments. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendAttachmentState-advancedBlendIndependentBlend-01407)"},
+    {"VUID-VkPipelineColorBlendAttachmentState-advancedBlendIndependentBlend-01408", "If VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT::advancedBlendIndependentBlend is VK_FALSE and alphaBlendOp is an advanced blend operation, then alphaBlendOp must be the same for all attachments. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendAttachmentState-advancedBlendIndependentBlend-01408)"},
+    {"VUID-VkPipelineColorBlendAttachmentState-alphaBlendOp-parameter", "alphaBlendOp must be a valid VkBlendOp value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendAttachmentState-alphaBlendOp-parameter)"},
+    {"VUID-VkPipelineColorBlendAttachmentState-colorBlendOp-01406", "If either of colorBlendOp or alphaBlendOp is an advanced blend operation, then colorBlendOp must equal alphaBlendOp (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendAttachmentState-colorBlendOp-01406)"},
+    {"VUID-VkPipelineColorBlendAttachmentState-colorBlendOp-01410", "If colorBlendOp or alphaBlendOp is an advanced blend operation, then VkSubpassDescription::colorAttachmentCount of the subpass this pipeline is compiled against must be less than or equal to VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT::advancedBlendMaxColorAttachments (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendAttachmentState-colorBlendOp-01410)"},
+    {"VUID-VkPipelineColorBlendAttachmentState-colorBlendOp-parameter", "colorBlendOp must be a valid VkBlendOp value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendAttachmentState-colorBlendOp-parameter)"},
+    {"VUID-VkPipelineColorBlendAttachmentState-colorWriteMask-parameter", "colorWriteMask must be a valid combination of VkColorComponentFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendAttachmentState-colorWriteMask-parameter)"},
+    {"VUID-VkPipelineColorBlendAttachmentState-dstAlphaBlendFactor-00611", "If the dual source blending feature is not enabled, dstAlphaBlendFactor must not be VK_BLEND_FACTOR_SRC1_COLOR, VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR, VK_BLEND_FACTOR_SRC1_ALPHA, or VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendAttachmentState-dstAlphaBlendFactor-00611)"},
+    {"VUID-VkPipelineColorBlendAttachmentState-dstAlphaBlendFactor-parameter", "dstAlphaBlendFactor must be a valid VkBlendFactor value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendAttachmentState-dstAlphaBlendFactor-parameter)"},
+    {"VUID-VkPipelineColorBlendAttachmentState-dstColorBlendFactor-00609", "If the dual source blending feature is not enabled, dstColorBlendFactor must not be VK_BLEND_FACTOR_SRC1_COLOR, VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR, VK_BLEND_FACTOR_SRC1_ALPHA, or VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendAttachmentState-dstColorBlendFactor-00609)"},
+    {"VUID-VkPipelineColorBlendAttachmentState-dstColorBlendFactor-parameter", "dstColorBlendFactor must be a valid VkBlendFactor value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendAttachmentState-dstColorBlendFactor-parameter)"},
+    {"VUID-VkPipelineColorBlendAttachmentState-srcAlphaBlendFactor-00610", "If the dual source blending feature is not enabled, srcAlphaBlendFactor must not be VK_BLEND_FACTOR_SRC1_COLOR, VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR, VK_BLEND_FACTOR_SRC1_ALPHA, or VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendAttachmentState-srcAlphaBlendFactor-00610)"},
+    {"VUID-VkPipelineColorBlendAttachmentState-srcAlphaBlendFactor-parameter", "srcAlphaBlendFactor must be a valid VkBlendFactor value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendAttachmentState-srcAlphaBlendFactor-parameter)"},
+    {"VUID-VkPipelineColorBlendAttachmentState-srcColorBlendFactor-00608", "If the dual source blending feature is not enabled, srcColorBlendFactor must not be VK_BLEND_FACTOR_SRC1_COLOR, VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR, VK_BLEND_FACTOR_SRC1_ALPHA, or VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendAttachmentState-srcColorBlendFactor-00608)"},
+    {"VUID-VkPipelineColorBlendAttachmentState-srcColorBlendFactor-parameter", "srcColorBlendFactor must be a valid VkBlendFactor value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendAttachmentState-srcColorBlendFactor-parameter)"},
+    {"VUID-VkPipelineColorBlendStateCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendStateCreateInfo-flags-zerobitmask)"},
+    {"VUID-VkPipelineColorBlendStateCreateInfo-logicOpEnable-00606", "If the logic operations feature is not enabled, logicOpEnable must be VK_FALSE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendStateCreateInfo-logicOpEnable-00606)"},
+    {"VUID-VkPipelineColorBlendStateCreateInfo-logicOpEnable-00607", "If logicOpEnable is VK_TRUE, logicOp must be a valid VkLogicOp value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendStateCreateInfo-logicOpEnable-00607)"},
+    {"VUID-VkPipelineColorBlendStateCreateInfo-pAttachments-00605", "If the independent blending feature is not enabled, all elements of pAttachments must be identical (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendStateCreateInfo-pAttachments-00605)"},
+    {"VUID-VkPipelineColorBlendStateCreateInfo-pAttachments-parameter", "If attachmentCount is not 0, pAttachments must be a valid pointer to an array of attachmentCount valid VkPipelineColorBlendAttachmentState structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendStateCreateInfo-pAttachments-parameter)"},
+    {"VUID-VkPipelineColorBlendStateCreateInfo-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkPipelineColorBlendAdvancedStateCreateInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendStateCreateInfo-pNext-pNext)"},
+    {"VUID-VkPipelineColorBlendStateCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineColorBlendStateCreateInfo-sType-sType)"},
+    {"VUID-VkPipelineCompilerControlCreateInfoAMD-compilerControlFlags-zerobitmask", "compilerControlFlags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCompilerControlCreateInfoAMD-compilerControlFlags-zerobitmask)"},
+    {"VUID-VkPipelineCompilerControlCreateInfoAMD-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCompilerControlCreateInfoAMD-sType-sType)"},
+    {"VUID-VkPipelineCoverageModulationStateCreateInfoNV-coverageModulationMode-parameter", "coverageModulationMode must be a valid VkCoverageModulationModeNV value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCoverageModulationStateCreateInfoNV-coverageModulationMode-parameter)"},
+    {"VUID-VkPipelineCoverageModulationStateCreateInfoNV-coverageModulationTableEnable-01405", "If coverageModulationTableEnable is VK_TRUE, coverageModulationTableCount must be equal to the number of rasterization samples divided by the number of color samples in the subpass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCoverageModulationStateCreateInfoNV-coverageModulationTableEnable-01405)"},
+    {"VUID-VkPipelineCoverageModulationStateCreateInfoNV-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCoverageModulationStateCreateInfoNV-flags-zerobitmask)"},
+    {"VUID-VkPipelineCoverageModulationStateCreateInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCoverageModulationStateCreateInfoNV-sType-sType)"},
+    {"VUID-VkPipelineCoverageReductionStateCreateInfoNV-coverageReductionMode-parameter", "coverageReductionMode must be a valid VkCoverageReductionModeNV value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCoverageReductionStateCreateInfoNV-coverageReductionMode-parameter)"},
+    {"VUID-VkPipelineCoverageReductionStateCreateInfoNV-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCoverageReductionStateCreateInfoNV-flags-zerobitmask)"},
+    {"VUID-VkPipelineCoverageReductionStateCreateInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCoverageReductionStateCreateInfoNV-sType-sType)"},
+    {"VUID-VkPipelineCoverageToColorStateCreateInfoNV-coverageToColorEnable-01404", "If coverageToColorEnable is VK_TRUE, then the render pass subpass indicated by VkGraphicsPipelineCreateInfo::renderPass and VkGraphicsPipelineCreateInfo::subpass must have a color attachment at the location selected by coverageToColorLocation, with a VkFormat of VK_FORMAT_R8_UINT, VK_FORMAT_R8_SINT, VK_FORMAT_R16_UINT, VK_FORMAT_R16_SINT, VK_FORMAT_R32_UINT, or VK_FORMAT_R32_SINT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCoverageToColorStateCreateInfoNV-coverageToColorEnable-01404)"},
+    {"VUID-VkPipelineCoverageToColorStateCreateInfoNV-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCoverageToColorStateCreateInfoNV-flags-zerobitmask)"},
+    {"VUID-VkPipelineCoverageToColorStateCreateInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCoverageToColorStateCreateInfoNV-sType-sType)"},
+    {"VUID-VkPipelineCreationFeedbackCreateInfoEXT-pPipelineCreationFeedback-parameter", "pPipelineCreationFeedback must be a valid pointer to a VkPipelineCreationFeedbackEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCreationFeedbackCreateInfoEXT-pPipelineCreationFeedback-parameter)"},
+    {"VUID-VkPipelineCreationFeedbackCreateInfoEXT-pPipelineStageCreationFeedbacks-parameter", "pPipelineStageCreationFeedbacks must be a valid pointer to an array of pipelineStageCreationFeedbackCount VkPipelineCreationFeedbackEXT structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCreationFeedbackCreateInfoEXT-pPipelineStageCreationFeedbacks-parameter)"},
+    {"VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02668", "When chained to VkGraphicsPipelineCreateInfo, VkPipelineCreationFeedbackEXT::pipelineStageCreationFeedbackCount must equal VkGraphicsPipelineCreateInfo::stageCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02668)"},
+    {"VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02669", "When chained to VkComputePipelineCreateInfo, VkPipelineCreationFeedbackEXT::pipelineStageCreationFeedbackCount must equal 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02669)"},
+    {"VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02670", "When chained to VkRayTracingPipelineCreateInfoNV, VkPipelineCreationFeedbackEXT::pipelineStageCreationFeedbackCount must equal VkRayTracingPipelineCreateInfoNV::stageCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02670)"},
+    {"VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-arraylength", "pipelineStageCreationFeedbackCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-arraylength)"},
+    {"VUID-VkPipelineCreationFeedbackCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineCreationFeedbackCreateInfoEXT-sType-sType)"},
+    {"VUID-VkPipelineDepthStencilStateCreateInfo-back-parameter", "back must be a valid VkStencilOpState structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineDepthStencilStateCreateInfo-back-parameter)"},
+    {"VUID-VkPipelineDepthStencilStateCreateInfo-depthBoundsTestEnable-00598", "If the depth bounds testing feature is not enabled, depthBoundsTestEnable must be VK_FALSE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineDepthStencilStateCreateInfo-depthBoundsTestEnable-00598)"},
+    {"VUID-VkPipelineDepthStencilStateCreateInfo-depthCompareOp-parameter", "depthCompareOp must be a valid VkCompareOp value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineDepthStencilStateCreateInfo-depthCompareOp-parameter)"},
+    {"VUID-VkPipelineDepthStencilStateCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineDepthStencilStateCreateInfo-flags-zerobitmask)"},
+    {"VUID-VkPipelineDepthStencilStateCreateInfo-front-parameter", "front must be a valid VkStencilOpState structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineDepthStencilStateCreateInfo-front-parameter)"},
+    {"VUID-VkPipelineDepthStencilStateCreateInfo-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineDepthStencilStateCreateInfo-pNext-pNext)"},
+    {"VUID-VkPipelineDepthStencilStateCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineDepthStencilStateCreateInfo-sType-sType)"},
+    {"VUID-VkPipelineDiscardRectangleStateCreateInfoEXT-discardRectangleCount-00582", "discardRectangleCount must be between 0 and VkPhysicalDeviceDiscardRectanglePropertiesEXT::maxDiscardRectangles, inclusive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineDiscardRectangleStateCreateInfoEXT-discardRectangleCount-00582)"},
+    {"VUID-VkPipelineDiscardRectangleStateCreateInfoEXT-discardRectangleMode-parameter", "discardRectangleMode must be a valid VkDiscardRectangleModeEXT value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineDiscardRectangleStateCreateInfoEXT-discardRectangleMode-parameter)"},
+    {"VUID-VkPipelineDiscardRectangleStateCreateInfoEXT-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineDiscardRectangleStateCreateInfoEXT-flags-zerobitmask)"},
+    {"VUID-VkPipelineDiscardRectangleStateCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineDiscardRectangleStateCreateInfoEXT-sType-sType)"},
+    {"VUID-VkPipelineDynamicStateCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineDynamicStateCreateInfo-flags-zerobitmask)"},
+    {"VUID-VkPipelineDynamicStateCreateInfo-pDynamicStates-01442", "Each element of pDynamicStates must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineDynamicStateCreateInfo-pDynamicStates-01442)"},
+    {"VUID-VkPipelineDynamicStateCreateInfo-pDynamicStates-parameter", "If dynamicStateCount is not 0, pDynamicStates must be a valid pointer to an array of dynamicStateCount valid VkDynamicState values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineDynamicStateCreateInfo-pDynamicStates-parameter)"},
+    {"VUID-VkPipelineDynamicStateCreateInfo-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineDynamicStateCreateInfo-pNext-pNext)"},
+    {"VUID-VkPipelineDynamicStateCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineDynamicStateCreateInfo-sType-sType)"},
+    {"VUID-VkPipelineExecutableInfoKHR-executableIndex-03275", "executableIndex must be less than the number of executables associated with pipeline as returned in the pExecutableCount parameter of vkGetPipelineExecutablePropertiesKHR. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineExecutableInfoKHR-executableIndex-03275)"},
+    {"VUID-VkPipelineExecutableInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineExecutableInfoKHR-pNext-pNext)"},
+    {"VUID-VkPipelineExecutableInfoKHR-pipeline-parameter", "pipeline must be a valid VkPipeline handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineExecutableInfoKHR-pipeline-parameter)"},
+    {"VUID-VkPipelineExecutableInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineExecutableInfoKHR-sType-sType)"},
+    {"VUID-VkPipelineExecutableInternalRepresentationKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineExecutableInternalRepresentationKHR-pNext-pNext)"},
+    {"VUID-VkPipelineExecutableInternalRepresentationKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineExecutableInternalRepresentationKHR-sType-sType)"},
+    {"VUID-VkPipelineExecutablePropertiesKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineExecutablePropertiesKHR-pNext-pNext)"},
+    {"VUID-VkPipelineExecutablePropertiesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineExecutablePropertiesKHR-sType-sType)"},
+    {"VUID-VkPipelineExecutableStatisticKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineExecutableStatisticKHR-pNext-pNext)"},
+    {"VUID-VkPipelineExecutableStatisticKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineExecutableStatisticKHR-sType-sType)"},
+    {"VUID-VkPipelineInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineInfoKHR-pNext-pNext)"},
+    {"VUID-VkPipelineInfoKHR-pipeline-parameter", "pipeline must be a valid VkPipeline handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineInfoKHR-pipeline-parameter)"},
+    {"VUID-VkPipelineInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineInfoKHR-sType-sType)"},
+    {"VUID-VkPipelineInputAssemblyStateCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineInputAssemblyStateCreateInfo-flags-zerobitmask)"},
+    {"VUID-VkPipelineInputAssemblyStateCreateInfo-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineInputAssemblyStateCreateInfo-pNext-pNext)"},
+    {"VUID-VkPipelineInputAssemblyStateCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineInputAssemblyStateCreateInfo-sType-sType)"},
+    {"VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00428", "If topology is VK_PRIMITIVE_TOPOLOGY_POINT_LIST, VK_PRIMITIVE_TOPOLOGY_LINE_LIST, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY or VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, primitiveRestartEnable must be VK_FALSE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00428)"},
+    {"VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00429", "If the geometry shaders feature is not enabled, topology must not be any of VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY, VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY or VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00429)"},
+    {"VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00430", "If the tessellation shaders feature is not enabled, topology must not be VK_PRIMITIVE_TOPOLOGY_PATCH_LIST (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00430)"},
+    {"VUID-VkPipelineInputAssemblyStateCreateInfo-topology-parameter", "topology must be a valid VkPrimitiveTopology value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineInputAssemblyStateCreateInfo-topology-parameter)"},
+    {"VUID-VkPipelineLayoutCreateInfo-descriptorType-02212", "The total number of bindings with a descriptorType of VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceInlineUniformBlockPropertiesEXT::maxPerStageDescriptorInlineUniformBlocks (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-descriptorType-02212)"},
+    {"VUID-VkPipelineLayoutCreateInfo-descriptorType-02213", "The total number of bindings with a descriptorType of VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceInlineUniformBlockPropertiesEXT::maxDescriptorSetInlineUniformBlocks (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-descriptorType-02213)"},
+    {"VUID-VkPipelineLayoutCreateInfo-descriptorType-02214", "The total number of bindings in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceInlineUniformBlockPropertiesEXT::maxPerStageDescriptorInlineUniformBlocks (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-descriptorType-02214)"},
+    {"VUID-VkPipelineLayoutCreateInfo-descriptorType-02215", "The total number of bindings with a descriptorType of VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceInlineUniformBlockPropertiesEXT::maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-descriptorType-02215)"},
+    {"VUID-VkPipelineLayoutCreateInfo-descriptorType-02216", "The total number of bindings in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceInlineUniformBlockPropertiesEXT::maxDescriptorSetInlineUniformBlocks (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-descriptorType-02216)"},
+    {"VUID-VkPipelineLayoutCreateInfo-descriptorType-02217", "The total number of bindings with a descriptorType of VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceInlineUniformBlockPropertiesEXT::maxDescriptorSetUpdateAfterBindInlineUniformBlocks (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-descriptorType-02217)"},
+    {"VUID-VkPipelineLayoutCreateInfo-descriptorType-02381", "The total number of bindings with a descriptorType of VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxDescriptorSetAccelerationStructures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-descriptorType-02381)"},
+    {"VUID-VkPipelineLayoutCreateInfo-descriptorType-03016", "The total number of descriptors in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_SAMPLER and VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxPerStageDescriptorSamplers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-descriptorType-03016)"},
+    {"VUID-VkPipelineLayoutCreateInfo-descriptorType-03017", "The total number of descriptors in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER and VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxPerStageDescriptorUniformBuffers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-descriptorType-03017)"},
+    {"VUID-VkPipelineLayoutCreateInfo-descriptorType-03018", "The total number of descriptors in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_STORAGE_BUFFER and VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxPerStageDescriptorStorageBuffers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-descriptorType-03018)"},
+    {"VUID-VkPipelineLayoutCreateInfo-descriptorType-03019", "The total number of descriptors in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, and VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxPerStageDescriptorSampledImages (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-descriptorType-03019)"},
+    {"VUID-VkPipelineLayoutCreateInfo-descriptorType-03020", "The total number of descriptors in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, and VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxPerStageDescriptorStorageImages (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-descriptorType-03020)"},
+    {"VUID-VkPipelineLayoutCreateInfo-descriptorType-03021", "The total number of descriptors in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxPerStageDescriptorInputAttachments (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-descriptorType-03021)"},
+    {"VUID-VkPipelineLayoutCreateInfo-descriptorType-03022", "The total number of descriptors with a descriptorType of VK_DESCRIPTOR_TYPE_SAMPLER and VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceDescriptorIndexingPropertiesEXT::maxPerStageDescriptorUpdateAfterBindSamplers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-descriptorType-03022)"},
+    {"VUID-VkPipelineLayoutCreateInfo-descriptorType-03023", "The total number of descriptors with a descriptorType of VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER and VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceDescriptorIndexingPropertiesEXT::maxPerStageDescriptorUpdateAfterBindUniformBuffers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-descriptorType-03023)"},
+    {"VUID-VkPipelineLayoutCreateInfo-descriptorType-03024", "The total number of descriptors with a descriptorType of VK_DESCRIPTOR_TYPE_STORAGE_BUFFER and VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceDescriptorIndexingPropertiesEXT::maxPerStageDescriptorUpdateAfterBindStorageBuffers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-descriptorType-03024)"},
+    {"VUID-VkPipelineLayoutCreateInfo-descriptorType-03025", "The total number of descriptors with a descriptorType of VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, and VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceDescriptorIndexingPropertiesEXT::maxPerStageDescriptorUpdateAfterBindSampledImages (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-descriptorType-03025)"},
+    {"VUID-VkPipelineLayoutCreateInfo-descriptorType-03026", "The total number of descriptors with a descriptorType of VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, and VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceDescriptorIndexingPropertiesEXT::maxPerStageDescriptorUpdateAfterBindStorageImages (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-descriptorType-03026)"},
+    {"VUID-VkPipelineLayoutCreateInfo-descriptorType-03027", "The total number of descriptors with a descriptorType of VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceDescriptorIndexingPropertiesEXT::maxPerStageDescriptorUpdateAfterBindInputAttachments (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-descriptorType-03027)"},
+    {"VUID-VkPipelineLayoutCreateInfo-descriptorType-03028", "The total number of descriptors in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_SAMPLER and VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetSamplers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-descriptorType-03028)"},
+    {"VUID-VkPipelineLayoutCreateInfo-descriptorType-03029", "The total number of descriptors in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetUniformBuffers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-descriptorType-03029)"},
+    {"VUID-VkPipelineLayoutCreateInfo-descriptorType-03030", "The total number of descriptors in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetUniformBuffersDynamic (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-descriptorType-03030)"},
+    {"VUID-VkPipelineLayoutCreateInfo-descriptorType-03031", "The total number of descriptors in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_STORAGE_BUFFER accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetStorageBuffers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-descriptorType-03031)"},
+    {"VUID-VkPipelineLayoutCreateInfo-descriptorType-03032", "The total number of descriptors in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetStorageBuffersDynamic (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-descriptorType-03032)"},
+    {"VUID-VkPipelineLayoutCreateInfo-descriptorType-03033", "The total number of descriptors in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, and VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetSampledImages (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-descriptorType-03033)"},
+    {"VUID-VkPipelineLayoutCreateInfo-descriptorType-03034", "The total number of descriptors in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, and VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetStorageImages (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-descriptorType-03034)"},
+    {"VUID-VkPipelineLayoutCreateInfo-descriptorType-03035", "The total number of descriptors in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetInputAttachments (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-descriptorType-03035)"},
+    {"VUID-VkPipelineLayoutCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-flags-zerobitmask)"},
+    {"VUID-VkPipelineLayoutCreateInfo-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-pNext-pNext)"},
+    {"VUID-VkPipelineLayoutCreateInfo-pPushConstantRanges-00292", "Any two elements of pPushConstantRanges must not include the same stage in stageFlags (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-pPushConstantRanges-00292)"},
+    {"VUID-VkPipelineLayoutCreateInfo-pPushConstantRanges-parameter", "If pushConstantRangeCount is not 0, pPushConstantRanges must be a valid pointer to an array of pushConstantRangeCount valid VkPushConstantRange structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-pPushConstantRanges-parameter)"},
+    {"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00287", "The total number of descriptors of the type VK_DESCRIPTOR_TYPE_SAMPLER and VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER accessible to any shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxPerStageDescriptorSamplers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00287)"},
+    {"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00288", "The total number of descriptors of the type VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER and VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC accessible to any shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxPerStageDescriptorUniformBuffers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00288)"},
+    {"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00289", "The total number of descriptors of the type VK_DESCRIPTOR_TYPE_STORAGE_BUFFER and VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC accessible to any shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxPerStageDescriptorStorageBuffers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00289)"},
+    {"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00290", "The total number of descriptors of the type VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, and VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER accessible to any shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxPerStageDescriptorSampledImages (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00290)"},
+    {"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00291", "The total number of descriptors of the type VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, and VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER accessible to any shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxPerStageDescriptorStorageImages (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00291)"},
+    {"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00293", "pSetLayouts must not contain more than one descriptor set layout that was created with VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00293)"},
+    {"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01676", "The total number of descriptors of the type VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxPerStageDescriptorInputAttachments (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01676)"},
+    {"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01677", "The total number of descriptors of the type VK_DESCRIPTOR_TYPE_SAMPLER and VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetSamplers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01677)"},
+    {"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01678", "The total number of descriptors of the type VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetUniformBuffers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01678)"},
+    {"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01679", "The total number of descriptors of the type VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetUniformBuffersDynamic (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01679)"},
+    {"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01680", "The total number of descriptors of the type VK_DESCRIPTOR_TYPE_STORAGE_BUFFER accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetStorageBuffers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01680)"},
+    {"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01681", "The total number of descriptors of the type VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetStorageBuffersDynamic (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01681)"},
+    {"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01682", "The total number of descriptors of the type VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, and VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetSampledImages (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01682)"},
+    {"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01683", "The total number of descriptors of the type VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, and VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetStorageImages (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01683)"},
+    {"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01684", "The total number of descriptors of the type VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetInputAttachments (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01684)"},
+    {"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03036", "The total number of descriptors of the type VK_DESCRIPTOR_TYPE_SAMPLER and VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceDescriptorIndexingPropertiesEXT::maxDescriptorSetUpdateAfterBindSamplers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03036)"},
+    {"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03037", "The total number of descriptors of the type VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceDescriptorIndexingPropertiesEXT::maxDescriptorSetUpdateAfterBindUniformBuffers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03037)"},
+    {"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03038", "The total number of descriptors of the type VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceDescriptorIndexingPropertiesEXT::maxDescriptorSetUpdateAfterBindUniformBuffersDynamic (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03038)"},
+    {"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03039", "The total number of descriptors of the type VK_DESCRIPTOR_TYPE_STORAGE_BUFFER accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceDescriptorIndexingPropertiesEXT::maxDescriptorSetUpdateAfterBindStorageBuffers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03039)"},
+    {"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03040", "The total number of descriptors of the type VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceDescriptorIndexingPropertiesEXT::maxDescriptorSetUpdateAfterBindStorageBuffersDynamic (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03040)"},
+    {"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03041", "The total number of descriptors of the type VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, and VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceDescriptorIndexingPropertiesEXT::maxDescriptorSetUpdateAfterBindSampledImages (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03041)"},
+    {"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03042", "The total number of descriptors of the type VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, and VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceDescriptorIndexingPropertiesEXT::maxDescriptorSetUpdateAfterBindStorageImages (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03042)"},
+    {"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03043", "The total number of descriptors of the type VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceDescriptorIndexingPropertiesEXT::maxDescriptorSetUpdateAfterBindInputAttachments (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03043)"},
+    {"VUID-VkPipelineLayoutCreateInfo-pSetLayouts-parameter", "If setLayoutCount is not 0, pSetLayouts must be a valid pointer to an array of setLayoutCount valid VkDescriptorSetLayout handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-pSetLayouts-parameter)"},
+    {"VUID-VkPipelineLayoutCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-sType-sType)"},
+    {"VUID-VkPipelineLayoutCreateInfo-setLayoutCount-00286", "setLayoutCount must be less than or equal to VkPhysicalDeviceLimits::maxBoundDescriptorSets (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineLayoutCreateInfo-setLayoutCount-00286)"},
+    {"VUID-VkPipelineMultisampleStateCreateInfo-alphaToOneEnable-00785", "If the alpha to one feature is not enabled, alphaToOneEnable must be VK_FALSE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineMultisampleStateCreateInfo-alphaToOneEnable-00785)"},
+    {"VUID-VkPipelineMultisampleStateCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineMultisampleStateCreateInfo-flags-zerobitmask)"},
+    {"VUID-VkPipelineMultisampleStateCreateInfo-minSampleShading-00786", "minSampleShading must be in the range [0,1] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineMultisampleStateCreateInfo-minSampleShading-00786)"},
+    {"VUID-VkPipelineMultisampleStateCreateInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPipelineCoverageModulationStateCreateInfoNV, VkPipelineCoverageReductionStateCreateInfoNV, VkPipelineCoverageToColorStateCreateInfoNV, or VkPipelineSampleLocationsStateCreateInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineMultisampleStateCreateInfo-pNext-pNext)"},
+    {"VUID-VkPipelineMultisampleStateCreateInfo-pSampleMask-parameter", "If pSampleMask is not NULL, pSampleMask must be a valid pointer to an array of (rasterizationSamples/32) VkSampleMask values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineMultisampleStateCreateInfo-pSampleMask-parameter)"},
+    {"VUID-VkPipelineMultisampleStateCreateInfo-rasterizationSamples-01415", "If the VK_NV_framebuffer_mixed_samples extension is enabled, and if the subpass has any color attachments and rasterizationSamples is greater than the number of color samples, then sampleShadingEnable must be VK_FALSE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineMultisampleStateCreateInfo-rasterizationSamples-01415)"},
+    {"VUID-VkPipelineMultisampleStateCreateInfo-rasterizationSamples-parameter", "rasterizationSamples must be a valid VkSampleCountFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineMultisampleStateCreateInfo-rasterizationSamples-parameter)"},
+    {"VUID-VkPipelineMultisampleStateCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineMultisampleStateCreateInfo-sType-sType)"},
+    {"VUID-VkPipelineMultisampleStateCreateInfo-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineMultisampleStateCreateInfo-sType-unique)"},
+    {"VUID-VkPipelineMultisampleStateCreateInfo-sampleShadingEnable-00784", "If the sample rate shading feature is not enabled, sampleShadingEnable must be VK_FALSE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineMultisampleStateCreateInfo-sampleShadingEnable-00784)"},
+    {"VUID-VkPipelineRasterizationConservativeStateCreateInfoEXT-conservativeRasterizationMode-parameter", "conservativeRasterizationMode must be a valid VkConservativeRasterizationModeEXT value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationConservativeStateCreateInfoEXT-conservativeRasterizationMode-parameter)"},
+    {"VUID-VkPipelineRasterizationConservativeStateCreateInfoEXT-extraPrimitiveOverestimationSize-01769", "extraPrimitiveOverestimationSize must be in the range of 0.0 to VkPhysicalDeviceConservativeRasterizationPropertiesEXT::maxExtraPrimitiveOverestimationSize inclusive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationConservativeStateCreateInfoEXT-extraPrimitiveOverestimationSize-01769)"},
+    {"VUID-VkPipelineRasterizationConservativeStateCreateInfoEXT-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationConservativeStateCreateInfoEXT-flags-zerobitmask)"},
+    {"VUID-VkPipelineRasterizationConservativeStateCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationConservativeStateCreateInfoEXT-sType-sType)"},
+    {"VUID-VkPipelineRasterizationDepthClipStateCreateInfoEXT-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationDepthClipStateCreateInfoEXT-flags-zerobitmask)"},
+    {"VUID-VkPipelineRasterizationDepthClipStateCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationDepthClipStateCreateInfoEXT-sType-sType)"},
+    {"VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-02768", "If lineRasterizationMode is VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT, then the rectangularLines feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-02768)"},
+    {"VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-02769", "If lineRasterizationMode is VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT, then the bresenhamLines feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-02769)"},
+    {"VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-02770", "If lineRasterizationMode is VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT, then the smoothLines feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-02770)"},
+    {"VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-parameter", "lineRasterizationMode must be a valid VkLineRasterizationModeEXT value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-parameter)"},
+    {"VUID-VkPipelineRasterizationLineStateCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationLineStateCreateInfoEXT-sType-sType)"},
+    {"VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02771", "If stippledLineEnable is VK_TRUE and lineRasterizationMode is VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT, then the stippledRectangularLines feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02771)"},
+    {"VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02772", "If stippledLineEnable is VK_TRUE and lineRasterizationMode is VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT, then the stippledBresenhamLines feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02772)"},
+    {"VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02773", "If stippledLineEnable is VK_TRUE and lineRasterizationMode is VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT, then the stippledSmoothLines feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02773)"},
+    {"VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02774", "If stippledLineEnable is VK_TRUE and lineRasterizationMode is VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT, then the stippledRectangularLines feature must be enabled and VkPhysicalDeviceLimits::strictLines must be VK_TRUE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02774)"},
+    {"VUID-VkPipelineRasterizationStateCreateInfo-cullMode-parameter", "cullMode must be a valid combination of VkCullModeFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationStateCreateInfo-cullMode-parameter)"},
+    {"VUID-VkPipelineRasterizationStateCreateInfo-depthClampEnable-00782", "If the depth clamping feature is not enabled, depthClampEnable must be VK_FALSE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationStateCreateInfo-depthClampEnable-00782)"},
+    {"VUID-VkPipelineRasterizationStateCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationStateCreateInfo-flags-zerobitmask)"},
+    {"VUID-VkPipelineRasterizationStateCreateInfo-frontFace-parameter", "frontFace must be a valid VkFrontFace value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationStateCreateInfo-frontFace-parameter)"},
+    {"VUID-VkPipelineRasterizationStateCreateInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPipelineRasterizationConservativeStateCreateInfoEXT, VkPipelineRasterizationDepthClipStateCreateInfoEXT, VkPipelineRasterizationLineStateCreateInfoEXT, VkPipelineRasterizationStateRasterizationOrderAMD, or VkPipelineRasterizationStateStreamCreateInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationStateCreateInfo-pNext-pNext)"},
+    {"VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01413", "If the non-solid fill modes feature is not enabled, polygonMode must be VK_POLYGON_MODE_FILL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01413)"},
+    {"VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01414", "If the VK_NV_fill_rectangle extension is not enabled, polygonMode must not be VK_POLYGON_MODE_FILL_RECTANGLE_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01414)"},
+    {"VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01507", "If the non-solid fill modes feature is not enabled, polygonMode must be VK_POLYGON_MODE_FILL or VK_POLYGON_MODE_FILL_RECTANGLE_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01507)"},
+    {"VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-parameter", "polygonMode must be a valid VkPolygonMode value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-parameter)"},
+    {"VUID-VkPipelineRasterizationStateCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationStateCreateInfo-sType-sType)"},
+    {"VUID-VkPipelineRasterizationStateCreateInfo-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationStateCreateInfo-sType-unique)"},
+    {"VUID-VkPipelineRasterizationStateRasterizationOrderAMD-rasterizationOrder-parameter", "rasterizationOrder must be a valid VkRasterizationOrderAMD value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationStateRasterizationOrderAMD-rasterizationOrder-parameter)"},
+    {"VUID-VkPipelineRasterizationStateRasterizationOrderAMD-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationStateRasterizationOrderAMD-sType-sType)"},
+    {"VUID-VkPipelineRasterizationStateStreamCreateInfoEXT-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationStateStreamCreateInfoEXT-flags-zerobitmask)"},
+    {"VUID-VkPipelineRasterizationStateStreamCreateInfoEXT-geometryStreams-02324", "VkPhysicalDeviceTransformFeedbackFeaturesEXT::geometryStreams must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationStateStreamCreateInfoEXT-geometryStreams-02324)"},
+    {"VUID-VkPipelineRasterizationStateStreamCreateInfoEXT-rasterizationStream-02325", "rasterizationStream must be less than VkPhysicalDeviceTransformFeedbackPropertiesEXT::maxTransformFeedbackStreams (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationStateStreamCreateInfoEXT-rasterizationStream-02325)"},
+    {"VUID-VkPipelineRasterizationStateStreamCreateInfoEXT-rasterizationStream-02326", "rasterizationStream must be zero if VkPhysicalDeviceTransformFeedbackPropertiesEXT::transformFeedbackRasterizationStreamSelect is VK_FALSE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationStateStreamCreateInfoEXT-rasterizationStream-02326)"},
+    {"VUID-VkPipelineRasterizationStateStreamCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRasterizationStateStreamCreateInfoEXT-sType-sType)"},
+    {"VUID-VkPipelineRepresentativeFragmentTestStateCreateInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineRepresentativeFragmentTestStateCreateInfoNV-sType-sType)"},
+    {"VUID-VkPipelineSampleLocationsStateCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineSampleLocationsStateCreateInfoEXT-sType-sType)"},
+    {"VUID-VkPipelineSampleLocationsStateCreateInfoEXT-sampleLocationsInfo-parameter", "sampleLocationsInfo must be a valid VkSampleLocationsInfoEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineSampleLocationsStateCreateInfoEXT-sampleLocationsInfo-parameter)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-flags-02758", "If flags has both the VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT and VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT flags set, the local workgroup size in the X dimension of the pipeline must be a multiple of maxSubgroupSize. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-flags-02758)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-flags-02759", "If flags has the VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT flag set and flags does not have the VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT flag set and no VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT structure is included in the pNext chain, the local workgroup size in the X dimension of the pipeline must be a multiple of subgroupSize. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-flags-02759)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-flags-02784", "If flags has the VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT flag set, the subgroupSizeControl feature must be enabled. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-flags-02784)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-flags-02785", "If flags has the VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT flag set, the computeFullSubgroups feature must be enabled. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-flags-02785)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-flags-parameter", "flags must be a valid combination of VkPipelineShaderStageCreateFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-flags-parameter)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-maxClipDistances-00708", "If the identified entry point includes any variable in its interface that is declared with the ClipDistance BuiltIn decoration, that variable must not have an array size greater than VkPhysicalDeviceLimits::maxClipDistances (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-maxClipDistances-00708)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-maxCombinedClipAndCullDistances-00710", "If the identified entry point includes any variables in its interface that are declared with the ClipDistance or CullDistance BuiltIn decoration, those variables must not have array sizes which sum to more than VkPhysicalDeviceLimits::maxCombinedClipAndCullDistances (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-maxCombinedClipAndCullDistances-00710)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-maxCullDistances-00709", "If the identified entry point includes any variable in its interface that is declared with the CullDistance BuiltIn decoration, that variable must not have an array size greater than VkPhysicalDeviceLimits::maxCullDistances (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-maxCullDistances-00709)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-maxSampleMaskWords-00711", "If the identified entry point includes any variable in its interface that is declared with the SampleMask BuiltIn decoration, that variable must not have an array size greater than VkPhysicalDeviceLimits::maxSampleMaskWords (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-maxSampleMaskWords-00711)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-module-parameter", "module must be a valid VkShaderModule handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-module-parameter)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-pName-00707", "pName must be the name of an OpEntryPoint in module with an execution model that matches stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-pName-00707)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-pName-parameter", "pName must be a null-terminated UTF-8 string (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-pName-parameter)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-pNext-02754", "If a VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT structure is included in the pNext chain, flags must not have the VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT flag set. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-pNext-02754)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-pNext-02755", "If a VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT structure is included in the pNext chain, the subgroupSizeControl feature must be enabled, and stage must be a valid bit specified in requiredSubgroupSizeStages. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-pNext-02755)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-pNext-02756", "If a VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT structure is included in the pNext chain and stage is VK_SHADER_STAGE_COMPUTE_BIT, the local workgroup size of the shader must be less than or equal to the product of VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT::requiredSubgroupSize and maxComputeWorkgroupSubgroups. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-pNext-02756)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-pNext-02757", "If a VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT structure is included in the pNext chain, and flags has the VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT flag set, the local workgroup size in the X dimension of the pipeline must be a multiple of VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT::requiredSubgroupSize. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-pNext-02757)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-pNext-pNext)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-pSpecializationInfo-parameter", "If pSpecializationInfo is not NULL, pSpecializationInfo must be a valid pointer to a valid VkSpecializationInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-pSpecializationInfo-parameter)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-sType-sType)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-stage-00704", "If the geometry shaders feature is not enabled, stage must not be VK_SHADER_STAGE_GEOMETRY_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-stage-00704)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-stage-00705", "If the tessellation shaders feature is not enabled, stage must not be VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT or VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-stage-00705)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-stage-00706", "stage must not be VK_SHADER_STAGE_ALL_GRAPHICS, or VK_SHADER_STAGE_ALL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-stage-00706)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-stage-00712", "If stage is VK_SHADER_STAGE_VERTEX_BIT, the identified entry point must not include any input variable in its interface that is decorated with CullDistance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-stage-00712)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-stage-00713", "If stage is VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT or VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, and the identified entry point has an OpExecutionMode instruction that specifies a patch size with OutputVertices, the patch size must be greater than 0 and less than or equal to VkPhysicalDeviceLimits::maxTessellationPatchSize (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-stage-00713)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-stage-00714", "If stage is VK_SHADER_STAGE_GEOMETRY_BIT, the identified entry point must have an OpExecutionMode instruction that specifies a maximum output vertex count that is greater than 0 and less than or equal to VkPhysicalDeviceLimits::maxGeometryOutputVertices (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-stage-00714)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-stage-00715", "If stage is VK_SHADER_STAGE_GEOMETRY_BIT, the identified entry point must have an OpExecutionMode instruction that specifies an invocation count that is greater than 0 and less than or equal to VkPhysicalDeviceLimits::maxGeometryShaderInvocations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-stage-00715)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-stage-00718", "If stage is VK_SHADER_STAGE_FRAGMENT_BIT, the identified entry point must not include any output variables in its interface decorated with CullDistance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-stage-00718)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-stage-00719", "If stage is VK_SHADER_STAGE_FRAGMENT_BIT, and the identified entry point writes to FragDepth in any execution path, it must write to FragDepth in all execution paths (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-stage-00719)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-stage-01511", "If stage is VK_SHADER_STAGE_FRAGMENT_BIT, and the identified entry point writes to FragStencilRefEXT in any execution path, it must write to FragStencilRefEXT in all execution paths (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-stage-01511)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-stage-02091", "If the mesh shader feature is not enabled, stage must not be VK_SHADER_STAGE_MESH_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-stage-02091)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-stage-02092", "If the task shader feature is not enabled, stage must not be VK_SHADER_STAGE_TASK_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-stage-02092)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-stage-02093", "If stage is VK_SHADER_STAGE_MESH_BIT_NV, the identified entry point must have an OpExecutionMode instruction that specifies a maximum output vertex count, OutputVertices, that is greater than 0 and less than or equal to VkPhysicalDeviceMeshShaderPropertiesNV::maxMeshOutputVertices. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-stage-02093)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-stage-02094", "If stage is VK_SHADER_STAGE_MESH_BIT_NV, the identified entry point must have an OpExecutionMode instruction that specifies a maximum output primitive count, OutputPrimitivesNV, that is greater than 0 and less than or equal to VkPhysicalDeviceMeshShaderPropertiesNV::maxMeshOutputPrimitives. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-stage-02094)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-stage-02596", "If stage is a vertex processing stage, and the identified entry point writes to Layer for any primitive, it must write the same value to Layer for all vertices of a given primitive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-stage-02596)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-stage-02597", "If stage is a vertex processing stage, and the identified entry point writes to ViewportIndex for any primitive, it must write the same value to ViewportIndex for all vertices of a given primitive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-stage-02597)"},
+    {"VUID-VkPipelineShaderStageCreateInfo-stage-parameter", "stage must be a valid VkShaderStageFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageCreateInfo-stage-parameter)"},
+    {"VUID-VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT-requiredSubgroupSize-02760", "requiredSubgroupSize must be a power-of-two integer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT-requiredSubgroupSize-02760)"},
+    {"VUID-VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT-requiredSubgroupSize-02761", "requiredSubgroupSize must be greater or equal to minSubgroupSize. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT-requiredSubgroupSize-02761)"},
+    {"VUID-VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT-requiredSubgroupSize-02762", "requiredSubgroupSize must be less than or equal to maxSubgroupSize. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT-requiredSubgroupSize-02762)"},
+    {"VUID-VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT-sType-sType)"},
+    {"VUID-VkPipelineTessellationDomainOriginStateCreateInfo-domainOrigin-parameter", "domainOrigin must be a valid VkTessellationDomainOrigin value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineTessellationDomainOriginStateCreateInfo-domainOrigin-parameter)"},
+    {"VUID-VkPipelineTessellationDomainOriginStateCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineTessellationDomainOriginStateCreateInfo-sType-sType)"},
+    {"VUID-VkPipelineTessellationStateCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineTessellationStateCreateInfo-flags-zerobitmask)"},
+    {"VUID-VkPipelineTessellationStateCreateInfo-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkPipelineTessellationDomainOriginStateCreateInfo (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineTessellationStateCreateInfo-pNext-pNext)"},
+    {"VUID-VkPipelineTessellationStateCreateInfo-patchControlPoints-01214", "patchControlPoints must be greater than zero and less than or equal to VkPhysicalDeviceLimits::maxTessellationPatchSize (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineTessellationStateCreateInfo-patchControlPoints-01214)"},
+    {"VUID-VkPipelineTessellationStateCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineTessellationStateCreateInfo-sType-sType)"},
+    {"VUID-VkPipelineVertexInputDivisorStateCreateInfoEXT-pVertexBindingDivisors-parameter", "pVertexBindingDivisors must be a valid pointer to an array of vertexBindingDivisorCount VkVertexInputBindingDivisorDescriptionEXT structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineVertexInputDivisorStateCreateInfoEXT-pVertexBindingDivisors-parameter)"},
+    {"VUID-VkPipelineVertexInputDivisorStateCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineVertexInputDivisorStateCreateInfoEXT-sType-sType)"},
+    {"VUID-VkPipelineVertexInputDivisorStateCreateInfoEXT-vertexBindingDivisorCount-arraylength", "vertexBindingDivisorCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineVertexInputDivisorStateCreateInfoEXT-vertexBindingDivisorCount-arraylength)"},
+    {"VUID-VkPipelineVertexInputStateCreateInfo-binding-00615", "For every binding specified by each element of pVertexAttributeDescriptions, a VkVertexInputBindingDescription must exist in pVertexBindingDescriptions with the same value of binding (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineVertexInputStateCreateInfo-binding-00615)"},
+    {"VUID-VkPipelineVertexInputStateCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineVertexInputStateCreateInfo-flags-zerobitmask)"},
+    {"VUID-VkPipelineVertexInputStateCreateInfo-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkPipelineVertexInputDivisorStateCreateInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineVertexInputStateCreateInfo-pNext-pNext)"},
+    {"VUID-VkPipelineVertexInputStateCreateInfo-pVertexAttributeDescriptions-00617", "All elements of pVertexAttributeDescriptions must describe distinct attribute locations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineVertexInputStateCreateInfo-pVertexAttributeDescriptions-00617)"},
+    {"VUID-VkPipelineVertexInputStateCreateInfo-pVertexAttributeDescriptions-parameter", "If vertexAttributeDescriptionCount is not 0, pVertexAttributeDescriptions must be a valid pointer to an array of vertexAttributeDescriptionCount valid VkVertexInputAttributeDescription structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineVertexInputStateCreateInfo-pVertexAttributeDescriptions-parameter)"},
+    {"VUID-VkPipelineVertexInputStateCreateInfo-pVertexBindingDescriptions-00616", "All elements of pVertexBindingDescriptions must describe distinct binding numbers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineVertexInputStateCreateInfo-pVertexBindingDescriptions-00616)"},
+    {"VUID-VkPipelineVertexInputStateCreateInfo-pVertexBindingDescriptions-parameter", "If vertexBindingDescriptionCount is not 0, pVertexBindingDescriptions must be a valid pointer to an array of vertexBindingDescriptionCount valid VkVertexInputBindingDescription structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineVertexInputStateCreateInfo-pVertexBindingDescriptions-parameter)"},
+    {"VUID-VkPipelineVertexInputStateCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineVertexInputStateCreateInfo-sType-sType)"},
+    {"VUID-VkPipelineVertexInputStateCreateInfo-vertexAttributeDescriptionCount-00614", "vertexAttributeDescriptionCount must be less than or equal to VkPhysicalDeviceLimits::maxVertexInputAttributes (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineVertexInputStateCreateInfo-vertexAttributeDescriptionCount-00614)"},
+    {"VUID-VkPipelineVertexInputStateCreateInfo-vertexBindingDescriptionCount-00613", "vertexBindingDescriptionCount must be less than or equal to VkPhysicalDeviceLimits::maxVertexInputBindings (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineVertexInputStateCreateInfo-vertexBindingDescriptionCount-00613)"},
+    {"VUID-VkPipelineViewportCoarseSampleOrderStateCreateInfoNV-pCustomSampleOrders-02234", "The array pCustomSampleOrders must not contain two structures with matching values for both the shadingRate and sampleCount members. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportCoarseSampleOrderStateCreateInfoNV-pCustomSampleOrders-02234)"},
+    {"VUID-VkPipelineViewportCoarseSampleOrderStateCreateInfoNV-pCustomSampleOrders-parameter", "If customSampleOrderCount is not 0, pCustomSampleOrders must be a valid pointer to an array of customSampleOrderCount valid VkCoarseSampleOrderCustomNV structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportCoarseSampleOrderStateCreateInfoNV-pCustomSampleOrders-parameter)"},
+    {"VUID-VkPipelineViewportCoarseSampleOrderStateCreateInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportCoarseSampleOrderStateCreateInfoNV-sType-sType)"},
+    {"VUID-VkPipelineViewportCoarseSampleOrderStateCreateInfoNV-sampleOrderType-02072", "If sampleOrderType is not VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV, customSamplerOrderCount must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportCoarseSampleOrderStateCreateInfoNV-sampleOrderType-02072)"},
+    {"VUID-VkPipelineViewportCoarseSampleOrderStateCreateInfoNV-sampleOrderType-parameter", "sampleOrderType must be a valid VkCoarseSampleOrderTypeNV value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportCoarseSampleOrderStateCreateInfoNV-sampleOrderType-parameter)"},
+    {"VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-exclusiveScissorCount-02027", "If the multiple viewports feature is not enabled, exclusiveScissorCount must be 0 or 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-exclusiveScissorCount-02027)"},
+    {"VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-exclusiveScissorCount-02028", "exclusiveScissorCount must be less than or equal to VkPhysicalDeviceLimits::maxViewports (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-exclusiveScissorCount-02028)"},
+    {"VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-exclusiveScissorCount-02029", "exclusiveScissorCount must be 0 or identical to the viewportCount member of VkPipelineViewportStateCreateInfo (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-exclusiveScissorCount-02029)"},
+    {"VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-pDynamicStates-02030", "If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV and exclusiveScissorCount is not 0, pExclusiveScissors must be a valid pointer to an array of exclusiveScissorCount VkRect2D structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-pDynamicStates-02030)"},
+    {"VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-pExclusiveScissors-parameter", "If exclusiveScissorCount is not 0, and pExclusiveScissors is not NULL, pExclusiveScissors must be a valid pointer to an array of exclusiveScissorCount VkRect2D structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-pExclusiveScissors-parameter)"},
+    {"VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-sType-sType)"},
+    {"VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-pDynamicStates-02057", "If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV, pShadingRatePalettes must be a valid pointer to an array of viewportCount VkShadingRatePaletteNV structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-pDynamicStates-02057)"},
+    {"VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-pShadingRatePalettes-parameter", "If viewportCount is not 0, and pShadingRatePalettes is not NULL, pShadingRatePalettes must be a valid pointer to an array of viewportCount valid VkShadingRatePaletteNV structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-pShadingRatePalettes-parameter)"},
+    {"VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-sType-sType)"},
+    {"VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-shadingRateImageEnable-02056", "If shadingRateImageEnable is VK_TRUE, viewportCount must be equal to the viewportCount member of VkPipelineViewportStateCreateInfo (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-shadingRateImageEnable-02056)"},
+    {"VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-viewportCount-02054", "If the multiple viewports feature is not enabled, viewportCount must be 0 or 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-viewportCount-02054)"},
+    {"VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-viewportCount-02055", "viewportCount must be less than or equal to VkPhysicalDeviceLimits::maxViewports (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-viewportCount-02055)"},
+    {"VUID-VkPipelineViewportStateCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportStateCreateInfo-flags-zerobitmask)"},
+    {"VUID-VkPipelineViewportStateCreateInfo-offset-02822", "Evaluation of (offset.x + extent.width) must not cause a signed integer addition overflow for any element of pScissors (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportStateCreateInfo-offset-02822)"},
+    {"VUID-VkPipelineViewportStateCreateInfo-offset-02823", "Evaluation of (offset.y + extent.height) must not cause a signed integer addition overflow for any element of pScissors (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportStateCreateInfo-offset-02823)"},
+    {"VUID-VkPipelineViewportStateCreateInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPipelineViewportCoarseSampleOrderStateCreateInfoNV, VkPipelineViewportExclusiveScissorStateCreateInfoNV, VkPipelineViewportShadingRateImageStateCreateInfoNV, VkPipelineViewportSwizzleStateCreateInfoNV, or VkPipelineViewportWScalingStateCreateInfoNV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportStateCreateInfo-pNext-pNext)"},
+    {"VUID-VkPipelineViewportStateCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportStateCreateInfo-sType-sType)"},
+    {"VUID-VkPipelineViewportStateCreateInfo-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportStateCreateInfo-sType-unique)"},
+    {"VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217", "If the multiple viewports feature is not enabled, scissorCount must be 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217)"},
+    {"VUID-VkPipelineViewportStateCreateInfo-scissorCount-01219", "scissorCount must be between 1 and VkPhysicalDeviceLimits::maxViewports, inclusive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportStateCreateInfo-scissorCount-01219)"},
+    {"VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220", "scissorCount and viewportCount must be identical (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220)"},
+    {"VUID-VkPipelineViewportStateCreateInfo-scissorCount-arraylength", "scissorCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportStateCreateInfo-scissorCount-arraylength)"},
+    {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216", "If the multiple viewports feature is not enabled, viewportCount must be 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216)"},
+    {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01218", "viewportCount must be between 1 and VkPhysicalDeviceLimits::maxViewports, inclusive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportStateCreateInfo-viewportCount-01218)"},
+    {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-arraylength", "viewportCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportStateCreateInfo-viewportCount-arraylength)"},
+    {"VUID-VkPipelineViewportStateCreateInfo-viewportWScalingEnable-01726", "If the viewportWScalingEnable member of a VkPipelineViewportWScalingStateCreateInfoNV structure included in the pNext chain is VK_TRUE, the viewportCount member of the VkPipelineViewportWScalingStateCreateInfoNV structure must be equal to viewportCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportStateCreateInfo-viewportWScalingEnable-01726)"},
+    {"VUID-VkPipelineViewportStateCreateInfo-x-02821", "The x and y members of offset member of any element of pScissors must be greater than or equal to 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportStateCreateInfo-x-02821)"},
+    {"VUID-VkPipelineViewportSwizzleStateCreateInfoNV-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportSwizzleStateCreateInfoNV-flags-zerobitmask)"},
+    {"VUID-VkPipelineViewportSwizzleStateCreateInfoNV-pViewportSwizzles-parameter", "pViewportSwizzles must be a valid pointer to an array of viewportCount valid VkViewportSwizzleNV structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportSwizzleStateCreateInfoNV-pViewportSwizzles-parameter)"},
+    {"VUID-VkPipelineViewportSwizzleStateCreateInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportSwizzleStateCreateInfoNV-sType-sType)"},
+    {"VUID-VkPipelineViewportSwizzleStateCreateInfoNV-viewportCount-01215", "viewportCount must match the viewportCount set in VkPipelineViewportStateCreateInfo (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportSwizzleStateCreateInfoNV-viewportCount-01215)"},
+    {"VUID-VkPipelineViewportSwizzleStateCreateInfoNV-viewportCount-arraylength", "viewportCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportSwizzleStateCreateInfoNV-viewportCount-arraylength)"},
+    {"VUID-VkPipelineViewportWScalingStateCreateInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportWScalingStateCreateInfoNV-sType-sType)"},
+    {"VUID-VkPipelineViewportWScalingStateCreateInfoNV-viewportCount-arraylength", "viewportCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPipelineViewportWScalingStateCreateInfoNV-viewportCount-arraylength)"},
+    {"VUID-VkPresentFrameTokenGGP-frameToken-02680", "frameToken must be a valid GgpFrameToken (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentFrameTokenGGP-frameToken-02680)"},
+    {"VUID-VkPresentFrameTokenGGP-sType-sType", "sType must be VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentFrameTokenGGP-sType-sType)"},
+    {"VUID-VkPresentInfoKHR-commonparent", "Both of the elements of pSwapchains, and the elements of pWaitSemaphores that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkInstance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentInfoKHR-commonparent)"},
+    {"VUID-VkPresentInfoKHR-pImageIndices-01296", "Each element of pImageIndices must be the index of a presentable image acquired from the swapchain specified by the corresponding element of the pSwapchains array, and the presented image subresource must be in the VK_IMAGE_LAYOUT_PRESENT_SRC_KHR layout at the time the operation is executed on a VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentInfoKHR-pImageIndices-01296)"},
+    {"VUID-VkPresentInfoKHR-pImageIndices-01430", "Each element of pImageIndices must be the index of a presentable image acquired from the swapchain specified by the corresponding element of the pSwapchains array, and the presented image subresource must be in the VK_IMAGE_LAYOUT_PRESENT_SRC_KHR or VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR layout at the time the operation is executed on a VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentInfoKHR-pImageIndices-01430)"},
+    {"VUID-VkPresentInfoKHR-pImageIndices-parameter", "pImageIndices must be a valid pointer to an array of swapchainCount uint32_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentInfoKHR-pImageIndices-parameter)"},
+    {"VUID-VkPresentInfoKHR-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDeviceGroupPresentInfoKHR, VkDisplayPresentInfoKHR, VkPresentFrameTokenGGP, VkPresentRegionsKHR, or VkPresentTimesInfoGOOGLE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentInfoKHR-pNext-pNext)"},
+    {"VUID-VkPresentInfoKHR-pResults-parameter", "If pResults is not NULL, pResults must be a valid pointer to an array of swapchainCount VkResult values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentInfoKHR-pResults-parameter)"},
+    {"VUID-VkPresentInfoKHR-pSwapchains-parameter", "pSwapchains must be a valid pointer to an array of swapchainCount valid VkSwapchainKHR handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentInfoKHR-pSwapchains-parameter)"},
+    {"VUID-VkPresentInfoKHR-pWaitSemaphores-03269", "All elements of the pWaitSemaphores must have a VkSemaphoreTypeKHR of VK_SEMAPHORE_TYPE_BINARY_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentInfoKHR-pWaitSemaphores-03269)"},
+    {"VUID-VkPresentInfoKHR-pWaitSemaphores-parameter", "If waitSemaphoreCount is not 0, pWaitSemaphores must be a valid pointer to an array of waitSemaphoreCount valid VkSemaphore handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentInfoKHR-pWaitSemaphores-parameter)"},
+    {"VUID-VkPresentInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PRESENT_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentInfoKHR-sType-sType)"},
+    {"VUID-VkPresentInfoKHR-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentInfoKHR-sType-unique)"},
+    {"VUID-VkPresentInfoKHR-swapchainCount-arraylength", "swapchainCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentInfoKHR-swapchainCount-arraylength)"},
+    {"VUID-VkPresentRegionKHR-pRectangles-parameter", "If rectangleCount is not 0, and pRectangles is not NULL, pRectangles must be a valid pointer to an array of rectangleCount valid VkRectLayerKHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentRegionKHR-pRectangles-parameter)"},
+    {"VUID-VkPresentRegionsKHR-pRegions-parameter", "If pRegions is not NULL, pRegions must be a valid pointer to an array of swapchainCount valid VkPresentRegionKHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentRegionsKHR-pRegions-parameter)"},
+    {"VUID-VkPresentRegionsKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentRegionsKHR-sType-sType)"},
+    {"VUID-VkPresentRegionsKHR-swapchainCount-01260", "swapchainCount must be the same value as VkPresentInfoKHR::swapchainCount, where VkPresentInfoKHR is included in the pNext chain of this VkPresentRegionsKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentRegionsKHR-swapchainCount-01260)"},
+    {"VUID-VkPresentRegionsKHR-swapchainCount-arraylength", "swapchainCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentRegionsKHR-swapchainCount-arraylength)"},
+    {"VUID-VkPresentTimesInfoGOOGLE-pTimes-parameter", "If pTimes is not NULL, pTimes must be a valid pointer to an array of swapchainCount VkPresentTimeGOOGLE structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentTimesInfoGOOGLE-pTimes-parameter)"},
+    {"VUID-VkPresentTimesInfoGOOGLE-sType-sType", "sType must be VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentTimesInfoGOOGLE-sType-sType)"},
+    {"VUID-VkPresentTimesInfoGOOGLE-swapchainCount-01247", "swapchainCount must be the same value as VkPresentInfoKHR::swapchainCount, where VkPresentInfoKHR is included in the pNext chain of this VkPresentTimesInfoGOOGLE structure. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentTimesInfoGOOGLE-swapchainCount-01247)"},
+    {"VUID-VkPresentTimesInfoGOOGLE-swapchainCount-arraylength", "swapchainCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPresentTimesInfoGOOGLE-swapchainCount-arraylength)"},
+    {"VUID-VkProtectedSubmitInfo-pNext-01819", "If the VkSubmitInfo::pNext chain does not include a VkProtectedSubmitInfo structure, then each element of the command buffer of the pCommandBuffers array must be an unprotected command buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkProtectedSubmitInfo-pNext-01819)"},
+    {"VUID-VkProtectedSubmitInfo-protectedSubmit-01816", "If the protected memory feature is not enabled, protectedSubmit must not be VK_TRUE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkProtectedSubmitInfo-protectedSubmit-01816)"},
+    {"VUID-VkProtectedSubmitInfo-protectedSubmit-01817", "If protectedSubmit is VK_TRUE, then each element of the pCommandBuffers array must be a protected command buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkProtectedSubmitInfo-protectedSubmit-01817)"},
+    {"VUID-VkProtectedSubmitInfo-protectedSubmit-01818", "If protectedSubmit is VK_FALSE, then each element of the pCommandBuffers array must be an unprotected command buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkProtectedSubmitInfo-protectedSubmit-01818)"},
+    {"VUID-VkProtectedSubmitInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkProtectedSubmitInfo-sType-sType)"},
+    {"VUID-VkPushConstantRange-offset-00294", "offset must be less than VkPhysicalDeviceLimits::maxPushConstantsSize (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPushConstantRange-offset-00294)"},
+    {"VUID-VkPushConstantRange-offset-00295", "offset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPushConstantRange-offset-00295)"},
+    {"VUID-VkPushConstantRange-size-00296", "size must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPushConstantRange-size-00296)"},
+    {"VUID-VkPushConstantRange-size-00297", "size must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPushConstantRange-size-00297)"},
+    {"VUID-VkPushConstantRange-size-00298", "size must be less than or equal to VkPhysicalDeviceLimits::maxPushConstantsSize minus offset (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPushConstantRange-size-00298)"},
+    {"VUID-VkPushConstantRange-stageFlags-parameter", "stageFlags must be a valid combination of VkShaderStageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPushConstantRange-stageFlags-parameter)"},
+    {"VUID-VkPushConstantRange-stageFlags-requiredbitmask", "stageFlags must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkPushConstantRange-stageFlags-requiredbitmask)"},
+    {"VUID-VkQueryPoolCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueryPoolCreateInfo-flags-zerobitmask)"},
+    {"VUID-VkQueryPoolCreateInfo-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkQueryPoolPerformanceCreateInfoKHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueryPoolCreateInfo-pNext-pNext)"},
+    {"VUID-VkQueryPoolCreateInfo-queryCount-02763", "queryCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueryPoolCreateInfo-queryCount-02763)"},
+    {"VUID-VkQueryPoolCreateInfo-queryType-00791", "If the pipeline statistics queries feature is not enabled, queryType must not be VK_QUERY_TYPE_PIPELINE_STATISTICS (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueryPoolCreateInfo-queryType-00791)"},
+    {"VUID-VkQueryPoolCreateInfo-queryType-00792", "If queryType is VK_QUERY_TYPE_PIPELINE_STATISTICS, pipelineStatistics must be a valid combination of VkQueryPipelineStatisticFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueryPoolCreateInfo-queryType-00792)"},
+    {"VUID-VkQueryPoolCreateInfo-queryType-03222", "If queryType is VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, the pNext chain must include a structure of type VkQueryPoolPerformanceCreateInfoKHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueryPoolCreateInfo-queryType-03222)"},
+    {"VUID-VkQueryPoolCreateInfo-queryType-parameter", "queryType must be a valid VkQueryType value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueryPoolCreateInfo-queryType-parameter)"},
+    {"VUID-VkQueryPoolCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueryPoolCreateInfo-sType-sType)"},
+    {"VUID-VkQueryPoolCreateInfoINTEL-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueryPoolCreateInfoINTEL-pNext-pNext)"},
+    {"VUID-VkQueryPoolCreateInfoINTEL-performanceCountersSampling-parameter", "performanceCountersSampling must be a valid VkQueryPoolSamplingModeINTEL value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueryPoolCreateInfoINTEL-performanceCountersSampling-parameter)"},
+    {"VUID-VkQueryPoolCreateInfoINTEL-sType-sType", "sType must be VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueryPoolCreateInfoINTEL-sType-sType)"},
+    {"VUID-VkQueryPoolPerformanceCreateInfoKHR-counterIndexCount-arraylength", "counterIndexCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueryPoolPerformanceCreateInfoKHR-counterIndexCount-arraylength)"},
+    {"VUID-VkQueryPoolPerformanceCreateInfoKHR-pCounterIndices-03321", "Each element of pCounterIndices must be in the range of counters reported by vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR for the queue family specified in queueFamilyIndex (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueryPoolPerformanceCreateInfoKHR-pCounterIndices-03321)"},
+    {"VUID-VkQueryPoolPerformanceCreateInfoKHR-pCounterIndices-parameter", "pCounterIndices must be a valid pointer to an array of counterIndexCount uint32_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueryPoolPerformanceCreateInfoKHR-pCounterIndices-parameter)"},
+    {"VUID-VkQueryPoolPerformanceCreateInfoKHR-performanceCounterQueryPools-03237", "The performanceCounterQueryPools feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueryPoolPerformanceCreateInfoKHR-performanceCounterQueryPools-03237)"},
+    {"VUID-VkQueryPoolPerformanceCreateInfoKHR-queueFamilyIndex-03236", "queueFamilyIndex must be a valid queue family index of the device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueryPoolPerformanceCreateInfoKHR-queueFamilyIndex-03236)"},
+    {"VUID-VkQueryPoolPerformanceCreateInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueryPoolPerformanceCreateInfoKHR-sType-sType)"},
+    {"VUID-VkQueueFamilyCheckpointPropertiesNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueueFamilyCheckpointPropertiesNV-sType-sType)"},
+    {"VUID-VkQueueFamilyProperties2-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkQueueFamilyCheckpointPropertiesNV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueueFamilyProperties2-pNext-pNext)"},
+    {"VUID-VkQueueFamilyProperties2-sType-sType", "sType must be VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkQueueFamilyProperties2-sType-sType)"},
+    {"VUID-VkRayTracingPipelineCreateInfoNV-commonparent", "Both of basePipelineHandle, and layout that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingPipelineCreateInfoNV-commonparent)"},
+    {"VUID-VkRayTracingPipelineCreateInfoNV-flags-02404", "If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineIndex is -1, basePipelineHandle must be a valid handle to a ray tracing VkPipeline (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingPipelineCreateInfoNV-flags-02404)"},
+    {"VUID-VkRayTracingPipelineCreateInfoNV-flags-02405", "If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineHandle is VK_NULL_HANDLE, basePipelineIndex must be a valid index into the calling command's pCreateInfos parameter (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingPipelineCreateInfoNV-flags-02405)"},
+    {"VUID-VkRayTracingPipelineCreateInfoNV-flags-02406", "If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineIndex is not -1, basePipelineHandle must be VK_NULL_HANDLE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingPipelineCreateInfoNV-flags-02406)"},
+    {"VUID-VkRayTracingPipelineCreateInfoNV-flags-02407", "If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineHandle is not VK_NULL_HANDLE, basePipelineIndex must be -1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingPipelineCreateInfoNV-flags-02407)"},
+    {"VUID-VkRayTracingPipelineCreateInfoNV-flags-parameter", "flags must be a valid combination of VkPipelineCreateFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingPipelineCreateInfoNV-flags-parameter)"},
+    {"VUID-VkRayTracingPipelineCreateInfoNV-groupCount-arraylength", "groupCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingPipelineCreateInfoNV-groupCount-arraylength)"},
+    {"VUID-VkRayTracingPipelineCreateInfoNV-layout-02410", "layout must be consistent with all shaders specified in pStages (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingPipelineCreateInfoNV-layout-02410)"},
+    {"VUID-VkRayTracingPipelineCreateInfoNV-layout-02411", "The number of resources in layout accessible to each shader stage that is used by the pipeline must be less than or equal to VkPhysicalDeviceLimits::maxPerStageResources (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingPipelineCreateInfoNV-layout-02411)"},
+    {"VUID-VkRayTracingPipelineCreateInfoNV-layout-parameter", "layout must be a valid VkPipelineLayout handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingPipelineCreateInfoNV-layout-parameter)"},
+    {"VUID-VkRayTracingPipelineCreateInfoNV-maxRecursionDepth-02412", "maxRecursionDepth must be less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxRecursionDepth (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingPipelineCreateInfoNV-maxRecursionDepth-02412)"},
+    {"VUID-VkRayTracingPipelineCreateInfoNV-pGroups-parameter", "pGroups must be a valid pointer to an array of groupCount valid VkRayTracingShaderGroupCreateInfoNV structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingPipelineCreateInfoNV-pGroups-parameter)"},
+    {"VUID-VkRayTracingPipelineCreateInfoNV-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkPipelineCreationFeedbackCreateInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingPipelineCreateInfoNV-pNext-pNext)"},
+    {"VUID-VkRayTracingPipelineCreateInfoNV-pStages-02409", "The shader code for the entry points identified by pStages, and the rest of the state identified by this structure must adhere to the pipeline linking rules described in the Shader Interfaces chapter (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingPipelineCreateInfoNV-pStages-02409)"},
+    {"VUID-VkRayTracingPipelineCreateInfoNV-pStages-parameter", "pStages must be a valid pointer to an array of stageCount valid VkPipelineShaderStageCreateInfo structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingPipelineCreateInfoNV-pStages-parameter)"},
+    {"VUID-VkRayTracingPipelineCreateInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingPipelineCreateInfoNV-sType-sType)"},
+    {"VUID-VkRayTracingPipelineCreateInfoNV-stage-02408", "The stage member of one element of pStages must be VK_SHADER_STAGE_RAYGEN_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingPipelineCreateInfoNV-stage-02408)"},
+    {"VUID-VkRayTracingPipelineCreateInfoNV-stageCount-arraylength", "stageCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingPipelineCreateInfoNV-stageCount-arraylength)"},
+    {"VUID-VkRayTracingShaderGroupCreateInfoNV-anyHitShader-02418", "anyHitShader must be either VK_SHADER_UNUSED_NV or a valid index into pStages referring to a shader of VK_SHADER_STAGE_ANY_HIT_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingShaderGroupCreateInfoNV-anyHitShader-02418)"},
+    {"VUID-VkRayTracingShaderGroupCreateInfoNV-closestHitShader-02417", "closestHitShader must be either VK_SHADER_UNUSED_NV or a valid index into pStages referring to a shader of VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingShaderGroupCreateInfoNV-closestHitShader-02417)"},
+    {"VUID-VkRayTracingShaderGroupCreateInfoNV-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingShaderGroupCreateInfoNV-pNext-pNext)"},
+    {"VUID-VkRayTracingShaderGroupCreateInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingShaderGroupCreateInfoNV-sType-sType)"},
+    {"VUID-VkRayTracingShaderGroupCreateInfoNV-type-02413", "If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV then generalShader must be a valid index into pStages referring to a shader of VK_SHADER_STAGE_RAYGEN_BIT_NV, VK_SHADER_STAGE_MISS_BIT_NV, or VK_SHADER_STAGE_CALLABLE_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingShaderGroupCreateInfoNV-type-02413)"},
+    {"VUID-VkRayTracingShaderGroupCreateInfoNV-type-02414", "If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV then closestHitShader, anyHitShader, and intersectionShader must be VK_SHADER_UNUSED_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingShaderGroupCreateInfoNV-type-02414)"},
+    {"VUID-VkRayTracingShaderGroupCreateInfoNV-type-02415", "If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV then intersectionShader must be a valid index into pStages referring to a shader of VK_SHADER_STAGE_INTERSECTION_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingShaderGroupCreateInfoNV-type-02415)"},
+    {"VUID-VkRayTracingShaderGroupCreateInfoNV-type-02416", "If type is VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV then intersectionShader must be VK_SHADER_UNUSED_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingShaderGroupCreateInfoNV-type-02416)"},
+    {"VUID-VkRayTracingShaderGroupCreateInfoNV-type-parameter", "type must be a valid VkRayTracingShaderGroupTypeNV value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRayTracingShaderGroupCreateInfoNV-type-parameter)"},
+    {"VUID-VkRectLayerKHR-layer-01262", "layer must be less than imageArrayLayers member of the VkSwapchainCreateInfoKHR structure given to vkCreateSwapchainKHR. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRectLayerKHR-layer-01262)"},
+    {"VUID-VkRectLayerKHR-offset-01261", "The sum of offset and extent must be no greater than the imageExtent member of the VkSwapchainCreateInfoKHR structure given to vkCreateSwapchainKHR. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRectLayerKHR-offset-01261)"},
+    {"VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-03218", "Each element of pAttachments must only specify a single mip level (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-03218)"},
+    {"VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-03219", "Each element of pAttachments must have been created with the identity swizzle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-03219)"},
+    {"VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-parameter", "If attachmentCount is not 0, pAttachments must be a valid pointer to an array of attachmentCount valid VkImageView handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-parameter)"},
+    {"VUID-VkRenderPassAttachmentBeginInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassAttachmentBeginInfoKHR-sType-sType)"},
+    {"VUID-VkRenderPassBeginInfo-clearValueCount-00902", "clearValueCount must be greater than the largest attachment index in renderPass that specifies a loadOp (or stencilLoadOp, if the attachment has a depth/stencil format) of VK_ATTACHMENT_LOAD_OP_CLEAR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-clearValueCount-00902)"},
+    {"VUID-VkRenderPassBeginInfo-commonparent", "Both of framebuffer, and renderPass must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-commonparent)"},
+    {"VUID-VkRenderPassBeginInfo-framebuffer-02780", "If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of the pAttachments member of a VkRenderPassAttachmentBeginInfoKHR structure included in the pNext chain must have been created on the same VkDevice as framebuffer and renderPass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-framebuffer-02780)"},
+    {"VUID-VkRenderPassBeginInfo-framebuffer-03207", "If framebuffer was created with a VkFramebufferCreateInfo::flags value that did not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, and the pNext chain includes a VkRenderPassAttachmentBeginInfoKHR structure, its attachmentCount must be zero (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-framebuffer-03207)"},
+    {"VUID-VkRenderPassBeginInfo-framebuffer-03208", "If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, the attachmentCount of a VkRenderPassAttachmentBeginInfoKHR structure included in the pNext chain must be equal to the value of VkFramebufferAttachmentsCreateInfoKHR::attachmentImageInfoCount used to create framebuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-framebuffer-03208)"},
+    {"VUID-VkRenderPassBeginInfo-framebuffer-03209", "If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of the pAttachments member of a VkRenderPassAttachmentBeginInfoKHR structure included in the pNext chain must be a VkImageView of an image created with a value of VkImageCreateInfo::flags equal to the flags member of the corresponding element of VkFramebufferAttachmentsCreateInfoKHR::pAttachments used to create framebuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-framebuffer-03209)"},
+    {"VUID-VkRenderPassBeginInfo-framebuffer-03210", "If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of the pAttachments member of a VkRenderPassAttachmentBeginInfoKHR structure included in the pNext chain must be a VkImageView of an image created with a value of VkImageCreateInfo::usage equal to the usage member of the corresponding element of VkFramebufferAttachmentsCreateInfoKHR::pAttachments used to create framebuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-framebuffer-03210)"},
+    {"VUID-VkRenderPassBeginInfo-framebuffer-03211", "If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of the pAttachments member of a VkRenderPassAttachmentBeginInfoKHR structure included in the pNext chain must be a VkImageView with a width equal to the width member of the corresponding element of VkFramebufferAttachmentsCreateInfoKHR::pAttachments used to create framebuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-framebuffer-03211)"},
+    {"VUID-VkRenderPassBeginInfo-framebuffer-03212", "If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of the pAttachments member of a VkRenderPassAttachmentBeginInfoKHR structure included in the pNext chain must be a VkImageView with a height equal to the height member of the corresponding element of VkFramebufferAttachmentsCreateInfoKHR::pAttachments used to create framebuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-framebuffer-03212)"},
+    {"VUID-VkRenderPassBeginInfo-framebuffer-03213", "If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of the pAttachments member of a VkRenderPassAttachmentBeginInfoKHR structure included in the pNext chain must be a VkImageView of an image created with a value of VkImageViewCreateInfo::subresourceRange.layerCount equal to the layerCount member of the corresponding element of VkFramebufferAttachmentsCreateInfoKHR::pAttachments used to create framebuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-framebuffer-03213)"},
+    {"VUID-VkRenderPassBeginInfo-framebuffer-03214", "If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of the pAttachments member of a VkRenderPassAttachmentBeginInfoKHR structure included in the pNext chain must be a VkImageView of an image created with a value of VkImageFormatListCreateInfoKHR::viewFormatCount equal to the viewFormatCount member of the corresponding element of VkFramebufferAttachmentsCreateInfoKHR::pAttachments used to create framebuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-framebuffer-03214)"},
+    {"VUID-VkRenderPassBeginInfo-framebuffer-03215", "If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of the pAttachments member of a VkRenderPassAttachmentBeginInfoKHR structure included in the pNext chain must be a VkImageView of an image created with a set of elements in VkImageFormatListCreateInfoKHR::pViewFormats equal to the set of elements in the pViewFormats member of the corresponding element of VkFramebufferAttachmentsCreateInfoKHR::pAttachments used to create framebuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-framebuffer-03215)"},
+    {"VUID-VkRenderPassBeginInfo-framebuffer-03216", "If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of the pAttachments member of a VkRenderPassAttachmentBeginInfoKHR structure included in the pNext chain must be a VkImageView of an image created with a value of VkImageViewCreateInfo::format equal to the corresponding value of VkAttachmentDescription::format in renderPass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-framebuffer-03216)"},
+    {"VUID-VkRenderPassBeginInfo-framebuffer-03217", "If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, each element of the pAttachments member of a VkRenderPassAttachmentBeginInfoKHR structure included in the pNext chain must be a VkImageView of an image created with a value of VkImageCreateInfo::samples equal to the corresponding value of VkAttachmentDescription::samples in renderPass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-framebuffer-03217)"},
+    {"VUID-VkRenderPassBeginInfo-framebuffer-parameter", "framebuffer must be a valid VkFramebuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-framebuffer-parameter)"},
+    {"VUID-VkRenderPassBeginInfo-pClearValues-parameter", "If clearValueCount is not 0, pClearValues must be a valid pointer to an array of clearValueCount VkClearValue unions (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-pClearValues-parameter)"},
+    {"VUID-VkRenderPassBeginInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDeviceGroupRenderPassBeginInfo, VkRenderPassAttachmentBeginInfoKHR, or VkRenderPassSampleLocationsBeginInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-pNext-pNext)"},
+    {"VUID-VkRenderPassBeginInfo-renderPass-00904", "renderPass must be compatible with the renderPass member of the VkFramebufferCreateInfo structure specified when creating framebuffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-renderPass-00904)"},
+    {"VUID-VkRenderPassBeginInfo-renderPass-parameter", "renderPass must be a valid VkRenderPass handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-renderPass-parameter)"},
+    {"VUID-VkRenderPassBeginInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-sType-sType)"},
+    {"VUID-VkRenderPassBeginInfo-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassBeginInfo-sType-unique)"},
+    {"VUID-VkRenderPassCreateInfo-attachment-00834", "If the attachment member of any element of pInputAttachments, pColorAttachments, pResolveAttachments or pDepthStencilAttachment, or any element of pPreserveAttachments in any element of pSubpasses is not VK_ATTACHMENT_UNUSED, it must be less than attachmentCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-attachment-00834)"},
+    {"VUID-VkRenderPassCreateInfo-dstSubpass-02518", "The dstSubpass member of each element of pDependencies must be less than subpassCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-dstSubpass-02518)"},
+    {"VUID-VkRenderPassCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-flags-zerobitmask)"},
+    {"VUID-VkRenderPassCreateInfo-pAttachments-00836", "For any member of pAttachments with a loadOp equal to VK_ATTACHMENT_LOAD_OP_CLEAR, the first use of that attachment must not specify a layout equal to VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL or VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pAttachments-00836)"},
+    {"VUID-VkRenderPassCreateInfo-pAttachments-01566", "For any member of pAttachments with a loadOp equal to VK_ATTACHMENT_LOAD_OP_CLEAR, the first use of that attachment must not specify a layout equal to VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pAttachments-01566)"},
+    {"VUID-VkRenderPassCreateInfo-pAttachments-01567", "For any member of pAttachments with a stencilLoadOp equal to VK_ATTACHMENT_LOAD_OP_CLEAR, the first use of that attachment must not specify a layout equal to VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pAttachments-01567)"},
+    {"VUID-VkRenderPassCreateInfo-pAttachments-02511", "For any member of pAttachments with a stencilLoadOp equal to VK_ATTACHMENT_LOAD_OP_CLEAR, the first use of that attachment must not specify a layout equal to VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL or VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pAttachments-02511)"},
+    {"VUID-VkRenderPassCreateInfo-pAttachments-parameter", "If attachmentCount is not 0, pAttachments must be a valid pointer to an array of attachmentCount valid VkAttachmentDescription structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pAttachments-parameter)"},
+    {"VUID-VkRenderPassCreateInfo-pDependencies-00837", "For any element of pDependencies, if the srcSubpass is not VK_SUBPASS_EXTERNAL, all stage flags included in the srcStageMask member of that dependency must be a pipeline stage supported by the pipeline identified by the pipelineBindPoint member of the source subpass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pDependencies-00837)"},
+    {"VUID-VkRenderPassCreateInfo-pDependencies-00838", "For any element of pDependencies, if the dstSubpass is not VK_SUBPASS_EXTERNAL, all stage flags included in the dstStageMask member of that dependency must be a pipeline stage supported by the pipeline identified by the pipelineBindPoint member of the destination subpass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pDependencies-00838)"},
+    {"VUID-VkRenderPassCreateInfo-pDependencies-parameter", "If dependencyCount is not 0, pDependencies must be a valid pointer to an array of dependencyCount valid VkSubpassDependency structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pDependencies-parameter)"},
+    {"VUID-VkRenderPassCreateInfo-pNext-01926", "If the pNext chain includes a VkRenderPassInputAttachmentAspectCreateInfo structure, the subpass member of each element of its pAspectReferences member must be less than subpassCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pNext-01926)"},
+    {"VUID-VkRenderPassCreateInfo-pNext-01927", "If the pNext chain includes a VkRenderPassInputAttachmentAspectCreateInfo structure, the inputAttachmentIndex member of each element of its pAspectReferences member must be less than the value of inputAttachmentCount in the member of pSubpasses identified by its subpass member (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pNext-01927)"},
+    {"VUID-VkRenderPassCreateInfo-pNext-01928", "If the pNext chain includes a VkRenderPassMultiviewCreateInfo structure, and its subpassCount member is not zero, that member must be equal to the value of subpassCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pNext-01928)"},
+    {"VUID-VkRenderPassCreateInfo-pNext-01929", "If the pNext chain includes a VkRenderPassMultiviewCreateInfo structure, if its dependencyCount member is not zero, it must be equal to dependencyCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pNext-01929)"},
+    {"VUID-VkRenderPassCreateInfo-pNext-01930", "If the pNext chain includes a VkRenderPassMultiviewCreateInfo structure, for each non-zero element of pViewOffsets, the srcSubpass and dstSubpass members of pDependencies at the same index must not be equal (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pNext-01930)"},
+    {"VUID-VkRenderPassCreateInfo-pNext-01963", "If the pNext chain includes a VkRenderPassInputAttachmentAspectCreateInfo structure, for any element of the pInputAttachments member of any element of pSubpasses where the attachment member is not VK_ATTACHMENT_UNUSED, the aspectMask member of the corresponding element of VkRenderPassInputAttachmentAspectCreateInfo::pAspectReferences must only include aspects that are present in images of the format specified by the element of pAttachments at attachment (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pNext-01963)"},
+    {"VUID-VkRenderPassCreateInfo-pNext-02512", "If the pNext chain includes a VkRenderPassMultiviewCreateInfo structure, for any element of pDependencies with a dependencyFlags member that does not include VK_DEPENDENCY_VIEW_LOCAL_BIT, the corresponding element of the pViewOffsets member of that VkRenderPassMultiviewCreateInfo instance must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pNext-02512)"},
+    {"VUID-VkRenderPassCreateInfo-pNext-02513", "If the pNext chain includes a VkRenderPassMultiviewCreateInfo structure, elements of its pViewMasks member must either all be 0, or all not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pNext-02513)"},
+    {"VUID-VkRenderPassCreateInfo-pNext-02514", "If the pNext chain includes a VkRenderPassMultiviewCreateInfo structure, and each element of its pViewMasks member is 0, the dependencyFlags member of each element of pDependencies must not include VK_DEPENDENCY_VIEW_LOCAL_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pNext-02514)"},
+    {"VUID-VkRenderPassCreateInfo-pNext-02515", "If the pNext chain includes a VkRenderPassMultiviewCreateInfo structure, and each element of its pViewMasks member is 0, correlatedViewMaskCount must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pNext-02515)"},
+    {"VUID-VkRenderPassCreateInfo-pNext-02516", "If the pNext chain includes a VkRenderPassMultiviewCreateInfo structure, each element of its pViewMask member must not have a bit set at an index greater than or equal to VkPhysicalDeviceLimits::maxFramebufferLayers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pNext-02516)"},
+    {"VUID-VkRenderPassCreateInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkRenderPassFragmentDensityMapCreateInfoEXT, VkRenderPassInputAttachmentAspectCreateInfo, or VkRenderPassMultiviewCreateInfo (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pNext-pNext)"},
+    {"VUID-VkRenderPassCreateInfo-pSubpasses-parameter", "pSubpasses must be a valid pointer to an array of subpassCount valid VkSubpassDescription structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-pSubpasses-parameter)"},
+    {"VUID-VkRenderPassCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-sType-sType)"},
+    {"VUID-VkRenderPassCreateInfo-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-sType-unique)"},
+    {"VUID-VkRenderPassCreateInfo-srcSubpass-02517", "The srcSubpass member of each element of pDependencies must be less than subpassCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-srcSubpass-02517)"},
+    {"VUID-VkRenderPassCreateInfo-subpassCount-arraylength", "subpassCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo-subpassCount-arraylength)"},
+    {"VUID-VkRenderPassCreateInfo2KHR-None-03049", "If any two subpasses operate on attachments with overlapping ranges of the same VkDeviceMemory object, and at least one subpass writes to that area of VkDeviceMemory, a subpass dependency must be included (either directly or via some intermediate subpasses) between them (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-None-03049)"},
+    {"VUID-VkRenderPassCreateInfo2KHR-attachment-02525", "If the attachment member of any element of the pInputAttachments member of any element of pSubpasses is not VK_ATTACHMENT_UNUSED, the aspectMask member of that element of pInputAttachments must only include aspects that are present in images of the format specified by the element of pAttachments specified by attachment (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-attachment-02525)"},
+    {"VUID-VkRenderPassCreateInfo2KHR-attachment-03050", "If the attachment member of any element of pInputAttachments, pColorAttachments, pResolveAttachments or pDepthStencilAttachment, or the attachment indexed by any element of pPreserveAttachments in any given element of pSubpasses is bound to a range of a VkDeviceMemory object that overlaps with any other attachment in any subpass (including the same subpass), the VkAttachmentDescription2KHR structures describing them must include VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT in flags (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-attachment-03050)"},
+    {"VUID-VkRenderPassCreateInfo2KHR-attachment-03051", "If the attachment member of any element of pInputAttachments, pColorAttachments, pResolveAttachments or pDepthStencilAttachment, or any element of pPreserveAttachments in any given element of pSubpasses is not VK_ATTACHMENT_UNUSED, it must be less than attachmentCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-attachment-03051)"},
+    {"VUID-VkRenderPassCreateInfo2KHR-dstSubpass-02527", "The dstSubpass member of each element of pDependencies must be less than subpassCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-dstSubpass-02527)"},
+    {"VUID-VkRenderPassCreateInfo2KHR-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-flags-zerobitmask)"},
+    {"VUID-VkRenderPassCreateInfo2KHR-pAttachments-02522", "For any member of pAttachments with a loadOp equal to VK_ATTACHMENT_LOAD_OP_CLEAR, the first use of that attachment must not specify a layout equal to VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, or VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-pAttachments-02522)"},
+    {"VUID-VkRenderPassCreateInfo2KHR-pAttachments-02523", "For any member of pAttachments with a stencilLoadOp equal to VK_ATTACHMENT_LOAD_OP_CLEAR, the first use of that attachment must not specify a layout equal to VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, or VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-pAttachments-02523)"},
+    {"VUID-VkRenderPassCreateInfo2KHR-pAttachments-parameter", "If attachmentCount is not 0, pAttachments must be a valid pointer to an array of attachmentCount valid VkAttachmentDescription2KHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-pAttachments-parameter)"},
+    {"VUID-VkRenderPassCreateInfo2KHR-pCorrelatedViewMasks-03056", "The set of bits included in any element of pCorrelatedViewMasks must not overlap with the set of bits included in any other element of pCorrelatedViewMasks (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-pCorrelatedViewMasks-03056)"},
+    {"VUID-VkRenderPassCreateInfo2KHR-pCorrelatedViewMasks-parameter", "If correlatedViewMaskCount is not 0, pCorrelatedViewMasks must be a valid pointer to an array of correlatedViewMaskCount uint32_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-pCorrelatedViewMasks-parameter)"},
+    {"VUID-VkRenderPassCreateInfo2KHR-pDependencies-03054", "For any element of pDependencies, if the srcSubpass is not VK_SUBPASS_EXTERNAL, all stage flags included in the srcStageMask member of that dependency must be a pipeline stage supported by the pipeline identified by the pipelineBindPoint member of the source subpass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-pDependencies-03054)"},
+    {"VUID-VkRenderPassCreateInfo2KHR-pDependencies-03055", "For any element of pDependencies, if the dstSubpass is not VK_SUBPASS_EXTERNAL, all stage flags included in the dstStageMask member of that dependency must be a pipeline stage supported by the pipeline identified by the pipelineBindPoint member of the destination subpass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-pDependencies-03055)"},
+    {"VUID-VkRenderPassCreateInfo2KHR-pDependencies-03060", "For any element of pDependencies where its srcSubpass member equals its dstSubpass member, if the viewMask member of the corresponding element of pSubpasses includes more than one bit, its dependencyFlags member must include VK_DEPENDENCY_VIEW_LOCAL_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-pDependencies-03060)"},
+    {"VUID-VkRenderPassCreateInfo2KHR-pDependencies-parameter", "If dependencyCount is not 0, pDependencies must be a valid pointer to an array of dependencyCount valid VkSubpassDependency2KHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-pDependencies-parameter)"},
+    {"VUID-VkRenderPassCreateInfo2KHR-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkRenderPassFragmentDensityMapCreateInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-pNext-pNext)"},
+    {"VUID-VkRenderPassCreateInfo2KHR-pSubpasses-parameter", "pSubpasses must be a valid pointer to an array of subpassCount valid VkSubpassDescription2KHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-pSubpasses-parameter)"},
+    {"VUID-VkRenderPassCreateInfo2KHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-sType-sType)"},
+    {"VUID-VkRenderPassCreateInfo2KHR-srcSubpass-02526", "The srcSubpass member of each element of pDependencies must be less than subpassCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-srcSubpass-02526)"},
+    {"VUID-VkRenderPassCreateInfo2KHR-subpassCount-arraylength", "subpassCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-subpassCount-arraylength)"},
+    {"VUID-VkRenderPassCreateInfo2KHR-viewMask-02524", "The viewMask member must not have a bit set at an index greater than or equal to VkPhysicalDeviceLimits::maxFramebufferLayers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-viewMask-02524)"},
+    {"VUID-VkRenderPassCreateInfo2KHR-viewMask-03057", "If the VkSubpassDescription2KHR::viewMask member of all elements of pSubpasses is 0, correlatedViewMaskCount must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-viewMask-03057)"},
+    {"VUID-VkRenderPassCreateInfo2KHR-viewMask-03058", "The VkSubpassDescription2KHR::viewMask member of all elements of pSubpasses must either all be 0, or all not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-viewMask-03058)"},
+    {"VUID-VkRenderPassCreateInfo2KHR-viewMask-03059", "If the VkSubpassDescription2KHR::viewMask member of all elements of pSubpasses is 0, the dependencyFlags member of any element of pDependencies must not include VK_DEPENDENCY_VIEW_LOCAL_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassCreateInfo2KHR-viewMask-03059)"},
+    {"VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02547", "If fragmentDensityMapAttachment is not VK_ATTACHMENT_UNUSED, fragmentDensityMapAttachment must be less than VkRenderPassCreateInfo::attachmentCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02547)"},
+    {"VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02548", "If fragmentDensityMapAttachment is not VK_ATTACHMENT_UNUSED, fragmentDensityMapAttachment must not be an element of VkSubpassDescription::pInputAttachments, VkSubpassDescription::pColorAttachments, VkSubpassDescription::pResolveAttachments, VkSubpassDescription::pDepthStencilAttachment, or VkSubpassDescription::pPreserveAttachments for any subpass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02548)"},
+    {"VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02549", "If fragmentDensityMapAttachment is not VK_ATTACHMENT_UNUSED, layout must be equal to VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT, or VK_IMAGE_LAYOUT_GENERAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02549)"},
+    {"VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02550", "If fragmentDensityMapAttachment is not VK_ATTACHMENT_UNUSED, fragmentDensityMapAttachment must reference an attachment with a loadOp equal to VK_ATTACHMENT_LOAD_OP_LOAD or VK_ATTACHMENT_LOAD_OP_DONT_CARE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02550)"},
+    {"VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02551", "If fragmentDensityMapAttachment is not VK_ATTACHMENT_UNUSED, fragmentDensityMapAttachment must reference an attachment with a storeOp equal to VK_ATTACHMENT_STORE_OP_DONT_CARE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02551)"},
+    {"VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-parameter", "fragmentDensityMapAttachment must be a valid VkAttachmentReference structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-parameter)"},
+    {"VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-sType-sType)"},
+    {"VUID-VkRenderPassInputAttachmentAspectCreateInfo-aspectReferenceCount-arraylength", "aspectReferenceCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassInputAttachmentAspectCreateInfo-aspectReferenceCount-arraylength)"},
+    {"VUID-VkRenderPassInputAttachmentAspectCreateInfo-pAspectReferences-parameter", "pAspectReferences must be a valid pointer to an array of aspectReferenceCount valid VkInputAttachmentAspectReference structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassInputAttachmentAspectCreateInfo-pAspectReferences-parameter)"},
+    {"VUID-VkRenderPassInputAttachmentAspectCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassInputAttachmentAspectCreateInfo-sType-sType)"},
+    {"VUID-VkRenderPassMultiviewCreateInfo-pCorrelationMasks-00841", "Each view index must not be set in more than one element of pCorrelationMasks (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassMultiviewCreateInfo-pCorrelationMasks-00841)"},
+    {"VUID-VkRenderPassMultiviewCreateInfo-pCorrelationMasks-parameter", "If correlationMaskCount is not 0, pCorrelationMasks must be a valid pointer to an array of correlationMaskCount uint32_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassMultiviewCreateInfo-pCorrelationMasks-parameter)"},
+    {"VUID-VkRenderPassMultiviewCreateInfo-pViewMasks-parameter", "If subpassCount is not 0, pViewMasks must be a valid pointer to an array of subpassCount uint32_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassMultiviewCreateInfo-pViewMasks-parameter)"},
+    {"VUID-VkRenderPassMultiviewCreateInfo-pViewOffsets-parameter", "If dependencyCount is not 0, pViewOffsets must be a valid pointer to an array of dependencyCount int32_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassMultiviewCreateInfo-pViewOffsets-parameter)"},
+    {"VUID-VkRenderPassMultiviewCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassMultiviewCreateInfo-sType-sType)"},
+    {"VUID-VkRenderPassSampleLocationsBeginInfoEXT-pAttachmentInitialSampleLocations-parameter", "If attachmentInitialSampleLocationsCount is not 0, pAttachmentInitialSampleLocations must be a valid pointer to an array of attachmentInitialSampleLocationsCount valid VkAttachmentSampleLocationsEXT structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassSampleLocationsBeginInfoEXT-pAttachmentInitialSampleLocations-parameter)"},
+    {"VUID-VkRenderPassSampleLocationsBeginInfoEXT-pPostSubpassSampleLocations-parameter", "If postSubpassSampleLocationsCount is not 0, pPostSubpassSampleLocations must be a valid pointer to an array of postSubpassSampleLocationsCount valid VkSubpassSampleLocationsEXT structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassSampleLocationsBeginInfoEXT-pPostSubpassSampleLocations-parameter)"},
+    {"VUID-VkRenderPassSampleLocationsBeginInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkRenderPassSampleLocationsBeginInfoEXT-sType-sType)"},
+    {"VUID-VkSampleLocationsInfoEXT-pSampleLocations-parameter", "If sampleLocationsCount is not 0, pSampleLocations must be a valid pointer to an array of sampleLocationsCount VkSampleLocationEXT structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSampleLocationsInfoEXT-pSampleLocations-parameter)"},
+    {"VUID-VkSampleLocationsInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSampleLocationsInfoEXT-sType-sType)"},
+    {"VUID-VkSampleLocationsInfoEXT-sampleLocationsCount-01527", "sampleLocationsCount must equal sampleLocationsPerPixel {times} sampleLocationGridSize.width {times} sampleLocationGridSize.height (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSampleLocationsInfoEXT-sampleLocationsCount-01527)"},
+    {"VUID-VkSampleLocationsInfoEXT-sampleLocationsPerPixel-01526", "sampleLocationsPerPixel must be a bit value that is set in VkPhysicalDeviceSampleLocationsPropertiesEXT::sampleLocationSampleCounts (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSampleLocationsInfoEXT-sampleLocationsPerPixel-01526)"},
+    {"VUID-VkSampleLocationsInfoEXT-sampleLocationsPerPixel-parameter", "If sampleLocationsPerPixel is not 0, sampleLocationsPerPixel must be a valid VkSampleCountFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSampleLocationsInfoEXT-sampleLocationsPerPixel-parameter)"},
+    {"VUID-VkSamplerCreateInfo-None-01647", "The sampler reduction mode must be set to VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT if sampler Y'CBCR conversion is enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-None-01647)"},
+    {"VUID-VkSamplerCreateInfo-addressModeU-01078", "If any of addressModeU, addressModeV or addressModeW are VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER, borderColor must be a valid VkBorderColor value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-addressModeU-01078)"},
+    {"VUID-VkSamplerCreateInfo-addressModeU-01079", "If the VK_KHR_sampler_mirror_clamp_to_edge extension is not enabled, addressModeU, addressModeV and addressModeW must not be VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-addressModeU-01079)"},
+    {"VUID-VkSamplerCreateInfo-addressModeU-01646", "If sampler Y'CBCR conversion is enabled, addressModeU, addressModeV, and addressModeW must be VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, anisotropyEnable must be VK_FALSE, and unnormalizedCoordinates must be VK_FALSE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-addressModeU-01646)"},
+    {"VUID-VkSamplerCreateInfo-addressModeU-parameter", "addressModeU must be a valid VkSamplerAddressMode value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-addressModeU-parameter)"},
+    {"VUID-VkSamplerCreateInfo-addressModeV-parameter", "addressModeV must be a valid VkSamplerAddressMode value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-addressModeV-parameter)"},
+    {"VUID-VkSamplerCreateInfo-addressModeW-parameter", "addressModeW must be a valid VkSamplerAddressMode value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-addressModeW-parameter)"},
+    {"VUID-VkSamplerCreateInfo-anisotropyEnable-01070", "If the anisotropic sampling feature is not enabled, anisotropyEnable must be VK_FALSE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-anisotropyEnable-01070)"},
+    {"VUID-VkSamplerCreateInfo-anisotropyEnable-01071", "If anisotropyEnable is VK_TRUE, maxAnisotropy must be between 1.0 and VkPhysicalDeviceLimits::maxSamplerAnisotropy, inclusive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-anisotropyEnable-01071)"},
+    {"VUID-VkSamplerCreateInfo-compareEnable-01080", "If compareEnable is VK_TRUE, compareOp must be a valid VkCompareOp value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-compareEnable-01080)"},
+    {"VUID-VkSamplerCreateInfo-compareEnable-01423", "If compareEnable is VK_TRUE, the reductionMode member of VkSamplerReductionModeCreateInfoEXT must be VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-compareEnable-01423)"},
+    {"VUID-VkSamplerCreateInfo-flags-02574", "If flags includes VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT, then minFilter and magFilter must be equal. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-flags-02574)"},
+    {"VUID-VkSamplerCreateInfo-flags-02575", "If flags includes VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT, then mipmapMode must be VK_SAMPLER_MIPMAP_MODE_NEAREST. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-flags-02575)"},
+    {"VUID-VkSamplerCreateInfo-flags-02576", "If flags includes VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT, then minLod and maxLod must be zero. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-flags-02576)"},
+    {"VUID-VkSamplerCreateInfo-flags-02577", "If flags includes VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT, then addressModeU and addressModeV must each be either VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE or VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-flags-02577)"},
+    {"VUID-VkSamplerCreateInfo-flags-02578", "If flags includes VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT, then anisotropyEnable must be VK_FALSE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-flags-02578)"},
+    {"VUID-VkSamplerCreateInfo-flags-02579", "If flags includes VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT, then compareEnable must be VK_FALSE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-flags-02579)"},
+    {"VUID-VkSamplerCreateInfo-flags-02580", "If flags includes VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT, then unnormalizedCoordinates must be VK_FALSE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-flags-02580)"},
+    {"VUID-VkSamplerCreateInfo-flags-parameter", "flags must be a valid combination of VkSamplerCreateFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-flags-parameter)"},
+    {"VUID-VkSamplerCreateInfo-magFilter-01081", "If either magFilter or minFilter is VK_FILTER_CUBIC_EXT, anisotropyEnable must be VK_FALSE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-magFilter-01081)"},
+    {"VUID-VkSamplerCreateInfo-magFilter-01422", "If either magFilter or minFilter is VK_FILTER_CUBIC_EXT, the reductionMode member of VkSamplerReductionModeCreateInfoEXT must be VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-magFilter-01422)"},
+    {"VUID-VkSamplerCreateInfo-magFilter-parameter", "magFilter must be a valid VkFilter value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-magFilter-parameter)"},
+    {"VUID-VkSamplerCreateInfo-maxLod-01973", "maxLod must be greater than or equal to minLod (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-maxLod-01973)"},
+    {"VUID-VkSamplerCreateInfo-minFilter-01645", "If sampler Y'CBCR conversion is enabled and VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT is not set for the format, minFilter and magFilter must be equal to the sampler Y'CBCR conversion's chromaFilter (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-minFilter-01645)"},
+    {"VUID-VkSamplerCreateInfo-minFilter-parameter", "minFilter must be a valid VkFilter value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-minFilter-parameter)"},
+    {"VUID-VkSamplerCreateInfo-mipLodBias-01069", "The absolute value of mipLodBias must be less than or equal to VkPhysicalDeviceLimits::maxSamplerLodBias (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-mipLodBias-01069)"},
+    {"VUID-VkSamplerCreateInfo-mipmapMode-parameter", "mipmapMode must be a valid VkSamplerMipmapMode value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-mipmapMode-parameter)"},
+    {"VUID-VkSamplerCreateInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkSamplerReductionModeCreateInfoEXT or VkSamplerYcbcrConversionInfo (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-pNext-pNext)"},
+    {"VUID-VkSamplerCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-sType-sType)"},
+    {"VUID-VkSamplerCreateInfo-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-sType-unique)"},
+    {"VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01072", "If unnormalizedCoordinates is VK_TRUE, minFilter and magFilter must be equal (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01072)"},
+    {"VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01073", "If unnormalizedCoordinates is VK_TRUE, mipmapMode must be VK_SAMPLER_MIPMAP_MODE_NEAREST (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01073)"},
+    {"VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01074", "If unnormalizedCoordinates is VK_TRUE, minLod and maxLod must be zero (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01074)"},
+    {"VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01075", "If unnormalizedCoordinates is VK_TRUE, addressModeU and addressModeV must each be either VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE or VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01075)"},
+    {"VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01076", "If unnormalizedCoordinates is VK_TRUE, anisotropyEnable must be VK_FALSE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01076)"},
+    {"VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01077", "If unnormalizedCoordinates is VK_TRUE, compareEnable must be VK_FALSE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01077)"},
+    {"VUID-VkSamplerReductionModeCreateInfoEXT-reductionMode-parameter", "reductionMode must be a valid VkSamplerReductionModeEXT value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerReductionModeCreateInfoEXT-reductionMode-parameter)"},
+    {"VUID-VkSamplerReductionModeCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerReductionModeCreateInfoEXT-sType-sType)"},
+    {"VUID-VkSamplerYcbcrConversionCreateInfo-chromaFilter-01657", "If the format does not support VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT, chromaFilter must be VK_FILTER_NEAREST (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionCreateInfo-chromaFilter-01657)"},
+    {"VUID-VkSamplerYcbcrConversionCreateInfo-chromaFilter-parameter", "chromaFilter must be a valid VkFilter value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionCreateInfo-chromaFilter-parameter)"},
+    {"VUID-VkSamplerYcbcrConversionCreateInfo-components-02581", "If the format has a _422 or _420 suffix, then components.g must be VK_COMPONENT_SWIZZLE_IDENTITY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionCreateInfo-components-02581)"},
+    {"VUID-VkSamplerYcbcrConversionCreateInfo-components-02582", "If the format has a _422 or _420 suffix, then components.a must be VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_ONE, or VK_COMPONENT_SWIZZLE_ZERO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionCreateInfo-components-02582)"},
+    {"VUID-VkSamplerYcbcrConversionCreateInfo-components-02583", "If the format has a _422 or _420 suffix, then components.r must be VK_COMPONENT_SWIZZLE_IDENTITY or VK_COMPONENT_SWIZZLE_B (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionCreateInfo-components-02583)"},
+    {"VUID-VkSamplerYcbcrConversionCreateInfo-components-02584", "If the format has a _422 or _420 suffix, then components.b must be VK_COMPONENT_SWIZZLE_IDENTITY or VK_COMPONENT_SWIZZLE_R (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionCreateInfo-components-02584)"},
+    {"VUID-VkSamplerYcbcrConversionCreateInfo-components-02585", "If the format has a _422 or _420 suffix, and if either components.r or components.b is VK_COMPONENT_SWIZZLE_IDENTITY, both values must be VK_COMPONENT_SWIZZLE_IDENTITY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionCreateInfo-components-02585)"},
+    {"VUID-VkSamplerYcbcrConversionCreateInfo-components-parameter", "components must be a valid VkComponentMapping structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionCreateInfo-components-parameter)"},
+    {"VUID-VkSamplerYcbcrConversionCreateInfo-forceExplicitReconstruction-01656", "If the format does not support VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT, forceExplicitReconstruction must be FALSE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionCreateInfo-forceExplicitReconstruction-01656)"},
+    {"VUID-VkSamplerYcbcrConversionCreateInfo-format-01649", "format must not be VK_FORMAT_UNDEFINED (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionCreateInfo-format-01649)"},
+    {"VUID-VkSamplerYcbcrConversionCreateInfo-format-01650", "format must support VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT or VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionCreateInfo-format-01650)"},
+    {"VUID-VkSamplerYcbcrConversionCreateInfo-format-01653", "format must represent unsigned normalized values (i.e. the format must be a UNORM format) (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionCreateInfo-format-01653)"},
+    {"VUID-VkSamplerYcbcrConversionCreateInfo-format-01904", "If an external format conversion is being created, format must be VK_FORMAT_UNDEFINED, otherwise it must not be VK_FORMAT_UNDEFINED. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionCreateInfo-format-01904)"},
+    {"VUID-VkSamplerYcbcrConversionCreateInfo-format-parameter", "format must be a valid VkFormat value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionCreateInfo-format-parameter)"},
+    {"VUID-VkSamplerYcbcrConversionCreateInfo-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkExternalFormatANDROID (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionCreateInfo-pNext-pNext)"},
+    {"VUID-VkSamplerYcbcrConversionCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionCreateInfo-sType-sType)"},
+    {"VUID-VkSamplerYcbcrConversionCreateInfo-xChromaOffset-01651", "If the format does not support VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT, xChromaOffset and yChromaOffset must not be VK_CHROMA_LOCATION_COSITED_EVEN (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionCreateInfo-xChromaOffset-01651)"},
+    {"VUID-VkSamplerYcbcrConversionCreateInfo-xChromaOffset-01652", "If the format does not support VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT, xChromaOffset and yChromaOffset must not be VK_CHROMA_LOCATION_MIDPOINT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionCreateInfo-xChromaOffset-01652)"},
+    {"VUID-VkSamplerYcbcrConversionCreateInfo-xChromaOffset-parameter", "xChromaOffset must be a valid VkChromaLocation value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionCreateInfo-xChromaOffset-parameter)"},
+    {"VUID-VkSamplerYcbcrConversionCreateInfo-yChromaOffset-parameter", "yChromaOffset must be a valid VkChromaLocation value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionCreateInfo-yChromaOffset-parameter)"},
+    {"VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrModel-01655", "If ycbcrModel is not VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, then components.r, components.g, and components.b must correspond to channels of the format; that is, components.r, components.g, and components.b must not be VK_COMPONENT_SWIZZLE_ZERO or VK_COMPONENT_SWIZZLE_ONE, and must not correspond to a channel which contains zero or one as a consequence of conversion to RGBA (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrModel-01655)"},
+    {"VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrModel-parameter", "ycbcrModel must be a valid VkSamplerYcbcrModelConversion value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrModel-parameter)"},
+    {"VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrRange-02748", "If ycbcrRange is VK_SAMPLER_YCBCR_RANGE_ITU_NARROW then the R, G and B channels obtained by applying the component swizzle to format must each have a bit-depth greater than or equal to 8. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrRange-02748)"},
+    {"VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrRange-parameter", "ycbcrRange must be a valid VkSamplerYcbcrRange value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrRange-parameter)"},
+    {"VUID-VkSamplerYcbcrConversionImageFormatProperties-sType-sType", "sType must be VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionImageFormatProperties-sType-sType)"},
+    {"VUID-VkSamplerYcbcrConversionInfo-conversion-parameter", "conversion must be a valid VkSamplerYcbcrConversion handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionInfo-conversion-parameter)"},
+    {"VUID-VkSamplerYcbcrConversionInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSamplerYcbcrConversionInfo-sType-sType)"},
+    {"VUID-VkSemaphoreCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreCreateInfo-flags-zerobitmask)"},
+    {"VUID-VkSemaphoreCreateInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkExportSemaphoreCreateInfo, VkExportSemaphoreWin32HandleInfoKHR, or VkSemaphoreTypeCreateInfoKHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreCreateInfo-pNext-pNext)"},
+    {"VUID-VkSemaphoreCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreCreateInfo-sType-sType)"},
+    {"VUID-VkSemaphoreCreateInfo-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreCreateInfo-sType-unique)"},
+    {"VUID-VkSemaphoreGetFdInfoKHR-handleType-01132", "handleType must have been included in VkExportSemaphoreCreateInfo::handleTypes when semaphore's current payload was created. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreGetFdInfoKHR-handleType-01132)"},
+    {"VUID-VkSemaphoreGetFdInfoKHR-handleType-01134", "If handleType refers to a handle type with copy payload transference semantics, as defined below in Importing Semaphore Payloads, there must be no queue waiting on semaphore. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreGetFdInfoKHR-handleType-01134)"},
+    {"VUID-VkSemaphoreGetFdInfoKHR-handleType-01135", "If handleType refers to a handle type with copy payload transference semantics, semaphore must be signaled, or have an associated semaphore signal operation pending execution. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreGetFdInfoKHR-handleType-01135)"},
+    {"VUID-VkSemaphoreGetFdInfoKHR-handleType-01136", "handleType must be defined as a POSIX file descriptor handle. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreGetFdInfoKHR-handleType-01136)"},
+    {"VUID-VkSemaphoreGetFdInfoKHR-handleType-03253", "If handleType refers to a handle type with copy payload transference semantics, semaphore must have been created with a VkSemaphoreTypeKHR of VK_SEMAPHORE_TYPE_BINARY_KHR. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreGetFdInfoKHR-handleType-03253)"},
+    {"VUID-VkSemaphoreGetFdInfoKHR-handleType-03254", "If handleType refers to a handle type with copy payload transference semantics, semaphore must have an associated semaphore signal operation that has been submitted for execution and any semaphore signal operations on which it depends (if any) must have also been submitted for execution. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreGetFdInfoKHR-handleType-03254)"},
+    {"VUID-VkSemaphoreGetFdInfoKHR-handleType-parameter", "handleType must be a valid VkExternalSemaphoreHandleTypeFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreGetFdInfoKHR-handleType-parameter)"},
+    {"VUID-VkSemaphoreGetFdInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreGetFdInfoKHR-pNext-pNext)"},
+    {"VUID-VkSemaphoreGetFdInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreGetFdInfoKHR-sType-sType)"},
+    {"VUID-VkSemaphoreGetFdInfoKHR-semaphore-01133", "semaphore must not currently have its payload replaced by an imported payload as described below in Importing Semaphore Payloads unless that imported payload's handle type was included in VkExternalSemaphoreProperties::exportFromImportedHandleTypes for handleType. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreGetFdInfoKHR-semaphore-01133)"},
+    {"VUID-VkSemaphoreGetFdInfoKHR-semaphore-parameter", "semaphore must be a valid VkSemaphore handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreGetFdInfoKHR-semaphore-parameter)"},
+    {"VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-01126", "handleType must have been included in VkExportSemaphoreCreateInfo::handleTypes when the semaphore's current payload was created. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-01126)"},
+    {"VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-01127", "If handleType is defined as an NT handle, vkGetSemaphoreWin32HandleKHR must be called no more than once for each valid unique combination of semaphore and handleType. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-01127)"},
+    {"VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-01129", "If handleType refers to a handle type with copy payload transference semantics, as defined below in Importing Semaphore Payloads, there must be no queue waiting on semaphore. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-01129)"},
+    {"VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-01130", "If handleType refers to a handle type with copy payload transference semantics, semaphore must be signaled, or have an associated semaphore signal operation pending execution. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-01130)"},
+    {"VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-01131", "handleType must be defined as an NT handle or a global share handle. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-01131)"},
+    {"VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-parameter", "handleType must be a valid VkExternalSemaphoreHandleTypeFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-parameter)"},
+    {"VUID-VkSemaphoreGetWin32HandleInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreGetWin32HandleInfoKHR-pNext-pNext)"},
+    {"VUID-VkSemaphoreGetWin32HandleInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreGetWin32HandleInfoKHR-sType-sType)"},
+    {"VUID-VkSemaphoreGetWin32HandleInfoKHR-semaphore-01128", "semaphore must not currently have its payload replaced by an imported payload as described below in Importing Semaphore Payloads unless that imported payload's handle type was included in VkExternalSemaphoreProperties::exportFromImportedHandleTypes for handleType. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreGetWin32HandleInfoKHR-semaphore-01128)"},
+    {"VUID-VkSemaphoreGetWin32HandleInfoKHR-semaphore-parameter", "semaphore must be a valid VkSemaphore handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreGetWin32HandleInfoKHR-semaphore-parameter)"},
+    {"VUID-VkSemaphoreSignalInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreSignalInfoKHR-pNext-pNext)"},
+    {"VUID-VkSemaphoreSignalInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreSignalInfoKHR-sType-sType)"},
+    {"VUID-VkSemaphoreSignalInfoKHR-semaphore-03257", "semaphore must have been created with a VkSemaphoreTypeKHR of VK_SEMAPHORE_TYPE_TIMELINE_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreSignalInfoKHR-semaphore-03257)"},
+    {"VUID-VkSemaphoreSignalInfoKHR-semaphore-parameter", "semaphore must be a valid VkSemaphore handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreSignalInfoKHR-semaphore-parameter)"},
+    {"VUID-VkSemaphoreSignalInfoKHR-value-03258", "value must have a value greater than the current value of the semaphore (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreSignalInfoKHR-value-03258)"},
+    {"VUID-VkSemaphoreSignalInfoKHR-value-03259", "value must be less than the value of any pending semaphore signal operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreSignalInfoKHR-value-03259)"},
+    {"VUID-VkSemaphoreSignalInfoKHR-value-03260", "value must have a value which does not differ from the current value of the semaphore or the value of any outstanding semaphore wait or signal operation on semaphore by more than maxTimelineSemaphoreValueDifference. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreSignalInfoKHR-value-03260)"},
+    {"VUID-VkSemaphoreTypeCreateInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreTypeCreateInfoKHR-sType-sType)"},
+    {"VUID-VkSemaphoreTypeCreateInfoKHR-semaphoreType-03279", "If semaphoreType is VK_SEMAPHORE_TYPE_BINARY_KHR, initialValue must be zero. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreTypeCreateInfoKHR-semaphoreType-03279)"},
+    {"VUID-VkSemaphoreTypeCreateInfoKHR-semaphoreType-parameter", "semaphoreType must be a valid VkSemaphoreTypeKHR value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreTypeCreateInfoKHR-semaphoreType-parameter)"},
+    {"VUID-VkSemaphoreTypeCreateInfoKHR-timelineSemaphore-03252", "If the timelineSemaphore feature is not enabled, semaphoreType must not equal VK_SEMAPHORE_TYPE_TIMELINE_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreTypeCreateInfoKHR-timelineSemaphore-03252)"},
+    {"VUID-VkSemaphoreWaitInfoKHR-flags-parameter", "flags must be a valid combination of VkSemaphoreWaitFlagBitsKHR values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreWaitInfoKHR-flags-parameter)"},
+    {"VUID-VkSemaphoreWaitInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreWaitInfoKHR-pNext-pNext)"},
+    {"VUID-VkSemaphoreWaitInfoKHR-pSemaphores-03256", "All of the elements of pSemaphores must reference a semaphore that was created with a VkSemaphoreTypeKHR of VK_SEMAPHORE_TYPE_TIMELINE_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreWaitInfoKHR-pSemaphores-03256)"},
+    {"VUID-VkSemaphoreWaitInfoKHR-pSemaphores-parameter", "pSemaphores must be a valid pointer to an array of semaphoreCount valid VkSemaphore handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreWaitInfoKHR-pSemaphores-parameter)"},
+    {"VUID-VkSemaphoreWaitInfoKHR-pValues-parameter", "pValues must be a valid pointer to an array of semaphoreCount uint64_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreWaitInfoKHR-pValues-parameter)"},
+    {"VUID-VkSemaphoreWaitInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreWaitInfoKHR-sType-sType)"},
+    {"VUID-VkSemaphoreWaitInfoKHR-semaphoreCount-arraylength", "semaphoreCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSemaphoreWaitInfoKHR-semaphoreCount-arraylength)"},
+    {"VUID-VkShaderModuleCreateInfo-codeSize-01085", "codeSize must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkShaderModuleCreateInfo-codeSize-01085)"},
+    {"VUID-VkShaderModuleCreateInfo-codeSize-01086", "codeSize must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkShaderModuleCreateInfo-codeSize-01086)"},
+    {"VUID-VkShaderModuleCreateInfo-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkShaderModuleCreateInfo-flags-zerobitmask)"},
+    {"VUID-VkShaderModuleCreateInfo-pCode-01087", "pCode must point to valid SPIR-V code, formatted and packed as described by the Khronos SPIR-V Specification (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkShaderModuleCreateInfo-pCode-01087)"},
+    {"VUID-VkShaderModuleCreateInfo-pCode-01088", "pCode must adhere to the validation rules described by the Validation Rules within a Module section of the SPIR-V Environment appendix (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkShaderModuleCreateInfo-pCode-01088)"},
+    {"VUID-VkShaderModuleCreateInfo-pCode-01089", "pCode must declare the Shader capability for SPIR-V code (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkShaderModuleCreateInfo-pCode-01089)"},
+    {"VUID-VkShaderModuleCreateInfo-pCode-01090", "pCode must not declare any capability that is not supported by the API, as described by the Capabilities section of the SPIR-V Environment appendix (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkShaderModuleCreateInfo-pCode-01090)"},
+    {"VUID-VkShaderModuleCreateInfo-pCode-01091", "If pCode declares any of the capabilities listed as optional in the SPIR-V Environment appendix, the corresponding feature(s) must be enabled. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkShaderModuleCreateInfo-pCode-01091)"},
+    {"VUID-VkShaderModuleCreateInfo-pCode-01376", "If pCode is a pointer to SPIR-V code, codeSize must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkShaderModuleCreateInfo-pCode-01376)"},
+    {"VUID-VkShaderModuleCreateInfo-pCode-01377", "pCode must point to either valid SPIR-V code, formatted and packed as described by the Khronos SPIR-V Specification or valid GLSL code which must be written to the GL_KHR_vulkan_glsl extension specification (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkShaderModuleCreateInfo-pCode-01377)"},
+    {"VUID-VkShaderModuleCreateInfo-pCode-01378", "If pCode is a pointer to SPIR-V code, that code must adhere to the validation rules described by the Validation Rules within a Module section of the SPIR-V Environment appendix (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkShaderModuleCreateInfo-pCode-01378)"},
+    {"VUID-VkShaderModuleCreateInfo-pCode-01379", "If pCode is a pointer to GLSL code, it must be valid GLSL code written to the GL_KHR_vulkan_glsl GLSL extension specification (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkShaderModuleCreateInfo-pCode-01379)"},
+    {"VUID-VkShaderModuleCreateInfo-pCode-parameter", "pCode must be a valid pointer to an array of (codeSize/4) uint32_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkShaderModuleCreateInfo-pCode-parameter)"},
+    {"VUID-VkShaderModuleCreateInfo-pNext-pNext", "pNext must be NULL or a pointer to a valid instance of VkShaderModuleValidationCacheCreateInfoEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkShaderModuleCreateInfo-pNext-pNext)"},
+    {"VUID-VkShaderModuleCreateInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkShaderModuleCreateInfo-sType-sType)"},
+    {"VUID-VkShaderModuleValidationCacheCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkShaderModuleValidationCacheCreateInfoEXT-sType-sType)"},
+    {"VUID-VkShaderModuleValidationCacheCreateInfoEXT-validationCache-parameter", "validationCache must be a valid VkValidationCacheEXT handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkShaderModuleValidationCacheCreateInfoEXT-validationCache-parameter)"},
+    {"VUID-VkShadingRatePaletteNV-pShadingRatePaletteEntries-parameter", "pShadingRatePaletteEntries must be a valid pointer to an array of shadingRatePaletteEntryCount valid VkShadingRatePaletteEntryNV values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkShadingRatePaletteNV-pShadingRatePaletteEntries-parameter)"},
+    {"VUID-VkShadingRatePaletteNV-shadingRatePaletteEntryCount-02071", "shadingRatePaletteEntryCount must be between 1 and VkPhysicalDeviceShadingRateImagePropertiesNV::shadingRatePaletteSize, inclusive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkShadingRatePaletteNV-shadingRatePaletteEntryCount-02071)"},
+    {"VUID-VkShadingRatePaletteNV-shadingRatePaletteEntryCount-arraylength", "shadingRatePaletteEntryCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkShadingRatePaletteNV-shadingRatePaletteEntryCount-arraylength)"},
+    {"VUID-VkSharedPresentSurfaceCapabilitiesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSharedPresentSurfaceCapabilitiesKHR-sType-sType)"},
+    {"VUID-VkSparseBufferMemoryBindInfo-bindCount-arraylength", "bindCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseBufferMemoryBindInfo-bindCount-arraylength)"},
+    {"VUID-VkSparseBufferMemoryBindInfo-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseBufferMemoryBindInfo-buffer-parameter)"},
+    {"VUID-VkSparseBufferMemoryBindInfo-pBinds-parameter", "pBinds must be a valid pointer to an array of bindCount valid VkSparseMemoryBind structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseBufferMemoryBindInfo-pBinds-parameter)"},
+    {"VUID-VkSparseImageFormatProperties2-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageFormatProperties2-pNext-pNext)"},
+    {"VUID-VkSparseImageFormatProperties2-sType-sType", "sType must be VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageFormatProperties2-sType-sType)"},
+    {"VUID-VkSparseImageMemoryBind-extent-01108", "extent.width must either be a multiple of the sparse image block width of the image, or else (extent.width + offset.x) must equal the width of the image subresource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageMemoryBind-extent-01108)"},
+    {"VUID-VkSparseImageMemoryBind-extent-01110", "extent.height must either be a multiple of the sparse image block height of the image, or else (extent.height + offset.y) must equal the height of the image subresource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageMemoryBind-extent-01110)"},
+    {"VUID-VkSparseImageMemoryBind-extent-01112", "extent.depth must either be a multiple of the sparse image block depth of the image, or else (extent.depth + offset.z) must equal the depth of the image subresource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageMemoryBind-extent-01112)"},
+    {"VUID-VkSparseImageMemoryBind-flags-parameter", "flags must be a valid combination of VkSparseMemoryBindFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageMemoryBind-flags-parameter)"},
+    {"VUID-VkSparseImageMemoryBind-memory-01104", "If the sparse aliased residency feature is not enabled, and if any other resources are bound to ranges of memory, the range of memory being bound must not overlap with those bound ranges (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageMemoryBind-memory-01104)"},
+    {"VUID-VkSparseImageMemoryBind-memory-01105", "memory and memoryOffset must match the memory requirements of the calling command's image, as described in section Resource Memory Association (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageMemoryBind-memory-01105)"},
+    {"VUID-VkSparseImageMemoryBind-memory-02732", "If memory was created with VkExportMemoryAllocateInfo::handleTypes not equal to 0, at least one handle type it contained must also have been set in VkExternalMemoryImageCreateInfo::handleTypes when the image was created. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageMemoryBind-memory-02732)"},
+    {"VUID-VkSparseImageMemoryBind-memory-02733", "If memory was created by a memory import operation, the external handle type of the imported memory must also have been set in VkExternalMemoryImageCreateInfo::handleTypes when image was created. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageMemoryBind-memory-02733)"},
+    {"VUID-VkSparseImageMemoryBind-memory-parameter", "If memory is not VK_NULL_HANDLE, memory must be a valid VkDeviceMemory handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageMemoryBind-memory-parameter)"},
+    {"VUID-VkSparseImageMemoryBind-offset-01107", "offset.x must be a multiple of the sparse image block width (VkSparseImageFormatProperties::imageGranularity.width) of the image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageMemoryBind-offset-01107)"},
+    {"VUID-VkSparseImageMemoryBind-offset-01109", "offset.y must be a multiple of the sparse image block height (VkSparseImageFormatProperties::imageGranularity.height) of the image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageMemoryBind-offset-01109)"},
+    {"VUID-VkSparseImageMemoryBind-offset-01111", "offset.z must be a multiple of the sparse image block depth (VkSparseImageFormatProperties::imageGranularity.depth) of the image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageMemoryBind-offset-01111)"},
+    {"VUID-VkSparseImageMemoryBind-subresource-01106", "subresource must be a valid image subresource for image (see Image Views) (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageMemoryBind-subresource-01106)"},
+    {"VUID-VkSparseImageMemoryBind-subresource-parameter", "subresource must be a valid VkImageSubresource structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageMemoryBind-subresource-parameter)"},
+    {"VUID-VkSparseImageMemoryBindInfo-bindCount-arraylength", "bindCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageMemoryBindInfo-bindCount-arraylength)"},
+    {"VUID-VkSparseImageMemoryBindInfo-image-parameter", "image must be a valid VkImage handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageMemoryBindInfo-image-parameter)"},
+    {"VUID-VkSparseImageMemoryBindInfo-pBinds-parameter", "pBinds must be a valid pointer to an array of bindCount valid VkSparseImageMemoryBind structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageMemoryBindInfo-pBinds-parameter)"},
+    {"VUID-VkSparseImageMemoryBindInfo-subresource-01722", "The subresource.mipLevel member of each element of pBinds must be less than the mipLevels specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageMemoryBindInfo-subresource-01722)"},
+    {"VUID-VkSparseImageMemoryBindInfo-subresource-01723", "The subresource.arrayLayer member of each element of pBinds must be less than the arrayLayers specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageMemoryBindInfo-subresource-01723)"},
+    {"VUID-VkSparseImageMemoryRequirements2-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageMemoryRequirements2-pNext-pNext)"},
+    {"VUID-VkSparseImageMemoryRequirements2-sType-sType", "sType must be VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageMemoryRequirements2-sType-sType)"},
+    {"VUID-VkSparseImageOpaqueMemoryBindInfo-bindCount-arraylength", "bindCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageOpaqueMemoryBindInfo-bindCount-arraylength)"},
+    {"VUID-VkSparseImageOpaqueMemoryBindInfo-image-parameter", "image must be a valid VkImage handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageOpaqueMemoryBindInfo-image-parameter)"},
+    {"VUID-VkSparseImageOpaqueMemoryBindInfo-pBinds-01103", "If the flags member of any element of pBinds contains VK_SPARSE_MEMORY_BIND_METADATA_BIT, the binding range defined must be within the mip tail region of the metadata aspect of image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageOpaqueMemoryBindInfo-pBinds-01103)"},
+    {"VUID-VkSparseImageOpaqueMemoryBindInfo-pBinds-parameter", "pBinds must be a valid pointer to an array of bindCount valid VkSparseMemoryBind structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseImageOpaqueMemoryBindInfo-pBinds-parameter)"},
+    {"VUID-VkSparseMemoryBind-flags-parameter", "flags must be a valid combination of VkSparseMemoryBindFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseMemoryBind-flags-parameter)"},
+    {"VUID-VkSparseMemoryBind-memory-01096", "If memory is not VK_NULL_HANDLE, memory and memoryOffset must match the memory requirements of the resource, as described in section Resource Memory Association (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseMemoryBind-memory-01096)"},
+    {"VUID-VkSparseMemoryBind-memory-01097", "If memory is not VK_NULL_HANDLE, memory must not have been created with a memory type that reports VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseMemoryBind-memory-01097)"},
+    {"VUID-VkSparseMemoryBind-memory-02730", "If memory was created with VkExportMemoryAllocateInfo::handleTypes not equal to 0, at least one handle type it contained must also have been set in VkExternalMemoryBufferCreateInfo::handleTypes or VkExternalMemoryImageCreateInfo::handleTypes when the resource was created. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseMemoryBind-memory-02730)"},
+    {"VUID-VkSparseMemoryBind-memory-02731", "If memory was created by a memory import operation, the external handle type of the imported memory must also have been set in VkExternalMemoryBufferCreateInfo::handleTypes or VkExternalMemoryImageCreateInfo::handleTypes when the resource was created. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseMemoryBind-memory-02731)"},
+    {"VUID-VkSparseMemoryBind-memory-parameter", "If memory is not VK_NULL_HANDLE, memory must be a valid VkDeviceMemory handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseMemoryBind-memory-parameter)"},
+    {"VUID-VkSparseMemoryBind-memoryOffset-01101", "memoryOffset must be less than the size of memory (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseMemoryBind-memoryOffset-01101)"},
+    {"VUID-VkSparseMemoryBind-resourceOffset-01099", "resourceOffset must be less than the size of the resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseMemoryBind-resourceOffset-01099)"},
+    {"VUID-VkSparseMemoryBind-size-01098", "size must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseMemoryBind-size-01098)"},
+    {"VUID-VkSparseMemoryBind-size-01100", "size must be less than or equal to the size of the resource minus resourceOffset (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseMemoryBind-size-01100)"},
+    {"VUID-VkSparseMemoryBind-size-01102", "size must be less than or equal to the size of memory minus memoryOffset (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSparseMemoryBind-size-01102)"},
+    {"VUID-VkSpecializationInfo-offset-00773", "The offset member of each element of pMapEntries must be less than dataSize (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSpecializationInfo-offset-00773)"},
+    {"VUID-VkSpecializationInfo-pData-parameter", "If dataSize is not 0, pData must be a valid pointer to an array of dataSize bytes (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSpecializationInfo-pData-parameter)"},
+    {"VUID-VkSpecializationInfo-pMapEntries-00774", "The size member of each element of pMapEntries must be less than or equal to dataSize minus offset (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSpecializationInfo-pMapEntries-00774)"},
+    {"VUID-VkSpecializationInfo-pMapEntries-parameter", "If mapEntryCount is not 0, pMapEntries must be a valid pointer to an array of mapEntryCount valid VkSpecializationMapEntry structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSpecializationInfo-pMapEntries-parameter)"},
+    {"VUID-VkSpecializationMapEntry-constantID-00776", "For a constantID specialization constant declared in a shader, size must match the byte size of the constantID. If the specialization constant is of type boolean, size must be the byte size of VkBool32 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSpecializationMapEntry-constantID-00776)"},
+    {"VUID-VkStencilOpState-compareOp-parameter", "compareOp must be a valid VkCompareOp value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkStencilOpState-compareOp-parameter)"},
+    {"VUID-VkStencilOpState-depthFailOp-parameter", "depthFailOp must be a valid VkStencilOp value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkStencilOpState-depthFailOp-parameter)"},
+    {"VUID-VkStencilOpState-failOp-parameter", "failOp must be a valid VkStencilOp value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkStencilOpState-failOp-parameter)"},
+    {"VUID-VkStencilOpState-passOp-parameter", "passOp must be a valid VkStencilOp value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkStencilOpState-passOp-parameter)"},
+    {"VUID-VkStreamDescriptorSurfaceCreateInfoGGP-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkStreamDescriptorSurfaceCreateInfoGGP-flags-zerobitmask)"},
+    {"VUID-VkStreamDescriptorSurfaceCreateInfoGGP-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkStreamDescriptorSurfaceCreateInfoGGP-pNext-pNext)"},
+    {"VUID-VkStreamDescriptorSurfaceCreateInfoGGP-sType-sType", "sType must be VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkStreamDescriptorSurfaceCreateInfoGGP-sType-sType)"},
+    {"VUID-VkStreamDescriptorSurfaceCreateInfoGGP-streamDescriptor-02681", "streamDescriptor must be a valid GgpStreamDescriptor (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkStreamDescriptorSurfaceCreateInfoGGP-streamDescriptor-02681)"},
+    {"VUID-VkSubmitInfo-commonparent", "Each of the elements of pCommandBuffers, the elements of pSignalSemaphores, and the elements of pWaitSemaphores that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubmitInfo-commonparent)"},
+    {"VUID-VkSubmitInfo-pCommandBuffers-00075", "Each element of pCommandBuffers must not have been allocated with VK_COMMAND_BUFFER_LEVEL_SECONDARY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubmitInfo-pCommandBuffers-00075)"},
+    {"VUID-VkSubmitInfo-pCommandBuffers-parameter", "If commandBufferCount is not 0, pCommandBuffers must be a valid pointer to an array of commandBufferCount valid VkCommandBuffer handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubmitInfo-pCommandBuffers-parameter)"},
+    {"VUID-VkSubmitInfo-pNext-03240", "If the pNext chain of this structure includes a VkTimelineSemaphoreSubmitInfoKHR structure and any element of pWaitSemaphores was created with a VkSemaphoreTypeKHR of VK_SEMAPHORE_TYPE_TIMELINE_KHR, then its waitSemaphoreValueCount member must equal waitSemaphoreCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubmitInfo-pNext-03240)"},
+    {"VUID-VkSubmitInfo-pNext-03241", "If the pNext chain of this structure includes a VkTimelineSemaphoreSubmitInfoKHR structure and any element of pSignalSemaphores was created with a VkSemaphoreTypeKHR of VK_SEMAPHORE_TYPE_TIMELINE_KHR, then its signalSemaphoreValueCount member must equal signalSemaphoreCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubmitInfo-pNext-03241)"},
+    {"VUID-VkSubmitInfo-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkD3D12FenceSubmitInfoKHR, VkDeviceGroupSubmitInfo, VkPerformanceQuerySubmitInfoKHR, VkProtectedSubmitInfo, VkTimelineSemaphoreSubmitInfoKHR, VkWin32KeyedMutexAcquireReleaseInfoKHR, or VkWin32KeyedMutexAcquireReleaseInfoNV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubmitInfo-pNext-pNext)"},
+    {"VUID-VkSubmitInfo-pSignalSemaphores-03242", "For each element of pSignalSemaphores created with a VkSemaphoreTypeKHR of VK_SEMAPHORE_TYPE_TIMELINE_KHR the corresponding element of VkTimelineSemaphoreSubmitInfoKHR::pSignalSemaphoreValues must have a value greater than the current value of the semaphore when the semaphore signal operation is executed (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubmitInfo-pSignalSemaphores-03242)"},
+    {"VUID-VkSubmitInfo-pSignalSemaphores-03244", "For each element of pSignalSemaphores created with a VkSemaphoreTypeKHR of VK_SEMAPHORE_TYPE_TIMELINE_KHR the corresponding element of VkTimelineSemaphoreSubmitInfoKHR::pSignalSemaphoreValues must have a value which does not differ from the current value of the semaphore or the value of any outstanding semaphore wait or signal operation on that semaphore by more than maxTimelineSemaphoreValueDifference. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubmitInfo-pSignalSemaphores-03244)"},
+    {"VUID-VkSubmitInfo-pSignalSemaphores-parameter", "If signalSemaphoreCount is not 0, pSignalSemaphores must be a valid pointer to an array of signalSemaphoreCount valid VkSemaphore handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubmitInfo-pSignalSemaphores-parameter)"},
+    {"VUID-VkSubmitInfo-pWaitDstStageMask-00076", "If the geometry shaders feature is not enabled, each element of pWaitDstStageMask must not contain VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubmitInfo-pWaitDstStageMask-00076)"},
+    {"VUID-VkSubmitInfo-pWaitDstStageMask-00077", "If the tessellation shaders feature is not enabled, each element of pWaitDstStageMask must not contain VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT or VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubmitInfo-pWaitDstStageMask-00077)"},
+    {"VUID-VkSubmitInfo-pWaitDstStageMask-00078", "Each element of pWaitDstStageMask must not include VK_PIPELINE_STAGE_HOST_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubmitInfo-pWaitDstStageMask-00078)"},
+    {"VUID-VkSubmitInfo-pWaitDstStageMask-02089", "If the mesh shaders feature is not enabled, each element of pWaitDstStageMask must not contain VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubmitInfo-pWaitDstStageMask-02089)"},
+    {"VUID-VkSubmitInfo-pWaitDstStageMask-02090", "If the task shaders feature is not enabled, each element of pWaitDstStageMask must not contain VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubmitInfo-pWaitDstStageMask-02090)"},
+    {"VUID-VkSubmitInfo-pWaitDstStageMask-parameter", "If waitSemaphoreCount is not 0, pWaitDstStageMask must be a valid pointer to an array of waitSemaphoreCount valid combinations of VkPipelineStageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubmitInfo-pWaitDstStageMask-parameter)"},
+    {"VUID-VkSubmitInfo-pWaitDstStageMask-requiredbitmask", "Each element of pWaitDstStageMask must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubmitInfo-pWaitDstStageMask-requiredbitmask)"},
+    {"VUID-VkSubmitInfo-pWaitSemaphores-03239", "If any element of pWaitSemaphores or pSignalSemaphores was created with a VkSemaphoreTypeKHR of VK_SEMAPHORE_TYPE_TIMELINE_KHR, then the pNext chain must include a VkTimelineSemaphoreSubmitInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubmitInfo-pWaitSemaphores-03239)"},
+    {"VUID-VkSubmitInfo-pWaitSemaphores-03243", "For each element of pWaitSemaphores created with a VkSemaphoreTypeKHR of VK_SEMAPHORE_TYPE_TIMELINE_KHR the corresponding element of VkTimelineSemaphoreSubmitInfoKHR::pWaitSemaphoreValues must have a value which does not differ from the current value of the semaphore or the value of any outstanding semaphore wait or signal operation on that semaphore by more than maxTimelineSemaphoreValueDifference. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubmitInfo-pWaitSemaphores-03243)"},
+    {"VUID-VkSubmitInfo-pWaitSemaphores-parameter", "If waitSemaphoreCount is not 0, pWaitSemaphores must be a valid pointer to an array of waitSemaphoreCount valid VkSemaphore handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubmitInfo-pWaitSemaphores-parameter)"},
+    {"VUID-VkSubmitInfo-sType-sType", "sType must be VK_STRUCTURE_TYPE_SUBMIT_INFO (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubmitInfo-sType-sType)"},
+    {"VUID-VkSubmitInfo-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubmitInfo-sType-unique)"},
+    {"VUID-VkSubpassBeginInfoKHR-contents-parameter", "contents must be a valid VkSubpassContents value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassBeginInfoKHR-contents-parameter)"},
+    {"VUID-VkSubpassBeginInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassBeginInfoKHR-pNext-pNext)"},
+    {"VUID-VkSubpassBeginInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassBeginInfoKHR-sType-sType)"},
+    {"VUID-VkSubpassDependency-dependencyFlags-02520", "If dependencyFlags includes VK_DEPENDENCY_VIEW_LOCAL_BIT, srcSubpass must not be equal to VK_SUBPASS_EXTERNAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-dependencyFlags-02520)"},
+    {"VUID-VkSubpassDependency-dependencyFlags-02521", "If dependencyFlags includes VK_DEPENDENCY_VIEW_LOCAL_BIT, dstSubpass must not be equal to VK_SUBPASS_EXTERNAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-dependencyFlags-02521)"},
+    {"VUID-VkSubpassDependency-dependencyFlags-parameter", "dependencyFlags must be a valid combination of VkDependencyFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-dependencyFlags-parameter)"},
+    {"VUID-VkSubpassDependency-dstAccessMask-00869", "Any access flag included in dstAccessMask must be supported by one of the pipeline stages in dstStageMask, as specified in the table of supported access types (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-dstAccessMask-00869)"},
+    {"VUID-VkSubpassDependency-dstAccessMask-parameter", "dstAccessMask must be a valid combination of VkAccessFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-dstAccessMask-parameter)"},
+    {"VUID-VkSubpassDependency-dstStageMask-00861", "If the geometry shaders feature is not enabled, dstStageMask must not contain VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-dstStageMask-00861)"},
+    {"VUID-VkSubpassDependency-dstStageMask-00863", "If the tessellation shaders feature is not enabled, dstStageMask must not contain VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT or VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-dstStageMask-00863)"},
+    {"VUID-VkSubpassDependency-dstStageMask-02101", "If the mesh shaders feature is not enabled, dstStageMask must not contain VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-dstStageMask-02101)"},
+    {"VUID-VkSubpassDependency-dstStageMask-02102", "If the task shaders feature is not enabled, dstStageMask must not contain VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-dstStageMask-02102)"},
+    {"VUID-VkSubpassDependency-dstStageMask-parameter", "dstStageMask must be a valid combination of VkPipelineStageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-dstStageMask-parameter)"},
+    {"VUID-VkSubpassDependency-dstStageMask-requiredbitmask", "dstStageMask must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-dstStageMask-requiredbitmask)"},
+    {"VUID-VkSubpassDependency-srcAccessMask-00868", "Any access flag included in srcAccessMask must be supported by one of the pipeline stages in srcStageMask, as specified in the table of supported access types (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-srcAccessMask-00868)"},
+    {"VUID-VkSubpassDependency-srcAccessMask-parameter", "srcAccessMask must be a valid combination of VkAccessFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-srcAccessMask-parameter)"},
+    {"VUID-VkSubpassDependency-srcStageMask-00860", "If the geometry shaders feature is not enabled, srcStageMask must not contain VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-srcStageMask-00860)"},
+    {"VUID-VkSubpassDependency-srcStageMask-00862", "If the tessellation shaders feature is not enabled, srcStageMask must not contain VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT or VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-srcStageMask-00862)"},
+    {"VUID-VkSubpassDependency-srcStageMask-02099", "If the mesh shaders feature is not enabled, srcStageMask must not contain VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-srcStageMask-02099)"},
+    {"VUID-VkSubpassDependency-srcStageMask-02100", "If the task shaders feature is not enabled, srcStageMask must not contain VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-srcStageMask-02100)"},
+    {"VUID-VkSubpassDependency-srcStageMask-parameter", "srcStageMask must be a valid combination of VkPipelineStageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-srcStageMask-parameter)"},
+    {"VUID-VkSubpassDependency-srcStageMask-requiredbitmask", "srcStageMask must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-srcStageMask-requiredbitmask)"},
+    {"VUID-VkSubpassDependency-srcSubpass-00864", "srcSubpass must be less than or equal to dstSubpass, unless one of them is VK_SUBPASS_EXTERNAL, to avoid cyclic dependencies and ensure a valid execution order (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-srcSubpass-00864)"},
+    {"VUID-VkSubpassDependency-srcSubpass-00865", "srcSubpass and dstSubpass must not both be equal to VK_SUBPASS_EXTERNAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-srcSubpass-00865)"},
+    {"VUID-VkSubpassDependency-srcSubpass-00867", "If srcSubpass is equal to dstSubpass and not all of the stages in srcStageMask and dstStageMask are framebuffer-space stages, the logically latest pipeline stage in srcStageMask must be logically earlier than or equal to the logically earliest pipeline stage in dstStageMask (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-srcSubpass-00867)"},
+    {"VUID-VkSubpassDependency-srcSubpass-00872", "If srcSubpass equals dstSubpass and that subpass has more than one bit set in the view mask, then dependencyFlags must include VK_DEPENDENCY_VIEW_LOCAL_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-srcSubpass-00872)"},
+    {"VUID-VkSubpassDependency-srcSubpass-02243", "If srcSubpass equals dstSubpass, and srcStageMask and dstStageMask both include a framebuffer-space stage, then dependencyFlags must include VK_DEPENDENCY_BY_REGION_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency-srcSubpass-02243)"},
+    {"VUID-VkSubpassDependency2KHR-dependencyFlags-03090", "If dependencyFlags includes VK_DEPENDENCY_VIEW_LOCAL_BIT, srcSubpass must not be equal to VK_SUBPASS_EXTERNAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-dependencyFlags-03090)"},
+    {"VUID-VkSubpassDependency2KHR-dependencyFlags-03091", "If dependencyFlags includes VK_DEPENDENCY_VIEW_LOCAL_BIT, dstSubpass must not be equal to VK_SUBPASS_EXTERNAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-dependencyFlags-03091)"},
+    {"VUID-VkSubpassDependency2KHR-dependencyFlags-03092", "If dependencyFlags does not include VK_DEPENDENCY_VIEW_LOCAL_BIT, viewOffset must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-dependencyFlags-03092)"},
+    {"VUID-VkSubpassDependency2KHR-dependencyFlags-parameter", "dependencyFlags must be a valid combination of VkDependencyFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-dependencyFlags-parameter)"},
+    {"VUID-VkSubpassDependency2KHR-dstAccessMask-03089", "Any access flag included in dstAccessMask must be supported by one of the pipeline stages in dstStageMask, as specified in the table of supported access types (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-dstAccessMask-03089)"},
+    {"VUID-VkSubpassDependency2KHR-dstAccessMask-parameter", "dstAccessMask must be a valid combination of VkAccessFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-dstAccessMask-parameter)"},
+    {"VUID-VkSubpassDependency2KHR-dstStageMask-02105", "If the mesh shaders feature is not enabled, dstStageMask must not contain VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-dstStageMask-02105)"},
+    {"VUID-VkSubpassDependency2KHR-dstStageMask-02106", "If the task shaders feature is not enabled, dstStageMask must not contain VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-dstStageMask-02106)"},
+    {"VUID-VkSubpassDependency2KHR-dstStageMask-03081", "If the geometry shaders feature is not enabled, dstStageMask must not contain VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-dstStageMask-03081)"},
+    {"VUID-VkSubpassDependency2KHR-dstStageMask-03083", "If the tessellation shaders feature is not enabled, dstStageMask must not contain VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT or VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-dstStageMask-03083)"},
+    {"VUID-VkSubpassDependency2KHR-dstStageMask-parameter", "dstStageMask must be a valid combination of VkPipelineStageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-dstStageMask-parameter)"},
+    {"VUID-VkSubpassDependency2KHR-dstStageMask-requiredbitmask", "dstStageMask must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-dstStageMask-requiredbitmask)"},
+    {"VUID-VkSubpassDependency2KHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-sType-sType)"},
+    {"VUID-VkSubpassDependency2KHR-srcAccessMask-03088", "Any access flag included in srcAccessMask must be supported by one of the pipeline stages in srcStageMask, as specified in the table of supported access types (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-srcAccessMask-03088)"},
+    {"VUID-VkSubpassDependency2KHR-srcAccessMask-parameter", "srcAccessMask must be a valid combination of VkAccessFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-srcAccessMask-parameter)"},
+    {"VUID-VkSubpassDependency2KHR-srcStageMask-02103", "If the mesh shaders feature is not enabled, srcStageMask must not contain VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-srcStageMask-02103)"},
+    {"VUID-VkSubpassDependency2KHR-srcStageMask-02104", "If the task shaders feature is not enabled, srcStageMask must not contain VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-srcStageMask-02104)"},
+    {"VUID-VkSubpassDependency2KHR-srcStageMask-03080", "If the geometry shaders feature is not enabled, srcStageMask must not contain VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-srcStageMask-03080)"},
+    {"VUID-VkSubpassDependency2KHR-srcStageMask-03082", "If the tessellation shaders feature is not enabled, srcStageMask must not contain VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT or VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-srcStageMask-03082)"},
+    {"VUID-VkSubpassDependency2KHR-srcStageMask-parameter", "srcStageMask must be a valid combination of VkPipelineStageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-srcStageMask-parameter)"},
+    {"VUID-VkSubpassDependency2KHR-srcStageMask-requiredbitmask", "srcStageMask must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-srcStageMask-requiredbitmask)"},
+    {"VUID-VkSubpassDependency2KHR-srcSubpass-02245", "If srcSubpass equals dstSubpass, and srcStageMask and dstStageMask both include a framebuffer-space stage, then dependencyFlags must include VK_DEPENDENCY_BY_REGION_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-srcSubpass-02245)"},
+    {"VUID-VkSubpassDependency2KHR-srcSubpass-03084", "srcSubpass must be less than or equal to dstSubpass, unless one of them is VK_SUBPASS_EXTERNAL, to avoid cyclic dependencies and ensure a valid execution order (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-srcSubpass-03084)"},
+    {"VUID-VkSubpassDependency2KHR-srcSubpass-03085", "srcSubpass and dstSubpass must not both be equal to VK_SUBPASS_EXTERNAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-srcSubpass-03085)"},
+    {"VUID-VkSubpassDependency2KHR-srcSubpass-03087", "If srcSubpass is equal to dstSubpass and not all of the stages in srcStageMask and dstStageMask are framebuffer-space stages, the logically latest pipeline stage in srcStageMask must be logically earlier than or equal to the logically earliest pipeline stage in dstStageMask (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-srcSubpass-03087)"},
+    {"VUID-VkSubpassDependency2KHR-viewOffset-02530", "If viewOffset is not equal to 0, srcSubpass must not be equal to dstSubpass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-viewOffset-02530)"},
+    {"VUID-VkSubpassDependency2KHR-viewOffset-03093", "If viewOffset is not 0, srcSubpass must not be equal to dstSubpass. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDependency2KHR-viewOffset-03093)"},
+    {"VUID-VkSubpassDescription-attachment-00853", "The attachment member of each element of pPreserveAttachments must not be VK_ATTACHMENT_UNUSED (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription-attachment-00853)"},
+    {"VUID-VkSubpassDescription-colorAttachmentCount-00845", "colorAttachmentCount must be less than or equal to VkPhysicalDeviceLimits::maxColorAttachments (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription-colorAttachmentCount-00845)"},
+    {"VUID-VkSubpassDescription-flags-00856", "If flags includes VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX, it must also include VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription-flags-00856)"},
+    {"VUID-VkSubpassDescription-flags-parameter", "flags must be a valid combination of VkSubpassDescriptionFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription-flags-parameter)"},
+    {"VUID-VkSubpassDescription-layout-02519", "If any attachment is used by more than one VkAttachmentReference member, then each use must use the same layout (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription-layout-02519)"},
+    {"VUID-VkSubpassDescription-loadOp-00846", "If the first use of an attachment in this render pass is as an input attachment, and the attachment is not also used as a color or depth/stencil attachment in the same subpass, then loadOp must not be VK_ATTACHMENT_LOAD_OP_CLEAR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription-loadOp-00846)"},
+    {"VUID-VkSubpassDescription-pColorAttachments-01417", "All attachments in pColorAttachments that are not VK_ATTACHMENT_UNUSED must have the same sample count (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription-pColorAttachments-01417)"},
+    {"VUID-VkSubpassDescription-pColorAttachments-01506", "If the VK_AMD_mixed_attachment_samples extension is enabled, and all attachments in pColorAttachments that are not VK_ATTACHMENT_UNUSED must have a sample count that is smaller than or equal to the sample count of pDepthStencilAttachment if it is not VK_ATTACHMENT_UNUSED (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription-pColorAttachments-01506)"},
+    {"VUID-VkSubpassDescription-pColorAttachments-02648", "All attachments in pColorAttachments that are not VK_ATTACHMENT_UNUSED must have formats whose features contain VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription-pColorAttachments-02648)"},
+    {"VUID-VkSubpassDescription-pColorAttachments-parameter", "If colorAttachmentCount is not 0, pColorAttachments must be a valid pointer to an array of colorAttachmentCount valid VkAttachmentReference structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription-pColorAttachments-parameter)"},
+    {"VUID-VkSubpassDescription-pDepthStencilAttachment-01418", "If neither the VK_AMD_mixed_attachment_samples nor the VK_NV_framebuffer_mixed_samples extensions are enabled, and if pDepthStencilAttachment is not VK_ATTACHMENT_UNUSED and any attachments in pColorAttachments are not VK_ATTACHMENT_UNUSED, they must have the same sample count (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription-pDepthStencilAttachment-01418)"},
+    {"VUID-VkSubpassDescription-pDepthStencilAttachment-02650", "If pDepthStencilAttachment is not NULL and the attachment is not VK_ATTACHMENT_UNUSED then it must have a format whose features contain VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription-pDepthStencilAttachment-02650)"},
+    {"VUID-VkSubpassDescription-pDepthStencilAttachment-parameter", "If pDepthStencilAttachment is not NULL, pDepthStencilAttachment must be a valid pointer to a valid VkAttachmentReference structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription-pDepthStencilAttachment-parameter)"},
+    {"VUID-VkSubpassDescription-pInputAttachments-02647", "All attachments in pInputAttachments that are not VK_ATTACHMENT_UNUSED must have formats whose features contain at least one of VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT or VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription-pInputAttachments-02647)"},
+    {"VUID-VkSubpassDescription-pInputAttachments-parameter", "If inputAttachmentCount is not 0, pInputAttachments must be a valid pointer to an array of inputAttachmentCount valid VkAttachmentReference structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription-pInputAttachments-parameter)"},
+    {"VUID-VkSubpassDescription-pPreserveAttachments-00854", "Each element of pPreserveAttachments must not also be an element of any other member of the subpass description (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription-pPreserveAttachments-00854)"},
+    {"VUID-VkSubpassDescription-pPreserveAttachments-parameter", "If preserveAttachmentCount is not 0, pPreserveAttachments must be a valid pointer to an array of preserveAttachmentCount uint32_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription-pPreserveAttachments-parameter)"},
+    {"VUID-VkSubpassDescription-pResolveAttachments-00847", "If pResolveAttachments is not NULL, for each resolve attachment that is not VK_ATTACHMENT_UNUSED, the corresponding color attachment must not be VK_ATTACHMENT_UNUSED (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription-pResolveAttachments-00847)"},
+    {"VUID-VkSubpassDescription-pResolveAttachments-00848", "If pResolveAttachments is not NULL, for each resolve attachment that is not VK_ATTACHMENT_UNUSED, the corresponding color attachment must not have a sample count of VK_SAMPLE_COUNT_1_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription-pResolveAttachments-00848)"},
+    {"VUID-VkSubpassDescription-pResolveAttachments-00849", "If pResolveAttachments is not NULL, each resolve attachment that is not VK_ATTACHMENT_UNUSED must have a sample count of VK_SAMPLE_COUNT_1_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription-pResolveAttachments-00849)"},
+    {"VUID-VkSubpassDescription-pResolveAttachments-00850", "If pResolveAttachments is not NULL, each resolve attachment that is not VK_ATTACHMENT_UNUSED must have the same VkFormat as its corresponding color attachment (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription-pResolveAttachments-00850)"},
+    {"VUID-VkSubpassDescription-pResolveAttachments-02649", "All attachments in pResolveAttachments that are not VK_ATTACHMENT_UNUSED must have formats whose features contain VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription-pResolveAttachments-02649)"},
+    {"VUID-VkSubpassDescription-pResolveAttachments-parameter", "If colorAttachmentCount is not 0, and pResolveAttachments is not NULL, pResolveAttachments must be a valid pointer to an array of colorAttachmentCount valid VkAttachmentReference structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription-pResolveAttachments-parameter)"},
+    {"VUID-VkSubpassDescription-pipelineBindPoint-00844", "pipelineBindPoint must be VK_PIPELINE_BIND_POINT_GRAPHICS (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription-pipelineBindPoint-00844)"},
+    {"VUID-VkSubpassDescription-pipelineBindPoint-parameter", "pipelineBindPoint must be a valid VkPipelineBindPoint value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription-pipelineBindPoint-parameter)"},
+    {"VUID-VkSubpassDescription2KHR-attachment-02799", "If the attachment member of any element of pInputAttachments is not VK_ATTACHMENT_UNUSED, then the aspectMask member must be a valid combination of VkImageAspectFlagBits (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription2KHR-attachment-02799)"},
+    {"VUID-VkSubpassDescription2KHR-attachment-02800", "If the attachment member of any element of pInputAttachments is not VK_ATTACHMENT_UNUSED, then the aspectMask member must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription2KHR-attachment-02800)"},
+    {"VUID-VkSubpassDescription2KHR-attachment-02801", "If the attachment member of any element of pInputAttachments is not VK_ATTACHMENT_UNUSED, then the aspectMask member must not include VK_IMAGE_ASPECT_METADATA_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription2KHR-attachment-02801)"},
+    {"VUID-VkSubpassDescription2KHR-attachment-03073", "The attachment member of any element of pPreserveAttachments must not be VK_ATTACHMENT_UNUSED (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription2KHR-attachment-03073)"},
+    {"VUID-VkSubpassDescription2KHR-colorAttachmentCount-03063", "colorAttachmentCount must be less than or equal to VkPhysicalDeviceLimits::maxColorAttachments (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription2KHR-colorAttachmentCount-03063)"},
+    {"VUID-VkSubpassDescription2KHR-flags-03076", "If flags includes VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX, it must also include VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription2KHR-flags-03076)"},
+    {"VUID-VkSubpassDescription2KHR-flags-parameter", "flags must be a valid combination of VkSubpassDescriptionFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription2KHR-flags-parameter)"},
+    {"VUID-VkSubpassDescription2KHR-layout-02528", "If any attachment is used by more than one VkAttachmentReference member, then each use must use the same layout (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription2KHR-layout-02528)"},
+    {"VUID-VkSubpassDescription2KHR-loadOp-03064", "If the first use of an attachment in this render pass is as an input attachment, and the attachment is not also used as a color or depth/stencil attachment in the same subpass, then loadOp must not be VK_ATTACHMENT_LOAD_OP_CLEAR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription2KHR-loadOp-03064)"},
+    {"VUID-VkSubpassDescription2KHR-pColorAttachments-03069", "All attachments in pColorAttachments that are not VK_ATTACHMENT_UNUSED must have the same sample count (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription2KHR-pColorAttachments-03069)"},
+    {"VUID-VkSubpassDescription2KHR-pColorAttachments-03070", "If the VK_AMD_mixed_attachment_samples extension is enabled, all attachments in pColorAttachments that are not VK_ATTACHMENT_UNUSED must have a sample count that is smaller than or equal to the sample count of pDepthStencilAttachment if it is not VK_ATTACHMENT_UNUSED (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription2KHR-pColorAttachments-03070)"},
+    {"VUID-VkSubpassDescription2KHR-pColorAttachments-parameter", "If colorAttachmentCount is not 0, pColorAttachments must be a valid pointer to an array of colorAttachmentCount valid VkAttachmentReference2KHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription2KHR-pColorAttachments-parameter)"},
+    {"VUID-VkSubpassDescription2KHR-pDepthStencilAttachment-03071", "If neither the VK_AMD_mixed_attachment_samples nor the VK_NV_framebuffer_mixed_samples extensions are enabled, and if pDepthStencilAttachment is not VK_ATTACHMENT_UNUSED and any attachments in pColorAttachments are not VK_ATTACHMENT_UNUSED, they must have the same sample count (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription2KHR-pDepthStencilAttachment-03071)"},
+    {"VUID-VkSubpassDescription2KHR-pDepthStencilAttachment-parameter", "If pDepthStencilAttachment is not NULL, pDepthStencilAttachment must be a valid pointer to a valid VkAttachmentReference2KHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription2KHR-pDepthStencilAttachment-parameter)"},
+    {"VUID-VkSubpassDescription2KHR-pInputAttachments-parameter", "If inputAttachmentCount is not 0, pInputAttachments must be a valid pointer to an array of inputAttachmentCount valid VkAttachmentReference2KHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription2KHR-pInputAttachments-parameter)"},
+    {"VUID-VkSubpassDescription2KHR-pPreserveAttachments-03074", "Any given element of pPreserveAttachments must not also be an element of any other member of the subpass description (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription2KHR-pPreserveAttachments-03074)"},
+    {"VUID-VkSubpassDescription2KHR-pPreserveAttachments-parameter", "If preserveAttachmentCount is not 0, pPreserveAttachments must be a valid pointer to an array of preserveAttachmentCount uint32_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription2KHR-pPreserveAttachments-parameter)"},
+    {"VUID-VkSubpassDescription2KHR-pResolveAttachments-03065", "If pResolveAttachments is not NULL, for each resolve attachment that does not have the value VK_ATTACHMENT_UNUSED, the corresponding color attachment must not have the value VK_ATTACHMENT_UNUSED (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription2KHR-pResolveAttachments-03065)"},
+    {"VUID-VkSubpassDescription2KHR-pResolveAttachments-03066", "If pResolveAttachments is not NULL, for each resolve attachment that is not VK_ATTACHMENT_UNUSED, the corresponding color attachment must not have a sample count of VK_SAMPLE_COUNT_1_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription2KHR-pResolveAttachments-03066)"},
+    {"VUID-VkSubpassDescription2KHR-pResolveAttachments-03067", "If pResolveAttachments is not NULL, each resolve attachment that is not VK_ATTACHMENT_UNUSED must have a sample count of VK_SAMPLE_COUNT_1_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription2KHR-pResolveAttachments-03067)"},
+    {"VUID-VkSubpassDescription2KHR-pResolveAttachments-03068", "Any given element of pResolveAttachments must have the same VkFormat as its corresponding color attachment (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription2KHR-pResolveAttachments-03068)"},
+    {"VUID-VkSubpassDescription2KHR-pResolveAttachments-parameter", "If colorAttachmentCount is not 0, and pResolveAttachments is not NULL, pResolveAttachments must be a valid pointer to an array of colorAttachmentCount valid VkAttachmentReference2KHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription2KHR-pResolveAttachments-parameter)"},
+    {"VUID-VkSubpassDescription2KHR-pipelineBindPoint-03062", "pipelineBindPoint must be VK_PIPELINE_BIND_POINT_GRAPHICS (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription2KHR-pipelineBindPoint-03062)"},
+    {"VUID-VkSubpassDescription2KHR-pipelineBindPoint-parameter", "pipelineBindPoint must be a valid VkPipelineBindPoint value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription2KHR-pipelineBindPoint-parameter)"},
+    {"VUID-VkSubpassDescription2KHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescription2KHR-sType-sType)"},
+    {"VUID-VkSubpassDescriptionDepthStencilResolveKHR-depthResolveMode-03183", "The value of depthResolveMode must be one of the bits set in VkPhysicalDeviceDepthStencilResolvePropertiesKHR::supportedDepthResolveModes or VK_RESOLVE_MODE_NONE_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescriptionDepthStencilResolveKHR-depthResolveMode-03183)"},
+    {"VUID-VkSubpassDescriptionDepthStencilResolveKHR-depthResolveMode-parameter", "depthResolveMode must be a valid VkResolveModeFlagBitsKHR value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescriptionDepthStencilResolveKHR-depthResolveMode-parameter)"},
+    {"VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-02651", "If pDepthStencilResolveAttachment is not NULL and does not have the value VK_ATTACHMENT_UNUSED then it must have a format whose features contain VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-02651)"},
+    {"VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03177", "If pDepthStencilResolveAttachment is not NULL and does not have the value VK_ATTACHMENT_UNUSED, pDepthStencilAttachment must not have the value VK_ATTACHMENT_UNUSED (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03177)"},
+    {"VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03178", "If pDepthStencilResolveAttachment is not NULL and does not have the value VK_ATTACHMENT_UNUSED, depthResolveMode and stencilResolveMode must not both be VK_RESOLVE_MODE_NONE_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03178)"},
+    {"VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03179", "If pDepthStencilResolveAttachment is not NULL and does not have the value VK_ATTACHMENT_UNUSED, pDepthStencilAttachment must not have a sample count of VK_SAMPLE_COUNT_1_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03179)"},
+    {"VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03180", "If pDepthStencilResolveAttachment is not NULL and does not have the value VK_ATTACHMENT_UNUSED, pDepthStencilResolveAttachment must have a sample count of VK_SAMPLE_COUNT_1_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03180)"},
+    {"VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03181", "If the VkFormat of pDepthStencilResolveAttachment has a depth component, then the VkFormat of pDepthStencilAttachment must have a depth component with the same number of bits and numerical type (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03181)"},
+    {"VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03182", "If the VkFormat of pDepthStencilResolveAttachment has a stencil component, then the VkFormat of pDepthStencilAttachment must have a stencil component with the same number of bits and numerical type (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03182)"},
+    {"VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03185", "If the VkFormat of pDepthStencilResolveAttachment has both depth and stencil components, VkPhysicalDeviceDepthStencilResolvePropertiesKHR::independentResolve is VK_FALSE, and VkPhysicalDeviceDepthStencilResolvePropertiesKHR::independentResolveNone is VK_FALSE, then the values of depthResolveMode and stencilResolveMode must be identical (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03185)"},
+    {"VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03186", "If the VkFormat of pDepthStencilResolveAttachment has both depth and stencil components, VkPhysicalDeviceDepthStencilResolvePropertiesKHR::independentResolve is VK_FALSE and VkPhysicalDeviceDepthStencilResolvePropertiesKHR::independentResolveNone is VK_TRUE, then the values of depthResolveMode and stencilResolveMode must be identical or one of them must be VK_RESOLVE_MODE_NONE_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-03186)"},
+    {"VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-parameter", "If pDepthStencilResolveAttachment is not NULL, pDepthStencilResolveAttachment must be a valid pointer to a valid VkAttachmentReference2KHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescriptionDepthStencilResolveKHR-pDepthStencilResolveAttachment-parameter)"},
+    {"VUID-VkSubpassDescriptionDepthStencilResolveKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescriptionDepthStencilResolveKHR-sType-sType)"},
+    {"VUID-VkSubpassDescriptionDepthStencilResolveKHR-stencilResolveMode-03184", "The value of stencilResolveMode must be one of the bits set in VkPhysicalDeviceDepthStencilResolvePropertiesKHR::supportedStencilResolveModes or VK_RESOLVE_MODE_NONE_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescriptionDepthStencilResolveKHR-stencilResolveMode-03184)"},
+    {"VUID-VkSubpassDescriptionDepthStencilResolveKHR-stencilResolveMode-parameter", "stencilResolveMode must be a valid VkResolveModeFlagBitsKHR value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassDescriptionDepthStencilResolveKHR-stencilResolveMode-parameter)"},
+    {"VUID-VkSubpassEndInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassEndInfoKHR-pNext-pNext)"},
+    {"VUID-VkSubpassEndInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassEndInfoKHR-sType-sType)"},
+    {"VUID-VkSubpassSampleLocationsEXT-sampleLocationsInfo-parameter", "sampleLocationsInfo must be a valid VkSampleLocationsInfoEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassSampleLocationsEXT-sampleLocationsInfo-parameter)"},
+    {"VUID-VkSubpassSampleLocationsEXT-subpassIndex-01532", "subpassIndex must be less than the subpassCount specified in VkRenderPassCreateInfo the render pass specified by VkRenderPassBeginInfo::renderPass was created with (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSubpassSampleLocationsEXT-subpassIndex-01532)"},
+    {"VUID-VkSurfaceCapabilities2EXT-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSurfaceCapabilities2EXT-pNext-pNext)"},
+    {"VUID-VkSurfaceCapabilities2EXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSurfaceCapabilities2EXT-sType-sType)"},
+    {"VUID-VkSurfaceCapabilities2EXT-supportedSurfaceCounters-01246", "supportedSurfaceCounters must not include VK_SURFACE_COUNTER_VBLANK_EXT unless the surface queried is a display surface. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSurfaceCapabilities2EXT-supportedSurfaceCounters-01246)"},
+    {"VUID-VkSurfaceCapabilities2KHR-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDisplayNativeHdrSurfaceCapabilitiesAMD, VkSharedPresentSurfaceCapabilitiesKHR, VkSurfaceCapabilitiesFullScreenExclusiveEXT, or VkSurfaceProtectedCapabilitiesKHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSurfaceCapabilities2KHR-pNext-pNext)"},
+    {"VUID-VkSurfaceCapabilities2KHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSurfaceCapabilities2KHR-sType-sType)"},
+    {"VUID-VkSurfaceCapabilities2KHR-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSurfaceCapabilities2KHR-sType-unique)"},
+    {"VUID-VkSurfaceCapabilitiesFullScreenExclusiveEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSurfaceCapabilitiesFullScreenExclusiveEXT-sType-sType)"},
+    {"VUID-VkSurfaceFormat2KHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSurfaceFormat2KHR-pNext-pNext)"},
+    {"VUID-VkSurfaceFormat2KHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSurfaceFormat2KHR-sType-sType)"},
+    {"VUID-VkSurfaceFullScreenExclusiveInfoEXT-fullScreenExclusive-parameter", "fullScreenExclusive must be a valid VkFullScreenExclusiveEXT value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSurfaceFullScreenExclusiveInfoEXT-fullScreenExclusive-parameter)"},
+    {"VUID-VkSurfaceFullScreenExclusiveInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSurfaceFullScreenExclusiveInfoEXT-sType-sType)"},
+    {"VUID-VkSurfaceFullScreenExclusiveWin32InfoEXT-hmonitor-02673", "hmonitor must be a valid HMONITOR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSurfaceFullScreenExclusiveWin32InfoEXT-hmonitor-02673)"},
+    {"VUID-VkSurfaceFullScreenExclusiveWin32InfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSurfaceFullScreenExclusiveWin32InfoEXT-sType-sType)"},
+    {"VUID-VkSurfaceProtectedCapabilitiesKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSurfaceProtectedCapabilitiesKHR-sType-sType)"},
+    {"VUID-VkSwapchainCounterCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCounterCreateInfoEXT-sType-sType)"},
+    {"VUID-VkSwapchainCounterCreateInfoEXT-surfaceCounters-01244", "The bits in surfaceCounters must be supported by VkSwapchainCreateInfoKHR::surface, as reported by vkGetPhysicalDeviceSurfaceCapabilities2EXT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCounterCreateInfoEXT-surfaceCounters-01244)"},
+    {"VUID-VkSwapchainCounterCreateInfoEXT-surfaceCounters-parameter", "surfaceCounters must be a valid combination of VkSurfaceCounterFlagBitsEXT values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCounterCreateInfoEXT-surfaceCounters-parameter)"},
+    {"VUID-VkSwapchainCreateInfoKHR-commonparent", "Both of oldSwapchain, and surface that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkInstance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-commonparent)"},
+    {"VUID-VkSwapchainCreateInfoKHR-compositeAlpha-01280", "compositeAlpha must be one of the bits present in the supportedCompositeAlpha member of the VkSurfaceCapabilitiesKHR structure returned by vkGetPhysicalDeviceSurfaceCapabilitiesKHR for the surface (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-compositeAlpha-01280)"},
+    {"VUID-VkSwapchainCreateInfoKHR-compositeAlpha-parameter", "compositeAlpha must be a valid VkCompositeAlphaFlagBitsKHR value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-compositeAlpha-parameter)"},
+    {"VUID-VkSwapchainCreateInfoKHR-flags-03168", "If flags contains VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR then the pNext chain must include a VkImageFormatListCreateInfoKHR structure with a viewFormatCount greater than zero and pViewFormats must have an element equal to imageFormat (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-flags-03168)"},
+    {"VUID-VkSwapchainCreateInfoKHR-flags-03187", "If flags contains VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR, then VkSurfaceProtectedCapabilitiesKHR::supportsProtected must be VK_TRUE in the VkSurfaceProtectedCapabilitiesKHR structure returned by vkGetPhysicalDeviceSurfaceCapabilities2KHR for surface (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-flags-03187)"},
+    {"VUID-VkSwapchainCreateInfoKHR-flags-parameter", "flags must be a valid combination of VkSwapchainCreateFlagBitsKHR values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-flags-parameter)"},
+    {"VUID-VkSwapchainCreateInfoKHR-imageArrayLayers-01275", "imageArrayLayers must be greater than 0 and less than or equal to the maxImageArrayLayers member of the VkSurfaceCapabilitiesKHR structure returned by vkGetPhysicalDeviceSurfaceCapabilitiesKHR for the surface (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-imageArrayLayers-01275)"},
+    {"VUID-VkSwapchainCreateInfoKHR-imageColorSpace-parameter", "imageColorSpace must be a valid VkColorSpaceKHR value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-imageColorSpace-parameter)"},
+    {"VUID-VkSwapchainCreateInfoKHR-imageExtent-01274", "imageExtent must be between minImageExtent and maxImageExtent, inclusive, where minImageExtent and maxImageExtent are members of the VkSurfaceCapabilitiesKHR structure returned by vkGetPhysicalDeviceSurfaceCapabilitiesKHR for the surface (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-imageExtent-01274)"},
+    {"VUID-VkSwapchainCreateInfoKHR-imageExtent-01689", "imageExtent members width and height must both be non-zero (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-imageExtent-01689)"},
+    {"VUID-VkSwapchainCreateInfoKHR-imageFormat-01273", "imageFormat and imageColorSpace must match the format and colorSpace members, respectively, of one of the VkSurfaceFormatKHR structures returned by vkGetPhysicalDeviceSurfaceFormatsKHR for the surface (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-imageFormat-01273)"},
+    {"VUID-VkSwapchainCreateInfoKHR-imageFormat-01778", "The implied image creation parameters of the swapchain must be supported as reported by vkGetPhysicalDeviceImageFormatProperties (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-imageFormat-01778)"},
+    {"VUID-VkSwapchainCreateInfoKHR-imageFormat-parameter", "imageFormat must be a valid VkFormat value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-imageFormat-parameter)"},
+    {"VUID-VkSwapchainCreateInfoKHR-imageSharingMode-01277", "If imageSharingMode is VK_SHARING_MODE_CONCURRENT, pQueueFamilyIndices must be a valid pointer to an array of queueFamilyIndexCount uint32_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-imageSharingMode-01277)"},
+    {"VUID-VkSwapchainCreateInfoKHR-imageSharingMode-01278", "If imageSharingMode is VK_SHARING_MODE_CONCURRENT, queueFamilyIndexCount must be greater than 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-imageSharingMode-01278)"},
+    {"VUID-VkSwapchainCreateInfoKHR-imageSharingMode-01393", "If imageSharingMode is VK_SHARING_MODE_CONCURRENT, each element of pQueueFamilyIndices must be unique and must be less than pQueueFamilyPropertyCount returned by vkGetPhysicalDeviceQueueFamilyProperties for the physicalDevice that was used to create device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-imageSharingMode-01393)"},
+    {"VUID-VkSwapchainCreateInfoKHR-imageSharingMode-01428", "If imageSharingMode is VK_SHARING_MODE_CONCURRENT, each element of pQueueFamilyIndices must be unique and must be less than pQueueFamilyPropertyCount returned by either vkGetPhysicalDeviceQueueFamilyProperties or vkGetPhysicalDeviceQueueFamilyProperties2 for the physicalDevice that was used to create device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-imageSharingMode-01428)"},
+    {"VUID-VkSwapchainCreateInfoKHR-imageSharingMode-parameter", "imageSharingMode must be a valid VkSharingMode value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-imageSharingMode-parameter)"},
+    {"VUID-VkSwapchainCreateInfoKHR-imageUsage-01276", "imageUsage must be a subset of the supported usage flags present in the supportedUsageFlags member of the VkSurfaceCapabilitiesKHR structure returned by vkGetPhysicalDeviceSurfaceCapabilitiesKHR for the surface (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-imageUsage-01276)"},
+    {"VUID-VkSwapchainCreateInfoKHR-imageUsage-01384", "If presentMode is VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR or VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR, imageUsage must be a subset of the supported usage flags present in the sharedPresentSupportedUsageFlags member of the VkSharedPresentSurfaceCapabilitiesKHR structure returned by vkGetPhysicalDeviceSurfaceCapabilities2KHR for surface (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-imageUsage-01384)"},
+    {"VUID-VkSwapchainCreateInfoKHR-imageUsage-parameter", "imageUsage must be a valid combination of VkImageUsageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-imageUsage-parameter)"},
+    {"VUID-VkSwapchainCreateInfoKHR-imageUsage-requiredbitmask", "imageUsage must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-imageUsage-requiredbitmask)"},
+    {"VUID-VkSwapchainCreateInfoKHR-minImageCount-01271", "minImageCount must be greater than or equal to the value returned in the minImageCount member of the VkSurfaceCapabilitiesKHR structure returned by vkGetPhysicalDeviceSurfaceCapabilitiesKHR for the surface (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-minImageCount-01271)"},
+    {"VUID-VkSwapchainCreateInfoKHR-minImageCount-01272", "minImageCount must be less than or equal to the value returned in the maxImageCount member of the VkSurfaceCapabilitiesKHR structure returned by vkGetPhysicalDeviceSurfaceCapabilitiesKHR for the surface if the returned maxImageCount is not zero (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-minImageCount-01272)"},
+    {"VUID-VkSwapchainCreateInfoKHR-minImageCount-01383", "minImageCount must be 1 if presentMode is either VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR or VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-minImageCount-01383)"},
+    {"VUID-VkSwapchainCreateInfoKHR-oldSwapchain-01933", "If oldSwapchain is not VK_NULL_HANDLE, oldSwapchain must be a non-retired swapchain associated with native window referred to by surface (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-oldSwapchain-01933)"},
+    {"VUID-VkSwapchainCreateInfoKHR-oldSwapchain-parameter", "If oldSwapchain is not VK_NULL_HANDLE, oldSwapchain must be a valid VkSwapchainKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-oldSwapchain-parameter)"},
+    {"VUID-VkSwapchainCreateInfoKHR-oldSwapchain-parent", "If oldSwapchain is a valid handle, it must have been created, allocated, or retrieved from surface (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-oldSwapchain-parent)"},
+    {"VUID-VkSwapchainCreateInfoKHR-pNext-02679", "If the pNext chain includes a VkSurfaceFullScreenExclusiveInfoEXT structure with its fullScreenExclusive member set to VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT, and surface was created using vkCreateWin32SurfaceKHR, a VkSurfaceFullScreenExclusiveWin32InfoEXT structure must be included in the pNext chain (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-pNext-02679)"},
+    {"VUID-VkSwapchainCreateInfoKHR-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDeviceGroupSwapchainCreateInfoKHR, VkImageFormatListCreateInfoKHR, VkSurfaceFullScreenExclusiveInfoEXT, VkSurfaceFullScreenExclusiveWin32InfoEXT, VkSwapchainCounterCreateInfoEXT, or VkSwapchainDisplayNativeHdrCreateInfoAMD (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-pNext-pNext)"},
+    {"VUID-VkSwapchainCreateInfoKHR-physicalDeviceCount-01429", "If the logical device was created with VkDeviceGroupDeviceCreateInfo::physicalDeviceCount equal to 1, flags must not contain VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-physicalDeviceCount-01429)"},
+    {"VUID-VkSwapchainCreateInfoKHR-preTransform-01279", "preTransform must be one of the bits present in the supportedTransforms member of the VkSurfaceCapabilitiesKHR structure returned by vkGetPhysicalDeviceSurfaceCapabilitiesKHR for the surface (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-preTransform-01279)"},
+    {"VUID-VkSwapchainCreateInfoKHR-preTransform-parameter", "preTransform must be a valid VkSurfaceTransformFlagBitsKHR value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-preTransform-parameter)"},
+    {"VUID-VkSwapchainCreateInfoKHR-presentMode-01281", "presentMode must be one of the VkPresentModeKHR values returned by vkGetPhysicalDeviceSurfacePresentModesKHR for the surface (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-presentMode-01281)"},
+    {"VUID-VkSwapchainCreateInfoKHR-presentMode-01427", "If presentMode is VK_PRESENT_MODE_IMMEDIATE_KHR, VK_PRESENT_MODE_MAILBOX_KHR, VK_PRESENT_MODE_FIFO_KHR or VK_PRESENT_MODE_FIFO_RELAXED_KHR, imageUsage must be a subset of the supported usage flags present in the supportedUsageFlags member of the VkSurfaceCapabilitiesKHR structure returned by vkGetPhysicalDeviceSurfaceCapabilitiesKHR for surface (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-presentMode-01427)"},
+    {"VUID-VkSwapchainCreateInfoKHR-presentMode-parameter", "presentMode must be a valid VkPresentModeKHR value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-presentMode-parameter)"},
+    {"VUID-VkSwapchainCreateInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-sType-sType)"},
+    {"VUID-VkSwapchainCreateInfoKHR-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-sType-unique)"},
+    {"VUID-VkSwapchainCreateInfoKHR-surface-01270", "surface must be a surface that is supported by the device as determined using vkGetPhysicalDeviceSurfaceSupportKHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-surface-01270)"},
+    {"VUID-VkSwapchainCreateInfoKHR-surface-parameter", "surface must be a valid VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainCreateInfoKHR-surface-parameter)"},
+    {"VUID-VkSwapchainDisplayNativeHdrCreateInfoAMD-localDimmingEnable-XXXXX", "It is only valid to set localDimmingEnable to VK_TRUE if VkDisplayNativeHdrSurfaceCapabilitiesAMD::localDimmingSupport is supported. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainDisplayNativeHdrCreateInfoAMD-localDimmingEnable-XXXXX)"},
+    {"VUID-VkSwapchainDisplayNativeHdrCreateInfoAMD-sType-sType", "sType must be VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkSwapchainDisplayNativeHdrCreateInfoAMD-sType-sType)"},
+    {"VUID-VkTextureLODGatherFormatPropertiesAMD-sType-sType", "sType must be VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkTextureLODGatherFormatPropertiesAMD-sType-sType)"},
+    {"VUID-VkTimelineSemaphoreSubmitInfoKHR-pSignalSemaphoreValues-parameter", "If signalSemaphoreValueCount is not 0, and pSignalSemaphoreValues is not NULL, pSignalSemaphoreValues must be a valid pointer to an array of signalSemaphoreValueCount uint64_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkTimelineSemaphoreSubmitInfoKHR-pSignalSemaphoreValues-parameter)"},
+    {"VUID-VkTimelineSemaphoreSubmitInfoKHR-pWaitSemaphoreValues-parameter", "If waitSemaphoreValueCount is not 0, and pWaitSemaphoreValues is not NULL, pWaitSemaphoreValues must be a valid pointer to an array of waitSemaphoreValueCount uint64_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkTimelineSemaphoreSubmitInfoKHR-pWaitSemaphoreValues-parameter)"},
+    {"VUID-VkTimelineSemaphoreSubmitInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkTimelineSemaphoreSubmitInfoKHR-sType-sType)"},
+    {"VUID-VkValidationCacheCreateInfoEXT-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkValidationCacheCreateInfoEXT-flags-zerobitmask)"},
+    {"VUID-VkValidationCacheCreateInfoEXT-initialDataSize-01534", "If initialDataSize is not 0, it must be equal to the size of pInitialData, as returned by vkGetValidationCacheDataEXT when pInitialData was originally retrieved (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkValidationCacheCreateInfoEXT-initialDataSize-01534)"},
+    {"VUID-VkValidationCacheCreateInfoEXT-initialDataSize-01535", "If initialDataSize is not 0, pInitialData must have been retrieved from a previous call to vkGetValidationCacheDataEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkValidationCacheCreateInfoEXT-initialDataSize-01535)"},
+    {"VUID-VkValidationCacheCreateInfoEXT-pInitialData-parameter", "If initialDataSize is not 0, pInitialData must be a valid pointer to an array of initialDataSize bytes (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkValidationCacheCreateInfoEXT-pInitialData-parameter)"},
+    {"VUID-VkValidationCacheCreateInfoEXT-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkValidationCacheCreateInfoEXT-pNext-pNext)"},
+    {"VUID-VkValidationCacheCreateInfoEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkValidationCacheCreateInfoEXT-sType-sType)"},
+    {"VUID-VkValidationFeaturesEXT-pDisabledValidationFeatures-parameter", "If disabledValidationFeatureCount is not 0, pDisabledValidationFeatures must be a valid pointer to an array of disabledValidationFeatureCount valid VkValidationFeatureDisableEXT values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkValidationFeaturesEXT-pDisabledValidationFeatures-parameter)"},
+    {"VUID-VkValidationFeaturesEXT-pEnabledValidationFeatures-parameter", "If enabledValidationFeatureCount is not 0, pEnabledValidationFeatures must be a valid pointer to an array of enabledValidationFeatureCount valid VkValidationFeatureEnableEXT values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkValidationFeaturesEXT-pEnabledValidationFeatures-parameter)"},
+    {"VUID-VkValidationFeaturesEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkValidationFeaturesEXT-sType-sType)"},
+    {"VUID-VkValidationFlagsEXT-disabledValidationCheckCount-arraylength", "disabledValidationCheckCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkValidationFlagsEXT-disabledValidationCheckCount-arraylength)"},
+    {"VUID-VkValidationFlagsEXT-pDisabledValidationChecks-parameter", "pDisabledValidationChecks must be a valid pointer to an array of disabledValidationCheckCount valid VkValidationCheckEXT values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkValidationFlagsEXT-pDisabledValidationChecks-parameter)"},
+    {"VUID-VkValidationFlagsEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkValidationFlagsEXT-sType-sType)"},
+    {"VUID-VkVertexInputAttributeDescription-binding-00621", "binding must be less than VkPhysicalDeviceLimits::maxVertexInputBindings (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkVertexInputAttributeDescription-binding-00621)"},
+    {"VUID-VkVertexInputAttributeDescription-format-00623", "format must be allowed as a vertex buffer format, as specified by the VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT flag in VkFormatProperties::bufferFeatures returned by vkGetPhysicalDeviceFormatProperties (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkVertexInputAttributeDescription-format-00623)"},
+    {"VUID-VkVertexInputAttributeDescription-format-parameter", "format must be a valid VkFormat value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkVertexInputAttributeDescription-format-parameter)"},
+    {"VUID-VkVertexInputAttributeDescription-location-00620", "location must be less than VkPhysicalDeviceLimits::maxVertexInputAttributes (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkVertexInputAttributeDescription-location-00620)"},
+    {"VUID-VkVertexInputAttributeDescription-offset-00622", "offset must be less than or equal to VkPhysicalDeviceLimits::maxVertexInputAttributeOffset (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkVertexInputAttributeDescription-offset-00622)"},
+    {"VUID-VkVertexInputBindingDescription-binding-00618", "binding must be less than VkPhysicalDeviceLimits::maxVertexInputBindings (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkVertexInputBindingDescription-binding-00618)"},
+    {"VUID-VkVertexInputBindingDescription-inputRate-parameter", "inputRate must be a valid VkVertexInputRate value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkVertexInputBindingDescription-inputRate-parameter)"},
+    {"VUID-VkVertexInputBindingDescription-stride-00619", "stride must be less than or equal to VkPhysicalDeviceLimits::maxVertexInputBindingStride (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkVertexInputBindingDescription-stride-00619)"},
+    {"VUID-VkVertexInputBindingDivisorDescriptionEXT-binding-01869", "binding must be less than VkPhysicalDeviceLimits::maxVertexInputBindings (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkVertexInputBindingDivisorDescriptionEXT-binding-01869)"},
+    {"VUID-VkVertexInputBindingDivisorDescriptionEXT-divisor-01870", "divisor must be a value between 0 and VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT::maxVertexAttribDivisor, inclusive. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkVertexInputBindingDivisorDescriptionEXT-divisor-01870)"},
+    {"VUID-VkVertexInputBindingDivisorDescriptionEXT-inputRate-01871", "VkVertexInputBindingDescription::inputRate must be of type VK_VERTEX_INPUT_RATE_INSTANCE for this binding. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkVertexInputBindingDivisorDescriptionEXT-inputRate-01871)"},
+    {"VUID-VkVertexInputBindingDivisorDescriptionEXT-vertexAttributeInstanceRateDivisor-02229", "If the vertexAttributeInstanceRateDivisor feature is not enabled, divisor must be 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkVertexInputBindingDivisorDescriptionEXT-vertexAttributeInstanceRateDivisor-02229)"},
+    {"VUID-VkVertexInputBindingDivisorDescriptionEXT-vertexAttributeInstanceRateZeroDivisor-02228", "If the vertexAttributeInstanceRateZeroDivisor feature is not enabled, divisor must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkVertexInputBindingDivisorDescriptionEXT-vertexAttributeInstanceRateZeroDivisor-02228)"},
+    {"VUID-VkViSurfaceCreateInfoNN-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkViSurfaceCreateInfoNN-flags-zerobitmask)"},
+    {"VUID-VkViSurfaceCreateInfoNN-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkViSurfaceCreateInfoNN-pNext-pNext)"},
+    {"VUID-VkViSurfaceCreateInfoNN-sType-sType", "sType must be VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkViSurfaceCreateInfoNN-sType-sType)"},
+    {"VUID-VkViSurfaceCreateInfoNN-window-01318", "window must be a valid nn::vi::NativeWindowHandle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkViSurfaceCreateInfoNN-window-01318)"},
+    {"VUID-VkViewport-height-01772", "height must be greater than 0.0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkViewport-height-01772)"},
+    {"VUID-VkViewport-height-01773", "The absolute value of height must be less than or equal to VkPhysicalDeviceLimits::maxViewportDimensions[1] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkViewport-height-01773)"},
+    {"VUID-VkViewport-maxDepth-01235", "Unless VK_EXT_depth_range_unrestricted extension is enabled maxDepth must be between 0.0 and 1.0, inclusive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkViewport-maxDepth-01235)"},
+    {"VUID-VkViewport-maxDepth-02541", "maxDepth must be between 0.0 and 1.0, inclusive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkViewport-maxDepth-02541)"},
+    {"VUID-VkViewport-minDepth-01234", "Unless VK_EXT_depth_range_unrestricted extension is enabled minDepth must be between 0.0 and 1.0, inclusive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkViewport-minDepth-01234)"},
+    {"VUID-VkViewport-minDepth-02540", "minDepth must be between 0.0 and 1.0, inclusive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkViewport-minDepth-02540)"},
+    {"VUID-VkViewport-width-01770", "width must be greater than 0.0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkViewport-width-01770)"},
+    {"VUID-VkViewport-width-01771", "width must be less than or equal to VkPhysicalDeviceLimits::maxViewportDimensions[0] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkViewport-width-01771)"},
+    {"VUID-VkViewport-x-01232", "(x + width) must be less than or equal to viewportBoundsRange[1] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkViewport-x-01232)"},
+    {"VUID-VkViewport-x-01774", "x must be greater than or equal to viewportBoundsRange[0] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkViewport-x-01774)"},
+    {"VUID-VkViewport-y-01233", "(y + height) must be less than or equal to viewportBoundsRange[1] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkViewport-y-01233)"},
+    {"VUID-VkViewport-y-01775", "y must be greater than or equal to viewportBoundsRange[0] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkViewport-y-01775)"},
+    {"VUID-VkViewport-y-01776", "y must be less than or equal to viewportBoundsRange[1] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkViewport-y-01776)"},
+    {"VUID-VkViewport-y-01777", "(y + height) must be greater than or equal to viewportBoundsRange[0] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkViewport-y-01777)"},
+    {"VUID-VkViewportSwizzleNV-w-parameter", "w must be a valid VkViewportCoordinateSwizzleNV value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkViewportSwizzleNV-w-parameter)"},
+    {"VUID-VkViewportSwizzleNV-x-parameter", "x must be a valid VkViewportCoordinateSwizzleNV value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkViewportSwizzleNV-x-parameter)"},
+    {"VUID-VkViewportSwizzleNV-y-parameter", "y must be a valid VkViewportCoordinateSwizzleNV value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkViewportSwizzleNV-y-parameter)"},
+    {"VUID-VkViewportSwizzleNV-z-parameter", "z must be a valid VkViewportCoordinateSwizzleNV value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkViewportSwizzleNV-z-parameter)"},
+    {"VUID-VkWaylandSurfaceCreateInfoKHR-display-01304", "display must point to a valid Wayland wl_display. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWaylandSurfaceCreateInfoKHR-display-01304)"},
+    {"VUID-VkWaylandSurfaceCreateInfoKHR-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWaylandSurfaceCreateInfoKHR-flags-zerobitmask)"},
+    {"VUID-VkWaylandSurfaceCreateInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWaylandSurfaceCreateInfoKHR-pNext-pNext)"},
+    {"VUID-VkWaylandSurfaceCreateInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWaylandSurfaceCreateInfoKHR-sType-sType)"},
+    {"VUID-VkWaylandSurfaceCreateInfoKHR-surface-01305", "surface must point to a valid Wayland wl_surface. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWaylandSurfaceCreateInfoKHR-surface-01305)"},
+    {"VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-commonparent", "Both of the elements of pAcquireSyncs, and the elements of pReleaseSyncs that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-commonparent)"},
+    {"VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pAcquireKeys-parameter", "If acquireCount is not 0, pAcquireKeys must be a valid pointer to an array of acquireCount uint64_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pAcquireKeys-parameter)"},
+    {"VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pAcquireSyncs-00081", "Each member of pAcquireSyncs and pReleaseSyncs must be a device memory object imported by setting VkImportMemoryWin32HandleInfoKHR::handleType to VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT or VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pAcquireSyncs-00081)"},
+    {"VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pAcquireSyncs-parameter", "If acquireCount is not 0, pAcquireSyncs must be a valid pointer to an array of acquireCount valid VkDeviceMemory handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pAcquireSyncs-parameter)"},
+    {"VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pAcquireTimeouts-parameter", "If acquireCount is not 0, pAcquireTimeouts must be a valid pointer to an array of acquireCount uint32_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pAcquireTimeouts-parameter)"},
+    {"VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pReleaseKeys-parameter", "If releaseCount is not 0, pReleaseKeys must be a valid pointer to an array of releaseCount uint64_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pReleaseKeys-parameter)"},
+    {"VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pReleaseSyncs-parameter", "If releaseCount is not 0, pReleaseSyncs must be a valid pointer to an array of releaseCount valid VkDeviceMemory handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pReleaseSyncs-parameter)"},
+    {"VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-sType-sType)"},
+    {"VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-commonparent", "Both of the elements of pAcquireSyncs, and the elements of pReleaseSyncs that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-commonparent)"},
+    {"VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pAcquireKeys-parameter", "If acquireCount is not 0, pAcquireKeys must be a valid pointer to an array of acquireCount uint64_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pAcquireKeys-parameter)"},
+    {"VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pAcquireSyncs-parameter", "If acquireCount is not 0, pAcquireSyncs must be a valid pointer to an array of acquireCount valid VkDeviceMemory handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pAcquireSyncs-parameter)"},
+    {"VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pAcquireTimeoutMilliseconds-parameter", "If acquireCount is not 0, pAcquireTimeoutMilliseconds must be a valid pointer to an array of acquireCount uint32_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pAcquireTimeoutMilliseconds-parameter)"},
+    {"VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pReleaseKeys-parameter", "If releaseCount is not 0, pReleaseKeys must be a valid pointer to an array of releaseCount uint64_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pReleaseKeys-parameter)"},
+    {"VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pReleaseSyncs-parameter", "If releaseCount is not 0, pReleaseSyncs must be a valid pointer to an array of releaseCount valid VkDeviceMemory handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pReleaseSyncs-parameter)"},
+    {"VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-sType-sType)"},
+    {"VUID-VkWin32SurfaceCreateInfoKHR-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWin32SurfaceCreateInfoKHR-flags-zerobitmask)"},
+    {"VUID-VkWin32SurfaceCreateInfoKHR-hinstance-01307", "hinstance must be a valid Win32 HINSTANCE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWin32SurfaceCreateInfoKHR-hinstance-01307)"},
+    {"VUID-VkWin32SurfaceCreateInfoKHR-hwnd-01308", "hwnd must be a valid Win32 HWND. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWin32SurfaceCreateInfoKHR-hwnd-01308)"},
+    {"VUID-VkWin32SurfaceCreateInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWin32SurfaceCreateInfoKHR-pNext-pNext)"},
+    {"VUID-VkWin32SurfaceCreateInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWin32SurfaceCreateInfoKHR-sType-sType)"},
+    {"VUID-VkWriteDescriptorSet-commonparent", "Both of dstSet, and the elements of pTexelBufferView that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-commonparent)"},
+    {"VUID-VkWriteDescriptorSet-descriptorCount-00317", "All consecutive bindings updated via a single VkWriteDescriptorSet structure, except those with a descriptorCount of zero, must have identical descriptorType and stageFlags. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorCount-00317)"},
+    {"VUID-VkWriteDescriptorSet-descriptorCount-00318", "All consecutive bindings updated via a single VkWriteDescriptorSet structure, except those with a descriptorCount of zero, must all either use immutable samplers or must all not use immutable samplers. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorCount-00318)"},
+    {"VUID-VkWriteDescriptorSet-descriptorCount-03048", "All consecutive bindings updated via a single VkWriteDescriptorSet structure, except those with a descriptorCount of zero, must have identical VkDescriptorBindingFlagBitsEXT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorCount-03048)"},
+    {"VUID-VkWriteDescriptorSet-descriptorCount-arraylength", "descriptorCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorCount-arraylength)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-00319", "descriptorType must match the type of dstBinding within dstSet (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-00319)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-00322", "If descriptorType is VK_DESCRIPTOR_TYPE_SAMPLER, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, or VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, pImageInfo must be a valid pointer to an array of descriptorCount valid VkDescriptorImageInfo structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-00322)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-00323", "If descriptorType is VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER or VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, pTexelBufferView must be a valid pointer to an array of descriptorCount valid VkBufferView handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-00323)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-00324", "If descriptorType is VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, or VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, pBufferInfo must be a valid pointer to an array of descriptorCount valid VkDescriptorBufferInfo structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-00324)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-00325", "If descriptorType is VK_DESCRIPTOR_TYPE_SAMPLER or VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, and dstSet was not allocated with a layout that included immutable samplers for dstBinding with descriptorType, the sampler member of each element of pImageInfo must be a valid VkSampler object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-00325)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-00326", "If descriptorType is VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, or VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, the imageView and imageLayout members of each element of pImageInfo must be a valid VkImageView and VkImageLayout, respectively (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-00326)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-00327", "If descriptorType is VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER or VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, the offset member of each element of pBufferInfo must be a multiple of VkPhysicalDeviceLimits::minUniformBufferOffsetAlignment (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-00327)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-00328", "If descriptorType is VK_DESCRIPTOR_TYPE_STORAGE_BUFFER or VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, the offset member of each element of pBufferInfo must be a multiple of VkPhysicalDeviceLimits::minStorageBufferOffsetAlignment (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-00328)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-00329", "If descriptorType is VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, or VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, and the buffer member of any element of pBufferInfo is the handle of a non-sparse buffer, then that buffer must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-00329)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-00330", "If descriptorType is VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER or VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, the buffer member of each element of pBufferInfo must have been created with VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-00330)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-00331", "If descriptorType is VK_DESCRIPTOR_TYPE_STORAGE_BUFFER or VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, the buffer member of each element of pBufferInfo must have been created with VK_BUFFER_USAGE_STORAGE_BUFFER_BIT set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-00331)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-00332", "If descriptorType is VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER or VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, the range member of each element of pBufferInfo, or the effective range if range is VK_WHOLE_SIZE, must be less than or equal to VkPhysicalDeviceLimits::maxUniformBufferRange (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-00332)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-00333", "If descriptorType is VK_DESCRIPTOR_TYPE_STORAGE_BUFFER or VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, the range member of each element of pBufferInfo, or the effective range if range is VK_WHOLE_SIZE, must be less than or equal to VkPhysicalDeviceLimits::maxStorageBufferRange (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-00333)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-00334", "If descriptorType is VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, the VkBuffer that each element of pTexelBufferView was created from must have been created with VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-00334)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-00335", "If descriptorType is VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, the VkBuffer that each element of pTexelBufferView was created from must have been created with VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-00335)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-00336", "If descriptorType is VK_DESCRIPTOR_TYPE_STORAGE_IMAGE or VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, the imageView member of each element of pImageInfo must have been created with the identity swizzle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-00336)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-00337", "If descriptorType is VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE or VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, the imageView member of each element of pImageInfo must have been created with VK_IMAGE_USAGE_SAMPLED_BIT set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-00337)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-00338", "If descriptorType is VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, the imageView member of each element of pImageInfo must have been created with VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-00338)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-00339", "If descriptorType is VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, the imageView member of each element of pImageInfo must have been created with VK_IMAGE_USAGE_STORAGE_BIT set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-00339)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-01402", "If descriptorType is VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, for each descriptor that will be accessed via load or store operations the imageLayout member for corresponding elements of pImageInfo must be VK_IMAGE_LAYOUT_GENERAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-01402)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-01403", "If descriptorType is VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE or VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, the imageLayout member of each element of pImageInfo must be a member of the list given in Sampled Image or Combined Image Sampler, corresponding to its type (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-01403)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-01946", "If descriptorType is VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, then the imageView member of each pImageInfo element must have been created without a VkSamplerYcbcrConversionInfo structure in its pNext chain (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-01946)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-01948", "If descriptorType is VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, and dstSet was allocated with a layout that included immutable samplers for dstBinding, then the imageView member of each element of pImageInfo which corresponds to an immutable sampler that enables sampler Y'CBCR conversion must have been created with a VkSamplerYcbcrConversionInfo structure in its pNext chain with an identically defined VkSamplerYcbcrConversionInfo to the corresponding immutable sampler (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-01948)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-02219", "If descriptorType is VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, dstArrayElement must be an integer multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-02219)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-02220", "If descriptorType is VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, descriptorCount must be an integer multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-02220)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-02221", "If descriptorType is VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, the pNext chain must include a VkWriteDescriptorSetInlineUniformBlockEXT structure whose dataSize member equals descriptorCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-02221)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-02382", "If descriptorType is VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV, the pNext chain must include a VkWriteDescriptorSetAccelerationStructureNV structure whose accelerationStructureCount member equals descriptorCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-02382)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-02738", "If descriptorType is VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, and if any element of pImageInfo has a imageView member that was created with a VkSamplerYcbcrConversionInfo structure in its pNext chain, then dstSet must have been allocated with a layout that included immutable samplers for dstBinding, and the corresponding immutable sampler must have been created with an identically defined VkSamplerYcbcrConversionInfo object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-02738)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-02752", "If descriptorType is VK_DESCRIPTOR_TYPE_SAMPLER, then dstSet must not have been allocated with a layout that included immutable samplers for dstBinding (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-02752)"},
+    {"VUID-VkWriteDescriptorSet-descriptorType-parameter", "descriptorType must be a valid VkDescriptorType value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-descriptorType-parameter)"},
+    {"VUID-VkWriteDescriptorSet-dstArrayElement-00321", "The sum of dstArrayElement and descriptorCount must be less than or equal to the number of array elements in the descriptor set binding specified by dstBinding, and all applicable consecutive bindings, as described by consecutive binding updates (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-dstArrayElement-00321)"},
+    {"VUID-VkWriteDescriptorSet-dstBinding-00315", "dstBinding must be less than or equal to the maximum value of binding of all VkDescriptorSetLayoutBinding structures specified when dstSet's descriptor set layout was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-dstBinding-00315)"},
+    {"VUID-VkWriteDescriptorSet-dstBinding-00316", "dstBinding must be a binding with a non-zero descriptorCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-dstBinding-00316)"},
+    {"VUID-VkWriteDescriptorSet-dstSet-00320", "dstSet must be a valid VkDescriptorSet handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-dstSet-00320)"},
+    {"VUID-VkWriteDescriptorSet-pNext-pNext", "Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkWriteDescriptorSetAccelerationStructureNV or VkWriteDescriptorSetInlineUniformBlockEXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-pNext-pNext)"},
+    {"VUID-VkWriteDescriptorSet-sType-sType", "sType must be VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-sType-sType)"},
+    {"VUID-VkWriteDescriptorSet-sType-unique", "Each sType member in the pNext chain must be unique (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSet-sType-unique)"},
+    {"VUID-VkWriteDescriptorSetAccelerationStructureNV-accelerationStructureCount-02236", "accelerationStructureCount must be equal to descriptorCount in the extended structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSetAccelerationStructureNV-accelerationStructureCount-02236)"},
+    {"VUID-VkWriteDescriptorSetAccelerationStructureNV-accelerationStructureCount-arraylength", "accelerationStructureCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSetAccelerationStructureNV-accelerationStructureCount-arraylength)"},
+    {"VUID-VkWriteDescriptorSetAccelerationStructureNV-pAccelerationStructures-02764", "Each acceleration structure in pAccelerationStructures must have been created with VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSetAccelerationStructureNV-pAccelerationStructures-02764)"},
+    {"VUID-VkWriteDescriptorSetAccelerationStructureNV-pAccelerationStructures-parameter", "pAccelerationStructures must be a valid pointer to an array of accelerationStructureCount valid VkAccelerationStructureNV handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSetAccelerationStructureNV-pAccelerationStructures-parameter)"},
+    {"VUID-VkWriteDescriptorSetAccelerationStructureNV-sType-sType", "sType must be VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSetAccelerationStructureNV-sType-sType)"},
+    {"VUID-VkWriteDescriptorSetInlineUniformBlockEXT-dataSize-02222", "dataSize must be an integer multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSetInlineUniformBlockEXT-dataSize-02222)"},
+    {"VUID-VkWriteDescriptorSetInlineUniformBlockEXT-dataSize-arraylength", "dataSize must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSetInlineUniformBlockEXT-dataSize-arraylength)"},
+    {"VUID-VkWriteDescriptorSetInlineUniformBlockEXT-pData-parameter", "pData must be a valid pointer to an array of dataSize bytes (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSetInlineUniformBlockEXT-pData-parameter)"},
+    {"VUID-VkWriteDescriptorSetInlineUniformBlockEXT-sType-sType", "sType must be VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkWriteDescriptorSetInlineUniformBlockEXT-sType-sType)"},
+    {"VUID-VkXcbSurfaceCreateInfoKHR-connection-01310", "connection must point to a valid X11 xcb_connection_t. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkXcbSurfaceCreateInfoKHR-connection-01310)"},
+    {"VUID-VkXcbSurfaceCreateInfoKHR-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkXcbSurfaceCreateInfoKHR-flags-zerobitmask)"},
+    {"VUID-VkXcbSurfaceCreateInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkXcbSurfaceCreateInfoKHR-pNext-pNext)"},
+    {"VUID-VkXcbSurfaceCreateInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkXcbSurfaceCreateInfoKHR-sType-sType)"},
+    {"VUID-VkXcbSurfaceCreateInfoKHR-window-01311", "window must be a valid X11 xcb_window_t. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkXcbSurfaceCreateInfoKHR-window-01311)"},
+    {"VUID-VkXlibSurfaceCreateInfoKHR-dpy-01313", "dpy must point to a valid Xlib Display. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkXlibSurfaceCreateInfoKHR-dpy-01313)"},
+    {"VUID-VkXlibSurfaceCreateInfoKHR-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkXlibSurfaceCreateInfoKHR-flags-zerobitmask)"},
+    {"VUID-VkXlibSurfaceCreateInfoKHR-pNext-pNext", "pNext must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkXlibSurfaceCreateInfoKHR-pNext-pNext)"},
+    {"VUID-VkXlibSurfaceCreateInfoKHR-sType-sType", "sType must be VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkXlibSurfaceCreateInfoKHR-sType-sType)"},
+    {"VUID-VkXlibSurfaceCreateInfoKHR-window-01314", "window must be a valid Xlib Window. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkXlibSurfaceCreateInfoKHR-window-01314)"},
+    {"VUID-vkAcquireFullScreenExclusiveModeEXT-commonparent", "Both of device, and swapchain must have been created, allocated, or retrieved from the same VkInstance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireFullScreenExclusiveModeEXT-commonparent)"},
+    {"VUID-vkAcquireFullScreenExclusiveModeEXT-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireFullScreenExclusiveModeEXT-device-parameter)"},
+    {"VUID-vkAcquireFullScreenExclusiveModeEXT-swapchain-02674", "swapchain must not be in the retired state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireFullScreenExclusiveModeEXT-swapchain-02674)"},
+    {"VUID-vkAcquireFullScreenExclusiveModeEXT-swapchain-02675", "swapchain must be a swapchain created with a VkSurfaceFullScreenExclusiveInfoEXT structure, with fullScreenExclusive set to VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireFullScreenExclusiveModeEXT-swapchain-02675)"},
+    {"VUID-vkAcquireFullScreenExclusiveModeEXT-swapchain-02676", "swapchain must not currently have exclusive full-screen access (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireFullScreenExclusiveModeEXT-swapchain-02676)"},
+    {"VUID-vkAcquireFullScreenExclusiveModeEXT-swapchain-parameter", "swapchain must be a valid VkSwapchainKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireFullScreenExclusiveModeEXT-swapchain-parameter)"},
+    {"VUID-vkAcquireNextImage2KHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireNextImage2KHR-device-parameter)"},
+    {"VUID-vkAcquireNextImage2KHR-pAcquireInfo-parameter", "pAcquireInfo must be a valid pointer to a valid VkAcquireNextImageInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireNextImage2KHR-pAcquireInfo-parameter)"},
+    {"VUID-vkAcquireNextImage2KHR-pImageIndex-parameter", "pImageIndex must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireNextImage2KHR-pImageIndex-parameter)"},
+    {"VUID-vkAcquireNextImage2KHR-swapchain-01803", "If the number of currently acquired images is greater than the difference between the number of images in the swapchain member of pAcquireInfo and the value of VkSurfaceCapabilitiesKHR::minImageCount as returned by a call to vkGetPhysicalDeviceSurfaceCapabilities2KHR with the surface used to create swapchain, the timeout member of pAcquireInfo must not be UINT64_MAX (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireNextImage2KHR-swapchain-01803)"},
+    {"VUID-vkAcquireNextImageKHR-commonparent", "Both of device, and swapchain that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkInstance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireNextImageKHR-commonparent)"},
+    {"VUID-vkAcquireNextImageKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireNextImageKHR-device-parameter)"},
+    {"VUID-vkAcquireNextImageKHR-fence-01287", "If fence is not VK_NULL_HANDLE it must be unsignaled and must not be associated with any other queue command that has not yet completed execution on that queue (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireNextImageKHR-fence-01287)"},
+    {"VUID-vkAcquireNextImageKHR-fence-parameter", "If fence is not VK_NULL_HANDLE, fence must be a valid VkFence handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireNextImageKHR-fence-parameter)"},
+    {"VUID-vkAcquireNextImageKHR-fence-parent", "If fence is a valid handle, it must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireNextImageKHR-fence-parent)"},
+    {"VUID-vkAcquireNextImageKHR-pImageIndex-parameter", "pImageIndex must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireNextImageKHR-pImageIndex-parameter)"},
+    {"VUID-vkAcquireNextImageKHR-semaphore-01286", "If semaphore is not VK_NULL_HANDLE it must be unsignaled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireNextImageKHR-semaphore-01286)"},
+    {"VUID-vkAcquireNextImageKHR-semaphore-01779", "If semaphore is not VK_NULL_HANDLE it must not have any uncompleted signal or wait operations pending (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireNextImageKHR-semaphore-01779)"},
+    {"VUID-vkAcquireNextImageKHR-semaphore-01780", "semaphore and fence must not both be equal to VK_NULL_HANDLE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireNextImageKHR-semaphore-01780)"},
+    {"VUID-vkAcquireNextImageKHR-semaphore-03265", "semaphore must have a VkSemaphoreTypeKHR of VK_SEMAPHORE_TYPE_BINARY_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireNextImageKHR-semaphore-03265)"},
+    {"VUID-vkAcquireNextImageKHR-semaphore-parameter", "If semaphore is not VK_NULL_HANDLE, semaphore must be a valid VkSemaphore handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireNextImageKHR-semaphore-parameter)"},
+    {"VUID-vkAcquireNextImageKHR-semaphore-parent", "If semaphore is a valid handle, it must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireNextImageKHR-semaphore-parent)"},
+    {"VUID-vkAcquireNextImageKHR-swapchain-01285", "swapchain must not be in the retired state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireNextImageKHR-swapchain-01285)"},
+    {"VUID-vkAcquireNextImageKHR-swapchain-01802", "If the number of currently acquired images is greater than the difference between the number of images in swapchain and the value of VkSurfaceCapabilitiesKHR::minImageCount as returned by a call to vkGetPhysicalDeviceSurfaceCapabilities2KHR with the surface used to create swapchain, timeout must not be UINT64_MAX (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireNextImageKHR-swapchain-01802)"},
+    {"VUID-vkAcquireNextImageKHR-swapchain-parameter", "swapchain must be a valid VkSwapchainKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireNextImageKHR-swapchain-parameter)"},
+    {"VUID-vkAcquirePerformanceConfigurationINTEL-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquirePerformanceConfigurationINTEL-device-parameter)"},
+    {"VUID-vkAcquirePerformanceConfigurationINTEL-pAcquireInfo-parameter", "pAcquireInfo must be a valid pointer to a valid VkPerformanceConfigurationAcquireInfoINTEL structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquirePerformanceConfigurationINTEL-pAcquireInfo-parameter)"},
+    {"VUID-vkAcquirePerformanceConfigurationINTEL-pConfiguration-parameter", "pConfiguration must be a valid pointer to a VkPerformanceConfigurationINTEL handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquirePerformanceConfigurationINTEL-pConfiguration-parameter)"},
+    {"VUID-vkAcquireProfilingLockKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireProfilingLockKHR-device-parameter)"},
+    {"VUID-vkAcquireProfilingLockKHR-pInfo-parameter", "pInfo must be a valid pointer to a valid VkAcquireProfilingLockInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireProfilingLockKHR-pInfo-parameter)"},
+    {"VUID-vkAcquireXlibDisplayEXT-display-parameter", "display must be a valid VkDisplayKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireXlibDisplayEXT-display-parameter)"},
+    {"VUID-vkAcquireXlibDisplayEXT-dpy-parameter", "dpy must be a valid pointer to a Display value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireXlibDisplayEXT-dpy-parameter)"},
+    {"VUID-vkAcquireXlibDisplayEXT-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAcquireXlibDisplayEXT-physicalDevice-parameter)"},
+    {"VUID-vkAllocateCommandBuffers-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAllocateCommandBuffers-device-parameter)"},
+    {"VUID-vkAllocateCommandBuffers-pAllocateInfo-parameter", "pAllocateInfo must be a valid pointer to a valid VkCommandBufferAllocateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAllocateCommandBuffers-pAllocateInfo-parameter)"},
+    {"VUID-vkAllocateCommandBuffers-pAllocateInfo::commandBufferCount-arraylength", "The value referenced by pAllocateInfo::commandBufferCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAllocateCommandBuffers-pAllocateInfo::commandBufferCount-arraylength)"},
+    {"VUID-vkAllocateCommandBuffers-pCommandBuffers-parameter", "pCommandBuffers must be a valid pointer to an array of pAllocateInfo::commandBufferCount VkCommandBuffer handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAllocateCommandBuffers-pCommandBuffers-parameter)"},
+    {"VUID-vkAllocateDescriptorSets-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAllocateDescriptorSets-device-parameter)"},
+    {"VUID-vkAllocateDescriptorSets-pAllocateInfo-parameter", "pAllocateInfo must be a valid pointer to a valid VkDescriptorSetAllocateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAllocateDescriptorSets-pAllocateInfo-parameter)"},
+    {"VUID-vkAllocateDescriptorSets-pAllocateInfo::descriptorSetCount-arraylength", "The value referenced by pAllocateInfo::descriptorSetCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAllocateDescriptorSets-pAllocateInfo::descriptorSetCount-arraylength)"},
+    {"VUID-vkAllocateDescriptorSets-pDescriptorSets-parameter", "pDescriptorSets must be a valid pointer to an array of pAllocateInfo::descriptorSetCount VkDescriptorSet handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAllocateDescriptorSets-pDescriptorSets-parameter)"},
+    {"VUID-vkAllocateMemory-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAllocateMemory-device-parameter)"},
+    {"VUID-vkAllocateMemory-deviceCoherentMemory-02790", "If the deviceCoherentMemory feature is not enabled, pAllocateInfo->memoryTypeIndex must not identify a memory type supporting VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAllocateMemory-deviceCoherentMemory-02790)"},
+    {"VUID-vkAllocateMemory-pAllocateInfo-01713", "pAllocateInfo->allocationSize must be less than or equal to VkPhysicalDeviceMemoryProperties::memoryHeaps[memindex].size where memindex = VkPhysicalDeviceMemoryProperties::memoryTypes[pAllocateInfo->memoryTypeIndex].heapIndex as returned by vkGetPhysicalDeviceMemoryProperties for the VkPhysicalDevice that device was created from. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAllocateMemory-pAllocateInfo-01713)"},
+    {"VUID-vkAllocateMemory-pAllocateInfo-01714", "pAllocateInfo->memoryTypeIndex must be less than VkPhysicalDeviceMemoryProperties::memoryTypeCount as returned by vkGetPhysicalDeviceMemoryProperties for the VkPhysicalDevice that device was created from. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAllocateMemory-pAllocateInfo-01714)"},
+    {"VUID-vkAllocateMemory-pAllocateInfo-parameter", "pAllocateInfo must be a valid pointer to a valid VkMemoryAllocateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAllocateMemory-pAllocateInfo-parameter)"},
+    {"VUID-vkAllocateMemory-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAllocateMemory-pAllocator-parameter)"},
+    {"VUID-vkAllocateMemory-pMemory-parameter", "pMemory must be a valid pointer to a VkDeviceMemory handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkAllocateMemory-pMemory-parameter)"},
+    {"VUID-vkBeginCommandBuffer-commandBuffer-00049", "commandBuffer must not be in the recording or pending state. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBeginCommandBuffer-commandBuffer-00049)"},
+    {"VUID-vkBeginCommandBuffer-commandBuffer-00050", "If commandBuffer was allocated from a VkCommandPool which did not have the VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT flag set, commandBuffer must be in the initial state. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBeginCommandBuffer-commandBuffer-00050)"},
+    {"VUID-vkBeginCommandBuffer-commandBuffer-00051", "If commandBuffer is a secondary command buffer, the pInheritanceInfo member of pBeginInfo must be a valid VkCommandBufferInheritanceInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBeginCommandBuffer-commandBuffer-00051)"},
+    {"VUID-vkBeginCommandBuffer-commandBuffer-00052", "If commandBuffer is a secondary command buffer and either the occlusionQueryEnable member of the pInheritanceInfo member of pBeginInfo is VK_FALSE, or the precise occlusion queries feature is not enabled, the queryFlags member of the pInheritanceInfo member pBeginInfo must not contain VK_QUERY_CONTROL_PRECISE_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBeginCommandBuffer-commandBuffer-00052)"},
+    {"VUID-vkBeginCommandBuffer-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBeginCommandBuffer-commandBuffer-parameter)"},
+    {"VUID-vkBeginCommandBuffer-pBeginInfo-parameter", "pBeginInfo must be a valid pointer to a valid VkCommandBufferBeginInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBeginCommandBuffer-pBeginInfo-parameter)"},
+    {"VUID-vkBindAccelerationStructureMemoryNV-bindInfoCount-arraylength", "bindInfoCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindAccelerationStructureMemoryNV-bindInfoCount-arraylength)"},
+    {"VUID-vkBindAccelerationStructureMemoryNV-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindAccelerationStructureMemoryNV-device-parameter)"},
+    {"VUID-vkBindAccelerationStructureMemoryNV-pBindInfos-parameter", "pBindInfos must be a valid pointer to an array of bindInfoCount valid VkBindAccelerationStructureMemoryInfoNV structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindAccelerationStructureMemoryNV-pBindInfos-parameter)"},
+    {"VUID-vkBindBufferMemory-None-01898", "If buffer was created with the VK_BUFFER_CREATE_PROTECTED_BIT bit set, the buffer must be bound to a memory object allocated with a memory type that reports VK_MEMORY_PROPERTY_PROTECTED_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory-None-01898)"},
+    {"VUID-vkBindBufferMemory-None-01899", "If buffer was created with the VK_BUFFER_CREATE_PROTECTED_BIT bit not set, the buffer must not be bound to a memory object created with a memory type that reports VK_MEMORY_PROPERTY_PROTECTED_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory-None-01899)"},
+    {"VUID-vkBindBufferMemory-buffer-01029", "buffer must not already be backed by a memory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory-buffer-01029)"},
+    {"VUID-vkBindBufferMemory-buffer-01030", "buffer must not have been created with any sparse memory binding flags (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory-buffer-01030)"},
+    {"VUID-vkBindBufferMemory-buffer-01038", "If buffer was created with VkDedicatedAllocationBufferCreateInfoNV::dedicatedAllocation equal to VK_TRUE, memory must have been created with VkDedicatedAllocationMemoryAllocateInfoNV::buffer equal to a buffer handle created with identical creation parameters to buffer and memoryOffset must be zero (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory-buffer-01038)"},
+    {"VUID-vkBindBufferMemory-buffer-01039", "If buffer was not created with VkDedicatedAllocationBufferCreateInfoNV::dedicatedAllocation equal to VK_TRUE, memory must not have been allocated dedicated for a specific buffer or image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory-buffer-01039)"},
+    {"VUID-vkBindBufferMemory-buffer-01444", "If buffer requires a dedicated allocation(as reported by vkGetBufferMemoryRequirements2 in VkMemoryDedicatedRequirements::requiresDedicatedAllocation for buffer), memory must have been created with VkMemoryDedicatedAllocateInfo::buffer equal to buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory-buffer-01444)"},
+    {"VUID-vkBindBufferMemory-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory-buffer-parameter)"},
+    {"VUID-vkBindBufferMemory-buffer-parent", "buffer must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory-buffer-parent)"},
+    {"VUID-vkBindBufferMemory-bufferDeviceAddress-03339", "If the VkPhysicalDeviceBufferDeviceAddressFeaturesKHR::bufferDeviceAddress feature is enabled and buffer was created with the VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR bit set, memory must have been allocated with the VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory-bufferDeviceAddress-03339)"},
+    {"VUID-vkBindBufferMemory-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory-device-parameter)"},
+    {"VUID-vkBindBufferMemory-memory-01035", "memory must have been allocated using one of the memory types allowed in the memoryTypeBits member of the VkMemoryRequirements structure returned from a call to vkGetBufferMemoryRequirements with buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory-memory-01035)"},
+    {"VUID-vkBindBufferMemory-memory-01508", "If the VkMemoryAllocateInfo provided when memory was allocated included a VkMemoryDedicatedAllocateInfo structure in its pNext chain, and VkMemoryDedicatedAllocateInfo::buffer was not VK_NULL_HANDLE, then buffer must equal VkMemoryDedicatedAllocateInfo::buffer, and memoryOffset must be zero. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory-memory-01508)"},
+    {"VUID-vkBindBufferMemory-memory-02726", "If the value of VkExportMemoryAllocateInfo::handleTypes used to allocate memory is not 0, it must include at least one of the handles set in VkExternalMemoryBufferCreateInfo::handleTypes when buffer was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory-memory-02726)"},
+    {"VUID-vkBindBufferMemory-memory-02727", "If memory was created by a memory import operation, the external handle type of the imported memory must also have been set in VkExternalMemoryBufferCreateInfo::handleTypes when buffer was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory-memory-02727)"},
+    {"VUID-vkBindBufferMemory-memory-parameter", "memory must be a valid VkDeviceMemory handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory-memory-parameter)"},
+    {"VUID-vkBindBufferMemory-memory-parent", "memory must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory-memory-parent)"},
+    {"VUID-vkBindBufferMemory-memoryOffset-01031", "memoryOffset must be less than the size of memory (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory-memoryOffset-01031)"},
+    {"VUID-vkBindBufferMemory-memoryOffset-01036", "memoryOffset must be an integer multiple of the alignment member of the VkMemoryRequirements structure returned from a call to vkGetBufferMemoryRequirements with buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory-memoryOffset-01036)"},
+    {"VUID-vkBindBufferMemory-size-01037", "The size member of the VkMemoryRequirements structure returned from a call to vkGetBufferMemoryRequirements with buffer must be less than or equal to the size of memory minus memoryOffset (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory-size-01037)"},
+    {"VUID-vkBindBufferMemory2-bindInfoCount-arraylength", "bindInfoCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory2-bindInfoCount-arraylength)"},
+    {"VUID-vkBindBufferMemory2-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory2-device-parameter)"},
+    {"VUID-vkBindBufferMemory2-pBindInfos-parameter", "pBindInfos must be a valid pointer to an array of bindInfoCount valid VkBindBufferMemoryInfo structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindBufferMemory2-pBindInfos-parameter)"},
+    {"VUID-vkBindImageMemory-None-01901", "If image was created with the VK_IMAGE_CREATE_PROTECTED_BIT bit set, the image must be bound to a memory object allocated with a memory type that reports VK_MEMORY_PROPERTY_PROTECTED_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-None-01901)"},
+    {"VUID-vkBindImageMemory-None-01902", "If image was created with the VK_IMAGE_CREATE_PROTECTED_BIT bit not set, the image must not be bound to a memory object created with a memory type that reports VK_MEMORY_PROPERTY_PROTECTED_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-None-01902)"},
+    {"VUID-vkBindImageMemory-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-device-parameter)"},
+    {"VUID-vkBindImageMemory-image-01044", "image must not already be backed by a memory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-image-01044)"},
+    {"VUID-vkBindImageMemory-image-01045", "image must not have been created with any sparse memory binding flags (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-image-01045)"},
+    {"VUID-vkBindImageMemory-image-01050", "If image was created with VkDedicatedAllocationImageCreateInfoNV::dedicatedAllocation equal to VK_TRUE, memory must have been created with VkDedicatedAllocationMemoryAllocateInfoNV::image equal to an image handle created with identical creation parameters to image and memoryOffset must be zero (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-image-01050)"},
+    {"VUID-vkBindImageMemory-image-01051", "If image was not created with VkDedicatedAllocationImageCreateInfoNV::dedicatedAllocation equal to VK_TRUE, memory must not have been allocated dedicated for a specific buffer or image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-image-01051)"},
+    {"VUID-vkBindImageMemory-image-01445", "If image requires a dedicated allocation (as reported by vkGetImageMemoryRequirements2 in VkMemoryDedicatedRequirements::requiresDedicatedAllocation for image), memory must have been created with VkMemoryDedicatedAllocateInfo::image equal to image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-image-01445)"},
+    {"VUID-vkBindImageMemory-image-01608", "image must not have been created with the VK_IMAGE_CREATE_DISJOINT_BIT set. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-image-01608)"},
+    {"VUID-vkBindImageMemory-image-parameter", "image must be a valid VkImage handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-image-parameter)"},
+    {"VUID-vkBindImageMemory-image-parent", "image must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-image-parent)"},
+    {"VUID-vkBindImageMemory-memory-01047", "memory must have been allocated using one of the memory types allowed in the memoryTypeBits member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements with image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-memory-01047)"},
+    {"VUID-vkBindImageMemory-memory-01509", "If the VkMemoryAllocateInfo provided when memory was allocated included a VkMemoryDedicatedAllocateInfo structure in its pNext chain, and VkMemoryDedicatedAllocateInfo::image was not VK_NULL_HANDLE, then image must equal VkMemoryDedicatedAllocateInfo::image and memoryOffset must be zero (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-memory-01509)"},
+    {"VUID-vkBindImageMemory-memory-02628", "If the dedicated allocation image aliasing feature is not enabled, and the VkMemoryAllocateInfo provided when memory was allocated included a VkMemoryDedicatedAllocateInfo structure in its pNext chain, and VkMemoryDedicatedAllocateInfo::image was not VK_NULL_HANDLE, then image must equal VkMemoryDedicatedAllocateInfo::image and memoryOffset must be zero. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-memory-02628)"},
+    {"VUID-vkBindImageMemory-memory-02629", "If the dedicated allocation image aliasing feature is enabled, and the VkMemoryAllocateInfo provided when memory was allocated included a VkMemoryDedicatedAllocateInfo structure in its pNext chain, and VkMemoryDedicatedAllocateInfo::image was not VK_NULL_HANDLE, then memoryOffset must be zero, and image must be either equal to VkMemoryDedicatedAllocateInfo::image or an image that was created using the same parameters in VkImageCreateInfo, with the exception that extent and arrayLayers may differ subject to the following restrictions: every dimension in the extent parameter of the image being bound must be equal to or smaller than the original image for which the allocation was created; and the arrayLayers parameter of the image being bound must be equal to or smaller than the original image for which the allocation was created. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-memory-02629)"},
+    {"VUID-vkBindImageMemory-memory-02728", "If the value of VkExportMemoryAllocateInfo::handleTypes used to allocate memory is not 0, it must include at least one of the handles set in VkExternalMemoryImageCreateInfo::handleTypes when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-memory-02728)"},
+    {"VUID-vkBindImageMemory-memory-02729", "If memory was created by a memory import operation, the external handle type of the imported memory must also have been set in VkExternalMemoryImageCreateInfo::handleTypes when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-memory-02729)"},
+    {"VUID-vkBindImageMemory-memory-parameter", "memory must be a valid VkDeviceMemory handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-memory-parameter)"},
+    {"VUID-vkBindImageMemory-memory-parent", "memory must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-memory-parent)"},
+    {"VUID-vkBindImageMemory-memoryOffset-01046", "memoryOffset must be less than the size of memory (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-memoryOffset-01046)"},
+    {"VUID-vkBindImageMemory-memoryOffset-01048", "memoryOffset must be an integer multiple of the alignment member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements with image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-memoryOffset-01048)"},
+    {"VUID-vkBindImageMemory-size-01049", "The size member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements with image must be less than or equal to the size of memory minus memoryOffset (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory-size-01049)"},
+    {"VUID-vkBindImageMemory2-bindInfoCount-arraylength", "bindInfoCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory2-bindInfoCount-arraylength)"},
+    {"VUID-vkBindImageMemory2-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory2-device-parameter)"},
+    {"VUID-vkBindImageMemory2-pBindInfos-parameter", "pBindInfos must be a valid pointer to an array of bindInfoCount valid VkBindImageMemoryInfo structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkBindImageMemory2-pBindInfos-parameter)"},
+    {"VUID-vkCmdBeginConditionalRenderingEXT-None-01980", "Conditional rendering must not already be active (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginConditionalRenderingEXT-None-01980)"},
+    {"VUID-vkCmdBeginConditionalRenderingEXT-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginConditionalRenderingEXT-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdBeginConditionalRenderingEXT-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginConditionalRenderingEXT-commandBuffer-parameter)"},
+    {"VUID-vkCmdBeginConditionalRenderingEXT-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginConditionalRenderingEXT-commandBuffer-recording)"},
+    {"VUID-vkCmdBeginConditionalRenderingEXT-pConditionalRenderingBegin-parameter", "pConditionalRenderingBegin must be a valid pointer to a valid VkConditionalRenderingBeginInfoEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginConditionalRenderingEXT-pConditionalRenderingBegin-parameter)"},
+    {"VUID-vkCmdBeginDebugUtilsLabelEXT-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginDebugUtilsLabelEXT-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdBeginDebugUtilsLabelEXT-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginDebugUtilsLabelEXT-commandBuffer-parameter)"},
+    {"VUID-vkCmdBeginDebugUtilsLabelEXT-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginDebugUtilsLabelEXT-commandBuffer-recording)"},
+    {"VUID-vkCmdBeginDebugUtilsLabelEXT-pLabelInfo-parameter", "pLabelInfo must be a valid pointer to a valid VkDebugUtilsLabelEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginDebugUtilsLabelEXT-pLabelInfo-parameter)"},
+    {"VUID-vkCmdBeginQuery-None-00807", "All queries used by the command must be unavailable (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-None-00807)"},
+    {"VUID-vkCmdBeginQuery-commandBuffer-01885", "commandBuffer must not be a protected command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-commandBuffer-01885)"},
+    {"VUID-vkCmdBeginQuery-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdBeginQuery-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-commandBuffer-parameter)"},
+    {"VUID-vkCmdBeginQuery-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-commandBuffer-recording)"},
+    {"VUID-vkCmdBeginQuery-commonparent", "Both of commandBuffer, and queryPool must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-commonparent)"},
+    {"VUID-vkCmdBeginQuery-flags-parameter", "flags must be a valid combination of VkQueryControlFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-flags-parameter)"},
+    {"VUID-vkCmdBeginQuery-query-00802", "query must be less than the number of queries in queryPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-query-00802)"},
+    {"VUID-vkCmdBeginQuery-query-00808", "If called within a render pass instance, the sum of query and the number of bits set in the current subpass's view mask must be less than or equal to the number of queries in queryPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-query-00808)"},
+    {"VUID-vkCmdBeginQuery-queryPool-01922", "queryPool must have been created with a queryType that differs from that of any queries that are active within commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-queryPool-01922)"},
+    {"VUID-vkCmdBeginQuery-queryPool-03223", "If queryPool was created with a queryType of VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, the profiling lock must have been held before vkBeginCommandBuffer was called on commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-queryPool-03223)"},
+    {"VUID-vkCmdBeginQuery-queryPool-03224", "If queryPool was created with a queryType of VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR and one of the counters used to create queryPool was VK_QUERY_SCOPE_COMMAND_BUFFER_KHR, the query begin must be the first recorded command in commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-queryPool-03224)"},
+    {"VUID-vkCmdBeginQuery-queryPool-03225", "If queryPool was created with a queryType of VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR and one of the counters used to create queryPool was VK_QUERY_SCOPE_RENDER_PASS_KHR, the begin command must not be recorded within a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-queryPool-03225)"},
+    {"VUID-vkCmdBeginQuery-queryPool-03226", "If queryPool was created with a queryType of VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR and another query pool with a queryType VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR has been used within commandBuffer, its parent primary command buffer or secondary command buffer recorded within the same parent primary command buffer as commandBuffer, the performanceCounterMultipleQueryPools feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-queryPool-03226)"},
+    {"VUID-vkCmdBeginQuery-queryPool-parameter", "queryPool must be a valid VkQueryPool handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-queryPool-parameter)"},
+    {"VUID-vkCmdBeginQuery-queryType-00800", "If the precise occlusion queries feature is not enabled, or the queryType used to create queryPool was not VK_QUERY_TYPE_OCCLUSION, flags must not contain VK_QUERY_CONTROL_PRECISE_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-queryType-00800)"},
+    {"VUID-vkCmdBeginQuery-queryType-00803", "If the queryType used to create queryPool was VK_QUERY_TYPE_OCCLUSION, the VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-queryType-00803)"},
+    {"VUID-vkCmdBeginQuery-queryType-00804", "If the queryType used to create queryPool was VK_QUERY_TYPE_PIPELINE_STATISTICS and any of the pipelineStatistics indicate graphics operations, the VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-queryType-00804)"},
+    {"VUID-vkCmdBeginQuery-queryType-00805", "If the queryType used to create queryPool was VK_QUERY_TYPE_PIPELINE_STATISTICS and any of the pipelineStatistics indicate compute operations, the VkCommandPool that commandBuffer was allocated from must support compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-queryType-00805)"},
+    {"VUID-vkCmdBeginQuery-queryType-02327", "If the queryType used to create queryPool was VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT the VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-queryType-02327)"},
+    {"VUID-vkCmdBeginQuery-queryType-02328", "If the queryType used to create queryPool was VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT then VkPhysicalDeviceTransformFeedbackPropertiesEXT::transformFeedbackQueries must be supported (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-queryType-02328)"},
+    {"VUID-vkCmdBeginQuery-queryType-02804", "The queryType used to create queryPool must not be VK_QUERY_TYPE_TIMESTAMP (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQuery-queryType-02804)"},
+    {"VUID-vkCmdBeginQueryIndexedEXT-None-00807", "All queries used by the command must be unavailable (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-None-00807)"},
+    {"VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-01885", "commandBuffer must not be a protected command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-01885)"},
+    {"VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-parameter)"},
+    {"VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-recording)"},
+    {"VUID-vkCmdBeginQueryIndexedEXT-commonparent", "Both of commandBuffer, and queryPool must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-commonparent)"},
+    {"VUID-vkCmdBeginQueryIndexedEXT-flags-parameter", "flags must be a valid combination of VkQueryControlFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-flags-parameter)"},
+    {"VUID-vkCmdBeginQueryIndexedEXT-query-00802", "query must be less than the number of queries in queryPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-query-00802)"},
+    {"VUID-vkCmdBeginQueryIndexedEXT-query-00808", "If called within a render pass instance, the sum of query and the number of bits set in the current subpass's view mask must be less than or equal to the number of queries in queryPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-query-00808)"},
+    {"VUID-vkCmdBeginQueryIndexedEXT-queryPool-01922", "queryPool must have been created with a queryType that differs from that of any queries that are active within commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-queryPool-01922)"},
+    {"VUID-vkCmdBeginQueryIndexedEXT-queryPool-03223", "If queryPool was created with a queryType of VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, the profiling lock must have been held before vkBeginCommandBuffer was called on commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-queryPool-03223)"},
+    {"VUID-vkCmdBeginQueryIndexedEXT-queryPool-03224", "If queryPool was created with a queryType of VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR and one of the counters used to create queryPool was VK_QUERY_SCOPE_COMMAND_BUFFER_KHR, the query begin must be the first recorded command in commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-queryPool-03224)"},
+    {"VUID-vkCmdBeginQueryIndexedEXT-queryPool-03225", "If queryPool was created with a queryType of VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR and one of the counters used to create queryPool was VK_QUERY_SCOPE_RENDER_PASS_KHR, the begin command must not be recorded within a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-queryPool-03225)"},
+    {"VUID-vkCmdBeginQueryIndexedEXT-queryPool-03226", "If queryPool was created with a queryType of VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR and another query pool with a queryType VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR has been used within commandBuffer, its parent primary command buffer or secondary command buffer recorded within the same parent primary command buffer as commandBuffer, the performanceCounterMultipleQueryPools feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-queryPool-03226)"},
+    {"VUID-vkCmdBeginQueryIndexedEXT-queryPool-parameter", "queryPool must be a valid VkQueryPool handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-queryPool-parameter)"},
+    {"VUID-vkCmdBeginQueryIndexedEXT-queryType-00800", "If the precise occlusion queries feature is not enabled, or the queryType used to create queryPool was not VK_QUERY_TYPE_OCCLUSION, flags must not contain VK_QUERY_CONTROL_PRECISE_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-queryType-00800)"},
+    {"VUID-vkCmdBeginQueryIndexedEXT-queryType-00803", "If the queryType used to create queryPool was VK_QUERY_TYPE_OCCLUSION, the VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-queryType-00803)"},
+    {"VUID-vkCmdBeginQueryIndexedEXT-queryType-00804", "If the queryType used to create queryPool was VK_QUERY_TYPE_PIPELINE_STATISTICS and any of the pipelineStatistics indicate graphics operations, the VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-queryType-00804)"},
+    {"VUID-vkCmdBeginQueryIndexedEXT-queryType-00805", "If the queryType used to create queryPool was VK_QUERY_TYPE_PIPELINE_STATISTICS and any of the pipelineStatistics indicate compute operations, the VkCommandPool that commandBuffer was allocated from must support compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-queryType-00805)"},
+    {"VUID-vkCmdBeginQueryIndexedEXT-queryType-02338", "If the queryType used to create queryPool was VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT the VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-queryType-02338)"},
+    {"VUID-vkCmdBeginQueryIndexedEXT-queryType-02339", "If the queryType used to create queryPool was VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT the index parameter must be less than VkPhysicalDeviceTransformFeedbackPropertiesEXT::maxTransformFeedbackStreams (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-queryType-02339)"},
+    {"VUID-vkCmdBeginQueryIndexedEXT-queryType-02340", "If the queryType used to create queryPool was not VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT the index must be zero (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-queryType-02340)"},
+    {"VUID-vkCmdBeginQueryIndexedEXT-queryType-02341", "If the queryType used to create queryPool was VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT then VkPhysicalDeviceTransformFeedbackPropertiesEXT::transformFeedbackQueries must be supported (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-queryType-02341)"},
+    {"VUID-vkCmdBeginQueryIndexedEXT-queryType-02804", "The queryType used to create queryPool must not be VK_QUERY_TYPE_TIMESTAMP (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginQueryIndexedEXT-queryType-02804)"},
+    {"VUID-vkCmdBeginRenderPass-bufferlevel", "commandBuffer must be a primary VkCommandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass-bufferlevel)"},
+    {"VUID-vkCmdBeginRenderPass-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdBeginRenderPass-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass-commandBuffer-parameter)"},
+    {"VUID-vkCmdBeginRenderPass-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass-commandBuffer-recording)"},
+    {"VUID-vkCmdBeginRenderPass-contents-parameter", "contents must be a valid VkSubpassContents value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass-contents-parameter)"},
+    {"VUID-vkCmdBeginRenderPass-framebuffer-02532", "For any attachment in framebuffer that is used by renderPass and is bound to memory locations that are also bound to another attachment used by renderPass, and if at least one of those uses causes either attachment to be written to, both attachments must have had the VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass-framebuffer-02532)"},
+    {"VUID-vkCmdBeginRenderPass-initialLayout-00895", "If any of the initialLayout or finalLayout member of the VkAttachmentDescription structures or the layout member of the VkAttachmentReference structures specified when creating the render pass specified in the renderPass member of pRenderPassBegin is VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL then the corresponding attachment image view of the framebuffer specified in the framebuffer member of pRenderPassBegin must have been created with a usage value including VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass-initialLayout-00895)"},
+    {"VUID-vkCmdBeginRenderPass-initialLayout-00896", "If any of the initialLayout or finalLayout member of the VkAttachmentDescription structures or the layout member of the VkAttachmentReference structures specified when creating the render pass specified in the renderPass member of pRenderPassBegin is VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, or VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL then the corresponding attachment image view of the framebuffer specified in the framebuffer member of pRenderPassBegin must have been created with a usage value including VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass-initialLayout-00896)"},
+    {"VUID-vkCmdBeginRenderPass-initialLayout-00897", "If any of the initialLayout or finalLayout member of the VkAttachmentDescription structures or the layout member of the VkAttachmentReference structures specified when creating the render pass specified in the renderPass member of pRenderPassBegin is VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL then the corresponding attachment image view of the framebuffer specified in the framebuffer member of pRenderPassBegin must have been created with a usage value including VK_IMAGE_USAGE_SAMPLED_BIT or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass-initialLayout-00897)"},
+    {"VUID-vkCmdBeginRenderPass-initialLayout-00898", "If any of the initialLayout or finalLayout member of the VkAttachmentDescription structures or the layout member of the VkAttachmentReference structures specified when creating the render pass specified in the renderPass member of pRenderPassBegin is VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL then the corresponding attachment image view of the framebuffer specified in the framebuffer member of pRenderPassBegin must have been created with a usage value including VK_IMAGE_USAGE_TRANSFER_SRC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass-initialLayout-00898)"},
+    {"VUID-vkCmdBeginRenderPass-initialLayout-00899", "If any of the initialLayout or finalLayout member of the VkAttachmentDescription structures or the layout member of the VkAttachmentReference structures specified when creating the render pass specified in the renderPass member of pRenderPassBegin is VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL then the corresponding attachment image view of the framebuffer specified in the framebuffer member of pRenderPassBegin must have been created with a usage value including VK_IMAGE_USAGE_TRANSFER_DST_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass-initialLayout-00899)"},
+    {"VUID-vkCmdBeginRenderPass-initialLayout-00900", "If any of the initialLayout members of the VkAttachmentDescription structures specified when creating the render pass specified in the renderPass member of pRenderPassBegin is not VK_IMAGE_LAYOUT_UNDEFINED, then each such initialLayout must be equal to the current layout of the corresponding attachment image subresource of the framebuffer specified in the framebuffer member of pRenderPassBegin (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass-initialLayout-00900)"},
+    {"VUID-vkCmdBeginRenderPass-initialLayout-01758", "If any of the initialLayout or finalLayout member of the VkAttachmentDescription structures or the layout member of the VkAttachmentReference structures specified when creating the render pass specified in the renderPass member of pRenderPassBegin is VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, or VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL then the corresponding attachment image view of the framebuffer specified in the framebuffer member of pRenderPassBegin must have been created with a usage value including VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass-initialLayout-01758)"},
+    {"VUID-vkCmdBeginRenderPass-pRenderPassBegin-parameter", "pRenderPassBegin must be a valid pointer to a valid VkRenderPassBeginInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass-pRenderPassBegin-parameter)"},
+    {"VUID-vkCmdBeginRenderPass-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass-renderpass)"},
+    {"VUID-vkCmdBeginRenderPass-srcStageMask-00901", "The srcStageMask and dstStageMask members of any element of the pDependencies member of VkRenderPassCreateInfo used to create renderPass must be supported by the capabilities of the queue family identified by the queueFamilyIndex member of the VkCommandPoolCreateInfo used to create the command pool which commandBuffer was allocated from (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass-srcStageMask-00901)"},
+    {"VUID-vkCmdBeginRenderPass2KHR-bufferlevel", "commandBuffer must be a primary VkCommandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass2KHR-bufferlevel)"},
+    {"VUID-vkCmdBeginRenderPass2KHR-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass2KHR-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdBeginRenderPass2KHR-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass2KHR-commandBuffer-parameter)"},
+    {"VUID-vkCmdBeginRenderPass2KHR-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass2KHR-commandBuffer-recording)"},
+    {"VUID-vkCmdBeginRenderPass2KHR-framebuffer-02533", "For any attachment in framebuffer that is used by renderPass and is bound to memory locations that are also bound to another attachment used by renderPass, and if at least one of those uses causes either attachment to be written to, both attachments must have had the VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass2KHR-framebuffer-02533)"},
+    {"VUID-vkCmdBeginRenderPass2KHR-framebuffer-02779", "Both the framebuffer and renderPass members of pRenderPassBegin must have been created on the same VkDevice that commandBuffer was allocated on (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass2KHR-framebuffer-02779)"},
+    {"VUID-vkCmdBeginRenderPass2KHR-initialLayout-03094", "If any of the initialLayout or finalLayout member of the VkAttachmentDescription structures or the layout member of the VkAttachmentReference structures specified when creating the render pass specified in the renderPass member of pRenderPassBegin is VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL then the corresponding attachment image view of the framebuffer specified in the framebuffer member of pRenderPassBegin must have been created with a usage value including VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass2KHR-initialLayout-03094)"},
+    {"VUID-vkCmdBeginRenderPass2KHR-initialLayout-03096", "If any of the initialLayout or finalLayout member of the VkAttachmentDescription structures or the layout member of the VkAttachmentReference structures specified when creating the render pass specified in the renderPass member of pRenderPassBegin is VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, or VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL then the corresponding attachment image view of the framebuffer specified in the framebuffer member of pRenderPassBegin must have been created with a usage value including VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass2KHR-initialLayout-03096)"},
+    {"VUID-vkCmdBeginRenderPass2KHR-initialLayout-03097", "If any of the initialLayout or finalLayout member of the VkAttachmentDescription structures or the layout member of the VkAttachmentReference structures specified when creating the render pass specified in the renderPass member of pRenderPassBegin is VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL then the corresponding attachment image view of the framebuffer specified in the framebuffer member of pRenderPassBegin must have been created with a usage value including VK_IMAGE_USAGE_SAMPLED_BIT or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass2KHR-initialLayout-03097)"},
+    {"VUID-vkCmdBeginRenderPass2KHR-initialLayout-03098", "If any of the initialLayout or finalLayout member of the VkAttachmentDescription structures or the layout member of the VkAttachmentReference structures specified when creating the render pass specified in the renderPass member of pRenderPassBegin is VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL then the corresponding attachment image view of the framebuffer specified in the framebuffer member of pRenderPassBegin must have been created with a usage value including VK_IMAGE_USAGE_TRANSFER_SRC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass2KHR-initialLayout-03098)"},
+    {"VUID-vkCmdBeginRenderPass2KHR-initialLayout-03099", "If any of the initialLayout or finalLayout member of the VkAttachmentDescription structures or the layout member of the VkAttachmentReference structures specified when creating the render pass specified in the renderPass member of pRenderPassBegin is VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL then the corresponding attachment image view of the framebuffer specified in the framebuffer member of pRenderPassBegin must have been created with a usage value including VK_IMAGE_USAGE_TRANSFER_DST_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass2KHR-initialLayout-03099)"},
+    {"VUID-vkCmdBeginRenderPass2KHR-initialLayout-03100", "If any of the initialLayout members of the VkAttachmentDescription structures specified when creating the render pass specified in the renderPass member of pRenderPassBegin is not VK_IMAGE_LAYOUT_UNDEFINED, then each such initialLayout must be equal to the current layout of the corresponding attachment image subresource of the framebuffer specified in the framebuffer member of pRenderPassBegin (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass2KHR-initialLayout-03100)"},
+    {"VUID-vkCmdBeginRenderPass2KHR-pRenderPassBegin-parameter", "pRenderPassBegin must be a valid pointer to a valid VkRenderPassBeginInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass2KHR-pRenderPassBegin-parameter)"},
+    {"VUID-vkCmdBeginRenderPass2KHR-pSubpassBeginInfo-parameter", "pSubpassBeginInfo must be a valid pointer to a valid VkSubpassBeginInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass2KHR-pSubpassBeginInfo-parameter)"},
+    {"VUID-vkCmdBeginRenderPass2KHR-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass2KHR-renderpass)"},
+    {"VUID-vkCmdBeginRenderPass2KHR-srcStageMask-03101", "The srcStageMask and dstStageMask members of any element of the pDependencies member of VkRenderPassCreateInfo used to create renderPass must be supported by the capabilities of the queue family identified by the queueFamilyIndex member of the VkCommandPoolCreateInfo used to create the command pool which commandBuffer was allocated from (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginRenderPass2KHR-srcStageMask-03101)"},
+    {"VUID-vkCmdBeginTransformFeedbackEXT-None-02367", "Transform feedback must not be active (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginTransformFeedbackEXT-None-02367)"},
+    {"VUID-vkCmdBeginTransformFeedbackEXT-None-02373", "Transform feedback must not be made active in a render pass instance with multiview enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginTransformFeedbackEXT-None-02373)"},
+    {"VUID-vkCmdBeginTransformFeedbackEXT-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginTransformFeedbackEXT-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdBeginTransformFeedbackEXT-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginTransformFeedbackEXT-commandBuffer-parameter)"},
+    {"VUID-vkCmdBeginTransformFeedbackEXT-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginTransformFeedbackEXT-commandBuffer-recording)"},
+    {"VUID-vkCmdBeginTransformFeedbackEXT-commonparent", "Both of commandBuffer, and the elements of pCounterBuffers that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginTransformFeedbackEXT-commonparent)"},
+    {"VUID-vkCmdBeginTransformFeedbackEXT-counterBufferCount-02607", "If counterBufferCount is not 0, and pCounterBuffers is not NULL, pCounterBuffers must be a valid pointer to an array of counterBufferCount VkBuffer handles that are either valid or VK_NULL_HANDLE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginTransformFeedbackEXT-counterBufferCount-02607)"},
+    {"VUID-vkCmdBeginTransformFeedbackEXT-firstCounterBuffer-02368", "firstCounterBuffer must be less than VkPhysicalDeviceTransformFeedbackPropertiesEXT::maxTransformFeedbackBuffers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginTransformFeedbackEXT-firstCounterBuffer-02368)"},
+    {"VUID-vkCmdBeginTransformFeedbackEXT-firstCounterBuffer-02369", "The sum of firstCounterBuffer and counterBufferCount must be less than or equal to VkPhysicalDeviceTransformFeedbackPropertiesEXT::maxTransformFeedbackBuffers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginTransformFeedbackEXT-firstCounterBuffer-02369)"},
+    {"VUID-vkCmdBeginTransformFeedbackEXT-pCounterBuffer-02371", "If pCounterBuffer is NULL, then pCounterBufferOffsets must also be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginTransformFeedbackEXT-pCounterBuffer-02371)"},
+    {"VUID-vkCmdBeginTransformFeedbackEXT-pCounterBufferOffsets-02370", "For each buffer handle in the array, if it is not VK_NULL_HANDLE it must reference a buffer large enough to hold 4 bytes at the corresponding offset from the pCounterBufferOffsets array (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginTransformFeedbackEXT-pCounterBufferOffsets-02370)"},
+    {"VUID-vkCmdBeginTransformFeedbackEXT-pCounterBufferOffsets-parameter", "If counterBufferCount is not 0, and pCounterBufferOffsets is not NULL, pCounterBufferOffsets must be a valid pointer to an array of counterBufferCount VkDeviceSize values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginTransformFeedbackEXT-pCounterBufferOffsets-parameter)"},
+    {"VUID-vkCmdBeginTransformFeedbackEXT-pCounterBuffers-02372", "For each buffer handle in the pCounterBuffers array that is not VK_NULL_HANDLE it must have been created with a usage value containing VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginTransformFeedbackEXT-pCounterBuffers-02372)"},
+    {"VUID-vkCmdBeginTransformFeedbackEXT-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginTransformFeedbackEXT-renderpass)"},
+    {"VUID-vkCmdBeginTransformFeedbackEXT-transformFeedback-02366", "VkPhysicalDeviceTransformFeedbackFeaturesEXT::transformFeedback must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBeginTransformFeedbackEXT-transformFeedback-02366)"},
+    {"VUID-vkCmdBindDescriptorSets-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindDescriptorSets-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdBindDescriptorSets-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindDescriptorSets-commandBuffer-parameter)"},
+    {"VUID-vkCmdBindDescriptorSets-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindDescriptorSets-commandBuffer-recording)"},
+    {"VUID-vkCmdBindDescriptorSets-commonparent", "Each of commandBuffer, layout, and the elements of pDescriptorSets must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindDescriptorSets-commonparent)"},
+    {"VUID-vkCmdBindDescriptorSets-descriptorSetCount-arraylength", "descriptorSetCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindDescriptorSets-descriptorSetCount-arraylength)"},
+    {"VUID-vkCmdBindDescriptorSets-dynamicOffsetCount-00359", "dynamicOffsetCount must be equal to the total number of dynamic descriptors in pDescriptorSets (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindDescriptorSets-dynamicOffsetCount-00359)"},
+    {"VUID-vkCmdBindDescriptorSets-firstSet-00360", "The sum of firstSet and descriptorSetCount must be less than or equal to VkPipelineLayoutCreateInfo::setLayoutCount provided when layout was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindDescriptorSets-firstSet-00360)"},
+    {"VUID-vkCmdBindDescriptorSets-layout-parameter", "layout must be a valid VkPipelineLayout handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindDescriptorSets-layout-parameter)"},
+    {"VUID-vkCmdBindDescriptorSets-pDescriptorSets-00358", "Each element of pDescriptorSets must have been allocated with a VkDescriptorSetLayout that matches (is the same as, or identically defined as) the VkDescriptorSetLayout at set n in layout, where n is the sum of firstSet and the index into pDescriptorSets (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindDescriptorSets-pDescriptorSets-00358)"},
+    {"VUID-vkCmdBindDescriptorSets-pDescriptorSets-01979", "For each dynamic uniform or storage buffer binding in pDescriptorSets, the sum of the effective offset, as defined above, and the range of the binding must be less than or equal to the size of the buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindDescriptorSets-pDescriptorSets-01979)"},
+    {"VUID-vkCmdBindDescriptorSets-pDescriptorSets-parameter", "pDescriptorSets must be a valid pointer to an array of descriptorSetCount valid VkDescriptorSet handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindDescriptorSets-pDescriptorSets-parameter)"},
+    {"VUID-vkCmdBindDescriptorSets-pDynamicOffsets-01971", "Each element of pDynamicOffsets which corresponds to a descriptor binding with type VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC must be a multiple of VkPhysicalDeviceLimits::minUniformBufferOffsetAlignment (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindDescriptorSets-pDynamicOffsets-01971)"},
+    {"VUID-vkCmdBindDescriptorSets-pDynamicOffsets-01972", "Each element of pDynamicOffsets which corresponds to a descriptor binding with type VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC must be a multiple of VkPhysicalDeviceLimits::minStorageBufferOffsetAlignment (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindDescriptorSets-pDynamicOffsets-01972)"},
+    {"VUID-vkCmdBindDescriptorSets-pDynamicOffsets-parameter", "If dynamicOffsetCount is not 0, pDynamicOffsets must be a valid pointer to an array of dynamicOffsetCount uint32_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindDescriptorSets-pDynamicOffsets-parameter)"},
+    {"VUID-vkCmdBindDescriptorSets-pipelineBindPoint-00361", "pipelineBindPoint must be supported by the commandBuffer's parent VkCommandPool's queue family (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindDescriptorSets-pipelineBindPoint-00361)"},
+    {"VUID-vkCmdBindDescriptorSets-pipelineBindPoint-parameter", "pipelineBindPoint must be a valid VkPipelineBindPoint value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindDescriptorSets-pipelineBindPoint-parameter)"},
+    {"VUID-vkCmdBindIndexBuffer-buffer-00433", "buffer must have been created with the VK_BUFFER_USAGE_INDEX_BUFFER_BIT flag (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindIndexBuffer-buffer-00433)"},
+    {"VUID-vkCmdBindIndexBuffer-buffer-00434", "If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindIndexBuffer-buffer-00434)"},
+    {"VUID-vkCmdBindIndexBuffer-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindIndexBuffer-buffer-parameter)"},
+    {"VUID-vkCmdBindIndexBuffer-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindIndexBuffer-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdBindIndexBuffer-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindIndexBuffer-commandBuffer-parameter)"},
+    {"VUID-vkCmdBindIndexBuffer-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindIndexBuffer-commandBuffer-recording)"},
+    {"VUID-vkCmdBindIndexBuffer-commonparent", "Both of buffer, and commandBuffer must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindIndexBuffer-commonparent)"},
+    {"VUID-vkCmdBindIndexBuffer-indexType-02507", "indexType must not be VK_INDEX_TYPE_NONE_NV. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindIndexBuffer-indexType-02507)"},
+    {"VUID-vkCmdBindIndexBuffer-indexType-02765", "If indexType is VK_INDEX_TYPE_UINT8_EXT, the indexTypeUint8 feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindIndexBuffer-indexType-02765)"},
+    {"VUID-vkCmdBindIndexBuffer-indexType-parameter", "indexType must be a valid VkIndexType value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindIndexBuffer-indexType-parameter)"},
+    {"VUID-vkCmdBindIndexBuffer-offset-00431", "offset must be less than the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindIndexBuffer-offset-00431)"},
+    {"VUID-vkCmdBindIndexBuffer-offset-00432", "The sum of offset and the address of the range of VkDeviceMemory object that is backing buffer, must be a multiple of the type indicated by indexType (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindIndexBuffer-offset-00432)"},
+    {"VUID-vkCmdBindPipeline-None-02323", "This command must not be recorded when transform feedback is active (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindPipeline-None-02323)"},
+    {"VUID-vkCmdBindPipeline-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindPipeline-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdBindPipeline-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindPipeline-commandBuffer-parameter)"},
+    {"VUID-vkCmdBindPipeline-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindPipeline-commandBuffer-recording)"},
+    {"VUID-vkCmdBindPipeline-commonparent", "Both of commandBuffer, and pipeline must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindPipeline-commonparent)"},
+    {"VUID-vkCmdBindPipeline-pipeline-00781", "If the variable multisample rate feature is not supported, pipeline is a graphics pipeline, the current subpass has no attachments, and this is not the first call to this function with a graphics pipeline after transitioning to the current subpass, then the sample count specified by this pipeline must match that set in the previous pipeline (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindPipeline-pipeline-00781)"},
+    {"VUID-vkCmdBindPipeline-pipeline-parameter", "pipeline must be a valid VkPipeline handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindPipeline-pipeline-parameter)"},
+    {"VUID-vkCmdBindPipeline-pipelineBindPoint-00777", "If pipelineBindPoint is VK_PIPELINE_BIND_POINT_COMPUTE, the VkCommandPool that commandBuffer was allocated from must support compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindPipeline-pipelineBindPoint-00777)"},
+    {"VUID-vkCmdBindPipeline-pipelineBindPoint-00778", "If pipelineBindPoint is VK_PIPELINE_BIND_POINT_GRAPHICS, the VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindPipeline-pipelineBindPoint-00778)"},
+    {"VUID-vkCmdBindPipeline-pipelineBindPoint-00779", "If pipelineBindPoint is VK_PIPELINE_BIND_POINT_COMPUTE, pipeline must be a compute pipeline (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindPipeline-pipelineBindPoint-00779)"},
+    {"VUID-vkCmdBindPipeline-pipelineBindPoint-00780", "If pipelineBindPoint is VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline must be a graphics pipeline (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindPipeline-pipelineBindPoint-00780)"},
+    {"VUID-vkCmdBindPipeline-pipelineBindPoint-02391", "If pipelineBindPoint is VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, the VkCommandPool that commandBuffer was allocated from must support compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindPipeline-pipelineBindPoint-02391)"},
+    {"VUID-vkCmdBindPipeline-pipelineBindPoint-02392", "If pipelineBindPoint is VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, the pipeline must be a ray tracing pipeline (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindPipeline-pipelineBindPoint-02392)"},
+    {"VUID-vkCmdBindPipeline-pipelineBindPoint-parameter", "pipelineBindPoint must be a valid VkPipelineBindPoint value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindPipeline-pipelineBindPoint-parameter)"},
+    {"VUID-vkCmdBindPipeline-variableSampleLocations-01525", "If VkPhysicalDeviceSampleLocationsPropertiesEXT::variableSampleLocations is VK_FALSE, and pipeline is a graphics pipeline created with a VkPipelineSampleLocationsStateCreateInfoEXT structure having its sampleLocationsEnable member set to VK_TRUE but without VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT enabled then the current render pass instance must have been begun by specifying a VkRenderPassSampleLocationsBeginInfoEXT structure whose pPostSubpassSampleLocations member contains an element with a subpassIndex matching the current subpass index and the sampleLocationsInfo member of that element must match the sampleLocationsInfo specified in VkPipelineSampleLocationsStateCreateInfoEXT when the pipeline was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindPipeline-variableSampleLocations-01525)"},
+    {"VUID-vkCmdBindShadingRateImageNV-None-02058", "The shading rate image feature must be enabled. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindShadingRateImageNV-None-02058)"},
+    {"VUID-vkCmdBindShadingRateImageNV-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindShadingRateImageNV-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdBindShadingRateImageNV-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindShadingRateImageNV-commandBuffer-parameter)"},
+    {"VUID-vkCmdBindShadingRateImageNV-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindShadingRateImageNV-commandBuffer-recording)"},
+    {"VUID-vkCmdBindShadingRateImageNV-commonparent", "Both of commandBuffer, and imageView that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindShadingRateImageNV-commonparent)"},
+    {"VUID-vkCmdBindShadingRateImageNV-imageLayout-02063", "If imageView is not VK_NULL_HANDLE, imageLayout must be VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV or VK_IMAGE_LAYOUT_GENERAL. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindShadingRateImageNV-imageLayout-02063)"},
+    {"VUID-vkCmdBindShadingRateImageNV-imageLayout-parameter", "imageLayout must be a valid VkImageLayout value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindShadingRateImageNV-imageLayout-parameter)"},
+    {"VUID-vkCmdBindShadingRateImageNV-imageView-02059", "If imageView is not VK_NULL_HANDLE, it must be a valid VkImageView handle of type VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindShadingRateImageNV-imageView-02059)"},
+    {"VUID-vkCmdBindShadingRateImageNV-imageView-02060", "If imageView is not VK_NULL_HANDLE, it must have a format of VK_FORMAT_R8_UINT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindShadingRateImageNV-imageView-02060)"},
+    {"VUID-vkCmdBindShadingRateImageNV-imageView-02061", "If imageView is not VK_NULL_HANDLE, it must have been created with a usage value including VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindShadingRateImageNV-imageView-02061)"},
+    {"VUID-vkCmdBindShadingRateImageNV-imageView-02062", "If imageView is not VK_NULL_HANDLE, imageLayout must match the actual VkImageLayout of each subresource accessible from imageView at the time the subresource is accessed. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindShadingRateImageNV-imageView-02062)"},
+    {"VUID-vkCmdBindShadingRateImageNV-imageView-parameter", "If imageView is not VK_NULL_HANDLE, imageView must be a valid VkImageView handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindShadingRateImageNV-imageView-parameter)"},
+    {"VUID-vkCmdBindTransformFeedbackBuffersEXT-None-02365", "Transform feedback must not be active when the vkCmdBindTransformFeedbackBuffersEXT command is recorded (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindTransformFeedbackBuffersEXT-None-02365)"},
+    {"VUID-vkCmdBindTransformFeedbackBuffersEXT-bindingCount-arraylength", "If pSizes is not NULL, bindingCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindTransformFeedbackBuffersEXT-bindingCount-arraylength)"},
+    {"VUID-vkCmdBindTransformFeedbackBuffersEXT-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindTransformFeedbackBuffersEXT-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdBindTransformFeedbackBuffersEXT-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindTransformFeedbackBuffersEXT-commandBuffer-parameter)"},
+    {"VUID-vkCmdBindTransformFeedbackBuffersEXT-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindTransformFeedbackBuffersEXT-commandBuffer-recording)"},
+    {"VUID-vkCmdBindTransformFeedbackBuffersEXT-commonparent", "Both of commandBuffer, and the elements of pBuffers must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindTransformFeedbackBuffersEXT-commonparent)"},
+    {"VUID-vkCmdBindTransformFeedbackBuffersEXT-firstBinding-02356", "firstBinding must be less than VkPhysicalDeviceTransformFeedbackPropertiesEXT::maxTransformFeedbackBuffers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindTransformFeedbackBuffersEXT-firstBinding-02356)"},
+    {"VUID-vkCmdBindTransformFeedbackBuffersEXT-firstBinding-02357", "The sum of firstBinding and bindingCount must be less than or equal to VkPhysicalDeviceTransformFeedbackPropertiesEXT::maxTransformFeedbackBuffers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindTransformFeedbackBuffersEXT-firstBinding-02357)"},
+    {"VUID-vkCmdBindTransformFeedbackBuffersEXT-pBuffers-02360", "All elements of pBuffers must have been created with the VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT flag (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindTransformFeedbackBuffersEXT-pBuffers-02360)"},
+    {"VUID-vkCmdBindTransformFeedbackBuffersEXT-pBuffers-02364", "Each element of pBuffers that is non-sparse must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindTransformFeedbackBuffersEXT-pBuffers-02364)"},
+    {"VUID-vkCmdBindTransformFeedbackBuffersEXT-pBuffers-parameter", "pBuffers must be a valid pointer to an array of bindingCount valid VkBuffer handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindTransformFeedbackBuffersEXT-pBuffers-parameter)"},
+    {"VUID-vkCmdBindTransformFeedbackBuffersEXT-pOffsets-02358", "All elements of pOffsets must be less than the size of the corresponding element in pBuffers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindTransformFeedbackBuffersEXT-pOffsets-02358)"},
+    {"VUID-vkCmdBindTransformFeedbackBuffersEXT-pOffsets-02359", "All elements of pOffsets must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindTransformFeedbackBuffersEXT-pOffsets-02359)"},
+    {"VUID-vkCmdBindTransformFeedbackBuffersEXT-pOffsets-02363", "All elements of pOffsets plus pSizes, where the pSizes, element is not VK_WHOLE_SIZE, must be less than or equal to the size of the corresponding element in pBuffers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindTransformFeedbackBuffersEXT-pOffsets-02363)"},
+    {"VUID-vkCmdBindTransformFeedbackBuffersEXT-pOffsets-parameter", "pOffsets must be a valid pointer to an array of bindingCount VkDeviceSize values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindTransformFeedbackBuffersEXT-pOffsets-parameter)"},
+    {"VUID-vkCmdBindTransformFeedbackBuffersEXT-pSize-02361", "If the optional pSize array is specified, each element of pSizes must either be VK_WHOLE_SIZE, or be less than or equal to VkPhysicalDeviceTransformFeedbackPropertiesEXT::maxTransformFeedbackBufferSize (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindTransformFeedbackBuffersEXT-pSize-02361)"},
+    {"VUID-vkCmdBindTransformFeedbackBuffersEXT-pSizes-02362", "All elements of pSizes must be less than or equal to the size of the corresponding buffer in pBuffers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindTransformFeedbackBuffersEXT-pSizes-02362)"},
+    {"VUID-vkCmdBindTransformFeedbackBuffersEXT-pSizes-parameter", "If pSizes is not NULL, pSizes must be a valid pointer to an array of bindingCount VkDeviceSize values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindTransformFeedbackBuffersEXT-pSizes-parameter)"},
+    {"VUID-vkCmdBindTransformFeedbackBuffersEXT-transformFeedback-02355", "VkPhysicalDeviceTransformFeedbackFeaturesEXT::transformFeedback must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindTransformFeedbackBuffersEXT-transformFeedback-02355)"},
+    {"VUID-vkCmdBindVertexBuffers-bindingCount-arraylength", "bindingCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindVertexBuffers-bindingCount-arraylength)"},
+    {"VUID-vkCmdBindVertexBuffers-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindVertexBuffers-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdBindVertexBuffers-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindVertexBuffers-commandBuffer-parameter)"},
+    {"VUID-vkCmdBindVertexBuffers-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindVertexBuffers-commandBuffer-recording)"},
+    {"VUID-vkCmdBindVertexBuffers-commonparent", "Both of commandBuffer, and the elements of pBuffers must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindVertexBuffers-commonparent)"},
+    {"VUID-vkCmdBindVertexBuffers-firstBinding-00624", "firstBinding must be less than VkPhysicalDeviceLimits::maxVertexInputBindings (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindVertexBuffers-firstBinding-00624)"},
+    {"VUID-vkCmdBindVertexBuffers-firstBinding-00625", "The sum of firstBinding and bindingCount must be less than or equal to VkPhysicalDeviceLimits::maxVertexInputBindings (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindVertexBuffers-firstBinding-00625)"},
+    {"VUID-vkCmdBindVertexBuffers-pBuffers-00627", "All elements of pBuffers must have been created with the VK_BUFFER_USAGE_VERTEX_BUFFER_BIT flag (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindVertexBuffers-pBuffers-00627)"},
+    {"VUID-vkCmdBindVertexBuffers-pBuffers-00628", "Each element of pBuffers that is non-sparse must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindVertexBuffers-pBuffers-00628)"},
+    {"VUID-vkCmdBindVertexBuffers-pBuffers-parameter", "pBuffers must be a valid pointer to an array of bindingCount valid VkBuffer handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindVertexBuffers-pBuffers-parameter)"},
+    {"VUID-vkCmdBindVertexBuffers-pOffsets-00626", "All elements of pOffsets must be less than the size of the corresponding element in pBuffers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindVertexBuffers-pOffsets-00626)"},
+    {"VUID-vkCmdBindVertexBuffers-pOffsets-parameter", "pOffsets must be a valid pointer to an array of bindingCount VkDeviceSize values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBindVertexBuffers-pOffsets-parameter)"},
+    {"VUID-vkCmdBlitImage-commandBuffer-01834", "If commandBuffer is an unprotected command buffer, then srcImage must not be a protected image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-commandBuffer-01834)"},
+    {"VUID-vkCmdBlitImage-commandBuffer-01835", "If commandBuffer is an unprotected command buffer, then dstImage must not be a protected image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-commandBuffer-01835)"},
+    {"VUID-vkCmdBlitImage-commandBuffer-01836", "If commandBuffer is a protected command buffer, then dstImage must not be an unprotected image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-commandBuffer-01836)"},
+    {"VUID-vkCmdBlitImage-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdBlitImage-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-commandBuffer-parameter)"},
+    {"VUID-vkCmdBlitImage-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-commandBuffer-recording)"},
+    {"VUID-vkCmdBlitImage-commonparent", "Each of commandBuffer, dstImage, and srcImage must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-commonparent)"},
+    {"VUID-vkCmdBlitImage-dstImage-00224", "dstImage must have been created with VK_IMAGE_USAGE_TRANSFER_DST_BIT usage flag (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-dstImage-00224)"},
+    {"VUID-vkCmdBlitImage-dstImage-00225", "If dstImage is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-dstImage-00225)"},
+    {"VUID-vkCmdBlitImage-dstImage-00234", "dstImage must have been created with a samples value of VK_SAMPLE_COUNT_1_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-dstImage-00234)"},
+    {"VUID-vkCmdBlitImage-dstImage-01562", "dstImage must not use a format listed in Formats requiring sampler Y'CBCR conversion for VK_IMAGE_ASPECT_COLOR_BIT image views (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-dstImage-01562)"},
+    {"VUID-vkCmdBlitImage-dstImage-02000", "The format features of dstImage must contain VK_FORMAT_FEATURE_BLIT_DST_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-dstImage-02000)"},
+    {"VUID-vkCmdBlitImage-dstImage-02545", "dstImage and srcImage must not have been created with flags containing VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-dstImage-02545)"},
+    {"VUID-vkCmdBlitImage-dstImage-parameter", "dstImage must be a valid VkImage handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-dstImage-parameter)"},
+    {"VUID-vkCmdBlitImage-dstImageLayout-00226", "dstImageLayout must specify the layout of the image subresources of dstImage specified in pRegions at the time this command is executed on a VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-dstImageLayout-00226)"},
+    {"VUID-vkCmdBlitImage-dstImageLayout-00227", "dstImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-dstImageLayout-00227)"},
+    {"VUID-vkCmdBlitImage-dstImageLayout-01399", "dstImageLayout must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-dstImageLayout-01399)"},
+    {"VUID-vkCmdBlitImage-dstImageLayout-parameter", "dstImageLayout must be a valid VkImageLayout value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-dstImageLayout-parameter)"},
+    {"VUID-vkCmdBlitImage-dstSubresource-01706", "The dstSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when dstImage was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-dstSubresource-01706)"},
+    {"VUID-vkCmdBlitImage-dstSubresource-01708", "The dstSubresource.baseArrayLayer + dstSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when dstImage was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-dstSubresource-01708)"},
+    {"VUID-vkCmdBlitImage-filter-00237", "If filter is VK_FILTER_CUBIC_EXT, srcImage must have a VkImageType of VK_IMAGE_TYPE_2D (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-filter-00237)"},
+    {"VUID-vkCmdBlitImage-filter-02001", "If filter is VK_FILTER_LINEAR, then the format features of srcImage must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-filter-02001)"},
+    {"VUID-vkCmdBlitImage-filter-02002", "If filter is VK_FILTER_CUBIC_EXT, then the format features of srcImage must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-filter-02002)"},
+    {"VUID-vkCmdBlitImage-filter-parameter", "filter must be a valid VkFilter value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-filter-parameter)"},
+    {"VUID-vkCmdBlitImage-pRegions-00215", "The source region specified by each element of pRegions must be a region that is contained within srcImage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-pRegions-00215)"},
+    {"VUID-vkCmdBlitImage-pRegions-00216", "The destination region specified by each element of pRegions must be a region that is contained within dstImage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-pRegions-00216)"},
+    {"VUID-vkCmdBlitImage-pRegions-00217", "The union of all destination regions, specified by the elements of pRegions, must not overlap in memory with any texel that may be sampled during the blit operation (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-pRegions-00217)"},
+    {"VUID-vkCmdBlitImage-pRegions-parameter", "pRegions must be a valid pointer to an array of regionCount valid VkImageBlit structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-pRegions-parameter)"},
+    {"VUID-vkCmdBlitImage-regionCount-arraylength", "regionCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-regionCount-arraylength)"},
+    {"VUID-vkCmdBlitImage-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-renderpass)"},
+    {"VUID-vkCmdBlitImage-srcImage-00219", "srcImage must have been created with VK_IMAGE_USAGE_TRANSFER_SRC_BIT usage flag (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-srcImage-00219)"},
+    {"VUID-vkCmdBlitImage-srcImage-00220", "If srcImage is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-srcImage-00220)"},
+    {"VUID-vkCmdBlitImage-srcImage-00228", "The sample count of srcImage and dstImage must both be equal to VK_SAMPLE_COUNT_1_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-srcImage-00228)"},
+    {"VUID-vkCmdBlitImage-srcImage-00229", "If either of srcImage or dstImage was created with a signed integer VkFormat, the other must also have been created with a signed integer VkFormat (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-srcImage-00229)"},
+    {"VUID-vkCmdBlitImage-srcImage-00230", "If either of srcImage or dstImage was created with an unsigned integer VkFormat, the other must also have been created with an unsigned integer VkFormat (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-srcImage-00230)"},
+    {"VUID-vkCmdBlitImage-srcImage-00231", "If either of srcImage or dstImage was created with a depth/stencil format, the other must have exactly the same format (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-srcImage-00231)"},
+    {"VUID-vkCmdBlitImage-srcImage-00232", "If srcImage was created with a depth/stencil format, filter must be VK_FILTER_NEAREST (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-srcImage-00232)"},
+    {"VUID-vkCmdBlitImage-srcImage-00233", "srcImage must have been created with a samples value of VK_SAMPLE_COUNT_1_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-srcImage-00233)"},
+    {"VUID-vkCmdBlitImage-srcImage-01561", "srcImage must not use a format listed in Formats requiring sampler Y'CBCR conversion for VK_IMAGE_ASPECT_COLOR_BIT image views (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-srcImage-01561)"},
+    {"VUID-vkCmdBlitImage-srcImage-01999", "The format features of srcImage must contain VK_FORMAT_FEATURE_BLIT_SRC_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-srcImage-01999)"},
+    {"VUID-vkCmdBlitImage-srcImage-parameter", "srcImage must be a valid VkImage handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-srcImage-parameter)"},
+    {"VUID-vkCmdBlitImage-srcImageLayout-00221", "srcImageLayout must specify the layout of the image subresources of srcImage specified in pRegions at the time this command is executed on a VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-srcImageLayout-00221)"},
+    {"VUID-vkCmdBlitImage-srcImageLayout-00222", "srcImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-srcImageLayout-00222)"},
+    {"VUID-vkCmdBlitImage-srcImageLayout-01398", "srcImageLayout must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-srcImageLayout-01398)"},
+    {"VUID-vkCmdBlitImage-srcImageLayout-parameter", "srcImageLayout must be a valid VkImageLayout value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-srcImageLayout-parameter)"},
+    {"VUID-vkCmdBlitImage-srcSubresource-01705", "The srcSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when srcImage was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-srcSubresource-01705)"},
+    {"VUID-vkCmdBlitImage-srcSubresource-01707", "The srcSubresource.baseArrayLayer + srcSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when srcImage was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBlitImage-srcSubresource-01707)"},
+    {"VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-parameter)"},
+    {"VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-recording)"},
+    {"VUID-vkCmdBuildAccelerationStructureNV-commonparent", "Each of commandBuffer, dst, instanceData, scratch, and src that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBuildAccelerationStructureNV-commonparent)"},
+    {"VUID-vkCmdBuildAccelerationStructureNV-dst-02488", "dst must have been created with compatible VkAccelerationStructureInfoNV where VkAccelerationStructureInfoNV::type and VkAccelerationStructureInfoNV::flags are identical, VkAccelerationStructureInfoNV::instanceCount and VkAccelerationStructureInfoNV::geometryCount for dst are greater than or equal to the build size and each geometry in VkAccelerationStructureInfoNV::pGeometries for dst has greater than or equal to the number of vertices, indices, and AABBs. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBuildAccelerationStructureNV-dst-02488)"},
+    {"VUID-vkCmdBuildAccelerationStructureNV-dst-parameter", "dst must be a valid VkAccelerationStructureNV handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBuildAccelerationStructureNV-dst-parameter)"},
+    {"VUID-vkCmdBuildAccelerationStructureNV-geometryCount-02241", "geometryCount must be less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxGeometryCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBuildAccelerationStructureNV-geometryCount-02241)"},
+    {"VUID-vkCmdBuildAccelerationStructureNV-instanceData-parameter", "If instanceData is not VK_NULL_HANDLE, instanceData must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBuildAccelerationStructureNV-instanceData-parameter)"},
+    {"VUID-vkCmdBuildAccelerationStructureNV-pInfo-parameter", "pInfo must be a valid pointer to a valid VkAccelerationStructureInfoNV structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBuildAccelerationStructureNV-pInfo-parameter)"},
+    {"VUID-vkCmdBuildAccelerationStructureNV-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBuildAccelerationStructureNV-renderpass)"},
+    {"VUID-vkCmdBuildAccelerationStructureNV-scratch-parameter", "scratch must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBuildAccelerationStructureNV-scratch-parameter)"},
+    {"VUID-vkCmdBuildAccelerationStructureNV-src-parameter", "If src is not VK_NULL_HANDLE, src must be a valid VkAccelerationStructureNV handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBuildAccelerationStructureNV-src-parameter)"},
+    {"VUID-vkCmdBuildAccelerationStructureNV-update-02489", "If update is VK_TRUE, src must not be VK_NULL_HANDLE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBuildAccelerationStructureNV-update-02489)"},
+    {"VUID-vkCmdBuildAccelerationStructureNV-update-02490", "If update is VK_TRUE, src must have been built before with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV set in VkAccelerationStructureInfoNV::flags (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBuildAccelerationStructureNV-update-02490)"},
+    {"VUID-vkCmdBuildAccelerationStructureNV-update-02491", "If update is VK_FALSE, The size member of the VkMemoryRequirements structure returned from a call to vkGetAccelerationStructureMemoryRequirementsNV with VkAccelerationStructureMemoryRequirementsInfoNV::accelerationStructure set to dst and VkAccelerationStructureMemoryRequirementsInfoNV::type set to VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV must be less than or equal to the size of scratch minus scratchOffset (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBuildAccelerationStructureNV-update-02491)"},
+    {"VUID-vkCmdBuildAccelerationStructureNV-update-02492", "If update is VK_TRUE, The size member of the VkMemoryRequirements structure returned from a call to vkGetAccelerationStructureMemoryRequirementsNV with VkAccelerationStructureMemoryRequirementsInfoNV::accelerationStructure set to dst and VkAccelerationStructureMemoryRequirementsInfoNV::type set to VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV must be less than or equal to the size of scratch minus scratchOffset (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdBuildAccelerationStructureNV-update-02492)"},
+    {"VUID-vkCmdClearAttachments-aspectMask-02501", "If the aspectMask member of any element of pAttachments contains VK_IMAGE_ASPECT_COLOR_BIT, then the colorAttachment member of that element must either refer to a color attachment which is VK_ATTACHMENT_UNUSED, or must be a valid color attachment. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearAttachments-aspectMask-02501)"},
+    {"VUID-vkCmdClearAttachments-aspectMask-02502", "If the aspectMask member of any element of pAttachments contains VK_IMAGE_ASPECT_DEPTH_BIT, then the current subpass' depth/stencil attachment must either be VK_ATTACHMENT_UNUSED, or must have a depth component (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearAttachments-aspectMask-02502)"},
+    {"VUID-vkCmdClearAttachments-aspectMask-02503", "If the aspectMask member of any element of pAttachments contains VK_IMAGE_ASPECT_STENCIL_BIT, then the current subpass' depth/stencil attachment must either be VK_ATTACHMENT_UNUSED, or must have a stencil component (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearAttachments-aspectMask-02503)"},
+    {"VUID-vkCmdClearAttachments-attachmentCount-arraylength", "attachmentCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearAttachments-attachmentCount-arraylength)"},
+    {"VUID-vkCmdClearAttachments-baseArrayLayer-00018", "If the render pass instance this is recorded in uses multiview, then baseArrayLayer must be zero and layerCount must be one. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearAttachments-baseArrayLayer-00018)"},
+    {"VUID-vkCmdClearAttachments-commandBuffer-02504", "If commandBuffer is an unprotected command buffer, then each attachment to be cleared must not be a protected image. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearAttachments-commandBuffer-02504)"},
+    {"VUID-vkCmdClearAttachments-commandBuffer-02505", "If commandBuffer is a protected command buffer, then each attachment to be cleared must not be an unprotected image. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearAttachments-commandBuffer-02505)"},
+    {"VUID-vkCmdClearAttachments-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearAttachments-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdClearAttachments-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearAttachments-commandBuffer-parameter)"},
+    {"VUID-vkCmdClearAttachments-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearAttachments-commandBuffer-recording)"},
+    {"VUID-vkCmdClearAttachments-layerCount-01934", "The layerCount member of each element of pRects must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearAttachments-layerCount-01934)"},
+    {"VUID-vkCmdClearAttachments-pAttachments-parameter", "pAttachments must be a valid pointer to an array of attachmentCount valid VkClearAttachment structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearAttachments-pAttachments-parameter)"},
+    {"VUID-vkCmdClearAttachments-pRects-00016", "The rectangular region specified by each element of pRects must be contained within the render area of the current render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearAttachments-pRects-00016)"},
+    {"VUID-vkCmdClearAttachments-pRects-00017", "The layers specified by each element of pRects must be contained within every attachment that pAttachments refers to (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearAttachments-pRects-00017)"},
+    {"VUID-vkCmdClearAttachments-pRects-parameter", "pRects must be a valid pointer to an array of rectCount VkClearRect structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearAttachments-pRects-parameter)"},
+    {"VUID-vkCmdClearAttachments-rect-02682", "The rect member of each element of pRects must have an extent.width greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearAttachments-rect-02682)"},
+    {"VUID-vkCmdClearAttachments-rect-02683", "The rect member of each element of pRects must have an extent.height greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearAttachments-rect-02683)"},
+    {"VUID-vkCmdClearAttachments-rectCount-arraylength", "rectCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearAttachments-rectCount-arraylength)"},
+    {"VUID-vkCmdClearAttachments-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearAttachments-renderpass)"},
+    {"VUID-vkCmdClearColorImage-aspectMask-02498", "The VkImageSubresourceRange::aspectMask members of the elements of the pRanges array must each only include VK_IMAGE_ASPECT_COLOR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearColorImage-aspectMask-02498)"},
+    {"VUID-vkCmdClearColorImage-baseArrayLayer-01472", "The VkImageSubresourceRange::baseArrayLayer members of the elements of the pRanges array must each be less than the arrayLayers specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearColorImage-baseArrayLayer-01472)"},
+    {"VUID-vkCmdClearColorImage-baseMipLevel-01470", "The VkImageSubresourceRange::baseMipLevel members of the elements of the pRanges array must each be less than the mipLevels specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearColorImage-baseMipLevel-01470)"},
+    {"VUID-vkCmdClearColorImage-commandBuffer-01805", "If commandBuffer is an unprotected command buffer, then image must not be a protected image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearColorImage-commandBuffer-01805)"},
+    {"VUID-vkCmdClearColorImage-commandBuffer-01806", "If commandBuffer is a protected command buffer, then image must not be an unprotected image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearColorImage-commandBuffer-01806)"},
+    {"VUID-vkCmdClearColorImage-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearColorImage-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdClearColorImage-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearColorImage-commandBuffer-parameter)"},
+    {"VUID-vkCmdClearColorImage-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearColorImage-commandBuffer-recording)"},
+    {"VUID-vkCmdClearColorImage-commonparent", "Both of commandBuffer, and image must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearColorImage-commonparent)"},
+    {"VUID-vkCmdClearColorImage-image-00002", "image must have been created with VK_IMAGE_USAGE_TRANSFER_DST_BIT usage flag (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearColorImage-image-00002)"},
+    {"VUID-vkCmdClearColorImage-image-00003", "If image is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearColorImage-image-00003)"},
+    {"VUID-vkCmdClearColorImage-image-00007", "image must not have a compressed or depth/stencil format (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearColorImage-image-00007)"},
+    {"VUID-vkCmdClearColorImage-image-01545", "image must not use a format listed in Formats requiring sampler Y'CBCR conversion for VK_IMAGE_ASPECT_COLOR_BIT image views (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearColorImage-image-01545)"},
+    {"VUID-vkCmdClearColorImage-image-01993", "The format features of image must contain VK_FORMAT_FEATURE_TRANSFER_DST_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearColorImage-image-01993)"},
+    {"VUID-vkCmdClearColorImage-image-parameter", "image must be a valid VkImage handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearColorImage-image-parameter)"},
+    {"VUID-vkCmdClearColorImage-imageLayout-00004", "imageLayout must specify the layout of the image subresource ranges of image specified in pRanges at the time this command is executed on a VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearColorImage-imageLayout-00004)"},
+    {"VUID-vkCmdClearColorImage-imageLayout-00005", "imageLayout must be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearColorImage-imageLayout-00005)"},
+    {"VUID-vkCmdClearColorImage-imageLayout-01394", "imageLayout must be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, or VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearColorImage-imageLayout-01394)"},
+    {"VUID-vkCmdClearColorImage-imageLayout-parameter", "imageLayout must be a valid VkImageLayout value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearColorImage-imageLayout-parameter)"},
+    {"VUID-vkCmdClearColorImage-pColor-parameter", "pColor must be a valid pointer to a valid VkClearColorValue union (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearColorImage-pColor-parameter)"},
+    {"VUID-vkCmdClearColorImage-pRanges-01692", "For each VkImageSubresourceRange element of pRanges, if the levelCount member is not VK_REMAINING_MIP_LEVELS, then baseMipLevel + levelCount must be less than the mipLevels specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearColorImage-pRanges-01692)"},
+    {"VUID-vkCmdClearColorImage-pRanges-01693", "For each VkImageSubresourceRange element of pRanges, if the layerCount member is not VK_REMAINING_ARRAY_LAYERS, then baseArrayLayer + layerCount must be less than the arrayLayers specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearColorImage-pRanges-01693)"},
+    {"VUID-vkCmdClearColorImage-pRanges-parameter", "pRanges must be a valid pointer to an array of rangeCount valid VkImageSubresourceRange structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearColorImage-pRanges-parameter)"},
+    {"VUID-vkCmdClearColorImage-rangeCount-arraylength", "rangeCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearColorImage-rangeCount-arraylength)"},
+    {"VUID-vkCmdClearColorImage-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearColorImage-renderpass)"},
+    {"VUID-vkCmdClearDepthStencilImage-aspectMask-02824", "The VkImageSubresourceRange::aspectMask member of each element of the pRanges array must not include bits other than VK_IMAGE_ASPECT_DEPTH_BIT or VK_IMAGE_ASPECT_STENCIL_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-aspectMask-02824)"},
+    {"VUID-vkCmdClearDepthStencilImage-baseArrayLayer-01476", "The VkImageSubresourceRange::baseArrayLayer members of the elements of the pRanges array must each be less than the arrayLayers specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-baseArrayLayer-01476)"},
+    {"VUID-vkCmdClearDepthStencilImage-baseMipLevel-01474", "The VkImageSubresourceRange::baseMipLevel members of the elements of the pRanges array must each be less than the mipLevels specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-baseMipLevel-01474)"},
+    {"VUID-vkCmdClearDepthStencilImage-commandBuffer-01807", "If commandBuffer is an unprotected command buffer, then image must not be a protected image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-commandBuffer-01807)"},
+    {"VUID-vkCmdClearDepthStencilImage-commandBuffer-01808", "If commandBuffer is a protected command buffer, then image must not be an unprotected image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-commandBuffer-01808)"},
+    {"VUID-vkCmdClearDepthStencilImage-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdClearDepthStencilImage-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-commandBuffer-parameter)"},
+    {"VUID-vkCmdClearDepthStencilImage-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-commandBuffer-recording)"},
+    {"VUID-vkCmdClearDepthStencilImage-commonparent", "Both of commandBuffer, and image must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-commonparent)"},
+    {"VUID-vkCmdClearDepthStencilImage-image-00009", "image must have been created with VK_IMAGE_USAGE_TRANSFER_DST_BIT usage flag (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-image-00009)"},
+    {"VUID-vkCmdClearDepthStencilImage-image-00010", "If image is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-image-00010)"},
+    {"VUID-vkCmdClearDepthStencilImage-image-00014", "image must have a depth/stencil format (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-image-00014)"},
+    {"VUID-vkCmdClearDepthStencilImage-image-01994", "The format features of image must contain VK_FORMAT_FEATURE_TRANSFER_DST_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-image-01994)"},
+    {"VUID-vkCmdClearDepthStencilImage-image-02825", "If the image's format does not have a stencil component, then the VkImageSubresourceRange::aspectMask member of each element of the pRanges array must not include the VK_IMAGE_ASPECT_STENCIL_BIT bit (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-image-02825)"},
+    {"VUID-vkCmdClearDepthStencilImage-image-02826", "If the image's format does not have a depth component, then the VkImageSubresourceRange::aspectMask member of each element of the pRanges array must not include the VK_IMAGE_ASPECT_DEPTH_BIT bit (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-image-02826)"},
+    {"VUID-vkCmdClearDepthStencilImage-image-parameter", "image must be a valid VkImage handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-image-parameter)"},
+    {"VUID-vkCmdClearDepthStencilImage-imageLayout-00011", "imageLayout must specify the layout of the image subresource ranges of image specified in pRanges at the time this command is executed on a VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-imageLayout-00011)"},
+    {"VUID-vkCmdClearDepthStencilImage-imageLayout-00012", "imageLayout must be either of VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-imageLayout-00012)"},
+    {"VUID-vkCmdClearDepthStencilImage-imageLayout-parameter", "imageLayout must be a valid VkImageLayout value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-imageLayout-parameter)"},
+    {"VUID-vkCmdClearDepthStencilImage-pDepthStencil-parameter", "pDepthStencil must be a valid pointer to a valid VkClearDepthStencilValue structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-pDepthStencil-parameter)"},
+    {"VUID-vkCmdClearDepthStencilImage-pRanges-01694", "For each VkImageSubresourceRange element of pRanges, if the levelCount member is not VK_REMAINING_MIP_LEVELS, then baseMipLevel + levelCount must be less than the mipLevels specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-pRanges-01694)"},
+    {"VUID-vkCmdClearDepthStencilImage-pRanges-01695", "For each VkImageSubresourceRange element of pRanges, if the layerCount member is not VK_REMAINING_ARRAY_LAYERS, then baseArrayLayer + layerCount must be less than the arrayLayers specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-pRanges-01695)"},
+    {"VUID-vkCmdClearDepthStencilImage-pRanges-02658", "If any element of pRanges.aspect includes VK_IMAGE_ASPECT_STENCIL_BIT, and image was created with separate stencil usage, VK_IMAGE_USAGE_TRANSFER_DST_BIT must have been included in the VkImageStencilUsageCreateInfoEXT::stencilUsage used to create image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-pRanges-02658)"},
+    {"VUID-vkCmdClearDepthStencilImage-pRanges-02659", "If any element of pRanges.aspect includes VK_IMAGE_ASPECT_STENCIL_BIT, and image was not created with separate stencil usage, VK_IMAGE_USAGE_TRANSFER_DST_BIT must have been included in the VkImageCreateInfo::usage used to create image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-pRanges-02659)"},
+    {"VUID-vkCmdClearDepthStencilImage-pRanges-02660", "If any element of pRanges.aspect includes VK_IMAGE_ASPECT_DEPTH_BIT, VK_IMAGE_USAGE_TRANSFER_DST_BIT must have been included in the VkImageCreateInfo::usage used to create image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-pRanges-02660)"},
+    {"VUID-vkCmdClearDepthStencilImage-pRanges-parameter", "pRanges must be a valid pointer to an array of rangeCount valid VkImageSubresourceRange structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-pRanges-parameter)"},
+    {"VUID-vkCmdClearDepthStencilImage-rangeCount-arraylength", "rangeCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-rangeCount-arraylength)"},
+    {"VUID-vkCmdClearDepthStencilImage-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdClearDepthStencilImage-renderpass)"},
+    {"VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-parameter)"},
+    {"VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-recording)"},
+    {"VUID-vkCmdCopyAccelerationStructureNV-commonparent", "Each of commandBuffer, dst, and src must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyAccelerationStructureNV-commonparent)"},
+    {"VUID-vkCmdCopyAccelerationStructureNV-dst-parameter", "dst must be a valid VkAccelerationStructureNV handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyAccelerationStructureNV-dst-parameter)"},
+    {"VUID-vkCmdCopyAccelerationStructureNV-mode-02496", "mode must be VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV or VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyAccelerationStructureNV-mode-02496)"},
+    {"VUID-vkCmdCopyAccelerationStructureNV-mode-parameter", "mode must be a valid VkCopyAccelerationStructureModeNV value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyAccelerationStructureNV-mode-parameter)"},
+    {"VUID-vkCmdCopyAccelerationStructureNV-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyAccelerationStructureNV-renderpass)"},
+    {"VUID-vkCmdCopyAccelerationStructureNV-src-02497", "src must have been built with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV if mode is VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyAccelerationStructureNV-src-02497)"},
+    {"VUID-vkCmdCopyAccelerationStructureNV-src-parameter", "src must be a valid VkAccelerationStructureNV handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyAccelerationStructureNV-src-parameter)"},
+    {"VUID-vkCmdCopyBuffer-commandBuffer-01822", "If commandBuffer is an unprotected command buffer, then srcBuffer must not be a protected buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBuffer-commandBuffer-01822)"},
+    {"VUID-vkCmdCopyBuffer-commandBuffer-01823", "If commandBuffer is an unprotected command buffer, then dstBuffer must not be a protected buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBuffer-commandBuffer-01823)"},
+    {"VUID-vkCmdCopyBuffer-commandBuffer-01824", "If commandBuffer is a protected command buffer, then dstBuffer must not be an unprotected buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBuffer-commandBuffer-01824)"},
+    {"VUID-vkCmdCopyBuffer-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support transfer, graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBuffer-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdCopyBuffer-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBuffer-commandBuffer-parameter)"},
+    {"VUID-vkCmdCopyBuffer-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBuffer-commandBuffer-recording)"},
+    {"VUID-vkCmdCopyBuffer-commonparent", "Each of commandBuffer, dstBuffer, and srcBuffer must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBuffer-commonparent)"},
+    {"VUID-vkCmdCopyBuffer-dstBuffer-00120", "dstBuffer must have been created with VK_BUFFER_USAGE_TRANSFER_DST_BIT usage flag (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBuffer-dstBuffer-00120)"},
+    {"VUID-vkCmdCopyBuffer-dstBuffer-00121", "If dstBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBuffer-dstBuffer-00121)"},
+    {"VUID-vkCmdCopyBuffer-dstBuffer-parameter", "dstBuffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBuffer-dstBuffer-parameter)"},
+    {"VUID-vkCmdCopyBuffer-dstOffset-00114", "The dstOffset member of each element of pRegions must be less than the size of dstBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBuffer-dstOffset-00114)"},
+    {"VUID-vkCmdCopyBuffer-pRegions-00117", "The union of the source regions, and the union of the destination regions, specified by the elements of pRegions, must not overlap in memory (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBuffer-pRegions-00117)"},
+    {"VUID-vkCmdCopyBuffer-pRegions-parameter", "pRegions must be a valid pointer to an array of regionCount valid VkBufferCopy structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBuffer-pRegions-parameter)"},
+    {"VUID-vkCmdCopyBuffer-regionCount-arraylength", "regionCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBuffer-regionCount-arraylength)"},
+    {"VUID-vkCmdCopyBuffer-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBuffer-renderpass)"},
+    {"VUID-vkCmdCopyBuffer-size-00115", "The size member of each element of pRegions must be less than or equal to the size of srcBuffer minus srcOffset (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBuffer-size-00115)"},
+    {"VUID-vkCmdCopyBuffer-size-00116", "The size member of each element of pRegions must be less than or equal to the size of dstBuffer minus dstOffset (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBuffer-size-00116)"},
+    {"VUID-vkCmdCopyBuffer-srcBuffer-00118", "srcBuffer must have been created with VK_BUFFER_USAGE_TRANSFER_SRC_BIT usage flag (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBuffer-srcBuffer-00118)"},
+    {"VUID-vkCmdCopyBuffer-srcBuffer-00119", "If srcBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBuffer-srcBuffer-00119)"},
+    {"VUID-vkCmdCopyBuffer-srcBuffer-parameter", "srcBuffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBuffer-srcBuffer-parameter)"},
+    {"VUID-vkCmdCopyBuffer-srcOffset-00113", "The srcOffset member of each element of pRegions must be less than the size of srcBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBuffer-srcOffset-00113)"},
+    {"VUID-vkCmdCopyBufferToImage-commandBuffer-01828", "If commandBuffer is an unprotected command buffer, then srcBuffer must not be a protected buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBufferToImage-commandBuffer-01828)"},
+    {"VUID-vkCmdCopyBufferToImage-commandBuffer-01829", "If commandBuffer is an unprotected command buffer, then dstImage must not be a protected image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBufferToImage-commandBuffer-01829)"},
+    {"VUID-vkCmdCopyBufferToImage-commandBuffer-01830", "If commandBuffer is a protected command buffer, then dstImage must not be an unprotected image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBufferToImage-commandBuffer-01830)"},
+    {"VUID-vkCmdCopyBufferToImage-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support transfer, graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBufferToImage-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdCopyBufferToImage-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBufferToImage-commandBuffer-parameter)"},
+    {"VUID-vkCmdCopyBufferToImage-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBufferToImage-commandBuffer-recording)"},
+    {"VUID-vkCmdCopyBufferToImage-commonparent", "Each of commandBuffer, dstImage, and srcBuffer must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBufferToImage-commonparent)"},
+    {"VUID-vkCmdCopyBufferToImage-dstImage-00177", "dstImage must have been created with VK_IMAGE_USAGE_TRANSFER_DST_BIT usage flag (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBufferToImage-dstImage-00177)"},
+    {"VUID-vkCmdCopyBufferToImage-dstImage-00178", "If dstImage is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBufferToImage-dstImage-00178)"},
+    {"VUID-vkCmdCopyBufferToImage-dstImage-00179", "dstImage must have a sample count equal to VK_SAMPLE_COUNT_1_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBufferToImage-dstImage-00179)"},
+    {"VUID-vkCmdCopyBufferToImage-dstImage-01997", "The format features of dstImage must contain VK_FORMAT_FEATURE_TRANSFER_DST_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBufferToImage-dstImage-01997)"},
+    {"VUID-vkCmdCopyBufferToImage-dstImage-02543", "dstImage must not have been created with flags containing VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBufferToImage-dstImage-02543)"},
+    {"VUID-vkCmdCopyBufferToImage-dstImage-parameter", "dstImage must be a valid VkImage handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBufferToImage-dstImage-parameter)"},
+    {"VUID-vkCmdCopyBufferToImage-dstImageLayout-00180", "dstImageLayout must specify the layout of the image subresources of dstImage specified in pRegions at the time this command is executed on a VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBufferToImage-dstImageLayout-00180)"},
+    {"VUID-vkCmdCopyBufferToImage-dstImageLayout-00181", "dstImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBufferToImage-dstImageLayout-00181)"},
+    {"VUID-vkCmdCopyBufferToImage-dstImageLayout-01396", "dstImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, or VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBufferToImage-dstImageLayout-01396)"},
+    {"VUID-vkCmdCopyBufferToImage-dstImageLayout-parameter", "dstImageLayout must be a valid VkImageLayout value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBufferToImage-dstImageLayout-parameter)"},
+    {"VUID-vkCmdCopyBufferToImage-imageOffset-01793", "The imageOffset and imageExtent members of each element of pRegions must respect the image transfer granularity requirements of commandBuffer's command pool's queue family, as described in VkQueueFamilyProperties (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBufferToImage-imageOffset-01793)"},
+    {"VUID-vkCmdCopyBufferToImage-imageSubresource-01701", "The imageSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when dstImage was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBufferToImage-imageSubresource-01701)"},
+    {"VUID-vkCmdCopyBufferToImage-imageSubresource-01702", "The imageSubresource.baseArrayLayer + imageSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when dstImage was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBufferToImage-imageSubresource-01702)"},
+    {"VUID-vkCmdCopyBufferToImage-pRegions-00171", "srcBuffer must be large enough to contain all buffer locations that are accessed according to Buffer and Image Addressing, for each element of pRegions (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBufferToImage-pRegions-00171)"},
+    {"VUID-vkCmdCopyBufferToImage-pRegions-00172", "The image region specified by each element of pRegions must be a region that is contained within dstImage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBufferToImage-pRegions-00172)"},
+    {"VUID-vkCmdCopyBufferToImage-pRegions-00173", "The union of all source regions, and the union of all destination regions, specified by the elements of pRegions, must not overlap in memory (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBufferToImage-pRegions-00173)"},
+    {"VUID-vkCmdCopyBufferToImage-pRegions-parameter", "pRegions must be a valid pointer to an array of regionCount valid VkBufferImageCopy structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBufferToImage-pRegions-parameter)"},
+    {"VUID-vkCmdCopyBufferToImage-regionCount-arraylength", "regionCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBufferToImage-regionCount-arraylength)"},
+    {"VUID-vkCmdCopyBufferToImage-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBufferToImage-renderpass)"},
+    {"VUID-vkCmdCopyBufferToImage-srcBuffer-00174", "srcBuffer must have been created with VK_BUFFER_USAGE_TRANSFER_SRC_BIT usage flag (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBufferToImage-srcBuffer-00174)"},
+    {"VUID-vkCmdCopyBufferToImage-srcBuffer-00176", "If srcBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBufferToImage-srcBuffer-00176)"},
+    {"VUID-vkCmdCopyBufferToImage-srcBuffer-parameter", "srcBuffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyBufferToImage-srcBuffer-parameter)"},
+    {"VUID-vkCmdCopyImage-None-01549", "In a copy to or from a plane of a multi-planar image, the VkFormat of the image and plane must be compatible according to the description of compatible planes for the plane being copied (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-None-01549)"},
+    {"VUID-vkCmdCopyImage-aspectMask-01550", "When a copy is performed to or from an image with a multi-planar format, the aspectMask of the srcSubresource and/or dstSubresource that refers to the multi-planar image must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT (with VK_IMAGE_ASPECT_PLANE_2_BIT valid only for a VkFormat with three planes) (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-aspectMask-01550)"},
+    {"VUID-vkCmdCopyImage-commandBuffer-01825", "If commandBuffer is an unprotected command buffer, then srcImage must not be a protected image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-commandBuffer-01825)"},
+    {"VUID-vkCmdCopyImage-commandBuffer-01826", "If commandBuffer is an unprotected command buffer, then dstImage must not be a protected image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-commandBuffer-01826)"},
+    {"VUID-vkCmdCopyImage-commandBuffer-01827", "If commandBuffer is a protected command buffer, then dstImage must not be an unprotected image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-commandBuffer-01827)"},
+    {"VUID-vkCmdCopyImage-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support transfer, graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdCopyImage-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-commandBuffer-parameter)"},
+    {"VUID-vkCmdCopyImage-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-commandBuffer-recording)"},
+    {"VUID-vkCmdCopyImage-commonparent", "Each of commandBuffer, dstImage, and srcImage must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-commonparent)"},
+    {"VUID-vkCmdCopyImage-dstImage-00131", "dstImage must have been created with VK_IMAGE_USAGE_TRANSFER_DST_BIT usage flag (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-dstImage-00131)"},
+    {"VUID-vkCmdCopyImage-dstImage-00132", "If dstImage is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-dstImage-00132)"},
+    {"VUID-vkCmdCopyImage-dstImage-01547", "If dstImage is non-sparse then the image or disjoint plane that is the destination of the copy must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-dstImage-01547)"},
+    {"VUID-vkCmdCopyImage-dstImage-01996", "The format features of dstImage must contain VK_FORMAT_FEATURE_TRANSFER_DST_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-dstImage-01996)"},
+    {"VUID-vkCmdCopyImage-dstImage-02542", "dstImage and srcImage must not have been created with flags containing VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-dstImage-02542)"},
+    {"VUID-vkCmdCopyImage-dstImage-parameter", "dstImage must be a valid VkImage handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-dstImage-parameter)"},
+    {"VUID-vkCmdCopyImage-dstImageLayout-00133", "dstImageLayout must specify the layout of the image subresources of dstImage specified in pRegions at the time this command is executed on a VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-dstImageLayout-00133)"},
+    {"VUID-vkCmdCopyImage-dstImageLayout-00134", "dstImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-dstImageLayout-00134)"},
+    {"VUID-vkCmdCopyImage-dstImageLayout-01395", "dstImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, or VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-dstImageLayout-01395)"},
+    {"VUID-vkCmdCopyImage-dstImageLayout-parameter", "dstImageLayout must be a valid VkImageLayout value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-dstImageLayout-parameter)"},
+    {"VUID-vkCmdCopyImage-dstOffset-01784", "The dstOffset and extent members of each element of pRegions must respect the image transfer granularity requirements of commandBuffer's command pool's queue family, as described in VkQueueFamilyProperties (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-dstOffset-01784)"},
+    {"VUID-vkCmdCopyImage-dstSubresource-01697", "The dstSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when dstImage was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-dstSubresource-01697)"},
+    {"VUID-vkCmdCopyImage-dstSubresource-01699", "The dstSubresource.baseArrayLayer + dstSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when dstImage was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-dstSubresource-01699)"},
+    {"VUID-vkCmdCopyImage-pRegions-00122", "The source region specified by each element of pRegions must be a region that is contained within srcImage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-pRegions-00122)"},
+    {"VUID-vkCmdCopyImage-pRegions-00123", "The destination region specified by each element of pRegions must be a region that is contained within dstImage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-pRegions-00123)"},
+    {"VUID-vkCmdCopyImage-pRegions-00124", "The union of all source regions, and the union of all destination regions, specified by the elements of pRegions, must not overlap in memory (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-pRegions-00124)"},
+    {"VUID-vkCmdCopyImage-pRegions-parameter", "pRegions must be a valid pointer to an array of regionCount valid VkImageCopy structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-pRegions-parameter)"},
+    {"VUID-vkCmdCopyImage-regionCount-arraylength", "regionCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-regionCount-arraylength)"},
+    {"VUID-vkCmdCopyImage-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-renderpass)"},
+    {"VUID-vkCmdCopyImage-srcImage-00126", "srcImage must have been created with VK_IMAGE_USAGE_TRANSFER_SRC_BIT usage flag (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-srcImage-00126)"},
+    {"VUID-vkCmdCopyImage-srcImage-00127", "If srcImage is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-srcImage-00127)"},
+    {"VUID-vkCmdCopyImage-srcImage-00135", "The VkFormat of each of srcImage and dstImage must be compatible, as defined above (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-srcImage-00135)"},
+    {"VUID-vkCmdCopyImage-srcImage-00136", "The sample count of srcImage and dstImage must match (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-srcImage-00136)"},
+    {"VUID-vkCmdCopyImage-srcImage-01546", "If srcImage is non-sparse then the image or disjoint plane to be copied must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-srcImage-01546)"},
+    {"VUID-vkCmdCopyImage-srcImage-01548", "If the VkFormat of each of srcImage and dstImage is not a multi-planar format, the VkFormat of each of srcImage and dstImage must be compatible, as defined above (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-srcImage-01548)"},
+    {"VUID-vkCmdCopyImage-srcImage-01995", "The format features of srcImage must contain VK_FORMAT_FEATURE_TRANSFER_SRC_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-srcImage-01995)"},
+    {"VUID-vkCmdCopyImage-srcImage-parameter", "srcImage must be a valid VkImage handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-srcImage-parameter)"},
+    {"VUID-vkCmdCopyImage-srcImageLayout-00128", "srcImageLayout must specify the layout of the image subresources of srcImage specified in pRegions at the time this command is executed on a VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-srcImageLayout-00128)"},
+    {"VUID-vkCmdCopyImage-srcImageLayout-00129", "srcImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-srcImageLayout-00129)"},
+    {"VUID-vkCmdCopyImage-srcImageLayout-01917", "srcImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, or VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-srcImageLayout-01917)"},
+    {"VUID-vkCmdCopyImage-srcImageLayout-parameter", "srcImageLayout must be a valid VkImageLayout value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-srcImageLayout-parameter)"},
+    {"VUID-vkCmdCopyImage-srcOffset-01783", "The srcOffset and extent members of each element of pRegions must respect the image transfer granularity requirements of commandBuffer's command pool's queue family, as described in VkQueueFamilyProperties (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-srcOffset-01783)"},
+    {"VUID-vkCmdCopyImage-srcSubresource-01696", "The srcSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when srcImage was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-srcSubresource-01696)"},
+    {"VUID-vkCmdCopyImage-srcSubresource-01698", "The srcSubresource.baseArrayLayer + srcSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when srcImage was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImage-srcSubresource-01698)"},
+    {"VUID-vkCmdCopyImageToBuffer-commandBuffer-01831", "If commandBuffer is an unprotected command buffer, then srcImage must not be a protected image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImageToBuffer-commandBuffer-01831)"},
+    {"VUID-vkCmdCopyImageToBuffer-commandBuffer-01832", "If commandBuffer is an unprotected command buffer, then dstBuffer must not be a protected buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImageToBuffer-commandBuffer-01832)"},
+    {"VUID-vkCmdCopyImageToBuffer-commandBuffer-01833", "If commandBuffer is a protected command buffer, then dstBuffer must not be an unprotected buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImageToBuffer-commandBuffer-01833)"},
+    {"VUID-vkCmdCopyImageToBuffer-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support transfer, graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImageToBuffer-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdCopyImageToBuffer-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImageToBuffer-commandBuffer-parameter)"},
+    {"VUID-vkCmdCopyImageToBuffer-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImageToBuffer-commandBuffer-recording)"},
+    {"VUID-vkCmdCopyImageToBuffer-commonparent", "Each of commandBuffer, dstBuffer, and srcImage must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImageToBuffer-commonparent)"},
+    {"VUID-vkCmdCopyImageToBuffer-dstBuffer-00191", "dstBuffer must have been created with VK_BUFFER_USAGE_TRANSFER_DST_BIT usage flag (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImageToBuffer-dstBuffer-00191)"},
+    {"VUID-vkCmdCopyImageToBuffer-dstBuffer-00192", "If dstBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImageToBuffer-dstBuffer-00192)"},
+    {"VUID-vkCmdCopyImageToBuffer-dstBuffer-parameter", "dstBuffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImageToBuffer-dstBuffer-parameter)"},
+    {"VUID-vkCmdCopyImageToBuffer-imageOffset-01794", "The imageOffset and imageExtent members of each element of pRegions must respect the image transfer granularity requirements of commandBuffer's command pool's queue family, as described in VkQueueFamilyProperties (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImageToBuffer-imageOffset-01794)"},
+    {"VUID-vkCmdCopyImageToBuffer-imageSubresource-01703", "The imageSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when srcImage was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImageToBuffer-imageSubresource-01703)"},
+    {"VUID-vkCmdCopyImageToBuffer-imageSubresource-01704", "The imageSubresource.baseArrayLayer + imageSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when srcImage was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImageToBuffer-imageSubresource-01704)"},
+    {"VUID-vkCmdCopyImageToBuffer-pRegions-00182", "The image region specified by each element of pRegions must be a region that is contained within srcImage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImageToBuffer-pRegions-00182)"},
+    {"VUID-vkCmdCopyImageToBuffer-pRegions-00183", "dstBuffer must be large enough to contain all buffer locations that are accessed according to Buffer and Image Addressing, for each element of pRegions (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImageToBuffer-pRegions-00183)"},
+    {"VUID-vkCmdCopyImageToBuffer-pRegions-00184", "The union of all source regions, and the union of all destination regions, specified by the elements of pRegions, must not overlap in memory (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImageToBuffer-pRegions-00184)"},
+    {"VUID-vkCmdCopyImageToBuffer-pRegions-parameter", "pRegions must be a valid pointer to an array of regionCount valid VkBufferImageCopy structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImageToBuffer-pRegions-parameter)"},
+    {"VUID-vkCmdCopyImageToBuffer-regionCount-arraylength", "regionCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImageToBuffer-regionCount-arraylength)"},
+    {"VUID-vkCmdCopyImageToBuffer-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImageToBuffer-renderpass)"},
+    {"VUID-vkCmdCopyImageToBuffer-srcImage-00186", "srcImage must have been created with VK_IMAGE_USAGE_TRANSFER_SRC_BIT usage flag (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImageToBuffer-srcImage-00186)"},
+    {"VUID-vkCmdCopyImageToBuffer-srcImage-00187", "If srcImage is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImageToBuffer-srcImage-00187)"},
+    {"VUID-vkCmdCopyImageToBuffer-srcImage-00188", "srcImage must have a sample count equal to VK_SAMPLE_COUNT_1_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImageToBuffer-srcImage-00188)"},
+    {"VUID-vkCmdCopyImageToBuffer-srcImage-01998", "The format features of srcImage must contain VK_FORMAT_FEATURE_TRANSFER_SRC_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImageToBuffer-srcImage-01998)"},
+    {"VUID-vkCmdCopyImageToBuffer-srcImage-02544", "srcImage must not have been created with flags containing VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImageToBuffer-srcImage-02544)"},
+    {"VUID-vkCmdCopyImageToBuffer-srcImage-parameter", "srcImage must be a valid VkImage handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImageToBuffer-srcImage-parameter)"},
+    {"VUID-vkCmdCopyImageToBuffer-srcImageLayout-00189", "srcImageLayout must specify the layout of the image subresources of srcImage specified in pRegions at the time this command is executed on a VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImageToBuffer-srcImageLayout-00189)"},
+    {"VUID-vkCmdCopyImageToBuffer-srcImageLayout-00190", "srcImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImageToBuffer-srcImageLayout-00190)"},
+    {"VUID-vkCmdCopyImageToBuffer-srcImageLayout-01397", "srcImageLayout must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImageToBuffer-srcImageLayout-01397)"},
+    {"VUID-vkCmdCopyImageToBuffer-srcImageLayout-parameter", "srcImageLayout must be a valid VkImageLayout value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyImageToBuffer-srcImageLayout-parameter)"},
+    {"VUID-vkCmdCopyQueryPoolResults-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyQueryPoolResults-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdCopyQueryPoolResults-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyQueryPoolResults-commandBuffer-parameter)"},
+    {"VUID-vkCmdCopyQueryPoolResults-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyQueryPoolResults-commandBuffer-recording)"},
+    {"VUID-vkCmdCopyQueryPoolResults-commonparent", "Each of commandBuffer, dstBuffer, and queryPool must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyQueryPoolResults-commonparent)"},
+    {"VUID-vkCmdCopyQueryPoolResults-dstBuffer-00824", "dstBuffer must have enough storage, from dstOffset, to contain the result of each query, as described here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyQueryPoolResults-dstBuffer-00824)"},
+    {"VUID-vkCmdCopyQueryPoolResults-dstBuffer-00825", "dstBuffer must have been created with VK_BUFFER_USAGE_TRANSFER_DST_BIT usage flag (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyQueryPoolResults-dstBuffer-00825)"},
+    {"VUID-vkCmdCopyQueryPoolResults-dstBuffer-00826", "If dstBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyQueryPoolResults-dstBuffer-00826)"},
+    {"VUID-vkCmdCopyQueryPoolResults-dstBuffer-parameter", "dstBuffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyQueryPoolResults-dstBuffer-parameter)"},
+    {"VUID-vkCmdCopyQueryPoolResults-dstOffset-00819", "dstOffset must be less than the size of dstBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyQueryPoolResults-dstOffset-00819)"},
+    {"VUID-vkCmdCopyQueryPoolResults-firstQuery-00820", "firstQuery must be less than the number of queries in queryPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyQueryPoolResults-firstQuery-00820)"},
+    {"VUID-vkCmdCopyQueryPoolResults-firstQuery-00821", "The sum of firstQuery and queryCount must be less than or equal to the number of queries in queryPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyQueryPoolResults-firstQuery-00821)"},
+    {"VUID-vkCmdCopyQueryPoolResults-flags-00822", "If VK_QUERY_RESULT_64_BIT is not set in flags then dstOffset and stride must be multiples of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyQueryPoolResults-flags-00822)"},
+    {"VUID-vkCmdCopyQueryPoolResults-flags-00823", "If VK_QUERY_RESULT_64_BIT is set in flags then dstOffset and stride must be multiples of 8 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyQueryPoolResults-flags-00823)"},
+    {"VUID-vkCmdCopyQueryPoolResults-flags-parameter", "flags must be a valid combination of VkQueryResultFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyQueryPoolResults-flags-parameter)"},
+    {"VUID-vkCmdCopyQueryPoolResults-queryPool-parameter", "queryPool must be a valid VkQueryPool handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyQueryPoolResults-queryPool-parameter)"},
+    {"VUID-vkCmdCopyQueryPoolResults-queryType-00827", "If the queryType used to create queryPool was VK_QUERY_TYPE_TIMESTAMP, flags must not contain VK_QUERY_RESULT_PARTIAL_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyQueryPoolResults-queryType-00827)"},
+    {"VUID-vkCmdCopyQueryPoolResults-queryType-02734", "vkCmdCopyQueryPoolResults must not be called if the queryType used to create queryPool was VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyQueryPoolResults-queryType-02734)"},
+    {"VUID-vkCmdCopyQueryPoolResults-queryType-03232", "If the queryType used to create queryPool was VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, VkPhysicalDevicePerformanceQueryPropertiesKHR::allowCommandBufferQueryCopies must be VK_TRUE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyQueryPoolResults-queryType-03232)"},
+    {"VUID-vkCmdCopyQueryPoolResults-queryType-03233", "If the queryType used to create queryPool was VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, flags must not contain VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, VK_QUERY_RESULT_PARTIAL_BIT or VK_QUERY_RESULT_64_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyQueryPoolResults-queryType-03233)"},
+    {"VUID-vkCmdCopyQueryPoolResults-queryType-03234", "If the queryType used to create queryPool was VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, the queryPool must have been submitted once for each pass as retrieved via a call to vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyQueryPoolResults-queryType-03234)"},
+    {"VUID-vkCmdCopyQueryPoolResults-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdCopyQueryPoolResults-renderpass)"},
+    {"VUID-vkCmdDebugMarkerBeginEXT-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDebugMarkerBeginEXT-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdDebugMarkerBeginEXT-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDebugMarkerBeginEXT-commandBuffer-parameter)"},
+    {"VUID-vkCmdDebugMarkerBeginEXT-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDebugMarkerBeginEXT-commandBuffer-recording)"},
+    {"VUID-vkCmdDebugMarkerBeginEXT-pMarkerInfo-parameter", "pMarkerInfo must be a valid pointer to a valid VkDebugMarkerMarkerInfoEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDebugMarkerBeginEXT-pMarkerInfo-parameter)"},
+    {"VUID-vkCmdDebugMarkerEndEXT-commandBuffer-01239", "There must be an outstanding vkCmdDebugMarkerBeginEXT command prior to the vkCmdDebugMarkerEndEXT on the queue that commandBuffer is submitted to (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDebugMarkerEndEXT-commandBuffer-01239)"},
+    {"VUID-vkCmdDebugMarkerEndEXT-commandBuffer-01240", "If commandBuffer is a secondary command buffer, there must be an outstanding vkCmdDebugMarkerBeginEXT command recorded to commandBuffer that has not previously been ended by a call to vkCmdDebugMarkerEndEXT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDebugMarkerEndEXT-commandBuffer-01240)"},
+    {"VUID-vkCmdDebugMarkerEndEXT-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDebugMarkerEndEXT-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdDebugMarkerEndEXT-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDebugMarkerEndEXT-commandBuffer-parameter)"},
+    {"VUID-vkCmdDebugMarkerEndEXT-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDebugMarkerEndEXT-commandBuffer-recording)"},
+    {"VUID-vkCmdDebugMarkerInsertEXT-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDebugMarkerInsertEXT-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdDebugMarkerInsertEXT-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDebugMarkerInsertEXT-commandBuffer-parameter)"},
+    {"VUID-vkCmdDebugMarkerInsertEXT-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDebugMarkerInsertEXT-commandBuffer-recording)"},
+    {"VUID-vkCmdDebugMarkerInsertEXT-pMarkerInfo-parameter", "pMarkerInfo must be a valid pointer to a valid VkDebugMarkerMarkerInfoEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDebugMarkerInsertEXT-pMarkerInfo-parameter)"},
+    {"VUID-vkCmdDispatch-None-02690", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-02690)"},
+    {"VUID-vkCmdDispatch-None-02691", "If a VkImageView is accessed using atomic operations as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-02691)"},
+    {"VUID-vkCmdDispatch-None-02692", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-02692)"},
+    {"VUID-vkCmdDispatch-None-02693", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-02693)"},
+    {"VUID-vkCmdDispatch-None-02697", "For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-02697)"},
+    {"VUID-vkCmdDispatch-None-02698", "For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-02698)"},
+    {"VUID-vkCmdDispatch-None-02699", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-02699)"},
+    {"VUID-vkCmdDispatch-None-02700", "A valid pipeline must be bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-02700)"},
+    {"VUID-vkCmdDispatch-None-02702", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-02702)"},
+    {"VUID-vkCmdDispatch-None-02703", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-02703)"},
+    {"VUID-vkCmdDispatch-None-02704", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-02704)"},
+    {"VUID-vkCmdDispatch-None-02705", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-02705)"},
+    {"VUID-vkCmdDispatch-None-02706", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-None-02706)"},
+    {"VUID-vkCmdDispatch-commandBuffer-02701", "If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-commandBuffer-02701)"},
+    {"VUID-vkCmdDispatch-commandBuffer-02707", "If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-commandBuffer-02707)"},
+    {"VUID-vkCmdDispatch-commandBuffer-02712", "If commandBuffer is a protected command buffer, any resource written to by the VkPipeline object bound to the pipeline bind point used by this command must not be an unprotected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-commandBuffer-02712)"},
+    {"VUID-vkCmdDispatch-commandBuffer-02713", "If commandBuffer is a protected command buffer, pipeline stages other than the framebuffer-space and compute stages in the VkPipeline object bound to the pipeline bind point must not write to any resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-commandBuffer-02713)"},
+    {"VUID-vkCmdDispatch-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdDispatch-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-commandBuffer-parameter)"},
+    {"VUID-vkCmdDispatch-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-commandBuffer-recording)"},
+    {"VUID-vkCmdDispatch-filterCubic-02694", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-filterCubic-02694)"},
+    {"VUID-vkCmdDispatch-filterCubicMinmax-02695", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-filterCubicMinmax-02695)"},
+    {"VUID-vkCmdDispatch-flags-02696", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-flags-02696)"},
+    {"VUID-vkCmdDispatch-groupCountX-00386", "groupCountX must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[0] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-groupCountX-00386)"},
+    {"VUID-vkCmdDispatch-groupCountY-00387", "groupCountY must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[1] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-groupCountY-00387)"},
+    {"VUID-vkCmdDispatch-groupCountZ-00388", "groupCountZ must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[2] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-groupCountZ-00388)"},
+    {"VUID-vkCmdDispatch-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatch-renderpass)"},
+    {"VUID-vkCmdDispatchBase-None-02690", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-None-02690)"},
+    {"VUID-vkCmdDispatchBase-None-02691", "If a VkImageView is accessed using atomic operations as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-None-02691)"},
+    {"VUID-vkCmdDispatchBase-None-02692", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-None-02692)"},
+    {"VUID-vkCmdDispatchBase-None-02693", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-None-02693)"},
+    {"VUID-vkCmdDispatchBase-None-02697", "For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-None-02697)"},
+    {"VUID-vkCmdDispatchBase-None-02698", "For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-None-02698)"},
+    {"VUID-vkCmdDispatchBase-None-02699", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-None-02699)"},
+    {"VUID-vkCmdDispatchBase-None-02700", "A valid pipeline must be bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-None-02700)"},
+    {"VUID-vkCmdDispatchBase-None-02702", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-None-02702)"},
+    {"VUID-vkCmdDispatchBase-None-02703", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-None-02703)"},
+    {"VUID-vkCmdDispatchBase-None-02704", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-None-02704)"},
+    {"VUID-vkCmdDispatchBase-None-02705", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-None-02705)"},
+    {"VUID-vkCmdDispatchBase-None-02706", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-None-02706)"},
+    {"VUID-vkCmdDispatchBase-baseGroupX-00421", "baseGroupX must be less than VkPhysicalDeviceLimits::maxComputeWorkGroupCount[0] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-baseGroupX-00421)"},
+    {"VUID-vkCmdDispatchBase-baseGroupX-00422", "baseGroupX must be less than VkPhysicalDeviceLimits::maxComputeWorkGroupCount[1] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-baseGroupX-00422)"},
+    {"VUID-vkCmdDispatchBase-baseGroupX-00427", "If any of baseGroupX, baseGroupY, or baseGroupZ are not zero, then the bound compute pipeline must have been created with the VK_PIPELINE_CREATE_DISPATCH_BASE flag. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-baseGroupX-00427)"},
+    {"VUID-vkCmdDispatchBase-baseGroupZ-00423", "baseGroupZ must be less than VkPhysicalDeviceLimits::maxComputeWorkGroupCount[2] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-baseGroupZ-00423)"},
+    {"VUID-vkCmdDispatchBase-commandBuffer-02701", "If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-commandBuffer-02701)"},
+    {"VUID-vkCmdDispatchBase-commandBuffer-02707", "If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-commandBuffer-02707)"},
+    {"VUID-vkCmdDispatchBase-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdDispatchBase-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-commandBuffer-parameter)"},
+    {"VUID-vkCmdDispatchBase-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-commandBuffer-recording)"},
+    {"VUID-vkCmdDispatchBase-filterCubic-02694", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-filterCubic-02694)"},
+    {"VUID-vkCmdDispatchBase-filterCubicMinmax-02695", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-filterCubicMinmax-02695)"},
+    {"VUID-vkCmdDispatchBase-flags-02696", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-flags-02696)"},
+    {"VUID-vkCmdDispatchBase-groupCountX-00424", "groupCountX must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[0] minus baseGroupX (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-groupCountX-00424)"},
+    {"VUID-vkCmdDispatchBase-groupCountY-00425", "groupCountY must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[1] minus baseGroupY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-groupCountY-00425)"},
+    {"VUID-vkCmdDispatchBase-groupCountZ-00426", "groupCountZ must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[2] minus baseGroupZ (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-groupCountZ-00426)"},
+    {"VUID-vkCmdDispatchBase-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchBase-renderpass)"},
+    {"VUID-vkCmdDispatchIndirect-None-02690", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-02690)"},
+    {"VUID-vkCmdDispatchIndirect-None-02691", "If a VkImageView is accessed using atomic operations as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-02691)"},
+    {"VUID-vkCmdDispatchIndirect-None-02692", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-02692)"},
+    {"VUID-vkCmdDispatchIndirect-None-02693", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-02693)"},
+    {"VUID-vkCmdDispatchIndirect-None-02697", "For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-02697)"},
+    {"VUID-vkCmdDispatchIndirect-None-02698", "For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-02698)"},
+    {"VUID-vkCmdDispatchIndirect-None-02699", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-02699)"},
+    {"VUID-vkCmdDispatchIndirect-None-02700", "A valid pipeline must be bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-02700)"},
+    {"VUID-vkCmdDispatchIndirect-None-02702", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-02702)"},
+    {"VUID-vkCmdDispatchIndirect-None-02703", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-02703)"},
+    {"VUID-vkCmdDispatchIndirect-None-02704", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-02704)"},
+    {"VUID-vkCmdDispatchIndirect-None-02705", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-02705)"},
+    {"VUID-vkCmdDispatchIndirect-None-02706", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-None-02706)"},
+    {"VUID-vkCmdDispatchIndirect-buffer-02708", "If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-buffer-02708)"},
+    {"VUID-vkCmdDispatchIndirect-buffer-02709", "buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-buffer-02709)"},
+    {"VUID-vkCmdDispatchIndirect-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-buffer-parameter)"},
+    {"VUID-vkCmdDispatchIndirect-commandBuffer-02701", "If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-commandBuffer-02701)"},
+    {"VUID-vkCmdDispatchIndirect-commandBuffer-02707", "If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-commandBuffer-02707)"},
+    {"VUID-vkCmdDispatchIndirect-commandBuffer-02711", "commandBuffer must not be a protected command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-commandBuffer-02711)"},
+    {"VUID-vkCmdDispatchIndirect-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdDispatchIndirect-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-commandBuffer-parameter)"},
+    {"VUID-vkCmdDispatchIndirect-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-commandBuffer-recording)"},
+    {"VUID-vkCmdDispatchIndirect-commonparent", "Both of buffer, and commandBuffer must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-commonparent)"},
+    {"VUID-vkCmdDispatchIndirect-filterCubic-02694", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-filterCubic-02694)"},
+    {"VUID-vkCmdDispatchIndirect-filterCubicMinmax-02695", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-filterCubicMinmax-02695)"},
+    {"VUID-vkCmdDispatchIndirect-flags-02696", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-flags-02696)"},
+    {"VUID-vkCmdDispatchIndirect-offset-00407", "The sum of offset and the size of VkDispatchIndirectCommand must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-offset-00407)"},
+    {"VUID-vkCmdDispatchIndirect-offset-02710", "offset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-offset-02710)"},
+    {"VUID-vkCmdDispatchIndirect-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDispatchIndirect-renderpass)"},
+    {"VUID-vkCmdDraw-None-02686", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02686)"},
+    {"VUID-vkCmdDraw-None-02687", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02687)"},
+    {"VUID-vkCmdDraw-None-02690", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02690)"},
+    {"VUID-vkCmdDraw-None-02691", "If a VkImageView is accessed using atomic operations as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02691)"},
+    {"VUID-vkCmdDraw-None-02692", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02692)"},
+    {"VUID-vkCmdDraw-None-02693", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02693)"},
+    {"VUID-vkCmdDraw-None-02697", "For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02697)"},
+    {"VUID-vkCmdDraw-None-02698", "For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02698)"},
+    {"VUID-vkCmdDraw-None-02699", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02699)"},
+    {"VUID-vkCmdDraw-None-02700", "A valid pipeline must be bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02700)"},
+    {"VUID-vkCmdDraw-None-02702", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02702)"},
+    {"VUID-vkCmdDraw-None-02703", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02703)"},
+    {"VUID-vkCmdDraw-None-02704", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02704)"},
+    {"VUID-vkCmdDraw-None-02705", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02705)"},
+    {"VUID-vkCmdDraw-None-02706", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02706)"},
+    {"VUID-vkCmdDraw-None-02720", "All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point's interface must have valid buffers bound (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02720)"},
+    {"VUID-vkCmdDraw-None-02721", "For a given vertex buffer binding, any attribute data fetched must be entirely contained within the corresponding vertex buffer binding, as described in Vertex Input Description (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-None-02721)"},
+    {"VUID-vkCmdDraw-commandBuffer-02701", "If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-commandBuffer-02701)"},
+    {"VUID-vkCmdDraw-commandBuffer-02707", "If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-commandBuffer-02707)"},
+    {"VUID-vkCmdDraw-commandBuffer-02712", "If commandBuffer is a protected command buffer, any resource written to by the VkPipeline object bound to the pipeline bind point used by this command must not be an unprotected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-commandBuffer-02712)"},
+    {"VUID-vkCmdDraw-commandBuffer-02713", "If commandBuffer is a protected command buffer, pipeline stages other than the framebuffer-space and compute stages in the VkPipeline object bound to the pipeline bind point must not write to any resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-commandBuffer-02713)"},
+    {"VUID-vkCmdDraw-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdDraw-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-commandBuffer-parameter)"},
+    {"VUID-vkCmdDraw-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-commandBuffer-recording)"},
+    {"VUID-vkCmdDraw-filterCubic-02694", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-filterCubic-02694)"},
+    {"VUID-vkCmdDraw-filterCubicMinmax-02695", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-filterCubicMinmax-02695)"},
+    {"VUID-vkCmdDraw-flags-02696", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-flags-02696)"},
+    {"VUID-vkCmdDraw-maxMultiviewInstanceIndex-02688", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-maxMultiviewInstanceIndex-02688)"},
+    {"VUID-vkCmdDraw-renderPass-02684", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-renderPass-02684)"},
+    {"VUID-vkCmdDraw-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-renderpass)"},
+    {"VUID-vkCmdDraw-sampleLocationsEnable-02689", "If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-sampleLocationsEnable-02689)"},
+    {"VUID-vkCmdDraw-subpass-02685", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDraw-subpass-02685)"},
+    {"VUID-vkCmdDrawIndexed-None-02686", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02686)"},
+    {"VUID-vkCmdDrawIndexed-None-02687", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02687)"},
+    {"VUID-vkCmdDrawIndexed-None-02690", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02690)"},
+    {"VUID-vkCmdDrawIndexed-None-02691", "If a VkImageView is accessed using atomic operations as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02691)"},
+    {"VUID-vkCmdDrawIndexed-None-02692", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02692)"},
+    {"VUID-vkCmdDrawIndexed-None-02693", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02693)"},
+    {"VUID-vkCmdDrawIndexed-None-02697", "For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02697)"},
+    {"VUID-vkCmdDrawIndexed-None-02698", "For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02698)"},
+    {"VUID-vkCmdDrawIndexed-None-02699", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02699)"},
+    {"VUID-vkCmdDrawIndexed-None-02700", "A valid pipeline must be bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02700)"},
+    {"VUID-vkCmdDrawIndexed-None-02702", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02702)"},
+    {"VUID-vkCmdDrawIndexed-None-02703", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02703)"},
+    {"VUID-vkCmdDrawIndexed-None-02704", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02704)"},
+    {"VUID-vkCmdDrawIndexed-None-02705", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02705)"},
+    {"VUID-vkCmdDrawIndexed-None-02706", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02706)"},
+    {"VUID-vkCmdDrawIndexed-None-02720", "All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point's interface must have valid buffers bound (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02720)"},
+    {"VUID-vkCmdDrawIndexed-None-02721", "For a given vertex buffer binding, any attribute data fetched must be entirely contained within the corresponding vertex buffer binding, as described in Vertex Input Description (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-None-02721)"},
+    {"VUID-vkCmdDrawIndexed-commandBuffer-02701", "If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-commandBuffer-02701)"},
+    {"VUID-vkCmdDrawIndexed-commandBuffer-02707", "If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-commandBuffer-02707)"},
+    {"VUID-vkCmdDrawIndexed-commandBuffer-02712", "If commandBuffer is a protected command buffer, any resource written to by the VkPipeline object bound to the pipeline bind point used by this command must not be an unprotected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-commandBuffer-02712)"},
+    {"VUID-vkCmdDrawIndexed-commandBuffer-02713", "If commandBuffer is a protected command buffer, pipeline stages other than the framebuffer-space and compute stages in the VkPipeline object bound to the pipeline bind point must not write to any resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-commandBuffer-02713)"},
+    {"VUID-vkCmdDrawIndexed-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdDrawIndexed-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-commandBuffer-parameter)"},
+    {"VUID-vkCmdDrawIndexed-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-commandBuffer-recording)"},
+    {"VUID-vkCmdDrawIndexed-filterCubic-02694", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-filterCubic-02694)"},
+    {"VUID-vkCmdDrawIndexed-filterCubicMinmax-02695", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-filterCubicMinmax-02695)"},
+    {"VUID-vkCmdDrawIndexed-flags-02696", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-flags-02696)"},
+    {"VUID-vkCmdDrawIndexed-indexSize-00463", "(indexSize * (firstIndex + indexCount) + offset) must be less than or equal to the size of the bound index buffer, with indexSize being based on the type specified by indexType, where the index buffer, indexType, and offset are specified via vkCmdBindIndexBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-indexSize-00463)"},
+    {"VUID-vkCmdDrawIndexed-maxMultiviewInstanceIndex-02688", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-maxMultiviewInstanceIndex-02688)"},
+    {"VUID-vkCmdDrawIndexed-renderPass-02684", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-renderPass-02684)"},
+    {"VUID-vkCmdDrawIndexed-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-renderpass)"},
+    {"VUID-vkCmdDrawIndexed-sampleLocationsEnable-02689", "If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-sampleLocationsEnable-02689)"},
+    {"VUID-vkCmdDrawIndexed-subpass-02685", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexed-subpass-02685)"},
+    {"VUID-vkCmdDrawIndexedIndirect-None-02686", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02686)"},
+    {"VUID-vkCmdDrawIndexedIndirect-None-02687", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02687)"},
+    {"VUID-vkCmdDrawIndexedIndirect-None-02690", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02690)"},
+    {"VUID-vkCmdDrawIndexedIndirect-None-02691", "If a VkImageView is accessed using atomic operations as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02691)"},
+    {"VUID-vkCmdDrawIndexedIndirect-None-02692", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02692)"},
+    {"VUID-vkCmdDrawIndexedIndirect-None-02693", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02693)"},
+    {"VUID-vkCmdDrawIndexedIndirect-None-02697", "For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02697)"},
+    {"VUID-vkCmdDrawIndexedIndirect-None-02698", "For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02698)"},
+    {"VUID-vkCmdDrawIndexedIndirect-None-02699", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02699)"},
+    {"VUID-vkCmdDrawIndexedIndirect-None-02700", "A valid pipeline must be bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02700)"},
+    {"VUID-vkCmdDrawIndexedIndirect-None-02702", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02702)"},
+    {"VUID-vkCmdDrawIndexedIndirect-None-02703", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02703)"},
+    {"VUID-vkCmdDrawIndexedIndirect-None-02704", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02704)"},
+    {"VUID-vkCmdDrawIndexedIndirect-None-02705", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02705)"},
+    {"VUID-vkCmdDrawIndexedIndirect-None-02706", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02706)"},
+    {"VUID-vkCmdDrawIndexedIndirect-None-02720", "All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point's interface must have valid buffers bound (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02720)"},
+    {"VUID-vkCmdDrawIndexedIndirect-None-02721", "For a given vertex buffer binding, any attribute data fetched must be entirely contained within the corresponding vertex buffer binding, as described in Vertex Input Description (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-None-02721)"},
+    {"VUID-vkCmdDrawIndexedIndirect-buffer-02708", "If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-buffer-02708)"},
+    {"VUID-vkCmdDrawIndexedIndirect-buffer-02709", "buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-buffer-02709)"},
+    {"VUID-vkCmdDrawIndexedIndirect-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-buffer-parameter)"},
+    {"VUID-vkCmdDrawIndexedIndirect-commandBuffer-02701", "If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-commandBuffer-02701)"},
+    {"VUID-vkCmdDrawIndexedIndirect-commandBuffer-02707", "If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-commandBuffer-02707)"},
+    {"VUID-vkCmdDrawIndexedIndirect-commandBuffer-02711", "commandBuffer must not be a protected command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-commandBuffer-02711)"},
+    {"VUID-vkCmdDrawIndexedIndirect-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdDrawIndexedIndirect-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-commandBuffer-parameter)"},
+    {"VUID-vkCmdDrawIndexedIndirect-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-commandBuffer-recording)"},
+    {"VUID-vkCmdDrawIndexedIndirect-commonparent", "Both of buffer, and commandBuffer must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-commonparent)"},
+    {"VUID-vkCmdDrawIndexedIndirect-drawCount-00528", "If drawCount is greater than 1, stride must be a multiple of 4 and must be greater than or equal to sizeof(VkDrawIndexedIndirectCommand) (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-drawCount-00528)"},
+    {"VUID-vkCmdDrawIndexedIndirect-drawCount-00539", "If drawCount is equal to 1, (offset + sizeof(VkDrawIndexedIndirectCommand)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-drawCount-00539)"},
+    {"VUID-vkCmdDrawIndexedIndirect-drawCount-00540", "If drawCount is greater than 1, (stride {times} (drawCount - 1) + offset + sizeof(VkDrawIndexedIndirectCommand)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-drawCount-00540)"},
+    {"VUID-vkCmdDrawIndexedIndirect-drawCount-02718", "If the multi-draw indirect feature is not enabled, drawCount must be 0 or 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-drawCount-02718)"},
+    {"VUID-vkCmdDrawIndexedIndirect-drawCount-02719", "drawCount must be less than or equal to VkPhysicalDeviceLimits::maxDrawIndirectCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-drawCount-02719)"},
+    {"VUID-vkCmdDrawIndexedIndirect-filterCubic-02694", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-filterCubic-02694)"},
+    {"VUID-vkCmdDrawIndexedIndirect-filterCubicMinmax-02695", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-filterCubicMinmax-02695)"},
+    {"VUID-vkCmdDrawIndexedIndirect-firstInstance-00530", "If the drawIndirectFirstInstance feature is not enabled, all the firstInstance members of the VkDrawIndexedIndirectCommand structures accessed by this command must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-firstInstance-00530)"},
+    {"VUID-vkCmdDrawIndexedIndirect-flags-02696", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-flags-02696)"},
+    {"VUID-vkCmdDrawIndexedIndirect-maxMultiviewInstanceIndex-02688", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-maxMultiviewInstanceIndex-02688)"},
+    {"VUID-vkCmdDrawIndexedIndirect-offset-02710", "offset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-offset-02710)"},
+    {"VUID-vkCmdDrawIndexedIndirect-renderPass-02684", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-renderPass-02684)"},
+    {"VUID-vkCmdDrawIndexedIndirect-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-renderpass)"},
+    {"VUID-vkCmdDrawIndexedIndirect-sampleLocationsEnable-02689", "If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-sampleLocationsEnable-02689)"},
+    {"VUID-vkCmdDrawIndexedIndirect-subpass-02685", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirect-subpass-02685)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02686", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02686)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02687", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02687)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02690", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02690)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02691", "If a VkImageView is accessed using atomic operations as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02691)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02692", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02692)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02693", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02693)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02697", "For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02697)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02698", "For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02698)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02699", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02699)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02700", "A valid pipeline must be bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02700)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02702", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02702)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02703", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02703)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02704", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02704)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02705", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02705)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02706", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02706)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02720", "All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point's interface must have valid buffers bound (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02720)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-None-02721", "For a given vertex buffer binding, any attribute data fetched must be entirely contained within the corresponding vertex buffer binding, as described in Vertex Input Description (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-None-02721)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-02708", "If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-02708)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-02709", "buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-02709)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-parameter)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-02701", "If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-02701)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-02707", "If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-02707)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-02711", "commandBuffer must not be a protected command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-02711)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-parameter)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-recording)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-commonparent", "Each of buffer, commandBuffer, and countBuffer must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-commonparent)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-02714", "If countBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-02714)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-02715", "countBuffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-02715)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-02717", "The count stored in countBuffer must be less than or equal to VkPhysicalDeviceLimits::maxDrawIndirectCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-02717)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-03153", "If count stored in countBuffer is equal to 1, (offset + sizeof(VkDrawIndexedIndirectCommand)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-03153)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-03154", "If count stored in countBuffer is greater than 1, (stride {times} (drawCount - 1) + offset + sizeof(VkDrawIndexedIndirectCommand)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-03154)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-parameter", "countBuffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-parameter)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-countBufferOffset-02716", "countBufferOffset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-countBufferOffset-02716)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-filterCubic-02694", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-filterCubic-02694)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-filterCubicMinmax-02695", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-filterCubicMinmax-02695)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-flags-02696", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-flags-02696)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-maxDrawCount-03143", "If maxDrawCount is greater than or equal to 1, (stride {times} (maxDrawCount - 1) + offset + sizeof(VkDrawIndexedIndirectCommand)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-maxDrawCount-03143)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-maxMultiviewInstanceIndex-02688", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-maxMultiviewInstanceIndex-02688)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-offset-02710", "offset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-offset-02710)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-renderPass-02684", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-renderPass-02684)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-renderpass)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-sampleLocationsEnable-02689", "If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-sampleLocationsEnable-02689)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-stride-03142", "stride must be a multiple of 4 and must be greater than or equal to sizeof(VkDrawIndexedIndirectCommand) (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-stride-03142)"},
+    {"VUID-vkCmdDrawIndexedIndirectCountKHR-subpass-02685", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndexedIndirectCountKHR-subpass-02685)"},
+    {"VUID-vkCmdDrawIndirect-None-02686", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02686)"},
+    {"VUID-vkCmdDrawIndirect-None-02687", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02687)"},
+    {"VUID-vkCmdDrawIndirect-None-02690", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02690)"},
+    {"VUID-vkCmdDrawIndirect-None-02691", "If a VkImageView is accessed using atomic operations as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02691)"},
+    {"VUID-vkCmdDrawIndirect-None-02692", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02692)"},
+    {"VUID-vkCmdDrawIndirect-None-02693", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02693)"},
+    {"VUID-vkCmdDrawIndirect-None-02697", "For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02697)"},
+    {"VUID-vkCmdDrawIndirect-None-02698", "For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02698)"},
+    {"VUID-vkCmdDrawIndirect-None-02699", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02699)"},
+    {"VUID-vkCmdDrawIndirect-None-02700", "A valid pipeline must be bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02700)"},
+    {"VUID-vkCmdDrawIndirect-None-02702", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02702)"},
+    {"VUID-vkCmdDrawIndirect-None-02703", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02703)"},
+    {"VUID-vkCmdDrawIndirect-None-02704", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02704)"},
+    {"VUID-vkCmdDrawIndirect-None-02705", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02705)"},
+    {"VUID-vkCmdDrawIndirect-None-02706", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02706)"},
+    {"VUID-vkCmdDrawIndirect-None-02720", "All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point's interface must have valid buffers bound (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02720)"},
+    {"VUID-vkCmdDrawIndirect-None-02721", "For a given vertex buffer binding, any attribute data fetched must be entirely contained within the corresponding vertex buffer binding, as described in Vertex Input Description (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-None-02721)"},
+    {"VUID-vkCmdDrawIndirect-buffer-02708", "If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-buffer-02708)"},
+    {"VUID-vkCmdDrawIndirect-buffer-02709", "buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-buffer-02709)"},
+    {"VUID-vkCmdDrawIndirect-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-buffer-parameter)"},
+    {"VUID-vkCmdDrawIndirect-commandBuffer-02701", "If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-commandBuffer-02701)"},
+    {"VUID-vkCmdDrawIndirect-commandBuffer-02707", "If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-commandBuffer-02707)"},
+    {"VUID-vkCmdDrawIndirect-commandBuffer-02711", "commandBuffer must not be a protected command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-commandBuffer-02711)"},
+    {"VUID-vkCmdDrawIndirect-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdDrawIndirect-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-commandBuffer-parameter)"},
+    {"VUID-vkCmdDrawIndirect-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-commandBuffer-recording)"},
+    {"VUID-vkCmdDrawIndirect-commonparent", "Both of buffer, and commandBuffer must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-commonparent)"},
+    {"VUID-vkCmdDrawIndirect-drawCount-00476", "If drawCount is greater than 1, stride must be a multiple of 4 and must be greater than or equal to sizeof(VkDrawIndirectCommand) (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-drawCount-00476)"},
+    {"VUID-vkCmdDrawIndirect-drawCount-00487", "If drawCount is equal to 1, (offset + sizeof(VkDrawIndirectCommand)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-drawCount-00487)"},
+    {"VUID-vkCmdDrawIndirect-drawCount-00488", "If drawCount is greater than 1, (stride {times} (drawCount - 1) + offset + sizeof(VkDrawIndirectCommand)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-drawCount-00488)"},
+    {"VUID-vkCmdDrawIndirect-drawCount-02718", "If the multi-draw indirect feature is not enabled, drawCount must be 0 or 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-drawCount-02718)"},
+    {"VUID-vkCmdDrawIndirect-drawCount-02719", "drawCount must be less than or equal to VkPhysicalDeviceLimits::maxDrawIndirectCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-drawCount-02719)"},
+    {"VUID-vkCmdDrawIndirect-filterCubic-02694", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-filterCubic-02694)"},
+    {"VUID-vkCmdDrawIndirect-filterCubicMinmax-02695", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-filterCubicMinmax-02695)"},
+    {"VUID-vkCmdDrawIndirect-firstInstance-00478", "If the drawIndirectFirstInstance feature is not enabled, all the firstInstance members of the VkDrawIndirectCommand structures accessed by this command must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-firstInstance-00478)"},
+    {"VUID-vkCmdDrawIndirect-flags-02696", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-flags-02696)"},
+    {"VUID-vkCmdDrawIndirect-maxMultiviewInstanceIndex-02688", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-maxMultiviewInstanceIndex-02688)"},
+    {"VUID-vkCmdDrawIndirect-offset-02710", "offset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-offset-02710)"},
+    {"VUID-vkCmdDrawIndirect-renderPass-02684", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-renderPass-02684)"},
+    {"VUID-vkCmdDrawIndirect-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-renderpass)"},
+    {"VUID-vkCmdDrawIndirect-sampleLocationsEnable-02689", "If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-sampleLocationsEnable-02689)"},
+    {"VUID-vkCmdDrawIndirect-subpass-02685", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirect-subpass-02685)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-None-02686", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02686)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-None-02687", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02687)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-None-02690", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02690)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-None-02691", "If a VkImageView is accessed using atomic operations as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02691)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-None-02692", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02692)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-None-02693", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02693)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-None-02697", "For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02697)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-None-02698", "For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02698)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-None-02699", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02699)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-None-02700", "A valid pipeline must be bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02700)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-None-02702", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02702)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-None-02703", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02703)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-None-02704", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02704)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-None-02705", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02705)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-None-02706", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02706)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-None-02720", "All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point's interface must have valid buffers bound (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02720)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-None-02721", "For a given vertex buffer binding, any attribute data fetched must be entirely contained within the corresponding vertex buffer binding, as described in Vertex Input Description (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-None-02721)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-02646", "commandBuffer must not be a protected command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-02646)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-02701", "If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-02701)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-02707", "If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-02707)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-parameter)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-recording)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-commonparent", "Both of commandBuffer, and counterBuffer must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-commonparent)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-counterBuffer-02290", "counterBuffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-counterBuffer-02290)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-counterBuffer-parameter", "counterBuffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-counterBuffer-parameter)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-filterCubic-02694", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-filterCubic-02694)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-filterCubicMinmax-02695", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-filterCubicMinmax-02695)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-flags-02696", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-flags-02696)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-maxMultiviewInstanceIndex-02688", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-maxMultiviewInstanceIndex-02688)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-renderPass-02684", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-renderPass-02684)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-renderpass)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-sampleLocationsEnable-02689", "If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-sampleLocationsEnable-02689)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-subpass-02685", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-subpass-02685)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-transformFeedback-02287", "VkPhysicalDeviceTransformFeedbackFeaturesEXT::transformFeedback must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-transformFeedback-02287)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-transformFeedbackDraw-02288", "The implementation must support VkPhysicalDeviceTransformFeedbackPropertiesEXT::transformFeedbackDraw (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-transformFeedbackDraw-02288)"},
+    {"VUID-vkCmdDrawIndirectByteCountEXT-vertexStride-02289", "vertexStride must be greater than 0 and less than or equal to VkPhysicalDeviceLimits::maxTransformFeedbackBufferDataStride (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectByteCountEXT-vertexStride-02289)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-None-02686", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02686)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-None-02687", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02687)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-None-02690", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02690)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-None-02691", "If a VkImageView is accessed using atomic operations as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02691)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-None-02692", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02692)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-None-02693", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02693)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-None-02697", "For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02697)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-None-02698", "For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02698)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-None-02699", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02699)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-None-02700", "A valid pipeline must be bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02700)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-None-02702", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02702)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-None-02703", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02703)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-None-02704", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02704)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-None-02705", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02705)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-None-02706", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02706)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-None-02720", "All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point's interface must have valid buffers bound (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02720)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-None-02721", "For a given vertex buffer binding, any attribute data fetched must be entirely contained within the corresponding vertex buffer binding, as described in Vertex Input Description (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-None-02721)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-buffer-02708", "If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-buffer-02708)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-buffer-02709", "buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-buffer-02709)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-buffer-parameter)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-commandBuffer-02701", "If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-commandBuffer-02701)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-commandBuffer-02707", "If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-commandBuffer-02707)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-commandBuffer-02711", "commandBuffer must not be a protected command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-commandBuffer-02711)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-commandBuffer-parameter)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-commandBuffer-recording)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-commonparent", "Each of buffer, commandBuffer, and countBuffer must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-commonparent)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-countBuffer-02714", "If countBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-countBuffer-02714)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-countBuffer-02715", "countBuffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-countBuffer-02715)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-countBuffer-02717", "The count stored in countBuffer must be less than or equal to VkPhysicalDeviceLimits::maxDrawIndirectCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-countBuffer-02717)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-countBuffer-03121", "If the count stored in countBuffer is equal to 1, (offset + sizeof(VkDrawIndirectCommand)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-countBuffer-03121)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-countBuffer-03122", "If the count stored in countBuffer is greater than 1, (stride {times} (drawCount - 1) + offset + sizeof(VkDrawIndirectCommand)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-countBuffer-03122)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-countBuffer-parameter", "countBuffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-countBuffer-parameter)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-countBufferOffset-02716", "countBufferOffset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-countBufferOffset-02716)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-filterCubic-02694", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-filterCubic-02694)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-filterCubicMinmax-02695", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-filterCubicMinmax-02695)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-flags-02696", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-flags-02696)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-maxDrawCount-03111", "If maxDrawCount is greater than or equal to 1, (stride {times} (maxDrawCount - 1) + offset + sizeof(VkDrawIndirectCommand)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-maxDrawCount-03111)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-maxMultiviewInstanceIndex-02688", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-maxMultiviewInstanceIndex-02688)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-offset-02710", "offset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-offset-02710)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-renderPass-02684", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-renderPass-02684)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-renderpass)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-sampleLocationsEnable-02689", "If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-sampleLocationsEnable-02689)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-stride-03110", "stride must be a multiple of 4 and must be greater than or equal to sizeof(VkDrawIndirectCommand) (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-stride-03110)"},
+    {"VUID-vkCmdDrawIndirectCountKHR-subpass-02685", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawIndirectCountKHR-subpass-02685)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02686", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02686)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02687", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02687)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02690", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02690)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02691", "If a VkImageView is accessed using atomic operations as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02691)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02692", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02692)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02693", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02693)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02697", "For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02697)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02698", "For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02698)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02699", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02699)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02700", "A valid pipeline must be bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02700)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02702", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02702)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02703", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02703)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02704", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02704)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02705", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02705)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02706", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02706)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-buffer-02708", "If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-buffer-02708)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-buffer-02709", "buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-buffer-02709)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-buffer-parameter)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-02701", "If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-02701)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-02707", "If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-02707)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-02711", "commandBuffer must not be a protected command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-02711)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-parameter)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-recording)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-commonparent", "Each of buffer, commandBuffer, and countBuffer must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-commonparent)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02191", "If the count stored in countBuffer is equal to 1, (offset + sizeof(VkDrawMeshTasksIndirectCommandNV)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02191)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02192", "If the count stored in countBuffer is greater than 1, (stride {times} (drawCount - 1) + offset + sizeof(VkDrawMeshTasksIndirectCommandNV)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02192)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02714", "If countBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02714)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02715", "countBuffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02715)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02717", "The count stored in countBuffer must be less than or equal to VkPhysicalDeviceLimits::maxDrawIndirectCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-02717)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-parameter", "countBuffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-countBuffer-parameter)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-countBufferOffset-02716", "countBufferOffset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-countBufferOffset-02716)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-filterCubic-02694", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-filterCubic-02694)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-filterCubicMinmax-02695", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-filterCubicMinmax-02695)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-flags-02696", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-flags-02696)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-maxDrawCount-02183", "If maxDrawCount is greater than or equal to 1, (stride {times} (maxDrawCount - 1) + offset + sizeof(VkDrawMeshTasksIndirectCommandNV)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-maxDrawCount-02183)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-maxMultiviewInstanceIndex-02688", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-maxMultiviewInstanceIndex-02688)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-offset-02710", "offset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-offset-02710)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-renderPass-02684", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-renderPass-02684)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-renderpass)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-sampleLocationsEnable-02689", "If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-sampleLocationsEnable-02689)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-stride-02182", "stride must be a multiple of 4 and must be greater than or equal to sizeof(VkDrawMeshTasksIndirectCommandNV) (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-stride-02182)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectCountNV-subpass-02685", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectCountNV-subpass-02685)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02686", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02686)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02687", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02687)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02690", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02690)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02691", "If a VkImageView is accessed using atomic operations as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02691)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02692", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02692)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02693", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02693)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02697", "For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02697)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02698", "For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02698)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02699", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02699)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02700", "A valid pipeline must be bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02700)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02702", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02702)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02703", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02703)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02704", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02704)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02705", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02705)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-None-02706", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-None-02706)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-buffer-02708", "If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-buffer-02708)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-buffer-02709", "buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-buffer-02709)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-buffer-parameter)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-02701", "If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-02701)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-02707", "If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-02707)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-02711", "commandBuffer must not be a protected command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-02711)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-parameter)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-recording)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-commonparent", "Both of buffer, and commandBuffer must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-commonparent)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02146", "If drawCount is greater than 1, stride must be a multiple of 4 and must be greater than or equal to sizeof(VkDrawMeshTasksIndirectCommandNV) (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02146)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02156", "If drawCount is equal to 1, (offset + sizeof(VkDrawMeshTasksIndirectCommandNV)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02156)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02157", "If drawCount is greater than 1, (stride {times} (drawCount - 1) + offset + sizeof(VkDrawMeshTasksIndirectCommandNV)) must be less than or equal to the size of buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02157)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02718", "If the multi-draw indirect feature is not enabled, drawCount must be 0 or 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02718)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02719", "drawCount must be less than or equal to VkPhysicalDeviceLimits::maxDrawIndirectCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02719)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-filterCubic-02694", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-filterCubic-02694)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-filterCubicMinmax-02695", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-filterCubicMinmax-02695)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-flags-02696", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-flags-02696)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-maxMultiviewInstanceIndex-02688", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-maxMultiviewInstanceIndex-02688)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-offset-02710", "offset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-offset-02710)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-renderPass-02684", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-renderPass-02684)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-renderpass)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-sampleLocationsEnable-02689", "If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-sampleLocationsEnable-02689)"},
+    {"VUID-vkCmdDrawMeshTasksIndirectNV-subpass-02685", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksIndirectNV-subpass-02685)"},
+    {"VUID-vkCmdDrawMeshTasksNV-None-02686", "Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02686)"},
+    {"VUID-vkCmdDrawMeshTasksNV-None-02687", "Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02687)"},
+    {"VUID-vkCmdDrawMeshTasksNV-None-02690", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02690)"},
+    {"VUID-vkCmdDrawMeshTasksNV-None-02691", "If a VkImageView is accessed using atomic operations as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02691)"},
+    {"VUID-vkCmdDrawMeshTasksNV-None-02692", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02692)"},
+    {"VUID-vkCmdDrawMeshTasksNV-None-02693", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02693)"},
+    {"VUID-vkCmdDrawMeshTasksNV-None-02697", "For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02697)"},
+    {"VUID-vkCmdDrawMeshTasksNV-None-02698", "For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02698)"},
+    {"VUID-vkCmdDrawMeshTasksNV-None-02699", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02699)"},
+    {"VUID-vkCmdDrawMeshTasksNV-None-02700", "A valid pipeline must be bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02700)"},
+    {"VUID-vkCmdDrawMeshTasksNV-None-02702", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02702)"},
+    {"VUID-vkCmdDrawMeshTasksNV-None-02703", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02703)"},
+    {"VUID-vkCmdDrawMeshTasksNV-None-02704", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02704)"},
+    {"VUID-vkCmdDrawMeshTasksNV-None-02705", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02705)"},
+    {"VUID-vkCmdDrawMeshTasksNV-None-02706", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-None-02706)"},
+    {"VUID-vkCmdDrawMeshTasksNV-commandBuffer-02701", "If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-commandBuffer-02701)"},
+    {"VUID-vkCmdDrawMeshTasksNV-commandBuffer-02707", "If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-commandBuffer-02707)"},
+    {"VUID-vkCmdDrawMeshTasksNV-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdDrawMeshTasksNV-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-commandBuffer-parameter)"},
+    {"VUID-vkCmdDrawMeshTasksNV-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-commandBuffer-recording)"},
+    {"VUID-vkCmdDrawMeshTasksNV-filterCubic-02694", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-filterCubic-02694)"},
+    {"VUID-vkCmdDrawMeshTasksNV-filterCubicMinmax-02695", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-filterCubicMinmax-02695)"},
+    {"VUID-vkCmdDrawMeshTasksNV-flags-02696", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-flags-02696)"},
+    {"VUID-vkCmdDrawMeshTasksNV-maxMultiviewInstanceIndex-02688", "If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-maxMultiviewInstanceIndex-02688)"},
+    {"VUID-vkCmdDrawMeshTasksNV-renderPass-02684", "The current render pass must be compatible with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-renderPass-02684)"},
+    {"VUID-vkCmdDrawMeshTasksNV-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-renderpass)"},
+    {"VUID-vkCmdDrawMeshTasksNV-sampleLocationsEnable-02689", "If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-sampleLocationsEnable-02689)"},
+    {"VUID-vkCmdDrawMeshTasksNV-subpass-02685", "The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-subpass-02685)"},
+    {"VUID-vkCmdDrawMeshTasksNV-taskCount-02119", "taskCount must be less than or equal to VkPhysicalDeviceMeshShaderPropertiesNV::maxDrawMeshTasksCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdDrawMeshTasksNV-taskCount-02119)"},
+    {"VUID-vkCmdEndConditionalRenderingEXT-None-01985", "Conditional rendering must be active (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndConditionalRenderingEXT-None-01985)"},
+    {"VUID-vkCmdEndConditionalRenderingEXT-None-01986", "If conditional rendering was made active outside of a render pass instance, it must not be ended inside a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndConditionalRenderingEXT-None-01986)"},
+    {"VUID-vkCmdEndConditionalRenderingEXT-None-01987", "If conditional rendering was made active within a subpass it must be ended in the same subpass (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndConditionalRenderingEXT-None-01987)"},
+    {"VUID-vkCmdEndConditionalRenderingEXT-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndConditionalRenderingEXT-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdEndConditionalRenderingEXT-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndConditionalRenderingEXT-commandBuffer-parameter)"},
+    {"VUID-vkCmdEndConditionalRenderingEXT-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndConditionalRenderingEXT-commandBuffer-recording)"},
+    {"VUID-vkCmdEndDebugUtilsLabelEXT-commandBuffer-01912", "There must be an outstanding vkCmdBeginDebugUtilsLabelEXT command prior to the vkCmdEndDebugUtilsLabelEXT on the queue that commandBuffer is submitted to (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndDebugUtilsLabelEXT-commandBuffer-01912)"},
+    {"VUID-vkCmdEndDebugUtilsLabelEXT-commandBuffer-01913", "If commandBuffer is a secondary command buffer, there must be an outstanding vkCmdBeginDebugUtilsLabelEXT command recorded to commandBuffer that has not previously been ended by a call to vkCmdEndDebugUtilsLabelEXT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndDebugUtilsLabelEXT-commandBuffer-01913)"},
+    {"VUID-vkCmdEndDebugUtilsLabelEXT-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndDebugUtilsLabelEXT-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdEndDebugUtilsLabelEXT-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndDebugUtilsLabelEXT-commandBuffer-parameter)"},
+    {"VUID-vkCmdEndDebugUtilsLabelEXT-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndDebugUtilsLabelEXT-commandBuffer-recording)"},
+    {"VUID-vkCmdEndQuery-None-01923", "All queries used by the command must be active (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQuery-None-01923)"},
+    {"VUID-vkCmdEndQuery-commandBuffer-01886", "commandBuffer must not be a protected command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQuery-commandBuffer-01886)"},
+    {"VUID-vkCmdEndQuery-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQuery-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdEndQuery-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQuery-commandBuffer-parameter)"},
+    {"VUID-vkCmdEndQuery-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQuery-commandBuffer-recording)"},
+    {"VUID-vkCmdEndQuery-commonparent", "Both of commandBuffer, and queryPool must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQuery-commonparent)"},
+    {"VUID-vkCmdEndQuery-query-00810", "query must be less than the number of queries in queryPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQuery-query-00810)"},
+    {"VUID-vkCmdEndQuery-query-00812", "If vkCmdEndQuery is called within a render pass instance, the sum of query and the number of bits set in the current subpass's view mask must be less than or equal to the number of queries in queryPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQuery-query-00812)"},
+    {"VUID-vkCmdEndQuery-queryPool-03227", "If queryPool was created with a queryType of VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR and one or more of the counters used to create queryPool was VK_QUERY_SCOPE_COMMAND_BUFFER_KHR, the vkCmdEndQuery must be the last recorded command in commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQuery-queryPool-03227)"},
+    {"VUID-vkCmdEndQuery-queryPool-03228", "If queryPool was created with a queryType of VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR and one or more of the counters used to create queryPool was VK_QUERY_SCOPE_RENDER_PASS_KHR, the vkCmdEndQuery must not be recorded within a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQuery-queryPool-03228)"},
+    {"VUID-vkCmdEndQuery-queryPool-parameter", "queryPool must be a valid VkQueryPool handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQuery-queryPool-parameter)"},
+    {"VUID-vkCmdEndQueryIndexedEXT-None-02342", "All queries used by the command must be active (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQueryIndexedEXT-None-02342)"},
+    {"VUID-vkCmdEndQueryIndexedEXT-commandBuffer-02344", "commandBuffer must not be a protected command buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQueryIndexedEXT-commandBuffer-02344)"},
+    {"VUID-vkCmdEndQueryIndexedEXT-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQueryIndexedEXT-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdEndQueryIndexedEXT-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQueryIndexedEXT-commandBuffer-parameter)"},
+    {"VUID-vkCmdEndQueryIndexedEXT-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQueryIndexedEXT-commandBuffer-recording)"},
+    {"VUID-vkCmdEndQueryIndexedEXT-commonparent", "Both of commandBuffer, and queryPool must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQueryIndexedEXT-commonparent)"},
+    {"VUID-vkCmdEndQueryIndexedEXT-query-02343", "query must be less than the number of queries in queryPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQueryIndexedEXT-query-02343)"},
+    {"VUID-vkCmdEndQueryIndexedEXT-query-02345", "If vkCmdEndQueryIndexedEXT is called within a render pass instance, the sum of query and the number of bits set in the current subpass's view mask must be less than or equal to the number of queries in queryPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQueryIndexedEXT-query-02345)"},
+    {"VUID-vkCmdEndQueryIndexedEXT-queryPool-parameter", "queryPool must be a valid VkQueryPool handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQueryIndexedEXT-queryPool-parameter)"},
+    {"VUID-vkCmdEndQueryIndexedEXT-queryType-02346", "If the queryType used to create queryPool was VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT the index parameter must be less than VkPhysicalDeviceTransformFeedbackPropertiesEXT::maxTransformFeedbackStreams (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQueryIndexedEXT-queryType-02346)"},
+    {"VUID-vkCmdEndQueryIndexedEXT-queryType-02347", "If the queryType used to create queryPool was not VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT the index must be zero (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQueryIndexedEXT-queryType-02347)"},
+    {"VUID-vkCmdEndQueryIndexedEXT-queryType-02723", "If the queryType used to create queryPool was VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT index must equal the index used to begin the query (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndQueryIndexedEXT-queryType-02723)"},
+    {"VUID-vkCmdEndRenderPass-None-00910", "The current subpass index must be equal to the number of subpasses in the render pass minus one (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndRenderPass-None-00910)"},
+    {"VUID-vkCmdEndRenderPass-None-02351", "This command must not be recorded when transform feedback is active (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndRenderPass-None-02351)"},
+    {"VUID-vkCmdEndRenderPass-bufferlevel", "commandBuffer must be a primary VkCommandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndRenderPass-bufferlevel)"},
+    {"VUID-vkCmdEndRenderPass-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndRenderPass-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdEndRenderPass-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndRenderPass-commandBuffer-parameter)"},
+    {"VUID-vkCmdEndRenderPass-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndRenderPass-commandBuffer-recording)"},
+    {"VUID-vkCmdEndRenderPass-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndRenderPass-renderpass)"},
+    {"VUID-vkCmdEndRenderPass2KHR-None-02352", "This command must not be recorded when transform feedback is active (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndRenderPass2KHR-None-02352)"},
+    {"VUID-vkCmdEndRenderPass2KHR-None-03103", "The current subpass index must be equal to the number of subpasses in the render pass minus one (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndRenderPass2KHR-None-03103)"},
+    {"VUID-vkCmdEndRenderPass2KHR-bufferlevel", "commandBuffer must be a primary VkCommandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndRenderPass2KHR-bufferlevel)"},
+    {"VUID-vkCmdEndRenderPass2KHR-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndRenderPass2KHR-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdEndRenderPass2KHR-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndRenderPass2KHR-commandBuffer-parameter)"},
+    {"VUID-vkCmdEndRenderPass2KHR-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndRenderPass2KHR-commandBuffer-recording)"},
+    {"VUID-vkCmdEndRenderPass2KHR-pSubpassEndInfo-parameter", "pSubpassEndInfo must be a valid pointer to a valid VkSubpassEndInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndRenderPass2KHR-pSubpassEndInfo-parameter)"},
+    {"VUID-vkCmdEndRenderPass2KHR-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndRenderPass2KHR-renderpass)"},
+    {"VUID-vkCmdEndTransformFeedbackEXT-None-02375", "Transform feedback must be active (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndTransformFeedbackEXT-None-02375)"},
+    {"VUID-vkCmdEndTransformFeedbackEXT-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndTransformFeedbackEXT-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdEndTransformFeedbackEXT-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndTransformFeedbackEXT-commandBuffer-parameter)"},
+    {"VUID-vkCmdEndTransformFeedbackEXT-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndTransformFeedbackEXT-commandBuffer-recording)"},
+    {"VUID-vkCmdEndTransformFeedbackEXT-commonparent", "Both of commandBuffer, and the elements of pCounterBuffers that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndTransformFeedbackEXT-commonparent)"},
+    {"VUID-vkCmdEndTransformFeedbackEXT-counterBufferCount-02608", "If counterBufferCount is not 0, and pCounterBuffers is not NULL, pCounterBuffers must be a valid pointer to an array of counterBufferCount VkBuffer handles that are either valid or VK_NULL_HANDLE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndTransformFeedbackEXT-counterBufferCount-02608)"},
+    {"VUID-vkCmdEndTransformFeedbackEXT-firstCounterBuffer-02376", "firstCounterBuffer must be less than VkPhysicalDeviceTransformFeedbackPropertiesEXT::maxTransformFeedbackBuffers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndTransformFeedbackEXT-firstCounterBuffer-02376)"},
+    {"VUID-vkCmdEndTransformFeedbackEXT-firstCounterBuffer-02377", "The sum of firstCounterBuffer and counterBufferCount must be less than or equal to VkPhysicalDeviceTransformFeedbackPropertiesEXT::maxTransformFeedbackBuffers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndTransformFeedbackEXT-firstCounterBuffer-02377)"},
+    {"VUID-vkCmdEndTransformFeedbackEXT-pCounterBuffer-02379", "If pCounterBuffer is NULL, then pCounterBufferOffsets must also be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndTransformFeedbackEXT-pCounterBuffer-02379)"},
+    {"VUID-vkCmdEndTransformFeedbackEXT-pCounterBufferOffsets-02378", "For each buffer handle in the array, if it is not VK_NULL_HANDLE it must reference a buffer large enough to hold 4 bytes at the corresponding offset from the pCounterBufferOffsets array (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndTransformFeedbackEXT-pCounterBufferOffsets-02378)"},
+    {"VUID-vkCmdEndTransformFeedbackEXT-pCounterBufferOffsets-parameter", "If counterBufferCount is not 0, and pCounterBufferOffsets is not NULL, pCounterBufferOffsets must be a valid pointer to an array of counterBufferCount VkDeviceSize values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndTransformFeedbackEXT-pCounterBufferOffsets-parameter)"},
+    {"VUID-vkCmdEndTransformFeedbackEXT-pCounterBuffers-02380", "For each buffer handle in the pCounterBuffers array that is not VK_NULL_HANDLE it must have been created with a usage value containing VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndTransformFeedbackEXT-pCounterBuffers-02380)"},
+    {"VUID-vkCmdEndTransformFeedbackEXT-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndTransformFeedbackEXT-renderpass)"},
+    {"VUID-vkCmdEndTransformFeedbackEXT-transformFeedback-02374", "VkPhysicalDeviceTransformFeedbackFeaturesEXT::transformFeedback must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdEndTransformFeedbackEXT-transformFeedback-02374)"},
+    {"VUID-vkCmdExecuteCommands-None-02286", "This command must not be recorded when transform feedback is active (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdExecuteCommands-None-02286)"},
+    {"VUID-vkCmdExecuteCommands-bufferlevel", "commandBuffer must be a primary VkCommandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdExecuteCommands-bufferlevel)"},
+    {"VUID-vkCmdExecuteCommands-commandBuffer-00087", "commandBuffer must have been allocated with a level of VK_COMMAND_BUFFER_LEVEL_PRIMARY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdExecuteCommands-commandBuffer-00087)"},
+    {"VUID-vkCmdExecuteCommands-commandBuffer-00101", "If the inherited queries feature is not enabled, commandBuffer must not have any queries active (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdExecuteCommands-commandBuffer-00101)"},
+    {"VUID-vkCmdExecuteCommands-commandBuffer-00102", "If commandBuffer has a VK_QUERY_TYPE_OCCLUSION query active, then each element of pCommandBuffers must have been recorded with VkCommandBufferInheritanceInfo::occlusionQueryEnable set to VK_TRUE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdExecuteCommands-commandBuffer-00102)"},
+    {"VUID-vkCmdExecuteCommands-commandBuffer-00103", "If commandBuffer has a VK_QUERY_TYPE_OCCLUSION query active, then each element of pCommandBuffers must have been recorded with VkCommandBufferInheritanceInfo::queryFlags having all bits set that are set for the query (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdExecuteCommands-commandBuffer-00103)"},
+    {"VUID-vkCmdExecuteCommands-commandBuffer-00104", "If commandBuffer has a VK_QUERY_TYPE_PIPELINE_STATISTICS query active, then each element of pCommandBuffers must have been recorded with VkCommandBufferInheritanceInfo::pipelineStatistics having all bits set that are set in the VkQueryPool the query uses (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdExecuteCommands-commandBuffer-00104)"},
+    {"VUID-vkCmdExecuteCommands-commandBuffer-01820", "If commandBuffer is a protected command buffer, then each element of pCommandBuffers must be a protected command buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdExecuteCommands-commandBuffer-01820)"},
+    {"VUID-vkCmdExecuteCommands-commandBuffer-01821", "If commandBuffer is an unprotected command buffer, then each element of pCommandBuffers must be an unprotected command buffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdExecuteCommands-commandBuffer-01821)"},
+    {"VUID-vkCmdExecuteCommands-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support transfer, graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdExecuteCommands-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdExecuteCommands-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdExecuteCommands-commandBuffer-parameter)"},
+    {"VUID-vkCmdExecuteCommands-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdExecuteCommands-commandBuffer-recording)"},
+    {"VUID-vkCmdExecuteCommands-commandBufferCount-arraylength", "commandBufferCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdExecuteCommands-commandBufferCount-arraylength)"},
+    {"VUID-vkCmdExecuteCommands-commonparent", "Both of commandBuffer, and the elements of pCommandBuffers must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdExecuteCommands-commonparent)"},
+    {"VUID-vkCmdExecuteCommands-contents-00095", "If vkCmdExecuteCommands is being called within a render pass instance, that render pass instance must have been begun with the contents parameter of vkCmdBeginRenderPass set to VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdExecuteCommands-contents-00095)"},
+    {"VUID-vkCmdExecuteCommands-pCommandBuffers-00088", "Each element of pCommandBuffers must have been allocated with a level of VK_COMMAND_BUFFER_LEVEL_SECONDARY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdExecuteCommands-pCommandBuffers-00088)"},
+    {"VUID-vkCmdExecuteCommands-pCommandBuffers-00089", "Each element of pCommandBuffers must be in the pending or executable state. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdExecuteCommands-pCommandBuffers-00089)"},
+    {"VUID-vkCmdExecuteCommands-pCommandBuffers-00090", "If any element of pCommandBuffers was not recorded with the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT flag, and it was recorded into any other primary command buffer, that primary command buffer must not be in the pending state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdExecuteCommands-pCommandBuffers-00090)"},
+    {"VUID-vkCmdExecuteCommands-pCommandBuffers-00091", "If any element of pCommandBuffers was not recorded with the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT flag, it must not be in the pending state. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdExecuteCommands-pCommandBuffers-00091)"},
+    {"VUID-vkCmdExecuteCommands-pCommandBuffers-00092", "If any element of pCommandBuffers was not recorded with the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT flag, it must not have already been recorded to commandBuffer. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdExecuteCommands-pCommandBuffers-00092)"},
+    {"VUID-vkCmdExecuteCommands-pCommandBuffers-00093", "If any element of pCommandBuffers was not recorded with the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT flag, it must not appear more than once in pCommandBuffers. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdExecuteCommands-pCommandBuffers-00093)"},
+    {"VUID-vkCmdExecuteCommands-pCommandBuffers-00094", "Each element of pCommandBuffers must have been allocated from a VkCommandPool that was created for the same queue family as the VkCommandPool from which commandBuffer was allocated (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdExecuteCommands-pCommandBuffers-00094)"},
+    {"VUID-vkCmdExecuteCommands-pCommandBuffers-00096", "If vkCmdExecuteCommands is being called within a render pass instance, each element of pCommandBuffers must have been recorded with the VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdExecuteCommands-pCommandBuffers-00096)"},
+    {"VUID-vkCmdExecuteCommands-pCommandBuffers-00097", "If vkCmdExecuteCommands is being called within a render pass instance, each element of pCommandBuffers must have been recorded with VkCommandBufferInheritanceInfo::subpass set to the index of the subpass which the given command buffer will be executed in (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdExecuteCommands-pCommandBuffers-00097)"},
+    {"VUID-vkCmdExecuteCommands-pCommandBuffers-00099", "If vkCmdExecuteCommands is being called within a render pass instance, and any element of pCommandBuffers was recorded with VkCommandBufferInheritanceInfo::framebuffer not equal to VK_NULL_HANDLE, that VkFramebuffer must match the VkFramebuffer used in the current render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdExecuteCommands-pCommandBuffers-00099)"},
+    {"VUID-vkCmdExecuteCommands-pCommandBuffers-00100", "If vkCmdExecuteCommands is not being called within a render pass instance, each element of pCommandBuffers must not have been recorded with the VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdExecuteCommands-pCommandBuffers-00100)"},
+    {"VUID-vkCmdExecuteCommands-pCommandBuffers-00105", "Each element of pCommandBuffers must not begin any query types that are active in commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdExecuteCommands-pCommandBuffers-00105)"},
+    {"VUID-vkCmdExecuteCommands-pCommandBuffers-parameter", "pCommandBuffers must be a valid pointer to an array of commandBufferCount valid VkCommandBuffer handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdExecuteCommands-pCommandBuffers-parameter)"},
+    {"VUID-vkCmdExecuteCommands-pInheritanceInfo-00098", "If vkCmdExecuteCommands is being called within a render pass instance, the render passes specified in the pBeginInfo->pInheritanceInfo->renderPass members of the vkBeginCommandBuffer commands used to begin recording each element of pCommandBuffers must be compatible with the current render pass. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdExecuteCommands-pInheritanceInfo-00098)"},
+    {"VUID-vkCmdFillBuffer-commandBuffer-00030", "The VkCommandPool that commandBuffer was allocated from must support graphics or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdFillBuffer-commandBuffer-00030)"},
+    {"VUID-vkCmdFillBuffer-commandBuffer-01811", "If commandBuffer is an unprotected command buffer, then dstBuffer must not be a protected buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdFillBuffer-commandBuffer-01811)"},
+    {"VUID-vkCmdFillBuffer-commandBuffer-01812", "If commandBuffer is a protected command buffer, then dstBuffer must not be an unprotected buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdFillBuffer-commandBuffer-01812)"},
+    {"VUID-vkCmdFillBuffer-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support transfer, graphics or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdFillBuffer-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdFillBuffer-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdFillBuffer-commandBuffer-parameter)"},
+    {"VUID-vkCmdFillBuffer-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdFillBuffer-commandBuffer-recording)"},
+    {"VUID-vkCmdFillBuffer-commonparent", "Both of commandBuffer, and dstBuffer must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdFillBuffer-commonparent)"},
+    {"VUID-vkCmdFillBuffer-dstBuffer-00029", "dstBuffer must have been created with VK_BUFFER_USAGE_TRANSFER_DST_BIT usage flag (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdFillBuffer-dstBuffer-00029)"},
+    {"VUID-vkCmdFillBuffer-dstBuffer-00031", "If dstBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdFillBuffer-dstBuffer-00031)"},
+    {"VUID-vkCmdFillBuffer-dstBuffer-parameter", "dstBuffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdFillBuffer-dstBuffer-parameter)"},
+    {"VUID-vkCmdFillBuffer-dstOffset-00024", "dstOffset must be less than the size of dstBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdFillBuffer-dstOffset-00024)"},
+    {"VUID-vkCmdFillBuffer-dstOffset-00025", "dstOffset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdFillBuffer-dstOffset-00025)"},
+    {"VUID-vkCmdFillBuffer-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdFillBuffer-renderpass)"},
+    {"VUID-vkCmdFillBuffer-size-00026", "If size is not equal to VK_WHOLE_SIZE, size must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdFillBuffer-size-00026)"},
+    {"VUID-vkCmdFillBuffer-size-00027", "If size is not equal to VK_WHOLE_SIZE, size must be less than or equal to the size of dstBuffer minus dstOffset (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdFillBuffer-size-00027)"},
+    {"VUID-vkCmdFillBuffer-size-00028", "If size is not equal to VK_WHOLE_SIZE, size must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdFillBuffer-size-00028)"},
+    {"VUID-vkCmdInsertDebugUtilsLabelEXT-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdInsertDebugUtilsLabelEXT-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdInsertDebugUtilsLabelEXT-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdInsertDebugUtilsLabelEXT-commandBuffer-parameter)"},
+    {"VUID-vkCmdInsertDebugUtilsLabelEXT-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdInsertDebugUtilsLabelEXT-commandBuffer-recording)"},
+    {"VUID-vkCmdInsertDebugUtilsLabelEXT-pLabelInfo-parameter", "pLabelInfo must be a valid pointer to a valid VkDebugUtilsLabelEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdInsertDebugUtilsLabelEXT-pLabelInfo-parameter)"},
+    {"VUID-vkCmdNextSubpass-None-00909", "The current subpass index must be less than the number of subpasses in the render pass minus one (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdNextSubpass-None-00909)"},
+    {"VUID-vkCmdNextSubpass-None-02349", "This command must not be recorded when transform feedback is active (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdNextSubpass-None-02349)"},
+    {"VUID-vkCmdNextSubpass-bufferlevel", "commandBuffer must be a primary VkCommandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdNextSubpass-bufferlevel)"},
+    {"VUID-vkCmdNextSubpass-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdNextSubpass-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdNextSubpass-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdNextSubpass-commandBuffer-parameter)"},
+    {"VUID-vkCmdNextSubpass-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdNextSubpass-commandBuffer-recording)"},
+    {"VUID-vkCmdNextSubpass-contents-parameter", "contents must be a valid VkSubpassContents value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdNextSubpass-contents-parameter)"},
+    {"VUID-vkCmdNextSubpass-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdNextSubpass-renderpass)"},
+    {"VUID-vkCmdNextSubpass2KHR-None-02350", "This command must not be recorded when transform feedback is active (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdNextSubpass2KHR-None-02350)"},
+    {"VUID-vkCmdNextSubpass2KHR-None-03102", "The current subpass index must be less than the number of subpasses in the render pass minus one (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdNextSubpass2KHR-None-03102)"},
+    {"VUID-vkCmdNextSubpass2KHR-bufferlevel", "commandBuffer must be a primary VkCommandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdNextSubpass2KHR-bufferlevel)"},
+    {"VUID-vkCmdNextSubpass2KHR-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdNextSubpass2KHR-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdNextSubpass2KHR-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdNextSubpass2KHR-commandBuffer-parameter)"},
+    {"VUID-vkCmdNextSubpass2KHR-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdNextSubpass2KHR-commandBuffer-recording)"},
+    {"VUID-vkCmdNextSubpass2KHR-pSubpassBeginInfo-parameter", "pSubpassBeginInfo must be a valid pointer to a valid VkSubpassBeginInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdNextSubpass2KHR-pSubpassBeginInfo-parameter)"},
+    {"VUID-vkCmdNextSubpass2KHR-pSubpassEndInfo-parameter", "pSubpassEndInfo must be a valid pointer to a valid VkSubpassEndInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdNextSubpass2KHR-pSubpassEndInfo-parameter)"},
+    {"VUID-vkCmdNextSubpass2KHR-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdNextSubpass2KHR-renderpass)"},
+    {"VUID-vkCmdPipelineBarrier-bufferMemoryBarrierCount-01178", "If vkCmdPipelineBarrier is called within a render pass instance, bufferMemoryBarrierCount must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-bufferMemoryBarrierCount-01178)"},
+    {"VUID-vkCmdPipelineBarrier-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support transfer, graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdPipelineBarrier-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-commandBuffer-parameter)"},
+    {"VUID-vkCmdPipelineBarrier-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-commandBuffer-recording)"},
+    {"VUID-vkCmdPipelineBarrier-dependencyFlags-01186", "If vkCmdPipelineBarrier is called outside of a render pass instance, dependencyFlags must not include VK_DEPENDENCY_VIEW_LOCAL_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-dependencyFlags-01186)"},
+    {"VUID-vkCmdPipelineBarrier-dependencyFlags-parameter", "dependencyFlags must be a valid combination of VkDependencyFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-dependencyFlags-parameter)"},
+    {"VUID-vkCmdPipelineBarrier-dstAccessMask-02816", "The dstAccessMask member of each element of pMemoryBarriers must only include access flags that are supported by one or more of the pipeline stages in dstStageMask, as specified in the table of supported access types (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-dstAccessMask-02816)"},
+    {"VUID-vkCmdPipelineBarrier-dstStageMask-01169", "If the geometry shaders feature is not enabled, dstStageMask must not contain VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-dstStageMask-01169)"},
+    {"VUID-vkCmdPipelineBarrier-dstStageMask-01171", "If the tessellation shaders feature is not enabled, dstStageMask must not contain VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT or VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-dstStageMask-01171)"},
+    {"VUID-vkCmdPipelineBarrier-dstStageMask-02117", "If the mesh shaders feature is not enabled, dstStageMask must not contain VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-dstStageMask-02117)"},
+    {"VUID-vkCmdPipelineBarrier-dstStageMask-02118", "If the task shaders feature is not enabled, dstStageMask must not contain VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-dstStageMask-02118)"},
+    {"VUID-vkCmdPipelineBarrier-dstStageMask-parameter", "dstStageMask must be a valid combination of VkPipelineStageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-dstStageMask-parameter)"},
+    {"VUID-vkCmdPipelineBarrier-dstStageMask-requiredbitmask", "dstStageMask must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-dstStageMask-requiredbitmask)"},
+    {"VUID-vkCmdPipelineBarrier-image-02635", "If vkCmdPipelineBarrier is called within a render pass instance, the image member of any element of pImageMemoryBarriers must be equal to one of the elements of pAttachments that the current framebuffer was created with, that is also referred to by one of the elements of the pColorAttachments, pResolveAttachments or pDepthStencilAttachment members of the VkSubpassDescription instance or by the pDepthStencilResolveAttachment member of the VkSubpassDescriptionDepthStencilResolveKHR structure that the current subpass was created with (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-image-02635)"},
+    {"VUID-vkCmdPipelineBarrier-image-02637", "If vkCmdPipelineBarrier is called within a render pass instance, the image member of any element of pImageMemoryBarriers must be equal to one of the elements of pAttachments that the current framebuffer was created with, that is also referred to by one of the elements of the pColorAttachments, pResolveAttachments or pDepthStencilAttachment members of the VkSubpassDescription instance that the current subpass was created with (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-image-02637)"},
+    {"VUID-vkCmdPipelineBarrier-oldLayout-01181", "If vkCmdPipelineBarrier is called within a render pass instance, the oldLayout and newLayout members of an element of pImageMemoryBarriers must be equal (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-oldLayout-01181)"},
+    {"VUID-vkCmdPipelineBarrier-oldLayout-02636", "If vkCmdPipelineBarrier is called within a render pass instance, the oldLayout and newLayout members of any element of pImageMemoryBarriers must be equal to the layout member of an element of the pColorAttachments, pResolveAttachments or pDepthStencilAttachment members of the VkSubpassDescription instance or by the pDepthStencilResolveAttachment member of the VkSubpassDescriptionDepthStencilResolveKHR structure that the current subpass was created with, that refers to the same image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-oldLayout-02636)"},
+    {"VUID-vkCmdPipelineBarrier-oldLayout-02638", "If vkCmdPipelineBarrier is called within a render pass instance, the oldLayout and newLayout members of any element of pImageMemoryBarriers must be equal to the layout member of an element of the pColorAttachments, pResolveAttachments or pDepthStencilAttachment members of the VkSubpassDescription instance that the current subpass was created with, that refers to the same image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-oldLayout-02638)"},
+    {"VUID-vkCmdPipelineBarrier-pBufferMemoryBarriers-02817", "For any element of pBufferMemoryBarriers, if its srcQueueFamilyIndex and dstQueueFamilyIndex members are equal, or if its srcQueueFamilyIndex is the queue family index that was used to create the command pool that commandBuffer was allocated from, then its srcAccessMask member must only contain access flags that are supported by one or more of the pipeline stages in srcStageMask, as specified in the table of supported access types (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-pBufferMemoryBarriers-02817)"},
+    {"VUID-vkCmdPipelineBarrier-pBufferMemoryBarriers-02818", "For any element of pBufferMemoryBarriers, if its srcQueueFamilyIndex and dstQueueFamilyIndex members are equal, or if its dstQueueFamilyIndex is the queue family index that was used to create the command pool that commandBuffer was allocated from, then its dstAccessMask member must only contain access flags that are supported by one or more of the pipeline stages in dstStageMask, as specified in the table of supported access types (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-pBufferMemoryBarriers-02818)"},
+    {"VUID-vkCmdPipelineBarrier-pBufferMemoryBarriers-parameter", "If bufferMemoryBarrierCount is not 0, pBufferMemoryBarriers must be a valid pointer to an array of bufferMemoryBarrierCount valid VkBufferMemoryBarrier structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-pBufferMemoryBarriers-parameter)"},
+    {"VUID-vkCmdPipelineBarrier-pDependencies-02285", "If vkCmdPipelineBarrier is called within a render pass instance, the render pass must have been created with at least one VkSubpassDependency instance in VkRenderPassCreateInfo::pDependencies that expresses a dependency from the current subpass to itself, and for which srcStageMask contains a subset of the bit values in VkSubpassDependency::srcStageMask, dstStageMask contains a subset of the bit values in VkSubpassDependency::dstStageMask, dependencyFlags is equal to VkSubpassDependency::dependencyFlags, srcAccessMask member of each element of pMemoryBarriers and pImageMemoryBarriers contains a subset of the bit values in VkSubpassDependency::srcAccessMask, and dstAccessMask member of each element of pMemoryBarriers and pImageMemoryBarriers contains a subset of the bit values in VkSubpassDependency::dstAccessMask (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-pDependencies-02285)"},
+    {"VUID-vkCmdPipelineBarrier-pImageMemoryBarriers-02819", "For any element of pImageMemoryBarriers, if its srcQueueFamilyIndex and dstQueueFamilyIndex members are equal, or if its srcQueueFamilyIndex is the queue family index that was used to create the command pool that commandBuffer was allocated from, then its srcAccessMask member must only contain access flags that are supported by one or more of the pipeline stages in srcStageMask, as specified in the table of supported access types (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-pImageMemoryBarriers-02819)"},
+    {"VUID-vkCmdPipelineBarrier-pImageMemoryBarriers-02820", "For any element of pImageMemoryBarriers, if its srcQueueFamilyIndex and dstQueueFamilyIndex members are equal, or if its dstQueueFamilyIndex is the queue family index that was used to create the command pool that commandBuffer was allocated from, then its dstAccessMask member must only contain access flags that are supported by one or more of the pipeline stages in dstStageMask, as specified in the table of supported access types (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-pImageMemoryBarriers-02820)"},
+    {"VUID-vkCmdPipelineBarrier-pImageMemoryBarriers-parameter", "If imageMemoryBarrierCount is not 0, pImageMemoryBarriers must be a valid pointer to an array of imageMemoryBarrierCount valid VkImageMemoryBarrier structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-pImageMemoryBarriers-parameter)"},
+    {"VUID-vkCmdPipelineBarrier-pMemoryBarriers-parameter", "If memoryBarrierCount is not 0, pMemoryBarriers must be a valid pointer to an array of memoryBarrierCount valid VkMemoryBarrier structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-pMemoryBarriers-parameter)"},
+    {"VUID-vkCmdPipelineBarrier-srcAccessMask-02815", "The srcAccessMask member of each element of pMemoryBarriers must only include access flags that are supported by one or more of the pipeline stages in srcStageMask, as specified in the table of supported access types (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-srcAccessMask-02815)"},
+    {"VUID-vkCmdPipelineBarrier-srcQueueFamilyIndex-01182", "If vkCmdPipelineBarrier is called within a render pass instance, the srcQueueFamilyIndex and dstQueueFamilyIndex members of any element of pImageMemoryBarriers must be VK_QUEUE_FAMILY_IGNORED (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-srcQueueFamilyIndex-01182)"},
+    {"VUID-vkCmdPipelineBarrier-srcStageMask-01168", "If the geometry shaders feature is not enabled, srcStageMask must not contain VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-srcStageMask-01168)"},
+    {"VUID-vkCmdPipelineBarrier-srcStageMask-01170", "If the tessellation shaders feature is not enabled, srcStageMask must not contain VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT or VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-srcStageMask-01170)"},
+    {"VUID-vkCmdPipelineBarrier-srcStageMask-01183", "Any pipeline stage included in srcStageMask or dstStageMask must be supported by the capabilities of the queue family specified by the queueFamilyIndex member of the VkCommandPoolCreateInfo structure that was used to create the VkCommandPool that commandBuffer was allocated from, as specified in the table of supported pipeline stages (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-srcStageMask-01183)"},
+    {"VUID-vkCmdPipelineBarrier-srcStageMask-02115", "If the mesh shaders feature is not enabled, srcStageMask must not contain VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-srcStageMask-02115)"},
+    {"VUID-vkCmdPipelineBarrier-srcStageMask-02116", "If the task shaders feature is not enabled, srcStageMask must not contain VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-srcStageMask-02116)"},
+    {"VUID-vkCmdPipelineBarrier-srcStageMask-parameter", "srcStageMask must be a valid combination of VkPipelineStageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-srcStageMask-parameter)"},
+    {"VUID-vkCmdPipelineBarrier-srcStageMask-requiredbitmask", "srcStageMask must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPipelineBarrier-srcStageMask-requiredbitmask)"},
+    {"VUID-vkCmdProcessCommandsNVX-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdProcessCommandsNVX-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdProcessCommandsNVX-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdProcessCommandsNVX-commandBuffer-parameter)"},
+    {"VUID-vkCmdProcessCommandsNVX-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdProcessCommandsNVX-commandBuffer-recording)"},
+    {"VUID-vkCmdProcessCommandsNVX-pProcessCommandsInfo-parameter", "pProcessCommandsInfo must be a valid pointer to a valid VkCmdProcessCommandsInfoNVX structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdProcessCommandsNVX-pProcessCommandsInfo-parameter)"},
+    {"VUID-vkCmdProcessCommandsNVX-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdProcessCommandsNVX-renderpass)"},
+    {"VUID-vkCmdPushConstants-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushConstants-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdPushConstants-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushConstants-commandBuffer-parameter)"},
+    {"VUID-vkCmdPushConstants-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushConstants-commandBuffer-recording)"},
+    {"VUID-vkCmdPushConstants-commonparent", "Both of commandBuffer, and layout must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushConstants-commonparent)"},
+    {"VUID-vkCmdPushConstants-layout-parameter", "layout must be a valid VkPipelineLayout handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushConstants-layout-parameter)"},
+    {"VUID-vkCmdPushConstants-offset-00368", "offset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushConstants-offset-00368)"},
+    {"VUID-vkCmdPushConstants-offset-00370", "offset must be less than VkPhysicalDeviceLimits::maxPushConstantsSize (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushConstants-offset-00370)"},
+    {"VUID-vkCmdPushConstants-offset-01795", "For each byte in the range specified by offset and size and for each shader stage in stageFlags, there must be a push constant range in layout that includes that byte and that stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushConstants-offset-01795)"},
+    {"VUID-vkCmdPushConstants-offset-01796", "For each byte in the range specified by offset and size and for each push constant range that overlaps that byte, stageFlags must include all stages in that push constant range's VkPushConstantRange::stageFlags (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushConstants-offset-01796)"},
+    {"VUID-vkCmdPushConstants-pValues-parameter", "pValues must be a valid pointer to an array of size bytes (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushConstants-pValues-parameter)"},
+    {"VUID-vkCmdPushConstants-size-00369", "size must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushConstants-size-00369)"},
+    {"VUID-vkCmdPushConstants-size-00371", "size must be less than or equal to VkPhysicalDeviceLimits::maxPushConstantsSize minus offset (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushConstants-size-00371)"},
+    {"VUID-vkCmdPushConstants-size-arraylength", "size must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushConstants-size-arraylength)"},
+    {"VUID-vkCmdPushConstants-stageFlags-parameter", "stageFlags must be a valid combination of VkShaderStageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushConstants-stageFlags-parameter)"},
+    {"VUID-vkCmdPushConstants-stageFlags-requiredbitmask", "stageFlags must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushConstants-stageFlags-requiredbitmask)"},
+    {"VUID-vkCmdPushDescriptorSetKHR-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushDescriptorSetKHR-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdPushDescriptorSetKHR-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushDescriptorSetKHR-commandBuffer-parameter)"},
+    {"VUID-vkCmdPushDescriptorSetKHR-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushDescriptorSetKHR-commandBuffer-recording)"},
+    {"VUID-vkCmdPushDescriptorSetKHR-commonparent", "Both of commandBuffer, and layout must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushDescriptorSetKHR-commonparent)"},
+    {"VUID-vkCmdPushDescriptorSetKHR-descriptorWriteCount-arraylength", "descriptorWriteCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushDescriptorSetKHR-descriptorWriteCount-arraylength)"},
+    {"VUID-vkCmdPushDescriptorSetKHR-layout-parameter", "layout must be a valid VkPipelineLayout handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushDescriptorSetKHR-layout-parameter)"},
+    {"VUID-vkCmdPushDescriptorSetKHR-pDescriptorWrites-parameter", "pDescriptorWrites must be a valid pointer to an array of descriptorWriteCount valid VkWriteDescriptorSet structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushDescriptorSetKHR-pDescriptorWrites-parameter)"},
+    {"VUID-vkCmdPushDescriptorSetKHR-pipelineBindPoint-00363", "pipelineBindPoint must be supported by the commandBuffer's parent VkCommandPool's queue family (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushDescriptorSetKHR-pipelineBindPoint-00363)"},
+    {"VUID-vkCmdPushDescriptorSetKHR-pipelineBindPoint-parameter", "pipelineBindPoint must be a valid VkPipelineBindPoint value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushDescriptorSetKHR-pipelineBindPoint-parameter)"},
+    {"VUID-vkCmdPushDescriptorSetKHR-set-00364", "set must be less than VkPipelineLayoutCreateInfo::setLayoutCount provided when layout was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushDescriptorSetKHR-set-00364)"},
+    {"VUID-vkCmdPushDescriptorSetKHR-set-00365", "set must be the unique set number in the pipeline layout that uses a descriptor set layout that was created with VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushDescriptorSetKHR-set-00365)"},
+    {"VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-00366", "The pipelineBindPoint specified during the creation of the descriptor update template must be supported by the commandBuffer's parent VkCommandPool's queue family (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-00366)"},
+    {"VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-parameter)"},
+    {"VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-recording)"},
+    {"VUID-vkCmdPushDescriptorSetWithTemplateKHR-commonparent", "Each of commandBuffer, descriptorUpdateTemplate, and layout must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushDescriptorSetWithTemplateKHR-commonparent)"},
+    {"VUID-vkCmdPushDescriptorSetWithTemplateKHR-descriptorUpdateTemplate-parameter", "descriptorUpdateTemplate must be a valid VkDescriptorUpdateTemplate handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushDescriptorSetWithTemplateKHR-descriptorUpdateTemplate-parameter)"},
+    {"VUID-vkCmdPushDescriptorSetWithTemplateKHR-layout-parameter", "layout must be a valid VkPipelineLayout handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushDescriptorSetWithTemplateKHR-layout-parameter)"},
+    {"VUID-vkCmdPushDescriptorSetWithTemplateKHR-pData-01686", "pData must be a valid pointer to a memory containing one or more valid instances of VkDescriptorImageInfo, VkDescriptorBufferInfo, or VkBufferView in a layout defined by descriptorUpdateTemplate when it was created with vkCreateDescriptorUpdateTemplateKHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdPushDescriptorSetWithTemplateKHR-pData-01686)"},
+    {"VUID-vkCmdReserveSpaceForCommandsNVX-bufferlevel", "commandBuffer must be a secondary VkCommandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdReserveSpaceForCommandsNVX-bufferlevel)"},
+    {"VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-01329", "The provided commandBuffer must not have had a prior space reservation since its creation or the last reset. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-01329)"},
+    {"VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-01330", "The state of the commandBuffer must be legal to execute all commands within the sequence provided by the indirectCommandsLayout member of pProcessCommandsInfo. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-01330)"},
+    {"VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-parameter)"},
+    {"VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-recording)"},
+    {"VUID-vkCmdReserveSpaceForCommandsNVX-pReserveSpaceInfo-parameter", "pReserveSpaceInfo must be a valid pointer to a valid VkCmdReserveSpaceForCommandsInfoNVX structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdReserveSpaceForCommandsNVX-pReserveSpaceInfo-parameter)"},
+    {"VUID-vkCmdReserveSpaceForCommandsNVX-renderpass", "This command must only be called inside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdReserveSpaceForCommandsNVX-renderpass)"},
+    {"VUID-vkCmdResetEvent-commandBuffer-01157", "commandBuffer's current device mask must include exactly one physical device. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResetEvent-commandBuffer-01157)"},
+    {"VUID-vkCmdResetEvent-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResetEvent-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdResetEvent-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResetEvent-commandBuffer-parameter)"},
+    {"VUID-vkCmdResetEvent-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResetEvent-commandBuffer-recording)"},
+    {"VUID-vkCmdResetEvent-commonparent", "Both of commandBuffer, and event must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResetEvent-commonparent)"},
+    {"VUID-vkCmdResetEvent-event-01156", "When this command executes, event must not be waited on by a vkCmdWaitEvents command that is currently executing (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResetEvent-event-01156)"},
+    {"VUID-vkCmdResetEvent-event-parameter", "event must be a valid VkEvent handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResetEvent-event-parameter)"},
+    {"VUID-vkCmdResetEvent-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResetEvent-renderpass)"},
+    {"VUID-vkCmdResetEvent-stageMask-01153", "stageMask must not include VK_PIPELINE_STAGE_HOST_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResetEvent-stageMask-01153)"},
+    {"VUID-vkCmdResetEvent-stageMask-01154", "If the geometry shaders feature is not enabled, stageMask must not contain VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResetEvent-stageMask-01154)"},
+    {"VUID-vkCmdResetEvent-stageMask-01155", "If the tessellation shaders feature is not enabled, stageMask must not contain VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT or VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResetEvent-stageMask-01155)"},
+    {"VUID-vkCmdResetEvent-stageMask-02109", "If the mesh shaders feature is not enabled, stageMask must not contain VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResetEvent-stageMask-02109)"},
+    {"VUID-vkCmdResetEvent-stageMask-02110", "If the task shaders feature is not enabled, stageMask must not contain VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResetEvent-stageMask-02110)"},
+    {"VUID-vkCmdResetEvent-stageMask-parameter", "stageMask must be a valid combination of VkPipelineStageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResetEvent-stageMask-parameter)"},
+    {"VUID-vkCmdResetEvent-stageMask-requiredbitmask", "stageMask must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResetEvent-stageMask-requiredbitmask)"},
+    {"VUID-vkCmdResetQueryPool-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResetQueryPool-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdResetQueryPool-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResetQueryPool-commandBuffer-parameter)"},
+    {"VUID-vkCmdResetQueryPool-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResetQueryPool-commandBuffer-recording)"},
+    {"VUID-vkCmdResetQueryPool-commonparent", "Both of commandBuffer, and queryPool must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResetQueryPool-commonparent)"},
+    {"VUID-vkCmdResetQueryPool-firstQuery-00796", "firstQuery must be less than the number of queries in queryPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResetQueryPool-firstQuery-00796)"},
+    {"VUID-vkCmdResetQueryPool-firstQuery-00797", "The sum of firstQuery and queryCount must be less than or equal to the number of queries in queryPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResetQueryPool-firstQuery-00797)"},
+    {"VUID-vkCmdResetQueryPool-queryPool-parameter", "queryPool must be a valid VkQueryPool handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResetQueryPool-queryPool-parameter)"},
+    {"VUID-vkCmdResetQueryPool-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResetQueryPool-renderpass)"},
+    {"VUID-vkCmdResolveImage-commandBuffer-01837", "If commandBuffer is an unprotected command buffer, then srcImage must not be a protected image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-commandBuffer-01837)"},
+    {"VUID-vkCmdResolveImage-commandBuffer-01838", "If commandBuffer is an unprotected command buffer, then dstImage must not be a protected image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-commandBuffer-01838)"},
+    {"VUID-vkCmdResolveImage-commandBuffer-01839", "If commandBuffer is a protected command buffer, then dstImage must not be an unprotected image (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-commandBuffer-01839)"},
+    {"VUID-vkCmdResolveImage-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdResolveImage-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-commandBuffer-parameter)"},
+    {"VUID-vkCmdResolveImage-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-commandBuffer-recording)"},
+    {"VUID-vkCmdResolveImage-commonparent", "Each of commandBuffer, dstImage, and srcImage must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-commonparent)"},
+    {"VUID-vkCmdResolveImage-dstImage-00258", "If dstImage is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-dstImage-00258)"},
+    {"VUID-vkCmdResolveImage-dstImage-00259", "dstImage must have a sample count equal to VK_SAMPLE_COUNT_1_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-dstImage-00259)"},
+    {"VUID-vkCmdResolveImage-dstImage-02003", "The format features of dstImage must contain VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-dstImage-02003)"},
+    {"VUID-vkCmdResolveImage-dstImage-02546", "dstImage and srcImage must not have been created with flags containing VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-dstImage-02546)"},
+    {"VUID-vkCmdResolveImage-dstImage-parameter", "dstImage must be a valid VkImage handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-dstImage-parameter)"},
+    {"VUID-vkCmdResolveImage-dstImageLayout-00262", "dstImageLayout must specify the layout of the image subresources of dstImage specified in pRegions at the time this command is executed on a VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-dstImageLayout-00262)"},
+    {"VUID-vkCmdResolveImage-dstImageLayout-00263", "dstImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-dstImageLayout-00263)"},
+    {"VUID-vkCmdResolveImage-dstImageLayout-01401", "dstImageLayout must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-dstImageLayout-01401)"},
+    {"VUID-vkCmdResolveImage-dstImageLayout-parameter", "dstImageLayout must be a valid VkImageLayout value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-dstImageLayout-parameter)"},
+    {"VUID-vkCmdResolveImage-dstSubresource-01710", "The dstSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when dstImage was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-dstSubresource-01710)"},
+    {"VUID-vkCmdResolveImage-dstSubresource-01712", "The dstSubresource.baseArrayLayer + dstSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when dstImage was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-dstSubresource-01712)"},
+    {"VUID-vkCmdResolveImage-pRegions-00253", "The source region specified by each element of pRegions must be a region that is contained within srcImage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-pRegions-00253)"},
+    {"VUID-vkCmdResolveImage-pRegions-00254", "The destination region specified by each element of pRegions must be a region that is contained within dstImage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-pRegions-00254)"},
+    {"VUID-vkCmdResolveImage-pRegions-00255", "The union of all source regions, and the union of all destination regions, specified by the elements of pRegions, must not overlap in memory (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-pRegions-00255)"},
+    {"VUID-vkCmdResolveImage-pRegions-parameter", "pRegions must be a valid pointer to an array of regionCount valid VkImageResolve structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-pRegions-parameter)"},
+    {"VUID-vkCmdResolveImage-regionCount-arraylength", "regionCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-regionCount-arraylength)"},
+    {"VUID-vkCmdResolveImage-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-renderpass)"},
+    {"VUID-vkCmdResolveImage-srcImage-00256", "If srcImage is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-srcImage-00256)"},
+    {"VUID-vkCmdResolveImage-srcImage-00257", "srcImage must have a sample count equal to any valid sample count value other than VK_SAMPLE_COUNT_1_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-srcImage-00257)"},
+    {"VUID-vkCmdResolveImage-srcImage-01386", "srcImage and dstImage must have been created with the same image format (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-srcImage-01386)"},
+    {"VUID-vkCmdResolveImage-srcImage-parameter", "srcImage must be a valid VkImage handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-srcImage-parameter)"},
+    {"VUID-vkCmdResolveImage-srcImageLayout-00260", "srcImageLayout must specify the layout of the image subresources of srcImage specified in pRegions at the time this command is executed on a VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-srcImageLayout-00260)"},
+    {"VUID-vkCmdResolveImage-srcImageLayout-00261", "srcImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-srcImageLayout-00261)"},
+    {"VUID-vkCmdResolveImage-srcImageLayout-01400", "srcImageLayout must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-srcImageLayout-01400)"},
+    {"VUID-vkCmdResolveImage-srcImageLayout-parameter", "srcImageLayout must be a valid VkImageLayout value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-srcImageLayout-parameter)"},
+    {"VUID-vkCmdResolveImage-srcSubresource-01709", "The srcSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when srcImage was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-srcSubresource-01709)"},
+    {"VUID-vkCmdResolveImage-srcSubresource-01711", "The srcSubresource.baseArrayLayer + srcSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when srcImage was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdResolveImage-srcSubresource-01711)"},
+    {"VUID-vkCmdSetBlendConstants-None-00612", "The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_BLEND_CONSTANTS dynamic state enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetBlendConstants-None-00612)"},
+    {"VUID-vkCmdSetBlendConstants-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetBlendConstants-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdSetBlendConstants-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetBlendConstants-commandBuffer-parameter)"},
+    {"VUID-vkCmdSetBlendConstants-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetBlendConstants-commandBuffer-recording)"},
+    {"VUID-vkCmdSetCheckpointNV-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, compute, or transfer operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetCheckpointNV-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdSetCheckpointNV-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetCheckpointNV-commandBuffer-parameter)"},
+    {"VUID-vkCmdSetCheckpointNV-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetCheckpointNV-commandBuffer-recording)"},
+    {"VUID-vkCmdSetCoarseSampleOrderNV-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetCoarseSampleOrderNV-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdSetCoarseSampleOrderNV-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetCoarseSampleOrderNV-commandBuffer-parameter)"},
+    {"VUID-vkCmdSetCoarseSampleOrderNV-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetCoarseSampleOrderNV-commandBuffer-recording)"},
+    {"VUID-vkCmdSetCoarseSampleOrderNV-pCustomSampleOrders-02235", "The array pCustomSampleOrders must not contain two structures with matching values for both the shadingRate and sampleCount members. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetCoarseSampleOrderNV-pCustomSampleOrders-02235)"},
+    {"VUID-vkCmdSetCoarseSampleOrderNV-pCustomSampleOrders-parameter", "If customSampleOrderCount is not 0, pCustomSampleOrders must be a valid pointer to an array of customSampleOrderCount valid VkCoarseSampleOrderCustomNV structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetCoarseSampleOrderNV-pCustomSampleOrders-parameter)"},
+    {"VUID-vkCmdSetCoarseSampleOrderNV-sampleOrderType-02081", "If sampleOrderType is not VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV, customSamplerOrderCount must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetCoarseSampleOrderNV-sampleOrderType-02081)"},
+    {"VUID-vkCmdSetCoarseSampleOrderNV-sampleOrderType-parameter", "sampleOrderType must be a valid VkCoarseSampleOrderTypeNV value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetCoarseSampleOrderNV-sampleOrderType-parameter)"},
+    {"VUID-vkCmdSetDepthBias-None-00789", "The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDepthBias-None-00789)"},
+    {"VUID-vkCmdSetDepthBias-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDepthBias-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdSetDepthBias-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDepthBias-commandBuffer-parameter)"},
+    {"VUID-vkCmdSetDepthBias-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDepthBias-commandBuffer-recording)"},
+    {"VUID-vkCmdSetDepthBias-depthBiasClamp-00790", "If the depth bias clamping feature is not enabled, depthBiasClamp must be 0.0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDepthBias-depthBiasClamp-00790)"},
+    {"VUID-vkCmdSetDepthBounds-None-00599", "The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_DEPTH_BOUNDS dynamic state enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDepthBounds-None-00599)"},
+    {"VUID-vkCmdSetDepthBounds-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDepthBounds-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdSetDepthBounds-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDepthBounds-commandBuffer-parameter)"},
+    {"VUID-vkCmdSetDepthBounds-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDepthBounds-commandBuffer-recording)"},
+    {"VUID-vkCmdSetDepthBounds-maxDepthBounds-00601", "Unless the VK_EXT_depth_range_unrestricted extension is enabled maxDepthBounds must be between 0.0 and 1.0, inclusive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDepthBounds-maxDepthBounds-00601)"},
+    {"VUID-vkCmdSetDepthBounds-maxDepthBounds-02509", "maxDepthBounds must be between 0.0 and 1.0, inclusive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDepthBounds-maxDepthBounds-02509)"},
+    {"VUID-vkCmdSetDepthBounds-minDepthBounds-00600", "Unless the VK_EXT_depth_range_unrestricted extension is enabled minDepthBounds must be between 0.0 and 1.0, inclusive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDepthBounds-minDepthBounds-00600)"},
+    {"VUID-vkCmdSetDepthBounds-minDepthBounds-02508", "minDepthBounds must be between 0.0 and 1.0, inclusive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDepthBounds-minDepthBounds-02508)"},
+    {"VUID-vkCmdSetDeviceMask-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, compute, or transfer operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDeviceMask-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdSetDeviceMask-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDeviceMask-commandBuffer-parameter)"},
+    {"VUID-vkCmdSetDeviceMask-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDeviceMask-commandBuffer-recording)"},
+    {"VUID-vkCmdSetDeviceMask-deviceMask-00108", "deviceMask must be a valid device mask value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDeviceMask-deviceMask-00108)"},
+    {"VUID-vkCmdSetDeviceMask-deviceMask-00109", "deviceMask must not be zero (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDeviceMask-deviceMask-00109)"},
+    {"VUID-vkCmdSetDeviceMask-deviceMask-00110", "deviceMask must not include any set bits that were not in the VkDeviceGroupCommandBufferBeginInfo::deviceMask value when the command buffer began recording. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDeviceMask-deviceMask-00110)"},
+    {"VUID-vkCmdSetDeviceMask-deviceMask-00111", "If vkCmdSetDeviceMask is called inside a render pass instance, deviceMask must not include any set bits that were not in the VkDeviceGroupRenderPassBeginInfo::deviceMask value when the render pass instance began recording. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDeviceMask-deviceMask-00111)"},
+    {"VUID-vkCmdSetDiscardRectangleEXT-None-00583", "The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT dynamic state enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDiscardRectangleEXT-None-00583)"},
+    {"VUID-vkCmdSetDiscardRectangleEXT-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDiscardRectangleEXT-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdSetDiscardRectangleEXT-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDiscardRectangleEXT-commandBuffer-parameter)"},
+    {"VUID-vkCmdSetDiscardRectangleEXT-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDiscardRectangleEXT-commandBuffer-recording)"},
+    {"VUID-vkCmdSetDiscardRectangleEXT-discardRectangleCount-arraylength", "discardRectangleCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDiscardRectangleEXT-discardRectangleCount-arraylength)"},
+    {"VUID-vkCmdSetDiscardRectangleEXT-firstDiscardRectangle-00585", "The sum of firstDiscardRectangle and discardRectangleCount must be less than or equal to VkPhysicalDeviceDiscardRectanglePropertiesEXT::maxDiscardRectangles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDiscardRectangleEXT-firstDiscardRectangle-00585)"},
+    {"VUID-vkCmdSetDiscardRectangleEXT-offset-00588", "Evaluation of (offset.x + extent.width) in each VkRect2D element of pDiscardRectangles must not cause a signed integer addition overflow (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDiscardRectangleEXT-offset-00588)"},
+    {"VUID-vkCmdSetDiscardRectangleEXT-offset-00589", "Evaluation of (offset.y + extent.height) in each VkRect2D element of pDiscardRectangles must not cause a signed integer addition overflow (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDiscardRectangleEXT-offset-00589)"},
+    {"VUID-vkCmdSetDiscardRectangleEXT-pDiscardRectangles-parameter", "pDiscardRectangles must be a valid pointer to an array of discardRectangleCount VkRect2D structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDiscardRectangleEXT-pDiscardRectangles-parameter)"},
+    {"VUID-vkCmdSetDiscardRectangleEXT-x-00587", "The x and y member of offset in each VkRect2D element of pDiscardRectangles must be greater than or equal to 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetDiscardRectangleEXT-x-00587)"},
+    {"VUID-vkCmdSetEvent-commandBuffer-01152", "commandBuffer's current device mask must include exactly one physical device. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetEvent-commandBuffer-01152)"},
+    {"VUID-vkCmdSetEvent-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetEvent-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdSetEvent-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetEvent-commandBuffer-parameter)"},
+    {"VUID-vkCmdSetEvent-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetEvent-commandBuffer-recording)"},
+    {"VUID-vkCmdSetEvent-commonparent", "Both of commandBuffer, and event must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetEvent-commonparent)"},
+    {"VUID-vkCmdSetEvent-event-parameter", "event must be a valid VkEvent handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetEvent-event-parameter)"},
+    {"VUID-vkCmdSetEvent-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetEvent-renderpass)"},
+    {"VUID-vkCmdSetEvent-stageMask-01149", "stageMask must not include VK_PIPELINE_STAGE_HOST_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetEvent-stageMask-01149)"},
+    {"VUID-vkCmdSetEvent-stageMask-01150", "If the geometry shaders feature is not enabled, stageMask must not contain VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetEvent-stageMask-01150)"},
+    {"VUID-vkCmdSetEvent-stageMask-01151", "If the tessellation shaders feature is not enabled, stageMask must not contain VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT or VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetEvent-stageMask-01151)"},
+    {"VUID-vkCmdSetEvent-stageMask-02107", "If the mesh shaders feature is not enabled, stageMask must not contain VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetEvent-stageMask-02107)"},
+    {"VUID-vkCmdSetEvent-stageMask-02108", "If the task shaders feature is not enabled, stageMask must not contain VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetEvent-stageMask-02108)"},
+    {"VUID-vkCmdSetEvent-stageMask-parameter", "stageMask must be a valid combination of VkPipelineStageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetEvent-stageMask-parameter)"},
+    {"VUID-vkCmdSetEvent-stageMask-requiredbitmask", "stageMask must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetEvent-stageMask-requiredbitmask)"},
+    {"VUID-vkCmdSetExclusiveScissorNV-None-02031", "The exclusive scissor feature must be enabled. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetExclusiveScissorNV-None-02031)"},
+    {"VUID-vkCmdSetExclusiveScissorNV-None-02032", "The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV dynamic state enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetExclusiveScissorNV-None-02032)"},
+    {"VUID-vkCmdSetExclusiveScissorNV-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetExclusiveScissorNV-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdSetExclusiveScissorNV-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetExclusiveScissorNV-commandBuffer-parameter)"},
+    {"VUID-vkCmdSetExclusiveScissorNV-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetExclusiveScissorNV-commandBuffer-recording)"},
+    {"VUID-vkCmdSetExclusiveScissorNV-exclusiveScissorCount-02036", "If the multiple viewports feature is not enabled, exclusiveScissorCount must be 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetExclusiveScissorNV-exclusiveScissorCount-02036)"},
+    {"VUID-vkCmdSetExclusiveScissorNV-exclusiveScissorCount-arraylength", "exclusiveScissorCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetExclusiveScissorNV-exclusiveScissorCount-arraylength)"},
+    {"VUID-vkCmdSetExclusiveScissorNV-firstExclusiveScissor-02033", "firstExclusiveScissor must be less than VkPhysicalDeviceLimits::maxViewports (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetExclusiveScissorNV-firstExclusiveScissor-02033)"},
+    {"VUID-vkCmdSetExclusiveScissorNV-firstExclusiveScissor-02034", "The sum of firstExclusiveScissor and exclusiveScissorCount must be between 1 and VkPhysicalDeviceLimits::maxViewports, inclusive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetExclusiveScissorNV-firstExclusiveScissor-02034)"},
+    {"VUID-vkCmdSetExclusiveScissorNV-firstExclusiveScissor-02035", "If the multiple viewports feature is not enabled, firstExclusiveScissor must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetExclusiveScissorNV-firstExclusiveScissor-02035)"},
+    {"VUID-vkCmdSetExclusiveScissorNV-offset-02038", "Evaluation of (offset.x + extent.width) for each member of pExclusiveScissors must not cause a signed integer addition overflow (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetExclusiveScissorNV-offset-02038)"},
+    {"VUID-vkCmdSetExclusiveScissorNV-offset-02039", "Evaluation of (offset.y + extent.height) for each member of pExclusiveScissors must not cause a signed integer addition overflow (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetExclusiveScissorNV-offset-02039)"},
+    {"VUID-vkCmdSetExclusiveScissorNV-pExclusiveScissors-parameter", "pExclusiveScissors must be a valid pointer to an array of exclusiveScissorCount VkRect2D structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetExclusiveScissorNV-pExclusiveScissors-parameter)"},
+    {"VUID-vkCmdSetExclusiveScissorNV-x-02037", "The x and y members of offset in each member of pExclusiveScissors must be greater than or equal to 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetExclusiveScissorNV-x-02037)"},
+    {"VUID-vkCmdSetLineStippleEXT-None-02775", "The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_LINE_STIPPLE_EXT dynamic state enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetLineStippleEXT-None-02775)"},
+    {"VUID-vkCmdSetLineStippleEXT-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetLineStippleEXT-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdSetLineStippleEXT-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetLineStippleEXT-commandBuffer-parameter)"},
+    {"VUID-vkCmdSetLineStippleEXT-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetLineStippleEXT-commandBuffer-recording)"},
+    {"VUID-vkCmdSetLineStippleEXT-lineStippleFactor-02776", "lineStippleFactor must be in the range [1,256] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetLineStippleEXT-lineStippleFactor-02776)"},
+    {"VUID-vkCmdSetLineWidth-None-00787", "The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_LINE_WIDTH dynamic state enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetLineWidth-None-00787)"},
+    {"VUID-vkCmdSetLineWidth-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetLineWidth-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdSetLineWidth-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetLineWidth-commandBuffer-parameter)"},
+    {"VUID-vkCmdSetLineWidth-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetLineWidth-commandBuffer-recording)"},
+    {"VUID-vkCmdSetLineWidth-lineWidth-00788", "If the wide lines feature is not enabled, lineWidth must be 1.0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetLineWidth-lineWidth-00788)"},
+    {"VUID-vkCmdSetPerformanceMarkerINTEL-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, compute, or transfer operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetPerformanceMarkerINTEL-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdSetPerformanceMarkerINTEL-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetPerformanceMarkerINTEL-commandBuffer-parameter)"},
+    {"VUID-vkCmdSetPerformanceMarkerINTEL-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetPerformanceMarkerINTEL-commandBuffer-recording)"},
+    {"VUID-vkCmdSetPerformanceMarkerINTEL-pMarkerInfo-parameter", "pMarkerInfo must be a valid pointer to a valid VkPerformanceMarkerInfoINTEL structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetPerformanceMarkerINTEL-pMarkerInfo-parameter)"},
+    {"VUID-vkCmdSetPerformanceOverrideINTEL-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, compute, or transfer operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetPerformanceOverrideINTEL-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdSetPerformanceOverrideINTEL-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetPerformanceOverrideINTEL-commandBuffer-parameter)"},
+    {"VUID-vkCmdSetPerformanceOverrideINTEL-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetPerformanceOverrideINTEL-commandBuffer-recording)"},
+    {"VUID-vkCmdSetPerformanceOverrideINTEL-pOverrideInfo-02736", "pOverrideInfo must not be used with a VkPerformanceOverrideTypeINTEL that is not reported available by vkGetPerformanceParameterINTEL. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetPerformanceOverrideINTEL-pOverrideInfo-02736)"},
+    {"VUID-vkCmdSetPerformanceOverrideINTEL-pOverrideInfo-parameter", "pOverrideInfo must be a valid pointer to a valid VkPerformanceOverrideInfoINTEL structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetPerformanceOverrideINTEL-pOverrideInfo-parameter)"},
+    {"VUID-vkCmdSetPerformanceStreamMarkerINTEL-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, compute, or transfer operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetPerformanceStreamMarkerINTEL-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdSetPerformanceStreamMarkerINTEL-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetPerformanceStreamMarkerINTEL-commandBuffer-parameter)"},
+    {"VUID-vkCmdSetPerformanceStreamMarkerINTEL-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetPerformanceStreamMarkerINTEL-commandBuffer-recording)"},
+    {"VUID-vkCmdSetPerformanceStreamMarkerINTEL-pMarkerInfo-parameter", "pMarkerInfo must be a valid pointer to a valid VkPerformanceStreamMarkerInfoINTEL structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetPerformanceStreamMarkerINTEL-pMarkerInfo-parameter)"},
+    {"VUID-vkCmdSetSampleLocationsEXT-None-01528", "The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT dynamic state enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetSampleLocationsEXT-None-01528)"},
+    {"VUID-vkCmdSetSampleLocationsEXT-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetSampleLocationsEXT-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdSetSampleLocationsEXT-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetSampleLocationsEXT-commandBuffer-parameter)"},
+    {"VUID-vkCmdSetSampleLocationsEXT-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetSampleLocationsEXT-commandBuffer-recording)"},
+    {"VUID-vkCmdSetSampleLocationsEXT-pSampleLocationsInfo-parameter", "pSampleLocationsInfo must be a valid pointer to a valid VkSampleLocationsInfoEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetSampleLocationsEXT-pSampleLocationsInfo-parameter)"},
+    {"VUID-vkCmdSetSampleLocationsEXT-sampleLocationsPerPixel-01529", "The sampleLocationsPerPixel member of pSampleLocationsInfo must equal the rasterizationSamples member of the VkPipelineMultisampleStateCreateInfo structure the bound graphics pipeline has been created with (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetSampleLocationsEXT-sampleLocationsPerPixel-01529)"},
+    {"VUID-vkCmdSetSampleLocationsEXT-variableSampleLocations-01530", "If VkPhysicalDeviceSampleLocationsPropertiesEXT::variableSampleLocations is VK_FALSE then the current render pass must have been begun by specifying a VkRenderPassSampleLocationsBeginInfoEXT structure whose pPostSubpassSampleLocations member contains an element with a subpassIndex matching the current subpass index and the sampleLocationsInfo member of that element must match the sample locations state pointed to by pSampleLocationsInfo (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetSampleLocationsEXT-variableSampleLocations-01530)"},
+    {"VUID-vkCmdSetScissor-None-00590", "The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_SCISSOR dynamic state enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetScissor-None-00590)"},
+    {"VUID-vkCmdSetScissor-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetScissor-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdSetScissor-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetScissor-commandBuffer-parameter)"},
+    {"VUID-vkCmdSetScissor-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetScissor-commandBuffer-recording)"},
+    {"VUID-vkCmdSetScissor-firstScissor-00591", "firstScissor must be less than VkPhysicalDeviceLimits::maxViewports (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetScissor-firstScissor-00591)"},
+    {"VUID-vkCmdSetScissor-firstScissor-00592", "The sum of firstScissor and scissorCount must be between 1 and VkPhysicalDeviceLimits::maxViewports, inclusive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetScissor-firstScissor-00592)"},
+    {"VUID-vkCmdSetScissor-firstScissor-00593", "If the multiple viewports feature is not enabled, firstScissor must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetScissor-firstScissor-00593)"},
+    {"VUID-vkCmdSetScissor-offset-00596", "Evaluation of (offset.x + extent.width) must not cause a signed integer addition overflow for any element of pScissors (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetScissor-offset-00596)"},
+    {"VUID-vkCmdSetScissor-offset-00597", "Evaluation of (offset.y + extent.height) must not cause a signed integer addition overflow for any element of pScissors (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetScissor-offset-00597)"},
+    {"VUID-vkCmdSetScissor-pScissors-parameter", "pScissors must be a valid pointer to an array of scissorCount VkRect2D structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetScissor-pScissors-parameter)"},
+    {"VUID-vkCmdSetScissor-scissorCount-00594", "If the multiple viewports feature is not enabled, scissorCount must be 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetScissor-scissorCount-00594)"},
+    {"VUID-vkCmdSetScissor-scissorCount-arraylength", "scissorCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetScissor-scissorCount-arraylength)"},
+    {"VUID-vkCmdSetScissor-x-00595", "The x and y members of offset member of any element of pScissors must be greater than or equal to 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetScissor-x-00595)"},
+    {"VUID-vkCmdSetStencilCompareMask-None-00602", "The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK dynamic state enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetStencilCompareMask-None-00602)"},
+    {"VUID-vkCmdSetStencilCompareMask-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetStencilCompareMask-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdSetStencilCompareMask-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetStencilCompareMask-commandBuffer-parameter)"},
+    {"VUID-vkCmdSetStencilCompareMask-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetStencilCompareMask-commandBuffer-recording)"},
+    {"VUID-vkCmdSetStencilCompareMask-faceMask-parameter", "faceMask must be a valid combination of VkStencilFaceFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetStencilCompareMask-faceMask-parameter)"},
+    {"VUID-vkCmdSetStencilCompareMask-faceMask-requiredbitmask", "faceMask must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetStencilCompareMask-faceMask-requiredbitmask)"},
+    {"VUID-vkCmdSetStencilReference-None-00604", "The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_STENCIL_REFERENCE dynamic state enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetStencilReference-None-00604)"},
+    {"VUID-vkCmdSetStencilReference-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetStencilReference-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdSetStencilReference-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetStencilReference-commandBuffer-parameter)"},
+    {"VUID-vkCmdSetStencilReference-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetStencilReference-commandBuffer-recording)"},
+    {"VUID-vkCmdSetStencilReference-faceMask-parameter", "faceMask must be a valid combination of VkStencilFaceFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetStencilReference-faceMask-parameter)"},
+    {"VUID-vkCmdSetStencilReference-faceMask-requiredbitmask", "faceMask must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetStencilReference-faceMask-requiredbitmask)"},
+    {"VUID-vkCmdSetStencilWriteMask-None-00603", "The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_STENCIL_WRITE_MASK dynamic state enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetStencilWriteMask-None-00603)"},
+    {"VUID-vkCmdSetStencilWriteMask-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetStencilWriteMask-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdSetStencilWriteMask-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetStencilWriteMask-commandBuffer-parameter)"},
+    {"VUID-vkCmdSetStencilWriteMask-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetStencilWriteMask-commandBuffer-recording)"},
+    {"VUID-vkCmdSetStencilWriteMask-faceMask-parameter", "faceMask must be a valid combination of VkStencilFaceFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetStencilWriteMask-faceMask-parameter)"},
+    {"VUID-vkCmdSetStencilWriteMask-faceMask-requiredbitmask", "faceMask must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetStencilWriteMask-faceMask-requiredbitmask)"},
+    {"VUID-vkCmdSetViewport-None-01221", "The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_VIEWPORT dynamic state enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewport-None-01221)"},
+    {"VUID-vkCmdSetViewport-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewport-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdSetViewport-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewport-commandBuffer-parameter)"},
+    {"VUID-vkCmdSetViewport-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewport-commandBuffer-recording)"},
+    {"VUID-vkCmdSetViewport-firstViewport-01222", "firstViewport must be less than VkPhysicalDeviceLimits::maxViewports (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewport-firstViewport-01222)"},
+    {"VUID-vkCmdSetViewport-firstViewport-01223", "The sum of firstViewport and viewportCount must be between 1 and VkPhysicalDeviceLimits::maxViewports, inclusive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewport-firstViewport-01223)"},
+    {"VUID-vkCmdSetViewport-firstViewport-01224", "If the multiple viewports feature is not enabled, firstViewport must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewport-firstViewport-01224)"},
+    {"VUID-vkCmdSetViewport-pViewports-parameter", "pViewports must be a valid pointer to an array of viewportCount valid VkViewport structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewport-pViewports-parameter)"},
+    {"VUID-vkCmdSetViewport-viewportCount-01225", "If the multiple viewports feature is not enabled, viewportCount must be 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewport-viewportCount-01225)"},
+    {"VUID-vkCmdSetViewport-viewportCount-arraylength", "viewportCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewport-viewportCount-arraylength)"},
+    {"VUID-vkCmdSetViewportShadingRatePaletteNV-None-02064", "The shading rate image feature must be enabled. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewportShadingRatePaletteNV-None-02064)"},
+    {"VUID-vkCmdSetViewportShadingRatePaletteNV-None-02065", "The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV dynamic state enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewportShadingRatePaletteNV-None-02065)"},
+    {"VUID-vkCmdSetViewportShadingRatePaletteNV-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewportShadingRatePaletteNV-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdSetViewportShadingRatePaletteNV-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewportShadingRatePaletteNV-commandBuffer-parameter)"},
+    {"VUID-vkCmdSetViewportShadingRatePaletteNV-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewportShadingRatePaletteNV-commandBuffer-recording)"},
+    {"VUID-vkCmdSetViewportShadingRatePaletteNV-firstViewport-02066", "firstViewport must be less than VkPhysicalDeviceLimits::maxViewports (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewportShadingRatePaletteNV-firstViewport-02066)"},
+    {"VUID-vkCmdSetViewportShadingRatePaletteNV-firstViewport-02067", "The sum of firstViewport and viewportCount must be between 1 and VkPhysicalDeviceLimits::maxViewports, inclusive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewportShadingRatePaletteNV-firstViewport-02067)"},
+    {"VUID-vkCmdSetViewportShadingRatePaletteNV-firstViewport-02068", "If the multiple viewports feature is not enabled, firstViewport must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewportShadingRatePaletteNV-firstViewport-02068)"},
+    {"VUID-vkCmdSetViewportShadingRatePaletteNV-pShadingRatePalettes-parameter", "pShadingRatePalettes must be a valid pointer to an array of viewportCount valid VkShadingRatePaletteNV structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewportShadingRatePaletteNV-pShadingRatePalettes-parameter)"},
+    {"VUID-vkCmdSetViewportShadingRatePaletteNV-viewportCount-02069", "If the multiple viewports feature is not enabled, viewportCount must be 1 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewportShadingRatePaletteNV-viewportCount-02069)"},
+    {"VUID-vkCmdSetViewportShadingRatePaletteNV-viewportCount-arraylength", "viewportCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewportShadingRatePaletteNV-viewportCount-arraylength)"},
+    {"VUID-vkCmdSetViewportWScalingNV-None-01322", "The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV dynamic state enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewportWScalingNV-None-01322)"},
+    {"VUID-vkCmdSetViewportWScalingNV-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewportWScalingNV-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdSetViewportWScalingNV-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewportWScalingNV-commandBuffer-parameter)"},
+    {"VUID-vkCmdSetViewportWScalingNV-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewportWScalingNV-commandBuffer-recording)"},
+    {"VUID-vkCmdSetViewportWScalingNV-firstViewport-01323", "firstViewport must be less than VkPhysicalDeviceLimits::maxViewports (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewportWScalingNV-firstViewport-01323)"},
+    {"VUID-vkCmdSetViewportWScalingNV-firstViewport-01324", "The sum of firstViewport and viewportCount must be between 1 and VkPhysicalDeviceLimits::maxViewports, inclusive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewportWScalingNV-firstViewport-01324)"},
+    {"VUID-vkCmdSetViewportWScalingNV-pViewportWScalings-parameter", "pViewportWScalings must be a valid pointer to an array of viewportCount VkViewportWScalingNV structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewportWScalingNV-pViewportWScalings-parameter)"},
+    {"VUID-vkCmdSetViewportWScalingNV-viewportCount-arraylength", "viewportCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdSetViewportWScalingNV-viewportCount-arraylength)"},
+    {"VUID-vkCmdTraceRaysNV-None-02690", "If a VkImageView is sampled with VK_FILTER_LINEAR as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02690)"},
+    {"VUID-vkCmdTraceRaysNV-None-02691", "If a VkImageView is accessed using atomic operations as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02691)"},
+    {"VUID-vkCmdTraceRaysNV-None-02692", "If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view's format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02692)"},
+    {"VUID-vkCmdTraceRaysNV-None-02693", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02693)"},
+    {"VUID-vkCmdTraceRaysNV-None-02697", "For each set n that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02697)"},
+    {"VUID-vkCmdTraceRaysNV-None-02698", "For each push constant that is statically used by the VkPipeline bound to the pipeline bind point used by this command, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in Pipeline Layout Compatibility (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02698)"},
+    {"VUID-vkCmdTraceRaysNV-None-02699", "Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02699)"},
+    {"VUID-vkCmdTraceRaysNV-None-02700", "A valid pipeline must be bound to the pipeline bind point used by this command (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02700)"},
+    {"VUID-vkCmdTraceRaysNV-None-02702", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02702)"},
+    {"VUID-vkCmdTraceRaysNV-None-02703", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02703)"},
+    {"VUID-vkCmdTraceRaysNV-None-02704", "If the VkPipeline object bound to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02704)"},
+    {"VUID-vkCmdTraceRaysNV-None-02705", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02705)"},
+    {"VUID-vkCmdTraceRaysNV-None-02706", "If the robust buffer access feature is not enabled, and if the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-None-02706)"},
+    {"VUID-vkCmdTraceRaysNV-callableShaderBindingOffset-02461", "callableShaderBindingOffset must be less than the size of callableShaderBindingTableBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-callableShaderBindingOffset-02461)"},
+    {"VUID-vkCmdTraceRaysNV-callableShaderBindingOffset-02462", "callableShaderBindingOffset must be a multiple of VkPhysicalDeviceRayTracingPropertiesNV::shaderGroupBaseAlignment (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-callableShaderBindingOffset-02462)"},
+    {"VUID-vkCmdTraceRaysNV-callableShaderBindingStride-02465", "callableShaderBindingStride must be a multiple of VkPhysicalDeviceRayTracingPropertiesNV::shaderGroupHandleSize (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-callableShaderBindingStride-02465)"},
+    {"VUID-vkCmdTraceRaysNV-callableShaderBindingStride-02468", "callableShaderBindingStride must be a less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxShaderGroupStride (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-callableShaderBindingStride-02468)"},
+    {"VUID-vkCmdTraceRaysNV-callableShaderBindingTableBuffer-parameter", "If callableShaderBindingTableBuffer is not VK_NULL_HANDLE, callableShaderBindingTableBuffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-callableShaderBindingTableBuffer-parameter)"},
+    {"VUID-vkCmdTraceRaysNV-commandBuffer-02701", "If the VkPipeline object bound to the pipeline bind point used by this command requires any dynamic state, that state must have been set for commandBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-commandBuffer-02701)"},
+    {"VUID-vkCmdTraceRaysNV-commandBuffer-02707", "If commandBuffer is an unprotected command buffer, any resource accessed by the VkPipeline object bound to the pipeline bind point used by this command must not be a protected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-commandBuffer-02707)"},
+    {"VUID-vkCmdTraceRaysNV-commandBuffer-02712", "If commandBuffer is a protected command buffer, any resource written to by the VkPipeline object bound to the pipeline bind point used by this command must not be an unprotected resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-commandBuffer-02712)"},
+    {"VUID-vkCmdTraceRaysNV-commandBuffer-02713", "If commandBuffer is a protected command buffer, pipeline stages other than the framebuffer-space and compute stages in the VkPipeline object bound to the pipeline bind point must not write to any resource (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-commandBuffer-02713)"},
+    {"VUID-vkCmdTraceRaysNV-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdTraceRaysNV-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-commandBuffer-parameter)"},
+    {"VUID-vkCmdTraceRaysNV-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-commandBuffer-recording)"},
+    {"VUID-vkCmdTraceRaysNV-commonparent", "Each of callableShaderBindingTableBuffer, commandBuffer, hitShaderBindingTableBuffer, missShaderBindingTableBuffer, and raygenShaderBindingTableBuffer that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-commonparent)"},
+    {"VUID-vkCmdTraceRaysNV-depth-02471", "depth must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[2] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-depth-02471)"},
+    {"VUID-vkCmdTraceRaysNV-filterCubic-02694", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-filterCubic-02694)"},
+    {"VUID-vkCmdTraceRaysNV-filterCubicMinmax-02695", "Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN_EXT or VK_SAMPLER_REDUCTION_MODE_MAX_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-filterCubicMinmax-02695)"},
+    {"VUID-vkCmdTraceRaysNV-flags-02696", "Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-flags-02696)"},
+    {"VUID-vkCmdTraceRaysNV-height-02470", "height must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[1] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-height-02470)"},
+    {"VUID-vkCmdTraceRaysNV-hitShaderBindingOffset-02459", "hitShaderBindingOffset must be less than the size of hitShaderBindingTableBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-hitShaderBindingOffset-02459)"},
+    {"VUID-vkCmdTraceRaysNV-hitShaderBindingOffset-02460", "hitShaderBindingOffset must be a multiple of VkPhysicalDeviceRayTracingPropertiesNV::shaderGroupBaseAlignment (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-hitShaderBindingOffset-02460)"},
+    {"VUID-vkCmdTraceRaysNV-hitShaderBindingStride-02464", "hitShaderBindingStride must be a multiple of VkPhysicalDeviceRayTracingPropertiesNV::shaderGroupHandleSize (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-hitShaderBindingStride-02464)"},
+    {"VUID-vkCmdTraceRaysNV-hitShaderBindingStride-02467", "hitShaderBindingStride must be a less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxShaderGroupStride (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-hitShaderBindingStride-02467)"},
+    {"VUID-vkCmdTraceRaysNV-hitShaderBindingTableBuffer-parameter", "If hitShaderBindingTableBuffer is not VK_NULL_HANDLE, hitShaderBindingTableBuffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-hitShaderBindingTableBuffer-parameter)"},
+    {"VUID-vkCmdTraceRaysNV-missShaderBindingOffset-02457", "missShaderBindingOffset must be less than the size of missShaderBindingTableBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-missShaderBindingOffset-02457)"},
+    {"VUID-vkCmdTraceRaysNV-missShaderBindingOffset-02458", "missShaderBindingOffset must be a multiple of VkPhysicalDeviceRayTracingPropertiesNV::shaderGroupBaseAlignment (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-missShaderBindingOffset-02458)"},
+    {"VUID-vkCmdTraceRaysNV-missShaderBindingStride-02463", "missShaderBindingStride must be a multiple of VkPhysicalDeviceRayTracingPropertiesNV::shaderGroupHandleSize (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-missShaderBindingStride-02463)"},
+    {"VUID-vkCmdTraceRaysNV-missShaderBindingStride-02466", "missShaderBindingStride must be a less than or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxShaderGroupStride (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-missShaderBindingStride-02466)"},
+    {"VUID-vkCmdTraceRaysNV-missShaderBindingTableBuffer-parameter", "If missShaderBindingTableBuffer is not VK_NULL_HANDLE, missShaderBindingTableBuffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-missShaderBindingTableBuffer-parameter)"},
+    {"VUID-vkCmdTraceRaysNV-raygenShaderBindingOffset-02455", "raygenShaderBindingOffset must be less than the size of raygenShaderBindingTableBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-raygenShaderBindingOffset-02455)"},
+    {"VUID-vkCmdTraceRaysNV-raygenShaderBindingOffset-02456", "raygenShaderBindingOffset must be a multiple of VkPhysicalDeviceRayTracingPropertiesNV::shaderGroupBaseAlignment (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-raygenShaderBindingOffset-02456)"},
+    {"VUID-vkCmdTraceRaysNV-raygenShaderBindingTableBuffer-parameter", "raygenShaderBindingTableBuffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-raygenShaderBindingTableBuffer-parameter)"},
+    {"VUID-vkCmdTraceRaysNV-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-renderpass)"},
+    {"VUID-vkCmdTraceRaysNV-width-02469", "width must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[0] (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdTraceRaysNV-width-02469)"},
+    {"VUID-vkCmdUpdateBuffer-commandBuffer-01813", "If commandBuffer is an unprotected command buffer, then dstBuffer must not be a protected buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdUpdateBuffer-commandBuffer-01813)"},
+    {"VUID-vkCmdUpdateBuffer-commandBuffer-01814", "If commandBuffer is a protected command buffer, then dstBuffer must not be an unprotected buffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdUpdateBuffer-commandBuffer-01814)"},
+    {"VUID-vkCmdUpdateBuffer-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support transfer, graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdUpdateBuffer-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdUpdateBuffer-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdUpdateBuffer-commandBuffer-parameter)"},
+    {"VUID-vkCmdUpdateBuffer-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdUpdateBuffer-commandBuffer-recording)"},
+    {"VUID-vkCmdUpdateBuffer-commonparent", "Both of commandBuffer, and dstBuffer must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdUpdateBuffer-commonparent)"},
+    {"VUID-vkCmdUpdateBuffer-dataSize-00033", "dataSize must be less than or equal to the size of dstBuffer minus dstOffset (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdUpdateBuffer-dataSize-00033)"},
+    {"VUID-vkCmdUpdateBuffer-dataSize-00037", "dataSize must be less than or equal to 65536 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdUpdateBuffer-dataSize-00037)"},
+    {"VUID-vkCmdUpdateBuffer-dataSize-00038", "dataSize must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdUpdateBuffer-dataSize-00038)"},
+    {"VUID-vkCmdUpdateBuffer-dataSize-arraylength", "dataSize must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdUpdateBuffer-dataSize-arraylength)"},
+    {"VUID-vkCmdUpdateBuffer-dstBuffer-00034", "dstBuffer must have been created with VK_BUFFER_USAGE_TRANSFER_DST_BIT usage flag (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdUpdateBuffer-dstBuffer-00034)"},
+    {"VUID-vkCmdUpdateBuffer-dstBuffer-00035", "If dstBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdUpdateBuffer-dstBuffer-00035)"},
+    {"VUID-vkCmdUpdateBuffer-dstBuffer-parameter", "dstBuffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdUpdateBuffer-dstBuffer-parameter)"},
+    {"VUID-vkCmdUpdateBuffer-dstOffset-00032", "dstOffset must be less than the size of dstBuffer (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdUpdateBuffer-dstOffset-00032)"},
+    {"VUID-vkCmdUpdateBuffer-dstOffset-00036", "dstOffset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdUpdateBuffer-dstOffset-00036)"},
+    {"VUID-vkCmdUpdateBuffer-pData-parameter", "pData must be a valid pointer to an array of dataSize bytes (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdUpdateBuffer-pData-parameter)"},
+    {"VUID-vkCmdUpdateBuffer-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdUpdateBuffer-renderpass)"},
+    {"VUID-vkCmdWaitEvents-commandBuffer-01167", "commandBuffer's current device mask must include exactly one physical device. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-commandBuffer-01167)"},
+    {"VUID-vkCmdWaitEvents-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdWaitEvents-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-commandBuffer-parameter)"},
+    {"VUID-vkCmdWaitEvents-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-commandBuffer-recording)"},
+    {"VUID-vkCmdWaitEvents-commonparent", "Both of commandBuffer, and the elements of pEvents must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-commonparent)"},
+    {"VUID-vkCmdWaitEvents-dstAccessMask-02810", "The dstAccessMask member of each element of pMemoryBarriers must only include access flags that are supported by one or more of the pipeline stages in dstStageMask, as specified in the table of supported access types (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-dstAccessMask-02810)"},
+    {"VUID-vkCmdWaitEvents-dstAccessMask-02812", "The dstAccessMask member of each element of pBufferMemoryBarriers must only include access flags that are supported by one or more of the pipeline stages in dstStageMask, as specified in the table of supported access types (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-dstAccessMask-02812)"},
+    {"VUID-vkCmdWaitEvents-dstAccessMask-02814", "The dstAccessMask member of any element of pImageMemoryBarriers must only include access flags that are supported by one or more of the pipeline stages in dstStageMask, as specified in the table of supported access types (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-dstAccessMask-02814)"},
+    {"VUID-vkCmdWaitEvents-dstStageMask-01160", "If the geometry shaders feature is not enabled, dstStageMask must not contain VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-dstStageMask-01160)"},
+    {"VUID-vkCmdWaitEvents-dstStageMask-01162", "If the tessellation shaders feature is not enabled, dstStageMask must not contain VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT or VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-dstStageMask-01162)"},
+    {"VUID-vkCmdWaitEvents-dstStageMask-02113", "If the mesh shaders feature is not enabled, dstStageMask must not contain VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-dstStageMask-02113)"},
+    {"VUID-vkCmdWaitEvents-dstStageMask-02114", "If the task shaders feature is not enabled, dstStageMask must not contain VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-dstStageMask-02114)"},
+    {"VUID-vkCmdWaitEvents-dstStageMask-parameter", "dstStageMask must be a valid combination of VkPipelineStageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-dstStageMask-parameter)"},
+    {"VUID-vkCmdWaitEvents-dstStageMask-requiredbitmask", "dstStageMask must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-dstStageMask-requiredbitmask)"},
+    {"VUID-vkCmdWaitEvents-eventCount-arraylength", "eventCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-eventCount-arraylength)"},
+    {"VUID-vkCmdWaitEvents-pBufferMemoryBarriers-parameter", "If bufferMemoryBarrierCount is not 0, pBufferMemoryBarriers must be a valid pointer to an array of bufferMemoryBarrierCount valid VkBufferMemoryBarrier structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-pBufferMemoryBarriers-parameter)"},
+    {"VUID-vkCmdWaitEvents-pEvents-01163", "If pEvents includes one or more events that will be signaled by vkSetEvent after commandBuffer has been submitted to a queue, then vkCmdWaitEvents must not be called inside a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-pEvents-01163)"},
+    {"VUID-vkCmdWaitEvents-pEvents-parameter", "pEvents must be a valid pointer to an array of eventCount valid VkEvent handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-pEvents-parameter)"},
+    {"VUID-vkCmdWaitEvents-pImageMemoryBarriers-parameter", "If imageMemoryBarrierCount is not 0, pImageMemoryBarriers must be a valid pointer to an array of imageMemoryBarrierCount valid VkImageMemoryBarrier structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-pImageMemoryBarriers-parameter)"},
+    {"VUID-vkCmdWaitEvents-pMemoryBarriers-01165", "Each element of pMemoryBarriers, pBufferMemoryBarriers or pImageMemoryBarriers must not have any access flag included in its srcAccessMask member if that bit is not supported by any of the pipeline stages in srcStageMask, as specified in the table of supported access types. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-pMemoryBarriers-01165)"},
+    {"VUID-vkCmdWaitEvents-pMemoryBarriers-01166", "Each element of pMemoryBarriers, pBufferMemoryBarriers or pImageMemoryBarriers must not have any access flag included in its dstAccessMask member if that bit is not supported by any of the pipeline stages in dstStageMask, as specified in the table of supported access types. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-pMemoryBarriers-01166)"},
+    {"VUID-vkCmdWaitEvents-pMemoryBarriers-parameter", "If memoryBarrierCount is not 0, pMemoryBarriers must be a valid pointer to an array of memoryBarrierCount valid VkMemoryBarrier structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-pMemoryBarriers-parameter)"},
+    {"VUID-vkCmdWaitEvents-srcAccessMask-02809", "The srcAccessMask member of each element of pMemoryBarriers must only include access flags that are supported by one or more of the pipeline stages in srcStageMask, as specified in the table of supported access types (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-srcAccessMask-02809)"},
+    {"VUID-vkCmdWaitEvents-srcAccessMask-02811", "The srcAccessMask member of each element of pBufferMemoryBarriers must only include access flags that are supported by one or more of the pipeline stages in srcStageMask, as specified in the table of supported access types (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-srcAccessMask-02811)"},
+    {"VUID-vkCmdWaitEvents-srcAccessMask-02813", "The srcAccessMask member of each element of pImageMemoryBarriers must only include access flags that are supported by one or more of the pipeline stages in srcStageMask, as specified in the table of supported access types (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-srcAccessMask-02813)"},
+    {"VUID-vkCmdWaitEvents-srcQueueFamilyIndex-02803", "The srcQueueFamilyIndex and dstQueueFamilyIndex members of any element of pBufferMemoryBarriers or pImageMemoryBarriers must be equal. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-srcQueueFamilyIndex-02803)"},
+    {"VUID-vkCmdWaitEvents-srcStageMask-01158", "srcStageMask must be the bitwise OR of the stageMask parameter used in previous calls to vkCmdSetEvent with any of the members of pEvents and VK_PIPELINE_STAGE_HOST_BIT if any of the members of pEvents was set using vkSetEvent (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-srcStageMask-01158)"},
+    {"VUID-vkCmdWaitEvents-srcStageMask-01159", "If the geometry shaders feature is not enabled, srcStageMask must not contain VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-srcStageMask-01159)"},
+    {"VUID-vkCmdWaitEvents-srcStageMask-01161", "If the tessellation shaders feature is not enabled, srcStageMask must not contain VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT or VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-srcStageMask-01161)"},
+    {"VUID-vkCmdWaitEvents-srcStageMask-01164", "Any pipeline stage included in srcStageMask or dstStageMask must be supported by the capabilities of the queue family specified by the queueFamilyIndex member of the VkCommandPoolCreateInfo structure that was used to create the VkCommandPool that commandBuffer was allocated from, as specified in the table of supported pipeline stages. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-srcStageMask-01164)"},
+    {"VUID-vkCmdWaitEvents-srcStageMask-02111", "If the mesh shaders feature is not enabled, srcStageMask must not contain VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-srcStageMask-02111)"},
+    {"VUID-vkCmdWaitEvents-srcStageMask-02112", "If the task shaders feature is not enabled, srcStageMask must not contain VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-srcStageMask-02112)"},
+    {"VUID-vkCmdWaitEvents-srcStageMask-parameter", "srcStageMask must be a valid combination of VkPipelineStageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-srcStageMask-parameter)"},
+    {"VUID-vkCmdWaitEvents-srcStageMask-requiredbitmask", "srcStageMask must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWaitEvents-srcStageMask-requiredbitmask)"},
+    {"VUID-vkCmdWriteAccelerationStructuresPropertiesNV-accelerationStructureCount-arraylength", "accelerationStructureCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteAccelerationStructuresPropertiesNV-accelerationStructureCount-arraylength)"},
+    {"VUID-vkCmdWriteAccelerationStructuresPropertiesNV-accelerationStructures-02495", "All acceleration structures in accelerationStructures must have been built with VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV if queryType is VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteAccelerationStructuresPropertiesNV-accelerationStructures-02495)"},
+    {"VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commandBuffer-parameter)"},
+    {"VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commandBuffer-recording)"},
+    {"VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commonparent", "Each of commandBuffer, queryPool, and the elements of pAccelerationStructures must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commonparent)"},
+    {"VUID-vkCmdWriteAccelerationStructuresPropertiesNV-pAccelerationStructures-parameter", "pAccelerationStructures must be a valid pointer to an array of accelerationStructureCount valid VkAccelerationStructureNV handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteAccelerationStructuresPropertiesNV-pAccelerationStructures-parameter)"},
+    {"VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryPool-02493", "queryPool must have been created with a queryType matching queryType (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryPool-02493)"},
+    {"VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryPool-02494", "The queries identified by queryPool and firstQuery must be unavailable (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryPool-02494)"},
+    {"VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryPool-parameter", "queryPool must be a valid VkQueryPool handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryPool-parameter)"},
+    {"VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryType-02242", "queryType must be VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryType-02242)"},
+    {"VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryType-parameter", "queryType must be a valid VkQueryType value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteAccelerationStructuresPropertiesNV-queryType-parameter)"},
+    {"VUID-vkCmdWriteAccelerationStructuresPropertiesNV-renderpass", "This command must only be called outside of a render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteAccelerationStructuresPropertiesNV-renderpass)"},
+    {"VUID-vkCmdWriteBufferMarkerAMD-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support transfer, graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteBufferMarkerAMD-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdWriteBufferMarkerAMD-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteBufferMarkerAMD-commandBuffer-parameter)"},
+    {"VUID-vkCmdWriteBufferMarkerAMD-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteBufferMarkerAMD-commandBuffer-recording)"},
+    {"VUID-vkCmdWriteBufferMarkerAMD-commonparent", "Both of commandBuffer, and dstBuffer must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteBufferMarkerAMD-commonparent)"},
+    {"VUID-vkCmdWriteBufferMarkerAMD-dstBuffer-01799", "dstBuffer must have been created with VK_BUFFER_USAGE_TRANSFER_DST_BIT usage flag (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteBufferMarkerAMD-dstBuffer-01799)"},
+    {"VUID-vkCmdWriteBufferMarkerAMD-dstBuffer-01800", "If dstBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteBufferMarkerAMD-dstBuffer-01800)"},
+    {"VUID-vkCmdWriteBufferMarkerAMD-dstBuffer-parameter", "dstBuffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteBufferMarkerAMD-dstBuffer-parameter)"},
+    {"VUID-vkCmdWriteBufferMarkerAMD-dstOffset-01798", "dstOffset must be less than or equal to the size of dstBuffer minus 4. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteBufferMarkerAMD-dstOffset-01798)"},
+    {"VUID-vkCmdWriteBufferMarkerAMD-dstOffset-01801", "dstOffset must be a multiple of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteBufferMarkerAMD-dstOffset-01801)"},
+    {"VUID-vkCmdWriteBufferMarkerAMD-pipelineStage-parameter", "pipelineStage must be a valid VkPipelineStageFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteBufferMarkerAMD-pipelineStage-parameter)"},
+    {"VUID-vkCmdWriteTimestamp-None-00830", "All queries used by the command must be unavailable (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteTimestamp-None-00830)"},
+    {"VUID-vkCmdWriteTimestamp-commandBuffer-cmdpool", "The VkCommandPool that commandBuffer was allocated from must support transfer, graphics, or compute operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteTimestamp-commandBuffer-cmdpool)"},
+    {"VUID-vkCmdWriteTimestamp-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteTimestamp-commandBuffer-parameter)"},
+    {"VUID-vkCmdWriteTimestamp-commandBuffer-recording", "commandBuffer must be in the recording state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteTimestamp-commandBuffer-recording)"},
+    {"VUID-vkCmdWriteTimestamp-commonparent", "Both of commandBuffer, and queryPool must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteTimestamp-commonparent)"},
+    {"VUID-vkCmdWriteTimestamp-pipelineStage-parameter", "pipelineStage must be a valid VkPipelineStageFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteTimestamp-pipelineStage-parameter)"},
+    {"VUID-vkCmdWriteTimestamp-query-00831", "If vkCmdWriteTimestamp is called within a render pass instance, the sum of query and the number of bits set in the current subpass's view mask must be less than or equal to the number of queries in queryPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteTimestamp-query-00831)"},
+    {"VUID-vkCmdWriteTimestamp-queryPool-00828", "The query identified by queryPool and query must be unavailable (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteTimestamp-queryPool-00828)"},
+    {"VUID-vkCmdWriteTimestamp-queryPool-01416", "queryPool must have been created with a queryType of VK_QUERY_TYPE_TIMESTAMP (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteTimestamp-queryPool-01416)"},
+    {"VUID-vkCmdWriteTimestamp-queryPool-parameter", "queryPool must be a valid VkQueryPool handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteTimestamp-queryPool-parameter)"},
+    {"VUID-vkCmdWriteTimestamp-timestampValidBits-00829", "The command pool's queue family must support a non-zero timestampValidBits (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCmdWriteTimestamp-timestampValidBits-00829)"},
+    {"VUID-vkCompileDeferredNV-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCompileDeferredNV-device-parameter)"},
+    {"VUID-vkCompileDeferredNV-pipeline-02237", "pipeline must have been created with VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCompileDeferredNV-pipeline-02237)"},
+    {"VUID-vkCompileDeferredNV-pipeline-parameter", "pipeline must be a valid VkPipeline handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCompileDeferredNV-pipeline-parameter)"},
+    {"VUID-vkCompileDeferredNV-pipeline-parent", "pipeline must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCompileDeferredNV-pipeline-parent)"},
+    {"VUID-vkCompileDeferredNV-shader-02238", "shader must not have been called as a deferred compile before (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCompileDeferredNV-shader-02238)"},
+    {"VUID-vkCreateAccelerationStructureNV-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateAccelerationStructureNV-device-parameter)"},
+    {"VUID-vkCreateAccelerationStructureNV-pAccelerationStructure-parameter", "pAccelerationStructure must be a valid pointer to a VkAccelerationStructureNV handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateAccelerationStructureNV-pAccelerationStructure-parameter)"},
+    {"VUID-vkCreateAccelerationStructureNV-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateAccelerationStructureNV-pAllocator-parameter)"},
+    {"VUID-vkCreateAccelerationStructureNV-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkAccelerationStructureCreateInfoNV structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateAccelerationStructureNV-pCreateInfo-parameter)"},
+    {"VUID-vkCreateAndroidSurfaceKHR-instance-parameter", "instance must be a valid VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateAndroidSurfaceKHR-instance-parameter)"},
+    {"VUID-vkCreateAndroidSurfaceKHR-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateAndroidSurfaceKHR-pAllocator-parameter)"},
+    {"VUID-vkCreateAndroidSurfaceKHR-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkAndroidSurfaceCreateInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateAndroidSurfaceKHR-pCreateInfo-parameter)"},
+    {"VUID-vkCreateAndroidSurfaceKHR-pSurface-parameter", "pSurface must be a valid pointer to a VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateAndroidSurfaceKHR-pSurface-parameter)"},
+    {"VUID-vkCreateBuffer-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateBuffer-device-parameter)"},
+    {"VUID-vkCreateBuffer-flags-00911", "If the flags member of pCreateInfo includes VK_BUFFER_CREATE_SPARSE_BINDING_BIT, creating this VkBuffer must not cause the total required sparse memory for all currently valid sparse resources on the device to exceed VkPhysicalDeviceLimits::sparseAddressSpaceSize (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateBuffer-flags-00911)"},
+    {"VUID-vkCreateBuffer-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateBuffer-pAllocator-parameter)"},
+    {"VUID-vkCreateBuffer-pBuffer-parameter", "pBuffer must be a valid pointer to a VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateBuffer-pBuffer-parameter)"},
+    {"VUID-vkCreateBuffer-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkBufferCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateBuffer-pCreateInfo-parameter)"},
+    {"VUID-vkCreateBufferView-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateBufferView-device-parameter)"},
+    {"VUID-vkCreateBufferView-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateBufferView-pAllocator-parameter)"},
+    {"VUID-vkCreateBufferView-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkBufferViewCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateBufferView-pCreateInfo-parameter)"},
+    {"VUID-vkCreateBufferView-pView-parameter", "pView must be a valid pointer to a VkBufferView handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateBufferView-pView-parameter)"},
+    {"VUID-vkCreateCommandPool-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateCommandPool-device-parameter)"},
+    {"VUID-vkCreateCommandPool-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateCommandPool-pAllocator-parameter)"},
+    {"VUID-vkCreateCommandPool-pCommandPool-parameter", "pCommandPool must be a valid pointer to a VkCommandPool handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateCommandPool-pCommandPool-parameter)"},
+    {"VUID-vkCreateCommandPool-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkCommandPoolCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateCommandPool-pCreateInfo-parameter)"},
+    {"VUID-vkCreateCommandPool-queueFamilyIndex-01937", "pCreateInfo->queueFamilyIndex must be the index of a queue family available in the logical device device. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateCommandPool-queueFamilyIndex-01937)"},
+    {"VUID-vkCreateComputePipelines-createInfoCount-arraylength", "createInfoCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateComputePipelines-createInfoCount-arraylength)"},
+    {"VUID-vkCreateComputePipelines-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateComputePipelines-device-parameter)"},
+    {"VUID-vkCreateComputePipelines-flags-00695", "If the flags member of any element of pCreateInfos contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and the basePipelineIndex member of that same element is not -1, basePipelineIndex must be less than the index into pCreateInfos that corresponds to that element (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateComputePipelines-flags-00695)"},
+    {"VUID-vkCreateComputePipelines-flags-00696", "If the flags member of any element of pCreateInfos contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, the base pipeline must have been created with the VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT flag set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateComputePipelines-flags-00696)"},
+    {"VUID-vkCreateComputePipelines-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateComputePipelines-pAllocator-parameter)"},
+    {"VUID-vkCreateComputePipelines-pCreateInfos-parameter", "pCreateInfos must be a valid pointer to an array of createInfoCount valid VkComputePipelineCreateInfo structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateComputePipelines-pCreateInfos-parameter)"},
+    {"VUID-vkCreateComputePipelines-pPipelines-parameter", "pPipelines must be a valid pointer to an array of createInfoCount VkPipeline handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateComputePipelines-pPipelines-parameter)"},
+    {"VUID-vkCreateComputePipelines-pipelineCache-parameter", "If pipelineCache is not VK_NULL_HANDLE, pipelineCache must be a valid VkPipelineCache handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateComputePipelines-pipelineCache-parameter)"},
+    {"VUID-vkCreateComputePipelines-pipelineCache-parent", "If pipelineCache is a valid handle, it must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateComputePipelines-pipelineCache-parent)"},
+    {"VUID-vkCreateDebugReportCallbackEXT-instance-parameter", "instance must be a valid VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDebugReportCallbackEXT-instance-parameter)"},
+    {"VUID-vkCreateDebugReportCallbackEXT-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDebugReportCallbackEXT-pAllocator-parameter)"},
+    {"VUID-vkCreateDebugReportCallbackEXT-pCallback-parameter", "pCallback must be a valid pointer to a VkDebugReportCallbackEXT handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDebugReportCallbackEXT-pCallback-parameter)"},
+    {"VUID-vkCreateDebugReportCallbackEXT-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkDebugReportCallbackCreateInfoEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDebugReportCallbackEXT-pCreateInfo-parameter)"},
+    {"VUID-vkCreateDebugUtilsMessengerEXT-instance-parameter", "instance must be a valid VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDebugUtilsMessengerEXT-instance-parameter)"},
+    {"VUID-vkCreateDebugUtilsMessengerEXT-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDebugUtilsMessengerEXT-pAllocator-parameter)"},
+    {"VUID-vkCreateDebugUtilsMessengerEXT-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkDebugUtilsMessengerCreateInfoEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDebugUtilsMessengerEXT-pCreateInfo-parameter)"},
+    {"VUID-vkCreateDebugUtilsMessengerEXT-pMessenger-parameter", "pMessenger must be a valid pointer to a VkDebugUtilsMessengerEXT handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDebugUtilsMessengerEXT-pMessenger-parameter)"},
+    {"VUID-vkCreateDescriptorPool-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDescriptorPool-device-parameter)"},
+    {"VUID-vkCreateDescriptorPool-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDescriptorPool-pAllocator-parameter)"},
+    {"VUID-vkCreateDescriptorPool-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkDescriptorPoolCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDescriptorPool-pCreateInfo-parameter)"},
+    {"VUID-vkCreateDescriptorPool-pDescriptorPool-parameter", "pDescriptorPool must be a valid pointer to a VkDescriptorPool handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDescriptorPool-pDescriptorPool-parameter)"},
+    {"VUID-vkCreateDescriptorSetLayout-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDescriptorSetLayout-device-parameter)"},
+    {"VUID-vkCreateDescriptorSetLayout-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDescriptorSetLayout-pAllocator-parameter)"},
+    {"VUID-vkCreateDescriptorSetLayout-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkDescriptorSetLayoutCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDescriptorSetLayout-pCreateInfo-parameter)"},
+    {"VUID-vkCreateDescriptorSetLayout-pSetLayout-parameter", "pSetLayout must be a valid pointer to a VkDescriptorSetLayout handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDescriptorSetLayout-pSetLayout-parameter)"},
+    {"VUID-vkCreateDescriptorUpdateTemplate-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDescriptorUpdateTemplate-device-parameter)"},
+    {"VUID-vkCreateDescriptorUpdateTemplate-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDescriptorUpdateTemplate-pAllocator-parameter)"},
+    {"VUID-vkCreateDescriptorUpdateTemplate-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkDescriptorUpdateTemplateCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDescriptorUpdateTemplate-pCreateInfo-parameter)"},
+    {"VUID-vkCreateDescriptorUpdateTemplate-pDescriptorUpdateTemplate-parameter", "pDescriptorUpdateTemplate must be a valid pointer to a VkDescriptorUpdateTemplate handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDescriptorUpdateTemplate-pDescriptorUpdateTemplate-parameter)"},
+    {"VUID-vkCreateDevice-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDevice-pAllocator-parameter)"},
+    {"VUID-vkCreateDevice-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkDeviceCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDevice-pCreateInfo-parameter)"},
+    {"VUID-vkCreateDevice-pDevice-parameter", "pDevice must be a valid pointer to a VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDevice-pDevice-parameter)"},
+    {"VUID-vkCreateDevice-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDevice-physicalDevice-parameter)"},
+    {"VUID-vkCreateDevice-ppEnabledExtensionNames-01387", "All required extensions for each extension in the VkDeviceCreateInfo::ppEnabledExtensionNames list must also be present in that list. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDevice-ppEnabledExtensionNames-01387)"},
+    {"VUID-vkCreateDisplayModeKHR-display-parameter", "display must be a valid VkDisplayKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDisplayModeKHR-display-parameter)"},
+    {"VUID-vkCreateDisplayModeKHR-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDisplayModeKHR-pAllocator-parameter)"},
+    {"VUID-vkCreateDisplayModeKHR-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkDisplayModeCreateInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDisplayModeKHR-pCreateInfo-parameter)"},
+    {"VUID-vkCreateDisplayModeKHR-pMode-parameter", "pMode must be a valid pointer to a VkDisplayModeKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDisplayModeKHR-pMode-parameter)"},
+    {"VUID-vkCreateDisplayModeKHR-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDisplayModeKHR-physicalDevice-parameter)"},
+    {"VUID-vkCreateDisplayPlaneSurfaceKHR-instance-parameter", "instance must be a valid VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDisplayPlaneSurfaceKHR-instance-parameter)"},
+    {"VUID-vkCreateDisplayPlaneSurfaceKHR-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDisplayPlaneSurfaceKHR-pAllocator-parameter)"},
+    {"VUID-vkCreateDisplayPlaneSurfaceKHR-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkDisplaySurfaceCreateInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDisplayPlaneSurfaceKHR-pCreateInfo-parameter)"},
+    {"VUID-vkCreateDisplayPlaneSurfaceKHR-pSurface-parameter", "pSurface must be a valid pointer to a VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateDisplayPlaneSurfaceKHR-pSurface-parameter)"},
+    {"VUID-vkCreateEvent-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateEvent-device-parameter)"},
+    {"VUID-vkCreateEvent-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateEvent-pAllocator-parameter)"},
+    {"VUID-vkCreateEvent-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkEventCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateEvent-pCreateInfo-parameter)"},
+    {"VUID-vkCreateEvent-pEvent-parameter", "pEvent must be a valid pointer to a VkEvent handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateEvent-pEvent-parameter)"},
+    {"VUID-vkCreateFence-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateFence-device-parameter)"},
+    {"VUID-vkCreateFence-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateFence-pAllocator-parameter)"},
+    {"VUID-vkCreateFence-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkFenceCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateFence-pCreateInfo-parameter)"},
+    {"VUID-vkCreateFence-pFence-parameter", "pFence must be a valid pointer to a VkFence handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateFence-pFence-parameter)"},
+    {"VUID-vkCreateFramebuffer-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateFramebuffer-device-parameter)"},
+    {"VUID-vkCreateFramebuffer-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateFramebuffer-pAllocator-parameter)"},
+    {"VUID-vkCreateFramebuffer-pCreateInfo-02777", "If pCreateInfo->flags does not include VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR, and attachmentCount is not 0, each element of pCreateInfo->pAttachments must have been created on device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateFramebuffer-pCreateInfo-02777)"},
+    {"VUID-vkCreateFramebuffer-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkFramebufferCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateFramebuffer-pCreateInfo-parameter)"},
+    {"VUID-vkCreateFramebuffer-pFramebuffer-parameter", "pFramebuffer must be a valid pointer to a VkFramebuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateFramebuffer-pFramebuffer-parameter)"},
+    {"VUID-vkCreateGraphicsPipelines-createInfoCount-arraylength", "createInfoCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateGraphicsPipelines-createInfoCount-arraylength)"},
+    {"VUID-vkCreateGraphicsPipelines-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateGraphicsPipelines-device-parameter)"},
+    {"VUID-vkCreateGraphicsPipelines-flags-00720", "If the flags member of any element of pCreateInfos contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and the basePipelineIndex member of that same element is not -1, basePipelineIndex must be less than the index into pCreateInfos that corresponds to that element (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateGraphicsPipelines-flags-00720)"},
+    {"VUID-vkCreateGraphicsPipelines-flags-00721", "If the flags member of any element of pCreateInfos contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, the base pipeline must have been created with the VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT flag set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateGraphicsPipelines-flags-00721)"},
+    {"VUID-vkCreateGraphicsPipelines-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateGraphicsPipelines-pAllocator-parameter)"},
+    {"VUID-vkCreateGraphicsPipelines-pCreateInfos-parameter", "pCreateInfos must be a valid pointer to an array of createInfoCount valid VkGraphicsPipelineCreateInfo structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateGraphicsPipelines-pCreateInfos-parameter)"},
+    {"VUID-vkCreateGraphicsPipelines-pPipelines-parameter", "pPipelines must be a valid pointer to an array of createInfoCount VkPipeline handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateGraphicsPipelines-pPipelines-parameter)"},
+    {"VUID-vkCreateGraphicsPipelines-pipelineCache-parameter", "If pipelineCache is not VK_NULL_HANDLE, pipelineCache must be a valid VkPipelineCache handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateGraphicsPipelines-pipelineCache-parameter)"},
+    {"VUID-vkCreateGraphicsPipelines-pipelineCache-parent", "If pipelineCache is a valid handle, it must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateGraphicsPipelines-pipelineCache-parent)"},
+    {"VUID-vkCreateHeadlessSurfaceEXT-instance-parameter", "instance must be a valid VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateHeadlessSurfaceEXT-instance-parameter)"},
+    {"VUID-vkCreateHeadlessSurfaceEXT-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateHeadlessSurfaceEXT-pAllocator-parameter)"},
+    {"VUID-vkCreateHeadlessSurfaceEXT-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkHeadlessSurfaceCreateInfoEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateHeadlessSurfaceEXT-pCreateInfo-parameter)"},
+    {"VUID-vkCreateHeadlessSurfaceEXT-pSurface-parameter", "pSurface must be a valid pointer to a VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateHeadlessSurfaceEXT-pSurface-parameter)"},
+    {"VUID-vkCreateIOSSurfaceMVK-instance-parameter", "instance must be a valid VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateIOSSurfaceMVK-instance-parameter)"},
+    {"VUID-vkCreateIOSSurfaceMVK-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateIOSSurfaceMVK-pAllocator-parameter)"},
+    {"VUID-vkCreateIOSSurfaceMVK-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkIOSSurfaceCreateInfoMVK structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateIOSSurfaceMVK-pCreateInfo-parameter)"},
+    {"VUID-vkCreateIOSSurfaceMVK-pSurface-parameter", "pSurface must be a valid pointer to a VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateIOSSurfaceMVK-pSurface-parameter)"},
+    {"VUID-vkCreateImage-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateImage-device-parameter)"},
+    {"VUID-vkCreateImage-flags-00939", "If the flags member of pCreateInfo includes VK_IMAGE_CREATE_SPARSE_BINDING_BIT, creating this VkImage must not cause the total required sparse memory for all currently valid sparse resources on the device to exceed VkPhysicalDeviceLimits::sparseAddressSpaceSize (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateImage-flags-00939)"},
+    {"VUID-vkCreateImage-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateImage-pAllocator-parameter)"},
+    {"VUID-vkCreateImage-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkImageCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateImage-pCreateInfo-parameter)"},
+    {"VUID-vkCreateImage-pImage-parameter", "pImage must be a valid pointer to a VkImage handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateImage-pImage-parameter)"},
+    {"VUID-vkCreateImagePipeSurfaceFUCHSIA-instance-parameter", "instance must be a valid VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateImagePipeSurfaceFUCHSIA-instance-parameter)"},
+    {"VUID-vkCreateImagePipeSurfaceFUCHSIA-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateImagePipeSurfaceFUCHSIA-pAllocator-parameter)"},
+    {"VUID-vkCreateImagePipeSurfaceFUCHSIA-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkImagePipeSurfaceCreateInfoFUCHSIA structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateImagePipeSurfaceFUCHSIA-pCreateInfo-parameter)"},
+    {"VUID-vkCreateImagePipeSurfaceFUCHSIA-pSurface-parameter", "pSurface must be a valid pointer to a VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateImagePipeSurfaceFUCHSIA-pSurface-parameter)"},
+    {"VUID-vkCreateImageView-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateImageView-device-parameter)"},
+    {"VUID-vkCreateImageView-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateImageView-pAllocator-parameter)"},
+    {"VUID-vkCreateImageView-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkImageViewCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateImageView-pCreateInfo-parameter)"},
+    {"VUID-vkCreateImageView-pView-parameter", "pView must be a valid pointer to a VkImageView handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateImageView-pView-parameter)"},
+    {"VUID-vkCreateIndirectCommandsLayoutNVX-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateIndirectCommandsLayoutNVX-device-parameter)"},
+    {"VUID-vkCreateIndirectCommandsLayoutNVX-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateIndirectCommandsLayoutNVX-pAllocator-parameter)"},
+    {"VUID-vkCreateIndirectCommandsLayoutNVX-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkIndirectCommandsLayoutCreateInfoNVX structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateIndirectCommandsLayoutNVX-pCreateInfo-parameter)"},
+    {"VUID-vkCreateIndirectCommandsLayoutNVX-pIndirectCommandsLayout-parameter", "pIndirectCommandsLayout must be a valid pointer to a VkIndirectCommandsLayoutNVX handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateIndirectCommandsLayoutNVX-pIndirectCommandsLayout-parameter)"},
+    {"VUID-vkCreateInstance-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateInstance-pAllocator-parameter)"},
+    {"VUID-vkCreateInstance-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkInstanceCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateInstance-pCreateInfo-parameter)"},
+    {"VUID-vkCreateInstance-pInstance-parameter", "pInstance must be a valid pointer to a VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateInstance-pInstance-parameter)"},
+    {"VUID-vkCreateInstance-ppEnabledExtensionNames-01388", "All required extensions for each extension in the VkInstanceCreateInfo::ppEnabledExtensionNames list must also be present in that list. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateInstance-ppEnabledExtensionNames-01388)"},
+    {"VUID-vkCreateMacOSSurfaceMVK-instance-parameter", "instance must be a valid VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateMacOSSurfaceMVK-instance-parameter)"},
+    {"VUID-vkCreateMacOSSurfaceMVK-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateMacOSSurfaceMVK-pAllocator-parameter)"},
+    {"VUID-vkCreateMacOSSurfaceMVK-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkMacOSSurfaceCreateInfoMVK structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateMacOSSurfaceMVK-pCreateInfo-parameter)"},
+    {"VUID-vkCreateMacOSSurfaceMVK-pSurface-parameter", "pSurface must be a valid pointer to a VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateMacOSSurfaceMVK-pSurface-parameter)"},
+    {"VUID-vkCreateMetalSurfaceEXT-instance-parameter", "instance must be a valid VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateMetalSurfaceEXT-instance-parameter)"},
+    {"VUID-vkCreateMetalSurfaceEXT-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateMetalSurfaceEXT-pAllocator-parameter)"},
+    {"VUID-vkCreateMetalSurfaceEXT-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkMetalSurfaceCreateInfoEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateMetalSurfaceEXT-pCreateInfo-parameter)"},
+    {"VUID-vkCreateMetalSurfaceEXT-pSurface-parameter", "pSurface must be a valid pointer to a VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateMetalSurfaceEXT-pSurface-parameter)"},
+    {"VUID-vkCreateObjectTableNVX-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateObjectTableNVX-device-parameter)"},
+    {"VUID-vkCreateObjectTableNVX-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateObjectTableNVX-pAllocator-parameter)"},
+    {"VUID-vkCreateObjectTableNVX-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkObjectTableCreateInfoNVX structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateObjectTableNVX-pCreateInfo-parameter)"},
+    {"VUID-vkCreateObjectTableNVX-pObjectTable-parameter", "pObjectTable must be a valid pointer to a VkObjectTableNVX handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateObjectTableNVX-pObjectTable-parameter)"},
+    {"VUID-vkCreatePipelineCache-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreatePipelineCache-device-parameter)"},
+    {"VUID-vkCreatePipelineCache-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreatePipelineCache-pAllocator-parameter)"},
+    {"VUID-vkCreatePipelineCache-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkPipelineCacheCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreatePipelineCache-pCreateInfo-parameter)"},
+    {"VUID-vkCreatePipelineCache-pPipelineCache-parameter", "pPipelineCache must be a valid pointer to a VkPipelineCache handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreatePipelineCache-pPipelineCache-parameter)"},
+    {"VUID-vkCreatePipelineLayout-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreatePipelineLayout-device-parameter)"},
+    {"VUID-vkCreatePipelineLayout-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreatePipelineLayout-pAllocator-parameter)"},
+    {"VUID-vkCreatePipelineLayout-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkPipelineLayoutCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreatePipelineLayout-pCreateInfo-parameter)"},
+    {"VUID-vkCreatePipelineLayout-pPipelineLayout-parameter", "pPipelineLayout must be a valid pointer to a VkPipelineLayout handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreatePipelineLayout-pPipelineLayout-parameter)"},
+    {"VUID-vkCreateQueryPool-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateQueryPool-device-parameter)"},
+    {"VUID-vkCreateQueryPool-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateQueryPool-pAllocator-parameter)"},
+    {"VUID-vkCreateQueryPool-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkQueryPoolCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateQueryPool-pCreateInfo-parameter)"},
+    {"VUID-vkCreateQueryPool-pQueryPool-parameter", "pQueryPool must be a valid pointer to a VkQueryPool handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateQueryPool-pQueryPool-parameter)"},
+    {"VUID-vkCreateRayTracingPipelinesNV-createInfoCount-arraylength", "createInfoCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateRayTracingPipelinesNV-createInfoCount-arraylength)"},
+    {"VUID-vkCreateRayTracingPipelinesNV-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateRayTracingPipelinesNV-device-parameter)"},
+    {"VUID-vkCreateRayTracingPipelinesNV-flags-02402", "If the flags member of any element of pCreateInfos contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and the basePipelineIndex member of that same element is not -1, basePipelineIndex must be less than the index into pCreateInfos that corresponds to that element (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateRayTracingPipelinesNV-flags-02402)"},
+    {"VUID-vkCreateRayTracingPipelinesNV-flags-02403", "If the flags member of any element of pCreateInfos contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, the base pipeline must have been created with the VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT flag set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateRayTracingPipelinesNV-flags-02403)"},
+    {"VUID-vkCreateRayTracingPipelinesNV-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateRayTracingPipelinesNV-pAllocator-parameter)"},
+    {"VUID-vkCreateRayTracingPipelinesNV-pCreateInfos-parameter", "pCreateInfos must be a valid pointer to an array of createInfoCount valid VkRayTracingPipelineCreateInfoNV structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateRayTracingPipelinesNV-pCreateInfos-parameter)"},
+    {"VUID-vkCreateRayTracingPipelinesNV-pPipelines-parameter", "pPipelines must be a valid pointer to an array of createInfoCount VkPipeline handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateRayTracingPipelinesNV-pPipelines-parameter)"},
+    {"VUID-vkCreateRayTracingPipelinesNV-pipelineCache-parameter", "If pipelineCache is not VK_NULL_HANDLE, pipelineCache must be a valid VkPipelineCache handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateRayTracingPipelinesNV-pipelineCache-parameter)"},
+    {"VUID-vkCreateRayTracingPipelinesNV-pipelineCache-parent", "If pipelineCache is a valid handle, it must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateRayTracingPipelinesNV-pipelineCache-parent)"},
+    {"VUID-vkCreateRenderPass-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateRenderPass-device-parameter)"},
+    {"VUID-vkCreateRenderPass-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateRenderPass-pAllocator-parameter)"},
+    {"VUID-vkCreateRenderPass-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkRenderPassCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateRenderPass-pCreateInfo-parameter)"},
+    {"VUID-vkCreateRenderPass-pRenderPass-parameter", "pRenderPass must be a valid pointer to a VkRenderPass handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateRenderPass-pRenderPass-parameter)"},
+    {"VUID-vkCreateRenderPass2KHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateRenderPass2KHR-device-parameter)"},
+    {"VUID-vkCreateRenderPass2KHR-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateRenderPass2KHR-pAllocator-parameter)"},
+    {"VUID-vkCreateRenderPass2KHR-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkRenderPassCreateInfo2KHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateRenderPass2KHR-pCreateInfo-parameter)"},
+    {"VUID-vkCreateRenderPass2KHR-pRenderPass-parameter", "pRenderPass must be a valid pointer to a VkRenderPass handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateRenderPass2KHR-pRenderPass-parameter)"},
+    {"VUID-vkCreateSampler-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateSampler-device-parameter)"},
+    {"VUID-vkCreateSampler-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateSampler-pAllocator-parameter)"},
+    {"VUID-vkCreateSampler-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkSamplerCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateSampler-pCreateInfo-parameter)"},
+    {"VUID-vkCreateSampler-pSampler-parameter", "pSampler must be a valid pointer to a VkSampler handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateSampler-pSampler-parameter)"},
+    {"VUID-vkCreateSamplerYcbcrConversion-None-01648", "The sampler Y'CBCR conversion feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateSamplerYcbcrConversion-None-01648)"},
+    {"VUID-vkCreateSamplerYcbcrConversion-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateSamplerYcbcrConversion-device-parameter)"},
+    {"VUID-vkCreateSamplerYcbcrConversion-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateSamplerYcbcrConversion-pAllocator-parameter)"},
+    {"VUID-vkCreateSamplerYcbcrConversion-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkSamplerYcbcrConversionCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateSamplerYcbcrConversion-pCreateInfo-parameter)"},
+    {"VUID-vkCreateSamplerYcbcrConversion-pYcbcrConversion-parameter", "pYcbcrConversion must be a valid pointer to a VkSamplerYcbcrConversion handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateSamplerYcbcrConversion-pYcbcrConversion-parameter)"},
+    {"VUID-vkCreateSemaphore-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateSemaphore-device-parameter)"},
+    {"VUID-vkCreateSemaphore-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateSemaphore-pAllocator-parameter)"},
+    {"VUID-vkCreateSemaphore-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkSemaphoreCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateSemaphore-pCreateInfo-parameter)"},
+    {"VUID-vkCreateSemaphore-pSemaphore-parameter", "pSemaphore must be a valid pointer to a VkSemaphore handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateSemaphore-pSemaphore-parameter)"},
+    {"VUID-vkCreateShaderModule-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateShaderModule-device-parameter)"},
+    {"VUID-vkCreateShaderModule-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateShaderModule-pAllocator-parameter)"},
+    {"VUID-vkCreateShaderModule-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkShaderModuleCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateShaderModule-pCreateInfo-parameter)"},
+    {"VUID-vkCreateShaderModule-pShaderModule-parameter", "pShaderModule must be a valid pointer to a VkShaderModule handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateShaderModule-pShaderModule-parameter)"},
+    {"VUID-vkCreateSharedSwapchainsKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateSharedSwapchainsKHR-device-parameter)"},
+    {"VUID-vkCreateSharedSwapchainsKHR-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateSharedSwapchainsKHR-pAllocator-parameter)"},
+    {"VUID-vkCreateSharedSwapchainsKHR-pCreateInfos-parameter", "pCreateInfos must be a valid pointer to an array of swapchainCount valid VkSwapchainCreateInfoKHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateSharedSwapchainsKHR-pCreateInfos-parameter)"},
+    {"VUID-vkCreateSharedSwapchainsKHR-pSwapchains-parameter", "pSwapchains must be a valid pointer to an array of swapchainCount VkSwapchainKHR handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateSharedSwapchainsKHR-pSwapchains-parameter)"},
+    {"VUID-vkCreateSharedSwapchainsKHR-swapchainCount-arraylength", "swapchainCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateSharedSwapchainsKHR-swapchainCount-arraylength)"},
+    {"VUID-vkCreateStreamDescriptorSurfaceGGP-instance-parameter", "instance must be a valid VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateStreamDescriptorSurfaceGGP-instance-parameter)"},
+    {"VUID-vkCreateStreamDescriptorSurfaceGGP-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateStreamDescriptorSurfaceGGP-pAllocator-parameter)"},
+    {"VUID-vkCreateStreamDescriptorSurfaceGGP-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkStreamDescriptorSurfaceCreateInfoGGP structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateStreamDescriptorSurfaceGGP-pCreateInfo-parameter)"},
+    {"VUID-vkCreateStreamDescriptorSurfaceGGP-pSurface-parameter", "pSurface must be a valid pointer to a VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateStreamDescriptorSurfaceGGP-pSurface-parameter)"},
+    {"VUID-vkCreateSwapchainKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateSwapchainKHR-device-parameter)"},
+    {"VUID-vkCreateSwapchainKHR-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateSwapchainKHR-pAllocator-parameter)"},
+    {"VUID-vkCreateSwapchainKHR-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkSwapchainCreateInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateSwapchainKHR-pCreateInfo-parameter)"},
+    {"VUID-vkCreateSwapchainKHR-pSwapchain-parameter", "pSwapchain must be a valid pointer to a VkSwapchainKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateSwapchainKHR-pSwapchain-parameter)"},
+    {"VUID-vkCreateValidationCacheEXT-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateValidationCacheEXT-device-parameter)"},
+    {"VUID-vkCreateValidationCacheEXT-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateValidationCacheEXT-pAllocator-parameter)"},
+    {"VUID-vkCreateValidationCacheEXT-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkValidationCacheCreateInfoEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateValidationCacheEXT-pCreateInfo-parameter)"},
+    {"VUID-vkCreateValidationCacheEXT-pValidationCache-parameter", "pValidationCache must be a valid pointer to a VkValidationCacheEXT handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateValidationCacheEXT-pValidationCache-parameter)"},
+    {"VUID-vkCreateViSurfaceNN-instance-parameter", "instance must be a valid VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateViSurfaceNN-instance-parameter)"},
+    {"VUID-vkCreateViSurfaceNN-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateViSurfaceNN-pAllocator-parameter)"},
+    {"VUID-vkCreateViSurfaceNN-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkViSurfaceCreateInfoNN structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateViSurfaceNN-pCreateInfo-parameter)"},
+    {"VUID-vkCreateViSurfaceNN-pSurface-parameter", "pSurface must be a valid pointer to a VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateViSurfaceNN-pSurface-parameter)"},
+    {"VUID-vkCreateWaylandSurfaceKHR-instance-parameter", "instance must be a valid VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateWaylandSurfaceKHR-instance-parameter)"},
+    {"VUID-vkCreateWaylandSurfaceKHR-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateWaylandSurfaceKHR-pAllocator-parameter)"},
+    {"VUID-vkCreateWaylandSurfaceKHR-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkWaylandSurfaceCreateInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateWaylandSurfaceKHR-pCreateInfo-parameter)"},
+    {"VUID-vkCreateWaylandSurfaceKHR-pSurface-parameter", "pSurface must be a valid pointer to a VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateWaylandSurfaceKHR-pSurface-parameter)"},
+    {"VUID-vkCreateWin32SurfaceKHR-instance-parameter", "instance must be a valid VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateWin32SurfaceKHR-instance-parameter)"},
+    {"VUID-vkCreateWin32SurfaceKHR-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateWin32SurfaceKHR-pAllocator-parameter)"},
+    {"VUID-vkCreateWin32SurfaceKHR-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkWin32SurfaceCreateInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateWin32SurfaceKHR-pCreateInfo-parameter)"},
+    {"VUID-vkCreateWin32SurfaceKHR-pSurface-parameter", "pSurface must be a valid pointer to a VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateWin32SurfaceKHR-pSurface-parameter)"},
+    {"VUID-vkCreateXcbSurfaceKHR-instance-parameter", "instance must be a valid VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateXcbSurfaceKHR-instance-parameter)"},
+    {"VUID-vkCreateXcbSurfaceKHR-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateXcbSurfaceKHR-pAllocator-parameter)"},
+    {"VUID-vkCreateXcbSurfaceKHR-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkXcbSurfaceCreateInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateXcbSurfaceKHR-pCreateInfo-parameter)"},
+    {"VUID-vkCreateXcbSurfaceKHR-pSurface-parameter", "pSurface must be a valid pointer to a VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateXcbSurfaceKHR-pSurface-parameter)"},
+    {"VUID-vkCreateXlibSurfaceKHR-instance-parameter", "instance must be a valid VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateXlibSurfaceKHR-instance-parameter)"},
+    {"VUID-vkCreateXlibSurfaceKHR-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateXlibSurfaceKHR-pAllocator-parameter)"},
+    {"VUID-vkCreateXlibSurfaceKHR-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkXlibSurfaceCreateInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateXlibSurfaceKHR-pCreateInfo-parameter)"},
+    {"VUID-vkCreateXlibSurfaceKHR-pSurface-parameter", "pSurface must be a valid pointer to a VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkCreateXlibSurfaceKHR-pSurface-parameter)"},
+    {"VUID-vkDebugMarkerSetObjectNameEXT-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDebugMarkerSetObjectNameEXT-device-parameter)"},
+    {"VUID-vkDebugMarkerSetObjectNameEXT-pNameInfo-parameter", "pNameInfo must be a valid pointer to a valid VkDebugMarkerObjectNameInfoEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDebugMarkerSetObjectNameEXT-pNameInfo-parameter)"},
+    {"VUID-vkDebugMarkerSetObjectTagEXT-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDebugMarkerSetObjectTagEXT-device-parameter)"},
+    {"VUID-vkDebugMarkerSetObjectTagEXT-pTagInfo-parameter", "pTagInfo must be a valid pointer to a valid VkDebugMarkerObjectTagInfoEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDebugMarkerSetObjectTagEXT-pTagInfo-parameter)"},
+    {"VUID-vkDebugReportMessageEXT-flags-parameter", "flags must be a valid combination of VkDebugReportFlagBitsEXT values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDebugReportMessageEXT-flags-parameter)"},
+    {"VUID-vkDebugReportMessageEXT-flags-requiredbitmask", "flags must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDebugReportMessageEXT-flags-requiredbitmask)"},
+    {"VUID-vkDebugReportMessageEXT-instance-parameter", "instance must be a valid VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDebugReportMessageEXT-instance-parameter)"},
+    {"VUID-vkDebugReportMessageEXT-object-01241", "object must be a Vulkan object or VK_NULL_HANDLE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDebugReportMessageEXT-object-01241)"},
+    {"VUID-vkDebugReportMessageEXT-objectType-01498", "If objectType is not VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT and object is not VK_NULL_HANDLE, object must be a Vulkan object of the corresponding type associated with objectType as defined in VkDebugReportObjectTypeEXT and Vulkan Handle Relationship. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDebugReportMessageEXT-objectType-01498)"},
+    {"VUID-vkDebugReportMessageEXT-objectType-parameter", "objectType must be a valid VkDebugReportObjectTypeEXT value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDebugReportMessageEXT-objectType-parameter)"},
+    {"VUID-vkDebugReportMessageEXT-pLayerPrefix-parameter", "pLayerPrefix must be a null-terminated UTF-8 string (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDebugReportMessageEXT-pLayerPrefix-parameter)"},
+    {"VUID-vkDebugReportMessageEXT-pMessage-parameter", "pMessage must be a null-terminated UTF-8 string (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDebugReportMessageEXT-pMessage-parameter)"},
+    {"VUID-vkDestroyAccelerationStructureNV-accelerationStructure-02442", "All submitted commands that refer to accelerationStructure must have completed execution (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyAccelerationStructureNV-accelerationStructure-02442)"},
+    {"VUID-vkDestroyAccelerationStructureNV-accelerationStructure-02443", "If VkAllocationCallbacks were provided when accelerationStructure was created, a compatible set of callbacks must be provided here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyAccelerationStructureNV-accelerationStructure-02443)"},
+    {"VUID-vkDestroyAccelerationStructureNV-accelerationStructure-02444", "If no VkAllocationCallbacks were provided when accelerationStructure was created, pAllocator must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyAccelerationStructureNV-accelerationStructure-02444)"},
+    {"VUID-vkDestroyAccelerationStructureNV-accelerationStructure-parameter", "accelerationStructure must be a valid VkAccelerationStructureNV handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyAccelerationStructureNV-accelerationStructure-parameter)"},
+    {"VUID-vkDestroyAccelerationStructureNV-accelerationStructure-parent", "accelerationStructure must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyAccelerationStructureNV-accelerationStructure-parent)"},
+    {"VUID-vkDestroyAccelerationStructureNV-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyAccelerationStructureNV-device-parameter)"},
+    {"VUID-vkDestroyAccelerationStructureNV-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyAccelerationStructureNV-pAllocator-parameter)"},
+    {"VUID-vkDestroyBuffer-buffer-00922", "All submitted commands that refer to buffer, either directly or via a VkBufferView, must have completed execution (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyBuffer-buffer-00922)"},
+    {"VUID-vkDestroyBuffer-buffer-00923", "If VkAllocationCallbacks were provided when buffer was created, a compatible set of callbacks must be provided here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyBuffer-buffer-00923)"},
+    {"VUID-vkDestroyBuffer-buffer-00924", "If no VkAllocationCallbacks were provided when buffer was created, pAllocator must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyBuffer-buffer-00924)"},
+    {"VUID-vkDestroyBuffer-buffer-parameter", "If buffer is not VK_NULL_HANDLE, buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyBuffer-buffer-parameter)"},
+    {"VUID-vkDestroyBuffer-buffer-parent", "If buffer is a valid handle, it must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyBuffer-buffer-parent)"},
+    {"VUID-vkDestroyBuffer-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyBuffer-device-parameter)"},
+    {"VUID-vkDestroyBuffer-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyBuffer-pAllocator-parameter)"},
+    {"VUID-vkDestroyBufferView-bufferView-00936", "All submitted commands that refer to bufferView must have completed execution (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyBufferView-bufferView-00936)"},
+    {"VUID-vkDestroyBufferView-bufferView-00937", "If VkAllocationCallbacks were provided when bufferView was created, a compatible set of callbacks must be provided here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyBufferView-bufferView-00937)"},
+    {"VUID-vkDestroyBufferView-bufferView-00938", "If no VkAllocationCallbacks were provided when bufferView was created, pAllocator must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyBufferView-bufferView-00938)"},
+    {"VUID-vkDestroyBufferView-bufferView-parameter", "If bufferView is not VK_NULL_HANDLE, bufferView must be a valid VkBufferView handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyBufferView-bufferView-parameter)"},
+    {"VUID-vkDestroyBufferView-bufferView-parent", "If bufferView is a valid handle, it must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyBufferView-bufferView-parent)"},
+    {"VUID-vkDestroyBufferView-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyBufferView-device-parameter)"},
+    {"VUID-vkDestroyBufferView-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyBufferView-pAllocator-parameter)"},
+    {"VUID-vkDestroyCommandPool-commandPool-00041", "All VkCommandBuffer objects allocated from commandPool must not be in the pending state. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyCommandPool-commandPool-00041)"},
+    {"VUID-vkDestroyCommandPool-commandPool-00042", "If VkAllocationCallbacks were provided when commandPool was created, a compatible set of callbacks must be provided here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyCommandPool-commandPool-00042)"},
+    {"VUID-vkDestroyCommandPool-commandPool-00043", "If no VkAllocationCallbacks were provided when commandPool was created, pAllocator must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyCommandPool-commandPool-00043)"},
+    {"VUID-vkDestroyCommandPool-commandPool-parameter", "If commandPool is not VK_NULL_HANDLE, commandPool must be a valid VkCommandPool handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyCommandPool-commandPool-parameter)"},
+    {"VUID-vkDestroyCommandPool-commandPool-parent", "If commandPool is a valid handle, it must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyCommandPool-commandPool-parent)"},
+    {"VUID-vkDestroyCommandPool-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyCommandPool-device-parameter)"},
+    {"VUID-vkDestroyCommandPool-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyCommandPool-pAllocator-parameter)"},
+    {"VUID-vkDestroyDebugReportCallbackEXT-callback-parameter", "callback must be a valid VkDebugReportCallbackEXT handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDebugReportCallbackEXT-callback-parameter)"},
+    {"VUID-vkDestroyDebugReportCallbackEXT-callback-parent", "callback must have been created, allocated, or retrieved from instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDebugReportCallbackEXT-callback-parent)"},
+    {"VUID-vkDestroyDebugReportCallbackEXT-instance-01242", "If VkAllocationCallbacks were provided when callback was created, a compatible set of callbacks must be provided here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDebugReportCallbackEXT-instance-01242)"},
+    {"VUID-vkDestroyDebugReportCallbackEXT-instance-01243", "If no VkAllocationCallbacks were provided when callback was created, pAllocator must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDebugReportCallbackEXT-instance-01243)"},
+    {"VUID-vkDestroyDebugReportCallbackEXT-instance-parameter", "instance must be a valid VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDebugReportCallbackEXT-instance-parameter)"},
+    {"VUID-vkDestroyDebugReportCallbackEXT-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDebugReportCallbackEXT-pAllocator-parameter)"},
+    {"VUID-vkDestroyDebugUtilsMessengerEXT-instance-parameter", "instance must be a valid VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDebugUtilsMessengerEXT-instance-parameter)"},
+    {"VUID-vkDestroyDebugUtilsMessengerEXT-messenger-01915", "If VkAllocationCallbacks were provided when messenger was created, a compatible set of callbacks must be provided here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDebugUtilsMessengerEXT-messenger-01915)"},
+    {"VUID-vkDestroyDebugUtilsMessengerEXT-messenger-01916", "If no VkAllocationCallbacks were provided when messenger was created, pAllocator must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDebugUtilsMessengerEXT-messenger-01916)"},
+    {"VUID-vkDestroyDebugUtilsMessengerEXT-messenger-parameter", "messenger must be a valid VkDebugUtilsMessengerEXT handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDebugUtilsMessengerEXT-messenger-parameter)"},
+    {"VUID-vkDestroyDebugUtilsMessengerEXT-messenger-parent", "messenger must have been created, allocated, or retrieved from instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDebugUtilsMessengerEXT-messenger-parent)"},
+    {"VUID-vkDestroyDebugUtilsMessengerEXT-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDebugUtilsMessengerEXT-pAllocator-parameter)"},
+    {"VUID-vkDestroyDescriptorPool-descriptorPool-00303", "All submitted commands that refer to descriptorPool (via any allocated descriptor sets) must have completed execution (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDescriptorPool-descriptorPool-00303)"},
+    {"VUID-vkDestroyDescriptorPool-descriptorPool-00304", "If VkAllocationCallbacks were provided when descriptorPool was created, a compatible set of callbacks must be provided here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDescriptorPool-descriptorPool-00304)"},
+    {"VUID-vkDestroyDescriptorPool-descriptorPool-00305", "If no VkAllocationCallbacks were provided when descriptorPool was created, pAllocator must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDescriptorPool-descriptorPool-00305)"},
+    {"VUID-vkDestroyDescriptorPool-descriptorPool-parameter", "If descriptorPool is not VK_NULL_HANDLE, descriptorPool must be a valid VkDescriptorPool handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDescriptorPool-descriptorPool-parameter)"},
+    {"VUID-vkDestroyDescriptorPool-descriptorPool-parent", "If descriptorPool is a valid handle, it must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDescriptorPool-descriptorPool-parent)"},
+    {"VUID-vkDestroyDescriptorPool-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDescriptorPool-device-parameter)"},
+    {"VUID-vkDestroyDescriptorPool-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDescriptorPool-pAllocator-parameter)"},
+    {"VUID-vkDestroyDescriptorSetLayout-descriptorSetLayout-00284", "If VkAllocationCallbacks were provided when descriptorSetLayout was created, a compatible set of callbacks must be provided here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDescriptorSetLayout-descriptorSetLayout-00284)"},
+    {"VUID-vkDestroyDescriptorSetLayout-descriptorSetLayout-00285", "If no VkAllocationCallbacks were provided when descriptorSetLayout was created, pAllocator must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDescriptorSetLayout-descriptorSetLayout-00285)"},
+    {"VUID-vkDestroyDescriptorSetLayout-descriptorSetLayout-parameter", "If descriptorSetLayout is not VK_NULL_HANDLE, descriptorSetLayout must be a valid VkDescriptorSetLayout handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDescriptorSetLayout-descriptorSetLayout-parameter)"},
+    {"VUID-vkDestroyDescriptorSetLayout-descriptorSetLayout-parent", "If descriptorSetLayout is a valid handle, it must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDescriptorSetLayout-descriptorSetLayout-parent)"},
+    {"VUID-vkDestroyDescriptorSetLayout-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDescriptorSetLayout-device-parameter)"},
+    {"VUID-vkDestroyDescriptorSetLayout-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDescriptorSetLayout-pAllocator-parameter)"},
+    {"VUID-vkDestroyDescriptorUpdateTemplate-descriptorSetLayout-00356", "If VkAllocationCallbacks were provided when descriptorSetLayout was created, a compatible set of callbacks must be provided here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDescriptorUpdateTemplate-descriptorSetLayout-00356)"},
+    {"VUID-vkDestroyDescriptorUpdateTemplate-descriptorSetLayout-00357", "If no VkAllocationCallbacks were provided when descriptorSetLayout was created, pAllocator must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDescriptorUpdateTemplate-descriptorSetLayout-00357)"},
+    {"VUID-vkDestroyDescriptorUpdateTemplate-descriptorUpdateTemplate-parameter", "If descriptorUpdateTemplate is not VK_NULL_HANDLE, descriptorUpdateTemplate must be a valid VkDescriptorUpdateTemplate handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDescriptorUpdateTemplate-descriptorUpdateTemplate-parameter)"},
+    {"VUID-vkDestroyDescriptorUpdateTemplate-descriptorUpdateTemplate-parent", "If descriptorUpdateTemplate is a valid handle, it must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDescriptorUpdateTemplate-descriptorUpdateTemplate-parent)"},
+    {"VUID-vkDestroyDescriptorUpdateTemplate-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDescriptorUpdateTemplate-device-parameter)"},
+    {"VUID-vkDestroyDescriptorUpdateTemplate-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDescriptorUpdateTemplate-pAllocator-parameter)"},
+    {"VUID-vkDestroyDevice-device-00378", "All child objects created on device must have been destroyed prior to destroying device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDevice-device-00378)"},
+    {"VUID-vkDestroyDevice-device-00379", "If VkAllocationCallbacks were provided when device was created, a compatible set of callbacks must be provided here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDevice-device-00379)"},
+    {"VUID-vkDestroyDevice-device-00380", "If no VkAllocationCallbacks were provided when device was created, pAllocator must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDevice-device-00380)"},
+    {"VUID-vkDestroyDevice-device-parameter", "If device is not NULL, device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDevice-device-parameter)"},
+    {"VUID-vkDestroyDevice-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyDevice-pAllocator-parameter)"},
+    {"VUID-vkDestroyEvent-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyEvent-device-parameter)"},
+    {"VUID-vkDestroyEvent-event-01145", "All submitted commands that refer to event must have completed execution (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyEvent-event-01145)"},
+    {"VUID-vkDestroyEvent-event-01146", "If VkAllocationCallbacks were provided when event was created, a compatible set of callbacks must be provided here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyEvent-event-01146)"},
+    {"VUID-vkDestroyEvent-event-01147", "If no VkAllocationCallbacks were provided when event was created, pAllocator must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyEvent-event-01147)"},
+    {"VUID-vkDestroyEvent-event-parameter", "If event is not VK_NULL_HANDLE, event must be a valid VkEvent handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyEvent-event-parameter)"},
+    {"VUID-vkDestroyEvent-event-parent", "If event is a valid handle, it must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyEvent-event-parent)"},
+    {"VUID-vkDestroyEvent-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyEvent-pAllocator-parameter)"},
+    {"VUID-vkDestroyFence-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyFence-device-parameter)"},
+    {"VUID-vkDestroyFence-fence-01120", "All queue submission commands that refer to fence must have completed execution (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyFence-fence-01120)"},
+    {"VUID-vkDestroyFence-fence-01121", "If VkAllocationCallbacks were provided when fence was created, a compatible set of callbacks must be provided here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyFence-fence-01121)"},
+    {"VUID-vkDestroyFence-fence-01122", "If no VkAllocationCallbacks were provided when fence was created, pAllocator must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyFence-fence-01122)"},
+    {"VUID-vkDestroyFence-fence-parameter", "If fence is not VK_NULL_HANDLE, fence must be a valid VkFence handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyFence-fence-parameter)"},
+    {"VUID-vkDestroyFence-fence-parent", "If fence is a valid handle, it must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyFence-fence-parent)"},
+    {"VUID-vkDestroyFence-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyFence-pAllocator-parameter)"},
+    {"VUID-vkDestroyFramebuffer-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyFramebuffer-device-parameter)"},
+    {"VUID-vkDestroyFramebuffer-framebuffer-00892", "All submitted commands that refer to framebuffer must have completed execution (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyFramebuffer-framebuffer-00892)"},
+    {"VUID-vkDestroyFramebuffer-framebuffer-00893", "If VkAllocationCallbacks were provided when framebuffer was created, a compatible set of callbacks must be provided here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyFramebuffer-framebuffer-00893)"},
+    {"VUID-vkDestroyFramebuffer-framebuffer-00894", "If no VkAllocationCallbacks were provided when framebuffer was created, pAllocator must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyFramebuffer-framebuffer-00894)"},
+    {"VUID-vkDestroyFramebuffer-framebuffer-parameter", "If framebuffer is not VK_NULL_HANDLE, framebuffer must be a valid VkFramebuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyFramebuffer-framebuffer-parameter)"},
+    {"VUID-vkDestroyFramebuffer-framebuffer-parent", "If framebuffer is a valid handle, it must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyFramebuffer-framebuffer-parent)"},
+    {"VUID-vkDestroyFramebuffer-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyFramebuffer-pAllocator-parameter)"},
+    {"VUID-vkDestroyImage-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyImage-device-parameter)"},
+    {"VUID-vkDestroyImage-image-01000", "All submitted commands that refer to image, either directly or via a VkImageView, must have completed execution (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyImage-image-01000)"},
+    {"VUID-vkDestroyImage-image-01001", "If VkAllocationCallbacks were provided when image was created, a compatible set of callbacks must be provided here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyImage-image-01001)"},
+    {"VUID-vkDestroyImage-image-01002", "If no VkAllocationCallbacks were provided when image was created, pAllocator must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyImage-image-01002)"},
+    {"VUID-vkDestroyImage-image-parameter", "If image is not VK_NULL_HANDLE, image must be a valid VkImage handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyImage-image-parameter)"},
+    {"VUID-vkDestroyImage-image-parent", "If image is a valid handle, it must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyImage-image-parent)"},
+    {"VUID-vkDestroyImage-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyImage-pAllocator-parameter)"},
+    {"VUID-vkDestroyImageView-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyImageView-device-parameter)"},
+    {"VUID-vkDestroyImageView-imageView-01026", "All submitted commands that refer to imageView must have completed execution (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyImageView-imageView-01026)"},
+    {"VUID-vkDestroyImageView-imageView-01027", "If VkAllocationCallbacks were provided when imageView was created, a compatible set of callbacks must be provided here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyImageView-imageView-01027)"},
+    {"VUID-vkDestroyImageView-imageView-01028", "If no VkAllocationCallbacks were provided when imageView was created, pAllocator must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyImageView-imageView-01028)"},
+    {"VUID-vkDestroyImageView-imageView-parameter", "If imageView is not VK_NULL_HANDLE, imageView must be a valid VkImageView handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyImageView-imageView-parameter)"},
+    {"VUID-vkDestroyImageView-imageView-parent", "If imageView is a valid handle, it must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyImageView-imageView-parent)"},
+    {"VUID-vkDestroyImageView-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyImageView-pAllocator-parameter)"},
+    {"VUID-vkDestroyIndirectCommandsLayoutNVX-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyIndirectCommandsLayoutNVX-device-parameter)"},
+    {"VUID-vkDestroyIndirectCommandsLayoutNVX-indirectCommandsLayout-01352", "All submitted commands that refer to indirectCommandsLayout must have completed execution (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyIndirectCommandsLayoutNVX-indirectCommandsLayout-01352)"},
+    {"VUID-vkDestroyIndirectCommandsLayoutNVX-indirectCommandsLayout-parameter", "indirectCommandsLayout must be a valid VkIndirectCommandsLayoutNVX handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyIndirectCommandsLayoutNVX-indirectCommandsLayout-parameter)"},
+    {"VUID-vkDestroyIndirectCommandsLayoutNVX-indirectCommandsLayout-parent", "indirectCommandsLayout must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyIndirectCommandsLayoutNVX-indirectCommandsLayout-parent)"},
+    {"VUID-vkDestroyIndirectCommandsLayoutNVX-objectTable-01353", "If VkAllocationCallbacks were provided when objectTable was created, a compatible set of callbacks must be provided here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyIndirectCommandsLayoutNVX-objectTable-01353)"},
+    {"VUID-vkDestroyIndirectCommandsLayoutNVX-objectTable-01354", "If no VkAllocationCallbacks were provided when objectTable was created, pAllocator must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyIndirectCommandsLayoutNVX-objectTable-01354)"},
+    {"VUID-vkDestroyIndirectCommandsLayoutNVX-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyIndirectCommandsLayoutNVX-pAllocator-parameter)"},
+    {"VUID-vkDestroyInstance-instance-00629", "All child objects created using instance must have been destroyed prior to destroying instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyInstance-instance-00629)"},
+    {"VUID-vkDestroyInstance-instance-00630", "If VkAllocationCallbacks were provided when instance was created, a compatible set of callbacks must be provided here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyInstance-instance-00630)"},
+    {"VUID-vkDestroyInstance-instance-00631", "If no VkAllocationCallbacks were provided when instance was created, pAllocator must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyInstance-instance-00631)"},
+    {"VUID-vkDestroyInstance-instance-parameter", "If instance is not NULL, instance must be a valid VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyInstance-instance-parameter)"},
+    {"VUID-vkDestroyInstance-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyInstance-pAllocator-parameter)"},
+    {"VUID-vkDestroyObjectTableNVX-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyObjectTableNVX-device-parameter)"},
+    {"VUID-vkDestroyObjectTableNVX-objectTable-01361", "All submitted commands that refer to objectTable must have completed execution. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyObjectTableNVX-objectTable-01361)"},
+    {"VUID-vkDestroyObjectTableNVX-objectTable-01362", "If VkAllocationCallbacks were provided when objectTable was created, a compatible set of callbacks must be provided here. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyObjectTableNVX-objectTable-01362)"},
+    {"VUID-vkDestroyObjectTableNVX-objectTable-01363", "If no VkAllocationCallbacks were provided when objectTable was created, pAllocator must be NULL. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyObjectTableNVX-objectTable-01363)"},
+    {"VUID-vkDestroyObjectTableNVX-objectTable-parameter", "objectTable must be a valid VkObjectTableNVX handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyObjectTableNVX-objectTable-parameter)"},
+    {"VUID-vkDestroyObjectTableNVX-objectTable-parent", "objectTable must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyObjectTableNVX-objectTable-parent)"},
+    {"VUID-vkDestroyObjectTableNVX-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyObjectTableNVX-pAllocator-parameter)"},
+    {"VUID-vkDestroyPipeline-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyPipeline-device-parameter)"},
+    {"VUID-vkDestroyPipeline-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyPipeline-pAllocator-parameter)"},
+    {"VUID-vkDestroyPipeline-pipeline-00765", "All submitted commands that refer to pipeline must have completed execution (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyPipeline-pipeline-00765)"},
+    {"VUID-vkDestroyPipeline-pipeline-00766", "If VkAllocationCallbacks were provided when pipeline was created, a compatible set of callbacks must be provided here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyPipeline-pipeline-00766)"},
+    {"VUID-vkDestroyPipeline-pipeline-00767", "If no VkAllocationCallbacks were provided when pipeline was created, pAllocator must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyPipeline-pipeline-00767)"},
+    {"VUID-vkDestroyPipeline-pipeline-parameter", "If pipeline is not VK_NULL_HANDLE, pipeline must be a valid VkPipeline handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyPipeline-pipeline-parameter)"},
+    {"VUID-vkDestroyPipeline-pipeline-parent", "If pipeline is a valid handle, it must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyPipeline-pipeline-parent)"},
+    {"VUID-vkDestroyPipelineCache-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyPipelineCache-device-parameter)"},
+    {"VUID-vkDestroyPipelineCache-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyPipelineCache-pAllocator-parameter)"},
+    {"VUID-vkDestroyPipelineCache-pipelineCache-00771", "If VkAllocationCallbacks were provided when pipelineCache was created, a compatible set of callbacks must be provided here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyPipelineCache-pipelineCache-00771)"},
+    {"VUID-vkDestroyPipelineCache-pipelineCache-00772", "If no VkAllocationCallbacks were provided when pipelineCache was created, pAllocator must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyPipelineCache-pipelineCache-00772)"},
+    {"VUID-vkDestroyPipelineCache-pipelineCache-parameter", "If pipelineCache is not VK_NULL_HANDLE, pipelineCache must be a valid VkPipelineCache handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyPipelineCache-pipelineCache-parameter)"},
+    {"VUID-vkDestroyPipelineCache-pipelineCache-parent", "If pipelineCache is a valid handle, it must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyPipelineCache-pipelineCache-parent)"},
+    {"VUID-vkDestroyPipelineLayout-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyPipelineLayout-device-parameter)"},
+    {"VUID-vkDestroyPipelineLayout-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyPipelineLayout-pAllocator-parameter)"},
+    {"VUID-vkDestroyPipelineLayout-pipelineLayout-00299", "If VkAllocationCallbacks were provided when pipelineLayout was created, a compatible set of callbacks must be provided here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyPipelineLayout-pipelineLayout-00299)"},
+    {"VUID-vkDestroyPipelineLayout-pipelineLayout-00300", "If no VkAllocationCallbacks were provided when pipelineLayout was created, pAllocator must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyPipelineLayout-pipelineLayout-00300)"},
+    {"VUID-vkDestroyPipelineLayout-pipelineLayout-02004", "pipelineLayout must not have been passed to any vkCmd* command for any command buffers that are still in the recording state when vkDestroyPipelineLayout is called (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyPipelineLayout-pipelineLayout-02004)"},
+    {"VUID-vkDestroyPipelineLayout-pipelineLayout-parameter", "If pipelineLayout is not VK_NULL_HANDLE, pipelineLayout must be a valid VkPipelineLayout handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyPipelineLayout-pipelineLayout-parameter)"},
+    {"VUID-vkDestroyPipelineLayout-pipelineLayout-parent", "If pipelineLayout is a valid handle, it must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyPipelineLayout-pipelineLayout-parent)"},
+    {"VUID-vkDestroyQueryPool-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyQueryPool-device-parameter)"},
+    {"VUID-vkDestroyQueryPool-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyQueryPool-pAllocator-parameter)"},
+    {"VUID-vkDestroyQueryPool-queryPool-00793", "All submitted commands that refer to queryPool must have completed execution (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyQueryPool-queryPool-00793)"},
+    {"VUID-vkDestroyQueryPool-queryPool-00794", "If VkAllocationCallbacks were provided when queryPool was created, a compatible set of callbacks must be provided here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyQueryPool-queryPool-00794)"},
+    {"VUID-vkDestroyQueryPool-queryPool-00795", "If no VkAllocationCallbacks were provided when queryPool was created, pAllocator must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyQueryPool-queryPool-00795)"},
+    {"VUID-vkDestroyQueryPool-queryPool-parameter", "If queryPool is not VK_NULL_HANDLE, queryPool must be a valid VkQueryPool handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyQueryPool-queryPool-parameter)"},
+    {"VUID-vkDestroyQueryPool-queryPool-parent", "If queryPool is a valid handle, it must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyQueryPool-queryPool-parent)"},
+    {"VUID-vkDestroyRenderPass-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyRenderPass-device-parameter)"},
+    {"VUID-vkDestroyRenderPass-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyRenderPass-pAllocator-parameter)"},
+    {"VUID-vkDestroyRenderPass-renderPass-00873", "All submitted commands that refer to renderPass must have completed execution (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyRenderPass-renderPass-00873)"},
+    {"VUID-vkDestroyRenderPass-renderPass-00874", "If VkAllocationCallbacks were provided when renderPass was created, a compatible set of callbacks must be provided here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyRenderPass-renderPass-00874)"},
+    {"VUID-vkDestroyRenderPass-renderPass-00875", "If no VkAllocationCallbacks were provided when renderPass was created, pAllocator must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyRenderPass-renderPass-00875)"},
+    {"VUID-vkDestroyRenderPass-renderPass-parameter", "If renderPass is not VK_NULL_HANDLE, renderPass must be a valid VkRenderPass handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyRenderPass-renderPass-parameter)"},
+    {"VUID-vkDestroyRenderPass-renderPass-parent", "If renderPass is a valid handle, it must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyRenderPass-renderPass-parent)"},
+    {"VUID-vkDestroySampler-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySampler-device-parameter)"},
+    {"VUID-vkDestroySampler-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySampler-pAllocator-parameter)"},
+    {"VUID-vkDestroySampler-sampler-01082", "All submitted commands that refer to sampler must have completed execution (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySampler-sampler-01082)"},
+    {"VUID-vkDestroySampler-sampler-01083", "If VkAllocationCallbacks were provided when sampler was created, a compatible set of callbacks must be provided here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySampler-sampler-01083)"},
+    {"VUID-vkDestroySampler-sampler-01084", "If no VkAllocationCallbacks were provided when sampler was created, pAllocator must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySampler-sampler-01084)"},
+    {"VUID-vkDestroySampler-sampler-parameter", "If sampler is not VK_NULL_HANDLE, sampler must be a valid VkSampler handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySampler-sampler-parameter)"},
+    {"VUID-vkDestroySampler-sampler-parent", "If sampler is a valid handle, it must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySampler-sampler-parent)"},
+    {"VUID-vkDestroySamplerYcbcrConversion-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySamplerYcbcrConversion-device-parameter)"},
+    {"VUID-vkDestroySamplerYcbcrConversion-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySamplerYcbcrConversion-pAllocator-parameter)"},
+    {"VUID-vkDestroySamplerYcbcrConversion-ycbcrConversion-parameter", "If ycbcrConversion is not VK_NULL_HANDLE, ycbcrConversion must be a valid VkSamplerYcbcrConversion handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySamplerYcbcrConversion-ycbcrConversion-parameter)"},
+    {"VUID-vkDestroySamplerYcbcrConversion-ycbcrConversion-parent", "If ycbcrConversion is a valid handle, it must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySamplerYcbcrConversion-ycbcrConversion-parent)"},
+    {"VUID-vkDestroySemaphore-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySemaphore-device-parameter)"},
+    {"VUID-vkDestroySemaphore-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySemaphore-pAllocator-parameter)"},
+    {"VUID-vkDestroySemaphore-semaphore-01137", "All submitted batches that refer to semaphore must have completed execution (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySemaphore-semaphore-01137)"},
+    {"VUID-vkDestroySemaphore-semaphore-01138", "If VkAllocationCallbacks were provided when semaphore was created, a compatible set of callbacks must be provided here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySemaphore-semaphore-01138)"},
+    {"VUID-vkDestroySemaphore-semaphore-01139", "If no VkAllocationCallbacks were provided when semaphore was created, pAllocator must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySemaphore-semaphore-01139)"},
+    {"VUID-vkDestroySemaphore-semaphore-parameter", "If semaphore is not VK_NULL_HANDLE, semaphore must be a valid VkSemaphore handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySemaphore-semaphore-parameter)"},
+    {"VUID-vkDestroySemaphore-semaphore-parent", "If semaphore is a valid handle, it must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySemaphore-semaphore-parent)"},
+    {"VUID-vkDestroyShaderModule-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyShaderModule-device-parameter)"},
+    {"VUID-vkDestroyShaderModule-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyShaderModule-pAllocator-parameter)"},
+    {"VUID-vkDestroyShaderModule-shaderModule-01092", "If VkAllocationCallbacks were provided when shaderModule was created, a compatible set of callbacks must be provided here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyShaderModule-shaderModule-01092)"},
+    {"VUID-vkDestroyShaderModule-shaderModule-01093", "If no VkAllocationCallbacks were provided when shaderModule was created, pAllocator must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyShaderModule-shaderModule-01093)"},
+    {"VUID-vkDestroyShaderModule-shaderModule-parameter", "If shaderModule is not VK_NULL_HANDLE, shaderModule must be a valid VkShaderModule handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyShaderModule-shaderModule-parameter)"},
+    {"VUID-vkDestroyShaderModule-shaderModule-parent", "If shaderModule is a valid handle, it must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyShaderModule-shaderModule-parent)"},
+    {"VUID-vkDestroySurfaceKHR-instance-parameter", "instance must be a valid VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySurfaceKHR-instance-parameter)"},
+    {"VUID-vkDestroySurfaceKHR-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySurfaceKHR-pAllocator-parameter)"},
+    {"VUID-vkDestroySurfaceKHR-surface-01266", "All VkSwapchainKHR objects created for surface must have been destroyed prior to destroying surface (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySurfaceKHR-surface-01266)"},
+    {"VUID-vkDestroySurfaceKHR-surface-01267", "If VkAllocationCallbacks were provided when surface was created, a compatible set of callbacks must be provided here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySurfaceKHR-surface-01267)"},
+    {"VUID-vkDestroySurfaceKHR-surface-01268", "If no VkAllocationCallbacks were provided when surface was created, pAllocator must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySurfaceKHR-surface-01268)"},
+    {"VUID-vkDestroySurfaceKHR-surface-parameter", "If surface is not VK_NULL_HANDLE, surface must be a valid VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySurfaceKHR-surface-parameter)"},
+    {"VUID-vkDestroySurfaceKHR-surface-parent", "If surface is a valid handle, it must have been created, allocated, or retrieved from instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySurfaceKHR-surface-parent)"},
+    {"VUID-vkDestroySwapchainKHR-commonparent", "Both of device, and swapchain that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkInstance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySwapchainKHR-commonparent)"},
+    {"VUID-vkDestroySwapchainKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySwapchainKHR-device-parameter)"},
+    {"VUID-vkDestroySwapchainKHR-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySwapchainKHR-pAllocator-parameter)"},
+    {"VUID-vkDestroySwapchainKHR-swapchain-01282", "All uses of presentable images acquired from swapchain must have completed execution (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySwapchainKHR-swapchain-01282)"},
+    {"VUID-vkDestroySwapchainKHR-swapchain-01283", "If VkAllocationCallbacks were provided when swapchain was created, a compatible set of callbacks must be provided here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySwapchainKHR-swapchain-01283)"},
+    {"VUID-vkDestroySwapchainKHR-swapchain-01284", "If no VkAllocationCallbacks were provided when swapchain was created, pAllocator must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySwapchainKHR-swapchain-01284)"},
+    {"VUID-vkDestroySwapchainKHR-swapchain-parameter", "If swapchain is not VK_NULL_HANDLE, swapchain must be a valid VkSwapchainKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroySwapchainKHR-swapchain-parameter)"},
+    {"VUID-vkDestroyValidationCacheEXT-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyValidationCacheEXT-device-parameter)"},
+    {"VUID-vkDestroyValidationCacheEXT-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyValidationCacheEXT-pAllocator-parameter)"},
+    {"VUID-vkDestroyValidationCacheEXT-validationCache-01537", "If VkAllocationCallbacks were provided when validationCache was created, a compatible set of callbacks must be provided here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyValidationCacheEXT-validationCache-01537)"},
+    {"VUID-vkDestroyValidationCacheEXT-validationCache-01538", "If no VkAllocationCallbacks were provided when validationCache was created, pAllocator must be NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyValidationCacheEXT-validationCache-01538)"},
+    {"VUID-vkDestroyValidationCacheEXT-validationCache-parameter", "If validationCache is not VK_NULL_HANDLE, validationCache must be a valid VkValidationCacheEXT handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyValidationCacheEXT-validationCache-parameter)"},
+    {"VUID-vkDestroyValidationCacheEXT-validationCache-parent", "If validationCache is a valid handle, it must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDestroyValidationCacheEXT-validationCache-parent)"},
+    {"VUID-vkDeviceWaitIdle-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDeviceWaitIdle-device-parameter)"},
+    {"VUID-vkDisplayPowerControlEXT-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDisplayPowerControlEXT-device-parameter)"},
+    {"VUID-vkDisplayPowerControlEXT-display-parameter", "display must be a valid VkDisplayKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDisplayPowerControlEXT-display-parameter)"},
+    {"VUID-vkDisplayPowerControlEXT-pDisplayPowerInfo-parameter", "pDisplayPowerInfo must be a valid pointer to a valid VkDisplayPowerInfoEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkDisplayPowerControlEXT-pDisplayPowerInfo-parameter)"},
+    {"VUID-vkEndCommandBuffer-None-01978", "Conditional rendering must not be active (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEndCommandBuffer-None-01978)"},
+    {"VUID-vkEndCommandBuffer-commandBuffer-00059", "commandBuffer must be in the recording state. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEndCommandBuffer-commandBuffer-00059)"},
+    {"VUID-vkEndCommandBuffer-commandBuffer-00060", "If commandBuffer is a primary command buffer, there must not be an active render pass instance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEndCommandBuffer-commandBuffer-00060)"},
+    {"VUID-vkEndCommandBuffer-commandBuffer-00061", "All queries made active during the recording of commandBuffer must have been made inactive (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEndCommandBuffer-commandBuffer-00061)"},
+    {"VUID-vkEndCommandBuffer-commandBuffer-00062", "If commandBuffer is a secondary command buffer, there must not be an outstanding vkCmdDebugMarkerBeginEXT command recorded to commandBuffer that has not previously been ended by a call to vkCmdDebugMarkerEndEXT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEndCommandBuffer-commandBuffer-00062)"},
+    {"VUID-vkEndCommandBuffer-commandBuffer-01815", "If commandBuffer is a secondary command buffer, there must not be an outstanding vkCmdBeginDebugUtilsLabelEXT command recorded to commandBuffer that has not previously been ended by a call to vkCmdEndDebugUtilsLabelEXT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEndCommandBuffer-commandBuffer-01815)"},
+    {"VUID-vkEndCommandBuffer-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEndCommandBuffer-commandBuffer-parameter)"},
+    {"VUID-vkEnumerateDeviceExtensionProperties-pLayerName-parameter", "If pLayerName is not NULL, pLayerName must be a null-terminated UTF-8 string (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEnumerateDeviceExtensionProperties-pLayerName-parameter)"},
+    {"VUID-vkEnumerateDeviceExtensionProperties-pProperties-parameter", "If the value referenced by pPropertyCount is not 0, and pProperties is not NULL, pProperties must be a valid pointer to an array of pPropertyCount VkExtensionProperties structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEnumerateDeviceExtensionProperties-pProperties-parameter)"},
+    {"VUID-vkEnumerateDeviceExtensionProperties-pPropertyCount-parameter", "pPropertyCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEnumerateDeviceExtensionProperties-pPropertyCount-parameter)"},
+    {"VUID-vkEnumerateDeviceExtensionProperties-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEnumerateDeviceExtensionProperties-physicalDevice-parameter)"},
+    {"VUID-vkEnumerateDeviceLayerProperties-pProperties-parameter", "If the value referenced by pPropertyCount is not 0, and pProperties is not NULL, pProperties must be a valid pointer to an array of pPropertyCount VkLayerProperties structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEnumerateDeviceLayerProperties-pProperties-parameter)"},
+    {"VUID-vkEnumerateDeviceLayerProperties-pPropertyCount-parameter", "pPropertyCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEnumerateDeviceLayerProperties-pPropertyCount-parameter)"},
+    {"VUID-vkEnumerateDeviceLayerProperties-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEnumerateDeviceLayerProperties-physicalDevice-parameter)"},
+    {"VUID-vkEnumerateInstanceExtensionProperties-pLayerName-parameter", "If pLayerName is not NULL, pLayerName must be a null-terminated UTF-8 string (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEnumerateInstanceExtensionProperties-pLayerName-parameter)"},
+    {"VUID-vkEnumerateInstanceExtensionProperties-pProperties-parameter", "If the value referenced by pPropertyCount is not 0, and pProperties is not NULL, pProperties must be a valid pointer to an array of pPropertyCount VkExtensionProperties structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEnumerateInstanceExtensionProperties-pProperties-parameter)"},
+    {"VUID-vkEnumerateInstanceExtensionProperties-pPropertyCount-parameter", "pPropertyCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEnumerateInstanceExtensionProperties-pPropertyCount-parameter)"},
+    {"VUID-vkEnumerateInstanceLayerProperties-pProperties-parameter", "If the value referenced by pPropertyCount is not 0, and pProperties is not NULL, pProperties must be a valid pointer to an array of pPropertyCount VkLayerProperties structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEnumerateInstanceLayerProperties-pProperties-parameter)"},
+    {"VUID-vkEnumerateInstanceLayerProperties-pPropertyCount-parameter", "pPropertyCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEnumerateInstanceLayerProperties-pPropertyCount-parameter)"},
+    {"VUID-vkEnumerateInstanceVersion-pApiVersion-parameter", "pApiVersion must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEnumerateInstanceVersion-pApiVersion-parameter)"},
+    {"VUID-vkEnumeratePhysicalDeviceGroups-instance-parameter", "instance must be a valid VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEnumeratePhysicalDeviceGroups-instance-parameter)"},
+    {"VUID-vkEnumeratePhysicalDeviceGroups-pPhysicalDeviceGroupCount-parameter", "pPhysicalDeviceGroupCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEnumeratePhysicalDeviceGroups-pPhysicalDeviceGroupCount-parameter)"},
+    {"VUID-vkEnumeratePhysicalDeviceGroups-pPhysicalDeviceGroupProperties-parameter", "If the value referenced by pPhysicalDeviceGroupCount is not 0, and pPhysicalDeviceGroupProperties is not NULL, pPhysicalDeviceGroupProperties must be a valid pointer to an array of pPhysicalDeviceGroupCount VkPhysicalDeviceGroupProperties structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEnumeratePhysicalDeviceGroups-pPhysicalDeviceGroupProperties-parameter)"},
+    {"VUID-vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR-pCounterCount-parameter", "pCounterCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR-pCounterCount-parameter)"},
+    {"VUID-vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR-pCounterDescriptions-parameter", "If the value referenced by pCounterCount is not 0, and pCounterDescriptions is not NULL, pCounterDescriptions must be a valid pointer to an array of pCounterCount VkPerformanceCounterDescriptionKHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR-pCounterDescriptions-parameter)"},
+    {"VUID-vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR-pCounters-parameter", "If the value referenced by pCounterCount is not 0, and pCounters is not NULL, pCounters must be a valid pointer to an array of pCounterCount VkPerformanceCounterKHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR-pCounters-parameter)"},
+    {"VUID-vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR-physicalDevice-parameter)"},
+    {"VUID-vkEnumeratePhysicalDevices-instance-parameter", "instance must be a valid VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEnumeratePhysicalDevices-instance-parameter)"},
+    {"VUID-vkEnumeratePhysicalDevices-pPhysicalDeviceCount-parameter", "pPhysicalDeviceCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEnumeratePhysicalDevices-pPhysicalDeviceCount-parameter)"},
+    {"VUID-vkEnumeratePhysicalDevices-pPhysicalDevices-parameter", "If the value referenced by pPhysicalDeviceCount is not 0, and pPhysicalDevices is not NULL, pPhysicalDevices must be a valid pointer to an array of pPhysicalDeviceCount VkPhysicalDevice handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkEnumeratePhysicalDevices-pPhysicalDevices-parameter)"},
+    {"VUID-vkFlushMappedMemoryRanges-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkFlushMappedMemoryRanges-device-parameter)"},
+    {"VUID-vkFlushMappedMemoryRanges-memoryRangeCount-arraylength", "memoryRangeCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkFlushMappedMemoryRanges-memoryRangeCount-arraylength)"},
+    {"VUID-vkFlushMappedMemoryRanges-pMemoryRanges-parameter", "pMemoryRanges must be a valid pointer to an array of memoryRangeCount valid VkMappedMemoryRange structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkFlushMappedMemoryRanges-pMemoryRanges-parameter)"},
+    {"VUID-vkFreeCommandBuffers-commandBufferCount-arraylength", "commandBufferCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkFreeCommandBuffers-commandBufferCount-arraylength)"},
+    {"VUID-vkFreeCommandBuffers-commandPool-parameter", "commandPool must be a valid VkCommandPool handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkFreeCommandBuffers-commandPool-parameter)"},
+    {"VUID-vkFreeCommandBuffers-commandPool-parent", "commandPool must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkFreeCommandBuffers-commandPool-parent)"},
+    {"VUID-vkFreeCommandBuffers-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkFreeCommandBuffers-device-parameter)"},
+    {"VUID-vkFreeCommandBuffers-pCommandBuffers-00047", "All elements of pCommandBuffers must not be in the pending state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkFreeCommandBuffers-pCommandBuffers-00047)"},
+    {"VUID-vkFreeCommandBuffers-pCommandBuffers-00048", "pCommandBuffers must be a valid pointer to an array of commandBufferCount VkCommandBuffer handles, each element of which must either be a valid handle or NULL (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkFreeCommandBuffers-pCommandBuffers-00048)"},
+    {"VUID-vkFreeCommandBuffers-pCommandBuffers-parent", "Each element of pCommandBuffers that is a valid handle must have been created, allocated, or retrieved from commandPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkFreeCommandBuffers-pCommandBuffers-parent)"},
+    {"VUID-vkFreeDescriptorSets-descriptorPool-00312", "descriptorPool must have been created with the VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT flag (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkFreeDescriptorSets-descriptorPool-00312)"},
+    {"VUID-vkFreeDescriptorSets-descriptorPool-parameter", "descriptorPool must be a valid VkDescriptorPool handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkFreeDescriptorSets-descriptorPool-parameter)"},
+    {"VUID-vkFreeDescriptorSets-descriptorPool-parent", "descriptorPool must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkFreeDescriptorSets-descriptorPool-parent)"},
+    {"VUID-vkFreeDescriptorSets-descriptorSetCount-arraylength", "descriptorSetCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkFreeDescriptorSets-descriptorSetCount-arraylength)"},
+    {"VUID-vkFreeDescriptorSets-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkFreeDescriptorSets-device-parameter)"},
+    {"VUID-vkFreeDescriptorSets-pDescriptorSets-00309", "All submitted commands that refer to any element of pDescriptorSets must have completed execution (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkFreeDescriptorSets-pDescriptorSets-00309)"},
+    {"VUID-vkFreeDescriptorSets-pDescriptorSets-00310", "pDescriptorSets must be a valid pointer to an array of descriptorSetCount VkDescriptorSet handles, each element of which must either be a valid handle or VK_NULL_HANDLE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkFreeDescriptorSets-pDescriptorSets-00310)"},
+    {"VUID-vkFreeDescriptorSets-pDescriptorSets-00311", "Each valid handle in pDescriptorSets must have been allocated from descriptorPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkFreeDescriptorSets-pDescriptorSets-00311)"},
+    {"VUID-vkFreeDescriptorSets-pDescriptorSets-parent", "Each element of pDescriptorSets that is a valid handle must have been created, allocated, or retrieved from descriptorPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkFreeDescriptorSets-pDescriptorSets-parent)"},
+    {"VUID-vkFreeMemory-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkFreeMemory-device-parameter)"},
+    {"VUID-vkFreeMemory-memory-00677", "All submitted commands that refer to memory (via images or buffers) must have completed execution (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkFreeMemory-memory-00677)"},
+    {"VUID-vkFreeMemory-memory-parameter", "If memory is not VK_NULL_HANDLE, memory must be a valid VkDeviceMemory handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkFreeMemory-memory-parameter)"},
+    {"VUID-vkFreeMemory-memory-parent", "If memory is a valid handle, it must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkFreeMemory-memory-parent)"},
+    {"VUID-vkFreeMemory-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkFreeMemory-pAllocator-parameter)"},
+    {"VUID-vkGetAccelerationStructureHandleNV-accelerationStructure-02787", "accelerationStructure must be bound completely and contiguously to a single VkDeviceMemory object via vkBindAccelerationStructureMemoryNV (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetAccelerationStructureHandleNV-accelerationStructure-02787)"},
+    {"VUID-vkGetAccelerationStructureHandleNV-accelerationStructure-parameter", "accelerationStructure must be a valid VkAccelerationStructureNV handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetAccelerationStructureHandleNV-accelerationStructure-parameter)"},
+    {"VUID-vkGetAccelerationStructureHandleNV-accelerationStructure-parent", "accelerationStructure must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetAccelerationStructureHandleNV-accelerationStructure-parent)"},
+    {"VUID-vkGetAccelerationStructureHandleNV-dataSize-02240", "dataSize must be large enough to contain the result of the query, as described above (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetAccelerationStructureHandleNV-dataSize-02240)"},
+    {"VUID-vkGetAccelerationStructureHandleNV-dataSize-arraylength", "dataSize must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetAccelerationStructureHandleNV-dataSize-arraylength)"},
+    {"VUID-vkGetAccelerationStructureHandleNV-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetAccelerationStructureHandleNV-device-parameter)"},
+    {"VUID-vkGetAccelerationStructureHandleNV-pData-parameter", "pData must be a valid pointer to an array of dataSize bytes (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetAccelerationStructureHandleNV-pData-parameter)"},
+    {"VUID-vkGetAccelerationStructureMemoryRequirementsNV-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetAccelerationStructureMemoryRequirementsNV-device-parameter)"},
+    {"VUID-vkGetAccelerationStructureMemoryRequirementsNV-pInfo-parameter", "pInfo must be a valid pointer to a valid VkAccelerationStructureMemoryRequirementsInfoNV structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetAccelerationStructureMemoryRequirementsNV-pInfo-parameter)"},
+    {"VUID-vkGetAccelerationStructureMemoryRequirementsNV-pMemoryRequirements-parameter", "pMemoryRequirements must be a valid pointer to a VkMemoryRequirements2KHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetAccelerationStructureMemoryRequirementsNV-pMemoryRequirements-parameter)"},
+    {"VUID-vkGetAndroidHardwareBufferPropertiesANDROID-buffer-01884", "buffer must be a valid Android hardware buffer object with at least one of the AHARDWAREBUFFER_USAGE_GPU_* flags in its AHardwareBuffer_Desc::usage (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetAndroidHardwareBufferPropertiesANDROID-buffer-01884)"},
+    {"VUID-vkGetAndroidHardwareBufferPropertiesANDROID-buffer-parameter", "buffer must be a valid pointer to a valid AHardwareBuffer value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetAndroidHardwareBufferPropertiesANDROID-buffer-parameter)"},
+    {"VUID-vkGetAndroidHardwareBufferPropertiesANDROID-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetAndroidHardwareBufferPropertiesANDROID-device-parameter)"},
+    {"VUID-vkGetAndroidHardwareBufferPropertiesANDROID-pProperties-parameter", "pProperties must be a valid pointer to a VkAndroidHardwareBufferPropertiesANDROID structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetAndroidHardwareBufferPropertiesANDROID-pProperties-parameter)"},
+    {"VUID-vkGetBufferDeviceAddressKHR-bufferDeviceAddress-03324", "The bufferDeviceAddress or VkPhysicalDeviceBufferDeviceAddressFeaturesEXT::bufferDeviceAddress feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetBufferDeviceAddressKHR-bufferDeviceAddress-03324)"},
+    {"VUID-vkGetBufferDeviceAddressKHR-device-03325", "If device was created with multiple physical devices, then the bufferDeviceAddressMultiDevice or VkPhysicalDeviceBufferDeviceAddressFeaturesEXT::bufferDeviceAddressMultiDevice feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetBufferDeviceAddressKHR-device-03325)"},
+    {"VUID-vkGetBufferDeviceAddressKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetBufferDeviceAddressKHR-device-parameter)"},
+    {"VUID-vkGetBufferDeviceAddressKHR-pInfo-parameter", "pInfo must be a valid pointer to a valid VkBufferDeviceAddressInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetBufferDeviceAddressKHR-pInfo-parameter)"},
+    {"VUID-vkGetBufferMemoryRequirements-buffer-parameter", "buffer must be a valid VkBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetBufferMemoryRequirements-buffer-parameter)"},
+    {"VUID-vkGetBufferMemoryRequirements-buffer-parent", "buffer must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetBufferMemoryRequirements-buffer-parent)"},
+    {"VUID-vkGetBufferMemoryRequirements-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetBufferMemoryRequirements-device-parameter)"},
+    {"VUID-vkGetBufferMemoryRequirements-pMemoryRequirements-parameter", "pMemoryRequirements must be a valid pointer to a VkMemoryRequirements structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetBufferMemoryRequirements-pMemoryRequirements-parameter)"},
+    {"VUID-vkGetBufferMemoryRequirements2-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetBufferMemoryRequirements2-device-parameter)"},
+    {"VUID-vkGetBufferMemoryRequirements2-pInfo-parameter", "pInfo must be a valid pointer to a valid VkBufferMemoryRequirementsInfo2 structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetBufferMemoryRequirements2-pInfo-parameter)"},
+    {"VUID-vkGetBufferMemoryRequirements2-pMemoryRequirements-parameter", "pMemoryRequirements must be a valid pointer to a VkMemoryRequirements2 structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetBufferMemoryRequirements2-pMemoryRequirements-parameter)"},
+    {"VUID-vkGetBufferOpaqueCaptureAddressKHR-None-03326", "The bufferDeviceAddress feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetBufferOpaqueCaptureAddressKHR-None-03326)"},
+    {"VUID-vkGetBufferOpaqueCaptureAddressKHR-device-03327", "If device was created with multiple physical devices, then the bufferDeviceAddressMultiDevice feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetBufferOpaqueCaptureAddressKHR-device-03327)"},
+    {"VUID-vkGetBufferOpaqueCaptureAddressKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetBufferOpaqueCaptureAddressKHR-device-parameter)"},
+    {"VUID-vkGetBufferOpaqueCaptureAddressKHR-pInfo-parameter", "pInfo must be a valid pointer to a valid VkBufferDeviceAddressInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetBufferOpaqueCaptureAddressKHR-pInfo-parameter)"},
+    {"VUID-vkGetCalibratedTimestampsEXT-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetCalibratedTimestampsEXT-device-parameter)"},
+    {"VUID-vkGetCalibratedTimestampsEXT-pMaxDeviation-parameter", "pMaxDeviation must be a valid pointer to a uint64_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetCalibratedTimestampsEXT-pMaxDeviation-parameter)"},
+    {"VUID-vkGetCalibratedTimestampsEXT-pTimestampInfos-parameter", "pTimestampInfos must be a valid pointer to an array of timestampCount valid VkCalibratedTimestampInfoEXT structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetCalibratedTimestampsEXT-pTimestampInfos-parameter)"},
+    {"VUID-vkGetCalibratedTimestampsEXT-pTimestamps-parameter", "pTimestamps must be a valid pointer to an array of timestampCount uint64_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetCalibratedTimestampsEXT-pTimestamps-parameter)"},
+    {"VUID-vkGetCalibratedTimestampsEXT-timestampCount-arraylength", "timestampCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetCalibratedTimestampsEXT-timestampCount-arraylength)"},
+    {"VUID-vkGetDescriptorSetLayoutSupport-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDescriptorSetLayoutSupport-device-parameter)"},
+    {"VUID-vkGetDescriptorSetLayoutSupport-pCreateInfo-parameter", "pCreateInfo must be a valid pointer to a valid VkDescriptorSetLayoutCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDescriptorSetLayoutSupport-pCreateInfo-parameter)"},
+    {"VUID-vkGetDescriptorSetLayoutSupport-pSupport-parameter", "pSupport must be a valid pointer to a VkDescriptorSetLayoutSupport structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDescriptorSetLayoutSupport-pSupport-parameter)"},
+    {"VUID-vkGetDeviceGroupPeerMemoryFeatures-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceGroupPeerMemoryFeatures-device-parameter)"},
+    {"VUID-vkGetDeviceGroupPeerMemoryFeatures-heapIndex-00691", "heapIndex must be less than memoryHeapCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceGroupPeerMemoryFeatures-heapIndex-00691)"},
+    {"VUID-vkGetDeviceGroupPeerMemoryFeatures-localDeviceIndex-00692", "localDeviceIndex must be a valid device index (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceGroupPeerMemoryFeatures-localDeviceIndex-00692)"},
+    {"VUID-vkGetDeviceGroupPeerMemoryFeatures-localDeviceIndex-00694", "localDeviceIndex must not equal remoteDeviceIndex (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceGroupPeerMemoryFeatures-localDeviceIndex-00694)"},
+    {"VUID-vkGetDeviceGroupPeerMemoryFeatures-pPeerMemoryFeatures-parameter", "pPeerMemoryFeatures must be a valid pointer to a VkPeerMemoryFeatureFlags value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceGroupPeerMemoryFeatures-pPeerMemoryFeatures-parameter)"},
+    {"VUID-vkGetDeviceGroupPeerMemoryFeatures-remoteDeviceIndex-00693", "remoteDeviceIndex must be a valid device index (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceGroupPeerMemoryFeatures-remoteDeviceIndex-00693)"},
+    {"VUID-vkGetDeviceGroupPresentCapabilitiesKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceGroupPresentCapabilitiesKHR-device-parameter)"},
+    {"VUID-vkGetDeviceGroupPresentCapabilitiesKHR-pDeviceGroupPresentCapabilities-parameter", "pDeviceGroupPresentCapabilities must be a valid pointer to a VkDeviceGroupPresentCapabilitiesKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceGroupPresentCapabilitiesKHR-pDeviceGroupPresentCapabilities-parameter)"},
+    {"VUID-vkGetDeviceGroupSurfacePresentModes2EXT-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceGroupSurfacePresentModes2EXT-device-parameter)"},
+    {"VUID-vkGetDeviceGroupSurfacePresentModes2EXT-pModes-parameter", "pModes must be a valid pointer to a VkDeviceGroupPresentModeFlagsKHR value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceGroupSurfacePresentModes2EXT-pModes-parameter)"},
+    {"VUID-vkGetDeviceGroupSurfacePresentModes2EXT-pSurfaceInfo-parameter", "pSurfaceInfo must be a valid pointer to a valid VkPhysicalDeviceSurfaceInfo2KHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceGroupSurfacePresentModes2EXT-pSurfaceInfo-parameter)"},
+    {"VUID-vkGetDeviceGroupSurfacePresentModesKHR-commonparent", "Both of device, and surface must have been created, allocated, or retrieved from the same VkInstance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceGroupSurfacePresentModesKHR-commonparent)"},
+    {"VUID-vkGetDeviceGroupSurfacePresentModesKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceGroupSurfacePresentModesKHR-device-parameter)"},
+    {"VUID-vkGetDeviceGroupSurfacePresentModesKHR-pModes-parameter", "pModes must be a valid pointer to a VkDeviceGroupPresentModeFlagsKHR value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceGroupSurfacePresentModesKHR-pModes-parameter)"},
+    {"VUID-vkGetDeviceGroupSurfacePresentModesKHR-surface-parameter", "surface must be a valid VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceGroupSurfacePresentModesKHR-surface-parameter)"},
+    {"VUID-vkGetDeviceMemoryCommitment-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceMemoryCommitment-device-parameter)"},
+    {"VUID-vkGetDeviceMemoryCommitment-memory-00690", "memory must have been created with a memory type that reports VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceMemoryCommitment-memory-00690)"},
+    {"VUID-vkGetDeviceMemoryCommitment-memory-parameter", "memory must be a valid VkDeviceMemory handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceMemoryCommitment-memory-parameter)"},
+    {"VUID-vkGetDeviceMemoryCommitment-memory-parent", "memory must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceMemoryCommitment-memory-parent)"},
+    {"VUID-vkGetDeviceMemoryCommitment-pCommittedMemoryInBytes-parameter", "pCommittedMemoryInBytes must be a valid pointer to a VkDeviceSize value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceMemoryCommitment-pCommittedMemoryInBytes-parameter)"},
+    {"VUID-vkGetDeviceMemoryOpaqueCaptureAddressKHR-None-03334", "The bufferDeviceAddress feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceMemoryOpaqueCaptureAddressKHR-None-03334)"},
+    {"VUID-vkGetDeviceMemoryOpaqueCaptureAddressKHR-device-03335", "If device was created with multiple physical devices, then the bufferDeviceAddressMultiDevice feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceMemoryOpaqueCaptureAddressKHR-device-03335)"},
+    {"VUID-vkGetDeviceMemoryOpaqueCaptureAddressKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceMemoryOpaqueCaptureAddressKHR-device-parameter)"},
+    {"VUID-vkGetDeviceMemoryOpaqueCaptureAddressKHR-pInfo-parameter", "pInfo must be a valid pointer to a valid VkDeviceMemoryOpaqueCaptureAddressInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceMemoryOpaqueCaptureAddressKHR-pInfo-parameter)"},
+    {"VUID-vkGetDeviceProcAddr-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceProcAddr-device-parameter)"},
+    {"VUID-vkGetDeviceProcAddr-pName-parameter", "pName must be a null-terminated UTF-8 string (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceProcAddr-pName-parameter)"},
+    {"VUID-vkGetDeviceQueue-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceQueue-device-parameter)"},
+    {"VUID-vkGetDeviceQueue-flags-01841", "VkDeviceQueueCreateInfo::flags must have been set to zero when device was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceQueue-flags-01841)"},
+    {"VUID-vkGetDeviceQueue-pQueue-parameter", "pQueue must be a valid pointer to a VkQueue handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceQueue-pQueue-parameter)"},
+    {"VUID-vkGetDeviceQueue-queueFamilyIndex-00384", "queueFamilyIndex must be one of the queue family indices specified when device was created, via the VkDeviceQueueCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceQueue-queueFamilyIndex-00384)"},
+    {"VUID-vkGetDeviceQueue-queueIndex-00385", "queueIndex must be less than the number of queues created for the specified queue family index when device was created, via the queueCount member of the VkDeviceQueueCreateInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceQueue-queueIndex-00385)"},
+    {"VUID-vkGetDeviceQueue2-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceQueue2-device-parameter)"},
+    {"VUID-vkGetDeviceQueue2-pQueue-parameter", "pQueue must be a valid pointer to a VkQueue handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceQueue2-pQueue-parameter)"},
+    {"VUID-vkGetDeviceQueue2-pQueueInfo-parameter", "pQueueInfo must be a valid pointer to a valid VkDeviceQueueInfo2 structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDeviceQueue2-pQueueInfo-parameter)"},
+    {"VUID-vkGetDisplayModeProperties2KHR-display-parameter", "display must be a valid VkDisplayKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDisplayModeProperties2KHR-display-parameter)"},
+    {"VUID-vkGetDisplayModeProperties2KHR-pProperties-parameter", "If the value referenced by pPropertyCount is not 0, and pProperties is not NULL, pProperties must be a valid pointer to an array of pPropertyCount VkDisplayModeProperties2KHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDisplayModeProperties2KHR-pProperties-parameter)"},
+    {"VUID-vkGetDisplayModeProperties2KHR-pPropertyCount-parameter", "pPropertyCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDisplayModeProperties2KHR-pPropertyCount-parameter)"},
+    {"VUID-vkGetDisplayModeProperties2KHR-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDisplayModeProperties2KHR-physicalDevice-parameter)"},
+    {"VUID-vkGetDisplayModePropertiesKHR-display-parameter", "display must be a valid VkDisplayKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDisplayModePropertiesKHR-display-parameter)"},
+    {"VUID-vkGetDisplayModePropertiesKHR-pProperties-parameter", "If the value referenced by pPropertyCount is not 0, and pProperties is not NULL, pProperties must be a valid pointer to an array of pPropertyCount VkDisplayModePropertiesKHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDisplayModePropertiesKHR-pProperties-parameter)"},
+    {"VUID-vkGetDisplayModePropertiesKHR-pPropertyCount-parameter", "pPropertyCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDisplayModePropertiesKHR-pPropertyCount-parameter)"},
+    {"VUID-vkGetDisplayModePropertiesKHR-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDisplayModePropertiesKHR-physicalDevice-parameter)"},
+    {"VUID-vkGetDisplayPlaneCapabilities2KHR-pCapabilities-parameter", "pCapabilities must be a valid pointer to a VkDisplayPlaneCapabilities2KHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDisplayPlaneCapabilities2KHR-pCapabilities-parameter)"},
+    {"VUID-vkGetDisplayPlaneCapabilities2KHR-pDisplayPlaneInfo-parameter", "pDisplayPlaneInfo must be a valid pointer to a valid VkDisplayPlaneInfo2KHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDisplayPlaneCapabilities2KHR-pDisplayPlaneInfo-parameter)"},
+    {"VUID-vkGetDisplayPlaneCapabilities2KHR-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDisplayPlaneCapabilities2KHR-physicalDevice-parameter)"},
+    {"VUID-vkGetDisplayPlaneCapabilitiesKHR-mode-parameter", "mode must be a valid VkDisplayModeKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDisplayPlaneCapabilitiesKHR-mode-parameter)"},
+    {"VUID-vkGetDisplayPlaneCapabilitiesKHR-pCapabilities-parameter", "pCapabilities must be a valid pointer to a VkDisplayPlaneCapabilitiesKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDisplayPlaneCapabilitiesKHR-pCapabilities-parameter)"},
+    {"VUID-vkGetDisplayPlaneCapabilitiesKHR-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDisplayPlaneCapabilitiesKHR-physicalDevice-parameter)"},
+    {"VUID-vkGetDisplayPlaneSupportedDisplaysKHR-pDisplayCount-parameter", "pDisplayCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDisplayPlaneSupportedDisplaysKHR-pDisplayCount-parameter)"},
+    {"VUID-vkGetDisplayPlaneSupportedDisplaysKHR-pDisplays-parameter", "If the value referenced by pDisplayCount is not 0, and pDisplays is not NULL, pDisplays must be a valid pointer to an array of pDisplayCount VkDisplayKHR handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDisplayPlaneSupportedDisplaysKHR-pDisplays-parameter)"},
+    {"VUID-vkGetDisplayPlaneSupportedDisplaysKHR-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDisplayPlaneSupportedDisplaysKHR-physicalDevice-parameter)"},
+    {"VUID-vkGetDisplayPlaneSupportedDisplaysKHR-planeIndex-01249", "planeIndex must be less than the number of display planes supported by the device as determined by calling vkGetPhysicalDeviceDisplayPlanePropertiesKHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetDisplayPlaneSupportedDisplaysKHR-planeIndex-01249)"},
+    {"VUID-vkGetEventStatus-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetEventStatus-device-parameter)"},
+    {"VUID-vkGetEventStatus-event-parameter", "event must be a valid VkEvent handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetEventStatus-event-parameter)"},
+    {"VUID-vkGetEventStatus-event-parent", "event must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetEventStatus-event-parent)"},
+    {"VUID-vkGetFenceFdKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetFenceFdKHR-device-parameter)"},
+    {"VUID-vkGetFenceFdKHR-pFd-parameter", "pFd must be a valid pointer to an int value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetFenceFdKHR-pFd-parameter)"},
+    {"VUID-vkGetFenceFdKHR-pGetFdInfo-parameter", "pGetFdInfo must be a valid pointer to a valid VkFenceGetFdInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetFenceFdKHR-pGetFdInfo-parameter)"},
+    {"VUID-vkGetFenceStatus-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetFenceStatus-device-parameter)"},
+    {"VUID-vkGetFenceStatus-fence-parameter", "fence must be a valid VkFence handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetFenceStatus-fence-parameter)"},
+    {"VUID-vkGetFenceStatus-fence-parent", "fence must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetFenceStatus-fence-parent)"},
+    {"VUID-vkGetFenceWin32HandleKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetFenceWin32HandleKHR-device-parameter)"},
+    {"VUID-vkGetFenceWin32HandleKHR-pGetWin32HandleInfo-parameter", "pGetWin32HandleInfo must be a valid pointer to a valid VkFenceGetWin32HandleInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetFenceWin32HandleKHR-pGetWin32HandleInfo-parameter)"},
+    {"VUID-vkGetFenceWin32HandleKHR-pHandle-parameter", "pHandle must be a valid pointer to a HANDLE value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetFenceWin32HandleKHR-pHandle-parameter)"},
+    {"VUID-vkGetImageDrmFormatModifierPropertiesEXT-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageDrmFormatModifierPropertiesEXT-device-parameter)"},
+    {"VUID-vkGetImageDrmFormatModifierPropertiesEXT-image-02272", "image must have been created with tiling equal to VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageDrmFormatModifierPropertiesEXT-image-02272)"},
+    {"VUID-vkGetImageDrmFormatModifierPropertiesEXT-image-parameter", "image must be a valid VkImage handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageDrmFormatModifierPropertiesEXT-image-parameter)"},
+    {"VUID-vkGetImageDrmFormatModifierPropertiesEXT-image-parent", "image must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageDrmFormatModifierPropertiesEXT-image-parent)"},
+    {"VUID-vkGetImageDrmFormatModifierPropertiesEXT-pProperties-parameter", "pProperties must be a valid pointer to a VkImageDrmFormatModifierPropertiesEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageDrmFormatModifierPropertiesEXT-pProperties-parameter)"},
+    {"VUID-vkGetImageMemoryRequirements-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageMemoryRequirements-device-parameter)"},
+    {"VUID-vkGetImageMemoryRequirements-image-01588", "image must not have been created with the VK_IMAGE_CREATE_DISJOINT_BIT flag set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageMemoryRequirements-image-01588)"},
+    {"VUID-vkGetImageMemoryRequirements-image-parameter", "image must be a valid VkImage handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageMemoryRequirements-image-parameter)"},
+    {"VUID-vkGetImageMemoryRequirements-image-parent", "image must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageMemoryRequirements-image-parent)"},
+    {"VUID-vkGetImageMemoryRequirements-pMemoryRequirements-parameter", "pMemoryRequirements must be a valid pointer to a VkMemoryRequirements structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageMemoryRequirements-pMemoryRequirements-parameter)"},
+    {"VUID-vkGetImageMemoryRequirements2-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageMemoryRequirements2-device-parameter)"},
+    {"VUID-vkGetImageMemoryRequirements2-pInfo-parameter", "pInfo must be a valid pointer to a valid VkImageMemoryRequirementsInfo2 structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageMemoryRequirements2-pInfo-parameter)"},
+    {"VUID-vkGetImageMemoryRequirements2-pMemoryRequirements-parameter", "pMemoryRequirements must be a valid pointer to a VkMemoryRequirements2 structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageMemoryRequirements2-pMemoryRequirements-parameter)"},
+    {"VUID-vkGetImageSparseMemoryRequirements-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageSparseMemoryRequirements-device-parameter)"},
+    {"VUID-vkGetImageSparseMemoryRequirements-image-parameter", "image must be a valid VkImage handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageSparseMemoryRequirements-image-parameter)"},
+    {"VUID-vkGetImageSparseMemoryRequirements-image-parent", "image must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageSparseMemoryRequirements-image-parent)"},
+    {"VUID-vkGetImageSparseMemoryRequirements-pSparseMemoryRequirementCount-parameter", "pSparseMemoryRequirementCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageSparseMemoryRequirements-pSparseMemoryRequirementCount-parameter)"},
+    {"VUID-vkGetImageSparseMemoryRequirements-pSparseMemoryRequirements-parameter", "If the value referenced by pSparseMemoryRequirementCount is not 0, and pSparseMemoryRequirements is not NULL, pSparseMemoryRequirements must be a valid pointer to an array of pSparseMemoryRequirementCount VkSparseImageMemoryRequirements structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageSparseMemoryRequirements-pSparseMemoryRequirements-parameter)"},
+    {"VUID-vkGetImageSparseMemoryRequirements2-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageSparseMemoryRequirements2-device-parameter)"},
+    {"VUID-vkGetImageSparseMemoryRequirements2-pInfo-parameter", "pInfo must be a valid pointer to a valid VkImageSparseMemoryRequirementsInfo2 structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageSparseMemoryRequirements2-pInfo-parameter)"},
+    {"VUID-vkGetImageSparseMemoryRequirements2-pSparseMemoryRequirementCount-parameter", "pSparseMemoryRequirementCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageSparseMemoryRequirements2-pSparseMemoryRequirementCount-parameter)"},
+    {"VUID-vkGetImageSparseMemoryRequirements2-pSparseMemoryRequirements-parameter", "If the value referenced by pSparseMemoryRequirementCount is not 0, and pSparseMemoryRequirements is not NULL, pSparseMemoryRequirements must be a valid pointer to an array of pSparseMemoryRequirementCount VkSparseImageMemoryRequirements2 structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageSparseMemoryRequirements2-pSparseMemoryRequirements-parameter)"},
+    {"VUID-vkGetImageSubresourceLayout-arrayLayer-01717", "The arrayLayer member of pSubresource must be less than the arrayLayers specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageSubresourceLayout-arrayLayer-01717)"},
+    {"VUID-vkGetImageSubresourceLayout-aspectMask-00997", "The aspectMask member of pSubresource must only have a single bit set (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageSubresourceLayout-aspectMask-00997)"},
+    {"VUID-vkGetImageSubresourceLayout-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageSubresourceLayout-device-parameter)"},
+    {"VUID-vkGetImageSubresourceLayout-format-01581", "If the tiling of the image is VK_IMAGE_TILING_LINEAR and its format is a multi-planar format with two planes, the aspectMask member of pSubresource must be VK_IMAGE_ASPECT_PLANE_0_BIT or VK_IMAGE_ASPECT_PLANE_1_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageSubresourceLayout-format-01581)"},
+    {"VUID-vkGetImageSubresourceLayout-format-01582", "If the tiling of the image is VK_IMAGE_TILING_LINEAR and its format is a multi-planar format with three planes, the aspectMask member of pSubresource must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT or VK_IMAGE_ASPECT_PLANE_2_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageSubresourceLayout-format-01582)"},
+    {"VUID-vkGetImageSubresourceLayout-image-00996", "image must have been created with tiling equal to VK_IMAGE_TILING_LINEAR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageSubresourceLayout-image-00996)"},
+    {"VUID-vkGetImageSubresourceLayout-image-01895", "If image was created with the VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID external memory handle type, then image must be bound to memory. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageSubresourceLayout-image-01895)"},
+    {"VUID-vkGetImageSubresourceLayout-image-02270", "image must have been created with tiling equal to VK_IMAGE_TILING_LINEAR or VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageSubresourceLayout-image-02270)"},
+    {"VUID-vkGetImageSubresourceLayout-image-parameter", "image must be a valid VkImage handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageSubresourceLayout-image-parameter)"},
+    {"VUID-vkGetImageSubresourceLayout-image-parent", "image must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageSubresourceLayout-image-parent)"},
+    {"VUID-vkGetImageSubresourceLayout-mipLevel-01716", "The mipLevel member of pSubresource must be less than the mipLevels specified in VkImageCreateInfo when image was created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageSubresourceLayout-mipLevel-01716)"},
+    {"VUID-vkGetImageSubresourceLayout-pLayout-parameter", "pLayout must be a valid pointer to a VkSubresourceLayout structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageSubresourceLayout-pLayout-parameter)"},
+    {"VUID-vkGetImageSubresourceLayout-pSubresource-parameter", "pSubresource must be a valid pointer to a valid VkImageSubresource structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageSubresourceLayout-pSubresource-parameter)"},
+    {"VUID-vkGetImageSubresourceLayout-tiling-02271", "If the tiling of the image is  VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT, then the aspectMask  member of pSubresource must be  VK_IMAGE_ASPECT_MEMORY_PLANE_i_BIT_EXT and the index i must  be less than the  drmFormatModifierPlaneCount  associated with the image's format and drmFormatModifier. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageSubresourceLayout-tiling-02271)"},
+    {"VUID-vkGetImageViewHandleNVX-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageViewHandleNVX-device-parameter)"},
+    {"VUID-vkGetImageViewHandleNVX-pInfo-parameter", "pInfo must be a valid pointer to a valid VkImageViewHandleInfoNVX structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetImageViewHandleNVX-pInfo-parameter)"},
+    {"VUID-vkGetInstanceProcAddr-instance-parameter", "If instance is not NULL, instance must be a valid VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetInstanceProcAddr-instance-parameter)"},
+    {"VUID-vkGetInstanceProcAddr-pName-parameter", "pName must be a null-terminated UTF-8 string (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetInstanceProcAddr-pName-parameter)"},
+    {"VUID-vkGetMemoryAndroidHardwareBufferANDROID-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryAndroidHardwareBufferANDROID-device-parameter)"},
+    {"VUID-vkGetMemoryAndroidHardwareBufferANDROID-pBuffer-parameter", "pBuffer must be a valid pointer to a valid pointer to an AHardwareBuffer value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryAndroidHardwareBufferANDROID-pBuffer-parameter)"},
+    {"VUID-vkGetMemoryAndroidHardwareBufferANDROID-pInfo-parameter", "pInfo must be a valid pointer to a valid VkMemoryGetAndroidHardwareBufferInfoANDROID structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryAndroidHardwareBufferANDROID-pInfo-parameter)"},
+    {"VUID-vkGetMemoryFdKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryFdKHR-device-parameter)"},
+    {"VUID-vkGetMemoryFdKHR-pFd-parameter", "pFd must be a valid pointer to an int value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryFdKHR-pFd-parameter)"},
+    {"VUID-vkGetMemoryFdKHR-pGetFdInfo-parameter", "pGetFdInfo must be a valid pointer to a valid VkMemoryGetFdInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryFdKHR-pGetFdInfo-parameter)"},
+    {"VUID-vkGetMemoryFdPropertiesKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryFdPropertiesKHR-device-parameter)"},
+    {"VUID-vkGetMemoryFdPropertiesKHR-fd-00673", "fd must be an external memory handle created outside of the Vulkan API. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryFdPropertiesKHR-fd-00673)"},
+    {"VUID-vkGetMemoryFdPropertiesKHR-handleType-00674", "handleType must not be VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryFdPropertiesKHR-handleType-00674)"},
+    {"VUID-vkGetMemoryFdPropertiesKHR-handleType-parameter", "handleType must be a valid VkExternalMemoryHandleTypeFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryFdPropertiesKHR-handleType-parameter)"},
+    {"VUID-vkGetMemoryFdPropertiesKHR-pMemoryFdProperties-parameter", "pMemoryFdProperties must be a valid pointer to a VkMemoryFdPropertiesKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryFdPropertiesKHR-pMemoryFdProperties-parameter)"},
+    {"VUID-vkGetMemoryHostPointerPropertiesEXT-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryHostPointerPropertiesEXT-device-parameter)"},
+    {"VUID-vkGetMemoryHostPointerPropertiesEXT-handleType-01752", "handleType must be VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT or VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryHostPointerPropertiesEXT-handleType-01752)"},
+    {"VUID-vkGetMemoryHostPointerPropertiesEXT-handleType-01754", "If handleType is VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT, pHostPointer must be a pointer to host memory (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryHostPointerPropertiesEXT-handleType-01754)"},
+    {"VUID-vkGetMemoryHostPointerPropertiesEXT-handleType-01755", "If handleType is VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT, pHostPointer must be a pointer to host mapped foreign memory (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryHostPointerPropertiesEXT-handleType-01755)"},
+    {"VUID-vkGetMemoryHostPointerPropertiesEXT-handleType-parameter", "handleType must be a valid VkExternalMemoryHandleTypeFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryHostPointerPropertiesEXT-handleType-parameter)"},
+    {"VUID-vkGetMemoryHostPointerPropertiesEXT-pHostPointer-01753", "pHostPointer must be a pointer aligned to an integer multiple of VkPhysicalDeviceExternalMemoryHostPropertiesEXT::minImportedHostPointerAlignment (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryHostPointerPropertiesEXT-pHostPointer-01753)"},
+    {"VUID-vkGetMemoryHostPointerPropertiesEXT-pMemoryHostPointerProperties-parameter", "pMemoryHostPointerProperties must be a valid pointer to a VkMemoryHostPointerPropertiesEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryHostPointerPropertiesEXT-pMemoryHostPointerProperties-parameter)"},
+    {"VUID-vkGetMemoryWin32HandleKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryWin32HandleKHR-device-parameter)"},
+    {"VUID-vkGetMemoryWin32HandleKHR-pGetWin32HandleInfo-parameter", "pGetWin32HandleInfo must be a valid pointer to a valid VkMemoryGetWin32HandleInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryWin32HandleKHR-pGetWin32HandleInfo-parameter)"},
+    {"VUID-vkGetMemoryWin32HandleKHR-pHandle-parameter", "pHandle must be a valid pointer to a HANDLE value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryWin32HandleKHR-pHandle-parameter)"},
+    {"VUID-vkGetMemoryWin32HandleNV-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryWin32HandleNV-device-parameter)"},
+    {"VUID-vkGetMemoryWin32HandleNV-handleType-01326", "handleType must be a flag specified in VkExportMemoryAllocateInfoNV::handleTypes when allocating memory (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryWin32HandleNV-handleType-01326)"},
+    {"VUID-vkGetMemoryWin32HandleNV-handleType-parameter", "handleType must be a valid combination of VkExternalMemoryHandleTypeFlagBitsNV values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryWin32HandleNV-handleType-parameter)"},
+    {"VUID-vkGetMemoryWin32HandleNV-handleType-requiredbitmask", "handleType must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryWin32HandleNV-handleType-requiredbitmask)"},
+    {"VUID-vkGetMemoryWin32HandleNV-memory-parameter", "memory must be a valid VkDeviceMemory handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryWin32HandleNV-memory-parameter)"},
+    {"VUID-vkGetMemoryWin32HandleNV-memory-parent", "memory must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryWin32HandleNV-memory-parent)"},
+    {"VUID-vkGetMemoryWin32HandleNV-pHandle-parameter", "pHandle must be a valid pointer to a HANDLE value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryWin32HandleNV-pHandle-parameter)"},
+    {"VUID-vkGetMemoryWin32HandlePropertiesKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryWin32HandlePropertiesKHR-device-parameter)"},
+    {"VUID-vkGetMemoryWin32HandlePropertiesKHR-handle-00665", "handle must be an external memory handle created outside of the Vulkan API. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryWin32HandlePropertiesKHR-handle-00665)"},
+    {"VUID-vkGetMemoryWin32HandlePropertiesKHR-handleType-00666", "handleType must not be one of the handle types defined as opaque. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryWin32HandlePropertiesKHR-handleType-00666)"},
+    {"VUID-vkGetMemoryWin32HandlePropertiesKHR-handleType-parameter", "handleType must be a valid VkExternalMemoryHandleTypeFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryWin32HandlePropertiesKHR-handleType-parameter)"},
+    {"VUID-vkGetMemoryWin32HandlePropertiesKHR-pMemoryWin32HandleProperties-parameter", "pMemoryWin32HandleProperties must be a valid pointer to a VkMemoryWin32HandlePropertiesKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetMemoryWin32HandlePropertiesKHR-pMemoryWin32HandleProperties-parameter)"},
+    {"VUID-vkGetPastPresentationTimingGOOGLE-commonparent", "Both of device, and swapchain must have been created, allocated, or retrieved from the same VkInstance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPastPresentationTimingGOOGLE-commonparent)"},
+    {"VUID-vkGetPastPresentationTimingGOOGLE-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPastPresentationTimingGOOGLE-device-parameter)"},
+    {"VUID-vkGetPastPresentationTimingGOOGLE-pPresentationTimingCount-parameter", "pPresentationTimingCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPastPresentationTimingGOOGLE-pPresentationTimingCount-parameter)"},
+    {"VUID-vkGetPastPresentationTimingGOOGLE-pPresentationTimings-parameter", "If the value referenced by pPresentationTimingCount is not 0, and pPresentationTimings is not NULL, pPresentationTimings must be a valid pointer to an array of pPresentationTimingCount VkPastPresentationTimingGOOGLE structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPastPresentationTimingGOOGLE-pPresentationTimings-parameter)"},
+    {"VUID-vkGetPastPresentationTimingGOOGLE-swapchain-parameter", "swapchain must be a valid VkSwapchainKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPastPresentationTimingGOOGLE-swapchain-parameter)"},
+    {"VUID-vkGetPerformanceParameterINTEL-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPerformanceParameterINTEL-device-parameter)"},
+    {"VUID-vkGetPerformanceParameterINTEL-pValue-parameter", "pValue must be a valid pointer to a VkPerformanceValueINTEL structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPerformanceParameterINTEL-pValue-parameter)"},
+    {"VUID-vkGetPerformanceParameterINTEL-parameter-parameter", "parameter must be a valid VkPerformanceParameterTypeINTEL value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPerformanceParameterINTEL-parameter-parameter)"},
+    {"VUID-vkGetPhysicalDeviceCalibrateableTimeDomainsEXT-pTimeDomainCount-parameter", "pTimeDomainCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceCalibrateableTimeDomainsEXT-pTimeDomainCount-parameter)"},
+    {"VUID-vkGetPhysicalDeviceCalibrateableTimeDomainsEXT-pTimeDomains-parameter", "If the value referenced by pTimeDomainCount is not 0, and pTimeDomains is not NULL, pTimeDomains must be a valid pointer to an array of pTimeDomainCount VkTimeDomainEXT values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceCalibrateableTimeDomainsEXT-pTimeDomains-parameter)"},
+    {"VUID-vkGetPhysicalDeviceCalibrateableTimeDomainsEXT-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceCalibrateableTimeDomainsEXT-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceCooperativeMatrixPropertiesNV-pProperties-parameter", "If the value referenced by pPropertyCount is not 0, and pProperties is not NULL, pProperties must be a valid pointer to an array of pPropertyCount VkCooperativeMatrixPropertiesNV structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceCooperativeMatrixPropertiesNV-pProperties-parameter)"},
+    {"VUID-vkGetPhysicalDeviceCooperativeMatrixPropertiesNV-pPropertyCount-parameter", "pPropertyCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceCooperativeMatrixPropertiesNV-pPropertyCount-parameter)"},
+    {"VUID-vkGetPhysicalDeviceCooperativeMatrixPropertiesNV-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceCooperativeMatrixPropertiesNV-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceDisplayPlaneProperties2KHR-pProperties-parameter", "If the value referenced by pPropertyCount is not 0, and pProperties is not NULL, pProperties must be a valid pointer to an array of pPropertyCount VkDisplayPlaneProperties2KHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceDisplayPlaneProperties2KHR-pProperties-parameter)"},
+    {"VUID-vkGetPhysicalDeviceDisplayPlaneProperties2KHR-pPropertyCount-parameter", "pPropertyCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceDisplayPlaneProperties2KHR-pPropertyCount-parameter)"},
+    {"VUID-vkGetPhysicalDeviceDisplayPlaneProperties2KHR-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceDisplayPlaneProperties2KHR-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceDisplayPlanePropertiesKHR-pProperties-parameter", "If the value referenced by pPropertyCount is not 0, and pProperties is not NULL, pProperties must be a valid pointer to an array of pPropertyCount VkDisplayPlanePropertiesKHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceDisplayPlanePropertiesKHR-pProperties-parameter)"},
+    {"VUID-vkGetPhysicalDeviceDisplayPlanePropertiesKHR-pPropertyCount-parameter", "pPropertyCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceDisplayPlanePropertiesKHR-pPropertyCount-parameter)"},
+    {"VUID-vkGetPhysicalDeviceDisplayPlanePropertiesKHR-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceDisplayPlanePropertiesKHR-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceDisplayProperties2KHR-pProperties-parameter", "If the value referenced by pPropertyCount is not 0, and pProperties is not NULL, pProperties must be a valid pointer to an array of pPropertyCount VkDisplayProperties2KHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceDisplayProperties2KHR-pProperties-parameter)"},
+    {"VUID-vkGetPhysicalDeviceDisplayProperties2KHR-pPropertyCount-parameter", "pPropertyCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceDisplayProperties2KHR-pPropertyCount-parameter)"},
+    {"VUID-vkGetPhysicalDeviceDisplayProperties2KHR-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceDisplayProperties2KHR-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceDisplayPropertiesKHR-pProperties-parameter", "If the value referenced by pPropertyCount is not 0, and pProperties is not NULL, pProperties must be a valid pointer to an array of pPropertyCount VkDisplayPropertiesKHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceDisplayPropertiesKHR-pProperties-parameter)"},
+    {"VUID-vkGetPhysicalDeviceDisplayPropertiesKHR-pPropertyCount-parameter", "pPropertyCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceDisplayPropertiesKHR-pPropertyCount-parameter)"},
+    {"VUID-vkGetPhysicalDeviceDisplayPropertiesKHR-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceDisplayPropertiesKHR-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceExternalBufferProperties-pExternalBufferInfo-parameter", "pExternalBufferInfo must be a valid pointer to a valid VkPhysicalDeviceExternalBufferInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceExternalBufferProperties-pExternalBufferInfo-parameter)"},
+    {"VUID-vkGetPhysicalDeviceExternalBufferProperties-pExternalBufferProperties-parameter", "pExternalBufferProperties must be a valid pointer to a VkExternalBufferProperties structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceExternalBufferProperties-pExternalBufferProperties-parameter)"},
+    {"VUID-vkGetPhysicalDeviceExternalBufferProperties-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceExternalBufferProperties-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceExternalFenceProperties-pExternalFenceInfo-parameter", "pExternalFenceInfo must be a valid pointer to a valid VkPhysicalDeviceExternalFenceInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceExternalFenceProperties-pExternalFenceInfo-parameter)"},
+    {"VUID-vkGetPhysicalDeviceExternalFenceProperties-pExternalFenceProperties-parameter", "pExternalFenceProperties must be a valid pointer to a VkExternalFenceProperties structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceExternalFenceProperties-pExternalFenceProperties-parameter)"},
+    {"VUID-vkGetPhysicalDeviceExternalFenceProperties-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceExternalFenceProperties-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-externalHandleType-parameter", "externalHandleType must be a valid combination of VkExternalMemoryHandleTypeFlagBitsNV values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-externalHandleType-parameter)"},
+    {"VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-flags-parameter", "flags must be a valid combination of VkImageCreateFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-flags-parameter)"},
+    {"VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-format-parameter", "format must be a valid VkFormat value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-format-parameter)"},
+    {"VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-pExternalImageFormatProperties-parameter", "pExternalImageFormatProperties must be a valid pointer to a VkExternalImageFormatPropertiesNV structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-pExternalImageFormatProperties-parameter)"},
+    {"VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-tiling-parameter", "tiling must be a valid VkImageTiling value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-tiling-parameter)"},
+    {"VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-type-parameter", "type must be a valid VkImageType value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-type-parameter)"},
+    {"VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-usage-parameter", "usage must be a valid combination of VkImageUsageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-usage-parameter)"},
+    {"VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-usage-requiredbitmask", "usage must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-usage-requiredbitmask)"},
+    {"VUID-vkGetPhysicalDeviceExternalSemaphoreProperties-pExternalSemaphoreInfo-parameter", "pExternalSemaphoreInfo must be a valid pointer to a valid VkPhysicalDeviceExternalSemaphoreInfo structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceExternalSemaphoreProperties-pExternalSemaphoreInfo-parameter)"},
+    {"VUID-vkGetPhysicalDeviceExternalSemaphoreProperties-pExternalSemaphoreProperties-parameter", "pExternalSemaphoreProperties must be a valid pointer to a VkExternalSemaphoreProperties structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceExternalSemaphoreProperties-pExternalSemaphoreProperties-parameter)"},
+    {"VUID-vkGetPhysicalDeviceExternalSemaphoreProperties-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceExternalSemaphoreProperties-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceFeatures-pFeatures-parameter", "pFeatures must be a valid pointer to a VkPhysicalDeviceFeatures structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceFeatures-pFeatures-parameter)"},
+    {"VUID-vkGetPhysicalDeviceFeatures-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceFeatures-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceFeatures2-pFeatures-parameter", "pFeatures must be a valid pointer to a VkPhysicalDeviceFeatures2 structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceFeatures2-pFeatures-parameter)"},
+    {"VUID-vkGetPhysicalDeviceFeatures2-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceFeatures2-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceFormatProperties-format-parameter", "format must be a valid VkFormat value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceFormatProperties-format-parameter)"},
+    {"VUID-vkGetPhysicalDeviceFormatProperties-pFormatProperties-parameter", "pFormatProperties must be a valid pointer to a VkFormatProperties structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceFormatProperties-pFormatProperties-parameter)"},
+    {"VUID-vkGetPhysicalDeviceFormatProperties-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceFormatProperties-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceFormatProperties2-format-parameter", "format must be a valid VkFormat value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceFormatProperties2-format-parameter)"},
+    {"VUID-vkGetPhysicalDeviceFormatProperties2-pFormatProperties-parameter", "pFormatProperties must be a valid pointer to a VkFormatProperties2 structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceFormatProperties2-pFormatProperties-parameter)"},
+    {"VUID-vkGetPhysicalDeviceFormatProperties2-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceFormatProperties2-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX-pFeatures-parameter", "pFeatures must be a valid pointer to a VkDeviceGeneratedCommandsFeaturesNVX structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX-pFeatures-parameter)"},
+    {"VUID-vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX-pLimits-parameter", "pLimits must be a valid pointer to a VkDeviceGeneratedCommandsLimitsNVX structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX-pLimits-parameter)"},
+    {"VUID-vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceImageFormatProperties-flags-parameter", "flags must be a valid combination of VkImageCreateFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceImageFormatProperties-flags-parameter)"},
+    {"VUID-vkGetPhysicalDeviceImageFormatProperties-format-parameter", "format must be a valid VkFormat value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceImageFormatProperties-format-parameter)"},
+    {"VUID-vkGetPhysicalDeviceImageFormatProperties-pImageFormatProperties-parameter", "pImageFormatProperties must be a valid pointer to a VkImageFormatProperties structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceImageFormatProperties-pImageFormatProperties-parameter)"},
+    {"VUID-vkGetPhysicalDeviceImageFormatProperties-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceImageFormatProperties-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceImageFormatProperties-tiling-02248", "tiling must not be VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT. (Use vkGetPhysicalDeviceImageFormatProperties2 instead). (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceImageFormatProperties-tiling-02248)"},
+    {"VUID-vkGetPhysicalDeviceImageFormatProperties-tiling-parameter", "tiling must be a valid VkImageTiling value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceImageFormatProperties-tiling-parameter)"},
+    {"VUID-vkGetPhysicalDeviceImageFormatProperties-type-parameter", "type must be a valid VkImageType value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceImageFormatProperties-type-parameter)"},
+    {"VUID-vkGetPhysicalDeviceImageFormatProperties-usage-parameter", "usage must be a valid combination of VkImageUsageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceImageFormatProperties-usage-parameter)"},
+    {"VUID-vkGetPhysicalDeviceImageFormatProperties-usage-requiredbitmask", "usage must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceImageFormatProperties-usage-requiredbitmask)"},
+    {"VUID-vkGetPhysicalDeviceImageFormatProperties2-pImageFormatInfo-parameter", "pImageFormatInfo must be a valid pointer to a valid VkPhysicalDeviceImageFormatInfo2 structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceImageFormatProperties2-pImageFormatInfo-parameter)"},
+    {"VUID-vkGetPhysicalDeviceImageFormatProperties2-pImageFormatProperties-parameter", "pImageFormatProperties must be a valid pointer to a VkImageFormatProperties2 structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceImageFormatProperties2-pImageFormatProperties-parameter)"},
+    {"VUID-vkGetPhysicalDeviceImageFormatProperties2-pNext-01868", "If the pNext chain of pImageFormatProperties includes a VkAndroidHardwareBufferUsageANDROID structure, the pNext chain of pImageFormatInfo must include a VkPhysicalDeviceExternalImageFormatInfo structure with handleType set to VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceImageFormatProperties2-pNext-01868)"},
+    {"VUID-vkGetPhysicalDeviceImageFormatProperties2-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceImageFormatProperties2-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceMemoryProperties-pMemoryProperties-parameter", "pMemoryProperties must be a valid pointer to a VkPhysicalDeviceMemoryProperties structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceMemoryProperties-pMemoryProperties-parameter)"},
+    {"VUID-vkGetPhysicalDeviceMemoryProperties-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceMemoryProperties-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceMemoryProperties2-pMemoryProperties-parameter", "pMemoryProperties must be a valid pointer to a VkPhysicalDeviceMemoryProperties2 structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceMemoryProperties2-pMemoryProperties-parameter)"},
+    {"VUID-vkGetPhysicalDeviceMemoryProperties2-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceMemoryProperties2-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceMultisamplePropertiesEXT-pMultisampleProperties-parameter", "pMultisampleProperties must be a valid pointer to a VkMultisamplePropertiesEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceMultisamplePropertiesEXT-pMultisampleProperties-parameter)"},
+    {"VUID-vkGetPhysicalDeviceMultisamplePropertiesEXT-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceMultisamplePropertiesEXT-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceMultisamplePropertiesEXT-samples-parameter", "samples must be a valid VkSampleCountFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceMultisamplePropertiesEXT-samples-parameter)"},
+    {"VUID-vkGetPhysicalDevicePresentRectanglesKHR-commonparent", "Both of physicalDevice, and surface must have been created, allocated, or retrieved from the same VkInstance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDevicePresentRectanglesKHR-commonparent)"},
+    {"VUID-vkGetPhysicalDevicePresentRectanglesKHR-pRectCount-parameter", "pRectCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDevicePresentRectanglesKHR-pRectCount-parameter)"},
+    {"VUID-vkGetPhysicalDevicePresentRectanglesKHR-pRects-parameter", "If the value referenced by pRectCount is not 0, and pRects is not NULL, pRects must be a valid pointer to an array of pRectCount VkRect2D structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDevicePresentRectanglesKHR-pRects-parameter)"},
+    {"VUID-vkGetPhysicalDevicePresentRectanglesKHR-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDevicePresentRectanglesKHR-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDevicePresentRectanglesKHR-surface-parameter", "surface must be a valid VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDevicePresentRectanglesKHR-surface-parameter)"},
+    {"VUID-vkGetPhysicalDeviceProperties-pProperties-parameter", "pProperties must be a valid pointer to a VkPhysicalDeviceProperties structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceProperties-pProperties-parameter)"},
+    {"VUID-vkGetPhysicalDeviceProperties-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceProperties-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceProperties2-pProperties-parameter", "pProperties must be a valid pointer to a VkPhysicalDeviceProperties2 structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceProperties2-pProperties-parameter)"},
+    {"VUID-vkGetPhysicalDeviceProperties2-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceProperties2-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR-pNumPasses-parameter", "pNumPasses must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR-pNumPasses-parameter)"},
+    {"VUID-vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR-pPerformanceQueryCreateInfo-parameter", "pPerformanceQueryCreateInfo must be a valid pointer to a valid VkQueryPoolPerformanceCreateInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR-pPerformanceQueryCreateInfo-parameter)"},
+    {"VUID-vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceQueueFamilyProperties-pQueueFamilyProperties-parameter", "If the value referenced by pQueueFamilyPropertyCount is not 0, and pQueueFamilyProperties is not NULL, pQueueFamilyProperties must be a valid pointer to an array of pQueueFamilyPropertyCount VkQueueFamilyProperties structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceQueueFamilyProperties-pQueueFamilyProperties-parameter)"},
+    {"VUID-vkGetPhysicalDeviceQueueFamilyProperties-pQueueFamilyPropertyCount-parameter", "pQueueFamilyPropertyCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceQueueFamilyProperties-pQueueFamilyPropertyCount-parameter)"},
+    {"VUID-vkGetPhysicalDeviceQueueFamilyProperties-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceQueueFamilyProperties-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceQueueFamilyProperties2-pQueueFamilyProperties-parameter", "If the value referenced by pQueueFamilyPropertyCount is not 0, and pQueueFamilyProperties is not NULL, pQueueFamilyProperties must be a valid pointer to an array of pQueueFamilyPropertyCount VkQueueFamilyProperties2 structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceQueueFamilyProperties2-pQueueFamilyProperties-parameter)"},
+    {"VUID-vkGetPhysicalDeviceQueueFamilyProperties2-pQueueFamilyPropertyCount-parameter", "pQueueFamilyPropertyCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceQueueFamilyProperties2-pQueueFamilyPropertyCount-parameter)"},
+    {"VUID-vkGetPhysicalDeviceQueueFamilyProperties2-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceQueueFamilyProperties2-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSparseImageFormatProperties-format-parameter", "format must be a valid VkFormat value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSparseImageFormatProperties-format-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSparseImageFormatProperties-pProperties-parameter", "If the value referenced by pPropertyCount is not 0, and pProperties is not NULL, pProperties must be a valid pointer to an array of pPropertyCount VkSparseImageFormatProperties structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSparseImageFormatProperties-pProperties-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSparseImageFormatProperties-pPropertyCount-parameter", "pPropertyCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSparseImageFormatProperties-pPropertyCount-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSparseImageFormatProperties-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSparseImageFormatProperties-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSparseImageFormatProperties-samples-01094", "samples must be a bit value that is set in VkImageFormatProperties::sampleCounts returned by vkGetPhysicalDeviceImageFormatProperties with format, type, tiling, and usage equal to those in this command and flags equal to the value that is set in VkImageCreateInfo::flags when the image is created (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSparseImageFormatProperties-samples-01094)"},
+    {"VUID-vkGetPhysicalDeviceSparseImageFormatProperties-samples-parameter", "samples must be a valid VkSampleCountFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSparseImageFormatProperties-samples-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSparseImageFormatProperties-tiling-parameter", "tiling must be a valid VkImageTiling value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSparseImageFormatProperties-tiling-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSparseImageFormatProperties-type-parameter", "type must be a valid VkImageType value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSparseImageFormatProperties-type-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSparseImageFormatProperties-usage-parameter", "usage must be a valid combination of VkImageUsageFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSparseImageFormatProperties-usage-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSparseImageFormatProperties-usage-requiredbitmask", "usage must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSparseImageFormatProperties-usage-requiredbitmask)"},
+    {"VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-pFormatInfo-parameter", "pFormatInfo must be a valid pointer to a valid VkPhysicalDeviceSparseImageFormatInfo2 structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-pFormatInfo-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-pProperties-parameter", "If the value referenced by pPropertyCount is not 0, and pProperties is not NULL, pProperties must be a valid pointer to an array of pPropertyCount VkSparseImageFormatProperties2 structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-pProperties-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-pPropertyCount-parameter", "pPropertyCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-pPropertyCount-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV-pCombinationCount-parameter", "pCombinationCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV-pCombinationCount-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV-pCombinations-parameter", "If the value referenced by pCombinationCount is not 0, and pCombinations is not NULL, pCombinations must be a valid pointer to an array of pCombinationCount VkFramebufferMixedSamplesCombinationNV structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV-pCombinations-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-commonparent", "Both of physicalDevice, and surface must have been created, allocated, or retrieved from the same VkInstance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-commonparent)"},
+    {"VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-pSurfaceCapabilities-parameter", "pSurfaceCapabilities must be a valid pointer to a VkSurfaceCapabilities2EXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-pSurfaceCapabilities-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-surface-parameter", "surface must be a valid VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-surface-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSurfaceCapabilities2KHR-pNext-02671", "If a VkSurfaceCapabilitiesFullScreenExclusiveEXT structure is included in the pNext chain of pSurfaceCapabilities, a VkSurfaceFullScreenExclusiveWin32InfoEXT structure must be included in the pNext chain of pSurfaceInfo. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceCapabilities2KHR-pNext-02671)"},
+    {"VUID-vkGetPhysicalDeviceSurfaceCapabilities2KHR-pSurfaceCapabilities-parameter", "pSurfaceCapabilities must be a valid pointer to a VkSurfaceCapabilities2KHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceCapabilities2KHR-pSurfaceCapabilities-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSurfaceCapabilities2KHR-pSurfaceInfo-parameter", "pSurfaceInfo must be a valid pointer to a valid VkPhysicalDeviceSurfaceInfo2KHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceCapabilities2KHR-pSurfaceInfo-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSurfaceCapabilities2KHR-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceCapabilities2KHR-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSurfaceCapabilitiesKHR-commonparent", "Both of physicalDevice, and surface must have been created, allocated, or retrieved from the same VkInstance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceCapabilitiesKHR-commonparent)"},
+    {"VUID-vkGetPhysicalDeviceSurfaceCapabilitiesKHR-pSurfaceCapabilities-parameter", "pSurfaceCapabilities must be a valid pointer to a VkSurfaceCapabilitiesKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceCapabilitiesKHR-pSurfaceCapabilities-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSurfaceCapabilitiesKHR-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceCapabilitiesKHR-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSurfaceCapabilitiesKHR-surface-parameter", "surface must be a valid VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceCapabilitiesKHR-surface-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-pSurfaceFormatCount-parameter", "pSurfaceFormatCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-pSurfaceFormatCount-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-pSurfaceFormats-parameter", "If the value referenced by pSurfaceFormatCount is not 0, and pSurfaceFormats is not NULL, pSurfaceFormats must be a valid pointer to an array of pSurfaceFormatCount VkSurfaceFormat2KHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-pSurfaceFormats-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-pSurfaceInfo-02740", "pSurfaceInfo->surface must be supported by physicalDevice, as reported by vkGetPhysicalDeviceSurfaceSupportKHR or an equivalent platform-specific mechanism. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-pSurfaceInfo-02740)"},
+    {"VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-pSurfaceInfo-parameter", "pSurfaceInfo must be a valid pointer to a valid VkPhysicalDeviceSurfaceInfo2KHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-pSurfaceInfo-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-commonparent", "Both of physicalDevice, and surface must have been created, allocated, or retrieved from the same VkInstance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-commonparent)"},
+    {"VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-pSurfaceFormatCount-parameter", "pSurfaceFormatCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-pSurfaceFormatCount-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-pSurfaceFormats-parameter", "If the value referenced by pSurfaceFormatCount is not 0, and pSurfaceFormats is not NULL, pSurfaceFormats must be a valid pointer to an array of pSurfaceFormatCount VkSurfaceFormatKHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-pSurfaceFormats-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-surface-02739", "surface must be supported by physicalDevice, as reported by vkGetPhysicalDeviceSurfaceSupportKHR or an equivalent platform-specific mechanism. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-surface-02739)"},
+    {"VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-surface-parameter", "surface must be a valid VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-surface-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSurfacePresentModes2EXT-pPresentModeCount-parameter", "pPresentModeCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfacePresentModes2EXT-pPresentModeCount-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSurfacePresentModes2EXT-pPresentModes-parameter", "If the value referenced by pPresentModeCount is not 0, and pPresentModes is not NULL, pPresentModes must be a valid pointer to an array of pPresentModeCount VkPresentModeKHR values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfacePresentModes2EXT-pPresentModes-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSurfacePresentModes2EXT-pSurfaceInfo-parameter", "pSurfaceInfo must be a valid pointer to a valid VkPhysicalDeviceSurfaceInfo2KHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfacePresentModes2EXT-pSurfaceInfo-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSurfacePresentModes2EXT-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfacePresentModes2EXT-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-commonparent", "Both of physicalDevice, and surface must have been created, allocated, or retrieved from the same VkInstance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-commonparent)"},
+    {"VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-pPresentModeCount-parameter", "pPresentModeCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-pPresentModeCount-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-pPresentModes-parameter", "If the value referenced by pPresentModeCount is not 0, and pPresentModes is not NULL, pPresentModes must be a valid pointer to an array of pPresentModeCount VkPresentModeKHR values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-pPresentModes-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-surface-parameter", "surface must be a valid VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-surface-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSurfaceSupportKHR-commonparent", "Both of physicalDevice, and surface must have been created, allocated, or retrieved from the same VkInstance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceSupportKHR-commonparent)"},
+    {"VUID-vkGetPhysicalDeviceSurfaceSupportKHR-pSupported-parameter", "pSupported must be a valid pointer to a VkBool32 value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceSupportKHR-pSupported-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSurfaceSupportKHR-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceSupportKHR-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceSurfaceSupportKHR-queueFamilyIndex-01269", "queueFamilyIndex must be less than pQueueFamilyPropertyCount returned by vkGetPhysicalDeviceQueueFamilyProperties for the given physicalDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceSupportKHR-queueFamilyIndex-01269)"},
+    {"VUID-vkGetPhysicalDeviceSurfaceSupportKHR-surface-parameter", "surface must be a valid VkSurfaceKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceSurfaceSupportKHR-surface-parameter)"},
+    {"VUID-vkGetPhysicalDeviceToolPropertiesEXT-pToolCount-arraylength", "If pToolProperties is not NULL, the value referenced by pToolCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceToolPropertiesEXT-pToolCount-arraylength)"},
+    {"VUID-vkGetPhysicalDeviceToolPropertiesEXT-pToolCount-parameter", "pToolCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceToolPropertiesEXT-pToolCount-parameter)"},
+    {"VUID-vkGetPhysicalDeviceToolPropertiesEXT-pToolProperties-parameter", "If pToolProperties is not NULL, pToolProperties must be a valid pointer to an array of pToolCount VkPhysicalDeviceToolPropertiesEXT structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceToolPropertiesEXT-pToolProperties-parameter)"},
+    {"VUID-vkGetPhysicalDeviceToolPropertiesEXT-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceToolPropertiesEXT-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceWaylandPresentationSupportKHR-display-parameter", "display must be a valid pointer to a wl_display value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceWaylandPresentationSupportKHR-display-parameter)"},
+    {"VUID-vkGetPhysicalDeviceWaylandPresentationSupportKHR-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceWaylandPresentationSupportKHR-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceWaylandPresentationSupportKHR-queueFamilyIndex-01306", "queueFamilyIndex must be less than pQueueFamilyPropertyCount returned by vkGetPhysicalDeviceQueueFamilyProperties for the given physicalDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceWaylandPresentationSupportKHR-queueFamilyIndex-01306)"},
+    {"VUID-vkGetPhysicalDeviceWin32PresentationSupportKHR-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceWin32PresentationSupportKHR-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceWin32PresentationSupportKHR-queueFamilyIndex-01309", "queueFamilyIndex must be less than pQueueFamilyPropertyCount returned by vkGetPhysicalDeviceQueueFamilyProperties for the given physicalDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceWin32PresentationSupportKHR-queueFamilyIndex-01309)"},
+    {"VUID-vkGetPhysicalDeviceXcbPresentationSupportKHR-connection-parameter", "connection must be a valid pointer to an xcb_connection_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceXcbPresentationSupportKHR-connection-parameter)"},
+    {"VUID-vkGetPhysicalDeviceXcbPresentationSupportKHR-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceXcbPresentationSupportKHR-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceXcbPresentationSupportKHR-queueFamilyIndex-01312", "queueFamilyIndex must be less than pQueueFamilyPropertyCount returned by vkGetPhysicalDeviceQueueFamilyProperties for the given physicalDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceXcbPresentationSupportKHR-queueFamilyIndex-01312)"},
+    {"VUID-vkGetPhysicalDeviceXlibPresentationSupportKHR-dpy-parameter", "dpy must be a valid pointer to a Display value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceXlibPresentationSupportKHR-dpy-parameter)"},
+    {"VUID-vkGetPhysicalDeviceXlibPresentationSupportKHR-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceXlibPresentationSupportKHR-physicalDevice-parameter)"},
+    {"VUID-vkGetPhysicalDeviceXlibPresentationSupportKHR-queueFamilyIndex-01315", "queueFamilyIndex must be less than pQueueFamilyPropertyCount returned by vkGetPhysicalDeviceQueueFamilyProperties for the given physicalDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPhysicalDeviceXlibPresentationSupportKHR-queueFamilyIndex-01315)"},
+    {"VUID-vkGetPipelineCacheData-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineCacheData-device-parameter)"},
+    {"VUID-vkGetPipelineCacheData-pData-parameter", "If the value referenced by pDataSize is not 0, and pData is not NULL, pData must be a valid pointer to an array of pDataSize bytes (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineCacheData-pData-parameter)"},
+    {"VUID-vkGetPipelineCacheData-pDataSize-parameter", "pDataSize must be a valid pointer to a size_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineCacheData-pDataSize-parameter)"},
+    {"VUID-vkGetPipelineCacheData-pipelineCache-parameter", "pipelineCache must be a valid VkPipelineCache handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineCacheData-pipelineCache-parameter)"},
+    {"VUID-vkGetPipelineCacheData-pipelineCache-parent", "pipelineCache must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineCacheData-pipelineCache-parent)"},
+    {"VUID-vkGetPipelineExecutableInternalRepresentationsKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutableInternalRepresentationsKHR-device-parameter)"},
+    {"VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pExecutableInfo-parameter", "pExecutableInfo must be a valid pointer to a valid VkPipelineExecutableInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pExecutableInfo-parameter)"},
+    {"VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pInternalRepresentationCount-parameter", "pInternalRepresentationCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pInternalRepresentationCount-parameter)"},
+    {"VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pInternalRepresentations-parameter", "If the value referenced by pInternalRepresentationCount is not 0, and pInternalRepresentations is not NULL, pInternalRepresentations must be a valid pointer to an array of pInternalRepresentationCount VkPipelineExecutableInternalRepresentationKHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pInternalRepresentations-parameter)"},
+    {"VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pipeline-03277", "pipeline member of pExecutableInfo must have been created with device. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pipeline-03277)"},
+    {"VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pipeline-03278", "pipeline member of pExecutableInfo must have been created with VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR set in the flags field of VkGraphicsPipelineCreateInfo or VkComputePipelineCreateInfo. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pipeline-03278)"},
+    {"VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pipelineExecutableInfo-03276", "pipelineExecutableInfo must be enabled. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pipelineExecutableInfo-03276)"},
+    {"VUID-vkGetPipelineExecutablePropertiesKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutablePropertiesKHR-device-parameter)"},
+    {"VUID-vkGetPipelineExecutablePropertiesKHR-pExecutableCount-parameter", "pExecutableCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutablePropertiesKHR-pExecutableCount-parameter)"},
+    {"VUID-vkGetPipelineExecutablePropertiesKHR-pPipelineInfo-parameter", "pPipelineInfo must be a valid pointer to a valid VkPipelineInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutablePropertiesKHR-pPipelineInfo-parameter)"},
+    {"VUID-vkGetPipelineExecutablePropertiesKHR-pProperties-parameter", "If the value referenced by pExecutableCount is not 0, and pProperties is not NULL, pProperties must be a valid pointer to an array of pExecutableCount VkPipelineExecutablePropertiesKHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutablePropertiesKHR-pProperties-parameter)"},
+    {"VUID-vkGetPipelineExecutablePropertiesKHR-pipeline-03271", "pipeline member of pPipelineInfo must have been created with device. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutablePropertiesKHR-pipeline-03271)"},
+    {"VUID-vkGetPipelineExecutablePropertiesKHR-pipelineExecutableInfo-03270", "pipelineExecutableInfo must be enabled. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutablePropertiesKHR-pipelineExecutableInfo-03270)"},
+    {"VUID-vkGetPipelineExecutableStatisticsKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutableStatisticsKHR-device-parameter)"},
+    {"VUID-vkGetPipelineExecutableStatisticsKHR-pExecutableInfo-parameter", "pExecutableInfo must be a valid pointer to a valid VkPipelineExecutableInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutableStatisticsKHR-pExecutableInfo-parameter)"},
+    {"VUID-vkGetPipelineExecutableStatisticsKHR-pStatisticCount-parameter", "pStatisticCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutableStatisticsKHR-pStatisticCount-parameter)"},
+    {"VUID-vkGetPipelineExecutableStatisticsKHR-pStatistics-parameter", "If the value referenced by pStatisticCount is not 0, and pStatistics is not NULL, pStatistics must be a valid pointer to an array of pStatisticCount VkPipelineExecutableStatisticKHR structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutableStatisticsKHR-pStatistics-parameter)"},
+    {"VUID-vkGetPipelineExecutableStatisticsKHR-pipeline-03273", "pipeline member of pExecutableInfo must have been created with device. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutableStatisticsKHR-pipeline-03273)"},
+    {"VUID-vkGetPipelineExecutableStatisticsKHR-pipeline-03274", "pipeline member of pExecutableInfo must have been created with VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR set in the flags field of VkGraphicsPipelineCreateInfo or VkComputePipelineCreateInfo. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutableStatisticsKHR-pipeline-03274)"},
+    {"VUID-vkGetPipelineExecutableStatisticsKHR-pipelineExecutableInfo-03272", "pipelineExecutableInfo must be enabled. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetPipelineExecutableStatisticsKHR-pipelineExecutableInfo-03272)"},
+    {"VUID-vkGetQueryPoolResults-dataSize-00817", "dataSize must be large enough to contain the result of each query, as described here (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetQueryPoolResults-dataSize-00817)"},
+    {"VUID-vkGetQueryPoolResults-dataSize-arraylength", "dataSize must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetQueryPoolResults-dataSize-arraylength)"},
+    {"VUID-vkGetQueryPoolResults-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetQueryPoolResults-device-parameter)"},
+    {"VUID-vkGetQueryPoolResults-firstQuery-00813", "firstQuery must be less than the number of queries in queryPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetQueryPoolResults-firstQuery-00813)"},
+    {"VUID-vkGetQueryPoolResults-firstQuery-00816", "The sum of firstQuery and queryCount must be less than or equal to the number of queries in queryPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetQueryPoolResults-firstQuery-00816)"},
+    {"VUID-vkGetQueryPoolResults-flags-00815", "If VK_QUERY_RESULT_64_BIT is set in flags then pData and stride must be multiples of 8 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetQueryPoolResults-flags-00815)"},
+    {"VUID-vkGetQueryPoolResults-flags-02827", "If VK_QUERY_RESULT_64_BIT is not set in flags, then pData and stride must be multiples of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetQueryPoolResults-flags-02827)"},
+    {"VUID-vkGetQueryPoolResults-flags-02828", "If VK_QUERY_RESULT_64_BIT is not set in flags and the queryType used to create queryPool was not VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, then pData and stride must be multiples of 4 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetQueryPoolResults-flags-02828)"},
+    {"VUID-vkGetQueryPoolResults-flags-parameter", "flags must be a valid combination of VkQueryResultFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetQueryPoolResults-flags-parameter)"},
+    {"VUID-vkGetQueryPoolResults-pData-parameter", "pData must be a valid pointer to an array of dataSize bytes (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetQueryPoolResults-pData-parameter)"},
+    {"VUID-vkGetQueryPoolResults-queryPool-parameter", "queryPool must be a valid VkQueryPool handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetQueryPoolResults-queryPool-parameter)"},
+    {"VUID-vkGetQueryPoolResults-queryPool-parent", "queryPool must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetQueryPoolResults-queryPool-parent)"},
+    {"VUID-vkGetQueryPoolResults-queryType-00818", "If the queryType used to create queryPool was VK_QUERY_TYPE_TIMESTAMP, flags must not contain VK_QUERY_RESULT_PARTIAL_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetQueryPoolResults-queryType-00818)"},
+    {"VUID-vkGetQueryPoolResults-queryType-03229", "If the queryType used to create queryPool was VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, then pData and stride must be multiples of the size of VkPerformanceCounterResultKHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetQueryPoolResults-queryType-03229)"},
+    {"VUID-vkGetQueryPoolResults-queryType-03230", "If the queryType used to create queryPool was VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, flags must not contain VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, VK_QUERY_RESULT_PARTIAL_BIT or VK_QUERY_RESULT_64_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetQueryPoolResults-queryType-03230)"},
+    {"VUID-vkGetQueryPoolResults-queryType-03231", "If the queryType used to create queryPool was VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, the queryPool must have been recorded once for each pass as retrieved via a call to vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetQueryPoolResults-queryType-03231)"},
+    {"VUID-vkGetQueueCheckpointDataNV-pCheckpointData-parameter", "If the value referenced by pCheckpointDataCount is not 0, and pCheckpointData is not NULL, pCheckpointData must be a valid pointer to an array of pCheckpointDataCount VkCheckpointDataNV structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetQueueCheckpointDataNV-pCheckpointData-parameter)"},
+    {"VUID-vkGetQueueCheckpointDataNV-pCheckpointDataCount-parameter", "pCheckpointDataCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetQueueCheckpointDataNV-pCheckpointDataCount-parameter)"},
+    {"VUID-vkGetQueueCheckpointDataNV-queue-02025", "The device that queue belongs to must be in the lost state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetQueueCheckpointDataNV-queue-02025)"},
+    {"VUID-vkGetQueueCheckpointDataNV-queue-parameter", "queue must be a valid VkQueue handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetQueueCheckpointDataNV-queue-parameter)"},
+    {"VUID-vkGetRandROutputDisplayEXT-dpy-parameter", "dpy must be a valid pointer to a Display value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetRandROutputDisplayEXT-dpy-parameter)"},
+    {"VUID-vkGetRandROutputDisplayEXT-pDisplay-parameter", "pDisplay must be a valid pointer to a VkDisplayKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetRandROutputDisplayEXT-pDisplay-parameter)"},
+    {"VUID-vkGetRandROutputDisplayEXT-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetRandROutputDisplayEXT-physicalDevice-parameter)"},
+    {"VUID-vkGetRayTracingShaderGroupHandlesNV-dataSize-02420", "dataSize must be at least VkPhysicalDeviceRayTracingPropertiesNV::shaderGroupHandleSize {times} groupCount (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetRayTracingShaderGroupHandlesNV-dataSize-02420)"},
+    {"VUID-vkGetRayTracingShaderGroupHandlesNV-dataSize-arraylength", "dataSize must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetRayTracingShaderGroupHandlesNV-dataSize-arraylength)"},
+    {"VUID-vkGetRayTracingShaderGroupHandlesNV-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetRayTracingShaderGroupHandlesNV-device-parameter)"},
+    {"VUID-vkGetRayTracingShaderGroupHandlesNV-firstGroup-02419", "The sum of firstGroup and groupCount must be less than the number of shader groups in pipeline. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetRayTracingShaderGroupHandlesNV-firstGroup-02419)"},
+    {"VUID-vkGetRayTracingShaderGroupHandlesNV-pData-parameter", "pData must be a valid pointer to an array of dataSize bytes (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetRayTracingShaderGroupHandlesNV-pData-parameter)"},
+    {"VUID-vkGetRayTracingShaderGroupHandlesNV-pipeline-parameter", "pipeline must be a valid VkPipeline handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetRayTracingShaderGroupHandlesNV-pipeline-parameter)"},
+    {"VUID-vkGetRayTracingShaderGroupHandlesNV-pipeline-parent", "pipeline must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetRayTracingShaderGroupHandlesNV-pipeline-parent)"},
+    {"VUID-vkGetRefreshCycleDurationGOOGLE-commonparent", "Both of device, and swapchain must have been created, allocated, or retrieved from the same VkInstance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetRefreshCycleDurationGOOGLE-commonparent)"},
+    {"VUID-vkGetRefreshCycleDurationGOOGLE-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetRefreshCycleDurationGOOGLE-device-parameter)"},
+    {"VUID-vkGetRefreshCycleDurationGOOGLE-pDisplayTimingProperties-parameter", "pDisplayTimingProperties must be a valid pointer to a VkRefreshCycleDurationGOOGLE structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetRefreshCycleDurationGOOGLE-pDisplayTimingProperties-parameter)"},
+    {"VUID-vkGetRefreshCycleDurationGOOGLE-swapchain-parameter", "swapchain must be a valid VkSwapchainKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetRefreshCycleDurationGOOGLE-swapchain-parameter)"},
+    {"VUID-vkGetRenderAreaGranularity-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetRenderAreaGranularity-device-parameter)"},
+    {"VUID-vkGetRenderAreaGranularity-pGranularity-parameter", "pGranularity must be a valid pointer to a VkExtent2D structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetRenderAreaGranularity-pGranularity-parameter)"},
+    {"VUID-vkGetRenderAreaGranularity-renderPass-parameter", "renderPass must be a valid VkRenderPass handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetRenderAreaGranularity-renderPass-parameter)"},
+    {"VUID-vkGetRenderAreaGranularity-renderPass-parent", "renderPass must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetRenderAreaGranularity-renderPass-parent)"},
+    {"VUID-vkGetSemaphoreCounterValueKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSemaphoreCounterValueKHR-device-parameter)"},
+    {"VUID-vkGetSemaphoreCounterValueKHR-pValue-parameter", "pValue must be a valid pointer to a uint64_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSemaphoreCounterValueKHR-pValue-parameter)"},
+    {"VUID-vkGetSemaphoreCounterValueKHR-semaphore-03255", "semaphore must have been created with a VkSemaphoreTypeKHR of VK_SEMAPHORE_TYPE_TIMELINE_KHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSemaphoreCounterValueKHR-semaphore-03255)"},
+    {"VUID-vkGetSemaphoreCounterValueKHR-semaphore-parameter", "semaphore must be a valid VkSemaphore handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSemaphoreCounterValueKHR-semaphore-parameter)"},
+    {"VUID-vkGetSemaphoreCounterValueKHR-semaphore-parent", "semaphore must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSemaphoreCounterValueKHR-semaphore-parent)"},
+    {"VUID-vkGetSemaphoreFdKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSemaphoreFdKHR-device-parameter)"},
+    {"VUID-vkGetSemaphoreFdKHR-pFd-parameter", "pFd must be a valid pointer to an int value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSemaphoreFdKHR-pFd-parameter)"},
+    {"VUID-vkGetSemaphoreFdKHR-pGetFdInfo-parameter", "pGetFdInfo must be a valid pointer to a valid VkSemaphoreGetFdInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSemaphoreFdKHR-pGetFdInfo-parameter)"},
+    {"VUID-vkGetSemaphoreWin32HandleKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSemaphoreWin32HandleKHR-device-parameter)"},
+    {"VUID-vkGetSemaphoreWin32HandleKHR-pGetWin32HandleInfo-parameter", "pGetWin32HandleInfo must be a valid pointer to a valid VkSemaphoreGetWin32HandleInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSemaphoreWin32HandleKHR-pGetWin32HandleInfo-parameter)"},
+    {"VUID-vkGetSemaphoreWin32HandleKHR-pHandle-parameter", "pHandle must be a valid pointer to a HANDLE value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSemaphoreWin32HandleKHR-pHandle-parameter)"},
+    {"VUID-vkGetShaderInfoAMD-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetShaderInfoAMD-device-parameter)"},
+    {"VUID-vkGetShaderInfoAMD-infoType-parameter", "infoType must be a valid VkShaderInfoTypeAMD value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetShaderInfoAMD-infoType-parameter)"},
+    {"VUID-vkGetShaderInfoAMD-pInfo-parameter", "If the value referenced by pInfoSize is not 0, and pInfo is not NULL, pInfo must be a valid pointer to an array of pInfoSize bytes (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetShaderInfoAMD-pInfo-parameter)"},
+    {"VUID-vkGetShaderInfoAMD-pInfoSize-parameter", "pInfoSize must be a valid pointer to a size_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetShaderInfoAMD-pInfoSize-parameter)"},
+    {"VUID-vkGetShaderInfoAMD-pipeline-parameter", "pipeline must be a valid VkPipeline handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetShaderInfoAMD-pipeline-parameter)"},
+    {"VUID-vkGetShaderInfoAMD-pipeline-parent", "pipeline must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetShaderInfoAMD-pipeline-parent)"},
+    {"VUID-vkGetShaderInfoAMD-shaderStage-parameter", "shaderStage must be a valid VkShaderStageFlagBits value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetShaderInfoAMD-shaderStage-parameter)"},
+    {"VUID-vkGetSwapchainCounterEXT-commonparent", "Both of device, and swapchain must have been created, allocated, or retrieved from the same VkInstance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSwapchainCounterEXT-commonparent)"},
+    {"VUID-vkGetSwapchainCounterEXT-counter-parameter", "counter must be a valid VkSurfaceCounterFlagBitsEXT value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSwapchainCounterEXT-counter-parameter)"},
+    {"VUID-vkGetSwapchainCounterEXT-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSwapchainCounterEXT-device-parameter)"},
+    {"VUID-vkGetSwapchainCounterEXT-pCounterValue-parameter", "pCounterValue must be a valid pointer to a uint64_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSwapchainCounterEXT-pCounterValue-parameter)"},
+    {"VUID-vkGetSwapchainCounterEXT-swapchain-01245", "One or more present commands on swapchain must have been processed by the presentation engine. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSwapchainCounterEXT-swapchain-01245)"},
+    {"VUID-vkGetSwapchainCounterEXT-swapchain-parameter", "swapchain must be a valid VkSwapchainKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSwapchainCounterEXT-swapchain-parameter)"},
+    {"VUID-vkGetSwapchainImagesKHR-commonparent", "Both of device, and swapchain must have been created, allocated, or retrieved from the same VkInstance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSwapchainImagesKHR-commonparent)"},
+    {"VUID-vkGetSwapchainImagesKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSwapchainImagesKHR-device-parameter)"},
+    {"VUID-vkGetSwapchainImagesKHR-pSwapchainImageCount-parameter", "pSwapchainImageCount must be a valid pointer to a uint32_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSwapchainImagesKHR-pSwapchainImageCount-parameter)"},
+    {"VUID-vkGetSwapchainImagesKHR-pSwapchainImages-parameter", "If the value referenced by pSwapchainImageCount is not 0, and pSwapchainImages is not NULL, pSwapchainImages must be a valid pointer to an array of pSwapchainImageCount VkImage handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSwapchainImagesKHR-pSwapchainImages-parameter)"},
+    {"VUID-vkGetSwapchainImagesKHR-swapchain-parameter", "swapchain must be a valid VkSwapchainKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSwapchainImagesKHR-swapchain-parameter)"},
+    {"VUID-vkGetSwapchainStatusKHR-commonparent", "Both of device, and swapchain must have been created, allocated, or retrieved from the same VkInstance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSwapchainStatusKHR-commonparent)"},
+    {"VUID-vkGetSwapchainStatusKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSwapchainStatusKHR-device-parameter)"},
+    {"VUID-vkGetSwapchainStatusKHR-swapchain-parameter", "swapchain must be a valid VkSwapchainKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetSwapchainStatusKHR-swapchain-parameter)"},
+    {"VUID-vkGetValidationCacheDataEXT-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetValidationCacheDataEXT-device-parameter)"},
+    {"VUID-vkGetValidationCacheDataEXT-pData-parameter", "If the value referenced by pDataSize is not 0, and pData is not NULL, pData must be a valid pointer to an array of pDataSize bytes (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetValidationCacheDataEXT-pData-parameter)"},
+    {"VUID-vkGetValidationCacheDataEXT-pDataSize-parameter", "pDataSize must be a valid pointer to a size_t value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetValidationCacheDataEXT-pDataSize-parameter)"},
+    {"VUID-vkGetValidationCacheDataEXT-validationCache-parameter", "validationCache must be a valid VkValidationCacheEXT handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetValidationCacheDataEXT-validationCache-parameter)"},
+    {"VUID-vkGetValidationCacheDataEXT-validationCache-parent", "validationCache must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkGetValidationCacheDataEXT-validationCache-parent)"},
+    {"VUID-vkImportFenceFdKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkImportFenceFdKHR-device-parameter)"},
+    {"VUID-vkImportFenceFdKHR-fence-01463", "fence must not be associated with any queue command that has not yet completed execution on that queue (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkImportFenceFdKHR-fence-01463)"},
+    {"VUID-vkImportFenceFdKHR-pImportFenceFdInfo-parameter", "pImportFenceFdInfo must be a valid pointer to a valid VkImportFenceFdInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkImportFenceFdKHR-pImportFenceFdInfo-parameter)"},
+    {"VUID-vkImportFenceWin32HandleKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkImportFenceWin32HandleKHR-device-parameter)"},
+    {"VUID-vkImportFenceWin32HandleKHR-pImportFenceWin32HandleInfo-parameter", "pImportFenceWin32HandleInfo must be a valid pointer to a valid VkImportFenceWin32HandleInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkImportFenceWin32HandleKHR-pImportFenceWin32HandleInfo-parameter)"},
+    {"VUID-vkImportSemaphoreFdKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkImportSemaphoreFdKHR-device-parameter)"},
+    {"VUID-vkImportSemaphoreFdKHR-pImportSemaphoreFdInfo-parameter", "pImportSemaphoreFdInfo must be a valid pointer to a valid VkImportSemaphoreFdInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkImportSemaphoreFdKHR-pImportSemaphoreFdInfo-parameter)"},
+    {"VUID-vkImportSemaphoreFdKHR-semaphore-01142", "semaphore must not be associated with any queue command that has not yet completed execution on that queue (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkImportSemaphoreFdKHR-semaphore-01142)"},
+    {"VUID-vkImportSemaphoreWin32HandleKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkImportSemaphoreWin32HandleKHR-device-parameter)"},
+    {"VUID-vkImportSemaphoreWin32HandleKHR-pImportSemaphoreWin32HandleInfo-parameter", "pImportSemaphoreWin32HandleInfo must be a valid pointer to a valid VkImportSemaphoreWin32HandleInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkImportSemaphoreWin32HandleKHR-pImportSemaphoreWin32HandleInfo-parameter)"},
+    {"VUID-vkInitializePerformanceApiINTEL-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkInitializePerformanceApiINTEL-device-parameter)"},
+    {"VUID-vkInitializePerformanceApiINTEL-pInitializeInfo-parameter", "pInitializeInfo must be a valid pointer to a valid VkInitializePerformanceApiInfoINTEL structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkInitializePerformanceApiINTEL-pInitializeInfo-parameter)"},
+    {"VUID-vkInvalidateMappedMemoryRanges-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkInvalidateMappedMemoryRanges-device-parameter)"},
+    {"VUID-vkInvalidateMappedMemoryRanges-memoryRangeCount-arraylength", "memoryRangeCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkInvalidateMappedMemoryRanges-memoryRangeCount-arraylength)"},
+    {"VUID-vkInvalidateMappedMemoryRanges-pMemoryRanges-parameter", "pMemoryRanges must be a valid pointer to an array of memoryRangeCount valid VkMappedMemoryRange structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkInvalidateMappedMemoryRanges-pMemoryRanges-parameter)"},
+    {"VUID-vkMapMemory-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkMapMemory-device-parameter)"},
+    {"VUID-vkMapMemory-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkMapMemory-flags-zerobitmask)"},
+    {"VUID-vkMapMemory-memory-00678", "memory must not be currently host mapped (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkMapMemory-memory-00678)"},
+    {"VUID-vkMapMemory-memory-00682", "memory must have been created with a memory type that reports VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkMapMemory-memory-00682)"},
+    {"VUID-vkMapMemory-memory-00683", "memory must not have been allocated with multiple instances. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkMapMemory-memory-00683)"},
+    {"VUID-vkMapMemory-memory-parameter", "memory must be a valid VkDeviceMemory handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkMapMemory-memory-parameter)"},
+    {"VUID-vkMapMemory-memory-parent", "memory must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkMapMemory-memory-parent)"},
+    {"VUID-vkMapMemory-offset-00679", "offset must be less than the size of memory (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkMapMemory-offset-00679)"},
+    {"VUID-vkMapMemory-ppData-parameter", "ppData must be a valid pointer to a pointer value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkMapMemory-ppData-parameter)"},
+    {"VUID-vkMapMemory-size-00680", "If size is not equal to VK_WHOLE_SIZE, size must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkMapMemory-size-00680)"},
+    {"VUID-vkMapMemory-size-00681", "If size is not equal to VK_WHOLE_SIZE, size must be less than or equal to the size of the memory minus offset (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkMapMemory-size-00681)"},
+    {"VUID-vkMergePipelineCaches-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkMergePipelineCaches-device-parameter)"},
+    {"VUID-vkMergePipelineCaches-dstCache-00770", "dstCache must not appear in the list of source caches (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkMergePipelineCaches-dstCache-00770)"},
+    {"VUID-vkMergePipelineCaches-dstCache-parameter", "dstCache must be a valid VkPipelineCache handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkMergePipelineCaches-dstCache-parameter)"},
+    {"VUID-vkMergePipelineCaches-dstCache-parent", "dstCache must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkMergePipelineCaches-dstCache-parent)"},
+    {"VUID-vkMergePipelineCaches-pSrcCaches-parameter", "pSrcCaches must be a valid pointer to an array of srcCacheCount valid VkPipelineCache handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkMergePipelineCaches-pSrcCaches-parameter)"},
+    {"VUID-vkMergePipelineCaches-pSrcCaches-parent", "Each element of pSrcCaches must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkMergePipelineCaches-pSrcCaches-parent)"},
+    {"VUID-vkMergePipelineCaches-srcCacheCount-arraylength", "srcCacheCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkMergePipelineCaches-srcCacheCount-arraylength)"},
+    {"VUID-vkMergeValidationCachesEXT-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkMergeValidationCachesEXT-device-parameter)"},
+    {"VUID-vkMergeValidationCachesEXT-dstCache-01536", "dstCache must not appear in the list of source caches (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkMergeValidationCachesEXT-dstCache-01536)"},
+    {"VUID-vkMergeValidationCachesEXT-dstCache-parameter", "dstCache must be a valid VkValidationCacheEXT handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkMergeValidationCachesEXT-dstCache-parameter)"},
+    {"VUID-vkMergeValidationCachesEXT-dstCache-parent", "dstCache must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkMergeValidationCachesEXT-dstCache-parent)"},
+    {"VUID-vkMergeValidationCachesEXT-pSrcCaches-parameter", "pSrcCaches must be a valid pointer to an array of srcCacheCount valid VkValidationCacheEXT handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkMergeValidationCachesEXT-pSrcCaches-parameter)"},
+    {"VUID-vkMergeValidationCachesEXT-pSrcCaches-parent", "Each element of pSrcCaches must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkMergeValidationCachesEXT-pSrcCaches-parent)"},
+    {"VUID-vkMergeValidationCachesEXT-srcCacheCount-arraylength", "srcCacheCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkMergeValidationCachesEXT-srcCacheCount-arraylength)"},
+    {"VUID-vkQueueBeginDebugUtilsLabelEXT-pLabelInfo-parameter", "pLabelInfo must be a valid pointer to a valid VkDebugUtilsLabelEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueBeginDebugUtilsLabelEXT-pLabelInfo-parameter)"},
+    {"VUID-vkQueueBeginDebugUtilsLabelEXT-queue-parameter", "queue must be a valid VkQueue handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueBeginDebugUtilsLabelEXT-queue-parameter)"},
+    {"VUID-vkQueueBindSparse-commonparent", "Both of fence, and queue that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueBindSparse-commonparent)"},
+    {"VUID-vkQueueBindSparse-fence-01113", "If fence is not VK_NULL_HANDLE, fence must be unsignaled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueBindSparse-fence-01113)"},
+    {"VUID-vkQueueBindSparse-fence-01114", "If fence is not VK_NULL_HANDLE, fence must not be associated with any other queue command that has not yet completed execution on that queue (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueBindSparse-fence-01114)"},
+    {"VUID-vkQueueBindSparse-fence-parameter", "If fence is not VK_NULL_HANDLE, fence must be a valid VkFence handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueBindSparse-fence-parameter)"},
+    {"VUID-vkQueueBindSparse-pBindInfo-parameter", "If bindInfoCount is not 0, pBindInfo must be a valid pointer to an array of bindInfoCount valid VkBindSparseInfo structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueBindSparse-pBindInfo-parameter)"},
+    {"VUID-vkQueueBindSparse-pSignalSemaphores-01115", "Each element of the pSignalSemaphores member of each element of pBindInfo must be unsignaled when the semaphore signal operation it defines is executed on the device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueBindSparse-pSignalSemaphores-01115)"},
+    {"VUID-vkQueueBindSparse-pWaitSemaphores-01116", "When a semaphore wait operation referring to a binary semaphore defined by any element of the pWaitSemaphores member of any element of pBindInfo executes on queue, there must be no other queues waiting on the same semaphore. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueBindSparse-pWaitSemaphores-01116)"},
+    {"VUID-vkQueueBindSparse-pWaitSemaphores-01117", "All elements of the pWaitSemaphores member of all elements of pBindInfo member referring to a binary semaphore must be semaphores that are signaled, or have semaphore signal operations previously submitted for execution. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueBindSparse-pWaitSemaphores-01117)"},
+    {"VUID-vkQueueBindSparse-pWaitSemaphores-03245", "All elements of the pWaitSemaphores member of all elements of pBindInfo created with a VkSemaphoreTypeKHR of VK_SEMAPHORE_TYPE_BINARY_KHR must reference a semaphore signal operation that has been submitted for execution and any semaphore signal operations on which it depends (if any) must have also been submitted for execution. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueBindSparse-pWaitSemaphores-03245)"},
+    {"VUID-vkQueueBindSparse-queue-parameter", "queue must be a valid VkQueue handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueBindSparse-queue-parameter)"},
+    {"VUID-vkQueueBindSparse-queuetype", "The queue must support sparse binding operations (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueBindSparse-queuetype)"},
+    {"VUID-vkQueueEndDebugUtilsLabelEXT-None-01911", "There must be an outstanding vkQueueBeginDebugUtilsLabelEXT command prior to the vkQueueEndDebugUtilsLabelEXT on the queue (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueEndDebugUtilsLabelEXT-None-01911)"},
+    {"VUID-vkQueueEndDebugUtilsLabelEXT-queue-parameter", "queue must be a valid VkQueue handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueEndDebugUtilsLabelEXT-queue-parameter)"},
+    {"VUID-vkQueueInsertDebugUtilsLabelEXT-pLabelInfo-parameter", "pLabelInfo must be a valid pointer to a valid VkDebugUtilsLabelEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueInsertDebugUtilsLabelEXT-pLabelInfo-parameter)"},
+    {"VUID-vkQueueInsertDebugUtilsLabelEXT-queue-parameter", "queue must be a valid VkQueue handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueInsertDebugUtilsLabelEXT-queue-parameter)"},
+    {"VUID-vkQueuePresentKHR-pPresentInfo-parameter", "pPresentInfo must be a valid pointer to a valid VkPresentInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueuePresentKHR-pPresentInfo-parameter)"},
+    {"VUID-vkQueuePresentKHR-pSwapchains-01292", "Each element of pSwapchains member of pPresentInfo must be a swapchain that is created for a surface for which presentation is supported from queue as determined using a call to vkGetPhysicalDeviceSurfaceSupportKHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueuePresentKHR-pSwapchains-01292)"},
+    {"VUID-vkQueuePresentKHR-pSwapchains-01293", "If more than one member of pSwapchains was created from a display surface, all display surfaces referenced that refer to the same display must use the same display mode (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueuePresentKHR-pSwapchains-01293)"},
+    {"VUID-vkQueuePresentKHR-pWaitSemaphores-01294", "When a semaphore wait operation referring to a binary semaphore defined by the elements of the pWaitSemaphores member of pPresentInfo executes on queue, there must be no other queues waiting on the same semaphore. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueuePresentKHR-pWaitSemaphores-01294)"},
+    {"VUID-vkQueuePresentKHR-pWaitSemaphores-01295", "All elements of the pWaitSemaphores member of pPresentInfo must be semaphores that are signaled, or have semaphore signal operations previously submitted for execution. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueuePresentKHR-pWaitSemaphores-01295)"},
+    {"VUID-vkQueuePresentKHR-pWaitSemaphores-03267", "All elements of the pWaitSemaphores member of pPresentInfo must be created with a VkSemaphoreTypeKHR of VK_SEMAPHORE_TYPE_BINARY_KHR. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueuePresentKHR-pWaitSemaphores-03267)"},
+    {"VUID-vkQueuePresentKHR-pWaitSemaphores-03268", "All elements of the pWaitSemaphores member of pPresentInfo must reference a semaphore signal operation that has been submitted for execution and any semaphore signal operations on which it depends (if any) must have also been submitted for execution. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueuePresentKHR-pWaitSemaphores-03268)"},
+    {"VUID-vkQueuePresentKHR-queue-parameter", "queue must be a valid VkQueue handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueuePresentKHR-queue-parameter)"},
+    {"VUID-vkQueueSetPerformanceConfigurationINTEL-commonparent", "Both of configuration, and queue must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSetPerformanceConfigurationINTEL-commonparent)"},
+    {"VUID-vkQueueSetPerformanceConfigurationINTEL-configuration-parameter", "configuration must be a valid VkPerformanceConfigurationINTEL handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSetPerformanceConfigurationINTEL-configuration-parameter)"},
+    {"VUID-vkQueueSetPerformanceConfigurationINTEL-queue-parameter", "queue must be a valid VkQueue handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSetPerformanceConfigurationINTEL-queue-parameter)"},
+    {"VUID-vkQueueSubmit-commonparent", "Both of fence, and queue that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-commonparent)"},
+    {"VUID-vkQueueSubmit-fence-00063", "If fence is not VK_NULL_HANDLE, fence must be unsignaled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-fence-00063)"},
+    {"VUID-vkQueueSubmit-fence-00064", "If fence is not VK_NULL_HANDLE, fence must not be associated with any other queue command that has not yet completed execution on that queue (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-fence-00064)"},
+    {"VUID-vkQueueSubmit-fence-parameter", "If fence is not VK_NULL_HANDLE, fence must be a valid VkFence handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-fence-parameter)"},
+    {"VUID-vkQueueSubmit-pCommandBuffers-00065", "Any calls to vkCmdSetEvent, vkCmdResetEvent or vkCmdWaitEvents that have been recorded into any of the command buffer elements of the pCommandBuffers member of any element of pSubmits, must not reference any VkEvent that is referenced by any of those commands in a command buffer that has been submitted to another queue and is still in the pending state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-pCommandBuffers-00065)"},
+    {"VUID-vkQueueSubmit-pCommandBuffers-00070", "Each element of the pCommandBuffers member of each element of pSubmits must be in the pending or executable state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-pCommandBuffers-00070)"},
+    {"VUID-vkQueueSubmit-pCommandBuffers-00071", "If any element of the pCommandBuffers member of any element of pSubmits was not recorded with the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT, it must not be in the pending state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-pCommandBuffers-00071)"},
+    {"VUID-vkQueueSubmit-pCommandBuffers-00072", "Any secondary command buffers recorded into any element of the pCommandBuffers member of any element of pSubmits must be in the pending or executable state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-pCommandBuffers-00072)"},
+    {"VUID-vkQueueSubmit-pCommandBuffers-00073", "If any secondary command buffers recorded into any element of the pCommandBuffers member of any element of pSubmits was not recorded with the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT, it must not be in the pending state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-pCommandBuffers-00073)"},
+    {"VUID-vkQueueSubmit-pCommandBuffers-00074", "Each element of the pCommandBuffers member of each element of pSubmits must have been allocated from a VkCommandPool that was created for the same queue family queue belongs to (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-pCommandBuffers-00074)"},
+    {"VUID-vkQueueSubmit-pCommandBuffers-03220", "If a command recorded into any element of pCommandBuffers was a vkCmdBeginQuery whose queryPool was created with a queryType of VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, the profiling lock must have been held continuously on the VkDevice that queue was retrieved from, throughout recording of those command buffers (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-pCommandBuffers-03220)"},
+    {"VUID-vkQueueSubmit-pSignalSemaphores-00067", "Each element of the pSignalSemaphores member of any element of pSubmits must be unsignaled when the semaphore signal operation it defines is executed on the device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-pSignalSemaphores-00067)"},
+    {"VUID-vkQueueSubmit-pSubmits-02207", "If any element of pSubmits->pCommandBuffers includes a Queue Family Transfer Acquire Operation, there must exist a previously submitted Queue Family Transfer Release Operation on a queue in the queue family identified by the acquire operation, with parameters matching the acquire operation as defined in the definition of such acquire operations, and which happens before the acquire operation (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-pSubmits-02207)"},
+    {"VUID-vkQueueSubmit-pSubmits-02808", "Any resource created with VK_SHARING_MODE_EXCLUSIVE that is read by an operation specified by pSubmits must not be owned by any queue family other than the one which queue belongs to, at the time it is executed (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-pSubmits-02808)"},
+    {"VUID-vkQueueSubmit-pSubmits-parameter", "If submitCount is not 0, pSubmits must be a valid pointer to an array of submitCount valid VkSubmitInfo structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-pSubmits-parameter)"},
+    {"VUID-vkQueueSubmit-pWaitDstStageMask-00066", "Any stage flag included in any element of the pWaitDstStageMask member of any element of pSubmits must be a pipeline stage supported by one of the capabilities of queue, as specified in the table of supported pipeline stages (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-pWaitDstStageMask-00066)"},
+    {"VUID-vkQueueSubmit-pWaitSemaphores-00068", "When a semaphore wait operation referring to a binary semaphore defined by any element of the pWaitSemaphores member of any element of pSubmits executes on queue, there must be no other queues waiting on the same semaphore (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-pWaitSemaphores-00068)"},
+    {"VUID-vkQueueSubmit-pWaitSemaphores-00069", "All elements of the pWaitSemaphores member of all elements of pSubmits must be semaphores that are signaled, or have semaphore signal operations previously submitted for execution (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-pWaitSemaphores-00069)"},
+    {"VUID-vkQueueSubmit-pWaitSemaphores-03238", "All elements of the pWaitSemaphores member of all elements of pSubmits created with a VkSemaphoreTypeKHR of VK_SEMAPHORE_TYPE_BINARY_KHR must reference a semaphore signal operation that has been submitted for execution and any semaphore signal operations on which it depends (if any) must have also been submitted for execution (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-pWaitSemaphores-03238)"},
+    {"VUID-vkQueueSubmit-queue-parameter", "queue must be a valid VkQueue handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueSubmit-queue-parameter)"},
+    {"VUID-vkQueueWaitIdle-queue-parameter", "queue must be a valid VkQueue handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkQueueWaitIdle-queue-parameter)"},
+    {"VUID-vkRegisterDeviceEventEXT-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkRegisterDeviceEventEXT-device-parameter)"},
+    {"VUID-vkRegisterDeviceEventEXT-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkRegisterDeviceEventEXT-pAllocator-parameter)"},
+    {"VUID-vkRegisterDeviceEventEXT-pDeviceEventInfo-parameter", "pDeviceEventInfo must be a valid pointer to a valid VkDeviceEventInfoEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkRegisterDeviceEventEXT-pDeviceEventInfo-parameter)"},
+    {"VUID-vkRegisterDeviceEventEXT-pFence-parameter", "pFence must be a valid pointer to a VkFence handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkRegisterDeviceEventEXT-pFence-parameter)"},
+    {"VUID-vkRegisterDisplayEventEXT-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkRegisterDisplayEventEXT-device-parameter)"},
+    {"VUID-vkRegisterDisplayEventEXT-display-parameter", "display must be a valid VkDisplayKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkRegisterDisplayEventEXT-display-parameter)"},
+    {"VUID-vkRegisterDisplayEventEXT-pAllocator-parameter", "If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkRegisterDisplayEventEXT-pAllocator-parameter)"},
+    {"VUID-vkRegisterDisplayEventEXT-pDisplayEventInfo-parameter", "pDisplayEventInfo must be a valid pointer to a valid VkDisplayEventInfoEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkRegisterDisplayEventEXT-pDisplayEventInfo-parameter)"},
+    {"VUID-vkRegisterDisplayEventEXT-pFence-parameter", "pFence must be a valid pointer to a VkFence handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkRegisterDisplayEventEXT-pFence-parameter)"},
+    {"VUID-vkRegisterObjectsNVX-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkRegisterObjectsNVX-device-parameter)"},
+    {"VUID-vkRegisterObjectsNVX-objectCount-arraylength", "objectCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkRegisterObjectsNVX-objectCount-arraylength)"},
+    {"VUID-vkRegisterObjectsNVX-objectTable-parameter", "objectTable must be a valid VkObjectTableNVX handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkRegisterObjectsNVX-objectTable-parameter)"},
+    {"VUID-vkRegisterObjectsNVX-objectTable-parent", "objectTable must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkRegisterObjectsNVX-objectTable-parent)"},
+    {"VUID-vkRegisterObjectsNVX-pObjectIndices-01365", "At any pObjectIndices there must not be a registered resource already. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkRegisterObjectsNVX-pObjectIndices-01365)"},
+    {"VUID-vkRegisterObjectsNVX-pObjectIndices-01366", "Any value inside pObjectIndices must be below the appropriate VkObjectTableCreateInfoNVX::pObjectEntryCounts limits provided at objectTable creation time. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkRegisterObjectsNVX-pObjectIndices-01366)"},
+    {"VUID-vkRegisterObjectsNVX-pObjectIndices-parameter", "pObjectIndices must be a valid pointer to an array of objectCount uint32_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkRegisterObjectsNVX-pObjectIndices-parameter)"},
+    {"VUID-vkRegisterObjectsNVX-pObjectTableEntry-01364", "The contents of pObjectTableEntry must yield plausible bindings supported by the device. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkRegisterObjectsNVX-pObjectTableEntry-01364)"},
+    {"VUID-vkRegisterObjectsNVX-ppObjectTableEntries-parameter", "ppObjectTableEntries must be a valid pointer to an array of objectCount valid VkObjectTableEntryNVX structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkRegisterObjectsNVX-ppObjectTableEntries-parameter)"},
+    {"VUID-vkReleaseDisplayEXT-display-parameter", "display must be a valid VkDisplayKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkReleaseDisplayEXT-display-parameter)"},
+    {"VUID-vkReleaseDisplayEXT-physicalDevice-parameter", "physicalDevice must be a valid VkPhysicalDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkReleaseDisplayEXT-physicalDevice-parameter)"},
+    {"VUID-vkReleaseFullScreenExclusiveModeEXT-swapchain-02677", "swapchain must not be in the retired state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkReleaseFullScreenExclusiveModeEXT-swapchain-02677)"},
+    {"VUID-vkReleaseFullScreenExclusiveModeEXT-swapchain-02678", "swapchain must be a swapchain created with a VkSurfaceFullScreenExclusiveInfoEXT structure, with fullScreenExclusive set to VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkReleaseFullScreenExclusiveModeEXT-swapchain-02678)"},
+    {"VUID-vkReleasePerformanceConfigurationINTEL-configuration-02737", "configuration must not be released before all command buffers submitted while the configuration was set are in pending state. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkReleasePerformanceConfigurationINTEL-configuration-02737)"},
+    {"VUID-vkReleasePerformanceConfigurationINTEL-configuration-parameter", "configuration must be a valid VkPerformanceConfigurationINTEL handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkReleasePerformanceConfigurationINTEL-configuration-parameter)"},
+    {"VUID-vkReleasePerformanceConfigurationINTEL-configuration-parent", "configuration must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkReleasePerformanceConfigurationINTEL-configuration-parent)"},
+    {"VUID-vkReleasePerformanceConfigurationINTEL-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkReleasePerformanceConfigurationINTEL-device-parameter)"},
+    {"VUID-vkReleaseProfilingLockKHR-device-03235", "The profiling lock of device must have been held via a previous successful call to vkAcquireProfilingLockKHR (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkReleaseProfilingLockKHR-device-03235)"},
+    {"VUID-vkReleaseProfilingLockKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkReleaseProfilingLockKHR-device-parameter)"},
+    {"VUID-vkResetCommandBuffer-commandBuffer-00045", "commandBuffer must not be in the pending state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetCommandBuffer-commandBuffer-00045)"},
+    {"VUID-vkResetCommandBuffer-commandBuffer-00046", "commandBuffer must have been allocated from a pool that was created with the VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetCommandBuffer-commandBuffer-00046)"},
+    {"VUID-vkResetCommandBuffer-commandBuffer-parameter", "commandBuffer must be a valid VkCommandBuffer handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetCommandBuffer-commandBuffer-parameter)"},
+    {"VUID-vkResetCommandBuffer-flags-parameter", "flags must be a valid combination of VkCommandBufferResetFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetCommandBuffer-flags-parameter)"},
+    {"VUID-vkResetCommandPool-commandPool-00040", "All VkCommandBuffer objects allocated from commandPool must not be in the pending state (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetCommandPool-commandPool-00040)"},
+    {"VUID-vkResetCommandPool-commandPool-parameter", "commandPool must be a valid VkCommandPool handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetCommandPool-commandPool-parameter)"},
+    {"VUID-vkResetCommandPool-commandPool-parent", "commandPool must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetCommandPool-commandPool-parent)"},
+    {"VUID-vkResetCommandPool-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetCommandPool-device-parameter)"},
+    {"VUID-vkResetCommandPool-flags-parameter", "flags must be a valid combination of VkCommandPoolResetFlagBits values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetCommandPool-flags-parameter)"},
+    {"VUID-vkResetDescriptorPool-descriptorPool-00313", "All uses of descriptorPool (via any allocated descriptor sets) must have completed execution (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetDescriptorPool-descriptorPool-00313)"},
+    {"VUID-vkResetDescriptorPool-descriptorPool-parameter", "descriptorPool must be a valid VkDescriptorPool handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetDescriptorPool-descriptorPool-parameter)"},
+    {"VUID-vkResetDescriptorPool-descriptorPool-parent", "descriptorPool must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetDescriptorPool-descriptorPool-parent)"},
+    {"VUID-vkResetDescriptorPool-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetDescriptorPool-device-parameter)"},
+    {"VUID-vkResetDescriptorPool-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetDescriptorPool-flags-zerobitmask)"},
+    {"VUID-vkResetEvent-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetEvent-device-parameter)"},
+    {"VUID-vkResetEvent-event-01148", "event must not be waited on by a vkCmdWaitEvents command that is currently executing (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetEvent-event-01148)"},
+    {"VUID-vkResetEvent-event-parameter", "event must be a valid VkEvent handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetEvent-event-parameter)"},
+    {"VUID-vkResetEvent-event-parent", "event must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetEvent-event-parent)"},
+    {"VUID-vkResetFences-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetFences-device-parameter)"},
+    {"VUID-vkResetFences-fenceCount-arraylength", "fenceCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetFences-fenceCount-arraylength)"},
+    {"VUID-vkResetFences-pFences-01123", "Each element of pFences must not be currently associated with any queue command that has not yet completed execution on that queue (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetFences-pFences-01123)"},
+    {"VUID-vkResetFences-pFences-parameter", "pFences must be a valid pointer to an array of fenceCount valid VkFence handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetFences-pFences-parameter)"},
+    {"VUID-vkResetFences-pFences-parent", "Each element of pFences must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetFences-pFences-parent)"},
+    {"VUID-vkResetQueryPoolEXT-None-02665", "The hostQueryReset feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetQueryPoolEXT-None-02665)"},
+    {"VUID-vkResetQueryPoolEXT-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetQueryPoolEXT-device-parameter)"},
+    {"VUID-vkResetQueryPoolEXT-firstQuery-02666", "firstQuery must be less than the number of queries in queryPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetQueryPoolEXT-firstQuery-02666)"},
+    {"VUID-vkResetQueryPoolEXT-firstQuery-02667", "The sum of firstQuery and queryCount must be less than or equal to the number of queries in queryPool (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetQueryPoolEXT-firstQuery-02667)"},
+    {"VUID-vkResetQueryPoolEXT-firstQuery-02741", "Submitted commands that refer to the range specified by firstQuery and queryCount in queryPool must have completed execution (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetQueryPoolEXT-firstQuery-02741)"},
+    {"VUID-vkResetQueryPoolEXT-firstQuery-02742", "The range of queries specified by firstQuery and queryCount in queryPool must not be in use by calls to vkGetQueryPoolResults or vkResetQueryPoolEXT in other threads (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetQueryPoolEXT-firstQuery-02742)"},
+    {"VUID-vkResetQueryPoolEXT-queryPool-parameter", "queryPool must be a valid VkQueryPool handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetQueryPoolEXT-queryPool-parameter)"},
+    {"VUID-vkResetQueryPoolEXT-queryPool-parent", "queryPool must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkResetQueryPoolEXT-queryPool-parent)"},
+    {"VUID-vkSetDebugUtilsObjectNameEXT-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSetDebugUtilsObjectNameEXT-device-parameter)"},
+    {"VUID-vkSetDebugUtilsObjectNameEXT-pNameInfo-02587", "pNameInfo->objectType must not be VK_OBJECT_TYPE_UNKNOWN (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSetDebugUtilsObjectNameEXT-pNameInfo-02587)"},
+    {"VUID-vkSetDebugUtilsObjectNameEXT-pNameInfo-02588", "pNameInfo->objectHandle must not be VK_NULL_HANDLE (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSetDebugUtilsObjectNameEXT-pNameInfo-02588)"},
+    {"VUID-vkSetDebugUtilsObjectNameEXT-pNameInfo-parameter", "pNameInfo must be a valid pointer to a valid VkDebugUtilsObjectNameInfoEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSetDebugUtilsObjectNameEXT-pNameInfo-parameter)"},
+    {"VUID-vkSetDebugUtilsObjectTagEXT-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSetDebugUtilsObjectTagEXT-device-parameter)"},
+    {"VUID-vkSetDebugUtilsObjectTagEXT-pTagInfo-parameter", "pTagInfo must be a valid pointer to a valid VkDebugUtilsObjectTagInfoEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSetDebugUtilsObjectTagEXT-pTagInfo-parameter)"},
+    {"VUID-vkSetEvent-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSetEvent-device-parameter)"},
+    {"VUID-vkSetEvent-event-parameter", "event must be a valid VkEvent handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSetEvent-event-parameter)"},
+    {"VUID-vkSetEvent-event-parent", "event must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSetEvent-event-parent)"},
+    {"VUID-vkSetHdrMetadataEXT-commonparent", "Both of device, and the elements of pSwapchains must have been created, allocated, or retrieved from the same VkInstance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSetHdrMetadataEXT-commonparent)"},
+    {"VUID-vkSetHdrMetadataEXT-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSetHdrMetadataEXT-device-parameter)"},
+    {"VUID-vkSetHdrMetadataEXT-pMetadata-parameter", "pMetadata must be a valid pointer to an array of swapchainCount valid VkHdrMetadataEXT structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSetHdrMetadataEXT-pMetadata-parameter)"},
+    {"VUID-vkSetHdrMetadataEXT-pSwapchains-parameter", "pSwapchains must be a valid pointer to an array of swapchainCount valid VkSwapchainKHR handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSetHdrMetadataEXT-pSwapchains-parameter)"},
+    {"VUID-vkSetHdrMetadataEXT-swapchainCount-arraylength", "swapchainCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSetHdrMetadataEXT-swapchainCount-arraylength)"},
+    {"VUID-vkSetLocalDimmingAMD-XXXXX", "It is only valid to call vkSetLocalDimmingAMD if VkDisplayNativeHdrSurfaceCapabilitiesAMD::localDimmingSupport is supported. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSetLocalDimmingAMD-XXXXX)"},
+    {"VUID-vkSetLocalDimmingAMD-commonparent", "Both of device, and swapChain must have been created, allocated, or retrieved from the same VkInstance (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSetLocalDimmingAMD-commonparent)"},
+    {"VUID-vkSetLocalDimmingAMD-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSetLocalDimmingAMD-device-parameter)"},
+    {"VUID-vkSetLocalDimmingAMD-swapChain-parameter", "swapChain must be a valid VkSwapchainKHR handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSetLocalDimmingAMD-swapChain-parameter)"},
+    {"VUID-vkSignalSemaphoreKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSignalSemaphoreKHR-device-parameter)"},
+    {"VUID-vkSignalSemaphoreKHR-pSignalInfo-parameter", "pSignalInfo must be a valid pointer to a valid VkSemaphoreSignalInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSignalSemaphoreKHR-pSignalInfo-parameter)"},
+    {"VUID-vkSubmitDebugUtilsMessageEXT-instance-parameter", "instance must be a valid VkInstance handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSubmitDebugUtilsMessageEXT-instance-parameter)"},
+    {"VUID-vkSubmitDebugUtilsMessageEXT-messageSeverity-parameter", "messageSeverity must be a valid VkDebugUtilsMessageSeverityFlagBitsEXT value (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSubmitDebugUtilsMessageEXT-messageSeverity-parameter)"},
+    {"VUID-vkSubmitDebugUtilsMessageEXT-messageTypes-parameter", "messageTypes must be a valid combination of VkDebugUtilsMessageTypeFlagBitsEXT values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSubmitDebugUtilsMessageEXT-messageTypes-parameter)"},
+    {"VUID-vkSubmitDebugUtilsMessageEXT-messageTypes-requiredbitmask", "messageTypes must not be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSubmitDebugUtilsMessageEXT-messageTypes-requiredbitmask)"},
+    {"VUID-vkSubmitDebugUtilsMessageEXT-objectType-02591", "The objectType member of each element of pCallbackData->pObjects must not be VK_OBJECT_TYPE_UNKNOWN (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSubmitDebugUtilsMessageEXT-objectType-02591)"},
+    {"VUID-vkSubmitDebugUtilsMessageEXT-pCallbackData-parameter", "pCallbackData must be a valid pointer to a valid VkDebugUtilsMessengerCallbackDataEXT structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkSubmitDebugUtilsMessageEXT-pCallbackData-parameter)"},
+    {"VUID-vkTrimCommandPool-commandPool-parameter", "commandPool must be a valid VkCommandPool handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkTrimCommandPool-commandPool-parameter)"},
+    {"VUID-vkTrimCommandPool-commandPool-parent", "commandPool must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkTrimCommandPool-commandPool-parent)"},
+    {"VUID-vkTrimCommandPool-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkTrimCommandPool-device-parameter)"},
+    {"VUID-vkTrimCommandPool-flags-zerobitmask", "flags must be 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkTrimCommandPool-flags-zerobitmask)"},
+    {"VUID-vkUninitializePerformanceApiINTEL-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkUninitializePerformanceApiINTEL-device-parameter)"},
+    {"VUID-vkUnmapMemory-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkUnmapMemory-device-parameter)"},
+    {"VUID-vkUnmapMemory-memory-00689", "memory must be currently host mapped (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkUnmapMemory-memory-00689)"},
+    {"VUID-vkUnmapMemory-memory-parameter", "memory must be a valid VkDeviceMemory handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkUnmapMemory-memory-parameter)"},
+    {"VUID-vkUnmapMemory-memory-parent", "memory must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkUnmapMemory-memory-parent)"},
+    {"VUID-vkUnregisterObjectsNVX-None-01375", "All operations on the device using the registered resource must have been completed. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkUnregisterObjectsNVX-None-01375)"},
+    {"VUID-vkUnregisterObjectsNVX-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkUnregisterObjectsNVX-device-parameter)"},
+    {"VUID-vkUnregisterObjectsNVX-objectCount-arraylength", "objectCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkUnregisterObjectsNVX-objectCount-arraylength)"},
+    {"VUID-vkUnregisterObjectsNVX-objectTable-parameter", "objectTable must be a valid VkObjectTableNVX handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkUnregisterObjectsNVX-objectTable-parameter)"},
+    {"VUID-vkUnregisterObjectsNVX-objectTable-parent", "objectTable must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkUnregisterObjectsNVX-objectTable-parent)"},
+    {"VUID-vkUnregisterObjectsNVX-pObjectEntryTypes-01374", "The pObjectEntryTypes of the resource at pObjectIndices must match. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkUnregisterObjectsNVX-pObjectEntryTypes-01374)"},
+    {"VUID-vkUnregisterObjectsNVX-pObjectEntryTypes-parameter", "pObjectEntryTypes must be a valid pointer to an array of objectCount valid VkObjectEntryTypeNVX values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkUnregisterObjectsNVX-pObjectEntryTypes-parameter)"},
+    {"VUID-vkUnregisterObjectsNVX-pObjectIndices-01373", "At any pObjectIndices there must be a registered resource already. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkUnregisterObjectsNVX-pObjectIndices-01373)"},
+    {"VUID-vkUnregisterObjectsNVX-pObjectIndices-parameter", "pObjectIndices must be a valid pointer to an array of objectCount uint32_t values (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkUnregisterObjectsNVX-pObjectIndices-parameter)"},
+    {"VUID-vkUpdateDescriptorSetWithTemplate-descriptorSet-parameter", "descriptorSet must be a valid VkDescriptorSet handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkUpdateDescriptorSetWithTemplate-descriptorSet-parameter)"},
+    {"VUID-vkUpdateDescriptorSetWithTemplate-descriptorUpdateTemplate-parameter", "descriptorUpdateTemplate must be a valid VkDescriptorUpdateTemplate handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkUpdateDescriptorSetWithTemplate-descriptorUpdateTemplate-parameter)"},
+    {"VUID-vkUpdateDescriptorSetWithTemplate-descriptorUpdateTemplate-parent", "descriptorUpdateTemplate must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkUpdateDescriptorSetWithTemplate-descriptorUpdateTemplate-parent)"},
+    {"VUID-vkUpdateDescriptorSetWithTemplate-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkUpdateDescriptorSetWithTemplate-device-parameter)"},
+    {"VUID-vkUpdateDescriptorSetWithTemplate-pData-01685", "pData must be a valid pointer to a memory containing one or more valid instances of VkDescriptorImageInfo, VkDescriptorBufferInfo, or VkBufferView in a layout defined by descriptorUpdateTemplate when it was created with vkCreateDescriptorUpdateTemplate (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkUpdateDescriptorSetWithTemplate-pData-01685)"},
+    {"VUID-vkUpdateDescriptorSets-None-03047", "Descriptor bindings updated by this command which were created without the VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT or VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT bits set must not be used by any command that was recorded to a command buffer which is in the pending state. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkUpdateDescriptorSets-None-03047)"},
+    {"VUID-vkUpdateDescriptorSets-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkUpdateDescriptorSets-device-parameter)"},
+    {"VUID-vkUpdateDescriptorSets-dstSet-00314", "The dstSet member of each element of pDescriptorWrites or pDescriptorCopies must not be used by any command that was recorded to a command buffer which is in the pending state. (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkUpdateDescriptorSets-dstSet-00314)"},
+    {"VUID-vkUpdateDescriptorSets-pDescriptorCopies-parameter", "If descriptorCopyCount is not 0, pDescriptorCopies must be a valid pointer to an array of descriptorCopyCount valid VkCopyDescriptorSet structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkUpdateDescriptorSets-pDescriptorCopies-parameter)"},
+    {"VUID-vkUpdateDescriptorSets-pDescriptorWrites-parameter", "If descriptorWriteCount is not 0, pDescriptorWrites must be a valid pointer to an array of descriptorWriteCount valid VkWriteDescriptorSet structures (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkUpdateDescriptorSets-pDescriptorWrites-parameter)"},
+    {"VUID-vkWaitForFences-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkWaitForFences-device-parameter)"},
+    {"VUID-vkWaitForFences-fenceCount-arraylength", "fenceCount must be greater than 0 (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkWaitForFences-fenceCount-arraylength)"},
+    {"VUID-vkWaitForFences-pFences-parameter", "pFences must be a valid pointer to an array of fenceCount valid VkFence handles (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkWaitForFences-pFences-parameter)"},
+    {"VUID-vkWaitForFences-pFences-parent", "Each element of pFences must have been created, allocated, or retrieved from device (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkWaitForFences-pFences-parent)"},
+    {"VUID-vkWaitSemaphoresKHR-device-parameter", "device must be a valid VkDevice handle (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkWaitSemaphoresKHR-device-parameter)"},
+    {"VUID-vkWaitSemaphoresKHR-pWaitInfo-parameter", "pWaitInfo must be a valid pointer to a valid VkSemaphoreWaitInfoKHR structure (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-vkWaitSemaphoresKHR-pWaitInfo-parameter)"},
+};
+
+// Macros for building the "must be recording" metadata: the command enum, command-name,
+// and VUID lists below are kept in parallel order (see the usage sketch after these lists).
+#define VUID_CMD_ENUM_LIST(prefix)\
+    prefix##NONE = 0,\
+    prefix##BEGINCONDITIONALRENDERINGEXT = 1,\
+    prefix##BEGINDEBUGUTILSLABELEXT = 2,\
+    prefix##BEGINQUERY = 3,\
+    prefix##BEGINQUERYINDEXEDEXT = 4,\
+    prefix##BEGINRENDERPASS = 5,\
+    prefix##BEGINRENDERPASS2KHR = 6,\
+    prefix##BEGINTRANSFORMFEEDBACKEXT = 7,\
+    prefix##BINDDESCRIPTORSETS = 8,\
+    prefix##BINDINDEXBUFFER = 9,\
+    prefix##BINDPIPELINE = 10,\
+    prefix##BINDSHADINGRATEIMAGENV = 11,\
+    prefix##BINDTRANSFORMFEEDBACKBUFFERSEXT = 12,\
+    prefix##BINDVERTEXBUFFERS = 13,\
+    prefix##BLITIMAGE = 14,\
+    prefix##BUILDACCELERATIONSTRUCTURENV = 15,\
+    prefix##CLEARATTACHMENTS = 16,\
+    prefix##CLEARCOLORIMAGE = 17,\
+    prefix##CLEARDEPTHSTENCILIMAGE = 18,\
+    prefix##COPYACCELERATIONSTRUCTURENV = 19,\
+    prefix##COPYBUFFER = 20,\
+    prefix##COPYBUFFERTOIMAGE = 21,\
+    prefix##COPYIMAGE = 22,\
+    prefix##COPYIMAGETOBUFFER = 23,\
+    prefix##COPYQUERYPOOLRESULTS = 24,\
+    prefix##DEBUGMARKERBEGINEXT = 25,\
+    prefix##DEBUGMARKERENDEXT = 26,\
+    prefix##DEBUGMARKERINSERTEXT = 27,\
+    prefix##DISPATCH = 28,\
+    prefix##DISPATCHBASE = 29,\
+    prefix##DISPATCHINDIRECT = 30,\
+    prefix##DRAW = 31,\
+    prefix##DRAWINDEXED = 32,\
+    prefix##DRAWINDEXEDINDIRECT = 33,\
+    prefix##DRAWINDEXEDINDIRECTCOUNTKHR = 34,\
+    prefix##DRAWINDIRECT = 35,\
+    prefix##DRAWINDIRECTBYTECOUNTEXT = 36,\
+    prefix##DRAWINDIRECTCOUNTKHR = 37,\
+    prefix##DRAWMESHTASKSINDIRECTCOUNTNV = 38,\
+    prefix##DRAWMESHTASKSINDIRECTNV = 39,\
+    prefix##DRAWMESHTASKSNV = 40,\
+    prefix##ENDCONDITIONALRENDERINGEXT = 41,\
+    prefix##ENDDEBUGUTILSLABELEXT = 42,\
+    prefix##ENDQUERY = 43,\
+    prefix##ENDQUERYINDEXEDEXT = 44,\
+    prefix##ENDRENDERPASS = 45,\
+    prefix##ENDRENDERPASS2KHR = 46,\
+    prefix##ENDTRANSFORMFEEDBACKEXT = 47,\
+    prefix##EXECUTECOMMANDS = 48,\
+    prefix##FILLBUFFER = 49,\
+    prefix##INSERTDEBUGUTILSLABELEXT = 50,\
+    prefix##NEXTSUBPASS = 51,\
+    prefix##NEXTSUBPASS2KHR = 52,\
+    prefix##PIPELINEBARRIER = 53,\
+    prefix##PROCESSCOMMANDSNVX = 54,\
+    prefix##PUSHCONSTANTS = 55,\
+    prefix##PUSHDESCRIPTORSETKHR = 56,\
+    prefix##PUSHDESCRIPTORSETWITHTEMPLATEKHR = 57,\
+    prefix##RESERVESPACEFORCOMMANDSNVX = 58,\
+    prefix##RESETEVENT = 59,\
+    prefix##RESETQUERYPOOL = 60,\
+    prefix##RESOLVEIMAGE = 61,\
+    prefix##SETBLENDCONSTANTS = 62,\
+    prefix##SETCHECKPOINTNV = 63,\
+    prefix##SETCOARSESAMPLEORDERNV = 64,\
+    prefix##SETDEPTHBIAS = 65,\
+    prefix##SETDEPTHBOUNDS = 66,\
+    prefix##SETDEVICEMASK = 67,\
+    prefix##SETDISCARDRECTANGLEEXT = 68,\
+    prefix##SETEVENT = 69,\
+    prefix##SETEXCLUSIVESCISSORNV = 70,\
+    prefix##SETLINESTIPPLEEXT = 71,\
+    prefix##SETLINEWIDTH = 72,\
+    prefix##SETPERFORMANCEMARKERINTEL = 73,\
+    prefix##SETPERFORMANCEOVERRIDEINTEL = 74,\
+    prefix##SETPERFORMANCESTREAMMARKERINTEL = 75,\
+    prefix##SETSAMPLELOCATIONSEXT = 76,\
+    prefix##SETSCISSOR = 77,\
+    prefix##SETSTENCILCOMPAREMASK = 78,\
+    prefix##SETSTENCILREFERENCE = 79,\
+    prefix##SETSTENCILWRITEMASK = 80,\
+    prefix##SETVIEWPORT = 81,\
+    prefix##SETVIEWPORTSHADINGRATEPALETTENV = 82,\
+    prefix##SETVIEWPORTWSCALINGNV = 83,\
+    prefix##TRACERAYSNV = 84,\
+    prefix##UPDATEBUFFER = 85,\
+    prefix##WAITEVENTS = 86,\
+    prefix##WRITEACCELERATIONSTRUCTURESPROPERTIESNV = 87,\
+    prefix##WRITEBUFFERMARKERAMD = 88,\
+    prefix##WRITETIMESTAMP = 89,\
+    prefix##ENDCOMMANDBUFFER = 90,\
+    prefix##RANGE_SIZE = 91
+
+#define VUID_CMD_NAME_LIST\
+    "Command_Undefined",\
+    "vkCmdBeginConditionalRenderingEXT",\
+    "vkCmdBeginDebugUtilsLabelEXT",\
+    "vkCmdBeginQuery",\
+    "vkCmdBeginQueryIndexedEXT",\
+    "vkCmdBeginRenderPass",\
+    "vkCmdBeginRenderPass2KHR",\
+    "vkCmdBeginTransformFeedbackEXT",\
+    "vkCmdBindDescriptorSets",\
+    "vkCmdBindIndexBuffer",\
+    "vkCmdBindPipeline",\
+    "vkCmdBindShadingRateImageNV",\
+    "vkCmdBindTransformFeedbackBuffersEXT",\
+    "vkCmdBindVertexBuffers",\
+    "vkCmdBlitImage",\
+    "vkCmdBuildAccelerationStructureNV",\
+    "vkCmdClearAttachments",\
+    "vkCmdClearColorImage",\
+    "vkCmdClearDepthStencilImage",\
+    "vkCmdCopyAccelerationStructureNV",\
+    "vkCmdCopyBuffer",\
+    "vkCmdCopyBufferToImage",\
+    "vkCmdCopyImage",\
+    "vkCmdCopyImageToBuffer",\
+    "vkCmdCopyQueryPoolResults",\
+    "vkCmdDebugMarkerBeginEXT",\
+    "vkCmdDebugMarkerEndEXT",\
+    "vkCmdDebugMarkerInsertEXT",\
+    "vkCmdDispatch",\
+    "vkCmdDispatchBase",\
+    "vkCmdDispatchIndirect",\
+    "vkCmdDraw",\
+    "vkCmdDrawIndexed",\
+    "vkCmdDrawIndexedIndirect",\
+    "vkCmdDrawIndexedIndirectCountKHR",\
+    "vkCmdDrawIndirect",\
+    "vkCmdDrawIndirectByteCountEXT",\
+    "vkCmdDrawIndirectCountKHR",\
+    "vkCmdDrawMeshTasksIndirectCountNV",\
+    "vkCmdDrawMeshTasksIndirectNV",\
+    "vkCmdDrawMeshTasksNV",\
+    "vkCmdEndConditionalRenderingEXT",\
+    "vkCmdEndDebugUtilsLabelEXT",\
+    "vkCmdEndQuery",\
+    "vkCmdEndQueryIndexedEXT",\
+    "vkCmdEndRenderPass",\
+    "vkCmdEndRenderPass2KHR",\
+    "vkCmdEndTransformFeedbackEXT",\
+    "vkCmdExecuteCommands",\
+    "vkCmdFillBuffer",\
+    "vkCmdInsertDebugUtilsLabelEXT",\
+    "vkCmdNextSubpass",\
+    "vkCmdNextSubpass2KHR",\
+    "vkCmdPipelineBarrier",\
+    "vkCmdProcessCommandsNVX",\
+    "vkCmdPushConstants",\
+    "vkCmdPushDescriptorSetKHR",\
+    "vkCmdPushDescriptorSetWithTemplateKHR",\
+    "vkCmdReserveSpaceForCommandsNVX",\
+    "vkCmdResetEvent",\
+    "vkCmdResetQueryPool",\
+    "vkCmdResolveImage",\
+    "vkCmdSetBlendConstants",\
+    "vkCmdSetCheckpointNV",\
+    "vkCmdSetCoarseSampleOrderNV",\
+    "vkCmdSetDepthBias",\
+    "vkCmdSetDepthBounds",\
+    "vkCmdSetDeviceMask",\
+    "vkCmdSetDiscardRectangleEXT",\
+    "vkCmdSetEvent",\
+    "vkCmdSetExclusiveScissorNV",\
+    "vkCmdSetLineStippleEXT",\
+    "vkCmdSetLineWidth",\
+    "vkCmdSetPerformanceMarkerINTEL",\
+    "vkCmdSetPerformanceOverrideINTEL",\
+    "vkCmdSetPerformanceStreamMarkerINTEL",\
+    "vkCmdSetSampleLocationsEXT",\
+    "vkCmdSetScissor",\
+    "vkCmdSetStencilCompareMask",\
+    "vkCmdSetStencilReference",\
+    "vkCmdSetStencilWriteMask",\
+    "vkCmdSetViewport",\
+    "vkCmdSetViewportShadingRatePaletteNV",\
+    "vkCmdSetViewportWScalingNV",\
+    "vkCmdTraceRaysNV",\
+    "vkCmdUpdateBuffer",\
+    "vkCmdWaitEvents",\
+    "vkCmdWriteAccelerationStructuresPropertiesNV",\
+    "vkCmdWriteBufferMarkerAMD",\
+    "vkCmdWriteTimestamp",\
+    "vkEndCommandBuffer"
+
+#define VUID_MUST_BE_RECORDING_LIST\
+    "VUID_Undefined",\
+    "VUID-vkCmdBeginConditionalRenderingEXT-commandBuffer-recording",\
+    "VUID-vkCmdBeginDebugUtilsLabelEXT-commandBuffer-recording",\
+    "VUID-vkCmdBeginQuery-commandBuffer-recording",\
+    "VUID-vkCmdBeginQueryIndexedEXT-commandBuffer-recording",\
+    "VUID-vkCmdBeginRenderPass-commandBuffer-recording",\
+    "VUID-vkCmdBeginRenderPass2KHR-commandBuffer-recording",\
+    "VUID-vkCmdBeginTransformFeedbackEXT-commandBuffer-recording",\
+    "VUID-vkCmdBindDescriptorSets-commandBuffer-recording",\
+    "VUID-vkCmdBindIndexBuffer-commandBuffer-recording",\
+    "VUID-vkCmdBindPipeline-commandBuffer-recording",\
+    "VUID-vkCmdBindShadingRateImageNV-commandBuffer-recording",\
+    "VUID-vkCmdBindTransformFeedbackBuffersEXT-commandBuffer-recording",\
+    "VUID-vkCmdBindVertexBuffers-commandBuffer-recording",\
+    "VUID-vkCmdBlitImage-commandBuffer-recording",\
+    "VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-recording",\
+    "VUID-vkCmdClearAttachments-commandBuffer-recording",\
+    "VUID-vkCmdClearColorImage-commandBuffer-recording",\
+    "VUID-vkCmdClearDepthStencilImage-commandBuffer-recording",\
+    "VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-recording",\
+    "VUID-vkCmdCopyBuffer-commandBuffer-recording",\
+    "VUID-vkCmdCopyBufferToImage-commandBuffer-recording",\
+    "VUID-vkCmdCopyImage-commandBuffer-recording",\
+    "VUID-vkCmdCopyImageToBuffer-commandBuffer-recording",\
+    "VUID-vkCmdCopyQueryPoolResults-commandBuffer-recording",\
+    "VUID-vkCmdDebugMarkerBeginEXT-commandBuffer-recording",\
+    "VUID-vkCmdDebugMarkerEndEXT-commandBuffer-recording",\
+    "VUID-vkCmdDebugMarkerInsertEXT-commandBuffer-recording",\
+    "VUID-vkCmdDispatch-commandBuffer-recording",\
+    "VUID-vkCmdDispatchBase-commandBuffer-recording",\
+    "VUID-vkCmdDispatchIndirect-commandBuffer-recording",\
+    "VUID-vkCmdDraw-commandBuffer-recording",\
+    "VUID-vkCmdDrawIndexed-commandBuffer-recording",\
+    "VUID-vkCmdDrawIndexedIndirect-commandBuffer-recording",\
+    "VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-recording",\
+    "VUID-vkCmdDrawIndirect-commandBuffer-recording",\
+    "VUID-vkCmdDrawIndirectByteCountEXT-commandBuffer-recording",\
+    "VUID-vkCmdDrawIndirectCountKHR-commandBuffer-recording",\
+    "VUID-vkCmdDrawMeshTasksIndirectCountNV-commandBuffer-recording",\
+    "VUID-vkCmdDrawMeshTasksIndirectNV-commandBuffer-recording",\
+    "VUID-vkCmdDrawMeshTasksNV-commandBuffer-recording",\
+    "VUID-vkCmdEndConditionalRenderingEXT-commandBuffer-recording",\
+    "VUID-vkCmdEndDebugUtilsLabelEXT-commandBuffer-recording",\
+    "VUID-vkCmdEndQuery-commandBuffer-recording",\
+    "VUID-vkCmdEndQueryIndexedEXT-commandBuffer-recording",\
+    "VUID-vkCmdEndRenderPass-commandBuffer-recording",\
+    "VUID-vkCmdEndRenderPass2KHR-commandBuffer-recording",\
+    "VUID-vkCmdEndTransformFeedbackEXT-commandBuffer-recording",\
+    "VUID-vkCmdExecuteCommands-commandBuffer-recording",\
+    "VUID-vkCmdFillBuffer-commandBuffer-recording",\
+    "VUID-vkCmdInsertDebugUtilsLabelEXT-commandBuffer-recording",\
+    "VUID-vkCmdNextSubpass-commandBuffer-recording",\
+    "VUID-vkCmdNextSubpass2KHR-commandBuffer-recording",\
+    "VUID-vkCmdPipelineBarrier-commandBuffer-recording",\
+    "VUID-vkCmdProcessCommandsNVX-commandBuffer-recording",\
+    "VUID-vkCmdPushConstants-commandBuffer-recording",\
+    "VUID-vkCmdPushDescriptorSetKHR-commandBuffer-recording",\
+    "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-recording",\
+    "VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-recording",\
+    "VUID-vkCmdResetEvent-commandBuffer-recording",\
+    "VUID-vkCmdResetQueryPool-commandBuffer-recording",\
+    "VUID-vkCmdResolveImage-commandBuffer-recording",\
+    "VUID-vkCmdSetBlendConstants-commandBuffer-recording",\
+    "VUID-vkCmdSetCheckpointNV-commandBuffer-recording",\
+    "VUID-vkCmdSetCoarseSampleOrderNV-commandBuffer-recording",\
+    "VUID-vkCmdSetDepthBias-commandBuffer-recording",\
+    "VUID-vkCmdSetDepthBounds-commandBuffer-recording",\
+    "VUID-vkCmdSetDeviceMask-commandBuffer-recording",\
+    "VUID-vkCmdSetDiscardRectangleEXT-commandBuffer-recording",\
+    "VUID-vkCmdSetEvent-commandBuffer-recording",\
+    "VUID-vkCmdSetExclusiveScissorNV-commandBuffer-recording",\
+    "VUID-vkCmdSetLineStippleEXT-commandBuffer-recording",\
+    "VUID-vkCmdSetLineWidth-commandBuffer-recording",\
+    "VUID-vkCmdSetPerformanceMarkerINTEL-commandBuffer-recording",\
+    "VUID-vkCmdSetPerformanceOverrideINTEL-commandBuffer-recording",\
+    "VUID-vkCmdSetPerformanceStreamMarkerINTEL-commandBuffer-recording",\
+    "VUID-vkCmdSetSampleLocationsEXT-commandBuffer-recording",\
+    "VUID-vkCmdSetScissor-commandBuffer-recording",\
+    "VUID-vkCmdSetStencilCompareMask-commandBuffer-recording",\
+    "VUID-vkCmdSetStencilReference-commandBuffer-recording",\
+    "VUID-vkCmdSetStencilWriteMask-commandBuffer-recording",\
+    "VUID-vkCmdSetViewport-commandBuffer-recording",\
+    "VUID-vkCmdSetViewportShadingRatePaletteNV-commandBuffer-recording",\
+    "VUID-vkCmdSetViewportWScalingNV-commandBuffer-recording",\
+    "VUID-vkCmdTraceRaysNV-commandBuffer-recording",\
+    "VUID-vkCmdUpdateBuffer-commandBuffer-recording",\
+    "VUID-vkCmdWaitEvents-commandBuffer-recording",\
+    "VUID-vkCmdWriteAccelerationStructuresPropertiesNV-commandBuffer-recording",\
+    "VUID-vkCmdWriteBufferMarkerAMD-commandBuffer-recording",\
+    "VUID-vkCmdWriteTimestamp-commandBuffer-recording",\
+    "VUID-vkEndCommandBuffer-commandBuffer-00059"
diff --git a/src/third_party/vulkan-validation-layers/src/layers/gpu_validation.cpp b/src/third_party/vulkan-validation-layers/src/layers/gpu_validation.cpp
new file mode 100644
index 0000000..e37efe3
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/gpu_validation.cpp
@@ -0,0 +1,2595 @@
+/* Copyright (c) 2018-2019 The Khronos Group Inc.
+ * Copyright (c) 2018-2019 Valve Corporation
+ * Copyright (c) 2018-2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Karl Schultz <karl@lunarg.com>
+ * Author: Tony Barbour <tony@lunarg.com>
+ */
+
+// Allow use of STL min and max functions in Windows
+#define NOMINMAX
+// This define indicates to build the VMA routines themselves
+#define VMA_IMPLEMENTATION
+// This define indicates that we will supply Vulkan function pointers at initialization
+#define VMA_STATIC_VULKAN_FUNCTIONS 0
+#include "gpu_validation.h"
+#include "shader_validation.h"
+#include "spirv-tools/libspirv.h"
+#include "spirv-tools/optimizer.hpp"
+#include "spirv-tools/instrument.hpp"
+#include <SPIRV/spirv.hpp>
+#include <algorithm>
+#include <regex>
+#include "layer_chassis_dispatch.h"
+
+// This is the number of bindings in the debug descriptor set.
+static const uint32_t kNumBindingsInSet = 3;
+
+static const VkShaderStageFlags kShaderStageAllRayTracing =
+    VK_SHADER_STAGE_ANY_HIT_BIT_NV | VK_SHADER_STAGE_CALLABLE_BIT_NV | VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV |
+    VK_SHADER_STAGE_INTERSECTION_BIT_NV | VK_SHADER_STAGE_MISS_BIT_NV | VK_SHADER_STAGE_RAYGEN_BIT_NV;
+
+// Keep in sync with the GLSL shader below.
+struct GpuAccelerationStructureBuildValidationBuffer {
+    uint32_t instances_to_validate;
+    uint32_t replacement_handle_bits_0;
+    uint32_t replacement_handle_bits_1;
+    uint32_t invalid_handle_found;
+    uint32_t invalid_handle_bits_0;
+    uint32_t invalid_handle_bits_1;
+    uint32_t valid_handles_count;
+};
+
+// This is the GLSL source for the compute shader used during ray tracing acceleration structure build
+// validation. It inspects instance buffers for top level acceleration structure builds, reports invalid
+// bottom level acceleration structure handles, and replaces them with a known good handle so that
+// applications can continue without undefined behavior long enough for the errors to be reported.
+//
+// #version 450
+// layout(local_size_x = 1, local_size_y = 1, local_size_z = 1) in;
+// struct VkGeometryInstanceNV {
+//     uint unused[14];
+//     uint handle_bits_0;
+//     uint handle_bits_1;
+// };
+// layout(set=0, binding=0, std430) buffer InstanceBuffer {
+//     VkGeometryInstanceNV instances[];
+// };
+// layout(set=0, binding=1, std430) buffer ValidationBuffer {
+//     uint instances_to_validate;
+//     uint replacement_handle_bits_0;
+//     uint replacement_handle_bits_1;
+//     uint invalid_handle_found;
+//     uint invalid_handle_bits_0;
+//     uint invalid_handle_bits_1;
+//     uint valid_handles_count;
+//     uint valid_handles[];
+// };
+// void main() {
+//     for (uint instance_index = 0; instance_index < instances_to_validate; instance_index++) {
+//         uint instance_handle_bits_0 = instances[instance_index].handle_bits_0;
+//         uint instance_handle_bits_1 = instances[instance_index].handle_bits_1;
+//         bool valid = false;
+//         for (uint valid_handle_index = 0; valid_handle_index < valid_handles_count; valid_handle_index++) {
+//             if (instance_handle_bits_0 == valid_handles[2*valid_handle_index+0] &&
+//                 instance_handle_bits_1 == valid_handles[2*valid_handle_index+1]) {
+//                 valid = true;
+//                 break;
+//             }
+//         }
+//         if (!valid) {
+//             invalid_handle_found += 1;
+//             invalid_handle_bits_0 = instance_handle_bits_0;
+//             invalid_handle_bits_1 = instance_handle_bits_1;
+//             instances[instance_index].handle_bits_0 = replacement_handle_bits_0;
+//             instances[instance_index].handle_bits_1 = replacement_handle_bits_1;
+//         }
+//     }
+// }
+//
+// To regenerate the spirv below:
+//   1. Save the above GLSL source to a file called validation_shader.comp.
+//   2. Run in terminal
+//
+//      glslangValidator.exe -x -V validation_shader.comp -o validation_shader.comp.spv
+//
+//   3. Copy-paste the contents of validation_shader.comp.spv here (clang-format will fix up the alignment).
+static const uint32_t kComputeShaderSpirv[] = {
+    0x07230203, 0x00010000, 0x00080007, 0x0000006d, 0x00000000, 0x00020011, 0x00000001, 0x0006000b, 0x00000001, 0x4c534c47,
+    0x6474732e, 0x3035342e, 0x00000000, 0x0003000e, 0x00000000, 0x00000001, 0x0005000f, 0x00000005, 0x00000004, 0x6e69616d,
+    0x00000000, 0x00060010, 0x00000004, 0x00000011, 0x00000001, 0x00000001, 0x00000001, 0x00030003, 0x00000002, 0x000001c2,
+    0x00040005, 0x00000004, 0x6e69616d, 0x00000000, 0x00060005, 0x00000008, 0x74736e69, 0x65636e61, 0x646e695f, 0x00007865,
+    0x00070005, 0x00000011, 0x696c6156, 0x69746164, 0x75426e6f, 0x72656666, 0x00000000, 0x00090006, 0x00000011, 0x00000000,
+    0x74736e69, 0x65636e61, 0x6f745f73, 0x6c61765f, 0x74616469, 0x00000065, 0x000a0006, 0x00000011, 0x00000001, 0x6c706572,
+    0x6d656361, 0x5f746e65, 0x646e6168, 0x625f656c, 0x5f737469, 0x00000030, 0x000a0006, 0x00000011, 0x00000002, 0x6c706572,
+    0x6d656361, 0x5f746e65, 0x646e6168, 0x625f656c, 0x5f737469, 0x00000031, 0x00090006, 0x00000011, 0x00000003, 0x61766e69,
+    0x5f64696c, 0x646e6168, 0x665f656c, 0x646e756f, 0x00000000, 0x00090006, 0x00000011, 0x00000004, 0x61766e69, 0x5f64696c,
+    0x646e6168, 0x625f656c, 0x5f737469, 0x00000030, 0x00090006, 0x00000011, 0x00000005, 0x61766e69, 0x5f64696c, 0x646e6168,
+    0x625f656c, 0x5f737469, 0x00000031, 0x00080006, 0x00000011, 0x00000006, 0x696c6176, 0x61685f64, 0x656c646e, 0x6f635f73,
+    0x00746e75, 0x00070006, 0x00000011, 0x00000007, 0x696c6176, 0x61685f64, 0x656c646e, 0x00000073, 0x00030005, 0x00000013,
+    0x00000000, 0x00080005, 0x0000001b, 0x74736e69, 0x65636e61, 0x6e61685f, 0x5f656c64, 0x73746962, 0x0000305f, 0x00080005,
+    0x0000001e, 0x65476b56, 0x74656d6f, 0x6e497972, 0x6e617473, 0x564e6563, 0x00000000, 0x00050006, 0x0000001e, 0x00000000,
+    0x73756e75, 0x00006465, 0x00070006, 0x0000001e, 0x00000001, 0x646e6168, 0x625f656c, 0x5f737469, 0x00000030, 0x00070006,
+    0x0000001e, 0x00000002, 0x646e6168, 0x625f656c, 0x5f737469, 0x00000031, 0x00060005, 0x00000020, 0x74736e49, 0x65636e61,
+    0x66667542, 0x00007265, 0x00060006, 0x00000020, 0x00000000, 0x74736e69, 0x65636e61, 0x00000073, 0x00030005, 0x00000022,
+    0x00000000, 0x00080005, 0x00000027, 0x74736e69, 0x65636e61, 0x6e61685f, 0x5f656c64, 0x73746962, 0x0000315f, 0x00040005,
+    0x0000002d, 0x696c6176, 0x00000064, 0x00070005, 0x0000002f, 0x696c6176, 0x61685f64, 0x656c646e, 0x646e695f, 0x00007865,
+    0x00040047, 0x00000010, 0x00000006, 0x00000004, 0x00050048, 0x00000011, 0x00000000, 0x00000023, 0x00000000, 0x00050048,
+    0x00000011, 0x00000001, 0x00000023, 0x00000004, 0x00050048, 0x00000011, 0x00000002, 0x00000023, 0x00000008, 0x00050048,
+    0x00000011, 0x00000003, 0x00000023, 0x0000000c, 0x00050048, 0x00000011, 0x00000004, 0x00000023, 0x00000010, 0x00050048,
+    0x00000011, 0x00000005, 0x00000023, 0x00000014, 0x00050048, 0x00000011, 0x00000006, 0x00000023, 0x00000018, 0x00050048,
+    0x00000011, 0x00000007, 0x00000023, 0x0000001c, 0x00030047, 0x00000011, 0x00000003, 0x00040047, 0x00000013, 0x00000022,
+    0x00000000, 0x00040047, 0x00000013, 0x00000021, 0x00000001, 0x00040047, 0x0000001d, 0x00000006, 0x00000004, 0x00050048,
+    0x0000001e, 0x00000000, 0x00000023, 0x00000000, 0x00050048, 0x0000001e, 0x00000001, 0x00000023, 0x00000038, 0x00050048,
+    0x0000001e, 0x00000002, 0x00000023, 0x0000003c, 0x00040047, 0x0000001f, 0x00000006, 0x00000040, 0x00050048, 0x00000020,
+    0x00000000, 0x00000023, 0x00000000, 0x00030047, 0x00000020, 0x00000003, 0x00040047, 0x00000022, 0x00000022, 0x00000000,
+    0x00040047, 0x00000022, 0x00000021, 0x00000000, 0x00020013, 0x00000002, 0x00030021, 0x00000003, 0x00000002, 0x00040015,
+    0x00000006, 0x00000020, 0x00000000, 0x00040020, 0x00000007, 0x00000007, 0x00000006, 0x0004002b, 0x00000006, 0x00000009,
+    0x00000000, 0x0003001d, 0x00000010, 0x00000006, 0x000a001e, 0x00000011, 0x00000006, 0x00000006, 0x00000006, 0x00000006,
+    0x00000006, 0x00000006, 0x00000006, 0x00000010, 0x00040020, 0x00000012, 0x00000002, 0x00000011, 0x0004003b, 0x00000012,
+    0x00000013, 0x00000002, 0x00040015, 0x00000014, 0x00000020, 0x00000001, 0x0004002b, 0x00000014, 0x00000015, 0x00000000,
+    0x00040020, 0x00000016, 0x00000002, 0x00000006, 0x00020014, 0x00000019, 0x0004002b, 0x00000006, 0x0000001c, 0x0000000e,
+    0x0004001c, 0x0000001d, 0x00000006, 0x0000001c, 0x0005001e, 0x0000001e, 0x0000001d, 0x00000006, 0x00000006, 0x0003001d,
+    0x0000001f, 0x0000001e, 0x0003001e, 0x00000020, 0x0000001f, 0x00040020, 0x00000021, 0x00000002, 0x00000020, 0x0004003b,
+    0x00000021, 0x00000022, 0x00000002, 0x0004002b, 0x00000014, 0x00000024, 0x00000001, 0x0004002b, 0x00000014, 0x00000029,
+    0x00000002, 0x00040020, 0x0000002c, 0x00000007, 0x00000019, 0x0003002a, 0x00000019, 0x0000002e, 0x0004002b, 0x00000014,
+    0x00000036, 0x00000006, 0x0004002b, 0x00000014, 0x0000003b, 0x00000007, 0x0004002b, 0x00000006, 0x0000003c, 0x00000002,
+    0x0004002b, 0x00000006, 0x00000048, 0x00000001, 0x00030029, 0x00000019, 0x00000050, 0x0004002b, 0x00000014, 0x00000058,
+    0x00000003, 0x0004002b, 0x00000014, 0x0000005d, 0x00000004, 0x0004002b, 0x00000014, 0x00000060, 0x00000005, 0x00050036,
+    0x00000002, 0x00000004, 0x00000000, 0x00000003, 0x000200f8, 0x00000005, 0x0004003b, 0x00000007, 0x00000008, 0x00000007,
+    0x0004003b, 0x00000007, 0x0000001b, 0x00000007, 0x0004003b, 0x00000007, 0x00000027, 0x00000007, 0x0004003b, 0x0000002c,
+    0x0000002d, 0x00000007, 0x0004003b, 0x00000007, 0x0000002f, 0x00000007, 0x0003003e, 0x00000008, 0x00000009, 0x000200f9,
+    0x0000000a, 0x000200f8, 0x0000000a, 0x000400f6, 0x0000000c, 0x0000000d, 0x00000000, 0x000200f9, 0x0000000e, 0x000200f8,
+    0x0000000e, 0x0004003d, 0x00000006, 0x0000000f, 0x00000008, 0x00050041, 0x00000016, 0x00000017, 0x00000013, 0x00000015,
+    0x0004003d, 0x00000006, 0x00000018, 0x00000017, 0x000500b0, 0x00000019, 0x0000001a, 0x0000000f, 0x00000018, 0x000400fa,
+    0x0000001a, 0x0000000b, 0x0000000c, 0x000200f8, 0x0000000b, 0x0004003d, 0x00000006, 0x00000023, 0x00000008, 0x00070041,
+    0x00000016, 0x00000025, 0x00000022, 0x00000015, 0x00000023, 0x00000024, 0x0004003d, 0x00000006, 0x00000026, 0x00000025,
+    0x0003003e, 0x0000001b, 0x00000026, 0x0004003d, 0x00000006, 0x00000028, 0x00000008, 0x00070041, 0x00000016, 0x0000002a,
+    0x00000022, 0x00000015, 0x00000028, 0x00000029, 0x0004003d, 0x00000006, 0x0000002b, 0x0000002a, 0x0003003e, 0x00000027,
+    0x0000002b, 0x0003003e, 0x0000002d, 0x0000002e, 0x0003003e, 0x0000002f, 0x00000009, 0x000200f9, 0x00000030, 0x000200f8,
+    0x00000030, 0x000400f6, 0x00000032, 0x00000033, 0x00000000, 0x000200f9, 0x00000034, 0x000200f8, 0x00000034, 0x0004003d,
+    0x00000006, 0x00000035, 0x0000002f, 0x00050041, 0x00000016, 0x00000037, 0x00000013, 0x00000036, 0x0004003d, 0x00000006,
+    0x00000038, 0x00000037, 0x000500b0, 0x00000019, 0x00000039, 0x00000035, 0x00000038, 0x000400fa, 0x00000039, 0x00000031,
+    0x00000032, 0x000200f8, 0x00000031, 0x0004003d, 0x00000006, 0x0000003a, 0x0000001b, 0x0004003d, 0x00000006, 0x0000003d,
+    0x0000002f, 0x00050084, 0x00000006, 0x0000003e, 0x0000003c, 0x0000003d, 0x00050080, 0x00000006, 0x0000003f, 0x0000003e,
+    0x00000009, 0x00060041, 0x00000016, 0x00000040, 0x00000013, 0x0000003b, 0x0000003f, 0x0004003d, 0x00000006, 0x00000041,
+    0x00000040, 0x000500aa, 0x00000019, 0x00000042, 0x0000003a, 0x00000041, 0x000300f7, 0x00000044, 0x00000000, 0x000400fa,
+    0x00000042, 0x00000043, 0x00000044, 0x000200f8, 0x00000043, 0x0004003d, 0x00000006, 0x00000045, 0x00000027, 0x0004003d,
+    0x00000006, 0x00000046, 0x0000002f, 0x00050084, 0x00000006, 0x00000047, 0x0000003c, 0x00000046, 0x00050080, 0x00000006,
+    0x00000049, 0x00000047, 0x00000048, 0x00060041, 0x00000016, 0x0000004a, 0x00000013, 0x0000003b, 0x00000049, 0x0004003d,
+    0x00000006, 0x0000004b, 0x0000004a, 0x000500aa, 0x00000019, 0x0000004c, 0x00000045, 0x0000004b, 0x000200f9, 0x00000044,
+    0x000200f8, 0x00000044, 0x000700f5, 0x00000019, 0x0000004d, 0x00000042, 0x00000031, 0x0000004c, 0x00000043, 0x000300f7,
+    0x0000004f, 0x00000000, 0x000400fa, 0x0000004d, 0x0000004e, 0x0000004f, 0x000200f8, 0x0000004e, 0x0003003e, 0x0000002d,
+    0x00000050, 0x000200f9, 0x00000032, 0x000200f8, 0x0000004f, 0x000200f9, 0x00000033, 0x000200f8, 0x00000033, 0x0004003d,
+    0x00000006, 0x00000052, 0x0000002f, 0x00050080, 0x00000006, 0x00000053, 0x00000052, 0x00000024, 0x0003003e, 0x0000002f,
+    0x00000053, 0x000200f9, 0x00000030, 0x000200f8, 0x00000032, 0x0004003d, 0x00000019, 0x00000054, 0x0000002d, 0x000400a8,
+    0x00000019, 0x00000055, 0x00000054, 0x000300f7, 0x00000057, 0x00000000, 0x000400fa, 0x00000055, 0x00000056, 0x00000057,
+    0x000200f8, 0x00000056, 0x00050041, 0x00000016, 0x00000059, 0x00000013, 0x00000058, 0x0004003d, 0x00000006, 0x0000005a,
+    0x00000059, 0x00050080, 0x00000006, 0x0000005b, 0x0000005a, 0x00000048, 0x00050041, 0x00000016, 0x0000005c, 0x00000013,
+    0x00000058, 0x0003003e, 0x0000005c, 0x0000005b, 0x0004003d, 0x00000006, 0x0000005e, 0x0000001b, 0x00050041, 0x00000016,
+    0x0000005f, 0x00000013, 0x0000005d, 0x0003003e, 0x0000005f, 0x0000005e, 0x0004003d, 0x00000006, 0x00000061, 0x00000027,
+    0x00050041, 0x00000016, 0x00000062, 0x00000013, 0x00000060, 0x0003003e, 0x00000062, 0x00000061, 0x0004003d, 0x00000006,
+    0x00000063, 0x00000008, 0x00050041, 0x00000016, 0x00000064, 0x00000013, 0x00000024, 0x0004003d, 0x00000006, 0x00000065,
+    0x00000064, 0x00070041, 0x00000016, 0x00000066, 0x00000022, 0x00000015, 0x00000063, 0x00000024, 0x0003003e, 0x00000066,
+    0x00000065, 0x0004003d, 0x00000006, 0x00000067, 0x00000008, 0x00050041, 0x00000016, 0x00000068, 0x00000013, 0x00000029,
+    0x0004003d, 0x00000006, 0x00000069, 0x00000068, 0x00070041, 0x00000016, 0x0000006a, 0x00000022, 0x00000015, 0x00000067,
+    0x00000029, 0x0003003e, 0x0000006a, 0x00000069, 0x000200f9, 0x00000057, 0x000200f8, 0x00000057, 0x000200f9, 0x0000000d,
+    0x000200f8, 0x0000000d, 0x0004003d, 0x00000006, 0x0000006b, 0x00000008, 0x00050080, 0x00000006, 0x0000006c, 0x0000006b,
+    0x00000024, 0x0003003e, 0x00000008, 0x0000006c, 0x000200f9, 0x0000000a, 0x000200f8, 0x0000000c, 0x000100fd, 0x00010038};
+
+// Implementation for Descriptor Set Manager class
+GpuAssistedDescriptorSetManager::GpuAssistedDescriptorSetManager(GpuAssisted *dev_data) { dev_data_ = dev_data; }
+
+GpuAssistedDescriptorSetManager::~GpuAssistedDescriptorSetManager() {
+    for (auto &pool : desc_pool_map_) {
+        DispatchDestroyDescriptorPool(dev_data_->device, pool.first, NULL);
+    }
+    desc_pool_map_.clear();
+}
+
+VkResult GpuAssistedDescriptorSetManager::GetDescriptorSet(VkDescriptorPool *desc_pool, VkDescriptorSet *desc_set) {
+    std::vector<VkDescriptorSet> desc_sets;
+    VkResult result = GetDescriptorSets(1, desc_pool, &desc_sets);
+    if (result == VK_SUCCESS) {
+        *desc_set = desc_sets[0];
+    }
+    return result;
+}
+
+VkResult GpuAssistedDescriptorSetManager::GetDescriptorSets(uint32_t count, VkDescriptorPool *pool,
+                                                            std::vector<VkDescriptorSet> *desc_sets) {
+    const uint32_t default_pool_size = kItemsPerChunk;
+    VkResult result = VK_SUCCESS;
+    VkDescriptorPool pool_to_use = VK_NULL_HANDLE;
+
+    if (0 == count) {
+        return result;
+    }
+    desc_sets->clear();
+    desc_sets->resize(count);
+
+    for (auto &pool : desc_pool_map_) {
+        if (pool.second.used + count < pool.second.size) {
+            pool_to_use = pool.first;
+            break;
+        }
+    }
+    if (VK_NULL_HANDLE == pool_to_use) {
+        uint32_t pool_count = default_pool_size;
+        if (count > default_pool_size) {
+            pool_count = count;
+        }
+        const VkDescriptorPoolSize size_counts = {
+            VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
+            pool_count * kNumBindingsInSet,
+        };
+        VkDescriptorPoolCreateInfo desc_pool_info = {};
+        desc_pool_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+        desc_pool_info.pNext = NULL;
+        desc_pool_info.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
+        desc_pool_info.maxSets = pool_count;
+        desc_pool_info.poolSizeCount = 1;
+        desc_pool_info.pPoolSizes = &size_counts;
+        result = DispatchCreateDescriptorPool(dev_data_->device, &desc_pool_info, NULL, &pool_to_use);
+        assert(result == VK_SUCCESS);
+        if (result != VK_SUCCESS) {
+            return result;
+        }
+        desc_pool_map_[pool_to_use].size = desc_pool_info.maxSets;
+        desc_pool_map_[pool_to_use].used = 0;
+    }
+    std::vector<VkDescriptorSetLayout> desc_layouts(count, dev_data_->debug_desc_layout);
+
+    VkDescriptorSetAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, NULL, pool_to_use, count,
+                                              desc_layouts.data()};
+
+    result = DispatchAllocateDescriptorSets(dev_data_->device, &alloc_info, desc_sets->data());
+    assert(result == VK_SUCCESS);
+    if (result != VK_SUCCESS) {
+        return result;
+    }
+    *pool = pool_to_use;
+    desc_pool_map_[pool_to_use].used += count;
+    return result;
+}
+
+void GpuAssistedDescriptorSetManager::PutBackDescriptorSet(VkDescriptorPool desc_pool, VkDescriptorSet desc_set) {
+    auto iter = desc_pool_map_.find(desc_pool);
+    if (iter != desc_pool_map_.end()) {
+        VkResult result = DispatchFreeDescriptorSets(dev_data_->device, desc_pool, 1, &desc_set);
+        assert(result == VK_SUCCESS);
+        if (result != VK_SUCCESS) {
+            return;
+        }
+        desc_pool_map_[desc_pool].used--;
+        if (0 == desc_pool_map_[desc_pool].used) {
+            DispatchDestroyDescriptorPool(dev_data_->device, desc_pool, NULL);
+            desc_pool_map_.erase(desc_pool);
+        }
+    }
+    return;
+}
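+
+// Illustrative sketch (hypothetical helper, not referenced elsewhere): one way a caller could borrow a
+// descriptor set from the manager for a single use and hand it back afterwards. Error handling mirrors the
+// VK_SUCCESS checks above; the function name is assumed for illustration only.
+static void ExampleUseOfDescriptorSetManager(GpuAssistedDescriptorSetManager *manager) {
+    VkDescriptorPool pool = VK_NULL_HANDLE;
+    VkDescriptorSet set = VK_NULL_HANDLE;
+    // The manager either reuses an existing pool with spare capacity or creates a new one on demand.
+    if (manager->GetDescriptorSet(&pool, &set) != VK_SUCCESS) {
+        return;
+    }
+    // ... update 'set' with the debug output/input buffers and bind it at the reserved index ...
+    // Putting the set back decrements the pool's use count; the pool is destroyed once the count hits zero.
+    manager->PutBackDescriptorSet(pool, set);
+}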
+
+// Trampolines to make VMA call Dispatch for Vulkan calls
+static VKAPI_ATTR void VKAPI_CALL gpuVkGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,
+                                                                   VkPhysicalDeviceProperties *pProperties) {
+    DispatchGetPhysicalDeviceProperties(physicalDevice, pProperties);
+}
+static VKAPI_ATTR void VKAPI_CALL gpuVkGetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice,
+                                                                         VkPhysicalDeviceMemoryProperties *pMemoryProperties) {
+    DispatchGetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
+}
+static VKAPI_ATTR VkResult VKAPI_CALL gpuVkAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
+                                                          const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory) {
+    return DispatchAllocateMemory(device, pAllocateInfo, pAllocator, pMemory);
+}
+static VKAPI_ATTR void VKAPI_CALL gpuVkFreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks *pAllocator) {
+    DispatchFreeMemory(device, memory, pAllocator);
+}
+static VKAPI_ATTR VkResult VKAPI_CALL gpuVkMapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size,
+                                                     VkMemoryMapFlags flags, void **ppData) {
+    return DispatchMapMemory(device, memory, offset, size, flags, ppData);
+}
+static VKAPI_ATTR void VKAPI_CALL gpuVkUnmapMemory(VkDevice device, VkDeviceMemory memory) { DispatchUnmapMemory(device, memory); }
+static VKAPI_ATTR VkResult VKAPI_CALL gpuVkFlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount,
+                                                                   const VkMappedMemoryRange *pMemoryRanges) {
+    return DispatchFlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
+}
+static VKAPI_ATTR VkResult VKAPI_CALL gpuVkInvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount,
+                                                                        const VkMappedMemoryRange *pMemoryRanges) {
+    return DispatchInvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
+}
+static VKAPI_ATTR VkResult VKAPI_CALL gpuVkBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory,
+                                                            VkDeviceSize memoryOffset) {
+    return DispatchBindBufferMemory(device, buffer, memory, memoryOffset);
+}
+static VKAPI_ATTR VkResult VKAPI_CALL gpuVkBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory,
+                                                           VkDeviceSize memoryOffset) {
+    return DispatchBindImageMemory(device, image, memory, memoryOffset);
+}
+static VKAPI_ATTR void VKAPI_CALL gpuVkGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
+                                                                   VkMemoryRequirements *pMemoryRequirements) {
+    DispatchGetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
+}
+static VKAPI_ATTR void VKAPI_CALL gpuVkGetImageMemoryRequirements(VkDevice device, VkImage image,
+                                                                  VkMemoryRequirements *pMemoryRequirements) {
+    DispatchGetImageMemoryRequirements(device, image, pMemoryRequirements);
+}
+static VKAPI_ATTR VkResult VKAPI_CALL gpuVkCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
+                                                        const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer) {
+    return DispatchCreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
+}
+static VKAPI_ATTR void VKAPI_CALL gpuVkDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
+    return DispatchDestroyBuffer(device, buffer, pAllocator);
+}
+static VKAPI_ATTR VkResult VKAPI_CALL gpuVkCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
+                                                       const VkAllocationCallbacks *pAllocator, VkImage *pImage) {
+    return DispatchCreateImage(device, pCreateInfo, pAllocator, pImage);
+}
+static VKAPI_ATTR void VKAPI_CALL gpuVkDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
+    DispatchDestroyImage(device, image, pAllocator);
+}
+static VKAPI_ATTR void VKAPI_CALL gpuVkCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
+                                                     uint32_t regionCount, const VkBufferCopy *pRegions) {
+    DispatchCmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
+}
+
+VkResult GpuAssisted::InitializeVma(VkPhysicalDevice physical_device, VkDevice device, VmaAllocator *pAllocator) {
+    VmaVulkanFunctions functions;
+    VmaAllocatorCreateInfo allocatorInfo = {};
+    allocatorInfo.device = device;
+    allocatorInfo.physicalDevice = physical_device;
+
+    functions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)gpuVkGetPhysicalDeviceProperties;
+    functions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)gpuVkGetPhysicalDeviceMemoryProperties;
+    functions.vkAllocateMemory = (PFN_vkAllocateMemory)gpuVkAllocateMemory;
+    functions.vkFreeMemory = (PFN_vkFreeMemory)gpuVkFreeMemory;
+    functions.vkMapMemory = (PFN_vkMapMemory)gpuVkMapMemory;
+    functions.vkUnmapMemory = (PFN_vkUnmapMemory)gpuVkUnmapMemory;
+    functions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)gpuVkFlushMappedMemoryRanges;
+    functions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)gpuVkInvalidateMappedMemoryRanges;
+    functions.vkBindBufferMemory = (PFN_vkBindBufferMemory)gpuVkBindBufferMemory;
+    functions.vkBindImageMemory = (PFN_vkBindImageMemory)gpuVkBindImageMemory;
+    functions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)gpuVkGetBufferMemoryRequirements;
+    functions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)gpuVkGetImageMemoryRequirements;
+    functions.vkCreateBuffer = (PFN_vkCreateBuffer)gpuVkCreateBuffer;
+    functions.vkDestroyBuffer = (PFN_vkDestroyBuffer)gpuVkDestroyBuffer;
+    functions.vkCreateImage = (PFN_vkCreateImage)gpuVkCreateImage;
+    functions.vkDestroyImage = (PFN_vkDestroyImage)gpuVkDestroyImage;
+    functions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)gpuVkCmdCopyBuffer;
+    allocatorInfo.pVulkanFunctions = &functions;
+
+    return vmaCreateAllocator(&allocatorInfo, pAllocator);
+}
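+
+// Illustrative sketch (assumed helper name): the allocator produced by InitializeVma is used with the
+// vmaCreateBuffer/vmaDestroyBuffer pattern shown here, which is the same pattern the build-validation code
+// below follows for its vertex and index buffers.
+static VkResult ExampleCreateHostVisibleBuffer(VmaAllocator allocator, VkDeviceSize size, VkBuffer *buffer,
+                                               VmaAllocation *allocation) {
+    VkBufferCreateInfo buffer_ci = {};
+    buffer_ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    buffer_ci.size = size;
+    buffer_ci.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
+
+    VmaAllocationCreateInfo alloc_ci = {};
+    alloc_ci.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
+    alloc_ci.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+
+    // Every Vulkan call VMA makes for this buffer goes through the dispatch trampolines registered above.
+    return vmaCreateBuffer(allocator, &buffer_ci, &alloc_ci, buffer, allocation, nullptr);
+}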
+
+// Convenience function for reporting problems with setting up GPU Validation.
+void GpuAssisted::ReportSetupProblem(VkDebugReportObjectTypeEXT object_type, uint64_t object_handle,
+                                     const char *const specific_message) const {
+    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle, "UNASSIGNED-GPU-Assisted Validation Error. ",
+            "Detail: (%s)", specific_message);
+}
+
+void GpuAssisted::PreCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
+                                            const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer, void *cb_state_data) {
+    // Ray tracing acceleration structure instance buffers also need the storage buffer usage as
+    // acceleration structure build validation will find and replace invalid acceleration structure
+    // handles inside of a compute shader.
+    create_buffer_api_state *cb_state = reinterpret_cast<create_buffer_api_state *>(cb_state_data);
+    if (cb_state && cb_state->modified_create_info.usage & VK_BUFFER_USAGE_RAY_TRACING_BIT_NV) {
+        cb_state->modified_create_info.usage |= VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
+    }
+}
+
+// Turn on necessary device features.
+void GpuAssisted::PreCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *create_info,
+                                            const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
+                                            safe_VkDeviceCreateInfo *modified_create_info) {
+    VkPhysicalDeviceFeatures supported_features;
+    DispatchGetPhysicalDeviceFeatures(gpu, &supported_features);
+    if (supported_features.fragmentStoresAndAtomics || supported_features.vertexPipelineStoresAndAtomics ||
+        supported_features.shaderInt64) {
+        VkPhysicalDeviceFeatures *features = nullptr;
+        if (modified_create_info->pEnabledFeatures) {
+            // If pEnabledFeatures is set, a VkPhysicalDeviceFeatures2 in the pNext chain is not allowed
+            features = const_cast<VkPhysicalDeviceFeatures *>(modified_create_info->pEnabledFeatures);
+        } else {
+            VkPhysicalDeviceFeatures2 *features2 = nullptr;
+            features2 =
+                const_cast<VkPhysicalDeviceFeatures2 *>(lvl_find_in_chain<VkPhysicalDeviceFeatures2>(modified_create_info->pNext));
+            if (features2) features = &features2->features;
+        }
+        if (features) {
+            features->fragmentStoresAndAtomics = supported_features.fragmentStoresAndAtomics;
+            features->vertexPipelineStoresAndAtomics = supported_features.vertexPipelineStoresAndAtomics;
+            features->shaderInt64 = supported_features.shaderInt64;
+        } else {
+            VkPhysicalDeviceFeatures new_features = {};
+            new_features.fragmentStoresAndAtomics = supported_features.fragmentStoresAndAtomics;
+            new_features.vertexPipelineStoresAndAtomics = supported_features.vertexPipelineStoresAndAtomics;
+            new_features.shaderInt64 = supported_features.shaderInt64;
+            delete modified_create_info->pEnabledFeatures;
+            modified_create_info->pEnabledFeatures = new VkPhysicalDeviceFeatures(new_features);
+        }
+    }
+}
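+
+// Illustrative sketch (application-side, hypothetical): the two mutually exclusive ways a VkDeviceCreateInfo
+// can carry feature requests, which is why the override above patches either pEnabledFeatures or the
+// VkPhysicalDeviceFeatures2 found in the pNext chain.
+static void ExampleFeatureRequestForms(VkDeviceCreateInfo *legacy_ci, VkDeviceCreateInfo *chained_ci,
+                                       VkPhysicalDeviceFeatures *features, VkPhysicalDeviceFeatures2 *features2) {
+    // Legacy form: the features live directly in pEnabledFeatures, so the layer can patch that struct.
+    legacy_ci->pEnabledFeatures = features;
+    // Chained form: pEnabledFeatures must be NULL and the features ride in the pNext chain instead.
+    features2->sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
+    chained_ci->pEnabledFeatures = nullptr;
+    chained_ci->pNext = features2;
+}
+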
+// Perform initializations that can be done at Create Device time.
+void GpuAssisted::PostCallRecordCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo,
+                                             const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, VkResult result) {
+    // The state tracker sets up the device state
+    ValidationStateTracker::PostCallRecordCreateDevice(physicalDevice, pCreateInfo, pAllocator, pDevice, result);
+
+    ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
+    ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
+    GpuAssisted *device_gpu_assisted = static_cast<GpuAssisted *>(validation_data);
+
+    if (device_gpu_assisted->phys_dev_props.apiVersion < VK_API_VERSION_1_1) {
+        ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                           "GPU-Assisted validation requires Vulkan 1.1 or later.  GPU-Assisted Validation disabled.");
+        device_gpu_assisted->aborted = true;
+        return;
+    }
+
+    if (!device_gpu_assisted->enabled_features.core.fragmentStoresAndAtomics ||
+        !device_gpu_assisted->enabled_features.core.vertexPipelineStoresAndAtomics) {
+        ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                           "GPU-Assisted validation requires fragmentStoresAndAtomics and vertexPipelineStoresAndAtomics.  "
+                           "GPU-Assisted Validation disabled.");
+        device_gpu_assisted->aborted = true;
+        return;
+    }
+
+    if (device_extensions.vk_ext_buffer_device_address && !device_gpu_assisted->enabled_features.core.shaderInt64) {
+        log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                "UNASSIGNED-GPU-Assisted Validation Warning",
+                "shaderInt64 feature is not available.  No buffer device address checking will be attempted");
+    }
+    device_gpu_assisted->shaderInt64 = device_gpu_assisted->enabled_features.core.shaderInt64;
+
+    // If api version 1.1 or later, SetDeviceLoaderData will be in the loader
+    auto chain_info = get_chain_info(pCreateInfo, VK_LOADER_DATA_CALLBACK);
+    assert(chain_info->u.pfnSetDeviceLoaderData);
+    device_gpu_assisted->vkSetDeviceLoaderData = chain_info->u.pfnSetDeviceLoaderData;
+
+    // Some devices have extremely high limits here, so set a reasonable max because we have to pad
+    // the pipeline layout with dummy descriptor set layouts.
+    device_gpu_assisted->adjusted_max_desc_sets = device_gpu_assisted->phys_dev_props.limits.maxBoundDescriptorSets;
+    device_gpu_assisted->adjusted_max_desc_sets = std::min(33U, device_gpu_assisted->adjusted_max_desc_sets);
+
+    // We can't do anything if there is only one descriptor set we can bind.
+    // The device is probably not a legitimate Vulkan device, since the spec minimum is 4. Protect ourselves.
+    if (device_gpu_assisted->adjusted_max_desc_sets == 1) {
+        ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                           "Device can bind only a single descriptor set.  GPU-Assisted Validation disabled.");
+        device_gpu_assisted->aborted = true;
+        return;
+    }
+    device_gpu_assisted->desc_set_bind_index = device_gpu_assisted->adjusted_max_desc_sets - 1;
+    log_msg(report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+            "UNASSIGNED-GPU-Assisted Validation. ", "Shaders using descriptor set at index %d. ",
+            device_gpu_assisted->desc_set_bind_index);
+
+    device_gpu_assisted->output_buffer_size = sizeof(uint32_t) * (spvtools::kInst2MaxOutCnt + 1);
+    VkResult result1 = InitializeVma(physicalDevice, *pDevice, &device_gpu_assisted->vmaAllocator);
+    assert(result1 == VK_SUCCESS);
+    std::unique_ptr<GpuAssistedDescriptorSetManager> desc_set_manager(new GpuAssistedDescriptorSetManager(device_gpu_assisted));
+
+    // The descriptor indexing checks require only the first "output" binding.
+    const VkDescriptorSetLayoutBinding debug_desc_layout_bindings[kNumBindingsInSet] = {
+        {
+            0,  // output
+            VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
+            1,
+            VK_SHADER_STAGE_ALL_GRAPHICS | VK_SHADER_STAGE_COMPUTE_BIT | kShaderStageAllRayTracing,
+            NULL,
+        },
+        {
+            1,  // input
+            VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
+            1,
+            VK_SHADER_STAGE_ALL_GRAPHICS | VK_SHADER_STAGE_COMPUTE_BIT | kShaderStageAllRayTracing,
+            NULL,
+        },
+        {
+            2,  // input
+            VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
+            1,
+            VK_SHADER_STAGE_ALL_GRAPHICS | VK_SHADER_STAGE_COMPUTE_BIT | kShaderStageAllRayTracing,
+            NULL,
+        },
+    };
+    const VkDescriptorSetLayoutCreateInfo debug_desc_layout_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, NULL, 0,
+                                                                    kNumBindingsInSet, debug_desc_layout_bindings};
+
+    const VkDescriptorSetLayoutCreateInfo dummy_desc_layout_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, NULL, 0, 0,
+                                                                    NULL};
+
+    result1 = DispatchCreateDescriptorSetLayout(*pDevice, &debug_desc_layout_info, NULL, &device_gpu_assisted->debug_desc_layout);
+
+    // This is a layout used to "pad" a pipeline layout to fill in any gaps to the selected bind index.
+    VkResult result2 =
+        DispatchCreateDescriptorSetLayout(*pDevice, &dummy_desc_layout_info, NULL, &device_gpu_assisted->dummy_desc_layout);
+    assert((result1 == VK_SUCCESS) && (result2 == VK_SUCCESS));
+    if ((result1 != VK_SUCCESS) || (result2 != VK_SUCCESS)) {
+        ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(*pDevice),
+                           "Unable to create descriptor set layout.  GPU-Assisted Validation disabled.");
+        if (result1 == VK_SUCCESS) {
+            DispatchDestroyDescriptorSetLayout(*pDevice, device_gpu_assisted->debug_desc_layout, NULL);
+        }
+        if (result2 == VK_SUCCESS) {
+            DispatchDestroyDescriptorSetLayout(*pDevice, device_gpu_assisted->dummy_desc_layout, NULL);
+        }
+        device_gpu_assisted->debug_desc_layout = VK_NULL_HANDLE;
+        device_gpu_assisted->dummy_desc_layout = VK_NULL_HANDLE;
+        device_gpu_assisted->aborted = true;
+        return;
+    }
+    device_gpu_assisted->desc_set_manager = std::move(desc_set_manager);
+
+    // Register callback to be called at any ResetCommandBuffer time
+    device_gpu_assisted->SetCommandBufferResetCallback(
+        [device_gpu_assisted](VkCommandBuffer command_buffer) -> void { device_gpu_assisted->ResetCommandBuffer(command_buffer); });
+
+    CreateAccelerationStructureBuildValidationState(device_gpu_assisted);
+}
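+
+// Illustrative sketch of the pipeline-layout padding described above (hypothetical helper): the
+// application's set layouts keep their original slots, any gap up to the reserved bind index is filled with
+// the empty "dummy" layout, and the debug layout occupies the reserved slot. Assumes the application uses
+// fewer descriptor sets than the reserved index.
+static std::vector<VkDescriptorSetLayout> ExamplePadSetLayouts(std::vector<VkDescriptorSetLayout> app_layouts,
+                                                               VkDescriptorSetLayout dummy_layout,
+                                                               VkDescriptorSetLayout debug_layout,
+                                                               uint32_t desc_set_bind_index) {
+    // Fill the gap between the application's last set and the reserved index with the empty layout.
+    if (app_layouts.size() < desc_set_bind_index) {
+        app_layouts.resize(desc_set_bind_index, dummy_layout);
+    }
+    // The reserved index holds the layout that carries the debug output/input bindings.
+    app_layouts.push_back(debug_layout);
+    return app_layouts;
+}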
+
+void GpuAssisted::PostCallRecordGetBufferDeviceAddressEXT(VkDevice device, const VkBufferDeviceAddressInfoEXT *pInfo,
+                                                          VkDeviceAddress address) {
+    BUFFER_STATE *buffer_state = GetBufferState(pInfo->buffer);
+    // Validate against the size requested when the buffer was created
+    if (buffer_state) {
+        buffer_map[address] = buffer_state->createInfo.size;
+        buffer_state->deviceAddress = address;
+    }
+}
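+
+// Illustrative sketch (hypothetical helper): with buffer_map keyed by start address and holding the
+// creation-time size, a device address captured by a shader can be range checked by locating the closest
+// recorded buffer that starts at or below it.
+static bool ExampleAddressInKnownBuffer(const std::map<VkDeviceAddress, VkDeviceSize> &address_to_size,
+                                        VkDeviceAddress address) {
+    auto it = address_to_size.upper_bound(address);
+    if (it == address_to_size.begin()) return false;  // No recorded buffer starts at or below this address.
+    --it;
+    // In range if the address falls before the end of the buffer beginning at it->first.
+    return address < it->first + it->second;
+}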
+
+void GpuAssisted::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
+    BUFFER_STATE *buffer_state = GetBufferState(buffer);
+    if (buffer_state) buffer_map.erase(buffer_state->deviceAddress);
+}
+// Clean up device-related resources
+void GpuAssisted::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
+    for (auto &queue_barrier_command_info_kv : queue_barrier_command_infos) {
+        GpuAssistedQueueBarrierCommandInfo &queue_barrier_command_info = queue_barrier_command_info_kv.second;
+
+        DispatchFreeCommandBuffers(device, queue_barrier_command_info.barrier_command_pool, 1,
+                                   &queue_barrier_command_info.barrier_command_buffer);
+        queue_barrier_command_info.barrier_command_buffer = VK_NULL_HANDLE;
+
+        DispatchDestroyCommandPool(device, queue_barrier_command_info.barrier_command_pool, NULL);
+        queue_barrier_command_info.barrier_command_pool = VK_NULL_HANDLE;
+    }
+    queue_barrier_command_infos.clear();
+    if (debug_desc_layout) {
+        DispatchDestroyDescriptorSetLayout(device, debug_desc_layout, NULL);
+        debug_desc_layout = VK_NULL_HANDLE;
+    }
+    if (dummy_desc_layout) {
+        DispatchDestroyDescriptorSetLayout(device, dummy_desc_layout, NULL);
+        dummy_desc_layout = VK_NULL_HANDLE;
+    }
+    desc_set_manager.reset();
+
+    DestroyAccelerationStructureBuildValidationState();
+
+    if (vmaAllocator) {
+        vmaDestroyAllocator(vmaAllocator);
+    }
+}
+void GpuAssisted::CreateAccelerationStructureBuildValidationState(GpuAssisted *device_gpuav) {
+    if (device_gpuav->aborted) {
+        return;
+    }
+
+    auto &as_validation_state = device_gpuav->acceleration_structure_validation_state;
+    if (as_validation_state.initialized) {
+        return;
+    }
+
+    if (!device_extensions.vk_nv_ray_tracing) {
+        return;
+    }
+
+    // Outline:
+    //   - Create valid bottom level acceleration structure which acts as replacement
+    //      - Create and load vertex buffer
+    //      - Create and load index buffer
+    //      - Create, allocate memory for, and bind memory for acceleration structure
+    //      - Query acceleration structure handle
+    //      - Create command pool and command buffer
+    //      - Record build acceleration structure command
+    //      - Submit command buffer and wait for completion
+    //      - Cleanup
+    //  - Create compute pipeline for validating instance buffers
+    //      - Create descriptor set layout
+    //      - Create pipeline layout
+    //      - Create pipeline
+    //      - Cleanup
+
+    VkResult result = VK_SUCCESS;
+
+    VkBuffer vbo = VK_NULL_HANDLE;
+    VmaAllocation vbo_allocation = VK_NULL_HANDLE;
+    if (result == VK_SUCCESS) {
+        VkBufferCreateInfo vbo_ci = {};
+        vbo_ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+        vbo_ci.size = sizeof(float) * 9;
+        vbo_ci.usage = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV;
+
+        VmaAllocationCreateInfo vbo_ai = {};
+        vbo_ai.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
+        vbo_ai.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+
+        result = vmaCreateBuffer(device_gpuav->vmaAllocator, &vbo_ci, &vbo_ai, &vbo, &vbo_allocation, nullptr);
+        if (result != VK_SUCCESS) {
+            ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                               "Failed to create vertex buffer for acceleration structure build validation.");
+        }
+    }
+
+    if (result == VK_SUCCESS) {
+        uint8_t *mapped_vbo_buffer = nullptr;
+        result = vmaMapMemory(device_gpuav->vmaAllocator, vbo_allocation, (void **)&mapped_vbo_buffer);
+        if (result != VK_SUCCESS) {
+            ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                               "Failed to map vertex buffer for acceleration structure build validation.");
+        } else {
+            const std::vector<float> vertices = {1.0f, 0.0f, 0.0f, 0.5f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f};
+            std::memcpy(mapped_vbo_buffer, (uint8_t *)vertices.data(), sizeof(float) * vertices.size());
+            vmaUnmapMemory(device_gpuav->vmaAllocator, vbo_allocation);
+        }
+    }
+
+    VkBuffer ibo = VK_NULL_HANDLE;
+    VmaAllocation ibo_allocation = VK_NULL_HANDLE;
+    if (result == VK_SUCCESS) {
+        VkBufferCreateInfo ibo_ci = {};
+        ibo_ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+        ibo_ci.size = sizeof(uint32_t) * 3;
+        ibo_ci.usage = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV;
+
+        VmaAllocationCreateInfo ibo_ai = {};
+        ibo_ai.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
+        ibo_ai.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+
+        result = vmaCreateBuffer(device_gpuav->vmaAllocator, &ibo_ci, &ibo_ai, &ibo, &ibo_allocation, nullptr);
+        if (result != VK_SUCCESS) {
+            ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                               "Failed to create index buffer for acceleration structure build validation.");
+        }
+    }
+
+    if (result == VK_SUCCESS) {
+        uint8_t *mapped_ibo_buffer = nullptr;
+        result = vmaMapMemory(device_gpuav->vmaAllocator, ibo_allocation, (void **)&mapped_ibo_buffer);
+        if (result != VK_SUCCESS) {
+            ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                               "Failed to map index buffer for acceleration structure build validation.");
+        } else {
+            const std::vector<uint32_t> indices = {0, 1, 2};
+            std::memcpy(mapped_ibo_buffer, (uint8_t *)indices.data(), sizeof(uint32_t) * indices.size());
+            vmaUnmapMemory(device_gpuav->vmaAllocator, ibo_allocation);
+        }
+    }
+
+    VkGeometryNV geometry = {};
+    geometry.sType = VK_STRUCTURE_TYPE_GEOMETRY_NV;
+    geometry.geometryType = VK_GEOMETRY_TYPE_TRIANGLES_NV;
+    geometry.geometry.triangles.sType = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV;
+    geometry.geometry.triangles.vertexData = vbo;
+    geometry.geometry.triangles.vertexOffset = 0;
+    geometry.geometry.triangles.vertexCount = 3;
+    geometry.geometry.triangles.vertexStride = 12;
+    geometry.geometry.triangles.vertexFormat = VK_FORMAT_R32G32B32_SFLOAT;
+    geometry.geometry.triangles.indexData = ibo;
+    geometry.geometry.triangles.indexOffset = 0;
+    geometry.geometry.triangles.indexCount = 3;
+    geometry.geometry.triangles.indexType = VK_INDEX_TYPE_UINT32;
+    geometry.geometry.triangles.transformData = VK_NULL_HANDLE;
+    geometry.geometry.triangles.transformOffset = 0;
+    geometry.geometry.aabbs = {};
+    geometry.geometry.aabbs.sType = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV;
+
+    VkAccelerationStructureCreateInfoNV as_ci = {};
+    as_ci.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
+    as_ci.info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
+    as_ci.info.type = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV;
+    as_ci.info.instanceCount = 0;
+    as_ci.info.geometryCount = 1;
+    as_ci.info.pGeometries = &geometry;
+    if (result == VK_SUCCESS) {
+        result = DispatchCreateAccelerationStructureNV(device_gpuav->device, &as_ci, nullptr, &as_validation_state.replacement_as);
+        if (result != VK_SUCCESS) {
+            ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device_gpuav->device),
+                               "Failed to create acceleration structure for acceleration structure build validation.");
+        }
+    }
+
+    VkMemoryRequirements2 as_mem_requirements = {};
+    if (result == VK_SUCCESS) {
+        VkAccelerationStructureMemoryRequirementsInfoNV as_mem_requirements_info = {};
+        as_mem_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
+        as_mem_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
+        as_mem_requirements_info.accelerationStructure = as_validation_state.replacement_as;
+
+        DispatchGetAccelerationStructureMemoryRequirementsNV(device_gpuav->device, &as_mem_requirements_info, &as_mem_requirements);
+    }
+
+    VmaAllocationInfo as_memory_ai = {};
+    if (result == VK_SUCCESS) {
+        VmaAllocationCreateInfo as_memory_aci = {};
+        as_memory_aci.usage = VMA_MEMORY_USAGE_GPU_ONLY;
+
+        result = vmaAllocateMemory(device_gpuav->vmaAllocator, &as_mem_requirements.memoryRequirements, &as_memory_aci,
+                                   &as_validation_state.replacement_as_allocation, &as_memory_ai);
+        if (result != VK_SUCCESS) {
+            ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device_gpuav->device),
+                               "Failed to alloc acceleration structure memory for acceleration structure build validation.");
+        }
+    }
+
+    if (result == VK_SUCCESS) {
+        VkBindAccelerationStructureMemoryInfoNV as_bind_info = {};
+        as_bind_info.sType = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV;
+        as_bind_info.accelerationStructure = as_validation_state.replacement_as;
+        as_bind_info.memory = as_memory_ai.deviceMemory;
+        as_bind_info.memoryOffset = as_memory_ai.offset;
+
+        result = DispatchBindAccelerationStructureMemoryNV(device_gpuav->device, 1, &as_bind_info);
+        if (result != VK_SUCCESS) {
+            ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device_gpuav->device),
+                               "Failed to bind acceleration structure memory for acceleration structure build validation.");
+        }
+    }
+
+    if (result == VK_SUCCESS) {
+        result = DispatchGetAccelerationStructureHandleNV(device_gpuav->device, as_validation_state.replacement_as,
+                                                          sizeof(uint64_t), &as_validation_state.replacement_as_handle);
+        if (result != VK_SUCCESS) {
+            ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device_gpuav->device),
+                               "Failed to get acceleration structure handle for acceleration structure build validation.");
+        }
+    }
+
+    VkMemoryRequirements2 scratch_mem_requirements = {};
+    if (result == VK_SUCCESS) {
+        VkAccelerationStructureMemoryRequirementsInfoNV scratch_mem_requirements_info = {};
+        scratch_mem_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
+        scratch_mem_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
+        scratch_mem_requirements_info.accelerationStructure = as_validation_state.replacement_as;
+
+        DispatchGetAccelerationStructureMemoryRequirementsNV(device_gpuav->device, &scratch_mem_requirements_info,
+                                                             &scratch_mem_requirements);
+    }
+
+    VkBuffer scratch = VK_NULL_HANDLE;
+    if (result == VK_SUCCESS) {
+        VkBufferCreateInfo scratch_ci = {};
+        scratch_ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+        scratch_ci.size = scratch_mem_requirements.memoryRequirements.size;
+        scratch_ci.usage = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV;
+
+        result = DispatchCreateBuffer(device_gpuav->device, &scratch_ci, nullptr, &scratch);
+        if (result != VK_SUCCESS) {
+            ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device_gpuav->device),
+                               "Failed to create scratch buffer for acceleration structure build validation.");
+        }
+    }
+
+    VmaAllocation scratch_allocation = VK_NULL_HANDLE;
+    VmaAllocationInfo scratch_allocation_info = {};
+    if (result == VK_SUCCESS) {
+        VmaAllocationCreateInfo scratch_aci = {};
+        scratch_aci.usage = VMA_MEMORY_USAGE_GPU_ONLY;
+
+        result = vmaAllocateMemory(device_gpuav->vmaAllocator, &scratch_mem_requirements.memoryRequirements, &scratch_aci,
+                                   &scratch_allocation, &scratch_allocation_info);
+        if (result != VK_SUCCESS) {
+            ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device_gpuav->device),
+                               "Failed to alloc scratch memory for acceleration structure build validation.");
+        }
+    }
+
+    if (result == VK_SUCCESS) {
+        result = DispatchBindBufferMemory(device_gpuav->device, scratch, scratch_allocation_info.deviceMemory,
+                                          scratch_allocation_info.offset);
+        if (result != VK_SUCCESS) {
+            ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device_gpuav->device),
+                               "Failed to bind scratch memory for acceleration structure build validation.");
+        }
+    }
+
+    VkCommandPool command_pool = VK_NULL_HANDLE;
+    if (result == VK_SUCCESS) {
+        VkCommandPoolCreateInfo command_pool_ci = {};
+        command_pool_ci.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+        command_pool_ci.queueFamilyIndex = 0;
+
+        result = DispatchCreateCommandPool(device_gpuav->device, &command_pool_ci, nullptr, &command_pool);
+        if (result != VK_SUCCESS) {
+            ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device_gpuav->device),
+                               "Failed to create command pool for acceleration structure build validation.");
+        }
+    }
+
+    VkCommandBuffer command_buffer = VK_NULL_HANDLE;
+
+    if (result == VK_SUCCESS) {
+        VkCommandBufferAllocateInfo command_buffer_ai = {};
+        command_buffer_ai.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+        command_buffer_ai.commandPool = command_pool;
+        command_buffer_ai.commandBufferCount = 1;
+        command_buffer_ai.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+
+        result = DispatchAllocateCommandBuffers(device_gpuav->device, &command_buffer_ai, &command_buffer);
+        if (result != VK_SUCCESS) {
+            ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device_gpuav->device),
+                               "Failed to create command buffer for acceleration structure build validation.");
+        }
+
+        // Hook up command buffer dispatch
+        device_gpuav->vkSetDeviceLoaderData(device_gpuav->device, command_buffer);
+    }
+
+    if (result == VK_SUCCESS) {
+        VkCommandBufferBeginInfo command_buffer_bi = {};
+        command_buffer_bi.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+
+        result = DispatchBeginCommandBuffer(command_buffer, &command_buffer_bi);
+        if (result != VK_SUCCESS) {
+            ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device_gpuav->device),
+                               "Failed to begin command buffer for acceleration structure build validation.");
+        }
+    }
+
+    if (result == VK_SUCCESS) {
+        DispatchCmdBuildAccelerationStructureNV(command_buffer, &as_ci.info, VK_NULL_HANDLE, 0, VK_FALSE,
+                                                as_validation_state.replacement_as, VK_NULL_HANDLE, scratch, 0);
+        DispatchEndCommandBuffer(command_buffer);
+    }
+
+    VkQueue queue = VK_NULL_HANDLE;
+    if (result == VK_SUCCESS) {
+        DispatchGetDeviceQueue(device_gpuav->device, 0, 0, &queue);
+
+        // Hook up queue dispatch
+        device_gpuav->vkSetDeviceLoaderData(device_gpuav->device, queue);
+
+        VkSubmitInfo submit_info = {};
+        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+        submit_info.commandBufferCount = 1;
+        submit_info.pCommandBuffers = &command_buffer;
+        result = DispatchQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
+        if (result != VK_SUCCESS) {
+            ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device_gpuav->device),
+                               "Failed to submit command buffer for acceleration structure build validation.");
+        }
+    }
+
+    if (result == VK_SUCCESS) {
+        result = DispatchQueueWaitIdle(queue);
+        if (result != VK_SUCCESS) {
+            ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device_gpuav->device),
+                               "Failed to wait for queue idle for acceleration structure build validation.");
+        }
+    }
+
+    if (vbo != VK_NULL_HANDLE) {
+        vmaDestroyBuffer(device_gpuav->vmaAllocator, vbo, vbo_allocation);
+    }
+    if (ibo != VK_NULL_HANDLE) {
+        vmaDestroyBuffer(device_gpuav->vmaAllocator, ibo, ibo_allocation);
+    }
+    if (scratch != VK_NULL_HANDLE) {
+        DispatchDestroyBuffer(device_gpuav->device, scratch, nullptr);
+        vmaFreeMemory(device_gpuav->vmaAllocator, scratch_allocation);
+    }
+    if (command_pool != VK_NULL_HANDLE) {
+        DispatchDestroyCommandPool(device_gpuav->device, command_pool, nullptr);
+    }
+
+    if (device_gpuav->debug_desc_layout == VK_NULL_HANDLE) {
+        ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device_gpuav->device),
+                           "Failed to find descriptor set layout for acceleration structure build validation.");
+        result = VK_INCOMPLETE;
+    }
+
+    if (result == VK_SUCCESS) {
+        VkPipelineLayoutCreateInfo pipeline_layout_ci = {};
+        pipeline_layout_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+        pipeline_layout_ci.setLayoutCount = 1;
+        pipeline_layout_ci.pSetLayouts = &device_gpuav->debug_desc_layout;
+        result = DispatchCreatePipelineLayout(device_gpuav->device, &pipeline_layout_ci, 0, &as_validation_state.pipeline_layout);
+        if (result != VK_SUCCESS) {
+            ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device_gpuav->device),
+                               "Failed to create pipeline layout for acceleration structure build validation.");
+        }
+    }
+
+    VkShaderModule shader_module = VK_NULL_HANDLE;
+    if (result == VK_SUCCESS) {
+        VkShaderModuleCreateInfo shader_module_ci = {};
+        shader_module_ci.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
+        shader_module_ci.codeSize = sizeof(kComputeShaderSpirv);
+        shader_module_ci.pCode = (uint32_t *)kComputeShaderSpirv;
+
+        result = DispatchCreateShaderModule(device_gpuav->device, &shader_module_ci, nullptr, &shader_module);
+        if (result != VK_SUCCESS) {
+            ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device_gpuav->device),
+                               "Failed to create compute shader module for acceleration structure build validation.");
+        }
+    }
+
+    if (result == VK_SUCCESS) {
+        VkPipelineShaderStageCreateInfo pipeline_stage_ci = {};
+        pipeline_stage_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+        pipeline_stage_ci.stage = VK_SHADER_STAGE_COMPUTE_BIT;
+        pipeline_stage_ci.module = shader_module;
+        pipeline_stage_ci.pName = "main";
+
+        VkComputePipelineCreateInfo pipeline_ci = {};
+        pipeline_ci.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
+        pipeline_ci.stage = pipeline_stage_ci;
+        pipeline_ci.layout = as_validation_state.pipeline_layout;
+
+        result = DispatchCreateComputePipelines(device_gpuav->device, VK_NULL_HANDLE, 1, &pipeline_ci, nullptr,
+                                                &as_validation_state.pipeline);
+        if (result != VK_SUCCESS) {
+            ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device_gpuav->device),
+                               "Failed to create compute pipeline for acceleration structure build validation.");
+        }
+    }
+
+    if (shader_module != VK_NULL_HANDLE) {
+        DispatchDestroyShaderModule(device_gpuav->device, shader_module, nullptr);
+    }
+
+    if (result == VK_SUCCESS) {
+        as_validation_state.initialized = true;
+        log_msg(report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+                HandleToUint64(device_gpuav->device), "UNASSIGNED-GPU-Assisted Validation.",
+                "Acceleration Structure Building GPU Validation Enabled.");
+    } else {
+        device_gpuav->aborted = true;
+    }
+}
+
+void GpuAssisted::DestroyAccelerationStructureBuildValidationState() {
+    auto &as_validation_state = acceleration_structure_validation_state;
+    if (as_validation_state.pipeline != VK_NULL_HANDLE) {
+        DispatchDestroyPipeline(device, as_validation_state.pipeline, nullptr);
+    }
+    if (as_validation_state.pipeline_layout != VK_NULL_HANDLE) {
+        DispatchDestroyPipelineLayout(device, as_validation_state.pipeline_layout, nullptr);
+    }
+    if (as_validation_state.replacement_as != VK_NULL_HANDLE) {
+        DispatchDestroyAccelerationStructureNV(device, as_validation_state.replacement_as, nullptr);
+    }
+    if (as_validation_state.replacement_as_allocation != VK_NULL_HANDLE) {
+        vmaFreeMemory(vmaAllocator, as_validation_state.replacement_as_allocation);
+    }
+}
+
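+// Snapshot of the state bound at a pipeline bind point (pipeline, descriptor sets, push
+// descriptors, push constants).  Lets the validation dispatch recorded below bind its own compute
+// pipeline and descriptor set and then restore the application's bindings afterwards.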
+struct GPUAV_RESTORABLE_PIPELINE_STATE {
+    VkPipelineBindPoint pipeline_bind_point = VK_PIPELINE_BIND_POINT_MAX_ENUM;
+    VkPipeline pipeline = VK_NULL_HANDLE;
+    VkPipelineLayout pipeline_layout = VK_NULL_HANDLE;
+    std::vector<VkDescriptorSet> descriptor_sets;
+    std::vector<std::vector<uint32_t>> dynamic_offsets;
+    uint32_t push_descriptor_set_index = 0;
+    std::vector<safe_VkWriteDescriptorSet> push_descriptor_set_writes;
+    std::vector<uint8_t> push_constants_data;
+    PushConstantRangesId push_constants_ranges;
+
+    void Create(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
+        pipeline_bind_point = bind_point;
+
+        LAST_BOUND_STATE &last_bound = cb_state->lastBound[bind_point];
+        if (last_bound.pipeline_state) {
+            pipeline = last_bound.pipeline_state->pipeline;
+            pipeline_layout = last_bound.pipeline_layout;
+            descriptor_sets.reserve(last_bound.per_set.size());
+            for (std::size_t i = 0; i < last_bound.per_set.size(); i++) {
+                const auto *bound_descriptor_set = last_bound.per_set[i].bound_descriptor_set;
+
+                descriptor_sets.push_back(bound_descriptor_set->GetSet());
+                if (bound_descriptor_set->IsPushDescriptor()) {
+                    push_descriptor_set_index = static_cast<uint32_t>(i);
+                }
+                dynamic_offsets.push_back(last_bound.per_set[i].dynamicOffsets);
+            }
+
+            if (last_bound.push_descriptor_set) {
+                push_descriptor_set_writes = last_bound.push_descriptor_set->GetWrites();
+            }
+            if (last_bound.pipeline_state->pipeline_layout->push_constant_ranges == cb_state->push_constant_data_ranges) {
+                push_constants_data = cb_state->push_constant_data;
+                push_constants_ranges = last_bound.pipeline_state->pipeline_layout->push_constant_ranges;
+            }
+        }
+    }
+
+    void Restore(VkCommandBuffer command_buffer) const {
+        if (pipeline != VK_NULL_HANDLE) {
+            DispatchCmdBindPipeline(command_buffer, pipeline_bind_point, pipeline);
+            if (!descriptor_sets.empty()) {
+                for (std::size_t i = 0; i < descriptor_sets.size(); i++) {
+                    VkDescriptorSet descriptor_set = descriptor_sets[i];
+                    if (descriptor_set != VK_NULL_HANDLE) {
+                        DispatchCmdBindDescriptorSets(command_buffer, pipeline_bind_point, pipeline_layout,
+                                                      static_cast<uint32_t>(i), 1, &descriptor_set,
+                                                      static_cast<uint32_t>(dynamic_offsets[i].size()), dynamic_offsets[i].data());
+                    }
+                }
+            }
+            if (!push_descriptor_set_writes.empty()) {
+                DispatchCmdPushDescriptorSetKHR(command_buffer, pipeline_bind_point, pipeline_layout, push_descriptor_set_index,
+                                                static_cast<uint32_t>(push_descriptor_set_writes.size()),
+                                                reinterpret_cast<const VkWriteDescriptorSet *>(push_descriptor_set_writes.data()));
+            }
+            // Only restore push constants when Create() captured matching ranges above.
+            if (push_constants_ranges) {
+                for (const auto &push_constant_range : *push_constants_ranges) {
+                    if (push_constant_range.size == 0) continue;
+                    DispatchCmdPushConstants(command_buffer, pipeline_layout, push_constant_range.stageFlags,
+                                             push_constant_range.offset, push_constant_range.size, push_constants_data.data());
+                }
+            }
+        }
+    }
+};
+
+void GpuAssisted::PreCallRecordCmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer,
+                                                               const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData,
+                                                               VkDeviceSize instanceOffset, VkBool32 update,
+                                                               VkAccelerationStructureNV dst, VkAccelerationStructureNV src,
+                                                               VkBuffer scratch, VkDeviceSize scratchOffset) {
+    if (pInfo == nullptr || pInfo->type != VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV) {
+        return;
+    }
+
+    auto &as_validation_state = acceleration_structure_validation_state;
+    if (!as_validation_state.initialized) {
+        return;
+    }
+
+    // An empty acceleration structure is valid according to the spec.
+    if (pInfo->instanceCount == 0 || instanceData == VK_NULL_HANDLE) {
+        return;
+    }
+
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state != nullptr);
+
+    std::vector<uint64_t> current_valid_handles;
+    for (const auto &as_state_kv : accelerationStructureMap) {
+        const ACCELERATION_STRUCTURE_STATE &as_state = *as_state_kv.second;
+        if (as_state.built && as_state.create_info.info.type == VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV) {
+            current_valid_handles.push_back(as_state.opaque_handle);
+        }
+    }
+
+    GpuAssistedAccelerationStructureBuildValidationBufferInfo as_validation_buffer_info = {};
+    as_validation_buffer_info.acceleration_structure = dst;
+
+    const VkDeviceSize validation_buffer_size =
+        // One uint for the number of instances to validate
+        4 +
+        // Two uints for the replacement acceleration structure handle
+        8 +
+        // One uint for the number of invalid handles found
+        4 +
+        // Two uints for the first invalid handle found
+        8 +
+        // One uint for the number of current valid handles
+        4 +
+        // Two uints for each current valid handle
+        (8 * current_valid_handles.size());
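+    // This size mirrors the layout written through GpuAccelerationStructureBuildValidationBuffer
+    // when the buffer is mapped below: a fixed-size header of counts and handle halves, followed
+    // by two uint32_t words (low/high) for each currently valid bottom level handle.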
+
+    VkBufferCreateInfo validation_buffer_create_info = {};
+    validation_buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    validation_buffer_create_info.size = validation_buffer_size;
+    validation_buffer_create_info.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
+
+    VmaAllocationCreateInfo validation_buffer_alloc_info = {};
+    validation_buffer_alloc_info.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+
+    VkResult result = vmaCreateBuffer(vmaAllocator, &validation_buffer_create_info, &validation_buffer_alloc_info,
+                                      &as_validation_buffer_info.validation_buffer,
+                                      &as_validation_buffer_info.validation_buffer_allocation, nullptr);
+    if (result != VK_SUCCESS) {
+        ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                           "Unable to allocate device memory.  Device could become unstable.");
+        aborted = true;
+        return;
+    }
+
+    GpuAccelerationStructureBuildValidationBuffer *mapped_validation_buffer = nullptr;
+    result = vmaMapMemory(vmaAllocator, as_validation_buffer_info.validation_buffer_allocation, (void **)&mapped_validation_buffer);
+    if (result != VK_SUCCESS) {
+        ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                           "Unable to map device memory for acceleration structure build validation buffer.");
+        aborted = true;
+        return;
+    }
+
+    mapped_validation_buffer->instances_to_validate = pInfo->instanceCount;
+    mapped_validation_buffer->replacement_handle_bits_0 =
+        reinterpret_cast<const uint32_t *>(&as_validation_state.replacement_as_handle)[0];
+    mapped_validation_buffer->replacement_handle_bits_1 =
+        reinterpret_cast<const uint32_t *>(&as_validation_state.replacement_as_handle)[1];
+    mapped_validation_buffer->invalid_handle_found = 0;
+    mapped_validation_buffer->invalid_handle_bits_0 = 0;
+    mapped_validation_buffer->invalid_handle_bits_1 = 0;
+    mapped_validation_buffer->valid_handles_count = static_cast<uint32_t>(current_valid_handles.size());
+
+    uint32_t *mapped_valid_handles = reinterpret_cast<uint32_t *>(&mapped_validation_buffer[1]);
+    for (std::size_t i = 0; i < current_valid_handles.size(); i++) {
+        const uint64_t current_valid_handle = current_valid_handles[i];
+
+        *mapped_valid_handles = reinterpret_cast<const uint32_t *>(&current_valid_handle)[0];
+        ++mapped_valid_handles;
+        *mapped_valid_handles = reinterpret_cast<const uint32_t *>(&current_valid_handle)[1];
+        ++mapped_valid_handles;
+    }
+
+    vmaUnmapMemory(vmaAllocator, as_validation_buffer_info.validation_buffer_allocation);
+
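+    // kInstanceSize corresponds to the 64-byte per-instance layout consumed by
+    // vkCmdBuildAccelerationStructureNV: a 3x4 transform plus packed index/mask/flags words and a
+    // 64-bit bottom level acceleration structure handle.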
+    static constexpr const VkDeviceSize kInstanceSize = 64;
+    const VkDeviceSize instance_buffer_size = kInstanceSize * pInfo->instanceCount;
+
+    result =
+        desc_set_manager->GetDescriptorSet(&as_validation_buffer_info.descriptor_pool, &as_validation_buffer_info.descriptor_set);
+    if (result != VK_SUCCESS) {
+        ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                           "Unable to get descriptor set for acceleration structure build.");
+        aborted = true;
+        return;
+    }
+
+    VkDescriptorBufferInfo descriptor_buffer_infos[2] = {};
+    descriptor_buffer_infos[0].buffer = instanceData;
+    descriptor_buffer_infos[0].offset = instanceOffset;
+    descriptor_buffer_infos[0].range = instance_buffer_size;
+    descriptor_buffer_infos[1].buffer = as_validation_buffer_info.validation_buffer;
+    descriptor_buffer_infos[1].offset = 0;
+    descriptor_buffer_infos[1].range = validation_buffer_size;
+
+    VkWriteDescriptorSet descriptor_set_writes[2] = {};
+    descriptor_set_writes[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_set_writes[0].dstSet = as_validation_buffer_info.descriptor_set;
+    descriptor_set_writes[0].dstBinding = 0;
+    descriptor_set_writes[0].descriptorCount = 1;
+    descriptor_set_writes[0].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
+    descriptor_set_writes[0].pBufferInfo = &descriptor_buffer_infos[0];
+    descriptor_set_writes[1].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_set_writes[1].dstSet = as_validation_buffer_info.descriptor_set;
+    descriptor_set_writes[1].dstBinding = 1;
+    descriptor_set_writes[1].descriptorCount = 1;
+    descriptor_set_writes[1].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
+    descriptor_set_writes[1].pBufferInfo = &descriptor_buffer_infos[1];
+
+    DispatchUpdateDescriptorSets(device, 2, descriptor_set_writes, 0, nullptr);
+
+    // Issue a memory barrier to make sure anything writing to the instance buffer has finished.
+    VkMemoryBarrier memory_barrier = {};
+    memory_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
+    memory_barrier.srcAccessMask = VK_ACCESS_MEMORY_WRITE_BIT;
+    memory_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
+    DispatchCmdPipelineBarrier(commandBuffer, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0, 1,
+                               &memory_barrier, 0, nullptr, 0, nullptr);
+
+    // Save a copy of the compute pipeline state that needs to be restored.
+    GPUAV_RESTORABLE_PIPELINE_STATE restorable_state;
+    restorable_state.Create(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
+
+    // Switch to and launch the validation compute shader to find, replace, and report invalid acceleration structure handles.
+    DispatchCmdBindPipeline(commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, as_validation_state.pipeline);
+    DispatchCmdBindDescriptorSets(commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, as_validation_state.pipeline_layout, 0, 1,
+                                  &as_validation_buffer_info.descriptor_set, 0, nullptr);
+    DispatchCmdDispatch(commandBuffer, 1, 1, 1);
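+    // A single workgroup is dispatched; the validation shader is expected to walk all
+    // instances_to_validate entries itself.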
+
+    // Issue a buffer memory barrier to make sure that any invalid bottom level acceleration structure handles
+    // have been replaced by the validation compute shader before any builds take place.
+    VkBufferMemoryBarrier instance_buffer_barrier = {};
+    instance_buffer_barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
+    instance_buffer_barrier.srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
+    instance_buffer_barrier.dstAccessMask = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV;
+    instance_buffer_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    instance_buffer_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    instance_buffer_barrier.buffer = instanceData;
+    instance_buffer_barrier.offset = instanceOffset;
+    instance_buffer_barrier.size = instance_buffer_size;
+    DispatchCmdPipelineBarrier(commandBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
+                               VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV, 0, 0, nullptr, 1, &instance_buffer_barrier, 0,
+                               nullptr);
+
+    // Restore the previous compute pipeline state.
+    restorable_state.Restore(commandBuffer);
+
+    as_validation_state.validation_buffers[commandBuffer].push_back(std::move(as_validation_buffer_info));
+}
+
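+// Read back the results written by the validation compute shader for this command buffer and
+// report any invalid bottom level acceleration structure handle that was recorded.  Expected to
+// run only once the submitted work has finished so the mapped contents are stable.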
+void GpuAssisted::ProcessAccelerationStructureBuildValidationBuffer(VkQueue queue, CMD_BUFFER_STATE *cb_node) {
+    if (cb_node == nullptr || !cb_node->hasBuildAccelerationStructureCmd) {
+        return;
+    }
+
+    auto &as_validation_info = acceleration_structure_validation_state;
+    auto &as_validation_buffer_infos = as_validation_info.validation_buffers[cb_node->commandBuffer];
+    for (const auto &as_validation_buffer_info : as_validation_buffer_infos) {
+        GpuAccelerationStructureBuildValidationBuffer *mapped_validation_buffer = nullptr;
+
+        VkResult result =
+            vmaMapMemory(vmaAllocator, as_validation_buffer_info.validation_buffer_allocation, (void **)&mapped_validation_buffer);
+        if (result == VK_SUCCESS) {
+            if (mapped_validation_buffer->invalid_handle_found > 0) {
+                uint64_t invalid_handle = 0;
+                reinterpret_cast<uint32_t *>(&invalid_handle)[0] = mapped_validation_buffer->invalid_handle_bits_0;
+                reinterpret_cast<uint32_t *>(&invalid_handle)[1] = mapped_validation_buffer->invalid_handle_bits_1;
+
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
+                        HandleToUint64(as_validation_buffer_info.acceleration_structure), "UNASSIGNED-AccelerationStructure",
+                        "Attempted to build top level acceleration structure using invalid bottom level acceleration structure "
+                        "handle (%" PRIu64 ")",
+                        invalid_handle);
+            }
+            vmaUnmapMemory(vmaAllocator, as_validation_buffer_info.validation_buffer_allocation);
+        }
+    }
+}
+
+void GpuAssisted::PostCallRecordBindAccelerationStructureMemoryNV(VkDevice device, uint32_t bindInfoCount,
+                                                                  const VkBindAccelerationStructureMemoryInfoNV *pBindInfos,
+                                                                  VkResult result) {
+    if (VK_SUCCESS != result) return;
+    ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(device, bindInfoCount, pBindInfos, result);
+    for (uint32_t i = 0; i < bindInfoCount; i++) {
+        const VkBindAccelerationStructureMemoryInfoNV &info = pBindInfos[i];
+        ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(info.accelerationStructure);
+        if (as_state) {
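+            // 8 == sizeof(uint64_t): cache the 64-bit opaque handle so it can later be matched
+            // against the handles referenced in instance data.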
+            DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
+        }
+    }
+}
+
+// Modify the pipeline layout to include our debug descriptor set and any needed padding with the dummy descriptor set.
+void GpuAssisted::PreCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
+                                                    const VkAllocationCallbacks *pAllocator, VkPipelineLayout *pPipelineLayout,
+                                                    void *cpl_state_data) {
+    if (aborted) {
+        return;
+    }
+
+    create_pipeline_layout_api_state *cpl_state = reinterpret_cast<create_pipeline_layout_api_state *>(cpl_state_data);
+
+    if (cpl_state->modified_create_info.setLayoutCount >= adjusted_max_desc_sets) {
+        std::ostringstream strm;
+        strm << "Pipeline Layout conflict with validation's descriptor set at slot " << desc_set_bind_index << ". "
+             << "Application has too many descriptor sets in the pipeline layout to continue with gpu validation. "
+             << "Validation is not modifying the pipeline layout. "
+             << "Instrumented shaders are replaced with non-instrumented shaders.";
+        ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device), strm.str().c_str());
+    } else {
+        // Modify the pipeline layout by:
+        // 1. Copying the caller's descriptor set layouts
+        // 2. Filling in dummy descriptor set layouts up to the max binding
+        // 3. Adding the debug descriptor set layout at the max binding slot
+        cpl_state->new_layouts.reserve(adjusted_max_desc_sets);
+        cpl_state->new_layouts.insert(cpl_state->new_layouts.end(), &pCreateInfo->pSetLayouts[0],
+                                      &pCreateInfo->pSetLayouts[pCreateInfo->setLayoutCount]);
+        for (uint32_t i = pCreateInfo->setLayoutCount; i < adjusted_max_desc_sets - 1; ++i) {
+            cpl_state->new_layouts.push_back(dummy_desc_layout);
+        }
+        cpl_state->new_layouts.push_back(debug_desc_layout);
+        cpl_state->modified_create_info.pSetLayouts = cpl_state->new_layouts.data();
+        cpl_state->modified_create_info.setLayoutCount = adjusted_max_desc_sets;
+    }
+}
+
+void GpuAssisted::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
+                                                     const VkAllocationCallbacks *pAllocator, VkPipelineLayout *pPipelineLayout,
+                                                     VkResult result) {
+    ValidationStateTracker::PostCallRecordCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout, result);
+
+    if (result != VK_SUCCESS) {
+        ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                           "Unable to create pipeline layout.  Device could become unstable.");
+        aborted = true;
+    }
+}
+
+// Free the device memory and descriptor set associated with a command buffer.
+void GpuAssisted::ResetCommandBuffer(VkCommandBuffer commandBuffer) {
+    if (aborted) {
+        return;
+    }
+    auto gpuav_buffer_list = GetGpuAssistedBufferInfo(commandBuffer);
+    for (auto buffer_info : gpuav_buffer_list) {
+        vmaDestroyBuffer(vmaAllocator, buffer_info.output_mem_block.buffer, buffer_info.output_mem_block.allocation);
+        if (buffer_info.di_input_mem_block.buffer) {
+            vmaDestroyBuffer(vmaAllocator, buffer_info.di_input_mem_block.buffer, buffer_info.di_input_mem_block.allocation);
+        }
+        if (buffer_info.bda_input_mem_block.buffer) {
+            vmaDestroyBuffer(vmaAllocator, buffer_info.bda_input_mem_block.buffer, buffer_info.bda_input_mem_block.allocation);
+        }
+        if (buffer_info.desc_set != VK_NULL_HANDLE) {
+            desc_set_manager->PutBackDescriptorSet(buffer_info.desc_pool, buffer_info.desc_set);
+        }
+    }
+    command_buffer_map.erase(commandBuffer);
+
+    auto &as_validation_info = acceleration_structure_validation_state;
+    auto &as_validation_buffer_infos = as_validation_info.validation_buffers[commandBuffer];
+    for (auto &as_validation_buffer_info : as_validation_buffer_infos) {
+        vmaDestroyBuffer(vmaAllocator, as_validation_buffer_info.validation_buffer,
+                         as_validation_buffer_info.validation_buffer_allocation);
+
+        if (as_validation_buffer_info.descriptor_set != VK_NULL_HANDLE) {
+            desc_set_manager->PutBackDescriptorSet(as_validation_buffer_info.descriptor_pool,
+                                                   as_validation_buffer_info.descriptor_set);
+        }
+    }
+    as_validation_info.validation_buffers.erase(commandBuffer);
+}
+
+// Just gives a warning about a possible deadlock.
+bool GpuAssisted::PreCallValidateCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
+                                               VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
+                                               uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
+                                               uint32_t bufferMemoryBarrierCount,
+                                               const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount,
+                                               const VkImageMemoryBarrier *pImageMemoryBarriers) const {
+    if (srcStageMask & VK_PIPELINE_STAGE_HOST_BIT) {
+        ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, HandleToUint64(commandBuffer),
+                           "CmdWaitEvents recorded with VK_PIPELINE_STAGE_HOST_BIT set. "
+                           "GPU_Assisted validation waits on queue completion. "
+                           "This wait could block the host's signaling of this event, resulting in deadlock.");
+    }
+    return false;
+}
+
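+// When the reserve-binding-slot feature is enabled, lower the advertised maxBoundDescriptorSets
+// by one so the top descriptor set slot remains free for the debug descriptor set bound by
+// GPU assisted validation.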
+void GpuAssisted::PostCallRecordGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,
+                                                            VkPhysicalDeviceProperties *pPhysicalDeviceProperties) {
+    // There is an implicit layer that can cause this call to return 0 for maxBoundDescriptorSets - Ignore such calls
+    if (enabled.gpu_validation_reserve_binding_slot && pPhysicalDeviceProperties->limits.maxBoundDescriptorSets > 0) {
+        if (pPhysicalDeviceProperties->limits.maxBoundDescriptorSets > 1) {
+            pPhysicalDeviceProperties->limits.maxBoundDescriptorSets -= 1;
+        } else {
+            log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+                    HandleToUint64(physicalDevice), "UNASSIGNED-GPU-Assisted Validation Setup Error.",
+                    "Unable to reserve descriptor binding slot on a device with only one slot.");
+        }
+    }
+}
+
+void GpuAssisted::PostCallRecordGetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice,
+                                                             VkPhysicalDeviceProperties2 *pPhysicalDeviceProperties2) {
+    // There is an implicit layer that can cause this call to return 0 for maxBoundDescriptorSets - Ignore such calls
+    if (enabled.gpu_validation_reserve_binding_slot && pPhysicalDeviceProperties2->properties.limits.maxBoundDescriptorSets > 0) {
+        if (pPhysicalDeviceProperties2->properties.limits.maxBoundDescriptorSets > 1) {
+            pPhysicalDeviceProperties2->properties.limits.maxBoundDescriptorSets -= 1;
+        } else {
+            log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+                    HandleToUint64(physicalDevice), "UNASSIGNED-GPU-Assisted Validation Setup Error.",
+                    "Unable to reserve descriptor binding slot on a device with only one slot.");
+        }
+    }
+}
+
+void GpuAssisted::PreCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+                                                       const VkGraphicsPipelineCreateInfo *pCreateInfos,
+                                                       const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
+                                                       void *cgpl_state_data) {
+    std::vector<safe_VkGraphicsPipelineCreateInfo> new_pipeline_create_infos;
+    create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
+    PreCallRecordPipelineCreations(count, pCreateInfos, pAllocator, pPipelines, cgpl_state->pipe_state, &new_pipeline_create_infos,
+                                   VK_PIPELINE_BIND_POINT_GRAPHICS);
+    cgpl_state->gpu_create_infos = new_pipeline_create_infos;
+    cgpl_state->pCreateInfos = reinterpret_cast<VkGraphicsPipelineCreateInfo *>(cgpl_state->gpu_create_infos.data());
+}
+
+void GpuAssisted::PreCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+                                                      const VkComputePipelineCreateInfo *pCreateInfos,
+                                                      const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
+                                                      void *ccpl_state_data) {
+    std::vector<safe_VkComputePipelineCreateInfo> new_pipeline_create_infos;
+    auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
+    PreCallRecordPipelineCreations(count, pCreateInfos, pAllocator, pPipelines, ccpl_state->pipe_state, &new_pipeline_create_infos,
+                                   VK_PIPELINE_BIND_POINT_COMPUTE);
+    ccpl_state->gpu_create_infos = new_pipeline_create_infos;
+    ccpl_state->pCreateInfos = reinterpret_cast<VkComputePipelineCreateInfo *>(ccpl_state->gpu_create_infos.data());
+}
+
+void GpuAssisted::PreCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+                                                           const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
+                                                           const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
+                                                           void *crtpl_state_data) {
+    std::vector<safe_VkRayTracingPipelineCreateInfoNV> new_pipeline_create_infos;
+    auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
+    PreCallRecordPipelineCreations(count, pCreateInfos, pAllocator, pPipelines, crtpl_state->pipe_state, &new_pipeline_create_infos,
+                                   VK_PIPELINE_BIND_POINT_RAY_TRACING_NV);
+    crtpl_state->gpu_create_infos = new_pipeline_create_infos;
+    crtpl_state->pCreateInfos = reinterpret_cast<VkRayTracingPipelineCreateInfoNV *>(crtpl_state->gpu_create_infos.data());
+}
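+
+// Traits adapter that lets the PreCallRecordPipelineCreations/PostCallRecordPipelineCreations
+// templates below treat graphics, compute, and ray tracing pipeline create infos uniformly.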
+template <typename CreateInfo>
+struct CreatePipelineTraits {};
+template <>
+struct CreatePipelineTraits<VkGraphicsPipelineCreateInfo> {
+    using SafeType = safe_VkGraphicsPipelineCreateInfo;
+    static const SafeType &GetPipelineCI(const PIPELINE_STATE *pipeline_state) { return pipeline_state->graphicsPipelineCI; }
+    static uint32_t GetStageCount(const VkGraphicsPipelineCreateInfo &createInfo) { return createInfo.stageCount; }
+    static VkShaderModule GetShaderModule(const VkGraphicsPipelineCreateInfo &createInfo, uint32_t stage) {
+        return createInfo.pStages[stage].module;
+    }
+    static void SetShaderModule(SafeType *createInfo, VkShaderModule shader_module, uint32_t stage) {
+        createInfo->pStages[stage].module = shader_module;
+    }
+};
+
+template <>
+struct CreatePipelineTraits<VkComputePipelineCreateInfo> {
+    using SafeType = safe_VkComputePipelineCreateInfo;
+    static const SafeType &GetPipelineCI(const PIPELINE_STATE *pipeline_state) { return pipeline_state->computePipelineCI; }
+    static uint32_t GetStageCount(const VkComputePipelineCreateInfo &createInfo) { return 1; }
+    static VkShaderModule GetShaderModule(const VkComputePipelineCreateInfo &createInfo, uint32_t stage) {
+        return createInfo.stage.module;
+    }
+    static void SetShaderModule(SafeType *createInfo, VkShaderModule shader_module, uint32_t stage) {
+        assert(stage == 0);
+        createInfo->stage.module = shader_module;
+    }
+};
+
+template <>
+struct CreatePipelineTraits<VkRayTracingPipelineCreateInfoNV> {
+    using SafeType = safe_VkRayTracingPipelineCreateInfoNV;
+    static const SafeType &GetPipelineCI(const PIPELINE_STATE *pipeline_state) { return pipeline_state->raytracingPipelineCI; }
+    static uint32_t GetStageCount(const VkRayTracingPipelineCreateInfoNV &createInfo) { return createInfo.stageCount; }
+    static VkShaderModule GetShaderModule(const VkRayTracingPipelineCreateInfoNV &createInfo, uint32_t stage) {
+        return createInfo.pStages[stage].module;
+    }
+    static void SetShaderModule(SafeType *createInfo, VkShaderModule shader_module, uint32_t stage) {
+        createInfo->pStages[stage].module = shader_module;
+    }
+};
+
+// Examine the pipelines to see if they use the debug descriptor set binding index.
+// If any do, create new non-instrumented shader modules and use them to replace the instrumented
+// shaders in the pipeline.  Return the (possibly) modified create infos to the caller.
+template <typename CreateInfo, typename SafeCreateInfo>
+void GpuAssisted::PreCallRecordPipelineCreations(uint32_t count, const CreateInfo *pCreateInfos,
+                                                 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
+                                                 std::vector<std::shared_ptr<PIPELINE_STATE>> &pipe_state,
+                                                 std::vector<SafeCreateInfo> *new_pipeline_create_infos,
+                                                 const VkPipelineBindPoint bind_point) {
+    using Accessor = CreatePipelineTraits<CreateInfo>;
+    if (bind_point != VK_PIPELINE_BIND_POINT_GRAPHICS && bind_point != VK_PIPELINE_BIND_POINT_COMPUTE &&
+        bind_point != VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
+        return;
+    }
+
+    // Walk through all the pipelines, make a copy of each and flag each pipeline that contains a shader that uses the debug
+    // descriptor set index.
+    for (uint32_t pipeline = 0; pipeline < count; ++pipeline) {
+        uint32_t stageCount = Accessor::GetStageCount(pCreateInfos[pipeline]);
+        new_pipeline_create_infos->push_back(Accessor::GetPipelineCI(pipe_state[pipeline].get()));
+
+        bool replace_shaders = false;
+        if (pipe_state[pipeline]->active_slots.find(desc_set_bind_index) != pipe_state[pipeline]->active_slots.end()) {
+            replace_shaders = true;
+        }
+        // If the app requests all available sets, the pipeline layout was not modified at pipeline layout creation and the already
+        // instrumented shaders need to be replaced with uninstrumented shaders
+        if (pipe_state[pipeline]->pipeline_layout->set_layouts.size() >= adjusted_max_desc_sets) {
+            replace_shaders = true;
+        }
+
+        if (replace_shaders) {
+            for (uint32_t stage = 0; stage < stageCount; ++stage) {
+                const SHADER_MODULE_STATE *shader = GetShaderModuleState(Accessor::GetShaderModule(pCreateInfos[pipeline], stage));
+
+                VkShaderModuleCreateInfo create_info = {};
+                VkShaderModule shader_module;
+                create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
+                create_info.pCode = shader->words.data();
+                create_info.codeSize = shader->words.size() * sizeof(uint32_t);
+                VkResult result = DispatchCreateShaderModule(device, &create_info, pAllocator, &shader_module);
+                if (result == VK_SUCCESS) {
+                    Accessor::SetShaderModule(&(*new_pipeline_create_infos)[pipeline], shader_module, stage);
+                } else {
+                    uint64_t moduleHandle = HandleToUint64(Accessor::GetShaderModule(pCreateInfos[pipeline], stage));
+                    ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, moduleHandle,
+                                       "Unable to replace instrumented shader with non-instrumented one.  "
+                                       "Device could become unstable.");
+                }
+            }
+        }
+    }
+}
+
+void GpuAssisted::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+                                                        const VkGraphicsPipelineCreateInfo *pCreateInfos,
+                                                        const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
+                                                        VkResult result, void *cgpl_state_data) {
+    ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(device, pipelineCache, count, pCreateInfos, pAllocator,
+                                                                  pPipelines, result, cgpl_state_data);
+    PostCallRecordPipelineCreations(count, pCreateInfos, pAllocator, pPipelines, VK_PIPELINE_BIND_POINT_GRAPHICS);
+}
+
+void GpuAssisted::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+                                                       const VkComputePipelineCreateInfo *pCreateInfos,
+                                                       const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
+                                                       VkResult result, void *ccpl_state_data) {
+    ValidationStateTracker::PostCallRecordCreateComputePipelines(device, pipelineCache, count, pCreateInfos, pAllocator, pPipelines,
+                                                                 result, ccpl_state_data);
+    PostCallRecordPipelineCreations(count, pCreateInfos, pAllocator, pPipelines, VK_PIPELINE_BIND_POINT_COMPUTE);
+}
+
+void GpuAssisted::PostCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+                                                            const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
+                                                            const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
+                                                            VkResult result, void *crtpl_state_data) {
+    ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, count, pCreateInfos, pAllocator,
+                                                                      pPipelines, result, crtpl_state_data);
+    PostCallRecordPipelineCreations(count, pCreateInfos, pAllocator, pPipelines, VK_PIPELINE_BIND_POINT_RAY_TRACING_NV);
+}
+
+// For every pipeline:
+// - For every shader in a pipeline:
+//   - If the shader had to be replaced in PreCallRecord (because the pipeline is using the debug desc set index):
+//     - Destroy it since it has been bound into the pipeline by now.  This is our only chance to delete it.
+//   - Track the shader in the shader_map
+//   - Save the shader binary if it contains debug code
+template <typename CreateInfo>
+void GpuAssisted::PostCallRecordPipelineCreations(const uint32_t count, const CreateInfo *pCreateInfos,
+                                                  const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
+                                                  const VkPipelineBindPoint bind_point) {
+    using Accessor = CreatePipelineTraits<CreateInfo>;
+    if (bind_point != VK_PIPELINE_BIND_POINT_GRAPHICS && bind_point != VK_PIPELINE_BIND_POINT_COMPUTE &&
+        bind_point != VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
+        return;
+    }
+    for (uint32_t pipeline = 0; pipeline < count; ++pipeline) {
+        auto pipeline_state = ValidationStateTracker::GetPipelineState(pPipelines[pipeline]);
+        if (nullptr == pipeline_state) continue;
+
+        uint32_t stageCount = 0;
+        if (bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) {
+            stageCount = pipeline_state->graphicsPipelineCI.stageCount;
+        } else if (bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
+            stageCount = 1;
+        } else if (bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
+            stageCount = pipeline_state->raytracingPipelineCI.stageCount;
+        } else {
+            assert(false);
+        }
+
+        for (uint32_t stage = 0; stage < stageCount; ++stage) {
+            if (pipeline_state->active_slots.find(desc_set_bind_index) != pipeline_state->active_slots.end()) {
+                DispatchDestroyShaderModule(device, Accessor::GetShaderModule(pCreateInfos[pipeline], stage), pAllocator);
+            }
+
+            const SHADER_MODULE_STATE *shader_state = nullptr;
+            if (bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) {
+                shader_state = GetShaderModuleState(pipeline_state->graphicsPipelineCI.pStages[stage].module);
+            } else if (bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
+                assert(stage == 0);
+                shader_state = GetShaderModuleState(pipeline_state->computePipelineCI.stage.module);
+            } else if (bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
+                shader_state = GetShaderModuleState(pipeline_state->raytracingPipelineCI.pStages[stage].module);
+            } else {
+                assert(false);
+            }
+
+            if (shader_state) {
+                std::vector<unsigned int> code;
+                // Save the shader binary if debug info is present.
+                // The core_validation ShaderModule tracker saves the binary too, but discards it when the ShaderModule
+                // is destroyed.  Applications may destroy ShaderModules after they are placed in a pipeline and before
+                // the pipeline is used, so we have to keep another copy.
+                if (shader_state->has_valid_spirv) {  // really checking for presence of SPIR-V code.
+                    for (auto insn : *shader_state) {
+                        if (insn.opcode() == spv::OpLine) {
+                            code = shader_state->words;
+                            break;
+                        }
+                    }
+                }
+                shader_map[shader_state->gpu_validation_shader_id].pipeline = pipeline_state->pipeline;
+                // Be careful to use the originally bound (instrumented) shader here, even if PreCallRecord had to back it
+                // out with a non-instrumented shader.  The non-instrumented shader (found in pCreateInfo) was destroyed above.
+                VkShaderModule shader_module = VK_NULL_HANDLE;
+                if (bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) {
+                    shader_module = pipeline_state->graphicsPipelineCI.pStages[stage].module;
+                } else if (bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
+                    assert(stage == 0);
+                    shader_module = pipeline_state->computePipelineCI.stage.module;
+                } else if (bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
+                    shader_module = pipeline_state->raytracingPipelineCI.pStages[stage].module;
+                } else {
+                    assert(false);
+                }
+                shader_map[shader_state->gpu_validation_shader_id].shader_module = shader_module;
+                shader_map[shader_state->gpu_validation_shader_id].pgm = std::move(code);
+            }
+        }
+    }
+}
+
+// Remove all the shader trackers associated with this destroyed pipeline.
+void GpuAssisted::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks *pAllocator) {
+    for (auto it = shader_map.begin(); it != shader_map.end();) {
+        if (it->second.pipeline == pipeline) {
+            it = shader_map.erase(it);
+        } else {
+            ++it;
+        }
+    }
+    ValidationStateTracker::PreCallRecordDestroyPipeline(device, pipeline, pAllocator);
+}
+
+// Call the SPIR-V Optimizer to run the instrumentation pass on the shader.
+bool GpuAssisted::InstrumentShader(const VkShaderModuleCreateInfo *pCreateInfo, std::vector<unsigned int> &new_pgm,
+                                   uint32_t *unique_shader_id) {
+    if (aborted) return false;
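+    // All SPIR-V modules begin with the magic number 0x07230203; anything else is not SPIR-V.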
+    if (pCreateInfo->pCode[0] != spv::MagicNumber) return false;
+
+    // Load original shader SPIR-V
+    uint32_t num_words = static_cast<uint32_t>(pCreateInfo->codeSize / 4);
+    new_pgm.clear();
+    new_pgm.reserve(num_words);
+    new_pgm.insert(new_pgm.end(), &pCreateInfo->pCode[0], &pCreateInfo->pCode[num_words]);
+
+    // Call the optimizer to instrument the shader.
+    // Use the unique_shader_module_id as a shader ID so we can look up its handle later in the shader_map.
+    // If descriptor indexing is enabled, enable length checks and updated descriptor checks
+    const bool descriptor_indexing = IsExtEnabled(device_extensions.vk_ext_descriptor_indexing);
+    using namespace spvtools;
+    spv_target_env target_env = SPV_ENV_VULKAN_1_1;
+    Optimizer optimizer(target_env);
+    optimizer.RegisterPass(
+        CreateInstBindlessCheckPass(desc_set_bind_index, unique_shader_module_id, descriptor_indexing, descriptor_indexing, 2));
+    optimizer.RegisterPass(CreateAggressiveDCEPass());
+    if (device_extensions.vk_ext_buffer_device_address && shaderInt64)
+        optimizer.RegisterPass(CreateInstBuffAddrCheckPass(desc_set_bind_index, unique_shader_module_id));
+    bool pass = optimizer.Run(new_pgm.data(), new_pgm.size(), &new_pgm);
+    if (!pass) {
+        ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, VK_NULL_HANDLE,
+                           "Failure to instrument shader.  Proceeding with non-instrumented shader.");
+    }
+    *unique_shader_id = unique_shader_module_id++;
+    return pass;
+}
+
+// Create the instrumented shader data to provide to the driver.
+void GpuAssisted::PreCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
+                                                  const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule,
+                                                  void *csm_state_data) {
+    create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);
+    bool pass = InstrumentShader(pCreateInfo, csm_state->instrumented_pgm, &csm_state->unique_shader_id);
+    if (pass) {
+        csm_state->instrumented_create_info.pCode = csm_state->instrumented_pgm.data();
+        csm_state->instrumented_create_info.codeSize = csm_state->instrumented_pgm.size() * sizeof(unsigned int);
+    }
+}
+
+// Generate the stage-specific part of the message.
+static void GenerateStageMessage(const uint32_t *debug_record, std::string &msg) {
+    using namespace spvtools;
+    std::ostringstream strm;
+    switch (debug_record[kInstCommonOutStageIdx]) {
+        case spv::ExecutionModelVertex: {
+            strm << "Stage = Vertex. Vertex Index = " << debug_record[kInstVertOutVertexIndex]
+                 << " Instance Index = " << debug_record[kInstVertOutInstanceIndex] << ". ";
+        } break;
+        case spv::ExecutionModelTessellationControl: {
+            strm << "Stage = Tessellation Control.  Invocation ID = " << debug_record[kInstTessCtlOutInvocationId]
+                 << ", Primitive ID = " << debug_record[kInstTessCtlOutPrimitiveId];
+        } break;
+        case spv::ExecutionModelTessellationEvaluation: {
+            strm << "Stage = Tessellation Eval.  Primitive ID = " << debug_record[kInstTessEvalOutPrimitiveId]
+                 << ", TessCoord (u, v) = (" << debug_record[kInstTessEvalOutTessCoordU] << ", "
+                 << debug_record[kInstTessEvalOutTessCoordV] << "). ";
+        } break;
+        case spv::ExecutionModelGeometry: {
+            strm << "Stage = Geometry.  Primitive ID = " << debug_record[kInstGeomOutPrimitiveId]
+                 << " Invocation ID = " << debug_record[kInstGeomOutInvocationId] << ". ";
+        } break;
+        case spv::ExecutionModelFragment: {
+            strm << "Stage = Fragment.  Fragment coord (x,y) = ("
+                 << *reinterpret_cast<const float *>(&debug_record[kInstFragOutFragCoordX]) << ", "
+                 << *reinterpret_cast<const float *>(&debug_record[kInstFragOutFragCoordY]) << "). ";
+        } break;
+        case spv::ExecutionModelGLCompute: {
+            strm << "Stage = Compute.  Global invocation ID (x, y, z) = (" << debug_record[kInstCompOutGlobalInvocationIdX] << ", "
+                 << debug_record[kInstCompOutGlobalInvocationIdY] << ", " << debug_record[kInstCompOutGlobalInvocationIdZ] << " )";
+        } break;
+        case spv::ExecutionModelRayGenerationNV: {
+            strm << "Stage = Ray Generation.  Global Launch ID (x,y,z) = (" << debug_record[kInstRayTracingOutLaunchIdX] << ", "
+                 << debug_record[kInstRayTracingOutLaunchIdY] << ", " << debug_record[kInstRayTracingOutLaunchIdZ] << "). ";
+        } break;
+        case spv::ExecutionModelIntersectionNV: {
+            strm << "Stage = Intersection.  Global Launch ID (x,y,z) = (" << debug_record[kInstRayTracingOutLaunchIdX] << ", "
+                 << debug_record[kInstRayTracingOutLaunchIdY] << ", " << debug_record[kInstRayTracingOutLaunchIdZ] << "). ";
+        } break;
+        case spv::ExecutionModelAnyHitNV: {
+            strm << "Stage = Any Hit.  Global Launch ID (x,y,z) = (" << debug_record[kInstRayTracingOutLaunchIdX] << ", "
+                 << debug_record[kInstRayTracingOutLaunchIdY] << ", " << debug_record[kInstRayTracingOutLaunchIdZ] << "). ";
+        } break;
+        case spv::ExecutionModelClosestHitNV: {
+            strm << "Stage = Closest Hit.  Global Launch ID (x,y,z) = (" << debug_record[kInstRayTracingOutLaunchIdX] << ", "
+                 << debug_record[kInstRayTracingOutLaunchIdY] << ", " << debug_record[kInstRayTracingOutLaunchIdZ] << "). ";
+        } break;
+        case spv::ExecutionModelMissNV: {
+            strm << "Stage = Miss.  Global Launch ID (x,y,z) = (" << debug_record[kInstRayTracingOutLaunchIdX] << ", "
+                 << debug_record[kInstRayTracingOutLaunchIdY] << ", " << debug_record[kInstRayTracingOutLaunchIdZ] << "). ";
+        } break;
+        case spv::ExecutionModelCallableNV: {
+            strm << "Stage = Callable.  Global Launch ID (x,y,z) = (" << debug_record[kInstRayTracingOutLaunchIdX] << ", "
+                 << debug_record[kInstRayTracingOutLaunchIdY] << ", " << debug_record[kInstRayTracingOutLaunchIdZ] << "). ";
+        } break;
+        default: {
+            strm << "Internal Error (unexpected stage = " << debug_record[kInstCommonOutStageIdx] << "). ";
+            assert(false);
+        } break;
+    }
+    msg = strm.str();
+}
+
+// Generate the part of the message describing the violation.
+static void GenerateValidationMessage(const uint32_t *debug_record, std::string &msg, std::string &vuid_msg) {
+    using namespace spvtools;
+    std::ostringstream strm;
+    switch (debug_record[kInst2ValidationOutError]) {
+        case kInstErrorBindlessBounds: {
+            strm << "Index of " << debug_record[kInst2BindlessBoundsOutDescIndex] << " used to index descriptor array of length "
+                 << debug_record[kInst2BindlessBoundsOutDescBound] << ". ";
+            vuid_msg = "UNASSIGNED-Descriptor index out of bounds";
+        } break;
+        case kInstErrorBindlessUninit: {
+            strm << "Descriptor index " << debug_record[kInst2BindlessUninitOutDescIndex] << " is uninitialized. ";
+            vuid_msg = "UNASSIGNED-Descriptor uninitialized";
+        } break;
+        case kInstErrorBuffAddrUnallocRef: {
+            uint64_t *ptr = (uint64_t *)&debug_record[kInst2BuffAddrUnallocOutDescPtrLo];
+            strm << "Device address 0x" << std::hex << *ptr << " access out of bounds. ";
+            vuid_msg = "UNASSIGNED-Device address out of bounds";
+        } break;
+        default: {
+            strm << "Internal Error (unexpected error type = " << debug_record[kInst2ValidationOutError] << "). ";
+            vuid_msg = "UNASSIGNED-Internal Error";
+            assert(false);
+        } break;
+    }
+    msg = strm.str();
+}
+
+static std::string LookupDebugUtilsName(const debug_report_data *report_data, const uint64_t object) {
+    auto object_label = report_data->DebugReportGetUtilsObjectName(object);
+    if (object_label != "") {
+        object_label = "(" + object_label + ")";
+    }
+    return object_label;
+}
+
+// Generate message from the common portion of the debug report record.
+static void GenerateCommonMessage(const debug_report_data *report_data, const CMD_BUFFER_STATE *cb_node,
+                                  const uint32_t *debug_record, const VkShaderModule shader_module_handle,
+                                  const VkPipeline pipeline_handle, const VkPipelineBindPoint pipeline_bind_point,
+                                  const uint32_t operation_index, std::string &msg) {
+    using namespace spvtools;
+    std::ostringstream strm;
+    if (shader_module_handle == VK_NULL_HANDLE) {
+        strm << std::hex << std::showbase << "Internal Error: Unable to locate information for shader used in command buffer "
+             << LookupDebugUtilsName(report_data, HandleToUint64(cb_node->commandBuffer)) << "("
+             << HandleToUint64(cb_node->commandBuffer) << "). ";
+        assert(false);
+    } else {
+        strm << std::hex << std::showbase << "Command buffer "
+             << LookupDebugUtilsName(report_data, HandleToUint64(cb_node->commandBuffer)) << "("
+             << HandleToUint64(cb_node->commandBuffer) << "). ";
+        if (pipeline_bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) {
+            strm << "Draw ";
+        } else if (pipeline_bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
+            strm << "Compute ";
+        } else if (pipeline_bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
+            strm << "Ray Trace ";
+        } else {
+            assert(false);
+            strm << "Unknown Pipeline Operation ";
+        }
+        strm << "Index " << operation_index << ". "
+             << "Pipeline " << LookupDebugUtilsName(report_data, HandleToUint64(pipeline_handle)) << "("
+             << HandleToUint64(pipeline_handle) << "). "
+             << "Shader Module " << LookupDebugUtilsName(report_data, HandleToUint64(shader_module_handle)) << "("
+             << HandleToUint64(shader_module_handle) << "). ";
+    }
+    strm << std::dec << std::noshowbase;
+    strm << "Shader Instruction Index = " << debug_record[kInstCommonOutInstructionIdx] << ". ";
+    msg = strm.str();
+}
+
+// Read the contents of the SPIR-V OpSource instruction and any following continuation instructions.
+// Split the single string into a vector of strings, one for each line, for easier processing.
+static void ReadOpSource(const SHADER_MODULE_STATE &shader, const uint32_t reported_file_id,
+                         std::vector<std::string> &opsource_lines) {
+    for (auto insn : shader) {
+        if ((insn.opcode() == spv::OpSource) && (insn.len() >= 5) && (insn.word(3) == reported_file_id)) {
+            std::istringstream in_stream;
+            std::string cur_line;
+            in_stream.str((char *)&insn.word(4));
+            while (std::getline(in_stream, cur_line)) {
+                opsource_lines.push_back(cur_line);
+            }
+            while ((++insn).opcode() == spv::OpSourceContinued) {
+                in_stream.str((char *)&insn.word(1));
+                while (std::getline(in_stream, cur_line)) {
+                    opsource_lines.push_back(cur_line);
+                }
+            }
+            break;
+        }
+    }
+}
+
+// The task here is to search the OpSource content to find the #line directive with the
+// line number that is closest to, but still prior to, the reported error line number and
+// still within the reported filename.
+// From this known position in the OpSource content we can add the difference between
+// the #line line number and the reported error line number to determine the location
+// in the OpSource content of the reported error line.
+//
+// Considerations:
+// - Look only at #line directives that specify the reported_filename since
+//   the reported error line number refers to its location in the reported filename.
+// - If a #line directive does not have a filename, the file is the reported filename, or
+//   the filename found in a prior #line directive.  (This is C-preprocessor behavior)
+// - It is possible (e.g., inlining) for blocks of code to get shuffled out of their
+//   original order and the #line directives are used to keep the numbering correct.  This
+//   is why we need to examine the entire contents of the source, instead of leaving early
+//   when finding a #line line number larger than the reported error line number.
+//
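+// For example, the directive `#line 42 "shader.frag"` declares that the line that follows it is
+// line 42 of shader.frag.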
+
+// GCC 4.8 has a problem with std::regex that is fixed in GCC 4.9.  Provide fallback code for 4.8
+#define GCC_VERSION (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__)
+
+#if defined(__GNUC__) && GCC_VERSION < 40900
+static bool GetLineAndFilename(const std::string string, uint32_t *linenumber, std::string &filename) {
+    // # line <linenumber> "<filename>" or
+    // #line <linenumber> "<filename>"
+    std::vector<std::string> tokens;
+    std::stringstream stream(string);
+    std::string temp;
+    uint32_t line_index = 0;
+
+    while (stream >> temp) tokens.push_back(temp);
+    auto size = tokens.size();
+    if (size > 1) {
+        if (tokens[0] == "#" && tokens[1] == "line") {
+            line_index = 2;
+        } else if (tokens[0] == "#line") {
+            line_index = 1;
+        }
+    }
+    if (0 == line_index) return false;
+    *linenumber = std::stoul(tokens[line_index]);
+    uint32_t filename_index = line_index + 1;
+    // Remove enclosing double quotes around filename
+    if (size > filename_index) filename = tokens[filename_index].substr(1, tokens[filename_index].size() - 2);
+    return true;
+}
+#else
+static bool GetLineAndFilename(const std::string string, uint32_t *linenumber, std::string &filename) {
+    static const std::regex line_regex(  // matches #line directives
+        "^"                              // beginning of line
+        "\\s*"                           // optional whitespace
+        "#"                              // required text
+        "\\s*"                           // optional whitespace
+        "line"                           // required text
+        "\\s+"                           // required whitespace
+        "([0-9]+)"                       // required first capture - line number
+        "(\\s+)?"                        // optional second capture - whitespace
+        "(\".+\")?"                      // optional third capture - quoted filename with at least one char inside
+        ".*");                           // rest of line (needed when using std::regex_match since the entire line is tested)
+
+    std::smatch captures;
+
+    bool found_line = std::regex_match(string, captures, line_regex);
+    if (!found_line) return false;
+
+    // filename is optional and considered found only if the whitespace and the filename are captured
+    if (captures[2].matched && captures[3].matched) {
+        // Remove enclosing double quotes.  The regex guarantees the quotes and at least one char.
+        filename = captures[3].str().substr(1, captures[3].str().size() - 2);
+    }
+    *linenumber = std::stoul(captures[1]);
+    return true;
+}
+#endif  // GCC_VERSION
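+
+// A few illustrative inputs and the values either implementation above produces:
+//   #line 42 "foo.frag"   -> returns true,  *linenumber = 42, filename = foo.frag
+//   # line 10             -> returns true,  *linenumber = 10, filename left unchanged
+//   int x = 0;            -> returns false, outputs left unchanged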
+
+// Extract the filename, line number, and column number from the correct OpLine and build a message string from it.
+// Scan the source (from OpSource) to find the line of source at the reported line number and place it in another message string.
+static void GenerateSourceMessages(const std::vector<unsigned int> &pgm, const uint32_t *debug_record, std::string &filename_msg,
+                                   std::string &source_msg) {
+    using namespace spvtools;
+    std::ostringstream filename_stream;
+    std::ostringstream source_stream;
+    SHADER_MODULE_STATE shader;
+    shader.words = pgm;
+    // Find the OpLine just before the failing instruction indicated by the debug info.
+    // SPIR-V can only be iterated in the forward direction due to its opcode/length encoding.
+    uint32_t instruction_index = 0;
+    uint32_t reported_file_id = 0;
+    uint32_t reported_line_number = 0;
+    uint32_t reported_column_number = 0;
+    if (shader.words.size() > 0) {
+        for (auto insn : shader) {
+            if (insn.opcode() == spv::OpLine) {
+                reported_file_id = insn.word(1);
+                reported_line_number = insn.word(2);
+                reported_column_number = insn.word(3);
+            }
+            if (instruction_index == debug_record[kInstCommonOutInstructionIdx]) {
+                break;
+            }
+            instruction_index++;
+        }
+    }
+    // Create message with file information obtained from the OpString pointed to by the discovered OpLine.
+    std::string reported_filename;
+    if (reported_file_id == 0) {
+        filename_stream
+            << "Unable to find SPIR-V OpLine for source information.  Build shader with debug info to get source information.";
+    } else {
+        bool found_opstring = false;
+        for (auto insn : shader) {
+            if ((insn.opcode() == spv::OpString) && (insn.len() >= 3) && (insn.word(1) == reported_file_id)) {
+                found_opstring = true;
+                reported_filename = (char *)&insn.word(2);
+                if (reported_filename.empty()) {
+                    filename_stream << "Shader validation error occurred at line " << reported_line_number;
+                } else {
+                    filename_stream << "Shader validation error occurred in file: " << reported_filename << " at line "
+                                    << reported_line_number;
+                }
+                if (reported_column_number > 0) {
+                    filename_stream << ", column " << reported_column_number;
+                }
+                filename_stream << ".";
+                break;
+            }
+        }
+        if (!found_opstring) {
+            filename_stream << "Unable to find SPIR-V OpString for file id " << reported_file_id << " from OpLine instruction.";
+        }
+    }
+    filename_msg = filename_stream.str();
+
+    // Create message to display source code line containing error.
+    if ((reported_file_id != 0)) {
+        // Read the source code and split it up into separate lines.
+        std::vector<std::string> opsource_lines;
+        ReadOpSource(shader, reported_file_id, opsource_lines);
+        // Find the line in the OpSource content that corresponds to the reported error file and line.
+        if (!opsource_lines.empty()) {
+            uint32_t saved_line_number = 0;
+            std::string current_filename = reported_filename;  // current "preprocessor" filename state.
+            std::vector<std::string>::size_type saved_opsource_offset = 0;
+            bool found_best_line = false;
+            for (auto it = opsource_lines.begin(); it != opsource_lines.end(); ++it) {
+                uint32_t parsed_line_number;
+                std::string parsed_filename;
+                bool found_line = GetLineAndFilename(*it, &parsed_line_number, parsed_filename);
+                if (!found_line) continue;
+
+                bool found_filename = parsed_filename.size() > 0;
+                if (found_filename) {
+                    current_filename = parsed_filename;
+                }
+                if ((!found_filename) || (current_filename == reported_filename)) {
+                    // Update the candidate best line directive, if the current one is prior and closer to the reported line
+                    if (reported_line_number >= parsed_line_number) {
+                        if (!found_best_line ||
+                            (reported_line_number - parsed_line_number <= reported_line_number - saved_line_number)) {
+                            saved_line_number = parsed_line_number;
+                            saved_opsource_offset = std::distance(opsource_lines.begin(), it);
+                            found_best_line = true;
+                        }
+                    }
+                }
+            }
+            if (found_best_line) {
+                assert(reported_line_number >= saved_line_number);
+                std::vector<std::string>::size_type opsource_index =
+                    (reported_line_number - saved_line_number) + 1 + saved_opsource_offset;
+                if (opsource_index < opsource_lines.size()) {
+                    source_stream << "\n" << reported_line_number << ": " << opsource_lines[opsource_index].c_str();
+                } else {
+                    source_stream << "Internal error: calculated source line of " << opsource_index << " for source size of "
+                                  << opsource_lines.size() << " lines.";
+                }
+            } else {
+                source_stream << "Unable to find suitable #line directive in SPIR-V OpSource.";
+            }
+        } else {
+            source_stream << "Unable to find SPIR-V OpSource.";
+        }
+    }
+    source_msg = source_stream.str();
+}
+
+// Pull together all the information from the debug record to build the error message strings,
+// and then assemble them into a single message string.
+// Retrieve the shader program referenced by the unique shader ID provided in the debug record.
+// We had to keep a copy of the shader program with the same lifecycle as the pipeline to make
+// sure it is available when the pipeline is submitted.  (The ShaderModule tracking object also
+// keeps a copy, but it can be destroyed after the pipeline is created and before it is submitted.)
+//
+void GpuAssisted::AnalyzeAndReportError(CMD_BUFFER_STATE *cb_node, VkQueue queue, VkPipelineBindPoint pipeline_bind_point,
+                                        uint32_t operation_index, uint32_t *const debug_output_buffer) {
+    using namespace spvtools;
+    const uint32_t total_words = debug_output_buffer[0];
+    // A zero here means that the shader instrumentation didn't write anything.
+    // If you have nothing to say, don't say it here.
+    if (0 == total_words) {
+        return;
+    }
+    // The first word in the debug output buffer is the number of words that would have
+    // been written by the shader instrumentation, if there was enough room in the buffer we provided.
+    // The number of words actually written by the shaders is determined by the size of the buffer
+    // we provide via the descriptor.  So, we process only the number of words that can fit in the
+    // buffer.
+    // Each "report" written by the shader instrumentation is considered a "record".  This function
+    // is hard-coded to process only one record because it expects the buffer to be large enough to
+    // hold only one record.  If there is a desire to process more than one record, this function needs
+    // to be modified to loop over records and the buffer size increased.
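+    // Schematically (indices use the constants referenced below; the values are illustrative):
+    //   debug_output_buffer[0]                          total words the shader wanted to write
+    //   debug_output_buffer[kDebugOutputDataOffset ...] the single record, indexed by the
+    //                                                   kInstCommonOut* constants below (shader id,
+    //                                                   instruction index, record size)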
+    std::string validation_message;
+    std::string stage_message;
+    std::string common_message;
+    std::string filename_message;
+    std::string source_message;
+    std::string vuid_msg;
+    VkShaderModule shader_module_handle = VK_NULL_HANDLE;
+    VkPipeline pipeline_handle = VK_NULL_HANDLE;
+    std::vector<unsigned int> pgm;
+    // The first record starts at this offset after the total_words.
+    const uint32_t *debug_record = &debug_output_buffer[kDebugOutputDataOffset];
+    // Lookup the VkShaderModule handle and SPIR-V code used to create the shader, using the unique shader ID value returned
+    // by the instrumented shader.
+    auto it = shader_map.find(debug_record[kInstCommonOutShaderId]);
+    if (it != shader_map.end()) {
+        shader_module_handle = it->second.shader_module;
+        pipeline_handle = it->second.pipeline;
+        pgm = it->second.pgm;
+    }
+    GenerateValidationMessage(debug_record, validation_message, vuid_msg);
+    GenerateStageMessage(debug_record, stage_message);
+    GenerateCommonMessage(report_data, cb_node, debug_record, shader_module_handle, pipeline_handle, pipeline_bind_point,
+                          operation_index, common_message);
+    GenerateSourceMessages(pgm, debug_record, filename_message, source_message);
+    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, HandleToUint64(queue),
+            vuid_msg.c_str(), "%s %s %s %s%s", validation_message.c_str(), common_message.c_str(), stage_message.c_str(),
+            filename_message.c_str(), source_message.c_str());
+    // The debug record at word kInstCommonOutSize is the number of words in the record
+    // written by the shader.  Clear the entire record plus the total_words word at the start.
+    const uint32_t words_to_clear = 1 + std::min(debug_record[kInstCommonOutSize], (uint32_t)kInst2MaxOutCnt);
+    memset(debug_output_buffer, 0, sizeof(uint32_t) * words_to_clear);
+}
+
+// For the given command buffer, map its debug data buffers and read their contents for analysis.
+void GpuAssisted::ProcessInstrumentationBuffer(VkQueue queue, CMD_BUFFER_STATE *cb_node) {
+    if (cb_node && (cb_node->hasDrawCmd || cb_node->hasTraceRaysCmd || cb_node->hasDispatchCmd)) {
+        auto gpu_buffer_list = GetGpuAssistedBufferInfo(cb_node->commandBuffer);
+        uint32_t draw_index = 0;
+        uint32_t compute_index = 0;
+        uint32_t ray_trace_index = 0;
+
+        for (auto &buffer_info : gpu_buffer_list) {
+            char *pData;
+            VkResult result = vmaMapMemory(vmaAllocator, buffer_info.output_mem_block.allocation, (void **)&pData);
+            // Analyze debug output buffer
+            if (result == VK_SUCCESS) {
+                uint32_t operation_index = 0;
+                if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) {
+                    operation_index = draw_index;
+                } else if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
+                    operation_index = compute_index;
+                } else if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
+                    operation_index = ray_trace_index;
+                } else {
+                    assert(false);
+                }
+
+                AnalyzeAndReportError(cb_node, queue, buffer_info.pipeline_bind_point, operation_index, (uint32_t *)pData);
+                vmaUnmapMemory(vmaAllocator, buffer_info.output_mem_block.allocation);
+            }
+
+            if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS) {
+                draw_index++;
+            } else if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) {
+                compute_index++;
+            } else if (buffer_info.pipeline_bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
+                ray_trace_index++;
+            } else {
+                assert(false);
+            }
+        }
+    }
+}
+
+// For the given command buffer, map its debug data buffers and update the status of any update after bind descriptors
+void GpuAssisted::UpdateInstrumentationBuffer(CMD_BUFFER_STATE *cb_node) {
+    auto gpu_buffer_list = GetGpuAssistedBufferInfo(cb_node->commandBuffer);
+    uint32_t *pData;
+    for (auto &buffer_info : gpu_buffer_list) {
+        if (buffer_info.di_input_mem_block.update_at_submit.size() > 0) {
+            VkResult result = vmaMapMemory(vmaAllocator, buffer_info.di_input_mem_block.allocation, (void **)&pData);
+            if (result == VK_SUCCESS) {
+                for (auto update : buffer_info.di_input_mem_block.update_at_submit) {
+                    if (update.second->updated) pData[update.first] = 1;
+                }
+                vmaUnmapMemory(vmaAllocator, buffer_info.di_input_mem_block.allocation);
+            }
+        }
+    }
+}
+
+// Submit a memory barrier on graphics queues.
+// Lazy-create and record the needed command buffer.
+void GpuAssisted::SubmitBarrier(VkQueue queue) {
+    auto queue_barrier_command_info_it = queue_barrier_command_infos.emplace(queue, GpuAssistedQueueBarrierCommandInfo{});
+    if (queue_barrier_command_info_it.second) {
+        GpuAssistedQueueBarrierCommandInfo &queue_barrier_command_info = queue_barrier_command_info_it.first->second;
+
+        uint32_t queue_family_index = 0;
+
+        auto queue_state_it = queueMap.find(queue);
+        if (queue_state_it != queueMap.end()) {
+            queue_family_index = queue_state_it->second.queueFamilyIndex;
+        }
+
+        VkResult result = VK_SUCCESS;
+
+        VkCommandPoolCreateInfo pool_create_info = {};
+        pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+        pool_create_info.queueFamilyIndex = queue_family_index;
+        result = DispatchCreateCommandPool(device, &pool_create_info, nullptr, &queue_barrier_command_info.barrier_command_pool);
+        if (result != VK_SUCCESS) {
+            ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                               "Unable to create command pool for barrier CB.");
+            queue_barrier_command_info.barrier_command_pool = VK_NULL_HANDLE;
+            return;
+        }
+
+        VkCommandBufferAllocateInfo buffer_alloc_info = {};
+        buffer_alloc_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+        buffer_alloc_info.commandPool = queue_barrier_command_info.barrier_command_pool;
+        buffer_alloc_info.commandBufferCount = 1;
+        buffer_alloc_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+        result = DispatchAllocateCommandBuffers(device, &buffer_alloc_info, &queue_barrier_command_info.barrier_command_buffer);
+        if (result != VK_SUCCESS) {
+            ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                               "Unable to create barrier command buffer.");
+            DispatchDestroyCommandPool(device, queue_barrier_command_info.barrier_command_pool, nullptr);
+            queue_barrier_command_info.barrier_command_pool = VK_NULL_HANDLE;
+            queue_barrier_command_info.barrier_command_buffer = VK_NULL_HANDLE;
+            return;
+        }
+
+        // Hook up command buffer dispatch
+        vkSetDeviceLoaderData(device, queue_barrier_command_info.barrier_command_buffer);
+
+        // Record a global memory barrier to force availability of device memory operations to the host domain.
+        VkCommandBufferBeginInfo command_buffer_begin_info = {};
+        command_buffer_begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+        result = DispatchBeginCommandBuffer(queue_barrier_command_info.barrier_command_buffer, &command_buffer_begin_info);
+        if (result == VK_SUCCESS) {
+            VkMemoryBarrier memory_barrier = {};
+            memory_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
+            memory_barrier.srcAccessMask = VK_ACCESS_MEMORY_WRITE_BIT;
+            memory_barrier.dstAccessMask = VK_ACCESS_HOST_READ_BIT;
+
+            DispatchCmdPipelineBarrier(queue_barrier_command_info.barrier_command_buffer, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+                                       VK_PIPELINE_STAGE_HOST_BIT, 0, 1, &memory_barrier, 0, nullptr, 0, nullptr);
+            DispatchEndCommandBuffer(queue_barrier_command_info.barrier_command_buffer);
+        }
+    }
+
+    GpuAssistedQueueBarrierCommandInfo &queue_barrier_command_info = queue_barrier_command_info_it.first->second;
+    if (queue_barrier_command_info.barrier_command_buffer != VK_NULL_HANDLE) {
+        VkSubmitInfo submit_info = {};
+        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+        submit_info.commandBufferCount = 1;
+        submit_info.pCommandBuffers = &queue_barrier_command_info.barrier_command_buffer;
+        DispatchQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
+    }
+}
+
+void GpuAssisted::PreCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence) {
+    for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
+        const VkSubmitInfo *submit = &pSubmits[submit_idx];
+        for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
+            auto cb_node = GetCBState(submit->pCommandBuffers[i]);
+            UpdateInstrumentationBuffer(cb_node);
+            for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
+                UpdateInstrumentationBuffer(secondaryCmdBuffer);
+            }
+        }
+    }
+}
+
+// Issue a memory barrier to make GPU-written data available to host.
+// Wait for the queue to complete execution.
+// Check the debug buffers for all the command buffers that were submitted.
+void GpuAssisted::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence,
+                                            VkResult result) {
+    ValidationStateTracker::PostCallRecordQueueSubmit(queue, submitCount, pSubmits, fence, result);
+
+    if (aborted) return;
+    bool buffers_present = false;
+    // Don't QueueWaitIdle if there's nothing to process
+    for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
+        const VkSubmitInfo *submit = &pSubmits[submit_idx];
+        for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
+            auto cb_node = GetCBState(submit->pCommandBuffers[i]);
+            if (GetGpuAssistedBufferInfo(cb_node->commandBuffer).size() || cb_node->hasBuildAccelerationStructureCmd)
+                buffers_present = true;
+            for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
+                if (GetGpuAssistedBufferInfo(secondaryCmdBuffer->commandBuffer).size() || cb_node->hasBuildAccelerationStructureCmd)
+                    buffers_present = true;
+            }
+        }
+    }
+    if (!buffers_present) return;
+
+    SubmitBarrier(queue);
+
+    DispatchQueueWaitIdle(queue);
+
+    for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
+        const VkSubmitInfo *submit = &pSubmits[submit_idx];
+        for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
+            auto cb_node = GetCBState(submit->pCommandBuffers[i]);
+            ProcessInstrumentationBuffer(queue, cb_node);
+            ProcessAccelerationStructureBuildValidationBuffer(queue, cb_node);
+            for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
+                ProcessInstrumentationBuffer(queue, secondaryCmdBuffer);
+                ProcessAccelerationStructureBuildValidationBuffer(queue, cb_node);
+            }
+        }
+    }
+}
+
+void GpuAssisted::PreCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
+                                       uint32_t firstVertex, uint32_t firstInstance) {
+    AllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS);
+}
+
+void GpuAssisted::PreCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
+                                              uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
+    AllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS);
+}
+
+void GpuAssisted::PreCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count,
+                                               uint32_t stride) {
+    AllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS);
+}
+
+void GpuAssisted::PreCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+                                                      uint32_t count, uint32_t stride) {
+    AllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS);
+}
+
+void GpuAssisted::PreCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
+    AllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE);
+}
+
+void GpuAssisted::PreCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) {
+    AllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE);
+}
+
+void GpuAssisted::PreCallRecordCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer,
+                                              VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer,
+                                              VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride,
+                                              VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset,
+                                              VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer,
+                                              VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride,
+                                              uint32_t width, uint32_t height, uint32_t depth) {
+    AllocateValidationResources(commandBuffer, VK_PIPELINE_BIND_POINT_RAY_TRACING_NV);
+}
+
+void GpuAssisted::PostCallRecordCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer,
+                                               VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer,
+                                               VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride,
+                                               VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset,
+                                               VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer,
+                                               VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride,
+                                               uint32_t width, uint32_t height, uint32_t depth) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    cb_state->hasTraceRaysCmd = true;
+}
+
+void GpuAssisted::AllocateValidationResources(const VkCommandBuffer cmd_buffer, const VkPipelineBindPoint bind_point) {
+    if (bind_point != VK_PIPELINE_BIND_POINT_GRAPHICS && bind_point != VK_PIPELINE_BIND_POINT_COMPUTE &&
+        bind_point != VK_PIPELINE_BIND_POINT_RAY_TRACING_NV) {
+        return;
+    }
+    VkResult result;
+
+    if (aborted) return;
+
+    std::vector<VkDescriptorSet> desc_sets;
+    VkDescriptorPool desc_pool = VK_NULL_HANDLE;
+    result = desc_set_manager->GetDescriptorSets(1, &desc_pool, &desc_sets);
+    assert(result == VK_SUCCESS);
+    if (result != VK_SUCCESS) {
+        ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                           "Unable to allocate descriptor sets.  Device could become unstable.");
+        aborted = true;
+        return;
+    }
+
+    VkDescriptorBufferInfo output_desc_buffer_info = {};
+    output_desc_buffer_info.range = output_buffer_size;
+
+    auto cb_node = GetCBState(cmd_buffer);
+    if (!cb_node) {
+        ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device), "Unrecognized command buffer");
+        aborted = true;
+        return;
+    }
+
+    // Allocate memory for the output block that the gpu will use to return any error information
+    GpuAssistedDeviceMemoryBlock output_block = {};
+    VkBufferCreateInfo bufferInfo = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
+    bufferInfo.size = output_buffer_size;
+    bufferInfo.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
+    VmaAllocationCreateInfo allocInfo = {};
+    allocInfo.usage = VMA_MEMORY_USAGE_GPU_TO_CPU;
+    result = vmaCreateBuffer(vmaAllocator, &bufferInfo, &allocInfo, &output_block.buffer, &output_block.allocation, nullptr);
+    if (result != VK_SUCCESS) {
+        ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                           "Unable to allocate device memory.  Device could become unstable.");
+        aborted = true;
+        return;
+    }
+
+    // Clear the output block to zeros so that only error information from the gpu will be present
+    uint32_t *pData;
+    result = vmaMapMemory(vmaAllocator, output_block.allocation, (void **)&pData);
+    if (result == VK_SUCCESS) {
+        memset(pData, 0, output_buffer_size);
+        vmaUnmapMemory(vmaAllocator, output_block.allocation);
+    }
+
+    GpuAssistedDeviceMemoryBlock di_input_block = {}, bda_input_block = {};
+    VkDescriptorBufferInfo di_input_desc_buffer_info = {};
+    VkDescriptorBufferInfo bda_input_desc_buffer_info = {};
+    VkWriteDescriptorSet desc_writes[3] = {};
+    uint32_t desc_count = 1;
+    auto const &state = cb_node->lastBound[bind_point];
+    uint32_t number_of_sets = (uint32_t)state.per_set.size();
+
+    // Figure out how much memory we need for the input block based on how many sets and bindings there are
+    // and how big each of the bindings is
+    if (number_of_sets > 0 && device_extensions.vk_ext_descriptor_indexing) {
+        uint32_t descriptor_count = 0;  // Number of descriptors, including all array elements
+        uint32_t binding_count = 0;     // Number of bindings based on the max binding number used
+        for (auto s : state.per_set) {
+            auto desc = s.bound_descriptor_set;
+            if (desc && (desc->GetBindingCount() > 0)) {
+                auto bindings = desc->GetLayout()->GetSortedBindingSet();
+                binding_count += desc->GetLayout()->GetMaxBinding() + 1;
+                for (auto binding : bindings) {
+                    // Shader instrumentation is tracking inline uniform blocks as scalars. Don't try to validate inline uniform
+                    // blocks
+                    if (VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT == desc->GetLayout()->GetTypeFromBinding(binding)) {
+                        descriptor_count++;
+                        log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+                                VK_NULL_HANDLE, "UNASSIGNED-GPU-Assisted Validation Warning",
+                                "VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT descriptors will not be validated by GPU assisted "
+                                "validation");
+                    } else if (binding == desc->GetLayout()->GetMaxBinding() && desc->IsVariableDescriptorCount(binding)) {
+                        descriptor_count += desc->GetVariableDescriptorCount();
+                    } else {
+                        descriptor_count += desc->GetDescriptorCountFromBinding(binding);
+                    }
+                }
+            }
+        }
+
+        // Note that the size of the input buffer is dependent on the maximum binding number, which
+        // can be very large.  This is because for (set = s, binding = b, index = i), the validation
+        // code is going to dereference Input[ i + Input[ b + Input[ s + Input[ Input[0] ] ] ] ] to
+        // see if descriptors have been written. In gpu_validation.md, we note this and advise
+        // using densely packed bindings as a best practice when using gpu-av with descriptor indexing
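+        // As an illustration of the writes performed below, take a hypothetical case with one
+        // descriptor set whose layout has bindings 0 and 1 with one descriptor each, giving
+        // number_of_sets = 1, binding_count = 2, descriptor_count = 2 and words_needed = 9:
+        //   Word 0 | 4       index of the start of the sets-to-bindings table (1 + number_of_sets + binding_count)
+        //   Word 1 | 2       set 0: index of its sizes in the sizes array
+        //   Word 2 | 1       size (descriptor count) of binding 0
+        //   Word 3 | 1       size (descriptor count) of binding 1
+        //   Word 4 | 5       set 0: index of its entries in the bindings-to-written array
+        //   Word 5 | 7       binding 0: index of its written flag
+        //   Word 6 | 8       binding 1: index of its written flag
+        //   Word 7 | 0 or 1  written flag for binding 0, element 0
+        //   Word 8 | 0 or 1  written flag for binding 1, element 0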
+        uint32_t words_needed = 1 + (number_of_sets * 2) + (binding_count * 2) + descriptor_count;
+        allocInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
+        bufferInfo.size = words_needed * 4;
+        result =
+            vmaCreateBuffer(vmaAllocator, &bufferInfo, &allocInfo, &di_input_block.buffer, &di_input_block.allocation, nullptr);
+        if (result != VK_SUCCESS) {
+            ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                               "Unable to allocate device memory.  Device could become unstable.");
+            aborted = true;
+            return;
+        }
+
+        // Populate input buffer first with the sizes of every descriptor in every set, then with whether
+        // each element of each descriptor has been written or not.  See gpu_validation.md for a more thorough
+        // outline of the input buffer format
+        result = vmaMapMemory(vmaAllocator, di_input_block.allocation, (void **)&pData);
+        memset(pData, 0, static_cast<size_t>(bufferInfo.size));
+        // Pointer to a sets array that points into the sizes array
+        uint32_t *sets_to_sizes = pData + 1;
+        // Pointer to the sizes array that contains the array size of the descriptor at each binding
+        uint32_t *sizes = sets_to_sizes + number_of_sets;
+        // Pointer to another sets array that points into the bindings array that points into the written array
+        uint32_t *sets_to_bindings = sizes + binding_count;
+        // Pointer to the bindings array that points at the start of the writes in the writes array for each binding
+        uint32_t *bindings_to_written = sets_to_bindings + number_of_sets;
+        // Index of the next entry in the written array to be updated
+        uint32_t written_index = 1 + (number_of_sets * 2) + (binding_count * 2);
+        uint32_t bindCounter = number_of_sets + 1;
+        // Index of the start of the sets_to_bindings array
+        pData[0] = number_of_sets + binding_count + 1;
+
+        for (auto s : state.per_set) {
+            auto desc = s.bound_descriptor_set;
+            if (desc && (desc->GetBindingCount() > 0)) {
+                auto layout = desc->GetLayout();
+                auto bindings = layout->GetSortedBindingSet();
+                // For each set, fill in index of its bindings sizes in the sizes array
+                *sets_to_sizes++ = bindCounter;
+                // For each set, fill in the index of its bindings in the bindings_to_written array
+                *sets_to_bindings++ = bindCounter + number_of_sets + binding_count;
+                for (auto binding : bindings) {
+                    // For each binding, fill in its size in the sizes array
+                    // Shader instrumentation is tracking inline uniform blocks as scalars. Don't try to validate inline uniform
+                    // blocks
+                    if (VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT == desc->GetLayout()->GetTypeFromBinding(binding)) {
+                        sizes[binding] = 1;
+                    } else if (binding == layout->GetMaxBinding() && desc->IsVariableDescriptorCount(binding)) {
+                        sizes[binding] = desc->GetVariableDescriptorCount();
+                    } else {
+                        sizes[binding] = desc->GetDescriptorCountFromBinding(binding);
+                    }
+                    // Fill in the starting index for this binding in the written array in the bindings_to_written array
+                    bindings_to_written[binding] = written_index;
+
+                    // Shader instrumentation is tracking inline uniform blocks as scalars. Don't try to validate inline uniform
+                    // blocks
+                    if (VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT == desc->GetLayout()->GetTypeFromBinding(binding)) {
+                        pData[written_index++] = 1;
+                        continue;
+                    }
+
+                    auto index_range = desc->GetGlobalIndexRangeFromBinding(binding, true);
+                    // For each array element in the binding, update the written array with whether it has been written
+                    for (uint32_t i = index_range.start; i < index_range.end; ++i) {
+                        auto *descriptor = desc->GetDescriptorFromGlobalIndex(i);
+                        if (descriptor->updated) {
+                            pData[written_index] = 1;
+                        } else if (desc->IsUpdateAfterBind(binding)) {
+                            // If it hasn't been written now and it's update after bind, put it in a list to check at QueueSubmit
+                            di_input_block.update_at_submit[written_index] = descriptor;
+                        }
+                        written_index++;
+                    }
+                }
+                auto last = desc->GetLayout()->GetMaxBinding();
+                bindings_to_written += last + 1;
+                bindCounter += last + 1;
+                sizes += last + 1;
+            } else {
+                *sets_to_sizes++ = 0;
+                *sets_to_bindings++ = 0;
+            }
+        }
+        vmaUnmapMemory(vmaAllocator, di_input_block.allocation);
+
+        di_input_desc_buffer_info.range = (words_needed * 4);
+        di_input_desc_buffer_info.buffer = di_input_block.buffer;
+        di_input_desc_buffer_info.offset = 0;
+
+        desc_writes[1].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+        desc_writes[1].dstBinding = 1;
+        desc_writes[1].descriptorCount = 1;
+        desc_writes[1].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
+        desc_writes[1].pBufferInfo = &di_input_desc_buffer_info;
+        desc_writes[1].dstSet = desc_sets[0];
+
+        desc_count = 2;
+    }
+
+    if (number_of_sets > 0 && device_extensions.vk_ext_buffer_device_address && buffer_map.size() && shaderInt64) {
+        // Example BDA input buffer assuming 2 buffers using BDA:
+        // Word 0 | Index of start of buffer sizes (in this case 5)
+        // Word 1 | 0x0000000000000000
+        // Word 2 | Device Address of first buffer  (Addresses sorted in ascending order)
+        // Word 3 | Device Address of second buffer
+        // Word 4 | 0xffffffffffffffff
+        // Word 5 | 0 (size of pretend buffer at word 1)
+        // Word 6 | Size in bytes of first buffer
+        // Word 7 | Size in bytes of second buffer
+        // Word 8 | 0 (size of pretend buffer in word 4)
+
+        uint32_t num_buffers = static_cast<uint32_t>(buffer_map.size());
+        uint32_t words_needed = (num_buffers + 3) + (num_buffers + 2);
+        allocInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
+        bufferInfo.size = words_needed * 8;  // 64 bit words
+        result =
+            vmaCreateBuffer(vmaAllocator, &bufferInfo, &allocInfo, &bda_input_block.buffer, &bda_input_block.allocation, nullptr);
+        if (result != VK_SUCCESS) {
+            ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                               "Unable to allocate device memory.  Device could become unstable.");
+            aborted = true;
+            return;
+        }
+        uint64_t *bda_data;
+        result = vmaMapMemory(vmaAllocator, bda_input_block.allocation, (void **)&bda_data);
+        uint32_t address_index = 1;
+        uint32_t size_index = 3 + num_buffers;
+        memset(bda_data, 0, static_cast<size_t>(bufferInfo.size));
+        bda_data[0] = size_index;       // Start of buffer sizes
+        bda_data[address_index++] = 0;  // NULL address
+        bda_data[size_index++] = 0;
+
+        for (auto const &value : buffer_map) {
+            bda_data[address_index++] = value.first;
+            bda_data[size_index++] = value.second;
+        }
+        bda_data[address_index] = UINTPTR_MAX;
+        bda_data[size_index] = 0;
+        vmaUnmapMemory(vmaAllocator, bda_input_block.allocation);
+
+        bda_input_desc_buffer_info.range = (words_needed * 8);
+        bda_input_desc_buffer_info.buffer = bda_input_block.buffer;
+        bda_input_desc_buffer_info.offset = 0;
+
+        desc_writes[desc_count].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+        desc_writes[desc_count].dstBinding = 2;
+        desc_writes[desc_count].descriptorCount = 1;
+        desc_writes[desc_count].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
+        desc_writes[desc_count].pBufferInfo = &bda_input_desc_buffer_info;
+        desc_writes[desc_count].dstSet = desc_sets[0];
+        desc_count++;
+    }
+
+    // Write the descriptor
+    output_desc_buffer_info.buffer = output_block.buffer;
+    output_desc_buffer_info.offset = 0;
+
+    desc_writes[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    desc_writes[0].descriptorCount = 1;
+    desc_writes[0].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
+    desc_writes[0].pBufferInfo = &output_desc_buffer_info;
+    desc_writes[0].dstSet = desc_sets[0];
+    DispatchUpdateDescriptorSets(device, desc_count, desc_writes, 0, NULL);
+
+    auto iter = cb_node->lastBound.find(bind_point);  // find() allows read-only access to cb_state
+    if (iter != cb_node->lastBound.end()) {
+        auto pipeline_state = iter->second.pipeline_state;
+        if (pipeline_state && (pipeline_state->pipeline_layout->set_layouts.size() <= desc_set_bind_index)) {
+            DispatchCmdBindDescriptorSets(cmd_buffer, bind_point, pipeline_state->pipeline_layout->layout, desc_set_bind_index, 1,
+                                          desc_sets.data(), 0, nullptr);
+        }
+        // Record buffer and memory info in CB state tracking
+        GetGpuAssistedBufferInfo(cmd_buffer)
+            .emplace_back(output_block, di_input_block, bda_input_block, desc_sets[0], desc_pool, bind_point);
+    } else {
+        ReportSetupProblem(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device), "Unable to find pipeline state");
+        vmaDestroyBuffer(vmaAllocator, di_input_block.buffer, di_input_block.allocation);
+        vmaDestroyBuffer(vmaAllocator, bda_input_block.buffer, bda_input_block.allocation);
+        vmaDestroyBuffer(vmaAllocator, output_block.buffer, output_block.allocation);
+        aborted = true;
+        return;
+    }
+}
diff --git a/src/third_party/vulkan-validation-layers/src/layers/gpu_validation.h b/src/third_party/vulkan-validation-layers/src/layers/gpu_validation.h
new file mode 100644
index 0000000..9a36f53
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/gpu_validation.h
@@ -0,0 +1,252 @@
+/* Copyright (c) 2018-2019 The Khronos Group Inc.
+ * Copyright (c) 2018-2019 Valve Corporation
+ * Copyright (c) 2018-2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Karl Schultz <karl@lunarg.com>
+ * Author: Tony Barbour <tony@lunarg.com>
+ */
+
+#pragma once
+
+#include "chassis.h"
+#include "state_tracker.h"
+#include "vk_mem_alloc.h"
+class GpuAssisted;
+
+struct GpuAssistedDeviceMemoryBlock {
+    VkBuffer buffer;
+    VmaAllocation allocation;
+    std::unordered_map<uint32_t, const cvdescriptorset::Descriptor*> update_at_submit;
+};
+
+struct GpuAssistedBufferInfo {
+    GpuAssistedDeviceMemoryBlock output_mem_block;
+    GpuAssistedDeviceMemoryBlock di_input_mem_block;   // Descriptor Indexing input
+    GpuAssistedDeviceMemoryBlock bda_input_mem_block;  // Buffer Device Address input
+    VkDescriptorSet desc_set;
+    VkDescriptorPool desc_pool;
+    VkPipelineBindPoint pipeline_bind_point;
+    GpuAssistedBufferInfo(GpuAssistedDeviceMemoryBlock output_mem_block, GpuAssistedDeviceMemoryBlock di_input_mem_block,
+                          GpuAssistedDeviceMemoryBlock bda_input_mem_block, VkDescriptorSet desc_set, VkDescriptorPool desc_pool,
+                          VkPipelineBindPoint pipeline_bind_point)
+        : output_mem_block(output_mem_block),
+          di_input_mem_block(di_input_mem_block),
+          bda_input_mem_block(bda_input_mem_block),
+          desc_set(desc_set),
+          desc_pool(desc_pool),
+          pipeline_bind_point(pipeline_bind_point){};
+};
+
+struct GpuAssistedQueueBarrierCommandInfo {
+    VkCommandPool barrier_command_pool = VK_NULL_HANDLE;
+    VkCommandBuffer barrier_command_buffer = VK_NULL_HANDLE;
+};
+
+// Class to encapsulate Descriptor Set allocation.  This manager creates and destroys Descriptor Pools
+// as needed to satisfy requests for descriptor sets.
+class GpuAssistedDescriptorSetManager {
+  public:
+    GpuAssistedDescriptorSetManager(GpuAssisted* dev_data);
+    ~GpuAssistedDescriptorSetManager();
+
+    VkResult GetDescriptorSet(VkDescriptorPool* desc_pool, VkDescriptorSet* desc_sets);
+    VkResult GetDescriptorSets(uint32_t count, VkDescriptorPool* pool, std::vector<VkDescriptorSet>* desc_sets);
+    void PutBackDescriptorSet(VkDescriptorPool desc_pool, VkDescriptorSet desc_set);
+
+  private:
+    static const uint32_t kItemsPerChunk = 512;
+    struct PoolTracker {
+        uint32_t size;
+        uint32_t used;
+    };
+
+    GpuAssisted* dev_data_;
+    std::unordered_map<VkDescriptorPool, struct PoolTracker> desc_pool_map_;
+};
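+
+// A minimal usage sketch for the manager above (hypothetical caller, error handling elided;
+// GpuAssisted::AllocateValidationResources in gpu_validation.cpp is the real client):
+//
+//   std::vector<VkDescriptorSet> desc_sets;
+//   VkDescriptorPool desc_pool = VK_NULL_HANDLE;
+//   if (desc_set_manager->GetDescriptorSets(1, &desc_pool, &desc_sets) == VK_SUCCESS) {
+//       // ... write and bind desc_sets[0] ...
+//       // Return the set to the pool once the work that uses it has retired:
+//       desc_set_manager->PutBackDescriptorSet(desc_pool, desc_sets[0]);
+//   }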
+
+struct GpuAssistedShaderTracker {
+    VkPipeline pipeline;
+    VkShaderModule shader_module;
+    std::vector<unsigned int> pgm;
+};
+
+struct GpuAssistedAccelerationStructureBuildValidationBufferInfo {
+    // The acceleration structure that is being built.
+    VkAccelerationStructureNV acceleration_structure = VK_NULL_HANDLE;
+
+    // The descriptor pool and descriptor set being used to validate a given build.
+    VkDescriptorPool descriptor_pool = VK_NULL_HANDLE;
+    VkDescriptorSet descriptor_set = VK_NULL_HANDLE;
+
+    // The storage buffer used by the validating compute shader which contains info about
+    // the valid handles and which is written to communicate any invalid handles found.
+    VkBuffer validation_buffer = VK_NULL_HANDLE;
+    VmaAllocation validation_buffer_allocation = VK_NULL_HANDLE;
+};
+
+struct GpuAssistedAccelerationStructureBuildValidationState {
+    bool initialized = false;
+
+    VkPipeline pipeline = VK_NULL_HANDLE;
+    VkPipelineLayout pipeline_layout = VK_NULL_HANDLE;
+
+    VkAccelerationStructureNV replacement_as = VK_NULL_HANDLE;
+    VmaAllocation replacement_as_allocation = VK_NULL_HANDLE;
+    uint64_t replacement_as_handle = 0;
+
+    std::unordered_map<VkCommandBuffer, std::vector<GpuAssistedAccelerationStructureBuildValidationBufferInfo>> validation_buffers;
+};
+
+class GpuAssisted : public ValidationStateTracker {
+    bool aborted = false;
+    VkBool32 shaderInt64;
+    uint32_t adjusted_max_desc_sets;
+    uint32_t desc_set_bind_index;
+    uint32_t unique_shader_module_id = 0;
+    std::unordered_map<uint32_t, GpuAssistedShaderTracker> shader_map;
+    std::unique_ptr<GpuAssistedDescriptorSetManager> desc_set_manager;
+    std::map<VkQueue, GpuAssistedQueueBarrierCommandInfo> queue_barrier_command_infos;
+    std::unordered_map<VkCommandBuffer, std::vector<GpuAssistedBufferInfo>> command_buffer_map;  // gpu_buffer_list;
+    uint32_t output_buffer_size;
+    VmaAllocator vmaAllocator = {};
+    PFN_vkSetDeviceLoaderData vkSetDeviceLoaderData;
+    std::map<VkDeviceAddress, VkDeviceSize> buffer_map;
+    GpuAssistedAccelerationStructureBuildValidationState acceleration_structure_validation_state;
+    std::vector<GpuAssistedBufferInfo>& GetGpuAssistedBufferInfo(const VkCommandBuffer command_buffer) {
+        auto buffer_list = command_buffer_map.find(command_buffer);
+        if (buffer_list == command_buffer_map.end()) {
+            std::vector<GpuAssistedBufferInfo> new_list{};
+            command_buffer_map[command_buffer] = new_list;
+            return command_buffer_map[command_buffer];
+        }
+        return buffer_list->second;
+    }
+    void ReportSetupProblem(VkDebugReportObjectTypeEXT object_type, uint64_t object_handle,
+                            const char* const specific_message) const;
+
+  public:
+    VkDescriptorSetLayout debug_desc_layout;
+    VkDescriptorSetLayout dummy_desc_layout;
+    void PreCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo* pCreateInfo,
+                                   const VkAllocationCallbacks* pAllocator, VkDevice* pDevice,
+                                   safe_VkDeviceCreateInfo* modified_create_info);
+    void PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo* pCreateInfo,
+                                    const VkAllocationCallbacks* pAllocator, VkDevice* pDevice, VkResult result);
+    void PostCallRecordGetBufferDeviceAddressEXT(VkDevice device, const VkBufferDeviceAddressInfoEXT* pInfo,
+                                                 VkDeviceAddress address);
+    void PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator);
+    void PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks* pAllocator);
+    void PostCallRecordBindAccelerationStructureMemoryNV(VkDevice device, uint32_t bindInfoCount,
+                                                         const VkBindAccelerationStructureMemoryInfoNV* pBindInfos,
+                                                         VkResult result);
+    void PreCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo,
+                                           const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout,
+                                           void* cpl_state_data);
+    void PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo,
+                                            const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout,
+                                            VkResult result);
+    void ResetCommandBuffer(VkCommandBuffer commandBuffer);
+    bool PreCallValidateCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents,
+                                      VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
+                                      uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers,
+                                      uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers,
+                                      uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) const;
+    void PreCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
+                                   VkBuffer* pBuffer, void* cb_state_data);
+    void CreateAccelerationStructureBuildValidationState(GpuAssisted* device_GpuAssisted);
+    void DestroyAccelerationStructureBuildValidationState();
+    void PreCallRecordCmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo,
+                                                      VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update,
+                                                      VkAccelerationStructureNV dst, VkAccelerationStructureNV src,
+                                                      VkBuffer scratch, VkDeviceSize scratchOffset);
+    void ProcessAccelerationStructureBuildValidationBuffer(VkQueue queue, CMD_BUFFER_STATE* cb_node);
+    void PreCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+                                              const VkGraphicsPipelineCreateInfo* pCreateInfos,
+                                              const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines,
+                                              void* cgpl_state_data);
+    void PreCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+                                             const VkComputePipelineCreateInfo* pCreateInfos,
+                                             const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines,
+                                             void* ccpl_state_data);
+    void PreCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+                                                  const VkRayTracingPipelineCreateInfoNV* pCreateInfos,
+                                                  const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines,
+                                                  void* crtpl_state_data);
+    template <typename CreateInfo, typename SafeCreateInfo>
+    void PreCallRecordPipelineCreations(uint32_t count, const CreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator,
+                                        VkPipeline* pPipelines, std::vector<std::shared_ptr<PIPELINE_STATE>>& pipe_state,
+                                        std::vector<SafeCreateInfo>* new_pipeline_create_infos,
+                                        const VkPipelineBindPoint bind_point);
+    template <typename CreateInfo>
+    void PostCallRecordPipelineCreations(const uint32_t count, const CreateInfo* pCreateInfos,
+                                         const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines,
+                                         const VkPipelineBindPoint bind_point);
+    void PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+                                               const VkGraphicsPipelineCreateInfo* pCreateInfos,
+                                               const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result,
+                                               void* cgpl_state_data);
+    void PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+                                              const VkComputePipelineCreateInfo* pCreateInfos,
+                                              const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result,
+                                              void* ccpl_state_data);
+    void PostCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+                                                   const VkRayTracingPipelineCreateInfoNV* pCreateInfos,
+                                                   const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result,
+                                                   void* crtpl_state_data);
+    void PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator);
+    bool InstrumentShader(const VkShaderModuleCreateInfo* pCreateInfo, std::vector<unsigned int>& new_pgm,
+                          uint32_t* unique_shader_id);
+    void PreCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo,
+                                         const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule,
+                                         void* csm_state_data);
+    void AnalyzeAndReportError(CMD_BUFFER_STATE* cb_node, VkQueue queue, VkPipelineBindPoint pipeline_bind_point,
+                               uint32_t operation_index, uint32_t* const debug_output_buffer);
+    void ProcessInstrumentationBuffer(VkQueue queue, CMD_BUFFER_STATE* cb_node);
+    void UpdateInstrumentationBuffer(CMD_BUFFER_STATE* cb_node);
+    void SubmitBarrier(VkQueue queue);
+    void PreCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence);
+    void PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence,
+                                   VkResult result);
+    void PreCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex,
+                              uint32_t firstInstance);
+    void PreCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
+                                     uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance);
+    void PreCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count,
+                                      uint32_t stride);
+    void PreCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count,
+                                             uint32_t stride);
+    void PreCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z);
+    void PreCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset);
+    void PreCallRecordCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer,
+                                     VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer,
+                                     VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride,
+                                     VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset,
+                                     VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer,
+                                     VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride,
+                                     uint32_t width, uint32_t height, uint32_t depth);
+    void PostCallRecordCmdTraceRaysNV(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer,
+                                      VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer,
+                                      VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride,
+                                      VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset,
+                                      VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer,
+                                      VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride,
+                                      uint32_t width, uint32_t height, uint32_t depth);
+    void AllocateValidationResources(const VkCommandBuffer cmd_buffer, const VkPipelineBindPoint bind_point);
+    void PostCallRecordGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,
+                                                   VkPhysicalDeviceProperties* pPhysicalDeviceProperties);
+    void PostCallRecordGetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice,
+                                                    VkPhysicalDeviceProperties2* pPhysicalDeviceProperties2);
+    VkResult InitializeVma(VkPhysicalDevice physicalDevice, VkDevice device, VmaAllocator* pAllocator);
+};
diff --git a/src/third_party/vulkan-validation-layers/src/layers/hash_util.h b/src/third_party/vulkan-validation-layers/src/layers/hash_util.h
new file mode 100644
index 0000000..74e0d24
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/hash_util.h
@@ -0,0 +1,164 @@
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (C) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: John Zulauf <jzulauf@lunarg.com>
+ */
+#ifndef HASH_UTIL_H_
+#define HASH_UTIL_H_
+
+#define NOMINMAX
+#include <cstdint>
+#include <functional>
+#include <limits>
+#include <memory>
+#include <mutex>
+#include <type_traits>
+#include <unordered_set>
+#include <vector>
+
+// Hash and equality utilities for supporting hashing containers (e.g. unordered_set, unordered_map)
+namespace hash_util {
+
+// True iff both pointers are null or both are non-null
+template <typename T>
+bool similar_for_nullity(const T *const lhs, const T *const rhs) {
+    return ((lhs != nullptr) && (rhs != nullptr)) || ((lhs == nullptr) && (rhs == nullptr));
+}
+
+// Wrap std hash to avoid manual casts for the holes in std::hash (in C++11)
+template <typename Value>
+size_t HashWithUnderlying(Value value, typename std::enable_if<!std::is_enum<Value>::value, void *>::type = nullptr) {
+    return std::hash<Value>()(value);
+}
+
+template <typename Value>
+size_t HashWithUnderlying(Value value, typename std::enable_if<std::is_enum<Value>::value, void *>::type = nullptr) {
+    using Underlying = typename std::underlying_type<Value>::type;
+    return std::hash<Underlying>()(static_cast<const Underlying &>(value));
+}
+
+class HashCombiner {
+  public:
+    using Key = size_t;
+
+    template <typename Value>
+    struct WrappedHash {
+        size_t operator()(const Value &value) const { return HashWithUnderlying(value); }
+    };
+
+    HashCombiner(Key combined = 0) : combined_(combined) {}
+    // magic and combination algorithm based on boost::hash_combine
+    // http://www.boost.org/doc/libs/1_43_0/doc/html/hash/reference.html#boost.hash_combine
+    // Magic value is 2^size / ((1 + sqrt(5)) / 2), i.e. 2^size divided by the golden ratio
+    static const uint64_t kMagic = sizeof(Key) > 4 ? Key(0x9e3779b97f4a7c16UL) : Key(0x9e3779b9U);
+
+    // If you need to override the default hash
+    template <typename Value, typename Hasher = WrappedHash<Value>>
+    HashCombiner &Combine(const Value &value) {
+        combined_ ^= Hasher()(value) + kMagic + (combined_ << 6) + (combined_ >> 2);
+        return *this;
+    }
+
+    template <typename Iterator, typename Hasher = WrappedHash<typename std::iterator_traits<Iterator>::value_type>>
+    HashCombiner &Combine(Iterator first, Iterator end) {
+        using Value = typename std::iterator_traits<Iterator>::value_type;
+        auto current = first;
+        for (; current != end; ++current) {
+            Combine<Value, Hasher>(*current);
+        }
+        return *this;
+    }
+
+    template <typename Value, typename Hasher = WrappedHash<Value>>
+    HashCombiner &Combine(const std::vector<Value> &vector) {
+        return Combine(vector.cbegin(), vector.cend());
+    }
+
+    template <typename Value>
+    HashCombiner &operator<<(const Value &value) {
+        return Combine(value);
+    }
+
+    Key Value() const { return combined_; }
+    void Reset(Key combined = 0) { combined_ = combined; }
+
+  private:
+    Key combined_;
+};
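+
+// A minimal usage sketch (illustration only; ExampleKind, ExampleKey and HashExampleKey are
+// hypothetical names, not used elsewhere in the layers): scalar, enum, and ordered
+// container fields are folded into a single hash via HashCombiner.
+enum class ExampleKind : uint32_t { kSampler = 0, kImage = 1 };
+struct ExampleKey {
+    uint32_t id;
+    ExampleKind kind;
+    std::vector<uint32_t> members;
+};
+inline size_t HashExampleKey(const ExampleKey &key) {
+    HashCombiner hc;
+    hc << key.id << key.kind;  // enums route through HashWithUnderlying's enum overload
+    hc.Combine(key.members);   // ordered containers are combined element-wise
+    return hc.Value();
+}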
+
+// A template to inherit std::hash overloads from when T::hash() is defined
+template <typename T>
+struct HasHashMember {
+    size_t operator()(const T &value) const { return value.hash(); }
+};
+
+// A template to inherit std::hash overloads from when T is an *ordered* container
+template <typename T>
+struct IsOrderedContainer {
+    size_t operator()(const T &value) const { return HashCombiner().Combine(value.cbegin(), value.cend()).Value(); }
+};
+
+// The dictionary provides a way of referencing canonical/reference
+// data by id, such that the ids are invariant with dictionary
+// resize/insert and no two entries point to identical data.  This
+// approach uses the address of the unique data as the unique
+// id for a given value of T.
+//
+// Note: This ID is unique for a given application execution, neither
+//       globally unique, invariant, nor repeatable from execution to
+//       execution.
+//
+// The entries of the dictionary are shared_pointers (the contents of
+// which are invariant with resize/insert), with the hash and equality
+// template arguments wrapped in a shared pointer dereferencing
+// function object
+template <typename T, typename Hasher = std::hash<T>, typename KeyEqual = std::equal_to<T>>
+class Dictionary {
+  public:
+    using Def = T;
+    using Id = std::shared_ptr<const Def>;
+
+    // Find the unique entry matching the provided value, adding it if needed
+    // TODO: segregate lookup from insert, using reader/write locks to reduce contention -- if needed
+    template <typename U = T>
+    Id look_up(U &&value) {
+        // We create an Id from the value, which will either be retained by dict (if new) or deleted on return (if extant)
+        Id from_input = std::make_shared<T>(std::forward<U>(value));
+
+        // Insert takes care of the "unique" id part by rejecting the insert if a matching key already exists, returning
+        // the Id of the extant shared_ptr (id->def) instead.
+        // Return the value of the Iterator from the <Iterator, bool> pair returned by insert.
+        Guard g(lock);  // Dict isn't thread safe, and use is presumed to be multi-threaded
+        return *dict.insert(from_input).first;
+    }
+
+  private:
+    struct HashKeyValue {
+        size_t operator()(const Id &value) const { return Hasher()(*value); }
+    };
+    struct KeyValueEqual {
+        bool operator()(const Id &lhs, const Id &rhs) const { return KeyEqual()(*lhs, *rhs); }
+    };
+    using Dict = std::unordered_set<Id, HashKeyValue, KeyValueEqual>;
+    using Lock = std::mutex;
+    using Guard = std::lock_guard<Lock>;
+    Lock lock;
+    Dict dict;
+};
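+
+// A minimal usage sketch (illustration only; ExampleDictionaryDeduplicates is a
+// hypothetical helper, not used by the layers): equal values handed to look_up
+// resolve to the same shared_ptr, so the pointer itself acts as the canonical id.
+inline bool ExampleDictionaryDeduplicates() {
+    Dictionary<int> example_dict;
+    Dictionary<int>::Id first = example_dict.look_up(42);
+    Dictionary<int>::Id second = example_dict.look_up(42);  // matches the extant entry
+    return first == second;                                 // true: one canonical entry
+}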
+}  // namespace hash_util
+
+#endif  // HASH_UTIL_H_
diff --git a/src/third_party/vulkan-validation-layers/src/layers/hash_vk_types.h b/src/third_party/vulkan-validation-layers/src/layers/hash_vk_types.h
new file mode 100644
index 0000000..6985886
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/hash_vk_types.h
@@ -0,0 +1,102 @@
+/* Copyright (c) 2019 The Khronos Group Inc.
+ * Copyright (c) 2019 Valve Corporation
+ * Copyright (c) 2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: John Zulauf <jzulauf@lunarg.com>
+ */
+#ifndef HASH_VK_TYPES_H_
+#define HASH_VK_TYPES_H_
+
+// Includes everything needed for overloading std::hash
+#include "hash_util.h"
+
+#include <vulkan/vulkan.h>
+#include "vk_safe_struct.h"
+#include <vector>
+
+// Hash and equality and/or compare functions for selected Vk types (and useful collections thereof)
+
+// VkDescriptorSetLayoutBinding
+static bool operator==(const safe_VkDescriptorSetLayoutBinding &lhs, const safe_VkDescriptorSetLayoutBinding &rhs) {
+    if ((lhs.binding != rhs.binding) || (lhs.descriptorType != rhs.descriptorType) ||
+        (lhs.descriptorCount != rhs.descriptorCount) || (lhs.stageFlags != rhs.stageFlags) ||
+        !hash_util::similar_for_nullity(lhs.pImmutableSamplers, rhs.pImmutableSamplers)) {
+        return false;
+    }
+    if (lhs.pImmutableSamplers) {  // either one will do as they *are* similar for nullity (i.e. either both null or both non-null)
+        for (uint32_t samp = 0; samp < lhs.descriptorCount; samp++) {
+            if (lhs.pImmutableSamplers[samp] != rhs.pImmutableSamplers[samp]) {
+                return false;
+            }
+        }
+    }
+    return true;
+}
+
+namespace std {
+template <>
+struct hash<safe_VkDescriptorSetLayoutBinding> {
+    size_t operator()(const safe_VkDescriptorSetLayoutBinding &value) const {
+        hash_util::HashCombiner hc;
+        hc << value.binding << value.descriptorType << value.descriptorCount << value.stageFlags;
+        if (value.pImmutableSamplers) {
+            for (uint32_t samp = 0; samp < value.descriptorCount; samp++) {
+                hc << value.pImmutableSamplers[samp];
+            }
+        }
+        return hc.Value();
+    }
+};
+}  // namespace std
+
+// VkPushConstantRange
+static inline bool operator==(const VkPushConstantRange &lhs, const VkPushConstantRange &rhs) {
+    return (lhs.stageFlags == rhs.stageFlags) && (lhs.offset == rhs.offset) && (lhs.size == rhs.size);
+}
+
+namespace std {
+template <>
+struct hash<VkPushConstantRange> {
+    size_t operator()(const VkPushConstantRange &value) const {
+        hash_util::HashCombiner hc;
+        return (hc << value.stageFlags << value.offset << value.size).Value();
+    }
+};
+}  // namespace std
+
+using PushConstantRanges = std::vector<VkPushConstantRange>;
+
+namespace std {
+template <>
+struct hash<PushConstantRanges> : public hash_util::IsOrderedContainer<PushConstantRanges> {};
+}  // namespace std
+
+// VkImageSubresourceRange
+static bool operator==(const VkImageSubresourceRange &lhs, const VkImageSubresourceRange &rhs) {
+    return (lhs.aspectMask == rhs.aspectMask) && (lhs.baseMipLevel == rhs.baseMipLevel) && (lhs.levelCount == rhs.levelCount) &&
+           (lhs.baseArrayLayer == rhs.baseArrayLayer) && (lhs.layerCount == rhs.layerCount);
+}
+namespace std {
+template <>
+struct hash<VkImageSubresourceRange> {
+    size_t operator()(const VkImageSubresourceRange &value) const {
+        hash_util::HashCombiner hc;
+        hc << value.aspectMask << value.baseMipLevel << value.levelCount << value.baseArrayLayer << value.layerCount;
+        return hc.Value();
+    }
+};
+}  // namespace std
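+
+// A minimal usage sketch (illustration only; CountUniquePushConstantRanges is a
+// hypothetical helper, not used by the layers): with the operator== and std::hash
+// specializations above, these Vk types can key standard unordered containers directly.
+static inline size_t CountUniquePushConstantRanges(const PushConstantRanges &ranges) {
+    std::unordered_set<VkPushConstantRange> unique_ranges(ranges.begin(), ranges.end());
+    return unique_ranges.size();
+}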
+
+#endif  // HASH_VK_TYPES_H_
diff --git a/src/third_party/vulkan-validation-layers/src/layers/json/VkLayer_khronos_validation.json.in b/src/third_party/vulkan-validation-layers/src/layers/json/VkLayer_khronos_validation.json.in
new file mode 100644
index 0000000..e6124f2
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/json/VkLayer_khronos_validation.json.in
@@ -0,0 +1,46 @@
+{
+    "file_format_version" : "1.1.0",
+    "layer" : {
+        "name": "VK_LAYER_KHRONOS_validation",
+        "type": "GLOBAL",
+        "library_path": "@RELATIVE_LAYER_BINARY@",
+        "api_version": "@VK_VERSION@",
+        "implementation_version": "1",
+        "description": "Khronos Validation Layer",
+        "instance_extensions": [
+             {
+                 "name": "VK_EXT_debug_report",
+                 "spec_version": "9"
+             },
+             {
+                 "name": "VK_EXT_debug_utils",
+                 "spec_version": "1"
+             },
+             {
+                 "name": "VK_EXT_validation_features",
+                 "spec_version": "2"
+             }
+         ],
+        "device_extensions": [
+             {
+                 "name": "VK_EXT_debug_marker",
+                 "spec_version": "4",
+                 "entrypoints": ["vkDebugMarkerSetObjectTagEXT",
+                        "vkDebugMarkerSetObjectNameEXT",
+                        "vkCmdDebugMarkerBeginEXT",
+                        "vkCmdDebugMarkerEndEXT",
+                        "vkCmdDebugMarkerInsertEXT"
+                       ]
+             },
+             {
+                 "name": "VK_EXT_validation_cache",
+                 "spec_version": "1",
+                 "entrypoints": ["vkCreateValidationCacheEXT",
+                         "vkDestroyValidationCacheEXT",
+                         "vkGetValidationCacheDataEXT",
+                         "vkMergeValidationCachesEXT"
+                        ]
+             }
+         ]
+    }
+}
diff --git a/src/third_party/vulkan-validation-layers/src/layers/json/VkLayer_standard_validation.json.in b/src/third_party/vulkan-validation-layers/src/layers/json/VkLayer_standard_validation.json.in
new file mode 100644
index 0000000..dc00458
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/json/VkLayer_standard_validation.json.in
@@ -0,0 +1,13 @@
+{
+    "file_format_version": "1.1.1",
+    "layer": {
+        "name": "VK_LAYER_LUNARG_standard_validation",
+        "type": "GLOBAL",
+        "api_version": "@VK_VERSION@",
+        "implementation_version": "1",
+        "description": "LunarG Standard Validation",
+        "component_layers": [
+            "VK_LAYER_KHRONOS_validation"
+        ]
+    }
+}
diff --git a/src/third_party/vulkan-validation-layers/src/layers/libVkLayer_khronos_validation.map b/src/third_party/vulkan-validation-layers/src/layers/libVkLayer_khronos_validation.map
new file mode 100644
index 0000000..ad8ad49
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/libVkLayer_khronos_validation.map
@@ -0,0 +1,10 @@
+{
+  global:
+    vkGetInstanceProcAddr;
+    vkGetDeviceProcAddr;
+    vkEnumerateInstanceLayerProperties;
+    vkEnumerateInstanceExtensionProperties;
+    vkNegotiateLoaderLayerInterfaceVersion;
+  local:
+    *;
+};
diff --git a/src/third_party/vulkan-validation-layers/src/layers/object_lifetime_validation.h b/src/third_party/vulkan-validation-layers/src/layers/object_lifetime_validation.h
new file mode 100644
index 0000000..524c6d3
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/object_lifetime_validation.h
@@ -0,0 +1,276 @@
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (C) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Jon Ashburn <jon@lunarg.com>
+ * Author: Tobin Ehlis <tobine@google.com>
+ */
+
+// Suppress unused warning on Linux
+#if defined(__GNUC__)
+#define DECORATE_UNUSED __attribute__((unused))
+#else
+#define DECORATE_UNUSED
+#endif
+
+// clang-format off
+static const char DECORATE_UNUSED *kVUID_ObjectTracker_Info = "UNASSIGNED-ObjectTracker-Info";
+static const char DECORATE_UNUSED *kVUID_ObjectTracker_InternalError = "UNASSIGNED-ObjectTracker-InternalError";
+static const char DECORATE_UNUSED *kVUID_ObjectTracker_ObjectLeak =    "UNASSIGNED-ObjectTracker-ObjectLeak";
+static const char DECORATE_UNUSED *kVUID_ObjectTracker_UnknownObject = "UNASSIGNED-ObjectTracker-UnknownObject";
+// clang-format on
+
+#undef DECORATE_UNUSED
+
+extern uint64_t object_track_index;
+
+// Object Status -- used to track state of individual objects
+typedef VkFlags ObjectStatusFlags;
+enum ObjectStatusFlagBits {
+    OBJSTATUS_NONE = 0x00000000,                      // No status is set
+    OBJSTATUS_COMMAND_BUFFER_SECONDARY = 0x00000001,  // Command Buffer is of type SECONDARY
+    OBJSTATUS_CUSTOM_ALLOCATOR = 0x00000002,          // Allocated with custom allocator
+};
+
+// Object and state information structure
+struct ObjTrackState {
+    uint64_t handle;                                               // Object handle (new)
+    VulkanObjectType object_type;                                  // Object type identifier
+    ObjectStatusFlags status;                                      // Object state
+    uint64_t parent_object;                                        // Parent object
+    std::unique_ptr<std::unordered_set<uint64_t> > child_objects;  // Child objects (used for VkDescriptorPool only)
+};
+
+typedef vl_concurrent_unordered_map<uint64_t, std::shared_ptr<ObjTrackState>, 6> object_map_type;
+
+class ObjectLifetimes : public ValidationObject {
+  public:
+    // Override chassis read/write locks for this validation object
+    // This override takes a deferred lock. i.e. it is not acquired.
+    // This class does its own locking with a shared mutex.
+    virtual read_lock_guard_t read_lock() { return read_lock_guard_t(validation_object_mutex, std::defer_lock); }
+    virtual write_lock_guard_t write_lock() { return write_lock_guard_t(validation_object_mutex, std::defer_lock); }
+
+    mutable ReadWriteLock object_lifetime_mutex;
+    write_lock_guard_t write_shared_lock() { return write_lock_guard_t(object_lifetime_mutex); }
+    read_lock_guard_t read_shared_lock() const { return read_lock_guard_t(object_lifetime_mutex); }
+
+    std::atomic<uint64_t> num_objects[kVulkanObjectTypeMax + 1];
+    std::atomic<uint64_t> num_total_objects;
+    // Vector of unordered_maps per object type to hold ObjTrackState info
+    object_map_type object_map[kVulkanObjectTypeMax + 1];
+    // Special-case map for swapchain images
+    object_map_type swapchainImageMap;
+
+    // Constructor for object lifetime tracking
+    ObjectLifetimes() : num_objects{}, num_total_objects(0) {}
+
+    void InsertObject(object_map_type &map, uint64_t object_handle, VulkanObjectType object_type,
+                      std::shared_ptr<ObjTrackState> pNode) {
+        bool inserted = map.insert(object_handle, pNode);
+        if (!inserted) {
+            // The object should not already exist. If we couldn't add it to the map, there was probably
+            // a race condition in the app. Report an error and move on.
+            VkDebugReportObjectTypeEXT debug_object_type = get_debug_report_enum[object_type];
+            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, debug_object_type, object_handle, kVUID_ObjectTracker_Info,
+                    "Couldn't insert %s Object 0x%" PRIxLEAST64
+                    ", already existed. This should not happen and may indicate a "
+                    "race condition in the application.",
+                    object_string[object_type], object_handle);
+        }
+    }
+
+    bool ReportUndestroyedInstanceObjects(VkInstance instance, const std::string &error_code) const;
+    bool ReportUndestroyedDeviceObjects(VkDevice device, const std::string &error_code) const;
+
+    bool ReportLeakedDeviceObjects(VkDevice device, VulkanObjectType object_type, const std::string &error_code) const;
+    bool ReportLeakedInstanceObjects(VkInstance instance, VulkanObjectType object_type, const std::string &error_code) const;
+
+    void DestroyUndestroyedObjects(VulkanObjectType object_type);
+
+    void CreateQueue(VkQueue vkObj);
+    void AllocateCommandBuffer(const VkCommandPool command_pool, const VkCommandBuffer command_buffer, VkCommandBufferLevel level);
+    void AllocateDescriptorSet(VkDescriptorPool descriptor_pool, VkDescriptorSet descriptor_set);
+    void CreateSwapchainImageObject(VkImage swapchain_image, VkSwapchainKHR swapchain);
+    void DestroyLeakedInstanceObjects();
+    void DestroyLeakedDeviceObjects();
+    bool ValidateDeviceObject(const VulkanTypedHandle &device_typed, const char *invalid_handle_code,
+                              const char *wrong_device_code) const;
+    void DestroyQueueDataStructures();
+    bool ValidateCommandBuffer(VkCommandPool command_pool, VkCommandBuffer command_buffer) const;
+    bool ValidateDescriptorSet(VkDescriptorPool descriptor_pool, VkDescriptorSet descriptor_set) const;
+    bool ValidateSamplerObjects(const VkDescriptorSetLayoutCreateInfo *pCreateInfo) const;
+    bool ValidateDescriptorWrite(VkWriteDescriptorSet const *desc, bool isPush) const;
+    bool ValidateAnonymousObject(uint64_t object, VkObjectType core_object_type, bool null_allowed, const char *invalid_handle_code,
+                                 const char *wrong_device_code) const;
+
+    ObjectLifetimes *GetObjectLifetimeData(std::vector<ValidationObject *> &object_dispatch) const {
+        for (auto layer_object : object_dispatch) {
+            if (layer_object->container_type == LayerObjectTypeObjectTracker) {
+                return (reinterpret_cast<ObjectLifetimes *>(layer_object));
+            }
+        }
+        return nullptr;
+    };
+
+    bool CheckObjectValidity(uint64_t object_handle, VulkanObjectType object_type, bool null_allowed,
+                             const char *invalid_handle_code, const char *wrong_device_code) const {
+        VkDebugReportObjectTypeEXT debug_object_type = get_debug_report_enum[object_type];
+
+        // Look for object in object map
+        if (!object_map[object_type].contains(object_handle)) {
+            // If object is an image, also look for it in the swapchain image map
+            if ((object_type != kVulkanObjectTypeImage) || (swapchainImageMap.find(object_handle) == swapchainImageMap.end())) {
+                // Object not found, look for it in other device object maps
+                for (auto other_device_data : layer_data_map) {
+                    for (auto layer_object_data : other_device_data.second->object_dispatch) {
+                        if (layer_object_data->container_type == LayerObjectTypeObjectTracker) {
+                            auto object_lifetime_data = reinterpret_cast<ObjectLifetimes *>(layer_object_data);
+                            if (object_lifetime_data && (object_lifetime_data != this)) {
+                                if (object_lifetime_data->object_map[object_type].find(object_handle) !=
+                                        object_lifetime_data->object_map[object_type].end() ||
+                                    (object_type == kVulkanObjectTypeImage &&
+                                     object_lifetime_data->swapchainImageMap.find(object_handle) !=
+                                         object_lifetime_data->swapchainImageMap.end())) {
+                                    // Object found on other device, report an error if object has a device parent error code
+                                    if ((wrong_device_code != kVUIDUndefined) && (object_type != kVulkanObjectTypeSurfaceKHR)) {
+                                        return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, debug_object_type, object_handle,
+                                                       wrong_device_code,
+                                                       "Object 0x%" PRIxLEAST64
+                                                       " was not created, allocated or retrieved from the correct device.",
+                                                       object_handle);
+                                    } else {
+                                        return false;
+                                    }
+                                }
+                            }
+                        }
+                    }
+                }
+                // Report an error if object was not found anywhere
+                return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, debug_object_type, object_handle, invalid_handle_code,
+                               "Invalid %s Object 0x%" PRIxLEAST64 ".", object_string[object_type], object_handle);
+            }
+        }
+        return false;
+    }
+
+    template <typename T1>
+    bool ValidateObject(T1 object, VulkanObjectType object_type, bool null_allowed, const char *invalid_handle_code,
+                        const char *wrong_device_code) const {
+        if (null_allowed && (object == VK_NULL_HANDLE)) {
+            return false;
+        }
+        auto object_handle = HandleToUint64(object);
+
+        if (object_type == kVulkanObjectTypeDevice) {
+            return ValidateDeviceObject(VulkanTypedHandle(object, object_type), invalid_handle_code, wrong_device_code);
+        }
+
+        return CheckObjectValidity(object_handle, object_type, null_allowed, invalid_handle_code, wrong_device_code);
+    }
+
+    template <typename T1>
+    void CreateObject(T1 object, VulkanObjectType object_type, const VkAllocationCallbacks *pAllocator) {
+        uint64_t object_handle = HandleToUint64(object);
+        bool custom_allocator = (pAllocator != nullptr);
+        if (!object_map[object_type].contains(object_handle)) {
+            auto pNewObjNode = std::make_shared<ObjTrackState>();
+            pNewObjNode->object_type = object_type;
+            pNewObjNode->status = custom_allocator ? OBJSTATUS_CUSTOM_ALLOCATOR : OBJSTATUS_NONE;
+            pNewObjNode->handle = object_handle;
+
+            InsertObject(object_map[object_type], object_handle, object_type, pNewObjNode);
+            num_objects[object_type]++;
+            num_total_objects++;
+
+            if (object_type == kVulkanObjectTypeDescriptorPool) {
+                pNewObjNode->child_objects.reset(new std::unordered_set<uint64_t>);
+            }
+        }
+    }
+
+    template <typename T1>
+    void DestroyObjectSilently(T1 object, VulkanObjectType object_type) {
+        auto object_handle = HandleToUint64(object);
+        assert(object_handle != VK_NULL_HANDLE);
+
+        auto item = object_map[object_type].pop(object_handle);
+        if (item == object_map[object_type].end()) {
+            // We've already checked that the object exists. If we couldn't find and atomically remove it
+            // from the map, there must have been a race condition in the app. Report an error and move on.
+            VkDebugReportObjectTypeEXT debug_object_type = get_debug_report_enum[object_type];
+            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, debug_object_type, object_handle, kVUID_ObjectTracker_Info,
+                    "Couldn't destroy %s Object 0x%" PRIxLEAST64
+                    ", not found. This should not happen and may indicate a "
+                    "race condition in the application.",
+                    object_string[object_type], object_handle);
+            return;
+        }
+        assert(num_total_objects > 0);
+
+        num_total_objects--;
+        assert(num_objects[item->second->object_type] > 0);
+
+        num_objects[item->second->object_type]--;
+    }
+
+    template <typename T1>
+    void RecordDestroyObject(T1 object, VulkanObjectType object_type) {
+        auto object_handle = HandleToUint64(object);
+        if (object_handle != VK_NULL_HANDLE) {
+            if (object_map[object_type].contains(object_handle)) {
+                DestroyObjectSilently(object, object_type);
+            }
+        }
+    }
+
+    template <typename T1>
+    bool ValidateDestroyObject(T1 object, VulkanObjectType object_type, const VkAllocationCallbacks *pAllocator,
+                               const char *expected_custom_allocator_code, const char *expected_default_allocator_code) const {
+        auto object_handle = HandleToUint64(object);
+        bool custom_allocator = pAllocator != nullptr;
+        VkDebugReportObjectTypeEXT debug_object_type = get_debug_report_enum[object_type];
+        bool skip = false;
+
+        if ((expected_custom_allocator_code != kVUIDUndefined || expected_default_allocator_code != kVUIDUndefined) &&
+            object_handle != VK_NULL_HANDLE) {
+            auto item = object_map[object_type].find(object_handle);
+            if (item != object_map[object_type].end()) {
+                auto allocated_with_custom = (item->second->status & OBJSTATUS_CUSTOM_ALLOCATOR) ? true : false;
+                if (allocated_with_custom && !custom_allocator && expected_custom_allocator_code != kVUIDUndefined) {
+                    // This check only verifies that custom allocation callbacks were provided to both Create and Destroy calls,
+                    // it cannot verify that these allocation callbacks are compatible with each other.
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, debug_object_type, object_handle,
+                                    expected_custom_allocator_code,
+                                    "Custom allocator not specified while destroying %s obj 0x%" PRIxLEAST64
+                                    " but specified at creation.",
+                                    object_string[object_type], object_handle);
+                } else if (!allocated_with_custom && custom_allocator && expected_default_allocator_code != kVUIDUndefined) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, debug_object_type, object_handle,
+                                    expected_default_allocator_code,
+                                    "Custom allocator specified while destroying %s obj 0x%" PRIxLEAST64
+                                    " but not specified at creation.",
+                                    object_string[object_type], object_handle);
+                }
+            }
+        }
+        return skip;
+    }
+
+#include "object_tracker.h"
+};
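+
+// A minimal usage sketch (illustration only; ExampleTrackHandle is a hypothetical
+// helper, not part of the tracker): the intercept pattern above boils down to
+// recording a handle at creation, validating it on use, and removing it on destruction.
+template <typename HandleT>
+void ExampleTrackHandle(ObjectLifetimes *tracker, HandleT handle, VulkanObjectType type) {
+    tracker->CreateObject(handle, type, nullptr);  // record, default allocator
+    (void)tracker->ValidateObject(handle, type, false /*null_allowed*/, kVUIDUndefined, kVUIDUndefined);
+    tracker->RecordDestroyObject(handle, type);    // drop from the tracking map
+}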
diff --git a/src/third_party/vulkan-validation-layers/src/layers/object_tracker_utils.cpp b/src/third_party/vulkan-validation-layers/src/layers/object_tracker_utils.cpp
new file mode 100644
index 0000000..885a8f7
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/object_tracker_utils.cpp
@@ -0,0 +1,942 @@
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (C) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Jon Ashburn <jon@lunarg.com>
+ * Author: Tobin Ehlis <tobin@lunarg.com>
+ */
+
+#include "chassis.h"
+
+#include "object_lifetime_validation.h"
+
+uint64_t object_track_index = 0;
+
+VulkanTypedHandle ObjTrackStateTypedHandle(const ObjTrackState &track_state) {
+    // TODO: Unify Typed Handle representation (i.e. VulkanTypedHandle everywhere there are handle/type pairs)
+    VulkanTypedHandle typed_handle;
+    typed_handle.handle = track_state.handle;
+    typed_handle.type = track_state.object_type;
+    return typed_handle;
+}
+
+// Destroy memRef lists and free all memory
+void ObjectLifetimes::DestroyQueueDataStructures() {
+    // Destroy the items in the queue map
+    auto snapshot = object_map[kVulkanObjectTypeQueue].snapshot();
+    for (const auto &queue : snapshot) {
+        uint32_t obj_index = queue.second->object_type;
+        assert(num_total_objects > 0);
+        num_total_objects--;
+        assert(num_objects[obj_index] > 0);
+        num_objects[obj_index]--;
+        object_map[kVulkanObjectTypeQueue].erase(queue.first);
+    }
+}
+
+void ObjectLifetimes::DestroyUndestroyedObjects(VulkanObjectType object_type) {
+    auto snapshot = object_map[object_type].snapshot();
+    for (const auto &item : snapshot) {
+        auto object_info = item.second;
+        DestroyObjectSilently(object_info->handle, object_type);
+    }
+}
+
+// Look for this device object in any of the instance child devices lists.
+// NOTE: This is of dubious value. In most circumstances Vulkan will die a flaming death if a dispatchable object is invalid.
+// However, if this layer is loaded first and GetProcAddress is used to make API calls, it will detect bad DOs.
+bool ObjectLifetimes::ValidateDeviceObject(const VulkanTypedHandle &device_typed, const char *invalid_handle_code,
+                                           const char *wrong_device_code) const {
+    auto instance_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    auto instance_object_lifetime_data = GetObjectLifetimeData(instance_data->object_dispatch);
+    if (instance_object_lifetime_data->object_map[kVulkanObjectTypeDevice].contains(device_typed.handle)) {
+        return false;
+    }
+    return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device_typed.handle,
+                   invalid_handle_code, "Invalid %s.", report_data->FormatHandle(device_typed).c_str());
+}
+
+bool ObjectLifetimes::ValidateAnonymousObject(uint64_t object_handle, VkObjectType core_object_type, bool null_allowed,
+                                              const char *invalid_handle_code, const char *wrong_device_code) const {
+    if (null_allowed && (object_handle == VK_NULL_HANDLE)) return false;
+    auto object_type = ConvertCoreObjectToVulkanObject(core_object_type);
+
+    if (object_type == kVulkanObjectTypeDevice) {
+        return ValidateDeviceObject(VulkanTypedHandle(reinterpret_cast<VkDevice>(object_handle), object_type), invalid_handle_code,
+                                    wrong_device_code);
+    }
+    return CheckObjectValidity(object_handle, object_type, null_allowed, invalid_handle_code, wrong_device_code);
+}
+
+void ObjectLifetimes::AllocateCommandBuffer(const VkCommandPool command_pool, const VkCommandBuffer command_buffer,
+                                            VkCommandBufferLevel level) {
+    auto pNewObjNode = std::make_shared<ObjTrackState>();
+    pNewObjNode->object_type = kVulkanObjectTypeCommandBuffer;
+    pNewObjNode->handle = HandleToUint64(command_buffer);
+    pNewObjNode->parent_object = HandleToUint64(command_pool);
+    if (level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
+        pNewObjNode->status = OBJSTATUS_COMMAND_BUFFER_SECONDARY;
+    } else {
+        pNewObjNode->status = OBJSTATUS_NONE;
+    }
+    InsertObject(object_map[kVulkanObjectTypeCommandBuffer], HandleToUint64(command_buffer), kVulkanObjectTypeCommandBuffer,
+                 pNewObjNode);
+    num_objects[kVulkanObjectTypeCommandBuffer]++;
+    num_total_objects++;
+}
+
+bool ObjectLifetimes::ValidateCommandBuffer(VkCommandPool command_pool, VkCommandBuffer command_buffer) const {
+    bool skip = false;
+    uint64_t object_handle = HandleToUint64(command_buffer);
+    auto iter = object_map[kVulkanObjectTypeCommandBuffer].find(object_handle);
+    if (iter != object_map[kVulkanObjectTypeCommandBuffer].end()) {
+        auto pNode = iter->second;
+
+        if (pNode->parent_object != HandleToUint64(command_pool)) {
+            // We know that the parent *must* be a command pool
+            const auto parent_pool = CastFromUint64<VkCommandPool>(pNode->parent_object);
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            object_handle, "VUID-vkFreeCommandBuffers-pCommandBuffers-parent",
+                            "FreeCommandBuffers is attempting to free %s belonging to %s from %s).",
+                            report_data->FormatHandle(command_buffer).c_str(), report_data->FormatHandle(parent_pool).c_str(),
+                            report_data->FormatHandle(command_pool).c_str());
+        }
+    } else {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, object_handle,
+                        "VUID-vkFreeCommandBuffers-pCommandBuffers-00048", "Invalid %s.",
+                        report_data->FormatHandle(command_buffer).c_str());
+    }
+    return skip;
+}
+
+void ObjectLifetimes::AllocateDescriptorSet(VkDescriptorPool descriptor_pool, VkDescriptorSet descriptor_set) {
+    auto pNewObjNode = std::make_shared<ObjTrackState>();
+    pNewObjNode->object_type = kVulkanObjectTypeDescriptorSet;
+    pNewObjNode->status = OBJSTATUS_NONE;
+    pNewObjNode->handle = HandleToUint64(descriptor_set);
+    pNewObjNode->parent_object = HandleToUint64(descriptor_pool);
+    InsertObject(object_map[kVulkanObjectTypeDescriptorSet], HandleToUint64(descriptor_set), kVulkanObjectTypeDescriptorSet,
+                 pNewObjNode);
+    num_objects[kVulkanObjectTypeDescriptorSet]++;
+    num_total_objects++;
+
+    auto itr = object_map[kVulkanObjectTypeDescriptorPool].find(HandleToUint64(descriptor_pool));
+    if (itr != object_map[kVulkanObjectTypeDescriptorPool].end()) {
+        itr->second->child_objects->insert(HandleToUint64(descriptor_set));
+    }
+}
+
+bool ObjectLifetimes::ValidateDescriptorSet(VkDescriptorPool descriptor_pool, VkDescriptorSet descriptor_set) const {
+    bool skip = false;
+    uint64_t object_handle = HandleToUint64(descriptor_set);
+    auto dsItem = object_map[kVulkanObjectTypeDescriptorSet].find(object_handle);
+    if (dsItem != object_map[kVulkanObjectTypeDescriptorSet].end()) {
+        if (dsItem->second->parent_object != HandleToUint64(descriptor_pool)) {
+            // We know that the parent *must* be a descriptor pool
+            const auto parent_pool = CastFromUint64<VkDescriptorPool>(dsItem->second->parent_object);
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+                            object_handle, "VUID-vkFreeDescriptorSets-pDescriptorSets-parent",
+                            "FreeDescriptorSets is attempting to free %s"
+                            " belonging to %s from %s).",
+                            report_data->FormatHandle(descriptor_set).c_str(), report_data->FormatHandle(parent_pool).c_str(),
+                            report_data->FormatHandle(descriptor_pool).c_str());
+        }
+    } else {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, object_handle,
+                        "VUID-vkFreeDescriptorSets-pDescriptorSets-00310", "Invalid %s.",
+                        report_data->FormatHandle(descriptor_set).c_str());
+    }
+    return skip;
+}
+
+bool ObjectLifetimes::ValidateDescriptorWrite(VkWriteDescriptorSet const *desc, bool isPush) const {
+    bool skip = false;
+
+    if (!isPush && desc->dstSet) {
+        skip |= ValidateObject(desc->dstSet, kVulkanObjectTypeDescriptorSet, false, "VUID-VkWriteDescriptorSet-dstSet-00320",
+                               "VUID-VkWriteDescriptorSet-commonparent");
+    }
+
+    if ((desc->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) ||
+        (desc->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER)) {
+        for (uint32_t idx2 = 0; idx2 < desc->descriptorCount; ++idx2) {
+            skip |= ValidateObject(desc->pTexelBufferView[idx2], kVulkanObjectTypeBufferView, false,
+                                   "VUID-VkWriteDescriptorSet-descriptorType-00323", "VUID-VkWriteDescriptorSet-commonparent");
+        }
+    }
+
+    if ((desc->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) ||
+        (desc->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) || (desc->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) ||
+        (desc->descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)) {
+        for (uint32_t idx3 = 0; idx3 < desc->descriptorCount; ++idx3) {
+            skip |= ValidateObject(desc->pImageInfo[idx3].imageView, kVulkanObjectTypeImageView, false,
+                                   "VUID-VkWriteDescriptorSet-descriptorType-00326", "VUID-VkDescriptorImageInfo-commonparent");
+        }
+    }
+
+    if ((desc->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
+        (desc->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) ||
+        (desc->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ||
+        (desc->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) {
+        for (uint32_t idx4 = 0; idx4 < desc->descriptorCount; ++idx4) {
+            if (desc->pBufferInfo[idx4].buffer) {
+                skip |= ValidateObject(desc->pBufferInfo[idx4].buffer, kVulkanObjectTypeBuffer, false,
+                                       "VUID-VkDescriptorBufferInfo-buffer-parameter", kVUIDUndefined);
+            }
+        }
+    }
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
+                                                             VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
+                                                             const VkWriteDescriptorSet *pDescriptorWrites) const {
+    bool skip = false;
+    skip |= ValidateObject(commandBuffer, kVulkanObjectTypeCommandBuffer, false,
+                           "VUID-vkCmdPushDescriptorSetKHR-commandBuffer-parameter", "VUID-vkCmdPushDescriptorSetKHR-commonparent");
+    skip |= ValidateObject(layout, kVulkanObjectTypePipelineLayout, false, "VUID-vkCmdPushDescriptorSetKHR-layout-parameter",
+                           "VUID-vkCmdPushDescriptorSetKHR-commonparent");
+    if (pDescriptorWrites) {
+        for (uint32_t index0 = 0; index0 < descriptorWriteCount; ++index0) {
+            skip |= ValidateDescriptorWrite(&pDescriptorWrites[index0], true);
+        }
+    }
+    return skip;
+}
+
+void ObjectLifetimes::CreateQueue(VkQueue vkObj) {
+    std::shared_ptr<ObjTrackState> p_obj_node = NULL;
+    auto queue_item = object_map[kVulkanObjectTypeQueue].find(HandleToUint64(vkObj));
+    if (queue_item == object_map[kVulkanObjectTypeQueue].end()) {
+        p_obj_node = std::make_shared<ObjTrackState>();
+        InsertObject(object_map[kVulkanObjectTypeQueue], HandleToUint64(vkObj), kVulkanObjectTypeQueue, p_obj_node);
+        num_objects[kVulkanObjectTypeQueue]++;
+        num_total_objects++;
+    } else {
+        p_obj_node = queue_item->second;
+    }
+    p_obj_node->object_type = kVulkanObjectTypeQueue;
+    p_obj_node->status = OBJSTATUS_NONE;
+    p_obj_node->handle = HandleToUint64(vkObj);
+}
+
+void ObjectLifetimes::CreateSwapchainImageObject(VkImage swapchain_image, VkSwapchainKHR swapchain) {
+    if (!swapchainImageMap.contains(HandleToUint64(swapchain_image))) {
+        auto pNewObjNode = std::make_shared<ObjTrackState>();
+        pNewObjNode->object_type = kVulkanObjectTypeImage;
+        pNewObjNode->status = OBJSTATUS_NONE;
+        pNewObjNode->handle = HandleToUint64(swapchain_image);
+        pNewObjNode->parent_object = HandleToUint64(swapchain);
+        InsertObject(swapchainImageMap, HandleToUint64(swapchain_image), kVulkanObjectTypeImage, pNewObjNode);
+    }
+}
+
+bool ObjectLifetimes::ReportLeakedInstanceObjects(VkInstance instance, VulkanObjectType object_type,
+                                                  const std::string &error_code) const {
+    bool skip = false;
+
+    auto snapshot = object_map[object_type].snapshot();
+    for (const auto &item : snapshot) {
+        const auto object_info = item.second;
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[object_type], object_info->handle,
+                        error_code, "OBJ ERROR : For %s, %s has not been destroyed.", report_data->FormatHandle(instance).c_str(),
+                        report_data->FormatHandle(ObjTrackStateTypedHandle(*object_info)).c_str());
+    }
+    return skip;
+}
+
+bool ObjectLifetimes::ReportLeakedDeviceObjects(VkDevice device, VulkanObjectType object_type,
+                                                const std::string &error_code) const {
+    bool skip = false;
+
+    auto snapshot = object_map[object_type].snapshot();
+    for (const auto &item : snapshot) {
+        const auto object_info = item.second;
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[object_type], object_info->handle,
+                        error_code, "OBJ ERROR : For %s, %s has not been destroyed.", report_data->FormatHandle(device).c_str(),
+                        report_data->FormatHandle(ObjTrackStateTypedHandle(*object_info)).c_str());
+    }
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateDestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator) const {
+    bool skip = false;
+
+    // We validate here for coverage, though we'd not have made it this far with a bad instance.
+    skip |= ValidateObject(instance, kVulkanObjectTypeInstance, true, "VUID-vkDestroyInstance-instance-parameter", kVUIDUndefined);
+
+    // Validate that child devices have been destroyed
+    auto snapshot = object_map[kVulkanObjectTypeDevice].snapshot();
+    for (const auto &iit : snapshot) {
+        auto pNode = iit.second;
+
+        VkDevice device = reinterpret_cast<VkDevice>(pNode->handle);
+        VkDebugReportObjectTypeEXT debug_object_type = get_debug_report_enum[pNode->object_type];
+
+        skip |=
+            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, debug_object_type, pNode->handle, kVUID_ObjectTracker_ObjectLeak,
+                    "OBJ ERROR : %s object %s has not been destroyed.", string_VkDebugReportObjectTypeEXT(debug_object_type),
+                    report_data->FormatHandle(ObjTrackStateTypedHandle(*pNode)).c_str());
+
+        // Throw errors if any device objects belonging to this instance have not been destroyed
+        skip |= ReportUndestroyedDeviceObjects(device, "VUID-vkDestroyDevice-device-00378");
+
+        skip |= ValidateDestroyObject(device, kVulkanObjectTypeDevice, pAllocator, "VUID-vkDestroyInstance-instance-00630",
+                                      "VUID-vkDestroyInstance-instance-00631");
+    }
+
+    // Throw errors if any instance objects created on this instance have not been destroyed
+    ValidateDestroyObject(instance, kVulkanObjectTypeInstance, pAllocator, "VUID-vkDestroyInstance-instance-00630",
+                          "VUID-vkDestroyInstance-instance-00631");
+
+    // Report any remaining instance objects
+    skip |= ReportUndestroyedInstanceObjects(instance, "VUID-vkDestroyInstance-instance-00629");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
+                                                              VkPhysicalDevice *pPhysicalDevices) const {
+    bool skip = ValidateObject(instance, kVulkanObjectTypeInstance, false, "VUID-vkEnumeratePhysicalDevices-instance-parameter",
+                               kVUIDUndefined);
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
+                                                             VkPhysicalDevice *pPhysicalDevices, VkResult result) {
+    if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
+    if (pPhysicalDevices) {
+        for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
+            CreateObject(pPhysicalDevices[i], kVulkanObjectTypePhysicalDevice, nullptr);
+        }
+    }
+}
+
+void ObjectLifetimes::PreCallRecordDestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator) {
+    // Destroy physical devices
+    auto snapshot = object_map[kVulkanObjectTypePhysicalDevice].snapshot();
+    for (const auto &iit : snapshot) {
+        auto pNode = iit.second;
+        VkPhysicalDevice physical_device = reinterpret_cast<VkPhysicalDevice>(pNode->handle);
+        RecordDestroyObject(physical_device, kVulkanObjectTypePhysicalDevice);
+    }
+
+    // Destroy child devices
+    auto snapshot2 = object_map[kVulkanObjectTypeDevice].snapshot();
+    for (const auto &iit : snapshot2) {
+        auto pNode = iit.second;
+        VkDevice device = reinterpret_cast<VkDevice>(pNode->handle);
+        DestroyLeakedInstanceObjects();
+
+        RecordDestroyObject(device, kVulkanObjectTypeDevice);
+    }
+}
+
+void ObjectLifetimes::PostCallRecordDestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator) {
+    RecordDestroyObject(instance, kVulkanObjectTypeInstance);
+}
+
+bool ObjectLifetimes::PreCallValidateDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, true, "VUID-vkDestroyDevice-device-parameter", kVUIDUndefined);
+    skip |= ValidateDestroyObject(device, kVulkanObjectTypeDevice, pAllocator, "VUID-vkDestroyDevice-device-00379",
+                                  "VUID-vkDestroyDevice-device-00380");
+    // Report any remaining objects associated with this VkDevice object in LL
+    skip |= ReportUndestroyedDeviceObjects(device, "VUID-vkDestroyDevice-device-00378");
+
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
+    auto instance_data = GetLayerDataPtr(get_dispatch_key(physical_device), layer_data_map);
+    ValidationObject *validation_data = GetValidationObject(instance_data->object_dispatch, LayerObjectTypeObjectTracker);
+    ObjectLifetimes *object_lifetimes = static_cast<ObjectLifetimes *>(validation_data);
+    object_lifetimes->RecordDestroyObject(device, kVulkanObjectTypeDevice);
+    DestroyLeakedDeviceObjects();
+
+    // Clean up Queue's MemRef Linked Lists
+    DestroyQueueDataStructures();
+}
+
+bool ObjectLifetimes::PreCallValidateGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
+                                                    VkQueue *pQueue) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetDeviceQueue-device-parameter", kVUIDUndefined);
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
+                                                   VkQueue *pQueue) {
+    auto lock = write_shared_lock();
+    CreateQueue(*pQueue);
+}
+
+bool ObjectLifetimes::PreCallValidateGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) const {
+    return ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetDeviceQueue2-device-parameter", kVUIDUndefined);
+}
+
+void ObjectLifetimes::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
+    auto lock = write_shared_lock();
+    CreateQueue(*pQueue);
+}
+
+bool ObjectLifetimes::PreCallValidateUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
+                                                          const VkWriteDescriptorSet *pDescriptorWrites,
+                                                          uint32_t descriptorCopyCount,
+                                                          const VkCopyDescriptorSet *pDescriptorCopies) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkUpdateDescriptorSets-device-parameter", kVUIDUndefined);
+    if (pDescriptorCopies) {
+        for (uint32_t idx0 = 0; idx0 < descriptorCopyCount; ++idx0) {
+            if (pDescriptorCopies[idx0].dstSet) {
+                skip |= ValidateObject(pDescriptorCopies[idx0].dstSet, kVulkanObjectTypeDescriptorSet, false,
+                                       "VUID-VkCopyDescriptorSet-dstSet-parameter", "VUID-VkCopyDescriptorSet-commonparent");
+            }
+            if (pDescriptorCopies[idx0].srcSet) {
+                skip |= ValidateObject(pDescriptorCopies[idx0].srcSet, kVulkanObjectTypeDescriptorSet, false,
+                                       "VUID-VkCopyDescriptorSet-srcSet-parameter", "VUID-VkCopyDescriptorSet-commonparent");
+            }
+        }
+    }
+    if (pDescriptorWrites) {
+        for (uint32_t idx1 = 0; idx1 < descriptorWriteCount; ++idx1) {
+            skip |= ValidateDescriptorWrite(&pDescriptorWrites[idx1], false);
+        }
+    }
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
+                                                         VkDescriptorPoolResetFlags flags) const {
+    bool skip = false;
+    auto lock = read_shared_lock();
+
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkResetDescriptorPool-device-parameter", kVUIDUndefined);
+    skip |=
+        ValidateObject(descriptorPool, kVulkanObjectTypeDescriptorPool, false,
+                       "VUID-vkResetDescriptorPool-descriptorPool-parameter", "VUID-vkResetDescriptorPool-descriptorPool-parent");
+
+    auto itr = object_map[kVulkanObjectTypeDescriptorPool].find(HandleToUint64(descriptorPool));
+    if (itr != object_map[kVulkanObjectTypeDescriptorPool].end()) {
+        auto pPoolNode = itr->second;
+        for (auto set : *pPoolNode->child_objects) {
+            skip |= ValidateDestroyObject((VkDescriptorSet)set, kVulkanObjectTypeDescriptorSet, nullptr, kVUIDUndefined,
+                                          kVUIDUndefined);
+        }
+    }
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
+                                                       VkDescriptorPoolResetFlags flags) {
+    auto lock = write_shared_lock();
+    // A DescriptorPool's descriptor sets are implicitly deleted when the pool is reset. Remove this pool's descriptor sets from
+    // our descriptorSet map.
+    auto itr = object_map[kVulkanObjectTypeDescriptorPool].find(HandleToUint64(descriptorPool));
+    if (itr != object_map[kVulkanObjectTypeDescriptorPool].end()) {
+        auto pPoolNode = itr->second;
+        for (auto set : *pPoolNode->child_objects) {
+            RecordDestroyObject((VkDescriptorSet)set, kVulkanObjectTypeDescriptorSet);
+        }
+        pPoolNode->child_objects->clear();
+    }
+}
+
+bool ObjectLifetimes::PreCallValidateBeginCommandBuffer(VkCommandBuffer command_buffer,
+                                                        const VkCommandBufferBeginInfo *begin_info) const {
+    bool skip = false;
+    skip |= ValidateObject(command_buffer, kVulkanObjectTypeCommandBuffer, false,
+                           "VUID-vkBeginCommandBuffer-commandBuffer-parameter", kVUIDUndefined);
+    if (begin_info) {
+        auto iter = object_map[kVulkanObjectTypeCommandBuffer].find(HandleToUint64(command_buffer));
+        if (iter != object_map[kVulkanObjectTypeCommandBuffer].end()) {
+            auto pNode = iter->second;
+            if ((begin_info->pInheritanceInfo) && (pNode->status & OBJSTATUS_COMMAND_BUFFER_SECONDARY) &&
+                (begin_info->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
+                skip |=
+                    ValidateObject(begin_info->pInheritanceInfo->framebuffer, kVulkanObjectTypeFramebuffer, true,
+                                   "VUID-VkCommandBufferBeginInfo-flags-00055", "VUID-VkCommandBufferInheritanceInfo-commonparent");
+                skip |=
+                    ValidateObject(begin_info->pInheritanceInfo->renderPass, kVulkanObjectTypeRenderPass, false,
+                                   "VUID-VkCommandBufferBeginInfo-flags-00053", "VUID-VkCommandBufferInheritanceInfo-commonparent");
+            }
+        }
+    }
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
+                                                           uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetSwapchainImagesKHR-device-parameter",
+                           "VUID-vkGetSwapchainImagesKHR-commonparent");
+    skip |= ValidateObject(swapchain, kVulkanObjectTypeSwapchainKHR, false, "VUID-vkGetSwapchainImagesKHR-swapchain-parameter",
+                           "VUID-vkGetSwapchainImagesKHR-commonparent");
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
+                                                          VkImage *pSwapchainImages, VkResult result) {
+    if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
+    auto lock = write_shared_lock();
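+    // Track each presentable image as a child of its swapchain so the images can be cleaned up when the swapchain is
+    // destroyed (see PreCallRecordDestroySwapchainKHR below).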
+    if (pSwapchainImages != NULL) {
+        for (uint32_t i = 0; i < *pSwapchainImageCount; i++) {
+            CreateSwapchainImageObject(pSwapchainImages[i], swapchain);
+        }
+    }
+}
+
+bool ObjectLifetimes::PreCallValidateCreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
+                                                               const VkAllocationCallbacks *pAllocator,
+                                                               VkDescriptorSetLayout *pSetLayout) const {
+    bool skip = false;
+    skip |=
+        ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreateDescriptorSetLayout-device-parameter", kVUIDUndefined);
+    if (pCreateInfo) {
+        if (pCreateInfo->pBindings) {
+            for (uint32_t binding_index = 0; binding_index < pCreateInfo->bindingCount; ++binding_index) {
+                const VkDescriptorSetLayoutBinding &binding = pCreateInfo->pBindings[binding_index];
+                const bool is_sampler_type = binding.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER ||
+                                             binding.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+                if (binding.pImmutableSamplers && is_sampler_type) {
+                    for (uint32_t index2 = 0; index2 < binding.descriptorCount; ++index2) {
+                        const VkSampler sampler = binding.pImmutableSamplers[index2];
+                        skip |= ValidateObject(sampler, kVulkanObjectTypeSampler, false,
+                                               "VUID-VkDescriptorSetLayoutBinding-descriptorType-00282", kVUIDUndefined);
+                    }
+                }
+            }
+        }
+    }
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
+                                                              const VkAllocationCallbacks *pAllocator,
+                                                              VkDescriptorSetLayout *pSetLayout, VkResult result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pSetLayout, kVulkanObjectTypeDescriptorSetLayout, pAllocator);
+}
+
+bool ObjectLifetimes::ValidateSamplerObjects(const VkDescriptorSetLayoutCreateInfo *pCreateInfo) const {
+    bool skip = false;
+    if (pCreateInfo->pBindings) {
+        for (uint32_t index1 = 0; index1 < pCreateInfo->bindingCount; ++index1) {
+            for (uint32_t index2 = 0; index2 < pCreateInfo->pBindings[index1].descriptorCount; ++index2) {
+                if (pCreateInfo->pBindings[index1].pImmutableSamplers) {
+                    skip |= ValidateObject(pCreateInfo->pBindings[index1].pImmutableSamplers[index2], kVulkanObjectTypeSampler,
+                                           true, "VUID-VkDescriptorSetLayoutBinding-descriptorType-00282", kVUIDUndefined);
+                }
+            }
+        }
+    }
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetDescriptorSetLayoutSupport(VkDevice device,
+                                                                   const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
+                                                                   VkDescriptorSetLayoutSupport *pSupport) const {
+    bool skip = ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetDescriptorSetLayoutSupport-device-parameter",
+                               kVUIDUndefined);
+    if (pCreateInfo) {
+        skip |= ValidateSamplerObjects(pCreateInfo);
+    }
+    return skip;
+}
+bool ObjectLifetimes::PreCallValidateGetDescriptorSetLayoutSupportKHR(VkDevice device,
+                                                                      const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
+                                                                      VkDescriptorSetLayoutSupport *pSupport) const {
+    bool skip = ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkGetDescriptorSetLayoutSupportKHR-device-parameter",
+                               kVUIDUndefined);
+    if (pCreateInfo) {
+        skip |= ValidateSamplerObjects(pCreateInfo);
+    }
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
+                                                                            uint32_t *pQueueFamilyPropertyCount,
+                                                                            VkQueueFamilyProperties *pQueueFamilyProperties) const {
+    return ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false,
+                          "VUID-vkGetPhysicalDeviceQueueFamilyProperties-physicalDevice-parameter", kVUIDUndefined);
+}
+
+void ObjectLifetimes::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
+                                                                           uint32_t *pQueueFamilyPropertyCount,
+                                                                           VkQueueFamilyProperties *pQueueFamilyProperties) {}
+
+void ObjectLifetimes::PostCallRecordCreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
+                                                   VkInstance *pInstance, VkResult result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pInstance, kVulkanObjectTypeInstance, pAllocator);
+}
+
+bool ObjectLifetimes::PreCallValidateAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pAllocateInfo,
+                                                            VkCommandBuffer *pCommandBuffers) const {
+    bool skip = false;
+    skip |=
+        ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkAllocateCommandBuffers-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(pAllocateInfo->commandPool, kVulkanObjectTypeCommandPool, false,
+                           "VUID-VkCommandBufferAllocateInfo-commandPool-parameter", kVUIDUndefined);
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pAllocateInfo,
+                                                           VkCommandBuffer *pCommandBuffers, VkResult result) {
+    if (result != VK_SUCCESS) return;
+    for (uint32_t i = 0; i < pAllocateInfo->commandBufferCount; i++) {
+        AllocateCommandBuffer(pAllocateInfo->commandPool, pCommandBuffers[i], pAllocateInfo->level);
+    }
+}
+
+bool ObjectLifetimes::PreCallValidateAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
+                                                            VkDescriptorSet *pDescriptorSets) const {
+    bool skip = false;
+    auto lock = read_shared_lock();
+    skip |=
+        ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkAllocateDescriptorSets-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(pAllocateInfo->descriptorPool, kVulkanObjectTypeDescriptorPool, false,
+                           "VUID-VkDescriptorSetAllocateInfo-descriptorPool-parameter",
+                           "VUID-VkDescriptorSetAllocateInfo-commonparent");
+    for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
+        skip |= ValidateObject(pAllocateInfo->pSetLayouts[i], kVulkanObjectTypeDescriptorSetLayout, false,
+                               "VUID-VkDescriptorSetAllocateInfo-pSetLayouts-parameter",
+                               "VUID-VkDescriptorSetAllocateInfo-commonparent");
+    }
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
+                                                           VkDescriptorSet *pDescriptorSets, VkResult result) {
+    if (result != VK_SUCCESS) return;
+    auto lock = write_shared_lock();
+    for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
+        AllocateDescriptorSet(pAllocateInfo->descriptorPool, pDescriptorSets[i]);
+    }
+}
+
+bool ObjectLifetimes::PreCallValidateFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount,
+                                                        const VkCommandBuffer *pCommandBuffers) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkFreeCommandBuffers-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(commandPool, kVulkanObjectTypeCommandPool, false, "VUID-vkFreeCommandBuffers-commandPool-parameter",
+                           "VUID-vkFreeCommandBuffers-commandPool-parent");
+    for (uint32_t i = 0; i < commandBufferCount; i++) {
+        if (pCommandBuffers[i] != VK_NULL_HANDLE) {
+            skip |= ValidateCommandBuffer(commandPool, pCommandBuffers[i]);
+            skip |=
+                ValidateDestroyObject(pCommandBuffers[i], kVulkanObjectTypeCommandBuffer, nullptr, kVUIDUndefined, kVUIDUndefined);
+        }
+    }
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount,
+                                                      const VkCommandBuffer *pCommandBuffers) {
+    for (uint32_t i = 0; i < commandBufferCount; i++) {
+        RecordDestroyObject(pCommandBuffers[i], kVulkanObjectTypeCommandBuffer);
+    }
+}
+
+bool ObjectLifetimes::PreCallValidateDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
+                                                         const VkAllocationCallbacks *pAllocator) const {
+    return ValidateDestroyObject(swapchain, kVulkanObjectTypeSwapchainKHR, pAllocator, "VUID-vkDestroySwapchainKHR-swapchain-01283",
+                                 "VUID-vkDestroySwapchainKHR-swapchain-01284");
+}
+
+void ObjectLifetimes::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
+                                                       const VkAllocationCallbacks *pAllocator) {
+    RecordDestroyObject(swapchain, kVulkanObjectTypeSwapchainKHR);
+
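+    // The swapchain's presentable images are destroyed along with the swapchain, so drop them from the swapchain
+    // image map as well.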
+    auto snapshot = swapchainImageMap.snapshot(
+        [swapchain](std::shared_ptr<ObjTrackState> pNode) { return pNode->parent_object == HandleToUint64(swapchain); });
+    for (const auto &itr : snapshot) {
+        swapchainImageMap.erase(itr.first);
+    }
+}
+
+bool ObjectLifetimes::PreCallValidateFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool,
+                                                        uint32_t descriptorSetCount, const VkDescriptorSet *pDescriptorSets) const {
+    auto lock = read_shared_lock();
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkFreeDescriptorSets-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(descriptorPool, kVulkanObjectTypeDescriptorPool, false,
+                           "VUID-vkFreeDescriptorSets-descriptorPool-parameter", "VUID-vkFreeDescriptorSets-descriptorPool-parent");
+    for (uint32_t i = 0; i < descriptorSetCount; i++) {
+        if (pDescriptorSets[i] != VK_NULL_HANDLE) {
+            skip |= ValidateDescriptorSet(descriptorPool, pDescriptorSets[i]);
+            skip |=
+                ValidateDestroyObject(pDescriptorSets[i], kVulkanObjectTypeDescriptorSet, nullptr, kVUIDUndefined, kVUIDUndefined);
+        }
+    }
+    return skip;
+}
+void ObjectLifetimes::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount,
+                                                      const VkDescriptorSet *pDescriptorSets) {
+    auto lock = write_shared_lock();
+    std::shared_ptr<ObjTrackState> pPoolNode = nullptr;
+    auto itr = object_map[kVulkanObjectTypeDescriptorPool].find(HandleToUint64(descriptorPool));
+    if (itr != object_map[kVulkanObjectTypeDescriptorPool].end()) {
+        pPoolNode = itr->second;
+    }
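+    // Remove each freed descriptor set from the tracker and detach it from its parent pool's list of child objects.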
+    for (uint32_t i = 0; i < descriptorSetCount; i++) {
+        RecordDestroyObject(pDescriptorSets[i], kVulkanObjectTypeDescriptorSet);
+        if (pPoolNode) {
+            pPoolNode->child_objects->erase(HandleToUint64(pDescriptorSets[i]));
+        }
+    }
+}
+
+bool ObjectLifetimes::PreCallValidateDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
+                                                           const VkAllocationCallbacks *pAllocator) const {
+    auto lock = read_shared_lock();
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyDescriptorPool-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(descriptorPool, kVulkanObjectTypeDescriptorPool, true,
+                           "VUID-vkDestroyDescriptorPool-descriptorPool-parameter",
+                           "VUID-vkDestroyDescriptorPool-descriptorPool-parent");
+
+    auto itr = object_map[kVulkanObjectTypeDescriptorPool].find(HandleToUint64(descriptorPool));
+    if (itr != object_map[kVulkanObjectTypeDescriptorPool].end()) {
+        auto pPoolNode = itr->second;
+        for (auto set : *pPoolNode->child_objects) {
+            skip |= ValidateDestroyObject((VkDescriptorSet)set, kVulkanObjectTypeDescriptorSet, nullptr, kVUIDUndefined,
+                                          kVUIDUndefined);
+        }
+    }
+    skip |= ValidateDestroyObject(descriptorPool, kVulkanObjectTypeDescriptorPool, pAllocator,
+                                  "VUID-vkDestroyDescriptorPool-descriptorPool-00304",
+                                  "VUID-vkDestroyDescriptorPool-descriptorPool-00305");
+    return skip;
+}
+void ObjectLifetimes::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
+                                                         const VkAllocationCallbacks *pAllocator) {
+    auto lock = write_shared_lock();
+    auto itr = object_map[kVulkanObjectTypeDescriptorPool].find(HandleToUint64(descriptorPool));
+    if (itr != object_map[kVulkanObjectTypeDescriptorPool].end()) {
+        auto pPoolNode = itr->second;
+        for (auto set : *pPoolNode->child_objects) {
+            RecordDestroyObject((VkDescriptorSet)set, kVulkanObjectTypeDescriptorSet);
+        }
+        pPoolNode->child_objects->clear();
+    }
+    RecordDestroyObject(descriptorPool, kVulkanObjectTypeDescriptorPool);
+}
+
+bool ObjectLifetimes::PreCallValidateDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
+                                                        const VkAllocationCallbacks *pAllocator) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkDestroyCommandPool-device-parameter", kVUIDUndefined);
+    skip |= ValidateObject(commandPool, kVulkanObjectTypeCommandPool, true, "VUID-vkDestroyCommandPool-commandPool-parameter",
+                           "VUID-vkDestroyCommandPool-commandPool-parent");
+
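+    // Destroying a command pool implicitly frees all command buffers allocated from it, so validate the destruction
+    // of each of this pool's command buffers as well.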
+    auto snapshot = object_map[kVulkanObjectTypeCommandBuffer].snapshot(
+        [commandPool](std::shared_ptr<ObjTrackState> pNode) { return pNode->parent_object == HandleToUint64(commandPool); });
+    for (const auto &itr : snapshot) {
+        auto pNode = itr.second;
+        skip |= ValidateCommandBuffer(commandPool, reinterpret_cast<VkCommandBuffer>(itr.first));
+        skip |= ValidateDestroyObject(reinterpret_cast<VkCommandBuffer>(itr.first), kVulkanObjectTypeCommandBuffer, nullptr,
+                                      kVUIDUndefined, kVUIDUndefined);
+    }
+    skip |= ValidateDestroyObject(commandPool, kVulkanObjectTypeCommandPool, pAllocator,
+                                  "VUID-vkDestroyCommandPool-commandPool-00042", "VUID-vkDestroyCommandPool-commandPool-00043");
+    return skip;
+}
+
+void ObjectLifetimes::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
+                                                      const VkAllocationCallbacks *pAllocator) {
+    auto snapshot = object_map[kVulkanObjectTypeCommandBuffer].snapshot(
+        [commandPool](std::shared_ptr<ObjTrackState> pNode) { return pNode->parent_object == HandleToUint64(commandPool); });
+    // A CommandPool's command buffers are implicitly freed when the pool is destroyed. Remove them from the command buffer map.
+    for (const auto &itr : snapshot) {
+        RecordDestroyObject(reinterpret_cast<VkCommandBuffer>(itr.first), kVulkanObjectTypeCommandBuffer);
+    }
+    RecordDestroyObject(commandPool, kVulkanObjectTypeCommandPool);
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceQueueFamilyProperties2(
+    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties2KHR *pQueueFamilyProperties) const {
+    return ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false,
+                          "VUID-vkGetPhysicalDeviceQueueFamilyProperties2-physicalDevice-parameter", kVUIDUndefined);
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceQueueFamilyProperties2KHR(
+    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2 *pQueueFamilyProperties) const {
+    return ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false,
+                          "VUID-vkGetPhysicalDeviceQueueFamilyProperties2-physicalDevice-parameter", kVUIDUndefined);
+}
+
+void ObjectLifetimes::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice,
+                                                                            uint32_t *pQueueFamilyPropertyCount,
+                                                                            VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {}
+
+void ObjectLifetimes::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
+    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice,
+                                                                           uint32_t *pPropertyCount,
+                                                                           VkDisplayPropertiesKHR *pProperties) const {
+    return ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false,
+                          "VUID-vkGetPhysicalDeviceDisplayPropertiesKHR-physicalDevice-parameter", kVUIDUndefined);
+}
+
+void ObjectLifetimes::PostCallRecordGetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
+                                                                          VkDisplayPropertiesKHR *pProperties, VkResult result) {
+    if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
+    if (pProperties) {
+        for (uint32_t i = 0; i < *pPropertyCount; ++i) {
+            CreateObject(pProperties[i].display, kVulkanObjectTypeDisplayKHR, nullptr);
+        }
+    }
+}
+
+bool ObjectLifetimes::PreCallValidateGetDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display,
+                                                                 uint32_t *pPropertyCount,
+                                                                 VkDisplayModePropertiesKHR *pProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false,
+                           "VUID-vkGetDisplayModePropertiesKHR-physicalDevice-parameter", kVUIDUndefined);
+    skip |= ValidateObject(display, kVulkanObjectTypeDisplayKHR, false, "VUID-vkGetDisplayModePropertiesKHR-display-parameter",
+                           kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordGetDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display,
+                                                                uint32_t *pPropertyCount, VkDisplayModePropertiesKHR *pProperties,
+                                                                VkResult result) {
+    if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
+    if (pProperties) {
+        for (uint32_t i = 0; i < *pPropertyCount; ++i) {
+            CreateObject(pProperties[i].displayMode, kVulkanObjectTypeDisplayModeKHR, nullptr);
+        }
+    }
+}
+
+bool ObjectLifetimes::PreCallValidateGetPhysicalDeviceDisplayProperties2KHR(VkPhysicalDevice physicalDevice,
+                                                                            uint32_t *pPropertyCount,
+                                                                            VkDisplayProperties2KHR *pProperties) const {
+    return ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false,
+                          "VUID-vkGetPhysicalDeviceDisplayProperties2KHR-physicalDevice-parameter", kVUIDUndefined);
+}
+
+void ObjectLifetimes::PostCallRecordGetPhysicalDeviceDisplayProperties2KHR(VkPhysicalDevice physicalDevice,
+                                                                           uint32_t *pPropertyCount,
+                                                                           VkDisplayProperties2KHR *pProperties, VkResult result) {
+    if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
+    for (uint32_t index = 0; index < *pPropertyCount; ++index) {
+        CreateObject(pProperties[index].displayProperties.display, kVulkanObjectTypeDisplayKHR, nullptr);
+    }
+}
+
+bool ObjectLifetimes::PreCallValidateGetDisplayModeProperties2KHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display,
+                                                                  uint32_t *pPropertyCount,
+                                                                  VkDisplayModeProperties2KHR *pProperties) const {
+    bool skip = false;
+    skip |= ValidateObject(physicalDevice, kVulkanObjectTypePhysicalDevice, false,
+                           "VUID-vkGetDisplayModeProperties2KHR-physicalDevice-parameter", kVUIDUndefined);
+    skip |= ValidateObject(display, kVulkanObjectTypeDisplayKHR, false, "VUID-vkGetDisplayModeProperties2KHR-display-parameter",
+                           kVUIDUndefined);
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordGetDisplayModeProperties2KHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display,
+                                                                 uint32_t *pPropertyCount, VkDisplayModeProperties2KHR *pProperties,
+                                                                 VkResult result) {
+    if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
+    for (uint32_t index = 0; index < *pPropertyCount; ++index) {
+        CreateObject(pProperties[index].displayModeProperties.displayMode, kVulkanObjectTypeDisplayModeKHR, nullptr);
+    }
+}
+
+bool ObjectLifetimes::PreCallValidateAcquirePerformanceConfigurationINTEL(
+    VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL *pAcquireInfo,
+    VkPerformanceConfigurationINTEL *pConfiguration) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkAcquirePerformanceConfigurationINTEL-device-parameter",
+                           kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateReleasePerformanceConfigurationINTEL(VkDevice device,
+                                                                          VkPerformanceConfigurationINTEL configuration) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkReleasePerformanceConfigurationINTEL-device-parameter",
+                           kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateQueueSetPerformanceConfigurationINTEL(VkQueue queue,
+                                                                           VkPerformanceConfigurationINTEL configuration) const {
+    bool skip = false;
+    skip |= ValidateObject(queue, kVulkanObjectTypeQueue, false, "VUID-vkQueueSetPerformanceConfigurationINTEL-queue-parameter",
+                           "VUID-vkQueueSetPerformanceConfigurationINTEL-commonparent");
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
+                                                       const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkCreateFramebuffer-device-parameter", kVUIDUndefined);
+    if (pCreateInfo) {
+        skip |= ValidateObject(pCreateInfo->renderPass, kVulkanObjectTypeRenderPass, false,
+                               "VUID-VkFramebufferCreateInfo-renderPass-parameter", "VUID-VkFramebufferCreateInfo-commonparent");
+        if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
+            for (uint32_t index1 = 0; index1 < pCreateInfo->attachmentCount; ++index1) {
+                skip |= ValidateObject(pCreateInfo->pAttachments[index1], kVulkanObjectTypeImageView, true, kVUIDUndefined,
+                                       "VUID-VkFramebufferCreateInfo-commonparent");
+            }
+        }
+    }
+
+    return skip;
+}
+
+void ObjectLifetimes::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
+                                                      const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
+                                                      VkResult result) {
+    if (result != VK_SUCCESS) return;
+    CreateObject(*pFramebuffer, kVulkanObjectTypeFramebuffer, pAllocator);
+}
+
+bool ObjectLifetimes::PreCallValidateSetDebugUtilsObjectNameEXT(VkDevice device,
+                                                                const VkDebugUtilsObjectNameInfoEXT *pNameInfo) const {
+    bool skip = false;
+    skip |= ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkSetDebugUtilsObjectNameEXT-device-parameter",
+                           kVUIDUndefined);
+    skip |= ValidateAnonymousObject(pNameInfo->objectHandle, pNameInfo->objectType, false,
+                                    "VUID-VkDebugUtilsObjectNameInfoEXT-objectType-02590", kVUIDUndefined);
+
+    return skip;
+}
+
+bool ObjectLifetimes::PreCallValidateSetDebugUtilsObjectTagEXT(VkDevice device,
+                                                               const VkDebugUtilsObjectTagInfoEXT *pTagInfo) const {
+    bool skip = false;
+    skip |=
+        ValidateObject(device, kVulkanObjectTypeDevice, false, "VUID-vkSetDebugUtilsObjectTagEXT-device-parameter", kVUIDUndefined);
+    skip |= ValidateAnonymousObject(pTagInfo->objectHandle, pTagInfo->objectType, false,
+                                    "VUID-VkDebugUtilsObjectTagInfoEXT-objectHandle-01910", kVUIDUndefined);
+
+    return skip;
+}
diff --git a/src/third_party/vulkan-validation-layers/src/layers/parameter_name.h b/src/third_party/vulkan-validation-layers/src/layers/parameter_name.h
new file mode 100644
index 0000000..42320bb
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/parameter_name.h
@@ -0,0 +1,130 @@
+/* Copyright (c) 2016-2019 The Khronos Group Inc.
+ * Copyright (c) 2016-2019 Valve Corporation
+ * Copyright (c) 2016-2019 LunarG, Inc.
+ * Copyright (c) 2016-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef PARAMETER_NAME_H
+#define PARAMETER_NAME_H
+
+#include <cassert>
+#include <sstream>
+#include <string>
+#include <vector>
+
+/**
+ * Parameter name string supporting deferred formatting for array subscripts.
+ *
+ * Custom parameter name class with support for deferred formatting of names containing array subscripts.  The class stores
+ * a format string and a pointer to an array of index values, and performs string formatting when an accessor function is called to
+ * retrieve the name string.  This class was primarily designed to be used with validation functions that receive a parameter name
+ * string and value as arguments, and print an error message that includes the parameter name when the value fails a validation
+ * test.  Using standard strings with these validation functions requires that parameter names containing array subscripts be
+ * formatted before each validation function is called, performing the string formatting even when the value passes validation
+ * and the string is not used:
+ *         sprintf(name, "pCreateInfo[%d].sType", i);
+ *         validate_stype(name, pCreateInfo[i].sType);
+ *
+ * With the ParameterName class, a format string and a pointer to an array of format values are stored by the ParameterName object
+ * that is provided to the validation function.  String formatting is then performed only when the validation function retrieves the
+ * name string from the ParameterName object:
+ *         validate_stype(ParameterName("pCreateInfo[%i].sType", IndexVector{ i }), pCreateInfo[i].sType);
+ *
+ * Since the IndexVector is not copied into the object, the lifetime of the ParameterName should not outlast the lifetime of
+ * the IndexVector, but that's fine given how it is used in parameter validation.
+ */
+class ParameterName {
+  public:
+    /// Container for index values to be used with parameter name string formatting.
+    typedef std::initializer_list<size_t> IndexVector;
+
+    /// Format specifier for the parameter name string, to be replaced by an index value.  The parameter name string must contain
+    /// one format specifier for each index value specified.
+    const char *const IndexFormatSpecifier = "%i";
+
+  public:
+    /**
+     * Construct a ParameterName object from a string literal, without formatting.
+     *
+     * @param source Parameter name string without format specifiers.
+     *
+     * @pre The source string must not contain the %i format specifier.
+     */
+    ParameterName(const char *source) : source_(source), args_(nullptr), num_indices_(0) { assert(IsValid()); }
+
+    /**
+     * Construct a ParameterName object from a string literal, with formatting.
+     *
+     * @param source Parameter name string with format specifiers.
+     * @param args Array index values to be used for formatting.
+     *
+     * @pre The number of %i format specifiers contained by the source string must match the number of elements contained
+     *      by the index vector.
+     */
+    ParameterName(const char *source, const IndexVector &args)
+        : source_(source), args_(args.size() ? args.begin() : (const size_t *)nullptr), num_indices_(args.size()) {
+        assert(IsValid());
+    }
+
+    /// Retrieve the formatted name string.
+    std::string get_name() const { return (num_indices_ == 0) ? std::string(source_) : Format(); }
+
+  private:
+    /// Replace the %i format specifiers in the source string with the values from the index vector.
+    std::string Format() const {
+        std::string::size_type current = 0;
+        std::string::size_type last = 0;
+        std::stringstream format;
+
+        std::string source(source_);
+
+        for (size_t i = 0; i < num_indices_; ++i) {
+            auto index = args_[i];
+            current = source.find(IndexFormatSpecifier, last);
+            if (current == std::string::npos) {
+                break;
+            }
+            format << source.substr(last, (current - last)) << index;
+            last = current + strlen(IndexFormatSpecifier);
+        }
+
+        format << source.substr(last, std::string::npos);
+
+        return format.str();
+    }
+
+    /// Check that the number of %i format specifiers in the source string matches the number of elements in the index vector.
+    bool IsValid() {
+        // Count the number of occurrences of the format specifier
+        uint32_t count = 0;
+
+        std::string source(source_);
+
+        std::string::size_type pos = source.find(IndexFormatSpecifier);
+
+        while (pos != std::string::npos) {
+            ++count;
+            pos = source.find(IndexFormatSpecifier, pos + 1);
+        }
+
+        return (count == num_indices_);
+    }
+
+  private:
+    const char *source_;  ///< Format string.
+    const size_t *args_;  ///< Array index values for formatting.
+    size_t num_indices_;  ///< Number of array index values.
+};
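+
+// Usage sketch (illustrative; 'i', 'failed', and 'error_stream' are hypothetical): formatting is deferred until
+// get_name() is called, so the subscripted name is only built on a failing validation path.
+//
+//     ParameterName::IndexVector indices{i};
+//     ParameterName param("pQueueCreateInfos[%i].queueFamilyIndex", indices);
+//     if (failed) error_stream << param.get_name();  // "%i" is replaced with the value of i only here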
+
+#endif  // PARAMETER_NAME_H
diff --git a/src/third_party/vulkan-validation-layers/src/layers/parameter_validation_utils.cpp b/src/third_party/vulkan-validation-layers/src/layers/parameter_validation_utils.cpp
new file mode 100644
index 0000000..18e215e
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/parameter_validation_utils.cpp
@@ -0,0 +1,3771 @@
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (C) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@LunarG.com>
+ * Author: John Zulauf <jzulauf@lunarg.com>
+ */
+
+#define NOMINMAX
+
+#include <cmath>
+
+#include "chassis.h"
+#include "stateless_validation.h"
+#include "layer_chassis_dispatch.h"
+
+static const int MaxParamCheckerStringLength = 256;
+
+template <typename T>
+inline bool in_inclusive_range(const T &value, const T &min, const T &max) {
+    // Using only < for generality and || for early abort
+    return !((value < min) || (max < value));
+}
+
+bool StatelessValidation::validate_string(const char *apiName, const ParameterName &stringName, const std::string &vuid,
+                                          const char *validateString) const {
+    bool skip = false;
+
+    VkStringErrorFlags result = vk_string_validate(MaxParamCheckerStringLength, validateString);
+
+    if (result == VK_STRING_ERROR_NONE) {
+        return skip;
+    } else if (result & VK_STRING_ERROR_LENGTH) {
+        skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                       "%s: string %s exceeds max length %d", apiName, stringName.get_name().c_str(), MaxParamCheckerStringLength);
+    } else if (result & VK_STRING_ERROR_BAD_DATA) {
+        skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                       "%s: string %s contains invalid characters or is badly formed", apiName, stringName.get_name().c_str());
+    }
+    return skip;
+}
+
+bool StatelessValidation::validate_api_version(uint32_t api_version, uint32_t effective_api_version) const {
+    bool skip = false;
+    uint32_t api_version_nopatch = VK_MAKE_VERSION(VK_VERSION_MAJOR(api_version), VK_VERSION_MINOR(api_version), 0);
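+    // e.g. an api_version encoding version 1.1.108 is reduced to VK_MAKE_VERSION(1, 1, 0), so only major.minor is compared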
+    if (api_version_nopatch != effective_api_version) {
+        if (api_version_nopatch < VK_API_VERSION_1_0) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+                            HandleToUint64(instance), kVUIDUndefined,
+                            "Invalid CreateInstance->pCreateInfo->pApplicationInfo.apiVersion number (0x%08x). "
+                            "Using VK_API_VERSION_%" PRIu32 "_%" PRIu32 ".",
+                            api_version, VK_VERSION_MAJOR(effective_api_version), VK_VERSION_MINOR(effective_api_version));
+        } else {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+                            HandleToUint64(instance), kVUIDUndefined,
+                            "Unrecognized CreateInstance->pCreateInfo->pApplicationInfo.apiVersion number (0x%08x). "
+                            "Assuming VK_API_VERSION_%" PRIu32 "_%" PRIu32 ".",
+                            api_version, VK_VERSION_MAJOR(effective_api_version), VK_VERSION_MINOR(effective_api_version));
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::validate_instance_extensions(const VkInstanceCreateInfo *pCreateInfo) const {
+    bool skip = false;
+    // Create and use a local instance extension object, as an actual instance has not been created yet
+    uint32_t specified_version = (pCreateInfo->pApplicationInfo ? pCreateInfo->pApplicationInfo->apiVersion : VK_API_VERSION_1_0);
+    InstanceExtensions local_instance_extensions;
+    local_instance_extensions.InitFromInstanceCreateInfo(specified_version, pCreateInfo);
+
+    for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
+        skip |= validate_extension_reqs(local_instance_extensions, "VUID-vkCreateInstance-ppEnabledExtensionNames-01388",
+                                        "instance", pCreateInfo->ppEnabledExtensionNames[i]);
+    }
+
+    return skip;
+}
+
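+// Resolves extension_name to the corresponding ExtEnabled member of the extension struct via the pointer-to-member
+// returned by get_info(); callers typically wrap the result in IsExtEnabled(), as in the VK_KHR_maintenance1 /
+// VK_AMD_negative_viewport_height check in manual_PreCallValidateCreateDevice below.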
+template <typename ExtensionState>
+ExtEnabled extension_state_by_name(const ExtensionState &extensions, const char *extension_name) {
+    if (!extension_name) return kNotEnabled;  // null strings specify nothing
+    auto info = ExtensionState::get_info(extension_name);
+    ExtEnabled state =
+        info.state ? extensions.*(info.state) : kNotEnabled;  // unknown extensions can't be enabled in extension struct
+    return state;
+}
+
+bool StatelessValidation::manual_PreCallValidateCreateInstance(const VkInstanceCreateInfo *pCreateInfo,
+                                                               const VkAllocationCallbacks *pAllocator,
+                                                               VkInstance *pInstance) const {
+    bool skip = false;
+    // Note: From the spec--
+    //  Providing a NULL VkInstanceCreateInfo::pApplicationInfo or providing an apiVersion of 0 is equivalent to providing
+    //  an apiVersion of VK_MAKE_VERSION(1, 0, 0).  (a.k.a. VK_API_VERSION_1_0)
+    uint32_t local_api_version = (pCreateInfo->pApplicationInfo && pCreateInfo->pApplicationInfo->apiVersion)
+                                     ? pCreateInfo->pApplicationInfo->apiVersion
+                                     : VK_API_VERSION_1_0;
+    skip |= validate_api_version(local_api_version, api_version);
+    skip |= validate_instance_extensions(pCreateInfo);
+    return skip;
+}
+
+void StatelessValidation::PostCallRecordCreateInstance(const VkInstanceCreateInfo *pCreateInfo,
+                                                       const VkAllocationCallbacks *pAllocator, VkInstance *pInstance,
+                                                       VkResult result) {
+    auto instance_data = GetLayerDataPtr(get_dispatch_key(*pInstance), layer_data_map);
+    // Copy extension data into local object
+    if (result != VK_SUCCESS) return;
+    this->instance_extensions = instance_data->instance_extensions;
+}
+
+void StatelessValidation::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
+    for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
+        auto swapchains_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
+        if (swapchains_result == VK_SUBOPTIMAL_KHR) {
+            log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+                    HandleToUint64(pPresentInfo->pSwapchains[i]), kVUID_PVPerfWarn_SuboptimalSwapchain,
+                    "vkQueuePresentKHR: %s :VK_SUBOPTIMAL_KHR was returned. VK_SUBOPTIMAL_KHR - Presentation will still succeed, "
+                    "subject to the window resize behavior, but the swapchain is no longer configured optimally for the surface it "
+                    "targets. Applications should query updated surface information and recreate their swapchain at the next "
+                    "convenient opportunity.",
+                    report_data->FormatHandle(pPresentInfo->pSwapchains[i]).c_str());
+        }
+    }
+}
+
+void StatelessValidation::PostCallRecordCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo,
+                                                     const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, VkResult result) {
+    auto device_data = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
+    if (result != VK_SUCCESS) return;
+    ValidationObject *validation_data = GetValidationObject(device_data->object_dispatch, LayerObjectTypeParameterValidation);
+    StatelessValidation *stateless_validation = static_cast<StatelessValidation *>(validation_data);
+
+    // Parameter validation also uses extension data
+    stateless_validation->device_extensions = this->device_extensions;
+
+    VkPhysicalDeviceProperties device_properties = {};
+    // Need to get instance and do a getlayerdata call...
+    DispatchGetPhysicalDeviceProperties(physicalDevice, &device_properties);
+    memcpy(&stateless_validation->device_limits, &device_properties.limits, sizeof(VkPhysicalDeviceLimits));
+
+    if (device_extensions.vk_nv_shading_rate_image) {
+        // Get the needed shading rate image limits
+        auto shading_rate_image_props = lvl_init_struct<VkPhysicalDeviceShadingRateImagePropertiesNV>();
+        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&shading_rate_image_props);
+        DispatchGetPhysicalDeviceProperties2KHR(physicalDevice, &prop2);
+        phys_dev_ext_props.shading_rate_image_props = shading_rate_image_props;
+    }
+
+    if (device_extensions.vk_nv_mesh_shader) {
+        // Get the needed mesh shader limits
+        auto mesh_shader_props = lvl_init_struct<VkPhysicalDeviceMeshShaderPropertiesNV>();
+        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&mesh_shader_props);
+        DispatchGetPhysicalDeviceProperties2KHR(physicalDevice, &prop2);
+        phys_dev_ext_props.mesh_shader_props = mesh_shader_props;
+    }
+
+    if (device_extensions.vk_nv_ray_tracing) {
+        // Get the needed ray tracing limits
+        auto ray_tracing_props = lvl_init_struct<VkPhysicalDeviceRayTracingPropertiesNV>();
+        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&ray_tracing_props);
+        DispatchGetPhysicalDeviceProperties2KHR(physicalDevice, &prop2);
+        phys_dev_ext_props.ray_tracing_props = ray_tracing_props;
+    }
+
+    stateless_validation->phys_dev_ext_props = this->phys_dev_ext_props;
+
+    // Save app-enabled features in this device's validation object
+    // The enabled features can come from either pEnabledFeatures, or from the pNext chain
+    const auto *features2 = lvl_find_in_chain<VkPhysicalDeviceFeatures2>(pCreateInfo->pNext);
+    safe_VkPhysicalDeviceFeatures2 tmp_features2_state;
+    tmp_features2_state.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
+    if (features2) {
+        tmp_features2_state.features = features2->features;
+    } else if (pCreateInfo->pEnabledFeatures) {
+        tmp_features2_state.features = *pCreateInfo->pEnabledFeatures;
+    } else {
+        tmp_features2_state.features = {};
+    }
+    // Use pCreateInfo->pNext to get full chain
+    tmp_features2_state.pNext = SafePnextCopy(pCreateInfo->pNext);
+    stateless_validation->physical_device_features2 = tmp_features2_state;
+}
+
+bool StatelessValidation::manual_PreCallValidateCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo,
+                                                             const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) const {
+    bool skip = false;
+
+    for (size_t i = 0; i < pCreateInfo->enabledLayerCount; i++) {
+        skip |= validate_string("vkCreateDevice", "pCreateInfo->ppEnabledLayerNames",
+                                "VUID-VkDeviceCreateInfo-ppEnabledLayerNames-parameter", pCreateInfo->ppEnabledLayerNames[i]);
+    }
+
+    for (size_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
+        skip |=
+            validate_string("vkCreateDevice", "pCreateInfo->ppEnabledExtensionNames",
+                            "VUID-VkDeviceCreateInfo-ppEnabledExtensionNames-parameter", pCreateInfo->ppEnabledExtensionNames[i]);
+        skip |= validate_extension_reqs(device_extensions, "VUID-vkCreateDevice-ppEnabledExtensionNames-01387", "device",
+                                        pCreateInfo->ppEnabledExtensionNames[i]);
+    }
+
+    {
+        bool maint1 = IsExtEnabled(extension_state_by_name(device_extensions, VK_KHR_MAINTENANCE1_EXTENSION_NAME));
+        bool negative_viewport =
+            IsExtEnabled(extension_state_by_name(device_extensions, VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME));
+        if (maint1 && negative_viewport) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkDeviceCreateInfo-ppEnabledExtensionNames-00374",
+                            "VkDeviceCreateInfo->ppEnabledExtensionNames must not simultaneously include VK_KHR_maintenance1 and "
+                            "VK_AMD_negative_viewport_height.");
+        }
+    }
+
+    {
+        bool khr_bda = IsExtEnabled(extension_state_by_name(device_extensions, VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME));
+        bool ext_bda = IsExtEnabled(extension_state_by_name(device_extensions, VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME));
+        if (khr_bda && ext_bda) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkDeviceCreateInfo-ppEnabledExtensionNames-03328",
+                            "VkDeviceCreateInfo->ppEnabledExtensionNames must not contain both VK_KHR_buffer_device_address and "
+                            "VK_EXT_buffer_device_address.");
+        }
+    }
+
+    if (pCreateInfo->pNext != NULL && pCreateInfo->pEnabledFeatures) {
+        // Check for get_physical_device_properties2 struct
+        const auto *features2 = lvl_find_in_chain<VkPhysicalDeviceFeatures2KHR>(pCreateInfo->pNext);
+        if (features2) {
+            // Cannot include VkPhysicalDeviceFeatures2KHR and have non-null pEnabledFeatures
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            kVUID_PVError_InvalidUsage,
+                            "VkDeviceCreateInfo->pNext includes a VkPhysicalDeviceFeatures2KHR struct when "
+                            "pCreateInfo->pEnabledFeatures is non-NULL.");
+        }
+    }
+
+    auto features2 = lvl_find_in_chain<VkPhysicalDeviceFeatures2>(pCreateInfo->pNext);
+    if (features2) {
+        if (!instance_extensions.vk_khr_get_physical_device_properties_2) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            kVUID_PVError_ExtensionNotEnabled,
+                            "VkDeviceCreateInfo->pNext includes a VkPhysicalDeviceFeatures2 struct, "
+                            "VK_KHR_get_physical_device_properties2 must be enabled when it creates an instance.");
+        }
+    }
+
+    auto vertex_attribute_divisor_features =
+        lvl_find_in_chain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
+    if (vertex_attribute_divisor_features) {
+        bool extension_found = false;
+        for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; ++i) {
+            if (0 == strncmp(pCreateInfo->ppEnabledExtensionNames[i], VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME,
+                             VK_MAX_EXTENSION_NAME_SIZE)) {
+                extension_found = true;
+                break;
+            }
+        }
+        if (!extension_found) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            kVUID_PVError_ExtensionNotEnabled,
+                            "VkDeviceCreateInfo->pNext includes a VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT "
+                            "struct, VK_EXT_vertex_attribute_divisor must be enabled when it creates a device.");
+        }
+    }
+
+    // Validate pCreateInfo->pQueueCreateInfos
+    if (pCreateInfo->pQueueCreateInfos) {
+        std::unordered_set<uint32_t> set;
+
+        for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
+            const uint32_t requested_queue_family = pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex;
+            if (requested_queue_family == VK_QUEUE_FAMILY_IGNORED) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+                                HandleToUint64(physicalDevice), "VUID-VkDeviceQueueCreateInfo-queueFamilyIndex-00381",
+                                "vkCreateDevice: pCreateInfo->pQueueCreateInfos[%" PRIu32
+                                "].queueFamilyIndex is VK_QUEUE_FAMILY_IGNORED, but it is required to provide a valid queue family "
+                                "index value.",
+                                i);
+            } else if (set.count(requested_queue_family)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+                                HandleToUint64(physicalDevice), "VUID-VkDeviceCreateInfo-queueFamilyIndex-00372",
+                                "vkCreateDevice: pCreateInfo->pQueueCreateInfos[%" PRIu32 "].queueFamilyIndex (=%" PRIu32
+                                ") is not unique within pCreateInfo->pQueueCreateInfos array.",
+                                i, requested_queue_family);
+            } else {
+                set.insert(requested_queue_family);
+            }
+
+            if (pCreateInfo->pQueueCreateInfos[i].pQueuePriorities != nullptr) {
+                for (uint32_t j = 0; j < pCreateInfo->pQueueCreateInfos[i].queueCount; ++j) {
+                    const float queue_priority = pCreateInfo->pQueueCreateInfos[i].pQueuePriorities[j];
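+                    // Note: the negated comparisons also reject a NaN priority, which a plain
+                    // 'queue_priority < 0.f || queue_priority > 1.f' check would let through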
+                    if (!(queue_priority >= 0.f) || !(queue_priority <= 1.f)) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+                                        HandleToUint64(physicalDevice), "VUID-VkDeviceQueueCreateInfo-pQueuePriorities-00383",
+                                        "vkCreateDevice: pCreateInfo->pQueueCreateInfos[%" PRIu32 "].pQueuePriorities[%" PRIu32
+                                        "] (=%f) is not between 0 and 1 (inclusive).",
+                                        i, j, queue_priority);
+                    }
+                }
+            }
+        }
+    }
+
+    return skip;
+}
+
+bool StatelessValidation::require_device_extension(bool flag, char const *function_name, char const *extension_name) const {
+    if (!flag) {
+        return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                       kVUID_PVError_ExtensionNotEnabled,
+                       "%s() called even though the %s extension was not enabled for this VkDevice.", function_name,
+                       extension_name);
+    }
+
+    return false;
+}
+
+bool StatelessValidation::manual_PreCallValidateCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
+                                                             const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer) const {
+    bool skip = false;
+
+    const LogMiscParams log_misc{VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, VK_NULL_HANDLE, "vkCreateBuffer"};
+
+    if (pCreateInfo != nullptr) {
+        skip |= ValidateGreaterThanZero(pCreateInfo->size, "pCreateInfo->size", "VUID-VkBufferCreateInfo-size-00912", log_misc);
+
+        // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml
+        if (pCreateInfo->sharingMode == VK_SHARING_MODE_CONCURRENT) {
+            // If sharingMode is VK_SHARING_MODE_CONCURRENT, queueFamilyIndexCount must be greater than 1
+            if (pCreateInfo->queueFamilyIndexCount <= 1) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkBufferCreateInfo-sharingMode-00914",
+                                "vkCreateBuffer: if pCreateInfo->sharingMode is VK_SHARING_MODE_CONCURRENT, "
+                                "pCreateInfo->queueFamilyIndexCount must be greater than 1.");
+            }
+
+            // If sharingMode is VK_SHARING_MODE_CONCURRENT, pQueueFamilyIndices must be a pointer to an array of
+            // queueFamilyIndexCount uint32_t values
+            if (pCreateInfo->pQueueFamilyIndices == nullptr) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkBufferCreateInfo-sharingMode-00913",
+                                "vkCreateBuffer: if pCreateInfo->sharingMode is VK_SHARING_MODE_CONCURRENT, "
+                                "pCreateInfo->pQueueFamilyIndices must be a pointer to an array of "
+                                "pCreateInfo->queueFamilyIndexCount uint32_t values.");
+            }
+        }
+
+        // If flags contains VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT or VK_BUFFER_CREATE_SPARSE_ALIASED_BIT, it must also contain
+        // VK_BUFFER_CREATE_SPARSE_BINDING_BIT
+        if (((pCreateInfo->flags & (VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT | VK_BUFFER_CREATE_SPARSE_ALIASED_BIT)) != 0) &&
+            ((pCreateInfo->flags & VK_BUFFER_CREATE_SPARSE_BINDING_BIT) != VK_BUFFER_CREATE_SPARSE_BINDING_BIT)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkBufferCreateInfo-flags-00918",
+                            "vkCreateBuffer: if pCreateInfo->flags contains VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT or "
+                            "VK_BUFFER_CREATE_SPARSE_ALIASED_BIT, it must also contain VK_BUFFER_CREATE_SPARSE_BINDING_BIT.");
+        }
+    }
+
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
+                                                            const VkAllocationCallbacks *pAllocator, VkImage *pImage) const {
+    bool skip = false;
+
+    const LogMiscParams log_misc{VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, VK_NULL_HANDLE, "vkCreateImage"};
+
+    if (pCreateInfo != nullptr) {
+        // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml
+        if (pCreateInfo->sharingMode == VK_SHARING_MODE_CONCURRENT) {
+            // If sharingMode is VK_SHARING_MODE_CONCURRENT, queueFamilyIndexCount must be greater than 1
+            if (pCreateInfo->queueFamilyIndexCount <= 1) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkImageCreateInfo-sharingMode-00942",
+                                "vkCreateImage(): if pCreateInfo->sharingMode is VK_SHARING_MODE_CONCURRENT, "
+                                "pCreateInfo->queueFamilyIndexCount must be greater than 1.");
+            }
+
+            // If sharingMode is VK_SHARING_MODE_CONCURRENT, pQueueFamilyIndices must be a pointer to an array of
+            // queueFamilyIndexCount uint32_t values
+            if (pCreateInfo->pQueueFamilyIndices == nullptr) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkImageCreateInfo-sharingMode-00941",
+                                "vkCreateImage(): if pCreateInfo->sharingMode is VK_SHARING_MODE_CONCURRENT, "
+                                "pCreateInfo->pQueueFamilyIndices must be a pointer to an array of "
+                                "pCreateInfo->queueFamilyIndexCount uint32_t values.");
+            }
+        }
+
+        skip |= ValidateGreaterThanZero(pCreateInfo->extent.width, "pCreateInfo->extent.width",
+                                        "VUID-VkImageCreateInfo-extent-00944", log_misc);
+        skip |= ValidateGreaterThanZero(pCreateInfo->extent.height, "pCreateInfo->extent.height",
+                                        "VUID-VkImageCreateInfo-extent-00945", log_misc);
+        skip |= ValidateGreaterThanZero(pCreateInfo->extent.depth, "pCreateInfo->extent.depth",
+                                        "VUID-VkImageCreateInfo-extent-00946", log_misc);
+
+        skip |= ValidateGreaterThanZero(pCreateInfo->mipLevels, "pCreateInfo->mipLevels", "VUID-VkImageCreateInfo-mipLevels-00947",
+                                        log_misc);
+        skip |= ValidateGreaterThanZero(pCreateInfo->arrayLayers, "pCreateInfo->arrayLayers",
+                                        "VUID-VkImageCreateInfo-arrayLayers-00948", log_misc);
+
+        // InitialLayout must be PREINITIALIZED or UNDEFINED
+        if ((pCreateInfo->initialLayout != VK_IMAGE_LAYOUT_UNDEFINED) &&
+            (pCreateInfo->initialLayout != VK_IMAGE_LAYOUT_PREINITIALIZED)) {
+            skip |= log_msg(
+                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                "VUID-VkImageCreateInfo-initialLayout-00993",
+                "vkCreateImage(): initialLayout is %s, must be VK_IMAGE_LAYOUT_UNDEFINED or VK_IMAGE_LAYOUT_PREINITIALIZED.",
+                string_VkImageLayout(pCreateInfo->initialLayout));
+        }
+
+        // If imageType is VK_IMAGE_TYPE_1D, both extent.height and extent.depth must be 1
+        if ((pCreateInfo->imageType == VK_IMAGE_TYPE_1D) &&
+            ((pCreateInfo->extent.height != 1) || (pCreateInfo->extent.depth != 1))) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkImageCreateInfo-imageType-00956",
+                            "vkCreateImage(): if pCreateInfo->imageType is VK_IMAGE_TYPE_1D, both pCreateInfo->extent.height and "
+                            "pCreateInfo->extent.depth must be 1.");
+        }
+
+        if (pCreateInfo->imageType == VK_IMAGE_TYPE_2D) {
+            if (pCreateInfo->flags & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT) {
+                if (pCreateInfo->extent.width != pCreateInfo->extent.height) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                    VK_NULL_HANDLE, "VUID-VkImageCreateInfo-imageType-00954",
+                                    "vkCreateImage(): pCreateInfo->flags contains VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT, but "
+                                    "pCreateInfo->extent.width (=%" PRIu32 ") and pCreateInfo->extent.height (=%" PRIu32
+                                    ") are not equal.",
+                                    pCreateInfo->extent.width, pCreateInfo->extent.height);
+                }
+
+                if (pCreateInfo->arrayLayers < 6) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+                                    VK_NULL_HANDLE, "VUID-VkImageCreateInfo-imageType-00954",
+                                    "vkCreateImage(): pCreateInfo->flags contains VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT, but "
+                                    "pCreateInfo->arrayLayers (=%" PRIu32 ") is not greater than or equal to 6.",
+                                    pCreateInfo->arrayLayers);
+                }
+            }
+
+            if (pCreateInfo->extent.depth != 1) {
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkImageCreateInfo-imageType-00957",
+                            "vkCreateImage(): if pCreateInfo->imageType is VK_IMAGE_TYPE_2D, pCreateInfo->extent.depth must be 1.");
+            }
+        }
+
+        // 3D image may have only 1 layer
+        if ((pCreateInfo->imageType == VK_IMAGE_TYPE_3D) && (pCreateInfo->arrayLayers != 1)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkImageCreateInfo-imageType-00961",
+                            "vkCreateImage(): if pCreateInfo->imageType is VK_IMAGE_TYPE_3D, pCreateInfo->arrayLayers must be 1.");
+        }
+
+        // If multi-sample, validate type, usage, tiling and mip levels.
+        if ((pCreateInfo->samples != VK_SAMPLE_COUNT_1_BIT) &&
+            ((pCreateInfo->imageType != VK_IMAGE_TYPE_2D) || (pCreateInfo->flags & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT) ||
+             (pCreateInfo->mipLevels != 1) || (pCreateInfo->tiling != VK_IMAGE_TILING_OPTIMAL))) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkImageCreateInfo-samples-02257",
+                            "vkCreateImage(): Multi-sample image with incompatible type, usage, tiling, or mips.");
+        }
+
+        if (0 != (pCreateInfo->usage & VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT)) {
+            VkImageUsageFlags legal_flags = (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT |
+                                             VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT);
+            // At least one of the legal attachment bits must be set
+            if (0 == (pCreateInfo->usage & legal_flags)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkImageCreateInfo-usage-00966",
+                                "vkCreateImage(): Transient attachment image without a compatible attachment flag set.");
+            }
+            // No flags other than the legal attachment bits may be set
+            legal_flags |= VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT;
+            if (0 != (pCreateInfo->usage & ~legal_flags)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkImageCreateInfo-usage-00963",
+                                "vkCreateImage(): Transient attachment image with incompatible usage flags set.");
+            }
+        }
+
+        // mipLevels must be less than or equal to the number of levels in the complete mipmap chain
+        uint32_t maxDim = std::max(std::max(pCreateInfo->extent.width, pCreateInfo->extent.height), pCreateInfo->extent.depth);
+        // Max mip levels is different for corner-sampled images vs normal images.
+        uint32_t maxMipLevels = (pCreateInfo->flags & VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV) ? (uint32_t)(ceil(log2(maxDim)))
+                                                                                             : (uint32_t)(floor(log2(maxDim)) + 1);
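+        // For example, maxDim == 1024 allows floor(log2(1024)) + 1 = 11 levels (1024 down to 1x1),
+        // while a corner-sampled image allows only ceil(log2(1024)) = 10 levels.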
+        if (maxDim > 0 && pCreateInfo->mipLevels > maxMipLevels) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkImageCreateInfo-mipLevels-00958",
+                        "vkCreateImage(): pCreateInfo->mipLevels must be less than or equal to "
+                        "floor(log2(max(pCreateInfo->extent.width, pCreateInfo->extent.height, pCreateInfo->extent.depth)))+1.");
+        }
+
+        if ((pCreateInfo->flags & VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT) && (pCreateInfo->imageType != VK_IMAGE_TYPE_3D)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, VK_NULL_HANDLE,
+                            "VUID-VkImageCreateInfo-flags-00950",
+                            "vkCreateImage(): pCreateInfo->flags contains VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT but "
+                            "pCreateInfo->imageType is not VK_IMAGE_TYPE_3D.");
+        }
+
+        if ((pCreateInfo->flags & VK_IMAGE_CREATE_SPARSE_BINDING_BIT) && (!physical_device_features.sparseBinding)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, VK_NULL_HANDLE,
+                            "VUID-VkImageCreateInfo-flags-00969",
+                            "vkCreateImage(): pCreateInfo->flags contains VK_IMAGE_CREATE_SPARSE_BINDING_BIT, but the "
+                            "VkPhysicalDeviceFeatures::sparseBinding feature is disabled.");
+        }
+
+        // If flags contains VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT or VK_IMAGE_CREATE_SPARSE_ALIASED_BIT, it must also contain
+        // VK_IMAGE_CREATE_SPARSE_BINDING_BIT
+        if (((pCreateInfo->flags & (VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT | VK_IMAGE_CREATE_SPARSE_ALIASED_BIT)) != 0) &&
+            ((pCreateInfo->flags & VK_IMAGE_CREATE_SPARSE_BINDING_BIT) != VK_IMAGE_CREATE_SPARSE_BINDING_BIT)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkImageCreateInfo-flags-00987",
+                            "vkCreateImage: if pCreateInfo->flags contains VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT or "
+                            "VK_IMAGE_CREATE_SPARSE_ALIASED_BIT, it must also contain VK_IMAGE_CREATE_SPARSE_BINDING_BIT.");
+        }
+
+        // Check for combinations of attributes that are incompatible with having VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT set
+        if ((pCreateInfo->flags & VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT) != 0) {
+            // Linear tiling is unsupported
+            if (VK_IMAGE_TILING_LINEAR == pCreateInfo->tiling) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                kVUID_PVError_InvalidUsage,
+                                "vkCreateImage: if pCreateInfo->flags contains VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT then image "
+                                "tiling of VK_IMAGE_TILING_LINEAR is not supported");
+            }
+
+            // Sparse 1D image isn't valid
+            if (VK_IMAGE_TYPE_1D == pCreateInfo->imageType) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkImageCreateInfo-imageType-00970",
+                                "vkCreateImage: cannot specify VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT for 1D image.");
+            }
+
+            // Sparse 2D image when device doesn't support it
+            if ((VK_FALSE == physical_device_features.sparseResidencyImage2D) && (VK_IMAGE_TYPE_2D == pCreateInfo->imageType)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkImageCreateInfo-imageType-00971",
+                                "vkCreateImage: cannot specify VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT for 2D image if corresponding "
+                                "feature is not enabled on the device.");
+            }
+
+            // Sparse 3D image when device doesn't support it
+            if ((VK_FALSE == physical_device_features.sparseResidencyImage3D) && (VK_IMAGE_TYPE_3D == pCreateInfo->imageType)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkImageCreateInfo-imageType-00972",
+                                "vkCreateImage: cannot specify VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT for 3D image if corresponding "
+                                "feature is not enabled on the device.");
+            }
+
+            // Multi-sample 2D image when device doesn't support it
+            if (VK_IMAGE_TYPE_2D == pCreateInfo->imageType) {
+                if ((VK_FALSE == physical_device_features.sparseResidency2Samples) &&
+                    (VK_SAMPLE_COUNT_2_BIT == pCreateInfo->samples)) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    "VUID-VkImageCreateInfo-imageType-00973",
+                                    "vkCreateImage: cannot specify VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT for 2-sample image if "
+                                    "corresponding feature is not enabled on the device.");
+                } else if ((VK_FALSE == physical_device_features.sparseResidency4Samples) &&
+                           (VK_SAMPLE_COUNT_4_BIT == pCreateInfo->samples)) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    "VUID-VkImageCreateInfo-imageType-00974",
+                                    "vkCreateImage: cannot specify VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT for 4-sample image if "
+                                    "corresponding feature is not enabled on the device.");
+                } else if ((VK_FALSE == physical_device_features.sparseResidency8Samples) &&
+                           (VK_SAMPLE_COUNT_8_BIT == pCreateInfo->samples)) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    "VUID-VkImageCreateInfo-imageType-00975",
+                                    "vkCreateImage: cannot specify VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT for 8-sample image if "
+                                    "corresponding feature is not enabled on the device.");
+                } else if ((VK_FALSE == physical_device_features.sparseResidency16Samples) &&
+                           (VK_SAMPLE_COUNT_16_BIT == pCreateInfo->samples)) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    "VUID-VkImageCreateInfo-imageType-00976",
+                                    "vkCreateImage: cannot specify VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT for 16-sample image if "
+                                    "corresponding feature is not enabled on the device.");
+                }
+            }
+        }
+
+        if (pCreateInfo->usage & VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV) {
+            if (pCreateInfo->imageType != VK_IMAGE_TYPE_2D) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkImageCreateInfo-imageType-02082",
+                                "vkCreateImage: if usage includes VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, "
+                                "imageType must be VK_IMAGE_TYPE_2D.");
+            }
+            if (pCreateInfo->samples != VK_SAMPLE_COUNT_1_BIT) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkImageCreateInfo-samples-02083",
+                                "vkCreateImage: if usage includes VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, "
+                                "samples must be VK_SAMPLE_COUNT_1_BIT.");
+            }
+            if (pCreateInfo->tiling != VK_IMAGE_TILING_OPTIMAL) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkImageCreateInfo-tiling-02084",
+                                "vkCreateImage: if usage includes VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, "
+                                "tiling must be VK_IMAGE_TILING_OPTIMAL.");
+            }
+        }
+
+        if (pCreateInfo->flags & VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV) {
+            if (pCreateInfo->imageType != VK_IMAGE_TYPE_2D && pCreateInfo->imageType != VK_IMAGE_TYPE_3D) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkImageCreateInfo-flags-02050",
+                                "vkCreateImage: If flags contains VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV, "
+                                "imageType must be VK_IMAGE_TYPE_2D or VK_IMAGE_TYPE_3D.");
+            }
+
+            if ((pCreateInfo->flags & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT) || FormatIsDepthOrStencil(pCreateInfo->format)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkImageCreateInfo-flags-02051",
+                                "vkCreateImage: If flags contains VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV, "
+                                "it must not also contain VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT and format must "
+                                "not be a depth/stencil format.");
+            }
+
+            if (pCreateInfo->imageType == VK_IMAGE_TYPE_2D && (pCreateInfo->extent.width == 1 || pCreateInfo->extent.height == 1)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkImageCreateInfo-flags-02052",
+                                "vkCreateImage: If flags contains VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV and "
+                                "imageType is VK_IMAGE_TYPE_2D, extent.width and extent.height must be "
+                                "greater than 1.");
+            } else if (pCreateInfo->imageType == VK_IMAGE_TYPE_3D &&
+                       (pCreateInfo->extent.width == 1 || pCreateInfo->extent.height == 1 || pCreateInfo->extent.depth == 1)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkImageCreateInfo-flags-02053",
+                                "vkCreateImage: If flags contains VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV and "
+                                "imageType is VK_IMAGE_TYPE_3D, extent.width, extent.height, and extent.depth "
+                                "must be greater than 1.");
+            }
+        }
+    }
+
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateViewport(const VkViewport &viewport, const char *fn_name,
+                                                         const ParameterName &parameter_name,
+                                                         VkDebugReportObjectTypeEXT object_type, uint64_t object = 0) const {
+    bool skip = false;
+
+    // Note: for numerical correctness
+    //       - float comparisons should expect NaN (comparison always false).
+    //       - VkPhysicalDeviceLimits::maxViewportDimensions is uint32_t, not float -> careful.
+
+    const auto f_lte_u32_exact = [](const float v1_f, const uint32_t v2_u32) {
+        if (std::isnan(v1_f)) return false;
+        if (v1_f <= 0.0f) return true;
+
+        float intpart;
+        const float fract = modff(v1_f, &intpart);
+
+        assert(std::numeric_limits<float>::radix == 2);
+        const float u32_max_plus1 = ldexpf(1.0f, 32);  // hopefully exact
+        if (intpart >= u32_max_plus1) return false;
+
+        uint32_t v1_u32 = static_cast<uint32_t>(intpart);
+        if (v1_u32 < v2_u32)
+            return true;
+        else if (v1_u32 == v2_u32 && fract == 0.0f)
+            return true;
+        else
+            return false;
+    };
+
+    const auto f_lte_u32_direct = [](const float v1_f, const uint32_t v2_u32) {
+        const float v2_f = static_cast<float>(v2_u32);  // not exact for values > radix^digits; rounding direction is implementation-defined
+        return (v1_f <= v2_f);
+    };
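+    // Example of why both comparisons are kept: with maxViewportDimensions[0] == 0xFFFFFFFF,
+    // static_cast<float>(max) typically rounds up to 2^32 under round-to-nearest, so a width of
+    // 4294967296.0f passes the direct check even though it exceeds the limit; the exact check
+    // rejects it, and that mismatch is reported below as a warning rather than an error.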
+
+    // width
+    bool width_healthy = true;
+    const auto max_w = device_limits.maxViewportDimensions[0];
+
+    if (!(viewport.width > 0.0f)) {
+        width_healthy = false;
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object, "VUID-VkViewport-width-01770",
+                        "%s: %s.width (=%f) is not greater than 0.0.", fn_name, parameter_name.get_name().c_str(), viewport.width);
+    } else if (!(f_lte_u32_exact(viewport.width, max_w) || f_lte_u32_direct(viewport.width, max_w))) {
+        width_healthy = false;
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object, "VUID-VkViewport-width-01771",
+                        "%s: %s.width (=%f) exceeds VkPhysicalDeviceLimits::maxViewportDimensions[0] (=%" PRIu32 ").", fn_name,
+                        parameter_name.get_name().c_str(), viewport.width, max_w);
+    } else if (!f_lte_u32_exact(viewport.width, max_w) && f_lte_u32_direct(viewport.width, max_w)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, object_type, object, kVUID_PVError_NONE,
+                        "%s: %s.width (=%f) technically exceeds VkPhysicalDeviceLimits::maxViewportDimensions[0] (=%" PRIu32
+                        "), but it is within the static_cast<float>(maxViewportDimensions[0]) limit.",
+                        fn_name, parameter_name.get_name().c_str(), viewport.width, max_w);
+    }
+
+    // height
+    bool height_healthy = true;
+    const bool negative_height_enabled = api_version >= VK_API_VERSION_1_1 || device_extensions.vk_khr_maintenance1 ||
+                                         device_extensions.vk_amd_negative_viewport_height;
+    const auto max_h = device_limits.maxViewportDimensions[1];
+
+    if (!negative_height_enabled && !(viewport.height > 0.0f)) {
+        height_healthy = false;
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object, "VUID-VkViewport-height-01772",
+                        "%s: %s.height (=%f) is not greater than 0.0.", fn_name, parameter_name.get_name().c_str(), viewport.height);
+    } else if (!(f_lte_u32_exact(fabsf(viewport.height), max_h) || f_lte_u32_direct(fabsf(viewport.height), max_h))) {
+        height_healthy = false;
+
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object, "VUID-VkViewport-height-01773",
+                        "%s: Absolute value of %s.height (=%f) exceeds VkPhysicalDeviceLimits::maxViewportDimensions[1] (=%" PRIu32
+                        ").",
+                        fn_name, parameter_name.get_name().c_str(), viewport.height, max_h);
+    } else if (!f_lte_u32_exact(fabsf(viewport.height), max_h) && f_lte_u32_direct(fabsf(viewport.height), max_h)) {
+        height_healthy = false;
+
+        skip |= log_msg(
+            report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, object_type, object, kVUID_PVError_NONE,
+            "%s: Absolute value of %s.height (=%f) technically exceeds VkPhysicalDeviceLimits::maxViewportDimensions[1] (=%" PRIu32
+            "), but it is within the static_cast<float>(maxViewportDimensions[1]) limit.",
+            fn_name, parameter_name.get_name().c_str(), viewport.height, max_h);
+    }
+
+    // x
+    bool x_healthy = true;
+    if (!(viewport.x >= device_limits.viewportBoundsRange[0])) {
+        x_healthy = false;
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object, "VUID-VkViewport-x-01774",
+                        "%s: %s.x (=%f) is less than VkPhysicalDeviceLimits::viewportBoundsRange[0] (=%f).", fn_name,
+                        parameter_name.get_name().c_str(), viewport.x, device_limits.viewportBoundsRange[0]);
+    }
+
+    // x + width
+    if (x_healthy && width_healthy) {
+        const float right_bound = viewport.x + viewport.width;
+        if (!(right_bound <= device_limits.viewportBoundsRange[1])) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object, "VUID-VkViewport-x-01232",
+                        "%s: %s.x + %s.width (=%f + %f = %f) is greater than VkPhysicalDeviceLimits::viewportBoundsRange[1] (=%f).",
+                        fn_name, parameter_name.get_name().c_str(), parameter_name.get_name().c_str(), viewport.x, viewport.width,
+                        right_bound, device_limits.viewportBoundsRange[1]);
+        }
+    }
+
+    // y
+    bool y_healthy = true;
+    if (!(viewport.y >= device_limits.viewportBoundsRange[0])) {
+        y_healthy = false;
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object, "VUID-VkViewport-y-01775",
+                        "%s: %s.y (=%f) is less than VkPhysicalDeviceLimits::viewportBoundsRange[0] (=%f).", fn_name,
+                        parameter_name.get_name().c_str(), viewport.y, device_limits.viewportBoundsRange[0]);
+    } else if (negative_height_enabled && !(viewport.y <= device_limits.viewportBoundsRange[1])) {
+        y_healthy = false;
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object, "VUID-VkViewport-y-01776",
+                        "%s: %s.y (=%f) exceeds VkPhysicalDeviceLimits::viewportBoundsRange[1] (=%f).", fn_name,
+                        parameter_name.get_name().c_str(), viewport.y, device_limits.viewportBoundsRange[1]);
+    }
+
+    // y + height
+    if (y_healthy && height_healthy) {
+        const float boundary = viewport.y + viewport.height;
+
+        if (!(boundary <= device_limits.viewportBoundsRange[1])) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object, "VUID-VkViewport-y-01233",
+                            "%s: %s.y + %s.height (=%f + %f = %f) exceeds VkPhysicalDeviceLimits::viewportBoundsRange[1] (=%f).",
+                            fn_name, parameter_name.get_name().c_str(), parameter_name.get_name().c_str(), viewport.y,
+                            viewport.height, boundary, device_limits.viewportBoundsRange[1]);
+        } else if (negative_height_enabled && !(boundary >= device_limits.viewportBoundsRange[0])) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object, "VUID-VkViewport-y-01777",
+                        "%s: %s.y + %s.height (=%f + %f = %f) is less than VkPhysicalDeviceLimits::viewportBoundsRange[0] (=%f).",
+                        fn_name, parameter_name.get_name().c_str(), parameter_name.get_name().c_str(), viewport.y, viewport.height,
+                        boundary, device_limits.viewportBoundsRange[0]);
+        }
+    }
+
+    if (!device_extensions.vk_ext_depth_range_unrestricted) {
+        // minDepth
+        if (!(viewport.minDepth >= 0.0) || !(viewport.minDepth <= 1.0)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object, "VUID-VkViewport-minDepth-01234",
+                            "%s: VK_EXT_depth_range_unrestricted extension is not enabled and %s.minDepth (=%f) is not within the "
+                            "[0.0, 1.0] range.",
+                            fn_name, parameter_name.get_name().c_str(), viewport.minDepth);
+        }
+
+        // maxDepth
+        if (!(viewport.maxDepth >= 0.0) || !(viewport.maxDepth <= 1.0)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object, "VUID-VkViewport-maxDepth-01235",
+                            "%s: VK_EXT_depth_range_unrestricted extension is not enabled and %s.maxDepth (=%f) is not within the "
+                            "[0.0, 1.0] range.",
+                            fn_name, parameter_name.get_name().c_str(), viewport.maxDepth);
+        }
+    }
+
+    return skip;
+}
+
+struct SampleOrderInfo {
+    VkShadingRatePaletteEntryNV shadingRate;
+    uint32_t width;
+    uint32_t height;
+};
+
+// All palette entries with more than one pixel per fragment
+static SampleOrderInfo sampleOrderInfos[] = {
+    {VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV, 1, 2},
+    {VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV, 2, 1},
+    {VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV, 2, 2},
+    {VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV, 4, 2},
+    {VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV, 2, 4},
+    {VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV, 4, 4},
+};
+
+bool StatelessValidation::ValidateCoarseSampleOrderCustomNV(const VkCoarseSampleOrderCustomNV *order) const {
+    bool skip = false;
+
+    SampleOrderInfo *sampleOrderInfo;
+    uint32_t infoIdx = 0;
+    for (sampleOrderInfo = nullptr; infoIdx < ARRAY_SIZE(sampleOrderInfos); ++infoIdx) {
+        if (sampleOrderInfos[infoIdx].shadingRate == order->shadingRate) {
+            sampleOrderInfo = &sampleOrderInfos[infoIdx];
+            break;
+        }
+    }
+
+    if (sampleOrderInfo == nullptr) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkCoarseSampleOrderCustomNV-shadingRate-02073",
+                        "VkCoarseSampleOrderCustomNV shadingRate must be a shading rate "
+                        "that generates fragments with more than one pixel.");
+        return skip;
+    }
+
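+    // sampleCount must be a single VkSampleCountFlagBits bit (i.e. a power of two) that is also set
+    // in VkPhysicalDeviceLimits::framebufferNoAttachmentsSampleCounts.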
+    if (order->sampleCount == 0 || (order->sampleCount & (order->sampleCount - 1)) ||
+        !(order->sampleCount & device_limits.framebufferNoAttachmentsSampleCounts)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkCoarseSampleOrderCustomNV-sampleCount-02074",
+                        "VkCoarseSampleOrderCustomNV sampleCount (=%" PRIu32
+                        ") must "
+                        "correspond to a sample count enumerated in VkSampleCountFlags whose corresponding bit "
+                        "is set in framebufferNoAttachmentsSampleCounts.",
+                        order->sampleCount);
+    }
+
+    if (order->sampleLocationCount != order->sampleCount * sampleOrderInfo->width * sampleOrderInfo->height) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkCoarseSampleOrderCustomNV-sampleLocationCount-02075",
+                        "VkCoarseSampleOrderCustomNV sampleLocationCount (=%" PRIu32
+                        ") must "
+                        "be equal to the product of sampleCount (=%" PRIu32
+                        "), the fragment width for shadingRate "
+                        "(=%" PRIu32 "), and the fragment height for shadingRate (=%" PRIu32 ").",
+                        order->sampleLocationCount, order->sampleCount, sampleOrderInfo->width, sampleOrderInfo->height);
+    }
+
+    if (order->sampleLocationCount > phys_dev_ext_props.shading_rate_image_props.shadingRateMaxCoarseSamples) {
+        skip |= log_msg(
+            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+            "VUID-VkCoarseSampleOrderCustomNV-sampleLocationCount-02076",
+            "VkCoarseSampleOrderCustomNV sampleLocationCount (=%" PRIu32
+            ") must "
+            "be less than or equal to VkPhysicalDeviceShadingRateImagePropertiesNV shadingRateMaxCoarseSamples (=%" PRIu32 ").",
+            order->sampleLocationCount, phys_dev_ext_props.shading_rate_image_props.shadingRateMaxCoarseSamples);
+    }
+
+    // Accumulate a bitmask tracking which (x,y,sample) tuples are seen. Expect
+    // the first width*height*sampleCount bits to all be set. Note: There is no
+    // guarantee that 64 bits is enough, but practically it's unlikely for an
+    // implementation to support more than 32 bits for samplemask.
+    assert(phys_dev_ext_props.shading_rate_image_props.shadingRateMaxCoarseSamples <= 64);
+    uint64_t sampleLocationsMask = 0;
+    for (uint32_t i = 0; i < order->sampleLocationCount; ++i) {
+        const VkCoarseSampleLocationNV *sampleLoc = &order->pSampleLocations[i];
+        if (sampleLoc->pixelX >= sampleOrderInfo->width) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkCoarseSampleLocationNV-pixelX-02078",
+                            "pixelX must be less than the width (in pixels) of the fragment.");
+        }
+        if (sampleLoc->pixelY >= sampleOrderInfo->height) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkCoarseSampleLocationNV-pixelY-02079",
+                            "pixelY must be less than the height (in pixels) of the fragment.");
+        }
+        if (sampleLoc->sample >= order->sampleCount) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkCoarseSampleLocationNV-sample-02080",
+                            "sample must be less than the number of coverage samples in each pixel belonging to the fragment.");
+        }
+        uint32_t idx = sampleLoc->sample + order->sampleCount * (sampleLoc->pixelX + sampleOrderInfo->width * sampleLoc->pixelY);
+        sampleLocationsMask |= 1ULL << idx;
+    }
+
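+    // Shifting a 64-bit value by 64 is undefined behavior in C++, so the all-bits-set mask for
+    // sampleLocationCount == 64 is special-cased instead of being computed as (1ULL << 64) - 1.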
+    uint64_t expectedMask = (order->sampleLocationCount == 64) ? ~0ULL : ((1ULL << order->sampleLocationCount) - 1);
+    if (sampleLocationsMask != expectedMask) {
+        skip |= log_msg(
+            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+            "VUID-VkCoarseSampleOrderCustomNV-pSampleLocations-02077",
+            "The array pSampleLocations must contain exactly one entry for "
+            "every combination of valid values for pixelX, pixelY, and sample in the structure VkCoarseSampleOrderCustomNV.");
+    }
+
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache,
+                                                                        uint32_t createInfoCount,
+                                                                        const VkGraphicsPipelineCreateInfo *pCreateInfos,
+                                                                        const VkAllocationCallbacks *pAllocator,
+                                                                        VkPipeline *pPipelines) const {
+    bool skip = false;
+
+    if (pCreateInfos != nullptr) {
+        for (uint32_t i = 0; i < createInfoCount; ++i) {
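+            // Record which pieces of state this pipeline declares as dynamic; the flags are consulted
+            // by the state-specific checks later in this function.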
+            bool has_dynamic_viewport = false;
+            bool has_dynamic_scissor = false;
+            bool has_dynamic_line_width = false;
+            bool has_dynamic_viewport_w_scaling_nv = false;
+            bool has_dynamic_discard_rectangle_ext = false;
+            bool has_dynamic_sample_locations_ext = false;
+            bool has_dynamic_exclusive_scissor_nv = false;
+            bool has_dynamic_shading_rate_palette_nv = false;
+            bool has_dynamic_line_stipple = false;
+            if (pCreateInfos[i].pDynamicState != nullptr) {
+                const auto &dynamic_state_info = *pCreateInfos[i].pDynamicState;
+                for (uint32_t state_index = 0; state_index < dynamic_state_info.dynamicStateCount; ++state_index) {
+                    const auto &dynamic_state = dynamic_state_info.pDynamicStates[state_index];
+                    if (dynamic_state == VK_DYNAMIC_STATE_VIEWPORT) has_dynamic_viewport = true;
+                    if (dynamic_state == VK_DYNAMIC_STATE_SCISSOR) has_dynamic_scissor = true;
+                    if (dynamic_state == VK_DYNAMIC_STATE_LINE_WIDTH) has_dynamic_line_width = true;
+                    if (dynamic_state == VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV) has_dynamic_viewport_w_scaling_nv = true;
+                    if (dynamic_state == VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT) has_dynamic_discard_rectangle_ext = true;
+                    if (dynamic_state == VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT) has_dynamic_sample_locations_ext = true;
+                    if (dynamic_state == VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV) has_dynamic_exclusive_scissor_nv = true;
+                    if (dynamic_state == VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV)
+                        has_dynamic_shading_rate_palette_nv = true;
+                    if (dynamic_state == VK_DYNAMIC_STATE_LINE_STIPPLE_EXT) has_dynamic_line_stipple = true;
+                }
+            }
+
+            auto feedback_struct = lvl_find_in_chain<VkPipelineCreationFeedbackCreateInfoEXT>(pCreateInfos[i].pNext);
+            if ((feedback_struct != nullptr) &&
+                (feedback_struct->pipelineStageCreationFeedbackCount != pCreateInfos[i].stageCount)) {
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, VK_NULL_HANDLE,
+                            "VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02668",
+                            "vkCreateGraphicsPipelines(): in pCreateInfo[%" PRIu32
+                            "], VkPipelineCreationFeedbackCreateInfoEXT::pipelineStageCreationFeedbackCount "
+                            "(=%" PRIu32 ") must equal VkGraphicsPipelineCreateInfo::stageCount (=%" PRIu32 ").",
+                            i, feedback_struct->pipelineStageCreationFeedbackCount, pCreateInfos[i].stageCount);
+            }
+
+            // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml
+
+            // Collect active stages
+            uint32_t active_shaders = 0;
+            for (uint32_t stages = 0; stages < pCreateInfos[i].stageCount; stages++) {
+                active_shaders |= pCreateInfos[i].pStages[stages].stage;
+            }
+
+            if ((active_shaders & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) &&
+                (active_shaders & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) && (pCreateInfos[i].pTessellationState != nullptr)) {
+                skip |= validate_struct_type("vkCreateGraphicsPipelines", "pCreateInfos[i].pTessellationState",
+                                             "VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO",
+                                             pCreateInfos[i].pTessellationState,
+                                             VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, false, kVUIDUndefined,
+                                             "VUID-VkPipelineTessellationStateCreateInfo-sType-sType");
+
+                const VkStructureType allowed_structs_VkPipelineTessellationStateCreateInfo[] = {
+                    VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO};
+
+                skip |= validate_struct_pnext("vkCreateGraphicsPipelines", "pCreateInfos[i].pTessellationState->pNext",
+                                              "VkPipelineTessellationDomainOriginStateCreateInfo",
+                                              pCreateInfos[i].pTessellationState->pNext,
+                                              ARRAY_SIZE(allowed_structs_VkPipelineTessellationStateCreateInfo),
+                                              allowed_structs_VkPipelineTessellationStateCreateInfo, GeneratedVulkanHeaderVersion,
+                                              "VUID-VkPipelineTessellationStateCreateInfo-pNext-pNext");
+
+                skip |= validate_reserved_flags("vkCreateGraphicsPipelines", "pCreateInfos[i].pTessellationState->flags",
+                                                pCreateInfos[i].pTessellationState->flags,
+                                                "VUID-VkPipelineTessellationStateCreateInfo-flags-zerobitmask");
+            }
+
+            if (!(active_shaders & VK_SHADER_STAGE_MESH_BIT_NV) && (pCreateInfos[i].pInputAssemblyState != nullptr)) {
+                skip |= validate_struct_type("vkCreateGraphicsPipelines", "pCreateInfos[i].pInputAssemblyState",
+                                             "VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO",
+                                             pCreateInfos[i].pInputAssemblyState,
+                                             VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, false, kVUIDUndefined,
+                                             "VUID-VkPipelineInputAssemblyStateCreateInfo-sType-sType");
+
+                skip |= validate_struct_pnext("vkCreateGraphicsPipelines", "pCreateInfos[i].pInputAssemblyState->pNext", NULL,
+                                              pCreateInfos[i].pInputAssemblyState->pNext, 0, NULL, GeneratedVulkanHeaderVersion,
+                                              "VUID-VkPipelineInputAssemblyStateCreateInfo-pNext-pNext");
+
+                skip |= validate_reserved_flags("vkCreateGraphicsPipelines", "pCreateInfos[i].pInputAssemblyState->flags",
+                                                pCreateInfos[i].pInputAssemblyState->flags,
+                                                "VUID-VkPipelineInputAssemblyStateCreateInfo-flags-zerobitmask");
+
+                skip |= validate_ranged_enum("vkCreateGraphicsPipelines", "pCreateInfos[i].pInputAssemblyState->topology",
+                                             "VkPrimitiveTopology", AllVkPrimitiveTopologyEnums,
+                                             pCreateInfos[i].pInputAssemblyState->topology,
+                                             "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-parameter");
+
+                skip |= validate_bool32("vkCreateGraphicsPipelines", "pCreateInfos[i].pInputAssemblyState->primitiveRestartEnable",
+                                        pCreateInfos[i].pInputAssemblyState->primitiveRestartEnable);
+            }
+
+            if (!(active_shaders & VK_SHADER_STAGE_MESH_BIT_NV) && (pCreateInfos[i].pVertexInputState != nullptr)) {
+                auto const &vertex_input_state = pCreateInfos[i].pVertexInputState;
+
+                if (pCreateInfos[i].pVertexInputState->flags != 0) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    "VUID-VkPipelineVertexInputStateCreateInfo-flags-zerobitmask",
+                                    "vkCreateGraphicsPipelines: parameter "
+                                    "pCreateInfos[%d].pVertexInputState->flags (%u) is reserved and must be zero.",
+                                    i, vertex_input_state->flags);
+                }
+
+                const VkStructureType allowed_structs_VkPipelineVertexInputStateCreateInfo[] = {
+                    VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT};
+                skip |= validate_struct_pnext("vkCreateGraphicsPipelines", "pCreateInfos[i].pVertexInputState->pNext",
+                                              "VkPipelineVertexInputDivisorStateCreateInfoEXT",
+                                              pCreateInfos[i].pVertexInputState->pNext, 1,
+                                              allowed_structs_VkPipelineVertexInputStateCreateInfo, GeneratedVulkanHeaderVersion,
+                                              "VUID-VkPipelineVertexInputStateCreateInfo-pNext-pNext");
+                skip |= validate_struct_type("vkCreateGraphicsPipelines", "pCreateInfos[i].pVertexInputState",
+                                             "VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO", vertex_input_state,
+                                             VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO, false, kVUIDUndefined,
+                                             "VUID-VkPipelineVertexInputStateCreateInfo-sType-sType");
+                skip |=
+                    validate_array("vkCreateGraphicsPipelines", "pCreateInfos[i].pVertexInputState->vertexBindingDescriptionCount",
+                                   "pCreateInfos[i].pVertexInputState->pVertexBindingDescriptions",
+                                   pCreateInfos[i].pVertexInputState->vertexBindingDescriptionCount,
+                                   &pCreateInfos[i].pVertexInputState->pVertexBindingDescriptions, false, true, kVUIDUndefined,
+                                   "VUID-VkPipelineVertexInputStateCreateInfo-pVertexBindingDescriptions-parameter");
+
+                skip |= validate_array(
+                    "vkCreateGraphicsPipelines", "pCreateInfos[i].pVertexInputState->vertexAttributeDescriptionCount",
+                    "pCreateInfos[i].pVertexInputState->pVertexAttributeDescriptions", vertex_input_state->vertexAttributeDescriptionCount,
+                    &vertex_input_state->pVertexAttributeDescriptions, false, true, kVUIDUndefined,
+                    "VUID-VkPipelineVertexInputStateCreateInfo-pVertexAttributeDescriptions-parameter");
+
+                if (pCreateInfos[i].pVertexInputState->pVertexBindingDescriptions != NULL) {
+                    for (uint32_t vertexBindingDescriptionIndex = 0;
+                         vertexBindingDescriptionIndex < pCreateInfos[i].pVertexInputState->vertexBindingDescriptionCount;
+                         ++vertexBindingDescriptionIndex) {
+                        skip |= validate_ranged_enum(
+                            "vkCreateGraphicsPipelines",
+                            "pCreateInfos[i].pVertexInputState->pVertexBindingDescriptions[j].inputRate", "VkVertexInputRate",
+                            AllVkVertexInputRateEnums,
+                            pCreateInfos[i].pVertexInputState->pVertexBindingDescriptions[vertexBindingDescriptionIndex].inputRate,
+                            "VUID-VkVertexInputBindingDescription-inputRate-parameter");
+                    }
+                }
+
+                if (pCreateInfos[i].pVertexInputState->pVertexAttributeDescriptions != NULL) {
+                    for (uint32_t vertexAttributeDescriptionIndex = 0;
+                         vertexAttributeDescriptionIndex < pCreateInfos[i].pVertexInputState->vertexAttributeDescriptionCount;
+                         ++vertexAttributeDescriptionIndex) {
+                        skip |= validate_ranged_enum(
+                            "vkCreateGraphicsPipelines",
+                            "pCreateInfos[i].pVertexInputState->pVertexAttributeDescriptions[j].format", "VkFormat",
+                            AllVkFormatEnums,
+                            pCreateInfos[i].pVertexInputState->pVertexAttributeDescriptions[vertexAttributeDescriptionIndex].format,
+                            "VUID-VkVertexInputAttributeDescription-format-parameter");
+                    }
+                }
+
+                if (vertex_input_state->vertexBindingDescriptionCount > device_limits.maxVertexInputBindings) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    "VUID-VkPipelineVertexInputStateCreateInfo-vertexBindingDescriptionCount-00613",
+                                    "vkCreateGraphicsPipelines: parameter "
+                                    "pCreateInfo[%d].pVertexInputState->vertexBindingDescriptionCount (%u) is "
+                                    "greater than VkPhysicalDeviceLimits::maxVertexInputBindings (%u).",
+                                    i, vertex_input_state->vertexBindingDescriptionCount, device_limits.maxVertexInputBindings);
+                }
+
+                if (vertex_input_state->vertexAttributeDescriptionCount > device_limits.maxVertexInputAttributes) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    "VUID-VkPipelineVertexInputStateCreateInfo-vertexAttributeDescriptionCount-00614",
+                                    "vkCreateGraphicsPipelines: parameter "
+                                    "pCreateInfo[%d].pVertexInputState->vertexAttributeDescriptionCount (%u) is "
+                                    "greater than VkPhysicalDeviceLimits::maxVertexInputAttributes (%u).",
+                                    i, vertex_input_state->vertexAttributeDescriptionCount, device_limits.maxVertexInputAttributes);
+                }
+
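+                // Track bindings and attribute locations already seen so that duplicates can be
+                // reported; each binding and each location must be distinct within the pipeline.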
+                std::unordered_set<uint32_t> vertex_bindings(vertex_input_state->vertexBindingDescriptionCount);
+                for (uint32_t d = 0; d < vertex_input_state->vertexBindingDescriptionCount; ++d) {
+                    auto const &vertex_bind_desc = vertex_input_state->pVertexBindingDescriptions[d];
+                    auto const &binding_it = vertex_bindings.find(vertex_bind_desc.binding);
+                    if (binding_it != vertex_bindings.cend()) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        "VUID-VkPipelineVertexInputStateCreateInfo-pVertexBindingDescriptions-00616",
+                                        "vkCreateGraphicsPipelines: parameter "
+                                        "pCreateInfo[%d].pVertexInputState->pVertexBindingDescription[%d].binding "
+                                        "(%" PRIu32 ") is not distinct.",
+                                        i, d, vertex_bind_desc.binding);
+                    }
+                    vertex_bindings.insert(vertex_bind_desc.binding);
+
+                    if (vertex_bind_desc.binding >= device_limits.maxVertexInputBindings) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        "VUID-VkVertexInputBindingDescription-binding-00618",
+                                        "vkCreateGraphicsPipelines: parameter "
+                                        "pCreateInfos[%u].pVertexInputState->pVertexBindingDescriptions[%u].binding (%u) is "
+                                        "greater than or equal to VkPhysicalDeviceLimits::maxVertexInputBindings (%u).",
+                                        i, d, vertex_bind_desc.binding, device_limits.maxVertexInputBindings);
+                    }
+
+                    if (vertex_bind_desc.stride > device_limits.maxVertexInputBindingStride) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        "VUID-VkVertexInputBindingDescription-stride-00619",
+                                        "vkCreateGraphicsPipelines: parameter "
+                                        "pCreateInfos[%u].pVertexInputState->pVertexBindingDescriptions[%u].stride (%u) is greater "
+                                        "than VkPhysicalDeviceLimits::maxVertexInputBindingStride (%u).",
+                                        i, d, vertex_bind_desc.stride, device_limits.maxVertexInputBindingStride);
+                    }
+                }
+
+                std::unordered_set<uint32_t> attribute_locations(vertex_input_state->vertexAttributeDescriptionCount);
+                for (uint32_t d = 0; d < vertex_input_state->vertexAttributeDescriptionCount; ++d) {
+                    auto const &vertex_attrib_desc = vertex_input_state->pVertexAttributeDescriptions[d];
+                    auto const &location_it = attribute_locations.find(vertex_attrib_desc.location);
+                    if (location_it != attribute_locations.cend()) {
+                        skip |= log_msg(
+                            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkPipelineVertexInputStateCreateInfo-pVertexAttributeDescriptions-00617",
+                            "vkCreateGraphicsPipelines: parameter "
+                            "pCreateInfo[%d].pVertexInputState->vertexAttributeDescriptions[%d].location (%u) is not distinct.",
+                            i, d, vertex_attrib_desc.location);
+                    }
+                    attribute_locations.insert(vertex_attrib_desc.location);
+
+                    auto const &binding_it = vertex_bindings.find(vertex_attrib_desc.binding);
+                    if (binding_it == vertex_bindings.cend()) {
+                        skip |= log_msg(
+                            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkPipelineVertexInputStateCreateInfo-binding-00615",
+                            "vkCreateGraphicsPipelines: parameter "
+                            "pCreateInfo[%d].pVertexInputState->vertexAttributeDescriptions[%d].binding (%u) does not exist "
+                            "in any pCreateInfo[%d].pVertexInputState->pVertexBindingDescription.",
+                            i, d, vertex_attrib_desc.binding, i);
+                    }
+
+                    if (vertex_attrib_desc.location >= device_limits.maxVertexInputAttributes) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        "VUID-VkVertexInputAttributeDescription-location-00620",
+                                        "vkCreateGraphicsPipelines: parameter "
+                                        "pCreateInfos[%u].pVertexInputState->pVertexAttributeDescriptions[%u].location (%u) is "
+                                        "greater than or equal to VkPhysicalDeviceLimits::maxVertexInputAttributes (%u).",
+                                        i, d, vertex_attrib_desc.location, device_limits.maxVertexInputAttributes);
+                    }
+
+                    if (vertex_attrib_desc.binding >= device_limits.maxVertexInputBindings) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        "VUID-VkVertexInputAttributeDescription-binding-00621",
+                                        "vkCreateGraphicsPipelines: parameter "
+                                        "pCreateInfos[%u].pVertexInputState->pVertexAttributeDescriptions[%u].binding (%u) is "
+                                        "greater than or equal to VkPhysicalDeviceLimits::maxVertexInputBindings (%u).",
+                                        i, d, vertex_attrib_desc.binding, device_limits.maxVertexInputBindings);
+                    }
+
+                    if (vertex_attrib_desc.offset > device_limits.maxVertexInputAttributeOffset) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        "VUID-VkVertexInputAttributeDescription-offset-00622",
+                                        "vkCreateGraphicsPipelines: parameter "
+                                        "pCreateInfos[%u].pVertexInputState->pVertexAttributeDescriptions[%u].offset (%u) is "
+                                        "greater than VkPhysicalDeviceLimits::maxVertexInputAttributeOffset (%u).",
+                                        i, d, vertex_attrib_desc.offset, device_limits.maxVertexInputAttributeOffset);
+                    }
+                }
+            }
+
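+            // Determine which tessellation shader stages are present; pTessellationState is only required (and validated)
+            // when both the control and evaluation stages are supplied.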
+            if (pCreateInfos[i].pStages != nullptr) {
+                bool has_control = false;
+                bool has_eval = false;
+
+                for (uint32_t stage_index = 0; stage_index < pCreateInfos[i].stageCount; ++stage_index) {
+                    if (pCreateInfos[i].pStages[stage_index].stage == VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) {
+                        has_control = true;
+                    } else if (pCreateInfos[i].pStages[stage_index].stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) {
+                        has_eval = true;
+                    }
+                }
+
+                // pTessellationState is ignored without both tessellation control and tessellation evaluation shader stages
+                if (has_control && has_eval) {
+                    if (pCreateInfos[i].pTessellationState == nullptr) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        "VUID-VkGraphicsPipelineCreateInfo-pStages-00731",
+                                        "vkCreateGraphicsPipelines: if pCreateInfos[%d].pStages includes a tessellation control "
+                                        "shader stage and a tessellation evaluation shader stage, "
+                                        "pCreateInfos[%d].pTessellationState must not be NULL.",
+                                        i, i);
+                    } else {
+                        const VkStructureType allowed_type =
+                            VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO;
+                        skip |= validate_struct_pnext(
+                            "vkCreateGraphicsPipelines",
+                            ParameterName("pCreateInfos[%i].pTessellationState->pNext", ParameterName::IndexVector{i}),
+                            "VkPipelineTessellationDomainOriginStateCreateInfo", pCreateInfos[i].pTessellationState->pNext, 1,
+                            &allowed_type, GeneratedVulkanHeaderVersion, "VUID-VkGraphicsPipelineCreateInfo-pNext-pNext");
+
+                        skip |= validate_reserved_flags(
+                            "vkCreateGraphicsPipelines",
+                            ParameterName("pCreateInfos[%i].pTessellationState->flags", ParameterName::IndexVector{i}),
+                            pCreateInfos[i].pTessellationState->flags,
+                            "VUID-VkPipelineTessellationStateCreateInfo-flags-zerobitmask");
+
+                        if (pCreateInfos[i].pTessellationState->sType !=
+                            VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO) {
+                            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                            "VUID-VkPipelineTessellationStateCreateInfo-sType-sType",
+                                            "vkCreateGraphicsPipelines: parameter pCreateInfos[%d].pTessellationState->sType must "
+                                            "be VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO.",
+                                            i);
+                        }
+
+                        if (pCreateInfos[i].pTessellationState->patchControlPoints == 0 ||
+                            pCreateInfos[i].pTessellationState->patchControlPoints > device_limits.maxTessellationPatchSize) {
+                            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                            "VUID-VkPipelineTessellationStateCreateInfo-patchControlPoints-01214",
+                                            "vkCreateGraphicsPipelines: invalid parameter "
+                                            "pCreateInfos[%d].pTessellationState->patchControlPoints value %u. patchControlPoints "
+                                            "should be >0 and <=%u.",
+                                            i, pCreateInfos[i].pTessellationState->patchControlPoints,
+                                            device_limits.maxTessellationPatchSize);
+                        }
+                    }
+                }
+            }
+
+            // pViewportState, pMultisampleState, pDepthStencilState, and pColorBlendState ignored when rasterization is disabled
+            if ((pCreateInfos[i].pRasterizationState != nullptr) &&
+                (pCreateInfos[i].pRasterizationState->rasterizerDiscardEnable == VK_FALSE)) {
+                if (pCreateInfos[i].pViewportState == nullptr) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                    VK_NULL_HANDLE, "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00750",
+                                    "vkCreateGraphicsPipelines: Rasterization is enabled (pCreateInfos[%" PRIu32
+                                    "].pRasterizationState->rasterizerDiscardEnable is VK_FALSE), but pCreateInfos[%" PRIu32
+                                    "].pViewportState (=NULL) is not a valid pointer.",
+                                    i, i);
+                } else {
+                    const auto &viewport_state = *pCreateInfos[i].pViewportState;
+
+                    if (viewport_state.sType != VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                        VK_NULL_HANDLE, "VUID-VkPipelineViewportStateCreateInfo-sType-sType",
+                                        "vkCreateGraphicsPipelines: pCreateInfos[%" PRIu32
+                                        "].pViewportState->sType is not VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO.",
+                                        i);
+                    }
+
+                    const VkStructureType allowed_structs_VkPipelineViewportStateCreateInfo[] = {
+                        VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV,
+                        VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV,
+                        VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV,
+                        VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV,
+                        VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV,
+                    };
+                    skip |= validate_struct_pnext(
+                        "vkCreateGraphicsPipelines",
+                        ParameterName("pCreateInfos[%i].pViewportState->pNext", ParameterName::IndexVector{i}),
+                        "VkPipelineViewportSwizzleStateCreateInfoNV, VkPipelineViewportWScalingStateCreateInfoNV, "
+                        "VkPipelineViewportExclusiveScissorStateCreateInfoNV, VkPipelineViewportShadingRateImageStateCreateInfoNV, "
+                        "VkPipelineViewportCoarseSampleOrderStateCreateInfoNV",
+                        viewport_state.pNext, ARRAY_SIZE(allowed_structs_VkPipelineViewportStateCreateInfo),
+                        allowed_structs_VkPipelineViewportStateCreateInfo, GeneratedVulkanHeaderVersion,
+                        "VUID-VkPipelineViewportStateCreateInfo-pNext-pNext");
+
+                    skip |= validate_reserved_flags(
+                        "vkCreateGraphicsPipelines",
+                        ParameterName("pCreateInfos[%i].pViewportState->flags", ParameterName::IndexVector{i}),
+                        viewport_state.flags, "VUID-VkPipelineViewportStateCreateInfo-flags-zerobitmask");
+
+                    auto exclusive_scissor_struct = lvl_find_in_chain<VkPipelineViewportExclusiveScissorStateCreateInfoNV>(
+                        pCreateInfos[i].pViewportState->pNext);
+                    auto shading_rate_image_struct = lvl_find_in_chain<VkPipelineViewportShadingRateImageStateCreateInfoNV>(
+                        pCreateInfos[i].pViewportState->pNext);
+                    auto coarse_sample_order_struct = lvl_find_in_chain<VkPipelineViewportCoarseSampleOrderStateCreateInfoNV>(
+                        pCreateInfos[i].pViewportState->pNext);
+                    const auto vp_swizzle_struct =
+                        lvl_find_in_chain<VkPipelineViewportSwizzleStateCreateInfoNV>(pCreateInfos[i].pViewportState->pNext);
+                    const auto vp_w_scaling_struct =
+                        lvl_find_in_chain<VkPipelineViewportWScalingStateCreateInfoNV>(pCreateInfos[i].pViewportState->pNext);
+
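+                    // Without the multiViewport feature, viewportCount and scissorCount must be 1, and the NV extension
+                    // counts must be 0 or 1.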
+                    if (!physical_device_features.multiViewport) {
+                        if (viewport_state.viewportCount != 1) {
+                            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                            VK_NULL_HANDLE, "VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
+                                            "vkCreateGraphicsPipelines: The VkPhysicalDeviceFeatures::multiViewport feature is "
+                                            "disabled, but pCreateInfos[%" PRIu32 "].pViewportState->viewportCount (=%" PRIu32
+                                            ") is not 1.",
+                                            i, viewport_state.viewportCount);
+                        }
+
+                        if (viewport_state.scissorCount != 1) {
+                            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                            VK_NULL_HANDLE, "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
+                                            "vkCreateGraphicsPipelines: The VkPhysicalDeviceFeatures::multiViewport feature is "
+                                            "disabled, but pCreateInfos[%" PRIu32 "].pViewportState->scissorCount (=%" PRIu32
+                                            ") is not 1.",
+                                            i, viewport_state.scissorCount);
+                        }
+
+                        if (exclusive_scissor_struct && (exclusive_scissor_struct->exclusiveScissorCount != 0 &&
+                                                         exclusive_scissor_struct->exclusiveScissorCount != 1)) {
+                            skip |=
+                                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                        VK_NULL_HANDLE,
+                                        "VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-exclusiveScissorCount-02027",
+                                        "vkCreateGraphicsPipelines: The VkPhysicalDeviceFeatures::multiViewport feature is "
+                                        "disabled, but pCreateInfos[%" PRIu32
+                                        "] VkPipelineViewportExclusiveScissorStateCreateInfoNV::exclusiveScissorCount (=%" PRIu32
+                                        ") is neither 0 nor 1.",
+                                        i, exclusive_scissor_struct->exclusiveScissorCount);
+                        }
+
+                        if (shading_rate_image_struct &&
+                            (shading_rate_image_struct->viewportCount != 0 && shading_rate_image_struct->viewportCount != 1)) {
+                            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                            VK_NULL_HANDLE,
+                                            "VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-viewportCount-02054",
+                                            "vkCreateGraphicsPipelines: The VkPhysicalDeviceFeatures::multiViewport feature is "
+                                            "disabled, but pCreateInfos[%" PRIu32
+                                            "] VkPipelineViewportShadingRateImageStateCreateInfoNV::viewportCount (=%" PRIu32
+                                            ") is neither 0 nor 1.",
+                                            i, shading_rate_image_struct->viewportCount);
+                        }
+
+                    } else {  // multiViewport enabled
+                        if (viewport_state.viewportCount == 0) {
+                            skip |= log_msg(
+                                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                VK_NULL_HANDLE, "VUID-VkPipelineViewportStateCreateInfo-viewportCount-arraylength",
+                                "vkCreateGraphicsPipelines: pCreateInfos[%" PRIu32 "].pViewportState->viewportCount is 0.", i);
+                        } else if (viewport_state.viewportCount > device_limits.maxViewports) {
+                            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                            VK_NULL_HANDLE, "VUID-VkPipelineViewportStateCreateInfo-viewportCount-01218",
+                                            "vkCreateGraphicsPipelines: pCreateInfos[%" PRIu32
+                                            "].pViewportState->viewportCount (=%" PRIu32
+                                            ") is greater than VkPhysicalDeviceLimits::maxViewports (=%" PRIu32 ").",
+                                            i, viewport_state.viewportCount, device_limits.maxViewports);
+                        }
+
+                        if (viewport_state.scissorCount == 0) {
+                            skip |= log_msg(
+                                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                VK_NULL_HANDLE, "VUID-VkPipelineViewportStateCreateInfo-scissorCount-arraylength",
+                                "vkCreateGraphicsPipelines: pCreateInfos[%" PRIu32 "].pViewportState->scissorCount is 0.", i);
+                        } else if (viewport_state.scissorCount > device_limits.maxViewports) {
+                            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                            VK_NULL_HANDLE, "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01219",
+                                            "vkCreateGraphicsPipelines: pCreateInfos[%" PRIu32
+                                            "].pViewportState->scissorCount (=%" PRIu32
+                                            ") is greater than VkPhysicalDeviceLimits::maxViewports (=%" PRIu32 ").",
+                                            i, viewport_state.scissorCount, device_limits.maxViewports);
+                        }
+                    }
+
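+                    // Counts from the NV viewport extension structs are bounded by maxViewports and must stay consistent
+                    // with the viewport/scissor counts in pViewportState.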
+                    if (exclusive_scissor_struct && exclusive_scissor_struct->exclusiveScissorCount > device_limits.maxViewports) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                        VK_NULL_HANDLE,
+                                        "VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-exclusiveScissorCount-02028",
+                                        "vkCreateGraphicsPipelines: pCreateInfos[%" PRIu32 "] exclusiveScissorCount (=%" PRIu32
+                                        ") is greater than VkPhysicalDeviceLimits::maxViewports (=%" PRIu32 ").",
+                                        i, exclusive_scissor_struct->exclusiveScissorCount, device_limits.maxViewports);
+                    }
+
+                    if (shading_rate_image_struct && shading_rate_image_struct->viewportCount > device_limits.maxViewports) {
+                        skip |=
+                            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                    VK_NULL_HANDLE, "VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-viewportCount-02055",
+                                    "vkCreateGraphicsPipelines: pCreateInfos[%" PRIu32
+                                    "] VkPipelineViewportShadingRateImageStateCreateInfoNV viewportCount (=%" PRIu32
+                                    ") is greater than VkPhysicalDeviceLimits::maxViewports (=%" PRIu32 ").",
+                                    i, shading_rate_image_struct->viewportCount, device_limits.maxViewports);
+                    }
+
+                    if (viewport_state.scissorCount != viewport_state.viewportCount) {
+                        skip |=
+                            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                    VK_NULL_HANDLE, "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220",
+                                    "vkCreateGraphicsPipelines: pCreateInfos[%" PRIu32 "].pViewportState->scissorCount (=%" PRIu32
+                                    ") is not identical to pCreateInfos[%" PRIu32 "].pViewportState->viewportCount (=%" PRIu32 ").",
+                                    i, viewport_state.scissorCount, i, viewport_state.viewportCount);
+                    }
+
+                    if (exclusive_scissor_struct && exclusive_scissor_struct->exclusiveScissorCount != 0 &&
+                        exclusive_scissor_struct->exclusiveScissorCount != viewport_state.viewportCount) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                        VK_NULL_HANDLE,
+                                        "VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-exclusiveScissorCount-02029",
+                                        "vkCreateGraphicsPipelines: pCreateInfos[%" PRIu32 "] exclusiveScissorCount (=%" PRIu32
+                                        ") must be zero or identical to pCreateInfos[%" PRIu32
+                                        "].pViewportState->viewportCount (=%" PRIu32 ").",
+                                        i, exclusive_scissor_struct->exclusiveScissorCount, i, viewport_state.viewportCount);
+                    }
+
+                    if (shading_rate_image_struct && shading_rate_image_struct->shadingRateImageEnable &&
+                        shading_rate_image_struct->viewportCount != viewport_state.viewportCount) {
+                        skip |= log_msg(
+                            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, VK_NULL_HANDLE,
+                            "VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-shadingRateImageEnable-02056",
+                            "vkCreateGraphicsPipelines: If shadingRateImageEnable is enabled, pCreateInfos[%" PRIu32
+                            "] "
+                            "VkPipelineViewportShadingRateImageStateCreateInfoNV viewportCount (=%" PRIu32
+                            ") must be identical to pCreateInfos[%" PRIu32 "].pViewportState->viewportCount (=%" PRIu32 ").",
+                            i, shading_rate_image_struct->viewportCount, i, viewport_state.viewportCount);
+                    }
+
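+                    // When viewport, scissor, or their NV extension equivalents are not dynamic state, the corresponding
+                    // arrays must be valid pointers.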
+                    if (!has_dynamic_viewport && viewport_state.viewportCount > 0 && viewport_state.pViewports == nullptr) {
+                        skip |= log_msg(
+                            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, VK_NULL_HANDLE,
+                            "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747",
+                            "vkCreateGraphicsPipelines: The viewport state is static (pCreateInfos[%" PRIu32
+                            "].pDynamicState->pDynamicStates does not contain VK_DYNAMIC_STATE_VIEWPORT), but pCreateInfos[%" PRIu32
+                            "].pViewportState->pViewports (=NULL) is an invalid pointer.",
+                            i, i);
+                    }
+
+                    if (!has_dynamic_scissor && viewport_state.scissorCount > 0 && viewport_state.pScissors == nullptr) {
+                        skip |= log_msg(
+                            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, VK_NULL_HANDLE,
+                            "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748",
+                            "vkCreateGraphicsPipelines: The scissor state is static (pCreateInfos[%" PRIu32
+                            "].pDynamicState->pDynamicStates does not contain VK_DYNAMIC_STATE_SCISSOR), but pCreateInfos[%" PRIu32
+                            "].pViewportState->pScissors (=NULL) is an invalid pointer.",
+                            i, i);
+                    }
+
+                    if (!has_dynamic_exclusive_scissor_nv && exclusive_scissor_struct &&
+                        exclusive_scissor_struct->exclusiveScissorCount > 0 &&
+                        exclusive_scissor_struct->pExclusiveScissors == nullptr) {
+                        skip |=
+                            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                    VK_NULL_HANDLE, "VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-pDynamicStates-02030",
+                                    "vkCreateGraphicsPipelines: The exclusive scissor state is static (pCreateInfos[%" PRIu32
+                                    "].pDynamicState->pDynamicStates does not contain VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV), but "
+                                    "pCreateInfos[%" PRIu32 "] pExclusiveScissors (=NULL) is an invalid pointer.",
+                                    i, i);
+                    }
+
+                    if (!has_dynamic_shading_rate_palette_nv && shading_rate_image_struct &&
+                        shading_rate_image_struct->viewportCount > 0 &&
+                        shading_rate_image_struct->pShadingRatePalettes == nullptr) {
+                        skip |= log_msg(
+                            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, VK_NULL_HANDLE,
+                            "VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-pDynamicStates-02057",
+                            "vkCreateGraphicsPipelines: The shading rate palette state is static (pCreateInfos[%" PRIu32
+                            "].pDynamicState->pDynamicStates does not contain VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV), "
+                            "but pCreateInfos[%" PRIu32 "] pShadingRatePalettes (=NULL) is an invalid pointer.",
+                            i, i);
+                    }
+
+                    if (vp_swizzle_struct) {
+                        if (vp_swizzle_struct->viewportCount != viewport_state.viewportCount) {
+                            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                            VK_NULL_HANDLE, "VUID-VkPipelineViewportSwizzleStateCreateInfoNV-viewportCount-01215",
+                                            "vkCreateGraphicsPipelines: The viewport swizzle state viewport count of %" PRIu32
+                                            " does "
+                                            "not match the viewport count of %" PRIu32 " in VkPipelineViewportStateCreateInfo.",
+                                            vp_swizzle_struct->viewportCount, viewport_state.viewportCount);
+                        }
+                    }
+
+                    // validate the VkViewports
+                    if (!has_dynamic_viewport && viewport_state.pViewports) {
+                        for (uint32_t viewport_i = 0; viewport_i < viewport_state.viewportCount; ++viewport_i) {
+                            const auto &viewport = viewport_state.pViewports[viewport_i];  // will crash on invalid ptr
+                            const char *fn_name = "vkCreateGraphicsPipelines";
+                            skip |= manual_PreCallValidateViewport(viewport, fn_name,
+                                                                   ParameterName("pCreateInfos[%i].pViewportState->pViewports[%i]",
+                                                                                 ParameterName::IndexVector{i, viewport_i}),
+                                                                   VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT);
+                        }
+                    }
+
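+                    // Dynamic states defined by extensions may only be used when the corresponding device extension
+                    // is enabled.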
+                    if (has_dynamic_viewport_w_scaling_nv && !device_extensions.vk_nv_clip_space_w_scaling) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                        VK_NULL_HANDLE, kVUID_PVError_ExtensionNotEnabled,
+                                        "vkCreateGraphicsPipelines: pCreateInfos[%" PRIu32
+                                        "].pDynamicState->pDynamicStates contains VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV, but "
+                                        "VK_NV_clip_space_w_scaling extension is not enabled.",
+                                        i);
+                    }
+
+                    if (has_dynamic_discard_rectangle_ext && !device_extensions.vk_ext_discard_rectangles) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                        VK_NULL_HANDLE, kVUID_PVError_ExtensionNotEnabled,
+                                        "vkCreateGraphicsPipelines: pCreateInfos[%" PRIu32
+                                        "].pDynamicState->pDynamicStates contains VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT, but "
+                                        "VK_EXT_discard_rectangles extension is not enabled.",
+                                        i);
+                    }
+
+                    if (has_dynamic_sample_locations_ext && !device_extensions.vk_ext_sample_locations) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                        VK_NULL_HANDLE, kVUID_PVError_ExtensionNotEnabled,
+                                        "vkCreateGraphicsPipelines: pCreateInfos[%" PRIu32
+                                        "].pDynamicState->pDynamicStates contains VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, but "
+                                        "VK_EXT_sample_locations extension is not enabled.",
+                                        i);
+                    }
+
+                    if (has_dynamic_exclusive_scissor_nv && !device_extensions.vk_nv_scissor_exclusive) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                        VK_NULL_HANDLE, kVUID_PVError_ExtensionNotEnabled,
+                                        "vkCreateGraphicsPipelines: pCreateInfos[%" PRIu32
+                                        "].pDynamicState->pDynamicStates contains VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV, but "
+                                        "VK_NV_scissor_exclusive extension is not enabled.",
+                                        i);
+                    }
+
+                    if (coarse_sample_order_struct &&
+                        coarse_sample_order_struct->sampleOrderType != VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV &&
+                        coarse_sample_order_struct->customSampleOrderCount != 0) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                        VK_NULL_HANDLE,
+                                        "VUID-VkPipelineViewportCoarseSampleOrderStateCreateInfoNV-sampleOrderType-02072",
+                                        "vkCreateGraphicsPipelines: pCreateInfos[%" PRIu32
+                                        "] "
+                                        "VkPipelineViewportCoarseSampleOrderStateCreateInfoNV sampleOrderType is not "
+                                        "VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV and customSampleOrderCount is not 0.",
+                                        i);
+                    }
+
+                    if (coarse_sample_order_struct) {
+                        for (uint32_t order_i = 0; order_i < coarse_sample_order_struct->customSampleOrderCount; ++order_i) {
+                            skip |= ValidateCoarseSampleOrderCustomNV(&coarse_sample_order_struct->pCustomSampleOrders[order_i]);
+                        }
+                    }
+
+                    if (vp_w_scaling_struct && (vp_w_scaling_struct->viewportWScalingEnable == VK_TRUE)) {
+                        if (vp_w_scaling_struct->viewportCount != viewport_state.viewportCount) {
+                            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                            VK_NULL_HANDLE, "VUID-VkPipelineViewportStateCreateInfo-viewportWScalingEnable-01726",
+                                            "vkCreateGraphicsPipelines: pCreateInfos[%" PRIu32
+                                            "] "
+                                            "VkPipelineViewportWScalingStateCreateInfoNV.viewportCount (=%" PRIu32
+                                            ") "
+                                            "is not equal to VkPipelineViewportStateCreateInfo.viewportCount (=%" PRIu32 ").",
+                                            i, vp_w_scaling_struct->viewportCount, viewport_state.viewportCount);
+                        }
+                        if (!has_dynamic_viewport_w_scaling_nv && !vp_w_scaling_struct->pViewportWScalings) {
+                            skip |= log_msg(
+                                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                VK_NULL_HANDLE, "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-01715",
+                                "vkCreateGraphicsPipelines: pCreateInfos[%" PRIu32
+                                "] "
+                                "VkPipelineViewportWScalingStateCreateInfoNV.pViewportWScalings (=NULL) is not a valid array.",
+                                i);
+                        }
+                    }
+                }
+
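+                // pMultisampleState is required whenever rasterization is enabled; validate its pNext chain, flags,
+                // boolean fields, rasterizationSamples, and sample shading parameters.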
+                if (pCreateInfos[i].pMultisampleState == nullptr) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00751",
+                                    "vkCreateGraphicsPipelines: if pCreateInfos[%d].pRasterizationState->rasterizerDiscardEnable "
+                                    "is VK_FALSE, pCreateInfos[%d].pMultisampleState must not be NULL.",
+                                    i, i);
+                } else {
+                    const VkStructureType valid_next_stypes[] = {LvlTypeMap<VkPipelineCoverageModulationStateCreateInfoNV>::kSType,
+                                                                 LvlTypeMap<VkPipelineCoverageToColorStateCreateInfoNV>::kSType,
+                                                                 LvlTypeMap<VkPipelineSampleLocationsStateCreateInfoEXT>::kSType};
+                    const char *valid_struct_names =
+                        "VkPipelineCoverageModulationStateCreateInfoNV, VkPipelineCoverageToColorStateCreateInfoNV, "
+                        "VkPipelineSampleLocationsStateCreateInfoEXT";
+                    skip |= validate_struct_pnext(
+                        "vkCreateGraphicsPipelines",
+                        ParameterName("pCreateInfos[%i].pMultisampleState->pNext", ParameterName::IndexVector{i}),
+                        valid_struct_names, pCreateInfos[i].pMultisampleState->pNext, 3, valid_next_stypes,
+                        GeneratedVulkanHeaderVersion, "VUID-VkPipelineMultisampleStateCreateInfo-pNext-pNext");
+
+                    skip |= validate_reserved_flags(
+                        "vkCreateGraphicsPipelines",
+                        ParameterName("pCreateInfos[%i].pMultisampleState->flags", ParameterName::IndexVector{i}),
+                        pCreateInfos[i].pMultisampleState->flags, "VUID-VkPipelineMultisampleStateCreateInfo-flags-zerobitmask");
+
+                    skip |= validate_bool32(
+                        "vkCreateGraphicsPipelines",
+                        ParameterName("pCreateInfos[%i].pMultisampleState->sampleShadingEnable", ParameterName::IndexVector{i}),
+                        pCreateInfos[i].pMultisampleState->sampleShadingEnable);
+
+                    skip |= validate_array(
+                        "vkCreateGraphicsPipelines",
+                        ParameterName("pCreateInfos[%i].pMultisampleState->rasterizationSamples", ParameterName::IndexVector{i}),
+                        ParameterName("pCreateInfos[%i].pMultisampleState->pSampleMask", ParameterName::IndexVector{i}),
+                        pCreateInfos[i].pMultisampleState->rasterizationSamples, &pCreateInfos[i].pMultisampleState->pSampleMask,
+                        true, false, kVUIDUndefined, kVUIDUndefined);
+
+                    skip |= validate_flags(
+                        "vkCreateGraphicsPipelines",
+                        ParameterName("pCreateInfos[%i].pMultisampleState->rasterizationSamples", ParameterName::IndexVector{i}),
+                        "VkSampleCountFlagBits", AllVkSampleCountFlagBits, pCreateInfos[i].pMultisampleState->rasterizationSamples,
+                        kRequiredSingleBit, "VUID-VkPipelineMultisampleStateCreateInfo-rasterizationSamples-parameter");
+
+                    skip |= validate_bool32(
+                        "vkCreateGraphicsPipelines",
+                        ParameterName("pCreateInfos[%i].pMultisampleState->alphaToCoverageEnable", ParameterName::IndexVector{i}),
+                        pCreateInfos[i].pMultisampleState->alphaToCoverageEnable);
+
+                    skip |= validate_bool32(
+                        "vkCreateGraphicsPipelines",
+                        ParameterName("pCreateInfos[%i].pMultisampleState->alphaToOneEnable", ParameterName::IndexVector{i}),
+                        pCreateInfos[i].pMultisampleState->alphaToOneEnable);
+
+                    if (pCreateInfos[i].pMultisampleState->sType != VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        kVUID_PVError_InvalidStructSType,
+                                        "vkCreateGraphicsPipelines: parameter pCreateInfos[%d].pMultisampleState->sType must be "
+                                        "VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO",
+                                        i);
+                    }
+                    if (pCreateInfos[i].pMultisampleState->sampleShadingEnable == VK_TRUE) {
+                        if (!physical_device_features.sampleRateShading) {
+                            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                            "VUID-VkPipelineMultisampleStateCreateInfo-sampleShadingEnable-00784",
+                                            "vkCreateGraphicsPipelines(): parameter "
+                                            "pCreateInfos[%d].pMultisampleState->sampleShadingEnable is VK_TRUE, but the "
+                                            "sampleRateShading feature is not enabled.",
+                                            i);
+                        }
+                        // TODO Add documentation issue about when minSampleShading must be in range and when it is ignored
+                        // For now, apply the "least noise" range check only when sampleShadingEnable is VK_TRUE.
+                        if (!in_inclusive_range(pCreateInfos[i].pMultisampleState->minSampleShading, 0.F, 1.0F)) {
+                            skip |= log_msg(
+                                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkPipelineMultisampleStateCreateInfo-minSampleShading-00786",
+                                "vkCreateGraphicsPipelines(): parameter "
+                                "pCreateInfos[%d].pMultisampleState->minSampleShading must be in the range [0.0, 1.0].",
+                                i);
+                        }
+                    }
+
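+                    // VK_EXT_line_rasterization: Bresenham/smooth line modes are incompatible with alpha-to-coverage,
+                    // alpha-to-one, and sample shading; each line mode (with or without stipple) requires its feature bit.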
+                    const auto *line_state = lvl_find_in_chain<VkPipelineRasterizationLineStateCreateInfoEXT>(
+                        pCreateInfos[i].pRasterizationState->pNext);
+
+                    if (line_state) {
+                        if ((line_state->lineRasterizationMode == VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT ||
+                             line_state->lineRasterizationMode == VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT)) {
+                            if (pCreateInfos[i].pMultisampleState->alphaToCoverageEnable) {
+                                skip |=
+                                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                            "VUID-VkGraphicsPipelineCreateInfo-lineRasterizationMode-02766",
+                                            "vkCreateGraphicsPipelines(): Bresenham/Smooth line rasterization not supported with "
+                                            "pCreateInfos[%d].pMultisampleState->alphaToCoverageEnable == VK_TRUE.",
+                                            i);
+                            }
+                            if (pCreateInfos[i].pMultisampleState->alphaToOneEnable) {
+                                skip |=
+                                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                            "VUID-VkGraphicsPipelineCreateInfo-lineRasterizationMode-02766",
+                                            "vkCreateGraphicsPipelines(): Bresenham/Smooth line rasterization not supported with "
+                                            "pCreateInfos[%d].pMultisampleState->alphaToOneEnable == VK_TRUE.",
+                                            i);
+                            }
+                            if (pCreateInfos[i].pMultisampleState->sampleShadingEnable) {
+                                skip |=
+                                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                            "VUID-VkGraphicsPipelineCreateInfo-lineRasterizationMode-02766",
+                                            "vkCreateGraphicsPipelines(): Bresenham/Smooth line rasterization not supported with "
+                                            "pCreateInfos[%d].pMultisampleState->sampleShadingEnable == VK_TRUE.",
+                                            i);
+                            }
+                        }
+                        if (line_state->stippledLineEnable && !has_dynamic_line_stipple) {
+                            if (line_state->lineStippleFactor < 1 || line_state->lineStippleFactor > 256) {
+                                skip |=
+                                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                            "VUID-VkGraphicsPipelineCreateInfo-stippledLineEnable-02767",
+                                            "vkCreateGraphicsPipelines(): pCreateInfos[%d] lineStippleFactor = %d must be in the "
+                                            "range [1,256].",
+                                            i, line_state->lineStippleFactor);
+                            }
+                        }
+                        const auto *line_features =
+                            lvl_find_in_chain<VkPhysicalDeviceLineRasterizationFeaturesEXT>(physical_device_features2.pNext);
+                        if (line_state->lineRasterizationMode == VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT &&
+                            (!line_features || !line_features->rectangularLines)) {
+                            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                            "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-02768",
+                                            "vkCreateGraphicsPipelines(): pCreateInfos[%d] lineRasterizationMode = "
+                                            "VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT requires the rectangularLines feature.",
+                                            i);
+                        }
+                        if (line_state->lineRasterizationMode == VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT &&
+                            (!line_features || !line_features->bresenhamLines)) {
+                            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                            "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-02769",
+                                            "vkCreateGraphicsPipelines(): pCreateInfos[%d] lineRasterizationMode = "
+                                            "VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT requires the bresenhamLines feature.",
+                                            i);
+                        }
+                        if (line_state->lineRasterizationMode == VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT &&
+                            (!line_features || !line_features->smoothLines)) {
+                            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                            "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-02770",
+                                            "vkCreateGraphicsPipelines(): pCreateInfos[%d] lineRasterizationMode = "
+                                            "VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT requires the smoothLines feature.",
+                                            i);
+                        }
+                        if (line_state->stippledLineEnable) {
+                            if (line_state->lineRasterizationMode == VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT &&
+                                (!line_features || !line_features->stippledRectangularLines)) {
+                                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+                                                0, "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02771",
+                                                "vkCreateGraphicsPipelines(): pCreateInfos[%d] lineRasterizationMode = "
+                                                "VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT with stipple requires the "
+                                                "stippledRectangularLines feature.",
+                                                i);
+                            }
+                            if (line_state->lineRasterizationMode == VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT &&
+                                (!line_features || !line_features->stippledBresenhamLines)) {
+                                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+                                                0, "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02772",
+                                                "vkCreateGraphicsPipelines(): pCreateInfos[%d] lineRasterizationMode = "
+                                                "VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT with stipple requires the "
+                                                "stippledBresenhamLines feature.",
+                                                i);
+                            }
+                            if (line_state->lineRasterizationMode == VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT &&
+                                (!line_features || !line_features->stippledSmoothLines)) {
+                                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+                                                0, "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02773",
+                                                "vkCreateGraphicsPipelines(): pCreateInfos[%d] lineRasterizationMode = "
+                                                "VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT with stipple requires the "
+                                                "stippledSmoothLines feature.",
+                                                i);
+                            }
+                            if (line_state->lineRasterizationMode == VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT &&
+                                (!line_features || !line_features->stippledRectangularLines || !device_limits.strictLines)) {
+                                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+                                                0, "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02774",
+                                                "vkCreateGraphicsPipelines(): pCreateInfos[%d] lineRasterizationMode = "
+                                                "VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT with stipple requires the "
+                                                "stippledRectangularLines and strictLines features.",
+                                                i);
+                            }
+                        }
+                    }
+                }
+
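+                // Look up whether the referenced subpass uses color and/or depth/stencil attachments;
+                // pDepthStencilState and pColorBlendState are only validated when they are actually consumed.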
+                bool uses_color_attachment = false;
+                bool uses_depthstencil_attachment = false;
+                {
+                    std::unique_lock<std::mutex> lock(renderpass_map_mutex);
+                    const auto subpasses_uses_it = renderpasses_states.find(pCreateInfos[i].renderPass);
+                    if (subpasses_uses_it != renderpasses_states.end()) {
+                        const auto &subpasses_uses = subpasses_uses_it->second;
+                        if (subpasses_uses.subpasses_using_color_attachment.count(pCreateInfos[i].subpass))
+                            uses_color_attachment = true;
+                        if (subpasses_uses.subpasses_using_depthstencil_attachment.count(pCreateInfos[i].subpass))
+                            uses_depthstencil_attachment = true;
+                    }
+                    lock.unlock();
+                }
+
+                if (pCreateInfos[i].pDepthStencilState != nullptr && uses_depthstencil_attachment) {
+                    skip |= validate_struct_pnext(
+                        "vkCreateGraphicsPipelines",
+                        ParameterName("pCreateInfos[%i].pDepthStencilState->pNext", ParameterName::IndexVector{i}), NULL,
+                        pCreateInfos[i].pDepthStencilState->pNext, 0, NULL, GeneratedVulkanHeaderVersion,
+                        "VUID-VkPipelineDepthStencilStateCreateInfo-pNext-pNext");
+
+                    skip |= validate_reserved_flags(
+                        "vkCreateGraphicsPipelines",
+                        ParameterName("pCreateInfos[%i].pDepthStencilState->flags", ParameterName::IndexVector{i}),
+                        pCreateInfos[i].pDepthStencilState->flags, "VUID-VkPipelineDepthStencilStateCreateInfo-flags-zerobitmask");
+
+                    skip |= validate_bool32(
+                        "vkCreateGraphicsPipelines",
+                        ParameterName("pCreateInfos[%i].pDepthStencilState->depthTestEnable", ParameterName::IndexVector{i}),
+                        pCreateInfos[i].pDepthStencilState->depthTestEnable);
+
+                    skip |= validate_bool32(
+                        "vkCreateGraphicsPipelines",
+                        ParameterName("pCreateInfos[%i].pDepthStencilState->depthWriteEnable", ParameterName::IndexVector{i}),
+                        pCreateInfos[i].pDepthStencilState->depthWriteEnable);
+
+                    skip |= validate_ranged_enum(
+                        "vkCreateGraphicsPipelines",
+                        ParameterName("pCreateInfos[%i].pDepthStencilState->depthCompareOp", ParameterName::IndexVector{i}),
+                        "VkCompareOp", AllVkCompareOpEnums, pCreateInfos[i].pDepthStencilState->depthCompareOp,
+                        "VUID-VkPipelineDepthStencilStateCreateInfo-depthCompareOp-parameter");
+
+                    skip |= validate_bool32(
+                        "vkCreateGraphicsPipelines",
+                        ParameterName("pCreateInfos[%i].pDepthStencilState->depthBoundsTestEnable", ParameterName::IndexVector{i}),
+                        pCreateInfos[i].pDepthStencilState->depthBoundsTestEnable);
+
+                    skip |= validate_bool32(
+                        "vkCreateGraphicsPipelines",
+                        ParameterName("pCreateInfos[%i].pDepthStencilState->stencilTestEnable", ParameterName::IndexVector{i}),
+                        pCreateInfos[i].pDepthStencilState->stencilTestEnable);
+
+                    skip |= validate_ranged_enum(
+                        "vkCreateGraphicsPipelines",
+                        ParameterName("pCreateInfos[%i].pDepthStencilState->front.failOp", ParameterName::IndexVector{i}),
+                        "VkStencilOp", AllVkStencilOpEnums, pCreateInfos[i].pDepthStencilState->front.failOp,
+                        "VUID-VkStencilOpState-failOp-parameter");
+
+                    skip |= validate_ranged_enum(
+                        "vkCreateGraphicsPipelines",
+                        ParameterName("pCreateInfos[%i].pDepthStencilState->front.passOp", ParameterName::IndexVector{i}),
+                        "VkStencilOp", AllVkStencilOpEnums, pCreateInfos[i].pDepthStencilState->front.passOp,
+                        "VUID-VkStencilOpState-passOp-parameter");
+
+                    skip |= validate_ranged_enum(
+                        "vkCreateGraphicsPipelines",
+                        ParameterName("pCreateInfos[%i].pDepthStencilState->front.depthFailOp", ParameterName::IndexVector{i}),
+                        "VkStencilOp", AllVkStencilOpEnums, pCreateInfos[i].pDepthStencilState->front.depthFailOp,
+                        "VUID-VkStencilOpState-depthFailOp-parameter");
+
+                    skip |= validate_ranged_enum(
+                        "vkCreateGraphicsPipelines",
+                        ParameterName("pCreateInfos[%i].pDepthStencilState->front.compareOp", ParameterName::IndexVector{i}),
+                        "VkCompareOp", AllVkCompareOpEnums, pCreateInfos[i].pDepthStencilState->front.compareOp,
+                        "VUID-VkPipelineDepthStencilStateCreateInfo-depthCompareOp-parameter");
+
+                    skip |= validate_ranged_enum(
+                        "vkCreateGraphicsPipelines",
+                        ParameterName("pCreateInfos[%i].pDepthStencilState->back.failOp", ParameterName::IndexVector{i}),
+                        "VkStencilOp", AllVkStencilOpEnums, pCreateInfos[i].pDepthStencilState->back.failOp,
+                        "VUID-VkStencilOpState-failOp-parameter");
+
+                    skip |= validate_ranged_enum(
+                        "vkCreateGraphicsPipelines",
+                        ParameterName("pCreateInfos[%i].pDepthStencilState->back.passOp", ParameterName::IndexVector{i}),
+                        "VkStencilOp", AllVkStencilOpEnums, pCreateInfos[i].pDepthStencilState->back.passOp,
+                        "VUID-VkStencilOpState-passOp-parameter");
+
+                    skip |= validate_ranged_enum(
+                        "vkCreateGraphicsPipelines",
+                        ParameterName("pCreateInfos[%i].pDepthStencilState->back.depthFailOp", ParameterName::IndexVector{i}),
+                        "VkStencilOp", AllVkStencilOpEnums, pCreateInfos[i].pDepthStencilState->back.depthFailOp,
+                        "VUID-VkStencilOpState-depthFailOp-parameter");
+
+                    skip |= validate_ranged_enum(
+                        "vkCreateGraphicsPipelines",
+                        ParameterName("pCreateInfos[%i].pDepthStencilState->back.compareOp", ParameterName::IndexVector{i}),
+                        "VkCompareOp", AllVkCompareOpEnums, pCreateInfos[i].pDepthStencilState->back.compareOp,
+                        "VUID-VkPipelineDepthStencilStateCreateInfo-depthCompareOp-parameter");
+
+                    if (pCreateInfos[i].pDepthStencilState->sType != VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        kVUID_PVError_InvalidStructSType,
+                                        "vkCreateGraphicsPipelines: parameter pCreateInfos[%d].pDepthStencilState->sType must be "
+                                        "VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO",
+                                        i);
+                    }
+                }
+
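+                // Color blend state: validate sType, the pNext chain (advanced blend), flags, logicOpEnable, and the
+                // per-attachment blend factors and ops.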
+                const VkStructureType allowed_structs_VkPipelineColorBlendStateCreateInfo[] = {
+                    VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT};
+
+                if (pCreateInfos[i].pColorBlendState != nullptr && uses_color_attachment) {
+                    skip |= validate_struct_type("vkCreateGraphicsPipelines",
+                                                 ParameterName("pCreateInfos[%i].pColorBlendState", ParameterName::IndexVector{i}),
+                                                 "VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO",
+                                                 pCreateInfos[i].pColorBlendState,
+                                                 VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO, false, kVUIDUndefined,
+                                                 "VUID-VkPipelineColorBlendStateCreateInfo-sType-sType");
+
+                    skip |= validate_struct_pnext(
+                        "vkCreateGraphicsPipelines",
+                        ParameterName("pCreateInfos[%i].pColorBlendState->pNext", ParameterName::IndexVector{i}),
+                        "VkPipelineColorBlendAdvancedStateCreateInfoEXT", pCreateInfos[i].pColorBlendState->pNext,
+                        ARRAY_SIZE(allowed_structs_VkPipelineColorBlendStateCreateInfo),
+                        allowed_structs_VkPipelineColorBlendStateCreateInfo, GeneratedVulkanHeaderVersion,
+                        "VUID-VkPipelineColorBlendStateCreateInfo-pNext-pNext");
+
+                    skip |= validate_reserved_flags(
+                        "vkCreateGraphicsPipelines",
+                        ParameterName("pCreateInfos[%i].pColorBlendState->flags", ParameterName::IndexVector{i}),
+                        pCreateInfos[i].pColorBlendState->flags, "VUID-VkPipelineColorBlendStateCreateInfo-flags-zerobitmask");
+
+                    skip |= validate_bool32(
+                        "vkCreateGraphicsPipelines",
+                        ParameterName("pCreateInfos[%i].pColorBlendState->logicOpEnable", ParameterName::IndexVector{i}),
+                        pCreateInfos[i].pColorBlendState->logicOpEnable);
+
+                    skip |= validate_array(
+                        "vkCreateGraphicsPipelines",
+                        ParameterName("pCreateInfos[%i].pColorBlendState->attachmentCount", ParameterName::IndexVector{i}),
+                        ParameterName("pCreateInfos[%i].pColorBlendState->pAttachments", ParameterName::IndexVector{i}),
+                        pCreateInfos[i].pColorBlendState->attachmentCount, &pCreateInfos[i].pColorBlendState->pAttachments, false,
+                        true, kVUIDUndefined, kVUIDUndefined);
+
+                    if (pCreateInfos[i].pColorBlendState->pAttachments != NULL) {
+                        for (uint32_t attachmentIndex = 0; attachmentIndex < pCreateInfos[i].pColorBlendState->attachmentCount;
+                             ++attachmentIndex) {
+                            skip |= validate_bool32("vkCreateGraphicsPipelines",
+                                                    ParameterName("pCreateInfos[%i].pColorBlendState->pAttachments[%i].blendEnable",
+                                                                  ParameterName::IndexVector{i, attachmentIndex}),
+                                                    pCreateInfos[i].pColorBlendState->pAttachments[attachmentIndex].blendEnable);
+
+                            skip |= validate_ranged_enum(
+                                "vkCreateGraphicsPipelines",
+                                ParameterName("pCreateInfos[%i].pColorBlendState->pAttachments[%i].srcColorBlendFactor",
+                                              ParameterName::IndexVector{i, attachmentIndex}),
+                                "VkBlendFactor", AllVkBlendFactorEnums,
+                                pCreateInfos[i].pColorBlendState->pAttachments[attachmentIndex].srcColorBlendFactor,
+                                "VUID-VkPipelineColorBlendAttachmentState-srcColorBlendFactor-parameter");
+
+                            skip |= validate_ranged_enum(
+                                "vkCreateGraphicsPipelines",
+                                ParameterName("pCreateInfos[%i].pColorBlendState->pAttachments[%i].dstColorBlendFactor",
+                                              ParameterName::IndexVector{i, attachmentIndex}),
+                                "VkBlendFactor", AllVkBlendFactorEnums,
+                                pCreateInfos[i].pColorBlendState->pAttachments[attachmentIndex].dstColorBlendFactor,
+                                "VUID-VkPipelineColorBlendAttachmentState-dstColorBlendFactor-parameter");
+
+                            skip |= validate_ranged_enum(
+                                "vkCreateGraphicsPipelines",
+                                ParameterName("pCreateInfos[%i].pColorBlendState->pAttachments[%i].colorBlendOp",
+                                              ParameterName::IndexVector{i, attachmentIndex}),
+                                "VkBlendOp", AllVkBlendOpEnums,
+                                pCreateInfos[i].pColorBlendState->pAttachments[attachmentIndex].colorBlendOp,
+                                "VUID-VkPipelineColorBlendAttachmentState-colorBlendOp-parameter");
+
+                            skip |= validate_ranged_enum(
+                                "vkCreateGraphicsPipelines",
+                                ParameterName("pCreateInfos[%i].pColorBlendState->pAttachments[%i].srcAlphaBlendFactor",
+                                              ParameterName::IndexVector{i, attachmentIndex}),
+                                "VkBlendFactor", AllVkBlendFactorEnums,
+                                pCreateInfos[i].pColorBlendState->pAttachments[attachmentIndex].srcAlphaBlendFactor,
+                                "VUID-VkPipelineColorBlendAttachmentState-srcAlphaBlendFactor-parameter");
+
+                            skip |= validate_ranged_enum(
+                                "vkCreateGraphicsPipelines",
+                                ParameterName("pCreateInfos[%i].pColorBlendState->pAttachments[%i].dstAlphaBlendFactor",
+                                              ParameterName::IndexVector{i, attachmentIndex}),
+                                "VkBlendFactor", AllVkBlendFactorEnums,
+                                pCreateInfos[i].pColorBlendState->pAttachments[attachmentIndex].dstAlphaBlendFactor,
+                                "VUID-VkPipelineColorBlendAttachmentState-dstAlphaBlendFactor-parameter");
+
+                            skip |= validate_ranged_enum(
+                                "vkCreateGraphicsPipelines",
+                                ParameterName("pCreateInfos[%i].pColorBlendState->pAttachments[%i].alphaBlendOp",
+                                              ParameterName::IndexVector{i, attachmentIndex}),
+                                "VkBlendOp", AllVkBlendOpEnums,
+                                pCreateInfos[i].pColorBlendState->pAttachments[attachmentIndex].alphaBlendOp,
+                                "VUID-VkPipelineColorBlendAttachmentState-alphaBlendOp-parameter");
+
+                            skip |=
+                                validate_flags("vkCreateGraphicsPipelines",
+                                               ParameterName("pCreateInfos[%i].pColorBlendState->pAttachments[%i].colorWriteMask",
+                                                             ParameterName::IndexVector{i, attachmentIndex}),
+                                               "VkColorComponentFlagBits", AllVkColorComponentFlagBits,
+                                               pCreateInfos[i].pColorBlendState->pAttachments[attachmentIndex].colorWriteMask,
+                                               kOptionalFlags, "VUID-VkPipelineColorBlendAttachmentState-colorWriteMask-parameter");
+                        }
+                    }
+
+                    if (pCreateInfos[i].pColorBlendState->sType != VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        kVUID_PVError_InvalidStructSType,
+                                        "vkCreateGraphicsPipelines: parameter pCreateInfos[%d].pColorBlendState->sType must be "
+                                        "VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO",
+                                        i);
+                    }
+
+                    // If logicOpEnable is VK_TRUE, logicOp must be a valid VkLogicOp value
+                    if (pCreateInfos[i].pColorBlendState->logicOpEnable == VK_TRUE) {
+                        skip |= validate_ranged_enum(
+                            "vkCreateGraphicsPipelines",
+                            ParameterName("pCreateInfos[%i].pColorBlendState->logicOp", ParameterName::IndexVector{i}), "VkLogicOp",
+                            AllVkLogicOpEnums, pCreateInfos[i].pColorBlendState->logicOp,
+                            "VUID-VkPipelineColorBlendStateCreateInfo-logicOpEnable-00607");
+                    }
+                }
+            }
+
+            if (pCreateInfos[i].flags & VK_PIPELINE_CREATE_DERIVATIVE_BIT) {
+                if (pCreateInfos[i].basePipelineIndex != -1) {
+                    if (pCreateInfos[i].basePipelineHandle != VK_NULL_HANDLE) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        "VUID-VkGraphicsPipelineCreateInfo-flags-00724",
+                                        "vkCreateGraphicsPipelines parameter, pCreateInfos->basePipelineHandle, must be "
+                                        "VK_NULL_HANDLE if pCreateInfos->flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag "
+                                        "and pCreateInfos->basePipelineIndex is not -1.");
+                    }
+                }
+
+                if (pCreateInfos[i].basePipelineHandle != VK_NULL_HANDLE) {
+                    if (pCreateInfos[i].basePipelineIndex != -1) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        "VUID-VkGraphicsPipelineCreateInfo-flags-00725",
+                                        "vkCreateGraphicsPipelines parameter, pCreateInfos->basePipelineIndex, must be -1 if "
+                                        "pCreateInfos->flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag and "
+                                        "pCreateInfos->basePipelineHandle is not VK_NULL_HANDLE.");
+                    }
+                } else {
+                    if (static_cast<uint32_t>(pCreateInfos[i].basePipelineIndex) >= createInfoCount) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        "VUID-VkGraphicsPipelineCreateInfo-flags-00723",
+                                        "vkCreateGraphicsPipelines parameter pCreateInfos->basePipelineIndex (%d) must be a valid "
+                                        "index into the pCreateInfos array, of size %d.",
+                                        pCreateInfos[i].basePipelineIndex, createInfoCount);
+                    }
+                }
+            }
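+            // Illustrative sketch (placeholder names, not code from this file): the two valid ways a
+            // caller can request a derivative pipeline under the checks above:
+            //   createInfo.flags |= VK_PIPELINE_CREATE_DERIVATIVE_BIT;
+            //   // (a) derive by index into the same pCreateInfos array:
+            //   createInfo.basePipelineIndex  = 0;                // must be < createInfoCount (00723)
+            //   createInfo.basePipelineHandle = VK_NULL_HANDLE;   // required while index != -1 (00724)
+            //   // (b) or derive from an already-created pipeline:
+            //   createInfo.basePipelineHandle = existingPipeline;
+            //   createInfo.basePipelineIndex  = -1;               // required while handle != VK_NULL_HANDLE (00725)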
+
+            if (pCreateInfos[i].pRasterizationState) {
+                if (!device_extensions.vk_nv_fill_rectangle) {
+                    if (pCreateInfos[i].pRasterizationState->polygonMode == VK_POLYGON_MODE_FILL_RECTANGLE_NV) {
+                        skip |=
+                            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    "VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01414",
+                                    "vkCreateGraphicsPipelines parameter, VkPolygonMode "
+                                    "pCreateInfos->pRasterizationState->polygonMode cannot be VK_POLYGON_MODE_FILL_RECTANGLE_NV "
+                                    "if the extension VK_NV_fill_rectangle is not enabled.");
+                    } else if ((pCreateInfos[i].pRasterizationState->polygonMode != VK_POLYGON_MODE_FILL) &&
+                               (physical_device_features.fillModeNonSolid == false)) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        kVUID_PVError_DeviceFeature,
+                                        "vkCreateGraphicsPipelines parameter, VkPolygonMode "
+                                        "pCreateInfos->pRasterizationState->polygonMode cannot be VK_POLYGON_MODE_POINT or "
+                                        "VK_POLYGON_MODE_LINE if VkPhysicalDeviceFeatures->fillModeNonSolid is false.");
+                    }
+                } else {
+                    if ((pCreateInfos[i].pRasterizationState->polygonMode != VK_POLYGON_MODE_FILL) &&
+                        (pCreateInfos[i].pRasterizationState->polygonMode != VK_POLYGON_MODE_FILL_RECTANGLE_NV) &&
+                        (physical_device_features.fillModeNonSolid == false)) {
+                        skip |=
+                            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    "VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01507",
+                                    "vkCreateGraphicsPipelines parameter, VkPolygonMode "
+                                    "pCreateInfos->pRasterizationState->polygonMode must be VK_POLYGON_MODE_FILL or "
+                                    "VK_POLYGON_MODE_FILL_RECTANGLE_NV if VkPhysicalDeviceFeatures->fillModeNonSolid is false.");
+                    }
+                }
+
+                if (!has_dynamic_line_width && !physical_device_features.wideLines &&
+                    (pCreateInfos[i].pRasterizationState->lineWidth != 1.0f)) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, 0,
+                                    "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00749",
+                                    "The line width state is static (pCreateInfos[%" PRIu32
+                                    "].pDynamicState->pDynamicStates does not contain VK_DYNAMIC_STATE_LINE_WIDTH) and "
+                                    "VkPhysicalDeviceFeatures::wideLines is disabled, but pCreateInfos[%" PRIu32
+                                    "].pRasterizationState->lineWidth (=%f) is not 1.0.",
+                                    i, i, pCreateInfos[i].pRasterizationState->lineWidth);
+                }
+            }
+
+            for (size_t j = 0; j < pCreateInfos[i].stageCount; j++) {
+                skip |= validate_string("vkCreateGraphicsPipelines",
+                                        ParameterName("pCreateInfos[%i].pStages[%i].pName", ParameterName::IndexVector{i, j}),
+                                        "VUID-VkGraphicsPipelineCreateInfo-pStages-parameter", pCreateInfos[i].pStages[j].pName);
+            }
+        }
+    }
+
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache,
+                                                                       uint32_t createInfoCount,
+                                                                       const VkComputePipelineCreateInfo *pCreateInfos,
+                                                                       const VkAllocationCallbacks *pAllocator,
+                                                                       VkPipeline *pPipelines) const {
+    bool skip = false;
+    for (uint32_t i = 0; i < createInfoCount; i++) {
+        skip |= validate_string("vkCreateComputePipelines",
+                                ParameterName("pCreateInfos[%i].stage.pName", ParameterName::IndexVector{i}),
+                                "VUID-VkPipelineShaderStageCreateInfo-pName-parameter", pCreateInfos[i].stage.pName);
+        auto feedback_struct = lvl_find_in_chain<VkPipelineCreationFeedbackCreateInfoEXT>(pCreateInfos[i].pNext);
+        if ((feedback_struct != nullptr) && (feedback_struct->pipelineStageCreationFeedbackCount != 1)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, VK_NULL_HANDLE,
+                            "VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02669",
+                            "vkCreateComputePipelines(): in pCreateInfos[%" PRIu32
+                            "], VkPipelineCreationFeedbackCreateInfoEXT::pipelineStageCreationFeedbackCount must equal 1, found %" PRIu32 ".",
+                            i, feedback_struct->pipelineStageCreationFeedbackCount);
+        }
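+        // Illustrative sketch (placeholder names, assumed VK_EXT_pipeline_creation_feedback usage): a
+        // compute pipeline has exactly one shader stage, so the chained feedback count must be 1 (02669):
+        //   VkPipelineCreationFeedbackEXT whole_feedback = {};
+        //   VkPipelineCreationFeedbackEXT stage_feedback = {};
+        //   VkPipelineCreationFeedbackCreateInfoEXT feedback_info = {
+        //       VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT};
+        //   feedback_info.pPipelineCreationFeedback = &whole_feedback;
+        //   feedback_info.pipelineStageCreationFeedbackCount = 1;
+        //   feedback_info.pPipelineStageCreationFeedbacks = &stage_feedback;
+        //   computeCreateInfo.pNext = &feedback_info;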
+    }
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
+                                                              const VkAllocationCallbacks *pAllocator, VkSampler *pSampler) const {
+    bool skip = false;
+
+    if (pCreateInfo != nullptr) {
+        const auto &features = physical_device_features;
+        const auto &limits = device_limits;
+
+        if (pCreateInfo->anisotropyEnable == VK_TRUE) {
+            if (!in_inclusive_range(pCreateInfo->maxAnisotropy, 1.0F, limits.maxSamplerAnisotropy)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkSamplerCreateInfo-anisotropyEnable-01071",
+                                "vkCreateSampler(): value of %s must be in the range [1.0, %f] given by %s, but %f was found.",
+                                "pCreateInfo->maxAnisotropy", limits.maxSamplerAnisotropy,
+                                "VkPhysicalDeviceLimits::maxSamplerAnisotropy", pCreateInfo->maxAnisotropy);
+            }
+
+            // Anisotropy cannot be enabled in the sampler unless it is also enabled as a device feature
+            if (features.samplerAnisotropy == VK_FALSE) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkSamplerCreateInfo-anisotropyEnable-01070",
+                                "vkCreateSampler(): Anisotropic sampling feature is not enabled, %s must be VK_FALSE.",
+                                "pCreateInfo->anisotropyEnable");
+            }
+        }
+
+        if (pCreateInfo->unnormalizedCoordinates == VK_TRUE) {
+            if (pCreateInfo->minFilter != pCreateInfo->magFilter) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01072",
+                                "vkCreateSampler(): when pCreateInfo->unnormalizedCoordinates is VK_TRUE, "
+                                "pCreateInfo->minFilter (%s) and pCreateInfo->magFilter (%s) must be equal.",
+                                string_VkFilter(pCreateInfo->minFilter), string_VkFilter(pCreateInfo->magFilter));
+            }
+            if (pCreateInfo->mipmapMode != VK_SAMPLER_MIPMAP_MODE_NEAREST) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01073",
+                                "vkCreateSampler(): when pCreateInfo->unnormalizedCoordinates is VK_TRUE, "
+                                "pCreateInfo->mipmapMode (%s) must be VK_SAMPLER_MIPMAP_MODE_NEAREST.",
+                                string_VkSamplerMipmapMode(pCreateInfo->mipmapMode));
+            }
+            if (pCreateInfo->minLod != 0.0f || pCreateInfo->maxLod != 0.0f) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01074",
+                                "vkCreateSampler(): when pCreateInfo->unnormalizedCoordinates is VK_TRUE, "
+                                "pCreateInfo->minLod (%f) and pCreateInfo->maxLod (%f) must both be zero.",
+                                pCreateInfo->minLod, pCreateInfo->maxLod);
+            }
+            if ((pCreateInfo->addressModeU != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE &&
+                 pCreateInfo->addressModeU != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER) ||
+                (pCreateInfo->addressModeV != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE &&
+                 pCreateInfo->addressModeV != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01075",
+                                "vkCreateSampler(): when pCreateInfo->unnormalizedCoordinates is VK_TRUE, "
+                                "pCreateInfo->addressModeU (%s) and pCreateInfo->addressModeV (%s) must both be "
+                                "VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE or VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER.",
+                                string_VkSamplerAddressMode(pCreateInfo->addressModeU),
+                                string_VkSamplerAddressMode(pCreateInfo->addressModeV));
+            }
+            if (pCreateInfo->anisotropyEnable == VK_TRUE) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01076",
+                                "vkCreateSampler(): pCreateInfo->anisotropyEnable and pCreateInfo->unnormalizedCoordinates must "
+                                "not both be VK_TRUE.");
+            }
+            if (pCreateInfo->compareEnable == VK_TRUE) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01077",
+                                "vkCreateSampler(): pCreateInfo->compareEnable and pCreateInfo->unnormalizedCoordinates must "
+                                "not both be VK_TRUE.");
+            }
+        }
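+        // Illustrative sketch (placeholder 'ci', not code from this file): a VkSamplerCreateInfo that
+        // satisfies the unnormalizedCoordinates rules checked above (01072-01077):
+        //   VkSamplerCreateInfo ci = {VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO};
+        //   ci.magFilter = ci.minFilter = VK_FILTER_NEAREST;                             // 01072
+        //   ci.mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST;                              // 01073
+        //   ci.minLod = ci.maxLod = 0.0f;                                                // 01074
+        //   ci.addressModeU = ci.addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;   // 01075
+        //   ci.anisotropyEnable = VK_FALSE;                                              // 01076
+        //   ci.compareEnable = VK_FALSE;                                                 // 01077
+        //   ci.unnormalizedCoordinates = VK_TRUE;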
+
+        // If compareEnable is VK_TRUE, compareOp must be a valid VkCompareOp value
+        if (pCreateInfo->compareEnable == VK_TRUE) {
+            skip |= validate_ranged_enum("vkCreateSampler", "pCreateInfo->compareOp", "VkCompareOp", AllVkCompareOpEnums,
+                                         pCreateInfo->compareOp, "VUID-VkSamplerCreateInfo-compareEnable-01080");
+        }
+
+        // If any of addressModeU, addressModeV or addressModeW are VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER, borderColor must be a
+        // valid VkBorderColor value
+        if ((pCreateInfo->addressModeU == VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER) ||
+            (pCreateInfo->addressModeV == VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER) ||
+            (pCreateInfo->addressModeW == VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER)) {
+            skip |= validate_ranged_enum("vkCreateSampler", "pCreateInfo->borderColor", "VkBorderColor", AllVkBorderColorEnums,
+                                         pCreateInfo->borderColor, "VUID-VkSamplerCreateInfo-addressModeU-01078");
+        }
+
+        // If any of addressModeU, addressModeV or addressModeW are VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE, the
+        // VK_KHR_sampler_mirror_clamp_to_edge extension must be enabled
+        if (!device_extensions.vk_khr_sampler_mirror_clamp_to_edge &&
+            ((pCreateInfo->addressModeU == VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE) ||
+             (pCreateInfo->addressModeV == VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE) ||
+             (pCreateInfo->addressModeW == VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE))) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkSamplerCreateInfo-addressModeU-01079",
+                        "vkCreateSampler(): A VkSamplerAddressMode value is set to VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE "
+                        "but the VK_KHR_sampler_mirror_clamp_to_edge extension has not been enabled.");
+        }
+
+        // Checks for the IMG cubic filtering extension
+        if (device_extensions.vk_img_filter_cubic) {
+            if ((pCreateInfo->anisotropyEnable == VK_TRUE) &&
+                ((pCreateInfo->minFilter == VK_FILTER_CUBIC_IMG) || (pCreateInfo->magFilter == VK_FILTER_CUBIC_IMG))) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkSamplerCreateInfo-magFilter-01081",
+                                "vkCreateSampler(): pCreateInfo->anisotropyEnable must not be VK_TRUE when either minFilter or "
+                                "magFilter is VK_FILTER_CUBIC_IMG.");
+            }
+        }
+    }
+
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCreateDescriptorSetLayout(VkDevice device,
+                                                                          const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
+                                                                          const VkAllocationCallbacks *pAllocator,
+                                                                          VkDescriptorSetLayout *pSetLayout) const {
+    bool skip = false;
+
+    // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml
+    if ((pCreateInfo != nullptr) && (pCreateInfo->pBindings != nullptr)) {
+        for (uint32_t i = 0; i < pCreateInfo->bindingCount; ++i) {
+            if (pCreateInfo->pBindings[i].descriptorCount != 0) {
+                // If descriptorType is VK_DESCRIPTOR_TYPE_SAMPLER or VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, and descriptorCount
+                // is not 0 and pImmutableSamplers is not NULL, pImmutableSamplers must be a pointer to an array of descriptorCount
+                // valid VkSampler handles
+                if (((pCreateInfo->pBindings[i].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) ||
+                     (pCreateInfo->pBindings[i].descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)) &&
+                    (pCreateInfo->pBindings[i].pImmutableSamplers != nullptr)) {
+                    for (uint32_t descriptor_index = 0; descriptor_index < pCreateInfo->pBindings[i].descriptorCount;
+                         ++descriptor_index) {
+                        if (pCreateInfo->pBindings[i].pImmutableSamplers[descriptor_index] == VK_NULL_HANDLE) {
+                            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                            kVUID_PVError_RequiredParameter,
+                                            "vkCreateDescriptorSetLayout: required parameter "
+                                            "pCreateInfo->pBindings[%d].pImmutableSamplers[%d] specified as VK_NULL_HANDLE",
+                                            i, descriptor_index);
+                        }
+                    }
+                }
+
+                // If descriptorCount is not 0, stageFlags must be a valid combination of VkShaderStageFlagBits values
+                if ((pCreateInfo->pBindings[i].stageFlags != 0) &&
+                    ((pCreateInfo->pBindings[i].stageFlags & (~AllVkShaderStageFlagBits)) != 0)) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    "VUID-VkDescriptorSetLayoutBinding-descriptorCount-00283",
+                                    "vkCreateDescriptorSetLayout(): if pCreateInfo->pBindings[%d].descriptorCount is not 0, "
+                                    "pCreateInfo->pBindings[%d].stageFlags must be a valid combination of VkShaderStageFlagBits "
+                                    "values.",
+                                    i, i);
+                }
+            }
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool,
+                                                                   uint32_t descriptorSetCount,
+                                                                   const VkDescriptorSet *pDescriptorSets) const {
+    // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml
+    // This is an array of handles, where the elements are allowed to be VK_NULL_HANDLE, and does not require any validation beyond
+    // validate_array()
+    return validate_array("vkFreeDescriptorSets", "descriptorSetCount", "pDescriptorSets", descriptorSetCount, &pDescriptorSets,
+                          true, true, kVUIDUndefined, kVUIDUndefined);
+}
+
+bool StatelessValidation::manual_PreCallValidateUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
+                                                                     const VkWriteDescriptorSet *pDescriptorWrites,
+                                                                     uint32_t descriptorCopyCount,
+                                                                     const VkCopyDescriptorSet *pDescriptorCopies) const {
+    bool skip = false;
+    // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml
+    if (pDescriptorWrites != NULL) {
+        for (uint32_t i = 0; i < descriptorWriteCount; ++i) {
+            // descriptorCount must be greater than 0
+            if (pDescriptorWrites[i].descriptorCount == 0) {
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkWriteDescriptorSet-descriptorCount-arraylength",
+                            "vkUpdateDescriptorSets(): parameter pDescriptorWrites[%d].descriptorCount must be greater than 0.", i);
+            }
+
+            // dstSet must be a valid VkDescriptorSet handle
+            skip |= validate_required_handle("vkUpdateDescriptorSets",
+                                             ParameterName("pDescriptorWrites[%i].dstSet", ParameterName::IndexVector{i}),
+                                             pDescriptorWrites[i].dstSet);
+
+            if ((pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) ||
+                (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) ||
+                (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) ||
+                (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) ||
+                (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)) {
+                // If descriptorType is VK_DESCRIPTOR_TYPE_SAMPLER, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
+                // VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE or VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT,
+                // pImageInfo must be a pointer to an array of descriptorCount valid VkDescriptorImageInfo structures
+                if (pDescriptorWrites[i].pImageInfo == nullptr) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    "VUID-VkWriteDescriptorSet-descriptorType-00322",
+                                    "vkUpdateDescriptorSets(): if pDescriptorWrites[%d].descriptorType is "
+                                    "VK_DESCRIPTOR_TYPE_SAMPLER, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, "
+                                    "VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE or "
+                                    "VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, pDescriptorWrites[%d].pImageInfo must not be NULL.",
+                                    i, i);
+                } else if (pDescriptorWrites[i].descriptorType != VK_DESCRIPTOR_TYPE_SAMPLER) {
+                    // If descriptorType is VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
+                    // VK_DESCRIPTOR_TYPE_STORAGE_IMAGE or VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, the imageView and imageLayout
+                    // members of any given element of pImageInfo must be a valid VkImageView and VkImageLayout, respectively
+                    for (uint32_t descriptor_index = 0; descriptor_index < pDescriptorWrites[i].descriptorCount;
+                         ++descriptor_index) {
+                        skip |= validate_required_handle("vkUpdateDescriptorSets",
+                                                         ParameterName("pDescriptorWrites[%i].pImageInfo[%i].imageView",
+                                                                       ParameterName::IndexVector{i, descriptor_index}),
+                                                         pDescriptorWrites[i].pImageInfo[descriptor_index].imageView);
+                        skip |= validate_ranged_enum("vkUpdateDescriptorSets",
+                                                     ParameterName("pDescriptorWrites[%i].pImageInfo[%i].imageLayout",
+                                                                   ParameterName::IndexVector{i, descriptor_index}),
+                                                     "VkImageLayout", AllVkImageLayoutEnums,
+                                                     pDescriptorWrites[i].pImageInfo[descriptor_index].imageLayout, kVUIDUndefined);
+                    }
+                }
+            } else if ((pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
+                       (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) ||
+                       (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ||
+                       (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) {
+                // If descriptorType is VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
+                // VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC or VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, pBufferInfo must be a
+                // pointer to an array of descriptorCount valid VkDescriptorBufferInfo structures
+                if (pDescriptorWrites[i].pBufferInfo == nullptr) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    "VUID-VkWriteDescriptorSet-descriptorType-00324",
+                                    "vkUpdateDescriptorSets(): if pDescriptorWrites[%d].descriptorType is "
+                                    "VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, "
+                                    "VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC or VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, "
+                                    "pDescriptorWrites[%d].pBufferInfo must not be NULL.",
+                                    i, i);
+                } else {
+                    for (uint32_t descriptorIndex = 0; descriptorIndex < pDescriptorWrites[i].descriptorCount; ++descriptorIndex) {
+                        skip |= validate_required_handle("vkUpdateDescriptorSets",
+                                                         ParameterName("pDescriptorWrites[%i].pBufferInfo[%i].buffer",
+                                                                       ParameterName::IndexVector{i, descriptorIndex}),
+                                                         pDescriptorWrites[i].pBufferInfo[descriptorIndex].buffer);
+                    }
+                }
+            } else if ((pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) ||
+                       (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER)) {
+                // If descriptorType is VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER or VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
+                // pTexelBufferView must be a pointer to an array of descriptorCount valid VkBufferView handles
+                if (pDescriptorWrites[i].pTexelBufferView == nullptr) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    "VUID-VkWriteDescriptorSet-descriptorType-00323",
+                                    "vkUpdateDescriptorSets(): if pDescriptorWrites[%d].descriptorType is "
+                                    "VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER or VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, "
+                                    "pDescriptorWrites[%d].pTexelBufferView must not be NULL.",
+                                    i, i);
+                } else {
+                    for (uint32_t descriptor_index = 0; descriptor_index < pDescriptorWrites[i].descriptorCount;
+                         ++descriptor_index) {
+                        skip |= validate_required_handle("vkUpdateDescriptorSets",
+                                                         ParameterName("pDescriptorWrites[%i].pTexelBufferView[%i]",
+                                                                       ParameterName::IndexVector{i, descriptor_index}),
+                                                         pDescriptorWrites[i].pTexelBufferView[descriptor_index]);
+                    }
+                }
+            }
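+            // Illustrative sketch (placeholder names): which array a write must populate for its
+            // descriptorType, per the checks above:
+            //   VkDescriptorBufferInfo buffer_info = {buffer, /*offset=*/0, VK_WHOLE_SIZE};
+            //   VkWriteDescriptorSet write = {VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET};
+            //   write.dstSet          = set;                                // must be a valid handle
+            //   write.descriptorCount = 1;                                  // must be > 0
+            //   write.descriptorType  = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+            //   write.pBufferInfo     = &buffer_info;                       // required for buffer types (00324)
+            //   // Image types require pImageInfo instead (00322); texel buffer types require
+            //   // pTexelBufferView (00323).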
+
+            if ((pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
+                (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC)) {
+                VkDeviceSize uniformAlignment = device_limits.minUniformBufferOffsetAlignment;
+                for (uint32_t j = 0; j < pDescriptorWrites[i].descriptorCount; j++) {
+                    if (pDescriptorWrites[i].pBufferInfo != NULL) {
+                        if (SafeModulo(pDescriptorWrites[i].pBufferInfo[j].offset, uniformAlignment) != 0) {
+                            skip |=
+                                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+                                        0, "VUID-VkWriteDescriptorSet-descriptorType-00327",
+                                        "vkUpdateDescriptorSets(): pDescriptorWrites[%d].pBufferInfo[%d].offset (0x%" PRIxLEAST64
+                                        ") must be a multiple of device limit minUniformBufferOffsetAlignment 0x%" PRIxLEAST64 ".",
+                                        i, j, pDescriptorWrites[i].pBufferInfo[j].offset, uniformAlignment);
+                        }
+                    }
+                }
+            } else if ((pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) ||
+                       (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) {
+                VkDeviceSize storageAlignment = device_limits.minStorageBufferOffsetAlignment;
+                for (uint32_t j = 0; j < pDescriptorWrites[i].descriptorCount; j++) {
+                    if (pDescriptorWrites[i].pBufferInfo != NULL) {
+                        if (SafeModulo(pDescriptorWrites[i].pBufferInfo[j].offset, storageAlignment) != 0) {
+                            skip |=
+                                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+                                        0, "VUID-VkWriteDescriptorSet-descriptorType-00328",
+                                        "vkUpdateDescriptorSets(): pDescriptorWrites[%d].pBufferInfo[%d].offset (0x%" PRIxLEAST64
+                                        ") must be a multiple of device limit minStorageBufferOffsetAlignment 0x%" PRIxLEAST64 ".",
+                                        i, j, pDescriptorWrites[i].pBufferInfo[j].offset, storageAlignment);
+                        }
+                    }
+                }
+            }
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
+                                                                 const VkAllocationCallbacks *pAllocator,
+                                                                 VkRenderPass *pRenderPass) const {
+    return CreateRenderPassGeneric(device, pCreateInfo, pAllocator, pRenderPass, RENDER_PASS_VERSION_1);
+}
+
+bool StatelessValidation::manual_PreCallValidateCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
+                                                                     const VkAllocationCallbacks *pAllocator,
+                                                                     VkRenderPass *pRenderPass) const {
+    return CreateRenderPassGeneric(device, pCreateInfo, pAllocator, pRenderPass, RENDER_PASS_VERSION_2);
+}
+
+bool StatelessValidation::manual_PreCallValidateFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
+                                                                   uint32_t commandBufferCount,
+                                                                   const VkCommandBuffer *pCommandBuffers) const {
+    bool skip = false;
+
+    // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml
+    // This is an array of handles, where the elements are allowed to be VK_NULL_HANDLE, and does not require any validation beyond
+    // validate_array()
+    skip |= validate_array("vkFreeCommandBuffers", "commandBufferCount", "pCommandBuffers", commandBufferCount, &pCommandBuffers,
+                           true, true, kVUIDUndefined, kVUIDUndefined);
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateBeginCommandBuffer(VkCommandBuffer commandBuffer,
+                                                                   const VkCommandBufferBeginInfo *pBeginInfo) const {
+    bool skip = false;
+
+    // VkCommandBufferInheritanceInfo validation, due to a 'noautovalidity' of pBeginInfo->pInheritanceInfo in vkBeginCommandBuffer
+    const char *cmd_name = "vkBeginCommandBuffer";
+    const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;
+
+    // Implicit VUs
+    // validate only sType here; pointer has to be validated in core_validation
+    const bool kNotRequired = false;
+    const char *kNoVUID = nullptr;
+    skip |= validate_struct_type(cmd_name, "pBeginInfo->pInheritanceInfo", "VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO",
+                                 pInfo, VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO, kNotRequired, kNoVUID,
+                                 "VUID-VkCommandBufferInheritanceInfo-sType-sType");
+
+    if (pInfo) {
+        const VkStructureType allowed_structs_VkCommandBufferInheritanceInfo[] = {
+            VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT};
+        skip |= validate_struct_pnext(
+            cmd_name, "pBeginInfo->pInheritanceInfo->pNext", "VkCommandBufferInheritanceConditionalRenderingInfoEXT", pInfo->pNext,
+            ARRAY_SIZE(allowed_structs_VkCommandBufferInheritanceInfo), allowed_structs_VkCommandBufferInheritanceInfo,
+            GeneratedVulkanHeaderVersion, "VUID-VkCommandBufferInheritanceInfo-pNext-pNext");
+
+        skip |= validate_bool32(cmd_name, "pBeginInfo->pInheritanceInfo->occlusionQueryEnable", pInfo->occlusionQueryEnable);
+
+        // Explicit VUs
+        if (!physical_device_features.inheritedQueries && pInfo->occlusionQueryEnable == VK_TRUE) {
+            skip |= log_msg(
+                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                HandleToUint64(commandBuffer), "VUID-VkCommandBufferInheritanceInfo-occlusionQueryEnable-00056",
+                "%s: Inherited queries feature is disabled, but pBeginInfo->pInheritanceInfo->occlusionQueryEnable is VK_TRUE.",
+                cmd_name);
+        }
+
+        if (physical_device_features.inheritedQueries) {
+            skip |= validate_flags(cmd_name, "pBeginInfo->pInheritanceInfo->queryFlags", "VkQueryControlFlagBits",
+                                   AllVkQueryControlFlagBits, pInfo->queryFlags, kOptionalFlags,
+                                   "VUID-VkCommandBufferInheritanceInfo-queryFlags-00057");
+        } else {  // !inheritedQueries
+            skip |= validate_reserved_flags(cmd_name, "pBeginInfo->pInheritanceInfo->queryFlags", pInfo->queryFlags,
+                                            "VUID-VkCommandBufferInheritanceInfo-queryFlags-02788");
+        }
+
+        if (physical_device_features.pipelineStatisticsQuery) {
+            skip |= validate_flags(cmd_name, "pBeginInfo->pInheritanceInfo->pipelineStatistics", "VkQueryPipelineStatisticFlagBits",
+                                   AllVkQueryPipelineStatisticFlagBits, pInfo->pipelineStatistics, kOptionalFlags,
+                                   "VUID-VkCommandBufferInheritanceInfo-pipelineStatistics-02789");
+        } else {  // !pipelineStatisticsQuery
+            skip |= validate_reserved_flags(cmd_name, "pBeginInfo->pInheritanceInfo->pipelineStatistics", pInfo->pipelineStatistics,
+                                            "VUID-VkCommandBufferInheritanceInfo-pipelineStatistics-00058");
+        }
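+        // Illustrative sketch (placeholder names): an inheritance info that passes these checks only
+        // when the inheritedQueries feature is enabled:
+        //   VkCommandBufferInheritanceInfo inherit = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO};
+        //   inherit.occlusionQueryEnable = VK_TRUE;             // needs inheritedQueries (00056)
+        //   inherit.queryFlags = VK_QUERY_CONTROL_PRECISE_BIT;  // must be zero without the feature (02788)
+        //   VkCommandBufferBeginInfo begin = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO};
+        //   begin.pInheritanceInfo = &inherit;
+        //   vkBeginCommandBuffer(secondaryCommandBuffer, &begin);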
+
+        const auto *conditional_rendering = lvl_find_in_chain<VkCommandBufferInheritanceConditionalRenderingInfoEXT>(pInfo->pNext);
+        if (conditional_rendering) {
+            const auto *cr_features =
+                lvl_find_in_chain<VkPhysicalDeviceConditionalRenderingFeaturesEXT>(physical_device_features2.pNext);
+            const auto inherited_conditional_rendering = cr_features && cr_features->inheritedConditionalRendering;
+            if (!inherited_conditional_rendering && conditional_rendering->conditionalRenderingEnable == VK_TRUE) {
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                    HandleToUint64(commandBuffer),
+                    "VUID-VkCommandBufferInheritanceConditionalRenderingInfoEXT-conditionalRenderingEnable-01977",
+                    "vkBeginCommandBuffer: Inherited conditional rendering is disabled, but the chained "
+                    "VkCommandBufferInheritanceConditionalRenderingInfoEXT has conditionalRenderingEnable set to VK_TRUE.");
+            }
+        }
+    }
+
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
+                                                               uint32_t viewportCount, const VkViewport *pViewports) const {
+    bool skip = false;
+
+    if (!physical_device_features.multiViewport) {
+        if (firstViewport != 0) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(commandBuffer), "VUID-vkCmdSetViewport-firstViewport-01224",
+                            "vkCmdSetViewport: The multiViewport feature is disabled, but firstViewport (=%" PRIu32 ") is not 0.",
+                            firstViewport);
+        }
+        if (viewportCount > 1) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(commandBuffer), "VUID-vkCmdSetViewport-viewportCount-01225",
+                            "vkCmdSetViewport: The multiViewport feature is disabled, but viewportCount (=%" PRIu32 ") is not 1.",
+                            viewportCount);
+        }
+    } else {  // multiViewport enabled
+        const uint64_t sum = static_cast<uint64_t>(firstViewport) + static_cast<uint64_t>(viewportCount);
+        if (sum > device_limits.maxViewports) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(commandBuffer), "VUID-vkCmdSetViewport-firstViewport-01223",
+                            "vkCmdSetViewport: firstViewport + viewportCount (=%" PRIu32 " + %" PRIu32 " = %" PRIu64
+                            ") is greater than VkPhysicalDeviceLimits::maxViewports (=%" PRIu32 ").",
+                            firstViewport, viewportCount, sum, device_limits.maxViewports);
+        }
+    }
+
+    if (pViewports) {
+        for (uint32_t viewport_i = 0; viewport_i < viewportCount; ++viewport_i) {
+            const auto &viewport = pViewports[viewport_i];  // will crash on invalid ptr
+            const char *fn_name = "vkCmdSetViewport";
+            skip |= manual_PreCallValidateViewport(viewport, fn_name,
+                                                   ParameterName("pViewports[%i]", ParameterName::IndexVector{viewport_i}),
+                                                   VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, HandleToUint64(commandBuffer));
+        }
+    }
+
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor,
+                                                              uint32_t scissorCount, const VkRect2D *pScissors) const {
+    bool skip = false;
+
+    if (!physical_device_features.multiViewport) {
+        if (firstScissor != 0) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(commandBuffer), "VUID-vkCmdSetScissor-firstScissor-00593",
+                            "vkCmdSetScissor: The multiViewport feature is disabled, but firstScissor (=%" PRIu32 ") is not 0.",
+                            firstScissor);
+        }
+        if (scissorCount > 1) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(commandBuffer), "VUID-vkCmdSetScissor-scissorCount-00594",
+                            "vkCmdSetScissor: The multiViewport feature is disabled, but scissorCount (=%" PRIu32 ") is not 1.",
+                            scissorCount);
+        }
+    } else {  // multiViewport enabled
+        const uint64_t sum = static_cast<uint64_t>(firstScissor) + static_cast<uint64_t>(scissorCount);
+        if (sum > device_limits.maxViewports) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(commandBuffer), "VUID-vkCmdSetScissor-firstScissor-00592",
+                            "vkCmdSetScissor: firstScissor + scissorCount (=%" PRIu32 " + %" PRIu32 " = %" PRIu64
+                            ") is greater than VkPhysicalDeviceLimits::maxViewports (=%" PRIu32 ").",
+                            firstScissor, scissorCount, sum, device_limits.maxViewports);
+        }
+    }
+
+    if (pScissors) {
+        for (uint32_t scissor_i = 0; scissor_i < scissorCount; ++scissor_i) {
+            const auto &scissor = pScissors[scissor_i];  // will crash on invalid ptr
+
+            if (scissor.offset.x < 0) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(commandBuffer), "VUID-vkCmdSetScissor-x-00595",
+                                "vkCmdSetScissor: pScissors[%" PRIu32 "].offset.x (=%" PRIi32 ") is negative.", scissor_i,
+                                scissor.offset.x);
+            }
+
+            if (scissor.offset.y < 0) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(commandBuffer), "VUID-vkCmdSetScissor-x-00595",
+                                "vkCmdSetScissor: pScissors[%" PRIu32 "].offset.y (=%" PRIi32 ") is negative.", scissor_i,
+                                scissor.offset.y);
+            }
+
+            const int64_t x_sum = static_cast<int64_t>(scissor.offset.x) + static_cast<int64_t>(scissor.extent.width);
+            if (x_sum > INT32_MAX) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(commandBuffer), "VUID-vkCmdSetScissor-offset-00596",
+                                "vkCmdSetScissor: offset.x + extent.width (=%" PRIi32 " + %" PRIu32 " = %" PRIi64
+                                ") of pScissors[%" PRIu32 "] will overflow int32_t.",
+                                scissor.offset.x, scissor.extent.width, x_sum, scissor_i);
+            }
+
+            const int64_t y_sum = static_cast<int64_t>(scissor.offset.y) + static_cast<int64_t>(scissor.extent.height);
+            if (y_sum > INT32_MAX) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(commandBuffer), "VUID-vkCmdSetScissor-offset-00597",
+                                "vkCmdSetScissor: offset.y + extent.height (=%" PRIi32 " + %" PRIu32 " = %" PRIi64
+                                ") of pScissors[%" PRIu32 "] will overflow int32_t.",
+                                scissor.offset.y, scissor.extent.height, y_sum, scissor_i);
+            }
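+            // Illustrative numeric example: the sums above are computed in 64-bit before comparing
+            // against INT32_MAX, so a scissor with offset.x = 2147483000 and extent.width = 1024
+            // trips 00596 because 2147483000 + 1024 = 2147484024 > 2147483647.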
+        }
+    }
+
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) const {
+    bool skip = false;
+
+    if (!physical_device_features.wideLines && (lineWidth != 1.0f)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdSetLineWidth-lineWidth-00788",
+                        "VkPhysicalDeviceFeatures::wideLines is disabled, but lineWidth (=%f) is not 1.0.", lineWidth);
+    }
+
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
+                                                        uint32_t firstVertex, uint32_t firstInstance) const {
+    bool skip = false;
+    if (vertexCount == 0) {
+        // TODO: Verify against Valid Usage section. I don't see a non-zero vertexCount listed, may need to add that and make
+        // this an error or leave as is.
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        kVUID_PVError_RequiredParameter, "vkCmdDraw parameter, uint32_t vertexCount, is 0");
+    }
+
+    if (instanceCount == 0) {
+        // TODO: Verify against Valid Usage section. I don't see a non-zero instanceCount listed, may need to add that and make
+        // this an error or leave as is.
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        kVUID_PVError_RequiredParameter, "vkCmdDraw parameter, uint32_t instanceCount, is 0");
+    }
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+                                                                uint32_t count, uint32_t stride) const {
+    bool skip = false;
+
+    if (!physical_device_features.multiDrawIndirect && (count > 1)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        kVUID_PVError_DeviceFeature,
+                        "CmdDrawIndirect(): Device feature multiDrawIndirect disabled: count must be 0 or 1 but is %d", count);
+    }
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
+                                                                       VkDeviceSize offset, uint32_t count, uint32_t stride) const {
+    bool skip = false;
+    if (!physical_device_features.multiDrawIndirect && (count > 1)) {
+        skip |= log_msg(
+            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUID_PVError_DeviceFeature,
+            "CmdDrawIndexedIndirect(): Device feature multiDrawIndirect disabled: count must be 0 or 1 but is %d", count);
+    }
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount,
+                                                                    const VkClearAttachment *pAttachments, uint32_t rectCount,
+                                                                    const VkClearRect *pRects) const {
+    bool skip = false;
+    for (uint32_t rect = 0; rect < rectCount; rect++) {
+        if (pRects[rect].layerCount == 0) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(commandBuffer), "VUID-vkCmdClearAttachments-layerCount-01934",
+                            "CmdClearAttachments(): pRects[%d].layerCount is zero.", rect);
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
+                                                             VkImageLayout srcImageLayout, VkImage dstImage,
+                                                             VkImageLayout dstImageLayout, uint32_t regionCount,
+                                                             const VkImageCopy *pRegions) const {
+    bool skip = false;
+
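+    // Planar aspect bits are only legal when VK_KHR_sampler_ycbcr_conversion is enabled.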
+    VkImageAspectFlags legal_aspect_flags =
+        VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT;
+    if (device_extensions.vk_khr_sampler_ycbcr_conversion) {
+        legal_aspect_flags |= (VK_IMAGE_ASPECT_PLANE_0_BIT_KHR | VK_IMAGE_ASPECT_PLANE_1_BIT_KHR | VK_IMAGE_ASPECT_PLANE_2_BIT_KHR);
+    }
+
+    if (pRegions != nullptr) {
+        if ((pRegions->srcSubresource.aspectMask & legal_aspect_flags) == 0) {
+            skip |= log_msg(
+                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                "VUID-VkImageSubresourceLayers-aspectMask-parameter",
+                "vkCmdCopyImage() parameter, VkImageAspect pRegions->srcSubresource.aspectMask, is an unrecognized enumerator.");
+        }
+        if ((pRegions->dstSubresource.aspectMask & legal_aspect_flags) == 0) {
+            skip |= log_msg(
+                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                "VUID-VkImageSubresourceLayers-aspectMask-parameter",
+                "vkCmdCopyImage() parameter, VkImageAspect pRegions->dstSubresource.aspectMask, is an unrecognized enumerator.");
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
+                                                             VkImageLayout srcImageLayout, VkImage dstImage,
+                                                             VkImageLayout dstImageLayout, uint32_t regionCount,
+                                                             const VkImageBlit *pRegions, VkFilter filter) const {
+    bool skip = false;
+
+    VkImageAspectFlags legal_aspect_flags =
+        VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT;
+    if (device_extensions.vk_khr_sampler_ycbcr_conversion) {
+        legal_aspect_flags |= (VK_IMAGE_ASPECT_PLANE_0_BIT_KHR | VK_IMAGE_ASPECT_PLANE_1_BIT_KHR | VK_IMAGE_ASPECT_PLANE_2_BIT_KHR);
+    }
+
+    if (pRegions != nullptr) {
+        if ((pRegions->srcSubresource.aspectMask & legal_aspect_flags) == 0) {
+            skip |= log_msg(
+                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                kVUID_PVError_UnrecognizedValue,
+                "vkCmdBlitImage() parameter, VkImageAspect pRegions->srcSubresource.aspectMask, is an unrecognized enumerator");
+        }
+        if ((pRegions->dstSubresource.aspectMask & legal_aspect_flags) == 0) {
+            skip |= log_msg(
+                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                kVUID_PVError_UnrecognizedValue,
+                "vkCmdBlitImage() parameter, VkImageAspect pRegions->dstSubresource.aspectMask, is an unrecognized enumerator");
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer,
+                                                                     VkImage dstImage, VkImageLayout dstImageLayout,
+                                                                     uint32_t regionCount,
+                                                                     const VkBufferImageCopy *pRegions) const {
+    bool skip = false;
+
+    VkImageAspectFlags legal_aspect_flags =
+        VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT;
+    if (device_extensions.vk_khr_sampler_ycbcr_conversion) {
+        legal_aspect_flags |= (VK_IMAGE_ASPECT_PLANE_0_BIT_KHR | VK_IMAGE_ASPECT_PLANE_1_BIT_KHR | VK_IMAGE_ASPECT_PLANE_2_BIT_KHR);
+    }
+
+    if (pRegions != nullptr) {
+        if ((pRegions->imageSubresource.aspectMask & legal_aspect_flags) == 0) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            kVUID_PVError_UnrecognizedValue,
+                            "vkCmdCopyBufferToImage() parameter, VkImageAspect pRegions->imageSubresource.aspectMask, is an "
+                            "unrecognized enumerator");
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
+                                                                     VkImageLayout srcImageLayout, VkBuffer dstBuffer,
+                                                                     uint32_t regionCount,
+                                                                     const VkBufferImageCopy *pRegions) const {
+    bool skip = false;
+
+    VkImageAspectFlags legal_aspect_flags =
+        VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT;
+    if (device_extensions.vk_khr_sampler_ycbcr_conversion) {
+        legal_aspect_flags |= (VK_IMAGE_ASPECT_PLANE_0_BIT_KHR | VK_IMAGE_ASPECT_PLANE_1_BIT_KHR | VK_IMAGE_ASPECT_PLANE_2_BIT_KHR);
+    }
+
+    if (pRegions != nullptr) {
+        if ((pRegions->imageSubresource.aspectMask & legal_aspect_flags) == 0) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            kVUID_PVError_UnrecognizedValue,
+                            "vkCmdCopyImageToBuffer() parameter, VkImageAspect pRegions->imageSubresource.aspectMask, is an "
+                            "unrecognized enumerator");
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
+                                                                VkDeviceSize dstOffset, VkDeviceSize dataSize,
+                                                                const void *pData) const {
+    bool skip = false;
+
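+    // (dstOffset & 3) is non-zero exactly when dstOffset is not a multiple of 4.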
+    if (dstOffset & 3) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-vkCmdUpdateBuffer-dstOffset-00036",
+                        "vkCmdUpdateBuffer() parameter, VkDeviceSize dstOffset (0x%" PRIxLEAST64 "), is not a multiple of 4.",
+                        dstOffset);
+    }
+
+    if ((dataSize <= 0) || (dataSize > 65536)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-vkCmdUpdateBuffer-dataSize-00037",
+                        "vkCmdUpdateBuffer() parameter, VkDeviceSize dataSize (0x%" PRIxLEAST64
+                        "), must be greater than zero and less than or equal to 65536.",
+                        dataSize);
+    } else if (dataSize & 3) {
+        skip |=
+            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                    "VUID-vkCmdUpdateBuffer-dataSize-00038",
+                    "vkCmdUpdateBuffer() parameter, VkDeviceSize dataSize (0x%" PRIxLEAST64 "), is not a multiple of 4.", dataSize);
+    }
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
+                                                              VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data) const {
+    bool skip = false;
+
+    if (dstOffset & 3) {
+        skip |=
+            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                    "VUID-vkCmdFillBuffer-dstOffset-00025",
+                    "vkCmdFillBuffer() parameter, VkDeviceSize dstOffset (0x%" PRIxLEAST64 "), is not a multiple of 4.", dstOffset);
+    }
+
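+    // VK_WHOLE_SIZE fills from dstOffset to the end of the buffer and is exempt from the size checks below.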
+    if (size != VK_WHOLE_SIZE) {
+        if (size <= 0) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-vkCmdFillBuffer-size-00026",
+                        "vkCmdFillBuffer() parameter, VkDeviceSize size (0x%" PRIxLEAST64 "), must be greater than zero.", size);
+        } else if (size & 3) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-vkCmdFillBuffer-size-00028",
+                            "vkCmdFillBuffer() parameter, VkDeviceSize size (0x%" PRIxLEAST64 "), is not a multiple of 4.", size);
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
+                                                                   const VkAllocationCallbacks *pAllocator,
+                                                                   VkSwapchainKHR *pSwapchain) const {
+    bool skip = false;
+
+    const LogMiscParams log_misc{VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, VK_NULL_HANDLE, "vkCreateSwapchainKHR"};
+
+    if (pCreateInfo != nullptr) {
+        // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml
+        if (pCreateInfo->imageSharingMode == VK_SHARING_MODE_CONCURRENT) {
+            // If imageSharingMode is VK_SHARING_MODE_CONCURRENT, queueFamilyIndexCount must be greater than 1
+            if (pCreateInfo->queueFamilyIndexCount <= 1) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkSwapchainCreateInfoKHR-imageSharingMode-01278",
+                                "vkCreateSwapchainKHR(): if pCreateInfo->imageSharingMode is VK_SHARING_MODE_CONCURRENT, "
+                                "pCreateInfo->queueFamilyIndexCount must be greater than 1.");
+            }
+
+            // If imageSharingMode is VK_SHARING_MODE_CONCURRENT, pQueueFamilyIndices must be a pointer to an array of
+            // queueFamilyIndexCount uint32_t values
+            if (pCreateInfo->pQueueFamilyIndices == nullptr) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkSwapchainCreateInfoKHR-imageSharingMode-01277",
+                                "vkCreateSwapchainKHR(): if pCreateInfo->imageSharingMode is VK_SHARING_MODE_CONCURRENT, "
+                                "pCreateInfo->pQueueFamilyIndices must be a pointer to an array of "
+                                "pCreateInfo->queueFamilyIndexCount uint32_t values.");
+            }
+        }
+
+        skip |= ValidateGreaterThanZero(pCreateInfo->imageArrayLayers, "pCreateInfo->imageArrayLayers",
+                                        "VUID-VkSwapchainCreateInfoKHR-imageArrayLayers-01275", log_misc);
+    }
+
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo) const {
+    bool skip = false;
+
+    if (pPresentInfo && pPresentInfo->pNext) {
+        const auto *present_regions = lvl_find_in_chain<VkPresentRegionsKHR>(pPresentInfo->pNext);
+        if (present_regions) {
+            // TODO: This and all other pNext extension dependencies should be added to code-generation
+            skip |= require_device_extension(IsExtEnabled(device_extensions.vk_khr_incremental_present), "vkQueuePresentKHR",
+                                             VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME);
+            if (present_regions->swapchainCount != pPresentInfo->swapchainCount) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                kVUID_PVError_InvalidUsage,
+                                "QueuePresentKHR(): pPresentInfo->swapchainCount has a value of %i but VkPresentRegionsKHR "
+                                "extension swapchainCount is %i. These values must be equal.",
+                                pPresentInfo->swapchainCount, present_regions->swapchainCount);
+            }
+            skip |= validate_struct_pnext("QueuePresentKHR", "pPresentInfo->pNext->pNext", NULL, present_regions->pNext, 0, NULL,
+                                          GeneratedVulkanHeaderVersion, "VUID-VkPresentInfoKHR-pNext-pNext");
+            skip |= validate_array("QueuePresentKHR", "pPresentInfo->pNext->swapchainCount", "pPresentInfo->pNext->pRegions",
+                                   present_regions->swapchainCount, &present_regions->pRegions, true, false, kVUIDUndefined,
+                                   kVUIDUndefined);
+            for (uint32_t i = 0; i < present_regions->swapchainCount; ++i) {
+                skip |= validate_array("QueuePresentKHR", "pPresentInfo->pNext->pRegions[].rectangleCount",
+                                       "pPresentInfo->pNext->pRegions[].pRectangles", present_regions->pRegions[i].rectangleCount,
+                                       &present_regions->pRegions[i].pRectangles, true, false, kVUIDUndefined, kVUIDUndefined);
+            }
+        }
+    }
+
+    return skip;
+}
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool StatelessValidation::manual_PreCallValidateCreateWin32SurfaceKHR(VkInstance instance,
+                                                                      const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
+                                                                      const VkAllocationCallbacks *pAllocator,
+                                                                      VkSurfaceKHR *pSurface) const {
+    bool skip = false;
+
+    if (pCreateInfo->hwnd == nullptr) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkWin32SurfaceCreateInfoKHR-hwnd-01308",
+                        "vkCreateWin32SurfaceKHR(): hwnd must be a valid Win32 HWND but hwnd is NULL.");
+    }
+
+    return skip;
+}
+#endif  // VK_USE_PLATFORM_WIN32_KHR
+
+bool StatelessValidation::manual_PreCallValidateCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
+                                                                     const VkAllocationCallbacks *pAllocator,
+                                                                     VkDescriptorPool *pDescriptorPool) const {
+    bool skip = false;
+
+    if (pCreateInfo) {
+        if (pCreateInfo->maxSets <= 0) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
+                            VK_NULL_HANDLE, "VUID-VkDescriptorPoolCreateInfo-maxSets-00301",
+                            "vkCreateDescriptorPool(): pCreateInfo->maxSets is not greater than 0.");
+        }
+
+        if (pCreateInfo->pPoolSizes) {
+            for (uint32_t i = 0; i < pCreateInfo->poolSizeCount; ++i) {
+                if (pCreateInfo->pPoolSizes[i].descriptorCount <= 0) {
+                    skip |= log_msg(
+                        report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, VK_NULL_HANDLE,
+                        "VUID-VkDescriptorPoolSize-descriptorCount-00302",
+                        "vkCreateDescriptorPool(): pCreateInfo->pPoolSizes[%" PRIu32 "].descriptorCount is not greater than 0.", i);
+                }
+                if (pCreateInfo->pPoolSizes[i].type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT &&
+                    (pCreateInfo->pPoolSizes[i].descriptorCount % 4) != 0) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
+                                    VK_NULL_HANDLE, "VUID-VkDescriptorPoolSize-type-02218",
+                                    "vkCreateDescriptorPool(): pCreateInfo->pPoolSizes[%" PRIu32
+                                    "].type is VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT "
+                                    " and pCreateInfo->pPoolSizes[%" PRIu32 "].descriptorCount is not a multiple of 4.",
+                                    i, i);
+                }
+            }
+        }
+    }
+
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCmdDispatch(VkCommandBuffer commandBuffer, uint32_t groupCountX,
+                                                            uint32_t groupCountY, uint32_t groupCountZ) const {
+    bool skip = false;
+
+    if (groupCountX > device_limits.maxComputeWorkGroupCount[0]) {
+        skip |=
+            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                    HandleToUint64(commandBuffer), "VUID-vkCmdDispatch-groupCountX-00386",
+                    "vkCmdDispatch(): groupCountX (%" PRIu32 ") exceeds device limit maxComputeWorkGroupCount[0] (%" PRIu32 ").",
+                    groupCountX, device_limits.maxComputeWorkGroupCount[0]);
+    }
+
+    if (groupCountY > device_limits.maxComputeWorkGroupCount[1]) {
+        skip |=
+            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                    HandleToUint64(commandBuffer), "VUID-vkCmdDispatch-groupCountY-00387",
+                    "vkCmdDispatch(): groupCountY (%" PRIu32 ") exceeds device limit maxComputeWorkGroupCount[1] (%" PRIu32 ").",
+                    groupCountY, device_limits.maxComputeWorkGroupCount[1]);
+    }
+
+    if (groupCountZ > device_limits.maxComputeWorkGroupCount[2]) {
+        skip |=
+            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                    HandleToUint64(commandBuffer), "VUID-vkCmdDispatch-groupCountZ-00388",
+                    "vkCmdDispatch(): groupCountZ (%" PRIu32 ") exceeds device limit maxComputeWorkGroupCount[2] (%" PRIu32 ").",
+                    groupCountZ, device_limits.maxComputeWorkGroupCount[2]);
+    }
+
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
+                                                                    VkDeviceSize offset) const {
+    bool skip = false;
+
+    if ((offset % 4) != 0) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdDispatchIndirect-offset-02710",
+                        "vkCmdDispatchIndirect(): offset (%" PRIu64 ") must be a multiple of 4.", offset);
+    }
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCmdDispatchBaseKHR(VkCommandBuffer commandBuffer, uint32_t baseGroupX,
+                                                                   uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX,
+                                                                   uint32_t groupCountY, uint32_t groupCountZ) const {
+    bool skip = false;
+
+    // Paired if {} else if {} tests used to avoid any possible uint underflow
+    uint32_t limit = device_limits.maxComputeWorkGroupCount[0];
+    if (baseGroupX >= limit) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdDispatchBase-baseGroupX-00421",
+                        "vkCmdDispatch(): baseGroupX (%" PRIu32
+                        ") equals or exceeds device limit maxComputeWorkGroupCount[0] (%" PRIu32 ").",
+                        baseGroupX, limit);
+    } else if (groupCountX > (limit - baseGroupX)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdDispatchBase-groupCountX-00424",
+                        "vkCmdDispatchBaseKHR(): baseGroupX (%" PRIu32 ") + groupCountX (%" PRIu32
+                        ") exceeds device limit maxComputeWorkGroupCount[0] (%" PRIu32 ").",
+                        baseGroupX, groupCountX, limit);
+    }
+
+    limit = device_limits.maxComputeWorkGroupCount[1];
+    if (baseGroupY >= limit) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdDispatchBase-baseGroupX-00422",
+                        "vkCmdDispatch(): baseGroupY (%" PRIu32
+                        ") equals or exceeds device limit maxComputeWorkGroupCount[1] (%" PRIu32 ").",
+                        baseGroupY, limit);
+    } else if (groupCountY > (limit - baseGroupY)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdDispatchBase-groupCountY-00425",
+                        "vkCmdDispatchBaseKHR(): baseGroupY (%" PRIu32 ") + groupCountY (%" PRIu32
+                        ") exceeds device limit maxComputeWorkGroupCount[1] (%" PRIu32 ").",
+                        baseGroupY, groupCountY, limit);
+    }
+
+    limit = device_limits.maxComputeWorkGroupCount[2];
+    if (baseGroupZ >= limit) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdDispatchBase-baseGroupZ-00423",
+                        "vkCmdDispatch(): baseGroupZ (%" PRIu32
+                        ") equals or exceeds device limit maxComputeWorkGroupCount[2] (%" PRIu32 ").",
+                        baseGroupZ, limit);
+    } else if (groupCountZ > (limit - baseGroupZ)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdDispatchBase-groupCountZ-00426",
+                        "vkCmdDispatchBaseKHR(): baseGroupZ (%" PRIu32 ") + groupCountZ (%" PRIu32
+                        ") exceeds device limit maxComputeWorkGroupCount[2] (%" PRIu32 ").",
+                        baseGroupZ, groupCountZ, limit);
+    }
+
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer,
+                                                                         uint32_t firstExclusiveScissor,
+                                                                         uint32_t exclusiveScissorCount,
+                                                                         const VkRect2D *pExclusiveScissors) const {
+    bool skip = false;
+
+    if (!physical_device_features.multiViewport) {
+        if (firstExclusiveScissor != 0) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdSetExclusiveScissorNV-firstExclusiveScissor-02035",
+                        "vkCmdSetExclusiveScissorNV: The multiViewport feature is disabled, but firstExclusiveScissor (=%" PRIu32
+                        ") is not 0.",
+                        firstExclusiveScissor);
+        }
+        if (exclusiveScissorCount > 1) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdSetExclusiveScissorNV-exclusiveScissorCount-02036",
+                        "vkCmdSetExclusiveScissorNV: The multiViewport feature is disabled, but exclusiveScissorCount (=%" PRIu32
+                        ") is not 1.",
+                        exclusiveScissorCount);
+        }
+    } else {  // multiViewport enabled
+        const uint64_t sum = static_cast<uint64_t>(firstExclusiveScissor) + static_cast<uint64_t>(exclusiveScissorCount);
+        if (sum > device_limits.maxViewports) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(commandBuffer), "VUID-vkCmdSetExclusiveScissorNV-firstExclusiveScissor-02034",
+                            "vkCmdSetExclusiveScissorNV: firstExclusiveScissor + exclusiveScissorCount (=%" PRIu32 " + %" PRIu32
+                            " = %" PRIu64 ") is greater than VkPhysicalDeviceLimits::maxViewports (=%" PRIu32 ").",
+                            firstExclusiveScissor, exclusiveScissorCount, sum, device_limits.maxViewports);
+        }
+    }
+
+    if (firstExclusiveScissor >= device_limits.maxViewports) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdSetExclusiveScissorNV-firstExclusiveScissor-02033",
+                        "vkCmdSetExclusiveScissorNV: firstExclusiveScissor (=%" PRIu32 ") must be less than maxViewports (=%" PRIu32
+                        ").",
+                        firstExclusiveScissor, device_limits.maxViewports);
+    }
+
+    if (pExclusiveScissors) {
+        for (uint32_t scissor_i = 0; scissor_i < exclusiveScissorCount; ++scissor_i) {
+            const auto &scissor = pExclusiveScissors[scissor_i];  // will crash on invalid ptr
+
+            if (scissor.offset.x < 0) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(commandBuffer), "VUID-vkCmdSetExclusiveScissorNV-x-02037",
+                                "vkCmdSetExclusiveScissorNV: pScissors[%" PRIu32 "].offset.x (=%" PRIi32 ") is negative.",
+                                scissor_i, scissor.offset.x);
+            }
+
+            if (scissor.offset.y < 0) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(commandBuffer), "VUID-vkCmdSetExclusiveScissorNV-x-02037",
+                                "vkCmdSetExclusiveScissorNV: pScissors[%" PRIu32 "].offset.y (=%" PRIi32 ") is negative.",
+                                scissor_i, scissor.offset.y);
+            }
+
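+            // Widen to int64_t before adding so the overflow check itself cannot overflow.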
+            const int64_t x_sum = static_cast<int64_t>(scissor.offset.x) + static_cast<int64_t>(scissor.extent.width);
+            if (x_sum > INT32_MAX) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(commandBuffer), "VUID-vkCmdSetExclusiveScissorNV-offset-02038",
+                                "vkCmdSetExclusiveScissorNV: offset.x + extent.width (=%" PRIi32 " + %" PRIu32 " = %" PRIi64
+                                ") of pScissors[%" PRIu32 "] will overflow int32_t.",
+                                scissor.offset.x, scissor.extent.width, x_sum, scissor_i);
+            }
+
+            const int64_t y_sum = static_cast<int64_t>(scissor.offset.y) + static_cast<int64_t>(scissor.extent.height);
+            if (y_sum > INT32_MAX) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                                HandleToUint64(commandBuffer), "VUID-vkCmdSetExclusiveScissorNV-offset-02039",
+                                "vkCmdSetExclusiveScissorNV: offset.y + extent.height (=%" PRIi32 " + %" PRIu32 " = %" PRIi64
+                                ") of pScissors[%" PRIu32 "] will overflow int32_t.",
+                                scissor.offset.y, scissor.extent.height, y_sum, scissor_i);
+            }
+        }
+    }
+
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
+                                                                         uint32_t viewportCount,
+                                                                         const VkViewportWScalingNV *pViewportWScalings) const {
+    bool skip = false;
+    if (firstViewport >= device_limits.maxViewports) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdSetViewportWScalingNV-firstViewport-01323",
+                        "vkCmdSetViewportWScalingNV: firstViewport (=%" PRIu32 ") must be less than maxViewports (=%" PRIu32 ").",
+                        firstViewport, device_limits.maxViewports);
+    } else {
+        const uint64_t sum = static_cast<uint64_t>(firstViewport) + static_cast<uint64_t>(viewportCount);
+        if ((sum < 1) || (sum > device_limits.maxViewports)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                            HandleToUint64(commandBuffer), "VUID-vkCmdSetViewportWScalingNV-firstViewport-01324",
+                            "vkCmdSetViewportWScalingNV: firstViewport + viewportCount (=%" PRIu32 " + %" PRIu32 " = %" PRIu64
+                            ") must be between 1 and VkPhysicalDeviceLimits::maxViewports (=%" PRIu32 "), inculsive.",
+                            firstViewport, viewportCount, sum, device_limits.maxViewports);
+        }
+    }
+
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCmdSetViewportShadingRatePaletteNV(
+    VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount,
+    const VkShadingRatePaletteNV *pShadingRatePalettes) const {
+    bool skip = false;
+
+    if (!physical_device_features.multiViewport) {
+        if (firstViewport != 0) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdSetViewportShadingRatePaletteNV-firstViewport-02068",
+                        "vkCmdSetViewportShadingRatePaletteNV: The multiViewport feature is disabled, but firstViewport (=%" PRIu32
+                        ") is not 0.",
+                        firstViewport);
+        }
+        if (viewportCount > 1) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdSetViewportShadingRatePaletteNV-viewportCount-02069",
+                        "vkCmdSetViewportShadingRatePaletteNV: The multiViewport feature is disabled, but viewportCount (=%" PRIu32
+                        ") is not 1.",
+                        viewportCount);
+        }
+    }
+
+    if (firstViewport >= device_limits.maxViewports) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdSetViewportShadingRatePaletteNV-firstViewport-02066",
+                        "vkCmdSetViewportShadingRatePaletteNV: firstViewport (=%" PRIu32
+                        ") must be less than maxViewports (=%" PRIu32 ").",
+                        firstViewport, device_limits.maxViewports);
+    }
+
+    const uint64_t sum = static_cast<uint64_t>(firstViewport) + static_cast<uint64_t>(viewportCount);
+    if (sum > device_limits.maxViewports) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdSetViewportShadingRatePaletteNV-firstViewport-02067",
+                        "vkCmdSetViewportShadingRatePaletteNV: firstViewport + viewportCount (=%" PRIu32 " + %" PRIu32 " = %" PRIu64
+                        ") is greater than VkPhysicalDeviceLimits::maxViewports (=%" PRIu32 ").",
+                        firstViewport, viewportCount, sum, device_limits.maxViewports);
+    }
+
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCmdSetCoarseSampleOrderNV(
+    VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount,
+    const VkCoarseSampleOrderCustomNV *pCustomSampleOrders) const {
+    bool skip = false;
+
+    if (sampleOrderType != VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV && customSampleOrderCount != 0) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdSetCoarseSampleOrderNV-sampleOrderType-02081",
+                        "vkCmdSetCoarseSampleOrderNV: If sampleOrderType is not VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV, "
+                        "customSampleOrderCount must be 0.");
+    }
+
+    for (uint32_t order_i = 0; order_i < customSampleOrderCount; ++order_i) {
+        skip |= ValidateCoarseSampleOrderCustomNV(&pCustomSampleOrders[order_i]);
+    }
+
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
+                                                                   uint32_t firstTask) const {
+    bool skip = false;
+
+    if (taskCount > phys_dev_ext_props.mesh_shader_props.maxDrawMeshTasksCount) {
+        skip |= log_msg(
+            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+            HandleToUint64(commandBuffer), "VUID-vkCmdDrawMeshTasksNV-taskCount-02119",
+            "vkCmdDrawMeshTasksNV() parameter, uint32_t taskCount (0x%" PRIxLEAST32
+            "), must be less than or equal to VkPhysicalDeviceMeshShaderPropertiesNV::maxDrawMeshTasksCount (0x%" PRIxLEAST32 ").",
+            taskCount, phys_dev_ext_props.mesh_shader_props.maxDrawMeshTasksCount);
+    }
+
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
+                                                                           VkDeviceSize offset, uint32_t drawCount,
+                                                                           uint32_t stride) const {
+    bool skip = false;
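+    // Low-two-bit mask: any offset or stride with either bit set is not a multiple of 4.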
+    static const int condition_multiples = 0b0011;
+    if (offset & condition_multiples) {
+        skip |= log_msg(
+            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+            HandleToUint64(commandBuffer), "VUID-vkCmdDrawMeshTasksIndirectNV-offset-02710",
+            "vkCmdDrawMeshTasksIndirectNV() parameter, VkDeviceSize offset (0x%" PRIxLEAST64 "), is not a multiple of 4.", offset);
+    }
+    if (drawCount > 1 && ((stride & condition_multiples) || stride < sizeof(VkDrawMeshTasksIndirectCommandNV))) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02146",
+                        "vkCmdDrawMeshTasksIndirectNV() parameter, uint32_t stride (0x%" PRIxLEAST32
+                        "), is not a multiple of 4 or smaller than sizeof (VkDrawMeshTasksIndirectCommandNV).",
+                        stride);
+    }
+    if (!physical_device_features.multiDrawIndirect && (drawCount > 1)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02718",
+                        "vkCmdDrawMeshTasksIndirectNV(): Device feature multiDrawIndirect disabled: count must be 0 or 1 but is %d",
+                        drawCount);
+    }
+
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
+                                                                                VkDeviceSize offset, VkBuffer countBuffer,
+                                                                                VkDeviceSize countBufferOffset,
+                                                                                uint32_t maxDrawCount, uint32_t stride) const {
+    bool skip = false;
+
+    if (offset & 3) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdDrawMeshTasksIndirectCountNV-offset-02710",
+                        "vkCmdDrawMeshTasksIndirectCountNV() parameter, VkDeviceSize offset (0x%" PRIxLEAST64
+                        "), is not a multiple of 4.",
+                        offset);
+    }
+
+    if (countBufferOffset & 3) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdDrawMeshTasksIndirectCountNV-countBufferOffset-02716",
+                        "vkCmdDrawMeshTasksIndirectCountNV() parameter, VkDeviceSize countBufferOffset (0x%" PRIxLEAST64
+                        "), is not a multiple of 4.",
+                        countBufferOffset);
+    }
+
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
+                                                                const VkAllocationCallbacks *pAllocator,
+                                                                VkQueryPool *pQueryPool) const {
+    bool skip = false;
+
+    // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml
+    if (pCreateInfo != nullptr) {
+        // If queryType is VK_QUERY_TYPE_PIPELINE_STATISTICS, pipelineStatistics must be a valid combination of
+        // VkQueryPipelineStatisticFlagBits values
+        if ((pCreateInfo->queryType == VK_QUERY_TYPE_PIPELINE_STATISTICS) && (pCreateInfo->pipelineStatistics != 0) &&
+            ((pCreateInfo->pipelineStatistics & (~AllVkQueryPipelineStatisticFlagBits)) != 0)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkQueryPoolCreateInfo-queryType-00792",
+                            "vkCreateQueryPool(): if pCreateInfo->queryType is VK_QUERY_TYPE_PIPELINE_STATISTICS, "
+                            "pCreateInfo->pipelineStatistics must be a valid combination of VkQueryPipelineStatisticFlagBits "
+                            "values.");
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
+                                                                                   const char *pLayerName, uint32_t *pPropertyCount,
+                                                                                   VkExtensionProperties *pProperties) const {
+    return validate_array("vkEnumerateDeviceExtensionProperties", "pPropertyCount", "pProperties", pPropertyCount, &pProperties,
+                          true, false, false, kVUIDUndefined, "VUID-vkEnumerateDeviceExtensionProperties-pProperties-parameter");
+}
+
+void StatelessValidation::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
+                                                         const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
+                                                         VkResult result) {
+    if (result != VK_SUCCESS) return;
+    RecordRenderPass(*pRenderPass, pCreateInfo);
+}
+
+void StatelessValidation::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
+                                                             const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
+                                                             VkResult result) {
+    // Track the state necessary for checking vkCreateGraphicsPipeline (subpass usage of depth and color attachments)
+    if (result != VK_SUCCESS) return;
+    RecordRenderPass(*pRenderPass, pCreateInfo);
+}
+
+void StatelessValidation::PostCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
+                                                          const VkAllocationCallbacks *pAllocator) {
+    // Track the state necessary for checking vkCreateGraphicsPipeline (subpass usage of depth and color attachments)
+    std::unique_lock<std::mutex> lock(renderpass_map_mutex);
+    renderpasses_states.erase(renderPass);
+}
+
+bool StatelessValidation::manual_PreCallValidateAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
+                                                               const VkAllocationCallbacks *pAllocator,
+                                                               VkDeviceMemory *pMemory) const {
+    bool skip = false;
+
+    if (pAllocateInfo) {
+        auto chained_prio_struct = lvl_find_in_chain<VkMemoryPriorityAllocateInfoEXT>(pAllocateInfo->pNext);
+        if (chained_prio_struct && (chained_prio_struct->priority < 0.0f || chained_prio_struct->priority > 1.0f)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkMemoryPriorityAllocateInfoEXT-priority-02602",
+                            "priority (=%f) must be between `0` and `1`, inclusive.", chained_prio_struct->priority);
+        }
+
+        VkMemoryAllocateFlags flags = 0;
+        auto flags_info = lvl_find_in_chain<VkMemoryAllocateFlagsInfo>(pAllocateInfo->pNext);
+        if (flags_info) {
+            flags = flags_info->flags;
+        }
+
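+        // A non-zero opaqueCaptureAddress requires the capture/replay flag and forbids any import operation.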
+        auto opaque_alloc_info = lvl_find_in_chain<VkMemoryOpaqueCaptureAddressAllocateInfoKHR>(pAllocateInfo->pNext);
+        if (opaque_alloc_info && opaque_alloc_info->opaqueCaptureAddress != 0) {
+            if (!(flags & VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkMemoryAllocateInfo-opaqueCaptureAddress-03329",
+                                "If opaqueCaptureAddress is non-zero, VkMemoryAllocateFlagsInfo::flags must include "
+                                "VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR.");
+            }
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+            auto import_memory_win32_handle = lvl_find_in_chain<VkImportMemoryWin32HandleInfoKHR>(pAllocateInfo->pNext);
+#endif
+            auto import_memory_fd = lvl_find_in_chain<VkImportMemoryFdInfoKHR>(pAllocateInfo->pNext);
+            auto import_memory_host_pointer = lvl_find_in_chain<VkImportMemoryHostPointerInfoEXT>(pAllocateInfo->pNext);
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+            auto import_memory_ahb = lvl_find_in_chain<VkImportAndroidHardwareBufferInfoANDROID>(pAllocateInfo->pNext);
+#endif
+
+            if (import_memory_host_pointer) {
+                skip |= log_msg(
+                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                    "VUID-VkMemoryAllocateInfo-pNext-03332",
+                    "If the pNext chain includes a VkImportMemoryHostPointerInfoEXT structure, opaqueCaptureAddress must be zero.");
+            }
+            if (
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+                (import_memory_win32_handle && import_memory_win32_handle->handleType) ||
+#endif
+                (import_memory_fd && import_memory_fd->handleType) ||
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+                (import_memory_ahb && import_memory_ahb->buffer) ||
+#endif
+                (import_memory_host_pointer && import_memory_host_pointer->handleType)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkMemoryAllocateInfo-opaqueCaptureAddress-03333",
+                                "If the parameters define an import operation, opaqueCaptureAddress must be zero.");
+            }
+        }
+
+        if (flags) {
+            const auto *bda_features =
+                lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeaturesKHR>(physical_device_features2.pNext);
+            if ((flags & VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR) &&
+                (!bda_features || !bda_features->bufferDeviceAddressCaptureReplay)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkMemoryAllocateInfo-flags-03330",
+                                "If VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR is set, "
+                                "bufferDeviceAddressCaptureReplay must be enabled.");
+            }
+            if ((flags & VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR) && (!bda_features || !bda_features->bufferDeviceAddress)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkMemoryAllocateInfo-flags-03331",
+                                "If VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR is set, bufferDeviceAddress must be enabled.");
+            }
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::ValidateGeometryTrianglesNV(const VkGeometryTrianglesNV &triangles,
+                                                      VkDebugReportObjectTypeEXT object_type, uint64_t object_handle,
+                                                      const char *func_name) const {
+    bool skip = false;
+
+    if (triangles.vertexFormat != VK_FORMAT_R32G32B32_SFLOAT && triangles.vertexFormat != VK_FORMAT_R16G16B16_SFLOAT &&
+        triangles.vertexFormat != VK_FORMAT_R16G16B16_SNORM && triangles.vertexFormat != VK_FORMAT_R32G32_SFLOAT &&
+        triangles.vertexFormat != VK_FORMAT_R16G16_SFLOAT && triangles.vertexFormat != VK_FORMAT_R16G16_SNORM) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
+                        "VUID-VkGeometryTrianglesNV-vertexFormat-02430", "%s", func_name);
+    } else {
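+        // Derive the per-component size in bytes so the vertexOffset alignment can be validated.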
+        uint32_t vertex_component_size = 0;
+        if (triangles.vertexFormat == VK_FORMAT_R32G32B32_SFLOAT || triangles.vertexFormat == VK_FORMAT_R32G32_SFLOAT) {
+            vertex_component_size = 4;
+        } else if (triangles.vertexFormat == VK_FORMAT_R16G16B16_SFLOAT || triangles.vertexFormat == VK_FORMAT_R16G16B16_SNORM ||
+                   triangles.vertexFormat == VK_FORMAT_R16G16_SFLOAT || triangles.vertexFormat == VK_FORMAT_R16G16_SNORM) {
+            vertex_component_size = 2;
+        }
+        if (vertex_component_size > 0 && SafeModulo(triangles.vertexOffset, vertex_component_size) != 0) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
+                            "VUID-VkGeometryTrianglesNV-vertexOffset-02429", "%s", func_name);
+        }
+    }
+
+    if (triangles.indexType != VK_INDEX_TYPE_UINT32 && triangles.indexType != VK_INDEX_TYPE_UINT16 &&
+        triangles.indexType != VK_INDEX_TYPE_NONE_NV) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
+                        "VUID-VkGeometryTrianglesNV-indexType-02433", "%s", func_name);
+    } else {
+        uint32_t index_element_size = 0;
+        if (triangles.indexType == VK_INDEX_TYPE_UINT32) {
+            index_element_size = 4;
+        } else if (triangles.indexType == VK_INDEX_TYPE_UINT16) {
+            index_element_size = 2;
+        }
+        if (index_element_size > 0 && SafeModulo(triangles.indexOffset, index_element_size) != 0) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
+                            "VUID-VkGeometryTrianglesNV-indexOffset-02432", "%s", func_name);
+        }
+    }
+    if (triangles.indexType == VK_INDEX_TYPE_NONE_NV) {
+        if (triangles.indexCount != 0) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
+                            "VUID-VkGeometryTrianglesNV-indexCount-02436", "%s", func_name);
+        }
+        if (triangles.indexData != VK_NULL_HANDLE) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
+                            "VUID-VkGeometryTrianglesNV-indexData-02434", "%s", func_name);
+        }
+    }
+
+    if (SafeModulo(triangles.transformOffset, 16) != 0) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
+                        "VUID-VkGeometryTrianglesNV-transformOffset-02438", "%s", func_name);
+    }
+
+    return skip;
+}
+
+bool StatelessValidation::ValidateGeometryAABBNV(const VkGeometryAABBNV &aabbs, VkDebugReportObjectTypeEXT object_type,
+                                                 uint64_t object_handle, const char *func_name) const {
+    bool skip = false;
+
+    if (SafeModulo(aabbs.offset, 8) != 0) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
+                        "VUID-VkGeometryAABBNV-offset-02440", "%s", func_name);
+    }
+    if (SafeModulo(aabbs.stride, 8) != 0) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
+                        "VUID-VkGeometryAABBNV-stride-02441", "%s", func_name);
+    }
+
+    return skip;
+}
+
+bool StatelessValidation::ValidateGeometryNV(const VkGeometryNV &geometry, VkDebugReportObjectTypeEXT object_type,
+                                             uint64_t object_handle, const char *func_name) const {
+    bool skip = false;
+    if (geometry.geometryType == VK_GEOMETRY_TYPE_TRIANGLES_NV) {
+        skip = ValidateGeometryTrianglesNV(geometry.geometry.triangles, object_type, object_handle, func_name);
+    } else if (geometry.geometryType == VK_GEOMETRY_TYPE_AABBS_NV) {
+        skip = ValidateGeometryAABBNV(geometry.geometry.aabbs, object_type, object_handle, func_name);
+    }
+    return skip;
+}
+
+bool StatelessValidation::ValidateAccelerationStructureInfoNV(const VkAccelerationStructureInfoNV &info,
+                                                              VkDebugReportObjectTypeEXT object_type, uint64_t object_handle,
+                                                              const char *func_name) const {
+    bool skip = false;
+    if (info.type == VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV && info.geometryCount != 0) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
+                        "VUID-VkAccelerationStructureInfoNV-type-02425",
+                        "VkAccelerationStructureInfoNV: If type is VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV then "
+                        "geometryCount must be 0.");
+    }
+    if (info.type == VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV && info.instanceCount != 0) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
+                        "VUID-VkAccelerationStructureInfoNV-type-02426",
+                        "VkAccelerationStructureInfoNV: If type is VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV then "
+                        "instanceCount must be 0.");
+    }
+    if (info.flags & VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV &&
+        info.flags & VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
+                        "VUID-VkAccelerationStructureInfoNV-flags-02592",
+                        "VkAccelerationStructureInfoNV: If flags has the VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV"
+                        "bit set, then it must not have the VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV bit set.");
+    }
+    if (info.geometryCount > phys_dev_ext_props.ray_tracing_props.maxGeometryCount) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
+                        "VUID-VkAccelerationStructureInfoNV-geometryCount-02422",
+                        "VkAccelerationStructureInfoNV: geometryCount must be less than or equal to "
+                        "VkPhysicalDeviceRayTracingPropertiesNV::maxGeometryCount.");
+    }
+    if (info.instanceCount > phys_dev_ext_props.ray_tracing_props.maxInstanceCount) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
+                        "VUID-VkAccelerationStructureInfoNV-instanceCount-02423",
+                        "VkAccelerationStructureInfoNV: instanceCount must be less than or equal to "
+                        "VkPhysicalDeviceRayTracingPropertiesNV::maxInstanceCount.");
+    }
+    if (info.type == VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV && info.geometryCount > 0) {
+        uint64_t total_triangle_count = 0;
+        for (uint32_t i = 0; i < info.geometryCount; i++) {
+            const VkGeometryNV &geometry = info.pGeometries[i];
+
+            skip |= ValidateGeometryNV(geometry, object_type, object_handle, func_name);
+
+            if (geometry.geometryType != VK_GEOMETRY_TYPE_TRIANGLES_NV) {
+                continue;
+            }
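+            // Each triangle consumes three indices, so indexCount / 3 triangles are contributed by this geometry.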
+            total_triangle_count += geometry.geometry.triangles.indexCount / 3;
+        }
+        if (total_triangle_count > phys_dev_ext_props.ray_tracing_props.maxTriangleCount) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle,
+                            "VUID-VkAccelerationStructureInfoNV-maxTriangleCount-02424",
+                            "VkAccelerationStructureInfoNV: The total number of triangles in all geometries must be less than "
+                            "or equal to VkPhysicalDeviceRayTracingPropertiesNV::maxTriangleCount.");
+        }
+    }
+    if (info.type == VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV && info.geometryCount > 1) {
+        const VkGeometryTypeNV first_geometry_type = info.pGeometries[0].geometryType;
+        for (uint32_t i = 1; i < info.geometryCount; i++) {
+            const VkGeometryNV &geometry = info.pGeometries[i];
+            if (geometry.geometryType != first_geometry_type) {
+                // TODO: update fake VUID below with the real one once it is generated.
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
+                            0, "UNASSIGNED-VkAccelerationStructureInfoNV-pGeometries-XXXX",
+                            "VkAccelerationStructureInfoNV: info.pGeometries[%d].geometryType does not match "
+                            "info.pGeometries[0].geometryType.",
+                            i);
+            }
+        }
+    }
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCreateAccelerationStructureNV(
+    VkDevice device, const VkAccelerationStructureCreateInfoNV *pCreateInfo, const VkAllocationCallbacks *pAllocator,
+    VkAccelerationStructureNV *pAccelerationStructure) const {
+    bool skip = false;
+
+    if (pCreateInfo) {
+        if ((pCreateInfo->compactedSize != 0) &&
+            ((pCreateInfo->info.geometryCount != 0) || (pCreateInfo->info.instanceCount != 0))) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkAccelerationStructureCreateInfoNV-compactedSize-02421",
+                            "vkCreateAccelerationStructureNV(): pCreateInfo->compactedSize nonzero (%" PRIu64
+                            ") with info.geometryCount (%" PRIu32 ") or info.instanceCount (%" PRIu32 ") nonzero.",
+                            pCreateInfo->compactedSize, pCreateInfo->info.geometryCount, pCreateInfo->info.instanceCount);
+        }
+
+        skip |= ValidateAccelerationStructureInfoNV(pCreateInfo->info, VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT, 0,
+                                                    "vkCreateAccelerationStructureNV()");
+    }
+
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer,
+                                                                                const VkAccelerationStructureInfoNV *pInfo,
+                                                                                VkBuffer instanceData, VkDeviceSize instanceOffset,
+                                                                                VkBool32 update, VkAccelerationStructureNV dst,
+                                                                                VkAccelerationStructureNV src, VkBuffer scratch,
+                                                                                VkDeviceSize scratchOffset) const {
+    bool skip = false;
+
+    if (pInfo != nullptr) {
+        skip |= ValidateAccelerationStructureInfoNV(*pInfo, VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
+                                                    HandleToUint64(dst), "vkCmdBuildAccelerationStructureNV()");
+    }
+
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateGetAccelerationStructureHandleNV(VkDevice device,
+                                                                                 VkAccelerationStructureNV accelerationStructure,
+                                                                                 size_t dataSize, void *pData) const {
+    bool skip = false;
+    if (dataSize < 8) {
+        skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
+                       HandleToUint64(accelerationStructure), "VUID-vkGetAccelerationStructureHandleNV-dataSize-02240",
+                       "vkGetAccelerationStructureHandleNV(): dataSize must be greater than or equal to 8.");
+    }
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
+                                                                            uint32_t createInfoCount,
+                                                                            const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
+                                                                            const VkAllocationCallbacks *pAllocator,
+                                                                            VkPipeline *pPipelines) const {
+    bool skip = false;
+
+    for (uint32_t i = 0; i < createInfoCount; i++) {
+        auto feedback_struct = lvl_find_in_chain<VkPipelineCreationFeedbackCreateInfoEXT>(pCreateInfos[i].pNext);
+        if ((feedback_struct != nullptr) && (feedback_struct->pipelineStageCreationFeedbackCount != pCreateInfos[i].stageCount)) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, VK_NULL_HANDLE,
+                            "VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02670",
+                            "vkCreateRayTracingPipelinesNV(): in pCreateInfo[%" PRIu32
+                            "], VkPipelineCreationFeedbackEXT::pipelineStageCreationFeedbackCount"
+                            "(=%" PRIu32 ") must equal VkRayTracingPipelineCreateInfoNV::stageCount(=%" PRIu32 ").",
+                            i, feedback_struct->pipelineStageCreationFeedbackCount, pCreateInfos[i].stageCount);
+        }
+    }
+
+    return skip;
+}
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+bool StatelessValidation::PreCallValidateGetDeviceGroupSurfacePresentModes2EXT(VkDevice device,
+                                                                               const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
+                                                                               VkDeviceGroupPresentModeFlagsKHR *pModes) const {
+    bool skip = false;
+    if (!device_extensions.vk_khr_swapchain)
+        skip |= OutputExtensionError("vkGetDeviceGroupSurfacePresentModes2EXT", VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_get_surface_capabilities_2)
+        skip |= OutputExtensionError("vkGetDeviceGroupSurfacePresentModes2EXT", VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_surface)
+        skip |= OutputExtensionError("vkGetDeviceGroupSurfacePresentModes2EXT", VK_KHR_SURFACE_EXTENSION_NAME);
+    if (!device_extensions.vk_khr_get_physical_device_properties_2)
+        skip |=
+            OutputExtensionError("vkGetDeviceGroupSurfacePresentModes2EXT", VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (!device_extensions.vk_ext_full_screen_exclusive)
+        skip |= OutputExtensionError("vkGetDeviceGroupSurfacePresentModes2EXT", VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME);
+    skip |= validate_struct_type(
+        "vkGetDeviceGroupSurfacePresentModes2EXT", "pSurfaceInfo", "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR",
+        pSurfaceInfo, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR, true,
+        "VUID-vkGetDeviceGroupSurfacePresentModes2EXT-pSurfaceInfo-parameter", "VUID-VkPhysicalDeviceSurfaceInfo2KHR-sType-sType");
+    if (pSurfaceInfo != NULL) {
+        const VkStructureType allowed_structs_VkPhysicalDeviceSurfaceInfo2KHR[] = {
+            VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT,
+            VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT};
+
+        skip |= validate_struct_pnext("vkGetDeviceGroupSurfacePresentModes2EXT", "pSurfaceInfo->pNext",
+                                      "VkSurfaceFullScreenExclusiveInfoEXT, VkSurfaceFullScreenExclusiveWin32InfoEXT",
+                                      pSurfaceInfo->pNext, ARRAY_SIZE(allowed_structs_VkPhysicalDeviceSurfaceInfo2KHR),
+                                      allowed_structs_VkPhysicalDeviceSurfaceInfo2KHR, GeneratedVulkanHeaderVersion,
+                                      "VUID-VkPhysicalDeviceSurfaceInfo2KHR-pNext-pNext");
+
+        skip |= validate_required_handle("vkGetDeviceGroupSurfacePresentModes2EXT", "pSurfaceInfo->surface", pSurfaceInfo->surface);
+    }
+    return skip;
+}
+#endif
+
+bool StatelessValidation::manual_PreCallValidateCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
+                                                                  const VkAllocationCallbacks *pAllocator,
+                                                                  VkFramebuffer *pFramebuffer) const {
+    // Validation for pAttachments which is excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml
+    bool skip = false;
+    if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
+        skip |= validate_array("vkCreateFramebuffer", "attachmentCount", "pAttachments", pCreateInfo->attachmentCount,
+                               &pCreateInfo->pAttachments, false, true, kVUIDUndefined, kVUIDUndefined);
+    }
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
+                                                                     uint16_t lineStipplePattern) const {
+    bool skip = false;
+
+    if (lineStippleFactor < 1 || lineStippleFactor > 256) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdSetLineStippleEXT-lineStippleFactor-02776",
+                        "vkCmdSetLineStippleEXT::lineStippleFactor=%d is not in [1,256].", lineStippleFactor);
+    }
+
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer,
+                                                                   VkDeviceSize offset, VkIndexType indexType) const {
+    bool skip = false;
+
+    if (indexType == VK_INDEX_TYPE_NONE_NV) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdBindIndexBuffer-indexType-02507",
+                        "vkCmdBindIndexBuffer() indexType must not be VK_INDEX_TYPE_NONE_NV.");
+    }
+
+    const auto *index_type_uint8_features =
+        lvl_find_in_chain<VkPhysicalDeviceIndexTypeUint8FeaturesEXT>(physical_device_features2.pNext);
+    if (indexType == VK_INDEX_TYPE_UINT8_EXT && (!index_type_uint8_features || !index_type_uint8_features->indexTypeUint8)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+                        HandleToUint64(commandBuffer), "VUID-vkCmdBindIndexBuffer-indexType-02765",
+                        "vkCmdBindIndexBuffer() indexType is VK_INDEX_TYPE_UINT8_EXT but indexTypeUint8 feature is not enabled.");
+    }
+
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateSetDebugUtilsObjectNameEXT(VkDevice device,
+                                                                           const VkDebugUtilsObjectNameInfoEXT *pNameInfo) const {
+    bool skip = false;
+    if (pNameInfo->objectType == VK_OBJECT_TYPE_UNKNOWN) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        "VUID-VkDebugUtilsObjectNameInfoEXT-objectType-02589",
+                        "vkSetDebugUtilsObjectNameEXT() pNameInfo->objectType cannot be VK_OBJECT_TYPE_UNKNOWN.");
+    }
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateSetDebugUtilsObjectTagEXT(VkDevice device,
+                                                                          const VkDebugUtilsObjectTagInfoEXT *pTagInfo) const {
+    bool skip = false;
+    if (pTagInfo->objectType == VK_OBJECT_TYPE_UNKNOWN) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+                        "VUID-VkDebugUtilsObjectTagInfoEXT-objectType-01908",
+                        "vkSetDebugUtilsObjectTagEXT() pTagInfo->objectType cannot be VK_OBJECT_TYPE_UNKNOWN.");
+    }
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
+                                                                    VkSemaphore semaphore, VkFence fence,
+                                                                    uint32_t *pImageIndex) const {
+    bool skip = false;
+
+    if (semaphore == VK_NULL_HANDLE && fence == VK_NULL_HANDLE) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+                        HandleToUint64(swapchain), "VUID-vkAcquireNextImageKHR-semaphore-01780",
+                        "vkAcquireNextImageKHR: semaphore and fence are both VK_NULL_HANDLE.");
+    }
+
+    return skip;
+}
+
+bool StatelessValidation::manual_PreCallValidateAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
+                                                                     uint32_t *pImageIndex) const {
+    bool skip = false;
+
+    if (pAcquireInfo->semaphore == VK_NULL_HANDLE && pAcquireInfo->fence == VK_NULL_HANDLE) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+                        HandleToUint64(pAcquireInfo->swapchain), "VUID-VkAcquireNextImageInfoKHR-semaphore-01782",
+                        "vkAcquireNextImage2KHR: pAcquireInfo->semaphore and pAcquireInfo->fence are both VK_NULL_HANDLE.");
+    }
+
+    return skip;
+}
diff --git a/src/third_party/vulkan-validation-layers/src/layers/shader_validation.cpp b/src/third_party/vulkan-validation-layers/src/layers/shader_validation.cpp
new file mode 100644
index 0000000..539a711
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/shader_validation.cpp
@@ -0,0 +1,3466 @@
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (C) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Chris Forbes <chrisf@ijw.co.nz>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ */
+
+#define NOMINMAX
+
+#include "shader_validation.h"
+
+#include <cassert>
+#include <chrono>
+#include <cinttypes>
+#include <cmath>
+#include <map>
+#include <sstream>
+#include <string>
+#include <unordered_map>
+#include <vector>
+
+#include <SPIRV/spirv.hpp>
+#include "vk_loader_platform.h"
+#include "vk_enum_string_helper.h"
+#include "vk_layer_data.h"
+#include "vk_layer_extension_utils.h"
+#include "vk_layer_utils.h"
+#include "chassis.h"
+#include "core_validation.h"
+
+#include "spirv-tools/libspirv.h"
+#include "xxhash.h"
+
+void decoration_set::add(uint32_t decoration, uint32_t value) {
+    switch (decoration) {
+        case spv::DecorationLocation:
+            flags |= location_bit;
+            location = value;
+            break;
+        case spv::DecorationPatch:
+            flags |= patch_bit;
+            break;
+        case spv::DecorationRelaxedPrecision:
+            flags |= relaxed_precision_bit;
+            break;
+        case spv::DecorationBlock:
+            flags |= block_bit;
+            break;
+        case spv::DecorationBufferBlock:
+            flags |= buffer_block_bit;
+            break;
+        case spv::DecorationComponent:
+            flags |= component_bit;
+            component = value;
+            break;
+        case spv::DecorationInputAttachmentIndex:
+            flags |= input_attachment_index_bit;
+            input_attachment_index = value;
+            break;
+        case spv::DecorationDescriptorSet:
+            flags |= descriptor_set_bit;
+            descriptor_set = value;
+            break;
+        case spv::DecorationBinding:
+            flags |= binding_bit;
+            binding = value;
+            break;
+        case spv::DecorationNonWritable:
+            flags |= nonwritable_bit;
+            break;
+        case spv::DecorationBuiltIn:
+            flags |= builtin_bit;
+            builtin = value;
+            break;
+    }
+}
+
+enum FORMAT_TYPE {
+    FORMAT_TYPE_FLOAT = 1,  // UNORM, SNORM, FLOAT, USCALED, SSCALED, SRGB -- anything we consider float in the shader
+    FORMAT_TYPE_SINT = 2,
+    FORMAT_TYPE_UINT = 4,
+};
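+// These values are bit flags: a format that can be read as more than one fundamental type (e.g. depth/stencil, which
+// GetFormatType reports as FLOAT | UINT) is an OR of flags, so matching against a shader type is a single bitwise AND.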
+
+typedef std::pair<unsigned, unsigned> location_t;
+
+struct shader_stage_attributes {
+    char const *const name;
+    bool arrayed_input;
+    bool arrayed_output;
+    VkShaderStageFlags stage;
+};
+
+static shader_stage_attributes shader_stage_attribs[] = {
+    {"vertex shader", false, false, VK_SHADER_STAGE_VERTEX_BIT},
+    {"tessellation control shader", true, true, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT},
+    {"tessellation evaluation shader", true, false, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT},
+    {"geometry shader", true, false, VK_SHADER_STAGE_GEOMETRY_BIT},
+    {"fragment shader", false, false, VK_SHADER_STAGE_FRAGMENT_BIT},
+};
+
+unsigned ExecutionModelToShaderStageFlagBits(unsigned mode);
+
+// SPIRV utility functions
+void SHADER_MODULE_STATE::BuildDefIndex() {
+    for (auto insn : *this) {
+        switch (insn.opcode()) {
+            // Types
+            case spv::OpTypeVoid:
+            case spv::OpTypeBool:
+            case spv::OpTypeInt:
+            case spv::OpTypeFloat:
+            case spv::OpTypeVector:
+            case spv::OpTypeMatrix:
+            case spv::OpTypeImage:
+            case spv::OpTypeSampler:
+            case spv::OpTypeSampledImage:
+            case spv::OpTypeArray:
+            case spv::OpTypeRuntimeArray:
+            case spv::OpTypeStruct:
+            case spv::OpTypeOpaque:
+            case spv::OpTypePointer:
+            case spv::OpTypeFunction:
+            case spv::OpTypeEvent:
+            case spv::OpTypeDeviceEvent:
+            case spv::OpTypeReserveId:
+            case spv::OpTypeQueue:
+            case spv::OpTypePipe:
+            case spv::OpTypeAccelerationStructureNV:
+            case spv::OpTypeCooperativeMatrixNV:
+                def_index[insn.word(1)] = insn.offset();
+                break;
+
+                // Fixed constants
+            case spv::OpConstantTrue:
+            case spv::OpConstantFalse:
+            case spv::OpConstant:
+            case spv::OpConstantComposite:
+            case spv::OpConstantSampler:
+            case spv::OpConstantNull:
+                def_index[insn.word(2)] = insn.offset();
+                break;
+
+                // Specialization constants
+            case spv::OpSpecConstantTrue:
+            case spv::OpSpecConstantFalse:
+            case spv::OpSpecConstant:
+            case spv::OpSpecConstantComposite:
+            case spv::OpSpecConstantOp:
+                def_index[insn.word(2)] = insn.offset();
+                break;
+
+                // Variables
+            case spv::OpVariable:
+                def_index[insn.word(2)] = insn.offset();
+                break;
+
+                // Functions
+            case spv::OpFunction:
+                def_index[insn.word(2)] = insn.offset();
+                break;
+
+                // Decorations
+            case spv::OpDecorate: {
+                auto targetId = insn.word(1);
+                decorations[targetId].add(insn.word(2), insn.len() > 3u ? insn.word(3) : 0u);
+            } break;
+            case spv::OpGroupDecorate: {
+                auto const &src = decorations[insn.word(1)];
+                for (auto i = 2u; i < insn.len(); i++) decorations[insn.word(i)].merge(src);
+            } break;
+
+                // Entry points ... add to the entrypoint table
+            case spv::OpEntryPoint: {
+                // Entry points do not have an id (the id is the function id) and thus need their own table
+                auto entrypoint_name = (char const *)&insn.word(3);
+                auto execution_model = insn.word(1);
+                auto entrypoint_stage = ExecutionModelToShaderStageFlagBits(execution_model);
+                entry_points.emplace(entrypoint_name, EntryPoint{insn.offset(), entrypoint_stage});
+                break;
+            }
+
+            default:
+                // We don't care about any other defs for now.
+                break;
+        }
+    }
+}
+
+unsigned ExecutionModelToShaderStageFlagBits(unsigned mode) {
+    switch (mode) {
+        case spv::ExecutionModelVertex:
+            return VK_SHADER_STAGE_VERTEX_BIT;
+        case spv::ExecutionModelTessellationControl:
+            return VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT;
+        case spv::ExecutionModelTessellationEvaluation:
+            return VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
+        case spv::ExecutionModelGeometry:
+            return VK_SHADER_STAGE_GEOMETRY_BIT;
+        case spv::ExecutionModelFragment:
+            return VK_SHADER_STAGE_FRAGMENT_BIT;
+        case spv::ExecutionModelGLCompute:
+            return VK_SHADER_STAGE_COMPUTE_BIT;
+        case spv::ExecutionModelRayGenerationNV:
+            return VK_SHADER_STAGE_RAYGEN_BIT_NV;
+        case spv::ExecutionModelAnyHitNV:
+            return VK_SHADER_STAGE_ANY_HIT_BIT_NV;
+        case spv::ExecutionModelClosestHitNV:
+            return VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV;
+        case spv::ExecutionModelMissNV:
+            return VK_SHADER_STAGE_MISS_BIT_NV;
+        case spv::ExecutionModelIntersectionNV:
+            return VK_SHADER_STAGE_INTERSECTION_BIT_NV;
+        case spv::ExecutionModelCallableNV:
+            return VK_SHADER_STAGE_CALLABLE_BIT_NV;
+        case spv::ExecutionModelTaskNV:
+            return VK_SHADER_STAGE_TASK_BIT_NV;
+        case spv::ExecutionModelMeshNV:
+            return VK_SHADER_STAGE_MESH_BIT_NV;
+        default:
+            return 0;
+    }
+}
+
+spirv_inst_iter FindEntrypoint(SHADER_MODULE_STATE const *src, char const *name, VkShaderStageFlagBits stageBits) {
+    auto range = src->entry_points.equal_range(name);
+    for (auto it = range.first; it != range.second; ++it) {
+        if (it->second.stage == stageBits) {
+            return src->at(it->second.offset);
+        }
+    }
+    return src->end();
+}
+
+static char const *StorageClassName(unsigned sc) {
+    switch (sc) {
+        case spv::StorageClassInput:
+            return "input";
+        case spv::StorageClassOutput:
+            return "output";
+        case spv::StorageClassUniformConstant:
+            return "const uniform";
+        case spv::StorageClassUniform:
+            return "uniform";
+        case spv::StorageClassWorkgroup:
+            return "workgroup local";
+        case spv::StorageClassCrossWorkgroup:
+            return "workgroup global";
+        case spv::StorageClassPrivate:
+            return "private global";
+        case spv::StorageClassFunction:
+            return "function";
+        case spv::StorageClassGeneric:
+            return "generic";
+        case spv::StorageClassAtomicCounter:
+            return "atomic counter";
+        case spv::StorageClassImage:
+            return "image";
+        case spv::StorageClassPushConstant:
+            return "push constant";
+        case spv::StorageClassStorageBuffer:
+            return "storage buffer";
+        default:
+            return "unknown";
+    }
+}
+
+// Get the value of an integral constant
+unsigned GetConstantValue(SHADER_MODULE_STATE const *src, unsigned id) {
+    auto value = src->get_def(id);
+    assert(value != src->end());
+
+    if (value.opcode() != spv::OpConstant) {
+        // TODO: Either ensure that the specialization transform is already performed on a module we're
+        //       considering here, OR -- specialize on the fly now.
+        return 1;
+    }
+
+    return value.word(3);
+}
+
+static void DescribeTypeInner(std::ostringstream &ss, SHADER_MODULE_STATE const *src, unsigned type) {
+    auto insn = src->get_def(type);
+    assert(insn != src->end());
+
+    switch (insn.opcode()) {
+        case spv::OpTypeBool:
+            ss << "bool";
+            break;
+        case spv::OpTypeInt:
+            ss << (insn.word(3) ? 's' : 'u') << "int" << insn.word(2);
+            break;
+        case spv::OpTypeFloat:
+            ss << "float" << insn.word(2);
+            break;
+        case spv::OpTypeVector:
+            ss << "vec" << insn.word(3) << " of ";
+            DescribeTypeInner(ss, src, insn.word(2));
+            break;
+        case spv::OpTypeMatrix:
+            ss << "mat" << insn.word(3) << " of ";
+            DescribeTypeInner(ss, src, insn.word(2));
+            break;
+        case spv::OpTypeArray:
+            ss << "arr[" << GetConstantValue(src, insn.word(3)) << "] of ";
+            DescribeTypeInner(ss, src, insn.word(2));
+            break;
+        case spv::OpTypeRuntimeArray:
+            ss << "runtime arr[] of ";
+            DescribeTypeInner(ss, src, insn.word(2));
+            break;
+        case spv::OpTypePointer:
+            ss << "ptr to " << StorageClassName(insn.word(2)) << " ";
+            DescribeTypeInner(ss, src, insn.word(3));
+            break;
+        case spv::OpTypeStruct: {
+            ss << "struct of (";
+            for (unsigned i = 2; i < insn.len(); i++) {
+                DescribeTypeInner(ss, src, insn.word(i));
+                if (i == insn.len() - 1) {
+                    ss << ")";
+                } else {
+                    ss << ", ";
+                }
+            }
+            break;
+        }
+        case spv::OpTypeSampler:
+            ss << "sampler";
+            break;
+        case spv::OpTypeSampledImage:
+            ss << "sampler+";
+            DescribeTypeInner(ss, src, insn.word(2));
+            break;
+        case spv::OpTypeImage:
+            ss << "image(dim=" << insn.word(3) << ", sampled=" << insn.word(7) << ")";
+            break;
+        case spv::OpTypeAccelerationStructureNV:
+            ss << "accelerationStruture";
+            break;
+        default:
+            ss << "oddtype";
+            break;
+    }
+}
+
+static std::string DescribeType(SHADER_MODULE_STATE const *src, unsigned type) {
+    std::ostringstream ss;
+    DescribeTypeInner(ss, src, type);
+    return ss.str();
+}
+
+static bool IsNarrowNumericType(spirv_inst_iter type) {
+    if (type.opcode() != spv::OpTypeInt && type.opcode() != spv::OpTypeFloat) return false;
+    return type.word(2) < 64;
+}
+
+static bool TypesMatch(SHADER_MODULE_STATE const *a, SHADER_MODULE_STATE const *b, unsigned a_type, unsigned b_type, bool a_arrayed,
+                       bool b_arrayed, bool relaxed) {
+    // Walk two type trees together, and complain about differences
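+    // When 'relaxed' is set, the 'a' side may be a vector that wraps, or has more components than, the 'b' side type,
+    // provided the element types still match (relaxed matching is used between pipeline stages).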
+    auto a_insn = a->get_def(a_type);
+    auto b_insn = b->get_def(b_type);
+    assert(a_insn != a->end());
+    assert(b_insn != b->end());
+
+    // Ignore runtime-sized arrays-- they cannot appear in these interfaces.
+
+    if (a_arrayed && a_insn.opcode() == spv::OpTypeArray) {
+        return TypesMatch(a, b, a_insn.word(2), b_type, false, b_arrayed, relaxed);
+    }
+
+    if (b_arrayed && b_insn.opcode() == spv::OpTypeArray) {
+        // We probably just found the extra level of arrayness in b_type: compare the type inside it to a_type
+        return TypesMatch(a, b, a_type, b_insn.word(2), a_arrayed, false, relaxed);
+    }
+
+    if (a_insn.opcode() == spv::OpTypeVector && relaxed && IsNarrowNumericType(b_insn)) {
+        return TypesMatch(a, b, a_insn.word(2), b_type, a_arrayed, b_arrayed, false);
+    }
+
+    if (a_insn.opcode() != b_insn.opcode()) {
+        return false;
+    }
+
+    if (a_insn.opcode() == spv::OpTypePointer) {
+        // Match on pointee type. storage class is expected to differ
+        return TypesMatch(a, b, a_insn.word(3), b_insn.word(3), a_arrayed, b_arrayed, relaxed);
+    }
+
+    if (a_arrayed || b_arrayed) {
+        // If we haven't resolved array-of-verts by here, we're not going to.
+        return false;
+    }
+
+    switch (a_insn.opcode()) {
+        case spv::OpTypeBool:
+            return true;
+        case spv::OpTypeInt:
+            // Match on width, signedness
+            return a_insn.word(2) == b_insn.word(2) && a_insn.word(3) == b_insn.word(3);
+        case spv::OpTypeFloat:
+            // Match on width
+            return a_insn.word(2) == b_insn.word(2);
+        case spv::OpTypeVector:
+            // Match on element type, count.
+            if (!TypesMatch(a, b, a_insn.word(2), b_insn.word(2), a_arrayed, b_arrayed, false)) return false;
+            if (relaxed && IsNarrowNumericType(a->get_def(a_insn.word(2)))) {
+                return a_insn.word(3) >= b_insn.word(3);
+            } else {
+                return a_insn.word(3) == b_insn.word(3);
+            }
+        case spv::OpTypeMatrix:
+            // Match on element type, count.
+            return TypesMatch(a, b, a_insn.word(2), b_insn.word(2), a_arrayed, b_arrayed, false) &&
+                   a_insn.word(3) == b_insn.word(3);
+        case spv::OpTypeArray:
+            // Match on element type, count. These all have the same layout. We don't get here if b_arrayed. This differs from
+            // vector & matrix types in that the array size is the id of a constant instruction, not a literal within OpTypeArray.
+            return TypesMatch(a, b, a_insn.word(2), b_insn.word(2), a_arrayed, b_arrayed, false) &&
+                   GetConstantValue(a, a_insn.word(3)) == GetConstantValue(b, b_insn.word(3));
+        case spv::OpTypeStruct:
+            // Match on all element types
+            {
+                if (a_insn.len() != b_insn.len()) {
+                    return false;  // Structs cannot match if member counts differ
+                }
+
+                for (unsigned i = 2; i < a_insn.len(); i++) {
+                    if (!TypesMatch(a, b, a_insn.word(i), b_insn.word(i), a_arrayed, b_arrayed, false)) {
+                        return false;
+                    }
+                }
+
+                return true;
+            }
+        default:
+            // Remaining types are CLisms, or may not appear in the interfaces we are interested in. Just claim no match.
+            return false;
+    }
+}
+
+static unsigned ValueOrDefault(std::unordered_map<unsigned, unsigned> const &map, unsigned id, unsigned def) {
+    auto it = map.find(id);
+    if (it == map.end())
+        return def;
+    else
+        return it->second;
+}
+
+static unsigned GetLocationsConsumedByType(SHADER_MODULE_STATE const *src, unsigned type, bool strip_array_level) {
+    auto insn = src->get_def(type);
+    assert(insn != src->end());
+
+    switch (insn.opcode()) {
+        case spv::OpTypePointer:
+            // See through the ptr -- this is only ever at the toplevel for graphics shaders; we're never actually passing
+            // pointers around.
+            return GetLocationsConsumedByType(src, insn.word(3), strip_array_level);
+        case spv::OpTypeArray:
+            if (strip_array_level) {
+                return GetLocationsConsumedByType(src, insn.word(2), false);
+            } else {
+                return GetConstantValue(src, insn.word(3)) * GetLocationsConsumedByType(src, insn.word(2), false);
+            }
+        case spv::OpTypeMatrix:
+            // Num locations is the dimension * element size
+            return insn.word(3) * GetLocationsConsumedByType(src, insn.word(2), false);
+        case spv::OpTypeVector: {
+            auto scalar_type = src->get_def(insn.word(2));
+            auto bit_width =
+                (scalar_type.opcode() == spv::OpTypeInt || scalar_type.opcode() == spv::OpTypeFloat) ? scalar_type.word(2) : 32;
+
+            // Locations are 128-bit wide; 3- and 4-component vectors of 64 bit types require two.
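+            // e.g. a 3-component vector of 64-bit floats needs (64*3 + 127)/128 = 2 locations; a vec4 of 32-bit floats needs 1.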
+            return (bit_width * insn.word(3) + 127) / 128;
+        }
+        default:
+            // Everything else is just 1.
+            return 1;
+
+            // TODO: extend to handle 64bit scalar types, whose vectors may need multiple locations.
+    }
+}
+
+static unsigned GetComponentsConsumedByType(SHADER_MODULE_STATE const *src, unsigned type, bool strip_array_level) {
+    auto insn = src->get_def(type);
+    assert(insn != src->end());
+
+    switch (insn.opcode()) {
+        case spv::OpTypePointer:
+            // See through the ptr -- this is only ever at the toplevel for graphics shaders; we're never actually passing
+            // pointers around.
+            return GetComponentsConsumedByType(src, insn.word(3), strip_array_level);
+        case spv::OpTypeStruct: {
+            uint32_t sum = 0;
+            for (uint32_t i = 2; i < insn.len(); i++) {  // i=2 to skip word(0) and word(1)=ID of struct
+                sum += GetComponentsConsumedByType(src, insn.word(i), false);
+            }
+            return sum;
+        }
+        case spv::OpTypeArray:
+            if (strip_array_level) {
+                return GetComponentsConsumedByType(src, insn.word(2), false);
+            } else {
+                return GetConstantValue(src, insn.word(3)) * GetComponentsConsumedByType(src, insn.word(2), false);
+            }
+        case spv::OpTypeMatrix:
+            // Num locations is the dimension * element size
+            return insn.word(3) * GetComponentsConsumedByType(src, insn.word(2), false);
+        case spv::OpTypeVector: {
+            auto scalar_type = src->get_def(insn.word(2));
+            auto bit_width =
+                (scalar_type.opcode() == spv::OpTypeInt || scalar_type.opcode() == spv::OpTypeFloat) ? scalar_type.word(2) : 32;
+            // One component is 32-bit
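+            // e.g. a 3-component vector of 64-bit floats consumes (64*3 + 31)/32 = 6 components; a 32-bit vec2 consumes 2.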
+            return (bit_width * insn.word(3) + 31) / 32;
+        }
+        case spv::OpTypeFloat: {
+            auto bit_width = insn.word(2);
+            return (bit_width + 31) / 32;
+        }
+        case spv::OpTypeInt: {
+            auto bit_width = insn.word(2);
+            return (bit_width + 31) / 32;
+        }
+        case spv::OpConstant:
+            return GetComponentsConsumedByType(src, insn.word(1), false);
+        default:
+            return 0;
+    }
+}
+
+static unsigned GetLocationsConsumedByFormat(VkFormat format) {
+    switch (format) {
+        case VK_FORMAT_R64G64B64A64_SFLOAT:
+        case VK_FORMAT_R64G64B64A64_SINT:
+        case VK_FORMAT_R64G64B64A64_UINT:
+        case VK_FORMAT_R64G64B64_SFLOAT:
+        case VK_FORMAT_R64G64B64_SINT:
+        case VK_FORMAT_R64G64B64_UINT:
+            return 2;
+        default:
+            return 1;
+    }
+}
+
+static unsigned GetFormatType(VkFormat fmt) {
+    if (FormatIsSInt(fmt)) return FORMAT_TYPE_SINT;
+    if (FormatIsUInt(fmt)) return FORMAT_TYPE_UINT;
+    if (FormatIsDepthAndStencil(fmt)) return FORMAT_TYPE_FLOAT | FORMAT_TYPE_UINT;
+    if (fmt == VK_FORMAT_UNDEFINED) return 0;
+    // everything else -- UNORM/SNORM/FLOAT/USCALED/SSCALED is all float in the shader.
+    return FORMAT_TYPE_FLOAT;
+}
+
+// characterizes a SPIR-V type appearing in an interface to a FF stage, for comparison to a VkFormat's characterization above.
+// also used for input attachments, as we statically know their format.
+static unsigned GetFundamentalType(SHADER_MODULE_STATE const *src, unsigned type) {
+    auto insn = src->get_def(type);
+    assert(insn != src->end());
+
+    switch (insn.opcode()) {
+        case spv::OpTypeInt:
+            return insn.word(3) ? FORMAT_TYPE_SINT : FORMAT_TYPE_UINT;
+        case spv::OpTypeFloat:
+            return FORMAT_TYPE_FLOAT;
+        case spv::OpTypeVector:
+        case spv::OpTypeMatrix:
+        case spv::OpTypeArray:
+        case spv::OpTypeRuntimeArray:
+        case spv::OpTypeImage:
+            return GetFundamentalType(src, insn.word(2));
+        case spv::OpTypePointer:
+            return GetFundamentalType(src, insn.word(3));
+
+        default:
+            return 0;
+    }
+}
+
+static uint32_t GetShaderStageId(VkShaderStageFlagBits stage) {
+    uint32_t bit_pos = uint32_t(u_ffs(stage));
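+    // u_ffs is 1-based (like ffs()), so subtract one to get a zero-based stage index.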
+    return bit_pos - 1;
+}
+
+static spirv_inst_iter GetStructType(SHADER_MODULE_STATE const *src, spirv_inst_iter def, bool is_array_of_verts) {
+    while (true) {
+        if (def.opcode() == spv::OpTypePointer) {
+            def = src->get_def(def.word(3));
+        } else if (def.opcode() == spv::OpTypeArray && is_array_of_verts) {
+            def = src->get_def(def.word(2));
+            is_array_of_verts = false;
+        } else if (def.opcode() == spv::OpTypeStruct) {
+            return def;
+        } else {
+            return src->end();
+        }
+    }
+}
+
+static bool CollectInterfaceBlockMembers(SHADER_MODULE_STATE const *src, std::map<location_t, interface_var> *out,
+                                         bool is_array_of_verts, uint32_t id, uint32_t type_id, bool is_patch,
+                                         int /*first_location*/) {
+    // Walk down the type_id presented, trying to determine whether it's actually an interface block.
+    auto type = GetStructType(src, src->get_def(type_id), is_array_of_verts && !is_patch);
+    if (type == src->end() || !(src->get_decorations(type.word(1)).flags & decoration_set::block_bit)) {
+        // This isn't an interface block.
+        return false;
+    }
+
+    std::unordered_map<unsigned, unsigned> member_components;
+    std::unordered_map<unsigned, unsigned> member_relaxed_precision;
+    std::unordered_map<unsigned, unsigned> member_patch;
+
+    // Walk all the OpMemberDecorate for type's result id -- first pass, collect components.
+    for (auto insn : *src) {
+        if (insn.opcode() == spv::OpMemberDecorate && insn.word(1) == type.word(1)) {
+            unsigned member_index = insn.word(2);
+
+            if (insn.word(3) == spv::DecorationComponent) {
+                unsigned component = insn.word(4);
+                member_components[member_index] = component;
+            }
+
+            if (insn.word(3) == spv::DecorationRelaxedPrecision) {
+                member_relaxed_precision[member_index] = 1;
+            }
+
+            if (insn.word(3) == spv::DecorationPatch) {
+                member_patch[member_index] = 1;
+            }
+        }
+    }
+
+    // TODO: correctly handle location assignment from outside
+
+    // Second pass -- produce the output, from Location decorations
+    for (auto insn : *src) {
+        if (insn.opcode() == spv::OpMemberDecorate && insn.word(1) == type.word(1)) {
+            unsigned member_index = insn.word(2);
+            unsigned member_type_id = type.word(2 + member_index);
+
+            if (insn.word(3) == spv::DecorationLocation) {
+                unsigned location = insn.word(4);
+                unsigned num_locations = GetLocationsConsumedByType(src, member_type_id, false);
+                auto component_it = member_components.find(member_index);
+                unsigned component = component_it == member_components.end() ? 0 : component_it->second;
+                bool is_relaxed_precision = member_relaxed_precision.find(member_index) != member_relaxed_precision.end();
+                bool member_is_patch = is_patch || member_patch.count(member_index) > 0;
+
+                for (unsigned int offset = 0; offset < num_locations; offset++) {
+                    interface_var v = {};
+                    v.id = id;
+                    // TODO: member index in interface_var too?
+                    v.type_id = member_type_id;
+                    v.offset = offset;
+                    v.is_patch = member_is_patch;
+                    v.is_block_member = true;
+                    v.is_relaxed_precision = is_relaxed_precision;
+                    (*out)[std::make_pair(location + offset, component)] = v;
+                }
+            }
+        }
+    }
+
+    return true;
+}
+
+static std::vector<uint32_t> FindEntrypointInterfaces(spirv_inst_iter entrypoint) {
+    assert(entrypoint.opcode() == spv::OpEntryPoint);
+
+    std::vector<uint32_t> interfaces;
+    // Find the end of the entrypoint's name string. additional zero bytes follow the actual null terminator, to fill out the
+    // rest of the word - so we only need to look at the last byte in the word to determine which word contains the terminator.
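+    // e.g. "main" packs into a word holding 'm','a','i','n' (last byte 'n' != 0, keep scanning) followed by a word of
+    // zero padding whose last byte is 0, which ends the scan.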
+    uint32_t word = 3;
+    while (entrypoint.word(word) & 0xff000000u) {
+        ++word;
+    }
+    ++word;
+
+    for (; word < entrypoint.len(); word++) interfaces.push_back(entrypoint.word(word));
+
+    return interfaces;
+}
+
+static std::map<location_t, interface_var> CollectInterfaceByLocation(SHADER_MODULE_STATE const *src, spirv_inst_iter entrypoint,
+                                                                      spv::StorageClass sinterface, bool is_array_of_verts) {
+    // TODO: handle index=1 dual source outputs from FS -- two vars will have the same location, and we DON'T want to clobber.
+
+    std::map<location_t, interface_var> out;
+
+    for (uint32_t iid : FindEntrypointInterfaces(entrypoint)) {
+        auto insn = src->get_def(iid);
+        assert(insn != src->end());
+        assert(insn.opcode() == spv::OpVariable);
+
+        if (insn.word(3) == static_cast<uint32_t>(sinterface)) {
+            auto d = src->get_decorations(iid);
+            unsigned id = insn.word(2);
+            unsigned type = insn.word(1);
+
+            int location = d.location;
+            int builtin = d.builtin;
+            unsigned component = d.component;
+            bool is_patch = (d.flags & decoration_set::patch_bit) != 0;
+            bool is_relaxed_precision = (d.flags & decoration_set::relaxed_precision_bit) != 0;
+
+            if (builtin != -1)
+                continue;
+            else if (!CollectInterfaceBlockMembers(src, &out, is_array_of_verts, id, type, is_patch, location)) {
+                // A user-defined interface variable, with a location. Where a variable occupies multiple locations, emit
+                // one result for each.
+                unsigned num_locations = GetLocationsConsumedByType(src, type, is_array_of_verts && !is_patch);
+                for (unsigned int offset = 0; offset < num_locations; offset++) {
+                    interface_var v = {};
+                    v.id = id;
+                    v.type_id = type;
+                    v.offset = offset;
+                    v.is_patch = is_patch;
+                    v.is_relaxed_precision = is_relaxed_precision;
+                    out[std::make_pair(location + offset, component)] = v;
+                }
+            }
+        }
+    }
+
+    return out;
+}
+
+static std::vector<uint32_t> CollectBuiltinBlockMembers(SHADER_MODULE_STATE const *src, spirv_inst_iter entrypoint,
+                                                        uint32_t storageClass) {
+    std::vector<uint32_t> variables;
+    std::vector<uint32_t> builtinStructMembers;
+    std::vector<uint32_t> builtinDecorations;
+
+    for (auto insn : *src) {
+        switch (insn.opcode()) {
+            // Find all built-in member decorations
+            case spv::OpMemberDecorate:
+                if (insn.word(3) == spv::DecorationBuiltIn) {
+                    builtinStructMembers.push_back(insn.word(1));
+                }
+                break;
+            // Find all built-in decorations
+            case spv::OpDecorate:
+                switch (insn.word(2)) {
+                    case spv::DecorationBlock: {
+                        uint32_t blockID = insn.word(1);
+                        for (auto builtInBlockID : builtinStructMembers) {
+                            // Check if one of the members of the block are built-in -> the block is built-in
+                            if (blockID == builtInBlockID) {
+                                builtinDecorations.push_back(blockID);
+                                break;
+                            }
+                        }
+                        break;
+                    }
+                    case spv::DecorationBuiltIn:
+                        builtinDecorations.push_back(insn.word(1));
+                        break;
+                    default:
+                        break;
+                }
+                break;
+            default:
+                break;
+        }
+    }
+
+    // Find all interface variables belonging to the entrypoint and matching the storage class
+    for (uint32_t id : FindEntrypointInterfaces(entrypoint)) {
+        auto def = src->get_def(id);
+        assert(def != src->end());
+        assert(def.opcode() == spv::OpVariable);
+
+        if (def.word(3) == storageClass) variables.push_back(def.word(1));
+    }
+
+    // Find all members belonging to the builtin block selected
+    std::vector<uint32_t> builtinBlockMembers;
+    for (auto &var : variables) {
+        auto def = src->get_def(src->get_def(var).word(3));
+
+        // It could be an array of IO blocks. The element type should be the struct defining the block contents
+        if (def.opcode() == spv::OpTypeArray) def = src->get_def(def.word(2));
+
+        // Now find all members belonging to the struct defining the IO block
+        if (def.opcode() == spv::OpTypeStruct) {
+            for (auto builtInID : builtinDecorations) {
+                if (builtInID == def.word(1)) {
+                    for (int i = 2; i < (int)def.len(); i++)
+                        builtinBlockMembers.push_back(spv::BuiltInMax);  // Start with undefined builtin for each struct member.
+                                                                         // These shouldn't be left after replacing.
+                    for (auto insn : *src) {
+                        if (insn.opcode() == spv::OpMemberDecorate && insn.word(1) == builtInID &&
+                            insn.word(3) == spv::DecorationBuiltIn) {
+                            auto structIndex = insn.word(2);
+                            assert(structIndex < builtinBlockMembers.size());
+                            builtinBlockMembers[structIndex] = insn.word(4);
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    return builtinBlockMembers;
+}
+
+static std::vector<std::pair<uint32_t, interface_var>> CollectInterfaceByInputAttachmentIndex(
+    SHADER_MODULE_STATE const *src, std::unordered_set<uint32_t> const &accessible_ids) {
+    std::vector<std::pair<uint32_t, interface_var>> out;
+
+    for (auto insn : *src) {
+        if (insn.opcode() == spv::OpDecorate) {
+            if (insn.word(2) == spv::DecorationInputAttachmentIndex) {
+                auto attachment_index = insn.word(3);
+                auto id = insn.word(1);
+
+                if (accessible_ids.count(id)) {
+                    auto def = src->get_def(id);
+                    assert(def != src->end());
+
+                    if (def.opcode() == spv::OpVariable && def.word(3) == spv::StorageClassUniformConstant) {
+                        auto num_locations = GetLocationsConsumedByType(src, def.word(1), false);
+                        for (unsigned int offset = 0; offset < num_locations; offset++) {
+                            interface_var v = {};
+                            v.id = id;
+                            v.type_id = def.word(1);
+                            v.offset = offset;
+                            out.emplace_back(attachment_index + offset, v);
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    return out;
+}
+
+static bool IsWritableDescriptorType(SHADER_MODULE_STATE const *module, uint32_t type_id, bool is_storage_buffer) {
+    auto type = module->get_def(type_id);
+
+    // Strip off any array or ptrs. Where we remove array levels, adjust the  descriptor count for each dimension.
+    while (type.opcode() == spv::OpTypeArray || type.opcode() == spv::OpTypePointer || type.opcode() == spv::OpTypeRuntimeArray) {
+        if (type.opcode() == spv::OpTypeArray || type.opcode() == spv::OpTypeRuntimeArray) {
+            type = module->get_def(type.word(2));  // Element type
+        } else {
+            type = module->get_def(type.word(3));  // Pointee type
+        }
+    }
+
+    switch (type.opcode()) {
+        case spv::OpTypeImage: {
+            auto dim = type.word(3);
+            auto sampled = type.word(7);
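+            // Sampled == 2 marks a storage image (potentially written); subpass-data inputs are read-only.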
+            return sampled == 2 && dim != spv::DimSubpassData;
+        }
+
+        case spv::OpTypeStruct: {
+            std::unordered_set<unsigned> nonwritable_members;
+            if (module->get_decorations(type.word(1)).flags & decoration_set::buffer_block_bit) is_storage_buffer = true;
+            for (auto insn : *module) {
+                if (insn.opcode() == spv::OpMemberDecorate && insn.word(1) == type.word(1) &&
+                    insn.word(3) == spv::DecorationNonWritable) {
+                    nonwritable_members.insert(insn.word(2));
+                }
+            }
+
+            // A buffer is writable if it's either flavor of storage buffer, and has any member not decorated
+            // as nonwritable.
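+            // type.len() - 2 is the member count: word 0 is the opcode/length and word 1 the result id; members start at word 2.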
+            return is_storage_buffer && nonwritable_members.size() != type.len() - 2;
+        }
+    }
+
+    return false;
+}
+
+std::vector<std::pair<descriptor_slot_t, interface_var>> CollectInterfaceByDescriptorSlot(
+    debug_report_data const *report_data, SHADER_MODULE_STATE const *src, std::unordered_set<uint32_t> const &accessible_ids,
+    bool *has_writable_descriptor) {
+    std::vector<std::pair<descriptor_slot_t, interface_var>> out;
+
+    for (auto id : accessible_ids) {
+        auto insn = src->get_def(id);
+        assert(insn != src->end());
+
+        if (insn.opcode() == spv::OpVariable &&
+            (insn.word(3) == spv::StorageClassUniform || insn.word(3) == spv::StorageClassUniformConstant ||
+             insn.word(3) == spv::StorageClassStorageBuffer)) {
+            auto d = src->get_decorations(insn.word(2));
+            unsigned set = d.descriptor_set;
+            unsigned binding = d.binding;
+
+            interface_var v = {};
+            v.id = insn.word(2);
+            v.type_id = insn.word(1);
+            out.emplace_back(std::make_pair(set, binding), v);
+
+            if (!(d.flags & decoration_set::nonwritable_bit) &&
+                IsWritableDescriptorType(src, insn.word(1), insn.word(3) == spv::StorageClassStorageBuffer)) {
+                *has_writable_descriptor = true;
+            }
+        }
+    }
+
+    return out;
+}
+
+static bool ValidateViConsistency(debug_report_data const *report_data, VkPipelineVertexInputStateCreateInfo const *vi) {
+    // Walk the binding descriptions, which describe the step rate and stride of each vertex buffer.  Each binding should
+    // be specified only once.
+    std::unordered_map<uint32_t, VkVertexInputBindingDescription const *> bindings;
+    bool skip = false;
+
+    for (unsigned i = 0; i < vi->vertexBindingDescriptionCount; i++) {
+        auto desc = &vi->pVertexBindingDescriptions[i];
+        auto &binding = bindings[desc->binding];
+        if (binding) {
+            // TODO: "VUID-VkGraphicsPipelineCreateInfo-pStages-00742" perhaps?
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            kVUID_Core_Shader_InconsistentVi, "Duplicate vertex input binding descriptions for binding %d",
+                            desc->binding);
+        } else {
+            binding = desc;
+        }
+    }
+
+    return skip;
+}
+
+static bool ValidateViAgainstVsInputs(debug_report_data const *report_data, VkPipelineVertexInputStateCreateInfo const *vi,
+                                      SHADER_MODULE_STATE const *vs, spirv_inst_iter entrypoint) {
+    bool skip = false;
+
+    const auto inputs = CollectInterfaceByLocation(vs, entrypoint, spv::StorageClassInput, false);
+
+    // Build index by location
+    std::map<uint32_t, const VkVertexInputAttributeDescription *> attribs;
+    if (vi) {
+        for (uint32_t i = 0; i < vi->vertexAttributeDescriptionCount; ++i) {
+            const auto num_locations = GetLocationsConsumedByFormat(vi->pVertexAttributeDescriptions[i].format);
+            for (uint32_t j = 0; j < num_locations; ++j) {
+                attribs[vi->pVertexAttributeDescriptions[i].location + j] = &vi->pVertexAttributeDescriptions[i];
+            }
+        }
+    }
+
+    struct AttribInputPair {
+        const VkVertexInputAttributeDescription *attrib = nullptr;
+        const interface_var *input = nullptr;
+    };
+    std::map<uint32_t, AttribInputPair> location_map;
+    for (const auto &attrib_it : attribs) location_map[attrib_it.first].attrib = attrib_it.second;
+    for (const auto &input_it : inputs) location_map[input_it.first.first].input = &input_it.second;
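+    // Joining attributes and shader inputs by location lets a single walk below report unconsumed attributes,
+    // inputs with no attribute, and type mismatches.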
+
+    for (const auto location_it : location_map) {
+        const auto location = location_it.first;
+        const auto attrib = location_it.second.attrib;
+        const auto input = location_it.second.input;
+
+        if (attrib && !input) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+                            HandleToUint64(vs->vk_shader_module), kVUID_Core_Shader_OutputNotConsumed,
+                            "Vertex attribute at location %" PRIu32 " not consumed by vertex shader", location);
+        } else if (!attrib && input) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+                            HandleToUint64(vs->vk_shader_module), kVUID_Core_Shader_InputNotProduced,
+                            "Vertex shader consumes input at location %" PRIu32 " but not provided", location);
+        } else if (attrib && input) {
+            const auto attrib_type = GetFormatType(attrib->format);
+            const auto input_type = GetFundamentalType(vs, input->type_id);
+
+            // Type checking
+            if (!(attrib_type & input_type)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+                                HandleToUint64(vs->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
+                                "Attribute type of `%s` at location %" PRIu32 " does not match vertex shader input type of `%s`",
+                                string_VkFormat(attrib->format), location, DescribeType(vs, input->type_id).c_str());
+            }
+        } else {            // !attrib && !input
+            assert(false);  // at least one exists in the map
+        }
+    }
+
+    return skip;
+}
+
+static bool ValidateFsOutputsAgainstRenderPass(debug_report_data const *report_data, SHADER_MODULE_STATE const *fs,
+                                               spirv_inst_iter entrypoint, PIPELINE_STATE const *pipeline, uint32_t subpass_index) {
+    bool skip = false;
+
+    const auto rpci = pipeline->rp_state->createInfo.ptr();
+
+    struct Attachment {
+        const VkAttachmentReference2KHR *reference = nullptr;
+        const VkAttachmentDescription2KHR *attachment = nullptr;
+        const interface_var *output = nullptr;
+    };
+    std::map<uint32_t, Attachment> location_map;
+
+    const auto subpass = rpci->pSubpasses[subpass_index];
+    for (uint32_t i = 0; i < subpass.colorAttachmentCount; ++i) {
+        auto const &reference = subpass.pColorAttachments[i];
+        location_map[i].reference = &reference;
+        if (reference.attachment != VK_ATTACHMENT_UNUSED &&
+            rpci->pAttachments[reference.attachment].format != VK_FORMAT_UNDEFINED) {
+            location_map[i].attachment = &rpci->pAttachments[reference.attachment];
+        }
+    }
+
+    // TODO: dual source blend index (spv::DecIndex, zero if not provided)
+
+    const auto outputs = CollectInterfaceByLocation(fs, entrypoint, spv::StorageClassOutput, false);
+    for (const auto &output_it : outputs) {
+        auto const location = output_it.first.first;
+        location_map[location].output = &output_it.second;
+    }
+
+    const bool alphaToCoverageEnabled = pipeline->graphicsPipelineCI.pMultisampleState != NULL &&
+                                        pipeline->graphicsPipelineCI.pMultisampleState->alphaToCoverageEnable == VK_TRUE;
+
+    for (const auto location_it : location_map) {
+        const auto reference = location_it.second.reference;
+        if (reference != nullptr && reference->attachment == VK_ATTACHMENT_UNUSED) {
+            continue;
+        }
+
+        const auto location = location_it.first;
+        const auto attachment = location_it.second.attachment;
+        const auto output = location_it.second.output;
+        if (attachment && !output) {
+            if (pipeline->attachments[location].colorWriteMask != 0) {
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+                            HandleToUint64(fs->vk_shader_module), kVUID_Core_Shader_InputNotProduced,
+                            "Attachment %" PRIu32 " not written by fragment shader; undefined values will be written to attachment",
+                            location);
+            }
+        } else if (!attachment && output) {
+            if (!(alphaToCoverageEnabled && location == 0)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+                                HandleToUint64(fs->vk_shader_module), kVUID_Core_Shader_OutputNotConsumed,
+                                "fragment shader writes to output location %" PRIu32 " with no matching attachment", location);
+            }
+        } else if (attachment && output) {
+            const auto attachment_type = GetFormatType(attachment->format);
+            const auto output_type = GetFundamentalType(fs, output->type_id);
+
+            // Type checking
+            if (!(output_type & attachment_type)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+                                HandleToUint64(fs->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
+                                "Attachment %" PRIu32
+                                " of type `%s` does not match fragment shader output type of `%s`; resulting values are undefined",
+                                location, string_VkFormat(attachment->format), DescribeType(fs, output->type_id).c_str());
+            }
+        } else {            // !attachment && !output
+            assert(false);  // at least one exists in the map
+        }
+    }
+
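+    // Conversely, alpha-to-coverage needs an alpha value to read: the location 0 output must consume all four
+    // components (i.e. include an alpha channel), otherwise the coverage computation reads an undefined value.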
+    const auto output_zero = location_map.count(0) ? location_map[0].output : nullptr;
+    bool locationZeroHasAlpha = output_zero && fs->get_def(output_zero->type_id) != fs->end() &&
+                                GetComponentsConsumedByType(fs, output_zero->type_id, false) == 4;
+    if (alphaToCoverageEnabled && !locationZeroHasAlpha) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+                        HandleToUint64(fs->vk_shader_module), kVUID_Core_Shader_NoAlphaAtLocation0WithAlphaToCoverage,
+                        "fragment shader doesn't declare alpha output at location 0 even though alpha to coverage is enabled.");
+    }
+
+    return skip;
+}
+
+// For PointSize analysis we need to know if the variable decorated with the PointSize built-in was actually written to.
+// This function examines instructions in the static call tree for a write to this variable.
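+// Two shapes are handled: PointSize as a member of a Block (e.g. gl_PerVertex), decorated via OpMemberDecorate, and a
+// standalone output variable decorated via OpDecorate; in the former case the owning output variable (and then the
+// specific member via OpAccessChain) is resolved before looking for an OpStore to it.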
+static bool IsPointSizeWritten(SHADER_MODULE_STATE const *src, spirv_inst_iter builtin_instr, spirv_inst_iter entrypoint) {
+    auto type = builtin_instr.opcode();
+    uint32_t target_id = builtin_instr.word(1);
+    bool init_complete = false;
+
+    if (type == spv::OpMemberDecorate) {
+        // Built-in is part of a structure -- examine instructions up to first function body to get initial IDs
+        auto insn = entrypoint;
+        while (!init_complete && (insn.opcode() != spv::OpFunction)) {
+            switch (insn.opcode()) {
+                case spv::OpTypePointer:
+                    if ((insn.word(3) == target_id) && (insn.word(2) == spv::StorageClassOutput)) {
+                        target_id = insn.word(1);
+                    }
+                    break;
+                case spv::OpVariable:
+                    if (insn.word(1) == target_id) {
+                        target_id = insn.word(2);
+                        init_complete = true;
+                    }
+                    break;
+            }
+            insn++;
+        }
+    }
+
+    if (!init_complete && (type == spv::OpMemberDecorate)) return false;
+
+    bool found_write = false;
+    std::unordered_set<uint32_t> worklist;
+    worklist.insert(entrypoint.word(2));
+
+    // Follow instructions in call graph looking for writes to target
+    while (!worklist.empty() && !found_write) {
+        auto id_iter = worklist.begin();
+        auto id = *id_iter;
+        worklist.erase(id_iter);
+
+        auto insn = src->get_def(id);
+        if (insn == src->end()) {
+            continue;
+        }
+
+        if (insn.opcode() == spv::OpFunction) {
+            // Scan body of function looking for other function calls or items in our ID chain
+            while (++insn, insn.opcode() != spv::OpFunctionEnd) {
+                switch (insn.opcode()) {
+                    case spv::OpAccessChain:
+                        if (insn.word(3) == target_id) {
+                            if (type == spv::OpMemberDecorate) {
+                                auto value = GetConstantValue(src, insn.word(4));
+                                if (value == builtin_instr.word(2)) {
+                                    target_id = insn.word(2);
+                                }
+                            } else {
+                                target_id = insn.word(2);
+                            }
+                        }
+                        break;
+                    case spv::OpStore:
+                        if (insn.word(1) == target_id) {
+                            found_write = true;
+                        }
+                        break;
+                    case spv::OpFunctionCall:
+                        worklist.insert(insn.word(3));
+                        break;
+                }
+            }
+        }
+    }
+    return found_write;
+}
+
+// For some analyses, we need to know about all ids referenced by the static call tree of a particular entrypoint. This is
+// important for identifying the set of shader resources actually used by an entrypoint, for example.
+// Note: we only explore parts of the module which might actually contain ids we care about for the above analyses.
+//  - NOT the shader input/output interfaces.
+//
+// TODO: The set of interesting opcodes here was determined by eyeballing the SPIRV spec. It might be worth
+// converting parts of this to be generated from the machine-readable spec instead.
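+// For example (illustrative): an OpLoad through an OpAccessChain into a uniform block pulls the access chain's base
+// pointer and, transitively, the block's OpVariable into the accessible set, which later checks (e.g. descriptor and
+// push-constant usage validation) consume.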
+std::unordered_set<uint32_t> MarkAccessibleIds(SHADER_MODULE_STATE const *src, spirv_inst_iter entrypoint) {
+    std::unordered_set<uint32_t> ids;
+    std::unordered_set<uint32_t> worklist;
+    worklist.insert(entrypoint.word(2));
+
+    while (!worklist.empty()) {
+        auto id_iter = worklist.begin();
+        auto id = *id_iter;
+        worklist.erase(id_iter);
+
+        auto insn = src->get_def(id);
+        if (insn == src->end()) {
+            // ID is something we didn't collect in BuildDefIndex. That's OK -- we'll stumble across all kinds of things here
+            // that we may not care about.
+            continue;
+        }
+
+        // Try to add to the output set
+        if (!ids.insert(id).second) {
+            continue;  // If we already saw this id, we don't want to walk it again.
+        }
+
+        switch (insn.opcode()) {
+            case spv::OpFunction:
+                // Scan whole body of the function, enlisting anything interesting
+                while (++insn, insn.opcode() != spv::OpFunctionEnd) {
+                    switch (insn.opcode()) {
+                        case spv::OpLoad:
+                        case spv::OpAtomicLoad:
+                        case spv::OpAtomicExchange:
+                        case spv::OpAtomicCompareExchange:
+                        case spv::OpAtomicCompareExchangeWeak:
+                        case spv::OpAtomicIIncrement:
+                        case spv::OpAtomicIDecrement:
+                        case spv::OpAtomicIAdd:
+                        case spv::OpAtomicISub:
+                        case spv::OpAtomicSMin:
+                        case spv::OpAtomicUMin:
+                        case spv::OpAtomicSMax:
+                        case spv::OpAtomicUMax:
+                        case spv::OpAtomicAnd:
+                        case spv::OpAtomicOr:
+                        case spv::OpAtomicXor:
+                            worklist.insert(insn.word(3));  // ptr
+                            break;
+                        case spv::OpStore:
+                        case spv::OpAtomicStore:
+                            worklist.insert(insn.word(1));  // ptr
+                            break;
+                        case spv::OpAccessChain:
+                        case spv::OpInBoundsAccessChain:
+                            worklist.insert(insn.word(3));  // base ptr
+                            break;
+                        case spv::OpSampledImage:
+                        case spv::OpImageSampleImplicitLod:
+                        case spv::OpImageSampleExplicitLod:
+                        case spv::OpImageSampleDrefImplicitLod:
+                        case spv::OpImageSampleDrefExplicitLod:
+                        case spv::OpImageSampleProjImplicitLod:
+                        case spv::OpImageSampleProjExplicitLod:
+                        case spv::OpImageSampleProjDrefImplicitLod:
+                        case spv::OpImageSampleProjDrefExplicitLod:
+                        case spv::OpImageFetch:
+                        case spv::OpImageGather:
+                        case spv::OpImageDrefGather:
+                        case spv::OpImageRead:
+                        case spv::OpImage:
+                        case spv::OpImageQueryFormat:
+                        case spv::OpImageQueryOrder:
+                        case spv::OpImageQuerySizeLod:
+                        case spv::OpImageQuerySize:
+                        case spv::OpImageQueryLod:
+                        case spv::OpImageQueryLevels:
+                        case spv::OpImageQuerySamples:
+                        case spv::OpImageSparseSampleImplicitLod:
+                        case spv::OpImageSparseSampleExplicitLod:
+                        case spv::OpImageSparseSampleDrefImplicitLod:
+                        case spv::OpImageSparseSampleDrefExplicitLod:
+                        case spv::OpImageSparseSampleProjImplicitLod:
+                        case spv::OpImageSparseSampleProjExplicitLod:
+                        case spv::OpImageSparseSampleProjDrefImplicitLod:
+                        case spv::OpImageSparseSampleProjDrefExplicitLod:
+                        case spv::OpImageSparseFetch:
+                        case spv::OpImageSparseGather:
+                        case spv::OpImageSparseDrefGather:
+                        case spv::OpImageTexelPointer:
+                            worklist.insert(insn.word(3));  // Image or sampled image
+                            break;
+                        case spv::OpImageWrite:
+                            worklist.insert(insn.word(1));  // Image -- different operand order to above
+                            break;
+                        case spv::OpFunctionCall:
+                            for (uint32_t i = 3; i < insn.len(); i++) {
+                                worklist.insert(insn.word(i));  // fn itself, and all args
+                            }
+                            break;
+
+                        case spv::OpExtInst:
+                            for (uint32_t i = 5; i < insn.len(); i++) {
+                                worklist.insert(insn.word(i));  // Operands to ext inst
+                            }
+                            break;
+                    }
+                }
+                break;
+        }
+    }
+
+    return ids;
+}
+
+static bool ValidatePushConstantBlockAgainstPipeline(debug_report_data const *report_data,
+                                                     std::vector<VkPushConstantRange> const *push_constant_ranges,
+                                                     SHADER_MODULE_STATE const *src, spirv_inst_iter type,
+                                                     VkShaderStageFlagBits stage) {
+    bool skip = false;
+
+    // Strip off ptrs etc
+    type = GetStructType(src, type, false);
+    assert(type != src->end());
+
+    // Validate directly off the offsets. This isn't quite correct for arrays and matrices, but is a good first step.
+    // TODO: arrays, matrices, weird sizes
+    for (auto insn : *src) {
+        if (insn.opcode() == spv::OpMemberDecorate && insn.word(1) == type.word(1)) {
+            if (insn.word(3) == spv::DecorationOffset) {
+                unsigned offset = insn.word(4);
+                auto size = 4;  // Bytes; TODO: calculate this based on the type
+
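+                // A member occupying bytes [offset, offset + size) must be fully covered by at least one push
+                // constant range that is declared for this shader stage.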
+                bool found_range = false;
+                for (auto const &range : *push_constant_ranges) {
+                    if ((range.offset <= offset) && ((range.offset + range.size) >= (offset + size)) &&
+                        (range.stageFlags & stage)) {
+                        found_range = true;
+
+                        break;
+                    }
+                }
+
+                if (!found_range) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    kVUID_Core_Shader_PushConstantOutOfRange,
+                                    "Push constant range covering variable starting at offset %u not declared in layout", offset);
+                }
+            }
+        }
+    }
+
+    return skip;
+}
+
+static bool ValidatePushConstantUsage(debug_report_data const *report_data,
+                                      std::vector<VkPushConstantRange> const *push_constant_ranges, SHADER_MODULE_STATE const *src,
+                                      std::unordered_set<uint32_t> accessible_ids, VkShaderStageFlagBits stage) {
+    bool skip = false;
+
+    for (auto id : accessible_ids) {
+        auto def_insn = src->get_def(id);
+        if (def_insn.opcode() == spv::OpVariable && def_insn.word(3) == spv::StorageClassPushConstant) {
+            skip |= ValidatePushConstantBlockAgainstPipeline(report_data, push_constant_ranges, src, src->get_def(def_insn.word(1)),
+                                                             stage);
+        }
+    }
+
+    return skip;
+}
+
+// Validate that data for each specialization entry is fully contained within the buffer.
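+// Illustrative (hypothetical) failure cases: with dataSize == 8, an entry {offset = 12, size = 4} trips the offset
+// check below, while an entry {offset = 4, size = 8} trips the offset + size check.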
+static bool ValidateSpecializationOffsets(debug_report_data const *report_data, VkPipelineShaderStageCreateInfo const *info) {
+    bool skip = false;
+
+    VkSpecializationInfo const *spec = info->pSpecializationInfo;
+
+    if (spec) {
+        for (auto i = 0u; i < spec->mapEntryCount; i++) {
+            if (spec->pMapEntries[i].offset >= spec->dataSize) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, 0,
+                                "VUID-VkSpecializationInfo-offset-00773",
+                                "Specialization entry %u (for constant id %u) references memory outside provided specialization "
+                                "data (bytes %u.." PRINTF_SIZE_T_SPECIFIER "; " PRINTF_SIZE_T_SPECIFIER " bytes provided).",
+                                i, spec->pMapEntries[i].constantID, spec->pMapEntries[i].offset,
+                                spec->pMapEntries[i].offset + spec->dataSize - 1, spec->dataSize);
+
+                continue;
+            }
+            if (spec->pMapEntries[i].offset + spec->pMapEntries[i].size > spec->dataSize) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, 0,
+                                "VUID-VkSpecializationInfo-pMapEntries-00774",
+                                "Specialization entry %u (for constant id %u) references memory outside provided specialization "
+                                "data (bytes %u.." PRINTF_SIZE_T_SPECIFIER "; " PRINTF_SIZE_T_SPECIFIER " bytes provided).",
+                                i, spec->pMapEntries[i].constantID, spec->pMapEntries[i].offset,
+                                spec->pMapEntries[i].offset + spec->pMapEntries[i].size - 1, spec->dataSize);
+            }
+        }
+    }
+
+    return skip;
+}
+
+// TODO (jbolz): Can this return a const reference?
+static std::set<uint32_t> TypeToDescriptorTypeSet(SHADER_MODULE_STATE const *module, uint32_t type_id, unsigned &descriptor_count) {
+    auto type = module->get_def(type_id);
+    bool is_storage_buffer = false;
+    descriptor_count = 1;
+    std::set<uint32_t> ret;
+
+    // Strip off any arrays or pointers. Where we remove array levels, adjust the descriptor count for each dimension.
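+    // e.g. (hypothetical) an array of 4 combined image samplers yields descriptor_count = 4; a runtime-sized array
+    // sets descriptor_count = 0, since its size is not known from the SPIR-V alone.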
+    while (type.opcode() == spv::OpTypeArray || type.opcode() == spv::OpTypePointer || type.opcode() == spv::OpTypeRuntimeArray) {
+        if (type.opcode() == spv::OpTypeRuntimeArray) {
+            descriptor_count = 0;
+            type = module->get_def(type.word(2));
+        } else if (type.opcode() == spv::OpTypeArray) {
+            descriptor_count *= GetConstantValue(module, type.word(3));
+            type = module->get_def(type.word(2));
+        } else {
+            if (type.word(2) == spv::StorageClassStorageBuffer) {
+                is_storage_buffer = true;
+            }
+            type = module->get_def(type.word(3));
+        }
+    }
+
+    switch (type.opcode()) {
+        case spv::OpTypeStruct: {
+            for (auto insn : *module) {
+                if (insn.opcode() == spv::OpDecorate && insn.word(1) == type.word(1)) {
+                    if (insn.word(2) == spv::DecorationBlock) {
+                        if (is_storage_buffer) {
+                            ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
+                            ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
+                            return ret;
+                        } else {
+                            ret.insert(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
+                            ret.insert(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC);
+                            ret.insert(VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT);
+                            return ret;
+                        }
+                    } else if (insn.word(2) == spv::DecorationBufferBlock) {
+                        ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
+                        ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
+                        return ret;
+                    }
+                }
+            }
+
+            // Invalid
+            return ret;
+        }
+
+        case spv::OpTypeSampler:
+            ret.insert(VK_DESCRIPTOR_TYPE_SAMPLER);
+            ret.insert(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
+            return ret;
+
+        case spv::OpTypeSampledImage: {
+            // Slight relaxation for some GLSL historical madness: samplerBuffer doesn't really have a sampler, and a texel
+            // buffer descriptor doesn't really provide one. Allow this slight mismatch.
+            auto image_type = module->get_def(type.word(2));
+            auto dim = image_type.word(3);
+            auto sampled = image_type.word(7);
+            if (dim == spv::DimBuffer && sampled == 1) {
+                ret.insert(VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER);
+                return ret;
+            }
+        }
+            ret.insert(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
+            return ret;
+
+        case spv::OpTypeImage: {
+            // Many descriptor types can back an image type; which one depends on the dimension and on whether the
+            // image will be used with a sampler. SPIR-V for Vulkan requires that 'sampled' be 1 or 2; leaving the
+            // decision to runtime is unacceptable.
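+            // Summary of the mapping below: DimSubpassData -> INPUT_ATTACHMENT; DimBuffer -> UNIFORM_TEXEL_BUFFER or
+            // STORAGE_TEXEL_BUFFER depending on 'sampled'; otherwise SAMPLED_IMAGE (or COMBINED_IMAGE_SAMPLER) when
+            // sampled == 1, else STORAGE_IMAGE.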
+            auto dim = type.word(3);
+            auto sampled = type.word(7);
+
+            if (dim == spv::DimSubpassData) {
+                ret.insert(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
+                return ret;
+            } else if (dim == spv::DimBuffer) {
+                if (sampled == 1) {
+                    ret.insert(VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER);
+                    return ret;
+                } else {
+                    ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
+                    return ret;
+                }
+            } else if (sampled == 1) {
+                ret.insert(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
+                ret.insert(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
+                return ret;
+            } else {
+                ret.insert(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE);
+                return ret;
+            }
+        }
+        case spv::OpTypeAccelerationStructureNV:
+            ret.insert(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV);
+            return ret;
+
+            // We shouldn't really see any other junk types -- but if we do, they're a mismatch.
+        default:
+            return ret;  // Matches nothing
+    }
+}
+
+static std::string string_descriptorTypes(const std::set<uint32_t> &descriptor_types) {
+    std::stringstream ss;
+    for (auto it = descriptor_types.begin(); it != descriptor_types.end(); ++it) {
+        if (ss.tellp()) ss << ", ";
+        ss << string_VkDescriptorType(VkDescriptorType(*it));
+    }
+    return ss.str();
+}
+
+static bool RequirePropertyFlag(debug_report_data const *report_data, VkBool32 check, char const *flag, char const *structure) {
+    if (!check) {
+        if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                    kVUID_Core_Shader_ExceedDeviceLimit, "Shader requires flag %s set in %s but it is not set on the device", flag,
+                    structure)) {
+            return true;
+        }
+    }
+
+    return false;
+}
+
+static bool RequireFeature(debug_report_data const *report_data, VkBool32 feature, char const *feature_name) {
+    if (!feature) {
+        if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                    kVUID_Core_Shader_FeatureNotEnabled, "Shader requires %s but it is not enabled on the device", feature_name)) {
+            return true;
+        }
+    }
+
+    return false;
+}
+
+static bool RequireExtension(debug_report_data const *report_data, bool extension, char const *extension_name) {
+    if (!extension) {
+        if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                    kVUID_Core_Shader_FeatureNotEnabled, "Shader requires extension %s but it is not enabled on the device",
+                    extension_name)) {
+            return true;
+        }
+    }
+
+    return false;
+}
+
+bool CoreChecks::ValidateShaderCapabilities(SHADER_MODULE_STATE const *src, VkShaderStageFlagBits stage) const {
+    bool skip = false;
+
+    struct FeaturePointer {
+        // Callable object to test if this feature is enabled in the given aggregate feature struct
+        const std::function<VkBool32(const DeviceFeatures &)> IsEnabled;
+
+        // Test if feature pointer is populated
+        explicit operator bool() const { return static_cast<bool>(IsEnabled); }
+
+        // Default and nullptr constructor to create an empty FeaturePointer
+        FeaturePointer() : IsEnabled(nullptr) {}
+        FeaturePointer(std::nullptr_t ptr) : IsEnabled(nullptr) {}
+
+        // Constructors to populate FeaturePointer based on given pointer to member
+        FeaturePointer(VkBool32 VkPhysicalDeviceFeatures::*ptr)
+            : IsEnabled([=](const DeviceFeatures &features) { return features.core.*ptr; }) {}
+        FeaturePointer(VkBool32 VkPhysicalDeviceDescriptorIndexingFeaturesEXT::*ptr)
+            : IsEnabled([=](const DeviceFeatures &features) { return features.descriptor_indexing.*ptr; }) {}
+        FeaturePointer(VkBool32 VkPhysicalDevice8BitStorageFeaturesKHR::*ptr)
+            : IsEnabled([=](const DeviceFeatures &features) { return features.eight_bit_storage.*ptr; }) {}
+        FeaturePointer(VkBool32 VkPhysicalDeviceTransformFeedbackFeaturesEXT::*ptr)
+            : IsEnabled([=](const DeviceFeatures &features) { return features.transform_feedback_features.*ptr; }) {}
+        FeaturePointer(VkBool32 VkPhysicalDeviceFloat16Int8FeaturesKHR::*ptr)
+            : IsEnabled([=](const DeviceFeatures &features) { return features.float16_int8.*ptr; }) {}
+        FeaturePointer(VkBool32 VkPhysicalDeviceScalarBlockLayoutFeaturesEXT::*ptr)
+            : IsEnabled([=](const DeviceFeatures &features) { return features.scalar_block_layout_features.*ptr; }) {}
+        FeaturePointer(VkBool32 VkPhysicalDeviceCooperativeMatrixFeaturesNV::*ptr)
+            : IsEnabled([=](const DeviceFeatures &features) { return features.cooperative_matrix_features.*ptr; }) {}
+        FeaturePointer(VkBool32 VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR::*ptr)
+            : IsEnabled([=](const DeviceFeatures &features) { return features.uniform_buffer_standard_layout.*ptr; }) {}
+        FeaturePointer(VkBool32 VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::*ptr)
+            : IsEnabled([=](const DeviceFeatures &features) { return features.compute_shader_derivatives_features.*ptr; }) {}
+        FeaturePointer(VkBool32 VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV::*ptr)
+            : IsEnabled([=](const DeviceFeatures &features) { return features.fragment_shader_barycentric_features.*ptr; }) {}
+        FeaturePointer(VkBool32 VkPhysicalDeviceShaderImageFootprintFeaturesNV::*ptr)
+            : IsEnabled([=](const DeviceFeatures &features) { return features.shader_image_footprint_features.*ptr; }) {}
+        FeaturePointer(VkBool32 VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::*ptr)
+            : IsEnabled([=](const DeviceFeatures &features) { return features.fragment_shader_interlock_features.*ptr; }) {}
+        FeaturePointer(VkBool32 VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT::*ptr)
+            : IsEnabled([=](const DeviceFeatures &features) { return features.demote_to_helper_invocation_features.*ptr; }) {}
+        FeaturePointer(VkBool32 VkPhysicalDeviceBufferDeviceAddressFeaturesEXT::*ptr)
+            : IsEnabled([=](const DeviceFeatures &features) { return features.buffer_device_address_ext.*ptr; }) {}
+        FeaturePointer(VkBool32 VkPhysicalDeviceBufferDeviceAddressFeaturesKHR::*ptr)
+            : IsEnabled([=](const DeviceFeatures &features) { return features.buffer_device_address.*ptr; }) {}
+    };
+
+    struct CapabilityInfo {
+        char const *name;
+        FeaturePointer feature;
+        ExtEnabled DeviceExtensions::*extension;
+    };
+
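+    // Note: this is a multimap. A capability listed once maps to a single required feature and/or extension; a
+    // capability listed more than once only needs one of the listed features (and one of the listed extensions, if
+    // any) to be enabled (see the equal_range handling below).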
+    // clang-format off
+    static const std::unordered_multimap<uint32_t, CapabilityInfo> capabilities = {
+        // Capabilities always supported by a Vulkan 1.0 implementation -- no
+        // feature bits.
+        {spv::CapabilityMatrix, {nullptr}},
+        {spv::CapabilityShader, {nullptr}},
+        {spv::CapabilityInputAttachment, {nullptr}},
+        {spv::CapabilitySampled1D, {nullptr}},
+        {spv::CapabilityImage1D, {nullptr}},
+        {spv::CapabilitySampledBuffer, {nullptr}},
+        {spv::CapabilityStorageImageExtendedFormats, {nullptr}},
+        {spv::CapabilityImageQuery, {nullptr}},
+        {spv::CapabilityDerivativeControl, {nullptr}},
+
+        // Capabilities that are optionally supported, but require a feature to
+        // be enabled on the device
+        {spv::CapabilityGeometry, {"VkPhysicalDeviceFeatures::geometryShader", &VkPhysicalDeviceFeatures::geometryShader}},
+        {spv::CapabilityTessellation, {"VkPhysicalDeviceFeatures::tessellationShader", &VkPhysicalDeviceFeatures::tessellationShader}},
+        {spv::CapabilityFloat64, {"VkPhysicalDeviceFeatures::shaderFloat64", &VkPhysicalDeviceFeatures::shaderFloat64}},
+        {spv::CapabilityInt64, {"VkPhysicalDeviceFeatures::shaderInt64", &VkPhysicalDeviceFeatures::shaderInt64}},
+        {spv::CapabilityTessellationPointSize, {"VkPhysicalDeviceFeatures::shaderTessellationAndGeometryPointSize", &VkPhysicalDeviceFeatures::shaderTessellationAndGeometryPointSize}},
+        {spv::CapabilityGeometryPointSize, {"VkPhysicalDeviceFeatures::shaderTessellationAndGeometryPointSize", &VkPhysicalDeviceFeatures::shaderTessellationAndGeometryPointSize}},
+        {spv::CapabilityImageGatherExtended, {"VkPhysicalDeviceFeatures::shaderImageGatherExtended", &VkPhysicalDeviceFeatures::shaderImageGatherExtended}},
+        {spv::CapabilityStorageImageMultisample, {"VkPhysicalDeviceFeatures::shaderStorageImageMultisample", &VkPhysicalDeviceFeatures::shaderStorageImageMultisample}},
+        {spv::CapabilityUniformBufferArrayDynamicIndexing, {"VkPhysicalDeviceFeatures::shaderUniformBufferArrayDynamicIndexing", &VkPhysicalDeviceFeatures::shaderUniformBufferArrayDynamicIndexing}},
+        {spv::CapabilitySampledImageArrayDynamicIndexing, {"VkPhysicalDeviceFeatures::shaderSampledImageArrayDynamicIndexing", &VkPhysicalDeviceFeatures::shaderSampledImageArrayDynamicIndexing}},
+        {spv::CapabilityStorageBufferArrayDynamicIndexing, {"VkPhysicalDeviceFeatures::shaderStorageBufferArrayDynamicIndexing", &VkPhysicalDeviceFeatures::shaderStorageBufferArrayDynamicIndexing}},
+        {spv::CapabilityStorageImageArrayDynamicIndexing, {"VkPhysicalDeviceFeatures::shaderStorageImageArrayDynamicIndexing", &VkPhysicalDeviceFeatures::shaderStorageImageArrayDynamicIndexing}},
+        {spv::CapabilityClipDistance, {"VkPhysicalDeviceFeatures::shaderClipDistance", &VkPhysicalDeviceFeatures::shaderClipDistance}},
+        {spv::CapabilityCullDistance, {"VkPhysicalDeviceFeatures::shaderCullDistance", &VkPhysicalDeviceFeatures::shaderCullDistance}},
+        {spv::CapabilityImageCubeArray, {"VkPhysicalDeviceFeatures::imageCubeArray", &VkPhysicalDeviceFeatures::imageCubeArray}},
+        {spv::CapabilitySampleRateShading, {"VkPhysicalDeviceFeatures::sampleRateShading", &VkPhysicalDeviceFeatures::sampleRateShading}},
+        {spv::CapabilitySparseResidency, {"VkPhysicalDeviceFeatures::shaderResourceResidency", &VkPhysicalDeviceFeatures::shaderResourceResidency}},
+        {spv::CapabilityMinLod, {"VkPhysicalDeviceFeatures::shaderResourceMinLod", &VkPhysicalDeviceFeatures::shaderResourceMinLod}},
+        {spv::CapabilitySampledCubeArray, {"VkPhysicalDeviceFeatures::imageCubeArray", &VkPhysicalDeviceFeatures::imageCubeArray}},
+        {spv::CapabilityImageMSArray, {"VkPhysicalDeviceFeatures::shaderStorageImageMultisample", &VkPhysicalDeviceFeatures::shaderStorageImageMultisample}},
+        {spv::CapabilityInterpolationFunction, {"VkPhysicalDeviceFeatures::sampleRateShading", &VkPhysicalDeviceFeatures::sampleRateShading}},
+        {spv::CapabilityStorageImageReadWithoutFormat, {"VkPhysicalDeviceFeatures::shaderStorageImageReadWithoutFormat", &VkPhysicalDeviceFeatures::shaderStorageImageReadWithoutFormat}},
+        {spv::CapabilityStorageImageWriteWithoutFormat, {"VkPhysicalDeviceFeatures::shaderStorageImageWriteWithoutFormat", &VkPhysicalDeviceFeatures::shaderStorageImageWriteWithoutFormat}},
+        {spv::CapabilityMultiViewport, {"VkPhysicalDeviceFeatures::multiViewport", &VkPhysicalDeviceFeatures::multiViewport}},
+
+        {spv::CapabilityShaderNonUniformEXT, {VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_ext_descriptor_indexing}},
+        {spv::CapabilityRuntimeDescriptorArrayEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::runtimeDescriptorArray", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::runtimeDescriptorArray}},
+        {spv::CapabilityInputAttachmentArrayDynamicIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderInputAttachmentArrayDynamicIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderInputAttachmentArrayDynamicIndexing}},
+        {spv::CapabilityUniformTexelBufferArrayDynamicIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformTexelBufferArrayDynamicIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformTexelBufferArrayDynamicIndexing}},
+        {spv::CapabilityStorageTexelBufferArrayDynamicIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageTexelBufferArrayDynamicIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageTexelBufferArrayDynamicIndexing}},
+        {spv::CapabilityUniformBufferArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformBufferArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformBufferArrayNonUniformIndexing}},
+        {spv::CapabilitySampledImageArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderSampledImageArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderSampledImageArrayNonUniformIndexing}},
+        {spv::CapabilityStorageBufferArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageBufferArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageBufferArrayNonUniformIndexing}},
+        {spv::CapabilityStorageImageArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageImageArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageImageArrayNonUniformIndexing}},
+        {spv::CapabilityInputAttachmentArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderInputAttachmentArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderInputAttachmentArrayNonUniformIndexing}},
+        {spv::CapabilityUniformTexelBufferArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformTexelBufferArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderUniformTexelBufferArrayNonUniformIndexing}},
+        {spv::CapabilityStorageTexelBufferArrayNonUniformIndexingEXT, {"VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageTexelBufferArrayNonUniformIndexing", &VkPhysicalDeviceDescriptorIndexingFeaturesEXT::shaderStorageTexelBufferArrayNonUniformIndexing}},
+
+        // Capabilities that require an extension
+        {spv::CapabilityDrawParameters, {VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_khr_shader_draw_parameters}},
+        {spv::CapabilityGeometryShaderPassthroughNV, {VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_nv_geometry_shader_passthrough}},
+        {spv::CapabilitySampleMaskOverrideCoverageNV, {VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_nv_sample_mask_override_coverage}},
+        {spv::CapabilityShaderViewportIndexLayerEXT, {VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_ext_shader_viewport_index_layer}},
+        {spv::CapabilityShaderViewportIndexLayerNV, {VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_nv_viewport_array2}},
+        {spv::CapabilityShaderViewportMaskNV, {VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_nv_viewport_array2}},
+        {spv::CapabilitySubgroupBallotKHR, {VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_ext_shader_subgroup_ballot }},
+        {spv::CapabilitySubgroupVoteKHR, {VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_ext_shader_subgroup_vote }},
+        {spv::CapabilityGroupNonUniformPartitionedNV, {VK_NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_nv_shader_subgroup_partitioned}},
+        {spv::CapabilityInt64Atomics, {VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_khr_shader_atomic_int64 }},
+        {spv::CapabilityShaderClockKHR, {VK_KHR_SHADER_CLOCK_EXTENSION_NAME, nullptr, &DeviceExtensions::vk_khr_shader_clock }},
+
+        {spv::CapabilityComputeDerivativeGroupQuadsNV, {"VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::computeDerivativeGroupQuads", &VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::computeDerivativeGroupQuads, &DeviceExtensions::vk_nv_compute_shader_derivatives}},
+        {spv::CapabilityComputeDerivativeGroupLinearNV, {"VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::computeDerivativeGroupLinear", &VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::computeDerivativeGroupLinear, &DeviceExtensions::vk_nv_compute_shader_derivatives}},
+        {spv::CapabilityFragmentBarycentricNV, {"VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV::fragmentShaderBarycentric", &VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV::fragmentShaderBarycentric, &DeviceExtensions::vk_nv_fragment_shader_barycentric}},
+
+        {spv::CapabilityStorageBuffer8BitAccess, {"VkPhysicalDevice8BitStorageFeaturesKHR::storageBuffer8BitAccess", &VkPhysicalDevice8BitStorageFeaturesKHR::storageBuffer8BitAccess, &DeviceExtensions::vk_khr_8bit_storage}},
+        {spv::CapabilityUniformAndStorageBuffer8BitAccess, {"VkPhysicalDevice8BitStorageFeaturesKHR::uniformAndStorageBuffer8BitAccess", &VkPhysicalDevice8BitStorageFeaturesKHR::uniformAndStorageBuffer8BitAccess, &DeviceExtensions::vk_khr_8bit_storage}},
+        {spv::CapabilityStoragePushConstant8, {"VkPhysicalDevice8BitStorageFeaturesKHR::storagePushConstant8", &VkPhysicalDevice8BitStorageFeaturesKHR::storagePushConstant8, &DeviceExtensions::vk_khr_8bit_storage}},
+
+        {spv::CapabilityTransformFeedback, { "VkPhysicalDeviceTransformFeedbackFeaturesEXT::transformFeedback", &VkPhysicalDeviceTransformFeedbackFeaturesEXT::transformFeedback, &DeviceExtensions::vk_ext_transform_feedback}},
+        {spv::CapabilityGeometryStreams, { "VkPhysicalDeviceTransformFeedbackFeaturesEXT::geometryStreams", &VkPhysicalDeviceTransformFeedbackFeaturesEXT::geometryStreams, &DeviceExtensions::vk_ext_transform_feedback}},
+
+        {spv::CapabilityFloat16, {"VkPhysicalDeviceFloat16Int8FeaturesKHR::shaderFloat16", &VkPhysicalDeviceFloat16Int8FeaturesKHR::shaderFloat16, &DeviceExtensions::vk_khr_shader_float16_int8}},
+        {spv::CapabilityInt8, {"VkPhysicalDeviceFloat16Int8FeaturesKHR::shaderInt8", &VkPhysicalDeviceFloat16Int8FeaturesKHR::shaderInt8, &DeviceExtensions::vk_khr_shader_float16_int8}},
+
+        {spv::CapabilityImageFootprintNV, {"VkPhysicalDeviceShaderImageFootprintFeaturesNV::imageFootprint", &VkPhysicalDeviceShaderImageFootprintFeaturesNV::imageFootprint, &DeviceExtensions::vk_nv_shader_image_footprint}},
+
+        {spv::CapabilityCooperativeMatrixNV, {"VkPhysicalDeviceCooperativeMatrixFeaturesNV::cooperativeMatrix", &VkPhysicalDeviceCooperativeMatrixFeaturesNV::cooperativeMatrix, &DeviceExtensions::vk_nv_cooperative_matrix}},
+
+        {spv::CapabilitySignedZeroInfNanPreserve, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderSignedZeroInfNanPreserve", nullptr, &DeviceExtensions::vk_khr_shader_float_controls}},
+        {spv::CapabilityDenormPreserve, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormPreserve", nullptr, &DeviceExtensions::vk_khr_shader_float_controls}},
+        {spv::CapabilityDenormFlushToZero, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderDenormFlushToZero", nullptr, &DeviceExtensions::vk_khr_shader_float_controls}},
+        {spv::CapabilityRoundingModeRTE, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTE", nullptr, &DeviceExtensions::vk_khr_shader_float_controls}},
+        {spv::CapabilityRoundingModeRTZ, {"VkPhysicalDeviceFloatControlsPropertiesKHR::shaderRoundingModeRTZ", nullptr, &DeviceExtensions::vk_khr_shader_float_controls}},
+
+        {spv::CapabilityFragmentShaderSampleInterlockEXT,       {"VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::fragmentShaderSampleInterlock",       &VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::fragmentShaderSampleInterlock,         &DeviceExtensions::vk_ext_fragment_shader_interlock}},
+        {spv::CapabilityFragmentShaderPixelInterlockEXT,        {"VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::fragmentShaderPixelInterlock",        &VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::fragmentShaderPixelInterlock,          &DeviceExtensions::vk_ext_fragment_shader_interlock}},
+        {spv::CapabilityFragmentShaderShadingRateInterlockEXT,  {"VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::fragmentShaderShadingRateInterlock",  &VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::fragmentShaderShadingRateInterlock,    &DeviceExtensions::vk_ext_fragment_shader_interlock}},
+        {spv::CapabilityDemoteToHelperInvocationEXT,       {"VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT::shaderDemoteToHelperInvocation",       &VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT::shaderDemoteToHelperInvocation,         &DeviceExtensions::vk_ext_shader_demote_to_helper_invocation}},
+
+        {spv::CapabilityPhysicalStorageBufferAddressesEXT, {"VkPhysicalDeviceBufferDeviceAddressFeaturesEXT::bufferDeviceAddress", &VkPhysicalDeviceBufferDeviceAddressFeaturesEXT::bufferDeviceAddress, &DeviceExtensions::vk_ext_buffer_device_address}},
+        // Should be non-EXT token, but Android SPIRV-Headers are out of date, and the token value is the same anyway
+        {spv::CapabilityPhysicalStorageBufferAddressesEXT, {"VkPhysicalDeviceBufferDeviceAddressFeaturesKHR::bufferDeviceAddress", &VkPhysicalDeviceBufferDeviceAddressFeaturesKHR::bufferDeviceAddress, &DeviceExtensions::vk_khr_buffer_device_address}},
+    };
+    // clang-format on
+
+    for (auto insn : *src) {
+        if (insn.opcode() == spv::OpCapability) {
+            size_t n = capabilities.count(insn.word(1));
+            if (1 == n) {  // key occurs exactly once
+                auto it = capabilities.find(insn.word(1));
+                if (it != capabilities.end()) {
+                    if (it->second.feature) {
+                        skip |= RequireFeature(report_data, it->second.feature.IsEnabled(enabled_features), it->second.name);
+                    }
+                    if (it->second.extension) {
+                        skip |= RequireExtension(report_data, IsExtEnabled((device_extensions.*(it->second.extension))),
+                                                 it->second.name);
+                    }
+                }
+            } else if (1 < n) {  // key occurs multiple times, at least one must be enabled
+                bool needs_feature = false, has_feature = false;
+                bool needs_ext = false, has_ext = false;
+                std::string feature_names = "(one of) [ ";
+                std::string extension_names = feature_names;
+                auto caps = capabilities.equal_range(insn.word(1));
+                for (auto it = caps.first; it != caps.second; ++it) {
+                    if (it->second.feature) {
+                        needs_feature = true;
+                        has_feature = has_feature || it->second.feature.IsEnabled(enabled_features);
+                        feature_names += it->second.name;
+                        feature_names += " ";
+                    }
+                    if (it->second.extension) {
+                        needs_ext = true;
+                        has_ext = has_ext || IsExtEnabled(device_extensions.*(it->second.extension));
+                        extension_names += it->second.name;
+                        extension_names += " ";
+                    }
+                }
+                if (needs_feature) {
+                    feature_names += "]";
+                    skip |= RequireFeature(report_data, has_feature, feature_names.c_str());
+                }
+                if (needs_ext) {
+                    extension_names += "]";
+                    skip |= RequireExtension(report_data, has_ext, extension_names.c_str());
+                }
+            }
+
+            {  // Do group non-uniform checks
+                const VkSubgroupFeatureFlags supportedOperations = phys_dev_ext_props.subgroup_props.supportedOperations;
+                const VkSubgroupFeatureFlags supportedStages = phys_dev_ext_props.subgroup_props.supportedStages;
+
+                switch (insn.word(1)) {
+                    default:
+                        break;
+                    case spv::CapabilityGroupNonUniform:
+                    case spv::CapabilityGroupNonUniformVote:
+                    case spv::CapabilityGroupNonUniformArithmetic:
+                    case spv::CapabilityGroupNonUniformBallot:
+                    case spv::CapabilityGroupNonUniformShuffle:
+                    case spv::CapabilityGroupNonUniformShuffleRelative:
+                    case spv::CapabilityGroupNonUniformClustered:
+                    case spv::CapabilityGroupNonUniformQuad:
+                    case spv::CapabilityGroupNonUniformPartitionedNV:
+                        skip |= RequirePropertyFlag(report_data, supportedStages & stage, string_VkShaderStageFlagBits(stage),
+                                                    "VkPhysicalDeviceSubgroupProperties::supportedStages");
+                        break;
+                }
+
+                switch (insn.word(1)) {
+                    default:
+                        break;
+                    case spv::CapabilityGroupNonUniform:
+                        skip |= RequirePropertyFlag(report_data, supportedOperations & VK_SUBGROUP_FEATURE_BASIC_BIT,
+                                                    "VK_SUBGROUP_FEATURE_BASIC_BIT",
+                                                    "VkPhysicalDeviceSubgroupProperties::supportedOperations");
+                        break;
+                    case spv::CapabilityGroupNonUniformVote:
+                        skip |= RequirePropertyFlag(report_data, supportedOperations & VK_SUBGROUP_FEATURE_VOTE_BIT,
+                                                    "VK_SUBGROUP_FEATURE_VOTE_BIT",
+                                                    "VkPhysicalDeviceSubgroupProperties::supportedOperations");
+                        break;
+                    case spv::CapabilityGroupNonUniformArithmetic:
+                        skip |= RequirePropertyFlag(report_data, supportedOperations & VK_SUBGROUP_FEATURE_ARITHMETIC_BIT,
+                                                    "VK_SUBGROUP_FEATURE_ARITHMETIC_BIT",
+                                                    "VkPhysicalDeviceSubgroupProperties::supportedOperations");
+                        break;
+                    case spv::CapabilityGroupNonUniformBallot:
+                        skip |= RequirePropertyFlag(report_data, supportedOperations & VK_SUBGROUP_FEATURE_BALLOT_BIT,
+                                                    "VK_SUBGROUP_FEATURE_BALLOT_BIT",
+                                                    "VkPhysicalDeviceSubgroupProperties::supportedOperations");
+                        break;
+                    case spv::CapabilityGroupNonUniformShuffle:
+                        skip |= RequirePropertyFlag(report_data, supportedOperations & VK_SUBGROUP_FEATURE_SHUFFLE_BIT,
+                                                    "VK_SUBGROUP_FEATURE_SHUFFLE_BIT",
+                                                    "VkPhysicalDeviceSubgroupProperties::supportedOperations");
+                        break;
+                    case spv::CapabilityGroupNonUniformShuffleRelative:
+                        skip |= RequirePropertyFlag(report_data, supportedOperations & VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT,
+                                                    "VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT",
+                                                    "VkPhysicalDeviceSubgroupProperties::supportedOperations");
+                        break;
+                    case spv::CapabilityGroupNonUniformClustered:
+                        skip |= RequirePropertyFlag(report_data, supportedOperations & VK_SUBGROUP_FEATURE_CLUSTERED_BIT,
+                                                    "VK_SUBGROUP_FEATURE_CLUSTERED_BIT",
+                                                    "VkPhysicalDeviceSubgroupProperties::supportedOperations");
+                        break;
+                    case spv::CapabilityGroupNonUniformQuad:
+                        skip |= RequirePropertyFlag(report_data, supportedOperations & VK_SUBGROUP_FEATURE_QUAD_BIT,
+                                                    "VK_SUBGROUP_FEATURE_QUAD_BIT",
+                                                    "VkPhysicalDeviceSubgroupProperties::supportedOperations");
+                        break;
+                    case spv::CapabilityGroupNonUniformPartitionedNV:
+                        skip |= RequirePropertyFlag(report_data, supportedOperations & VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV,
+                                                    "VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV",
+                                                    "VkPhysicalDeviceSubgroupProperties::supportedOperations");
+                        break;
+                }
+            }
+        }
+    }
+
+    return skip;
+}
+
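+// Writable descriptors (storage buffers / storage images) are allowed unconditionally in compute, ray tracing and
+// task/mesh stages; the fragment stage requires fragmentStoresAndAtomics, and the remaining (vertex pipeline) stages
+// require vertexPipelineStoresAndAtomics.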
+bool CoreChecks::ValidateShaderStageWritableDescriptor(VkShaderStageFlagBits stage, bool has_writable_descriptor) const {
+    bool skip = false;
+
+    if (has_writable_descriptor) {
+        switch (stage) {
+            case VK_SHADER_STAGE_COMPUTE_BIT:
+            case VK_SHADER_STAGE_RAYGEN_BIT_NV:
+            case VK_SHADER_STAGE_ANY_HIT_BIT_NV:
+            case VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV:
+            case VK_SHADER_STAGE_MISS_BIT_NV:
+            case VK_SHADER_STAGE_INTERSECTION_BIT_NV:
+            case VK_SHADER_STAGE_CALLABLE_BIT_NV:
+            case VK_SHADER_STAGE_TASK_BIT_NV:
+            case VK_SHADER_STAGE_MESH_BIT_NV:
+                /* No feature requirements for writes and atomics from compute,
+                 * ray tracing, or mesh stages */
+                break;
+            case VK_SHADER_STAGE_FRAGMENT_BIT:
+                skip |= RequireFeature(report_data, enabled_features.core.fragmentStoresAndAtomics, "fragmentStoresAndAtomics");
+                break;
+            default:
+                skip |= RequireFeature(report_data, enabled_features.core.vertexPipelineStoresAndAtomics,
+                                       "vertexPipelineStoresAndAtomics");
+                break;
+        }
+    }
+
+    return skip;
+}
+
+bool CoreChecks::ValidateShaderStageGroupNonUniform(SHADER_MODULE_STATE const *module, VkShaderStageFlagBits stage) const {
+    bool skip = false;
+
+    auto const subgroup_props = phys_dev_ext_props.subgroup_props;
+
+    for (auto inst : *module) {
+        // Check the quad operations.
+        switch (inst.opcode()) {
+            default:
+                break;
+            case spv::OpGroupNonUniformQuadBroadcast:
+            case spv::OpGroupNonUniformQuadSwap:
+                if ((stage != VK_SHADER_STAGE_FRAGMENT_BIT) && (stage != VK_SHADER_STAGE_COMPUTE_BIT)) {
+                    skip |= RequireFeature(report_data, subgroup_props.quadOperationsInAllStages,
+                                           "VkPhysicalDeviceSubgroupProperties::quadOperationsInAllStages");
+                }
+                break;
+        }
+
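+        // Non-uniform group operations whose result type (or vector element type) is an 8-, 16- or 64-bit integer or a
+        // 16-bit float additionally require the shaderSubgroupExtendedTypes feature
+        // (VK_KHR_shader_subgroup_extended_types).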
+        if (!enabled_features.subgroup_extended_types_features.shaderSubgroupExtendedTypes) {
+            switch (inst.opcode()) {
+                default:
+                    break;
+                case spv::OpGroupNonUniformAllEqual:
+                case spv::OpGroupNonUniformBroadcast:
+                case spv::OpGroupNonUniformBroadcastFirst:
+                case spv::OpGroupNonUniformShuffle:
+                case spv::OpGroupNonUniformShuffleXor:
+                case spv::OpGroupNonUniformShuffleUp:
+                case spv::OpGroupNonUniformShuffleDown:
+                case spv::OpGroupNonUniformIAdd:
+                case spv::OpGroupNonUniformFAdd:
+                case spv::OpGroupNonUniformIMul:
+                case spv::OpGroupNonUniformFMul:
+                case spv::OpGroupNonUniformSMin:
+                case spv::OpGroupNonUniformUMin:
+                case spv::OpGroupNonUniformFMin:
+                case spv::OpGroupNonUniformSMax:
+                case spv::OpGroupNonUniformUMax:
+                case spv::OpGroupNonUniformFMax:
+                case spv::OpGroupNonUniformBitwiseAnd:
+                case spv::OpGroupNonUniformBitwiseOr:
+                case spv::OpGroupNonUniformBitwiseXor:
+                case spv::OpGroupNonUniformLogicalAnd:
+                case spv::OpGroupNonUniformLogicalOr:
+                case spv::OpGroupNonUniformLogicalXor:
+                case spv::OpGroupNonUniformQuadBroadcast:
+                case spv::OpGroupNonUniformQuadSwap: {
+                    auto type = module->get_def(inst.word(1));
+
+                    if (type.opcode() == spv::OpTypeVector) {
+                        // Get the element type
+                        type = module->get_def(type.word(2));
+                    }
+
+                    if (type.opcode() == spv::OpTypeBool) {
+                        break;
+                    }
+
+                    // For both OpTypeInt and OpTypeFloat, the width is in the 2nd word.
+                    const uint32_t width = type.word(2);
+
+                    if ((type.opcode() == spv::OpTypeFloat && width == 16) ||
+                        (type.opcode() == spv::OpTypeInt && (width == 8 || width == 16 || width == 64))) {
+                        skip |= RequireFeature(
+                            report_data, enabled_features.subgroup_extended_types_features.shaderSubgroupExtendedTypes,
+                            "VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR::shaderSubgroupExtendedTypes");
+                    }
+                    break;
+                }
+            }
+        }
+    }
+
+    return skip;
+}
+
+bool CoreChecks::ValidateShaderStageInputOutputLimits(SHADER_MODULE_STATE const *src, VkPipelineShaderStageCreateInfo const *pStage,
+                                                      const PIPELINE_STATE *pipeline, spirv_inst_iter entrypoint) const {
+    if (pStage->stage == VK_SHADER_STAGE_COMPUTE_BIT || pStage->stage == VK_SHADER_STAGE_ALL_GRAPHICS ||
+        pStage->stage == VK_SHADER_STAGE_ALL) {
+        return false;
+    }
+
+    bool skip = false;
+    auto const &limits = phys_dev_props.limits;
+
+    std::set<uint32_t> patchIDs;
+    struct Variable {
+        uint32_t baseTypePtrID;
+        uint32_t ID;
+        uint32_t storageClass;
+    };
+    std::vector<Variable> variables;
+
+    uint32_t numVertices = 0;
+
+    auto entrypointVariables = FindEntrypointInterfaces(entrypoint);
+
+    for (auto insn : *src) {
+        switch (insn.opcode()) {
+            // Find all Patch decorations
+            case spv::OpDecorate:
+                switch (insn.word(2)) {
+                    case spv::DecorationPatch: {
+                        patchIDs.insert(insn.word(1));
+                        break;
+                    }
+                    default:
+                        break;
+                }
+                break;
+            // Find all input and output variables
+            case spv::OpVariable: {
+                Variable var = {};
+                var.storageClass = insn.word(3);
+                if ((var.storageClass == spv::StorageClassInput || var.storageClass == spv::StorageClassOutput) &&
+                    // Only include variables in the entrypoint's interface
+                    find(entrypointVariables.begin(), entrypointVariables.end(), insn.word(2)) != entrypointVariables.end()) {
+                    var.baseTypePtrID = insn.word(1);
+                    var.ID = insn.word(2);
+                    variables.push_back(var);
+                }
+                break;
+            }
+            case spv::OpExecutionMode:
+                if (insn.word(1) == entrypoint.word(2)) {
+                    switch (insn.word(2)) {
+                        default:
+                            break;
+                        case spv::ExecutionModeOutputVertices:
+                            numVertices = insn.word(3);
+                            break;
+                    }
+                }
+                break;
+            default:
+                break;
+        }
+    }
+
+    bool strip_output_array_level =
+        (pStage->stage == VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT || pStage->stage == VK_SHADER_STAGE_MESH_BIT_NV);
+    bool strip_input_array_level =
+        (pStage->stage == VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT ||
+         pStage->stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT || pStage->stage == VK_SHADER_STAGE_GEOMETRY_BIT);
+
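+    // numCompIn/numCompOut count the total components consumed by the stage's interface variables; maxCompIn/maxCompOut
+    // track the highest component slot actually addressed (location * 4 + component + components consumed by the type).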
+    uint32_t numCompIn = 0, numCompOut = 0;
+    int maxCompIn = 0, maxCompOut = 0;
+
+    auto inputs = CollectInterfaceByLocation(src, entrypoint, spv::StorageClassInput, strip_input_array_level);
+    auto outputs = CollectInterfaceByLocation(src, entrypoint, spv::StorageClassOutput, strip_output_array_level);
+
+    // Find max component location used for input variables.
+    for (auto &var : inputs) {
+        int location = var.first.first;
+        int component = var.first.second;
+        interface_var &iv = var.second;
+
+        // Only need to look at the first location, since we use the type's whole size
+        if (iv.offset != 0) {
+            continue;
+        }
+
+        if (iv.is_patch) {
+            continue;
+        }
+
+        int numComponents = GetComponentsConsumedByType(src, iv.type_id, strip_input_array_level);
+        maxCompIn = std::max(maxCompIn, location * 4 + component + numComponents);
+    }
+
+    // Find max component location used for output variables.
+    for (auto &var : outputs) {
+        int location = var.first.first;
+        int component = var.first.second;
+        interface_var &iv = var.second;
+
+        // Only need to look at the first location, since we use the type's whole size
+        if (iv.offset != 0) {
+            continue;
+        }
+
+        if (iv.is_patch) {
+            continue;
+        }
+
+        int numComponents = GetComponentsConsumedByType(src, iv.type_id, strip_output_array_level);
+        maxCompOut = std::max(maxCompOut, location * 4 + component + numComponents);
+    }
+
+    // XXX TODO: Would be nice to rewrite this to use CollectInterfaceByLocation (or something similar),
+    // but that doesn't include builtins.
+    for (auto &var : variables) {
+        // Check if the variable is a patch. Patches can also be members of blocks,
+        // but if they are then the top-level arrayness has already been stripped
+        // by the time GetComponentsConsumedByType gets to it.
+        bool isPatch = patchIDs.find(var.ID) != patchIDs.end();
+
+        if (var.storageClass == spv::StorageClassInput) {
+            numCompIn += GetComponentsConsumedByType(src, var.baseTypePtrID, strip_input_array_level && !isPatch);
+        } else {  // var.storageClass == spv::StorageClassOutput
+            numCompOut += GetComponentsConsumedByType(src, var.baseTypePtrID, strip_output_array_level && !isPatch);
+        }
+    }
+
+    switch (pStage->stage) {
+        case VK_SHADER_STAGE_VERTEX_BIT:
+            if (numCompOut > limits.maxVertexOutputComponents) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
+                                "Invalid Pipeline CreateInfo State: Vertex shader exceeds "
+                                "VkPhysicalDeviceLimits::maxVertexOutputComponents of %u "
+                                "components by %u components",
+                                limits.maxVertexOutputComponents, numCompOut - limits.maxVertexOutputComponents);
+            }
+            if (maxCompOut > (int)limits.maxVertexOutputComponents) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
+                                "Invalid Pipeline CreateInfo State: Vertex shader output variable uses location that "
+                                "exceeds component limit VkPhysicalDeviceLimits::maxVertexOutputComponents (%u)",
+                                limits.maxVertexOutputComponents);
+            }
+            break;
+
+        case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
+            if (numCompIn > limits.maxTessellationControlPerVertexInputComponents) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
+                                "Invalid Pipeline CreateInfo State: Tessellation control shader exceeds "
+                                "VkPhysicalDeviceLimits::maxTessellationControlPerVertexInputComponents of %u "
+                                "components by %u components",
+                                limits.maxTessellationControlPerVertexInputComponents,
+                                numCompIn - limits.maxTessellationControlPerVertexInputComponents);
+            }
+            if (maxCompIn > (int)limits.maxTessellationControlPerVertexInputComponents) {
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                            HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
+                            "Invalid Pipeline CreateInfo State: Tessellation control shader input variable uses location that "
+                            "exceeds component limit VkPhysicalDeviceLimits::maxTessellationControlPerVertexInputComponents (%u)",
+                            limits.maxTessellationControlPerVertexInputComponents);
+            }
+            if (numCompOut > limits.maxTessellationControlPerVertexOutputComponents) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
+                                "Invalid Pipeline CreateInfo State: Tessellation control shader exceeds "
+                                "VkPhysicalDeviceLimits::maxTessellationControlPerVertexOutputComponents of %u "
+                                "components by %u components",
+                                limits.maxTessellationControlPerVertexOutputComponents,
+                                numCompOut - limits.maxTessellationControlPerVertexOutputComponents);
+            }
+            if (maxCompOut > (int)limits.maxTessellationControlPerVertexOutputComponents) {
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                            HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
+                            "Invalid Pipeline CreateInfo State: Tessellation control shader output variable uses location that "
+                            "exceeds component limit VkPhysicalDeviceLimits::maxTessellationControlPerVertexOutputComponents (%u)",
+                            limits.maxTessellationControlPerVertexOutputComponents);
+            }
+            break;
+
+        case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
+            if (numCompIn > limits.maxTessellationEvaluationInputComponents) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
+                                "Invalid Pipeline CreateInfo State: Tessellation evaluation shader exceeds "
+                                "VkPhysicalDeviceLimits::maxTessellationEvaluationInputComponents of %u "
+                                "components by %u components",
+                                limits.maxTessellationEvaluationInputComponents,
+                                numCompIn - limits.maxTessellationEvaluationInputComponents);
+            }
+            if (maxCompIn > (int)limits.maxTessellationEvaluationInputComponents) {
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                            HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
+                            "Invalid Pipeline CreateInfo State: Tessellation evaluation shader input variable uses location that "
+                            "exceeds component limit VkPhysicalDeviceLimits::maxTessellationEvaluationInputComponents (%u)",
+                            limits.maxTessellationEvaluationInputComponents);
+            }
+            if (numCompOut > limits.maxTessellationEvaluationOutputComponents) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
+                                "Invalid Pipeline CreateInfo State: Tessellation evaluation shader exceeds "
+                                "VkPhysicalDeviceLimits::maxTessellationEvaluationOutputComponents of %u "
+                                "components by %u components",
+                                limits.maxTessellationEvaluationOutputComponents,
+                                numCompOut - limits.maxTessellationEvaluationOutputComponents);
+            }
+            if (maxCompOut > (int)limits.maxTessellationEvaluationOutputComponents) {
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                            HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
+                            "Invalid Pipeline CreateInfo State: Tessellation evaluation shader output variable uses location that "
+                            "exceeds component limit VkPhysicalDeviceLimits::maxTessellationEvaluationOutputComponents (%u)",
+                            limits.maxTessellationEvaluationOutputComponents);
+            }
+            break;
+
+        case VK_SHADER_STAGE_GEOMETRY_BIT:
+            if (numCompIn > limits.maxGeometryInputComponents) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
+                                "Invalid Pipeline CreateInfo State: Geometry shader exceeds "
+                                "VkPhysicalDeviceLimits::maxGeometryInputComponents of %u "
+                                "components by %u components",
+                                limits.maxGeometryInputComponents, numCompIn - limits.maxGeometryInputComponents);
+            }
+            if (maxCompIn > (int)limits.maxGeometryInputComponents) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
+                                "Invalid Pipeline CreateInfo State: Geometry shader input variable uses location that "
+                                "exceeds component limit VkPhysicalDeviceLimits::maxGeometryInputComponents (%u)",
+                                limits.maxGeometryInputComponents);
+            }
+            if (numCompOut > limits.maxGeometryOutputComponents) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
+                                "Invalid Pipeline CreateInfo State: Geometry shader exceeds "
+                                "VkPhysicalDeviceLimits::maxGeometryOutputComponents of %u "
+                                "components by %u components",
+                                limits.maxGeometryOutputComponents, numCompOut - limits.maxGeometryOutputComponents);
+            }
+            if (maxCompOut > (int)limits.maxGeometryOutputComponents) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
+                                "Invalid Pipeline CreateInfo State: Geometry shader output variable uses location that "
+                                "exceeds component limit VkPhysicalDeviceLimits::maxGeometryOutputComponents (%u)",
+                                limits.maxGeometryOutputComponents);
+            }
+            if (numCompOut * numVertices > limits.maxGeometryTotalOutputComponents) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
+                                "Invalid Pipeline CreateInfo State: Geometry shader exceeds "
+                                "VkPhysicalDeviceLimits::maxGeometryTotalOutputComponents of %u "
+                                "components by %u components",
+                                limits.maxGeometryTotalOutputComponents,
+                                numCompOut * numVertices - limits.maxGeometryTotalOutputComponents);
+            }
+            break;
+
+        case VK_SHADER_STAGE_FRAGMENT_BIT:
+            if (numCompIn > limits.maxFragmentInputComponents) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
+                                "Invalid Pipeline CreateInfo State: Fragment shader exceeds "
+                                "VkPhysicalDeviceLimits::maxFragmentInputComponents of %u "
+                                "components by %u components",
+                                limits.maxFragmentInputComponents, numCompIn - limits.maxFragmentInputComponents);
+            }
+            if (maxCompIn > (int)limits.maxFragmentInputComponents) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_ExceedDeviceLimit,
+                                "Invalid Pipeline CreateInfo State: Fragment shader input variable uses location that "
+                                "exceeds component limit VkPhysicalDeviceLimits::maxFragmentInputComponents (%u)",
+                                limits.maxFragmentInputComponents);
+            }
+            break;
+
+        case VK_SHADER_STAGE_RAYGEN_BIT_NV:
+        case VK_SHADER_STAGE_ANY_HIT_BIT_NV:
+        case VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV:
+        case VK_SHADER_STAGE_MISS_BIT_NV:
+        case VK_SHADER_STAGE_INTERSECTION_BIT_NV:
+        case VK_SHADER_STAGE_CALLABLE_BIT_NV:
+        case VK_SHADER_STAGE_TASK_BIT_NV:
+        case VK_SHADER_STAGE_MESH_BIT_NV:
+            break;
+
+        default:
+            assert(false);  // This should never happen
+    }
+    return skip;
+}
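
The per-stage checks above rely on two different measures: numCompIn/numCompOut sum the components consumed by every interface variable, while maxCompIn/maxCompOut track the highest component slot referenced, computed as location * 4 + component + components consumed (each location supplies four components). A minimal standalone sketch of that bound, with ComponentsConsumed() as a hypothetical stand-in for GetComponentsConsumedByType and an example limit value:

#include <cstdint>
#include <cstdio>

// Hypothetical stand-in for GetComponentsConsumedByType(): assume a vec3, i.e. 3 components.
static int ComponentsConsumed() { return 3; }

int main() {
    // A vec3 at location 2, component 1 occupies component slots 9, 10 and 11
    // (location * 4 + component = 9). The exclusive end, 2 * 4 + 1 + 3 = 12, is what
    // the checks above compare against the device's component-count limit.
    const int location = 2, component = 1;
    const int max_component_used = location * 4 + component + ComponentsConsumed();

    const uint32_t maxVertexOutputComponents = 64;  // example value of the device limit
    const bool exceeds = max_component_used > static_cast<int>(maxVertexOutputComponents);
    std::printf("exclusive end = %d, exceeds limit = %s\n", max_component_used, exceeds ? "yes" : "no");
    return 0;
}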
+
+// copy the specialization constant value into buf, if it is present
+void GetSpecConstantValue(VkPipelineShaderStageCreateInfo const *pStage, uint32_t spec_id, void *buf) {
+    VkSpecializationInfo const *spec = pStage->pSpecializationInfo;
+
+    if (spec && spec_id < spec->mapEntryCount) {
+        memcpy(buf, (uint8_t *)spec->pData + spec->pMapEntries[spec_id].offset, spec->pMapEntries[spec_id].size);
+    }
+}
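
GetSpecConstantValue reads size bytes at pMapEntries[spec_id].offset inside pData, so what it copies is determined by how the application laid out its VkSpecializationInfo. A minimal sketch of describing a single 32-bit specialization constant; MakeSingleSpecConstant is an illustrative helper name, not part of this file:

#include <vulkan/vulkan.h>
#include <cstdint>

// Builds a VkSpecializationInfo holding one 32-bit value for constant_id = 0.
// 'value' and 'entry' must outlive the returned struct, since it points at both.
VkSpecializationInfo MakeSingleSpecConstant(const uint32_t &value,
                                            VkSpecializationMapEntry &entry) {
    entry.constantID = 0;           // matches the SpecId decoration in the SPIR-V
    entry.offset = 0;               // byte offset into pData
    entry.size = sizeof(uint32_t);  // byte size of the constant

    VkSpecializationInfo info{};
    info.mapEntryCount = 1;
    info.pMapEntries = &entry;
    info.dataSize = sizeof(uint32_t);
    info.pData = &value;
    return info;
}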
+
+// Fill in value with the constant or specialization constant value, if available.
+// Returns true if the value has been accurately filled out.
+static bool GetIntConstantValue(spirv_inst_iter insn, SHADER_MODULE_STATE const *src, VkPipelineShaderStageCreateInfo const *pStage,
+                                const std::unordered_map<uint32_t, uint32_t> &id_to_spec_id, uint32_t *value) {
+    auto type_id = src->get_def(insn.word(1));
+    if (type_id.opcode() != spv::OpTypeInt || type_id.word(2) != 32) {
+        return false;
+    }
+    switch (insn.opcode()) {
+        case spv::OpSpecConstant:
+            *value = insn.word(3);
+            GetSpecConstantValue(pStage, id_to_spec_id.at(insn.word(2)), value);
+            return true;
+        case spv::OpConstant:
+            *value = insn.word(3);
+            return true;
+        default:
+            return false;
+    }
+}
+
+// Map SPIR-V type to VK_COMPONENT_TYPE enum
+VkComponentTypeNV GetComponentType(spirv_inst_iter insn, SHADER_MODULE_STATE const *src) {
+    switch (insn.opcode()) {
+        case spv::OpTypeInt:
+            switch (insn.word(2)) {
+                case 8:
+                    return insn.word(3) != 0 ? VK_COMPONENT_TYPE_SINT8_NV : VK_COMPONENT_TYPE_UINT8_NV;
+                case 16:
+                    return insn.word(3) != 0 ? VK_COMPONENT_TYPE_SINT16_NV : VK_COMPONENT_TYPE_UINT16_NV;
+                case 32:
+                    return insn.word(3) != 0 ? VK_COMPONENT_TYPE_SINT32_NV : VK_COMPONENT_TYPE_UINT32_NV;
+                case 64:
+                    return insn.word(3) != 0 ? VK_COMPONENT_TYPE_SINT64_NV : VK_COMPONENT_TYPE_UINT64_NV;
+                default:
+                    return VK_COMPONENT_TYPE_MAX_ENUM_NV;
+            }
+        case spv::OpTypeFloat:
+            switch (insn.word(2)) {
+                case 16:
+                    return VK_COMPONENT_TYPE_FLOAT16_NV;
+                case 32:
+                    return VK_COMPONENT_TYPE_FLOAT32_NV;
+                case 64:
+                    return VK_COMPONENT_TYPE_FLOAT64_NV;
+                default:
+                    return VK_COMPONENT_TYPE_MAX_ENUM_NV;
+            }
+        default:
+            return VK_COMPONENT_TYPE_MAX_ENUM_NV;
+    }
+}
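
The insn.word(n) indexing used throughout mirrors the SPIR-V binary encoding: word 0 of every instruction packs the word count in its high 16 bits and the opcode in its low 16 bits, and the operands follow as whole words. A small self-contained decoder for a hand-assembled OpTypeInt (opcode 21), matching the word positions GetComponentType reads:

#include <cstdint>
#include <cstdio>

int main() {
    // OpTypeInt %5 32 1 -- word count 4, opcode 21 (OpTypeInt),
    // word 1 = result id 5, word 2 = width 32, word 3 = signedness 1 (signed).
    const uint32_t words[] = {(4u << 16) | 21u, 5u, 32u, 1u};

    const uint32_t opcode = words[0] & 0xffffu;  // low 16 bits: opcode
    const uint32_t word_count = words[0] >> 16;  // high 16 bits: total words
    std::printf("opcode=%u words=%u result_id=%u width=%u signed=%u\n",
                opcode, word_count, words[1], words[2], words[3]);
    return 0;
}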
+
+// Validate SPV_NV_cooperative_matrix behavior that can't be statically validated
+// in SPIRV-Tools (e.g. due to specialization constant usage).
+bool CoreChecks::ValidateCooperativeMatrix(SHADER_MODULE_STATE const *src, VkPipelineShaderStageCreateInfo const *pStage,
+                                           const PIPELINE_STATE *pipeline) const {
+    bool skip = false;
+
+    // Map SPIR-V result ID to specialization constant id (SpecId decoration value)
+    std::unordered_map<uint32_t, uint32_t> id_to_spec_id;
+    // Map SPIR-V result ID to the ID of its type.
+    std::unordered_map<uint32_t, uint32_t> id_to_type_id;
+
+    struct CoopMatType {
+        uint32_t scope, rows, cols;
+        VkComponentTypeNV component_type;
+        bool all_constant;
+
+        CoopMatType() : scope(0), rows(0), cols(0), component_type(VK_COMPONENT_TYPE_MAX_ENUM_NV), all_constant(false) {}
+
+        void Init(uint32_t id, SHADER_MODULE_STATE const *src, VkPipelineShaderStageCreateInfo const *pStage,
+                  const std::unordered_map<uint32_t, uint32_t> &id_to_spec_id) {
+            spirv_inst_iter insn = src->get_def(id);
+            uint32_t component_type_id = insn.word(2);
+            uint32_t scope_id = insn.word(3);
+            uint32_t rows_id = insn.word(4);
+            uint32_t cols_id = insn.word(5);
+            auto component_type_iter = src->get_def(component_type_id);
+            auto scope_iter = src->get_def(scope_id);
+            auto rows_iter = src->get_def(rows_id);
+            auto cols_iter = src->get_def(cols_id);
+
+            all_constant = true;
+            if (!GetIntConstantValue(scope_iter, src, pStage, id_to_spec_id, &scope)) {
+                all_constant = false;
+            }
+            if (!GetIntConstantValue(rows_iter, src, pStage, id_to_spec_id, &rows)) {
+                all_constant = false;
+            }
+            if (!GetIntConstantValue(cols_iter, src, pStage, id_to_spec_id, &cols)) {
+                all_constant = false;
+            }
+            component_type = GetComponentType(component_type_iter, src);
+        }
+    };
+
+    bool seen_coopmat_capability = false;
+
+    for (auto insn : *src) {
+        // Whitelist instructions whose result can be a cooperative matrix type, and
+        // keep track of their types. It would be nice if SPIRV-Headers generated code
+        // to identify which instructions have a result type and result id. Lacking that,
+        // this whitelist is based on the set of instructions that
+        // SPV_NV_cooperative_matrix says can be used with cooperative matrix types.
+        switch (insn.opcode()) {
+            case spv::OpLoad:
+            case spv::OpCooperativeMatrixLoadNV:
+            case spv::OpCooperativeMatrixMulAddNV:
+            case spv::OpSNegate:
+            case spv::OpFNegate:
+            case spv::OpIAdd:
+            case spv::OpFAdd:
+            case spv::OpISub:
+            case spv::OpFSub:
+            case spv::OpFDiv:
+            case spv::OpSDiv:
+            case spv::OpUDiv:
+            case spv::OpMatrixTimesScalar:
+            case spv::OpConstantComposite:
+            case spv::OpCompositeConstruct:
+            case spv::OpConvertFToU:
+            case spv::OpConvertFToS:
+            case spv::OpConvertSToF:
+            case spv::OpConvertUToF:
+            case spv::OpUConvert:
+            case spv::OpSConvert:
+            case spv::OpFConvert:
+                id_to_type_id[insn.word(2)] = insn.word(1);
+                break;
+            default:
+                break;
+        }
+
+        switch (insn.opcode()) {
+            case spv::OpDecorate:
+                if (insn.word(2) == spv::DecorationSpecId) {
+                    id_to_spec_id[insn.word(1)] = insn.word(3);
+                }
+                break;
+            case spv::OpCapability:
+                if (insn.word(1) == spv::CapabilityCooperativeMatrixNV) {
+                    seen_coopmat_capability = true;
+
+                    if (!(pStage->stage & phys_dev_ext_props.cooperative_matrix_props.cooperativeMatrixSupportedStages)) {
+                        skip |=
+                            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                    HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_CooperativeMatrixSupportedStages,
+                                    "OpTypeCooperativeMatrixNV used in shader stage not in cooperativeMatrixSupportedStages (= %u)",
+                                    phys_dev_ext_props.cooperative_matrix_props.cooperativeMatrixSupportedStages);
+                    }
+                }
+                break;
+            case spv::OpMemoryModel:
+                // If the capability isn't enabled, don't bother with the rest of this function.
+                // OpMemoryModel is the first required instruction after all OpCapability instructions.
+                if (!seen_coopmat_capability) {
+                    return skip;
+                }
+                break;
+            case spv::OpTypeCooperativeMatrixNV: {
+                CoopMatType M;
+                M.Init(insn.word(1), src, pStage, id_to_spec_id);
+
+                if (M.all_constant) {
+                    // Validate that the type parameters are all supported for one of the
+                    // operands of a cooperative matrix property.
+                    bool valid = false;
+                    for (unsigned i = 0; i < cooperative_matrix_properties.size(); ++i) {
+                        if (cooperative_matrix_properties[i].AType == M.component_type &&
+                            cooperative_matrix_properties[i].MSize == M.rows && cooperative_matrix_properties[i].KSize == M.cols &&
+                            cooperative_matrix_properties[i].scope == M.scope) {
+                            valid = true;
+                            break;
+                        }
+                        if (cooperative_matrix_properties[i].BType == M.component_type &&
+                            cooperative_matrix_properties[i].KSize == M.rows && cooperative_matrix_properties[i].NSize == M.cols &&
+                            cooperative_matrix_properties[i].scope == M.scope) {
+                            valid = true;
+                            break;
+                        }
+                        if (cooperative_matrix_properties[i].CType == M.component_type &&
+                            cooperative_matrix_properties[i].MSize == M.rows && cooperative_matrix_properties[i].NSize == M.cols &&
+                            cooperative_matrix_properties[i].scope == M.scope) {
+                            valid = true;
+                            break;
+                        }
+                        if (cooperative_matrix_properties[i].DType == M.component_type &&
+                            cooperative_matrix_properties[i].MSize == M.rows && cooperative_matrix_properties[i].NSize == M.cols &&
+                            cooperative_matrix_properties[i].scope == M.scope) {
+                            valid = true;
+                            break;
+                        }
+                    }
+                    if (!valid) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                        HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_CooperativeMatrixType,
+                                        "OpTypeCooperativeMatrixNV (result id = %u) operands don't match a supported matrix type",
+                                        insn.word(1));
+                    }
+                }
+                break;
+            }
+            case spv::OpCooperativeMatrixMulAddNV: {
+                CoopMatType A, B, C, D;
+                if (id_to_type_id.find(insn.word(2)) == id_to_type_id.end() ||
+                    id_to_type_id.find(insn.word(3)) == id_to_type_id.end() ||
+                    id_to_type_id.find(insn.word(4)) == id_to_type_id.end() ||
+                    id_to_type_id.find(insn.word(5)) == id_to_type_id.end()) {
+                    // Couldn't find type of matrix
+                    assert(false);
+                    break;
+                }
+                D.Init(id_to_type_id[insn.word(2)], src, pStage, id_to_spec_id);
+                A.Init(id_to_type_id[insn.word(3)], src, pStage, id_to_spec_id);
+                B.Init(id_to_type_id[insn.word(4)], src, pStage, id_to_spec_id);
+                C.Init(id_to_type_id[insn.word(5)], src, pStage, id_to_spec_id);
+
+                if (A.all_constant && B.all_constant && C.all_constant && D.all_constant) {
+                    // Validate that the type parameters are all supported for the same
+                    // cooperative matrix property.
+                    bool valid = false;
+                    for (unsigned i = 0; i < cooperative_matrix_properties.size(); ++i) {
+                        if (cooperative_matrix_properties[i].AType == A.component_type &&
+                            cooperative_matrix_properties[i].MSize == A.rows && cooperative_matrix_properties[i].KSize == A.cols &&
+                            cooperative_matrix_properties[i].scope == A.scope &&
+
+                            cooperative_matrix_properties[i].BType == B.component_type &&
+                            cooperative_matrix_properties[i].KSize == B.rows && cooperative_matrix_properties[i].NSize == B.cols &&
+                            cooperative_matrix_properties[i].scope == B.scope &&
+
+                            cooperative_matrix_properties[i].CType == C.component_type &&
+                            cooperative_matrix_properties[i].MSize == C.rows && cooperative_matrix_properties[i].NSize == C.cols &&
+                            cooperative_matrix_properties[i].scope == C.scope &&
+
+                            cooperative_matrix_properties[i].DType == D.component_type &&
+                            cooperative_matrix_properties[i].MSize == D.rows && cooperative_matrix_properties[i].NSize == D.cols &&
+                            cooperative_matrix_properties[i].scope == D.scope) {
+                            valid = true;
+                            break;
+                        }
+                    }
+                    if (!valid) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                                        HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_CooperativeMatrixMulAdd,
+                                        "OpCooperativeMatrixMulAddNV (result id = %u) operands don't match a supported matrix "
+                                        "VkCooperativeMatrixPropertiesNV",
+                                        insn.word(2));
+                    }
+                }
+                break;
+            }
+            default:
+                break;
+        }
+    }
+
+    return skip;
+}
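
The cooperative_matrix_properties list consulted above is expected to be populated from the VK_NV_cooperative_matrix property query. A hedged sketch of the usual two-call enumeration, assuming the extension is enabled; QueryCoopMatProperties is an illustrative name and error handling is omitted:

#include <vulkan/vulkan.h>
#include <vector>

// Enumerates the device's supported cooperative matrix shapes/types; the validation
// above checks each OpTypeCooperativeMatrixNV against entries like these.
std::vector<VkCooperativeMatrixPropertiesNV> QueryCoopMatProperties(VkInstance instance,
                                                                    VkPhysicalDevice gpu) {
    auto fp = reinterpret_cast<PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV>(
        vkGetInstanceProcAddr(instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV"));
    std::vector<VkCooperativeMatrixPropertiesNV> props;
    if (!fp) return props;

    uint32_t count = 0;
    fp(gpu, &count, nullptr);  // first call: query the count
    VkCooperativeMatrixPropertiesNV init{};
    init.sType = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV;
    props.assign(count, init);
    fp(gpu, &count, props.data());  // second call: fill the array
    return props;
}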
+
+bool CoreChecks::ValidateExecutionModes(SHADER_MODULE_STATE const *src, spirv_inst_iter entrypoint) const {
+    auto entrypoint_id = entrypoint.word(2);
+
+    // The first denorm execution mode encountered, along with its bit width.
+    // Used to check that the denormBehaviorIndependence rules are respected.
+    std::pair<spv::ExecutionMode, uint32_t> first_denorm_execution_mode = std::make_pair(spv::ExecutionModeMax, 0);
+
+    // The first rounding mode encountered, along with its bit width.
+    // Used to check that the roundingModeIndependence rules are respected.
+    std::pair<spv::ExecutionMode, uint32_t> first_rounding_mode = std::make_pair(spv::ExecutionModeMax, 0);
+
+    bool skip = false;
+
+    uint32_t verticesOut = 0;
+    uint32_t invocations = 0;
+
+    for (auto insn : *src) {
+        if (insn.opcode() == spv::OpExecutionMode && insn.word(1) == entrypoint_id) {
+            auto mode = insn.word(2);
+            switch (mode) {
+                case spv::ExecutionModeSignedZeroInfNanPreserve: {
+                    auto bit_width = insn.word(3);
+                    if ((bit_width == 16 && !phys_dev_ext_props.float_controls_props.shaderSignedZeroInfNanPreserveFloat16) ||
+                        (bit_width == 32 && !phys_dev_ext_props.float_controls_props.shaderSignedZeroInfNanPreserveFloat32) ||
+                        (bit_width == 64 && !phys_dev_ext_props.float_controls_props.shaderSignedZeroInfNanPreserveFloat64)) {
+                        skip |=
+                            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    kVUID_Core_Shader_FeatureNotEnabled,
+                                    "Shader requires SignedZeroInfNanPreserve for bit width %d but it is not enabled on the device",
+                                    bit_width);
+                    }
+                    break;
+                }
+
+                case spv::ExecutionModeDenormPreserve: {
+                    auto bit_width = insn.word(3);
+                    if ((bit_width == 16 && !phys_dev_ext_props.float_controls_props.shaderDenormPreserveFloat16) ||
+                        (bit_width == 32 && !phys_dev_ext_props.float_controls_props.shaderDenormPreserveFloat32) ||
+                        (bit_width == 64 && !phys_dev_ext_props.float_controls_props.shaderDenormPreserveFloat64)) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        kVUID_Core_Shader_FeatureNotEnabled,
+                                        "Shader requires DenormPreserve for bit width %d but it is not enabled on the device",
+                                        bit_width);
+                    }
+
+                    if (first_denorm_execution_mode.first == spv::ExecutionModeMax) {
+                        // Register the first denorm execution mode found
+                        first_denorm_execution_mode = std::make_pair(static_cast<spv::ExecutionMode>(mode), bit_width);
+                    } else if (first_denorm_execution_mode.first != mode && first_denorm_execution_mode.second != bit_width) {
+                        switch (phys_dev_ext_props.float_controls_props.denormBehaviorIndependence) {
+                            case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR:
+                                if (first_denorm_execution_mode.second != 32 && bit_width != 32) {
+                                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                                    VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUID_Core_Shader_FeatureNotEnabled,
+                                                    "Shader uses different denorm execution modes for 16 and 64-bit but "
+                                                    "denormBehaviorIndependence is "
+                                                    "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR on the device");
+                                }
+                                break;
+
+                            case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR:
+                                break;
+
+                            case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR:
+                                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+                                                0, kVUID_Core_Shader_FeatureNotEnabled,
+                                                "Shader uses different denorm execution modes for different bit widths but "
+                                                "denormBehaviorIndependence is "
+                                                "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR on the device");
+                                break;
+
+                            default:
+                                break;
+                        }
+                    }
+                    break;
+                }
+
+                case spv::ExecutionModeDenormFlushToZero: {
+                    auto bit_width = insn.word(3);
+                    if ((bit_width == 16 && !phys_dev_ext_props.float_controls_props.shaderDenormFlushToZeroFloat16) ||
+                        (bit_width == 32 && !phys_dev_ext_props.float_controls_props.shaderDenormFlushToZeroFloat32) ||
+                        (bit_width == 64 && !phys_dev_ext_props.float_controls_props.shaderDenormFlushToZeroFloat64)) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        kVUID_Core_Shader_FeatureNotEnabled,
+                                        "Shader requires DenormFlushToZero for bit width %d but it is not enabled on the device",
+                                        bit_width);
+                    }
+
+                    if (first_denorm_execution_mode.first == spv::ExecutionModeMax) {
+                        // Register the first denorm execution mode found
+                        first_denorm_execution_mode = std::make_pair(static_cast<spv::ExecutionMode>(mode), bit_width);
+                    } else if (first_denorm_execution_mode.first != mode && first_denorm_execution_mode.second != bit_width) {
+                        switch (phys_dev_ext_props.float_controls_props.denormBehaviorIndependence) {
+                            case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR:
+                                if (first_denorm_execution_mode.second != 32 && bit_width != 32) {
+                                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                                    VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUID_Core_Shader_FeatureNotEnabled,
+                                                    "Shader uses different denorm execution modes for 16 and 64-bit but "
+                                                    "denormBehaviorIndependence is "
+                                                    "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR on the device");
+                                }
+                                break;
+
+                            case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR:
+                                break;
+
+                            case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR:
+                                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+                                                0, kVUID_Core_Shader_FeatureNotEnabled,
+                                                "Shader uses different denorm execution modes for different bit widths but "
+                                                "denormBehaviorIndependence is "
+                                                "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR on the device");
+                                break;
+
+                            default:
+                                break;
+                        }
+                    }
+                    break;
+                }
+
+                case spv::ExecutionModeRoundingModeRTE: {
+                    auto bit_width = insn.word(3);
+                    if ((bit_width == 16 && !phys_dev_ext_props.float_controls_props.shaderRoundingModeRTEFloat16) ||
+                        (bit_width == 32 && !phys_dev_ext_props.float_controls_props.shaderRoundingModeRTEFloat32) ||
+                        (bit_width == 64 && !phys_dev_ext_props.float_controls_props.shaderRoundingModeRTEFloat64)) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        kVUID_Core_Shader_FeatureNotEnabled,
+                                        "Shader requires RoundingModeRTE for bit width %d but it is not enabled on the device",
+                                        bit_width);
+                    }
+
+                    if (first_rounding_mode.first == spv::ExecutionModeMax) {
+                        // Register the first rounding mode found
+                        first_rounding_mode = std::make_pair(static_cast<spv::ExecutionMode>(mode), bit_width);
+                    } else if (first_rounding_mode.first != mode && first_rounding_mode.second != bit_width) {
+                        switch (phys_dev_ext_props.float_controls_props.roundingModeIndependence) {
+                            case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR:
+                                if (first_rounding_mode.second != 32 && bit_width != 32) {
+                                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                                    VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUID_Core_Shader_FeatureNotEnabled,
+                                                    "Shader uses different rounding modes for 16 and 64-bit but "
+                                                    "roundingModeIndependence is "
+                                                    "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR on the device");
+                                }
+                                break;
+
+                            case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR:
+                                break;
+
+                            case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR:
+                                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+                                                0, kVUID_Core_Shader_FeatureNotEnabled,
+                                                "Shader uses different rounding modes for different bit widths but "
+                                                "roundingModeIndependence is "
+                                                "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR on the device");
+                                break;
+
+                            default:
+                                break;
+                        }
+                    }
+                    break;
+                }
+
+                case spv::ExecutionModeRoundingModeRTZ: {
+                    auto bit_width = insn.word(3);
+                    if ((bit_width == 16 && !phys_dev_ext_props.float_controls_props.shaderRoundingModeRTZFloat16) ||
+                        (bit_width == 32 && !phys_dev_ext_props.float_controls_props.shaderRoundingModeRTZFloat32) ||
+                        (bit_width == 64 && !phys_dev_ext_props.float_controls_props.shaderRoundingModeRTZFloat64)) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        kVUID_Core_Shader_FeatureNotEnabled,
+                                        "Shader requires RoundingModeRTZ for bit width %d but it is not enabled on the device",
+                                        bit_width);
+                    }
+
+                    if (first_rounding_mode.first == spv::ExecutionModeMax) {
+                        // Register the first rounding mode found
+                        first_rounding_mode = std::make_pair(static_cast<spv::ExecutionMode>(mode), bit_width);
+                    } else if (first_rounding_mode.first != mode && first_rounding_mode.second != bit_width) {
+                        switch (phys_dev_ext_props.float_controls_props.roundingModeIndependence) {
+                            case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR:
+                                if (first_rounding_mode.second != 32 && bit_width != 32) {
+                                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                                    VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUID_Core_Shader_FeatureNotEnabled,
+                                                    "Shader uses different rounding modes for 16 and 64-bit but "
+                                                    "roundingModeIndependence is "
+                                                    "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR on the device");
+                                }
+                                break;
+
+                            case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR:
+                                break;
+
+                            case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR:
+                                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+                                                0, kVUID_Core_Shader_FeatureNotEnabled,
+                                                "Shader uses different rounding modes for different bit widths but "
+                                                "roundingModeIndependence is "
+                                                "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR on the device");
+                                break;
+
+                            default:
+                                break;
+                        }
+                    }
+                    break;
+                }
+
+                case spv::ExecutionModeOutputVertices: {
+                    verticesOut = insn.word(3);
+                    break;
+                }
+
+                case spv::ExecutionModeInvocations: {
+                    invocations = insn.word(3);
+                    break;
+                }
+            }
+        }
+    }
+
+    if (entrypoint.word(1) == spv::ExecutionModelGeometry) {
+        if (verticesOut == 0 || verticesOut > phys_dev_props.limits.maxGeometryOutputVertices) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkPipelineShaderStageCreateInfo-stage-00714",
+                            "Geometry shader entry point must have an OpExecutionMode instruction that "
+                            "specifies a maximum output vertex count that is greater than 0 and less "
+                            "than or equal to maxGeometryOutputVertices. "
+                            "OutputVertices=%d, maxGeometryOutputVertices=%d",
+                            verticesOut, phys_dev_props.limits.maxGeometryOutputVertices);
+        }
+
+        if (invocations == 0 || invocations > phys_dev_props.limits.maxGeometryShaderInvocations) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkPipelineShaderStageCreateInfo-stage-00715",
+                            "Geometry shader entry point must have an OpExecutionMode instruction that "
+                            "specifies an invocation count that is greater than 0 and less "
+                            "than or equal to maxGeometryShaderInvocations. "
+                            "Invocations=%d, maxGeometryShaderInvocations=%d",
+                            invocations, phys_dev_props.limits.maxGeometryShaderInvocations);
+        }
+    }
+    return skip;
+}
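
The two geometry checks compare the OutputVertices and Invocations execution modes against limits from VkPhysicalDeviceLimits. A minimal sketch of how those limits are read on the application side; PrintGeometryLimits is an illustrative name:

#include <vulkan/vulkan.h>
#include <cstdio>

// Prints the geometry-shader limits that the checks above validate against.
void PrintGeometryLimits(VkPhysicalDevice gpu) {
    VkPhysicalDeviceProperties props{};
    vkGetPhysicalDeviceProperties(gpu, &props);
    std::printf("maxGeometryOutputVertices = %u\n", props.limits.maxGeometryOutputVertices);
    std::printf("maxGeometryShaderInvocations = %u\n", props.limits.maxGeometryShaderInvocations);
}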
+
+uint32_t DescriptorTypeToReqs(SHADER_MODULE_STATE const *module, uint32_t type_id) {
+    auto type = module->get_def(type_id);
+
+    while (true) {
+        switch (type.opcode()) {
+            case spv::OpTypeArray:
+            case spv::OpTypeRuntimeArray:
+            case spv::OpTypeSampledImage:
+                type = module->get_def(type.word(2));
+                break;
+            case spv::OpTypePointer:
+                type = module->get_def(type.word(3));
+                break;
+            case spv::OpTypeImage: {
+                auto dim = type.word(3);
+                auto arrayed = type.word(5);
+                auto msaa = type.word(6);
+
+                uint32_t bits = 0;
+                switch (GetFundamentalType(module, type.word(2))) {
+                    case FORMAT_TYPE_FLOAT:
+                        bits = DESCRIPTOR_REQ_COMPONENT_TYPE_FLOAT;
+                        break;
+                    case FORMAT_TYPE_UINT:
+                        bits = DESCRIPTOR_REQ_COMPONENT_TYPE_UINT;
+                        break;
+                    case FORMAT_TYPE_SINT:
+                        bits = DESCRIPTOR_REQ_COMPONENT_TYPE_SINT;
+                        break;
+                    default:
+                        break;
+                }
+
+                switch (dim) {
+                    case spv::Dim1D:
+                        bits |= arrayed ? DESCRIPTOR_REQ_VIEW_TYPE_1D_ARRAY : DESCRIPTOR_REQ_VIEW_TYPE_1D;
+                        return bits;
+                    case spv::Dim2D:
+                        bits |= msaa ? DESCRIPTOR_REQ_MULTI_SAMPLE : DESCRIPTOR_REQ_SINGLE_SAMPLE;
+                        bits |= arrayed ? DESCRIPTOR_REQ_VIEW_TYPE_2D_ARRAY : DESCRIPTOR_REQ_VIEW_TYPE_2D;
+                        return bits;
+                    case spv::Dim3D:
+                        bits |= DESCRIPTOR_REQ_VIEW_TYPE_3D;
+                        return bits;
+                    case spv::DimCube:
+                        bits |= arrayed ? DESCRIPTOR_REQ_VIEW_TYPE_CUBE_ARRAY : DESCRIPTOR_REQ_VIEW_TYPE_CUBE;
+                        return bits;
+                    case spv::DimSubpassData:
+                        bits |= msaa ? DESCRIPTOR_REQ_MULTI_SAMPLE : DESCRIPTOR_REQ_SINGLE_SAMPLE;
+                        return bits;
+                    default:  // buffer, etc.
+                        return bits;
+                }
+            }
+            default:
+                return 0;
+        }
+    }
+}
+
+// For given pipelineLayout verify that the set_layout_node at slot.first
+//  has the requested binding at slot.second and return ptr to that binding
+static VkDescriptorSetLayoutBinding const *GetDescriptorBinding(PIPELINE_LAYOUT_STATE const *pipelineLayout,
+                                                                descriptor_slot_t slot) {
+    if (!pipelineLayout) return nullptr;
+
+    if (slot.first >= pipelineLayout->set_layouts.size()) return nullptr;
+
+    return pipelineLayout->set_layouts[slot.first]->GetDescriptorSetLayoutBindingPtrFromBinding(slot.second);
+}
+
+static bool FindLocalSize(SHADER_MODULE_STATE const *src, uint32_t &local_size_x, uint32_t &local_size_y, uint32_t &local_size_z) {
+    for (auto insn : *src) {
+        if (insn.opcode() == spv::OpEntryPoint) {
+            auto executionModel = insn.word(1);
+            auto entrypointStageBits = ExecutionModelToShaderStageFlagBits(executionModel);
+            if (entrypointStageBits == VK_SHADER_STAGE_COMPUTE_BIT) {
+                auto entrypoint_id = insn.word(2);
+                for (auto insn1 : *src) {
+                    if (insn1.opcode() == spv::OpExecutionMode && insn1.word(1) == entrypoint_id &&
+                        insn1.word(2) == spv::ExecutionModeLocalSize) {
+                        local_size_x = insn1.word(3);
+                        local_size_y = insn1.word(4);
+                        local_size_z = insn1.word(5);
+                        return true;
+                    }
+                }
+            }
+        }
+    }
+    return false;
+}
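
FindLocalSize only extracts the compute workgroup dimensions; a natural companion check, sketched here with the hypothetical helper LocalSizeWithinLimits (not part of this file), compares them against the device's compute limits:

#include <vulkan/vulkan.h>
#include <cstdint>

// Returns true if an OpExecutionMode LocalSize of (x, y, z) fits both the per-dimension
// and the total-invocation compute limits reported by the device.
bool LocalSizeWithinLimits(const VkPhysicalDeviceLimits &limits, uint32_t x, uint32_t y,
                           uint32_t z) {
    if (x > limits.maxComputeWorkGroupSize[0] || y > limits.maxComputeWorkGroupSize[1] ||
        z > limits.maxComputeWorkGroupSize[2]) {
        return false;
    }
    const uint64_t total = static_cast<uint64_t>(x) * y * z;
    return total <= limits.maxComputeWorkGroupInvocations;
}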
+
+void ProcessExecutionModes(SHADER_MODULE_STATE const *src, const spirv_inst_iter &entrypoint, PIPELINE_STATE *pipeline) {
+    auto entrypoint_id = entrypoint.word(2);
+    bool is_point_mode = false;
+
+    for (auto insn : *src) {
+        if (insn.opcode() == spv::OpExecutionMode && insn.word(1) == entrypoint_id) {
+            switch (insn.word(2)) {
+                case spv::ExecutionModePointMode:
+                    // In tessellation shaders, PointMode is separate and trumps the tessellation topology.
+                    is_point_mode = true;
+                    break;
+
+                case spv::ExecutionModeOutputPoints:
+                    pipeline->topology_at_rasterizer = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
+                    break;
+
+                case spv::ExecutionModeIsolines:
+                case spv::ExecutionModeOutputLineStrip:
+                    pipeline->topology_at_rasterizer = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
+                    break;
+
+                case spv::ExecutionModeTriangles:
+                case spv::ExecutionModeQuads:
+                case spv::ExecutionModeOutputTriangleStrip:
+                    pipeline->topology_at_rasterizer = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
+                    break;
+            }
+        }
+    }
+
+    if (is_point_mode) pipeline->topology_at_rasterizer = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
+}
+
+// If PointList topology is specified in the pipeline, verify that a shader geometry stage writes PointSize
+//    o If there is only a vertex shader: gl_PointSize must be written when using points
+//    o If there is a geometry or tessellation shader:
+//        - If shaderTessellationAndGeometryPointSize feature is enabled:
+//            * gl_PointSize must be written in the final geometry stage
+//        - If shaderTessellationAndGeometryPointSize feature is disabled:
+//            * gl_PointSize must NOT be written and a default of 1.0 is assumed
+bool CoreChecks::ValidatePointListShaderState(const PIPELINE_STATE *pipeline, SHADER_MODULE_STATE const *src,
+                                              spirv_inst_iter entrypoint, VkShaderStageFlagBits stage) const {
+    if (pipeline->topology_at_rasterizer != VK_PRIMITIVE_TOPOLOGY_POINT_LIST) {
+        return false;
+    }
+
+    bool pointsize_written = false;
+    bool skip = false;
+
+    // Search for PointSize built-in decorations
+    std::vector<uint32_t> pointsize_builtin_offsets;
+    spirv_inst_iter insn = entrypoint;
+    while (!pointsize_written && (insn.opcode() != spv::OpFunction)) {
+        if (insn.opcode() == spv::OpMemberDecorate) {
+            if (insn.word(3) == spv::DecorationBuiltIn) {
+                if (insn.word(4) == spv::BuiltInPointSize) {
+                    pointsize_written = IsPointSizeWritten(src, insn, entrypoint);
+                }
+            }
+        } else if (insn.opcode() == spv::OpDecorate) {
+            if (insn.word(2) == spv::DecorationBuiltIn) {
+                if (insn.word(3) == spv::BuiltInPointSize) {
+                    pointsize_written = IsPointSizeWritten(src, insn, entrypoint);
+                }
+            }
+        }
+
+        insn++;
+    }
+
+    if ((stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT || stage == VK_SHADER_STAGE_GEOMETRY_BIT) &&
+        !enabled_features.core.shaderTessellationAndGeometryPointSize) {
+        if (pointsize_written) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                            HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_PointSizeBuiltInOverSpecified,
+                            "Pipeline topology is set to POINT_LIST and geometry or tessellation shaders write PointSize which "
+                            "is prohibited when the shaderTessellationAndGeometryPointSize feature is not enabled.");
+        }
+    } else if (!pointsize_written) {
+        skip |=
+            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+                    HandleToUint64(pipeline->pipeline), kVUID_Core_Shader_MissingPointSizeBuiltIn,
+                    "Pipeline topology is set to POINT_LIST, but PointSize is not written to in the shader corresponding to %s.",
+                    string_VkShaderStageFlagBits(stage));
+    }
+    return skip;
+}
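
The rules in the comment block above ValidatePointListShaderState can be restated as a single predicate. PointSizeUsageValid below is only an illustrative restatement of that decision table, not an API of this layer:

// Returns true when the combination of stage, feature and PointSize write is valid
// for a POINT_LIST pipeline, mirroring the rules in the comment above the function.
bool PointSizeUsageValid(bool geom_or_tess_stage, bool tess_geom_pointsize_feature,
                         bool pointsize_written) {
    if (geom_or_tess_stage && !tess_geom_pointsize_feature) {
        // Feature disabled: PointSize must NOT be written; a default of 1.0 is assumed.
        return !pointsize_written;
    }
    // Vertex-only pipelines (and geometry/tessellation stages with the feature enabled)
    // must write PointSize in the final pre-rasterization stage.
    return pointsize_written;
}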
+
+bool CoreChecks::ValidatePipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, const PIPELINE_STATE *pipeline,
+                                             const PIPELINE_STATE::StageState &stage_state, const SHADER_MODULE_STATE *module,
+                                             const spirv_inst_iter &entrypoint, bool check_point_size) const {
+    bool skip = false;
+
+    // Check the module
+    if (!module->has_valid_spirv) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkPipelineShaderStageCreateInfo-module-parameter", "%s does not contain valid spirv for stage %s.",
+                        report_data->FormatHandle(module->vk_shader_module).c_str(), string_VkShaderStageFlagBits(pStage->stage));
+    }
+
+    // If specialization-constant values are given and specialization-constant instructions are present in the shader, the
+    // specializations should be applied and validated.
+    if (pStage->pSpecializationInfo != nullptr && pStage->pSpecializationInfo->mapEntryCount > 0 &&
+        pStage->pSpecializationInfo->pMapEntries != nullptr && module->has_specialization_constants) {
+        // Gather the specialization-constant values.
+        auto const &specialization_info = pStage->pSpecializationInfo;
+        std::unordered_map<uint32_t, std::vector<uint32_t>> id_value_map;
+        id_value_map.reserve(specialization_info->mapEntryCount);
+        for (auto i = 0u; i < specialization_info->mapEntryCount; ++i) {
+            auto const &map_entry = specialization_info->pMapEntries[i];
+            assert(map_entry.size % 4 == 0);
+
+            auto const begin = reinterpret_cast<uint32_t const *>(specialization_info->pData) + map_entry.offset / 4;
+            auto const end = begin + map_entry.size / 4;
+            id_value_map.emplace(map_entry.constantID, std::vector<uint32_t>(begin, end));
+        }
+
+        // Apply the specialization-constant values and revalidate the shader module.
+        spv_target_env const spirv_environment = ((api_version >= VK_API_VERSION_1_1) ? SPV_ENV_VULKAN_1_1 : SPV_ENV_VULKAN_1_0);
+        spvtools::Optimizer optimizer(spirv_environment);
+        spvtools::MessageConsumer consumer = [&skip, &module, &pStage, this](spv_message_level_t level, const char *source,
+                                                                             const spv_position_t &position, const char *message) {
+            skip |= log_msg(
+                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                "VUID-VkPipelineShaderStageCreateInfo-module-parameter", "%s does not contain valid spirv for stage %s. %s",
+                report_data->FormatHandle(module->vk_shader_module).c_str(), string_VkShaderStageFlagBits(pStage->stage), message);
+        };
+        optimizer.SetMessageConsumer(consumer);
+        optimizer.RegisterPass(spvtools::CreateSetSpecConstantDefaultValuePass(id_value_map));
+        optimizer.RegisterPass(spvtools::CreateFreezeSpecConstantValuePass());
+        std::vector<uint32_t> specialized_spirv;
+        auto const optimized =
+            optimizer.Run(module->words.data(), module->words.size(), &specialized_spirv, spvtools::ValidatorOptions(), true);
+        assert(optimized == true);
+
+        if (optimized) {
+            spv_context ctx = spvContextCreate(spirv_environment);
+            spv_const_binary_t binary{specialized_spirv.data(), specialized_spirv.size()};
+            spv_diagnostic diag = nullptr;
+            spv_validator_options options = spvValidatorOptionsCreate();
+            if (device_extensions.vk_khr_relaxed_block_layout) {
+                spvValidatorOptionsSetRelaxBlockLayout(options, true);
+            }
+            if (device_extensions.vk_khr_uniform_buffer_standard_layout &&
+                enabled_features.uniform_buffer_standard_layout.uniformBufferStandardLayout == VK_TRUE) {
+                spvValidatorOptionsSetUniformBufferStandardLayout(options, true);
+            }
+            if (device_extensions.vk_ext_scalar_block_layout &&
+                enabled_features.scalar_block_layout_features.scalarBlockLayout == VK_TRUE) {
+                spvValidatorOptionsSetScalarBlockLayout(options, true);
+            }
+            auto const spv_valid = spvValidateWithOptions(ctx, options, &binary, &diag);
+            if (spv_valid != SPV_SUCCESS) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkPipelineShaderStageCreateInfo-module-parameter",
+                                "After specialization was applied, %s does not contain valid spirv for stage %s.",
+                                report_data->FormatHandle(module->vk_shader_module).c_str(),
+                                string_VkShaderStageFlagBits(pStage->stage));
+            }
+
+            spvValidatorOptionsDestroy(options);
+            spvDiagnosticDestroy(diag);
+            spvContextDestroy(ctx);
+        }
+    }
+
+    // Check the entrypoint
+    if (entrypoint == module->end()) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkPipelineShaderStageCreateInfo-pName-00707", "No entrypoint found named `%s` for stage %s.",
+                        pStage->pName, string_VkShaderStageFlagBits(pStage->stage));
+    }
+    if (skip) return true;  // no point continuing beyond here, any analysis is just going to be garbage.
+
+    // Mark accessible ids
+    auto &accessible_ids = stage_state.accessible_ids;
+
+    // Validate descriptor set layout against what the entrypoint actually uses
+    bool has_writable_descriptor = stage_state.has_writable_descriptor;
+    auto &descriptor_uses = stage_state.descriptor_uses;
+
+    // Validate shader capabilities against enabled device features
+    skip |= ValidateShaderCapabilities(module, pStage->stage);
+    skip |= ValidateShaderStageWritableDescriptor(pStage->stage, has_writable_descriptor);
+    skip |= ValidateShaderStageInputOutputLimits(module, pStage, pipeline, entrypoint);
+    skip |= ValidateShaderStageGroupNonUniform(module, pStage->stage);
+    skip |= ValidateExecutionModes(module, entrypoint);
+    skip |= ValidateSpecializationOffsets(report_data, pStage);
+    skip |= ValidatePushConstantUsage(report_data, pipeline->pipeline_layout->push_constant_ranges.get(), module, accessible_ids,
+                                      pStage->stage);
+    if (check_point_size && !pipeline->graphicsPipelineCI.pRasterizationState->rasterizerDiscardEnable) {
+        skip |= ValidatePointListShaderState(pipeline, module, entrypoint, pStage->stage);
+    }
+    skip |= ValidateCooperativeMatrix(module, pStage, pipeline);
+
+    // Validate descriptor use
+    for (auto use : descriptor_uses) {
+        // Verify given pipelineLayout has requested setLayout with requested binding
+        const auto &binding = GetDescriptorBinding(pipeline->pipeline_layout.get(), use.first);
+        unsigned required_descriptor_count;
+        std::set<uint32_t> descriptor_types = TypeToDescriptorTypeSet(module, use.second.type_id, required_descriptor_count);
+
+        if (!binding) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            kVUID_Core_Shader_MissingDescriptor,
+                            "Shader uses descriptor slot %u.%u (expected `%s`) but not declared in pipeline layout",
+                            use.first.first, use.first.second, string_descriptorTypes(descriptor_types).c_str());
+        } else if (~binding->stageFlags & pStage->stage) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, 0,
+                            kVUID_Core_Shader_DescriptorNotAccessibleFromStage,
+                            "Shader uses descriptor slot %u.%u but descriptor not accessible from stage %s", use.first.first,
+                            use.first.second, string_VkShaderStageFlagBits(pStage->stage));
+        } else if (descriptor_types.find(binding->descriptorType) == descriptor_types.end()) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            kVUID_Core_Shader_DescriptorTypeMismatch,
+                            "Type mismatch on descriptor slot %u.%u (expected `%s`) but descriptor of type %s", use.first.first,
+                            use.first.second, string_descriptorTypes(descriptor_types).c_str(),
+                            string_VkDescriptorType(binding->descriptorType));
+        } else if (binding->descriptorCount < required_descriptor_count) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            kVUID_Core_Shader_DescriptorTypeMismatch,
+                            "Shader expects at least %u descriptors for binding %u.%u but only %u provided",
+                            required_descriptor_count, use.first.first, use.first.second, binding->descriptorCount);
+        }
+    }
+
+    // Validate use of input attachments against subpass structure
+    if (pStage->stage == VK_SHADER_STAGE_FRAGMENT_BIT) {
+        auto input_attachment_uses = CollectInterfaceByInputAttachmentIndex(module, accessible_ids);
+
+        auto rpci = pipeline->rp_state->createInfo.ptr();
+        auto subpass = pipeline->graphicsPipelineCI.subpass;
+
+        for (auto use : input_attachment_uses) {
+            auto input_attachments = rpci->pSubpasses[subpass].pInputAttachments;
+            auto index = (input_attachments && use.first < rpci->pSubpasses[subpass].inputAttachmentCount)
+                             ? input_attachments[use.first].attachment
+                             : VK_ATTACHMENT_UNUSED;
+
+            if (index == VK_ATTACHMENT_UNUSED) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                kVUID_Core_Shader_MissingInputAttachment,
+                                "Shader consumes input attachment index %d but not provided in subpass", use.first);
+            } else if (!(GetFormatType(rpci->pAttachments[index].format) & GetFundamentalType(module, use.second.type_id))) {
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            kVUID_Core_Shader_InputAttachmentTypeMismatch,
+                            "Subpass input attachment %u format of %s does not match type used in shader `%s`", use.first,
+                            string_VkFormat(rpci->pAttachments[index].format), DescribeType(module, use.second.type_id).c_str());
+            }
+        }
+    }
+    if (pStage->stage == VK_SHADER_STAGE_COMPUTE_BIT) {
+        skip |= ValidateComputeWorkGroupSizes(module);
+    }
+    return skip;
+}
+
+static bool ValidateInterfaceBetweenStages(debug_report_data const *report_data, SHADER_MODULE_STATE const *producer,
+                                           spirv_inst_iter producer_entrypoint, shader_stage_attributes const *producer_stage,
+                                           SHADER_MODULE_STATE const *consumer, spirv_inst_iter consumer_entrypoint,
+                                           shader_stage_attributes const *consumer_stage) {
+    bool skip = false;
+
+    auto outputs =
+        CollectInterfaceByLocation(producer, producer_entrypoint, spv::StorageClassOutput, producer_stage->arrayed_output);
+    auto inputs = CollectInterfaceByLocation(consumer, consumer_entrypoint, spv::StorageClassInput, consumer_stage->arrayed_input);
+
+    auto a_it = outputs.begin();
+    auto b_it = inputs.begin();
+
+    // Maps sorted by key (location); walk them together to find mismatches
+    while ((outputs.size() > 0 && a_it != outputs.end()) || (inputs.size() && b_it != inputs.end())) {
+        bool a_at_end = outputs.size() == 0 || a_it == outputs.end();
+        bool b_at_end = inputs.size() == 0 || b_it == inputs.end();
+        auto a_first = a_at_end ? std::make_pair(0u, 0u) : a_it->first;
+        auto b_first = b_at_end ? std::make_pair(0u, 0u) : b_it->first;
+
+        if (b_at_end || ((!a_at_end) && (a_first < b_first))) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+                            HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_OutputNotConsumed,
+                            "%s writes to output location %u.%u which is not consumed by %s", producer_stage->name, a_first.first,
+                            a_first.second, consumer_stage->name);
+            a_it++;
+        } else if (a_at_end || a_first > b_first) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+                            HandleToUint64(consumer->vk_shader_module), kVUID_Core_Shader_InputNotProduced,
+                            "%s consumes input location %u.%u which is not written by %s", consumer_stage->name, b_first.first,
+                            b_first.second, producer_stage->name);
+            b_it++;
+        } else {
+            // subtleties of arrayed interfaces:
+            // - if is_patch, then the member is not arrayed, even though the interface may be.
+            // - if is_block_member, then the extra array level of an arrayed interface is not
+            //   expressed in the member type -- it's expressed in the block type.
+            if (!TypesMatch(producer, consumer, a_it->second.type_id, b_it->second.type_id,
+                            producer_stage->arrayed_output && !a_it->second.is_patch && !a_it->second.is_block_member,
+                            consumer_stage->arrayed_input && !b_it->second.is_patch && !b_it->second.is_block_member, true)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+                                HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
+                                "Type mismatch on location %u.%u: '%s' vs '%s'", a_first.first, a_first.second,
+                                DescribeType(producer, a_it->second.type_id).c_str(),
+                                DescribeType(consumer, b_it->second.type_id).c_str());
+            }
+            if (a_it->second.is_patch != b_it->second.is_patch) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+                                HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
+                                "Decoration mismatch on location %u.%u: is per-%s in %s stage but per-%s in %s stage",
+                                a_first.first, a_first.second, a_it->second.is_patch ? "patch" : "vertex", producer_stage->name,
+                                b_it->second.is_patch ? "patch" : "vertex", consumer_stage->name);
+            }
+            if (a_it->second.is_relaxed_precision != b_it->second.is_relaxed_precision) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+                                HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
+                                "Decoration mismatch on location %u.%u: %s and %s stages differ in precision", a_first.first,
+                                a_first.second, producer_stage->name, consumer_stage->name);
+            }
+            a_it++;
+            b_it++;
+        }
+    }
+
+    if (consumer_stage->stage != VK_SHADER_STAGE_FRAGMENT_BIT) {
+        auto builtins_producer = CollectBuiltinBlockMembers(producer, producer_entrypoint, spv::StorageClassOutput);
+        auto builtins_consumer = CollectBuiltinBlockMembers(consumer, consumer_entrypoint, spv::StorageClassInput);
+
+        if (!builtins_producer.empty() && !builtins_consumer.empty()) {
+            if (builtins_producer.size() != builtins_consumer.size()) {
+                skip |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+                            HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
+                            "Number of elements inside builtin block differ between stages (%s %d vs %s %d).", producer_stage->name,
+                            (int)builtins_producer.size(), consumer_stage->name, (int)builtins_consumer.size());
+            } else {
+                auto it_producer = builtins_producer.begin();
+                auto it_consumer = builtins_consumer.begin();
+                while (it_producer != builtins_producer.end() && it_consumer != builtins_consumer.end()) {
+                    if (*it_producer != *it_consumer) {
+                        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+                                        HandleToUint64(producer->vk_shader_module), kVUID_Core_Shader_InterfaceTypeMismatch,
+                                        "Builtin variable inside block doesn't match between %s and %s.", producer_stage->name,
+                                        consumer_stage->name);
+                        break;
+                    }
+                    it_producer++;
+                    it_consumer++;
+                }
+            }
+        }
+    }
+
+    return skip;
+}
+
+static inline uint32_t DetermineFinalGeomStage(const PIPELINE_STATE *pipeline, const VkGraphicsPipelineCreateInfo *pCreateInfo) {
+    uint32_t stage_mask = 0;
+    if (pipeline->topology_at_rasterizer == VK_PRIMITIVE_TOPOLOGY_POINT_LIST) {
+        for (uint32_t i = 0; i < pCreateInfo->stageCount; i++) {
+            stage_mask |= pCreateInfo->pStages[i].stage;
+        }
+        // Determine the final geometry stage, i.e. the shader in which PointSize should be written
+        if (stage_mask & VK_SHADER_STAGE_MESH_BIT_NV) {
+            stage_mask = VK_SHADER_STAGE_MESH_BIT_NV;
+        } else if (stage_mask & VK_SHADER_STAGE_GEOMETRY_BIT) {
+            stage_mask = VK_SHADER_STAGE_GEOMETRY_BIT;
+        } else if (stage_mask & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) {
+            stage_mask = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
+        } else if (stage_mask & VK_SHADER_STAGE_VERTEX_BIT) {
+            stage_mask = VK_SHADER_STAGE_VERTEX_BIT;
+        }
+    }
+    return stage_mask;
+}
+
+// Validate the shaders used by the given pipeline and store the active_slots
+//  that are actually used by the pipeline into pPipeline->active_slots
+bool CoreChecks::ValidateGraphicsPipelineShaderState(const PIPELINE_STATE *pipeline) const {
+    auto pCreateInfo = pipeline->graphicsPipelineCI.ptr();
+    int vertex_stage = GetShaderStageId(VK_SHADER_STAGE_VERTEX_BIT);
+    int fragment_stage = GetShaderStageId(VK_SHADER_STAGE_FRAGMENT_BIT);
+
+    const SHADER_MODULE_STATE *shaders[32];
+    memset(shaders, 0, sizeof(shaders));
+    spirv_inst_iter entrypoints[32];
+    memset(entrypoints, 0, sizeof(entrypoints));
+    bool skip = false;
+
+    uint32_t pointlist_stage_mask = DetermineFinalGeomStage(pipeline, pCreateInfo);
+
+    for (uint32_t i = 0; i < pCreateInfo->stageCount; i++) {
+        auto pStage = &pCreateInfo->pStages[i];
+        auto stage_id = GetShaderStageId(pStage->stage);
+        shaders[stage_id] = GetShaderModuleState(pStage->module);
+        entrypoints[stage_id] = FindEntrypoint(shaders[stage_id], pStage->pName, pStage->stage);
+        skip |= ValidatePipelineShaderStage(pStage, pipeline, pipeline->stage_state[i], shaders[stage_id], entrypoints[stage_id],
+                                            (pointlist_stage_mask == pStage->stage));
+    }
+
+    // if the shader stages are no good individually, cross-stage validation is pointless.
+    if (skip) return true;
+
+    auto vi = pCreateInfo->pVertexInputState;
+
+    if (vi) {
+        skip |= ValidateViConsistency(report_data, vi);
+    }
+
+    if (shaders[vertex_stage] && shaders[vertex_stage]->has_valid_spirv) {
+        skip |= ValidateViAgainstVsInputs(report_data, vi, shaders[vertex_stage], entrypoints[vertex_stage]);
+    }
+
+    int producer = GetShaderStageId(VK_SHADER_STAGE_VERTEX_BIT);
+    int consumer = GetShaderStageId(VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT);
+
+    while (!shaders[producer] && producer != fragment_stage) {
+        producer++;
+        consumer++;
+    }
+
+    for (; producer != fragment_stage && consumer <= fragment_stage; consumer++) {
+        assert(shaders[producer]);
+        if (shaders[consumer]) {
+            if (shaders[consumer]->has_valid_spirv && shaders[producer]->has_valid_spirv) {
+                skip |= ValidateInterfaceBetweenStages(report_data, shaders[producer], entrypoints[producer],
+                                                       &shader_stage_attribs[producer], shaders[consumer], entrypoints[consumer],
+                                                       &shader_stage_attribs[consumer]);
+            }
+
+            producer = consumer;
+        }
+    }
+
+    if (shaders[fragment_stage] && shaders[fragment_stage]->has_valid_spirv) {
+        skip |= ValidateFsOutputsAgainstRenderPass(report_data, shaders[fragment_stage], entrypoints[fragment_stage], pipeline,
+                                                   pCreateInfo->subpass);
+    }
+
+    return skip;
+}
+
+bool CoreChecks::ValidateComputePipeline(PIPELINE_STATE *pipeline) const {
+    const auto &stage = *pipeline->computePipelineCI.stage.ptr();
+
+    const SHADER_MODULE_STATE *module = GetShaderModuleState(stage.module);
+    const spirv_inst_iter entrypoint = FindEntrypoint(module, stage.pName, stage.stage);
+
+    return ValidatePipelineShaderStage(&stage, pipeline, pipeline->stage_state[0], module, entrypoint, false);
+}
+
+bool CoreChecks::ValidateRayTracingPipelineNV(PIPELINE_STATE *pipeline) const {
+    bool skip = false;
+
+    if (pipeline->raytracingPipelineCI.maxRecursionDepth > phys_dev_ext_props.ray_tracing_props.maxRecursionDepth) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkRayTracingPipelineCreateInfoNV-maxRecursionDepth-02412", ": %d > %d",
+                        pipeline->raytracingPipelineCI.maxRecursionDepth, phys_dev_ext_props.ray_tracing_props.maxRecursionDepth);
+    }
+
+    const auto *stages = pipeline->raytracingPipelineCI.ptr()->pStages;
+    const auto *groups = pipeline->raytracingPipelineCI.ptr()->pGroups;
+
+    uint32_t raygen_stages_found = 0;
+    for (uint32_t stage_index = 0; stage_index < pipeline->raytracingPipelineCI.stageCount; stage_index++) {
+        const auto &stage = stages[stage_index];
+
+        const SHADER_MODULE_STATE *module = GetShaderModuleState(stage.module);
+        const spirv_inst_iter entrypoint = FindEntrypoint(module, stage.pName, stage.stage);
+
+        skip |= ValidatePipelineShaderStage(&stage, pipeline, pipeline->stage_state[stage_index], module, entrypoint, false);
+
+        if (stage.stage == VK_SHADER_STAGE_RAYGEN_BIT_NV) {
+            raygen_stages_found++;
+        }
+    }
+    if (raygen_stages_found != 1) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkRayTracingPipelineCreateInfoNV-stage-02408", " : %d raygen stages specified", raygen_stages_found);
+    }
+
+    for (uint32_t group_index = 0; group_index < pipeline->raytracingPipelineCI.groupCount; group_index++) {
+        const auto &group = groups[group_index];
+
+        if (group.type == VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV) {
+            if (group.generalShader >= pipeline->raytracingPipelineCI.stageCount ||
+                (stages[group.generalShader].stage != VK_SHADER_STAGE_RAYGEN_BIT_NV &&
+                 stages[group.generalShader].stage != VK_SHADER_STAGE_MISS_BIT_NV &&
+                 stages[group.generalShader].stage != VK_SHADER_STAGE_CALLABLE_BIT_NV)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkRayTracingShaderGroupCreateInfoNV-type-02413", ": pGroups[%d]", group_index);
+            }
+            if (group.anyHitShader != VK_SHADER_UNUSED_NV || group.closestHitShader != VK_SHADER_UNUSED_NV ||
+                group.intersectionShader != VK_SHADER_UNUSED_NV) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkRayTracingShaderGroupCreateInfoNV-type-02414", ": pGroups[%d]", group_index);
+            }
+        } else if (group.type == VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV) {
+            if (group.intersectionShader >= pipeline->raytracingPipelineCI.stageCount ||
+                stages[group.intersectionShader].stage != VK_SHADER_STAGE_INTERSECTION_BIT_NV) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkRayTracingShaderGroupCreateInfoNV-type-02415", ": pGroups[%d]", group_index);
+            }
+        } else if (group.type == VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV) {
+            if (group.intersectionShader != VK_SHADER_UNUSED_NV) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkRayTracingShaderGroupCreateInfoNV-type-02416", ": pGroups[%d]", group_index);
+            }
+        }
+
+        if (group.type == VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV ||
+            group.type == VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV) {
+            if (group.anyHitShader != VK_SHADER_UNUSED_NV && (group.anyHitShader >= pipeline->raytracingPipelineCI.stageCount ||
+                                                              stages[group.anyHitShader].stage != VK_SHADER_STAGE_ANY_HIT_BIT_NV)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkRayTracingShaderGroupCreateInfoNV-anyHitShader-02418", ": pGroups[%d]", group_index);
+            }
+            if (group.closestHitShader != VK_SHADER_UNUSED_NV &&
+                (group.closestHitShader >= pipeline->raytracingPipelineCI.stageCount ||
+                 stages[group.closestHitShader].stage != VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV)) {
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                "VUID-VkRayTracingShaderGroupCreateInfoNV-closestHitShader-02417", ": pGroups[%d]", group_index);
+            }
+        }
+    }
+    return skip;
+}
+
+uint32_t ValidationCache::MakeShaderHash(VkShaderModuleCreateInfo const *smci) { return XXH32(smci->pCode, smci->codeSize, 0); }
+
+static ValidationCache *GetValidationCacheInfo(VkShaderModuleCreateInfo const *pCreateInfo) {
+    const auto validation_cache_ci = lvl_find_in_chain<VkShaderModuleValidationCacheCreateInfoEXT>(pCreateInfo->pNext);
+    if (validation_cache_ci) {
+        return CastFromHandle<ValidationCache *>(validation_cache_ci->validationCache);
+    }
+    return nullptr;
+}
+
+bool CoreChecks::PreCallValidateCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
+                                                   const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule) const {
+    bool skip = false;
+    spv_result_t spv_valid = SPV_SUCCESS;
+
+    if (disabled.shader_validation) {
+        return false;
+    }
+
+    auto have_glsl_shader = device_extensions.vk_nv_glsl_shader;
+
+    if (!have_glsl_shader && (pCreateInfo->codeSize % 4)) {
+        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkShaderModuleCreateInfo-pCode-01376",
+                        "SPIR-V module not valid: Codesize must be a multiple of 4 but is " PRINTF_SIZE_T_SPECIFIER ".",
+                        pCreateInfo->codeSize);
+    } else {
+        auto cache = GetValidationCacheInfo(pCreateInfo);
+        uint32_t hash = 0;
+        if (cache) {
+            hash = ValidationCache::MakeShaderHash(pCreateInfo);
+            if (cache->Contains(hash)) return false;
+        }
+
+        // Use SPIRV-Tools validator to try and catch any issues with the module itself. If specialization constants are present,
+        // the default values will be used during validation.
+        spv_target_env spirv_environment = SPV_ENV_VULKAN_1_0;
+        if (api_version >= VK_API_VERSION_1_1) {
+            if (device_extensions.vk_khr_spirv_1_4) {
+                spirv_environment = SPV_ENV_VULKAN_1_1_SPIRV_1_4;
+            } else {
+                spirv_environment = SPV_ENV_VULKAN_1_1;
+            }
+        }
+        spv_context ctx = spvContextCreate(spirv_environment);
+        spv_const_binary_t binary{pCreateInfo->pCode, pCreateInfo->codeSize / sizeof(uint32_t)};
+        spv_diagnostic diag = nullptr;
+        spv_validator_options options = spvValidatorOptionsCreate();
+        if (device_extensions.vk_khr_relaxed_block_layout) {
+            spvValidatorOptionsSetRelaxBlockLayout(options, true);
+        }
+        if (device_extensions.vk_khr_uniform_buffer_standard_layout &&
+            enabled_features.uniform_buffer_standard_layout.uniformBufferStandardLayout == VK_TRUE) {
+            spvValidatorOptionsSetUniformBufferStandardLayout(options, true);
+        }
+        if (device_extensions.vk_ext_scalar_block_layout &&
+            enabled_features.scalar_block_layout_features.scalarBlockLayout == VK_TRUE) {
+            spvValidatorOptionsSetScalarBlockLayout(options, true);
+        }
+        spv_valid = spvValidateWithOptions(ctx, options, &binary, &diag);
+        if (spv_valid != SPV_SUCCESS) {
+            if (!have_glsl_shader || (pCreateInfo->pCode[0] == spv::MagicNumber)) {
+                skip |=
+                    log_msg(report_data, spv_valid == SPV_WARNING ? VK_DEBUG_REPORT_WARNING_BIT_EXT : VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                            VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUID_Core_Shader_InconsistentSpirv,
+                            "SPIR-V module not valid: %s", diag && diag->error ? diag->error : "(no error text)");
+            }
+        } else {
+            if (cache) {
+                cache->Insert(hash);
+            }
+        }
+
+        spvValidatorOptionsDestroy(options);
+        spvDiagnosticDestroy(diag);
+        spvContextDestroy(ctx);
+    }
+
+    return skip;
+}
+
+bool CoreChecks::ValidateComputeWorkGroupSizes(const SHADER_MODULE_STATE *shader) const {
+    bool skip = false;
+    uint32_t local_size_x = 0;
+    uint32_t local_size_y = 0;
+    uint32_t local_size_z = 0;
+    if (FindLocalSize(shader, local_size_x, local_size_y, local_size_z)) {
+        if (local_size_x > phys_dev_props.limits.maxComputeWorkGroupSize[0]) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+                            HandleToUint64(shader->vk_shader_module), "UNASSIGNED-features-limits-maxComputeWorkGroupSize",
+                            "%s local_size_x (%" PRIu32 ") exceeds device limit maxComputeWorkGroupSize[0] (%" PRIu32 ").",
+                            report_data->FormatHandle(shader->vk_shader_module).c_str(), local_size_x,
+                            phys_dev_props.limits.maxComputeWorkGroupSize[0]);
+        }
+        if (local_size_y > phys_dev_props.limits.maxComputeWorkGroupSize[1]) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+                            HandleToUint64(shader->vk_shader_module), "UNASSIGNED-features-limits-maxComputeWorkGroupSize",
+                            "%s local_size_y (%" PRIu32 ") exceeds device limit maxComputeWorkGroupSize[1] (%" PRIu32 ").",
+                            report_data->FormatHandle(shader->vk_shader_module).c_str(), local_size_y,
+                            phys_dev_props.limits.maxComputeWorkGroupSize[1]);
+        }
+        if (local_size_z > phys_dev_props.limits.maxComputeWorkGroupSize[2]) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+                            HandleToUint64(shader->vk_shader_module), "UNASSIGNED-features-limits-maxComputeWorkGroupSize",
+                            "%s local_size_z (%" PRIu32 ") exceeds device limit maxComputeWorkGroupSize[2] (%" PRIu32 ").",
+                            report_data->FormatHandle(shader->vk_shader_module).c_str(), local_size_z,
+                            phys_dev_props.limits.maxComputeWorkGroupSize[2]);
+        }
+
+        uint32_t limit = phys_dev_props.limits.maxComputeWorkGroupInvocations;
+        // Widen to 64 bits before multiplying so the product cannot overflow.
+        uint64_t invocations = static_cast<uint64_t>(local_size_x) * local_size_y;
+        bool fail = false;
+        if (invocations > UINT32_MAX || invocations > limit) {
+            fail = true;
+        }
+        if (!fail) {
+            invocations *= local_size_z;
+            if (invocations > UINT32_MAX || invocations > limit) {
+                fail = true;
+            }
+        }
+        if (fail) {
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+                            HandleToUint64(shader->vk_shader_module), "UNASSIGNED-features-limits-maxComputeWorkGroupInvocations",
+                            "%s local_size (%" PRIu32 ", %" PRIu32 ", %" PRIu32
+                            ") exceeds device limit maxComputeWorkGroupInvocations (%" PRIu32 ").",
+                            report_data->FormatHandle(shader->vk_shader_module).c_str(), local_size_x, local_size_y, local_size_z,
+                            limit);
+        }
+    }
+    return skip;
+}
diff --git a/src/third_party/vulkan-validation-layers/src/layers/shader_validation.h b/src/third_party/vulkan-validation-layers/src/layers/shader_validation.h
new file mode 100644
index 0000000..d2d11f5
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/shader_validation.h
@@ -0,0 +1,329 @@
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (C) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Chris Forbes <chrisf@ijw.co.nz>
+ */
+#ifndef VULKAN_SHADER_VALIDATION_H
+#define VULKAN_SHADER_VALIDATION_H
+
+#include <cassert>
+#include <cstdlib>
+#include <cstring>
+#include <unordered_map>
+#include <unordered_set>
+#include <vector>
+
+#include "vulkan/vulkan.h"
+#include <SPIRV/spirv.hpp>
+#include <generated/spirv_tools_commit_id.h>
+#include "spirv-tools/optimizer.hpp"
+#include "core_validation_types.h"
+
+// A forward iterator over spirv instructions. Provides easy access to len, opcode, and content words
+// without the caller needing to care too much about the physical SPIRV module layout.
+struct spirv_inst_iter {
+    std::vector<uint32_t>::const_iterator zero;
+    std::vector<uint32_t>::const_iterator it;
+
+    uint32_t len() const {
+        auto result = *it >> 16;
+        assert(result > 0);
+        return result;
+    }
+
+    uint32_t opcode() { return *it & 0x0ffffu; }
+
+    uint32_t const &word(unsigned n) const {
+        assert(n < len());
+        return it[n];
+    }
+
+    uint32_t offset() { return (uint32_t)(it - zero); }
+
+    spirv_inst_iter() {}
+
+    spirv_inst_iter(std::vector<uint32_t>::const_iterator zero, std::vector<uint32_t>::const_iterator it) : zero(zero), it(it) {}
+
+    bool operator==(spirv_inst_iter const &other) const { return it == other.it; }
+
+    bool operator!=(spirv_inst_iter const &other) const { return it != other.it; }
+
+    spirv_inst_iter operator++(int) {  // x++
+        spirv_inst_iter ii = *this;
+        it += len();
+        return ii;
+    }
+
+    spirv_inst_iter operator++() {  // ++x;
+        it += len();
+        return *this;
+    }
+
+    // The iterator and the value are the same thing.
+    spirv_inst_iter &operator*() { return *this; }
+    spirv_inst_iter const &operator*() const { return *this; }
+};
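+
+// A minimal usage sketch (not used by the validation code): count the instructions in a
+// raw SPIR-V binary that carry a given opcode. It assumes `words` holds a complete module,
+// i.e. a 5-word header followed by well-formed instructions; the function name and
+// parameters are illustrative only.
+static inline uint32_t CountInstructionsWithOpcode(const std::vector<uint32_t> &words, uint32_t wanted_opcode) {
+    uint32_t count = 0;
+    // The instruction stream begins after the 5-word SPIR-V header.
+    spirv_inst_iter insn(words.begin(), words.begin() + 5);
+    const spirv_inst_iter end(words.begin(), words.end());
+    for (; insn != end; ++insn) {
+        if (insn.opcode() == wanted_opcode) ++count;
+    }
+    return count;
+}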
+
+struct decoration_set {
+    enum {
+        location_bit = 1 << 0,
+        patch_bit = 1 << 1,
+        relaxed_precision_bit = 1 << 2,
+        block_bit = 1 << 3,
+        buffer_block_bit = 1 << 4,
+        component_bit = 1 << 5,
+        input_attachment_index_bit = 1 << 6,
+        descriptor_set_bit = 1 << 7,
+        binding_bit = 1 << 8,
+        nonwritable_bit = 1 << 9,
+        builtin_bit = 1 << 10,
+    };
+    uint32_t flags = 0;
+    uint32_t location = static_cast<uint32_t>(-1);
+    uint32_t component = 0;
+    uint32_t input_attachment_index = 0;
+    uint32_t descriptor_set = 0;
+    uint32_t binding = 0;
+    uint32_t builtin = static_cast<uint32_t>(-1);
+
+    void merge(decoration_set const &other) {
+        if (other.flags & location_bit) location = other.location;
+        if (other.flags & component_bit) component = other.component;
+        if (other.flags & input_attachment_index_bit) input_attachment_index = other.input_attachment_index;
+        if (other.flags & descriptor_set_bit) descriptor_set = other.descriptor_set;
+        if (other.flags & binding_bit) binding = other.binding;
+        if (other.flags & builtin_bit) builtin = other.builtin;
+        flags |= other.flags;
+    }
+
+    void add(uint32_t decoration, uint32_t value);
+};
+
+struct SHADER_MODULE_STATE : public BASE_NODE {
+    // The spirv image itself
+    std::vector<uint32_t> words;
+    // A mapping of <id> to the first word of its def. This is useful because walking type
+    // trees, constant expressions, etc requires jumping all over the instruction stream.
+    std::unordered_map<unsigned, unsigned> def_index;
+    std::unordered_map<unsigned, decoration_set> decorations;
+    struct EntryPoint {
+        uint32_t offset;
+        VkShaderStageFlags stage;
+    };
+    std::unordered_multimap<std::string, EntryPoint> entry_points;
+    bool has_valid_spirv;
+    bool has_specialization_constants;
+    VkShaderModule vk_shader_module;
+    uint32_t gpu_validation_shader_id;
+
+    std::vector<uint32_t> PreprocessShaderBinary(uint32_t *src_binary, size_t binary_size, spv_target_env env) {
+        std::vector<uint32_t> src(src_binary, src_binary + binary_size / sizeof(uint32_t));
+
+        // Check if there are any group decoration instructions, and flatten them if found.
+        bool has_group_decoration = false;
+        bool done = false;
+        // Not value-initialized by the constructors; establish a known state before the scan below possibly sets it.
+        has_specialization_constants = false;
+
+        // Walk through the first part of the SPIR-V module, looking for group decoration and specialization constant instructions.
+        // Skip the header (5 words).
+        auto itr = spirv_inst_iter(src.begin(), src.begin() + 5);
+        auto itrend = spirv_inst_iter(src.begin(), src.end());
+        while (itr != itrend && !done) {
+            spv::Op opcode = (spv::Op)itr.opcode();
+            switch (opcode) {
+                case spv::OpDecorationGroup:
+                case spv::OpGroupDecorate:
+                case spv::OpGroupMemberDecorate:
+                    has_group_decoration = true;
+                    break;
+                case spv::OpSpecConstantTrue:
+                case spv::OpSpecConstantFalse:
+                case spv::OpSpecConstant:
+                case spv::OpSpecConstantComposite:
+                case spv::OpSpecConstantOp:
+                    has_specialization_constants = true;
+                    break;
+                case spv::OpFunction:
+                    // An OpFunction indicates there are no more decorations
+                    done = true;
+                    break;
+                default:
+                    break;
+            }
+            itr++;
+        }
+
+        if (has_group_decoration) {
+            spvtools::Optimizer optimizer(env);
+            optimizer.RegisterPass(spvtools::CreateFlattenDecorationPass());
+            std::vector<uint32_t> optimized_binary;
+            // Run optimizer to flatten decorations only, set skip_validation so as to not re-run validator
+            auto result =
+                optimizer.Run(src_binary, binary_size / sizeof(uint32_t), &optimized_binary, spvtools::ValidatorOptions(), true);
+            if (result) {
+                return optimized_binary;
+            }
+        }
+        // Return the original module.
+        return src;
+    }
+
+    SHADER_MODULE_STATE(VkShaderModuleCreateInfo const *pCreateInfo, VkShaderModule shaderModule, spv_target_env env,
+                        uint32_t unique_shader_id)
+        : words(PreprocessShaderBinary((uint32_t *)pCreateInfo->pCode, pCreateInfo->codeSize, env)),
+          def_index(),
+          has_valid_spirv(true),
+          vk_shader_module(shaderModule),
+          gpu_validation_shader_id(unique_shader_id) {
+        BuildDefIndex();
+    }
+
+    SHADER_MODULE_STATE()
+        : has_valid_spirv(false),
+          has_specialization_constants(false),
+          vk_shader_module(VK_NULL_HANDLE),
+          gpu_validation_shader_id(UINT32_MAX) {}
+
+    decoration_set get_decorations(unsigned id) const {
+        // return the actual decorations for this id, or a default set.
+        auto it = decorations.find(id);
+        if (it != decorations.end()) return it->second;
+        return decoration_set();
+    }
+
+    // Expose begin() / end() to enable range-based for
+    spirv_inst_iter begin() const { return spirv_inst_iter(words.begin(), words.begin() + 5); }  // First insn
+    spirv_inst_iter end() const { return spirv_inst_iter(words.begin(), words.end()); }          // Just past last insn
+    // Given an offset into the module, produce an iterator there.
+    spirv_inst_iter at(unsigned offset) const { return spirv_inst_iter(words.begin(), words.begin() + offset); }
+
+    // Gets an iterator to the definition of an id
+    spirv_inst_iter get_def(unsigned id) const {
+        auto it = def_index.find(id);
+        if (it == def_index.end()) {
+            return end();
+        }
+        return at(it->second);
+    }
+
+    void BuildDefIndex();
+};
+
+class ValidationCache {
+    // hashes of shaders that have passed validation before, and can be skipped.
+    // we don't store negative results, as we would have to also store what was
+    // wrong with them; also, we expect they will get fixed, so we're less
+    // likely to see them again.
+    std::unordered_set<uint32_t> good_shader_hashes;
+    ValidationCache() {}
+
+  public:
+    static VkValidationCacheEXT Create(VkValidationCacheCreateInfoEXT const *pCreateInfo) {
+        auto cache = new ValidationCache();
+        cache->Load(pCreateInfo);
+        return VkValidationCacheEXT(cache);
+    }
+
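+    // Serialized layout shared by Load() and Write(), reconstructed from the code below
+    // (values are written in host byte order):
+    //   uint32_t header_size  -- 2 * sizeof(uint32_t) + VK_UUID_SIZE (24 bytes)
+    //   uint32_t version      -- VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT
+    //   uint8_t  uuid[VK_UUID_SIZE]  -- derived from SPIRV_TOOLS_COMMIT_ID
+    //   uint32_t hashes[]     -- one XXH32 hash per previously validated shader module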
+    void Load(VkValidationCacheCreateInfoEXT const *pCreateInfo) {
+        const auto headerSize = 2 * sizeof(uint32_t) + VK_UUID_SIZE;
+        auto size = headerSize;
+        if (!pCreateInfo->pInitialData || pCreateInfo->initialDataSize < size) return;
+
+        uint32_t const *data = (uint32_t const *)pCreateInfo->pInitialData;
+        if (data[0] != size) return;
+        if (data[1] != VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT) return;
+        uint8_t expected_uuid[VK_UUID_SIZE];
+        Sha1ToVkUuid(SPIRV_TOOLS_COMMIT_ID, expected_uuid);
+        if (memcmp(&data[2], expected_uuid, VK_UUID_SIZE) != 0) return;  // different version
+
+        data = (uint32_t const *)(reinterpret_cast<uint8_t const *>(data) + headerSize);
+
+        for (; size < pCreateInfo->initialDataSize; data++, size += sizeof(uint32_t)) {
+            good_shader_hashes.insert(*data);
+        }
+    }
+
+    void Write(size_t *pDataSize, void *pData) {
+        const auto headerSize = 2 * sizeof(uint32_t) + VK_UUID_SIZE;  // 4 bytes for header size + 4 bytes for version number + UUID
+        if (!pData) {
+            *pDataSize = headerSize + good_shader_hashes.size() * sizeof(uint32_t);
+            return;
+        }
+
+        if (*pDataSize < headerSize) {
+            *pDataSize = 0;
+            return;  // Too small for even the header!
+        }
+
+        uint32_t *out = (uint32_t *)pData;
+        size_t actualSize = headerSize;
+
+        // Write the header
+        *out++ = headerSize;
+        *out++ = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT;
+        Sha1ToVkUuid(SPIRV_TOOLS_COMMIT_ID, reinterpret_cast<uint8_t *>(out));
+        out = (uint32_t *)(reinterpret_cast<uint8_t *>(out) + VK_UUID_SIZE);
+
+        for (auto it = good_shader_hashes.begin(); it != good_shader_hashes.end() && actualSize < *pDataSize;
+             it++, out++, actualSize += sizeof(uint32_t)) {
+            *out = *it;
+        }
+
+        *pDataSize = actualSize;
+    }
+
+    void Merge(ValidationCache const *other) {
+        good_shader_hashes.reserve(good_shader_hashes.size() + other->good_shader_hashes.size());
+        for (auto h : other->good_shader_hashes) good_shader_hashes.insert(h);
+    }
+
+    static uint32_t MakeShaderHash(VkShaderModuleCreateInfo const *smci);
+
+    bool Contains(uint32_t hash) { return good_shader_hashes.count(hash) != 0; }
+
+    void Insert(uint32_t hash) { good_shader_hashes.insert(hash); }
+
+  private:
+    void Sha1ToVkUuid(const char *sha1_str, uint8_t *uuid) {
+        // Convert sha1_str from a hex string to binary. We only need VK_UUID_SIZE bytes of
+        // output, so pad with zeroes if the input string is shorter than that, and truncate
+        // if it's longer.
+        char padded_sha1_str[2 * VK_UUID_SIZE + 1] = {};  // 2 hex digits == 1 byte
+        std::strncpy(padded_sha1_str, sha1_str, 2 * VK_UUID_SIZE);
+
+        for (uint32_t i = 0; i < VK_UUID_SIZE; ++i) {
+            const char byte_str[] = {padded_sha1_str[2 * i + 0], padded_sha1_str[2 * i + 1], '\0'};
+            uuid[i] = static_cast<uint8_t>(std::strtoul(byte_str, nullptr, 16));
+        }
+    }
+};
+
+spirv_inst_iter FindEntrypoint(SHADER_MODULE_STATE const *src, char const *name, VkShaderStageFlagBits stageBits);
+
+// For some analyses, we need to know about all ids referenced by the static call tree of a particular entrypoint. This is
+// important for identifying the set of shader resources actually used by an entrypoint, for example.
+// Note: we only explore parts of the image which might actually contain ids we care about for the above analyses.
+//  - NOT the shader input/output interfaces.
+//
+// TODO: The set of interesting opcodes here was determined by eyeballing the SPIRV spec. It might be worth
+// converting parts of this to be generated from the machine-readable spec instead.
+std::unordered_set<uint32_t> MarkAccessibleIds(SHADER_MODULE_STATE const *src, spirv_inst_iter entrypoint);
+
+void ProcessExecutionModes(SHADER_MODULE_STATE const *src, const spirv_inst_iter &entrypoint, PIPELINE_STATE *pipeline);
+
+std::vector<std::pair<descriptor_slot_t, interface_var>> CollectInterfaceByDescriptorSlot(
+    debug_report_data const *report_data, SHADER_MODULE_STATE const *src, std::unordered_set<uint32_t> const &accessible_ids,
+    bool *has_writable_descriptor);
+
+uint32_t DescriptorTypeToReqs(SHADER_MODULE_STATE const *module, uint32_t type_id);
+
+#endif  // VULKAN_SHADER_VALIDATION_H
diff --git a/src/third_party/vulkan-validation-layers/src/layers/sparse_containers.h b/src/third_party/vulkan-validation-layers/src/layers/sparse_containers.h
new file mode 100644
index 0000000..e0ca83c
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/sparse_containers.h
@@ -0,0 +1,404 @@
+/* Copyright (c) 2019 The Khronos Group Inc.
+ * Copyright (c) 2019 Valve Corporation
+ * Copyright (c) 2019 LunarG, Inc.
+ * Copyright (C) 2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * John Zulauf <jzulauf@lunarg.com>
+ *
+ */
+#ifndef SPARSE_CONTAINERS_H_
+#define SPARSE_CONTAINERS_H_
+#define NOMINMAX
+#include <cassert>
+#include <memory>
+#include <unordered_map>
+#include <vector>
+
+namespace sparse_container {
+// SparseVector:
+//
+// Defines a sparse single-dimensional container which is targeted at four distinct use cases
+// 1) Large range of indices sparsely populated ("Sparse access" below)
+// 2) Large range of indices where all values are the same ("Sparse access" below)
+// 3) Large range of values densely populated (more than 1/4 full) ("Dense access" below)
+// 4) Small range of values where direct access is most efficient ("Dense access" below)
+//
+// Two update semantics are supported, based on kSetReplaces:
+//    true -- updates to already set (valid) indices replace current value
+//    false -- updates to already set (valid) indices are ignored.
+//
+// Theory of operation:
+//
+// When constructed, a SparseVector starts (based on its size relative to
+// kSparseThreshold) in either Sparse or Dense access mode.
+//
+// In "Sparse access" mode individual values are stored in a map keyed
+// by the index.  A "full range" value (if set) defines the value of all
+// entries not present in the map. Setting a full range value via
+//
+//     SetRange(range_min, range_max, full_range_value )
+//
+// either clears the map (kSetReplaces==true) or prevents further
+// updates to the vector (kSetReplaces==false).  If the map becomes
+// more than 1/kConversionThreshold (=4) full, the SparseVector is
+// converted into "Dense access" mode. Entries are copied from map,
+// with non-present indices set to the default value (kDefaultValue)
+// or the full range value (if present).
+//
+// In "Dense access" mode, values are stored in a vector the size of
+// the valid range, indexed by the incoming index value minus range_min_.
+// The same update semantics apply, based on kSetReplaces.
+//
+// Note that when kSparseThreshold is zero, the map is always in "Dense access" mode.
+//
+// Access:
+//
+// NOTE all "end" indices (in construction or access) are *exclusive*.
+//
+// Given the variable semantics and effective compression of Sparse
+// access mode, all access is through Get, Set, and SetRange functions
+// and a constant iterator. Get returns either the value found (using
+// the current access mode) or the kDefaultValue. Set and SetRange
+// return whether or not state was updated, in order to support dirty
+// bit updates for any dependent state.
+//
+// The iterator ConstIterator provides basic, "by value" access. The
+// "by value" nature of the access reflects the compressed storage.
+// Operators *, ++, ==, and != are provided, with the latter two only
+// suitable for comparisons vs. cend(). The iterator skips all
+// kDefaultValue entries in either access mode, returning a std::pair
+// containing {IndexType, ValueType}. The multiple access modes give
+// the iterator a bit more complexity than is optimal, but hides the
+// underlying complexity from the callers.
+//
+// TODO: Update iterator to use a reference (likely using
+// reference_wrapper...)
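+//
+// Illustrative usage (a sketch only; `attachment_count` and the alias below are
+// hypothetical and are not used elsewhere in this file):
+//
+//     using FirstUseMap = SparseVector<uint32_t, bool, /*kSetReplaces=*/false, /*kDefaultValue=*/false>;
+//     FirstUseMap first_use(0, attachment_count);     // valid indices are [0, attachment_count)
+//     first_use.Set(3, true);                         // returns true: state changed
+//     bool seen = first_use.Get(3);                   // true
+//     first_use.SetRange(0, attachment_count, true);  // kSetReplaces==false: already-set entries are not replaced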
+
+template <typename IndexType_, typename T, bool kSetReplaces, T kDefaultValue = T(), size_t kSparseThreshold = 16>
+class SparseVector {
+  public:
+    typedef IndexType_ IndexType;
+    typedef T value_type;
+    typedef value_type ValueType;
+    typedef std::unordered_map<IndexType, ValueType> SparseType;
+    typedef std::vector<ValueType> DenseType;
+
+    SparseVector(IndexType start, IndexType end)
+        : range_min_(start), range_max_(end), threshold_((end - start) / kConversionThreshold) {
+        assert(end > start);
+        Reset();
+    }
+
+    // Initial access mode is set based on range size vs. kSparseThreshold.  Either sparse_ or dense_ is always set, but only
+    // ever one at a time
+    void Reset() {
+        has_full_range_value_ = false;
+        full_range_value_ = kDefaultValue;
+        size_t count = range_max_ - range_min_;
+        if (kSparseThreshold > 0 && (count > kSparseThreshold)) {
+            sparse_.reset(new SparseType());
+            dense_.reset();
+        } else {
+            sparse_.reset();
+            dense_.reset(new DenseType(count, kDefaultValue));
+        }
+    }
+
+    const ValueType &Get(const IndexType index) const {
+        // Note that here (and similarly below) the 'IsSparse' clause is
+        // eliminated as dead code in release builds if kSparseThreshold==0.
+        if (IsSparse()) {
+            if (!sparse_->empty()) {  // Don't attempt lookup in empty map
+                auto it = sparse_->find(index);
+                if (it != sparse_->cend()) {
+                    return it->second;
+                }
+            }
+            // If there is a full_range_value_, return it; if there isn't one, full_range_value_ holds kDefaultValue,
+            // so it is still the correct value to return.
+            return full_range_value_;
+        } else {
+            // Direct access
+            assert(dense_.get());
+            const ValueType &value = (*dense_)[index - range_min_];
+            return value;
+        }
+    }
+
+    // Set an index's value. Depending on the access mode, update semantics are enforced within the access-mode-specific function.
+    bool Set(const IndexType index, const ValueType &value) {
+        bool updated = false;
+        if (IsSparse()) {
+            updated = SetSparse(index, value);
+        } else {
+            assert(dense_.get());
+            updated = SetDense(index, value);
+        }
+        return updated;
+    }
+
+    // Set a range of values based on the access mode, with some update semantics applied at the range level
+    bool SetRange(const IndexType start, IndexType end, ValueType value) {
+        bool updated = false;
+        if (IsSparse()) {
+            if (!kSetReplaces && HasFullRange()) return false;  // We have full coverage, we can change this no more
+
+            bool is_full_range = IsFullRange(start, end);
+            if (kSetReplaces && is_full_range) {
+                updated = value != full_range_value_;
+                full_range_value_ = value;
+                if (HasSparseSubranges()) {
+                    updated = true;
+                    sparse_->clear();  // full range replaces all subranges
+                }
+                // Track whether full_range_value_ is non-default in has_full_range_value_ to avoid ValueType comparisons later
+                has_full_range_value_ = value != kDefaultValue;
+            } else if (!kSetReplaces && (value != kDefaultValue) && is_full_range && !HasFullRange()) {
+                // With the update only invalid semantics, the value becomes the fallback, and will prevent other updates
+                full_range_value_ = value;
+                has_full_range_value_ = true;
+                updated = true;
+                // Clean up the sparse map a bit
+                for (auto it = sparse_->begin(); it != sparse_->end();) {  // no increment clause because of erase below
+                    if (it->second == value) {
+                        it = sparse_->erase(it);  // remove redundant entries
+                    } else {
+                        ++it;
+                    }
+                }
+            } else {
+                for (IndexType index = start; index < end; ++index) {
+                    // NOTE: We can't use SetSparse here, because this may be converted to dense access mid update
+                    updated |= Set(index, value);
+                }
+            }
+        } else {
+            // Note that "Dense Access" does away with the full_range_value_ logic, storing empty entries using kDefaultValue
+            assert(dense_);
+            for (IndexType index = start; index < end; ++index) {
+                updated = SetDense(index, value);
+            }
+        }
+        return updated;
+    }
+
+    // Set only the non-default values from another sparse vector
+    bool Merge(const SparseVector &from) {
+        // Must not merge from a sparse array with larger bounds...
+        assert((range_min_ <= from.range_min_) && (range_max_ >= from.range_max_));
+        bool updated = false;
+        if (from.IsSparse()) {
+            if (from.HasFullRange() && !from.HasSparseSubranges()) {
+                // Short cut to copy a full range if that's all we have
+                updated |= SetRange(from.range_min_, from.range_max_, from.full_range_value_);
+            } else {
+                // Have to do it the complete (potentially) slow way
+                // TODO add sorted keys to iterator to reduce hash lookups
+                for (auto it = from.cbegin(); it != from.cend(); ++it) {
+                    const IndexType index = (*it).first;
+                    const ValueType &value = (*it).second;
+                    Set(index, value);
+                }
+            }
+        } else {
+            assert(from.dense_);
+            DenseType &ray = *from.dense_;
+            for (IndexType entry = from.range_min_; entry < from.range_max_; ++entry) {
+                IndexType index = entry - from.range_min_;
+                if (ray[index] != kDefaultValue) {
+                    updated |= Set(entry, ray[index]);
+                }
+            }
+        }
+        return updated;
+    }
+
+    friend class ConstIterator;
+    class ConstIterator {
+      public:
+        using SparseType = typename SparseVector::SparseType;
+        using SparseIterator = typename SparseType::const_iterator;
+        using IndexType = typename SparseVector::IndexType;
+        using ValueType = typename SparseVector::ValueType;
+        using IteratorValueType = std::pair<IndexType, ValueType>;
+        const IteratorValueType &operator*() const { return current_value_; }
+
+        ConstIterator &operator++() {
+            if (delegated_) {  // implies sparse
+                ++it_sparse_;
+                if (it_sparse_ == vec_->sparse_->cend()) {
+                    the_end_ = true;
+                    current_value_.first = vec_->range_max_;
+                    current_value_.second = SparseVector::DefaultValue();
+                } else {
+                    current_value_.first = it_sparse_->first;
+                    current_value_.second = it_sparse_->second;
+                }
+            } else {
+                index_++;
+                SetCurrentValue();
+            }
+            return *this;
+        }
+        bool operator!=(const ConstIterator &rhs) const {
+            return (the_end_ != rhs.the_end_);  // Just good enough for cend checks
+        }
+
+        bool operator==(const ConstIterator &rhs) const {
+            return (the_end_ == rhs.the_end_);  // Just good enough for cend checks
+        }
+
+        // The iterator has two modes:
+        //     delegated:
+        //         where we are in sparse access mode and have no full_range_value
+        //         and thus can delegate our iteration to underlying map
+        //     non-delegated:
+        //         either dense mode or we have a full range value and thus
+        //         must iterate over the whole range
+        ConstIterator(const SparseVector &vec) : vec_(&vec) {
+            if (!vec_->IsSparse() || vec_->HasFullRange()) {
+                // Must iterate over the entire range, skipping invalid entries (in the case of dense access)
+                delegated_ = false;
+                index_ = vec_->range_min_;
+                SetCurrentValue();  // Skips invalid and sets the_end_
+            } else if (vec_->HasSparseSubranges()) {
+                // The subranges store the non-default values... and there is no full range value
+                delegated_ = true;
+                it_sparse_ = vec_->sparse_->cbegin();
+                current_value_.first = it_sparse_->first;
+                current_value_.second = it_sparse_->second;
+                the_end_ = false;  // the sparse map is non-empty (per HasSparseSubranges() above)
+            } else {
+                // Sparse, but with no subranges
+                the_end_ = true;
+            }
+        }
+
+        ConstIterator() : vec_(nullptr), the_end_(true) {}
+
+      protected:
+        const SparseVector *vec_;
+        bool the_end_;
+        SparseIterator it_sparse_;
+        bool delegated_;
+        IndexType index_;
+        ValueType value_;
+
+        IteratorValueType current_value_;
+
+        // in the non-delegated case we use normal accessors and skip default values.
+        void SetCurrentValue() {
+            the_end_ = true;
+            while (index_ < vec_->range_max_) {
+                value_ = vec_->Get(index_);
+                if (value_ != SparseVector::DefaultValue()) {
+                    the_end_ = false;
+                    current_value_ = IteratorValueType(index_, value_);
+                    break;
+                }
+                index_++;
+            }
+        }
+    };
+    typedef ConstIterator const_iterator;
+
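+    // Illustrative usage sketch (editorial note, not part of the original header; 'vec' stands for any
+    // SparseVector instance): iteration via cbegin()/cend() is intended to visit only the non-default
+    // entries, regardless of the current storage mode, e.g.
+    //     for (auto it = vec.cbegin(); it != vec.cend(); ++it) {
+    //         const IndexType index = (*it).first;    // absolute index within [RangeMin(), RangeMax())
+    //         const ValueType &value = (*it).second;
+    //     }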
+    ConstIterator cbegin() const { return ConstIterator(*this); }
+    ConstIterator cend() const { return ConstIterator(); }
+
+    IndexType RangeMax() const { return range_max_; }
+    IndexType RangeMin() const { return range_min_; }
+
+    static const unsigned kConversionThreshold = 4;
+    const IndexType range_min_;  // inclusive lower bound of the index range
+    const IndexType range_max_;  // exclusive upper bound of the index range
+    const IndexType threshold_;  // sparse entry count above which storage is converted to dense
+
+    // Data for sparse mode
+    // We have a short cut for full range values when in sparse mode
+    bool has_full_range_value_;
+    ValueType full_range_value_;
+    std::unique_ptr<SparseType> sparse_;
+
+    // Data for dense mode
+    std::unique_ptr<DenseType> dense_;
+
+    static const ValueType &DefaultValue() {
+        static ValueType value = kDefaultValue;
+        return value;
+    }
+    // Note that IsSparse is compile-time reducible if kSparseThreshold is zero...
+    inline bool IsSparse() const { return kSparseThreshold > 0 && sparse_.get(); }
+    bool IsFullRange(IndexType start, IndexType end) const { return (start == range_min_) && (end == range_max_); }
+    bool IsFullRangeValue(const ValueType &value) const { return has_full_range_value_ && (value == full_range_value_); }
+    bool HasFullRange() const { return IsSparse() && has_full_range_value_; }
+    bool HasSparseSubranges() const { return IsSparse() && !sparse_->empty(); }
+
+    // This is called unconditionally, to encapsulate the conversion criteria and logic here
+    void SparseToDenseConversion() {
+        // If the number of sparse entries exceeds the threshold, convert to dense_, seeding it with the full range value (if any)
+        if (IsSparse() && (sparse_->size() > threshold_)) {
+            ValueType default_value = HasFullRange() ? full_range_value_ : kDefaultValue;
+            dense_.reset(new DenseType((range_max_ - range_min_), default_value));
+            DenseType &ray = *dense_;
+            for (auto const &item : *sparse_) {
+                ray[item.first - range_min_] = item.second;
+            }
+            sparse_.reset();
+            has_full_range_value_ = false;
+        }
+    }
+
+    // Dense access mode setter; implements the kSetReplaces (overwrite vs. write-once) update semantics
+    bool SetDense(IndexType index, const ValueType &value) {
+        bool updated = false;
+        ValueType &current_value = (*dense_)[index - range_min_];
+        if ((kSetReplaces || current_value == kDefaultValue) && (value != current_value)) {
+            current_value = value;
+            updated = true;
+        }
+        return updated;
+    }
+
+    // Sparse access mode setter; handles the full range value and implements the kSetReplaces update semantics
+    bool SetSparse(IndexType index, const ValueType &value) {
+        if (!kSetReplaces && HasFullRange()) {
+            return false;  // We have full coverage, we can change this no more
+        }
+
+        if (kSetReplaces && IsFullRangeValue(value) && HasSparseSubranges()) {
+            auto erasure = sparse_->erase(index);  // Remove duplicate record from map
+            return erasure > 0;
+        }
+
+        // Use insert to reduce the number of hash lookups
+        auto map_pair = std::make_pair(index, value);
+        auto insert_pair = sparse_->insert(map_pair);
+        auto &it = insert_pair.first;  // use references to avoid nested pair accesses
+        const bool inserted = insert_pair.second;
+        bool updated = false;
+        if (inserted) {
+            updated = true;
+            SparseToDenseConversion();
+        } else if (kSetReplaces && value != it->second) {
+            // Only replace value if semantics allow it and it has changed.
+            it->second = value;
+            updated = true;
+        }
+        return updated;
+    }
+};
+
+}  // namespace sparse_container
+#endif
diff --git a/src/third_party/vulkan-validation-layers/src/layers/state_tracker.cpp b/src/third_party/vulkan-validation-layers/src/layers/state_tracker.cpp
new file mode 100644
index 0000000..af1f33f
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/state_tracker.cpp
@@ -0,0 +1,4661 @@
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (C) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ * Shannon McPherson <shannon@lunarg.com>
+ */
+
+// Allow use of STL min and max functions in Windows
+#define NOMINMAX
+
+#include <cmath>
+#include <set>
+#include <sstream>
+#include <string>
+
+#include "vk_enum_string_helper.h"
+#include "vk_format_utils.h"
+#include "vk_layer_data.h"
+#include "vk_layer_utils.h"
+#include "vk_layer_logging.h"
+#include "vk_typemap_helper.h"
+
+#include "chassis.h"
+#include "state_tracker.h"
+#include "shader_validation.h"
+
+using std::max;
+using std::string;
+using std::stringstream;
+using std::unique_ptr;
+using std::unordered_map;
+using std::unordered_set;
+using std::vector;
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+// Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
+// This could also move into a separate core_validation_android.cpp file... ?
+
+void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {
+    const VkExternalMemoryImageCreateInfo *emici = lvl_find_in_chain<VkExternalMemoryImageCreateInfo>(create_info->pNext);
+    if (emici && (emici->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
+        is_node->imported_ahb = true;
+    }
+    const VkExternalFormatANDROID *ext_fmt_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
+    if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
+        is_node->has_ahb_format = true;
+        is_node->ahb_format = ext_fmt_android->externalFormat;
+    }
+}
+
+void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
+                                                                       VkSamplerYcbcrConversion ycbcr_conversion) {
+    const VkExternalFormatANDROID *ext_format_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
+    if (ext_format_android && (0 != ext_format_android->externalFormat)) {
+        ycbcr_conversion_ahb_fmt_map.emplace(ycbcr_conversion, ext_format_android->externalFormat);
+    }
+};
+
+void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion) {
+    ycbcr_conversion_ahb_fmt_map.erase(ycbcr_conversion);
+};
+
+#else
+
+void ValidationStateTracker::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {}
+
+void ValidationStateTracker::RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo *create_info,
+                                                                       VkSamplerYcbcrConversion ycbcr_conversion){};
+
+void ValidationStateTracker::RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion){};
+
+#endif  // VK_USE_PLATFORM_ANDROID_KHR
+
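+// Record state for a newly created image: capture Android external-format and swapchain-binding info from
+// the pNext chain and, where possible, prefetch and cache the image's memory requirements.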
+void ValidationStateTracker::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
+                                                       const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
+    if (VK_SUCCESS != result) return;
+    auto is_node = std::make_shared<IMAGE_STATE>(*pImage, pCreateInfo);
+    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
+        RecordCreateImageANDROID(pCreateInfo, is_node.get());
+    }
+    const auto swapchain_info = lvl_find_in_chain<VkImageSwapchainCreateInfoKHR>(pCreateInfo->pNext);
+    if (swapchain_info) {
+        is_node->create_from_swapchain = swapchain_info->swapchain;
+    }
+
+    bool pre_fetch_memory_reqs = true;
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    if (is_node->external_format_android) {
+        // Do not fetch requirements for external memory images
+        pre_fetch_memory_reqs = false;
+    }
+#endif
+    // Record the memory requirements in case they won't be queried
+    if (pre_fetch_memory_reqs) {
+        DispatchGetImageMemoryRequirements(device, *pImage, &is_node->requirements);
+    }
+    imageMap.insert(std::make_pair(*pImage, std::move(is_node)));
+}
+
+void ValidationStateTracker::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
+    if (!image) return;
+    IMAGE_STATE *image_state = GetImageState(image);
+    const VulkanTypedHandle obj_struct(image, kVulkanObjectTypeImage);
+    InvalidateCommandBuffers(image_state->cb_bindings, obj_struct);
+    // Clean up memory mapping, bindings and range references for image
+    for (auto mem_binding : image_state->GetBoundMemory()) {
+        auto mem_info = GetDevMemState(mem_binding);
+        if (mem_info) {
+            RemoveImageMemoryRange(image, mem_info);
+        }
+    }
+    if (image_state->bind_swapchain) {
+        auto swapchain = GetSwapchainState(image_state->bind_swapchain);
+        if (swapchain) {
+            swapchain->images[image_state->bind_swapchain_imageIndex].bound_images.erase(image_state->image);
+        }
+    }
+    RemoveAliasingImage(image_state);
+    ClearMemoryObjectBindings(obj_struct);
+    image_state->destroyed = true;
+    // Remove image from imageMap
+    imageMap.erase(image);
+}
+
+void ValidationStateTracker::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
+                                                             VkImageLayout imageLayout, const VkClearColorValue *pColor,
+                                                             uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
+    auto cb_node = GetCBState(commandBuffer);
+    auto image_state = GetImageState(image);
+    if (cb_node && image_state) {
+        AddCommandBufferBindingImage(cb_node, image_state);
+    }
+}
+
+void ValidationStateTracker::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
+                                                                    VkImageLayout imageLayout,
+                                                                    const VkClearDepthStencilValue *pDepthStencil,
+                                                                    uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
+    auto cb_node = GetCBState(commandBuffer);
+    auto image_state = GetImageState(image);
+    if (cb_node && image_state) {
+        AddCommandBufferBindingImage(cb_node, image_state);
+    }
+}
+
+void ValidationStateTracker::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
+                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
+                                                       uint32_t regionCount, const VkImageCopy *pRegions) {
+    auto cb_node = GetCBState(commandBuffer);
+    auto src_image_state = GetImageState(srcImage);
+    auto dst_image_state = GetImageState(dstImage);
+
+    // Update bindings between images and cmd buffer
+    AddCommandBufferBindingImage(cb_node, src_image_state);
+    AddCommandBufferBindingImage(cb_node, dst_image_state);
+}
+
+void ValidationStateTracker::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
+                                                          VkImageLayout srcImageLayout, VkImage dstImage,
+                                                          VkImageLayout dstImageLayout, uint32_t regionCount,
+                                                          const VkImageResolve *pRegions) {
+    auto cb_node = GetCBState(commandBuffer);
+    auto src_image_state = GetImageState(srcImage);
+    auto dst_image_state = GetImageState(dstImage);
+
+    // Update bindings between images and cmd buffer
+    AddCommandBufferBindingImage(cb_node, src_image_state);
+    AddCommandBufferBindingImage(cb_node, dst_image_state);
+}
+
+void ValidationStateTracker::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
+                                                       VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
+                                                       uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) {
+    auto cb_node = GetCBState(commandBuffer);
+    auto src_image_state = GetImageState(srcImage);
+    auto dst_image_state = GetImageState(dstImage);
+
+    // Update bindings between images and cmd buffer
+    AddCommandBufferBindingImage(cb_node, src_image_state);
+    AddCommandBufferBindingImage(cb_node, dst_image_state);
+}
+
+void ValidationStateTracker::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
+                                                        const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer,
+                                                        VkResult result) {
+    if (result != VK_SUCCESS) return;
+    // TODO : This doesn't create deep copy of pQueueFamilyIndices so need to fix that if/when we want that data to be valid
+    auto buffer_state = std::make_shared<BUFFER_STATE>(*pBuffer, pCreateInfo);
+
+    // Get a set of requirements in the case the app does not
+    DispatchGetBufferMemoryRequirements(device, *pBuffer, &buffer_state->requirements);
+
+    bufferMap.insert(std::make_pair(*pBuffer, std::move(buffer_state)));
+}
+
+void ValidationStateTracker::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
+                                                            const VkAllocationCallbacks *pAllocator, VkBufferView *pView,
+                                                            VkResult result) {
+    if (result != VK_SUCCESS) return;
+    auto buffer_state = GetBufferShared(pCreateInfo->buffer);
+    bufferViewMap[*pView] = std::make_shared<BUFFER_VIEW_STATE>(buffer_state, *pView, pCreateInfo);
+}
+
+void ValidationStateTracker::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
+                                                           const VkAllocationCallbacks *pAllocator, VkImageView *pView,
+                                                           VkResult result) {
+    if (result != VK_SUCCESS) return;
+    auto image_state = GetImageShared(pCreateInfo->image);
+    imageViewMap[*pView] = std::make_shared<IMAGE_VIEW_STATE>(image_state, *pView, pCreateInfo);
+}
+
+void ValidationStateTracker::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
+                                                        uint32_t regionCount, const VkBufferCopy *pRegions) {
+    auto cb_node = GetCBState(commandBuffer);
+    auto src_buffer_state = GetBufferState(srcBuffer);
+    auto dst_buffer_state = GetBufferState(dstBuffer);
+
+    // Update bindings between buffers and cmd buffer
+    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
+    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
+}
+
+void ValidationStateTracker::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView,
+                                                           const VkAllocationCallbacks *pAllocator) {
+    IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
+    if (!image_view_state) return;
+    const VulkanTypedHandle obj_struct(imageView, kVulkanObjectTypeImageView);
+
+    // Any bound cmd buffers are now invalid
+    InvalidateCommandBuffers(image_view_state->cb_bindings, obj_struct);
+    image_view_state->destroyed = true;
+    imageViewMap.erase(imageView);
+}
+
+void ValidationStateTracker::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
+    if (!buffer) return;
+    auto buffer_state = GetBufferState(buffer);
+    const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);
+
+    InvalidateCommandBuffers(buffer_state->cb_bindings, obj_struct);
+    for (auto mem_binding : buffer_state->GetBoundMemory()) {
+        auto mem_info = GetDevMemState(mem_binding);
+        if (mem_info) {
+            RemoveBufferMemoryRange(buffer, mem_info);
+        }
+    }
+    ClearMemoryObjectBindings(obj_struct);
+    buffer_state->destroyed = true;
+    bufferMap.erase(buffer_state->buffer);
+}
+
+void ValidationStateTracker::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView,
+                                                            const VkAllocationCallbacks *pAllocator) {
+    if (!bufferView) return;
+    auto buffer_view_state = GetBufferViewState(bufferView);
+    const VulkanTypedHandle obj_struct(bufferView, kVulkanObjectTypeBufferView);
+
+    // Any bound cmd buffers are now invalid
+    InvalidateCommandBuffers(buffer_view_state->cb_bindings, obj_struct);
+    buffer_view_state->destroyed = true;
+    bufferViewMap.erase(bufferView);
+}
+
+void ValidationStateTracker::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
+                                                        VkDeviceSize size, uint32_t data) {
+    auto cb_node = GetCBState(commandBuffer);
+    auto buffer_state = GetBufferState(dstBuffer);
+    // Update bindings between buffer and cmd buffer
+    AddCommandBufferBindingBuffer(cb_node, buffer_state);
+}
+
+void ValidationStateTracker::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
+                                                               VkImageLayout srcImageLayout, VkBuffer dstBuffer,
+                                                               uint32_t regionCount, const VkBufferImageCopy *pRegions) {
+    auto cb_node = GetCBState(commandBuffer);
+    auto src_image_state = GetImageState(srcImage);
+    auto dst_buffer_state = GetBufferState(dstBuffer);
+
+    // Update bindings between buffer/image and cmd buffer
+    AddCommandBufferBindingImage(cb_node, src_image_state);
+    AddCommandBufferBindingBuffer(cb_node, dst_buffer_state);
+}
+
+void ValidationStateTracker::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
+                                                               VkImageLayout dstImageLayout, uint32_t regionCount,
+                                                               const VkBufferImageCopy *pRegions) {
+    auto cb_node = GetCBState(commandBuffer);
+    auto src_buffer_state = GetBufferState(srcBuffer);
+    auto dst_image_state = GetImageState(dstImage);
+
+    AddCommandBufferBindingBuffer(cb_node, src_buffer_state);
+    AddCommandBufferBindingImage(cb_node, dst_image_state);
+}
+
+// Get the image viewstate for a given framebuffer attachment
+IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(FRAMEBUFFER_STATE *framebuffer, uint32_t index) {
+    if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return nullptr;
+    assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
+    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
+    return GetImageViewState(image_view);
+}
+
+// Get the image viewstate for a given framebuffer attachment
+const IMAGE_VIEW_STATE *ValidationStateTracker::GetAttachmentImageViewState(const FRAMEBUFFER_STATE *framebuffer,
+                                                                            uint32_t index) const {
+    if (framebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return nullptr;
+    assert(framebuffer && (index < framebuffer->createInfo.attachmentCount));
+    const VkImageView &image_view = framebuffer->createInfo.pAttachments[index];
+    return GetImageViewState(image_view);
+}
+
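+// Record mutual aliasing between images created with VK_IMAGE_CREATE_ALIAS_BIT that are bound to the same
+// device memory (or to the same swapchain image slot).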
+void ValidationStateTracker::AddAliasingImage(IMAGE_STATE *image_state) {
+    if (!(image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT)) return;
+    std::unordered_set<VkImage> *bound_images = nullptr;
+
+    if (image_state->bind_swapchain) {
+        auto swapchain_state = GetSwapchainState(image_state->bind_swapchain);
+        if (swapchain_state) {
+            bound_images = &swapchain_state->images[image_state->bind_swapchain_imageIndex].bound_images;
+        }
+    } else {
+        auto mem_state = GetDevMemState(image_state->binding.mem);
+        if (mem_state) {
+            bound_images = &mem_state->bound_images;
+        }
+    }
+
+    if (bound_images) {
+        for (const auto &handle : *bound_images) {
+            if (handle != image_state->image) {
+                auto is = GetImageState(handle);
+                if (is && is->IsCompatibleAliasing(image_state)) {
+                    auto inserted = is->aliasing_images.emplace(image_state->image);
+                    if (inserted.second) {
+                        image_state->aliasing_images.emplace(handle);
+                    }
+                }
+            }
+        }
+    }
+}
+
+void ValidationStateTracker::RemoveAliasingImage(IMAGE_STATE *image_state) {
+    for (const auto &image : image_state->aliasing_images) {
+        auto is = GetImageState(image);
+        if (is) {
+            is->aliasing_images.erase(image_state->image);
+        }
+    }
+    image_state->aliasing_images.clear();
+}
+
+void ValidationStateTracker::RemoveAliasingImages(const std::unordered_set<VkImage> &bound_images) {
+    // This is a one-way clear. Because bound_images contains cross references, this single pass clears every
+    // reference; a two-way clear is not needed.
+    for (const auto &handle : bound_images) {
+        auto is = GetImageState(handle);
+        if (is) {
+            is->aliasing_images.clear();
+        }
+    }
+}
+
+const EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) const {
+    auto it = eventMap.find(event);
+    if (it == eventMap.end()) {
+        return nullptr;
+    }
+    return &it->second;
+}
+
+EVENT_STATE *ValidationStateTracker::GetEventState(VkEvent event) {
+    auto it = eventMap.find(event);
+    if (it == eventMap.end()) {
+        return nullptr;
+    }
+    return &it->second;
+}
+
+const QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) const {
+    auto it = queueMap.find(queue);
+    if (it == queueMap.cend()) {
+        return nullptr;
+    }
+    return &it->second;
+}
+
+QUEUE_STATE *ValidationStateTracker::GetQueueState(VkQueue queue) {
+    auto it = queueMap.find(queue);
+    if (it == queueMap.end()) {
+        return nullptr;
+    }
+    return &it->second;
+}
+
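+// Look up physical device state in this object's map, falling back to the instance-level map when the
+// device-level map has not been populated.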
+const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) const {
+    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
+    auto it = phys_dev_map->find(phys);
+    if (it == phys_dev_map->end()) {
+        return nullptr;
+    }
+    return &it->second;
+}
+
+PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState(VkPhysicalDevice phys) {
+    auto *phys_dev_map = ((physical_device_map.size() > 0) ? &physical_device_map : &instance_state->physical_device_map);
+    auto it = phys_dev_map->find(phys);
+    if (it == phys_dev_map->end()) {
+        return nullptr;
+    }
+    return &it->second;
+}
+
+PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() { return physical_device_state; }
+const PHYSICAL_DEVICE_STATE *ValidationStateTracker::GetPhysicalDeviceState() const { return physical_device_state; }
+
+// Return ptr to memory binding for given handle of specified type
+template <typename State, typename Result>
+static Result GetObjectMemBindingImpl(State state, const VulkanTypedHandle &typed_handle) {
+    switch (typed_handle.type) {
+        case kVulkanObjectTypeImage:
+            return state->GetImageState(typed_handle.Cast<VkImage>());
+        case kVulkanObjectTypeBuffer:
+            return state->GetBufferState(typed_handle.Cast<VkBuffer>());
+        case kVulkanObjectTypeAccelerationStructureNV:
+            return state->GetAccelerationStructureState(typed_handle.Cast<VkAccelerationStructureNV>());
+        default:
+            break;
+    }
+    return nullptr;
+}
+
+const BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) const {
+    return GetObjectMemBindingImpl<const ValidationStateTracker *, const BINDABLE *>(this, typed_handle);
+}
+
+BINDABLE *ValidationStateTracker::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
+    return GetObjectMemBindingImpl<ValidationStateTracker *, BINDABLE *>(this, typed_handle);
+}
+
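+// Create DEVICE_MEMORY_STATE for a new allocation, noting any dedicated-allocation or export info found in
+// the pNext chain.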
+void ValidationStateTracker::AddMemObjInfo(void *object, const VkDeviceMemory mem, const VkMemoryAllocateInfo *pAllocateInfo) {
+    assert(object != NULL);
+
+    memObjMap[mem] = std::make_shared<DEVICE_MEMORY_STATE>(object, mem, pAllocateInfo);
+    auto mem_info = memObjMap[mem].get();
+
+    auto dedicated = lvl_find_in_chain<VkMemoryDedicatedAllocateInfoKHR>(pAllocateInfo->pNext);
+    if (dedicated) {
+        mem_info->is_dedicated = true;
+        mem_info->dedicated_buffer = dedicated->buffer;
+        mem_info->dedicated_image = dedicated->image;
+    }
+    auto export_info = lvl_find_in_chain<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
+    if (export_info) {
+        mem_info->is_export = true;
+        mem_info->export_handle_type_flags = export_info->handleTypes;
+    }
+}
+
+// Create binding link between given sampler and command buffer node
+void ValidationStateTracker::AddCommandBufferBindingSampler(CMD_BUFFER_STATE *cb_node, SAMPLER_STATE *sampler_state) {
+    if (disabled.command_buffer_state) {
+        return;
+    }
+    AddCommandBufferBinding(sampler_state->cb_bindings,
+                            VulkanTypedHandle(sampler_state->sampler, kVulkanObjectTypeSampler, sampler_state), cb_node);
+}
+
+// Create binding link between given image node and command buffer node
+void ValidationStateTracker::AddCommandBufferBindingImage(CMD_BUFFER_STATE *cb_node, IMAGE_STATE *image_state) {
+    if (disabled.command_buffer_state) {
+        return;
+    }
+    // Skip validation if this image was created through WSI
+    if (image_state->create_from_swapchain == VK_NULL_HANDLE) {
+        // First update cb binding for image
+        if (AddCommandBufferBinding(image_state->cb_bindings,
+                                    VulkanTypedHandle(image_state->image, kVulkanObjectTypeImage, image_state), cb_node)) {
+            // Now update CB binding in MemObj mini CB list
+            for (auto mem_binding : image_state->GetBoundMemory()) {
+                DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
+                if (pMemInfo) {
+                    // Now update CBInfo's Mem reference list
+                    AddCommandBufferBinding(pMemInfo->cb_bindings,
+                                            VulkanTypedHandle(mem_binding, kVulkanObjectTypeDeviceMemory, pMemInfo), cb_node);
+                }
+            }
+        }
+    }
+}
+
+// Create binding link between given image view node and its image with command buffer node
+void ValidationStateTracker::AddCommandBufferBindingImageView(CMD_BUFFER_STATE *cb_node, IMAGE_VIEW_STATE *view_state) {
+    if (disabled.command_buffer_state) {
+        return;
+    }
+    // First add bindings for imageView
+    if (AddCommandBufferBinding(view_state->cb_bindings,
+                                VulkanTypedHandle(view_state->image_view, kVulkanObjectTypeImageView, view_state), cb_node)) {
+        // Only need to continue if this is a new item
+        auto image_state = view_state->image_state.get();
+        // Add bindings for image within imageView
+        if (image_state) {
+            AddCommandBufferBindingImage(cb_node, image_state);
+        }
+    }
+}
+
+// Create binding link between given buffer node and command buffer node
+void ValidationStateTracker::AddCommandBufferBindingBuffer(CMD_BUFFER_STATE *cb_node, BUFFER_STATE *buffer_state) {
+    if (disabled.command_buffer_state) {
+        return;
+    }
+    // First update cb binding for buffer
+    if (AddCommandBufferBinding(buffer_state->cb_bindings,
+                                VulkanTypedHandle(buffer_state->buffer, kVulkanObjectTypeBuffer, buffer_state), cb_node)) {
+        // Now update CB binding in MemObj mini CB list
+        for (auto mem_binding : buffer_state->GetBoundMemory()) {
+            DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
+            if (pMemInfo) {
+                // Now update CBInfo's Mem reference list
+                AddCommandBufferBinding(pMemInfo->cb_bindings,
+                                        VulkanTypedHandle(mem_binding, kVulkanObjectTypeDeviceMemory, pMemInfo), cb_node);
+            }
+        }
+    }
+}
+
+// Create binding link between given buffer view node and its buffer with command buffer node
+void ValidationStateTracker::AddCommandBufferBindingBufferView(CMD_BUFFER_STATE *cb_node, BUFFER_VIEW_STATE *view_state) {
+    if (disabled.command_buffer_state) {
+        return;
+    }
+    // First add bindings for bufferView
+    if (AddCommandBufferBinding(view_state->cb_bindings,
+                                VulkanTypedHandle(view_state->buffer_view, kVulkanObjectTypeBufferView, view_state), cb_node)) {
+        auto buffer_state = view_state->buffer_state.get();
+        // Add bindings for buffer within bufferView
+        if (buffer_state) {
+            AddCommandBufferBindingBuffer(cb_node, buffer_state);
+        }
+    }
+}
+
+// Create binding link between given acceleration structure and command buffer node
+void ValidationStateTracker::AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE *cb_node,
+                                                                          ACCELERATION_STRUCTURE_STATE *as_state) {
+    if (disabled.command_buffer_state) {
+        return;
+    }
+    if (AddCommandBufferBinding(
+            as_state->cb_bindings,
+            VulkanTypedHandle(as_state->acceleration_structure, kVulkanObjectTypeAccelerationStructureNV, as_state), cb_node)) {
+        // Now update CB binding in MemObj mini CB list
+        for (auto mem_binding : as_state->GetBoundMemory()) {
+            DEVICE_MEMORY_STATE *pMemInfo = GetDevMemState(mem_binding);
+            if (pMemInfo) {
+                // Now update CBInfo's Mem reference list
+                AddCommandBufferBinding(pMemInfo->cb_bindings,
+                                        VulkanTypedHandle(mem_binding, kVulkanObjectTypeDeviceMemory, pMemInfo), cb_node);
+            }
+        }
+    }
+}
+
+// Clear a single object binding from given memory object
+void ValidationStateTracker::ClearMemoryObjectBinding(const VulkanTypedHandle &typed_handle, VkDeviceMemory mem) {
+    DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
+    // This obj is bound to a memory object. Remove the reference to this object in that memory object's list
+    if (mem_info) {
+        mem_info->obj_bindings.erase(typed_handle);
+    }
+}
+
+// ClearMemoryObjectBindings clears the binding of objects to memory
+//  For the given object it pulls the memory bindings and makes sure that the bindings
+//  no longer refer to the object being cleared. This occurs when objects are destroyed.
+void ValidationStateTracker::ClearMemoryObjectBindings(const VulkanTypedHandle &typed_handle) {
+    BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
+    if (mem_binding) {
+        if (!mem_binding->sparse) {
+            ClearMemoryObjectBinding(typed_handle, mem_binding->binding.mem);
+        } else {  // Sparse, clear all bindings
+            for (auto &sparse_mem_binding : mem_binding->sparse_bindings) {
+                ClearMemoryObjectBinding(typed_handle, sparse_mem_binding.mem);
+            }
+        }
+    }
+}
+
+// SetMemBinding is used to establish an immutable, non-sparse binding between a single image/buffer object and a memory object.
+// Corresponding valid usage checks are in ValidateSetMemBinding().
+void ValidationStateTracker::SetMemBinding(VkDeviceMemory mem, BINDABLE *mem_binding, VkDeviceSize memory_offset,
+                                           const VulkanTypedHandle &typed_handle) {
+    assert(mem_binding);
+    mem_binding->binding.mem = mem;
+    mem_binding->UpdateBoundMemorySet();  // force recreation of cached set
+    mem_binding->binding.offset = memory_offset;
+    mem_binding->binding.size = mem_binding->requirements.size;
+
+    if (mem != VK_NULL_HANDLE) {
+        DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
+        if (mem_info) {
+            mem_info->obj_bindings.insert(typed_handle);
+            // For image objects, make sure default memory state is correctly set
+            // TODO : What's the best/correct way to handle this?
+            if (kVulkanObjectTypeImage == typed_handle.type) {
+                auto const image_state = reinterpret_cast<const IMAGE_STATE *>(mem_binding);
+                if (image_state) {
+                    VkImageCreateInfo ici = image_state->createInfo;
+                    if (ici.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
+                        // TODO::  More memory state transition stuff.
+                    }
+                }
+            }
+        }
+    }
+}
+
+// For the NULL mem case, clear any previous binding. Otherwise:
+//  Make sure the given object is in its object map
+//  If a previous binding existed, update the binding
+//  Add a reference from objectInfo to memoryInfo
+//  Add a reference off of the object's binding info
+// Returns a skip flag (currently always VK_FALSE)
+bool ValidationStateTracker::SetSparseMemBinding(MEM_BINDING binding, const VulkanTypedHandle &typed_handle) {
+    bool skip = VK_FALSE;
+    // Handle NULL case separately, just clear previous binding & decrement reference
+    if (binding.mem == VK_NULL_HANDLE) {
+        // TODO : This should cause the range of the resource to be unbound according to spec
+    } else {
+        BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
+        assert(mem_binding);
+        if (mem_binding) {  // Invalid handles are reported by object tracker, but Get returns NULL for them, so avoid SEGV here
+            assert(mem_binding->sparse);
+            DEVICE_MEMORY_STATE *mem_info = GetDevMemState(binding.mem);
+            if (mem_info) {
+                mem_info->obj_bindings.insert(typed_handle);
+                // Need to set mem binding for this object
+                mem_binding->sparse_bindings.insert(binding);
+                mem_binding->UpdateBoundMemorySet();
+            }
+        }
+    }
+    return skip;
+}
+
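+// Bind the resources of the currently bound descriptor sets to the command buffer, skipping sets whose
+// contents have not changed since they were last validated.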
+void ValidationStateTracker::UpdateDrawState(CMD_BUFFER_STATE *cb_state, const VkPipelineBindPoint bind_point) {
+    auto &state = cb_state->lastBound[bind_point];
+    PIPELINE_STATE *pPipe = state.pipeline_state;
+    if (VK_NULL_HANDLE != state.pipeline_layout) {
+        for (const auto &set_binding_pair : pPipe->active_slots) {
+            uint32_t setIndex = set_binding_pair.first;
+            // Pull the set node
+            cvdescriptorset::DescriptorSet *descriptor_set = state.per_set[setIndex].bound_descriptor_set;
+            if (!descriptor_set->IsPushDescriptor()) {
+                // For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding
+
+                // TODO: If recreating the reduced_map here shows up in profiling, need to find a way of sharing with the
+                // Validate pass.  Though in the case of "many" descriptors, typically the descriptor count >> binding count
+                cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
+                const auto &binding_req_map = reduced_map.FilteredMap(*cb_state, *pPipe);
+
+                if (reduced_map.IsManyDescriptors()) {
+                    // Only update validate binding tags if we meet the "many" criteria in the Prefilter class
+                    descriptor_set->UpdateValidationCache(*cb_state, *pPipe, binding_req_map);
+                }
+
+                // We can skip updating the state if "nothing" has changed since the last validation.
+                // See CoreChecks::ValidateCmdBufDrawState for more details.
+                bool descriptor_set_changed =
+                    !reduced_map.IsManyDescriptors() ||
+                    // Update if descriptor set (or contents) has changed
+                    state.per_set[setIndex].validated_set != descriptor_set ||
+                    state.per_set[setIndex].validated_set_change_count != descriptor_set->GetChangeCount() ||
+                    (!disabled.image_layout_validation &&
+                     state.per_set[setIndex].validated_set_image_layout_change_count != cb_state->image_layout_change_count);
+                bool need_update = descriptor_set_changed ||
+                                   // Update if previous bindingReqMap doesn't include new bindingReqMap
+                                   !std::includes(state.per_set[setIndex].validated_set_binding_req_map.begin(),
+                                                  state.per_set[setIndex].validated_set_binding_req_map.end(),
+                                                  binding_req_map.begin(), binding_req_map.end());
+
+                if (need_update) {
+                    // Bind this set and its active descriptor resources to the command buffer
+                    if (!descriptor_set_changed && reduced_map.IsManyDescriptors()) {
+                        // Only record the bindings that haven't already been recorded
+                        BindingReqMap delta_reqs;
+                        std::set_difference(binding_req_map.begin(), binding_req_map.end(),
+                                            state.per_set[setIndex].validated_set_binding_req_map.begin(),
+                                            state.per_set[setIndex].validated_set_binding_req_map.end(),
+                                            std::inserter(delta_reqs, delta_reqs.begin()));
+                        descriptor_set->UpdateDrawState(this, cb_state, pPipe, delta_reqs);
+                    } else {
+                        descriptor_set->UpdateDrawState(this, cb_state, pPipe, binding_req_map);
+                    }
+
+                    state.per_set[setIndex].validated_set = descriptor_set;
+                    state.per_set[setIndex].validated_set_change_count = descriptor_set->GetChangeCount();
+                    state.per_set[setIndex].validated_set_image_layout_change_count = cb_state->image_layout_change_count;
+                    if (reduced_map.IsManyDescriptors()) {
+                        // Check whether old == new before assigning, the equality check is much cheaper than
+                        // freeing and reallocating the map.
+                        if (state.per_set[setIndex].validated_set_binding_req_map != set_binding_pair.second) {
+                            state.per_set[setIndex].validated_set_binding_req_map = set_binding_pair.second;
+                        }
+                    } else {
+                        state.per_set[setIndex].validated_set_binding_req_map = BindingReqMap();
+                    }
+                }
+            }
+        }
+    }
+    if (!pPipe->vertex_binding_descriptions_.empty()) {
+        cb_state->vertex_buffer_used = true;
+    }
+}
+
+// Remove set from setMap and delete the set
+void ValidationStateTracker::FreeDescriptorSet(cvdescriptorset::DescriptorSet *descriptor_set) {
+    descriptor_set->destroyed = true;
+    const VulkanTypedHandle obj_struct(descriptor_set->GetSet(), kVulkanObjectTypeDescriptorSet);
+    // Any bound cmd buffers are now invalid
+    InvalidateCommandBuffers(descriptor_set->cb_bindings, obj_struct);
+
+    setMap.erase(descriptor_set->GetSet());
+}
+
+// Free all DS Pools including their Sets & related sub-structs
+// NOTE : Calls to this function should be wrapped in mutex
+void ValidationStateTracker::DeleteDescriptorSetPools() {
+    for (auto ii = descriptorPoolMap.begin(); ii != descriptorPoolMap.end();) {
+        // Remove this pools' sets from setMap and delete them
+        for (auto ds : ii->second->sets) {
+            FreeDescriptorSet(ds);
+        }
+        ii->second->sets.clear();
+        ii = descriptorPoolMap.erase(ii);
+    }
+}
+
+// For given object struct return a ptr of BASE_NODE type for its wrapping struct
+BASE_NODE *ValidationStateTracker::GetStateStructPtrFromObject(const VulkanTypedHandle &object_struct) {
+    if (object_struct.node) {
+#ifdef _DEBUG
+        // assert that lookup would find the same object
+        VulkanTypedHandle other = object_struct;
+        other.node = nullptr;
+        assert(object_struct.node == GetStateStructPtrFromObject(other));
+#endif
+        return object_struct.node;
+    }
+    BASE_NODE *base_ptr = nullptr;
+    switch (object_struct.type) {
+        case kVulkanObjectTypeDescriptorSet: {
+            base_ptr = GetSetNode(object_struct.Cast<VkDescriptorSet>());
+            break;
+        }
+        case kVulkanObjectTypeSampler: {
+            base_ptr = GetSamplerState(object_struct.Cast<VkSampler>());
+            break;
+        }
+        case kVulkanObjectTypeQueryPool: {
+            base_ptr = GetQueryPoolState(object_struct.Cast<VkQueryPool>());
+            break;
+        }
+        case kVulkanObjectTypePipeline: {
+            base_ptr = GetPipelineState(object_struct.Cast<VkPipeline>());
+            break;
+        }
+        case kVulkanObjectTypeBuffer: {
+            base_ptr = GetBufferState(object_struct.Cast<VkBuffer>());
+            break;
+        }
+        case kVulkanObjectTypeBufferView: {
+            base_ptr = GetBufferViewState(object_struct.Cast<VkBufferView>());
+            break;
+        }
+        case kVulkanObjectTypeImage: {
+            base_ptr = GetImageState(object_struct.Cast<VkImage>());
+            break;
+        }
+        case kVulkanObjectTypeImageView: {
+            base_ptr = GetImageViewState(object_struct.Cast<VkImageView>());
+            break;
+        }
+        case kVulkanObjectTypeEvent: {
+            base_ptr = GetEventState(object_struct.Cast<VkEvent>());
+            break;
+        }
+        case kVulkanObjectTypeDescriptorPool: {
+            base_ptr = GetDescriptorPoolState(object_struct.Cast<VkDescriptorPool>());
+            break;
+        }
+        case kVulkanObjectTypeCommandPool: {
+            base_ptr = GetCommandPoolState(object_struct.Cast<VkCommandPool>());
+            break;
+        }
+        case kVulkanObjectTypeFramebuffer: {
+            base_ptr = GetFramebufferState(object_struct.Cast<VkFramebuffer>());
+            break;
+        }
+        case kVulkanObjectTypeRenderPass: {
+            base_ptr = GetRenderPassState(object_struct.Cast<VkRenderPass>());
+            break;
+        }
+        case kVulkanObjectTypeDeviceMemory: {
+            base_ptr = GetDevMemState(object_struct.Cast<VkDeviceMemory>());
+            break;
+        }
+        case kVulkanObjectTypeAccelerationStructureNV: {
+            base_ptr = GetAccelerationStructureState(object_struct.Cast<VkAccelerationStructureNV>());
+            break;
+        }
+        case kVulkanObjectTypeUnknown:
+            // This can happen if an element of the object_bindings vector has been
+            // zeroed out, after an object is destroyed.
+            break;
+        default:
+            // TODO : Any other objects to be handled here?
+            assert(0);
+            break;
+    }
+    return base_ptr;
+}
+
+// Tie the VulkanTypedHandle to the cmd buffer which includes:
+//  Add object_binding to cmd buffer
+//  Add cb_binding to object
+bool ValidationStateTracker::AddCommandBufferBinding(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_bindings,
+                                                     const VulkanTypedHandle &obj, CMD_BUFFER_STATE *cb_node) {
+    if (disabled.command_buffer_state) {
+        return false;
+    }
+    // Insert the cb_binding with a default 'index' of -1. Then push the obj into the object_bindings
+    // vector, and update cb_bindings[cb_node] with the index of that element of the vector.
+    auto inserted = cb_bindings.insert({cb_node, -1});
+    if (inserted.second) {
+        cb_node->object_bindings.push_back(obj);
+        inserted.first->second = (int)cb_node->object_bindings.size() - 1;
+        return true;
+    }
+    return false;
+}
+
+// For a given object, if cb_node is in that objects cb_bindings, remove cb_node
+void ValidationStateTracker::RemoveCommandBufferBinding(VulkanTypedHandle const &object, CMD_BUFFER_STATE *cb_node) {
+    BASE_NODE *base_obj = GetStateStructPtrFromObject(object);
+    if (base_obj) base_obj->cb_bindings.erase(cb_node);
+}
+
+// Reset the command buffer state
+//  Maintain the createInfo and set state to CB_NEW, but clear all other state
+void ValidationStateTracker::ResetCommandBufferState(const VkCommandBuffer cb) {
+    CMD_BUFFER_STATE *pCB = GetCBState(cb);
+    if (pCB) {
+        pCB->in_use.store(0);
+        // Reset CB state (note that createInfo is not cleared)
+        pCB->commandBuffer = cb;
+        memset(&pCB->beginInfo, 0, sizeof(VkCommandBufferBeginInfo));
+        memset(&pCB->inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
+        pCB->hasDrawCmd = false;
+        pCB->hasTraceRaysCmd = false;
+        pCB->hasBuildAccelerationStructureCmd = false;
+        pCB->hasDispatchCmd = false;
+        pCB->state = CB_NEW;
+        pCB->commandCount = 0;
+        pCB->submitCount = 0;
+        pCB->image_layout_change_count = 1;  // Start at 1. 0 is insert value for validation cache versions, s.t. new == dirty
+        pCB->status = 0;
+        pCB->static_status = 0;
+        pCB->viewportMask = 0;
+        pCB->scissorMask = 0;
+
+        for (auto &item : pCB->lastBound) {
+            item.second.reset();
+        }
+
+        memset(&pCB->activeRenderPassBeginInfo, 0, sizeof(pCB->activeRenderPassBeginInfo));
+        pCB->activeRenderPass = nullptr;
+        pCB->activeSubpassContents = VK_SUBPASS_CONTENTS_INLINE;
+        pCB->activeSubpass = 0;
+        pCB->broken_bindings.clear();
+        pCB->waitedEvents.clear();
+        pCB->events.clear();
+        pCB->writeEventsBeforeWait.clear();
+        pCB->activeQueries.clear();
+        pCB->startedQueries.clear();
+        pCB->image_layout_map.clear();
+        pCB->current_vertex_buffer_binding_info.vertex_buffer_bindings.clear();
+        pCB->vertex_buffer_used = false;
+        pCB->primaryCommandBuffer = VK_NULL_HANDLE;
+        // If secondary, invalidate any primary command buffer that may call us.
+        if (pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
+            InvalidateLinkedCommandBuffers(pCB->linkedCommandBuffers, VulkanTypedHandle(cb, kVulkanObjectTypeCommandBuffer));
+        }
+
+        // Remove reverse command buffer links.
+        for (auto pSubCB : pCB->linkedCommandBuffers) {
+            pSubCB->linkedCommandBuffers.erase(pCB);
+        }
+        pCB->linkedCommandBuffers.clear();
+        pCB->queue_submit_functions.clear();
+        pCB->cmd_execute_commands_functions.clear();
+        pCB->eventUpdates.clear();
+        pCB->queryUpdates.clear();
+
+        // Remove object bindings
+        for (const auto &obj : pCB->object_bindings) {
+            RemoveCommandBufferBinding(obj, pCB);
+        }
+        pCB->object_bindings.clear();
+        // Remove this cmdBuffer's reference from each FrameBuffer's CB ref list
+        for (auto framebuffer : pCB->framebuffers) {
+            auto fb_state = GetFramebufferState(framebuffer);
+            if (fb_state) fb_state->cb_bindings.erase(pCB);
+        }
+        pCB->framebuffers.clear();
+        pCB->activeFramebuffer = VK_NULL_HANDLE;
+        memset(&pCB->index_buffer_binding, 0, sizeof(pCB->index_buffer_binding));
+
+        pCB->qfo_transfer_image_barriers.Reset();
+        pCB->qfo_transfer_buffer_barriers.Reset();
+
+        // Clean up the label data
+        ResetCmdDebugUtilsLabel(report_data, pCB->commandBuffer);
+        pCB->debug_label.Reset();
+        pCB->validate_descriptorsets_in_queuesubmit.clear();
+    }
+    if (command_buffer_reset_callback) {
+        (*command_buffer_reset_callback)(cb);
+    }
+}
+
+void ValidationStateTracker::PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
+                                                        const VkAllocationCallbacks *pAllocator, VkDevice *pDevice,
+                                                        VkResult result) {
+    if (VK_SUCCESS != result) return;
+
+    const VkPhysicalDeviceFeatures *enabled_features_found = pCreateInfo->pEnabledFeatures;
+    if (nullptr == enabled_features_found) {
+        const auto *features2 = lvl_find_in_chain<VkPhysicalDeviceFeatures2KHR>(pCreateInfo->pNext);
+        if (features2) {
+            enabled_features_found = &(features2->features);
+        }
+    }
+
+    ValidationObject *device_object = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
+    ValidationObject *validation_data = GetValidationObject(device_object->object_dispatch, this->container_type);
+    ValidationStateTracker *state_tracker = static_cast<ValidationStateTracker *>(validation_data);
+
+    if (nullptr == enabled_features_found) {
+        state_tracker->enabled_features.core = {};
+    } else {
+        state_tracker->enabled_features.core = *enabled_features_found;
+    }
+
+    // Make sure that queue_family_properties are obtained for this device's physical_device, even if the app has not
+    // previously set them through an explicit API call.
+    uint32_t count;
+    auto pd_state = GetPhysicalDeviceState(gpu);
+    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
+    pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
+    DispatchGetPhysicalDeviceQueueFamilyProperties(gpu, &count, &pd_state->queue_family_properties[0]);
+    // Save local link to this device's physical device state
+    state_tracker->physical_device_state = pd_state;
+
+    const auto *device_group_ci = lvl_find_in_chain<VkDeviceGroupDeviceCreateInfo>(pCreateInfo->pNext);
+    state_tracker->physical_device_count =
+        device_group_ci && device_group_ci->physicalDeviceCount > 0 ? device_group_ci->physicalDeviceCount : 1;
+
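+    // Cache each enabled feature struct found in the pNext chain so later validation can query it without
+    // re-walking the chain.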
+    const auto *descriptor_indexing_features = lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>(pCreateInfo->pNext);
+    if (descriptor_indexing_features) {
+        state_tracker->enabled_features.descriptor_indexing = *descriptor_indexing_features;
+    }
+
+    const auto *eight_bit_storage_features = lvl_find_in_chain<VkPhysicalDevice8BitStorageFeaturesKHR>(pCreateInfo->pNext);
+    if (eight_bit_storage_features) {
+        state_tracker->enabled_features.eight_bit_storage = *eight_bit_storage_features;
+    }
+
+    const auto *exclusive_scissor_features = lvl_find_in_chain<VkPhysicalDeviceExclusiveScissorFeaturesNV>(pCreateInfo->pNext);
+    if (exclusive_scissor_features) {
+        state_tracker->enabled_features.exclusive_scissor = *exclusive_scissor_features;
+    }
+
+    const auto *shading_rate_image_features = lvl_find_in_chain<VkPhysicalDeviceShadingRateImageFeaturesNV>(pCreateInfo->pNext);
+    if (shading_rate_image_features) {
+        state_tracker->enabled_features.shading_rate_image = *shading_rate_image_features;
+    }
+
+    const auto *mesh_shader_features = lvl_find_in_chain<VkPhysicalDeviceMeshShaderFeaturesNV>(pCreateInfo->pNext);
+    if (mesh_shader_features) {
+        state_tracker->enabled_features.mesh_shader = *mesh_shader_features;
+    }
+
+    const auto *inline_uniform_block_features =
+        lvl_find_in_chain<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pCreateInfo->pNext);
+    if (inline_uniform_block_features) {
+        state_tracker->enabled_features.inline_uniform_block = *inline_uniform_block_features;
+    }
+
+    const auto *transform_feedback_features = lvl_find_in_chain<VkPhysicalDeviceTransformFeedbackFeaturesEXT>(pCreateInfo->pNext);
+    if (transform_feedback_features) {
+        state_tracker->enabled_features.transform_feedback_features = *transform_feedback_features;
+    }
+
+    const auto *float16_int8_features = lvl_find_in_chain<VkPhysicalDeviceFloat16Int8FeaturesKHR>(pCreateInfo->pNext);
+    if (float16_int8_features) {
+        state_tracker->enabled_features.float16_int8 = *float16_int8_features;
+    }
+
+    const auto *vtx_attrib_div_features = lvl_find_in_chain<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT>(pCreateInfo->pNext);
+    if (vtx_attrib_div_features) {
+        state_tracker->enabled_features.vtx_attrib_divisor_features = *vtx_attrib_div_features;
+    }
+
+    const auto *uniform_buffer_standard_layout_features =
+        lvl_find_in_chain<VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR>(pCreateInfo->pNext);
+    if (uniform_buffer_standard_layout_features) {
+        state_tracker->enabled_features.uniform_buffer_standard_layout = *uniform_buffer_standard_layout_features;
+    }
+
+    const auto *scalar_block_layout_features = lvl_find_in_chain<VkPhysicalDeviceScalarBlockLayoutFeaturesEXT>(pCreateInfo->pNext);
+    if (scalar_block_layout_features) {
+        state_tracker->enabled_features.scalar_block_layout_features = *scalar_block_layout_features;
+    }
+
+    const auto *buffer_device_address = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeaturesKHR>(pCreateInfo->pNext);
+    if (buffer_device_address) {
+        state_tracker->enabled_features.buffer_device_address = *buffer_device_address;
+    }
+
+    const auto *buffer_device_address_ext = lvl_find_in_chain<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT>(pCreateInfo->pNext);
+    if (buffer_device_address_ext) {
+        state_tracker->enabled_features.buffer_device_address_ext = *buffer_device_address_ext;
+    }
+
+    const auto *cooperative_matrix_features = lvl_find_in_chain<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(pCreateInfo->pNext);
+    if (cooperative_matrix_features) {
+        state_tracker->enabled_features.cooperative_matrix_features = *cooperative_matrix_features;
+    }
+
+    const auto *host_query_reset_features = lvl_find_in_chain<VkPhysicalDeviceHostQueryResetFeaturesEXT>(pCreateInfo->pNext);
+    if (host_query_reset_features) {
+        state_tracker->enabled_features.host_query_reset_features = *host_query_reset_features;
+    }
+
+    const auto *compute_shader_derivatives_features =
+        lvl_find_in_chain<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>(pCreateInfo->pNext);
+    if (compute_shader_derivatives_features) {
+        state_tracker->enabled_features.compute_shader_derivatives_features = *compute_shader_derivatives_features;
+    }
+
+    const auto *fragment_shader_barycentric_features =
+        lvl_find_in_chain<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>(pCreateInfo->pNext);
+    if (fragment_shader_barycentric_features) {
+        state_tracker->enabled_features.fragment_shader_barycentric_features = *fragment_shader_barycentric_features;
+    }
+
+    const auto *shader_image_footprint_features =
+        lvl_find_in_chain<VkPhysicalDeviceShaderImageFootprintFeaturesNV>(pCreateInfo->pNext);
+    if (shader_image_footprint_features) {
+        state_tracker->enabled_features.shader_image_footprint_features = *shader_image_footprint_features;
+    }
+
+    const auto *fragment_shader_interlock_features =
+        lvl_find_in_chain<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>(pCreateInfo->pNext);
+    if (fragment_shader_interlock_features) {
+        state_tracker->enabled_features.fragment_shader_interlock_features = *fragment_shader_interlock_features;
+    }
+
+    const auto *demote_to_helper_invocation_features =
+        lvl_find_in_chain<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>(pCreateInfo->pNext);
+    if (demote_to_helper_invocation_features) {
+        state_tracker->enabled_features.demote_to_helper_invocation_features = *demote_to_helper_invocation_features;
+    }
+
+    const auto *texel_buffer_alignment_features =
+        lvl_find_in_chain<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>(pCreateInfo->pNext);
+    if (texel_buffer_alignment_features) {
+        state_tracker->enabled_features.texel_buffer_alignment_features = *texel_buffer_alignment_features;
+    }
+
+    const auto *imageless_framebuffer_features =
+        lvl_find_in_chain<VkPhysicalDeviceImagelessFramebufferFeaturesKHR>(pCreateInfo->pNext);
+    if (imageless_framebuffer_features) {
+        state_tracker->enabled_features.imageless_framebuffer_features = *imageless_framebuffer_features;
+    }
+
+    const auto *pipeline_exe_props_features =
+        lvl_find_in_chain<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR>(pCreateInfo->pNext);
+    if (pipeline_exe_props_features) {
+        state_tracker->enabled_features.pipeline_exe_props_features = *pipeline_exe_props_features;
+    }
+
+    const auto *dedicated_allocation_image_aliasing_features =
+        lvl_find_in_chain<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>(pCreateInfo->pNext);
+    if (dedicated_allocation_image_aliasing_features) {
+        state_tracker->enabled_features.dedicated_allocation_image_aliasing_features =
+            *dedicated_allocation_image_aliasing_features;
+    }
+
+    const auto *subgroup_extended_types_features =
+        lvl_find_in_chain<VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR>(pCreateInfo->pNext);
+    if (subgroup_extended_types_features) {
+        state_tracker->enabled_features.subgroup_extended_types_features = *subgroup_extended_types_features;
+    }
+
+    const auto *separate_depth_stencil_layouts_features =
+        lvl_find_in_chain<VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR>(pCreateInfo->pNext);
+    if (separate_depth_stencil_layouts_features) {
+        state_tracker->enabled_features.separate_depth_stencil_layouts_features = *separate_depth_stencil_layouts_features;
+    }
+
+    const auto *performance_query_features = lvl_find_in_chain<VkPhysicalDevicePerformanceQueryFeaturesKHR>(pCreateInfo->pNext);
+    if (performance_query_features) {
+        state_tracker->enabled_features.performance_query_features = *performance_query_features;
+    }
+
+    const auto *timeline_semaphore_features = lvl_find_in_chain<VkPhysicalDeviceTimelineSemaphoreFeaturesKHR>(pCreateInfo->pNext);
+    if (timeline_semaphore_features) {
+        state_tracker->enabled_features.timeline_semaphore_features = *timeline_semaphore_features;
+    }
+
+    const auto *device_coherent_memory_features = lvl_find_in_chain<VkPhysicalDeviceCoherentMemoryFeaturesAMD>(pCreateInfo->pNext);
+    if (device_coherent_memory_features) {
+        state_tracker->enabled_features.device_coherent_memory_features = *device_coherent_memory_features;
+    }
+
+    // Store physical device properties and physical device memory limits into the state tracker
+    DispatchGetPhysicalDeviceMemoryProperties(gpu, &state_tracker->phys_dev_mem_props);
+    DispatchGetPhysicalDeviceProperties(gpu, &state_tracker->phys_dev_props);
+
+    const auto &dev_ext = state_tracker->device_extensions;
+    auto *phys_dev_props = &state_tracker->phys_dev_ext_props;
+
+    if (dev_ext.vk_khr_push_descriptor) {
+        // Get the needed push_descriptor limits
+        VkPhysicalDevicePushDescriptorPropertiesKHR push_descriptor_prop;
+        GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_push_descriptor, &push_descriptor_prop);
+        phys_dev_props->max_push_descriptors = push_descriptor_prop.maxPushDescriptors;
+    }
+
+    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_descriptor_indexing, &phys_dev_props->descriptor_indexing_props);
+    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_shading_rate_image, &phys_dev_props->shading_rate_image_props);
+    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_mesh_shader, &phys_dev_props->mesh_shader_props);
+    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_inline_uniform_block, &phys_dev_props->inline_uniform_block_props);
+    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_vertex_attribute_divisor, &phys_dev_props->vtx_attrib_divisor_props);
+    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_depth_stencil_resolve, &phys_dev_props->depth_stencil_resolve_props);
+    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_transform_feedback, &phys_dev_props->transform_feedback_props);
+    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_nv_ray_tracing, &phys_dev_props->ray_tracing_props);
+    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_texel_buffer_alignment, &phys_dev_props->texel_buffer_alignment_props);
+    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_ext_fragment_density_map, &phys_dev_props->fragment_density_map_props);
+    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_performance_query, &phys_dev_props->performance_query_props);
+    GetPhysicalDeviceExtProperties(gpu, dev_ext.vk_khr_timeline_semaphore, &phys_dev_props->timeline_semaphore_props);
+    if (state_tracker->device_extensions.vk_nv_cooperative_matrix) {
+        // Get the needed cooperative_matrix properties
+        auto cooperative_matrix_props = lvl_init_struct<VkPhysicalDeviceCooperativeMatrixPropertiesNV>();
+        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&cooperative_matrix_props);
+        instance_dispatch_table.GetPhysicalDeviceProperties2KHR(gpu, &prop2);
+        state_tracker->phys_dev_ext_props.cooperative_matrix_props = cooperative_matrix_props;
+
+        uint32_t numCooperativeMatrixProperties = 0;
+        instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties, NULL);
+        state_tracker->cooperative_matrix_properties.resize(numCooperativeMatrixProperties,
+                                                            lvl_init_struct<VkCooperativeMatrixPropertiesNV>());
+
+        instance_dispatch_table.GetPhysicalDeviceCooperativeMatrixPropertiesNV(gpu, &numCooperativeMatrixProperties,
+                                                                               state_tracker->cooperative_matrix_properties.data());
+    }
+    if (state_tracker->api_version >= VK_API_VERSION_1_1) {
+        // Get the needed subgroup limits
+        auto subgroup_prop = lvl_init_struct<VkPhysicalDeviceSubgroupProperties>();
+        auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&subgroup_prop);
+        instance_dispatch_table.GetPhysicalDeviceProperties2(gpu, &prop2);
+
+        state_tracker->phys_dev_ext_props.subgroup_props = subgroup_prop;
+    }
+
+    // Store queue family data
+    if (pCreateInfo->pQueueCreateInfos != nullptr) {
+        for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
+            state_tracker->queue_family_index_map.insert(
+                std::make_pair(pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex, pCreateInfo->pQueueCreateInfos[i].queueCount));
+        }
+    }
+}
+
+void ValidationStateTracker::PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
+    if (!device) return;
+
+    // Reset all command buffers before destroying them, to unlink object_bindings.
+    for (auto &commandBuffer : commandBufferMap) {
+        ResetCommandBufferState(commandBuffer.first);
+    }
+    pipelineMap.clear();
+    renderPassMap.clear();
+    commandBufferMap.clear();
+
+    // This will also delete all sets in the pool & remove them from setMap
+    DeleteDescriptorSetPools();
+    // All sets should be removed
+    assert(setMap.empty());
+    descriptorSetLayoutMap.clear();
+    imageViewMap.clear();
+    imageMap.clear();
+    bufferViewMap.clear();
+    bufferMap.clear();
+    // Queues persist until device is destroyed
+    queueMap.clear();
+}
+
+// Loop through bound objects and increment their in_use counts.
+void ValidationStateTracker::IncrementBoundObjects(CMD_BUFFER_STATE const *cb_node) {
+    for (auto obj : cb_node->object_bindings) {
+        auto base_obj = GetStateStructPtrFromObject(obj);
+        if (base_obj) {
+            base_obj->in_use.fetch_add(1);
+        }
+    }
+}
+
+// Track which resources are in-flight by atomically incrementing their "in_use" count
+void ValidationStateTracker::IncrementResources(CMD_BUFFER_STATE *cb_node) {
+    cb_node->submitCount++;
+    cb_node->in_use.fetch_add(1);
+
+    // First Increment for all "generic" objects bound to cmd buffer, followed by special-case objects below
+    IncrementBoundObjects(cb_node);
+    // TODO: We should be able to remove the NULL look-up checks below once every corresponding case is
+    //  verified to put the command buffer into CB_INVALID state, and that state is flagged before this
+    //  function is called.
+    for (auto event : cb_node->writeEventsBeforeWait) {
+        auto event_state = GetEventState(event);
+        if (event_state) event_state->write_in_use++;
+    }
+}
+
+// Decrement in-use count for objects bound to command buffer
+void ValidationStateTracker::DecrementBoundResources(CMD_BUFFER_STATE const *cb_node) {
+    BASE_NODE *base_obj = nullptr;
+    for (auto obj : cb_node->object_bindings) {
+        base_obj = GetStateStructPtrFromObject(obj);
+        if (base_obj) {
+            base_obj->in_use.fetch_sub(1);
+        }
+    }
+}
+
+void ValidationStateTracker::RetireWorkOnQueue(QUEUE_STATE *pQueue, uint64_t seq) {
+    std::unordered_map<VkQueue, uint64_t> otherQueueSeqs;
+
+    // Roll this queue forward, one submission at a time.
+    while (pQueue->seq < seq) {
+        auto &submission = pQueue->submissions.front();
+
+        for (auto &wait : submission.waitSemaphores) {
+            auto pSemaphore = GetSemaphoreState(wait.semaphore);
+            if (pSemaphore) {
+                pSemaphore->in_use.fetch_sub(1);
+            }
+            auto &lastSeq = otherQueueSeqs[wait.queue];
+            lastSeq = std::max(lastSeq, wait.seq);
+        }
+
+        for (auto &semaphore : submission.signalSemaphores) {
+            auto pSemaphore = GetSemaphoreState(semaphore);
+            if (pSemaphore) {
+                pSemaphore->in_use.fetch_sub(1);
+            }
+        }
+
+        for (auto &semaphore : submission.externalSemaphores) {
+            auto pSemaphore = GetSemaphoreState(semaphore);
+            if (pSemaphore) {
+                pSemaphore->in_use.fetch_sub(1);
+            }
+        }
+
+        for (auto cb : submission.cbs) {
+            auto cb_node = GetCBState(cb);
+            if (!cb_node) {
+                continue;
+            }
+            // First perform decrement on general case bound objects
+            DecrementBoundResources(cb_node);
+            for (auto event : cb_node->writeEventsBeforeWait) {
+                auto eventNode = eventMap.find(event);
+                if (eventNode != eventMap.end()) {
+                    eventNode->second.write_in_use--;
+                }
+            }
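+            // Replay the command buffer's recorded query updates to see which queries finished with this submission.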
+            QueryMap localQueryToStateMap;
+            for (auto &function : cb_node->queryUpdates) {
+                function(nullptr, /*do_validate*/ false, &localQueryToStateMap);
+            }
+
+            for (auto queryStatePair : localQueryToStateMap) {
+                if (queryStatePair.second == QUERYSTATE_ENDED) {
+                    queryToStateMap[queryStatePair.first] = QUERYSTATE_AVAILABLE;
+
+                    // The query pool may already have been destroyed by the time this work retires; guard the look-up.
+                    const QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryStatePair.first.pool);
+                    if (qp_state && qp_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR)
+                        queryPassToStateMap[QueryObjectPass(queryStatePair.first, submission.perf_submit_pass)] =
+                            QUERYSTATE_AVAILABLE;
+                }
+            }
+            cb_node->in_use.fetch_sub(1);
+        }
+
+        auto pFence = GetFenceState(submission.fence);
+        if (pFence && pFence->scope == kSyncScopeInternal) {
+            pFence->state = FENCE_RETIRED;
+        }
+
+        pQueue->submissions.pop_front();
+        pQueue->seq++;
+    }
+
+    // Roll other queues forward to the highest seq we saw a wait for
+    for (auto qs : otherQueueSeqs) {
+        RetireWorkOnQueue(GetQueueState(qs.first), qs.second);
+    }
+}
+
+// Submit a fence to a queue, delimiting previous fences and previous untracked
+// work by it.
+static void SubmitFence(QUEUE_STATE *pQueue, FENCE_STATE *pFence, uint64_t submitCount) {
+    pFence->state = FENCE_INFLIGHT;
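+    // The fence retires once the queue reaches the sequence number of the last submission in this batch.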
+    pFence->signaler.first = pQueue->queue;
+    pFence->signaler.second = pQueue->seq + pQueue->submissions.size() + submitCount;
+}
+
+void ValidationStateTracker::PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
+                                                       VkFence fence, VkResult result) {
+    uint64_t early_retire_seq = 0;
+    auto pQueue = GetQueueState(queue);
+    auto pFence = GetFenceState(fence);
+
+    if (pFence) {
+        if (pFence->scope == kSyncScopeInternal) {
+            // Mark fence in use
+            SubmitFence(pQueue, pFence, std::max(1u, submitCount));
+            if (!submitCount) {
+                // If no submissions, but just dropping a fence on the end of the queue,
+                // record an empty submission with just the fence, so we can determine
+                // its completion.
+                pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
+                                                 std::vector<VkSemaphore>(), std::vector<VkSemaphore>(), fence, 0);
+            }
+        } else {
+            // Retire work up until this fence early, we will not see the wait that corresponds to this signal
+            early_retire_seq = pQueue->seq + pQueue->submissions.size();
+        }
+    }
+
+    // Now process each individual submit
+    for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
+        std::vector<VkCommandBuffer> cbs;
+        const VkSubmitInfo *submit = &pSubmits[submit_idx];
+        vector<SEMAPHORE_WAIT> semaphore_waits;
+        vector<VkSemaphore> semaphore_signals;
+        vector<VkSemaphore> semaphore_externals;
+        auto *timeline_semaphore_submit = lvl_find_in_chain<VkTimelineSemaphoreSubmitInfoKHR>(submit->pNext);
+        for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
+            VkSemaphore semaphore = submit->pWaitSemaphores[i];
+            auto pSemaphore = GetSemaphoreState(semaphore);
+            if (pSemaphore) {
+                if (pSemaphore->scope == kSyncScopeInternal) {
+                    if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
+                        semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
+                        pSemaphore->in_use.fetch_add(1);
+                    }
+                    pSemaphore->signaler.first = VK_NULL_HANDLE;
+                    pSemaphore->signaled = false;
+                } else {
+                    semaphore_externals.push_back(semaphore);
+                    pSemaphore->in_use.fetch_add(1);
+                    if (pSemaphore->scope == kSyncScopeExternalTemporary) {
+                        pSemaphore->scope = kSyncScopeInternal;
+                    }
+                }
+            }
+        }
+        for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
+            VkSemaphore semaphore = submit->pSignalSemaphores[i];
+            auto pSemaphore = GetSemaphoreState(semaphore);
+            if (pSemaphore) {
+                if (pSemaphore->scope == kSyncScopeInternal) {
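+                    // Binary semaphores track their signaling queue and sequence number; timeline semaphores just advance their payload.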
+                    if (pSemaphore->type == VK_SEMAPHORE_TYPE_BINARY_KHR) {
+                        pSemaphore->signaler.first = queue;
+                        pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
+                        pSemaphore->signaled = true;
+                    } else {
+                        pSemaphore->payload = timeline_semaphore_submit->pSignalSemaphoreValues[i];
+                    }
+                    pSemaphore->in_use.fetch_add(1);
+                    semaphore_signals.push_back(semaphore);
+                } else {
+                    // Retire work up until this submit early, we will not see the wait that corresponds to this signal
+                    early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
+                }
+            }
+        }
+        for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
+            auto cb_node = GetCBState(submit->pCommandBuffers[i]);
+            if (cb_node) {
+                cbs.push_back(submit->pCommandBuffers[i]);
+                for (auto secondaryCmdBuffer : cb_node->linkedCommandBuffers) {
+                    cbs.push_back(secondaryCmdBuffer->commandBuffer);
+                    IncrementResources(secondaryCmdBuffer);
+                }
+                IncrementResources(cb_node);
+
+                QueryMap localQueryToStateMap;
+                for (auto &function : cb_node->queryUpdates) {
+                    function(nullptr, /*do_validate*/ false, &localQueryToStateMap);
+                }
+
+                for (auto queryStatePair : localQueryToStateMap) {
+                    queryToStateMap[queryStatePair.first] = queryStatePair.second;
+                }
+
+                EventToStageMap localEventToStageMap;
+                for (auto &function : cb_node->eventUpdates) {
+                    function(nullptr, /*do_validate*/ false, &localEventToStageMap);
+                }
+
+                for (auto eventStagePair : localEventToStageMap) {
+                    eventMap[eventStagePair.first].stageMask = eventStagePair.second;
+                }
+            }
+        }
+
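+        // Record this batch on the queue; only the last VkSubmitInfo in the call is associated with the fence.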
+        const auto perf_submit = lvl_find_in_chain<VkPerformanceQuerySubmitInfoKHR>(submit->pNext);
+
+        pQueue->submissions.emplace_back(cbs, semaphore_waits, semaphore_signals, semaphore_externals,
+                                         submit_idx == submitCount - 1 ? fence : (VkFence)VK_NULL_HANDLE,
+                                         perf_submit ? perf_submit->counterPassIndex : 0);
+    }
+
+    if (early_retire_seq) {
+        RetireWorkOnQueue(pQueue, early_retire_seq);
+    }
+}
+
+void ValidationStateTracker::PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
+                                                          const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory,
+                                                          VkResult result) {
+    if (VK_SUCCESS != result) return;
+    AddMemObjInfo(device, *pMemory, pAllocateInfo);
+}
+
+void ValidationStateTracker::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
+    if (!mem) return;
+    DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
+    const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);
+
+    // Clear mem binding for any bound objects
+    for (const auto &obj : mem_info->obj_bindings) {
+        BINDABLE *bindable_state = nullptr;
+        switch (obj.type) {
+            case kVulkanObjectTypeImage:
+                bindable_state = GetImageState(obj.Cast<VkImage>());
+                break;
+            case kVulkanObjectTypeBuffer:
+                bindable_state = GetBufferState(obj.Cast<VkBuffer>());
+                break;
+            case kVulkanObjectTypeAccelerationStructureNV:
+                bindable_state = GetAccelerationStructureState(obj.Cast<VkAccelerationStructureNV>());
+                break;
+
+            default:
+                // Should only have acceleration structure, buffer, or image objects bound to memory
+                assert(0);
+        }
+
+        if (bindable_state) {
+            bindable_state->binding.mem = MEMORY_UNBOUND;
+            bindable_state->UpdateBoundMemorySet();
+        }
+    }
+    // Any bound cmd buffers are now invalid
+    InvalidateCommandBuffers(mem_info->cb_bindings, obj_struct);
+    RemoveAliasingImages(mem_info->bound_images);
+    mem_info->destroyed = true;
+    memObjMap.erase(mem);
+}
+
+void ValidationStateTracker::PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
+                                                           VkFence fence, VkResult result) {
+    if (result != VK_SUCCESS) return;
+    uint64_t early_retire_seq = 0;
+    auto pFence = GetFenceState(fence);
+    auto pQueue = GetQueueState(queue);
+
+    if (pFence) {
+        if (pFence->scope == kSyncScopeInternal) {
+            SubmitFence(pQueue, pFence, std::max(1u, bindInfoCount));
+            if (!bindInfoCount) {
+                // No work to do, just dropping a fence in the queue by itself.
+                pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), std::vector<SEMAPHORE_WAIT>(),
+                                                 std::vector<VkSemaphore>(), std::vector<VkSemaphore>(), fence, 0);
+            }
+        } else {
+            // Retire work up until this fence early, we will not see the wait that corresponds to this signal
+            early_retire_seq = pQueue->seq + pQueue->submissions.size();
+        }
+    }
+
+    for (uint32_t bindIdx = 0; bindIdx < bindInfoCount; ++bindIdx) {
+        const VkBindSparseInfo &bindInfo = pBindInfo[bindIdx];
+        // Track objects tied to memory
+        for (uint32_t j = 0; j < bindInfo.bufferBindCount; j++) {
+            for (uint32_t k = 0; k < bindInfo.pBufferBinds[j].bindCount; k++) {
+                auto sparse_binding = bindInfo.pBufferBinds[j].pBinds[k];
+                SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size},
+                                    VulkanTypedHandle(bindInfo.pBufferBinds[j].buffer, kVulkanObjectTypeBuffer));
+            }
+        }
+        for (uint32_t j = 0; j < bindInfo.imageOpaqueBindCount; j++) {
+            for (uint32_t k = 0; k < bindInfo.pImageOpaqueBinds[j].bindCount; k++) {
+                auto sparse_binding = bindInfo.pImageOpaqueBinds[j].pBinds[k];
+                SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size},
+                                    VulkanTypedHandle(bindInfo.pImageOpaqueBinds[j].image, kVulkanObjectTypeImage));
+            }
+        }
+        for (uint32_t j = 0; j < bindInfo.imageBindCount; j++) {
+            for (uint32_t k = 0; k < bindInfo.pImageBinds[j].bindCount; k++) {
+                auto sparse_binding = bindInfo.pImageBinds[j].pBinds[k];
+                // TODO: This size is broken for non-opaque bindings, need to update to comprehend full sparse binding data
+                VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
+                SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, size},
+                                    VulkanTypedHandle(bindInfo.pImageBinds[j].image, kVulkanObjectTypeImage));
+            }
+        }
+
+        std::vector<SEMAPHORE_WAIT> semaphore_waits;
+        std::vector<VkSemaphore> semaphore_signals;
+        std::vector<VkSemaphore> semaphore_externals;
+        for (uint32_t i = 0; i < bindInfo.waitSemaphoreCount; ++i) {
+            VkSemaphore semaphore = bindInfo.pWaitSemaphores[i];
+            auto pSemaphore = GetSemaphoreState(semaphore);
+            if (pSemaphore) {
+                if (pSemaphore->scope == kSyncScopeInternal) {
+                    if (pSemaphore->signaler.first != VK_NULL_HANDLE) {
+                        semaphore_waits.push_back({semaphore, pSemaphore->signaler.first, pSemaphore->signaler.second});
+                        pSemaphore->in_use.fetch_add(1);
+                    }
+                    pSemaphore->signaler.first = VK_NULL_HANDLE;
+                    pSemaphore->signaled = false;
+                } else {
+                    semaphore_externals.push_back(semaphore);
+                    pSemaphore->in_use.fetch_add(1);
+                    if (pSemaphore->scope == kSyncScopeExternalTemporary) {
+                        pSemaphore->scope = kSyncScopeInternal;
+                    }
+                }
+            }
+        }
+        for (uint32_t i = 0; i < bindInfo.signalSemaphoreCount; ++i) {
+            VkSemaphore semaphore = bindInfo.pSignalSemaphores[i];
+            auto pSemaphore = GetSemaphoreState(semaphore);
+            if (pSemaphore) {
+                if (pSemaphore->scope == kSyncScopeInternal) {
+                    pSemaphore->signaler.first = queue;
+                    pSemaphore->signaler.second = pQueue->seq + pQueue->submissions.size() + 1;
+                    pSemaphore->signaled = true;
+                    pSemaphore->in_use.fetch_add(1);
+                    semaphore_signals.push_back(semaphore);
+                } else {
+                    // Retire work up until this submit early, we will not see the wait that corresponds to this signal
+                    early_retire_seq = std::max(early_retire_seq, pQueue->seq + pQueue->submissions.size() + 1);
+                }
+            }
+        }
+
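+        // Each bind is recorded as a command-buffer-less submission; only the last one carries the fence.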
+        pQueue->submissions.emplace_back(std::vector<VkCommandBuffer>(), semaphore_waits, semaphore_signals, semaphore_externals,
+                                         bindIdx == bindInfoCount - 1 ? fence : (VkFence)VK_NULL_HANDLE, 0);
+    }
+
+    if (early_retire_seq) {
+        RetireWorkOnQueue(pQueue, early_retire_seq);
+    }
+}
+
+void ValidationStateTracker::PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
+                                                           const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore,
+                                                           VkResult result) {
+    if (VK_SUCCESS != result) return;
+    auto semaphore_state = std::make_shared<SEMAPHORE_STATE>();
+    semaphore_state->signaler.first = VK_NULL_HANDLE;
+    semaphore_state->signaler.second = 0;
+    semaphore_state->signaled = false;
+    semaphore_state->scope = kSyncScopeInternal;
+    semaphore_state->type = VK_SEMAPHORE_TYPE_BINARY_KHR;
+    semaphore_state->payload = 0;
+    auto semaphore_type_create_info = lvl_find_in_chain<VkSemaphoreTypeCreateInfoKHR>(pCreateInfo->pNext);
+    if (semaphore_type_create_info) {
+        semaphore_state->type = semaphore_type_create_info->semaphoreType;
+        semaphore_state->payload = semaphore_type_create_info->initialValue;
+    }
+    semaphoreMap[*pSemaphore] = std::move(semaphore_state);
+}
+
+void ValidationStateTracker::RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type,
+                                                        VkSemaphoreImportFlagsKHR flags) {
+    SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
+    if (sema_node && sema_node->scope != kSyncScopeExternalPermanent) {
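+        // A sync-fd handle type or the TEMPORARY import flag only replaces the payload temporarily; other imports are permanent.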
+        if ((handle_type == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR) &&
+            sema_node->scope == kSyncScopeInternal) {
+            sema_node->scope = kSyncScopeExternalTemporary;
+        } else {
+            sema_node->scope = kSyncScopeExternalPermanent;
+        }
+    }
+}
+
+void ValidationStateTracker::PostCallRecordSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfoKHR *pSignalInfo,
+                                                              VkResult result) {
+    if (VK_SUCCESS != result) return;
+    auto *pSemaphore = GetSemaphoreState(pSignalInfo->semaphore);
+    if (pSemaphore) {
+        pSemaphore->payload = pSignalInfo->value;
+    }
+}
+
+void ValidationStateTracker::RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void **ppData) {
+    auto mem_info = GetDevMemState(mem);
+    if (mem_info) {
+        mem_info->mapped_range.offset = offset;
+        mem_info->mapped_range.size = size;
+        mem_info->p_driver_data = *ppData;
+    }
+}
+
+void ValidationStateTracker::RetireFence(VkFence fence) {
+    auto pFence = GetFenceState(fence);
+    if (pFence && pFence->scope == kSyncScopeInternal) {
+        if (pFence->signaler.first != VK_NULL_HANDLE) {
+            // Fence signaller is a queue -- use this as proof that prior operations on that queue have completed.
+            RetireWorkOnQueue(GetQueueState(pFence->signaler.first), pFence->signaler.second);
+        } else {
+            // Fence signaller is the WSI. We're not tracking what the WSI op actually /was/ in CV yet, but we need to mark
+            // the fence as retired.
+            pFence->state = FENCE_RETIRED;
+        }
+    }
+}
+
+void ValidationStateTracker::PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
+                                                         VkBool32 waitAll, uint64_t timeout, VkResult result) {
+    if (VK_SUCCESS != result) return;
+
+    // When we know that all fences are complete we can clean/remove their CBs
+    if ((VK_TRUE == waitAll) || (1 == fenceCount)) {
+        for (uint32_t i = 0; i < fenceCount; i++) {
+            RetireFence(pFences[i]);
+        }
+    }
+    // NOTE: The alternate case, where only some of the fences have completed, is not handled here. For the app
+    //  to know which fences completed it has to call vkGetFenceStatus(), at which point we'll clean/remove their
+    //  CBs if complete.
+}
+
+void ValidationStateTracker::PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
+    if (VK_SUCCESS != result) return;
+    RetireFence(fence);
+}
+
+void ValidationStateTracker::RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue) {
+    // Add queue to tracking set only if it is new
+    auto queue_is_new = queues.emplace(queue);
+    if (queue_is_new.second) {
+        QUEUE_STATE *queue_state = &queueMap[queue];
+        queue_state->queue = queue;
+        queue_state->queueFamilyIndex = queue_family_index;
+        queue_state->seq = 0;
+    }
+}
+
+void ValidationStateTracker::PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex,
+                                                          VkQueue *pQueue) {
+    RecordGetDeviceQueueState(queueFamilyIndex, *pQueue);
+}
+
+void ValidationStateTracker::PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) {
+    RecordGetDeviceQueueState(pQueueInfo->queueFamilyIndex, *pQueue);
+}
+
+void ValidationStateTracker::PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result) {
+    if (VK_SUCCESS != result) return;
+    QUEUE_STATE *queue_state = GetQueueState(queue);
+    RetireWorkOnQueue(queue_state, queue_state->seq + queue_state->submissions.size());
+}
+
+void ValidationStateTracker::PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result) {
+    if (VK_SUCCESS != result) return;
+    for (auto &queue : queueMap) {
+        RetireWorkOnQueue(&queue.second, queue.second.seq + queue.second.submissions.size());
+    }
+}
+
+void ValidationStateTracker::PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
+    if (!fence) return;
+    auto fence_state = GetFenceState(fence);
+    fence_state->destroyed = true;
+    fenceMap.erase(fence);
+}
+
+void ValidationStateTracker::PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore,
+                                                           const VkAllocationCallbacks *pAllocator) {
+    if (!semaphore) return;
+    auto semaphore_state = GetSemaphoreState(semaphore);
+    semaphore_state->destroyed = true;
+    semaphoreMap.erase(semaphore);
+}
+
+void ValidationStateTracker::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
+    if (!event) return;
+    EVENT_STATE *event_state = GetEventState(event);
+    const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
+    InvalidateCommandBuffers(event_state->cb_bindings, obj_struct);
+    eventMap.erase(event);
+}
+
+void ValidationStateTracker::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
+                                                           const VkAllocationCallbacks *pAllocator) {
+    if (!queryPool) return;
+    QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
+    const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
+    InvalidateCommandBuffers(qp_state->cb_bindings, obj_struct);
+    qp_state->destroyed = true;
+    queryPoolMap.erase(queryPool);
+}
+
+// The object with the given handle is being bound to memory described by mem_info.
+//  Record the binding by adding the handle to the matching bound_images / bound_buffers /
+//  bound_acceleration_structures set on the DEVICE_MEMORY_STATE. The memory offset, requirements,
+//  and is_linear parameters are accepted for interface parity but are not tracked here.
+void ValidationStateTracker::InsertMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info,
+                                               VkDeviceSize memoryOffset, VkMemoryRequirements memRequirements, bool is_linear) {
+    if (typed_handle.type == kVulkanObjectTypeImage) {
+        mem_info->bound_images.insert(typed_handle.Cast<VkImage>());
+    } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
+        mem_info->bound_buffers.insert(typed_handle.handle);
+    } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
+        mem_info->bound_acceleration_structures.insert(typed_handle.handle);
+    } else {
+        // Unsupported object type
+        assert(false);
+    }
+}
+
+void ValidationStateTracker::InsertImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
+                                                    VkMemoryRequirements mem_reqs, bool is_linear) {
+    InsertMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info, mem_offset, mem_reqs, is_linear);
+}
+
+void ValidationStateTracker::InsertBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info, VkDeviceSize mem_offset,
+                                                     const VkMemoryRequirements &mem_reqs) {
+    InsertMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info, mem_offset, mem_reqs, true);
+}
+
+void ValidationStateTracker::InsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info,
+                                                                    VkDeviceSize mem_offset, const VkMemoryRequirements &mem_reqs) {
+    InsertMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info, mem_offset, mem_reqs, true);
+}
+
+// This function will remove the handle-to-index mapping from the appropriate map.
+static void RemoveMemoryRange(const VulkanTypedHandle &typed_handle, DEVICE_MEMORY_STATE *mem_info) {
+    if (typed_handle.type == kVulkanObjectTypeImage) {
+        mem_info->bound_images.erase(typed_handle.Cast<VkImage>());
+    } else if (typed_handle.type == kVulkanObjectTypeBuffer) {
+        mem_info->bound_buffers.erase(typed_handle.handle);
+    } else if (typed_handle.type == kVulkanObjectTypeAccelerationStructureNV) {
+        mem_info->bound_acceleration_structures.erase(typed_handle.handle);
+    } else {
+        // Unsupported object type
+        assert(false);
+    }
+}
+
+void ValidationStateTracker::RemoveBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE *mem_info) {
+    RemoveMemoryRange(VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer), mem_info);
+}
+
+void ValidationStateTracker::RemoveImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE *mem_info) {
+    RemoveMemoryRange(VulkanTypedHandle(image, kVulkanObjectTypeImage), mem_info);
+}
+
+void ValidationStateTracker::RemoveAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE *mem_info) {
+    RemoveMemoryRange(VulkanTypedHandle(as, kVulkanObjectTypeAccelerationStructureNV), mem_info);
+}
+
+void ValidationStateTracker::UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset) {
+    BUFFER_STATE *buffer_state = GetBufferState(buffer);
+    if (buffer_state) {
+        // Track bound memory range information
+        auto mem_info = GetDevMemState(mem);
+        if (mem_info) {
+            InsertBufferMemoryRange(buffer, mem_info, memoryOffset, buffer_state->requirements);
+        }
+        // Track objects tied to memory
+        SetMemBinding(mem, buffer_state, memoryOffset, VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer));
+    }
+}
+
+void ValidationStateTracker::PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
+                                                            VkDeviceSize memoryOffset, VkResult result) {
+    if (VK_SUCCESS != result) return;
+    UpdateBindBufferMemoryState(buffer, mem, memoryOffset);
+}
+
+void ValidationStateTracker::PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount,
+                                                             const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
+    for (uint32_t i = 0; i < bindInfoCount; i++) {
+        UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
+    }
+}
+
+void ValidationStateTracker::PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount,
+                                                                const VkBindBufferMemoryInfoKHR *pBindInfos, VkResult result) {
+    for (uint32_t i = 0; i < bindInfoCount; i++) {
+        UpdateBindBufferMemoryState(pBindInfos[i].buffer, pBindInfos[i].memory, pBindInfos[i].memoryOffset);
+    }
+}
+
+void ValidationStateTracker::RecordGetBufferMemoryRequirementsState(VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements) {
+    BUFFER_STATE *buffer_state = GetBufferState(buffer);
+    if (buffer_state) {
+        buffer_state->requirements = *pMemoryRequirements;
+        buffer_state->memory_requirements_checked = true;
+    }
+}
+
+void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
+                                                                       VkMemoryRequirements *pMemoryRequirements) {
+    RecordGetBufferMemoryRequirementsState(buffer, pMemoryRequirements);
+}
+
+void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2(VkDevice device,
+                                                                        const VkBufferMemoryRequirementsInfo2KHR *pInfo,
+                                                                        VkMemoryRequirements2KHR *pMemoryRequirements) {
+    RecordGetBufferMemoryRequirementsState(pInfo->buffer, &pMemoryRequirements->memoryRequirements);
+}
+
+void ValidationStateTracker::PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device,
+                                                                           const VkBufferMemoryRequirementsInfo2KHR *pInfo,
+                                                                           VkMemoryRequirements2KHR *pMemoryRequirements) {
+    RecordGetBufferMemoryRequirementsState(pInfo->buffer, &pMemoryRequirements->memoryRequirements);
+}
+
+void ValidationStateTracker::RecordGetImageMemoryRequiementsState(VkImage image, VkMemoryRequirements *pMemoryRequirements) {
+    IMAGE_STATE *image_state = GetImageState(image);
+    if (image_state) {
+        image_state->requirements = *pMemoryRequirements;
+        image_state->memory_requirements_checked = true;
+    }
+}
+
+void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image,
+                                                                      VkMemoryRequirements *pMemoryRequirements) {
+    RecordGetImageMemoryRequiementsState(image, pMemoryRequirements);
+}
+
+void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
+                                                                       VkMemoryRequirements2 *pMemoryRequirements) {
+    RecordGetImageMemoryRequiementsState(pInfo->image, &pMemoryRequirements->memoryRequirements);
+}
+
+void ValidationStateTracker::PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device,
+                                                                          const VkImageMemoryRequirementsInfo2 *pInfo,
+                                                                          VkMemoryRequirements2 *pMemoryRequirements) {
+    RecordGetImageMemoryRequiementsState(pInfo->image, &pMemoryRequirements->memoryRequirements);
+}
+
+static void RecordGetImageSparseMemoryRequirementsState(IMAGE_STATE *image_state,
+                                                        VkSparseImageMemoryRequirements *sparse_image_memory_requirements) {
+    image_state->sparse_requirements.emplace_back(*sparse_image_memory_requirements);
+    if (sparse_image_memory_requirements->formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
+        image_state->sparse_metadata_required = true;
+    }
+}
+
+void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements(
+    VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
+    auto image_state = GetImageState(image);
+    image_state->get_sparse_reqs_called = true;
+    if (!pSparseMemoryRequirements) return;
+    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
+        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i]);
+    }
+}
+
+void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2(
+    VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
+    auto image_state = GetImageState(pInfo->image);
+    image_state->get_sparse_reqs_called = true;
+    if (!pSparseMemoryRequirements) return;
+    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
+        assert(!pSparseMemoryRequirements[i].pNext);  // TODO: If an extension is ever added here we need to handle it
+        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
+    }
+}
+
+void ValidationStateTracker::PostCallRecordGetImageSparseMemoryRequirements2KHR(
+    VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements) {
+    auto image_state = GetImageState(pInfo->image);
+    image_state->get_sparse_reqs_called = true;
+    if (!pSparseMemoryRequirements) return;
+    for (uint32_t i = 0; i < *pSparseMemoryRequirementCount; i++) {
+        assert(!pSparseMemoryRequirements[i].pNext);  // TODO: If an extension is ever added here we need to handle it
+        RecordGetImageSparseMemoryRequirementsState(image_state, &pSparseMemoryRequirements[i].memoryRequirements);
+    }
+}
+
+void ValidationStateTracker::PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
+                                                              const VkAllocationCallbacks *pAllocator) {
+    if (!shaderModule) return;
+    auto shader_module_state = GetShaderModuleState(shaderModule);
+    shader_module_state->destroyed = true;
+    shaderModuleMap.erase(shaderModule);
+}
+
+void ValidationStateTracker::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline,
+                                                          const VkAllocationCallbacks *pAllocator) {
+    if (!pipeline) return;
+    PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
+    const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
+    // Any bound cmd buffers are now invalid
+    InvalidateCommandBuffers(pipeline_state->cb_bindings, obj_struct);
+    pipeline_state->destroyed = true;
+    pipelineMap.erase(pipeline);
+}
+
+void ValidationStateTracker::PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
+                                                                const VkAllocationCallbacks *pAllocator) {
+    if (!pipelineLayout) return;
+    auto pipeline_layout_state = GetPipelineLayout(pipelineLayout);
+    pipeline_layout_state->destroyed = true;
+    pipelineLayoutMap.erase(pipelineLayout);
+}
+
+void ValidationStateTracker::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler,
+                                                         const VkAllocationCallbacks *pAllocator) {
+    if (!sampler) return;
+    SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
+    const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
+    if (sampler_state) {
+        // Any bound cmd buffers are now invalid
+        InvalidateCommandBuffers(sampler_state->cb_bindings, obj_struct);
+        sampler_state->destroyed = true;
+    }
+    samplerMap.erase(sampler);
+}
+
+void ValidationStateTracker::PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
+                                                                     const VkAllocationCallbacks *pAllocator) {
+    if (!descriptorSetLayout) return;
+    auto layout_it = descriptorSetLayoutMap.find(descriptorSetLayout);
+    if (layout_it != descriptorSetLayoutMap.end()) {
+        layout_it->second.get()->destroyed = true;
+        descriptorSetLayoutMap.erase(layout_it);
+    }
+}
+
+void ValidationStateTracker::PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
+                                                                const VkAllocationCallbacks *pAllocator) {
+    if (!descriptorPool) return;
+    DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
+    const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
+    if (desc_pool_state) {
+        // Any bound cmd buffers are now invalid
+        InvalidateCommandBuffers(desc_pool_state->cb_bindings, obj_struct);
+        // Free sets that were in this pool
+        for (auto ds : desc_pool_state->sets) {
+            FreeDescriptorSet(ds);
+        }
+        desc_pool_state->destroyed = true;
+        descriptorPoolMap.erase(descriptorPool);
+    }
+}
+
+// Free all command buffers in given list, removing all references/links to them using ResetCommandBufferState
+void ValidationStateTracker::FreeCommandBufferStates(COMMAND_POOL_STATE *pool_state, const uint32_t command_buffer_count,
+                                                     const VkCommandBuffer *command_buffers) {
+    for (uint32_t i = 0; i < command_buffer_count; i++) {
+        auto cb_state = GetCBState(command_buffers[i]);
+        // Remove references to command buffer's state and delete
+        if (cb_state) {
+            // reset prior to delete, removing various references to it.
+            // TODO: fix this, it's insane.
+            ResetCommandBufferState(cb_state->commandBuffer);
+            // Remove the cb_state's references from COMMAND_POOL_STATEs
+            pool_state->commandBuffers.erase(command_buffers[i]);
+            // Remove the cb debug labels
+            EraseCmdDebugUtilsLabel(report_data, cb_state->commandBuffer);
+            // Remove CBState from CB map
+            cb_state->destroyed = true;
+            commandBufferMap.erase(cb_state->commandBuffer);
+        }
+    }
+}
+
+void ValidationStateTracker::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
+                                                             uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) {
+    auto pPool = GetCommandPoolState(commandPool);
+    FreeCommandBufferStates(pPool, commandBufferCount, pCommandBuffers);
+}
+
+void ValidationStateTracker::PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
+                                                             const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool,
+                                                             VkResult result) {
+    if (VK_SUCCESS != result) return;
+    auto cmd_pool_state = std::make_shared<COMMAND_POOL_STATE>();
+    cmd_pool_state->createFlags = pCreateInfo->flags;
+    cmd_pool_state->queueFamilyIndex = pCreateInfo->queueFamilyIndex;
+    commandPoolMap[*pCommandPool] = std::move(cmd_pool_state);
+}
+
+void ValidationStateTracker::PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
+                                                           const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool,
+                                                           VkResult result) {
+    if (VK_SUCCESS != result) return;
+    auto query_pool_state = std::make_shared<QUERY_POOL_STATE>();
+    query_pool_state->createInfo = *pCreateInfo;
+    query_pool_state->pool = *pQueryPool;
+    if (pCreateInfo->queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
+        const auto *perf = lvl_find_in_chain<VkQueryPoolPerformanceCreateInfoKHR>(pCreateInfo->pNext);
+        const QUEUE_FAMILY_PERF_COUNTERS &counters = *physical_device_state->perf_counters[perf->queueFamilyIndex];
+
+        for (uint32_t i = 0; i < perf->counterIndexCount; i++) {
+            const auto &counter = counters.counters[perf->pCounterIndices[i]];
+            switch (counter.scope) {
+                case VK_QUERY_SCOPE_COMMAND_BUFFER_KHR:
+                    query_pool_state->has_perf_scope_command_buffer = true;
+                    break;
+                case VK_QUERY_SCOPE_RENDER_PASS_KHR:
+                    query_pool_state->has_perf_scope_render_pass = true;
+                    break;
+                default:
+                    break;
+            }
+        }
+
+        DispatchGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physical_device_state->phys_device, perf,
+                                                                      &query_pool_state->n_performance_passes);
+    }
+
+    queryPoolMap[*pQueryPool] = std::move(query_pool_state);
+
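+    // Seed every query in the new pool with an unknown state.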
+    QueryObject query_obj{*pQueryPool, 0u};
+    for (uint32_t i = 0; i < pCreateInfo->queryCount; ++i) {
+        query_obj.query = i;
+        queryToStateMap[query_obj] = QUERYSTATE_UNKNOWN;
+    }
+}
+
+void ValidationStateTracker::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
+                                                             const VkAllocationCallbacks *pAllocator) {
+    if (!commandPool) return;
+    COMMAND_POOL_STATE *cp_state = GetCommandPoolState(commandPool);
+    // Remove cmdpool from cmdpoolmap, after freeing layer data for the command buffers
+    // "When a pool is destroyed, all command buffers allocated from the pool are freed."
+    if (cp_state) {
+        // Create a vector, as FreeCommandBufferStates deletes from cp_state->commandBuffers during iteration.
+        std::vector<VkCommandBuffer> cb_vec{cp_state->commandBuffers.begin(), cp_state->commandBuffers.end()};
+        FreeCommandBufferStates(cp_state, static_cast<uint32_t>(cb_vec.size()), cb_vec.data());
+        cp_state->destroyed = true;
+        commandPoolMap.erase(commandPool);
+    }
+}
+
+void ValidationStateTracker::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool,
+                                                            VkCommandPoolResetFlags flags, VkResult result) {
+    if (VK_SUCCESS != result) return;
+    // Reset all of the CBs allocated from this pool
+    auto command_pool_state = GetCommandPoolState(commandPool);
+    for (auto cmdBuffer : command_pool_state->commandBuffers) {
+        ResetCommandBufferState(cmdBuffer);
+    }
+}
+
+void ValidationStateTracker::PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
+                                                       VkResult result) {
+    for (uint32_t i = 0; i < fenceCount; ++i) {
+        auto pFence = GetFenceState(pFences[i]);
+        if (pFence) {
+            if (pFence->scope == kSyncScopeInternal) {
+                pFence->state = FENCE_UNSIGNALED;
+            } else if (pFence->scope == kSyncScopeExternalTemporary) {
+                pFence->scope = kSyncScopeInternal;
+            }
+        }
+    }
+}
+
+// For given cb_nodes, invalidate them and track object causing invalidation.
+// InvalidateCommandBuffers and InvalidateLinkedCommandBuffers are essentially
+// the same, except one takes a map and one takes a set, and InvalidateCommandBuffers
+// can also unlink objects from command buffers.
+void ValidationStateTracker::InvalidateCommandBuffers(small_unordered_map<CMD_BUFFER_STATE *, int, 8> &cb_nodes,
+                                                      const VulkanTypedHandle &obj, bool unlink) {
+    for (const auto &cb_node_pair : cb_nodes) {
+        auto &cb_node = cb_node_pair.first;
+        if (cb_node->state == CB_RECORDING) {
+            cb_node->state = CB_INVALID_INCOMPLETE;
+        } else if (cb_node->state == CB_RECORDED) {
+            cb_node->state = CB_INVALID_COMPLETE;
+        }
+        cb_node->broken_bindings.push_back(obj);
+
+        // if secondary, then propagate the invalidation to the primaries that will call us.
+        if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
+            InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
+        }
+        if (unlink) {
+            int index = cb_node_pair.second;
+            assert(cb_node->object_bindings[index] == obj);
+            cb_node->object_bindings[index] = VulkanTypedHandle();
+        }
+    }
+    if (unlink) {
+        cb_nodes.clear();
+    }
+}
+
+void ValidationStateTracker::InvalidateLinkedCommandBuffers(std::unordered_set<CMD_BUFFER_STATE *> &cb_nodes,
+                                                            const VulkanTypedHandle &obj) {
+    for (auto cb_node : cb_nodes) {
+        if (cb_node->state == CB_RECORDING) {
+            cb_node->state = CB_INVALID_INCOMPLETE;
+        } else if (cb_node->state == CB_RECORDED) {
+            cb_node->state = CB_INVALID_COMPLETE;
+        }
+        cb_node->broken_bindings.push_back(obj);
+
+        // If this is a secondary command buffer, propagate the invalidation to the primaries that will call it.
+        if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
+            InvalidateLinkedCommandBuffers(cb_node->linkedCommandBuffers, obj);
+        }
+    }
+}
+
+void ValidationStateTracker::PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
+                                                             const VkAllocationCallbacks *pAllocator) {
+    if (!framebuffer) return;
+    FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
+    const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
+    InvalidateCommandBuffers(framebuffer_state->cb_bindings, obj_struct);
+    framebuffer_state->destroyed = true;
+    frameBufferMap.erase(framebuffer);
+}
+
+void ValidationStateTracker::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
+                                                            const VkAllocationCallbacks *pAllocator) {
+    if (!renderPass) return;
+    RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
+    const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
+    InvalidateCommandBuffers(rp_state->cb_bindings, obj_struct);
+    rp_state->destroyed = true;
+    renderPassMap.erase(renderPass);
+}
+
+void ValidationStateTracker::PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
+                                                       const VkAllocationCallbacks *pAllocator, VkFence *pFence, VkResult result) {
+    if (VK_SUCCESS != result) return;
+    auto fence_state = std::make_shared<FENCE_STATE>();
+    fence_state->fence = *pFence;
+    fence_state->createInfo = *pCreateInfo;
+    fence_state->state = (pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT) ? FENCE_RETIRED : FENCE_UNSIGNALED;
+    fenceMap[*pFence] = std::move(fence_state);
+}
+
+bool ValidationStateTracker::PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+                                                                    const VkGraphicsPipelineCreateInfo *pCreateInfos,
+                                                                    const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
+                                                                    void *cgpl_state_data) const {
+    // Set up the state that CoreChecks, gpu_validation and later StateTracker Record will use.
+    create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
+    cgpl_state->pCreateInfos = pCreateInfos;  // GPU validation can alter this, so we have to set a default value for the Chassis
+    cgpl_state->pipe_state.reserve(count);
+    for (uint32_t i = 0; i < count; i++) {
+        cgpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
+        (cgpl_state->pipe_state)[i]->initGraphicsPipeline(this, &pCreateInfos[i], GetRenderPassShared(pCreateInfos[i].renderPass));
+        (cgpl_state->pipe_state)[i]->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
+    }
+    return false;
+}
+
+void ValidationStateTracker::PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+                                                                   const VkGraphicsPipelineCreateInfo *pCreateInfos,
+                                                                   const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
+                                                                   VkResult result, void *cgpl_state_data) {
+    create_graphics_pipeline_api_state *cgpl_state = reinterpret_cast<create_graphics_pipeline_api_state *>(cgpl_state_data);
+    // This API may create pipelines regardless of the return value
+    for (uint32_t i = 0; i < count; i++) {
+        if (pPipelines[i] != VK_NULL_HANDLE) {
+            (cgpl_state->pipe_state)[i]->pipeline = pPipelines[i];
+            pipelineMap[pPipelines[i]] = std::move((cgpl_state->pipe_state)[i]);
+        }
+    }
+    cgpl_state->pipe_state.clear();
+}
+
+bool ValidationStateTracker::PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+                                                                   const VkComputePipelineCreateInfo *pCreateInfos,
+                                                                   const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
+                                                                   void *ccpl_state_data) const {
+    auto *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
+    ccpl_state->pCreateInfos = pCreateInfos;  // GPU validation can alter this, so we have to set a default value for the Chassis
+    ccpl_state->pipe_state.reserve(count);
+    for (uint32_t i = 0; i < count; i++) {
+        // Create and initialize internal tracking data structure
+        ccpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
+        ccpl_state->pipe_state.back()->initComputePipeline(this, &pCreateInfos[i]);
+        ccpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
+    }
+    return false;
+}
+
+void ValidationStateTracker::PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+                                                                  const VkComputePipelineCreateInfo *pCreateInfos,
+                                                                  const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines,
+                                                                  VkResult result, void *ccpl_state_data) {
+    create_compute_pipeline_api_state *ccpl_state = reinterpret_cast<create_compute_pipeline_api_state *>(ccpl_state_data);
+
+    // This API may create pipelines regardless of the return value
+    for (uint32_t i = 0; i < count; i++) {
+        if (pPipelines[i] != VK_NULL_HANDLE) {
+            (ccpl_state->pipe_state)[i]->pipeline = pPipelines[i];
+            pipelineMap[pPipelines[i]] = std::move((ccpl_state->pipe_state)[i]);
+        }
+    }
+    ccpl_state->pipe_state.clear();
+}
+
+bool ValidationStateTracker::PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache,
+                                                                        uint32_t count,
+                                                                        const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
+                                                                        const VkAllocationCallbacks *pAllocator,
+                                                                        VkPipeline *pPipelines, void *crtpl_state_data) const {
+    auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
+    crtpl_state->pipe_state.reserve(count);
+    for (uint32_t i = 0; i < count; i++) {
+        // Create and initialize internal tracking data structure
+        crtpl_state->pipe_state.push_back(std::make_shared<PIPELINE_STATE>());
+        crtpl_state->pipe_state.back()->initRayTracingPipelineNV(this, &pCreateInfos[i]);
+        crtpl_state->pipe_state.back()->pipeline_layout = GetPipelineLayoutShared(pCreateInfos[i].layout);
+    }
+    return false;
+}
+
+void ValidationStateTracker::PostCallRecordCreateRayTracingPipelinesNV(
+    VkDevice device, VkPipelineCache pipelineCache, uint32_t count, const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
+    const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines, VkResult result, void *crtpl_state_data) {
+    auto *crtpl_state = reinterpret_cast<create_ray_tracing_pipeline_api_state *>(crtpl_state_data);
+    // This API may create pipelines regardless of the return value
+    for (uint32_t i = 0; i < count; i++) {
+        if (pPipelines[i] != VK_NULL_HANDLE) {
+            (crtpl_state->pipe_state)[i]->pipeline = pPipelines[i];
+            pipelineMap[pPipelines[i]] = std::move((crtpl_state->pipe_state)[i]);
+        }
+    }
+    crtpl_state->pipe_state.clear();
+}
+
+void ValidationStateTracker::PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
+                                                         const VkAllocationCallbacks *pAllocator, VkSampler *pSampler,
+                                                         VkResult result) {
+    samplerMap[*pSampler] = std::make_shared<SAMPLER_STATE>(pSampler, pCreateInfo);
+}
+
+void ValidationStateTracker::PostCallRecordCreateDescriptorSetLayout(VkDevice device,
+                                                                     const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
+                                                                     const VkAllocationCallbacks *pAllocator,
+                                                                     VkDescriptorSetLayout *pSetLayout, VkResult result) {
+    if (VK_SUCCESS != result) return;
+    descriptorSetLayoutMap[*pSetLayout] = std::make_shared<cvdescriptorset::DescriptorSetLayout>(pCreateInfo, *pSetLayout);
+}
+
+// For repeatable sorting, not very useful for "memory in range" search
+struct PushConstantRangeCompare {
+    bool operator()(const VkPushConstantRange *lhs, const VkPushConstantRange *rhs) const {
+        if (lhs->offset == rhs->offset) {
+            if (lhs->size == rhs->size) {
+                // The comparison is arbitrary, but avoids false aliasing by comparing all fields.
+                return lhs->stageFlags < rhs->stageFlags;
+            }
+            // If the offsets are the same then sorting by the end of range is useful for validation
+            return lhs->size < rhs->size;
+        }
+        return lhs->offset < rhs->offset;
+    }
+};
+
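+// Dictionary of canonical form of push constant range lists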
+static PushConstantRangesDict push_constant_ranges_dict;
+
+PushConstantRangesId GetCanonicalId(const VkPipelineLayoutCreateInfo *info) {
+    if (!info->pPushConstantRanges) {
+        // Hand back the empty entry (creating as needed)...
+        return push_constant_ranges_dict.look_up(PushConstantRanges());
+    }
+
+    // Sort the input ranges to ensure equivalent ranges map to the same id
+    std::set<const VkPushConstantRange *, PushConstantRangeCompare> sorted;
+    for (uint32_t i = 0; i < info->pushConstantRangeCount; i++) {
+        sorted.insert(info->pPushConstantRanges + i);
+    }
+
+    PushConstantRanges ranges;
+    ranges.reserve(sorted.size());
+    for (const auto range : sorted) {
+        ranges.emplace_back(*range);
+    }
+    return push_constant_ranges_dict.look_up(std::move(ranges));
+}
+
+// Dictionary of canonical form of the pipeline layout's list of descriptor set layouts
+static PipelineLayoutSetLayoutsDict pipeline_layout_set_layouts_dict;
+
+// Dictionary of canonical form of the "compatible for set" records
+static PipelineLayoutCompatDict pipeline_layout_compat_dict;
+
+static PipelineLayoutCompatId GetCanonicalId(const uint32_t set_index, const PushConstantRangesId pcr_id,
+                                             const PipelineLayoutSetLayoutsId set_layouts_id) {
+    return pipeline_layout_compat_dict.look_up(PipelineLayoutCompatDef(set_index, pcr_id, set_layouts_id));
+}
+
+void ValidationStateTracker::PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
+                                                                const VkAllocationCallbacks *pAllocator,
+                                                                VkPipelineLayout *pPipelineLayout, VkResult result) {
+    if (VK_SUCCESS != result) return;
+
+    auto pipeline_layout_state = std::make_shared<PIPELINE_LAYOUT_STATE>();
+    pipeline_layout_state->layout = *pPipelineLayout;
+    pipeline_layout_state->set_layouts.resize(pCreateInfo->setLayoutCount);
+    PipelineLayoutSetLayoutsDef set_layouts(pCreateInfo->setLayoutCount);
+    for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
+        pipeline_layout_state->set_layouts[i] = GetDescriptorSetLayoutShared(pCreateInfo->pSetLayouts[i]);
+        set_layouts[i] = pipeline_layout_state->set_layouts[i]->GetLayoutId();
+    }
+
+    // Get canonical form IDs for the "compatible for set" contents
+    pipeline_layout_state->push_constant_ranges = GetCanonicalId(pCreateInfo);
+    auto set_layouts_id = pipeline_layout_set_layouts_dict.look_up(set_layouts);
+    pipeline_layout_state->compat_for_set.reserve(pCreateInfo->setLayoutCount);
+
+    // Create table of "compatible for set N" canonical forms for trivial accept validation
+    for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; ++i) {
+        pipeline_layout_state->compat_for_set.emplace_back(
+            GetCanonicalId(i, pipeline_layout_state->push_constant_ranges, set_layouts_id));
+    }
+    pipelineLayoutMap[*pPipelineLayout] = std::move(pipeline_layout_state);
+}
+
+void ValidationStateTracker::PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
+                                                                const VkAllocationCallbacks *pAllocator,
+                                                                VkDescriptorPool *pDescriptorPool, VkResult result) {
+    if (VK_SUCCESS != result) return;
+    descriptorPoolMap[*pDescriptorPool] = std::make_shared<DESCRIPTOR_POOL_STATE>(*pDescriptorPool, pCreateInfo);
+}
+
+void ValidationStateTracker::PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
+                                                               VkDescriptorPoolResetFlags flags, VkResult result) {
+    if (VK_SUCCESS != result) return;
+    DESCRIPTOR_POOL_STATE *pPool = GetDescriptorPoolState(descriptorPool);
+    // TODO: validate flags
+    // For every set allocated from this pool, clear it, remove it from setMap, and free the cvdescriptorset::DescriptorSet
+    for (auto ds : pPool->sets) {
+        FreeDescriptorSet(ds);
+    }
+    pPool->sets.clear();
+    // Reset available count for each type and available sets for this pool
+    for (auto it = pPool->availableDescriptorTypeCount.begin(); it != pPool->availableDescriptorTypeCount.end(); ++it) {
+        pPool->availableDescriptorTypeCount[it->first] = pPool->maxDescriptorTypeCount[it->first];
+    }
+    pPool->availableSets = pPool->maxSets;
+}
+
+bool ValidationStateTracker::PreCallValidateAllocateDescriptorSets(VkDevice device,
+                                                                   const VkDescriptorSetAllocateInfo *pAllocateInfo,
+                                                                   VkDescriptorSet *pDescriptorSets, void *ads_state_data) const {
+    // Always update common data
+    cvdescriptorset::AllocateDescriptorSetsData *ads_state =
+        reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
+    UpdateAllocateDescriptorSetsData(pAllocateInfo, ads_state);
+
+    return false;
+}
+
+// The allocation succeeded and the call was passed down the chain, so update state for the newly allocated descriptor sets
+void ValidationStateTracker::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
+                                                                  VkDescriptorSet *pDescriptorSets, VkResult result,
+                                                                  void *ads_state_data) {
+    if (VK_SUCCESS != result) return;
+    // All the updates are contained in a single cvdescriptorset function
+    cvdescriptorset::AllocateDescriptorSetsData *ads_state =
+        reinterpret_cast<cvdescriptorset::AllocateDescriptorSetsData *>(ads_state_data);
+    PerformAllocateDescriptorSets(pAllocateInfo, pDescriptorSets, ads_state);
+}
+
+void ValidationStateTracker::PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
+                                                             const VkDescriptorSet *pDescriptorSets) {
+    DESCRIPTOR_POOL_STATE *pool_state = GetDescriptorPoolState(descriptorPool);
+    // Update available descriptor sets in pool
+    pool_state->availableSets += count;
+
+    // For each freed descriptor set, add its resources back into the pool as available and remove it from the pool and setMap
+    for (uint32_t i = 0; i < count; ++i) {
+        if (pDescriptorSets[i] != VK_NULL_HANDLE) {
+            auto descriptor_set = setMap[pDescriptorSets[i]].get();
+            uint32_t type_index = 0, descriptor_count = 0;
+            for (uint32_t j = 0; j < descriptor_set->GetBindingCount(); ++j) {
+                type_index = static_cast<uint32_t>(descriptor_set->GetTypeFromIndex(j));
+                descriptor_count = descriptor_set->GetDescriptorCountFromIndex(j);
+                pool_state->availableDescriptorTypeCount[type_index] += descriptor_count;
+            }
+            FreeDescriptorSet(descriptor_set);
+            pool_state->sets.erase(descriptor_set);
+        }
+    }
+}
+
+void ValidationStateTracker::PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
+                                                               const VkWriteDescriptorSet *pDescriptorWrites,
+                                                               uint32_t descriptorCopyCount,
+                                                               const VkCopyDescriptorSet *pDescriptorCopies) {
+    cvdescriptorset::PerformUpdateDescriptorSets(this, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount,
+                                                 pDescriptorCopies);
+}
+
+void ValidationStateTracker::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pCreateInfo,
+                                                                  VkCommandBuffer *pCommandBuffer, VkResult result) {
+    if (VK_SUCCESS != result) return;
+    auto pPool = GetCommandPoolShared(pCreateInfo->commandPool);
+    if (pPool) {
+        for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
+            // Add command buffer to its commandPool map
+            pPool->commandBuffers.insert(pCommandBuffer[i]);
+            auto pCB = std::make_shared<CMD_BUFFER_STATE>();
+            pCB->createInfo = *pCreateInfo;
+            pCB->device = device;
+            pCB->command_pool = pPool;
+            // Add command buffer to map
+            commandBufferMap[pCommandBuffer[i]] = std::move(pCB);
+            ResetCommandBufferState(pCommandBuffer[i]);
+        }
+    }
+}
+
+// Add bindings between the given cmd buffer & framebuffer and the framebuffer's children
+void ValidationStateTracker::AddFramebufferBinding(CMD_BUFFER_STATE *cb_state, FRAMEBUFFER_STATE *fb_state) {
+    AddCommandBufferBinding(fb_state->cb_bindings, VulkanTypedHandle(fb_state->framebuffer, kVulkanObjectTypeFramebuffer, fb_state),
+                            cb_state);
+    // If imageless fb, skip fb binding
+    if (fb_state->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) return;
+    const uint32_t attachmentCount = fb_state->createInfo.attachmentCount;
+    for (uint32_t attachment = 0; attachment < attachmentCount; ++attachment) {
+        auto view_state = GetAttachmentImageViewState(fb_state, attachment);
+        if (view_state) {
+            AddCommandBufferBindingImageView(cb_state, view_state);
+        }
+    }
+}
+
+void ValidationStateTracker::PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer,
+                                                             const VkCommandBufferBeginInfo *pBeginInfo) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    if (!cb_state) return;
+    if (cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
+        // Secondary Command Buffer
+        const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;
+        if (pInfo) {
+            if (pBeginInfo->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) {
+                assert(pInfo->renderPass);
+                auto framebuffer = GetFramebufferState(pInfo->framebuffer);
+                if (framebuffer) {
+                    // Connect this framebuffer and its children to this cmdBuffer
+                    AddFramebufferBinding(cb_state, framebuffer);
+                }
+            }
+        }
+    }
+    if (CB_RECORDED == cb_state->state || CB_INVALID_COMPLETE == cb_state->state) {
+        ResetCommandBufferState(commandBuffer);
+    }
+    // Set updated state here in case implicit reset occurs above
+    cb_state->state = CB_RECORDING;
+    cb_state->beginInfo = *pBeginInfo;
+    if (cb_state->beginInfo.pInheritanceInfo) {
+        cb_state->inheritanceInfo = *(cb_state->beginInfo.pInheritanceInfo);
+        cb_state->beginInfo.pInheritanceInfo = &cb_state->inheritanceInfo;
+        // If this is a secondary command buffer that inherits render pass state, update the items we should inherit.
+        if ((cb_state->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
+            (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
+            cb_state->activeRenderPass = GetRenderPassState(cb_state->beginInfo.pInheritanceInfo->renderPass);
+            cb_state->activeSubpass = cb_state->beginInfo.pInheritanceInfo->subpass;
+            cb_state->activeFramebuffer = cb_state->beginInfo.pInheritanceInfo->framebuffer;
+            cb_state->framebuffers.insert(cb_state->beginInfo.pInheritanceInfo->framebuffer);
+        }
+    }
+
+    auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
+    if (chained_device_group_struct) {
+        cb_state->initial_device_mask = chained_device_group_struct->deviceMask;
+    } else {
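+        // No VkDeviceGroupCommandBufferBeginInfo in the pNext chain, so default to a mask covering every physical device.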
+        cb_state->initial_device_mask = (1 << physical_device_count) - 1;
+    }
+
+    cb_state->performance_lock_acquired = performance_lock_acquired;
+}
+
+void ValidationStateTracker::PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    if (!cb_state) return;
+    // Cached validation is specific to a specific recording of a specific command buffer.
+    for (auto descriptor_set : cb_state->validated_descriptor_sets) {
+        descriptor_set->ClearCachedValidation(cb_state);
+    }
+    cb_state->validated_descriptor_sets.clear();
+    if (VK_SUCCESS == result) {
+        cb_state->state = CB_RECORDED;
+    }
+}
+
+void ValidationStateTracker::PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags,
+                                                              VkResult result) {
+    if (VK_SUCCESS == result) {
+        ResetCommandBufferState(commandBuffer);
+    }
+}
+
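+// Compute which pipeline state is static: start from "all state set" and clear the bit for each dynamic state listed.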
+CBStatusFlags MakeStaticStateMask(VkPipelineDynamicStateCreateInfo const *ds) {
+    // initially assume everything is static state
+    CBStatusFlags flags = CBSTATUS_ALL_STATE_SET;
+
+    if (ds) {
+        for (uint32_t i = 0; i < ds->dynamicStateCount; i++) {
+            switch (ds->pDynamicStates[i]) {
+                case VK_DYNAMIC_STATE_LINE_WIDTH:
+                    flags &= ~CBSTATUS_LINE_WIDTH_SET;
+                    break;
+                case VK_DYNAMIC_STATE_DEPTH_BIAS:
+                    flags &= ~CBSTATUS_DEPTH_BIAS_SET;
+                    break;
+                case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
+                    flags &= ~CBSTATUS_BLEND_CONSTANTS_SET;
+                    break;
+                case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
+                    flags &= ~CBSTATUS_DEPTH_BOUNDS_SET;
+                    break;
+                case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
+                    flags &= ~CBSTATUS_STENCIL_READ_MASK_SET;
+                    break;
+                case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
+                    flags &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
+                    break;
+                case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
+                    flags &= ~CBSTATUS_STENCIL_REFERENCE_SET;
+                    break;
+                case VK_DYNAMIC_STATE_SCISSOR:
+                    flags &= ~CBSTATUS_SCISSOR_SET;
+                    break;
+                case VK_DYNAMIC_STATE_VIEWPORT:
+                    flags &= ~CBSTATUS_VIEWPORT_SET;
+                    break;
+                case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
+                    flags &= ~CBSTATUS_EXCLUSIVE_SCISSOR_SET;
+                    break;
+                case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
+                    flags &= ~CBSTATUS_SHADING_RATE_PALETTE_SET;
+                    break;
+                case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
+                    flags &= ~CBSTATUS_LINE_STIPPLE_SET;
+                    break;
+                case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV:
+                    flags &= ~CBSTATUS_VIEWPORT_W_SCALING_SET;
+                    break;
+                default:
+                    break;
+            }
+        }
+    }
+
+    return flags;
+}
+
+// Validation cache:
+// CV is the bottommost implementor of this extension. Don't pass calls down.
+// utility function to set collective state for pipeline
+void SetPipelineState(PIPELINE_STATE *pPipe) {
+    // If any attachment used by this pipeline has blendEnable and uses a constant blend factor, note that blend constants are needed
+    if (pPipe->graphicsPipelineCI.pColorBlendState) {
+        for (size_t i = 0; i < pPipe->attachments.size(); ++i) {
+            if (VK_TRUE == pPipe->attachments[i].blendEnable) {
+                if (((pPipe->attachments[i].dstAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
+                     (pPipe->attachments[i].dstAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
+                    ((pPipe->attachments[i].dstColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
+                     (pPipe->attachments[i].dstColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
+                    ((pPipe->attachments[i].srcAlphaBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
+                     (pPipe->attachments[i].srcAlphaBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA)) ||
+                    ((pPipe->attachments[i].srcColorBlendFactor >= VK_BLEND_FACTOR_CONSTANT_COLOR) &&
+                     (pPipe->attachments[i].srcColorBlendFactor <= VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA))) {
+                    pPipe->blendConstantsEnabled = true;
+                }
+            }
+        }
+    }
+}
+
+void ValidationStateTracker::PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
+                                                          VkPipeline pipeline) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    assert(cb_state);
+
+    auto pipe_state = GetPipelineState(pipeline);
+    if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
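+        // Clear the bits the previous pipeline supplied statically, recompute the static mask for the new pipeline, and mark that state as set.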
+        cb_state->status &= ~cb_state->static_status;
+        cb_state->static_status = MakeStaticStateMask(pipe_state->graphicsPipelineCI.ptr()->pDynamicState);
+        cb_state->status |= cb_state->static_status;
+    }
+    ResetCommandBufferPushConstantDataIfIncompatible(cb_state, pipe_state->pipeline_layout->layout);
+    cb_state->lastBound[pipelineBindPoint].pipeline_state = pipe_state;
+    SetPipelineState(pipe_state);
+    AddCommandBufferBinding(pipe_state->cb_bindings, VulkanTypedHandle(pipeline, kVulkanObjectTypePipeline), cb_state);
+}
+
+void ValidationStateTracker::PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
+                                                         uint32_t viewportCount, const VkViewport *pViewports) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
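+    // Track the viewports that have been set with one bit per viewport index.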
+    cb_state->viewportMask |= ((1u << viewportCount) - 1u) << firstViewport;
+    cb_state->status |= CBSTATUS_VIEWPORT_SET;
+}
+
+void ValidationStateTracker::PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
+                                                                   uint32_t exclusiveScissorCount,
+                                                                   const VkRect2D *pExclusiveScissors) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    // TODO: We don't have VUIDs for validating that all exclusive scissors have been set.
+    // cb_state->exclusiveScissorMask |= ((1u << exclusiveScissorCount) - 1u) << firstExclusiveScissor;
+    cb_state->status |= CBSTATUS_EXCLUSIVE_SCISSOR_SET;
+}
+
+void ValidationStateTracker::PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView,
+                                                                    VkImageLayout imageLayout) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+
+    if (imageView != VK_NULL_HANDLE) {
+        auto view_state = GetImageViewState(imageView);
+        AddCommandBufferBindingImageView(cb_state, view_state);
+    }
+}
+
+void ValidationStateTracker::PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
+                                                                             uint32_t viewportCount,
+                                                                             const VkShadingRatePaletteNV *pShadingRatePalettes) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    // TODO: We don't have VUIDs for validating that all shading rate palettes have been set.
+    // cb_state->shadingRatePaletteMask |= ((1u << viewportCount) - 1u) << firstViewport;
+    cb_state->status |= CBSTATUS_SHADING_RATE_PALETTE_SET;
+}
+
+void ValidationStateTracker::PostCallRecordCreateAccelerationStructureNV(VkDevice device,
+                                                                         const VkAccelerationStructureCreateInfoNV *pCreateInfo,
+                                                                         const VkAllocationCallbacks *pAllocator,
+                                                                         VkAccelerationStructureNV *pAccelerationStructure,
+                                                                         VkResult result) {
+    if (VK_SUCCESS != result) return;
+    auto as_state = std::make_shared<ACCELERATION_STRUCTURE_STATE>(*pAccelerationStructure, pCreateInfo);
+
+    // Query the requirements up front in case the application doesn't, to avoid querying at bind/validation time
+    VkAccelerationStructureMemoryRequirementsInfoNV as_memory_requirements_info = {};
+    as_memory_requirements_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
+    as_memory_requirements_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
+    as_memory_requirements_info.accelerationStructure = as_state->acceleration_structure;
+    DispatchGetAccelerationStructureMemoryRequirementsNV(device, &as_memory_requirements_info, &as_state->memory_requirements);
+
+    VkAccelerationStructureMemoryRequirementsInfoNV scratch_memory_req_info = {};
+    scratch_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
+    scratch_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
+    scratch_memory_req_info.accelerationStructure = as_state->acceleration_structure;
+    DispatchGetAccelerationStructureMemoryRequirementsNV(device, &scratch_memory_req_info,
+                                                         &as_state->build_scratch_memory_requirements);
+
+    VkAccelerationStructureMemoryRequirementsInfoNV update_memory_req_info = {};
+    update_memory_req_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
+    update_memory_req_info.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV;
+    update_memory_req_info.accelerationStructure = as_state->acceleration_structure;
+    DispatchGetAccelerationStructureMemoryRequirementsNV(device, &update_memory_req_info,
+                                                         &as_state->update_scratch_memory_requirements);
+
+    accelerationStructureMap[*pAccelerationStructure] = std::move(as_state);
+}
+
+void ValidationStateTracker::PostCallRecordGetAccelerationStructureMemoryRequirementsNV(
+    VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV *pInfo, VkMemoryRequirements2KHR *pMemoryRequirements) {
+    ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(pInfo->accelerationStructure);
+    if (as_state != nullptr) {
+        if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV) {
+            as_state->memory_requirements = *pMemoryRequirements;
+            as_state->memory_requirements_checked = true;
+        } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV) {
+            as_state->build_scratch_memory_requirements = *pMemoryRequirements;
+            as_state->build_scratch_memory_requirements_checked = true;
+        } else if (pInfo->type == VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV) {
+            as_state->update_scratch_memory_requirements = *pMemoryRequirements;
+            as_state->update_scratch_memory_requirements_checked = true;
+        }
+    }
+}
+
+void ValidationStateTracker::PostCallRecordBindAccelerationStructureMemoryNV(
+    VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV *pBindInfos, VkResult result) {
+    if (VK_SUCCESS != result) return;
+    for (uint32_t i = 0; i < bindInfoCount; i++) {
+        const VkBindAccelerationStructureMemoryInfoNV &info = pBindInfos[i];
+
+        ACCELERATION_STRUCTURE_STATE *as_state = GetAccelerationStructureState(info.accelerationStructure);
+        if (as_state) {
+            // Track bound memory range information
+            auto mem_info = GetDevMemState(info.memory);
+            if (mem_info) {
+                InsertAccelerationStructureMemoryRange(info.accelerationStructure, mem_info, info.memoryOffset,
+                                                       as_state->requirements);
+            }
+            // Track objects tied to memory
+            SetMemBinding(info.memory, as_state, info.memoryOffset,
+                          VulkanTypedHandle(info.accelerationStructure, kVulkanObjectTypeAccelerationStructureNV));
+
+            // GPU validation of top level acceleration structure building needs acceleration structure handles.
+            if (enabled.gpu_validation) {
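+                // 8 == sizeof(uint64_t); the NV acceleration structure handle is returned as a 64-bit value.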
+                DispatchGetAccelerationStructureHandleNV(device, info.accelerationStructure, 8, &as_state->opaque_handle);
+            }
+        }
+    }
+}
+
+void ValidationStateTracker::PostCallRecordCmdBuildAccelerationStructureNV(
+    VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset,
+    VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    if (cb_state == nullptr) {
+        return;
+    }
+
+    ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
+    ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
+    if (dst_as_state != nullptr) {
+        dst_as_state->built = true;
+        dst_as_state->build_info.initialize(pInfo);
+        AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
+    }
+    if (src_as_state != nullptr) {
+        AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
+    }
+    cb_state->hasBuildAccelerationStructureCmd = true;
+}
+
+void ValidationStateTracker::PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer,
+                                                                          VkAccelerationStructureNV dst,
+                                                                          VkAccelerationStructureNV src,
+                                                                          VkCopyAccelerationStructureModeNV mode) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    if (cb_state) {
+        ACCELERATION_STRUCTURE_STATE *src_as_state = GetAccelerationStructureState(src);
+        ACCELERATION_STRUCTURE_STATE *dst_as_state = GetAccelerationStructureState(dst);
+        if (dst_as_state != nullptr && src_as_state != nullptr) {
+            dst_as_state->built = true;
+            dst_as_state->build_info = src_as_state->build_info;
+            AddCommandBufferBindingAccelerationStructure(cb_state, dst_as_state);
+            AddCommandBufferBindingAccelerationStructure(cb_state, src_as_state);
+        }
+    }
+}
+
+void ValidationStateTracker::PreCallRecordDestroyAccelerationStructureNV(VkDevice device,
+                                                                         VkAccelerationStructureNV accelerationStructure,
+                                                                         const VkAllocationCallbacks *pAllocator) {
+    if (!accelerationStructure) return;
+    auto *as_state = GetAccelerationStructureState(accelerationStructure);
+    if (as_state) {
+        const VulkanTypedHandle obj_struct(accelerationStructure, kVulkanObjectTypeAccelerationStructureNV);
+        InvalidateCommandBuffers(as_state->cb_bindings, obj_struct);
+        for (auto mem_binding : as_state->GetBoundMemory()) {
+            auto mem_info = GetDevMemState(mem_binding);
+            if (mem_info) {
+                RemoveAccelerationStructureMemoryRange(accelerationStructure, mem_info);
+            }
+        }
+        ClearMemoryObjectBindings(obj_struct);
+        as_state->destroyed = true;
+        accelerationStructureMap.erase(accelerationStructure);
+    }
+}
+
+void ValidationStateTracker::PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
+                                                                   uint32_t viewportCount,
+                                                                   const VkViewportWScalingNV *pViewportWScalings) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    cb_state->status |= CBSTATUS_VIEWPORT_W_SCALING_SET;
+}
+
+void ValidationStateTracker::PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    cb_state->status |= CBSTATUS_LINE_WIDTH_SET;
+}
+
+void ValidationStateTracker::PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
+                                                               uint16_t lineStipplePattern) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    cb_state->status |= CBSTATUS_LINE_STIPPLE_SET;
+}
+
+void ValidationStateTracker::PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
+                                                          float depthBiasClamp, float depthBiasSlopeFactor) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    cb_state->status |= CBSTATUS_DEPTH_BIAS_SET;
+}
+
+void ValidationStateTracker::PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
+                                                        const VkRect2D *pScissors) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    cb_state->scissorMask |= ((1u << scissorCount) - 1u) << firstScissor;
+    cb_state->status |= CBSTATUS_SCISSOR_SET;
+}
+
+void ValidationStateTracker::PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    cb_state->status |= CBSTATUS_BLEND_CONSTANTS_SET;
+}
+
+void ValidationStateTracker::PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
+                                                            float maxDepthBounds) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    cb_state->status |= CBSTATUS_DEPTH_BOUNDS_SET;
+}
+
+void ValidationStateTracker::PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
+                                                                   uint32_t compareMask) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    cb_state->status |= CBSTATUS_STENCIL_READ_MASK_SET;
+}
+
+void ValidationStateTracker::PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
+                                                                 uint32_t writeMask) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    cb_state->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
+}
+
+void ValidationStateTracker::PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
+                                                                 uint32_t reference) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    cb_state->status |= CBSTATUS_STENCIL_REFERENCE_SET;
+}
+
+// Update pipeline_layout bind points applying the "Pipeline Layout Compatibility" rules.
+// One of pDescriptorSets or push_descriptor_set should be nullptr, indicating whether this
+// is called for CmdBindDescriptorSets or CmdPushDescriptorSet.
+void ValidationStateTracker::UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipeline_bind_point,
+                                                           const PIPELINE_LAYOUT_STATE *pipeline_layout, uint32_t first_set,
+                                                           uint32_t set_count, const VkDescriptorSet *pDescriptorSets,
+                                                           cvdescriptorset::DescriptorSet *push_descriptor_set,
+                                                           uint32_t dynamic_offset_count, const uint32_t *p_dynamic_offsets) {
+    assert((pDescriptorSets == nullptr) ^ (push_descriptor_set == nullptr));
+    // Defensive
+    assert(pipeline_layout);
+    if (!pipeline_layout) return;
+
+    uint32_t required_size = first_set + set_count;
+    const uint32_t last_binding_index = required_size - 1;
+    assert(last_binding_index < pipeline_layout->compat_for_set.size());
+
+    // Some useful shorthand
+    auto &last_bound = cb_state->lastBound[pipeline_bind_point];
+    auto &pipe_compat_ids = pipeline_layout->compat_for_set;
+    const uint32_t current_size = static_cast<uint32_t>(last_bound.per_set.size());
+
+    // We need this three times in this function, but nowhere else
+    auto push_descriptor_cleanup = [&last_bound](const cvdescriptorset::DescriptorSet *ds) -> bool {
+        if (ds && ds->IsPushDescriptor()) {
+            assert(ds == last_bound.push_descriptor_set.get());
+            last_bound.push_descriptor_set = nullptr;
+            return true;
+        }
+        return false;
+    };
+
+    // Clean up the "disturbed" before and after the range to be set
+    if (required_size < current_size) {
+        if (last_bound.per_set[last_binding_index].compat_id_for_set != pipe_compat_ids[last_binding_index]) {
+            // We're disturbing those after last; we'll shrink below, but first we need to check for and clean up the push_descriptor
+            for (auto set_idx = required_size; set_idx < current_size; ++set_idx) {
+                if (push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set)) break;
+            }
+        } else {
+            // We're not disturbing past last, so leave the upper binding data alone.
+            required_size = current_size;
+        }
+    }
+
+    // We resize if we need more set entries or if those past "last" are disturbed
+    if (required_size != current_size) {
+        last_bound.per_set.resize(required_size);
+    }
+
+    // For any previously bound sets, need to set them to "invalid" if they were disturbed by this update
+    for (uint32_t set_idx = 0; set_idx < first_set; ++set_idx) {
+        if (last_bound.per_set[set_idx].compat_id_for_set != pipe_compat_ids[set_idx]) {
+            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
+            last_bound.per_set[set_idx].bound_descriptor_set = nullptr;
+            last_bound.per_set[set_idx].dynamicOffsets.clear();
+            last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];
+        }
+    }
+
+    // Now update the bound sets with the input sets
+    const uint32_t *input_dynamic_offsets = p_dynamic_offsets;  // "read" pointer for dynamic offset data
+    for (uint32_t input_idx = 0; input_idx < set_count; input_idx++) {
+        auto set_idx = input_idx + first_set;  // set_idx is index within layout, input_idx is index within input descriptor sets
+        cvdescriptorset::DescriptorSet *descriptor_set =
+            push_descriptor_set ? push_descriptor_set : GetSetNode(pDescriptorSets[input_idx]);
+
+        // Record binding (or push)
+        if (descriptor_set != last_bound.push_descriptor_set.get()) {
+            // Only clean up the push descriptors if they aren't the currently used set.
+            push_descriptor_cleanup(last_bound.per_set[set_idx].bound_descriptor_set);
+        }
+        last_bound.per_set[set_idx].bound_descriptor_set = descriptor_set;
+        last_bound.per_set[set_idx].compat_id_for_set = pipe_compat_ids[set_idx];  // compat ids are canonical *per* set index
+
+        if (descriptor_set) {
+            auto set_dynamic_descriptor_count = descriptor_set->GetDynamicDescriptorCount();
+            // TODO: Add logic for tracking push_descriptor offsets (here or in caller)
+            if (set_dynamic_descriptor_count && input_dynamic_offsets) {
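+                // Consume this set's dynamic offsets from the flat input array and advance the read pointer.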
+                const uint32_t *end_offset = input_dynamic_offsets + set_dynamic_descriptor_count;
+                last_bound.per_set[set_idx].dynamicOffsets = std::vector<uint32_t>(input_dynamic_offsets, end_offset);
+                input_dynamic_offsets = end_offset;
+                assert(input_dynamic_offsets <= (p_dynamic_offsets + dynamic_offset_count));
+            } else {
+                last_bound.per_set[set_idx].dynamicOffsets.clear();
+            }
+            if (!descriptor_set->IsPushDescriptor()) {
+                // Can't cache validation of push_descriptors
+                cb_state->validated_descriptor_sets.insert(descriptor_set);
+            }
+        }
+    }
+}
+
+// Update the bound state for the bind point, including the effects of incompatible pipeline layouts
+void ValidationStateTracker::PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
+                                                                VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
+                                                                uint32_t firstSet, uint32_t setCount,
+                                                                const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
+                                                                const uint32_t *pDynamicOffsets) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    auto pipeline_layout = GetPipelineLayout(layout);
+
+    // Resize binding arrays
+    uint32_t last_set_index = firstSet + setCount - 1;
+    if (last_set_index >= cb_state->lastBound[pipelineBindPoint].per_set.size()) {
+        cb_state->lastBound[pipelineBindPoint].per_set.resize(last_set_index + 1);
+    }
+
+    UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, firstSet, setCount, pDescriptorSets, nullptr,
+                                  dynamicOffsetCount, pDynamicOffsets);
+    cb_state->lastBound[pipelineBindPoint].pipeline_layout = layout;
+    ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
+}
+
+void ValidationStateTracker::RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint pipelineBindPoint,
+                                                             VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
+                                                             const VkWriteDescriptorSet *pDescriptorWrites) {
+    const auto &pipeline_layout = GetPipelineLayout(layout);
+    // Short circuit invalid updates
+    if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
+        !pipeline_layout->set_layouts[set]->IsPushDescriptor())
+        return;
+
+    // We need a descriptor set to update the bindings with, compatible with the passed layout
+    const auto dsl = pipeline_layout->set_layouts[set];
+    auto &last_bound = cb_state->lastBound[pipelineBindPoint];
+    auto &push_descriptor_set = last_bound.push_descriptor_set;
+    // If we are disturbing the current push_descriptor_set, clear it
+    if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
+        last_bound.UnbindAndResetPushDescriptorSet(new cvdescriptorset::DescriptorSet(0, nullptr, dsl, 0, this, report_data));
+    }
+
+    UpdateLastBoundDescriptorSets(cb_state, pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set.get(), 0,
+                                  nullptr);
+    last_bound.pipeline_layout = layout;
+
+    // Now that we have either the new or extant push_descriptor set ... do the write updates against it
+    push_descriptor_set->PerformPushDescriptorsUpdate(this, descriptorWriteCount, pDescriptorWrites);
+}
+
+void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
+                                                                  VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
+                                                                  uint32_t set, uint32_t descriptorWriteCount,
+                                                                  const VkWriteDescriptorSet *pDescriptorWrites) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    RecordCmdPushDescriptorSetState(cb_state, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites);
+}
+
+void ValidationStateTracker::PostCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
+                                                            VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
+                                                            const void *pValues) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    if (cb_state != nullptr) {
+        ResetCommandBufferPushConstantDataIfIncompatible(cb_state, layout);
+
+        auto &push_constant_data = cb_state->push_constant_data;
+        assert((offset + size) <= static_cast<uint32_t>(push_constant_data.size()));
+        std::memcpy(push_constant_data.data() + offset, pValues, static_cast<std::size_t>(size));
+    }
+}
+
+void ValidationStateTracker::PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+                                                             VkIndexType indexType) {
+    auto buffer_state = GetBufferState(buffer);
+    auto cb_state = GetCBState(commandBuffer);
+
+    cb_state->status |= CBSTATUS_INDEX_BUFFER_BOUND;
+    cb_state->index_buffer_binding.buffer = buffer;
+    cb_state->index_buffer_binding.size = buffer_state->createInfo.size;
+    cb_state->index_buffer_binding.offset = offset;
+    cb_state->index_buffer_binding.index_type = indexType;
+    // Add binding for this index buffer to this commandbuffer
+    AddCommandBufferBindingBuffer(cb_state, buffer_state);
+}
+
+void ValidationStateTracker::PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
+                                                               uint32_t bindingCount, const VkBuffer *pBuffers,
+                                                               const VkDeviceSize *pOffsets) {
+    auto cb_state = GetCBState(commandBuffer);
+
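+    // Make sure the per-binding array covers indices [firstBinding, firstBinding + bindingCount).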
+    uint32_t end = firstBinding + bindingCount;
+    if (cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.size() < end) {
+        cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings.resize(end);
+    }
+
+    for (uint32_t i = 0; i < bindingCount; ++i) {
+        auto &vertex_buffer_binding = cb_state->current_vertex_buffer_binding_info.vertex_buffer_bindings[i + firstBinding];
+        vertex_buffer_binding.buffer = pBuffers[i];
+        vertex_buffer_binding.offset = pOffsets[i];
+        // Add binding for this vertex buffer to this commandbuffer
+        AddCommandBufferBindingBuffer(cb_state, GetBufferState(pBuffers[i]));
+    }
+}
+
+void ValidationStateTracker::PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
+                                                           VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
+    auto cb_state = GetCBState(commandBuffer);
+    auto dst_buffer_state = GetBufferState(dstBuffer);
+
+    // Update bindings between buffer and cmd buffer
+    AddCommandBufferBindingBuffer(cb_state, dst_buffer_state);
+}
+
+bool ValidationStateTracker::SetEventStageMask(VkEvent event, VkPipelineStageFlags stageMask,
+                                               EventToStageMap *localEventToStageMap) {
+    (*localEventToStageMap)[event] = stageMask;
+    return false;
+}
+
+void ValidationStateTracker::PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
+                                                      VkPipelineStageFlags stageMask) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    auto event_state = GetEventState(event);
+    if (event_state) {
+        AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
+    }
+    cb_state->events.push_back(event);
+    if (!cb_state->waitedEvents.count(event)) {
+        cb_state->writeEventsBeforeWait.push_back(event);
+    }
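+    // Record the stage mask update as a deferred callback on eventUpdates rather than applying it immediately.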
+    cb_state->eventUpdates.emplace_back(
+        [event, stageMask](const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap) {
+            return SetEventStageMask(event, stageMask, localEventToStageMap);
+        });
+}
+
+void ValidationStateTracker::PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
+                                                        VkPipelineStageFlags stageMask) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    auto event_state = GetEventState(event);
+    if (event_state) {
+        AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent, event_state), cb_state);
+    }
+    cb_state->events.push_back(event);
+    if (!cb_state->waitedEvents.count(event)) {
+        cb_state->writeEventsBeforeWait.push_back(event);
+    }
+
+    cb_state->eventUpdates.emplace_back(
+        [event](const ValidationStateTracker *, bool do_validate, EventToStageMap *localEventToStageMap) {
+            return SetEventStageMask(event, VkPipelineStageFlags(0), localEventToStageMap);
+        });
+}
+
+void ValidationStateTracker::PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
+                                                        VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
+                                                        uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
+                                                        uint32_t bufferMemoryBarrierCount,
+                                                        const VkBufferMemoryBarrier *pBufferMemoryBarriers,
+                                                        uint32_t imageMemoryBarrierCount,
+                                                        const VkImageMemoryBarrier *pImageMemoryBarriers) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    for (uint32_t i = 0; i < eventCount; ++i) {
+        auto event_state = GetEventState(pEvents[i]);
+        if (event_state) {
+            AddCommandBufferBinding(event_state->cb_bindings, VulkanTypedHandle(pEvents[i], kVulkanObjectTypeEvent, event_state),
+                                    cb_state);
+        }
+        cb_state->waitedEvents.insert(pEvents[i]);
+        cb_state->events.push_back(pEvents[i]);
+    }
+}
+
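+// Query-state helpers: SetQueryState/SetQueryStateMulti write into a caller-provided local QueryMap rather than directly
+// into queryToStateMap, and return false. GetQueryState consults the local map first and falls back to the global
+// queryToStateMap, returning QUERYSTATE_UNKNOWN if neither has an entry.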
+bool ValidationStateTracker::SetQueryState(QueryObject object, QueryState value, QueryMap *localQueryToStateMap) {
+    (*localQueryToStateMap)[object] = value;
+    return false;
+}
+
+bool ValidationStateTracker::SetQueryStateMulti(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, QueryState value,
+                                                QueryMap *localQueryToStateMap) {
+    for (uint32_t i = 0; i < queryCount; i++) {
+        QueryObject object = {queryPool, firstQuery + i};
+        (*localQueryToStateMap)[object] = value;
+    }
+    return false;
+}
+
+QueryState ValidationStateTracker::GetQueryState(const QueryMap *localQueryToStateMap, VkQueryPool queryPool,
+                                                 uint32_t queryIndex) const {
+    QueryObject query = {queryPool, queryIndex};
+
+    const std::array<const decltype(queryToStateMap) *, 2> map_list = {localQueryToStateMap, &queryToStateMap};
+
+    for (const auto map : map_list) {
+        auto query_data = map->find(query);
+        if (query_data != map->end()) {
+            return query_data->second;
+        }
+    }
+    return QUERYSTATE_UNKNOWN;
+}
+
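+// Begin/end query bookkeeping: the query is tracked as active (and started) on the command buffer, the
+// QUERYSTATE_RUNNING/QUERYSTATE_ENDED transitions are deferred through cb_state->queryUpdates, and the query pool is
+// bound to the command buffer.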
+void ValidationStateTracker::RecordCmdBeginQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
+    if (disabled.query_validation) return;
+    cb_state->activeQueries.insert(query_obj);
+    cb_state->startedQueries.insert(query_obj);
+    cb_state->queryUpdates.emplace_back(
+        [query_obj](const ValidationStateTracker *device_data, bool do_validate, QueryMap *localQueryToStateMap) {
+            SetQueryState(query_obj, QUERYSTATE_RUNNING, localQueryToStateMap);
+            return false;
+        });
+    auto pool_state = GetQueryPoolState(query_obj.pool);
+    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
+                            cb_state);
+}
+
+void ValidationStateTracker::PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
+                                                         VkFlags flags) {
+    if (disabled.query_validation) return;
+    QueryObject query = {queryPool, slot};
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    RecordCmdBeginQuery(cb_state, query);
+}
+
+void ValidationStateTracker::RecordCmdEndQuery(CMD_BUFFER_STATE *cb_state, const QueryObject &query_obj) {
+    if (disabled.query_validation) return;
+    cb_state->activeQueries.erase(query_obj);
+    cb_state->queryUpdates.emplace_back(
+        [query_obj](const ValidationStateTracker *device_data, bool do_validate, QueryMap *localQueryToStateMap) {
+            return SetQueryState(query_obj, QUERYSTATE_ENDED, localQueryToStateMap);
+        });
+    auto pool_state = GetQueryPoolState(query_obj.pool);
+    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool, pool_state),
+                            cb_state);
+}
+
+void ValidationStateTracker::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
+    if (disabled.query_validation) return;
+    QueryObject query_obj = {queryPool, slot};
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    RecordCmdEndQuery(cb_state, query_obj);
+}
+
+void ValidationStateTracker::PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
+                                                             uint32_t firstQuery, uint32_t queryCount) {
+    if (disabled.query_validation) return;
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+
+    cb_state->queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data,
+                                                                            bool do_validate, QueryMap *localQueryToStateMap) {
+        return SetQueryStateMulti(queryPool, firstQuery, queryCount, QUERYSTATE_RESET, localQueryToStateMap);
+    });
+    auto pool_state = GetQueryPoolState(queryPool);
+    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
+                            cb_state);
+}
+
+void ValidationStateTracker::PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
+                                                                   uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
+                                                                   VkDeviceSize dstOffset, VkDeviceSize stride,
+                                                                   VkQueryResultFlags flags) {
+    if (disabled.query_validation) return;
+    auto cb_state = GetCBState(commandBuffer);
+    auto dst_buff_state = GetBufferState(dstBuffer);
+    AddCommandBufferBindingBuffer(cb_state, dst_buff_state);
+    auto pool_state = GetQueryPoolState(queryPool);
+    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
+                            cb_state);
+}
+
+void ValidationStateTracker::PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
+                                                             VkQueryPool queryPool, uint32_t slot) {
+    if (disabled.query_validation) return;
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    auto pool_state = GetQueryPoolState(queryPool);
+    AddCommandBufferBinding(pool_state->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool, pool_state),
+                            cb_state);
+    QueryObject query = {queryPool, slot};
+    cb_state->queryUpdates.emplace_back(
+        [query](const ValidationStateTracker *device_data, bool do_validate, QueryMap *localQueryToStateMap) {
+            return SetQueryState(query, QUERYSTATE_ENDED, localQueryToStateMap);
+        });
+}
+
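+// Framebuffer creation: shadow the create info (together with the associated render pass state) in a FRAMEBUFFER_STATE
+// and store it in frameBufferMap keyed by the new handle.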
+void ValidationStateTracker::PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
+                                                             const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer,
+                                                             VkResult result) {
+    if (VK_SUCCESS != result) return;
+    // Shadow create info and store in map
+    auto fb_state = std::make_shared<FRAMEBUFFER_STATE>(*pFramebuffer, pCreateInfo, GetRenderPassShared(pCreateInfo->renderPass));
+
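+    // For non-imageless framebuffers, only confirm that each attachment view is known to the state tracker; no
+    // per-attachment state is recorded here.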
+    if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR) == 0) {
+        for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
+            VkImageView view = pCreateInfo->pAttachments[i];
+            auto view_state = GetImageViewState(view);
+            if (!view_state) {
+                continue;
+            }
+        }
+    }
+    frameBufferMap[*pFramebuffer] = std::move(fb_state);
+}
+
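+// Build the render pass dependency DAG: one node per subpass, prev/next edges for each subpass-to-subpass dependency, and
+// a per-subpass list of self-dependencies. Dependencies involving VK_SUBPASS_EXTERNAL are not added to the graph.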
+void ValidationStateTracker::RecordRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR *pCreateInfo,
+                                                 RENDER_PASS_STATE *render_pass) {
+    auto &subpass_to_node = render_pass->subpassToNode;
+    subpass_to_node.resize(pCreateInfo->subpassCount);
+    auto &self_dependencies = render_pass->self_dependencies;
+    self_dependencies.resize(pCreateInfo->subpassCount);
+
+    for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
+        subpass_to_node[i].pass = i;
+        self_dependencies[i].clear();
+    }
+    for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
+        const VkSubpassDependency2KHR &dependency = pCreateInfo->pDependencies[i];
+        if ((dependency.srcSubpass != VK_SUBPASS_EXTERNAL) && (dependency.dstSubpass != VK_SUBPASS_EXTERNAL)) {
+            if (dependency.srcSubpass == dependency.dstSubpass) {
+                self_dependencies[dependency.srcSubpass].push_back(i);
+            } else {
+                subpass_to_node[dependency.dstSubpass].prev.push_back(dependency.srcSubpass);
+                subpass_to_node[dependency.srcSubpass].next.push_back(dependency.dstSubpass);
+            }
+        }
+    }
+}
+
+static void MarkAttachmentFirstUse(RENDER_PASS_STATE *render_pass, uint32_t index, bool is_read) {
+    if (index == VK_ATTACHMENT_UNUSED) return;
+
+    if (!render_pass->attachment_first_read.count(index)) render_pass->attachment_first_read[index] = is_read;
+}
+
+void ValidationStateTracker::RecordCreateRenderPassState(RenderPassCreateVersion rp_version,
+                                                         std::shared_ptr<RENDER_PASS_STATE> &render_pass,
+                                                         VkRenderPass *pRenderPass) {
+    render_pass->renderPass = *pRenderPass;
+    auto create_info = render_pass->createInfo.ptr();
+
+    RecordRenderPassDAG(RENDER_PASS_VERSION_1, create_info, render_pass.get());
+
+    for (uint32_t i = 0; i < create_info->subpassCount; ++i) {
+        const VkSubpassDescription2KHR &subpass = create_info->pSubpasses[i];
+        for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
+            MarkAttachmentFirstUse(render_pass.get(), subpass.pColorAttachments[j].attachment, false);
+
+            // resolve attachments are considered to be written
+            if (subpass.pResolveAttachments) {
+                MarkAttachmentFirstUse(render_pass.get(), subpass.pResolveAttachments[j].attachment, false);
+            }
+        }
+        if (subpass.pDepthStencilAttachment) {
+            MarkAttachmentFirstUse(render_pass.get(), subpass.pDepthStencilAttachment->attachment, false);
+        }
+        for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
+            MarkAttachmentFirstUse(render_pass.get(), subpass.pInputAttachments[j].attachment, true);
+        }
+    }
+
+    // render_pass is passed by non-const lvalue reference, so it must be explicitly moved for move assignment to be
+    // invoked and ownership to transfer into the map without an extra reference-count bump.
+    renderPassMap[*pRenderPass] = std::move(render_pass);
+}
+
+// Style note:
+// Use of rvalue reference exceeds recommended usage of rvalue refs in the Google style guide, but intentionally forces caller to move
+// or copy.  This is clearer than passing a pointer to shared_ptr and avoids the atomic increment/decrement of shared_ptr copy
+// construction or assignment.
+void ValidationStateTracker::PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
+                                                            const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
+                                                            VkResult result) {
+    if (VK_SUCCESS != result) return;
+    auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
+    RecordCreateRenderPassState(RENDER_PASS_VERSION_1, render_pass_state, pRenderPass);
+}
+
+void ValidationStateTracker::PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
+                                                                const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
+                                                                VkResult result) {
+    if (VK_SUCCESS != result) return;
+    auto render_pass_state = std::make_shared<RENDER_PASS_STATE>(pCreateInfo);
+    RecordCreateRenderPassState(RENDER_PASS_VERSION_2, render_pass_state, pRenderPass);
+}
+
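+// vkCmdBeginRenderPass state: record the active render pass, framebuffer, subpass index and subpass contents on the
+// command buffer, bind the framebuffer and render pass objects to it, and take the device mask from a chained
+// VkDeviceGroupRenderPassBeginInfo when present (otherwise fall back to the command buffer's initial device mask).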
+void ValidationStateTracker::RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer,
+                                                           const VkRenderPassBeginInfo *pRenderPassBegin,
+                                                           const VkSubpassContents contents) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    auto render_pass_state = pRenderPassBegin ? GetRenderPassState(pRenderPassBegin->renderPass) : nullptr;
+    auto framebuffer = pRenderPassBegin ? GetFramebufferState(pRenderPassBegin->framebuffer) : nullptr;
+
+    if (render_pass_state) {
+        cb_state->activeFramebuffer = pRenderPassBegin->framebuffer;
+        cb_state->activeRenderPass = render_pass_state;
+        // This is a shallow copy as that is all that is needed for now
+        cb_state->activeRenderPassBeginInfo = *pRenderPassBegin;
+        cb_state->activeSubpass = 0;
+        cb_state->activeSubpassContents = contents;
+        cb_state->framebuffers.insert(pRenderPassBegin->framebuffer);
+        // Connect this framebuffer and its children to this cmdBuffer
+        AddFramebufferBinding(cb_state, framebuffer);
+        // Connect this RP to cmdBuffer
+        AddCommandBufferBinding(render_pass_state->cb_bindings,
+                                VulkanTypedHandle(render_pass_state->renderPass, kVulkanObjectTypeRenderPass, render_pass_state),
+                                cb_state);
+
+        auto chained_device_group_struct = lvl_find_in_chain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
+        if (chained_device_group_struct) {
+            cb_state->active_render_pass_device_mask = chained_device_group_struct->deviceMask;
+        } else {
+            cb_state->active_render_pass_device_mask = cb_state->initial_device_mask;
+        }
+    }
+}
+
+void ValidationStateTracker::PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer,
+                                                             const VkRenderPassBeginInfo *pRenderPassBegin,
+                                                             VkSubpassContents contents) {
+    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, contents);
+}
+
+void ValidationStateTracker::PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,
+                                                                 const VkRenderPassBeginInfo *pRenderPassBegin,
+                                                                 const VkSubpassBeginInfoKHR *pSubpassBeginInfo) {
+    RecordCmdBeginRenderPassState(commandBuffer, pRenderPassBegin, pSubpassBeginInfo->contents);
+}
+
+void ValidationStateTracker::RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    cb_state->activeSubpass++;
+    cb_state->activeSubpassContents = contents;
+}
+
+void ValidationStateTracker::PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
+    RecordCmdNextSubpass(commandBuffer, contents);
+}
+
+void ValidationStateTracker::PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,
+                                                              const VkSubpassBeginInfoKHR *pSubpassBeginInfo,
+                                                              const VkSubpassEndInfoKHR *pSubpassEndInfo) {
+    RecordCmdNextSubpass(commandBuffer, pSubpassBeginInfo->contents);
+}
+
+void ValidationStateTracker::RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    cb_state->activeRenderPass = nullptr;
+    cb_state->activeSubpass = 0;
+    cb_state->activeFramebuffer = VK_NULL_HANDLE;
+}
+
+void ValidationStateTracker::PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer) {
+    RecordCmdEndRenderPassState(commandBuffer);
+}
+
+void ValidationStateTracker::PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,
+                                                                const VkSubpassEndInfoKHR *pSubpassEndInfo) {
+    RecordCmdEndRenderPassState(commandBuffer);
+}
+
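+// vkCmdExecuteCommands: for each secondary command buffer, clear the primary's SIMULTANEOUS_USE flag if the secondary
+// does not allow it, propagate the secondary's image layout map into the primary, cross-link the two command buffers, and
+// append the secondary's deferred query and queue-submit updates to the primary.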
+void ValidationStateTracker::PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
+                                                             const VkCommandBuffer *pCommandBuffers) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+
+    CMD_BUFFER_STATE *sub_cb_state = NULL;
+    for (uint32_t i = 0; i < commandBuffersCount; i++) {
+        sub_cb_state = GetCBState(pCommandBuffers[i]);
+        assert(sub_cb_state);
+        if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
+            if (cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
+                // TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be moved
+                // from the validation step to the recording step
+                cb_state->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
+            }
+        }
+
+        // Propagate initial layout and current layout state to the primary cmd buffer
+        // NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
+        // ValidationStateTracker these maps will be empty, so leaving the propagation in the state tracker should be a no-op
+        // for those other classes.
+        for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
+            const auto image = sub_layout_map_entry.first;
+            const auto *image_state = GetImageState(image);
+            if (!image_state) continue;  // Can't set layouts of a dead image
+
+            auto *cb_subres_map = GetImageSubresourceLayoutMap(cb_state, *image_state);
+            const auto *sub_cb_subres_map = sub_layout_map_entry.second.get();
+            assert(cb_subres_map && sub_cb_subres_map);  // Non const get and map traversal should never be null
+            cb_subres_map->UpdateFrom(*sub_cb_subres_map);
+        }
+
+        sub_cb_state->primaryCommandBuffer = cb_state->commandBuffer;
+        cb_state->linkedCommandBuffers.insert(sub_cb_state);
+        sub_cb_state->linkedCommandBuffers.insert(cb_state);
+        for (auto &function : sub_cb_state->queryUpdates) {
+            cb_state->queryUpdates.push_back(function);
+        }
+        for (auto &function : sub_cb_state->queue_submit_functions) {
+            cb_state->queue_submit_functions.push_back(function);
+        }
+    }
+}
+
+void ValidationStateTracker::PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size,
+                                                     VkFlags flags, void **ppData, VkResult result) {
+    if (VK_SUCCESS != result) return;
+    RecordMappedMemory(mem, offset, size, ppData);
+}
+
+void ValidationStateTracker::PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem) {
+    auto mem_info = GetDevMemState(mem);
+    if (mem_info) {
+        mem_info->mapped_range = MemRange();
+        mem_info->p_driver_data = nullptr;
+    }
+}
+
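+// Image memory binding: images bound via VkBindImageMemorySwapchainInfoKHR are associated with the swapchain image rather
+// than a VkDeviceMemory allocation; otherwise the bound memory range and the memory<->image binding are recorded. Images
+// created with VK_IMAGE_CREATE_ALIAS_BIT are added to the aliasing tracking.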
+void ValidationStateTracker::UpdateBindImageMemoryState(const VkBindImageMemoryInfo &bindInfo) {
+    IMAGE_STATE *image_state = GetImageState(bindInfo.image);
+    if (image_state) {
+        const auto swapchain_info = lvl_find_in_chain<VkBindImageMemorySwapchainInfoKHR>(bindInfo.pNext);
+        if (swapchain_info) {
+            auto swapchain = GetSwapchainState(swapchain_info->swapchain);
+            if (swapchain) {
+                swapchain->images[swapchain_info->imageIndex].bound_images.emplace(image_state->image);
+                image_state->bind_swapchain = swapchain_info->swapchain;
+                image_state->bind_swapchain_imageIndex = swapchain_info->imageIndex;
+            }
+        } else {
+            // Track bound memory range information
+            auto mem_info = GetDevMemState(bindInfo.memory);
+            if (mem_info) {
+                InsertImageMemoryRange(bindInfo.image, mem_info, bindInfo.memoryOffset, image_state->requirements,
+                                       image_state->createInfo.tiling == VK_IMAGE_TILING_LINEAR);
+            }
+
+            // Track objects tied to memory
+            SetMemBinding(bindInfo.memory, image_state, bindInfo.memoryOffset,
+                          VulkanTypedHandle(bindInfo.image, kVulkanObjectTypeImage));
+        }
+        if (image_state->createInfo.flags & VK_IMAGE_CREATE_ALIAS_BIT) {
+            AddAliasingImage(image_state);
+        }
+    }
+}
+
+void ValidationStateTracker::PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
+                                                           VkDeviceSize memoryOffset, VkResult result) {
+    if (VK_SUCCESS != result) return;
+    VkBindImageMemoryInfo bindInfo = {};
+    bindInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
+    bindInfo.image = image;
+    bindInfo.memory = mem;
+    bindInfo.memoryOffset = memoryOffset;
+    UpdateBindImageMemoryState(bindInfo);
+}
+
+void ValidationStateTracker::PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount,
+                                                            const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
+    if (VK_SUCCESS != result) return;
+    for (uint32_t i = 0; i < bindInfoCount; i++) {
+        UpdateBindImageMemoryState(pBindInfos[i]);
+    }
+}
+
+void ValidationStateTracker::PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount,
+                                                               const VkBindImageMemoryInfoKHR *pBindInfos, VkResult result) {
+    if (VK_SUCCESS != result) return;
+    for (uint32_t i = 0; i < bindInfoCount; i++) {
+        UpdateBindImageMemoryState(pBindInfos[i]);
+    }
+}
+
+void ValidationStateTracker::PreCallRecordSetEvent(VkDevice device, VkEvent event) {
+    auto event_state = GetEventState(event);
+    if (event_state) {
+        event_state->stageMask = VK_PIPELINE_STAGE_HOST_BIT;
+    }
+}
+
+void ValidationStateTracker::PostCallRecordImportSemaphoreFdKHR(VkDevice device,
+                                                                const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo,
+                                                                VkResult result) {
+    if (VK_SUCCESS != result) return;
+    RecordImportSemaphoreState(pImportSemaphoreFdInfo->semaphore, pImportSemaphoreFdInfo->handleType,
+                               pImportSemaphoreFdInfo->flags);
+}
+
+void ValidationStateTracker::RecordGetExternalSemaphoreState(VkSemaphore semaphore,
+                                                             VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type) {
+    SEMAPHORE_STATE *semaphore_state = GetSemaphoreState(semaphore);
+    if (semaphore_state && handle_type != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
+        // Cannot track semaphore state once it is exported, except for Sync FD handle types which have copy transference
+        semaphore_state->scope = kSyncScopeExternalPermanent;
+    }
+}
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+void ValidationStateTracker::PostCallRecordImportSemaphoreWin32HandleKHR(
+    VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR *pImportSemaphoreWin32HandleInfo, VkResult result) {
+    if (VK_SUCCESS != result) return;
+    RecordImportSemaphoreState(pImportSemaphoreWin32HandleInfo->semaphore, pImportSemaphoreWin32HandleInfo->handleType,
+                               pImportSemaphoreWin32HandleInfo->flags);
+}
+
+void ValidationStateTracker::PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device,
+                                                                      const VkSemaphoreGetWin32HandleInfoKHR *pGetWin32HandleInfo,
+                                                                      HANDLE *pHandle, VkResult result) {
+    if (VK_SUCCESS != result) return;
+    RecordGetExternalSemaphoreState(pGetWin32HandleInfo->semaphore, pGetWin32HandleInfo->handleType);
+}
+
+void ValidationStateTracker::PostCallRecordImportFenceWin32HandleKHR(
+    VkDevice device, const VkImportFenceWin32HandleInfoKHR *pImportFenceWin32HandleInfo, VkResult result) {
+    if (VK_SUCCESS != result) return;
+    RecordImportFenceState(pImportFenceWin32HandleInfo->fence, pImportFenceWin32HandleInfo->handleType,
+                           pImportFenceWin32HandleInfo->flags);
+}
+
+void ValidationStateTracker::PostCallRecordGetFenceWin32HandleKHR(VkDevice device,
+                                                                  const VkFenceGetWin32HandleInfoKHR *pGetWin32HandleInfo,
+                                                                  HANDLE *pHandle, VkResult result) {
+    if (VK_SUCCESS != result) return;
+    RecordGetExternalFenceState(pGetWin32HandleInfo->fence, pGetWin32HandleInfo->handleType);
+}
+#endif
+
+void ValidationStateTracker::PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd,
+                                                             VkResult result) {
+    if (VK_SUCCESS != result) return;
+    RecordGetExternalSemaphoreState(pGetFdInfo->semaphore, pGetFdInfo->handleType);
+}
+
+void ValidationStateTracker::RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type,
+                                                    VkFenceImportFlagsKHR flags) {
+    FENCE_STATE *fence_node = GetFenceState(fence);
+    if (fence_node && fence_node->scope != kSyncScopeExternalPermanent) {
+        if ((handle_type == VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR || flags & VK_FENCE_IMPORT_TEMPORARY_BIT_KHR) &&
+            fence_node->scope == kSyncScopeInternal) {
+            fence_node->scope = kSyncScopeExternalTemporary;
+        } else {
+            fence_node->scope = kSyncScopeExternalPermanent;
+        }
+    }
+}
+
+void ValidationStateTracker::PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo,
+                                                            VkResult result) {
+    if (VK_SUCCESS != result) return;
+    RecordImportFenceState(pImportFenceFdInfo->fence, pImportFenceFdInfo->handleType, pImportFenceFdInfo->flags);
+}
+
+void ValidationStateTracker::RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type) {
+    FENCE_STATE *fence_state = GetFenceState(fence);
+    if (fence_state) {
+        if (handle_type != VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR) {
+            // Export with reference transference becomes external
+            fence_state->scope = kSyncScopeExternalPermanent;
+        } else if (fence_state->scope == kSyncScopeInternal) {
+            // Export with copy transference has a side effect of resetting the fence
+            fence_state->state = FENCE_UNSIGNALED;
+        }
+    }
+}
+
+void ValidationStateTracker::PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR *pGetFdInfo, int *pFd,
+                                                         VkResult result) {
+    if (VK_SUCCESS != result) return;
+    RecordGetExternalFenceState(pGetFdInfo->fence, pGetFdInfo->handleType);
+}
+
+void ValidationStateTracker::PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
+                                                       const VkAllocationCallbacks *pAllocator, VkEvent *pEvent, VkResult result) {
+    if (VK_SUCCESS != result) return;
+    eventMap[*pEvent].write_in_use = 0;
+    eventMap[*pEvent].stageMask = VkPipelineStageFlags(0);
+}
+
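+// Swapchain creation: on success a SWAPCHAIN_NODE shadows the create info (flagging shared presentable modes) and the
+// surface is pointed at it; on failure the surface's swapchain is cleared. In either case any oldSwapchain is retired, as
+// the spec requires.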
+void ValidationStateTracker::RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR *pCreateInfo,
+                                                        VkSwapchainKHR *pSwapchain, SURFACE_STATE *surface_state,
+                                                        SWAPCHAIN_NODE *old_swapchain_state) {
+    if (VK_SUCCESS == result) {
+        auto swapchain_state = std::make_shared<SWAPCHAIN_NODE>(pCreateInfo, *pSwapchain);
+        if (VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR == pCreateInfo->presentMode ||
+            VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR == pCreateInfo->presentMode) {
+            swapchain_state->shared_presentable = true;
+        }
+        surface_state->swapchain = swapchain_state.get();
+        swapchainMap[*pSwapchain] = std::move(swapchain_state);
+    } else {
+        surface_state->swapchain = nullptr;
+    }
+    // Spec requires that even if CreateSwapchainKHR fails, oldSwapchain is retired
+    if (old_swapchain_state) {
+        old_swapchain_state->retired = true;
+    }
+    return;
+}
+
+void ValidationStateTracker::PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
+                                                              const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain,
+                                                              VkResult result) {
+    auto surface_state = GetSurfaceState(pCreateInfo->surface);
+    auto old_swapchain_state = GetSwapchainState(pCreateInfo->oldSwapchain);
+    RecordCreateSwapchainState(result, pCreateInfo, pSwapchain, surface_state, old_swapchain_state);
+}
+
+void ValidationStateTracker::PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
+                                                              const VkAllocationCallbacks *pAllocator) {
+    if (!swapchain) return;
+    auto swapchain_data = GetSwapchainState(swapchain);
+    if (swapchain_data) {
+        for (const auto &swapchain_image : swapchain_data->images) {
+            ClearMemoryObjectBindings(VulkanTypedHandle(swapchain_image.image, kVulkanObjectTypeImage));
+            imageMap.erase(swapchain_image.image);
+            RemoveAliasingImages(swapchain_image.bound_images);
+        }
+
+        auto surface_state = GetSurfaceState(swapchain_data->createInfo.surface);
+        if (surface_state) {
+            if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
+        }
+        swapchain_data->destroyed = true;
+        swapchainMap.erase(swapchain);
+    }
+}
+
+void ValidationStateTracker::PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result) {
+    // Semaphore waits occur before error generation, if the call reached the ICD. (Confirm?)
+    for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
+        auto pSemaphore = GetSemaphoreState(pPresentInfo->pWaitSemaphores[i]);
+        if (pSemaphore) {
+            pSemaphore->signaler.first = VK_NULL_HANDLE;
+            pSemaphore->signaled = false;
+        }
+    }
+
+    for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
+        // Note: this is imperfect, in that we can get confused about what did or didn't succeed-- but if the app does that, it's
+        // confused itself just as much.
+        auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
+        if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue;  // this present didn't actually happen.
+        // Mark the image as having been released to the WSI
+        auto swapchain_data = GetSwapchainState(pPresentInfo->pSwapchains[i]);
+        if (swapchain_data && (swapchain_data->images.size() > pPresentInfo->pImageIndices[i])) {
+            auto image = swapchain_data->images[pPresentInfo->pImageIndices[i]].image;
+            auto image_state = GetImageState(image);
+            if (image_state) {
+                image_state->acquired = false;
+                if (image_state->shared_presentable) {
+                    image_state->layout_locked = true;
+                }
+            }
+        }
+    }
+    // Note: even though presentation is directed to a queue, there is no direct ordering between QP and subsequent work, so QP (and
+    // its semaphore waits) /never/ participate in any completion proof.
+}
+
+void ValidationStateTracker::PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
+                                                                     const VkSwapchainCreateInfoKHR *pCreateInfos,
+                                                                     const VkAllocationCallbacks *pAllocator,
+                                                                     VkSwapchainKHR *pSwapchains, VkResult result) {
+    if (pCreateInfos) {
+        for (uint32_t i = 0; i < swapchainCount; i++) {
+            auto surface_state = GetSurfaceState(pCreateInfos[i].surface);
+            auto old_swapchain_state = GetSwapchainState(pCreateInfos[i].oldSwapchain);
+            RecordCreateSwapchainState(result, &pCreateInfos[i], &pSwapchains[i], surface_state, old_swapchain_state);
+        }
+    }
+}
+
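+// vkAcquireNextImage*: the fence is treated as in-flight and the semaphore as signaled (both with no queue signaler, so
+// neither can participate in a completion proof), and the acquired swapchain image is marked as held by the application.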
+void ValidationStateTracker::RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
+                                                         VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
+    auto pFence = GetFenceState(fence);
+    if (pFence && pFence->scope == kSyncScopeInternal) {
+        // Treat as inflight since it is valid to wait on this fence, even in cases where it is technically a temporary
+        // import
+        pFence->state = FENCE_INFLIGHT;
+        pFence->signaler.first = VK_NULL_HANDLE;  // ANI isn't on a queue, so this can't participate in a completion proof.
+    }
+
+    auto pSemaphore = GetSemaphoreState(semaphore);
+    if (pSemaphore && pSemaphore->scope == kSyncScopeInternal) {
+        // Treat as signaled since it is valid to wait on this semaphore, even in cases where it is technically a
+        // temporary import
+        pSemaphore->signaled = true;
+        pSemaphore->signaler.first = VK_NULL_HANDLE;
+    }
+
+    // Mark the image as acquired.
+    auto swapchain_data = GetSwapchainState(swapchain);
+    if (swapchain_data && (swapchain_data->images.size() > *pImageIndex)) {
+        auto image = swapchain_data->images[*pImageIndex].image;
+        auto image_state = GetImageState(image);
+        if (image_state) {
+            image_state->acquired = true;
+            image_state->shared_presentable = swapchain_data->shared_presentable;
+        }
+    }
+}
+
+void ValidationStateTracker::PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
+                                                               VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex,
+                                                               VkResult result) {
+    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
+    RecordAcquireNextImageState(device, swapchain, timeout, semaphore, fence, pImageIndex);
+}
+
+void ValidationStateTracker::PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
+                                                                uint32_t *pImageIndex, VkResult result) {
+    if ((VK_SUCCESS != result) && (VK_SUBOPTIMAL_KHR != result)) return;
+    RecordAcquireNextImageState(device, pAcquireInfo->swapchain, pAcquireInfo->timeout, pAcquireInfo->semaphore,
+                                pAcquireInfo->fence, pImageIndex);
+}
+
+void ValidationStateTracker::PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
+                                                                    VkPhysicalDevice *pPhysicalDevices, VkResult result) {
+    if ((NULL != pPhysicalDevices) && (result == VK_SUCCESS || result == VK_INCOMPLETE)) {
+        for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
+            auto &phys_device_state = physical_device_map[pPhysicalDevices[i]];
+            phys_device_state.phys_device = pPhysicalDevices[i];
+            // Init actual features for each physical device
+            DispatchGetPhysicalDeviceFeatures(pPhysicalDevices[i], &phys_device_state.features2.features);
+        }
+    }
+}
+
+// Common function to update state for GetPhysicalDeviceQueueFamilyProperties & 2KHR version
+static void StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(PHYSICAL_DEVICE_STATE *pd_state, uint32_t count,
+                                                                    VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
+    pd_state->queue_family_known_count = std::max(pd_state->queue_family_known_count, count);
+
+    if (!pQueueFamilyProperties) {
+        if (UNCALLED == pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState)
+            pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_COUNT;
+    } else {  // Save queue family properties
+        pd_state->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_DETAILS;
+
+        pd_state->queue_family_properties.resize(std::max(static_cast<uint32_t>(pd_state->queue_family_properties.size()), count));
+        for (uint32_t i = 0; i < count; ++i) {
+            pd_state->queue_family_properties[i] = pQueueFamilyProperties[i].queueFamilyProperties;
+        }
+    }
+}
+
+void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
+                                                                                  uint32_t *pQueueFamilyPropertyCount,
+                                                                                  VkQueueFamilyProperties *pQueueFamilyProperties) {
+    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
+    assert(physical_device_state);
+    VkQueueFamilyProperties2KHR *pqfp = nullptr;
+    std::vector<VkQueueFamilyProperties2KHR> qfp;
+    qfp.resize(*pQueueFamilyPropertyCount);
+    if (pQueueFamilyProperties) {
+        for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
+            qfp[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR;
+            qfp[i].pNext = nullptr;
+            qfp[i].queueFamilyProperties = pQueueFamilyProperties[i];
+        }
+        pqfp = qfp.data();
+    }
+    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount, pqfp);
+}
+
+void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(
+    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
+    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
+    assert(physical_device_state);
+    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
+                                                            pQueueFamilyProperties);
+}
+
+void ValidationStateTracker::PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(
+    VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties) {
+    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
+    assert(physical_device_state);
+    StateUpdateCommonGetPhysicalDeviceQueueFamilyProperties(physical_device_state, *pQueueFamilyPropertyCount,
+                                                            pQueueFamilyProperties);
+}
+
+void ValidationStateTracker::PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface,
+                                                            const VkAllocationCallbacks *pAllocator) {
+    if (!surface) return;
+    auto surface_state = GetSurfaceState(surface);
+    surface_state->destroyed = true;
+    surface_map.erase(surface);
+}
+
+void ValidationStateTracker::RecordVulkanSurface(VkSurfaceKHR *pSurface) {
+    surface_map[*pSurface] = std::make_shared<SURFACE_STATE>(*pSurface);
+}
+
+void ValidationStateTracker::PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance,
+                                                                        const VkDisplaySurfaceCreateInfoKHR *pCreateInfo,
+                                                                        const VkAllocationCallbacks *pAllocator,
+                                                                        VkSurfaceKHR *pSurface, VkResult result) {
+    if (VK_SUCCESS != result) return;
+    RecordVulkanSurface(pSurface);
+}
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+void ValidationStateTracker::PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance,
+                                                                   const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
+                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
+                                                                   VkResult result) {
+    if (VK_SUCCESS != result) return;
+    RecordVulkanSurface(pSurface);
+}
+#endif  // VK_USE_PLATFORM_ANDROID_KHR
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+void ValidationStateTracker::PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo,
+                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
+                                                               VkResult result) {
+    if (VK_SUCCESS != result) return;
+    RecordVulkanSurface(pSurface);
+}
+#endif  // VK_USE_PLATFORM_IOS_MVK
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+void ValidationStateTracker::PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance,
+                                                                 const VkMacOSSurfaceCreateInfoMVK *pCreateInfo,
+                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
+                                                                 VkResult result) {
+    if (VK_SUCCESS != result) return;
+    RecordVulkanSurface(pSurface);
+}
+#endif  // VK_USE_PLATFORM_MACOS_MVK
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+void ValidationStateTracker::PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance,
+                                                                   const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
+                                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
+                                                                   VkResult result) {
+    if (VK_SUCCESS != result) return;
+    RecordVulkanSurface(pSurface);
+}
+#endif  // VK_USE_PLATFORM_WAYLAND_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+void ValidationStateTracker::PostCallRecordCreateWin32SurfaceKHR(VkInstance instance,
+                                                                 const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
+                                                                 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
+                                                                 VkResult result) {
+    if (VK_SUCCESS != result) return;
+    RecordVulkanSurface(pSurface);
+}
+#endif  // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+void ValidationStateTracker::PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
+                                                               const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
+                                                               VkResult result) {
+    if (VK_SUCCESS != result) return;
+    RecordVulkanSurface(pSurface);
+}
+#endif  // VK_USE_PLATFORM_XCB_KHR
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+void ValidationStateTracker::PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
+                                                                const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface,
+                                                                VkResult result) {
+    if (VK_SUCCESS != result) return;
+    RecordVulkanSurface(pSurface);
+}
+#endif  // VK_USE_PLATFORM_XLIB_KHR
+
+void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
+                                                                     VkPhysicalDeviceFeatures *pFeatures) {
+    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
+    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
+    physical_device_state->features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
+    physical_device_state->features2.pNext = nullptr;
+    physical_device_state->features2.features = *pFeatures;
+}
+
+void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
+                                                                      VkPhysicalDeviceFeatures2 *pFeatures) {
+    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
+    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
+    physical_device_state->features2.initialize(pFeatures);
+}
+
+void ValidationStateTracker::PostCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice,
+                                                                         VkPhysicalDeviceFeatures2 *pFeatures) {
+    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
+    physical_device_state->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
+    physical_device_state->features2.initialize(pFeatures);
+}
+
+void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice,
+                                                                                   VkSurfaceKHR surface,
+                                                                                   VkSurfaceCapabilitiesKHR *pSurfaceCapabilities,
+                                                                                   VkResult result) {
+    if (VK_SUCCESS != result) return;
+    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
+    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
+    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
+    physical_device_state->surfaceCapabilities = *pSurfaceCapabilities;
+}
+
+void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(
+    VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
+    VkSurfaceCapabilities2KHR *pSurfaceCapabilities, VkResult result) {
+    if (VK_SUCCESS != result) return;
+    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
+    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
+    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
+    physical_device_state->surfaceCapabilities = pSurfaceCapabilities->surfaceCapabilities;
+}
+
+void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice,
+                                                                                    VkSurfaceKHR surface,
+                                                                                    VkSurfaceCapabilities2EXT *pSurfaceCapabilities,
+                                                                                    VkResult result) {
+    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
+    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = QUERY_DETAILS;
+    physical_device_state->vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = true;
+    physical_device_state->surfaceCapabilities.minImageCount = pSurfaceCapabilities->minImageCount;
+    physical_device_state->surfaceCapabilities.maxImageCount = pSurfaceCapabilities->maxImageCount;
+    physical_device_state->surfaceCapabilities.currentExtent = pSurfaceCapabilities->currentExtent;
+    physical_device_state->surfaceCapabilities.minImageExtent = pSurfaceCapabilities->minImageExtent;
+    physical_device_state->surfaceCapabilities.maxImageExtent = pSurfaceCapabilities->maxImageExtent;
+    physical_device_state->surfaceCapabilities.maxImageArrayLayers = pSurfaceCapabilities->maxImageArrayLayers;
+    physical_device_state->surfaceCapabilities.supportedTransforms = pSurfaceCapabilities->supportedTransforms;
+    physical_device_state->surfaceCapabilities.currentTransform = pSurfaceCapabilities->currentTransform;
+    physical_device_state->surfaceCapabilities.supportedCompositeAlpha = pSurfaceCapabilities->supportedCompositeAlpha;
+    physical_device_state->surfaceCapabilities.supportedUsageFlags = pSurfaceCapabilities->supportedUsageFlags;
+}
+
+void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
+                                                                              uint32_t queueFamilyIndex, VkSurfaceKHR surface,
+                                                                              VkBool32 *pSupported, VkResult result) {
+    if (VK_SUCCESS != result) return;
+    auto surface_state = GetSurfaceState(surface);
+    surface_state->gpu_queue_support[{physicalDevice, queueFamilyIndex}] = (*pSupported == VK_TRUE);
+}
+
+void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,
+                                                                                   VkSurfaceKHR surface,
+                                                                                   uint32_t *pPresentModeCount,
+                                                                                   VkPresentModeKHR *pPresentModes,
+                                                                                   VkResult result) {
+    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
+
+    // TODO: This isn't quite right -- available modes may differ by surface AND physical device.
+    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
+    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfacePresentModesKHRState;
+
+    if (*pPresentModeCount) {
+        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
+        if (*pPresentModeCount > physical_device_state->present_modes.size())
+            physical_device_state->present_modes.resize(*pPresentModeCount);
+    }
+    if (pPresentModes) {
+        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
+        for (uint32_t i = 0; i < *pPresentModeCount; i++) {
+            physical_device_state->present_modes[i] = pPresentModes[i];
+        }
+    }
+}
+
+void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
+                                                                              uint32_t *pSurfaceFormatCount,
+                                                                              VkSurfaceFormatKHR *pSurfaceFormats,
+                                                                              VkResult result) {
+    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
+
+    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
+    auto &call_state = physical_device_state->vkGetPhysicalDeviceSurfaceFormatsKHRState;
+
+    if (*pSurfaceFormatCount) {
+        if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
+        if (*pSurfaceFormatCount > physical_device_state->surface_formats.size())
+            physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
+    }
+    if (pSurfaceFormats) {
+        if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
+        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
+            physical_device_state->surface_formats[i] = pSurfaceFormats[i];
+        }
+    }
+}
+
+void ValidationStateTracker::PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
+                                                                               const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
+                                                                               uint32_t *pSurfaceFormatCount,
+                                                                               VkSurfaceFormat2KHR *pSurfaceFormats,
+                                                                               VkResult result) {
+    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
+
+    auto physicalDeviceState = GetPhysicalDeviceState(physicalDevice);
+    if (*pSurfaceFormatCount) {
+        if (physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState < QUERY_COUNT) {
+            physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState = QUERY_COUNT;
+        }
+        if (*pSurfaceFormatCount > physicalDeviceState->surface_formats.size())
+            physicalDeviceState->surface_formats.resize(*pSurfaceFormatCount);
+    }
+    if (pSurfaceFormats) {
+        if (physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState < QUERY_DETAILS) {
+            physicalDeviceState->vkGetPhysicalDeviceSurfaceFormatsKHRState = QUERY_DETAILS;
+        }
+        for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
+            physicalDeviceState->surface_formats[i] = pSurfaceFormats[i].surfaceFormat;
+        }
+    }
+}
+
+void ValidationStateTracker::PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
+                                                                     const VkDebugUtilsLabelEXT *pLabelInfo) {
+    BeginCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
+}
+
+void ValidationStateTracker::PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer) {
+    EndCmdDebugUtilsLabel(report_data, commandBuffer);
+}
+
+void ValidationStateTracker::PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
+                                                                      const VkDebugUtilsLabelEXT *pLabelInfo) {
+    InsertCmdDebugUtilsLabel(report_data, commandBuffer, pLabelInfo);
+
+    // Squirrel away an easily accessible copy.
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    cb_state->debug_label = LoggingLabel(pLabelInfo);
+}
+
+void ValidationStateTracker::RecordEnumeratePhysicalDeviceGroupsState(
+    uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties) {
+    if (NULL != pPhysicalDeviceGroupProperties) {
+        for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
+            for (uint32_t j = 0; j < pPhysicalDeviceGroupProperties[i].physicalDeviceCount; j++) {
+                VkPhysicalDevice cur_phys_dev = pPhysicalDeviceGroupProperties[i].physicalDevices[j];
+                auto &phys_device_state = physical_device_map[cur_phys_dev];
+                phys_device_state.phys_device = cur_phys_dev;
+                // Init actual features for each physical device
+                DispatchGetPhysicalDeviceFeatures(cur_phys_dev, &phys_device_state.features2.features);
+            }
+        }
+    }
+}
+
+void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroups(
+    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
+    VkResult result) {
+    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
+    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
+}
+
+void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceGroupsKHR(
+    VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHR *pPhysicalDeviceGroupProperties,
+    VkResult result) {
+    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
+    RecordEnumeratePhysicalDeviceGroupsState(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
+}
+
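+// Performance query support: cache the enumerated VkPerformanceCounterKHR list per queue family on the physical device
+// state.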
+void ValidationStateTracker::RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(VkPhysicalDevice physicalDevice,
+                                                                                              uint32_t queueFamilyIndex,
+                                                                                              uint32_t *pCounterCount,
+                                                                                              VkPerformanceCounterKHR *pCounters) {
+    if (NULL == pCounters) return;
+
+    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
+    assert(physical_device_state);
+
+    std::unique_ptr<QUEUE_FAMILY_PERF_COUNTERS> queueFamilyCounters(new QUEUE_FAMILY_PERF_COUNTERS());
+    queueFamilyCounters->counters.resize(*pCounterCount);
+    for (uint32_t i = 0; i < *pCounterCount; i++) queueFamilyCounters->counters[i] = pCounters[i];
+
+    physical_device_state->perf_counters[queueFamilyIndex] = std::move(queueFamilyCounters);
+}
+
+void ValidationStateTracker::PostCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+    VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t *pCounterCount, VkPerformanceCounterKHR *pCounters,
+    VkPerformanceCounterDescriptionKHR *pCounterDescriptions, VkResult result) {
+    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
+    RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(physicalDevice, queueFamilyIndex, pCounterCount, pCounters);
+}
+
+void ValidationStateTracker::PostCallRecordAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR *pInfo,
+                                                                   VkResult result) {
+    if (result == VK_SUCCESS) performance_lock_acquired = true;
+}
+
+bool ValidationStateTracker::PreCallValidateReleaseProfilingLockKHR(VkDevice device) const {
+    bool skip = false;
+
+    if (!performance_lock_acquired) {
+        skip |= log_msg(
+            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device),
+            "VUID-vkReleaseProfilingLockKHR-device-03235",
+            "The profiling lock of device must have been held via a previous successful call to vkAcquireProfilingLockKHR.");
+    }
+
+    return skip;
+}
+
+void ValidationStateTracker::PostCallRecordReleaseProfilingLockKHR(VkDevice device) {
+    performance_lock_acquired = false;
+    for (auto &cmd_buffer : commandBufferMap) {
+        cmd_buffer.second->performance_lock_released = true;
+    }
+}
+
+void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device,
+                                                                          VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+                                                                          const VkAllocationCallbacks *pAllocator) {
+    if (!descriptorUpdateTemplate) return;
+    auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
+    template_state->destroyed = true;
+    desc_template_map.erase(descriptorUpdateTemplate);
+}
+
+void ValidationStateTracker::PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device,
+                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+                                                                             const VkAllocationCallbacks *pAllocator) {
+    if (!descriptorUpdateTemplate) return;
+    auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
+    template_state->destroyed = true;
+    desc_template_map.erase(descriptorUpdateTemplate);
+}
+
+void ValidationStateTracker::RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
+                                                                       VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
+    safe_VkDescriptorUpdateTemplateCreateInfo local_create_info(pCreateInfo);
+    auto template_state = std::make_shared<TEMPLATE_STATE>(*pDescriptorUpdateTemplate, &local_create_info);
+    desc_template_map[*pDescriptorUpdateTemplate] = std::move(template_state);
+}
+
+void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplate(
+    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
+    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
+    if (VK_SUCCESS != result) return;
+    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
+}
+
+void ValidationStateTracker::PostCallRecordCreateDescriptorUpdateTemplateKHR(
+    VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator,
+    VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate, VkResult result) {
+    if (VK_SUCCESS != result) return;
+    RecordCreateDescriptorUpdateTemplateState(pCreateInfo, pDescriptorUpdateTemplate);
+}
+
+void ValidationStateTracker::RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
+                                                                        VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+                                                                        const void *pData) {
+    auto const template_map_entry = desc_template_map.find(descriptorUpdateTemplate);
+    if ((template_map_entry == desc_template_map.end()) || (template_map_entry->second.get() == nullptr)) {
+        assert(0);
+    } else {
+        const TEMPLATE_STATE *template_state = template_map_entry->second.get();
+        // TODO: Record template push descriptor updates
+        if (template_state->create_info.templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
+            PerformUpdateDescriptorSetsWithTemplateKHR(descriptorSet, template_state, pData);
+        }
+    }
+}
+
+void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
+                                                                          VkDescriptorUpdateTemplate descriptorUpdateTemplate,
+                                                                          const void *pData) {
+    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
+}
+
+void ValidationStateTracker::PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
+                                                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+                                                                             const void *pData) {
+    RecordUpdateDescriptorSetWithTemplateState(descriptorSet, descriptorUpdateTemplate, pData);
+}
+
+void ValidationStateTracker::PreCallRecordCmdPushDescriptorSetWithTemplateKHR(
+    VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set,
+    const void *pData) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+
+    const auto template_state = GetDescriptorTemplateState(descriptorUpdateTemplate);
+    if (template_state) {
+        auto layout_data = GetPipelineLayout(layout);
+        auto dsl = GetDslFromPipelineLayout(layout_data, set);
+        const auto &template_ci = template_state->create_info;
+        if (dsl && !dsl->destroyed) {
+            // Decode the template into a set of write updates
+            cvdescriptorset::DecodedTemplateUpdate decoded_template(this, VK_NULL_HANDLE, template_state, pData,
+                                                                    dsl->GetDescriptorSetLayout());
+            RecordCmdPushDescriptorSetState(cb_state, template_ci.pipelineBindPoint, layout, set,
+                                            static_cast<uint32_t>(decoded_template.desc_writes.size()),
+                                            decoded_template.desc_writes.data());
+        }
+    }
+}
+
+void ValidationStateTracker::RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice,
+                                                                                uint32_t *pPropertyCount, void *pProperties) {
+    auto physical_device_state = GetPhysicalDeviceState(physicalDevice);
+    if (*pPropertyCount) {
+        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_COUNT) {
+            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_COUNT;
+            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
+        }
+        physical_device_state->display_plane_property_count = *pPropertyCount;
+    }
+    if (pProperties) {
+        if (physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState < QUERY_DETAILS) {
+            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = QUERY_DETAILS;
+            physical_device_state->vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = true;
+        }
+    }
+}
+
+void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice,
+                                                                                      uint32_t *pPropertyCount,
+                                                                                      VkDisplayPlanePropertiesKHR *pProperties,
+                                                                                      VkResult result) {
+    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
+    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
+}
+
+void ValidationStateTracker::PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice,
+                                                                                       uint32_t *pPropertyCount,
+                                                                                       VkDisplayPlaneProperties2KHR *pProperties,
+                                                                                       VkResult result) {
+    if ((VK_SUCCESS != result) && (VK_INCOMPLETE != result)) return;
+    RecordGetPhysicalDeviceDisplayPlanePropertiesState(physicalDevice, pPropertyCount, pProperties);
+}
+
+void ValidationStateTracker::PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
+                                                                   uint32_t query, VkQueryControlFlags flags, uint32_t index) {
+    QueryObject query_obj = {queryPool, query, index};
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    RecordCmdBeginQuery(cb_state, query_obj);
+}
+
+void ValidationStateTracker::PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
+                                                                 uint32_t query, uint32_t index) {
+    QueryObject query_obj = {queryPool, query, index};
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    RecordCmdEndQuery(cb_state, query_obj);
+}
+
+void ValidationStateTracker::RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo *create_info,
+                                                                     VkSamplerYcbcrConversion ycbcr_conversion) {
+    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
+        RecordCreateSamplerYcbcrConversionANDROID(create_info, ycbcr_conversion);
+    }
+}
+
+void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversion(VkDevice device,
+                                                                        const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
+                                                                        const VkAllocationCallbacks *pAllocator,
+                                                                        VkSamplerYcbcrConversion *pYcbcrConversion,
+                                                                        VkResult result) {
+    if (VK_SUCCESS != result) return;
+    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
+}
+
+void ValidationStateTracker::PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device,
+                                                                           const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
+                                                                           const VkAllocationCallbacks *pAllocator,
+                                                                           VkSamplerYcbcrConversion *pYcbcrConversion,
+                                                                           VkResult result) {
+    if (VK_SUCCESS != result) return;
+    RecordCreateSamplerYcbcrConversionState(pCreateInfo, *pYcbcrConversion);
+}
+
+void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
+                                                                         const VkAllocationCallbacks *pAllocator) {
+    if (!ycbcrConversion) return;
+    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
+        RecordDestroySamplerYcbcrConversionANDROID(ycbcrConversion);
+    }
+}
+
+void ValidationStateTracker::PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device,
+                                                                            VkSamplerYcbcrConversion ycbcrConversion,
+                                                                            const VkAllocationCallbacks *pAllocator) {
+    if (!ycbcrConversion) return;
+    if (device_extensions.vk_android_external_memory_android_hardware_buffer) {
+        RecordDestroySamplerYcbcrConversionANDROID(ycbcrConversion);
+    }
+}
+
+void ValidationStateTracker::PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
+                                                             uint32_t queryCount) {
+    // Do nothing if the feature is not enabled.
+    if (!enabled_features.host_query_reset_features.hostQueryReset) return;
+
+    // Do nothing if the query pool has been destroyed.
+    auto query_pool_state = GetQueryPoolState(queryPool);
+    if (!query_pool_state) return;
+
+    // Reset the state of existing entries.
+    QueryObject query_obj{queryPool, 0};
+    QueryObjectPass query_pass_obj{query_obj, 0};
+    const uint32_t max_query_count = std::min(queryCount, query_pool_state->createInfo.queryCount - firstQuery);
+    for (uint32_t i = 0; i < max_query_count; ++i) {
+        query_obj.query = firstQuery + i;
+        auto query_it = queryToStateMap.find(query_obj);
+        if (query_it != queryToStateMap.end()) query_it->second = QUERYSTATE_RESET;
+        if (query_pool_state->createInfo.queryType == VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR) {
+            for (uint32_t passIndex = 0; passIndex < query_pool_state->n_performance_passes; passIndex++) {
+                query_pass_obj.perf_pass = passIndex;
+                auto query_perf_it = queryPassToStateMap.find(query_pass_obj);
+                if (query_perf_it != queryPassToStateMap.end()) query_perf_it->second = QUERYSTATE_RESET;
+            }
+        }
+    }
+}
+
+void ValidationStateTracker::PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet,
+                                                                        const TEMPLATE_STATE *template_state, const void *pData) {
+    // Translate the templated update into a normal update for validation...
+    cvdescriptorset::DecodedTemplateUpdate decoded_update(this, descriptorSet, template_state, pData);
+    cvdescriptorset::PerformUpdateDescriptorSets(this, static_cast<uint32_t>(decoded_update.desc_writes.size()),
+                                                 decoded_update.desc_writes.data(), 0, NULL);
+}
+
+// Update the common AllocateDescriptorSetsData
+void ValidationStateTracker::UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo *p_alloc_info,
+                                                              cvdescriptorset::AllocateDescriptorSetsData *ds_data) const {
+    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
+        auto layout = GetDescriptorSetLayoutShared(p_alloc_info->pSetLayouts[i]);
+        if (layout) {
+            ds_data->layout_nodes[i] = layout;
+            // Count total descriptors required per type
+            for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
+                const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
+                uint32_t typeIndex = static_cast<uint32_t>(binding_layout->descriptorType);
+                ds_data->required_descriptors_by_type[typeIndex] += binding_layout->descriptorCount;
+            }
+        }
+        // Any unknown layouts will be flagged as errors during the ValidateAllocateDescriptorSets() call
+    }
+}
+
+// Decrement allocated sets from the pool and insert new sets into set_map
+void ValidationStateTracker::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
+                                                           const VkDescriptorSet *descriptor_sets,
+                                                           const cvdescriptorset::AllocateDescriptorSetsData *ds_data) {
+    auto pool_state = descriptorPoolMap[p_alloc_info->descriptorPool].get();
+    // Account for sets and individual descriptors allocated from pool
+    pool_state->availableSets -= p_alloc_info->descriptorSetCount;
+    for (auto it = ds_data->required_descriptors_by_type.begin(); it != ds_data->required_descriptors_by_type.end(); ++it) {
+        pool_state->availableDescriptorTypeCount[it->first] -= ds_data->required_descriptors_by_type.at(it->first);
+    }
+
+    const auto *variable_count_info = lvl_find_in_chain<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT>(p_alloc_info->pNext);
+    bool variable_count_valid = variable_count_info && variable_count_info->descriptorSetCount == p_alloc_info->descriptorSetCount;
+
+    // Create tracking object for each descriptor set; insert into global map and the pool's set.
+    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
+        uint32_t variable_count = variable_count_valid ? variable_count_info->pDescriptorCounts[i] : 0;
+
+        auto new_ds = std::make_shared<cvdescriptorset::DescriptorSet>(descriptor_sets[i], pool_state, ds_data->layout_nodes[i],
+                                                                       variable_count, this, report_data);
+        pool_state->sets.insert(new_ds.get());
+        new_ds->in_use.store(0);
+        setMap[descriptor_sets[i]] = std::move(new_ds);
+    }
+}
+
+// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
+void ValidationStateTracker::UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
+    UpdateDrawState(cb_state, bind_point);
+    cb_state->hasDispatchCmd = true;
+}
+
+// Generic function to handle state update for all CmdDraw* type functions
+void ValidationStateTracker::UpdateStateCmdDrawType(CMD_BUFFER_STATE *cb_state, VkPipelineBindPoint bind_point) {
+    UpdateStateCmdDrawDispatchType(cb_state, bind_point);
+    cb_state->hasDrawCmd = true;
+}
+
+void ValidationStateTracker::PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
+                                                   uint32_t firstVertex, uint32_t firstInstance) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
+}
+
+void ValidationStateTracker::PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
+                                                          uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
+                                                          uint32_t firstInstance) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
+}
+
+void ValidationStateTracker::PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+                                                           uint32_t count, uint32_t stride) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    BUFFER_STATE *buffer_state = GetBufferState(buffer);
+    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
+    AddCommandBufferBindingBuffer(cb_state, buffer_state);
+}
+
+void ValidationStateTracker::PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
+                                                                  VkDeviceSize offset, uint32_t count, uint32_t stride) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    BUFFER_STATE *buffer_state = GetBufferState(buffer);
+    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
+    AddCommandBufferBindingBuffer(cb_state, buffer_state);
+}
+
+void ValidationStateTracker::PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
+}
+
+void ValidationStateTracker::PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
+                                                               VkDeviceSize offset) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    UpdateStateCmdDrawDispatchType(cb_state, VK_PIPELINE_BIND_POINT_COMPUTE);
+    BUFFER_STATE *buffer_state = GetBufferState(buffer);
+    AddCommandBufferBindingBuffer(cb_state, buffer_state);
+}
+
+void ValidationStateTracker::PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
+                                                                  VkDeviceSize offset, VkBuffer countBuffer,
+                                                                  VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
+                                                                  uint32_t stride) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    BUFFER_STATE *buffer_state = GetBufferState(buffer);
+    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
+    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
+    AddCommandBufferBindingBuffer(cb_state, buffer_state);
+    AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
+}
+
+void ValidationStateTracker::PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer,
+                                                                         VkDeviceSize offset, VkBuffer countBuffer,
+                                                                         VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
+                                                                         uint32_t stride) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    BUFFER_STATE *buffer_state = GetBufferState(buffer);
+    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
+    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
+    AddCommandBufferBindingBuffer(cb_state, buffer_state);
+    AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
+}
+
+void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount,
+                                                             uint32_t firstTask) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
+}
+
+void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
+                                                                     VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
+    BUFFER_STATE *buffer_state = GetBufferState(buffer);
+    if (buffer_state) {
+        AddCommandBufferBindingBuffer(cb_state, buffer_state);
+    }
+}
+
+void ValidationStateTracker::PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer,
+                                                                          VkDeviceSize offset, VkBuffer countBuffer,
+                                                                          VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
+                                                                          uint32_t stride) {
+    CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
+    BUFFER_STATE *buffer_state = GetBufferState(buffer);
+    BUFFER_STATE *count_buffer_state = GetBufferState(countBuffer);
+    UpdateStateCmdDrawType(cb_state, VK_PIPELINE_BIND_POINT_GRAPHICS);
+    if (buffer_state) {
+        AddCommandBufferBindingBuffer(cb_state, buffer_state);
+    }
+    if (count_buffer_state) {
+        AddCommandBufferBindingBuffer(cb_state, count_buffer_state);
+    }
+}
+
+void ValidationStateTracker::PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
+                                                              const VkAllocationCallbacks *pAllocator,
+                                                              VkShaderModule *pShaderModule, VkResult result,
+                                                              void *csm_state_data) {
+    if (VK_SUCCESS != result) return;
+    create_shader_module_api_state *csm_state = reinterpret_cast<create_shader_module_api_state *>(csm_state_data);
+
+    spv_target_env spirv_environment = ((api_version >= VK_API_VERSION_1_1) ? SPV_ENV_VULKAN_1_1 : SPV_ENV_VULKAN_1_0);
+    bool is_spirv = (pCreateInfo->pCode[0] == spv::MagicNumber);
+    auto new_shader_module = is_spirv ? std::make_shared<SHADER_MODULE_STATE>(pCreateInfo, *pShaderModule, spirv_environment,
+                                                                              csm_state->unique_shader_id)
+                                      : std::make_shared<SHADER_MODULE_STATE>();
+    shaderModuleMap[*pShaderModule] = std::move(new_shader_module);
+}
+
+void ValidationStateTracker::RecordPipelineShaderStage(VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
+                                                       PIPELINE_STATE::StageState *stage_state) const {
+    // Validation shouldn't rely on anything in stage state being valid if the spirv isn't
+    auto module = GetShaderModuleState(pStage->module);
+    if (!module->has_valid_spirv) return;
+
+    // Validation shouldn't rely on anything in stage state being valid if the entrypoint isn't present
+    auto entrypoint = FindEntrypoint(module, pStage->pName, pStage->stage);
+    if (entrypoint == module->end()) return;
+
+    // Mark accessible ids
+    stage_state->accessible_ids = MarkAccessibleIds(module, entrypoint);
+    ProcessExecutionModes(module, entrypoint, pipeline);
+
+    stage_state->descriptor_uses =
+        CollectInterfaceByDescriptorSlot(report_data, module, stage_state->accessible_ids, &stage_state->has_writable_descriptor);
+    // Capture descriptor uses for the pipeline
+    for (auto use : stage_state->descriptor_uses) {
+        // While validating shaders, capture which slots are used by the pipeline
+        const uint32_t slot = use.first.first;
+        auto &reqs = pipeline->active_slots[slot][use.first.second];
+        reqs = descriptor_req(reqs | DescriptorTypeToReqs(module, use.second.type_id));
+        pipeline->max_active_slot = std::max(pipeline->max_active_slot, slot);
+    }
+}
+
+void ValidationStateTracker::ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE *cb_state, VkPipelineLayout layout) {
+    if (cb_state == nullptr) {
+        return;
+    }
+
+    const PIPELINE_LAYOUT_STATE *pipeline_layout_state = GetPipelineLayout(layout);
+    if (pipeline_layout_state == nullptr) {
+        return;
+    }
+
+    if (cb_state->push_constant_data_ranges != pipeline_layout_state->push_constant_ranges) {
+        cb_state->push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
+        cb_state->push_constant_data.clear();
+        uint32_t size_needed = 0;
+        for (auto push_constant_range : *cb_state->push_constant_data_ranges) {
+            size_needed = std::max(size_needed, (push_constant_range.offset + push_constant_range.size));
+        }
+        cb_state->push_constant_data.resize(size_needed, 0);
+    }
+}
+
+void ValidationStateTracker::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
+                                                                 uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages,
+                                                                 VkResult result) {
+    if ((result != VK_SUCCESS) && (result != VK_INCOMPLETE)) return;
+    auto swapchain_state = GetSwapchainState(swapchain);
+
+    if (*pSwapchainImageCount > swapchain_state->images.size()) swapchain_state->images.resize(*pSwapchainImageCount);
+
+    if (pSwapchainImages) {
+        if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_DETAILS) {
+            swapchain_state->vkGetSwapchainImagesKHRState = QUERY_DETAILS;
+        }
+        for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
+            if (swapchain_state->images[i].image != VK_NULL_HANDLE) continue;  // Already retrieved this.
+
+            // Add imageMap entries for each swapchain image
+            VkImageCreateInfo image_ci;
+            image_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+            image_ci.pNext = nullptr;                    // to be set later
+            image_ci.flags = VK_IMAGE_CREATE_ALIAS_BIT;  // to be updated below
+            image_ci.imageType = VK_IMAGE_TYPE_2D;
+            image_ci.format = swapchain_state->createInfo.imageFormat;
+            image_ci.extent.width = swapchain_state->createInfo.imageExtent.width;
+            image_ci.extent.height = swapchain_state->createInfo.imageExtent.height;
+            image_ci.extent.depth = 1;
+            image_ci.mipLevels = 1;
+            image_ci.arrayLayers = swapchain_state->createInfo.imageArrayLayers;
+            image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
+            image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+            image_ci.usage = swapchain_state->createInfo.imageUsage;
+            image_ci.sharingMode = swapchain_state->createInfo.imageSharingMode;
+            image_ci.queueFamilyIndexCount = swapchain_state->createInfo.queueFamilyIndexCount;
+            image_ci.pQueueFamilyIndices = swapchain_state->createInfo.pQueueFamilyIndices;
+            image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+            image_ci.pNext = lvl_find_in_chain<VkImageFormatListCreateInfoKHR>(swapchain_state->createInfo.pNext);
+
+            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR)
+                image_ci.flags |= VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT;
+            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR)
+                image_ci.flags |= VK_IMAGE_CREATE_PROTECTED_BIT;
+            if (swapchain_state->createInfo.flags & VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR)
+                image_ci.flags |= (VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR);
+
+            imageMap[pSwapchainImages[i]] = std::make_shared<IMAGE_STATE>(pSwapchainImages[i], &image_ci);
+            auto &image_state = imageMap[pSwapchainImages[i]];
+            image_state->valid = false;
+            image_state->create_from_swapchain = swapchain;
+            image_state->bind_swapchain = swapchain;
+            image_state->bind_swapchain_imageIndex = i;
+            swapchain_state->images[i].image = pSwapchainImages[i];
+            swapchain_state->images[i].bound_images.emplace(pSwapchainImages[i]);
+        }
+    }
+
+    if (*pSwapchainImageCount) {
+        if (swapchain_state->vkGetSwapchainImagesKHRState < QUERY_COUNT) {
+            swapchain_state->vkGetSwapchainImagesKHRState = QUERY_COUNT;
+        }
+        swapchain_state->get_swapchain_image_count = *pSwapchainImageCount;
+    }
+}
diff --git a/src/third_party/vulkan-validation-layers/src/layers/state_tracker.h b/src/third_party/vulkan-validation-layers/src/layers/state_tracker.h
new file mode 100644
index 0000000..4e94be2
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/state_tracker.h
@@ -0,0 +1,1101 @@
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (C) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
+ * Author: Tobin Ehlis <tobine@google.com>
+ * Author: Chris Forbes <chrisf@ijw.co.nz>
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ */
+
+#pragma once
+#include "core_validation_error_enums.h"
+#include "core_validation_types.h"
+#include "descriptor_sets.h"
+#include "vk_layer_logging.h"
+#include "vulkan/vk_layer.h"
+#include "vk_typemap_helper.h"
+#include "vk_layer_data.h"
+#include <atomic>
+#include <functional>
+#include <memory>
+#include <unordered_map>
+#include <unordered_set>
+#include <vector>
+#include <list>
+#include <deque>
+#include <map>
+
+enum SyncScope {
+    kSyncScopeInternal,
+    kSyncScopeExternalTemporary,
+    kSyncScopeExternalPermanent,
+};
+
+enum FENCE_STATUS { FENCE_UNSIGNALED, FENCE_INFLIGHT, FENCE_RETIRED };
+
+class FENCE_STATE : public BASE_NODE {
+  public:
+    VkFence fence;
+    VkFenceCreateInfo createInfo;
+    std::pair<VkQueue, uint64_t> signaler;
+    FENCE_STATUS state;
+    SyncScope scope;
+
+    // Default constructor
+    FENCE_STATE() : state(FENCE_UNSIGNALED), scope(kSyncScopeInternal) {}
+};
+
+class SEMAPHORE_STATE : public BASE_NODE {
+  public:
+    std::pair<VkQueue, uint64_t> signaler;
+    bool signaled;
+    SyncScope scope;
+    VkSemaphoreTypeKHR type;
+    uint64_t payload;
+};
+
+class EVENT_STATE : public BASE_NODE {
+  public:
+    int write_in_use;
+    VkPipelineStageFlags stageMask;
+};
+
+class QUEUE_STATE {
+  public:
+    VkQueue queue;
+    uint32_t queueFamilyIndex;
+
+    uint64_t seq;
+    std::deque<CB_SUBMISSION> submissions;
+};
+
+class QUERY_POOL_STATE : public BASE_NODE {
+  public:
+    VkQueryPoolCreateInfo createInfo;
+    VkQueryPool pool;
+
+    bool has_perf_scope_command_buffer = false;
+    bool has_perf_scope_render_pass = false;
+    uint32_t n_performance_passes = 0;
+};
+
+class QUEUE_FAMILY_PERF_COUNTERS {
+  public:
+    std::vector<VkPerformanceCounterKHR> counters;
+};
+
+struct PHYSICAL_DEVICE_STATE {
+    // Track the call state and array sizes for various query functions
+    CALL_STATE vkGetPhysicalDeviceQueueFamilyPropertiesState = UNCALLED;
+    CALL_STATE vkGetPhysicalDeviceLayerPropertiesState = UNCALLED;
+    CALL_STATE vkGetPhysicalDeviceExtensionPropertiesState = UNCALLED;
+    CALL_STATE vkGetPhysicalDeviceFeaturesState = UNCALLED;
+    CALL_STATE vkGetPhysicalDeviceSurfaceCapabilitiesKHRState = UNCALLED;
+    bool vkGetPhysicalDeviceSurfaceCapabilitiesKHR_called = false;
+    CALL_STATE vkGetPhysicalDeviceSurfacePresentModesKHRState = UNCALLED;
+    CALL_STATE vkGetPhysicalDeviceSurfaceFormatsKHRState = UNCALLED;
+    CALL_STATE vkGetPhysicalDeviceDisplayPlanePropertiesKHRState = UNCALLED;
+    bool vkGetPhysicalDeviceDisplayPlanePropertiesKHR_called = false;
+    safe_VkPhysicalDeviceFeatures2 features2 = {};
+    VkPhysicalDevice phys_device = VK_NULL_HANDLE;
+    uint32_t queue_family_known_count = 1;  // spec implies one QF must always be supported
+    std::vector<VkQueueFamilyProperties> queue_family_properties;
+    VkSurfaceCapabilitiesKHR surfaceCapabilities = {};
+    std::vector<VkPresentModeKHR> present_modes;
+    std::vector<VkSurfaceFormatKHR> surface_formats;
+    uint32_t display_plane_property_count = 0;
+
+    // Map of queue family index to QUEUE_FAMILY_PERF_COUNTERS
+    std::unordered_map<uint32_t, std::unique_ptr<QUEUE_FAMILY_PERF_COUNTERS>> perf_counters;
+};
+
+// This structure is used to save data across the CreateGraphicsPipelines down-chain API call
+struct create_graphics_pipeline_api_state {
+    std::vector<safe_VkGraphicsPipelineCreateInfo> gpu_create_infos;
+    std::vector<std::shared_ptr<PIPELINE_STATE>> pipe_state;
+    const VkGraphicsPipelineCreateInfo* pCreateInfos;
+};
+
+// This structure is used to save data across the CreateComputePipelines down-chain API call
+struct create_compute_pipeline_api_state {
+    std::vector<safe_VkComputePipelineCreateInfo> gpu_create_infos;
+    std::vector<std::shared_ptr<PIPELINE_STATE>> pipe_state;
+    const VkComputePipelineCreateInfo* pCreateInfos;
+};
+
+// This structure is used to save data across the CreateRayTracingPipelinesNV down-chain API call.
+struct create_ray_tracing_pipeline_api_state {
+    std::vector<safe_VkRayTracingPipelineCreateInfoNV> gpu_create_infos;
+    std::vector<std::shared_ptr<PIPELINE_STATE>> pipe_state;
+    const VkRayTracingPipelineCreateInfoNV* pCreateInfos;
+};
+
+// This structure is used to modify parameters for the CreatePipelineLayout down-chain API call
+struct create_pipeline_layout_api_state {
+    std::vector<VkDescriptorSetLayout> new_layouts;
+    VkPipelineLayoutCreateInfo modified_create_info;
+};
+
+// This structure is used to modify parameters for the CreateBuffer down-chain API call
+struct create_buffer_api_state {
+    VkBufferCreateInfo modified_create_info;
+};
+
+// This structure is used to modify and pass parameters for the CreateShaderModule down-chain API call
+struct create_shader_module_api_state {
+    uint32_t unique_shader_id;
+    VkShaderModuleCreateInfo instrumented_create_info;
+    std::vector<unsigned int> instrumented_pgm;
+};
+
+struct GpuQueue {
+    VkPhysicalDevice gpu;
+    uint32_t queue_family_index;
+};
+
+struct SubresourceRangeErrorCodes {
+    const char *base_mip_err, *mip_count_err, *base_layer_err, *layer_count_err;
+};
+
+inline bool operator==(GpuQueue const& lhs, GpuQueue const& rhs) {
+    return (lhs.gpu == rhs.gpu && lhs.queue_family_index == rhs.queue_family_index);
+}
+
+namespace std {
+template <>
+struct hash<GpuQueue> {
+    size_t operator()(GpuQueue gq) const throw() {
+        return hash<uint64_t>()((uint64_t)(gq.gpu)) ^ hash<uint32_t>()(gq.queue_family_index);
+    }
+};
+}  // namespace std
+
+struct SURFACE_STATE : public BASE_NODE {
+    VkSurfaceKHR surface = VK_NULL_HANDLE;
+    SWAPCHAIN_NODE* swapchain = nullptr;
+    std::unordered_map<GpuQueue, bool> gpu_queue_support;
+
+    SURFACE_STATE() {}
+    SURFACE_STATE(VkSurfaceKHR surface) : surface(surface) {}
+};
+
+struct SubpassLayout {
+    uint32_t index;
+    VkImageLayout layout;
+};
+
+using std::unordered_map;
+
+#define VALSTATETRACK_MAP_AND_TRAITS_IMPL(handle_type, state_type, map_member, instance_scope)        \
+    template <typename Dummy>                                                                         \
+    struct AccessorStateHandle<state_type, Dummy> {                                                   \
+        using StateType = state_type;                                                                 \
+        using HandleType = handle_type;                                                               \
+    };                                                                                                \
+    AccessorTraitsTypes<state_type>::MapType map_member;                                              \
+    template <typename Dummy>                                                                         \
+    struct AccessorTraits<state_type, Dummy> : AccessorTraitsTypes<state_type> {                      \
+        static const bool kInstanceScope = instance_scope;                                            \
+        static MapType ValidationStateTracker::*Map() { return &ValidationStateTracker::map_member; } \
+    };
+
+#define VALSTATETRACK_MAP_AND_TRAITS(handle_type, state_type, map_member) \
+    VALSTATETRACK_MAP_AND_TRAITS_IMPL(handle_type, state_type, map_member, false)
+#define VALSTATETRACK_MAP_AND_TRAITS_INSTANCE_SCOPE(handle_type, state_type, map_member) \
+    VALSTATETRACK_MAP_AND_TRAITS_IMPL(handle_type, state_type, map_member, true)
+
+// A special memory handle used to flag an object as unbound from memory
+static const VkDeviceMemory MEMORY_UNBOUND = VkDeviceMemory(~((uint64_t)(0)) - 1);
+
+static std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> GetDslFromPipelineLayout(
+    PIPELINE_LAYOUT_STATE const* layout_data, uint32_t set) {
+    std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> dsl = nullptr;
+    if (layout_data && (set < layout_data->set_layouts.size())) {
+        dsl = layout_data->set_layouts[set];
+    }
+    return dsl;
+}
+
+struct SHADER_MODULE_STATE;
+
+class ValidationStateTracker : public ValidationObject {
+  public:
+    //  TODO -- move to private
+    //  TODO -- make consistent with traits approach below.
+    unordered_map<VkQueue, QUEUE_STATE> queueMap;
+    unordered_map<VkEvent, EVENT_STATE> eventMap;
+
+    std::unordered_set<VkQueue> queues;  // All queues under given device
+    QueryMap queryToStateMap;
+    QueryPassMap queryPassToStateMap;
+    unordered_map<VkSamplerYcbcrConversion, uint64_t> ycbcr_conversion_ahb_fmt_map;
+
+    // Traits for State function resolution.  Specializations defined in the macro.
+    // NOTE: The Dummy argument allows for *partial* specialization at class scope, as full specialization at class scope
+    //       isn't supported until C++17.  Since the Dummy has a default all instantiations of the template can ignore it, but all
+    //       specializations of the template must list it (and not give it a default).
+    template <typename StateType, typename Dummy = int>
+    struct AccessorStateHandle {};
+    template <typename StateType, typename Dummy = int>
+    struct AccessorTraits {};
+    template <typename StateType_>
+    struct AccessorTraitsTypes {
+        using StateType = StateType_;
+        using HandleType = typename AccessorStateHandle<StateType>::HandleType;
+        using ReturnType = StateType*;
+        using SharedType = std::shared_ptr<StateType>;
+        using ConstSharedType = std::shared_ptr<const StateType>;
+        using MappedType = std::shared_ptr<StateType>;
+        using MapType = unordered_map<HandleType, MappedType>;
+    };
+
+    VALSTATETRACK_MAP_AND_TRAITS(VkRenderPass, RENDER_PASS_STATE, renderPassMap)
+    VALSTATETRACK_MAP_AND_TRAITS(VkDescriptorSetLayout, cvdescriptorset::DescriptorSetLayout, descriptorSetLayoutMap)
+    VALSTATETRACK_MAP_AND_TRAITS(VkSampler, SAMPLER_STATE, samplerMap)
+    VALSTATETRACK_MAP_AND_TRAITS(VkImageView, IMAGE_VIEW_STATE, imageViewMap)
+    VALSTATETRACK_MAP_AND_TRAITS(VkImage, IMAGE_STATE, imageMap)
+    VALSTATETRACK_MAP_AND_TRAITS(VkBufferView, BUFFER_VIEW_STATE, bufferViewMap)
+    VALSTATETRACK_MAP_AND_TRAITS(VkBuffer, BUFFER_STATE, bufferMap)
+    VALSTATETRACK_MAP_AND_TRAITS(VkPipeline, PIPELINE_STATE, pipelineMap)
+    VALSTATETRACK_MAP_AND_TRAITS(VkDeviceMemory, DEVICE_MEMORY_STATE, memObjMap)
+    VALSTATETRACK_MAP_AND_TRAITS(VkFramebuffer, FRAMEBUFFER_STATE, frameBufferMap)
+    VALSTATETRACK_MAP_AND_TRAITS(VkShaderModule, SHADER_MODULE_STATE, shaderModuleMap)
+    VALSTATETRACK_MAP_AND_TRAITS(VkDescriptorUpdateTemplateKHR, TEMPLATE_STATE, desc_template_map)
+    VALSTATETRACK_MAP_AND_TRAITS(VkSwapchainKHR, SWAPCHAIN_NODE, swapchainMap)
+    VALSTATETRACK_MAP_AND_TRAITS(VkDescriptorPool, DESCRIPTOR_POOL_STATE, descriptorPoolMap)
+    VALSTATETRACK_MAP_AND_TRAITS(VkDescriptorSet, cvdescriptorset::DescriptorSet, setMap)
+    VALSTATETRACK_MAP_AND_TRAITS(VkCommandBuffer, CMD_BUFFER_STATE, commandBufferMap)
+    VALSTATETRACK_MAP_AND_TRAITS(VkCommandPool, COMMAND_POOL_STATE, commandPoolMap)
+    VALSTATETRACK_MAP_AND_TRAITS(VkPipelineLayout, PIPELINE_LAYOUT_STATE, pipelineLayoutMap)
+    VALSTATETRACK_MAP_AND_TRAITS(VkFence, FENCE_STATE, fenceMap)
+    VALSTATETRACK_MAP_AND_TRAITS(VkQueryPool, QUERY_POOL_STATE, queryPoolMap)
+    VALSTATETRACK_MAP_AND_TRAITS(VkSemaphore, SEMAPHORE_STATE, semaphoreMap)
+    VALSTATETRACK_MAP_AND_TRAITS(VkAccelerationStructureNV, ACCELERATION_STRUCTURE_STATE, accelerationStructureMap)
+    VALSTATETRACK_MAP_AND_TRAITS_INSTANCE_SCOPE(VkSurfaceKHR, SURFACE_STATE, surface_map)
+
+    void AddAliasingImage(IMAGE_STATE* image_state);
+    void RemoveAliasingImage(IMAGE_STATE* image_state);
+    void RemoveAliasingImages(const std::unordered_set<VkImage>& bound_images);
+
+  public:
+    template <typename State>
+    typename AccessorTraits<State>::ReturnType Get(typename AccessorTraits<State>::HandleType handle) {
+        using Traits = AccessorTraits<State>;
+        auto map_member = Traits::Map();
+        const typename Traits::MapType& map =
+            (Traits::kInstanceScope && (this->*map_member).size() == 0) ? instance_state->*map_member : this->*map_member;
+
+        const auto found_it = map.find(handle);
+        if (found_it == map.end()) {
+            return nullptr;
+        }
+        return found_it->second.get();
+    };
+
+    template <typename State>
+    const typename AccessorTraits<State>::ReturnType Get(typename AccessorTraits<State>::HandleType handle) const {
+        using Traits = AccessorTraits<State>;
+        auto map_member = Traits::Map();
+        const typename Traits::MapType& map =
+            (Traits::kInstanceScope && (this->*map_member).size() == 0) ? instance_state->*map_member : this->*map_member;
+
+        const auto found_it = map.find(handle);
+        if (found_it == map.cend()) {
+            return nullptr;
+        }
+        return found_it->second.get();
+    };
+
+    template <typename State>
+    typename AccessorTraits<State>::SharedType GetShared(typename AccessorTraits<State>::HandleType handle) {
+        using Traits = AccessorTraits<State>;
+        auto map_member = Traits::Map();
+        const typename Traits::MapType& map =
+            (Traits::kInstanceScope && (this->*map_member).size() == 0) ? instance_state->*map_member : this->*map_member;
+
+        const auto found_it = map.find(handle);
+        if (found_it == map.end()) {
+            return nullptr;
+        }
+        return found_it->second;
+    };
+
+    template <typename State>
+    typename AccessorTraits<State>::ConstSharedType GetShared(typename AccessorTraits<State>::HandleType handle) const {
+        using Traits = AccessorTraits<State>;
+        auto map_member = Traits::Map();
+        const typename Traits::MapType& map =
+            (Traits::kInstanceScope && (this->*map_member).size() == 0) ? instance_state->*map_member : this->*map_member;
+
+        const auto found_it = map.find(handle);
+        if (found_it == map.cend()) {
+            return nullptr;
+        }
+        return found_it->second;
+    };
+
+    // Accessors for the VALSTATE... maps
+    std::shared_ptr<const cvdescriptorset::DescriptorSetLayout> GetDescriptorSetLayoutShared(VkDescriptorSetLayout dsLayout) const {
+        return GetShared<cvdescriptorset::DescriptorSetLayout>(dsLayout);
+    }
+    std::shared_ptr<cvdescriptorset::DescriptorSetLayout> GetDescriptorSetLayoutShared(VkDescriptorSetLayout dsLayout) {
+        return GetShared<cvdescriptorset::DescriptorSetLayout>(dsLayout);
+    }
+
+    std::shared_ptr<const RENDER_PASS_STATE> GetRenderPassShared(VkRenderPass renderpass) const {
+        return GetShared<RENDER_PASS_STATE>(renderpass);
+    }
+    std::shared_ptr<RENDER_PASS_STATE> GetRenderPassShared(VkRenderPass renderpass) {
+        return GetShared<RENDER_PASS_STATE>(renderpass);
+    }
+    const RENDER_PASS_STATE* GetRenderPassState(VkRenderPass renderpass) const { return Get<RENDER_PASS_STATE>(renderpass); }
+    RENDER_PASS_STATE* GetRenderPassState(VkRenderPass renderpass) { return Get<RENDER_PASS_STATE>(renderpass); }
+
+    std::shared_ptr<const SAMPLER_STATE> GetSamplerShared(VkSampler sampler) const { return GetShared<SAMPLER_STATE>(sampler); }
+    std::shared_ptr<SAMPLER_STATE> GetSamplerShared(VkSampler sampler) { return GetShared<SAMPLER_STATE>(sampler); }
+    const SAMPLER_STATE* GetSamplerState(VkSampler sampler) const { return Get<SAMPLER_STATE>(sampler); }
+    SAMPLER_STATE* GetSamplerState(VkSampler sampler) { return Get<SAMPLER_STATE>(sampler); }
+
+    std::shared_ptr<const IMAGE_VIEW_STATE> GetImageViewShared(VkImageView image_view) const {
+        return GetShared<IMAGE_VIEW_STATE>(image_view);
+    }
+    std::shared_ptr<IMAGE_VIEW_STATE> GetImageViewShared(VkImageView image_view) { return GetShared<IMAGE_VIEW_STATE>(image_view); }
+    const IMAGE_VIEW_STATE* GetImageViewState(VkImageView image_view) const { return Get<IMAGE_VIEW_STATE>(image_view); }
+    IMAGE_VIEW_STATE* GetImageViewState(VkImageView image_view) { return Get<IMAGE_VIEW_STATE>(image_view); }
+
+    std::shared_ptr<const IMAGE_STATE> GetImageShared(VkImage image) const { return GetShared<IMAGE_STATE>(image); }
+    std::shared_ptr<IMAGE_STATE> GetImageShared(VkImage image) { return GetShared<IMAGE_STATE>(image); }
+    const IMAGE_STATE* GetImageState(VkImage image) const { return Get<IMAGE_STATE>(image); }
+    IMAGE_STATE* GetImageState(VkImage image) { return Get<IMAGE_STATE>(image); }
+
+    std::shared_ptr<const BUFFER_VIEW_STATE> GetBufferViewShared(VkBufferView buffer_view) const {
+        return GetShared<BUFFER_VIEW_STATE>(buffer_view);
+    }
+    std::shared_ptr<BUFFER_VIEW_STATE> GetBufferViewShared(VkBufferView buffer_view) {
+        return GetShared<BUFFER_VIEW_STATE>(buffer_view);
+    }
+    const BUFFER_VIEW_STATE* GetBufferViewState(VkBufferView buffer_view) const { return Get<BUFFER_VIEW_STATE>(buffer_view); }
+    BUFFER_VIEW_STATE* GetBufferViewState(VkBufferView buffer_view) { return Get<BUFFER_VIEW_STATE>(buffer_view); }
+
+    std::shared_ptr<const BUFFER_STATE> GetBufferShared(VkBuffer buffer) const { return GetShared<BUFFER_STATE>(buffer); }
+    std::shared_ptr<BUFFER_STATE> GetBufferShared(VkBuffer buffer) { return GetShared<BUFFER_STATE>(buffer); }
+    const BUFFER_STATE* GetBufferState(VkBuffer buffer) const { return Get<BUFFER_STATE>(buffer); }
+    BUFFER_STATE* GetBufferState(VkBuffer buffer) { return Get<BUFFER_STATE>(buffer); }
+
+    const PIPELINE_STATE* GetPipelineState(VkPipeline pipeline) const { return Get<PIPELINE_STATE>(pipeline); }
+    PIPELINE_STATE* GetPipelineState(VkPipeline pipeline) { return Get<PIPELINE_STATE>(pipeline); }
+    const DEVICE_MEMORY_STATE* GetDevMemState(VkDeviceMemory mem) const { return Get<DEVICE_MEMORY_STATE>(mem); }
+    DEVICE_MEMORY_STATE* GetDevMemState(VkDeviceMemory mem) { return Get<DEVICE_MEMORY_STATE>(mem); }
+    const FRAMEBUFFER_STATE* GetFramebufferState(VkFramebuffer framebuffer) const { return Get<FRAMEBUFFER_STATE>(framebuffer); }
+    FRAMEBUFFER_STATE* GetFramebufferState(VkFramebuffer framebuffer) { return Get<FRAMEBUFFER_STATE>(framebuffer); }
+    const SHADER_MODULE_STATE* GetShaderModuleState(VkShaderModule module) const { return Get<SHADER_MODULE_STATE>(module); }
+    SHADER_MODULE_STATE* GetShaderModuleState(VkShaderModule module) { return Get<SHADER_MODULE_STATE>(module); }
+    const TEMPLATE_STATE* GetDescriptorTemplateState(VkDescriptorUpdateTemplateKHR descriptor_update_template) const {
+        return Get<TEMPLATE_STATE>(descriptor_update_template);
+    }
+    TEMPLATE_STATE* GetDescriptorTemplateState(VkDescriptorUpdateTemplateKHR descriptor_update_template) {
+        return Get<TEMPLATE_STATE>(descriptor_update_template);
+    }
+    const SWAPCHAIN_NODE* GetSwapchainState(VkSwapchainKHR swapchain) const { return Get<SWAPCHAIN_NODE>(swapchain); }
+    SWAPCHAIN_NODE* GetSwapchainState(VkSwapchainKHR swapchain) { return Get<SWAPCHAIN_NODE>(swapchain); }
+    const DESCRIPTOR_POOL_STATE* GetDescriptorPoolState(const VkDescriptorPool pool) const {
+        return Get<DESCRIPTOR_POOL_STATE>(pool);
+    }
+    DESCRIPTOR_POOL_STATE* GetDescriptorPoolState(const VkDescriptorPool pool) { return Get<DESCRIPTOR_POOL_STATE>(pool); }
+    const cvdescriptorset::DescriptorSet* GetSetNode(VkDescriptorSet set) const { return Get<cvdescriptorset::DescriptorSet>(set); }
+    cvdescriptorset::DescriptorSet* GetSetNode(VkDescriptorSet set) { return Get<cvdescriptorset::DescriptorSet>(set); }
+    const CMD_BUFFER_STATE* GetCBState(const VkCommandBuffer cb) const { return Get<CMD_BUFFER_STATE>(cb); }
+    CMD_BUFFER_STATE* GetCBState(const VkCommandBuffer cb) { return Get<CMD_BUFFER_STATE>(cb); }
+
+    std::shared_ptr<const COMMAND_POOL_STATE> GetCommandPoolShared(VkCommandPool pool) const {
+        return GetShared<COMMAND_POOL_STATE>(pool);
+    }
+    std::shared_ptr<COMMAND_POOL_STATE> GetCommandPoolShared(VkCommandPool pool) { return GetShared<COMMAND_POOL_STATE>(pool); }
+    const COMMAND_POOL_STATE* GetCommandPoolState(VkCommandPool pool) const { return Get<COMMAND_POOL_STATE>(pool); }
+    COMMAND_POOL_STATE* GetCommandPoolState(VkCommandPool pool) { return Get<COMMAND_POOL_STATE>(pool); }
+
+    std::shared_ptr<const PIPELINE_LAYOUT_STATE> GetPipelineLayoutShared(VkPipelineLayout pipeLayout) const {
+        return GetShared<PIPELINE_LAYOUT_STATE>(pipeLayout);
+    }
+    std::shared_ptr<PIPELINE_LAYOUT_STATE> GetPipelineLayoutShared(VkPipelineLayout pipeLayout) {
+        return GetShared<PIPELINE_LAYOUT_STATE>(pipeLayout);
+    }
+    const PIPELINE_LAYOUT_STATE* GetPipelineLayout(VkPipelineLayout pipeLayout) const {
+        return Get<PIPELINE_LAYOUT_STATE>(pipeLayout);
+    }
+    PIPELINE_LAYOUT_STATE* GetPipelineLayout(VkPipelineLayout pipeLayout) { return Get<PIPELINE_LAYOUT_STATE>(pipeLayout); }
+
+    const FENCE_STATE* GetFenceState(VkFence fence) const { return Get<FENCE_STATE>(fence); }
+    FENCE_STATE* GetFenceState(VkFence fence) { return Get<FENCE_STATE>(fence); }
+    const QUERY_POOL_STATE* GetQueryPoolState(VkQueryPool query_pool) const { return Get<QUERY_POOL_STATE>(query_pool); }
+    QUERY_POOL_STATE* GetQueryPoolState(VkQueryPool query_pool) { return Get<QUERY_POOL_STATE>(query_pool); }
+    const SEMAPHORE_STATE* GetSemaphoreState(VkSemaphore semaphore) const { return Get<SEMAPHORE_STATE>(semaphore); }
+    SEMAPHORE_STATE* GetSemaphoreState(VkSemaphore semaphore) { return Get<SEMAPHORE_STATE>(semaphore); }
+    const ACCELERATION_STRUCTURE_STATE* GetAccelerationStructureState(VkAccelerationStructureNV as) const {
+        return Get<ACCELERATION_STRUCTURE_STATE>(as);
+    }
+    ACCELERATION_STRUCTURE_STATE* GetAccelerationStructureState(VkAccelerationStructureNV as) {
+        return Get<ACCELERATION_STRUCTURE_STATE>(as);
+    }
+    const SURFACE_STATE* GetSurfaceState(VkSurfaceKHR surface) const { return Get<SURFACE_STATE>(surface); }
+    SURFACE_STATE* GetSurfaceState(VkSurfaceKHR surface) { return Get<SURFACE_STATE>(surface); }
+
+    // Class Declarations for helper functions
+    IMAGE_VIEW_STATE* GetAttachmentImageViewState(FRAMEBUFFER_STATE* framebuffer, uint32_t index);
+    const IMAGE_VIEW_STATE* GetAttachmentImageViewState(const FRAMEBUFFER_STATE* framebuffer, uint32_t index) const;
+    const EVENT_STATE* GetEventState(VkEvent event) const;
+    EVENT_STATE* GetEventState(VkEvent event);
+    const QUEUE_STATE* GetQueueState(VkQueue queue) const;
+    QUEUE_STATE* GetQueueState(VkQueue queue);
+    const BINDABLE* GetObjectMemBinding(const VulkanTypedHandle& typed_handle) const;
+    BINDABLE* GetObjectMemBinding(const VulkanTypedHandle& typed_handle);
+
+    // Used for instance versions of this object
+    unordered_map<VkPhysicalDevice, PHYSICAL_DEVICE_STATE> physical_device_map;
+    // Link to the device's physical-device data
+    PHYSICAL_DEVICE_STATE* physical_device_state;
+
+    // Link for derived device objects back to their parent instance object
+    ValidationStateTracker* instance_state;
+
+    const PHYSICAL_DEVICE_STATE* GetPhysicalDeviceState(VkPhysicalDevice phys) const;
+    PHYSICAL_DEVICE_STATE* GetPhysicalDeviceState(VkPhysicalDevice phys);
+    PHYSICAL_DEVICE_STATE* GetPhysicalDeviceState();
+    const PHYSICAL_DEVICE_STATE* GetPhysicalDeviceState() const;
+
+    using CommandBufferResetCallback = std::function<void(VkCommandBuffer)>;
+    std::unique_ptr<CommandBufferResetCallback> command_buffer_reset_callback;
+    template <typename Fn>
+    void SetCommandBufferResetCallback(Fn&& fn) {
+        command_buffer_reset_callback.reset(new CommandBufferResetCallback(std::forward<Fn>(fn)));
+    }
+
+    using SetImageViewInitialLayoutCallback = std::function<void(CMD_BUFFER_STATE*, const IMAGE_VIEW_STATE&, VkImageLayout)>;
+    std::unique_ptr<SetImageViewInitialLayoutCallback> set_image_view_initial_layout_callback;
+    template <typename Fn>
+    void SetSetImageViewInitialLayoutCallback(Fn&& fn) {
+        set_image_view_initial_layout_callback.reset(new SetImageViewInitialLayoutCallback(std::forward<Fn>(fn)));
+    }
+
+    void CallSetImageViewInitialLayoutCallback(CMD_BUFFER_STATE* cb_node, const IMAGE_VIEW_STATE& iv_state, VkImageLayout layout) {
+        if (set_image_view_initial_layout_callback) {
+            (*set_image_view_initial_layout_callback)(cb_node, iv_state, layout);
+        }
+    }
+
+    // State update functions
+    // Gets/Enumerations
+    void PostCallRecordEnumeratePhysicalDeviceGroups(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount,
+                                                     VkPhysicalDeviceGroupPropertiesKHR* pPhysicalDeviceGroupProperties,
+                                                     VkResult result);
+    void PostCallRecordEnumeratePhysicalDeviceGroupsKHR(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount,
+                                                        VkPhysicalDeviceGroupPropertiesKHR* pPhysicalDeviceGroupProperties,
+                                                        VkResult result);
+    void PostCallRecordEnumeratePhysicalDevices(VkInstance instance, uint32_t* pPhysicalDeviceCount,
+                                                VkPhysicalDevice* pPhysicalDevices, VkResult result);
+    void PostCallRecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+        VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters,
+        VkPerformanceCounterDescriptionKHR* pCounterDescriptions, VkResult result);
+    void PostCallRecordGetAccelerationStructureMemoryRequirementsNV(VkDevice device,
+                                                                    const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
+                                                                    VkMemoryRequirements2KHR* pMemoryRequirements);
+    void PostCallRecordGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements);
+    void PostCallRecordGetBufferMemoryRequirements2(VkDevice device, const VkBufferMemoryRequirementsInfo2KHR* pInfo,
+                                                    VkMemoryRequirements2KHR* pMemoryRequirements);
+    void PostCallRecordGetBufferMemoryRequirements2KHR(VkDevice device, const VkBufferMemoryRequirementsInfo2KHR* pInfo,
+                                                       VkMemoryRequirements2KHR* pMemoryRequirements);
+    void PostCallRecordGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue);
+    void PostCallRecordGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue);
+    void PostCallRecordGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd, VkResult result);
+    void PostCallRecordGetFenceStatus(VkDevice device, VkFence fence, VkResult result);
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    void PostCallRecordGetFenceWin32HandleKHR(VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo,
+                                              HANDLE* pHandle, VkResult result);
+#endif  // VK_USE_PLATFORM_WIN32_KHR
+    void PostCallRecordGetImageMemoryRequirements(VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements);
+    void PostCallRecordGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo,
+                                                   VkMemoryRequirements2* pMemoryRequirements);
+    void PostCallRecordGetImageMemoryRequirements2KHR(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo,
+                                                      VkMemoryRequirements2* pMemoryRequirements);
+    void PostCallRecordGetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount,
+                                                        VkSparseImageMemoryRequirements* pSparseMemoryRequirements);
+    void PostCallRecordGetImageSparseMemoryRequirements2(VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR* pInfo,
+                                                         uint32_t* pSparseMemoryRequirementCount,
+                                                         VkSparseImageMemoryRequirements2KHR* pSparseMemoryRequirements);
+    void PostCallRecordGetImageSparseMemoryRequirements2KHR(VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR* pInfo,
+                                                            uint32_t* pSparseMemoryRequirementCount,
+                                                            VkSparseImageMemoryRequirements2KHR* pSparseMemoryRequirements);
+    void PostCallRecordGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount,
+                                                                  VkDisplayPlanePropertiesKHR* pProperties, VkResult result);
+    void PostCallRecordGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount,
+                                                                   VkDisplayPlaneProperties2KHR* pProperties, VkResult result);
+    void PostCallRecordGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures);
+    void PostCallRecordGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures);
+    void PostCallRecordGetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures);
+    void PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount,
+                                                              VkQueueFamilyProperties* pQueueFamilyProperties);
+    void PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount,
+                                                               VkQueueFamilyProperties2KHR* pQueueFamilyProperties);
+    void PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(VkPhysicalDevice physicalDevice,
+                                                                  uint32_t* pQueueFamilyPropertyCount,
+                                                                  VkQueueFamilyProperties2KHR* pQueueFamilyProperties);
+    void PostCallRecordGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
+                                                               VkSurfaceCapabilitiesKHR* pSurfaceCapabilities, VkResult result);
+    void PostCallRecordGetPhysicalDeviceSurfaceCapabilities2KHR(VkPhysicalDevice physicalDevice,
+                                                                const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
+                                                                VkSurfaceCapabilities2KHR* pSurfaceCapabilities, VkResult result);
+    void PostCallRecordGetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
+                                                                VkSurfaceCapabilities2EXT* pSurfaceCapabilities, VkResult result);
+    void PostCallRecordGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
+                                                          uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats,
+                                                          VkResult result);
+    void PostCallRecordGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice,
+                                                           const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
+                                                           uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats,
+                                                           VkResult result);
+    void PostCallRecordGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
+                                                               uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes,
+                                                               VkResult result);
+    void PostCallRecordGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex,
+                                                          VkSurfaceKHR surface, VkBool32* pSupported, VkResult result);
+    void PostCallRecordGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, VkResult result);
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    void PostCallRecordGetSemaphoreWin32HandleKHR(VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo,
+                                                  HANDLE* pHandle, VkResult result);
+#endif  // VK_USE_PLATFORM_WIN32_KHR
+    void PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount,
+                                             VkImage* pSwapchainImages, VkResult result);
+    void PostCallRecordImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo, VkResult result);
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    void PostCallRecordImportFenceWin32HandleKHR(VkDevice device,
+                                                 const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo,
+                                                 VkResult result);
+#endif  // VK_USE_PLATFORM_WIN32_KHR
+    void PostCallRecordImportSemaphoreFdKHR(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo,
+                                            VkResult result);
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    void PostCallRecordImportSemaphoreWin32HandleKHR(VkDevice device,
+                                                     const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo,
+                                                     VkResult result);
+#endif  // VK_USE_PLATFORM_WIN32_KHR
+    void PostCallRecordSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfoKHR* pSignalInfo, VkResult result);
+
+    // Create/Destroy/Bind
+    void PostCallRecordBindAccelerationStructureMemoryNV(VkDevice device, uint32_t bindInfoCount,
+                                                         const VkBindAccelerationStructureMemoryInfoNV* pBindInfos,
+                                                         VkResult result);
+    void PostCallRecordBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset,
+                                        VkResult result);
+    void PostCallRecordBindBufferMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfoKHR* pBindInfos,
+                                         VkResult result);
+    void PostCallRecordBindBufferMemory2KHR(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfoKHR* pBindInfos,
+                                            VkResult result);
+    void PostCallRecordBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem, VkDeviceSize memoryOffset,
+                                       VkResult result);
+    void PostCallRecordBindImageMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfoKHR* pBindInfos,
+                                        VkResult result);
+    void PostCallRecordBindImageMemory2KHR(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfoKHR* pBindInfos,
+                                           VkResult result);
+
+    void PostCallRecordCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo* pCreateInfo,
+                                    const VkAllocationCallbacks* pAllocator, VkDevice* pDevice, VkResult result);
+    void PreCallRecordDestroyDevice(VkDevice device, const VkAllocationCallbacks* pAllocator);
+
+    void PostCallRecordCreateAccelerationStructureNV(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo,
+                                                     const VkAllocationCallbacks* pAllocator,
+                                                     VkAccelerationStructureNV* pAccelerationStructure, VkResult result);
+    void PreCallRecordDestroyAccelerationStructureNV(VkDevice device, VkAccelerationStructureNV accelerationStructure,
+                                                     const VkAllocationCallbacks* pAllocator);
+    void PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
+                                    VkBuffer* pBuffer, VkResult result);
+    void PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator);
+    void PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo,
+                                        const VkAllocationCallbacks* pAllocator, VkBufferView* pView, VkResult result);
+    void PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator);
+    void PostCallRecordCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo,
+                                         const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool, VkResult result);
+    void PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator);
+    void PostCallRecordCreateDisplayPlaneSurfaceKHR(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo,
+                                                    const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface,
+                                                    VkResult result);
+    void PostCallRecordCreateEvent(VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
+                                   VkEvent* pEvent, VkResult result);
+    void PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator);
+    void PostCallRecordCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo,
+                                            const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool,
+                                            VkResult result);
+    void PreCallRecordDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
+                                            const VkAllocationCallbacks* pAllocator);
+    void PostCallRecordCreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
+                                                 const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout,
+                                                 VkResult result);
+    void PostCallRecordResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags, VkResult result);
+    void PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags, VkResult result);
+    bool PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+                                               const VkComputePipelineCreateInfo* pCreateInfos,
+                                               const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines,
+                                               void* pipe_state) const;
+    void PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+                                              const VkComputePipelineCreateInfo* pCreateInfos,
+                                              const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result,
+                                              void* pipe_state);
+    void PostCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags,
+                                           VkResult result);
+    bool PreCallValidateAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo,
+                                               VkDescriptorSet* pDescriptorSets, void* ads_state_data) const;
+    void PreCallRecordDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
+                                                 const VkAllocationCallbacks* pAllocator);
+    void PostCallRecordCreateDescriptorUpdateTemplate(VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo,
+                                                      const VkAllocationCallbacks* pAllocator,
+                                                      VkDescriptorUpdateTemplateKHR* pDescriptorUpdateTemplate, VkResult result);
+    void PostCallRecordCreateDescriptorUpdateTemplateKHR(VkDevice device,
+                                                         const VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo,
+                                                         const VkAllocationCallbacks* pAllocator,
+                                                         VkDescriptorUpdateTemplateKHR* pDescriptorUpdateTemplate, VkResult result);
+    void PreCallRecordDestroyDescriptorUpdateTemplate(VkDevice device, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+                                                      const VkAllocationCallbacks* pAllocator);
+    void PreCallRecordDestroyDescriptorUpdateTemplateKHR(VkDevice device, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+                                                         const VkAllocationCallbacks* pAllocator);
+    void PostCallRecordCreateFence(VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
+                                   VkFence* pFence, VkResult result);
+    void PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator);
+    void PostCallRecordResetFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkResult result);
+    void PostCallRecordCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo,
+                                         const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer, VkResult result);
+    void PreCallRecordDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator);
+    bool PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+                                                const VkGraphicsPipelineCreateInfo* pCreateInfos,
+                                                const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines,
+                                                void* cgpl_state) const;
+    void PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+                                               const VkGraphicsPipelineCreateInfo* pCreateInfos,
+                                               const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result,
+                                               void* cgpl_state);
+    void PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
+                                   VkImage* pImage, VkResult result);
+    void PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator);
+    void PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo* pCreateInfo,
+                                       const VkAllocationCallbacks* pAllocator, VkImageView* pView, VkResult result);
+    void PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator);
+
+    void PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator);
+    void PostCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo,
+                                            const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout,
+                                            VkResult result);
+    void PreCallRecordDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
+                                            const VkAllocationCallbacks* pAllocator);
+    void PostCallRecordCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo,
+                                       const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool, VkResult result);
+    void PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator);
+    void PostCallRecordResetQueryPoolEXT(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount);
+    bool PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+                                                    const VkRayTracingPipelineCreateInfoNV* pCreateInfos,
+                                                    const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines,
+                                                    void* pipe_state) const;
+    void PostCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t count,
+                                                   const VkRayTracingPipelineCreateInfoNV* pCreateInfos,
+                                                   const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result,
+                                                   void* pipe_state);
+    void PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo,
+                                        const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass, VkResult result);
+    void PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR* pCreateInfo,
+                                            const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass, VkResult result);
+    void PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator);
+    void PostCallRecordCreateSampler(VkDevice device, const VkSamplerCreateInfo* pCreateInfo,
+                                     const VkAllocationCallbacks* pAllocator, VkSampler* pSampler, VkResult result);
+    void PreCallRecordDestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator);
+    void PostCallRecordCreateSamplerYcbcrConversion(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
+                                                    const VkAllocationCallbacks* pAllocator,
+                                                    VkSamplerYcbcrConversion* pYcbcrConversion, VkResult result);
+    void PostCallRecordDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
+                                                     const VkAllocationCallbacks* pAllocator);
+    void PostCallRecordCreateSamplerYcbcrConversionKHR(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
+                                                       const VkAllocationCallbacks* pAllocator,
+                                                       VkSamplerYcbcrConversion* pYcbcrConversion, VkResult result);
+    void PostCallRecordDestroySamplerYcbcrConversionKHR(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
+                                                        const VkAllocationCallbacks* pAllocator);
+    void PostCallRecordCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo,
+                                       const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore, VkResult result);
+    void PreCallRecordDestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator);
+    void PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo,
+                                          const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule, VkResult result,
+                                          void* csm_state);
+    void PreCallRecordDestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator);
+    void PreCallRecordDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator);
+    void PostCallRecordCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
+                                                 const VkSwapchainCreateInfoKHR* pCreateInfos,
+                                                 const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains,
+                                                 VkResult result);
+    void PostCallRecordCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo,
+                                          const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain, VkResult result);
+    void PreCallRecordDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator);
+
+    // CommandBuffer/Queue Control
+    void PreCallRecordBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo);
+    void PostCallRecordDeviceWaitIdle(VkDevice device, VkResult result);
+    void PostCallRecordEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result);
+    void PostCallRecordQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence,
+                                       VkResult result);
+    void PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR* pPresentInfo, VkResult result);
+    void PostCallRecordQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence,
+                                   VkResult result);
+    void PostCallRecordQueueWaitIdle(VkQueue queue, VkResult result);
+    void PreCallRecordSetEvent(VkDevice device, VkEvent event);
+    void PostCallRecordWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll,
+                                     uint64_t timeout, VkResult result);
+    void PostCallRecordAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo, VkResult result);
+    bool PreCallValidateReleaseProfilingLockKHR(VkDevice device) const;
+    void PostCallRecordReleaseProfilingLockKHR(VkDevice device);
+
+    // Allocate/Free
+    void PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo* pCreateInfo,
+                                              VkCommandBuffer* pCommandBuffer, VkResult result);
+    void PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo,
+                                              VkDescriptorSet* pDescriptorSets, VkResult result, void* ads_state);
+    void PostCallRecordAllocateMemory(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo,
+                                      const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory, VkResult result);
+    void PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount,
+                                         const VkCommandBuffer* pCommandBuffers);
+    void PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count,
+                                         const VkDescriptorSet* pDescriptorSets);
+    void PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks* pAllocator);
+    void PreCallRecordUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
+                                           const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount,
+                                           const VkCopyDescriptorSet* pDescriptorCopies);
+    void PreCallRecordUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
+                                                      VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData);
+    void PreCallRecordUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
+                                                         VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const void* pData);
+
+    // Memory mapping
+    void PostCallRecordMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, VkFlags flags,
+                                 void** ppData, VkResult result);
+    void PreCallRecordUnmapMemory(VkDevice device, VkDeviceMemory mem);
+
+    // Recorded Commands
+    void PreCallRecordCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo);
+    void PostCallRecordCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot, VkFlags flags);
+    void PostCallRecordCmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query,
+                                               VkQueryControlFlags flags, uint32_t index);
+    void PreCallRecordCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin,
+                                         VkSubpassContents contents);
+    void PreCallRecordCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin,
+                                             const VkSubpassBeginInfoKHR* pSubpassBeginInfo);
+    void PreCallRecordCmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
+                                            VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount,
+                                            const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount,
+                                            const uint32_t* pDynamicOffsets);
+    void PreCallRecordCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+                                         VkIndexType indexType);
+    void PreCallRecordCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline);
+    void PreCallRecordCmdBindShadingRateImageNV(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout);
+    void PreCallRecordCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount,
+                                           const VkBuffer* pBuffers, const VkDeviceSize* pOffsets);
+    void PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
+                                   VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions,
+                                   VkFilter filter);
+    void PostCallRecordCmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo,
+                                                       VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update,
+                                                       VkAccelerationStructureNV dst, VkAccelerationStructureNV src,
+                                                       VkBuffer scratch, VkDeviceSize scratchOffset);
+    void PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
+                                         const VkClearColorValue* pColor, uint32_t rangeCount,
+                                         const VkImageSubresourceRange* pRanges);
+    void PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
+                                                const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount,
+                                                const VkImageSubresourceRange* pRanges);
+    void PostCallRecordCmdCopyAccelerationStructureNV(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst,
+                                                      VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode);
+    void PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount,
+                                    const VkBufferCopy* pRegions);
+    void PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
+                                           VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions);
+    void PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
+                                   VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions);
+    void PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
+                                           VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions);
+    void PostCallRecordCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
+                                               uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride,
+                                               VkQueryResultFlags flags);
+    void PostCallRecordCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z);
+    void PostCallRecordCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset);
+    void PostCallRecordCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex,
+                               uint32_t firstInstance);
+    void PostCallRecordCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
+                                      uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance);
+    void PostCallRecordCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count,
+                                              uint32_t stride);
+    void PostCallRecordCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count,
+                                       uint32_t stride);
+    void PreCallRecordCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+                                                     VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
+                                                     uint32_t stride);
+    void PreCallRecordCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+                                              VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
+                                              uint32_t stride);
+    void PreCallRecordCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+                                                      VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
+                                                      uint32_t stride);
+    void PreCallRecordCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+                                                 uint32_t drawCount, uint32_t stride);
+    void PreCallRecordCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask);
+    void PostCallRecordCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer);
+    void PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot);
+    void PostCallRecordCmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index);
+    void PostCallRecordCmdEndRenderPass(VkCommandBuffer commandBuffer);
+    void PostCallRecordCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR* pSubpassEndInfo);
+    void PreCallRecordCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount,
+                                         const VkCommandBuffer* pCommandBuffers);
+    void PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size,
+                                    uint32_t data);
+    void PreCallRecordCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo);
+    void PostCallRecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents);
+    void PostCallRecordCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR* pSubpassBeginInfo,
+                                          const VkSubpassEndInfoKHR* pSubpassEndInfo);
+    void PreCallRecordCmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
+                                              VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount,
+                                              const VkWriteDescriptorSet* pDescriptorWrites);
+    void PreCallRecordCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer,
+                                                          VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+                                                          VkPipelineLayout layout, uint32_t set, const void* pData);
+    void PostCallRecordCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags,
+                                        uint32_t offset, uint32_t size, const void* pValues);
+    void PreCallRecordCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask);
+    void PostCallRecordCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
+                                         uint32_t queryCount);
+    void PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
+                                      VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
+                                      const VkImageResolve* pRegions);
+    void PreCallRecordCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]);
+    void PreCallRecordCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp,
+                                      float depthBiasSlopeFactor);
+    void PreCallRecordCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds);
+    void PreCallRecordCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask);
+    void PreCallRecordCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
+                                               uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors);
+    void PreCallRecordCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth);
+    void PreCallRecordCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern);
+    void PreCallRecordCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
+                                    const VkRect2D* pScissors);
+    void PreCallRecordCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask);
+    void PreCallRecordCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference);
+    void PreCallRecordCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask);
+    void PreCallRecordCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount,
+                                     const VkViewport* pViewports);
+    void PreCallRecordCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
+                                                         uint32_t viewportCount,
+                                                         const VkShadingRatePaletteNV* pShadingRatePalettes);
+    void PostCallRecordCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
+                                       VkDeviceSize dataSize, const void* pData);
+    void PreCallRecordCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents,
+                                    VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
+                                    uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers,
+                                    uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers,
+                                    uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers);
+    void PostCallRecordCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
+                                         VkQueryPool queryPool, uint32_t slot);
+    void PreCallRecordCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount,
+                                               const VkViewportWScalingNV* pViewportWScalings);
+
+    // WSI
+    void PostCallRecordAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore,
+                                           VkFence fence, uint32_t* pImageIndex, VkResult result);
+    void PostCallRecordAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex,
+                                            VkResult result);
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    void PostCallRecordCreateAndroidSurfaceKHR(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo,
+                                               const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result);
+#endif  // VK_USE_PLATFORM_ANDROID_KHR
+#ifdef VK_USE_PLATFORM_IOS_MVK
+    void PostCallRecordCreateIOSSurfaceMVK(VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo,
+                                           const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result);
+#endif  // VK_USE_PLATFORM_IOS_MVK
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+    void PostCallRecordCreateMacOSSurfaceMVK(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo,
+                                             const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result);
+#endif  // VK_USE_PLATFORM_MACOS_MVK
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    void PostCallRecordCreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo,
+                                             const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result);
+#endif  // VK_USE_PLATFORM_WIN32_KHR
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    void PostCallRecordCreateWaylandSurfaceKHR(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo,
+                                               const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result);
+#endif  // VK_USE_PLATFORM_WAYLAND_KHR
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    void PostCallRecordCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo,
+                                           const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result);
+#endif  // VK_USE_PLATFORM_XCB_KHR
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    void PostCallRecordCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo,
+                                            const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface, VkResult result);
+#endif  // VK_USE_PLATFORM_XLIB_KHR
+
+    // State utility functions
+    bool AddCommandBufferMem(small_unordered_map<CMD_BUFFER_STATE*, int, 8>& cb_bindings, VkDeviceMemory obj,
+                             CMD_BUFFER_STATE* cb_node);
+    bool AddCommandBufferBinding(small_unordered_map<CMD_BUFFER_STATE*, int, 8>& cb_bindings, const VulkanTypedHandle& obj,
+                                 CMD_BUFFER_STATE* cb_node);
+    void AddCommandBufferBindingAccelerationStructure(CMD_BUFFER_STATE*, ACCELERATION_STRUCTURE_STATE*);
+    void AddCommandBufferBindingBuffer(CMD_BUFFER_STATE*, BUFFER_STATE*);
+    void AddCommandBufferBindingBufferView(CMD_BUFFER_STATE*, BUFFER_VIEW_STATE*);
+    void AddCommandBufferBindingImage(CMD_BUFFER_STATE*, IMAGE_STATE*);
+    void AddCommandBufferBindingImageView(CMD_BUFFER_STATE*, IMAGE_VIEW_STATE*);
+    void AddCommandBufferBindingSampler(CMD_BUFFER_STATE*, SAMPLER_STATE*);
+    void AddMemObjInfo(void* object, const VkDeviceMemory mem, const VkMemoryAllocateInfo* pAllocateInfo);
+    void AddFramebufferBinding(CMD_BUFFER_STATE* cb_state, FRAMEBUFFER_STATE* fb_state);
+    void ClearMemoryObjectBindings(const VulkanTypedHandle& typed_handle);
+    void ClearMemoryObjectBinding(const VulkanTypedHandle& typed_handle, VkDeviceMemory mem);
+    void DecrementBoundResources(CMD_BUFFER_STATE const* cb_node);
+    void DeleteDescriptorSetPools();
+    void FreeCommandBufferStates(COMMAND_POOL_STATE* pool_state, const uint32_t command_buffer_count,
+                                 const VkCommandBuffer* command_buffers);
+    void FreeDescriptorSet(cvdescriptorset::DescriptorSet* descriptor_set);
+    BASE_NODE* GetStateStructPtrFromObject(const VulkanTypedHandle& object_struct);
+    void IncrementBoundObjects(CMD_BUFFER_STATE const* cb_node);
+    void IncrementResources(CMD_BUFFER_STATE* cb_node);
+    void InsertAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE* mem_info,
+                                                VkDeviceSize mem_offset, const VkMemoryRequirements& mem_reqs);
+    void InsertBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE* mem_info, VkDeviceSize mem_offset,
+                                 const VkMemoryRequirements& mem_reqs);
+    void InsertImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE* mem_info, VkDeviceSize mem_offset,
+                                VkMemoryRequirements mem_reqs, bool is_linear);
+    void InsertMemoryRange(const VulkanTypedHandle& typed_handle, DEVICE_MEMORY_STATE* mem_info, VkDeviceSize memoryOffset,
+                           VkMemoryRequirements memRequirements, bool is_linear);
+    void InvalidateCommandBuffers(small_unordered_map<CMD_BUFFER_STATE*, int, 8>& cb_nodes, const VulkanTypedHandle& obj,
+                                  bool unlink = true);
+    void InvalidateLinkedCommandBuffers(std::unordered_set<CMD_BUFFER_STATE*>& cb_nodes, const VulkanTypedHandle& obj);
+    void PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo*, const VkDescriptorSet*,
+                                       const cvdescriptorset::AllocateDescriptorSetsData*);
+    void PerformUpdateDescriptorSetsWithTemplateKHR(VkDescriptorSet descriptorSet, const TEMPLATE_STATE* template_state,
+                                                    const void* pData);
+    void RecordAcquireNextImageState(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore,
+                                     VkFence fence, uint32_t* pImageIndex);
+    void RecordCmdBeginQuery(CMD_BUFFER_STATE* cb_state, const QueryObject& query_obj);
+    void RecordCmdEndQuery(CMD_BUFFER_STATE* cb_state, const QueryObject& query_obj);
+    void RecordCmdEndRenderPassState(VkCommandBuffer commandBuffer);
+    void RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin,
+                                       const VkSubpassContents contents);
+    void RecordCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents);
+    void RecordCmdPushDescriptorSetState(CMD_BUFFER_STATE* cb_state, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
+                                         uint32_t set, uint32_t descriptorWriteCount,
+                                         const VkWriteDescriptorSet* pDescriptorWrites);
+    void RecordCreateImageANDROID(const VkImageCreateInfo* create_info, IMAGE_STATE* is_node);
+    void RecordCreateRenderPassState(RenderPassCreateVersion rp_version, std::shared_ptr<RENDER_PASS_STATE>& render_pass,
+                                     VkRenderPass* pRenderPass);
+    void RecordCreateSamplerYcbcrConversionState(const VkSamplerYcbcrConversionCreateInfo* create_info,
+                                                 VkSamplerYcbcrConversion ycbcr_conversion);
+    void RecordCreateSamplerYcbcrConversionANDROID(const VkSamplerYcbcrConversionCreateInfo* create_info,
+                                                   VkSamplerYcbcrConversion ycbcr_conversion);
+    void RecordCreateSwapchainState(VkResult result, const VkSwapchainCreateInfoKHR* pCreateInfo, VkSwapchainKHR* pSwapchain,
+                                    SURFACE_STATE* surface_state, SWAPCHAIN_NODE* old_swapchain_state);
+    void RecordDestroySamplerYcbcrConversionANDROID(VkSamplerYcbcrConversion ycbcr_conversion);
+    void RecordEnumeratePhysicalDeviceGroupsState(uint32_t* pPhysicalDeviceGroupCount,
+                                                  VkPhysicalDeviceGroupPropertiesKHR* pPhysicalDeviceGroupProperties);
+    void RecordEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCounters(VkPhysicalDevice physicalDevice,
+                                                                          uint32_t queueFamilyIndex, uint32_t* pCounterCount,
+                                                                          VkPerformanceCounterKHR* pCounters);
+    void RecordGetBufferMemoryRequirementsState(VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements);
+    void RecordGetDeviceQueueState(uint32_t queue_family_index, VkQueue queue);
+    void RecordGetExternalFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type);
+    void RecordGetImageMemoryRequiementsState(VkImage image, VkMemoryRequirements* pMemoryRequirements);
+    void RecordImportSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type,
+                                    VkSemaphoreImportFlagsKHR flags);
+    void RecordGetPhysicalDeviceDisplayPlanePropertiesState(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount,
+                                                            void* pProperties);
+    void RecordGetExternalSemaphoreState(VkSemaphore semaphore, VkExternalSemaphoreHandleTypeFlagBitsKHR handle_type);
+    void RecordImportFenceState(VkFence fence, VkExternalFenceHandleTypeFlagBitsKHR handle_type, VkFenceImportFlagsKHR flags);
+    void RecordUpdateDescriptorSetWithTemplateState(VkDescriptorSet descriptorSet,
+                                                    VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const void* pData);
+    void RecordCreateDescriptorUpdateTemplateState(const VkDescriptorUpdateTemplateCreateInfoKHR* pCreateInfo,
+                                                   VkDescriptorUpdateTemplateKHR* pDescriptorUpdateTemplate);
+    void RecordMappedMemory(VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, void** ppData);
+    void RecordPipelineShaderStage(const VkPipelineShaderStageCreateInfo* pStage, PIPELINE_STATE* pipeline,
+                                   PIPELINE_STATE::StageState* stage_state) const;
+    void RecordRenderPassDAG(RenderPassCreateVersion rp_version, const VkRenderPassCreateInfo2KHR* pCreateInfo,
+                             RENDER_PASS_STATE* render_pass);
+    void RecordVulkanSurface(VkSurfaceKHR* pSurface);
+    void RemoveAccelerationStructureMemoryRange(VkAccelerationStructureNV as, DEVICE_MEMORY_STATE* mem_info);
+    void RemoveCommandBufferBinding(const VulkanTypedHandle& object, CMD_BUFFER_STATE* cb_node);
+    void RemoveBufferMemoryRange(VkBuffer buffer, DEVICE_MEMORY_STATE* mem_info);
+    void RemoveImageMemoryRange(VkImage image, DEVICE_MEMORY_STATE* mem_info);
+    void ResetCommandBufferState(const VkCommandBuffer cb);
+    void RetireFence(VkFence fence);
+    void RetireWorkOnQueue(QUEUE_STATE* pQueue, uint64_t seq);
+    static bool SetEventStageMask(VkEvent event, VkPipelineStageFlags stageMask, EventToStageMap* localEventToStageMap);
+    void ResetCommandBufferPushConstantDataIfIncompatible(CMD_BUFFER_STATE* cb_state, VkPipelineLayout layout);
+    void SetMemBinding(VkDeviceMemory mem, BINDABLE* mem_binding, VkDeviceSize memory_offset,
+                       const VulkanTypedHandle& typed_handle);
+    static bool SetQueryState(QueryObject object, QueryState value, QueryMap* localQueryToStateMap);
+    static bool SetQueryStateMulti(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, QueryState value,
+                                   QueryMap* localQueryToStateMap);
+    QueryState GetQueryState(const QueryMap* localQueryToStateMap, VkQueryPool queryPool, uint32_t queryIndex) const;
+    bool SetSparseMemBinding(MEM_BINDING binding, const VulkanTypedHandle& typed_handle);
+    void UpdateBindBufferMemoryState(VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memoryOffset);
+    void UpdateBindImageMemoryState(const VkBindImageMemoryInfo& bindInfo);
+    void UpdateLastBoundDescriptorSets(CMD_BUFFER_STATE* cb_state, VkPipelineBindPoint pipeline_bind_point,
+                                       const PIPELINE_LAYOUT_STATE* pipeline_layout, uint32_t first_set, uint32_t set_count,
+                                       const VkDescriptorSet* pDescriptorSets, cvdescriptorset::DescriptorSet* push_descriptor_set,
+                                       uint32_t dynamic_offset_count, const uint32_t* p_dynamic_offsets);
+    void UpdateStateCmdDrawDispatchType(CMD_BUFFER_STATE* cb_state, VkPipelineBindPoint bind_point);
+    void UpdateStateCmdDrawType(CMD_BUFFER_STATE* cb_state, VkPipelineBindPoint bind_point);
+    void UpdateDrawState(CMD_BUFFER_STATE* cb_state, const VkPipelineBindPoint bind_point);
+    void UpdateAllocateDescriptorSetsData(const VkDescriptorSetAllocateInfo*, cvdescriptorset::AllocateDescriptorSetsData*) const;
+
+    DeviceFeatures enabled_features = {};
+    // Device specific data
+    VkPhysicalDeviceMemoryProperties phys_dev_mem_props = {};
+    VkPhysicalDeviceProperties phys_dev_props = {};
+    uint32_t physical_device_count;
+
+    // Device extension properties -- storing properties gathered from VkPhysicalDeviceProperties2KHR::pNext chain
+    struct DeviceExtensionProperties {
+        uint32_t max_push_descriptors;  // from VkPhysicalDevicePushDescriptorPropertiesKHR::maxPushDescriptors
+        VkPhysicalDeviceDescriptorIndexingPropertiesEXT descriptor_indexing_props;
+        VkPhysicalDeviceShadingRateImagePropertiesNV shading_rate_image_props;
+        VkPhysicalDeviceMeshShaderPropertiesNV mesh_shader_props;
+        VkPhysicalDeviceInlineUniformBlockPropertiesEXT inline_uniform_block_props;
+        VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT vtx_attrib_divisor_props;
+        VkPhysicalDeviceDepthStencilResolvePropertiesKHR depth_stencil_resolve_props;
+        VkPhysicalDeviceCooperativeMatrixPropertiesNV cooperative_matrix_props;
+        VkPhysicalDeviceTransformFeedbackPropertiesEXT transform_feedback_props;
+        VkPhysicalDeviceSubgroupProperties subgroup_props;
+        VkPhysicalDeviceFloatControlsPropertiesKHR float_controls_props;
+        VkPhysicalDeviceRayTracingPropertiesNV ray_tracing_props;
+        VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT texel_buffer_alignment_props;
+        VkPhysicalDeviceFragmentDensityMapPropertiesEXT fragment_density_map_props;
+        VkPhysicalDevicePerformanceQueryPropertiesKHR performance_query_props;
+        VkPhysicalDeviceTimelineSemaphorePropertiesKHR timeline_semaphore_props;
+    };
+    DeviceExtensionProperties phys_dev_ext_props = {};
+    std::vector<VkCooperativeMatrixPropertiesNV> cooperative_matrix_properties;
+
+    // Map from queue family index to queue count
+    unordered_map<uint32_t, uint32_t> queue_family_index_map;
+    bool performance_lock_acquired = false;
+
+    template <typename ExtProp>
+    void GetPhysicalDeviceExtProperties(VkPhysicalDevice gpu, ExtEnabled enabled, ExtProp* ext_prop) {
+        assert(ext_prop);
+        if (enabled) {
+            *ext_prop = lvl_init_struct<ExtProp>();
+            auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(ext_prop);
+            DispatchGetPhysicalDeviceProperties2KHR(gpu, &prop2);
+        }
+    }
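+
+    // Editorial sketch (hypothetical call site, not upstream code): the members of phys_dev_ext_props above are
+    // typically filled by chaining the extension property struct through VkPhysicalDeviceProperties2KHR::pNext:
+    //
+    //   GetPhysicalDeviceExtProperties(gpu, device_extensions.vk_nv_ray_tracing,
+    //                                  &phys_dev_ext_props.ray_tracing_props);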
+};
diff --git a/src/third_party/vulkan-validation-layers/src/layers/stateless_validation.h b/src/third_party/vulkan-validation-layers/src/layers/stateless_validation.h
new file mode 100644
index 0000000..a3caf21
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/stateless_validation.h
@@ -0,0 +1,1379 @@
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (C) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Dustin Graves <dustin@lunarg.com>
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ */
+
+#pragma once
+
+#include "parameter_name.h"
+#include "vk_typemap_helper.h"
+
+// Suppress unused warning on Linux
+#if defined(__GNUC__)
+#define DECORATE_UNUSED __attribute__((unused))
+#else
+#define DECORATE_UNUSED
+#endif
+
+static const char DECORATE_UNUSED *kVUID_PVError_NONE = "UNASSIGNED-GeneralParameterError-Info";
+static const char DECORATE_UNUSED *kVUID_PVError_InvalidUsage = "UNASSIGNED-GeneralParameterError-InvalidUsage";
+static const char DECORATE_UNUSED *kVUID_PVError_InvalidStructSType = "UNASSIGNED-GeneralParameterError-InvalidStructSType";
+static const char DECORATE_UNUSED *kVUID_PVError_InvalidStructPNext = "UNASSIGNED-GeneralParameterError-InvalidStructPNext";
+static const char DECORATE_UNUSED *kVUID_PVError_RequiredParameter = "UNASSIGNED-GeneralParameterError-RequiredParameter";
+static const char DECORATE_UNUSED *kVUID_PVError_ReservedParameter = "UNASSIGNED-GeneralParameterError-ReservedParameter";
+static const char DECORATE_UNUSED *kVUID_PVError_UnrecognizedValue = "UNASSIGNED-GeneralParameterError-UnrecognizedValue";
+static const char DECORATE_UNUSED *kVUID_PVError_DeviceLimit = "UNASSIGNED-GeneralParameterError-DeviceLimit";
+static const char DECORATE_UNUSED *kVUID_PVError_DeviceFeature = "UNASSIGNED-GeneralParameterError-DeviceFeature";
+static const char DECORATE_UNUSED *kVUID_PVError_FailureCode = "UNASSIGNED-GeneralParameterError-FailureCode";
+static const char DECORATE_UNUSED *kVUID_PVError_ExtensionNotEnabled = "UNASSIGNED-GeneralParameterError-ExtensionNotEnabled";
+static const char DECORATE_UNUSED *kVUID_PVPerfWarn_SuboptimalSwapchain = "UNASSIGNED-GeneralParameterPerfWarn-SuboptimalSwapchain";
+
+#undef DECORATE_UNUSED
+
+extern const uint32_t GeneratedVulkanHeaderVersion;
+
+extern const VkQueryPipelineStatisticFlags AllVkQueryPipelineStatisticFlagBits;
+extern const VkColorComponentFlags AllVkColorComponentFlagBits;
+extern const VkShaderStageFlags AllVkShaderStageFlagBits;
+extern const VkQueryControlFlags AllVkQueryControlFlagBits;
+extern const VkImageUsageFlags AllVkImageUsageFlagBits;
+extern const VkSampleCountFlags AllVkSampleCountFlagBits;
+
+extern const std::vector<VkCompareOp> AllVkCompareOpEnums;
+extern const std::vector<VkStencilOp> AllVkStencilOpEnums;
+extern const std::vector<VkBlendFactor> AllVkBlendFactorEnums;
+extern const std::vector<VkBlendOp> AllVkBlendOpEnums;
+extern const std::vector<VkLogicOp> AllVkLogicOpEnums;
+extern const std::vector<VkBorderColor> AllVkBorderColorEnums;
+extern const std::vector<VkImageLayout> AllVkImageLayoutEnums;
+extern const std::vector<VkFormat> AllVkFormatEnums;
+extern const std::vector<VkVertexInputRate> AllVkVertexInputRateEnums;
+extern const std::vector<VkPrimitiveTopology> AllVkPrimitiveTopologyEnums;
+
+// String returned by string_VkStructureType for an unrecognized type.
+const std::string UnsupportedStructureTypeString = "Unhandled VkStructureType";
+
+// String returned by string_VkResult for an unrecognized type.
+const std::string UnsupportedResultString = "Unhandled VkResult";
+
+// The base value used when computing the offset for an enumeration token value that is added by an extension.
+// When validating enumeration tokens, any value >= this value is considered to be provided by an extension.
+// See Appendix C.10 "Assigning Extension Token Values" from the Vulkan specification
+const uint32_t ExtEnumBaseValue = 1000000000;
+
+// The value of all VK_xxx_MAX_ENUM tokens
+const uint32_t MaxEnumValue = 0x7FFFFFFF;
+
+// Misc parameters of log_msg that are likely constant per command (or low frequency change)
+struct LogMiscParams {
+    VkDebugReportObjectTypeEXT objectType;
+    uint64_t srcObject;
+    const char *api_name;
+};
+
+class StatelessValidation : public ValidationObject {
+  public:
+    VkPhysicalDeviceLimits device_limits = {};
+    safe_VkPhysicalDeviceFeatures2 physical_device_features2;
+    const VkPhysicalDeviceFeatures &physical_device_features = physical_device_features2.features;
+
+    // Override chassis read/write locks for this validation object
+    // This override takes a deferred lock, i.e. the lock is not actually acquired.
+    virtual read_lock_guard_t read_lock() { return read_lock_guard_t(validation_object_mutex, std::defer_lock); }
+    virtual write_lock_guard_t write_lock() { return write_lock_guard_t(validation_object_mutex, std::defer_lock); }
+
+    // Device extension properties -- storing properties gathered from VkPhysicalDeviceProperties2KHR::pNext chain
+    struct DeviceExtensionProperties {
+        VkPhysicalDeviceShadingRateImagePropertiesNV shading_rate_image_props;
+        VkPhysicalDeviceMeshShaderPropertiesNV mesh_shader_props;
+        VkPhysicalDeviceRayTracingPropertiesNV ray_tracing_props;
+    };
+    DeviceExtensionProperties phys_dev_ext_props = {};
+
+    struct SubpassesUsageStates {
+        std::unordered_set<uint32_t> subpasses_using_color_attachment;
+        std::unordered_set<uint32_t> subpasses_using_depthstencil_attachment;
+    };
+
+    // Though this validation object is predominantly stateless, the Framebuffer checks are greatly simplified by creating and
+    // updating a map of the renderpass usage states, and these accesses need thread protection. Use a mutex separate from the
+    // parent object's to maintain that functionality.
+    mutable std::mutex renderpass_map_mutex;
+    std::unordered_map<VkRenderPass, SubpassesUsageStates> renderpasses_states;
+
+    // Constructor for stateless validation tracking
+    // StatelessValidation() : {}
+    /**
+     * Validate a minimum value.
+     *
+     * Verify that the specified value is greater than the specified lower bound.
+     *
+     * @param value Value to validate.
+     * @param lower_bound Lower bound value to use for validation.
+     * @param parameter_name Name of parameter being validated.
+     * @param vuid VUID to report if validation fails.
+     * @param misc Object type, object handle, and API name to use when logging the message.
+     * @return Boolean value indicating that the call should be skipped.
+     */
+    template <typename T>
+    bool ValidateGreaterThan(const T value, const T lower_bound, const ParameterName &parameter_name, const std::string &vuid,
+                             const LogMiscParams &misc) const {
+        bool skip_call = false;
+
+        if (value <= lower_bound) {
+            std::ostringstream ss;
+            ss << misc.api_name << ": parameter " << parameter_name.get_name() << " (= " << value << ") is not greater than "
+               << lower_bound;
+            skip_call |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, misc.objectType, misc.srcObject, vuid, "%s", ss.str().c_str());
+        }
+
+        return skip_call;
+    }
+
+    template <typename T>
+    bool ValidateGreaterThanZero(const T value, const ParameterName &parameter_name, const std::string &vuid,
+                                 const LogMiscParams &misc) const {
+        return ValidateGreaterThan(value, T{0}, parameter_name, vuid, misc);
+    }
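+
+    // Editorial sketch (hypothetical caller, not upstream code): a typical non-zero check on a creation parameter,
+    // reported against the owning object via LogMiscParams:
+    //
+    //   LogMiscParams misc = {VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, HandleToUint64(device), "vkCreateImage"};
+    //   skip |= ValidateGreaterThanZero(pCreateInfo->extent.width, "pCreateInfo->extent.width", kVUIDUndefined, misc);
+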
+    /**
+     * Validate a required pointer.
+     *
+     * Verify that a required pointer is not NULL.
+     *
+     * @param apiName Name of API call being validated.
+     * @param parameterName Name of parameter being validated.
+     * @param value Pointer to validate.
+     * @return Boolean value indicating that the call should be skipped.
+     */
+    bool validate_required_pointer(const char *apiName, const ParameterName &parameterName, const void *value,
+                                   const std::string &vuid) const {
+        bool skip_call = false;
+
+        if (value == NULL) {
+            skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                                 "%s: required parameter %s specified as NULL.", apiName, parameterName.get_name().c_str());
+        }
+
+        return skip_call;
+    }
+
+    /**
+     * Validate array count and pointer to array.
+     *
+     * Verify that required count and array parameters are not 0 or NULL.  If the
+     * count parameter is not optional, verify that it is not 0.  If the array
+     * parameter is NULL, and it is not optional, verify that count is 0.
+     *
+     * @param apiName Name of API call being validated.
+     * @param countName Name of count parameter.
+     * @param arrayName Name of array parameter.
+     * @param count Number of elements in the array.
+     * @param array Array to validate.
+     * @param countRequired The 'count' parameter may not be 0 when true.
+     * @param arrayRequired The 'array' parameter may not be NULL when true.
+     * @return Boolean value indicating that the call should be skipped.
+     */
+    template <typename T1, typename T2>
+    bool validate_array(const char *apiName, const ParameterName &countName, const ParameterName &arrayName, T1 count,
+                        const T2 *array, bool countRequired, bool arrayRequired, const char *count_required_vuid,
+                        const char *array_required_vuid) const {
+        bool skip_call = false;
+
+        // Count parameters not tagged as optional cannot be 0
+        if (countRequired && (count == 0)) {
+            skip_call |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, count_required_vuid,
+                        "%s: parameter %s must be greater than 0.", apiName, countName.get_name().c_str());
+        }
+
+        // Array parameters not tagged as optional cannot be NULL, unless the count is 0
+        if (arrayRequired && (count != 0) && (*array == NULL)) {
+            skip_call |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, array_required_vuid,
+                        "%s: required parameter %s specified as NULL.", apiName, arrayName.get_name().c_str());
+        }
+
+        return skip_call;
+    }
+
+    /**
+     * Validate pointer to array count and pointer to array.
+     *
+     * Verify that required count and array parameters are not NULL.  If count
+     * is not NULL and its value is not optional, verify that it is not 0.  If the
+     * array parameter is NULL, and it is not optional, verify that count is 0.
+     * The array parameter will typically be optional for this case (where count is
+     * a pointer), allowing the caller to retrieve the available count.
+     *
+     * @param apiName Name of API call being validated.
+     * @param countName Name of count parameter.
+     * @param arrayName Name of array parameter.
+     * @param count Pointer to the number of elements in the array.
+     * @param array Array to validate.
+     * @param countPtrRequired The 'count' parameter may not be NULL when true.
+     * @param countValueRequired The '*count' value may not be 0 when true.
+     * @param arrayRequired The 'array' parameter may not be NULL when true.
+     * @return Boolean value indicating that the call should be skipped.
+     */
+    template <typename T1, typename T2>
+    bool validate_array(const char *apiName, const ParameterName &countName, const ParameterName &arrayName, const T1 *count,
+                        const T2 *array, bool countPtrRequired, bool countValueRequired, bool arrayRequired,
+                        const char *count_required_vuid, const char *array_required_vuid) const {
+        bool skip_call = false;
+
+        if (count == NULL) {
+            if (countPtrRequired) {
+                skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                     kVUID_PVError_RequiredParameter, "%s: required parameter %s specified as NULL", apiName,
+                                     countName.get_name().c_str());
+            }
+        } else {
+            skip_call |= validate_array(apiName, countName, arrayName, *array ? (*count) : 0, &array, countValueRequired,
+                                        arrayRequired, count_required_vuid, array_required_vuid);
+        }
+
+        return skip_call;
+    }
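+
+    // Editorial note (illustrative, not upstream code): both validate_array overloads receive the address of the
+    // caller's array pointer, so the NULL check dereferences it exactly once. A hypothetical call site:
+    //
+    //   skip |= validate_array("vkAllocateCommandBuffers", "pAllocateInfo->commandBufferCount", "pCommandBuffers",
+    //                          pAllocateInfo->commandBufferCount, &pCommandBuffers, true, true,
+    //                          kVUIDUndefined, kVUIDUndefined);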
+
+    /**
+     * Validate a pointer to a Vulkan structure.
+     *
+     * Verify that a required pointer to a structure is not NULL.  If the pointer is
+     * not NULL, verify that each structure's sType field is set to the correct
+     * VkStructureType value.
+     *
+     * @param apiName Name of API call being validated.
+     * @param parameterName Name of struct parameter being validated.
+     * @param sTypeName Name of expected VkStructureType value.
+     * @param value Pointer to the struct to validate.
+     * @param sType VkStructureType for structure validation.
+     * @param required The parameter may not be NULL when true.
+     * @return Boolean value indicating that the call should be skipped.
+     */
+    template <typename T>
+    bool validate_struct_type(const char *apiName, const ParameterName &parameterName, const char *sTypeName, const T *value,
+                              VkStructureType sType, bool required, const char *struct_vuid, const char *stype_vuid) const {
+        bool skip_call = false;
+
+        if (value == NULL) {
+            if (required) {
+                skip_call |=
+                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, struct_vuid,
+                            "%s: required parameter %s specified as NULL", apiName, parameterName.get_name().c_str());
+            }
+        } else if (value->sType != sType) {
+            skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, stype_vuid,
+                                 "%s: parameter %s->sType must be %s.", apiName, parameterName.get_name().c_str(), sTypeName);
+        }
+
+        return skip_call;
+    }
+
+    /**
+     * Validate an array of Vulkan structures
+     *
+     * Verify that required count and array parameters are not 0 or NULL.  If
+     * the array contains 1 or more structures, verify that each structure's
+     * sType field is set to the correct VkStructureType value.
+     *
+     * @param apiName Name of API call being validated.
+     * @param countName Name of count parameter.
+     * @param arrayName Name of array parameter.
+     * @param sTypeName Name of expected VkStructureType value.
+     * @param count Number of elements in the array.
+     * @param array Array to validate.
+     * @param sType VkStructureType for structure validation.
+     * @param countRequired The 'count' parameter may not be 0 when true.
+     * @param arrayRequired The 'array' parameter may not be NULL when true.
+     * @return Boolean value indicating that the call should be skipped.
+     */
+    template <typename T>
+    bool validate_struct_type_array(const char *apiName, const ParameterName &countName, const ParameterName &arrayName,
+                                    const char *sTypeName, uint32_t count, const T *array, VkStructureType sType,
+                                    bool countRequired, bool arrayRequired, const char *stype_vuid, const char *param_vuid,
+                                    const char *count_required_vuid) const {
+        bool skip_call = false;
+
+        if ((count == 0) || (array == NULL)) {
+            skip_call |= validate_array(apiName, countName, arrayName, count, &array, countRequired, arrayRequired,
+                                        count_required_vuid, param_vuid);
+        } else {
+            // Verify that all structs in the array have the correct type
+            for (uint32_t i = 0; i < count; ++i) {
+                if (array[i].sType != sType) {
+                    skip_call |=
+                        log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, stype_vuid,
+                                "%s: parameter %s[%d].sType must be %s", apiName, arrayName.get_name().c_str(), i, sTypeName);
+                }
+            }
+        }
+
+        return skip_call;
+    }
+
+    /**
+     * Validate an array of Vulkan structures.
+     *
+     * Verify that required count and array parameters are not NULL.  If count
+     * is not NULL and its value is not optional, verify that it is not 0.
+     * If the array contains 1 or more structures, verify that each structure's
+     * sType field is set to the correct VkStructureType value.
+     *
+     * @param apiName Name of API call being validated.
+     * @param countName Name of count parameter.
+     * @param arrayName Name of array parameter.
+     * @param sTypeName Name of expected VkStructureType value.
+     * @param count Pointer to the number of elements in the array.
+     * @param array Array to validate.
+     * @param sType VkStructureType for structure validation.
+     * @param countPtrRequired The 'count' parameter may not be NULL when true.
+     * @param countValueRequired The '*count' value may not be 0 when true.
+     * @param arrayRequired The 'array' parameter may not be NULL when true.
+     * @return Boolean value indicating that the call should be skipped.
+     */
+    template <typename T>
+    bool validate_struct_type_array(const char *apiName, const ParameterName &countName, const ParameterName &arrayName,
+                                    const char *sTypeName, uint32_t *count, const T *array, VkStructureType sType,
+                                    bool countPtrRequired, bool countValueRequired, bool arrayRequired, const char *stype_vuid,
+                                    const char *param_vuid, const char *count_required_vuid) const {
+        bool skip_call = false;
+
+        if (count == NULL) {
+            if (countPtrRequired) {
+                skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                     kVUID_PVError_RequiredParameter, "%s: required parameter %s specified as NULL", apiName,
+                                     countName.get_name().c_str());
+            }
+        } else {
+            skip_call |= validate_struct_type_array(apiName, countName, arrayName, sTypeName, (*count), array, sType,
+                                                    countValueRequired, arrayRequired, stype_vuid, param_vuid, count_required_vuid);
+        }
+
+        return skip_call;
+    }
+
+    /**
+     * Validate a Vulkan handle.
+     *
+     * Verify that the specified handle is not VK_NULL_HANDLE.
+     *
+     * @param api_name Name of API call being validated.
+     * @param parameter_name Name of struct parameter being validated.
+     * @param value Handle to validate.
+     * @return Boolean value indicating that the call should be skipped.
+     */
+    template <typename T>
+    bool validate_required_handle(const char *api_name, const ParameterName &parameter_name, T value) const {
+        bool skip_call = false;
+
+        if (value == VK_NULL_HANDLE) {
+            skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                 kVUID_PVError_RequiredParameter, "%s: required parameter %s specified as VK_NULL_HANDLE", api_name,
+                                 parameter_name.get_name().c_str());
+        }
+
+        return skip_call;
+    }
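+
+    // Editorial illustration (hypothetical caller, not upstream code):
+    //
+    //   skip |= validate_required_handle("vkBindBufferMemory", "buffer", buffer);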
+
+    /**
+     * Validate an array of Vulkan handles.
+     *
+     * Verify that required count and array parameters are not NULL.  If count
+     * is not NULL and its value is not optional, verify that it is not 0.
+     * If the array contains 1 or more handles, verify that no handle is set to
+     * VK_NULL_HANDLE.
+     *
+     * @note This function is only intended to validate arrays of handles when none
+     *       of the handles are allowed to be VK_NULL_HANDLE.  For arrays of handles
+     *       that are allowed to contain VK_NULL_HANDLE, use validate_array() instead.
+     *
+     * @param api_name Name of API call being validated.
+     * @param count_name Name of count parameter.
+     * @param array_name Name of array parameter.
+     * @param count Number of elements in the array.
+     * @param array Array to validate.
+     * @param count_required The 'count' parameter may not be 0 when true.
+     * @param array_required The 'array' parameter may not be NULL when true.
+     * @return Boolean value indicating that the call should be skipped.
+     */
+    template <typename T>
+    bool validate_handle_array(const char *api_name, const ParameterName &count_name, const ParameterName &array_name,
+                               uint32_t count, const T *array, bool count_required, bool array_required) const {
+        bool skip_call = false;
+
+        if ((count == 0) || (array == NULL)) {
+            skip_call |= validate_array(api_name, count_name, array_name, count, &array, count_required, array_required,
+                                        kVUIDUndefined, kVUIDUndefined);
+        } else {
+            // Verify that no handles in the array are VK_NULL_HANDLE
+            for (uint32_t i = 0; i < count; ++i) {
+                if (array[i] == VK_NULL_HANDLE) {
+                    skip_call |=
+                        log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                kVUID_PVError_RequiredParameter, "%s: required parameter %s[%d] specified as VK_NULL_HANDLE",
+                                api_name, array_name.get_name().c_str(), i);
+                }
+            }
+        }
+
+        return skip_call;
+    }
+
+    /**
+     * Validate string array count and content.
+     *
+     * Verify that required count and array parameters are not 0 or NULL.  If the
+     * count parameter is not optional, verify that it is not 0.  If the array
+     * parameter is NULL, and it is not optional, verify that count is 0.  If the
+     * array parameter is not NULL, verify that none of the strings are NULL.
+     *
+     * @param apiName Name of API call being validated.
+     * @param countName Name of count parameter.
+     * @param arrayName Name of array parameter.
+     * @param count Number of strings in the array.
+     * @param array Array of strings to validate.
+     * @param countRequired The 'count' parameter may not be 0 when true.
+     * @param arrayRequired The 'array' parameter may not be NULL when true.
+     * @return Boolean value indicating that the call should be skipped.
+     */
+    bool validate_string_array(const char *apiName, const ParameterName &countName, const ParameterName &arrayName, uint32_t count,
+                               const char *const *array, bool countRequired, bool arrayRequired, const char *count_required_vuid,
+                               const char *array_required_vuid) const {
+        bool skip_call = false;
+
+        if ((count == 0) || (array == NULL)) {
+            skip_call |= validate_array(apiName, countName, arrayName, count, &array, countRequired, arrayRequired,
+                                        count_required_vuid, array_required_vuid);
+        } else {
+            // Verify that strings in the array are not NULL
+            for (uint32_t i = 0; i < count; ++i) {
+                if (array[i] == NULL) {
+                    skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                         kVUID_PVError_RequiredParameter, "%s: required parameter %s[%d] specified as NULL",
+                                         apiName, arrayName.get_name().c_str(), i);
+                }
+            }
+        }
+
+        return skip_call;
+    }
+
+    // Forward declaration for pNext validation
+    bool ValidatePnextStructContents(const char *api_name, const ParameterName &parameter_name,
+                                     const VkBaseOutStructure *header) const;
+
+    /**
+     * Validate a structure's pNext member.
+     *
+     * Verify that the specified pNext value points to the head of a list of
+     * allowed extension structures.  If no extension structures are allowed,
+     * verify that pNext is null.
+     *
+     * @param api_name Name of API call being validated.
+     * @param parameter_name Name of parameter being validated.
+     * @param allowed_struct_names Names of allowed structs.
+     * @param next Pointer to validate.
+     * @param allowed_type_count Total number of allowed structure types.
+     * @param allowed_types Array of structure types allowed for pNext.
+     * @param header_version Version of header defining the pNext validation rules.
+     * @return Boolean value indicating that the call should be skipped.
+     */
+    bool validate_struct_pnext(const char *api_name, const ParameterName &parameter_name, const char *allowed_struct_names,
+                               const void *next, size_t allowed_type_count, const VkStructureType *allowed_types,
+                               uint32_t header_version, const char *vuid) const {
+        bool skip_call = false;
+
+        // TODO: The valid pNext structure types are not recursive. Each structure has its own list of valid sTypes for pNext.
+        // Codegen a map of vectors containing the allowable pNext types for each struct and use that here -- also simplifies parms.
+        if (next != NULL) {
+            std::unordered_set<const void *> cycle_check;
+            std::unordered_set<VkStructureType, std::hash<int>> unique_stype_check;
+
+            const char *disclaimer =
+                "This warning is based on the Valid Usage documentation for version %d of the Vulkan header.  It is possible "
+                "that you are using a struct from a private extension or an extension that was added to a later version of the "
+                "Vulkan header, in which case your use of %s is perfectly valid but is not guaranteed to work correctly with "
+                "validation enabled";
+
+            if (allowed_type_count == 0) {
+                std::string message = "%s: value of %s must be NULL. ";
+                message += disclaimer;
+                skip_call |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                                     message.c_str(), api_name, parameter_name.get_name().c_str(), header_version,
+                                     parameter_name.get_name().c_str());
+            } else {
+                const VkStructureType *start = allowed_types;
+                const VkStructureType *end = allowed_types + allowed_type_count;
+                const VkBaseOutStructure *current = reinterpret_cast<const VkBaseOutStructure *>(next);
+
+                cycle_check.insert(next);
+
+                while (current != NULL) {
+                    if (((strncmp(api_name, "vkCreateInstance", strlen(api_name)) != 0) ||
+                         (current->sType != VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO)) &&
+                        ((strncmp(api_name, "vkCreateDevice", strlen(api_name)) != 0) ||
+                         (current->sType != VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO))) {
+                        if (cycle_check.find(current->pNext) != cycle_check.end()) {
+                            std::string message = "%s: %s chain contains a cycle -- pNext pointer " PRIx64 " is repeated.";
+                            skip_call |=
+                                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        kVUID_PVError_InvalidStructPNext, message.c_str(), api_name,
+                                        parameter_name.get_name().c_str(), reinterpret_cast<uint64_t>(next));
+                            break;
+                        } else {
+                            cycle_check.insert(current->pNext);
+                        }
+
+                        std::string type_name = string_VkStructureType(current->sType);
+                        if (unique_stype_check.find(current->sType) != unique_stype_check.end()) {
+                            std::string message = "%s: %s chain contains duplicate structure types: %s appears multiple times.";
+                            skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                                 VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUID_PVError_InvalidStructPNext,
+                                                 message.c_str(), api_name, parameter_name.get_name().c_str(), type_name.c_str());
+                        } else {
+                            unique_stype_check.insert(current->sType);
+                        }
+
+                        if (std::find(start, end, current->sType) == end) {
+                            if (type_name == UnsupportedStructureTypeString) {
+                                std::string message =
+                                    "%s: %s chain includes a structure with unknown VkStructureType (%d); Allowed structures are "
+                                    "[%s]. ";
+                                message += disclaimer;
+                                skip_call |=
+                                    log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+                                            0, vuid, message.c_str(), api_name, parameter_name.get_name().c_str(), current->sType,
+                                            allowed_struct_names, header_version, parameter_name.get_name().c_str());
+                            } else {
+                                std::string message =
+                                    "%s: %s chain includes a structure with unexpected VkStructureType %s; Allowed structures are "
+                                    "[%s]. ";
+                                message += disclaimer;
+                                skip_call |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT,
+                                                     VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid, message.c_str(), api_name,
+                                                     parameter_name.get_name().c_str(), type_name.c_str(), allowed_struct_names,
+                                                     header_version, parameter_name.get_name().c_str());
+                            }
+                        }
+                        skip_call |= ValidatePnextStructContents(api_name, parameter_name, current);
+                    }
+                    current = reinterpret_cast<const VkBaseOutStructure *>(current->pNext);
+                }
+            }
+        }
+
+        return skip_call;
+    }
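+
+    // Editorial sketch (hypothetical caller, not upstream code): generated per-command checks pass the list of
+    // sType values allowed in this structure's pNext chain, for example:
+    //
+    //   const VkStructureType allowed[] = {VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT};
+    //   skip |= validate_struct_pnext("vkCreateInstance", "pCreateInfo->pNext", "VkDebugUtilsMessengerCreateInfoEXT",
+    //                                 pCreateInfo->pNext, 1, allowed, GeneratedVulkanHeaderVersion, kVUIDUndefined);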
+
+    /**
+     * Validate a VkBool32 value.
+     *
+     * Generate a warning if a VkBool32 value is neither VK_TRUE nor VK_FALSE.
+     *
+     * @param apiName Name of API call being validated.
+     * @param parameterName Name of parameter being validated.
+     * @param value Boolean value to validate.
+     * @return Boolean value indicating that the call should be skipped.
+     */
+    bool validate_bool32(const char *apiName, const ParameterName &parameterName, VkBool32 value) const {
+        bool skip_call = false;
+
+        if ((value != VK_TRUE) && (value != VK_FALSE)) {
+            skip_call |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                 kVUID_PVError_UnrecognizedValue, "%s: value of %s (%d) is neither VK_TRUE nor VK_FALSE", apiName,
+                                 parameterName.get_name().c_str(), value);
+        }
+
+        return skip_call;
+    }
+
+    /**
+     * Validate a Vulkan enumeration value.
+     *
+     * Generate a warning if an enumeration token value does not fall within the core enumeration
+     * begin and end token values, and was not added to the enumeration by an extension.  Extension
+     * provided enumerations use the equation specified in Appendix C.10 of the Vulkan specification,
+     * with 1,000,000,000 as the base token value.
+     *
+     * @note This function does not expect to process enumerations defining bitmask flag bits.
+     *
+     * @param apiName Name of API call being validated.
+     * @param parameterName Name of parameter being validated.
+     * @param enumName Name of the enumeration being validated.
+     * @param valid_values The list of valid values for the enumeration.
+     * @param value Enumeration value to validate.
+     * @return Boolean value indicating that the call should be skipped.
+     */
+    template <typename T>
+    bool validate_ranged_enum(const char *apiName, const ParameterName &parameterName, const char *enumName,
+                              const std::vector<T> &valid_values, T value, const char *vuid) const {
+        bool skip = false;
+
+        if (std::find(valid_values.begin(), valid_values.end(), value) == valid_values.end()) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                        "%s: value of %s (%d) does not fall within the begin..end range of the core %s enumeration tokens and is "
+                        "not an extension added token.",
+                        apiName, parameterName.get_name().c_str(), value, enumName);
+        }
+
+        return skip;
+    }
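+
+    // Editorial illustration (hypothetical caller, not upstream code): the AllVk...Enums vectors declared at the top
+    // of this header supply the valid_values list, e.g.:
+    //
+    //   skip |= validate_ranged_enum("vkCreateImage", "pCreateInfo->initialLayout", "VkImageLayout",
+    //                                AllVkImageLayoutEnums, pCreateInfo->initialLayout, kVUIDUndefined);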
+
+    /**
+     * Validate an array of Vulkan enumeration values.
+     *
+     * Process all enumeration token values in the specified array and generate a warning if a value
+     * does not fall within the core enumeration begin and end token values, and was not added to
+     * the enumeration by an extension.  Extension provided enumerations use the equation specified
+     * in Appendix C.10 of the Vulkan specification, with 1,000,000,000 as the base token value.
+     *
+     * @note This function does not expect to process enumerations defining bitmask flag bits.
+     *
+     * @param apiName Name of API call being validated.
+     * @param countName Name of count parameter.
+     * @param arrayName Name of array parameter.
+     * @param enumName Name of the enumeration being validated.
+     * @param valid_values The list of valid values for the enumeration.
+     * @param count Number of enumeration values in the array.
+     * @param array Array of enumeration values to validate.
+     * @param countRequired The 'count' parameter may not be 0 when true.
+     * @param arrayRequired The 'array' parameter may not be NULL when true.
+     * @return Boolean value indicating that the call should be skipped.
+     */
+    template <typename T>
+    bool validate_ranged_enum_array(const char *apiName, const ParameterName &countName, const ParameterName &arrayName,
+                                    const char *enumName, const std::vector<T> &valid_values, uint32_t count, const T *array,
+                                    bool countRequired, bool arrayRequired) const {
+        bool skip_call = false;
+
+        if ((count == 0) || (array == NULL)) {
+            skip_call |= validate_array(apiName, countName, arrayName, count, &array, countRequired, arrayRequired, kVUIDUndefined,
+                                        kVUIDUndefined);
+        } else {
+            for (uint32_t i = 0; i < count; ++i) {
+                if (std::find(valid_values.begin(), valid_values.end(), array[i]) == valid_values.end()) {
+                    skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                         kVUID_PVError_UnrecognizedValue,
+                                         "%s: value of %s[%d] (%d) does not fall within the begin..end range of the core %s "
+                                         "enumeration tokens and is not an extension added token",
+                                         apiName, arrayName.get_name().c_str(), i, array[i], enumName);
+                }
+            }
+        }
+
+        return skip_call;
+    }
+
+    /**
+     * Verify that a reserved VkFlags value is zero.
+     *
+     * Verify that the specified value is zero, to check VkFlags values that are reserved for
+     * future use.
+     *
+     * @param api_name Name of API call being validated.
+     * @param parameter_name Name of parameter being validated.
+     * @param value Value to validate.
+     * @return Boolean value indicating that the call should be skipped.
+     */
+    bool validate_reserved_flags(const char *api_name, const ParameterName &parameter_name, VkFlags value, const char *vuid) const {
+        bool skip_call = false;
+
+        if (value != 0) {
+            skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                                 "%s: parameter %s must be 0.", api_name, parameter_name.get_name().c_str());
+        }
+
+        return skip_call;
+    }
+
+    enum FlagType { kRequiredFlags, kOptionalFlags, kRequiredSingleBit, kOptionalSingleBit };
+
+    /**
+     * Validate a Vulkan bitmask value.
+     *
+     * Generate a warning if a value with a VkFlags derived type does not contain valid flag bits
+     * for that type.
+     *
+     * @param api_name Name of API call being validated.
+     * @param parameter_name Name of parameter being validated.
+     * @param flag_bits_name Name of the VkFlags type being validated.
+     * @param all_flags A bit mask combining all valid flag bits for the VkFlags type being validated.
+     * @param value VkFlags value to validate.
+     * @param flag_type The type of flag, like optional, or single bit.
+     * @param vuid VUID used for flag that is outside defined bits (or has more than one bit for Bits type).
+     * @param flags_zero_vuid VUID used for non-optional Flags that are zero.
+     * @return Boolean value indicating that the call should be skipped.
+     */
+    bool validate_flags(const char *api_name, const ParameterName &parameter_name, const char *flag_bits_name, VkFlags all_flags,
+                        VkFlags value, const FlagType flag_type, const char *vuid, const char *flags_zero_vuid = nullptr) const {
+        bool skip_call = false;
+
+        if ((value & ~all_flags) != 0) {
+            skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                                 "%s: value of %s contains flag bits that are not recognized members of %s", api_name,
+                                 parameter_name.get_name().c_str(), flag_bits_name);
+        }
+
+        const bool required = flag_type == kRequiredFlags || flag_type == kRequiredSingleBit;
+        const char *zero_vuid = flag_type == kRequiredFlags ? flags_zero_vuid : vuid;
+        if (required && value == 0) {
+            skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, zero_vuid,
+                                 "%s: value of %s must not be 0.", api_name, parameter_name.get_name().c_str());
+        }
+
+        const auto HasMaxOneBitSet = [](const VkFlags f) {
+            // Decrementing flips the bits from the right up to and including the lowest set bit; the remaining
+            // bits are unchanged, so ANDing with the original value is non-zero exactly when more than one bit is set.
+            return f == 0 || !(f & (f - 1));
+        };
+
+        const bool is_bits_type = flag_type == kRequiredSingleBit || flag_type == kOptionalSingleBit;
+        if (is_bits_type && !HasMaxOneBitSet(value)) {
+            skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                                 "%s: value of %s contains multiple members of %s when only a single value is allowed", api_name,
+                                 parameter_name.get_name().c_str(), flag_bits_name);
+        }
+
+        return skip_call;
+    }
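+
+    // Editorial note (illustrative, not upstream code): worked example of the single-bit test above:
+    //
+    //   f = VK_SAMPLE_COUNT_4_BIT                         -> 0b0100 & 0b0011 == 0      (at most one bit, accepted)
+    //   f = VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_4_BIT -> 0b0101 & 0b0100 == 0b0100 (multiple bits, rejected)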
+
+    /**
+     * Validate an array of Vulkan bitmask values.
+     *
+     * Generate a warning if a value with a VkFlags derived type does not contain valid flag bits
+     * for that type.
+     *
+     * @param api_name Name of API call being validated.
+     * @param count_name Name of parameter being validated.
+     * @param array_name Name of parameter being validated.
+     * @param flag_bits_name Name of the VkFlags type being validated.
+     * @param all_flags A bitmask combining all valid flag bits for the VkFlags type being validated.
+     * @param count Number of VkFlags values in the array.
+     * @param array Array of VkFlags value to validate.
+     * @param count_required The 'count' parameter may not be 0 when true.
+     * @param array_required The 'array' parameter may not be NULL when true.
+     * @return Boolean value indicating that the call should be skipped.
+     */
+    bool validate_flags_array(const char *api_name, const ParameterName &count_name, const ParameterName &array_name,
+                              const char *flag_bits_name, VkFlags all_flags, uint32_t count, const VkFlags *array,
+                              bool count_required, bool array_required) const {
+        bool skip_call = false;
+
+        if ((count == 0) || (array == NULL)) {
+            skip_call |= validate_array(api_name, count_name, array_name, count, &array, count_required, array_required,
+                                        kVUIDUndefined, kVUIDUndefined);
+        } else {
+            // Verify that all VkFlags values in the array contain only recognized flag bits and, when required, are non-zero
+            for (uint32_t i = 0; i < count; ++i) {
+                if (array[i] == 0) {
+                    // Current XML registry logic for validity generation uses the array parameter's optional tag to determine if
+                    // elements in the array are allowed to be 0
+                    if (array_required) {
+                        skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                             kVUID_PVError_RequiredParameter, "%s: value of %s[%d] must not be 0", api_name,
+                                             array_name.get_name().c_str(), i);
+                    }
+                } else if ((array[i] & (~all_flags)) != 0) {
+                    skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                         kVUID_PVError_UnrecognizedValue,
+                                         "%s: value of %s[%d] contains flag bits that are not recognized members of %s", api_name,
+                                         array_name.get_name().c_str(), i, flag_bits_name);
+                }
+            }
+        }
+
+        return skip_call;
+    }
+
+    template <typename ExtensionState>
+    bool validate_extension_reqs(const ExtensionState &extensions, const char *vuid, const char *extension_type,
+                                 const char *extension_name) const {
+        bool skip = false;
+        if (!extension_name) {
+            return skip;  // Robust to invalid char *
+        }
+        auto info = ExtensionState::get_info(extension_name);
+
+        if (!info.state) {
+            return skip;  // Unknown extensions cannot be checked so report OK
+        }
+
+        // Check against the required list in the info
+        std::vector<const char *> missing;
+        for (const auto &req : info.requires) {
+            if (!(extensions.*(req.enabled))) {
+                missing.push_back(req.name);
+            }
+        }
+
+        // Report any missing requirements
+        if (missing.size()) {
+            std::string missing_joined_list = string_join(", ", missing);
+            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+                            HandleToUint64(instance), vuid, "Missing extension%s required by the %s extension %s: %s.",
+                            ((missing.size() > 1) ? "s" : ""), extension_type, extension_name, missing_joined_list.c_str());
+        }
+        return skip;
+    }
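+
+    // Editorial sketch (hypothetical call site, not upstream code): invoked once per requested extension name at
+    // instance or device creation, e.g.:
+    //
+    //   skip |= validate_extension_reqs(device_extensions, kVUIDUndefined, "device",
+    //                                   pCreateInfo->ppEnabledExtensionNames[i]);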
+
+    enum RenderPassCreateVersion { RENDER_PASS_VERSION_1 = 0, RENDER_PASS_VERSION_2 = 1 };
+
+    template <typename RenderPassCreateInfoGeneric>
+    bool ValidateSubpassGraphicsFlags(const debug_report_data *report_data, const RenderPassCreateInfoGeneric *pCreateInfo,
+                                      uint32_t dependency_index, uint32_t subpass, VkPipelineStageFlags stages, const char *vuid,
+                                      const char *target) const {
+        const VkPipelineStageFlags kCommonStages = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT | VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
+        const VkPipelineStageFlags kFramebufferStages =
+            VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT |
+            VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
+        const VkPipelineStageFlags kPrimitiveShadingPipelineStages =
+            kCommonStages | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT | VK_PIPELINE_STAGE_VERTEX_INPUT_BIT |
+            VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT |
+            VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT | VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT |
+            VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT | VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV | kFramebufferStages;
+        const VkPipelineStageFlags kMeshShadingPipelineStages =
+            kCommonStages | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT | VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV |
+            VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV | VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV | kFramebufferStages;
+        const VkPipelineStageFlags kFragmentDensityStages = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT;
+        const VkPipelineStageFlags kConditionalRenderingStages = VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT;
+        const VkPipelineStageFlags kCommandProcessingPipelineStages = kCommonStages | VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX;
+
+        const VkPipelineStageFlags kGraphicsStages = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT | kPrimitiveShadingPipelineStages |
+                                                     kMeshShadingPipelineStages | kFragmentDensityStages |
+                                                     kConditionalRenderingStages | kCommandProcessingPipelineStages;
+
+        bool skip = false;
+
+        const auto IsPipeline = [pCreateInfo](uint32_t subpass, const VkPipelineBindPoint stage) {
+            if (subpass == VK_SUBPASS_EXTERNAL)
+                return false;
+            else
+                return pCreateInfo->pSubpasses[subpass].pipelineBindPoint == stage;
+        };
+
+        const bool is_all_graphics_stages = (stages & ~kGraphicsStages) == 0;
+        if (IsPipeline(subpass, VK_PIPELINE_BIND_POINT_GRAPHICS) && !is_all_graphics_stages) {
+            skip |=
+                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, 0, vuid,
+                        "Dependency pDependencies[%" PRIu32
+                        "] specifies a %sStageMask that contains stages (%s) that are not part "
+                        "of the Graphics pipeline, as specified by the %sSubpass (= %" PRIu32 ") in pipelineBindPoint.",
+                        dependency_index, target, string_VkPipelineStageFlags(stages & ~kGraphicsStages).c_str(), target, subpass);
+        }
+
+        return skip;
+    };
+
+    template <typename RenderPassCreateInfoGeneric>
+    bool CreateRenderPassGeneric(VkDevice device, const RenderPassCreateInfoGeneric *pCreateInfo,
+                                 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass,
+                                 RenderPassCreateVersion rp_version) const {
+        bool skip = false;
+        uint32_t max_color_attachments = device_limits.maxColorAttachments;
+        bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
+        const char *vuid;
+        const auto *separate_depth_stencil_layouts_features =
+            lvl_find_in_chain<VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR>(physical_device_features2.pNext);
+
+        for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
+            const auto *attachment_description_stencil_layout =
+                (use_rp2) ? lvl_find_in_chain<VkAttachmentDescriptionStencilLayoutKHR>(
+                                reinterpret_cast<VkAttachmentDescription2KHR const *>(&pCreateInfo->pAttachments[i])->pNext)
+                          : 0;
+
+            if (pCreateInfo->pAttachments[i].format == VK_FORMAT_UNDEFINED) {
+                std::stringstream ss;
+                ss << (use_rp2 ? "vkCreateRenderPass2KHR" : "vkCreateRenderPass") << ": pCreateInfo->pAttachments[" << i
+                   << "].format is VK_FORMAT_UNDEFINED. ";
+                vuid =
+                    use_rp2 ? "VUID-VkAttachmentDescription2KHR-format-parameter" : "VUID-VkAttachmentDescription-format-parameter";
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                                "%s", ss.str().c_str());
+            }
+            if (pCreateInfo->pAttachments[i].finalLayout == VK_IMAGE_LAYOUT_UNDEFINED ||
+                pCreateInfo->pAttachments[i].finalLayout == VK_IMAGE_LAYOUT_PREINITIALIZED) {
+                vuid = use_rp2 ? "VUID-VkAttachmentDescription2KHR-finalLayout-03061"
+                               : "VUID-VkAttachmentDescription-finalLayout-00843";
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                                "pCreateInfo->pAttachments[%d].finalLayout must not be VK_IMAGE_LAYOUT_UNDEFINED or "
+                                "VK_IMAGE_LAYOUT_PREINITIALIZED.",
+                                i);
+            }
+            if (!separate_depth_stencil_layouts_features || !separate_depth_stencil_layouts_features->separateDepthStencilLayouts) {
+                if (pCreateInfo->pAttachments[i].initialLayout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR ||
+                    pCreateInfo->pAttachments[i].initialLayout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR ||
+                    pCreateInfo->pAttachments[i].initialLayout == VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR ||
+                    pCreateInfo->pAttachments[i].initialLayout == VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR) {
+                    vuid = use_rp2 ? "VUID-VkAttachmentDescription2KHR-separateDepthStencilLayouts-03298"
+                                   : "VUID-VkAttachmentDescription-separateDepthStencilLayouts-03284";
+                    skip |= log_msg(
+                        report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                        "pCreateInfo->pAttachments[%d].initialLayout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, "
+                        "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, or "
+                        "VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR",
+                        i);
+                }
+                if (pCreateInfo->pAttachments[i].finalLayout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR ||
+                    pCreateInfo->pAttachments[i].finalLayout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR ||
+                    pCreateInfo->pAttachments[i].finalLayout == VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR ||
+                    pCreateInfo->pAttachments[i].finalLayout == VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR) {
+                    vuid = use_rp2 ? "VUID-VkAttachmentDescription2KHR-separateDepthStencilLayouts-03299"
+                                   : "VUID-VkAttachmentDescription-separateDepthStencilLayouts-03285";
+                    skip |= log_msg(
+                        report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                        "pCreateInfo->pAttachments[%d].finalLayout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, "
+                        "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, or "
+                        "VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR",
+                        i);
+                }
+            }
+            if (!FormatIsDepthOrStencil(pCreateInfo->pAttachments[i].format)) {
+                if (pCreateInfo->pAttachments[i].initialLayout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR ||
+                    pCreateInfo->pAttachments[i].initialLayout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR ||
+                    pCreateInfo->pAttachments[i].initialLayout == VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR ||
+                    pCreateInfo->pAttachments[i].initialLayout == VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR) {
+                    vuid = use_rp2 ? "VUID-VkAttachmentDescription2KHR-format-03300" : "VUID-VkAttachmentDescription-format-03286";
+                    skip |= log_msg(
+                        report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                        "pCreateInfo->pAttachments[%d].initialLayout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, "
+                        "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, or "
+                        "VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR",
+                        i);
+                }
+                if (pCreateInfo->pAttachments[i].finalLayout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR ||
+                    pCreateInfo->pAttachments[i].finalLayout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR ||
+                    pCreateInfo->pAttachments[i].finalLayout == VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR ||
+                    pCreateInfo->pAttachments[i].finalLayout == VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR) {
+                    vuid = use_rp2 ? "VUID-VkAttachmentDescription2KHR-format-03301" : "VUID-VkAttachmentDescription-format-03287";
+                    skip |= log_msg(
+                        report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                        "pCreateInfo->pAttachments[%d].finalLayout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, "
+                        "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, or "
+                        "VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR",
+                        i);
+                }
+            } else if (FormatIsDepthAndStencil(pCreateInfo->pAttachments[i].format)) {
+                if (use_rp2) {
+                    if (!attachment_description_stencil_layout) {
+                        if (pCreateInfo->pAttachments[i].initialLayout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR ||
+                            pCreateInfo->pAttachments[i].initialLayout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR) {
+                            skip |=
+                                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        "VUID-VkAttachmentDescription2KHR-format-03302",
+                                        "pCreateInfo->pNext must include an instance of VkAttachmentDescriptionStencilLayoutKHR");
+                        }
+                        if (pCreateInfo->pAttachments[i].finalLayout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR ||
+                            pCreateInfo->pAttachments[i].finalLayout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR) {
+                            skip |=
+                                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                        "VUID-VkAttachmentDescription2KHR-format-03303",
+                                        "pCreateInfo->pNext must include an instance of VkAttachmentDescriptionStencilLayoutKHR");
+                        }
+                    }
+                } else {
+                    if (pCreateInfo->pAttachments[i].initialLayout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR ||
+                        pCreateInfo->pAttachments[i].initialLayout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR ||
+                        pCreateInfo->pAttachments[i].initialLayout == VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR ||
+                        pCreateInfo->pAttachments[i].initialLayout == VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR) {
+                        skip |= log_msg(
+                            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkAttachmentDescription-format-03288",
+                            "pCreateInfo->pAttachments[%d].initialLayout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, "
+                            "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, or "
+                            "VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR",
+                            i);
+                    }
+                    if (pCreateInfo->pAttachments[i].finalLayout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR ||
+                        pCreateInfo->pAttachments[i].finalLayout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR ||
+                        pCreateInfo->pAttachments[i].finalLayout == VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR ||
+                        pCreateInfo->pAttachments[i].finalLayout == VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR) {
+                        skip |= log_msg(
+                            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                            "VUID-VkAttachmentDescription-format-03289",
+                            "pCreateInfo->pAttachments[%d].finalLayout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, "
+                            "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, or "
+                            "VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR",
+                            i);
+                    }
+                }
+            } else if (FormatIsDepthOnly(pCreateInfo->pAttachments[i].format)) {
+                if (pCreateInfo->pAttachments[i].initialLayout == VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR ||
+                    pCreateInfo->pAttachments[i].initialLayout == VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR) {
+                    vuid = use_rp2 ? "VUID-VkAttachmentDescription2KHR-format-03304" : "VUID-VkAttachmentDescription-format-03290";
+                    skip |= log_msg(
+                        report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                        "pCreateInfo->pAttachments[%d].initialLayout must not be VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, or "
+                        "VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR",
+                        i);
+                }
+                if (pCreateInfo->pAttachments[i].finalLayout == VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR ||
+                    pCreateInfo->pAttachments[i].finalLayout == VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR) {
+                    vuid = use_rp2 ? "VUID-VkAttachmentDescription2KHR-format-03305" : "VUID-VkAttachmentDescription-format-03291";
+                    skip |= log_msg(
+                        report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                        "pCreateInfo->pAttachments[%d].finalLayout must not be VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, or "
+                        "VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR",
+                        i);
+                }
+            } else if (FormatIsStencilOnly(pCreateInfo->pAttachments[i].format)) {
+                if (pCreateInfo->pAttachments[i].initialLayout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR ||
+                    pCreateInfo->pAttachments[i].initialLayout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR) {
+                    vuid = use_rp2 ? "VUID-VkAttachmentDescription2KHR-format-03306" : "VUID-VkAttachmentDescription-format-03292";
+                    skip |= log_msg(
+                        report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                        "pCreateInfo->pAttachments[%d].initialLayout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, or "
+                        "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR",
+                        i);
+                }
+                if (pCreateInfo->pAttachments[i].finalLayout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR ||
+                    pCreateInfo->pAttachments[i].finalLayout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR) {
+                    vuid = use_rp2 ? "VUID-VkAttachmentDescription2KHR-format-03307" : "VUID-VkAttachmentDescription-format-03293";
+                    skip |= log_msg(
+                        report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                        "pCreateInfo->pAttachments[%d].finalLayout must not be VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, or "
+                        "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR",
+                        i);
+                }
+            }
+            if (use_rp2 && attachment_description_stencil_layout) {
+                if (attachment_description_stencil_layout->stencilInitialLayout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL ||
+                    attachment_description_stencil_layout->stencilInitialLayout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR ||
+                    attachment_description_stencil_layout->stencilInitialLayout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR ||
+                    attachment_description_stencil_layout->stencilInitialLayout ==
+                        VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL ||
+                    attachment_description_stencil_layout->stencilInitialLayout ==
+                        VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL ||
+                    attachment_description_stencil_layout->stencilInitialLayout ==
+                        VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL ||
+                    attachment_description_stencil_layout->stencilInitialLayout ==
+                        VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    "VUID-VkAttachmentDescriptionStencilLayoutKHR-stencilInitialLayout-03308",
+                                    "VkAttachmentDescriptionStencilLayoutKHR.stencilInitialLayout must not be "
+                                    "VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, "
+                                    "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, "
+                                    "VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, "
+                                    "VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, "
+                                    "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, or "
+                                    "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL.");
+                }
+                if (attachment_description_stencil_layout->stencilFinalLayout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL ||
+                    attachment_description_stencil_layout->stencilFinalLayout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR ||
+                    attachment_description_stencil_layout->stencilFinalLayout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR ||
+                    attachment_description_stencil_layout->stencilFinalLayout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL ||
+                    attachment_description_stencil_layout->stencilFinalLayout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL ||
+                    attachment_description_stencil_layout->stencilFinalLayout ==
+                        VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL ||
+                    attachment_description_stencil_layout->stencilFinalLayout ==
+                        VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL) {
+                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                                    "VUID-VkAttachmentDescriptionStencilLayoutKHR-stencilFinalLayout-03309",
+                                    "VkAttachmentDescriptionStencilLayoutKHR.stencilFinalLayout must not be "
+                                    "VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, "
+                                    "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, "
+                                    "VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, "
+                                    "VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, "
+                                    "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, or "
+                                    "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL.");
+                }
+                if (attachment_description_stencil_layout->stencilFinalLayout == VK_IMAGE_LAYOUT_UNDEFINED ||
+                    attachment_description_stencil_layout->stencilFinalLayout == VK_IMAGE_LAYOUT_PREINITIALIZED) {
+                    skip |= log_msg(
+                        report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                        "VUID-VkAttachmentDescriptionStencilLayoutKHR-stencilFinalLayout-03310",
+                        "VkAttachmentDescriptionStencilLayoutKHR.stencilFinalLayout must not be VK_IMAGE_LAYOUT_UNDEFINED, or "
+                        "VK_IMAGE_LAYOUT_PREINITIALIZED.");
+                }
+            }
+        }
+
+        for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
+            if (pCreateInfo->pSubpasses[i].colorAttachmentCount > max_color_attachments) {
+                vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-colorAttachmentCount-03063"
+                               : "VUID-VkSubpassDescription-colorAttachmentCount-00845";
+                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
+                                "Cannot create a render pass with %d color attachments. Max is %d.",
+                                pCreateInfo->pSubpasses[i].colorAttachmentCount, max_color_attachments);
+            }
+        }
+
+        for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
+            const auto &dependency = pCreateInfo->pDependencies[i];
+
+            // Spec currently only supports Graphics pipeline in render pass -- so only that pipeline is currently checked
+            vuid =
+                use_rp2 ? "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03054" : "VUID-VkRenderPassCreateInfo-pDependencies-00837";
+            skip |= ValidateSubpassGraphicsFlags(report_data, pCreateInfo, i, dependency.srcSubpass, dependency.srcStageMask, vuid,
+                                                 "src");
+
+            vuid =
+                use_rp2 ? "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03055" : "VUID-VkRenderPassCreateInfo-pDependencies-00838";
+            skip |= ValidateSubpassGraphicsFlags(report_data, pCreateInfo, i, dependency.dstSubpass, dependency.dstStageMask, vuid,
+                                                 "dst");
+        }
+
+        return skip;
+    }
+
+    template <typename T>
+    void RecordRenderPass(VkRenderPass renderPass, const T *pCreateInfo) {
+        std::unique_lock<std::mutex> lock(renderpass_map_mutex);
+        auto &renderpass_state = renderpasses_states[renderPass];
+        lock.unlock();
+
+        for (uint32_t subpass = 0; subpass < pCreateInfo->subpassCount; ++subpass) {
+            bool uses_color = false;
+            for (uint32_t i = 0; i < pCreateInfo->pSubpasses[subpass].colorAttachmentCount && !uses_color; ++i)
+                if (pCreateInfo->pSubpasses[subpass].pColorAttachments[i].attachment != VK_ATTACHMENT_UNUSED) uses_color = true;
+
+            bool uses_depthstencil = false;
+            if (pCreateInfo->pSubpasses[subpass].pDepthStencilAttachment)
+                if (pCreateInfo->pSubpasses[subpass].pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED)
+                    uses_depthstencil = true;
+
+            if (uses_color) renderpass_state.subpasses_using_color_attachment.insert(subpass);
+            if (uses_depthstencil) renderpass_state.subpasses_using_depthstencil_attachment.insert(subpass);
+        }
+    }
+
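+    // Illustrative sketch only -- SubpassUsesColor is a hypothetical helper, not upstream API.
+    // It shows how the per-render-pass state recorded by RecordRenderPass above could be queried,
+    // assuming renderpasses_states is an associative container (it is indexed with operator[]
+    // above) and reusing the same renderpass_map_mutex locking discipline.
+    bool SubpassUsesColor(VkRenderPass renderPass, uint32_t subpass) {
+        std::unique_lock<std::mutex> lock(renderpass_map_mutex);
+        auto it = renderpasses_states.find(renderPass);
+        if (it == renderpasses_states.end()) return false;
+        // subpasses_using_color_attachment is the set populated by RecordRenderPass.
+        return it->second.subpasses_using_color_attachment.count(subpass) != 0;
+    }
+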
+    bool require_device_extension(bool flag, char const *function_name, char const *extension_name) const;
+
+    bool validate_instance_extensions(const VkInstanceCreateInfo *pCreateInfo) const;
+
+    bool validate_api_version(uint32_t api_version, uint32_t effective_api_version) const;
+
+    bool validate_string(const char *apiName, const ParameterName &stringName, const std::string &vuid,
+                         const char *validateString) const;
+
+    bool ValidateCoarseSampleOrderCustomNV(const VkCoarseSampleOrderCustomNV *order) const;
+
+    bool ValidateQueueFamilies(uint32_t queue_family_count, const uint32_t *queue_families, const char *cmd_name,
+                               const char *array_parameter_name, const std::string &unique_error_code,
+                               const std::string &valid_error_code, bool optional);
+
+    bool ValidateDeviceQueueFamily(uint32_t queue_family, const char *cmd_name, const char *parameter_name,
+                                   const std::string &error_code, bool optional);
+
+    bool ValidateGeometryTrianglesNV(const VkGeometryTrianglesNV &triangles, VkDebugReportObjectTypeEXT object_type,
+                                     uint64_t object_handle, const char *func_name) const;
+    bool ValidateGeometryAABBNV(const VkGeometryAABBNV &geometry, VkDebugReportObjectTypeEXT object_type, uint64_t object_handle,
+                                const char *func_name) const;
+    bool ValidateGeometryNV(const VkGeometryNV &geometry, VkDebugReportObjectTypeEXT object_type, uint64_t object_handle,
+                            const char *func_name) const;
+    bool ValidateAccelerationStructureInfoNV(const VkAccelerationStructureInfoNV &info, VkDebugReportObjectTypeEXT object_type,
+                                             uint64_t object_handle, const char *func_name) const;
+
+    bool OutputExtensionError(const std::string &api_name, const std::string &extension_name) const;
+
+    void PostCallRecordCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
+                                        const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass, VkResult result);
+    void PostCallRecordCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
+                                            const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass, VkResult result);
+    void PostCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks *pAllocator);
+    void PostCallRecordCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo,
+                                    const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, VkResult result);
+
+    void PostCallRecordCreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
+                                      VkInstance *pInstance, VkResult result);
+
+    void PostCallRecordQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo, VkResult result);
+
+    bool manual_PreCallValidateCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
+                                               const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool) const;
+
+    bool manual_PreCallValidateCreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
+                                              VkInstance *pInstance) const;
+
+    bool manual_PreCallValidateCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo,
+                                            const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) const;
+
+    bool manual_PreCallValidateCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
+                                            const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer) const;
+
+    bool manual_PreCallValidateCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
+                                           const VkAllocationCallbacks *pAllocator, VkImage *pImage) const;
+
+    bool manual_PreCallValidateViewport(const VkViewport &viewport, const char *fn_name, const ParameterName &parameter_name,
+                                        VkDebugReportObjectTypeEXT object_type, uint64_t object) const;
+
+    bool manual_PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
+                                                       const VkGraphicsPipelineCreateInfo *pCreateInfos,
+                                                       const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) const;
+    bool manual_PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
+                                                      const VkComputePipelineCreateInfo *pCreateInfos,
+                                                      const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) const;
+
+    bool manual_PreCallValidateCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
+                                             const VkAllocationCallbacks *pAllocator, VkSampler *pSampler) const;
+    bool manual_PreCallValidateCreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
+                                                         const VkAllocationCallbacks *pAllocator,
+                                                         VkDescriptorSetLayout *pSetLayout) const;
+
+    bool manual_PreCallValidateUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
+                                                    const VkWriteDescriptorSet *pDescriptorWrites, uint32_t descriptorCopyCount,
+                                                    const VkCopyDescriptorSet *pDescriptorCopies) const;
+
+    bool manual_PreCallValidateFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount,
+                                                  const VkDescriptorSet *pDescriptorSets) const;
+
+    bool manual_PreCallValidateCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
+                                                const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) const;
+
+    bool manual_PreCallValidateCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
+                                                    const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) const;
+
+    bool manual_PreCallValidateFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount,
+                                                  const VkCommandBuffer *pCommandBuffers) const;
+
+    bool manual_PreCallValidateBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo) const;
+
+    bool manual_PreCallValidateCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount,
+                                              const VkViewport *pViewports) const;
+
+    bool manual_PreCallValidateCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
+                                             const VkRect2D *pScissors) const;
+    bool manual_PreCallValidateCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) const;
+
+    bool manual_PreCallValidateCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
+                                       uint32_t firstVertex, uint32_t firstInstance) const;
+
+    bool manual_PreCallValidateCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count,
+                                               uint32_t stride) const;
+
+    bool manual_PreCallValidateCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+                                                      uint32_t count, uint32_t stride) const;
+
+    bool manual_PreCallValidateCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount,
+                                                   const VkClearAttachment *pAttachments, uint32_t rectCount,
+                                                   const VkClearRect *pRects) const;
+
+    bool manual_PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
+                                            VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
+                                            const VkImageCopy *pRegions) const;
+
+    bool manual_PreCallValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
+                                            VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
+                                            const VkImageBlit *pRegions, VkFilter filter) const;
+
+    bool manual_PreCallValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
+                                                    VkImageLayout dstImageLayout, uint32_t regionCount,
+                                                    const VkBufferImageCopy *pRegions) const;
+
+    bool manual_PreCallValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
+                                                    VkBuffer dstBuffer, uint32_t regionCount,
+                                                    const VkBufferImageCopy *pRegions) const;
+
+    bool manual_PreCallValidateCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
+                                               VkDeviceSize dataSize, const void *pData) const;
+
+    bool manual_PreCallValidateCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
+                                             VkDeviceSize size, uint32_t data) const;
+
+    bool manual_PreCallValidateCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
+                                                  const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain) const;
+    bool manual_PreCallValidateQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo) const;
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    bool manual_PreCallValidateCreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
+                                                     const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) const;
+#endif  // VK_USE_PLATFORM_WIN32_KHR
+
+    bool manual_PreCallValidateCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
+                                                    const VkAllocationCallbacks *pAllocator,
+                                                    VkDescriptorPool *pDescriptorPool) const;
+    bool manual_PreCallValidateCmdDispatch(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY,
+                                           uint32_t groupCountZ) const;
+
+    bool manual_PreCallValidateCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) const;
+
+    bool manual_PreCallValidateCmdDispatchBaseKHR(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY,
+                                                  uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY,
+                                                  uint32_t groupCountZ) const;
+    bool manual_PreCallValidateCmdSetExclusiveScissorNV(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor,
+                                                        uint32_t exclusiveScissorCount, const VkRect2D *pExclusiveScissors) const;
+    bool manual_PreCallValidateCmdSetViewportShadingRatePaletteNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
+                                                                  uint32_t viewportCount,
+                                                                  const VkShadingRatePaletteNV *pShadingRatePalettes) const;
+
+    bool manual_PreCallValidateCmdSetCoarseSampleOrderNV(VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType,
+                                                         uint32_t customSampleOrderCount,
+                                                         const VkCoarseSampleOrderCustomNV *pCustomSampleOrders) const;
+
+    bool manual_PreCallValidateCmdDrawMeshTasksNV(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask) const;
+    bool manual_PreCallValidateCmdDrawMeshTasksIndirectNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+                                                          uint32_t drawCount, uint32_t stride) const;
+
+    bool manual_PreCallValidateCmdDrawMeshTasksIndirectCountNV(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+                                                               VkBuffer countBuffer, VkDeviceSize countBufferOffset,
+                                                               uint32_t maxDrawCount, uint32_t stride) const;
+
+    bool manual_PreCallValidateEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char *pLayerName,
+                                                                  uint32_t *pPropertyCount,
+                                                                  VkExtensionProperties *pProperties) const;
+    bool manual_PreCallValidateAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
+                                              const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory) const;
+
+    bool manual_PreCallValidateCreateAccelerationStructureNV(VkDevice device,
+                                                             const VkAccelerationStructureCreateInfoNV *pCreateInfo,
+                                                             const VkAllocationCallbacks *pAllocator,
+                                                             VkAccelerationStructureNV *pAccelerationStructure) const;
+    bool manual_PreCallValidateCmdBuildAccelerationStructureNV(VkCommandBuffer commandBuffer,
+                                                               const VkAccelerationStructureInfoNV *pInfo, VkBuffer instanceData,
+                                                               VkDeviceSize instanceOffset, VkBool32 update,
+                                                               VkAccelerationStructureNV dst, VkAccelerationStructureNV src,
+                                                               VkBuffer scratch, VkDeviceSize scratchOffset) const;
+    bool manual_PreCallValidateGetAccelerationStructureHandleNV(VkDevice device, VkAccelerationStructureNV accelerationStructure,
+                                                                size_t dataSize, void *pData) const;
+    bool manual_PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
+                                                           const VkRayTracingPipelineCreateInfoNV *pCreateInfos,
+                                                           const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) const;
+    bool manual_PreCallValidateCmdSetViewportWScalingNV(VkCommandBuffer commandBuffer, uint32_t firstViewport,
+                                                        uint32_t viewportCount,
+                                                        const VkViewportWScalingNV *pViewportWScalings) const;
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    bool PreCallValidateGetDeviceGroupSurfacePresentModes2EXT(VkDevice device, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo,
+                                                              VkDeviceGroupPresentModeFlagsKHR *pModes) const;
+#endif  // VK_USE_PLATFORM_WIN32_KHR
+
+    bool manual_PreCallValidateCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
+                                                 const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer) const;
+
+    bool manual_PreCallValidateCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor,
+                                                    uint16_t lineStipplePattern) const;
+
+    bool manual_PreCallValidateCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+                                                  VkIndexType indexType) const;
+
+    bool manual_PreCallValidateSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsObjectNameInfoEXT *pNameInfo) const;
+
+    bool manual_PreCallValidateSetDebugUtilsObjectTagEXT(VkDevice device, const VkDebugUtilsObjectTagInfoEXT *pTagInfo) const;
+
+    bool manual_PreCallValidateAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
+                                                   VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) const;
+
+    bool manual_PreCallValidateAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo,
+                                                    uint32_t *pImageIndex) const;
+
+#include "parameter_validation.h"
+};  // Class StatelessValidation
diff --git a/src/third_party/vulkan-validation-layers/src/layers/vk_format_utils.cpp b/src/third_party/vulkan-validation-layers/src/layers/vk_format_utils.cpp
new file mode 100644
index 0000000..9990144
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/vk_format_utils.cpp
@@ -0,0 +1,1372 @@
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ *
+ */
+
+#include <string.h>
+#include <string>
+#include <vector>
+#include <map>
+#include "vulkan/vulkan.h"
+#include "vk_format_utils.h"
+
+struct VULKAN_FORMAT_INFO {
+    uint32_t size;
+    uint32_t channel_count;
+    VkFormatCompatibilityClass format_class;
+};
+
+// Disable auto-formatting for this large table
+// clang-format off
+
+// Set up data structure with size(bytes) and number of channels for each Vulkan format
+// For compressed and multi-plane formats, size is bytes per compressed or shared block
+const std::map<VkFormat, VULKAN_FORMAT_INFO> vk_format_table = {
+    {VK_FORMAT_UNDEFINED,                   {0, 0, VK_FORMAT_COMPATIBILITY_CLASS_NONE_BIT }},
+    {VK_FORMAT_R4G4_UNORM_PACK8,            {1, 2, VK_FORMAT_COMPATIBILITY_CLASS_8_BIT}},
+    {VK_FORMAT_R4G4B4A4_UNORM_PACK16,       {2, 4, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}},
+    {VK_FORMAT_B4G4R4A4_UNORM_PACK16,       {2, 4, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}},
+    {VK_FORMAT_R5G6B5_UNORM_PACK16,         {2, 3, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}},
+    {VK_FORMAT_B5G6R5_UNORM_PACK16,         {2, 3, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}},
+    {VK_FORMAT_R5G5B5A1_UNORM_PACK16,       {2, 4, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}},
+    {VK_FORMAT_B5G5R5A1_UNORM_PACK16,       {2, 4, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}},
+    {VK_FORMAT_A1R5G5B5_UNORM_PACK16,       {2, 4, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}},
+    {VK_FORMAT_R8_UNORM,                    {1, 1, VK_FORMAT_COMPATIBILITY_CLASS_8_BIT}},
+    {VK_FORMAT_R8_SNORM,                    {1, 1, VK_FORMAT_COMPATIBILITY_CLASS_8_BIT}},
+    {VK_FORMAT_R8_USCALED,                  {1, 1, VK_FORMAT_COMPATIBILITY_CLASS_8_BIT}},
+    {VK_FORMAT_R8_SSCALED,                  {1, 1, VK_FORMAT_COMPATIBILITY_CLASS_8_BIT}},
+    {VK_FORMAT_R8_UINT,                     {1, 1, VK_FORMAT_COMPATIBILITY_CLASS_8_BIT}},
+    {VK_FORMAT_R8_SINT,                     {1, 1, VK_FORMAT_COMPATIBILITY_CLASS_8_BIT}},
+    {VK_FORMAT_R8_SRGB,                     {1, 1, VK_FORMAT_COMPATIBILITY_CLASS_8_BIT}},
+    {VK_FORMAT_R8G8_UNORM,                  {2, 2, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}},
+    {VK_FORMAT_R8G8_SNORM,                  {2, 2, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}},
+    {VK_FORMAT_R8G8_USCALED,                {2, 2, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}},
+    {VK_FORMAT_R8G8_SSCALED,                {2, 2, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}},
+    {VK_FORMAT_R8G8_UINT,                   {2, 2, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}},
+    {VK_FORMAT_R8G8_SINT,                   {2, 2, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}},
+    {VK_FORMAT_R8G8_SRGB,                   {2, 2, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}},
+    {VK_FORMAT_R8G8B8_UNORM,                {3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT}},
+    {VK_FORMAT_R8G8B8_SNORM,                {3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT}},
+    {VK_FORMAT_R8G8B8_USCALED,              {3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT}},
+    {VK_FORMAT_R8G8B8_SSCALED,              {3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT}},
+    {VK_FORMAT_R8G8B8_UINT,                 {3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT}},
+    {VK_FORMAT_R8G8B8_SINT,                 {3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT}},
+    {VK_FORMAT_R8G8B8_SRGB,                 {3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT}},
+    {VK_FORMAT_B8G8R8_UNORM,                {3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT}},
+    {VK_FORMAT_B8G8R8_SNORM,                {3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT}},
+    {VK_FORMAT_B8G8R8_USCALED,              {3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT}},
+    {VK_FORMAT_B8G8R8_SSCALED,              {3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT}},
+    {VK_FORMAT_B8G8R8_UINT,                 {3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT}},
+    {VK_FORMAT_B8G8R8_SINT,                 {3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT}},
+    {VK_FORMAT_B8G8R8_SRGB,                 {3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT}},
+    {VK_FORMAT_R8G8B8A8_UNORM,              {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_R8G8B8A8_SNORM,              {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_R8G8B8A8_USCALED,            {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_R8G8B8A8_SSCALED,            {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_R8G8B8A8_UINT,               {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_R8G8B8A8_SINT,               {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_R8G8B8A8_SRGB,               {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_B8G8R8A8_UNORM,              {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_B8G8R8A8_SNORM,              {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_B8G8R8A8_USCALED,            {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_B8G8R8A8_SSCALED,            {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_B8G8R8A8_UINT,               {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_B8G8R8A8_SINT,               {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_B8G8R8A8_SRGB,               {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_A8B8G8R8_UNORM_PACK32,       {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_A8B8G8R8_SNORM_PACK32,       {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_A8B8G8R8_USCALED_PACK32,     {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_A8B8G8R8_SSCALED_PACK32,     {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_A8B8G8R8_UINT_PACK32,        {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_A8B8G8R8_SINT_PACK32,        {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_A8B8G8R8_SRGB_PACK32,        {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_A2R10G10B10_UNORM_PACK32,    {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_A2R10G10B10_SNORM_PACK32,    {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_A2R10G10B10_USCALED_PACK32,  {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_A2R10G10B10_SSCALED_PACK32,  {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_A2R10G10B10_UINT_PACK32,     {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_A2R10G10B10_SINT_PACK32,     {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_A2B10G10R10_UNORM_PACK32,    {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_A2B10G10R10_SNORM_PACK32,    {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_A2B10G10R10_USCALED_PACK32,  {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_A2B10G10R10_SSCALED_PACK32,  {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_A2B10G10R10_UINT_PACK32,     {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_A2B10G10R10_SINT_PACK32,     {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_R16_UNORM,                   {2, 1, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}},
+    {VK_FORMAT_R16_SNORM,                   {2, 1, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}},
+    {VK_FORMAT_R16_USCALED,                 {2, 1, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}},
+    {VK_FORMAT_R16_SSCALED,                 {2, 1, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}},
+    {VK_FORMAT_R16_UINT,                    {2, 1, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}},
+    {VK_FORMAT_R16_SINT,                    {2, 1, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}},
+    {VK_FORMAT_R16_SFLOAT,                  {2, 1, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}},
+    {VK_FORMAT_R16G16_UNORM,                {4, 2, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_R16G16_SNORM,                {4, 2, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_R16G16_USCALED,              {4, 2, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_R16G16_SSCALED,              {4, 2, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_R16G16_UINT,                 {4, 2, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_R16G16_SINT,                 {4, 2, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_R16G16_SFLOAT,               {4, 2, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_R16G16B16_UNORM,             {6, 3, VK_FORMAT_COMPATIBILITY_CLASS_48_BIT}},
+    {VK_FORMAT_R16G16B16_SNORM,             {6, 3, VK_FORMAT_COMPATIBILITY_CLASS_48_BIT}},
+    {VK_FORMAT_R16G16B16_USCALED,           {6, 3, VK_FORMAT_COMPATIBILITY_CLASS_48_BIT}},
+    {VK_FORMAT_R16G16B16_SSCALED,           {6, 3, VK_FORMAT_COMPATIBILITY_CLASS_48_BIT}},
+    {VK_FORMAT_R16G16B16_UINT,              {6, 3, VK_FORMAT_COMPATIBILITY_CLASS_48_BIT}},
+    {VK_FORMAT_R16G16B16_SINT,              {6, 3, VK_FORMAT_COMPATIBILITY_CLASS_48_BIT}},
+    {VK_FORMAT_R16G16B16_SFLOAT,            {6, 3, VK_FORMAT_COMPATIBILITY_CLASS_48_BIT}},
+    {VK_FORMAT_R16G16B16A16_UNORM,          {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT}},
+    {VK_FORMAT_R16G16B16A16_SNORM,          {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT}},
+    {VK_FORMAT_R16G16B16A16_USCALED,        {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT}},
+    {VK_FORMAT_R16G16B16A16_SSCALED,        {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT}},
+    {VK_FORMAT_R16G16B16A16_UINT,           {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT}},
+    {VK_FORMAT_R16G16B16A16_SINT,           {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT}},
+    {VK_FORMAT_R16G16B16A16_SFLOAT,         {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT}},
+    {VK_FORMAT_R32_UINT,                    {4, 1, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_R32_SINT,                    {4, 1, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_R32_SFLOAT,                  {4, 1, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_R32G32_UINT,                 {8, 2, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT}},
+    {VK_FORMAT_R32G32_SINT,                 {8, 2, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT}},
+    {VK_FORMAT_R32G32_SFLOAT,               {8, 2, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT}},
+    {VK_FORMAT_R32G32B32_UINT,              {12, 3, VK_FORMAT_COMPATIBILITY_CLASS_96_BIT}},
+    {VK_FORMAT_R32G32B32_SINT,              {12, 3, VK_FORMAT_COMPATIBILITY_CLASS_96_BIT}},
+    {VK_FORMAT_R32G32B32_SFLOAT,            {12, 3, VK_FORMAT_COMPATIBILITY_CLASS_96_BIT}},
+    {VK_FORMAT_R32G32B32A32_UINT,           {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_128_BIT}},
+    {VK_FORMAT_R32G32B32A32_SINT,           {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_128_BIT}},
+    {VK_FORMAT_R32G32B32A32_SFLOAT,         {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_128_BIT}},
+    {VK_FORMAT_R64_UINT,                    {8, 1, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT}},
+    {VK_FORMAT_R64_SINT,                    {8, 1, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT}},
+    {VK_FORMAT_R64_SFLOAT,                  {8, 1, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT}},
+    {VK_FORMAT_R64G64_UINT,                 {16, 2, VK_FORMAT_COMPATIBILITY_CLASS_128_BIT}},
+    {VK_FORMAT_R64G64_SINT,                 {16, 2, VK_FORMAT_COMPATIBILITY_CLASS_128_BIT}},
+    {VK_FORMAT_R64G64_SFLOAT,               {16, 2, VK_FORMAT_COMPATIBILITY_CLASS_128_BIT}},
+    {VK_FORMAT_R64G64B64_UINT,              {24, 3, VK_FORMAT_COMPATIBILITY_CLASS_192_BIT}},
+    {VK_FORMAT_R64G64B64_SINT,              {24, 3, VK_FORMAT_COMPATIBILITY_CLASS_192_BIT}},
+    {VK_FORMAT_R64G64B64_SFLOAT,            {24, 3, VK_FORMAT_COMPATIBILITY_CLASS_192_BIT}},
+    {VK_FORMAT_R64G64B64A64_UINT,           {32, 4, VK_FORMAT_COMPATIBILITY_CLASS_256_BIT}},
+    {VK_FORMAT_R64G64B64A64_SINT,           {32, 4, VK_FORMAT_COMPATIBILITY_CLASS_256_BIT}},
+    {VK_FORMAT_R64G64B64A64_SFLOAT,         {32, 4, VK_FORMAT_COMPATIBILITY_CLASS_256_BIT}},
+    {VK_FORMAT_B10G11R11_UFLOAT_PACK32,     {4, 3, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_E5B9G9R9_UFLOAT_PACK32,      {4, 3, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_D16_UNORM,                   {2, 1, VK_FORMAT_COMPATIBILITY_CLASS_NONE_BIT}},
+    {VK_FORMAT_X8_D24_UNORM_PACK32,         {4, 1, VK_FORMAT_COMPATIBILITY_CLASS_NONE_BIT}},
+    {VK_FORMAT_D32_SFLOAT,                  {4, 1, VK_FORMAT_COMPATIBILITY_CLASS_NONE_BIT}},
+    {VK_FORMAT_S8_UINT,                     {1, 1, VK_FORMAT_COMPATIBILITY_CLASS_NONE_BIT}},
+    {VK_FORMAT_D16_UNORM_S8_UINT,           {3, 2, VK_FORMAT_COMPATIBILITY_CLASS_NONE_BIT}},
+    {VK_FORMAT_D24_UNORM_S8_UINT,           {4, 2, VK_FORMAT_COMPATIBILITY_CLASS_NONE_BIT}},
+    {VK_FORMAT_D32_SFLOAT_S8_UINT,          {8, 2, VK_FORMAT_COMPATIBILITY_CLASS_NONE_BIT}},
+    {VK_FORMAT_BC1_RGB_UNORM_BLOCK,         {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC1_RGB_BIT}},
+    {VK_FORMAT_BC1_RGB_SRGB_BLOCK,          {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC1_RGB_BIT}},
+    {VK_FORMAT_BC1_RGBA_UNORM_BLOCK,        {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC1_RGBA_BIT}},
+    {VK_FORMAT_BC1_RGBA_SRGB_BLOCK,         {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC1_RGBA_BIT}},
+    {VK_FORMAT_BC2_UNORM_BLOCK,             {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC2_BIT}},
+    {VK_FORMAT_BC2_SRGB_BLOCK,              {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC2_BIT}},
+    {VK_FORMAT_BC3_UNORM_BLOCK,             {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC3_BIT}},
+    {VK_FORMAT_BC3_SRGB_BLOCK,              {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC3_BIT}},
+    {VK_FORMAT_BC4_UNORM_BLOCK,             {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC4_BIT}},
+    {VK_FORMAT_BC4_SNORM_BLOCK,             {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC4_BIT}},
+    {VK_FORMAT_BC5_UNORM_BLOCK,             {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC5_BIT}},
+    {VK_FORMAT_BC5_SNORM_BLOCK,             {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC5_BIT}},
+    {VK_FORMAT_BC6H_UFLOAT_BLOCK,           {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC6H_BIT}},
+    {VK_FORMAT_BC6H_SFLOAT_BLOCK,           {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC6H_BIT}},
+    {VK_FORMAT_BC7_UNORM_BLOCK,             {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC7_BIT}},
+    {VK_FORMAT_BC7_SRGB_BLOCK,              {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC7_BIT}},
+    {VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,     {8, 3, VK_FORMAT_COMPATIBILITY_CLASS_ETC2_RGB_BIT}},
+    {VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,      {8, 3, VK_FORMAT_COMPATIBILITY_CLASS_ETC2_RGB_BIT}},
+    {VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,   {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_ETC2_RGBA_BIT}},
+    {VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,    {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_ETC2_RGBA_BIT}},
+    {VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,   {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ETC2_EAC_RGBA_BIT}},
+    {VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,    {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ETC2_EAC_RGBA_BIT}},
+    {VK_FORMAT_EAC_R11_UNORM_BLOCK,         {8, 1, VK_FORMAT_COMPATIBILITY_CLASS_EAC_R_BIT}},
+    {VK_FORMAT_EAC_R11_SNORM_BLOCK,         {8, 1, VK_FORMAT_COMPATIBILITY_CLASS_EAC_R_BIT}},
+    {VK_FORMAT_EAC_R11G11_UNORM_BLOCK,      {16, 2, VK_FORMAT_COMPATIBILITY_CLASS_EAC_RG_BIT}},
+    {VK_FORMAT_EAC_R11G11_SNORM_BLOCK,      {16, 2, VK_FORMAT_COMPATIBILITY_CLASS_EAC_RG_BIT}},
+    {VK_FORMAT_ASTC_4x4_UNORM_BLOCK,        {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_4X4_BIT}},
+    {VK_FORMAT_ASTC_4x4_SRGB_BLOCK,         {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_4X4_BIT}},
+    {VK_FORMAT_ASTC_5x4_UNORM_BLOCK,        {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_5X4_BIT}},
+    {VK_FORMAT_ASTC_5x4_SRGB_BLOCK,         {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_5X4_BIT}},
+    {VK_FORMAT_ASTC_5x5_UNORM_BLOCK,        {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_5X5_BIT}},
+    {VK_FORMAT_ASTC_5x5_SRGB_BLOCK,         {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_5X5_BIT}},
+    {VK_FORMAT_ASTC_6x5_UNORM_BLOCK,        {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_6X5_BIT}},
+    {VK_FORMAT_ASTC_6x5_SRGB_BLOCK,         {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_6X5_BIT}},
+    {VK_FORMAT_ASTC_6x6_UNORM_BLOCK,        {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_6X6_BIT}},
+    {VK_FORMAT_ASTC_6x6_SRGB_BLOCK,         {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_6X6_BIT}},
+    {VK_FORMAT_ASTC_8x5_UNORM_BLOCK,        {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X5_BIT}},
+    {VK_FORMAT_ASTC_8x5_SRGB_BLOCK,         {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X5_BIT}},
+    {VK_FORMAT_ASTC_8x6_UNORM_BLOCK,        {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X6_BIT}},
+    {VK_FORMAT_ASTC_8x6_SRGB_BLOCK,         {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X6_BIT}},
+    {VK_FORMAT_ASTC_8x8_UNORM_BLOCK,        {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X8_BIT}},
+    {VK_FORMAT_ASTC_8x8_SRGB_BLOCK,         {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X8_BIT}},
+    {VK_FORMAT_ASTC_10x5_UNORM_BLOCK,       {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X5_BIT}},
+    {VK_FORMAT_ASTC_10x5_SRGB_BLOCK,        {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X5_BIT}},
+    {VK_FORMAT_ASTC_10x6_UNORM_BLOCK,       {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X6_BIT}},
+    {VK_FORMAT_ASTC_10x6_SRGB_BLOCK,        {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X6_BIT}},
+    {VK_FORMAT_ASTC_10x8_UNORM_BLOCK,       {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X8_BIT}},
+    {VK_FORMAT_ASTC_10x8_SRGB_BLOCK,        {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X8_BIT}},
+    {VK_FORMAT_ASTC_10x10_UNORM_BLOCK,      {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X10_BIT}},
+    {VK_FORMAT_ASTC_10x10_SRGB_BLOCK,       {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X10_BIT}},
+    {VK_FORMAT_ASTC_12x10_UNORM_BLOCK,      {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_12X10_BIT}},
+    {VK_FORMAT_ASTC_12x10_SRGB_BLOCK,       {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_12X10_BIT}},
+    {VK_FORMAT_ASTC_12x12_UNORM_BLOCK,      {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_12X12_BIT}},
+    {VK_FORMAT_ASTC_12x12_SRGB_BLOCK,       {16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_12X12_BIT}},
+    {VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG, {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_PVRTC1_2BPP_BIT}},
+    {VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG, {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_PVRTC1_4BPP_BIT}},
+    {VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG, {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_PVRTC2_2BPP_BIT}},
+    {VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG, {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_PVRTC2_4BPP_BIT}},
+    {VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG,  {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_PVRTC1_2BPP_BIT}},
+    {VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG,  {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_PVRTC1_4BPP_BIT}},
+    {VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG,  {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_PVRTC2_2BPP_BIT}},
+    {VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG,  {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_PVRTC2_4BPP_BIT}},
+    // KHR_sampler_YCbCr_conversion extension - single-plane variants
+    // 'PACK' formats are normal, uncompressed
+    {VK_FORMAT_R10X6_UNORM_PACK16,                          {2, 1, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}},
+    {VK_FORMAT_R10X6G10X6_UNORM_2PACK16,                    {4, 2, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}},
+    {VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16,          {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_64BIT_R10G10B10A10}},
+    {VK_FORMAT_R12X4_UNORM_PACK16,                          {2, 1, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}},
+    {VK_FORMAT_R12X4G12X4_UNORM_2PACK16,                    {4, 2, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}},
+    {VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16,          {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_64BIT_R12G12B12A12}},
+    // _422 formats encode 2 texels per entry with B, R components shared - treated as compressed w/ 2x1 block size
+    {VK_FORMAT_G8B8G8R8_422_UNORM,                          {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32BIT_G8B8G8R8}},
+    {VK_FORMAT_B8G8R8G8_422_UNORM,                          {4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32BIT_B8G8R8G8}},
+    {VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16,      {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_64BIT_G10B10G10R10}},
+    {VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16,      {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_64BIT_B10G10R10G10}},
+    {VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16,      {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_64BIT_G12B12G12R12}},
+    {VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16,      {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_64BIT_B12G12R12G12}},
+    {VK_FORMAT_G16B16G16R16_422_UNORM,                      {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_64BIT_G16B16G16R16}},
+    {VK_FORMAT_B16G16R16G16_422_UNORM,                      {8, 4, VK_FORMAT_COMPATIBILITY_CLASS_64BIT_B16G16R16G16}},
+    // KHR_sampler_YCbCr_conversion extension - multi-plane variants
+    // Formats that 'share' components among texels (_420 and _422); size represents total bytes for the smallest possible texel block
+    // _420 share B, R components within a 2x2 texel block
+    {VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM,                   {6, 3, VK_FORMAT_COMPATIBILITY_CLASS_8BIT_3PLANE_420}},
+    {VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,                    {6, 3, VK_FORMAT_COMPATIBILITY_CLASS_8BIT_2PLANE_420}},
+    {VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16,  {12, 3, VK_FORMAT_COMPATIBILITY_CLASS_10BIT_3PLANE_420}},
+    {VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16,   {12, 3, VK_FORMAT_COMPATIBILITY_CLASS_10BIT_2PLANE_420}},
+    {VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16,  {12, 3, VK_FORMAT_COMPATIBILITY_CLASS_12BIT_3PLANE_420}},
+    {VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16,   {12, 3, VK_FORMAT_COMPATIBILITY_CLASS_12BIT_2PLANE_420}},
+    {VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM,                {12, 3, VK_FORMAT_COMPATIBILITY_CLASS_16BIT_3PLANE_420}},
+    {VK_FORMAT_G16_B16R16_2PLANE_420_UNORM,                 {12, 3, VK_FORMAT_COMPATIBILITY_CLASS_16BIT_2PLANE_420}},
+    // _422 share B, R components within a 2x1 texel block
+    {VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM,                   {4, 3, VK_FORMAT_COMPATIBILITY_CLASS_8BIT_3PLANE_422}},
+    {VK_FORMAT_G8_B8R8_2PLANE_422_UNORM,                    {4, 3, VK_FORMAT_COMPATIBILITY_CLASS_8BIT_2PLANE_422}},
+    {VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16,  {8, 3, VK_FORMAT_COMPATIBILITY_CLASS_10BIT_3PLANE_422}},
+    {VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16,   {8, 3, VK_FORMAT_COMPATIBILITY_CLASS_10BIT_2PLANE_422}},
+    {VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16,  {8, 3, VK_FORMAT_COMPATIBILITY_CLASS_12BIT_3PLANE_422}},
+    {VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16,   {8, 3, VK_FORMAT_COMPATIBILITY_CLASS_12BIT_2PLANE_422}},
+    {VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM,                {8, 3, VK_FORMAT_COMPATIBILITY_CLASS_16BIT_3PLANE_422}},
+    {VK_FORMAT_G16_B16R16_2PLANE_422_UNORM,                 {8, 3, VK_FORMAT_COMPATIBILITY_CLASS_16BIT_2PLANE_422}},
+    // _444 do not share
+    {VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM,                   {3, 3, VK_FORMAT_COMPATIBILITY_CLASS_8BIT_3PLANE_444}},
+    {VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16,  {6, 3, VK_FORMAT_COMPATIBILITY_CLASS_10BIT_3PLANE_444}},
+    {VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16,  {6, 3, VK_FORMAT_COMPATIBILITY_CLASS_12BIT_3PLANE_444}},
+    {VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM,                {6, 3, VK_FORMAT_COMPATIBILITY_CLASS_16BIT_3PLANE_444}}
+};
+
+// Re-enable formatting
+// clang-format on
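+
+// How the table above is read (illustrative only; values taken from the entries themselves):
+// each mapped value is {size, channel_count, format_class}, where 'size' is the byte count
+// of one texel block.  For an uncompressed format such as VK_FORMAT_R8G8B8A8_UNORM that
+// block is a single 4-byte texel; for a compressed format such as
+// VK_FORMAT_ASTC_12x12_SRGB_BLOCK the 16-byte entry covers an entire 12x12 texel block.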
+
+// Return true if format is an ETC2 or EAC compressed texture format
+VK_LAYER_EXPORT bool FormatIsCompressed_ETC2_EAC(VkFormat format) {
+    bool found = false;
+
+    switch (format) {
+        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
+        case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
+        case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
+        case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
+        case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
+        case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
+        case VK_FORMAT_EAC_R11_UNORM_BLOCK:
+        case VK_FORMAT_EAC_R11_SNORM_BLOCK:
+        case VK_FORMAT_EAC_R11G11_UNORM_BLOCK:
+        case VK_FORMAT_EAC_R11G11_SNORM_BLOCK:
+            found = true;
+            break;
+        default:
+            break;
+    }
+    return found;
+}
+
+// Return true if format is an ASTC compressed texture format
+VK_LAYER_EXPORT bool FormatIsCompressed_ASTC_LDR(VkFormat format) {
+    bool found = false;
+
+    switch (format) {
+        case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
+        case VK_FORMAT_ASTC_5x4_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
+        case VK_FORMAT_ASTC_5x5_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
+        case VK_FORMAT_ASTC_6x5_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
+        case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
+        case VK_FORMAT_ASTC_8x5_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
+        case VK_FORMAT_ASTC_8x6_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
+        case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
+        case VK_FORMAT_ASTC_10x5_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
+        case VK_FORMAT_ASTC_10x6_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
+        case VK_FORMAT_ASTC_10x8_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
+        case VK_FORMAT_ASTC_10x10_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
+        case VK_FORMAT_ASTC_12x10_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
+        case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
+            found = true;
+            break;
+        default:
+            break;
+    }
+    return found;
+}
+
+// Return true if format is a BC compressed texture format
+VK_LAYER_EXPORT bool FormatIsCompressed_BC(VkFormat format) {
+    bool found = false;
+
+    switch (format) {
+        case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
+        case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
+        case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
+        case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:
+        case VK_FORMAT_BC2_UNORM_BLOCK:
+        case VK_FORMAT_BC2_SRGB_BLOCK:
+        case VK_FORMAT_BC3_UNORM_BLOCK:
+        case VK_FORMAT_BC3_SRGB_BLOCK:
+        case VK_FORMAT_BC4_UNORM_BLOCK:
+        case VK_FORMAT_BC4_SNORM_BLOCK:
+        case VK_FORMAT_BC5_UNORM_BLOCK:
+        case VK_FORMAT_BC5_SNORM_BLOCK:
+        case VK_FORMAT_BC6H_UFLOAT_BLOCK:
+        case VK_FORMAT_BC6H_SFLOAT_BLOCK:
+        case VK_FORMAT_BC7_UNORM_BLOCK:
+        case VK_FORMAT_BC7_SRGB_BLOCK:
+            found = true;
+            break;
+        default:
+            break;
+    }
+    return found;
+}
+
+// Return true if format is a PVRTC compressed texture format
+VK_LAYER_EXPORT bool FormatIsCompressed_PVRTC(VkFormat format) {
+    bool found = false;
+
+    switch (format) {
+        case VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG:
+        case VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG:
+        case VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG:
+        case VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG:
+        case VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG:
+        case VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG:
+        case VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG:
+        case VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG:
+            found = true;
+            break;
+        default:
+            break;
+    }
+    return found;
+}
+
+// Single-plane "_422" formats are treated as 2x1 compressed (for copies)
+VK_LAYER_EXPORT bool FormatIsSinglePlane_422(VkFormat format) {
+    bool found = false;
+
+    switch (format) {
+        case VK_FORMAT_G8B8G8R8_422_UNORM_KHR:
+        case VK_FORMAT_B8G8R8G8_422_UNORM_KHR:
+        case VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR:
+        case VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR:
+        case VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR:
+        case VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR:
+        case VK_FORMAT_G16B16G16R16_422_UNORM_KHR:
+        case VK_FORMAT_B16G16R16G16_422_UNORM_KHR:
+            found = true;
+            break;
+        default:
+            break;
+    }
+    return found;
+}
+
+// Return true if format is compressed
+VK_LAYER_EXPORT bool FormatIsCompressed(VkFormat format) {
+    return (FormatIsCompressed_ASTC_LDR(format) || FormatIsCompressed_BC(format) || FormatIsCompressed_ETC2_EAC(format) ||
+            FormatIsCompressed_PVRTC(format));
+}
+// Return true if format is packed
+VK_LAYER_EXPORT bool FormatIsPacked(VkFormat format) {
+    bool found = false;
+
+    switch (format) {
+        case VK_FORMAT_R4G4_UNORM_PACK8:
+        case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
+        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
+        case VK_FORMAT_R5G6B5_UNORM_PACK16:
+        case VK_FORMAT_B5G6R5_UNORM_PACK16:
+        case VK_FORMAT_R5G5B5A1_UNORM_PACK16:
+        case VK_FORMAT_B5G5R5A1_UNORM_PACK16:
+        case VK_FORMAT_A1R5G5B5_UNORM_PACK16:
+        case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
+        case VK_FORMAT_A8B8G8R8_SNORM_PACK32:
+        case VK_FORMAT_A8B8G8R8_USCALED_PACK32:
+        case VK_FORMAT_A8B8G8R8_SSCALED_PACK32:
+        case VK_FORMAT_A8B8G8R8_UINT_PACK32:
+        case VK_FORMAT_A8B8G8R8_SINT_PACK32:
+        case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
+        case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
+        case VK_FORMAT_A2R10G10B10_SNORM_PACK32:
+        case VK_FORMAT_A2R10G10B10_USCALED_PACK32:
+        case VK_FORMAT_A2R10G10B10_SSCALED_PACK32:
+        case VK_FORMAT_A2R10G10B10_UINT_PACK32:
+        case VK_FORMAT_A2R10G10B10_SINT_PACK32:
+        case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
+        case VK_FORMAT_A2B10G10R10_SNORM_PACK32:
+        case VK_FORMAT_A2B10G10R10_USCALED_PACK32:
+        case VK_FORMAT_A2B10G10R10_SSCALED_PACK32:
+        case VK_FORMAT_A2B10G10R10_UINT_PACK32:
+        case VK_FORMAT_A2B10G10R10_SINT_PACK32:
+        case VK_FORMAT_B10G11R11_UFLOAT_PACK32:
+        case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32:
+        case VK_FORMAT_X8_D24_UNORM_PACK32:
+        case VK_FORMAT_R10X6_UNORM_PACK16:
+        case VK_FORMAT_R10X6G10X6_UNORM_2PACK16:
+        case VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16:
+        case VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16:
+        case VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16:
+        case VK_FORMAT_R12X4_UNORM_PACK16:
+        case VK_FORMAT_R12X4G12X4_UNORM_2PACK16:
+        case VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16:
+        case VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16:
+        case VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16:
+        case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16:
+        case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16:
+        case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16:
+        case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16:
+        case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16:
+        case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16:
+        case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16:
+        case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16:
+        case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16:
+        case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16:
+            found = true;
+            break;
+        default:
+            break;
+    }
+    return found;
+}
+
+// Return true if format is 'normal', with one texel per format element
+VK_LAYER_EXPORT bool FormatElementIsTexel(VkFormat format) {
+    if (FormatIsPacked(format) || FormatIsCompressed(format) || FormatIsSinglePlane_422(format) || FormatIsMultiplane(format)) {
+        return false;
+    } else {
+        return true;
+    }
+}
+
+// Return true if format is a depth or stencil format
+VK_LAYER_EXPORT bool FormatIsDepthOrStencil(VkFormat format) {
+    return (FormatIsDepthAndStencil(format) || FormatIsDepthOnly(format) || FormatIsStencilOnly(format));
+}
+
+// Return true if format contains depth and stencil information
+VK_LAYER_EXPORT bool FormatIsDepthAndStencil(VkFormat format) {
+    bool is_ds = false;
+
+    switch (format) {
+        case VK_FORMAT_D16_UNORM_S8_UINT:
+        case VK_FORMAT_D24_UNORM_S8_UINT:
+        case VK_FORMAT_D32_SFLOAT_S8_UINT:
+            is_ds = true;
+            break;
+        default:
+            break;
+    }
+    return is_ds;
+}
+
+// Return true if format is a stencil-only format
+VK_LAYER_EXPORT bool FormatIsStencilOnly(VkFormat format) { return (format == VK_FORMAT_S8_UINT); }
+
+// Return true if format is a depth-only format
+VK_LAYER_EXPORT bool FormatIsDepthOnly(VkFormat format) {
+    bool is_depth = false;
+
+    switch (format) {
+        case VK_FORMAT_D16_UNORM:
+        case VK_FORMAT_X8_D24_UNORM_PACK32:
+        case VK_FORMAT_D32_SFLOAT:
+            is_depth = true;
+            break;
+        default:
+            break;
+    }
+
+    return is_depth;
+}
+
+// Return true if format is of type NORM
+VK_LAYER_EXPORT bool FormatIsNorm(VkFormat format) {
+    bool is_norm = false;
+
+    switch (format) {
+        case VK_FORMAT_R4G4_UNORM_PACK8:
+        case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
+        case VK_FORMAT_R5G6B5_UNORM_PACK16:
+        case VK_FORMAT_R5G5B5A1_UNORM_PACK16:
+        case VK_FORMAT_A1R5G5B5_UNORM_PACK16:
+        case VK_FORMAT_R8_UNORM:
+        case VK_FORMAT_R8_SNORM:
+        case VK_FORMAT_R8G8_UNORM:
+        case VK_FORMAT_R8G8_SNORM:
+        case VK_FORMAT_R8G8B8_UNORM:
+        case VK_FORMAT_R8G8B8_SNORM:
+        case VK_FORMAT_R8G8B8A8_UNORM:
+        case VK_FORMAT_R8G8B8A8_SNORM:
+        case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
+        case VK_FORMAT_A8B8G8R8_SNORM_PACK32:
+        case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
+        case VK_FORMAT_A2B10G10R10_SNORM_PACK32:
+        case VK_FORMAT_R16_UNORM:
+        case VK_FORMAT_R16_SNORM:
+        case VK_FORMAT_R16G16_UNORM:
+        case VK_FORMAT_R16G16_SNORM:
+        case VK_FORMAT_R16G16B16_UNORM:
+        case VK_FORMAT_R16G16B16_SNORM:
+        case VK_FORMAT_R16G16B16A16_UNORM:
+        case VK_FORMAT_R16G16B16A16_SNORM:
+        case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
+        case VK_FORMAT_BC2_UNORM_BLOCK:
+        case VK_FORMAT_BC3_UNORM_BLOCK:
+        case VK_FORMAT_BC4_UNORM_BLOCK:
+        case VK_FORMAT_BC4_SNORM_BLOCK:
+        case VK_FORMAT_BC5_UNORM_BLOCK:
+        case VK_FORMAT_BC5_SNORM_BLOCK:
+        case VK_FORMAT_BC7_UNORM_BLOCK:
+        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
+        case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
+        case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
+        case VK_FORMAT_EAC_R11_UNORM_BLOCK:
+        case VK_FORMAT_EAC_R11_SNORM_BLOCK:
+        case VK_FORMAT_EAC_R11G11_UNORM_BLOCK:
+        case VK_FORMAT_EAC_R11G11_SNORM_BLOCK:
+        case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_5x4_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_5x5_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_6x5_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_8x5_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_8x6_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_10x5_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_10x6_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_10x8_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_10x10_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_12x10_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:
+        case VK_FORMAT_B5G6R5_UNORM_PACK16:
+        case VK_FORMAT_B8G8R8_UNORM:
+        case VK_FORMAT_B8G8R8_SNORM:
+        case VK_FORMAT_B8G8R8A8_UNORM:
+        case VK_FORMAT_B8G8R8A8_SNORM:
+        case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
+        case VK_FORMAT_A2R10G10B10_SNORM_PACK32:
+            is_norm = true;
+            break;
+        default:
+            break;
+    }
+
+    return is_norm;
+}
+
+// Return true if format is of type UNORM
+VK_LAYER_EXPORT bool FormatIsUNorm(VkFormat format) {
+    bool is_unorm = false;
+
+    switch (format) {
+        case VK_FORMAT_R4G4_UNORM_PACK8:
+        case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
+        case VK_FORMAT_R5G6B5_UNORM_PACK16:
+        case VK_FORMAT_R5G5B5A1_UNORM_PACK16:
+        case VK_FORMAT_A1R5G5B5_UNORM_PACK16:
+        case VK_FORMAT_R8_UNORM:
+        case VK_FORMAT_R8G8_UNORM:
+        case VK_FORMAT_R8G8B8_UNORM:
+        case VK_FORMAT_R8G8B8A8_UNORM:
+        case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
+        case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
+        case VK_FORMAT_R16_UNORM:
+        case VK_FORMAT_R16G16_UNORM:
+        case VK_FORMAT_R16G16B16_UNORM:
+        case VK_FORMAT_R16G16B16A16_UNORM:
+        case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
+        case VK_FORMAT_BC2_UNORM_BLOCK:
+        case VK_FORMAT_BC3_UNORM_BLOCK:
+        case VK_FORMAT_BC4_UNORM_BLOCK:
+        case VK_FORMAT_BC5_UNORM_BLOCK:
+        case VK_FORMAT_BC7_UNORM_BLOCK:
+        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
+        case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
+        case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
+        case VK_FORMAT_EAC_R11_UNORM_BLOCK:
+        case VK_FORMAT_EAC_R11G11_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_5x4_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_5x5_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_6x5_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_8x5_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_8x6_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_10x5_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_10x6_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_10x8_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_10x10_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_12x10_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:
+        case VK_FORMAT_B5G6R5_UNORM_PACK16:
+        case VK_FORMAT_B8G8R8_UNORM:
+        case VK_FORMAT_B8G8R8A8_UNORM:
+        case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
+            is_unorm = true;
+            break;
+        default:
+            break;
+    }
+
+    return is_unorm;
+}
+
+// Return true if format is of type SNORM
+VK_LAYER_EXPORT bool FormatIsSNorm(VkFormat format) {
+    bool is_snorm = false;
+
+    switch (format) {
+        case VK_FORMAT_R8_SNORM:
+        case VK_FORMAT_R8G8_SNORM:
+        case VK_FORMAT_R8G8B8_SNORM:
+        case VK_FORMAT_R8G8B8A8_SNORM:
+        case VK_FORMAT_A8B8G8R8_SNORM_PACK32:
+        case VK_FORMAT_A2B10G10R10_SNORM_PACK32:
+        case VK_FORMAT_R16_SNORM:
+        case VK_FORMAT_R16G16_SNORM:
+        case VK_FORMAT_R16G16B16_SNORM:
+        case VK_FORMAT_R16G16B16A16_SNORM:
+        case VK_FORMAT_BC4_SNORM_BLOCK:
+        case VK_FORMAT_BC5_SNORM_BLOCK:
+        case VK_FORMAT_EAC_R11_SNORM_BLOCK:
+        case VK_FORMAT_EAC_R11G11_SNORM_BLOCK:
+        case VK_FORMAT_B8G8R8_SNORM:
+        case VK_FORMAT_B8G8R8A8_SNORM:
+        case VK_FORMAT_A2R10G10B10_SNORM_PACK32:
+            is_snorm = true;
+            break;
+        default:
+            break;
+    }
+
+    return is_snorm;
+}
+
+// Return true if format is an integer format
+VK_LAYER_EXPORT bool FormatIsInt(VkFormat format) { return (FormatIsSInt(format) || FormatIsUInt(format)); }
+
+// Return true if format is an unsigned integer format
+VK_LAYER_EXPORT bool FormatIsUInt(VkFormat format) {
+    bool is_uint = false;
+
+    switch (format) {
+        case VK_FORMAT_R8_UINT:
+        case VK_FORMAT_S8_UINT:
+        case VK_FORMAT_R8G8_UINT:
+        case VK_FORMAT_R8G8B8_UINT:
+        case VK_FORMAT_R8G8B8A8_UINT:
+        case VK_FORMAT_A8B8G8R8_UINT_PACK32:
+        case VK_FORMAT_A2B10G10R10_UINT_PACK32:
+        case VK_FORMAT_R16_UINT:
+        case VK_FORMAT_R16G16_UINT:
+        case VK_FORMAT_R16G16B16_UINT:
+        case VK_FORMAT_R16G16B16A16_UINT:
+        case VK_FORMAT_R32_UINT:
+        case VK_FORMAT_R32G32_UINT:
+        case VK_FORMAT_R32G32B32_UINT:
+        case VK_FORMAT_R32G32B32A32_UINT:
+        case VK_FORMAT_R64_UINT:
+        case VK_FORMAT_R64G64_UINT:
+        case VK_FORMAT_R64G64B64_UINT:
+        case VK_FORMAT_R64G64B64A64_UINT:
+        case VK_FORMAT_B8G8R8_UINT:
+        case VK_FORMAT_B8G8R8A8_UINT:
+        case VK_FORMAT_A2R10G10B10_UINT_PACK32:
+            is_uint = true;
+            break;
+        default:
+            break;
+    }
+
+    return is_uint;
+}
+
+// Return true if format is a signed integer format
+VK_LAYER_EXPORT bool FormatIsSInt(VkFormat format) {
+    bool is_sint = false;
+
+    switch (format) {
+        case VK_FORMAT_R8_SINT:
+        case VK_FORMAT_R8G8_SINT:
+        case VK_FORMAT_R8G8B8_SINT:
+        case VK_FORMAT_R8G8B8A8_SINT:
+        case VK_FORMAT_A8B8G8R8_SINT_PACK32:
+        case VK_FORMAT_A2B10G10R10_SINT_PACK32:
+        case VK_FORMAT_R16_SINT:
+        case VK_FORMAT_R16G16_SINT:
+        case VK_FORMAT_R16G16B16_SINT:
+        case VK_FORMAT_R16G16B16A16_SINT:
+        case VK_FORMAT_R32_SINT:
+        case VK_FORMAT_R32G32_SINT:
+        case VK_FORMAT_R32G32B32_SINT:
+        case VK_FORMAT_R32G32B32A32_SINT:
+        case VK_FORMAT_R64_SINT:
+        case VK_FORMAT_R64G64_SINT:
+        case VK_FORMAT_R64G64B64_SINT:
+        case VK_FORMAT_R64G64B64A64_SINT:
+        case VK_FORMAT_B8G8R8_SINT:
+        case VK_FORMAT_B8G8R8A8_SINT:
+        case VK_FORMAT_A2R10G10B10_SINT_PACK32:
+            is_sint = true;
+            break;
+        default:
+            break;
+    }
+
+    return is_sint;
+}
+
+// Return true if format is a floating-point format
+VK_LAYER_EXPORT bool FormatIsFloat(VkFormat format) {
+    bool is_float = false;
+
+    switch (format) {
+        case VK_FORMAT_R16_SFLOAT:
+        case VK_FORMAT_R16G16_SFLOAT:
+        case VK_FORMAT_R16G16B16_SFLOAT:
+        case VK_FORMAT_R16G16B16A16_SFLOAT:
+        case VK_FORMAT_R32_SFLOAT:
+        case VK_FORMAT_R32G32_SFLOAT:
+        case VK_FORMAT_R32G32B32_SFLOAT:
+        case VK_FORMAT_R32G32B32A32_SFLOAT:
+        case VK_FORMAT_R64_SFLOAT:
+        case VK_FORMAT_R64G64_SFLOAT:
+        case VK_FORMAT_R64G64B64_SFLOAT:
+        case VK_FORMAT_R64G64B64A64_SFLOAT:
+        case VK_FORMAT_B10G11R11_UFLOAT_PACK32:
+        case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32:
+        case VK_FORMAT_BC6H_UFLOAT_BLOCK:
+        case VK_FORMAT_BC6H_SFLOAT_BLOCK:
+            is_float = true;
+            break;
+        default:
+            break;
+    }
+
+    return is_float;
+}
+
+// Return true if format is in the SRGB colorspace
+VK_LAYER_EXPORT bool FormatIsSRGB(VkFormat format) {
+    bool is_srgb = false;
+
+    switch (format) {
+        case VK_FORMAT_R8_SRGB:
+        case VK_FORMAT_R8G8_SRGB:
+        case VK_FORMAT_R8G8B8_SRGB:
+        case VK_FORMAT_R8G8B8A8_SRGB:
+        case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
+        case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
+        case VK_FORMAT_BC2_SRGB_BLOCK:
+        case VK_FORMAT_BC3_SRGB_BLOCK:
+        case VK_FORMAT_BC7_SRGB_BLOCK:
+        case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
+        case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
+        case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
+        case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
+        case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
+        case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
+        case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
+        case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
+        case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
+        case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
+        case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
+        case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
+        case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
+        case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
+        case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
+        case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
+        case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
+        case VK_FORMAT_B8G8R8_SRGB:
+        case VK_FORMAT_B8G8R8A8_SRGB:
+            is_srgb = true;
+            break;
+        default:
+            break;
+    }
+
+    return is_srgb;
+}
+
+// Return true if format is a USCALED format
+VK_LAYER_EXPORT bool FormatIsUScaled(VkFormat format) {
+    bool is_uscaled = false;
+
+    switch (format) {
+        case VK_FORMAT_R8_USCALED:
+        case VK_FORMAT_R8G8_USCALED:
+        case VK_FORMAT_R8G8B8_USCALED:
+        case VK_FORMAT_B8G8R8_USCALED:
+        case VK_FORMAT_R8G8B8A8_USCALED:
+        case VK_FORMAT_B8G8R8A8_USCALED:
+        case VK_FORMAT_A8B8G8R8_USCALED_PACK32:
+        case VK_FORMAT_A2R10G10B10_USCALED_PACK32:
+        case VK_FORMAT_A2B10G10R10_USCALED_PACK32:
+        case VK_FORMAT_R16_USCALED:
+        case VK_FORMAT_R16G16_USCALED:
+        case VK_FORMAT_R16G16B16_USCALED:
+        case VK_FORMAT_R16G16B16A16_USCALED:
+            is_uscaled = true;
+            break;
+        default:
+            break;
+    }
+
+    return is_uscaled;
+}
+
+// Return true if format is a SSCALED format
+VK_LAYER_EXPORT bool FormatIsSScaled(VkFormat format) {
+    bool is_sscaled = false;
+
+    switch (format) {
+        case VK_FORMAT_R8_SSCALED:
+        case VK_FORMAT_R8G8_SSCALED:
+        case VK_FORMAT_R8G8B8_SSCALED:
+        case VK_FORMAT_B8G8R8_SSCALED:
+        case VK_FORMAT_R8G8B8A8_SSCALED:
+        case VK_FORMAT_B8G8R8A8_SSCALED:
+        case VK_FORMAT_A8B8G8R8_SSCALED_PACK32:
+        case VK_FORMAT_A2R10G10B10_SSCALED_PACK32:
+        case VK_FORMAT_A2B10G10R10_SSCALED_PACK32:
+        case VK_FORMAT_R16_SSCALED:
+        case VK_FORMAT_R16G16_SSCALED:
+        case VK_FORMAT_R16G16B16_SSCALED:
+        case VK_FORMAT_R16G16B16A16_SSCALED:
+            is_sscaled = true;
+            break;
+        default:
+            break;
+    }
+
+    return is_sscaled;
+}
+
+// Return texel block sizes for all formats
+// Uncompressed formats return {1, 1, 1}
+// Compressed formats return the compression block extents
+// Multiplane formats return the 'shared' extent of their low-res channel(s)
+VK_LAYER_EXPORT VkExtent3D FormatTexelBlockExtent(VkFormat format) {
+    VkExtent3D block_size = {1, 1, 1};
+    switch (format) {
+        case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
+        case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
+        case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
+        case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:
+        case VK_FORMAT_BC2_UNORM_BLOCK:
+        case VK_FORMAT_BC2_SRGB_BLOCK:
+        case VK_FORMAT_BC3_UNORM_BLOCK:
+        case VK_FORMAT_BC3_SRGB_BLOCK:
+        case VK_FORMAT_BC4_UNORM_BLOCK:
+        case VK_FORMAT_BC4_SNORM_BLOCK:
+        case VK_FORMAT_BC5_UNORM_BLOCK:
+        case VK_FORMAT_BC5_SNORM_BLOCK:
+        case VK_FORMAT_BC6H_UFLOAT_BLOCK:
+        case VK_FORMAT_BC6H_SFLOAT_BLOCK:
+        case VK_FORMAT_BC7_UNORM_BLOCK:
+        case VK_FORMAT_BC7_SRGB_BLOCK:
+        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
+        case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
+        case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
+        case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
+        case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
+        case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
+        case VK_FORMAT_EAC_R11_UNORM_BLOCK:
+        case VK_FORMAT_EAC_R11_SNORM_BLOCK:
+        case VK_FORMAT_EAC_R11G11_UNORM_BLOCK:
+        case VK_FORMAT_EAC_R11G11_SNORM_BLOCK:
+        case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
+            block_size = {4, 4, 1};
+            break;
+        case VK_FORMAT_ASTC_5x4_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
+            block_size = {5, 4, 1};
+            break;
+        case VK_FORMAT_ASTC_5x5_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
+            block_size = {5, 5, 1};
+            break;
+        case VK_FORMAT_ASTC_6x5_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
+            block_size = {6, 5, 1};
+            break;
+        case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
+            block_size = {6, 6, 1};
+            break;
+        case VK_FORMAT_ASTC_8x5_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
+            block_size = {8, 5, 1};
+            break;
+        case VK_FORMAT_ASTC_8x6_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
+            block_size = {8, 6, 1};
+            break;
+        case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
+            block_size = {8, 8, 1};
+            break;
+        case VK_FORMAT_ASTC_10x5_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
+            block_size = {10, 5, 1};
+            break;
+        case VK_FORMAT_ASTC_10x6_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
+            block_size = {10, 6, 1};
+            break;
+        case VK_FORMAT_ASTC_10x8_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
+            block_size = {10, 8, 1};
+            break;
+        case VK_FORMAT_ASTC_10x10_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
+            block_size = {10, 10, 1};
+            break;
+        case VK_FORMAT_ASTC_12x10_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
+            block_size = {12, 10, 1};
+            break;
+        case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:
+        case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
+            block_size = {12, 12, 1};
+            break;
+        case VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG:
+        case VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG:
+        case VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG:
+        case VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG:
+            block_size = {8, 4, 1};
+            break;
+        case VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG:
+        case VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG:
+        case VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG:
+        case VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG:
+            block_size = {4, 4, 1};
+            break;
+        // (KHR_sampler_ycbcr_conversion) _422 single-plane formats are treated as 2x1 compressed (for copies)
+        case VK_FORMAT_G8B8G8R8_422_UNORM:
+        case VK_FORMAT_B8G8R8G8_422_UNORM:
+        case VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16:
+        case VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16:
+        case VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16:
+        case VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16:
+        case VK_FORMAT_G16B16G16R16_422_UNORM:
+        case VK_FORMAT_B16G16R16G16_422_UNORM:
+            block_size = {2, 1, 1};
+            break;
+        // _422 multi-plane formats are not considered compressed, but shared components form a logical 2x1 block
+        case VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM:
+        case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM:
+        case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16:
+        case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16:
+        case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16:
+        case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16:
+        case VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM:
+        case VK_FORMAT_G16_B16R16_2PLANE_422_UNORM:
+            block_size = {2, 1, 1};
+            break;
+        // _420 formats are not considered compressed, but shared components form a logical 2x2 block
+        case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM:
+        case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
+        case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16:
+        case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16:
+        case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16:
+        case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16:
+        case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM:
+        case VK_FORMAT_G16_B16R16_2PLANE_420_UNORM:
+            block_size = {2, 2, 1};
+            break;
+        // _444 multi-plane formats do not share components, default to 1x1
+        case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16:
+        case VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM:
+        case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16:
+        case VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM:
+        default:
+            break;
+    }
+    return block_size;
+}
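+
+// Worked example (illustrative): an ASTC 8x6 image of 100x100 texels has a block extent of
+// {8, 6, 1}, so size and copy calculations operate on ceil(100/8) x ceil(100/6) = 13 x 17
+// blocks of 16 bytes each.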
+
+VK_LAYER_EXPORT uint32_t FormatDepthSize(VkFormat format) {
+    uint32_t depth_size = 0;
+    switch (format) {
+        case VK_FORMAT_D16_UNORM:
+        case VK_FORMAT_D16_UNORM_S8_UINT:
+            depth_size = 16;
+            break;
+        case VK_FORMAT_X8_D24_UNORM_PACK32:
+        case VK_FORMAT_D24_UNORM_S8_UINT:
+            depth_size = 24;
+            break;
+        case VK_FORMAT_D32_SFLOAT:
+        case VK_FORMAT_D32_SFLOAT_S8_UINT:
+            depth_size = 32;
+            break;
+
+        default:
+            break;
+    }
+    return depth_size;
+}
+
+VK_LAYER_EXPORT VkFormatNumericalType FormatDepthNumericalType(VkFormat format) {
+    VkFormatNumericalType numerical_type = VK_FORMAT_NUMERICAL_TYPE_NONE;
+    switch (format) {
+        case VK_FORMAT_D16_UNORM:
+        case VK_FORMAT_D16_UNORM_S8_UINT:
+        case VK_FORMAT_X8_D24_UNORM_PACK32:
+        case VK_FORMAT_D24_UNORM_S8_UINT:
+            numerical_type = VK_FORMAT_NUMERICAL_TYPE_UNORM;
+            break;
+        case VK_FORMAT_D32_SFLOAT:
+        case VK_FORMAT_D32_SFLOAT_S8_UINT:
+            numerical_type = VK_FORMAT_NUMERICAL_TYPE_SFLOAT;
+            break;
+
+        default:
+            break;
+    }
+    return numerical_type;
+}
+
+VK_LAYER_EXPORT uint32_t FormatStencilSize(VkFormat format) {
+    uint32_t stencil_size = 0;
+    switch (format) {
+        case VK_FORMAT_S8_UINT:
+        case VK_FORMAT_D16_UNORM_S8_UINT:
+        case VK_FORMAT_D24_UNORM_S8_UINT:
+        case VK_FORMAT_D32_SFLOAT_S8_UINT:
+            stencil_size = 8;
+            break;
+
+        default:
+            break;
+    }
+    return stencil_size;
+}
+
+VK_LAYER_EXPORT VkFormatNumericalType FormatStencilNumericalType(VkFormat format) {
+    VkFormatNumericalType numerical_type = VK_FORMAT_NUMERICAL_TYPE_NONE;
+    switch (format) {
+        case VK_FORMAT_S8_UINT:
+        case VK_FORMAT_D16_UNORM_S8_UINT:
+        case VK_FORMAT_D24_UNORM_S8_UINT:
+        case VK_FORMAT_D32_SFLOAT_S8_UINT:
+            numerical_type = VK_FORMAT_NUMERICAL_TYPE_UINT;
+            break;
+
+        default:
+            break;
+    }
+    return numerical_type;
+}
+
+VK_LAYER_EXPORT uint32_t FormatPlaneCount(VkFormat format) {
+    switch (format) {
+        case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM:
+        case VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM:
+        case VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM:
+        case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16:
+        case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16:
+        case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16:
+        case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16:
+        case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16:
+        case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16:
+        case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM:
+        case VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM:
+        case VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM:
+            return 3;
+            break;
+        case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
+        case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM:
+        case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16:
+        case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16:
+        case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16:
+        case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16:
+        case VK_FORMAT_G16_B16R16_2PLANE_420_UNORM:
+        case VK_FORMAT_G16_B16R16_2PLANE_422_UNORM:
+            return 2;
+            break;
+        default:
+            return 1;
+            break;
+    }
+}
+
+// Return format class of the specified format
+VK_LAYER_EXPORT VkFormatCompatibilityClass FormatCompatibilityClass(VkFormat format) {
+    auto item = vk_format_table.find(format);
+    if (item != vk_format_table.end()) {
+        return item->second.format_class;
+    }
+    return VK_FORMAT_COMPATIBILITY_CLASS_NONE_BIT;
+}
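+
+// Example (illustrative): VK_FORMAT_R8G8B8A8_UNORM and VK_FORMAT_R32_SFLOAT both map to
+// VK_FORMAT_COMPATIBILITY_CLASS_32_BIT, so an image created with
+// VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT may have views that reinterpret one as the other.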
+
+// Return size, in bytes, of one element of the specified format
+// For uncompressed this is one texel, for compressed it is one block
+VK_LAYER_EXPORT uint32_t FormatElementSize(VkFormat format, VkImageAspectFlags aspectMask) {
+    // Handle special buffer packing rules for specific depth/stencil formats
+    if (aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) {
+        format = VK_FORMAT_S8_UINT;
+    } else if (aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) {
+        switch (format) {
+            case VK_FORMAT_D16_UNORM_S8_UINT:
+                format = VK_FORMAT_D16_UNORM;
+                break;
+            case VK_FORMAT_D32_SFLOAT_S8_UINT:
+                format = VK_FORMAT_D32_SFLOAT;
+                break;
+            default:
+                break;
+        }
+    } else if (FormatIsMultiplane(format)) {
+        format = FindMultiplaneCompatibleFormat(format, aspectMask);
+    }
+
+    auto item = vk_format_table.find(format);
+    if (item != vk_format_table.end()) {
+        return item->second.size;
+    }
+    return 0;
+}
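+
+// Illustrative results of the aspect handling above (sizes taken from the format table):
+//   FormatElementSize(VK_FORMAT_D32_SFLOAT_S8_UINT, VK_IMAGE_ASPECT_DEPTH_BIT)         -> 4 (D32_SFLOAT)
+//   FormatElementSize(VK_FORMAT_D32_SFLOAT_S8_UINT, VK_IMAGE_ASPECT_STENCIL_BIT)       -> 1 (S8_UINT)
+//   FormatElementSize(VK_FORMAT_G8_B8R8_2PLANE_420_UNORM, VK_IMAGE_ASPECT_PLANE_1_BIT) -> 2 (R8G8_UNORM plane)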
+
+// Return the size, in bytes, of one texel of the given format
+// For compressed or multi-plane, this may be a fractional number
+VK_LAYER_EXPORT double FormatTexelSize(VkFormat format) {
+    double texel_size = static_cast<double>(FormatElementSize(format));
+    VkExtent3D block_extent = FormatTexelBlockExtent(format);
+    uint32_t texels_per_block = block_extent.width * block_extent.height * block_extent.depth;
+    if (1 < texels_per_block) {
+        texel_size /= static_cast<double>(texels_per_block);
+    }
+    return texel_size;
+}
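+
+// Example (illustrative): VK_FORMAT_BC1_RGB_UNORM_BLOCK stores a 4x4 texel block in
+// 8 bytes, so FormatTexelSize returns 8 / 16 = 0.5 bytes per texel.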
+
+// Return the number of channels for a given format
+uint32_t FormatChannelCount(VkFormat format) {
+    auto item = vk_format_table.find(format);
+    if (item != vk_format_table.end()) {
+        return item->second.channel_count;
+    }
+    return 0;
+}
+
+// Perform a modulo operation that tolerates a zero divisor (returns 0 instead of dividing by zero)
+VK_LAYER_EXPORT VkDeviceSize SafeModulo(VkDeviceSize dividend, VkDeviceSize divisor) {
+    VkDeviceSize result = 0;
+    if (divisor != 0) {
+        result = dividend % divisor;
+    }
+    return result;
+}
+
+VK_LAYER_EXPORT VkDeviceSize SafeDivision(VkDeviceSize dividend, VkDeviceSize divisor) {
+    VkDeviceSize result = 0;
+    if (divisor != 0) {
+        result = dividend / divisor;
+    }
+    return result;
+}
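+
+// Typical use (illustrative): alignment checks such as
+//   SafeModulo(bufferOffset, FormatElementSize(format)) == 0
+// remain well defined even when FormatElementSize returns 0 for an unknown format.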
+
+struct VULKAN_PER_PLANE_COMPATIBILITY {
+    uint32_t width_divisor;
+    uint32_t height_divisor;
+    VkFormat compatible_format;
+};
+
+struct VULKAN_MULTIPLANE_COMPATIBILITY {
+    VULKAN_PER_PLANE_COMPATIBILITY per_plane[VK_MULTIPLANE_FORMAT_MAX_PLANES];
+};
+
+// Source: Vulkan spec Table 45. Plane Format Compatibility Table
+// clang-format off
+const std::map<VkFormat, VULKAN_MULTIPLANE_COMPATIBILITY> vk_multiplane_compatibility_map {
+    { VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM,                  { { { 1, 1, VK_FORMAT_R8_UNORM },
+                                                                { 2, 2, VK_FORMAT_R8_UNORM },
+                                                                { 2, 2, VK_FORMAT_R8_UNORM } } } },
+    { VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,                   { { { 1, 1, VK_FORMAT_R8_UNORM },
+                                                                { 2, 2, VK_FORMAT_R8G8_UNORM },
+                                                                { 1, 1, VK_FORMAT_UNDEFINED } } } },
+    { VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM,                  { { { 1, 1, VK_FORMAT_R8_UNORM },
+                                                                { 2, 1, VK_FORMAT_R8_UNORM },
+                                                                { 2, 1, VK_FORMAT_R8_UNORM } } } },
+    { VK_FORMAT_G8_B8R8_2PLANE_422_UNORM,                   { { { 1, 1, VK_FORMAT_R8_UNORM },
+                                                                { 2, 1, VK_FORMAT_R8G8_UNORM },
+                                                                { 1, 1, VK_FORMAT_UNDEFINED } } } },
+    { VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM,                  { { { 1, 1, VK_FORMAT_R8_UNORM },
+                                                                { 1, 1, VK_FORMAT_R8_UNORM },
+                                                                { 1, 1, VK_FORMAT_R8_UNORM } } } },
+    { VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, { { { 1, 1, VK_FORMAT_R10X6_UNORM_PACK16 },
+                                                                { 2, 2, VK_FORMAT_R10X6_UNORM_PACK16 },
+                                                                { 2, 2, VK_FORMAT_R10X6_UNORM_PACK16 } } } },
+    { VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16,  { { { 1, 1, VK_FORMAT_R10X6_UNORM_PACK16 },
+                                                                { 2, 2, VK_FORMAT_R10X6G10X6_UNORM_2PACK16 },
+                                                                { 1, 1, VK_FORMAT_UNDEFINED } } } },
+    { VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16, { { { 1, 1, VK_FORMAT_R10X6_UNORM_PACK16 },
+                                                                { 2, 1, VK_FORMAT_R10X6_UNORM_PACK16 },
+                                                                { 2, 1, VK_FORMAT_R10X6_UNORM_PACK16 } } } },
+    { VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16,  { { { 1, 1, VK_FORMAT_R10X6_UNORM_PACK16 },
+                                                                { 2, 1, VK_FORMAT_R10X6G10X6_UNORM_2PACK16 },
+                                                                { 1, 1, VK_FORMAT_UNDEFINED } } } },
+    { VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16, { { { 1, 1, VK_FORMAT_R10X6_UNORM_PACK16 },
+                                                                { 1, 1, VK_FORMAT_R10X6_UNORM_PACK16 },
+                                                                { 1, 1, VK_FORMAT_R10X6_UNORM_PACK16 } } } },
+    { VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16, { { { 1, 1, VK_FORMAT_R12X4_UNORM_PACK16 },
+                                                                { 2, 2, VK_FORMAT_R12X4_UNORM_PACK16 },
+                                                                { 2, 2, VK_FORMAT_R12X4_UNORM_PACK16 } } } },
+    { VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16,  { { { 1, 1, VK_FORMAT_R12X4_UNORM_PACK16 },
+                                                                { 2, 2, VK_FORMAT_R12X4G12X4_UNORM_2PACK16 },
+                                                                { 1, 1, VK_FORMAT_UNDEFINED } } } },
+    { VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16, { { { 1, 1, VK_FORMAT_R12X4_UNORM_PACK16 },
+                                                                { 2, 1, VK_FORMAT_R12X4_UNORM_PACK16 },
+                                                                { 2, 1, VK_FORMAT_R12X4_UNORM_PACK16 } } } },
+    { VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16,  { { { 1, 1, VK_FORMAT_R12X4_UNORM_PACK16 },
+                                                                { 2, 1, VK_FORMAT_R12X4G12X4_UNORM_2PACK16 },
+                                                                { 1, 1, VK_FORMAT_UNDEFINED } } } },
+    { VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16, { { { 1, 1, VK_FORMAT_R12X4_UNORM_PACK16 },
+                                                                { 1, 1, VK_FORMAT_R12X4_UNORM_PACK16 },
+                                                                { 1, 1, VK_FORMAT_R12X4_UNORM_PACK16 } } } },
+    { VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM,               { { { 1, 1, VK_FORMAT_R16_UNORM },
+                                                                { 2, 2, VK_FORMAT_R16_UNORM },
+                                                                { 2, 2, VK_FORMAT_R16_UNORM } } } },
+    { VK_FORMAT_G16_B16R16_2PLANE_420_UNORM,                { { { 1, 1, VK_FORMAT_R16_UNORM },
+                                                                { 2, 2, VK_FORMAT_R16G16_UNORM },
+                                                                { 1, 1, VK_FORMAT_UNDEFINED } } } },
+    { VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM,               { { { 1, 1, VK_FORMAT_R16_UNORM },
+                                                                { 2, 1, VK_FORMAT_R16_UNORM },
+                                                                { 2, 1, VK_FORMAT_R16_UNORM } } } },
+    { VK_FORMAT_G16_B16R16_2PLANE_422_UNORM,                { { { 1, 1, VK_FORMAT_R16_UNORM },
+                                                                { 2, 1, VK_FORMAT_R16G16_UNORM },
+                                                                { 1, 1, VK_FORMAT_UNDEFINED } } } },
+    { VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM,               { { { 1, 1, VK_FORMAT_R16_UNORM },
+                                                                { 1, 1, VK_FORMAT_R16_UNORM },
+                                                                { 1, 1, VK_FORMAT_R16_UNORM } } } }
+};
+// clang-format on
+
+uint32_t GetPlaneIndex(VkImageAspectFlags aspect) {
+    // Returns an out-of-bounds index (VK_MULTIPLANE_FORMAT_MAX_PLANES) on error
+    switch (aspect) {
+        case VK_IMAGE_ASPECT_PLANE_0_BIT:
+            return 0;
+            break;
+        case VK_IMAGE_ASPECT_PLANE_1_BIT:
+            return 1;
+            break;
+        case VK_IMAGE_ASPECT_PLANE_2_BIT:
+            return 2;
+            break;
+        default:
+            // Anything other than exactly one plane-aspect bit is an error condition
+            return VK_MULTIPLANE_FORMAT_MAX_PLANES;
+            break;
+    }
+}
+
+VK_LAYER_EXPORT VkFormat FindMultiplaneCompatibleFormat(VkFormat mp_fmt, VkImageAspectFlags plane_aspect) {
+    uint32_t plane_idx = GetPlaneIndex(plane_aspect);
+    auto it = vk_multiplane_compatibility_map.find(mp_fmt);
+    if ((it == vk_multiplane_compatibility_map.end()) || (plane_idx >= VK_MULTIPLANE_FORMAT_MAX_PLANES)) {
+        return VK_FORMAT_UNDEFINED;
+    }
+
+    return it->second.per_plane[plane_idx].compatible_format;
+}
+
+VK_LAYER_EXPORT VkExtent2D FindMultiplaneExtentDivisors(VkFormat mp_fmt, VkImageAspectFlags plane_aspect) {
+    VkExtent2D divisors = {1, 1};
+    uint32_t plane_idx = GetPlaneIndex(plane_aspect);
+    auto it = vk_multiplane_compatibility_map.find(mp_fmt);
+    if ((it == vk_multiplane_compatibility_map.end()) || (plane_idx >= VK_MULTIPLANE_FORMAT_MAX_PLANES)) {
+        return divisors;
+    }
+
+    divisors.width = it->second.per_plane[plane_idx].width_divisor;
+    divisors.height = it->second.per_plane[plane_idx].height_divisor;
+    return divisors;
+}
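+
+// Worked example (illustrative): for VK_FORMAT_G8_B8R8_2PLANE_420_UNORM the plane-1
+// divisors are {2, 2}, so the chroma plane of a 1920x1080 image is 960x540 texels of
+// VK_FORMAT_R8G8_UNORM.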
+
+VK_LAYER_EXPORT bool FormatSizesAreEqual(VkFormat srcFormat, VkFormat dstFormat, uint32_t region_count,
+                                         const VkImageCopy *regions) {
+    size_t srcSize = 0, dstSize = 0;
+
+    if (FormatIsMultiplane(srcFormat) || FormatIsMultiplane(dstFormat)) {
+        for (uint32_t i = 0; i < region_count; i++) {
+            if (FormatIsMultiplane(srcFormat)) {
+                VkFormat planeFormat = FindMultiplaneCompatibleFormat(srcFormat, regions[i].srcSubresource.aspectMask);
+                srcSize = FormatElementSize(planeFormat);
+            } else {
+                srcSize = FormatElementSize(srcFormat);
+            }
+            if (FormatIsMultiplane(dstFormat)) {
+                VkFormat planeFormat = FindMultiplaneCompatibleFormat(dstFormat, regions[i].dstSubresource.aspectMask);
+                dstSize = FormatElementSize(planeFormat);
+            } else {
+                dstSize = FormatElementSize(dstFormat);
+            }
+            if (dstSize != srcSize) return false;
+        }
+        return true;
+    } else {
+        srcSize = FormatElementSize(srcFormat);
+        dstSize = FormatElementSize(dstFormat);
+        return (dstSize == srcSize);
+    }
+}
+
+VK_LAYER_EXPORT bool FormatRequiresYcbcrConversion(VkFormat format) {
+    return format >= VK_FORMAT_G8B8G8R8_422_UNORM && format <= VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM;
+}
diff --git a/src/third_party/vulkan-validation-layers/src/layers/vk_format_utils.h b/src/third_party/vulkan-validation-layers/src/layers/vk_format_utils.h
new file mode 100644
index 0000000..4656055
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/vk_format_utils.h
@@ -0,0 +1,191 @@
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ */
+
+#pragma once
+#include <stdbool.h>
+#include <vector>
+#include "vulkan/vulkan.h"
+
+#if !defined(VK_LAYER_EXPORT)
+#if defined(__GNUC__) && __GNUC__ >= 4
+#define VK_LAYER_EXPORT __attribute__((visibility("default")))
+#elif defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590)
+#define VK_LAYER_EXPORT __attribute__((visibility("default")))
+#else
+#define VK_LAYER_EXPORT
+#endif
+#endif
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#define VK_MULTIPLANE_FORMAT_MAX_PLANES 3
+
+typedef enum VkFormatCompatibilityClass {
+    VK_FORMAT_COMPATIBILITY_CLASS_NONE_BIT = 0,
+    VK_FORMAT_COMPATIBILITY_CLASS_8_BIT = 1,
+    VK_FORMAT_COMPATIBILITY_CLASS_16_BIT = 2,
+    VK_FORMAT_COMPATIBILITY_CLASS_24_BIT = 3,
+    VK_FORMAT_COMPATIBILITY_CLASS_32_BIT = 4,
+    VK_FORMAT_COMPATIBILITY_CLASS_48_BIT = 5,
+    VK_FORMAT_COMPATIBILITY_CLASS_64_BIT = 6,
+    VK_FORMAT_COMPATIBILITY_CLASS_96_BIT = 7,
+    VK_FORMAT_COMPATIBILITY_CLASS_128_BIT = 8,
+    VK_FORMAT_COMPATIBILITY_CLASS_192_BIT = 9,
+    VK_FORMAT_COMPATIBILITY_CLASS_256_BIT = 10,
+    VK_FORMAT_COMPATIBILITY_CLASS_BC1_RGB_BIT = 11,
+    VK_FORMAT_COMPATIBILITY_CLASS_BC1_RGBA_BIT = 12,
+    VK_FORMAT_COMPATIBILITY_CLASS_BC2_BIT = 13,
+    VK_FORMAT_COMPATIBILITY_CLASS_BC3_BIT = 14,
+    VK_FORMAT_COMPATIBILITY_CLASS_BC4_BIT = 15,
+    VK_FORMAT_COMPATIBILITY_CLASS_BC5_BIT = 16,
+    VK_FORMAT_COMPATIBILITY_CLASS_BC6H_BIT = 17,
+    VK_FORMAT_COMPATIBILITY_CLASS_BC7_BIT = 18,
+    VK_FORMAT_COMPATIBILITY_CLASS_ETC2_RGB_BIT = 19,
+    VK_FORMAT_COMPATIBILITY_CLASS_ETC2_RGBA_BIT = 20,
+    VK_FORMAT_COMPATIBILITY_CLASS_ETC2_EAC_RGBA_BIT = 21,
+    VK_FORMAT_COMPATIBILITY_CLASS_EAC_R_BIT = 22,
+    VK_FORMAT_COMPATIBILITY_CLASS_EAC_RG_BIT = 23,
+    VK_FORMAT_COMPATIBILITY_CLASS_ASTC_4X4_BIT = 24,
+    VK_FORMAT_COMPATIBILITY_CLASS_ASTC_5X4_BIT = 25,
+    VK_FORMAT_COMPATIBILITY_CLASS_ASTC_5X5_BIT = 26,
+    VK_FORMAT_COMPATIBILITY_CLASS_ASTC_6X5_BIT = 27,
+    VK_FORMAT_COMPATIBILITY_CLASS_ASTC_6X6_BIT = 28,
+    VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X5_BIT = 29,
+    VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X6_BIT = 30,
+    VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X8_BIT = 31,
+    VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X5_BIT = 32,
+    VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X6_BIT = 33,
+    VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X8_BIT = 34,
+    VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X10_BIT = 35,
+    VK_FORMAT_COMPATIBILITY_CLASS_ASTC_12X10_BIT = 36,
+    VK_FORMAT_COMPATIBILITY_CLASS_ASTC_12X12_BIT = 37,
+    VK_FORMAT_COMPATIBILITY_CLASS_D16_BIT = 38,
+    VK_FORMAT_COMPATIBILITY_CLASS_D24_BIT = 39,
+    VK_FORMAT_COMPATIBILITY_CLASS_D32_BIT = 40,
+    VK_FORMAT_COMPATIBILITY_CLASS_S8_BIT = 41,
+    VK_FORMAT_COMPATIBILITY_CLASS_D16S8_BIT = 42,
+    VK_FORMAT_COMPATIBILITY_CLASS_D24S8_BIT = 43,
+    VK_FORMAT_COMPATIBILITY_CLASS_D32S8_BIT = 44,
+    VK_FORMAT_COMPATIBILITY_CLASS_PVRTC1_2BPP_BIT = 45,
+    VK_FORMAT_COMPATIBILITY_CLASS_PVRTC1_4BPP_BIT = 46,
+    VK_FORMAT_COMPATIBILITY_CLASS_PVRTC2_2BPP_BIT = 47,
+    VK_FORMAT_COMPATIBILITY_CLASS_PVRTC2_4BPP_BIT = 48,
+    /* KHR_sampler_YCbCr_conversion */
+    VK_FORMAT_COMPATIBILITY_CLASS_32BIT_G8B8G8R8 = 49,
+    VK_FORMAT_COMPATIBILITY_CLASS_32BIT_B8G8R8G8 = 50,
+    VK_FORMAT_COMPATIBILITY_CLASS_64BIT_R10G10B10A10 = 51,
+    VK_FORMAT_COMPATIBILITY_CLASS_64BIT_G10B10G10R10 = 52,
+    VK_FORMAT_COMPATIBILITY_CLASS_64BIT_B10G10R10G10 = 53,
+    VK_FORMAT_COMPATIBILITY_CLASS_64BIT_R12G12B12A12 = 54,
+    VK_FORMAT_COMPATIBILITY_CLASS_64BIT_G12B12G12R12 = 55,
+    VK_FORMAT_COMPATIBILITY_CLASS_64BIT_B12G12R12G12 = 56,
+    VK_FORMAT_COMPATIBILITY_CLASS_64BIT_G16B16G16R16 = 57,
+    VK_FORMAT_COMPATIBILITY_CLASS_64BIT_B16G16R16G16 = 58,
+    VK_FORMAT_COMPATIBILITY_CLASS_8BIT_3PLANE_420 = 59,
+    VK_FORMAT_COMPATIBILITY_CLASS_8BIT_2PLANE_420 = 60,
+    VK_FORMAT_COMPATIBILITY_CLASS_8BIT_3PLANE_422 = 61,
+    VK_FORMAT_COMPATIBILITY_CLASS_8BIT_2PLANE_422 = 62,
+    VK_FORMAT_COMPATIBILITY_CLASS_8BIT_3PLANE_444 = 63,
+    VK_FORMAT_COMPATIBILITY_CLASS_10BIT_3PLANE_420 = 64,
+    VK_FORMAT_COMPATIBILITY_CLASS_10BIT_2PLANE_420 = 65,
+    VK_FORMAT_COMPATIBILITY_CLASS_10BIT_3PLANE_422 = 66,
+    VK_FORMAT_COMPATIBILITY_CLASS_10BIT_2PLANE_422 = 67,
+    VK_FORMAT_COMPATIBILITY_CLASS_10BIT_3PLANE_444 = 68,
+    VK_FORMAT_COMPATIBILITY_CLASS_12BIT_3PLANE_420 = 69,
+    VK_FORMAT_COMPATIBILITY_CLASS_12BIT_2PLANE_420 = 70,
+    VK_FORMAT_COMPATIBILITY_CLASS_12BIT_3PLANE_422 = 71,
+    VK_FORMAT_COMPATIBILITY_CLASS_12BIT_2PLANE_422 = 72,
+    VK_FORMAT_COMPATIBILITY_CLASS_12BIT_3PLANE_444 = 73,
+    VK_FORMAT_COMPATIBILITY_CLASS_16BIT_3PLANE_420 = 74,
+    VK_FORMAT_COMPATIBILITY_CLASS_16BIT_2PLANE_420 = 75,
+    VK_FORMAT_COMPATIBILITY_CLASS_16BIT_3PLANE_422 = 76,
+    VK_FORMAT_COMPATIBILITY_CLASS_16BIT_2PLANE_422 = 77,
+    VK_FORMAT_COMPATIBILITY_CLASS_16BIT_3PLANE_444 = 78,
+    VK_FORMAT_COMPATIBILITY_CLASS_MAX_ENUM = 79
+} VkFormatCompatibilityClass;
+
+typedef enum VkFormatNumericalType {
+    VK_FORMAT_NUMERICAL_TYPE_NONE,
+    VK_FORMAT_NUMERICAL_TYPE_UINT,
+    VK_FORMAT_NUMERICAL_TYPE_SINT,
+    VK_FORMAT_NUMERICAL_TYPE_UNORM,
+    VK_FORMAT_NUMERICAL_TYPE_SNORM,
+    VK_FORMAT_NUMERICAL_TYPE_USCALED,
+    VK_FORMAT_NUMERICAL_TYPE_SSCALED,
+    VK_FORMAT_NUMERICAL_TYPE_UFLOAT,
+    VK_FORMAT_NUMERICAL_TYPE_SFLOAT,
+    VK_FORMAT_NUMERICAL_TYPE_SRGB
+} VkFormatNumericalType;
+
+VK_LAYER_EXPORT bool FormatIsDepthOrStencil(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsDepthAndStencil(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsDepthOnly(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsStencilOnly(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsCompressed_ETC2_EAC(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsCompressed_ASTC_LDR(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsCompressed_BC(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsCompressed_PVRTC(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsSinglePlane_422(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsNorm(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsUNorm(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsSNorm(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsInt(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsSInt(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsUInt(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsFloat(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsSRGB(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsUScaled(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsSScaled(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsCompressed(VkFormat format);
+VK_LAYER_EXPORT bool FormatIsPacked(VkFormat format);
+VK_LAYER_EXPORT bool FormatElementIsTexel(VkFormat format);
+VK_LAYER_EXPORT bool FormatSizesAreEqual(VkFormat srcFormat, VkFormat dstFormat, uint32_t region_count, const VkImageCopy *regions);
+VK_LAYER_EXPORT bool FormatRequiresYcbcrConversion(VkFormat format);
+
+VK_LAYER_EXPORT uint32_t FormatDepthSize(VkFormat format);
+VK_LAYER_EXPORT VkFormatNumericalType FormatDepthNumericalType(VkFormat format);
+VK_LAYER_EXPORT uint32_t FormatStencilSize(VkFormat format);
+VK_LAYER_EXPORT VkFormatNumericalType FormatStencilNumericalType(VkFormat format);
+VK_LAYER_EXPORT uint32_t FormatPlaneCount(VkFormat format);
+VK_LAYER_EXPORT uint32_t FormatChannelCount(VkFormat format);
+VK_LAYER_EXPORT VkExtent3D FormatTexelBlockExtent(VkFormat format);
+VK_LAYER_EXPORT uint32_t FormatElementSize(VkFormat format, VkImageAspectFlags aspectMask = VK_IMAGE_ASPECT_COLOR_BIT);
+VK_LAYER_EXPORT double FormatTexelSize(VkFormat format);
+VK_LAYER_EXPORT VkFormatCompatibilityClass FormatCompatibilityClass(VkFormat format);
+VK_LAYER_EXPORT VkDeviceSize SafeModulo(VkDeviceSize dividend, VkDeviceSize divisor);
+VK_LAYER_EXPORT VkDeviceSize SafeDivision(VkDeviceSize dividend, VkDeviceSize divisor);
+VK_LAYER_EXPORT uint32_t GetPlaneIndex(VkImageAspectFlags aspect);
+VK_LAYER_EXPORT VkFormat FindMultiplaneCompatibleFormat(VkFormat fmt, VkImageAspectFlags plane_aspect);
+VK_LAYER_EXPORT VkExtent2D FindMultiplaneExtentDivisors(VkFormat mp_fmt, VkImageAspectFlags plane_aspect);
+
+static inline bool FormatIsUndef(VkFormat format) { return (format == VK_FORMAT_UNDEFINED); }
+static inline bool FormatHasDepth(VkFormat format) { return (FormatIsDepthOnly(format) || FormatIsDepthAndStencil(format)); }
+static inline bool FormatHasStencil(VkFormat format) { return (FormatIsStencilOnly(format) || FormatIsDepthAndStencil(format)); }
+static inline bool FormatIsMultiplane(VkFormat format) { return ((FormatPlaneCount(format)) > 1u); }
+static inline bool FormatIsColor(VkFormat format) {
+    return !(FormatIsUndef(format) || FormatIsDepthOrStencil(format) || FormatIsMultiplane(format));
+}
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/src/third_party/vulkan-validation-layers/src/layers/vk_layer_config.cpp b/src/third_party/vulkan-validation-layers/src/layers/vk_layer_config.cpp
new file mode 100644
index 0000000..21a06ba
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/vk_layer_config.cpp
@@ -0,0 +1,336 @@
+/**************************************************************************
+ *
+ * Copyright 2014-2019 Valve Software
+ * Copyright 2015-2019 Google Inc.
+ * Copyright 2019 LunarG, Inc.
+ * All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Jon Ashburn <jon@lunarg.com>
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Tobin Ehlis <tobin@lunarg.com>
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ **************************************************************************/
+#include "vk_layer_config.h"
+
+#include <string.h>
+#include <fstream>
+#include <iostream>
+#include <map>
+#include <sstream>
+#include <string>
+#include <sys/stat.h>
+
+#include <vulkan/vk_layer.h>
+#include <vulkan/vk_sdk_platform.h>
+#include "vk_layer_utils.h"
+
+#if defined(_WIN32)
+#include <windows.h>
+#endif
+
+using std::string;
+
+class ConfigFile {
+  public:
+    ConfigFile();
+    ~ConfigFile(){};
+
+    const char *GetOption(const string &option);
+    void SetOption(const string &option, const string &value);
+    string vk_layer_disables_env_var;
+
+  private:
+    bool file_is_parsed_;
+    std::map<string, string> value_map_;
+
+    string FindSettings();
+    void ParseFile(const char *filename);
+};
+
+static ConfigFile layer_config;
+
+string GetEnvironment(const char *variable) {
+#if !defined(__ANDROID__) && !defined(_WIN32)
+    const char *output = getenv(variable);
+    return output == NULL ? "" : output;
+#elif defined(_WIN32)
+    int size = GetEnvironmentVariable(variable, NULL, 0);
+    if (size == 0) {
+        return "";
+    }
+    char *buffer = new char[size];
+    GetEnvironmentVariable(variable, buffer, size);
+    string output = buffer;
+    delete[] buffer;
+    return output;
+#else
+    return "";
+#endif
+}
+
+VK_LAYER_EXPORT const char *getLayerOption(const char *option) { return layer_config.GetOption(option); }
+VK_LAYER_EXPORT const char *GetLayerEnvVar(const char *option) {
+    layer_config.vk_layer_disables_env_var = GetEnvironment(option);
+    return layer_config.vk_layer_disables_env_var.c_str();
+}
+
+// If option is NULL or stdout, return stdout, otherwise try to open option
+// as a filename. If successful, return file handle, otherwise stdout
+VK_LAYER_EXPORT FILE *getLayerLogOutput(const char *option, const char *layer_name) {
+    FILE *log_output = NULL;
+    if (!option || !strcmp("stdout", option))
+        log_output = stdout;
+    else {
+        log_output = fopen(option, "w");
+        if (log_output == NULL) {
+            if (option)
+                std::cout << std::endl
+                          << layer_name << " ERROR: Bad output filename specified: " << option << ". Writing to STDOUT instead"
+                          << std::endl
+                          << std::endl;
+            log_output = stdout;
+        }
+    }
+    return log_output;
+}
+
+// Map option strings to flag enum values
+VK_LAYER_EXPORT VkFlags GetLayerOptionFlags(string option, std::unordered_map<string, VkFlags> const &enum_data,
+                                            uint32_t option_default) {
+    VkDebugReportFlagsEXT flags = option_default;
+    string option_list = layer_config.GetOption(option.c_str());
+
+    while (option_list.length() != 0) {
+        // Find length of option string
+        std::size_t option_length = option_list.find(",");
+        if (option_length == option_list.npos) {
+            option_length = option_list.size();
+        }
+
+        // Get first option item in list
+        const string option_item = option_list.substr(0, option_length);
+
+        auto enum_value = enum_data.find(option_item);
+        if (enum_value != enum_data.end()) {
+            flags |= enum_value->second;
+        }
+
+        // Remove first option from option_list
+        option_list.erase(0, option_length);
+        // Remove possible comma separator
+        std::size_t char_position = option_list.find(",");
+        if (char_position == 0) {
+            option_list.erase(char_position, 1);
+        }
+        // Remove possible space
+        char_position = option_list.find(" ");
+        if (char_position == 0) {
+            option_list.erase(char_position, 1);
+        }
+    }
+    return flags;
+}
+
+VK_LAYER_EXPORT void setLayerOption(const char *option, const char *value) { layer_config.SetOption(option, value); }
+
+// Constructor for ConfigFile. Initialize layers to log error messages to stdout by default. If a vk_layer_settings file is present,
+// its settings will override the defaults.
+ConfigFile::ConfigFile() : file_is_parsed_(false) {
+    value_map_["khronos_validation.report_flags"] = "error";
+
+#ifdef WIN32
+    // For Windows, enable message logging AND OutputDebugString
+    value_map_["khronos_validation.debug_action"] =
+        "VK_DBG_LAYER_ACTION_DEFAULT,VK_DBG_LAYER_ACTION_LOG_MSG,VK_DBG_LAYER_ACTION_DEBUG_OUTPUT";
+#else   // WIN32
+    value_map_["khronos_validation.debug_action"] = "VK_DBG_LAYER_ACTION_DEFAULT,VK_DBG_LAYER_ACTION_LOG_MSG";
+#endif  // WIN32
+    value_map_["khronos_validation.log_filename"] = "stdout";
+}
+
+const char *ConfigFile::GetOption(const string &option) {
+    std::map<string, string>::const_iterator it;
+    if (!file_is_parsed_) {
+        string settings_file = FindSettings();
+        ParseFile(settings_file.c_str());
+    }
+
+    if ((it = value_map_.find(option)) == value_map_.end())
+        return "";
+    else
+        return it->second.c_str();
+}
+
+void ConfigFile::SetOption(const string &option, const string &val) {
+    if (!file_is_parsed_) {
+        string settings_file = FindSettings();
+        ParseFile(settings_file.c_str());
+    }
+
+    value_map_[option] = val;
+}
+
+string ConfigFile::FindSettings() {
+    struct stat info;
+
+#if defined(WIN32)
+    HKEY hive;
+    LSTATUS err = RegOpenKeyEx(HKEY_CURRENT_USER, "Software\\Khronos\\Vulkan\\Settings", 0, KEY_READ, &hive);
+    if (err == ERROR_SUCCESS) {
+        char name[2048];
+        DWORD i = 0, name_size = sizeof(name), type, value, value_size = sizeof(value);
+        while (ERROR_SUCCESS ==
+               RegEnumValue(hive, i++, name, &name_size, nullptr, &type, reinterpret_cast<LPBYTE>(&value), &value_size)) {
+            // Check if the registry entry is a dword with a value of zero
+            if (type != REG_DWORD || value != 0) {
+                continue;
+            }
+
+            // Check if this actually points to a file
+            if ((stat(name, &info) != 0) || !(info.st_mode & S_IFREG)) {
+                continue;
+            }
+
+            // Use this file
+            RegCloseKey(hive);
+            return name;
+        }
+
+        RegCloseKey(hive);
+    }
+#else
+    string search_path = GetEnvironment("XDG_DATA_HOME");
+    if (search_path == "") {
+        search_path = GetEnvironment("HOME");
+        if (search_path != "") {
+            search_path += "/.local/share";
+        }
+    }
+
+    // Use the vk_layer_settings.txt file from here, if it is present
+    if (search_path != "") {
+        string home_file = search_path + "/vulkan/settings.d/vk_layer_settings.txt";
+        if (stat(home_file.c_str(), &info) == 0) {
+            if (info.st_mode & S_IFREG) {
+                return home_file;
+            }
+        }
+    }
+
+#endif
+
+    string env_path = GetEnvironment("VK_LAYER_SETTINGS_PATH");
+
+    // If the path exists use it, else use vk_layer_settings
+    if (stat(env_path.c_str(), &info) == 0) {
+        // If this is a directory, look for vk_layer_settings within the directory
+        if (info.st_mode & S_IFDIR) {
+            return env_path + "/vk_layer_settings.txt";
+        }
+        return env_path;
+    }
+    return "vk_layer_settings.txt";
+}
+
+void ConfigFile::ParseFile(const char *filename) {
+    file_is_parsed_ = true;
+
+    // extract option = value pairs from a file
+    std::ifstream file(filename);
+    for (string line; std::getline(file, line);) {
+        // discard comments, which start with '#'
+        const auto comments_pos = line.find_first_of('#');
+        if (comments_pos != string::npos) line.erase(comments_pos);
+
+        const auto value_pos = line.find_first_of('=');
+        if (value_pos != string::npos) {
+            const string option = string_trim(line.substr(0, value_pos));
+            const string value = string_trim(line.substr(value_pos + 1));
+            value_map_[option] = value;
+        }
+    }
+}
+
+VK_LAYER_EXPORT void PrintMessageFlags(VkFlags vk_flags, char *msg_flags) {
+    bool separator = false;
+
+    msg_flags[0] = 0;
+    if (vk_flags & VK_DEBUG_REPORT_DEBUG_BIT_EXT) {
+        strcat(msg_flags, "DEBUG");
+        separator = true;
+    }
+    if (vk_flags & VK_DEBUG_REPORT_INFORMATION_BIT_EXT) {
+        if (separator) strcat(msg_flags, ",");
+        strcat(msg_flags, "INFO");
+        separator = true;
+    }
+    if (vk_flags & VK_DEBUG_REPORT_WARNING_BIT_EXT) {
+        if (separator) strcat(msg_flags, ",");
+        strcat(msg_flags, "WARN");
+        separator = true;
+    }
+    if (vk_flags & VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT) {
+        if (separator) strcat(msg_flags, ",");
+        strcat(msg_flags, "PERF");
+        separator = true;
+    }
+    if (vk_flags & VK_DEBUG_REPORT_ERROR_BIT_EXT) {
+        if (separator) strcat(msg_flags, ",");
+        strcat(msg_flags, "ERROR");
+    }
+}
+
+VK_LAYER_EXPORT void PrintMessageSeverity(VkFlags vk_flags, char *msg_flags) {
+    bool separator = false;
+
+    msg_flags[0] = 0;
+    if (vk_flags & VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT) {
+        strcat(msg_flags, "VERBOSE");
+        separator = true;
+    }
+    if (vk_flags & VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT) {
+        if (separator) strcat(msg_flags, ",");
+        strcat(msg_flags, "INFO");
+        separator = true;
+    }
+    if (vk_flags & VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT) {
+        if (separator) strcat(msg_flags, ",");
+        strcat(msg_flags, "WARN");
+        separator = true;
+    }
+    if (vk_flags & VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) {
+        if (separator) strcat(msg_flags, ",");
+        strcat(msg_flags, "ERROR");
+    }
+}
+
+VK_LAYER_EXPORT void PrintMessageType(VkFlags vk_flags, char *msg_flags) {
+    bool separator = false;
+
+    msg_flags[0] = 0;
+    if (vk_flags & VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT) {
+        strcat(msg_flags, "GEN");
+        separator = true;
+    }
+    if (vk_flags & VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT) {
+        if (separator) strcat(msg_flags, ",");
+        strcat(msg_flags, "SPEC");
+        separator = true;
+    }
+    if (vk_flags & VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT) {
+        if (separator) strcat(msg_flags, ",");
+        strcat(msg_flags, "PERF");
+    }
+}
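
The configuration machinery above resolves settings in three layers: built-in defaults from the ConfigFile constructor, a vk_layer_settings.txt file located via VK_LAYER_SETTINGS_PATH (or the registry/XDG paths in FindSettings()), and explicit setLayerOption() calls. ParseFile() accepts lines of the form "khronos_validation.report_flags = error,warn,perf", with '#' starting a comment, and GetLayerOptionFlags() ORs together the flags named in such a comma-separated value. A hedged sketch of how a layer might consume these options, using the option table declared in vk_layer_config.h; it mirrors typical validation-layer usage but is not code from this patch:

    // Illustrative only: read the report flags and log destination configured above.
    VkFlags report_flags = GetLayerOptionFlags("khronos_validation.report_flags",
                                               report_flags_option_definitions,
                                               VK_DEBUG_REPORT_ERROR_BIT_EXT);
    FILE *log_output = getLayerLogOutput(getLayerOption("khronos_validation.log_filename"),
                                         "khronos_validation");
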
diff --git a/src/third_party/vulkan-validation-layers/src/layers/vk_layer_config.h b/src/third_party/vulkan-validation-layers/src/layers/vk_layer_config.h
new file mode 100644
index 0000000..535719c
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/vk_layer_config.h
@@ -0,0 +1,75 @@
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Jon Ashburn <jon@lunarg.com>
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ **************************************************************************/
+#pragma once
+
+#include <stdio.h>
+#include <string>
+#include <unordered_map>
+
+#include "vulkan/vk_layer.h"
+#include "vulkan/vulkan.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+// Definitions for Debug Actions
+typedef enum VkLayerDbgActionBits {
+    VK_DBG_LAYER_ACTION_IGNORE = 0x00000000,
+    VK_DBG_LAYER_ACTION_CALLBACK = 0x00000001,
+    VK_DBG_LAYER_ACTION_LOG_MSG = 0x00000002,
+    VK_DBG_LAYER_ACTION_BREAK = 0x00000004,
+    VK_DBG_LAYER_ACTION_DEBUG_OUTPUT = 0x00000008,
+    VK_DBG_LAYER_ACTION_DEFAULT = 0x40000000,
+} VkLayerDbgActionBits;
+typedef VkFlags VkLayerDbgActionFlags;
+
+const std::unordered_map<std::string, VkFlags> debug_actions_option_definitions = {
+    {std::string("VK_DBG_LAYER_ACTION_IGNORE"), VK_DBG_LAYER_ACTION_IGNORE},
+    {std::string("VK_DBG_LAYER_ACTION_CALLBACK"), VK_DBG_LAYER_ACTION_CALLBACK},
+    {std::string("VK_DBG_LAYER_ACTION_LOG_MSG"), VK_DBG_LAYER_ACTION_LOG_MSG},
+    {std::string("VK_DBG_LAYER_ACTION_BREAK"), VK_DBG_LAYER_ACTION_BREAK},
+#if defined(WIN32)
+    {std::string("VK_DBG_LAYER_ACTION_DEBUG_OUTPUT"), VK_DBG_LAYER_ACTION_DEBUG_OUTPUT},
+#endif
+    {std::string("VK_DBG_LAYER_ACTION_DEFAULT"), VK_DBG_LAYER_ACTION_DEFAULT}};
+
+const std::unordered_map<std::string, VkFlags> report_flags_option_definitions = {
+    {std::string("warn"), VK_DEBUG_REPORT_WARNING_BIT_EXT},
+    {std::string("info"), VK_DEBUG_REPORT_INFORMATION_BIT_EXT},
+    {std::string("perf"), VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT},
+    {std::string("error"), VK_DEBUG_REPORT_ERROR_BIT_EXT},
+    {std::string("debug"), VK_DEBUG_REPORT_DEBUG_BIT_EXT}};
+
+VK_LAYER_EXPORT const char *getLayerOption(const char *option);
+VK_LAYER_EXPORT const char *GetLayerEnvVar(const char *option);
+
+VK_LAYER_EXPORT FILE *getLayerLogOutput(const char *option, const char *layer_name);
+VK_LAYER_EXPORT VkFlags GetLayerOptionFlags(std::string option, std::unordered_map<std::string, VkFlags> const &enum_data,
+                                            uint32_t option_default);
+
+VK_LAYER_EXPORT void setLayerOption(const char *option, const char *val);
+VK_LAYER_EXPORT void PrintMessageFlags(VkFlags vk_flags, char *msg_flags);
+VK_LAYER_EXPORT void PrintMessageSeverity(VkFlags vk_flags, char *msg_flags);
+VK_LAYER_EXPORT void PrintMessageType(VkFlags vk_flags, char *msg_flags);
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/src/third_party/vulkan-validation-layers/src/layers/vk_layer_data.h b/src/third_party/vulkan-validation-layers/src/layers/vk_layer_data.h
new file mode 100644
index 0000000..c822f47
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/vk_layer_data.h
@@ -0,0 +1,391 @@
+/* Copyright (c) 2015-2017, 2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2017, 2019 Valve Corporation
+ * Copyright (c) 2015-2017, 2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Tobin Ehlis <tobine@google.com>
+ */
+
+#ifndef LAYER_DATA_H
+#define LAYER_DATA_H
+
+#include <cassert>
+#include <unordered_map>
+#include <unordered_set>
+
+// This is a wrapper around unordered_map that optimizes for the common case
+// of only containing a small number of elements. The first N elements are stored
+// inline in the object and don't require hashing or memory (de)allocation.
+
+template <typename Key, typename value_type, typename inner_container_type, typename value_type_helper, int N>
+class small_container {
+  protected:
+    bool small_data_allocated[N];
+    value_type small_data[N];
+
+    inner_container_type inner_cont;
+
+  public:
+    small_container() {
+        for (int i = 0; i < N; ++i) {
+            small_data_allocated[i] = false;
+        }
+    }
+
+    class iterator {
+        typedef typename inner_container_type::iterator inner_iterator;
+        friend class small_container<Key, value_type, inner_container_type, value_type_helper, N>;
+
+        small_container<Key, value_type, inner_container_type, value_type_helper, N> *parent;
+        int index;
+        inner_iterator it;
+
+      public:
+        iterator() {}
+
+        iterator operator++() {
+            if (index < N) {
+                index++;
+                while (index < N && !parent->small_data_allocated[index]) {
+                    index++;
+                }
+                if (index < N) {
+                    return *this;
+                }
+                it = parent->inner_cont.begin();
+                return *this;
+            }
+            ++it;
+            return *this;
+        }
+
+        bool operator==(const iterator &other) const {
+            if ((index < N) != (other.index < N)) {
+                return false;
+            }
+            if (index < N) {
+                return (index == other.index);
+            }
+            return it == other.it;
+        }
+
+        bool operator!=(const iterator &other) const { return !(*this == other); }
+
+        value_type &operator*() const {
+            if (index < N) {
+                return parent->small_data[index];
+            }
+            return *it;
+        }
+        value_type *operator->() const {
+            if (index < N) {
+                return &parent->small_data[index];
+            }
+            return &*it;
+        }
+    };
+
+    class const_iterator {
+        typedef typename inner_container_type::const_iterator inner_iterator;
+        friend class small_container<Key, value_type, inner_container_type, value_type_helper, N>;
+
+        const small_container<Key, value_type, inner_container_type, value_type_helper, N> *parent;
+        int index;
+        inner_iterator it;
+
+      public:
+        const_iterator() {}
+
+        const_iterator operator++() {
+            if (index < N) {
+                index++;
+                while (index < N && !parent->small_data_allocated[index]) {
+                    index++;
+                }
+                if (index < N) {
+                    return *this;
+                }
+                it = parent->inner_cont.begin();
+                return *this;
+            }
+            ++it;
+            return *this;
+        }
+
+        bool operator==(const const_iterator &other) const {
+            if ((index < N) != (other.index < N)) {
+                return false;
+            }
+            if (index < N) {
+                return (index == other.index);
+            }
+            return it == other.it;
+        }
+
+        bool operator!=(const const_iterator &other) const { return !(*this == other); }
+
+        const value_type &operator*() const {
+            if (index < N) {
+                return parent->small_data[index];
+            }
+            return *it;
+        }
+        const value_type *operator->() const {
+            if (index < N) {
+                return &parent->small_data[index];
+            }
+            return &*it;
+        }
+    };
+
+    iterator begin() {
+        iterator it;
+        it.parent = this;
+        // If index 0 is allocated, return it, otherwise use operator++ to find the first
+        // allocated element.
+        it.index = 0;
+        if (small_data_allocated[0]) {
+            return it;
+        }
+        ++it;
+        return it;
+    }
+
+    iterator end() {
+        iterator it;
+        it.parent = this;
+        it.index = N;
+        it.it = inner_cont.end();
+        return it;
+    }
+
+    const_iterator begin() const {
+        const_iterator it;
+        it.parent = this;
+        // If index 0 is allocated, return it, otherwise use operator++ to find the first
+        // allocated element.
+        it.index = 0;
+        if (small_data_allocated[0]) {
+            return it;
+        }
+        ++it;
+        return it;
+    }
+
+    const_iterator end() const {
+        const_iterator it;
+        it.parent = this;
+        it.index = N;
+        it.it = inner_cont.end();
+        return it;
+    }
+
+    bool contains(const Key &key) const {
+        for (int i = 0; i < N; ++i) {
+            if (value_type_helper().compare_equal(small_data[i], key) && small_data_allocated[i]) {
+                return true;
+            }
+        }
+        // check size() first to avoid hashing key unnecessarily.
+        if (inner_cont.size() == 0) {
+            return false;
+        }
+        return inner_cont.find(key) != inner_cont.end();
+    }
+
+    typename inner_container_type::size_type count(const Key &key) const { return contains(key) ? 1 : 0; }
+
+    std::pair<iterator, bool> insert(const value_type &value) {
+        for (int i = 0; i < N; ++i) {
+            if (value_type_helper().compare_equal(small_data[i], value) && small_data_allocated[i]) {
+                iterator it;
+                it.parent = this;
+                it.index = i;
+                return std::make_pair(it, false);
+            }
+        }
+        // check size() first to avoid hashing key unnecessarily.
+        auto iter = inner_cont.size() > 0 ? inner_cont.find(value_type_helper().get_key(value)) : inner_cont.end();
+        if (iter != inner_cont.end()) {
+            iterator it;
+            it.parent = this;
+            it.index = N;
+            it.it = iter;
+            return std::make_pair(it, false);
+        } else {
+            for (int i = 0; i < N; ++i) {
+                if (!small_data_allocated[i]) {
+                    small_data_allocated[i] = true;
+                    value_type_helper().assign(small_data[i], value);
+                    iterator it;
+                    it.parent = this;
+                    it.index = i;
+                    return std::make_pair(it, true);
+                }
+            }
+            iter = inner_cont.insert(value).first;
+            iterator it;
+            it.parent = this;
+            it.index = N;
+            it.it = iter;
+            return std::make_pair(it, true);
+        }
+    }
+
+    typename inner_container_type::size_type erase(const Key &key) {
+        for (int i = 0; i < N; ++i) {
+            if (value_type_helper().compare_equal(small_data[i], key) && small_data_allocated[i]) {
+                small_data_allocated[i] = false;
+                return 1;
+            }
+        }
+        return inner_cont.erase(key);
+    }
+
+    typename inner_container_type::size_type size() const {
+        auto size = inner_cont.size();
+        for (int i = 0; i < N; ++i) {
+            if (small_data_allocated[i]) {
+                size++;
+            }
+        }
+        return size;
+    }
+
+    bool empty() const {
+        for (int i = 0; i < N; ++i) {
+            if (small_data_allocated[i]) {
+                return false;
+            }
+        }
+        return inner_cont.size() == 0;
+    }
+
+    void clear() {
+        for (int i = 0; i < N; ++i) {
+            small_data_allocated[i] = false;
+        }
+        inner_cont.clear();
+    }
+};
+
+// Helper function objects to compare/assign/get keys in small_unordered_set/map.
+// This helps to abstract away whether value_type is a Key or a pair<Key, T>.
+template <typename Key, typename T>
+class value_type_helper_map {
+  public:
+    bool compare_equal(const std::pair<const Key, T> &lhs, const Key &rhs) const { return lhs.first == rhs; }
+    bool compare_equal(const std::pair<const Key, T> &lhs, const std::pair<const Key, T> &rhs) const {
+        return lhs.first == rhs.first;
+    }
+
+    void assign(std::pair<const Key, T> &lhs, const std::pair<Key, T> &rhs) const {
+        // While the const_cast may be unsatisfactory, we are using small_data as
+        // stand-in for placement new and a small-block allocator, so the const_cast
+        // is minimal, contained, valid, and allows operators * and -> to avoid copies
+        const_cast<Key &>(lhs.first) = rhs.first;
+        lhs.second = rhs.second;
+    }
+
+    Key get_key(const std::pair<const Key, T> &value) const { return value.first; }
+};
+
+template <typename Key>
+class value_type_helper_set {
+  public:
+    bool compare_equal(const Key &lhs, const Key &rhs) const { return lhs == rhs; }
+
+    void assign(Key &lhs, const Key &rhs) const { lhs = rhs; }
+
+    Key get_key(const Key &value) const { return value; }
+};
+
+template <typename Key, typename T, int N = 1>
+class small_unordered_map
+    : public small_container<Key, std::pair<const Key, T>, std::unordered_map<Key, T>, value_type_helper_map<Key, T>, N> {
+  public:
+    T &operator[](const Key &key) {
+        for (int i = 0; i < N; ++i) {
+            if (value_type_helper_map<Key, T>().compare_equal(this->small_data[i], key) && this->small_data_allocated[i]) {
+                return this->small_data[i].second;
+            }
+        }
+        auto iter = this->inner_cont.find(key);
+        if (iter != this->inner_cont.end()) {
+            return iter->second;
+        } else {
+            for (int i = 0; i < N; ++i) {
+                if (!this->small_data_allocated[i]) {
+                    this->small_data_allocated[i] = true;
+                    value_type_helper_map<Key, T>().assign(this->small_data[i], std::make_pair(key, T()));
+
+                    return this->small_data[i].second;
+                }
+            }
+            return this->inner_cont[key];
+        }
+    }
+};
+
+template <typename Key, int N = 1>
+class small_unordered_set : public small_container<Key, Key, std::unordered_set<Key>, value_type_helper_set<Key>, N> {};
+
+// For the given data key, look up the layer_data instance from given layer_data_map
+template <typename DATA_T>
+DATA_T *GetLayerDataPtr(void *data_key, small_unordered_map<void *, DATA_T *, 2> &layer_data_map) {
+    /* TODO: We probably should lock here, or have caller lock */
+    DATA_T *&got = layer_data_map[data_key];
+
+    if (got == nullptr) {
+        got = new DATA_T;
+    }
+
+    return got;
+}
+
+template <typename DATA_T>
+void FreeLayerDataPtr(void *data_key, small_unordered_map<void *, DATA_T *, 2> &layer_data_map) {
+    delete layer_data_map[data_key];
+    layer_data_map.erase(data_key);
+}
+
+// For the given data key, look up the layer_data instance from given layer_data_map
+template <typename DATA_T>
+DATA_T *GetLayerDataPtr(void *data_key, std::unordered_map<void *, DATA_T *> &layer_data_map) {
+    DATA_T *debug_data;
+    typename std::unordered_map<void *, DATA_T *>::const_iterator got;
+
+    /* TODO: We probably should lock here, or have caller lock */
+    got = layer_data_map.find(data_key);
+
+    if (got == layer_data_map.end()) {
+        debug_data = new DATA_T;
+        layer_data_map[(void *)data_key] = debug_data;
+    } else {
+        debug_data = got->second;
+    }
+
+    return debug_data;
+}
+
+template <typename DATA_T>
+void FreeLayerDataPtr(void *data_key, std::unordered_map<void *, DATA_T *> &layer_data_map) {
+    auto got = layer_data_map.find(data_key);
+    assert(got != layer_data_map.end());
+
+    delete got->second;
+    layer_data_map.erase(got);
+}
+
+#endif  // LAYER_DATA_H
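
small_unordered_map and small_unordered_set above keep their first N entries in inline storage and spill into the wrapped std::unordered_* container only beyond that, which suits the dispatch-key maps the layers use (typically one or two live keys). GetLayerDataPtr() lazily allocates the per-key data and FreeLayerDataPtr() deletes it. An illustrative sketch under that reading; instance_layer_data and the callback names are hypothetical stand-ins, not symbols from this patch:

    // Illustrative only: a hypothetical per-instance data blob keyed by dispatch key.
    struct instance_layer_data {
        uint32_t api_version = 0;  // whatever per-instance state a layer needs
    };
    static small_unordered_map<void *, instance_layer_data *, 2> layer_data_map;

    void OnCreateInstance(void *dispatch_key) {
        // The first lookup for a key allocates the data; later lookups return the same pointer.
        instance_layer_data *data = GetLayerDataPtr(dispatch_key, layer_data_map);
        (void)data;
    }

    void OnDestroyInstance(void *dispatch_key) {
        FreeLayerDataPtr(dispatch_key, layer_data_map);  // deletes the data and erases the entry
    }
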
diff --git a/src/third_party/vulkan-validation-layers/src/layers/vk_layer_extension_utils.cpp b/src/third_party/vulkan-validation-layers/src/layers/vk_layer_extension_utils.cpp
new file mode 100644
index 0000000..4c1e396
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/vk_layer_extension_utils.cpp
@@ -0,0 +1,66 @@
+/* Copyright (c) 2015-2016 The Khronos Group Inc.
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ *
+ */
+
+#include "string.h"
+#include "vk_layer_extension_utils.h"
+
+#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))
+
+/*
+ * This file contains utility functions for layers
+ */
+
+VK_LAYER_EXPORT VkResult util_GetExtensionProperties(const uint32_t count, const VkExtensionProperties *layer_extensions,
+                                                     uint32_t *pCount, VkExtensionProperties *pProperties) {
+    uint32_t copy_size;
+
+    if (pProperties == NULL || layer_extensions == NULL) {
+        *pCount = count;
+        return VK_SUCCESS;
+    }
+
+    copy_size = *pCount < count ? *pCount : count;
+    memcpy(pProperties, layer_extensions, copy_size * sizeof(VkExtensionProperties));
+    *pCount = copy_size;
+    if (copy_size < count) {
+        return VK_INCOMPLETE;
+    }
+
+    return VK_SUCCESS;
+}
+
+VK_LAYER_EXPORT VkResult util_GetLayerProperties(const uint32_t count, const VkLayerProperties *layer_properties, uint32_t *pCount,
+                                                 VkLayerProperties *pProperties) {
+    uint32_t copy_size;
+
+    if (pProperties == NULL || layer_properties == NULL) {
+        *pCount = count;
+        return VK_SUCCESS;
+    }
+
+    copy_size = *pCount < count ? *pCount : count;
+    memcpy(pProperties, layer_properties, copy_size * sizeof(VkLayerProperties));
+    *pCount = copy_size;
+    if (copy_size < count) {
+        return VK_INCOMPLETE;
+    }
+
+    return VK_SUCCESS;
+}
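
util_GetExtensionProperties() and util_GetLayerProperties() implement the standard Vulkan two-call enumeration idiom: when the output pointer is NULL only the count is written, otherwise up to *pCount entries are copied and VK_INCOMPLETE is returned if that was fewer than the layer advertises. A sketch of a layer enumeration entry point built on top of them; the kInstanceExtensions table and function name are hypothetical examples, not part of this patch:

    // Illustrative only: forwards the two-call protocol to util_GetExtensionProperties().
    static const VkExtensionProperties kInstanceExtensions[] = {
        {VK_EXT_DEBUG_UTILS_EXTENSION_NAME, VK_EXT_DEBUG_UTILS_SPEC_VERSION},
    };

    VkResult EnumerateInstanceExtensionProperties(const char * /*pLayerName*/, uint32_t *pCount,
                                                  VkExtensionProperties *pProperties) {
        return util_GetExtensionProperties(static_cast<uint32_t>(ARRAY_SIZE(kInstanceExtensions)),
                                           kInstanceExtensions, pCount, pProperties);
    }
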
diff --git a/src/third_party/vulkan-validation-layers/src/layers/vk_layer_extension_utils.h b/src/third_party/vulkan-validation-layers/src/layers/vk_layer_extension_utils.h
new file mode 100644
index 0000000..4a51c16
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/vk_layer_extension_utils.h
@@ -0,0 +1,40 @@
+/* Copyright (c) 2015-2016 The Khronos Group Inc.
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ *
+ */
+
+#include "vulkan/vk_layer.h"
+
+#ifndef LAYER_EXTENSION_UTILS_H
+#define LAYER_EXTENSION_UTILS_H
+
+#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))
+
+/*
+ * This file contains static functions for the generated layers
+ */
+extern "C" {
+
+VK_LAYER_EXPORT VkResult util_GetExtensionProperties(const uint32_t count, const VkExtensionProperties *layer_extensions,
+                                                     uint32_t *pCount, VkExtensionProperties *pProperties);
+
+VK_LAYER_EXPORT VkResult util_GetLayerProperties(const uint32_t count, const VkLayerProperties *layer_properties, uint32_t *pCount,
+                                                 VkLayerProperties *pProperties);
+
+}  // extern "C"
+#endif  // LAYER_EXTENSION_UTILS_H
diff --git a/src/third_party/vulkan-validation-layers/src/layers/vk_layer_logging.h b/src/third_party/vulkan-validation-layers/src/layers/vk_layer_logging.h
new file mode 100644
index 0000000..08f7b8d
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/vk_layer_logging.h
@@ -0,0 +1,884 @@
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Tobin Ehlis <tobin@lunarg.com>
+ * Author: Mark Young <marky@lunarg.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ *
+ */
+
+#ifndef LAYER_LOGGING_H
+#define LAYER_LOGGING_H
+
+#include <cinttypes>
+#include <signal.h>
+#include <stdarg.h>
+#include <stdbool.h>
+#include <stdio.h>
+
+#include <algorithm>
+#include <array>
+#include <memory>
+#include <mutex>
+#include <sstream>
+#include <string>
+#include <vector>
+#include <unordered_map>
+#include <utility>
+
+#include "vk_typemap_helper.h"
+#include "vk_layer_config.h"
+#include "vk_layer_data.h"
+#include "vk_loader_platform.h"
+#include "vulkan/vk_layer.h"
+#include "vk_object_types.h"
+#include "cast_utils.h"
+#include "vk_validation_error_messages.h"
+#include "vk_layer_dispatch_table.h"
+#include "vk_safe_struct.h"
+
+// Suppress unused warning on Linux
+#if defined(__GNUC__)
+#define DECORATE_UNUSED __attribute__((unused))
+#else
+#define DECORATE_UNUSED
+#endif
+
+#if defined __ANDROID__
+#include <android/log.h>
+#define LOGCONSOLE(...) ((void)__android_log_print(ANDROID_LOG_INFO, "VALIDATION", __VA_ARGS__))
+#else
+#define LOGCONSOLE(...)      \
+    {                        \
+        printf(__VA_ARGS__); \
+        printf("\n");        \
+    }
+#endif
+
+static const char DECORATE_UNUSED *kVUIDUndefined = "VUID_Undefined";
+
+#undef DECORATE_UNUSED
+
+typedef enum DebugCallbackStatusBits {
+    DEBUG_CALLBACK_UTILS = 0x00000001,     // This struct describes a VK_EXT_debug_utils callback
+    DEBUG_CALLBACK_DEFAULT = 0x00000002,   // An internally created callback, used if no user-defined callbacks are registered
+    DEBUG_CALLBACK_INSTANCE = 0x00000004,  // An internally created temporary instance callback
+} DebugCallbackStatusBits;
+typedef VkFlags DebugCallbackStatusFlags;
+
+typedef struct {
+    DebugCallbackStatusFlags callback_status;
+
+    // Debug report related information
+    VkDebugReportCallbackEXT debug_report_callback_object;
+    PFN_vkDebugReportCallbackEXT debug_report_callback_function_ptr;
+    VkFlags debug_report_msg_flags;
+
+    // Debug utils related information
+    VkDebugUtilsMessengerEXT debug_utils_callback_object;
+    VkDebugUtilsMessageSeverityFlagsEXT debug_utils_msg_flags;
+    VkDebugUtilsMessageTypeFlagsEXT debug_utils_msg_type;
+    PFN_vkDebugUtilsMessengerCallbackEXT debug_utils_callback_function_ptr;
+
+    void *pUserData;
+
+    bool IsUtils() { return ((callback_status & DEBUG_CALLBACK_UTILS) != 0); }
+    bool IsDefault() { return ((callback_status & DEBUG_CALLBACK_DEFAULT) != 0); }
+    bool IsInstance() { return ((callback_status & DEBUG_CALLBACK_INSTANCE) != 0); }
+} VkLayerDbgFunctionState;
+
+// TODO: Could be autogenerated for the specific handles for extra type safety...
+template <typename HANDLE_T>
+static inline uint64_t HandleToUint64(HANDLE_T h) {
+    return CastToUint64<HANDLE_T>(h);
+}
+
+static inline uint64_t HandleToUint64(uint64_t h) { return h; }
+
+// Data we store per label for logging
+struct LoggingLabel {
+    std::string name;
+    std::array<float, 4> color;
+
+    void Reset() { *this = LoggingLabel(); }
+    bool Empty() const { return name.empty(); }
+
+    VkDebugUtilsLabelEXT Export() const {
+        auto out = lvl_init_struct<VkDebugUtilsLabelEXT>();
+        out.pLabelName = name.c_str();
+        std::copy(color.cbegin(), color.cend(), out.color);
+        return out;
+    };
+
+    LoggingLabel() : name(), color({{0.f, 0.f, 0.f, 0.f}}) {}
+    LoggingLabel(const VkDebugUtilsLabelEXT *label_info) {
+        if (label_info && label_info->pLabelName) {
+            name = label_info->pLabelName;
+            std::copy_n(std::begin(label_info->color), 4, color.begin());
+        } else {
+            Reset();
+        }
+    }
+
+    LoggingLabel(const LoggingLabel &) = default;
+    LoggingLabel &operator=(const LoggingLabel &) = default;
+    LoggingLabel &operator=(LoggingLabel &&) = default;
+    LoggingLabel(LoggingLabel &&) = default;
+
+    template <typename Name, typename Vec>
+    LoggingLabel(Name &&name_, Vec &&vec_) : name(std::forward<Name>(name_)), color(std::forward<Vec>(vec_)) {}
+};
+
+struct LoggingLabelState {
+    std::vector<LoggingLabel> labels;
+    LoggingLabel insert_label;
+
+    // Export the labels, but in reverse order since we want the most recent at the top.
+    std::vector<VkDebugUtilsLabelEXT> Export() const {
+        size_t count = labels.size() + (insert_label.Empty() ? 0 : 1);
+        std::vector<VkDebugUtilsLabelEXT> out(count);
+
+        if (!count) return out;
+
+        size_t index = count - 1;
+        if (!insert_label.Empty()) {
+            out[index--] = insert_label.Export();
+        }
+        for (const auto &label : labels) {
+            out[index--] = label.Export();
+        }
+        return out;
+    }
+};
+
+static inline int string_sprintf(std::string *output, const char *fmt, ...);
+
+typedef struct _debug_report_data {
+    std::vector<VkLayerDbgFunctionState> debug_callback_list;
+    VkDebugUtilsMessageSeverityFlagsEXT active_severities{0};
+    VkDebugUtilsMessageTypeFlagsEXT active_types{0};
+    bool queueLabelHasInsert{false};
+    bool cmdBufLabelHasInsert{false};
+    std::unordered_map<uint64_t, std::string> debugObjectNameMap;
+    std::unordered_map<uint64_t, std::string> debugUtilsObjectNameMap;
+    std::unordered_map<VkQueue, std::unique_ptr<LoggingLabelState>> debugUtilsQueueLabels;
+    std::unordered_map<VkCommandBuffer, std::unique_ptr<LoggingLabelState>> debugUtilsCmdBufLabels;
+    // This mutex is defined as mutable since the normal usage for a debug report object is as 'const'. The mutable keyword allows
+    // the layers to continue this pattern, but also allows them to use/change this specific member for synchronization purposes.
+    mutable std::mutex debug_report_mutex;
+    const void *instance_pnext_chain{};
+
+    void DebugReportSetUtilsObjectName(const VkDebugUtilsObjectNameInfoEXT *pNameInfo) {
+        std::unique_lock<std::mutex> lock(debug_report_mutex);
+        if (pNameInfo->pObjectName) {
+            debugUtilsObjectNameMap[pNameInfo->objectHandle] = pNameInfo->pObjectName;
+        } else {
+            debugUtilsObjectNameMap.erase(pNameInfo->objectHandle);
+        }
+    }
+
+    void DebugReportSetMarkerObjectName(const VkDebugMarkerObjectNameInfoEXT *pNameInfo) {
+        std::unique_lock<std::mutex> lock(debug_report_mutex);
+        if (pNameInfo->pObjectName) {
+            debugObjectNameMap[pNameInfo->object] = pNameInfo->pObjectName;
+        } else {
+            debugObjectNameMap.erase(pNameInfo->object);
+        }
+    }
+
+    std::string DebugReportGetUtilsObjectName(const uint64_t object) const {
+        std::string label = "";
+        const auto utils_name_iter = debugUtilsObjectNameMap.find(object);
+        if (utils_name_iter != debugUtilsObjectNameMap.end()) {
+            label = utils_name_iter->second;
+        }
+        return label;
+    }
+
+    std::string DebugReportGetMarkerObjectName(const uint64_t object) const {
+        std::string label = "";
+        const auto marker_name_iter = debugObjectNameMap.find(object);
+        if (marker_name_iter != debugObjectNameMap.end()) {
+            label = marker_name_iter->second;
+        }
+        return label;
+    }
+
+    std::string FormatHandle(const char *handle_type_name, uint64_t handle) const {
+        std::string handle_name = DebugReportGetUtilsObjectName(handle);
+        if (handle_name.empty()) {
+            handle_name = DebugReportGetMarkerObjectName(handle);
+        }
+
+        std::string ret;
+        string_sprintf(&ret, "%s 0x%" PRIxLEAST64 "[%s]", handle_type_name, handle, handle_name.c_str());
+        return ret;
+    }
+
+    std::string FormatHandle(const VulkanTypedHandle &handle) const {
+        return FormatHandle(object_string[handle.type], handle.handle);
+    }
+
+    template <typename HANDLE_T>
+    std::string FormatHandle(HANDLE_T handle) const {
+        return FormatHandle(VkHandleInfo<HANDLE_T>::Typename(), HandleToUint64(handle));
+    }
+
+} debug_report_data;
+
+template debug_report_data *GetLayerDataPtr<debug_report_data>(void *data_key,
+                                                               std::unordered_map<void *, debug_report_data *> &data_map);
+
+static inline void DebugReportFlagsToAnnotFlags(VkDebugReportFlagsEXT dr_flags, bool default_flag_is_spec,
+                                                VkDebugUtilsMessageSeverityFlagsEXT *da_severity,
+                                                VkDebugUtilsMessageTypeFlagsEXT *da_type) {
+    *da_severity = 0;
+    *da_type = 0;
+    // If it's explicitly listed as a performance warning, treat it as a performance message. Otherwise, treat it as a validation
+    // issue.
+    if ((dr_flags & VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT) != 0) {
+        *da_type |= VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;
+        *da_severity |= VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT;
+    }
+    if ((dr_flags & VK_DEBUG_REPORT_DEBUG_BIT_EXT) != 0) {
+        *da_type |= VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT;
+        *da_severity |= VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT;
+    }
+    if ((dr_flags & VK_DEBUG_REPORT_INFORMATION_BIT_EXT) != 0) {
+        *da_type |= VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT;
+        *da_severity |= VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT;
+    }
+    if ((dr_flags & VK_DEBUG_REPORT_WARNING_BIT_EXT) != 0) {
+        *da_type |= VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT;
+        *da_severity |= VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT;
+    }
+    if ((dr_flags & VK_DEBUG_REPORT_ERROR_BIT_EXT) != 0) {
+        *da_type |= VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT;
+        *da_severity |= VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
+    }
+}
+
+// Forward Declarations
+static inline bool debug_log_msg(const debug_report_data *debug_data, VkFlags msg_flags, VkDebugReportObjectTypeEXT object_type,
+                                 uint64_t src_object, size_t location, const char *layer_prefix, const char *message,
+                                 const char *text_vuid);
+
+static void SetDebugUtilsSeverityFlags(std::vector<VkLayerDbgFunctionState> &callbacks, debug_report_data *debug_data) {
+    // For all callbacks in the list, accumulate their complete set of severities and types
+    for (auto item : callbacks) {
+        if (item.IsUtils()) {
+            debug_data->active_severities |= item.debug_utils_msg_flags;
+            debug_data->active_types |= item.debug_utils_msg_type;
+        } else {
+            VkFlags severities = 0;
+            VkFlags types = 0;
+            DebugReportFlagsToAnnotFlags(item.debug_report_msg_flags, true, &severities, &types);
+            debug_data->active_severities |= severities;
+            debug_data->active_types |= types;
+        }
+    }
+}
+
+static inline void RemoveDebugUtilsCallback(debug_report_data *debug_data, std::vector<VkLayerDbgFunctionState> &callbacks,
+                                            uint64_t callback) {
+    auto item = callbacks.begin();
+    for (item = callbacks.begin(); item != callbacks.end(); item++) {
+        if (item->IsUtils()) {
+            if (item->debug_utils_callback_object == CastToHandle<VkDebugUtilsMessengerEXT>(callback)) break;
+        } else {
+            if (item->debug_report_callback_object == CastToHandle<VkDebugReportCallbackEXT>(callback)) break;
+        }
+    }
+    if (item != callbacks.end()) {
+        callbacks.erase(item);
+    }
+    SetDebugUtilsSeverityFlags(callbacks, debug_data);
+}
+
+// Deletes all debug callback function structs
+static inline void RemoveAllMessageCallbacks(debug_report_data *debug_data, std::vector<VkLayerDbgFunctionState> &callbacks) {
+    callbacks.clear();
+}
+
+static inline bool debug_log_msg(const debug_report_data *debug_data, VkFlags msg_flags, VkDebugReportObjectTypeEXT object_type,
+                                 uint64_t src_object, size_t location, const char *layer_prefix, const char *message,
+                                 const char *text_vuid) {
+    bool bail = false;
+
+    VkDebugUtilsMessageSeverityFlagsEXT severity;
+    VkDebugUtilsMessageTypeFlagsEXT types;
+    VkDebugUtilsMessengerCallbackDataEXT callback_data;
+    VkDebugUtilsObjectNameInfoEXT object_name_info;
+
+    // Convert the info to the VK_EXT_debug_utils form in case we need it.
+    DebugReportFlagsToAnnotFlags(msg_flags, true, &severity, &types);
+    object_name_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
+    object_name_info.pNext = NULL;
+    object_name_info.objectType = convertDebugReportObjectToCoreObject(object_type);
+    object_name_info.objectHandle = (uint64_t)(uintptr_t)src_object;
+    object_name_info.pObjectName = NULL;
+    std::string object_label = {};
+
+    callback_data.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT;
+    callback_data.pNext = NULL;
+    callback_data.flags = 0;
+    callback_data.pMessageIdName = text_vuid;
+    callback_data.messageIdNumber = 0;  // deprecated, validation layers use only the pMessageIdName
+    callback_data.pMessage = message;
+    callback_data.queueLabelCount = 0;
+    callback_data.pQueueLabels = NULL;
+    callback_data.cmdBufLabelCount = 0;
+    callback_data.pCmdBufLabels = NULL;
+    callback_data.objectCount = 1;
+    callback_data.pObjects = &object_name_info;
+
+    std::vector<VkDebugUtilsLabelEXT> queue_labels;
+    std::vector<VkDebugUtilsLabelEXT> cmd_buf_labels;
+    std::string new_debug_report_message = "";
+    std::ostringstream oss;
+
+    if (0 != src_object) {
+        oss << "Object: 0x" << std::hex << src_object;
+        // If this is a queue, add any queue labels to the callback data.
+        if (VK_OBJECT_TYPE_QUEUE == object_name_info.objectType) {
+            auto label_iter = debug_data->debugUtilsQueueLabels.find(reinterpret_cast<VkQueue>(src_object));
+            if (label_iter != debug_data->debugUtilsQueueLabels.end()) {
+                queue_labels = label_iter->second->Export();
+                callback_data.queueLabelCount = static_cast<uint32_t>(queue_labels.size());
+                callback_data.pQueueLabels = queue_labels.empty() ? nullptr : queue_labels.data();
+            }
+            // If this is a command buffer, add any command buffer labels to the callback data.
+        } else if (VK_OBJECT_TYPE_COMMAND_BUFFER == object_name_info.objectType) {
+            auto label_iter = debug_data->debugUtilsCmdBufLabels.find(reinterpret_cast<VkCommandBuffer>(src_object));
+            if (label_iter != debug_data->debugUtilsCmdBufLabels.end()) {
+                cmd_buf_labels = label_iter->second->Export();
+                callback_data.cmdBufLabelCount = static_cast<uint32_t>(cmd_buf_labels.size());
+                callback_data.pCmdBufLabels = cmd_buf_labels.empty() ? nullptr : cmd_buf_labels.data();
+            }
+        }
+
+        // Look for any debug utils or marker names to use for this object
+        object_label = debug_data->DebugReportGetUtilsObjectName(src_object);
+        if (object_label.empty()) {
+            object_label = debug_data->DebugReportGetMarkerObjectName(src_object);
+        }
+        if (!object_label.empty()) {
+            object_name_info.pObjectName = object_label.c_str();
+            oss << " (Name = " << object_label << " : Type = ";
+        } else {
+            oss << " (Type = ";
+        }
+        oss << std::to_string(object_type) << ")";
+    } else {
+        oss << "Object: VK_NULL_HANDLE (Type = " << std::to_string(object_type) << ")";
+    }
+    new_debug_report_message += oss.str();
+    new_debug_report_message += " | ";
+    new_debug_report_message += message;
+
+    const auto callback_list = &debug_data->debug_callback_list;
+
+    // We only output to default callbacks if there are no non-default callbacks
+    bool use_default_callbacks = true;
+    for (auto current_callback : *callback_list) {
+        use_default_callbacks &= current_callback.IsDefault();
+    }
+
+    for (auto current_callback : *callback_list) {
+        // Skip callback if it's a default callback and there are non-default callbacks present
+        if (current_callback.IsDefault() && !use_default_callbacks) continue;
+
+        // VK_EXT_debug_report callback (deprecated)
+        if (!current_callback.IsUtils() && (current_callback.debug_report_msg_flags & msg_flags)) {
+            if (text_vuid != nullptr) {
+                // If a text vuid is supplied for the old debug report extension, prepend it to the message string
+                new_debug_report_message.insert(0, " ] ");
+                new_debug_report_message.insert(0, text_vuid);
+                new_debug_report_message.insert(0, " [ ");
+            }
+            if (current_callback.debug_report_callback_function_ptr(msg_flags, object_type, src_object, location, 0, layer_prefix,
+                                                                    new_debug_report_message.c_str(), current_callback.pUserData)) {
+                bail = true;
+            }
+            // VK_EXT_debug_utils callback
+        } else if (current_callback.IsUtils() && (current_callback.debug_utils_msg_flags & severity) &&
+                   (current_callback.debug_utils_msg_type & types)) {
+            if (current_callback.debug_utils_callback_function_ptr(static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>(severity),
+                                                                   types, &callback_data, current_callback.pUserData)) {
+                bail = true;
+            }
+        }
+    }
+    return bail;
+}
+
+static inline void DebugAnnotFlagsToReportFlags(VkDebugUtilsMessageSeverityFlagBitsEXT da_severity,
+                                                VkDebugUtilsMessageTypeFlagsEXT da_type, VkDebugReportFlagsEXT *dr_flags) {
+    *dr_flags = 0;
+
+    if ((da_severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) != 0) {
+        *dr_flags |= VK_DEBUG_REPORT_ERROR_BIT_EXT;
+    } else if ((da_severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT) != 0) {
+        if ((da_type & VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT) != 0) {
+            *dr_flags |= VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT;
+        } else {
+            *dr_flags |= VK_DEBUG_REPORT_WARNING_BIT_EXT;
+        }
+    } else if ((da_severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT) != 0) {
+        *dr_flags |= VK_DEBUG_REPORT_INFORMATION_BIT_EXT;
+    } else if ((da_severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT) != 0) {
+        *dr_flags |= VK_DEBUG_REPORT_DEBUG_BIT_EXT;
+    }
+}
+
+static inline void layer_debug_utils_destroy_instance(debug_report_data *debug_data) {
+    if (debug_data) {
+        std::unique_lock<std::mutex> lock(debug_data->debug_report_mutex);
+        RemoveAllMessageCallbacks(debug_data, debug_data->debug_callback_list);
+        lock.unlock();
+        delete (debug_data);
+    }
+}
+
+template <typename T>
+static inline void layer_destroy_callback(debug_report_data *debug_data, T callback, const VkAllocationCallbacks *allocator) {
+    std::unique_lock<std::mutex> lock(debug_data->debug_report_mutex);
+    RemoveDebugUtilsCallback(debug_data, debug_data->debug_callback_list, CastToUint64(callback));
+}
+
+template <typename TCreateInfo, typename TCallback>
+static inline void layer_create_callback(DebugCallbackStatusFlags callback_status, debug_report_data *debug_data,
+                                         const TCreateInfo *create_info, const VkAllocationCallbacks *allocator,
+                                         TCallback *callback) {
+    std::unique_lock<std::mutex> lock(debug_data->debug_report_mutex);
+
+    debug_data->debug_callback_list.emplace_back(VkLayerDbgFunctionState());
+    auto &callback_state = debug_data->debug_callback_list.back();
+    callback_state.callback_status = callback_status;
+    callback_state.pUserData = create_info->pUserData;
+
+    if (callback_state.IsUtils()) {
+        auto utils_create_info = reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>(create_info);
+        auto utils_callback = reinterpret_cast<VkDebugUtilsMessengerEXT *>(callback);
+        if (!(*utils_callback)) {
+            // Internally constructed default callbacks have no handle -- so use struct address as unique handle
+            *utils_callback = reinterpret_cast<VkDebugUtilsMessengerEXT>(&callback_state);
+        }
+        callback_state.debug_utils_callback_object = *utils_callback;
+        callback_state.debug_utils_callback_function_ptr = utils_create_info->pfnUserCallback;
+        callback_state.debug_utils_msg_flags = utils_create_info->messageSeverity;
+        callback_state.debug_utils_msg_type = utils_create_info->messageType;
+    } else {  // Debug report callback
+        auto report_create_info = reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>(create_info);
+        auto report_callback = reinterpret_cast<VkDebugReportCallbackEXT *>(callback);
+        if (!(*report_callback)) {
+            // Internally constructed default callbacks have no handle -- so use struct address as unique handle
+            *report_callback = reinterpret_cast<VkDebugReportCallbackEXT>(&callback_state);
+        }
+        callback_state.debug_report_callback_object = *report_callback;
+        callback_state.debug_report_callback_function_ptr = report_create_info->pfnCallback;
+        callback_state.debug_report_msg_flags = report_create_info->flags;
+    }
+
+    SetDebugUtilsSeverityFlags(debug_data->debug_callback_list, debug_data);
+}
+
+static inline VkResult layer_create_messenger_callback(debug_report_data *debug_data, bool default_callback,
+                                                       const VkDebugUtilsMessengerCreateInfoEXT *create_info,
+                                                       const VkAllocationCallbacks *allocator,
+                                                       VkDebugUtilsMessengerEXT *messenger) {
+    layer_create_callback((DEBUG_CALLBACK_UTILS | (default_callback ? DEBUG_CALLBACK_DEFAULT : 0)), debug_data, create_info,
+                          allocator, messenger);
+    return VK_SUCCESS;
+}
+
+static inline VkResult layer_create_report_callback(debug_report_data *debug_data, bool default_callback,
+                                                    const VkDebugReportCallbackCreateInfoEXT *create_info,
+                                                    const VkAllocationCallbacks *allocator, VkDebugReportCallbackEXT *callback) {
+    layer_create_callback((default_callback ? DEBUG_CALLBACK_DEFAULT : 0), debug_data, create_info, allocator, callback);
+    return VK_SUCCESS;
+}
+
+static inline void ActivateInstanceDebugCallbacks(debug_report_data *debug_data) {
+    auto current = debug_data->instance_pnext_chain;
+    for (;;) {
+        auto create_info = lvl_find_in_chain<VkDebugUtilsMessengerCreateInfoEXT>(current);
+        if (!create_info) break;
+        current = create_info->pNext;
+        VkDebugUtilsMessengerEXT utils_callback{};
+        layer_create_callback((DEBUG_CALLBACK_UTILS | DEBUG_CALLBACK_INSTANCE), debug_data, create_info, nullptr, &utils_callback);
+    }
+    for (;;) {
+        auto create_info = lvl_find_in_chain<VkDebugReportCallbackCreateInfoEXT>(current);
+        if (!create_info) break;
+        current = create_info->pNext;
+        VkDebugReportCallbackEXT report_callback{};
+        layer_create_callback(DEBUG_CALLBACK_INSTANCE, debug_data, create_info, nullptr, &report_callback);
+    }
+}
+
+static inline void DeactivateInstanceDebugCallbacks(debug_report_data *debug_data) {
+    if (!lvl_find_in_chain<VkDebugUtilsMessengerCreateInfoEXT>(debug_data->instance_pnext_chain) &&
+        !lvl_find_in_chain<VkDebugReportCallbackCreateInfoEXT>(debug_data->instance_pnext_chain))
+        return;
+    std::vector<VkDebugUtilsMessengerEXT> instance_utils_callback_handles{};
+    std::vector<VkDebugReportCallbackEXT> instance_report_callback_handles{};
+    for (auto item : debug_data->debug_callback_list) {
+        if (item.IsInstance()) {
+            if (item.IsUtils()) {
+                instance_utils_callback_handles.push_back(item.debug_utils_callback_object);
+            } else {
+                instance_report_callback_handles.push_back(item.debug_report_callback_object);
+            }
+        }
+    }
+    for (auto item : instance_utils_callback_handles) {
+        layer_destroy_callback(debug_data, item, nullptr);
+    }
+    for (auto item : instance_report_callback_handles) {
+        layer_destroy_callback(debug_data, item, nullptr);
+    }
+}
+
+#ifndef WIN32
+static inline int string_sprintf(std::string *output, const char *fmt, ...) __attribute__((format(printf, 2, 3)));
+#endif
+static inline int string_sprintf(std::string *output, const char *fmt, ...) {
+    std::string &formatted = *output;
+    va_list argptr;
+    va_start(argptr, fmt);
+    int reserve = vsnprintf(nullptr, 0, fmt, argptr);
+    va_end(argptr);
+    formatted.reserve(reserve + 1);  // Set the storage length long enough to hold the output + null
+    formatted.resize(reserve);       // Set the *logical* length to be what vsnprintf will write
+    va_start(argptr, fmt);
+    int result = vsnprintf((char *)formatted.data(), formatted.capacity(), fmt, argptr);
+    va_end(argptr);
+    assert(result == reserve);
+    assert((formatted.size() == strlen(formatted.c_str())));
+    return result;
+}
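+
+// Illustrative usage sketch (not part of the upstream header; 'msg', 'binding' and
+// 'size' are hypothetical caller-side names): string_sprintf formats directly into a
+// std::string, e.g.
+//     std::string msg;
+//     string_sprintf(&msg, "binding %u exceeds pool size %zu", binding, size);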
+
+#ifdef WIN32
+static inline int vasprintf(char **strp, char const *fmt, va_list ap) {
+    *strp = nullptr;
+    int size = _vscprintf(fmt, ap);
+    if (size >= 0) {
+        *strp = (char *)malloc(size + 1);
+        if (!*strp) {
+            return -1;
+        }
+        _vsnprintf(*strp, size + 1, fmt, ap);
+    }
+    return size;
+}
+#endif
+
+// Output log message via DEBUG_REPORT. Takes format and variable arg list so that output string is only computed if a message
+// needs to be logged
+#ifndef WIN32
+static inline bool log_msg(const debug_report_data *debug_data, VkFlags msg_flags, VkDebugReportObjectTypeEXT object_type,
+                           uint64_t src_object, const std::string &vuid_text, const char *format, ...)
+    __attribute__((format(printf, 6, 7)));
+#endif
+static inline bool log_msg(const debug_report_data *debug_data, VkFlags msg_flags, VkDebugReportObjectTypeEXT object_type,
+                           uint64_t src_object, const std::string &vuid_text, const char *format, ...) {
+    if (!debug_data) return false;
+    std::unique_lock<std::mutex> lock(debug_data->debug_report_mutex);
+    VkFlags local_severity = 0;
+    VkFlags local_type = 0;
+    DebugReportFlagsToAnnotFlags(msg_flags, true, &local_severity, &local_type);
+    if (!debug_data || !(debug_data->active_severities & local_severity) || !(debug_data->active_types & local_type)) {
+        // Message is not wanted
+        return false;
+    }
+
+    va_list argptr;
+    va_start(argptr, format);
+    char *str;
+    if (-1 == vasprintf(&str, format, argptr)) {
+        // On failure, glibc vasprintf leaves str undefined
+        str = nullptr;
+    }
+    va_end(argptr);
+
+    std::string str_plus_spec_text(str ? str : "Allocation failure");
+
+    // Append the spec error text to the error message, unless it's an UNASSIGNED or UNDEFINED vuid
+    if ((vuid_text.find("UNASSIGNED-") == std::string::npos) && (vuid_text.find(kVUIDUndefined) == std::string::npos)) {
+        // Linear search makes no assumptions about the layout of the string table. This is not fast, but it does not need to be at
+        // this point in the error reporting path
+        uint32_t num_vuids = sizeof(vuid_spec_text) / sizeof(vuid_spec_text_pair);
+        const char *spec_text = nullptr;
+        for (uint32_t i = 0; i < num_vuids; i++) {
+            if (0 == strcmp(vuid_text.c_str(), vuid_spec_text[i].vuid)) {
+                spec_text = vuid_spec_text[i].spec_text;
+                break;
+            }
+        }
+
+        if (nullptr == spec_text) {
+            // If this happens, you've hit a VUID string that isn't defined in the spec's json file
+            // Try running 'vk_validation_stats -c' to look for invalid VUID strings in the repo code
+            assert(0);
+        } else {
+            str_plus_spec_text += " The Vulkan spec states: ";
+            str_plus_spec_text += spec_text;
+        }
+    }
+
+    // Append layer prefix with VUID string, pass in recovered legacy numerical VUID
+    bool result = debug_log_msg(debug_data, msg_flags, object_type, src_object, 0, "Validation", str_plus_spec_text.c_str(),
+                                vuid_text.c_str());
+
+    free(str);
+    return result;
+}
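+
+// Illustrative usage sketch (the handle, VUID, and message below are hypothetical):
+//     log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+//             HandleToUint64(device), "UNASSIGNED-Example-Check",
+//             "%s: example validation failure.", "vkCmdDraw()");
+// VUIDs containing "UNASSIGNED-" (or equal to kVUIDUndefined) skip the spec-text lookup above.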
+
+static inline VKAPI_ATTR VkBool32 VKAPI_CALL report_log_callback(VkFlags msg_flags, VkDebugReportObjectTypeEXT obj_type,
+                                                                 uint64_t src_object, size_t location, int32_t msg_code,
+                                                                 const char *layer_prefix, const char *message, void *user_data) {
+    std::ostringstream msg_buffer;
+    char msg_flag_string[30];
+
+    PrintMessageFlags(msg_flags, msg_flag_string);
+
+    msg_buffer << layer_prefix << "(" << msg_flag_string << "): msg_code: " << msg_code << ": " << message << "\n";
+    const std::string tmp = msg_buffer.str();
+    const char *cstr = tmp.c_str();
+
+    fprintf((FILE *)user_data, "%s", cstr);
+    fflush((FILE *)user_data);
+
+#if defined __ANDROID__
+    LOGCONSOLE("%s", cstr);
+#endif
+
+    return false;
+}
+
+static inline VKAPI_ATTR VkBool32 VKAPI_CALL report_win32_debug_output_msg(VkFlags msg_flags, VkDebugReportObjectTypeEXT obj_type,
+                                                                           uint64_t src_object, size_t location, int32_t msg_code,
+                                                                           const char *layer_prefix, const char *message,
+                                                                           void *user_data) {
+#ifdef WIN32
+    char msg_flag_string[30];
+    char buf[2048];
+
+    PrintMessageFlags(msg_flags, msg_flag_string);
+    _snprintf(buf, sizeof(buf) - 1, "%s (%s): msg_code: %d: %s\n", layer_prefix, msg_flag_string, msg_code, message);
+
+    OutputDebugString(buf);
+#endif
+
+    return false;
+}
+
+static inline VKAPI_ATTR VkBool32 VKAPI_CALL DebugBreakCallback(VkFlags msgFlags, VkDebugReportObjectTypeEXT obj_type,
+                                                                uint64_t src_object, size_t location, int32_t msg_code,
+                                                                const char *layer_prefix, const char *message, void *user_data) {
+#ifdef WIN32
+    DebugBreak();
+#else
+    raise(SIGTRAP);
+#endif
+
+    return false;
+}
+
+static inline VKAPI_ATTR VkBool32 VKAPI_CALL MessengerBreakCallback(VkDebugUtilsMessageSeverityFlagBitsEXT message_severity,
+                                                                    VkDebugUtilsMessageTypeFlagsEXT message_type,
+                                                                    const VkDebugUtilsMessengerCallbackDataEXT *callback_data,
+                                                                    void *user_data) {
+#ifdef WIN32
+    DebugBreak();
+#else
+    raise(SIGTRAP);
+#endif
+
+    return false;
+}
+
+static inline VKAPI_ATTR VkBool32 VKAPI_CALL messenger_log_callback(VkDebugUtilsMessageSeverityFlagBitsEXT message_severity,
+                                                                    VkDebugUtilsMessageTypeFlagsEXT message_type,
+                                                                    const VkDebugUtilsMessengerCallbackDataEXT *callback_data,
+                                                                    void *user_data) {
+    std::ostringstream msg_buffer;
+    char msg_severity[30];
+    char msg_type[30];
+
+    PrintMessageSeverity(message_severity, msg_severity);
+    PrintMessageType(message_type, msg_type);
+
+    msg_buffer << callback_data->pMessageIdName << "(" << msg_severity << " / " << msg_type
+               << "): msgNum: " << callback_data->messageIdNumber << " - " << callback_data->pMessage << "\n";
+    msg_buffer << "    Objects: " << callback_data->objectCount << "\n";
+    for (uint32_t obj = 0; obj < callback_data->objectCount; ++obj) {
+        msg_buffer << "        [" << obj << "] " << std::hex << std::showbase
+                   << HandleToUint64(callback_data->pObjects[obj].objectHandle) << ", type: " << std::dec << std::noshowbase
+                   << callback_data->pObjects[obj].objectType
+                   << ", name: " << (callback_data->pObjects[obj].pObjectName ? callback_data->pObjects[obj].pObjectName : "NULL")
+                   << "\n";
+    }
+    const std::string tmp = msg_buffer.str();
+    const char *cstr = tmp.c_str();
+    fprintf((FILE *)user_data, "%s", cstr);
+    fflush((FILE *)user_data);
+
+#if defined __ANDROID__
+    LOGCONSOLE("%s", cstr);
+#endif
+
+    return false;
+}
+
+static inline VKAPI_ATTR VkBool32 VKAPI_CALL messenger_win32_debug_output_msg(
+    VkDebugUtilsMessageSeverityFlagBitsEXT message_severity, VkDebugUtilsMessageTypeFlagsEXT message_type,
+    const VkDebugUtilsMessengerCallbackDataEXT *callback_data, void *user_data) {
+#ifdef WIN32
+    std::ostringstream msg_buffer;
+    char msg_severity[30];
+    char msg_type[30];
+
+    PrintMessageSeverity(message_severity, msg_severity);
+    PrintMessageType(message_type, msg_type);
+
+    msg_buffer << callback_data->pMessageIdName << "(" << msg_severity << " / " << msg_type
+               << "): msgNum: " << callback_data->messageIdNumber << " - " << callback_data->pMessage << "\n";
+    msg_buffer << "    Objects: " << callback_data->objectCount << "\n";
+
+    for (uint32_t obj = 0; obj < callback_data->objectCount; ++obj) {
+        msg_buffer << "       [" << obj << "]  " << std::hex << std::showbase
+                   << HandleToUint64(callback_data->pObjects[obj].objectHandle) << ", type: " << std::dec << std::noshowbase
+                   << callback_data->pObjects[obj].objectType
+                   << ", name: " << (callback_data->pObjects[obj].pObjectName ? callback_data->pObjects[obj].pObjectName : "NULL")
+                   << "\n";
+    }
+    const std::string tmp = msg_buffer.str();
+    const char *cstr = tmp.c_str();
+    OutputDebugString(cstr);
+#endif
+
+    return false;
+}
+
+template <typename Map>
+static LoggingLabelState *GetLoggingLabelState(Map *map, typename Map::key_type key, bool insert) {
+    auto iter = map->find(key);
+    LoggingLabelState *label_state = nullptr;
+    if (iter == map->end()) {
+        if (insert) {
+            // Add a label state if not present
+            auto inserted = map->insert(std::make_pair(key, std::unique_ptr<LoggingLabelState>(new LoggingLabelState())));
+            assert(inserted.second);
+            iter = inserted.first;
+            label_state = iter->second.get();
+        }
+    } else {
+        label_state = iter->second.get();
+    }
+    return label_state;
+}
+
+static inline void BeginQueueDebugUtilsLabel(debug_report_data *report_data, VkQueue queue,
+                                             const VkDebugUtilsLabelEXT *label_info) {
+    std::unique_lock<std::mutex> lock(report_data->debug_report_mutex);
+    if (nullptr != label_info && nullptr != label_info->pLabelName) {
+        auto *label_state = GetLoggingLabelState(&report_data->debugUtilsQueueLabels, queue, /* insert */ true);
+        assert(label_state);
+        label_state->labels.push_back(LoggingLabel(label_info));
+
+        // TODO: Determine if this is the correct semantics for insert label vs. begin/end, preserving existing semantics for now
+        label_state->insert_label.Reset();
+    }
+}
+
+static inline void EndQueueDebugUtilsLabel(debug_report_data *report_data, VkQueue queue) {
+    std::unique_lock<std::mutex> lock(report_data->debug_report_mutex);
+    auto *label_state = GetLoggingLabelState(&report_data->debugUtilsQueueLabels, queue, /* insert */ false);
+    if (label_state) {
+        // Pop the normal item
+        if (!label_state->labels.empty()) {
+            label_state->labels.pop_back();
+        }
+
+        // TODO: Determine if this is the correct semantics for insert label vs. begin/end, preserving existing semantics for now
+        label_state->insert_label.Reset();
+    }
+}
+
+static inline void InsertQueueDebugUtilsLabel(debug_report_data *report_data, VkQueue queue,
+                                              const VkDebugUtilsLabelEXT *label_info) {
+    std::unique_lock<std::mutex> lock(report_data->debug_report_mutex);
+    auto *label_state = GetLoggingLabelState(&report_data->debugUtilsQueueLabels, queue, /* insert */ true);
+
+    // TODO: Determine if this is the correct semantics for insert label vs. begin/end, preserving existing semantics for now
+    label_state->insert_label = LoggingLabel(label_info);
+}
+
+static inline void BeginCmdDebugUtilsLabel(debug_report_data *report_data, VkCommandBuffer command_buffer,
+                                           const VkDebugUtilsLabelEXT *label_info) {
+    std::unique_lock<std::mutex> lock(report_data->debug_report_mutex);
+    if (nullptr != label_info && nullptr != label_info->pLabelName) {
+        auto *label_state = GetLoggingLabelState(&report_data->debugUtilsCmdBufLabels, command_buffer, /* insert */ true);
+        assert(label_state);
+        label_state->labels.push_back(LoggingLabel(label_info));
+
+        // TODO: Determine if this is the correct semantics for insert label vs. begin/end, preserving existing semantics for now
+        label_state->insert_label.Reset();
+    }
+}
+
+static inline void EndCmdDebugUtilsLabel(debug_report_data *report_data, VkCommandBuffer command_buffer) {
+    std::unique_lock<std::mutex> lock(report_data->debug_report_mutex);
+    auto *label_state = GetLoggingLabelState(&report_data->debugUtilsCmdBufLabels, command_buffer, /* insert */ false);
+    if (label_state) {
+        // Pop the normal item
+        if (!label_state->labels.empty()) {
+            label_state->labels.pop_back();
+        }
+
+        // TODO: Determine if this is the correct semantics for insert label vs. begin/end, preserving existing semantics for now
+        label_state->insert_label.Reset();
+    }
+}
+
+static inline void InsertCmdDebugUtilsLabel(debug_report_data *report_data, VkCommandBuffer command_buffer,
+                                            const VkDebugUtilsLabelEXT *label_info) {
+    std::unique_lock<std::mutex> lock(report_data->debug_report_mutex);
+    auto *label_state = GetLoggingLabelState(&report_data->debugUtilsCmdBufLabels, command_buffer, /* insert */ true);
+    assert(label_state);
+
+    // TODO: Determine if this is the correct semantics for insert label vs. begin/end, preserving existing semantics for now
+    label_state->insert_label = LoggingLabel(label_info);
+}
+
+// Label tracking beyond a single command buffer scope is currently incorrect, and even once it is correct we still need to be able to clean up
+static inline void ResetCmdDebugUtilsLabel(debug_report_data *report_data, VkCommandBuffer command_buffer) {
+    std::unique_lock<std::mutex> lock(report_data->debug_report_mutex);
+    auto *label_state = GetLoggingLabelState(&report_data->debugUtilsCmdBufLabels, command_buffer, /* insert */ false);
+    if (label_state) {
+        label_state->labels.clear();
+        label_state->insert_label.Reset();
+    }
+}
+
+static inline void EraseCmdDebugUtilsLabel(debug_report_data *report_data, VkCommandBuffer command_buffer) {
+    report_data->debugUtilsCmdBufLabels.erase(command_buffer);
+}
+
+#endif  // LAYER_LOGGING_H
diff --git a/src/third_party/vulkan-validation-layers/src/layers/vk_layer_settings.txt b/src/third_party/vulkan-validation-layers/src/layers/vk_layer_settings.txt
new file mode 100644
index 0000000..211f6ee
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/vk_layer_settings.txt
@@ -0,0 +1,105 @@
+################################################################################
+#
+#  This file contains per-layer settings that configure layer behavior at
+#  execution time. Comments in this file are denoted with the "#" char.
+#  Settings lines are of the form:
+#      "<LayerIdentifier>.<SettingName> = <SettingValue>"
+#
+#  <LayerIdentifier> is typically the official layer name, minus the VK_LAYER
+#  prefix, converted to all lowercase -- i.e., for VK_LAYER_KHRONOS_validation,
+#  the layer identifier is 'khronos_validation'.
+#
+################################################################################
+################################################################################
+# Validation Layer Common Settings:
+# =================================
+#
+#   DEBUG_ACTION:
+#   =============
+#   <LayerIdentifier>.debug_action : This is an enum value indicating what
+#    action is to be taken when a layer wants to report information.
+#    Possible settings values are defined in the vk_layer.h header file.
+#    These settings are:
+#    VK_DBG_LAYER_ACTION_IGNORE - Take no action.
+#    VK_DBG_LAYER_ACTION_LOG_MSG - Log a text message to stdout or to a log file
+#       specified via the <LayerIdentifier>.log_filename setting (see below).
+#    VK_DBG_LAYER_ACTION_CALLBACK - Call user defined callback function(s) that
+#       have been registered via the VK_EXT_debug_report extension. Since the
+#       app must register the callback itself, this is a no-op for the settings file.
+#    VK_DBG_LAYER_ACTION_DEBUG_OUTPUT [Windows only] - Log a text message using the
+#       Windows OutputDebugString function -- messages will show up in the
+#       Visual Studio output window, for instance.
+#    VK_DBG_LAYER_ACTION_BREAK - Trigger a breakpoint.
+#
+#   REPORT_FLAGS:
+#   =============
+#   <LayerIdentifier>.report_flags : This is a comma-delimited list of options
+#    telling the layer what types of messages it should report back.
+#    Options are:
+#    info - Report informational messages.
+#    warn - Report warnings from using the API in a manner which may lead to
+#           undefined behavior or to warn the user of common trouble spots.
+#           A warning does NOT necessarily signify illegal application behavior.
+#    perf - Report using the API in a way that may cause suboptimal performance.
+#    error - Report errors in API usage.
+#    debug - For layer development. Report messages for debugging layer
+#            behavior.
+#
+#   LOG_FILENAME:
+#   =============
+#   <LayerIdentifier>.log_filename : output filename. Can be relative to the
+#      location of the vk_layer_settings.txt file, or an absolute path. If no
+#      filename is specified, or the filename has an invalid path, then stdout
+#      is used by default.
+#
+#   DISABLES:
+#   =============
+#   <LayerIdentifier>.disables : comma separated list of feature/flag/disable enums
+#      These can include VkValidationFeatureDisableEXT flags defined in the Vulkan
+#      specification, or ValidationCheckDisables enums defined in chassis.h.
+#      Effects of setting these flags are described in the specification (or the
+#      source code in the case of the ValidationCheckDisables). The most useful
+#      flags are briefly described here:
+#      VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT - disables handle wrapping.
+#          Disable this feature if you are running into crashes when authoring new extensions
+#          or developing new Vulkan objects/structures
+#      VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT - disables thread checks. It may
+#          help with performance to run with thread-checking disabled most of the time,
+#          enabling it occasionally for a quick sanity check, or when debugging difficult
+#          application behaviors.
+#      VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT - disables the main, heavy-duty
+#          validation checks. This may be valuable early in the development cycle to
+#          reduce validation output while correcting parameter/object usage errors.
+#      VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT - disables stateless parameter
+#          checks. This may not always be necessary late in a development cycle.
+#      VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT - disables object tracking.
+#          This may not always be necessary late in a development cycle.
+#
+#   ENABLES:
+#   =============
+#   <LayerIdentifier>.enables : comma separated list of feature enable enums
+#      These can include VkValidationFeatureEnableEXT flags defined in the Vulkan
+#      specification, where their effects are described.  The most useful
+#      flags are briefly described here:
+#      VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT - enables intrusive GPU-assisted
+#      shader validation in khronos validation layers
+#      VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT - enables best practices warning
+#      validation
+#
+
+# VK_LAYER_KHRONOS_validation Settings
+
+khronos_validation.debug_action = VK_DBG_LAYER_ACTION_LOG_MSG
+khronos_validation.report_flags = error,warn,perf
+khronos_validation.log_filename = stdout
+
+# Example entry showing how to disable threading checks and validation at DestroyPipeline time
+#khronos_validation.disables = VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT,VALIDATION_CHECK_DISABLE_DESTROY_PIPELINE
+
+# Example entry showing how to Enable GPU-Assisted Validation
+#khronos_validation.enables = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT,VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT
+
+# Example entry showing how to Enable Best Practices Validation
+#khronos_validation.enables = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT
+
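+# Illustrative entry showing an absolute log file location (the path below is a
+# placeholder; any location writable by the application should work)
+#khronos_validation.log_filename = /tmp/vk_khronos_validation.log
+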
+################################################################################
diff --git a/src/third_party/vulkan-validation-layers/src/layers/vk_layer_utils.cpp b/src/third_party/vulkan-validation-layers/src/layers/vk_layer_utils.cpp
new file mode 100644
index 0000000..2c5ca41
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/vk_layer_utils.cpp
@@ -0,0 +1,225 @@
+/* Copyright (c) 2015-2016, 2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2016, 2019 Valve Corporation
+ * Copyright (c) 2015-2016, 2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ *
+ */
+
+#include "vk_layer_utils.h"
+
+#include <string.h>
+#include <string>
+#include <map>
+#include <vector>
+
+#include "vulkan/vulkan.h"
+#include "vk_layer_config.h"
+
+static const uint8_t UTF8_ONE_BYTE_CODE = 0xC0;
+static const uint8_t UTF8_ONE_BYTE_MASK = 0xE0;
+static const uint8_t UTF8_TWO_BYTE_CODE = 0xE0;
+static const uint8_t UTF8_TWO_BYTE_MASK = 0xF0;
+static const uint8_t UTF8_THREE_BYTE_CODE = 0xF0;
+static const uint8_t UTF8_THREE_BYTE_MASK = 0xF8;
+static const uint8_t UTF8_DATA_BYTE_CODE = 0x80;
+static const uint8_t UTF8_DATA_BYTE_MASK = 0xC0;
+
+VK_LAYER_EXPORT VkStringErrorFlags vk_string_validate(const int max_length, const char *utf8) {
+    VkStringErrorFlags result = VK_STRING_ERROR_NONE;
+    int num_char_bytes = 0;
+    int i, j;
+
+    for (i = 0; i <= max_length; i++) {
+        if (utf8[i] == 0) {
+            break;
+        } else if (i == max_length) {
+            result = VK_STRING_ERROR_LENGTH;
+            break;
+        } else if ((utf8[i] >= 0xa) && (utf8[i] < 0x7f)) {
+            num_char_bytes = 0;
+        } else if ((utf8[i] & UTF8_ONE_BYTE_MASK) == UTF8_ONE_BYTE_CODE) {
+            num_char_bytes = 1;
+        } else if ((utf8[i] & UTF8_TWO_BYTE_MASK) == UTF8_TWO_BYTE_CODE) {
+            num_char_bytes = 2;
+        } else if ((utf8[i] & UTF8_THREE_BYTE_MASK) == UTF8_THREE_BYTE_CODE) {
+            num_char_bytes = 3;
+        } else {
+            result = VK_STRING_ERROR_BAD_DATA;
+        }
+
+        // Validate the following num_char_bytes of data
+        for (j = 0; (j < num_char_bytes) && (i < max_length); j++) {
+            if (++i == max_length) {
+                result |= VK_STRING_ERROR_LENGTH;
+                break;
+            }
+            if ((utf8[i] & UTF8_DATA_BYTE_MASK) != UTF8_DATA_BYTE_CODE) {
+                result |= VK_STRING_ERROR_BAD_DATA;
+            }
+        }
+    }
+    return result;
+}
+
+// Utility function for determining if a string is in a set of strings
+VK_LAYER_EXPORT bool white_list(const char *item, const std::set<std::string> &list) { return (list.find(item) != list.end()); }
+
+// Debug callbacks get created in three ways:
+//   o  Application-defined debug callbacks
+//   o  Through settings in a vk_layer_settings.txt file
+//   o  By default, if neither an app-defined debug callback nor a vk_layer_settings.txt file is present
+//
+// At layer initialization time, default logging callbacks are created to output layer error messages.
+// If a vk_layer_settings.txt file is present its settings will override any default settings.
+//
+// If a vk_layer_settings.txt file is present and an application defines a debug callback, both callbacks
+// will be active.  If no vk_layer_settings.txt file is present, creating an application-defined debug
+// callback will cause the default callbacks to be unregistered and removed.
+VK_LAYER_EXPORT void layer_debug_messenger_actions(debug_report_data *report_data, const VkAllocationCallbacks *pAllocator,
+                                                   const char *layer_identifier) {
+    VkDebugUtilsMessengerEXT messenger = VK_NULL_HANDLE;
+
+    std::string report_flags_key = layer_identifier;
+    std::string debug_action_key = layer_identifier;
+    std::string log_filename_key = layer_identifier;
+    report_flags_key.append(".report_flags");
+    debug_action_key.append(".debug_action");
+    log_filename_key.append(".log_filename");
+
+    // Initialize layer options
+    VkDebugReportFlagsEXT report_flags = GetLayerOptionFlags(report_flags_key, report_flags_option_definitions, 0);
+    VkLayerDbgActionFlags debug_action = GetLayerOptionFlags(debug_action_key, debug_actions_option_definitions, 0);
+    // Flag as default if these settings are not from a vk_layer_settings.txt file
+    VkDebugUtilsMessengerCreateInfoEXT dbgCreateInfo;
+    memset(&dbgCreateInfo, 0, sizeof(dbgCreateInfo));
+    dbgCreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
+    dbgCreateInfo.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT;
+    if (report_flags & VK_DEBUG_REPORT_ERROR_BIT_EXT) {
+        dbgCreateInfo.messageSeverity |= VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
+    }
+    if (report_flags & VK_DEBUG_REPORT_WARNING_BIT_EXT) {
+        dbgCreateInfo.messageSeverity |= VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT;
+    }
+    if (report_flags & VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT) {
+        dbgCreateInfo.messageSeverity |= VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT;
+        dbgCreateInfo.messageType |= VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;
+    }
+    if (report_flags & VK_DEBUG_REPORT_INFORMATION_BIT_EXT) {
+        dbgCreateInfo.messageSeverity |= VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT;
+    }
+    if (report_flags & VK_DEBUG_REPORT_DEBUG_BIT_EXT) {
+        dbgCreateInfo.messageSeverity |= VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT;
+    }
+
+    if (debug_action & VK_DBG_LAYER_ACTION_LOG_MSG) {
+        const char *log_filename = getLayerOption(log_filename_key.c_str());
+        FILE *log_output = getLayerLogOutput(log_filename, layer_identifier);
+        dbgCreateInfo.pfnUserCallback = messenger_log_callback;
+        dbgCreateInfo.pUserData = (void *)log_output;
+        layer_create_messenger_callback(report_data, true, &dbgCreateInfo, pAllocator, &messenger);
+    }
+
+    messenger = VK_NULL_HANDLE;
+
+    if (debug_action & VK_DBG_LAYER_ACTION_DEBUG_OUTPUT) {
+        dbgCreateInfo.pfnUserCallback = messenger_win32_debug_output_msg;
+        dbgCreateInfo.pUserData = NULL;
+        layer_create_messenger_callback(report_data, true, &dbgCreateInfo, pAllocator, &messenger);
+    }
+
+    messenger = VK_NULL_HANDLE;
+
+    if (debug_action & VK_DBG_LAYER_ACTION_BREAK) {
+        dbgCreateInfo.pfnUserCallback = MessengerBreakCallback;
+        dbgCreateInfo.pUserData = NULL;
+        layer_create_messenger_callback(report_data, true, &dbgCreateInfo, pAllocator, &messenger);
+    }
+}
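+
+// Illustrative call-site sketch (the 'instance_data' variable and the exact layer
+// identifier string are assumptions, not defined in this file):
+//     layer_debug_messenger_actions(instance_data->report_data, pAllocator, "khronos_validation");
+// This registers the default log / debug-output / break messengers selected by the
+// settings file (or the built-in defaults) before any app-defined callbacks exist.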
+
+// NOTE: This function has been deprecated, and the above function (layer_debug_messenger_actions) should be
+//       used in its place.
+VK_LAYER_EXPORT void layer_debug_report_actions(debug_report_data *report_data, const VkAllocationCallbacks *pAllocator,
+                                                const char *layer_identifier) {
+    VkDebugReportCallbackEXT callback = VK_NULL_HANDLE;
+
+    std::string report_flags_key = layer_identifier;
+    std::string debug_action_key = layer_identifier;
+    std::string log_filename_key = layer_identifier;
+    report_flags_key.append(".report_flags");
+    debug_action_key.append(".debug_action");
+    log_filename_key.append(".log_filename");
+
+    // Initialize layer options
+    VkDebugReportFlagsEXT report_flags = GetLayerOptionFlags(report_flags_key, report_flags_option_definitions, 0);
+    VkLayerDbgActionFlags debug_action = GetLayerOptionFlags(debug_action_key, debug_actions_option_definitions, 0);
+    // Flag as default if these settings are not from a vk_layer_settings.txt file
+    bool default_layer_callback = (debug_action & VK_DBG_LAYER_ACTION_DEFAULT) ? true : false;
+
+    if (debug_action & VK_DBG_LAYER_ACTION_LOG_MSG) {
+        const char *log_filename = getLayerOption(log_filename_key.c_str());
+        FILE *log_output = getLayerLogOutput(log_filename, layer_identifier);
+        VkDebugReportCallbackCreateInfoEXT dbgCreateInfo;
+        memset(&dbgCreateInfo, 0, sizeof(dbgCreateInfo));
+        dbgCreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
+        dbgCreateInfo.flags = report_flags;
+        dbgCreateInfo.pfnCallback = report_log_callback;
+        dbgCreateInfo.pUserData = (void *)log_output;
+        layer_create_report_callback(report_data, default_layer_callback, &dbgCreateInfo, pAllocator, &callback);
+    }
+
+    callback = VK_NULL_HANDLE;
+
+    if (debug_action & VK_DBG_LAYER_ACTION_DEBUG_OUTPUT) {
+        VkDebugReportCallbackCreateInfoEXT dbgCreateInfo;
+        memset(&dbgCreateInfo, 0, sizeof(dbgCreateInfo));
+        dbgCreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
+        dbgCreateInfo.flags = report_flags;
+        dbgCreateInfo.pfnCallback = report_win32_debug_output_msg;
+        dbgCreateInfo.pUserData = NULL;
+        layer_create_report_callback(report_data, default_layer_callback, &dbgCreateInfo, pAllocator, &callback);
+    }
+
+    callback = VK_NULL_HANDLE;
+
+    if (debug_action & VK_DBG_LAYER_ACTION_BREAK) {
+        VkDebugReportCallbackCreateInfoEXT dbgCreateInfo;
+        memset(&dbgCreateInfo, 0, sizeof(dbgCreateInfo));
+        dbgCreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
+        dbgCreateInfo.flags = report_flags;
+        dbgCreateInfo.pfnCallback = DebugBreakCallback;
+        dbgCreateInfo.pUserData = NULL;
+        layer_create_report_callback(report_data, default_layer_callback, &dbgCreateInfo, pAllocator, &callback);
+    }
+}
+
+VK_LAYER_EXPORT VkLayerInstanceCreateInfo *get_chain_info(const VkInstanceCreateInfo *pCreateInfo, VkLayerFunction func) {
+    VkLayerInstanceCreateInfo *chain_info = (VkLayerInstanceCreateInfo *)pCreateInfo->pNext;
+    while (chain_info && !(chain_info->sType == VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO && chain_info->function == func)) {
+        chain_info = (VkLayerInstanceCreateInfo *)chain_info->pNext;
+    }
+    assert(chain_info != NULL);
+    return chain_info;
+}
+
+VK_LAYER_EXPORT VkLayerDeviceCreateInfo *get_chain_info(const VkDeviceCreateInfo *pCreateInfo, VkLayerFunction func) {
+    VkLayerDeviceCreateInfo *chain_info = (VkLayerDeviceCreateInfo *)pCreateInfo->pNext;
+    while (chain_info && !(chain_info->sType == VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO && chain_info->function == func)) {
+        chain_info = (VkLayerDeviceCreateInfo *)chain_info->pNext;
+    }
+    assert(chain_info != NULL);
+    return chain_info;
+}
diff --git a/src/third_party/vulkan-validation-layers/src/layers/vk_layer_utils.h b/src/third_party/vulkan-validation-layers/src/layers/vk_layer_utils.h
new file mode 100644
index 0000000..0dfca82
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/vk_layer_utils.h
@@ -0,0 +1,351 @@
+/* Copyright (c) 2015-2017, 2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2017, 2019 Valve Corporation
+ * Copyright (c) 2015-2017, 2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ */
+
+#pragma once
+
+#include <cassert>
+#include <cstddef>
+#include <functional>
+#include <stdbool.h>
+#include <string>
+#include <vector>
+#include <set>
+#include "cast_utils.h"
+#include "vk_format_utils.h"
+#include "vk_layer_logging.h"
+
+#ifndef WIN32
+#include <strings.h>  // For ffs()
+#else
+#include <intrin.h>  // For __lzcnt()
+#endif
+
+#ifdef __cplusplus
+// Traits objects to allow string_join to operate on collections of const char *
+template <typename String>
+struct StringJoinSizeTrait {
+    static size_t size(const String &str) { return str.size(); }
+};
+
+template <>
+struct StringJoinSizeTrait<const char *> {
+    static size_t size(const char *str) {
+        if (!str) return 0;
+        return strlen(str);
+    }
+};
+// Similar to perl/python join
+//    * String must support size, reserve, append, and be default constructible
+//    * StringCollection must support size, const forward iteration, and store
+//      strings compatible with String::append
+//    * Accessor trait can be set if default accessors (compatible with string
+//      and const char *) don't support size(StringCollection::value_type &)
+//
+// Return type based on sep type
+template <typename String = std::string, typename StringCollection = std::vector<String>,
+          typename Accessor = StringJoinSizeTrait<typename StringCollection::value_type>>
+static inline String string_join(const String &sep, const StringCollection &strings) {
+    String joined;
+    const size_t count = strings.size();
+    if (!count) return joined;
+
+    // Pre-reserve storage so that we execute in linear time (avoids reallocation copies)
+    size_t reserve = (count - 1) * sep.size();
+    for (const auto &str : strings) {
+        reserve += Accessor::size(str);  // abstracted to allow const char * type in StringCollection
+    }
+    joined.reserve(reserve + 1);
+
+    // Separators only occur *between* string entries, so the first is special
+    auto current = strings.cbegin();
+    joined.append(*current);
+    ++current;
+    for (; current != strings.cend(); ++current) {
+        joined.append(sep);
+        joined.append(*current);
+    }
+    return joined;
+}
+
+// Requires StringCollection::value_type has a const char * constructor and is compatible with the String type used by string_join above
+template <typename StringCollection = std::vector<std::string>, typename SepString = std::string>
+static inline SepString string_join(const char *sep, const StringCollection &strings) {
+    return string_join<SepString, StringCollection>(SepString(sep), strings);
+}
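+
+// Illustrative usage sketch:
+//     std::vector<std::string> names{"graphics", "compute", "transfer"};
+//     std::string all = string_join(", ", names);  // "graphics, compute, transfer"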
+
+static inline std::string string_trim(const std::string &s) {
+    const char *whitespace = " \t\f\v\n\r";
+
+    const auto trimmed_beg = s.find_first_not_of(whitespace);
+    if (trimmed_beg == std::string::npos) return "";
+
+    const auto trimmed_end = s.find_last_not_of(whitespace);
+    assert(trimmed_end != std::string::npos && trimmed_beg <= trimmed_end);
+
+    return s.substr(trimmed_beg, trimmed_end - trimmed_beg + 1);
+}
+
+// Perl/Python style join operation for general types using stream semantics
+// Note: won't be as fast as string_join above, but simpler to use (and code)
+// Note: Modifiable reference doesn't match the google style but does match std style for stream handling and algorithms
+template <typename Stream, typename String, typename ForwardIt>
+Stream &stream_join(Stream &stream, const String &sep, ForwardIt first, ForwardIt last) {
+    if (first != last) {
+        stream << *first;
+        ++first;
+        while (first != last) {
+            stream << sep << *first;
+            ++first;
+        }
+    }
+    return stream;
+}
+
+// stream_join For whole collections with forward iterators
+template <typename Stream, typename String, typename Collection>
+Stream &stream_join(Stream &stream, const String &sep, const Collection &values) {
+    return stream_join(stream, sep, values.cbegin(), values.cend());
+}
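+
+// Illustrative usage sketch (reusing the hypothetical 'names' vector from the
+// string_join example above):
+//     std::ostringstream oss;
+//     stream_join(oss, ", ", names);  // oss.str() == "graphics, compute, transfer"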
+
+typedef void *dispatch_key;
+static inline dispatch_key get_dispatch_key(const void *object) { return (dispatch_key) * (VkLayerDispatchTable **)object; }
+
+VK_LAYER_EXPORT VkLayerInstanceCreateInfo *get_chain_info(const VkInstanceCreateInfo *pCreateInfo, VkLayerFunction func);
+VK_LAYER_EXPORT VkLayerDeviceCreateInfo *get_chain_info(const VkDeviceCreateInfo *pCreateInfo, VkLayerFunction func);
+
+static inline bool IsPowerOfTwo(unsigned x) { return x && !(x & (x - 1)); }
+
+extern "C" {
+#endif
+
+#define VK_LAYER_API_VERSION VK_MAKE_VERSION(1, 0, VK_HEADER_VERSION)
+
+typedef enum VkStringErrorFlagBits {
+    VK_STRING_ERROR_NONE = 0x00000000,
+    VK_STRING_ERROR_LENGTH = 0x00000001,
+    VK_STRING_ERROR_BAD_DATA = 0x00000002,
+} VkStringErrorFlagBits;
+typedef VkFlags VkStringErrorFlags;
+
+VK_LAYER_EXPORT void layer_debug_report_actions(debug_report_data *report_data, const VkAllocationCallbacks *pAllocator,
+                                                const char *layer_identifier);
+
+VK_LAYER_EXPORT void layer_debug_messenger_actions(debug_report_data *report_data, const VkAllocationCallbacks *pAllocator,
+                                                   const char *layer_identifier);
+
+VK_LAYER_EXPORT VkStringErrorFlags vk_string_validate(const int max_length, const char *char_array);
+VK_LAYER_EXPORT bool white_list(const char *item, const std::set<std::string> &whitelist);
+
+static inline int u_ffs(int val) {
+#ifdef WIN32
+    unsigned long bit_pos = 0;
+    if (_BitScanForward(&bit_pos, val) != 0) {
+        bit_pos += 1;
+    }
+    return bit_pos;
+#else
+    return ffs(val);
+#endif
+}
+
+#ifdef __cplusplus
+}
+#endif
+
+// shared_mutex support added in MSVC 2015 update 2
+#if defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && NTDDI_VERSION > NTDDI_WIN10_RS2
+#include <shared_mutex>
+#endif
+
+class ReadWriteLock {
+  private:
+#if defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && NTDDI_VERSION > NTDDI_WIN10_RS2
+    typedef std::shared_mutex lock_t;
+#else
+    typedef std::mutex lock_t;
+#endif
+
+  public:
+    void lock() { m_lock.lock(); }
+    bool try_lock() { return m_lock.try_lock(); }
+    void unlock() { m_lock.unlock(); }
+#if defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && NTDDI_VERSION > NTDDI_WIN10_RS2
+    void lock_shared() { m_lock.lock_shared(); }
+    bool try_lock_shared() { return m_lock.try_lock_shared(); }
+    void unlock_shared() { m_lock.unlock_shared(); }
+#else
+    void lock_shared() { lock(); }
+    bool try_lock_shared() { return try_lock(); }
+    void unlock_shared() { unlock(); }
+#endif
+  private:
+    lock_t m_lock;
+};
+
+#if defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && NTDDI_VERSION > NTDDI_WIN10_RS2
+typedef std::shared_lock<ReadWriteLock> read_lock_guard_t;
+typedef std::unique_lock<ReadWriteLock> write_lock_guard_t;
+#else
+typedef std::unique_lock<ReadWriteLock> read_lock_guard_t;
+typedef std::unique_lock<ReadWriteLock> write_lock_guard_t;
+#endif
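+
+// Illustrative usage sketch (the 'object_lock' name is hypothetical):
+//     ReadWriteLock object_lock;
+//     {
+//         read_lock_guard_t read_lock(object_lock);    // shared when std::shared_mutex is available
+//         // ... read shared state ...
+//     }
+//     {
+//         write_lock_guard_t write_lock(object_lock);  // always exclusive
+//         // ... mutate shared state ...
+//     }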
+
+// Limited concurrent_unordered_map that supports internally-synchronized
+// insert/erase/access. Splits locking across N buckets and uses shared_mutex
+// for read/write locking. Iterators are not supported. The following
+// operations are supported:
+//
+// insert_or_assign: Insert a new element or update an existing element.
+// insert: Insert a new element and return whether it was inserted.
+// erase: Remove an element.
+// contains: Returns true if the key is in the map.
+// find: Returns != end() if found, value is in ret->second.
+// pop: Erases and returns the erased value if found.
+//
+// find/end: find returns a vaguely iterator-like type that can be compared to
+// end and can use iter->second to retrieve the reference. This is to ease porting
+// for existing code that combines the existence check and lookup in a single
+// operation (and thus a single lock). i.e.:
+//
+//      auto iter = map.find(key);
+//      if (iter != map.end()) {
+//          T t = iter->second;
+//          ...
+//
+// snapshot: Return an array of elements (key, value pairs) that satisfy an optional
+// predicate. This can be used as a substitute for iterators in exceptional cases.
+template <typename Key, typename T, int BUCKETSLOG2 = 2, typename Hash = std::hash<Key>>
+class vl_concurrent_unordered_map {
+  public:
+    void insert_or_assign(const Key &key, const T &value) {
+        uint32_t h = ConcurrentMapHashObject(key);
+        write_lock_guard_t lock(locks[h].lock);
+        maps[h][key] = value;
+    }
+
+    bool insert(const Key &key, const T &value) {
+        uint32_t h = ConcurrentMapHashObject(key);
+        write_lock_guard_t lock(locks[h].lock);
+        auto ret = maps[h].insert(typename std::unordered_map<Key, T>::value_type(key, value));
+        return ret.second;
+    }
+
+    // returns size_type
+    size_t erase(const Key &key) {
+        uint32_t h = ConcurrentMapHashObject(key);
+        write_lock_guard_t lock(locks[h].lock);
+        return maps[h].erase(key);
+    }
+
+    bool contains(const Key &key) const {
+        uint32_t h = ConcurrentMapHashObject(key);
+        read_lock_guard_t lock(locks[h].lock);
+        return maps[h].count(key) != 0;
+    }
+
+    // type returned by find() and end().
+    class FindResult {
+      public:
+        FindResult(bool a, T b) : result(a, std::move(b)) {}
+
+        // == and != only support comparing against end()
+        bool operator==(const FindResult &other) const {
+            if (result.first == false && other.result.first == false) {
+                return true;
+            }
+            return false;
+        }
+        bool operator!=(const FindResult &other) const { return !(*this == other); }
+
+        // Make -> act kind of like an iterator.
+        std::pair<bool, T> *operator->() { return &result; }
+        const std::pair<bool, T> *operator->() const { return &result; }
+
+      private:
+        // (found, reference to element)
+        std::pair<bool, T> result;
+    };
+
+    // find()/end() return a FindResult containing a copy of the value. For end(),
+    // return a default value.
+    FindResult end() const { return FindResult(false, T()); }
+
+    FindResult find(const Key &key) const {
+        uint32_t h = ConcurrentMapHashObject(key);
+        read_lock_guard_t lock(locks[h].lock);
+
+        auto itr = maps[h].find(key);
+        bool found = itr != maps[h].end();
+
+        if (found) {
+            return FindResult(true, itr->second);
+        } else {
+            return end();
+        }
+    }
+
+    FindResult pop(const Key &key) {
+        uint32_t h = ConcurrentMapHashObject(key);
+        write_lock_guard_t lock(locks[h].lock);
+
+        auto itr = maps[h].find(key);
+        bool found = itr != maps[h].end();
+
+        if (found) {
+            FindResult ret(true, itr->second);
+            maps[h].erase(itr);
+            return ret;
+        } else {
+            return end();
+        }
+    }
+
+    std::vector<std::pair<const Key, T>> snapshot(std::function<bool(T)> f = nullptr) const {
+        std::vector<std::pair<const Key, T>> ret;
+        for (int h = 0; h < BUCKETS; ++h) {
+            read_lock_guard_t lock(locks[h].lock);
+            for (auto j : maps[h]) {
+                if (!f || f(j.second)) {
+                    ret.push_back(j);
+                }
+            }
+        }
+        return ret;
+    }
+
+  private:
+    static const int BUCKETS = (1 << BUCKETSLOG2);
+
+    std::unordered_map<Key, T, Hash> maps[BUCKETS];
+    struct {
+        mutable ReadWriteLock lock;
+        // Put each lock on its own cache line to avoid false cache line sharing.
+        char padding[(-int(sizeof(ReadWriteLock))) & 63];
+    } locks[BUCKETS];
+
+    uint32_t ConcurrentMapHashObject(const Key &object) const {
+        uint64_t u64 = (uint64_t)(uintptr_t)object;
+        uint32_t hash = (uint32_t)(u64 >> 32) + (uint32_t)u64;
+        hash ^= (hash >> BUCKETSLOG2) ^ (hash >> (2 * BUCKETSLOG2));
+        hash &= (BUCKETS - 1);
+        return hash;
+    }
+};
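+
+// Illustrative usage sketch for vl_concurrent_unordered_map ('image_map', 'image',
+// 'state', and 'ImageState' are hypothetical names):
+//     vl_concurrent_unordered_map<VkImage, ImageState *> image_map;
+//     image_map.insert_or_assign(image, state);
+//     auto iter = image_map.find(image);
+//     if (iter != image_map.end()) {
+//         ImageState *found = iter->second;  // FindResult holds a copy of the stored value
+//     }
+//     image_map.erase(image);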
diff --git a/src/third_party/vulkan-validation-layers/src/layers/vk_loader_platform.h b/src/third_party/vulkan-validation-layers/src/layers/vk_loader_platform.h
new file mode 100644
index 0000000..4162560
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/vk_loader_platform.h
@@ -0,0 +1,367 @@
+/*
+ *
+ * Copyright (c) 2015-2018 The Khronos Group Inc.
+ * Copyright (c) 2015-2018 Valve Corporation
+ * Copyright (c) 2015-2018 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Ian Elliot <ian@lunarg.com>
+ * Author: Jon Ashburn <jon@lunarg.com>
+ * Author: Lenny Komow <lenny@lunarg.com>
+ *
+ */
+#pragma once
+
+#if defined(_WIN32)
+// WinSock2.h must be included *BEFORE* windows.h
+#include <WinSock2.h>
+#endif  // _WIN32
+
+#include "vulkan/vk_platform.h"
+#include "vulkan/vk_sdk_platform.h"
+
+#if defined(__linux__) || defined(__APPLE__)
+/* Linux-specific common code: */
+
+// Headers:
+//#define _GNU_SOURCE 1
+// TBD: Are the contents of the following file used?
+#include <unistd.h>
+// Note: The following file is for dynamic loading:
+#include <dlfcn.h>
+#include <pthread.h>
+#include <assert.h>
+#include <string.h>
+#include <stdbool.h>
+#include <stdlib.h>
+#include <libgen.h>
+
+// VK Library Filenames, Paths, etc.:
+#define PATH_SEPARATOR ':'
+#define DIRECTORY_SYMBOL '/'
+
+#define VULKAN_DIR "/vulkan/"
+#define VULKAN_ICDCONF_DIR "icd.d"
+#define VULKAN_ICD_DIR "icd"
+#define VULKAN_ELAYERCONF_DIR "explicit_layer.d"
+#define VULKAN_ILAYERCONF_DIR "implicit_layer.d"
+#define VULKAN_LAYER_DIR "layer"
+
+#define DEFAULT_VK_DRIVERS_INFO ""
+#define DEFAULT_VK_ELAYERS_INFO ""
+#define DEFAULT_VK_ILAYERS_INFO ""
+
+#define DEFAULT_VK_DRIVERS_PATH ""
+#if !defined(DEFAULT_VK_LAYERS_PATH)
+#define DEFAULT_VK_LAYERS_PATH ""
+#endif
+
+#if !defined(LAYERS_SOURCE_PATH)
+#define LAYERS_SOURCE_PATH NULL
+#endif
+#define LAYERS_PATH_ENV "VK_LAYER_PATH"
+#define ENABLED_LAYERS_ENV "VK_INSTANCE_LAYERS"
+
+#define RELATIVE_VK_DRIVERS_INFO VULKAN_DIR VULKAN_ICDCONF_DIR
+#define RELATIVE_VK_ELAYERS_INFO VULKAN_DIR VULKAN_ELAYERCONF_DIR
+#define RELATIVE_VK_ILAYERS_INFO VULKAN_DIR VULKAN_ILAYERCONF_DIR
+
+// C99:
+#define PRINTF_SIZE_T_SPECIFIER "%zu"
+
+// File IO
+static inline bool loader_platform_file_exists(const char *path) {
+    if (access(path, F_OK))
+        return false;
+    else
+        return true;
+}
+
+static inline bool loader_platform_is_path_absolute(const char *path) {
+    if (path[0] == '/')
+        return true;
+    else
+        return false;
+}
+
+static inline char *loader_platform_dirname(char *path) { return dirname(path); }
+
+// Dynamic Loading of libraries:
+typedef void *loader_platform_dl_handle;
+static inline loader_platform_dl_handle loader_platform_open_library(const char *libPath) {
+    // When loading the library, we use RTLD_LAZY so that not all symbols have to be
+    // resolved at this time (which improves performance). Note that if not all symbols
+    // can be resolved, this could cause crashes later. Use the LD_BIND_NOW environment
+    // variable to force all symbols to be resolved here.
+    return dlopen(libPath, RTLD_LAZY | RTLD_LOCAL);
+}
+static inline const char *loader_platform_open_library_error(const char *libPath) { return dlerror(); }
+static inline void loader_platform_close_library(loader_platform_dl_handle library) { dlclose(library); }
+static inline void *loader_platform_get_proc_address(loader_platform_dl_handle library, const char *name) {
+    assert(library);
+    assert(name);
+    return dlsym(library, name);
+}
+static inline const char *loader_platform_get_proc_address_error(const char *name) { return dlerror(); }
+
+// Threads:
+typedef pthread_t loader_platform_thread;
+#define THREAD_LOCAL_DECL __thread
+
+// The once init functionality is not used on Linux
+#define LOADER_PLATFORM_THREAD_ONCE_DECLARATION(var)
+#define LOADER_PLATFORM_THREAD_ONCE_DEFINITION(var)
+#define LOADER_PLATFORM_THREAD_ONCE(ctl, func)
+
+// Thread IDs:
+typedef pthread_t loader_platform_thread_id;
+static inline loader_platform_thread_id loader_platform_get_thread_id() { return pthread_self(); }
+
+// Thread mutex:
+typedef pthread_mutex_t loader_platform_thread_mutex;
+static inline void loader_platform_thread_create_mutex(loader_platform_thread_mutex *pMutex) { pthread_mutex_init(pMutex, NULL); }
+static inline void loader_platform_thread_lock_mutex(loader_platform_thread_mutex *pMutex) { pthread_mutex_lock(pMutex); }
+static inline void loader_platform_thread_unlock_mutex(loader_platform_thread_mutex *pMutex) { pthread_mutex_unlock(pMutex); }
+static inline void loader_platform_thread_delete_mutex(loader_platform_thread_mutex *pMutex) { pthread_mutex_destroy(pMutex); }
+typedef pthread_cond_t loader_platform_thread_cond;
+static inline void loader_platform_thread_init_cond(loader_platform_thread_cond *pCond) { pthread_cond_init(pCond, NULL); }
+static inline void loader_platform_thread_cond_wait(loader_platform_thread_cond *pCond, loader_platform_thread_mutex *pMutex) {
+    pthread_cond_wait(pCond, pMutex);
+}
+static inline void loader_platform_thread_cond_broadcast(loader_platform_thread_cond *pCond) { pthread_cond_broadcast(pCond); }
+
+#define loader_stack_alloc(size) alloca(size)
+
+#elif defined(_WIN32)  // defined(__linux__)
+/* Windows-specific common code: */
+// WinBase.h defines CreateSemaphore and synchapi.h defines CreateEvent
+//  undefine them to avoid conflicts with VkLayerDispatchTable struct members.
+#ifdef CreateSemaphore
+#undef CreateSemaphore
+#endif
+#ifdef CreateEvent
+#undef CreateEvent
+#endif
+#include <assert.h>
+#include <stdio.h>
+#include <string.h>
+#include <io.h>
+#include <stdbool.h>
+#include <shlwapi.h>
+#ifdef __cplusplus
+#include <iostream>
+#include <string>
+#endif  // __cplusplus
+
+// VK Library Filenames, Paths, etc.:
+#define PATH_SEPARATOR ';'
+#define DIRECTORY_SYMBOL '\\'
+#define DEFAULT_VK_REGISTRY_HIVE HKEY_LOCAL_MACHINE
+#define DEFAULT_VK_REGISTRY_HIVE_STR "HKEY_LOCAL_MACHINE"
+#define SECONDARY_VK_REGISTRY_HIVE HKEY_CURRENT_USER
+#define SECONDARY_VK_REGISTRY_HIVE_STR "HKEY_CURRENT_USER"
+#define DEFAULT_VK_DRIVERS_INFO "SOFTWARE\\Khronos\\" API_NAME "\\Drivers"
+#define DEFAULT_VK_DRIVERS_PATH ""
+#define DEFAULT_VK_ELAYERS_INFO "SOFTWARE\\Khronos\\" API_NAME "\\ExplicitLayers"
+#define DEFAULT_VK_ILAYERS_INFO "SOFTWARE\\Khronos\\" API_NAME "\\ImplicitLayers"
+#if !defined(DEFAULT_VK_LAYERS_PATH)
+#define DEFAULT_VK_LAYERS_PATH ""
+#endif
+#if !defined(LAYERS_SOURCE_PATH)
+#define LAYERS_SOURCE_PATH NULL
+#endif
+#define LAYERS_PATH_ENV "VK_LAYER_PATH"
+#define ENABLED_LAYERS_ENV "VK_INSTANCE_LAYERS"
+#define RELATIVE_VK_DRIVERS_INFO ""
+#define RELATIVE_VK_ELAYERS_INFO ""
+#define RELATIVE_VK_ILAYERS_INFO ""
+#define PRINTF_SIZE_T_SPECIFIER "%Iu"
+
+#if defined(_WIN32)
+// Get the key for the plug 'n play driver registry
+// The string returned by this function should NOT be freed
+static inline const char *LoaderPnpDriverRegistry() {
+    BOOL is_wow;
+    IsWow64Process(GetCurrentProcess(), &is_wow);
+    return is_wow ? (API_NAME "DriverNameWow") : (API_NAME "DriverName");
+}
+
+// Get the key for the plug 'n play explicit layer registry
+// The string returned by this function should NOT be freed
+static inline const char *LoaderPnpELayerRegistry() {
+    BOOL is_wow;
+    IsWow64Process(GetCurrentProcess(), &is_wow);
+    return is_wow ? (API_NAME "ExplicitLayersWow") : (API_NAME "ExplicitLayers");
+}
+// Get the key for the plug 'n play implicit layer registry
+// The string returned by this function should NOT be freed
+
+static inline const char *LoaderPnpILayerRegistry() {
+    BOOL is_wow;
+    IsWow64Process(GetCurrentProcess(), &is_wow);
+    return is_wow ? (API_NAME "ImplicitLayersWow") : (API_NAME "ImplicitLayers");
+}
+#endif
+
+// File IO
+static bool loader_platform_file_exists(const char *path) {
+    if ((_access(path, 0)) == -1)
+        return false;
+    else
+        return true;
+}
+
+static bool loader_platform_is_path_absolute(const char *path) {
+    if (!path || !*path) {
+        return false;
+    }
+    if (*path == DIRECTORY_SYMBOL || path[1] == ':') {
+        return true;
+    }
+    return false;
+}
+
+// WIN32 runtime doesn't have dirname().
+static inline char *loader_platform_dirname(char *path) {
+    char *current, *next;
+
+    // TODO/TBD: Do we need to deal with the Windows drive-letter ':' character?
+
+    for (current = path; *current != '\0'; current = next) {
+        next = strchr(current, DIRECTORY_SYMBOL);
+        if (next == NULL) {
+            if (current != path) *(current - 1) = '\0';
+            return path;
+        } else {
+            // Point one character past the DIRECTORY_SYMBOL:
+            next++;
+        }
+    }
+    return path;
+}
+
+// The WIN32 runtime doesn't provide basename(). Paths are different on Windows,
+// so this is just a temporary solution to get us compiling and able to test
+// some scenarios until a proper Windows solution is developed.
+// TODO: Develop a better, permanent solution for Windows, to replace this
+// temporary code:
+static char *loader_platform_basename(char *pathname) {
+    char *current, *next;
+
+    // TODO/TBD: Do we need to deal with the Windows drive-letter (":") character?
+
+    for (current = pathname; *current != '\0'; current = next) {
+        next = strchr(current, DIRECTORY_SYMBOL);
+        if (next == NULL) {
+            // No more DIRECTORY_SYMBOLs, so return current:
+            return current;
+        } else {
+            // Point one character past the DIRECTORY_SYMBOL:
+            next++;
+        }
+    }
+    // We shouldn't get to here, but this makes the compiler happy:
+    return current;
+}
+
+// Dynamic Loading:
+typedef HMODULE loader_platform_dl_handle;
+static loader_platform_dl_handle loader_platform_open_library(const char *lib_path) {
+    // Try loading the library the original way first.
+    loader_platform_dl_handle lib_handle = LoadLibrary(lib_path);
+    if (lib_handle == NULL && GetLastError() == ERROR_MOD_NOT_FOUND) {
+        // If that failed, then try loading it with broader search folders.
+        lib_handle = LoadLibraryEx(lib_path, NULL, LOAD_LIBRARY_SEARCH_DEFAULT_DIRS | LOAD_LIBRARY_SEARCH_DLL_LOAD_DIR);
+    }
+    return lib_handle;
+}
+static char *loader_platform_open_library_error(const char *libPath) {
+    static char errorMsg[164];
+    (void)snprintf(errorMsg, 163, "Failed to open dynamic library \"%s\" with error %lu", libPath, GetLastError());
+    return errorMsg;
+}
+static void loader_platform_close_library(loader_platform_dl_handle library) { FreeLibrary(library); }
+static void *loader_platform_get_proc_address(loader_platform_dl_handle library, const char *name) {
+    assert(library);
+    assert(name);
+    return (void *)GetProcAddress(library, name);
+}
+static char *loader_platform_get_proc_address_error(const char *name) {
+    static char errorMsg[120];
+    (void)snprintf(errorMsg, 119, "Failed to find function \"%s\" in dynamic library", name);
+    return errorMsg;
+}
+
+// Threads:
+typedef HANDLE loader_platform_thread;
+#define THREAD_LOCAL_DECL __declspec(thread)
+
+// The once init functionality is not used when building a DLL on Windows. This is because there is no way to clean up the
+// resources allocated by anything allocated by once init. This isn't a problem for static libraries, but it is for dynamic
+// ones. When building a DLL, we use DllMain() instead to allow properly cleaning up resources.
+#if defined(LOADER_DYNAMIC_LIB)
+#define LOADER_PLATFORM_THREAD_ONCE_DECLARATION(var)
+#define LOADER_PLATFORM_THREAD_ONCE_DEFINITION(var)
+#define LOADER_PLATFORM_THREAD_ONCE(ctl, func)
+#else
+#define LOADER_PLATFORM_THREAD_ONCE_DECLARATION(var) INIT_ONCE var = INIT_ONCE_STATIC_INIT;
+#define LOADER_PLATFORM_THREAD_ONCE_DEFINITION(var) INIT_ONCE var;
+#define LOADER_PLATFORM_THREAD_ONCE(ctl, func) loader_platform_thread_once_fn(ctl, func)
+static BOOL CALLBACK InitFuncWrapper(PINIT_ONCE InitOnce, PVOID Parameter, PVOID *Context) {
+    void (*func)(void) = (void (*)(void))Parameter;
+    func();
+    return TRUE;
+}
+static void loader_platform_thread_once_fn(void *ctl, void (*func)(void)) {
+    assert(func != NULL);
+    assert(ctl != NULL);
+    InitOnceExecuteOnce((PINIT_ONCE)ctl, InitFuncWrapper, (void *)func, NULL);
+}
+#endif
+
+// Thread IDs:
+typedef DWORD loader_platform_thread_id;
+static loader_platform_thread_id loader_platform_get_thread_id() { return GetCurrentThreadId(); }
+
+// Thread mutex:
+typedef CRITICAL_SECTION loader_platform_thread_mutex;
+static void loader_platform_thread_create_mutex(loader_platform_thread_mutex *pMutex) { InitializeCriticalSection(pMutex); }
+static void loader_platform_thread_lock_mutex(loader_platform_thread_mutex *pMutex) { EnterCriticalSection(pMutex); }
+static void loader_platform_thread_unlock_mutex(loader_platform_thread_mutex *pMutex) { LeaveCriticalSection(pMutex); }
+static void loader_platform_thread_delete_mutex(loader_platform_thread_mutex *pMutex) { DeleteCriticalSection(pMutex); }
+typedef CONDITION_VARIABLE loader_platform_thread_cond;
+static void loader_platform_thread_init_cond(loader_platform_thread_cond *pCond) { InitializeConditionVariable(pCond); }
+static void loader_platform_thread_cond_wait(loader_platform_thread_cond *pCond, loader_platform_thread_mutex *pMutex) {
+    SleepConditionVariableCS(pCond, pMutex, INFINITE);
+}
+static void loader_platform_thread_cond_broadcast(loader_platform_thread_cond *pCond) { WakeAllConditionVariable(pCond); }
+
+#define loader_stack_alloc(size) _alloca(size)
+#else  // defined(_WIN32)
+
+#error The "loader_platform.h" file must be modified for this OS.
+
+// NOTE: In order to support another OS, an #elif needs to be added (above the
+// "#else // defined(_WIN32)") for that OS, and OS-specific versions of the
+// contents of this file must be created.
+
+// NOTE: Other OS-specific changes are also needed for this OS.  Search for
+// files with "WIN32" in it, as a quick way to find files that must be changed.
+
+#endif  // defined(_WIN32)
+
+// returns true if the given string appears to be a relative or absolute
+// path, as opposed to a bare filename.
+static inline bool loader_platform_is_path(const char *path) { return strchr(path, DIRECTORY_SYMBOL) != NULL; }
diff --git a/src/third_party/vulkan-validation-layers/src/layers/vk_mem_alloc.h b/src/third_party/vulkan-validation-layers/src/layers/vk_mem_alloc.h
new file mode 100644
index 0000000..2c13549
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/vk_mem_alloc.h
@@ -0,0 +1,16813 @@
+//
+// Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+//
+
+// clang-format off
+//
+// Source: https://github.com/GPUOpen-LibrariesAndSDKs/VulkanMemoryAllocator
+// THIS FILE HAS BEEN CHANGED FROM THE ORIGINAL VERSION
+//
+// Change Log:
+//    3/27/19 - Make changes to suppress warnings from GCC
+//    4/18/19 - Make changes to suppress warnings from clang
+//    6/05/19 - Make changes to suppress warnings from clang 3.8.0
+//    6/05/19 - Make changes to suppress more warnings from GCC
+//    8/09/19 - Make changes to suppress dead code warnings (from upstream master branch)
+//
+
+#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
+#define AMD_VULKAN_MEMORY_ALLOCATOR_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/** \mainpage Vulkan Memory Allocator
+
+<b>Version 2.2.0</b> (2018-12-13)
+
+Copyright (c) 2017-2018 Advanced Micro Devices, Inc. All rights reserved. \n
+License: MIT
+
+Documentation of all members: vk_mem_alloc.h
+
+\section main_table_of_contents Table of contents
+
+- <b>User guide</b>
+  - \subpage quick_start
+    - [Project setup](@ref quick_start_project_setup)
+    - [Initialization](@ref quick_start_initialization)
+    - [Resource allocation](@ref quick_start_resource_allocation)
+  - \subpage choosing_memory_type
+    - [Usage](@ref choosing_memory_type_usage)
+    - [Required and preferred flags](@ref choosing_memory_type_required_preferred_flags)
+    - [Explicit memory types](@ref choosing_memory_type_explicit_memory_types)
+    - [Custom memory pools](@ref choosing_memory_type_custom_memory_pools)
+  - \subpage memory_mapping
+    - [Mapping functions](@ref memory_mapping_mapping_functions)
+    - [Persistently mapped memory](@ref memory_mapping_persistently_mapped_memory)
+    - [Cache control](@ref memory_mapping_cache_control)
+    - [Finding out if memory is mappable](@ref memory_mapping_finding_if_memory_mappable)
+  - \subpage custom_memory_pools
+    - [Choosing memory type index](@ref custom_memory_pools_MemTypeIndex)
+    - [Linear allocation algorithm](@ref linear_algorithm)
+      - [Free-at-once](@ref linear_algorithm_free_at_once)
+      - [Stack](@ref linear_algorithm_stack)
+      - [Double stack](@ref linear_algorithm_double_stack)
+      - [Ring buffer](@ref linear_algorithm_ring_buffer)
+    - [Buddy allocation algorithm](@ref buddy_algorithm)
+  - \subpage defragmentation
+    - [Defragmenting CPU memory](@ref defragmentation_cpu)
+    - [Defragmenting GPU memory](@ref defragmentation_gpu)
+    - [Additional notes](@ref defragmentation_additional_notes)
+    - [Writing custom allocation algorithm](@ref defragmentation_custom_algorithm)
+  - \subpage lost_allocations
+  - \subpage statistics
+    - [Numeric statistics](@ref statistics_numeric_statistics)
+    - [JSON dump](@ref statistics_json_dump)
+  - \subpage allocation_annotation
+    - [Allocation user data](@ref allocation_user_data)
+    - [Allocation names](@ref allocation_names)
+  - \subpage debugging_memory_usage
+    - [Memory initialization](@ref debugging_memory_usage_initialization)
+    - [Margins](@ref debugging_memory_usage_margins)
+    - [Corruption detection](@ref debugging_memory_usage_corruption_detection)
+  - \subpage record_and_replay
+- \subpage usage_patterns
+  - [Simple patterns](@ref usage_patterns_simple)
+  - [Advanced patterns](@ref usage_patterns_advanced)
+- \subpage configuration
+  - [Pointers to Vulkan functions](@ref config_Vulkan_functions)
+  - [Custom host memory allocator](@ref custom_memory_allocator)
+  - [Device memory allocation callbacks](@ref allocation_callbacks)
+  - [Device heap memory limit](@ref heap_memory_limit)
+  - \subpage vk_khr_dedicated_allocation
+- \subpage general_considerations
+  - [Thread safety](@ref general_considerations_thread_safety)
+  - [Validation layer warnings](@ref general_considerations_validation_layer_warnings)
+  - [Allocation algorithm](@ref general_considerations_allocation_algorithm)
+  - [Features not supported](@ref general_considerations_features_not_supported)
+
+\section main_see_also See also
+
+- [Product page on GPUOpen](https://gpuopen.com/gaming-product/vulkan-memory-allocator/)
+- [Source repository on GitHub](https://github.com/GPUOpen-LibrariesAndSDKs/VulkanMemoryAllocator)
+
+
+
+
+\page quick_start Quick start
+
+\section quick_start_project_setup Project setup
+
+Vulkan Memory Allocator comes in form of a single header file.
+You don't need to build it as a separate library project.
+You can add this file directly to your project and submit it to code repository next to your other source files.
+
+"Single header" doesn't mean that everything is contained in C/C++ declarations,
+like it tends to be in case of inline functions or C++ templates.
+It means that implementation is bundled with interface in a single file and needs to be extracted using preprocessor macro.
+If you don't do it properly, you will get linker errors.
+
+To do it properly:
+
+-# Include "vk_mem_alloc.h" file in each CPP file where you want to use the library.
+   This includes declarations of all members of the library.
+-# In exacly one CPP file define following macro before this include.
+   It enables also internal definitions.
+
+\code
+#define VMA_IMPLEMENTATION
+#include "vk_mem_alloc.h"
+\endcode
+
+It may be a good idea to create a dedicated CPP file just for this purpose.
+
+Note on language: This library is written in C++, but has C-compatible interface.
+Thus you can include and use vk_mem_alloc.h in C or C++ code, but full
+implementation with `VMA_IMPLEMENTATION` macro must be compiled as C++, NOT as C.
+
+Please note that this library includes header `<vulkan/vulkan.h>`, which in turn
+includes `<windows.h>` on Windows. If you need some specific macros defined
+before including these headers (like `WIN32_LEAN_AND_MEAN` or
+`WINVER` for Windows, `VK_USE_PLATFORM_WIN32_KHR` for Vulkan), you must define
+them before every `#include` of this library.
+
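+For example, a minimal sketch of such a compilation unit on Windows (the specific
+macros shown here are just examples of defines you might need; adjust them to your project):
+
+\code
+#define WIN32_LEAN_AND_MEAN
+#define VK_USE_PLATFORM_WIN32_KHR
+#define VMA_IMPLEMENTATION
+#include "vk_mem_alloc.h"
+\endcode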
+
+\section quick_start_initialization Initialization
+
+At program startup:
+
+-# Initialize Vulkan to have `VkPhysicalDevice` and `VkDevice` object.
+-# Fill VmaAllocatorCreateInfo structure and create #VmaAllocator object by
+   calling vmaCreateAllocator().
+
+\code
+VmaAllocatorCreateInfo allocatorInfo = {};
+allocatorInfo.physicalDevice = physicalDevice;
+allocatorInfo.device = device;
+
+VmaAllocator allocator;
+vmaCreateAllocator(&allocatorInfo, &allocator);
+\endcode
+
+\section quick_start_resource_allocation Resource allocation
+
+When you want to create a buffer or image:
+
+-# Fill `VkBufferCreateInfo` / `VkImageCreateInfo` structure.
+-# Fill VmaAllocationCreateInfo structure.
+-# Call vmaCreateBuffer() / vmaCreateImage() to get `VkBuffer`/`VkImage` with memory
+   already allocated and bound to it.
+
+\code
+VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufferInfo.size = 65536;
+bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+
+VmaAllocationCreateInfo allocInfo = {};
+allocInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
+
+VkBuffer buffer;
+VmaAllocation allocation;
+vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr);
+\endcode
+
+Don't forget to destroy your objects when no longer needed:
+
+\code
+vmaDestroyBuffer(allocator, buffer, allocation);
+vmaDestroyAllocator(allocator);
+\endcode
+
+
+\page choosing_memory_type Choosing memory type
+
+Physical devices in Vulkan support various combinations of memory heaps and
+types. Help with choosing correct and optimal memory type for your specific
+resource is one of the key features of this library. You can use it by filling
+appropriate members of VmaAllocationCreateInfo structure, as described below.
+You can also combine multiple methods.
+
+-# If you just want to find memory type index that meets your requirements, you
+   can use function vmaFindMemoryTypeIndex().
+-# If you want to allocate a region of device memory without association with any
+   specific image or buffer, you can use function vmaAllocateMemory(). Usage of
+   this function is not recommended and usually not needed.
+-# If you already have a buffer or an image created, you want to allocate memory
+   for it and then you will bind it yourself, you can use function
+   vmaAllocateMemoryForBuffer(), vmaAllocateMemoryForImage().
+   For binding you should use functions: vmaBindBufferMemory(), vmaBindImageMemory().
+-# If you want to create a buffer or an image, allocate memory for it and bind
+   them together, all in one call, you can use function vmaCreateBuffer(),
+   vmaCreateImage(). This is the recommended way to use this library.
+
+When using 3. or 4., the library internally queries Vulkan for memory types
+supported for that buffer or image (function `vkGetBufferMemoryRequirements()`)
+and uses only one of these types.
+
+If no memory type can be found that meets all the requirements, these functions
+return `VK_ERROR_FEATURE_NOT_PRESENT`.
+
+You can leave VmaAllocationCreateInfo structure completely filled with zeros.
+It means no requirements are specified for memory type.
+It is valid, although not very useful.
+
+\section choosing_memory_type_usage Usage
+
+The easiest way to specify memory requirements is to fill member
+VmaAllocationCreateInfo::usage using one of the values of enum #VmaMemoryUsage.
+It defines high level, common usage types.
+For more details, see description of this enum.
+
+For example, if you want to create a uniform buffer that will be filled using
+transfer only once or infrequently and used for rendering every frame, you can
+do it using following code:
+
+\code
+VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufferInfo.size = 65536;
+bufferInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+
+VmaAllocationCreateInfo allocInfo = {};
+allocInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
+
+VkBuffer buffer;
+VmaAllocation allocation;
+vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr);
+\endcode
+
+\section choosing_memory_type_required_preferred_flags Required and preferred flags
+
+You can specify more detailed requirements by filling members
+VmaAllocationCreateInfo::requiredFlags and VmaAllocationCreateInfo::preferredFlags
+with a combination of bits from enum `VkMemoryPropertyFlags`. For example,
+if you want to create a buffer that will be persistently mapped on host (so it
+must be `HOST_VISIBLE`) and preferably will also be `HOST_COHERENT` and `HOST_CACHED`,
+use following code:
+
+\code
+VmaAllocationCreateInfo allocInfo = {};
+allocInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+allocInfo.preferredFlags = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
+allocInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
+
+VkBuffer buffer;
+VmaAllocation allocation;
+vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr);
+\endcode
+
+A memory type is chosen that has all the required flags and as many preferred
+flags set as possible.
+
+If you use VmaAllocationCreateInfo::usage, it is just internally converted to
+a set of required and preferred flags.
+
+\section choosing_memory_type_explicit_memory_types Explicit memory types
+
+If you inspected memory types available on the physical device and you have
+a preference for memory types that you want to use, you can fill member
+VmaAllocationCreateInfo::memoryTypeBits. It is a bit mask, where each bit set
+means that a memory type with that index is allowed to be used for the
+allocation. Special value 0, just like `UINT32_MAX`, means there are no
+restrictions to memory type index.
+
+Please note that this member is NOT just a memory type index.
+Still you can use it to choose just one, specific memory type.
+For example, if you already determined that your buffer should be created in
+memory type 2, use following code:
+
+\code
+uint32_t memoryTypeIndex = 2;
+
+VmaAllocationCreateInfo allocInfo = {};
+allocInfo.memoryTypeBits = 1u << memoryTypeIndex;
+
+VkBuffer buffer;
+VmaAllocation allocation;
+vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr);
+\endcode
+
+\section choosing_memory_type_custom_memory_pools Custom memory pools
+
+If you allocate from custom memory pool, all the ways of specifying memory
+requirements described above are not applicable and the aforementioned members
+of VmaAllocationCreateInfo structure are ignored. Memory type is selected
+explicitly when creating the pool and then used to make all the allocations from
+that pool. For further details, see \ref custom_memory_pools.
+
+
+\page memory_mapping Memory mapping
+
+To "map memory" in Vulkan means to obtain a CPU pointer to `VkDeviceMemory`,
+to be able to read from it or write to it in CPU code.
+Mapping is possible only of memory allocated from a memory type that has
+`VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT` flag.
+Functions `vkMapMemory()`, `vkUnmapMemory()` are designed for this purpose.
+You can use them directly with memory allocated by this library,
+but it is not recommended because of following issue:
+Mapping the same `VkDeviceMemory` block multiple times is illegal - only one mapping at a time is allowed.
+This includes mapping disjoint regions. Mapping is not reference-counted internally by Vulkan.
+Because of this, Vulkan Memory Allocator provides following facilities:
+
+\section memory_mapping_mapping_functions Mapping functions
+
+The library provides following functions for mapping of a specific #VmaAllocation: vmaMapMemory(), vmaUnmapMemory().
+They are safer and more convenient to use than standard Vulkan functions.
+You can map an allocation multiple times simultaneously - mapping is reference-counted internally.
+You can also map different allocations simultaneously regardless of whether they use the same `VkDeviceMemory` block.
+The way it's implemented is that the library always maps the entire memory block, not just the region of the allocation.
+For further details, see description of vmaMapMemory() function.
+Example:
+
+\code
+// Having these objects initialized:
+
+struct ConstantBuffer
+{
+    ...
+};
+ConstantBuffer constantBufferData;
+
+VmaAllocator allocator;
+VkBuffer constantBuffer;
+VmaAllocation constantBufferAllocation;
+
+// You can map and fill your buffer using following code:
+
+void* mappedData;
+vmaMapMemory(allocator, constantBufferAllocation, &mappedData);
+memcpy(mappedData, &constantBufferData, sizeof(constantBufferData));
+vmaUnmapMemory(allocator, constantBufferAllocation);
+\endcode
+
+When mapping, you may see a warning from Vulkan validation layer similar to this one:
+
+<i>Mapping an image with layout VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL can result in undefined behavior if this memory is used by the device. Only GENERAL or PREINITIALIZED should be used.</i>
+
+It happens because the library maps entire `VkDeviceMemory` block, where different
+types of images and buffers may end up together, especially on GPUs with unified memory like Intel.
+You can safely ignore it if you are sure you access only memory of the intended
+object that you wanted to map.
+
+
+\section memory_mapping_persistently_mapped_memory Persistently mapped memory
+
+Keeping your memory persistently mapped is generally OK in Vulkan.
+You don't need to unmap it before using its data on the GPU.
+The library provides a special feature designed for that:
+Allocations made with the #VMA_ALLOCATION_CREATE_MAPPED_BIT flag set in
+VmaAllocationCreateInfo::flags stay mapped all the time,
+so you can just access the CPU pointer to them at any time
+without needing to call any "map" or "unmap" function.
+Example:
+
+\code
+VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufCreateInfo.size = sizeof(ConstantBuffer);
+bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
+allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
+
+VkBuffer buf;
+VmaAllocation alloc;
+VmaAllocationInfo allocInfo;
+vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
+
+// Buffer is already mapped. You can access its memory.
+memcpy(allocInfo.pMappedData, &constantBufferData, sizeof(constantBufferData));
+\endcode
+
+There are some exceptions though, when you should consider mapping memory only for a short period of time:
+
+- When operating system is Windows 7 or 8.x (Windows 10 is not affected because it uses WDDM2),
+  device is discrete AMD GPU,
+  and memory type is the special 256 MiB pool of `DEVICE_LOCAL + HOST_VISIBLE` memory
+  (selected when you use #VMA_MEMORY_USAGE_CPU_TO_GPU),
+  then whenever a memory block allocated from this memory type stays mapped
+  for the time of any call to `vkQueueSubmit()` or `vkQueuePresentKHR()`, this
+  block is migrated by WDDM to system RAM, which degrades performance. It doesn't
+  matter if that particular memory block is actually used by the command buffer
+  being submitted.
+- On Mac/MoltenVK there is a known bug - [Issue #175](https://github.com/KhronosGroup/MoltenVK/issues/175)
+  which requires unmapping before GPU can see updated texture.
+- Keeping many large memory blocks mapped may impact performance or stability of some debugging tools.
+
+\section memory_mapping_cache_control Cache control
+
+Memory in Vulkan doesn't need to be unmapped before using it on the GPU,
+but unless a memory type has the `VK_MEMORY_PROPERTY_HOST_COHERENT_BIT` flag set,
+you need to manually invalidate the cache before reading from a mapped pointer
+and flush the cache after writing to it.
+Vulkan provides the following functions for this purpose: `vkFlushMappedMemoryRanges()`,
+`vkInvalidateMappedMemoryRanges()`, but this library provides more convenient
+functions that refer to a given allocation object: vmaFlushAllocation(),
+vmaInvalidateAllocation().
+
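+For illustration, a minimal sketch (assuming `allocator` and `alloc` refer to an already
+created #VmaAllocator and a `HOST_VISIBLE`, non-`HOST_COHERENT` #VmaAllocation, and reusing
+`constantBufferData` from the earlier mapping example):
+
+\code
+// Write through the mapped pointer, then flush the written range so the GPU sees it.
+void* mappedData;
+vmaMapMemory(allocator, alloc, &mappedData);
+memcpy(mappedData, &constantBufferData, sizeof(constantBufferData));
+vmaFlushAllocation(allocator, alloc, 0, VK_WHOLE_SIZE);
+
+// Before reading data written by the GPU, invalidate the range first.
+vmaInvalidateAllocation(allocator, alloc, 0, VK_WHOLE_SIZE);
+memcpy(&constantBufferData, mappedData, sizeof(constantBufferData));
+vmaUnmapMemory(allocator, alloc);
+\endcode
+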
+Regions of memory specified for flush/invalidate must be aligned to
+`VkPhysicalDeviceLimits::nonCoherentAtomSize`. This is automatically ensured by the library.
+In any memory type that is `HOST_VISIBLE` but not `HOST_COHERENT`, all allocations
+within blocks are aligned to this value, so their offsets are always multiples of
+`nonCoherentAtomSize` and two different allocations never share the same "line" of this size.
+
+Please note that memory allocated with #VMA_MEMORY_USAGE_CPU_ONLY is guaranteed to be `HOST_COHERENT`.
+
+Also, Windows drivers from all 3 PC GPU vendors (AMD, Intel, NVIDIA)
+currently provide `HOST_COHERENT` flag on all memory types that are
+`HOST_VISIBLE`, so on this platform you may not need to bother.
+
+\section memory_mapping_finding_if_memory_mappable Finding out if memory is mappable
+
+It may happen that your allocation ends up in memory that is `HOST_VISIBLE` (available for mapping)
+even though it wasn't explicitly requested.
+For example, the application may run on integrated graphics with unified memory (like Intel), or
+allocation from video memory might have failed, so the library chose system memory as a fallback.
+
+You can detect this case and map such allocation to access its memory on CPU directly,
+instead of launching a transfer operation.
+In order to do that: inspect `allocInfo.memoryType`, call vmaGetMemoryTypeProperties(),
+and look for `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT` flag in properties of that memory type.
+
+\code
+VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufCreateInfo.size = sizeof(ConstantBuffer);
+bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
+allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+
+VkBuffer buf;
+VmaAllocation alloc;
+VmaAllocationInfo allocInfo;
+vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
+
+VkMemoryPropertyFlags memFlags;
+vmaGetMemoryTypeProperties(allocator, allocInfo.memoryType, &memFlags);
+if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
+{
+    // Allocation ended up in mappable memory. You can map it and access it directly.
+    void* mappedData;
+    vmaMapMemory(allocator, alloc, &mappedData);
+    memcpy(mappedData, &constantBufferData, sizeof(constantBufferData));
+    vmaUnmapMemory(allocator, alloc);
+}
+else
+{
+    // Allocation ended up in non-mappable memory.
+    // You need to create CPU-side buffer in VMA_MEMORY_USAGE_CPU_ONLY and make a transfer.
+}
+\endcode
+
+You can even use #VMA_ALLOCATION_CREATE_MAPPED_BIT flag while creating allocations
+that are not necessarily `HOST_VISIBLE` (e.g. using #VMA_MEMORY_USAGE_GPU_ONLY).
+If the allocation ends up in memory type that is `HOST_VISIBLE`, it will be persistently mapped and you can use it directly.
+If not, the flag is just ignored.
+Example:
+
+\code
+VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufCreateInfo.size = sizeof(ConstantBuffer);
+bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
+allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
+
+VkBuffer buf;
+VmaAllocation alloc;
+VmaAllocationInfo allocInfo;
+vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
+
+if(allocInfo.pMappedData != nullptr)
+{
+    // Allocation ended up in mappable memory.
+    // It's persistently mapped. You can access it directly.
+    memcpy(allocInfo.pMappedData, &constantBufferData, sizeof(constantBufferData));
+}
+else
+{
+    // Allocation ended up in non-mappable memory.
+    // You need to create CPU-side buffer in VMA_MEMORY_USAGE_CPU_ONLY and make a transfer.
+}
+\endcode
+
+
+\page custom_memory_pools Custom memory pools
+
+A memory pool contains a number of `VkDeviceMemory` blocks.
+The library automatically creates and manages a default pool for each memory type available on the device.
+The default memory pool automatically grows in size.
+The size of allocated blocks is also variable and managed automatically.
+
+You can create a custom pool and allocate memory out of it.
+It can be useful if you want to:
+
+- Keep certain kind of allocations separate from others.
+- Enforce particular, fixed size of Vulkan memory blocks.
+- Limit maximum amount of Vulkan memory allocated for that pool.
+- Reserve minimum or fixed amount of Vulkan memory always preallocated for that pool.
+
+To use custom memory pools:
+
+-# Fill VmaPoolCreateInfo structure.
+-# Call vmaCreatePool() to obtain #VmaPool handle.
+-# When making an allocation, set VmaAllocationCreateInfo::pool to this handle.
+   You don't need to specify any other parameters of this structure, like `usage`.
+
+Example:
+
+\code
+// Create a pool that can have at most 2 blocks, 128 MiB each.
+VmaPoolCreateInfo poolCreateInfo = {};
+poolCreateInfo.memoryTypeIndex = ...
+poolCreateInfo.blockSize = 128ull * 1024 * 1024;
+poolCreateInfo.maxBlockCount = 2;
+
+VmaPool pool;
+vmaCreatePool(allocator, &poolCreateInfo, &pool);
+
+// Allocate a buffer out of it.
+VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufCreateInfo.size = 1024;
+bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.pool = pool;
+
+VkBuffer buf;
+VmaAllocation alloc;
+VmaAllocationInfo allocInfo;
+vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
+\endcode
+
+You have to free all allocations made from this pool before destroying it.
+
+\code
+vmaDestroyBuffer(allocator, buf, alloc);
+vmaDestroyPool(allocator, pool);
+\endcode
+
+\section custom_memory_pools_MemTypeIndex Choosing memory type index
+
+When creating a pool, you must explicitly specify memory type index.
+To find the one suitable for your buffers or images, you can use helper functions
+vmaFindMemoryTypeIndexForBufferInfo(), vmaFindMemoryTypeIndexForImageInfo().
+You need to provide structures with example parameters of buffers or images
+that you are going to create in that pool.
+
+\code
+VkBufferCreateInfo exampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+exampleBufCreateInfo.size = 1024; // Whatever.
+exampleBufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; // Change if needed.
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY; // Change if needed.
+
+uint32_t memTypeIndex;
+vmaFindMemoryTypeIndexForBufferInfo(allocator, &exampleBufCreateInfo, &allocCreateInfo, &memTypeIndex);
+
+VmaPoolCreateInfo poolCreateInfo = {};
+poolCreateInfo.memoryTypeIndex = memTypeIndex;
+// ...
+\endcode
+
+When creating buffers/images allocated in that pool, provide the following parameters:
+
+- `VkBufferCreateInfo`: Prefer to pass the same parameters as above.
+  Otherwise you risk creating resources in a memory type that is not suitable for them, which may result in undefined behavior.
+  Using different `VK_BUFFER_USAGE_` flags may work, but you shouldn't create images in a pool intended for buffers
+  or the other way around.
+- VmaAllocationCreateInfo: You don't need to pass the same parameters. Fill only the `pool` member.
+  Other members are ignored anyway.
+
+\section linear_algorithm Linear allocation algorithm
+
+Each Vulkan memory block managed by this library has accompanying metadata that
+keeps track of used and unused regions. By default, the metadata structure and
+algorithm try to find the best place for new allocations among free regions to
+optimize memory usage. This way you can allocate and free objects in any order.
+
+![Default allocation algorithm](../gfx/Linear_allocator_1_algo_default.png)
+
+Sometimes there is a need to use simpler, linear allocation algorithm. You can
+create custom pool that uses such algorithm by adding flag
+#VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT to VmaPoolCreateInfo::flags while creating
+#VmaPool object. Then an alternative metadata management is used. It always
+creates new allocations after last one and doesn't reuse free regions after
+allocations freed in the middle. It results in better allocation performance and
+less memory consumed by metadata.
+
+![Linear allocation algorithm](../gfx/Linear_allocator_2_algo_linear.png)
+
+With this one flag, you can create a custom pool that can be used in many ways:
+free-at-once, stack, double stack, and ring buffer. See below for details.
+
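+For example, creating such a pool might look like this (a minimal sketch; the
+`memTypeIndex` value is assumed to have been found as described in
+@ref custom_memory_pools_MemTypeIndex, and the block size is just an example value):
+
+\code
+VmaPoolCreateInfo poolCreateInfo = {};
+poolCreateInfo.memoryTypeIndex = memTypeIndex;
+poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
+poolCreateInfo.blockSize = 64ull * 1024 * 1024;
+poolCreateInfo.maxBlockCount = 1; // Single block - required for double stack and ring buffer usage.
+
+VmaPool pool;
+vmaCreatePool(allocator, &poolCreateInfo, &pool);
+\endcode
+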
+\subsection linear_algorithm_free_at_once Free-at-once
+
+In a pool that uses linear algorithm, you still need to free all the allocations
+individually, e.g. by using vmaFreeMemory() or vmaDestroyBuffer(). You can free
+them in any order. New allocations are always made after the last one - free space
+in the middle is not reused. However, when you release all the allocations and
+the pool becomes empty, allocation starts from the beginning again. This way you
+can use linear algorithm to speed up creation of allocations that you are going
+to release all at once.
+
+![Free-at-once](../gfx/Linear_allocator_3_free_at_once.png)
+
+This mode is also available for pools created with VmaPoolCreateInfo::maxBlockCount
+value that allows multiple memory blocks.
+
+\subsection linear_algorithm_stack Stack
+
+When you free an allocation that was created last, its space can be reused.
+Thanks to this, if you always release allocations in the order opposite to their
+creation (LIFO - Last In First Out), you can achieve behavior of a stack.
+
+![Stack](../gfx/Linear_allocator_4_stack.png)
+
+This mode is also available for pools created with VmaPoolCreateInfo::maxBlockCount
+value that allows multiple memory blocks.
+
+\subsection linear_algorithm_double_stack Double stack
+
+The space reserved by a custom pool with linear algorithm may be used by two
+stacks:
+
+- First, default one, growing up from offset 0.
+- Second, "upper" one, growing down from the end towards lower offsets.
+
+To make allocation from upper stack, add flag #VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT
+to VmaAllocationCreateInfo::flags.
+
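+A minimal sketch (assuming `pool` is a linear-algorithm pool with a single block,
+created as shown earlier, and `bufCreateInfo` is an already filled `VkBufferCreateInfo`):
+
+\code
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.pool = pool;
+// This allocation is placed at the upper end of the block and grows downwards.
+allocCreateInfo.flags = VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
+
+VkBuffer buf;
+VmaAllocation alloc;
+vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
+\endcode
+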
+![Double stack](../gfx/Linear_allocator_7_double_stack.png)
+
+Double stack is available only in pools with one memory block -
+VmaPoolCreateInfo::maxBlockCount must be 1. Otherwise behavior is undefined.
+
+When the two stacks' ends meet so there is not enough space between them for a
+new allocation, such allocation fails with usual
+`VK_ERROR_OUT_OF_DEVICE_MEMORY` error.
+
+\subsection linear_algorithm_ring_buffer Ring buffer
+
+When you free some allocations from the beginning and there is not enough free space
+for a new one at the end of a pool, allocator's "cursor" wraps around to the
+beginning and starts allocation there. Thanks to this, if you always release
+allocations in the same order as you created them (FIFO - First In First Out),
+you can achieve behavior of a ring buffer / queue.
+
+![Ring buffer](../gfx/Linear_allocator_5_ring_buffer.png)
+
+Pools with linear algorithm support [lost allocations](@ref lost_allocations) when used as ring buffer.
+If there is not enough free space for a new allocation, but existing allocations
+from the front of the queue can become lost, they become lost and the allocation
+succeeds.
+
+![Ring buffer with lost allocations](../gfx/Linear_allocator_6_ring_buffer_lost.png)
+
+Ring buffer is available only in pools with one memory block -
+VmaPoolCreateInfo::maxBlockCount must be 1. Otherwise behavior is undefined.
+
+\section buddy_algorithm Buddy allocation algorithm
+
+There is another allocation algorithm that can be used with custom pools, called
+"buddy". Its internal data structure is based on a tree of blocks, each having
+size that is a power of two and a half of its parent's size. When you want to
+allocate memory of certain size, a free node in the tree is located. If it's too
+large, it is recursively split into two halves (called "buddies"). However, if
+requested allocation size is not a power of two, the size of a tree node is
+aligned up to the nearest power of two and the remaining space is wasted. When
+two buddy nodes become free, they are merged back into one larger node.
+
+![Buddy allocator](../gfx/Buddy_allocator.png)
+
+The advantage of buddy allocation algorithm over default algorithm is faster
+allocation and deallocation, as well as smaller external fragmentation. The
+disadvantage is more wasted space (internal fragmentation).
+
+For more information, please read ["Buddy memory allocation" on Wikipedia](https://en.wikipedia.org/wiki/Buddy_memory_allocation)
+or other sources that describe this concept in general.
+
+To use buddy allocation algorithm with a custom pool, add flag
+#VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT to VmaPoolCreateInfo::flags while creating
+#VmaPool object.
+
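+A minimal sketch (memory type index chosen as described in
+@ref custom_memory_pools_MemTypeIndex; the block size is just an example value):
+
+\code
+VmaPoolCreateInfo poolCreateInfo = {};
+poolCreateInfo.memoryTypeIndex = memTypeIndex;
+poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
+poolCreateInfo.blockSize = 128ull * 1024 * 1024; // Preferably a power of two.
+
+VmaPool pool;
+vmaCreatePool(allocator, &poolCreateInfo, &pool);
+\endcode
+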
+Several limitations apply to pools that use buddy algorithm:
+
+- It is recommended to use VmaPoolCreateInfo::blockSize that is a power of two.
+  Otherwise, only largest power of two smaller than the size is used for
+  allocations. The remaining space always stays unused.
+- [Margins](@ref debugging_memory_usage_margins) and
+  [corruption detection](@ref debugging_memory_usage_corruption_detection)
+  don't work in such pools.
+- [Lost allocations](@ref lost_allocations) don't work in such pools. You can
+  use them, but they never become lost. Support may be added in the future.
+- [Defragmentation](@ref defragmentation) doesn't work with allocations made from
+  such pool.
+
+\page defragmentation Defragmentation
+
+Interleaved allocations and deallocations of many objects of varying size can
+cause fragmentation over time, which can lead to a situation where the library is unable
+to find a continuous range of free memory for a new allocation even though there is
+enough free space, just scattered across many small free ranges between existing
+allocations.
+
+To mitigate this problem, you can use the defragmentation feature:
+structure #VmaDefragmentationInfo2 and functions vmaDefragmentationBegin(), vmaDefragmentationEnd().
+Given a set of allocations,
+these functions can move them to compact used memory, ensure more continuous free
+space and possibly also free some `VkDeviceMemory` blocks.
+
+What the defragmentation does is:
+
+- Updates #VmaAllocation objects to point to new `VkDeviceMemory` and offset.
+  After allocation has been moved, its VmaAllocationInfo::deviceMemory and/or
+  VmaAllocationInfo::offset changes. You must query them again using
+  vmaGetAllocationInfo() if you need them.
+- Moves actual data in memory.
+
+What it doesn't do, so you need to do it yourself:
+
+- Recreate buffers and images that were bound to allocations that were defragmented and
+  bind them with their new places in memory.
+  You must use `vkDestroyBuffer()`, `vkDestroyImage()`,
+  `vkCreateBuffer()`, `vkCreateImage()` for that purpose and NOT vmaDestroyBuffer(),
+  vmaDestroyImage(), vmaCreateBuffer(), vmaCreateImage(), because you don't need to
+  destroy or create allocation objects!
+- Recreate views and update descriptors that point to these buffers and images.
+
+\section defragmentation_cpu Defragmenting CPU memory
+
+The following example demonstrates how you can run defragmentation on the CPU.
+Only allocations created in memory types that are `HOST_VISIBLE` can be defragmented.
+Others are ignored.
+
+The way it works is:
+
+- It temporarily maps entire memory blocks when necessary.
+- It moves data using `memmove()` function.
+
+\code
+// Given following variables already initialized:
+VkDevice device;
+VmaAllocator allocator;
+std::vector<VkBuffer> buffers;
+std::vector<VmaAllocation> allocations;
+
+
+const uint32_t allocCount = (uint32_t)allocations.size();
+std::vector<VkBool32> allocationsChanged(allocCount);
+
+VmaDefragmentationInfo2 defragInfo = {};
+defragInfo.allocationCount = allocCount;
+defragInfo.pAllocations = allocations.data();
+defragInfo.pAllocationsChanged = allocationsChanged.data();
+defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE; // No limit.
+defragInfo.maxCpuAllocationsToMove = UINT32_MAX; // No limit.
+
+VmaDefragmentationContext defragCtx;
+vmaDefragmentationBegin(allocator, &defragInfo, nullptr, &defragCtx);
+vmaDefragmentationEnd(allocator, defragCtx);
+
+for(uint32_t i = 0; i < allocCount; ++i)
+{
+    if(allocationsChanged[i])
+    {
+        // Destroy buffer that is immutably bound to memory region which is no longer valid.
+        vkDestroyBuffer(device, buffers[i], nullptr);
+
+        // Create new buffer with same parameters.
+        VkBufferCreateInfo bufferInfo = ...;
+        vkCreateBuffer(device, &bufferInfo, nullptr, &buffers[i]);
+            
+        // You can make dummy call to vkGetBufferMemoryRequirements here to silence validation layer warning.
+            
+        // Bind new buffer to new memory region. Data contained in it is already moved.
+        VmaAllocationInfo allocInfo;
+        vmaGetAllocationInfo(allocator, allocations[i], &allocInfo);
+        vkBindBufferMemory(device, buffers[i], allocInfo.deviceMemory, allocInfo.offset);
+    }
+}
+\endcode
+
+Setting VmaDefragmentationInfo2::pAllocationsChanged is optional.
+This output array tells whether particular allocation in VmaDefragmentationInfo2::pAllocations at the same index
+has been modified during defragmentation.
+You can pass null, but you then need to query every allocation passed to defragmentation
+for new parameters using vmaGetAllocationInfo() if you might need to recreate and rebind a buffer or image associated with it.
+
+If you use [Custom memory pools](@ref choosing_memory_type_custom_memory_pools),
+you can fill VmaDefragmentationInfo2::poolCount and VmaDefragmentationInfo2::pPools
+instead of VmaDefragmentationInfo2::allocationCount and VmaDefragmentationInfo2::pAllocations
+to defragment all allocations in given pools.
+You cannot use VmaDefragmentationInfo2::pAllocationsChanged in that case.
+You can also combine both methods.
+
+\section defragmentation_gpu Defragmenting GPU memory
+
+It is also possible to defragment allocations created in memory types that are not `HOST_VISIBLE`.
+To do that, you need to pass a command buffer that meets requirements as described in
+VmaDefragmentationInfo2::commandBuffer. The way it works is:
+
+- It creates temporary buffers and binds them to entire memory blocks when necessary.
+- It issues `vkCmdCopyBuffer()` to passed command buffer.
+
+Example:
+
+\code
+// Given following variables already initialized:
+VkDevice device;
+VmaAllocator allocator;
+VkCommandBuffer commandBuffer;
+std::vector<VkBuffer> buffers;
+std::vector<VmaAllocation> allocations;
+
+
+const uint32_t allocCount = (uint32_t)allocations.size();
+std::vector<VkBool32> allocationsChanged(allocCount);
+
+VkCommandBufferBeginInfo cmdBufBeginInfo = ...;
+vkBeginCommandBuffer(commandBuffer, &cmdBufBeginInfo);
+
+VmaDefragmentationInfo2 defragInfo = {};
+defragInfo.allocationCount = allocCount;
+defragInfo.pAllocations = allocations.data();
+defragInfo.pAllocationsChanged = allocationsChanged.data();
+defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE; // Notice it's "GPU" this time.
+defragInfo.maxGpuAllocationsToMove = UINT32_MAX; // Notice it's "GPU" this time.
+defragInfo.commandBuffer = commandBuffer;
+
+VmaDefragmentationContext defragCtx;
+vmaDefragmentationBegin(allocator, &defragInfo, nullptr, &defragCtx);
+
+vkEndCommandBuffer(commandBuffer);
+
+// Submit commandBuffer.
+// Wait for a fence that ensures commandBuffer execution finished.
+
+vmaDefragmentationEnd(allocator, defragCtx);
+
+for(uint32_t i = 0; i < allocCount; ++i)
+{
+    if(allocationsChanged[i])
+    {
+        // Destroy buffer that is immutably bound to memory region which is no longer valid.
+        vkDestroyBuffer(device, buffers[i], nullptr);
+
+        // Create new buffer with same parameters.
+        VkBufferCreateInfo bufferInfo = ...;
+        vkCreateBuffer(device, &bufferInfo, nullptr, &buffers[i]);
+            
+        // You can make dummy call to vkGetBufferMemoryRequirements here to silence validation layer warning.
+            
+        // Bind new buffer to new memory region. Data contained in it is already moved.
+        VmaAllocationInfo allocInfo;
+        vmaGetAllocationInfo(allocator, allocations[i], &allocInfo);
+        vkBindBufferMemory(device, buffers[i], allocInfo.deviceMemory, allocInfo.offset);
+    }
+}
+\endcode
+
+You can combine these two methods by specifying non-zero `maxGpu*` as well as `maxCpu*` parameters.
+The library automatically chooses best method to defragment each memory pool.
+
+You may try not to block your entire program waiting until defragmentation finishes,
+but instead do it in the background, as long as you carefully fulfill the requirements described
+for function vmaDefragmentationBegin().
+
+\section defragmentation_additional_notes Additional notes
+
+While using defragmentation, you may experience validation layer warnings, which you just need to ignore.
+See [Validation layer warnings](@ref general_considerations_validation_layer_warnings).
+
+If you defragment allocations bound to images, these images should be created with
+`VK_IMAGE_CREATE_ALIAS_BIT` flag, to make sure that new image created with same
+parameters and pointing to data copied to another memory region will interpret
+its contents consistently. Otherwise you may experience corrupted data on some
+implementations, e.g. due to different pixel swizzling used internally by the graphics driver.
+
+If you defragment allocations bound to images, new images to be bound to new
+memory region after defragmentation should be created with `VK_IMAGE_LAYOUT_PREINITIALIZED`
+and then transitioned to their original layout from before defragmentation using
+an image memory barrier.
+
+Please don't expect memory to be fully compacted after defragmentation.
+The algorithms inside are based on heuristics that try to maximize the number of Vulkan
+memory blocks that become totally empty (so they can be released), as well as to maximize the continuous
+empty space inside the remaining blocks, while minimizing the number and size of allocations that
+need to be moved. Some fragmentation may still remain - this is normal.
+
+\section defragmentation_custom_algorithm Writing custom defragmentation algorithm
+
+If you want to implement your own, custom defragmentation algorithm,
+there is infrastructure prepared for that,
+but it is not exposed through the library API - you need to hack its source code.
+Here are steps needed to do this:
+
+-# The main thing you need to do is define your own class derived from the base abstract
+   class `VmaDefragmentationAlgorithm` and implement your version of its pure virtual methods.
+   See definition and comments of this class for details.
+-# Your code needs to interact with device memory block metadata.
+   If you need more access to its data than is provided by its public interface,
+   declare your new class as a friend class, e.g. in class `VmaBlockMetadata_Generic`.
+-# If you want to create a flag that would enable your algorithm or pass some additional
+   flags to configure it, add them to `VmaDefragmentationFlagBits` and use them in
+   VmaDefragmentationInfo2::flags.
+-# Modify function `VmaBlockVectorDefragmentationContext::Begin` to create object
+   of your new class whenever needed.
+
+
+\page lost_allocations Lost allocations
+
+If your game oversubscribes video memory, it may work OK in previous-generation
+graphics APIs (DirectX 9, 10, 11, OpenGL) because resources are automatically
+paged to system RAM. In Vulkan you can't do that because when you run out of
+memory, an allocation just fails. If you have more data (e.g. textures) than can
+fit into VRAM and you don't need it all at once, you may want to upload resources to the
+GPU on demand and "push out" ones that have not been used for a long time to make room
+for the new ones, effectively using VRAM (or a certain memory pool) as a form of
+cache. Vulkan Memory Allocator can help you with that by supporting a concept of
+"lost allocations".
+
+To create an allocation that can become lost, include #VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT
+flag in VmaAllocationCreateInfo::flags. Before using a buffer or image bound to
+such an allocation in every new frame, you need to check that it hasn't become lost.
+To check it, call vmaTouchAllocation().
+If the allocation is lost, you should not use it or the buffer/image bound to it.
+You must not forget to destroy this allocation and this buffer/image.
+vmaGetAllocationInfo() can also be used for checking status of the allocation.
+Allocation is lost when returned VmaAllocationInfo::deviceMemory == `VK_NULL_HANDLE`.
+
+To create an allocation that can make some other allocations lost to make room
+for it, use #VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT flag. You will
+usually use both flags #VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT and
+#VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT at the same time.
+
+Warning! Current implementation uses quite naive, brute force algorithm,
+which can make allocation calls that use #VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT
+flag quite slow. A new, more optimal algorithm and data structure to speed this
+up is planned for the future.
+
+<b>Q: When interleaving creation of new allocations with usage of existing ones,
+how do you make sure that an allocation won't become lost while it's used in the
+current frame?</b>
+
+It is ensured because vmaTouchAllocation() / vmaGetAllocationInfo() not only returns allocation
+status/parameters and checks whether it's not lost, but when it's not, it also
+atomically marks it as used in the current frame, which makes it impossible to
+become lost in that frame. It uses lockless algorithm, so it works fast and
+doesn't involve locking any internal mutex.
+
+<b>Q: What if my allocation may still be in use by the GPU when it's rendering a
+previous frame while I already submit new frame on the CPU?</b>
+
+You can make sure that allocations "touched" by vmaTouchAllocation() / vmaGetAllocationInfo() will not
+become lost for a number of additional frames back from the current one by
+specifying this number as VmaAllocatorCreateInfo::frameInUseCount (for default
+memory pool) and VmaPoolCreateInfo::frameInUseCount (for custom pool).
+
+<b>Q: How do you inform the library when new frame starts?</b>
+
+You need to call function vmaSetCurrentFrameIndex().
+
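+For instance, a minimal sketch (assuming `frameIndex` is a frame counter that your
+application maintains and increments every frame):
+
+\code
+vmaSetCurrentFrameIndex(allocator, frameIndex);
+\endcode
+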
+Example code:
+
+\code
+struct MyBuffer
+{
+    VkBuffer m_Buf = nullptr;
+    VmaAllocation m_Alloc = nullptr;
+
+    // Called when the buffer is really needed in the current frame.
+    void EnsureBuffer();
+};
+
+void MyBuffer::EnsureBuffer()
+{
+    // Buffer has been created.
+    if(m_Buf != VK_NULL_HANDLE)
+    {
+        // Check if its allocation is not lost + mark it as used in current frame.
+        if(vmaTouchAllocation(allocator, m_Alloc))
+        {
+            // It's all OK - safe to use m_Buf.
+            return;
+        }
+    }
+
+    // Buffer doesn't exist yet or is lost - destroy and recreate it.
+
+    vmaDestroyBuffer(allocator, m_Buf, m_Alloc);
+
+    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+    bufCreateInfo.size = 1024;
+    bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+
+    VmaAllocationCreateInfo allocCreateInfo = {};
+    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
+    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
+        VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
+
+    vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &m_Buf, &m_Alloc, nullptr);
+}
+\endcode
+
+When using lost allocations, you may see some Vulkan validation layer warnings
+about overlapping regions of memory bound to different kinds of buffers and
+images. This is still valid as long as you implement proper handling of lost
+allocations (like in the example above) and don't use them.
+
+You can create an allocation that is already in lost state from the beginning using function
+vmaCreateLostAllocation(). It may be useful if you need a "dummy" allocation that is not null.
+
+You can call function vmaMakePoolAllocationsLost() to set all eligible allocations
+in a specified custom pool to lost state.
+Allocations that have been "touched" in current frame or VmaPoolCreateInfo::frameInUseCount frames back
+cannot become lost.
+
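+A minimal sketch of these two helpers (assuming `allocator` and a custom `pool` already exist):
+
+\code
+// A "dummy" allocation that is already lost, but not null.
+VmaAllocation dummyAlloc;
+vmaCreateLostAllocation(allocator, &dummyAlloc);
+
+// Mark all eligible allocations in the pool as lost.
+size_t lostCount = 0;
+vmaMakePoolAllocationsLost(allocator, pool, &lostCount);
+\endcode
+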
+<b>Q: Can I touch allocation that cannot become lost?</b>
+
+Yes, although it has no visible effect.
+Calls to vmaGetAllocationInfo() and vmaTouchAllocation() update last use frame index
+also for allocations that cannot become lost, but the only way to observe it is to dump
+internal allocator state using vmaBuildStatsString().
+You can use this feature for debugging purposes to explicitly mark allocations that you use
+in current frame and then analyze JSON dump to see for how long each allocation stays unused.
+
+
+\page statistics Statistics
+
+This library contains functions that return information about its internal state,
+especially the amount of memory allocated from Vulkan.
+Please keep in mind that these functions need to traverse all internal data structures
+to gather this information, so they may be quite time-consuming.
+Don't call them too often.
+
+\section statistics_numeric_statistics Numeric statistics
+
+You can query for overall statistics of the allocator using function vmaCalculateStats().
+Information is returned using structure #VmaStats.
+It contains #VmaStatInfo - number of allocated blocks, number of allocations
+(occupied ranges in these blocks), number of unused (free) ranges in these blocks,
+number of bytes used and unused (but still allocated from Vulkan) and other information.
+These statistics are summed across memory heaps and memory types, and totaled for the whole allocator.
+
+You can query for statistics of a custom pool using function vmaGetPoolStats().
+Information is returned using structure #VmaPoolStats.
+
+You can query for information about a specific allocation using function vmaGetAllocationInfo().
+It fills structure #VmaAllocationInfo.
+
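+A minimal sketch of the first two calls (assuming `allocator` and a custom `pool` already exist):
+
+\code
+VmaStats stats;
+vmaCalculateStats(allocator, &stats);
+printf("Used bytes (all heaps): %llu\n", (unsigned long long)stats.total.usedBytes);
+
+VmaPoolStats poolStats;
+vmaGetPoolStats(allocator, pool, &poolStats);
+printf("Allocations in pool: %zu\n", poolStats.allocationCount);
+\endcode
+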
+\section statistics_json_dump JSON dump
+
+You can dump internal state of the allocator to a string in JSON format using function vmaBuildStatsString().
+The result is guaranteed to be correct JSON.
+It uses ANSI encoding.
+Any strings provided by user (see [Allocation names](@ref allocation_names))
+are copied as-is and properly escaped for JSON, so if they use UTF-8, ISO-8859-2 or any other encoding,
+this JSON string can be treated as using this encoding.
+It must be freed using function vmaFreeStatsString().
+
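+A minimal sketch (the last parameter enables the detailed map of memory blocks):
+
+\code
+char* statsString = nullptr;
+vmaBuildStatsString(allocator, &statsString, VK_TRUE);
+// Write statsString to a file or log here...
+vmaFreeStatsString(allocator, statsString);
+\endcode
+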
+The format of this JSON string is not part of the official documentation of the library,
+but it will not change in a backward-incompatible way without an increase of the library's major version number
+and an appropriate mention in the changelog.
+
+The JSON string contains all the data that can be obtained using vmaCalculateStats().
+It can also contain detailed map of allocated memory blocks and their regions -
+free and occupied by allocations.
+This allows you, for example, to visualize the memory or assess fragmentation.
+
+
+\page allocation_annotation Allocation names and user data
+
+\section allocation_user_data Allocation user data
+
+You can annotate allocations with your own information, e.g. for debugging purposes.
+To do that, fill VmaAllocationCreateInfo::pUserData field when creating
+an allocation. It's an opaque `void*` pointer. You can use it e.g. as a pointer,
+some handle, index, key, ordinal number or any other value that would associate
+the allocation with your custom metadata.
+
+\code
+VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+// Fill bufferInfo...
+
+MyBufferMetadata* pMetadata = CreateBufferMetadata();
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
+allocCreateInfo.pUserData = pMetadata;
+
+VkBuffer buffer;
+VmaAllocation allocation;
+vmaCreateBuffer(allocator, &bufferInfo, &allocCreateInfo, &buffer, &allocation, nullptr);
+\endcode
+
+The pointer may be later retrieved as VmaAllocationInfo::pUserData:
+
+\code
+VmaAllocationInfo allocInfo;
+vmaGetAllocationInfo(allocator, allocation, &allocInfo);
+MyBufferMetadata* pMetadata = (MyBufferMetadata*)allocInfo.pUserData;
+\endcode
+
+It can also be changed using function vmaSetAllocationUserData().
+
+Values of (non-zero) allocations' `pUserData` are printed in JSON report created by
+vmaBuildStatsString(), in hexadecimal form.
+
+\section allocation_names Allocation names
+
+There is alternative mode available where `pUserData` pointer is used to point to
+a null-terminated string, giving a name to the allocation. To use this mode,
+set #VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT flag in VmaAllocationCreateInfo::flags.
+Then `pUserData` passed as VmaAllocationCreateInfo::pUserData or argument to
+vmaSetAllocationUserData() must be either null or pointer to a null-terminated string.
+The library creates internal copy of the string, so the pointer you pass doesn't need
+to be valid for whole lifetime of the allocation. You can free it after the call.
+
+\code
+VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
+// Fill imageInfo...
+
+std::string imageName = "Texture: ";
+imageName += fileName;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
+allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
+allocCreateInfo.pUserData = imageName.c_str();
+
+VkImage image;
+VmaAllocation allocation;
+vmaCreateImage(allocator, &imageInfo, &allocCreateInfo, &image, &allocation, nullptr);
+\endcode
+
+The value of the allocation's `pUserData` pointer will then be different from the one
+you passed when setting the allocation's name - it points to a buffer managed
+internally that holds a copy of the string.
+
+\code
+VmaAllocationInfo allocInfo;
+vmaGetAllocationInfo(allocator, allocation, &allocInfo);
+const char* imageName = (const char*)allocInfo.pUserData;
+printf("Image name: %s\n", imageName);
+\endcode
+
+That string is also printed in JSON report created by vmaBuildStatsString().
+
+
+\page debugging_memory_usage Debugging incorrect memory usage
+
+If you suspect a bug with memory usage, like usage of uninitialized memory or
+memory being overwritten out of bounds of an allocation,
+you can use debug features of this library to verify this.
+
+\section debugging_memory_usage_initialization Memory initialization
+
+If you experience a bug with incorrect and nondeterministic data in your program and you suspect uninitialized memory to be used,
+you can enable automatic memory initialization to verify this.
+To do it, define macro `VMA_DEBUG_INITIALIZE_ALLOCATIONS` to 1.
+
+\code
+#define VMA_DEBUG_INITIALIZE_ALLOCATIONS 1
+#include "vk_mem_alloc.h"
+\endcode
+
+It makes memory of all new allocations initialized to bit pattern `0xDCDCDCDC`.
+Before an allocation is destroyed, its memory is filled with bit pattern `0xEFEFEFEF`.
+Memory is automatically mapped and unmapped if necessary.
+
+If you find these values while debugging your program, chances are good that you incorrectly
+read Vulkan memory that is allocated but not initialized, or already freed, respectively.
+
+Memory initialization works only with memory types that are `HOST_VISIBLE`.
+It works also with dedicated allocations.
+It doesn't work with allocations created with #VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT flag,
+as they cannot be mapped.
+
+\section debugging_memory_usage_margins Margins
+
+By default, allocations are laid out in memory blocks next to each other if possible
+(considering required alignment, `bufferImageGranularity`, and `nonCoherentAtomSize`).
+
+![Allocations without margin](../gfx/Margins_1.png)
+
+Define macro `VMA_DEBUG_MARGIN` to some non-zero value (e.g. 16) to enforce specified
+number of bytes as a margin before and after every allocation.
+
+\code
+#define VMA_DEBUG_MARGIN 16
+#include "vk_mem_alloc.h"
+\endcode
+
+![Allocations with margin](../gfx/Margins_2.png)
+
+If your bug goes away after enabling margins, it means it may be caused by memory
+being overwritten outside of allocation boundaries. It is not 100% certain though.
+Change in application behavior may also be caused by different order and distribution
+of allocations across memory blocks after margins are applied.
+
+The margin is also applied before the first and after the last allocation in a block.
+Only one margin is inserted between two adjacent allocations.
+
+Margins work with all types of memory.
+
+Margins are applied only to allocations made out of memory blocks and not to dedicated
+allocations, which have their own memory block of a specific size.
+They are thus not applied to allocations made with the #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT flag
+or to those automatically placed in dedicated allocations, e.g. due to their
+large size or as recommended by the VK_KHR_dedicated_allocation extension.
+Margins are also not active in custom pools created with #VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT flag.
+
+Margins appear in [JSON dump](@ref statistics_json_dump) as part of free space.
+
+Note that enabling margins increases memory usage and fragmentation.
+
+\section debugging_memory_usage_corruption_detection Corruption detection
+
+You can additionally define macro `VMA_DEBUG_DETECT_CORRUPTION` to 1 to enable validation
+of contents of the margins.
+
+\code
+#define VMA_DEBUG_MARGIN 16
+#define VMA_DEBUG_DETECT_CORRUPTION 1
+#include "vk_mem_alloc.h"
+\endcode
+
+When this feature is enabled, the number of bytes specified as `VMA_DEBUG_MARGIN`
+(it must be a multiple of 4) before and after every allocation is filled with a magic number.
+This idea is also known as a "canary".
+Memory is automatically mapped and unmapped if necessary.
+
+This number is validated automatically when the allocation is destroyed.
+If it's not equal to the expected value, `VMA_ASSERT()` is executed.
+This means that either the CPU or the GPU overwrote the memory outside the boundaries of the allocation,
+which indicates a serious bug.
+
+You can also explicitly request checking margins of all allocations in all memory blocks
+that belong to specified memory types by using function vmaCheckCorruption(),
+or in memory blocks that belong to specified custom pool, by using function 
+vmaCheckPoolCorruption().
+
+Margin validation (corruption detection) works only for memory types that are
+`HOST_VISIBLE` and `HOST_COHERENT`.
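+
+A short sketch of triggering the checks explicitly. It assumes `pool` is a custom pool you
+created earlier; passing `UINT32_MAX` as the bit mask requests the check for all memory types:
+
+\code
+// Check margins of allocations in all memory blocks of all memory types:
+VkResult res = vmaCheckCorruption(allocator, UINT32_MAX);
+// VK_ERROR_FEATURE_NOT_PRESENT means corruption detection is not enabled for these memory types.
+
+// Or only in blocks of a specific custom pool:
+res = vmaCheckPoolCorruption(allocator, pool);
+\endcode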
+
+
+\page record_and_replay Record and replay
+
+\section record_and_replay_introduction Introduction
+
+While using the library, the sequence of calls to its functions together with their
+parameters can be recorded to a file and later replayed using a standalone player
+application. It can be useful to:
+
+- Test correctness - check whether the same sequence of calls causes a crash or
+  failures on a target platform.
+- Gather statistics - see number of allocations, peak memory usage, number of
+  calls etc.
+- Benchmark performance - see how much time it takes to replay the whole
+  sequence.
+
+\section record_and_replay_usage Usage
+
+<b>To record a sequence of calls to a file:</b> Fill in the
+VmaAllocatorCreateInfo::pRecordSettings member while creating the #VmaAllocator
+object. The file is opened and written to during the whole lifetime of the allocator.
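+
+A minimal sketch of enabling recording (the file path is just an example):
+
+\code
+VmaRecordSettings recordSettings = {};
+recordSettings.flags = VMA_RECORD_FLUSH_AFTER_CALL_BIT; // optional - useful if the app may crash
+recordSettings.pFilePath = "MyRecording.csv";
+
+VmaAllocatorCreateInfo allocatorInfo = {};
+// Fill physicalDevice, device...
+allocatorInfo.pRecordSettings = &recordSettings;
+
+VmaAllocator allocator;
+vmaCreateAllocator(&allocatorInfo, &allocator);
+\endcode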
+
+<b>To replay a file:</b> Use VmaReplay - a standalone command-line program.
+A precompiled binary can be found in the "bin" directory.
+Its source can be found in the "src/VmaReplay" directory.
+Its project is generated by Premake.
+The command line syntax is printed when the program is launched without parameters.
+Basic usage:
+
+    VmaReplay.exe MyRecording.csv
+
+<b>Documentation of file format</b> can be found in file: "docs/Recording file format.md".
+It's a human-readable text file in CSV format (Comma-Separated Values).
+
+\section record_and_replay_additional_considerations Additional considerations
+
+- Replaying file that was recorded on a different GPU (with different parameters
+  like `bufferImageGranularity`, `nonCoherentAtomSize`, and especially different
+  set of memory heaps and types) may give different performance and memory usage
+  results, as well as issue some warnings and errors.
+- Current implementation of recording in VMA, as well as VmaReplay application, is
+  coded and tested only on Windows. Inclusion of recording code is driven by
+  `VMA_RECORDING_ENABLED` macro. Support for other platforms should be easy to
+  add. Contributions are welcomed.
+- Currently calls to vmaDefragment() function are not recorded.
+
+
+\page usage_patterns Recommended usage patterns
+
+See also slides from talk:
+[Sawicki, Adam. Advanced Graphics Techniques Tutorial: Memory management in Vulkan and DX12. Game Developers Conference, 2018](https://www.gdcvault.com/play/1025458/Advanced-Graphics-Techniques-Tutorial-New)
+
+
+\section usage_patterns_simple Simple patterns
+
+\subsection usage_patterns_simple_render_targets Render targets
+
+<b>When:</b>
+Any resources that you frequently write and read on GPU,
+e.g. images used as color attachments (aka "render targets"), depth-stencil attachments,
+images/buffers used as storage image/buffer (aka "Unordered Access View (UAV)").
+
+<b>What to do:</b>
+Create them in video memory that is fastest to access from GPU using
+#VMA_MEMORY_USAGE_GPU_ONLY.
+
+Consider using [VK_KHR_dedicated_allocation](@ref vk_khr_dedicated_allocation) extension
+and/or manually creating them as dedicated allocations using #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT,
+especially if they are large or if you plan to destroy and recreate them e.g. when
+display resolution changes.
+Prefer to create such resources first and all other GPU resources (like textures and vertex buffers) later.
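+
+A minimal sketch of creating such an attachment as a dedicated, GPU-only allocation
+(the image parameters are left out):
+
+\code
+VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
+// Fill imageInfo for e.g. a full-screen color attachment...
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
+allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
+
+VkImage image;
+VmaAllocation allocation;
+vmaCreateImage(allocator, &imageInfo, &allocCreateInfo, &image, &allocation, nullptr);
+\endcode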
+
+\subsection usage_patterns_simple_immutable_resources Immutable resources
+
+<b>When:</b>
+Any resources that you fill on CPU only once (aka "immutable") or infrequently
+and then read frequently on GPU,
+e.g. textures, vertex and index buffers, constant buffers that don't change often.
+
+<b>What to do:</b>
+Create them in video memory that is fastest to access from GPU using
+#VMA_MEMORY_USAGE_GPU_ONLY.
+
+To initialize content of such resource, create a CPU-side (aka "staging") copy of it
+in system memory - #VMA_MEMORY_USAGE_CPU_ONLY, map it, fill it,
+and submit a transfer from it to the GPU resource.
+You can keep the staging copy if you need it for another upload transfer in the future.
+If you don't, you can destroy it or reuse this buffer for uploading a different resource
+after the transfer finishes.
+
+Prefer to create just buffers in system memory rather than images, even for uploading textures.
+Use `vkCmdCopyBufferToImage()`.
+Don't use images with `VK_IMAGE_TILING_LINEAR`.
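+
+A sketch of the upload path described above. `textureData` and `textureDataSize` are
+hypothetical names for your source data; command buffer recording and submission are omitted:
+
+\code
+// GPU-side texture:
+VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
+// Fill imageInfo...
+VmaAllocationCreateInfo imageAllocCreateInfo = {};
+imageAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
+VkImage image;
+VmaAllocation imageAlloc;
+vmaCreateImage(allocator, &imageInfo, &imageAllocCreateInfo, &image, &imageAlloc, nullptr);
+
+// CPU-side staging buffer, persistently mapped:
+VkBufferCreateInfo stagingInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+stagingInfo.size = textureDataSize;
+stagingInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
+VmaAllocationCreateInfo stagingAllocCreateInfo = {};
+stagingAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
+stagingAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
+VkBuffer stagingBuffer;
+VmaAllocation stagingAlloc;
+VmaAllocationInfo stagingAllocInfo;
+vmaCreateBuffer(allocator, &stagingInfo, &stagingAllocCreateInfo,
+    &stagingBuffer, &stagingAlloc, &stagingAllocInfo);
+
+memcpy(stagingAllocInfo.pMappedData, textureData, textureDataSize);
+// Then record vkCmdCopyBufferToImage() into a command buffer and submit it...
+\endcode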
+
+\subsection usage_patterns_dynamic_resources Dynamic resources
+
+<b>When:</b>
+Any resources that change frequently (aka "dynamic"), e.g. every frame or every draw call,
+written on CPU, read on GPU.
+
+<b>What to do:</b>
+Create them using #VMA_MEMORY_USAGE_CPU_TO_GPU.
+You can map them and write to them directly on the CPU, as well as read from them on the GPU.
+
+This is a more complex situation. Different solutions are possible,
+and the best one depends on the specific GPU type, but you can use this simple approach to start with.
+Prefer to write to such resource sequentially (e.g. using `memcpy`).
+Don't perform random access or any reads from it on CPU, as it may be very slow.
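+
+A sketch of a per-frame uniform buffer created this way. `MyUniforms`/`myUniforms` are
+hypothetical names for your data:
+
+\code
+VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufferInfo.size = sizeof(MyUniforms);
+bufferInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
+allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT; // keep it persistently mapped
+
+VkBuffer buffer;
+VmaAllocation allocation;
+VmaAllocationInfo allocInfo;
+vmaCreateBuffer(allocator, &bufferInfo, &allocCreateInfo, &buffer, &allocation, &allocInfo);
+
+// Every frame/draw call: write new data sequentially, never read it back on the CPU.
+memcpy(allocInfo.pMappedData, &myUniforms, sizeof(MyUniforms));
+\endcode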
+
+\subsection usage_patterns_readback Readback
+
+<b>When:</b>
+Resources that contain data written by GPU that you want to read back on CPU,
+e.g. results of some computations.
+
+<b>What to do:</b>
+Create them using #VMA_MEMORY_USAGE_GPU_TO_CPU.
+You can write to them directly on GPU, as well as map and read them on CPU.
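+
+A sketch of reading back GPU results. `results` and `resultSize` are hypothetical names;
+synchronization with the GPU (e.g. waiting on a fence) is omitted:
+
+\code
+VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufferInfo.size = resultSize;
+bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_TO_CPU;
+
+VkBuffer buffer;
+VmaAllocation allocation;
+vmaCreateBuffer(allocator, &bufferInfo, &allocCreateInfo, &buffer, &allocation, nullptr);
+
+// ...let the GPU write to the buffer and wait for that work to finish...
+
+void* mappedData;
+vmaMapMemory(allocator, allocation, &mappedData);
+memcpy(results, mappedData, resultSize);
+vmaUnmapMemory(allocator, allocation);
+\endcode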
+
+\section usage_patterns_advanced Advanced patterns
+
+\subsection usage_patterns_integrated_graphics Detecting integrated graphics
+
+You can support integrated graphics (like Intel HD Graphics, AMD APU) better
+by detecting it in Vulkan.
+To do it, call `vkGetPhysicalDeviceProperties()`, inspect
+`VkPhysicalDeviceProperties::deviceType` and look for `VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU`.
+When you find it, you can assume that memory is unified and all memory types are comparably fast
+to access from GPU, regardless of `VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT`.
+
+You can then sum up sizes of all available memory heaps and treat them as useful for
+your GPU resources, instead of only `DEVICE_LOCAL` ones.
+You can also prefer to create your resources in memory types that are `HOST_VISIBLE` to map them
+directly instead of submitting explicit transfer (see below).
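+
+A minimal sketch of the check, assuming `physicalDevice` is the `VkPhysicalDevice` you
+will create the allocator for:
+
+\code
+VkPhysicalDeviceProperties props;
+vkGetPhysicalDeviceProperties(physicalDevice, &props);
+const bool isIntegratedGpu = props.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
+// If isIntegratedGpu is true, you may prefer HOST_VISIBLE memory types even for GPU resources
+// and map them directly instead of going through a staging copy.
+\endcode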
+
+\subsection usage_patterns_direct_vs_transfer Direct access versus transfer
+
+For resources that you frequently write on CPU and read on GPU, many solutions are possible:
+
+-# Create one copy in video memory using #VMA_MEMORY_USAGE_GPU_ONLY,
+   second copy in system memory using #VMA_MEMORY_USAGE_CPU_ONLY and submit an explicit transfer each time.
+-# Create just single copy using #VMA_MEMORY_USAGE_CPU_TO_GPU, map it and fill it on CPU,
+   read it directly on GPU.
+-# Create just single copy using #VMA_MEMORY_USAGE_CPU_ONLY, map it and fill it on CPU,
+   read it directly on GPU.
+
+Which solution is the most efficient depends on your resource and especially on the GPU.
+It is best to measure it and then make the decision.
+Some general recommendations:
+
+- On integrated graphics use (2) or (3) to avoid unnecessary time and memory overhead
+  related to using a second copy and making transfer.
+- For small resources (e.g. constant buffers) use (2).
+  Discrete AMD cards have special 256 MiB pool of video memory that is directly mappable.
+  Even if the resource ends up in system memory, its data may be cached on GPU after first
+  fetch over PCIe bus.
+- For larger resources (e.g. textures), decide between (1) and (2).
+  You may want to differentiate NVIDIA and AMD, e.g. by looking for memory type that is
+  both `DEVICE_LOCAL` and `HOST_VISIBLE`. When you find it, use (2), otherwise use (1).
+
+Similarly, for resources that you frequently write on GPU and read on CPU, multiple
+solutions are possible:
+
+-# Create one copy in video memory using #VMA_MEMORY_USAGE_GPU_ONLY,
+   second copy in system memory using #VMA_MEMORY_USAGE_GPU_TO_CPU and submit an explicit transfer each time.
+-# Create just single copy using #VMA_MEMORY_USAGE_GPU_TO_CPU, write to it directly on GPU,
+   map it and read it on CPU.
+
+You should take some measurements to decide which option is faster in case of your specific
+resource.
+
+If you don't want to specialize your code for specific types of GPUs, you can still make
+a simple optimization for cases when your resource ends up in mappable memory: use it
+directly in that case instead of creating a CPU-side staging copy.
+For details see [Finding out if memory is mappable](@ref memory_mapping_finding_if_memory_mappable).
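+
+A short sketch of that optimization. `myData`/`myDataSize` are hypothetical names;
+if the memory type is not `HOST_COHERENT`, you would also need to flush it after writing:
+
+\code
+VmaAllocationInfo allocInfo;
+vmaGetAllocationInfo(allocator, allocation, &allocInfo);
+
+VkMemoryPropertyFlags memFlags;
+vmaGetMemoryTypeProperties(allocator, allocInfo.memoryType, &memFlags);
+
+if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
+{
+    // The memory is mappable - write to it directly.
+    void* mappedData;
+    vmaMapMemory(allocator, allocation, &mappedData);
+    memcpy(mappedData, myData, myDataSize);
+    vmaUnmapMemory(allocator, allocation);
+}
+else
+{
+    // Not mappable - go through a staging buffer and an explicit transfer.
+}
+\endcode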
+
+
+\page configuration Configuration
+
+Please check "CONFIGURATION SECTION" in the code to find macros that you can define
+before each include of this file or change directly in this file to provide
+your own implementation of basic facilities like assert, `min()` and `max()` functions,
+mutex, atomic etc.
+The library uses its own implementation of containers by default, but you can switch to using
+STL containers instead.
+
+\section config_Vulkan_functions Pointers to Vulkan functions
+
+The library uses Vulkan functions straight from the `vulkan.h` header by default.
+If you want to provide your own pointers to these functions, e.g. fetched using
+`vkGetInstanceProcAddr()` and `vkGetDeviceProcAddr()`:
+
+-# Define `VMA_STATIC_VULKAN_FUNCTIONS 0`.
+-# Provide valid pointers through VmaAllocatorCreateInfo::pVulkanFunctions, as shown in the sketch below.
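+
+A sketch of the second step, assuming `instance` and `device` are your Vulkan handles.
+Only a few members are shown - every member of #VmaVulkanFunctions must be filled the same way:
+
+\code
+VmaVulkanFunctions vulkanFunctions = {};
+vulkanFunctions.vkGetPhysicalDeviceProperties =
+    (PFN_vkGetPhysicalDeviceProperties)vkGetInstanceProcAddr(instance, "vkGetPhysicalDeviceProperties");
+vulkanFunctions.vkAllocateMemory =
+    (PFN_vkAllocateMemory)vkGetDeviceProcAddr(device, "vkAllocateMemory");
+vulkanFunctions.vkFreeMemory =
+    (PFN_vkFreeMemory)vkGetDeviceProcAddr(device, "vkFreeMemory");
+// ...fill all remaining members of VmaVulkanFunctions...
+
+VmaAllocatorCreateInfo allocatorInfo = {};
+// Fill physicalDevice, device...
+allocatorInfo.pVulkanFunctions = &vulkanFunctions;
+\endcode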
+
+\section custom_memory_allocator Custom host memory allocator
+
+If you use a custom allocator for CPU memory rather than the default C++ operators `new`
+and `delete`, you can make this library use your allocator as well
+by filling the optional member VmaAllocatorCreateInfo::pAllocationCallbacks. These
+functions will be passed to Vulkan, as well as used by the library itself to
+make any CPU-side allocations.
+
+\section allocation_callbacks Device memory allocation callbacks
+
+The library makes calls to `vkAllocateMemory()` and `vkFreeMemory()` internally.
+You can set up callbacks to be informed about these calls, e.g. for the purpose
+of gathering some statistics. To do it, fill the optional member
+VmaAllocatorCreateInfo::pDeviceMemoryCallbacks.
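+
+A sketch of such callbacks. The function names are only examples; their signatures must match
+PFN_vmaAllocateDeviceMemoryFunction and PFN_vmaFreeDeviceMemoryFunction:
+
+\code
+void VKAPI_PTR MyVmaAllocateCallback(
+    VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
+{
+    printf("vkAllocateMemory: type %u, %llu bytes\n", (unsigned)memoryType, (unsigned long long)size);
+}
+
+void VKAPI_PTR MyVmaFreeCallback(
+    VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
+{
+    printf("vkFreeMemory: type %u, %llu bytes\n", (unsigned)memoryType, (unsigned long long)size);
+}
+
+// While filling VmaAllocatorCreateInfo:
+VmaDeviceMemoryCallbacks deviceMemoryCallbacks = {};
+deviceMemoryCallbacks.pfnAllocate = MyVmaAllocateCallback;
+deviceMemoryCallbacks.pfnFree = MyVmaFreeCallback;
+allocatorInfo.pDeviceMemoryCallbacks = &deviceMemoryCallbacks;
+\endcode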
+
+\section heap_memory_limit Device heap memory limit
+
+If you want to test how your program behaves with a limited amount of Vulkan device
+memory available, without switching your graphics card to one that really has
+smaller VRAM, you can use a feature of this library intended for this purpose.
+To do it, fill the optional member VmaAllocatorCreateInfo::pHeapSizeLimit.
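+
+A sketch of setting a limit. Which heap index is your `DEVICE_LOCAL` heap varies per device,
+so heap 0 below is only an example - inspect `memProps.memoryHeaps[i].flags` to find the right one:
+
+\code
+VkPhysicalDeviceMemoryProperties memProps;
+vkGetPhysicalDeviceMemoryProperties(physicalDevice, &memProps);
+
+std::vector<VkDeviceSize> heapSizeLimits(memProps.memoryHeapCount, VK_WHOLE_SIZE);
+heapSizeLimits[0] = 512ull * 1024 * 1024; // pretend this heap has only 512 MiB
+
+VmaAllocatorCreateInfo allocatorInfo = {};
+// Fill physicalDevice, device...
+allocatorInfo.pHeapSizeLimit = heapSizeLimits.data();
+\endcode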
+
+
+
+\page vk_khr_dedicated_allocation VK_KHR_dedicated_allocation
+
+VK_KHR_dedicated_allocation is a Vulkan extension which can be used to improve
+performance on some GPUs. It augments the Vulkan API with the possibility to query
+the driver whether it prefers a particular buffer or image to have its own, dedicated
+allocation (separate `VkDeviceMemory` block) for better efficiency - to be able
+to do some internal optimizations.
+
+The extension is supported by this library. It will be used automatically when
+enabled. To enable it:
+
+1 . When creating the Vulkan device, check if the following 2 device extensions are
+supported (call `vkEnumerateDeviceExtensionProperties()`).
+If yes, enable them (fill `VkDeviceCreateInfo::ppEnabledExtensionNames`).
+
+- VK_KHR_get_memory_requirements2
+- VK_KHR_dedicated_allocation
+
+If you enabled these extensions:
+
+2 . Use the #VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT flag when creating
+your #VmaAllocator object to inform the library that you enabled the required extensions
+and you want the library to use them.
+
+\code
+allocatorInfo.flags |= VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT;
+
+vmaCreateAllocator(&allocatorInfo, &allocator);
+\endcode
+
+That's all. The extension will be automatically used whenever you create a
+buffer using vmaCreateBuffer() or image using vmaCreateImage().
+
+When using the extension together with Vulkan Validation Layer, you will receive
+warnings like this:
+
+    vkBindBufferMemory(): Binding memory to buffer 0x33 but vkGetBufferMemoryRequirements() has not been called on that buffer.
+
+It is OK, you should just ignore it. It happens because you use function
+`vkGetBufferMemoryRequirements2KHR()` instead of standard
+`vkGetBufferMemoryRequirements()`, while the validation layer seems to be
+unaware of it.
+
+To learn more about this extension, see:
+
+- [VK_KHR_dedicated_allocation in Vulkan specification](https://www.khronos.org/registry/vulkan/specs/1.0-extensions/html/vkspec.html#VK_KHR_dedicated_allocation)
+- [VK_KHR_dedicated_allocation unofficial manual](http://asawicki.info/articles/VK_KHR_dedicated_allocation.php5)
+
+
+
+\page general_considerations General considerations
+
+\section general_considerations_thread_safety Thread safety
+
+- The library has no global state, so separate #VmaAllocator objects can be used
+  independently.
+  There should be no need to create multiple such objects though - one per `VkDevice` is enough.
+- By default, all calls to functions that take #VmaAllocator as first parameter
+  are safe to call from multiple threads simultaneously because they are
+  synchronized internally when needed.
+- When the allocator is created with #VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT
+  flag, calls to functions that take such #VmaAllocator object must be
+  synchronized externally.
+- Access to a #VmaAllocation object must be externally synchronized. For example,
+  you must not call vmaGetAllocationInfo() and vmaMapMemory() from different
+  threads at the same time if you pass the same #VmaAllocation object to these
+  functions.
+
+\section general_considerations_validation_layer_warnings Validation layer warnings
+
+When using this library, you can encounter the following types of warnings issued by
+the Vulkan validation layer. They don't necessarily indicate a bug, so you may need
+to just ignore them.
+
+- *vkBindBufferMemory(): Binding memory to buffer 0xeb8e4 but vkGetBufferMemoryRequirements() has not been called on that buffer.*
+  - It happens when VK_KHR_dedicated_allocation extension is enabled.
+    `vkGetBufferMemoryRequirements2KHR` function is used instead, while validation layer seems to be unaware of it.
+- *Mapping an image with layout VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL can result in undefined behavior if this memory is used by the device. Only GENERAL or PREINITIALIZED should be used.*
+  - It happens when you map a buffer or image, because the library maps entire
+    `VkDeviceMemory` block, where different types of images and buffers may end
+    up together, especially on GPUs with unified memory like Intel.
+- *Non-linear image 0xebc91 is aliased with linear buffer 0xeb8e4 which may indicate a bug.*
+  - It happens when you use lost allocations, and a new image or buffer is
+    created in place of an existing object that became lost.
+  - It may happen also when you use [defragmentation](@ref defragmentation).
+
+\section general_considerations_allocation_algorithm Allocation algorithm
+
+The library uses the following algorithm for allocation, in order:
+
+-# Try to find free range of memory in existing blocks.
+-# If failed, try to create a new block of `VkDeviceMemory`, with preferred block size.
+-# If failed, try to create such block with size/2, size/4, size/8.
+-# If failed and #VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT flag was
+   specified, try to find space in existing blocks, possibly making some other
+   allocations lost.
+-# If failed, try to allocate separate `VkDeviceMemory` for this allocation,
+   just like when you use #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT.
+-# If failed, choose other memory type that meets the requirements specified in
+   VmaAllocationCreateInfo and go to point 1.
+-# If failed, return `VK_ERROR_OUT_OF_DEVICE_MEMORY`.
+
+\section general_considerations_features_not_supported Features not supported
+
+Features deliberately excluded from the scope of this library:
+
+- Data transfer. Uploading (streaming) and downloading data of buffers and images
+  between CPU and GPU memory and related synchronization is the responsibility of the user.
+- Allocations for imported/exported external memory. They tend to require
+  explicit memory type index and dedicated allocation anyway, so they don't
+  interact with main features of this library. Such special purpose allocations
+  should be made manually, using `vkCreateBuffer()` and `vkAllocateMemory()`.
+- Recreation of buffers and images. Although the library has functions for
+  buffer and image creation (vmaCreateBuffer(), vmaCreateImage()), you need to
+  recreate these objects yourself after defragmentation. That's because the big
+  structures `VkBufferCreateInfo`, `VkImageCreateInfo` are not stored in
+  #VmaAllocation object.
+- Handling CPU memory allocation failures. When dynamically creating small C++
+  objects in CPU memory (not Vulkan memory), allocation failures are not checked
+  and handled gracefully, because that would complicate code significantly and
+  is usually not needed in desktop PC applications anyway.
+- Code free of any compiler warnings. Maintaining the library to compile and
+  work correctly on so many different platforms is hard enough. Being free of 
+  any warnings, on any version of any compiler, is simply not feasible.
+- This is a C++ library with C interface.
+  Bindings or ports to any other programming languages are welcomed as external projects and
+  are not going to be included into this repository.
+
+*/
+
+/*
+Define this macro to 0/1 to disable/enable support for recording functionality,
+available through VmaAllocatorCreateInfo::pRecordSettings.
+*/
+#ifndef VMA_RECORDING_ENABLED
+    #ifdef _WIN32
+        #define VMA_RECORDING_ENABLED 1
+    #else
+        #define VMA_RECORDING_ENABLED 0
+    #endif
+#endif
+
+#ifndef NOMINMAX
+    #define NOMINMAX // For windows.h
+#endif
+
+#ifndef VULKAN_H_
+    #include <vulkan/vulkan.h>
+#endif
+
+#if VMA_RECORDING_ENABLED
+    #include <windows.h>
+#endif
+
+#if !defined(VMA_DEDICATED_ALLOCATION)
+    #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
+        #define VMA_DEDICATED_ALLOCATION 1
+    #else
+        #define VMA_DEDICATED_ALLOCATION 0
+    #endif
+#endif
+
+/** \struct VmaAllocator
+\brief Represents the main object of this library, initialized and ready to use.
+
+Fill structure #VmaAllocatorCreateInfo and call function vmaCreateAllocator() to create it.
+Call function vmaDestroyAllocator() to destroy it.
+
+It is recommended to create just one object of this type per `VkDevice` object,
+right after Vulkan is initialized, and keep it alive until just before the Vulkan device is destroyed.
+*/
+VK_DEFINE_HANDLE(VmaAllocator)
+
+/// Callback function called after successful vkAllocateMemory.
+typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
+    VmaAllocator      allocator,
+    uint32_t          memoryType,
+    VkDeviceMemory    memory,
+    VkDeviceSize      size);
+/// Callback function called before vkFreeMemory.
+typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
+    VmaAllocator      allocator,
+    uint32_t          memoryType,
+    VkDeviceMemory    memory,
+    VkDeviceSize      size);
+
+/** \brief Set of callbacks that the library will call for `vkAllocateMemory` and `vkFreeMemory`.
+
+Provided for informative purpose, e.g. to gather statistics about number of
+allocations or total amount of memory allocated in Vulkan.
+
+Used in VmaAllocatorCreateInfo::pDeviceMemoryCallbacks.
+*/
+typedef struct VmaDeviceMemoryCallbacks {
+    /// Optional, can be null.
+    PFN_vmaAllocateDeviceMemoryFunction pfnAllocate;
+    /// Optional, can be null.
+    PFN_vmaFreeDeviceMemoryFunction pfnFree;
+} VmaDeviceMemoryCallbacks;
+
+/// Flags for created #VmaAllocator.
+typedef enum VmaAllocatorCreateFlagBits {
+    /** \brief Allocator and all objects created from it will not be synchronized internally, so you must guarantee they are used from only one thread at a time or synchronized externally by you.
+
+    Using this flag may increase performance because internal mutexes are not used.
+    */
+    VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT = 0x00000001,
+    /** \brief Enables usage of VK_KHR_dedicated_allocation extension.
+
+    Using this extension will automatically allocate dedicated blocks of memory for
+    some buffers and images instead of suballocating place for them out of bigger
+    memory blocks (as if you explicitly used #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT
+    flag) when it is recommended by the driver. It may improve performance on some
+    GPUs.
+
+    You may set this flag only if you found out that following device extensions are
+    supported, you enabled them while creating Vulkan device passed as
+    VmaAllocatorCreateInfo::device, and you want them to be used internally by this
+    library:
+
+    - VK_KHR_get_memory_requirements2
+    - VK_KHR_dedicated_allocation
+
+When this flag is set, you can experience following warnings reported by Vulkan
+validation layer. You can ignore them.
+
+> vkBindBufferMemory(): Binding memory to buffer 0x2d but vkGetBufferMemoryRequirements() has not been called on that buffer.
+    */
+    VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT = 0x00000002,
+
+    VMA_ALLOCATOR_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VmaAllocatorCreateFlagBits;
+typedef VkFlags VmaAllocatorCreateFlags;
+
+/** \brief Pointers to some Vulkan functions - a subset used by the library.
+
+Used in VmaAllocatorCreateInfo::pVulkanFunctions.
+*/
+typedef struct VmaVulkanFunctions {
+    PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
+    PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
+    PFN_vkAllocateMemory vkAllocateMemory;
+    PFN_vkFreeMemory vkFreeMemory;
+    PFN_vkMapMemory vkMapMemory;
+    PFN_vkUnmapMemory vkUnmapMemory;
+    PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
+    PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges;
+    PFN_vkBindBufferMemory vkBindBufferMemory;
+    PFN_vkBindImageMemory vkBindImageMemory;
+    PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
+    PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
+    PFN_vkCreateBuffer vkCreateBuffer;
+    PFN_vkDestroyBuffer vkDestroyBuffer;
+    PFN_vkCreateImage vkCreateImage;
+    PFN_vkDestroyImage vkDestroyImage;
+    PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
+#if VMA_DEDICATED_ALLOCATION
+    PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
+    PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
+#endif
+} VmaVulkanFunctions;
+
+/// Flags to be used in VmaRecordSettings::flags.
+typedef enum VmaRecordFlagBits {
+    /** \brief Enables flush after recording every function call.
+
+    Enable it if you expect your application to crash, which may leave recording file truncated.
+    It may degrade performance though.
+    */
+    VMA_RECORD_FLUSH_AFTER_CALL_BIT = 0x00000001,
+    
+    VMA_RECORD_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VmaRecordFlagBits;
+typedef VkFlags VmaRecordFlags;
+
+/// Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSettings.
+typedef struct VmaRecordSettings
+{
+    /// Flags for recording. Use #VmaRecordFlagBits enum.
+    VmaRecordFlags flags;
+    /** \brief Path to the file that should be written by the recording.
+
+    Suggested extension: "csv".
+    If the file already exists, it will be overwritten.
+    It will be opened for the whole time #VmaAllocator object is alive.
+    If opening this file fails, creation of the whole allocator object fails.
+    */
+    const char* pFilePath;
+} VmaRecordSettings;
+
+/// Description of an Allocator to be created.
+typedef struct VmaAllocatorCreateInfo
+{
+    /// Flags for created allocator. Use #VmaAllocatorCreateFlagBits enum.
+    VmaAllocatorCreateFlags flags;
+    /// Vulkan physical device.
+    /** It must be valid throughout whole lifetime of created allocator. */
+    VkPhysicalDevice physicalDevice;
+    /// Vulkan device.
+    /** It must be valid throughout whole lifetime of created allocator. */
+    VkDevice device;
+    /// Preferred size of a single `VkDeviceMemory` block to be allocated from large heaps > 1 GiB. Optional.
+    /** Set to 0 to use default, which is currently 256 MiB. */
+    VkDeviceSize preferredLargeHeapBlockSize;
+    /// Custom CPU memory allocation callbacks. Optional.
+    /** Optional, can be null. When specified, will also be used for all CPU-side memory allocations. */
+    const VkAllocationCallbacks* pAllocationCallbacks;
+    /// Informative callbacks for `vkAllocateMemory`, `vkFreeMemory`. Optional.
+    /** Optional, can be null. */
+    const VmaDeviceMemoryCallbacks* pDeviceMemoryCallbacks;
+    /** \brief Maximum number of additional frames that are in use at the same time as current frame.
+
+    This value is used only when you make allocations with
+    VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT flag. Such allocation cannot become
+    lost if allocation.lastUseFrameIndex >= allocator.currentFrameIndex - frameInUseCount.
+
+    For example, if you double-buffer your command buffers, so resources used for
+    rendering in previous frame may still be in use by the GPU at the moment you
+    allocate resources needed for the current frame, set this value to 1.
+
+    If you want to allow any allocations other than used in the current frame to
+    become lost, set this value to 0.
+    */
+    uint32_t frameInUseCount;
+    /** \brief Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out of particular Vulkan memory heap.
+
+    If not NULL, it must be a pointer to an array of
+    `VkPhysicalDeviceMemoryProperties::memoryHeapCount` elements, defining limit on
+    maximum number of bytes that can be allocated out of particular Vulkan memory
+    heap.
+
+    Any of the elements may be equal to `VK_WHOLE_SIZE`, which means no limit on that
+    heap. This is also the default in case of `pHeapSizeLimit` = NULL.
+
+    If there is a limit defined for a heap:
+
+    - If user tries to allocate more memory from that heap using this allocator,
+      the allocation fails with `VK_ERROR_OUT_OF_DEVICE_MEMORY`.
+    - If the limit is smaller than heap size reported in `VkMemoryHeap::size`, the
+      value of this limit will be reported instead when using vmaGetMemoryProperties().
+
+    Warning! Using this feature may not be equivalent to installing a GPU with
+    a smaller amount of memory, because the graphics driver doesn't necessarily fail new
+    allocations with `VK_ERROR_OUT_OF_DEVICE_MEMORY` result when memory capacity is
+    exceeded. It may return success and just silently migrate some device memory
+    blocks to system RAM. This driver behavior can also be controlled using
+    VK_AMD_memory_overallocation_behavior extension.
+    */
+    const VkDeviceSize* pHeapSizeLimit;
+    /** \brief Pointers to Vulkan functions. Can be null if you leave `VMA_STATIC_VULKAN_FUNCTIONS` defined to 1.
+
+    If you leave `VMA_STATIC_VULKAN_FUNCTIONS` defined to 1 in the configuration section,
+    you can pass null as this member, because the library will fetch pointers to
+    Vulkan functions internally in a static way, like:
+
+        vulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
+
+    Fill this member if you want to provide your own pointers to Vulkan functions,
+    e.g. fetched using `vkGetInstanceProcAddr()` and `vkGetDeviceProcAddr()`.
+    */
+    const VmaVulkanFunctions* pVulkanFunctions;
+    /** \brief Parameters for recording of VMA calls. Can be null.
+
+    If not null, it enables recording of calls to VMA functions to a file.
+    If support for recording is not enabled using `VMA_RECORDING_ENABLED` macro,
+    creation of the allocator object fails with `VK_ERROR_FEATURE_NOT_PRESENT`.
+    */
+    const VmaRecordSettings* pRecordSettings;
+} VmaAllocatorCreateInfo;
+
+/// Creates Allocator object.
+VkResult vmaCreateAllocator(
+    const VmaAllocatorCreateInfo* pCreateInfo,
+    VmaAllocator* pAllocator);
+
+/// Destroys allocator object.
+void vmaDestroyAllocator(
+    VmaAllocator allocator);
+
+/**
+PhysicalDeviceProperties are fetched from physicalDevice by the allocator.
+You can access it here, without fetching it again on your own.
+*/
+void vmaGetPhysicalDeviceProperties(
+    VmaAllocator allocator,
+    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
+
+/**
+PhysicalDeviceMemoryProperties are fetched from physicalDevice by the allocator.
+You can access it here, without fetching it again on your own.
+*/
+void vmaGetMemoryProperties(
+    VmaAllocator allocator,
+    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
+
+/**
+\brief Given Memory Type Index, returns Property Flags of this memory type.
+
+This is just a convenience function. Same information can be obtained using
+vmaGetMemoryProperties().
+*/
+void vmaGetMemoryTypeProperties(
+    VmaAllocator allocator,
+    uint32_t memoryTypeIndex,
+    VkMemoryPropertyFlags* pFlags);
+
+/** \brief Sets index of the current frame.
+
+This function must be used if you make allocations with
+#VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT and
+#VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT flags to inform the allocator
+when a new frame begins. Allocations queried using vmaGetAllocationInfo() cannot
+become lost in the current frame.
+*/
+void vmaSetCurrentFrameIndex(
+    VmaAllocator allocator,
+    uint32_t frameIndex);
+
+/** \brief Calculated statistics of memory usage in entire allocator.
+*/
+typedef struct VmaStatInfo
+{
+    /// Number of `VkDeviceMemory` Vulkan memory blocks allocated.
+    uint32_t blockCount;
+    /// Number of #VmaAllocation allocation objects allocated.
+    uint32_t allocationCount;
+    /// Number of free ranges of memory between allocations.
+    uint32_t unusedRangeCount;
+    /// Total number of bytes occupied by all allocations.
+    VkDeviceSize usedBytes;
+    /// Total number of bytes occupied by unused ranges.
+    VkDeviceSize unusedBytes;
+    VkDeviceSize allocationSizeMin, allocationSizeAvg, allocationSizeMax;
+    VkDeviceSize unusedRangeSizeMin, unusedRangeSizeAvg, unusedRangeSizeMax;
+} VmaStatInfo;
+
+/// General statistics from current state of Allocator.
+typedef struct VmaStats
+{
+    VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES];
+    VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS];
+    VmaStatInfo total;
+} VmaStats;
+
+/// Retrieves statistics from current state of the Allocator.
+void vmaCalculateStats(
+    VmaAllocator allocator,
+    VmaStats* pStats);
+
+#define VMA_STATS_STRING_ENABLED 1
+
+#if VMA_STATS_STRING_ENABLED
+
+/// Builds and returns statistics as string in JSON format.
+/** @param[out] ppStatsString Must be freed using vmaFreeStatsString() function.
+*/
+void vmaBuildStatsString(
+    VmaAllocator allocator,
+    char** ppStatsString,
+    VkBool32 detailedMap);
+
+void vmaFreeStatsString(
+    VmaAllocator allocator,
+    char* pStatsString);
+
+#endif // #if VMA_STATS_STRING_ENABLED
+
+/** \struct VmaPool
+\brief Represents custom memory pool
+
+Fill structure VmaPoolCreateInfo and call function vmaCreatePool() to create it.
+Call function vmaDestroyPool() to destroy it.
+
+For more information see [Custom memory pools](@ref choosing_memory_type_custom_memory_pools).
+*/
+VK_DEFINE_HANDLE(VmaPool)
+
+typedef enum VmaMemoryUsage
+{
+    /** No intended memory usage specified.
+    Use other members of VmaAllocationCreateInfo to specify your requirements.
+    */
+    VMA_MEMORY_USAGE_UNKNOWN = 0,
+    /** Memory will be used on device only, so fast access from the device is preferred.
+    It usually means device-local GPU (video) memory.
+    No need to be mappable on host.
+    It is roughly equivalent to `D3D12_HEAP_TYPE_DEFAULT`.
+
+    Usage:
+    
+    - Resources written and read by device, e.g. images used as attachments.
+    - Resources transferred from host once (immutable) or infrequently and read by
+      device multiple times, e.g. textures to be sampled, vertex buffers, uniform
+      (constant) buffers, and majority of other types of resources used on GPU.
+
+    Allocation may still end up in `HOST_VISIBLE` memory on some implementations.
+    In such case, you are free to map it.
+    You can use #VMA_ALLOCATION_CREATE_MAPPED_BIT with this usage type.
+    */
+    VMA_MEMORY_USAGE_GPU_ONLY = 1,
+    /** Memory will be mappable on host.
+    It usually means CPU (system) memory.
+    Guarantees to be `HOST_VISIBLE` and `HOST_COHERENT`.
+    CPU access is typically uncached. Writes may be write-combined.
+    Resources created in this pool may still be accessible to the device, but access to them can be slow.
+    It is roughly equivalent to `D3D12_HEAP_TYPE_UPLOAD`.
+
+    Usage: Staging copy of resources used as transfer source.
+    */
+    VMA_MEMORY_USAGE_CPU_ONLY = 2,
+    /**
+    Memory that is both mappable on host (guarantees to be `HOST_VISIBLE`) and preferably fast to access by GPU.
+    CPU access is typically uncached. Writes may be write-combined.
+
+    Usage: Resources written frequently by host (dynamic), read by device. E.g. textures, vertex buffers, uniform buffers updated every frame or every draw call.
+    */
+    VMA_MEMORY_USAGE_CPU_TO_GPU = 3,
+    /** Memory mappable on host (guarantees to be `HOST_VISIBLE`) and cached.
+    It is roughly equivalent to `D3D12_HEAP_TYPE_READBACK`.
+
+    Usage:
+
+    - Resources written by device, read by host - results of some computations, e.g. screen capture, average scene luminance for HDR tone mapping.
+    - Any resources read or accessed randomly on host, e.g. CPU-side copy of vertex buffer used as source of transfer, but also used for collision detection.
+    */
+    VMA_MEMORY_USAGE_GPU_TO_CPU = 4,
+    VMA_MEMORY_USAGE_MAX_ENUM = 0x7FFFFFFF
+} VmaMemoryUsage;
+
+/// Flags to be passed as VmaAllocationCreateInfo::flags.
+typedef enum VmaAllocationCreateFlagBits {
+    /** \brief Set this flag if the allocation should have its own memory block.
+    
+    Use it for special, big resources, like fullscreen images used as attachments.
+   
+    This flag must also be used for host visible resources that you want to map
+    simultaneously because otherwise they might end up as regions of the same
+    `VkDeviceMemory`, while mapping same `VkDeviceMemory` multiple times
+    simultaneously is illegal.
+
+    You should not use this flag if VmaAllocationCreateInfo::pool is not null.
+    */
+    VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT = 0x00000001,
+
+    /** \brief Set this flag to only try to allocate from existing `VkDeviceMemory` blocks and never create new such block.
+    
+    If new allocation cannot be placed in any of the existing blocks, allocation
+    fails with `VK_ERROR_OUT_OF_DEVICE_MEMORY` error.
+    
+    You should not use #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT and
+    #VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT at the same time. It makes no sense.
+    
+    If VmaAllocationCreateInfo::pool is not null, this flag is implied and ignored. */
+    VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT = 0x00000002,
+    /** \brief Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
+    
+    Pointer to mapped memory will be returned through VmaAllocationInfo::pMappedData.
+
+    It is valid to use this flag for an allocation made from a memory type that is not
+    `HOST_VISIBLE`. This flag is then ignored and memory is not mapped. This is
+    useful if you need an allocation that is efficient to use on GPU
+    (`DEVICE_LOCAL`) and still want to map it directly if possible on platforms that
+    support it (e.g. Intel GPU).
+
+    You should not use this flag together with #VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT.
+    */
+    VMA_ALLOCATION_CREATE_MAPPED_BIT = 0x00000004,
+    /** Allocation created with this flag can become lost as a result of another
+    allocation with #VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT flag, so you
+    must check it before use.
+
+    To check if allocation is not lost, call vmaGetAllocationInfo() and check if
+    VmaAllocationInfo::deviceMemory is not `VK_NULL_HANDLE`.
+
+    For details about supporting lost allocations, see Lost Allocations
+    chapter of User Guide on Main Page.
+
+    You should not use this flag together with #VMA_ALLOCATION_CREATE_MAPPED_BIT.
+    */
+    VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT = 0x00000008,
+    /** While creating allocation using this flag, other allocations that were
+    created with flag #VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT can become lost.
+
+    For details about supporting lost allocations, see Lost Allocations
+    chapter of User Guide on Main Page.
+    */
+    VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT = 0x00000010,
+    /** Set this flag to treat VmaAllocationCreateInfo::pUserData as pointer to a
+    null-terminated string. Instead of copying pointer value, a local copy of the
+    string is made and stored in allocation's `pUserData`. The string is automatically
+    freed together with the allocation. It is also used in vmaBuildStatsString().
+    */
+    VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT = 0x00000020,
+    /** Allocation will be created from upper stack in a double stack pool.
+
+    This flag is only allowed for custom pools created with #VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT flag.
+    */
+    VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT = 0x00000040,
+
+    /** Allocation strategy that chooses smallest possible free range for the
+    allocation.
+    */
+    VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT  = 0x00010000,
+    /** Allocation strategy that chooses biggest possible free range for the
+    allocation.
+    */
+    VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT = 0x00020000,
+    /** Allocation strategy that chooses first suitable free range for the
+    allocation.
+
+    "First" doesn't necessarily mean the one with smallest offset in memory,
+    but rather the one that is easiest and fastest to find.
+    */
+    VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT = 0x00040000,
+
+    /** Allocation strategy that tries to minimize memory usage.
+    */
+    VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT,
+    /** Allocation strategy that tries to minimize allocation time.
+    */
+    VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT,
+    /** Allocation strategy that tries to minimize memory fragmentation.
+    */
+    VMA_ALLOCATION_CREATE_STRATEGY_MIN_FRAGMENTATION_BIT = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT,
+
+    /** A bit mask to extract only `STRATEGY` bits from entire set of flags.
+    */
+    VMA_ALLOCATION_CREATE_STRATEGY_MASK =
+        VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT |
+        VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT |
+        VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT,
+
+    VMA_ALLOCATION_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VmaAllocationCreateFlagBits;
+typedef VkFlags VmaAllocationCreateFlags;
+
+typedef struct VmaAllocationCreateInfo
+{
+    /// Use #VmaAllocationCreateFlagBits enum.
+    VmaAllocationCreateFlags flags;
+    /** \brief Intended usage of memory.
+    
+    You can leave #VMA_MEMORY_USAGE_UNKNOWN if you specify memory requirements in other way. \n
+    If `pool` is not null, this member is ignored.
+    */
+    VmaMemoryUsage usage;
+    /** \brief Flags that must be set in a Memory Type chosen for an allocation.
+    
+    Leave 0 if you specify memory requirements in other way. \n
+    If `pool` is not null, this member is ignored.*/
+    VkMemoryPropertyFlags requiredFlags;
+    /** \brief Flags that preferably should be set in a memory type chosen for an allocation.
+    
+    Set to 0 if no additional flags are preferred. \n
+    If `pool` is not null, this member is ignored. */
+    VkMemoryPropertyFlags preferredFlags;
+    /** \brief Bitmask containing one bit set for every memory type acceptable for this allocation.
+
+    Value 0 is equivalent to `UINT32_MAX` - it means any memory type is accepted if
+    it meets other requirements specified by this structure, with no further
+    restrictions on memory type index. \n
+    If `pool` is not null, this member is ignored.
+    */
+    uint32_t memoryTypeBits;
+    /** \brief Pool that this allocation should be created in.
+
+    Leave `VK_NULL_HANDLE` to allocate from default pool. If not null, members:
+    `usage`, `requiredFlags`, `preferredFlags`, `memoryTypeBits` are ignored.
+    */
+    VmaPool pool;
+    /** \brief Custom general-purpose pointer that will be stored in #VmaAllocation, can be read as VmaAllocationInfo::pUserData and changed using vmaSetAllocationUserData().
+    
+    If #VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT is used, it must be either
+    null or pointer to a null-terminated string. The string will be then copied to
+    internal buffer, so it doesn't need to be valid after allocation call.
+    */
+    void* pUserData;
+} VmaAllocationCreateInfo;
+
+/**
+\brief Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
+
+This algorithm tries to find a memory type that:
+
+- Is allowed by memoryTypeBits.
+- Contains all the flags from pAllocationCreateInfo->requiredFlags.
+- Matches intended usage.
+- Has as many flags from pAllocationCreateInfo->preferredFlags as possible.
+
+\return Returns VK_ERROR_FEATURE_NOT_PRESENT if not found. Receiving such result
+from this function or any other allocating function probably means that your
+device doesn't support any memory type with requested features for the specific
+type of resource you want to use it for. Please check parameters of your
+resource, like image layout (OPTIMAL versus LINEAR) or mip level count.
+*/
+VkResult vmaFindMemoryTypeIndex(
+    VmaAllocator allocator,
+    uint32_t memoryTypeBits,
+    const VmaAllocationCreateInfo* pAllocationCreateInfo,
+    uint32_t* pMemoryTypeIndex);
+
+/**
+\brief Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
+
+It can be useful e.g. to determine value to be used as VmaPoolCreateInfo::memoryTypeIndex.
+It internally creates a temporary, dummy buffer that never has memory bound.
+It is just a convenience function, equivalent to calling:
+
+- `vkCreateBuffer`
+- `vkGetBufferMemoryRequirements`
+- `vmaFindMemoryTypeIndex`
+- `vkDestroyBuffer`
+*/
+VkResult vmaFindMemoryTypeIndexForBufferInfo(
+    VmaAllocator allocator,
+    const VkBufferCreateInfo* pBufferCreateInfo,
+    const VmaAllocationCreateInfo* pAllocationCreateInfo,
+    uint32_t* pMemoryTypeIndex);
+
+/**
+\brief Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
+
+It can be useful e.g. to determine value to be used as VmaPoolCreateInfo::memoryTypeIndex.
+It internally creates a temporary, dummy image that never has memory bound.
+It is just a convenience function, equivalent to calling:
+
+- `vkCreateImage`
+- `vkGetImageMemoryRequirements`
+- `vmaFindMemoryTypeIndex`
+- `vkDestroyImage`
+*/
+VkResult vmaFindMemoryTypeIndexForImageInfo(
+    VmaAllocator allocator,
+    const VkImageCreateInfo* pImageCreateInfo,
+    const VmaAllocationCreateInfo* pAllocationCreateInfo,
+    uint32_t* pMemoryTypeIndex);
+
+/// Flags to be passed as VmaPoolCreateInfo::flags.
+typedef enum VmaPoolCreateFlagBits {
+    /** \brief Use this flag if you always allocate only buffers and linear images or only optimal images out of this pool and so Buffer-Image Granularity can be ignored.
+
+    This is an optional optimization flag.
+
+    If you always allocate using vmaCreateBuffer(), vmaCreateImage(),
+    vmaAllocateMemoryForBuffer(), then you don't need to use it because allocator
+    knows exact type of your allocations so it can handle Buffer-Image Granularity
+    in the optimal way.
+
+    If you also allocate using vmaAllocateMemoryForImage() or vmaAllocateMemory(),
+    exact type of such allocations is not known, so allocator must be conservative
+    in handling Buffer-Image Granularity, which can lead to suboptimal allocation
+    (wasted memory). In that case, if you can make sure you always allocate only
+    buffers and linear images or only optimal images out of this pool, use this flag
+    to make allocator disregard Buffer-Image Granularity and so make allocations
+    faster and more optimal.
+    */
+    VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT = 0x00000002,
+
+    /** \brief Enables alternative, linear allocation algorithm in this pool.
+
+    Specify this flag to enable linear allocation algorithm, which always creates
+    new allocations after last one and doesn't reuse space from allocations freed in
+    between. It trades memory consumption for simplified algorithm and data
+    structure, which has better performance and uses less memory for metadata.
+
+    By using this flag, you can achieve behavior of free-at-once, stack,
+    ring buffer, and double stack. For details, see documentation chapter
+    \ref linear_algorithm.
+
+    When using this flag, you must specify VmaPoolCreateInfo::maxBlockCount == 1 (or 0 for default).
+
+    For more details, see [Linear allocation algorithm](@ref linear_algorithm).
+    */
+    VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT = 0x00000004,
+
+    /** \brief Enables alternative, buddy allocation algorithm in this pool.
+
+    It operates on a tree of blocks, each having size that is a power of two and
+    a half of its parent's size. Comparing to default algorithm, this one provides
+    faster allocation and deallocation and decreased external fragmentation,
+    at the expense of more memory wasted (internal fragmentation).
+
+    For more details, see [Buddy allocation algorithm](@ref buddy_algorithm).
+    */
+    VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT = 0x00000008,
+
+    /** Bit mask to extract only `ALGORITHM` bits from entire set of flags.
+    */
+    VMA_POOL_CREATE_ALGORITHM_MASK =
+        VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT |
+        VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT,
+
+    VMA_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VmaPoolCreateFlagBits;
+typedef VkFlags VmaPoolCreateFlags;
+
+/** \brief Describes parameter of created #VmaPool.
+*/
+typedef struct VmaPoolCreateInfo {
+    /** \brief Vulkan memory type index to allocate this pool from.
+    */
+    uint32_t memoryTypeIndex;
+    /** \brief Use combination of #VmaPoolCreateFlagBits.
+    */
+    VmaPoolCreateFlags flags;
+    /** \brief Size of a single `VkDeviceMemory` block to be allocated as part of this pool, in bytes. Optional.
+
+    Specify nonzero to set explicit, constant size of memory blocks used by this
+    pool.
+
+    Leave 0 to use default and let the library manage block sizes automatically.
+    Sizes of particular blocks may vary.
+    */
+    VkDeviceSize blockSize;
+    /** \brief Minimum number of blocks to be always allocated in this pool, even if they stay empty.
+
+    Set to 0 to have no preallocated blocks and allow the pool to be completely empty.
+    */
+    size_t minBlockCount;
+    /** \brief Maximum number of blocks that can be allocated in this pool. Optional.
+
+    Set to 0 to use default, which is `SIZE_MAX`, which means no limit.
+    
+    Set to same value as VmaPoolCreateInfo::minBlockCount to have fixed amount of memory allocated
+    throughout whole lifetime of this pool.
+    */
+    size_t maxBlockCount;
+    /** \brief Maximum number of additional frames that are in use at the same time as current frame.
+
+    This value is used only when you make allocations with
+    #VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT flag. Such allocation cannot become
+    lost if allocation.lastUseFrameIndex >= allocator.currentFrameIndex - frameInUseCount.
+
+    For example, if you double-buffer your command buffers, so resources used for
+    rendering in previous frame may still be in use by the GPU at the moment you
+    allocate resources needed for the current frame, set this value to 1.
+
+    If you want to allow any allocations other than used in the current frame to
+    become lost, set this value to 0.
+    */
+    uint32_t frameInUseCount;
+} VmaPoolCreateInfo;
+
+/** \brief Describes parameter of existing #VmaPool.
+*/
+typedef struct VmaPoolStats {
+    /** \brief Total amount of `VkDeviceMemory` allocated from Vulkan for this pool, in bytes.
+    */
+    VkDeviceSize size;
+    /** \brief Total number of bytes in the pool not used by any #VmaAllocation.
+    */
+    VkDeviceSize unusedSize;
+    /** \brief Number of #VmaAllocation objects created from this pool that were not destroyed or lost.
+    */
+    size_t allocationCount;
+    /** \brief Number of continuous memory ranges in the pool not used by any #VmaAllocation.
+    */
+    size_t unusedRangeCount;
+    /** \brief Size of the largest continuous free memory region available for new allocation.
+
+    Making a new allocation of that size is not guaranteed to succeed because of
+    possible additional margin required to respect alignment and buffer/image
+    granularity.
+    */
+    VkDeviceSize unusedRangeSizeMax;
+    /** \brief Number of `VkDeviceMemory` blocks allocated for this pool.
+    */
+    size_t blockCount;
+} VmaPoolStats;
+
+/** \brief Allocates Vulkan device memory and creates #VmaPool object.
+
+@param allocator Allocator object.
+@param pCreateInfo Parameters of pool to create.
+@param[out] pPool Handle to created pool.
+*/
+VkResult vmaCreatePool(
+    VmaAllocator allocator,
+    const VmaPoolCreateInfo* pCreateInfo,
+    VmaPool* pPool);
+
+/** \brief Destroys #VmaPool object and frees Vulkan device memory.
+*/
+void vmaDestroyPool(
+    VmaAllocator allocator,
+    VmaPool pool);
+
+/** \brief Retrieves statistics of existing #VmaPool object.
+
+@param allocator Allocator object.
+@param pool Pool object.
+@param[out] pPoolStats Statistics of specified pool.
+*/
+void vmaGetPoolStats(
+    VmaAllocator allocator,
+    VmaPool pool,
+    VmaPoolStats* pPoolStats);
+
+/** \brief Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInfo::frameInUseCount back from now.
+
+@param allocator Allocator object.
+@param pool Pool.
+@param[out] pLostAllocationCount Number of allocations marked as lost. Optional - pass null if you don't need this information.
+*/
+void vmaMakePoolAllocationsLost(
+    VmaAllocator allocator,
+    VmaPool pool,
+    size_t* pLostAllocationCount);
+
+/** \brief Checks magic number in margins around all allocations in given memory pool in search for corruptions.
+
+Corruption detection is enabled only when `VMA_DEBUG_DETECT_CORRUPTION` macro is defined to nonzero,
+`VMA_DEBUG_MARGIN` is defined to nonzero and the pool is created in memory type that is
+`HOST_VISIBLE` and `HOST_COHERENT`. For more information, see [Corruption detection](@ref debugging_memory_usage_corruption_detection).
+
+Possible return values:
+
+- `VK_ERROR_FEATURE_NOT_PRESENT` - corruption detection is not enabled for specified pool.
+- `VK_SUCCESS` - corruption detection has been performed and succeeded.
+- `VK_ERROR_VALIDATION_FAILED_EXT` - corruption detection has been performed and found memory corruptions around one of the allocations.
+  `VMA_ASSERT` is also fired in that case.
+- Other value: Error returned by Vulkan, e.g. memory mapping failure.
+*/
+VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool);
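+
+/* A sketch of interpreting the result, assuming a valid `allocator` and `pool`:
+
+\code
+VkResult res = vmaCheckPoolCorruption(allocator, pool);
+if(res == VK_ERROR_FEATURE_NOT_PRESENT)
+{
+    // Corruption detection is not enabled - see VMA_DEBUG_DETECT_CORRUPTION and VMA_DEBUG_MARGIN.
+}
+else if(res == VK_ERROR_VALIDATION_FAILED_EXT)
+{
+    // Corruption was found around one of the allocations in this pool.
+}
+\endcode
+*/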
+
+/** \struct VmaAllocation
+\brief Represents single memory allocation.
+
+It may be either a dedicated block of `VkDeviceMemory` or a specific region of a bigger block of this type,
+plus a unique offset.
+
+There are multiple ways to create such an object.
+In every case, you need to fill a VmaAllocationCreateInfo structure.
+For more information, see [Choosing memory type](@ref choosing_memory_type).
+
+Although the library provides convenience functions that create a Vulkan buffer or image,
+allocate memory for it and bind them together,
+binding of the allocation to a buffer or an image is out of scope of the allocation itself.
+An allocation object can exist without a buffer/image bound to it;
+binding can be done manually by the user, and the buffer/image can be destroyed
+independently of the allocation.
+
+The object also remembers its size and some other information.
+To retrieve this information, use function vmaGetAllocationInfo() and inspect
+returned structure VmaAllocationInfo.
+
+Some kinds of allocations can be in a lost state.
+For more information, see [Lost allocations](@ref lost_allocations).
+*/
+VK_DEFINE_HANDLE(VmaAllocation)
+
+/** \brief Parameters of a #VmaAllocation object, which can be retrieved using function vmaGetAllocationInfo().
+*/
+typedef struct VmaAllocationInfo {
+    /** \brief Memory type index that this allocation was allocated from.
+    
+    It never changes.
+    */
+    uint32_t memoryType;
+    /** \brief Handle to Vulkan memory object.
+
+    Same memory object can be shared by multiple allocations.
+    
+    It can change after call to vmaDefragment() if this allocation is passed to the function, or if allocation is lost.
+
+    If the allocation is lost, it is equal to `VK_NULL_HANDLE`.
+    */
+    VkDeviceMemory deviceMemory;
+    /** \brief Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
+
+    It can change after call to vmaDefragment() if this allocation is passed to the function, or if allocation is lost.
+    */
+    VkDeviceSize offset;
+    /** \brief Size of this allocation, in bytes.
+
+    It never changes, unless allocation is lost.
+    */
+    VkDeviceSize size;
+    /** \brief Pointer to the beginning of this allocation as mapped data.
+
+    If the allocation hasn't been mapped using vmaMapMemory() and hasn't been
+    created with #VMA_ALLOCATION_CREATE_MAPPED_BIT flag, this value is null.
+
+    It can change after call to vmaMapMemory(), vmaUnmapMemory().
+    It can also change after call to vmaDefragment() if this allocation is passed to the function.
+    */
+    void* pMappedData;
+    /** \brief Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vmaSetAllocationUserData().
+
+    It can change after call to vmaSetAllocationUserData() for this allocation.
+    */
+    void* pUserData;
+} VmaAllocationInfo;
+
+/** \brief General purpose memory allocation.
+
+@param[out] pAllocation Handle to allocated memory.
+@param[out] pAllocationInfo Optional. Information about allocated memory. It can be later fetched using function vmaGetAllocationInfo().
+
+You should free the memory using vmaFreeMemory() or vmaFreeMemoryPages().
+
+It is recommended to use vmaAllocateMemoryForBuffer(), vmaAllocateMemoryForImage(),
+vmaCreateBuffer(), vmaCreateImage() instead whenever possible.
+*/
+VkResult vmaAllocateMemory(
+    VmaAllocator allocator,
+    const VkMemoryRequirements* pVkMemoryRequirements,
+    const VmaAllocationCreateInfo* pCreateInfo,
+    VmaAllocation* pAllocation,
+    VmaAllocationInfo* pAllocationInfo);
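+
+/* A minimal sketch, assuming `allocator`, `device` and `buffer` already exist;
+VMA_MEMORY_USAGE_GPU_ONLY is one of the usage values defined earlier in this header:
+
+\code
+VkMemoryRequirements memReq = {};
+vkGetBufferMemoryRequirements(device, buffer, &memReq);
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
+
+VmaAllocation allocation = VK_NULL_HANDLE;
+VmaAllocationInfo allocInfo = {};
+VkResult res = vmaAllocateMemory(allocator, &memReq, &allocCreateInfo, &allocation, &allocInfo);
+\endcode
+*/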
+
+/** \brief General purpose memory allocation for multiple allocation objects at once.
+
+@param allocator Allocator object.
+@param pVkMemoryRequirements Memory requirements for each allocation.
+@param pCreateInfo Creation parameters for each allocation.
+@param allocationCount Number of allocations to make.
+@param[out] pAllocations Pointer to array that will be filled with handles to created allocations.
+@param[out] pAllocationInfo Optional. Pointer to array that will be filled with parameters of created allocations.
+
+You should free the memory using vmaFreeMemory() or vmaFreeMemoryPages().
+
+Word "pages" is just a suggestion to use this function to allocate pieces of memory needed for sparse binding.
+It is just a general purpose allocation function able to make multiple allocations at once.
+It may be internally optimized to be more efficient than calling vmaAllocateMemory() `allocationCount` times.
+
+All allocations are made using the same parameters. All of them are created out of the same memory pool and type.
+If any allocation fails, all allocations already made within this function call are also freed, so that when the
+returned result is not `VK_SUCCESS`, the `pAllocations` array is always entirely filled with `VK_NULL_HANDLE`.
+*/
+VkResult vmaAllocateMemoryPages(
+    VmaAllocator allocator,
+    const VkMemoryRequirements* pVkMemoryRequirements,
+    const VmaAllocationCreateInfo* pCreateInfo,
+    size_t allocationCount,
+    VmaAllocation* pAllocations,
+    VmaAllocationInfo* pAllocationInfo);
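+
+/* A sketch of allocating and later freeing several identical "pages", assuming
+`allocator` is valid and `pageMemReq` is a VkMemoryRequirements describing one page:
+
+\code
+const size_t pageCount = 16;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
+
+VmaAllocation pageAllocs[pageCount] = {};
+VkResult res = vmaAllocateMemoryPages(allocator, &pageMemReq, &allocCreateInfo,
+    pageCount, pageAllocs, nullptr);
+
+// ... use the allocations, e.g. for sparse binding ...
+
+vmaFreeMemoryPages(allocator, pageCount, pageAllocs);
+\endcode
+*/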
+
+/**
+@param[out] pAllocation Handle to allocated memory.
+@param[out] pAllocationInfo Optional. Information about allocated memory. It can be later fetched using function vmaGetAllocationInfo().
+
+You should free the memory using vmaFreeMemory().
+*/
+VkResult vmaAllocateMemoryForBuffer(
+    VmaAllocator allocator,
+    VkBuffer buffer,
+    const VmaAllocationCreateInfo* pCreateInfo,
+    VmaAllocation* pAllocation,
+    VmaAllocationInfo* pAllocationInfo);
+
+/// Function similar to vmaAllocateMemoryForBuffer().
+VkResult vmaAllocateMemoryForImage(
+    VmaAllocator allocator,
+    VkImage image,
+    const VmaAllocationCreateInfo* pCreateInfo,
+    VmaAllocation* pAllocation,
+    VmaAllocationInfo* pAllocationInfo);
+
+/** \brief Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
+
+Passing `VK_NULL_HANDLE` as `allocation` is valid. Such function call is just skipped.
+*/
+void vmaFreeMemory(
+    VmaAllocator allocator,
+    VmaAllocation allocation);
+
+/** \brief Frees memory and destroys multiple allocations.
+
+Word "pages" is just a suggestion to use this function to free pieces of memory used for sparse binding.
+It is just a general purpose function to free memory and destroy allocations made using e.g. vmaAllocateMemory(),
+vmaAllocateMemoryPages() and other functions.
+It may be internally optimized to be more efficient than calling vmaFreeMemory() `allocationCount` times.
+
+Allocations in `pAllocations` array can come from any memory pools and types.
+Passing `VK_NULL_HANDLE` as elements of `pAllocations` array is valid. Such entries are just skipped.
+*/
+void vmaFreeMemoryPages(
+    VmaAllocator allocator,
+    size_t allocationCount,
+    VmaAllocation* pAllocations);
+
+/** \brief Tries to resize an allocation in place, if there is enough free memory after it.
+
+Tries to change allocation's size without moving or reallocating it.
+You can both shrink and grow allocation size.
+When growing, it succeeds only when the allocation belongs to a memory block with enough
+free space after it.
+
+Returns `VK_SUCCESS` if allocation's size has been successfully changed.
+Returns `VK_ERROR_OUT_OF_POOL_MEMORY` if allocation's size could not be changed.
+
+After successful call to this function, VmaAllocationInfo::size of this allocation changes.
+All other parameters stay the same: memory pool and type, alignment, offset, mapped pointer.
+
+- Calling this function on an allocation that is in lost state fails with result `VK_ERROR_VALIDATION_FAILED_EXT`.
+- Calling this function with `newSize` equal to the current allocation size does nothing and returns `VK_SUCCESS`.
+- Resizing dedicated allocations, as well as allocations created in pools that use linear
+  or buddy algorithm, is not supported.
+  The function returns `VK_ERROR_FEATURE_NOT_PRESENT` in such cases.
+  Support may be added in the future.
+*/
+VkResult vmaResizeAllocation(
+    VmaAllocator allocator,
+    VmaAllocation allocation,
+    VkDeviceSize newSize);
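+
+/* A sketch, assuming `allocator` and `allocation` are valid:
+
+\code
+// Try to grow the allocation to 2 MiB without moving it.
+VkResult res = vmaResizeAllocation(allocator, allocation, 2ull * 1024 * 1024);
+if(res != VK_SUCCESS)
+{
+    // Either there is no free space directly after the allocation
+    // (VK_ERROR_OUT_OF_POOL_MEMORY) or this kind of allocation cannot be
+    // resized (VK_ERROR_FEATURE_NOT_PRESENT) - fall back to reallocating.
+}
+\endcode
+*/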
+
+/** \brief Returns current information about specified allocation and atomically marks it as used in current frame.
+
+Current parameters of given allocation are returned in `pAllocationInfo`.
+
+This function also atomically "touches" allocation - marks it as used in current frame,
+just like vmaTouchAllocation().
+If the allocation is in lost state, `pAllocationInfo->deviceMemory == VK_NULL_HANDLE`.
+
+This function uses atomics and doesn't lock any mutex, so it should be quite efficient.
+Still, you can avoid calling it too often:
+
+- You can retrieve same VmaAllocationInfo structure while creating your resource, from function
+  vmaCreateBuffer(), vmaCreateImage(). You can remember it if you are sure parameters don't change
+  (e.g. due to defragmentation or allocation becoming lost).
+- If you just want to check if allocation is not lost, vmaTouchAllocation() will work faster.
+*/
+void vmaGetAllocationInfo(
+    VmaAllocator allocator,
+    VmaAllocation allocation,
+    VmaAllocationInfo* pAllocationInfo);
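+
+/* A sketch, assuming `allocator` and `allocation` are valid:
+
+\code
+VmaAllocationInfo allocInfo = {};
+vmaGetAllocationInfo(allocator, allocation, &allocInfo);
+if(allocInfo.deviceMemory == VK_NULL_HANDLE)
+{
+    // The allocation is lost - the buffer/image bound to it must be recreated.
+}
+\endcode
+*/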
+
+/** \brief Returns `VK_TRUE` if allocation is not lost and atomically marks it as used in current frame.
+
+If the allocation has been created with #VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT flag,
+this function returns `VK_TRUE` if it's not in lost state, so it can still be used.
+It then also atomically "touches" the allocation - marks it as used in current frame,
+so that you can be sure it won't become lost in current frame or next `frameInUseCount` frames.
+
+If the allocation is in lost state, the function returns `VK_FALSE`.
+Memory of such allocation, as well as buffer or image bound to it, should not be used.
+Lost allocation and the buffer/image still need to be destroyed.
+
+If the allocation has been created without #VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT flag,
+this function always returns `VK_TRUE`.
+*/
+VkBool32 vmaTouchAllocation(
+    VmaAllocator allocator,
+    VmaAllocation allocation);
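+
+/* A sketch, assuming `allocator` and `allocation` are valid:
+
+\code
+if(vmaTouchAllocation(allocator, allocation) == VK_FALSE)
+{
+    // The allocation is lost: destroy the old buffer/image and allocation,
+    // then create new ones.
+}
+\endcode
+*/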
+
+/** \brief Sets pUserData in given allocation to new value.
+
+If the allocation was created with VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT,
+pUserData must be either null, or a pointer to a null-terminated string. The function
+makes a local copy of the string and sets it as the allocation's `pUserData`. The string
+passed as pUserData doesn't need to stay valid for the whole lifetime of the allocation -
+you can free it after this call. Any string previously pointed to by the allocation's
+pUserData is freed from memory.
+
+If the flag was not used, the value of the pointer `pUserData` is just copied to the
+allocation's `pUserData`. It is opaque, so you can use it however you want - e.g.
+as a pointer, an ordinal number, or some handle to your own data.
+*/
+void vmaSetAllocationUserData(
+    VmaAllocator allocator,
+    VmaAllocation allocation,
+    void* pUserData);
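+
+/* A sketch of attaching a name, assuming the allocation was created with
+VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT, `textureIndex` exists in the
+calling code, and `<cstdio>` is available for snprintf():
+
+\code
+// The string is copied internally, so a short-lived local buffer is fine.
+char name[64];
+snprintf(name, sizeof(name), "Texture %u", textureIndex);
+vmaSetAllocationUserData(allocator, allocation, name);
+\endcode
+*/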
+
+/** \brief Creates new allocation that is in lost state from the beginning.
+
+It can be useful if you need a dummy, non-null allocation.
+
+You still need to destroy created object using vmaFreeMemory().
+
+Returned allocation is not tied to any specific memory pool or memory type and
+not bound to any image or buffer. It has size = 0. It cannot be turned into
+a real, non-empty allocation.
+*/
+void vmaCreateLostAllocation(
+    VmaAllocator allocator,
+    VmaAllocation* pAllocation);
+
+/** \brief Maps memory represented by given allocation and returns pointer to it.
+
+Maps memory represented by given allocation to make it accessible to CPU code.
+When succeeded, `*ppData` contains pointer to first byte of this memory.
+If the allocation is part of a bigger `VkDeviceMemory` block, the pointer is
+correctly offset to the beginning of the region assigned to this particular
+allocation.
+
+Mapping is internally reference-counted and synchronized, so even though the raw
+Vulkan function `vkMapMemory()` cannot be used to map the same block of `VkDeviceMemory`
+multiple times simultaneously, it is safe to call this function on allocations
+assigned to the same memory block. The actual Vulkan memory will be mapped on first
+mapping and unmapped on last unmapping.
+
+If the function succeeded, you must call vmaUnmapMemory() to unmap the
+allocation when mapping is no longer needed or before freeing the allocation, at
+the latest.
+
+It is also safe to call this function multiple times on the same allocation. You
+must call vmaUnmapMemory() the same number of times as you called vmaMapMemory().
+
+It is also safe to call this function on allocation created with
+#VMA_ALLOCATION_CREATE_MAPPED_BIT flag. Its memory stays mapped all the time.
+You must still call vmaUnmapMemory() the same number of times as you called
+vmaMapMemory(). You must not call vmaUnmapMemory() an additional time to free the
+"0-th" mapping made automatically due to #VMA_ALLOCATION_CREATE_MAPPED_BIT flag.
+
+This function fails when used on allocation made in memory type that is not
+`HOST_VISIBLE`.
+
+This function always fails when called for allocation that was created with
+#VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT flag. Such allocations cannot be
+mapped.
+*/
+VkResult vmaMapMemory(
+    VmaAllocator allocator,
+    VmaAllocation allocation,
+    void** ppData);
+
+/** \brief Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
+
+For details, see description of vmaMapMemory().
+*/
+void vmaUnmapMemory(
+    VmaAllocator allocator,
+    VmaAllocation allocation);
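+
+/* A typical map/write/unmap sketch, assuming `allocator` and `allocation` are valid,
+the memory type is `HOST_VISIBLE`, and `srcData`/`srcDataSize` exist in the calling code:
+
+\code
+void* mappedData = nullptr;
+VkResult res = vmaMapMemory(allocator, allocation, &mappedData);
+if(res == VK_SUCCESS)
+{
+    memcpy(mappedData, srcData, srcDataSize);
+    vmaUnmapMemory(allocator, allocation);
+}
+\endcode
+*/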
+
+/** \brief Flushes memory of given allocation.
+
+Calls `vkFlushMappedMemoryRanges()` for memory associated with given range of given allocation.
+
+- `offset` must be relative to the beginning of the allocation.
+- `size` can be `VK_WHOLE_SIZE`. It means all memory from `offset` to the end of given allocation.
+- `offset` and `size` don't have to be aligned.
+  They are internally rounded down/up to a multiple of `nonCoherentAtomSize`.
+- If `size` is 0, this call is ignored.
+- If the memory type that the `allocation` belongs to is not `HOST_VISIBLE` or it is `HOST_COHERENT`,
+  this call is ignored.
+*/
+void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
+
+/** \brief Invalidates memory of given allocation.
+
+Calls `vkInvalidateMappedMemoryRanges()` for memory associated with given range of given allocation.
+
+- `offset` must be relative to the beginning of the allocation.
+- `size` can be `VK_WHOLE_SIZE`. It means all memory from `offset` to the end of given allocation.
+- `offset` and `size` don't have to be aligned.
+  They are internally rounded down/up to a multiple of `nonCoherentAtomSize`.
+- If `size` is 0, this call is ignored.
+- If the memory type that the `allocation` belongs to is not `HOST_VISIBLE` or it is `HOST_COHERENT`,
+  this call is ignored.
+*/
+void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
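+
+/* A sketch for memory that is `HOST_VISIBLE` but not `HOST_COHERENT`, assuming
+`allocator` and `allocation` are valid:
+
+\code
+// After writing through a mapped pointer, flush the written range:
+vmaFlushAllocation(allocator, allocation, 0, VK_WHOLE_SIZE);
+
+// Before reading data written by the GPU, invalidate it:
+vmaInvalidateAllocation(allocator, allocation, 0, VK_WHOLE_SIZE);
+\endcode
+*/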
+
+/** \brief Checks magic number in margins around all allocations in given memory types (in both default and custom pools) in search for corruptions.
+
+@param memoryTypeBits Bit mask, where each bit set means that a memory type with that index should be checked.
+
+Corruption detection is enabled only when `VMA_DEBUG_DETECT_CORRUPTION` macro is defined to nonzero,
+`VMA_DEBUG_MARGIN` is defined to nonzero and only for memory types that are
+`HOST_VISIBLE` and `HOST_COHERENT`. For more information, see [Corruption detection](@ref debugging_memory_usage_corruption_detection).
+
+Possible return values:
+
+- `VK_ERROR_FEATURE_NOT_PRESENT` - corruption detection is not enabled for any of specified memory types.
+- `VK_SUCCESS` - corruption detection has been performed and succeeded.
+- `VK_ERROR_VALIDATION_FAILED_EXT` - corruption detection has been performed and found memory corruptions around one of the allocations.
+  `VMA_ASSERT` is also fired in that case.
+- Other value: Error returned by Vulkan, e.g. memory mapping failure.
+*/
+VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits);
+
+/** \struct VmaDefragmentationContext
+\brief Opaque object that represents a started defragmentation process.
+
+Fill structure #VmaDefragmentationInfo2 and call function vmaDefragmentationBegin() to create it.
+Call function vmaDefragmentationEnd() to destroy it.
+*/
+VK_DEFINE_HANDLE(VmaDefragmentationContext)
+
+/// Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use.
+typedef enum VmaDefragmentationFlagBits {
+    VMA_DEFRAGMENTATION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VmaDefragmentationFlagBits;
+typedef VkFlags VmaDefragmentationFlags;
+
+/** \brief Parameters for defragmentation.
+
+To be used with function vmaDefragmentationBegin().
+*/
+typedef struct VmaDefragmentationInfo2 {
+    /** \brief Reserved for future use. Should be 0.
+    */
+    VmaDefragmentationFlags flags;
+    /** \brief Number of allocations in `pAllocations` array.
+    */
+    uint32_t allocationCount;
+    /** \brief Pointer to array of allocations that can be defragmented.
+
+    The array should have `allocationCount` elements.
+    The array should not contain nulls.
+    Elements in the array should be unique - same allocation cannot occur twice.
+    It is safe to pass allocations that are in the lost state - they are ignored.
+    All allocations not present in this array are considered non-moveable during this defragmentation.
+    */
+    VmaAllocation* pAllocations;
+    /** \brief Optional, output. Pointer to array that will be filled with information whether the allocation at certain index has been changed during defragmentation.
+
+    The array should have `allocationCount` elements.
+    You can pass null if you are not interested in this information.
+    */
+    VkBool32* pAllocationsChanged;
+    /** \brief Number of pools in `pPools` array.
+    */
+    uint32_t poolCount;
+    /** \brief Either null or pointer to array of pools to be defragmented.
+
+    All the allocations in the specified pools can be moved during defragmentation
+    and there is no way to check if they were really moved as in `pAllocationsChanged`,
+    so you must query all the allocations in all these pools for new `VkDeviceMemory`
+    and offset using vmaGetAllocationInfo() if you might need to recreate buffers
+    and images bound to them.
+
+    The array should have `poolCount` elements.
+    The array should not contain nulls.
+    Elements in the array should be unique - same pool cannot occur twice.
+
+    Using this array is equivalent to specifying all allocations from the pools in `pAllocations`.
+    It might be more efficient.
+    */
+    VmaPool* pPools;
+    /** \brief Maximum total number of bytes that can be copied while moving allocations to different places using transfers on CPU side, like `memcpy()`, `memmove()`.
+    
+    `VK_WHOLE_SIZE` means no limit.
+    */
+    VkDeviceSize maxCpuBytesToMove;
+    /** \brief Maximum number of allocations that can be moved to a different place using transfers on CPU side, like `memcpy()`, `memmove()`.
+
+    `UINT32_MAX` means no limit.
+    */
+    uint32_t maxCpuAllocationsToMove;
+    /** \brief Maximum total number of bytes that can be copied while moving allocations to different places using transfers on GPU side, posted to `commandBuffer`.
+    
+    `VK_WHOLE_SIZE` means no limit.
+    */
+    VkDeviceSize maxGpuBytesToMove;
+    /** \brief Maximum number of allocations that can be moved to a different place using transfers on GPU side, posted to `commandBuffer`.
+
+    `UINT32_MAX` means no limit.
+    */
+    uint32_t maxGpuAllocationsToMove;
+    /** \brief Optional. Command buffer where GPU copy commands will be posted.
+
+    If not null, it must be a valid command buffer handle that supports Transfer queue type.
+    It must be in the recording state and outside of a render pass instance.
+    You need to submit it and make sure it finished execution before calling vmaDefragmentationEnd().
+
+    Passing null means that only CPU defragmentation will be performed.
+    */
+    VkCommandBuffer commandBuffer;
+} VmaDefragmentationInfo2;
+
+/** \brief Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
+
+\deprecated This is a part of the old interface. It is recommended to use structure #VmaDefragmentationInfo2 and function vmaDefragmentationBegin() instead.
+*/
+typedef struct VmaDefragmentationInfo {
+    /** \brief Maximum total number of bytes that can be copied while moving allocations to different places.
+    
+    Default is `VK_WHOLE_SIZE`, which means no limit.
+    */
+    VkDeviceSize maxBytesToMove;
+    /** \brief Maximum number of allocations that can be moved to a different place.
+
+    Default is `UINT32_MAX`, which means no limit.
+    */
+    uint32_t maxAllocationsToMove;
+} VmaDefragmentationInfo;
+
+/** \brief Statistics returned by function vmaDefragment(). */
+typedef struct VmaDefragmentationStats {
+    /// Total number of bytes that have been copied while moving allocations to different places.
+    VkDeviceSize bytesMoved;
+    /// Total number of bytes that have been released to the system by freeing empty `VkDeviceMemory` objects.
+    VkDeviceSize bytesFreed;
+    /// Number of allocations that have been moved to different places.
+    uint32_t allocationsMoved;
+    /// Number of empty `VkDeviceMemory` objects that have been released to the system.
+    uint32_t deviceMemoryBlocksFreed;
+} VmaDefragmentationStats;
+
+/** \brief Begins defragmentation process.
+
+@param allocator Allocator object.
+@param pInfo Structure filled with parameters of defragmentation.
+@param[out] pStats Optional. Statistics of defragmentation. You can pass null if you are not interested in this information.
+@param[out] pContext Context object that must be passed to vmaDefragmentationEnd() to finish defragmentation.
+@return `VK_SUCCESS` and `*pContext == null` if defragmentation finished within this function call. `VK_NOT_READY` and `*pContext != null` if defragmentation has been started and you need to call vmaDefragmentationEnd() to finish it. Negative value in case of error.
+
+Use this function instead of old, deprecated vmaDefragment().
+
+Warning! Between the call to vmaDefragmentationBegin() and vmaDefragmentationEnd():
+
+- You should not use any of the allocations passed as `pInfo->pAllocations` or
+  any allocations that belong to pools passed as `pInfo->pPools`,
+  including calling vmaGetAllocationInfo(), vmaTouchAllocation(), or accessing
+  their data.
+- Some mutexes protecting internal data structures may be locked, so trying to
+  make or free any allocations, bind buffers or images, map memory, or launch
+  another simultaneous defragmentation in between may cause stall (when done on
+  another thread) or deadlock (when done on the same thread), unless you are
+  100% sure that defragmented allocations are in different pools.
+- Information returned via `pStats` and `pInfo->pAllocationsChanged` is undefined.
+  It becomes valid after a call to vmaDefragmentationEnd().
+- If `pInfo->commandBuffer` is not null, you must submit that command buffer
+  and make sure it finished execution before calling vmaDefragmentationEnd().
+*/
+VkResult vmaDefragmentationBegin(
+    VmaAllocator allocator,
+    const VmaDefragmentationInfo2* pInfo,
+    VmaDefragmentationStats* pStats,
+    VmaDefragmentationContext *pContext);
+
+/** \brief Ends defragmentation process.
+
+Use this function to finish defragmentation started by vmaDefragmentationBegin().
+It is safe to pass `context == null`. The function then does nothing.
+*/
+VkResult vmaDefragmentationEnd(
+    VmaAllocator allocator,
+    VmaDefragmentationContext context);
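+
+/* A CPU-only defragmentation sketch, assuming `allocator` is valid,
+`allocations` is a std::vector<VmaAllocation> of allocations that may be moved,
+and `allocationsChanged` is a std::vector<VkBool32> of the same size:
+
+\code
+VmaDefragmentationInfo2 defragInfo = {};
+defragInfo.allocationCount = (uint32_t)allocations.size();
+defragInfo.pAllocations = allocations.data();
+defragInfo.pAllocationsChanged = allocationsChanged.data();
+defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
+defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
+// commandBuffer is left null, so only CPU-side defragmentation is performed.
+
+VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
+VkResult res = vmaDefragmentationBegin(allocator, &defragInfo, nullptr, &defragCtx);
+// res is VK_SUCCESS if it finished immediately, or VK_NOT_READY if still in progress.
+vmaDefragmentationEnd(allocator, defragCtx);
+
+// Afterwards, recreate buffers/images whose entry in allocationsChanged is VK_TRUE.
+\endcode
+*/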
+
+/** \brief Deprecated. Compacts memory by moving allocations.
+
+@param pAllocations Array of allocations that can be moved during this compaction.
+@param allocationCount Number of elements in pAllocations and pAllocationsChanged arrays.
+@param[out] pAllocationsChanged Array of boolean values that will indicate whether matching allocation in pAllocations array has been moved. This parameter is optional. Pass null if you don't need this information.
+@param pDefragmentationInfo Configuration parameters. Optional - pass null to use default values.
+@param[out] pDefragmentationStats Statistics returned by the function. Optional - pass null if you don't need this information.
+@return `VK_SUCCESS` if completed, negative error code in case of error.
+
+\deprecated This is a part of the old interface. It is recommended to use structure #VmaDefragmentationInfo2 and function vmaDefragmentationBegin() instead.
+
+This function works by moving allocations to different places (different
+`VkDeviceMemory` objects and/or different offsets) in order to optimize memory
+usage. Only allocations that are in `pAllocations` array can be moved. All other
+allocations are considered nonmovable in this call. Basic rules:
+
+- Only allocations made in memory types that have
+  `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT` and `VK_MEMORY_PROPERTY_HOST_COHERENT_BIT`
+  flags can be compacted. You may pass other allocations but it makes no sense -
+  these will never be moved.
+- Custom pools created with #VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT or
+  #VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT flag are not defragmented. Allocations
+  passed to this function that come from such pools are ignored.
+- Allocations created with #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT or
+  created as dedicated allocations for any other reason are also ignored.
+- Both allocations made with or without #VMA_ALLOCATION_CREATE_MAPPED_BIT
+  flag can be compacted. If not persistently mapped, memory will be mapped
+  temporarily inside this function if needed.
+- You must not pass same #VmaAllocation object multiple times in `pAllocations` array.
+
+The function also frees empty `VkDeviceMemory` blocks.
+
+Warning: This function may be time-consuming, so you shouldn't call it too often
+(like after every resource creation/destruction).
+You can call it on special occasions (like when reloading a game level or
+when you just destroyed a lot of objects). Calling it every frame may be OK, but
+you should measure that on your platform.
+
+For more information, see [Defragmentation](@ref defragmentation) chapter.
+*/
+VkResult vmaDefragment(
+    VmaAllocator allocator,
+    VmaAllocation* pAllocations,
+    size_t allocationCount,
+    VkBool32* pAllocationsChanged,
+    const VmaDefragmentationInfo *pDefragmentationInfo,
+    VmaDefragmentationStats* pDefragmentationStats);
+
+/** \brief Binds buffer to allocation.
+
+Binds specified buffer to region of memory represented by specified allocation.
+Gets `VkDeviceMemory` handle and offset from the allocation.
+If you want to create a buffer, allocate memory for it and bind them together separately,
+you should use this function for binding instead of standard `vkBindBufferMemory()`,
+because it ensures proper synchronization so that when a `VkDeviceMemory` object is used by multiple
+allocations, calls to `vkBind*Memory()` or `vkMapMemory()` won't happen from multiple threads simultaneously
+(which is illegal in Vulkan).
+
+It is recommended to use function vmaCreateBuffer() instead of this one.
+*/
+VkResult vmaBindBufferMemory(
+    VmaAllocator allocator,
+    VmaAllocation allocation,
+    VkBuffer buffer);
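+
+/* A sketch of the separate create/allocate/bind path, assuming `allocator`,
+`device` and a filled `bufCreateInfo` exist; error handling is omitted:
+
+\code
+VkBuffer buf = VK_NULL_HANDLE;
+vkCreateBuffer(device, &bufCreateInfo, nullptr, &buf);
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
+
+VmaAllocation alloc = VK_NULL_HANDLE;
+vmaAllocateMemoryForBuffer(allocator, buf, &allocCreateInfo, &alloc, nullptr);
+vmaBindBufferMemory(allocator, alloc, buf);
+\endcode
+*/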
+
+/** \brief Binds image to allocation.
+
+Binds specified image to region of memory represented by specified allocation.
+Gets `VkDeviceMemory` handle and offset from the allocation.
+If you want to create an image, allocate memory for it and bind them together separately,
+you should use this function for binding instead of standard `vkBindImageMemory()`,
+because it ensures proper synchronization so that when a `VkDeviceMemory` object is used by multiple
+allocations, calls to `vkBind*Memory()` or `vkMapMemory()` won't happen from multiple threads simultaneously
+(which is illegal in Vulkan).
+
+It is recommended to use function vmaCreateImage() instead of this one.
+*/
+VkResult vmaBindImageMemory(
+    VmaAllocator allocator,
+    VmaAllocation allocation,
+    VkImage image);
+
+/**
+@param[out] pBuffer Buffer that was created.
+@param[out] pAllocation Allocation that was created.
+@param[out] pAllocationInfo Optional. Information about allocated memory. It can be later fetched using function vmaGetAllocationInfo().
+
+This function automatically:
+
+-# Creates buffer.
+-# Allocates appropriate memory for it.
+-# Binds the buffer with the memory.
+
+If any of these operations fails, the buffer and allocation are not created,
+the returned value is a negative error code, and `*pBuffer` and `*pAllocation` are null.
+
+If the function succeeded, you must destroy both buffer and allocation when you
+no longer need them using either convenience function vmaDestroyBuffer() or
+separately, using `vkDestroyBuffer()` and vmaFreeMemory().
+
+If VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT flag was used,
+VK_KHR_dedicated_allocation extension is used internally to query the driver whether
+it requires or prefers the new buffer to have a dedicated allocation. If yes,
+and if dedicated allocation is possible (VmaAllocationCreateInfo::pool is null
+and VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT is not used), it creates dedicated
+allocation for this buffer, just like when using
+VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT.
+*/
+VkResult vmaCreateBuffer(
+    VmaAllocator allocator,
+    const VkBufferCreateInfo* pBufferCreateInfo,
+    const VmaAllocationCreateInfo* pAllocationCreateInfo,
+    VkBuffer* pBuffer,
+    VmaAllocation* pAllocation,
+    VmaAllocationInfo* pAllocationInfo);
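+
+/* A minimal sketch, assuming `allocator` is a valid #VmaAllocator; error handling is omitted:
+
+\code
+VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufCreateInfo.size = 65536;
+bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
+
+VkBuffer buf = VK_NULL_HANDLE;
+VmaAllocation alloc = VK_NULL_HANDLE;
+VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
+
+// ... use the buffer ...
+
+vmaDestroyBuffer(allocator, buf, alloc);
+\endcode
+*/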
+
+/** \brief Destroys Vulkan buffer and frees allocated memory.
+
+This is just a convenience function equivalent to:
+
+\code
+vkDestroyBuffer(device, buffer, allocationCallbacks);
+vmaFreeMemory(allocator, allocation);
+\endcode
+
+It is safe to pass null as buffer and/or allocation.
+*/
+void vmaDestroyBuffer(
+    VmaAllocator allocator,
+    VkBuffer buffer,
+    VmaAllocation allocation);
+
+/// Function similar to vmaCreateBuffer().
+VkResult vmaCreateImage(
+    VmaAllocator allocator,
+    const VkImageCreateInfo* pImageCreateInfo,
+    const VmaAllocationCreateInfo* pAllocationCreateInfo,
+    VkImage* pImage,
+    VmaAllocation* pAllocation,
+    VmaAllocationInfo* pAllocationInfo);
+
+/** \brief Destroys Vulkan image and frees allocated memory.
+
+This is just a convenience function equivalent to:
+
+\code
+vkDestroyImage(device, image, allocationCallbacks);
+vmaFreeMemory(allocator, allocation);
+\endcode
+
+It is safe to pass null as image and/or allocation.
+*/
+void vmaDestroyImage(
+    VmaAllocator allocator,
+    VkImage image,
+    VmaAllocation allocation);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H
+
+// For Visual Studio IntelliSense.
+#if defined(__cplusplus) && defined(__INTELLISENSE__)
+#define VMA_IMPLEMENTATION
+#endif
+
+#ifdef VMA_IMPLEMENTATION
+#undef VMA_IMPLEMENTATION
+
+#include <cstdint>
+#include <cstdlib>
+#include <cstring>
+
+/*******************************************************************************
+CONFIGURATION SECTION
+
+Define some of these macros before each #include of this header, or change them
+here if you need behavior other than the default for your environment.
+*/
+
+/*
+Define this macro to 1 to make the library fetch pointers to Vulkan functions
+internally, like:
+
+    vulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
+
+Define to 0 if you are going to provide your own pointers to Vulkan functions via
+VmaAllocatorCreateInfo::pVulkanFunctions.
+*/
+#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
+#define VMA_STATIC_VULKAN_FUNCTIONS 1
+#endif
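+
+// For example (an illustrative sketch, not a requirement), a project that loads
+// Vulkan entry points dynamically might compile the implementation in exactly one
+// translation unit like this, and then fill VmaAllocatorCreateInfo::pVulkanFunctions
+// manually before creating the allocator:
+//
+//     #define VMA_STATIC_VULKAN_FUNCTIONS 0
+//     #define VMA_IMPLEMENTATION
+//     #include "vk_mem_alloc.h" // or the path used in this project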
+
+// Define this macro to 1 to make the library use STL containers instead of its own implementation.
+//#define VMA_USE_STL_CONTAINERS 1
+
+/* Set this macro to 1 to make the library include and use STL containers:
+std::pair, std::vector, std::list, std::unordered_map.
+
+Set it to 0 or leave it undefined to make the library use its own implementation of
+the containers.
+*/
+#if VMA_USE_STL_CONTAINERS
+   #define VMA_USE_STL_VECTOR 1
+   #define VMA_USE_STL_UNORDERED_MAP 1
+   #define VMA_USE_STL_LIST 1
+#endif
+
+#ifndef VMA_USE_STL_SHARED_MUTEX
+    // Minimum Visual Studio 2015 Update 2
+    #if defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && NTDDI_VERSION > NTDDI_WIN10_RS2
+        #define VMA_USE_STL_SHARED_MUTEX 1
+    #endif
+#endif
+
+#if VMA_USE_STL_VECTOR
+   #include <vector>
+#endif
+
+#if VMA_USE_STL_UNORDERED_MAP
+   #include <unordered_map>
+#endif
+
+#if VMA_USE_STL_LIST
+   #include <list>
+#endif
+
+/*
+Following headers are used in this CONFIGURATION section only, so feel free to
+remove them if not needed.
+*/
+#include <cassert> // for assert
+#include <algorithm> // for min, max
+#include <mutex>
+#include <atomic> // for std::atomic
+
+#ifndef VMA_NULL
+   // Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0.
+   #define VMA_NULL   nullptr
+#endif
+
+#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
+#include <cstdlib>
+void *aligned_alloc(size_t alignment, size_t size)
+{
+    // alignment must be >= sizeof(void*)
+    if(alignment < sizeof(void*))
+    {
+        alignment = sizeof(void*);
+    }
+
+    return memalign(alignment, size);
+}
+#elif defined(__APPLE__) || defined(__ANDROID__)
+#  define ALIGNED_ALLOC_WITH_POSIX_MEMALIGN
+#elif defined(__GNU_LIBRARY__)
+#  if !defined(__GLIBC_PREREQ) || !__GLIBC_PREREQ(2, 16)
+// aligned_alloc() is defined in glibc only for version >= 2.16
+#    define ALIGNED_ALLOC_WITH_POSIX_MEMALIGN
+#  endif
+#endif
+
+#ifdef ALIGNED_ALLOC_WITH_POSIX_MEMALIGN
+#include <cstdlib>
+void *aligned_alloc(size_t alignment, size_t size)
+{
+    // alignment must be >= sizeof(void*)
+    if(alignment < sizeof(void*))
+    {
+        alignment = sizeof(void*);
+    }
+
+    void *pointer;
+    if(posix_memalign(&pointer, alignment, size) == 0)
+        return pointer;
+    return VMA_NULL;
+}
+#endif
+
+// If your compiler is not compatible with C++11 and the definition of the
+// aligned_alloc() function is missing, uncommenting the following line may help:
+
+//#include <malloc.h>
+
+// Normal assert to check for programmer's errors, especially in Debug configuration.
+#ifndef VMA_ASSERT
+   #ifdef _DEBUG
+       #define VMA_ASSERT(expr)         assert(expr)
+   #else
+       #define VMA_ASSERT(expr)
+   #endif
+#endif
+
+// Assert that will be called very often, like inside data structures e.g. operator[].
+// Making it non-empty can make program slow.
+#ifndef VMA_HEAVY_ASSERT
+   #ifdef _DEBUG
+       #define VMA_HEAVY_ASSERT(expr)   //VMA_ASSERT(expr)
+   #else
+       #define VMA_HEAVY_ASSERT(expr)
+   #endif
+#endif
+
+#ifndef VMA_ALIGN_OF
+   #define VMA_ALIGN_OF(type)       (__alignof(type))
+#endif
+
+#ifndef VMA_SYSTEM_ALIGNED_MALLOC
+   #if defined(_WIN32)
+       #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment)   (_aligned_malloc((size), (alignment)))
+   #else
+       #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment)   (aligned_alloc((alignment), (size) ))
+   #endif
+#endif
+
+#ifndef VMA_SYSTEM_FREE
+   #if defined(_WIN32)
+       #define VMA_SYSTEM_FREE(ptr)   _aligned_free(ptr)
+   #else
+       #define VMA_SYSTEM_FREE(ptr)   free(ptr)
+   #endif
+#endif
+
+#ifndef VMA_MIN
+   #define VMA_MIN(v1, v2)    (std::min((v1), (v2)))
+#endif
+
+#ifndef VMA_MAX
+   #define VMA_MAX(v1, v2)    (std::max((v1), (v2)))
+#endif
+
+#ifndef VMA_SWAP
+   #define VMA_SWAP(v1, v2)   std::swap((v1), (v2))
+#endif
+
+#ifndef VMA_SORT
+   #define VMA_SORT(beg, end, cmp)  std::sort(beg, end, cmp)
+#endif
+
+#ifndef VMA_DEBUG_LOG
+   #define VMA_DEBUG_LOG(format, ...)
+   /*
+   #define VMA_DEBUG_LOG(format, ...) do { \
+       printf(format, __VA_ARGS__); \
+       printf("\n"); \
+   } while(false)
+   */
+#endif
+
+// Define this macro to 1 to enable functions: vmaBuildStatsString, vmaFreeStatsString.
+#if VMA_STATS_STRING_ENABLED
+    static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
+    {
+        snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
+    }
+    static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
+    {
+        snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
+    }
+    static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
+    {
+        snprintf(outStr, strLen, "%p", ptr);
+    }
+#endif
+
+#ifndef VMA_MUTEX
+    class VmaMutex
+    {
+    public:
+        void Lock() { m_Mutex.lock(); }
+        void Unlock() { m_Mutex.unlock(); }
+    private:
+        std::mutex m_Mutex;
+    };
+    #define VMA_MUTEX VmaMutex
+#endif
+
+// Read-write mutex, where "read" is shared access, "write" is exclusive access.
+#ifndef VMA_RW_MUTEX
+    #if VMA_USE_STL_SHARED_MUTEX
+        // Use std::shared_mutex from C++17.
+        #include <shared_mutex>
+        class VmaRWMutex
+        {
+        public:
+            void LockRead() { m_Mutex.lock_shared(); }
+            void UnlockRead() { m_Mutex.unlock_shared(); }
+            void LockWrite() { m_Mutex.lock(); }
+            void UnlockWrite() { m_Mutex.unlock(); }
+        private:
+            std::shared_mutex m_Mutex;
+        };
+        #define VMA_RW_MUTEX VmaRWMutex
+    #elif defined(_WIN32)
+        // Use SRWLOCK from WinAPI.
+        class VmaRWMutex
+        {
+        public:
+            VmaRWMutex() { InitializeSRWLock(&m_Lock); }
+            void LockRead() { AcquireSRWLockShared(&m_Lock); }
+            void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
+            void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
+            void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
+        private:
+            SRWLOCK m_Lock;
+        };
+        #define VMA_RW_MUTEX VmaRWMutex
+    #else
+        // Less efficient fallback: Use normal mutex.
+        class VmaRWMutex
+        {
+        public:
+            void LockRead() { m_Mutex.Lock(); }
+            void UnlockRead() { m_Mutex.Unlock(); }
+            void LockWrite() { m_Mutex.Lock(); }
+            void UnlockWrite() { m_Mutex.Unlock(); }
+        private:
+            VMA_MUTEX m_Mutex;
+        };
+        #define VMA_RW_MUTEX VmaRWMutex
+    #endif // #if VMA_USE_STL_SHARED_MUTEX
+#endif // #ifndef VMA_RW_MUTEX
+
+/*
+If providing your own implementation, you need to implement a subset of std::atomic:
+
+- Constructor(uint32_t desired)
+- uint32_t load() const
+- void store(uint32_t desired)
+- bool compare_exchange_weak(uint32_t& expected, uint32_t desired)
+*/
+#ifndef VMA_ATOMIC_UINT32
+   #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
+#endif
+
+#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
+    /**
+    Every allocation will have its own memory block.
+    Define to 1 for debugging purposes only.
+    */
+    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
+#endif
+
+#ifndef VMA_DEBUG_ALIGNMENT
+    /**
+    Minimum alignment of all allocations, in bytes.
+    Set to more than 1 for debugging purposes only. Must be power of two.
+    */
+    #define VMA_DEBUG_ALIGNMENT (1)
+#endif
+
+#ifndef VMA_DEBUG_MARGIN
+    /**
+    Minimum margin before and after every allocation, in bytes.
+    Set nonzero for debugging purposes only.
+    */
+    #define VMA_DEBUG_MARGIN (0)
+#endif
+
+#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
+    /**
+    Define this macro to 1 to automatically fill new allocations and destroyed
+    allocations with some bit pattern.
+    */
+    #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
+#endif
+
+#ifndef VMA_DEBUG_DETECT_CORRUPTION
+    /**
+    Define this macro to 1 together with non-zero value of VMA_DEBUG_MARGIN to
+    enable writing magic value to the margin before and after every allocation and
+    validating it, so that memory corruptions (out-of-bounds writes) are detected.
+    */
+    #define VMA_DEBUG_DETECT_CORRUPTION (0)
+#endif
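+
+// For example (an illustrative sketch), corruption detection can be enabled by
+// defining both macros before including the implementation:
+//
+//     #define VMA_DEBUG_MARGIN 16
+//     #define VMA_DEBUG_DETECT_CORRUPTION 1
+//     #define VMA_IMPLEMENTATION
+//     #include "vk_mem_alloc.h" // or the path used in this project
+//
+// and then calling vmaCheckCorruption() or vmaCheckPoolCorruption() at convenient points.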
+
+#ifndef VMA_DEBUG_GLOBAL_MUTEX
+    /**
+    Set this to 1 for debugging purposes only, to enable single mutex protecting all
+    entry calls to the library. Can be useful for debugging multithreading issues.
+    */
+    #define VMA_DEBUG_GLOBAL_MUTEX (0)
+#endif
+
+#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
+    /**
+    Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity.
+    Set to more than 1 for debugging purposes only. Must be power of two.
+    */
+    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
+#endif
+
+#ifndef VMA_SMALL_HEAP_MAX_SIZE
+   /// Maximum size of a memory heap in Vulkan to consider it "small".
+   #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
+#endif
+
+#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
+   /// Default size of a block allocated as single VkDeviceMemory from a "large" heap.
+   #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
+#endif
+
+#ifndef VMA_CLASS_NO_COPY
+    #define VMA_CLASS_NO_COPY(className) \
+        private: \
+            className(const className&) = delete; \
+            className& operator=(const className&) = delete;
+#endif
+
+static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
+
+// Decimal 2139416166, float NaN, little-endian binary 66 E6 84 7F.
+static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
+
+static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED   = 0xDC;
+static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
+
+/*******************************************************************************
+END OF CONFIGURATION
+*/
+
+#if defined(__GNUC__)
+#define GCC_VERSION (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__)
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wtype-limits"
+#pragma GCC diagnostic ignored "-Wunused-variable"
+#if defined(__clang__)
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wtautological-compare"
+#endif
+#if GCC_VERSION >= 80000
+#pragma GCC diagnostic ignored "-Wclass-memaccess"
+#endif
+#if defined(ANDROID)
+#pragma GCC diagnostic ignored "-Wunused-private-field"
+#endif
+#endif
+static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
+
+static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
+    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
+
+// Returns number of bits set to 1 in (v).
+static inline uint32_t VmaCountBitsSet(uint32_t v)
+{
+	uint32_t c = v - ((v >> 1) & 0x55555555);
+	c = ((c >>  2) & 0x33333333) + (c & 0x33333333);
+	c = ((c >>  4) + c) & 0x0F0F0F0F;
+	c = ((c >>  8) + c) & 0x00FF00FF;
+	c = ((c >> 16) + c) & 0x0000FFFF;
+	return c;
+}
+
+// Aligns given value up to the nearest multiple of align value. For example: VmaAlignUp(11, 8) = 16.
+// Use types like uint32_t, uint64_t as T.
+template <typename T>
+static inline T VmaAlignUp(T val, T align)
+{
+	return (val + align - 1) / align * align;
+}
+// Aligns given value down to the nearest multiple of align value. For example: VmaAlignDown(11, 8) = 8.
+// Use types like uint32_t, uint64_t as T.
+template <typename T>
+static inline T VmaAlignDown(T val, T align)
+{
+    return val / align * align;
+}
+
+// Division with mathematical rounding to nearest number.
+template <typename T>
+static inline T VmaRoundDiv(T x, T y)
+{
+	return (x + (y / (T)2)) / y;
+}
+
+/*
+Returns true if given number is a power of two.
+T must be an unsigned integer type, or a signed integer type holding a nonnegative value.
+For 0 returns true.
+*/
+template <typename T>
+inline bool VmaIsPow2(T x)
+{
+    return (x & (x-1)) == 0;
+}
+
+// Returns smallest power of 2 greater or equal to v.
+static inline uint32_t VmaNextPow2(uint32_t v)
+{
+	v--;
+    v |= v >> 1;
+    v |= v >> 2;
+    v |= v >> 4;
+    v |= v >> 8;
+    v |= v >> 16;
+    v++;
+    return v;
+}
+static inline uint64_t VmaNextPow2(uint64_t v)
+{
+	v--;
+    v |= v >> 1;
+    v |= v >> 2;
+    v |= v >> 4;
+    v |= v >> 8;
+    v |= v >> 16;
+    v |= v >> 32;
+    v++;
+    return v;
+}
+
+// Returns largest power of 2 less or equal to v.
+static inline uint32_t VmaPrevPow2(uint32_t v)
+{
+    v |= v >> 1;
+    v |= v >> 2;
+    v |= v >> 4;
+    v |= v >> 8;
+    v |= v >> 16;
+    v = v ^ (v >> 1);
+    return v;
+}
+static inline uint64_t VmaPrevPow2(uint64_t v)
+{
+    v |= v >> 1;
+    v |= v >> 2;
+    v |= v >> 4;
+    v |= v >> 8;
+    v |= v >> 16;
+    v |= v >> 32;
+    v = v ^ (v >> 1);
+    return v;
+}
+
+static inline bool VmaStrIsEmpty(const char* pStr)
+{
+    return pStr == VMA_NULL || *pStr == '\0';
+}
+
+static const char* VmaAlgorithmToStr(uint32_t algorithm)
+{
+    switch(algorithm)
+    {
+    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
+        return "Linear";
+    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
+        return "Buddy";
+    case 0:
+        return "Default";
+    default:
+        VMA_ASSERT(0);
+        return "";
+    }
+}
+
+#ifndef VMA_SORT
+
+template<typename Iterator, typename Compare>
+Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
+{
+    Iterator centerValue = end; --centerValue;
+    Iterator insertIndex = beg;
+    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
+    {
+        if(cmp(*memTypeIndex, *centerValue))
+        {
+            if(insertIndex != memTypeIndex)
+            {
+                VMA_SWAP(*memTypeIndex, *insertIndex);
+            }
+            ++insertIndex;
+        }
+    }
+    if(insertIndex != centerValue)
+    {
+        VMA_SWAP(*insertIndex, *centerValue);
+    }
+    return insertIndex;
+}
+
+template<typename Iterator, typename Compare>
+void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
+{
+    if(beg < end)
+    {
+        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
+        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
+        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
+    }
+}
+
+#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)
+
+#endif // #ifndef VMA_SORT
+
+/*
+Returns true if two memory blocks occupy overlapping pages.
+ResourceA must be at a lower memory offset than ResourceB.
+
+Algorithm is based on "Vulkan 1.0.39 - A Specification (with all registered Vulkan extensions)"
+chapter 11.6 "Resource Memory Association", paragraph "Buffer-Image Granularity".
+*/
+static inline bool VmaBlocksOnSamePage(
+    VkDeviceSize resourceAOffset,
+    VkDeviceSize resourceASize,
+    VkDeviceSize resourceBOffset,
+    VkDeviceSize pageSize)
+{
+    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
+    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
+    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
+    VkDeviceSize resourceBStart = resourceBOffset;
+    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
+    return resourceAEndPage == resourceBStartPage;
+}
+
+enum VmaSuballocationType
+{
+    VMA_SUBALLOCATION_TYPE_FREE = 0,
+    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
+    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
+    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
+    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
+    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
+    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
+};
+
+/*
+Returns true if given suballocation types could conflict and must respect
+VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one is buffer
+or linear image and another one is optimal image. If type is unknown, behave
+conservatively.
+*/
+static inline bool VmaIsBufferImageGranularityConflict(
+    VmaSuballocationType suballocType1,
+    VmaSuballocationType suballocType2)
+{
+    if(suballocType1 > suballocType2)
+    {
+        VMA_SWAP(suballocType1, suballocType2);
+    }
+    
+    switch(suballocType1)
+    {
+    case VMA_SUBALLOCATION_TYPE_FREE:
+        return false;
+    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
+        return true;
+    case VMA_SUBALLOCATION_TYPE_BUFFER:
+        return
+            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
+            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
+    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
+        return
+            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
+            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
+            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
+    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
+        return
+            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
+    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
+        return false;
+    default:
+        VMA_ASSERT(0);
+        return true;
+    }
+}
+
+static void VmaWriteMagicValue(void* pData, VkDeviceSize offset)
+{
+#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
+    uint32_t* pDst = (uint32_t*)((char*)pData + offset);
+    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
+    for(size_t i = 0; i < numberCount; ++i, ++pDst)
+    {
+        *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
+    }
+#else
+    // no-op
+#endif
+}
+
+static bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset)
+{
+#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
+    const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset);
+    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
+    for(size_t i = 0; i < numberCount; ++i, ++pSrc)
+    {
+        if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
+        {
+            return false;
+        }
+    }
+#endif
+    return true;
+}
+
+// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
+struct VmaMutexLock
+{
+    VMA_CLASS_NO_COPY(VmaMutexLock)
+public:
+    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex) :
+        m_pMutex(useMutex ? &mutex : VMA_NULL)
+    { if(m_pMutex) { m_pMutex->Lock(); } }
+    ~VmaMutexLock()
+    { if(m_pMutex) { m_pMutex->Unlock(); } }
+private:
+    VMA_MUTEX* m_pMutex;
+};
+
+// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for reading.
+struct VmaMutexLockRead
+{
+    VMA_CLASS_NO_COPY(VmaMutexLockRead)
+public:
+    VmaMutexLockRead(VMA_RW_MUTEX& mutex, bool useMutex) :
+        m_pMutex(useMutex ? &mutex : VMA_NULL)
+    { if(m_pMutex) { m_pMutex->LockRead(); } }
+    ~VmaMutexLockRead() { if(m_pMutex) { m_pMutex->UnlockRead(); } }
+private:
+    VMA_RW_MUTEX* m_pMutex;
+};
+
+// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for writing.
+struct VmaMutexLockWrite
+{
+    VMA_CLASS_NO_COPY(VmaMutexLockWrite)
+public:
+    VmaMutexLockWrite(VMA_RW_MUTEX& mutex, bool useMutex) :
+        m_pMutex(useMutex ? &mutex : VMA_NULL)
+    { if(m_pMutex) { m_pMutex->LockWrite(); } }
+    ~VmaMutexLockWrite() { if(m_pMutex) { m_pMutex->UnlockWrite(); } }
+private:
+    VMA_RW_MUTEX* m_pMutex;
+};
+
+#if VMA_DEBUG_GLOBAL_MUTEX
+    static VMA_MUTEX gDebugGlobalMutex;
+    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
+#else
+    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
+#endif
+
+// Minimum size of a free suballocation to register it in the free suballocation collection.
+static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
+
+/*
+Performs binary search and returns an iterator to the first element that is greater
+than or equal to (key), according to comparison (cmp).
+
+Cmp should return true if its first argument is less than its second argument.
+
+The returned value is the position of the found element, if present in the collection,
+or the place where a new element with value (key) should be inserted.
+*/
+template <typename CmpLess, typename IterT, typename KeyT>
+static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpLess cmp)
+{
+    size_t down = 0, up = (end - beg);
+    while(down < up)
+    {
+        const size_t mid = (down + up) / 2;
+        if(cmp(*(beg+mid), key))
+        {
+            down = mid + 1;
+        }
+        else
+        {
+            up = mid;
+        }
+    }
+    return beg + down;
+}
+
+/*
+Returns true if all pointers in the array are non-null and unique.
+Warning! O(n^2) complexity. Use only inside VMA_HEAVY_ASSERT.
+T must be pointer type, e.g. VmaAllocation, VmaPool.
+*/
+template<typename T>
+static bool VmaValidatePointerArray(uint32_t count, const T* arr)
+{
+    for(uint32_t i = 0; i < count; ++i)
+    {
+        const T iPtr = arr[i];
+        if(iPtr == VMA_NULL)
+        {
+            return false;
+        }
+        for(uint32_t j = i + 1; j < count; ++j)
+        {
+            if(iPtr == arr[j])
+            {
+                return false;
+            }
+        }
+    }
+    return true;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// Memory allocation
+
+static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
+{
+    if((pAllocationCallbacks != VMA_NULL) &&
+        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
+    {
+        return (*pAllocationCallbacks->pfnAllocation)(
+            pAllocationCallbacks->pUserData,
+            size,
+            alignment,
+            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+    }
+    else
+    {
+        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
+    }
+}
+
+static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
+{
+    if((pAllocationCallbacks != VMA_NULL) &&
+        (pAllocationCallbacks->pfnFree != VMA_NULL))
+    {
+        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
+    }
+    else
+    {
+        VMA_SYSTEM_FREE(ptr);
+    }
+}
+
+template<typename T>
+static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
+{
+    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
+}
+
+template<typename T>
+static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
+{
+    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
+}
+
+#define vma_new(allocator, type)   new(VmaAllocate<type>(allocator))(type)
+
+#define vma_new_array(allocator, type, count)   new(VmaAllocateArray<type>((allocator), (count)))(type)
+
+template<typename T>
+static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
+{
+    ptr->~T();
+    VmaFree(pAllocationCallbacks, ptr);
+}
+
+template<typename T>
+static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
+{
+    if(ptr != VMA_NULL)
+    {
+        for(size_t i = count; i--; )
+        {
+            ptr[i].~T();
+        }
+        VmaFree(pAllocationCallbacks, ptr);
+    }
+}
+
+// STL-compatible allocator.
+template<typename T>
+class VmaStlAllocator
+{
+public:
+    const VkAllocationCallbacks* const m_pCallbacks;
+    typedef T value_type;
+    
+    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
+    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
+
+    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
+    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }
+
+    template<typename U>
+    bool operator==(const VmaStlAllocator<U>& rhs) const
+    {
+        return m_pCallbacks == rhs.m_pCallbacks;
+    }
+    template<typename U>
+    bool operator!=(const VmaStlAllocator<U>& rhs) const
+    {
+        return m_pCallbacks != rhs.m_pCallbacks;
+    }
+
+    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
+};
+
+#if VMA_USE_STL_VECTOR
+
+#define VmaVector std::vector
+
+template<typename T, typename allocatorT>
+static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
+{
+    vec.insert(vec.begin() + index, item);
+}
+
+template<typename T, typename allocatorT>
+static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
+{
+    vec.erase(vec.begin() + index);
+}
+
+#else // #if VMA_USE_STL_VECTOR
+
+/* Class with interface compatible with subset of std::vector.
+T must be POD because constructors and destructors are not called and memcpy is
+used for these objects. */
+template<typename T, typename AllocatorT>
+class VmaVector
+{
+public:
+    typedef T value_type;
+
+    VmaVector(const AllocatorT& allocator) :
+        m_Allocator(allocator),
+        m_pArray(VMA_NULL),
+        m_Count(0),
+        m_Capacity(0)
+    {
+    }
+
+    VmaVector(size_t count, const AllocatorT& allocator) :
+        m_Allocator(allocator),
+        m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
+        m_Count(count),
+        m_Capacity(count)
+    {
+    }
+    
+    VmaVector(const VmaVector<T, AllocatorT>& src) :
+        m_Allocator(src.m_Allocator),
+        m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
+        m_Count(src.m_Count),
+        m_Capacity(src.m_Count)
+    {
+        if(m_Count != 0)
+        {
+            memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
+        }
+    }
+    
+    ~VmaVector()
+    {
+        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
+    }
+
+    VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
+    {
+        if(&rhs != this)
+        {
+            resize(rhs.m_Count);
+            if(m_Count != 0)
+            {
+                memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
+            }
+        }
+        return *this;
+    }
+    
+    bool empty() const { return m_Count == 0; }
+    size_t size() const { return m_Count; }
+    T* data() { return m_pArray; }
+    const T* data() const { return m_pArray; }
+    
+    T& operator[](size_t index)
+    {
+        VMA_HEAVY_ASSERT(index < m_Count);
+        return m_pArray[index];
+    }
+    const T& operator[](size_t index) const
+    {
+        VMA_HEAVY_ASSERT(index < m_Count);
+        return m_pArray[index];
+    }
+
+    T& front()
+    {
+        VMA_HEAVY_ASSERT(m_Count > 0);
+        return m_pArray[0];
+    }
+    const T& front() const
+    {
+        VMA_HEAVY_ASSERT(m_Count > 0);
+        return m_pArray[0];
+    }
+    T& back()
+    {
+        VMA_HEAVY_ASSERT(m_Count > 0);
+        return m_pArray[m_Count - 1];
+    }
+    const T& back() const
+    {
+        VMA_HEAVY_ASSERT(m_Count > 0);
+        return m_pArray[m_Count - 1];
+    }
+
+    void reserve(size_t newCapacity, bool freeMemory = false)
+    {
+        newCapacity = VMA_MAX(newCapacity, m_Count);
+        
+        if((newCapacity < m_Capacity) && !freeMemory)
+        {
+            newCapacity = m_Capacity;
+        }
+        
+        if(newCapacity != m_Capacity)
+        {
+            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
+            if(m_Count != 0)
+            {
+                memcpy(newArray, m_pArray, m_Count * sizeof(T));
+            }
+            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
+            m_Capacity = newCapacity;
+            m_pArray = newArray;
+        }
+    }
+
+    void resize(size_t newCount, bool freeMemory = false)
+    {
+        size_t newCapacity = m_Capacity;
+        if(newCount > m_Capacity)
+        {
+            newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
+        }
+        else if(freeMemory)
+        {
+            newCapacity = newCount;
+        }
+
+        if(newCapacity != m_Capacity)
+        {
+            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
+            const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
+            if(elementsToCopy != 0)
+            {
+                memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
+            }
+            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
+            m_Capacity = newCapacity;
+            m_pArray = newArray;
+        }
+
+        m_Count = newCount;
+    }
+
+    void clear(bool freeMemory = false)
+    {
+        resize(0, freeMemory);
+    }
+
+    void insert(size_t index, const T& src)
+    {
+        VMA_HEAVY_ASSERT(index <= m_Count);
+        const size_t oldCount = size();
+        resize(oldCount + 1);
+        if(index < oldCount)
+        {
+            memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
+        }
+        m_pArray[index] = src;
+    }
+
+    void remove(size_t index)
+    {
+        VMA_HEAVY_ASSERT(index < m_Count);
+        const size_t oldCount = size();
+        if(index < oldCount - 1)
+        {
+            memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
+        }
+        resize(oldCount - 1);
+    }
+
+    void push_back(const T& src)
+    {
+        const size_t newIndex = size();
+        resize(newIndex + 1);
+        m_pArray[newIndex] = src;
+    }
+
+    void pop_back()
+    {
+        VMA_HEAVY_ASSERT(m_Count > 0);
+        resize(size() - 1);
+    }
+
+    void push_front(const T& src)
+    {
+        insert(0, src);
+    }
+
+    void pop_front()
+    {
+        VMA_HEAVY_ASSERT(m_Count > 0);
+        remove(0);
+    }
+
+    typedef T* iterator;
+
+    iterator begin() { return m_pArray; }
+    iterator end() { return m_pArray + m_Count; }
+
+private:
+    AllocatorT m_Allocator;
+    T* m_pArray;
+    size_t m_Count;
+    size_t m_Capacity;
+};
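+
+// Illustrative usage (pCallbacks stands for a hypothetical const VkAllocationCallbacks* here):
+//   VmaStlAllocator<uint32_t> alloc(pCallbacks);
+//   VmaVector< uint32_t, VmaStlAllocator<uint32_t> > vec(alloc);
+//   vec.push_back(42u); // elements are moved with memcpy on growth, hence the POD requirement.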
+
+template<typename T, typename allocatorT>
+static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
+{
+    vec.insert(index, item);
+}
+
+template<typename T, typename allocatorT>
+static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
+{
+    vec.remove(index);
+}
+
+#endif // #if VMA_USE_STL_VECTOR
+
+template<typename CmpLess, typename VectorT>
+size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
+{
+    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
+        vector.data(),
+        vector.data() + vector.size(),
+        value,
+        CmpLess()) - vector.data();
+    VmaVectorInsert(vector, indexToInsert, value);
+    return indexToInsert;
+}
+
+template<typename CmpLess, typename VectorT>
+bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
+{
+    CmpLess comparator;
+    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
+        vector.begin(),
+        vector.end(),
+        value,
+        comparator);
+    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
+    {
+        size_t indexToRemove = it - vector.begin();
+        VmaVectorRemove(vector, indexToRemove);
+        return true;
+    }
+    return false;
+}
+
+template<typename CmpLess, typename IterT, typename KeyT>
+IterT VmaVectorFindSorted(const IterT& beg, const IterT& end, const KeyT& value)
+{
+    CmpLess comparator;
+    IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
+        beg, end, value, comparator);
+    if(it == end ||
+        (!comparator(*it, value) && !comparator(value, *it)))
+    {
+        return it;
+    }
+    return end;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// class VmaPoolAllocator
+
+/*
+Allocator for objects of type T using a list of arrays (pools) to speed up
+allocation. The number of elements that can be allocated is not bounded because
+the allocator can create multiple blocks.
+*/
+template<typename T>
+class VmaPoolAllocator
+{
+    VMA_CLASS_NO_COPY(VmaPoolAllocator)
+public:
+    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock);
+    ~VmaPoolAllocator();
+    void Clear();
+    T* Alloc();
+    void Free(T* ptr);
+
+private:
+    union Item
+    {
+        uint32_t NextFreeIndex;
+        T Value;
+    };
+
+    struct ItemBlock
+    {
+        Item* pItems;
+        uint32_t FirstFreeIndex;
+    };
+    
+    const VkAllocationCallbacks* m_pAllocationCallbacks;
+    size_t m_ItemsPerBlock;
+    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
+
+    ItemBlock& CreateNewBlock();
+};
+
+template<typename T>
+VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock) :
+    m_pAllocationCallbacks(pAllocationCallbacks),
+    m_ItemsPerBlock(itemsPerBlock),
+    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
+{
+    VMA_ASSERT(itemsPerBlock > 0);
+}
+
+template<typename T>
+VmaPoolAllocator<T>::~VmaPoolAllocator()
+{
+    Clear();
+}
+
+template<typename T>
+void VmaPoolAllocator<T>::Clear()
+{
+    for(size_t i = m_ItemBlocks.size(); i--; )
+        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
+    m_ItemBlocks.clear();
+}
+
+template<typename T>
+T* VmaPoolAllocator<T>::Alloc()
+{
+    for(size_t i = m_ItemBlocks.size(); i--; )
+    {
+        ItemBlock& block = m_ItemBlocks[i];
+        // This block has some free items: Use first one.
+        if(block.FirstFreeIndex != UINT32_MAX)
+        {
+            Item* const pItem = &block.pItems[block.FirstFreeIndex];
+            block.FirstFreeIndex = pItem->NextFreeIndex;
+            return &pItem->Value;
+        }
+    }
+
+    // No block has free item: Create new one and use it.
+    ItemBlock& newBlock = CreateNewBlock();
+    Item* const pItem = &newBlock.pItems[0];
+    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
+    return &pItem->Value;
+}
+
+template<typename T>
+void VmaPoolAllocator<T>::Free(T* ptr)
+{
+    // Search all memory blocks to find ptr.
+    for(size_t i = 0; i < m_ItemBlocks.size(); ++i)
+    {
+        ItemBlock& block = m_ItemBlocks[i];
+        
+        // Casting to union.
+        Item* pItemPtr;
+        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));
+        
+        // Check if pItemPtr is in address range of this block.
+        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
+        {
+            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
+            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
+            block.FirstFreeIndex = index;
+            return;
+        }
+    }
+    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
+}
+
+template<typename T>
+typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
+{
+    ItemBlock newBlock = {
+        vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
+
+    m_ItemBlocks.push_back(newBlock);
+
+    // Setup singly-linked list of all free items in this block.
+    for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
+        newBlock.pItems[i].NextFreeIndex = i + 1;
+    newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
+    return m_ItemBlocks.back();
+}
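+
+// Illustrative usage (MyItem stands for a hypothetical trivially-constructible type,
+// 128 is an example block size):
+//   VmaPoolAllocator<MyItem> pool(pAllocationCallbacks, 128);
+//   MyItem* item = pool.Alloc(); // returns raw storage from an existing or newly created block.
+//   pool.Free(item);             // links the slot back into its block's free list.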
+
+////////////////////////////////////////////////////////////////////////////////
+// class VmaRawList, VmaList
+
+#if VMA_USE_STL_LIST
+
+#define VmaList std::list
+
+#else // #if VMA_USE_STL_LIST
+
+template<typename T>
+struct VmaListItem
+{
+    VmaListItem* pPrev;
+    VmaListItem* pNext;
+    T Value;
+};
+
+// Doubly linked list.
+template<typename T>
+class VmaRawList
+{
+    VMA_CLASS_NO_COPY(VmaRawList)
+public:
+    typedef VmaListItem<T> ItemType;
+
+    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
+    ~VmaRawList();
+    void Clear();
+
+    size_t GetCount() const { return m_Count; }
+    bool IsEmpty() const { return m_Count == 0; }
+
+    ItemType* Front() { return m_pFront; }
+    const ItemType* Front() const { return m_pFront; }
+    ItemType* Back() { return m_pBack; }
+    const ItemType* Back() const { return m_pBack; }
+
+    ItemType* PushBack();
+    ItemType* PushFront();
+    ItemType* PushBack(const T& value);
+    ItemType* PushFront(const T& value);
+    void PopBack();
+    void PopFront();
+    
+    // Item can be null - it means PushBack.
+    ItemType* InsertBefore(ItemType* pItem);
+    // Item can be null - it means PushFront.
+    ItemType* InsertAfter(ItemType* pItem);
+
+    ItemType* InsertBefore(ItemType* pItem, const T& value);
+    ItemType* InsertAfter(ItemType* pItem, const T& value);
+
+    void Remove(ItemType* pItem);
+
+private:
+    const VkAllocationCallbacks* const m_pAllocationCallbacks;
+    VmaPoolAllocator<ItemType> m_ItemAllocator;
+    ItemType* m_pFront;
+    ItemType* m_pBack;
+    size_t m_Count;
+};
+
+template<typename T>
+VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
+    m_pAllocationCallbacks(pAllocationCallbacks),
+    m_ItemAllocator(pAllocationCallbacks, 128),
+    m_pFront(VMA_NULL),
+    m_pBack(VMA_NULL),
+    m_Count(0)
+{
+}
+
+template<typename T>
+VmaRawList<T>::~VmaRawList()
+{
+    // Intentionally not calling Clear, because that would do unnecessary
+    // work returning all items to m_ItemAllocator as free.
+}
+
+template<typename T>
+void VmaRawList<T>::Clear()
+{
+    if(IsEmpty() == false)
+    {
+        ItemType* pItem = m_pBack;
+        while(pItem != VMA_NULL)
+        {
+            ItemType* const pPrevItem = pItem->pPrev;
+            m_ItemAllocator.Free(pItem);
+            pItem = pPrevItem;
+        }
+        m_pFront = VMA_NULL;
+        m_pBack = VMA_NULL;
+        m_Count = 0;
+    }
+}
+
+template<typename T>
+VmaListItem<T>* VmaRawList<T>::PushBack()
+{
+    ItemType* const pNewItem = m_ItemAllocator.Alloc();
+    pNewItem->pNext = VMA_NULL;
+    if(IsEmpty())
+    {
+        pNewItem->pPrev = VMA_NULL;
+        m_pFront = pNewItem;
+        m_pBack = pNewItem;
+        m_Count = 1;
+    }
+    else
+    {
+        pNewItem->pPrev = m_pBack;
+        m_pBack->pNext = pNewItem;
+        m_pBack = pNewItem;
+        ++m_Count;
+    }
+    return pNewItem;
+}
+
+template<typename T>
+VmaListItem<T>* VmaRawList<T>::PushFront()
+{
+    ItemType* const pNewItem = m_ItemAllocator.Alloc();
+    pNewItem->pPrev = VMA_NULL;
+    if(IsEmpty())
+    {
+        pNewItem->pNext = VMA_NULL;
+        m_pFront = pNewItem;
+        m_pBack = pNewItem;
+        m_Count = 1;
+    }
+    else
+    {
+        pNewItem->pNext = m_pFront;
+        m_pFront->pPrev = pNewItem;
+        m_pFront = pNewItem;
+        ++m_Count;
+    }
+    return pNewItem;
+}
+
+template<typename T>
+VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
+{
+    ItemType* const pNewItem = PushBack();
+    pNewItem->Value = value;
+    return pNewItem;
+}
+
+template<typename T>
+VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
+{
+    ItemType* const pNewItem = PushFront();
+    pNewItem->Value = value;
+    return pNewItem;
+}
+
+template<typename T>
+void VmaRawList<T>::PopBack()
+{
+    VMA_HEAVY_ASSERT(m_Count > 0);
+    ItemType* const pBackItem = m_pBack;
+    ItemType* const pPrevItem = pBackItem->pPrev;
+    if(pPrevItem != VMA_NULL)
+    {
+        pPrevItem->pNext = VMA_NULL;
+    }
+    m_pBack = pPrevItem;
+    m_ItemAllocator.Free(pBackItem);
+    --m_Count;
+}
+
+template<typename T>
+void VmaRawList<T>::PopFront()
+{
+    VMA_HEAVY_ASSERT(m_Count > 0);
+    ItemType* const pFrontItem = m_pFront;
+    ItemType* const pNextItem = pFrontItem->pNext;
+    if(pNextItem != VMA_NULL)
+    {
+        pNextItem->pPrev = VMA_NULL;
+    }
+    m_pFront = pNextItem;
+    m_ItemAllocator.Free(pFrontItem);
+    --m_Count;
+}
+
+template<typename T>
+void VmaRawList<T>::Remove(ItemType* pItem)
+{
+    VMA_HEAVY_ASSERT(pItem != VMA_NULL);
+    VMA_HEAVY_ASSERT(m_Count > 0);
+
+    if(pItem->pPrev != VMA_NULL)
+    {
+        pItem->pPrev->pNext = pItem->pNext;
+    }
+    else
+    {
+        VMA_HEAVY_ASSERT(m_pFront == pItem);
+        m_pFront = pItem->pNext;
+    }
+
+    if(pItem->pNext != VMA_NULL)
+    {
+        pItem->pNext->pPrev = pItem->pPrev;
+    }
+    else
+    {
+        VMA_HEAVY_ASSERT(m_pBack == pItem);
+        m_pBack = pItem->pPrev;
+    }
+
+    m_ItemAllocator.Free(pItem);
+    --m_Count;
+}
+
+template<typename T>
+VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
+{
+    if(pItem != VMA_NULL)
+    {
+        ItemType* const prevItem = pItem->pPrev;
+        ItemType* const newItem = m_ItemAllocator.Alloc();
+        newItem->pPrev = prevItem;
+        newItem->pNext = pItem;
+        pItem->pPrev = newItem;
+        if(prevItem != VMA_NULL)
+        {
+            prevItem->pNext = newItem;
+        }
+        else
+        {
+            VMA_HEAVY_ASSERT(m_pFront == pItem);
+            m_pFront = newItem;
+        }
+        ++m_Count;
+        return newItem;
+    }
+    else
+        return PushBack();
+}
+
+template<typename T>
+VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
+{
+    if(pItem != VMA_NULL)
+    {
+        ItemType* const nextItem = pItem->pNext;
+        ItemType* const newItem = m_ItemAllocator.Alloc();
+        newItem->pNext = nextItem;
+        newItem->pPrev = pItem;
+        pItem->pNext = newItem;
+        if(nextItem != VMA_NULL)
+        {
+            nextItem->pPrev = newItem;
+        }
+        else
+        {
+            VMA_HEAVY_ASSERT(m_pBack == pItem);
+            m_pBack = newItem;
+        }
+        ++m_Count;
+        return newItem;
+    }
+    else
+        return PushFront();
+}
+
+template<typename T>
+VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
+{
+    ItemType* const newItem = InsertBefore(pItem);
+    newItem->Value = value;
+    return newItem;
+}
+
+template<typename T>
+VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
+{
+    ItemType* const newItem = InsertAfter(pItem);
+    newItem->Value = value;
+    return newItem;
+}
+
+template<typename T, typename AllocatorT>
+class VmaList
+{
+    VMA_CLASS_NO_COPY(VmaList)
+public:
+    class iterator
+    {
+    public:
+        iterator() :
+            m_pList(VMA_NULL),
+            m_pItem(VMA_NULL)
+        {
+        }
+
+        T& operator*() const
+        {
+            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
+            return m_pItem->Value;
+        }
+        T* operator->() const
+        {
+            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
+            return &m_pItem->Value;
+        }
+
+        iterator& operator++()
+        {
+            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
+            m_pItem = m_pItem->pNext;
+            return *this;
+        }
+        iterator& operator--()
+        {
+            if(m_pItem != VMA_NULL)
+            {
+                m_pItem = m_pItem->pPrev;
+            }
+            else
+            {
+                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
+                m_pItem = m_pList->Back();
+            }
+            return *this;
+        }
+
+        iterator operator++(int)
+        {
+            iterator result = *this;
+            ++*this;
+            return result;
+        }
+        iterator operator--(int)
+        {
+            iterator result = *this;
+            --*this;
+            return result;
+        }
+
+        bool operator==(const iterator& rhs) const
+        {
+            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
+            return m_pItem == rhs.m_pItem;
+        }
+        bool operator!=(const iterator& rhs) const
+        {
+            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
+            return m_pItem != rhs.m_pItem;
+        }
+        
+    private:
+        VmaRawList<T>* m_pList;
+        VmaListItem<T>* m_pItem;
+
+        iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
+            m_pList(pList),
+            m_pItem(pItem)
+        {
+        }
+
+        friend class VmaList<T, AllocatorT>;
+    };
+
+    class const_iterator
+    {
+    public:
+        const_iterator() :
+            m_pList(VMA_NULL),
+            m_pItem(VMA_NULL)
+        {
+        }
+
+        const_iterator(const iterator& src) :
+            m_pList(src.m_pList),
+            m_pItem(src.m_pItem)
+        {
+        }
+        
+        const T& operator*() const
+        {
+            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
+            return m_pItem->Value;
+        }
+        const T* operator->() const
+        {
+            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
+            return &m_pItem->Value;
+        }
+
+        const_iterator& operator++()
+        {
+            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
+            m_pItem = m_pItem->pNext;
+            return *this;
+        }
+        const_iterator& operator--()
+        {
+            if(m_pItem != VMA_NULL)
+            {
+                m_pItem = m_pItem->pPrev;
+            }
+            else
+            {
+                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
+                m_pItem = m_pList->Back();
+            }
+            return *this;
+        }
+
+        const_iterator operator++(int)
+        {
+            const_iterator result = *this;
+            ++*this;
+            return result;
+        }
+        const_iterator operator--(int)
+        {
+            const_iterator result = *this;
+            --*this;
+            return result;
+        }
+
+        bool operator==(const const_iterator& rhs) const
+        {
+            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
+            return m_pItem == rhs.m_pItem;
+        }
+        bool operator!=(const const_iterator& rhs) const
+        {
+            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
+            return m_pItem != rhs.m_pItem;
+        }
+        
+    private:
+        const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
+            m_pList(pList),
+            m_pItem(pItem)
+        {
+        }
+
+        const VmaRawList<T>* m_pList;
+        const VmaListItem<T>* m_pItem;
+
+        friend class VmaList<T, AllocatorT>;
+    };
+
+    VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
+
+    bool empty() const { return m_RawList.IsEmpty(); }
+    size_t size() const { return m_RawList.GetCount(); }
+
+    iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
+    iterator end() { return iterator(&m_RawList, VMA_NULL); }
+
+    const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
+    const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }
+
+    void clear() { m_RawList.Clear(); }
+    void push_back(const T& value) { m_RawList.PushBack(value); }
+    void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
+    iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
+
+private:
+    VmaRawList<T> m_RawList;
+};
+
+#endif // #if VMA_USE_STL_LIST
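+
+// Illustrative usage (alloc stands for a hypothetical VmaStlAllocator<int>):
+//   VmaList< int, VmaStlAllocator<int> > list(alloc);
+//   list.push_back(7);
+//   for(auto it = list.begin(); it != list.end(); ++it) { /* *it visits values in insertion order */ }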
+
+////////////////////////////////////////////////////////////////////////////////
+// class VmaMap
+
+// Unused in this version.
+#if 0
+
+#if VMA_USE_STL_UNORDERED_MAP
+
+#define VmaPair std::pair
+
+#define VMA_MAP_TYPE(KeyT, ValueT) \
+    std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >
+
+#else // #if VMA_USE_STL_UNORDERED_MAP
+
+template<typename T1, typename T2>
+struct VmaPair
+{
+    T1 first;
+    T2 second;
+
+    VmaPair() : first(), second() { }
+    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
+};
+
+/* Class compatible with subset of interface of std::unordered_map.
+KeyT, ValueT must be POD because they will be stored in VmaVector.
+*/
+template<typename KeyT, typename ValueT>
+class VmaMap
+{
+public:
+    typedef VmaPair<KeyT, ValueT> PairType;
+    typedef PairType* iterator;
+
+    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
+
+    iterator begin() { return m_Vector.begin(); }
+    iterator end() { return m_Vector.end(); }
+
+    void insert(const PairType& pair);
+    iterator find(const KeyT& key);
+    void erase(iterator it);
+    
+private:
+    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
+};
+
+#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
+
+template<typename FirstT, typename SecondT>
+struct VmaPairFirstLess
+{
+    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
+    {
+        return lhs.first < rhs.first;
+    }
+    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
+    {
+        return lhs.first < rhsFirst;
+    }
+};
+
+template<typename KeyT, typename ValueT>
+void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
+{
+    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
+        m_Vector.data(),
+        m_Vector.data() + m_Vector.size(),
+        pair,
+        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
+    VmaVectorInsert(m_Vector, indexToInsert, pair);
+}
+
+template<typename KeyT, typename ValueT>
+VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
+{
+    PairType* it = VmaBinaryFindFirstNotLess(
+        m_Vector.data(),
+        m_Vector.data() + m_Vector.size(),
+        key,
+        VmaPairFirstLess<KeyT, ValueT>());
+    if((it != m_Vector.end()) && (it->first == key))
+    {
+        return it;
+    }
+    else
+    {
+        return m_Vector.end();
+    }
+}
+
+template<typename KeyT, typename ValueT>
+void VmaMap<KeyT, ValueT>::erase(iterator it)
+{
+    VmaVectorRemove(m_Vector, it - m_Vector.begin());
+}
+
+#endif // #if VMA_USE_STL_UNORDERED_MAP
+
+#endif // #if 0
+
+////////////////////////////////////////////////////////////////////////////////
+
+class VmaDeviceMemoryBlock;
+
+enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
+
+struct VmaAllocation_T
+{
+    VMA_CLASS_NO_COPY(VmaAllocation_T)
+private:
+    static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
+
+    enum FLAGS
+    {
+        FLAG_USER_DATA_STRING = 0x01,
+    };
+
+public:
+    enum ALLOCATION_TYPE
+    {
+        ALLOCATION_TYPE_NONE,
+        ALLOCATION_TYPE_BLOCK,
+        ALLOCATION_TYPE_DEDICATED,
+    };
+
+    VmaAllocation_T(uint32_t currentFrameIndex, bool userDataString) :
+        m_Alignment(1),
+        m_Size(0),
+        m_pUserData(VMA_NULL),
+        m_LastUseFrameIndex(currentFrameIndex),
+        m_Type((uint8_t)ALLOCATION_TYPE_NONE),
+        m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
+        m_MapCount(0),
+        m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
+    {
+#if VMA_STATS_STRING_ENABLED
+        m_CreationFrameIndex = currentFrameIndex;
+        m_BufferImageUsage = 0;
+#endif
+    }
+
+    ~VmaAllocation_T()
+    {
+        VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");
+
+        // Check if owned string was freed.
+        VMA_ASSERT(m_pUserData == VMA_NULL);
+    }
+
+    void InitBlockAllocation(
+        VmaPool hPool,
+        VmaDeviceMemoryBlock* block,
+        VkDeviceSize offset,
+        VkDeviceSize alignment,
+        VkDeviceSize size,
+        VmaSuballocationType suballocationType,
+        bool mapped,
+        bool canBecomeLost)
+    {
+        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
+        VMA_ASSERT(block != VMA_NULL);
+        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
+        m_Alignment = alignment;
+        m_Size = size;
+        m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
+        m_SuballocationType = (uint8_t)suballocationType;
+        m_BlockAllocation.m_hPool = hPool;
+        m_BlockAllocation.m_Block = block;
+        m_BlockAllocation.m_Offset = offset;
+        m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
+    }
+
+    void InitLost()
+    {
+        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
+        VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
+        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
+        m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
+        m_BlockAllocation.m_Block = VMA_NULL;
+        m_BlockAllocation.m_Offset = 0;
+        m_BlockAllocation.m_CanBecomeLost = true;
+    }
+
+    void ChangeBlockAllocation(
+        VmaAllocator hAllocator,
+        VmaDeviceMemoryBlock* block,
+        VkDeviceSize offset); 
+
+    void ChangeSize(VkDeviceSize newSize);
+    void ChangeOffset(VkDeviceSize newOffset);
+
+    // pMappedData not null means allocation is created with MAPPED flag.
+    void InitDedicatedAllocation(
+        uint32_t memoryTypeIndex,
+        VkDeviceMemory hMemory,
+        VmaSuballocationType suballocationType,
+        void* pMappedData,
+        VkDeviceSize size)
+    {
+        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
+        VMA_ASSERT(hMemory != VK_NULL_HANDLE);
+        m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
+        m_Alignment = 0;
+        m_Size = size;
+        m_SuballocationType = (uint8_t)suballocationType;
+        m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
+        m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
+        m_DedicatedAllocation.m_hMemory = hMemory;
+        m_DedicatedAllocation.m_pMappedData = pMappedData;
+    }
+
+    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
+    VkDeviceSize GetAlignment() const { return m_Alignment; }
+    VkDeviceSize GetSize() const { return m_Size; }
+    bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
+    void* GetUserData() const { return m_pUserData; }
+    void SetUserData(VmaAllocator hAllocator, void* pUserData);
+    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }
+
+    VmaDeviceMemoryBlock* GetBlock() const
+    {
+        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
+        return m_BlockAllocation.m_Block;
+    }
+    VkDeviceSize GetOffset() const;
+    VkDeviceMemory GetMemory() const;
+    uint32_t GetMemoryTypeIndex() const;
+    bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
+    void* GetMappedData() const;
+    bool CanBecomeLost() const;
+    VmaPool GetPool() const;
+    
+    uint32_t GetLastUseFrameIndex() const
+    {
+        return m_LastUseFrameIndex.load();
+    }
+    bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
+    {
+        return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
+    }
+    /*
+    - If hAllocation.LastUseFrameIndex + frameInUseCount < allocator.CurrentFrameIndex,
+      makes it lost by setting LastUseFrameIndex = VMA_FRAME_INDEX_LOST and returns true.
+    - Else, returns false.
+    
+    If hAllocation is already lost, assert - you should not call it then.
+    If hAllocation was not created with CAN_BECOME_LOST_BIT, assert.
+    */
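+    // Example (illustrative numbers): with frameInUseCount = 2 and allocator.CurrentFrameIndex = 10,
+    // an allocation whose LastUseFrameIndex is 7 satisfies 7 + 2 < 10, so MakeLost() can succeed.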
+    bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
+
+    void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
+    {
+        VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
+        outInfo.blockCount = 1;
+        outInfo.allocationCount = 1;
+        outInfo.unusedRangeCount = 0;
+        outInfo.usedBytes = m_Size;
+        outInfo.unusedBytes = 0;
+        outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
+        outInfo.unusedRangeSizeMin = UINT64_MAX;
+        outInfo.unusedRangeSizeMax = 0;
+    }
+
+    void BlockAllocMap();
+    void BlockAllocUnmap();
+    VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
+    void DedicatedAllocUnmap(VmaAllocator hAllocator);
+
+#if VMA_STATS_STRING_ENABLED
+    uint32_t GetCreationFrameIndex() const { return m_CreationFrameIndex; }
+    uint32_t GetBufferImageUsage() const { return m_BufferImageUsage; }
+
+    void InitBufferImageUsage(uint32_t bufferImageUsage)
+    {
+        VMA_ASSERT(m_BufferImageUsage == 0);
+        m_BufferImageUsage = bufferImageUsage;
+    }
+
+    void PrintParameters(class VmaJsonWriter& json) const;
+#endif
+
+private:
+    VkDeviceSize m_Alignment;
+    VkDeviceSize m_Size;
+    void* m_pUserData;
+    VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
+    uint8_t m_Type; // ALLOCATION_TYPE
+    uint8_t m_SuballocationType; // VmaSuballocationType
+    // Bit 0x80 is set when allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
+    // Bits with mask 0x7F are reference counter for vmaMapMemory()/vmaUnmapMemory().
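+    // Example (illustrative): m_MapCount == 0x82 means the allocation is persistently mapped
+    // (0x80) and two explicit vmaMapMemory() calls are currently outstanding (0x02).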
+    uint8_t m_MapCount;
+    uint8_t m_Flags; // enum FLAGS
+
+    // Allocation out of VmaDeviceMemoryBlock.
+    struct BlockAllocation
+    {
+        VmaPool m_hPool; // Null if belongs to general memory.
+        VmaDeviceMemoryBlock* m_Block;
+        VkDeviceSize m_Offset;
+        bool m_CanBecomeLost;
+    };
+
+    // Allocation for an object that has its own private VkDeviceMemory.
+    struct DedicatedAllocation
+    {
+        uint32_t m_MemoryTypeIndex;
+        VkDeviceMemory m_hMemory;
+        void* m_pMappedData; // Not null means memory is mapped.
+    };
+
+    union
+    {
+        // Allocation out of VmaDeviceMemoryBlock.
+        BlockAllocation m_BlockAllocation;
+        // Allocation for an object that has its own private VkDeviceMemory.
+        DedicatedAllocation m_DedicatedAllocation;
+    };
+
+#if VMA_STATS_STRING_ENABLED
+    uint32_t m_CreationFrameIndex;
+    uint32_t m_BufferImageUsage; // 0 if unknown.
+#endif
+
+    void FreeUserDataString(VmaAllocator hAllocator);
+};
+
+/*
+Represents a region of a VmaDeviceMemoryBlock that is either assigned and returned
+as an allocated memory block, or free.
+*/
+struct VmaSuballocation
+{
+    VkDeviceSize offset;
+    VkDeviceSize size;
+    VmaAllocation hAllocation;
+    VmaSuballocationType type;
+};
+
+// Comparator for offsets.
+struct VmaSuballocationOffsetLess
+{
+    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
+    {
+        return lhs.offset < rhs.offset;
+    }
+};
+struct VmaSuballocationOffsetGreater
+{
+    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
+    {
+        return lhs.offset > rhs.offset;
+    }
+};
+
+typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
+
+// Cost of one additional allocation lost, as equivalent in bytes.
+static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
+
+/*
+Parameters of planned allocation inside a VmaDeviceMemoryBlock.
+
+If canMakeOtherLost was false:
+- item points to a FREE suballocation.
+- itemsToMakeLostCount is 0.
+
+If canMakeOtherLost was true:
+- item points to first of sequence of suballocations, which are either FREE,
+  or point to VmaAllocations that can become lost.
+- itemsToMakeLostCount is the number of VmaAllocations that need to be made lost for
+  the requested allocation to succeed.
+*/
+struct VmaAllocationRequest
+{
+    VkDeviceSize offset;
+    VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
+    VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
+    VmaSuballocationList::iterator item;
+    size_t itemsToMakeLostCount;
+    void* customData;
+
+    VkDeviceSize CalcCost() const
+    {
+        return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
+    }
+};
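+
+// Example (illustrative arithmetic): with sumItemSize = 4 MiB (4194304) and
+// itemsToMakeLostCount = 2, CalcCost() = 4194304 + 2 * 1048576 = 6291456 bytes.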
+
+/*
+Data structure used for bookkeeping of allocations and unused ranges of memory
+in a single VkDeviceMemory block.
+*/
+class VmaBlockMetadata
+{
+public:
+    VmaBlockMetadata(VmaAllocator hAllocator);
+    virtual ~VmaBlockMetadata() { }
+    virtual void Init(VkDeviceSize size) { m_Size = size; }
+
+    // Validates all data structures inside this object. If not valid, returns false.
+    virtual bool Validate() const = 0;
+    VkDeviceSize GetSize() const { return m_Size; }
+    virtual size_t GetAllocationCount() const = 0;
+    virtual VkDeviceSize GetSumFreeSize() const = 0;
+    virtual VkDeviceSize GetUnusedRangeSizeMax() const = 0;
+    // Returns true if this block is empty - contains only a single free suballocation.
+    virtual bool IsEmpty() const = 0;
+
+    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const = 0;
+    // Shouldn't modify blockCount.
+    virtual void AddPoolStats(VmaPoolStats& inoutStats) const = 0;
+
+#if VMA_STATS_STRING_ENABLED
+    virtual void PrintDetailedMap(class VmaJsonWriter& json) const = 0;
+#endif
+
+    // Tries to find a place for suballocation with given parameters inside this block.
+    // If succeeded, fills pAllocationRequest and returns true.
+    // If failed, returns false.
+    virtual bool CreateAllocationRequest(
+        uint32_t currentFrameIndex,
+        uint32_t frameInUseCount,
+        VkDeviceSize bufferImageGranularity,
+        VkDeviceSize allocSize,
+        VkDeviceSize allocAlignment,
+        bool upperAddress,
+        VmaSuballocationType allocType,
+        bool canMakeOtherLost,
+        // Always one of VMA_ALLOCATION_CREATE_STRATEGY_* or VMA_ALLOCATION_INTERNAL_STRATEGY_* flags.
+        uint32_t strategy,
+        VmaAllocationRequest* pAllocationRequest) = 0;
+
+    virtual bool MakeRequestedAllocationsLost(
+        uint32_t currentFrameIndex,
+        uint32_t frameInUseCount,
+        VmaAllocationRequest* pAllocationRequest) = 0;
+
+    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
+
+    virtual VkResult CheckCorruption(const void* pBlockData) = 0;
+
+    // Makes actual allocation based on request. Request must already be checked and valid.
+    virtual void Alloc(
+        const VmaAllocationRequest& request,
+        VmaSuballocationType type,
+        VkDeviceSize allocSize,
+        bool upperAddress,
+        VmaAllocation hAllocation) = 0;
+
+    // Frees suballocation assigned to given memory region.
+    virtual void Free(const VmaAllocation allocation) = 0;
+    virtual void FreeAtOffset(VkDeviceSize offset) = 0;
+
+    // Tries to resize (grow or shrink) space for given allocation, in place.
+    virtual bool ResizeAllocation(const VmaAllocation alloc, VkDeviceSize newSize) { return false; }
+
+protected:
+    const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; }
+
+#if VMA_STATS_STRING_ENABLED
+    void PrintDetailedMap_Begin(class VmaJsonWriter& json,
+        VkDeviceSize unusedBytes,
+        size_t allocationCount,
+        size_t unusedRangeCount) const;
+    void PrintDetailedMap_Allocation(class VmaJsonWriter& json,
+        VkDeviceSize offset,
+        VmaAllocation hAllocation) const;
+    void PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
+        VkDeviceSize offset,
+        VkDeviceSize size) const;
+    void PrintDetailedMap_End(class VmaJsonWriter& json) const;
+#endif
+
+private:
+    VkDeviceSize m_Size;
+    const VkAllocationCallbacks* m_pAllocationCallbacks;
+};
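+
+// Typical call sequence implied by the interface above (illustrative): the caller first queries
+// CreateAllocationRequest(); if it succeeds and the request has itemsToMakeLostCount > 0, the caller
+// is expected to apply MakeRequestedAllocationsLost() before committing the request with Alloc().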
+
+#define VMA_VALIDATE(cond) do { if(!(cond)) { \
+        VMA_ASSERT(0 && "Validation failed: " #cond); \
+        return false; \
+    } } while(false)
+
+class VmaBlockMetadata_Generic : public VmaBlockMetadata
+{
+    VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
+public:
+    VmaBlockMetadata_Generic(VmaAllocator hAllocator);
+    virtual ~VmaBlockMetadata_Generic();
+    virtual void Init(VkDeviceSize size);
+
+    virtual bool Validate() const;
+    virtual size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
+    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
+    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
+    virtual bool IsEmpty() const;
+
+    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
+    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;
+
+#if VMA_STATS_STRING_ENABLED
+    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
+#endif
+
+    virtual bool CreateAllocationRequest(
+        uint32_t currentFrameIndex,
+        uint32_t frameInUseCount,
+        VkDeviceSize bufferImageGranularity,
+        VkDeviceSize allocSize,
+        VkDeviceSize allocAlignment,
+        bool upperAddress,
+        VmaSuballocationType allocType,
+        bool canMakeOtherLost,
+        uint32_t strategy,
+        VmaAllocationRequest* pAllocationRequest);
+
+    virtual bool MakeRequestedAllocationsLost(
+        uint32_t currentFrameIndex,
+        uint32_t frameInUseCount,
+        VmaAllocationRequest* pAllocationRequest);
+
+    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
+
+    virtual VkResult CheckCorruption(const void* pBlockData);
+
+    virtual void Alloc(
+        const VmaAllocationRequest& request,
+        VmaSuballocationType type,
+        VkDeviceSize allocSize,
+        bool upperAddress,
+        VmaAllocation hAllocation);
+
+    virtual void Free(const VmaAllocation allocation);
+    virtual void FreeAtOffset(VkDeviceSize offset);
+
+    virtual bool ResizeAllocation(const VmaAllocation alloc, VkDeviceSize newSize);
+
+    ////////////////////////////////////////////////////////////////////////////////
+    // For defragmentation
+    
+    bool IsBufferImageGranularityConflictPossible(
+        VkDeviceSize bufferImageGranularity,
+        VmaSuballocationType& inOutPrevSuballocType) const;
+
+private:
+    friend class VmaDefragmentationAlgorithm_Generic;
+    friend class VmaDefragmentationAlgorithm_Fast;
+
+    uint32_t m_FreeCount;
+    VkDeviceSize m_SumFreeSize;
+    VmaSuballocationList m_Suballocations;
+    // Suballocations that are free and have size greater than a certain threshold.
+    // Sorted by size, ascending.
+    VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
+
+    bool ValidateFreeSuballocationList() const;
+
+    // Checks if requested suballocation with given parameters can be placed in given pFreeSuballocItem.
+    // If yes, fills pOffset and returns true. If no, returns false.
+    bool CheckAllocation(
+        uint32_t currentFrameIndex,
+        uint32_t frameInUseCount,
+        VkDeviceSize bufferImageGranularity,
+        VkDeviceSize allocSize,
+        VkDeviceSize allocAlignment,
+        VmaSuballocationType allocType,
+        VmaSuballocationList::const_iterator suballocItem,
+        bool canMakeOtherLost,
+        VkDeviceSize* pOffset,
+        size_t* itemsToMakeLostCount,
+        VkDeviceSize* pSumFreeSize,
+        VkDeviceSize* pSumItemSize) const;
+    // Given a free suballocation, merges it with the following one, which must also be free.
+    void MergeFreeWithNext(VmaSuballocationList::iterator item);
+    // Releases given suballocation, making it free.
+    // Merges it with adjacent free suballocations if applicable.
+    // Returns iterator to new free suballocation at this place.
+    VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
+    // Given a free suballocation, inserts it into the sorted list
+    // m_FreeSuballocationsBySize if it is suitable.
+    void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
+    // Given a free suballocation, removes it from the sorted list
+    // m_FreeSuballocationsBySize if it is suitable.
+    void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
+};
+
+/*
+Allocations and their references in internal data structure look like this:
+
+if(m_2ndVectorMode == SECOND_VECTOR_EMPTY):
+
+        0 +-------+
+          |       |
+          |       |
+          |       |
+          +-------+
+          | Alloc |  1st[m_1stNullItemsBeginCount]
+          +-------+
+          | Alloc |  1st[m_1stNullItemsBeginCount + 1]
+          +-------+
+          |  ...  |
+          +-------+
+          | Alloc |  1st[1st.size() - 1]
+          +-------+
+          |       |
+          |       |
+          |       |
+GetSize() +-------+
+
+if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER):
+
+        0 +-------+
+          | Alloc |  2nd[0]
+          +-------+
+          | Alloc |  2nd[1]
+          +-------+
+          |  ...  |
+          +-------+
+          | Alloc |  2nd[2nd.size() - 1]
+          +-------+
+          |       |
+          |       |
+          |       |
+          +-------+
+          | Alloc |  1st[m_1stNullItemsBeginCount]
+          +-------+
+          | Alloc |  1st[m_1stNullItemsBeginCount + 1]
+          +-------+
+          |  ...  |
+          +-------+
+          | Alloc |  1st[1st.size() - 1]
+          +-------+
+          |       |
+GetSize() +-------+
+
+if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK):
+
+        0 +-------+
+          |       |
+          |       |
+          |       |
+          +-------+
+          | Alloc |  1st[m_1stNullItemsBeginCount]
+          +-------+
+          | Alloc |  1st[m_1stNullItemsBeginCount + 1]
+          +-------+
+          |  ...  |
+          +-------+
+          | Alloc |  1st[1st.size() - 1]
+          +-------+
+          |       |
+          |       |
+          |       |
+          +-------+
+          | Alloc |  2nd[2nd.size() - 1]
+          +-------+
+          |  ...  |
+          +-------+
+          | Alloc |  2nd[1]
+          +-------+
+          | Alloc |  2nd[0]
+GetSize() +-------+
+
+*/
+class VmaBlockMetadata_Linear : public VmaBlockMetadata
+{
+    VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
+public:
+    VmaBlockMetadata_Linear(VmaAllocator hAllocator);
+    virtual ~VmaBlockMetadata_Linear();
+    virtual void Init(VkDeviceSize size);
+
+    virtual bool Validate() const;
+    virtual size_t GetAllocationCount() const;
+    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
+    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
+    virtual bool IsEmpty() const { return GetAllocationCount() == 0; }
+
+    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
+    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;
+
+#if VMA_STATS_STRING_ENABLED
+    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
+#endif
+
+    virtual bool CreateAllocationRequest(
+        uint32_t currentFrameIndex,
+        uint32_t frameInUseCount,
+        VkDeviceSize bufferImageGranularity,
+        VkDeviceSize allocSize,
+        VkDeviceSize allocAlignment,
+        bool upperAddress,
+        VmaSuballocationType allocType,
+        bool canMakeOtherLost,
+        uint32_t strategy,
+        VmaAllocationRequest* pAllocationRequest);
+
+    virtual bool MakeRequestedAllocationsLost(
+        uint32_t currentFrameIndex,
+        uint32_t frameInUseCount,
+        VmaAllocationRequest* pAllocationRequest);
+
+    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
+
+    virtual VkResult CheckCorruption(const void* pBlockData);
+
+    virtual void Alloc(
+        const VmaAllocationRequest& request,
+        VmaSuballocationType type,
+        VkDeviceSize allocSize,
+        bool upperAddress,
+        VmaAllocation hAllocation);
+
+    virtual void Free(const VmaAllocation allocation);
+    virtual void FreeAtOffset(VkDeviceSize offset);
+
+private:
+    /*
+    There are two suballocation vectors, used in ping-pong way.
+    The one with index m_1stVectorIndex is called 1st.
+    The one with index (m_1stVectorIndex ^ 1) is called 2nd.
+    2nd can be non-empty only when 1st is not empty.
+    When 2nd is not empty, m_2ndVectorMode indicates its mode of operation.
+    */
+    typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
+
+    enum SECOND_VECTOR_MODE
+    {
+        SECOND_VECTOR_EMPTY,
+        /*
+        Suballocations in 2nd vector are created later than the ones in 1st, but they
+        all have smaller offset.
+        */
+        SECOND_VECTOR_RING_BUFFER,
+        /*
+        Suballocations in 2nd vector are upper side of double stack.
+        They all have offsets higher than those in 1st vector.
+        Top of this stack means smaller offsets, but higher indices in this vector.
+        */
+        SECOND_VECTOR_DOUBLE_STACK,
+    };
+
+    VkDeviceSize m_SumFreeSize;
+    SuballocationVectorType m_Suballocations0, m_Suballocations1;
+    uint32_t m_1stVectorIndex;
+    SECOND_VECTOR_MODE m_2ndVectorMode;
+
+    SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
+    SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
+    const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
+    const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
+    
+    // Number of items in 1st vector with hAllocation = null at the beginning.
+    size_t m_1stNullItemsBeginCount;
+    // Number of other items in 1st vector with hAllocation = null somewhere in the middle.
+    size_t m_1stNullItemsMiddleCount;
+    // Number of items in 2nd vector with hAllocation = null.
+    size_t m_2ndNullItemsCount;
+
+    bool ShouldCompact1st() const;
+    void CleanupAfterFree();
+};
+
+/*
+- GetSize() is the original size of allocated memory block.
+- m_UsableSize is this size aligned down to a power of two.
+  All allocations and calculations happen relative to m_UsableSize.
+- GetUnusableSize() is the difference between them.
+  It is reported as a separate, unused range, not available for allocations.
+
+Node at level 0 has size = m_UsableSize.
+Each next level contains nodes with size 2 times smaller than current level.
+m_LevelCount is the maximum number of levels to use in the current object.
+*/
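+// Example (illustrative): for a 10 MiB block, m_UsableSize = 8 MiB (10 MiB aligned down to a
+// power of two) and GetUnusableSize() = 2 MiB. LevelToNodeSize(0) = 8 MiB,
+// LevelToNodeSize(1) = 4 MiB, LevelToNodeSize(2) = 2 MiB, and so on.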
+class VmaBlockMetadata_Buddy : public VmaBlockMetadata
+{
+    VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
+public:
+    VmaBlockMetadata_Buddy(VmaAllocator hAllocator);
+    virtual ~VmaBlockMetadata_Buddy();
+    virtual void Init(VkDeviceSize size);
+
+    virtual bool Validate() const;
+    virtual size_t GetAllocationCount() const { return m_AllocationCount; }
+    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize + GetUnusableSize(); }
+    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
+    virtual bool IsEmpty() const { return m_Root->type == Node::TYPE_FREE; }
+
+    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
+    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;
+
+#if VMA_STATS_STRING_ENABLED
+    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
+#endif
+
+    virtual bool CreateAllocationRequest(
+        uint32_t currentFrameIndex,
+        uint32_t frameInUseCount,
+        VkDeviceSize bufferImageGranularity,
+        VkDeviceSize allocSize,
+        VkDeviceSize allocAlignment,
+        bool upperAddress,
+        VmaSuballocationType allocType,
+        bool canMakeOtherLost,
+        uint32_t strategy,
+        VmaAllocationRequest* pAllocationRequest);
+
+    virtual bool MakeRequestedAllocationsLost(
+        uint32_t currentFrameIndex,
+        uint32_t frameInUseCount,
+        VmaAllocationRequest* pAllocationRequest);
+
+    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
+
+    virtual VkResult CheckCorruption(const void* pBlockData) { return VK_ERROR_FEATURE_NOT_PRESENT; }
+
+    virtual void Alloc(
+        const VmaAllocationRequest& request,
+        VmaSuballocationType type,
+        VkDeviceSize allocSize,
+        bool upperAddress,
+        VmaAllocation hAllocation);
+
+    virtual void Free(const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
+    virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
+
+private:
+    static const VkDeviceSize MIN_NODE_SIZE = 32;
+    static const size_t MAX_LEVELS = 30;
+
+    struct ValidationContext
+    {
+        size_t calculatedAllocationCount;
+        size_t calculatedFreeCount;
+        VkDeviceSize calculatedSumFreeSize;
+
+        ValidationContext() :
+            calculatedAllocationCount(0),
+            calculatedFreeCount(0),
+            calculatedSumFreeSize(0) { }
+    };
+
+    struct Node
+    {
+        VkDeviceSize offset;
+        enum TYPE
+        {
+            TYPE_FREE,
+            TYPE_ALLOCATION,
+            TYPE_SPLIT,
+            TYPE_COUNT
+        } type;
+        Node* parent;
+        Node* buddy;
+
+        union
+        {
+            struct
+            {
+                Node* prev;
+                Node* next;
+            } free;
+            struct
+            {
+                VmaAllocation alloc;
+            } allocation;
+            struct
+            {
+                Node* leftChild;
+            } split;
+        };
+    };
+
+    // Size of the memory block aligned down to a power of two.
+    VkDeviceSize m_UsableSize;
+    uint32_t m_LevelCount;
+
+    Node* m_Root;
+    struct {
+        Node* front;
+        Node* back;
+    } m_FreeList[MAX_LEVELS];
+    // Number of nodes in the tree with type == TYPE_ALLOCATION.
+    size_t m_AllocationCount;
+    // Number of nodes in the tree with type == TYPE_FREE.
+    size_t m_FreeCount;
+    // This includes space wasted due to internal fragmentation. Doesn't include unusable size.
+    VkDeviceSize m_SumFreeSize;
+
+    VkDeviceSize GetUnusableSize() const { return GetSize() - m_UsableSize; }
+    void DeleteNode(Node* node);
+    bool ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const;
+    uint32_t AllocSizeToLevel(VkDeviceSize allocSize) const;
+    inline VkDeviceSize LevelToNodeSize(uint32_t level) const { return m_UsableSize >> level; }
+    // Alloc passed just for validation. Can be null.
+    void FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset);
+    void CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const;
+    // Adds node to the front of FreeList at given level.
+    // node->type must be FREE.
+    // node->free.prev, next can be undefined.
+    void AddToFreeListFront(uint32_t level, Node* node);
+    // Removes node from FreeList at given level.
+    // node->type must be FREE.
+    // node->free.prev, next stay untouched.
+    void RemoveFromFreeList(uint32_t level, Node* node);
+
+#if VMA_STATS_STRING_ENABLED
+    void PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const;
+#endif
+};
+
+/*
+Represents a single block of device memory (`VkDeviceMemory`) with all the
+data about its regions (aka suballocations, #VmaAllocation), assigned and free.
+
+Thread-safety: This class must be externally synchronized.
+*/
+class VmaDeviceMemoryBlock
+{
+    VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
+public:
+    VmaBlockMetadata* m_pMetadata;
+
+    VmaDeviceMemoryBlock(VmaAllocator hAllocator);
+
+    ~VmaDeviceMemoryBlock()
+    {
+        VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
+        VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
+    }
+
+    // Always call after construction.
+    void Init(
+        VmaAllocator hAllocator,
+        uint32_t newMemoryTypeIndex,
+        VkDeviceMemory newMemory,
+        VkDeviceSize newSize,
+        uint32_t id,
+        uint32_t algorithm);
+    // Always call before destruction.
+    void Destroy(VmaAllocator allocator);
+    
+    VkDeviceMemory GetDeviceMemory() const { return m_hMemory; }
+    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
+    uint32_t GetId() const { return m_Id; }
+    void* GetMappedData() const { return m_pMappedData; }
+
+    // Validates all data structures inside this object. If not valid, returns false.
+    bool Validate() const;
+
+    VkResult CheckCorruption(VmaAllocator hAllocator);
+
+    // ppData can be null.
+    VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
+    void Unmap(VmaAllocator hAllocator, uint32_t count);
+
+    VkResult WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
+    VkResult ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
+
+    VkResult BindBufferMemory(
+        const VmaAllocator hAllocator,
+        const VmaAllocation hAllocation,
+        VkBuffer hBuffer);
+    VkResult BindImageMemory(
+        const VmaAllocator hAllocator,
+        const VmaAllocation hAllocation,
+        VkImage hImage);
+
+private:
+    uint32_t m_MemoryTypeIndex;
+    uint32_t m_Id;
+    VkDeviceMemory m_hMemory;
+
+    /*
+    Protects access to m_hMemory so it's not used by multiple threads simultaneously, e.g. vkMapMemory, vkBindBufferMemory.
+    Also protects m_MapCount, m_pMappedData.
+    Allocations, deallocations, any change in m_pMetadata is protected by parent's VmaBlockVector::m_Mutex.
+    */
+    VMA_MUTEX m_Mutex;
+    uint32_t m_MapCount;
+    void* m_pMappedData;
+};
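+
+// Hedged illustrative sketch (added, not part of the original library): shows
+// the external-synchronization contract described above. The helper name is an
+// assumption; the caller is expected to hold the lock that guards this block
+// (normally the parent VmaBlockVector::m_Mutex) around the whole call.
+static VkResult VmaExampleTouchBlock(VmaAllocator hAllocator, VmaDeviceMemoryBlock* pBlock)
+{
+    void* pData = VMA_NULL;
+    // Map() increments the internal mapping reference counter.
+    VkResult res = pBlock->Map(hAllocator, 1, &pData);
+    if(res == VK_SUCCESS)
+    {
+        // ... read or write pData here ...
+        // Unmap() decrements the counter and unmaps when it reaches zero.
+        pBlock->Unmap(hAllocator, 1);
+    }
+    return res;
+}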
+
+struct VmaPointerLess
+{
+    bool operator()(const void* lhs, const void* rhs) const
+    {
+        return lhs < rhs;
+    }
+};
+
+struct VmaDefragmentationMove
+{
+    size_t srcBlockIndex;
+    size_t dstBlockIndex;
+    VkDeviceSize srcOffset;
+    VkDeviceSize dstOffset;
+    VkDeviceSize size;
+};
+
+class VmaDefragmentationAlgorithm;
+
+/*
+Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a specific
+Vulkan memory type.
+
+Synchronized internally with a mutex.
+*/
+struct VmaBlockVector
+{
+    VMA_CLASS_NO_COPY(VmaBlockVector)
+public:
+    VmaBlockVector(
+        VmaAllocator hAllocator,
+        uint32_t memoryTypeIndex,
+        VkDeviceSize preferredBlockSize,
+        size_t minBlockCount,
+        size_t maxBlockCount,
+        VkDeviceSize bufferImageGranularity,
+        uint32_t frameInUseCount,
+        bool isCustomPool,
+        bool explicitBlockSize,
+        uint32_t algorithm);
+    ~VmaBlockVector();
+
+    VkResult CreateMinBlocks();
+
+    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
+    VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
+    VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
+    uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
+    uint32_t GetAlgorithm() const { return m_Algorithm; }
+
+    void GetPoolStats(VmaPoolStats* pStats);
+
+    bool IsEmpty() const { return m_Blocks.empty(); }
+    bool IsCorruptionDetectionEnabled() const;
+
+    VkResult Allocate(
+        VmaPool hCurrentPool,
+        uint32_t currentFrameIndex,
+        VkDeviceSize size,
+        VkDeviceSize alignment,
+        const VmaAllocationCreateInfo& createInfo,
+        VmaSuballocationType suballocType,
+        size_t allocationCount,
+        VmaAllocation* pAllocations);
+
+    void Free(
+        VmaAllocation hAllocation);
+
+    // Adds statistics of this BlockVector to pStats.
+    void AddStats(VmaStats* pStats);
+
+#if VMA_STATS_STRING_ENABLED
+    void PrintDetailedMap(class VmaJsonWriter& json);
+#endif
+
+    void MakePoolAllocationsLost(
+        uint32_t currentFrameIndex,
+        size_t* pLostAllocationCount);
+    VkResult CheckCorruption();
+
+    // Saves results in pCtx->res.
+    void Defragment(
+        class VmaBlockVectorDefragmentationContext* pCtx,
+        VmaDefragmentationStats* pStats,
+        VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
+        VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
+        VkCommandBuffer commandBuffer);
+    void DefragmentationEnd(
+        class VmaBlockVectorDefragmentationContext* pCtx,
+        VmaDefragmentationStats* pStats);
+
+    ////////////////////////////////////////////////////////////////////////////////
+    // To be used only while m_Mutex is locked. Used during defragmentation.
+
+    size_t GetBlockCount() const { return m_Blocks.size(); }
+    VmaDeviceMemoryBlock* GetBlock(size_t index) const { return m_Blocks[index]; }
+    size_t CalcAllocationCount() const;
+    bool IsBufferImageGranularityConflictPossible() const;
+
+private:
+    friend class VmaDefragmentationAlgorithm_Generic;
+
+    const VmaAllocator m_hAllocator;
+    const uint32_t m_MemoryTypeIndex;
+    const VkDeviceSize m_PreferredBlockSize;
+    const size_t m_MinBlockCount;
+    const size_t m_MaxBlockCount;
+    const VkDeviceSize m_BufferImageGranularity;
+    const uint32_t m_FrameInUseCount;
+    const bool m_IsCustomPool;
+    const bool m_ExplicitBlockSize;
+    const uint32_t m_Algorithm;
+    /* There can be at most one block that is completely empty - a hysteresis to
+    avoid the pessimistic case of alternating creation and destruction of a
+    VkDeviceMemory. */
+    bool m_HasEmptyBlock;
+    VMA_RW_MUTEX m_Mutex;
+    // Incrementally sorted by sumFreeSize, ascending.
+    VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
+    uint32_t m_NextBlockId;
+
+    VkDeviceSize CalcMaxBlockSize() const;
+
+    // Finds and removes given block from vector.
+    void Remove(VmaDeviceMemoryBlock* pBlock);
+
+    // Performs single step in sorting m_Blocks. They may not be fully sorted
+    // after this call.
+    void IncrementallySortBlocks();
+
+    VkResult AllocatePage(
+        VmaPool hCurrentPool,
+        uint32_t currentFrameIndex,
+        VkDeviceSize size,
+        VkDeviceSize alignment,
+        const VmaAllocationCreateInfo& createInfo,
+        VmaSuballocationType suballocType,
+        VmaAllocation* pAllocation);
+
+    // To be used only without CAN_MAKE_OTHER_LOST flag.
+    VkResult AllocateFromBlock(
+        VmaDeviceMemoryBlock* pBlock,
+        VmaPool hCurrentPool,
+        uint32_t currentFrameIndex,
+        VkDeviceSize size,
+        VkDeviceSize alignment,
+        VmaAllocationCreateFlags allocFlags,
+        void* pUserData,
+        VmaSuballocationType suballocType,
+        uint32_t strategy,
+        VmaAllocation* pAllocation);
+
+    VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);
+
+    // Saves result to pCtx->res.
+    void ApplyDefragmentationMovesCpu(
+        class VmaBlockVectorDefragmentationContext* pDefragCtx,
+        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
+    // Saves result to pCtx->res.
+    void ApplyDefragmentationMovesGpu(
+        class VmaBlockVectorDefragmentationContext* pDefragCtx,
+        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
+        VkCommandBuffer commandBuffer);
+
+    /*
+    Used during defragmentation. pDefragmentationStats is optional: if provided,
+    it is treated as in/out and updated with new data.
+    */
+    void FreeEmptyBlocks(VmaDefragmentationStats* pDefragmentationStats);
+};
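+
+// Hedged illustrative sketch (added, not part of the original library): since
+// VmaBlockVector is synchronized internally, a single allocation can be
+// requested directly as shown here. The helper name and the chosen alignment
+// are assumptions made for the example.
+static VkResult VmaExampleAllocateFromBlockVector(
+    VmaBlockVector& blockVector,
+    VmaPool hPool,
+    uint32_t currentFrameIndex,
+    VkDeviceSize size,
+    const VmaAllocationCreateInfo& createInfo,
+    VmaAllocation* pAllocation)
+{
+    return blockVector.Allocate(
+        hPool,
+        currentFrameIndex,
+        size,
+        1, // alignment (assumed: no special requirement)
+        createInfo,
+        VMA_SUBALLOCATION_TYPE_BUFFER,
+        1, // allocationCount: a single allocation
+        pAllocation);
+}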
+
+struct VmaPool_T
+{
+    VMA_CLASS_NO_COPY(VmaPool_T)
+public:
+    VmaBlockVector m_BlockVector;
+
+    VmaPool_T(
+        VmaAllocator hAllocator,
+        const VmaPoolCreateInfo& createInfo,
+        VkDeviceSize preferredBlockSize);
+    ~VmaPool_T();
+
+    uint32_t GetId() const { return m_Id; }
+    void SetId(uint32_t id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
+
+#if VMA_STATS_STRING_ENABLED
+    //void PrintDetailedMap(class VmaStringBuilder& sb);
+#endif
+
+private:
+    uint32_t m_Id;
+};
+
+/*
+Performs defragmentation:
+
+- Updates `pBlockVector->m_pMetadata`.
+- Updates allocations by calling ChangeBlockAllocation() or ChangeOffset().
+- Does not move actual data, only returns requested moves as `moves`.
+*/
+class VmaDefragmentationAlgorithm
+{
+    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
+public:
+    VmaDefragmentationAlgorithm(
+        VmaAllocator hAllocator,
+        VmaBlockVector* pBlockVector,
+        uint32_t currentFrameIndex) :
+        m_hAllocator(hAllocator),
+        m_pBlockVector(pBlockVector),
+        m_CurrentFrameIndex(currentFrameIndex)
+    {
+    }
+    virtual ~VmaDefragmentationAlgorithm()
+    {
+    }
+
+    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) = 0;
+    virtual void AddAll() = 0;
+
+    virtual VkResult Defragment(
+        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
+        VkDeviceSize maxBytesToMove,
+        uint32_t maxAllocationsToMove) = 0;
+
+    virtual VkDeviceSize GetBytesMoved() const = 0;
+    virtual uint32_t GetAllocationsMoved() const = 0;
+
+protected:
+    VmaAllocator const m_hAllocator;
+    VmaBlockVector* const m_pBlockVector;
+    const uint32_t m_CurrentFrameIndex;
+
+    struct AllocationInfo
+    {
+        VmaAllocation m_hAllocation;
+        VkBool32* m_pChanged;
+
+        AllocationInfo() :
+            m_hAllocation(VK_NULL_HANDLE),
+            m_pChanged(VMA_NULL)
+        {
+        }
+        AllocationInfo(VmaAllocation hAlloc, VkBool32* pChanged) :
+            m_hAllocation(hAlloc),
+            m_pChanged(pChanged)
+        {
+        }
+    };
+};
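+
+// Hedged illustrative sketch (added, not part of the original library): the
+// intended driving sequence for any VmaDefragmentationAlgorithm - register
+// allocations, collect the requested moves, then let the caller apply them.
+// The helper name and the unlimited move budgets are assumptions.
+static VkResult VmaExampleRunDefragmentationAlgorithm(
+    VmaDefragmentationAlgorithm* pAlgorithm,
+    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& outMoves)
+{
+    pAlgorithm->AddAll(); // or AddAllocation() once per candidate allocation
+    const VkResult res = pAlgorithm->Defragment(
+        outMoves,
+        VK_WHOLE_SIZE, // maxBytesToMove: no limit
+        UINT32_MAX);   // maxAllocationsToMove: no limit
+    // Statistics describe what the collected moves would transfer; the data
+    // itself is moved later by the caller (CPU memcpy or GPU copy commands).
+    (void)pAlgorithm->GetBytesMoved();
+    (void)pAlgorithm->GetAllocationsMoved();
+    return res;
+}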
+
+class VmaDefragmentationAlgorithm_Generic : public VmaDefragmentationAlgorithm
+{
+    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
+public:
+    VmaDefragmentationAlgorithm_Generic(
+        VmaAllocator hAllocator,
+        VmaBlockVector* pBlockVector,
+        uint32_t currentFrameIndex,
+        bool overlappingMoveSupported);
+    virtual ~VmaDefragmentationAlgorithm_Generic();
+
+    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
+    virtual void AddAll() { m_AllAllocations = true; }
+
+    virtual VkResult Defragment(
+        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
+        VkDeviceSize maxBytesToMove,
+        uint32_t maxAllocationsToMove);
+
+    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
+    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }
+
+private:
+    uint32_t m_AllocationCount;
+    bool m_AllAllocations;
+
+    VkDeviceSize m_BytesMoved;
+    uint32_t m_AllocationsMoved;
+
+    struct AllocationInfoSizeGreater
+    {
+        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
+        {
+            return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
+        }
+    };
+
+    struct AllocationInfoOffsetGreater
+    {
+        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
+        {
+            return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
+        }
+    };
+
+    struct BlockInfo
+    {
+        size_t m_OriginalBlockIndex;
+        VmaDeviceMemoryBlock* m_pBlock;
+        bool m_HasNonMovableAllocations;
+        VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
+
+        BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
+            m_OriginalBlockIndex(SIZE_MAX),
+            m_pBlock(VMA_NULL),
+            m_HasNonMovableAllocations(true),
+            m_Allocations(pAllocationCallbacks)
+        {
+        }
+
+        void CalcHasNonMovableAllocations()
+        {
+            const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
+            const size_t defragmentAllocCount = m_Allocations.size();
+            m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
+        }
+
+        void SortAllocationsBySizeDescending()
+        {
+            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
+        }
+
+        void SortAllocationsByOffsetDescending()
+        {
+            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
+        }
+    };
+
+    struct BlockPointerLess
+    {
+        bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
+        {
+            return pLhsBlockInfo->m_pBlock < pRhsBlock;
+        }
+        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
+        {
+            return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
+        }
+    };
+
+    // 1. Blocks with some non-movable allocations go first.
+    // 2. Blocks with smaller sumFreeSize go first.
+    struct BlockInfoCompareMoveDestination
+    {
+        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
+        {
+            if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
+            {
+                return true;
+            }
+            if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
+            {
+                return false;
+            }
+            if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
+            {
+                return true;
+            }
+            return false;
+        }
+    };
+
+    typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
+    BlockInfoVector m_Blocks;
+
+    VkResult DefragmentRound(
+        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
+        VkDeviceSize maxBytesToMove,
+        uint32_t maxAllocationsToMove);
+
+    size_t CalcBlocksWithNonMovableCount() const;
+
+    static bool MoveMakesSense(
+        size_t dstBlockIndex, VkDeviceSize dstOffset,
+        size_t srcBlockIndex, VkDeviceSize srcOffset);
+};
+
+class VmaDefragmentationAlgorithm_Fast : public VmaDefragmentationAlgorithm
+{
+    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
+public:
+    VmaDefragmentationAlgorithm_Fast(
+        VmaAllocator hAllocator,
+        VmaBlockVector* pBlockVector,
+        uint32_t currentFrameIndex,
+        bool overlappingMoveSupported);
+    virtual ~VmaDefragmentationAlgorithm_Fast();
+
+    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
+    virtual void AddAll() { m_AllAllocations = true; }
+
+    virtual VkResult Defragment(
+        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
+        VkDeviceSize maxBytesToMove,
+        uint32_t maxAllocationsToMove);
+
+    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
+    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }
+
+private:
+    struct BlockInfo
+    {
+        size_t origBlockIndex;
+    };
+
+    class FreeSpaceDatabase
+    {
+    public:
+        FreeSpaceDatabase()
+        {
+            FreeSpace s = {};
+            s.blockInfoIndex = SIZE_MAX;
+            for(size_t i = 0; i < MAX_COUNT; ++i)
+            {
+                m_FreeSpaces[i] = s;
+            }
+        }
+
+        void Register(size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
+        {
+            if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
+            {
+                return;
+            }
+
+            // Find the first invalid entry or, failing that, the smallest entry that is smaller than the new free space.
+            size_t bestIndex = SIZE_MAX;
+            for(size_t i = 0; i < MAX_COUNT; ++i)
+            {
+                // Empty structure.
+                if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
+                {
+                    bestIndex = i;
+                    break;
+                }
+                if(m_FreeSpaces[i].size < size &&
+                    (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
+                {
+                    bestIndex = i;
+                }
+            }
+
+            if(bestIndex != SIZE_MAX)
+            {
+                m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
+                m_FreeSpaces[bestIndex].offset = offset;
+                m_FreeSpaces[bestIndex].size = size;
+            }
+        }
+
+        bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
+            size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
+        {
+            size_t bestIndex = SIZE_MAX;
+            VkDeviceSize bestFreeSpaceAfter = 0;
+            for(size_t i = 0; i < MAX_COUNT; ++i)
+            {
+                // Structure is valid.
+                if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
+                {
+                    const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
+                    // Allocation fits into this structure.
+                    if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
+                    {
+                        const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
+                            (dstOffset + size);
+                        if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
+                        {
+                            bestIndex = i;
+                            bestFreeSpaceAfter = freeSpaceAfter;
+                        }
+                    }
+                }
+            }
+            
+            if(bestIndex != SIZE_MAX)
+            {
+                outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
+                outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
+
+                if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
+                {
+                    // Leave this structure for remaining empty space.
+                    const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
+                    m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
+                    m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
+                }
+                else
+                {
+                    // This structure becomes invalid.
+                    m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
+                }
+
+                return true;
+            }
+
+            return false;
+        }
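+
+        // Illustrative note (added): after Register(0, 256, 1024), a call to
+        // Fetch(/*alignment*/ 64, /*size*/ 512, ...) returns blockInfoIndex 0
+        // at dstOffset 256 and, assuming
+        // VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER <= 512, shrinks the
+        // remembered gap to offset 768 / size 512.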
+
+    private:
+        static const size_t MAX_COUNT = 4;
+
+        struct FreeSpace
+        {
+            size_t blockInfoIndex; // SIZE_MAX means this structure is invalid.
+            VkDeviceSize offset;
+            VkDeviceSize size;
+        } m_FreeSpaces[MAX_COUNT];
+    };
+
+    const bool m_OverlappingMoveSupported;
+
+    uint32_t m_AllocationCount;
+    bool m_AllAllocations;
+
+    VkDeviceSize m_BytesMoved;
+    uint32_t m_AllocationsMoved;
+
+    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
+
+    void PreprocessMetadata();
+    void PostprocessMetadata();
+    void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc);
+};
+
+struct VmaBlockDefragmentationContext
+{
+    enum BLOCK_FLAG
+    {
+        BLOCK_FLAG_USED = 0x00000001,
+    };
+    uint32_t flags;
+    VkBuffer hBuffer;
+
+    VmaBlockDefragmentationContext() :
+        flags(0),
+        hBuffer(VK_NULL_HANDLE)
+    {
+    }
+};
+
+class VmaBlockVectorDefragmentationContext
+{
+    VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
+public:
+    VkResult res;
+    bool mutexLocked;
+    VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
+
+    VmaBlockVectorDefragmentationContext(
+        VmaAllocator hAllocator,
+        VmaPool hCustomPool, // Optional.
+        VmaBlockVector* pBlockVector,
+        uint32_t currFrameIndex,
+        uint32_t flags);
+    ~VmaBlockVectorDefragmentationContext();
+
+    VmaPool GetCustomPool() const { return m_hCustomPool; }
+    VmaBlockVector* GetBlockVector() const { return m_pBlockVector; }
+    VmaDefragmentationAlgorithm* GetAlgorithm() const { return m_pAlgorithm; }
+
+    void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
+    void AddAll() { m_AllAllocations = true; }
+
+    void Begin(bool overlappingMoveSupported);
+
+private:
+    const VmaAllocator m_hAllocator;
+    // Null if not from custom pool.
+    const VmaPool m_hCustomPool;
+    // Redundant, stored for convenience so it doesn't have to be fetched from m_hCustomPool->m_BlockVector or m_hAllocator->m_pBlockVectors.
+    VmaBlockVector* const m_pBlockVector;
+    const uint32_t m_CurrFrameIndex;
+    //const uint32_t m_AlgorithmFlags;
+    // Owner of this object.
+    VmaDefragmentationAlgorithm* m_pAlgorithm;
+
+    struct AllocInfo
+    {
+        VmaAllocation hAlloc;
+        VkBool32* pChanged;
+    };
+    // Used between constructor and Begin.
+    VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
+    bool m_AllAllocations;
+};
+
+struct VmaDefragmentationContext_T
+{
+private:
+    VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
+public:
+    VmaDefragmentationContext_T(
+        VmaAllocator hAllocator,
+        uint32_t currFrameIndex,
+        uint32_t flags,
+        VmaDefragmentationStats* pStats);
+    ~VmaDefragmentationContext_T();
+
+    void AddPools(uint32_t poolCount, VmaPool* pPools);
+    void AddAllocations(
+        uint32_t allocationCount,
+        VmaAllocation* pAllocations,
+        VkBool32* pAllocationsChanged);
+
+    /*
+    Returns:
+    - `VK_SUCCESS` if succeeded and object can be destroyed immediately.
+    - `VK_NOT_READY` if succeeded but the object must remain alive until vmaDefragmentationEnd().
+    - Negative value if an error occurred; the object can be destroyed immediately.
+    */
+    VkResult Defragment(
+        VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
+        VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
+        VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats);
+
+private:
+    const VmaAllocator m_hAllocator;
+    const uint32_t m_CurrFrameIndex;
+    const uint32_t m_Flags;
+    VmaDefragmentationStats* const m_pStats;
+    // Owner of these objects.
+    VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
+    // Owner of these objects.
+    VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
+};
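+
+// Hedged illustrative sketch (added, not part of the original library): how a
+// caller is expected to interpret the Defragment() return contract documented
+// above. The helper name and the CPU-only, unlimited budgets are assumptions.
+static VkResult VmaExampleDefragmentCpuOnly(
+    VmaDefragmentationContext_T* pCtx,
+    VmaDefragmentationStats* pStats)
+{
+    const VkResult res = pCtx->Defragment(
+        VK_WHOLE_SIZE, UINT32_MAX, // CPU budget: unlimited
+        0, 0,                      // GPU budget: disabled
+        VK_NULL_HANDLE,            // no command buffer => CPU-side moves only
+        pStats);
+    // VK_SUCCESS: the context can be destroyed immediately.
+    // VK_NOT_READY: keep the context alive until vmaDefragmentationEnd().
+    // Negative value: error; the context can also be destroyed immediately.
+    return res;
+}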
+
+#if VMA_RECORDING_ENABLED
+
+class VmaRecorder
+{
+public:
+    VmaRecorder();
+    VkResult Init(const VmaRecordSettings& settings, bool useMutex);
+    void WriteConfiguration(
+        const VkPhysicalDeviceProperties& devProps,
+        const VkPhysicalDeviceMemoryProperties& memProps,
+        bool dedicatedAllocationExtensionEnabled);
+    ~VmaRecorder();
+
+    void RecordCreateAllocator(uint32_t frameIndex);
+    void RecordDestroyAllocator(uint32_t frameIndex);
+    void RecordCreatePool(uint32_t frameIndex,
+        const VmaPoolCreateInfo& createInfo,
+        VmaPool pool);
+    void RecordDestroyPool(uint32_t frameIndex, VmaPool pool);
+    void RecordAllocateMemory(uint32_t frameIndex,
+        const VkMemoryRequirements& vkMemReq,
+        const VmaAllocationCreateInfo& createInfo,
+        VmaAllocation allocation);
+    void RecordAllocateMemoryPages(uint32_t frameIndex,
+        const VkMemoryRequirements& vkMemReq,
+        const VmaAllocationCreateInfo& createInfo,
+        uint64_t allocationCount,
+        const VmaAllocation* pAllocations);
+    void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
+        const VkMemoryRequirements& vkMemReq,
+        bool requiresDedicatedAllocation,
+        bool prefersDedicatedAllocation,
+        const VmaAllocationCreateInfo& createInfo,
+        VmaAllocation allocation);
+    void RecordAllocateMemoryForImage(uint32_t frameIndex,
+        const VkMemoryRequirements& vkMemReq,
+        bool requiresDedicatedAllocation,
+        bool prefersDedicatedAllocation,
+        const VmaAllocationCreateInfo& createInfo,
+        VmaAllocation allocation);
+    void RecordFreeMemory(uint32_t frameIndex,
+        VmaAllocation allocation);
+    void RecordFreeMemoryPages(uint32_t frameIndex,
+        uint64_t allocationCount,
+        const VmaAllocation* pAllocations);
+    void RecordResizeAllocation(
+        uint32_t frameIndex,
+        VmaAllocation allocation,
+        VkDeviceSize newSize);
+    void RecordSetAllocationUserData(uint32_t frameIndex,
+        VmaAllocation allocation,
+        const void* pUserData);
+    void RecordCreateLostAllocation(uint32_t frameIndex,
+        VmaAllocation allocation);
+    void RecordMapMemory(uint32_t frameIndex,
+        VmaAllocation allocation);
+    void RecordUnmapMemory(uint32_t frameIndex,
+        VmaAllocation allocation);
+    void RecordFlushAllocation(uint32_t frameIndex,
+        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
+    void RecordInvalidateAllocation(uint32_t frameIndex,
+        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
+    void RecordCreateBuffer(uint32_t frameIndex,
+        const VkBufferCreateInfo& bufCreateInfo,
+        const VmaAllocationCreateInfo& allocCreateInfo,
+        VmaAllocation allocation);
+    void RecordCreateImage(uint32_t frameIndex,
+        const VkImageCreateInfo& imageCreateInfo,
+        const VmaAllocationCreateInfo& allocCreateInfo,
+        VmaAllocation allocation);
+    void RecordDestroyBuffer(uint32_t frameIndex,
+        VmaAllocation allocation);
+    void RecordDestroyImage(uint32_t frameIndex,
+        VmaAllocation allocation);
+    void RecordTouchAllocation(uint32_t frameIndex,
+        VmaAllocation allocation);
+    void RecordGetAllocationInfo(uint32_t frameIndex,
+        VmaAllocation allocation);
+    void RecordMakePoolAllocationsLost(uint32_t frameIndex,
+        VmaPool pool);
+    void RecordDefragmentationBegin(uint32_t frameIndex,
+        const VmaDefragmentationInfo2& info,
+        VmaDefragmentationContext ctx);
+    void RecordDefragmentationEnd(uint32_t frameIndex,
+        VmaDefragmentationContext ctx);
+
+private:
+    struct CallParams
+    {
+        uint32_t threadId;
+        double time;
+    };
+
+    class UserDataString
+    {
+    public:
+        UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData);
+        const char* GetString() const { return m_Str; }
+
+    private:
+        char m_PtrStr[17];
+        const char* m_Str;
+    };
+
+    bool m_UseMutex;
+    VmaRecordFlags m_Flags;
+    FILE* m_File;
+    VMA_MUTEX m_FileMutex;
+    int64_t m_Freq;
+    int64_t m_StartCounter;
+
+    void GetBasicParams(CallParams& outParams);
+
+    // T must be a pointer type, e.g. VmaAllocation, VmaPool.
+    template<typename T>
+    void PrintPointerList(uint64_t count, const T* pItems)
+    {
+        if(count)
+        {
+            fprintf(m_File, "%p", pItems[0]);
+            for(uint64_t i = 1; i < count; ++i)
+            {
+                fprintf(m_File, " %p", pItems[i]);
+            }
+        }
+    }
+
+    void PrintPointerList(uint64_t count, const VmaAllocation* pItems);
+    void Flush();
+};
+
+#endif // #if VMA_RECORDING_ENABLED
+
+// Main allocator object.
+struct VmaAllocator_T
+{
+    VMA_CLASS_NO_COPY(VmaAllocator_T)
+public:
+    bool m_UseMutex;
+    bool m_UseKhrDedicatedAllocation;
+    VkDevice m_hDevice;
+    bool m_AllocationCallbacksSpecified;
+    VkAllocationCallbacks m_AllocationCallbacks;
+    VmaDeviceMemoryCallbacks m_DeviceMemoryCallbacks;
+    
+    // Number of bytes still available within the heap size limit, or VK_WHOLE_SIZE if there is no limit for that heap.
+    VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
+    VMA_MUTEX m_HeapSizeLimitMutex;
+
+    VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
+    VkPhysicalDeviceMemoryProperties m_MemProps;
+
+    // Default pools.
+    VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
+
+    // Each vector is sorted by memory (handle value).
+    typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
+    AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
+    VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
+
+    VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo);
+    VkResult Init(const VmaAllocatorCreateInfo* pCreateInfo);
+    ~VmaAllocator_T();
+
+    const VkAllocationCallbacks* GetAllocationCallbacks() const
+    {
+        return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
+    }
+    const VmaVulkanFunctions& GetVulkanFunctions() const
+    {
+        return m_VulkanFunctions;
+    }
+
+    VkDeviceSize GetBufferImageGranularity() const
+    {
+        return VMA_MAX(
+            static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
+            m_PhysicalDeviceProperties.limits.bufferImageGranularity);
+    }
+
+    uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
+    uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }
+
+    uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
+    {
+        VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
+        return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
+    }
+    // True when specific memory type is HOST_VISIBLE but not HOST_COHERENT.
+    bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const
+    {
+        return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
+            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+    }
+    // Minimum alignment for all allocations in specific memory type.
+    VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const
+    {
+        return IsMemoryTypeNonCoherent(memTypeIndex) ?
+            VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
+            (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
+    }
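+    // Illustrative note (added): e.g. if VMA_DEBUG_ALIGNMENT is 1 (its
+    // default) and nonCoherentAtomSize is 64, allocations in a HOST_VISIBLE
+    // but not HOST_COHERENT memory type are aligned to 64 bytes, so whole
+    // atoms can be flushed/invalidated safely.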
+
+    bool IsIntegratedGpu() const
+    {
+        return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
+    }
+
+#if VMA_RECORDING_ENABLED
+    VmaRecorder* GetRecorder() const { return m_pRecorder; }
+#endif
+
+    void GetBufferMemoryRequirements(
+        VkBuffer hBuffer,
+        VkMemoryRequirements& memReq,
+        bool& requiresDedicatedAllocation,
+        bool& prefersDedicatedAllocation) const;
+    void GetImageMemoryRequirements(
+        VkImage hImage,
+        VkMemoryRequirements& memReq,
+        bool& requiresDedicatedAllocation,
+        bool& prefersDedicatedAllocation) const;
+
+    // Main allocation function.
+    VkResult AllocateMemory(
+        const VkMemoryRequirements& vkMemReq,
+        bool requiresDedicatedAllocation,
+        bool prefersDedicatedAllocation,
+        VkBuffer dedicatedBuffer,
+        VkImage dedicatedImage,
+        const VmaAllocationCreateInfo& createInfo,
+        VmaSuballocationType suballocType,
+        size_t allocationCount,
+        VmaAllocation* pAllocations);
+
+    // Main deallocation function.
+    void FreeMemory(
+        size_t allocationCount,
+        const VmaAllocation* pAllocations);
+
+    VkResult ResizeAllocation(
+        const VmaAllocation alloc,
+        VkDeviceSize newSize);
+
+    void CalculateStats(VmaStats* pStats);
+
+#if VMA_STATS_STRING_ENABLED
+    void PrintDetailedMap(class VmaJsonWriter& json);
+#endif
+
+    VkResult DefragmentationBegin(
+        const VmaDefragmentationInfo2& info,
+        VmaDefragmentationStats* pStats,
+        VmaDefragmentationContext* pContext);
+    VkResult DefragmentationEnd(
+        VmaDefragmentationContext context);
+
+    void GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo);
+    bool TouchAllocation(VmaAllocation hAllocation);
+
+    VkResult CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool);
+    void DestroyPool(VmaPool pool);
+    void GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats);
+
+    void SetCurrentFrameIndex(uint32_t frameIndex);
+    uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); }
+
+    void MakePoolAllocationsLost(
+        VmaPool hPool,
+        size_t* pLostAllocationCount);
+    VkResult CheckPoolCorruption(VmaPool hPool);
+    VkResult CheckCorruption(uint32_t memoryTypeBits);
+
+    void CreateLostAllocation(VmaAllocation* pAllocation);
+
+    VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
+    void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
+
+    VkResult Map(VmaAllocation hAllocation, void** ppData);
+    void Unmap(VmaAllocation hAllocation);
+
+    VkResult BindBufferMemory(VmaAllocation hAllocation, VkBuffer hBuffer);
+    VkResult BindImageMemory(VmaAllocation hAllocation, VkImage hImage);
+
+    void FlushOrInvalidateAllocation(
+        VmaAllocation hAllocation,
+        VkDeviceSize offset, VkDeviceSize size,
+        VMA_CACHE_OPERATION op);
+
+    void FillAllocation(const VmaAllocation hAllocation, uint8_t pattern);
+
+private:
+    VkDeviceSize m_PreferredLargeHeapBlockSize;
+
+    VkPhysicalDevice m_PhysicalDevice;
+    VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
+    
+    VMA_RW_MUTEX m_PoolsMutex;
+    // Protected by m_PoolsMutex. Sorted by pointer value.
+    VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
+    uint32_t m_NextPoolId;
+
+    VmaVulkanFunctions m_VulkanFunctions;
+
+#if VMA_RECORDING_ENABLED
+    VmaRecorder* m_pRecorder;
+#endif
+
+    void ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions);
+
+    VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
+
+    VkResult AllocateMemoryOfType(
+        VkDeviceSize size,
+        VkDeviceSize alignment,
+        bool dedicatedAllocation,
+        VkBuffer dedicatedBuffer,
+        VkImage dedicatedImage,
+        const VmaAllocationCreateInfo& createInfo,
+        uint32_t memTypeIndex,
+        VmaSuballocationType suballocType,
+        size_t allocationCount,
+        VmaAllocation* pAllocations);
+
+    // Helper function only to be used inside AllocateDedicatedMemory.
+    VkResult AllocateDedicatedMemoryPage(
+        VkDeviceSize size,
+        VmaSuballocationType suballocType,
+        uint32_t memTypeIndex,
+        const VkMemoryAllocateInfo& allocInfo,
+        bool map,
+        bool isUserDataString,
+        void* pUserData,
+        VmaAllocation* pAllocation);
+
+    // Allocates and registers new VkDeviceMemory specifically for dedicated allocations.
+    VkResult AllocateDedicatedMemory(
+        VkDeviceSize size,
+        VmaSuballocationType suballocType,
+        uint32_t memTypeIndex,
+        bool map,
+        bool isUserDataString,
+        void* pUserData,
+        VkBuffer dedicatedBuffer,
+        VkImage dedicatedImage,
+        size_t allocationCount,
+        VmaAllocation* pAllocations);
+
+    // Frees the given allocation, which must be a dedicated one, and unregisters it from m_pDedicatedAllocations.
+    void FreeDedicatedMemory(VmaAllocation allocation);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+// Memory allocation #2 after VmaAllocator_T definition
+
+static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
+{
+    return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
+}
+
+static void VmaFree(VmaAllocator hAllocator, void* ptr)
+{
+    VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
+}
+
+template<typename T>
+static T* VmaAllocate(VmaAllocator hAllocator)
+{
+    return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
+}
+
+template<typename T>
+static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
+{
+    return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
+}
+
+template<typename T>
+static void vma_delete(VmaAllocator hAllocator, T* ptr)
+{
+    if(ptr != VMA_NULL)
+    {
+        ptr->~T();
+        VmaFree(hAllocator, ptr);
+    }
+}
+
+template<typename T>
+static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
+{
+    if(ptr != VMA_NULL)
+    {
+        for(size_t i = count; i--; )
+            ptr[i].~T();
+        VmaFree(hAllocator, ptr);
+    }
+}
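+
+// Hedged illustrative sketch (added, not part of the original library): the
+// intended pairing of the helpers above - VmaAllocateArray<T>() obtains raw
+// storage through the allocator's callbacks and vma_delete_array() destroys
+// the elements and returns the storage. The helper name is an assumption;
+// uint32_t is used so that skipping constructors is harmless.
+static void VmaExampleAllocateAndFreeArray(VmaAllocator hAllocator, size_t count)
+{
+    uint32_t* const pValues = VmaAllocateArray<uint32_t>(hAllocator, count);
+    for(size_t i = 0; i < count; ++i)
+    {
+        pValues[i] = static_cast<uint32_t>(i);
+    }
+    vma_delete_array(hAllocator, pValues, count);
+}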
+
+////////////////////////////////////////////////////////////////////////////////
+// VmaStringBuilder
+
+#if VMA_STATS_STRING_ENABLED
+
+class VmaStringBuilder
+{
+public:
+    VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
+    size_t GetLength() const { return m_Data.size(); }
+    const char* GetData() const { return m_Data.data(); }
+
+    void Add(char ch) { m_Data.push_back(ch); }
+    void Add(const char* pStr);
+    void AddNewLine() { Add('\n'); }
+    void AddNumber(uint32_t num);
+    void AddNumber(uint64_t num);
+    void AddPointer(const void* ptr);
+
+private:
+    VmaVector< char, VmaStlAllocator<char> > m_Data;
+};
+
+void VmaStringBuilder::Add(const char* pStr)
+{
+    const size_t strLen = strlen(pStr);
+    if(strLen > 0)
+    {
+        const size_t oldCount = m_Data.size();
+        m_Data.resize(oldCount + strLen);
+        memcpy(m_Data.data() + oldCount, pStr, strLen);
+    }
+}
+
+void VmaStringBuilder::AddNumber(uint32_t num)
+{
+    char buf[11];
+    VmaUint32ToStr(buf, sizeof(buf), num);
+    Add(buf);
+}
+
+void VmaStringBuilder::AddNumber(uint64_t num)
+{
+    char buf[21];
+    VmaUint64ToStr(buf, sizeof(buf), num);
+    Add(buf);
+}
+
+void VmaStringBuilder::AddPointer(const void* ptr)
+{
+    char buf[21];
+    VmaPtrToStr(buf, sizeof(buf), ptr);
+    Add(buf);
+}
+
+#endif // #if VMA_STATS_STRING_ENABLED
+
+////////////////////////////////////////////////////////////////////////////////
+// VmaJsonWriter
+
+#if VMA_STATS_STRING_ENABLED
+
+class VmaJsonWriter
+{
+    VMA_CLASS_NO_COPY(VmaJsonWriter)
+public:
+    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
+    ~VmaJsonWriter();
+
+    void BeginObject(bool singleLine = false);
+    void EndObject();
+    
+    void BeginArray(bool singleLine = false);
+    void EndArray();
+    
+    void WriteString(const char* pStr);
+    void BeginString(const char* pStr = VMA_NULL);
+    void ContinueString(const char* pStr);
+    void ContinueString(uint32_t n);
+    void ContinueString(uint64_t n);
+    void ContinueString_Pointer(const void* ptr);
+    void EndString(const char* pStr = VMA_NULL);
+    
+    void WriteNumber(uint32_t n);
+    void WriteNumber(uint64_t n);
+    void WriteBool(bool b);
+    void WriteNull();
+
+private:
+    static const char* const INDENT;
+
+    enum COLLECTION_TYPE
+    {
+        COLLECTION_TYPE_OBJECT,
+        COLLECTION_TYPE_ARRAY,
+    };
+    struct StackItem
+    {
+        COLLECTION_TYPE type;
+        uint32_t valueCount;
+        bool singleLineMode;
+    };
+
+    VmaStringBuilder& m_SB;
+    VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
+    bool m_InsideString;
+
+    void BeginValue(bool isString);
+    void WriteIndent(bool oneLess = false);
+};
+
+const char* const VmaJsonWriter::INDENT = "  ";
+
+VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
+    m_SB(sb),
+    m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
+    m_InsideString(false)
+{
+}
+
+VmaJsonWriter::~VmaJsonWriter()
+{
+    VMA_ASSERT(!m_InsideString);
+    VMA_ASSERT(m_Stack.empty());
+}
+
+void VmaJsonWriter::BeginObject(bool singleLine)
+{
+    VMA_ASSERT(!m_InsideString);
+
+    BeginValue(false);
+    m_SB.Add('{');
+
+    StackItem item;
+    item.type = COLLECTION_TYPE_OBJECT;
+    item.valueCount = 0;
+    item.singleLineMode = singleLine;
+    m_Stack.push_back(item);
+}
+
+void VmaJsonWriter::EndObject()
+{
+    VMA_ASSERT(!m_InsideString);
+
+    WriteIndent(true);
+    m_SB.Add('}');
+
+    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
+    m_Stack.pop_back();
+}
+
+void VmaJsonWriter::BeginArray(bool singleLine)
+{
+    VMA_ASSERT(!m_InsideString);
+
+    BeginValue(false);
+    m_SB.Add('[');
+
+    StackItem item;
+    item.type = COLLECTION_TYPE_ARRAY;
+    item.valueCount = 0;
+    item.singleLineMode = singleLine;
+    m_Stack.push_back(item);
+}
+
+void VmaJsonWriter::EndArray()
+{
+    VMA_ASSERT(!m_InsideString);
+
+    WriteIndent(true);
+    m_SB.Add(']');
+
+    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
+    m_Stack.pop_back();
+}
+
+void VmaJsonWriter::WriteString(const char* pStr)
+{
+    BeginString(pStr);
+    EndString();
+}
+
+void VmaJsonWriter::BeginString(const char* pStr)
+{
+    VMA_ASSERT(!m_InsideString);
+
+    BeginValue(true);
+    m_SB.Add('"');
+    m_InsideString = true;
+    if(pStr != VMA_NULL && pStr[0] != '\0')
+    {
+        ContinueString(pStr);
+    }
+}
+
+void VmaJsonWriter::ContinueString(const char* pStr)
+{
+    VMA_ASSERT(m_InsideString);
+
+    const size_t strLen = strlen(pStr);
+    for(size_t i = 0; i < strLen; ++i)
+    {
+        char ch = pStr[i];
+        if(ch == '\\')
+        {
+            m_SB.Add("\\\\");
+        }
+        else if(ch == '"')
+        {
+            m_SB.Add("\\\"");
+        }
+        else if(ch >= 32)
+        {
+            m_SB.Add(ch);
+        }
+        else switch(ch)
+        {
+        case '\b':
+            m_SB.Add("\\b");
+            break;
+        case '\f':
+            m_SB.Add("\\f");
+            break;
+        case '\n':
+            m_SB.Add("\\n");
+            break;
+        case '\r':
+            m_SB.Add("\\r");
+            break;
+        case '\t':
+            m_SB.Add("\\t");
+            break;
+        default:
+            VMA_ASSERT(0 && "Character not currently supported.");
+            break;
+        }
+    }
+}
+
+void VmaJsonWriter::ContinueString(uint32_t n)
+{
+    VMA_ASSERT(m_InsideString);
+    m_SB.AddNumber(n);
+}
+
+void VmaJsonWriter::ContinueString(uint64_t n)
+{
+    VMA_ASSERT(m_InsideString);
+    m_SB.AddNumber(n);
+}
+
+void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
+{
+    VMA_ASSERT(m_InsideString);
+    m_SB.AddPointer(ptr);
+}
+
+void VmaJsonWriter::EndString(const char* pStr)
+{
+    VMA_ASSERT(m_InsideString);
+    if(pStr != VMA_NULL && pStr[0] != '\0')
+    {
+        ContinueString(pStr);
+    }
+    m_SB.Add('"');
+    m_InsideString = false;
+}
+
+void VmaJsonWriter::WriteNumber(uint32_t n)
+{
+    VMA_ASSERT(!m_InsideString);
+    BeginValue(false);
+    m_SB.AddNumber(n);
+}
+
+void VmaJsonWriter::WriteNumber(uint64_t n)
+{
+    VMA_ASSERT(!m_InsideString);
+    BeginValue(false);
+    m_SB.AddNumber(n);
+}
+
+void VmaJsonWriter::WriteBool(bool b)
+{
+    VMA_ASSERT(!m_InsideString);
+    BeginValue(false);
+    m_SB.Add(b ? "true" : "false");
+}
+
+void VmaJsonWriter::WriteNull()
+{
+    VMA_ASSERT(!m_InsideString);
+    BeginValue(false);
+    m_SB.Add("null");
+}
+
+void VmaJsonWriter::BeginValue(bool isString)
+{
+    if(!m_Stack.empty())
+    {
+        StackItem& currItem = m_Stack.back();
+        if(currItem.type == COLLECTION_TYPE_OBJECT &&
+            currItem.valueCount % 2 == 0)
+        {
+            VMA_ASSERT(isString);
+        }
+
+        if(currItem.type == COLLECTION_TYPE_OBJECT &&
+            currItem.valueCount % 2 != 0)
+        {
+            m_SB.Add(": ");
+        }
+        else if(currItem.valueCount > 0)
+        {
+            m_SB.Add(", ");
+            WriteIndent();
+        }
+        else
+        {
+            WriteIndent();
+        }
+        ++currItem.valueCount;
+    }
+}
+
+void VmaJsonWriter::WriteIndent(bool oneLess)
+{
+    if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
+    {
+        m_SB.AddNewLine();
+        
+        size_t count = m_Stack.size();
+        if(count > 0 && oneLess)
+        {
+            --count;
+        }
+        for(size_t i = 0; i < count; ++i)
+        {
+            m_SB.Add(INDENT);
+        }
+    }
+}
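+
+// Hedged illustrative sketch (added, not part of the original library): how
+// the writer above is meant to be driven. Inside an object, keys and values
+// must alternate - BeginValue() enforces this via the valueCount parity check.
+// The helper name and the emitted keys are assumptions for the example.
+static void VmaExampleWriteJson(VmaAllocator hAllocator, VmaStringBuilder& sb)
+{
+    VmaJsonWriter json(hAllocator->GetAllocationCallbacks(), sb);
+    json.BeginObject();
+    json.WriteString("BlockCount");                 // key
+    json.WriteNumber(static_cast<uint32_t>(3));     // value
+    json.WriteString("Empty");                      // key
+    json.WriteBool(false);                          // value
+    json.EndObject();
+    // sb now contains: {\n  "BlockCount": 3,\n  "Empty": false\n}
+}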
+
+#endif // #if VMA_STATS_STRING_ENABLED
+
+////////////////////////////////////////////////////////////////////////////////
+
+void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
+{
+    if(IsUserDataString())
+    {
+        VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
+
+        FreeUserDataString(hAllocator);
+
+        if(pUserData != VMA_NULL)
+        {
+            const char* const newStrSrc = (char*)pUserData;
+            const size_t newStrLen = strlen(newStrSrc);
+            char* const newStrDst = vma_new_array(hAllocator, char, newStrLen + 1);
+            memcpy(newStrDst, newStrSrc, newStrLen + 1);
+            m_pUserData = newStrDst;
+        }
+    }
+    else
+    {
+        m_pUserData = pUserData;
+    }
+}
+
+void VmaAllocation_T::ChangeBlockAllocation(
+    VmaAllocator hAllocator,
+    VmaDeviceMemoryBlock* block,
+    VkDeviceSize offset)
+{
+    VMA_ASSERT(block != VMA_NULL);
+    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
+
+    // Move mapping reference counter from old block to new block.
+    if(block != m_BlockAllocation.m_Block)
+    {
+        uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
+        if(IsPersistentMap())
+            ++mapRefCount;
+        m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
+        block->Map(hAllocator, mapRefCount, VMA_NULL);
+    }
+
+    m_BlockAllocation.m_Block = block;
+    m_BlockAllocation.m_Offset = offset;
+}
+
+void VmaAllocation_T::ChangeSize(VkDeviceSize newSize)
+{
+    VMA_ASSERT(newSize > 0);
+    m_Size = newSize;
+}
+
+void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
+{
+    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
+    m_BlockAllocation.m_Offset = newOffset;
+}
+
+VkDeviceSize VmaAllocation_T::GetOffset() const
+{
+    switch(m_Type)
+    {
+    case ALLOCATION_TYPE_BLOCK:
+        return m_BlockAllocation.m_Offset;
+    case ALLOCATION_TYPE_DEDICATED:
+        return 0;
+    default:
+        VMA_ASSERT(0);
+        return 0;
+    }
+}
+
+VkDeviceMemory VmaAllocation_T::GetMemory() const
+{
+    switch(m_Type)
+    {
+    case ALLOCATION_TYPE_BLOCK:
+        return m_BlockAllocation.m_Block->GetDeviceMemory();
+    case ALLOCATION_TYPE_DEDICATED:
+        return m_DedicatedAllocation.m_hMemory;
+    default:
+        VMA_ASSERT(0);
+        return VK_NULL_HANDLE;
+    }
+}
+
+uint32_t VmaAllocation_T::GetMemoryTypeIndex() const
+{
+    switch(m_Type)
+    {
+    case ALLOCATION_TYPE_BLOCK:
+        return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
+    case ALLOCATION_TYPE_DEDICATED:
+        return m_DedicatedAllocation.m_MemoryTypeIndex;
+    default:
+        VMA_ASSERT(0);
+        return UINT32_MAX;
+    }
+}
+
+void* VmaAllocation_T::GetMappedData() const
+{
+    switch(m_Type)
+    {
+    case ALLOCATION_TYPE_BLOCK:
+        if(m_MapCount != 0)
+        {
+            void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
+            VMA_ASSERT(pBlockData != VMA_NULL);
+            return (char*)pBlockData + m_BlockAllocation.m_Offset;
+        }
+        else
+        {
+            return VMA_NULL;
+        }
+        break;
+    case ALLOCATION_TYPE_DEDICATED:
+        VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
+        return m_DedicatedAllocation.m_pMappedData;
+    default:
+        VMA_ASSERT(0);
+        return VMA_NULL;
+    }
+}
+
+bool VmaAllocation_T::CanBecomeLost() const
+{
+    switch(m_Type)
+    {
+    case ALLOCATION_TYPE_BLOCK:
+        return m_BlockAllocation.m_CanBecomeLost;
+    case ALLOCATION_TYPE_DEDICATED:
+        return false;
+    default:
+        VMA_ASSERT(0);
+        return false;
+    }
+}
+
+VmaPool VmaAllocation_T::GetPool() const
+{
+    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
+    return m_BlockAllocation.m_hPool;
+}
+
+bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
+{
+    VMA_ASSERT(CanBecomeLost());
+
+    /*
+    Warning: This is a carefully designed algorithm.
+    Do not modify unless you really know what you're doing :)
+    */
+    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
+    for(;;)
+    {
+        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
+        {
+            VMA_ASSERT(0);
+            return false;
+        }
+        else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
+        {
+            return false;
+        }
+        else // Last use time earlier than current time.
+        {
+            if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
+            {
+                // Setting hAllocation.LastUseFrameIndex atomic to VMA_FRAME_INDEX_LOST is enough to mark it as LOST.
+                // Calling code just needs to unregister this allocation in owning VmaDeviceMemoryBlock.
+                return true;
+            }
+        }
+    }
+}
+
+#if VMA_STATS_STRING_ENABLED
+
+// Corresponds to the values of enum VmaSuballocationType.
+static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
+    "FREE",
+    "UNKNOWN",
+    "BUFFER",
+    "IMAGE_UNKNOWN",
+    "IMAGE_LINEAR",
+    "IMAGE_OPTIMAL",
+};
+
+void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const
+{
+    json.WriteString("Type");
+    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
+
+    json.WriteString("Size");
+    json.WriteNumber(m_Size);
+
+    if(m_pUserData != VMA_NULL)
+    {
+        json.WriteString("UserData");
+        if(IsUserDataString())
+        {
+            json.WriteString((const char*)m_pUserData);
+        }
+        else
+        {
+            json.BeginString();
+            json.ContinueString_Pointer(m_pUserData);
+            json.EndString();
+        }
+    }
+
+    json.WriteString("CreationFrameIndex");
+    json.WriteNumber(m_CreationFrameIndex);
+
+    json.WriteString("LastUseFrameIndex");
+    json.WriteNumber(GetLastUseFrameIndex());
+
+    if(m_BufferImageUsage != 0)
+    {
+        json.WriteString("Usage");
+        json.WriteNumber(m_BufferImageUsage);
+    }
+}
+
+#endif
+
+void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
+{
+    VMA_ASSERT(IsUserDataString());
+    if(m_pUserData != VMA_NULL)
+    {
+        char* const oldStr = (char*)m_pUserData;
+        const size_t oldStrLen = strlen(oldStr);
+        vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
+        m_pUserData = VMA_NULL;
+    }
+}
+
+void VmaAllocation_T::BlockAllocMap()
+{
+    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
+
+    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
+    {
+        ++m_MapCount;
+    }
+    else
+    {
+        VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
+    }
+}
+
+void VmaAllocation_T::BlockAllocUnmap()
+{
+    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
+
+    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
+    {
+        --m_MapCount;
+    }
+    else
+    {
+        VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
+    }
+}
+
+VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
+{
+    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
+
+    if(m_MapCount != 0)
+    {
+        if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
+        {
+            VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
+            *ppData = m_DedicatedAllocation.m_pMappedData;
+            ++m_MapCount;
+            return VK_SUCCESS;
+        }
+        else
+        {
+            VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
+            return VK_ERROR_MEMORY_MAP_FAILED;
+        }
+    }
+    else
+    {
+        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
+            hAllocator->m_hDevice,
+            m_DedicatedAllocation.m_hMemory,
+            0, // offset
+            VK_WHOLE_SIZE,
+            0, // flags
+            ppData);
+        if(result == VK_SUCCESS)
+        {
+            m_DedicatedAllocation.m_pMappedData = *ppData;
+            m_MapCount = 1;
+        }
+        return result;
+    }
+}
+
+void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
+{
+    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
+
+    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
+    {
+        --m_MapCount;
+        if(m_MapCount == 0)
+        {
+            m_DedicatedAllocation.m_pMappedData = VMA_NULL;
+            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
+                hAllocator->m_hDevice,
+                m_DedicatedAllocation.m_hMemory);
+        }
+    }
+    else
+    {
+        VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
+    }
+}
+
+#if VMA_STATS_STRING_ENABLED
+
+static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
+{
+    json.BeginObject();
+
+    json.WriteString("Blocks");
+    json.WriteNumber(stat.blockCount);
+
+    json.WriteString("Allocations");
+    json.WriteNumber(stat.allocationCount);
+
+    json.WriteString("UnusedRanges");
+    json.WriteNumber(stat.unusedRangeCount);
+
+    json.WriteString("UsedBytes");
+    json.WriteNumber(stat.usedBytes);
+
+    json.WriteString("UnusedBytes");
+    json.WriteNumber(stat.unusedBytes);
+
+    if(stat.allocationCount > 1)
+    {
+        json.WriteString("AllocationSize");
+        json.BeginObject(true);
+        json.WriteString("Min");
+        json.WriteNumber(stat.allocationSizeMin);
+        json.WriteString("Avg");
+        json.WriteNumber(stat.allocationSizeAvg);
+        json.WriteString("Max");
+        json.WriteNumber(stat.allocationSizeMax);
+        json.EndObject();
+    }
+
+    if(stat.unusedRangeCount > 1)
+    {
+        json.WriteString("UnusedRangeSize");
+        json.BeginObject(true);
+        json.WriteString("Min");
+        json.WriteNumber(stat.unusedRangeSizeMin);
+        json.WriteString("Avg");
+        json.WriteNumber(stat.unusedRangeSizeAvg);
+        json.WriteString("Max");
+        json.WriteNumber(stat.unusedRangeSizeMax);
+        json.EndObject();
+    }
+
+    json.EndObject();
+}
+
+#endif // #if VMA_STATS_STRING_ENABLED
+
+struct VmaSuballocationItemSizeLess
+{
+    bool operator()(
+        const VmaSuballocationList::iterator lhs,
+        const VmaSuballocationList::iterator rhs) const
+    {
+        return lhs->size < rhs->size;
+    }
+    bool operator()(
+        const VmaSuballocationList::iterator lhs,
+        VkDeviceSize rhsSize) const
+    {
+        return lhs->size < rhsSize;
+    }
+};
+
+
+////////////////////////////////////////////////////////////////////////////////
+// class VmaBlockMetadata
+
+VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
+    m_Size(0),
+    m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
+{
+}
+
+#if VMA_STATS_STRING_ENABLED
+
+void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json,
+    VkDeviceSize unusedBytes,
+    size_t allocationCount,
+    size_t unusedRangeCount) const
+{
+    json.BeginObject();
+
+    json.WriteString("TotalBytes");
+    json.WriteNumber(GetSize());
+
+    json.WriteString("UnusedBytes");
+    json.WriteNumber(unusedBytes);
+
+    json.WriteString("Allocations");
+    json.WriteNumber((uint64_t)allocationCount);
+
+    json.WriteString("UnusedRanges");
+    json.WriteNumber((uint64_t)unusedRangeCount);
+
+    json.WriteString("Suballocations");
+    json.BeginArray();
+}
+
+void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json,
+    VkDeviceSize offset,
+    VmaAllocation hAllocation) const
+{
+    json.BeginObject(true);
+        
+    json.WriteString("Offset");
+    json.WriteNumber(offset);
+
+    hAllocation->PrintParameters(json);
+
+    json.EndObject();
+}
+
+void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
+    VkDeviceSize offset,
+    VkDeviceSize size) const
+{
+    json.BeginObject(true);
+        
+    json.WriteString("Offset");
+    json.WriteNumber(offset);
+
+    json.WriteString("Type");
+    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
+
+    json.WriteString("Size");
+    json.WriteNumber(size);
+
+    json.EndObject();
+}
+
+void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const
+{
+    json.EndArray();
+    json.EndObject();
+}
+
+#endif // #if VMA_STATS_STRING_ENABLED
+
+////////////////////////////////////////////////////////////////////////////////
+// class VmaBlockMetadata_Generic
+
+VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(VmaAllocator hAllocator) :
+    VmaBlockMetadata(hAllocator),
+    m_FreeCount(0),
+    m_SumFreeSize(0),
+    m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
+    m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
+{
+}
+
+VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
+{
+}
+
+void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
+{
+    VmaBlockMetadata::Init(size);
+
+    m_FreeCount = 1;
+    m_SumFreeSize = size;
+
+    VmaSuballocation suballoc = {};
+    suballoc.offset = 0;
+    suballoc.size = size;
+    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+    suballoc.hAllocation = VK_NULL_HANDLE;
+
+    VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
+    m_Suballocations.push_back(suballoc);
+    VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
+    --suballocItem;
+    m_FreeSuballocationsBySize.push_back(suballocItem);
+}
+
+bool VmaBlockMetadata_Generic::Validate() const
+{
+    VMA_VALIDATE(!m_Suballocations.empty());
+    
+    // Expected offset of new suballocation as calculated from previous ones.
+    VkDeviceSize calculatedOffset = 0;
+    // Expected number of free suballocations as calculated from traversing their list.
+    uint32_t calculatedFreeCount = 0;
+    // Expected sum size of free suballocations as calculated from traversing their list.
+    VkDeviceSize calculatedSumFreeSize = 0;
+    // Expected number of free suballocations that should be registered in
+    // m_FreeSuballocationsBySize calculated from traversing their list.
+    size_t freeSuballocationsToRegister = 0;
+    // True if previous visited suballocation was free.
+    bool prevFree = false;
+
+    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
+        suballocItem != m_Suballocations.cend();
+        ++suballocItem)
+    {
+        const VmaSuballocation& subAlloc = *suballocItem;
+        
+        // Actual offset of this suballocation doesn't match expected one.
+        VMA_VALIDATE(subAlloc.offset == calculatedOffset);
+
+        const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
+        // Two adjacent free suballocations are invalid. They should be merged.
+        VMA_VALIDATE(!prevFree || !currFree);
+
+        VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
+
+        if(currFree)
+        {
+            calculatedSumFreeSize += subAlloc.size;
+            ++calculatedFreeCount;
+            if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
+            {
+                ++freeSuballocationsToRegister;
+            }
+
+            // Margin required between allocations - every free space must be at least that large.
+            VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
+        }
+        else
+        {
+            VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
+            VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
+
+            // Margin required between allocations - previous allocation must be free.
+            VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
+        }
+
+        calculatedOffset += subAlloc.size;
+        prevFree = currFree;
+    }
+
+    // Number of free suballocations registered in m_FreeSuballocationsBySize doesn't
+    // match expected one.
+    VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
+
+    VkDeviceSize lastSize = 0;
+    for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
+    {
+        VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
+        
+        // Only free suballocations can be registered in m_FreeSuballocationsBySize.
+        VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
+        // They must be sorted by size ascending.
+        VMA_VALIDATE(suballocItem->size >= lastSize);
+
+        lastSize = suballocItem->size;
+    }
+
+    // Check if totals match calculated values.
+    VMA_VALIDATE(ValidateFreeSuballocationList());
+    VMA_VALIDATE(calculatedOffset == GetSize());
+    VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
+    VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
+
+    return true;
+}
+
+VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax() const
+{
+    if(!m_FreeSuballocationsBySize.empty())
+    {
+        return m_FreeSuballocationsBySize.back()->size;
+    }
+    else
+    {
+        return 0;
+    }
+}
+
+bool VmaBlockMetadata_Generic::IsEmpty() const
+{
+    return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
+}
+
+void VmaBlockMetadata_Generic::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
+{
+    outInfo.blockCount = 1;
+
+    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
+    outInfo.allocationCount = rangeCount - m_FreeCount;
+    outInfo.unusedRangeCount = m_FreeCount;
+    
+    outInfo.unusedBytes = m_SumFreeSize;
+    outInfo.usedBytes = GetSize() - outInfo.unusedBytes;
+
+    outInfo.allocationSizeMin = UINT64_MAX;
+    outInfo.allocationSizeMax = 0;
+    outInfo.unusedRangeSizeMin = UINT64_MAX;
+    outInfo.unusedRangeSizeMax = 0;
+
+    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
+        suballocItem != m_Suballocations.cend();
+        ++suballocItem)
+    {
+        const VmaSuballocation& suballoc = *suballocItem;
+        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
+        {
+            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
+            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
+        }
+        else
+        {
+            outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
+            outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
+        }
+    }
+}
+
+void VmaBlockMetadata_Generic::AddPoolStats(VmaPoolStats& inoutStats) const
+{
+    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
+
+    inoutStats.size += GetSize();
+    inoutStats.unusedSize += m_SumFreeSize;
+    inoutStats.allocationCount += rangeCount - m_FreeCount;
+    inoutStats.unusedRangeCount += m_FreeCount;
+    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
+}
+
+#if VMA_STATS_STRING_ENABLED
+
+void VmaBlockMetadata_Generic::PrintDetailedMap(class VmaJsonWriter& json) const
+{
+    PrintDetailedMap_Begin(json,
+        m_SumFreeSize, // unusedBytes
+        m_Suballocations.size() - (size_t)m_FreeCount, // allocationCount
+        m_FreeCount); // unusedRangeCount
+
+    size_t i = 0;
+    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
+        suballocItem != m_Suballocations.cend();
+        ++suballocItem, ++i)
+    {
+        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
+        {
+            PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
+        }
+        else
+        {
+            PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
+        }
+    }
+
+    PrintDetailedMap_End(json);
+}
+
+#endif // #if VMA_STATS_STRING_ENABLED
+
+bool VmaBlockMetadata_Generic::CreateAllocationRequest(
+    uint32_t currentFrameIndex,
+    uint32_t frameInUseCount,
+    VkDeviceSize bufferImageGranularity,
+    VkDeviceSize allocSize,
+    VkDeviceSize allocAlignment,
+    bool upperAddress,
+    VmaSuballocationType allocType,
+    bool canMakeOtherLost,
+    uint32_t strategy,
+    VmaAllocationRequest* pAllocationRequest)
+{
+    VMA_ASSERT(allocSize > 0);
+    VMA_ASSERT(!upperAddress);
+    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
+    VMA_ASSERT(pAllocationRequest != VMA_NULL);
+    VMA_HEAVY_ASSERT(Validate());
+
+    // There is not enough total free space in this block to fulfill the request: Early return.
+    if(canMakeOtherLost == false &&
+        m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
+    {
+        return false;
+    }
+
+    // New algorithm, efficiently searching freeSuballocationsBySize.
+    const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
+    if(freeSuballocCount > 0)
+    {
+        if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
+        {
+            // Find first free suballocation with size not less than allocSize + 2 * VMA_DEBUG_MARGIN.
+            VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
+                m_FreeSuballocationsBySize.data(),
+                m_FreeSuballocationsBySize.data() + freeSuballocCount,
+                allocSize + 2 * VMA_DEBUG_MARGIN,
+                VmaSuballocationItemSizeLess());
+            size_t index = it - m_FreeSuballocationsBySize.data();
+            for(; index < freeSuballocCount; ++index)
+            {
+                if(CheckAllocation(
+                    currentFrameIndex,
+                    frameInUseCount,
+                    bufferImageGranularity,
+                    allocSize,
+                    allocAlignment,
+                    allocType,
+                    m_FreeSuballocationsBySize[index],
+                    false, // canMakeOtherLost
+                    &pAllocationRequest->offset,
+                    &pAllocationRequest->itemsToMakeLostCount,
+                    &pAllocationRequest->sumFreeSize,
+                    &pAllocationRequest->sumItemSize))
+                {
+                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
+                    return true;
+                }
+            }
+        }
+        else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
+        {
+            for(VmaSuballocationList::iterator it = m_Suballocations.begin();
+                it != m_Suballocations.end();
+                ++it)
+            {
+                if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
+                    currentFrameIndex,
+                    frameInUseCount,
+                    bufferImageGranularity,
+                    allocSize,
+                    allocAlignment,
+                    allocType,
+                    it,
+                    false, // canMakeOtherLost
+                    &pAllocationRequest->offset,
+                    &pAllocationRequest->itemsToMakeLostCount,
+                    &pAllocationRequest->sumFreeSize,
+                    &pAllocationRequest->sumItemSize))
+                {
+                    pAllocationRequest->item = it;
+                    return true;
+                }
+            }
+        }
+        else // WORST_FIT, FIRST_FIT
+        {
+            // Search starting from biggest suballocations.
+            for(size_t index = freeSuballocCount; index--; )
+            {
+                if(CheckAllocation(
+                    currentFrameIndex,
+                    frameInUseCount,
+                    bufferImageGranularity,
+                    allocSize,
+                    allocAlignment,
+                    allocType,
+                    m_FreeSuballocationsBySize[index],
+                    false, // canMakeOtherLost
+                    &pAllocationRequest->offset,
+                    &pAllocationRequest->itemsToMakeLostCount,
+                    &pAllocationRequest->sumFreeSize,
+                    &pAllocationRequest->sumItemSize))
+                {
+                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
+                    return true;
+                }
+            }
+        }
+    }
+
+    if(canMakeOtherLost)
+    {
+        // Brute-force algorithm. TODO: Come up with something better.
+
+        pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
+        pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
+
+        VmaAllocationRequest tmpAllocRequest = {};
+        for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
+            suballocIt != m_Suballocations.end();
+            ++suballocIt)
+        {
+            if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
+                suballocIt->hAllocation->CanBecomeLost())
+            {
+                if(CheckAllocation(
+                    currentFrameIndex,
+                    frameInUseCount,
+                    bufferImageGranularity,
+                    allocSize,
+                    allocAlignment,
+                    allocType,
+                    suballocIt,
+                    canMakeOtherLost,
+                    &tmpAllocRequest.offset,
+                    &tmpAllocRequest.itemsToMakeLostCount,
+                    &tmpAllocRequest.sumFreeSize,
+                    &tmpAllocRequest.sumItemSize))
+                {
+                    tmpAllocRequest.item = suballocIt;
+
+                    if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost() ||
+                        strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
+                    {
+                        *pAllocationRequest = tmpAllocRequest;
+                    }
+                }
+            }
+        }
+
+        if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
+        {
+            return true;
+        }
+    }
+
+    return false;
+}
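+
+// Note (editorial): the branches above map to the public allocation strategies:
+// VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT binary-searches the size-sorted free
+// list, VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET scans the suballocation list
+// front to back, and the remaining strategies start from the largest free range.
+// A hedged caller-side sketch, assuming an existing allocator and a filled
+// VkBufferCreateInfo named bufInfo (both hypothetical here):
+//
+//     VmaAllocationCreateInfo allocCreateInfo = {};
+//     allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
+//     allocCreateInfo.flags = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
+//     VkBuffer buf = VK_NULL_HANDLE;
+//     VmaAllocation alloc = VK_NULL_HANDLE;
+//     VkResult res = vmaCreateBuffer(allocator, &bufInfo, &allocCreateInfo,
+//                                    &buf, &alloc, VMA_NULL);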
+
+bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
+    uint32_t currentFrameIndex,
+    uint32_t frameInUseCount,
+    VmaAllocationRequest* pAllocationRequest)
+{
+    while(pAllocationRequest->itemsToMakeLostCount > 0)
+    {
+        if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
+        {
+            ++pAllocationRequest->item;
+        }
+        VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
+        VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
+        VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
+        if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
+        {
+            pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
+            --pAllocationRequest->itemsToMakeLostCount;
+        }
+        else
+        {
+            return false;
+        }
+    }
+
+    VMA_HEAVY_ASSERT(Validate());
+    VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
+    VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
+    
+    return true;
+}
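+
+// Note (editorial): "lost" allocations are opt-in; only allocations created with
+// VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT can be reclaimed here, and only after
+// they have gone untouched for more than frameInUseCount frames. A hedged sketch of
+// the per-frame protocol on the application side (frameIndex and alloc are
+// hypothetical):
+//
+//     vmaSetCurrentFrameIndex(allocator, frameIndex);   // advance the frame counter
+//     if(vmaTouchAllocation(allocator, alloc) == VK_FALSE)
+//     {
+//         // The allocation was made lost; destroy it and recreate the resource.
+//     }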
+
+uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
+{
+    uint32_t lostAllocationCount = 0;
+    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
+        it != m_Suballocations.end();
+        ++it)
+    {
+        if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
+            it->hAllocation->CanBecomeLost() &&
+            it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
+        {
+            it = FreeSuballocation(it);
+            ++lostAllocationCount;
+        }
+    }
+    return lostAllocationCount;
+}
+
+VkResult VmaBlockMetadata_Generic::CheckCorruption(const void* pBlockData)
+{
+    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
+        it != m_Suballocations.end();
+        ++it)
+    {
+        if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
+        {
+            if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
+            {
+                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
+                return VK_ERROR_VALIDATION_FAILED_EXT;
+            }
+            if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
+            {
+                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
+                return VK_ERROR_VALIDATION_FAILED_EXT;
+            }
+        }
+    }
+
+    return VK_SUCCESS;
+}
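+
+// Note (editorial): CheckCorruption() only has data to validate when
+// VMA_DEBUG_MARGIN > 0 and VMA_DEBUG_DETECT_CORRUPTION writes the magic values;
+// pBlockData must be the mapped memory of this block. A hedged sketch of the public
+// entry points that end up here, assuming such a debug configuration:
+//
+//     VkResult res = vmaCheckCorruption(allocator, UINT32_MAX /* all memory types */);
+//     // or, for a specific custom pool:
+//     VkResult poolRes = vmaCheckPoolCorruption(allocator, pool);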
+
+void VmaBlockMetadata_Generic::Alloc(
+    const VmaAllocationRequest& request,
+    VmaSuballocationType type,
+    VkDeviceSize allocSize,
+    bool upperAddress,
+    VmaAllocation hAllocation)
+{
+    VMA_ASSERT(!upperAddress);
+    VMA_ASSERT(request.item != m_Suballocations.end());
+    VmaSuballocation& suballoc = *request.item;
+    // Given suballocation is a free block.
+    VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
+    // Given offset is inside this suballocation.
+    VMA_ASSERT(request.offset >= suballoc.offset);
+    const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
+    VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
+    const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
+
+    // Unregister this free suballocation from m_FreeSuballocationsBySize and update
+    // it to become used.
+    UnregisterFreeSuballocation(request.item);
+
+    suballoc.offset = request.offset;
+    suballoc.size = allocSize;
+    suballoc.type = type;
+    suballoc.hAllocation = hAllocation;
+
+    // If there are any free bytes remaining at the end, insert new free suballocation after current one.
+    if(paddingEnd)
+    {
+        VmaSuballocation paddingSuballoc = {};
+        paddingSuballoc.offset = request.offset + allocSize;
+        paddingSuballoc.size = paddingEnd;
+        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+        VmaSuballocationList::iterator next = request.item;
+        ++next;
+        const VmaSuballocationList::iterator paddingEndItem =
+            m_Suballocations.insert(next, paddingSuballoc);
+        RegisterFreeSuballocation(paddingEndItem);
+    }
+
+    // If there are any free bytes remaining at the beginning, insert new free suballocation before current one.
+    if(paddingBegin)
+    {
+        VmaSuballocation paddingSuballoc = {};
+        paddingSuballoc.offset = request.offset - paddingBegin;
+        paddingSuballoc.size = paddingBegin;
+        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+        const VmaSuballocationList::iterator paddingBeginItem =
+            m_Suballocations.insert(request.item, paddingSuballoc);
+        RegisterFreeSuballocation(paddingBeginItem);
+    }
+
+    // Update totals.
+    m_FreeCount = m_FreeCount - 1;
+    if(paddingBegin > 0)
+    {
+        ++m_FreeCount;
+    }
+    if(paddingEnd > 0)
+    {
+        ++m_FreeCount;
+    }
+    m_SumFreeSize -= allocSize;
+}
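+
+// Note (editorial): a small worked example of the bookkeeping above, with made-up
+// numbers. If the chosen free suballocation spans offset=0, size=1024 and the
+// aligned request lands at offset=256 with allocSize=512, then
+// paddingBegin = 256 - 0 = 256 and paddingEnd = 1024 - 256 - 512 = 256, so one FREE
+// item of 256 bytes is inserted before and one after the now-used item,
+// m_FreeCount goes from 1 to 2, and m_SumFreeSize drops by exactly 512.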
+
+void VmaBlockMetadata_Generic::Free(const VmaAllocation allocation)
+{
+    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
+        suballocItem != m_Suballocations.end();
+        ++suballocItem)
+    {
+        VmaSuballocation& suballoc = *suballocItem;
+        if(suballoc.hAllocation == allocation)
+        {
+            FreeSuballocation(suballocItem);
+            VMA_HEAVY_ASSERT(Validate());
+            return;
+        }
+    }
+    VMA_ASSERT(0 && "Not found!");
+}
+
+void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
+{
+    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
+        suballocItem != m_Suballocations.end();
+        ++suballocItem)
+    {
+        VmaSuballocation& suballoc = *suballocItem;
+        if(suballoc.offset == offset)
+        {
+            FreeSuballocation(suballocItem);
+            return;
+        }
+    }
+    VMA_ASSERT(0 && "Not found!");
+}
+
+bool VmaBlockMetadata_Generic::ResizeAllocation(const VmaAllocation alloc, VkDeviceSize newSize)
+{
+    typedef VmaSuballocationList::iterator iter_type;
+    for(iter_type suballocItem = m_Suballocations.begin();
+        suballocItem != m_Suballocations.end();
+        ++suballocItem)
+    {
+        VmaSuballocation& suballoc = *suballocItem;
+        if(suballoc.hAllocation == alloc)
+        {
+            iter_type nextItem = suballocItem;
+            ++nextItem;
+
+            // Should have been ensured on higher level.
+            VMA_ASSERT(newSize != alloc->GetSize() && newSize > 0);
+
+            // Shrinking.
+            if(newSize < alloc->GetSize())
+            {
+                const VkDeviceSize sizeDiff = suballoc.size - newSize;
+
+                // There is next item.
+                if(nextItem != m_Suballocations.end())
+                {
+                    // Next item is free.
+                    if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
+                    {
+                        // Grow this next item backward.
+                        UnregisterFreeSuballocation(nextItem);
+                        nextItem->offset -= sizeDiff;
+                        nextItem->size += sizeDiff;
+                        RegisterFreeSuballocation(nextItem);
+                    }
+                    // Next item is not free.
+                    else
+                    {
+                        // Create free item after current one.
+                        VmaSuballocation newFreeSuballoc;
+                        newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
+                        newFreeSuballoc.offset = suballoc.offset + newSize;
+                        newFreeSuballoc.size = sizeDiff;
+                        newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+                        iter_type newFreeSuballocIt = m_Suballocations.insert(nextItem, newFreeSuballoc);
+                        RegisterFreeSuballocation(newFreeSuballocIt);
+
+                        ++m_FreeCount;
+                    }
+                }
+                // This is the last item.
+                else
+                {
+                    // Create free item at the end.
+                    VmaSuballocation newFreeSuballoc;
+                    newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
+                    newFreeSuballoc.offset = suballoc.offset + newSize;
+                    newFreeSuballoc.size = sizeDiff;
+                    newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+                    m_Suballocations.push_back(newFreeSuballoc);
+
+                    iter_type newFreeSuballocIt = m_Suballocations.end();
+                    RegisterFreeSuballocation(--newFreeSuballocIt);
+
+                    ++m_FreeCount;
+                }
+
+                suballoc.size = newSize;
+                m_SumFreeSize += sizeDiff;
+            }
+            // Growing.
+            else
+            {
+                const VkDeviceSize sizeDiff = newSize - suballoc.size;
+
+                // There is next item.
+                if(nextItem != m_Suballocations.end())
+                {
+                    // Next item is free.
+                    if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
+                    {
+                        // There is not enough free space, including margin.
+                        if(nextItem->size < sizeDiff + VMA_DEBUG_MARGIN)
+                        {
+                            return false;
+                        }
+
+                        // There is more free space than required.
+                        if(nextItem->size > sizeDiff)
+                        {
+                            // Move and shrink this next item.
+                            UnregisterFreeSuballocation(nextItem);
+                            nextItem->offset += sizeDiff;
+                            nextItem->size -= sizeDiff;
+                            RegisterFreeSuballocation(nextItem);
+                        }
+                        // There is exactly the amount of free space required.
+                        else
+                        {
+                            // Remove this next free item.
+                            UnregisterFreeSuballocation(nextItem);
+                            m_Suballocations.erase(nextItem);
+                            --m_FreeCount;
+                        }
+                    }
+                    // Next item is not free - there is no space to grow.
+                    else
+                    {
+                        return false;
+                    }
+                }
+                // This is the last item - there is no space to grow.
+                else
+                {
+                    return false;
+                }
+
+                suballoc.size = newSize;
+                m_SumFreeSize -= sizeDiff;
+            }
+
+            // We cannot call Validate() here because alloc object is updated to new size outside of this call.
+            return true;
+        }
+    }
+    VMA_ASSERT(0 && "Not found!");
+    return false;
+}
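+
+// Note (editorial): ResizeAllocation() grows or shrinks an allocation in place by
+// consuming or extending the FREE suballocation that follows it. A hedged sketch of
+// the corresponding public call, assuming this import exposes vmaResizeAllocation():
+//
+//     VkResult res = vmaResizeAllocation(allocator, alloc, newSizeInBytes);
+//     if(res != VK_SUCCESS)
+//     {
+//         // No room to grow in place - allocate a new buffer and copy instead.
+//     }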
+
+bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList() const
+{
+    VkDeviceSize lastSize = 0;
+    for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
+    {
+        const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
+
+        VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
+        VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
+        VMA_VALIDATE(it->size >= lastSize);
+        lastSize = it->size;
+    }
+    return true;
+}
+
+bool VmaBlockMetadata_Generic::CheckAllocation(
+    uint32_t currentFrameIndex,
+    uint32_t frameInUseCount,
+    VkDeviceSize bufferImageGranularity,
+    VkDeviceSize allocSize,
+    VkDeviceSize allocAlignment,
+    VmaSuballocationType allocType,
+    VmaSuballocationList::const_iterator suballocItem,
+    bool canMakeOtherLost,
+    VkDeviceSize* pOffset,
+    size_t* itemsToMakeLostCount,
+    VkDeviceSize* pSumFreeSize,
+    VkDeviceSize* pSumItemSize) const
+{
+    VMA_ASSERT(allocSize > 0);
+    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
+    VMA_ASSERT(suballocItem != m_Suballocations.cend());
+    VMA_ASSERT(pOffset != VMA_NULL);
+    
+    *itemsToMakeLostCount = 0;
+    *pSumFreeSize = 0;
+    *pSumItemSize = 0;
+
+    if(canMakeOtherLost)
+    {
+        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
+        {
+            *pSumFreeSize = suballocItem->size;
+        }
+        else
+        {
+            if(suballocItem->hAllocation->CanBecomeLost() &&
+                suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
+            {
+                ++*itemsToMakeLostCount;
+                *pSumItemSize = suballocItem->size;
+            }
+            else
+            {
+                return false;
+            }
+        }
+
+        // Remaining size is too small for this request: Early return.
+        if(GetSize() - suballocItem->offset < allocSize)
+        {
+            return false;
+        }
+
+        // Start from offset equal to beginning of this suballocation.
+        *pOffset = suballocItem->offset;
+    
+        // Apply VMA_DEBUG_MARGIN at the beginning.
+        if(VMA_DEBUG_MARGIN > 0)
+        {
+            *pOffset += VMA_DEBUG_MARGIN;
+        }
+    
+        // Apply alignment.
+        *pOffset = VmaAlignUp(*pOffset, allocAlignment);
+
+        // Check previous suballocations for BufferImageGranularity conflicts.
+        // Make bigger alignment if necessary.
+        if(bufferImageGranularity > 1)
+        {
+            bool bufferImageGranularityConflict = false;
+            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
+            while(prevSuballocItem != m_Suballocations.cbegin())
+            {
+                --prevSuballocItem;
+                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
+                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
+                {
+                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
+                    {
+                        bufferImageGranularityConflict = true;
+                        break;
+                    }
+                }
+                else
+                    // Already on previous page.
+                    break;
+            }
+            if(bufferImageGranularityConflict)
+            {
+                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
+            }
+        }
+    
+        // Now that we have final *pOffset, check if we are past suballocItem.
+        // If yes, return false - this function should be called for another suballocItem as starting point.
+        if(*pOffset >= suballocItem->offset + suballocItem->size)
+        {
+            return false;
+        }
+    
+        // Calculate padding at the beginning based on current offset.
+        const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
+
+        // Calculate required margin at the end.
+        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
+
+        const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
+        // Another early return check.
+        if(suballocItem->offset + totalSize > GetSize())
+        {
+            return false;
+        }
+
+        // Advance lastSuballocItem until desired size is reached.
+        // Update itemsToMakeLostCount.
+        VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
+        if(totalSize > suballocItem->size)
+        {
+            VkDeviceSize remainingSize = totalSize - suballocItem->size;
+            while(remainingSize > 0)
+            {
+                ++lastSuballocItem;
+                if(lastSuballocItem == m_Suballocations.cend())
+                {
+                    return false;
+                }
+                if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
+                {
+                    *pSumFreeSize += lastSuballocItem->size;
+                }
+                else
+                {
+                    VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
+                    if(lastSuballocItem->hAllocation->CanBecomeLost() &&
+                        lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
+                    {
+                        ++*itemsToMakeLostCount;
+                        *pSumItemSize += lastSuballocItem->size;
+                    }
+                    else
+                    {
+                        return false;
+                    }
+                }
+                remainingSize = (lastSuballocItem->size < remainingSize) ?
+                    remainingSize - lastSuballocItem->size : 0;
+            }
+        }
+
+        // Check next suballocations for BufferImageGranularity conflicts.
+        // If conflict exists, we must mark more allocations lost or fail.
+        if(bufferImageGranularity > 1)
+        {
+            VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
+            ++nextSuballocItem;
+            while(nextSuballocItem != m_Suballocations.cend())
+            {
+                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
+                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
+                {
+                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
+                    {
+                        VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
+                        if(nextSuballoc.hAllocation->CanBecomeLost() &&
+                            nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
+                        {
+                            ++*itemsToMakeLostCount;
+                        }
+                        else
+                        {
+                            return false;
+                        }
+                    }
+                }
+                else
+                {
+                    // Already on next page.
+                    break;
+                }
+                ++nextSuballocItem;
+            }
+        }
+    }
+    else
+    {
+        const VmaSuballocation& suballoc = *suballocItem;
+        VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
+
+        *pSumFreeSize = suballoc.size;
+
+        // Size of this suballocation is too small for this request: Early return.
+        if(suballoc.size < allocSize)
+        {
+            return false;
+        }
+
+        // Start from offset equal to beginning of this suballocation.
+        *pOffset = suballoc.offset;
+    
+        // Apply VMA_DEBUG_MARGIN at the beginning.
+        if(VMA_DEBUG_MARGIN > 0)
+        {
+            *pOffset += VMA_DEBUG_MARGIN;
+        }
+    
+        // Apply alignment.
+        *pOffset = VmaAlignUp(*pOffset, allocAlignment);
+    
+        // Check previous suballocations for BufferImageGranularity conflicts.
+        // Make bigger alignment if necessary.
+        if(bufferImageGranularity > 1)
+        {
+            bool bufferImageGranularityConflict = false;
+            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
+            while(prevSuballocItem != m_Suballocations.cbegin())
+            {
+                --prevSuballocItem;
+                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
+                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
+                {
+                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
+                    {
+                        bufferImageGranularityConflict = true;
+                        break;
+                    }
+                }
+                else
+                    // Already on previous page.
+                    break;
+            }
+            if(bufferImageGranularityConflict)
+            {
+                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
+            }
+        }
+    
+        // Calculate padding at the beginning based on current offset.
+        const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
+
+        // Calculate required margin at the end.
+        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
+
+        // Fail if requested size plus margin before and after is bigger than size of this suballocation.
+        if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
+        {
+            return false;
+        }
+
+        // Check next suballocations for BufferImageGranularity conflicts.
+        // If conflict exists, allocation cannot be made here.
+        if(bufferImageGranularity > 1)
+        {
+            VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
+            ++nextSuballocItem;
+            while(nextSuballocItem != m_Suballocations.cend())
+            {
+                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
+                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
+                {
+                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
+                    {
+                        return false;
+                    }
+                }
+                else
+                {
+                    // Already on next page.
+                    break;
+                }
+                ++nextSuballocItem;
+            }
+        }
+    }
+
+    // All tests passed: Success. pOffset is already filled.
+    return true;
+}
+
+void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
+{
+    VMA_ASSERT(item != m_Suballocations.end());
+    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
+    
+    VmaSuballocationList::iterator nextItem = item;
+    ++nextItem;
+    VMA_ASSERT(nextItem != m_Suballocations.end());
+    VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
+
+    item->size += nextItem->size;
+    --m_FreeCount;
+    m_Suballocations.erase(nextItem);
+}
+
+VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
+{
+    // Change this suballocation to be marked as free.
+    VmaSuballocation& suballoc = *suballocItem;
+    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+    suballoc.hAllocation = VK_NULL_HANDLE;
+    
+    // Update totals.
+    ++m_FreeCount;
+    m_SumFreeSize += suballoc.size;
+
+    // Merge with previous and/or next suballocation if it's also free.
+    bool mergeWithNext = false;
+    bool mergeWithPrev = false;
+    
+    VmaSuballocationList::iterator nextItem = suballocItem;
+    ++nextItem;
+    if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
+    {
+        mergeWithNext = true;
+    }
+
+    VmaSuballocationList::iterator prevItem = suballocItem;
+    if(suballocItem != m_Suballocations.begin())
+    {
+        --prevItem;
+        if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
+        {
+            mergeWithPrev = true;
+        }
+    }
+
+    if(mergeWithNext)
+    {
+        UnregisterFreeSuballocation(nextItem);
+        MergeFreeWithNext(suballocItem);
+    }
+
+    if(mergeWithPrev)
+    {
+        UnregisterFreeSuballocation(prevItem);
+        MergeFreeWithNext(prevItem);
+        RegisterFreeSuballocation(prevItem);
+        return prevItem;
+    }
+    else
+    {
+        RegisterFreeSuballocation(suballocItem);
+        return suballocItem;
+    }
+}
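+
+// Note (editorial): a small worked example of the merge logic above, with made-up
+// offsets. Given [FREE 0..64][USED 64..128][FREE 128..256], freeing the middle item
+// first marks it FREE, mergeWithNext then folds 128..256 into it, and mergeWithPrev
+// folds the result into the 0..64 item, leaving a single [FREE 0..256] entry that is
+// re-registered in m_FreeSuballocationsBySize and returned to the caller.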
+
+void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
+{
+    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
+    VMA_ASSERT(item->size > 0);
+
+    // You may want to enable this validation at the beginning or at the end of
+    // this function, depending on what you want to check.
+    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
+
+    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
+    {
+        if(m_FreeSuballocationsBySize.empty())
+        {
+            m_FreeSuballocationsBySize.push_back(item);
+        }
+        else
+        {
+            VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
+        }
+    }
+
+    //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
+}
+
+
+void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
+{
+    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
+    VMA_ASSERT(item->size > 0);
+
+    // You may want to enable this validation at the beginning or at the end of
+    // this function, depending on what you want to check.
+    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
+
+    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
+    {
+        VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
+            m_FreeSuballocationsBySize.data(),
+            m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
+            item,
+            VmaSuballocationItemSizeLess());
+        for(size_t index = it - m_FreeSuballocationsBySize.data();
+            index < m_FreeSuballocationsBySize.size();
+            ++index)
+        {
+            if(m_FreeSuballocationsBySize[index] == item)
+            {
+                VmaVectorRemove(m_FreeSuballocationsBySize, index);
+                return;
+            }
+            VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
+        }
+        VMA_ASSERT(0 && "Not found.");
+    }
+
+    //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
+}
+
+bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
+    VkDeviceSize bufferImageGranularity,
+    VmaSuballocationType& inOutPrevSuballocType) const
+{
+    if(bufferImageGranularity == 1 || IsEmpty())
+    {
+        return false;
+    }
+
+    VkDeviceSize minAlignment = VK_WHOLE_SIZE;
+    bool typeConflictFound = false;
+    for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
+        it != m_Suballocations.cend();
+        ++it)
+    {
+        const VmaSuballocationType suballocType = it->type;
+        if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
+        {
+            minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
+            if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
+            {
+                typeConflictFound = true;
+            }
+            inOutPrevSuballocType = suballocType;
+        }
+    }
+
+    return typeConflictFound || minAlignment >= bufferImageGranularity;
+}
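+
+// Note (editorial): bufferImageGranularity in the checks above is the device limit
+// VkPhysicalDeviceLimits::bufferImageGranularity. When it is greater than 1, linear
+// (buffer-like) and optimal-tiling (image-like) suballocations that share a page of
+// the block conflict and must be kept on separate pages, which is what
+// VmaBlocksOnSamePage() and VmaIsBufferImageGranularityConflict() test for in
+// CheckAllocation() above.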
+
+////////////////////////////////////////////////////////////////////////////////
+// class VmaBlockMetadata_Linear
+
+VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(VmaAllocator hAllocator) :
+    VmaBlockMetadata(hAllocator),
+    m_SumFreeSize(0),
+    m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
+    m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
+    m_1stVectorIndex(0),
+    m_2ndVectorMode(SECOND_VECTOR_EMPTY),
+    m_1stNullItemsBeginCount(0),
+    m_1stNullItemsMiddleCount(0),
+    m_2ndNullItemsCount(0)
+{
+}
+
+VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
+{
+}
+
+void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
+{
+    VmaBlockMetadata::Init(size);
+    m_SumFreeSize = size;
+}
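+
+// Note (editorial): the linear metadata backs custom pools created with
+// VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT (ring buffer / stack / double stack).
+// A hedged creation sketch, assuming a valid memoryTypeIndex (hypothetical here):
+//
+//     VmaPoolCreateInfo poolInfo = {};
+//     poolInfo.memoryTypeIndex = memoryTypeIndex;
+//     poolInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
+//     poolInfo.blockSize = 64ull * 1024 * 1024;   // one fixed 64 MiB block
+//     poolInfo.maxBlockCount = 1;                 // keep a single block for the ring
+//     VmaPool pool = VK_NULL_HANDLE;
+//     VkResult res = vmaCreatePool(allocator, &poolInfo, &pool);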
+
+bool VmaBlockMetadata_Linear::Validate() const
+{
+    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+
+    VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
+    VMA_VALIDATE(!suballocations1st.empty() ||
+        suballocations2nd.empty() ||
+        m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
+
+    if(!suballocations1st.empty())
+    {
+        // Null item at the beginning should be accounted into m_1stNullItemsBeginCount.
+        VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
+        // Null item at the end should be just pop_back().
+        VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
+    }
+    if(!suballocations2nd.empty())
+    {
+        // Null item at the end should be just pop_back().
+        VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
+    }
+
+    VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
+    VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
+
+    VkDeviceSize sumUsedSize = 0;
+    const size_t suballoc1stCount = suballocations1st.size();
+    VkDeviceSize offset = VMA_DEBUG_MARGIN;
+
+    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
+    {
+        const size_t suballoc2ndCount = suballocations2nd.size();
+        size_t nullItem2ndCount = 0;
+        for(size_t i = 0; i < suballoc2ndCount; ++i)
+        {
+            const VmaSuballocation& suballoc = suballocations2nd[i];
+            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
+
+            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
+            VMA_VALIDATE(suballoc.offset >= offset);
+
+            if(!currFree)
+            {
+                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
+                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
+                sumUsedSize += suballoc.size;
+            }
+            else
+            {
+                ++nullItem2ndCount;
+            }
+
+            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
+        }
+
+        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
+    }
+
+    for(size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
+    {
+        const VmaSuballocation& suballoc = suballocations1st[i];
+        VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
+            suballoc.hAllocation == VK_NULL_HANDLE);
+    }
+
+    size_t nullItem1stCount = m_1stNullItemsBeginCount;
+
+    for(size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
+    {
+        const VmaSuballocation& suballoc = suballocations1st[i];
+        const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
+
+        VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
+        VMA_VALIDATE(suballoc.offset >= offset);
+        VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
+
+        if(!currFree)
+        {
+            VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
+            VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
+            sumUsedSize += suballoc.size;
+        }
+        else
+        {
+            ++nullItem1stCount;
+        }
+
+        offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
+    }
+    VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
+
+    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
+    {
+        const size_t suballoc2ndCount = suballocations2nd.size();
+        size_t nullItem2ndCount = 0;
+        for(size_t i = suballoc2ndCount; i--; )
+        {
+            const VmaSuballocation& suballoc = suballocations2nd[i];
+            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
+
+            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
+            VMA_VALIDATE(suballoc.offset >= offset);
+
+            if(!currFree)
+            {
+                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
+                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
+                sumUsedSize += suballoc.size;
+            }
+            else
+            {
+                ++nullItem2ndCount;
+            }
+
+            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
+        }
+
+        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
+    }
+
+    VMA_VALIDATE(offset <= GetSize());
+    VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
+
+    return true;
+}
+
+size_t VmaBlockMetadata_Linear::GetAllocationCount() const
+{
+    return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
+        AccessSuballocations2nd().size() - m_2ndNullItemsCount;
+}
+
+VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax() const
+{
+    const VkDeviceSize size = GetSize();
+
+    /*
+    We don't consider gaps inside allocation vectors with freed allocations because
+    they are not suitable for reuse in linear allocator. We consider only space that
+    is available for new allocations.
+    */
+    if(IsEmpty())
+    {
+        return size;
+    }
+    
+    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+
+    switch(m_2ndVectorMode)
+    {
+    case SECOND_VECTOR_EMPTY:
+        /*
+        Available space is after end of 1st, as well as before beginning of 1st (which
+        would make it a ring buffer).
+        */
+        {
+            const size_t suballocations1stCount = suballocations1st.size();
+            VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
+            const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
+            const VmaSuballocation& lastSuballoc  = suballocations1st[suballocations1stCount - 1];
+            return VMA_MAX(
+                firstSuballoc.offset,
+                size - (lastSuballoc.offset + lastSuballoc.size));
+        }
+        break;
+
+    case SECOND_VECTOR_RING_BUFFER:
+        /*
+        Available space is only between end of 2nd and beginning of 1st.
+        */
+        {
+            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+            const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
+            const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
+            return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
+        }
+        break;
+
+    case SECOND_VECTOR_DOUBLE_STACK:
+        /*
+        Available space is only between end of 1st and top of 2nd.
+        */
+        {
+            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+            const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
+            const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
+            return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
+        }
+        break;
+
+    default:
+        VMA_ASSERT(0);
+        return 0;
+    }
+}
+
+void VmaBlockMetadata_Linear::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
+{
+    const VkDeviceSize size = GetSize();
+    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+    const size_t suballoc1stCount = suballocations1st.size();
+    const size_t suballoc2ndCount = suballocations2nd.size();
+
+    outInfo.blockCount = 1;
+    outInfo.allocationCount = (uint32_t)GetAllocationCount();
+    outInfo.unusedRangeCount = 0;
+    outInfo.usedBytes = 0;
+    outInfo.allocationSizeMin = UINT64_MAX;
+    outInfo.allocationSizeMax = 0;
+    outInfo.unusedRangeSizeMin = UINT64_MAX;
+    outInfo.unusedRangeSizeMax = 0;
+
+    VkDeviceSize lastOffset = 0;
+
+    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
+    {
+        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
+        size_t nextAlloc2ndIndex = 0;
+        while(lastOffset < freeSpace2ndTo1stEnd)
+        {
+            // Find next non-null allocation or move nextAllocIndex to the end.
+            while(nextAlloc2ndIndex < suballoc2ndCount &&
+                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
+            {
+                ++nextAlloc2ndIndex;
+            }
+
+            // Found non-null allocation.
+            if(nextAlloc2ndIndex < suballoc2ndCount)
+            {
+                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
+            
+                // 1. Process free space before this allocation.
+                if(lastOffset < suballoc.offset)
+                {
+                    // There is free space from lastOffset to suballoc.offset.
+                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
+                    ++outInfo.unusedRangeCount;
+                    outInfo.unusedBytes += unusedRangeSize;
+                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
+                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
+                }
+            
+                // 2. Process this allocation.
+                // There is allocation with suballoc.offset, suballoc.size.
+                outInfo.usedBytes += suballoc.size;
+                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
+                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
+            
+                // 3. Prepare for next iteration.
+                lastOffset = suballoc.offset + suballoc.size;
+                ++nextAlloc2ndIndex;
+            }
+            // We are at the end.
+            else
+            {
+                // There is free space from lastOffset to freeSpace2ndTo1stEnd.
+                if(lastOffset < freeSpace2ndTo1stEnd)
+                {
+                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
+                    ++outInfo.unusedRangeCount;
+                    outInfo.unusedBytes += unusedRangeSize;
+                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
+                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
+                }
+
+                // End of loop.
+                lastOffset = freeSpace2ndTo1stEnd;
+            }
+        }
+    }
+
+    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
+    const VkDeviceSize freeSpace1stTo2ndEnd =
+        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
+    while(lastOffset < freeSpace1stTo2ndEnd)
+    {
+        // Find next non-null allocation or move nextAllocIndex to the end.
+        while(nextAlloc1stIndex < suballoc1stCount &&
+            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
+        {
+            ++nextAlloc1stIndex;
+        }
+
+        // Found non-null allocation.
+        if(nextAlloc1stIndex < suballoc1stCount)
+        {
+            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
+            
+            // 1. Process free space before this allocation.
+            if(lastOffset < suballoc.offset)
+            {
+                // There is free space from lastOffset to suballoc.offset.
+                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
+                ++outInfo.unusedRangeCount;
+                outInfo.unusedBytes += unusedRangeSize;
+                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
+                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
+            }
+            
+            // 2. Process this allocation.
+            // There is allocation with suballoc.offset, suballoc.size.
+            outInfo.usedBytes += suballoc.size;
+            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
+            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
+            
+            // 3. Prepare for next iteration.
+            lastOffset = suballoc.offset + suballoc.size;
+            ++nextAlloc1stIndex;
+        }
+        // We are at the end.
+        else
+        {
+            // There is free space from lastOffset to freeSpace1stTo2ndEnd.
+            if(lastOffset < freeSpace1stTo2ndEnd)
+            {
+                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
+                ++outInfo.unusedRangeCount;
+                outInfo.unusedBytes += unusedRangeSize;
+                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
+                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
+            }
+
+            // End of loop.
+            lastOffset = freeSpace1stTo2ndEnd;
+        }
+    }
+
+    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
+    {
+        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
+        while(lastOffset < size)
+        {
+            // Find next non-null allocation or move nextAllocIndex to the end.
+            while(nextAlloc2ndIndex != SIZE_MAX &&
+                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
+            {
+                --nextAlloc2ndIndex;
+            }
+
+            // Found non-null allocation.
+            if(nextAlloc2ndIndex != SIZE_MAX)
+            {
+                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
+            
+                // 1. Process free space before this allocation.
+                if(lastOffset < suballoc.offset)
+                {
+                    // There is free space from lastOffset to suballoc.offset.
+                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
+                    ++outInfo.unusedRangeCount;
+                    outInfo.unusedBytes += unusedRangeSize;
+                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
+                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
+                }
+            
+                // 2. Process this allocation.
+                // There is allocation with suballoc.offset, suballoc.size.
+                outInfo.usedBytes += suballoc.size;
+                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
+                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
+            
+                // 3. Prepare for next iteration.
+                lastOffset = suballoc.offset + suballoc.size;
+                --nextAlloc2ndIndex;
+            }
+            // We are at the end.
+            else
+            {
+                // There is free space from lastOffset to size.
+                if(lastOffset < size)
+                {
+                    const VkDeviceSize unusedRangeSize = size - lastOffset;
+                    ++outInfo.unusedRangeCount;
+                    outInfo.unusedBytes += unusedRangeSize;
+                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
+                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
+                }
+
+                // End of loop.
+                lastOffset = size;
+            }
+        }
+    }
+
+    outInfo.unusedBytes = size - outInfo.usedBytes;
+}
+
+void VmaBlockMetadata_Linear::AddPoolStats(VmaPoolStats& inoutStats) const
+{
+    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+    const VkDeviceSize size = GetSize();
+    const size_t suballoc1stCount = suballocations1st.size();
+    const size_t suballoc2ndCount = suballocations2nd.size();
+
+    inoutStats.size += size;
+
+    VkDeviceSize lastOffset = 0;
+
+    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
+    {
+        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
+        size_t nextAlloc2ndIndex = 0;
+        while(lastOffset < freeSpace2ndTo1stEnd)
+        {
+            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
+            while(nextAlloc2ndIndex < suballoc2ndCount &&
+                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
+            {
+                ++nextAlloc2ndIndex;
+            }
+
+            // Found non-null allocation.
+            if(nextAlloc2ndIndex < suballoc2ndCount)
+            {
+                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
+            
+                // 1. Process free space before this allocation.
+                if(lastOffset < suballoc.offset)
+                {
+                    // There is free space from lastOffset to suballoc.offset.
+                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
+                    inoutStats.unusedSize += unusedRangeSize;
+                    ++inoutStats.unusedRangeCount;
+                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
+                }
+            
+                // 2. Process this allocation.
+                // There is allocation with suballoc.offset, suballoc.size.
+                ++inoutStats.allocationCount;
+            
+                // 3. Prepare for next iteration.
+                lastOffset = suballoc.offset + suballoc.size;
+                ++nextAlloc2ndIndex;
+            }
+            // We are at the end.
+            else
+            {
+                if(lastOffset < freeSpace2ndTo1stEnd)
+                {
+                    // There is free space from lastOffset to freeSpace2ndTo1stEnd.
+                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
+                    inoutStats.unusedSize += unusedRangeSize;
+                    ++inoutStats.unusedRangeCount;
+                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
+                }
+
+                // End of loop.
+                lastOffset = freeSpace2ndTo1stEnd;
+            }
+        }
+    }
+
+    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
+    const VkDeviceSize freeSpace1stTo2ndEnd =
+        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
+    while(lastOffset < freeSpace1stTo2ndEnd)
+    {
+        // Find next non-null allocation or move nextAlloc1stIndex to the end.
+        while(nextAlloc1stIndex < suballoc1stCount &&
+            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
+        {
+            ++nextAlloc1stIndex;
+        }
+
+        // Found non-null allocation.
+        if(nextAlloc1stIndex < suballoc1stCount)
+        {
+            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
+            
+            // 1. Process free space before this allocation.
+            if(lastOffset < suballoc.offset)
+            {
+                // There is free space from lastOffset to suballoc.offset.
+                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
+                inoutStats.unusedSize += unusedRangeSize;
+                ++inoutStats.unusedRangeCount;
+                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
+            }
+            
+            // 2. Process this allocation.
+            // There is allocation with suballoc.offset, suballoc.size.
+            ++inoutStats.allocationCount;
+            
+            // 3. Prepare for next iteration.
+            lastOffset = suballoc.offset + suballoc.size;
+            ++nextAlloc1stIndex;
+        }
+        // We are at the end.
+        else
+        {
+            if(lastOffset < freeSpace1stTo2ndEnd)
+            {
+                // There is free space from lastOffset to freeSpace1stTo2ndEnd.
+                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
+                inoutStats.unusedSize += unusedRangeSize;
+                ++inoutStats.unusedRangeCount;
+                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
+            }
+
+            // End of loop.
+            lastOffset = freeSpace1stTo2ndEnd;
+        }
+    }
+
+    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
+    {
+        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
+        while(lastOffset < size)
+        {
+            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
+            while(nextAlloc2ndIndex != SIZE_MAX &&
+                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
+            {
+                --nextAlloc2ndIndex;
+            }
+
+            // Found non-null allocation.
+            if(nextAlloc2ndIndex != SIZE_MAX)
+            {
+                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
+            
+                // 1. Process free space before this allocation.
+                if(lastOffset < suballoc.offset)
+                {
+                    // There is free space from lastOffset to suballoc.offset.
+                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
+                    inoutStats.unusedSize += unusedRangeSize;
+                    ++inoutStats.unusedRangeCount;
+                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
+                }
+            
+                // 2. Process this allocation.
+                // There is allocation with suballoc.offset, suballoc.size.
+                ++inoutStats.allocationCount;
+            
+                // 3. Prepare for next iteration.
+                lastOffset = suballoc.offset + suballoc.size;
+                --nextAlloc2ndIndex;
+            }
+            // We are at the end.
+            else
+            {
+                if(lastOffset < size)
+                {
+                    // There is free space from lastOffset to size.
+                    const VkDeviceSize unusedRangeSize = size - lastOffset;
+                    inoutStats.unusedSize += unusedRangeSize;
+                    ++inoutStats.unusedRangeCount;
+                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
+                }
+
+                // End of loop.
+                lastOffset = size;
+            }
+        }
+    }
+}
+
+#if VMA_STATS_STRING_ENABLED
+void VmaBlockMetadata_Linear::PrintDetailedMap(class VmaJsonWriter& json) const
+{
+    const VkDeviceSize size = GetSize();
+    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+    const size_t suballoc1stCount = suballocations1st.size();
+    const size_t suballoc2ndCount = suballocations2nd.size();
+
+    // FIRST PASS
+
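+    // The first pass only counts allocations and unused ranges so that
+    // PrintDetailedMap_Begin() can be called with exact totals; the second
+    // pass further below emits the individual entries to JSON.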
+    size_t unusedRangeCount = 0;
+    VkDeviceSize usedBytes = 0;
+
+    VkDeviceSize lastOffset = 0;
+
+    size_t alloc2ndCount = 0;
+    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
+    {
+        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
+        size_t nextAlloc2ndIndex = 0;
+        while(lastOffset < freeSpace2ndTo1stEnd)
+        {
+            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
+            while(nextAlloc2ndIndex < suballoc2ndCount &&
+                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
+            {
+                ++nextAlloc2ndIndex;
+            }
+
+            // Found non-null allocation.
+            if(nextAlloc2ndIndex < suballoc2ndCount)
+            {
+                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
+            
+                // 1. Process free space before this allocation.
+                if(lastOffset < suballoc.offset)
+                {
+                    // There is free space from lastOffset to suballoc.offset.
+                    ++unusedRangeCount;
+                }
+            
+                // 2. Process this allocation.
+                // There is allocation with suballoc.offset, suballoc.size.
+                ++alloc2ndCount;
+                usedBytes += suballoc.size;
+            
+                // 3. Prepare for next iteration.
+                lastOffset = suballoc.offset + suballoc.size;
+                ++nextAlloc2ndIndex;
+            }
+            // We are at the end.
+            else
+            {
+                if(lastOffset < freeSpace2ndTo1stEnd)
+                {
+                    // There is free space from lastOffset to freeSpace2ndTo1stEnd.
+                    ++unusedRangeCount;
+                }
+
+                // End of loop.
+                lastOffset = freeSpace2ndTo1stEnd;
+            }
+        }
+    }
+
+    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
+    size_t alloc1stCount = 0;
+    const VkDeviceSize freeSpace1stTo2ndEnd =
+        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
+    while(lastOffset < freeSpace1stTo2ndEnd)
+    {
+        // Find next non-null allocation or move nextAlloc1stIndex to the end.
+        while(nextAlloc1stIndex < suballoc1stCount &&
+            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
+        {
+            ++nextAlloc1stIndex;
+        }
+
+        // Found non-null allocation.
+        if(nextAlloc1stIndex < suballoc1stCount)
+        {
+            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
+            
+            // 1. Process free space before this allocation.
+            if(lastOffset < suballoc.offset)
+            {
+                // There is free space from lastOffset to suballoc.offset.
+                ++unusedRangeCount;
+            }
+            
+            // 2. Process this allocation.
+            // There is allocation with suballoc.offset, suballoc.size.
+            ++alloc1stCount;
+            usedBytes += suballoc.size;
+            
+            // 3. Prepare for next iteration.
+            lastOffset = suballoc.offset + suballoc.size;
+            ++nextAlloc1stIndex;
+        }
+        // We are at the end.
+        else
+        {
+            if(lastOffset < freeSpace1stTo2ndEnd)
+            {
+                // There is free space from lastOffset to freeSpace1stTo2ndEnd.
+                ++unusedRangeCount;
+            }
+
+            // End of loop.
+            lastOffset = freeSpace1stTo2ndEnd;
+        }
+    }
+
+    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
+    {
+        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
+        while(lastOffset < size)
+        {
+            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
+            while(nextAlloc2ndIndex != SIZE_MAX &&
+                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
+            {
+                --nextAlloc2ndIndex;
+            }
+
+            // Found non-null allocation.
+            if(nextAlloc2ndIndex != SIZE_MAX)
+            {
+                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
+            
+                // 1. Process free space before this allocation.
+                if(lastOffset < suballoc.offset)
+                {
+                    // There is free space from lastOffset to suballoc.offset.
+                    ++unusedRangeCount;
+                }
+            
+                // 2. Process this allocation.
+                // There is allocation with suballoc.offset, suballoc.size.
+                ++alloc2ndCount;
+                usedBytes += suballoc.size;
+            
+                // 3. Prepare for next iteration.
+                lastOffset = suballoc.offset + suballoc.size;
+                --nextAlloc2ndIndex;
+            }
+            // We are at the end.
+            else
+            {
+                if(lastOffset < size)
+                {
+                    // There is free space from lastOffset to size.
+                    ++unusedRangeCount;
+                }
+
+                // End of loop.
+                lastOffset = size;
+            }
+        }
+    }
+
+    const VkDeviceSize unusedBytes = size - usedBytes;
+    PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
+
+    // SECOND PASS
+    lastOffset = 0;
+
+    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
+    {
+        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
+        size_t nextAlloc2ndIndex = 0;
+        while(lastOffset < freeSpace2ndTo1stEnd)
+        {
+            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
+            while(nextAlloc2ndIndex < suballoc2ndCount &&
+                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
+            {
+                ++nextAlloc2ndIndex;
+            }
+
+            // Found non-null allocation.
+            if(nextAlloc2ndIndex < suballoc2ndCount)
+            {
+                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
+            
+                // 1. Process free space before this allocation.
+                if(lastOffset < suballoc.offset)
+                {
+                    // There is free space from lastOffset to suballoc.offset.
+                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
+                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
+                }
+            
+                // 2. Process this allocation.
+                // There is allocation with suballoc.offset, suballoc.size.
+                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
+            
+                // 3. Prepare for next iteration.
+                lastOffset = suballoc.offset + suballoc.size;
+                ++nextAlloc2ndIndex;
+            }
+            // We are at the end.
+            else
+            {
+                if(lastOffset < freeSpace2ndTo1stEnd)
+                {
+                    // There is free space from lastOffset to freeSpace2ndTo1stEnd.
+                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
+                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
+                }
+
+                // End of loop.
+                lastOffset = freeSpace2ndTo1stEnd;
+            }
+        }
+    }
+
+    nextAlloc1stIndex = m_1stNullItemsBeginCount;
+    while(lastOffset < freeSpace1stTo2ndEnd)
+    {
+        // Find next non-null allocation or move nextAlloc1stIndex to the end.
+        while(nextAlloc1stIndex < suballoc1stCount &&
+            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
+        {
+            ++nextAlloc1stIndex;
+        }
+
+        // Found non-null allocation.
+        if(nextAlloc1stIndex < suballoc1stCount)
+        {
+            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
+            
+            // 1. Process free space before this allocation.
+            if(lastOffset < suballoc.offset)
+            {
+                // There is free space from lastOffset to suballoc.offset.
+                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
+                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
+            }
+            
+            // 2. Process this allocation.
+            // There is allocation with suballoc.offset, suballoc.size.
+            PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
+            
+            // 3. Prepare for next iteration.
+            lastOffset = suballoc.offset + suballoc.size;
+            ++nextAlloc1stIndex;
+        }
+        // We are at the end.
+        else
+        {
+            if(lastOffset < freeSpace1stTo2ndEnd)
+            {
+                // There is free space from lastOffset to freeSpace1stTo2ndEnd.
+                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
+                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
+            }
+
+            // End of loop.
+            lastOffset = freeSpace1stTo2ndEnd;
+        }
+    }
+
+    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
+    {
+        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
+        while(lastOffset < size)
+        {
+            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
+            while(nextAlloc2ndIndex != SIZE_MAX &&
+                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
+            {
+                --nextAlloc2ndIndex;
+            }
+
+            // Found non-null allocation.
+            if(nextAlloc2ndIndex != SIZE_MAX)
+            {
+                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
+            
+                // 1. Process free space before this allocation.
+                if(lastOffset < suballoc.offset)
+                {
+                    // There is free space from lastOffset to suballoc.offset.
+                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
+                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
+                }
+            
+                // 2. Process this allocation.
+                // There is allocation with suballoc.offset, suballoc.size.
+                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
+            
+                // 3. Prepare for next iteration.
+                lastOffset = suballoc.offset + suballoc.size;
+                --nextAlloc2ndIndex;
+            }
+            // We are at the end.
+            else
+            {
+                if(lastOffset < size)
+                {
+                    // There is free space from lastOffset to size.
+                    const VkDeviceSize unusedRangeSize = size - lastOffset;
+                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
+                }
+
+                // End of loop.
+                lastOffset = size;
+            }
+        }
+    }
+
+    PrintDetailedMap_End(json);
+}
+#endif // #if VMA_STATS_STRING_ENABLED
+
+bool VmaBlockMetadata_Linear::CreateAllocationRequest(
+    uint32_t currentFrameIndex,
+    uint32_t frameInUseCount,
+    VkDeviceSize bufferImageGranularity,
+    VkDeviceSize allocSize,
+    VkDeviceSize allocAlignment,
+    bool upperAddress,
+    VmaSuballocationType allocType,
+    bool canMakeOtherLost,
+    uint32_t strategy,
+    VmaAllocationRequest* pAllocationRequest)
+{
+    VMA_ASSERT(allocSize > 0);
+    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
+    VMA_ASSERT(pAllocationRequest != VMA_NULL);
+    VMA_HEAVY_ASSERT(Validate());
+
+    const VkDeviceSize size = GetSize();
+    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+
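+    // upperAddress requests an allocation growing down from the end of the
+    // block; the 2nd vector then serves as an upper stack (double stack mode).
+    // Otherwise the allocation goes at the end of the 1st vector or, if that
+    // does not fit, wraps around to the 2nd vector used as a ring buffer.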
+    if(upperAddress)
+    {
+        if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
+        {
+            VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
+            return false;
+        }
+
+        // Try to allocate before 2nd.back(), or end of block if 2nd.empty().
+        if(allocSize > size)
+        {
+            return false;
+        }
+        VkDeviceSize resultBaseOffset = size - allocSize;
+        if(!suballocations2nd.empty())
+        {
+            const VmaSuballocation& lastSuballoc = suballocations2nd.back();
+            resultBaseOffset = lastSuballoc.offset - allocSize;
+            if(allocSize > lastSuballoc.offset)
+            {
+                return false;
+            }
+        }
+
+        // Start from offset equal to end of free space.
+        VkDeviceSize resultOffset = resultBaseOffset;
+
+        // Apply VMA_DEBUG_MARGIN at the end.
+        if(VMA_DEBUG_MARGIN > 0)
+        {
+            if(resultOffset < VMA_DEBUG_MARGIN)
+            {
+                return false;
+            }
+            resultOffset -= VMA_DEBUG_MARGIN;
+        }
+
+        // Apply alignment.
+        resultOffset = VmaAlignDown(resultOffset, allocAlignment);
+
+        // Check next suballocations from 2nd for BufferImageGranularity conflicts.
+        // Make bigger alignment if necessary.
+        if(bufferImageGranularity > 1 && !suballocations2nd.empty())
+        {
+            bool bufferImageGranularityConflict = false;
+            for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
+            {
+                const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
+                if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
+                {
+                    if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
+                    {
+                        bufferImageGranularityConflict = true;
+                        break;
+                    }
+                }
+                else
+                {
+                    // Already on previous page.
+                    break;
+                }
+            }
+            if(bufferImageGranularityConflict)
+            {
+                resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
+            }
+        }
+
+        // There is enough free space.
+        const VkDeviceSize endOf1st = !suballocations1st.empty() ?
+            suballocations1st.back().offset + suballocations1st.back().size :
+            0;
+        if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
+        {
+            // Check previous suballocations for BufferImageGranularity conflicts.
+            // If conflict exists, allocation cannot be made here.
+            if(bufferImageGranularity > 1)
+            {
+                for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
+                {
+                    const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
+                    if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
+                    {
+                        if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
+                        {
+                            return false;
+                        }
+                    }
+                    else
+                    {
+                        // Already on next page.
+                        break;
+                    }
+                }
+            }
+
+            // All tests passed: Success.
+            pAllocationRequest->offset = resultOffset;
+            pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
+            pAllocationRequest->sumItemSize = 0;
+            // pAllocationRequest->item unused.
+            pAllocationRequest->itemsToMakeLostCount = 0;
+            return true;
+        }
+    }
+    else // !upperAddress
+    {
+        if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
+        {
+            // Try to allocate at the end of 1st vector.
+
+            VkDeviceSize resultBaseOffset = 0;
+            if(!suballocations1st.empty())
+            {
+                const VmaSuballocation& lastSuballoc = suballocations1st.back();
+                resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
+            }
+
+            // Start from offset equal to beginning of free space.
+            VkDeviceSize resultOffset = resultBaseOffset;
+
+            // Apply VMA_DEBUG_MARGIN at the beginning.
+            if(VMA_DEBUG_MARGIN > 0)
+            {
+                resultOffset += VMA_DEBUG_MARGIN;
+            }
+
+            // Apply alignment.
+            resultOffset = VmaAlignUp(resultOffset, allocAlignment);
+
+            // Check previous suballocations for BufferImageGranularity conflicts.
+            // Make bigger alignment if necessary.
+            if(bufferImageGranularity > 1 && !suballocations1st.empty())
+            {
+                bool bufferImageGranularityConflict = false;
+                for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
+                {
+                    const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
+                    if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
+                    {
+                        if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
+                        {
+                            bufferImageGranularityConflict = true;
+                            break;
+                        }
+                    }
+                    else
+                    {
+                        // Already on previous page.
+                        break;
+                    }
+                }
+                if(bufferImageGranularityConflict)
+                {
+                    resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
+                }
+            }
+
+            const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
+                suballocations2nd.back().offset : size;
+
+            // There is enough free space at the end after alignment.
+            if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
+            {
+                // Check next suballocations for BufferImageGranularity conflicts.
+                // If conflict exists, allocation cannot be made here.
+                if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
+                {
+                    for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
+                    {
+                        const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
+                        if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
+                        {
+                            if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
+                            {
+                                return false;
+                            }
+                        }
+                        else
+                        {
+                            // Already on previous page.
+                            break;
+                        }
+                    }
+                }
+
+                // All tests passed: Success.
+                pAllocationRequest->offset = resultOffset;
+                pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
+                pAllocationRequest->sumItemSize = 0;
+                // pAllocationRequest->item unused.
+                pAllocationRequest->itemsToMakeLostCount = 0;
+                return true;
+            }
+        }
+
+        // Wrap-around to end of 2nd vector. Try to allocate there, watching for the
+        // beginning of 1st vector as the end of free space.
+        if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
+        {
+            VMA_ASSERT(!suballocations1st.empty());
+
+            VkDeviceSize resultBaseOffset = 0;
+            if(!suballocations2nd.empty())
+            {
+                const VmaSuballocation& lastSuballoc = suballocations2nd.back();
+                resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
+            }
+
+            // Start from offset equal to beginning of free space.
+            VkDeviceSize resultOffset = resultBaseOffset;
+
+            // Apply VMA_DEBUG_MARGIN at the beginning.
+            if(VMA_DEBUG_MARGIN > 0)
+            {
+                resultOffset += VMA_DEBUG_MARGIN;
+            }
+
+            // Apply alignment.
+            resultOffset = VmaAlignUp(resultOffset, allocAlignment);
+
+            // Check previous suballocations for BufferImageGranularity conflicts.
+            // Make bigger alignment if necessary.
+            if(bufferImageGranularity > 1 && !suballocations2nd.empty())
+            {
+                bool bufferImageGranularityConflict = false;
+                for(size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
+                {
+                    const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
+                    if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
+                    {
+                        if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
+                        {
+                            bufferImageGranularityConflict = true;
+                            break;
+                        }
+                    }
+                    else
+                    {
+                        // Already on previous page.
+                        break;
+                    }
+                }
+                if(bufferImageGranularityConflict)
+                {
+                    resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
+                }
+            }
+
+            pAllocationRequest->itemsToMakeLostCount = 0;
+            pAllocationRequest->sumItemSize = 0;
+            size_t index1st = m_1stNullItemsBeginCount;
+
+            if(canMakeOtherLost)
+            {
+                while(index1st < suballocations1st.size() &&
+                    resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
+                {
+                    // Next colliding allocation at the beginning of 1st vector found. Try to make it lost.
+                    const VmaSuballocation& suballoc = suballocations1st[index1st];
+                    if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
+                    {
+                        // No problem.
+                    }
+                    else
+                    {
+                        VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
+                        if(suballoc.hAllocation->CanBecomeLost() &&
+                            suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
+                        {
+                            ++pAllocationRequest->itemsToMakeLostCount;
+                            pAllocationRequest->sumItemSize += suballoc.size;
+                        }
+                        else
+                        {
+                            return false;
+                        }
+                    }
+                    ++index1st;
+                }
+
+                // Check next suballocations for BufferImageGranularity conflicts.
+                // If conflict exists, we must mark more allocations lost or fail.
+                if(bufferImageGranularity > 1)
+                {
+                    while(index1st < suballocations1st.size())
+                    {
+                        const VmaSuballocation& suballoc = suballocations1st[index1st];
+                        if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
+                        {
+                            if(suballoc.hAllocation != VK_NULL_HANDLE)
+                            {
+                                // Not checking actual VmaIsBufferImageGranularityConflict(allocType, suballoc.type).
+                                if(suballoc.hAllocation->CanBecomeLost() &&
+                                    suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
+                                {
+                                    ++pAllocationRequest->itemsToMakeLostCount;
+                                    pAllocationRequest->sumItemSize += suballoc.size;
+                                }
+                                else
+                                {
+                                    return false;
+                                }
+                            }
+                        }
+                        else
+                        {
+                            // Already on next page.
+                            break;
+                        }
+                        ++index1st;
+                    }
+                }
+            }
+
+            // There is enough free space at the end after alignment.
+            if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN < size) ||
+                (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
+            {
+                // Check next suballocations for BufferImageGranularity conflicts.
+                // If conflict exists, allocation cannot be made here.
+                if(bufferImageGranularity > 1)
+                {
+                    for(size_t nextSuballocIndex = index1st;
+                        nextSuballocIndex < suballocations1st.size();
+                        nextSuballocIndex++)
+                    {
+                        const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
+                        if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
+                        {
+                            if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
+                            {
+                                return false;
+                            }
+                        }
+                        else
+                        {
+                            // Already on next page.
+                            break;
+                        }
+                    }
+                }
+
+                // All tests passed: Success.
+                pAllocationRequest->offset = resultOffset;
+                pAllocationRequest->sumFreeSize =
+                    (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
+                    - resultBaseOffset
+                    - pAllocationRequest->sumItemSize;
+                // pAllocationRequest->item unused.
+                return true;
+            }
+        }
+    }
+
+    return false;
+}
+
+bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
+    uint32_t currentFrameIndex,
+    uint32_t frameInUseCount,
+    VmaAllocationRequest* pAllocationRequest)
+{
+    if(pAllocationRequest->itemsToMakeLostCount == 0)
+    {
+        return true;
+    }
+
+    VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
+    
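+    // Only allocations at the beginning of the 1st vector can be made lost here,
+    // mirroring what CreateAllocationRequest() counted for the ring-buffer path.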
+    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+    size_t index1st = m_1stNullItemsBeginCount;
+    size_t madeLostCount = 0;
+    while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
+    {
+        VMA_ASSERT(index1st < suballocations1st.size());
+        VmaSuballocation& suballoc = suballocations1st[index1st];
+        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
+        {
+            VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
+            VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
+            if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
+            {
+                suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+                suballoc.hAllocation = VK_NULL_HANDLE;
+                m_SumFreeSize += suballoc.size;
+                ++m_1stNullItemsMiddleCount;
+                ++madeLostCount;
+            }
+            else
+            {
+                return false;
+            }
+        }
+        ++index1st;
+    }
+
+    CleanupAfterFree();
+    //VMA_HEAVY_ASSERT(Validate()); // Already called by CleanupAfterFree().
+    
+    return true;
+}
+
+uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
+{
+    uint32_t lostAllocationCount = 0;
+    
+    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
+    {
+        VmaSuballocation& suballoc = suballocations1st[i];
+        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
+            suballoc.hAllocation->CanBecomeLost() &&
+            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
+        {
+            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+            suballoc.hAllocation = VK_NULL_HANDLE;
+            ++m_1stNullItemsMiddleCount;
+            m_SumFreeSize += suballoc.size;
+            ++lostAllocationCount;
+        }
+    }
+
+    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
+    {
+        VmaSuballocation& suballoc = suballocations2nd[i];
+        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
+            suballoc.hAllocation->CanBecomeLost() &&
+            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
+        {
+            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+            suballoc.hAllocation = VK_NULL_HANDLE;
+            ++m_2ndNullItemsCount;
+            ++lostAllocationCount;
+        }
+    }
+
+    if(lostAllocationCount)
+    {
+        CleanupAfterFree();
+    }
+
+    return lostAllocationCount;
+}
+
+VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData)
+{
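+    // Every live allocation is expected to be preceded and followed by a debug
+    // margin filled with a magic value; a mismatch on either side means the
+    // application wrote outside the bounds of its allocation.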
+    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
+    {
+        const VmaSuballocation& suballoc = suballocations1st[i];
+        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
+        {
+            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
+            {
+                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
+                return VK_ERROR_VALIDATION_FAILED_EXT;
+            }
+            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
+            {
+                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
+                return VK_ERROR_VALIDATION_FAILED_EXT;
+            }
+        }
+    }
+
+    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
+    {
+        const VmaSuballocation& suballoc = suballocations2nd[i];
+        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
+        {
+            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
+            {
+                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
+                return VK_ERROR_VALIDATION_FAILED_EXT;
+            }
+            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
+            {
+                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
+                return VK_ERROR_VALIDATION_FAILED_EXT;
+            }
+        }
+    }
+
+    return VK_SUCCESS;
+}
+
+void VmaBlockMetadata_Linear::Alloc(
+    const VmaAllocationRequest& request,
+    VmaSuballocationType type,
+    VkDeviceSize allocSize,
+    bool upperAddress,
+    VmaAllocation hAllocation)
+{
+    const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
+
+    if(upperAddress)
+    {
+        VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
+            "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
+        SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+        suballocations2nd.push_back(newSuballoc);
+        m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
+    }
+    else
+    {
+        SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+
+        // First allocation.
+        if(suballocations1st.empty())
+        {
+            suballocations1st.push_back(newSuballoc);
+        }
+        else
+        {
+            // New allocation at the end of 1st vector.
+            if(request.offset >= suballocations1st.back().offset + suballocations1st.back().size)
+            {
+                // Check if it fits before the end of the block.
+                VMA_ASSERT(request.offset + allocSize <= GetSize());
+                suballocations1st.push_back(newSuballoc);
+            }
+            // New allocation at the end of 2-part ring buffer, so before first allocation from 1st vector.
+            else if(request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset)
+            {
+                SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+
+                switch(m_2ndVectorMode)
+                {
+                case SECOND_VECTOR_EMPTY:
+                    // First allocation from second part ring buffer.
+                    VMA_ASSERT(suballocations2nd.empty());
+                    m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
+                    break;
+                case SECOND_VECTOR_RING_BUFFER:
+                    // 2-part ring buffer is already started.
+                    VMA_ASSERT(!suballocations2nd.empty());
+                    break;
+                case SECOND_VECTOR_DOUBLE_STACK:
+                    VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
+                    break;
+                default:
+                    VMA_ASSERT(0);
+                }
+
+                suballocations2nd.push_back(newSuballoc);
+            }
+            else
+            {
+                VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR.");
+            }
+        }
+    }
+
+    m_SumFreeSize -= newSuballoc.size;
+}
+
+void VmaBlockMetadata_Linear::Free(const VmaAllocation allocation)
+{
+    FreeAtOffset(allocation->GetOffset());
+}
+
+void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
+{
+    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+
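+    // Fast paths first: the item at the front of the 1st vector and the item at
+    // the back of the 1st or 2nd vector are released in O(1). Anything else is
+    // located by binary search and only marked as free; actual removal and
+    // compaction happen in CleanupAfterFree().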
+    if(!suballocations1st.empty())
+    {
+        // Allocation at the front of the 1st vector: mark it as free and extend the null-item prefix.
+        VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
+        if(firstSuballoc.offset == offset)
+        {
+            firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+            firstSuballoc.hAllocation = VK_NULL_HANDLE;
+            m_SumFreeSize += firstSuballoc.size;
+            ++m_1stNullItemsBeginCount;
+            CleanupAfterFree();
+            return;
+        }
+    }
+
+    // Last allocation in 2-part ring buffer or top of upper stack (same logic).
+    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
+        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
+    {
+        VmaSuballocation& lastSuballoc = suballocations2nd.back();
+        if(lastSuballoc.offset == offset)
+        {
+            m_SumFreeSize += lastSuballoc.size;
+            suballocations2nd.pop_back();
+            CleanupAfterFree();
+            return;
+        }
+    }
+    // Last allocation in 1st vector.
+    else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
+    {
+        VmaSuballocation& lastSuballoc = suballocations1st.back();
+        if(lastSuballoc.offset == offset)
+        {
+            m_SumFreeSize += lastSuballoc.size;
+            suballocations1st.pop_back();
+            CleanupAfterFree();
+            return;
+        }
+    }
+
+    // Item from the middle of 1st vector.
+    {
+        VmaSuballocation refSuballoc;
+        refSuballoc.offset = offset;
+        // The rest of the members is intentionally left uninitialized for better performance.
+        SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
+            suballocations1st.begin() + m_1stNullItemsBeginCount,
+            suballocations1st.end(),
+            refSuballoc);
+        if(it != suballocations1st.end())
+        {
+            it->type = VMA_SUBALLOCATION_TYPE_FREE;
+            it->hAllocation = VK_NULL_HANDLE;
+            ++m_1stNullItemsMiddleCount;
+            m_SumFreeSize += it->size;
+            CleanupAfterFree();
+            return;
+        }
+    }
+
+    if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
+    {
+        // Item from the middle of 2nd vector.
+        VmaSuballocation refSuballoc;
+        refSuballoc.offset = offset;
+        // The rest of the members is intentionally left uninitialized for better performance.
+        SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
+            VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
+            VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
+        if(it != suballocations2nd.end())
+        {
+            it->type = VMA_SUBALLOCATION_TYPE_FREE;
+            it->hAllocation = VK_NULL_HANDLE;
+            ++m_2ndNullItemsCount;
+            m_SumFreeSize += it->size;
+            CleanupAfterFree();
+            return;
+        }
+    }
+
+    VMA_ASSERT(0 && "Allocation to free not found in linear allocator!");
+}
+
+bool VmaBlockMetadata_Linear::ShouldCompact1st() const
+{
+    const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
+    const size_t suballocCount = AccessSuballocations1st().size();
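+    // Compact only when the vector is reasonably large and freed (null) items
+    // outnumber live ones by at least 3:2, to amortize the cost of compaction.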
+    return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
+}
+
+void VmaBlockMetadata_Linear::CleanupAfterFree()
+{
+    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+
+    if(IsEmpty())
+    {
+        suballocations1st.clear();
+        suballocations2nd.clear();
+        m_1stNullItemsBeginCount = 0;
+        m_1stNullItemsMiddleCount = 0;
+        m_2ndNullItemsCount = 0;
+        m_2ndVectorMode = SECOND_VECTOR_EMPTY;
+    }
+    else
+    {
+        const size_t suballoc1stCount = suballocations1st.size();
+        const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
+        VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
+
+        // Find more null items at the beginning of 1st vector.
+        while(m_1stNullItemsBeginCount < suballoc1stCount &&
+            suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
+        {
+            ++m_1stNullItemsBeginCount;
+            --m_1stNullItemsMiddleCount;
+        }
+
+        // Find more null items at the end of 1st vector.
+        while(m_1stNullItemsMiddleCount > 0 &&
+            suballocations1st.back().hAllocation == VK_NULL_HANDLE)
+        {
+            --m_1stNullItemsMiddleCount;
+            suballocations1st.pop_back();
+        }
+
+        // Find more null items at the end of 2nd vector.
+        while(m_2ndNullItemsCount > 0 &&
+            suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
+        {
+            --m_2ndNullItemsCount;
+            suballocations2nd.pop_back();
+        }
+
+        if(ShouldCompact1st())
+        {
+            const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
+            size_t srcIndex = m_1stNullItemsBeginCount;
+            for(size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
+            {
+                while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
+                {
+                    ++srcIndex;
+                }
+                if(dstIndex != srcIndex)
+                {
+                    suballocations1st[dstIndex] = suballocations1st[srcIndex];
+                }
+                ++srcIndex;
+            }
+            suballocations1st.resize(nonNullItemCount);
+            m_1stNullItemsBeginCount = 0;
+            m_1stNullItemsMiddleCount = 0;
+        }
+
+        // 2nd vector became empty.
+        if(suballocations2nd.empty())
+        {
+            m_2ndVectorMode = SECOND_VECTOR_EMPTY;
+        }
+
+        // 1st vector became empty.
+        if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
+        {
+            suballocations1st.clear();
+            m_1stNullItemsBeginCount = 0;
+
+            if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
+            {
+                // Swap 1st with 2nd. Now 2nd is empty.
+                m_2ndVectorMode = SECOND_VECTOR_EMPTY;
+                m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
+                while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
+                    suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
+                {
+                    ++m_1stNullItemsBeginCount;
+                    --m_1stNullItemsMiddleCount;
+                }
+                m_2ndNullItemsCount = 0;
+                m_1stVectorIndex ^= 1;
+            }
+        }
+    }
+
+    VMA_HEAVY_ASSERT(Validate());
+}
+
+
+////////////////////////////////////////////////////////////////////////////////
+// class VmaBlockMetadata_Buddy
+
+VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(VmaAllocator hAllocator) :
+    VmaBlockMetadata(hAllocator),
+    m_Root(VMA_NULL),
+    m_AllocationCount(0),
+    m_FreeCount(1),
+    m_SumFreeSize(0)
+{
+    memset(m_FreeList, 0, sizeof(m_FreeList));
+}
+
+VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
+{
+    DeleteNode(m_Root);
+}
+
+void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
+{
+    VmaBlockMetadata::Init(size);
+
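+    // The buddy algorithm operates on a power-of-2 region. Any remainder
+    // between m_UsableSize and the full block size is reported as unusable
+    // space in the statistics (see GetUnusableSize()).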
+    m_UsableSize = VmaPrevPow2(size);
+    m_SumFreeSize = m_UsableSize;
+
+    // Calculate m_LevelCount.
+    m_LevelCount = 1;
+    while(m_LevelCount < MAX_LEVELS &&
+        LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
+    {
+        ++m_LevelCount;
+    }
+
+    Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
+    rootNode->offset = 0;
+    rootNode->type = Node::TYPE_FREE;
+    rootNode->parent = VMA_NULL;
+    rootNode->buddy = VMA_NULL;
+
+    m_Root = rootNode;
+    AddToFreeListFront(0, rootNode);
+}
+
+bool VmaBlockMetadata_Buddy::Validate() const
+{
+    // Validate tree.
+    ValidationContext ctx;
+    if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
+    {
+        VMA_VALIDATE(false && "ValidateNode failed.");
+    }
+    VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
+    VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
+
+    // Validate free node lists.
+    for(uint32_t level = 0; level < m_LevelCount; ++level)
+    {
+        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
+            m_FreeList[level].front->free.prev == VMA_NULL);
+
+        for(Node* node = m_FreeList[level].front;
+            node != VMA_NULL;
+            node = node->free.next)
+        {
+            VMA_VALIDATE(node->type == Node::TYPE_FREE);
+            
+            if(node->free.next == VMA_NULL)
+            {
+                VMA_VALIDATE(m_FreeList[level].back == node);
+            }
+            else
+            {
+                VMA_VALIDATE(node->free.next->free.prev == node);
+            }
+        }
+    }
+
+    // Validate that free lists at higher levels are empty.
+    for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
+    {
+        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
+    }
+
+    return true;
+}
+
+VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax() const
+{
+    for(uint32_t level = 0; level < m_LevelCount; ++level)
+    {
+        if(m_FreeList[level].front != VMA_NULL)
+        {
+            return LevelToNodeSize(level);
+        }
+    }
+    return 0;
+}
+
+void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
+{
+    const VkDeviceSize unusableSize = GetUnusableSize();
+
+    outInfo.blockCount = 1;
+
+    outInfo.allocationCount = outInfo.unusedRangeCount = 0;
+    outInfo.usedBytes = outInfo.unusedBytes = 0;
+
+    outInfo.allocationSizeMax = outInfo.unusedRangeSizeMax = 0;
+    outInfo.allocationSizeMin = outInfo.unusedRangeSizeMin = UINT64_MAX;
+    outInfo.allocationSizeAvg = outInfo.unusedRangeSizeAvg = 0; // Unused.
+
+    CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
+
+    if(unusableSize > 0)
+    {
+        ++outInfo.unusedRangeCount;
+        outInfo.unusedBytes += unusableSize;
+        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusableSize);
+        outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusableSize);
+    }
+}
+
+void VmaBlockMetadata_Buddy::AddPoolStats(VmaPoolStats& inoutStats) const
+{
+    const VkDeviceSize unusableSize = GetUnusableSize();
+
+    inoutStats.size += GetSize();
+    inoutStats.unusedSize += m_SumFreeSize + unusableSize;
+    inoutStats.allocationCount += m_AllocationCount;
+    inoutStats.unusedRangeCount += m_FreeCount;
+    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
+
+    if(unusableSize > 0)
+    {
+        ++inoutStats.unusedRangeCount;
+        // Not updating inoutStats.unusedRangeSizeMax with unusableSize because this space is not available for allocations.
+    }
+}
+
+#if VMA_STATS_STRING_ENABLED
+
+void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json) const
+{
+    // TODO optimize
+    VmaStatInfo stat;
+    CalcAllocationStatInfo(stat);
+
+    PrintDetailedMap_Begin(
+        json,
+        stat.unusedBytes,
+        stat.allocationCount,
+        stat.unusedRangeCount);
+
+    PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
+
+    const VkDeviceSize unusableSize = GetUnusableSize();
+    if(unusableSize > 0)
+    {
+        PrintDetailedMap_UnusedRange(json,
+            m_UsableSize, // offset
+            unusableSize); // size
+    }
+
+    PrintDetailedMap_End(json);
+}
+
+#endif // #if VMA_STATS_STRING_ENABLED
+
+bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
+    uint32_t currentFrameIndex,
+    uint32_t frameInUseCount,
+    VkDeviceSize bufferImageGranularity,
+    VkDeviceSize allocSize,
+    VkDeviceSize allocAlignment,
+    bool upperAddress,
+    VmaSuballocationType allocType,
+    bool canMakeOtherLost,
+    uint32_t strategy,
+    VmaAllocationRequest* pAllocationRequest)
+{
+    VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
+
+    // Simple way to respect bufferImageGranularity. May be optimized some day.
+    // Whenever it might be an OPTIMAL image...
+    if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
+        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
+        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
+    {
+        allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
+        allocSize = VMA_MAX(allocSize, bufferImageGranularity);
+    }
+
+    if(allocSize > m_UsableSize)
+    {
+        return false;
+    }
+
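+    // Walk the free lists from the deepest level whose node size still fits
+    // allocSize up toward the root (the "level--" loop runs targetLevel..0) and
+    // take the first free node whose offset satisfies the requested alignment.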
+    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
+    for(uint32_t level = targetLevel + 1; level--; )
+    {
+        for(Node* freeNode = m_FreeList[level].front;
+            freeNode != VMA_NULL;
+            freeNode = freeNode->free.next)
+        {
+            if(freeNode->offset % allocAlignment == 0)
+            {
+                pAllocationRequest->offset = freeNode->offset;
+                pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
+                pAllocationRequest->sumItemSize = 0;
+                pAllocationRequest->itemsToMakeLostCount = 0;
+                pAllocationRequest->customData = (void*)(uintptr_t)level;
+                return true;
+            }
+        }
+    }
+
+    return false;
+}
+
+bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
+    uint32_t currentFrameIndex,
+    uint32_t frameInUseCount,
+    VmaAllocationRequest* pAllocationRequest)
+{
+    /*
+    Lost allocations are not supported in buddy allocator at the moment.
+    Support might be added in the future.
+    */
+    return pAllocationRequest->itemsToMakeLostCount == 0;
+}
+
+uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
+{
+    /*
+    Lost allocations are not supported in buddy allocator at the moment.
+    Support might be added in the future.
+    */
+    return 0;
+}
+
+void VmaBlockMetadata_Buddy::Alloc(
+    const VmaAllocationRequest& request,
+    VmaSuballocationType type,
+    VkDeviceSize allocSize,
+    bool upperAddress,
+    VmaAllocation hAllocation)
+{
+    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
+    uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
+    
+    Node* currNode = m_FreeList[currLevel].front;
+    VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
+    while(currNode->offset != request.offset)
+    {
+        currNode = currNode->free.next;
+        VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
+    }
+    
+    // Go down, splitting free nodes.
+    while(currLevel < targetLevel)
+    {
+        // currNode is already the first free node at currLevel.
+        // Remove it from the list of free nodes at this level.
+        RemoveFromFreeList(currLevel, currNode);
+         
+        const uint32_t childrenLevel = currLevel + 1;
+
+        // Create two free sub-nodes.
+        Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
+        Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
+
+        leftChild->offset = currNode->offset;
+        leftChild->type = Node::TYPE_FREE;
+        leftChild->parent = currNode;
+        leftChild->buddy = rightChild;
+
+        rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
+        rightChild->type = Node::TYPE_FREE;
+        rightChild->parent = currNode;
+        rightChild->buddy = leftChild;
+
+        // Convert current currNode to split type.
+        currNode->type = Node::TYPE_SPLIT;
+        currNode->split.leftChild = leftChild;
+
+        // Add child nodes to free list. Order is important!
+        AddToFreeListFront(childrenLevel, rightChild);
+        AddToFreeListFront(childrenLevel, leftChild);
+
+        ++m_FreeCount;
+        //m_SumFreeSize -= LevelToNodeSize(currLevel) % 2; // Useful only when level node sizes can be non power of 2.
+        ++currLevel;
+        currNode = m_FreeList[currLevel].front;
+
+        /*
+        We can be sure that currNode, as the left child of the node previously split,
+        also fulfills the alignment requirement.
+        */
+    }
+
+    // Remove from free list.
+    VMA_ASSERT(currLevel == targetLevel &&
+        currNode != VMA_NULL &&
+        currNode->type == Node::TYPE_FREE);
+    RemoveFromFreeList(currLevel, currNode);
+
+    // Convert to allocation node.
+    currNode->type = Node::TYPE_ALLOCATION;
+    currNode->allocation.alloc = hAllocation;
+
+    ++m_AllocationCount;
+    --m_FreeCount;
+    m_SumFreeSize -= allocSize;
+}
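+
+// Illustrative walk-through of the splitting loop above, assuming a usable size of 256 and an
+// allocation of size 32 taken from a completely free block: the level-0 node (256) is split into
+// two 128s, the left 128 into two 64s, and the left 64 into two 32s; the left-most 32 becomes the
+// allocation. The "Order is important!" comment matters because the left child is pushed onto the
+// free list last, so m_FreeList[currLevel].front is the left child, whose offset equals the
+// parent's offset and therefore still satisfies the alignment requirement.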
+
+void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
+{
+    if(node->type == Node::TYPE_SPLIT)
+    {
+        DeleteNode(node->split.leftChild->buddy);
+        DeleteNode(node->split.leftChild);
+    }
+
+    vma_delete(GetAllocationCallbacks(), node);
+}
+
+bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const
+{
+    VMA_VALIDATE(level < m_LevelCount);
+    VMA_VALIDATE(curr->parent == parent);
+    VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
+    VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
+    switch(curr->type)
+    {
+    case Node::TYPE_FREE:
+        // curr->free.prev, next are validated separately.
+        ctx.calculatedSumFreeSize += levelNodeSize;
+        ++ctx.calculatedFreeCount;
+        break;
+    case Node::TYPE_ALLOCATION:
+        ++ctx.calculatedAllocationCount;
+        ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
+        VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
+        break;
+    case Node::TYPE_SPLIT:
+        {
+            const uint32_t childrenLevel = level + 1;
+            const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
+            const Node* const leftChild = curr->split.leftChild;
+            VMA_VALIDATE(leftChild != VMA_NULL);
+            VMA_VALIDATE(leftChild->offset == curr->offset);
+            if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
+            {
+                VMA_VALIDATE(false && "ValidateNode for left child failed.");
+            }
+            const Node* const rightChild = leftChild->buddy;
+            VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
+            if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
+            {
+                VMA_VALIDATE(false && "ValidateNode for right child failed.");
+            }
+        }
+        break;
+    default:
+        return false;
+    }
+
+    return true;
+}
+
+uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const
+{
+    // I know this could be optimized somehow, e.g. by using std::bit_width (formerly proposed as std::log2p1) from C++20.
+    uint32_t level = 0;
+    VkDeviceSize currLevelNodeSize = m_UsableSize;
+    VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
+    while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
+    {
+        ++level;
+        currLevelNodeSize = nextLevelNodeSize;
+        nextLevelNodeSize = currLevelNodeSize >> 1;
+    }
+    return level;
+}
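+
+// Example (illustrative): with m_UsableSize = 256 the level node sizes are 256, 128, 64, 32, ...
+// For allocSize = 48 the loop stops at level 2 (node size 64), i.e. the smallest power-of-two
+// node that can still hold the allocation; the remaining 16 bytes are the internal fragmentation
+// inherent to the buddy algorithm and are reported as an unused range in the statistics.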
+
+void VmaBlockMetadata_Buddy::FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset)
+{
+    // Find node and level.
+    Node* node = m_Root;
+    VkDeviceSize nodeOffset = 0;
+    uint32_t level = 0;
+    VkDeviceSize levelNodeSize = LevelToNodeSize(0);
+    while(node->type == Node::TYPE_SPLIT)
+    {
+        const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
+        if(offset < nodeOffset + nextLevelSize)
+        {
+            node = node->split.leftChild;
+        }
+        else
+        {
+            node = node->split.leftChild->buddy;
+            nodeOffset += nextLevelSize;
+        }
+        ++level;
+        levelNodeSize = nextLevelSize;
+    }
+
+    VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
+    VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
+
+    ++m_FreeCount;
+    --m_AllocationCount;
+    m_SumFreeSize += alloc->GetSize();
+
+    node->type = Node::TYPE_FREE;
+
+    // Join free nodes if possible.
+    while(level > 0 && node->buddy->type == Node::TYPE_FREE)
+    {
+        RemoveFromFreeList(level, node->buddy);
+        Node* const parent = node->parent;
+
+        vma_delete(GetAllocationCallbacks(), node->buddy);
+        vma_delete(GetAllocationCallbacks(), node);
+        parent->type = Node::TYPE_FREE;
+        
+        node = parent;
+        --level;
+        //m_SumFreeSize += LevelToNodeSize(level) % 2; // Useful only when level node sizes can be non power of 2.
+        --m_FreeCount;
+    }
+
+    AddToFreeListFront(level, node);
+}
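+
+// Merging example (illustrative): freeing a level-3 node whose buddy is also free deletes both
+// children and turns their level-2 parent back into a single free node; the loop repeats this up
+// the tree as long as the buddy on the current level is free, and the surviving node is finally
+// pushed onto the free list of the level where merging stopped.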
+
+void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const
+{
+    switch(node->type)
+    {
+    case Node::TYPE_FREE:
+        ++outInfo.unusedRangeCount;
+        outInfo.unusedBytes += levelNodeSize;
+        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, levelNodeSize);
+        outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, levelNodeSize);
+        break;
+    case Node::TYPE_ALLOCATION:
+        {
+            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
+            ++outInfo.allocationCount;
+            outInfo.usedBytes += allocSize;
+            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, allocSize);
+            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, allocSize);
+
+            const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
+            if(unusedRangeSize > 0)
+            {
+                ++outInfo.unusedRangeCount;
+                outInfo.unusedBytes += unusedRangeSize;
+                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
+                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
+            }
+        }
+        break;
+    case Node::TYPE_SPLIT:
+        {
+            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
+            const Node* const leftChild = node->split.leftChild;
+            CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
+            const Node* const rightChild = leftChild->buddy;
+            CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
+        }
+        break;
+    default:
+        VMA_ASSERT(0);
+    }
+}
+
+void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
+{
+    VMA_ASSERT(node->type == Node::TYPE_FREE);
+
+    // List is empty.
+    Node* const frontNode = m_FreeList[level].front;
+    if(frontNode == VMA_NULL)
+    {
+        VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
+        node->free.prev = node->free.next = VMA_NULL;
+        m_FreeList[level].front = m_FreeList[level].back = node;
+    }
+    else
+    {
+        VMA_ASSERT(frontNode->free.prev == VMA_NULL);
+        node->free.prev = VMA_NULL;
+        node->free.next = frontNode;
+        frontNode->free.prev = node;
+        m_FreeList[level].front = node;
+    }
+}
+
+void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
+{
+    VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
+
+    // It is at the front.
+    if(node->free.prev == VMA_NULL)
+    {
+        VMA_ASSERT(m_FreeList[level].front == node);
+        m_FreeList[level].front = node->free.next;
+    }
+    else
+    {
+        Node* const prevFreeNode = node->free.prev;
+        VMA_ASSERT(prevFreeNode->free.next == node);
+        prevFreeNode->free.next = node->free.next;
+    }
+
+    // It is at the back.
+    if(node->free.next == VMA_NULL)
+    {
+        VMA_ASSERT(m_FreeList[level].back == node);
+        m_FreeList[level].back = node->free.prev;
+    }
+    else
+    {
+        Node* const nextFreeNode = node->free.next;
+        VMA_ASSERT(nextFreeNode->free.prev == node);
+        nextFreeNode->free.prev = node->free.prev;
+    }
+}
+
+#if VMA_STATS_STRING_ENABLED
+void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const
+{
+    switch(node->type)
+    {
+    case Node::TYPE_FREE:
+        PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
+        break;
+    case Node::TYPE_ALLOCATION:
+        {   
+            PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
+            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
+            if(allocSize < levelNodeSize)
+            {
+                PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
+            }
+        }
+        break;
+    case Node::TYPE_SPLIT:
+        {
+            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
+            const Node* const leftChild = node->split.leftChild;
+            PrintDetailedMapNode(json, leftChild, childrenNodeSize);
+            const Node* const rightChild = leftChild->buddy;
+            PrintDetailedMapNode(json, rightChild, childrenNodeSize);
+        }
+        break;
+    default:
+        VMA_ASSERT(0);
+    }
+}
+#endif // #if VMA_STATS_STRING_ENABLED
+
+
+////////////////////////////////////////////////////////////////////////////////
+// class VmaDeviceMemoryBlock
+
+VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
+    m_pMetadata(VMA_NULL),
+    m_MemoryTypeIndex(UINT32_MAX),
+    m_Id(0),
+    m_hMemory(VK_NULL_HANDLE),
+    m_MapCount(0),
+    m_pMappedData(VMA_NULL)
+{
+}
+
+void VmaDeviceMemoryBlock::Init(
+    VmaAllocator hAllocator,
+    uint32_t newMemoryTypeIndex,
+    VkDeviceMemory newMemory,
+    VkDeviceSize newSize,
+    uint32_t id,
+    uint32_t algorithm)
+{
+    VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
+
+    m_MemoryTypeIndex = newMemoryTypeIndex;
+    m_Id = id;
+    m_hMemory = newMemory;
+
+    switch(algorithm)
+    {
+    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
+        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
+        break;
+    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
+        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
+        break;
+    default:
+        VMA_ASSERT(0);
+        // Fall-through.
+    case 0:
+        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
+    }
+    m_pMetadata->Init(newSize);
+}
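+
+// The algorithm flag passed in here selects the metadata implementation for the block:
+// 0 (default) uses the generic free-list metadata, VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT the
+// linear (ring-buffer/stack-like) metadata, and VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT the buddy
+// metadata defined above. All of them implement the same VmaBlockMetadata interface, so the rest
+// of VmaDeviceMemoryBlock is agnostic of the chosen algorithm.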
+
+void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
+{
+    // This is the most important assert in the entire library.
+    // Hitting it means you have some memory leak - unreleased VmaAllocation objects.
+    VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!");
+
+    VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
+    allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
+    m_hMemory = VK_NULL_HANDLE;
+
+    vma_delete(allocator, m_pMetadata);
+    m_pMetadata = VMA_NULL;
+}
+
+bool VmaDeviceMemoryBlock::Validate() const
+{
+    VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
+        (m_pMetadata->GetSize() != 0));
+    
+    return m_pMetadata->Validate();
+}
+
+VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator)
+{
+    void* pData = nullptr;
+    VkResult res = Map(hAllocator, 1, &pData);
+    if(res != VK_SUCCESS)
+    {
+        return res;
+    }
+
+    res = m_pMetadata->CheckCorruption(pData);
+
+    Unmap(hAllocator, 1);
+
+    return res;
+}
+
+VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
+{
+    if(count == 0)
+    {
+        return VK_SUCCESS;
+    }
+
+    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
+    if(m_MapCount != 0)
+    {
+        m_MapCount += count;
+        VMA_ASSERT(m_pMappedData != VMA_NULL);
+        if(ppData != VMA_NULL)
+        {
+            *ppData = m_pMappedData;
+        }
+        return VK_SUCCESS;
+    }
+    else
+    {
+        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
+            hAllocator->m_hDevice,
+            m_hMemory,
+            0, // offset
+            VK_WHOLE_SIZE,
+            0, // flags
+            &m_pMappedData);
+        if(result == VK_SUCCESS)
+        {
+            if(ppData != VMA_NULL)
+            {
+                *ppData = m_pMappedData;
+            }
+            m_MapCount = count;
+        }
+        return result;
+    }
+}
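+
+// Map()/Unmap() reference-count the mapping of the whole VkDeviceMemory: the first Map() calls
+// vkMapMemory once for the entire block and caches the pointer, later calls only bump m_MapCount
+// and return the cached pointer, and vkUnmapMemory is issued only when the count drops back to 0.
+// This is what allows persistently mapped allocations and temporary maps to coexist in one block.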
+
+void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
+{
+    if(count == 0)
+    {
+        return;
+    }
+
+    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
+    if(m_MapCount >= count)
+    {
+        m_MapCount -= count;
+        if(m_MapCount == 0)
+        {
+            m_pMappedData = VMA_NULL;
+            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
+        }
+    }
+    else
+    {
+        VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
+    }
+}
+
+VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
+{
+    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
+    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
+
+    void* pData;
+    VkResult res = Map(hAllocator, 1, &pData);
+    if(res != VK_SUCCESS)
+    {
+        return res;
+    }
+
+    VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
+    VmaWriteMagicValue(pData, allocOffset + allocSize);
+
+    Unmap(hAllocator, 1);
+
+    return VK_SUCCESS;
+}
+
+VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
+{
+    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
+    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
+
+    void* pData;
+    VkResult res = Map(hAllocator, 1, &pData);
+    if(res != VK_SUCCESS)
+    {
+        return res;
+    }
+
+    if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
+    {
+        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
+    }
+    else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
+    {
+        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
+    }
+
+    Unmap(hAllocator, 1);
+
+    return VK_SUCCESS;
+}
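+
+// Corruption detection sketch: when VMA_DEBUG_MARGIN > 0 and VMA_DEBUG_DETECT_CORRUPTION is
+// enabled, a magic 32-bit pattern is written into the margin just before the allocation's offset
+// and immediately after its end. ValidateMagicValueAroundAllocation re-reads both locations when
+// the allocation is freed; a mismatch means something wrote outside the bounds of its allocation.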
+
+VkResult VmaDeviceMemoryBlock::BindBufferMemory(
+    const VmaAllocator hAllocator,
+    const VmaAllocation hAllocation,
+    VkBuffer hBuffer)
+{
+    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
+        hAllocation->GetBlock() == this);
+    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
+    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
+    return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
+        hAllocator->m_hDevice,
+        hBuffer,
+        m_hMemory,
+        hAllocation->GetOffset());
+}
+
+VkResult VmaDeviceMemoryBlock::BindImageMemory(
+    const VmaAllocator hAllocator,
+    const VmaAllocation hAllocation,
+    VkImage hImage)
+{
+    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
+        hAllocation->GetBlock() == this);
+    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
+    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
+    return hAllocator->GetVulkanFunctions().vkBindImageMemory(
+        hAllocator->m_hDevice,
+        hImage,
+        m_hMemory,
+        hAllocation->GetOffset());
+}
+
+static void InitStatInfo(VmaStatInfo& outInfo)
+{
+    memset(&outInfo, 0, sizeof(outInfo));
+    outInfo.allocationSizeMin = UINT64_MAX;
+    outInfo.unusedRangeSizeMin = UINT64_MAX;
+}
+
+// Adds statistics srcInfo into inoutInfo, like: inoutInfo += srcInfo.
+static void VmaAddStatInfo(VmaStatInfo& inoutInfo, const VmaStatInfo& srcInfo)
+{
+    inoutInfo.blockCount += srcInfo.blockCount;
+    inoutInfo.allocationCount += srcInfo.allocationCount;
+    inoutInfo.unusedRangeCount += srcInfo.unusedRangeCount;
+    inoutInfo.usedBytes += srcInfo.usedBytes;
+    inoutInfo.unusedBytes += srcInfo.unusedBytes;
+    inoutInfo.allocationSizeMin = VMA_MIN(inoutInfo.allocationSizeMin, srcInfo.allocationSizeMin);
+    inoutInfo.allocationSizeMax = VMA_MAX(inoutInfo.allocationSizeMax, srcInfo.allocationSizeMax);
+    inoutInfo.unusedRangeSizeMin = VMA_MIN(inoutInfo.unusedRangeSizeMin, srcInfo.unusedRangeSizeMin);
+    inoutInfo.unusedRangeSizeMax = VMA_MAX(inoutInfo.unusedRangeSizeMax, srcInfo.unusedRangeSizeMax);
+}
+
+static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
+{
+    inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
+        VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
+    inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
+        VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
+}
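+
+// VmaRoundDiv divides with rounding to the nearest integer, so, with illustrative numbers,
+// usedBytes = 300 over allocationCount = 7 yields allocationSizeAvg = 43 rather than the
+// truncated 42.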
+
+VmaPool_T::VmaPool_T(
+    VmaAllocator hAllocator,
+    const VmaPoolCreateInfo& createInfo,
+    VkDeviceSize preferredBlockSize) :
+    m_BlockVector(
+        hAllocator,
+        createInfo.memoryTypeIndex,
+        createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
+        createInfo.minBlockCount,
+        createInfo.maxBlockCount,
+        (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
+        createInfo.frameInUseCount,
+        true, // isCustomPool
+        createInfo.blockSize != 0, // explicitBlockSize
+        createInfo.flags & VMA_POOL_CREATE_ALGORITHM_MASK), // algorithm
+    m_Id(0)
+{
+}
+
+VmaPool_T::~VmaPool_T()
+{
+}
+
+#if VMA_STATS_STRING_ENABLED
+
+#endif // #if VMA_STATS_STRING_ENABLED
+
+VmaBlockVector::VmaBlockVector(
+    VmaAllocator hAllocator,
+    uint32_t memoryTypeIndex,
+    VkDeviceSize preferredBlockSize,
+    size_t minBlockCount,
+    size_t maxBlockCount,
+    VkDeviceSize bufferImageGranularity,
+    uint32_t frameInUseCount,
+    bool isCustomPool,
+    bool explicitBlockSize,
+    uint32_t algorithm) :
+    m_hAllocator(hAllocator),
+    m_MemoryTypeIndex(memoryTypeIndex),
+    m_PreferredBlockSize(preferredBlockSize),
+    m_MinBlockCount(minBlockCount),
+    m_MaxBlockCount(maxBlockCount),
+    m_BufferImageGranularity(bufferImageGranularity),
+    m_FrameInUseCount(frameInUseCount),
+    m_IsCustomPool(isCustomPool),
+    m_ExplicitBlockSize(explicitBlockSize),
+    m_Algorithm(algorithm),
+    m_HasEmptyBlock(false),
+    m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
+    m_NextBlockId(0)
+{
+}
+
+VmaBlockVector::~VmaBlockVector()
+{
+    for(size_t i = m_Blocks.size(); i--; )
+    {
+        m_Blocks[i]->Destroy(m_hAllocator);
+        vma_delete(m_hAllocator, m_Blocks[i]);
+    }
+}
+
+VkResult VmaBlockVector::CreateMinBlocks()
+{
+    for(size_t i = 0; i < m_MinBlockCount; ++i)
+    {
+        VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
+        if(res != VK_SUCCESS)
+        {
+            return res;
+        }
+    }
+    return VK_SUCCESS;
+}
+
+void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
+{
+    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
+
+    const size_t blockCount = m_Blocks.size();
+
+    pStats->size = 0;
+    pStats->unusedSize = 0;
+    pStats->allocationCount = 0;
+    pStats->unusedRangeCount = 0;
+    pStats->unusedRangeSizeMax = 0;
+    pStats->blockCount = blockCount;
+
+    for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
+    {
+        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
+        VMA_ASSERT(pBlock);
+        VMA_HEAVY_ASSERT(pBlock->Validate());
+        pBlock->m_pMetadata->AddPoolStats(*pStats);
+    }
+}
+
+bool VmaBlockVector::IsCorruptionDetectionEnabled() const
+{
+    const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+    return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
+        (VMA_DEBUG_MARGIN > 0) &&
+        (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
+}
+
+static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
+
+VkResult VmaBlockVector::Allocate(
+    VmaPool hCurrentPool,
+    uint32_t currentFrameIndex,
+    VkDeviceSize size,
+    VkDeviceSize alignment,
+    const VmaAllocationCreateInfo& createInfo,
+    VmaSuballocationType suballocType,
+    size_t allocationCount,
+    VmaAllocation* pAllocations)
+{
+    size_t allocIndex;
+    VkResult res = VK_SUCCESS;
+
+    {
+        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
+        for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
+        {
+            res = AllocatePage(
+                hCurrentPool,
+                currentFrameIndex,
+                size,
+                alignment,
+                createInfo,
+                suballocType,
+                pAllocations + allocIndex);
+            if(res != VK_SUCCESS)
+            {
+                break;
+            }
+        }
+    }
+
+    if(res != VK_SUCCESS)
+    {
+        // Free all already created allocations.
+        while(allocIndex--)
+        {
+            Free(pAllocations[allocIndex]);
+        }
+        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
+    }
+
+    return res;
+}
+
+VkResult VmaBlockVector::AllocatePage(
+    VmaPool hCurrentPool,
+    uint32_t currentFrameIndex,
+    VkDeviceSize size,
+    VkDeviceSize alignment,
+    const VmaAllocationCreateInfo& createInfo,
+    VmaSuballocationType suballocType,
+    VmaAllocation* pAllocation)
+{
+    const bool isUpperAddress = (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
+    bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
+    const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
+    const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;
+    const bool canCreateNewBlock =
+        ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
+        (m_Blocks.size() < m_MaxBlockCount);
+    uint32_t strategy = createInfo.flags & VMA_ALLOCATION_CREATE_STRATEGY_MASK;
+
+    // If the linear algorithm is used, canMakeOtherLost is available only when the block vector is used as a ring buffer,
+    // which in turn is possible only when maxBlockCount = 1.
+    if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT && m_MaxBlockCount > 1)
+    {
+        canMakeOtherLost = false;
+    }
+
+    // Upper address can only be used with linear allocator and within single memory block.
+    if(isUpperAddress &&
+        (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT || m_MaxBlockCount > 1))
+    {
+        return VK_ERROR_FEATURE_NOT_PRESENT;
+    }
+
+    // Validate strategy.
+    switch(strategy)
+    {
+    case 0:
+        strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
+        break;
+    case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT:
+    case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT:
+    case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT:
+        break;
+    default:
+        return VK_ERROR_FEATURE_NOT_PRESENT;
+    }
+
+    // Early reject: the requested allocation size is larger than the maximum block size for this block vector.
+    if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
+    {
+        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+    }
+
+    /*
+    Under certain condition, this whole section can be skipped for optimization, so
+    we move on directly to trying to allocate with canMakeOtherLost. That's the case
+    e.g. for custom pools with linear algorithm.
+    */
+    if(!canMakeOtherLost || canCreateNewBlock)
+    {
+        // 1. Search existing allocations. Try to allocate without making other allocations lost.
+        VmaAllocationCreateFlags allocFlagsCopy = createInfo.flags;
+        allocFlagsCopy &= ~VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
+
+        if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
+        {
+            // Use only last block.
+            if(!m_Blocks.empty())
+            {
+                VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back();
+                VMA_ASSERT(pCurrBlock);
+                VkResult res = AllocateFromBlock(
+                    pCurrBlock,
+                    hCurrentPool,
+                    currentFrameIndex,
+                    size,
+                    alignment,
+                    allocFlagsCopy,
+                    createInfo.pUserData,
+                    suballocType,
+                    strategy,
+                    pAllocation);
+                if(res == VK_SUCCESS)
+                {
+                    VMA_DEBUG_LOG("    Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
+                    return VK_SUCCESS;
+                }
+            }
+        }
+        else
+        {
+            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
+            {
+                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
+                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
+                {
+                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
+                    VMA_ASSERT(pCurrBlock);
+                    VkResult res = AllocateFromBlock(
+                        pCurrBlock,
+                        hCurrentPool,
+                        currentFrameIndex,
+                        size,
+                        alignment,
+                        allocFlagsCopy,
+                        createInfo.pUserData,
+                        suballocType,
+                        strategy,
+                        pAllocation);
+                    if(res == VK_SUCCESS)
+                    {
+                        VMA_DEBUG_LOG("    Returned from existing block #%u", (uint32_t)blockIndex);
+                        return VK_SUCCESS;
+                    }
+                }
+            }
+            else // WORST_FIT, FIRST_FIT
+            {
+                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
+                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
+                {
+                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
+                    VMA_ASSERT(pCurrBlock);
+                    VkResult res = AllocateFromBlock(
+                        pCurrBlock,
+                        hCurrentPool,
+                        currentFrameIndex,
+                        size,
+                        alignment,
+                        allocFlagsCopy,
+                        createInfo.pUserData,
+                        suballocType,
+                        strategy,
+                        pAllocation);
+                    if(res == VK_SUCCESS)
+                    {
+                        VMA_DEBUG_LOG("    Returned from existing block #%u", (uint32_t)blockIndex);
+                        return VK_SUCCESS;
+                    }
+                }
+            }
+        }
+
+        // 2. Try to create new block.
+        if(canCreateNewBlock)
+        {
+            // Calculate optimal size for new block.
+            VkDeviceSize newBlockSize = m_PreferredBlockSize;
+            uint32_t newBlockSizeShift = 0;
+            const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
+
+            if(!m_ExplicitBlockSize)
+            {
+                // Allocate 1/8, 1/4, 1/2 as first blocks.
+                const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
+                for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
+                {
+                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
+                    if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
+                    {
+                        newBlockSize = smallerNewBlockSize;
+                        ++newBlockSizeShift;
+                    }
+                    else
+                    {
+                        break;
+                    }
+                }
+            }
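+
+            // Illustrative example of the heuristic above: with m_PreferredBlockSize = 256 MiB,
+            // no existing blocks and a 1 MiB request, all three halvings apply and the first
+            // block is created with 32 MiB; as more blocks are needed they grow to 64, 128 and
+            // finally 256 MiB, because a halving is only taken while the smaller size still
+            // exceeds the largest existing block and leaves room for at least twice the request.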
+
+            size_t newBlockIndex = 0;
+            VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
+            // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize.
+            if(!m_ExplicitBlockSize)
+            {
+                while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
+                {
+                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
+                    if(smallerNewBlockSize >= size)
+                    {
+                        newBlockSize = smallerNewBlockSize;
+                        ++newBlockSizeShift;
+                        res = CreateBlock(newBlockSize, &newBlockIndex);
+                    }
+                    else
+                    {
+                        break;
+                    }
+                }
+            }
+
+            if(res == VK_SUCCESS)
+            {
+                VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
+                VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
+
+                res = AllocateFromBlock(
+                    pBlock,
+                    hCurrentPool,
+                    currentFrameIndex,
+                    size,
+                    alignment,
+                    allocFlagsCopy,
+                    createInfo.pUserData,
+                    suballocType,
+                    strategy,
+                    pAllocation);
+                if(res == VK_SUCCESS)
+                {
+                    VMA_DEBUG_LOG("    Created new block Size=%llu", newBlockSize);
+                    return VK_SUCCESS;
+                }
+                else
+                {
+                    // Allocation from new block failed, possibly due to VMA_DEBUG_MARGIN or alignment.
+                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+                }
+            }
+        }
+    }
+
+    // 3. Try to allocate from existing blocks with making other allocations lost.
+    if(canMakeOtherLost)
+    {
+        uint32_t tryIndex = 0;
+        for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
+        {
+            VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
+            VmaAllocationRequest bestRequest = {};
+            VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
+
+            // 1. Search existing allocations.
+            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
+            {
+                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
+                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
+                {
+                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
+                    VMA_ASSERT(pCurrBlock);
+                    VmaAllocationRequest currRequest = {};
+                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
+                        currentFrameIndex,
+                        m_FrameInUseCount,
+                        m_BufferImageGranularity,
+                        size,
+                        alignment,
+                        (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0,
+                        suballocType,
+                        canMakeOtherLost,
+                        strategy,
+                        &currRequest))
+                    {
+                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
+                        if(pBestRequestBlock == VMA_NULL ||
+                            currRequestCost < bestRequestCost)
+                        {
+                            pBestRequestBlock = pCurrBlock;
+                            bestRequest = currRequest;
+                            bestRequestCost = currRequestCost;
+
+                            if(bestRequestCost == 0)
+                            {
+                                break;
+                            }
+                        }
+                    }
+                }
+            }
+            else // WORST_FIT, FIRST_FIT
+            {
+                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
+                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
+                {
+                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
+                    VMA_ASSERT(pCurrBlock);
+                    VmaAllocationRequest currRequest = {};
+                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
+                        currentFrameIndex,
+                        m_FrameInUseCount,
+                        m_BufferImageGranularity,
+                        size,
+                        alignment,
+                        (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0,
+                        suballocType,
+                        canMakeOtherLost,
+                        strategy,
+                        &currRequest))
+                    {
+                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
+                        if(pBestRequestBlock == VMA_NULL ||
+                            currRequestCost < bestRequestCost ||
+                            strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
+                        {
+                            pBestRequestBlock = pCurrBlock;
+                            bestRequest = currRequest;
+                            bestRequestCost = currRequestCost;
+
+                            if(bestRequestCost == 0 ||
+                                strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
+                            {
+                                break;
+                            }
+                        }
+                    }
+                }
+            }
+
+            if(pBestRequestBlock != VMA_NULL)
+            {
+                if(mapped)
+                {
+                    VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
+                    if(res != VK_SUCCESS)
+                    {
+                        return res;
+                    }
+                }
+
+                if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
+                    currentFrameIndex,
+                    m_FrameInUseCount,
+                    &bestRequest))
+                {
+                    // We no longer have an empty block.
+                    if(pBestRequestBlock->m_pMetadata->IsEmpty())
+                    {
+                        m_HasEmptyBlock = false;
+                    }
+                    // Allocate from this pBlock.
+                    *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
+                    pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, isUpperAddress, *pAllocation);
+                    (*pAllocation)->InitBlockAllocation(
+                        hCurrentPool,
+                        pBestRequestBlock,
+                        bestRequest.offset,
+                        alignment,
+                        size,
+                        suballocType,
+                        mapped,
+                        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
+                    VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
+                    VMA_DEBUG_LOG("    Returned from existing allocation #%u", (uint32_t)blockIndex);
+                    (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
+                    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
+                    {
+                        m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
+                    }
+                    if(IsCorruptionDetectionEnabled())
+                    {
+                        VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
+                        VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
+                    }
+                    return VK_SUCCESS;
+                }
+                // else: Some allocations must have been touched while we are here. Next try.
+            }
+            else
+            {
+                // Could not find place in any of the blocks - break outer loop.
+                break;
+            }
+        }
+        /* Maximum number of tries exceeded - a very unlikely event that happens when many other
+        threads are simultaneously touching the allocations, making it impossible to mark them as
+        lost at the same time as we try to allocate. */
+        if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
+        {
+            return VK_ERROR_TOO_MANY_OBJECTS;
+        }
+    }
+
+    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+}
+
+void VmaBlockVector::Free(
+    VmaAllocation hAllocation)
+{
+    VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
+
+    // Scope for lock.
+    {
+        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
+
+        VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
+
+        if(IsCorruptionDetectionEnabled())
+        {
+            VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
+            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value.");
+        }
+
+        if(hAllocation->IsPersistentMap())
+        {
+            pBlock->Unmap(m_hAllocator, 1);
+        }
+
+        pBlock->m_pMetadata->Free(hAllocation);
+        VMA_HEAVY_ASSERT(pBlock->Validate());
+
+        VMA_DEBUG_LOG("  Freed from MemoryTypeIndex=%u", memTypeIndex);
+
+        // pBlock became empty after this deallocation.
+        if(pBlock->m_pMetadata->IsEmpty())
+        {
+            // We already have an empty block - we don't want two, so delete this one.
+            if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
+            {
+                pBlockToDelete = pBlock;
+                Remove(pBlock);
+            }
+            // We now have first empty block.
+            else
+            {
+                m_HasEmptyBlock = true;
+            }
+        }
+        // pBlock didn't become empty, but we have another empty block - find and free that one.
+        // (This is optional, a heuristic.)
+        else if(m_HasEmptyBlock)
+        {
+            VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
+            if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
+            {
+                pBlockToDelete = pLastBlock;
+                m_Blocks.pop_back();
+                m_HasEmptyBlock = false;
+            }
+        }
+
+        IncrementallySortBlocks();
+    }
+
+    // Destruction of the empty block. Deferred until this point, outside of the mutex
+    // lock, for performance reasons.
+    if(pBlockToDelete != VMA_NULL)
+    {
+        VMA_DEBUG_LOG("    Deleted empty allocation");
+        pBlockToDelete->Destroy(m_hAllocator);
+        vma_delete(m_hAllocator, pBlockToDelete);
+    }
+}
+
+VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const
+{
+    VkDeviceSize result = 0;
+    for(size_t i = m_Blocks.size(); i--; )
+    {
+        result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
+        if(result >= m_PreferredBlockSize)
+        {
+            break;
+        }
+    }
+    return result;
+}
+
+void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
+{
+    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
+    {
+        if(m_Blocks[blockIndex] == pBlock)
+        {
+            VmaVectorRemove(m_Blocks, blockIndex);
+            return;
+        }
+    }
+    VMA_ASSERT(0);
+}
+
+void VmaBlockVector::IncrementallySortBlocks()
+{
+    if(m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
+    {
+        // Bubble sort only until first swap.
+        for(size_t i = 1; i < m_Blocks.size(); ++i)
+        {
+            if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
+            {
+                VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
+                return;
+            }
+        }
+    }
+}
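+
+// IncrementallySortBlocks performs at most one adjacent swap per call - an amortized bubble-sort
+// step. Over many allocations and frees this keeps m_Blocks roughly ordered by ascending free
+// space, so the forward scan used by the BEST_FIT strategy tends to hit the fullest blocks first
+// while the backward scan used by WORST_FIT/FIRST_FIT hits the emptiest ones.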
+
+VkResult VmaBlockVector::AllocateFromBlock(
+    VmaDeviceMemoryBlock* pBlock,
+    VmaPool hCurrentPool,
+    uint32_t currentFrameIndex,
+    VkDeviceSize size,
+    VkDeviceSize alignment,
+    VmaAllocationCreateFlags allocFlags,
+    void* pUserData,
+    VmaSuballocationType suballocType,
+    uint32_t strategy,
+    VmaAllocation* pAllocation)
+{
+    VMA_ASSERT((allocFlags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) == 0);
+    const bool isUpperAddress = (allocFlags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
+    const bool mapped = (allocFlags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
+    const bool isUserDataString = (allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;
+
+    VmaAllocationRequest currRequest = {};
+    if(pBlock->m_pMetadata->CreateAllocationRequest(
+        currentFrameIndex,
+        m_FrameInUseCount,
+        m_BufferImageGranularity,
+        size,
+        alignment,
+        isUpperAddress,
+        suballocType,
+        false, // canMakeOtherLost
+        strategy,
+        &currRequest))
+    {
+        // Allocate from pCurrBlock.
+        VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
+
+        if(mapped)
+        {
+            VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
+            if(res != VK_SUCCESS)
+            {
+                return res;
+            }
+        }
+            
+        // We no longer have an empty block.
+        if(pBlock->m_pMetadata->IsEmpty())
+        {
+            m_HasEmptyBlock = false;
+        }
+            
+        *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
+        pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, isUpperAddress, *pAllocation);
+        (*pAllocation)->InitBlockAllocation(
+            hCurrentPool,
+            pBlock,
+            currRequest.offset,
+            alignment,
+            size,
+            suballocType,
+            mapped,
+            (allocFlags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
+        VMA_HEAVY_ASSERT(pBlock->Validate());
+        (*pAllocation)->SetUserData(m_hAllocator, pUserData);
+        if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
+        {
+            m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
+        }
+        if(IsCorruptionDetectionEnabled())
+        {
+            VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
+            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
+        }
+        return VK_SUCCESS;
+    }
+    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+}
+
+VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
+{
+    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
+    allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
+    allocInfo.allocationSize = blockSize;
+    VkDeviceMemory mem = VK_NULL_HANDLE;
+    VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
+    if(res < 0)
+    {
+        return res;
+    }
+
+    // New VkDeviceMemory successfully created.
+
+    // Create new Allocation for it.
+    VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
+    pBlock->Init(
+        m_hAllocator,
+        m_MemoryTypeIndex,
+        mem,
+        allocInfo.allocationSize,
+        m_NextBlockId++,
+        m_Algorithm);
+
+    m_Blocks.push_back(pBlock);
+    if(pNewBlockIndex != VMA_NULL)
+    {
+        *pNewBlockIndex = m_Blocks.size() - 1;
+    }
+
+    return VK_SUCCESS;
+}
+
+void VmaBlockVector::ApplyDefragmentationMovesCpu(
+    class VmaBlockVectorDefragmentationContext* pDefragCtx,
+    const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
+{
+    const size_t blockCount = m_Blocks.size();
+    const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
+
+    enum BLOCK_FLAG
+    {
+        BLOCK_FLAG_USED = 0x00000001,
+        BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
+    };
+
+    struct BlockInfo
+    {
+        uint32_t flags;
+        void* pMappedData;
+    };
+    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
+        blockInfo(blockCount, VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
+    memset(blockInfo.data(), 0, blockCount * sizeof(BlockInfo));
+
+    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
+    const size_t moveCount = moves.size();
+    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
+    {
+        const VmaDefragmentationMove& move = moves[moveIndex];
+        blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
+        blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
+    }
+
+    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
+
+    // Go over all blocks. Get mapped pointer or map if necessary.
+    for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
+    {
+        BlockInfo& currBlockInfo = blockInfo[blockIndex];
+        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
+        if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
+        {
+            currBlockInfo.pMappedData = pBlock->GetMappedData();
+            // It is not originally mapped - map it.
+            if(currBlockInfo.pMappedData == VMA_NULL)
+            {
+                pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
+                if(pDefragCtx->res == VK_SUCCESS)
+                {
+                    currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
+                }
+            }
+        }
+    }
+
+    // Go over all moves. Do actual data transfer.
+    if(pDefragCtx->res == VK_SUCCESS)
+    {
+        const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
+        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
+
+        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
+        {
+            const VmaDefragmentationMove& move = moves[moveIndex];
+
+            const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
+            const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
+
+            VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
+
+            // Invalidate source.
+            if(isNonCoherent)
+            {
+                VmaDeviceMemoryBlock* const pSrcBlock = m_Blocks[move.srcBlockIndex];
+                memRange.memory = pSrcBlock->GetDeviceMemory();
+                memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
+                memRange.size = VMA_MIN(
+                    VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
+                    pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
+                (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
+            }
+
+            // THE PLACE WHERE ACTUAL DATA COPY HAPPENS.
+            memmove(
+                reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
+                reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
+                static_cast<size_t>(move.size));
+
+            if(IsCorruptionDetectionEnabled())
+            {
+                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
+                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
+            }
+
+            // Flush destination.
+            if(isNonCoherent)
+            {
+                VmaDeviceMemoryBlock* const pDstBlock = m_Blocks[move.dstBlockIndex];
+                memRange.memory = pDstBlock->GetDeviceMemory();
+                memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
+                memRange.size = VMA_MIN(
+                    VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
+                    pDstBlock->m_pMetadata->GetSize() - memRange.offset);
+                (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
+            }
+        }
+    }
+
+    // Go over all blocks in reverse order. Unmap those that were mapped just for defragmentation.
+    // Regardless of pCtx->res == VK_SUCCESS.
+    for(size_t blockIndex = blockCount; blockIndex--; )
+    {
+        const BlockInfo& currBlockInfo = blockInfo[blockIndex];
+        if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
+        {
+            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
+            pBlock->Unmap(m_hAllocator, 1);
+        }
+    }
+}
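+
+// Non-coherent memory handling above, with illustrative numbers: if nonCoherentAtomSize is 64,
+// a move with srcOffset = 100 and size = 50 is invalidated/flushed over the range
+// offset = VmaAlignDown(100, 64) = 64 and size = VmaAlignUp(50 + (100 - 64), 64) = 128
+// (clamped to the end of the block), because vkInvalidateMappedMemoryRanges and
+// vkFlushMappedMemoryRanges require offsets and sizes aligned to nonCoherentAtomSize.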
+
+void VmaBlockVector::ApplyDefragmentationMovesGpu(
+    class VmaBlockVectorDefragmentationContext* pDefragCtx,
+    const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
+    VkCommandBuffer commandBuffer)
+{
+    const size_t blockCount = m_Blocks.size();
+
+    pDefragCtx->blockContexts.resize(blockCount);
+    memset(pDefragCtx->blockContexts.data(), 0, blockCount * sizeof(VmaBlockDefragmentationContext));
+
+    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
+    const size_t moveCount = moves.size();
+    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
+    {
+        const VmaDefragmentationMove& move = moves[moveIndex];
+        pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
+        pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
+    }
+
+    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
+
+    // Go over all blocks. Create and bind buffer for whole block if necessary.
+    {
+        VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+        bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
+            VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+
+        for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
+        {
+            VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
+            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
+            if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
+            {
+                bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
+                pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
+                    m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
+                if(pDefragCtx->res == VK_SUCCESS)
+                {
+                    pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
+                        m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
+                }
+            }
+        }
+    }
+
+    // Go over all moves. Post data transfer commands to command buffer.
+    if(pDefragCtx->res == VK_SUCCESS)
+    {
+        const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
+        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
+
+        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
+        {
+            const VmaDefragmentationMove& move = moves[moveIndex];
+
+            const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
+            const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
+
+            VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
+
+            VkBufferCopy region = {
+                move.srcOffset,
+                move.dstOffset,
+                move.size };
+            (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
+                commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
+        }
+    }
+
+    // Buffers stay in the defrag context; VK_NOT_READY below defers their destruction until the
+    // command buffer has finished executing.
+    if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
+    {
+        pDefragCtx->res = VK_NOT_READY;
+    }
+}
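+
+// GPU defragmentation path summary: every block touched by a move gets a temporary VkBuffer with
+// TRANSFER_SRC | TRANSFER_DST usage bound to the whole block at offset 0, the moves are recorded
+// as vkCmdCopyBuffer regions into the caller's command buffer, and the context result is left as
+// VK_NOT_READY so that those staging buffers are destroyed only after the command buffer has
+// executed.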
+
+void VmaBlockVector::FreeEmptyBlocks(VmaDefragmentationStats* pDefragmentationStats)
+{
+    m_HasEmptyBlock = false;
+    for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
+    {
+        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
+        if(pBlock->m_pMetadata->IsEmpty())
+        {
+            if(m_Blocks.size() > m_MinBlockCount)
+            {
+                if(pDefragmentationStats != VMA_NULL)
+                {
+                    ++pDefragmentationStats->deviceMemoryBlocksFreed;
+                    pDefragmentationStats->bytesFreed += pBlock->m_pMetadata->GetSize();
+                }
+
+                VmaVectorRemove(m_Blocks, blockIndex);
+                pBlock->Destroy(m_hAllocator);
+                vma_delete(m_hAllocator, pBlock);
+            }
+            else
+            {
+                m_HasEmptyBlock = true;
+            }
+        }
+    }
+}
+
+#if VMA_STATS_STRING_ENABLED
+
+void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
+{
+    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
+
+    json.BeginObject();
+
+    if(m_IsCustomPool)
+    {
+        json.WriteString("MemoryTypeIndex");
+        json.WriteNumber(m_MemoryTypeIndex);
+
+        json.WriteString("BlockSize");
+        json.WriteNumber(m_PreferredBlockSize);
+
+        json.WriteString("BlockCount");
+        json.BeginObject(true);
+        if(m_MinBlockCount > 0)
+        {
+            json.WriteString("Min");
+            json.WriteNumber((uint64_t)m_MinBlockCount);
+        }
+        if(m_MaxBlockCount < SIZE_MAX)
+        {
+            json.WriteString("Max");
+            json.WriteNumber((uint64_t)m_MaxBlockCount);
+        }
+        json.WriteString("Cur");
+        json.WriteNumber((uint64_t)m_Blocks.size());
+        json.EndObject();
+
+        if(m_FrameInUseCount > 0)
+        {
+            json.WriteString("FrameInUseCount");
+            json.WriteNumber(m_FrameInUseCount);
+        }
+
+        if(m_Algorithm != 0)
+        {
+            json.WriteString("Algorithm");
+            json.WriteString(VmaAlgorithmToStr(m_Algorithm));
+        }
+    }
+    else
+    {
+        json.WriteString("PreferredBlockSize");
+        json.WriteNumber(m_PreferredBlockSize);
+    }
+
+    json.WriteString("Blocks");
+    json.BeginObject();
+    for(size_t i = 0; i < m_Blocks.size(); ++i)
+    {
+        json.BeginString();
+        json.ContinueString(m_Blocks[i]->GetId());
+        json.EndString();
+
+        m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
+    }
+    json.EndObject();
+
+    json.EndObject();
+}
+
+#endif // #if VMA_STATS_STRING_ENABLED
+
+void VmaBlockVector::Defragment(
+    class VmaBlockVectorDefragmentationContext* pCtx,
+    VmaDefragmentationStats* pStats,
+    VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
+    VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
+    VkCommandBuffer commandBuffer)
+{
+    pCtx->res = VK_SUCCESS;
+    
+    const VkMemoryPropertyFlags memPropFlags =
+        m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
+    const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
+    const bool isHostCoherent = (memPropFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0;
+
+    const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
+        isHostVisible;
+    const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
+        (VMA_DEBUG_DETECT_CORRUPTION == 0 || !(isHostVisible && isHostCoherent));
+
+    // There are options to defragment this memory type.
+    if(canDefragmentOnCpu || canDefragmentOnGpu)
+    {
+        bool defragmentOnGpu;
+        // There is only one option to defragment this memory type.
+        if(canDefragmentOnGpu != canDefragmentOnCpu)
+        {
+            defragmentOnGpu = canDefragmentOnGpu;
+        }
+        // Both options are available: use heuristics to choose the better one.
+        else
+        {
+            defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
+                m_hAllocator->IsIntegratedGpu();
+        }
+
+        bool overlappingMoveSupported = !defragmentOnGpu;
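+        // The CPU path copies with memmove(), so overlapping source/destination ranges are fine;
+        // vkCmdCopyBuffer requires non-overlapping regions, so the GPU path disallows them.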
+
+        if(m_hAllocator->m_UseMutex)
+        {
+            m_Mutex.LockWrite();
+            pCtx->mutexLocked = true;
+        }
+
+        pCtx->Begin(overlappingMoveSupported);
+
+        // Defragment.
+
+        const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
+        const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
+        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves = 
+            VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()));
+        pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);
+
+        // Accumulate statistics.
+        if(pStats != VMA_NULL)
+        {
+            const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
+            const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
+            pStats->bytesMoved += bytesMoved;
+            pStats->allocationsMoved += allocationsMoved;
+            VMA_ASSERT(bytesMoved <= maxBytesToMove);
+            VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
+            if(defragmentOnGpu)
+            {
+                maxGpuBytesToMove -= bytesMoved;
+                maxGpuAllocationsToMove -= allocationsMoved;
+            }
+            else
+            {
+                maxCpuBytesToMove -= bytesMoved;
+                maxCpuAllocationsToMove -= allocationsMoved;
+            }
+        }
+    
+        if(pCtx->res >= VK_SUCCESS)
+        {
+            if(defragmentOnGpu)
+            {
+                ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
+            }
+            else
+            {
+                ApplyDefragmentationMovesCpu(pCtx, moves);
+            }
+        }
+    }
+}
+
+void VmaBlockVector::DefragmentationEnd(
+    class VmaBlockVectorDefragmentationContext* pCtx,
+    VmaDefragmentationStats* pStats)
+{
+    // Destroy buffers.
+    for(size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
+    {
+        VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
+        if(blockCtx.hBuffer)
+        {
+            (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
+                m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
+        }
+    }
+
+    if(pCtx->res >= VK_SUCCESS)
+    {
+        FreeEmptyBlocks(pStats);
+    }
+
+    if(pCtx->mutexLocked)
+    {
+        VMA_ASSERT(m_hAllocator->m_UseMutex);
+        m_Mutex.UnlockWrite();
+    }
+}
+
+size_t VmaBlockVector::CalcAllocationCount() const
+{
+    size_t result = 0;
+    for(size_t i = 0; i < m_Blocks.size(); ++i)
+    {
+        result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
+    }
+    return result;
+}
+
+bool VmaBlockVector::IsBufferImageGranularityConflictPossible() const
+{
+    if(m_BufferImageGranularity == 1)
+    {
+        return false;
+    }
+    VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
+    for(size_t i = 0, count = m_Blocks.size(); i < count; ++i)
+    {
+        VmaDeviceMemoryBlock* const pBlock = m_Blocks[i];
+        VMA_ASSERT(m_Algorithm == 0);
+        VmaBlockMetadata_Generic* const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
+        if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
+        {
+            return true;
+        }
+    }
+    return false;
+}
+
+void VmaBlockVector::MakePoolAllocationsLost(
+    uint32_t currentFrameIndex,
+    size_t* pLostAllocationCount)
+{
+    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
+    size_t lostAllocationCount = 0;
+    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
+    {
+        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
+        VMA_ASSERT(pBlock);
+        lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
+    }
+    if(pLostAllocationCount != VMA_NULL)
+    {
+        *pLostAllocationCount = lostAllocationCount;
+    }
+}
+
+VkResult VmaBlockVector::CheckCorruption()
+{
+    if(!IsCorruptionDetectionEnabled())
+    {
+        return VK_ERROR_FEATURE_NOT_PRESENT;
+    }
+
+    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
+    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
+    {
+        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
+        VMA_ASSERT(pBlock);
+        VkResult res = pBlock->CheckCorruption(m_hAllocator);
+        if(res != VK_SUCCESS)
+        {
+            return res;
+        }
+    }
+    return VK_SUCCESS;
+}
+
+void VmaBlockVector::AddStats(VmaStats* pStats)
+{
+    const uint32_t memTypeIndex = m_MemoryTypeIndex;
+    const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
+
+    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
+
+    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
+    {
+        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
+        VMA_ASSERT(pBlock);
+        VMA_HEAVY_ASSERT(pBlock->Validate());
+        VmaStatInfo allocationStatInfo;
+        pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
+        VmaAddStatInfo(pStats->total, allocationStatInfo);
+        VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
+        VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// VmaDefragmentationAlgorithm_Generic members definition
+
+VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
+    VmaAllocator hAllocator,
+    VmaBlockVector* pBlockVector,
+    uint32_t currentFrameIndex,
+    bool overlappingMoveSupported) :
+    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
+    m_AllocationCount(0),
+    m_AllAllocations(false),
+    m_BytesMoved(0),
+    m_AllocationsMoved(0),
+    m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
+{
+    // Create block info for each block.
+    const size_t blockCount = m_pBlockVector->m_Blocks.size();
+    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
+    {
+        BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
+        pBlockInfo->m_OriginalBlockIndex = blockIndex;
+        pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
+        m_Blocks.push_back(pBlockInfo);
+    }
+
+    // Sort them by m_pBlock pointer value.
+    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
+}
+
+VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
+{
+    for(size_t i = m_Blocks.size(); i--; )
+    {
+        vma_delete(m_hAllocator, m_Blocks[i]);
+    }
+}
+
+void VmaDefragmentationAlgorithm_Generic::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
+{
+    // Now that we are inside VmaBlockVector::m_Mutex, we can make the final check that this allocation was not lost.
+    if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
+    {
+        VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
+        BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
+        if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
+        {
+            AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
+            (*it)->m_Allocations.push_back(allocInfo);
+        }
+        else
+        {
+            VMA_ASSERT(0);
+        }
+
+        ++m_AllocationCount;
+    }
+}
+
+VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
+    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
+    VkDeviceSize maxBytesToMove,
+    uint32_t maxAllocationsToMove)
+{
+    if(m_Blocks.empty())
+    {
+        return VK_SUCCESS;
+    }
+
+    // This is a choice based on research.
+    // Option 1:
+    uint32_t strategy = VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT;
+    // Option 2:
+    //uint32_t strategy = VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT;
+    // Option 3:
+    //uint32_t strategy = VMA_ALLOCATION_CREATE_STRATEGY_MIN_FRAGMENTATION_BIT;
+
+    size_t srcBlockMinIndex = 0;
+    // When using FAST_ALGORITHM, move allocations only from the last of the blocks that contain non-movable allocations.
+    /*
+    if(m_AlgorithmFlags & VMA_DEFRAGMENTATION_FAST_ALGORITHM_BIT)
+    {
+        const size_t blocksWithNonMovableCount = CalcBlocksWithNonMovableCount();
+        if(blocksWithNonMovableCount > 0)
+        {
+            srcBlockMinIndex = blocksWithNonMovableCount - 1;
+        }
+    }
+    */
+
+    size_t srcBlockIndex = m_Blocks.size() - 1;
+    size_t srcAllocIndex = SIZE_MAX;
+    for(;;)
+    {
+        // 1. Find next allocation to move.
+        // 1.1. Start from last to first m_Blocks - they are sorted from most "destination" to most "source".
+        // 1.2. Then start from last to first m_Allocations.
+        while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
+        {
+            if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
+            {
+                // Finished: no more allocations to process.
+                if(srcBlockIndex == srcBlockMinIndex)
+                {
+                    return VK_SUCCESS;
+                }
+                else
+                {
+                    --srcBlockIndex;
+                    srcAllocIndex = SIZE_MAX;
+                }
+            }
+            else
+            {
+                srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
+            }
+        }
+        
+        BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
+        AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
+
+        const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
+        const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
+        const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
+        const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
+
+        // 2. Try to find new place for this allocation in preceding or current block.
+        for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
+        {
+            BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
+            VmaAllocationRequest dstAllocRequest;
+            if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
+                m_CurrentFrameIndex,
+                m_pBlockVector->GetFrameInUseCount(),
+                m_pBlockVector->GetBufferImageGranularity(),
+                size,
+                alignment,
+                false, // upperAddress
+                suballocType,
+                false, // canMakeOtherLost
+                strategy,
+                &dstAllocRequest) &&
+            MoveMakesSense(
+                dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
+            {
+                VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
+
+                // Reached limit on number of allocations or bytes to move.
+                if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
+                    (m_BytesMoved + size > maxBytesToMove))
+                {
+                    return VK_SUCCESS;
+                }
+
+                VmaDefragmentationMove move;
+                move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
+                move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
+                move.srcOffset = srcOffset;
+                move.dstOffset = dstAllocRequest.offset;
+                move.size = size;
+                moves.push_back(move);
+
+                pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
+                    dstAllocRequest,
+                    suballocType,
+                    size,
+                    false, // upperAddress
+                    allocInfo.m_hAllocation);
+                pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
+                
+                allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
+
+                if(allocInfo.m_pChanged != VMA_NULL)
+                {
+                    *allocInfo.m_pChanged = VK_TRUE;
+                }
+
+                ++m_AllocationsMoved;
+                m_BytesMoved += size;
+
+                VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
+
+                break;
+            }
+        }
+
+        // If not processed, this allocInfo remains in pBlockInfo->m_Allocations for next round.
+
+        if(srcAllocIndex > 0)
+        {
+            --srcAllocIndex;
+        }
+        else
+        {
+            if(srcBlockIndex > 0)
+            {
+                --srcBlockIndex;
+                srcAllocIndex = SIZE_MAX;
+            }
+            else
+            {
+                return VK_SUCCESS;
+            }
+        }
+    }
+}
+
+size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount() const
+{
+    size_t result = 0;
+    for(size_t i = 0; i < m_Blocks.size(); ++i)
+    {
+        if(m_Blocks[i]->m_HasNonMovableAllocations)
+        {
+            ++result;
+        }
+    }
+    return result;
+}
+
+VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
+    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
+    VkDeviceSize maxBytesToMove,
+    uint32_t maxAllocationsToMove)
+{
+    if(!m_AllAllocations && m_AllocationCount == 0)
+    {
+        return VK_SUCCESS;
+    }
+
+    const size_t blockCount = m_Blocks.size();
+    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
+    {
+        BlockInfo* pBlockInfo = m_Blocks[blockIndex];
+
+        if(m_AllAllocations)
+        {
+            VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
+            for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
+                it != pMetadata->m_Suballocations.end();
+                ++it)
+            {
+                if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
+                {
+                    AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
+                    pBlockInfo->m_Allocations.push_back(allocInfo);
+                }
+            }
+        }
+
+        pBlockInfo->CalcHasNonMovableAllocations();
+        
+        // This is a choice based on research.
+        // Option 1:
+        pBlockInfo->SortAllocationsByOffsetDescending();
+        // Option 2:
+        //pBlockInfo->SortAllocationsBySizeDescending();
+    }
+
+    // Sort m_Blocks this time by the main criterion, from most "destination" to most "source" blocks.
+    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
+
+    // This is a choice based on research.
+    const uint32_t roundCount = 2;
+
+    // Execute defragmentation rounds (the main part).
+    VkResult result = VK_SUCCESS;
+    for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
+    {
+        result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
+    }
+
+    return result;
+}
+
+bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
+        size_t dstBlockIndex, VkDeviceSize dstOffset,
+        size_t srcBlockIndex, VkDeviceSize srcOffset)
+{
+    if(dstBlockIndex < srcBlockIndex)
+    {
+        return true;
+    }
+    if(dstBlockIndex > srcBlockIndex)
+    {
+        return false;
+    }
+    if(dstOffset < srcOffset)
+    {
+        return true;
+    }
+    return false;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// VmaDefragmentationAlgorithm_Fast
+
+VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
+    VmaAllocator hAllocator,
+    VmaBlockVector* pBlockVector,
+    uint32_t currentFrameIndex,
+    bool overlappingMoveSupported) :
+    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
+    m_OverlappingMoveSupported(overlappingMoveSupported),
+    m_AllocationCount(0),
+    m_AllAllocations(false),
+    m_BytesMoved(0),
+    m_AllocationsMoved(0),
+    m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
+{
+    VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
+
+}
+
+VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
+{
+}
+
+VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
+    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
+    VkDeviceSize maxBytesToMove,
+    uint32_t maxAllocationsToMove)
+{
+    VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
+
+    const size_t blockCount = m_pBlockVector->GetBlockCount();
+    if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
+    {
+        return VK_SUCCESS;
+    }
+
+    PreprocessMetadata();
+
+    // Sort blocks in order from most "destination" to most "source" (ascending sum of free space).
+
+    m_BlockInfos.resize(blockCount);
+    for(size_t i = 0; i < blockCount; ++i)
+    {
+        m_BlockInfos[i].origBlockIndex = i;
+    }
+
+    VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [this](const BlockInfo& lhs, const BlockInfo& rhs) -> bool {
+        return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
+            m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
+    });
+
+    // THE MAIN ALGORITHM
+
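+    // Records free ranges left behind in earlier ("destination") blocks so that later
+    // source allocations can be packed into them.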
+    FreeSpaceDatabase freeSpaceDb;
+
+    size_t dstBlockInfoIndex = 0;
+    size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
+    VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
+    VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
+    VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
+    VkDeviceSize dstOffset = 0;
+
+    bool end = false;
+    for(size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
+    {
+        const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
+        VmaDeviceMemoryBlock* const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
+        VmaBlockMetadata_Generic* const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
+        for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
+            !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
+        {
+            VmaAllocation_T* const pAlloc = srcSuballocIt->hAllocation;
+            const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
+            const VkDeviceSize srcAllocSize = srcSuballocIt->size;
+            if(m_AllocationsMoved == maxAllocationsToMove ||
+                m_BytesMoved + srcAllocSize > maxBytesToMove)
+            {
+                end = true;
+                break;
+            }
+            const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
+
+            // Try to place it in one of the free spaces recorded in the database.
+            size_t freeSpaceInfoIndex;
+            VkDeviceSize dstAllocOffset;
+            if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
+                freeSpaceInfoIndex, dstAllocOffset))
+            {
+                size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
+                VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
+                VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
+                VkDeviceSize freeSpaceBlockSize = pFreeSpaceMetadata->GetSize();
+
+                // Same block
+                if(freeSpaceInfoIndex == srcBlockInfoIndex)
+                {
+                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
+
+                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.
+
+                    VmaSuballocation suballoc = *srcSuballocIt;
+                    suballoc.offset = dstAllocOffset;
+                    suballoc.hAllocation->ChangeOffset(dstAllocOffset);
+                    m_BytesMoved += srcAllocSize;
+                    ++m_AllocationsMoved;
+                    
+                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
+                    ++nextSuballocIt;
+                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
+                    srcSuballocIt = nextSuballocIt;
+
+                    InsertSuballoc(pFreeSpaceMetadata, suballoc);
+
+                    VmaDefragmentationMove move = {
+                        srcOrigBlockIndex, freeSpaceOrigBlockIndex,
+                        srcAllocOffset, dstAllocOffset,
+                        srcAllocSize };
+                    moves.push_back(move);
+                }
+                // Different block
+                else
+                {
+                    // MOVE OPTION 2: Move the allocation to a different block.
+
+                    VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
+
+                    VmaSuballocation suballoc = *srcSuballocIt;
+                    suballoc.offset = dstAllocOffset;
+                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
+                    m_BytesMoved += srcAllocSize;
+                    ++m_AllocationsMoved;
+
+                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
+                    ++nextSuballocIt;
+                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
+                    srcSuballocIt = nextSuballocIt;
+
+                    InsertSuballoc(pFreeSpaceMetadata, suballoc);
+
+                    VmaDefragmentationMove move = {
+                        srcOrigBlockIndex, freeSpaceOrigBlockIndex,
+                        srcAllocOffset, dstAllocOffset,
+                        srcAllocSize };
+                    moves.push_back(move);
+                }
+            }
+            else
+            {
+                dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
+
+                // If the allocation doesn't fit before the end of dstBlock, advance to the next block.
+                while(dstBlockInfoIndex < srcBlockInfoIndex &&
+                    dstAllocOffset + srcAllocSize > dstBlockSize)
+                {
+                    // But before that, register remaining free space at the end of dst block.
+                    freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
+
+                    ++dstBlockInfoIndex;
+                    dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
+                    pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
+                    pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
+                    dstBlockSize = pDstMetadata->GetSize();
+                    dstOffset = 0;
+                    dstAllocOffset = 0;
+                }
+
+                // Same block
+                if(dstBlockInfoIndex == srcBlockInfoIndex)
+                {
+                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
+
+                    const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
+
+                    bool skipOver = overlap;
+                    if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
+                    {
+                        // If destination and source regions overlap, skip the move if it would
+                        // shift the allocation by less than 1/64 of its size.
+                        skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
+                    }
+
+                    if(skipOver)
+                    {
+                        freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
+
+                        dstOffset = srcAllocOffset + srcAllocSize;
+                        ++srcSuballocIt;
+                    }
+                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.
+                    else
+                    {
+                        srcSuballocIt->offset = dstAllocOffset;
+                        srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
+                        dstOffset = dstAllocOffset + srcAllocSize;
+                        m_BytesMoved += srcAllocSize;
+                        ++m_AllocationsMoved;
+                        ++srcSuballocIt;
+                        VmaDefragmentationMove move = {
+                            srcOrigBlockIndex, dstOrigBlockIndex,
+                            srcAllocOffset, dstAllocOffset,
+                            srcAllocSize };
+                        moves.push_back(move);
+                    }
+                }
+                // Different block
+                else
+                {
+                    // MOVE OPTION 2: Move the allocation to a different block.
+
+                    VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
+                    VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
+
+                    VmaSuballocation suballoc = *srcSuballocIt;
+                    suballoc.offset = dstAllocOffset;
+                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
+                    dstOffset = dstAllocOffset + srcAllocSize;
+                    m_BytesMoved += srcAllocSize;
+                    ++m_AllocationsMoved;
+
+                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
+                    ++nextSuballocIt;
+                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
+                    srcSuballocIt = nextSuballocIt;
+
+                    pDstMetadata->m_Suballocations.push_back(suballoc);
+
+                    VmaDefragmentationMove move = {
+                        srcOrigBlockIndex, dstOrigBlockIndex,
+                        srcAllocOffset, dstAllocOffset,
+                        srcAllocSize };
+                    moves.push_back(move);
+                }
+            }
+        }
+    }
+
+    m_BlockInfos.clear();
+    
+    PostprocessMetadata();
+
+    return VK_SUCCESS;
+}
+
+void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
+{
+    const size_t blockCount = m_pBlockVector->GetBlockCount();
+    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
+    {
+        VmaBlockMetadata_Generic* const pMetadata =
+            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
+        pMetadata->m_FreeCount = 0;
+        pMetadata->m_SumFreeSize = pMetadata->GetSize();
+        pMetadata->m_FreeSuballocationsBySize.clear();
+        for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
+            it != pMetadata->m_Suballocations.end(); )
+        {
+            if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
+            {
+                VmaSuballocationList::iterator nextIt = it;
+                ++nextIt;
+                pMetadata->m_Suballocations.erase(it);
+                it = nextIt;
+            }
+            else
+            {
+                ++it;
+            }
+        }
+    }
+}
+
+void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
+{
+    const size_t blockCount = m_pBlockVector->GetBlockCount();
+    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
+    {
+        VmaBlockMetadata_Generic* const pMetadata =
+            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
+        const VkDeviceSize blockSize = pMetadata->GetSize();
+        
+        // No allocations in this block - entire area is free.
+        if(pMetadata->m_Suballocations.empty())
+        {
+            pMetadata->m_FreeCount = 1;
+            //pMetadata->m_SumFreeSize is already set to blockSize.
+            VmaSuballocation suballoc = {
+                0, // offset
+                blockSize, // size
+                VMA_NULL, // hAllocation
+                VMA_SUBALLOCATION_TYPE_FREE };
+            pMetadata->m_Suballocations.push_back(suballoc);
+            pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
+        }
+        // There are some allocations in this block.
+        else
+        {
+            VkDeviceSize offset = 0;
+            VmaSuballocationList::iterator it;
+            for(it = pMetadata->m_Suballocations.begin();
+                it != pMetadata->m_Suballocations.end();
+                ++it)
+            {
+                VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
+                VMA_ASSERT(it->offset >= offset);
+
+                // Need to insert preceding free space.
+                if(it->offset > offset)
+                {
+                    ++pMetadata->m_FreeCount;
+                    const VkDeviceSize freeSize = it->offset - offset;
+                    VmaSuballocation suballoc = {
+                        offset, // offset
+                        freeSize, // size
+                        VMA_NULL, // hAllocation
+                        VMA_SUBALLOCATION_TYPE_FREE };
+                    VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
+                    if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
+                    {
+                        pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
+                    }
+                }
+
+                pMetadata->m_SumFreeSize -= it->size;
+                offset = it->offset + it->size;
+            }
+
+            // Need to insert trailing free space.
+            if(offset < blockSize)
+            {
+                ++pMetadata->m_FreeCount;
+                const VkDeviceSize freeSize = blockSize - offset;
+                VmaSuballocation suballoc = {
+                    offset, // offset
+                    freeSize, // size
+                    VMA_NULL, // hAllocation
+                    VMA_SUBALLOCATION_TYPE_FREE };
+                VMA_ASSERT(it == pMetadata->m_Suballocations.end());
+                VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
+                if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
+                {
+                    pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
+                }
+            }
+
+            VMA_SORT(
+                pMetadata->m_FreeSuballocationsBySize.begin(),
+                pMetadata->m_FreeSuballocationsBySize.end(),
+                VmaSuballocationItemSizeLess());
+        }
+
+        VMA_HEAVY_ASSERT(pMetadata->Validate());
+    }
+}
+
+void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc)
+{
+    // TODO: Optimize somehow. Remember iterator instead of searching for it linearly.
+    VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
+    // Advance to the first suballocation whose offset is not less than suballoc.offset.
+    while(it != pMetadata->m_Suballocations.end() &&
+        it->offset < suballoc.offset)
+    {
+        ++it;
+    }
+    pMetadata->m_Suballocations.insert(it, suballoc);
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// VmaBlockVectorDefragmentationContext
+
+VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
+    VmaAllocator hAllocator,
+    VmaPool hCustomPool,
+    VmaBlockVector* pBlockVector,
+    uint32_t currFrameIndex,
+    uint32_t algorithmFlags) :
+    res(VK_SUCCESS),
+    mutexLocked(false),
+    blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
+    m_hAllocator(hAllocator),
+    m_hCustomPool(hCustomPool),
+    m_pBlockVector(pBlockVector),
+    m_CurrFrameIndex(currFrameIndex),
+    //m_AlgorithmFlags(algorithmFlags),
+    m_pAlgorithm(VMA_NULL),
+    m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
+    m_AllAllocations(false)
+{
+}
+
+VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
+{
+    vma_delete(m_hAllocator, m_pAlgorithm);
+}
+
+void VmaBlockVectorDefragmentationContext::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
+{
+    AllocInfo info = { hAlloc, pChanged };
+    m_Allocations.push_back(info);
+}
+
+void VmaBlockVectorDefragmentationContext::Begin(bool overlappingMoveSupported)
+{
+    const bool allAllocations = m_AllAllocations ||
+        m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
+
+    /********************************
+    HERE IS THE CHOICE OF DEFRAGMENTATION ALGORITHM.
+    ********************************/
+
+    /*
+    Fast algorithm is supported only when certain criteria are met:
+    - VMA_DEBUG_MARGIN is 0.
+    - All allocations in this block vector are moveable.
+    - There is no possibility of image/buffer granularity conflict.
+    */
+    if(VMA_DEBUG_MARGIN == 0 &&
+        allAllocations &&
+        !m_pBlockVector->IsBufferImageGranularityConflictPossible())
+    {
+        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
+            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
+    }
+    else
+    {
+        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
+            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
+    }
+
+    if(allAllocations)
+    {
+        m_pAlgorithm->AddAll();
+    }
+    else
+    {
+        for(size_t i = 0, count = m_Allocations.size(); i < count; ++i)
+        {
+            m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
+        }
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// VmaDefragmentationContext
+
+VmaDefragmentationContext_T::VmaDefragmentationContext_T(
+    VmaAllocator hAllocator,
+    uint32_t currFrameIndex,
+    uint32_t flags,
+    VmaDefragmentationStats* pStats) :
+    m_hAllocator(hAllocator),
+    m_CurrFrameIndex(currFrameIndex),
+    m_Flags(flags),
+    m_pStats(pStats),
+    m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
+{
+    memset(m_DefaultPoolContexts, 0, sizeof(m_DefaultPoolContexts));
+}
+
+VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
+{
+    for(size_t i = m_CustomPoolContexts.size(); i--; )
+    {
+        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
+        pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
+        vma_delete(m_hAllocator, pBlockVectorCtx);
+    }
+    for(size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
+    {
+        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
+        if(pBlockVectorCtx)
+        {
+            pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
+            vma_delete(m_hAllocator, pBlockVectorCtx);
+        }
+    }
+}
+
+void VmaDefragmentationContext_T::AddPools(uint32_t poolCount, VmaPool* pPools)
+{
+    for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
+    {
+        VmaPool pool = pPools[poolIndex];
+        VMA_ASSERT(pool);
+        // Pools with algorithm other than default are not defragmented.
+        if(pool->m_BlockVector.GetAlgorithm() == 0)
+        {
+            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
+            
+            for(size_t i = m_CustomPoolContexts.size(); i--; )
+            {
+                if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
+                {
+                    pBlockVectorDefragCtx = m_CustomPoolContexts[i];
+                    break;
+                }
+            }
+            
+            if(!pBlockVectorDefragCtx)
+            {
+                pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
+                    m_hAllocator,
+                    pool,
+                    &pool->m_BlockVector,
+                    m_CurrFrameIndex,
+                    m_Flags);
+                m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
+            }
+
+            pBlockVectorDefragCtx->AddAll();
+        }
+    }
+}
+
+void VmaDefragmentationContext_T::AddAllocations(
+    uint32_t allocationCount,
+    VmaAllocation* pAllocations,
+    VkBool32* pAllocationsChanged)
+{
+    // Dispatch pAllocations among per-block-vector defragmentation contexts. Create them when necessary.
+    for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
+    {
+        const VmaAllocation hAlloc = pAllocations[allocIndex];
+        VMA_ASSERT(hAlloc);
+        // DedicatedAlloc cannot be defragmented.
+        if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
+            // Lost allocation cannot be defragmented.
+            (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
+        {
+            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
+
+            const VmaPool hAllocPool = hAlloc->GetPool();
+            // This allocation belongs to a custom pool.
+            if(hAllocPool != VK_NULL_HANDLE)
+            {
+                // Pools with algorithm other than default are not defragmented.
+                if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
+                {
+                    for(size_t i = m_CustomPoolContexts.size(); i--; )
+                    {
+                        if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
+                        {
+                            pBlockVectorDefragCtx = m_CustomPoolContexts[i];
+                            break;
+                        }
+                    }
+                    if(!pBlockVectorDefragCtx)
+                    {
+                        pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
+                            m_hAllocator,
+                            hAllocPool,
+                            &hAllocPool->m_BlockVector,
+                            m_CurrFrameIndex,
+                            m_Flags);
+                        m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
+                    }
+                }
+            }
+            // This allocation belongs to the default pool.
+            else
+            {
+                const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
+                pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
+                if(!pBlockVectorDefragCtx)
+                {
+                    pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
+                        m_hAllocator,
+                        VMA_NULL, // hCustomPool
+                        m_hAllocator->m_pBlockVectors[memTypeIndex],
+                        m_CurrFrameIndex,
+                        m_Flags);
+                    m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
+                }
+            }
+
+            if(pBlockVectorDefragCtx)
+            {
+                VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
+                    &pAllocationsChanged[allocIndex] : VMA_NULL;
+                pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
+            }
+        }
+    }
+}
+
+VkResult VmaDefragmentationContext_T::Defragment(
+    VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
+    VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
+    VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats)
+{
+    if(pStats)
+    {
+        memset(pStats, 0, sizeof(VmaDefragmentationStats));
+    }
+
+    if(commandBuffer == VK_NULL_HANDLE)
+    {
+        maxGpuBytesToMove = 0;
+        maxGpuAllocationsToMove = 0;
+    }
+
+    VkResult res = VK_SUCCESS;
+
+    // Process default pools.
+    for(uint32_t memTypeIndex = 0;
+        memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
+        ++memTypeIndex)
+    {
+        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
+        if(pBlockVectorCtx)
+        {
+            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
+            pBlockVectorCtx->GetBlockVector()->Defragment(
+                pBlockVectorCtx,
+                pStats,
+                maxCpuBytesToMove, maxCpuAllocationsToMove,
+                maxGpuBytesToMove, maxGpuAllocationsToMove,
+                commandBuffer);
+            if(pBlockVectorCtx->res != VK_SUCCESS)
+            {
+                res = pBlockVectorCtx->res;
+            }
+        }
+    }
+
+    // Process custom pools.
+    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
+        customCtxIndex < customCtxCount && res >= VK_SUCCESS;
+        ++customCtxIndex)
+    {
+        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
+        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
+        pBlockVectorCtx->GetBlockVector()->Defragment(
+            pBlockVectorCtx,
+            pStats,
+            maxCpuBytesToMove, maxCpuAllocationsToMove,
+            maxGpuBytesToMove, maxGpuAllocationsToMove,
+            commandBuffer);
+        if(pBlockVectorCtx->res != VK_SUCCESS)
+        {
+            res = pBlockVectorCtx->res;
+        }
+    }
+
+    return res;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// VmaRecorder
+
+#if VMA_RECORDING_ENABLED
+
+VmaRecorder::VmaRecorder() :
+    m_UseMutex(true),
+    m_Flags(0),
+    m_File(VMA_NULL),
+    m_Freq(INT64_MAX),
+    m_StartCounter(INT64_MAX)
+{
+}
+
+VkResult VmaRecorder::Init(const VmaRecordSettings& settings, bool useMutex)
+{
+    m_UseMutex = useMutex;
+    m_Flags = settings.flags;
+
+    QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
+    QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
+
+    // Open file for writing.
+    errno_t err = fopen_s(&m_File, settings.pFilePath, "wb");
+    if(err != 0)
+    {
+        return VK_ERROR_INITIALIZATION_FAILED;
+    }
+
+    // Write header.
+    fprintf(m_File, "%s\n", "Vulkan Memory Allocator,Calls recording");
+    fprintf(m_File, "%s\n", "1,5");
+
+    return VK_SUCCESS;
+}
+
+VmaRecorder::~VmaRecorder()
+{
+    if(m_File != VMA_NULL)
+    {
+        fclose(m_File);
+    }
+}
+
+void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
+{
+    CallParams callParams;
+    GetBasicParams(callParams);
+
+    VmaMutexLock lock(m_FileMutex, m_UseMutex);
+    fprintf(m_File, "%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
+    Flush();
+}
+
+void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
+{
+    CallParams callParams;
+    GetBasicParams(callParams);
+
+    VmaMutexLock lock(m_FileMutex, m_UseMutex);
+    fprintf(m_File, "%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
+    Flush();
+}
+
+void VmaRecorder::RecordCreatePool(uint32_t frameIndex, const VmaPoolCreateInfo& createInfo, VmaPool pool)
+{
+    CallParams callParams;
+    GetBasicParams(callParams);
+
+    VmaMutexLock lock(m_FileMutex, m_UseMutex);
+    fprintf(m_File, "%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
+        createInfo.memoryTypeIndex,
+        createInfo.flags,
+        createInfo.blockSize,
+        (uint64_t)createInfo.minBlockCount,
+        (uint64_t)createInfo.maxBlockCount,
+        createInfo.frameInUseCount,
+        pool);
+    Flush();
+}
+
+void VmaRecorder::RecordDestroyPool(uint32_t frameIndex, VmaPool pool)
+{
+    CallParams callParams;
+    GetBasicParams(callParams);
+
+    VmaMutexLock lock(m_FileMutex, m_UseMutex);
+    fprintf(m_File, "%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
+        pool);
+    Flush();
+}
+
+void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
+        const VkMemoryRequirements& vkMemReq,
+        const VmaAllocationCreateInfo& createInfo,
+        VmaAllocation allocation)
+{
+    CallParams callParams;
+    GetBasicParams(callParams);
+
+    VmaMutexLock lock(m_FileMutex, m_UseMutex);
+    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
+    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
+        vkMemReq.size,
+        vkMemReq.alignment,
+        vkMemReq.memoryTypeBits,
+        createInfo.flags,
+        createInfo.usage,
+        createInfo.requiredFlags,
+        createInfo.preferredFlags,
+        createInfo.memoryTypeBits,
+        createInfo.pool,
+        allocation,
+        userDataStr.GetString());
+    Flush();
+}
+
+void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
+    const VkMemoryRequirements& vkMemReq,
+    const VmaAllocationCreateInfo& createInfo,
+    uint64_t allocationCount,
+    const VmaAllocation* pAllocations)
+{
+    CallParams callParams;
+    GetBasicParams(callParams);
+
+    VmaMutexLock lock(m_FileMutex, m_UseMutex);
+    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
+    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
+        vkMemReq.size,
+        vkMemReq.alignment,
+        vkMemReq.memoryTypeBits,
+        createInfo.flags,
+        createInfo.usage,
+        createInfo.requiredFlags,
+        createInfo.preferredFlags,
+        createInfo.memoryTypeBits,
+        createInfo.pool);
+    PrintPointerList(allocationCount, pAllocations);
+    fprintf(m_File, ",%s\n", userDataStr.GetString());
+    Flush();
+}
+
+void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
+    const VkMemoryRequirements& vkMemReq,
+    bool requiresDedicatedAllocation,
+    bool prefersDedicatedAllocation,
+    const VmaAllocationCreateInfo& createInfo,
+    VmaAllocation allocation)
+{
+    CallParams callParams;
+    GetBasicParams(callParams);
+
+    VmaMutexLock lock(m_FileMutex, m_UseMutex);
+    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
+    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
+        vkMemReq.size,
+        vkMemReq.alignment,
+        vkMemReq.memoryTypeBits,
+        requiresDedicatedAllocation ? 1 : 0,
+        prefersDedicatedAllocation ? 1 : 0,
+        createInfo.flags,
+        createInfo.usage,
+        createInfo.requiredFlags,
+        createInfo.preferredFlags,
+        createInfo.memoryTypeBits,
+        createInfo.pool,
+        allocation,
+        userDataStr.GetString());
+    Flush();
+}
+
+void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
+    const VkMemoryRequirements& vkMemReq,
+    bool requiresDedicatedAllocation,
+    bool prefersDedicatedAllocation,
+    const VmaAllocationCreateInfo& createInfo,
+    VmaAllocation allocation)
+{
+    CallParams callParams;
+    GetBasicParams(callParams);
+
+    VmaMutexLock lock(m_FileMutex, m_UseMutex);
+    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
+    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
+        vkMemReq.size,
+        vkMemReq.alignment,
+        vkMemReq.memoryTypeBits,
+        requiresDedicatedAllocation ? 1 : 0,
+        prefersDedicatedAllocation ? 1 : 0,
+        createInfo.flags,
+        createInfo.usage,
+        createInfo.requiredFlags,
+        createInfo.preferredFlags,
+        createInfo.memoryTypeBits,
+        createInfo.pool,
+        allocation,
+        userDataStr.GetString());
+    Flush();
+}
+
+void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
+    VmaAllocation allocation)
+{
+    CallParams callParams;
+    GetBasicParams(callParams);
+
+    VmaMutexLock lock(m_FileMutex, m_UseMutex);
+    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
+        allocation);
+    Flush();
+}
+
+void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
+    uint64_t allocationCount,
+    const VmaAllocation* pAllocations)
+{
+    CallParams callParams;
+    GetBasicParams(callParams);
+
+    VmaMutexLock lock(m_FileMutex, m_UseMutex);
+    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
+    PrintPointerList(allocationCount, pAllocations);
+    fprintf(m_File, "\n");
+    Flush();
+}
+
+void VmaRecorder::RecordResizeAllocation(
+    uint32_t frameIndex,
+    VmaAllocation allocation,
+    VkDeviceSize newSize)
+{
+    CallParams callParams;
+    GetBasicParams(callParams);
+
+    VmaMutexLock lock(m_FileMutex, m_UseMutex);
+    fprintf(m_File, "%u,%.3f,%u,vmaResizeAllocation,%p,%llu\n", callParams.threadId, callParams.time, frameIndex,
+        allocation, newSize);
+    Flush();
+}
+
+void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
+    VmaAllocation allocation,
+    const void* pUserData)
+{
+    CallParams callParams;
+    GetBasicParams(callParams);
+
+    VmaMutexLock lock(m_FileMutex, m_UseMutex);
+    UserDataString userDataStr(
+        allocation->IsUserDataString() ? VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT : 0,
+        pUserData);
+    fprintf(m_File, "%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
+        allocation,
+        userDataStr.GetString());
+    Flush();
+}
+
+void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
+    VmaAllocation allocation)
+{
+    CallParams callParams;
+    GetBasicParams(callParams);
+
+    VmaMutexLock lock(m_FileMutex, m_UseMutex);
+    fprintf(m_File, "%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
+        allocation);
+    Flush();
+}
+
+void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
+    VmaAllocation allocation)
+{
+    CallParams callParams;
+    GetBasicParams(callParams);
+
+    VmaMutexLock lock(m_FileMutex, m_UseMutex);
+    fprintf(m_File, "%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
+        allocation);
+    Flush();
+}
+
+void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
+    VmaAllocation allocation)
+{
+    CallParams callParams;
+    GetBasicParams(callParams);
+
+    VmaMutexLock lock(m_FileMutex, m_UseMutex);
+    fprintf(m_File, "%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
+        allocation);
+    Flush();
+}
+
+void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
+    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
+{
+    CallParams callParams;
+    GetBasicParams(callParams);
+
+    VmaMutexLock lock(m_FileMutex, m_UseMutex);
+    fprintf(m_File, "%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
+        allocation,
+        offset,
+        size);
+    Flush();
+}
+
+void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
+    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
+{
+    CallParams callParams;
+    GetBasicParams(callParams);
+
+    VmaMutexLock lock(m_FileMutex, m_UseMutex);
+    fprintf(m_File, "%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
+        allocation,
+        offset,
+        size);
+    Flush();
+}
+
+void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
+    const VkBufferCreateInfo& bufCreateInfo,
+    const VmaAllocationCreateInfo& allocCreateInfo,
+    VmaAllocation allocation)
+{
+    CallParams callParams;
+    GetBasicParams(callParams);
+
+    VmaMutexLock lock(m_FileMutex, m_UseMutex);
+    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
+    fprintf(m_File, "%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
+        bufCreateInfo.flags,
+        bufCreateInfo.size,
+        bufCreateInfo.usage,
+        bufCreateInfo.sharingMode,
+        allocCreateInfo.flags,
+        allocCreateInfo.usage,
+        allocCreateInfo.requiredFlags,
+        allocCreateInfo.preferredFlags,
+        allocCreateInfo.memoryTypeBits,
+        allocCreateInfo.pool,
+        allocation,
+        userDataStr.GetString());
+    Flush();
+}
+
+void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
+    const VkImageCreateInfo& imageCreateInfo,
+    const VmaAllocationCreateInfo& allocCreateInfo,
+    VmaAllocation allocation)
+{
+    CallParams callParams;
+    GetBasicParams(callParams);
+
+    VmaMutexLock lock(m_FileMutex, m_UseMutex);
+    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
+    fprintf(m_File, "%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
+        imageCreateInfo.flags,
+        imageCreateInfo.imageType,
+        imageCreateInfo.format,
+        imageCreateInfo.extent.width,
+        imageCreateInfo.extent.height,
+        imageCreateInfo.extent.depth,
+        imageCreateInfo.mipLevels,
+        imageCreateInfo.arrayLayers,
+        imageCreateInfo.samples,
+        imageCreateInfo.tiling,
+        imageCreateInfo.usage,
+        imageCreateInfo.sharingMode,
+        imageCreateInfo.initialLayout,
+        allocCreateInfo.flags,
+        allocCreateInfo.usage,
+        allocCreateInfo.requiredFlags,
+        allocCreateInfo.preferredFlags,
+        allocCreateInfo.memoryTypeBits,
+        allocCreateInfo.pool,
+        allocation,
+        userDataStr.GetString());
+    Flush();
+}
+
+void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
+    VmaAllocation allocation)
+{
+    CallParams callParams;
+    GetBasicParams(callParams);
+
+    VmaMutexLock lock(m_FileMutex, m_UseMutex);
+    fprintf(m_File, "%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
+        allocation);
+    Flush();
+}
+
+void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
+    VmaAllocation allocation)
+{
+    CallParams callParams;
+    GetBasicParams(callParams);
+
+    VmaMutexLock lock(m_FileMutex, m_UseMutex);
+    fprintf(m_File, "%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
+        allocation);
+    Flush();
+}
+
+void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
+    VmaAllocation allocation)
+{
+    CallParams callParams;
+    GetBasicParams(callParams);
+
+    VmaMutexLock lock(m_FileMutex, m_UseMutex);
+    fprintf(m_File, "%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
+        allocation);
+    Flush();
+}
+
+void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
+    VmaAllocation allocation)
+{
+    CallParams callParams;
+    GetBasicParams(callParams);
+
+    VmaMutexLock lock(m_FileMutex, m_UseMutex);
+    fprintf(m_File, "%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
+        allocation);
+    Flush();
+}
+
+void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
+    VmaPool pool)
+{
+    CallParams callParams;
+    GetBasicParams(callParams);
+
+    VmaMutexLock lock(m_FileMutex, m_UseMutex);
+    fprintf(m_File, "%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
+        pool);
+    Flush();
+}
+
+void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
+    const VmaDefragmentationInfo2& info,
+    VmaDefragmentationContext ctx)
+{
+    CallParams callParams;
+    GetBasicParams(callParams);
+
+    VmaMutexLock lock(m_FileMutex, m_UseMutex);
+    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
+        info.flags);
+    PrintPointerList(info.allocationCount, info.pAllocations);
+    fprintf(m_File, ",");
+    PrintPointerList(info.poolCount, info.pPools);
+    fprintf(m_File, ",%llu,%u,%llu,%u,%p,%p\n",
+        info.maxCpuBytesToMove,
+        info.maxCpuAllocationsToMove,
+        info.maxGpuBytesToMove,
+        info.maxGpuAllocationsToMove,
+        info.commandBuffer,
+        ctx);
+    Flush();
+}
+
+void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
+    VmaDefragmentationContext ctx)
+{
+    CallParams callParams;
+    GetBasicParams(callParams);
+
+    VmaMutexLock lock(m_FileMutex, m_UseMutex);
+    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
+        ctx);
+    Flush();
+}
+
+VmaRecorder::UserDataString::UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData)
+{
+    if(pUserData != VMA_NULL)
+    {
+        if((allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0)
+        {
+            m_Str = (const char*)pUserData;
+        }
+        else
+        {
+            sprintf_s(m_PtrStr, "%p", pUserData);
+            m_Str = m_PtrStr;
+        }
+    }
+    else
+    {
+        m_Str = "";
+    }
+}
+
+void VmaRecorder::WriteConfiguration(
+    const VkPhysicalDeviceProperties& devProps,
+    const VkPhysicalDeviceMemoryProperties& memProps,
+    bool dedicatedAllocationExtensionEnabled)
+{
+    fprintf(m_File, "Config,Begin\n");
+
+    fprintf(m_File, "PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
+    fprintf(m_File, "PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
+    fprintf(m_File, "PhysicalDevice,vendorID,%u\n", devProps.vendorID);
+    fprintf(m_File, "PhysicalDevice,deviceID,%u\n", devProps.deviceID);
+    fprintf(m_File, "PhysicalDevice,deviceType,%u\n", devProps.deviceType);
+    fprintf(m_File, "PhysicalDevice,deviceName,%s\n", devProps.deviceName);
+
+    fprintf(m_File, "PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
+    fprintf(m_File, "PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
+    fprintf(m_File, "PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
+
+    fprintf(m_File, "PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
+    for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
+    {
+        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
+        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
+    }
+    fprintf(m_File, "PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
+    for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
+    {
+        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
+        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
+    }
+
+    fprintf(m_File, "Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
+
+    fprintf(m_File, "Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
+    fprintf(m_File, "Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
+    fprintf(m_File, "Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
+    fprintf(m_File, "Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
+    fprintf(m_File, "Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
+    fprintf(m_File, "Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
+    fprintf(m_File, "Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
+    fprintf(m_File, "Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
+    fprintf(m_File, "Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
+
+    fprintf(m_File, "Config,End\n");
+}
+
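+// Note: GetCurrentThreadId and QueryPerformanceCounter used below are Win32 APIs, so the
+// recording support in this file is effectively Windows-only as written.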
+void VmaRecorder::GetBasicParams(CallParams& outParams)
+{
+    outParams.threadId = GetCurrentThreadId();
+
+    LARGE_INTEGER counter;
+    QueryPerformanceCounter(&counter);
+    outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
+}
+
+void VmaRecorder::PrintPointerList(uint64_t count, const VmaAllocation* pItems)
+{
+    if(count)
+    {
+        fprintf(m_File, "%p", pItems[0]);
+        for(uint64_t i = 1; i < count; ++i)
+        {
+            fprintf(m_File, " %p", pItems[i]);
+        }
+    }
+}
+
+void VmaRecorder::Flush()
+{
+    if((m_Flags & VMA_RECORD_FLUSH_AFTER_CALL_BIT) != 0)
+    {
+        fflush(m_File);
+    }
+}
+
+#endif // #if VMA_RECORDING_ENABLED
+
+////////////////////////////////////////////////////////////////////////////////
+// VmaAllocator_T
+
+VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
+    m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0),
+    m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0),
+    m_hDevice(pCreateInfo->device),
+    m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
+    m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
+        *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
+    m_PreferredLargeHeapBlockSize(0),
+    m_PhysicalDevice(pCreateInfo->physicalDevice),
+    m_CurrentFrameIndex(0),
+    m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks())),
+    m_NextPoolId(0)
+#if VMA_RECORDING_ENABLED
+    ,m_pRecorder(VMA_NULL)
+#endif
+{
+    if(VMA_DEBUG_DETECT_CORRUPTION)
+    {
+        // Needs to be a multiple of uint32_t size because we are going to write VMA_CORRUPTION_DETECTION_MAGIC_VALUE to it.
+        VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0);
+    }
+
+    VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device);
+
+#if !(VMA_DEDICATED_ALLOCATION)
+    if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0)
+    {
+        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
+    }
+#endif
+
+    memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
+    memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
+    memset(&m_MemProps, 0, sizeof(m_MemProps));
+        
+    memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
+    memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
+
+    for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
+    {
+        m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
+    }
+
+    if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
+    {
+        m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate;
+        m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree;
+    }
+
+    ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);
+
+    (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
+    (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
+
+    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
+    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
+    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
+    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
+
+    m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ?
+        pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
+
+    if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
+    {
+        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
+        {
+            const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
+            if(limit != VK_WHOLE_SIZE)
+            {
+                m_HeapSizeLimit[heapIndex] = limit;
+                if(limit < m_MemProps.memoryHeaps[heapIndex].size)
+                {
+                    m_MemProps.memoryHeaps[heapIndex].size = limit;
+                }
+            }
+        }
+    }
+
+    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
+    {
+        const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
+
+        m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
+            this,
+            memTypeIndex,
+            preferredBlockSize,
+            0,
+            SIZE_MAX,
+            GetBufferImageGranularity(),
+            pCreateInfo->frameInUseCount,
+            false, // isCustomPool
+            false, // explicitBlockSize
+            false); // linearAlgorithm
+        // No need to call m_pBlockVectors[memTypeIndex]->CreateMinBlocks here,
+        // because minBlockCount is 0.
+        m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
+
+    }
+}
+
+VkResult VmaAllocator_T::Init(const VmaAllocatorCreateInfo* pCreateInfo)
+{
+    VkResult res = VK_SUCCESS;
+
+    if(pCreateInfo->pRecordSettings != VMA_NULL &&
+        !VmaStrIsEmpty(pCreateInfo->pRecordSettings->pFilePath))
+    {
+#if VMA_RECORDING_ENABLED
+        m_pRecorder = vma_new(this, VmaRecorder)();
+        res = m_pRecorder->Init(*pCreateInfo->pRecordSettings, m_UseMutex);
+        if(res != VK_SUCCESS)
+        {
+            return res;
+        }
+        m_pRecorder->WriteConfiguration(
+            m_PhysicalDeviceProperties,
+            m_MemProps,
+            m_UseKhrDedicatedAllocation);
+        m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
+#else
+        VMA_ASSERT(0 && "VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
+        return VK_ERROR_FEATURE_NOT_PRESENT;
+#endif
+    }
+
+    return res;
+}
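+
+// Illustrative only, not part of the imported library: a minimal sketch of how a client
+// could opt into the recording path handled by Init() above, assuming the public
+// vmaCreateAllocator entry point and the VmaRecordSettings fields used here (flags,
+// pFilePath). Requires VMA_RECORDING_ENABLED defined to 1; the file path is hypothetical.
+//
+//     VmaRecordSettings recordSettings = {};
+//     recordSettings.flags = VMA_RECORD_FLUSH_AFTER_CALL_BIT; // flush after every recorded call
+//     recordSettings.pFilePath = "vma_replay.csv";
+//
+//     VmaAllocatorCreateInfo allocatorInfo = {};
+//     allocatorInfo.physicalDevice = physicalDevice; // assumed to exist in caller code
+//     allocatorInfo.device = device;
+//     allocatorInfo.pRecordSettings = &recordSettings;
+//
+//     VmaAllocator allocator = VK_NULL_HANDLE;
+//     VkResult res = vmaCreateAllocator(&allocatorInfo, &allocator);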
+
+VmaAllocator_T::~VmaAllocator_T()
+{
+#if VMA_RECORDING_ENABLED
+    if(m_pRecorder != VMA_NULL)
+    {
+        m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
+        vma_delete(this, m_pRecorder);
+    }
+#endif
+    
+    VMA_ASSERT(m_Pools.empty());
+
+    for(size_t i = GetMemoryTypeCount(); i--; )
+    {
+        vma_delete(this, m_pDedicatedAllocations[i]);
+        vma_delete(this, m_pBlockVectors[i]);
+    }
+}
+
+void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
+{
+#if VMA_STATIC_VULKAN_FUNCTIONS == 1
+    m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
+    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
+    m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
+    m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
+    m_VulkanFunctions.vkMapMemory = &vkMapMemory;
+    m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
+    m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
+    m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
+    m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
+    m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
+    m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
+    m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
+    m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
+    m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
+    m_VulkanFunctions.vkCreateImage = &vkCreateImage;
+    m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
+    m_VulkanFunctions.vkCmdCopyBuffer = &vkCmdCopyBuffer;
+#if VMA_DEDICATED_ALLOCATION
+    if(m_UseKhrDedicatedAllocation)
+    {
+        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
+            (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2KHR");
+        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
+            (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2KHR");
+    }
+#endif // #if VMA_DEDICATED_ALLOCATION
+#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1
+
+#define VMA_COPY_IF_NOT_NULL(funcName) \
+    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;
+
+    if(pVulkanFunctions != VMA_NULL)
+    {
+        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
+        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
+        VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
+        VMA_COPY_IF_NOT_NULL(vkFreeMemory);
+        VMA_COPY_IF_NOT_NULL(vkMapMemory);
+        VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
+        VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
+        VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
+        VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
+        VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
+        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
+        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
+        VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
+        VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
+        VMA_COPY_IF_NOT_NULL(vkCreateImage);
+        VMA_COPY_IF_NOT_NULL(vkDestroyImage);
+        VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
+#if VMA_DEDICATED_ALLOCATION
+        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
+        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
+#endif
+    }
+
+#undef VMA_COPY_IF_NOT_NULL
+
+    // If these asserts are hit, you must either #define VMA_STATIC_VULKAN_FUNCTIONS 1
+    // or pass valid pointers as VmaAllocatorCreateInfo::pVulkanFunctions.
+    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
+    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
+    VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
+    VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
+    VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
+    VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
+    VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
+    VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
+    VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
+    VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
+    VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
+    VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
+    VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
+    VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
+    VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
+    VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
+    VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
+#if VMA_DEDICATED_ALLOCATION
+    if(m_UseKhrDedicatedAllocation)
+    {
+        VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
+        VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
+    }
+#endif
+}
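+
+// Illustrative only, not part of the imported library: when VMA_STATIC_VULKAN_FUNCTIONS is
+// defined to 0, the asserts above require the caller to hand in every entry point through
+// VmaAllocatorCreateInfo::pVulkanFunctions. A minimal sketch, assuming the pointers were
+// fetched elsewhere (the fetched* names are hypothetical):
+//
+//     VmaVulkanFunctions vulkanFunctions = {};
+//     vulkanFunctions.vkGetPhysicalDeviceProperties = fetchedGetPhysicalDeviceProperties;
+//     vulkanFunctions.vkGetPhysicalDeviceMemoryProperties = fetchedGetPhysicalDeviceMemoryProperties;
+//     vulkanFunctions.vkAllocateMemory = fetchedAllocateMemory;
+//     // ...fill every remaining member checked by the asserts above...
+//
+//     VmaAllocatorCreateInfo allocatorInfo = {};
+//     allocatorInfo.physicalDevice = physicalDevice;
+//     allocatorInfo.device = device;
+//     allocatorInfo.pVulkanFunctions = &vulkanFunctions;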
+
+VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
+{
+    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
+    const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
+    const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
+    return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
+}
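+
+// Worked example (illustrative comment only): assuming the default macro values of
+// VMA_SMALL_HEAP_MAX_SIZE (1 GiB) and VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 MiB), a
+// 256 MiB heap counts as "small" and gets a preferred block size of 256 MiB / 8 = 32 MiB,
+// while an 8 GiB device-local heap gets the full 256 MiB (or the nonzero value passed in
+// VmaAllocatorCreateInfo::preferredLargeHeapBlockSize).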
+
+VkResult VmaAllocator_T::AllocateMemoryOfType(
+    VkDeviceSize size,
+    VkDeviceSize alignment,
+    bool dedicatedAllocation,
+    VkBuffer dedicatedBuffer,
+    VkImage dedicatedImage,
+    const VmaAllocationCreateInfo& createInfo,
+    uint32_t memTypeIndex,
+    VmaSuballocationType suballocType,
+    size_t allocationCount,
+    VmaAllocation* pAllocations)
+{
+    VMA_ASSERT(pAllocations != VMA_NULL);
+    VMA_DEBUG_LOG("  AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, vkMemReq.size);
+
+    VmaAllocationCreateInfo finalCreateInfo = createInfo;
+
+    // If memory type is not HOST_VISIBLE, disable MAPPED.
+    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
+        (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
+    {
+        finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
+    }
+
+    VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
+    VMA_ASSERT(blockVector);
+
+    const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
+    bool preferDedicatedMemory =
+        VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
+        dedicatedAllocation ||
+        // Heuristics: Allocate dedicated memory if requested size is greater than half of preferred block size.
+        size > preferredBlockSize / 2;
+
+    if(preferDedicatedMemory &&
+        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
+        finalCreateInfo.pool == VK_NULL_HANDLE)
+    {
+        finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
+    }
+
+    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
+    {
+        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
+        {
+            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+        }
+        else
+        {
+            return AllocateDedicatedMemory(
+                size,
+                suballocType,
+                memTypeIndex,
+                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
+                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
+                finalCreateInfo.pUserData,
+                dedicatedBuffer,
+                dedicatedImage,
+                allocationCount,
+                pAllocations);
+        }
+    }
+    else
+    {
+        VkResult res = blockVector->Allocate(
+            VK_NULL_HANDLE, // hCurrentPool
+            m_CurrentFrameIndex.load(),
+            size,
+            alignment,
+            finalCreateInfo,
+            suballocType,
+            allocationCount,
+            pAllocations);
+        if(res == VK_SUCCESS)
+        {
+            return res;
+        }
+
+        // Block allocation failed: Try dedicated memory as a fallback.
+        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
+        {
+            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+        }
+        else
+        {
+            res = AllocateDedicatedMemory(
+                size,
+                suballocType,
+                memTypeIndex,
+                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
+                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
+                finalCreateInfo.pUserData,
+                dedicatedBuffer,
+                dedicatedImage,
+                allocationCount,
+                pAllocations);
+            if(res == VK_SUCCESS)
+            {
+                // Succeeded: AllocateDedicatedMemory function already filled pAllocations, nothing more to do here.
+                VMA_DEBUG_LOG("    Allocated as DedicatedMemory");
+                return VK_SUCCESS;
+            }
+            else
+            {
+                // Everything failed: Return error code.
+                VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
+                return res;
+            }
+        }
+    }
+}
+
+VkResult VmaAllocator_T::AllocateDedicatedMemory(
+    VkDeviceSize size,
+    VmaSuballocationType suballocType,
+    uint32_t memTypeIndex,
+    bool map,
+    bool isUserDataString,
+    void* pUserData,
+    VkBuffer dedicatedBuffer,
+    VkImage dedicatedImage,
+    size_t allocationCount,
+    VmaAllocation* pAllocations)
+{
+    VMA_ASSERT(allocationCount > 0 && pAllocations);
+
+    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
+    allocInfo.memoryTypeIndex = memTypeIndex;
+    allocInfo.allocationSize = size;
+
+#if VMA_DEDICATED_ALLOCATION
+    VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
+    if(m_UseKhrDedicatedAllocation)
+    {
+        if(dedicatedBuffer != VK_NULL_HANDLE)
+        {
+            VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
+            dedicatedAllocInfo.buffer = dedicatedBuffer;
+            allocInfo.pNext = &dedicatedAllocInfo;
+        }
+        else if(dedicatedImage != VK_NULL_HANDLE)
+        {
+            dedicatedAllocInfo.image = dedicatedImage;
+            allocInfo.pNext = &dedicatedAllocInfo;
+        }
+    }
+#endif // #if VMA_DEDICATED_ALLOCATION
+
+    size_t allocIndex;
+    VkResult res = VK_SUCCESS;
+    for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
+    {
+        res = AllocateDedicatedMemoryPage(
+            size,
+            suballocType,
+            memTypeIndex,
+            allocInfo,
+            map,
+            isUserDataString,
+            pUserData,
+            pAllocations + allocIndex);
+        if(res != VK_SUCCESS)
+        {
+            break;
+        }
+    }
+
+    if(res == VK_SUCCESS)
+    {
+        // Register them in m_pDedicatedAllocations.
+        {
+            VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
+            AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
+            VMA_ASSERT(pDedicatedAllocations);
+            for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
+            {
+                VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
+            }
+        }
+
+        VMA_DEBUG_LOG("    Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
+    }
+    else
+    {
+        // Free all already created allocations.
+        while(allocIndex--)
+        {
+            VmaAllocation currAlloc = pAllocations[allocIndex];
+            VkDeviceMemory hMemory = currAlloc->GetMemory();
+    
+            /*
+            There is no need to call this, because the Vulkan spec allows skipping vkUnmapMemory
+            before vkFreeMemory.
+
+            if(currAlloc->GetMappedData() != VMA_NULL)
+            {
+                (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
+            }
+            */
+    
+            FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
+
+            currAlloc->SetUserData(this, VMA_NULL);
+            vma_delete(this, currAlloc);
+        }
+
+        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
+    }
+
+    return res;
+}
+
+VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
+    VkDeviceSize size,
+    VmaSuballocationType suballocType,
+    uint32_t memTypeIndex,
+    const VkMemoryAllocateInfo& allocInfo,
+    bool map,
+    bool isUserDataString,
+    void* pUserData,
+    VmaAllocation* pAllocation)
+{
+    VkDeviceMemory hMemory = VK_NULL_HANDLE;
+    VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
+    if(res < 0)
+    {
+        VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
+        return res;
+    }
+
+    void* pMappedData = VMA_NULL;
+    if(map)
+    {
+        res = (*m_VulkanFunctions.vkMapMemory)(
+            m_hDevice,
+            hMemory,
+            0,
+            VK_WHOLE_SIZE,
+            0,
+            &pMappedData);
+        if(res < 0)
+        {
+            VMA_DEBUG_LOG("    vkMapMemory FAILED");
+            FreeVulkanMemory(memTypeIndex, size, hMemory);
+            return res;
+        }
+    }
+
+    *pAllocation = vma_new(this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
+    (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
+    (*pAllocation)->SetUserData(this, pUserData);
+    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
+    {
+        FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
+    }
+
+    return VK_SUCCESS;
+}
+
+void VmaAllocator_T::GetBufferMemoryRequirements(
+    VkBuffer hBuffer,
+    VkMemoryRequirements& memReq,
+    bool& requiresDedicatedAllocation,
+    bool& prefersDedicatedAllocation) const
+{
+#if VMA_DEDICATED_ALLOCATION
+    if(m_UseKhrDedicatedAllocation)
+    {
+        VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
+        memReqInfo.buffer = hBuffer;
+
+        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
+
+        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
+        memReq2.pNext = &memDedicatedReq;
+
+        (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
+
+        memReq = memReq2.memoryRequirements;
+        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
+        prefersDedicatedAllocation  = (memDedicatedReq.prefersDedicatedAllocation  != VK_FALSE);
+    }
+    else
+#endif // #if VMA_DEDICATED_ALLOCATION
+    {
+        (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
+        requiresDedicatedAllocation = false;
+        prefersDedicatedAllocation  = false;
+    }
+}
+
+void VmaAllocator_T::GetImageMemoryRequirements(
+    VkImage hImage,
+    VkMemoryRequirements& memReq,
+    bool& requiresDedicatedAllocation,
+    bool& prefersDedicatedAllocation) const
+{
+#if VMA_DEDICATED_ALLOCATION
+    if(m_UseKhrDedicatedAllocation)
+    {
+        VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
+        memReqInfo.image = hImage;
+
+        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
+
+        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
+        memReq2.pNext = &memDedicatedReq;
+
+        (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
+
+        memReq = memReq2.memoryRequirements;
+        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
+        prefersDedicatedAllocation  = (memDedicatedReq.prefersDedicatedAllocation  != VK_FALSE);
+    }
+    else
+#endif // #if VMA_DEDICATED_ALLOCATION
+    {
+        (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
+        requiresDedicatedAllocation = false;
+        prefersDedicatedAllocation  = false;
+    }
+}
+
+VkResult VmaAllocator_T::AllocateMemory(
+    const VkMemoryRequirements& vkMemReq,
+    bool requiresDedicatedAllocation,
+    bool prefersDedicatedAllocation,
+    VkBuffer dedicatedBuffer,
+    VkImage dedicatedImage,
+    const VmaAllocationCreateInfo& createInfo,
+    VmaSuballocationType suballocType,
+    size_t allocationCount,
+    VmaAllocation* pAllocations)
+{
+    memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
+
+    VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
+
+    if(vkMemReq.size == 0)
+    {
+        return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
+        (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
+    {
+        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
+        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+    }
+    if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
+        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0)
+    {
+        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
+        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+    }
+    if(requiresDedicatedAllocation)
+    {
+        if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
+        {
+            VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
+            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+        }
+        if(createInfo.pool != VK_NULL_HANDLE)
+        {
+            VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
+            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+        }
+    }
+    if((createInfo.pool != VK_NULL_HANDLE) &&
+        ((createInfo.flags & (VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT)) != 0))
+    {
+        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
+        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+    }
+
+    if(createInfo.pool != VK_NULL_HANDLE)
+    {
+        const VkDeviceSize alignmentForPool = VMA_MAX(
+            vkMemReq.alignment,
+            GetMemoryTypeMinAlignment(createInfo.pool->m_BlockVector.GetMemoryTypeIndex()));
+        return createInfo.pool->m_BlockVector.Allocate(
+            createInfo.pool,
+            m_CurrentFrameIndex.load(),
+            vkMemReq.size,
+            alignmentForPool,
+            createInfo,
+            suballocType,
+            allocationCount,
+            pAllocations);
+    }
+    else
+    {
+        // Bit mask of Vulkan memory types acceptable for this allocation.
+        uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
+        uint32_t memTypeIndex = UINT32_MAX;
+        VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
+        if(res == VK_SUCCESS)
+        {
+            VkDeviceSize alignmentForMemType = VMA_MAX(
+                vkMemReq.alignment,
+                GetMemoryTypeMinAlignment(memTypeIndex));
+
+            res = AllocateMemoryOfType(
+                vkMemReq.size,
+                alignmentForMemType,
+                requiresDedicatedAllocation || prefersDedicatedAllocation,
+                dedicatedBuffer,
+                dedicatedImage,
+                createInfo,
+                memTypeIndex,
+                suballocType,
+                allocationCount,
+                pAllocations);
+            // Succeeded on first try.
+            if(res == VK_SUCCESS)
+            {
+                return res;
+            }
+            // Allocation from this memory type failed. Try other compatible memory types.
+            else
+            {
+                for(;;)
+                {
+                    // Remove old memTypeIndex from list of possibilities.
+                    memoryTypeBits &= ~(1u << memTypeIndex);
+                    // Find alternative memTypeIndex.
+                    res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
+                    if(res == VK_SUCCESS)
+                    {
+                        alignmentForMemType = VMA_MAX(
+                            vkMemReq.alignment,
+                            GetMemoryTypeMinAlignment(memTypeIndex));
+                        
+                        res = AllocateMemoryOfType(
+                            vkMemReq.size,
+                            alignmentForMemType,
+                            requiresDedicatedAllocation || prefersDedicatedAllocation,
+                            dedicatedBuffer,
+                            dedicatedImage,
+                            createInfo,
+                            memTypeIndex,
+                            suballocType,
+                            allocationCount,
+                            pAllocations);
+                        // Allocation from this alternative memory type succeeded.
+                        if(res == VK_SUCCESS)
+                        {
+                            return res;
+                        }
+                        // else: Allocation from this memory type failed. Try next one - next loop iteration.
+                    }
+                    // No other matching memory type index could be found.
+                    else
+                    {
+                        // Not returning res, which is VK_ERROR_FEATURE_NOT_PRESENT, because we already failed to allocate once.
+                        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+                    }
+                }
+            }
+        }
+        // Can't find any single memory type matching requirements. res is VK_ERROR_FEATURE_NOT_PRESENT.
+        else
+            return res;
+    }
+}
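+
+// Illustrative only, not part of the imported library: the fallback loop above uses
+// vmaFindMemoryTypeIndex, the same public helper applications can call directly. A minimal
+// sketch for a host-visible staging use case, assuming memoryTypeBits came from
+// VkMemoryRequirements::memoryTypeBits:
+//
+//     VmaAllocationCreateInfo allocCreateInfo = {};
+//     allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
+//     uint32_t memTypeIndex = UINT32_MAX;
+//     VkResult res = vmaFindMemoryTypeIndex(allocator, memoryTypeBits, &allocCreateInfo, &memTypeIndex);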
+
+void VmaAllocator_T::FreeMemory(
+    size_t allocationCount,
+    const VmaAllocation* pAllocations)
+{
+    VMA_ASSERT(pAllocations);
+
+    for(size_t allocIndex = allocationCount; allocIndex--; )
+    {
+        VmaAllocation allocation = pAllocations[allocIndex];
+
+        if(allocation != VK_NULL_HANDLE)
+        {
+            if(TouchAllocation(allocation))
+            {
+                if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
+                {
+                    FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
+                }
+
+                switch(allocation->GetType())
+                {
+                case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
+                    {
+                        VmaBlockVector* pBlockVector = VMA_NULL;
+                        VmaPool hPool = allocation->GetPool();
+                        if(hPool != VK_NULL_HANDLE)
+                        {
+                            pBlockVector = &hPool->m_BlockVector;
+                        }
+                        else
+                        {
+                            const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
+                            pBlockVector = m_pBlockVectors[memTypeIndex];
+                        }
+                        pBlockVector->Free(allocation);
+                    }
+                    break;
+                case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
+                    FreeDedicatedMemory(allocation);
+                    break;
+                default:
+                    VMA_ASSERT(0);
+                }
+            }
+
+            allocation->SetUserData(this, VMA_NULL);
+            vma_delete(this, allocation);
+        }
+    }
+}
+
+VkResult VmaAllocator_T::ResizeAllocation(
+    const VmaAllocation alloc,
+    VkDeviceSize newSize)
+{
+    if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
+    {
+        return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    if(newSize == alloc->GetSize())
+    {
+        return VK_SUCCESS;
+    }
+
+    switch(alloc->GetType())
+    {
+    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
+        return VK_ERROR_FEATURE_NOT_PRESENT;
+    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
+        if(alloc->GetBlock()->m_pMetadata->ResizeAllocation(alloc, newSize))
+        {
+            alloc->ChangeSize(newSize);
+            VMA_HEAVY_ASSERT(alloc->GetBlock()->m_pMetadata->Validate());
+            return VK_SUCCESS;
+        }
+        else
+        {
+            return VK_ERROR_OUT_OF_POOL_MEMORY;
+        }
+    default:
+        VMA_ASSERT(0);
+        return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+}
+
+void VmaAllocator_T::CalculateStats(VmaStats* pStats)
+{
+    // Initialize.
+    InitStatInfo(pStats->total);
+    for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
+        InitStatInfo(pStats->memoryType[i]);
+    for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
+        InitStatInfo(pStats->memoryHeap[i]);
+    
+    // Process default pools.
+    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
+    {
+        VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
+        VMA_ASSERT(pBlockVector);
+        pBlockVector->AddStats(pStats);
+    }
+
+    // Process custom pools.
+    {
+        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
+        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
+        {
+            m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
+        }
+    }
+
+    // Process dedicated allocations.
+    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
+    {
+        const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
+        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
+        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
+        VMA_ASSERT(pDedicatedAllocVector);
+        for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
+        {
+            VmaStatInfo allocationStatInfo;
+            (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
+            VmaAddStatInfo(pStats->total, allocationStatInfo);
+            VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
+            VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
+        }
+    }
+
+    // Postprocess.
+    VmaPostprocessCalcStatInfo(pStats->total);
+    for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
+        VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
+    for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
+        VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
+}
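+
+// Illustrative only: the public wrapper for this routine is vmaCalculateStats. A minimal
+// sketch of polling overall usage:
+//
+//     VmaStats stats = {};
+//     vmaCalculateStats(allocator, &stats);
+//     // stats.total.usedBytes and stats.total.unusedBytes summarize the default pools,
+//     // custom pools, and dedicated allocations walked above.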
+
+static const uint32_t VMA_VENDOR_ID_AMD = 4098;
+
+VkResult VmaAllocator_T::DefragmentationBegin(
+    const VmaDefragmentationInfo2& info,
+    VmaDefragmentationStats* pStats,
+    VmaDefragmentationContext* pContext)
+{
+    if(info.pAllocationsChanged != VMA_NULL)
+    {
+        memset(info.pAllocationsChanged, 0, info.allocationCount * sizeof(VkBool32));
+    }
+
+    *pContext = vma_new(this, VmaDefragmentationContext_T)(
+        this, m_CurrentFrameIndex.load(), info.flags, pStats);
+
+    (*pContext)->AddPools(info.poolCount, info.pPools);
+    (*pContext)->AddAllocations(
+        info.allocationCount, info.pAllocations, info.pAllocationsChanged);
+
+    VkResult res = (*pContext)->Defragment(
+        info.maxCpuBytesToMove, info.maxCpuAllocationsToMove,
+        info.maxGpuBytesToMove, info.maxGpuAllocationsToMove,
+        info.commandBuffer, pStats);
+
+    if(res != VK_NOT_READY)
+    {
+        vma_delete(this, *pContext);
+        *pContext = VMA_NULL;
+    }
+
+    return res;
+}
+
+VkResult VmaAllocator_T::DefragmentationEnd(
+    VmaDefragmentationContext context)
+{
+    vma_delete(this, context);
+    return VK_SUCCESS;
+}
+
+void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
+{
+    if(hAllocation->CanBecomeLost())
+    {
+        /*
+        Warning: This is a carefully designed algorithm.
+        Do not modify unless you really know what you're doing :)
+        */
+        const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
+        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
+        for(;;)
+        {
+            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
+            {
+                pAllocationInfo->memoryType = UINT32_MAX;
+                pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
+                pAllocationInfo->offset = 0;
+                pAllocationInfo->size = hAllocation->GetSize();
+                pAllocationInfo->pMappedData = VMA_NULL;
+                pAllocationInfo->pUserData = hAllocation->GetUserData();
+                return;
+            }
+            else if(localLastUseFrameIndex == localCurrFrameIndex)
+            {
+                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
+                pAllocationInfo->deviceMemory = hAllocation->GetMemory();
+                pAllocationInfo->offset = hAllocation->GetOffset();
+                pAllocationInfo->size = hAllocation->GetSize();
+                pAllocationInfo->pMappedData = VMA_NULL;
+                pAllocationInfo->pUserData = hAllocation->GetUserData();
+                return;
+            }
+            else // Last use time earlier than current time.
+            {
+                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
+                {
+                    localLastUseFrameIndex = localCurrFrameIndex;
+                }
+            }
+        }
+    }
+    else
+    {
+#if VMA_STATS_STRING_ENABLED
+        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
+        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
+        for(;;)
+        {
+            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
+            if(localLastUseFrameIndex == localCurrFrameIndex)
+            {
+                break;
+            }
+            else // Last use time earlier than current time.
+            {
+                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
+                {
+                    localLastUseFrameIndex = localCurrFrameIndex;
+                }
+            }
+        }
+#endif
+
+        pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
+        pAllocationInfo->deviceMemory = hAllocation->GetMemory();
+        pAllocationInfo->offset = hAllocation->GetOffset();
+        pAllocationInfo->size = hAllocation->GetSize();
+        pAllocationInfo->pMappedData = hAllocation->GetMappedData();
+        pAllocationInfo->pUserData = hAllocation->GetUserData();
+    }
+}
+
+bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
+{
+    // This is a stripped-down version of VmaAllocator_T::GetAllocationInfo.
+    if(hAllocation->CanBecomeLost())
+    {
+        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
+        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
+        for(;;)
+        {
+            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
+            {
+                return false;
+            }
+            else if(localLastUseFrameIndex == localCurrFrameIndex)
+            {
+                return true;
+            }
+            else // Last use time earlier than current time.
+            {
+                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
+                {
+                    localLastUseFrameIndex = localCurrFrameIndex;
+                }
+            }
+        }
+    }
+    else
+    {
+#if VMA_STATS_STRING_ENABLED
+        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
+        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
+        for(;;)
+        {
+            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
+            if(localLastUseFrameIndex == localCurrFrameIndex)
+            {
+                break;
+            }
+            else // Last use time earlier than current time.
+            {
+                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
+                {
+                    localLastUseFrameIndex = localCurrFrameIndex;
+                }
+            }
+        }
+#endif
+
+        return true;
+    }
+}
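+
+// Illustrative only, not part of the imported library: the compare-exchange loops above
+// back the public lost-allocation protocol. A minimal sketch of the per-frame pattern,
+// assuming the allocation was created with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT:
+//
+//     vmaSetCurrentFrameIndex(allocator, frameIndex); // once per frame
+//     if(vmaTouchAllocation(allocator, allocation) == VK_FALSE)
+//     {
+//         // Allocation was lost: destroy and recreate the buffer/image that used it.
+//     }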
+
+VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
+{
+    VMA_DEBUG_LOG("  CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->memoryTypeIndex, pCreateInfo->flags);
+
+    VmaPoolCreateInfo newCreateInfo = *pCreateInfo;
+
+    if(newCreateInfo.maxBlockCount == 0)
+    {
+        newCreateInfo.maxBlockCount = SIZE_MAX;
+    }
+    if(newCreateInfo.minBlockCount > newCreateInfo.maxBlockCount)
+    {
+        return VK_ERROR_INITIALIZATION_FAILED;
+    }
+
+    const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);
+
+    *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize);
+
+    VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
+    if(res != VK_SUCCESS)
+    {
+        vma_delete(this, *pPool);
+        *pPool = VMA_NULL;
+        return res;
+    }
+
+    // Add to m_Pools.
+    {
+        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
+        (*pPool)->SetId(m_NextPoolId++);
+        VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
+    }
+
+    return VK_SUCCESS;
+}
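+
+// Illustrative only, not part of the imported library: a minimal sketch of reaching this
+// function through the public API, assuming memTypeIndex was obtained from
+// vmaFindMemoryTypeIndex and the 64 MiB block size is a hypothetical choice:
+//
+//     VmaPoolCreateInfo poolCreateInfo = {};
+//     poolCreateInfo.memoryTypeIndex = memTypeIndex;
+//     poolCreateInfo.blockSize = 64ull * 1024 * 1024;
+//     poolCreateInfo.minBlockCount = 1;
+//     poolCreateInfo.maxBlockCount = 0; // 0 is treated as SIZE_MAX above
+//
+//     VmaPool pool = VK_NULL_HANDLE;
+//     VkResult res = vmaCreatePool(allocator, &poolCreateInfo, &pool);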
+
+void VmaAllocator_T::DestroyPool(VmaPool pool)
+{
+    // Remove from m_Pools.
+    {
+        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
+        bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
+        VMA_ASSERT(success && "Pool not found in Allocator.");
+    }
+
+    vma_delete(this, pool);
+}
+
+void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
+{
+    pool->m_BlockVector.GetPoolStats(pPoolStats);
+}
+
+void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
+{
+    m_CurrentFrameIndex.store(frameIndex);
+}
+
+void VmaAllocator_T::MakePoolAllocationsLost(
+    VmaPool hPool,
+    size_t* pLostAllocationCount)
+{
+    hPool->m_BlockVector.MakePoolAllocationsLost(
+        m_CurrentFrameIndex.load(),
+        pLostAllocationCount);
+}
+
+VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool)
+{
+    return hPool->m_BlockVector.CheckCorruption();
+}
+
+VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
+{
+    VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
+
+    // Process default pools.
+    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
+    {
+        if(((1u << memTypeIndex) & memoryTypeBits) != 0)
+        {
+            VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
+            VMA_ASSERT(pBlockVector);
+            VkResult localRes = pBlockVector->CheckCorruption();
+            switch(localRes)
+            {
+            case VK_ERROR_FEATURE_NOT_PRESENT:
+                break;
+            case VK_SUCCESS:
+                finalRes = VK_SUCCESS;
+                break;
+            default:
+                return localRes;
+            }
+        }
+    }
+
+    // Process custom pools.
+    {
+        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
+        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
+        {
+            if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
+            {
+                VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
+                switch(localRes)
+                {
+                case VK_ERROR_FEATURE_NOT_PRESENT:
+                    break;
+                case VK_SUCCESS:
+                    finalRes = VK_SUCCESS;
+                    break;
+                default:
+                    return localRes;
+                }
+            }
+        }
+    }
+
+    return finalRes;
+}
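+
+// Illustrative only: the public entry point for this routine is vmaCheckCorruption. It only
+// reports something useful when VMA_DEBUG_DETECT_CORRUPTION and a nonzero VMA_DEBUG_MARGIN
+// are enabled at compile time; otherwise every block vector reports
+// VK_ERROR_FEATURE_NOT_PRESENT, which is what this function then returns.
+//
+//     VkResult res = vmaCheckCorruption(allocator, UINT32_MAX); // check all memory types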
+
+void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
+{
+    *pAllocation = vma_new(this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST, false);
+    (*pAllocation)->InitLost();
+}
+
+VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
+{
+    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
+
+    VkResult res;
+    if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
+    {
+        VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
+        if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
+        {
+            res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
+            if(res == VK_SUCCESS)
+            {
+                m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
+            }
+        }
+        else
+        {
+            res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
+        }
+    }
+    else
+    {
+        res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
+    }
+
+    if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
+    {
+        (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
+    }
+
+    return res;
+}
+
+void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
+{
+    if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
+    {
+        (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size);
+    }
+
+    (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
+
+    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
+    if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
+    {
+        VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
+        m_HeapSizeLimit[heapIndex] += size;
+    }
+}
+
+VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
+{
+    if(hAllocation->CanBecomeLost())
+    {
+        return VK_ERROR_MEMORY_MAP_FAILED;
+    }
+
+    switch(hAllocation->GetType())
+    {
+    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
+        {
+            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
+            char *pBytes = VMA_NULL;
+            VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
+            if(res == VK_SUCCESS)
+            {
+                *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
+                hAllocation->BlockAllocMap();
+            }
+            return res;
+        }
+    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
+        return hAllocation->DedicatedAllocMap(this, ppData);
+    default:
+        VMA_ASSERT(0);
+        return VK_ERROR_MEMORY_MAP_FAILED;
+    }
+}
+
+void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
+{
+    switch(hAllocation->GetType())
+    {
+    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
+        {
+            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
+            hAllocation->BlockAllocUnmap();
+            pBlock->Unmap(this, 1);
+        }
+        break;
+    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
+        hAllocation->DedicatedAllocUnmap(this);
+        break;
+    default:
+        VMA_ASSERT(0);
+    }
+}
+
+VkResult VmaAllocator_T::BindBufferMemory(VmaAllocation hAllocation, VkBuffer hBuffer)
+{
+    VkResult res = VK_SUCCESS;
+    switch(hAllocation->GetType())
+    {
+    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
+        res = GetVulkanFunctions().vkBindBufferMemory(
+            m_hDevice,
+            hBuffer,
+            hAllocation->GetMemory(),
+            0); //memoryOffset
+        break;
+    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
+    {
+        VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
+        VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
+        res = pBlock->BindBufferMemory(this, hAllocation, hBuffer);
+        break;
+    }
+    default:
+        VMA_ASSERT(0);
+    }
+    return res;
+}
+
+VkResult VmaAllocator_T::BindImageMemory(VmaAllocation hAllocation, VkImage hImage)
+{
+    VkResult res = VK_SUCCESS;
+    switch(hAllocation->GetType())
+    {
+    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
+        res = GetVulkanFunctions().vkBindImageMemory(
+            m_hDevice,
+            hImage,
+            hAllocation->GetMemory(),
+            0); //memoryOffset
+        break;
+    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
+    {
+        VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
+        VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
+        res = pBlock->BindImageMemory(this, hAllocation, hImage);
+        break;
+    }
+    default:
+        VMA_ASSERT(0);
+    }
+    return res;
+}
+
+void VmaAllocator_T::FlushOrInvalidateAllocation(
+    VmaAllocation hAllocation,
+    VkDeviceSize offset, VkDeviceSize size,
+    VMA_CACHE_OPERATION op)
+{
+    const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
+    if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
+    {
+        const VkDeviceSize allocationSize = hAllocation->GetSize();
+        VMA_ASSERT(offset <= allocationSize);
+
+        const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
+
+        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
+        memRange.memory = hAllocation->GetMemory();
+        
+        switch(hAllocation->GetType())
+        {
+        case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
+            memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
+            if(size == VK_WHOLE_SIZE)
+            {
+                memRange.size = allocationSize - memRange.offset;
+            }
+            else
+            {
+                VMA_ASSERT(offset + size <= allocationSize);
+                memRange.size = VMA_MIN(
+                    VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
+                    allocationSize - memRange.offset);
+            }
+            break;
+
+        case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
+        {
+            // 1. Still within this allocation.
+            memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
+            if(size == VK_WHOLE_SIZE)
+            {
+                size = allocationSize - offset;
+            }
+            else
+            {
+                VMA_ASSERT(offset + size <= allocationSize);
+            }
+            memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
+
+            // 2. Adjust to whole block.
+            const VkDeviceSize allocationOffset = hAllocation->GetOffset();
+            VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
+            const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
+            memRange.offset += allocationOffset;
+            memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
+            
+            break;
+        }
+        
+        default:
+            VMA_ASSERT(0);
+        }
+
+        switch(op)
+        {
+        case VMA_CACHE_FLUSH:
+            (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
+            break;
+        case VMA_CACHE_INVALIDATE:
+            (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
+            break;
+        default:
+            VMA_ASSERT(0);
+        }
+    }
+    // else: Just ignore this call.
+}
+
+void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
+{
+    VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
+
+    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
+    {
+        VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
+        AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
+        VMA_ASSERT(pDedicatedAllocations);
+        bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
+        VMA_ASSERT(success);
+    }
+
+    VkDeviceMemory hMemory = allocation->GetMemory();
+    
+    /*
+    There is no need to call this, because the Vulkan spec allows skipping
+    vkUnmapMemory before vkFreeMemory.
+
+    if(allocation->GetMappedData() != VMA_NULL)
+    {
+        (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
+    }
+    */
+    
+    FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
+
+    VMA_DEBUG_LOG("    Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
+}
+
+void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
+{
+    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
+        !hAllocation->CanBecomeLost() &&
+        (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
+    {
+        void* pData = VMA_NULL;
+        VkResult res = Map(hAllocation, &pData);
+        if(res == VK_SUCCESS)
+        {
+            memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
+            FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
+            Unmap(hAllocation);
+        }
+        else
+        {
+            VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
+        }
+    }
+}
+
+#if VMA_STATS_STRING_ENABLED
+
+void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
+{
+    bool dedicatedAllocationsStarted = false;
+    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
+    {
+        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
+        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
+        VMA_ASSERT(pDedicatedAllocVector);
+        if(pDedicatedAllocVector->empty() == false)
+        {
+            if(dedicatedAllocationsStarted == false)
+            {
+                dedicatedAllocationsStarted = true;
+                json.WriteString("DedicatedAllocations");
+                json.BeginObject();
+            }
+
+            json.BeginString("Type ");
+            json.ContinueString(memTypeIndex);
+            json.EndString();
+                
+            json.BeginArray();
+
+            for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
+            {
+                json.BeginObject(true);
+                const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
+                hAlloc->PrintParameters(json);
+                json.EndObject();
+            }
+
+            json.EndArray();
+        }
+    }
+    if(dedicatedAllocationsStarted)
+    {
+        json.EndObject();
+    }
+
+    {
+        bool allocationsStarted = false;
+        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
+        {
+            if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
+            {
+                if(allocationsStarted == false)
+                {
+                    allocationsStarted = true;
+                    json.WriteString("DefaultPools");
+                    json.BeginObject();
+                }
+
+                json.BeginString("Type ");
+                json.ContinueString(memTypeIndex);
+                json.EndString();
+
+                m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
+            }
+        }
+        if(allocationsStarted)
+        {
+            json.EndObject();
+        }
+    }
+
+    // Custom pools
+    {
+        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
+        const size_t poolCount = m_Pools.size();
+        if(poolCount > 0)
+        {
+            json.WriteString("Pools");
+            json.BeginObject();
+            for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
+            {
+                json.BeginString();
+                json.ContinueString(m_Pools[poolIndex]->GetId());
+                json.EndString();
+
+                m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
+            }
+            json.EndObject();
+        }
+    }
+}
+
+#endif // #if VMA_STATS_STRING_ENABLED
+
+////////////////////////////////////////////////////////////////////////////////
+// Public interface
+
+VkResult vmaCreateAllocator(
+    const VmaAllocatorCreateInfo* pCreateInfo,
+    VmaAllocator* pAllocator)
+{
+    VMA_ASSERT(pCreateInfo && pAllocator);
+    VMA_DEBUG_LOG("vmaCreateAllocator");
+    *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
+    return (*pAllocator)->Init(pCreateInfo);
+}
+
+void vmaDestroyAllocator(
+    VmaAllocator allocator)
+{
+    if(allocator != VK_NULL_HANDLE)
+    {
+        VMA_DEBUG_LOG("vmaDestroyAllocator");
+        VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
+        vma_delete(&allocationCallbacks, allocator);
+    }
+}
+
+void vmaGetPhysicalDeviceProperties(
+    VmaAllocator allocator,
+    const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
+{
+    VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
+    *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
+}
+
+void vmaGetMemoryProperties(
+    VmaAllocator allocator,
+    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
+{
+    VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
+    *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
+}
+
+void vmaGetMemoryTypeProperties(
+    VmaAllocator allocator,
+    uint32_t memoryTypeIndex,
+    VkMemoryPropertyFlags* pFlags)
+{
+    VMA_ASSERT(allocator && pFlags);
+    VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
+    *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
+}
+
+void vmaSetCurrentFrameIndex(
+    VmaAllocator allocator,
+    uint32_t frameIndex)
+{
+    VMA_ASSERT(allocator);
+    VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
+
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+    allocator->SetCurrentFrameIndex(frameIndex);
+}
+
+void vmaCalculateStats(
+    VmaAllocator allocator,
+    VmaStats* pStats)
+{
+    VMA_ASSERT(allocator && pStats);
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+    allocator->CalculateStats(pStats);
+}
+
+#if VMA_STATS_STRING_ENABLED
+
+void vmaBuildStatsString(
+    VmaAllocator allocator,
+    char** ppStatsString,
+    VkBool32 detailedMap)
+{
+    VMA_ASSERT(allocator && ppStatsString);
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+    VmaStringBuilder sb(allocator);
+    {
+        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
+        json.BeginObject();
+
+        VmaStats stats;
+        allocator->CalculateStats(&stats);
+
+        json.WriteString("Total");
+        VmaPrintStatInfo(json, stats.total);
+    
+        for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
+        {
+            json.BeginString("Heap ");
+            json.ContinueString(heapIndex);
+            json.EndString();
+            json.BeginObject();
+
+            json.WriteString("Size");
+            json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
+
+            json.WriteString("Flags");
+            json.BeginArray(true);
+            if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
+            {
+                json.WriteString("DEVICE_LOCAL");
+            }
+            json.EndArray();
+
+            if(stats.memoryHeap[heapIndex].blockCount > 0)
+            {
+                json.WriteString("Stats");
+                VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
+            }
+
+            for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
+            {
+                if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
+                {
+                    json.BeginString("Type ");
+                    json.ContinueString(typeIndex);
+                    json.EndString();
+
+                    json.BeginObject();
+
+                    json.WriteString("Flags");
+                    json.BeginArray(true);
+                    VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
+                    if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
+                    {
+                        json.WriteString("DEVICE_LOCAL");
+                    }
+                    if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
+                    {
+                        json.WriteString("HOST_VISIBLE");
+                    }
+                    if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
+                    {
+                        json.WriteString("HOST_COHERENT");
+                    }
+                    if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
+                    {
+                        json.WriteString("HOST_CACHED");
+                    }
+                    if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
+                    {
+                        json.WriteString("LAZILY_ALLOCATED");
+                    }
+                    json.EndArray();
+
+                    if(stats.memoryType[typeIndex].blockCount > 0)
+                    {
+                        json.WriteString("Stats");
+                        VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
+                    }
+
+                    json.EndObject();
+                }
+            }
+
+            json.EndObject();
+        }
+        if(detailedMap == VK_TRUE)
+        {
+            allocator->PrintDetailedMap(json);
+        }
+
+        json.EndObject();
+    }
+
+    const size_t len = sb.GetLength();
+    char* const pChars = vma_new_array(allocator, char, len + 1);
+    if(len > 0)
+    {
+        memcpy(pChars, sb.GetData(), len);
+    }
+    pChars[len] = '\0';
+    *ppStatsString = pChars;
+}
+
+void vmaFreeStatsString(
+    VmaAllocator allocator,
+    char* pStatsString)
+{
+    if(pStatsString != VMA_NULL)
+    {
+        VMA_ASSERT(allocator);
+        size_t len = strlen(pStatsString);
+        vma_delete_array(allocator, pStatsString, len + 1);
+    }
+}
+
+#endif // #if VMA_STATS_STRING_ENABLED
+
+/*
+This function is not protected by any mutex because it just reads immutable data.
+*/
+VkResult vmaFindMemoryTypeIndex(
+    VmaAllocator allocator,
+    uint32_t memoryTypeBits,
+    const VmaAllocationCreateInfo* pAllocationCreateInfo,
+    uint32_t* pMemoryTypeIndex)
+{
+    VMA_ASSERT(allocator != VK_NULL_HANDLE);
+    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
+    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
+
+    if(pAllocationCreateInfo->memoryTypeBits != 0)
+    {
+        memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits;
+    }
+    
+    uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
+    uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
+
+    const bool mapped = (pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
+    if(mapped)
+    {
+        preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+    }
+
+    // Convert usage to requiredFlags and preferredFlags.
+    switch(pAllocationCreateInfo->usage)
+    {
+    case VMA_MEMORY_USAGE_UNKNOWN:
+        break;
+    case VMA_MEMORY_USAGE_GPU_ONLY:
+        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
+        {
+            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+        }
+        break;
+    case VMA_MEMORY_USAGE_CPU_ONLY:
+        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+        break;
+    case VMA_MEMORY_USAGE_CPU_TO_GPU:
+        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
+        {
+            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+        }
+        break;
+    case VMA_MEMORY_USAGE_GPU_TO_CPU:
+        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+        preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
+        break;
+    default:
+        break;
+    }
+
+    *pMemoryTypeIndex = UINT32_MAX;
+    uint32_t minCost = UINT32_MAX;
+    for(uint32_t memTypeIndex = 0, memTypeBit = 1;
+        memTypeIndex < allocator->GetMemoryTypeCount();
+        ++memTypeIndex, memTypeBit <<= 1)
+    {
+        // This memory type is acceptable according to memoryTypeBits bitmask.
+        if((memTypeBit & memoryTypeBits) != 0)
+        {
+            const VkMemoryPropertyFlags currFlags =
+                allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
+            // This memory type contains requiredFlags.
+            if((requiredFlags & ~currFlags) == 0)
+            {
+                // Calculate cost as number of bits from preferredFlags not present in this memory type.
+                uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
+                // Remember memory type with lowest cost.
+                if(currCost < minCost)
+                {
+                    *pMemoryTypeIndex = memTypeIndex;
+                    if(currCost == 0)
+                    {
+                        return VK_SUCCESS;
+                    }
+                    minCost = currCost;
+                }
+            }
+        }
+    }
+    return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
+}
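+
+/*
+Editorial usage sketch (not part of the upstream VMA source). Picking a
+host-visible memory type for a staging buffer; `allocator` is assumed to be a
+VmaAllocator created earlier with vmaCreateAllocator.
+
+    VmaAllocationCreateInfo stagingCreateInfo = {};
+    stagingCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
+
+    uint32_t memTypeIndex = UINT32_MAX;
+    VkResult res = vmaFindMemoryTypeIndex(
+        allocator,
+        UINT32_MAX, // memoryTypeBits: consider every memory type reported by the driver
+        &stagingCreateInfo,
+        &memTypeIndex);
+    // On VK_SUCCESS, memTypeIndex is the lowest-cost type satisfying the requested flags.
+*/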
+
+VkResult vmaFindMemoryTypeIndexForBufferInfo(
+    VmaAllocator allocator,
+    const VkBufferCreateInfo* pBufferCreateInfo,
+    const VmaAllocationCreateInfo* pAllocationCreateInfo,
+    uint32_t* pMemoryTypeIndex)
+{
+    VMA_ASSERT(allocator != VK_NULL_HANDLE);
+    VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
+    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
+    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
+
+    const VkDevice hDev = allocator->m_hDevice;
+    VkBuffer hBuffer = VK_NULL_HANDLE;
+    VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
+        hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
+    if(res == VK_SUCCESS)
+    {
+        VkMemoryRequirements memReq = {};
+        allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
+            hDev, hBuffer, &memReq);
+
+        res = vmaFindMemoryTypeIndex(
+            allocator,
+            memReq.memoryTypeBits,
+            pAllocationCreateInfo,
+            pMemoryTypeIndex);
+
+        allocator->GetVulkanFunctions().vkDestroyBuffer(
+            hDev, hBuffer, allocator->GetAllocationCallbacks());
+    }
+    return res;
+}
+
+VkResult vmaFindMemoryTypeIndexForImageInfo(
+    VmaAllocator allocator,
+    const VkImageCreateInfo* pImageCreateInfo,
+    const VmaAllocationCreateInfo* pAllocationCreateInfo,
+    uint32_t* pMemoryTypeIndex)
+{
+    VMA_ASSERT(allocator != VK_NULL_HANDLE);
+    VMA_ASSERT(pImageCreateInfo != VMA_NULL);
+    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
+    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
+
+    const VkDevice hDev = allocator->m_hDevice;
+    VkImage hImage = VK_NULL_HANDLE;
+    VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
+        hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
+    if(res == VK_SUCCESS)
+    {
+        VkMemoryRequirements memReq = {};
+        allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
+            hDev, hImage, &memReq);
+
+        res = vmaFindMemoryTypeIndex(
+            allocator,
+            memReq.memoryTypeBits,
+            pAllocationCreateInfo,
+            pMemoryTypeIndex);
+
+        allocator->GetVulkanFunctions().vkDestroyImage(
+            hDev, hImage, allocator->GetAllocationCallbacks());
+    }
+    return res;
+}
+
+VkResult vmaCreatePool(
+    VmaAllocator allocator,
+    const VmaPoolCreateInfo* pCreateInfo,
+    VmaPool* pPool)
+{
+    VMA_ASSERT(allocator && pCreateInfo && pPool);
+    
+    VMA_DEBUG_LOG("vmaCreatePool");
+    
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+    
+    VkResult res = allocator->CreatePool(pCreateInfo, pPool);
+    
+#if VMA_RECORDING_ENABLED
+    if(allocator->GetRecorder() != VMA_NULL)
+    {
+        allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
+    }
+#endif
+    
+    return res;
+}
+
+void vmaDestroyPool(
+    VmaAllocator allocator,
+    VmaPool pool)
+{
+    VMA_ASSERT(allocator);
+    
+    if(pool == VK_NULL_HANDLE)
+    {
+        return;
+    }
+    
+    VMA_DEBUG_LOG("vmaDestroyPool");
+    
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+    
+#if VMA_RECORDING_ENABLED
+    if(allocator->GetRecorder() != VMA_NULL)
+    {
+        allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
+    }
+#endif
+
+    allocator->DestroyPool(pool);
+}
+
+void vmaGetPoolStats(
+    VmaAllocator allocator,
+    VmaPool pool,
+    VmaPoolStats* pPoolStats)
+{
+    VMA_ASSERT(allocator && pool && pPoolStats);
+
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+    allocator->GetPoolStats(pool, pPoolStats);
+}
+
+void vmaMakePoolAllocationsLost(
+    VmaAllocator allocator,
+    VmaPool pool,
+    size_t* pLostAllocationCount)
+{
+    VMA_ASSERT(allocator && pool);
+
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+#if VMA_RECORDING_ENABLED
+    if(allocator->GetRecorder() != VMA_NULL)
+    {
+        allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
+    }
+#endif
+
+    allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
+}
+
+VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
+{
+    VMA_ASSERT(allocator && pool);
+
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+    VMA_DEBUG_LOG("vmaCheckPoolCorruption");
+
+    return allocator->CheckPoolCorruption(pool);
+}
+
+VkResult vmaAllocateMemory(
+    VmaAllocator allocator,
+    const VkMemoryRequirements* pVkMemoryRequirements,
+    const VmaAllocationCreateInfo* pCreateInfo,
+    VmaAllocation* pAllocation,
+    VmaAllocationInfo* pAllocationInfo)
+{
+    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
+
+    VMA_DEBUG_LOG("vmaAllocateMemory");
+
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+    VkResult result = allocator->AllocateMemory(
+        *pVkMemoryRequirements,
+        false, // requiresDedicatedAllocation
+        false, // prefersDedicatedAllocation
+        VK_NULL_HANDLE, // dedicatedBuffer
+        VK_NULL_HANDLE, // dedicatedImage
+        *pCreateInfo,
+        VMA_SUBALLOCATION_TYPE_UNKNOWN,
+        1, // allocationCount
+        pAllocation);
+
+#if VMA_RECORDING_ENABLED
+    if(allocator->GetRecorder() != VMA_NULL)
+    {
+        allocator->GetRecorder()->RecordAllocateMemory(
+            allocator->GetCurrentFrameIndex(),
+            *pVkMemoryRequirements,
+            *pCreateInfo,
+            *pAllocation);
+    }
+#endif
+        
+    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
+    {
+        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
+    }
+
+    return result;
+}
+
+VkResult vmaAllocateMemoryPages(
+    VmaAllocator allocator,
+    const VkMemoryRequirements* pVkMemoryRequirements,
+    const VmaAllocationCreateInfo* pCreateInfo,
+    size_t allocationCount,
+    VmaAllocation* pAllocations,
+    VmaAllocationInfo* pAllocationInfo)
+{
+    if(allocationCount == 0)
+    {
+        return VK_SUCCESS;
+    }
+
+    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
+
+    VMA_DEBUG_LOG("vmaAllocateMemoryPages");
+
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+    VkResult result = allocator->AllocateMemory(
+        *pVkMemoryRequirements,
+        false, // requiresDedicatedAllocation
+        false, // prefersDedicatedAllocation
+        VK_NULL_HANDLE, // dedicatedBuffer
+        VK_NULL_HANDLE, // dedicatedImage
+        *pCreateInfo,
+        VMA_SUBALLOCATION_TYPE_UNKNOWN,
+        allocationCount,
+        pAllocations);
+
+#if VMA_RECORDING_ENABLED
+    if(allocator->GetRecorder() != VMA_NULL)
+    {
+        allocator->GetRecorder()->RecordAllocateMemoryPages(
+            allocator->GetCurrentFrameIndex(),
+            *pVkMemoryRequirements,
+            *pCreateInfo,
+            (uint64_t)allocationCount,
+            pAllocations);
+    }
+#endif
+        
+    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
+    {
+        for(size_t i = 0; i < allocationCount; ++i)
+        {
+            allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
+        }
+    }
+
+    return result;
+}
+
+VkResult vmaAllocateMemoryForBuffer(
+    VmaAllocator allocator,
+    VkBuffer buffer,
+    const VmaAllocationCreateInfo* pCreateInfo,
+    VmaAllocation* pAllocation,
+    VmaAllocationInfo* pAllocationInfo)
+{
+    VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
+
+    VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");
+
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+    VkMemoryRequirements vkMemReq = {};
+    bool requiresDedicatedAllocation = false;
+    bool prefersDedicatedAllocation = false;
+    allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
+        requiresDedicatedAllocation,
+        prefersDedicatedAllocation);
+
+    VkResult result = allocator->AllocateMemory(
+        vkMemReq,
+        requiresDedicatedAllocation,
+        prefersDedicatedAllocation,
+        buffer, // dedicatedBuffer
+        VK_NULL_HANDLE, // dedicatedImage
+        *pCreateInfo,
+        VMA_SUBALLOCATION_TYPE_BUFFER,
+        1, // allocationCount
+        pAllocation);
+
+#if VMA_RECORDING_ENABLED
+    if(allocator->GetRecorder() != VMA_NULL)
+    {
+        allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
+            allocator->GetCurrentFrameIndex(),
+            vkMemReq,
+            requiresDedicatedAllocation,
+            prefersDedicatedAllocation,
+            *pCreateInfo,
+            *pAllocation);
+    }
+#endif
+
+    if(pAllocationInfo && result == VK_SUCCESS)
+    {
+        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
+    }
+
+    return result;
+}
+
+VkResult vmaAllocateMemoryForImage(
+    VmaAllocator allocator,
+    VkImage image,
+    const VmaAllocationCreateInfo* pCreateInfo,
+    VmaAllocation* pAllocation,
+    VmaAllocationInfo* pAllocationInfo)
+{
+    VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
+
+    VMA_DEBUG_LOG("vmaAllocateMemoryForImage");
+
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+    VkMemoryRequirements vkMemReq = {};
+    bool requiresDedicatedAllocation = false;
+    bool prefersDedicatedAllocation  = false;
+    allocator->GetImageMemoryRequirements(image, vkMemReq,
+        requiresDedicatedAllocation, prefersDedicatedAllocation);
+
+    VkResult result = allocator->AllocateMemory(
+        vkMemReq,
+        requiresDedicatedAllocation,
+        prefersDedicatedAllocation,
+        VK_NULL_HANDLE, // dedicatedBuffer
+        image, // dedicatedImage
+        *pCreateInfo,
+        VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
+        1, // allocationCount
+        pAllocation);
+
+#if VMA_RECORDING_ENABLED
+    if(allocator->GetRecorder() != VMA_NULL)
+    {
+        allocator->GetRecorder()->RecordAllocateMemoryForImage(
+            allocator->GetCurrentFrameIndex(),
+            vkMemReq,
+            requiresDedicatedAllocation,
+            prefersDedicatedAllocation,
+            *pCreateInfo,
+            *pAllocation);
+    }
+#endif
+
+    if(pAllocationInfo && result == VK_SUCCESS)
+    {
+        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
+    }
+
+    return result;
+}
+
+void vmaFreeMemory(
+    VmaAllocator allocator,
+    VmaAllocation allocation)
+{
+    VMA_ASSERT(allocator);
+    
+    if(allocation == VK_NULL_HANDLE)
+    {
+        return;
+    }
+    
+    VMA_DEBUG_LOG("vmaFreeMemory");
+    
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+#if VMA_RECORDING_ENABLED
+    if(allocator->GetRecorder() != VMA_NULL)
+    {
+        allocator->GetRecorder()->RecordFreeMemory(
+            allocator->GetCurrentFrameIndex(),
+            allocation);
+    }
+#endif
+    
+    allocator->FreeMemory(
+        1, // allocationCount
+        &allocation);
+}
+
+void vmaFreeMemoryPages(
+    VmaAllocator allocator,
+    size_t allocationCount,
+    VmaAllocation* pAllocations)
+{
+    if(allocationCount == 0)
+    {
+        return;
+    }
+
+    VMA_ASSERT(allocator);
+    
+    VMA_DEBUG_LOG("vmaFreeMemoryPages");
+    
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+#if VMA_RECORDING_ENABLED
+    if(allocator->GetRecorder() != VMA_NULL)
+    {
+        allocator->GetRecorder()->RecordFreeMemoryPages(
+            allocator->GetCurrentFrameIndex(),
+            (uint64_t)allocationCount,
+            pAllocations);
+    }
+#endif
+    
+    allocator->FreeMemory(allocationCount, pAllocations);
+}
+
+VkResult vmaResizeAllocation(
+    VmaAllocator allocator,
+    VmaAllocation allocation,
+    VkDeviceSize newSize)
+{
+    VMA_ASSERT(allocator && allocation);
+    
+    VMA_DEBUG_LOG("vmaResizeAllocation");
+    
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+#if VMA_RECORDING_ENABLED
+    if(allocator->GetRecorder() != VMA_NULL)
+    {
+        allocator->GetRecorder()->RecordResizeAllocation(
+            allocator->GetCurrentFrameIndex(),
+            allocation,
+            newSize);
+    }
+#endif
+    
+    return allocator->ResizeAllocation(allocation, newSize);
+}
+
+void vmaGetAllocationInfo(
+    VmaAllocator allocator,
+    VmaAllocation allocation,
+    VmaAllocationInfo* pAllocationInfo)
+{
+    VMA_ASSERT(allocator && allocation && pAllocationInfo);
+
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+#if VMA_RECORDING_ENABLED
+    if(allocator->GetRecorder() != VMA_NULL)
+    {
+        allocator->GetRecorder()->RecordGetAllocationInfo(
+            allocator->GetCurrentFrameIndex(),
+            allocation);
+    }
+#endif
+
+    allocator->GetAllocationInfo(allocation, pAllocationInfo);
+}
+
+VkBool32 vmaTouchAllocation(
+    VmaAllocator allocator,
+    VmaAllocation allocation)
+{
+    VMA_ASSERT(allocator && allocation);
+
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+#if VMA_RECORDING_ENABLED
+    if(allocator->GetRecorder() != VMA_NULL)
+    {
+        allocator->GetRecorder()->RecordTouchAllocation(
+            allocator->GetCurrentFrameIndex(),
+            allocation);
+    }
+#endif
+
+    return allocator->TouchAllocation(allocation);
+}
+
+void vmaSetAllocationUserData(
+    VmaAllocator allocator,
+    VmaAllocation allocation,
+    void* pUserData)
+{
+    VMA_ASSERT(allocator && allocation);
+
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+    allocation->SetUserData(allocator, pUserData);
+
+#if VMA_RECORDING_ENABLED
+    if(allocator->GetRecorder() != VMA_NULL)
+    {
+        allocator->GetRecorder()->RecordSetAllocationUserData(
+            allocator->GetCurrentFrameIndex(),
+            allocation,
+            pUserData);
+    }
+#endif
+}
+
+void vmaCreateLostAllocation(
+    VmaAllocator allocator,
+    VmaAllocation* pAllocation)
+{
+    VMA_ASSERT(allocator && pAllocation);
+
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+    allocator->CreateLostAllocation(pAllocation);
+
+#if VMA_RECORDING_ENABLED
+    if(allocator->GetRecorder() != VMA_NULL)
+    {
+        allocator->GetRecorder()->RecordCreateLostAllocation(
+            allocator->GetCurrentFrameIndex(),
+            *pAllocation);
+    }
+#endif
+}
+
+VkResult vmaMapMemory(
+    VmaAllocator allocator,
+    VmaAllocation allocation,
+    void** ppData)
+{
+    VMA_ASSERT(allocator && allocation && ppData);
+
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+    VkResult res = allocator->Map(allocation, ppData);
+
+#if VMA_RECORDING_ENABLED
+    if(allocator->GetRecorder() != VMA_NULL)
+    {
+        allocator->GetRecorder()->RecordMapMemory(
+            allocator->GetCurrentFrameIndex(),
+            allocation);
+    }
+#endif
+
+    return res;
+}
+
+void vmaUnmapMemory(
+    VmaAllocator allocator,
+    VmaAllocation allocation)
+{
+    VMA_ASSERT(allocator && allocation);
+
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+#if VMA_RECORDING_ENABLED
+    if(allocator->GetRecorder() != VMA_NULL)
+    {
+        allocator->GetRecorder()->RecordUnmapMemory(
+            allocator->GetCurrentFrameIndex(),
+            allocation);
+    }
+#endif
+
+    allocator->Unmap(allocation);
+}
+
+void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
+{
+    VMA_ASSERT(allocator && allocation);
+
+    VMA_DEBUG_LOG("vmaFlushAllocation");
+
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
+
+#if VMA_RECORDING_ENABLED
+    if(allocator->GetRecorder() != VMA_NULL)
+    {
+        allocator->GetRecorder()->RecordFlushAllocation(
+            allocator->GetCurrentFrameIndex(),
+            allocation, offset, size);
+    }
+#endif
+}
+
+void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
+{
+    VMA_ASSERT(allocator && allocation);
+
+    VMA_DEBUG_LOG("vmaInvalidateAllocation");
+
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
+
+#if VMA_RECORDING_ENABLED
+    if(allocator->GetRecorder() != VMA_NULL)
+    {
+        allocator->GetRecorder()->RecordInvalidateAllocation(
+            allocator->GetCurrentFrameIndex(),
+            allocation, offset, size);
+    }
+#endif
+}
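+
+/*
+Editorial usage sketch (not part of the upstream VMA source): a common CPU-upload
+pattern using the mapping and flush entry points above. `allocator`, a host-visible
+`allocation`, and the `srcData`/`srcSize` buffer are assumed to exist already.
+
+    void* mapped = nullptr;
+    if(vmaMapMemory(allocator, allocation, &mapped) == VK_SUCCESS)
+    {
+        memcpy(mapped, srcData, srcSize);
+        // Needed only for non-HOST_COHERENT memory types; harmless otherwise.
+        vmaFlushAllocation(allocator, allocation, 0, VK_WHOLE_SIZE);
+        vmaUnmapMemory(allocator, allocation);
+    }
+*/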
+
+VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
+{
+    VMA_ASSERT(allocator);
+
+    VMA_DEBUG_LOG("vmaCheckCorruption");
+
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+    return allocator->CheckCorruption(memoryTypeBits);
+}
+
+VkResult vmaDefragment(
+    VmaAllocator allocator,
+    VmaAllocation* pAllocations,
+    size_t allocationCount,
+    VkBool32* pAllocationsChanged,
+    const VmaDefragmentationInfo *pDefragmentationInfo,
+    VmaDefragmentationStats* pDefragmentationStats)
+{
+    // Deprecated interface, reimplemented using the new one.
+
+    VmaDefragmentationInfo2 info2 = {};
+    info2.allocationCount = (uint32_t)allocationCount;
+    info2.pAllocations = pAllocations;
+    info2.pAllocationsChanged = pAllocationsChanged;
+    if(pDefragmentationInfo != VMA_NULL)
+    {
+        info2.maxCpuAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
+        info2.maxCpuBytesToMove = pDefragmentationInfo->maxBytesToMove;
+    }
+    else
+    {
+        info2.maxCpuAllocationsToMove = UINT32_MAX;
+        info2.maxCpuBytesToMove = VK_WHOLE_SIZE;
+    }
+    // info2.flags, maxGpuAllocationsToMove, maxGpuBytesToMove, and commandBuffer are deliberately left zero.
+
+    VmaDefragmentationContext ctx;
+    VkResult res = vmaDefragmentationBegin(allocator, &info2, pDefragmentationStats, &ctx);
+    if(res == VK_NOT_READY)
+    {
+        res = vmaDefragmentationEnd(allocator, ctx);
+    }
+    return res;
+}
+
+VkResult vmaDefragmentationBegin(
+    VmaAllocator allocator,
+    const VmaDefragmentationInfo2* pInfo,
+    VmaDefragmentationStats* pStats,
+    VmaDefragmentationContext *pContext)
+{
+    VMA_ASSERT(allocator && pInfo && pContext);
+
+    // Degenerate case: Nothing to defragment.
+    if(pInfo->allocationCount == 0 && pInfo->poolCount == 0)
+    {
+        return VK_SUCCESS;
+    }
+
+    VMA_ASSERT(pInfo->allocationCount == 0 || pInfo->pAllocations != VMA_NULL);
+    VMA_ASSERT(pInfo->poolCount == 0 || pInfo->pPools != VMA_NULL);
+    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->allocationCount, pInfo->pAllocations));
+    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->poolCount, pInfo->pPools));
+
+    VMA_DEBUG_LOG("vmaDefragmentationBegin");
+
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+    VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);
+
+#if VMA_RECORDING_ENABLED
+    if(allocator->GetRecorder() != VMA_NULL)
+    {
+        allocator->GetRecorder()->RecordDefragmentationBegin(
+            allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
+    }
+#endif
+
+    return res;
+}
+
+VkResult vmaDefragmentationEnd(
+    VmaAllocator allocator,
+    VmaDefragmentationContext context)
+{
+    VMA_ASSERT(allocator);
+
+    VMA_DEBUG_LOG("vmaDefragmentationEnd");
+
+    if(context != VK_NULL_HANDLE)
+    {
+        VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+#if VMA_RECORDING_ENABLED
+        if(allocator->GetRecorder() != VMA_NULL)
+        {
+            allocator->GetRecorder()->RecordDefragmentationEnd(
+                allocator->GetCurrentFrameIndex(), context);
+        }
+#endif
+
+        return allocator->DefragmentationEnd(context);
+    }
+    else
+    {
+        return VK_SUCCESS;
+    }
+}
+
+VkResult vmaBindBufferMemory(
+    VmaAllocator allocator,
+    VmaAllocation allocation,
+    VkBuffer buffer)
+{
+    VMA_ASSERT(allocator && allocation && buffer);
+
+    VMA_DEBUG_LOG("vmaBindBufferMemory");
+
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+    return allocator->BindBufferMemory(allocation, buffer);
+}
+
+VkResult vmaBindImageMemory(
+    VmaAllocator allocator,
+    VmaAllocation allocation,
+    VkImage image)
+{
+    VMA_ASSERT(allocator && allocation && image);
+
+    VMA_DEBUG_LOG("vmaBindImageMemory");
+
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+    return allocator->BindImageMemory(allocation, image);
+}
+
+VkResult vmaCreateBuffer(
+    VmaAllocator allocator,
+    const VkBufferCreateInfo* pBufferCreateInfo,
+    const VmaAllocationCreateInfo* pAllocationCreateInfo,
+    VkBuffer* pBuffer,
+    VmaAllocation* pAllocation,
+    VmaAllocationInfo* pAllocationInfo)
+{
+    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
+
+    if(pBufferCreateInfo->size == 0)
+    {
+        return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    
+    VMA_DEBUG_LOG("vmaCreateBuffer");
+    
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+    *pBuffer = VK_NULL_HANDLE;
+    *pAllocation = VK_NULL_HANDLE;
+
+    // 1. Create VkBuffer.
+    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
+        allocator->m_hDevice,
+        pBufferCreateInfo,
+        allocator->GetAllocationCallbacks(),
+        pBuffer);
+    if(res >= 0)
+    {
+        // 2. vkGetBufferMemoryRequirements.
+        VkMemoryRequirements vkMemReq = {};
+        bool requiresDedicatedAllocation = false;
+        bool prefersDedicatedAllocation  = false;
+        allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
+            requiresDedicatedAllocation, prefersDedicatedAllocation);
+
+        // Make sure alignment requirements for specific buffer usages reported
+        // in Physical Device Properties are included in alignment reported by memory requirements.
+        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
+        {
+           VMA_ASSERT(vkMemReq.alignment %
+              allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
+        }
+        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
+        {
+           VMA_ASSERT(vkMemReq.alignment %
+              allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
+        }
+        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
+        {
+           VMA_ASSERT(vkMemReq.alignment %
+              allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
+        }
+
+        // 3. Allocate memory using allocator.
+        res = allocator->AllocateMemory(
+            vkMemReq,
+            requiresDedicatedAllocation,
+            prefersDedicatedAllocation,
+            *pBuffer, // dedicatedBuffer
+            VK_NULL_HANDLE, // dedicatedImage
+            *pAllocationCreateInfo,
+            VMA_SUBALLOCATION_TYPE_BUFFER,
+            1, // allocationCount
+            pAllocation);
+
+#if VMA_RECORDING_ENABLED
+        if(allocator->GetRecorder() != VMA_NULL)
+        {
+            allocator->GetRecorder()->RecordCreateBuffer(
+                allocator->GetCurrentFrameIndex(),
+                *pBufferCreateInfo,
+                *pAllocationCreateInfo,
+                *pAllocation);
+        }
+#endif
+
+        if(res >= 0)
+        {
+            // 4. Bind buffer with memory.
+            res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
+            if(res >= 0)
+            {
+                // All steps succeeded.
+                #if VMA_STATS_STRING_ENABLED
+                    (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
+                #endif
+                if(pAllocationInfo != VMA_NULL)
+                {
+                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
+                }
+
+                return VK_SUCCESS;
+            }
+            allocator->FreeMemory(
+                1, // allocationCount
+                pAllocation);
+            *pAllocation = VK_NULL_HANDLE;
+            (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
+            *pBuffer = VK_NULL_HANDLE;
+            return res;
+        }
+        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
+        *pBuffer = VK_NULL_HANDLE;
+        return res;
+    }
+    return res;
+}
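+
+/*
+Editorial usage sketch (not part of the upstream VMA source). Creating a
+GPU-only vertex buffer with vmaCreateBuffer and releasing it with
+vmaDestroyBuffer; `allocator` is assumed to be an already-initialized VmaAllocator.
+
+    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+    bufCreateInfo.size = 65536;
+    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+
+    VmaAllocationCreateInfo allocCreateInfo = {};
+    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
+
+    VkBuffer buffer = VK_NULL_HANDLE;
+    VmaAllocation allocation = VK_NULL_HANDLE;
+    VkResult res = vmaCreateBuffer(
+        allocator, &bufCreateInfo, &allocCreateInfo, &buffer, &allocation, nullptr);
+
+    // ... record commands that use the buffer ...
+
+    vmaDestroyBuffer(allocator, buffer, allocation);
+*/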
+
+void vmaDestroyBuffer(
+    VmaAllocator allocator,
+    VkBuffer buffer,
+    VmaAllocation allocation)
+{
+    VMA_ASSERT(allocator);
+
+    if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
+    {
+        return;
+    }
+
+    VMA_DEBUG_LOG("vmaDestroyBuffer");
+
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+#if VMA_RECORDING_ENABLED
+    if(allocator->GetRecorder() != VMA_NULL)
+    {
+        allocator->GetRecorder()->RecordDestroyBuffer(
+            allocator->GetCurrentFrameIndex(),
+            allocation);
+    }
+#endif
+
+    if(buffer != VK_NULL_HANDLE)
+    {
+        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
+    }
+
+    if(allocation != VK_NULL_HANDLE)
+    {
+        allocator->FreeMemory(
+            1, // allocationCount
+            &allocation);
+    }
+}
+
+VkResult vmaCreateImage(
+    VmaAllocator allocator,
+    const VkImageCreateInfo* pImageCreateInfo,
+    const VmaAllocationCreateInfo* pAllocationCreateInfo,
+    VkImage* pImage,
+    VmaAllocation* pAllocation,
+    VmaAllocationInfo* pAllocationInfo)
+{
+    VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
+
+    if(pImageCreateInfo->extent.width == 0 ||
+        pImageCreateInfo->extent.height == 0 ||
+        pImageCreateInfo->extent.depth == 0 ||
+        pImageCreateInfo->mipLevels == 0 ||
+        pImageCreateInfo->arrayLayers == 0)
+    {
+        return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+
+    VMA_DEBUG_LOG("vmaCreateImage");
+
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+    *pImage = VK_NULL_HANDLE;
+    *pAllocation = VK_NULL_HANDLE;
+
+    // 1. Create VkImage.
+    VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
+        allocator->m_hDevice,
+        pImageCreateInfo,
+        allocator->GetAllocationCallbacks(),
+        pImage);
+    if(res >= 0)
+    {
+        VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
+            VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
+            VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
+        
+        // 2. Allocate memory using allocator.
+        VkMemoryRequirements vkMemReq = {};
+        bool requiresDedicatedAllocation = false;
+        bool prefersDedicatedAllocation  = false;
+        allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
+            requiresDedicatedAllocation, prefersDedicatedAllocation);
+
+        res = allocator->AllocateMemory(
+            vkMemReq,
+            requiresDedicatedAllocation,
+            prefersDedicatedAllocation,
+            VK_NULL_HANDLE, // dedicatedBuffer
+            *pImage, // dedicatedImage
+            *pAllocationCreateInfo,
+            suballocType,
+            1, // allocationCount
+            pAllocation);
+
+#if VMA_RECORDING_ENABLED
+        if(allocator->GetRecorder() != VMA_NULL)
+        {
+            allocator->GetRecorder()->RecordCreateImage(
+                allocator->GetCurrentFrameIndex(),
+                *pImageCreateInfo,
+                *pAllocationCreateInfo,
+                *pAllocation);
+        }
+#endif
+
+        if(res >= 0)
+        {
+            // 3. Bind image with memory.
+            res = allocator->BindImageMemory(*pAllocation, *pImage);
+            if(res >= 0)
+            {
+                // All steps succeeded.
+                #if VMA_STATS_STRING_ENABLED
+                    (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
+                #endif
+                if(pAllocationInfo != VMA_NULL)
+                {
+                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
+                }
+
+                return VK_SUCCESS;
+            }
+            allocator->FreeMemory(
+                1, // allocationCount
+                pAllocation);
+            *pAllocation = VK_NULL_HANDLE;
+            (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
+            *pImage = VK_NULL_HANDLE;
+            return res;
+        }
+        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
+        *pImage = VK_NULL_HANDLE;
+        return res;
+    }
+    return res;
+}
+
+void vmaDestroyImage(
+    VmaAllocator allocator,
+    VkImage image,
+    VmaAllocation allocation)
+{
+    VMA_ASSERT(allocator);
+
+    if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
+    {
+        return;
+    }
+
+    VMA_DEBUG_LOG("vmaDestroyImage");
+
+    VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+#if VMA_RECORDING_ENABLED
+    if(allocator->GetRecorder() != VMA_NULL)
+    {
+        allocator->GetRecorder()->RecordDestroyImage(
+            allocator->GetCurrentFrameIndex(),
+            allocation);
+    }
+#endif
+
+    if(image != VK_NULL_HANDLE)
+    {
+        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
+    }
+    if(allocation != VK_NULL_HANDLE)
+    {
+        allocator->FreeMemory(
+            1, // allocationCount
+            &allocation);
+    }
+}
+#if defined(__GNUC__)
+#pragma GCC diagnostic pop
+#if defined(__clang__)
+#pragma clang diagnostic pop
+#endif
+#endif
+#endif // #ifdef VMA_IMPLEMENTATION
+// clang-format on
diff --git a/src/third_party/vulkan-validation-layers/src/layers/xxhash.c b/src/third_party/vulkan-validation-layers/src/layers/xxhash.c
new file mode 100644
index 0000000..833b99f
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/xxhash.c
@@ -0,0 +1,888 @@
+/*
+*  xxHash - Fast Hash algorithm
+*  Copyright (C) 2012-2016, Yann Collet
+*
+*  BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
+*
+*  Redistribution and use in source and binary forms, with or without
+*  modification, are permitted provided that the following conditions are
+*  met:
+*
+*  * Redistributions of source code must retain the above copyright
+*  notice, this list of conditions and the following disclaimer.
+*  * Redistributions in binary form must reproduce the above
+*  copyright notice, this list of conditions and the following disclaimer
+*  in the documentation and/or other materials provided with the
+*  distribution.
+*
+*  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+*  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+*  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+*  A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+*  OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+*  SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+*  LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+*  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+*  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+*  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+*  OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*  You can contact the author at :
+*  - xxHash homepage: http://www.xxhash.com
+*  - xxHash source repository : https://github.com/Cyan4973/xxHash
+*/
+
+
+/* *************************************
+*  Tuning parameters
+***************************************/
+/*!XXH_FORCE_MEMORY_ACCESS :
+ * By default, access to unaligned memory is controlled by `memcpy()`, which is safe and portable.
+ * Unfortunately, on some target/compiler combinations, the generated assembly is sub-optimal.
+ * The switch below allows selecting a different access method for improved performance.
+ * Method 0 (default) : use `memcpy()`. Safe and portable.
+ * Method 1 : `__packed` statement. It depends on a compiler extension (i.e., not portable).
+ *            This method is safe if your compiler supports it, and *generally* as fast or faster than `memcpy`.
+ * Method 2 : direct access. This method doesn't depend on a compiler extension but violates the C standard.
+ *            It can generate buggy code on targets which do not support unaligned memory accesses.
+ *            But in some circumstances, it's the only known way to get the most performance (e.g. GCC + ARMv6).
+ * See http://stackoverflow.com/a/32095106/646947 for details.
+ * Prefer these methods in priority order (0 > 1 > 2)
+ */
+#ifndef XXH_FORCE_MEMORY_ACCESS   /* can be defined externally, on command line for example */
+#  if defined(__GNUC__) && ( defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_6J__) || defined(__ARM_ARCH_6K__) || defined(__ARM_ARCH_6Z__) || defined(__ARM_ARCH_6ZK__) || defined(__ARM_ARCH_6T2__) )
+#    define XXH_FORCE_MEMORY_ACCESS 2
+#  elif defined(__INTEL_COMPILER) || \
+  (defined(__GNUC__) && ( defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7R__) || defined(__ARM_ARCH_7M__) || defined(__ARM_ARCH_7S__) ))
+#    define XXH_FORCE_MEMORY_ACCESS 1
+#  endif
+#endif
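+
+/* Editorial note (not from the upstream xxHash source): XXH_FORCE_MEMORY_ACCESS is
+ * meant to be overridden from the build system when the defaults above are not
+ * appropriate, e.g. (assuming a toolchain where unaligned loads are known to be safe):
+ *
+ *     cc -DXXH_FORCE_MEMORY_ACCESS=2 -c xxhash.c
+ */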
+
+/*!XXH_ACCEPT_NULL_INPUT_POINTER :
+ * If the input pointer is a null pointer, xxHash's default behavior is to trigger a memory access error, since it is a bad pointer.
+ * When this option is enabled, xxHash output for null input pointers will be the same as for a zero-length input.
+ * By default, this option is disabled. To enable it, uncomment the define below:
+ */
+/* #define XXH_ACCEPT_NULL_INPUT_POINTER 1 */
+
+/*!XXH_FORCE_NATIVE_FORMAT :
+ * By default, the xxHash library provides endian-independent hash values, based on the little-endian convention.
+ * Results are therefore identical on little-endian and big-endian CPUs.
+ * This comes at a performance cost on big-endian CPUs, since some byte swapping is required to emulate the little-endian format.
+ * Should endian-independence be of no importance for your application, you may set the #define below to 1,
+ * to improve speed on big-endian CPUs.
+ * This option has no impact on little-endian CPUs.
+ */
+#ifndef XXH_FORCE_NATIVE_FORMAT   /* can be defined externally */
+#  define XXH_FORCE_NATIVE_FORMAT 0
+#endif
+
+/*!XXH_FORCE_ALIGN_CHECK :
+ * This is a minor performance trick, only useful with lots of very small keys.
+ * It means : check for aligned/unaligned input.
+ * The check costs one initial branch per hash;
+ * set it to 0 when the input is guaranteed to be aligned,
+ * or when alignment doesn't matter for performance.
+ */
+#ifndef XXH_FORCE_ALIGN_CHECK /* can be defined externally */
+#  if defined(__i386) || defined(_M_IX86) || defined(__x86_64__) || defined(_M_X64)
+#    define XXH_FORCE_ALIGN_CHECK 0
+#  else
+#    define XXH_FORCE_ALIGN_CHECK 1
+#  endif
+#endif
+
+
+/* *************************************
+*  Includes & Memory related functions
+***************************************/
+/*! Modify the local functions below should you wish to use some other memory routines
+*   for malloc(), free() */
+#include <stdlib.h>
+static void* XXH_malloc(size_t s) { return malloc(s); }
+static void  XXH_free  (void* p)  { free(p); }
+/*! and for memcpy() */
+#include <string.h>
+static void* XXH_memcpy(void* dest, const void* src, size_t size) { return memcpy(dest,src,size); }
+
+#define XXH_STATIC_LINKING_ONLY
+#include "xxhash.h"
+
+
+/* *************************************
+*  Compiler Specific Options
+***************************************/
+#ifdef _MSC_VER    /* Visual Studio */
+#  pragma warning(disable : 4127)      /* disable: C4127: conditional expression is constant */
+#  define FORCE_INLINE static __forceinline
+#else
+#  if defined (__cplusplus) || defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L   /* C99 */
+#    ifdef __GNUC__
+#      define FORCE_INLINE static inline __attribute__((always_inline))
+#    else
+#      define FORCE_INLINE static inline
+#    endif
+#  else
+#    define FORCE_INLINE static
+#  endif /* __STDC_VERSION__ */
+#endif
+
+
+/* *************************************
+*  Basic Types
+***************************************/
+#ifndef MEM_MODULE
+# if !defined (__VMS) && (defined (__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */) )
+#   include <stdint.h>
+    typedef uint8_t  BYTE;
+    typedef uint16_t U16;
+    typedef uint32_t U32;
+# else
+    typedef unsigned char      BYTE;
+    typedef unsigned short     U16;
+    typedef unsigned int       U32;
+# endif
+#endif
+
+#if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==2))
+
+/* Force direct memory access. Only works on CPUs which support unaligned memory access in hardware */
+static U32 XXH_read32(const void* memPtr) { return *(const U32*) memPtr; }
+
+#elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==1))
+
+/* __pack instructions are safer, but compiler specific, hence potentially problematic for some compilers */
+/* currently only defined for gcc and icc */
+typedef union { U32 u32; } __attribute__((packed)) unalign;
+static U32 XXH_read32(const void* ptr) { return ((const unalign*)ptr)->u32; }
+
+#else
+
+/* portable and safe solution. Generally efficient.
+ * see : http://stackoverflow.com/a/32095106/646947
+ */
+static U32 XXH_read32(const void* memPtr)
+{
+    U32 val;
+    memcpy(&val, memPtr, sizeof(val));
+    return val;
+}
+
+#endif   /* XXH_FORCE_MEMORY_ACCESS */
+
+
+/* ****************************************
+*  Compiler-specific Functions and Macros
+******************************************/
+#define XXH_GCC_VERSION (__GNUC__ * 100 + __GNUC_MINOR__)
+
+/* Note : although _rotl exists for MinGW (GCC under Windows), performance seems poor */
+#if defined(_MSC_VER)
+#  define XXH_rotl32(x,r) _rotl(x,r)
+#  define XXH_rotl64(x,r) _rotl64(x,r)
+#else
+#  define XXH_rotl32(x,r) ((x << r) | (x >> (32 - r)))
+#  define XXH_rotl64(x,r) ((x << r) | (x >> (64 - r)))
+#endif
+
+#if defined(_MSC_VER)     /* Visual Studio */
+#  define XXH_swap32 _byteswap_ulong
+#elif XXH_GCC_VERSION >= 403
+#  define XXH_swap32 __builtin_bswap32
+#else
+static U32 XXH_swap32 (U32 x)
+{
+    return  ((x << 24) & 0xff000000 ) |
+            ((x <<  8) & 0x00ff0000 ) |
+            ((x >>  8) & 0x0000ff00 ) |
+            ((x >> 24) & 0x000000ff );
+}
+#endif
+
+
+/* *************************************
+*  Architecture Macros
+***************************************/
+typedef enum { XXH_bigEndian=0, XXH_littleEndian=1 } XXH_endianess;
+
+/* XXH_CPU_LITTLE_ENDIAN can be defined externally, for example on the compiler command line */
+#ifndef XXH_CPU_LITTLE_ENDIAN
+    static const int g_one = 1;
+#   define XXH_CPU_LITTLE_ENDIAN   (*(const char*)(&g_one))
+#endif
+
+
+/* ***************************
+*  Memory reads
+*****************************/
+typedef enum { XXH_aligned, XXH_unaligned } XXH_alignment;
+
+FORCE_INLINE U32 XXH_readLE32_align(const void* ptr, XXH_endianess endian, XXH_alignment align)
+{
+    if (align==XXH_unaligned)
+        return endian==XXH_littleEndian ? XXH_read32(ptr) : XXH_swap32(XXH_read32(ptr));
+    else
+        return endian==XXH_littleEndian ? *(const U32*)ptr : XXH_swap32(*(const U32*)ptr);
+}
+
+FORCE_INLINE U32 XXH_readLE32(const void* ptr, XXH_endianess endian)
+{
+    return XXH_readLE32_align(ptr, endian, XXH_unaligned);
+}
+
+static U32 XXH_readBE32(const void* ptr)
+{
+    return XXH_CPU_LITTLE_ENDIAN ? XXH_swap32(XXH_read32(ptr)) : XXH_read32(ptr);
+}
+
+
+/* *************************************
+*  Macros
+***************************************/
+#define XXH_STATIC_ASSERT(c)   { enum { XXH_static_assert = 1/(int)(!!(c)) }; }    /* use only *after* variable declarations */
+XXH_PUBLIC_API unsigned XXH_versionNumber (void) { return XXH_VERSION_NUMBER; }
+
+
+/* *******************************************************************
+*  32-bits hash functions
+*********************************************************************/
+static const U32 PRIME32_1 = 2654435761U;
+static const U32 PRIME32_2 = 2246822519U;
+static const U32 PRIME32_3 = 3266489917U;
+static const U32 PRIME32_4 =  668265263U;
+static const U32 PRIME32_5 =  374761393U;
+
+static U32 XXH32_round(U32 seed, U32 input)
+{
+    seed += input * PRIME32_2;
+    seed  = XXH_rotl32(seed, 13);
+    seed *= PRIME32_1;
+    return seed;
+}
+
+FORCE_INLINE U32 XXH32_endian_align(const void* input, size_t len, U32 seed, XXH_endianess endian, XXH_alignment align)
+{
+    const BYTE* p = (const BYTE*)input;
+    const BYTE* bEnd = p + len;
+    U32 h32;
+#define XXH_get32bits(p) XXH_readLE32_align(p, endian, align)
+
+#ifdef XXH_ACCEPT_NULL_INPUT_POINTER
+    if (p==NULL) {
+        len=0;
+        bEnd=p=(const BYTE*)(size_t)16;
+    }
+#endif
+
+    if (len>=16) {
+        const BYTE* const limit = bEnd - 16;
+        U32 v1 = seed + PRIME32_1 + PRIME32_2;
+        U32 v2 = seed + PRIME32_2;
+        U32 v3 = seed + 0;
+        U32 v4 = seed - PRIME32_1;
+
+        do {
+            v1 = XXH32_round(v1, XXH_get32bits(p)); p+=4;
+            v2 = XXH32_round(v2, XXH_get32bits(p)); p+=4;
+            v3 = XXH32_round(v3, XXH_get32bits(p)); p+=4;
+            v4 = XXH32_round(v4, XXH_get32bits(p)); p+=4;
+        } while (p<=limit);
+
+        h32 = XXH_rotl32(v1, 1) + XXH_rotl32(v2, 7) + XXH_rotl32(v3, 12) + XXH_rotl32(v4, 18);
+    } else {
+        h32  = seed + PRIME32_5;
+    }
+
+    h32 += (U32) len;
+
+    while (p+4<=bEnd) {
+        h32 += XXH_get32bits(p) * PRIME32_3;
+        h32  = XXH_rotl32(h32, 17) * PRIME32_4 ;
+        p+=4;
+    }
+
+    while (p<bEnd) {
+        h32 += (*p) * PRIME32_5;
+        h32 = XXH_rotl32(h32, 11) * PRIME32_1 ;
+        p++;
+    }
+
+    h32 ^= h32 >> 15;
+    h32 *= PRIME32_2;
+    h32 ^= h32 >> 13;
+    h32 *= PRIME32_3;
+    h32 ^= h32 >> 16;
+
+    return h32;
+}
+
+
+XXH_PUBLIC_API unsigned int XXH32 (const void* input, size_t len, unsigned int seed)
+{
+#if 0
+    /* Simple version, good for code maintenance, but unfortunately slow for small inputs */
+    XXH32_state_t state;
+    XXH32_reset(&state, seed);
+    XXH32_update(&state, input, len);
+    return XXH32_digest(&state);
+#else
+    XXH_endianess endian_detected = (XXH_endianess)XXH_CPU_LITTLE_ENDIAN;
+
+    if (XXH_FORCE_ALIGN_CHECK) {
+        if ((((size_t)input) & 3) == 0) {   /* Input is 4-bytes aligned, leverage the speed benefit */
+            if ((endian_detected==XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
+                return XXH32_endian_align(input, len, seed, XXH_littleEndian, XXH_aligned);
+            else
+                return XXH32_endian_align(input, len, seed, XXH_bigEndian, XXH_aligned);
+    }   }
+
+    if ((endian_detected==XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
+        return XXH32_endian_align(input, len, seed, XXH_littleEndian, XXH_unaligned);
+    else
+        return XXH32_endian_align(input, len, seed, XXH_bigEndian, XXH_unaligned);
+#endif
+}
+
+
+
+/*======   Hash streaming   ======*/
+
+XXH_PUBLIC_API XXH32_state_t* XXH32_createState(void)
+{
+    return (XXH32_state_t*)XXH_malloc(sizeof(XXH32_state_t));
+}
+XXH_PUBLIC_API XXH_errorcode XXH32_freeState(XXH32_state_t* statePtr)
+{
+    XXH_free(statePtr);
+    return XXH_OK;
+}
+
+XXH_PUBLIC_API void XXH32_copyState(XXH32_state_t* dstState, const XXH32_state_t* srcState)
+{
+    memcpy(dstState, srcState, sizeof(*dstState));
+}
+
+XXH_PUBLIC_API XXH_errorcode XXH32_reset(XXH32_state_t* statePtr, unsigned int seed)
+{
+    XXH32_state_t state;   /* using a local state to memcpy() in order to avoid strict-aliasing warnings */
+    memset(&state, 0, sizeof(state)-4);   /* do not write into reserved, for future removal */
+    state.v1 = seed + PRIME32_1 + PRIME32_2;
+    state.v2 = seed + PRIME32_2;
+    state.v3 = seed + 0;
+    state.v4 = seed - PRIME32_1;
+    memcpy(statePtr, &state, sizeof(state));
+    return XXH_OK;
+}
+
+
+FORCE_INLINE XXH_errorcode XXH32_update_endian (XXH32_state_t* state, const void* input, size_t len, XXH_endianess endian)
+{
+    const BYTE* p = (const BYTE*)input;
+    const BYTE* const bEnd = p + len;
+
+#ifdef XXH_ACCEPT_NULL_INPUT_POINTER
+    if (input==NULL) return XXH_ERROR;
+#endif
+
+    state->total_len_32 += (unsigned)len;
+    state->large_len |= (len>=16) | (state->total_len_32>=16);
+
+    if (state->memsize + len < 16)  {   /* fill in tmp buffer */
+        XXH_memcpy((BYTE*)(state->mem32) + state->memsize, input, len);
+        state->memsize += (unsigned)len;
+        return XXH_OK;
+    }
+
+    if (state->memsize) {   /* some data left from previous update */
+        XXH_memcpy((BYTE*)(state->mem32) + state->memsize, input, 16-state->memsize);
+        {   const U32* p32 = state->mem32;
+            state->v1 = XXH32_round(state->v1, XXH_readLE32(p32, endian)); p32++;
+            state->v2 = XXH32_round(state->v2, XXH_readLE32(p32, endian)); p32++;
+            state->v3 = XXH32_round(state->v3, XXH_readLE32(p32, endian)); p32++;
+            state->v4 = XXH32_round(state->v4, XXH_readLE32(p32, endian));
+        }
+        p += 16-state->memsize;
+        state->memsize = 0;
+    }
+
+    if (p <= bEnd-16) {
+        const BYTE* const limit = bEnd - 16;
+        U32 v1 = state->v1;
+        U32 v2 = state->v2;
+        U32 v3 = state->v3;
+        U32 v4 = state->v4;
+
+        do {
+            v1 = XXH32_round(v1, XXH_readLE32(p, endian)); p+=4;
+            v2 = XXH32_round(v2, XXH_readLE32(p, endian)); p+=4;
+            v3 = XXH32_round(v3, XXH_readLE32(p, endian)); p+=4;
+            v4 = XXH32_round(v4, XXH_readLE32(p, endian)); p+=4;
+        } while (p<=limit);
+
+        state->v1 = v1;
+        state->v2 = v2;
+        state->v3 = v3;
+        state->v4 = v4;
+    }
+
+    if (p < bEnd) {
+        XXH_memcpy(state->mem32, p, (size_t)(bEnd-p));
+        state->memsize = (unsigned)(bEnd-p);
+    }
+
+    return XXH_OK;
+}
+
+XXH_PUBLIC_API XXH_errorcode XXH32_update (XXH32_state_t* state_in, const void* input, size_t len)
+{
+    XXH_endianess endian_detected = (XXH_endianess)XXH_CPU_LITTLE_ENDIAN;
+
+    if ((endian_detected==XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
+        return XXH32_update_endian(state_in, input, len, XXH_littleEndian);
+    else
+        return XXH32_update_endian(state_in, input, len, XXH_bigEndian);
+}
+
+
+
+FORCE_INLINE U32 XXH32_digest_endian (const XXH32_state_t* state, XXH_endianess endian)
+{
+    const BYTE * p = (const BYTE*)state->mem32;
+    const BYTE* const bEnd = (const BYTE*)(state->mem32) + state->memsize;
+    U32 h32;
+
+    if (state->large_len) {
+        h32 = XXH_rotl32(state->v1, 1) + XXH_rotl32(state->v2, 7) + XXH_rotl32(state->v3, 12) + XXH_rotl32(state->v4, 18);
+    } else {
+        h32 = state->v3 /* == seed */ + PRIME32_5;
+    }
+
+    h32 += state->total_len_32;
+
+    while (p+4<=bEnd) {
+        h32 += XXH_readLE32(p, endian) * PRIME32_3;
+        h32  = XXH_rotl32(h32, 17) * PRIME32_4;
+        p+=4;
+    }
+
+    while (p<bEnd) {
+        h32 += (*p) * PRIME32_5;
+        h32  = XXH_rotl32(h32, 11) * PRIME32_1;
+        p++;
+    }
+
+    h32 ^= h32 >> 15;
+    h32 *= PRIME32_2;
+    h32 ^= h32 >> 13;
+    h32 *= PRIME32_3;
+    h32 ^= h32 >> 16;
+
+    return h32;
+}
+
+
+XXH_PUBLIC_API unsigned int XXH32_digest (const XXH32_state_t* state_in)
+{
+    XXH_endianess endian_detected = (XXH_endianess)XXH_CPU_LITTLE_ENDIAN;
+
+    if ((endian_detected==XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
+        return XXH32_digest_endian(state_in, XXH_littleEndian);
+    else
+        return XXH32_digest_endian(state_in, XXH_bigEndian);
+}
+
+
+/*======   Canonical representation   ======*/
+
+/*! Default XXH result types are basic unsigned 32 and 64 bits.
+*   The canonical representation follows human-readable write convention, aka big-endian (large digits first).
+*   These functions allow transformation of hash result into and from its canonical format.
+*   This way, hash values can be written into a file or buffer, and remain comparable across different systems and programs.
+*/
+
+XXH_PUBLIC_API void XXH32_canonicalFromHash(XXH32_canonical_t* dst, XXH32_hash_t hash)
+{
+    XXH_STATIC_ASSERT(sizeof(XXH32_canonical_t) == sizeof(XXH32_hash_t));
+    if (XXH_CPU_LITTLE_ENDIAN) hash = XXH_swap32(hash);
+    memcpy(dst, &hash, sizeof(*dst));
+}
+
+XXH_PUBLIC_API XXH32_hash_t XXH32_hashFromCanonical(const XXH32_canonical_t* src)
+{
+    return XXH_readBE32(src);
+}
+
+
+#ifndef XXH_NO_LONG_LONG
+
+/* *******************************************************************
+*  64-bits hash functions
+*********************************************************************/
+
+/*======   Memory access   ======*/
+
+#ifndef MEM_MODULE
+# define MEM_MODULE
+# if !defined (__VMS) && (defined (__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */) )
+#   include <stdint.h>
+    typedef uint64_t U64;
+# else
+    typedef unsigned long long U64;   /* if your compiler doesn't support unsigned long long, replace by another 64-bit type here. Note that xxhash.h will also need to be updated. */
+# endif
+#endif
+
+
+#if (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==2))
+
+/* Force direct memory access. Only works on CPUs that support unaligned memory access in hardware */
+static U64 XXH_read64(const void* memPtr) { return *(const U64*) memPtr; }
+
+#elif (defined(XXH_FORCE_MEMORY_ACCESS) && (XXH_FORCE_MEMORY_ACCESS==1))
+
+/* __pack instructions are safer, but compiler specific, hence potentially problematic for some compilers */
+/* currently only defined for gcc and icc */
+typedef union { U32 u32; U64 u64; } __attribute__((packed)) unalign64;
+static U64 XXH_read64(const void* ptr) { return ((const unalign64*)ptr)->u64; }
+
+#else
+
+/* portable and safe solution. Generally efficient.
+ * see : http://stackoverflow.com/a/32095106/646947
+ */
+
+static U64 XXH_read64(const void* memPtr)
+{
+    U64 val;
+    memcpy(&val, memPtr, sizeof(val));
+    return val;
+}
+
+#endif   /* XXH_FORCE_MEMORY_ACCESS */
+
+#if defined(_MSC_VER)     /* Visual Studio */
+#  define XXH_swap64 _byteswap_uint64
+#elif XXH_GCC_VERSION >= 403
+#  define XXH_swap64 __builtin_bswap64
+#else
+static U64 XXH_swap64 (U64 x)
+{
+    return  ((x << 56) & 0xff00000000000000ULL) |
+            ((x << 40) & 0x00ff000000000000ULL) |
+            ((x << 24) & 0x0000ff0000000000ULL) |
+            ((x << 8)  & 0x000000ff00000000ULL) |
+            ((x >> 8)  & 0x00000000ff000000ULL) |
+            ((x >> 24) & 0x0000000000ff0000ULL) |
+            ((x >> 40) & 0x000000000000ff00ULL) |
+            ((x >> 56) & 0x00000000000000ffULL);
+}
+#endif
+
+FORCE_INLINE U64 XXH_readLE64_align(const void* ptr, XXH_endianess endian, XXH_alignment align)
+{
+    if (align==XXH_unaligned)
+        return endian==XXH_littleEndian ? XXH_read64(ptr) : XXH_swap64(XXH_read64(ptr));
+    else
+        return endian==XXH_littleEndian ? *(const U64*)ptr : XXH_swap64(*(const U64*)ptr);
+}
+
+FORCE_INLINE U64 XXH_readLE64(const void* ptr, XXH_endianess endian)
+{
+    return XXH_readLE64_align(ptr, endian, XXH_unaligned);
+}
+
+static U64 XXH_readBE64(const void* ptr)
+{
+    return XXH_CPU_LITTLE_ENDIAN ? XXH_swap64(XXH_read64(ptr)) : XXH_read64(ptr);
+}
+
+
+/*======   xxh64   ======*/
+
+static const U64 PRIME64_1 = 11400714785074694791ULL;
+static const U64 PRIME64_2 = 14029467366897019727ULL;
+static const U64 PRIME64_3 =  1609587929392839161ULL;
+static const U64 PRIME64_4 =  9650029242287828579ULL;
+static const U64 PRIME64_5 =  2870177450012600261ULL;
+
+static U64 XXH64_round(U64 acc, U64 input)
+{
+    acc += input * PRIME64_2;
+    acc  = XXH_rotl64(acc, 31);
+    acc *= PRIME64_1;
+    return acc;
+}
+
+static U64 XXH64_mergeRound(U64 acc, U64 val)
+{
+    val  = XXH64_round(0, val);
+    acc ^= val;
+    acc  = acc * PRIME64_1 + PRIME64_4;
+    return acc;
+}
+
+FORCE_INLINE U64 XXH64_endian_align(const void* input, size_t len, U64 seed, XXH_endianess endian, XXH_alignment align)
+{
+    const BYTE* p = (const BYTE*)input;
+    const BYTE* bEnd = p + len;
+    U64 h64;
+#define XXH_get64bits(p) XXH_readLE64_align(p, endian, align)
+
+#ifdef XXH_ACCEPT_NULL_INPUT_POINTER
+    if (p==NULL) {
+        len=0;
+        bEnd=p=(const BYTE*)(size_t)32;
+    }
+#endif
+
+    if (len>=32) {
+        const BYTE* const limit = bEnd - 32;
+        U64 v1 = seed + PRIME64_1 + PRIME64_2;
+        U64 v2 = seed + PRIME64_2;
+        U64 v3 = seed + 0;
+        U64 v4 = seed - PRIME64_1;
+
+        do {
+            v1 = XXH64_round(v1, XXH_get64bits(p)); p+=8;
+            v2 = XXH64_round(v2, XXH_get64bits(p)); p+=8;
+            v3 = XXH64_round(v3, XXH_get64bits(p)); p+=8;
+            v4 = XXH64_round(v4, XXH_get64bits(p)); p+=8;
+        } while (p<=limit);
+
+        h64 = XXH_rotl64(v1, 1) + XXH_rotl64(v2, 7) + XXH_rotl64(v3, 12) + XXH_rotl64(v4, 18);
+        h64 = XXH64_mergeRound(h64, v1);
+        h64 = XXH64_mergeRound(h64, v2);
+        h64 = XXH64_mergeRound(h64, v3);
+        h64 = XXH64_mergeRound(h64, v4);
+
+    } else {
+        h64  = seed + PRIME64_5;
+    }
+
+    h64 += (U64) len;
+
+    while (p+8<=bEnd) {
+        U64 const k1 = XXH64_round(0, XXH_get64bits(p));
+        h64 ^= k1;
+        h64  = XXH_rotl64(h64,27) * PRIME64_1 + PRIME64_4;
+        p+=8;
+    }
+
+    if (p+4<=bEnd) {
+        h64 ^= (U64)(XXH_get32bits(p)) * PRIME64_1;
+        h64 = XXH_rotl64(h64, 23) * PRIME64_2 + PRIME64_3;
+        p+=4;
+    }
+
+    while (p<bEnd) {
+        h64 ^= (*p) * PRIME64_5;
+        h64 = XXH_rotl64(h64, 11) * PRIME64_1;
+        p++;
+    }
+
+    h64 ^= h64 >> 33;
+    h64 *= PRIME64_2;
+    h64 ^= h64 >> 29;
+    h64 *= PRIME64_3;
+    h64 ^= h64 >> 32;
+
+    return h64;
+}
+
+
+XXH_PUBLIC_API unsigned long long XXH64 (const void* input, size_t len, unsigned long long seed)
+{
+#if 0
+    /* Simple version, good for code maintenance, but unfortunately slow for small inputs */
+    XXH64_state_t state;
+    XXH64_reset(&state, seed);
+    XXH64_update(&state, input, len);
+    return XXH64_digest(&state);
+#else
+    XXH_endianess endian_detected = (XXH_endianess)XXH_CPU_LITTLE_ENDIAN;
+
+    if (XXH_FORCE_ALIGN_CHECK) {
+        if ((((size_t)input) & 7)==0) {  /* Input is aligned, let's leverage the speed advantage */
+            if ((endian_detected==XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
+                return XXH64_endian_align(input, len, seed, XXH_littleEndian, XXH_aligned);
+            else
+                return XXH64_endian_align(input, len, seed, XXH_bigEndian, XXH_aligned);
+    }   }
+
+    if ((endian_detected==XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
+        return XXH64_endian_align(input, len, seed, XXH_littleEndian, XXH_unaligned);
+    else
+        return XXH64_endian_align(input, len, seed, XXH_bigEndian, XXH_unaligned);
+#endif
+}
+
+/*======   Hash Streaming   ======*/
+
+XXH_PUBLIC_API XXH64_state_t* XXH64_createState(void)
+{
+    return (XXH64_state_t*)XXH_malloc(sizeof(XXH64_state_t));
+}
+XXH_PUBLIC_API XXH_errorcode XXH64_freeState(XXH64_state_t* statePtr)
+{
+    XXH_free(statePtr);
+    return XXH_OK;
+}
+
+XXH_PUBLIC_API void XXH64_copyState(XXH64_state_t* dstState, const XXH64_state_t* srcState)
+{
+    memcpy(dstState, srcState, sizeof(*dstState));
+}
+
+XXH_PUBLIC_API XXH_errorcode XXH64_reset(XXH64_state_t* statePtr, unsigned long long seed)
+{
+    XXH64_state_t state;   /* using a local state to memcpy() in order to avoid strict-aliasing warnings */
+    memset(&state, 0, sizeof(state)-8);   /* do not write into reserved, for future removal */
+    state.v1 = seed + PRIME64_1 + PRIME64_2;
+    state.v2 = seed + PRIME64_2;
+    state.v3 = seed + 0;
+    state.v4 = seed - PRIME64_1;
+    memcpy(statePtr, &state, sizeof(state));
+    return XXH_OK;
+}
+
+FORCE_INLINE XXH_errorcode XXH64_update_endian (XXH64_state_t* state, const void* input, size_t len, XXH_endianess endian)
+{
+    const BYTE* p = (const BYTE*)input;
+    const BYTE* const bEnd = p + len;
+
+#ifdef XXH_ACCEPT_NULL_INPUT_POINTER
+    if (input==NULL) return XXH_ERROR;
+#endif
+
+    state->total_len += len;
+
+    if (state->memsize + len < 32) {  /* fill in tmp buffer */
+        XXH_memcpy(((BYTE*)state->mem64) + state->memsize, input, len);
+        state->memsize += (U32)len;
+        return XXH_OK;
+    }
+
+    if (state->memsize) {   /* tmp buffer is full */
+        XXH_memcpy(((BYTE*)state->mem64) + state->memsize, input, 32-state->memsize);
+        state->v1 = XXH64_round(state->v1, XXH_readLE64(state->mem64+0, endian));
+        state->v2 = XXH64_round(state->v2, XXH_readLE64(state->mem64+1, endian));
+        state->v3 = XXH64_round(state->v3, XXH_readLE64(state->mem64+2, endian));
+        state->v4 = XXH64_round(state->v4, XXH_readLE64(state->mem64+3, endian));
+        p += 32-state->memsize;
+        state->memsize = 0;
+    }
+
+    if (p+32 <= bEnd) {
+        const BYTE* const limit = bEnd - 32;
+        U64 v1 = state->v1;
+        U64 v2 = state->v2;
+        U64 v3 = state->v3;
+        U64 v4 = state->v4;
+
+        do {
+            v1 = XXH64_round(v1, XXH_readLE64(p, endian)); p+=8;
+            v2 = XXH64_round(v2, XXH_readLE64(p, endian)); p+=8;
+            v3 = XXH64_round(v3, XXH_readLE64(p, endian)); p+=8;
+            v4 = XXH64_round(v4, XXH_readLE64(p, endian)); p+=8;
+        } while (p<=limit);
+
+        state->v1 = v1;
+        state->v2 = v2;
+        state->v3 = v3;
+        state->v4 = v4;
+    }
+
+    if (p < bEnd) {
+        XXH_memcpy(state->mem64, p, (size_t)(bEnd-p));
+        state->memsize = (unsigned)(bEnd-p);
+    }
+
+    return XXH_OK;
+}
+
+XXH_PUBLIC_API XXH_errorcode XXH64_update (XXH64_state_t* state_in, const void* input, size_t len)
+{
+    XXH_endianess endian_detected = (XXH_endianess)XXH_CPU_LITTLE_ENDIAN;
+
+    if ((endian_detected==XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
+        return XXH64_update_endian(state_in, input, len, XXH_littleEndian);
+    else
+        return XXH64_update_endian(state_in, input, len, XXH_bigEndian);
+}
+
+FORCE_INLINE U64 XXH64_digest_endian (const XXH64_state_t* state, XXH_endianess endian)
+{
+    const BYTE * p = (const BYTE*)state->mem64;
+    const BYTE* const bEnd = (const BYTE*)state->mem64 + state->memsize;
+    U64 h64;
+
+    if (state->total_len >= 32) {
+        U64 const v1 = state->v1;
+        U64 const v2 = state->v2;
+        U64 const v3 = state->v3;
+        U64 const v4 = state->v4;
+
+        h64 = XXH_rotl64(v1, 1) + XXH_rotl64(v2, 7) + XXH_rotl64(v3, 12) + XXH_rotl64(v4, 18);
+        h64 = XXH64_mergeRound(h64, v1);
+        h64 = XXH64_mergeRound(h64, v2);
+        h64 = XXH64_mergeRound(h64, v3);
+        h64 = XXH64_mergeRound(h64, v4);
+    } else {
+        h64  = state->v3 + PRIME64_5;
+    }
+
+    h64 += (U64) state->total_len;
+
+    while (p+8<=bEnd) {
+        U64 const k1 = XXH64_round(0, XXH_readLE64(p, endian));
+        h64 ^= k1;
+        h64  = XXH_rotl64(h64,27) * PRIME64_1 + PRIME64_4;
+        p+=8;
+    }
+
+    if (p+4<=bEnd) {
+        h64 ^= (U64)(XXH_readLE32(p, endian)) * PRIME64_1;
+        h64  = XXH_rotl64(h64, 23) * PRIME64_2 + PRIME64_3;
+        p+=4;
+    }
+
+    while (p<bEnd) {
+        h64 ^= (*p) * PRIME64_5;
+        h64  = XXH_rotl64(h64, 11) * PRIME64_1;
+        p++;
+    }
+
+    h64 ^= h64 >> 33;
+    h64 *= PRIME64_2;
+    h64 ^= h64 >> 29;
+    h64 *= PRIME64_3;
+    h64 ^= h64 >> 32;
+
+    return h64;
+}
+
+XXH_PUBLIC_API unsigned long long XXH64_digest (const XXH64_state_t* state_in)
+{
+    XXH_endianess endian_detected = (XXH_endianess)XXH_CPU_LITTLE_ENDIAN;
+
+    if ((endian_detected==XXH_littleEndian) || XXH_FORCE_NATIVE_FORMAT)
+        return XXH64_digest_endian(state_in, XXH_littleEndian);
+    else
+        return XXH64_digest_endian(state_in, XXH_bigEndian);
+}
+
+
+/*====== Canonical representation   ======*/
+
+XXH_PUBLIC_API void XXH64_canonicalFromHash(XXH64_canonical_t* dst, XXH64_hash_t hash)
+{
+    XXH_STATIC_ASSERT(sizeof(XXH64_canonical_t) == sizeof(XXH64_hash_t));
+    if (XXH_CPU_LITTLE_ENDIAN) hash = XXH_swap64(hash);
+    memcpy(dst, &hash, sizeof(*dst));
+}
+
+XXH_PUBLIC_API XXH64_hash_t XXH64_hashFromCanonical(const XXH64_canonical_t* src)
+{
+    return XXH_readBE64(src);
+}
+
+#endif  /* XXH_NO_LONG_LONG */
diff --git a/src/third_party/vulkan-validation-layers/src/layers/xxhash.h b/src/third_party/vulkan-validation-layers/src/layers/xxhash.h
new file mode 100644
index 0000000..9d831e0
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/layers/xxhash.h
@@ -0,0 +1,293 @@
+/*
+   xxHash - Extremely Fast Hash algorithm
+   Header File
+   Copyright (C) 2012-2016, Yann Collet.
+
+   BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
+
+   Redistribution and use in source and binary forms, with or without
+   modification, are permitted provided that the following conditions are
+   met:
+
+       * Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+       * Redistributions in binary form must reproduce the above
+   copyright notice, this list of conditions and the following disclaimer
+   in the documentation and/or other materials provided with the
+   distribution.
+
+   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+   You can contact the author at :
+   - xxHash source repository : https://github.com/Cyan4973/xxHash
+*/
+
+/* Notice extracted from xxHash homepage :
+
+xxHash is an extremely fast Hash algorithm, running at RAM speed limits.
+It also successfully passes all tests from the SMHasher suite.
+
+Comparison (single thread, Windows Seven 32 bits, using SMHasher on a Core 2 Duo @3GHz)
+
+Name            Speed       Q.Score   Author
+xxHash          5.4 GB/s     10
+CrapWow         3.2 GB/s      2       Andrew
+MurmurHash 3a   2.7 GB/s     10       Austin Appleby
+SpookyHash      2.0 GB/s     10       Bob Jenkins
+SBox            1.4 GB/s      9       Bret Mulvey
+Lookup3         1.2 GB/s      9       Bob Jenkins
+SuperFastHash   1.2 GB/s      1       Paul Hsieh
+CityHash64      1.05 GB/s    10       Pike & Alakuijala
+FNV             0.55 GB/s     5       Fowler, Noll, Vo
+CRC32           0.43 GB/s     9
+MD5-32          0.33 GB/s    10       Ronald L. Rivest
+SHA1-32         0.28 GB/s    10
+
+Q.Score is a measure of quality of the hash function.
+It depends on successfully passing SMHasher test set.
+10 is a perfect score.
+
+A 64-bits version, named XXH64, is available since r35.
+It offers much better speed, but for 64-bits applications only.
+Name     Speed on 64 bits    Speed on 32 bits
+XXH64       13.8 GB/s            1.9 GB/s
+XXH32        6.8 GB/s            6.0 GB/s
+*/
+
+#ifndef XXHASH_H_5627135585666179
+#define XXHASH_H_5627135585666179 1
+
+#if defined (__cplusplus)
+extern "C" {
+#endif
+
+
+/* ****************************
+*  Definitions
+******************************/
+#include <stddef.h>   /* size_t */
+typedef enum { XXH_OK=0, XXH_ERROR } XXH_errorcode;
+
+
+/* ****************************
+*  API modifier
+******************************/
+/** XXH_PRIVATE_API
+*   This is useful to include xxhash functions in `static` mode
+*   in order to inline them, and remove their symbol from the public list.
+*   Methodology :
+*     #define XXH_PRIVATE_API
+*     #include "xxhash.h"
+*   `xxhash.c` is automatically included.
+*   It's not useful to compile and link it as a separate module.
+*/
+#ifdef XXH_PRIVATE_API
+#  ifndef XXH_STATIC_LINKING_ONLY
+#    define XXH_STATIC_LINKING_ONLY
+#  endif
+#  if defined(__GNUC__)
+#    define XXH_PUBLIC_API static __inline __attribute__((unused))
+#  elif defined (__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */)
+#    define XXH_PUBLIC_API static inline
+#  elif defined(_MSC_VER)
+#    define XXH_PUBLIC_API static __inline
+#  else
+#    define XXH_PUBLIC_API static   /* this version may generate warnings for unused static functions; disable the relevant warning */
+#  endif
+#else
+#  define XXH_PUBLIC_API   /* do nothing */
+#endif /* XXH_PRIVATE_API */
+
+/*!XXH_NAMESPACE, aka Namespace Emulation :
+
+If you want to include _and expose_ xxHash functions from within your own library,
+but also want to avoid symbol collisions with other libraries which may also include xxHash,
+
+you can use XXH_NAMESPACE, to automatically prefix any public symbol from xxhash library
+with the value of XXH_NAMESPACE (therefore, avoid NULL and numeric values).
+
+Note that no change is required within the calling program as long as it includes `xxhash.h` :
+regular symbol names will be automatically translated by this header.
+*/
+#ifdef XXH_NAMESPACE
+#  define XXH_CAT(A,B) A##B
+#  define XXH_NAME2(A,B) XXH_CAT(A,B)
+#  define XXH_versionNumber XXH_NAME2(XXH_NAMESPACE, XXH_versionNumber)
+#  define XXH32 XXH_NAME2(XXH_NAMESPACE, XXH32)
+#  define XXH32_createState XXH_NAME2(XXH_NAMESPACE, XXH32_createState)
+#  define XXH32_freeState XXH_NAME2(XXH_NAMESPACE, XXH32_freeState)
+#  define XXH32_reset XXH_NAME2(XXH_NAMESPACE, XXH32_reset)
+#  define XXH32_update XXH_NAME2(XXH_NAMESPACE, XXH32_update)
+#  define XXH32_digest XXH_NAME2(XXH_NAMESPACE, XXH32_digest)
+#  define XXH32_copyState XXH_NAME2(XXH_NAMESPACE, XXH32_copyState)
+#  define XXH32_canonicalFromHash XXH_NAME2(XXH_NAMESPACE, XXH32_canonicalFromHash)
+#  define XXH32_hashFromCanonical XXH_NAME2(XXH_NAMESPACE, XXH32_hashFromCanonical)
+#  define XXH64 XXH_NAME2(XXH_NAMESPACE, XXH64)
+#  define XXH64_createState XXH_NAME2(XXH_NAMESPACE, XXH64_createState)
+#  define XXH64_freeState XXH_NAME2(XXH_NAMESPACE, XXH64_freeState)
+#  define XXH64_reset XXH_NAME2(XXH_NAMESPACE, XXH64_reset)
+#  define XXH64_update XXH_NAME2(XXH_NAMESPACE, XXH64_update)
+#  define XXH64_digest XXH_NAME2(XXH_NAMESPACE, XXH64_digest)
+#  define XXH64_copyState XXH_NAME2(XXH_NAMESPACE, XXH64_copyState)
+#  define XXH64_canonicalFromHash XXH_NAME2(XXH_NAMESPACE, XXH64_canonicalFromHash)
+#  define XXH64_hashFromCanonical XXH_NAME2(XXH_NAMESPACE, XXH64_hashFromCanonical)
+#endif
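
As a hedged illustration of the namespace emulation above, a consumer could define XXH_NAMESPACE when compiling xxhash.c and keep calling the unprefixed names; the MYLIB_ prefix below is a hypothetical example, not part of this patch.

/* Sketch: build xxhash.c and this file with -DXXH_NAMESPACE=MYLIB_ */
#include <stdio.h>
#include "xxhash.h"   /* the macros above translate XXH32 into MYLIB_XXH32 */

int main(void)
{
    const char data[] = "hello";
    unsigned int h = XXH32(data, sizeof(data) - 1, 0);   /* call site is unchanged */
    printf("%08x\n", h);
    return 0;
}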
+
+
+/* *************************************
+*  Version
+***************************************/
+#define XXH_VERSION_MAJOR    0
+#define XXH_VERSION_MINOR    6
+#define XXH_VERSION_RELEASE  2
+#define XXH_VERSION_NUMBER  (XXH_VERSION_MAJOR *100*100 + XXH_VERSION_MINOR *100 + XXH_VERSION_RELEASE)
+XXH_PUBLIC_API unsigned XXH_versionNumber (void);
+
+
+/*-**********************************************************************
+*  32-bits hash
+************************************************************************/
+typedef unsigned int XXH32_hash_t;
+
+/*! XXH32() :
+    Calculate the 32-bits hash of a sequence of "length" bytes stored at memory address "input".
+    The memory between input & input+length must be valid (allocated and read-accessible).
+    "seed" can be used to alter the result predictably.
+    Speed on Core 2 Duo @ 3 GHz (single thread, SMHasher benchmark) : 5.4 GB/s */
+XXH_PUBLIC_API XXH32_hash_t XXH32 (const void* input, size_t length, unsigned int seed);
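
For orientation, a minimal one-shot use of the prototype above might look like the sketch below; the message and seed are illustrative only.

#include <stdio.h>
#include <string.h>
#include "xxhash.h"

int main(void)
{
    const char msg[] = "xxhash one-shot example";
    XXH32_hash_t h = XXH32(msg, strlen(msg), 0);   /* hash the whole buffer in one call, seed 0 */
    printf("XXH32 = 0x%08x\n", (unsigned int)h);
    return 0;
}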
+
+/*======   Streaming   ======*/
+typedef struct XXH32_state_s XXH32_state_t;   /* incomplete type */
+XXH_PUBLIC_API XXH32_state_t* XXH32_createState(void);
+XXH_PUBLIC_API XXH_errorcode  XXH32_freeState(XXH32_state_t* statePtr);
+XXH_PUBLIC_API void XXH32_copyState(XXH32_state_t* dst_state, const XXH32_state_t* src_state);
+
+XXH_PUBLIC_API XXH_errorcode XXH32_reset  (XXH32_state_t* statePtr, unsigned int seed);
+XXH_PUBLIC_API XXH_errorcode XXH32_update (XXH32_state_t* statePtr, const void* input, size_t length);
+XXH_PUBLIC_API XXH32_hash_t  XXH32_digest (const XXH32_state_t* statePtr);
+
+/*
+These functions generate the xxHash of an input provided in multiple segments.
+Note that, for small input, they are slower than single-call functions, due to state management.
+For small input, prefer `XXH32()` and `XXH64()` .
+
+XXH state must first be allocated, using XXH*_createState() .
+
+Start a new hash by initializing state with a seed, using XXH*_reset().
+
+Then, feed the hash state by calling XXH*_update() as many times as necessary.
+Obviously, input must be allocated and read accessible.
+The function returns an error code, with 0 meaning OK, and any other value meaning there is an error.
+
+Finally, a hash value can be produced anytime, by using XXH*_digest().
+This function returns the nn-bits hash as an int or long long.
+
+It's still possible to continue inserting input into the hash state after a digest,
+and generate some new hashes later on, by calling again XXH*_digest().
+
+When done, free XXH state space if it was allocated dynamically.
+*/
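
A minimal sketch of the streaming flow described above, with error handling trimmed and the input split arbitrarily into two segments:

#include <stdio.h>
#include "xxhash.h"

int main(void)
{
    XXH32_state_t* const state = XXH32_createState();   /* allocate streaming state */
    if (state == NULL) return 1;
    XXH32_reset(state, 0);                               /* start a new hash with seed 0 */
    XXH32_update(state, "split ", 6);                    /* feed input in several segments */
    XXH32_update(state, "input", 5);
    printf("XXH32 = 0x%08x\n", (unsigned int)XXH32_digest(state));
    XXH32_freeState(state);                              /* release the dynamically allocated state */
    return 0;
}

The digest equals what the one-shot XXH32() would return for the concatenated input and the same seed.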
+
+/*======   Canonical representation   ======*/
+
+typedef struct { unsigned char digest[4]; } XXH32_canonical_t;
+XXH_PUBLIC_API void XXH32_canonicalFromHash(XXH32_canonical_t* dst, XXH32_hash_t hash);
+XXH_PUBLIC_API XXH32_hash_t XXH32_hashFromCanonical(const XXH32_canonical_t* src);
+
+/* Default result types for XXH functions are primitive unsigned 32 and 64 bits.
+*  The canonical representation uses human-readable write convention, aka big-endian (large digits first).
+*  These functions allow transformation of hash result into and from its canonical format.
+*  This way, hash values can be written into a file / memory, and remain comparable on different systems and programs.
+*/
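
The round trip described above can be sketched as follows; the "payload" buffer stands in for whatever data a program would hash before serializing the result:

#include <assert.h>
#include "xxhash.h"

int main(void)
{
    XXH32_hash_t const h = XXH32("payload", 7, 0);

    XXH32_canonical_t canonical;
    XXH32_canonicalFromHash(&canonical, h);        /* big-endian bytes, safe to write to a file */

    XXH32_hash_t const restored = XXH32_hashFromCanonical(&canonical);
    assert(restored == h);                         /* identical value on any platform */
    return 0;
}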
+
+
+#ifndef XXH_NO_LONG_LONG
+/*-**********************************************************************
+*  64-bits hash
+************************************************************************/
+typedef unsigned long long XXH64_hash_t;
+
+/*! XXH64() :
+    Calculate the 64-bits hash of a sequence of "length" bytes stored at memory address "input".
+    "seed" can be used to alter the result predictably.
+    This function runs faster on 64-bits systems, but slower on 32-bits systems (see benchmark).
+*/
+XXH_PUBLIC_API XXH64_hash_t XXH64 (const void* input, size_t length, unsigned long long seed);
+
+/*======   Streaming   ======*/
+typedef struct XXH64_state_s XXH64_state_t;   /* incomplete type */
+XXH_PUBLIC_API XXH64_state_t* XXH64_createState(void);
+XXH_PUBLIC_API XXH_errorcode  XXH64_freeState(XXH64_state_t* statePtr);
+XXH_PUBLIC_API void XXH64_copyState(XXH64_state_t* dst_state, const XXH64_state_t* src_state);
+
+XXH_PUBLIC_API XXH_errorcode XXH64_reset  (XXH64_state_t* statePtr, unsigned long long seed);
+XXH_PUBLIC_API XXH_errorcode XXH64_update (XXH64_state_t* statePtr, const void* input, size_t length);
+XXH_PUBLIC_API XXH64_hash_t  XXH64_digest (const XXH64_state_t* statePtr);
+
+/*======   Canonical representation   ======*/
+typedef struct { unsigned char digest[8]; } XXH64_canonical_t;
+XXH_PUBLIC_API void XXH64_canonicalFromHash(XXH64_canonical_t* dst, XXH64_hash_t hash);
+XXH_PUBLIC_API XXH64_hash_t XXH64_hashFromCanonical(const XXH64_canonical_t* src);
+#endif  /* XXH_NO_LONG_LONG */
+
+
+#ifdef XXH_STATIC_LINKING_ONLY
+
+/* ================================================================================================
+   This section contains definitions which are not guaranteed to remain stable.
+   They may change in future versions, becoming incompatible with a different version of the library.
+   They shall only be used with static linking.
+   Never use these definitions in association with dynamic linking !
+=================================================================================================== */
+
+/* These definitions are only meant to make possible
+   static allocation of XXH state, on stack or in a struct for example.
+   Never use members directly. */
+
+struct XXH32_state_s {
+   unsigned total_len_32;
+   unsigned large_len;
+   unsigned v1;
+   unsigned v2;
+   unsigned v3;
+   unsigned v4;
+   unsigned mem32[4];   /* buffer defined as U32 for alignment */
+   unsigned memsize;
+   unsigned reserved;   /* never read nor write, will be removed in a future version */
+};   /* typedef'd to XXH32_state_t */
+
+#ifndef XXH_NO_LONG_LONG   /* remove 64-bits support */
+struct XXH64_state_s {
+   unsigned long long total_len;
+   unsigned long long v1;
+   unsigned long long v2;
+   unsigned long long v3;
+   unsigned long long v4;
+   unsigned long long mem64[4];   /* buffer defined as U64 for alignment */
+   unsigned memsize;
+   unsigned reserved[2];          /* never read nor write, will be removed in a future version */
+};   /* typedef'd to XXH64_state_t */
+#endif
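
Given the struct definitions above, a hedged sketch of static allocation under XXH_STATIC_LINKING_ONLY, trading XXH32_createState() for a stack variable:

#define XXH_STATIC_LINKING_ONLY   /* exposes struct XXH32_state_s above */
#include <stdio.h>
#include "xxhash.h"

int main(void)
{
    XXH32_state_t state;                            /* stack allocation; members are never accessed directly */
    XXH32_reset(&state, 0);
    XXH32_update(&state, "abc", 3);
    printf("XXH32 = 0x%08x\n", (unsigned int)XXH32_digest(&state));
    return 0;                                       /* no XXH32_freeState() needed here */
}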
+
+#ifdef XXH_PRIVATE_API
+#  include "xxhash.c"   /* include xxhash function bodies as `static`, for inlining */
+#endif
+
+#endif /* XXH_STATIC_LINKING_ONLY */
+
+
+#if defined (__cplusplus)
+}
+#endif
+
+#endif /* XXHASH_H_5627135585666179 */
diff --git a/src/third_party/vulkan-validation-layers/src/scripts/check_code_format.sh b/src/third_party/vulkan-validation-layers/src/scripts/check_code_format.sh
new file mode 100755
index 0000000..0b5b01c
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/scripts/check_code_format.sh
@@ -0,0 +1,66 @@
+#!/bin/bash
+# Copyright (c) 2017-2019 Google Inc.
+# Copyright (c) 2019 LunarG, Inc.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Script to determine if the source code in a Pull Request is properly formatted.
+# Exits with a non-zero exit code if formatting is needed.
+#
+# This script assumes it is invoked from the project root directory.
+
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+NC='\033[0m' # No Color
+FOUND_ERROR=0
+
+FILES_TO_CHECK=$(git diff --name-only master | grep -v -E "^include/vulkan" | grep -E ".*\.(cpp|cc|c\+\+|cxx|c|h|hpp)$")
+COPYRIGHTED_FILES_TO_CHECK=$(git diff --name-only master | grep -v -E "^include/vulkan")
+
+if [ -z "${FILES_TO_CHECK}" ]; then
+  echo -e "${GREEN}No source code to check for formatting.${NC}"
+else
+  # Check source files in PR for clang-format errors
+  FORMAT_DIFF=$(git diff -U0 master -- ${FILES_TO_CHECK} | python ./scripts/clang-format-diff.py -p1 -style=file)
+
+  if [ ! -z "${FORMAT_DIFF}" ]; then
+    echo -e "${RED}Found formatting errors!${NC}"
+    echo "${FORMAT_DIFF}"
+    FOUND_ERROR=1
+  fi
+
+  # Check files in the PR for out-of-date copyright notices
+  if [ -z "${COPYRIGHTED_FILES_TO_CHECK}" ]; then
+    echo -e "${GREEN}No source code to check for copyright dates.${NC}"
+  else
+    THISYEAR=$(date +"%Y")
+    # Look for current year in copyright lines
+    for AFILE in ${COPYRIGHTED_FILES_TO_CHECK}
+    do
+      COPYRIGHT_INFO=$(cat ${AFILE} | grep -E "Copyright (.)*LunarG")
+      if [ ! -z "${COPYRIGHT_INFO}" ]; then
+        if ! echo "$COPYRIGHT_INFO" | grep -q "$THISYEAR" ; then
+          echo -e "${RED} "$AFILE" has an out-of-date copyright notice.${NC}"
+          FOUND_ERROR=1
+        fi
+      fi
+    done
+  fi
+fi
+
+if [ $FOUND_ERROR  -gt 0 ]; then
+  exit 1
+else
+  echo -e "${GREEN}All source code in PR properly formatted.${NC}"
+  exit 0
+fi
diff --git a/src/third_party/vulkan-validation-layers/src/scripts/check_commit_message_format.sh b/src/third_party/vulkan-validation-layers/src/scripts/check_commit_message_format.sh
new file mode 100755
index 0000000..8b7c3ce
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/scripts/check_commit_message_format.sh
@@ -0,0 +1,111 @@
+#!/bin/bash
+# Copyright (c) 2018 Valve Corporation
+# Copyright (c) 2018 LunarG, Inc.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Checks commit messages against the project standards in the CONTRIBUTING.md document.
+# Script to determine if commit messages in a Pull Request are properly formatted.
+# Exits with a non-zero exit code if reformatting is needed.
+
+# Run the last command of each pipeline in the current shell (lastpipe), so the while loop below can update variables
+shopt -s lastpipe
+
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+NC='\033[0m' # No Color
+
+# TRAVIS_COMMIT_RANGE contains range of commits for this PR
+
+# Get user-supplied commit message text for applicable commits and insert
+# a unique separator string identifier. The git command returns ONLY the
+# subject line and body for each of the commits.
+TRAVIS_COMMIT_RANGE="${TRAVIS_COMMIT_RANGE/.../..}"
+COMMIT_TEXT=$(git log ${TRAVIS_COMMIT_RANGE} --pretty=format:"XXXNEWLINEXXX"%n%B)
+
+# Bail if there are none
+if [ -z "${COMMIT_TEXT}" ]; then
+  echo -e "${GREEN}No commit messgages to check for formatting.${NC}"
+  exit 0
+elif ! echo $TRAVIS_COMMIT_RANGE | grep -q "\.\.\."; then
+  echo -e "${GREEN}No commit messgages to check for formatting.${NC}"
+  exit 0
+fi
+
+# Process commit messages
+success=1
+current_line=0
+prevline=""
+
+# Process each line of the commit message output, resetting counter on separator
+printf %s "$COMMIT_TEXT" | while IFS='' read -r line; do
+  # echo "Count = $current_line <Line> = $line"
+  current_line=$((current_line+1))
+  if [ "$line" = "XXXNEWLINEXXX" ]; then
+    current_line=0
+  fi
+  chars=${#line}
+  if [ $current_line -eq 1 ]; then
+    # Subject line should be 64 chars or less
+    if [ $chars -gt 64 ]; then
+      echo "The following subject line exceeds 64 characters in length."
+      echo "     '$line'"
+      success=0
+    fi
+    i=$(($chars-1))
+    last_char=${line:$i:1}
+    # Output error if the last char of the subject line is a period or comma
+    if [[ $last_char =~ [.,] ]]; then
+      echo "For the following commit, the last character of the subject line must not be a period or comma."
+      echo "     '$line'"
+      success=0
+    fi
+    # Check that the subject line starts with a 'module: ' prefix
+    prefix=$(echo $line | cut -f1 -d " ")
+    if [ "${prefix: -1}" != ":" ]; then
+      echo "The following subject line must start with a single word specifying the functional area of the change, followed by a colon and space. I.e., 'layers: Subject line here'"
+      echo "     '$line'"
+      success=0
+    fi
+    # Check if first character after the colon is lower-case
+    subject=$(echo $line | cut -f2 -d " ")
+    firstchar=$(echo ${subject} | cut -c 1)
+    if [[ "${firstchar}" =~ [a-z] ]]; then
+      echo "The first word of the subject line after the ':' character must be capitalized."
+      echo "     '$line'"
+      success=0
+    fi
+  elif [ $current_line -eq 2 ]; then
+    # Commit message must have a blank line between subject and body
+    if [ $chars -ne 0 ]; then
+      echo "The following subject line must be followed by a blank line."
+      echo "     '$prevline'"
+      success=0
+    fi
+  else
+    # Lines in a commit message body must be less than 72 characters in length (but give some slack)
+    if [ $chars -gt 76 ]; then
+      echo "The following commit message body line exceeds the 72 character limit."
+      echo "'$line\'"
+      success=0
+    fi
+  fi
+  prevline=$line
+done
+
+if [ $success -eq 1 ]; then
+  echo -e "${GREEN}All commit messages in pull request are properly formatted.${NC}"
+  exit 0
+else
+  exit 1
+fi
diff --git a/src/third_party/vulkan-validation-layers/src/scripts/cmake-format.py b/src/third_party/vulkan-validation-layers/src/scripts/cmake-format.py
new file mode 100644
index 0000000..07d2f99
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/scripts/cmake-format.py
@@ -0,0 +1,34 @@
+# Configuration for cmake-format (v0.4.1, circa Jul 2018)
+# https://github.com/cheshirekow/cmake_format
+
+# How wide to allow formatted cmake files
+line_width = 132
+
+# How many spaces to tab for indent
+tab_size = 4
+
+# If arglists are longer than this, break them always
+max_subargs_per_line = 3
+
+# If true, separate flow control names from their parentheses with a space
+separate_ctrl_name_with_space = False
+
+# If true, separate function names from parentheses with a space
+separate_fn_name_with_space = False
+
+# If a statement is wrapped to more than one line, then dangle the closing
+# parenthesis on its own line
+dangle_parens = False
+
+# What character to use for bulleted lists
+bullet_char = u'*'
+
+# What character to use as punctuation after numerals in an enumerated list
+enum_char = u'.'
+
+# What style line endings to use in the output.
+line_ending = u'unix'
+
+# Format command names consistently as 'lower' or 'upper' case
+command_case = u'lower'
+
diff --git a/src/third_party/vulkan-validation-layers/src/scripts/command_counter_generator.py b/src/third_party/vulkan-validation-layers/src/scripts/command_counter_generator.py
new file mode 100644
index 0000000..09db56d
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/scripts/command_counter_generator.py
@@ -0,0 +1,210 @@
+#!/usr/bin/python3 -i
+#
+# Copyright (c) 2015-2019 The Khronos Group Inc.
+# Copyright (c) 2015-2019 Valve Corporation
+# Copyright (c) 2015-2019 LunarG, Inc.
+# Copyright (c) 2019      Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Mark Lobodzinski <mark@lunarg.com>
+# Author: Lionel Landwerlin <lionel.g.landwerlin@intel.com>
+
+import os,re,sys
+import xml.etree.ElementTree as etree
+from generator import *
+from collections import namedtuple
+from common_codegen import *
+
+#
+# CommandCounterOutputGeneratorOptions - subclass of GeneratorOptions.
+class CommandCounterOutputGeneratorOptions(GeneratorOptions):
+    def __init__(self,
+                 conventions = None,
+                 filename = None,
+                 directory = '.',
+                 apiname = None,
+                 profile = None,
+                 versions = '.*',
+                 emitversions = '.*',
+                 defaultExtensions = None,
+                 addExtensions = None,
+                 removeExtensions = None,
+                 emitExtensions = None,
+                 sortProcedure = regSortFeatures,
+                 prefixText = "",
+                 genFuncPointers = True,
+                 apicall = '',
+                 apientry = '',
+                 apientryp = '',
+                 alignFuncParam = 0,
+                 expandEnumerants = True,
+                 lvt_file_type = ''):
+        GeneratorOptions.__init__(self, conventions, filename, directory, apiname, profile,
+                                  versions, emitversions, defaultExtensions,
+                                  addExtensions, removeExtensions, emitExtensions, sortProcedure)
+        self.prefixText      = prefixText
+        self.genFuncPointers = genFuncPointers
+        self.prefixText      = None
+        self.apicall         = apicall
+        self.apientry        = apientry
+        self.apientryp       = apientryp
+        self.alignFuncParam  = alignFuncParam
+        self.lvt_file_type   = lvt_file_type
+#
+# CommandCounterOutputGenerator - subclass of OutputGenerator.
+# Generates files needed by the layer validation state tracker
+class CommandCounterOutputGenerator(OutputGenerator):
+    """Generate command counter in VkCommandBuffer based on XML element attributes"""
+    def __init__(self,
+                 errFile = sys.stderr,
+                 warnFile = sys.stderr,
+                 diagFile = sys.stdout):
+        OutputGenerator.__init__(self, errFile, warnFile, diagFile)
+        # Internal state - accumulators for different inner block text
+        self.dispatch_list = []               # List of entries for dispatch list
+    #
+    # Called once at the beginning of each run
+    def beginFile(self, genOpts):
+        OutputGenerator.beginFile(self, genOpts)
+
+        # Initialize members that require the tree
+        self.handle_types = GetHandleTypes(self.registry.tree)
+        self.lvt_file_type = genOpts.lvt_file_type
+
+        if genOpts.lvt_file_type == 'function_pointer_header':
+            write("#pragma once", file=self.outFile)
+
+        # User-supplied prefix text, if any (list of strings)
+        if (genOpts.prefixText):
+            for s in genOpts.prefixText:
+                write(s, file=self.outFile)
+        # File Comment
+        file_comment = '// *** THIS FILE IS GENERATED - DO NOT EDIT ***\n'
+        file_comment += '// See command_counter_generator.py for modifications\n'
+        write(file_comment, file=self.outFile)
+        # Copyright Notice
+        copyright =  '/*\n'
+        copyright += ' * Copyright (c) 2015-2019 The Khronos Group Inc.\n'
+        copyright += ' * Copyright (c) 2015-2019 Valve Corporation\n'
+        copyright += ' * Copyright (c) 2015-2019 LunarG, Inc.\n'
+        copyright += ' * Copyright (c) 2019      Intel Corporation\n'
+        copyright += ' *\n'
+        copyright += ' * Licensed under the Apache License, Version 2.0 (the "License");\n'
+        copyright += ' * you may not use this file except in compliance with the License.\n'
+        copyright += ' * You may obtain a copy of the License at\n'
+        copyright += ' *\n'
+        copyright += ' *     http://www.apache.org/licenses/LICENSE-2.0\n'
+        copyright += ' *\n'
+        copyright += ' * Unless required by applicable law or agreed to in writing, software\n'
+        copyright += ' * distributed under the License is distributed on an "AS IS" BASIS,\n'
+        copyright += ' * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n'
+        copyright += ' * See the License for the specific language governing permissions and\n'
+        copyright += ' * limitations under the License.\n'
+        copyright += ' *\n'
+        copyright += ' * Author: Mark Lobodzinski <mark@lunarg.com>\n'
+        copyright += ' * Author: Lionel Landwerlin <lionel.g.landwerlin@intel.com>\n'
+        copyright += ' */\n'
+        write(copyright, file=self.outFile)
+    #
+    # Write completed source code to output file
+    def endFile(self):
+        dest_file = ''
+        dest_file += self.OutputDestFile()
+        # Remove blank lines at EOF
+        if dest_file.endswith('\n'):
+            dest_file = dest_file[:-1]
+        write(dest_file, file=self.outFile);
+        # Finish processing in superclass
+        OutputGenerator.endFile(self)
+    #
+    # Processing at beginning of each feature or extension
+    def beginFeature(self, interface, emit):
+        OutputGenerator.beginFeature(self, interface, emit)
+        self.featureExtraProtect = GetFeatureProtect(interface)
+
+    #
+    # Process commands, adding to dispatch list
+    def genCmd(self, cmdinfo, name, alias):
+        OutputGenerator.genCmd(self, cmdinfo, name, alias)
+        # Get first param type
+        params = cmdinfo.elem.findall('param')
+        info = self.getTypeNameTuple(params[0])
+        if name.startswith('vkCmd') and info[0] == 'VkCommandBuffer':
+            self.dispatch_list.append((self.featureExtraProtect, name, cmdinfo))
+
+    #
+    # Retrieve the type and name for a parameter
+    def getTypeNameTuple(self, param):
+        type = ''
+        name = ''
+        for elem in param:
+            if elem.tag == 'type':
+                type = noneStr(elem.text)
+            elif elem.tag == 'name':
+                name = noneStr(elem.text)
+        return (type, name)
+
+    #
+    # Create the command counter source functions and return them as a string
+    def GenerateFunctionPointerSource(self):
+        entries = []
+        entries = self.dispatch_list
+
+        table = '#include "chassis.h"\n'
+        table += '#include "state_tracker.h"\n'
+        table += '#include "command_counter.h"\n'
+        table += '\n'
+
+        for item in entries:
+            # Remove 'vk' from proto name
+            base_name = item[1][2:]
+
+            if item[0] is not None:
+                table += '#ifdef %s\n' % item[0]
+            params = item[2].elem.findall('param')
+            paramstext = ', '.join([''.join(param.itertext()) for param in params])
+            table += 'void CommandCounter::PreCallRecord%s(%s) {\n' % (base_name, paramstext)
+            table += '    coreChecks->IncrementCommandCount(%s);\n' % params[0].findall('name')[0].text
+            table += '}\n'
+            if item[0] is not None:
+                table += '#endif // %s\n' % item[0]
+        return table
+    #
+    # Create the command counter header declarations and return them as a string
+    def GenerateFunctionPointerHeader(self):
+        entries = []
+        table = ''
+        entries = self.dispatch_list
+
+        for item in entries:
+            # Remove 'vk' from proto name
+            base_name = item[1][2:]
+
+            if item[0] is not None:
+                table += '#ifdef %s\n' % item[0]
+            params = item[2].elem.findall('param')
+            paramstext = ', '.join([''.join(param.itertext()) for param in params])
+            table += 'void PreCallRecord%s(%s);\n' % (base_name, paramstext)
+            if item[0] is not None:
+                table += '#endif // %s\n' % item[0]
+        return table
+
+    # Create a helper file and return it as a string
+    def OutputDestFile(self):
+        if self.lvt_file_type == 'function_pointer_header':
+            return self.GenerateFunctionPointerHeader()
+        elif self.lvt_file_type == 'function_pointer_source':
+            return self.GenerateFunctionPointerSource()
+        else:
+            return 'Bad LVT File Generator Option %s' % self.lvt_file_type
diff --git a/src/third_party/vulkan-validation-layers/src/scripts/common_codegen.py b/src/third_party/vulkan-validation-layers/src/scripts/common_codegen.py
new file mode 100644
index 0000000..b505e15
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/scripts/common_codegen.py
@@ -0,0 +1,138 @@
+#!/usr/bin/python3 -i
+#
+# Copyright (c) 2015-2017, 2019 The Khronos Group Inc.
+# Copyright (c) 2015-2017, 2019 Valve Corporation
+# Copyright (c) 2015-2017, 2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Mark Lobodzinski <mark@lunarg.com>
+
+import os,re,sys,string
+import xml.etree.ElementTree as etree
+from collections import namedtuple, OrderedDict
+
+# Copyright text prefixing all headers (list of strings).
+prefixStrings = [
+    '/*',
+    '** Copyright (c) 2015-2017, 2019 The Khronos Group Inc.',
+    '** Copyright (c) 2015-2017, 2019 Valve Corporation',
+    '** Copyright (c) 2015-2017, 2019 LunarG, Inc.',
+    '** Copyright (c) 2015-2017, 2019 Google Inc.',
+    '**',
+    '** Licensed under the Apache License, Version 2.0 (the "License");',
+    '** you may not use this file except in compliance with the License.',
+    '** You may obtain a copy of the License at',
+    '**',
+    '**     http://www.apache.org/licenses/LICENSE-2.0',
+    '**',
+    '** Unless required by applicable law or agreed to in writing, software',
+    '** distributed under the License is distributed on an "AS IS" BASIS,',
+    '** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.',
+    '** See the License for the specific language governing permissions and',
+    '** limitations under the License.',
+    '*/',
+    ''
+]
+
+
+platform_dict = {
+    'android' : 'VK_USE_PLATFORM_ANDROID_KHR',
+    'fuchsia' : 'VK_USE_PLATFORM_FUCHSIA',
+    'ggp': 'VK_USE_PLATFORM_GGP',
+    'ios' : 'VK_USE_PLATFORM_IOS_MVK',
+    'macos' : 'VK_USE_PLATFORM_MACOS_MVK',
+    'metal' : 'VK_USE_PLATFORM_METAL_EXT',
+    'vi' : 'VK_USE_PLATFORM_VI_NN',
+    'wayland' : 'VK_USE_PLATFORM_WAYLAND_KHR',
+    'win32' : 'VK_USE_PLATFORM_WIN32_KHR',
+    'xcb' : 'VK_USE_PLATFORM_XCB_KHR',
+    'xlib' : 'VK_USE_PLATFORM_XLIB_KHR',
+    'xlib_xrandr' : 'VK_USE_PLATFORM_XLIB_XRANDR_EXT',
+}
+
+#
+# Return appropriate feature protect string from 'platform' tag on feature
+def GetFeatureProtect(interface):
+    """Get platform protection string"""
+    platform = interface.get('platform')
+    protect = None
+    if platform is not None:
+        protect = platform_dict[platform]
+    return protect
+
+# Return a dict containing the dispatchable/non-dispatchable type of every handle
+def GetHandleTypes(tree):
+    # Extend OrderedDict with common handle operations
+    class HandleDict(OrderedDict):
+        def IsDispatchable(self, handle_type):
+            return self.get(handle_type) == 'VK_DEFINE_HANDLE'
+        def IsNonDispatchable(self, handle_type):
+            return self.get(handle_type) == 'VK_DEFINE_NON_DISPATCHABLE_HANDLE'
+
+    handles = HandleDict()
+    for elem in tree.findall("types/type/[@category='handle']"):
+        if not elem.get('alias'):
+            name = elem.get('name')
+            handles[name] = elem.find('type').text
+    return handles
+
+# Return a dict containing the parent of every handle
+def GetHandleParents(tree):
+    # Extend OrderedDict with common handle operations
+    class HandleParentDict(OrderedDict):
+        def IsParentDevice(self, handle_type):
+            next_object = self.get(handle_type)
+            while next_object != 'VkDevice' and next_object != 'VkInstance' and next_object != 'VkPhysicalDevice' and next_object is not None:
+                next_object = self.get(next_object)
+            return next_object == 'VkDevice'
+        def GetHandleParent(self, handle_type):
+            return self.get(handle_type)
+
+    handle_parents = HandleParentDict()
+    for elem in tree.findall("types/type/[@category='handle']"):
+        if not elem.get('alias') or not elem.get('parent'):
+            name = elem.get('name')
+            handle_parents[name] = elem.get('parent')
+    return handle_parents
+
+# Return a dict containing the category attribute of every type
+def GetTypeCategories(tree):
+    type_categories = OrderedDict()
+    for elem in tree.findall("types/type"):
+        if not elem.get('alias'):
+            # name is either an attribute or the text of a child <name> tag
+            name = elem.get('name') or (elem.find("name") and elem.find('name').text)
+            type_categories[name] = elem.get('category')
+    return type_categories
+
+# Outdents a multiline string by the leading whitespace on its first line,
+# optionally indenting each line by the given prefix
+def Outdent(string_in, indent=''):
+    string_out = re.sub('^ *', '', string_in) # kill stray  leading spaces
+    if string_out[0] != '\n':
+        return string_in # needs new line to find the first line's indent level
+
+    first_indent = string_out[1:]
+    fake_indent = '\n' + ' ' * (len(first_indent) - len(first_indent.lstrip()))
+    indent = '\n' + indent
+
+    string_out = string_out.rstrip() + '\n' # remove trailing whitespace except for a newline
+    outdent = re.sub(fake_indent, indent, string_out)
+    return outdent[1:]
+
+
+# helper to define paths relative to the repo root
+def repo_relative(path):
+    return os.path.abspath(os.path.join(os.path.dirname(__file__), '..', path))
+
diff --git a/src/third_party/vulkan-validation-layers/src/scripts/dispatch_table_helper_generator.py b/src/third_party/vulkan-validation-layers/src/scripts/dispatch_table_helper_generator.py
new file mode 100644
index 0000000..f6f3a40
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/scripts/dispatch_table_helper_generator.py
@@ -0,0 +1,289 @@
+#!/usr/bin/python3 -i
+#
+# Copyright (c) 2015-2019 The Khronos Group Inc.
+# Copyright (c) 2015-2019 Valve Corporation
+# Copyright (c) 2015-2019 LunarG, Inc.
+# Copyright (c) 2015-2019 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Mark Lobodzinski <mark@lunarg.com>
+
+import os,re,sys
+import xml.etree.ElementTree as etree
+from generator import *
+from collections import namedtuple
+from common_codegen import *
+
+#
+# DispatchTableHelperOutputGeneratorOptions - subclass of GeneratorOptions.
+class DispatchTableHelperOutputGeneratorOptions(GeneratorOptions):
+    def __init__(self,
+                 conventions = None,
+                 filename = None,
+                 directory = '.',
+                 apiname = None,
+                 profile = None,
+                 versions = '.*',
+                 emitversions = '.*',
+                 defaultExtensions = None,
+                 addExtensions = None,
+                 removeExtensions = None,
+                 emitExtensions = None,
+                 sortProcedure = regSortFeatures,
+                 prefixText = "",
+                 genFuncPointers = True,
+                 apicall = '',
+                 apientry = '',
+                 apientryp = '',
+                 alignFuncParam = 0,
+                 expandEnumerants = True):
+        GeneratorOptions.__init__(self, conventions, filename, directory, apiname, profile,
+                                  versions, emitversions, defaultExtensions,
+                                  addExtensions, removeExtensions, emitExtensions, sortProcedure)
+        self.prefixText      = prefixText
+        self.genFuncPointers = genFuncPointers
+        self.apicall         = apicall
+        self.apientry        = apientry
+        self.apientryp       = apientryp
+        self.alignFuncParam  = alignFuncParam
+#
+# DispatchTableHelperOutputGenerator - subclass of OutputGenerator.
+# Generates dispatch table helper header files for LVL
+class DispatchTableHelperOutputGenerator(OutputGenerator):
+    """Generate dispatch table helper header based on XML element attributes"""
+    def __init__(self,
+                 errFile = sys.stderr,
+                 warnFile = sys.stderr,
+                 diagFile = sys.stdout):
+        OutputGenerator.__init__(self, errFile, warnFile, diagFile)
+        # Internal state - accumulators for different inner block text
+        self.instance_dispatch_list = []      # List of entries for instance dispatch list
+        self.device_dispatch_list = []        # List of entries for device dispatch list
+        self.dev_ext_stub_list = []           # List of stub functions for device extension functions
+        self.device_extension_list = []       # List of device extension functions
+        self.device_stub_list = []            # List of device functions with stubs (promoted or extensions)
+        self.extension_type = ''
+    #
+    # Called once at the beginning of each run
+    def beginFile(self, genOpts):
+        OutputGenerator.beginFile(self, genOpts)
+        
+        # Initialize members that require the tree
+        self.handle_types = GetHandleTypes(self.registry.tree)
+
+        write("#pragma once", file=self.outFile)
+        # User-supplied prefix text, if any (list of strings)
+        if (genOpts.prefixText):
+            for s in genOpts.prefixText:
+                write(s, file=self.outFile)
+        # File Comment
+        file_comment = '// *** THIS FILE IS GENERATED - DO NOT EDIT ***\n'
+        file_comment += '// See dispatch_table_helper_generator.py for modifications\n'
+        write(file_comment, file=self.outFile)
+        # Copyright Notice
+        copyright =  '/*\n'
+        copyright += ' * Copyright (c) 2015-2019 The Khronos Group Inc.\n'
+        copyright += ' * Copyright (c) 2015-2019 Valve Corporation\n'
+        copyright += ' * Copyright (c) 2015-2019 LunarG, Inc.\n'
+        copyright += ' *\n'
+        copyright += ' * Licensed under the Apache License, Version 2.0 (the "License");\n'
+        copyright += ' * you may not use this file except in compliance with the License.\n'
+        copyright += ' * You may obtain a copy of the License at\n'
+        copyright += ' *\n'
+        copyright += ' *     http://www.apache.org/licenses/LICENSE-2.0\n'
+        copyright += ' *\n'
+        copyright += ' * Unless required by applicable law or agreed to in writing, software\n'
+        copyright += ' * distributed under the License is distributed on an "AS IS" BASIS,\n'
+        copyright += ' * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n'
+        copyright += ' * See the License for the specific language governing permissions and\n'
+        copyright += ' * limitations under the License.\n'
+        copyright += ' *\n'
+        copyright += ' * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>\n'
+        copyright += ' * Author: Jon Ashburn <jon@lunarg.com>\n'
+        copyright += ' * Author: Mark Lobodzinski <mark@lunarg.com>\n'
+        copyright += ' */\n'
+
+        preamble = ''
+        preamble += '#include <vulkan/vulkan.h>\n'
+        preamble += '#include <vulkan/vk_layer.h>\n'
+        preamble += '#include <cstring>\n'
+        preamble += '#include <string>\n'
+        preamble += '#include <unordered_set>\n'
+        preamble += '#include <unordered_map>\n'
+        preamble += '#include "vk_layer_dispatch_table.h"\n'
+        preamble += '#include "vk_extension_helper.h"\n'
+
+        write(copyright, file=self.outFile)
+        write(preamble, file=self.outFile)
+    #
+    # Generate and write the dispatch tables to the output file
+    def endFile(self):
+        ext_enabled_fcn = ''
+        device_table = ''
+        instance_table = ''
+
+        ext_enabled_fcn += self.OutputExtEnabledFunction()
+        device_table += self.OutputDispatchTableHelper('device')
+        instance_table += self.OutputDispatchTableHelper('instance')
+
+        for stub in self.dev_ext_stub_list:
+            write(stub, file=self.outFile)
+        write("\n\n", file=self.outFile)
+        write(ext_enabled_fcn, file=self.outFile)
+        write("\n", file=self.outFile)
+        write(device_table, file=self.outFile)
+        write("\n", file=self.outFile)
+        write(instance_table, file=self.outFile)
+
+        # Finish processing in superclass
+        OutputGenerator.endFile(self)
+    #
+    # Processing at beginning of each feature or extension
+    def beginFeature(self, interface, emit):
+        OutputGenerator.beginFeature(self, interface, emit)
+        self.featureExtraProtect = GetFeatureProtect(interface)
+        self.extension_type = interface.get('type')
+
+    #
+    # Process commands, adding to appropriate dispatch tables
+    def genCmd(self, cmdinfo, name, alias):
+        OutputGenerator.genCmd(self, cmdinfo, name, alias)
+
+        avoid_entries = ['vkCreateInstance',
+                         'vkCreateDevice']
+        # Get first param type
+        params = cmdinfo.elem.findall('param')
+        info = self.getTypeNameTuple(params[0])
+
+        if name not in avoid_entries:
+            self.AddCommandToDispatchList(name, info[0], self.featureExtraProtect, cmdinfo)
+
+    #
+    # Determine if this API should be ignored or added to the instance or device dispatch table
+    def AddCommandToDispatchList(self, name, handle_type, protect, cmdinfo):
+        if handle_type not in self.handle_types:
+            return
+        if handle_type != 'VkInstance' and handle_type != 'VkPhysicalDevice' and name != 'vkGetInstanceProcAddr':
+            self.device_dispatch_list.append((name, self.featureExtraProtect))
+            extension = "VK_VERSION" not in self.featureName
+            promoted = not extension and "VK_VERSION_1_0" != self.featureName
+            if promoted or extension:
+                # Stubs (and extension-map entries) are written for all promoted entrypoints, in addition to extensions
+                self.device_stub_list.append([name, self.featureName])
+                self.device_extension_list.append([name, self.featureName])
+                # Build up stub function
+                return_type = ''
+                decl = self.makeCDecls(cmdinfo.elem)[1]
+                if decl.startswith('typedef VkResult'):
+                    return_type = 'return VK_SUCCESS;'
+                elif decl.startswith('typedef VkDeviceAddress'):
+                    return_type = 'return 0;'
+                elif decl.startswith('typedef uint32_t'):
+                    return_type = 'return 0;'
+                elif decl.startswith('typedef uint64_t'):
+                    return_type = 'return 0;'
+                pre_decl, decl = decl.split('*PFN_vk')
+                pre_decl = pre_decl.replace('typedef ', '')
+                pre_decl = pre_decl.split(' (')[0]
+                decl = decl.replace(')(', '(')
+                decl = 'static VKAPI_ATTR ' + pre_decl + ' VKAPI_CALL Stub' + decl
+                func_body = ' { ' + return_type + ' };'
+                decl = decl.replace (';', func_body)
+                if self.featureExtraProtect is not None:
+                    self.dev_ext_stub_list.append('#ifdef %s' % self.featureExtraProtect)
+                self.dev_ext_stub_list.append(decl)
+                if self.featureExtraProtect is not None:
+                    self.dev_ext_stub_list.append('#endif // %s' % self.featureExtraProtect)
+        else:
+            self.instance_dispatch_list.append((name, self.featureExtraProtect))
+        return
+    #
+    # Retrieve the type and name for a parameter
+    def getTypeNameTuple(self, param):
+        type = ''
+        name = ''
+        for elem in param:
+            if elem.tag == 'type':
+                type = noneStr(elem.text)
+            elif elem.tag == 'name':
+                name = noneStr(elem.text)
+        return (type, name)
+    #
+    # Output a function that'll determine if an extension is in the enabled list
+    def OutputExtEnabledFunction(self):
+        ext_fcn  = ''
+        # First, write out our static data structure -- map of all APIs that are part of extensions to their extension.
+        ext_fcn += 'const std::unordered_map<std::string, std::string> api_extension_map {\n'
+        for extn in self.device_extension_list:
+            ext_fcn += '    {"%s", "%s"},\n' % (extn[0], extn[1])
+        ext_fcn += '};\n\n'
+        ext_fcn += '// Using the above code-generated map of APINames-to-parent extension names, this function will:\n'
+        ext_fcn += '//   o  Determine if the API has an associated extension\n'
+        ext_fcn += '//   o  If it does, determine if that extension name is present in the passed-in set of enabled_ext_names \n'
+        ext_fcn += '//   If the APIname has no parent extension, OR its parent extension name is IN the set, return TRUE, else FALSE\n'
+        ext_fcn += 'static inline bool ApiParentExtensionEnabled(const std::string api_name, const DeviceExtensions *device_extension_info) {\n'
+        ext_fcn += '    auto has_ext = api_extension_map.find(api_name);\n'
+        ext_fcn += '    // Is this API part of an extension or feature group?\n'
+        ext_fcn += '    if (has_ext != api_extension_map.end()) {\n'
+        ext_fcn += '        // Was the extension for this API enabled in the CreateDevice call?\n'
+        ext_fcn += '        auto info = device_extension_info->get_info(has_ext->second.c_str());\n'
+        ext_fcn += '        if ((!info.state) || (device_extension_info->*(info.state) != kEnabledByCreateinfo)) {\n'
+        ext_fcn += '            return false;\n'
+        ext_fcn += '        }\n'
+        ext_fcn += '    }\n'
+        ext_fcn += '    return true;\n'
+        ext_fcn += '}\n'
+        return ext_fcn
+    #
+    # Create a dispatch table from the appropriate list and return it as a string
+    def OutputDispatchTableHelper(self, table_type):
+        entries = []
+        table = ''
+        if table_type == 'device':
+            entries = self.device_dispatch_list
+            table += 'static inline void layer_init_device_dispatch_table(VkDevice device, VkLayerDispatchTable *table, PFN_vkGetDeviceProcAddr gpa) {\n'
+            table += '    memset(table, 0, sizeof(*table));\n'
+            table += '    // Device function pointers\n'
+        else:
+            entries = self.instance_dispatch_list
+            table += 'static inline void layer_init_instance_dispatch_table(VkInstance instance, VkLayerInstanceDispatchTable *table, PFN_vkGetInstanceProcAddr gpa) {\n'
+            table += '    memset(table, 0, sizeof(*table));\n'
+            table += '    // Instance function pointers\n'
+
+        stubbed_functions = dict(self.device_stub_list)
+        for item in entries:
+            # Remove 'vk' from proto name
+            base_name = item[0][2:]
+
+            if item[1] is not None:
+                table += '#ifdef %s\n' % item[1]
+
+            # If we're looking for the proc we are passing in, just point the table to it.  This fixes the issue where
+            # a layer overrides the function name for the loader.
+            if ('device' in table_type and base_name == 'GetDeviceProcAddr'):
+                table += '    table->GetDeviceProcAddr = gpa;\n'
+            elif ('device' not in table_type and base_name == 'GetInstanceProcAddr'):
+                table += '    table->GetInstanceProcAddr = gpa;\n'
+            else:
+                table += '    table->%s = (PFN_%s) gpa(%s, "%s");\n' % (base_name, item[0], table_type, item[0])
+                if 'device' in table_type and item[0] in stubbed_functions:
+                    stub_check = '    if (table->%s == nullptr) { table->%s = (PFN_%s)Stub%s; }\n' % (base_name, base_name, item[0], base_name)
+                    table += stub_check
+            if item[1] is not None:
+                table += '#endif // %s\n' % item[1]
+
+        table += '}'
+        return table
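+
+# For orientation only: given the format strings above, the emitted header is expected
+# to contain per-entrypoint initializers of roughly this shape (entrypoint names here
+# are just examples; the real set is driven by vk.xml):
+#
+#     static inline void layer_init_device_dispatch_table(VkDevice device, VkLayerDispatchTable *table, PFN_vkGetDeviceProcAddr gpa) {
+#         memset(table, 0, sizeof(*table));
+#         table->GetDeviceProcAddr = gpa;
+#         table->CreateRenderPass = (PFN_vkCreateRenderPass) gpa(device, "vkCreateRenderPass");
+#         // Promoted/extension entrypoints additionally fall back to a generated Stub* function:
+#         table->TrimCommandPool = (PFN_vkTrimCommandPool) gpa(device, "vkTrimCommandPool");
+#         if (table->TrimCommandPool == nullptr) { table->TrimCommandPool = (PFN_vkTrimCommandPool)StubTrimCommandPool; }
+#     }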
diff --git a/src/third_party/vulkan-validation-layers/src/scripts/external_revision_generator.py b/src/third_party/vulkan-validation-layers/src/scripts/external_revision_generator.py
new file mode 100644
index 0000000..497291a
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/scripts/external_revision_generator.py
@@ -0,0 +1,143 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2015-2019 The Khronos Group Inc.
+# Copyright (c) 2015-2019 Valve Corporation
+# Copyright (c) 2015-2019 LunarG, Inc.
+# Copyright (c) 2015-2019 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Cort Stratton <cort@google.com>
+# Author: Jean-Francois Roy <jfroy@google.com>
+
+import argparse
+import hashlib
+import subprocess
+import uuid
+import json
+
+def generate(symbol_name, commit_id, output_header_file):
+    # Write commit ID to output header file
+    with open(output_header_file, "w") as header_file:
+         # File Comment
+        file_comment = '// *** THIS FILE IS GENERATED - DO NOT EDIT ***\n'
+        file_comment += '// See external_revision_generator.py for modifications\n'
+        header_file.write(file_comment)
+        # Copyright Notice
+        copyright = ''
+        copyright += '\n'
+        copyright += '/***************************************************************************\n'
+        copyright += ' *\n'
+        copyright += ' * Copyright (c) 2015-2019 The Khronos Group Inc.\n'
+        copyright += ' * Copyright (c) 2015-2019 Valve Corporation\n'
+        copyright += ' * Copyright (c) 2015-2019 LunarG, Inc.\n'
+        copyright += ' * Copyright (c) 2015-2019 Google Inc.\n'
+        copyright += ' *\n'
+        copyright += ' * Licensed under the Apache License, Version 2.0 (the "License");\n'
+        copyright += ' * you may not use this file except in compliance with the License.\n'
+        copyright += ' * You may obtain a copy of the License at\n'
+        copyright += ' *\n'
+        copyright += ' *     http://www.apache.org/licenses/LICENSE-2.0\n'
+        copyright += ' *\n'
+        copyright += ' * Unless required by applicable law or agreed to in writing, software\n'
+        copyright += ' * distributed under the License is distributed on an "AS IS" BASIS,\n'
+        copyright += ' * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n'
+        copyright += ' * See the License for the specific language governing permissions and\n'
+        copyright += ' * limitations under the License.\n'
+        copyright += ' *\n'
+        copyright += ' * Author: Chris Forbes <chrisforbes@google.com>\n'
+        copyright += ' * Author: Cort Stratton <cort@google.com>\n'
+        copyright += ' *\n'
+        copyright += ' ****************************************************************************/\n'
+        header_file.write(copyright)
+        # Contents
+        contents = '#pragma once\n\n'
+        contents += '#define %s "%s"\n' % (symbol_name, commit_id)
+        header_file.write(contents)
+
+def get_commit_id_from_git(git_binary, source_dir):
+    value = subprocess.check_output([git_binary, "rev-parse", "HEAD"], cwd=source_dir).decode('utf-8').strip()
+    return value
+
+def is_sha1(value):
+    try: int(value, 16)
+    except ValueError: return False
+    return len(value) == 40
+
+def get_commit_id_from_file(rev_file):
+    with open(rev_file, 'r') as rev_stream:
+        rev_contents = rev_stream.read()
+        rev_contents_stripped = rev_contents.strip()
+        if is_sha1(rev_contents_stripped):
+            return rev_contents_stripped
+        # otherwise, SHA1 the entire (unstripped) file contents
+        sha1 = hashlib.sha1()
+        sha1.update(rev_contents.encode('utf-8'))
+        return sha1.hexdigest()
+
+def get_commit_id_from_uuid():
+    unique_uuid = str(uuid.uuid4())
+    sha1 = hashlib.sha1()
+    sha1.update(unique_uuid.encode())
+    return sha1.hexdigest()
+
+def get_commit_id_from_json(json_file, json_keys):
+    with open(json_file) as json_stream:
+        json_data = json.load(json_stream)
+    for key in json_keys.split(','):
+        if type(json_data) == list:
+            json_data = json_data[int(key)]
+        else:
+            json_data = json_data[key]
+    return json_data
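+
+# Illustrative JSON-mode example, mirroring how generate_source.py elsewhere in this
+# patch invokes the script with '--json_keys repos,0,commit': for a file shaped like
+#     { "repos": [ { "commit": "<40-char sha1>", ... }, ... ] }
+# the key path is walked as json_data['repos'][0]['commit'] and that sha1 is returned:
+#
+#     get_commit_id_from_json('known_good.json', 'repos,0,commit')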
+
+def main():
+    parser = argparse.ArgumentParser()
+    rev_method_group = parser.add_mutually_exclusive_group(required=True)
+    rev_method_group.add_argument("--git_dir", metavar="SOURCE_DIR", help="git working copy directory")
+    rev_method_group.add_argument("--rev_file", metavar="REVISION_FILE", help="source revision file path (must contain a SHA1 hash")
+    rev_method_group.add_argument("--from_uuid", action='store_true', help="base SHA1 on a dynamically generated UUID")
+    rev_method_group.add_argument("--json_file", metavar="JSON_FILE", help="path to json file")
+    parser.add_argument("-s", "--symbol_name", metavar="SYMBOL_NAME", required=True, help="C symbol name")
+    parser.add_argument("-o", "--output_header_file", metavar="OUTPUT_HEADER_FILE", required=True, help="output header file path")
+    parser.add_argument("--json_keys", action='store', metavar="JSON_KEYS", help="comma-separated list of keys specifying SHA1 location in root json object for --json_file option")
+    args = parser.parse_args()
+
+    if (args.json_file is None) != (args.json_keys is None):
+        parser.error('--json_file and --json_keys must be provided together')
+
+    # We can either parse the latest Git commit ID out of the specified repository
+    # (preferred where possible), or compute the SHA1 hash of the contents of a file
+    # passed on the command line (necessary when, e.g., building the layers outside of
+    # a Git environment).
+    if args.git_dir is not None:
+        # Extract commit ID from the specified source directory
+        try:
+            commit_id = get_commit_id_from_git('git', args.git_dir)
+        except WindowsError:
+            # Call git.bat on Windows for compatibility.
+            commit_id = get_commit_id_from_git('git.bat', args.git_dir)
+    elif args.rev_file is not None:
+        # Read the commit ID from a file.
+        commit_id = get_commit_id_from_file(args.rev_file)
+    elif args.json_file is not None:
+        commit_id = get_commit_id_from_json(args.json_file, args.json_keys)
+    elif args.from_uuid:
+        commit_id = get_commit_id_from_uuid()
+
+    if not is_sha1(commit_id):
+        raise ValueError("commit ID for " + args.symbol_name + " must be a SHA1 hash.")
+
+    generate(args.symbol_name, commit_id, args.output_header_file)
+
+if __name__ == '__main__':
+    main()
diff --git a/src/third_party/vulkan-validation-layers/src/scripts/generate_source.py b/src/third_party/vulkan-validation-layers/src/scripts/generate_source.py
new file mode 100755
index 0000000..b675e1a
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/scripts/generate_source.py
@@ -0,0 +1,136 @@
+#!/usr/bin/env python3
+# Copyright (c) 2019 The Khronos Group Inc.
+# Copyright (c) 2019 Valve Corporation
+# Copyright (c) 2019 LunarG, Inc.
+# Copyright (c) 2019 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Mike Schuchardt <mikes@lunarg.com>
+
+import argparse
+import filecmp
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+import common_codegen
+
+# files to exclude from --verify check
+verify_exclude = ['.clang-format']
+
+def main(argv):
+    parser = argparse.ArgumentParser(description='Generate source code for this repository')
+    parser.add_argument('registry', metavar='REGISTRY_PATH', help='path to the Vulkan-Headers registry directory')
+    group = parser.add_mutually_exclusive_group()
+    group.add_argument('-i', '--incremental', action='store_true', help='only update repo files that change')
+    group.add_argument('-v', '--verify', action='store_true', help='verify repo files match generator output')
+    args = parser.parse_args(argv)
+
+    gen_cmds = [*[[common_codegen.repo_relative('scripts/lvl_genvk.py'),
+                   '-registry', os.path.abspath(os.path.join(args.registry,  'vk.xml')),
+                   '-quiet',
+                   filename] for filename in ["chassis.cpp",
+                                              "chassis.h",
+                                              "layer_chassis_dispatch.cpp",
+                                              "layer_chassis_dispatch.h",
+                                              "object_tracker.cpp",
+                                              "object_tracker.h",
+                                              "parameter_validation.cpp",
+                                              "parameter_validation.h",
+                                              "thread_safety.cpp",
+                                              "thread_safety.h",
+                                              "vk_dispatch_table_helper.h",
+                                              "vk_enum_string_helper.h",
+                                              "vk_extension_helper.h",
+                                              "vk_layer_dispatch_table.h",
+                                              "vk_object_types.h",
+                                              "vk_safe_struct.cpp",
+                                              "vk_safe_struct.h",
+                                              "lvt_function_pointers.cpp",
+                                              "lvt_function_pointers.h",
+                                              "vk_typemap_helper.h",
+                                              "command_counter_helper.cpp",
+                                              "command_counter_helper.h"]],
+                [common_codegen.repo_relative('scripts/vk_validation_stats.py'),
+                 os.path.abspath(os.path.join(args.registry, 'validusage.json')),
+                 '-export_header'],
+                [common_codegen.repo_relative('scripts/external_revision_generator.py'),
+                 '--json_file', common_codegen.repo_relative('scripts/known_good.json'),
+                 '--json_keys', 'repos,0,commit',
+                 '-s', 'SPIRV_TOOLS_COMMIT_ID',
+                 '-o', 'spirv_tools_commit_id.h']]
+
+    repo_dir = common_codegen.repo_relative('layers/generated')
+
+    # get directory where generators will run
+    if args.verify or args.incremental:
+        # generate in temp directory so we can compare or copy later
+        temp_obj = tempfile.TemporaryDirectory(prefix='VulkanVL_generated_source_')
+        temp_dir = temp_obj.name
+        gen_dir = temp_dir
+    else:
+        # generate directly in the repo
+        gen_dir = repo_dir
+
+    # run each code generator
+    for cmd in gen_cmds:
+        print(' '.join(cmd))
+        try:
+            subprocess.check_call([sys.executable] + cmd, cwd=gen_dir)
+        except Exception as e:
+            print('ERROR:', str(e))
+            return 1
+
+    # optional post-generation steps
+    if args.verify:
+        # compare contents of temp dir and repo
+        temp_files = set(os.listdir(temp_dir))
+        repo_files = set(os.listdir(repo_dir))
+        files_match = True
+        for filename in sorted((temp_files | repo_files) - set(verify_exclude)):
+            if filename not in repo_files:
+                print('ERROR: Missing repo file', filename)
+                files_match = False
+            elif filename not in temp_files:
+                print('ERROR: Missing generator for', filename)
+                files_match = False
+            elif not filecmp.cmp(os.path.join(temp_dir, filename),
+                               os.path.join(repo_dir, filename),
+                               shallow=False):
+                print('ERROR: Repo files do not match generator output for', filename)
+                files_match = False
+
+        # return code for test scripts
+        if files_match:
+            print('SUCCESS: Repo files match generator output')
+            return 0
+        return 1
+
+    elif args.incremental:
+        # copy missing or differing files from temp directory to repo
+        for filename in os.listdir(temp_dir):
+            temp_filename = os.path.join(temp_dir, filename)
+            repo_filename = os.path.join(repo_dir, filename)
+            if not os.path.exists(repo_filename) or \
+               not filecmp.cmp(temp_filename, repo_filename, shallow=False):
+                print('update', repo_filename)
+                shutil.copyfile(temp_filename, repo_filename)
+
+    return 0
+
+if __name__ == '__main__':
+    sys.exit(main(sys.argv[1:]))
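+
+# Example invocations (the registry path is hypothetical; it depends on where the
+# Vulkan-Headers checkout lives in a given build tree):
+#
+#     python3 scripts/generate_source.py /path/to/Vulkan-Headers/registry            # regenerate into layers/generated
+#     python3 scripts/generate_source.py /path/to/Vulkan-Headers/registry --verify   # compare against checked-in files, non-zero exit on mismatch
+#     python3 scripts/generate_source.py /path/to/Vulkan-Headers/registry -i         # copy only changed files into the repo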
+
diff --git a/src/third_party/vulkan-validation-layers/src/scripts/helper_file_generator.py b/src/third_party/vulkan-validation-layers/src/scripts/helper_file_generator.py
new file mode 100644
index 0000000..5af2648
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/scripts/helper_file_generator.py
@@ -0,0 +1,1527 @@
+#!/usr/bin/python3 -i
+#
+# Copyright (c) 2015-2019 The Khronos Group Inc.
+# Copyright (c) 2015-2019 Valve Corporation
+# Copyright (c) 2015-2019 LunarG, Inc.
+# Copyright (c) 2015-2019 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Mark Lobodzinski <mark@lunarg.com>
+# Author: Tobin Ehlis <tobine@google.com>
+# Author: John Zulauf <jzulauf@lunarg.com>
+
+import os,re,sys
+import xml.etree.ElementTree as etree
+from generator import *
+from collections import namedtuple
+from common_codegen import *
+
+#
+# HelperFileOutputGeneratorOptions - subclass of GeneratorOptions.
+class HelperFileOutputGeneratorOptions(GeneratorOptions):
+    def __init__(self,
+                 conventions = None,
+                 filename = None,
+                 directory = '.',
+                 apiname = None,
+                 profile = None,
+                 versions = '.*',
+                 emitversions = '.*',
+                 defaultExtensions = None,
+                 addExtensions = None,
+                 removeExtensions = None,
+                 emitExtensions = None,
+                 sortProcedure = regSortFeatures,
+                 prefixText = "",
+                 genFuncPointers = True,
+                 protectFile = True,
+                 protectFeature = True,
+                 apicall = '',
+                 apientry = '',
+                 apientryp = '',
+                 alignFuncParam = 0,
+                 library_name = '',
+                 expandEnumerants = True,
+                 helper_file_type = ''):
+        GeneratorOptions.__init__(self, conventions, filename, directory, apiname, profile,
+                                  versions, emitversions, defaultExtensions,
+                                  addExtensions, removeExtensions, emitExtensions, sortProcedure)
+        self.prefixText       = prefixText
+        self.genFuncPointers  = genFuncPointers
+        self.protectFile      = protectFile
+        self.protectFeature   = protectFeature
+        self.apicall          = apicall
+        self.apientry         = apientry
+        self.apientryp        = apientryp
+        self.alignFuncParam   = alignFuncParam
+        self.library_name     = library_name
+        self.helper_file_type = helper_file_type
+#
+# HelperFileOutputGenerator - subclass of OutputGenerator. Outputs Vulkan helper files
+class HelperFileOutputGenerator(OutputGenerator):
+    """Generate helper file based on XML element attributes"""
+    def __init__(self,
+                 errFile = sys.stderr,
+                 warnFile = sys.stderr,
+                 diagFile = sys.stdout):
+        OutputGenerator.__init__(self, errFile, warnFile, diagFile)
+        # Internal state - accumulators for different inner block text
+        self.enum_output = ''                             # string built up of enum string routines
+        self.structNames = []                             # List of Vulkan struct typenames
+        self.structTypes = dict()                         # Map of Vulkan struct typename to required VkStructureType
+        self.structMembers = []                           # List of StructMemberData records for all Vulkan structs
+        self.object_types = []                            # List of all handle types
+        self.object_type_aliases = []                     # Aliases to handles types (for handles that were extensions)
+        self.debug_report_object_types = []               # Handy copy of debug_report_object_type enum data
+        self.core_object_types = []                       # Handy copy of core_object_type enum data
+        self.device_extension_info = dict()               # Dict of device extension name defines and ifdef values
+        self.instance_extension_info = dict()             # Dict of instance extension name defines and ifdef values
+        self.structextends_list = []                      # List of structs which extend another struct via pNext
+
+
+        # Named tuples to store struct and command data
+        self.StructType = namedtuple('StructType', ['name', 'value'])
+        self.CommandParam = namedtuple('CommandParam', ['type', 'name', 'ispointer', 'isstaticarray', 'isconst', 'iscount', 'len', 'extstructs', 'cdecl'])
+        self.StructMemberData = namedtuple('StructMemberData', ['name', 'members', 'ifdef_protect'])
+
+        self.custom_construct_params = {
+            # safe_VkGraphicsPipelineCreateInfo needs to know if the subpass has color and/or depth/stencil attachments to use its pointers
+            'VkGraphicsPipelineCreateInfo' :
+                ', const bool uses_color_attachment, const bool uses_depthstencil_attachment',
+            # safe_VkPipelineViewportStateCreateInfo needs to know if the viewport and scissor are dynamic to use its pointers
+            'VkPipelineViewportStateCreateInfo' :
+                ', const bool is_dynamic_viewports, const bool is_dynamic_scissors',
+        }
+    #
+    # Called once at the beginning of each run
+    def beginFile(self, genOpts):
+        OutputGenerator.beginFile(self, genOpts)
+        # Initialize members that require the tree
+        self.handle_types = GetHandleTypes(self.registry.tree)
+        # User-supplied prefix text, if any (list of strings)
+        self.helper_file_type = genOpts.helper_file_type
+        self.library_name = genOpts.library_name
+        # File Comment
+        file_comment = '// *** THIS FILE IS GENERATED - DO NOT EDIT ***\n'
+        file_comment += '// See helper_file_generator.py for modifications\n'
+        write(file_comment, file=self.outFile)
+        # Copyright Notice
+        copyright = ''
+        copyright += '\n'
+        copyright += '/***************************************************************************\n'
+        copyright += ' *\n'
+        copyright += ' * Copyright (c) 2015-2019 The Khronos Group Inc.\n'
+        copyright += ' * Copyright (c) 2015-2019 Valve Corporation\n'
+        copyright += ' * Copyright (c) 2015-2019 LunarG, Inc.\n'
+        copyright += ' * Copyright (c) 2015-2019 Google Inc.\n'
+        copyright += ' *\n'
+        copyright += ' * Licensed under the Apache License, Version 2.0 (the "License");\n'
+        copyright += ' * you may not use this file except in compliance with the License.\n'
+        copyright += ' * You may obtain a copy of the License at\n'
+        copyright += ' *\n'
+        copyright += ' *     http://www.apache.org/licenses/LICENSE-2.0\n'
+        copyright += ' *\n'
+        copyright += ' * Unless required by applicable law or agreed to in writing, software\n'
+        copyright += ' * distributed under the License is distributed on an "AS IS" BASIS,\n'
+        copyright += ' * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n'
+        copyright += ' * See the License for the specific language governing permissions and\n'
+        copyright += ' * limitations under the License.\n'
+        copyright += ' *\n'
+        copyright += ' * Author: Mark Lobodzinski <mark@lunarg.com>\n'
+        copyright += ' * Author: Courtney Goeltzenleuchter <courtneygo@google.com>\n'
+        copyright += ' * Author: Tobin Ehlis <tobine@google.com>\n'
+        copyright += ' * Author: Chris Forbes <chrisforbes@google.com>\n'
+        copyright += ' * Author: John Zulauf<jzulauf@lunarg.com>\n'
+        copyright += ' *\n'
+        copyright += ' ****************************************************************************/\n'
+        write(copyright, file=self.outFile)
+    #
+    # Write generated file content to output file
+    def endFile(self):
+        dest_file = self.OutputDestFile()
+        # Remove blank lines at EOF
+        if dest_file.endswith('\n'):
+            dest_file = dest_file[:-1]
+        write(dest_file, file=self.outFile)
+        # Finish processing in superclass
+        OutputGenerator.endFile(self)
+    #
+    # Override parent class to be notified of the beginning of an extension
+    def beginFeature(self, interface, emit):
+        # Start processing in superclass
+        OutputGenerator.beginFeature(self, interface, emit)
+        self.featureExtraProtect = GetFeatureProtect(interface)
+
+        if self.featureName == 'VK_VERSION_1_0' or self.featureName == 'VK_VERSION_1_1':
+            return
+        name = self.featureName
+        nameElem = interface[0][1]
+        name_define = nameElem.get('name')
+        if 'EXTENSION_NAME' not in name_define:
+            print("Error in vk.xml file -- extension name is not available")
+        requires = interface.get('requires')
+        if requires is not None:
+            required_extensions = requires.split(',')
+        else:
+            required_extensions = list()
+        requiresCore = interface.get('requiresCore')
+        if requiresCore is not None:
+            required_extensions.append('VK_VERSION_%s' % ('_'.join(requiresCore.split('.'))))
+        info = { 'define': name_define, 'ifdef':self.featureExtraProtect, 'reqs':required_extensions }
+        if interface.get('type') == 'instance':
+            self.instance_extension_info[name] = info
+        else:
+            self.device_extension_info[name] = info
+
+    #
+    # Override parent class to be notified of the end of an extension
+    def endFeature(self):
+        # Finish processing in superclass
+        OutputGenerator.endFeature(self)
+    #
+    # Grab group (e.g. C "enum" type) info to output for enum-string conversion helper
+    def genGroup(self, groupinfo, groupName, alias):
+        OutputGenerator.genGroup(self, groupinfo, groupName, alias)
+        groupElem = groupinfo.elem
+        # For enum_string_header
+        if self.helper_file_type == 'enum_string_header':
+            value_set = set()
+            for elem in groupElem.findall('enum'):
+                if elem.get('supported') != 'disabled' and elem.get('alias') is None:
+                    value_set.add(elem.get('name'))
+            if value_set != set():
+                self.enum_output += self.GenerateEnumStringConversion(groupName, value_set)
+        elif self.helper_file_type == 'object_types_header':
+            if groupName == 'VkDebugReportObjectTypeEXT':
+                for elem in groupElem.findall('enum'):
+                    if elem.get('supported') != 'disabled':
+                        if elem.get('alias') is None: # TODO: Strangely the "alias" fn parameter does not work
+                            item_name = elem.get('name')
+                            if self.debug_report_object_types.count(item_name) == 0: # TODO: Strangely there are duplicates
+                                self.debug_report_object_types.append(item_name)
+            elif groupName == 'VkObjectType':
+                for elem in groupElem.findall('enum'):
+                    if elem.get('supported') != 'disabled':
+                        if elem.get('alias') is None: # TODO: Strangely the "alias" fn parameter does not work
+                            item_name = elem.get('name')
+                            self.core_object_types.append(item_name)
+
+    #
+    # Called for each type -- if the type is a struct/union, grab the metadata
+    def genType(self, typeinfo, name, alias):
+        OutputGenerator.genType(self, typeinfo, name, alias)
+        typeElem = typeinfo.elem
+        # If the type is a struct type, traverse the embedded <member> tags generating a structure.
+        # Otherwise, emit the tag text.
+        category = typeElem.get('category')
+        if category == 'handle':
+            if alias:
+                self.object_type_aliases.append((name,alias))
+            else:
+                self.object_types.append(name)
+        elif (category == 'struct' or category == 'union'):
+            self.structNames.append(name)
+            self.genStruct(typeinfo, name, alias)
+    #
+    # Check if the parameter passed in is a pointer
+    def paramIsPointer(self, param):
+        ispointer = False
+        for elem in param:
+            if elem.tag == 'type' and elem.tail is not None and '*' in elem.tail:
+                ispointer = True
+        return ispointer
+    #
+    # Check if the parameter passed in is a static array
+    def paramIsStaticArray(self, param):
+        isstaticarray = 0
+        paramname = param.find('name')
+        if (paramname.tail is not None) and ('[' in paramname.tail):
+            isstaticarray = paramname.tail.count('[')
+        return isstaticarray
+    #
+    # Retrieve the type and name for a parameter
+    def getTypeNameTuple(self, param):
+        type = ''
+        name = ''
+        for elem in param:
+            if elem.tag == 'type':
+                type = noneStr(elem.text)
+            elif elem.tag == 'name':
+                name = noneStr(elem.text)
+        return (type, name)
+    # Extract length values from latexmath.  Currently an inflexible solution that looks for specific
+    # patterns that are found in vk.xml.  Will need to be updated when new patterns are introduced.
+    def parseLateXMath(self, source):
+        name = 'ERROR'
+        decoratedName = 'ERROR'
+        if 'mathit' in source:
+            # Matches expressions similar to 'latexmath:[\lceil{\mathit{rasterizationSamples} \over 32}\rceil]'
+            match = re.match(r'latexmath\s*\:\s*\[\s*\\l(\w+)\s*\{\s*\\mathit\s*\{\s*(\w+)\s*\}\s*\\over\s*(\d+)\s*\}\s*\\r(\w+)\s*\]', source)
+            if not match or match.group(1) != match.group(4):
+                raise RuntimeError('Unrecognized latexmath expression')
+            name = match.group(2)
+            # Need to add 1 for the ceiling function; otherwise the allocated packet
+            # size will be less than needed during capture for some titles which use
+            # this in VkPipelineMultisampleStateCreateInfo. By the definition of the
+            # ceiling function the exact value is
+            # '{0}%{1} ? {0}/{1} + 1 : {0}/{1}'.format(*match.group(2, 3)),
+            # and it is always <= '{}/{} + 1'.
+            if match.group(1) == 'ceil':
+                decoratedName = '{}/{} + 1'.format(*match.group(2, 3))
+            else:
+                decoratedName = '{}/{}'.format(*match.group(2, 3))
+        else:
+            # Matches expressions similar to 'latexmath : [dataSize \over 4]'
+            match = re.match(r'latexmath\s*\:\s*\[\s*(\\textrm\{)?(\w+)\}?\s*\\over\s*(\d+)\s*\]', source)
+            name = match.group(2)
+            decoratedName = '{}/{}'.format(*match.group(2, 3))
+        return name, decoratedName
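+    # Illustrative expected results for the two patterns handled above (derived from
+    # the regexes; vk.xml is the authoritative source of such expressions):
+    #
+    #     parseLateXMath('latexmath:[\\lceil{\\mathit{rasterizationSamples} \\over 32}\\rceil]')
+    #         -> ('rasterizationSamples', 'rasterizationSamples/32 + 1')
+    #     parseLateXMath('latexmath:[dataSize \\over 4]')
+    #         -> ('dataSize', 'dataSize/4')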
+    #
+    # Retrieve the value of the len tag
+    def getLen(self, param):
+        result = None
+        len = param.attrib.get('len')
+        if len and len != 'null-terminated':
+            # For string arrays, 'len' can look like 'count,null-terminated', indicating that we
+            # have a null terminated array of strings.  We strip the null-terminated from the
+            # 'len' field and only return the parameter specifying the string count
+            if 'null-terminated' in len:
+                result = len.split(',')[0]
+            else:
+                result = len
+            if 'latexmath' in len:
+                param_type, param_name = self.getTypeNameTuple(param)
+                len_name, result = self.parseLateXMath(len)
+            # The spec now uses '::' notation for len attributes instead of a platform-specific pointer symbol
+            result = str(result).replace('::', '->')
+        return result
+    #
+    # Check if a structure is or contains a dispatchable (dispatchable = True) or
+    # non-dispatchable (dispatchable = False) handle
+    def TypeContainsObjectHandle(self, handle_type, dispatchable):
+        if dispatchable:
+            type_check = self.handle_types.IsDispatchable
+        else:
+            type_check = self.handle_types.IsNonDispatchable
+        if type_check(handle_type):
+            return True
+        # if handle_type is a struct, search its members
+        if handle_type in self.structNames:
+            member_index = next((i for i, v in enumerate(self.structMembers) if v[0] == handle_type), None)
+            if member_index is not None:
+                for item in self.structMembers[member_index].members:
+                    if type_check(item.type):
+                        return True
+        return False
+    #
+    # Generate local ready-access data describing Vulkan structures and unions from the XML metadata
+    def genStruct(self, typeinfo, typeName, alias):
+        OutputGenerator.genStruct(self, typeinfo, typeName, alias)
+        members = typeinfo.elem.findall('.//member')
+        # Iterate over members once to get length parameters for arrays
+        lens = set()
+        for member in members:
+            len = self.getLen(member)
+            if len:
+                lens.add(len)
+        # Generate member info
+        membersInfo = []
+        for member in members:
+            # Get the member's type and name
+            info = self.getTypeNameTuple(member)
+            type = info[0]
+            name = info[1]
+            cdecl = self.makeCParamDecl(member, 1)
+            # Process VkStructureType
+            if type == 'VkStructureType':
+                # Extract the required struct type value from the comments
+                # embedded in the original text defining the 'typeinfo' element
+                rawXml = etree.tostring(typeinfo.elem).decode('ascii')
+                result = re.search(r'VK_STRUCTURE_TYPE_\w+', rawXml)
+                if result:
+                    value = result.group(0)
+                    # Store the required type value
+                    self.structTypes[typeName] = self.StructType(name=name, value=value)
+            # Store pointer/array/string info
+            isstaticarray = self.paramIsStaticArray(member)
+            structextends = False
+            membersInfo.append(self.CommandParam(type=type,
+                                                 name=name,
+                                                 ispointer=self.paramIsPointer(member),
+                                                 isstaticarray=isstaticarray,
+                                                 isconst=True if 'const' in cdecl else False,
+                                                 iscount=True if name in lens else False,
+                                                 len=self.getLen(member),
+                                                 extstructs=self.registry.validextensionstructs[typeName] if name == 'pNext' else None,
+                                                 cdecl=cdecl))
+        # If this struct extends another, keep its name in list for further processing
+        if typeinfo.elem.attrib.get('structextends') is not None:
+            self.structextends_list.append(typeName)
+        self.structMembers.append(self.StructMemberData(name=typeName, members=membersInfo, ifdef_protect=self.featureExtraProtect))
+    #
+    # Enum_string_header: Create a routine to convert an enumerated value into a string
+    def GenerateEnumStringConversion(self, groupName, value_list):
+        outstring = '\n'
+        if self.featureExtraProtect is not None:
+            outstring += '\n#ifdef %s\n\n' % self.featureExtraProtect
+        outstring += 'static inline const char* string_%s(%s input_value)\n' % (groupName, groupName)
+        outstring += '{\n'
+        outstring += '    switch ((%s)input_value)\n' % groupName
+        outstring += '    {\n'
+        # Emit these in a repeatable order so file is generated with the same contents each time.
+        # This helps compiler caching systems like ccache.
+        for item in sorted(value_list):
+            outstring += '        case %s:\n' % item
+            outstring += '            return "%s";\n' % item
+        outstring += '        default:\n'
+        outstring += '            return "Unhandled %s";\n' % groupName
+        outstring += '    }\n'
+        outstring += '}\n'
+
+        bitsIndex = groupName.find('Bits')
+        if (bitsIndex != -1):
+            outstring += '\n'
+            flagsName = groupName[0:bitsIndex] + "s" +  groupName[bitsIndex+4:]
+            outstring += 'static inline std::string string_%s(%s input_value)\n' % (flagsName, flagsName)
+            outstring += '{\n'
+            outstring += '    std::string ret;\n'
+            outstring += '    int index = 0;\n'
+            outstring += '    while(input_value) {\n'
+            outstring += '        if (input_value & 1) {\n'
+            outstring += '            if( !ret.empty()) ret.append("|");\n'
+            outstring += '            ret.append(string_%s(static_cast<%s>(1 << index)));\n' % (groupName, groupName)
+            outstring += '        }\n'
+            outstring += '        ++index;\n'
+            outstring += '        input_value >>= 1;\n'
+            outstring += '    }\n'
+            outstring += '    if( ret.empty()) ret.append(string_%s(static_cast<%s>(0)));\n' % (groupName, groupName)
+            outstring += '    return ret;\n'
+            outstring += '}\n'
+
+        if self.featureExtraProtect is not None:
+            outstring += '#endif // %s\n' % self.featureExtraProtect
+        return outstring
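+    # The emitted routine is expected to look roughly like this (illustrative, using
+    # VkResult as an example group; the real cases come from vk.xml):
+    #
+    #     static inline const char* string_VkResult(VkResult input_value)
+    #     {
+    #         switch ((VkResult)input_value)
+    #         {
+    #             case VK_SUCCESS:
+    #                 return "VK_SUCCESS";
+    #             ...
+    #             default:
+    #                 return "Unhandled VkResult";
+    #         }
+    #     }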
+    #
+    # Tack on a helper which, given an index into a VkPhysicalDeviceFeatures structure, will print the corresponding feature name
+    def DeIndexPhysDevFeatures(self):
+        pdev_members = None
+        for name, members, ifdef in self.structMembers:
+            if name == 'VkPhysicalDeviceFeatures':
+                pdev_members = members
+                break
+        deindex = '\n'
+        deindex += 'static inline const char * GetPhysDevFeatureString(uint32_t index) {\n'
+        deindex += '    const char * IndexToPhysDevFeatureString[] = {\n'
+        for feature in pdev_members:
+            deindex += '        "%s",\n' % feature.name
+        deindex += '    };\n\n'
+        deindex += '    return IndexToPhysDevFeatureString[index];\n'
+        deindex += '}\n'
+        return deindex
+    #
+    # Combine enum string helper header file preamble with body text and return
+    def GenerateEnumStringHelperHeader(self):
+        enum_string_helper_header = '\n'
+        enum_string_helper_header += '#pragma once\n'
+        enum_string_helper_header += '#ifdef _WIN32\n'
+        enum_string_helper_header += '#pragma warning( disable : 4065 )\n'
+        enum_string_helper_header += '#endif\n'
+        enum_string_helper_header += '\n'
+        enum_string_helper_header += '#include <string>\n'
+        enum_string_helper_header += '#include <vulkan/vulkan.h>\n'
+        enum_string_helper_header += '\n'
+        enum_string_helper_header += self.enum_output
+        enum_string_helper_header += self.DeIndexPhysDevFeatures()
+        return enum_string_helper_header
+    #
+    # Helper function for declaring a counter variable only once
+    def DeclareCounter(self, string_var, declare_flag):
+        if declare_flag == False:
+            string_var += '        uint32_t i = 0;\n'
+            declare_flag = True
+        return string_var, declare_flag
+    #
+    # Combine safe struct helper header file preamble with body text and return
+    def GenerateSafeStructHelperHeader(self):
+        safe_struct_helper_header = '\n'
+        safe_struct_helper_header += '#pragma once\n'
+        safe_struct_helper_header += '#include <vulkan/vulkan.h>\n'
+        safe_struct_helper_header += '\n'
+        safe_struct_helper_header += 'void *SafePnextCopy(const void *pNext);\n'
+        safe_struct_helper_header += 'void FreePnextChain(const void *pNext);\n'
+        safe_struct_helper_header += 'char *SafeStringCopy(const char *in_string);\n'
+        safe_struct_helper_header += '\n'
+        safe_struct_helper_header += self.GenerateSafeStructHeader()
+        return safe_struct_helper_header
+    #
+    # safe_struct header: build function prototypes for header file
+    def GenerateSafeStructHeader(self):
+        safe_struct_header = ''
+        for item in self.structMembers:
+            if self.NeedSafeStruct(item) == True:
+                safe_struct_header += '\n'
+                if item.ifdef_protect is not None:
+                    safe_struct_header += '#ifdef %s\n' % item.ifdef_protect
+                safe_struct_header += 'struct safe_%s {\n' % (item.name)
+                for member in item.members:
+                    if member.type in self.structNames:
+                        member_index = next((i for i, v in enumerate(self.structMembers) if v[0] == member.type), None)
+                        if member_index is not None and self.NeedSafeStruct(self.structMembers[member_index]) == True:
+                            if member.ispointer:
+                                safe_struct_header += '    safe_%s* %s;\n' % (member.type, member.name)
+                            else:
+                                safe_struct_header += '    safe_%s %s;\n' % (member.type, member.name)
+                            continue
+                    if member.len is not None and (self.TypeContainsObjectHandle(member.type, True) or self.TypeContainsObjectHandle(member.type, False)):
+                            safe_struct_header += '    %s* %s;\n' % (member.type, member.name)
+                    else:
+                        safe_struct_header += '%s;\n' % member.cdecl
+                safe_struct_header += '    safe_%s(const %s* in_struct%s);\n' % (item.name, item.name, self.custom_construct_params.get(item.name, ''))
+                safe_struct_header += '    safe_%s(const safe_%s& src);\n' % (item.name, item.name)
+                safe_struct_header += '    safe_%s& operator=(const safe_%s& src);\n' % (item.name, item.name)
+                safe_struct_header += '    safe_%s();\n' % item.name
+                safe_struct_header += '    ~safe_%s();\n' % item.name
+                safe_struct_header += '    void initialize(const %s* in_struct%s);\n' % (item.name, self.custom_construct_params.get(item.name, ''))
+                safe_struct_header += '    void initialize(const safe_%s* src);\n' % (item.name)
+                safe_struct_header += '    %s *ptr() { return reinterpret_cast<%s *>(this); }\n' % (item.name, item.name)
+                safe_struct_header += '    %s const *ptr() const { return reinterpret_cast<%s const *>(this); }\n' % (item.name, item.name)
+                safe_struct_header += '};\n'
+                if item.ifdef_protect is not None:
+                    safe_struct_header += '#endif // %s\n' % item.ifdef_protect
+        return safe_struct_header
+    #
+    # Generate extension helper header file
+    def GenerateExtensionHelperHeader(self):
+
+        V_1_1_level_feature_set = [
+            'VK_VERSION_1_1',
+            ]
+
+        V_1_0_instance_extensions_promoted_to_V_1_1_core = [
+            'vk_khr_device_group_creation',
+            'vk_khr_external_fence_capabilities',
+            'vk_khr_external_memory_capabilities',
+            'vk_khr_external_semaphore_capabilities',
+            'vk_khr_get_physical_device_properties_2',
+            ]
+
+        V_1_0_device_extensions_promoted_to_V_1_1_core = [
+            'vk_khr_16bit_storage',
+            'vk_khr_bind_memory_2',
+            'vk_khr_dedicated_allocation',
+            'vk_khr_descriptor_update_template',
+            'vk_khr_device_group',
+            'vk_khr_external_fence',
+            'vk_khr_external_memory',
+            'vk_khr_external_semaphore',
+            'vk_khr_get_memory_requirements_2',
+            'vk_khr_maintenance1',
+            'vk_khr_maintenance2',
+            'vk_khr_maintenance3',
+            'vk_khr_multiview',
+            'vk_khr_relaxed_block_layout',
+            'vk_khr_sampler_ycbcr_conversion',
+            'vk_khr_shader_draw_parameters',
+            'vk_khr_storage_buffer_storage_class',
+            'vk_khr_variable_pointers',
+            ]
+
+        output = [
+            '',
+            '#ifndef VK_EXTENSION_HELPER_H_',
+            '#define VK_EXTENSION_HELPER_H_',
+            '#include <unordered_set>',
+            '#include <string>',
+            '#include <unordered_map>',
+            '#include <utility>',
+            '#include <set>',
+            '#include <vector>',
+            '#include <cassert>',
+            '',
+            '#include <vulkan/vulkan.h>',
+            '',
+            '#define VK_VERSION_1_1_NAME "VK_VERSION_1_1"',
+            '',
+            'enum ExtEnabled : unsigned char {',
+            '    kNotEnabled,',
+            '    kEnabledByCreateinfo,',
+            '    kEnabledByApiLevel,',
+            '};',
+            '',
+            'static bool IsExtEnabled(ExtEnabled feature) {',
+            '    if (feature == kNotEnabled) return false;',
+            '    return true;',
+            '};',
+            '']
+
+        def guarded(ifdef, value):
+            if ifdef is not None:
+                return '\n'.join([ '#ifdef %s' % ifdef, value, '#endif' ])
+            else:
+                return value
+
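+        # Two passes: the Instance pass builds InstanceExtensions; the Device pass derives DeviceExtensions from it
+        # and also folds in the instance extension data so device-level code can query both.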
+        for type in ['Instance', 'Device']:
+            struct_type = '%sExtensions' % type
+            if type == 'Instance':
+                extension_dict = self.instance_extension_info
+                promoted_ext_list = V_1_0_instance_extensions_promoted_to_V_1_1_core
+                struct_decl = 'struct %s {' % struct_type
+                instance_struct_type = struct_type
+            else:
+                extension_dict = self.device_extension_info
+                promoted_ext_list = V_1_0_device_extensions_promoted_to_V_1_1_core
+                struct_decl = 'struct %s : public %s {' % (struct_type, instance_struct_type)
+
+            extension_items = sorted(extension_dict.items())
+
+            field_name = { ext_name: re.sub('_extension_name', '', info['define'].lower()) for ext_name, info in extension_items }
+
+            # Add in pseudo-extensions for core API versions so real extensions can depend on them
+            extension_dict['VK_VERSION_1_1'] = {'define':"VK_VERSION_1_1_NAME", 'ifdef':None, 'reqs':[]}
+            field_name['VK_VERSION_1_1'] = "vk_feature_version_1_1"
+
+            if type == 'Instance':
+                instance_field_name = field_name
+                instance_extension_dict = extension_dict
+            else:
+                # Get complete field name and extension data for both Instance and Device extensions
+                field_name.update(instance_field_name)
+                extension_dict = extension_dict.copy()  # Don't modify the self.<dict> we're pointing to
+                extension_dict.update(instance_extension_dict)
+
+            # Output the data member list
+            struct  = [struct_decl]
+            struct.extend([ '    ExtEnabled vk_feature_version_1_1{kNotEnabled};'])
+            struct.extend([ '    ExtEnabled %s{kNotEnabled};' % field_name[ext_name] for ext_name, info in extension_items])
+
+            # Construct the extension information map -- mapping name to data member (field), and required extensions
+            # The map is contained within a static function member for portability reasons.
+            info_type = '%sInfo' % type
+            info_map_type = '%sMap' % info_type
+            req_type = '%sReq' % type
+            req_vec_type = '%sVec' % req_type
+            struct.extend([
+                '',
+                '    struct %s {' % req_type,
+                '        const ExtEnabled %s::* enabled;' % struct_type,
+                '        const char *name;',
+                '    };',
+                '    typedef std::vector<%s> %s;' % (req_type, req_vec_type),
+                '    struct %s {' % info_type,
+                '       %s(ExtEnabled %s::* state_, const %s requires_): state(state_), requires(requires_) {}' % ( info_type, struct_type, req_vec_type),
+                '       ExtEnabled %s::* state;' % struct_type,
+                '       %s requires;' % req_vec_type,
+                '    };',
+                '',
+                '    typedef std::unordered_map<std::string,%s> %s;' % (info_type, info_map_type),
+                '        static const %s &get_info(const char *name) {' % info_type,
+                '        static const %s info_map = {' % info_map_type ])
+            struct.extend([
+                '            std::make_pair("VK_VERSION_1_1", %sInfo(&%sExtensions::vk_feature_version_1_1, {})),' % (type, type)])
+
+            field_format = '&' + struct_type + '::%s'
+            req_format = '{' + field_format + ', %s}'
+            req_indent = '\n                           '
+            req_join = ',' + req_indent
+            info_format = ('            std::make_pair(%s, ' + info_type + '(' + field_format + ', {%s})),')
+            def format_info(ext_name, info):
+                reqs = req_join.join([req_format % (field_name[req], extension_dict[req]['define']) for req in info['reqs']])
+                return info_format % (info['define'], field_name[ext_name], '{%s}' % (req_indent + reqs) if reqs else '')
+
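+            # One info_map entry per extension; guarded() wraps entries for platform-specific extensions in their #ifdef protect.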
+            struct.extend([guarded(info['ifdef'], format_info(ext_name, info)) for ext_name, info in extension_items])
+            struct.extend([
+                '        };',
+                '',
+                '        static const %s empty_info {nullptr, %s()};' % (info_type, req_vec_type),
+                '        %s::const_iterator info = info_map.find(name);' % info_map_type,
+                '        if ( info != info_map.cend()) {',
+                '            return info->second;',
+                '        }',
+                '        return empty_info;',
+                '    }',
+                ''])
+
+            if type == 'Instance':
+                struct.extend([
+                    '    uint32_t NormalizeApiVersion(uint32_t specified_version) {',
+                    '        uint32_t api_version = (specified_version < VK_API_VERSION_1_1) ? VK_API_VERSION_1_0 : VK_API_VERSION_1_1;',
+                    '        return api_version;',
+                    '    }',
+                    '',
+                    '    uint32_t InitFromInstanceCreateInfo(uint32_t requested_api_version, const VkInstanceCreateInfo *pCreateInfo) {'])
+            else:
+                struct.extend([
+                    '    %s() = default;' % struct_type,
+                    '    %s(const %s& instance_ext) : %s(instance_ext) {}' % (struct_type, instance_struct_type, instance_struct_type),
+                    '',
+                    '    uint32_t InitFromDeviceCreateInfo(const %s *instance_extensions, uint32_t requested_api_version,' % instance_struct_type,
+                    '                                      const VkDeviceCreateInfo *pCreateInfo) {',
+                    '        // Initialize: this to defaults,  base class fields to input.',
+                    '        assert(instance_extensions);',
+                    '        *this = %s(*instance_extensions);' % struct_type,
+                    ''])
+            struct.extend([
+                '',
+                '        static const std::vector<const char *> V_1_1_promoted_%s_apis = {' % type.lower() ])
+            struct.extend(['            %s_EXTENSION_NAME,' % ext_name.upper() for ext_name in promoted_ext_list])
+            struct.extend([
+                '        };',
+                '',
+                '        // Initialize struct data, robust to invalid pCreateInfo',
+                '        uint32_t api_version = NormalizeApiVersion(requested_api_version);',
+                '        if (api_version >= VK_API_VERSION_1_1) {',
+                '            auto info = get_info("VK_VERSION_1_1");',
+                '            if (info.state) this->*(info.state) = kEnabledByCreateinfo;',
+                '            for (auto promoted_ext : V_1_1_promoted_%s_apis) {' % type.lower(),
+                '                info = get_info(promoted_ext);',
+                '                assert(info.state);',
+                '                if (info.state) this->*(info.state) = kEnabledByApiLevel;',
+                '            }',
+                '        }',
+                '        // CreateInfo takes precedence over promoted',
+                '        if (pCreateInfo->ppEnabledExtensionNames) {',
+                '            for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {',
+                '                if (!pCreateInfo->ppEnabledExtensionNames[i]) continue;',
+                '                auto info = get_info(pCreateInfo->ppEnabledExtensionNames[i]);',
+                '                if (info.state) this->*(info.state) = kEnabledByCreateinfo;',
+                '            }',
+                '        }',
+
+                '        return api_version;',
+                '    }',
+                '};'])
+
+            # Output reference lists of instance/device extension names
+            struct.extend(['', 'static const std::set<std::string> k%sExtensionNames = {' % type])
+            struct.extend([guarded(info['ifdef'], '    %s,' % info['define']) for ext_name, info in extension_items])
+            struct.extend(['};', ''])
+            output.extend(struct)
+
+        output.extend(['', '#endif // VK_EXTENSION_HELPER_H_'])
+        return '\n'.join(output)
+    #
+    # Combine object types helper header file preamble with body text and return
+    def GenerateObjectTypesHelperHeader(self):
+        object_types_helper_header = '\n'
+        object_types_helper_header += '#pragma once\n'
+        object_types_helper_header += '\n'
+        object_types_helper_header += self.GenerateObjectTypesHeader()
+        return object_types_helper_header
+    #
+    # Object types header: create object enum type header file
+    def GenerateObjectTypesHeader(self):
+        object_types_header = '#include "cast_utils.h"\n'
+        object_types_header += '\n'
+        object_types_header += '// Object Type enum for validation layer internal object handling\n'
+        object_types_header += 'typedef enum VulkanObjectType {\n'
+        object_types_header += '    kVulkanObjectTypeUnknown = 0,\n'
+        enum_num = 1
+        type_list = []
+        enum_entry_map = {}
+        non_dispatchable = {}
+        dispatchable = {}
+        object_type_info = {}
+
+        # Output enum definition as each handle is processed, saving the names to use for the conversion routine
+        for item in self.object_types:
+            fixup_name = item[2:]
+            enum_entry = 'kVulkanObjectType%s' % fixup_name
+            enum_entry_map[item] = enum_entry
+            object_types_header += '    ' + enum_entry
+            object_types_header += ' = %d,\n' % enum_num
+            enum_num += 1
+            type_list.append(enum_entry)
+            object_type_info[enum_entry] = { 'VkType': item }
+            # We'll want lists of the dispatchable and non-dispatchable handles below with access to the same info
+            if self.handle_types.IsNonDispatchable(item):
+                non_dispatchable[item] = enum_entry
+            else:
+                dispatchable[item] = enum_entry
+
+        object_types_header += '    kVulkanObjectTypeMax = %d,\n' % enum_num
+        object_types_header += '    // Aliases for backwards compatibility of "promoted" types\n'
+        for (name, alias) in self.object_type_aliases:
+            fixup_name = name[2:]
+            object_types_header += '    kVulkanObjectType{} = {},\n'.format(fixup_name, enum_entry_map[alias])
+        object_types_header += '} VulkanObjectType;\n\n'
+
+        # Output name string helper
+        object_types_header += '// Array of object name strings for OBJECT_TYPE enum conversion\n'
+        object_types_header += 'static const char * const object_string[kVulkanObjectTypeMax] = {\n'
+        object_types_header += '    "VkNonDispatchableHandle",\n'
+        for item in self.object_types:
+            object_types_header += '    "%s",\n' % item
+        object_types_header += '};\n'
+
+        # Helpers to create unified dict key from k<Name>, VK_OBJECT_TYPE_<Name>, and VK_DEBUG_REPORT_OBJECT_TYPE_<Name>
+        def dro_to_key(raw_key): return re.search('^VK_DEBUG_REPORT_OBJECT_TYPE_(.*)_EXT$', raw_key).group(1).lower().replace("_","")
+        def vko_to_key(raw_key): return re.search('^VK_OBJECT_TYPE_(.*)', raw_key).group(1).lower().replace("_","")
+        def kenum_to_key(raw_key): return re.search('^kVulkanObjectType(.*)', raw_key).group(1).lower()
+
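+        # The key helpers normalize each enum family to a lowercase, underscore-free name (e.g. "buffer")
+        # so the three enums can be cross-referenced below.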
+        dro_dict = {dro_to_key(dro) : dro for dro in self.debug_report_object_types}
+        vko_dict = {vko_to_key(vko) : vko for vko in self.core_object_types}
+
+        # Output a conversion routine from the layer object definitions to the debug report definitions
+        object_types_header += '\n'
+        object_types_header += '// Helper array to get Vulkan VK_EXT_debug_report object type enum from the internal layers version\n'
+        object_types_header += 'const VkDebugReportObjectTypeEXT get_debug_report_enum[] = {\n'
+        object_types_header += '    VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, // kVulkanObjectTypeUnknown\n' # no unknown handle, so this must be here explicitly
+
+        for object_type in type_list:
+            # VK_DEBUG_REPORT is not updated anymore; there might be missing object types
+            kenum_type = dro_dict.get(kenum_to_key(object_type), 'VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT')
+            object_types_header += '    %s,   // %s\n' % (kenum_type, object_type)
+            object_type_info[object_type]['DbgType'] = kenum_type
+        object_types_header += '};\n'
+
+        # Output a conversion routine from the layer object definitions to the core object type definitions
+        # This will intentionally *fail* for unmatched types as the VK_OBJECT_TYPE list should match the kVulkanObjectType list
+        object_types_header += '\n'
+        object_types_header += '// Helper array to get Official Vulkan VkObjectType enum from the internal layers version\n'
+        object_types_header += 'const VkObjectType get_object_type_enum[] = {\n'
+        object_types_header += '    VK_OBJECT_TYPE_UNKNOWN, // kVulkanObjectTypeUnknown\n' # no unknown handle, so must be here explicitly
+
+        for object_type in type_list:
+            kenum_type = vko_dict[kenum_to_key(object_type)]
+            object_types_header += '    %s,   // %s\n' % (kenum_type, object_type)
+            object_type_info[object_type]['VkoType'] = kenum_type
+        object_types_header += '};\n'
+
+        # Output a function converting from core object type definitions to the Vulkan object type enums
+        object_types_header += '\n'
+        object_types_header += '// Helper function to get internal layers object ids from the official Vulkan VkObjectType enum\n'
+        object_types_header += 'static inline VulkanObjectType ConvertCoreObjectToVulkanObject(VkObjectType vulkan_object_type) {\n'
+        object_types_header += '    switch (vulkan_object_type) {\n'
+
+        for object_type in type_list:
+            kenum_type = vko_dict[kenum_to_key(object_type)]
+            object_types_header += '        case %s: return %s;\n' % (kenum_type, object_type)
+        object_types_header += '        default: return kVulkanObjectTypeUnknown;\n'
+        object_types_header += '    }\n'
+        object_types_header += '};\n'
+
+        # Create a functions to convert between VkDebugReportObjectTypeEXT and VkObjectType
+        object_types_header +=     '\n'
+        object_types_header +=     'static inline VkObjectType convertDebugReportObjectToCoreObject(VkDebugReportObjectTypeEXT debug_report_obj) {\n'
+        object_types_header +=     '    switch (debug_report_obj) {\n'
+        for dr_object_type in self.debug_report_object_types:
+            object_types_header += '        case %s: return %s;\n' % (dr_object_type, vko_dict[dro_to_key(dr_object_type)])
+        object_types_header +=     '        default: return VK_OBJECT_TYPE_UNKNOWN;\n'
+        object_types_header +=     '    }\n'
+        object_types_header +=     '}\n'
+
+        object_types_header +=         '\n'
+        object_types_header +=         'static inline VkDebugReportObjectTypeEXT convertCoreObjectToDebugReportObject(VkObjectType core_report_obj) {\n'
+        object_types_header +=         '    switch (core_report_obj) {\n'
+        for core_object_type in self.core_object_types:
+            # VK_DEBUG_REPORT is not updated anymore; there might be missing object types
+            dr_object_type = dro_dict.get(vko_to_key(core_object_type))
+            if dr_object_type is not None:
+                object_types_header += '        case %s: return %s;\n' % (core_object_type, dr_object_type)
+        object_types_header +=         '        default: return VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT;\n'
+        object_types_header +=         '    }\n'
+        object_types_header +=         '}\n'
+
+        #
+        object_types_header += '\n'
+        traits_format = Outdent('''
+            template <> struct VkHandleInfo<{vk_type}> {{
+                static const VulkanObjectType kVulkanObjectType = {obj_type};
+                static const VkDebugReportObjectTypeEXT kDebugReportObjectType = {dbg_type};
+                static const VkObjectType kVkObjectType = {vko_type};
+                static const char* Typename() {{
+                    return "{vk_type}";
+                }}
+            }};
+            template <> struct VulkanObjectTypeInfo<{obj_type}> {{
+                typedef {vk_type} Type;
+            }};
+            ''')
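+        # traits_format is instantiated once per handle type below to emit the paired VkHandleInfo/VulkanObjectTypeInfo specializations.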
+
+        object_types_header += Outdent('''
+            // Traits objects from each type statically map from Vk<handleType> to the various enums
+            template <typename VkType> struct VkHandleInfo {};
+            template <VulkanObjectType id> struct VulkanObjectTypeInfo {};
+
+            // The following line must match the vulkan_core.h condition guarding VK_DEFINE_NON_DISPATCHABLE_HANDLE
+            #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__)) || defined(_M_X64) || defined(__ia64) || \
+                defined(_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
+            #define TYPESAFE_NONDISPATCHABLE_HANDLES
+            #else
+            VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkNonDispatchableHandle)
+            ''')  +'\n'
+        object_types_header += traits_format.format(vk_type='VkNonDispatchableHandle', obj_type='kVulkanObjectTypeUnknown',
+                                                  dbg_type='VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT',
+                                                  vko_type='VK_OBJECT_TYPE_UNKNOWN') + '\n'
+        object_types_header += '#endif //  VK_DEFINE_HANDLE logic duplication\n'
+
+        for vk_type, object_type in sorted(dispatchable.items()):
+            info = object_type_info[object_type]
+            object_types_header += traits_format.format(vk_type=vk_type, obj_type=object_type, dbg_type=info['DbgType'],
+                                                      vko_type=info['VkoType'])
+        object_types_header += '#ifdef TYPESAFE_NONDISPATCHABLE_HANDLES\n'
+        for vk_type, object_type in sorted(non_dispatchable.items()):
+            info = object_type_info[object_type]
+            object_types_header += traits_format.format(vk_type=vk_type, obj_type=object_type, dbg_type=info['DbgType'],
+                                                      vko_type=info['VkoType'])
+        object_types_header += '#endif // TYPESAFE_NONDISPATCHABLE_HANDLES\n'
+
+        object_types_header += Outdent('''
+            struct VulkanTypedHandle {
+                uint64_t handle;
+                VulkanObjectType type;
+                // node is optional, and if non-NULL is used to avoid a hash table lookup
+                class BASE_NODE *node;
+                template <typename Handle>
+                VulkanTypedHandle(Handle handle_, VulkanObjectType type_, class BASE_NODE *node_ = nullptr) :
+                    handle(CastToUint64(handle_)),
+                    type(type_),
+                    node(node_) {
+            #ifdef TYPESAFE_NONDISPATCHABLE_HANDLES
+                    // For 32 bit it's not always safe to check for traits <-> type
+                    // as all non-dispatchable handles have the same type-id and thus traits,
+                    // but on 64 bit we can validate the passed type matches the passed handle
+                    assert(type == VkHandleInfo<Handle>::kVulkanObjectType);
+            #endif // TYPESAFE_NONDISPATCHABLE_HANDLES
+                }
+                template <typename Handle>
+                Handle Cast() const {
+            #ifdef TYPESAFE_NONDISPATCHABLE_HANDLES
+                    assert(type == VkHandleInfo<Handle>::kVulkanObjectType);
+            #endif // TYPESAFE_NONDISPATCHABLE_HANDLES
+                    return CastFromUint64<Handle>(handle);
+                }
+                VulkanTypedHandle() :
+                    handle(VK_NULL_HANDLE),
+                    type(kVulkanObjectTypeUnknown),
+                    node(nullptr) {}
+            }; ''')  +'\n'
+
+        return object_types_header
+    #
+    # Generate pNext handling function
+    def build_safe_struct_utility_funcs(self):
+        # Construct Safe-struct helper functions
+
+        string_copy_proc = '\n\n'
+        string_copy_proc += 'char *SafeStringCopy(const char *in_string) {\n'
+        string_copy_proc += '    if (nullptr == in_string) return nullptr;\n'
+        string_copy_proc += '    char* dest = new char[std::strlen(in_string) + 1];\n'
+        string_copy_proc += '    return std::strcpy(dest, in_string);\n'
+        string_copy_proc += '}\n'
+
+        build_pnext_proc = '\n'
+        build_pnext_proc += 'void *SafePnextCopy(const void *pNext) {\n'
+        build_pnext_proc += '    if (!pNext) return nullptr;\n'
+        build_pnext_proc += '\n'
+        build_pnext_proc += '    void *safe_pNext;\n'
+        build_pnext_proc += '    const VkBaseOutStructure *header = reinterpret_cast<const VkBaseOutStructure *>(pNext);\n'
+        build_pnext_proc += '\n'
+        build_pnext_proc += '    switch (header->sType) {\n'
+        # Add special-case code to copy beloved secret loader structs
+        build_pnext_proc += '        // Special-case Loader Instance Struct passed to/from layer in pNext chain\n'
+        build_pnext_proc += '        case VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO: {\n'
+        build_pnext_proc += '            VkLayerInstanceCreateInfo *struct_copy = new VkLayerInstanceCreateInfo;\n'
+        build_pnext_proc += '            // TODO: Uses original VkLayerInstanceLink* chain, which should be okay for our uses\n'
+        build_pnext_proc += '            memcpy(struct_copy, pNext, sizeof(VkLayerInstanceCreateInfo));\n'
+        build_pnext_proc += '            struct_copy->pNext = SafePnextCopy(header->pNext);\n'
+        build_pnext_proc += '            safe_pNext = struct_copy;\n'
+        build_pnext_proc += '            break;\n'
+        build_pnext_proc += '        }\n'
+        build_pnext_proc += '        // Special-case Loader Device Struct passed to/from layer in pNext chain\n'
+        build_pnext_proc += '        case VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO: {\n'
+        build_pnext_proc += '            VkLayerDeviceCreateInfo *struct_copy = new VkLayerDeviceCreateInfo;\n'
+        build_pnext_proc += '            // TODO: Uses original VkLayerDeviceLink*, which should be okay for our uses\n'
+        build_pnext_proc += '            memcpy(struct_copy, pNext, sizeof(VkLayerDeviceCreateInfo));\n'
+        build_pnext_proc += '            struct_copy->pNext = SafePnextCopy(header->pNext);\n'
+        build_pnext_proc += '            safe_pNext = struct_copy;\n'
+        build_pnext_proc += '            break;\n'
+        build_pnext_proc += '        }\n'
+
+        free_pnext_proc = '\n'
+        free_pnext_proc += 'void FreePnextChain(const void *pNext) {\n'
+        free_pnext_proc += '    if (!pNext) return;\n'
+        free_pnext_proc += '\n'
+        free_pnext_proc += '    auto header = reinterpret_cast<const VkBaseOutStructure *>(pNext);\n'
+        free_pnext_proc += '\n'
+        free_pnext_proc += '    switch (header->sType) {\n'
+        free_pnext_proc += '        // Special-case Loader Instance Struct passed to/from layer in pNext chain\n'
+        free_pnext_proc += '        case VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO:\n'
+        free_pnext_proc += '            FreePnextChain(header->pNext);\n'
+        free_pnext_proc += '            delete reinterpret_cast<const VkLayerInstanceCreateInfo *>(pNext);\n'
+        free_pnext_proc += '            break;\n'
+        free_pnext_proc += '        // Special-case Loader Device Struct passed to/from layer in pNext chain\n'
+        free_pnext_proc += '        case VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO:\n'
+        free_pnext_proc += '            FreePnextChain(header->pNext);\n'
+        free_pnext_proc += '            delete reinterpret_cast<const VkLayerDeviceCreateInfo *>(pNext);\n'
+        free_pnext_proc += '            break;\n'
+
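+        # Emit a case for every struct that can extend another via pNext (structextends), grouping the cases under their platform #ifdef guards.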
+        chain_structs = tuple(s for s in self.structMembers if s.name in self.structextends_list)
+        ifdefs = sorted({cs.ifdef_protect for cs in chain_structs}, key = lambda i : i if i is not None else '')
+        for ifdef in ifdefs:
+            if ifdef is not None:
+                build_pnext_proc += '#ifdef %s\n' % ifdef
+                free_pnext_proc += '#ifdef %s\n' % ifdef
+
+            assorted_chain_structs = tuple(s for s in chain_structs if s.ifdef_protect == ifdef)
+            for struct in assorted_chain_structs:
+                build_pnext_proc += '        case %s:\n' % self.structTypes[struct.name].value
+                build_pnext_proc += '            safe_pNext = new safe_%s(reinterpret_cast<const %s *>(pNext));\n' % (struct.name, struct.name)
+                build_pnext_proc += '            break;\n'
+
+                free_pnext_proc += '        case %s:\n' % self.structTypes[struct.name].value
+                free_pnext_proc += '            delete reinterpret_cast<const safe_%s *>(header);\n' % struct.name
+                free_pnext_proc += '            break;\n'
+
+            if ifdef is not None:
+                build_pnext_proc += '#endif // %s\n' % ifdef
+                free_pnext_proc += '#endif // %s\n' % ifdef
+
+        build_pnext_proc += '        default: // Encountered an unknown sType -- skip (do not copy) this entry in the chain\n'
+        build_pnext_proc += '            safe_pNext = SafePnextCopy(header->pNext);\n'
+        build_pnext_proc += '            break;\n'
+        build_pnext_proc += '    }\n'
+        build_pnext_proc += '\n'
+        build_pnext_proc += '    return safe_pNext;\n'
+        build_pnext_proc += '}\n'
+
+        free_pnext_proc += '        default: // Encountered an unknown sType -- panic, there should be none such in safe chain\n'
+        free_pnext_proc += '            assert(false);\n'
+        free_pnext_proc += '            FreePnextChain(header->pNext);\n'
+        free_pnext_proc += '            break;\n'
+        free_pnext_proc += '    }\n'
+        free_pnext_proc += '}\n'
+
+        pnext_procs = string_copy_proc + build_pnext_proc + free_pnext_proc
+        return pnext_procs
+    #
+    # Determine if a structure needs a safe_struct helper function
+    # That is, it has an sType or one of its members is a pointer
+    def NeedSafeStruct(self, structure):
+        if 'VkBase' in structure.name:
+            return False
+        if 'sType' == structure.name:
+            return True
+        for member in structure.members:
+            if member.ispointer == True:
+                return True
+        return False
+    #
+    # Combine safe struct helper source file preamble with body text and return
+    def GenerateSafeStructHelperSource(self):
+        safe_struct_helper_source = '\n'
+        safe_struct_helper_source += '#include "vk_safe_struct.h"\n'
+        safe_struct_helper_source += '\n'
+        safe_struct_helper_source += '#include <string.h>\n'
+        safe_struct_helper_source += '#include <cassert>\n'
+        safe_struct_helper_source += '#include <cstring>\n'
+        safe_struct_helper_source += '\n'
+        safe_struct_helper_source += '#include <vulkan/vk_layer.h>\n'
+        safe_struct_helper_source += '\n'
+        safe_struct_helper_source += self.GenerateSafeStructSource()
+        safe_struct_helper_source += self.build_safe_struct_utility_funcs()
+
+        return safe_struct_helper_source
+    #
+    # safe_struct source -- create bodies of safe struct helper functions
+    def GenerateSafeStructSource(self):
+        safe_struct_body = []
+        wsi_structs = ['VkXlibSurfaceCreateInfoKHR',
+                       'VkXcbSurfaceCreateInfoKHR',
+                       'VkWaylandSurfaceCreateInfoKHR',
+                       'VkAndroidSurfaceCreateInfoKHR',
+                       'VkWin32SurfaceCreateInfoKHR'
+                       ]
+
+        # For abstract types we just want to save the pointer away,
+        # since we cannot make a copy.
+        abstract_types = ['AHardwareBuffer',
+                          'ANativeWindow',
+                         ]
+        for item in self.structMembers:
+            if self.NeedSafeStruct(item) == False:
+                continue
+            if item.name in wsi_structs:
+                continue
+            if item.ifdef_protect is not None:
+                safe_struct_body.append("#ifdef %s\n" % item.ifdef_protect)
+            ss_name = "safe_%s" % item.name
+            init_list = ''          # list of members in struct constructor initializer
+            default_init_list = ''  # Default constructor just inits ptrs to nullptr in initializer
+            init_func_txt = ''      # Txt for initialize() function that takes struct ptr and inits members
+            construct_txt = ''      # Body of constructor as well as body of initialize() func following init_func_txt
+            destruct_txt = ''
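+            # The per-member loop below fills these text buffers, which are then assembled into the constructor,
+            # copy constructor, assignment operator, destructor, and initialize() bodies.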
+
+            custom_construct_txt = {
+                # VkWriteDescriptorSet is special case because pointers may be non-null but ignored
+                'VkWriteDescriptorSet' :
+                    '    switch (descriptorType) {\n'
+                    '        case VK_DESCRIPTOR_TYPE_SAMPLER:\n'
+                    '        case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:\n'
+                    '        case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:\n'
+                    '        case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:\n'
+                    '        case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:\n'
+                    '        if (descriptorCount && in_struct->pImageInfo) {\n'
+                    '            pImageInfo = new VkDescriptorImageInfo[descriptorCount];\n'
+                    '            for (uint32_t i = 0; i < descriptorCount; ++i) {\n'
+                    '                pImageInfo[i] = in_struct->pImageInfo[i];\n'
+                    '            }\n'
+                    '        }\n'
+                    '        break;\n'
+                    '        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:\n'
+                    '        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:\n'
+                    '        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:\n'
+                    '        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:\n'
+                    '        if (descriptorCount && in_struct->pBufferInfo) {\n'
+                    '            pBufferInfo = new VkDescriptorBufferInfo[descriptorCount];\n'
+                    '            for (uint32_t i = 0; i < descriptorCount; ++i) {\n'
+                    '                pBufferInfo[i] = in_struct->pBufferInfo[i];\n'
+                    '            }\n'
+                    '        }\n'
+                    '        break;\n'
+                    '        case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:\n'
+                    '        case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:\n'
+                    '        if (descriptorCount && in_struct->pTexelBufferView) {\n'
+                    '            pTexelBufferView = new VkBufferView[descriptorCount];\n'
+                    '            for (uint32_t i = 0; i < descriptorCount; ++i) {\n'
+                    '                pTexelBufferView[i] = in_struct->pTexelBufferView[i];\n'
+                    '            }\n'
+                    '        }\n'
+                    '        break;\n'
+                    '        default:\n'
+                    '        break;\n'
+                    '    }\n',
+                'VkShaderModuleCreateInfo' :
+                    '    if (in_struct->pCode) {\n'
+                    '        pCode = reinterpret_cast<uint32_t *>(new uint8_t[codeSize]);\n'
+                    '        memcpy((void *)pCode, (void *)in_struct->pCode, codeSize);\n'
+                    '    }\n',
+                # VkGraphicsPipelineCreateInfo is special case because its pointers may be non-null but ignored
+                'VkGraphicsPipelineCreateInfo' :
+                    '    if (stageCount && in_struct->pStages) {\n'
+                    '        pStages = new safe_VkPipelineShaderStageCreateInfo[stageCount];\n'
+                    '        for (uint32_t i = 0; i < stageCount; ++i) {\n'
+                    '            pStages[i].initialize(&in_struct->pStages[i]);\n'
+                    '        }\n'
+                    '    }\n'
+                    '    if (in_struct->pVertexInputState)\n'
+                    '        pVertexInputState = new safe_VkPipelineVertexInputStateCreateInfo(in_struct->pVertexInputState);\n'
+                    '    else\n'
+                    '        pVertexInputState = NULL;\n'
+                    '    if (in_struct->pInputAssemblyState)\n'
+                    '        pInputAssemblyState = new safe_VkPipelineInputAssemblyStateCreateInfo(in_struct->pInputAssemblyState);\n'
+                    '    else\n'
+                    '        pInputAssemblyState = NULL;\n'
+                    '    bool has_tessellation_stage = false;\n'
+                    '    if (stageCount && pStages)\n'
+                    '        for (uint32_t i = 0; i < stageCount && !has_tessellation_stage; ++i)\n'
+                    '            if (pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT || pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)\n'
+                    '                has_tessellation_stage = true;\n'
+                    '    if (in_struct->pTessellationState && has_tessellation_stage)\n'
+                    '        pTessellationState = new safe_VkPipelineTessellationStateCreateInfo(in_struct->pTessellationState);\n'
+                    '    else\n'
+                    '        pTessellationState = NULL; // original pTessellationState pointer ignored\n'
+                    '    bool has_rasterization = in_struct->pRasterizationState ? !in_struct->pRasterizationState->rasterizerDiscardEnable : false;\n'
+                    '    if (in_struct->pViewportState && has_rasterization) {\n'
+                    '        bool is_dynamic_viewports = false;\n'
+                    '        bool is_dynamic_scissors = false;\n'
+                    '        if (in_struct->pDynamicState && in_struct->pDynamicState->pDynamicStates) {\n'
+                    '            for (uint32_t i = 0; i < in_struct->pDynamicState->dynamicStateCount && !is_dynamic_viewports; ++i)\n'
+                    '                if (in_struct->pDynamicState->pDynamicStates[i] == VK_DYNAMIC_STATE_VIEWPORT)\n'
+                    '                    is_dynamic_viewports = true;\n'
+                    '            for (uint32_t i = 0; i < in_struct->pDynamicState->dynamicStateCount && !is_dynamic_scissors; ++i)\n'
+                    '                if (in_struct->pDynamicState->pDynamicStates[i] == VK_DYNAMIC_STATE_SCISSOR)\n'
+                    '                    is_dynamic_scissors = true;\n'
+                    '        }\n'
+                    '        pViewportState = new safe_VkPipelineViewportStateCreateInfo(in_struct->pViewportState, is_dynamic_viewports, is_dynamic_scissors);\n'
+                    '    } else\n'
+                    '        pViewportState = NULL; // original pViewportState pointer ignored\n'
+                    '    if (in_struct->pRasterizationState)\n'
+                    '        pRasterizationState = new safe_VkPipelineRasterizationStateCreateInfo(in_struct->pRasterizationState);\n'
+                    '    else\n'
+                    '        pRasterizationState = NULL;\n'
+                    '    if (in_struct->pMultisampleState && has_rasterization)\n'
+                    '        pMultisampleState = new safe_VkPipelineMultisampleStateCreateInfo(in_struct->pMultisampleState);\n'
+                    '    else\n'
+                    '        pMultisampleState = NULL; // original pMultisampleState pointer ignored\n'
+                    '    // needs a tracked subpass state uses_depthstencil_attachment\n'
+                    '    if (in_struct->pDepthStencilState && has_rasterization && uses_depthstencil_attachment)\n'
+                    '        pDepthStencilState = new safe_VkPipelineDepthStencilStateCreateInfo(in_struct->pDepthStencilState);\n'
+                    '    else\n'
+                    '        pDepthStencilState = NULL; // original pDepthStencilState pointer ignored\n'
+                    '    // needs a tracked subpass state usesColorAttachment\n'
+                    '    if (in_struct->pColorBlendState && has_rasterization && uses_color_attachment)\n'
+                    '        pColorBlendState = new safe_VkPipelineColorBlendStateCreateInfo(in_struct->pColorBlendState);\n'
+                    '    else\n'
+                    '        pColorBlendState = NULL; // original pColorBlendState pointer ignored\n'
+                    '    if (in_struct->pDynamicState)\n'
+                    '        pDynamicState = new safe_VkPipelineDynamicStateCreateInfo(in_struct->pDynamicState);\n'
+                    '    else\n'
+                    '        pDynamicState = NULL;\n',
+                 # VkPipelineViewportStateCreateInfo is special case because its pointers may be non-null but ignored
+                'VkPipelineViewportStateCreateInfo' :
+                    '    if (in_struct->pViewports && !is_dynamic_viewports) {\n'
+                    '        pViewports = new VkViewport[in_struct->viewportCount];\n'
+                    '        memcpy ((void *)pViewports, (void *)in_struct->pViewports, sizeof(VkViewport)*in_struct->viewportCount);\n'
+                    '    }\n'
+                    '    else\n'
+                    '        pViewports = NULL;\n'
+                    '    if (in_struct->pScissors && !is_dynamic_scissors) {\n'
+                    '        pScissors = new VkRect2D[in_struct->scissorCount];\n'
+                    '        memcpy ((void *)pScissors, (void *)in_struct->pScissors, sizeof(VkRect2D)*in_struct->scissorCount);\n'
+                    '    }\n'
+                    '    else\n'
+                    '        pScissors = NULL;\n',
+                # VkDescriptorSetLayoutBinding is special case because its pImmutableSamplers pointer may be non-null but ignored
+                'VkDescriptorSetLayoutBinding' :
+                    '    const bool sampler_type = in_struct->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER || in_struct->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;\n'
+                    '    if (descriptorCount && in_struct->pImmutableSamplers && sampler_type) {\n'
+                    '        pImmutableSamplers = new VkSampler[descriptorCount];\n'
+                    '        for (uint32_t i = 0; i < descriptorCount; ++i) {\n'
+                    '            pImmutableSamplers[i] = in_struct->pImmutableSamplers[i];\n'
+                    '        }\n'
+                    '    }\n',
+            }
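+            # These hand-written bodies replace the generated construct_txt for the special-case structs noted above.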
+
+            custom_copy_txt = {
+                # VkGraphicsPipelineCreateInfo is special case because it has custom construct parameters
+                'VkGraphicsPipelineCreateInfo' :
+                    '    pNext = SafePnextCopy(src.pNext);\n'
+                    '    if (stageCount && src.pStages) {\n'
+                    '        pStages = new safe_VkPipelineShaderStageCreateInfo[stageCount];\n'
+                    '        for (uint32_t i = 0; i < stageCount; ++i) {\n'
+                    '            pStages[i].initialize(&src.pStages[i]);\n'
+                    '        }\n'
+                    '    }\n'
+                    '    if (src.pVertexInputState)\n'
+                    '        pVertexInputState = new safe_VkPipelineVertexInputStateCreateInfo(*src.pVertexInputState);\n'
+                    '    else\n'
+                    '        pVertexInputState = NULL;\n'
+                    '    if (src.pInputAssemblyState)\n'
+                    '        pInputAssemblyState = new safe_VkPipelineInputAssemblyStateCreateInfo(*src.pInputAssemblyState);\n'
+                    '    else\n'
+                    '        pInputAssemblyState = NULL;\n'
+                    '    bool has_tessellation_stage = false;\n'
+                    '    if (stageCount && pStages)\n'
+                    '        for (uint32_t i = 0; i < stageCount && !has_tessellation_stage; ++i)\n'
+                    '            if (pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT || pStages[i].stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)\n'
+                    '                has_tessellation_stage = true;\n'
+                    '    if (src.pTessellationState && has_tessellation_stage)\n'
+                    '        pTessellationState = new safe_VkPipelineTessellationStateCreateInfo(*src.pTessellationState);\n'
+                    '    else\n'
+                    '        pTessellationState = NULL; // original pTessellationState pointer ignored\n'
+                    '    bool has_rasterization = src.pRasterizationState ? !src.pRasterizationState->rasterizerDiscardEnable : false;\n'
+                    '    if (src.pViewportState && has_rasterization) {\n'
+                    '        pViewportState = new safe_VkPipelineViewportStateCreateInfo(*src.pViewportState);\n'
+                    '    } else\n'
+                    '        pViewportState = NULL; // original pViewportState pointer ignored\n'
+                    '    if (src.pRasterizationState)\n'
+                    '        pRasterizationState = new safe_VkPipelineRasterizationStateCreateInfo(*src.pRasterizationState);\n'
+                    '    else\n'
+                    '        pRasterizationState = NULL;\n'
+                    '    if (src.pMultisampleState && has_rasterization)\n'
+                    '        pMultisampleState = new safe_VkPipelineMultisampleStateCreateInfo(*src.pMultisampleState);\n'
+                    '    else\n'
+                    '        pMultisampleState = NULL; // original pMultisampleState pointer ignored\n'
+                    '    if (src.pDepthStencilState && has_rasterization)\n'
+                    '        pDepthStencilState = new safe_VkPipelineDepthStencilStateCreateInfo(*src.pDepthStencilState);\n'
+                    '    else\n'
+                    '        pDepthStencilState = NULL; // original pDepthStencilState pointer ignored\n'
+                    '    if (src.pColorBlendState && has_rasterization)\n'
+                    '        pColorBlendState = new safe_VkPipelineColorBlendStateCreateInfo(*src.pColorBlendState);\n'
+                    '    else\n'
+                    '        pColorBlendState = NULL; // original pColorBlendState pointer ignored\n'
+                    '    if (src.pDynamicState)\n'
+                    '        pDynamicState = new safe_VkPipelineDynamicStateCreateInfo(*src.pDynamicState);\n'
+                    '    else\n'
+                    '        pDynamicState = NULL;\n',
+                 # VkPipelineViewportStateCreateInfo is special case because it has custom construct parameters
+                'VkPipelineViewportStateCreateInfo' :
+                    '    pNext = SafePnextCopy(src.pNext);\n'
+                    '    if (src.pViewports) {\n'
+                    '        pViewports = new VkViewport[src.viewportCount];\n'
+                    '        memcpy ((void *)pViewports, (void *)src.pViewports, sizeof(VkViewport)*src.viewportCount);\n'
+                    '    }\n'
+                    '    else\n'
+                    '        pViewports = NULL;\n'
+                    '    if (src.pScissors) {\n'
+                    '        pScissors = new VkRect2D[src.scissorCount];\n'
+                    '        memcpy ((void *)pScissors, (void *)src.pScissors, sizeof(VkRect2D)*src.scissorCount);\n'
+                    '    }\n'
+                    '    else\n'
+                    '        pScissors = NULL;\n',
+            }
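+            # custom_copy_txt likewise overrides the generated copy-constructor body for structs built with custom construct parameters.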
+
+            custom_destruct_txt = {'VkShaderModuleCreateInfo' :
+                                   '    if (pCode)\n'
+                                   '        delete[] reinterpret_cast<const uint8_t *>(pCode);\n' }
+            copy_pnext = ''
+            copy_strings = ''
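+            # pNext chains are deep-copied with SafePnextCopy() and char* members with SafeStringCopy();
+            # other members fall through to the generic handling below.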
+            for member in item.members:
+                m_type = member.type
+                if member.name == 'pNext':
+                    copy_pnext = '    pNext = SafePnextCopy(in_struct->pNext);\n'
+                if member.type in self.structNames:
+                    member_index = next((i for i, v in enumerate(self.structMembers) if v[0] == member.type), None)
+                    if member_index is not None and self.NeedSafeStruct(self.structMembers[member_index]) == True:
+                        m_type = 'safe_%s' % member.type
+                if member.ispointer and 'safe_' not in m_type and self.TypeContainsObjectHandle(member.type, False) == False:
+                    # Ptr types w/o a safe_struct, for non-null case need to allocate new ptr and copy data in
+                    if m_type in ['void', 'char']:
+                        if member.name != 'pNext':
+                            if m_type == 'char':
+                                # Create deep copies of strings
+                                if member.len:
+                                    copy_strings += '    char **tmp_%s = new char *[in_struct->%s];\n' % (member.name, member.len)
+                                    copy_strings += '    for (uint32_t i = 0; i < %s; ++i) {\n' % member.len
+                                    copy_strings += '        tmp_%s[i] = SafeStringCopy(in_struct->%s[i]);\n' % (member.name, member.name)
+                                    copy_strings += '    }\n'
+                                    copy_strings += '    %s = tmp_%s;\n' % (member.name, member.name)
+
+                                    destruct_txt += '    if (%s) {\n' % member.name
+                                    destruct_txt += '        for (uint32_t i = 0; i < %s; ++i) {\n' % member.len
+                                    destruct_txt += '            delete [] %s[i];\n' % member.name
+                                    destruct_txt += '        }\n'
+                                    destruct_txt += '        delete [] %s;\n' % member.name
+                                    destruct_txt += '    }\n'
+                                else:
+                                    copy_strings += '    %s = SafeStringCopy(in_struct->%s);\n' % (member.name, member.name)
+                                    destruct_txt += '    if (%s) delete [] %s;\n' % (member.name, member.name)
+                            else:
+                                # For these exceptions just copy initial value over for now
+                                init_list += '\n    %s(in_struct->%s),' % (member.name, member.name)
+                                init_func_txt += '    %s = in_struct->%s;\n' % (member.name, member.name)
+                        default_init_list += '\n    %s(nullptr),' % (member.name)
+                    else:
+                        default_init_list += '\n    %s(nullptr),' % (member.name)
+                        init_list += '\n    %s(nullptr),' % (member.name)
+                        if m_type in abstract_types:
+                            construct_txt += '    %s = in_struct->%s;\n' % (member.name, member.name)
+                        else:
+                            init_func_txt += '    %s = nullptr;\n' % (member.name)
+                            if not member.isstaticarray and (member.len is None or '/' in member.len):
+                                construct_txt += '    if (in_struct->%s) {\n' % member.name
+                                construct_txt += '        %s = new %s(*in_struct->%s);\n' % (member.name, m_type, member.name)
+                                construct_txt += '    }\n'
+                                destruct_txt += '    if (%s)\n' % member.name
+                                destruct_txt += '        delete %s;\n' % member.name
+                            else:
+                                construct_txt += '    if (in_struct->%s) {\n' % member.name
+                                construct_txt += '        %s = new %s[in_struct->%s];\n' % (member.name, m_type, member.len)
+                                construct_txt += '        memcpy ((void *)%s, (void *)in_struct->%s, sizeof(%s)*in_struct->%s);\n' % (member.name, member.name, m_type, member.len)
+                                construct_txt += '    }\n'
+                                destruct_txt += '    if (%s)\n' % member.name
+                                destruct_txt += '        delete[] %s;\n' % member.name
+                elif member.isstaticarray or member.len is not None:
+                    if member.len is None:
+                        # Extract length of static array by grabbing val between []
+                        static_array_size = re.match(r"[^[]*\[([^]]*)\]", member.cdecl)
+                        construct_txt += '    for (uint32_t i = 0; i < %s; ++i) {\n' % static_array_size.group(1)
+                        construct_txt += '        %s[i] = in_struct->%s[i];\n' % (member.name, member.name)
+                        construct_txt += '    }\n'
+                    else:
+                        # Init array ptr to NULL
+                        default_init_list += '\n    %s(nullptr),' % member.name
+                        init_list += '\n    %s(nullptr),' % member.name
+                        init_func_txt += '    %s = nullptr;\n' % member.name
+                        array_element = 'in_struct->%s[i]' % member.name
+                        if member.type in self.structNames:
+                            member_index = next((i for i, v in enumerate(self.structMembers) if v[0] == member.type), None)
+                            if member_index is not None and self.NeedSafeStruct(self.structMembers[member_index]) == True:
+                                array_element = '%s(&in_struct->safe_%s[i])' % (member.type, member.name)
+                        construct_txt += '    if (%s && in_struct->%s) {\n' % (member.len, member.name)
+                        construct_txt += '        %s = new %s[%s];\n' % (member.name, m_type, member.len)
+                        destruct_txt += '    if (%s)\n' % member.name
+                        destruct_txt += '        delete[] %s;\n' % member.name
+                        construct_txt += '        for (uint32_t i = 0; i < %s; ++i) {\n' % (member.len)
+                        if 'safe_' in m_type:
+                            construct_txt += '            %s[i].initialize(&in_struct->%s[i]);\n' % (member.name, member.name)
+                        else:
+                            construct_txt += '            %s[i] = %s;\n' % (member.name, array_element)
+                        construct_txt += '        }\n'
+                        construct_txt += '    }\n'
+                elif member.ispointer == True:
+                    default_init_list += '\n    %s(nullptr),' % (member.name)
+                    init_list += '\n    %s(nullptr),' % (member.name)
+                    init_func_txt += '    %s = nullptr;\n' % (member.name)
+                    construct_txt += '    if (in_struct->%s)\n' % member.name
+                    construct_txt += '        %s = new %s(in_struct->%s);\n' % (member.name, m_type, member.name)
+                    destruct_txt += '    if (%s)\n' % member.name
+                    destruct_txt += '        delete %s;\n' % member.name
+                elif 'safe_' in m_type:
+                    init_list += '\n    %s(&in_struct->%s),' % (member.name, member.name)
+                    init_func_txt += '    %s.initialize(&in_struct->%s);\n' % (member.name, member.name)
+                else:
+                    init_list += '\n    %s(in_struct->%s),' % (member.name, member.name)
+                    init_func_txt += '    %s = in_struct->%s;\n' % (member.name, member.name)
+            if '' != init_list:
+                init_list = init_list[:-1] # hack off final comma
+
+
+            if item.name in custom_construct_txt:
+                construct_txt = custom_construct_txt[item.name]
+
+            construct_txt = copy_pnext + copy_strings + construct_txt
+
+            if item.name in custom_destruct_txt:
+                destruct_txt = custom_destruct_txt[item.name]
+
+            if copy_pnext:
+                destruct_txt += '    if (pNext)\n        FreePnextChain(pNext);\n'
+
+            safe_struct_body.append("\n%s::%s(const %s* in_struct%s) :%s\n{\n%s}" % (ss_name, ss_name, item.name, self.custom_construct_params.get(item.name, ''), init_list, construct_txt))
+            if '' != default_init_list:
+                default_init_list = " :%s" % (default_init_list[:-1])
+            safe_struct_body.append("\n%s::%s()%s\n{}" % (ss_name, ss_name, default_init_list))
+            # Create slight variation of init and construct txt for copy constructor that takes a src object reference vs. struct ptr
+            copy_construct_init = init_func_txt.replace('in_struct->', 'src.')
+            copy_construct_txt = construct_txt.replace(' (in_struct->', ' (src.')            # Exclude 'if' blocks from next line
+            copy_construct_txt = re.sub('(new \\w+)\\(in_struct->', '\\1(*src.', construct_txt) # Pass object to copy constructors
+            copy_construct_txt = copy_construct_txt.replace('in_struct->', 'src.')              # Modify remaining struct refs for src object
+            if item.name in custom_copy_txt:
+                copy_construct_txt = custom_copy_txt[item.name]
+            copy_assign_txt = '    if (&src == this) return *this;\n\n' + destruct_txt + '\n' + copy_construct_init + copy_construct_txt + '\n    return *this;'
+            safe_struct_body.append("\n%s::%s(const %s& src)\n{\n%s%s}" % (ss_name, ss_name, ss_name, copy_construct_init, copy_construct_txt)) # Copy constructor
+            safe_struct_body.append("\n%s& %s::operator=(const %s& src)\n{\n%s\n}" % (ss_name, ss_name, ss_name, copy_assign_txt)) # Copy assignment operator
+            safe_struct_body.append("\n%s::~%s()\n{\n%s}" % (ss_name, ss_name, destruct_txt))
+            safe_struct_body.append("\nvoid %s::initialize(const %s* in_struct%s)\n{\n%s%s}" % (ss_name, item.name, self.custom_construct_params.get(item.name, ''), init_func_txt, construct_txt))
+            # Copy initializer uses same txt as copy constructor but has a ptr and not a reference
+            init_copy = copy_construct_init.replace('src.', 'src->')
+            init_construct = copy_construct_txt.replace('src.', 'src->')
+            safe_struct_body.append("\nvoid %s::initialize(const %s* src)\n{\n%s%s}" % (ss_name, ss_name, init_copy, init_construct))
+            if item.ifdef_protect is not None:
+                safe_struct_body.append("#endif // %s\n" % item.ifdef_protect)
+        return "\n".join(safe_struct_body)
+    #
+    # Generate the type map
+    def GenerateTypeMapHelperHeader(self):
+        prefix = 'Lvl'
+        fprefix = 'lvl_'
+        typemap = prefix + 'TypeMap'
+        idmap = prefix + 'STypeMap'
+        type_member = 'Type'
+        id_member = 'kSType'
+        id_decl = 'static const VkStructureType '
+        generic_header = 'VkBaseOutStructure'
+        typename_func = fprefix + 'typename'
+        idname_func = fprefix + 'stype_name'
+        find_func = fprefix + 'find_in_chain'
+        init_func = fprefix + 'init_struct'
+
+        explanatory_comment = '\n'.join((
+                '// These empty generic templates are specialized for each type with sType',
+                '// members and for each sType -- providing a two-way map between structure',
+                '// types and sTypes'))
+
+        empty_typemap = 'template <typename T> struct ' + typemap + ' {};'
+        typemap_format  = 'template <> struct {template}<{typename}> {{\n'
+        typemap_format += '    {id_decl}{id_member} = {id_value};\n'
+        typemap_format += '}};\n'
+
+        empty_idmap = 'template <VkStructureType id> struct ' + idmap + ' {};'
+        idmap_format = ''.join((
+            'template <> struct {template}<{id_value}> {{\n',
+            '    typedef {typename} {typedef};\n',
+            '}};\n'))
+
+        # Define the utilities here so that any renaming stays consistent; if this grows large, refactor it into a fixed .h file
+        utilities_format = '\n'.join((
+            '// Find an entry of the given type in the pNext chain',
+            'template <typename T> const T *{find_func}(const void *next) {{',
+            '    const {header} *current = reinterpret_cast<const {header} *>(next);',
+            '    const T *found = nullptr;',
+            '    while (current) {{',
+            '        if ({type_map}<T>::{id_member} == current->sType) {{',
+            '            found = reinterpret_cast<const T*>(current);',
+            '            current = nullptr;',
+            '        }} else {{',
+            '            current = current->pNext;',
+            '        }}',
+            '    }}',
+            '    return found;',
+            '}}',
+            '',
+            '// Init the header of an sType struct with pNext',
+            'template <typename T> T {init_func}(void *p_next) {{',
+            '    T out = {{}};',
+            '    out.sType = {type_map}<T>::kSType;',
+            '    out.pNext = p_next;',
+            '    return out;',
+            '}}',
+            '',
+            '// Init the header of an sType struct',
+            'template <typename T> T {init_func}() {{',
+            '    T out = {{}};',
+            '    out.sType = {type_map}<T>::kSType;',
+            '    return out;',
+            '}}',
+
+            ''))
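+        # Illustrative caller-side usage of the two helpers emitted above (C++, not part of
+        # this generator's output); the chain/struct types are just examples:
+        #   auto feedback = lvl_find_in_chain<VkPipelineCreationFeedbackCreateInfoEXT>(create_info.pNext);
+        #   VkImageViewCreateInfo view_info = lvl_init_struct<VkImageViewCreateInfo>();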
+
+        code = []
+
+        # Generate header
+        code.append('\n'.join((
+            '#pragma once',
+            '#include <vulkan/vulkan.h>\n',
+            explanatory_comment, '',
+            empty_idmap,
+            empty_typemap, '')))
+
+        # Generate the specializations for each type and stype
+        for item in self.structMembers:
+            typename = item.name
+            info = self.structTypes.get(typename)
+            if not info:
+                continue
+
+            if item.ifdef_protect is not None:
+                code.append('#ifdef %s' % item.ifdef_protect)
+
+            code.append('// Map type {} to id {}'.format(typename, info.value))
+            code.append(typemap_format.format(template=typemap, typename=typename, id_value=info.value,
+                id_decl=id_decl, id_member=id_member))
+            code.append(idmap_format.format(template=idmap, typename=typename, id_value=info.value, typedef=type_member))
+
+            if item.ifdef_protect is not None:
+                code.append('#endif // %s' % item.ifdef_protect)
+
+        # Generate utilities for all types
+        code.append('\n'.join((
+            utilities_format.format(id_member=id_member, id_map=idmap, type_map=typemap,
+                type_member=type_member, header=generic_header, typename_func=typename_func, idname_func=idname_func,
+                find_func=find_func, init_func=init_func), ''
+            )))
+
+        return "\n".join(code)
+
+    #
+    # Create a helper file and return it as a string
+    def OutputDestFile(self):
+        if self.helper_file_type == 'enum_string_header':
+            return self.GenerateEnumStringHelperHeader()
+        elif self.helper_file_type == 'safe_struct_header':
+            return self.GenerateSafeStructHelperHeader()
+        elif self.helper_file_type == 'safe_struct_source':
+            return self.GenerateSafeStructHelperSource()
+        elif self.helper_file_type == 'object_types_header':
+            return self.GenerateObjectTypesHelperHeader()
+        elif self.helper_file_type == 'extension_helper_header':
+            return self.GenerateExtensionHelperHeader()
+        elif self.helper_file_type == 'typemap_helper_header':
+            return self.GenerateTypeMapHelperHeader()
+        else:
+            return 'Bad Helper File Generator Option %s' % self.helper_file_type
diff --git a/src/third_party/vulkan-validation-layers/src/scripts/known_good.json b/src/third_party/vulkan-validation-layers/src/scripts/known_good.json
new file mode 100644
index 0000000..140836e
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/scripts/known_good.json
@@ -0,0 +1,98 @@
+{
+  "repos" : [
+    {
+      "name" : "glslang",
+      "url" : "https://github.com/KhronosGroup/glslang.git",
+      "sub_dir" : "glslang",
+      "build_dir" : "glslang/build",
+      "install_dir" : "glslang/build/install",
+      "commit" : "b131630e7c749a5dc19faa458024260c71fb170f",
+      "prebuild" : [
+        "python update_glslang_sources.py"
+      ],
+      "cmake_options" : [
+        "-DUSE_CCACHE=ON"
+      ]
+    },
+    {
+      "name" : "Vulkan-Headers",
+      "url" : "https://github.com/KhronosGroup/Vulkan-Headers.git",
+      "sub_dir" : "Vulkan-Headers",
+      "build_dir" : "Vulkan-Headers/build",
+      "install_dir" : "Vulkan-Headers/build/install",
+      "commit" : "v1.1.130"
+    },
+    {
+      "name" : "VulkanTools",
+      "url" : "https://github.com/LunarG/VulkanTools.git",
+      "sub_dir" : "VulkanTools",
+      "build_dir" : "VulkanTools/build",
+      "install_dir" : "VulkanTools/build/install",
+      "commit" : "843d884f8aa328d8e33bc6ce996cd5799e16c4f4",
+      "deps" : [
+        {
+          "var_name" : "VULKAN_HEADERS_INSTALL_DIR",
+          "repo_name" : "Vulkan-Headers"
+        },
+        {
+          "var_name" : "VULKAN_LOADER_INSTALL_DIR",
+          "repo_name" : "Vulkan-Loader"
+        },
+        {
+          "var_name" : "VULKAN_VALIDATIONLAYERS_INSTALL_DIR",
+          "repo_name" : "Vulkan-ValidationLayers"
+        }
+      ],
+      "prebuild_linux" : [
+        "bash update_external_sources.sh"
+      ],
+      "prebuild_windows" : [
+        ".\\update_external_sources.bat"
+      ],
+      "cmake_options" : [
+        "-DBUILD_TESTS=NO",
+        "-DBUILD_VKTRACE=NO",
+        "-DBUILD_VLF=NO",
+        "-DBUILD_VIA=NO"
+      ],
+      "ci_only" : [
+        "TRAVIS"
+      ],
+      "build_step" : "skip"
+    },
+    {
+      "name" : "Vulkan-Tools",
+      "url" : "https://github.com/KhronosGroup/Vulkan-Tools.git",
+      "sub_dir" : "Vulkan-Tools",
+      "build_dir" : "Vulkan-Tools/build",
+      "install_dir" : "Vulkan-Tools/build/install",
+      "commit" : "5ceb7be9a3bf86f34ade5a7f72459248d99a4b76",
+      "deps" : [
+        {
+          "var_name" : "VULKAN_HEADERS_INSTALL_DIR",
+          "repo_name" : "Vulkan-Headers"
+        },
+        {
+          "var_name" : "GLSLANG_INSTALL_DIR",
+          "repo_name" : "glslang"
+        },
+        {
+          "var_name" : "MOLTENVK_REPO_ROOT",
+          "repo_name" : "MoltenVK"
+        }
+      ],
+      "cmake_options" : [
+        "-DBUILD_CUBE=NO",
+        "-DBUILD_VULKANINFO=NO",
+        "-DINSTALL_ICD=ON"
+      ],
+      "ci_only" : [
+        "TRAVIS"
+      ]
+    }
+  ],
+  "install_names" : {
+      "glslang" : "GLSLANG_INSTALL_DIR",
+      "Vulkan-Headers" : "VULKAN_HEADERS_INSTALL_DIR"
+    }
+}
diff --git a/src/third_party/vulkan-validation-layers/src/scripts/layer_chassis_dispatch_generator.py b/src/third_party/vulkan-validation-layers/src/scripts/layer_chassis_dispatch_generator.py
new file mode 100644
index 0000000..ccbb0e0
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/scripts/layer_chassis_dispatch_generator.py
@@ -0,0 +1,1796 @@
+#!/usr/bin/python3 -i
+#
+# Copyright (c) 2015-2019 The Khronos Group Inc.
+# Copyright (c) 2015-2019 Valve Corporation
+# Copyright (c) 2015-2019 LunarG, Inc.
+# Copyright (c) 2015-2019 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Tobin Ehlis <tobine@google.com>
+# Author: Mark Lobodzinski <mark@lunarg.com>
+
+import os,re,sys
+import xml.etree.ElementTree as etree
+from generator import *
+from collections import namedtuple
+from common_codegen import *
+
+# LayerChassisDispatchGeneratorOptions - subclass of GeneratorOptions.
+#
+# Adds options used by LayerChassisDispatchOutputGenerator objects during
+# layer chassis dispatch file generation.
+#
+# Additional members
+#   prefixText - list of strings to prefix generated header with
+#     (usually a copyright statement + calling convention macros).
+#   protectFile - True if multiple inclusion protection should be
+#     generated (based on the filename) around the entire header.
+#   protectFeature - True if #ifndef..#endif protection should be
+#     generated around a feature interface in the header file.
+#   genFuncPointers - True if function pointer typedefs should be
+#     generated
+#   protectProto - If conditional protection should be generated
+#     around prototype declarations, set to either '#ifdef'
+#     to require opt-in (#ifdef protectProtoStr) or '#ifndef'
+#     to require opt-out (#ifndef protectProtoStr). Otherwise
+#     set to None.
+#   protectProtoStr - #ifdef/#ifndef symbol to use around prototype
+#     declarations, if protectProto is set
+#   apicall - string to use for the function declaration prefix,
+#     such as APICALL on Windows.
+#   apientry - string to use for the calling convention macro,
+#     in typedefs, such as APIENTRY.
+#   apientryp - string to use for the calling convention macro
+#     in function pointer typedefs, such as APIENTRYP.
+#   indentFuncProto - True if prototype declarations should put each
+#     parameter on a separate line
+#   indentFuncPointer - True if typedefed function pointers should put each
+#     parameter on a separate line
+#   alignFuncParam - if nonzero and parameters are being put on a
+#     separate line, align parameter names at the specified column
+class LayerChassisDispatchGeneratorOptions(GeneratorOptions):
+    def __init__(self,
+                 conventions = None,
+                 filename = None,
+                 directory = '.',
+                 apiname = None,
+                 profile = None,
+                 versions = '.*',
+                 emitversions = '.*',
+                 defaultExtensions = None,
+                 addExtensions = None,
+                 removeExtensions = None,
+                 emitExtensions = None,
+                 sortProcedure = regSortFeatures,
+                 prefixText = "",
+                 genFuncPointers = True,
+                 protectFile = True,
+                 protectFeature = True,
+                 apicall = '',
+                 apientry = '',
+                 apientryp = '',
+                 indentFuncProto = True,
+                 indentFuncPointer = False,
+                 alignFuncParam = 0,
+                 expandEnumerants = True):
+        GeneratorOptions.__init__(self, conventions, filename, directory, apiname, profile,
+                                  versions, emitversions, defaultExtensions,
+                                  addExtensions, removeExtensions, emitExtensions, sortProcedure)
+        self.prefixText      = prefixText
+        self.genFuncPointers = genFuncPointers
+        self.protectFile     = protectFile
+        self.protectFeature  = protectFeature
+        self.apicall         = apicall
+        self.apientry        = apientry
+        self.apientryp       = apientryp
+        self.indentFuncProto = indentFuncProto
+        self.indentFuncPointer = indentFuncPointer
+        self.alignFuncParam   = alignFuncParam
+        self.expandEnumerants = expandEnumerants
+
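+# Illustrative only -- the registry driver that constructs these options is not part of
+# this patch. A front-end script (e.g. lvl_genvk.py) would typically do something like:
+#
+#   opts = LayerChassisDispatchGeneratorOptions(
+#       filename       = 'layer_chassis_dispatch.cpp',
+#       directory      = '.',
+#       apiname        = 'vulkan',
+#       emitversions   = '.*',
+#       emitExtensions = '.*',
+#       alignFuncParam = 48)
+#   registry.setGenerator(LayerChassisDispatchOutputGenerator(diagFile = None))
+#   registry.apiGen(opts)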
+
+# LayerChassisDispatchOutputGenerator - subclass of OutputGenerator.
+# Generates layer chassis non-dispatchable handle-wrapping code.
+#
+# ---- methods ----
+# LayerChassisDispatchOutputGenerator(errFile, warnFile, diagFile) - args as for OutputGenerator. Defines additional internal state.
+# ---- methods overriding base class ----
+# beginFile(genOpts)
+# endFile()
+# beginFeature(interface, emit)
+# endFeature()
+# genCmd(cmdinfo)
+# genStruct()
+# genType()
+class LayerChassisDispatchOutputGenerator(OutputGenerator):
+    """Generate layer chassis handle wrapping code based on XML element attributes"""
+    inline_copyright_message = """
+// This file is ***GENERATED***.  Do Not Edit.
+// See layer_chassis_dispatch_generator.py for modifications.
+
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ */"""
+
+    inline_custom_source_preamble = """
+
+#define DISPATCH_MAX_STACK_ALLOCATIONS 32
+
+// The VK_EXT_pipeline_creation_feedback extension returns data from the driver -- we've created a copy of the pnext chain, so
+// copy the returned data to the caller before freeing the copy's data.
+void CopyCreatePipelineFeedbackData(const void *src_chain, const void *dst_chain) {
+    auto src_feedback_struct = lvl_find_in_chain<VkPipelineCreationFeedbackCreateInfoEXT>(src_chain);
+    if (!src_feedback_struct) return;
+    auto dst_feedback_struct = const_cast<VkPipelineCreationFeedbackCreateInfoEXT *>(
+        lvl_find_in_chain<VkPipelineCreationFeedbackCreateInfoEXT>(dst_chain));
+    *dst_feedback_struct->pPipelineCreationFeedback = *src_feedback_struct->pPipelineCreationFeedback;
+    for (uint32_t i = 0; i < src_feedback_struct->pipelineStageCreationFeedbackCount; i++) {
+        dst_feedback_struct->pPipelineStageCreationFeedbacks[i] = src_feedback_struct->pPipelineStageCreationFeedbacks[i];
+    }
+}
+
+VkResult DispatchCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
+                                         const VkGraphicsPipelineCreateInfo *pCreateInfos,
+                                         const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CreateGraphicsPipelines(device, pipelineCache, createInfoCount,
+                                                                                           pCreateInfos, pAllocator, pPipelines);
+    safe_VkGraphicsPipelineCreateInfo *local_pCreateInfos = nullptr;
+    if (pCreateInfos) {
+        local_pCreateInfos = new safe_VkGraphicsPipelineCreateInfo[createInfoCount];
+        read_lock_guard_t lock(dispatch_lock);
+        for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
+            bool uses_color_attachment = false;
+            bool uses_depthstencil_attachment = false;
+            {
+                const auto subpasses_uses_it = layer_data->renderpasses_states.find(layer_data->Unwrap(pCreateInfos[idx0].renderPass));
+                if (subpasses_uses_it != layer_data->renderpasses_states.end()) {
+                    const auto &subpasses_uses = subpasses_uses_it->second;
+                    if (subpasses_uses.subpasses_using_color_attachment.count(pCreateInfos[idx0].subpass))
+                        uses_color_attachment = true;
+                    if (subpasses_uses.subpasses_using_depthstencil_attachment.count(pCreateInfos[idx0].subpass))
+                        uses_depthstencil_attachment = true;
+                }
+            }
+
+            local_pCreateInfos[idx0].initialize(&pCreateInfos[idx0], uses_color_attachment, uses_depthstencil_attachment);
+
+            if (pCreateInfos[idx0].basePipelineHandle) {
+                local_pCreateInfos[idx0].basePipelineHandle = layer_data->Unwrap(pCreateInfos[idx0].basePipelineHandle);
+            }
+            if (pCreateInfos[idx0].layout) {
+                local_pCreateInfos[idx0].layout = layer_data->Unwrap(pCreateInfos[idx0].layout);
+            }
+            if (pCreateInfos[idx0].pStages) {
+                for (uint32_t idx1 = 0; idx1 < pCreateInfos[idx0].stageCount; ++idx1) {
+                    if (pCreateInfos[idx0].pStages[idx1].module) {
+                        local_pCreateInfos[idx0].pStages[idx1].module = layer_data->Unwrap(pCreateInfos[idx0].pStages[idx1].module);
+                    }
+                }
+            }
+            if (pCreateInfos[idx0].renderPass) {
+                local_pCreateInfos[idx0].renderPass = layer_data->Unwrap(pCreateInfos[idx0].renderPass);
+            }
+        }
+    }
+    if (pipelineCache) {
+        pipelineCache = layer_data->Unwrap(pipelineCache);
+    }
+
+    VkResult result = layer_data->device_dispatch_table.CreateGraphicsPipelines(device, pipelineCache, createInfoCount,
+                                                                                local_pCreateInfos->ptr(), pAllocator, pPipelines);
+    for (uint32_t i = 0; i < createInfoCount; ++i) {
+        if (pCreateInfos[i].pNext != VK_NULL_HANDLE) {
+            CopyCreatePipelineFeedbackData(local_pCreateInfos[i].pNext, pCreateInfos[i].pNext);
+        }
+    }
+
+    delete[] local_pCreateInfos;
+    {
+        for (uint32_t i = 0; i < createInfoCount; ++i) {
+            if (pPipelines[i] != VK_NULL_HANDLE) {
+                pPipelines[i] = layer_data->WrapNew(pPipelines[i]);
+            }
+        }
+    }
+    return result;
+}
+
+template <typename T>
+static void UpdateCreateRenderPassState(ValidationObject *layer_data, const T *pCreateInfo, VkRenderPass renderPass) {
+    auto &renderpass_state = layer_data->renderpasses_states[renderPass];
+
+    for (uint32_t subpass = 0; subpass < pCreateInfo->subpassCount; ++subpass) {
+        bool uses_color = false;
+        for (uint32_t i = 0; i < pCreateInfo->pSubpasses[subpass].colorAttachmentCount && !uses_color; ++i)
+            if (pCreateInfo->pSubpasses[subpass].pColorAttachments[i].attachment != VK_ATTACHMENT_UNUSED) uses_color = true;
+
+        bool uses_depthstencil = false;
+        if (pCreateInfo->pSubpasses[subpass].pDepthStencilAttachment)
+            if (pCreateInfo->pSubpasses[subpass].pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED)
+                uses_depthstencil = true;
+
+        if (uses_color) renderpass_state.subpasses_using_color_attachment.insert(subpass);
+        if (uses_depthstencil) renderpass_state.subpasses_using_depthstencil_attachment.insert(subpass);
+    }
+}
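+// Note: the per-subpass attachment-usage sets recorded above are consumed by
+// DispatchCreateGraphicsPipelines (earlier in this preamble), which looks up
+// layer_data->renderpasses_states for each pipeline's renderPass to decide the
+// uses_color_attachment / uses_depthstencil_attachment flags passed to
+// safe_VkGraphicsPipelineCreateInfo::initialize().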
+
+VkResult DispatchCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
+                                  const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    VkResult result = layer_data->device_dispatch_table.CreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
+    if (!wrap_handles) return result;
+    if (VK_SUCCESS == result) {
+        write_lock_guard_t lock(dispatch_lock);
+        UpdateCreateRenderPassState(layer_data, pCreateInfo, *pRenderPass);
+        *pRenderPass = layer_data->WrapNew(*pRenderPass);
+    }
+    return result;
+}
+
+VkResult DispatchCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo,
+                                      const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    VkResult result = layer_data->device_dispatch_table.CreateRenderPass2KHR(device, pCreateInfo, pAllocator, pRenderPass);
+    if (!wrap_handles) return result;
+    if (VK_SUCCESS == result) {
+        write_lock_guard_t lock(dispatch_lock);
+        UpdateCreateRenderPassState(layer_data, pCreateInfo, *pRenderPass);
+        *pRenderPass = layer_data->WrapNew(*pRenderPass);
+    }
+    return result;
+}
+
+void DispatchDestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks *pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DestroyRenderPass(device, renderPass, pAllocator);
+    uint64_t renderPass_id = reinterpret_cast<uint64_t &>(renderPass);
+
+    auto iter = unique_id_mapping.pop(renderPass_id);
+    if (iter != unique_id_mapping.end()) {
+        renderPass = (VkRenderPass)iter->second;
+    } else {
+        renderPass = (VkRenderPass)0;
+    }
+
+    layer_data->device_dispatch_table.DestroyRenderPass(device, renderPass, pAllocator);
+
+    write_lock_guard_t lock(dispatch_lock);
+    layer_data->renderpasses_states.erase(renderPass);
+}
+
+VkResult DispatchCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
+                                    const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.CreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain);
+    safe_VkSwapchainCreateInfoKHR *local_pCreateInfo = NULL;
+    if (pCreateInfo) {
+        local_pCreateInfo = new safe_VkSwapchainCreateInfoKHR(pCreateInfo);
+        local_pCreateInfo->oldSwapchain = layer_data->Unwrap(pCreateInfo->oldSwapchain);
+        // Surface is instance-level object
+        local_pCreateInfo->surface = layer_data->Unwrap(pCreateInfo->surface);
+    }
+
+    VkResult result = layer_data->device_dispatch_table.CreateSwapchainKHR(device, local_pCreateInfo->ptr(), pAllocator, pSwapchain);
+    delete local_pCreateInfo;
+
+    if (VK_SUCCESS == result) {
+        *pSwapchain = layer_data->WrapNew(*pSwapchain);
+    }
+    return result;
+}
+
+VkResult DispatchCreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR *pCreateInfos,
+                                           const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchains) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles)
+        return layer_data->device_dispatch_table.CreateSharedSwapchainsKHR(device, swapchainCount, pCreateInfos, pAllocator,
+                                                                           pSwapchains);
+    safe_VkSwapchainCreateInfoKHR *local_pCreateInfos = NULL;
+    {
+        if (pCreateInfos) {
+            local_pCreateInfos = new safe_VkSwapchainCreateInfoKHR[swapchainCount];
+            for (uint32_t i = 0; i < swapchainCount; ++i) {
+                local_pCreateInfos[i].initialize(&pCreateInfos[i]);
+                if (pCreateInfos[i].surface) {
+                    // Surface is instance-level object
+                    local_pCreateInfos[i].surface = layer_data->Unwrap(pCreateInfos[i].surface);
+                }
+                if (pCreateInfos[i].oldSwapchain) {
+                    local_pCreateInfos[i].oldSwapchain = layer_data->Unwrap(pCreateInfos[i].oldSwapchain);
+                }
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.CreateSharedSwapchainsKHR(device, swapchainCount, local_pCreateInfos->ptr(),
+                                                                                  pAllocator, pSwapchains);
+    delete[] local_pCreateInfos;
+    if (VK_SUCCESS == result) {
+        for (uint32_t i = 0; i < swapchainCount; i++) {
+            pSwapchains[i] = layer_data->WrapNew(pSwapchains[i]);
+        }
+    }
+    return result;
+}
+
+VkResult DispatchGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
+                                       VkImage *pSwapchainImages) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles)
+        return layer_data->device_dispatch_table.GetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
+    VkSwapchainKHR wrapped_swapchain_handle = swapchain;
+    if (VK_NULL_HANDLE != swapchain) {
+        swapchain = layer_data->Unwrap(swapchain);
+    }
+    VkResult result =
+        layer_data->device_dispatch_table.GetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
+    if ((VK_SUCCESS == result) || (VK_INCOMPLETE == result)) {
+        if ((*pSwapchainImageCount > 0) && pSwapchainImages) {
+            write_lock_guard_t lock(dispatch_lock);
+            auto &wrapped_swapchain_image_handles = layer_data->swapchain_wrapped_image_handle_map[wrapped_swapchain_handle];
+            for (uint32_t i = static_cast<uint32_t>(wrapped_swapchain_image_handles.size()); i < *pSwapchainImageCount; i++) {
+                wrapped_swapchain_image_handles.emplace_back(layer_data->WrapNew(pSwapchainImages[i]));
+            }
+            for (uint32_t i = 0; i < *pSwapchainImageCount; i++) {
+                pSwapchainImages[i] = wrapped_swapchain_image_handles[i];
+            }
+        }
+    }
+    return result;
+}
+
+void DispatchDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks *pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DestroySwapchainKHR(device, swapchain, pAllocator);
+    write_lock_guard_t lock(dispatch_lock);
+
+    auto &image_array = layer_data->swapchain_wrapped_image_handle_map[swapchain];
+    for (auto &image_handle : image_array) {
+        unique_id_mapping.erase(HandleToUint64(image_handle));
+    }
+    layer_data->swapchain_wrapped_image_handle_map.erase(swapchain);
+    lock.unlock();
+
+    uint64_t swapchain_id = HandleToUint64(swapchain);
+
+    auto iter = unique_id_mapping.pop(swapchain_id);
+    if (iter != unique_id_mapping.end()) {
+        swapchain = (VkSwapchainKHR)iter->second;
+    } else {
+        swapchain = (VkSwapchainKHR)0;
+    }
+
+    layer_data->device_dispatch_table.DestroySwapchainKHR(device, swapchain, pAllocator);
+}
+
+VkResult DispatchQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.QueuePresentKHR(queue, pPresentInfo);
+    safe_VkPresentInfoKHR *local_pPresentInfo = NULL;
+    {
+        if (pPresentInfo) {
+            local_pPresentInfo = new safe_VkPresentInfoKHR(pPresentInfo);
+            if (local_pPresentInfo->pWaitSemaphores) {
+                for (uint32_t index1 = 0; index1 < local_pPresentInfo->waitSemaphoreCount; ++index1) {
+                    local_pPresentInfo->pWaitSemaphores[index1] = layer_data->Unwrap(pPresentInfo->pWaitSemaphores[index1]);
+                }
+            }
+            if (local_pPresentInfo->pSwapchains) {
+                for (uint32_t index1 = 0; index1 < local_pPresentInfo->swapchainCount; ++index1) {
+                    local_pPresentInfo->pSwapchains[index1] = layer_data->Unwrap(pPresentInfo->pSwapchains[index1]);
+                }
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.QueuePresentKHR(queue, local_pPresentInfo->ptr());
+
+    // pResults is an output array embedded in a structure. The code generator neglects to copy back from the safe_* version,
+    // so handle it as a special case here:
+    if (pPresentInfo && pPresentInfo->pResults) {
+        for (uint32_t i = 0; i < pPresentInfo->swapchainCount; i++) {
+            pPresentInfo->pResults[i] = local_pPresentInfo->pResults[i];
+        }
+    }
+    delete local_pPresentInfo;
+    return result;
+}
+
+void DispatchDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks *pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DestroyDescriptorPool(device, descriptorPool, pAllocator);
+    write_lock_guard_t lock(dispatch_lock);
+
+    // remove references to implicitly freed descriptor sets
+    for (auto descriptor_set : layer_data->pool_descriptor_sets_map[descriptorPool]) {
+        unique_id_mapping.erase(reinterpret_cast<uint64_t &>(descriptor_set));
+    }
+    layer_data->pool_descriptor_sets_map.erase(descriptorPool);
+    lock.unlock();
+
+    uint64_t descriptorPool_id = reinterpret_cast<uint64_t &>(descriptorPool);
+
+    auto iter = unique_id_mapping.pop(descriptorPool_id);
+    if (iter != unique_id_mapping.end()) {
+        descriptorPool = (VkDescriptorPool)iter->second;
+    } else {
+        descriptorPool = (VkDescriptorPool)0;
+    }
+
+    layer_data->device_dispatch_table.DestroyDescriptorPool(device, descriptorPool, pAllocator);
+}
+
+VkResult DispatchResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.ResetDescriptorPool(device, descriptorPool, flags);
+    VkDescriptorPool local_descriptor_pool = VK_NULL_HANDLE;
+    {
+        local_descriptor_pool = layer_data->Unwrap(descriptorPool);
+    }
+    VkResult result = layer_data->device_dispatch_table.ResetDescriptorPool(device, local_descriptor_pool, flags);
+    if (VK_SUCCESS == result) {
+        write_lock_guard_t lock(dispatch_lock);
+        // remove references to implicitly freed descriptor sets
+        for (auto descriptor_set : layer_data->pool_descriptor_sets_map[descriptorPool]) {
+            unique_id_mapping.erase(reinterpret_cast<uint64_t &>(descriptor_set));
+        }
+        layer_data->pool_descriptor_sets_map[descriptorPool].clear();
+    }
+
+    return result;
+}
+
+VkResult DispatchAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
+                                        VkDescriptorSet *pDescriptorSets) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.AllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
+    safe_VkDescriptorSetAllocateInfo *local_pAllocateInfo = NULL;
+    {
+        if (pAllocateInfo) {
+            local_pAllocateInfo = new safe_VkDescriptorSetAllocateInfo(pAllocateInfo);
+            if (pAllocateInfo->descriptorPool) {
+                local_pAllocateInfo->descriptorPool = layer_data->Unwrap(pAllocateInfo->descriptorPool);
+            }
+            if (local_pAllocateInfo->pSetLayouts) {
+                for (uint32_t index1 = 0; index1 < local_pAllocateInfo->descriptorSetCount; ++index1) {
+                    local_pAllocateInfo->pSetLayouts[index1] = layer_data->Unwrap(local_pAllocateInfo->pSetLayouts[index1]);
+                }
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.AllocateDescriptorSets(
+        device, (const VkDescriptorSetAllocateInfo *)local_pAllocateInfo, pDescriptorSets);
+    if (local_pAllocateInfo) {
+        delete local_pAllocateInfo;
+    }
+    if (VK_SUCCESS == result) {
+        write_lock_guard_t lock(dispatch_lock);
+        auto &pool_descriptor_sets = layer_data->pool_descriptor_sets_map[pAllocateInfo->descriptorPool];
+        for (uint32_t index0 = 0; index0 < pAllocateInfo->descriptorSetCount; index0++) {
+            pDescriptorSets[index0] = layer_data->WrapNew(pDescriptorSets[index0]);
+            pool_descriptor_sets.insert(pDescriptorSets[index0]);
+        }
+    }
+    return result;
+}
+
+VkResult DispatchFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount,
+                                    const VkDescriptorSet *pDescriptorSets) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles)
+        return layer_data->device_dispatch_table.FreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);
+    VkDescriptorSet *local_pDescriptorSets = NULL;
+    VkDescriptorPool local_descriptor_pool = VK_NULL_HANDLE;
+    {
+        local_descriptor_pool = layer_data->Unwrap(descriptorPool);
+        if (pDescriptorSets) {
+            local_pDescriptorSets = new VkDescriptorSet[descriptorSetCount];
+            for (uint32_t index0 = 0; index0 < descriptorSetCount; ++index0) {
+                local_pDescriptorSets[index0] = layer_data->Unwrap(pDescriptorSets[index0]);
+            }
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.FreeDescriptorSets(device, local_descriptor_pool, descriptorSetCount,
+                                                                           (const VkDescriptorSet *)local_pDescriptorSets);
+    if (local_pDescriptorSets) delete[] local_pDescriptorSets;
+    if ((VK_SUCCESS == result) && (pDescriptorSets)) {
+        write_lock_guard_t lock(dispatch_lock);
+        auto &pool_descriptor_sets = layer_data->pool_descriptor_sets_map[descriptorPool];
+        for (uint32_t index0 = 0; index0 < descriptorSetCount; index0++) {
+            VkDescriptorSet handle = pDescriptorSets[index0];
+            pool_descriptor_sets.erase(handle);
+            uint64_t unique_id = reinterpret_cast<uint64_t &>(handle);
+            unique_id_mapping.erase(unique_id);
+        }
+    }
+    return result;
+}
+
+// This is the core version of this routine.  The extension version is below.
+VkResult DispatchCreateDescriptorUpdateTemplate(VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
+                                                const VkAllocationCallbacks *pAllocator,
+                                                VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles)
+        return layer_data->device_dispatch_table.CreateDescriptorUpdateTemplate(device, pCreateInfo, pAllocator,
+                                                                                pDescriptorUpdateTemplate);
+    safe_VkDescriptorUpdateTemplateCreateInfo var_local_pCreateInfo;
+    safe_VkDescriptorUpdateTemplateCreateInfo *local_pCreateInfo = NULL;
+    if (pCreateInfo) {
+        local_pCreateInfo = &var_local_pCreateInfo;
+        local_pCreateInfo->initialize(pCreateInfo);
+        if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
+            local_pCreateInfo->descriptorSetLayout = layer_data->Unwrap(pCreateInfo->descriptorSetLayout);
+        }
+        if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR) {
+            local_pCreateInfo->pipelineLayout = layer_data->Unwrap(pCreateInfo->pipelineLayout);
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.CreateDescriptorUpdateTemplate(device, local_pCreateInfo->ptr(), pAllocator,
+                                                                                       pDescriptorUpdateTemplate);
+    if (VK_SUCCESS == result) {
+        *pDescriptorUpdateTemplate = layer_data->WrapNew(*pDescriptorUpdateTemplate);
+
+        // Shadow template createInfo for later updates
+        if (local_pCreateInfo) {
+            write_lock_guard_t lock(dispatch_lock);
+            std::unique_ptr<TEMPLATE_STATE> template_state(new TEMPLATE_STATE(*pDescriptorUpdateTemplate, local_pCreateInfo));
+            layer_data->desc_template_createinfo_map[(uint64_t)*pDescriptorUpdateTemplate] = std::move(template_state);
+        }
+    }
+    return result;
+}
+
+// This is the extension version of this routine.  The core version is above.
+VkResult DispatchCreateDescriptorUpdateTemplateKHR(VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
+                                                   const VkAllocationCallbacks *pAllocator,
+                                                   VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles)
+        return layer_data->device_dispatch_table.CreateDescriptorUpdateTemplateKHR(device, pCreateInfo, pAllocator,
+                                                                                   pDescriptorUpdateTemplate);
+    safe_VkDescriptorUpdateTemplateCreateInfo var_local_pCreateInfo;
+    safe_VkDescriptorUpdateTemplateCreateInfo *local_pCreateInfo = NULL;
+    if (pCreateInfo) {
+        local_pCreateInfo = &var_local_pCreateInfo;
+        local_pCreateInfo->initialize(pCreateInfo);
+        if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET) {
+            local_pCreateInfo->descriptorSetLayout = layer_data->Unwrap(pCreateInfo->descriptorSetLayout);
+        }
+        if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR) {
+            local_pCreateInfo->pipelineLayout = layer_data->Unwrap(pCreateInfo->pipelineLayout);
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.CreateDescriptorUpdateTemplateKHR(device, local_pCreateInfo->ptr(),
+                                                                                          pAllocator, pDescriptorUpdateTemplate);
+
+    if (VK_SUCCESS == result) {
+        *pDescriptorUpdateTemplate = layer_data->WrapNew(*pDescriptorUpdateTemplate);
+
+        // Shadow template createInfo for later updates
+        if (local_pCreateInfo) {
+            write_lock_guard_t lock(dispatch_lock);
+            std::unique_ptr<TEMPLATE_STATE> template_state(new TEMPLATE_STATE(*pDescriptorUpdateTemplate, local_pCreateInfo));
+            layer_data->desc_template_createinfo_map[(uint64_t)*pDescriptorUpdateTemplate] = std::move(template_state);
+        }
+    }
+    return result;
+}
+
+// This is the core version of this routine.  The extension version is below.
+void DispatchDestroyDescriptorUpdateTemplate(VkDevice device, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+                                             const VkAllocationCallbacks *pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles)
+        return layer_data->device_dispatch_table.DestroyDescriptorUpdateTemplate(device, descriptorUpdateTemplate, pAllocator);
+    write_lock_guard_t lock(dispatch_lock);
+    uint64_t descriptor_update_template_id = reinterpret_cast<uint64_t &>(descriptorUpdateTemplate);
+    layer_data->desc_template_createinfo_map.erase(descriptor_update_template_id);
+    lock.unlock();
+
+    auto iter = unique_id_mapping.pop(descriptor_update_template_id);
+    if (iter != unique_id_mapping.end()) {
+        descriptorUpdateTemplate = (VkDescriptorUpdateTemplate)iter->second;
+    } else {
+        descriptorUpdateTemplate = (VkDescriptorUpdateTemplate)0;
+    }
+
+    layer_data->device_dispatch_table.DestroyDescriptorUpdateTemplate(device, descriptorUpdateTemplate, pAllocator);
+}
+
+// This is the extension version of this routine.  The core version is above.
+void DispatchDestroyDescriptorUpdateTemplateKHR(VkDevice device, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+                                                const VkAllocationCallbacks *pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles)
+        return layer_data->device_dispatch_table.DestroyDescriptorUpdateTemplateKHR(device, descriptorUpdateTemplate, pAllocator);
+    write_lock_guard_t lock(dispatch_lock);
+    uint64_t descriptor_update_template_id = reinterpret_cast<uint64_t &>(descriptorUpdateTemplate);
+    layer_data->desc_template_createinfo_map.erase(descriptor_update_template_id);
+    lock.unlock();
+
+    auto iter = unique_id_mapping.pop(descriptor_update_template_id);
+    if (iter != unique_id_mapping.end()) {
+        descriptorUpdateTemplate = (VkDescriptorUpdateTemplate)iter->second;
+    } else {
+        descriptorUpdateTemplate = (VkDescriptorUpdateTemplate)0;
+    }
+
+    layer_data->device_dispatch_table.DestroyDescriptorUpdateTemplateKHR(device, descriptorUpdateTemplate, pAllocator);
+}
+
+void *BuildUnwrappedUpdateTemplateBuffer(ValidationObject *layer_data, uint64_t descriptorUpdateTemplate, const void *pData) {
+    auto const template_map_entry = layer_data->desc_template_createinfo_map.find(descriptorUpdateTemplate);
+    auto const &create_info = template_map_entry->second->create_info;
+    size_t allocation_size = 0;
+    std::vector<std::tuple<size_t, VulkanObjectType, uint64_t, size_t>> template_entries;
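+    // Each recorded entry is (offset into the flat update buffer, object type selecting the
+    // copy path in the loop at the end of this function, unwrapped pointer/handle encoded as
+    // uint64_t, byte count -- nonzero only for inline uniform block data, which is copied verbatim).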
+
+    for (uint32_t i = 0; i < create_info.descriptorUpdateEntryCount; i++) {
+        for (uint32_t j = 0; j < create_info.pDescriptorUpdateEntries[i].descriptorCount; j++) {
+            size_t offset = create_info.pDescriptorUpdateEntries[i].offset + j * create_info.pDescriptorUpdateEntries[i].stride;
+            char *update_entry = (char *)(pData) + offset;
+
+            switch (create_info.pDescriptorUpdateEntries[i].descriptorType) {
+                case VK_DESCRIPTOR_TYPE_SAMPLER:
+                case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+                case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+                case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
+                case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
+                    auto image_entry = reinterpret_cast<VkDescriptorImageInfo *>(update_entry);
+                    allocation_size = std::max(allocation_size, offset + sizeof(VkDescriptorImageInfo));
+
+                    VkDescriptorImageInfo *wrapped_entry = new VkDescriptorImageInfo(*image_entry);
+                    wrapped_entry->sampler = layer_data->Unwrap(image_entry->sampler);
+                    wrapped_entry->imageView = layer_data->Unwrap(image_entry->imageView);
+                    template_entries.emplace_back(offset, kVulkanObjectTypeImage, CastToUint64(wrapped_entry), 0);
+                } break;
+
+                case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+                case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+                case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+                case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
+                    auto buffer_entry = reinterpret_cast<VkDescriptorBufferInfo *>(update_entry);
+                    allocation_size = std::max(allocation_size, offset + sizeof(VkDescriptorBufferInfo));
+
+                    VkDescriptorBufferInfo *wrapped_entry = new VkDescriptorBufferInfo(*buffer_entry);
+                    wrapped_entry->buffer = layer_data->Unwrap(buffer_entry->buffer);
+                    template_entries.emplace_back(offset, kVulkanObjectTypeBuffer, CastToUint64(wrapped_entry), 0);
+                } break;
+
+                case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+                case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
+                    auto buffer_view_handle = reinterpret_cast<VkBufferView *>(update_entry);
+                    allocation_size = std::max(allocation_size, offset + sizeof(VkBufferView));
+
+                    VkBufferView wrapped_entry = layer_data->Unwrap(*buffer_view_handle);
+                    template_entries.emplace_back(offset, kVulkanObjectTypeBufferView, CastToUint64(wrapped_entry), 0);
+                } break;
+                case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: {
+                    size_t numBytes = create_info.pDescriptorUpdateEntries[i].descriptorCount;
+                    allocation_size = std::max(allocation_size, offset + numBytes);
+                    // nothing to unwrap, just plain data
+                    template_entries.emplace_back(offset, kVulkanObjectTypeUnknown, CastToUint64(update_entry),
+                                                  numBytes);
+                    // to break out of the loop
+                    j = create_info.pDescriptorUpdateEntries[i].descriptorCount;
+                } break;
+                default:
+                    assert(0);
+                    break;
+            }
+        }
+    }
+    // Allocate required buffer size and populate with source/unwrapped data
+    void *unwrapped_data = malloc(allocation_size);
+    for (auto &this_entry : template_entries) {
+        VulkanObjectType type = std::get<1>(this_entry);
+        void *destination = (char *)unwrapped_data + std::get<0>(this_entry);
+        uint64_t source = std::get<2>(this_entry);
+        size_t size = std::get<3>(this_entry);
+
+        if (size != 0) {
+            assert(type == kVulkanObjectTypeUnknown);
+            memcpy(destination, CastFromUint64<void *>(source), size);
+        } else {
+            switch (type) {
+                case kVulkanObjectTypeImage:
+                    *(reinterpret_cast<VkDescriptorImageInfo *>(destination)) =
+                        *(reinterpret_cast<VkDescriptorImageInfo *>(source));
+                    delete CastFromUint64<VkDescriptorImageInfo *>(source);
+                    break;
+                case kVulkanObjectTypeBuffer:
+                    *(reinterpret_cast<VkDescriptorBufferInfo *>(destination)) =
+                        *(CastFromUint64<VkDescriptorBufferInfo *>(source));
+                    delete CastFromUint64<VkDescriptorBufferInfo *>(source);
+                    break;
+                case kVulkanObjectTypeBufferView:
+                    *(reinterpret_cast<VkBufferView *>(destination)) = CastFromUint64<VkBufferView>(source);
+                    break;
+                default:
+                    assert(0);
+                    break;
+            }
+        }
+    }
+    return (void *)unwrapped_data;
+}
+
+void DispatchUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
+                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const void *pData) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles)
+        return layer_data->device_dispatch_table.UpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate,
+                                                                                 pData);
+    uint64_t template_handle = reinterpret_cast<uint64_t &>(descriptorUpdateTemplate);
+    void *unwrapped_buffer = nullptr;
+    {
+        read_lock_guard_t lock(dispatch_lock);
+        descriptorSet = layer_data->Unwrap(descriptorSet);
+        descriptorUpdateTemplate = (VkDescriptorUpdateTemplate)layer_data->Unwrap(descriptorUpdateTemplate);
+        unwrapped_buffer = BuildUnwrappedUpdateTemplateBuffer(layer_data, template_handle, pData);
+    }
+    layer_data->device_dispatch_table.UpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate, unwrapped_buffer);
+    free(unwrapped_buffer);
+}
+
+void DispatchUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
+                                                VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const void *pData) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles)
+        return layer_data->device_dispatch_table.UpdateDescriptorSetWithTemplateKHR(device, descriptorSet, descriptorUpdateTemplate,
+                                                                                    pData);
+    uint64_t template_handle = reinterpret_cast<uint64_t &>(descriptorUpdateTemplate);
+    void *unwrapped_buffer = nullptr;
+    {
+        read_lock_guard_t lock(dispatch_lock);
+        descriptorSet = layer_data->Unwrap(descriptorSet);
+        descriptorUpdateTemplate = layer_data->Unwrap(descriptorUpdateTemplate);
+        unwrapped_buffer = BuildUnwrappedUpdateTemplateBuffer(layer_data, template_handle, pData);
+    }
+    layer_data->device_dispatch_table.UpdateDescriptorSetWithTemplateKHR(device, descriptorSet, descriptorUpdateTemplate, unwrapped_buffer);
+    free(unwrapped_buffer);
+}
+
+void DispatchCmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer,
+                                                 VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout,
+                                                 uint32_t set, const void *pData) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
+    if (!wrap_handles)
+        return layer_data->device_dispatch_table.CmdPushDescriptorSetWithTemplateKHR(commandBuffer, descriptorUpdateTemplate,
+                                                                                     layout, set, pData);
+    uint64_t template_handle = reinterpret_cast<uint64_t &>(descriptorUpdateTemplate);
+    void *unwrapped_buffer = nullptr;
+    {
+        read_lock_guard_t lock(dispatch_lock);
+        descriptorUpdateTemplate = layer_data->Unwrap(descriptorUpdateTemplate);
+        layout = layer_data->Unwrap(layout);
+        unwrapped_buffer = BuildUnwrappedUpdateTemplateBuffer(layer_data, template_handle, pData);
+    }
+    layer_data->device_dispatch_table.CmdPushDescriptorSetWithTemplateKHR(commandBuffer, descriptorUpdateTemplate, layout, set,
+                                                                 unwrapped_buffer);
+    free(unwrapped_buffer);
+}
+
+VkResult DispatchGetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
+                                                       VkDisplayPropertiesKHR *pProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    VkResult result =
+        layer_data->instance_dispatch_table.GetPhysicalDeviceDisplayPropertiesKHR(physicalDevice, pPropertyCount, pProperties);
+    if (!wrap_handles) return result;
+    if ((result == VK_SUCCESS || result == VK_INCOMPLETE) && pProperties) {
+        for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
+            pProperties[idx0].display = layer_data->MaybeWrapDisplay(pProperties[idx0].display, layer_data);
+        }
+    }
+    return result;
+}
+
+VkResult DispatchGetPhysicalDeviceDisplayProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
+                                                        VkDisplayProperties2KHR *pProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    VkResult result =
+        layer_data->instance_dispatch_table.GetPhysicalDeviceDisplayProperties2KHR(physicalDevice, pPropertyCount, pProperties);
+    if (!wrap_handles) return result;
+    if ((result == VK_SUCCESS || result == VK_INCOMPLETE) && pProperties) {
+        for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
+            pProperties[idx0].displayProperties.display =
+                layer_data->MaybeWrapDisplay(pProperties[idx0].displayProperties.display, layer_data);
+        }
+    }
+    return result;
+}
+
+VkResult DispatchGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
+                                                            VkDisplayPlanePropertiesKHR *pProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    VkResult result =
+        layer_data->instance_dispatch_table.GetPhysicalDeviceDisplayPlanePropertiesKHR(physicalDevice, pPropertyCount, pProperties);
+    if (!wrap_handles) return result;
+    if ((result == VK_SUCCESS || result == VK_INCOMPLETE) && pProperties) {
+        for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
+            VkDisplayKHR &opt_display = pProperties[idx0].currentDisplay;
+            if (opt_display) opt_display = layer_data->MaybeWrapDisplay(opt_display, layer_data);
+        }
+    }
+    return result;
+}
+
+VkResult DispatchGetPhysicalDeviceDisplayPlaneProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
+                                                             VkDisplayPlaneProperties2KHR *pProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    VkResult result = layer_data->instance_dispatch_table.GetPhysicalDeviceDisplayPlaneProperties2KHR(physicalDevice,
+                                                                                                      pPropertyCount, pProperties);
+    if (!wrap_handles) return result;
+    if ((result == VK_SUCCESS || result == VK_INCOMPLETE) && pProperties) {
+        for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
+            VkDisplayKHR &opt_display = pProperties[idx0].displayPlaneProperties.currentDisplay;
+            if (opt_display) opt_display = layer_data->MaybeWrapDisplay(opt_display, layer_data);
+        }
+    }
+    return result;
+}
+
+VkResult DispatchGetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t *pDisplayCount,
+                                                     VkDisplayKHR *pDisplays) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    VkResult result = layer_data->instance_dispatch_table.GetDisplayPlaneSupportedDisplaysKHR(physicalDevice, planeIndex,
+                                                                                              pDisplayCount, pDisplays);
+    if (!wrap_handles) return result;
+    if ((result == VK_SUCCESS || result == VK_INCOMPLETE) && pDisplays) {
+        for (uint32_t i = 0; i < *pDisplayCount; ++i) {
+            if (pDisplays[i]) pDisplays[i] = layer_data->MaybeWrapDisplay(pDisplays[i], layer_data);
+        }
+    }
+    return result;
+}
+
+VkResult DispatchGetDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t *pPropertyCount,
+                                             VkDisplayModePropertiesKHR *pProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    if (!wrap_handles)
+        return layer_data->instance_dispatch_table.GetDisplayModePropertiesKHR(physicalDevice, display, pPropertyCount,
+                                                                               pProperties);
+    {
+        display = layer_data->Unwrap(display);
+    }
+
+    VkResult result = layer_data->instance_dispatch_table.GetDisplayModePropertiesKHR(physicalDevice, display, pPropertyCount, pProperties);
+    if ((result == VK_SUCCESS || result == VK_INCOMPLETE) && pProperties) {
+        for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
+            pProperties[idx0].displayMode = layer_data->WrapNew(pProperties[idx0].displayMode);
+        }
+    }
+    return result;
+}
+
+VkResult DispatchGetDisplayModeProperties2KHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t *pPropertyCount,
+                                              VkDisplayModeProperties2KHR *pProperties) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    if (!wrap_handles)
+        return layer_data->instance_dispatch_table.GetDisplayModeProperties2KHR(physicalDevice, display, pPropertyCount,
+                                                                                pProperties);
+    {
+        display = layer_data->Unwrap(display);
+    }
+
+    VkResult result =
+        layer_data->instance_dispatch_table.GetDisplayModeProperties2KHR(physicalDevice, display, pPropertyCount, pProperties);
+    if ((result == VK_SUCCESS || result == VK_INCOMPLETE) && pProperties) {
+        for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
+            pProperties[idx0].displayModeProperties.displayMode = layer_data->WrapNew(pProperties[idx0].displayModeProperties.displayMode);
+        }
+    }
+    return result;
+}
+
+VkResult DispatchDebugMarkerSetObjectTagEXT(VkDevice device, const VkDebugMarkerObjectTagInfoEXT *pTagInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DebugMarkerSetObjectTagEXT(device, pTagInfo);
+    safe_VkDebugMarkerObjectTagInfoEXT local_tag_info(pTagInfo);
+    {
+        auto it = unique_id_mapping.find(reinterpret_cast<uint64_t &>(local_tag_info.object));
+        if (it != unique_id_mapping.end()) {
+            local_tag_info.object = it->second;
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.DebugMarkerSetObjectTagEXT(
+        device, reinterpret_cast<VkDebugMarkerObjectTagInfoEXT *>(&local_tag_info));
+    return result;
+}
+
+VkResult DispatchDebugMarkerSetObjectNameEXT(VkDevice device, const VkDebugMarkerObjectNameInfoEXT *pNameInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.DebugMarkerSetObjectNameEXT(device, pNameInfo);
+    safe_VkDebugMarkerObjectNameInfoEXT local_name_info(pNameInfo);
+    {
+        auto it = unique_id_mapping.find(reinterpret_cast<uint64_t &>(local_name_info.object));
+        if (it != unique_id_mapping.end()) {
+            local_name_info.object = it->second;
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.DebugMarkerSetObjectNameEXT(
+        device, reinterpret_cast<VkDebugMarkerObjectNameInfoEXT *>(&local_name_info));
+    return result;
+}
+
+// VK_EXT_debug_utils
+VkResult DispatchSetDebugUtilsObjectTagEXT(VkDevice device, const VkDebugUtilsObjectTagInfoEXT *pTagInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.SetDebugUtilsObjectTagEXT(device, pTagInfo);
+    safe_VkDebugUtilsObjectTagInfoEXT local_tag_info(pTagInfo);
+    {
+        auto it = unique_id_mapping.find(reinterpret_cast<uint64_t &>(local_tag_info.objectHandle));
+        if (it != unique_id_mapping.end()) {
+            local_tag_info.objectHandle = it->second;
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.SetDebugUtilsObjectTagEXT(
+        device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>(&local_tag_info));
+    return result;
+}
+
+VkResult DispatchSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsObjectNameInfoEXT *pNameInfo) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!wrap_handles) return layer_data->device_dispatch_table.SetDebugUtilsObjectNameEXT(device, pNameInfo);
+    safe_VkDebugUtilsObjectNameInfoEXT local_name_info(pNameInfo);
+    {
+        auto it = unique_id_mapping.find(reinterpret_cast<uint64_t &>(local_name_info.objectHandle));
+        if (it != unique_id_mapping.end()) {
+            local_name_info.objectHandle = it->second;
+        }
+    }
+    VkResult result = layer_data->device_dispatch_table.SetDebugUtilsObjectNameEXT(
+        device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>(&local_name_info));
+    return result;
+}
+
+"""
+    # Separate generated text for source and headers
+    ALL_SECTIONS = ['source_file', 'header_file']
+    def __init__(self,
+                 errFile = sys.stderr,
+                 warnFile = sys.stderr,
+                 diagFile = sys.stdout):
+        OutputGenerator.__init__(self, errFile, warnFile, diagFile)
+        self.INDENT_SPACES = 4
+        self.instance_extensions = []
+        self.device_extensions = []
+        # Commands which are not autogenerated but still intercepted
+        self.no_autogen_list = [
+            'vkCreateInstance',
+            'vkDestroyInstance',
+            'vkCreateDevice',
+            'vkDestroyDevice',
+            'vkCreateSwapchainKHR',
+            'vkCreateSharedSwapchainsKHR',
+            'vkGetSwapchainImagesKHR',
+            'vkDestroySwapchainKHR',
+            'vkQueuePresentKHR',
+            'vkCreateGraphicsPipelines',
+            'vkResetDescriptorPool',
+            'vkDestroyDescriptorPool',
+            'vkAllocateDescriptorSets',
+            'vkFreeDescriptorSets',
+            'vkCreateDescriptorUpdateTemplate',
+            'vkCreateDescriptorUpdateTemplateKHR',
+            'vkDestroyDescriptorUpdateTemplate',
+            'vkDestroyDescriptorUpdateTemplateKHR',
+            'vkUpdateDescriptorSetWithTemplate',
+            'vkUpdateDescriptorSetWithTemplateKHR',
+            'vkCmdPushDescriptorSetWithTemplateKHR',
+            'vkDebugMarkerSetObjectTagEXT',
+            'vkDebugMarkerSetObjectNameEXT',
+            'vkCreateRenderPass',
+            'vkCreateRenderPass2KHR',
+            'vkDestroyRenderPass',
+            'vkSetDebugUtilsObjectNameEXT',
+            'vkSetDebugUtilsObjectTagEXT',
+            'vkGetPhysicalDeviceDisplayPropertiesKHR',
+            'vkGetPhysicalDeviceDisplayProperties2KHR',
+            'vkGetPhysicalDeviceDisplayPlanePropertiesKHR',
+            'vkGetPhysicalDeviceDisplayPlaneProperties2KHR',
+            'vkGetDisplayPlaneSupportedDisplaysKHR',
+            'vkGetDisplayModePropertiesKHR',
+            'vkGetDisplayModeProperties2KHR',
+            'vkEnumerateInstanceExtensionProperties',
+            'vkEnumerateInstanceLayerProperties',
+            'vkEnumerateDeviceExtensionProperties',
+            'vkEnumerateDeviceLayerProperties',
+            'vkEnumerateInstanceVersion',
+            ]
+        self.headerVersion = None
+        # Internal state - accumulators for different inner block text
+        self.sections = dict([(section, []) for section in self.ALL_SECTIONS])
+
+        self.cmdMembers = []
+        self.cmd_feature_protect = []  # Save ifdef's for each command
+        self.cmd_info_data = []        # Save the cmdinfo data for wrapping the handles when processing is complete
+        self.structMembers = []        # List of StructMemberData records for all Vulkan structs
+        self.extension_structs = []    # List of all structs or sister-structs containing handles
+                                       # A sister-struct may contain no handles but shares a structextends attribute with one that does
+        self.pnext_extension_structs = []    # List of all structs which can be extended by a pnext chain
+        self.structTypes = dict()      # Map of Vulkan struct typename to required VkStructureType
+        self.struct_member_dict = dict()
+        # Named tuples to store struct and command data
+        self.StructType = namedtuple('StructType', ['name', 'value'])
+        self.CmdMemberData = namedtuple('CmdMemberData', ['name', 'members'])
+        self.CmdInfoData = namedtuple('CmdInfoData', ['name', 'cmdinfo'])
+        self.CmdExtraProtect = namedtuple('CmdExtraProtect', ['name', 'extra_protect'])
+
+        self.CommandParam = namedtuple('CommandParam', ['type', 'name', 'ispointer', 'isconst', 'iscount', 'len', 'extstructs', 'cdecl', 'islocal', 'iscreate', 'isdestroy', 'feature_protect'])
+        self.StructMemberData = namedtuple('StructMemberData', ['name', 'members'])
+    #
+    def incIndent(self, indent):
+        inc = ' ' * self.INDENT_SPACES
+        if indent:
+            return indent + inc
+        return inc
+    #
+    def decIndent(self, indent):
+        if indent and (len(indent) > self.INDENT_SPACES):
+            return indent[:-self.INDENT_SPACES]
+        return ''
+    #
+    # Override makeProtoName to drop the "vk" prefix
+    def makeProtoName(self, name, tail):
+        return self.genOpts.apientry + name[2:] + tail
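Because of this override, the C declarations produced later by makeCDecls already name each command without its "vk" prefix, which is why the generated wrappers read Dispatch­CreateSampler rather than DispatchvkCreateSampler. A minimal standalone sketch of the transform (apientry shown empty for brevity; in real runs it is the calling-convention macro string):

    def make_proto_name(apientry, name, tail):
        # Drop the leading 'vk' so the chassis emits e.g. 'CreateSampler'.
        return apientry + name[2:] + tail

    print(make_proto_name('', 'vkCreateSampler', ''))  # -> CreateSampler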
+    #
+    # Check if the parameter passed in is a pointer to an array
+    def paramIsArray(self, param):
+        return param.attrib.get('len') is not None
+    #
+    def beginFile(self, genOpts):
+        OutputGenerator.beginFile(self, genOpts)
+        # Initialize members that require the tree
+        self.handle_types = GetHandleTypes(self.registry.tree)
+        self.type_categories = GetTypeCategories(self.registry.tree)
+        # Output Copyright
+        self.appendSection('header_file', self.inline_copyright_message)
+        # Multiple inclusion protection & C++ namespace.
+        self.header = False
+        if (self.genOpts.filename and 'h' == self.genOpts.filename[-1]):
+            self.header = True
+            self.appendSection('header_file', '#pragma once')
+            self.appendSection('header_file', '')
+            self.appendSection('header_file', '#if defined(LAYER_CHASSIS_CAN_WRAP_HANDLES)')
+            self.appendSection('header_file', 'extern bool wrap_handles;')
+            self.appendSection('header_file', '#else')
+            self.appendSection('header_file', 'extern bool wrap_handles;')
+            self.appendSection('header_file', '#endif')
+
+    # Now that the data is all collected and complete, generate and output the wrapping/unwrapping routines
+    def endFile(self):
+        self.struct_member_dict = dict(self.structMembers)
+        # Generate the list of APIs that might need to handle wrapped extension structs
+        self.GenerateCommandWrapExtensionList()
+        # Write out wrapping/unwrapping functions
+        self.WrapCommands()
+        # Build and write out pNext processing function
+        extension_proc = self.build_extension_processing_func()
+
+        if not self.header:
+            write(self.inline_copyright_message, file=self.outFile)
+            self.newline()
+            write('#include <mutex>', file=self.outFile)
+            write('#include "chassis.h"', file=self.outFile)
+            write('#include "layer_chassis_dispatch.h"', file=self.outFile)
+            write('#include "vk_layer_utils.h"', file=self.outFile)
+            self.newline()
+            write('// This intentionally includes a cpp file', file=self.outFile)
+            write('#include "vk_safe_struct.cpp"', file=self.outFile)
+            self.newline()
+            write('ReadWriteLock dispatch_lock;', file=self.outFile)
+            self.newline()
+            write('// Unique Objects pNext extension handling function', file=self.outFile)
+            write('%s' % extension_proc, file=self.outFile)
+            self.newline()
+            write('// Manually written Dispatch routines', file=self.outFile)
+            write('%s' % self.inline_custom_source_preamble, file=self.outFile)
+            self.newline()
+            if (self.sections['source_file']):
+                write('\n'.join(self.sections['source_file']), end=u'', file=self.outFile)
+        else:
+            self.newline()
+            if (self.sections['header_file']):
+                write('\n'.join(self.sections['header_file']), end=u'', file=self.outFile)
+
+        # Finish processing in superclass
+        OutputGenerator.endFile(self)
+    #
+    def beginFeature(self, interface, emit):
+        # Start processing in superclass
+        OutputGenerator.beginFeature(self, interface, emit)
+        self.headerVersion = None
+        self.featureExtraProtect = GetFeatureProtect(interface)
+        if self.featureName != 'VK_VERSION_1_0' and self.featureName != 'VK_VERSION_1_1':
+            white_list_entry = []
+            if (self.featureExtraProtect is not None):
+                white_list_entry += [ '#ifdef %s' % self.featureExtraProtect ]
+            white_list_entry += [ '"%s"' % self.featureName ]
+            if (self.featureExtraProtect is not None):
+                white_list_entry += [ '#endif' ]
+            featureType = interface.get('type')
+            if featureType == 'instance':
+                self.instance_extensions += white_list_entry
+            elif featureType == 'device':
+                self.device_extensions += white_list_entry
+    #
+    def endFeature(self):
+        # Finish processing in superclass
+        OutputGenerator.endFeature(self)
+    #
+    def genType(self, typeinfo, name, alias):
+        OutputGenerator.genType(self, typeinfo, name, alias)
+        typeElem = typeinfo.elem
+        # If the type is a struct type, traverse the imbedded <member> tags generating a structure.
+        # Otherwise, emit the tag text.
+        category = typeElem.get('category')
+        if (category == 'struct' or category == 'union'):
+            self.genStruct(typeinfo, name, alias)
+    #
+    # Append a definition to the specified section
+    def appendSection(self, section, text):
+        # self.sections[section].append('SECTION: ' + section + '\n')
+        self.sections[section].append(text)
+    #
+    # Check if the parameter passed in is a pointer
+    def paramIsPointer(self, param):
+        ispointer = False
+        for elem in param:
+            if elem.tag == 'type' and elem.tail is not None and '*' in elem.tail:
+                ispointer = True
+        return ispointer
+    #
+    # Retrieve the type and name for a parameter
+    def getTypeNameTuple(self, param):
+        type = ''
+        name = ''
+        for elem in param:
+            if elem.tag == 'type':
+                type = noneStr(elem.text)
+            elif elem.tag == 'name':
+                name = noneStr(elem.text)
+        return (type, name)
+    #
+    # Retrieve the value of the len tag
+    def getLen(self, param):
+        result = None
+        len = param.attrib.get('len')
+        if len and len != 'null-terminated':
+            # For string arrays, 'len' can look like 'count,null-terminated', indicating that we
+            # have a null terminated array of strings.  We strip the null-terminated from the
+            # 'len' field and only return the parameter specifying the string count
+            if 'null-terminated' in len:
+                result = len.split(',')[0]
+            else:
+                result = len
+            # The spec now uses '::' notation for len attributes instead of a platform-specific pointer symbol
+            result = str(result).replace('::', '->')
+        return result
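The comment above describes two quirks of the registry's len attribute: string arrays carry a trailing "null-terminated" term, and nested counts use "::". A minimal standalone sketch of the same parsing, operating on a bare attribute string rather than an XML element (illustrative only, not part of the patch; the sample attribute values are taken from vk.xml):

    def get_len(len_attr):
        # Drop 'null-terminated' and rewrite '::' as '->', mirroring getLen().
        if not len_attr or len_attr == 'null-terminated':
            return None
        if 'null-terminated' in len_attr:
            result = len_attr.split(',')[0]
        else:
            result = len_attr
        return result.replace('::', '->')

    print(get_len('enabledLayerCount,null-terminated'))   # -> enabledLayerCount
    print(get_len('pAllocateInfo::descriptorSetCount'))   # -> pAllocateInfo->descriptorSetCount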
+    #
+    # Generate a VkStructureType based on a structure typename
+    def genVkStructureType(self, typename):
+        # Add underscore between lowercase then uppercase
+        value = re.sub('([a-z0-9])([A-Z])', r'\1_\2', typename)
+        # Change to uppercase
+        value = value.upper()
+        # Add STRUCTURE_TYPE_
+        return re.sub('VK_', 'VK_STRUCTURE_TYPE_', value)
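This fallback only runs when the sType value cannot be recovered from the XML comment. A standalone sketch of the transform with one example input (note the simple heuristic does not cover every vendor-suffix or digit-heavy name):

    import re

    def gen_vk_structure_type(typename):
        # CamelCase -> SNAKE_CASE, then splice in STRUCTURE_TYPE, as genVkStructureType does.
        value = re.sub('([a-z0-9])([A-Z])', r'\1_\2', typename).upper()
        return re.sub('VK_', 'VK_STRUCTURE_TYPE_', value)

    print(gen_vk_structure_type('VkImageCreateInfo'))
    # -> VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO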
+    #
+    # Struct parameter check generation.
+    # This is a special case of the <type> tag where the contents are interpreted as a set of
+    # <member> tags instead of freeform C type declarations. The <member> tags are just like
+    # <param> tags - they are a declaration of a struct or union member. Only simple member
+    # declarations are supported (no nested structs etc.)
+    def genStruct(self, typeinfo, typeName, alias):
+        OutputGenerator.genStruct(self, typeinfo, typeName, alias)
+        members = typeinfo.elem.findall('.//member')
+        # Iterate over members once to get length parameters for arrays
+        lens = set()
+        for member in members:
+            len = self.getLen(member)
+            if len:
+                lens.add(len)
+        # Generate member info
+        membersInfo = []
+        for member in members:
+            # Get the member's type and name
+            info = self.getTypeNameTuple(member)
+            type = info[0]
+            name = info[1]
+            cdecl = self.makeCParamDecl(member, 0)
+            # Process VkStructureType
+            if type == 'VkStructureType':
+                # Extract the required struct type value from the comments
+                # embedded in the original text defining the 'typeinfo' element
+                rawXml = etree.tostring(typeinfo.elem).decode('ascii')
+                result = re.search(r'VK_STRUCTURE_TYPE_\w+', rawXml)
+                if result:
+                    value = result.group(0)
+                else:
+                    value = self.genVkStructureType(typeName)
+                # Store the required type value
+                self.structTypes[typeName] = self.StructType(name=name, value=value)
+            # Store pointer/array/string info
+            extstructs = self.registry.validextensionstructs[typeName] if name == 'pNext' else None
+            membersInfo.append(self.CommandParam(type=type,
+                                                 name=name,
+                                                 ispointer=self.paramIsPointer(member),
+                                                 isconst=True if 'const' in cdecl else False,
+                                                 iscount=True if name in lens else False,
+                                                 len=self.getLen(member),
+                                                 extstructs=extstructs,
+                                                 cdecl=cdecl,
+                                                 islocal=False,
+                                                 iscreate=False,
+                                                 isdestroy=False,
+                                                 feature_protect=self.featureExtraProtect))
+        self.structMembers.append(self.StructMemberData(name=typeName, members=membersInfo))
+
+    #
+    # Determine if a struct has an NDO as a member or an embedded member
+    def struct_contains_ndo(self, struct_item):
+        struct_member_dict = dict(self.structMembers)
+        struct_members = struct_member_dict[struct_item]
+
+        for member in struct_members:
+            if self.handle_types.IsNonDispatchable(member.type):
+                return True
+            elif member.type in struct_member_dict:
+                if self.struct_contains_ndo(member.type) == True:
+                    return True
+        return False
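The recursion above answers "does this struct contain a non-dispatchable handle at any nesting depth?". A toy standalone model of the same walk, with simplified member lists (the real data comes from vk.xml, so these struct definitions are illustrative only):

    # 'structs' maps a struct name to its member types; 'ndo_types' is the set
    # of non-dispatchable handle types.
    ndo_types = {'VkBuffer', 'VkSampler'}
    structs = {
        'VkDescriptorBufferInfo': ['VkBuffer', 'VkDeviceSize', 'VkDeviceSize'],
        'VkWriteDescriptorSet':   ['VkStructureType', 'void*', 'VkDescriptorBufferInfo'],
        'VkViewport':             ['float', 'float'],
    }

    def contains_ndo(name):
        for member_type in structs.get(name, []):
            if member_type in ndo_types:
                return True
            if member_type in structs and contains_ndo(member_type):
                return True
        return False

    print(contains_ndo('VkWriteDescriptorSet'))  # True (via VkDescriptorBufferInfo -> VkBuffer)
    print(contains_ndo('VkViewport'))            # False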
+    #
+    # Return list of struct members which contain, or which sub-structures contain
+    # an NDO in a given list of parameters or members
+    def getParmeterStructsWithNdos(self, item_list):
+        struct_list = set()
+        for item in item_list:
+            paramtype = item.find('type')
+            typecategory = self.type_categories[paramtype.text]
+            if typecategory == 'struct':
+                if self.struct_contains_ndo(paramtype.text) == True:
+                    struct_list.add(item)
+        return struct_list
+    #
+    # Return list of non-dispatchable objects from a given list of parameters or members
+    def getNdosInParameterList(self, item_list, create_func):
+        ndo_list = set()
+        if create_func == True:
+            member_list = item_list[0:-1]
+        else:
+            member_list = item_list
+        for item in member_list:
+            paramtype = item.find('type')
+            if self.handle_types.IsNonDispatchable(paramtype.text):
+                ndo_list.add(item)
+        return ndo_list
+    #
+    # Construct list of extension structs containing handles, or extension structs that share a structextends attribute
+    # WITH an extension struct containing handles. All extension structs in any pNext chain will have to be copied.
+    # TODO: make this recursive -- structs buried three or more levels deep are not searched for extensions
+    def GenerateCommandWrapExtensionList(self):
+        for struct in self.structMembers:
+            if (len(struct.members) > 1) and struct.members[1].extstructs is not None:
+                found = False;
+                for item in struct.members[1].extstructs:
+                    if item != '' and item not in self.pnext_extension_structs:
+                        self.pnext_extension_structs.append(item)
+                    if item != '' and self.struct_contains_ndo(item) == True:
+                        found = True
+                if found == True:
+                    for item in struct.members[1].extstructs:
+                        if item != '' and item not in self.extension_structs:
+                            self.extension_structs.append(item)
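The "sister-struct" rule above means: if any struct in a structextends group contains a handle, every struct in that group is treated as wrappable, because a single pNext chain may mix them. A toy standalone model of that grouping (struct names below are invented for illustration):

    groups = {
        # structextends target -> [(extension struct name, contains_handle)]
        'VkSubmitInfo':      [('VkExtA', False), ('VkExtB', True), ('VkExtC', False)],
        'VkImageCreateInfo': [('VkExtD', False)],
    }

    extension_structs = []
    for members in groups.values():
        # One handle-bearing member taints the whole group.
        if any(has_handle for _, has_handle in members):
            extension_structs.extend(name for name, _ in members)

    print(extension_structs)  # ['VkExtA', 'VkExtB', 'VkExtC']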
+    #
+    # Returns True if a struct may have a pNext chain containing an NDO
+    def StructWithExtensions(self, struct_type):
+        if struct_type in self.struct_member_dict:
+            param_info = self.struct_member_dict[struct_type]
+            if (len(param_info) > 1) and param_info[1].extstructs is not None:
+                for item in param_info[1].extstructs:
+                    if item in self.extension_structs:
+                        return True
+        return False
+    #
+    # Generate pNext handling function
+    def build_extension_processing_func(self):
+        # Construct helper functions to build and free pNext extension chains
+        pnext_proc = ''
+        pnext_proc += 'void WrapPnextChainHandles(ValidationObject *layer_data, const void *pNext) {\n'
+        pnext_proc += '    void *cur_pnext = const_cast<void *>(pNext);\n'
+        pnext_proc += '    while (cur_pnext != NULL) {\n'
+        pnext_proc += '        VkBaseOutStructure *header = reinterpret_cast<VkBaseOutStructure *>(cur_pnext);\n\n'
+        pnext_proc += '        switch (header->sType) {\n'
+        for item in self.pnext_extension_structs:
+            struct_info = self.struct_member_dict[item]
+            indent = '                '
+            (tmp_decl, tmp_pre, tmp_post) = self.uniquify_members(struct_info, indent, 'safe_struct->', 0, False, False, False, False)
+            # Only process extension structs containing handles
+            if not tmp_pre:
+                continue
+            if struct_info[0].feature_protect is not None:
+                pnext_proc += '#ifdef %s \n' % struct_info[0].feature_protect
+            pnext_proc += '            case %s: {\n' % self.structTypes[item].value
+            pnext_proc += '                    safe_%s *safe_struct = reinterpret_cast<safe_%s *>(cur_pnext);\n' % (item, item)
+            # Generate code to unwrap the handles
+            pnext_proc += tmp_pre
+            pnext_proc += '                } break;\n'
+            if struct_info[0].feature_protect is not None:
+                pnext_proc += '#endif // %s \n' % struct_info[0].feature_protect
+            pnext_proc += '\n'
+        pnext_proc += '            default:\n'
+        pnext_proc += '                break;\n'
+        pnext_proc += '        }\n\n'
+        pnext_proc += '        // Process the next structure in the chain\n'
+        pnext_proc += '        cur_pnext = header->pNext;\n'
+        pnext_proc += '    }\n'
+        pnext_proc += '}\n'
+        return pnext_proc
+
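The emitted C++ walks every structure in a pNext chain and rewrites the handles it recognizes through unique_id_mapping. A purely conceptual Python model of that walk, with dictionaries standing in for the extension structs (the real code switches on sType and operates on the safe_* copies):

    unique_id_mapping = {0x1111: 0xAAAA}   # wrapped id -> driver handle (toy values)

    chain = {'sType': 'EXT_A', 'handle': 0x1111,
             'pNext': {'sType': 'EXT_B', 'pNext': None}}

    node = chain
    while node is not None:
        # Only structs known to carry handles are touched, mirroring the generated switch.
        if 'handle' in node:
            node['handle'] = unique_id_mapping.get(node['handle'], node['handle'])
        node = node['pNext']

    print(hex(chain['handle']))  # 0xaaaa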
+    #
+    # Generate source for creating a non-dispatchable object
+    def generate_create_ndo_code(self, indent, proto, params, cmd_info):
+        create_ndo_code = ''
+        handle_type = params[-1].find('type')
+        if self.handle_types.IsNonDispatchable(handle_type.text):
+            # Check for special case where multiple handles are returned
+            ndo_array = False
+            if cmd_info[-1].len is not None:
+                ndo_array = True;
+            handle_name = params[-1].find('name')
+            # Special case return value handling for the createpipeline APIs
+            is_create_pipelines = ('CreateGraphicsPipelines' in proto.text) or ('CreateComputePipelines' in proto.text) or ('CreateRayTracingPipelines' in proto.text)
+            if is_create_pipelines:
+                create_ndo_code += '%s{\n' % (indent)
+            else:
+                create_ndo_code += '%sif (VK_SUCCESS == result) {\n' % (indent)
+            indent = self.incIndent(indent)
+            ndo_dest = '*%s' % handle_name.text
+            if ndo_array == True:
+                create_ndo_code += '%sfor (uint32_t index0 = 0; index0 < %s; index0++) {\n' % (indent, cmd_info[-1].len)
+                indent = self.incIndent(indent)
+                ndo_dest = '%s[index0]' % cmd_info[-1].name
+                if is_create_pipelines:
+                    create_ndo_code += '%sif (%s != VK_NULL_HANDLE) {\n' % (indent, ndo_dest)
+                    indent = self.incIndent(indent)
+            create_ndo_code += '%s%s = layer_data->WrapNew(%s);\n' % (indent, ndo_dest, ndo_dest)
+            if ndo_array == True:
+                if is_create_pipelines:
+                    indent = self.decIndent(indent)
+                    create_ndo_code += '%s}\n' % indent
+                indent = self.decIndent(indent)
+                create_ndo_code += '%s}\n' % indent
+            indent = self.decIndent(indent)
+            create_ndo_code += '%s}\n' % (indent)
+        return create_ndo_code
+    #
+    # Generate source for destroying a non-dispatchable object
+    def generate_destroy_ndo_code(self, indent, proto, cmd_info):
+        destroy_ndo_code = ''
+        ndo_array = False
+        if True in [destroy_txt in proto.text for destroy_txt in ['Destroy', 'Free']]:
+            # Check for special case where multiple handles are returned
+            if cmd_info[-1].len is not None:
+                ndo_array = True;
+                param = -1
+            else:
+                param = -2
+            if self.handle_types.IsNonDispatchable(cmd_info[param].type):
+                if ndo_array == True:
+                    # This API is freeing an array of handles.  Remove them from the unique_id map.
+                    destroy_ndo_code += '%sif ((VK_SUCCESS == result) && (%s)) {\n' % (indent, cmd_info[param].name)
+                    indent = self.incIndent(indent)
+                    destroy_ndo_code += '%sfor (uint32_t index0 = 0; index0 < %s; index0++) {\n' % (indent, cmd_info[param].len)
+                    indent = self.incIndent(indent)
+                    destroy_ndo_code += '%s%s handle = %s[index0];\n' % (indent, cmd_info[param].type, cmd_info[param].name)
+                    destroy_ndo_code += '%suint64_t unique_id = reinterpret_cast<uint64_t &>(handle);\n' % (indent)
+                    destroy_ndo_code += '%sunique_id_mapping.erase(unique_id);\n' % (indent)
+                    indent = self.decIndent(indent);
+                    destroy_ndo_code += '%s}\n' % indent
+                    indent = self.decIndent(indent);
+                    destroy_ndo_code += '%s}\n' % indent
+                else:
+                    # Remove a single handle from the map
+                    destroy_ndo_code += '%suint64_t %s_id = reinterpret_cast<uint64_t &>(%s);\n' % (indent, cmd_info[param].name, cmd_info[param].name)
+                    destroy_ndo_code += '%sauto iter = unique_id_mapping.pop(%s_id);\n' % (indent, cmd_info[param].name)
+                    destroy_ndo_code += '%sif (iter != unique_id_mapping.end()) {\n' % (indent)
+                    indent = self.incIndent(indent)
+                    destroy_ndo_code += '%s%s = (%s)iter->second;\n' % (indent, cmd_info[param].name, cmd_info[param].type)
+                    indent = self.decIndent(indent);
+                    destroy_ndo_code += '%s} else {\n' % (indent)
+                    indent = self.incIndent(indent)
+                    destroy_ndo_code += '%s%s = (%s)0;\n' % (indent, cmd_info[param].name, cmd_info[param].type)
+                    indent = self.decIndent(indent);
+                    destroy_ndo_code += '%s}\n' % (indent)
+
+        return ndo_array, destroy_ndo_code
+
+    #
+    # Clean up local declarations
+    def cleanUpLocalDeclarations(self, indent, prefix, name, len, index):
+        cleanup = ''
+        if len is not None:
+            cleanup = '%sif (local_%s%s) {\n' % (indent, prefix, name)
+            cleanup += '%s    delete[] local_%s%s;\n' % (indent, prefix, name)
+            cleanup += "%s}\n" % (indent)
+        return cleanup
+    #
+    # Output UO code for a single NDO (ndo_count is NULL) or a counted list of NDOs
+    def outputNDOs(self, ndo_type, ndo_name, ndo_count, prefix, index, indent, destroy_func, destroy_array, top_level):
+        decl_code = ''
+        pre_call_code = ''
+        post_call_code = ''
+        if ndo_count is not None:
+            if top_level == True:
+                decl_code += '%s%s var_local_%s%s[DISPATCH_MAX_STACK_ALLOCATIONS];\n' % (indent, ndo_type, prefix, ndo_name)
+                decl_code += '%s%s *local_%s%s = NULL;\n' % (indent, ndo_type, prefix, ndo_name)
+            pre_call_code += '%s    if (%s%s) {\n' % (indent, prefix, ndo_name)
+            indent = self.incIndent(indent)
+            if top_level == True:
+                pre_call_code += '%s    local_%s%s = %s > DISPATCH_MAX_STACK_ALLOCATIONS ? new %s[%s] : var_local_%s%s;\n' % (indent, prefix, ndo_name, ndo_count, ndo_type, ndo_count, prefix, ndo_name)
+                pre_call_code += '%s    for (uint32_t %s = 0; %s < %s; ++%s) {\n' % (indent, index, index, ndo_count, index)
+                indent = self.incIndent(indent)
+                pre_call_code += '%s    local_%s%s[%s] = layer_data->Unwrap(%s[%s]);\n' % (indent, prefix, ndo_name, index, ndo_name, index)
+            else:
+                pre_call_code += '%s    for (uint32_t %s = 0; %s < %s; ++%s) {\n' % (indent, index, index, ndo_count, index)
+                indent = self.incIndent(indent)
+                pre_call_code += '%s    %s%s[%s] = layer_data->Unwrap(%s%s[%s]);\n' % (indent, prefix, ndo_name, index, prefix, ndo_name, index)
+            indent = self.decIndent(indent)
+            pre_call_code += '%s    }\n' % indent
+            indent = self.decIndent(indent)
+            pre_call_code += '%s    }\n' % indent
+            if top_level == True:
+                post_call_code += '%sif (local_%s%s != var_local_%s%s)\n' % (indent, prefix, ndo_name, prefix, ndo_name)
+                indent = self.incIndent(indent)
+                post_call_code += '%sdelete[] local_%s;\n' % (indent, ndo_name)
+        else:
+            if top_level == True:
+                if (destroy_func == False) or (destroy_array == True):
+                    pre_call_code += '%s    %s = layer_data->Unwrap(%s);\n' % (indent, ndo_name, ndo_name)
+            else:
+                # Make temp copy of this var with the 'local' removed. It may be better to not pass in 'local_'
+                # as part of the string and explicitly print it
+                fix = str(prefix).strip('local_');
+                pre_call_code += '%s    if (%s%s) {\n' % (indent, fix, ndo_name)
+                indent = self.incIndent(indent)
+                pre_call_code += '%s    %s%s = layer_data->Unwrap(%s%s);\n' % (indent, prefix, ndo_name, fix, ndo_name)
+                indent = self.decIndent(indent)
+                pre_call_code += '%s    }\n' % indent
+        return decl_code, pre_call_code, post_call_code
+    #
+    # first_level_param indicates whether elements are passed directly into the function; otherwise they sit below a pointer or struct
+    # create_func means that this API creates or allocates NDOs
+    # destroy_func indicates that this API destroys or frees NDOs
+    # destroy_array means that the destroy_func operates on an array of NDOs
+    def uniquify_members(self, members, indent, prefix, array_index, create_func, destroy_func, destroy_array, first_level_param):
+        decls = ''
+        pre_code = ''
+        post_code = ''
+        index = 'index%s' % str(array_index)
+        array_index += 1
+        # Process any NDOs in this structure and recurse for any sub-structs in this struct
+        for member in members:
+            process_pnext = self.StructWithExtensions(member.type)
+            # Handle NDOs
+            if self.handle_types.IsNonDispatchable(member.type):
+                count_name = member.len
+                if (count_name is not None):
+                    if first_level_param == False:
+                        count_name = '%s%s' % (prefix, member.len)
+
+                if (first_level_param == False) or (create_func == False) or (not '*' in member.cdecl):
+                    (tmp_decl, tmp_pre, tmp_post) = self.outputNDOs(member.type, member.name, count_name, prefix, index, indent, destroy_func, destroy_array, first_level_param)
+                    decls += tmp_decl
+                    pre_code += tmp_pre
+                    post_code += tmp_post
+            # Handle Structs that contain NDOs at some level
+            elif member.type in self.struct_member_dict:
+                # Structs at first level will have an NDO, OR, we need a safe_struct for the pnext chain
+                if self.struct_contains_ndo(member.type) == True or process_pnext:
+                    struct_info = self.struct_member_dict[member.type]
+                    # TODO (jbolz): Can this use paramIsPointer?
+                    ispointer = '*' in member.cdecl;
+                    # Struct Array
+                    if member.len is not None:
+                        # Update struct prefix
+                        if first_level_param == True:
+                            new_prefix = 'local_%s' % member.name
+                            # Declare safe_VarType for struct
+                            decls += '%ssafe_%s *%s = NULL;\n' % (indent, member.type, new_prefix)
+                        else:
+                            new_prefix = '%s%s' % (prefix, member.name)
+                        pre_code += '%s    if (%s%s) {\n' % (indent, prefix, member.name)
+                        indent = self.incIndent(indent)
+                        if first_level_param == True:
+                            pre_code += '%s    %s = new safe_%s[%s];\n' % (indent, new_prefix, member.type, member.len)
+                        pre_code += '%s    for (uint32_t %s = 0; %s < %s%s; ++%s) {\n' % (indent, index, index, prefix, member.len, index)
+                        indent = self.incIndent(indent)
+                        if first_level_param == True:
+                            pre_code += '%s    %s[%s].initialize(&%s[%s]);\n' % (indent, new_prefix, index, member.name, index)
+                            if process_pnext:
+                                pre_code += '%s    WrapPnextChainHandles(layer_data, %s[%s].pNext);\n' % (indent, new_prefix, index)
+                        local_prefix = '%s[%s].' % (new_prefix, index)
+                        # Process sub-structs in this struct
+                        (tmp_decl, tmp_pre, tmp_post) = self.uniquify_members(struct_info, indent, local_prefix, array_index, create_func, destroy_func, destroy_array, False)
+                        decls += tmp_decl
+                        pre_code += tmp_pre
+                        post_code += tmp_post
+                        indent = self.decIndent(indent)
+                        pre_code += '%s    }\n' % indent
+                        indent = self.decIndent(indent)
+                        pre_code += '%s    }\n' % indent
+                        if first_level_param == True:
+                            post_code += self.cleanUpLocalDeclarations(indent, prefix, member.name, member.len, index)
+                    # Single Struct
+                    elif ispointer:
+                        # Update struct prefix
+                        if first_level_param == True:
+                            new_prefix = 'local_%s->' % member.name
+                            decls += '%ssafe_%s var_local_%s%s;\n' % (indent, member.type, prefix, member.name)
+                            decls += '%ssafe_%s *local_%s%s = NULL;\n' % (indent, member.type, prefix, member.name)
+                        else:
+                            new_prefix = '%s%s->' % (prefix, member.name)
+                        # Declare safe_VarType for struct
+                        pre_code += '%s    if (%s%s) {\n' % (indent, prefix, member.name)
+                        indent = self.incIndent(indent)
+                        if first_level_param == True:
+                            pre_code += '%s    local_%s%s = &var_local_%s%s;\n' % (indent, prefix, member.name, prefix, member.name);
+                            pre_code += '%s    local_%s%s->initialize(%s);\n' % (indent, prefix, member.name, member.name)
+                        # Process sub-structs in this struct
+                        (tmp_decl, tmp_pre, tmp_post) = self.uniquify_members(struct_info, indent, new_prefix, array_index, create_func, destroy_func, destroy_array, False)
+                        decls += tmp_decl
+                        pre_code += tmp_pre
+                        post_code += tmp_post
+                        if process_pnext:
+                            pre_code += '%s    WrapPnextChainHandles(layer_data, local_%s%s->pNext);\n' % (indent, prefix, member.name)
+                        indent = self.decIndent(indent)
+                        pre_code += '%s    }\n' % indent
+                        if first_level_param == True:
+                            post_code += self.cleanUpLocalDeclarations(indent, prefix, member.name, member.len, index)
+                    else:
+                        # Update struct prefix
+                        if first_level_param == True:
+                            sys.exit(1)
+                        else:
+                            new_prefix = '%s%s.' % (prefix, member.name)
+                        # Process sub-structs in this struct
+                        (tmp_decl, tmp_pre, tmp_post) = self.uniquify_members(struct_info, indent, new_prefix, array_index, create_func, destroy_func, destroy_array, False)
+                        decls += tmp_decl
+                        pre_code += tmp_pre
+                        post_code += tmp_post
+                        if process_pnext:
+                            pre_code += '%s    WrapPnextChainHandles(layer_data, local_%s%s.pNext);\n' % (indent, prefix, member.name)
+        return decls, pre_code, post_code
+    #
+    # For a particular API, generate the non-dispatchable-object wrapping/unwrapping code
+    def generate_wrapping_code(self, cmd):
+        indent = '    '
+        proto = cmd.find('proto/name')
+        params = cmd.findall('param')
+
+        if proto.text is not None:
+            cmd_member_dict = dict(self.cmdMembers)
+            cmd_info = cmd_member_dict[proto.text]
+            # Handle ndo create/allocate operations
+            if cmd_info[0].iscreate:
+                create_ndo_code = self.generate_create_ndo_code(indent, proto, params, cmd_info)
+            else:
+                create_ndo_code = ''
+            # Handle ndo destroy/free operations
+            if cmd_info[0].isdestroy:
+                (destroy_array, destroy_ndo_code) = self.generate_destroy_ndo_code(indent, proto, cmd_info)
+            else:
+                destroy_array = False
+                destroy_ndo_code = ''
+            paramdecl = ''
+            param_pre_code = ''
+            param_post_code = ''
+            create_func = True if create_ndo_code else False
+            destroy_func = True if destroy_ndo_code else False
+            (paramdecl, param_pre_code, param_post_code) = self.uniquify_members(cmd_info, indent, '', 0, create_func, destroy_func, destroy_array, True)
+            param_post_code += create_ndo_code
+            if destroy_ndo_code:
+                if destroy_array == True:
+                    param_post_code += destroy_ndo_code
+                else:
+                    param_pre_code += destroy_ndo_code
+            if param_pre_code:
+                if (not destroy_func) or (destroy_array):
+                    param_pre_code = '%s{\n%s%s}\n' % ('    ', param_pre_code, indent)
+        return paramdecl, param_pre_code, param_post_code
+    #
+    # Capture command parameter info needed to wrap NDOs as well as handling some boilerplate code
+    def genCmd(self, cmdinfo, cmdname, alias):
+
+        # Add struct-member type information to command parameter information
+        OutputGenerator.genCmd(self, cmdinfo, cmdname, alias)
+        members = cmdinfo.elem.findall('.//param')
+        # Iterate over members once to get length parameters for arrays
+        lens = set()
+        for member in members:
+            len = self.getLen(member)
+            if len:
+                lens.add(len)
+        struct_member_dict = dict(self.structMembers)
+        # Generate member info
+        membersInfo = []
+        for member in members:
+            # Get type and name of member
+            info = self.getTypeNameTuple(member)
+            type = info[0]
+            name = info[1]
+            cdecl = self.makeCParamDecl(member, 0)
+            # Check for parameter name in lens set
+            iscount = True if name in lens else False
+            len = self.getLen(member)
+            isconst = True if 'const' in cdecl else False
+            ispointer = self.paramIsPointer(member)
+            # Mark param as local if it is an array of NDOs
+            islocal = False;
+            if self.handle_types.IsNonDispatchable(type):
+                if (len is not None) and (isconst == True):
+                    islocal = True
+            # Or if it's a struct that contains an NDO
+            elif type in struct_member_dict:
+                if self.struct_contains_ndo(type) == True:
+                    islocal = True
+            isdestroy = True if True in [destroy_txt in cmdname for destroy_txt in ['Destroy', 'Free']] else False
+            iscreate = True if True in [create_txt in cmdname for create_txt in ['Create', 'Allocate', 'GetRandROutputDisplayEXT', 'RegisterDeviceEvent', 'RegisterDisplayEvent']] else False
+            extstructs = self.registry.validextensionstructs[type] if name == 'pNext' else None
+            membersInfo.append(self.CommandParam(type=type,
+                                                 name=name,
+                                                 ispointer=ispointer,
+                                                 isconst=isconst,
+                                                 iscount=iscount,
+                                                 len=len,
+                                                 extstructs=extstructs,
+                                                 cdecl=cdecl,
+                                                 islocal=islocal,
+                                                 iscreate=iscreate,
+                                                 isdestroy=isdestroy,
+                                                 feature_protect=self.featureExtraProtect))
+        self.cmdMembers.append(self.CmdMemberData(name=cmdname, members=membersInfo))
+        self.cmd_info_data.append(self.CmdInfoData(name=cmdname, cmdinfo=cmdinfo))
+        self.cmd_feature_protect.append(self.CmdExtraProtect(name=cmdname, extra_protect=self.featureExtraProtect))
+    #
+    # Create prototype for dispatch header file
+    def GenDispatchFunctionPrototype(self, cmdinfo, ifdef_text):
+        decls = self.makeCDecls(cmdinfo.elem)
+        func_sig = decls[0][:-1]
+        func_sig = func_sig.replace("VKAPI_ATTR ", "")
+        func_sig = func_sig.replace("VKAPI_CALL ", "Dispatch")
+        func_sig += ';'
+        dispatch_prototype = ''
+        if ifdef_text is not None:
+            dispatch_prototype = '#ifdef %s\n' % ifdef_text
+        dispatch_prototype += func_sig
+        if ifdef_text is not None:
+            dispatch_prototype += '\n#endif // %s' % ifdef_text
+        return dispatch_prototype
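Both this helper and WrapCommands derive the Dispatch* signature by string surgery on the declaration returned by makeCDecls. A standalone sketch of that surgery on a hypothetical declaration string (the exact text depends on genOpts.apicall/apientry and the vk-prefix-dropping makeProtoName override):

    decl = ('VKAPI_ATTR VkResult VKAPI_CALL CreateSampler(VkDevice device, '
            'const VkSamplerCreateInfo* pCreateInfo, '
            'const VkAllocationCallbacks* pAllocator, VkSampler* pSampler);')

    func_sig = decl[:-1]                                    # drop trailing ';'
    func_sig = func_sig.replace('VKAPI_ATTR ', '')          # strip attribute macro
    func_sig = func_sig.replace('VKAPI_CALL ', 'Dispatch')  # prefix the name with Dispatch
    print(func_sig + ';')
    # -> VkResult DispatchCreateSampler(VkDevice device, ...);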
+    #
+    # Create code to wrap NDOs as well as handling some boilerplate code
+    def WrapCommands(self):
+        cmd_member_dict = dict(self.cmdMembers)
+        cmd_info_dict = dict(self.cmd_info_data)
+        cmd_protect_dict = dict(self.cmd_feature_protect)
+
+        for api_call in self.cmdMembers:
+            cmdname = api_call.name
+            cmdinfo = cmd_info_dict[api_call.name]
+            feature_extra_protect = cmd_protect_dict[api_call.name]
+
+            # Add function prototype to header data
+            self.appendSection('header_file', self.GenDispatchFunctionPrototype(cmdinfo, feature_extra_protect))
+
+            if cmdname in self.no_autogen_list:
+                decls = self.makeCDecls(cmdinfo.elem)
+                self.appendSection('source_file', '')
+                self.appendSection('source_file', '// Skip %s dispatch, manually generated' % cmdname)
+                continue
+
+            # Generate NDO wrapping/unwrapping code for all parameters
+            (api_decls, api_pre, api_post) = self.generate_wrapping_code(cmdinfo.elem)
+            # If the API doesn't contain NDOs, we still need to make a down-chain call
+            down_chain_call_only = False
+            if not api_decls and not api_pre and not api_post:
+                down_chain_call_only = True
+            if (feature_extra_protect is not None):
+                self.appendSection('source_file', '')
+                self.appendSection('source_file', '#ifdef ' + feature_extra_protect)
+
+            decls = self.makeCDecls(cmdinfo.elem)
+            func_sig = decls[0][:-1]
+            func_sig = func_sig.replace("VKAPI_ATTR ", "")
+            func_sig = func_sig.replace("VKAPI_CALL ", "Dispatch")
+            self.appendSection('source_file', '')
+            self.appendSection('source_file', func_sig)
+            self.appendSection('source_file', '{')
+            # Setup common to call wrappers, first parameter is always dispatchable
+            dispatchable_type = cmdinfo.elem.find('param/type').text
+            dispatchable_name = cmdinfo.elem.find('param/name').text
+
+            # Gather the parameter items
+            params = cmdinfo.elem.findall('param/name')
+            # Pull out the text for each of the parameters, separate them by commas in a list
+            paramstext = ', '.join([str(param.text) for param in params])
+            wrapped_paramstext = paramstext
+            # If any of these parameters have been replaced by a local var, fix up the list
+            params = cmd_member_dict[cmdname]
+            for param in params:
+                if param.islocal == True or self.StructWithExtensions(param.type):
+                    if param.ispointer == True:
+                        wrapped_paramstext = wrapped_paramstext.replace(param.name, '(%s %s*)local_%s' % ('const', param.type, param.name))
+                    else:
+                        wrapped_paramstext = wrapped_paramstext.replace(param.name, '(%s %s)local_%s' % ('const', param.type, param.name))
+
+            # First, add check and down-chain call. Use correct dispatch table
+            dispatch_table_type = "device_dispatch_table"
+            if dispatchable_type in ["VkPhysicalDevice", "VkInstance"]:
+                dispatch_table_type = "instance_dispatch_table"
+
+            api_func = cmdinfo.elem.attrib.get('name').replace('vk','layer_data->%s.',1) % dispatch_table_type
+            # Call to get the layer_data pointer
+            self.appendSection('source_file', '    auto layer_data = GetLayerDataPtr(get_dispatch_key(%s), layer_data_map);' % dispatchable_name)
+            # Put all this together for the final down-chain call
+            if not down_chain_call_only:
+                unwrapped_dispatch_call = api_func + '(' + paramstext + ')'
+                self.appendSection('source_file', '    if (!wrap_handles) return %s;' % unwrapped_dispatch_call)
+
+            # Handle return values, if any
+            resulttype = cmdinfo.elem.find('proto/type')
+            if (resulttype is not None and resulttype.text == 'void'):
+              resulttype = None
+            if (resulttype is not None):
+                assignresult = resulttype.text + ' result = '
+            else:
+                assignresult = ''
+            # Pre-pend declarations and pre-api-call codegen
+            if api_decls:
+                self.appendSection('source_file', "\n".join(str(api_decls).rstrip().split("\n")))
+            if api_pre:
+                self.appendSection('source_file', "\n".join(str(api_pre).rstrip().split("\n")))
+            # Generate the wrapped dispatch call 
+            self.appendSection('source_file', '    ' + assignresult + api_func + '(' + wrapped_paramstext + ');')
+
+            # And add the post-API-call codegen
+            if ('CreateGraphicsPipelines' in cmdname) or ('CreateComputePipelines' in cmdname) or ('CreateRayTracingPipelines' in cmdname):
+                copy_feedback_source  = '    for (uint32_t i = 0; i < createInfoCount; ++i) {\n'
+                copy_feedback_source += '        if (pCreateInfos[i].pNext != VK_NULL_HANDLE) {\n'
+                copy_feedback_source += '            CopyCreatePipelineFeedbackData(local_pCreateInfos[i].pNext, pCreateInfos[i].pNext);\n'
+                copy_feedback_source += '        }\n'
+                copy_feedback_source += '    }\n'
+                self.appendSection('source_file', copy_feedback_source)
+            self.appendSection('source_file', "\n".join(str(api_post).rstrip().split("\n")))
+            # Handle the return result variable, if any
+            if (resulttype is not None):
+                self.appendSection('source_file', '    return result;')
+            self.appendSection('source_file', '}')
+            if (feature_extra_protect is not None):
+                self.appendSection('source_file', '#endif // '+ feature_extra_protect)
+
diff --git a/src/third_party/vulkan-validation-layers/src/scripts/layer_chassis_generator.py b/src/third_party/vulkan-validation-layers/src/scripts/layer_chassis_generator.py
new file mode 100644
index 0000000..1362f20
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/scripts/layer_chassis_generator.py
@@ -0,0 +1,1768 @@
+#!/usr/bin/python3 -i
+#
+# Copyright (c) 2015-2019 Valve Corporation
+# Copyright (c) 2015-2019 LunarG, Inc.
+# Copyright (c) 2015-2019 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Tobin Ehlis <tobine@google.com>
+# Author: Mark Lobodzinski <mark@lunarg.com>
+#
+# This script generates the dispatch portion of a factory layer which intercepts
+# all Vulkan functions. The resultant factory layer allows rapid development of
+# layers and interceptors.
+
+import os,re,sys
+from generator import *
+from common_codegen import *
+
+# LayerChassisGeneratorOptions - subclass of GeneratorOptions.
+#
+# Adds options used by LayerChassisOutputGenerator objects during layer
+# chassis generation.
+#
+# Additional members
+#   prefixText - list of strings to prefix generated header with
+#     (usually a copyright statement + calling convention macros).
+#   protectFile - True if multiple inclusion protection should be
+#     generated (based on the filename) around the entire header.
+#   protectFeature - True if #ifndef..#endif protection should be
+#     generated around a feature interface in the header file.
+#   genFuncPointers - True if function pointer typedefs should be
+#     generated
+#   protectProto - If conditional protection should be generated
+#     around prototype declarations, set to either '#ifdef'
+#     to require opt-in (#ifdef protectProtoStr) or '#ifndef'
+#     to require opt-out (#ifndef protectProtoStr). Otherwise
+#     set to None.
+#   protectProtoStr - #ifdef/#ifndef symbol to use around prototype
+#     declarations, if protectProto is set
+#   apicall - string to use for the function declaration prefix,
+#     such as APICALL on Windows.
+#   apientry - string to use for the calling convention macro,
+#     in typedefs, such as APIENTRY.
+#   apientryp - string to use for the calling convention macro
+#     in function pointer typedefs, such as APIENTRYP.
+#   indentFuncProto - True if prototype declarations should put each
+#     parameter on a separate line
+#   indentFuncPointer - True if typedefed function pointers should put each
+#     parameter on a separate line
+#   alignFuncParam - if nonzero and parameters are being put on a
+#     separate line, align parameter names at the specified column
+class LayerChassisGeneratorOptions(GeneratorOptions):
+    def __init__(self,
+                 conventions = None,
+                 filename = None,
+                 directory = '.',
+                 apiname = None,
+                 profile = None,
+                 versions = '.*',
+                 emitversions = '.*',
+                 defaultExtensions = None,
+                 addExtensions = None,
+                 removeExtensions = None,
+                 emitExtensions = None,
+                 sortProcedure = regSortFeatures,
+                 prefixText = "",
+                 genFuncPointers = True,
+                 protectFile = True,
+                 protectFeature = True,
+                 apicall = '',
+                 apientry = '',
+                 apientryp = '',
+                 indentFuncProto = True,
+                 indentFuncPointer = False,
+                 alignFuncParam = 0,
+                 helper_file_type = '',
+                 expandEnumerants = True):
+        GeneratorOptions.__init__(self, conventions, filename, directory, apiname, profile,
+                                  versions, emitversions, defaultExtensions,
+                                  addExtensions, removeExtensions, emitExtensions, sortProcedure)
+        self.prefixText      = prefixText
+        self.genFuncPointers = genFuncPointers
+        self.protectFile     = protectFile
+        self.protectFeature  = protectFeature
+        self.apicall         = apicall
+        self.apientry        = apientry
+        self.apientryp       = apientryp
+        self.indentFuncProto = indentFuncProto
+        self.indentFuncPointer = indentFuncPointer
+        self.alignFuncParam  = alignFuncParam
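A hedged sketch of how a driver script might instantiate these options; the concrete values below are illustrative assumptions, not the ones this build actually passes, and the imports assume the Khronos registry scripts (generator.py, common_codegen.py) are on the path:

    from layer_chassis_generator import LayerChassisGeneratorOptions

    opts = LayerChassisGeneratorOptions(
        filename='chassis.cpp',        # output file name
        directory='.',
        apiname='vulkan',
        versions='.*',
        emitversions='.*',
        defaultExtensions='vulkan',
        apicall='VKAPI_ATTR ',
        apientry='VKAPI_CALL ',
        apientryp='VKAPI_PTR *',
        alignFuncParam=48)
    # The registry's apiGen() would then drive LayerChassisOutputGenerator with these options.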
+
+# LayerChassisOutputGenerator - subclass of OutputGenerator.
+# Generates a layer chassis that intercepts all API entrypoints.
+# This is intended to be used as a starting point for creating custom layers.
+#
+# ---- methods ----
+# LayerChassisOutputGenerator(errFile, warnFile, diagFile) - args as for
+#   OutputGenerator. Defines additional internal state.
+# ---- methods overriding base class ----
+# beginFile(genOpts)
+# endFile()
+# beginFeature(interface, emit)
+# endFeature()
+# genType(typeinfo,name)
+# genStruct(typeinfo,name)
+# genGroup(groupinfo,name)
+# genEnum(enuminfo, name)
+# genCmd(cmdinfo)
+class LayerChassisOutputGenerator(OutputGenerator):
+    """Generate specified API interfaces in a specific style, such as a C header"""
+    # This is an ordered list of sections in the header file.
+    TYPE_SECTIONS = ['include', 'define', 'basetype', 'handle', 'enum',
+                     'group', 'bitmask', 'funcpointer', 'struct']
+    ALL_SECTIONS = TYPE_SECTIONS + ['command']
+
+    manual_functions = [
+        # Include functions here to be intercepted with manually implemented function bodies
+        'vkGetDeviceProcAddr',
+        'vkGetInstanceProcAddr',
+        'vkCreateDevice',
+        'vkDestroyDevice',
+        'vkCreateInstance',
+        'vkDestroyInstance',
+        'vkEnumerateInstanceLayerProperties',
+        'vkEnumerateInstanceExtensionProperties',
+        'vkEnumerateDeviceLayerProperties',
+        'vkEnumerateDeviceExtensionProperties',
+        # Functions that are handled explicitly due to chassis architecture violations
+        'vkCreateGraphicsPipelines',
+        'vkCreateComputePipelines',
+        'vkCreateRayTracingPipelinesNV',
+        'vkCreatePipelineLayout',
+        'vkCreateShaderModule',
+        'vkAllocateDescriptorSets',
+        'vkCreateBuffer',
+        # ValidationCache functions do not get dispatched
+        'vkCreateValidationCacheEXT',
+        'vkDestroyValidationCacheEXT',
+        'vkMergeValidationCachesEXT',
+        'vkGetValidationCacheDataEXT',
+        # We don't want to hook this function
+        'vkGetPhysicalDeviceProcAddr',
+        ]
+
+    alt_ret_codes = [
+        # Include functions here which must tolerate VK_INCOMPLETE as a return code
+        'vkEnumeratePhysicalDevices',
+        'vkEnumeratePhysicalDeviceGroupsKHR',
+        'vkGetValidationCacheDataEXT',
+        'vkGetPipelineCacheData',
+        'vkGetShaderInfoAMD',
+        'vkGetPhysicalDeviceDisplayPropertiesKHR',
+        'vkGetPhysicalDeviceDisplayProperties2KHR',
+        'vkGetPhysicalDeviceDisplayPlanePropertiesKHR',
+        'vkGetDisplayPlaneSupportedDisplaysKHR',
+        'vkGetDisplayModePropertiesKHR',
+        'vkGetDisplayModeProperties2KHR',
+        'vkGetPhysicalDeviceSurfaceFormatsKHR',
+        'vkGetPhysicalDeviceSurfacePresentModesKHR',
+        'vkGetPhysicalDevicePresentRectanglesKHR',
+        'vkGetPastPresentationTimingGOOGLE',
+        'vkGetSwapchainImagesKHR',
+        'vkEnumerateInstanceLayerProperties',
+        'vkEnumerateDeviceLayerProperties',
+        'vkEnumerateInstanceExtensionProperties',
+        'vkEnumerateDeviceExtensionProperties',
+        'vkGetPhysicalDeviceCalibrateableTimeDomainsEXT',
+    ]
+
+    pre_dispatch_debug_utils_functions = {
+        'vkDebugMarkerSetObjectNameEXT' : 'layer_data->report_data->DebugReportSetMarkerObjectName(pNameInfo);',
+        'vkSetDebugUtilsObjectNameEXT' : 'layer_data->report_data->DebugReportSetUtilsObjectName(pNameInfo);',
+        'vkQueueBeginDebugUtilsLabelEXT' : 'BeginQueueDebugUtilsLabel(layer_data->report_data, queue, pLabelInfo);',
+        'vkQueueInsertDebugUtilsLabelEXT' : 'InsertQueueDebugUtilsLabel(layer_data->report_data, queue, pLabelInfo);',
+        }
+
+    post_dispatch_debug_utils_functions = {
+        'vkQueueEndDebugUtilsLabelEXT' : 'EndQueueDebugUtilsLabel(layer_data->report_data, queue);',
+        'vkCreateDebugReportCallbackEXT' : 'layer_create_report_callback(layer_data->report_data, false, pCreateInfo, pAllocator, pCallback);',
+        'vkDestroyDebugReportCallbackEXT' : 'layer_destroy_callback(layer_data->report_data, callback, pAllocator);',
+        'vkCreateDebugUtilsMessengerEXT' : 'layer_create_messenger_callback(layer_data->report_data, false, pCreateInfo, pAllocator, pMessenger);',
+        'vkDestroyDebugUtilsMessengerEXT' : 'layer_destroy_callback(layer_data->report_data, messenger, pAllocator);',
+        }
+
+    precallvalidate_loop = "for (auto intercept : layer_data->object_dispatch) {"
+    precallrecord_loop = precallvalidate_loop
+    postcallrecord_loop = "for (auto intercept : layer_data->object_dispatch) {"
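+    # For illustration only: the loop snippets above are spliced verbatim into the
+    # generated (and hand-written) intercept bodies, so a typical entry point expands
+    # to something like (hypothetical command name):
+    #     for (auto intercept : layer_data->object_dispatch) {
+    #         auto lock = intercept->read_lock();
+    #         skip |= intercept->PreCallValidateFoo(device, ...);
+    #     }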
+
+    inline_custom_header_preamble = """
+#define NOMINMAX
+#include <atomic>
+#include <mutex>
+#include <cinttypes>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <unordered_map>
+#include <unordered_set>
+#include <algorithm>
+#include <memory>
+
+#include "vk_loader_platform.h"
+#include "vulkan/vulkan.h"
+#include "vk_layer_config.h"
+#include "vk_layer_data.h"
+#include "vk_layer_logging.h"
+#include "vk_object_types.h"
+#include "vulkan/vk_layer.h"
+#include "vk_enum_string_helper.h"
+#include "vk_layer_extension_utils.h"
+#include "vk_layer_utils.h"
+#include "vulkan/vk_layer.h"
+#include "vk_dispatch_table_helper.h"
+#include "vk_extension_helper.h"
+#include "vk_safe_struct.h"
+#include "vk_typemap_helper.h"
+
+
+extern std::atomic<uint64_t> global_unique_id;
+
+// To avoid re-hashing unique ids on each use, we precompute the hash and store the
+// hash's LSBs in the high 24 bits.
+struct HashedUint64 {
+    static const int HASHED_UINT64_SHIFT = 40;
+    size_t operator()(const uint64_t &t) const { return t >> HASHED_UINT64_SHIFT; }
+
+    static uint64_t hash(uint64_t id) {
+        uint64_t h = (uint64_t)std::hash<uint64_t>()(id);
+        id |= h << HASHED_UINT64_SHIFT;
+        return id;
+    }
+};
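+
+// Worked example (illustrative only): if global_unique_id has handed out id == 1 and
+// std::hash<uint64_t>()(1) == h, hash(1) returns (1 | (h << 40)); operator() later yields
+// that value >> 40 (the low 24 bits of h), so lookups avoid re-hashing the key.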
+
+extern vl_concurrent_unordered_map<uint64_t, uint64_t, 4, HashedUint64> unique_id_mapping;
+"""
+
+    inline_custom_header_class_definition = """
+
+// Layer object type identifiers
+enum LayerObjectTypeId {
+    LayerObjectTypeInstance,                    // Container for an instance dispatch object
+    LayerObjectTypeDevice,                      // Container for a device dispatch object
+    LayerObjectTypeThreading,                   // Instance or device threading layer object
+    LayerObjectTypeParameterValidation,         // Instance or device parameter validation layer object
+    LayerObjectTypeObjectTracker,               // Instance or device object tracker layer object
+    LayerObjectTypeCoreValidation,              // Instance or device core validation layer object
+    LayerObjectTypeBestPractices,               // Instance or device best practices layer object
+    LayerObjectTypeGpuAssisted,                 // Instance or device gpu assisted validation layer object
+    LayerObjectTypeMaxEnum,                     // Max enum count
+};
+
+struct TEMPLATE_STATE {
+    VkDescriptorUpdateTemplateKHR desc_update_template;
+    safe_VkDescriptorUpdateTemplateCreateInfo create_info;
+    bool destroyed;
+
+    TEMPLATE_STATE(VkDescriptorUpdateTemplateKHR update_template, safe_VkDescriptorUpdateTemplateCreateInfo *pCreateInfo)
+        : desc_update_template(update_template), create_info(*pCreateInfo), destroyed(false) {}
+};
+
+class LAYER_PHYS_DEV_PROPERTIES {
+public:
+    VkPhysicalDeviceProperties properties;
+    std::vector<VkQueueFamilyProperties> queue_family_properties;
+};
+
+typedef enum ValidationCheckDisables {
+    VALIDATION_CHECK_DISABLE_COMMAND_BUFFER_STATE,
+    VALIDATION_CHECK_DISABLE_OBJECT_IN_USE,
+    VALIDATION_CHECK_DISABLE_IDLE_DESCRIPTOR_SET,
+    VALIDATION_CHECK_DISABLE_PUSH_CONSTANT_RANGE,
+    VALIDATION_CHECK_DISABLE_QUERY_VALIDATION,
+    VALIDATION_CHECK_DISABLE_IMAGE_LAYOUT_VALIDATION,
+} ValidationCheckDisables;
+
+// CHECK_DISABLED struct is a container for bools that can block validation checks from being performed.
+// These bools are all "false" by default meaning that all checks are enabled. Enum values can be specified
+// via the vk_layer_settings.txt config file or at CreateInstance time via the VK_EXT_validation_features extension
+// that can selectively disable checks.
+struct CHECK_DISABLED {
+    bool command_buffer_state;                      // Skip command buffer state validation
+    bool object_in_use;                             // Skip all object in_use checking
+    bool idle_descriptor_set;                       // Skip check to verify that descriptor set is not in-use
+    bool push_constant_range;                       // Skip push constant range checks
+    bool query_validation;                          // Disable all core validation query-related checks
+    bool image_layout_validation;                   // Disable image layout validation
+    bool object_tracking;                           // Disable object lifetime validation
+    bool core_checks;                               // Disable core validation checks
+    bool thread_safety;                             // Disable thread safety validation
+    bool stateless_checks;                          // Disable stateless validation checks
+    bool handle_wrapping;                           // Disable unique handles/handle wrapping
+    bool shader_validation;                         // Skip validation for shaders
+
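+    // Note: SetAll assumes the bool members above are contiguous; it fills from the first
+    // member (command_buffer_state) through the last (shader_validation).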
+    void SetAll(bool value) { std::fill(&command_buffer_state, &shader_validation + 1, value); }
+};
+
+struct CHECK_ENABLED {
+    bool gpu_validation;
+    bool gpu_validation_reserve_binding_slot;
+    bool best_practices;
+
+    void SetAll(bool value) { std::fill(&gpu_validation, &gpu_validation_reserve_binding_slot + 1, value); }
+};
+
+// Layer chassis validation object base class definition
+class ValidationObject {
+    public:
+        uint32_t api_version;
+        debug_report_data* report_data = nullptr;
+
+        VkLayerInstanceDispatchTable instance_dispatch_table;
+        VkLayerDispatchTable device_dispatch_table;
+
+        InstanceExtensions instance_extensions;
+        DeviceExtensions device_extensions = {};
+        CHECK_DISABLED disabled = {};
+        CHECK_ENABLED enabled = {};
+
+        VkInstance instance = VK_NULL_HANDLE;
+        VkPhysicalDevice physical_device = VK_NULL_HANDLE;
+        VkDevice device = VK_NULL_HANDLE;
+        LAYER_PHYS_DEV_PROPERTIES phys_dev_properties = {};
+
+        std::vector<ValidationObject*> object_dispatch;
+        LayerObjectTypeId container_type;
+
+        std::string layer_name = "CHASSIS";
+
+        // Constructor
+        ValidationObject(){};
+        // Destructor
+        virtual ~ValidationObject() {};
+
+        ReadWriteLock validation_object_mutex;
+        virtual read_lock_guard_t read_lock() {
+            return read_lock_guard_t(validation_object_mutex);
+        }
+        virtual write_lock_guard_t write_lock() {
+            return write_lock_guard_t(validation_object_mutex);
+        }
+
+        ValidationObject* GetValidationObject(std::vector<ValidationObject*>& object_dispatch, LayerObjectTypeId object_type) {
+            for (auto validation_object : object_dispatch) {
+                if (validation_object->container_type == object_type) {
+                    return validation_object;
+                }
+            }
+            return nullptr;
+        };
+
+        // Handle Wrapping Data
+        // Reverse map display handles
+        vl_concurrent_unordered_map<VkDisplayKHR, uint64_t, 0> display_id_reverse_mapping;
+        // Wrapping Descriptor Template Update structures requires access to the template createinfo structs
+        std::unordered_map<uint64_t, std::unique_ptr<TEMPLATE_STATE>> desc_template_createinfo_map;
+        struct SubpassesUsageStates {
+            std::unordered_set<uint32_t> subpasses_using_color_attachment;
+            std::unordered_set<uint32_t> subpasses_using_depthstencil_attachment;
+        };
+        // Uses unwrapped handles
+        std::unordered_map<VkRenderPass, SubpassesUsageStates> renderpasses_states;
+        // Map of wrapped swapchain handles to arrays of wrapped swapchain image IDs
+        // Each swapchain has an immutable list of wrapped swapchain image IDs -- always return these IDs if they exist
+        std::unordered_map<VkSwapchainKHR, std::vector<VkImage>> swapchain_wrapped_image_handle_map;
+        // Map of wrapped descriptor pools to set of wrapped descriptor sets allocated from each pool
+        std::unordered_map<VkDescriptorPool, std::unordered_set<VkDescriptorSet>> pool_descriptor_sets_map;
+
+
+        // Unwrap a handle.
+        template <typename HandleType>
+        HandleType Unwrap(HandleType wrappedHandle) {
+            auto iter = unique_id_mapping.find(reinterpret_cast<uint64_t const &>(wrappedHandle));
+            if (iter == unique_id_mapping.end())
+                return (HandleType)0;
+            return (HandleType)iter->second;
+        }
+
+        // Wrap a newly created handle with a new unique ID, and return the new ID.
+        template <typename HandleType>
+        HandleType WrapNew(HandleType newlyCreatedHandle) {
+            auto unique_id = global_unique_id++;
+            unique_id = HashedUint64::hash(unique_id);
+            unique_id_mapping.insert_or_assign(unique_id, reinterpret_cast<uint64_t const &>(newlyCreatedHandle));
+            return (HandleType)unique_id;
+        }
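+
+        // Illustrative sketch (hypothetical handles, assuming wrap_handles is enabled):
+        //     VkBuffer wrapped = WrapNew(driver_buffer);  // returns a fresh unique ID cast to the handle type
+        //     VkBuffer driver  = Unwrap(wrapped);         // maps the ID back to the driver handle
+        //     Unwrap(unknown)  == (VkBuffer)0             // IDs not in unique_id_mapping unwrap to null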
+
+        // Specialized handling for VkDisplayKHR. Adds an entry to enable reverse-lookup.
+        VkDisplayKHR WrapDisplay(VkDisplayKHR newlyCreatedHandle, ValidationObject *map_data) {
+            auto unique_id = global_unique_id++;
+            unique_id = HashedUint64::hash(unique_id);
+            unique_id_mapping.insert_or_assign(unique_id, reinterpret_cast<uint64_t const &>(newlyCreatedHandle));
+            map_data->display_id_reverse_mapping.insert_or_assign(newlyCreatedHandle, unique_id);
+            return (VkDisplayKHR)unique_id;
+        }
+
+        // VkDisplayKHR objects don't have a single point of creation, so we need to see if one already exists in the map before
+        // creating another.
+        VkDisplayKHR MaybeWrapDisplay(VkDisplayKHR handle, ValidationObject *map_data) {
+            // See if this display is already known
+            auto it = map_data->display_id_reverse_mapping.find(handle);
+            if (it != map_data->display_id_reverse_mapping.end()) return (VkDisplayKHR)it->second;
+            // Unknown, so wrap
+            return WrapDisplay(handle, map_data);
+        }
+
+        // Pre/post hook point declarations
+"""
+
+    inline_copyright_message = """
+// This file is ***GENERATED***.  Do Not Edit.
+// See layer_chassis_generator.py for modifications.
+
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ */"""
+
+    inline_custom_source_preamble = """
+
+#include <string.h>
+#include <mutex>
+
+#define VALIDATION_ERROR_MAP_IMPL
+
+#include "chassis.h"
+#include "layer_chassis_dispatch.h"
+
+small_unordered_map<void*, ValidationObject*, 2> layer_data_map;
+
+// Global unique object identifier.
+std::atomic<uint64_t> global_unique_id(1ULL);
+// Map uniqueID to actual object handle. Accesses to the map itself are
+// internally synchronized.
+vl_concurrent_unordered_map<uint64_t, uint64_t, 4, HashedUint64> unique_id_mapping;
+
+bool wrap_handles = true;
+
+#define OBJECT_LAYER_NAME "VK_LAYER_KHRONOS_validation"
+#define OBJECT_LAYER_DESCRIPTION "khronos_validation"
+
+// Include layer validation object definitions
+#include "best_practices.h"
+#include "core_validation.h"
+#include "command_counter.h"
+#include "gpu_validation.h"
+#include "object_lifetime_validation.h"
+#include "stateless_validation.h"
+#include "thread_safety.h"
+
+namespace vulkan_layer_chassis {
+
+using std::unordered_map;
+
+static const VkLayerProperties global_layer = {
+    OBJECT_LAYER_NAME, VK_LAYER_API_VERSION, 1, "LunarG validation Layer",
+};
+
+static const VkExtensionProperties instance_extensions[] = {{VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION},
+                                                            {VK_EXT_DEBUG_UTILS_EXTENSION_NAME, VK_EXT_DEBUG_UTILS_SPEC_VERSION}};
+static const VkExtensionProperties device_extensions[] = {
+    {VK_EXT_VALIDATION_CACHE_EXTENSION_NAME, VK_EXT_VALIDATION_CACHE_SPEC_VERSION},
+    {VK_EXT_DEBUG_MARKER_EXTENSION_NAME, VK_EXT_DEBUG_MARKER_SPEC_VERSION},
+};
+
+typedef struct {
+    bool is_instance_api;
+    void* funcptr;
+} function_data;
+
+extern const std::unordered_map<std::string, function_data> name_to_funcptr_map;
+
+// Manually written functions
+
+// Check enabled instance extensions against supported instance extension whitelist
+static void InstanceExtensionWhitelist(ValidationObject *layer_data, const VkInstanceCreateInfo *pCreateInfo, VkInstance instance) {
+    for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
+        // Check for recognized instance extensions
+        if (!white_list(pCreateInfo->ppEnabledExtensionNames[i], kInstanceExtensionNames)) {
+            log_msg(layer_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                    kVUIDUndefined,
+                    "Instance Extension %s is not supported by this layer.  Using this extension may adversely affect validation "
+                    "results and/or produce undefined behavior.",
+                    pCreateInfo->ppEnabledExtensionNames[i]);
+        }
+    }
+}
+
+// Check enabled device extensions against supported device extension whitelist
+static void DeviceExtensionWhitelist(ValidationObject *layer_data, const VkDeviceCreateInfo *pCreateInfo, VkDevice device) {
+    for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
+        // Check for recognized device extensions
+        if (!white_list(pCreateInfo->ppEnabledExtensionNames[i], kDeviceExtensionNames)) {
+            log_msg(layer_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+                    kVUIDUndefined,
+                    "Device Extension %s is not supported by this layer.  Using this extension may adversely affect validation "
+                    "results and/or produce undefined behavior.",
+                    pCreateInfo->ppEnabledExtensionNames[i]);
+        }
+    }
+}
+
+
+// Process validation features, flags and settings specified through extensions, a layer settings file, or environment variables
+
+static const std::unordered_map<std::string, VkValidationFeatureDisableEXT> VkValFeatureDisableLookup = {
+    {"VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT", VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT},
+    {"VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT", VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT},
+    {"VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT", VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT},
+    {"VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT", VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT},
+    {"VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT", VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT},
+    {"VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT", VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT},
+    {"VK_VALIDATION_FEATURE_DISABLE_ALL_EXT", VK_VALIDATION_FEATURE_DISABLE_ALL_EXT},
+};
+
+static const std::unordered_map<std::string, VkValidationFeatureEnableEXT> VkValFeatureEnableLookup = {
+    {"VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT", VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT},
+    {"VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT", VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT},
+    {"VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT", VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT},
+};
+
+static const std::unordered_map<std::string, ValidationCheckDisables> ValidationDisableLookup = {
+    {"VALIDATION_CHECK_DISABLE_COMMAND_BUFFER_STATE", VALIDATION_CHECK_DISABLE_COMMAND_BUFFER_STATE},
+    {"VALIDATION_CHECK_DISABLE_OBJECT_IN_USE", VALIDATION_CHECK_DISABLE_OBJECT_IN_USE},
+    {"VALIDATION_CHECK_DISABLE_IDLE_DESCRIPTOR_SET", VALIDATION_CHECK_DISABLE_IDLE_DESCRIPTOR_SET},
+    {"VALIDATION_CHECK_DISABLE_PUSH_CONSTANT_RANGE", VALIDATION_CHECK_DISABLE_PUSH_CONSTANT_RANGE},
+    {"VALIDATION_CHECK_DISABLE_QUERY_VALIDATION", VALIDATION_CHECK_DISABLE_QUERY_VALIDATION},
+    {"VALIDATION_CHECK_DISABLE_IMAGE_LAYOUT_VALIDATION", VALIDATION_CHECK_DISABLE_IMAGE_LAYOUT_VALIDATION},
+};
+
+// Set the local disable flag for the appropriate VALIDATION_CHECK_DISABLE enum
+void SetValidationDisable(CHECK_DISABLED* disable_data, const ValidationCheckDisables disable_id) {
+    switch (disable_id) {
+        case VALIDATION_CHECK_DISABLE_COMMAND_BUFFER_STATE:
+            disable_data->command_buffer_state = true;
+            break;
+        case VALIDATION_CHECK_DISABLE_OBJECT_IN_USE:
+            disable_data->object_in_use = true;
+            break;
+        case VALIDATION_CHECK_DISABLE_IDLE_DESCRIPTOR_SET:
+            disable_data->idle_descriptor_set = true;
+            break;
+        case VALIDATION_CHECK_DISABLE_PUSH_CONSTANT_RANGE:
+            disable_data->push_constant_range = true;
+            break;
+        case VALIDATION_CHECK_DISABLE_QUERY_VALIDATION:
+            disable_data->query_validation = true;
+            break;
+        case VALIDATION_CHECK_DISABLE_IMAGE_LAYOUT_VALIDATION:
+            disable_data->image_layout_validation = true;
+            break;
+        default:
+            assert(false);
+    }
+}
+
+// Set the local disable flag for a single VK_VALIDATION_FEATURE_DISABLE_* flag
+void SetValidationFeatureDisable(CHECK_DISABLED* disable_data, const VkValidationFeatureDisableEXT feature_disable) {
+    switch (feature_disable) {
+        case VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT:
+            disable_data->shader_validation = true;
+            break;
+        case VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT:
+            disable_data->thread_safety = true;
+            break;
+        case VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT:
+            disable_data->stateless_checks = true;
+            break;
+        case VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT:
+            disable_data->object_tracking = true;
+            break;
+        case VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT:
+            disable_data->core_checks = true;
+            break;
+        case VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT:
+            disable_data->handle_wrapping = true;
+            break;
+        case VK_VALIDATION_FEATURE_DISABLE_ALL_EXT:
+            // Set all disabled flags to true
+            disable_data->SetAll(true);
+            break;
+        default:
+            break;
+    }
+}
+
+// Set the local enable flag for a single VK_VALIDATION_FEATURE_ENABLE_* flag
+void SetValidationFeatureEnable(CHECK_ENABLED *enable_data, const VkValidationFeatureEnableEXT feature_enable) {
+    switch (feature_enable) {
+        case VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT:
+            enable_data->gpu_validation = true;
+            break;
+        case VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT:
+            enable_data->gpu_validation_reserve_binding_slot = true;
+            break;
+        case VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT:
+            enable_data->best_practices = true;
+            break;
+        default:
+            break;
+    }
+}
+
+// Set the local disable flag for settings specified through the VK_EXT_validation_flags extension
+void SetValidationFlags(CHECK_DISABLED* disables, const VkValidationFlagsEXT* val_flags_struct) {
+    for (uint32_t i = 0; i < val_flags_struct->disabledValidationCheckCount; ++i) {
+        switch (val_flags_struct->pDisabledValidationChecks[i]) {
+            case VK_VALIDATION_CHECK_SHADERS_EXT:
+                disables->shader_validation = true;
+                break;
+            case VK_VALIDATION_CHECK_ALL_EXT:
+                // Set all disabled flags to true
+                disables->SetAll(true);
+                break;
+            default:
+                break;
+        }
+    }
+}
+
+// Process Validation Features flags specified through the ValidationFeature extension
+void SetValidationFeatures(CHECK_DISABLED *disable_data, CHECK_ENABLED *enable_data,
+                           const VkValidationFeaturesEXT *val_features_struct) {
+    for (uint32_t i = 0; i < val_features_struct->disabledValidationFeatureCount; ++i) {
+        SetValidationFeatureDisable(disable_data, val_features_struct->pDisabledValidationFeatures[i]);
+    }
+    for (uint32_t i = 0; i < val_features_struct->enabledValidationFeatureCount; ++i) {
+        SetValidationFeatureEnable(enable_data, val_features_struct->pEnabledValidationFeatures[i]);
+    }
+}
+
+// Given a string representation of a list of enable enum values, call the appropriate setter function
+void SetLocalEnableSetting(std::string list_of_enables, std::string delimiter, CHECK_ENABLED* enables) {
+    size_t pos = 0;
+    std::string token;
+    while (list_of_enables.length() != 0) {
+        pos = list_of_enables.find(delimiter);
+        if (pos != std::string::npos) {
+            token = list_of_enables.substr(0, pos);
+        } else {
+            pos = list_of_enables.length() - delimiter.length();
+            token = list_of_enables;
+        }
+        if (token.find("VK_VALIDATION_FEATURE_ENABLE_") != std::string::npos) {
+            auto result = VkValFeatureEnableLookup.find(token);
+            if (result != VkValFeatureEnableLookup.end()) {
+                SetValidationFeatureEnable(enables, result->second);
+            }
+        }
+        list_of_enables.erase(0, pos + delimiter.length());
+    }
+}
+
+// Given a string representation of a list of disable enum values, call the appropriate setter function
+void SetLocalDisableSetting(std::string list_of_disables, std::string delimiter, CHECK_DISABLED* disables) {
+    size_t pos = 0;
+    std::string token;
+    while (list_of_disables.length() != 0) {
+        pos = list_of_disables.find(delimiter);
+        if (pos != std::string::npos) {
+            token = list_of_disables.substr(0, pos);
+        } else {
+            pos = list_of_disables.length() - delimiter.length();
+            token = list_of_disables;
+        }
+        if (token.find("VK_VALIDATION_FEATURE_DISABLE_") != std::string::npos) {
+            auto result = VkValFeatureDisableLookup.find(token);
+            if (result != VkValFeatureDisableLookup.end()) {
+                SetValidationFeatureDisable(disables, result->second);
+            }
+        }
+        if (token.find("VALIDATION_CHECK_DISABLE_") != std::string::npos) {
+            auto result = ValidationDisableLookup.find(token);
+            if (result != ValidationDisableLookup.end()) {
+                SetValidationDisable(disables, result->second);
+            }
+        }
+        list_of_disables.erase(0, pos + delimiter.length());
+    }
+}
+
+// Process enables and disables set through the vk_layer_settings.txt config file or through an environment variable
+void ProcessConfigAndEnvSettings(const char* layer_description, CHECK_ENABLED* enables, CHECK_DISABLED* disables) {
+    std::string enable_key = layer_description;
+    std::string disable_key = layer_description;
+    enable_key.append(".enables");
+    disable_key.append(".disables");
+    std::string list_of_config_enables = getLayerOption(enable_key.c_str());
+    std::string list_of_env_enables = GetLayerEnvVar("VK_LAYER_ENABLES");
+    std::string list_of_config_disables = getLayerOption(disable_key.c_str());
+    std::string list_of_env_disables = GetLayerEnvVar("VK_LAYER_DISABLES");
+#if defined(_WIN32)
+    std::string env_delimiter = ";";
+#else
+    std::string env_delimiter = ":";
+#endif
+    SetLocalEnableSetting(list_of_config_enables, ",", enables);
+    SetLocalEnableSetting(list_of_env_enables, env_delimiter, enables);
+    SetLocalDisableSetting(list_of_config_disables, ",", disables);
+    SetLocalDisableSetting(list_of_env_disables, env_delimiter, disables);
+}
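+
+// Example (for illustration only): with OBJECT_LAYER_DESCRIPTION == "khronos_validation", the
+// settings-file keys read above are "khronos_validation.enables" / "khronos_validation.disables",
+// and the same token lists may come from the environment, e.g.
+//     VK_LAYER_ENABLES=VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT
+//     VK_LAYER_DISABLES=VALIDATION_CHECK_DISABLE_QUERY_VALIDATION:VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT
+// Config-file entries are comma-separated; environment entries use ';' on Windows and ':' elsewhere.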
+
+
+// Non-code-generated chassis API functions
+
+VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice device, const char *funcName) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    if (!ApiParentExtensionEnabled(funcName, &layer_data->device_extensions)) {
+        return nullptr;
+    }
+    const auto &item = name_to_funcptr_map.find(funcName);
+    if (item != name_to_funcptr_map.end()) {
+        if (item->second.is_instance_api) {
+            return nullptr;
+        } else {
+            return reinterpret_cast<PFN_vkVoidFunction>(item->second.funcptr);
+        }
+    }
+    auto &table = layer_data->device_dispatch_table;
+    if (!table.GetDeviceProcAddr) return nullptr;
+    return table.GetDeviceProcAddr(device, funcName);
+}
+
+VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *funcName) {
+    const auto &item = name_to_funcptr_map.find(funcName);
+    if (item != name_to_funcptr_map.end()) {
+        return reinterpret_cast<PFN_vkVoidFunction>(item->second.funcptr);
+    }
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(instance), layer_data_map);
+    auto &table = layer_data->instance_dispatch_table;
+    if (!table.GetInstanceProcAddr) return nullptr;
+    return table.GetInstanceProcAddr(instance, funcName);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(uint32_t *pCount, VkLayerProperties *pProperties) {
+    return util_GetLayerProperties(1, &global_layer, pCount, pProperties);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
+                                                              VkLayerProperties *pProperties) {
+    return util_GetLayerProperties(1, &global_layer, pCount, pProperties);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
+                                                                    VkExtensionProperties *pProperties) {
+    if (pLayerName && !strcmp(pLayerName, global_layer.layerName))
+        return util_GetExtensionProperties(ARRAY_SIZE(instance_extensions), instance_extensions, pCount, pProperties);
+
+    return VK_ERROR_LAYER_NOT_PRESENT;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char *pLayerName,
+                                                                  uint32_t *pCount, VkExtensionProperties *pProperties) {
+    if (pLayerName && !strcmp(pLayerName, global_layer.layerName)) return util_GetExtensionProperties(ARRAY_SIZE(device_extensions), device_extensions, pCount, pProperties);
+    assert(physicalDevice);
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), layer_data_map);
+    return layer_data->instance_dispatch_table.EnumerateDeviceExtensionProperties(physicalDevice, pLayerName, pCount, pProperties);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
+                                              VkInstance *pInstance) {
+    VkLayerInstanceCreateInfo* chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
+
+    assert(chain_info->u.pLayerInfo);
+    PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
+    PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance)fpGetInstanceProcAddr(NULL, "vkCreateInstance");
+    if (fpCreateInstance == NULL) return VK_ERROR_INITIALIZATION_FAILED;
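+    // Advance the link info for the next element of the chain before calling down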
+    chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
+    uint32_t specified_version = (pCreateInfo->pApplicationInfo ? pCreateInfo->pApplicationInfo->apiVersion : VK_API_VERSION_1_0);
+    uint32_t api_version = (specified_version < VK_API_VERSION_1_1) ? VK_API_VERSION_1_0 : VK_API_VERSION_1_1;
+    auto report_data = new debug_report_data{};
+    report_data->instance_pnext_chain = SafePnextCopy(pCreateInfo->pNext);
+    ActivateInstanceDebugCallbacks(report_data);
+
+    CHECK_ENABLED local_enables {};
+    CHECK_DISABLED local_disables {};
+    const auto *validation_features_ext = lvl_find_in_chain<VkValidationFeaturesEXT>(pCreateInfo->pNext);
+    if (validation_features_ext) {
+        SetValidationFeatures(&local_disables, &local_enables, validation_features_ext);
+    }
+    const auto *validation_flags_ext = lvl_find_in_chain<VkValidationFlagsEXT>(pCreateInfo->pNext);
+    if (validation_flags_ext) {
+        SetValidationFlags(&local_disables, validation_flags_ext);
+    }
+    ProcessConfigAndEnvSettings(OBJECT_LAYER_DESCRIPTION, &local_enables, &local_disables);
+
+    // Create temporary dispatch vector for pre-calls until instance is created
+    std::vector<ValidationObject*> local_object_dispatch;
+    // Add VOs to dispatch vector. Order here will be the validation dispatch order!
+    auto thread_checker = new ThreadSafety(nullptr);
+    if (!local_disables.thread_safety) {
+        local_object_dispatch.emplace_back(thread_checker);
+    }
+    thread_checker->container_type = LayerObjectTypeThreading;
+    thread_checker->api_version = api_version;
+    thread_checker->report_data = report_data;
+    auto parameter_validation = new StatelessValidation;
+    if (!local_disables.stateless_checks) {
+        local_object_dispatch.emplace_back(parameter_validation);
+    }
+    parameter_validation->container_type = LayerObjectTypeParameterValidation;
+    parameter_validation->api_version = api_version;
+    parameter_validation->report_data = report_data;
+    auto object_tracker = new ObjectLifetimes;
+    if (!local_disables.object_tracking) {
+        local_object_dispatch.emplace_back(object_tracker);
+    }
+    object_tracker->container_type = LayerObjectTypeObjectTracker;
+    object_tracker->api_version = api_version;
+    object_tracker->report_data = report_data;
+    auto core_checks = new CoreChecks;
+    if (!local_disables.core_checks) {
+        local_object_dispatch.emplace_back(core_checks);
+    }
+    core_checks->container_type = LayerObjectTypeCoreValidation;
+    core_checks->api_version = api_version;
+    core_checks->report_data = report_data;
+    auto best_practices = new BestPractices;
+    if (local_enables.best_practices) {
+        local_object_dispatch.emplace_back(best_practices);
+    }
+    best_practices->container_type = LayerObjectTypeBestPractices;
+    best_practices->api_version = api_version;
+    best_practices->report_data = report_data;
+    auto gpu_assisted = new GpuAssisted;
+    if (local_enables.gpu_validation) {
+        local_object_dispatch.emplace_back(gpu_assisted);
+    }
+    gpu_assisted->container_type = LayerObjectTypeGpuAssisted;
+
+    // If handle wrapping is disabled via the ValidationFeatures extension, override build flag
+    if (local_disables.handle_wrapping) {
+        wrap_handles = false;
+    }
+
+    // Init dispatch array and call registration functions
+    for (auto intercept : local_object_dispatch) {
+        (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateInstance(pCreateInfo, pAllocator, pInstance);
+    }
+    for (auto intercept : local_object_dispatch) {
+        intercept->PreCallRecordCreateInstance(pCreateInfo, pAllocator, pInstance);
+    }
+
+    VkResult result = fpCreateInstance(pCreateInfo, pAllocator, pInstance);
+    if (result != VK_SUCCESS) return result;
+
+    auto framework = GetLayerDataPtr(get_dispatch_key(*pInstance), layer_data_map);
+
+    framework->object_dispatch = local_object_dispatch;
+    framework->container_type = LayerObjectTypeInstance;
+    framework->disabled = local_disables;
+    framework->enabled = local_enables;
+
+    framework->instance = *pInstance;
+    layer_init_instance_dispatch_table(*pInstance, &framework->instance_dispatch_table, fpGetInstanceProcAddr);
+    framework->report_data = report_data;
+    framework->api_version = api_version;
+    framework->instance_extensions.InitFromInstanceCreateInfo(specified_version, pCreateInfo);
+
+    layer_debug_messenger_actions(framework->report_data, pAllocator, OBJECT_LAYER_DESCRIPTION);
+
+    object_tracker->instance_dispatch_table = framework->instance_dispatch_table;
+    object_tracker->enabled = framework->enabled;
+    object_tracker->disabled = framework->disabled;
+    thread_checker->instance_dispatch_table = framework->instance_dispatch_table;
+    thread_checker->enabled = framework->enabled;
+    thread_checker->disabled = framework->disabled;
+    parameter_validation->instance_dispatch_table = framework->instance_dispatch_table;
+    parameter_validation->enabled = framework->enabled;
+    parameter_validation->disabled = framework->disabled;
+    core_checks->instance_dispatch_table = framework->instance_dispatch_table;
+    core_checks->instance = *pInstance;
+    core_checks->enabled = framework->enabled;
+    core_checks->disabled = framework->disabled;
+    core_checks->instance_state = core_checks;
+    best_practices->instance_dispatch_table = framework->instance_dispatch_table;
+    best_practices->enabled = framework->enabled;
+    best_practices->disabled = framework->disabled;
+    gpu_assisted->instance_dispatch_table = framework->instance_dispatch_table;
+    gpu_assisted->enabled = framework->enabled;
+    gpu_assisted->disabled = framework->disabled;
+
+    for (auto intercept : framework->object_dispatch) {
+        intercept->PostCallRecordCreateInstance(pCreateInfo, pAllocator, pInstance, result);
+    }
+
+    InstanceExtensionWhitelist(framework, pCreateInfo, *pInstance);
+    DeactivateInstanceDebugCallbacks(report_data);
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator) {
+    dispatch_key key = get_dispatch_key(instance);
+    auto layer_data = GetLayerDataPtr(key, layer_data_map);
+    ActivateInstanceDebugCallbacks(layer_data->report_data);
+
+    """ + precallvalidate_loop + """
+        auto lock = intercept->read_lock();
+        (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroyInstance(instance, pAllocator);
+    }
+    """ + precallrecord_loop + """
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroyInstance(instance, pAllocator);
+    }
+
+    layer_data->instance_dispatch_table.DestroyInstance(instance, pAllocator);
+
+    """ + postcallrecord_loop + """
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroyInstance(instance, pAllocator);
+    }
+
+    DeactivateInstanceDebugCallbacks(layer_data->report_data);
+    FreePnextChain(layer_data->report_data->instance_pnext_chain);
+
+    layer_debug_utils_destroy_instance(layer_data->report_data);
+
+    for (auto item = layer_data->object_dispatch.begin(); item != layer_data->object_dispatch.end(); item++) {
+        delete *item;
+    }
+    FreeLayerDataPtr(key, layer_data_map);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
+                                            const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
+    VkLayerDeviceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
+
+    auto instance_interceptor = GetLayerDataPtr(get_dispatch_key(gpu), layer_data_map);
+
+    PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
+    PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
+    PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice)fpGetInstanceProcAddr(instance_interceptor->instance, "vkCreateDevice");
+    if (fpCreateDevice == NULL) {
+        return VK_ERROR_INITIALIZATION_FAILED;
+    }
+    chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
+
+    // Get physical device limits for device
+    VkPhysicalDeviceProperties device_properties = {};
+    instance_interceptor->instance_dispatch_table.GetPhysicalDeviceProperties(gpu, &device_properties);
+
+    // Set up the validation tables based on the application API version from the instance and the capabilities of the device driver
+    uint32_t effective_api_version = std::min(device_properties.apiVersion, instance_interceptor->api_version);
+
+    DeviceExtensions device_extensions = {};
+    device_extensions.InitFromDeviceCreateInfo(&instance_interceptor->instance_extensions, effective_api_version, pCreateInfo);
+    for (auto item : instance_interceptor->object_dispatch) {
+        item->device_extensions = device_extensions;
+    }
+
+    safe_VkDeviceCreateInfo modified_create_info(pCreateInfo);
+
+    bool skip = false;
+    for (auto intercept : instance_interceptor->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateDevice(gpu, pCreateInfo, pAllocator, pDevice);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : instance_interceptor->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateDevice(gpu, pCreateInfo, pAllocator, pDevice, &modified_create_info);
+    }
+
+    VkResult result = fpCreateDevice(gpu, reinterpret_cast<VkDeviceCreateInfo *>(&modified_create_info), pAllocator, pDevice);
+    if (result != VK_SUCCESS) {
+        return result;
+    }
+
+    auto device_interceptor = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
+    device_interceptor->container_type = LayerObjectTypeDevice;
+
+    // Save local info in device object
+    device_interceptor->phys_dev_properties.properties = device_properties;
+    device_interceptor->api_version = device_interceptor->device_extensions.InitFromDeviceCreateInfo(
+        &instance_interceptor->instance_extensions, effective_api_version, pCreateInfo);
+    device_interceptor->device_extensions = device_extensions;
+
+    layer_init_device_dispatch_table(*pDevice, &device_interceptor->device_dispatch_table, fpGetDeviceProcAddr);
+
+    device_interceptor->device = *pDevice;
+    device_interceptor->physical_device = gpu;
+    device_interceptor->instance = instance_interceptor->instance;
+    device_interceptor->report_data = instance_interceptor->report_data;
+
+    // Note that this defines the order in which the layer validation objects are called
+    auto thread_safety = new ThreadSafety(reinterpret_cast<ThreadSafety *>(instance_interceptor->GetValidationObject(instance_interceptor->object_dispatch, LayerObjectTypeThreading)));
+    thread_safety->container_type = LayerObjectTypeThreading;
+    if (!instance_interceptor->disabled.thread_safety) {
+        device_interceptor->object_dispatch.emplace_back(thread_safety);
+    }
+    auto stateless_validation = new StatelessValidation;
+    stateless_validation->container_type = LayerObjectTypeParameterValidation;
+    if (!instance_interceptor->disabled.stateless_checks) {
+        device_interceptor->object_dispatch.emplace_back(stateless_validation);
+    }
+    auto object_tracker = new ObjectLifetimes;
+    object_tracker->container_type = LayerObjectTypeObjectTracker;
+    if (!instance_interceptor->disabled.object_tracking) {
+        device_interceptor->object_dispatch.emplace_back(object_tracker);
+    }
+    auto core_checks = new CoreChecks;
+    core_checks->container_type = LayerObjectTypeCoreValidation;
+    core_checks->instance_state = reinterpret_cast<CoreChecks *>(
+        core_checks->GetValidationObject(instance_interceptor->object_dispatch, LayerObjectTypeCoreValidation));
+    if (!instance_interceptor->disabled.core_checks) {
+        // Only enable the command counters when needed.
+        if (device_extensions.vk_khr_performance_query) {
+            auto command_counter = new CommandCounter(core_checks);
+            command_counter->container_type = LayerObjectTypeDevice;
+            device_interceptor->object_dispatch.emplace_back(command_counter);
+        }
+        device_interceptor->object_dispatch.emplace_back(core_checks);
+    }
+    auto best_practices = new BestPractices;
+    best_practices->container_type = LayerObjectTypeBestPractices;
+    best_practices->instance_state = reinterpret_cast<BestPractices *>(
+        best_practices->GetValidationObject(instance_interceptor->object_dispatch, LayerObjectTypeBestPractices));
+    if (instance_interceptor->enabled.best_practices) {
+        device_interceptor->object_dispatch.emplace_back(best_practices);
+    }
+    auto gpu_assisted = new GpuAssisted;
+    gpu_assisted->container_type = LayerObjectTypeGpuAssisted;
+    gpu_assisted->instance_state = reinterpret_cast<GpuAssisted *>(
+        gpu_assisted->GetValidationObject(instance_interceptor->object_dispatch, LayerObjectTypeGpuAssisted));
+    if (instance_interceptor->enabled.gpu_validation) {
+        device_interceptor->object_dispatch.emplace_back(gpu_assisted);
+    }
+
+    // Set per-intercept common data items
+    for (auto dev_intercept : device_interceptor->object_dispatch) {
+        dev_intercept->device = *pDevice;
+        dev_intercept->physical_device = gpu;
+        dev_intercept->instance = instance_interceptor->instance;
+        dev_intercept->report_data = device_interceptor->report_data;
+        dev_intercept->device_dispatch_table = device_interceptor->device_dispatch_table;
+        dev_intercept->api_version = device_interceptor->api_version;
+        dev_intercept->disabled = instance_interceptor->disabled;
+        dev_intercept->enabled = instance_interceptor->enabled;
+        dev_intercept->instance_dispatch_table = instance_interceptor->instance_dispatch_table;
+        dev_intercept->instance_extensions = instance_interceptor->instance_extensions;
+        dev_intercept->device_extensions = device_interceptor->device_extensions;
+    }
+
+    for (auto intercept : instance_interceptor->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateDevice(gpu, pCreateInfo, pAllocator, pDevice, result);
+    }
+
+    DeviceExtensionWhitelist(device_interceptor, pCreateInfo, *pDevice);
+
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
+    dispatch_key key = get_dispatch_key(device);
+    auto layer_data = GetLayerDataPtr(key, layer_data_map);
+    """ + precallvalidate_loop + """
+        auto lock = intercept->read_lock();
+        (const_cast<const ValidationObject*>(intercept))->PreCallValidateDestroyDevice(device, pAllocator);
+    }
+    """ + precallrecord_loop + """
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordDestroyDevice(device, pAllocator);
+    }
+
+    layer_data->device_dispatch_table.DestroyDevice(device, pAllocator);
+
+    """ + postcallrecord_loop + """
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordDestroyDevice(device, pAllocator);
+    }
+
+    for (auto item = layer_data->object_dispatch.begin(); item != layer_data->object_dispatch.end(); item++) {
+        delete *item;
+    }
+    FreeLayerDataPtr(key, layer_data_map);
+}
+
+
+// Special-case APIs for which core_validation needs custom parameter lists and/or modifies parameters
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateGraphicsPipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkGraphicsPipelineCreateInfo*         pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+
+    create_graphics_pipeline_api_state cgpl_state[LayerObjectTypeMaxEnum]{};
+
+    for (auto intercept : layer_data->object_dispatch) {
+        cgpl_state[intercept->container_type].pCreateInfos = pCreateInfos;
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, &(cgpl_state[intercept->container_type]));
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, &(cgpl_state[intercept->container_type]));
+    }
+
+    auto usepCreateInfos = (!cgpl_state[LayerObjectTypeGpuAssisted].pCreateInfos) ? pCreateInfos : cgpl_state[LayerObjectTypeGpuAssisted].pCreateInfos;
+
+    VkResult result = DispatchCreateGraphicsPipelines(device, pipelineCache, createInfoCount, usepCreateInfos, pAllocator, pPipelines);
+
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result, &(cgpl_state[intercept->container_type]));
+    }
+    return result;
+}
+
+// This API saves some core_validation pipeline state on the stack for performance purposes
+VKAPI_ATTR VkResult VKAPI_CALL CreateComputePipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkComputePipelineCreateInfo*          pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+
+    create_compute_pipeline_api_state ccpl_state[LayerObjectTypeMaxEnum]{};
+
+    for (auto intercept : layer_data->object_dispatch) {
+        ccpl_state[intercept->container_type].pCreateInfos = pCreateInfos;
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, &(ccpl_state[intercept->container_type]));
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, &(ccpl_state[intercept->container_type]));
+    }
+
+    auto usepCreateInfos = (!ccpl_state[LayerObjectTypeGpuAssisted].pCreateInfos) ? pCreateInfos : ccpl_state[LayerObjectTypeGpuAssisted].pCreateInfos;
+
+    VkResult result = DispatchCreateComputePipelines(device, pipelineCache, createInfoCount, usepCreateInfos, pAllocator, pPipelines);
+
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result, &(ccpl_state[intercept->container_type]));
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateRayTracingPipelinesNV(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkRayTracingPipelineCreateInfoNV*     pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+
+    create_ray_tracing_pipeline_api_state crtpl_state[LayerObjectTypeMaxEnum]{};
+
+    for (auto intercept : layer_data->object_dispatch) {
+        crtpl_state[intercept->container_type].pCreateInfos = pCreateInfos;
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos,
+                                                                      pAllocator, pPipelines, &(crtpl_state[intercept->container_type]));
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator,
+                                                            pPipelines, &(crtpl_state[intercept->container_type]));
+    }
+
+    VkResult result = DispatchCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator,
+                                                             pPipelines, result, &(crtpl_state[intercept->container_type]));
+    }
+    return result;
+}
+
+// This API needs the ability to modify a down-chain parameter
+VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineLayout(
+    VkDevice                                    device,
+    const VkPipelineLayoutCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineLayout*                           pPipelineLayout) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+
+    create_pipeline_layout_api_state cpl_state{};
+    cpl_state.modified_create_info = *pCreateInfo;
+
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout, &cpl_state);
+    }
+    VkResult result = DispatchCreatePipelineLayout(device, &cpl_state.modified_create_info, pAllocator, pPipelineLayout);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout, result);
+    }
+    return result;
+}
+
+// This API needs some local stack data for performance reasons and also may modify a parameter
+VKAPI_ATTR VkResult VKAPI_CALL CreateShaderModule(
+    VkDevice                                    device,
+    const VkShaderModuleCreateInfo*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkShaderModule*                             pShaderModule) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+
+    create_shader_module_api_state csm_state{};
+    csm_state.instrumented_create_info = *pCreateInfo;
+
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule, &csm_state);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule, &csm_state);
+    }
+    VkResult result = DispatchCreateShaderModule(device, &csm_state.instrumented_create_info, pAllocator, pShaderModule);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule, result, &csm_state);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL AllocateDescriptorSets(
+    VkDevice                                    device,
+    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
+    VkDescriptorSet*                            pDescriptorSets) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+
+    cvdescriptorset::AllocateDescriptorSetsData ads_state(pAllocateInfo->descriptorSetCount);
+
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets, &ads_state);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
+    }
+    VkResult result = DispatchAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets, result, &ads_state);
+    }
+    return result;
+}
+
+// This API needs the ability to modify a down-chain parameter
+VKAPI_ATTR VkResult VKAPI_CALL CreateBuffer(
+    VkDevice                                    device,
+    const VkBufferCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBuffer*                                   pBuffer) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    bool skip = false;
+
+    create_buffer_api_state cb_state{};
+    cb_state.modified_create_info = *pCreateInfo;
+
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->read_lock();
+        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidateCreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
+        if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
+    }
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PreCallRecordCreateBuffer(device, pCreateInfo, pAllocator, pBuffer, &cb_state);
+    }
+    VkResult result = DispatchCreateBuffer(device, &cb_state.modified_create_info, pAllocator, pBuffer);
+    for (auto intercept : layer_data->object_dispatch) {
+        auto lock = intercept->write_lock();
+        intercept->PostCallRecordCreateBuffer(device, pCreateInfo, pAllocator, pBuffer, result);
+    }
+    return result;
+}
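+// A note on the pattern above (descriptive only): each wrapper copies the application's
+// create info into a local *_state struct, lets the PreCallRecord hooks rewrite that copy,
+// and then dispatches the (possibly modified) copy down the chain, while the validate and
+// record hooks themselves continue to receive the application's original pCreateInfo.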
+
+
+
+// ValidationCache APIs do not dispatch
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateValidationCacheEXT(
+    VkDevice                                    device,
+    const VkValidationCacheCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkValidationCacheEXT*                       pValidationCache) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    VkResult result = VK_SUCCESS;
+
+    ValidationObject *validation_data = layer_data->GetValidationObject(layer_data->object_dispatch, LayerObjectTypeCoreValidation);
+    if (validation_data) {
+        auto lock = validation_data->write_lock();
+        result = validation_data->CoreLayerCreateValidationCacheEXT(device, pCreateInfo, pAllocator, pValidationCache);
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL DestroyValidationCacheEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    const VkAllocationCallbacks*                pAllocator) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+
+    ValidationObject *validation_data = layer_data->GetValidationObject(layer_data->object_dispatch, LayerObjectTypeCoreValidation);
+    if (validation_data) {
+        auto lock = validation_data->write_lock();
+        validation_data->CoreLayerDestroyValidationCacheEXT(device, validationCache, pAllocator);
+    }
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL MergeValidationCachesEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkValidationCacheEXT*                 pSrcCaches) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    VkResult result = VK_SUCCESS;
+
+    ValidationObject *validation_data = layer_data->GetValidationObject(layer_data->object_dispatch, LayerObjectTypeCoreValidation);
+    if (validation_data) {
+        auto lock = validation_data->write_lock();
+        result = validation_data->CoreLayerMergeValidationCachesEXT(device, dstCache, srcCacheCount, pSrcCaches);
+    }
+    return result;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL GetValidationCacheDataEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    size_t*                                     pDataSize,
+    void*                                       pData) {
+    auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    VkResult result = VK_SUCCESS;
+
+    ValidationObject *validation_data = layer_data->GetValidationObject(layer_data->object_dispatch, LayerObjectTypeCoreValidation);
+    if (validation_data) {
+        auto lock = validation_data->write_lock();
+        result = validation_data->CoreLayerGetValidationCacheDataEXT(device, validationCache, pDataSize, pData);
+    }
+    return result;
+
+}"""
+
+    inline_custom_validation_class_definitions = """
+        virtual VkResult CoreLayerCreateValidationCacheEXT(VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache) { return VK_SUCCESS; };
+        virtual void CoreLayerDestroyValidationCacheEXT(VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator) {};
+        virtual VkResult CoreLayerMergeValidationCachesEXT(VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches)  { return VK_SUCCESS; };
+        virtual VkResult CoreLayerGetValidationCacheDataEXT(VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData)  { return VK_SUCCESS; };
+
+        // Allow additional state parameter for CreateGraphicsPipelines
+        virtual bool PreCallValidateCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* cgpl_state) const {
+            return PreCallValidateCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+        };
+        virtual void PreCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* cgpl_state) {
+            PreCallRecordCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+        };
+        virtual void PostCallRecordCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result, void* cgpl_state) {
+            PostCallRecordCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result);
+        };
+
+        // Allow additional state parameter for CreateComputePipelines
+        virtual bool PreCallValidateCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* pipe_state) const {
+            return PreCallValidateCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+        };
+        virtual void PreCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* ccpl_state) {
+            PreCallRecordCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+        };
+        virtual void PostCallRecordCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result, void* pipe_state) {
+            PostCallRecordCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result);
+        };
+
+        // Allow additional state parameter for CreateRayTracingPipelinesNV
+        virtual bool PreCallValidateCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* pipe_state) const {
+            return PreCallValidateCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+        };
+        virtual void PreCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, void* ccpl_state) {
+            PreCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+        };
+        virtual void PostCallRecordCreateRayTracingPipelinesNV(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, VkResult result, void* pipe_state) {
+            PostCallRecordCreateRayTracingPipelinesNV(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, result);
+        };
+
+        // Allow modification of a down-chain parameter for CreatePipelineLayout
+        virtual void PreCallRecordCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout, void *cpl_state) {
+            PreCallRecordCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout);
+        };
+
+        // Enable the CreateShaderModule API to take an extra argument for state preservation and parameter modification
+        virtual bool PreCallValidateCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule, void* csm_state) const {
+            return PreCallValidateCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
+        };
+        virtual void PreCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule, void* csm_state) {
+            PreCallRecordCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
+        };
+        virtual void PostCallRecordCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule, VkResult result, void* csm_state) {
+            PostCallRecordCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule, result);
+        };
+
+        // Allow AllocateDescriptorSets to use some local stack storage for performance purposes
+        virtual bool PreCallValidateAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets, void* ads_state) const {
+            return PreCallValidateAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
+        };
+        virtual void PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets, VkResult result, void* ads_state)  {
+            PostCallRecordAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets, result);
+        };
+
+        // Allow modification of a down-chain parameter for CreateBuffer
+        virtual void PreCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer, void *cb_state) {
+            PreCallRecordCreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
+        };
+
+        // Modify a parameter to CreateDevice
+        virtual void PreCallRecordCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice, safe_VkDeviceCreateInfo *modified_create_info) {
+            PreCallRecordCreateDevice(physicalDevice, pCreateInfo, pAllocator, pDevice);
+        };
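+
+        // Note that each extended-signature virtual above simply forwards to the plain hook
+        // by default, so a validation object that does not need the extra state argument
+        // still sees the normal Pre/Post calls; an object that does need it (for example,
+        // one that rewrites the create info) overrides the extended form instead.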
+"""
+
+    inline_custom_source_postamble = """
+// loader-layer interface v0, just wrappers since there is only a layer
+
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
+                                                                                      VkExtensionProperties *pProperties) {
+    return vulkan_layer_chassis::EnumerateInstanceExtensionProperties(pLayerName, pCount, pProperties);
+}
+
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(uint32_t *pCount,
+                                                                                  VkLayerProperties *pProperties) {
+    return vulkan_layer_chassis::EnumerateInstanceLayerProperties(pCount, pProperties);
+}
+
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
+                                                                                VkLayerProperties *pProperties) {
+    // the layer command handles VK_NULL_HANDLE just fine internally
+    assert(physicalDevice == VK_NULL_HANDLE);
+    return vulkan_layer_chassis::EnumerateDeviceLayerProperties(VK_NULL_HANDLE, pCount, pProperties);
+}
+
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
+                                                                                    const char *pLayerName, uint32_t *pCount,
+                                                                                    VkExtensionProperties *pProperties) {
+    // the layer command handles VK_NULL_HANDLE just fine internally
+    assert(physicalDevice == VK_NULL_HANDLE);
+    return vulkan_layer_chassis::EnumerateDeviceExtensionProperties(VK_NULL_HANDLE, pLayerName, pCount, pProperties);
+}
+
+VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice dev, const char *funcName) {
+    return vulkan_layer_chassis::GetDeviceProcAddr(dev, funcName);
+}
+
+VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char *funcName) {
+    return vulkan_layer_chassis::GetInstanceProcAddr(instance, funcName);
+}
+
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkNegotiateLoaderLayerInterfaceVersion(VkNegotiateLayerInterface *pVersionStruct) {
+    assert(pVersionStruct != NULL);
+    assert(pVersionStruct->sType == LAYER_NEGOTIATE_INTERFACE_STRUCT);
+
+    // Fill in the function pointers if our version is at least capable of having the structure contain them.
+    if (pVersionStruct->loaderLayerInterfaceVersion >= 2) {
+        pVersionStruct->pfnGetInstanceProcAddr = vkGetInstanceProcAddr;
+        pVersionStruct->pfnGetDeviceProcAddr = vkGetDeviceProcAddr;
+        pVersionStruct->pfnGetPhysicalDeviceProcAddr = nullptr;
+    }
+
+    return VK_SUCCESS;
+}"""
+
+
+    def __init__(self,
+                 errFile = sys.stderr,
+                 warnFile = sys.stderr,
+                 diagFile = sys.stdout):
+        OutputGenerator.__init__(self, errFile, warnFile, diagFile)
+        # Internal state - accumulators for different inner block text
+        self.sections = dict([(section, []) for section in self.ALL_SECTIONS])
+        self.intercepts = []
+        self.layer_factory = ''                     # String containing base layer factory class definition
+
+    # Check if the parameter passed in is a pointer to an array
+    def paramIsArray(self, param):
+        return param.attrib.get('len') is not None
+
+    # Check if the parameter passed in is a pointer
+    def paramIsPointer(self, param):
+        ispointer = False
+        for elem in param:
+            if elem.tag == 'type' and elem.tail is not None and '*' in elem.tail:
+                ispointer = True
+        return ispointer
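+    # In the registry XML the pointer marker lives in the tail text of the <type>
+    # element, e.g. (illustrative):
+    #   <param>const <type>VkInstanceCreateInfo</type>* <name>pCreateInfo</name></param>
+    # so paramIsPointer() looks for '*' in elem.tail rather than in the element text.
+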
+
+    #
+    #
+    def beginFile(self, genOpts):
+        OutputGenerator.beginFile(self, genOpts)
+        # Output Copyright
+        write(self.inline_copyright_message, file=self.outFile)
+        # Multiple inclusion protection
+        self.header = False
+        if (self.genOpts.filename and 'h' == self.genOpts.filename[-1]):
+            self.header = True
+            write('#pragma once', file=self.outFile)
+            self.newline()
+        if self.header:
+            write(self.inline_custom_header_preamble, file=self.outFile)
+        else:
+            write(self.inline_custom_source_preamble, file=self.outFile)
+        self.layer_factory += self.inline_custom_header_class_definition
+    #
+    #
+    def endFile(self):
+        # Finish C++ namespace and multiple inclusion protection
+        self.newline()
+        if not self.header:
+            # Record intercepted procedures
+            write('// Map of intercepted ApiName to its associated function data', file=self.outFile)
+            write('#ifdef _MSC_VER', file=self.outFile)
+            write('#pragma warning( suppress: 6262 ) // VS analysis: this uses more than 16 kiB, which is fine here at global scope', file=self.outFile)
+            write('#endif', file=self.outFile)
+            write('const std::unordered_map<std::string, function_data> name_to_funcptr_map = {', file=self.outFile)
+            write('\n'.join(self.intercepts), file=self.outFile)
+            write('};\n', file=self.outFile)
+            self.newline()
+            write('} // namespace vulkan_layer_chassis', file=self.outFile)
+        if self.header:
+            self.newline()
+            # Output Layer Factory Class Definitions
+            self.layer_factory += self.inline_custom_validation_class_definitions
+            self.layer_factory += '};\n\n'
+            self.layer_factory += 'extern small_unordered_map<void*, ValidationObject*, 2> layer_data_map;'
+            write(self.layer_factory, file=self.outFile)
+        else:
+            write(self.inline_custom_source_postamble, file=self.outFile)
+        # Finish processing in superclass
+        OutputGenerator.endFile(self)
+
+    def beginFeature(self, interface, emit):
+        # Start processing in superclass
+        OutputGenerator.beginFeature(self, interface, emit)
+        # Get feature extra protect
+        self.featureExtraProtect = GetFeatureProtect(interface)
+        # Accumulate includes, defines, types, enums, function pointer typedefs, and function prototypes separately for this
+        # feature. They're only printed in endFeature().
+        self.sections = dict([(section, []) for section in self.ALL_SECTIONS])
+
+    def endFeature(self):
+        # Actually write the interface to the output file.
+        if (self.emit):
+            self.newline()
+            # If type declarations are needed by other features based on this one, it may be necessary to suppress the ExtraProtect,
+            # or move it below the 'for section...' loop.
+            if (self.featureExtraProtect != None):
+                write('#ifdef', self.featureExtraProtect, file=self.outFile)
+            for section in self.TYPE_SECTIONS:
+                contents = self.sections[section]
+                if contents:
+                    write('\n'.join(contents), file=self.outFile)
+                    self.newline()
+            if (self.sections['command']):
+                write('\n'.join(self.sections['command']), end=u'', file=self.outFile)
+                self.newline()
+            if (self.featureExtraProtect != None):
+                write('#endif //', self.featureExtraProtect, file=self.outFile)
+        # Finish processing in superclass
+        OutputGenerator.endFeature(self)
+    #
+    # Append a definition to the specified section
+    def appendSection(self, section, text):
+        self.sections[section].append(text)
+    #
+    # Type generation
+    def genType(self, typeinfo, name, alias):
+        pass
+    #
+    # Struct (e.g. C "struct" type) generation. This is a special case of the <type> tag where the contents are
+    # interpreted as a set of <member> tags instead of freeform C type declarations. The <member> tags are just like <param>
+    # tags - they are a declaration of a struct or union member. Only simple member declarations are supported (no nested
+    # structs etc.)
+    def genStruct(self, typeinfo, typeName):
+        OutputGenerator.genStruct(self, typeinfo, typeName)
+        body = 'typedef ' + typeinfo.elem.get('category') + ' ' + typeName + ' {\n'
+        # paramdecl = self.makeCParamDecl(typeinfo.elem, self.genOpts.alignFuncParam)
+        for member in typeinfo.elem.findall('.//member'):
+            body += self.makeCParamDecl(member, self.genOpts.alignFuncParam)
+            body += ';\n'
+        body += '} ' + typeName + ';\n'
+        self.appendSection('struct', body)
+    #
+    # Group (e.g. C "enum" type) generation. These are concatenated together with other types.
+    def genGroup(self, groupinfo, groupName, alias):
+        pass
+    # Enumerant generation
+    # <enum> tags may specify their values in several ways, but are usually just integers.
+    def genEnum(self, enuminfo, name, alias):
+        pass
+    #
+    # Customize Cdecl for layer factory base class
+    def BaseClassCdecl(self, elem, name):
+        raw = self.makeCDecls(elem)[1]
+
+        # Toss everything before the undecorated name
+        prototype = raw.split("VKAPI_PTR *PFN_vk")[1]
+        prototype = prototype.replace(")", "", 1)
+        prototype = prototype.replace(";", " {};")
+
+        # Build up pre/post call virtual function declarations
+        pre_call_validate = 'virtual bool PreCallValidate' + prototype
+        pre_call_validate = pre_call_validate.replace("{}", "const { return false; }")
+        pre_call_record = 'virtual void PreCallRecord' + prototype
+        post_call_record = 'virtual void PostCallRecord' + prototype
+        resulttype = elem.find('proto/type')
+        if resulttype.text == 'VkResult':
+            post_call_record = post_call_record.replace(')', ', VkResult result)')
+        elif resulttype.text == 'VkDeviceAddress':
+            post_call_record = post_call_record.replace(')', ', VkDeviceAddress result)')
+        return '        %s\n        %s\n        %s\n' % (pre_call_validate, pre_call_record, post_call_record)
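+    # Illustrative example (hypothetical command, not generated output): for a typedef such as
+    #   typedef void (VKAPI_PTR *PFN_vkDestroyFence)(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator);
+    # BaseClassCdecl() yields roughly:
+    #   virtual bool PreCallValidateDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator) const { return false; };
+    #   virtual void PreCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator) {};
+    #   virtual void PostCallRecordDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator) {};
+    # with ', VkResult result' appended to the PostCallRecord form when the command returns VkResult.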
+    #
+    # Command generation
+    def genCmd(self, cmdinfo, name, alias):
+        ignore_functions = [
+        'vkEnumerateInstanceVersion',
+        ]
+
+        if name in ignore_functions:
+            return
+
+        if self.header: # In the header declare all intercepts
+            self.appendSection('command', '')
+            self.appendSection('command', self.makeCDecls(cmdinfo.elem)[0])
+            if (self.featureExtraProtect != None):
+                self.layer_factory += '#ifdef %s\n' % self.featureExtraProtect
+            # Update base class with virtual function declarations
+            if 'ValidationCache' not in name:
+                self.layer_factory += self.BaseClassCdecl(cmdinfo.elem, name)
+            if (self.featureExtraProtect != None):
+                self.layer_factory += '#endif\n'
+            return
+
+        is_instance = 'false'
+        dispatchable_type = cmdinfo.elem.find('param/type').text
+        if dispatchable_type in ["VkPhysicalDevice", "VkInstance"] or name == 'vkCreateInstance':
+            is_instance = 'true'
+
+        if name in self.manual_functions:
+            self.intercepts += [ '    {"%s", {%s, (void*)%s}},' % (name, is_instance, name[2:]) ]
+            return
+        # Record that the function will be intercepted
+        if (self.featureExtraProtect != None):
+            self.intercepts += [ '#ifdef %s' % self.featureExtraProtect ]
+        self.intercepts += [ '    {"%s", {%s, (void*)%s}},' % (name, is_instance, name[2:]) ]
+        if (self.featureExtraProtect != None):
+            self.intercepts += [ '#endif' ]
+        OutputGenerator.genCmd(self, cmdinfo, name, alias)
+        #
+        decls = self.makeCDecls(cmdinfo.elem)
+        self.appendSection('command', '')
+        self.appendSection('command', '%s {' % decls[0][:-1])
+        # Setup common to call wrappers. First parameter is always dispatchable
+        dispatchable_name = cmdinfo.elem.find('param/name').text
+        self.appendSection('command', '    auto layer_data = GetLayerDataPtr(get_dispatch_key(%s), layer_data_map);' % (dispatchable_name))
+        api_function_name = cmdinfo.elem.attrib.get('name')
+        params = cmdinfo.elem.findall('param/name')
+        paramstext = ', '.join([str(param.text) for param in params])
+        API = api_function_name.replace('vk','Dispatch') + '('
+
+        # Declare result variable, if any.
+        return_map = {
+            'PFN_vkVoidFunction': 'return nullptr;',
+            'VkBool32': 'return VK_FALSE;',
+            'VkDeviceAddress': 'return 0;',
+            'VkResult': 'return VK_ERROR_VALIDATION_FAILED_EXT;',
+            'void': 'return;',
+            'uint32_t': 'return 0;',
+            'uint64_t': 'return 0;'
+            }
+        resulttype = cmdinfo.elem.find('proto/type')
+        assignresult = ''
+        if (resulttype.text != 'void'):
+            assignresult = resulttype.text + ' result = '
+
+        # Set up skip and locking
+        self.appendSection('command', '    bool skip = false;')
+
+        # Generate pre-call validation source code
+        self.appendSection('command', '    %s' % self.precallvalidate_loop)
+        self.appendSection('command', '        auto lock = intercept->read_lock();')
+        self.appendSection('command', '        skip |= (const_cast<const ValidationObject*>(intercept))->PreCallValidate%s(%s);' % (api_function_name[2:], paramstext))
+        self.appendSection('command', '        if (skip) %s' % return_map[resulttype.text])
+        self.appendSection('command', '    }')
+
+        # Generate pre-call state recording source code
+        self.appendSection('command', '    %s' % self.precallrecord_loop)
+        self.appendSection('command', '        auto lock = intercept->write_lock();')
+        self.appendSection('command', '        intercept->PreCallRecord%s(%s);' % (api_function_name[2:], paramstext))
+        self.appendSection('command', '    }')
+
+        # Insert pre-dispatch debug utils function call
+        if name in self.pre_dispatch_debug_utils_functions:
+            self.appendSection('command', '    %s' % self.pre_dispatch_debug_utils_functions[name])
+
+        # Output dispatch (down-chain) function call
+        self.appendSection('command', '    ' + assignresult + API + paramstext + ');')
+
+        # Insert post-dispatch debug utils function call
+        if name in self.post_dispatch_debug_utils_functions:
+            self.appendSection('command', '    %s' % self.post_dispatch_debug_utils_functions[name])
+
+        # Generate post-call object processing source code
+        self.appendSection('command', '    %s' % self.postcallrecord_loop)
+        returnparam = ''
+        if (resulttype.text == 'VkResult' or resulttype.text == 'VkDeviceAddress'):
+            returnparam = ', result'
+        self.appendSection('command', '        auto lock = intercept->write_lock();')
+        self.appendSection('command', '        intercept->PostCallRecord%s(%s%s);' % (api_function_name[2:], paramstext, returnparam))
+        self.appendSection('command', '    }')
+        # Return result variable, if any.
+        if (resulttype.text != 'void'):
+            self.appendSection('command', '    return result;')
+        self.appendSection('command', '}')
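+    # Sketch of the emitted wrapper for a hypothetical void-returning command such as
+    # vkDestroyFence (same shape as the hand-written interceptors in the source preamble):
+    #   VKAPI_ATTR void VKAPI_CALL DestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator) {
+    #       auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
+    #       bool skip = false;
+    #       for (auto intercept : layer_data->object_dispatch) { /* read lock  */ skip |= ...PreCallValidateDestroyFence(...); if (skip) return; }
+    #       for (auto intercept : layer_data->object_dispatch) { /* write lock */ intercept->PreCallRecordDestroyFence(...); }
+    #       DispatchDestroyFence(device, fence, pAllocator);
+    #       for (auto intercept : layer_data->object_dispatch) { /* write lock */ intercept->PostCallRecordDestroyFence(...); }
+    #   }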
+    #
+    # Override makeProtoName to drop the "vk" prefix
+    def makeProtoName(self, name, tail):
+        return self.genOpts.apientry + name[2:] + tail
diff --git a/src/third_party/vulkan-validation-layers/src/scripts/layer_dispatch_table_generator.py b/src/third_party/vulkan-validation-layers/src/scripts/layer_dispatch_table_generator.py
new file mode 100644
index 0000000..85b9cd7
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/scripts/layer_dispatch_table_generator.py
@@ -0,0 +1,370 @@
+#!/usr/bin/python3 -i
+#
+# Copyright (c) 2015-2019 The Khronos Group Inc.
+# Copyright (c) 2015-2019 Valve Corporation
+# Copyright (c) 2015-2019 LunarG, Inc.
+# Copyright (c) 2015-2019 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Mark Young <marky@lunarg.com>
+# Author: Mark Lobodzinski <mark@lunarg.com>
+
+import os,re,sys
+import xml.etree.ElementTree as etree
+from generator import *
+from collections import namedtuple
+from common_codegen import *
+
+ADD_INST_CMDS = ['vkCreateInstance',
+                 'vkEnumerateInstanceExtensionProperties',
+                 'vkEnumerateInstanceLayerProperties',
+                 'vkEnumerateInstanceVersion']
+
+#
+# LayerDispatchTableGeneratorOptions - subclass of GeneratorOptions.
+class LayerDispatchTableGeneratorOptions(GeneratorOptions):
+    def __init__(self,
+                 conventions = None,
+                 filename = None,
+                 directory = '.',
+                 apiname = None,
+                 profile = None,
+                 versions = '.*',
+                 emitversions = '.*',
+                 defaultExtensions = None,
+                 addExtensions = None,
+                 removeExtensions = None,
+                 emitExtensions = None,
+                 sortProcedure = regSortFeatures,
+                 prefixText = "",
+                 genFuncPointers = True,
+                 protectFile = True,
+                 protectFeature = True,
+                 apicall = '',
+                 apientry = '',
+                 apientryp = '',
+                 indentFuncProto = True,
+                 indentFuncPointer = False,
+                 alignFuncParam = 0,
+                 expandEnumerants = True):
+        GeneratorOptions.__init__(self, conventions, filename, directory, apiname, profile,
+                                  versions, emitversions, defaultExtensions,
+                                  addExtensions, removeExtensions, emitExtensions, sortProcedure)
+        # beginFile() emits this generator's own copyright block, so no prefix text is kept.
+        self.prefixText      = None
+        self.apicall         = apicall
+        self.apientry        = apientry
+        self.apientryp       = apientryp
+        self.alignFuncParam  = alignFuncParam
+        self.expandEnumerants = expandEnumerants
+
+#
+# LayerDispatchTableOutputGenerator - subclass of OutputGenerator.
+# Generates dispatch table helper header files for LVL
+class LayerDispatchTableOutputGenerator(OutputGenerator):
+    """Generate dispatch tables header based on XML element attributes"""
+    def __init__(self,
+                 errFile = sys.stderr,
+                 warnFile = sys.stderr,
+                 diagFile = sys.stdout):
+        OutputGenerator.__init__(self, errFile, warnFile, diagFile)
+
+        # Internal state - accumulators for different inner block text
+        self.ext_instance_dispatch_list = []  # List of extension entries for instance dispatch list
+        self.ext_device_dispatch_list = []    # List of extension entries for device dispatch list
+        self.core_commands = []               # List of CommandData records for core Vulkan commands
+        self.ext_commands = []                # List of CommandData records for extension Vulkan commands
+        self.CommandParam = namedtuple('CommandParam', ['type', 'name', 'cdecl'])
+        self.CommandData = namedtuple('CommandData', ['name', 'ext_name', 'ext_type', 'protect', 'return_type', 'handle_type', 'params', 'cdecl'])
+
+    #
+    # Called once at the beginning of each run
+    def beginFile(self, genOpts):
+        OutputGenerator.beginFile(self, genOpts)
+
+        # Initialize members that require the tree
+        self.handle_types = GetHandleTypes(self.registry.tree)
+
+        # User-supplied prefix text, if any (list of strings)
+        if (genOpts.prefixText):
+            for s in genOpts.prefixText:
+                write(s, file=self.outFile)
+
+        # File Comment
+        file_comment = '// *** THIS FILE IS GENERATED - DO NOT EDIT ***\n'
+        file_comment += '// See layer_dispatch_table_generator.py for modifications\n'
+        write(file_comment, file=self.outFile)
+
+        # Copyright Notice
+        copyright =  '/*\n'
+        copyright += ' * Copyright (c) 2015-2019 The Khronos Group Inc.\n'
+        copyright += ' * Copyright (c) 2015-2019 Valve Corporation\n'
+        copyright += ' * Copyright (c) 2015-2019 LunarG, Inc.\n'
+        copyright += ' *\n'
+        copyright += ' * Licensed under the Apache License, Version 2.0 (the "License");\n'
+        copyright += ' * you may not use this file except in compliance with the License.\n'
+        copyright += ' * You may obtain a copy of the License at\n'
+        copyright += ' *\n'
+        copyright += ' *     http://www.apache.org/licenses/LICENSE-2.0\n'
+        copyright += ' *\n'
+        copyright += ' * Unless required by applicable law or agreed to in writing, software\n'
+        copyright += ' * distributed under the License is distributed on an "AS IS" BASIS,\n'
+        copyright += ' * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n'
+        copyright += ' * See the License for the specific language governing permissions and\n'
+        copyright += ' * limitations under the License.\n'
+        copyright += ' *\n'
+        copyright += ' * Author: Mark Lobodzinski <mark@lunarg.com>\n'
+        copyright += ' * Author: Mark Young <marky@lunarg.com>\n'
+        copyright += ' */\n'
+
+        preamble = ''
+        if self.genOpts.filename == 'vk_layer_dispatch_table.h':
+            preamble += '#pragma once\n'
+            preamble += '\n'
+            preamble += 'typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_GetPhysicalDeviceProcAddr)(VkInstance instance, const char* pName);\n'
+
+        write(copyright, file=self.outFile)
+        write(preamble, file=self.outFile)
+
+    #
+    # Generate and write the dispatch tables to the output file
+    def endFile(self):
+        file_data = ''
+        if self.genOpts.filename == 'vk_layer_dispatch_table.h':
+            file_data += self.OutputLayerInstanceDispatchTable()
+            file_data += self.OutputLayerDeviceDispatchTable()
+
+        write(file_data, file=self.outFile)
+
+        # Finish processing in superclass
+        OutputGenerator.endFile(self)
+
+    def beginFeature(self, interface, emit):
+        # Start processing in superclass
+        OutputGenerator.beginFeature(self, interface, emit)
+        self.featureExtraProtect = GetFeatureProtect(interface)
+
+        enums = interface[0].findall('enum')
+        self.currentExtension = ''
+
+        self.type = interface.get('type')
+        self.num_commands = 0
+        name = interface.get('name')
+        self.currentExtension = name
+
+    #
+    # Process commands, adding to appropriate dispatch tables
+    def genCmd(self, cmdinfo, name, alias):
+        OutputGenerator.genCmd(self, cmdinfo, name, alias)
+
+        # Get first param type
+        params = cmdinfo.elem.findall('param')
+        info = self.getTypeNameTuple(params[0])
+
+        self.num_commands += 1
+
+        if 'android' not in name:
+            self.AddCommandToDispatchList(self.currentExtension, self.type, name, cmdinfo, info[0])
+
+    def endFeature(self):
+        # Finish processing in superclass
+        OutputGenerator.endFeature(self)
+
+    #
+    # Retrieve the value of the 'len' attribute, if any
+    def getLen(self, param):
+        result = None
+        len = param.attrib.get('len')
+        if len and len != 'null-terminated':
+            # For string arrays, 'len' can look like 'count,null-terminated',
+            # indicating that we have a null terminated array of strings.  We
+            # strip the null-terminated from the 'len' field and only return
+            # the parameter specifying the string count
+            if 'null-terminated' in len:
+                result = len.split(',')[0]
+            else:
+                result = len
+            result = str(result).replace('::', '->')
+        return result
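+    # Examples of the mapping performed above (values illustrative):
+    #   len="descriptorSetCount"                        -> "descriptorSetCount"
+    #   len="pAllocateInfo::descriptorSetCount"         -> "pAllocateInfo->descriptorSetCount"
+    #   len="enabledLayerCount,null-terminated"         -> "enabledLayerCount"
+    #   len="null-terminated" (plain string parameter)  -> None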
+
+    #
+    # Determine if this API should be ignored or added to the instance or device dispatch table
+    def AddCommandToDispatchList(self, extension_name, extension_type, name, cmdinfo, handle_type):
+        return_type = cmdinfo.elem.find('proto/type')
+        if (return_type is not None and return_type.text == 'void'):
+            return_type = None
+
+        cmd_params = []
+
+        # Generate a list of commands for use in printing the necessary
+        # core instance terminator prototypes
+        params = cmdinfo.elem.findall('param')
+        lens = set()
+        for param in params:
+            len = self.getLen(param)
+            if len:
+                lens.add(len)
+        paramsInfo = []
+        for param in params:
+            paramInfo = self.getTypeNameTuple(param)
+            param_type = paramInfo[0]
+            param_name = paramInfo[1]
+            param_cdecl = self.makeCParamDecl(param, 0)
+            cmd_params.append(self.CommandParam(type=param_type, name=param_name,
+                                                cdecl=param_cdecl))
+
+        if handle_type in self.handle_types and handle_type != 'VkInstance' and handle_type != 'VkPhysicalDevice':
+            # The Core Vulkan code will be wrapped in a feature called VK_VERSION_#_#
+            # For example: VK_VERSION_1_0 wraps the core 1.0 Vulkan functionality
+            if 'VK_VERSION_' in extension_name:
+                self.core_commands.append(
+                    self.CommandData(name=name, ext_name=extension_name,
+                                     ext_type='device',
+                                     protect=self.featureExtraProtect,
+                                     return_type = return_type,
+                                     handle_type = handle_type,
+                                     params = cmd_params,
+                                     cdecl=self.makeCDecls(cmdinfo.elem)[0]))
+            else:
+                self.ext_device_dispatch_list.append((name, self.featureExtraProtect))
+                self.ext_commands.append(
+                    self.CommandData(name=name, ext_name=extension_name,
+                                     ext_type=extension_type,
+                                     protect=self.featureExtraProtect,
+                                     return_type = return_type,
+                                     handle_type = handle_type,
+                                     params = cmd_params,
+                                     cdecl=self.makeCDecls(cmdinfo.elem)[0]))
+        else:
+            # The Core Vulkan code will be wrapped in a feature called VK_VERSION_#_#
+            # For example: VK_VERSION_1_0 wraps the core 1.0 Vulkan functionality
+            if 'VK_VERSION_' in extension_name:
+                self.core_commands.append(
+                    self.CommandData(name=name, ext_name=extension_name,
+                                     ext_type='instance',
+                                     protect=self.featureExtraProtect,
+                                     return_type = return_type,
+                                     handle_type = handle_type,
+                                     params = cmd_params,
+                                     cdecl=self.makeCDecls(cmdinfo.elem)[0]))
+
+            else:
+                self.ext_instance_dispatch_list.append((name, self.featureExtraProtect))
+                self.ext_commands.append(
+                    self.CommandData(name=name, ext_name=extension_name,
+                                     ext_type=extension_type,
+                                     protect=self.featureExtraProtect,
+                                     return_type = return_type,
+                                     handle_type = handle_type,
+                                     params = cmd_params,
+                                     cdecl=self.makeCDecls(cmdinfo.elem)[0]))
+
+    #
+    # Retrieve the type and name for a parameter
+    def getTypeNameTuple(self, param):
+        type = ''
+        name = ''
+        for elem in param:
+            if elem.tag == 'type':
+                type = noneStr(elem.text)
+            elif elem.tag == 'name':
+                name = noneStr(elem.text)
+        return (type, name)
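+    # For example, a registry parameter written as (illustrative)
+    #   <param>const <type>VkDeviceCreateInfo</type>* <name>pCreateInfo</name></param>
+    # yields the tuple ('VkDeviceCreateInfo', 'pCreateInfo').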
+
+    #
+    # Create a layer instance dispatch table from the appropriate list and return it as a string
+    def OutputLayerInstanceDispatchTable(self):
+        commands = []
+        table = ''
+        cur_extension_name = ''
+
+        table += '// Instance function pointer dispatch table\n'
+        table += 'typedef struct VkLayerInstanceDispatchTable_ {\n'
+
+        # First add in an entry for GetPhysicalDeviceProcAddr.  This never
+        # shows up in the XML or header, so we have to add it manually.
+        table += '    // Manually add in GetPhysicalDeviceProcAddr entry\n'
+        table += '    PFN_GetPhysicalDeviceProcAddr GetPhysicalDeviceProcAddr;\n'
+
+        for x in range(0, 2):
+            if x == 0:
+                commands = self.core_commands
+            else:
+                commands = self.ext_commands
+
+            for cur_cmd in commands:
+                is_inst_handle_type = cur_cmd.name in ADD_INST_CMDS or cur_cmd.handle_type == 'VkInstance' or cur_cmd.handle_type == 'VkPhysicalDevice'
+                if is_inst_handle_type:
+
+                    if cur_cmd.ext_name != cur_extension_name:
+                        if 'VK_VERSION_' in cur_cmd.ext_name:
+                            table += '\n    // ---- Core %s commands\n' % cur_cmd.ext_name[11:]
+                        else:
+                            table += '\n    // ---- %s extension commands\n' % cur_cmd.ext_name
+                        cur_extension_name = cur_cmd.ext_name
+
+                    # Remove 'vk' from proto name
+                    base_name = cur_cmd.name[2:]
+
+                    if cur_cmd.protect is not None:
+                        table += '#ifdef %s\n' % cur_cmd.protect
+
+                    table += '    PFN_%s %s;\n' % (cur_cmd.name, base_name)
+
+                    if cur_cmd.protect is not None:
+                        table += '#endif // %s\n' % cur_cmd.protect
+
+        table += '} VkLayerInstanceDispatchTable;\n\n'
+        return table
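+    # The emitted table looks roughly like (entries illustrative, order follows the registry):
+    #   typedef struct VkLayerInstanceDispatchTable_ {
+    #       PFN_GetPhysicalDeviceProcAddr GetPhysicalDeviceProcAddr;
+    #       // ---- Core 1_0 commands
+    #       PFN_vkCreateInstance CreateInstance;
+    #       PFN_vkDestroyInstance DestroyInstance;
+    #       // ---- VK_KHR_surface extension commands
+    #       PFN_vkDestroySurfaceKHR DestroySurfaceKHR;
+    #       ...
+    #   } VkLayerInstanceDispatchTable;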
+
+    #
+    # Create a layer device dispatch table from the appropriate list and return it as a string
+    def OutputLayerDeviceDispatchTable(self):
+        commands = []
+        table = ''
+        cur_extension_name = ''
+
+        table += '// Device function pointer dispatch table\n'
+        table += 'typedef struct VkLayerDispatchTable_ {\n'
+
+        for x in range(0, 2):
+            if x == 0:
+                commands = self.core_commands
+            else:
+                commands = self.ext_commands
+
+            for cur_cmd in commands:
+                is_inst_handle_type = cur_cmd.name in ADD_INST_CMDS or cur_cmd.handle_type == 'VkInstance' or cur_cmd.handle_type == 'VkPhysicalDevice'
+                if not is_inst_handle_type:
+
+                    if cur_cmd.ext_name != cur_extension_name:
+                        if 'VK_VERSION_' in cur_cmd.ext_name:
+                            table += '\n    // ---- Core %s commands\n' % cur_cmd.ext_name[11:]
+                        else:
+                            table += '\n    // ---- %s extension commands\n' % cur_cmd.ext_name
+                        cur_extension_name = cur_cmd.ext_name
+
+                    # Remove 'vk' from proto name
+                    base_name = cur_cmd.name[2:]
+
+                    if cur_cmd.protect is not None:
+                        table += '#ifdef %s\n' % cur_cmd.protect
+
+                    table += '    PFN_%s %s;\n' % (cur_cmd.name, base_name)
+
+                    if cur_cmd.protect is not None:
+                        table += '#endif // %s\n' % cur_cmd.protect
+
+        table += '} VkLayerDispatchTable;\n\n'
+        return table
diff --git a/src/third_party/vulkan-validation-layers/src/scripts/lvl_genvk.py b/src/third_party/vulkan-validation-layers/src/scripts/lvl_genvk.py
new file mode 100644
index 0000000..afeac4c
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/scripts/lvl_genvk.py
@@ -0,0 +1,822 @@
+#!/usr/bin/python3
+#
+# Copyright (c) 2013-2019 The Khronos Group Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse, cProfile, pdb, string, sys, time, os
+
+# Simple timer functions
+startTime = None
+
+def startTimer(timeit):
+    global startTime
+    if timeit:
+        startTime = time.process_time()
+
+def endTimer(timeit, msg):
+    global startTime
+    if timeit:
+        endTime = time.process_time()
+        write(msg, endTime - startTime, file=sys.stderr)
+        startTime = None
+
+# Turn a list of strings into a regexp string matching exactly those strings
+def makeREstring(list, default = None):
+    if len(list) > 0 or default is None:
+        return '^(' + '|'.join(list) + ')$'
+    else:
+        return default
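+# For example (illustrative): makeREstring(['VK_KHR_surface', 'VK_KHR_swapchain']) returns
+# '^(VK_KHR_surface|VK_KHR_swapchain)$', while makeREstring([], '.*') falls back to '.*'.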
+
+# Returns a dictionary of [ generator function, generator options ] indexed
+# by specified short names. The generator options incorporate the following
+# parameters:
+#
+# args is a parsed argument object; see below for the fields that are used.
+def makeGenOpts(args):
+    global genOpts
+    genOpts = {}
+
+    # Default class of extensions to include, or None
+    defaultExtensions = args.defaultExtensions
+
+    # Additional extensions to include (list of extensions)
+    extensions = args.extension
+
+    # Extensions to remove (list of extensions)
+    removeExtensions = args.removeExtensions
+
+    # Extensions to emit (list of extensions)
+    emitExtensions = args.emitExtensions
+
+    # Features to include (list of features)
+    features = args.feature
+
+    # Whether to disable inclusion protect in headers
+    protect = args.protect
+
+    # Output target directory
+    directory = args.directory
+
+    # Descriptive names for various regexp patterns used to select
+    # versions and extensions
+    allFeatures     = allExtensions = '.*'
+    noFeatures      = noExtensions = None
+
+    # Turn lists of names/patterns into matching regular expressions
+    addExtensionsPat     = makeREstring(extensions, None)
+    removeExtensionsPat  = makeREstring(removeExtensions, None)
+    emitExtensionsPat    = makeREstring(emitExtensions, allExtensions)
+    featuresPat          = makeREstring(features, allFeatures)
+
+    # Copyright text prefixing all headers (list of strings).
+    prefixStrings = [
+        '/*',
+        '** Copyright (c) 2015-2019 The Khronos Group Inc.',
+        '**',
+        '** Licensed under the Apache License, Version 2.0 (the "License");',
+        '** you may not use this file except in compliance with the License.',
+        '** You may obtain a copy of the License at',
+        '**',
+        '**     http://www.apache.org/licenses/LICENSE-2.0',
+        '**',
+        '** Unless required by applicable law or agreed to in writing, software',
+        '** distributed under the License is distributed on an "AS IS" BASIS,',
+        '** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.',
+        '** See the License for the specific language governing permissions and',
+        '** limitations under the License.',
+        '*/',
+        ''
+    ]
+
+    # Text specific to Vulkan headers
+    vkPrefixStrings = [
+        '/*',
+        '** This header is generated from the Khronos Vulkan XML API Registry.',
+        '**',
+        '*/',
+        ''
+    ]
+
+    # Defaults for generating re-inclusion protection wrappers (or not)
+    protectFeature = protect
+
+    # An API style convention object
+    conventions = VulkanConventions()
+
+    # ValidationLayer Generators
+    # Options for thread safety header code-generation
+    genOpts['thread_safety.h'] = [
+          ThreadOutputGenerator,
+          ThreadGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'thread_safety.h',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = 'vulkan',
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            protectFeature    = False,
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48,
+            expandEnumerants = False)
+        ]
+
+    # Options for thread safety source code-generation
+    genOpts['thread_safety.cpp'] = [
+          ThreadOutputGenerator,
+          ThreadGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'thread_safety.cpp',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = 'vulkan',
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            protectFeature    = False,
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48,
+            expandEnumerants = False)
+        ]
+
+    # Options for stateless validation source file
+    genOpts['parameter_validation.cpp'] = [
+          ParameterValidationOutputGenerator,
+          ParameterValidationGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'parameter_validation.cpp',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = 'vulkan',
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48,
+            expandEnumerants  = False,
+            valid_usage_path  = args.scripts)
+          ]
+
+    # Options for stateless validation source file
+    genOpts['parameter_validation.h'] = [
+          ParameterValidationOutputGenerator,
+          ParameterValidationGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'parameter_validation.h',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = 'vulkan',
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48,
+            expandEnumerants  = False,
+            valid_usage_path  = args.scripts)
+          ]
+
+    # Options for object_tracker code-generated validation routines
+    genOpts['object_tracker.cpp'] = [
+          ObjectTrackerOutputGenerator,
+          ObjectTrackerGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'object_tracker.cpp',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = 'vulkan',
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            protectFeature    = False,
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48,
+            expandEnumerants  = False,
+            valid_usage_path  = args.scripts)
+        ]
+
+    # Options for object_tracker code-generated prototypes
+    genOpts['object_tracker.h'] = [
+          ObjectTrackerOutputGenerator,
+          ObjectTrackerGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'object_tracker.h',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = 'vulkan',
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            protectFeature    = False,
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48,
+            expandEnumerants  = False,
+            valid_usage_path  = args.scripts)
+        ]
+
+    # Options for dispatch table helper generator
+    genOpts['vk_dispatch_table_helper.h'] = [
+          DispatchTableHelperOutputGenerator,
+          DispatchTableHelperOutputGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'vk_dispatch_table_helper.h',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = 'vulkan',
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48,
+            expandEnumerants = False)
+        ]
+
+    # lvt_file generator options for lvt_function_pointers.h
+    genOpts['lvt_function_pointers.h'] = [
+          LvtFileOutputGenerator,
+          LvtFileOutputGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'lvt_function_pointers.h',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = 'vulkan',
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48,
+            expandEnumerants = False,
+            lvt_file_type  = 'function_pointer_header')
+        ]
+
+    # lvt_file generator options for lvt_function_pointers.cpp
+    genOpts['lvt_function_pointers.cpp'] = [
+          LvtFileOutputGenerator,
+          LvtFileOutputGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'lvt_function_pointers.cpp',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = 'vulkan',
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48,
+            expandEnumerants = False,
+            lvt_file_type  = 'function_pointer_source')
+        ]
+
+    # Options for Layer dispatch table generator
+    genOpts['vk_layer_dispatch_table.h'] = [
+          LayerDispatchTableOutputGenerator,
+          LayerDispatchTableGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'vk_layer_dispatch_table.h',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = 'vulkan',
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48,
+            expandEnumerants = False)
+        ]
+
+    # Helper file generator options for vk_enum_string_helper.h
+    genOpts['vk_enum_string_helper.h'] = [
+          HelperFileOutputGenerator,
+          HelperFileOutputGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'vk_enum_string_helper.h',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = 'vulkan',
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48,
+            expandEnumerants  = False,
+            helper_file_type  = 'enum_string_header')
+        ]
+
+    # Helper file generator options for vk_safe_struct.h
+    genOpts['vk_safe_struct.h'] = [
+          HelperFileOutputGenerator,
+          HelperFileOutputGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'vk_safe_struct.h',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = 'vulkan',
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48,
+            expandEnumerants  = False,
+            helper_file_type  = 'safe_struct_header')
+        ]
+
+    # Helper file generator options for vk_safe_struct.cpp
+    genOpts['vk_safe_struct.cpp'] = [
+          HelperFileOutputGenerator,
+          HelperFileOutputGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'vk_safe_struct.cpp',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = 'vulkan',
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48,
+            expandEnumerants  = False,
+            helper_file_type  = 'safe_struct_source')
+        ]
+
+    # Helper file generator options for vk_object_types.h
+    genOpts['vk_object_types.h'] = [
+          HelperFileOutputGenerator,
+          HelperFileOutputGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'vk_object_types.h',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = 'vulkan',
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48,
+            expandEnumerants  = False,
+            helper_file_type  = 'object_types_header')
+        ]
+
+    # Helper file generator options for vk_extension_helper.h
+    genOpts['vk_extension_helper.h'] = [
+          HelperFileOutputGenerator,
+          HelperFileOutputGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'vk_extension_helper.h',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = 'vulkan',
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48,
+            expandEnumerants  = False,
+            helper_file_type  = 'extension_helper_header')
+        ]
+
+    # Helper file generator options for vk_typemap_helper.h
+    genOpts['vk_typemap_helper.h'] = [
+          HelperFileOutputGenerator,
+          HelperFileOutputGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'vk_typemap_helper.h',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = 'vulkan',
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            protectFeature    = False,
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48,
+            expandEnumerants  = False,
+            helper_file_type  = 'typemap_helper_header')
+        ]
+
+    # Layer chassis related generation structs
+    # Options for layer chassis header
+    genOpts['chassis.h'] = [
+          LayerChassisOutputGenerator,
+          LayerChassisGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'chassis.h',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = 'vulkan',
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48,
+            helper_file_type  = 'layer_chassis_header',
+            expandEnumerants = False)
+        ]
+
+    # Options for layer chassis source file
+    genOpts['chassis.cpp'] = [
+          LayerChassisOutputGenerator,
+          LayerChassisGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'chassis.cpp',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = 'vulkan',
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48,
+            helper_file_type  = 'layer_chassis_source',
+            expandEnumerants = False)
+        ]
+
+    # Options for layer chassis dispatch source file
+    genOpts['layer_chassis_dispatch.cpp'] = [
+          LayerChassisDispatchOutputGenerator,
+          LayerChassisDispatchGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'layer_chassis_dispatch.cpp',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = 'vulkan',
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            protectFeature    = False,
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48,
+            expandEnumerants = False)
+        ]
+
+    # Options for layer chassis dispatch header file
+    genOpts['layer_chassis_dispatch.h'] = [
+          LayerChassisDispatchOutputGenerator,
+          LayerChassisDispatchGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'layer_chassis_dispatch.h',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = 'vulkan',
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            protectFeature    = False,
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48,
+            expandEnumerants = False)
+        ]
+
+    # Generator options for command_counter_helper.h
+    genOpts['command_counter_helper.h'] = [
+          CommandCounterOutputGenerator,
+          CommandCounterOutputGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'command_counter_helper.h',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = 'vulkan',
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48,
+            expandEnumerants = False,
+            lvt_file_type  = 'function_pointer_header')
+        ]
+
+    # Generator options for command_counter_helper.cpp
+    genOpts['command_counter_helper.cpp'] = [
+          CommandCounterOutputGenerator,
+          CommandCounterOutputGeneratorOptions(
+            conventions       = conventions,
+            filename          = 'command_counter_helper.cpp',
+            directory         = directory,
+            apiname           = 'vulkan',
+            profile           = None,
+            versions          = featuresPat,
+            emitversions      = featuresPat,
+            defaultExtensions = 'vulkan',
+            addExtensions     = addExtensionsPat,
+            removeExtensions  = removeExtensionsPat,
+            emitExtensions    = emitExtensionsPat,
+            prefixText        = prefixStrings + vkPrefixStrings,
+            apicall           = 'VKAPI_ATTR ',
+            apientry          = 'VKAPI_CALL ',
+            apientryp         = 'VKAPI_PTR *',
+            alignFuncParam    = 48,
+            expandEnumerants = False,
+            lvt_file_type  = 'function_pointer_source')
+        ]
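+
+    # For illustration: each genOpts entry maps an output filename to a
+    # [generator class, generator options] pair; genTarget() below simply looks up
+    # genOpts[args.target], e.g.
+    #   createGenerator, options = genOpts['chassis.cpp']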
+
+
+# Generate a target based on the options in the matching genOpts{} object.
+# This is encapsulated in a function so it can be profiled and/or timed.
+# The args parameter is a parsed argument object containing the following
+# fields that are used:
+#   target - target to generate
+#   directory - directory to generate it in
+#   protect - True if re-inclusion wrappers should be created
+#   extensions - list of additional extensions to include in generated interfaces
+def genTarget(args):
+    global genOpts
+
+    # Create generator options with specified parameters
+    makeGenOpts(args)
+
+    if (args.target in genOpts.keys()):
+        createGenerator = genOpts[args.target][0]
+        options = genOpts[args.target][1]
+
+        if not args.quiet:
+            write('* Building', options.filename, file=sys.stderr)
+            write('* options.versions          =', options.versions, file=sys.stderr)
+            write('* options.emitversions      =', options.emitversions, file=sys.stderr)
+            write('* options.defaultExtensions =', options.defaultExtensions, file=sys.stderr)
+            write('* options.addExtensions     =', options.addExtensions, file=sys.stderr)
+            write('* options.removeExtensions  =', options.removeExtensions, file=sys.stderr)
+            write('* options.emitExtensions    =', options.emitExtensions, file=sys.stderr)
+
+        startTimer(args.time)
+        gen = createGenerator(errFile=errWarn,
+                              warnFile=errWarn,
+                              diagFile=diag)
+        reg.setGenerator(gen)
+        reg.apiGen(options)
+
+        if not args.quiet:
+            write('* Generated', options.filename, file=sys.stderr)
+        endTimer(args.time, '* Time to generate ' + options.filename + ' =')
+    else:
+        write('No generator options for unknown target:',
+              args.target, file=sys.stderr)
+
+# -feature name
+# -extension name
+# For both, "name" may be a single name, or a space-separated list
+# of names, or a regular expression.
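+#
+# Hypothetical example invocations (illustrative only; the actual script name and
+# output paths depend on how the build invokes this generator):
+#   python3 <this_script> -registry vk.xml -scripts . -o generated chassis.cpp
+#   python3 <this_script> -registry vk.xml -extension 'VK_KHR_.*' object_tracker.h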
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+
+    parser.add_argument('-defaultExtensions', action='store',
+                        default='vulkan',
+                        help='Specify a single class of extensions to add to targets')
+    parser.add_argument('-extension', action='append',
+                        default=[],
+                        help='Specify an extension or extensions to add to targets')
+    parser.add_argument('-removeExtensions', action='append',
+                        default=[],
+                        help='Specify an extension or extensions to remove from targets')
+    parser.add_argument('-emitExtensions', action='append',
+                        default=[],
+                        help='Specify an extension or extensions to emit in targets')
+    parser.add_argument('-feature', action='append',
+                        default=[],
+                        help='Specify a core API feature name or names to add to targets')
+    parser.add_argument('-debug', action='store_true',
+                        help='Enable debugging')
+    parser.add_argument('-dump', action='store_true',
+                        help='Enable dump to stderr')
+    parser.add_argument('-diagfile', action='store',
+                        default=None,
+                        help='Write diagnostics to specified file')
+    parser.add_argument('-errfile', action='store',
+                        default=None,
+                        help='Write errors and warnings to specified file instead of stderr')
+    parser.add_argument('-noprotect', dest='protect', action='store_false',
+                        help='Disable inclusion protection in output headers')
+    parser.add_argument('-profile', action='store_true',
+                        help='Enable profiling')
+    parser.add_argument('-registry', action='store',
+                        default='vk.xml',
+                        help='Use specified registry file instead of vk.xml')
+    parser.add_argument('-time', action='store_true',
+                        help='Enable timing')
+    parser.add_argument('-validate', action='store_true',
+                        help='Enable group validation')
+    parser.add_argument('-o', action='store', dest='directory',
+                        default='.',
+                        help='Create target and related files in specified directory')
+    parser.add_argument('target', metavar='target', nargs='?',
+                        help='Specify target')
+    parser.add_argument('-quiet', action='store_true', default=True,
+                        help='Suppress script output during normal execution.')
+    parser.add_argument('-verbose', action='store_false', dest='quiet', default=True,
+                        help='Enable script output during normal execution.')
+
+    # This argument tells us where to find additional scripts, e.g. from the Vulkan-Headers registry
+    parser.add_argument('-scripts', action='store',
+                        help='Find additional scripts in this directory')
+
+    args = parser.parse_args()
+
+    # Default the scripts path to be the same directory as the registry
+    if not args.scripts:
+        args.scripts = os.path.dirname(args.registry)
+
+    scripts_directory_path = os.path.dirname(os.path.abspath(__file__))
+    registry_headers_path = os.path.join(scripts_directory_path, args.scripts)
+    sys.path.insert(0, registry_headers_path)
+
+    from reg import *
+    from generator import write
+    from cgenerator import CGeneratorOptions, COutputGenerator
+
+    # ValidationLayer Generator Modifications
+    from thread_safety_generator import  ThreadGeneratorOptions, ThreadOutputGenerator
+    from parameter_validation_generator import ParameterValidationGeneratorOptions, ParameterValidationOutputGenerator
+    from object_tracker_generator import ObjectTrackerGeneratorOptions, ObjectTrackerOutputGenerator
+    from dispatch_table_helper_generator import DispatchTableHelperOutputGenerator, DispatchTableHelperOutputGeneratorOptions
+    from helper_file_generator import HelperFileOutputGenerator, HelperFileOutputGeneratorOptions
+    from layer_dispatch_table_generator import LayerDispatchTableOutputGenerator, LayerDispatchTableGeneratorOptions
+    from layer_chassis_generator import LayerChassisOutputGenerator, LayerChassisGeneratorOptions
+    from layer_chassis_dispatch_generator import LayerChassisDispatchOutputGenerator, LayerChassisDispatchGeneratorOptions
+    from lvt_file_generator import LvtFileOutputGenerator, LvtFileOutputGeneratorOptions
+    from command_counter_generator import CommandCounterOutputGenerator, CommandCounterOutputGeneratorOptions
+
+    # Temporary workaround for vkconventions python2 compatibility
+    import abc; abc.ABC = abc.ABCMeta('ABC', (object,), {})
+    from vkconventions import VulkanConventions
+
+    # This splits arguments which are space-separated lists
+    args.feature = [name for arg in args.feature for name in arg.split()]
+    args.extension = [name for arg in args.extension for name in arg.split()]
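+    # For illustration: a single '-extension "VK_KHR_surface VK_KHR_swapchain"' argument
+    # becomes ['VK_KHR_surface', 'VK_KHR_swapchain'] after this expansion.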
+
+    # Load & parse registry
+    reg = Registry()
+
+    startTimer(args.time)
+    tree = etree.parse(args.registry)
+    endTimer(args.time, '* Time to make ElementTree =')
+
+    if args.debug:
+        pdb.run('reg.loadElementTree(tree)')
+    else:
+        startTimer(args.time)
+        reg.loadElementTree(tree)
+        endTimer(args.time, '* Time to parse ElementTree =')
+
+    if (args.validate):
+        reg.validateGroups()
+
+    if (args.dump):
+        write('* Dumping registry to regdump.txt', file=sys.stderr)
+        reg.dumpReg(filehandle = open('regdump.txt', 'w', encoding='utf-8'))
+
+    # create error/warning & diagnostic files
+    if (args.errfile):
+        errWarn = open(args.errfile, 'w', encoding='utf-8')
+    else:
+        errWarn = sys.stderr
+
+    if (args.diagfile):
+        diag = open(args.diagfile, 'w', encoding='utf-8')
+    else:
+        diag = None
+
+    if (args.debug):
+        pdb.run('genTarget(args)')
+    elif (args.profile):
+        import cProfile, pstats
+        cProfile.run('genTarget(args)', 'profile.txt')
+        p = pstats.Stats('profile.txt')
+        p.strip_dirs().sort_stats('time').print_stats(50)
+    else:
+        genTarget(args)
diff --git a/src/third_party/vulkan-validation-layers/src/scripts/lvt_file_generator.py b/src/third_party/vulkan-validation-layers/src/scripts/lvt_file_generator.py
new file mode 100644
index 0000000..5254f8a
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/scripts/lvt_file_generator.py
@@ -0,0 +1,258 @@
+#!/usr/bin/python3 -i
+#
+# Copyright (c) 2015-2019 The Khronos Group Inc.
+# Copyright (c) 2015-2019 Valve Corporation
+# Copyright (c) 2015-2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Mark Lobodzinski <mark@lunarg.com>
+
+import os,re,sys
+import xml.etree.ElementTree as etree
+from generator import *
+from collections import namedtuple
+from common_codegen import *
+
+funcptr_source_preamble = '''
+#include "lvt_function_pointers.h"
+#include <stdio.h>
+
+namespace vk {
+
+'''
+
+funcptr_header_preamble = '''
+#include <vulkan/vulkan.h>
+#include "vk_loader_platform.h"
+
+namespace vk {
+
+'''
+
+#
+# LvtFileOutputGeneratorOptions - subclass of GeneratorOptions.
+class LvtFileOutputGeneratorOptions(GeneratorOptions):
+    def __init__(self,
+                 conventions = None,
+                 filename = None,
+                 directory = '.',
+                 apiname = None,
+                 profile = None,
+                 versions = '.*',
+                 emitversions = '.*',
+                 defaultExtensions = None,
+                 addExtensions = None,
+                 removeExtensions = None,
+                 emitExtensions = None,
+                 sortProcedure = regSortFeatures,
+                 prefixText = "",
+                 genFuncPointers = True,
+                 apicall = '',
+                 apientry = '',
+                 apientryp = '',
+                 alignFuncParam = 0,
+                 expandEnumerants = True,
+                 lvt_file_type = ''):
+        GeneratorOptions.__init__(self, conventions, filename, directory, apiname, profile,
+                                  versions, emitversions, defaultExtensions,
+                                  addExtensions, removeExtensions, emitExtensions, sortProcedure)
+        self.genFuncPointers = genFuncPointers
+        # prefixText is effectively unused by this generator; the hard-coded preambles
+        # above supply the generated file boilerplate.
+        self.prefixText      = None
+        self.apicall         = apicall
+        self.apientry        = apientry
+        self.apientryp       = apientryp
+        self.alignFuncParam  = alignFuncParam
+        self.lvt_file_type   = lvt_file_type
+#
+# LvtFileOutputGenerator - subclass of OutputGenerator.
+# Generates files needed by the layer validation tests
+class LvtFileOutputGenerator(OutputGenerator):
+    """Generate LVT support files based on XML element attributes"""
+    def __init__(self,
+                 errFile = sys.stderr,
+                 warnFile = sys.stderr,
+                 diagFile = sys.stdout):
+        OutputGenerator.__init__(self, errFile, warnFile, diagFile)
+        # Internal state - accumulators for different inner block text
+        self.dispatch_list = []               # List of entries for dispatch list
+    #
+    # Called once at the beginning of each run
+    def beginFile(self, genOpts):
+        OutputGenerator.beginFile(self, genOpts)
+
+        # Initialize members that require the tree
+        self.handle_types = GetHandleTypes(self.registry.tree)
+        self.lvt_file_type = genOpts.lvt_file_type
+
+        if genOpts.lvt_file_type == 'function_pointer_header':
+            write("#pragma once", file=self.outFile)
+
+        # User-supplied prefix text, if any (list of strings)
+        if (genOpts.prefixText):
+            for s in genOpts.prefixText:
+                write(s, file=self.outFile)
+        # File Comment
+        file_comment = '// *** THIS FILE IS GENERATED - DO NOT EDIT ***\n'
+        file_comment += '// See lvt_file_generator.py for modifications\n'
+        write(file_comment, file=self.outFile)
+        # Copyright Notice
+        copyright =  '/*\n'
+        copyright += ' * Copyright (c) 2015-2019 The Khronos Group Inc.\n'
+        copyright += ' * Copyright (c) 2015-2019 Valve Corporation\n'
+        copyright += ' * Copyright (c) 2015-2019 LunarG, Inc.\n'
+        copyright += ' *\n'
+        copyright += ' * Licensed under the Apache License, Version 2.0 (the "License");\n'
+        copyright += ' * you may not use this file except in compliance with the License.\n'
+        copyright += ' * You may obtain a copy of the License at\n'
+        copyright += ' *\n'
+        copyright += ' *     http://www.apache.org/licenses/LICENSE-2.0\n'
+        copyright += ' *\n'
+        copyright += ' * Unless required by applicable law or agreed to in writing, software\n'
+        copyright += ' * distributed under the License is distributed on an "AS IS" BASIS,\n'
+        copyright += ' * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n'
+        copyright += ' * See the License for the specific language governing permissions and\n'
+        copyright += ' * limitations under the License.\n'
+        copyright += ' *\n'
+        copyright += ' * Author: Mark Lobodzinski <mark@lunarg.com>\n'
+        copyright += ' */\n'
+        write(copyright, file=self.outFile)
+    #
+    # Write completed source code to output file
+    def endFile(self):
+        dest_file = ''
+        dest_file += self.OutputDestFile()
+        # Remove blank lines at EOF
+        if dest_file.endswith('\n'):
+            dest_file = dest_file[:-1]
+        write(dest_file, file=self.outFile)
+        # Finish processing in superclass
+        OutputGenerator.endFile(self)
+    #
+    # Processing at beginning of each feature or extension
+    def beginFeature(self, interface, emit):
+        OutputGenerator.beginFeature(self, interface, emit)
+        self.featureExtraProtect = GetFeatureProtect(interface)
+
+    #
+    # Process commands, adding to dispatch list
+    def genCmd(self, cmdinfo, name, alias):
+        OutputGenerator.genCmd(self, cmdinfo, name, alias)
+        # Get first param type
+        params = cmdinfo.elem.findall('param')
+        info = self.getTypeNameTuple(params[0])
+        self.AddCommandToDispatchList(name, info[0], self.featureExtraProtect, cmdinfo)
+
+    #
+    # Determine if this API should be ignored or added to the funcptr list
+    def AddCommandToDispatchList(self, name, handle_type, protect, cmdinfo):
+        WSI_mandatory_extensions = [
+            'VK_KHR_win32_surface',
+            'VK_KHR_xcb_surface',
+            'VK_KHR_xlib_surface',
+            'VK_KHR_wayland_surface',
+            'VK_MVK_macos_surface',
+            'VK_KHR_surface',
+            'VK_KHR_swapchain',
+            'VK_KHR_display',
+            'VK_KHR_android_surface',
+            ]
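+        # For illustration: core commands (e.g. vkCreateDevice, feature VK_VERSION_1_0) and
+        # WSI commands (e.g. vkCreateSwapchainKHR, extension VK_KHR_swapchain) pass the check
+        # below; commands introduced by other extensions are not added to the dispatch list.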
+        if 'VK_VERSION' in self.featureName or self.featureName in WSI_mandatory_extensions:
+            self.dispatch_list.append((name, self.featureExtraProtect))
+        return
+    #
+    # Retrieve the type and name for a parameter
+    def getTypeNameTuple(self, param):
+        type = ''
+        name = ''
+        for elem in param:
+            if elem.tag == 'type':
+                type = noneStr(elem.text)
+            elif elem.tag == 'name':
+                name = noneStr(elem.text)
+        return (type, name)
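+    # For example (illustrative): a registry <param> such as
+    #   <param><type>VkDevice</type> <name>device</name></param>
+    # yields the tuple ('VkDevice', 'device').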
+    #
+    # Create the test function pointer source and return it as a string
+    def GenerateFunctionPointerSource(self):
+        entries = []
+        table = funcptr_source_preamble
+        entries = self.dispatch_list
+
+        for item in entries:
+            # Remove 'vk' from proto name
+            base_name = item[0][2:]
+            if item[1] is not None:
+                table += '#ifdef %s\n' % item[1]
+            table += 'PFN_%s %s;\n' % (item[0], base_name)
+            if item[1] is not None:
+                table += '#endif // %s\n' % item[1]
+
+        table += '\n\n'
+        table += 'void InitDispatchTable() {\n'
+        table += '\n'
+        table += '#if(WIN32)\n'
+        table += '    const char filename[] = "vulkan-1.dll";\n'
+        table += '#elif(__APPLE__)\n'
+        table += '    const char filename[] = "libvulkan.dylib";\n'
+        table += '#else\n'
+        table += '    const char filename[] = "libvulkan.so";\n'
+        table += '#endif\n'
+        table += '\n'
+        table += '    auto loader_handle = loader_platform_open_library(filename);\n'
+        table += '\n'
+        table += '    if (loader_handle == nullptr) {\n'
+        table += '        printf("%s\\n", loader_platform_open_library_error(filename));\n'
+        table += '        exit(1);\n'
+        table += '    }\n\n'
+
+        for item in entries:
+            # Remove 'vk' from proto name
+            base_name = item[0][2:]
+
+            if item[1] is not None:
+                table += '#ifdef %s\n' % item[1]
+            table += '    %s = reinterpret_cast<PFN_%s>(loader_platform_get_proc_address(loader_handle, "%s"));\n' % (base_name, item[0], item[0])
+            if item[1] is not None:
+                table += '#endif // %s\n' % item[1]
+        table += '}\n\n'
+        table += '} // namespace vk'
+        return table
+    #
+    # Create the test function pointer header and return it as a string
+    def GenerateFunctionPointerHeader(self):
+        entries = []
+        table = funcptr_header_preamble
+        entries = self.dispatch_list
+
+        for item in entries:
+            # Remove 'vk' from proto name
+            base_name = item[0][2:]
+            if item[1] is not None:
+                table += '#ifdef %s\n' % item[1]
+            table += 'extern PFN_%s %s;\n' % (item[0], base_name)
+            if item[1] is not None:
+                table += '#endif // %s\n' % item[1]
+        table += '\n'
+        table += 'void InitDispatchTable();\n\n'
+        table += '} // namespace vk'
+        return table
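+
+    # For illustration: for vkCreateDevice the generated header gains
+    #   extern PFN_vkCreateDevice CreateDevice;
+    # while the generated source gains a matching definition plus an InitDispatchTable()
+    # entry that resolves it via loader_platform_get_proc_address(loader_handle, "vkCreateDevice").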
+
+    # Select the requested LVT file content and return it as a string
+    def OutputDestFile(self):
+        if self.lvt_file_type == 'function_pointer_header':
+            return self.GenerateFunctionPointerHeader()
+        elif self.lvt_file_type == 'function_pointer_source':
+            return self.GenerateFunctionPointerSource()
+        else:
+            return 'Bad LVT File Generator Option %s' % self.lvt_file_type
diff --git a/src/third_party/vulkan-validation-layers/src/scripts/object_tracker_generator.py b/src/third_party/vulkan-validation-layers/src/scripts/object_tracker_generator.py
new file mode 100644
index 0000000..5964dbb
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/scripts/object_tracker_generator.py
@@ -0,0 +1,1021 @@
+#!/usr/bin/python3 -i
+#
+# Copyright (c) 2015-2019 The Khronos Group Inc.
+# Copyright (c) 2015-2019 Valve Corporation
+# Copyright (c) 2015-2019 LunarG, Inc.
+# Copyright (c) 2015-2019 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Mark Lobodzinski <mark@lunarg.com>
+# Author: Dave Houlton <daveh@lunarg.com>
+
+import os,re,sys,string,json
+import xml.etree.ElementTree as etree
+from generator import *
+from collections import namedtuple
+from common_codegen import *
+
+# This is a workaround to use a Python 2.7 and 3.x compatible syntax.
+from io import open
+
+# ObjectTrackerGeneratorOptions - subclass of GeneratorOptions.
+#
+# Adds options used by ObjectTrackerOutputGenerator objects during
+# object_tracker layer generation.
+#
+# Additional members
+#   prefixText - list of strings to prefix generated header with
+#     (usually a copyright statement + calling convention macros).
+#   protectFile - True if multiple inclusion protection should be
+#     generated (based on the filename) around the entire header.
+#   protectFeature - True if #ifndef..#endif protection should be
+#     generated around a feature interface in the header file.
+#   genFuncPointers - True if function pointer typedefs should be
+#     generated
+#   protectProto - If conditional protection should be generated
+#     around prototype declarations, set to either '#ifdef'
+#     to require opt-in (#ifdef protectProtoStr) or '#ifndef'
+#     to require opt-out (#ifndef protectProtoStr). Otherwise
+#     set to None.
+#   protectProtoStr - #ifdef/#ifndef symbol to use around prototype
+#     declarations, if protectProto is set
+#   apicall - string to use for the function declaration prefix,
+#     such as APICALL on Windows.
+#   apientry - string to use for the calling convention macro,
+#     in typedefs, such as APIENTRY.
+#   apientryp - string to use for the calling convention macro
+#     in function pointer typedefs, such as APIENTRYP.
+#   indentFuncProto - True if prototype declarations should put each
+#     parameter on a separate line
+#   indentFuncPointer - True if typedefed function pointers should put each
+#     parameter on a separate line
+#   alignFuncParam - if nonzero and parameters are being put on a
+#     separate line, align parameter names at the specified column
+class ObjectTrackerGeneratorOptions(GeneratorOptions):
+    def __init__(self,
+                 conventions = None,
+                 filename = None,
+                 directory = '.',
+                 apiname = None,
+                 profile = None,
+                 versions = '.*',
+                 emitversions = '.*',
+                 defaultExtensions = None,
+                 addExtensions = None,
+                 removeExtensions = None,
+                 emitExtensions = None,
+                 sortProcedure = regSortFeatures,
+                 prefixText = "",
+                 genFuncPointers = True,
+                 protectFile = True,
+                 protectFeature = True,
+                 apicall = '',
+                 apientry = '',
+                 apientryp = '',
+                 indentFuncProto = True,
+                 indentFuncPointer = False,
+                 alignFuncParam = 0,
+                 expandEnumerants = True,
+                 valid_usage_path = ''):
+        GeneratorOptions.__init__(self, conventions, filename, directory, apiname, profile,
+                                  versions, emitversions, defaultExtensions,
+                                  addExtensions, removeExtensions, emitExtensions, sortProcedure)
+        self.prefixText      = prefixText
+        self.genFuncPointers = genFuncPointers
+        self.protectFile     = protectFile
+        self.protectFeature  = protectFeature
+        self.apicall         = apicall
+        self.apientry        = apientry
+        self.apientryp       = apientryp
+        self.indentFuncProto = indentFuncProto
+        self.indentFuncPointer = indentFuncPointer
+        self.alignFuncParam  = alignFuncParam
+        self.expandEnumerants = expandEnumerants
+        self.valid_usage_path = valid_usage_path
+
+
+# ObjectTrackerOutputGenerator - subclass of OutputGenerator.
+# Generates object_tracker layer object validation code
+#
+# ---- methods ----
+# ObjectTrackerOutputGenerator(errFile, warnFile, diagFile) - args as for OutputGenerator. Defines additional internal state.
+# ---- methods overriding base class ----
+# beginFile(genOpts)
+# endFile()
+# beginFeature(interface, emit)
+# endFeature()
+# genCmd(cmdinfo)
+# genStruct()
+# genType()
+class ObjectTrackerOutputGenerator(OutputGenerator):
+    """Generate ObjectTracker code based on XML element attributes"""
+    # This is an ordered list of sections in the header file.
+    ALL_SECTIONS = ['command']
+    def __init__(self,
+                 errFile = sys.stderr,
+                 warnFile = sys.stderr,
+                 diagFile = sys.stdout):
+        OutputGenerator.__init__(self, errFile, warnFile, diagFile)
+        self.INDENT_SPACES = 4
+        self.prototypes = []
+        self.instance_extensions = []
+        self.device_extensions = []
+        # Commands which are not autogenerated but still intercepted
+        self.no_autogen_list = [
+            'vkDestroyInstance',
+            'vkCreateInstance',
+            'vkEnumeratePhysicalDevices',
+            'vkGetPhysicalDeviceQueueFamilyProperties',
+            'vkGetPhysicalDeviceQueueFamilyProperties2',
+            'vkGetPhysicalDeviceQueueFamilyProperties2KHR',
+            'vkGetDeviceQueue',
+            'vkGetDeviceQueue2',
+            'vkCreateDescriptorSetLayout',
+            'vkDestroyDescriptorPool',
+            'vkDestroyCommandPool',
+            'vkAllocateCommandBuffers',
+            'vkAllocateDescriptorSets',
+            'vkFreeDescriptorSets',
+            'vkFreeCommandBuffers',
+            'vkUpdateDescriptorSets',
+            'vkBeginCommandBuffer',
+            'vkGetDescriptorSetLayoutSupport',
+            'vkGetDescriptorSetLayoutSupportKHR',
+            'vkDestroySwapchainKHR',
+            'vkGetSwapchainImagesKHR',
+            'vkCmdPushDescriptorSetKHR',
+            'vkDestroyDevice',
+            'vkResetDescriptorPool',
+            'vkGetPhysicalDeviceDisplayPropertiesKHR',
+            'vkGetPhysicalDeviceDisplayProperties2KHR',
+            'vkGetDisplayModePropertiesKHR',
+            'vkGetDisplayModeProperties2KHR',
+            'vkAcquirePerformanceConfigurationINTEL',
+            'vkReleasePerformanceConfigurationINTEL',
+            'vkQueueSetPerformanceConfigurationINTEL',
+            'vkCreateFramebuffer',
+            'vkSetDebugUtilsObjectNameEXT',
+            'vkSetDebugUtilsObjectTagEXT',
+            ]
+        # These VUIDs are not implicit, but are best handled in this layer. Codegen for vkDestroy calls will generate a key
+        # which is translated here into a good VU. Saves ~40 checks.
+        self.manual_vuids = {
+            "fence-compatalloc": "\"VUID-vkDestroyFence-fence-01121\"",
+            "fence-nullalloc": "\"VUID-vkDestroyFence-fence-01122\"",
+            "event-compatalloc": "\"VUID-vkDestroyEvent-event-01146\"",
+            "event-nullalloc": "\"VUID-vkDestroyEvent-event-01147\"",
+            "buffer-compatalloc": "\"VUID-vkDestroyBuffer-buffer-00923\"",
+            "buffer-nullalloc": "\"VUID-vkDestroyBuffer-buffer-00924\"",
+            "image-compatalloc": "\"VUID-vkDestroyImage-image-01001\"",
+            "image-nullalloc": "\"VUID-vkDestroyImage-image-01002\"",
+            "shaderModule-compatalloc": "\"VUID-vkDestroyShaderModule-shaderModule-01092\"",
+            "shaderModule-nullalloc": "\"VUID-vkDestroyShaderModule-shaderModule-01093\"",
+            "pipeline-compatalloc": "\"VUID-vkDestroyPipeline-pipeline-00766\"",
+            "pipeline-nullalloc": "\"VUID-vkDestroyPipeline-pipeline-00767\"",
+            "sampler-compatalloc": "\"VUID-vkDestroySampler-sampler-01083\"",
+            "sampler-nullalloc": "\"VUID-vkDestroySampler-sampler-01084\"",
+            "renderPass-compatalloc": "\"VUID-vkDestroyRenderPass-renderPass-00874\"",
+            "renderPass-nullalloc": "\"VUID-vkDestroyRenderPass-renderPass-00875\"",
+            "descriptorUpdateTemplate-compatalloc": "\"VUID-vkDestroyDescriptorUpdateTemplate-descriptorSetLayout-00356\"",
+            "descriptorUpdateTemplate-nullalloc": "\"VUID-vkDestroyDescriptorUpdateTemplate-descriptorSetLayout-00357\"",
+            "imageView-compatalloc": "\"VUID-vkDestroyImageView-imageView-01027\"",
+            "imageView-nullalloc": "\"VUID-vkDestroyImageView-imageView-01028\"",
+            "pipelineCache-compatalloc": "\"VUID-vkDestroyPipelineCache-pipelineCache-00771\"",
+            "pipelineCache-nullalloc": "\"VUID-vkDestroyPipelineCache-pipelineCache-00772\"",
+            "pipelineLayout-compatalloc": "\"VUID-vkDestroyPipelineLayout-pipelineLayout-00299\"",
+            "pipelineLayout-nullalloc": "\"VUID-vkDestroyPipelineLayout-pipelineLayout-00300\"",
+            "descriptorSetLayout-compatalloc": "\"VUID-vkDestroyDescriptorSetLayout-descriptorSetLayout-00284\"",
+            "descriptorSetLayout-nullalloc": "\"VUID-vkDestroyDescriptorSetLayout-descriptorSetLayout-00285\"",
+            "semaphore-compatalloc": "\"VUID-vkDestroySemaphore-semaphore-01138\"",
+            "semaphore-nullalloc": "\"VUID-vkDestroySemaphore-semaphore-01139\"",
+            "queryPool-compatalloc": "\"VUID-vkDestroyQueryPool-queryPool-00794\"",
+            "queryPool-nullalloc": "\"VUID-vkDestroyQueryPool-queryPool-00795\"",
+            "bufferView-compatalloc": "\"VUID-vkDestroyBufferView-bufferView-00937\"",
+            "bufferView-nullalloc": "\"VUID-vkDestroyBufferView-bufferView-00938\"",
+            "surface-compatalloc": "\"VUID-vkDestroySurfaceKHR-surface-01267\"",
+            "surface-nullalloc": "\"VUID-vkDestroySurfaceKHR-surface-01268\"",
+            "framebuffer-compatalloc": "\"VUID-vkDestroyFramebuffer-framebuffer-00893\"",
+            "framebuffer-nullalloc": "\"VUID-vkDestroyFramebuffer-framebuffer-00894\"",
+            "VkGraphicsPipelineCreateInfo-basePipelineHandle": "\"VUID-VkGraphicsPipelineCreateInfo-flags-00722\"",
+            "VkComputePipelineCreateInfo-basePipelineHandle": "\"VUID-VkComputePipelineCreateInfo-flags-00697\"",
+            "VkRayTracingPipelineCreateInfoNV-basePipelineHandle": "\"VUID-VkRayTracingPipelineCreateInfoNV-flags-02404\"",
+           }
+
+        # Commands that are shadowed by interface functions and are not implemented
+        self.interface_functions = [
+            ]
+        self.headerVersion = None
+        # Internal state - accumulators for different inner block text
+        self.sections = dict([(section, []) for section in self.ALL_SECTIONS])
+        self.cmd_list = []             # list of commands processed to maintain ordering
+        self.cmd_info_dict = {}        # Per entry-point data for code generation and validation
+        self.structMembers = []        # List of StructMemberData records for all Vulkan structs
+        self.extension_structs = []    # List of all structs or sister-structs containing handles
+                                       # A sister-struct may contain no handles but shares <validextensionstructs> with one that does
+        self.structTypes = dict()      # Map of Vulkan struct typename to required VkStructureType
+        self.struct_member_dict = dict()
+        # Named tuples to store struct and command data
+        self.StructType = namedtuple('StructType', ['name', 'value'])
+        self.CmdInfoData = namedtuple('CmdInfoData', ['name', 'cmdinfo', 'members', 'extra_protect', 'alias', 'iscreate', 'isdestroy', 'allocator'])
+        self.CommandParam = namedtuple('CommandParam', ['type', 'name', 'isconst', 'isoptional', 'iscount', 'iscreate', 'len', 'extstructs', 'cdecl', 'islocal'])
+        self.StructMemberData = namedtuple('StructMemberData', ['name', 'members'])
+        self.object_types = []         # List of all handle types
+        self.valid_vuids = set()       # Set of all valid VUIDs
+        self.vuid_dict = dict()        # VUID dictionary (from JSON)
+    #
+    # Check if the parameter passed in is optional
+    def paramIsOptional(self, param):
+        # See if the handle is optional
+        isoptional = False
+        # Simple case: if it's marked optional, return True
+        optString = param.attrib.get('optional')
+        if optString:
+            if optString == 'true':
+                isoptional = True
+            elif ',' in optString:
+                opts = []
+                for opt in optString.split(','):
+                    val = opt.strip()
+                    if val == 'true':
+                        opts.append(True)
+                    elif val == 'false':
+                        opts.append(False)
+                    else:
+                        print('Unrecognized optional attribute value', val)
+                isoptional = opts
+        if not isoptional:
+            # Matching logic in parameter validation and ValidityOutputGenerator.isHandleOptional
+            optString = param.attrib.get('noautovalidity')
+            if optString and optString == 'true':
+                isoptional = True
+        return isoptional
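+    # For example (illustrative): optional="true" yields True, optional="true,false"
+    # yields the list [True, False], and a parameter with neither an optional nor a
+    # noautovalidity attribute yields False.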
+    #
+    # Get VUID identifier from implicit VUID tag
+    def GetVuid(self, parent, suffix):
+        vuid_string = 'VUID-%s-%s' % (parent, suffix)
+        vuid = "kVUIDUndefined"
+        if '->' in vuid_string:
+            return vuid
+        if vuid_string in self.valid_vuids:
+            vuid = "\"%s\"" % vuid_string
+        else:
+            alias =  self.cmd_info_dict[parent].alias if parent in self.cmd_info_dict else None
+            if alias:
+                alias_string = 'VUID-%s-%s' % (alias, suffix)
+                if alias_string in self.valid_vuids:
+                    vuid = "\"%s\"" % alias_string
+        return vuid
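+    # For example (illustrative): GetVuid('vkDestroyFence', 'fence-parent') returns the
+    # quoted string '"VUID-vkDestroyFence-fence-parent"' if that ID appears in
+    # validusage.json, and "kVUIDUndefined" otherwise (or whenever the constructed ID contains '->').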
+    #
+    # Increase the indent by 4 spaces and return the new indent string
+    def incIndent(self, indent):
+        inc = ' ' * self.INDENT_SPACES
+        if indent:
+            return indent + inc
+        return inc
+    #
+    # Decrease the indent by 4 spaces and return the new indent string
+    def decIndent(self, indent):
+        if indent and (len(indent) > self.INDENT_SPACES):
+            return indent[:-self.INDENT_SPACES]
+        return ''
+    #
+    # Override makeProtoName to drop the "vk" prefix
+    def makeProtoName(self, name, tail):
+        return self.genOpts.apientry + name[2:] + tail
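+    # For example (illustrative): with apientry set to 'VKAPI_CALL ', makeProtoName('vkCreateDevice', '')
+    # returns 'VKAPI_CALL CreateDevice'.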
+    #
+    # Check if the parameter passed in is a pointer to an array
+    def paramIsArray(self, param):
+        return param.attrib.get('len') is not None
+
+    #
+    # Generate the object tracker undestroyed object validation function
+    def GenReportFunc(self):
+        output_func = ''
+        for objtype in ['instance', 'device']:
+            upper_objtype = objtype.capitalize()
+            output_func += 'bool ObjectLifetimes::ReportUndestroyed%sObjects(Vk%s %s, const std::string& error_code) const {\n' % (upper_objtype, upper_objtype, objtype)
+            output_func += '    bool skip = false;\n'
+            if objtype == 'device':
+                output_func += '    skip |= ReportLeaked%sObjects(%s, kVulkanObjectTypeCommandBuffer, error_code);\n' % (upper_objtype, objtype)
+            for handle in self.object_types:
+                if self.handle_types.IsNonDispatchable(handle):
+                    if (objtype == 'device' and self.handle_parents.IsParentDevice(handle)) or (objtype == 'instance' and not self.handle_parents.IsParentDevice(handle)):
+                        output_func += '    skip |= ReportLeaked%sObjects(%s, %s, error_code);\n' % (upper_objtype, objtype, self.GetVulkanObjType(handle))
+            output_func += '    return skip;\n'
+            output_func += '}\n'
+        return output_func
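+
+    # For illustration, the device flavor of the generated report function begins:
+    #   bool ObjectLifetimes::ReportUndestroyedDeviceObjects(VkDevice device, const std::string& error_code) const {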
+
+    #
+    # Generate the object tracker undestroyed object destruction function
+    def GenDestroyFunc(self):
+        output_func = ''
+        for objtype in ['instance', 'device']:
+            upper_objtype = objtype.capitalize()
+            output_func += 'void ObjectLifetimes::DestroyLeaked%sObjects() {\n' % upper_objtype
+            if objtype == 'device':
+                output_func += '    DestroyUndestroyedObjects(kVulkanObjectTypeCommandBuffer);\n'
+            for handle in self.object_types:
+                if self.handle_types.IsNonDispatchable(handle):
+                    if (objtype == 'device' and self.handle_parents.IsParentDevice(handle)) or (objtype == 'instance' and not self.handle_parents.IsParentDevice(handle)):
+                        output_func += '    DestroyUndestroyedObjects(%s);\n' % self.GetVulkanObjType(handle)
+            output_func += '}\n'
+
+        return output_func
+
+    #
+    # Walk the JSON-derived dict and find all "vuid" key values
+    def ExtractVUIDs(self, d):
+        if hasattr(d, 'items'):
+            for k, v in d.items():
+                if k == "vuid":
+                    yield v
+                elif isinstance(v, dict):
+                    for s in self.ExtractVUIDs(v):
+                        yield s
+                elif isinstance (v, list):
+                    for l in v:
+                        for s in self.ExtractVUIDs(l):
+                            yield s
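+    # For illustration: given {'validation': [{'vuid': 'VUID-a'}, {'nested': {'vuid': 'VUID-b'}}]},
+    # ExtractVUIDs yields 'VUID-a' and then 'VUID-b'.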
+    #
+    # Separate content for validation source and header files
+    def otwrite(self, dest, formatstring):
+        if 'object_tracker.h' in self.genOpts.filename and (dest == 'hdr' or dest == 'both'):
+            write(formatstring, file=self.outFile)
+        elif 'object_tracker.cpp' in self.genOpts.filename and (dest == 'cpp' or dest == 'both'):
+            write(formatstring, file=self.outFile)
+
+    #
+    # Called at beginning of processing as file is opened
+    def beginFile(self, genOpts):
+        OutputGenerator.beginFile(self, genOpts)
+
+        # Initialize members that require the tree
+        self.handle_types = GetHandleTypes(self.registry.tree)
+        self.handle_parents = GetHandleParents(self.registry.tree)
+        self.type_categories = GetTypeCategories(self.registry.tree)
+
+        header_file = (genOpts.filename == 'object_tracker.h')
+        source_file = (genOpts.filename == 'object_tracker.cpp')
+
+        if not header_file and not source_file:
+            print("Error: Output Filenames have changed, update generator source.\n")
+            sys.exit(1)
+
+        self.valid_usage_path = genOpts.valid_usage_path
+        vu_json_filename = os.path.join(self.valid_usage_path + os.sep, 'validusage.json')
+        if os.path.isfile(vu_json_filename):
+            json_file = open(vu_json_filename, 'r')
+            self.vuid_dict = json.load(json_file)
+            json_file.close()
+        if len(self.vuid_dict) == 0:
+            print("Error: Could not find, or error loading %s/validusage.json\n", vu_json_filename)
+            sys.exit(1)
+
+        # Build a set of all vuid text strings found in validusage.json
+        for json_vuid_string in self.ExtractVUIDs(self.vuid_dict):
+            self.valid_vuids.add(json_vuid_string)
+
+        # File Comment
+        file_comment = '// *** THIS FILE IS GENERATED - DO NOT EDIT ***\n'
+        file_comment += '// See object_tracker_generator.py for modifications\n'
+        self.otwrite('both', file_comment)
+        # Copyright Statement
+        copyright = ''
+        copyright += '\n'
+        copyright += '/***************************************************************************\n'
+        copyright += ' *\n'
+        copyright += ' * Copyright (c) 2015-2019 The Khronos Group Inc.\n'
+        copyright += ' * Copyright (c) 2015-2019 Valve Corporation\n'
+        copyright += ' * Copyright (c) 2015-2019 LunarG, Inc.\n'
+        copyright += ' * Copyright (c) 2015-2019 Google Inc.\n'
+        copyright += ' *\n'
+        copyright += ' * Licensed under the Apache License, Version 2.0 (the "License");\n'
+        copyright += ' * you may not use this file except in compliance with the License.\n'
+        copyright += ' * You may obtain a copy of the License at\n'
+        copyright += ' *\n'
+        copyright += ' *     http://www.apache.org/licenses/LICENSE-2.0\n'
+        copyright += ' *\n'
+        copyright += ' * Unless required by applicable law or agreed to in writing, software\n'
+        copyright += ' * distributed under the License is distributed on an "AS IS" BASIS,\n'
+        copyright += ' * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n'
+        copyright += ' * See the License for the specific language governing permissions and\n'
+        copyright += ' * limitations under the License.\n'
+        copyright += ' *\n'
+        copyright += ' * Author: Mark Lobodzinski <mark@lunarg.com>\n'
+        copyright += ' * Author: Dave Houlton <daveh@lunarg.com>\n'
+        copyright += ' *\n'
+        copyright += ' ****************************************************************************/\n'
+        self.otwrite('both', copyright)
+        self.newline()
+        self.otwrite('cpp', '#include "chassis.h"')
+        self.otwrite('cpp', '#include "object_lifetime_validation.h"')
+
+    #
+    # Now that the data is all collected and complete, generate and output the object validation routines
+    def endFile(self):
+        self.struct_member_dict = dict(self.structMembers)
+        # Generate the list of APIs that might need to handle wrapped extension structs
+        # self.GenerateCommandWrapExtensionList()
+        self.WrapCommands()
+        # Build undestroyed objects reporting function
+        report_func = self.GenReportFunc()
+        self.newline()
+        # Build undestroyed objects destruction function
+        destroy_func = self.GenDestroyFunc()
+        self.otwrite('cpp', '\n')
+        self.otwrite('cpp', '// ObjectTracker undestroyed objects validation function')
+        self.otwrite('cpp', '%s' % report_func)
+        self.otwrite('cpp', '%s' % destroy_func)
+        # Actually write the interface to the output file.
+        if (self.emit):
+            self.newline()
+            if self.featureExtraProtect is not None:
+                prot = '#ifdef %s' % self.featureExtraProtect
+                self.otwrite('both', '%s' % prot)
+            # Write the object_tracker code to the file
+            if self.sections['command']:
+                source = ('\n'.join(self.sections['command']))
+                self.otwrite('both', '%s' % source)
+            if (self.featureExtraProtect is not None):
+                prot = '\n#endif // %s' % self.featureExtraProtect
+                self.otwrite('both', prot)
+            else:
+                self.otwrite('both', '\n')
+
+
+        self.otwrite('hdr', 'void PostCallRecordDestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator);')
+        self.otwrite('hdr', 'void PreCallRecordResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags);')
+        self.otwrite('hdr', 'void PostCallRecordGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties *pQueueFamilyProperties);')
+        self.otwrite('hdr', 'void PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers);')
+        self.otwrite('hdr', 'void PreCallRecordFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet *pDescriptorSets);')
+        self.otwrite('hdr', 'void PostCallRecordGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties);')
+        self.otwrite('hdr', 'void PostCallRecordGetPhysicalDeviceQueueFamilyProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties);')
+        self.otwrite('hdr', 'void PostCallRecordGetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount, VkDisplayPropertiesKHR *pProperties, VkResult result);')
+        self.otwrite('hdr', 'void PostCallRecordGetDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t *pPropertyCount, VkDisplayModePropertiesKHR *pProperties, VkResult result);')
+        self.otwrite('hdr', 'void PostCallRecordGetPhysicalDeviceDisplayProperties2KHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount, VkDisplayProperties2KHR *pProperties, VkResult result);')
+        self.otwrite('hdr', 'void PostCallRecordGetDisplayModeProperties2KHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t *pPropertyCount, VkDisplayModeProperties2KHR *pProperties, VkResult result);')
+        OutputGenerator.endFile(self)
+    #
+    # Processing point at beginning of each extension definition
+    def beginFeature(self, interface, emit):
+        # Start processing in superclass
+        OutputGenerator.beginFeature(self, interface, emit)
+        self.headerVersion = None
+        self.featureExtraProtect = GetFeatureProtect(interface)
+
+        if self.featureName != 'VK_VERSION_1_0' and self.featureName != 'VK_VERSION_1_1':
+            white_list_entry = []
+            if (self.featureExtraProtect is not None):
+                white_list_entry += [ '#ifdef %s' % self.featureExtraProtect ]
+            white_list_entry += [ '"%s"' % self.featureName ]
+            if (self.featureExtraProtect is not None):
+                white_list_entry += [ '#endif' ]
+            featureType = interface.get('type')
+            if featureType == 'instance':
+                self.instance_extensions += white_list_entry
+            elif featureType == 'device':
+                self.device_extensions += white_list_entry
+    #
+    # Processing point at end of each extension definition
+    def endFeature(self):
+        # Finish processing in superclass
+        OutputGenerator.endFeature(self)
+    #
+    # Process enums, structs, etc.
+    def genType(self, typeinfo, name, alias):
+        OutputGenerator.genType(self, typeinfo, name, alias)
+        typeElem = typeinfo.elem
+        # If the type is a struct type, traverse the embedded <member> tags generating a structure.
+        # Otherwise, emit the tag text.
+        category = typeElem.get('category')
+        if (category == 'struct' or category == 'union'):
+            self.genStruct(typeinfo, name, alias)
+        if category == 'handle':
+            self.object_types.append(name)
+    #
+    # Append a definition to the specified section
+    def appendSection(self, section, text):
+        # self.sections[section].append('SECTION: ' + section + '\n')
+        self.sections[section].append(text)
+    #
+    # Check if the parameter passed in is a pointer
+    def paramIsPointer(self, param):
+        ispointer = False
+        for elem in param:
+            if elem.tag == 'type' and elem.tail is not None and '*' in elem.tail:
+                ispointer = True
+        return ispointer
+    #
+    # Retrieve the type and name for a parameter
+    def getTypeNameTuple(self, param):
+        type = ''
+        name = ''
+        for elem in param:
+            if elem.tag == 'type':
+                type = noneStr(elem.text)
+            elif elem.tag == 'name':
+                name = noneStr(elem.text)
+        return (type, name)
+    #
+    # Retrieve the value of the len tag
+    def getLen(self, param):
+        result = None
+        len = param.attrib.get('len')
+        if len and len != 'null-terminated':
+            # For string arrays, 'len' can look like 'count,null-terminated', indicating that we
+            # have a null terminated array of strings.  We strip the null-terminated from the
+            # 'len' field and only return the parameter specifying the string count
+            if 'null-terminated' in len:
+                result = len.split(',')[0]
+            else:
+                result = len
+            # The spec now uses '::' notation for len attributes instead of a platform-specific pointer symbol
+            result = str(result).replace('::', '->')
+        return result
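+    # For illustration, with typical vk.xml 'len' attributes (example values assumed,
+    # not taken from a specific registry revision):
+    #   len="propertyCount"                      -> 'propertyCount'
+    #   len="enabledLayerCount,null-terminated"  -> 'enabledLayerCount'
+    #   len="pAllocateInfo::descriptorSetCount"  -> 'pAllocateInfo->descriptorSetCount'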
+    #
+    # Generate a VkStructureType based on a structure typename
+    def genVkStructureType(self, typename):
+        # Add underscore between lowercase then uppercase
+        value = re.sub('([a-z0-9])([A-Z])', r'\1_\2', typename)
+        # Change to uppercase
+        value = value.upper()
+        # Add STRUCTURE_TYPE_
+        return re.sub('VK_', 'VK_STRUCTURE_TYPE_', value)
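+    # For example, genVkStructureType('VkImageCreateInfo') returns
+    # 'VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO': an underscore is inserted at each
+    # lower-to-upper transition, the result is uppercased, then prefixed.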
+    #
+    # Struct parameter check generation.
+    # This is a special case of the <type> tag where the contents are interpreted as a set of
+    # <member> tags instead of freeform C type declarations. The <member> tags are just like
+    # <param> tags - they are a declaration of a struct or union member. Only simple member
+    # declarations are supported (no nested structs etc.)
+    def genStruct(self, typeinfo, typeName, alias):
+        OutputGenerator.genStruct(self, typeinfo, typeName, alias)
+        members = typeinfo.elem.findall('.//member')
+        # Iterate over members once to get length parameters for arrays
+        lens = set()
+        for member in members:
+            len = self.getLen(member)
+            if len:
+                lens.add(len)
+        # Generate member info
+        membersInfo = []
+        for member in members:
+            # Get the member's type and name
+            info = self.getTypeNameTuple(member)
+            type = info[0]
+            name = info[1]
+            cdecl = self.makeCParamDecl(member, 0)
+            # Process VkStructureType
+            if type == 'VkStructureType':
+                # Extract the required struct type value from the comments
+                # embedded in the original text defining the 'typeinfo' element
+                rawXml = etree.tostring(typeinfo.elem).decode('ascii')
+                result = re.search(r'VK_STRUCTURE_TYPE_\w+', rawXml)
+                if result:
+                    value = result.group(0)
+                else:
+                    value = self.genVkStructureType(typeName)
+                # Store the required type value
+                self.structTypes[typeName] = self.StructType(name=name, value=value)
+            # Store pointer/array/string info
+            extstructs = member.attrib.get('validextensionstructs') if name == 'pNext' else None
+            membersInfo.append(self.CommandParam(type=type,
+                                                 name=name,
+                                                 isconst=True if 'const' in cdecl else False,
+                                                 isoptional=self.paramIsOptional(member),
+                                                 iscount=True if name in lens else False,
+                                                 len=self.getLen(member),
+                                                 extstructs=extstructs,
+                                                 cdecl=cdecl,
+                                                 islocal=False,
+                                                 iscreate=False))
+        self.structMembers.append(self.StructMemberData(name=typeName, members=membersInfo))
+    #
+    # Insert a lock_guard line
+    def lock_guard(self, indent):
+        return '%sstd::lock_guard<std::mutex> lock(global_lock);\n' % indent
+    #
+    # Determine if a struct has an object as a member or an embedded member
+    def struct_contains_object(self, struct_item):
+        struct_member_dict = dict(self.structMembers)
+        struct_members = struct_member_dict[struct_item]
+
+        for member in struct_members:
+            if member.type in self.handle_types:
+                return True
+            # recurse for member structs, guard against infinite recursion
+            elif member.type in struct_member_dict and member.type != struct_item:
+                if self.struct_contains_object(member.type):
+                    return True
+        return False
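+    # For example (assuming the usual vk.xml definition), VkDescriptorSetAllocateInfo
+    # contains a VkDescriptorPool handle member, so it reports True; a struct with only
+    # scalar and enum members reports False.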
+    #
+    # Return the list of parameters/members whose struct type contains an object, directly or in a nested sub-structure
+    def getParmeterStructsWithObjects(self, item_list):
+        struct_list = set()
+        for item in item_list:
+            paramtype = item.find('type')
+            typecategory = self.type_categories[paramtype.text]
+            if typecategory == 'struct':
+                if self.struct_contains_object(paramtype.text) == True:
+                    struct_list.add(item)
+        return struct_list
+    #
+    # Return list of objects from a given list of parameters or members
+    def getObjectsInParameterList(self, item_list, create_func):
+        object_list = set()
+        if create_func == True:
+            member_list = item_list[0:-1]
+        else:
+            member_list = item_list
+        for item in member_list:
+            paramtype = item.find('type')
+            if paramtype.text in self.handle_types:
+                object_list.add(item)
+        return object_list
+    #
+    # Construct list of extension structs containing handles, or extension structs that share a <validextensionstructs>
+    # tag WITH an extension struct containing handles.
+    def GenerateCommandWrapExtensionList(self):
+        for struct in self.structMembers:
+            if (len(struct.members) > 1) and struct.members[1].extstructs is not None:
+                found = False
+                for item in struct.members[1].extstructs.split(','):
+                    if item != '' and self.struct_contains_object(item) == True:
+                        found = True
+                if found == True:
+                    for item in struct.members[1].extstructs.split(','):
+                        if item != '' and item not in self.extension_structs:
+                            self.extension_structs.append(item)
+    #
+    # Returns True if a struct may have a pNext chain containing an object
+    def StructWithExtensions(self, struct_type):
+        if struct_type in self.struct_member_dict:
+            param_info = self.struct_member_dict[struct_type]
+            if (len(param_info) > 1) and param_info[1].extstructs is not None:
+                for item in param_info[1].extstructs.split(','):
+                    if item in self.extension_structs:
+                        return True
+        return False
+    #
+    # Generate VulkanObjectType from object type
+    def GetVulkanObjType(self, type):
+        return 'kVulkanObjectType%s' % type[2:]
+    #
+    # Return correct dispatch table type -- instance or device
+    def GetDispType(self, type):
+        return 'instance' if type in ['VkInstance', 'VkPhysicalDevice'] else 'device'
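+    # For example, GetVulkanObjType('VkCommandBuffer') -> 'kVulkanObjectTypeCommandBuffer',
+    # and GetDispType('VkCommandBuffer') -> 'device', since only VkInstance and
+    # VkPhysicalDevice use the instance dispatch table.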
+    #
+    # Generate source for creating a Vulkan object
+    def generate_create_object_code(self, indent, proto, params, cmd_info, allocator):
+        create_obj_code = ''
+        handle_type = params[-1].find('type')
+        is_create_pipelines = False
+
+        if handle_type.text in self.handle_types:
+            # Check for special case where multiple handles are returned
+            object_array = False
+            if cmd_info[-1].len is not None:
+                object_array = True
+            handle_name = params[-1].find('name')
+            object_dest = '*%s' % handle_name.text
+            if object_array == True:
+                if 'CreateGraphicsPipelines' in proto.text or 'CreateComputePipelines' in proto.text or 'CreateRayTracingPipelines' in proto.text:
+                    is_create_pipelines = True
+                    create_obj_code += '%sif (VK_ERROR_VALIDATION_FAILED_EXT == result) return;\n' % indent
+                create_obj_code += '%sif (%s) {\n' % (indent, handle_name.text)
+                indent = self.incIndent(indent)
+                countispointer = ''
+                if 'uint32_t*' in cmd_info[-2].cdecl:
+                    countispointer = '*'
+                create_obj_code += '%sfor (uint32_t index = 0; index < %s%s; index++) {\n' % (indent, countispointer, cmd_info[-1].len)
+                indent = self.incIndent(indent)
+                object_dest = '%s[index]' % cmd_info[-1].name
+
+            dispobj = params[0].find('type').text
+            if is_create_pipelines:
+                create_obj_code += '%sif (!pPipelines[index]) continue;\n' % indent
+            create_obj_code += '%sCreateObject(%s, %s, %s);\n' % (indent, object_dest, self.GetVulkanObjType(cmd_info[-1].type), allocator)
+            if object_array == True:
+                indent = self.decIndent(indent)
+                create_obj_code += '%s}\n' % indent
+                indent = self.decIndent(indent)
+                create_obj_code += '%s}\n' % indent
+            indent = self.decIndent(indent)
+
+        return create_obj_code
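+    # For illustration, for an array-creating call such as vkAllocateCommandBuffers the
+    # emitted source looks roughly like this (a sketch; exact text depends on vk.xml):
+    #   if (pCommandBuffers) {
+    #       for (uint32_t index = 0; index < pAllocateInfo->commandBufferCount; index++) {
+    #           CreateObject(pCommandBuffers[index], kVulkanObjectTypeCommandBuffer, nullptr);
+    #       }
+    #   }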
+    #
+    # Generate source for destroying a non-dispatchable object
+    def generate_destroy_object_code(self, indent, proto, cmd_info):
+        validate_code = ''
+        record_code = ''
+        object_array = False
+        if any(destroy_txt in proto.text for destroy_txt in ['Destroy', 'Free']):
+            # Check for special case where multiple handles are returned
+            if cmd_info[-1].len is not None:
+                object_array = True
+                param = -1
+            else:
+                param = -2
+            compatalloc_vuid_string = '%s-compatalloc' % cmd_info[param].name
+            nullalloc_vuid_string = '%s-nullalloc' % cmd_info[param].name
+            compatalloc_vuid = self.manual_vuids.get(compatalloc_vuid_string, "kVUIDUndefined")
+            nullalloc_vuid = self.manual_vuids.get(nullalloc_vuid_string, "kVUIDUndefined")
+            if cmd_info[param].type in self.handle_types:
+                if object_array == True:
+                    # This API is freeing an array of handles -- add loop control
+                    validate_code += 'HEY, NEED TO DESTROY AN ARRAY\n'
+                else:
+                    dispobj = cmd_info[0].type
+                    # Call Destroy a single time
+                    validate_code += '%sskip |= ValidateDestroyObject(%s, %s, pAllocator, %s, %s);\n' % (indent, cmd_info[param].name, self.GetVulkanObjType(cmd_info[param].type), compatalloc_vuid, nullalloc_vuid)
+                    record_code += '%sRecordDestroyObject(%s, %s);\n' % (indent, cmd_info[param].name, self.GetVulkanObjType(cmd_info[param].type))
+        return object_array, validate_code, record_code
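+    # For illustration, for vkDestroyBuffer this produces roughly (a sketch; the VUID
+    # arguments come from self.manual_vuids or default to kVUIDUndefined):
+    #   validate: skip |= ValidateDestroyObject(buffer, kVulkanObjectTypeBuffer, pAllocator, <compatalloc-vuid>, <nullalloc-vuid>);
+    #   record:   RecordDestroyObject(buffer, kVulkanObjectTypeBuffer);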
+    #
+    # Output validation for a single object (obj_count is None) or a counted list of objects
+    def outputObjects(self, obj_type, obj_name, obj_count, prefix, index, indent, disp_name, parent_name, null_allowed, top_level):
+        pre_call_code = ''
+        param_suffix = '%s-parameter' % (obj_name)
+        parent_suffix = '%s-parent' % (obj_name)
+        param_vuid = self.GetVuid(parent_name, param_suffix)
+        parent_vuid = self.GetVuid(parent_name, parent_suffix)
+
+        # If no parent VUID for this member, look for a commonparent VUID
+        if parent_vuid == 'kVUIDUndefined':
+            parent_vuid = self.GetVuid(parent_name, 'commonparent')
+        if obj_count is not None:
+
+            pre_call_code += '%sif (%s%s) {\n' % (indent, prefix, obj_name)
+            indent = self.incIndent(indent)
+            pre_call_code += '%sfor (uint32_t %s = 0; %s < %s; ++%s) {\n' % (indent, index, index, obj_count, index)
+            indent = self.incIndent(indent)
+            pre_call_code += '%sskip |= ValidateObject(%s%s[%s], %s, %s, %s, %s);\n' % (indent, prefix, obj_name, index, self.GetVulkanObjType(obj_type), null_allowed, param_vuid, parent_vuid)
+            indent = self.decIndent(indent)
+            pre_call_code += '%s}\n' % indent
+            indent = self.decIndent(indent)
+            pre_call_code += '%s}\n' % indent
+        else:
+            bonus_indent = ''
+            if 'basePipelineHandle' in obj_name:
+                pre_call_code += '%sif ((%sflags & VK_PIPELINE_CREATE_DERIVATIVE_BIT) && (%sbasePipelineIndex == -1))\n' % (indent, prefix, prefix)
+                bonus_indent = '    '
+                null_allowed = 'false'
+                manual_vuid_index = parent_name + '-' + obj_name
+                param_vuid = self.manual_vuids.get(manual_vuid_index, "kVUIDUndefined")
+            pre_call_code += '%s%sskip |= ValidateObject(%s%s, %s, %s, %s, %s);\n' % (bonus_indent, indent, prefix, obj_name, self.GetVulkanObjType(obj_type), null_allowed, param_vuid, parent_vuid)
+        return pre_call_code
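+    # For illustration, a counted handle array such as pCreateInfo->pSetLayouts in
+    # vkCreatePipelineLayout produces roughly (a sketch; the VUIDs and the null-allowed
+    # flag depend on vk.xml and validusage.json):
+    #   if (pCreateInfo->pSetLayouts) {
+    #       for (uint32_t index0 = 0; index0 < pCreateInfo->setLayoutCount; ++index0) {
+    #           skip |= ValidateObject(pCreateInfo->pSetLayouts[index0], kVulkanObjectTypeDescriptorSetLayout, <null-allowed>, <param-vuid>, <parent-vuid>);
+    #       }
+    #   }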
+    #
+    # first_level_param indicates whether elements are passed directly into the function; otherwise they are nested below a pointer/struct member
+    def validate_objects(self, members, indent, prefix, array_index, disp_name, parent_name, first_level_param):
+        pre_code = ''
+        index = 'index%s' % str(array_index)
+        array_index += 1
+        # Process any objects in this structure and recurse for any sub-structs in this struct
+        for member in members:
+            # Handle objects
+            if member.iscreate and first_level_param and member == members[-1]:
+                continue
+            if member.type in self.handle_types:
+                count_name = member.len
+                if (count_name is not None):
+                    count_name = '%s%s' % (prefix, member.len)
+                null_allowed = member.isoptional
+                tmp_pre = self.outputObjects(member.type, member.name, count_name, prefix, index, indent, disp_name, parent_name, str(null_allowed).lower(), first_level_param)
+                pre_code += tmp_pre
+            # Handle Structs that contain objects at some level
+            elif member.type in self.struct_member_dict:
+                # Structs at first level will have an object
+                if self.struct_contains_object(member.type) == True:
+                    struct_info = self.struct_member_dict[member.type]
+                    # TODO (jbolz): Can this use paramIsPointer?
+                    ispointer = '*' in member.cdecl
+                    # Struct Array
+                    if member.len is not None:
+                        # Update struct prefix
+                        new_prefix = '%s%s' % (prefix, member.name)
+                        pre_code += '%sif (%s%s) {\n' % (indent, prefix, member.name)
+                        indent = self.incIndent(indent)
+                        pre_code += '%sfor (uint32_t %s = 0; %s < %s%s; ++%s) {\n' % (indent, index, index, prefix, member.len, index)
+                        indent = self.incIndent(indent)
+                        local_prefix = '%s[%s].' % (new_prefix, index)
+                        # Process sub-structs in this struct
+                        tmp_pre = self.validate_objects(struct_info, indent, local_prefix, array_index, disp_name, member.type, False)
+                        pre_code += tmp_pre
+                        indent = self.decIndent(indent)
+                        pre_code += '%s}\n' % indent
+                        indent = self.decIndent(indent)
+                        pre_code += '%s}\n' % indent
+                    # Single Struct Pointer
+                    elif ispointer:
+                        # Update struct prefix
+                        new_prefix = '%s%s->' % (prefix, member.name)
+                        # Declare safe_VarType for struct
+                        pre_code += '%sif (%s%s) {\n' % (indent, prefix, member.name)
+                        indent = self.incIndent(indent)
+                        # Process sub-structs in this struct
+                        tmp_pre = self.validate_objects(struct_info, indent, new_prefix, array_index, disp_name, member.type, False)
+                        pre_code += tmp_pre
+                        indent = self.decIndent(indent)
+                        pre_code += '%s}\n' % indent
+                    # Single Nested Struct
+                    else:
+                        # Update struct prefix
+                        new_prefix = '%s%s.' % (prefix, member.name)
+                        # Process sub-structs
+                        tmp_pre = self.validate_objects(struct_info, indent, new_prefix, array_index, disp_name, member.type, False)
+                        pre_code += tmp_pre
+        return pre_code
+    #
+    # For a particular API, generate the object handling code
+    def generate_wrapping_code(self, cmd):
+        indent = '    '
+        pre_call_validate = ''
+        pre_call_record = ''
+        post_call_record = ''
+
+        destroy_array = False
+        validate_destroy_code = ''
+        record_destroy_code = ''
+
+        proto = cmd.find('proto/name')
+        params = cmd.findall('param')
+        if proto.text is not None:
+            cmddata = self.cmd_info_dict[proto.text]
+            cmd_info = cmddata.members
+            disp_name = cmd_info[0].name
+            # Handle object create operations if last parameter is created by this call
+            if cmddata.iscreate:
+                post_call_record += self.generate_create_object_code(indent, proto, params, cmd_info, cmddata.allocator)
+            # Handle object destroy operations
+            if cmddata.isdestroy:
+                (destroy_array, validate_destroy_code, record_destroy_code) = self.generate_destroy_object_code(indent, proto, cmd_info)
+
+            pre_call_record += record_destroy_code
+            pre_call_validate += self.validate_objects(cmd_info, indent, '', 0, disp_name, proto.text, True)
+            pre_call_validate += validate_destroy_code
+
+        return pre_call_validate, pre_call_record, post_call_record
+    #
+    # Capture command parameter info needed to create, destroy, and validate objects
+    def genCmd(self, cmdinfo, cmdname, alias):
+        # Add struct-member type information to command parameter information
+        OutputGenerator.genCmd(self, cmdinfo, cmdname, alias)
+        members = cmdinfo.elem.findall('.//param')
+        # Iterate over members once to get length parameters for arrays
+        lens = set()
+        for member in members:
+            length = self.getLen(member)
+            if length:
+                lens.add(length)
+        struct_member_dict = dict(self.structMembers)
+
+        # Set command invariant information needed at a per member level in validate...
+        is_create_command = any(filter(lambda pat: pat in cmdname, ('Create', 'Allocate', 'Enumerate', 'RegisterDeviceEvent', 'RegisterDisplayEvent')))
+        last_member_is_pointer = len(members) and self.paramIsPointer(members[-1])
+        iscreate = is_create_command or ('vkGet' in cmdname and last_member_is_pointer)
+        isdestroy = any([destroy_txt in cmdname for destroy_txt in ['Destroy', 'Free']])
+
+        # Generate member info
+        membersInfo = []
+        allocator = 'nullptr'
+        for member in members:
+            # Get type and name of member
+            info = self.getTypeNameTuple(member)
+            type = info[0]
+            name = info[1]
+            cdecl = self.makeCParamDecl(member, 0)
+            # Check for parameter name in lens set
+            iscount = True if name in lens else False
+            length = self.getLen(member)
+            isconst = True if 'const' in cdecl else False
+            # Mark param as local if it is an array of objects
+            islocal = False
+            if type in self.handle_types:
+                if (length is not None) and (isconst == True):
+                    islocal = True
+            # Or if it's a struct that contains an object
+            elif type in struct_member_dict:
+                if self.struct_contains_object(type) == True:
+                    islocal = True
+            if type == 'VkAllocationCallbacks':
+                allocator = name
+            extstructs = member.attrib.get('validextensionstructs') if name == 'pNext' else None
+            membersInfo.append(self.CommandParam(type=type,
+                                                 name=name,
+                                                 isconst=isconst,
+                                                 isoptional=self.paramIsOptional(member),
+                                                 iscount=iscount,
+                                                 len=length,
+                                                 extstructs=extstructs,
+                                                 cdecl=cdecl,
+                                                 islocal=islocal,
+                                                 iscreate=iscreate))
+
+        self.cmd_list.append(cmdname)
+        self.cmd_info_dict[cmdname] = self.CmdInfoData(name=cmdname, cmdinfo=cmdinfo, members=membersInfo, iscreate=iscreate, isdestroy=isdestroy, allocator=allocator, extra_protect=self.featureExtraProtect, alias=alias)
+    #
+    # Create code to create, destroy, and validate Vulkan objects
+    def WrapCommands(self):
+        for cmdname in self.cmd_list:
+            cmddata = self.cmd_info_dict[cmdname]
+            cmdinfo = cmddata.cmdinfo
+            if cmdname in self.interface_functions:
+                continue
+            manual = False
+            if cmdname in self.no_autogen_list:
+                manual = True
+
+            # Generate object handling code
+            (pre_call_validate, pre_call_record, post_call_record) = self.generate_wrapping_code(cmdinfo.elem)
+
+            feature_extra_protect = cmddata.extra_protect
+            if (feature_extra_protect is not None):
+                self.appendSection('command', '')
+                self.appendSection('command', '#ifdef '+ feature_extra_protect)
+                self.prototypes += [ '#ifdef %s' % feature_extra_protect ]
+
+            # Add intercept to procmap
+            self.prototypes += [ '    {"%s", (void*)%s},' % (cmdname,cmdname[2:]) ]
+
+            decls = self.makeCDecls(cmdinfo.elem)
+
+            # Gather the parameter items
+            params = cmdinfo.elem.findall('param/name')
+            # Pull out the text for each of the parameters, separate them by commas in a list
+            paramstext = ', '.join([str(param.text) for param in params])
+            # Generate the API call template
+            fcn_call = cmdinfo.elem.attrib.get('name').replace('vk', 'TOKEN', 1) + '(' + paramstext + ');'
+
+            func_decl_template = decls[0][:-1].split('VKAPI_CALL ')
+            func_decl_template = func_decl_template[1]
+
+            result_type = cmdinfo.elem.find('proto/type')
+
+            if 'object_tracker.h' in self.genOpts.filename:
+                # Output PreCallValidateAPI prototype if necessary
+                if pre_call_validate:
+                    pre_cv_func_decl = 'bool PreCallValidate' + func_decl_template + ' const;'
+                    self.appendSection('command', pre_cv_func_decl)
+
+                # Output PreCallRecordAPI prototype if necessary
+                if pre_call_record:
+                    pre_cr_func_decl = 'void PreCallRecord' + func_decl_template + ';'
+                    self.appendSection('command', pre_cr_func_decl)
+
+                # Output PostCallRecordAPI prototype if necessary
+                if post_call_record:
+                    post_cr_func_decl = 'void PostCallRecord' + func_decl_template + ';'
+                    if result_type.text == 'VkResult':
+                        post_cr_func_decl = post_cr_func_decl.replace(')', ',\n    VkResult                                    result)')
+                    elif result_type.text == 'VkDeviceAddress':
+                        post_cr_func_decl = post_cr_func_decl.replace(')', ',\n    VkDeviceAddress                             result)')
+                    self.appendSection('command', post_cr_func_decl)
+
+            if 'object_tracker.cpp' in self.genOpts.filename:
+                # Output PreCallValidateAPI function if necessary
+                if pre_call_validate and not manual:
+                    pre_cv_func_decl = 'bool ObjectLifetimes::PreCallValidate' + func_decl_template + ' const {'
+                    self.appendSection('command', '')
+                    self.appendSection('command', pre_cv_func_decl)
+                    self.appendSection('command', '    bool skip = false;')
+                    self.appendSection('command', pre_call_validate)
+                    self.appendSection('command', '    return skip;')
+                    self.appendSection('command', '}')
+
+                # Output PreCallRecordAPI function if necessary
+                if pre_call_record and not manual:
+                    pre_cr_func_decl = 'void ObjectLifetimes::PreCallRecord' + func_decl_template + ' {'
+                    self.appendSection('command', '')
+                    self.appendSection('command', pre_cr_func_decl)
+                    self.appendSection('command', pre_call_record)
+                    self.appendSection('command', '}')
+
+                # Output PostCallRecordAPI function if necessary
+                if post_call_record and not manual:
+                    post_cr_func_decl = 'void ObjectLifetimes::PostCallRecord' + func_decl_template + ' {'
+                    self.appendSection('command', '')
+
+                    if result_type.text == 'VkResult':
+                        post_cr_func_decl = post_cr_func_decl.replace(')', ',\n    VkResult                                    result)')
+                        # The pipeline-creation APIs may create objects even on failure -- skip the success-result check
+                        if 'CreateGraphicsPipelines' not in cmdname and 'CreateComputePipelines' not in cmdname and 'CreateRayTracingPipelines' not in cmdname:
+                            post_cr_func_decl = post_cr_func_decl.replace('{', '{\n    if (result != VK_SUCCESS) return;')
+                    elif result_type.text == 'VkDeviceAddress':
+                        post_cr_func_decl = post_cr_func_decl.replace(')', ',\n    VkDeviceAddress                             result)')
+                    self.appendSection('command', post_cr_func_decl)
+
+                    self.appendSection('command', post_call_record)
+                    self.appendSection('command', '}')
+
+            if (feature_extra_protect is not None):
+                self.appendSection('command', '#endif // '+ feature_extra_protect)
+                self.prototypes += [ '#endif' ]
diff --git a/src/third_party/vulkan-validation-layers/src/scripts/parameter_validation_generator.py b/src/third_party/vulkan-validation-layers/src/scripts/parameter_validation_generator.py
new file mode 100644
index 0000000..abe8165
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/scripts/parameter_validation_generator.py
@@ -0,0 +1,1308 @@
+#!/usr/bin/python3 -i
+#
+# Copyright (c) 2015-2019 The Khronos Group Inc.
+# Copyright (c) 2015-2019 Valve Corporation
+# Copyright (c) 2015-2019 LunarG, Inc.
+# Copyright (c) 2015-2019 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Dustin Graves <dustin@lunarg.com>
+# Author: Mark Lobodzinski <mark@lunarg.com>
+# Author: Dave Houlton <daveh@lunarg.com>
+
+import os,re,sys,string,json
+import xml.etree.ElementTree as etree
+from generator import *
+from collections import namedtuple
+from common_codegen import *
+
+# This is a workaround to use a Python 2.7 and 3.x compatible syntax.
+from io import open
+
+# ParameterValidationGeneratorOptions - subclass of GeneratorOptions.
+#
+# Adds options used by ParameterValidationOutputGenerator object during Parameter validation layer generation.
+#
+# Additional members
+#   prefixText - list of strings to prefix generated header with
+#     (usually a copyright statement + calling convention macros).
+#   protectFile - True if multiple inclusion protection should be
+#     generated (based on the filename) around the entire header.
+#   protectFeature - True if #ifndef..#endif protection should be
+#     generated around a feature interface in the header file.
+#   genFuncPointers - True if function pointer typedefs should be
+#     generated
+#   protectProto - If conditional protection should be generated
+#     around prototype declarations, set to either '#ifdef'
+#     to require opt-in (#ifdef protectProtoStr) or '#ifndef'
+#     to require opt-out (#ifndef protectProtoStr). Otherwise
+#     set to None.
+#   protectProtoStr - #ifdef/#ifndef symbol to use around prototype
+#     declarations, if protectProto is set
+#   apicall - string to use for the function declaration prefix,
+#     such as APICALL on Windows.
+#   apientry - string to use for the calling convention macro,
+#     in typedefs, such as APIENTRY.
+#   apientryp - string to use for the calling convention macro
+#     in function pointer typedefs, such as APIENTRYP.
+#   indentFuncProto - True if prototype declarations should put each
+#     parameter on a separate line
+#   indentFuncPointer - True if typedefed function pointers should put each
+#     parameter on a separate line
+#   alignFuncParam - if nonzero and parameters are being put on a
+#     separate line, align parameter names at the specified column
+class ParameterValidationGeneratorOptions(GeneratorOptions):
+    def __init__(self,
+                 conventions = None,
+                 filename = None,
+                 directory = '.',
+                 apiname = None,
+                 profile = None,
+                 versions = '.*',
+                 emitversions = '.*',
+                 defaultExtensions = None,
+                 addExtensions = None,
+                 removeExtensions = None,
+                 emitExtensions = None,
+                 sortProcedure = regSortFeatures,
+                 prefixText = "",
+                 apicall = '',
+                 apientry = '',
+                 apientryp = '',
+                 indentFuncProto = True,
+                 indentFuncPointer = False,
+                 alignFuncParam = 0,
+                 expandEnumerants = True,
+                 valid_usage_path = ''):
+        GeneratorOptions.__init__(self, conventions, filename, directory, apiname, profile,
+                                  versions, emitversions, defaultExtensions,
+                                  addExtensions, removeExtensions, emitExtensions, sortProcedure)
+        self.prefixText      = prefixText
+        self.apicall         = apicall
+        self.apientry        = apientry
+        self.apientryp       = apientryp
+        self.indentFuncProto = indentFuncProto
+        self.indentFuncPointer = indentFuncPointer
+        self.alignFuncParam  = alignFuncParam
+        self.expandEnumerants = expandEnumerants
+        self.valid_usage_path = valid_usage_path
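+
+# A minimal usage sketch (hypothetical values; the real driver script constructs these
+# options when it registers the generator against the loaded registry):
+#
+#   opts = ParameterValidationGeneratorOptions(
+#       filename          = 'parameter_validation.cpp',
+#       directory         = output_dir,
+#       apiname           = 'vulkan',
+#       defaultExtensions = 'vulkan',
+#       valid_usage_path  = scripts_dir)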
+
+# ParameterValidationOutputGenerator - subclass of OutputGenerator.
+# Generates param checker layer code.
+#
+# ---- methods ----
+# ParameterValidationOutputGenerator(errFile, warnFile, diagFile) - args as for
+#   OutputGenerator. Defines additional internal state.
+# ---- methods overriding base class ----
+# beginFile(genOpts)
+# endFile()
+# beginFeature(interface, emit)
+# endFeature()
+# genType(typeinfo,name)
+# genStruct(typeinfo,name)
+# genGroup(groupinfo,name)
+# genEnum(enuminfo, name)
+# genCmd(cmdinfo)
+class ParameterValidationOutputGenerator(OutputGenerator):
+    """Generate Parameter Validation code based on XML element attributes"""
+    # This is an ordered list of sections in the header file.
+    ALL_SECTIONS = ['command']
+    def __init__(self,
+                 errFile = sys.stderr,
+                 warnFile = sys.stderr,
+                 diagFile = sys.stdout):
+        OutputGenerator.__init__(self, errFile, warnFile, diagFile)
+        self.INDENT_SPACES = 4
+        self.declarations = []
+
+        inline_custom_source_preamble = """
+"""
+
+        # These functions have additional, custom-written checks in the utils cpp file. CodeGen will automatically add a call
+        # to these functions of the form 'bool manual_PreCallValidate<ApiName>', where the leading 'vk' is dropped.
+        # See 'manual_PreCallValidateCreateGraphicsPipelines' as an example, and the sketch following this list.
+        self.functions_with_manual_checks = [
+            'vkCreateInstance',
+            'vkCreateDevice',
+            'vkCreateQueryPool',
+            'vkCreateRenderPass',
+            'vkCreateRenderPass2KHR',
+            'vkCreateBuffer',
+            'vkCreateImage',
+            'vkCreateGraphicsPipelines',
+            'vkCreateComputePipelines',
+            'vkCreateRayTracingPipelinesNV',
+            'vkCreateSampler',
+            'vkCreateDescriptorSetLayout',
+            'vkFreeDescriptorSets',
+            'vkUpdateDescriptorSets',
+            'vkBeginCommandBuffer',
+            'vkCmdSetViewport',
+            'vkCmdSetScissor',
+            'vkCmdSetLineWidth',
+            'vkCmdDraw',
+            'vkCmdDrawIndirect',
+            'vkCmdDrawIndexedIndirect',
+            'vkCmdClearAttachments',
+            'vkCmdCopyImage',
+            'vkCmdBindIndexBuffer',
+            'vkCmdBlitImage',
+            'vkCmdCopyBufferToImage',
+            'vkCmdCopyImageToBuffer',
+            'vkCmdUpdateBuffer',
+            'vkCmdFillBuffer',
+            'vkCreateSwapchainKHR',
+            'vkQueuePresentKHR',
+            'vkCreateDescriptorPool',
+            'vkCmdDispatch',
+            'vkCmdDispatchIndirect',
+            'vkCmdDispatchBaseKHR',
+            'vkCmdSetExclusiveScissorNV',
+            'vkCmdSetViewportShadingRatePaletteNV',
+            'vkCmdSetCoarseSampleOrderNV',
+            'vkCmdDrawMeshTasksNV',
+            'vkCmdDrawMeshTasksIndirectNV',
+            'vkCmdDrawMeshTasksIndirectCountNV',
+            'vkAllocateMemory',
+            'vkCreateAccelerationStructureNV',
+            'vkGetAccelerationStructureHandleNV',
+            'vkCmdBuildAccelerationStructureNV',
+            'vkCreateFramebuffer',
+            'vkCmdSetLineStippleEXT',
+            'vkSetDebugUtilsObjectNameEXT',
+            'vkSetDebugUtilsObjectTagEXT',
+            'vkCmdSetViewportWScalingNV',
+            'vkAcquireNextImageKHR',
+            'vkAcquireNextImage2KHR',
+            ]
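+
+        # For illustration, for an entry such as 'vkCreateBuffer' the generated
+        # PreCallValidateCreateBuffer adds a call along the lines of (a sketch):
+        #   if (!skip) skip |= manual_PreCallValidateCreateBuffer(device, pCreateInfo, pAllocator, pBuffer);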
+
+        # Commands to ignore
+        self.blacklist = [
+            'vkGetInstanceProcAddr',
+            'vkGetDeviceProcAddr',
+            'vkEnumerateInstanceVersion',
+            'vkEnumerateInstanceLayerProperties',
+            'vkEnumerateInstanceExtensionProperties',
+            'vkEnumerateDeviceLayerProperties',
+            'vkEnumerateDeviceExtensionProperties',
+            'vkGetDeviceGroupSurfacePresentModes2EXT'
+            ]
+
+        # Structure fields to ignore
+        self.structMemberBlacklist = { 'VkWriteDescriptorSet' : ['dstSet'] }
+        # Validation conditions for some special case struct members that are conditionally validated
+        self.structMemberValidationConditions = { 'VkPipelineColorBlendStateCreateInfo' : { 'logicOp' : '{}logicOpEnable == VK_TRUE' } }
+        # Header version
+        self.headerVersion = None
+        # Internal state - accumulators for different inner block text
+        self.validation = []                              # Text comprising the main per-api parameter validation routines
+        self.stypes = []                                  # Values from the VkStructureType enumeration
+        self.structTypes = dict()                         # Map of Vulkan struct typename to required VkStructureType
+        self.handleTypes = set()                          # Set of handle type names
+        self.commands = []                                # List of CommandData records for all Vulkan commands
+        self.structMembers = []                           # List of StructMemberData records for all Vulkan structs
+        self.validatedStructs = dict()                    # Map of structs type names to generated validation code for that struct type
+        self.enumRanges = dict()                          # Map of enum name to BEGIN/END range values
+        self.enumValueLists = ''                          # String containing enumerated type map definitions
+        self.flags = set()                                # Map of flags typenames
+        self.flagBits = dict()                            # Map of flag bits typename to list of values
+        self.newFlags = set()                             # Map of flags typenames /defined in the current feature/
+        self.required_extensions = dict()                 # Dictionary of required extensions for each item in the current extension
+        self.extension_type = ''                          # Type of active feature (extension), device or instance
+        self.extension_names = dict()                     # Dictionary of extension names to extension name defines
+        self.structextends_list = []                      # List of extensions which extend another struct
+        self.struct_feature_protect = dict()              # Dictionary of structnames and FeatureExtraProtect strings
+        self.valid_vuids = set()                          # Set of all valid VUIDs
+        self.vuid_dict = dict()                           # VUID dictionary (from JSON)
+        self.alias_dict = dict()                          # Dict of cmd|struct aliases
+        self.header_file = False                          # Header file generation flag
+        self.source_file = False                          # Source file generation flag
+        self.returnedonly_structs = []
+        # Named tuples to store struct and command data
+        self.CommandParam = namedtuple('CommandParam', ['type', 'name', 'ispointer', 'isstaticarray', 'isbool', 'israngedenum',
+                                                        'isconst', 'isoptional', 'iscount', 'noautovalidity',
+                                                        'len', 'extstructs', 'condition', 'cdecl'])
+        self.CommandData = namedtuple('CommandData', ['name', 'params', 'cdecl', 'extension_type', 'result'])
+        self.StructMemberData = namedtuple('StructMemberData', ['name', 'members'])
+
+    #
+    # Generate Copyright comment block for file
+    def GenerateCopyright(self):
+        copyright  = '/* *** THIS FILE IS GENERATED - DO NOT EDIT! ***\n'
+        copyright += ' * See parameter_validation_generator.py for modifications\n'
+        copyright += ' *\n'
+        copyright += ' * Copyright (c) 2015-2019 The Khronos Group Inc.\n'
+        copyright += ' * Copyright (c) 2015-2017 Valve Corporation\n'
+        copyright += ' * Copyright (c) 2015-2019 LunarG, Inc.\n'
+        copyright += ' * Copyright (C) 2015-2019 Google Inc.\n'
+        copyright += ' *\n'
+        copyright += ' * Licensed under the Apache License, Version 2.0 (the "License");\n'
+        copyright += ' * you may not use this file except in compliance with the License.\n'
+        copyright += ' * You may obtain a copy of the License at\n'
+        copyright += ' *\n'
+        copyright += ' *     http://www.apache.org/licenses/LICENSE-2.0\n'
+        copyright += ' *\n'
+        copyright += ' * Unless required by applicable law or agreed to in writing, software\n'
+        copyright += ' * distributed under the License is distributed on an "AS IS" BASIS,\n'
+        copyright += ' * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n'
+        copyright += ' * See the License for the specific language governing permissions and\n'
+        copyright += ' * limitations under the License.\n'
+        copyright += ' *\n'
+        copyright += ' * Author: Mark Lobodzinski <mark@LunarG.com>\n'
+        copyright += ' * Author: Dave Houlton <daveh@LunarG.com>\n'
+        copyright += ' */\n\n'
+        return copyright
+    #
+    # Increases the global indent variable
+    def incIndent(self, indent):
+        inc = ' ' * self.INDENT_SPACES
+        if indent:
+            return indent + inc
+        return inc
+    #
+    # Decreases the global indent variable
+    def decIndent(self, indent):
+        if indent and (len(indent) > self.INDENT_SPACES):
+            return indent[:-self.INDENT_SPACES]
+        return ''
+    #
+    # Walk the JSON-derived dict and find all "vuid" key values
+    def ExtractVUIDs(self, d):
+        if hasattr(d, 'items'):
+            for k, v in d.items():
+                if k == "vuid":
+                    yield v
+                elif isinstance(v, dict):
+                    for s in self.ExtractVUIDs(v):
+                        yield s
+                elif isinstance(v, list):
+                    for l in v:
+                        for s in self.ExtractVUIDs(l):
+                            yield s
+    #
+    # Called at file creation time
+    def beginFile(self, genOpts):
+        OutputGenerator.beginFile(self, genOpts)
+        self.header_file = (genOpts.filename == 'parameter_validation.h')
+        self.source_file = (genOpts.filename == 'parameter_validation.cpp')
+
+        if not self.header_file and not self.source_file:
+            print("Error: Output Filenames have changed, update generator source.\n")
+            sys.exit(1)
+
+        if self.source_file or self.header_file:
+            # Output Copyright text
+            s = self.GenerateCopyright()
+            write(s, file=self.outFile)
+
+        if self.header_file:
+            return
+
+        # Build map of structure type names to VkStructureType enum values
+        # Find all types of category "struct"
+        for struct in self.registry.tree.iterfind('types/type[@category="struct"]'):
+            # Check if struct has member named "sType" of type "VkStructureType" which has values defined
+            stype = struct.find('member[name="sType"][type="VkStructureType"][@values]')
+            if stype is not None:
+                # Store VkStructureType value for this type
+                self.structTypes[struct.get('name')] = stype.get('values')
+
+        self.valid_usage_path = genOpts.valid_usage_path
+        vu_json_filename = os.path.join(self.valid_usage_path + os.sep, 'validusage.json')
+        if os.path.isfile(vu_json_filename):
+            json_file = open(vu_json_filename, 'r')
+            self.vuid_dict = json.load(json_file)
+            json_file.close()
+        if len(self.vuid_dict) == 0:
+            print("Error: Could not find, or error loading %s\n" % vu_json_filename)
+            sys.exit(1)
+        #
+        # Build a set of all vuid text strings found in validusage.json
+        for json_vuid_string in self.ExtractVUIDs(self.vuid_dict):
+            self.valid_vuids.add(json_vuid_string)
+        #
+        # Headers
+        write('#include "chassis.h"', file=self.outFile)
+        self.newline()
+        write('#include "stateless_validation.h"', file=self.outFile)
+        self.newline()
+    #
+    # Called at end-time for final content output
+    def endFile(self):
+        if self.source_file:
+            # C-specific
+            self.newline()
+            write(self.enumValueLists, file=self.outFile)
+            self.newline()
+
+            pnext_handler  = 'bool StatelessValidation::ValidatePnextStructContents(const char *api_name, const ParameterName &parameter_name, const VkBaseOutStructure* header) const {\n'
+            pnext_handler += '    bool skip = false;\n'
+            pnext_handler += '    switch(header->sType) {\n'
+
+            # Do some processing here to extract data from validatedstructs...
+            for item in self.structextends_list:
+                postProcSpec = {}
+                postProcSpec['ppp'] = '' if not item else '{postProcPrefix}'
+                postProcSpec['pps'] = '' if not item else '{postProcSuffix}'
+                postProcSpec['ppi'] = '' if not item else '{postProcInsert}'
+
+                pnext_case = '\n'
+                protect = ''
+                # Guard struct cases with feature ifdefs, if necessary
+                if item in self.struct_feature_protect.keys():
+                    protect = self.struct_feature_protect[item]
+                    pnext_case += '#ifdef %s\n' % protect
+                pnext_case += '        // Validation code for %s structure members\n' % item
+                pnext_case += '        case %s: {\n' % self.structTypes[item]
+                pnext_case += '            %s *structure = (%s *) header;\n' % (item, item)
+                expr = self.expandStructCode(item, item, 'structure->', '', '            ', [], postProcSpec)
+                struct_validation_source = self.ScrubStructCode(expr)
+                pnext_case += '%s' % struct_validation_source
+                pnext_case += '        } break;\n'
+                if protect:
+                    pnext_case += '#endif // %s\n' % protect
+                # Skip functions containing no validation
+                if struct_validation_source:
+                    pnext_handler += pnext_case
+            pnext_handler += '        default:\n'
+            pnext_handler += '            skip = false;\n'
+            pnext_handler += '    }\n'
+            pnext_handler += '    return skip;\n'
+            pnext_handler += '}\n'
+            write(pnext_handler, file=self.outFile)
+            self.newline()
+
+            ext_template  = 'bool StatelessValidation::OutputExtensionError(const std::string &api_name, const std::string &extension_name) const {\n'
+            ext_template += '    return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,\n'
+            ext_template += '                   kVUID_PVError_ExtensionNotEnabled, "Attempted to call %s() but its required extension %s has not been enabled\\n",\n'
+            ext_template += '                   api_name.c_str(), extension_name.c_str());\n'
+            ext_template += '}\n'
+            write(ext_template, file=self.outFile)
+            self.newline()
+            commands_text = '\n'.join(self.validation)
+            write(commands_text, file=self.outFile)
+            self.newline()
+        if self.header_file:
+            # Output declarations and record intercepted procedures
+            write('\n'.join(self.declarations), file=self.outFile)
+            # Finish processing in superclass
+            OutputGenerator.endFile(self)
+    #
+    # Processing at beginning of each feature or extension
+    def beginFeature(self, interface, emit):
+        # Start processing in superclass
+        OutputGenerator.beginFeature(self, interface, emit)
+        # C-specific
+        # Accumulate includes, defines, types, enums, function pointer typedefs, and function prototypes separately for this
+        # feature. They're only printed in endFeature().
+        self.headerVersion = None
+        self.stypes = []
+        self.commands = []
+        self.structMembers = []
+        self.newFlags = set()
+        self.featureExtraProtect = GetFeatureProtect(interface)
+        # Get base list of extension dependencies for all items in this extension
+        base_required_extensions = []
+        if "VK_VERSION_1" not in self.featureName:
+            # Save Name Define to get correct enable name later
+            nameElem = interface[0][1]
+            name = nameElem.get('name')
+            self.extension_names[self.featureName] = name
+            # This extension is the first dependency for this command
+            base_required_extensions.append(self.featureName)
+        # Add any defined extension dependencies to the base dependency list for this extension
+        requires = interface.get('requires')
+        if requires is not None:
+            base_required_extensions.extend(requires.split(','))
+        # Build dictionary of extension dependencies for each item in this extension
+        self.required_extensions = dict()
+        for require_element in interface.findall('require'):
+            # Copy base extension dependency list
+            required_extensions = list(base_required_extensions)
+            # Add any additional extension dependencies specified in this require block
+            additional_extensions = require_element.get('extension')
+            if additional_extensions:
+                required_extensions.extend(additional_extensions.split(','))
+            # Save full extension list for all named items
+            for element in require_element.findall('*[@name]'):
+                self.required_extensions[element.get('name')] = required_extensions
+
+        # And note if this is an Instance or Device extension
+        self.extension_type = interface.get('type')
+    #
+    # Called at the end of each extension (feature)
+    def endFeature(self):
+        if self.header_file:
+            return
+        # C-specific
+        # Actually write the interface to the output file.
+        if (self.emit):
+            # If type declarations are needed by other features based on this one, it may be necessary to suppress the ExtraProtect,
+            # or move it below the 'for section...' loop.
+            ifdef = ''
+            if (self.featureExtraProtect is not None):
+                ifdef = '#ifdef %s\n' % self.featureExtraProtect
+                self.validation.append(ifdef)
+            # Generate the struct member checking code from the captured data
+            self.processStructMemberData()
+            # Generate the command parameter checking code from the captured data
+            self.processCmdData()
+            # Write the declaration for the HeaderVersion
+            if self.headerVersion:
+                write('const uint32_t GeneratedVulkanHeaderVersion = {};'.format(self.headerVersion), file=self.outFile)
+                self.newline()
+            # Write the declarations for the VkFlags values combining all flag bits
+            for flag in sorted(self.newFlags):
+                flagBits = flag.replace('Flags', 'FlagBits')
+                if flagBits in self.flagBits:
+                    bits = self.flagBits[flagBits]
+                    decl = 'const {} All{} = {}'.format(flag, flagBits, bits[0])
+                    for bit in bits[1:]:
+                        decl += '|' + bit
+                    decl += ';'
+                    write(decl, file=self.outFile)
+            endif = '\n'
+            if (self.featureExtraProtect is not None):
+                endif = '#endif // %s\n' % self.featureExtraProtect
+            self.validation.append(endif)
+        # Finish processing in superclass
+        OutputGenerator.endFeature(self)
+    #
+    # Type generation
+    def genType(self, typeinfo, name, alias):
+        # record the name/alias pair
+        if alias is not None:
+            self.alias_dict[name]=alias
+        OutputGenerator.genType(self, typeinfo, name, alias)
+        typeElem = typeinfo.elem
+        # If the type is a struct type, traverse the embedded <member> tags generating a structure. Otherwise, emit the tag text.
+        category = typeElem.get('category')
+        if (category == 'struct' or category == 'union'):
+            self.genStruct(typeinfo, name, alias)
+        elif (category == 'handle'):
+            self.handleTypes.add(name)
+        elif (category == 'bitmask'):
+            self.flags.add(name)
+            self.newFlags.add(name)
+        elif (category == 'define'):
+            if name == 'VK_HEADER_VERSION':
+                nameElem = typeElem.find('name')
+                self.headerVersion = noneStr(nameElem.tail).strip()
+    #
+    # Struct parameter check generation.
+    # This is a special case of the <type> tag where the contents are interpreted as a set of <member> tags instead of freeform C
+    # type declarations. The <member> tags are just like <param> tags - they are a declaration of a struct or union member.
+    # Only simple member declarations are supported (no nested structs etc.)
+    def genStruct(self, typeinfo, typeName, alias):
+        if not self.source_file:
+            return
+        # alias has already been recorded in genType, above
+        OutputGenerator.genStruct(self, typeinfo, typeName, alias)
+        conditions = self.structMemberValidationConditions[typeName] if typeName in self.structMemberValidationConditions else None
+        members = typeinfo.elem.findall('.//member')
+        if self.featureExtraProtect is not None:
+            self.struct_feature_protect[typeName] = self.featureExtraProtect
+        #
+        # Iterate over members once to get length parameters for arrays
+        lens = set()
+        for member in members:
+            len = self.getLen(member)
+            if len:
+                lens.add(len)
+        #
+        # Generate member info
+        membersInfo = []
+        for member in members:
+            # Get the member's type and name
+            info = self.getTypeNameTuple(member)
+            type = info[0]
+            name = info[1]
+            stypeValue = ''
+            cdecl = self.makeCParamDecl(member, 0)
+
+            # Store pointer/array/string info -- Check for parameter name in lens set
+            iscount = False
+            if name in lens:
+                iscount = True
+            # The pNext members are not tagged as optional, but are treated as optional for parameter NULL checks.  Static array
+            # members are also treated as optional to skip NULL pointer validation, as they won't be NULL.
+            isstaticarray = self.paramIsStaticArray(member)
+            isoptional = False
+            if self.paramIsOptional(member) or (name == 'pNext') or (isstaticarray):
+                isoptional = True
+            # Determine if value should be ignored by code generation.
+            noautovalidity = False
+            if (member.attrib.get('noautovalidity') is not None) or ((typeName in self.structMemberBlacklist) and (name in self.structMemberBlacklist[typeName])):
+                noautovalidity = True
+            structextends = False
+            membersInfo.append(self.CommandParam(type=type, name=name,
+                                                ispointer=self.paramIsPointer(member),
+                                                isstaticarray=isstaticarray,
+                                                isbool=True if type == 'VkBool32' else False,
+                                                israngedenum=True if type in self.enumRanges else False,
+                                                isconst=True if 'const' in cdecl else False,
+                                                isoptional=isoptional,
+                                                iscount=iscount,
+                                                noautovalidity=noautovalidity,
+                                                len=self.getLen(member),
+                                                extstructs=self.registry.validextensionstructs[typeName] if name == 'pNext' else None,
+                                                condition=conditions[name] if conditions and name in conditions else None,
+                                                cdecl=cdecl))
+        # If this struct extends another, keep its name in list for further processing
+        if typeinfo.elem.attrib.get('structextends') is not None:
+            self.structextends_list.append(typeName)
+        # Returnedonly structs should have most of their members ignored -- on entry, we only care about validating the sType and
+        # pNext members. Everything else will be overwritten by the callee.
+        if typeinfo.elem.attrib.get('returnedonly') is not None:
+            self.returnedonly_structs.append(typeName)
+            membersInfo = [m for m in membersInfo if m.name in ('sType', 'pNext')]
+        self.structMembers.append(self.StructMemberData(name=typeName, members=membersInfo))
+    #
+    # Capture group (e.g. C "enum" type) info to be used for param check code generation.
+    # These are concatenated together with other types.
+    def genGroup(self, groupinfo, groupName, alias):
+        if not self.source_file:
+            return
+        # record the name/alias pair
+        if alias is not None:
+            self.alias_dict[groupName]=alias
+        OutputGenerator.genGroup(self, groupinfo, groupName, alias)
+        groupElem = groupinfo.elem
+        # Store the sType values
+        if groupName == 'VkStructureType':
+            for elem in groupElem.findall('enum'):
+                self.stypes.append(elem.get('name'))
+        elif 'FlagBits' in groupName:
+            bits = []
+            for elem in groupElem.findall('enum'):
+                if elem.get('supported') != 'disabled':
+                    bits.append(elem.get('name'))
+            if bits:
+                self.flagBits[groupName] = bits
+        else:
+            # Determine if begin/end ranges are needed (we don't do this for VkStructureType, which has a more finely grained check)
+            expandName = re.sub(r'([0-9a-z_])([A-Z0-9][^A-Z0-9]?)',r'\1_\2',groupName).upper()
+            expandPrefix = expandName
+            expandSuffix = ''
+            expandSuffixMatch = re.search(r'[A-Z][A-Z]+$',groupName)
+            if expandSuffixMatch:
+                expandSuffix = '_' + expandSuffixMatch.group()
+                # Strip off the suffix from the prefix
+                expandPrefix = expandName.rsplit(expandSuffix, 1)[0]
+            isEnum = ('FLAG_BITS' not in expandPrefix)
+            if isEnum:
+                self.enumRanges[groupName] = (expandPrefix + '_BEGIN_RANGE' + expandSuffix, expandPrefix + '_END_RANGE' + expandSuffix)
+                # Create definition for a list containing valid enum values for this enumerated type
+                if self.featureExtraProtect is not None:
+                    enum_entry = '\n#ifdef %s\n' % self.featureExtraProtect
+                else:
+                    enum_entry = ''
+                enum_entry += 'const std::vector<%s> All%sEnums = {' % (groupName, groupName)
+                for enum in groupElem:
+                    name = enum.get('name')
+                    if name is not None and enum.get('supported') != 'disabled':
+                        enum_entry += '%s, ' % name
+                enum_entry += '};\n'
+                if self.featureExtraProtect is not None:
+                    enum_entry += '#endif // %s\n' % self.featureExtraProtect
+                self.enumValueLists += enum_entry
+    #
+    # Capture command parameter info to be used for param check code generation.
+    def genCmd(self, cmdinfo, name, alias):
+        # record the name/alias pair
+        if alias is not None:
+            self.alias_dict[name]=alias
+        OutputGenerator.genCmd(self, cmdinfo, name, alias)
+        decls = self.makeCDecls(cmdinfo.elem)
+        typedef = decls[1]
+        typedef = typedef.split(')',1)[1]
+        if self.header_file:
+            if name not in self.blacklist:
+                if (self.featureExtraProtect is not None):
+                    self.declarations += [ '#ifdef %s' % self.featureExtraProtect ]
+                # Strip off 'vk' from API name
+                decl = '%s%s' % ('bool PreCallValidate', decls[0].split("VKAPI_CALL vk")[1])
+                decl = str(decl).replace(';', ' const;')
+                self.declarations += [ decl ]
+                if (self.featureExtraProtect is not None):
+                    self.declarations += [ '#endif' ]
+        if self.source_file:
+            if name not in self.blacklist:
+                params = cmdinfo.elem.findall('param')
+                # Get list of array lengths
+                lens = set()
+                for param in params:
+                    len = self.getLen(param)
+                    if len:
+                        lens.add(len)
+                # Get param info
+                paramsInfo = []
+                for param in params:
+                    paramInfo = self.getTypeNameTuple(param)
+                    cdecl = self.makeCParamDecl(param, 0)
+                    # Check for parameter name in lens set
+                    iscount = False
+                    if paramInfo[1] in lens:
+                        iscount = True
+                    paramsInfo.append(self.CommandParam(type=paramInfo[0], name=paramInfo[1],
+                                                        ispointer=self.paramIsPointer(param),
+                                                        isstaticarray=self.paramIsStaticArray(param),
+                                                        isbool=True if paramInfo[0] == 'VkBool32' else False,
+                                                        israngedenum=True if paramInfo[0] in self.enumRanges else False,
+                                                        isconst=True if 'const' in cdecl else False,
+                                                        isoptional=self.paramIsOptional(param),
+                                                        iscount=iscount,
+                                                        noautovalidity=True if param.attrib.get('noautovalidity') is not None else False,
+                                                        len=self.getLen(param),
+                                                        extstructs=None,
+                                                        condition=None,
+                                                        cdecl=cdecl))
+                # Save return value information, if any
+                result_type = ''
+                resultinfo = cmdinfo.elem.find('proto/type')
+                if (resultinfo is not None and resultinfo.text != 'void'):
+                    result_type = resultinfo.text
+                self.commands.append(self.CommandData(name=name, params=paramsInfo, cdecl=self.makeCDecls(cmdinfo.elem)[0], extension_type=self.extension_type, result=result_type))
+    #
+    # Check if the parameter passed in is a pointer
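+    # Returns the pointer depth of the parameter type, e.g. 0 for a plain value and 2 for 'void**';
+    # PFN_ function pointer typedefs are counted as a single level of indirection.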
+    def paramIsPointer(self, param):
+        ispointer = 0
+        paramtype = param.find('type')
+        if (paramtype.tail is not None) and ('*' in paramtype.tail):
+            ispointer = paramtype.tail.count('*')
+        elif paramtype.text[:4] == 'PFN_':
+            # Treat function pointer typedefs as a pointer to a single value
+            ispointer = 1
+        return ispointer
+    #
+    # Check if the parameter passed in is a static array
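+    # Returns the number of fixed-size array dimensions, e.g. a declaration ending in '[4]' yields 1.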
+    def paramIsStaticArray(self, param):
+        isstaticarray = 0
+        paramname = param.find('name')
+        if (paramname.tail is not None) and ('[' in paramname.tail):
+            isstaticarray = paramname.tail.count('[')
+        return isstaticarray
+    #
+    # Check if the parameter passed in is optional
+    # Returns True/False, or a list of Boolean values for comma separated optional attributes (optional='false,true')
+    def paramIsOptional(self, param):
+        # See if the handle is optional
+        isoptional = False
+        # Simple, if it's optional, return true
+        optString = param.attrib.get('optional')
+        if optString:
+            if optString == 'true':
+                isoptional = True
+            elif ',' in optString:
+                opts = []
+                for opt in optString.split(','):
+                    val = opt.strip()
+                    if val == 'true':
+                        opts.append(True)
+                    elif val == 'false':
+                        opts.append(False)
+                    else:
+                        print('Unrecognized optional attribute value', val)
+                isoptional = opts
+        return isoptional
+    #
+    # Check if the handle passed in is optional
+    # Uses the same logic as ValidityOutputGenerator.isHandleOptional
+    def isHandleOptional(self, param, lenParam):
+        # Simple, if it's optional, return true
+        if param.isoptional:
+            return True
+        # If no validity is being generated, it usually means that validity is complex and not absolute, so let's say yes.
+        if param.noautovalidity:
+            return True
+        # If the parameter is an array and we haven't already returned, find out if any of the len parameters are optional
+        if lenParam and lenParam.isoptional:
+            return True
+        return False
+    #
+    # Retrieve the value of the len tag
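+    # e.g. (illustrative names) len="fooCount" is returned unchanged, len="pInfo::fooCount" becomes 'pInfo->fooCount',
+    # and a bare len="null-terminated" (a single string) yields None.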
+    def getLen(self, param):
+        result = None
+        len = param.attrib.get('len')
+        if len and len != 'null-terminated':
+            # For string arrays, 'len' can look like 'count,null-terminated', indicating that we have a null terminated array of
+            # strings.  We strip the null-terminated from the 'len' field and only return the parameter specifying the string count
+            if 'null-terminated' in len:
+                result = len.split(',')[0]
+            else:
+                result = len
+            result = str(result).replace('::', '->')
+        return result
+    #
+    # Retrieve the type and name for a parameter
+    def getTypeNameTuple(self, param):
+        type = ''
+        name = ''
+        for elem in param:
+            if elem.tag == 'type':
+                type = noneStr(elem.text)
+            elif elem.tag == 'name':
+                name = noneStr(elem.text)
+        return (type, name)
+    #
+    # Find a named parameter in a parameter list
+    def getParamByName(self, params, name):
+        for param in params:
+            if param.name == name:
+                return param
+        return None
+    #
+    # Extract length values from latexmath.  Currently an inflexible solution that looks for specific
+    # patterns that are found in vk.xml.  Will need to be updated when new patterns are introduced.
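+    # e.g. 'latexmath:[dataSize \over 4]' -> ('dataSize', 'dataSize/4') and
+    # 'latexmath:[\lceil{\mathit{rasterizationSamples} \over 32}\rceil]' -> ('rasterizationSamples', 'ceil(rasterizationSamples/32)').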
+    def parseLateXMath(self, source):
+        name = 'ERROR'
+        decoratedName = 'ERROR'
+        if 'mathit' in source:
+            # Matches expressions similar to 'latexmath:[\lceil{\mathit{rasterizationSamples} \over 32}\rceil]'
+            match = re.match(r'latexmath\s*\:\s*\[\s*\\l(\w+)\s*\{\s*\\mathit\s*\{\s*(\w+)\s*\}\s*\\over\s*(\d+)\s*\}\s*\\r(\w+)\s*\]', source)
+            if not match or match.group(1) != match.group(4):
+                raise RuntimeError('Unrecognized latexmath expression')
+            name = match.group(2)
+            decoratedName = '{}({}/{})'.format(*match.group(1, 2, 3))
+        else:
+            # Matches expressions similar to 'latexmath : [dataSize \over 4]'
+            match = re.match(r'latexmath\s*\:\s*\[\s*(\\textrm\{)?(\w+)\}?\s*\\over\s*(\d+)\s*\]', source)
+            name = match.group(2)
+            decoratedName = '{}/{}'.format(*match.group(2, 3))
+        return name, decoratedName
+    #
+    # Get the length parameter record for the specified parameter name
+    def getLenParam(self, params, name):
+        lenParam = None
+        if name:
+            if '->' in name:
+                # The count is obtained by dereferencing a member of a struct parameter
+                lenParam = self.CommandParam(name=name, iscount=True, ispointer=False, isbool=False, israngedenum=False, isconst=False,
+                                             isstaticarray=None, isoptional=False, type=None, noautovalidity=False,
+                                             len=None, extstructs=None, condition=None, cdecl=None)
+            elif 'latexmath' in name:
+                lenName, decoratedName = self.parseLateXMath(name)
+                lenParam = self.getParamByName(params, lenName)
+            else:
+                lenParam = self.getParamByName(params, name)
+        return lenParam
+    #
+    # Convert a vulkan.h command declaration into a parameter_validation.h definition
+    def getCmdDef(self, cmd):
+        # Strip the trailing ';' and split into individual lines
+        lines = cmd.cdecl[:-1].split('\n')
+        cmd_hdr = '\n'.join(lines)
+        return cmd_hdr
+    #
+    # Generate the code to check for a NULL dereference before calling the
+    # validation function
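+    # e.g. for a length expression of 'pInfo->fooCount' (illustrative names) the validation expressions are wrapped in
+    # 'if (pInfo != NULL) { ... }' so the count is never read through a NULL pointer.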
+    def genCheckedLengthCall(self, name, exprs):
+        count = name.count('->')
+        if count:
+            checkedExpr = []
+            localIndent = ''
+            elements = name.split('->')
+            # Open the if expression blocks
+            for i in range(0, count):
+                checkedExpr.append(localIndent + 'if ({} != NULL) {{\n'.format('->'.join(elements[0:i+1])))
+                localIndent = self.incIndent(localIndent)
+            # Add the validation expression
+            for expr in exprs:
+                checkedExpr.append(localIndent + expr)
+            # Close the if blocks
+            for i in range(0, count):
+                localIndent = self.decIndent(localIndent)
+                checkedExpr.append(localIndent + '}\n')
+            return [checkedExpr]
+        # No if statements were required
+        return exprs
+    #
+    # Generate code to check for a specific condition before executing validation code
+    def genConditionalCall(self, prefix, condition, exprs):
+        checkedExpr = []
+        localIndent = ''
+        formattedCondition = condition.format(prefix)
+        checkedExpr.append(localIndent + 'if ({})\n'.format(formattedCondition))
+        checkedExpr.append(localIndent + '{\n')
+        localIndent = self.incIndent(localIndent)
+        for expr in exprs:
+            checkedExpr.append(localIndent + expr)
+        localIndent = self.decIndent(localIndent)
+        checkedExpr.append(localIndent + '}\n')
+        return [checkedExpr]
+    #
+    # Get VUID identifier from implicit VUID tag
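+    # e.g. GetVuid('vkCreateInstance', 'pCreateInfo-parameter') returns '"VUID-vkCreateInstance-pCreateInfo-parameter"'
+    # if that VUID is in the valid-usage database, otherwise it falls back to the alias name or kVUIDUndefined.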
+    def GetVuid(self, name, suffix):
+        vuid_string = 'VUID-%s-%s' % (name, suffix)
+        vuid = "kVUIDUndefined"
+        if '->' in vuid_string:
+           return vuid
+        if vuid_string in self.valid_vuids:
+            vuid = "\"%s\"" % vuid_string
+        else:
+            if name in self.alias_dict:
+                alias_string = 'VUID-%s-%s' % (self.alias_dict[name], suffix)
+                if alias_string in self.valid_vuids:
+                    vuid = "\"%s\"" % alias_string
+        return vuid
+    #
+    # Generate the sType check string
+    def makeStructTypeCheck(self, prefix, value, lenValue, valueRequired, lenValueRequired, lenPtrRequired, funcPrintName, lenPrintName, valuePrintName, postProcSpec, struct_type_name):
+        checkExpr = []
+        stype = self.structTypes[value.type]
+        vuid_name = struct_type_name if struct_type_name is not None else funcPrintName
+        stype_vuid = self.GetVuid(value.type, "sType-sType")
+        param_vuid = self.GetVuid(vuid_name, "%s-parameter" % value.name)
+
+        if lenValue:
+            count_required_vuid = self.GetVuid(vuid_name, "%s-arraylength" % lenValue.name)
+
+            # This is an array with a pointer to a count value
+            if lenValue.ispointer:
+                # When the length parameter is a pointer, there is an extra Boolean parameter in the function call to indicate if it is required
+                checkExpr.append('skip |= validate_struct_type_array("{}", {ppp}"{ldn}"{pps}, {ppp}"{dn}"{pps}, "{sv}", {pf}{ln}, {pf}{vn}, {sv}, {}, {}, {}, {}, {}, {});\n'.format(
+                    funcPrintName, lenPtrRequired, lenValueRequired, valueRequired, stype_vuid, param_vuid, count_required_vuid, ln=lenValue.name, ldn=lenPrintName, dn=valuePrintName, vn=value.name, sv=stype, pf=prefix, **postProcSpec))
+            # This is an array with an integer count value
+            else:
+                checkExpr.append('skip |= validate_struct_type_array("{}", {ppp}"{ldn}"{pps}, {ppp}"{dn}"{pps}, "{sv}", {pf}{ln}, {pf}{vn}, {sv}, {}, {}, {}, {}, {});\n'.format(
+                    funcPrintName, lenValueRequired, valueRequired, stype_vuid, param_vuid, count_required_vuid, ln=lenValue.name, ldn=lenPrintName, dn=valuePrintName, vn=value.name, sv=stype, pf=prefix, **postProcSpec))
+        # This is an individual struct
+        else:
+            checkExpr.append('skip |= validate_struct_type("{}", {ppp}"{}"{pps}, "{sv}", {}{vn}, {sv}, {}, {}, {});\n'.format(
+                funcPrintName, valuePrintName, prefix, valueRequired, param_vuid, stype_vuid, vn=value.name, sv=stype, vt=value.type, **postProcSpec))
+        return checkExpr
+    #
+    # Generate the handle check string
+    def makeHandleCheck(self, prefix, value, lenValue, valueRequired, lenValueRequired, funcPrintName, lenPrintName, valuePrintName, postProcSpec):
+        checkExpr = []
+        if lenValue:
+            if lenValue.ispointer:
+                # This is assumed to be an output array with a pointer to a count value
+                raise RuntimeError('Unsupported parameter validation case: Output handle array elements are not NULL checked')
+            else:
+                # This is an array with an integer count value
+                checkExpr.append('skip |= validate_handle_array("{}", {ppp}"{ldn}"{pps}, {ppp}"{dn}"{pps}, {pf}{ln}, {pf}{vn}, {}, {});\n'.format(
+                    funcPrintName, lenValueRequired, valueRequired, ln=lenValue.name, ldn=lenPrintName, dn=valuePrintName, vn=value.name, pf=prefix, **postProcSpec))
+        else:
+            # This is assumed to be an output handle pointer
+            raise RuntimeError('Unsupported parameter validation case: Output handles are not NULL checked')
+        return checkExpr
+    #
+    # Generate check string for an array of VkFlags values
+    def makeFlagsArrayCheck(self, prefix, value, lenValue, valueRequired, lenValueRequired, funcPrintName, lenPrintName, valuePrintName, postProcSpec):
+        checkExpr = []
+        flagBitsName = value.type.replace('Flags', 'FlagBits')
+        if flagBitsName not in self.flagBits:
+            raise RuntimeError('Unsupported parameter validation case: array of reserved VkFlags')
+        else:
+            allFlags = 'All' + flagBitsName
+            checkExpr.append('skip |= validate_flags_array("{}", {ppp}"{}"{pps}, {ppp}"{}"{pps}, "{}", {}, {pf}{}, {pf}{}, {}, {});\n'.format(funcPrintName, lenPrintName, valuePrintName, flagBitsName, allFlags, lenValue.name, value.name, lenValueRequired, valueRequired, pf=prefix, **postProcSpec))
+        return checkExpr
+    #
+    # Generate pNext check string
+    def makeStructNextCheck(self, prefix, value, funcPrintName, valuePrintName, postProcSpec, struct_type_name):
+        checkExpr = []
+        # Generate an array of acceptable VkStructureType values for pNext
+        extStructCount = 0
+        extStructVar = 'NULL'
+        extStructNames = 'NULL'
+        vuid = self.GetVuid(struct_type_name, "pNext-pNext")
+        if value.extstructs:
+            extStructVar = 'allowed_structs_{}'.format(struct_type_name)
+            extStructCount = 'ARRAY_SIZE({})'.format(extStructVar)
+            extStructNames = '"' + ', '.join(value.extstructs) + '"'
+            checkExpr.append('const VkStructureType {}[] = {{ {} }};\n'.format(extStructVar, ', '.join([self.structTypes[s] for s in value.extstructs])))
+        checkExpr.append('skip |= validate_struct_pnext("{}", {ppp}"{}"{pps}, {}, {}{}, {}, {}, GeneratedVulkanHeaderVersion, {});\n'.format(
+            funcPrintName, valuePrintName, extStructNames, prefix, value.name, extStructCount, extStructVar, vuid, **postProcSpec))
+        return checkExpr
+    #
+    # Generate the pointer check string
+    def makePointerCheck(self, prefix, value, lenValue, valueRequired, lenValueRequired, lenPtrRequired, funcPrintName, lenPrintName, valuePrintName, postProcSpec, struct_type_name):
+        checkExpr = []
+        vuid_tag_name = struct_type_name if struct_type_name is not None else funcPrintName
+        if lenValue:
+            count_required_vuid = self.GetVuid(vuid_tag_name, "%s-arraylength" % (lenValue.name))
+            array_required_vuid = self.GetVuid(vuid_tag_name, "%s-parameter" % (value.name))
+            # TODO: Remove workaround for missing optional tag in vk.xml
+            if array_required_vuid == '"VUID-VkFramebufferCreateInfo-pAttachments-parameter"':
+                return []
+            # This is an array with a pointer to a count value
+            if lenValue.ispointer:
+                # If count and array parameters are optional, there will be no validation
+                if valueRequired == 'true' or lenPtrRequired == 'true' or lenValueRequired == 'true':
+                    # When the length parameter is a pointer, there is an extra Boolean parameter in the function call to indicate if it is required
+                    checkExpr.append('skip |= validate_array("{}", {ppp}"{ldn}"{pps}, {ppp}"{dn}"{pps}, {pf}{ln}, &{pf}{vn}, {}, {}, {}, {}, {});\n'.format(
+                        funcPrintName, lenPtrRequired, lenValueRequired, valueRequired, count_required_vuid, array_required_vuid, ln=lenValue.name, ldn=lenPrintName, dn=valuePrintName, vn=value.name, pf=prefix, **postProcSpec))
+            # This is an array with an integer count value
+            else:
+                # If count and array parameters are optional, there will be no validation
+                if valueRequired == 'true' or lenValueRequired == 'true':
+                    if value.type != 'char':
+                        checkExpr.append('skip |= validate_array("{}", {ppp}"{ldn}"{pps}, {ppp}"{dn}"{pps}, {pf}{ln}, &{pf}{vn}, {}, {}, {}, {});\n'.format(
+                            funcPrintName, lenValueRequired, valueRequired, count_required_vuid, array_required_vuid, ln=lenValue.name, ldn=lenPrintName, dn=valuePrintName, vn=value.name, pf=prefix, **postProcSpec))
+                    else:
+                        # Arrays of strings receive special processing
+                        checkExpr.append('skip |= validate_string_array("{}", {ppp}"{ldn}"{pps}, {ppp}"{dn}"{pps}, {pf}{ln}, {pf}{vn}, {}, {}, {}, {});\n'.format(
+                            funcPrintName, lenValueRequired, valueRequired, count_required_vuid, array_required_vuid, ln=lenValue.name, ldn=lenPrintName, dn=valuePrintName, vn=value.name, pf=prefix, **postProcSpec))
+            if checkExpr:
+                if lenValue and ('->' in lenValue.name):
+                    # Add checks to ensure the validation call does not dereference a NULL pointer to obtain the count
+                    checkExpr = self.genCheckedLengthCall(lenValue.name, checkExpr)
+        # This is an individual struct that is not allowed to be NULL
+        elif not value.isoptional:
+            # Function pointers need a reinterpret_cast to void*
+            ptr_required_vuid = self.GetVuid(vuid_tag_name, "%s-parameter" % (value.name))
+            if value.type[:4] == 'PFN_':
+                allocator_dict = {'pfnAllocation': '"VUID-VkAllocationCallbacks-pfnAllocation-00632"',
+                                  'pfnReallocation': '"VUID-VkAllocationCallbacks-pfnReallocation-00633"',
+                                  'pfnFree': '"VUID-VkAllocationCallbacks-pfnFree-00634"',
+                                 }
+                vuid = allocator_dict.get(value.name)
+                if vuid is not None:
+                    ptr_required_vuid = vuid
+                checkExpr.append('skip |= validate_required_pointer("{}", {ppp}"{}"{pps}, reinterpret_cast<const void*>({}{}), {});\n'.format(funcPrintName, valuePrintName, prefix, value.name, ptr_required_vuid, **postProcSpec))
+            else:
+                checkExpr.append('skip |= validate_required_pointer("{}", {ppp}"{}"{pps}, {}{}, {});\n'.format(funcPrintName, valuePrintName, prefix, value.name, ptr_required_vuid, **postProcSpec))
+        else:
+            # Special case for optional internal allocation function pointers.
+            if (value.type, value.name) == ('PFN_vkInternalAllocationNotification', 'pfnInternalAllocation'):
+                checkExpr.extend(self.internalAllocationCheck(funcPrintName, prefix, value.name, 'pfnInternalFree', postProcSpec))
+            elif (value.type, value.name) == ('PFN_vkInternalFreeNotification', 'pfnInternalFree'):
+                checkExpr.extend(self.internalAllocationCheck(funcPrintName, prefix, value.name, 'pfnInternalAllocation', postProcSpec))
+        return checkExpr
+
+    #
+    # Generate internal allocation function pointer check.
+    def internalAllocationCheck(self, funcPrintName, prefix, name, complementaryName, postProcSpec):
+        checkExpr = []
+        vuid = '"VUID-VkAllocationCallbacks-pfnInternalAllocation-00635"'
+        checkExpr.append('if ({}{} != NULL)'.format(prefix, name))
+        checkExpr.append('{')
+        local_indent = self.incIndent('')
+        # Function pointers need a reinterpret_cast to void*
+        checkExpr.append(local_indent + 'skip |= validate_required_pointer("{}", {ppp}"{}{}"{pps}, reinterpret_cast<const void*>({}{}), {});\n'.format(funcPrintName, prefix, complementaryName, prefix, complementaryName, vuid, **postProcSpec))
+        checkExpr.append('}\n')
+        return checkExpr
+
+    #
+    # Process struct member validation code, performing name substitution if required
+    def processStructMemberCode(self, line, funcName, memberNamePrefix, memberDisplayNamePrefix, postProcSpec):
+        # Build format specifier list
+        kwargs = {}
+        if '{postProcPrefix}' in line:
+            # If we have a tuple that includes a format string and format parameters, need to use ParameterName class
+            if type(memberDisplayNamePrefix) is tuple:
+                kwargs['postProcPrefix'] = 'ParameterName('
+            else:
+                kwargs['postProcPrefix'] = postProcSpec['ppp']
+        if '{postProcSuffix}' in line:
+            # If we have a tuple that includes a format string and format parameters, need to use ParameterName class
+            if type(memberDisplayNamePrefix) is tuple:
+                kwargs['postProcSuffix'] = ', ParameterName::IndexVector{{ {}{} }})'.format(postProcSpec['ppi'], memberDisplayNamePrefix[1])
+            else:
+                kwargs['postProcSuffix'] = postProcSpec['pps']
+        if '{postProcInsert}' in line:
+            # If we have a tuple that includes a format string and format parameters, need to use ParameterName class
+            if type(memberDisplayNamePrefix) is tuple:
+                kwargs['postProcInsert'] = '{}{}, '.format(postProcSpec['ppi'], memberDisplayNamePrefix[1])
+            else:
+                kwargs['postProcInsert'] = postProcSpec['ppi']
+        if '{funcName}' in line:
+            kwargs['funcName'] = funcName
+        if '{valuePrefix}' in line:
+            kwargs['valuePrefix'] = memberNamePrefix
+        if '{displayNamePrefix}' in line:
+            # Check for a tuple that includes a format string and format parameters to be used with the ParameterName class
+            if type(memberDisplayNamePrefix) is tuple:
+                kwargs['displayNamePrefix'] = memberDisplayNamePrefix[0]
+            else:
+                kwargs['displayNamePrefix'] = memberDisplayNamePrefix
+
+        if kwargs:
+            # Need to escape the C++ curly braces
+            if 'IndexVector' in line:
+                line = line.replace('IndexVector{ ', 'IndexVector{{ ')
+                line = line.replace(' }),', ' }}),')
+            return line.format(**kwargs)
+        return line
+    #
+    # Process struct member validation code, stripping metadata
+    def ScrubStructCode(self, code):
+        scrubbed_lines = ''
+        for line in code:
+            if 'validate_struct_pnext' in line:
+                continue
+            if 'allowed_structs' in line:
+                continue
+            if 'xml-driven validation' in line:
+                continue
+            line = line.replace('{postProcPrefix}', '')
+            line = line.replace('{postProcSuffix}', '')
+            line = line.replace('{postProcInsert}', '')
+            line = line.replace('{funcName}', '')
+            line = line.replace('{valuePrefix}', '')
+            line = line.replace('{displayNamePrefix}', '')
+            line = line.replace('{IndexVector}', '')
+            line = line.replace('local_data->', '')
+            scrubbed_lines += line
+        return scrubbed_lines
+    #
+    # Process struct validation code for inclusion in function or parent struct validation code
+    def expandStructCode(self, item_type, funcName, memberNamePrefix, memberDisplayNamePrefix, indent, output, postProcSpec):
+        lines = self.validatedStructs[item_type]
+        for line in lines:
+            if output:
+                output[-1] += '\n'
+            if type(line) is list:
+                for sub in line:
+                    output.append(self.processStructMemberCode(indent + sub, funcName, memberNamePrefix, memberDisplayNamePrefix, postProcSpec))
+            else:
+                output.append(self.processStructMemberCode(indent + line, funcName, memberNamePrefix, memberDisplayNamePrefix, postProcSpec))
+        return output
+    #
+    # Process struct pointer/array validation code, performing name substitution if required
+    def expandStructPointerCode(self, prefix, value, lenValue, funcName, valueDisplayName, postProcSpec):
+        expr = []
+        expr.append('if ({}{} != NULL)\n'.format(prefix, value.name))
+        expr.append('{')
+        indent = self.incIndent(None)
+        if lenValue:
+            # Need to process all elements in the array
+            indexName = lenValue.name.replace('Count', 'Index')
+            expr[-1] += '\n'
+            if lenValue.ispointer:
+                # If the length value is a pointer, de-reference it for the count.
+                expr.append(indent + 'for (uint32_t {iname} = 0; {iname} < *{}{}; ++{iname})\n'.format(prefix, lenValue.name, iname=indexName))
+            else:
+                expr.append(indent + 'for (uint32_t {iname} = 0; {iname} < {}{}; ++{iname})\n'.format(prefix, lenValue.name, iname=indexName))
+            expr.append(indent + '{')
+            indent = self.incIndent(indent)
+            # Prefix for value name to display in error message
+            if value.ispointer == 2:
+                memberNamePrefix = '{}{}[{}]->'.format(prefix, value.name, indexName)
+                memberDisplayNamePrefix = ('{}[%i]->'.format(valueDisplayName), indexName)
+            else:
+                memberNamePrefix = '{}{}[{}].'.format(prefix, value.name, indexName)
+                memberDisplayNamePrefix = ('{}[%i].'.format(valueDisplayName), indexName)
+        else:
+            memberNamePrefix = '{}{}->'.format(prefix, value.name)
+            memberDisplayNamePrefix = '{}->'.format(valueDisplayName)
+        # Expand the struct validation lines
+        expr = self.expandStructCode(value.type, funcName, memberNamePrefix, memberDisplayNamePrefix, indent, expr, postProcSpec)
+        if lenValue:
+            # Close if and for scopes
+            indent = self.decIndent(indent)
+            expr.append(indent + '}\n')
+        expr.append('}\n')
+        return expr
+    #
+    # Generate the parameter checking code
+    def genFuncBody(self, funcName, values, valuePrefix, displayNamePrefix, structTypeName):
+        lines = []    # Generated lines of code
+        unused = []   # Unused variable names
+        for value in values:
+            usedLines = []
+            lenParam = None
+            #
+            # Prefix and suffix for post processing of parameter names for struct members.  Arrays of structures need special processing to include the array index in the full parameter name.
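+            # The '{postProcPrefix}'/'{postProcSuffix}'/'{postProcInsert}' placeholders are expanded later by
+            # processStructMemberCode into ParameterName(...) wrappers that splice the array index into the display
+            # name; for top-level command parameters they collapse to empty strings.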
+            postProcSpec = {}
+            postProcSpec['ppp'] = '' if not structTypeName else '{postProcPrefix}'
+            postProcSpec['pps'] = '' if not structTypeName else '{postProcSuffix}'
+            postProcSpec['ppi'] = '' if not structTypeName else '{postProcInsert}'
+            #
+            # Generate the full name of the value, which will be printed in the error message, by adding the variable prefix to the value name
+            valueDisplayName = '{}{}'.format(displayNamePrefix, value.name)
+            #
+            # Check for NULL pointers, ignore the in-out count parameters that
+            # will be validated with their associated array
+            if (value.ispointer or value.isstaticarray) and not value.iscount:
+                # Parameters for function argument generation
+                req = 'true'    # Parameter cannot be NULL
+                cpReq = 'true'  # Count pointer cannot be NULL
+                cvReq = 'true'  # Count value cannot be 0
+                lenDisplayName = None # Name of length parameter to print with validation messages; parameter name with prefix applied
+                # Generate required/optional parameter strings for the pointer and count values
+                if value.isoptional:
+                    req = 'false'
+                if value.len:
+                    # The parameter is an array with an explicit count parameter
+                    lenParam = self.getLenParam(values, value.len)
+                    lenDisplayName = '{}{}'.format(displayNamePrefix, lenParam.name)
+                    if lenParam.ispointer:
+                        # Count parameters that are pointers are inout
+                        if type(lenParam.isoptional) is list:
+                            if lenParam.isoptional[0]:
+                                cpReq = 'false'
+                            if lenParam.isoptional[1]:
+                                cvReq = 'false'
+                        else:
+                            if lenParam.isoptional:
+                                cpReq = 'false'
+                    else:
+                        if lenParam.isoptional:
+                            cvReq = 'false'
+                #
+                # The parameter will not be processed when tagged as 'noautovalidity'
+                # For the pointer to struct case, the struct pointer will not be validated, but any
+                # members not tagged as 'noautovalidity' will be validated
+                # We special-case the custom allocator checks, as they are explicit but can be auto-generated.
+                AllocatorFunctions = ['PFN_vkAllocationFunction', 'PFN_vkReallocationFunction', 'PFN_vkFreeFunction', 'PFN_vkInternalAllocationNotification', 'PFN_vkInternalFreeNotification']
+                if value.noautovalidity and value.type not in AllocatorFunctions:
+                    # Log a diagnostic message when validation cannot be automatically generated and must be implemented manually
+                    self.logMsg('diag', 'ParameterValidation: No validation for {} {}'.format(structTypeName if structTypeName else funcName, value.name))
+                else:
+                    if value.type in self.structTypes:
+                        # If this is a pointer to a struct with an sType field, verify the type
+                        usedLines += self.makeStructTypeCheck(valuePrefix, value, lenParam, req, cvReq, cpReq, funcName, lenDisplayName, valueDisplayName, postProcSpec, structTypeName)
+                    # If this is an input handle array that is not allowed to contain NULL handles, verify that none of the handles are VK_NULL_HANDLE
+                    elif value.type in self.handleTypes and value.isconst and not self.isHandleOptional(value, lenParam):
+                        usedLines += self.makeHandleCheck(valuePrefix, value, lenParam, req, cvReq, funcName, lenDisplayName, valueDisplayName, postProcSpec)
+                    elif value.type in self.flags and value.isconst:
+                        usedLines += self.makeFlagsArrayCheck(valuePrefix, value, lenParam, req, cvReq, funcName, lenDisplayName, valueDisplayName, postProcSpec)
+                    elif value.isbool and value.isconst:
+                        usedLines.append('skip |= validate_bool32_array("{}", {ppp}"{}"{pps}, {ppp}"{}"{pps}, {pf}{}, {pf}{}, {}, {});\n'.format(funcName, lenDisplayName, valueDisplayName, lenParam.name, value.name, cvReq, req, pf=valuePrefix, **postProcSpec))
+                    elif value.israngedenum and value.isconst:
+                        enum_value_list = 'All%sEnums' % value.type
+                        usedLines.append('skip |= validate_ranged_enum_array("{}", {ppp}"{}"{pps}, {ppp}"{}"{pps}, "{}", {}, {pf}{}, {pf}{}, {}, {});\n'.format(funcName, lenDisplayName, valueDisplayName, value.type, enum_value_list, lenParam.name, value.name, cvReq, req, pf=valuePrefix, **postProcSpec))
+                    elif value.name == 'pNext' and value.isconst:
+                        usedLines += self.makeStructNextCheck(valuePrefix, value, funcName, valueDisplayName, postProcSpec, structTypeName)
+                    else:
+                        usedLines += self.makePointerCheck(valuePrefix, value, lenParam, req, cvReq, cpReq, funcName, lenDisplayName, valueDisplayName, postProcSpec, structTypeName)
+                    # If this is a pointer to a struct (input), see if it contains members that need to be checked
+                    if value.type in self.validatedStructs:
+                        if value.isconst or value.type in self.returnedonly_structs:
+                            usedLines.append(self.expandStructPointerCode(valuePrefix, value, lenParam, funcName, valueDisplayName, postProcSpec))
+            # Non-pointer types
+            else:
+                # The parameter will not be processed when tagged as 'noautovalidity'
+                # For the struct case, the struct type will not be validated, but any
+                # members not tagged as 'noautovalidity' will be validated
+                if value.noautovalidity:
+                    # Log a diagnostic message when validation cannot be automatically generated and must be implemented manually
+                    self.logMsg('diag', 'ParameterValidation: No validation for {} {}'.format(structTypeName if structTypeName else funcName, value.name))
+                else:
+                    vuid_name_tag = structTypeName if structTypeName is not None else funcName
+                    if value.type in self.structTypes:
+                        stype = self.structTypes[value.type]
+                        vuid = self.GetVuid(value.type, "sType-sType")
+                        undefined_vuid = '"kVUIDUndefined"'
+                        usedLines.append('skip |= validate_struct_type("{}", {ppp}"{}"{pps}, "{sv}", &({}{vn}), {sv}, false, kVUIDUndefined, {});\n'.format(
+                            funcName, valueDisplayName, valuePrefix, vuid, vn=value.name, sv=stype, vt=value.type, **postProcSpec))
+                    elif value.type in self.handleTypes:
+                        if not self.isHandleOptional(value, None):
+                            usedLines.append('skip |= validate_required_handle("{}", {ppp}"{}"{pps}, {}{});\n'.format(funcName, valueDisplayName, valuePrefix, value.name, **postProcSpec))
+                    elif value.type in self.flags and value.type.replace('Flags', 'FlagBits') not in self.flagBits:
+                        vuid = self.GetVuid(vuid_name_tag, "%s-zerobitmask" % (value.name))
+                        usedLines.append('skip |= validate_reserved_flags("{}", {ppp}"{}"{pps}, {pf}{}, {});\n'.format(funcName, valueDisplayName, value.name, vuid, pf=valuePrefix, **postProcSpec))
+                    elif value.type in self.flags or value.type in self.flagBits:
+                        if value.type in self.flags:
+                            flagBitsName = value.type.replace('Flags', 'FlagBits')
+                            flagsType = 'kOptionalFlags' if value.isoptional else 'kRequiredFlags'
+                            invalidVuid = self.GetVuid(vuid_name_tag, "%s-parameter" % (value.name))
+                            zeroVuid = self.GetVuid(vuid_name_tag, "%s-requiredbitmask" % (value.name))
+                        elif value.type in self.flagBits:
+                            flagBitsName = value.type
+                            flagsType = 'kOptionalSingleBit' if value.isoptional else 'kRequiredSingleBit'
+                            invalidVuid = self.GetVuid(vuid_name_tag, "%s-parameter" % (value.name))
+                            zeroVuid = invalidVuid
+                        allFlagsName = 'All' + flagBitsName
+                        zeroVuidArg = '' if value.isoptional else ', ' + zeroVuid
+                        usedLines.append('skip |= validate_flags("{}", {ppp}"{}"{pps}, "{}", {}, {pf}{}, {}, {}{});\n'.format(funcName, valueDisplayName, flagBitsName, allFlagsName, value.name, flagsType, invalidVuid, zeroVuidArg, pf=valuePrefix, **postProcSpec))
+                    elif value.isbool:
+                        usedLines.append('skip |= validate_bool32("{}", {ppp}"{}"{pps}, {}{});\n'.format(funcName, valueDisplayName, valuePrefix, value.name, **postProcSpec))
+                    elif value.israngedenum:
+                        vuid = self.GetVuid(vuid_name_tag, "%s-parameter" % (value.name))
+                        enum_value_list = 'All%sEnums' % value.type
+                        usedLines.append('skip |= validate_ranged_enum("{}", {ppp}"{}"{pps}, "{}", {}, {}{}, {});\n'.format(funcName, valueDisplayName, value.type, enum_value_list, valuePrefix, value.name, vuid, **postProcSpec))
+                    # If this is a struct, see if it contains members that need to be checked
+                    if value.type in self.validatedStructs:
+                        memberNamePrefix = '{}{}.'.format(valuePrefix, value.name)
+                        memberDisplayNamePrefix = '{}.'.format(valueDisplayName)
+                        usedLines.append(self.expandStructCode(value.type, funcName, memberNamePrefix, memberDisplayNamePrefix, '', [], postProcSpec))
+            # Append the parameter check to the function body for the current command
+            if usedLines:
+                # Apply special conditional checks
+                if value.condition:
+                    usedLines = self.genConditionalCall(valuePrefix, value.condition, usedLines)
+                lines += usedLines
+            elif not value.iscount:
+                # If no expression was generated for this value, it is unreferenced by the validation function, unless
+                # it is an array count, which is indirectly referenced for array validation.
+                unused.append(value.name)
+        if not lines:
+            lines.append('// No xml-driven validation\n')
+        return lines, unused
+    #
+    # Generate the struct member check code from the captured data
+    def processStructMemberData(self):
+        indent = self.incIndent(None)
+        for struct in self.structMembers:
+            #
+            # The string returned by genFuncBody will be nested in an if check for a NULL pointer, so needs its indent incremented
+            lines, unused = self.genFuncBody('{funcName}', struct.members, '{valuePrefix}', '{displayNamePrefix}', struct.name)
+            if lines:
+                self.validatedStructs[struct.name] = lines
+    #
+    # Generate the command param check code from the captured data
+    def processCmdData(self):
+        indent = self.incIndent(None)
+        for command in self.commands:
+            # Skip first parameter if it is a dispatch handle (everything except vkCreateInstance)
+            startIndex = 0 if command.name == 'vkCreateInstance' else 1
+            lines, unused = self.genFuncBody(command.name, command.params[startIndex:], '', '', None)
+            # Cannot validate extension dependencies for device extension APIs having a physical device as their dispatchable object
+            if (command.name in self.required_extensions) and (self.extension_type != 'device' or command.params[0].type != 'VkPhysicalDevice'):
+                ext_test = ''
+                if command.params[0].type in ["VkInstance", "VkPhysicalDevice"] or command.name == 'vkCreateInstance':
+                    ext_table_type = 'instance'
+                else:
+                    ext_table_type = 'device'
+                for ext in self.required_extensions[command.name]:
+                    ext_name_define = ''
+                    ext_enable_name = ''
+                    for extension in self.registry.extensions:
+                        if extension.attrib['name'] == ext:
+                            ext_name_define = extension[0][1].get('name')
+                            ext_enable_name = ext_name_define.lower()
+                            ext_enable_name = re.sub('_extension_name', '', ext_enable_name)
+                            break
+                    ext_test = 'if (!%s_extensions.%s) skip |= OutputExtensionError("%s", %s);\n' % (ext_table_type, ext_enable_name, command.name, ext_name_define)
+                    lines.insert(0, ext_test)
+            if lines:
+                func_sig = self.getCmdDef(command) + ' const {\n'
+                func_sig = func_sig.split('VKAPI_CALL vk')[1]
+                cmdDef = 'bool StatelessValidation::PreCallValidate' + func_sig
+                cmdDef += '%sbool skip = false;\n' % indent
+                for line in lines:
+                    if type(line) is list:
+                        for sub in line:
+                            cmdDef += indent + sub
+                    else:
+                        cmdDef += indent + line
+                # Insert call to custom-written function if present
+                if command.name in self.functions_with_manual_checks:
+                    # Generate parameter list for manual fcn and down-chain calls
+                    params_text = ''
+                    for param in command.params:
+                        params_text += '%s, ' % param.name
+                    params_text = params_text[:-2] + ');\n'
+                    cmdDef += '    if (!skip) skip |= manual_PreCallValidate'+ command.name[2:] + '(' + params_text
+                cmdDef += '%sreturn skip;\n' % indent
+                cmdDef += '}\n'
+                self.validation.append(cmdDef)
diff --git a/src/third_party/vulkan-validation-layers/src/scripts/parse_test_results.py b/src/third_party/vulkan-validation-layers/src/scripts/parse_test_results.py
new file mode 100644
index 0000000..6571d8c
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/scripts/parse_test_results.py
@@ -0,0 +1,173 @@
+#!/usr/bin/python3
+#
+# Copyright (c) 2018 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#           http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: William Henning <whenning@google.com>
+#
+# This script parses the validation layers test continuous integration output
+# and reports the number of tests that passed, failed, output unexpected errors,
+# or were skipped. As such, the script is only designed to parse the output
+# generated by the existing CI implementation.
+#
+# usage:
+#       for profile in tests/device_profiles/*.json; do echo Testing with
+#       profile $profile; VK_LAYER_PATH=DEVSIM_AND_VALIDATION_PATHS
+#       VK_DEVSIM_FILE=$profile VK_ICD_FILENAMES=MOCK_ICD_PATH
+#       ./build/tests/vk_layer_validation_tests --devsim; done
+#       | python3 parse_test_results.py [--fail_on_skip] [--fail_on_unexpected]
+#
+#       --fail_on_skip causes the script to exit with a non-zero exit code if a test
+#       didn't run on any device profile
+#
+#       --fail_on_unexpected causes the script to exit with a non-zero exit code if
+#       a test printed unexpected errors
+#
+
+import argparse
+import re
+import sys
+from collections import defaultdict
+
+class OutputStats(object):
+    def __init__(self):
+        self.current_profile = ""
+        self.current_test = ""
+        self.current_test_output = ""
+        self.test_results = defaultdict(defaultdict)
+        self.unexpected_errors = defaultdict(defaultdict)
+
+    def match(self, line):
+        self.new_profile_match(line)
+        self.test_suite_end_match(line)
+        self.start_test_match(line)
+        if self.current_test != "":
+            self.current_test_output += line
+        self.skip_test_match(line)
+        self.pass_test_match(line)
+        self.fail_test_match(line)
+        self.unexpected_error_match(line)
+
+    def print_summary(self, skip_is_failure, unexpected_is_failure):
+        if self.current_test != "":
+            self.test_died()
+
+        passed_tests = 0
+        skipped_tests = 0
+        failed_tests = 0
+        unexpected_error_tests = 0
+        did_fail = False
+
+        for test_name, results in self.test_results.items():
+            skipped_profiles = 0
+            passed_profiles = 0
+            failed_profiles = 0
+            aborted_profiles = 0
+            unexpected_error_profiles = 0
+            for profile, result in results.items():
+                if result == "pass":
+                    passed_profiles += 1
+                if result == "fail":
+                    failed_profiles += 1
+                if result == "skip":
+                    skipped_profiles += 1
+                if self.unexpected_errors.get(test_name, {}).get(profile, "") == "true":
+                    unexpected_error_profiles += 1
+            if failed_profiles != 0:
+                print("TEST FAILED:", test_name)
+                failed_tests += 1
+            elif skipped_profiles == len(results):
+                print("TEST SKIPPED ALL DEVICES:", test_name)
+                skipped_tests += 1
+            else:
+                passed_tests += 1
+            if unexpected_error_profiles != 0:
+                print("UNEXPECTED ERRORS:", test_name)
+                unexpected_error_tests += 1
+        num_tests = len(self.test_results)
+        print("PASSED: ", passed_tests, "/", num_tests, " tests")
+        if skipped_tests != 0:
+            did_fail |= skip_is_failure
+            print("NEVER RAN: ", skipped_tests, "/", num_tests, " tests")
+        if failed_tests != 0:
+            did_fail = True
+            print("FAILED: ", failed_tests, "/", num_tests, " tests")
+        if unexpected_error_tests != 0:
+            did_fail |= unexpected_is_failure
+            print("UNEXPECTED OUTPUT: ", unexpected_error_tests, "/", num_tests, " tests")
+        return did_fail
+
+    def new_profile_match(self, line):
+        if re.search(r'Testing with profile .*/(.*)', line) is not None:
+            self.current_profile = re.search(r'Testing with profile .*/(.*)', line).group(1)
+
+    def test_suite_end_match(self, line):
+        if re.search(r'\[-*\]', line) is not None:
+            if self.current_test != "":
+                # Here we see a message that starts with [----------] before another test
+                # finished running, which most likely means that the other test died.
+                self.test_died()
+
+    def start_test_match(self, line):
+        if re.search(r'\[ RUN\s*\]', line) is not None:
+            # This parser doesn't handle the case where one test's start comes between another
+            # test's start and result.
+            assert self.current_test == ""
+            self.current_test = re.search(r'] (.*)', line).group(1)
+            self.current_test_output = ""
+
+    def skip_test_match(self, line):
+        if re.search(r'TEST SKIPPED', line) is not None:
+            self.test_results[self.current_test][self.current_profile] = "skip"
+
+    def pass_test_match(self, line):
+        if re.search(r'\[\s*OK \]', line) is not None:
+            # If gtest says the test passed, check if it was skipped before marking it passed
+            if self.test_results.get(self.current_test, {}).get(self.current_profile, "") != "skip":
+                self.test_results[self.current_test][self.current_profile] = "pass"
+            self.current_test = ""
+
+    def fail_test_match(self, line):
+        if re.search(r'\[\s*FAILED\s*\]', line) is not None and self.current_test != "":
+            self.test_results[self.current_test][self.current_profile] = "fail"
+            self.current_test = ""
+
+    def unexpected_error_match(self, line):
+        if re.search(r'^Unexpected: ', line) is not None:
+            self.unexpected_errors[self.current_test][self.current_profile] = "true"
+
+    def test_died(self):
+        print("A test likely crashed. Testing is being aborted.")
+        print("Final test output: ")
+        print(self.current_test_output)
+        sys.exit(1)
+
+def main():
+    parser = argparse.ArgumentParser(description='Parse the output from validation layer tests.')
+    parser.add_argument('--fail_on_skip', action='store_true', help="Makes the script exit with a "
+                        "non-zero exit code if a test didn't run on any device profile.")
+    parser.add_argument('--fail_on_unexpected', action='store_true', help="Makes the script exit "
+                        "with a non-zero exit code if a test causes unexpected errors.")
+    args = parser.parse_args()
+
+    stats = OutputStats()
+    for line in sys.stdin:
+        stats.match(line)
+    failed = stats.print_summary(args.fail_on_skip, args.fail_on_unexpected)
+    if failed:
+        print("\nFAILED CI")
+        sys.exit(1)
+
+if __name__ == '__main__':
+    main()
diff --git a/src/third_party/vulkan-validation-layers/src/scripts/thread_safety_generator.py b/src/third_party/vulkan-validation-layers/src/scripts/thread_safety_generator.py
new file mode 100644
index 0000000..52d458e
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/scripts/thread_safety_generator.py
@@ -0,0 +1,1567 @@
+#!/usr/bin/python3 -i
+#
+# Copyright (c) 2015-2019 The Khronos Group Inc.
+# Copyright (c) 2015-2019 Valve Corporation
+# Copyright (c) 2015-2019 LunarG, Inc.
+# Copyright (c) 2015-2019 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Mike Stroyan <stroyan@google.com>
+# Author: Mark Lobodzinski <mark@lunarg.com>
+
+import os,re,sys
+from generator import *
+from common_codegen import *
+
+# ThreadGeneratorOptions - subclass of GeneratorOptions.
+#
+# Adds options used by ThreadOutputGenerator objects during threading
+# layer generation.
+#
+# Additional members
+#   prefixText - list of strings to prefix generated header with
+#     (usually a copyright statement + calling convention macros).
+#   protectFile - True if multiple inclusion protection should be
+#     generated (based on the filename) around the entire header.
+#   protectFeature - True if #ifndef..#endif protection should be
+#     generated around a feature interface in the header file.
+#   genFuncPointers - True if function pointer typedefs should be
+#     generated
+#   protectProto - If conditional protection should be generated
+#     around prototype declarations, set to either '#ifdef'
+#     to require opt-in (#ifdef protectProtoStr) or '#ifndef'
+#     to require opt-out (#ifndef protectProtoStr). Otherwise
+#     set to None.
+#   protectProtoStr - #ifdef/#ifndef symbol to use around prototype
+#     declarations, if protectProto is set
+#   apicall - string to use for the function declaration prefix,
+#     such as APICALL on Windows.
+#   apientry - string to use for the calling convention macro,
+#     in typedefs, such as APIENTRY.
+#   apientryp - string to use for the calling convention macro
+#     in function pointer typedefs, such as APIENTRYP.
+#   indentFuncProto - True if prototype declarations should put each
+#     parameter on a separate line
+#   indentFuncPointer - True if typedefed function pointers should put each
+#     parameter on a separate line
+#   alignFuncParam - if nonzero and parameters are being put on a
+#     separate line, align parameter names at the specified column
+class ThreadGeneratorOptions(GeneratorOptions):
+    def __init__(self,
+                 conventions = None,
+                 filename = None,
+                 directory = '.',
+                 apiname = None,
+                 profile = None,
+                 versions = '.*',
+                 emitversions = '.*',
+                 defaultExtensions = None,
+                 addExtensions = None,
+                 removeExtensions = None,
+                 emitExtensions = None,
+                 sortProcedure = regSortFeatures,
+                 prefixText = "",
+                 genFuncPointers = True,
+                 protectFile = True,
+                 protectFeature = True,
+                 apicall = '',
+                 apientry = '',
+                 apientryp = '',
+                 indentFuncProto = True,
+                 indentFuncPointer = False,
+                 alignFuncParam = 0,
+                 expandEnumerants = True):
+        GeneratorOptions.__init__(self, conventions, filename, directory, apiname, profile,
+                                  versions, emitversions, defaultExtensions,
+                                  addExtensions, removeExtensions, emitExtensions, sortProcedure)
+        self.prefixText      = prefixText
+        self.genFuncPointers = genFuncPointers
+        self.protectFile     = protectFile
+        self.protectFeature  = protectFeature
+        self.apicall         = apicall
+        self.apientry        = apientry
+        self.apientryp       = apientryp
+        self.indentFuncProto = indentFuncProto
+        self.indentFuncPointer = indentFuncPointer
+        self.alignFuncParam  = alignFuncParam
+        self.expandEnumerants = expandEnumerants
+
+
+# ThreadOutputGenerator - subclass of OutputGenerator.
+# Generates Thread checking framework
+#
+# ---- methods ----
+# ThreadOutputGenerator(errFile, warnFile, diagFile) - args as for
+#   OutputGenerator. Defines additional internal state.
+# ---- methods overriding base class ----
+# beginFile(genOpts)
+# endFile()
+# beginFeature(interface, emit)
+# endFeature()
+# genType(typeinfo,name)
+# genStruct(typeinfo,name)
+# genGroup(groupinfo,name)
+# genEnum(enuminfo, name)
+# genCmd(cmdinfo)
+class ThreadOutputGenerator(OutputGenerator):
+    """Generate specified API interfaces in a specific style, such as a C header"""
+
+    inline_copyright_message = """
+// This file is ***GENERATED***.  Do Not Edit.
+// See thread_safety_generator.py for modifications.
+
+/* Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ */"""
+
+    # Note that the inline_custom_header_preamble template below contains three embedded template expansion identifiers.
+    # These get replaced with generated code sections, and are labeled:
+    #  o COUNTER_CLASS_DEFINITIONS_TEMPLATE
+    #  o COUNTER_CLASS_INSTANCES_TEMPLATE
+    #  o COUNTER_CLASS_BODIES_TEMPLATE
+    inline_custom_header_preamble = """
+#pragma once
+
+#include <atomic>
+#include <chrono>
+#include <mutex>
+#include <string>
+#include <thread>
+#include <unordered_set>
+#include <vector>
+
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(DISTINCT_NONDISPATCHABLE_PHONY_HANDLE)
+// The following line must match the vulkan_core.h condition guarding VK_DEFINE_NON_DISPATCHABLE_HANDLE
+#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__)) || defined(_M_X64) || defined(__ia64) || \
+    defined(_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
+// If pointers are 64-bit, then there can be separate counters for each
+// NONDISPATCHABLE_HANDLE type.  Otherwise they are all typedef uint64_t.
+#define DISTINCT_NONDISPATCHABLE_HANDLES
+// Make sure we catch any disagreement between us and the vulkan definition
+static_assert(std::is_pointer<DISTINCT_NONDISPATCHABLE_PHONY_HANDLE>::value,
+              "Mismatched non-dispatchable handle handle, expected pointer type.");
+#else
+// Make sure we catch any disagreement between us and the vulkan definition
+static_assert(std::is_same<uint64_t, DISTINCT_NONDISPATCHABLE_PHONY_HANDLE>::value,
+              "Mismatched non-dispatchable handle handle, expected uint64_t.");
+#endif
+
+// Suppress unused warning on Linux
+#if defined(__GNUC__)
+#define DECORATE_UNUSED __attribute__((unused))
+#else
+#define DECORATE_UNUSED
+#endif
+
+// clang-format off
+static const char DECORATE_UNUSED *kVUID_Threading_Info = "UNASSIGNED-Threading-Info";
+static const char DECORATE_UNUSED *kVUID_Threading_MultipleThreads = "UNASSIGNED-Threading-MultipleThreads";
+static const char DECORATE_UNUSED *kVUID_Threading_SingleThreadReuse = "UNASSIGNED-Threading-SingleThreadReuse";
+// clang-format on
+
+#undef DECORATE_UNUSED
+
+class ObjectUseData
+{
+public:
+    class WriteReadCount
+    {
+    public:
+        WriteReadCount(int64_t v) : count(v) {}
+
+        int32_t GetReadCount() const { return (int32_t)(count & 0xFFFFFFFF); }
+        int32_t GetWriteCount() const { return (int32_t)(count >> 32); }
+
+    private:
+        int64_t count;
+    };
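To make the three helpers above concrete, here is a small self-contained sketch that replays their checks on a hand-written <param> element; the element is an illustrative stand-in for a vk.xml entry, and the checks are replicated inline rather than called on a generator instance:

    import xml.etree.ElementTree as etree

    param = etree.fromstring(
        '<param len="swapchainCount">const <type>VkSwapchainKHR</type>* pSwapchains</param>')

    # paramIsArray: a 'len' attribute marks a pointer-to-array parameter
    assert param.attrib.get('len') is not None

    # paramIsPointer: the '*' appears in the tail text following the <type> element
    assert any(e.tag == 'type' and e.tail is not None and '*' in e.tail for e in param)

    # paramSuffix: VkSwapchainKHR is not an instance-parented type, so it gets the
    # plain counter wrappers (empty suffix) rather than the 'ParentInstance' ones
    paramtype = param.find('type').text
    assert not any(s in paramtype for s in ('VkSurface', 'VkInstance', 'VkDebugReportCallback'))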
+
+    ObjectUseData() : thread(0), writer_reader_count(0) {
+        // silence -Wunused-private-field warning
+        padding[0] = 0;
+    }
+
+    WriteReadCount AddWriter() {
+        int64_t prev = writer_reader_count.fetch_add(1ULL << 32);
+        return WriteReadCount(prev);
+    }
+    WriteReadCount AddReader() {
+        int64_t prev = writer_reader_count.fetch_add(1ULL);
+        return WriteReadCount(prev);
+    }
+    WriteReadCount RemoveWriter() {
+        int64_t prev = writer_reader_count.fetch_add(-(1LL << 32));
+        return WriteReadCount(prev);
+    }
+    WriteReadCount RemoveReader() {
+        int64_t prev = writer_reader_count.fetch_add(-1LL);
+        return WriteReadCount(prev);
+    }
+    WriteReadCount GetCount() {
+        return WriteReadCount(writer_reader_count);
+    }
+
+    void WaitForObjectIdle(bool is_writer)  {
+        // Wait for thread-safe access to object instead of skipping call.
+        while (GetCount().GetReadCount() > (int)(!is_writer) || GetCount().GetWriteCount() > (int)is_writer) {
+            std::this_thread::sleep_for(std::chrono::microseconds(1));
+        }
+    }
+
+    std::atomic<loader_platform_thread_id> thread;
+
+private:
+    // need to update write and read counts atomically. Writer in high
+    // 32 bits, reader in low 32 bits.
+    std::atomic<int64_t> writer_reader_count;
+
+    // Put each lock on its own cache line to avoid false cache line sharing.
+    char padding[(-int(sizeof(std::atomic<loader_platform_thread_id>) + sizeof(std::atomic<int64_t>))) & 63];
+};
+
+
+template <typename T>
+class counter {
+public:
+    const char *typeName;
+    VkDebugReportObjectTypeEXT objectType;
+    debug_report_data **report_data;
+
+    vl_concurrent_unordered_map<T, std::shared_ptr<ObjectUseData>, 6> object_table;
+
+    void CreateObject(T object) {
+        object_table.insert_or_assign(object, std::make_shared<ObjectUseData>());
+    }
+
+    void DestroyObject(T object) {
+        if (object) {
+            object_table.erase(object);
+        }
+    }
+
+    std::shared_ptr<ObjectUseData> FindObject(T object) {
+        assert(object_table.contains(object));
+        auto iter = std::move(object_table.find(object));
+        if (iter != object_table.end()) {
+            return std::move(iter->second);
+        } else {
+            log_msg(*report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, (uint64_t)(object), kVUID_Threading_Info,
+                    "Couldn't find %s Object 0x%" PRIxLEAST64
+                    ". This should not happen and may indicate a bug in the application.",
+                    object_string[objectType], (uint64_t)(object));
+            return nullptr;
+        }
+    }
+
+    void StartWrite(T object) {
+        if (object == VK_NULL_HANDLE) {
+            return;
+        }
+        bool skip = false;
+        loader_platform_thread_id tid = loader_platform_get_thread_id();
+
+        auto use_data = FindObject(object);
+        if (!use_data) {
+            return;
+        }
+        const ObjectUseData::WriteReadCount prevCount = use_data->AddWriter();
+
+        if (prevCount.GetReadCount() == 0 && prevCount.GetWriteCount() == 0) {
+            // There is no current use of the object.  Record writer thread.
+            use_data->thread = tid;
+        } else {
+            if (prevCount.GetReadCount() == 0) {
+                assert(prevCount.GetWriteCount() != 0);
+                // There are no readers.  Two writers just collided.
+                if (use_data->thread != tid) {
+                    skip |= log_msg(*report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, (uint64_t)(object),
+                        kVUID_Threading_MultipleThreads,
+                        "THREADING ERROR : object of type %s is simultaneously used in "
+                        "thread 0x%" PRIx64 " and thread 0x%" PRIx64,
+                        typeName, (uint64_t)use_data->thread.load(std::memory_order_relaxed), (uint64_t)tid);
+                    if (skip) {
+                        // Wait for thread-safe access to object instead of skipping call.
+                        use_data->WaitForObjectIdle(true);
+                        // There is now no current use of the object.  Record writer thread.
+                        use_data->thread = tid;
+                    } else {
+                        // There is now no current use of the object.  Record writer thread.
+                        use_data->thread = tid;
+                    }
+                } else {
+                    // This is either safe multiple use in one call, or recursive use.
+                    // There is no way to make recursion safe.  Just forge ahead.
+                }
+            } else {
+                // There are readers.  This writer collided with them.
+                if (use_data->thread != tid) {
+                    skip |= log_msg(*report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, (uint64_t)(object),
+                        kVUID_Threading_MultipleThreads,
+                        "THREADING ERROR : object of type %s is simultaneously used in "
+                        "thread 0x%" PRIx64 " and thread 0x%" PRIx64,
+                        typeName, (uint64_t)use_data->thread.load(std::memory_order_relaxed), (uint64_t)tid);
+                    if (skip) {
+                        // Wait for thread-safe access to object instead of skipping call.
+                        use_data->WaitForObjectIdle(true);
+                        // There is now no current use of the object.  Record writer thread.
+                        use_data->thread = tid;
+                    } else {
+                        // Continue with an unsafe use of the object.
+                        use_data->thread = tid;
+                    }
+                } else {
+                    // This is either safe multiple use in one call, or recursive use.
+                    // There is no way to make recursion safe.  Just forge ahead.
+                }
+            }
+        }
+    }
+
+    void FinishWrite(T object) {
+        if (object == VK_NULL_HANDLE) {
+            return;
+        }
+        // Object is no longer in use
+        auto use_data = FindObject(object);
+        if (!use_data) {
+            return;
+        }
+        use_data->RemoveWriter();
+    }
+
+    void StartRead(T object) {
+        if (object == VK_NULL_HANDLE) {
+            return;
+        }
+        bool skip = false;
+        loader_platform_thread_id tid = loader_platform_get_thread_id();
+
+        auto use_data = FindObject(object);
+        if (!use_data) {
+            return;
+        }
+        const ObjectUseData::WriteReadCount prevCount = use_data->AddReader();
+
+        if (prevCount.GetReadCount() == 0 && prevCount.GetWriteCount() == 0) {
+            // There is no current use of the object.
+            use_data->thread = tid;
+        } else if (prevCount.GetWriteCount() > 0 && use_data->thread != tid) {
+            // There is a writer of the object.
+            skip |= log_msg(*report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, (uint64_t)(object),
+                kVUID_Threading_MultipleThreads,
+                "THREADING ERROR : object of type %s is simultaneously used in "
+                "thread 0x%" PRIx64 " and thread 0x%" PRIx64,
+                typeName, (uint64_t)use_data->thread.load(std::memory_order_relaxed), (uint64_t)tid);
+            if (skip) {
+                // Wait for thread-safe access to object instead of skipping call.
+                use_data->WaitForObjectIdle(false);
+                use_data->thread = tid;
+            }
+        } else {
+            // There are other readers of the object.
+        }
+    }
+    void FinishRead(T object) {
+        if (object == VK_NULL_HANDLE) {
+            return;
+        }
+
+        auto use_data = FindObject(object);
+        if (!use_data) {
+            return;
+        }
+        use_data->RemoveReader();
+    }
+    counter(const char *name = "", VkDebugReportObjectTypeEXT type = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, debug_report_data **rep_data = nullptr) {
+        typeName = name;
+        objectType = type;
+        report_data = rep_data;
+    }
+
+private:
+};
+
+class ThreadSafety : public ValidationObject {
+public:
+
+    ReadWriteLock thread_safety_lock;
+
+    // Override chassis read/write locks for this validation object
+    // This override takes a deferred lock. i.e. it is not acquired.
+    virtual read_lock_guard_t read_lock() {
+        return read_lock_guard_t(validation_object_mutex, std::defer_lock);
+    }
+    virtual write_lock_guard_t write_lock() {
+        return write_lock_guard_t(validation_object_mutex, std::defer_lock);
+    }
+
+    // If this ThreadSafety is for a VkDevice, then parent_instance points to the
+    // ThreadSafety object of its parent VkInstance. This is used to get to the counters
+    // for objects created with the instance as parent.
+    ThreadSafety *parent_instance;
+
+    vl_concurrent_unordered_map<VkCommandBuffer, VkCommandPool, 6> command_pool_map;
+    std::unordered_map<VkCommandPool, std::unordered_set<VkCommandBuffer>> pool_command_buffers_map;
+    std::unordered_map<VkDevice, std::unordered_set<VkQueue>> device_queues_map;
+
+    // Track per-descriptorsetlayout and per-descriptorset whether UPDATE_AFTER_BIND is used.
+    // This is used to (sloppily) implement the relaxed externsync rules for UPDATE_AFTER_BIND
+    // descriptors. We model updates of UPDATE_AFTER_BIND descriptors as if they were reads
+    // rather than writes, because they only conflict with the set being freed or reset.
+    //
+    // We don't track the UPDATE_AFTER_BIND state per-binding for a couple reasons:
+    // (1) We only have one counter per object, and if we treated non-UAB as writes
+    //     and UAB as reads then they'd appear to conflict with each other.
+    // (2) Avoid additional tracking of descriptor binding state in the descriptor set
+    //     layout, and tracking of which bindings are accessed by a VkDescriptorUpdateTemplate.
+    vl_concurrent_unordered_map<VkDescriptorSetLayout, bool, 4> dsl_update_after_bind_map;
+    vl_concurrent_unordered_map<VkDescriptorSet, bool, 6> ds_update_after_bind_map;
+    bool DsUpdateAfterBind(VkDescriptorSet) const;
+
+    counter<VkCommandBuffer> c_VkCommandBuffer;
+    counter<VkDevice> c_VkDevice;
+    counter<VkInstance> c_VkInstance;
+    counter<VkQueue> c_VkQueue;
+#ifdef DISTINCT_NONDISPATCHABLE_HANDLES
+
+    // Special entry to allow tracking of command pool Reset and Destroy
+    counter<VkCommandPool> c_VkCommandPoolContents;
+COUNTER_CLASS_DEFINITIONS_TEMPLATE
+
+#else   // DISTINCT_NONDISPATCHABLE_HANDLES
+    // Special entry to allow tracking of command pool Reset and Destroy
+    counter<uint64_t> c_VkCommandPoolContents;
+
+    counter<uint64_t> c_uint64_t;
+#endif  // DISTINCT_NONDISPATCHABLE_HANDLES
+
+    ThreadSafety(ThreadSafety *parent)
+        : parent_instance(parent),
+          c_VkCommandBuffer("VkCommandBuffer", VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, &report_data),
+          c_VkDevice("VkDevice", VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, &report_data),
+          c_VkInstance("VkInstance", VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, &report_data),
+          c_VkQueue("VkQueue", VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, &report_data),
+          c_VkCommandPoolContents("VkCommandPool", VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, &report_data),
+
+#ifdef DISTINCT_NONDISPATCHABLE_HANDLES
+COUNTER_CLASS_INSTANCES_TEMPLATE
+
+
+#else   // DISTINCT_NONDISPATCHABLE_HANDLES
+          c_uint64_t("NON_DISPATCHABLE_HANDLE", VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, &report_data)
+#endif  // DISTINCT_NONDISPATCHABLE_HANDLES
+              {};
+
+#define WRAPPER(type)                                                \\
+    void StartWriteObject(type object) {                             \\
+        c_##type.StartWrite(object);                                 \\
+    }                                                                \\
+    void FinishWriteObject(type object) {                            \\
+        c_##type.FinishWrite(object);                                \\
+    }                                                                \\
+    void StartReadObject(type object) {                              \\
+        c_##type.StartRead(object);                                  \\
+    }                                                                \\
+    void FinishReadObject(type object) {                             \\
+        c_##type.FinishRead(object);                                 \\
+    }                                                                \\
+    void CreateObject(type object) {                                 \\
+        c_##type.CreateObject(object);                               \\
+    }                                                                \\
+    void DestroyObject(type object) {                                \\
+        c_##type.DestroyObject(object);                              \\
+    }
+
+#define WRAPPER_PARENT_INSTANCE(type)                                                   \\
+    void StartWriteObjectParentInstance(type object) {                                  \\
+        (parent_instance ? parent_instance : this)->c_##type.StartWrite(object);        \\
+    }                                                                                   \\
+    void FinishWriteObjectParentInstance(type object) {                                 \\
+        (parent_instance ? parent_instance : this)->c_##type.FinishWrite(object);       \\
+    }                                                                                   \\
+    void StartReadObjectParentInstance(type object) {                                   \\
+        (parent_instance ? parent_instance : this)->c_##type.StartRead(object);         \\
+    }                                                                                   \\
+    void FinishReadObjectParentInstance(type object) {                                  \\
+        (parent_instance ? parent_instance : this)->c_##type.FinishRead(object);        \\
+    }                                                                                   \\
+    void CreateObjectParentInstance(type object) {                                      \\
+        (parent_instance ? parent_instance : this)->c_##type.CreateObject(object);      \\
+    }                                                                                   \\
+    void DestroyObjectParentInstance(type object) {                                     \\
+        (parent_instance ? parent_instance : this)->c_##type.DestroyObject(object);     \\
+    }
+
+WRAPPER_PARENT_INSTANCE(VkDevice)
+WRAPPER_PARENT_INSTANCE(VkInstance)
+WRAPPER(VkQueue)
+#ifdef DISTINCT_NONDISPATCHABLE_HANDLES
+COUNTER_CLASS_BODIES_TEMPLATE
+
+#else   // DISTINCT_NONDISPATCHABLE_HANDLES
+WRAPPER(uint64_t)
+WRAPPER_PARENT_INSTANCE(uint64_t)
+#endif  // DISTINCT_NONDISPATCHABLE_HANDLES
+
+    void CreateObject(VkCommandBuffer object) {
+        c_VkCommandBuffer.CreateObject(object);
+    }
+    void DestroyObject(VkCommandBuffer object) {
+        c_VkCommandBuffer.DestroyObject(object);
+    }
+
+    // VkCommandBuffer needs check for implicit use of command pool
+    void StartWriteObject(VkCommandBuffer object, bool lockPool = true) {
+        if (lockPool) {
+            auto iter = command_pool_map.find(object);
+            if (iter != command_pool_map.end()) {
+                VkCommandPool pool = iter->second;
+                StartWriteObject(pool);
+            }
+        }
+        c_VkCommandBuffer.StartWrite(object);
+    }
+    void FinishWriteObject(VkCommandBuffer object, bool lockPool = true) {
+        c_VkCommandBuffer.FinishWrite(object);
+        if (lockPool) {
+            auto iter = command_pool_map.find(object);
+            if (iter != command_pool_map.end()) {
+                VkCommandPool pool = iter->second;
+                FinishWriteObject(pool);
+            }
+        }
+    }
+    void StartReadObject(VkCommandBuffer object) {
+        auto iter = command_pool_map.find(object);
+        if (iter != command_pool_map.end()) {
+            VkCommandPool pool = iter->second;
+            // We set up a read guard against the "Contents" counter to catch conflict vs. vkResetCommandPool and vkDestroyCommandPool
+            // while *not* establishing a read guard against the command pool counter itself to avoid false positives for
+            // non-externally sync'd command buffers
+            c_VkCommandPoolContents.StartRead(pool);
+        }
+        c_VkCommandBuffer.StartRead(object);
+    }
+    void FinishReadObject(VkCommandBuffer object) {
+        c_VkCommandBuffer.FinishRead(object);
+        auto iter = command_pool_map.find(object);
+        if (iter != command_pool_map.end()) {
+            VkCommandPool pool = iter->second;
+            c_VkCommandPoolContents.FinishRead(pool);
+        }
+    } """
+
+
+    inline_custom_source_preamble = """
+void ThreadSafety::PreCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pAllocateInfo,
+                                                       VkCommandBuffer *pCommandBuffers) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(pAllocateInfo->commandPool);
+}
+
+void ThreadSafety::PostCallRecordAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pAllocateInfo,
+                                                        VkCommandBuffer *pCommandBuffers, VkResult result) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(pAllocateInfo->commandPool);
+
+    // Record mapping from command buffer to command pool
+    if(pCommandBuffers) {
+        auto lock = write_lock_guard_t(thread_safety_lock);
+        auto &pool_command_buffers = pool_command_buffers_map[pAllocateInfo->commandPool];
+        for (uint32_t index = 0; index < pAllocateInfo->commandBufferCount; index++) {
+            command_pool_map.insert_or_assign(pCommandBuffers[index], pAllocateInfo->commandPool);
+            CreateObject(pCommandBuffers[index]);
+            pool_command_buffers.insert(pCommandBuffers[index]);
+        }
+    }
+}
+
+
+void ThreadSafety::PreCallRecordCreateDescriptorSetLayout(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorSetLayout*                      pSetLayout) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordCreateDescriptorSetLayout(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorSetLayout*                      pSetLayout,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pSetLayout);
+
+        // Check whether any binding uses UPDATE_AFTER_BIND
+        bool update_after_bind = false;
+        const auto *flags_create_info = lvl_find_in_chain<VkDescriptorSetLayoutBindingFlagsCreateInfoEXT>(pCreateInfo->pNext);
+        if (flags_create_info) {
+            for (uint32_t i = 0; i < flags_create_info->bindingCount; ++i) {
+                if (flags_create_info->pBindingFlags[i] & VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT) {
+                    update_after_bind = true;
+                    break;
+                }
+            }
+        }
+        dsl_update_after_bind_map.insert_or_assign(*pSetLayout, update_after_bind);
+    }
+}
+
+void ThreadSafety::PreCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
+                                                       VkDescriptorSet *pDescriptorSets) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(pAllocateInfo->descriptorPool);
+    // Host access to pAllocateInfo::descriptorPool must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
+                                                        VkDescriptorSet *pDescriptorSets, VkResult result) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(pAllocateInfo->descriptorPool);
+    // Host access to pAllocateInfo::descriptorPool must be externally synchronized
+    if (VK_SUCCESS == result) {
+        auto lock = write_lock_guard_t(thread_safety_lock);
+        auto &pool_descriptor_sets = pool_descriptor_sets_map[pAllocateInfo->descriptorPool];
+        for (uint32_t index0 = 0; index0 < pAllocateInfo->descriptorSetCount; index0++) {
+            CreateObject(pDescriptorSets[index0]);
+            pool_descriptor_sets.insert(pDescriptorSets[index0]);
+
+            auto iter = dsl_update_after_bind_map.find(pAllocateInfo->pSetLayouts[index0]);
+            if (iter != dsl_update_after_bind_map.end()) {
+                ds_update_after_bind_map.insert_or_assign(pDescriptorSets[index0], iter->second);
+            } else {
+                assert(0 && "descriptor set layout not found");
+            }
+        }
+    }
+}
+
+void ThreadSafety::PreCallRecordFreeDescriptorSets(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(descriptorPool);
+    if (pDescriptorSets) {
+        for (uint32_t index=0; index < descriptorSetCount; index++) {
+            StartWriteObject(pDescriptorSets[index]);
+        }
+    }
+    // Host access to descriptorPool must be externally synchronized
+    // Host access to each member of pDescriptorSets must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordFreeDescriptorSets(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(descriptorPool);
+    if (pDescriptorSets) {
+        for (uint32_t index=0; index < descriptorSetCount; index++) {
+            FinishWriteObject(pDescriptorSets[index]);
+        }
+    }
+    // Host access to descriptorPool must be externally synchronized
+    // Host access to each member of pDescriptorSets must be externally synchronized
+    if (VK_SUCCESS == result) {
+        auto lock = write_lock_guard_t(thread_safety_lock);
+        auto &pool_descriptor_sets = pool_descriptor_sets_map[descriptorPool];
+        for (uint32_t index0 = 0; index0 < descriptorSetCount; index0++) {
+            DestroyObject(pDescriptorSets[index0]);
+            pool_descriptor_sets.erase(pDescriptorSets[index0]);
+        }
+    }
+}
+
+void ThreadSafety::PreCallRecordDestroyDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(descriptorPool);
+    // Host access to descriptorPool must be externally synchronized
+    auto lock = read_lock_guard_t(thread_safety_lock);
+    auto iterator = pool_descriptor_sets_map.find(descriptorPool);
+    // Possible to have no descriptor sets allocated from pool
+    if (iterator != pool_descriptor_sets_map.end()) {
+        for(auto descriptor_set : pool_descriptor_sets_map[descriptorPool]) {
+            StartWriteObject(descriptor_set);
+        }
+    }
+}
+
+void ThreadSafety::PostCallRecordDestroyDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(descriptorPool);
+    DestroyObject(descriptorPool);
+    // Host access to descriptorPool must be externally synchronized
+    {
+        auto lock = write_lock_guard_t(thread_safety_lock);
+        // remove references to implicitly freed descriptor sets
+        for(auto descriptor_set : pool_descriptor_sets_map[descriptorPool]) {
+            FinishWriteObject(descriptor_set);
+            DestroyObject(descriptor_set);
+        }
+        pool_descriptor_sets_map[descriptorPool].clear();
+        pool_descriptor_sets_map.erase(descriptorPool);
+    }
+}
+
+void ThreadSafety::PreCallRecordResetDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    VkDescriptorPoolResetFlags                  flags) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(descriptorPool);
+    // Host access to descriptorPool must be externally synchronized
+    // any sname:VkDescriptorSet objects allocated from pname:descriptorPool must be externally synchronized between host accesses
+    auto lock = read_lock_guard_t(thread_safety_lock);
+    auto iterator = pool_descriptor_sets_map.find(descriptorPool);
+    // Possible to have no descriptor sets allocated from pool
+    if (iterator != pool_descriptor_sets_map.end()) {
+        for(auto descriptor_set : pool_descriptor_sets_map[descriptorPool]) {
+            StartWriteObject(descriptor_set);
+        }
+    }
+}
+
+void ThreadSafety::PostCallRecordResetDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    VkDescriptorPoolResetFlags                  flags,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(descriptorPool);
+    // Host access to descriptorPool must be externally synchronized
+    // any sname:VkDescriptorSet objects allocated from pname:descriptorPool must be externally synchronized between host accesses
+    if (VK_SUCCESS == result) {
+        // remove references to implicitly freed descriptor sets
+        auto lock = write_lock_guard_t(thread_safety_lock);
+        for(auto descriptor_set : pool_descriptor_sets_map[descriptorPool]) {
+            FinishWriteObject(descriptor_set);
+            DestroyObject(descriptor_set);
+        }
+        pool_descriptor_sets_map[descriptorPool].clear();
+    }
+}
+
+bool ThreadSafety::DsUpdateAfterBind(VkDescriptorSet set) const
+{
+    auto iter = ds_update_after_bind_map.find(set);
+    if (iter != ds_update_after_bind_map.end()) {
+        return iter->second;
+    }
+    return false;
+}
+
+void ThreadSafety::PreCallRecordUpdateDescriptorSets(
+    VkDevice                                    device,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites,
+    uint32_t                                    descriptorCopyCount,
+    const VkCopyDescriptorSet*                  pDescriptorCopies) {
+    StartReadObjectParentInstance(device);
+    if (pDescriptorWrites) {
+        for (uint32_t index=0; index < descriptorWriteCount; index++) {
+            auto dstSet = pDescriptorWrites[index].dstSet;
+            bool update_after_bind = DsUpdateAfterBind(dstSet);
+            if (update_after_bind) {
+                StartReadObject(dstSet);
+            } else {
+                StartWriteObject(dstSet);
+            }
+        }
+    }
+    if (pDescriptorCopies) {
+        for (uint32_t index=0; index < descriptorCopyCount; index++) {
+            auto dstSet = pDescriptorCopies[index].dstSet;
+            bool update_after_bind = DsUpdateAfterBind(dstSet);
+            if (update_after_bind) {
+                StartReadObject(dstSet);
+            } else {
+                StartWriteObject(dstSet);
+            }
+            StartReadObject(pDescriptorCopies[index].srcSet);
+        }
+    }
+    // Host access to pDescriptorWrites[].dstSet must be externally synchronized
+    // Host access to pDescriptorCopies[].dstSet must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordUpdateDescriptorSets(
+    VkDevice                                    device,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites,
+    uint32_t                                    descriptorCopyCount,
+    const VkCopyDescriptorSet*                  pDescriptorCopies) {
+    FinishReadObjectParentInstance(device);
+    if (pDescriptorWrites) {
+        for (uint32_t index=0; index < descriptorWriteCount; index++) {
+            auto dstSet = pDescriptorWrites[index].dstSet;
+            bool update_after_bind = DsUpdateAfterBind(dstSet);
+            if (update_after_bind) {
+                FinishReadObject(dstSet);
+            } else {
+                FinishWriteObject(dstSet);
+            }
+        }
+    }
+    if (pDescriptorCopies) {
+        for (uint32_t index=0; index < descriptorCopyCount; index++) {
+            auto dstSet = pDescriptorCopies[index].dstSet;
+            bool update_after_bind = DsUpdateAfterBind(dstSet);
+            if (update_after_bind) {
+                FinishReadObject(dstSet);
+            } else {
+                FinishWriteObject(dstSet);
+            }
+            FinishReadObject(pDescriptorCopies[index].srcSet);
+        }
+    }
+    // Host access to pDescriptorWrites[].dstSet must be externally synchronized
+    // Host access to pDescriptorCopies[].dstSet must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordUpdateDescriptorSetWithTemplate(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(descriptorUpdateTemplate);
+
+    bool update_after_bind = DsUpdateAfterBind(descriptorSet);
+    if (update_after_bind) {
+        StartReadObject(descriptorSet);
+    } else {
+        StartWriteObject(descriptorSet);
+    }
+    // Host access to descriptorSet must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordUpdateDescriptorSetWithTemplate(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(descriptorUpdateTemplate);
+
+    bool update_after_bind = DsUpdateAfterBind(descriptorSet);
+    if (update_after_bind) {
+        FinishReadObject(descriptorSet);
+    } else {
+        FinishWriteObject(descriptorSet);
+    }
+    // Host access to descriptorSet must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordUpdateDescriptorSetWithTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(descriptorUpdateTemplate);
+
+    bool update_after_bind = DsUpdateAfterBind(descriptorSet);
+    if (update_after_bind) {
+        StartReadObject(descriptorSet);
+    } else {
+        StartWriteObject(descriptorSet);
+    }
+    // Host access to descriptorSet must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordUpdateDescriptorSetWithTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(descriptorUpdateTemplate);
+
+    bool update_after_bind = DsUpdateAfterBind(descriptorSet);
+    if (update_after_bind) {
+        FinishReadObject(descriptorSet);
+    } else {
+        FinishWriteObject(descriptorSet);
+    }
+    // Host access to descriptorSet must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount,
+                                                   const VkCommandBuffer *pCommandBuffers) {
+    const bool lockCommandPool = false;  // pool is already directly locked
+    StartReadObjectParentInstance(device);
+    StartWriteObject(commandPool);
+    if(pCommandBuffers) {
+        // Even though we're immediately "finishing" below, we still are testing for concurrency with any call in process
+        // so this isn't a no-op
+        // The driver may immediately reuse command buffers in another thread.
+        // These updates need to be done before calling down to the driver.
+        auto lock = write_lock_guard_t(thread_safety_lock);
+        auto &pool_command_buffers = pool_command_buffers_map[commandPool];
+        for (uint32_t index = 0; index < commandBufferCount; index++) {
+            StartWriteObject(pCommandBuffers[index], lockCommandPool);
+            FinishWriteObject(pCommandBuffers[index], lockCommandPool);
+            DestroyObject(pCommandBuffers[index]);
+            pool_command_buffers.erase(pCommandBuffers[index]);
+            command_pool_map.erase(pCommandBuffers[index]);
+        }
+    }
+}
+
+void ThreadSafety::PostCallRecordFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount,
+                                                    const VkCommandBuffer *pCommandBuffers) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(commandPool);
+}
+
+void ThreadSafety::PreCallRecordCreateCommandPool(
+    VkDevice                                    device,
+    const VkCommandPoolCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkCommandPool*                              pCommandPool) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordCreateCommandPool(
+    VkDevice                                    device,
+    const VkCommandPoolCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkCommandPool*                              pCommandPool,
+    VkResult                                    result) {
+    FinishReadObjectParentInstance(device);
+    if (result == VK_SUCCESS) {
+        CreateObject(*pCommandPool);
+        c_VkCommandPoolContents.CreateObject(*pCommandPool);
+    }
+}
+
+void ThreadSafety::PreCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(commandPool);
+    // Check for any uses of non-externally sync'd command buffers (for example from vkCmdExecuteCommands)
+    c_VkCommandPoolContents.StartWrite(commandPool);
+    // Host access to commandPool must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags, VkResult result) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(commandPool);
+    c_VkCommandPoolContents.FinishWrite(commandPool);
+    // Host access to commandPool must be externally synchronized
+}
+
+void ThreadSafety::PreCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks *pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(commandPool);
+    // Check for any uses of non-externally sync'd command buffers (for example from vkCmdExecuteCommands)
+    c_VkCommandPoolContents.StartWrite(commandPool);
+    // Host access to commandPool must be externally synchronized
+
+    auto lock = write_lock_guard_t(thread_safety_lock);
+    // The driver may immediately reuse command buffers in another thread.
+    // These updates need to be done before calling down to the driver.
+    // remove references to implicitly freed command pools
+    for(auto command_buffer : pool_command_buffers_map[commandPool]) {
+        DestroyObject(command_buffer);
+    }
+    pool_command_buffers_map[commandPool].clear();
+    pool_command_buffers_map.erase(commandPool);
+}
+
+void ThreadSafety::PostCallRecordDestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks *pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(commandPool);
+    DestroyObject(commandPool);
+    c_VkCommandPoolContents.FinishWrite(commandPool);
+    c_VkCommandPoolContents.DestroyObject(commandPool);
+}
+
+// GetSwapchainImages can return a non-zero count with a NULL pSwapchainImages pointer.  Let's avoid crashes by ignoring
+// pSwapchainImages.
+void ThreadSafety::PreCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
+                                                      VkImage *pSwapchainImages) {
+    StartReadObjectParentInstance(device);
+    StartReadObject(swapchain);
+}
+
+void ThreadSafety::PostCallRecordGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
+                                                       VkImage *pSwapchainImages, VkResult result) {
+    FinishReadObjectParentInstance(device);
+    FinishReadObject(swapchain);
+    if (pSwapchainImages != NULL) {
+        auto lock = write_lock_guard_t(thread_safety_lock);
+        auto &wrapped_swapchain_image_handles = swapchain_wrapped_image_handle_map[swapchain];
+        for (uint32_t i = static_cast<uint32_t>(wrapped_swapchain_image_handles.size()); i < *pSwapchainImageCount; i++) {
+            CreateObject(pSwapchainImages[i]);
+            wrapped_swapchain_image_handles.emplace_back(pSwapchainImages[i]);
+        }
+    }
+}
+
+void ThreadSafety::PreCallRecordDestroySwapchainKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartReadObjectParentInstance(device);
+    StartWriteObject(swapchain);
+    // Host access to swapchain must be externally synchronized
+    auto lock = read_lock_guard_t(thread_safety_lock);
+    for (auto &image_handle : swapchain_wrapped_image_handle_map[swapchain]) {
+        StartWriteObject(image_handle);
+    }
+}
+
+void ThreadSafety::PostCallRecordDestroySwapchainKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishReadObjectParentInstance(device);
+    FinishWriteObject(swapchain);
+    DestroyObject(swapchain);
+    // Host access to swapchain must be externally synchronized
+    auto lock = write_lock_guard_t(thread_safety_lock);
+    for (auto &image_handle : swapchain_wrapped_image_handle_map[swapchain]) {
+        FinishWriteObject(image_handle);
+        DestroyObject(image_handle);
+    }
+    swapchain_wrapped_image_handle_map.erase(swapchain);
+}
+
+void ThreadSafety::PreCallRecordDestroyDevice(
+    VkDevice                                    device,
+    const VkAllocationCallbacks*                pAllocator) {
+    StartWriteObjectParentInstance(device);
+    // Host access to device must be externally synchronized
+}
+
+void ThreadSafety::PostCallRecordDestroyDevice(
+    VkDevice                                    device,
+    const VkAllocationCallbacks*                pAllocator) {
+    FinishWriteObjectParentInstance(device);
+    DestroyObjectParentInstance(device);
+    // Host access to device must be externally synchronized
+    auto lock = write_lock_guard_t(thread_safety_lock);
+    for (auto &queue : device_queues_map[device]) {
+        DestroyObject(queue);
+    }
+    device_queues_map[device].clear();
+}
+
+void ThreadSafety::PreCallRecordGetDeviceQueue(
+    VkDevice                                    device,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t                                    queueIndex,
+    VkQueue*                                    pQueue) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetDeviceQueue(
+    VkDevice                                    device,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t                                    queueIndex,
+    VkQueue*                                    pQueue) {
+    FinishReadObjectParentInstance(device);
+    CreateObject(*pQueue);
+    auto lock = write_lock_guard_t(thread_safety_lock);
+    device_queues_map[device].insert(*pQueue);
+}
+
+void ThreadSafety::PreCallRecordGetDeviceQueue2(
+    VkDevice                                    device,
+    const VkDeviceQueueInfo2*                   pQueueInfo,
+    VkQueue*                                    pQueue) {
+    StartReadObjectParentInstance(device);
+}
+
+void ThreadSafety::PostCallRecordGetDeviceQueue2(
+    VkDevice                                    device,
+    const VkDeviceQueueInfo2*                   pQueueInfo,
+    VkQueue*                                    pQueue) {
+    FinishReadObjectParentInstance(device);
+    CreateObject(*pQueue);
+    auto lock = write_lock_guard_t(thread_safety_lock);
+    device_queues_map[device].insert(*pQueue);
+}
+
+"""
+
+
+    # This is an ordered list of sections in the header file.
+    ALL_SECTIONS = ['command']
+    def __init__(self,
+                 errFile = sys.stderr,
+                 warnFile = sys.stderr,
+                 diagFile = sys.stdout):
+        OutputGenerator.__init__(self, errFile, warnFile, diagFile)
+        # Internal state - accumulators for different inner block text
+        self.sections = dict([(section, []) for section in self.ALL_SECTIONS])
+        self.non_dispatchable_types = set()
+        self.object_to_debug_report_type = {
+            'VkInstance' : 'VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT',
+            'VkPhysicalDevice' : 'VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT',
+            'VkDevice' : 'VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT',
+            'VkQueue' : 'VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT',
+            'VkSemaphore' : 'VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT',
+            'VkCommandBuffer' : 'VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT',
+            'VkFence' : 'VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT',
+            'VkDeviceMemory' : 'VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT',
+            'VkBuffer' : 'VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT',
+            'VkImage' : 'VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT',
+            'VkEvent' : 'VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT',
+            'VkQueryPool' : 'VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT',
+            'VkBufferView' : 'VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT',
+            'VkImageView' : 'VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT',
+            'VkShaderModule' : 'VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT',
+            'VkPipelineCache' : 'VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT',
+            'VkPipelineLayout' : 'VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT',
+            'VkRenderPass' : 'VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT',
+            'VkPipeline' : 'VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT',
+            'VkDescriptorSetLayout' : 'VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT',
+            'VkSampler' : 'VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT',
+            'VkDescriptorPool' : 'VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT',
+            'VkDescriptorSet' : 'VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT',
+            'VkFramebuffer' : 'VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT',
+            'VkCommandPool' : 'VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT',
+            'VkSurfaceKHR' : 'VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT',
+            'VkSwapchainKHR' : 'VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT',
+            'VkDisplayKHR' : 'VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT',
+            'VkDisplayModeKHR' : 'VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT',
+            'VkObjectTableNVX' : 'VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT',
+            'VkIndirectCommandsLayoutNVX' : 'VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT',
+            'VkSamplerYcbcrConversion' : 'VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT',
+            'VkDescriptorUpdateTemplate' : 'VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT',
+            'VkAccelerationStructureNV' : 'VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT',
+            'VkDebugReportCallbackEXT' : 'VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT',
+            'VkValidationCacheEXT' : 'VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT' }
+
+    # Check if the parameter passed in is a pointer to an array
+    def paramIsArray(self, param):
+        return param.attrib.get('len') is not None
+
+    # Check if the parameter passed in is a pointer
+    def paramIsPointer(self, param):
+        ispointer = False
+        for elem in param:
+            if elem.tag == 'type' and elem.tail is not None and '*' in elem.tail:
+                ispointer = True
+        return ispointer
+
+    # Map paramtype to a thread safety suffix, either 'ParentInstance' or ''
+    def paramSuffix(self, paramtype):
+        if paramtype is not None:
+            paramtype = paramtype.text
+        else:
+            paramtype = 'None'
+
+        # Use 'in' to check the types, to handle suffixes and pointers, except for VkDevice
+        # which can be confused with VkDeviceMemory
+        suffix = ''
+        if 'VkSurface' in paramtype or 'VkDebugReportCallback' in paramtype or 'VkDebugUtilsMessenger' in paramtype or 'VkDevice' == paramtype or 'VkDevice*' == paramtype or 'VkInstance' in paramtype:
+            suffix = 'ParentInstance'
+        return suffix
+
+    def makeThreadUseBlock(self, cmd, name, functionprefix):
+        """Generate C function pointer typedef for <command> Element"""
+        paramdecl = ''
+        # Find and add any parameters that are thread unsafe
+        params = cmd.findall('param')
+        for param in params:
+            paramname = param.find('name')
+            if False: # self.paramIsPointer(param):
+                paramdecl += '    // not watching use of pointer ' + paramname.text + '\n'
+            else:
+                externsync = param.attrib.get('externsync')
+                if externsync == 'true':
+                    if self.paramIsArray(param):
+                        paramdecl += 'if (' + paramname.text + ') {\n'
+                        paramdecl += '    for (uint32_t index=0; index < ' + param.attrib.get('len') + '; index++) {\n'
+                        paramdecl += '        ' + functionprefix + 'WriteObject' + self.paramSuffix(param.find('type')) + '(' + paramname.text + '[index]);\n'
+                        paramdecl += '    }\n'
+                        paramdecl += '}\n'
+                    else:
+                        paramdecl += functionprefix + 'WriteObject' + self.paramSuffix(param.find('type')) + '(' + paramname.text + ');\n'
+                        if ('Destroy' in name or 'Free' in name) and functionprefix == 'Finish':
+                            paramdecl += 'DestroyObject' + self.paramSuffix(param.find('type')) + '(' + paramname.text + ');\n'
+                elif (param.attrib.get('externsync')):
+                    if self.paramIsArray(param):
+                        # Externsync can list pointers to arrays of members to synchronize
+                        paramdecl += 'if (' + paramname.text + ') {\n'
+                        paramdecl += '    for (uint32_t index=0; index < ' + param.attrib.get('len') + '; index++) {\n'
+                        second_indent = '    '
+                        for member in externsync.split(","):
+                            # Replace first empty [] in member name with index
+                            element = member.replace('[]','[index]',1)
+
+                            # XXX TODO: Can we do better to lookup types of externsync members?
+                            suffix = ''
+                            if 'surface' in member:
+                                suffix = 'ParentInstance'
+
+                            if '[]' in element:
+                                # TODO: These null checks can be removed if threading ends up behind parameter
+                                #       validation in layer order
+                                element_ptr = element.split('[]')[0]
+                                paramdecl += '        if (' + element_ptr + ') {\n'
+                                # Replace any second empty [] in element name with inner array index based on mapping array
+                                # names like "pSomeThings[]" to "someThingCount" array size. This could be more robust by
+                                # mapping a param member name to a struct type and "len" attribute.
+                                limit = element[0:element.find('s[]')] + 'Count'
+                                dotp = limit.rfind('.p')
+                                limit = limit[0:dotp+1] + limit[dotp+2:dotp+3].lower() + limit[dotp+3:]
+                                paramdecl += '            for (uint32_t index2=0; index2 < '+limit+'; index2++) {\n'
+                                element = element.replace('[]','[index2]')
+                                second_indent = '        '
+                                paramdecl += '        ' + second_indent + functionprefix + 'WriteObject' + suffix + '(' + element + ');\n'
+                                paramdecl += '            }\n'
+                                paramdecl += '        }\n'
+                            else:
+                                paramdecl += '    ' + second_indent + functionprefix + 'WriteObject' + suffix + '(' + element + ');\n'
+                        paramdecl += '    }\n'
+                        paramdecl += '}\n'
+                    else:
+                        # externsync can list members to synchronize
+                        for member in externsync.split(","):
+                            member = str(member).replace("::", "->")
+                            member = str(member).replace(".", "->")
+                            # XXX TODO: Can we do better to lookup types of externsync members?
+                            suffix = ''
+                            if 'surface' in member:
+                                suffix = 'ParentInstance'
+                            paramdecl += '    ' + functionprefix + 'WriteObject' + suffix + '(' + member + ');\n'
+                elif self.paramIsPointer(param) and ('Create' in name or 'Allocate' in name) and functionprefix == 'Finish':
+                    paramtype = param.find('type')
+                    if paramtype is not None:
+                        paramtype = paramtype.text
+                    else:
+                        paramtype = 'None'
+                    if paramtype in self.handle_types:
+                        indent = ''
+                        create_pipelines_call = True
+                        # The CreateXxxPipelines APIs can return a list of partly created pipelines upon failure
+                        if not ('Create' in name and 'Pipelines' in name):
+                            paramdecl += 'if (result == VK_SUCCESS) {\n'
+                            create_pipelines_call = False
+                            indent = '    '
+                        if self.paramIsArray(param):
+                            # Add pointer dereference for array counts that are pointer values
+                            dereference = ''
+                            for candidate in params:
+                                if param.attrib.get('len') == candidate.find('name').text:
+                                    if self.paramIsPointer(candidate):
+                                        dereference = '*'
+                            param_len = str(param.attrib.get('len')).replace("::", "->")
+                            paramdecl += indent + 'if (' + paramname.text + ') {\n'
+                            paramdecl += indent + '    for (uint32_t index = 0; index < ' + dereference + param_len + '; index++) {\n'
+                            if create_pipelines_call:
+                                paramdecl += indent + '        if (!pPipelines[index]) continue;\n'
+                            paramdecl += indent + '        CreateObject' + self.paramSuffix(param.find('type')) + '(' + paramname.text + '[index]);\n'
+                            paramdecl += indent + '    }\n'
+                            paramdecl += indent + '}\n'
+                        else:
+                            paramdecl += '    CreateObject' + self.paramSuffix(param.find('type')) + '(*' + paramname.text + ');\n'
+                        if not create_pipelines_call:
+                            paramdecl += '}\n'
+                else:
+                    paramtype = param.find('type')
+                    if paramtype is not None:
+                        paramtype = paramtype.text
+                    else:
+                        paramtype = 'None'
+                    if paramtype in self.handle_types and paramtype != 'VkPhysicalDevice':
+                        if self.paramIsArray(param) and ('pPipelines' != paramname.text):
+                            # Add pointer dereference for array counts that are pointer values
+                            dereference = ''
+                            for candidate in params:
+                                if param.attrib.get('len') == candidate.find('name').text:
+                                    if self.paramIsPointer(candidate):
+                                        dereference = '*'
+                            param_len = str(param.attrib.get('len')).replace("::", "->")
+                            paramdecl += 'if (' + paramname.text + ') {\n'
+                            paramdecl += '    for (uint32_t index = 0; index < ' + dereference + param_len + '; index++) {\n'
+                            paramdecl += '        ' + functionprefix + 'ReadObject' + self.paramSuffix(param.find('type')) + '(' + paramname.text + '[index]);\n'
+                            paramdecl += '    }\n'
+                            paramdecl += '}\n'
+                        elif not self.paramIsPointer(param):
+                            # Pointer params are often being created.
+                            # They are not being read from.
+                            paramdecl += functionprefix + 'ReadObject' + self.paramSuffix(param.find('type')) + '(' + paramname.text + ');\n'
+        explicitexternsyncparams = cmd.findall("param[@externsync]")
+        if (explicitexternsyncparams is not None):
+            for param in explicitexternsyncparams:
+                externsyncattrib = param.attrib.get('externsync')
+                paramname = param.find('name')
+                paramdecl += '// Host access to '
+                if externsyncattrib == 'true':
+                    if self.paramIsArray(param):
+                        paramdecl += 'each member of ' + paramname.text
+                    elif self.paramIsPointer(param):
+                        paramdecl += 'the object referenced by ' + paramname.text
+                    else:
+                        paramdecl += paramname.text
+                else:
+                    paramdecl += externsyncattrib
+                paramdecl += ' must be externally synchronized\n'
+
+        # Find and add any "implicit" parameters that are thread unsafe
+        implicitexternsyncparams = cmd.find('implicitexternsyncparams')
+        if (implicitexternsyncparams is not None):
+            for elem in implicitexternsyncparams:
+                paramdecl += '// '
+                paramdecl += elem.text
+                paramdecl += ' must be externally synchronized between host accesses\n'
+
+        if (paramdecl == ''):
+            return None
+        else:
+            return paramdecl
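+
+    # Illustrative example of the block built above (not emitted verbatim): for a
+    # command such as vkDestroySemaphore, whose 'semaphore' parameter is marked
+    # externsync in vk.xml, the 'Finish' block would contain roughly:
+    #     FinishReadObjectParentInstance(device);
+    #     FinishWriteObject(semaphore);
+    #     DestroyObject(semaphore);
+    #     // Host access to semaphore must be externally synchronized
+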
+    def beginFile(self, genOpts):
+        OutputGenerator.beginFile(self, genOpts)
+
+        # Initialize members that require the tree
+        self.handle_types = GetHandleTypes(self.registry.tree)
+
+        # TODO: LUGMAL -- remove this and add our copyright
+        # User-supplied prefix text, if any (list of strings)
+        write(self.inline_copyright_message, file=self.outFile)
+
+        self.header_file = (genOpts.filename == 'thread_safety.h')
+        self.source_file = (genOpts.filename == 'thread_safety.cpp')
+
+        if not self.header_file and not self.source_file:
+            print("Error: Output Filenames have changed, update generator source.\n")
+            sys.exit(1)
+
+        if self.source_file:
+            write('#include "chassis.h"', file=self.outFile)
+            write('#include "thread_safety.h"', file=self.outFile)
+            self.newline()
+            write(self.inline_custom_source_preamble, file=self.outFile)
+
+
+    def endFile(self):
+
+        # Create class definitions
+        counter_class_defs = ''
+        counter_class_instances = ''
+        counter_class_bodies = ''
+
+        for obj in sorted(self.non_dispatchable_types):
+            counter_class_defs += '    counter<%s> c_%s;\n' % (obj, obj)
+            if obj in self.object_to_debug_report_type:
+                obj_type = self.object_to_debug_report_type[obj]
+            else:
+                obj_type = 'VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT'
+            counter_class_instances += '          c_%s("%s", %s, &report_data),\n' % (obj, obj, obj_type)
+            if 'VkSurface' in obj or 'VkDebugReportCallback' in obj or 'VkDebugUtilsMessenger' in obj:
+                counter_class_bodies += 'WRAPPER_PARENT_INSTANCE(%s)\n' % obj
+            else:
+                counter_class_bodies += 'WRAPPER(%s)\n' % obj
+        if self.header_file:
+            class_def = self.inline_custom_header_preamble.replace('COUNTER_CLASS_DEFINITIONS_TEMPLATE', counter_class_defs)
+            class_def = class_def.replace('COUNTER_CLASS_INSTANCES_TEMPLATE', counter_class_instances[:-2]) # Kill last comma
+            class_def = class_def.replace('COUNTER_CLASS_BODIES_TEMPLATE', counter_class_bodies)
+            write(class_def, file=self.outFile)
+        write('\n'.join(self.sections['command']), file=self.outFile)
+        if self.header_file:
+            write('};', file=self.outFile)
+
+        # Finish processing in superclass
+        OutputGenerator.endFile(self)
+
+    def beginFeature(self, interface, emit):
+        #write('// starting beginFeature', file=self.outFile)
+        # Start processing in superclass
+        OutputGenerator.beginFeature(self, interface, emit)
+        # C-specific
+        # Accumulate includes, defines, types, enums, function pointer typedefs,
+        # and function prototypes separately for this feature. They're only
+        # printed in endFeature().
+        self.featureExtraProtect = GetFeatureProtect(interface)
+        if (self.featureExtraProtect is not None):
+            self.appendSection('command', '\n#ifdef %s' % self.featureExtraProtect)
+
+        #write('// ending beginFeature', file=self.outFile)
+    def endFeature(self):
+        # C-specific
+        if (self.emit):
+            if (self.featureExtraProtect is not None):
+                self.appendSection('command', '#endif // %s' % self.featureExtraProtect)
+        # Finish processing in superclass
+        OutputGenerator.endFeature(self)
+    #
+    # Append a definition to the specified section
+    def appendSection(self, section, text):
+        self.sections[section].append(text)
+    #
+    # Type generation
+    def genType(self, typeinfo, name, alias):
+        OutputGenerator.genType(self, typeinfo, name, alias)
+        if self.handle_types.IsNonDispatchable(name):
+            self.non_dispatchable_types.add(name)
+    #
+    # Struct (e.g. C "struct" type) generation.
+    # This is a special case of the <type> tag where the contents are
+    # interpreted as a set of <member> tags instead of freeform C type
+    # declarations. The <member> tags are just like <param>
+    # tags - they are a declaration of a struct or union member.
+    # Only simple member declarations are supported (no nested
+    # structs etc.)
+    def genStruct(self, typeinfo, typeName, alias):
+        OutputGenerator.genStruct(self, typeinfo, typeName, alias)
+        body = 'typedef ' + typeinfo.elem.get('category') + ' ' + typeName + ' {\n'
+        # paramdecl = self.makeCParamDecl(typeinfo.elem, self.genOpts.alignFuncParam)
+        for member in typeinfo.elem.findall('.//member'):
+            body += self.makeCParamDecl(member, self.genOpts.alignFuncParam)
+            body += ';\n'
+        body += '} ' + typeName + ';\n'
+        self.appendSection('struct', body)
+    #
+    # Group (e.g. C "enum" type) generation.
+    # These are concatenated together with other types.
+    def genGroup(self, groupinfo, groupName, alias):
+        pass
+    # Enumerant generation
+    # <enum> tags may specify their values in several ways, but are usually
+    # just integers.
+    def genEnum(self, enuminfo, name, alias):
+        pass
+    #
+    # Command generation
+    def genCmd(self, cmdinfo, name, alias):
+        # Commands that are shadowed by interface functions and are not implemented here
+        special_functions = [
+            'vkAllocateCommandBuffers',
+            'vkFreeCommandBuffers',
+            'vkCreateCommandPool',
+            'vkResetCommandPool',
+            'vkDestroyCommandPool',
+            'vkAllocateDescriptorSets',
+            'vkFreeDescriptorSets',
+            'vkResetDescriptorPool',
+            'vkDestroyDescriptorPool',
+            'vkQueuePresentKHR',
+            'vkGetSwapchainImagesKHR',
+            'vkDestroySwapchainKHR',
+            'vkDestroyDevice',
+            'vkGetDeviceQueue',
+            'vkGetDeviceQueue2',
+            'vkCreateDescriptorSetLayout',
+            'vkUpdateDescriptorSets',
+            'vkUpdateDescriptorSetWithTemplate',
+            'vkUpdateDescriptorSetWithTemplateKHR',
+        ]
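+        # vkQueuePresentKHR is skipped for both the header and the source; the other
+        # special functions are skipped only when generating the source file.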
+        if name == 'vkQueuePresentKHR' or (name in special_functions and self.source_file):
+            return
+
+        if (("DebugMarker" in name or "DebugUtilsObject" in name) and "EXT" in name):
+            self.appendSection('command', '// TODO - not wrapping EXT function ' + name)
+            return
+
+        # Determine first if this function needs to be intercepted
+        startthreadsafety = self.makeThreadUseBlock(cmdinfo.elem, name, 'Start')
+        finishthreadsafety = self.makeThreadUseBlock(cmdinfo.elem, name, 'Finish')
+        if startthreadsafety is None and finishthreadsafety is None:
+            return
+
+        if startthreadsafety is None:
+            startthreadsafety = ''
+        if finishthreadsafety is None:
+            finishthreadsafety = ''
+
+        OutputGenerator.genCmd(self, cmdinfo, name, alias)
+
+        # setup common to call wrappers
+        # first parameter is always dispatchable
+        dispatchable_type = cmdinfo.elem.find('param/type').text
+        dispatchable_name = cmdinfo.elem.find('param/name').text
+
+        decls = self.makeCDecls(cmdinfo.elem)
+
+        result_type = cmdinfo.elem.find('proto/type')
+
+        if self.source_file:
+            pre_decl = decls[0][:-1]
+            pre_decl = pre_decl.split("VKAPI_CALL ")[1]
+            pre_decl = 'void ThreadSafety::PreCallRecord' + pre_decl + ' {'
+
+            # PreCallRecord
+            self.appendSection('command', '')
+            self.appendSection('command', pre_decl)
+            self.appendSection('command', "    " + "\n    ".join(str(startthreadsafety).rstrip().split("\n")))
+            self.appendSection('command', '}')
+
+            # PostCallRecord
+            post_decl = pre_decl.replace('PreCallRecord', 'PostCallRecord')
+            if result_type.text == 'VkResult':
+                post_decl = post_decl.replace(')', ',\n    VkResult                                    result)')
+            elif result_type.text == 'VkDeviceAddress':
+                post_decl = post_decl.replace(')', ',\n    VkDeviceAddress                             result)')
+            self.appendSection('command', '')
+            self.appendSection('command', post_decl)
+            self.appendSection('command', "    " + "\n    ".join(str(finishthreadsafety).rstrip().split("\n")))
+            self.appendSection('command', '}')
+
+        if self.header_file:
+            pre_decl = decls[0][:-1]
+            pre_decl = pre_decl.split("VKAPI_CALL ")[1]
+            pre_decl = 'void PreCallRecord' + pre_decl + ';'
+
+            # PreCallRecord
+            self.appendSection('command', '')
+            self.appendSection('command', pre_decl)
+
+            # PostCallRecord
+            post_decl = pre_decl.replace('PreCallRecord', 'PostCallRecord')
+            if result_type.text == 'VkResult':
+                post_decl = post_decl.replace(')', ',\n    VkResult                                    result)')
+            elif result_type.text == 'VkDeviceAddress':
+                post_decl = post_decl.replace(')', ',\n    VkDeviceAddress                             result)')
+            self.appendSection('command', '')
+            self.appendSection('command', post_decl)
+
+    #
+    # override makeProtoName to drop the "vk" prefix
+    def makeProtoName(self, name, tail):
+        return self.genOpts.apientry + name[2:] + tail
diff --git a/src/third_party/vulkan-validation-layers/src/scripts/update_deps.py b/src/third_party/vulkan-validation-layers/src/scripts/update_deps.py
new file mode 100755
index 0000000..f1fe36d
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/scripts/update_deps.py
@@ -0,0 +1,679 @@
+#!/usr/bin/env python
+
+# Copyright 2017 The Glslang Authors. All rights reserved.
+# Copyright (c) 2018 Valve Corporation
+# Copyright (c) 2018 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This script was heavily leveraged from KhronosGroup/glslang
+# update_glslang_sources.py.
+"""update_deps.py
+
+Get and build dependent repositories using known-good commits.
+
+Purpose
+-------
+
+This program is intended to assist a developer of this repository
+(the "home" repository) by gathering and building the repositories that
+this home repository depends on.  It also checks out each dependent
+repository at a "known-good" commit in order to provide stability in
+the dependent repositories.
+
+Python Compatibility
+--------------------
+
+This program can be used with Python 2.7 and Python 3.
+
+Known-Good JSON Database
+------------------------
+
+This program expects to find a file named "known_good.json" in the
+same directory as the program file.  This JSON file is tailored for
+the needs of the home repository by including its dependent repositories.
+
+Program Options
+---------------
+
+See the help text (update_deps.py --help) for a complete list of options.
+
+Program Operation
+-----------------
+
+The program uses the user's current directory at the time of program
+invocation as the location for fetching and building the dependent
+repositories.  The user can override this by using the "--dir" option.
+
+For example, a directory named "build" in the repository's root directory
+is a good place to put the dependent repositories because that directory
+is not tracked by Git. (See the .gitignore file.)  The "external" directory
+may also be a suitable location.
+A user can issue:
+
+$ cd My-Repo
+$ mkdir build
+$ cd build
+$ ../scripts/update_deps.py
+
+or, to do the same thing, but using the --dir option:
+
+$ cd My-Repo
+$ mkdir build
+$ scripts/update_deps.py --dir=build
+
+With these commands, the "build" directory is considered the "top"
+directory where the program clones the dependent repositories.  The
+JSON file configures the build and install working directories to be
+within this "top" directory.
+
+Note that the "dir" option can also specify an absolute path:
+
+$ cd My-Repo
+$ scripts/update_deps.py --dir=/tmp/deps
+
+The "top" dir is then /tmp/deps (Linux filesystem example) and is
+where this program will clone and build the dependent repositories.
+
+Helper CMake Config File
+------------------------
+
+When the program finishes building the dependencies, it writes a file
+named "helper.cmake" to the "top" directory that contains CMake commands
+for setting CMake variables for locating the dependent repositories.
+This helper file can be used to set up the CMake build files for this
+"home" repository.
+
+A complete sequence might look like:
+
+$ git clone git@github.com:My-Group/My-Repo.git
+$ cd My-Repo
+$ mkdir build
+$ cd build
+$ ../scripts/update_deps.py
+$ cmake -C helper.cmake ..
+$ cmake --build .
+
+JSON File Schema
+----------------
+
+There's no formal schema for the "known-good" JSON file, but here is
+a description of its elements.  All elements are required except those
+marked as optional.  Please see the "known_good.json" file for
+examples of all of these elements.
+
+- name
+
+The name of the dependent repository.  This field can be referenced
+by the "deps.repo_name" structure to record a dependency.
+
+- url
+
+Specifies the URL of the repository.
+Example: https://github.com/KhronosGroup/Vulkan-Loader.git
+
+- sub_dir
+
+The directory where the program clones the repository, relative to
+the "top" directory.
+
+- build_dir
+
+The directory used to build the repository, relative to the "top"
+directory.
+
+- install_dir
+
+The directory used to store the installed build artifacts, relative
+to the "top" directory.
+
+- commit
+
+The commit used to checkout the repository.  This can be a SHA-1
+object name or a refname used with the remote name "origin".
+For example, this field can be set to "origin/sdk-1.1.77" to
+select the end of the sdk-1.1.77 branch.
+
+- deps (optional)
+
+An array of pairs consisting of a CMake variable name and a
+repository name to specify a dependent repo and a "link" to
+that repo's install artifacts.  For example:
+
+"deps" : [
+    {
+        "var_name" : "VULKAN_HEADERS_INSTALL_DIR",
+        "repo_name" : "Vulkan-Headers"
+    }
+]
+
+which represents that this repository depends on the Vulkan-Headers
+repository and uses the VULKAN_HEADERS_INSTALL_DIR CMake variable to
+specify the location where it expects to find the Vulkan-Headers install
+directory.
+Note that the "repo_name" element must match the "name" element of some
+other repository in the JSON file.
+
+- prebuild (optional)
+- prebuild_linux (optional)  (For Linux and MacOS)
+- prebuild_windows (optional)
+
+A list of commands to execute before building a dependent repository.
+This is useful for repositories that require the execution of some
+sort of "update" script or need to clone an auxillary repository like
+googletest.
+
+The commands listed in "prebuild" are executed first, and then the
+commands for the specific platform are executed.
+
+- custom_build (optional)
+
+A list of commands to execute as a custom build instead of using
+the built-in CMake way of building. Requires "build_step" to be
+set to "custom".
+
+You can insert the following keywords into the commands listed in
+"custom_build" if they require runtime information (like whether the
+build config is "Debug" or "Release").
+
+Keywords:
+{0} reference to a dictionary of repos and their attributes
+{1} reference to the command line arguments set before start
+{2} reference to the CONFIG_MAP value of config.
+
+Example:
+{2} returns the CONFIG_MAP value of config e.g. debug -> Debug
+{1}.config returns the config variable set when you ran update_deps.py
+{0}[Vulkan-Headers][repo_root] returns the repo_root variable from
+                                   the Vulkan-Headers GoodRepo object.
+
+- cmake_options (optional)
+
+A list of options to pass to CMake during the generation phase.
+
+- ci_only (optional)
+
+A list of environment variables where one must be set to "true"
+(case-insensitive) in order for this repo to be fetched and built.
+This list can be used to specify repos that should be built only in CI.
+Typically, this list might contain "TRAVIS" and/or "APPVEYOR" because
+each of these CI systems sets an environment variable with its own
+name to "true".  Note that this could also be (ab)used to control
+the processing of the repo with any environment variable.  The default
+is an empty list, which means that the repo is always processed.
+
+- build_step (optional)
+
+Specifies if the dependent repository should be built or not. This can
+have a value of 'build', 'custom', or 'skip'. The dependent repositories are
+built by default.
+
+- build_platforms (optional)
+
+A list of platforms the repository will be built on.
+Legal options include:
+"windows"
+"linux"
+"darwin"
+
+Builds on all platforms by default.
+
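+Example Entry (illustrative)
+----------------------------
+
+The entry below uses made-up values purely for illustration; see the
+"known_good.json" file next to this script for the real entries.
+
+{
+    "name" : "Example-Repo",
+    "url" : "https://github.com/Example-Org/Example-Repo.git",
+    "sub_dir" : "Example-Repo",
+    "build_dir" : "Example-Repo/build",
+    "install_dir" : "Example-Repo/build/install",
+    "commit" : "origin/master"
+}
+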
+Note
+----
+
+The "sub_dir", "build_dir", and "install_dir" elements are all relative
+to the effective "top" directory.  Specifying absolute paths is not
+supported.  However, the "top" directory specified with the "--dir"
+option can be a relative or absolute path.
+
+"""
+
+from __future__ import print_function
+
+import argparse
+import json
+import distutils.dir_util
+import os.path
+import subprocess
+import sys
+import platform
+import multiprocessing
+import shlex
+import shutil
+
+KNOWN_GOOD_FILE_NAME = 'known_good.json'
+
+CONFIG_MAP = {
+    'debug': 'Debug',
+    'release': 'Release',
+    'relwithdebinfo': 'RelWithDebInfo',
+    'minsizerel': 'MinSizeRel'
+}
+
+VERBOSE = False
+
+DEVNULL = open(os.devnull, 'wb')
+
+
+def command_output(cmd, directory, fail_ok=False):
+    """Runs a command in a directory and returns its standard output stream.
+
+    Captures the standard error stream and prints it on error.
+
+    Raises a RuntimeError if the command fails to launch or otherwise fails.
+    """
+    if VERBOSE:
+        print('In {d}: {cmd}'.format(d=directory, cmd=cmd))
+    p = subprocess.Popen(
+        cmd, cwd=directory, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    (stdout, stderr) = p.communicate()
+    if p.returncode != 0:
+        print('*** Error ***\nstderr contents:\n{}'.format(stderr))
+        if not fail_ok:
+            raise RuntimeError('Failed to run {} in {}'.format(cmd, directory))
+    if VERBOSE:
+        print(stdout)
+    return stdout
+
+class GoodRepo(object):
+    """Represents a repository at a known-good commit."""
+
+    def __init__(self, json, args):
+        """Initializes this good repo object.
+
+        Args:
+        'json':  A fully populated JSON object describing the repo.
+        'args':  Results from ArgumentParser
+        """
+        self._json = json
+        self._args = args
+        # Required JSON elements
+        self.name = json['name']
+        self.url = json['url']
+        self.sub_dir = json['sub_dir']
+        self.commit = json['commit']
+        # Optional JSON elements
+        self.build_dir = None
+        self.install_dir = None
+        if json.get('build_dir'):
+            self.build_dir = os.path.normpath(json['build_dir'])
+        if json.get('install_dir'):
+            self.install_dir = os.path.normpath(json['install_dir'])
+        self.deps = json['deps'] if ('deps' in json) else []
+        self.prebuild = json['prebuild'] if ('prebuild' in json) else []
+        self.prebuild_linux = json['prebuild_linux'] if (
+            'prebuild_linux' in json) else []
+        self.prebuild_windows = json['prebuild_windows'] if (
+            'prebuild_windows' in json) else []
+        self.custom_build = json['custom_build'] if ('custom_build' in json) else []
+        self.cmake_options = json['cmake_options'] if (
+            'cmake_options' in json) else []
+        self.ci_only = json['ci_only'] if ('ci_only' in json) else []
+        self.build_step = json['build_step'] if ('build_step' in json) else 'build'
+        self.build_platforms = json['build_platforms'] if ('build_platforms' in json) else []
+        # Absolute paths for a repo's directories
+        dir_top = os.path.abspath(args.dir)
+        self.repo_dir = os.path.join(dir_top, self.sub_dir)
+        if self.build_dir:
+            self.build_dir = os.path.join(dir_top, self.build_dir)
+        if self.install_dir:
+            self.install_dir = os.path.join(dir_top, self.install_dir)
+        # Check if platform is one to build on
+        self.on_build_platform = False
+        if self.build_platforms == [] or platform.system().lower() in self.build_platforms:
+            self.on_build_platform = True
+
+    def Clone(self):
+        distutils.dir_util.mkpath(self.repo_dir)
+        command_output(['git', 'clone', self.url, '.'], self.repo_dir)
+
+    def Fetch(self):
+        command_output(['git', 'fetch', 'origin'], self.repo_dir)
+
+    def Checkout(self):
+        print('Checking out {n} in {d}'.format(n=self.name, d=self.repo_dir))
+        if self._args.do_clean_repo:
+            shutil.rmtree(self.repo_dir, ignore_errors=True)
+        if not os.path.exists(os.path.join(self.repo_dir, '.git')):
+            self.Clone()
+        self.Fetch()
+        if len(self._args.ref):
+            command_output(['git', 'checkout', self._args.ref], self.repo_dir)
+        else:
+            command_output(['git', 'checkout', self.commit], self.repo_dir)
+        print(command_output(['git', 'status'], self.repo_dir))
+
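+    # CustomPreProcess expands the {0}/{1}/{2} placeholders described in the module
+    # docstring; e.g. (illustrative) a custom_build command containing "{2}" becomes
+    # the CONFIG_MAP value of --config, such as "Debug".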
+    def CustomPreProcess(self, cmd_str, repo_dict):
+        return cmd_str.format(repo_dict, self._args, CONFIG_MAP[self._args.config])
+
+    def PreBuild(self):
+        """Execute any prebuild steps from the repo root"""
+        for p in self.prebuild:
+            command_output(shlex.split(p), self.repo_dir)
+        if platform.system() == 'Linux' or platform.system() == 'Darwin':
+            for p in self.prebuild_linux:
+                command_output(shlex.split(p), self.repo_dir)
+        if platform.system() == 'Windows':
+            for p in self.prebuild_windows:
+                command_output(shlex.split(p), self.repo_dir)
+
+    def CustomBuild(self, repo_dict):
+        """Execute any custom_build steps from the repo root"""
+        for p in self.custom_build:
+            cmd = self.CustomPreProcess(p, repo_dict)
+            command_output(shlex.split(cmd), self.repo_dir)
+
+    def CMakeConfig(self, repos):
+        """Build CMake command for the configuration phase and execute it"""
+        if self._args.do_clean_build:
+            shutil.rmtree(self.build_dir)
+        if self._args.do_clean_install:
+            shutil.rmtree(self.install_dir)
+
+        # Create and change to build directory
+        distutils.dir_util.mkpath(self.build_dir)
+        os.chdir(self.build_dir)
+
+        cmake_cmd = [
+            'cmake', self.repo_dir,
+            '-DCMAKE_INSTALL_PREFIX=' + self.install_dir
+        ]
+
+        # For each repo this repo depends on, generate a CMake variable
+        # definitions for "...INSTALL_DIR" that points to that dependent
+        # repo's install dir.
+        for d in self.deps:
+            dep_commit = [r for r in repos if r.name == d['repo_name']]
+            if len(dep_commit):
+                cmake_cmd.append('-D{var_name}={install_dir}'.format(
+                    var_name=d['var_name'],
+                    install_dir=dep_commit[0].install_dir))
+
+        # Add any CMake options
+        for option in self.cmake_options:
+            cmake_cmd.append(option)
+
+        # Set build config for single-configuration generators
+        if platform.system() == 'Linux' or platform.system() == 'Darwin':
+            cmake_cmd.append('-DCMAKE_BUILD_TYPE={config}'.format(
+                config=CONFIG_MAP[self._args.config]))
+
+        # Use the CMake -A option to select the platform architecture
+        # without needing a Visual Studio generator.
+        if platform.system() == 'Windows':
+            if self._args.arch == '64' or self._args.arch == 'x64' or self._args.arch == 'win64':
+                cmake_cmd.append('-A')
+                cmake_cmd.append('x64')
+
+        # Apply a generator, if one is specified.  This can be used to supply
+        # a specific generator for the dependent repositories to match
+        # that of the main repository.
+        if self._args.generator is not None:
+            cmake_cmd.extend(['-G', self._args.generator])
+
+        if VERBOSE:
+            print("CMake command: " + " ".join(cmake_cmd))
+
+        ret_code = subprocess.call(cmake_cmd)
+        if ret_code != 0:
+            sys.exit(ret_code)
+
+    def CMakeBuild(self):
+        """Build CMake command for the build phase and execute it"""
+        cmake_cmd = ['cmake', '--build', self.build_dir, '--target', 'install']
+        if self._args.do_clean:
+            cmake_cmd.append('--clean-first')
+
+        if platform.system() == 'Windows':
+            cmake_cmd.append('--config')
+            cmake_cmd.append(CONFIG_MAP[self._args.config])
+
+        # Speed up the build.
+        if platform.system() == 'Linux' or platform.system() == 'Darwin':
+            cmake_cmd.append('--')
+            num_make_jobs = multiprocessing.cpu_count()
+            env_make_jobs = os.environ.get('MAKE_JOBS', None)
+            if env_make_jobs is not None:
+                try:
+                    num_make_jobs = min(num_make_jobs, int(env_make_jobs))
+                except ValueError:
+                    print('warning: environment variable MAKE_JOBS has non-numeric value "{}".  '
+                          'Using {} (CPU count) instead.'.format(env_make_jobs, num_make_jobs))
+            cmake_cmd.append('-j{}'.format(num_make_jobs))
+        if platform.system() == 'Windows':
+            cmake_cmd.append('--')
+            cmake_cmd.append('/maxcpucount')
+
+        if VERBOSE:
+            print("CMake command: " + " ".join(cmake_cmd))
+
+        ret_code = subprocess.call(cmake_cmd)
+        if ret_code != 0:
+            sys.exit(ret_code)
+
+    def Build(self, repos, repo_dict):
+        """Build the dependent repo"""
+        print('Building {n} in {d}'.format(n=self.name, d=self.repo_dir))
+        print('Build dir = {b}'.format(b=self.build_dir))
+        print('Install dir = {i}\n'.format(i=self.install_dir))
+
+        # Run any prebuild commands
+        self.PreBuild()
+
+        if self.build_step == 'custom':
+            self.CustomBuild(repo_dict)
+            return
+
+        # Build and execute CMake command for creating build files
+        self.CMakeConfig(repos)
+
+        # Build and execute CMake command for the build
+        self.CMakeBuild()
+
+
+def GetGoodRepos(args):
+    """Returns the latest list of GoodRepo objects.
+
+    The known-good file is expected to be in the same
+    directory as this script unless overridden by the 'known_good_dir'
+    parameter.
+    """
+    if args.known_good_dir:
+        known_good_file = os.path.join(os.path.abspath(args.known_good_dir),
+            KNOWN_GOOD_FILE_NAME)
+    else:
+        known_good_file = os.path.join(
+            os.path.dirname(os.path.abspath(__file__)), KNOWN_GOOD_FILE_NAME)
+    with open(known_good_file) as known_good:
+        return [
+            GoodRepo(repo, args)
+            for repo in json.loads(known_good.read())['repos']
+        ]
+
+
+def GetInstallNames(args):
+    """Returns the install names list.
+
+    The known-good file is expected to be in the same
+    directory as this script unless overridden by the 'known_good_dir'
+    parameter.
+    """
+    if args.known_good_dir:
+        known_good_file = os.path.join(os.path.abspath(args.known_good_dir),
+            KNOWN_GOOD_FILE_NAME)
+    else:
+        known_good_file = os.path.join(
+            os.path.dirname(os.path.abspath(__file__)), KNOWN_GOOD_FILE_NAME)
+    with open(known_good_file) as known_good:
+        install_info = json.loads(known_good.read())
+        if install_info.get('install_names'):
+            return install_info['install_names']
+        else:
+            return None
+
+
+def CreateHelper(args, repos, filename):
+    """Create a CMake config helper file.
+
+    The helper file is intended to be used with 'cmake -C <file>'
+    to build this home repo using the dependencies built by this script.
+
+    The install_names dictionary represents the CMake variables used by the
+    home repo to locate the install dirs of the dependent repos.
+    This information is baked into the CMake files of the home repo and so
+    this dictionary is kept with the repo via the json file.
+    """
+    def escape(path):
+        return path.replace('\\', '\\\\')
+    install_names = GetInstallNames(args)
+    with open(filename, 'w') as helper_file:
+        for repo in repos:
+            if install_names and repo.name in install_names and repo.on_build_platform:
+                helper_file.write('set({var} "{dir}" CACHE STRING "" FORCE)\n'
+                                  .format(
+                                      var=install_names[repo.name],
+                                      dir=escape(repo.install_dir)))
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        description='Get and build dependent repos at known-good commits')
+    parser.add_argument(
+        '--known_good_dir',
+        dest='known_good_dir',
+        help="Specify directory for known_good.json file.")
+    parser.add_argument(
+        '--dir',
+        dest='dir',
+        default='.',
+        help="Set target directory for repository roots. Default is \'.\'.")
+    parser.add_argument(
+        '--ref',
+        dest='ref',
+        default='',
+        help="Override 'commit' with git reference. E.g., 'origin/master'")
+    parser.add_argument(
+        '--no-build',
+        dest='do_build',
+        action='store_false',
+        help=
+        "Clone/update repositories and generate build files without performing compilation",
+        default=True)
+    parser.add_argument(
+        '--clean',
+        dest='do_clean',
+        action='store_true',
+        help="Clean files generated by compiler and linker before building",
+        default=False)
+    parser.add_argument(
+        '--clean-repo',
+        dest='do_clean_repo',
+        action='store_true',
+        help="Delete repository directory before building",
+        default=False)
+    parser.add_argument(
+        '--clean-build',
+        dest='do_clean_build',
+        action='store_true',
+        help="Delete build directory before building",
+        default=False)
+    parser.add_argument(
+        '--clean-install',
+        dest='do_clean_install',
+        action='store_true',
+        help="Delete install directory before building",
+        default=False)
+    parser.add_argument(
+        '--arch',
+        dest='arch',
+        choices=['32', '64', 'x86', 'x64', 'win32', 'win64'],
+        type=str.lower,
+        help="Set build files architecture (Windows)",
+        default='64')
+    parser.add_argument(
+        '--config',
+        dest='config',
+        choices=['debug', 'release', 'relwithdebinfo', 'minsizerel'],
+        type=str.lower,
+        help="Set build files configuration",
+        default='debug')
+    parser.add_argument(
+        '--generator',
+        dest='generator',
+        help="Set the CMake generator",
+        default=None)
+
+    args = parser.parse_args()
+    save_cwd = os.getcwd()
+
+    # Create working "top" directory if needed
+    distutils.dir_util.mkpath(args.dir)
+    abs_top_dir = os.path.abspath(args.dir)
+
+    repos = GetGoodRepos(args)
+    repo_dict = {}
+
+    print('Starting builds in {d}'.format(d=abs_top_dir))
+    for repo in repos:
+        # If the repo has a platform whitelist, skip the repo
+        # unless we are building on a whitelisted platform.
+        if not repo.on_build_platform:
+            continue
+
+        field_list = ('url',
+                      'sub_dir',
+                      'commit',
+                      'build_dir',
+                      'install_dir',
+                      'deps',
+                      'prebuild',
+                      'prebuild_linux',
+                      'prebuild_windows',
+                      'custom_build',
+                      'cmake_options',
+                      'ci_only',
+                      'build_step',
+                      'build_platforms',
+                      'repo_dir',
+                      'on_build_platform')
+        repo_dict[repo.name] = {field: getattr(repo, field) for field in field_list}
+
+        # If the repo has a CI whitelist, skip the repo unless
+        # one of the CI's environment variable is set to true.
+        if len(repo.ci_only):
+            do_build = False
+            for env in repo.ci_only:
+                if env not in os.environ:
+                    continue
+                if os.environ[env].lower() == 'true':
+                    do_build = True
+                    break
+            if not do_build:
+                continue
+
+        # Clone/update the repository
+        repo.Checkout()
+
+        # Build the repository
+        if args.do_build and repo.build_step != 'skip':
+            repo.Build(repos, repo_dict)
+
+    # Need to restore original cwd in order for CreateHelper to find json file
+    os.chdir(save_cwd)
+    CreateHelper(args, repos, os.path.join(abs_top_dir, 'helper.cmake'))
+
+    sys.exit(0)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/src/third_party/vulkan-validation-layers/src/scripts/vk_validation_stats.py b/src/third_party/vulkan-validation-layers/src/scripts/vk_validation_stats.py
new file mode 100755
index 0000000..7707dde
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/scripts/vk_validation_stats.py
@@ -0,0 +1,819 @@
+#!/usr/bin/env python3
+# Copyright (c) 2015-2019 The Khronos Group Inc.
+# Copyright (c) 2015-2019 Valve Corporation
+# Copyright (c) 2015-2019 LunarG, Inc.
+# Copyright (c) 2015-2019 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Tobin Ehlis <tobine@google.com>
+# Author: Dave Houlton <daveh@lunarg.com>
+# Author: Shannon McPherson <shannon@lunarg.com>
+
+import argparse
+import common_codegen
+import csv
+import glob
+import html
+import json
+import operator
+import os
+import platform
+import re
+import sys
+import time
+from collections import defaultdict
+
+verbose_mode = False
+txt_db = False
+csv_db = False
+html_db = False
+txt_filename = "validation_error_database.txt"
+csv_filename = "validation_error_database.csv"
+html_filename = "validation_error_database.html"
+header_filename = "vk_validation_error_messages.h"
+vuid_prefixes = ['VUID-', 'UNASSIGNED-']
+
+# Hard-coded flags that could be command line args, if we decide that's useful
+# replace KHR vuids with non-KHR during consistency checking
+dealias_khr = True
+ignore_unassigned = True # These are not found in layer code unless they appear explicitly (most don't), so they would produce false positives
+
+layer_source_files = [common_codegen.repo_relative(path) for path in [
+    'layers/buffer_validation.cpp',
+    'layers/core_validation.cpp',
+    'layers/descriptor_sets.cpp',
+    'layers/drawdispatch.cpp',
+    'layers/parameter_validation_utils.cpp',
+    'layers/object_tracker_utils.cpp',
+    'layers/shader_validation.cpp',
+    'layers/stateless_validation.h',
+    'layers/generated/parameter_validation.cpp',
+    'layers/generated/object_tracker.cpp',
+]]
+
+test_source_files = glob.glob(os.path.join(common_codegen.repo_relative('tests'), '*.cpp'))
+
+# This needs to be updated as new extensions roll in
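+# Maps KHR-suffixed VUIDs onto their non-KHR (core) equivalents; consulted when
+# dealias_khr is enabled above.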
+khr_aliases = {
+    'VUID-vkBindBufferMemory2KHR-device-parameter'                                        : 'VUID-vkBindBufferMemory2-device-parameter',
+    'VUID-vkBindBufferMemory2KHR-pBindInfos-parameter'                                    : 'VUID-vkBindBufferMemory2-pBindInfos-parameter',
+    'VUID-vkBindImageMemory2KHR-device-parameter'                                         : 'VUID-vkBindImageMemory2-device-parameter',
+    'VUID-vkBindImageMemory2KHR-pBindInfos-parameter'                                     : 'VUID-vkBindImageMemory2-pBindInfos-parameter',
+    'VUID-vkCmdDispatchBaseKHR-commandBuffer-parameter'                                   : 'VUID-vkCmdDispatchBase-commandBuffer-parameter',
+    'VUID-vkCmdSetDeviceMaskKHR-commandBuffer-parameter'                                  : 'VUID-vkCmdSetDeviceMask-commandBuffer-parameter',
+    'VUID-vkCreateDescriptorUpdateTemplateKHR-device-parameter'                           : 'VUID-vkCreateDescriptorUpdateTemplate-device-parameter',
+    'VUID-vkCreateDescriptorUpdateTemplateKHR-pDescriptorUpdateTemplate-parameter'        : 'VUID-vkCreateDescriptorUpdateTemplate-pDescriptorUpdateTemplate-parameter',
+    'VUID-vkCreateSamplerYcbcrConversionKHR-device-parameter'                             : 'VUID-vkCreateSamplerYcbcrConversion-device-parameter',
+    'VUID-vkCreateSamplerYcbcrConversionKHR-pYcbcrConversion-parameter'                   : 'VUID-vkCreateSamplerYcbcrConversion-pYcbcrConversion-parameter',
+    'VUID-vkDestroyDescriptorUpdateTemplateKHR-descriptorUpdateTemplate-parameter'        : 'VUID-vkDestroyDescriptorUpdateTemplate-descriptorUpdateTemplate-parameter',
+    'VUID-vkDestroyDescriptorUpdateTemplateKHR-descriptorUpdateTemplate-parent'           : 'VUID-vkDestroyDescriptorUpdateTemplate-descriptorUpdateTemplate-parent',
+    'VUID-vkDestroyDescriptorUpdateTemplateKHR-device-parameter'                          : 'VUID-vkDestroyDescriptorUpdateTemplate-device-parameter',
+    'VUID-vkDestroySamplerYcbcrConversionKHR-device-parameter'                            : 'VUID-vkDestroySamplerYcbcrConversion-device-parameter',
+    'VUID-vkDestroySamplerYcbcrConversionKHR-ycbcrConversion-parameter'                   : 'VUID-vkDestroySamplerYcbcrConversion-ycbcrConversion-parameter',
+    'VUID-vkDestroySamplerYcbcrConversionKHR-ycbcrConversion-parent'                      : 'VUID-vkDestroySamplerYcbcrConversion-ycbcrConversion-parent',
+    'VUID-vkEnumeratePhysicalDeviceGroupsKHR-instance-parameter'                          : 'VUID-vkEnumeratePhysicalDeviceGroups-instance-parameter',
+    'VUID-vkEnumeratePhysicalDeviceGroupsKHR-pPhysicalDeviceGroupProperties-parameter'    : 'VUID-vkEnumeratePhysicalDeviceGroups-pPhysicalDeviceGroupProperties-parameter',
+    'VUID-vkGetBufferMemoryRequirements2KHR-device-parameter'                             : 'VUID-vkGetBufferMemoryRequirements2-device-parameter',
+    'VUID-vkGetDescriptorSetLayoutSupportKHR-device-parameter'                            : 'VUID-vkGetDescriptorSetLayoutSupport-device-parameter',
+    'VUID-vkGetDeviceGroupPeerMemoryFeaturesKHR-device-parameter'                         : 'VUID-vkGetDeviceGroupPeerMemoryFeatures-device-parameter',
+    'VUID-vkGetDeviceGroupPeerMemoryFeaturesKHR-pPeerMemoryFeatures-parameter'            : 'VUID-vkGetDeviceGroupPeerMemoryFeatures-pPeerMemoryFeatures-parameter',
+    'VUID-vkGetImageMemoryRequirements2KHR-device-parameter'                              : 'VUID-vkGetImageMemoryRequirements2-device-parameter',
+    'VUID-vkGetImageSparseMemoryRequirements2KHR-device-parameter'                        : 'VUID-vkGetImageSparseMemoryRequirements2-device-parameter',
+    'VUID-vkGetImageSparseMemoryRequirements2KHR-pSparseMemoryRequirements-parameter'     : 'VUID-vkGetImageSparseMemoryRequirements2-pSparseMemoryRequirements-parameter',
+    'VUID-vkGetPhysicalDeviceExternalBufferPropertiesKHR-physicalDevice-parameter'        : 'VUID-vkGetPhysicalDeviceExternalBufferProperties-physicalDevice-parameter',
+    'VUID-vkGetPhysicalDeviceExternalFencePropertiesKHR-physicalDevice-parameter'         : 'VUID-vkGetPhysicalDeviceExternalFenceProperties-physicalDevice-parameter',
+    'VUID-vkGetPhysicalDeviceExternalSemaphorePropertiesKHR-physicalDevice-parameter'     : 'VUID-vkGetPhysicalDeviceExternalSemaphoreProperties-physicalDevice-parameter',
+    'VUID-vkGetPhysicalDeviceFeatures2KHR-physicalDevice-parameter'                       : 'VUID-vkGetPhysicalDeviceFeatures2-physicalDevice-parameter',
+    'VUID-vkGetPhysicalDeviceFormatProperties2KHR-format-parameter'                       : 'VUID-vkGetPhysicalDeviceFormatProperties2-format-parameter',
+    'VUID-vkGetPhysicalDeviceFormatProperties2KHR-physicalDevice-parameter'               : 'VUID-vkGetPhysicalDeviceFormatProperties2-physicalDevice-parameter',
+    'VUID-vkGetPhysicalDeviceImageFormatProperties2KHR-physicalDevice-parameter'          : 'VUID-vkGetPhysicalDeviceImageFormatProperties2-physicalDevice-parameter',
+    'VUID-vkGetPhysicalDeviceMemoryProperties2KHR-physicalDevice-parameter'               : 'VUID-vkGetPhysicalDeviceMemoryProperties2-physicalDevice-parameter',
+    'VUID-vkGetPhysicalDeviceProperties2KHR-physicalDevice-parameter'                     : 'VUID-vkGetPhysicalDeviceProperties2-physicalDevice-parameter',
+    'VUID-vkGetPhysicalDeviceQueueFamilyProperties2KHR-pQueueFamilyProperties-parameter'  : 'VUID-vkGetPhysicalDeviceQueueFamilyProperties2-pQueueFamilyProperties-parameter',
+    'VUID-vkGetPhysicalDeviceSparseImageFormatProperties2KHR-pProperties-parameter'       : 'VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-pProperties-parameter',
+    'VUID-vkGetPhysicalDeviceSparseImageFormatProperties2KHR-physicalDevice-parameter'    : 'VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-physicalDevice-parameter',
+    'VUID-vkTrimCommandPoolKHR-commandPool-parameter'                                     : 'VUID-vkTrimCommandPool-commandPool-parameter',
+    'VUID-vkTrimCommandPoolKHR-commandPool-parent'                                        : 'VUID-vkTrimCommandPool-commandPool-parent',
+    'VUID-vkTrimCommandPoolKHR-device-parameter'                                          : 'VUID-vkTrimCommandPool-device-parameter',
+    'VUID-vkTrimCommandPoolKHR-flags-zerobitmask'                                         : 'VUID-vkTrimCommandPool-flags-zerobitmask',
+    'VUID-vkUpdateDescriptorSetWithTemplateKHR-descriptorSet-parameter'                   : 'VUID-vkUpdateDescriptorSetWithTemplate-descriptorSet-parameter',
+    'VUID-vkUpdateDescriptorSetWithTemplateKHR-descriptorUpdateTemplate-parameter'        : 'VUID-vkUpdateDescriptorSetWithTemplate-descriptorUpdateTemplate-parameter',
+    'VUID-vkUpdateDescriptorSetWithTemplateKHR-descriptorUpdateTemplate-parent'           : 'VUID-vkUpdateDescriptorSetWithTemplate-descriptorUpdateTemplate-parent',
+    'VUID-vkUpdateDescriptorSetWithTemplateKHR-device-parameter'                          : 'VUID-vkUpdateDescriptorSetWithTemplate-device-parameter',
+    'VUID-vkCreateDescriptorUpdateTemplateKHR-pCreateInfo-parameter'                                : 'VUID-vkCreateDescriptorUpdateTemplate-pCreateInfo-parameter',
+    'VUID-vkCreateSamplerYcbcrConversionKHR-pCreateInfo-parameter'                                  : 'VUID-vkCreateSamplerYcbcrConversion-pCreateInfo-parameter',
+    'VUID-vkGetBufferMemoryRequirements2KHR-pInfo-parameter'                                        : 'VUID-vkGetBufferMemoryRequirements2-pInfo-parameter',
+    'VUID-vkGetBufferMemoryRequirements2KHR-pMemoryRequirements-parameter'                          : 'VUID-vkGetBufferMemoryRequirements2-pMemoryRequirements-parameter',
+    'VUID-vkGetDescriptorSetLayoutSupportKHR-pCreateInfo-parameter'                                 : 'VUID-vkGetDescriptorSetLayoutSupport-pCreateInfo-parameter',
+    'VUID-vkGetDescriptorSetLayoutSupportKHR-pSupport-parameter'                                    : 'VUID-vkGetDescriptorSetLayoutSupport-pSupport-parameter',
+    'VUID-vkGetImageMemoryRequirements2KHR-pInfo-parameter'                                         : 'VUID-vkGetImageMemoryRequirements2-pInfo-parameter',
+    'VUID-vkGetImageMemoryRequirements2KHR-pMemoryRequirements-parameter'                           : 'VUID-vkGetImageMemoryRequirements2-pMemoryRequirements-parameter',
+    'VUID-vkGetImageSparseMemoryRequirements2KHR-pInfo-parameter'                                   : 'VUID-vkGetImageSparseMemoryRequirements2-pInfo-parameter',
+    'VUID-vkGetPhysicalDeviceExternalBufferPropertiesKHR-pExternalBufferInfo-parameter'             : 'VUID-vkGetPhysicalDeviceExternalBufferProperties-pExternalBufferInfo-parameter',
+    'VUID-vkGetPhysicalDeviceExternalBufferPropertiesKHR-pExternalBufferProperties-parameter'       : 'VUID-vkGetPhysicalDeviceExternalBufferProperties-pExternalBufferProperties-parameter',
+    'VUID-vkGetPhysicalDeviceExternalFencePropertiesKHR-pExternalFenceInfo-parameter'               : 'VUID-vkGetPhysicalDeviceExternalFenceProperties-pExternalFenceInfo-parameter',
+    'VUID-vkGetPhysicalDeviceExternalFencePropertiesKHR-pExternalFenceProperties-parameter'         : 'VUID-vkGetPhysicalDeviceExternalFenceProperties-pExternalFenceProperties-parameter',
+    'VUID-vkGetPhysicalDeviceExternalSemaphorePropertiesKHR-pExternalSemaphoreInfo-parameter'       : 'VUID-vkGetPhysicalDeviceExternalSemaphoreProperties-pExternalSemaphoreInfo-parameter',
+    'VUID-vkGetPhysicalDeviceExternalSemaphorePropertiesKHR-pExternalSemaphoreProperties-parameter' : 'VUID-vkGetPhysicalDeviceExternalSemaphoreProperties-pExternalSemaphoreProperties-parameter',
+    'VUID-vkGetPhysicalDeviceFeatures2KHR-pFeatures-parameter'                                      : 'VUID-vkGetPhysicalDeviceFeatures2-pFeatures-parameter',
+    'VUID-vkGetPhysicalDeviceFormatProperties2KHR-pFormatProperties-parameter'                      : 'VUID-vkGetPhysicalDeviceFormatProperties2-pFormatProperties-parameter',
+    'VUID-vkGetPhysicalDeviceImageFormatProperties2KHR-pImageFormatInfo-parameter'                  : 'VUID-vkGetPhysicalDeviceImageFormatProperties2-pImageFormatInfo-parameter',
+    'VUID-vkGetPhysicalDeviceImageFormatProperties2KHR-pImageFormatProperties-parameter'            : 'VUID-vkGetPhysicalDeviceImageFormatProperties2-pImageFormatProperties-parameter',
+    'VUID-vkGetPhysicalDeviceMemoryProperties2KHR-pMemoryProperties-parameter'                      : 'VUID-vkGetPhysicalDeviceMemoryProperties2-pMemoryProperties-parameter',
+    'VUID-vkGetPhysicalDeviceProperties2KHR-pProperties-parameter'                                  : 'VUID-vkGetPhysicalDeviceProperties2-pProperties-parameter',
+    'VUID-vkGetPhysicalDeviceSparseImageFormatProperties2KHR-pFormatInfo-parameter'                 : 'VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-pFormatInfo-parameter' }
+
+def printHelp():
+    print ("Usage:")
+    print ("  python vk_validation_stats.py <json_file>")
+    print ("                                [ -c ]")
+    print ("                                [ -todo ]")
+    print ("                                [ -vuid <vuid_name> ]")
+    print ("                                [ -text [ <text_out_filename>] ]")
+    print ("                                [ -csv  [ <csv_out_filename>]  ]")
+    print ("                                [ -html [ <html_out_filename>] ]")
+    print ("                                [ -export_header ]")
+    print ("                                [ -summary ]")
+    print ("                                [ -verbose ]")
+    print ("                                [ -help ]")
+    print ("\n  The vk_validation_stats script parses validation layer source files to")
+    print ("  determine the set of valid usage checks and tests currently implemented,")
+    print ("  and generates coverage values by comparing against the full set of valid")
+    print ("  usage identifiers in the Vulkan-Headers registry file 'validusage.json'")
+    print ("\nArguments: ")
+    print (" <json-file>       (required) registry file 'validusage.json'")
+    print (" -c                report consistency warnings")
+    print (" -todo             report unimplemented VUIDs")
+    print (" -vuid <vuid_name> report status of individual VUID <vuid_name>")
+    print (" -text [filename]  output the error database text to <text_database_filename>,")
+    print ("                   defaults to 'validation_error_database.txt'")
+    print (" -csv [filename]   output the error database in csv to <csv_database_filename>,")
+    print ("                   defaults to 'validation_error_database.csv'")
+    print (" -html [filename]  output the error database in html to <html_database_filename>,")
+    print ("                   defaults to 'validation_error_database.html'")
+    print (" -export_header    export a new VUID error text header file to <%s>" % header_filename)
+    print (" -summary          output summary of VUID coverage")
+    print (" -verbose          show your work (to stdout)")
+
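+# Class to parse the validusage.json registry file into explicit/implicit VUID sets and a per-VUID text database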
+class ValidationJSON:
+    def __init__(self, filename):
+        self.filename = filename
+        self.explicit_vuids = set()
+        self.implicit_vuids = set()
+        self.all_vuids = set()
+        self.vuid_db = defaultdict(list) # Maps VUID string to list of json-data dicts
+        self.apiversion = ""
+        self.duplicate_vuids = set()
+
+        # A set of specific regular expression substitutions needed to clean up VUID text
+        self.regex_dict = {}
+        self.regex_dict[re.compile('<.*?>|&(amp;)+lt;|&(amp;)+gt;')] = ""
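+        # The substitutions below flatten the latex math fragments embedded in some VUID text
+        # into plain-text equivalents.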
+        self.regex_dict[re.compile(r'\\\(codeSize \\over 4\\\)')] = "(codeSize/4)"
+        self.regex_dict[re.compile(r'\\\(\\lceil{\\frac{height}{maxFragmentDensityTexelSize_{height}}}\\rceil\\\)')] = "the ceiling of height/maxFragmentDensityTexelSize.height"
+        self.regex_dict[re.compile(r'\\\(\\lceil{\\frac{width}{maxFragmentDensityTexelSize_{width}}}\\rceil\\\)')] = "the ceiling of width/maxFragmentDensityTexelSize.width"
+        self.regex_dict[re.compile(r'\\\(\\lceil{\\frac{maxFramebufferHeight}{minFragmentDensityTexelSize_{height}}}\\rceil\\\)')] = "the ceiling of maxFramebufferHeight/minFragmentDensityTexelSize.height"
+        self.regex_dict[re.compile(r'\\\(\\lceil{\\frac{maxFramebufferWidth}{minFragmentDensityTexelSize_{width}}}\\rceil\\\)')] = "the ceiling of maxFramebufferWidth/minFragmentDensityTexelSize.width"
+        self.regex_dict[re.compile(r'\\\(\\lceil\{\\mathit\{rasterizationSamples} \\over 32}\\rceil\\\)')] = "(rasterizationSamples/32)"
+        self.regex_dict[re.compile(r'\\\(\\textrm\{codeSize} \\over 4\\\)')] = "(codeSize/4)"
+        # Some fancy punctuation chars that break the Android build...
+        self.regex_dict[re.compile('&#8594;')] = "->"       # Arrow char
+        self.regex_dict[re.compile('&#8217;')] = "'"        # Left-slanting apostrophe to apostrophe
+        self.regex_dict[re.compile('&#822(0|1);')] = "'"    # L/R-slanting quotes to apostrophe
+
+    def read(self):
+        self.json_dict = {}
+        if os.path.isfile(self.filename):
+            with open(self.filename, 'r', encoding='utf-8') as json_file:
+                self.json_dict = json.load(json_file)
+        if len(self.json_dict) == 0:
+            print("Error: Error loading validusage.json file <%s>" % self.filename)
+            sys.exit(-1)
+        try:
+            version = self.json_dict['version info']
+            validation = self.json_dict['validation']
+            self.apiversion = version['api version']
+        except Exception:
+            print("Error: Failure parsing validusage.json object")
+            sys.exit(-1)
+
+        # Parse vuid from json into local databases
+        for apiname in validation.keys():
+            # print("entrypoint:%s"%apiname)
+            apidict = validation[apiname]
+            for ext in apidict.keys():
+                vlist = apidict[ext]
+                for ventry in vlist:
+                    vuid_string = ventry['vuid']
+                    if (vuid_string[-5:-1].isdecimal()):
+                        self.explicit_vuids.add(vuid_string)    # explicit end in 5 numeric chars
+                        vtype = 'explicit'
+                    else:
+                        self.implicit_vuids.add(vuid_string)    # otherwise, implicit
+                        vtype = 'implicit'
+                    vuid_text = ventry['text']
+                    for regex, replacement in self.regex_dict.items():
+                        vuid_text = re.sub(regex, replacement, vuid_text)   # do regex substitution
+                    vuid_text = html.unescape(vuid_text)                    # anything missed by the regex
+                    self.vuid_db[vuid_string].append({'api':apiname, 'ext':ext, 'type':vtype, 'text':vuid_text})
+        self.all_vuids = self.explicit_vuids | self.implicit_vuids
+        self.duplicate_vuids = set({v for v in self.vuid_db if len(self.vuid_db[v]) > 1})
+        if len(self.duplicate_vuids) > 0:
+            print("Warning: duplicate VUIDs found in validusage.json")
+
+
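+# Class to parse the validation layer source files and record where each VUID is checked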
+class ValidationSource:
+    def __init__(self, source_file_list):
+        self.source_files = source_file_list
+        self.vuid_count_dict = {} # maps each VUID to how many times it is checked and the file/line locations of those checks
+        self.duplicated_checks = 0
+        self.explicit_vuids = set()
+        self.implicit_vuids = set()
+        self.unassigned_vuids = set()
+        self.all_vuids = set()
+
+    def parse(self):
+        prepend = None
+        for sf in self.source_files:
+            line_num = 0
+            with open(sf) as f:
+                for line in f:
+                    line_num = line_num + 1
+                    if any(line.strip().startswith(comment) for comment in ['//', '/*']):
+                        continue
+                    # Find vuid strings
+                    if prepend is not None:
+                        line = prepend[:-2] + line.lstrip().lstrip('"') # join lines skipping CR, whitespace and trailing/leading quote char
+                        prepend = None
+                    if any(prefix in line for prefix in vuid_prefixes):
+                        # Replace the '(' of lines containing validation helper functions with ' ' to make them easier to parse
+                        line = line.replace("(", " ")
+                        line_list = line.split()
+
+                        # A VUID string that has been broken by clang will start with a vuid prefix and end with -, and will be last in the list
+                        broken_vuid = line_list[-1].strip('"')
+                        if any(broken_vuid.startswith(prefix) for prefix in vuid_prefixes) and broken_vuid.endswith('-'):
+                            prepend = line
+                            continue
+
+                        vuid_list = []
+                        for str in line_list:
+                            if any(prefix in str for prefix in vuid_prefixes):
+                                vuid_list.append(str.strip(',);{}"'))
+                        for vuid in vuid_list:
+                            if vuid not in self.vuid_count_dict:
+                                self.vuid_count_dict[vuid] = {}
+                                self.vuid_count_dict[vuid]['count'] = 1
+                                self.vuid_count_dict[vuid]['file_line'] = []
+                            else:
+                                if self.vuid_count_dict[vuid]['count'] == 1:    # only count first time duplicated
+                                    self.duplicated_checks = self.duplicated_checks + 1
+                                self.vuid_count_dict[vuid]['count'] = self.vuid_count_dict[vuid]['count'] + 1
+                            self.vuid_count_dict[vuid]['file_line'].append('%s,%d' % (sf, line_num))
+        # Sort vuids by type
+        for vuid in self.vuid_count_dict.keys():
+            if (vuid.startswith('VUID-')):
+                if (vuid[-5:-1].isdecimal()):
+                    self.explicit_vuids.add(vuid)    # explicit end in 5 numeric chars
+                else:
+                    self.implicit_vuids.add(vuid)
+            elif (vuid.startswith('UNASSIGNED-')):
+                self.unassigned_vuids.add(vuid)
+            else:
+                print("Unable to categorize VUID: %s" % vuid)
+                print("Confused while parsing VUIDs in layer source code - cannot proceed. (FIXME)")
+                exit(-1)
+        self.all_vuids = self.explicit_vuids | self.implicit_vuids | self.unassigned_vuids
+
+# Class to parse the validation layer test source and store testnames
+class ValidationTests:
+    def __init__(self, test_file_list, test_group_name=['VkLayerTest', 'VkPositiveLayerTest', 'VkWsiEnabledLayerTest']):
+        self.test_files = test_file_list
+        self.test_trigger_txt_list = []
+        for tg in test_group_name:
+            self.test_trigger_txt_list.append('TEST_F(%s' % tg)
+        self.explicit_vuids = set()
+        self.implicit_vuids = set()
+        self.unassigned_vuids = set()
+        self.all_vuids = set()
+        #self.test_to_vuids = {} # Map test name to VUIDs tested
+        self.vuid_to_tests = defaultdict(set) # Map VUIDs to set of test names where implemented
+
+    # Parse test files into internal data struct
+    def parse(self):
+        # For each test file, parse test names into set
+        grab_next_line = False # handle testname on separate line than wildcard
+        testname = ''
+        prepend = None
+        for test_file in self.test_files:
+            with open(test_file) as tf:
+                for line in tf:
+                    if any(line.strip().startswith(comment) for comment in ['//', '/*']):
+                        continue
+
+                    # if line ends in a broken VUID string, fix that before proceeding
+                    if prepend is not None:
+                        line = prepend[:-2] + line.lstrip().lstrip('"') # join lines skipping CR, whitespace and trailing/leading quote char
+                        prepend = None
+                    if any(prefix in line for prefix in vuid_prefixes):
+                        line_list = line.split()
+
+                        # A VUID string that has been broken by clang will start with a vuid prefix and end with -, and will be last in the list
+                        broken_vuid = line_list[-1].strip('"')
+                        if any(broken_vuid.startswith(prefix) for prefix in vuid_prefixes) and broken_vuid.endswith('-'):
+                            prepend = line
+                            continue
+
+                    if any(ttt in line for ttt in self.test_trigger_txt_list):
+                        testname = line.split(',')[-1]
+                        testname = testname.strip().strip(' {)')
+                        if ('' == testname):
+                            grab_next_line = True
+                            continue
+                        #self.test_to_vuids[testname] = []
+                    if grab_next_line: # test name on its own line
+                        grab_next_line = False
+                        testname = testname.strip().strip(' {)')
+                        #self.test_to_vuids[testname] = []
+                    if any(prefix in line for prefix in vuid_prefixes):
+                        line_list = re.split(r'[\s{}[\]()"]+', line)
+                        for sub_str in line_list:
+                            if any(prefix in sub_str for prefix in vuid_prefixes):
+                                vuid_str = sub_str.strip(',);:"')
+                                self.vuid_to_tests[vuid_str].add(testname)
+                                #self.test_to_vuids[testname].append(vuid_str)
+                                if (vuid_str.startswith('VUID-')):
+                                    if (vuid_str[-5:-1].isdecimal()):
+                                        self.explicit_vuids.add(vuid_str)    # explicit end in 5 numeric chars
+                                    else:
+                                        self.implicit_vuids.add(vuid_str)
+                                elif (vuid_str.startswith('UNASSIGNED-')):
+                                    self.unassigned_vuids.add(vuid_str)
+                                else:
+                                    print("Unable to categorize VUID: %s" % vuid_str)
+                                    print("Confused while parsing VUIDs in test code - cannot proceed. (FIXME)")
+                                    exit(-1)
+        self.all_vuids = self.explicit_vuids | self.implicit_vuids | self.unassigned_vuids
+
+# Class to do consistency checking
+#
+class Consistency:
+    def __init__(self, all_json, all_checks, all_tests):
+        self.valid = all_json
+        self.checks = all_checks
+        self.tests = all_tests
+
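+        # When dealias_khr is set, remap KHR-suffixed VUIDs in the check and test sets to
+        # their core equivalents via khr_aliases before the consistency comparisons.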
+        if (dealias_khr):
+            dk = set()
+            for vuid in self.checks:
+                if vuid in khr_aliases:
+                    dk.add(khr_aliases[vuid])
+                else:
+                    dk.add(vuid)
+            self.checks = dk
+
+            dk = set()
+            for vuid in self.tests:
+                if vuid in khr_aliases:
+                    dk.add(khr_aliases[vuid])
+                else:
+                    dk.add(vuid)
+            self.tests = dk
+
+    # Report undefined VUIDs in source code
+    def undef_vuids_in_layer_code(self):
+        undef_set = self.checks - self.valid
+        undef_set.discard('VUID-Undefined') # don't report Undefined
+        if ignore_unassigned:
+            unassigned = set({uv for uv in undef_set if uv.startswith('UNASSIGNED-')})
+            undef_set = undef_set - unassigned
+        if (len(undef_set) > 0):
+            print("\nFollowing VUIDs found in layer code are not defined in validusage.json (%d):" % len(undef_set))
+            undef = list(undef_set)
+            undef.sort()
+            for vuid in undef:
+                print("    %s" % vuid)
+            return False
+        return True
+
+    # Report undefined VUIDs in tests
+    def undef_vuids_in_tests(self):
+        undef_set = self.tests - self.valid
+        undef_set.discard('VUID-Undefined') # don't report Undefined
+        if ignore_unassigned:
+            unassigned = set({uv for uv in undef_set if uv.startswith('UNASSIGNED-')})
+            undef_set = undef_set - unassigned
+        if (len(undef_set) > 0):
+            print("\nThe following VUIDs found in layer tests are not defined in validusage.json (%d):" % len(undef_set))
+            undef = list(undef_set)
+            undef.sort()
+            for vuid in undef:
+                print("    %s" % vuid)
+            return False
+        return True
+
+    # Report vuids in tests that are not in source
+    def vuids_tested_not_checked(self):
+        undef_set = self.tests - self.checks
+        undef_set.discard('VUID-Undefined') # don't report Undefined
+        if ignore_unassigned:
+            unassigned = set()
+            for vuid in undef_set:
+                if vuid.startswith('UNASSIGNED-'):
+                    unassigned.add(vuid)
+            undef_set = undef_set - unassigned
+        if (len(undef_set) > 0):
+            print("\nThe following VUIDs are found in tests but not checked in layer code (%d):" % len(undef_set))
+            undef = list(undef_set)
+            undef.sort()
+            for vuid in undef:
+                print("    %s" % vuid)
+            return False
+        return True
+
+    # TODO: Explicit checked VUIDs which have no test
+    # def explicit_vuids_checked_not_tested(self):
+
+
+# Class to output database in various flavors
+#
+class OutputDatabase:
+    def __init__(self, val_json, val_source, val_tests):
+        self.vj = val_json
+        self.vs = val_source
+        self.vt = val_tests
+        self.header_version = "/* THIS FILE IS GENERATED - DO NOT EDIT (scripts/vk_validation_stats.py) */"
+        self.header_version += "\n/* Vulkan specification version: %s */" % val_json.apiversion
+        self.header_preamble = """
+/*
+ * Vulkan
+ *
+ * Copyright (c) 2016-2019 Google Inc.
+ * Copyright (c) 2016-2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Tobin Ehlis <tobine@google.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ */
+
+#pragma once
+
+// Disable auto-formatting for generated file
+// clang-format off
+
+// Mapping from VUID string to the corresponding spec text
+typedef struct _vuid_spec_text_pair {
+    const char * vuid;
+    const char * spec_text;
+} vuid_spec_text_pair;
+
+static const vuid_spec_text_pair vuid_spec_text[] = {
+"""
+        self.header_postamble = """};
+"""
+        self.spec_url = "https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html"
+
+    def dump_txt(self, only_unimplemented = False):
+        print("\n Dumping database to text file: %s" % txt_filename)
+        with open (txt_filename, 'w') as txt:
+            txt.write("## VUID Database\n")
+            txt.write("## Format: VUID_NAME | CHECKED | TEST | TYPE | API/STRUCT | EXTENSION | VUID_TEXT\n##\n")
+            vuid_list = list(self.vj.all_vuids)
+            vuid_list.sort()
+            for vuid in vuid_list:
+                db_list = self.vj.vuid_db[vuid]
+                db_list.sort(key=operator.itemgetter('ext')) # sort list to ease diffs of output file
+                for db_entry in db_list:
+                    checked = 'N'
+                    if vuid in self.vs.all_vuids:
+                        if only_unimplemented:
+                            continue
+                        else:
+                            checked = 'Y'
+                    test = 'None'
+                    if vuid in self.vt.vuid_to_tests:
+                        test_list = list(self.vt.vuid_to_tests[vuid])
+                        test_list.sort()   # sort tests, for diff-ability
+                        sep = ', '
+                        test = sep.join(test_list)
+
+                    txt.write("%s | %s | %s | %s | %s | %s | %s\n" % (vuid, checked, test, db_entry['type'], db_entry['api'], db_entry['ext'], db_entry['text']))
+
+    def dump_csv(self, only_unimplemented = False):
+        print("\n Dumping database to csv file: %s" % csv_filename)
+        with open (csv_filename, 'w', newline='') as csvfile:
+            cw = csv.writer(csvfile)
+            cw.writerow(['VUID_NAME','CHECKED','TEST','TYPE','API/STRUCT','EXTENSION','VUID_TEXT'])
+            vuid_list = list(self.vj.all_vuids)
+            vuid_list.sort()
+            for vuid in vuid_list:
+                for db_entry in self.vj.vuid_db[vuid]:
+                    row = [vuid]
+                    if vuid in self.vs.all_vuids:
+                        if only_unimplemented:
+                            continue
+                        else:
+                            row.append('Y')
+                    else:
+                        row.append('N')
+                    test = 'None'
+                    if vuid in self.vt.vuid_to_tests:
+                        sep = ', '
+                        test = sep.join(self.vt.vuid_to_tests[vuid])
+                    row.append(test)
+                    row.append(db_entry['type'])
+                    row.append(db_entry['api'])
+                    row.append(db_entry['ext'])
+                    row.append(db_entry['text'])
+                    cw.writerow(row)
+
+    def dump_html(self, only_unimplemented = False):
+        print("\n Dumping database to html file: %s" % html_filename)
+        preamble = '<!DOCTYPE html>\n<html>\n<head>\n<style>\ntable, th, td {\n border: 1px solid black;\n border-collapse: collapse; \n}\n</style>\n<body>\n<h2>Valid Usage Database</h2>\n<font size="2" face="Arial">\n<table style="width:100%">\n'
+        headers = '<tr><th>VUID NAME</th><th>CHECKED</th><th>TEST</th><th>TYPE</th><th>API/STRUCT</th><th>EXTENSION</th><th>VUID TEXT</th></tr>\n'
+        with open (html_filename, 'w') as hfile:
+            hfile.write(preamble)
+            hfile.write(headers)
+            vuid_list = list(self.vj.all_vuids)
+            vuid_list.sort()
+            for vuid in vuid_list:
+                for db_entry in self.vj.vuid_db[vuid]:
+                    checked = '<span style="color:red;">N</span>'
+                    if vuid in self.vs.all_vuids:
+                        if only_unimplemented:
+                            continue
+                        else:
+                            checked = '<span style="color:limegreen;">Y</span>'
+                    hfile.write('<tr><th>%s</th>' % vuid)
+                    hfile.write('<th>%s</th>' % checked)
+                    test = 'None'
+                    if vuid in self.vt.vuid_to_tests:
+                        sep = ', '
+                        test = sep.join(self.vt.vuid_to_tests[vuid])
+                    hfile.write('<th>%s</th>' % test)
+                    hfile.write('<th>%s</th>' % db_entry['type'])
+                    hfile.write('<th>%s</th>' % db_entry['api'])
+                    hfile.write('<th>%s</th>' % db_entry['ext'])
+                    hfile.write('<th>%s</th></tr>\n' % db_entry['text'])
+            hfile.write('</table>\n</body>\n</html>\n')
+
+    def export_header(self):
+        if verbose_mode:
+            print("\n Exporting header file to: %s" % header_filename)
+        with open (header_filename, 'w') as hfile:
+            hfile.write(self.header_version)
+            hfile.write(self.header_preamble)
+            vuid_list = list(self.vj.all_vuids)
+            vuid_list.sort()
+            cmd_dict = {}
+            for vuid in vuid_list:
+                db_entry = self.vj.vuid_db[vuid][0]
+                db_text = db_entry['text'].strip(' ')
+                hfile.write('    {"%s", "%s (%s#%s)"},\n' % (vuid, db_text, self.spec_url, vuid))
+                # For multiply-defined VUIDs, include versions with extension appended
+                if len(self.vj.vuid_db[vuid]) > 1:
+                    for db_entry in self.vj.vuid_db[vuid]:
+                        hfile.write('    {"%s[%s]", "%s (%s#%s)"},\n' % (vuid, db_entry['ext'].strip(' '), db_text, self.spec_url, vuid))
+                if 'commandBuffer must be in the recording state' in db_text:
+                    cmd_dict[vuid] = db_text
+            hfile.write(self.header_postamble)
+
+            # Generate the information for validating recording-state VUIDs
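+            # The literal 'prefix##' is written into the generated macros; the C preprocessor
+            # pastes it onto each command name when VUID_CMD_ENUM_LIST(prefix) is expanded.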
+            cmd_prefix = 'prefix##'
+            cmd_regex = re.compile(r'VUID-vk(Cmd|End)(\w+)')
+            cmd_vuid_vector = ['    "VUID_Undefined"']
+            cmd_name_vector = [ '    "Command_Undefined"' ]
+            cmd_enum = ['    ' + cmd_prefix + 'NONE = 0']
+
+            cmd_ordinal = 1
+            for vuid, db_text in sorted(cmd_dict.items()):
+                cmd_match = cmd_regex.match(vuid)
+                if cmd_match.group(1) == "End":
+                    end = "END"
+                else:
+                    end = ""
+                cmd_name_vector.append('    "vk'+ cmd_match.group(1) + cmd_match.group(2) + '"')
+                cmd_name = cmd_prefix + end + cmd_match.group(2).upper()
+                cmd_enum.append('    {} = {}'.format(cmd_name, cmd_ordinal))
+                cmd_ordinal += 1
+                cmd_vuid_vector.append('    "{}"'.format(vuid))
+
+            hfile.write('\n// Defines to allow creating "must be recording" meta data\n')
+            cmd_enum.append('    {}RANGE_SIZE = {}'.format(cmd_prefix, cmd_ordinal))
+            cmd_enum_string = '#define VUID_CMD_ENUM_LIST(prefix)\\\n' + ',\\\n'.join(cmd_enum) + '\n\n'
+            hfile.write(cmd_enum_string)
+            cmd_name_list_string = '#define VUID_CMD_NAME_LIST\\\n' + ',\\\n'.join(cmd_name_vector) + '\n\n'
+            hfile.write(cmd_name_list_string)
+            vuid_vector_string = '#define VUID_MUST_BE_RECORDING_LIST\\\n' + ',\\\n'.join(cmd_vuid_vector) + '\n'
+            hfile.write(vuid_vector_string)
+
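+# Example invocation (illustrative; the first argument is the Vulkan-Headers registry
+# file 'validusage.json'):
+#   python vk_validation_stats.py validusage.json -summary -html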
+def main(argv):
+    global verbose_mode
+    global txt_filename
+    global csv_filename
+    global html_filename
+
+    run_consistency = False
+    report_unimplemented = False
+    get_vuid_status = ''
+    txt_out = False
+    csv_out = False
+    html_out = False
+    header_out = False
+    show_summary = False
+
+    if (1 > len(argv)):
+        printHelp()
+        sys.exit()
+
+    # Parse script args
+    json_filename = argv[0]
+    i = 1
+    while (i < len(argv)):
+        arg = argv[i]
+        i = i + 1
+        if (arg == '-c'):
+            run_consistency = True
+        elif (arg == '-vuid'):
+            get_vuid_status = argv[i]
+            i = i + 1
+        elif (arg == '-todo'):
+            report_unimplemented = True
+        elif (arg == '-text'):
+            txt_out = True
+            # Set filename if supplied, else use default
+            if i < len(argv) and not argv[i].startswith('-'):
+                txt_filename = argv[i]
+                i = i + 1
+        elif (arg == '-csv'):
+            csv_out = True
+            # Set filename if supplied, else use default
+            if i < len(argv) and not argv[i].startswith('-'):
+                csv_filename = argv[i]
+                i = i + 1
+        elif (arg == '-html'):
+            html_out = True
+            # Set filename if supplied, else use default
+            if i < len(argv) and not argv[i].startswith('-'):
+                html_filename = argv[i]
+                i = i + 1
+        elif (arg == '-export_header'):
+            header_out = True
+        elif (arg in ['-verbose']):
+            verbose_mode = True
+        elif (arg in ['-summary']):
+            show_summary = True
+        elif (arg in ['-help', '-h']):
+            printHelp()
+            sys.exit()
+        else:
+            print("Unrecognized argument: %s\n" % arg)
+            printHelp()
+            sys.exit()
+
+    result = 0 # Non-zero result indicates an error case
+
+    # Parse validusage json
+    val_json = ValidationJSON(json_filename)
+    val_json.read()
+    exp_json = len(val_json.explicit_vuids)
+    imp_json = len(val_json.implicit_vuids)
+    all_json = len(val_json.all_vuids)
+    if verbose_mode:
+        print("Found %d unique error vuids in validusage.json file." % all_json)
+        print("  %d explicit" % exp_json)
+        print("  %d implicit" % imp_json)
+        if len(val_json.duplicate_vuids) > 0:
+            print("%d VUIDs appear in validusage.json more than once." % len(val_json.duplicate_vuids))
+            for vuid in val_json.duplicate_vuids:
+                print("  %s" % vuid)
+                for ext in val_json.vuid_db[vuid]:
+                    print("    with extension: %s" % ext['ext'])
+
+    # Parse layer source files
+    val_source = ValidationSource(layer_source_files)
+    val_source.parse()
+    exp_checks = len(val_source.explicit_vuids)
+    imp_checks = len(val_source.implicit_vuids)
+    all_checks = len(val_source.vuid_count_dict.keys())
+    if verbose_mode:
+        print("Found %d unique vuid checks in layer source code." % all_checks)
+        print("  %d explicit" % exp_checks)
+        print("  %d implicit" % imp_checks)
+        print("  %d unassigned" % len(val_source.unassigned_vuids))
+        print("  %d checks are implemented more that once" % val_source.duplicated_checks)
+
+    # Parse test files
+    val_tests = ValidationTests(test_source_files)
+    val_tests.parse()
+    exp_tests = len(val_tests.explicit_vuids)
+    imp_tests = len(val_tests.implicit_vuids)
+    all_tests = len(val_tests.all_vuids)
+    if verbose_mode:
+        print("Found %d unique error vuids in test source code." % all_tests)
+        print("  %d explicit" % exp_tests)
+        print("  %d implicit" % imp_tests)
+        print("  %d unassigned" % len(val_tests.unassigned_vuids))
+
+    # Process stats
+    if show_summary:
+        print("\nValidation Statistics (using validusage.json version %s)" % val_json.apiversion)
+        print("  VUIDs defined in JSON file:  %04d explicit, %04d implicit, %04d total." % (exp_json, imp_json, all_json))
+        print("  VUIDs checked in layer code: %04d explicit, %04d implicit, %04d total." % (exp_checks, imp_checks, all_checks))
+        print("  VUIDs tested in layer tests: %04d explicit, %04d implicit, %04d total." % (exp_tests, imp_tests, all_tests))
+
+        print("\nVUID check coverage")
+        print("  Explicit VUIDs checked: %.1f%% (%d checked vs %d defined)" % ((100.0 * exp_checks / exp_json), exp_checks, exp_json))
+        print("  Implicit VUIDs checked: %.1f%% (%d checked vs %d defined)" % ((100.0 * imp_checks / imp_json), imp_checks, imp_json))
+        print("  Overall VUIDs checked:  %.1f%% (%d checked vs %d defined)" % ((100.0 * all_checks / all_json), all_checks, all_json))
+
+        print("\nVUID test coverage")
+        print("  Explicit VUIDs tested: %.1f%% (%d tested vs %d checks)" % ((100.0 * exp_tests / exp_checks), exp_tests, exp_checks))
+        print("  Implicit VUIDs tested: %.1f%% (%d tested vs %d checks)" % ((100.0 * imp_tests / imp_checks), imp_tests, imp_checks))
+        print("  Overall VUIDs tested:  %.1f%% (%d tested vs %d checks)" % ((100.0 * all_tests / all_checks), all_tests, all_checks))
+
+    # Report status of a single VUID
+    if len(get_vuid_status) > 1:
+        print("\n\nChecking status of <%s>" % get_vuid_status);
+        if get_vuid_status not in val_json.all_vuids:
+            print('  Not a valid VUID string.')
+        else:
+            if get_vuid_status in val_source.explicit_vuids:
+                print('  Implemented!')
+                line_list = val_source.vuid_count_dict[get_vuid_status]['file_line']
+                for line in line_list:
+                    print('    => %s' % line)
+            elif get_vuid_status in val_source.implicit_vuids:
+                print('  Implemented! (Implicit)')
+                line_list = val_source.vuid_count_dict[get_vuid_status]['file_line']
+                for line in line_list:
+                    print('    => %s' % line)
+            else:
+                print('  Not implemented.')
+            if get_vuid_status in val_tests.all_vuids:
+                print('  Has a test!')
+                test_list = val_tests.vuid_to_tests[get_vuid_status]
+                for test in test_list:
+                    print('    => %s' % test)
+            else:
+                print('  Not tested.')
+
+    # Report unimplemented explicit VUIDs
+    if report_unimplemented:
+        unim_explicit = val_json.explicit_vuids - val_source.explicit_vuids
+        print("\n\n%d explicit VUID checks remain unimplemented:" % len(unim_explicit))
+        ulist = list(unim_explicit)
+        ulist.sort()
+        for vuid in ulist:
+            print("  => %s" % vuid)
+
+    # Consistency tests
+    if run_consistency:
+        print("\n\nRunning consistency tests...")
+        con = Consistency(val_json.all_vuids, val_source.all_vuids, val_tests.all_vuids)
+        ok = con.undef_vuids_in_layer_code()
+        ok &= con.undef_vuids_in_tests()
+        ok &= con.vuids_tested_not_checked()
+
+        if ok:
+            print("  OK! No inconsistencies found.")
+
+    # Output database in requested format(s)
+    db_out = OutputDatabase(val_json, val_source, val_tests)
+    if txt_out:
+        db_out.dump_txt(report_unimplemented)
+    if csv_out:
+        db_out.dump_csv(report_unimplemented)
+    if html_out:
+        db_out.dump_html(report_unimplemented)
+    if header_out:
+        db_out.export_header()
+    return result
+
+if __name__ == "__main__":
+    sys.exit(main(sys.argv[1:]))
+
diff --git a/src/third_party/vulkan-validation-layers/src/tests/CMakeLists.txt b/src/third_party/vulkan-validation-layers/src/tests/CMakeLists.txt
new file mode 100644
index 0000000..a5c0174
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/CMakeLists.txt
@@ -0,0 +1,155 @@
+# ~~~
+# Copyright (c) 2014-2019 Valve Corporation
+# Copyright (c) 2014-2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ~~~
+
+# Needed to make structure definitions match with glslang libraries
+add_definitions(-DNV_EXTENSIONS -DAMD_EXTENSIONS)
+
+if(WIN32)
+    add_definitions(-DVK_USE_PLATFORM_WIN32_KHR -DWIN32_LEAN_AND_MEAN)
+    # Workaround for TR1 deprecation in Visual Studio 15.5 until Google Test is updated
+    add_definitions(-D_SILENCE_TR1_NAMESPACE_DEPRECATION_WARNING)
+    # Allow Windows to use multiprocessor compilation
+    add_compile_options(/MP)
+elseif(ANDROID)
+    add_definitions(-DVK_USE_PLATFORM_ANDROID_KHR)
+elseif(APPLE)
+    add_definitions(-DVK_USE_PLATFORM_MACOS_MVK)
+elseif(UNIX AND NOT APPLE) # i.e. Linux
+    if(BUILD_WSI_XCB_SUPPORT)
+        add_definitions(-DVK_USE_PLATFORM_XCB_KHR)
+    endif()
+
+    if(BUILD_WSI_XLIB_SUPPORT)
+        add_definitions(-DVK_USE_PLATFORM_XLIB_KHR)
+    endif()
+
+    if(BUILD_WSI_WAYLAND_SUPPORT)
+        add_definitions(-DVK_USE_PLATFORM_WAYLAND_KHR)
+    endif()
+else()
+    message(FATAL_ERROR "Unsupported Platform!")
+endif()
+
+if(WIN32)
+    file(COPY vk_layer_validation_tests.vcxproj.user DESTINATION ${CMAKE_BINARY_DIR}/tests)
+endif()
+
+if(WIN32)
+    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -D_CRT_SECURE_NO_WARNINGS -D_USE_MATH_DEFINES")
+
+    # If MSVC, disable some signed/unsigned mismatch warnings.
+    if(MSVC)
+        set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4267")
+    endif()
+
+else()
+    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")
+endif()
+
+set(LIBGLM_INCLUDE_DIR ${PROJECT_SOURCE_DIR}/libs)
+
+set(COMMON_CPP
+    vklayertests_pipeline_shader.cpp
+    vklayertests_buffer_image_memory_sampler.cpp
+    vklayertests_others.cpp
+    vklayertests_best_practices.cpp
+    vklayertests_descriptor_renderpass_framebuffer.cpp
+    vklayertests_command.cpp
+    vklayertests_imageless_framebuffer.cpp
+    vkpositivelayertests.cpp
+    vkrenderframework.cpp
+    vktestbinding.cpp
+    vktestframework.cpp
+    test_environment.cpp)
+
+if(NOT TARGET vulkan)
+    set(
+        CMAKE_PREFIX_PATH
+        ${CMAKE_PREFIX_PATH};${VULKAN_HEADERS_INSTALL_DIR};$ENV{VULKAN_HEADERS_INSTALL_DIR}
+        )
+endif()
+
+set_source_files_properties(${PROJECT_BINARY_DIR}/vk_safe_struct.cpp PROPERTIES GENERATED TRUE)
+add_executable(vk_layer_validation_tests
+               layer_validation_tests.cpp
+               ../layers/vk_format_utils.cpp
+               ../layers/convert_to_renderpass2.cpp
+               ../layers/generated/vk_safe_struct.cpp
+               ../layers/generated/lvt_function_pointers.cpp
+               ${COMMON_CPP})
+add_test(NAME vk_layer_validation_tests COMMAND vk_layer_validation_tests)
+add_dependencies(vk_layer_validation_tests VkLayer_khronos_validation VkLayer_khronos_validation-json)
+if(NOT GTEST_IS_STATIC_LIB)
+    set_target_properties(vk_layer_validation_tests PROPERTIES COMPILE_DEFINITIONS "GTEST_LINKED_AS_SHARED_LIBRARY=1")
+endif()
+# Note that there is no need to add GTEST directories here due to googletest exporting them via the gtest target.
+target_include_directories(vk_layer_validation_tests
+                           PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}
+                                  ${PROJECT_SOURCE_DIR}/layers
+                                  ${PROJECT_SOURCE_DIR}/layers/generated
+                                  ${GLSLANG_SPIRV_INCLUDE_DIR}
+                                  ${SPIRV_TOOLS_INCLUDE_DIR}
+                                  ${CMAKE_CURRENT_BINARY_DIR}
+                                  ${CMAKE_BINARY_DIR}
+                                  ${PROJECT_BINARY_DIR}
+                                  ${VulkanHeaders_INCLUDE_DIR}
+                                  ${PROJECT_BINARY_DIR}/layers)
+add_dependencies(vk_layer_validation_tests
+                 VkLayer_utils)
+
+# Specify target_link_libraries
+if(WIN32)
+    set_target_properties(gtest PROPERTIES FOLDER ${LAYERS_HELPER_FOLDER})
+    set_target_properties(gtest_main PROPERTIES FOLDER ${LAYERS_HELPER_FOLDER})
+    target_link_libraries(vk_layer_validation_tests
+                          PRIVATE gtest
+                                  gtest_main
+                                  ${GLSLANG_LIBRARIES})
+else()
+    target_compile_options(vk_layer_validation_tests PRIVATE "-Wno-sign-compare")
+    if(BUILD_WSI_XCB_SUPPORT OR BUILD_WSI_XLIB_SUPPORT)
+        target_link_libraries(vk_layer_validation_tests
+                              PRIVATE ${XCB_LIBRARIES}
+                                      ${X11_LIBRARIES}
+                                      gtest
+                                      gtest_main
+                                      dl
+                                      ${GLSLANG_LIBRARIES})
+    else()
+        target_link_libraries(vk_layer_validation_tests
+                              PRIVATE gtest
+                                      gtest_main
+                                      dl
+                                      ${GLSLANG_LIBRARIES})
+    endif()
+endif()
+
+if(WIN32)
+    # For Windows, copy necessary gtest DLLs to the right spot for the vk_layer_tests...
+    if(NOT GTEST_IS_STATIC_LIB)
+        file(TO_NATIVE_PATH ${PROJECT_BINARY_DIR}/external/googletest/googletest/$<CONFIG>/*.dll SRC_GTEST_DLLS)
+        file(TO_NATIVE_PATH ${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG> DST_GTEST_DLLS)
+        add_custom_command(TARGET vk_layer_validation_tests POST_BUILD
+                           COMMAND xcopy /Y /I ${SRC_GTEST_DLLS} ${DST_GTEST_DLLS})
+    endif()
+endif()
+
+if(INSTALL_TESTS)
+    install(TARGETS vk_layer_validation_tests DESTINATION ${CMAKE_INSTALL_BINDIR})
+endif()
+
+add_subdirectory(layers)
diff --git a/src/third_party/vulkan-validation-layers/src/tests/device_profiles/adreno_540.json b/src/third_party/vulkan-validation-layers/src/tests/device_profiles/adreno_540.json
new file mode 100644
index 0000000..36f495c
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/device_profiles/adreno_540.json
@@ -0,0 +1,1518 @@
+{
+    "$schema": "https://schema.khronos.org/vulkan/devsim_1_0_0.json#",
+    "comments": {
+        "info": "Vulkan Hardware Report generated via https://vulkan.gpuinfo.org",
+        "desc": "https://vulkan.gpuinfo.org/displayreport.php?id=2114"
+    },
+    "environment": {
+        "architecture": "arm",
+        "comment": "",
+        "name": "android",
+        "reportversion": "1.4",
+        "submitter": "",
+        "version": "8.0.0"
+    },
+    "extended": {
+        "devicefeatures2": [],
+        "deviceproperties2": [
+            {
+                "extension": "VK_KHR_push_descriptor",
+                "name": "maxPushDescriptors",
+                "value": "32"
+            }
+        ]
+    },
+    "platformdetails": {
+        "android.BuildID": "47.1.A.5.51",
+        "android.BuildVersionIncremental": "3742384072",
+        "android.ProductManufacturer": "Sony",
+        "android.ProductModel": "G8142"
+    },
+    "surfacecapabilites": {
+        "maxImageArrayLayers": 1,
+        "maxImageCount": 3,
+        "maxImageExtent": {
+            "height": 4096,
+            "width": 4096
+        },
+        "minImageCount": 2,
+        "minImageExtent": {
+            "height": 1,
+            "width": 1
+        },
+        "presentmodes": [
+            1,
+            2,
+            1000111000,
+            1000111001
+        ],
+        "supportedCompositeAlpha": 8,
+        "supportedTransforms": 271,
+        "supportedUsageFlags": 159,
+        "surfaceExtension": "VK_KHR_android_surface",
+        "surfaceformats": [
+            {
+                "colorSpace": 0,
+                "format": 37
+            },
+            {
+                "colorSpace": 0,
+                "format": 43
+            },
+            {
+                "colorSpace": 0,
+                "format": 4
+            }
+        ],
+        "validSurface": true
+    },
+    "VkPhysicalDeviceFeatures": {
+        "alphaToOne": 1,
+        "depthBiasClamp": 1,
+        "depthBounds": 0,
+        "depthClamp": 1,
+        "drawIndirectFirstInstance": 0,
+        "dualSrcBlend": 0,
+        "fillModeNonSolid": 1,
+        "fragmentStoresAndAtomics": 1,
+        "fullDrawIndexUint32": 1,
+        "geometryShader": 0,
+        "imageCubeArray": 1,
+        "independentBlend": 1,
+        "inheritedQueries": 1,
+        "largePoints": 1,
+        "logicOp": 0,
+        "multiDrawIndirect": 1,
+        "multiViewport": 0,
+        "occlusionQueryPrecise": 0,
+        "pipelineStatisticsQuery": 0,
+        "robustBufferAccess": 1,
+        "sampleRateShading": 1,
+        "samplerAnisotropy": 1,
+        "shaderClipDistance": 1,
+        "shaderCullDistance": 1,
+        "shaderFloat64": 0,
+        "shaderImageGatherExtended": 1,
+        "shaderInt16": 0,
+        "shaderInt64": 0,
+        "shaderResourceMinLod": 0,
+        "shaderResourceResidency": 0,
+        "shaderSampledImageArrayDynamicIndexing": 1,
+        "shaderStorageBufferArrayDynamicIndexing": 0,
+        "shaderStorageImageArrayDynamicIndexing": 0,
+        "shaderStorageImageExtendedFormats": 1,
+        "shaderStorageImageMultisample": 0,
+        "shaderStorageImageReadWithoutFormat": 0,
+        "shaderStorageImageWriteWithoutFormat": 1,
+        "shaderTessellationAndGeometryPointSize": 0,
+        "shaderUniformBufferArrayDynamicIndexing": 1,
+        "sparseBinding": 0,
+        "sparseResidency16Samples": 0,
+        "sparseResidency2Samples": 0,
+        "sparseResidency4Samples": 0,
+        "sparseResidency8Samples": 0,
+        "sparseResidencyAliased": 0,
+        "sparseResidencyBuffer": 0,
+        "sparseResidencyImage2D": 0,
+        "sparseResidencyImage3D": 0,
+        "tessellationShader": 0,
+        "textureCompressionASTC_LDR": 1,
+        "textureCompressionBC": 0,
+        "textureCompressionETC2": 1,
+        "variableMultisampleRate": 0,
+        "vertexPipelineStoresAndAtomics": 0,
+        "wideLines": 0
+    },
+    "VkPhysicalDeviceProperties": {
+        "apiVersion": 4194353,
+        "deviceID": 84148225,
+        "deviceName": "Adreno (TM) 540",
+        "deviceType": 1,
+        "driverVersion": 60111537,
+        "limits": {
+            "bufferImageGranularity": 1,
+            "discreteQueuePriorities": 3,
+            "framebufferColorSampleCounts": 7,
+            "framebufferDepthSampleCounts": 7,
+            "framebufferNoAttachmentsSampleCounts": 7,
+            "framebufferStencilSampleCounts": 7,
+            "lineWidthGranularity": 0,
+            "lineWidthRange": [
+                1,
+                1
+            ],
+            "maxBoundDescriptorSets": 4,
+            "maxClipDistances": 8,
+            "maxColorAttachments": 8,
+            "maxCombinedClipAndCullDistances": 8,
+            "maxComputeSharedMemorySize": 32768,
+            "maxComputeWorkGroupCount": [
+                65535,
+                65535,
+                65535
+            ],
+            "maxComputeWorkGroupInvocations": 1024,
+            "maxComputeWorkGroupSize": [
+                1024,
+                1024,
+                64
+            ],
+            "maxCullDistances": 8,
+            "maxDescriptorSetInputAttachments": 8,
+            "maxDescriptorSetSampledImages": 768,
+            "maxDescriptorSetSamplers": 96,
+            "maxDescriptorSetStorageBuffers": 24,
+            "maxDescriptorSetStorageBuffersDynamic": 4,
+            "maxDescriptorSetStorageImages": 24,
+            "maxDescriptorSetUniformBuffers": 84,
+            "maxDescriptorSetUniformBuffersDynamic": 8,
+            "maxDrawIndexedIndexValue": 4294967295,
+            "maxDrawIndirectCount": 4294967295,
+            "maxFragmentCombinedOutputResources": 72,
+            "maxFragmentDualSrcAttachments": 0,
+            "maxFragmentInputComponents": 128,
+            "maxFragmentOutputAttachments": 8,
+            "maxFramebufferHeight": 16384,
+            "maxFramebufferLayers": 2048,
+            "maxFramebufferWidth": 16384,
+            "maxGeometryInputComponents": 0,
+            "maxGeometryOutputComponents": 0,
+            "maxGeometryOutputVertices": 0,
+            "maxGeometryShaderInvocations": 0,
+            "maxGeometryTotalOutputComponents": 0,
+            "maxImageArrayLayers": 2048,
+            "maxImageDimension1D": 16384,
+            "maxImageDimension2D": 16384,
+            "maxImageDimension3D": 2048,
+            "maxImageDimensionCube": 16384,
+            "maxInterpolationOffset": 0.4375,
+            "maxMemoryAllocationCount": 4096,
+            "maxPerStageDescriptorInputAttachments": 8,
+            "maxPerStageDescriptorSampledImages": 128,
+            "maxPerStageDescriptorSamplers": 16,
+            "maxPerStageDescriptorStorageBuffers": 24,
+            "maxPerStageDescriptorStorageImages": 4,
+            "maxPerStageDescriptorUniformBuffers": 14,
+            "maxPerStageResources": 158,
+            "maxPushConstantsSize": 128,
+            "maxSampleMaskWords": 1,
+            "maxSamplerAllocationCount": 4000,
+            "maxSamplerAnisotropy": 16,
+            "maxSamplerLodBias": 15.99609375,
+            "maxStorageBufferRange": 2147483647,
+            "maxTessellationControlPerPatchOutputComponents": 0,
+            "maxTessellationControlPerVertexInputComponents": 0,
+            "maxTessellationControlPerVertexOutputComponents": 0,
+            "maxTessellationControlTotalOutputComponents": 0,
+            "maxTessellationEvaluationInputComponents": 0,
+            "maxTessellationEvaluationOutputComponents": 0,
+            "maxTessellationGenerationLevel": 0,
+            "maxTessellationPatchSize": 0,
+            "maxTexelBufferElements": 65536,
+            "maxTexelGatherOffset": 31,
+            "maxTexelOffset": 7,
+            "maxUniformBufferRange": 65536,
+            "maxVertexInputAttributeOffset": 4096,
+            "maxVertexInputAttributes": 32,
+            "maxVertexInputBindingStride": 2048,
+            "maxVertexInputBindings": 32,
+            "maxVertexOutputComponents": 128,
+            "maxViewportDimensions": [
+                16384,
+                16384
+            ],
+            "maxViewports": 1,
+            "minInterpolationOffset": -0.5,
+            "minMemoryMapAlignment": 64,
+            "minStorageBufferOffsetAlignment": 64,
+            "minTexelBufferOffsetAlignment": 64,
+            "minTexelGatherOffset": -32,
+            "minTexelOffset": -8,
+            "minUniformBufferOffsetAlignment": 64,
+            "mipmapPrecisionBits": 8,
+            "nonCoherentAtomSize": 1,
+            "optimalBufferCopyOffsetAlignment": 64,
+            "optimalBufferCopyRowPitchAlignment": 64,
+            "pointSizeGranularity": 0.0625,
+            "pointSizeRange": [
+                1,
+                1023
+            ],
+            "sampledImageColorSampleCounts": 7,
+            "sampledImageDepthSampleCounts": 7,
+            "sampledImageIntegerSampleCounts": 7,
+            "sampledImageStencilSampleCounts": 7,
+            "sparseAddressSpaceSize": 0,
+            "standardSampleLocations": 1,
+            "storageImageSampleCounts": 1,
+            "strictLines": 1,
+            "subPixelInterpolationOffsetBits": 4,
+            "subPixelPrecisionBits": 4,
+            "subTexelPrecisionBits": 8,
+            "timestampComputeAndGraphics": 1,
+            "timestampPeriod": 52.0833320618,
+            "viewportBoundsRange": [
+                -32768,
+                32767
+            ],
+            "viewportSubPixelBits": 0
+        },
+        "pipelineCacheUUID": [
+            177,
+            58,
+            149,
+            3,
+            67,
+            81,
+            0,
+            0,
+            0,
+            0,
+            1,
+            0,
+            4,
+            5,
+            0,
+            0
+        ],
+        "sparseProperties": {
+            "residencyAlignedMipSize": 0,
+            "residencyNonResidentStrict": 0,
+            "residencyStandard2DBlockShape": 0,
+            "residencyStandard2DMultisampleBlockShape": 0,
+            "residencyStandard3DBlockShape": 0
+        },
+        "vendorID": 20803
+    },
+    "VkPhysicalDeviceMemoryProperties": {
+        "memoryHeaps": [
+            {
+                "flags": 1,
+                "size": 3912097792
+            }
+        ],
+        "memoryTypes": [
+            {
+                "heapIndex": 0,
+                "propertyFlags": 1
+            },
+            {
+                "heapIndex": 0,
+                "propertyFlags": 11
+            },
+            {
+                "heapIndex": 0,
+                "propertyFlags": 15
+            },
+            {
+                "heapIndex": 0,
+                "propertyFlags": 1
+            },
+            {
+                "heapIndex": 0,
+                "propertyFlags": 7
+            }
+        ]
+    },
+    "ArrayOfVkExtensionProperties": [
+        {
+            "extensionName": "VK_KHR_incremental_present",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_shared_presentable_image",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_GOOGLE_display_timing",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_swapchain",
+            "specVersion": 68
+        },
+        {
+            "extensionName": "VK_KHR_maintenance1",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_variable_pointers",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_storage_buffer_storage_class",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_relaxed_block_layout",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_get_memory_requirements2",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_dedicated_allocation",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_memory",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_memory_fd",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_semaphore",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_semaphore_fd",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_fence",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_fence_fd",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_bind_memory2",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_shader_draw_parameters",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_push_descriptor",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_descriptor_update_template",
+            "specVersion": 1
+        }
+    ],
+    "ArrayOfVkLayerProperties": [],
+    "ArrayOfVkQueueFamilyProperties": [
+        {
+            "minImageTransferGranularity": {
+                "depth": 1,
+                "height": 1,
+                "width": 1
+            },
+            "queueCount": 3,
+            "queueFlags": 3,
+            "timestampValidBits": 48
+        }
+    ],
+    "ArrayOfVkFormatProperties": [
+        {
+            "formatID": 1,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 2,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 3,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 4,
+            "linearTilingFeatures": 56705,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 5,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 6,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 7,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 8,
+            "linearTilingFeatures": 56705,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 9,
+            "linearTilingFeatures": 56705,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 10,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 11,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 12,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 13,
+            "linearTilingFeatures": 52353,
+            "optimalTilingFeatures": 52353,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 14,
+            "linearTilingFeatures": 52353,
+            "optimalTilingFeatures": 52353,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 15,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 16,
+            "linearTilingFeatures": 56705,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 17,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 18,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 19,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 20,
+            "linearTilingFeatures": 52353,
+            "optimalTilingFeatures": 52353,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 21,
+            "linearTilingFeatures": 52353,
+            "optimalTilingFeatures": 52353,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 22,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 23,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 24,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 25,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 26,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 27,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 28,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 29,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 30,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 31,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 32,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 33,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 34,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 35,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 36,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 37,
+            "linearTilingFeatures": 449923,
+            "optimalTilingFeatures": 449923,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 38,
+            "linearTilingFeatures": 54275,
+            "optimalTilingFeatures": 54275,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 39,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 40,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 41,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 42,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 43,
+            "linearTilingFeatures": 56705,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 44,
+            "linearTilingFeatures": 56705,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 45,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 46,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 47,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 48,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 49,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 50,
+            "linearTilingFeatures": 56705,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 51,
+            "linearTilingFeatures": 449923,
+            "optimalTilingFeatures": 449923,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 52,
+            "linearTilingFeatures": 54275,
+            "optimalTilingFeatures": 54275,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 53,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 54,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 55,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 56,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 57,
+            "linearTilingFeatures": 56705,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 58,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 59,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 60,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 61,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 62,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 63,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 64,
+            "linearTilingFeatures": 56705,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 65,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 66,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 67,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 68,
+            "linearTilingFeatures": 52353,
+            "optimalTilingFeatures": 52353,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 69,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 70,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 71,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 72,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 73,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 74,
+            "linearTilingFeatures": 52353,
+            "optimalTilingFeatures": 52353,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 75,
+            "linearTilingFeatures": 52353,
+            "optimalTilingFeatures": 52353,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 76,
+            "linearTilingFeatures": 56705,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 77,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 78,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 79,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 80,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 81,
+            "linearTilingFeatures": 52353,
+            "optimalTilingFeatures": 52353,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 82,
+            "linearTilingFeatures": 52353,
+            "optimalTilingFeatures": 52353,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 83,
+            "linearTilingFeatures": 56705,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 84,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 85,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 86,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 87,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 88,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 89,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 90,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 91,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 92,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 93,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 94,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 95,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 96,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 97,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 98,
+            "linearTilingFeatures": 52359,
+            "optimalTilingFeatures": 52359,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 99,
+            "linearTilingFeatures": 52359,
+            "optimalTilingFeatures": 52359,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 100,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 101,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 102,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 103,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 104,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 105,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 106,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 107,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 108,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 109,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 110,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 111,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 112,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 113,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 114,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 115,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 116,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 117,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 118,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 119,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 120,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 121,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 122,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 123,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 124,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 50689,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 125,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54785,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 126,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 50177,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 127,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 128,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 129,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54785,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 130,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 131,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 132,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 133,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 134,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 135,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 136,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 137,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 138,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 139,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 140,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 141,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 142,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 143,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 144,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 145,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 146,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 147,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 148,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 149,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 150,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 151,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 152,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 153,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 154,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 155,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 156,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 157,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 158,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 159,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 160,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 161,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 162,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 163,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 164,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 165,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 166,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 167,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 168,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 169,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 170,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 171,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 172,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 173,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 174,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 175,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 176,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 177,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 178,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 179,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 180,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 181,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 182,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 183,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 184,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        }
+    ]
+}
\ No newline at end of file
diff --git a/src/third_party/vulkan-validation-layers/src/tests/device_profiles/amd_radeon_rx_480.json b/src/third_party/vulkan-validation-layers/src/tests/device_profiles/amd_radeon_rx_480.json
new file mode 100644
index 0000000..2eaa94b
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/device_profiles/amd_radeon_rx_480.json
@@ -0,0 +1,1822 @@
+{
+    "$schema": "https://schema.khronos.org/vulkan/devsim_1_0_0.json#",
+    "comments": {
+        "info": "Vulkan Hardware Report generated via https://vulkan.gpuinfo.org",
+        "desc": "https://vulkan.gpuinfo.org/displayreport.php?id=3077"
+    },
+    "environment": {
+        "architecture": "x86_64",
+        "comment": "",
+        "name": "arch",
+        "reportversion": "1.5",
+        "submitter": "",
+        "version": "unknown"
+    },
+    "extended": {
+        "devicefeatures2": [
+            {
+                "extension": "VK_KHR_multiview",
+                "name": "multiview",
+                "supported": true
+            },
+            {
+                "extension": "VK_KHR_multiview",
+                "name": "multiviewGeometryShader",
+                "supported": false
+            },
+            {
+                "extension": "VK_KHR_multiview",
+                "name": "multiviewTessellationShader",
+                "supported": true
+            },
+            {
+                "extension": "VK_KHR_16bit_storage",
+                "name": "storageBuffer16BitAccess",
+                "supported": true
+            },
+            {
+                "extension": "VK_KHR_16bit_storage",
+                "name": "uniformAndStorageBuffer16BitAccess",
+                "supported": true
+            },
+            {
+                "extension": "VK_KHR_16bit_storage",
+                "name": "storagePushConstant16",
+                "supported": false
+            },
+            {
+                "extension": "VK_KHR_16bit_storage",
+                "name": "storageInputOutput16",
+                "supported": false
+            },
+            {
+                "extension": "VK_KHR_shader_draw_parameters",
+                "name": "shaderDrawParameters",
+                "supported": false
+            }
+        ],
+        "deviceproperties2": [
+            {
+                "extension": "VK_KHR_multiview",
+                "name": "maxMultiviewViewCount",
+                "value": "6"
+            },
+            {
+                "extension": "VK_KHR_multiview",
+                "name": "maxMultiviewInstanceIndex",
+                "value": "4294967295"
+            },
+            {
+                "extension": "VK_EXT_sampler_filter_minmax",
+                "name": "filterMinmaxSingleComponentFormats",
+                "value": "true"
+            },
+            {
+                "extension": "VK_EXT_sampler_filter_minmax",
+                "name": "filterMinmaxImageComponentMapping",
+                "value": "false"
+            },
+            {
+                "extension": "VK_EXT_sample_locations",
+                "name": "sampleLocationSampleCounts",
+                "value": "15"
+            },
+            {
+                "extension": "VK_EXT_sample_locations",
+                "name": "maxSampleLocationGridSize.width",
+                "value": "2"
+            },
+            {
+                "extension": "VK_EXT_sample_locations",
+                "name": "maxSampleLocationGridSize.height",
+                "value": "2"
+            },
+            {
+                "extension": "VK_EXT_sample_locations",
+                "name": "sampleLocationCoordinateRange[0]",
+                "value": "0"
+            },
+            {
+                "extension": "VK_EXT_sample_locations",
+                "name": "sampleLocationCoordinateRange[1]",
+                "value": "1"
+            },
+            {
+                "extension": "VK_EXT_sample_locations",
+                "name": "sampleLocationSubPixelBits",
+                "value": "4"
+            },
+            {
+                "extension": "VK_EXT_sample_locations",
+                "name": "variableSampleLocations",
+                "value": "true"
+            },
+            {
+                "extension": "VK_KHR_maintenance3",
+                "name": "maxPerSetDescriptors",
+                "value": "4294967295"
+            },
+            {
+                "extension": "VK_KHR_maintenance3",
+                "name": "maxMemoryAllocationSize",
+                "value": "2147483648"
+            }
+        ]
+    },
+    "instance": {
+        "extensions": [
+            {
+                "extensionName": "VK_KHR_surface",
+                "specVersion": 25
+            },
+            {
+                "extensionName": "VK_KHR_xcb_surface",
+                "specVersion": 6
+            },
+            {
+                "extensionName": "VK_KHR_xlib_surface",
+                "specVersion": 6
+            },
+            {
+                "extensionName": "VK_KHR_wayland_surface",
+                "specVersion": 6
+            },
+            {
+                "extensionName": "VK_KHR_get_physical_device_properties2",
+                "specVersion": 1
+            },
+            {
+                "extensionName": "VK_KHR_get_surface_capabilities2",
+                "specVersion": 1
+            },
+            {
+                "extensionName": "VK_KHR_external_memory_capabilities",
+                "specVersion": 1
+            },
+            {
+                "extensionName": "VK_KHR_device_group_creation",
+                "specVersion": 1
+            },
+            {
+                "extensionName": "VK_KHR_external_semaphore_capabilities",
+                "specVersion": 1
+            },
+            {
+                "extensionName": "VK_KHR_external_fence_capabilities",
+                "specVersion": 1
+            },
+            {
+                "extensionName": "VK_EXT_debug_report",
+                "specVersion": 9
+            },
+            {
+                "extensionName": "VK_EXT_debug_utils",
+                "specVersion": 1
+            }
+        ],
+        "layers": [
+            {
+                "description": "Steam Overlay Layer",
+                "extensions": [],
+                "implementationVersion": 1,
+                "layerName": "VK_LAYER_VALVE_steam_overlay_64",
+                "specVersion": 4194307
+            },
+            {
+                "description": "Steam Overlay Layer",
+                "extensions": [],
+                "implementationVersion": 1,
+                "layerName": "VK_LAYER_VALVE_steam_overlay_32",
+                "specVersion": 4194307
+            },
+            {
+                "description": "LunarG Standard Validation Layer",
+                "extensions": [],
+                "implementationVersion": 1,
+                "layerName": "VK_LAYER_LUNARG_standard_validation",
+                "specVersion": 4194374
+            }
+        ]
+    },
+    "platformdetails": [],
+    "surfacecapabilites": {
+        "maxImageArrayLayers": 1,
+        "maxImageCount": 16,
+        "maxImageExtent": {
+            "height": 755,
+            "width": 927
+        },
+        "minImageCount": 2,
+        "minImageExtent": {
+            "height": 755,
+            "width": 927
+        },
+        "presentmodes": [
+            0,
+            1,
+            2
+        ],
+        "supportedCompositeAlpha": 1,
+        "supportedTransforms": 1,
+        "supportedUsageFlags": 159,
+        "surfaceExtension": "VK_KHR_xcb_surface",
+        "surfaceformats": [
+            {
+                "colorSpace": 0,
+                "format": 44
+            },
+            {
+                "colorSpace": 0,
+                "format": 50
+            }
+        ],
+        "validSurface": true
+    },
+    "VkPhysicalDeviceFeatures": {
+        "alphaToOne": 0,
+        "depthBiasClamp": 1,
+        "depthBounds": 1,
+        "depthClamp": 1,
+        "drawIndirectFirstInstance": 1,
+        "dualSrcBlend": 1,
+        "fillModeNonSolid": 1,
+        "fragmentStoresAndAtomics": 1,
+        "fullDrawIndexUint32": 1,
+        "geometryShader": 1,
+        "imageCubeArray": 1,
+        "independentBlend": 1,
+        "inheritedQueries": 1,
+        "largePoints": 1,
+        "logicOp": 1,
+        "multiDrawIndirect": 1,
+        "multiViewport": 1,
+        "occlusionQueryPrecise": 1,
+        "pipelineStatisticsQuery": 1,
+        "robustBufferAccess": 1,
+        "sampleRateShading": 1,
+        "samplerAnisotropy": 1,
+        "shaderClipDistance": 1,
+        "shaderCullDistance": 1,
+        "shaderFloat64": 1,
+        "shaderImageGatherExtended": 1,
+        "shaderInt16": 0,
+        "shaderInt64": 1,
+        "shaderResourceMinLod": 0,
+        "shaderResourceResidency": 0,
+        "shaderSampledImageArrayDynamicIndexing": 1,
+        "shaderStorageBufferArrayDynamicIndexing": 1,
+        "shaderStorageImageArrayDynamicIndexing": 1,
+        "shaderStorageImageExtendedFormats": 1,
+        "shaderStorageImageMultisample": 1,
+        "shaderStorageImageReadWithoutFormat": 1,
+        "shaderStorageImageWriteWithoutFormat": 1,
+        "shaderTessellationAndGeometryPointSize": 1,
+        "shaderUniformBufferArrayDynamicIndexing": 1,
+        "sparseBinding": 0,
+        "sparseResidency16Samples": 0,
+        "sparseResidency2Samples": 0,
+        "sparseResidency4Samples": 0,
+        "sparseResidency8Samples": 0,
+        "sparseResidencyAliased": 0,
+        "sparseResidencyBuffer": 0,
+        "sparseResidencyImage2D": 0,
+        "sparseResidencyImage3D": 0,
+        "tessellationShader": 1,
+        "textureCompressionASTC_LDR": 0,
+        "textureCompressionBC": 1,
+        "textureCompressionETC2": 0,
+        "variableMultisampleRate": 1,
+        "vertexPipelineStoresAndAtomics": 1,
+        "wideLines": 1
+    },
+    "VkPhysicalDeviceProperties": {
+        "apiVersion": 4198470,
+        "deviceID": 26591,
+        "deviceName": "AMD Radeon (TM) RX 480 Graphics",
+        "deviceType": 2,
+        "driverVersion": 8388635,
+        "limits": {
+            "bufferImageGranularity": 1,
+            "discreteQueuePriorities": 2,
+            "framebufferColorSampleCounts": 15,
+            "framebufferDepthSampleCounts": 15,
+            "framebufferNoAttachmentsSampleCounts": 15,
+            "framebufferStencilSampleCounts": 15,
+            "lineWidthGranularity": 0.125,
+            "lineWidthRange": [
+                0,
+                8191.875
+            ],
+            "maxBoundDescriptorSets": 32,
+            "maxClipDistances": 8,
+            "maxColorAttachments": 8,
+            "maxCombinedClipAndCullDistances": 8,
+            "maxComputeSharedMemorySize": 32768,
+            "maxComputeWorkGroupCount": [
+                65535,
+                65535,
+                65535
+            ],
+            "maxComputeWorkGroupInvocations": 1024,
+            "maxComputeWorkGroupSize": [
+                1024,
+                1024,
+                1024
+            ],
+            "maxCullDistances": 8,
+            "maxDescriptorSetInputAttachments": 4294967295,
+            "maxDescriptorSetSampledImages": 4294967295,
+            "maxDescriptorSetSamplers": 4294967295,
+            "maxDescriptorSetStorageBuffers": 4294967295,
+            "maxDescriptorSetStorageBuffersDynamic": 8,
+            "maxDescriptorSetStorageImages": 4294967295,
+            "maxDescriptorSetUniformBuffers": 4294967295,
+            "maxDescriptorSetUniformBuffersDynamic": 8,
+            "maxDrawIndexedIndexValue": 4294967295,
+            "maxDrawIndirectCount": 4294967295,
+            "maxFragmentCombinedOutputResources": 4294967295,
+            "maxFragmentDualSrcAttachments": 1,
+            "maxFragmentInputComponents": 128,
+            "maxFragmentOutputAttachments": 8,
+            "maxFramebufferHeight": 16384,
+            "maxFramebufferLayers": 2048,
+            "maxFramebufferWidth": 16384,
+            "maxGeometryInputComponents": 128,
+            "maxGeometryOutputComponents": 128,
+            "maxGeometryOutputVertices": 1024,
+            "maxGeometryShaderInvocations": 127,
+            "maxGeometryTotalOutputComponents": 16384,
+            "maxImageArrayLayers": 2048,
+            "maxImageDimension1D": 16384,
+            "maxImageDimension2D": 16384,
+            "maxImageDimension3D": 2048,
+            "maxImageDimensionCube": 16384,
+            "maxInterpolationOffset": 2,
+            "maxMemoryAllocationCount": 4294967295,
+            "maxPerStageDescriptorInputAttachments": 4294967295,
+            "maxPerStageDescriptorSampledImages": 4294967295,
+            "maxPerStageDescriptorSamplers": 4294967295,
+            "maxPerStageDescriptorStorageBuffers": 4294967295,
+            "maxPerStageDescriptorStorageImages": 4294967295,
+            "maxPerStageDescriptorUniformBuffers": 4294967295,
+            "maxPerStageResources": 4294967295,
+            "maxPushConstantsSize": 128,
+            "maxSampleMaskWords": 1,
+            "maxSamplerAllocationCount": 1048576,
+            "maxSamplerAnisotropy": 16,
+            "maxSamplerLodBias": 15.99609375,
+            "maxStorageBufferRange": 4294967295,
+            "maxTessellationControlPerPatchOutputComponents": 120,
+            "maxTessellationControlPerVertexInputComponents": 128,
+            "maxTessellationControlPerVertexOutputComponents": 128,
+            "maxTessellationControlTotalOutputComponents": 4096,
+            "maxTessellationEvaluationInputComponents": 128,
+            "maxTessellationEvaluationOutputComponents": 128,
+            "maxTessellationGenerationLevel": 64,
+            "maxTessellationPatchSize": 32,
+            "maxTexelBufferElements": 4294967295,
+            "maxTexelGatherOffset": 31,
+            "maxTexelOffset": 63,
+            "maxUniformBufferRange": 4294967295,
+            "maxVertexInputAttributeOffset": 4294967295,
+            "maxVertexInputAttributes": 64,
+            "maxVertexInputBindingStride": 16383,
+            "maxVertexInputBindings": 32,
+            "maxVertexOutputComponents": 128,
+            "maxViewportDimensions": [
+                16384,
+                16384
+            ],
+            "maxViewports": 16,
+            "minInterpolationOffset": -2,
+            "minMemoryMapAlignment": 64,
+            "minStorageBufferOffsetAlignment": 4,
+            "minTexelBufferOffsetAlignment": 1,
+            "minTexelGatherOffset": -32,
+            "minTexelOffset": -64,
+            "minUniformBufferOffsetAlignment": 16,
+            "mipmapPrecisionBits": 8,
+            "nonCoherentAtomSize": 128,
+            "optimalBufferCopyOffsetAlignment": 1,
+            "optimalBufferCopyRowPitchAlignment": 1,
+            "pointSizeGranularity": 0.125,
+            "pointSizeRange": [
+                0,
+                8191.875
+            ],
+            "sampledImageColorSampleCounts": 15,
+            "sampledImageDepthSampleCounts": 15,
+            "sampledImageIntegerSampleCounts": 15,
+            "sampledImageStencilSampleCounts": 15,
+            "sparseAddressSpaceSize": 64424509440,
+            "standardSampleLocations": 1,
+            "storageImageSampleCounts": 15,
+            "strictLines": 0,
+            "subPixelInterpolationOffsetBits": 8,
+            "subPixelPrecisionBits": 8,
+            "subTexelPrecisionBits": 8,
+            "timestampComputeAndGraphics": 1,
+            "timestampPeriod": 40,
+            "viewportBoundsRange": [
+                -32768,
+                32767
+            ],
+            "viewportSubPixelBits": 8
+        },
+        "pipelineCacheUUID": [
+            2,
+            16,
+            0,
+            0,
+            223,
+            103,
+            0,
+            0,
+            144,
+            1,
+            0,
+            0,
+            0,
+            0,
+            0,
+            0
+        ],
+        "sparseProperties": {
+            "residencyAlignedMipSize": 0,
+            "residencyNonResidentStrict": 1,
+            "residencyStandard2DBlockShape": 1,
+            "residencyStandard2DMultisampleBlockShape": 0,
+            "residencyStandard3DBlockShape": 0
+        },
+        "subgroupProperties": {
+            "quadOperationsInAllStages": true,
+            "subgroupSize": 64,
+            "supportedOperations": 139,
+            "supportedStages": 63
+        },
+        "vendorID": 4098
+    },
+    "VkPhysicalDeviceMemoryProperties": {
+        "memoryHeaps": [
+            {
+                "flags": 3,
+                "size": 4026531840
+            },
+            {
+                "flags": 0,
+                "size": 4294967296
+            },
+            {
+                "flags": 3,
+                "size": 268435456
+            }
+        ],
+        "memoryTypes": [
+            {
+                "heapIndex": 0,
+                "propertyFlags": 1
+            },
+            {
+                "heapIndex": 1,
+                "propertyFlags": 6
+            },
+            {
+                "heapIndex": 2,
+                "propertyFlags": 7
+            },
+            {
+                "heapIndex": 1,
+                "propertyFlags": 14
+            }
+        ]
+    },
+    "ArrayOfVkExtensionProperties": [
+        {
+            "extensionName": "VK_KHR_sampler_mirror_clamp_to_edge",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_shader_draw_parameters",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_swapchain",
+            "specVersion": 70
+        },
+        {
+            "extensionName": "VK_KHR_maintenance1",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_maintenance2",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_maintenance3",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_relaxed_block_layout",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_dedicated_allocation",
+            "specVersion": 3
+        },
+        {
+            "extensionName": "VK_KHR_descriptor_update_template",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_memory",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_memory_fd",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_device_group",
+            "specVersion": 3
+        },
+        {
+            "extensionName": "VK_KHX_device_group",
+            "specVersion": 2
+        },
+        {
+            "extensionName": "VK_KHR_bind_memory2",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_semaphore",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_semaphore_fd",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_AMD_rasterization_order",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_AMD_shader_trinary_minmax",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_AMD_shader_explicit_vertex_parameter",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_AMD_gcn_shader",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_AMD_draw_indirect_count",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_AMD_negative_viewport_height",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_AMD_shader_info",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_EXT_sampler_filter_minmax",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_AMD_shader_fragment_mask",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_AMD_texture_gather_bias_lod",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_AMD_mixed_attachment_samples",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_EXT_sample_locations",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_EXT_shader_subgroup_vote",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_16bit_storage",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_storage_buffer_storage_class",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_AMD_gpa_interface",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_EXT_shader_subgroup_ballot",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_EXT_shader_stencil_export",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_EXT_shader_viewport_index_layer",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_get_memory_requirements2",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_image_format_list",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_multiview",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_fence",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_fence_fd",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_EXT_global_priority",
+            "specVersion": 2
+        },
+        {
+            "extensionName": "VK_AMD_buffer_marker",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_AMD_shader_image_load_store_lod",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_EXT_external_memory_host",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_EXT_depth_range_unrestricted",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_AMD_shader_core_properties",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_EXT_queue_family_foreign",
+            "specVersion": 1
+        }
+    ],
+    "ArrayOfVkLayerProperties": [],
+    "ArrayOfVkQueueFamilyProperties": [
+        {
+            "minImageTransferGranularity": {
+                "depth": 1,
+                "height": 1,
+                "width": 1
+            },
+            "queueCount": 1,
+            "queueFlags": 15,
+            "timestampValidBits": 64
+        },
+        {
+            "minImageTransferGranularity": {
+                "depth": 1,
+                "height": 1,
+                "width": 1
+            },
+            "queueCount": 8,
+            "queueFlags": 14,
+            "timestampValidBits": 64
+        },
+        {
+            "minImageTransferGranularity": {
+                "depth": 8,
+                "height": 8,
+                "width": 8
+            },
+            "queueCount": 2,
+            "queueFlags": 12,
+            "timestampValidBits": 64
+        }
+    ],
+    "ArrayOfVkFormatProperties": [
+        {
+            "formatID": 1,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 119809,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 2,
+            "linearTilingFeatures": 122243,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 3,
+            "linearTilingFeatures": 122243,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 4,
+            "linearTilingFeatures": 122243,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 5,
+            "linearTilingFeatures": 122243,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 6,
+            "linearTilingFeatures": 122243,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 7,
+            "linearTilingFeatures": 122243,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 8,
+            "linearTilingFeatures": 122243,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 9,
+            "linearTilingFeatures": 122243,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 10,
+            "linearTilingFeatures": 122243,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 11,
+            "linearTilingFeatures": 5120,
+            "optimalTilingFeatures": 5120,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 12,
+            "linearTilingFeatures": 5120,
+            "optimalTilingFeatures": 5120,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 13,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 14,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 15,
+            "linearTilingFeatures": 122241,
+            "optimalTilingFeatures": 122241,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 16,
+            "linearTilingFeatures": 122243,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 17,
+            "linearTilingFeatures": 122243,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 18,
+            "linearTilingFeatures": 5120,
+            "optimalTilingFeatures": 5120,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 19,
+            "linearTilingFeatures": 5120,
+            "optimalTilingFeatures": 5120,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 20,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 21,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 22,
+            "linearTilingFeatures": 122241,
+            "optimalTilingFeatures": 122241,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 23,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 24,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 25,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 26,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 27,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 28,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 29,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 30,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 31,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 32,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 33,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 34,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 35,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 36,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 37,
+            "linearTilingFeatures": 122247,
+            "optimalTilingFeatures": 122247,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 38,
+            "linearTilingFeatures": 122247,
+            "optimalTilingFeatures": 122247,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 39,
+            "linearTilingFeatures": 5124,
+            "optimalTilingFeatures": 5124,
+            "bufferFeatures": 104
+        },
+        {
+            "formatID": 40,
+            "linearTilingFeatures": 5124,
+            "optimalTilingFeatures": 5124,
+            "bufferFeatures": 104
+        },
+        {
+            "formatID": 41,
+            "linearTilingFeatures": 52359,
+            "optimalTilingFeatures": 52359,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 42,
+            "linearTilingFeatures": 52359,
+            "optimalTilingFeatures": 52359,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 43,
+            "linearTilingFeatures": 122245,
+            "optimalTilingFeatures": 122245,
+            "bufferFeatures": 32
+        },
+        {
+            "formatID": 44,
+            "linearTilingFeatures": 122247,
+            "optimalTilingFeatures": 122247,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 45,
+            "linearTilingFeatures": 122247,
+            "optimalTilingFeatures": 122247,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 46,
+            "linearTilingFeatures": 5124,
+            "optimalTilingFeatures": 5124,
+            "bufferFeatures": 104
+        },
+        {
+            "formatID": 47,
+            "linearTilingFeatures": 5124,
+            "optimalTilingFeatures": 5124,
+            "bufferFeatures": 104
+        },
+        {
+            "formatID": 48,
+            "linearTilingFeatures": 52359,
+            "optimalTilingFeatures": 52359,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 49,
+            "linearTilingFeatures": 52359,
+            "optimalTilingFeatures": 52359,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 50,
+            "linearTilingFeatures": 122245,
+            "optimalTilingFeatures": 122245,
+            "bufferFeatures": 32
+        },
+        {
+            "formatID": 51,
+            "linearTilingFeatures": 122247,
+            "optimalTilingFeatures": 122247,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 52,
+            "linearTilingFeatures": 122247,
+            "optimalTilingFeatures": 122247,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 53,
+            "linearTilingFeatures": 5124,
+            "optimalTilingFeatures": 5124,
+            "bufferFeatures": 104
+        },
+        {
+            "formatID": 54,
+            "linearTilingFeatures": 5124,
+            "optimalTilingFeatures": 5124,
+            "bufferFeatures": 104
+        },
+        {
+            "formatID": 55,
+            "linearTilingFeatures": 52359,
+            "optimalTilingFeatures": 52359,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 56,
+            "linearTilingFeatures": 52359,
+            "optimalTilingFeatures": 52359,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 57,
+            "linearTilingFeatures": 122245,
+            "optimalTilingFeatures": 122245,
+            "bufferFeatures": 96
+        },
+        {
+            "formatID": 58,
+            "linearTilingFeatures": 122247,
+            "optimalTilingFeatures": 122247,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 59,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 60,
+            "linearTilingFeatures": 5124,
+            "optimalTilingFeatures": 5124,
+            "bufferFeatures": 104
+        },
+        {
+            "formatID": 61,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 62,
+            "linearTilingFeatures": 52359,
+            "optimalTilingFeatures": 52359,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 63,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 64,
+            "linearTilingFeatures": 122247,
+            "optimalTilingFeatures": 122247,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 65,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 66,
+            "linearTilingFeatures": 5124,
+            "optimalTilingFeatures": 5124,
+            "bufferFeatures": 104
+        },
+        {
+            "formatID": 67,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 68,
+            "linearTilingFeatures": 52359,
+            "optimalTilingFeatures": 52359,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 69,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 70,
+            "linearTilingFeatures": 122243,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 71,
+            "linearTilingFeatures": 122243,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 72,
+            "linearTilingFeatures": 5120,
+            "optimalTilingFeatures": 5120,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 73,
+            "linearTilingFeatures": 5120,
+            "optimalTilingFeatures": 5120,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 74,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 75,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 76,
+            "linearTilingFeatures": 122243,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 77,
+            "linearTilingFeatures": 122247,
+            "optimalTilingFeatures": 122247,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 78,
+            "linearTilingFeatures": 122247,
+            "optimalTilingFeatures": 122247,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 79,
+            "linearTilingFeatures": 5124,
+            "optimalTilingFeatures": 5124,
+            "bufferFeatures": 104
+        },
+        {
+            "formatID": 80,
+            "linearTilingFeatures": 5124,
+            "optimalTilingFeatures": 5124,
+            "bufferFeatures": 104
+        },
+        {
+            "formatID": 81,
+            "linearTilingFeatures": 52359,
+            "optimalTilingFeatures": 52359,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 82,
+            "linearTilingFeatures": 52359,
+            "optimalTilingFeatures": 52359,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 83,
+            "linearTilingFeatures": 122247,
+            "optimalTilingFeatures": 122247,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 84,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 85,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 86,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 87,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 88,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 89,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 90,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 91,
+            "linearTilingFeatures": 122247,
+            "optimalTilingFeatures": 122247,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 92,
+            "linearTilingFeatures": 122247,
+            "optimalTilingFeatures": 122247,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 93,
+            "linearTilingFeatures": 5124,
+            "optimalTilingFeatures": 5124,
+            "bufferFeatures": 104
+        },
+        {
+            "formatID": 94,
+            "linearTilingFeatures": 5124,
+            "optimalTilingFeatures": 5124,
+            "bufferFeatures": 104
+        },
+        {
+            "formatID": 95,
+            "linearTilingFeatures": 52359,
+            "optimalTilingFeatures": 52359,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 96,
+            "linearTilingFeatures": 52359,
+            "optimalTilingFeatures": 52359,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 97,
+            "linearTilingFeatures": 122247,
+            "optimalTilingFeatures": 122247,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 98,
+            "linearTilingFeatures": 52359,
+            "optimalTilingFeatures": 52359,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 99,
+            "linearTilingFeatures": 52359,
+            "optimalTilingFeatures": 52359,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 100,
+            "linearTilingFeatures": 122247,
+            "optimalTilingFeatures": 122247,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 101,
+            "linearTilingFeatures": 52359,
+            "optimalTilingFeatures": 52359,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 102,
+            "linearTilingFeatures": 52359,
+            "optimalTilingFeatures": 52359,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 103,
+            "linearTilingFeatures": 122247,
+            "optimalTilingFeatures": 122247,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 104,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 105,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 106,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 107,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 108,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 109,
+            "linearTilingFeatures": 122243,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 110,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 111,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 112,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 113,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 114,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 115,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 116,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 117,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 118,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 119,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 120,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 121,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 122,
+            "linearTilingFeatures": 122247,
+            "optimalTilingFeatures": 122247,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 123,
+            "linearTilingFeatures": 119813,
+            "optimalTilingFeatures": 119813,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 124,
+            "linearTilingFeatures": 122243,
+            "optimalTilingFeatures": 122755,
+            "bufferFeatures": 24
+        },
+        {
+            "formatID": 125,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 126,
+            "linearTilingFeatures": 122247,
+            "optimalTilingFeatures": 122759,
+            "bufferFeatures": 56
+        },
+        {
+            "formatID": 127,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52867,
+            "bufferFeatures": 24
+        },
+        {
+            "formatID": 128,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 120321,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 129,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 130,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 120321,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 131,
+            "linearTilingFeatures": 49152,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 132,
+            "linearTilingFeatures": 49152,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 133,
+            "linearTilingFeatures": 49152,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 134,
+            "linearTilingFeatures": 49152,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 135,
+            "linearTilingFeatures": 49152,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 136,
+            "linearTilingFeatures": 49152,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 137,
+            "linearTilingFeatures": 49152,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 138,
+            "linearTilingFeatures": 49152,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 139,
+            "linearTilingFeatures": 49152,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 140,
+            "linearTilingFeatures": 49152,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 141,
+            "linearTilingFeatures": 49152,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 142,
+            "linearTilingFeatures": 49152,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 143,
+            "linearTilingFeatures": 49152,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 144,
+            "linearTilingFeatures": 49152,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 145,
+            "linearTilingFeatures": 49152,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 146,
+            "linearTilingFeatures": 49152,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 147,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 148,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 149,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 150,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 151,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 152,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 153,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 154,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 155,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 156,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 157,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 158,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 159,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 160,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 161,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 162,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 163,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 164,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 165,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 166,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 167,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 168,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 169,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 170,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 171,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 172,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 173,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 174,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 175,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 176,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 177,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 178,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 179,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 180,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 181,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 182,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 183,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 184,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        }
+    ]
+}
\ No newline at end of file
diff --git a/src/third_party/vulkan-validation-layers/src/tests/device_profiles/amd_radv_polaris10.json b/src/third_party/vulkan-validation-layers/src/tests/device_profiles/amd_radv_polaris10.json
new file mode 100644
index 0000000..38e3ce0
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/device_profiles/amd_radv_polaris10.json
@@ -0,0 +1,1506 @@
+{
+    "$schema": "https://schema.khronos.org/vulkan/devsim_1_0_0.json#",
+    "comments": {
+        "info": "Vulkan Hardware Report generated via https://vulkan.gpuinfo.org",
+        "desc": "https://vulkan.gpuinfo.org/displayreport.php?id=2117"
+    },
+    "environment": {
+        "architecture": "x86_64",
+        "comment": "",
+        "name": "arch",
+        "reportversion": "1.4",
+        "submitter": "",
+        "version": "unknown"
+    },
+    "extended": {
+        "devicefeatures2": [],
+        "deviceproperties2": [
+            {
+                "extension": "VK_KHR_push_descriptor",
+                "name": "maxPushDescriptors",
+                "value": "32"
+            }
+        ]
+    },
+    "platformdetails": [],
+    "surfacecapabilites": {
+        "maxImageArrayLayers": 1,
+        "maxImageCount": 0,
+        "maxImageExtent": {
+            "height": 755,
+            "width": 927
+        },
+        "minImageCount": 2,
+        "minImageExtent": {
+            "height": 755,
+            "width": 927
+        },
+        "presentmodes": [
+            0,
+            1,
+            2
+        ],
+        "supportedCompositeAlpha": 9,
+        "supportedTransforms": 1,
+        "supportedUsageFlags": 23,
+        "surfaceExtension": "VK_KHR_xcb_surface",
+        "surfaceformats": [
+            {
+                "colorSpace": 0,
+                "format": 50
+            },
+            {
+                "colorSpace": 0,
+                "format": 44
+            }
+        ],
+        "validSurface": true
+    },
+    "VkPhysicalDeviceFeatures": {
+        "alphaToOne": 1,
+        "depthBiasClamp": 1,
+        "depthBounds": 1,
+        "depthClamp": 1,
+        "drawIndirectFirstInstance": 1,
+        "dualSrcBlend": 1,
+        "fillModeNonSolid": 1,
+        "fragmentStoresAndAtomics": 1,
+        "fullDrawIndexUint32": 1,
+        "geometryShader": 1,
+        "imageCubeArray": 1,
+        "independentBlend": 1,
+        "inheritedQueries": 1,
+        "largePoints": 1,
+        "logicOp": 1,
+        "multiDrawIndirect": 1,
+        "multiViewport": 1,
+        "occlusionQueryPrecise": 1,
+        "pipelineStatisticsQuery": 1,
+        "robustBufferAccess": 1,
+        "sampleRateShading": 1,
+        "samplerAnisotropy": 1,
+        "shaderClipDistance": 1,
+        "shaderCullDistance": 1,
+        "shaderFloat64": 1,
+        "shaderImageGatherExtended": 1,
+        "shaderInt16": 0,
+        "shaderInt64": 1,
+        "shaderResourceMinLod": 0,
+        "shaderResourceResidency": 0,
+        "shaderSampledImageArrayDynamicIndexing": 1,
+        "shaderStorageBufferArrayDynamicIndexing": 1,
+        "shaderStorageImageArrayDynamicIndexing": 1,
+        "shaderStorageImageExtendedFormats": 1,
+        "shaderStorageImageMultisample": 0,
+        "shaderStorageImageReadWithoutFormat": 1,
+        "shaderStorageImageWriteWithoutFormat": 1,
+        "shaderTessellationAndGeometryPointSize": 1,
+        "shaderUniformBufferArrayDynamicIndexing": 1,
+        "sparseBinding": 1,
+        "sparseResidency16Samples": 0,
+        "sparseResidency2Samples": 0,
+        "sparseResidency4Samples": 0,
+        "sparseResidency8Samples": 0,
+        "sparseResidencyAliased": 0,
+        "sparseResidencyBuffer": 0,
+        "sparseResidencyImage2D": 0,
+        "sparseResidencyImage3D": 0,
+        "tessellationShader": 1,
+        "textureCompressionASTC_LDR": 0,
+        "textureCompressionBC": 1,
+        "textureCompressionETC2": 0,
+        "variableMultisampleRate": 1,
+        "vertexPipelineStoresAndAtomics": 1,
+        "wideLines": 1
+    },
+    "VkPhysicalDeviceProperties": {
+        "apiVersion": 4194346,
+        "deviceID": 26591,
+        "deviceName": "AMD RADV POLARIS10",
+        "deviceType": 2,
+        "driverVersion": 71311365,
+        "limits": {
+            "bufferImageGranularity": 64,
+            "discreteQueuePriorities": 1,
+            "framebufferColorSampleCounts": 15,
+            "framebufferDepthSampleCounts": 15,
+            "framebufferNoAttachmentsSampleCounts": 15,
+            "framebufferStencilSampleCounts": 15,
+            "lineWidthGranularity": 0.0078125,
+            "lineWidthRange": [
+                0,
+                7.9921875
+            ],
+            "maxBoundDescriptorSets": 32,
+            "maxClipDistances": 8,
+            "maxColorAttachments": 8,
+            "maxCombinedClipAndCullDistances": 8,
+            "maxComputeSharedMemorySize": 32768,
+            "maxComputeWorkGroupCount": [
+                65535,
+                65535,
+                65535
+            ],
+            "maxComputeWorkGroupInvocations": 2048,
+            "maxComputeWorkGroupSize": [
+                2048,
+                2048,
+                2048
+            ],
+            "maxCullDistances": 8,
+            "maxDescriptorSetInputAttachments": 9586979,
+            "maxDescriptorSetSampledImages": 9586979,
+            "maxDescriptorSetSamplers": 9586979,
+            "maxDescriptorSetStorageBuffers": 9586979,
+            "maxDescriptorSetStorageBuffersDynamic": 8,
+            "maxDescriptorSetStorageImages": 9586979,
+            "maxDescriptorSetUniformBuffers": 9586979,
+            "maxDescriptorSetUniformBuffersDynamic": 8,
+            "maxDrawIndexedIndexValue": 4294967295,
+            "maxDrawIndirectCount": 4294967295,
+            "maxFragmentCombinedOutputResources": 8,
+            "maxFragmentDualSrcAttachments": 1,
+            "maxFragmentInputComponents": 128,
+            "maxFragmentOutputAttachments": 8,
+            "maxFramebufferHeight": 16384,
+            "maxFramebufferLayers": 1024,
+            "maxFramebufferWidth": 16384,
+            "maxGeometryInputComponents": 64,
+            "maxGeometryOutputComponents": 128,
+            "maxGeometryOutputVertices": 256,
+            "maxGeometryShaderInvocations": 127,
+            "maxGeometryTotalOutputComponents": 1024,
+            "maxImageArrayLayers": 2048,
+            "maxImageDimension1D": 16384,
+            "maxImageDimension2D": 16384,
+            "maxImageDimension3D": 2048,
+            "maxImageDimensionCube": 16384,
+            "maxInterpolationOffset": 2,
+            "maxMemoryAllocationCount": 4294967295,
+            "maxPerStageDescriptorInputAttachments": 9586979,
+            "maxPerStageDescriptorSampledImages": 9586979,
+            "maxPerStageDescriptorSamplers": 9586979,
+            "maxPerStageDescriptorStorageBuffers": 9586979,
+            "maxPerStageDescriptorStorageImages": 9586979,
+            "maxPerStageDescriptorUniformBuffers": 9586979,
+            "maxPerStageResources": 9586979,
+            "maxPushConstantsSize": 128,
+            "maxSampleMaskWords": 1,
+            "maxSamplerAllocationCount": 65536,
+            "maxSamplerAnisotropy": 16,
+            "maxSamplerLodBias": 16,
+            "maxStorageBufferRange": 4294967295,
+            "maxTessellationControlPerPatchOutputComponents": 120,
+            "maxTessellationControlPerVertexInputComponents": 128,
+            "maxTessellationControlPerVertexOutputComponents": 128,
+            "maxTessellationControlTotalOutputComponents": 4096,
+            "maxTessellationEvaluationInputComponents": 128,
+            "maxTessellationEvaluationOutputComponents": 128,
+            "maxTessellationGenerationLevel": 64,
+            "maxTessellationPatchSize": 32,
+            "maxTexelBufferElements": 134217728,
+            "maxTexelGatherOffset": 31,
+            "maxTexelOffset": 31,
+            "maxUniformBufferRange": 4294967295,
+            "maxVertexInputAttributeOffset": 2047,
+            "maxVertexInputAttributes": 32,
+            "maxVertexInputBindingStride": 2048,
+            "maxVertexInputBindings": 32,
+            "maxVertexOutputComponents": 128,
+            "maxViewportDimensions": [
+                16384,
+                16384
+            ],
+            "maxViewports": 16,
+            "minInterpolationOffset": -2,
+            "minMemoryMapAlignment": 4096,
+            "minStorageBufferOffsetAlignment": 4,
+            "minTexelBufferOffsetAlignment": 1,
+            "minTexelGatherOffset": -32,
+            "minTexelOffset": -32,
+            "minUniformBufferOffsetAlignment": 4,
+            "mipmapPrecisionBits": 4,
+            "nonCoherentAtomSize": 64,
+            "optimalBufferCopyOffsetAlignment": 128,
+            "optimalBufferCopyRowPitchAlignment": 128,
+            "pointSizeGranularity": 0.125,
+            "pointSizeRange": [
+                0.125,
+                255.875
+            ],
+            "sampledImageColorSampleCounts": 15,
+            "sampledImageDepthSampleCounts": 15,
+            "sampledImageIntegerSampleCounts": 1,
+            "sampledImageStencilSampleCounts": 15,
+            "sparseAddressSpaceSize": 4294967295,
+            "standardSampleLocations": 1,
+            "storageImageSampleCounts": 1,
+            "strictLines": 0,
+            "subPixelInterpolationOffsetBits": 8,
+            "subPixelPrecisionBits": 4,
+            "subTexelPrecisionBits": 4,
+            "timestampComputeAndGraphics": 1,
+            "timestampPeriod": 40,
+            "viewportBoundsRange": [
+                -32768,
+                32767
+            ],
+            "viewportSubPixelBits": 13
+        },
+        "pipelineCacheUUID": [
+            49,
+            76,
+            7,
+            90,
+            252,
+            1,
+            178,
+            89,
+            64,
+            0,
+            114,
+            97,
+            100,
+            118,
+            0,
+            0
+        ],
+        "sparseProperties": {
+            "residencyAlignedMipSize": 0,
+            "residencyNonResidentStrict": 0,
+            "residencyStandard2DBlockShape": 0,
+            "residencyStandard2DMultisampleBlockShape": 0,
+            "residencyStandard3DBlockShape": 0
+        },
+        "vendorID": 4098
+    },
+    "VkPhysicalDeviceMemoryProperties": {
+        "memoryHeaps": [
+            {
+                "flags": 1,
+                "size": 8305770496
+            },
+            {
+                "flags": 1,
+                "size": 233246720
+            },
+            {
+                "flags": 0,
+                "size": 8588075008
+            }
+        ],
+        "memoryTypes": [
+            {
+                "heapIndex": 0,
+                "propertyFlags": 1
+            },
+            {
+                "heapIndex": 2,
+                "propertyFlags": 6
+            },
+            {
+                "heapIndex": 1,
+                "propertyFlags": 7
+            },
+            {
+                "heapIndex": 2,
+                "propertyFlags": 14
+            }
+        ]
+    },
+    "ArrayOfVkExtensionProperties": [
+        {
+            "extensionName": "VK_KHR_descriptor_update_template",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_incremental_present",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_maintenance1",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_push_descriptor",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_sampler_mirror_clamp_to_edge",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_swapchain",
+            "specVersion": 68
+        },
+        {
+            "extensionName": "VK_AMD_draw_indirect_count",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_shader_draw_parameters",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_get_memory_requirements2",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_dedicated_allocation",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_memory",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_memory_fd",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_storage_buffer_storage_class",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_variable_pointers",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_semaphore",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_semaphore_fd",
+            "specVersion": 1
+        }
+    ],
+    "ArrayOfVkLayerProperties": [],
+    "ArrayOfVkQueueFamilyProperties": [
+        {
+            "minImageTransferGranularity": {
+                "depth": 1,
+                "height": 1,
+                "width": 1
+            },
+            "queueCount": 1,
+            "queueFlags": 15,
+            "timestampValidBits": 64
+        },
+        {
+            "minImageTransferGranularity": {
+                "depth": 1,
+                "height": 1,
+                "width": 1
+            },
+            "queueCount": 8,
+            "queueFlags": 14,
+            "timestampValidBits": 64
+        }
+    ],
+    "ArrayOfVkFormatProperties": [
+        {
+            "formatID": 1,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 2,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 3,
+            "linearTilingFeatures": 54275,
+            "optimalTilingFeatures": 54275,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 4,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 5,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 6,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 7,
+            "linearTilingFeatures": 54275,
+            "optimalTilingFeatures": 54275,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 8,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 9,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 10,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 11,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 12,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 13,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 14,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 15,
+            "linearTilingFeatures": 56705,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 16,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 17,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 18,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 19,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 20,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 21,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 22,
+            "linearTilingFeatures": 51584,
+            "optimalTilingFeatures": 51584,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 23,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 24,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 25,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 26,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 27,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 28,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 29,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 30,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 31,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 32,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 33,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 34,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 35,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 36,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 37,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 38,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 39,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 40,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 41,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 42,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 43,
+            "linearTilingFeatures": 56705,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 44,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 45,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 46,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 47,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 48,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 49,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 50,
+            "linearTilingFeatures": 56705,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 51,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 52,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 53,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 54,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 55,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 56,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 57,
+            "linearTilingFeatures": 56705,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 58,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 59,
+            "linearTilingFeatures": 51584,
+            "optimalTilingFeatures": 51584,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 60,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 61,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 62,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 63,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 64,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 65,
+            "linearTilingFeatures": 51584,
+            "optimalTilingFeatures": 51584,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 66,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 67,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 68,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 69,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 70,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 71,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 72,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 73,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 74,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 75,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 76,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 77,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 78,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 79,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 80,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 81,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 82,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 83,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 84,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 85,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 86,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 87,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 88,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 89,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 90,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 91,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 92,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 93,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 94,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 95,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 96,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 97,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 98,
+            "linearTilingFeatures": 52359,
+            "optimalTilingFeatures": 52359,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 99,
+            "linearTilingFeatures": 52359,
+            "optimalTilingFeatures": 52359,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 100,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 101,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 102,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 103,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 104,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 105,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 106,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 107,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 108,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 109,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 110,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 111,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 112,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 113,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 114,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 115,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 116,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 117,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 118,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 119,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 120,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 121,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 122,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 123,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 124,
+            "linearTilingFeatures": 49154,
+            "optimalTilingFeatures": 52739,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 125,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 126,
+            "linearTilingFeatures": 49154,
+            "optimalTilingFeatures": 52739,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 127,
+            "linearTilingFeatures": 49154,
+            "optimalTilingFeatures": 52739,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 128,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 52737,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 129,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 130,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 52737,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 131,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 132,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 133,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 134,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 135,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 136,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 137,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 138,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 139,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 140,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 141,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 142,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 143,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 144,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 145,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 146,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 147,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 148,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 149,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 150,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 151,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 152,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 153,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 154,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 155,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 156,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 157,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 158,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 159,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 160,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 161,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 162,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 163,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 164,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 165,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 166,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 167,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 168,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 169,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 170,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 171,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 172,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 173,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 174,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 175,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 176,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 177,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 178,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 179,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 180,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 181,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 182,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 183,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 184,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        }
+    ]
+}
\ No newline at end of file
diff --git a/src/third_party/vulkan-validation-layers/src/tests/device_profiles/geforce_1080_ti.json b/src/third_party/vulkan-validation-layers/src/tests/device_profiles/geforce_1080_ti.json
new file mode 100644
index 0000000..86fc56a
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/device_profiles/geforce_1080_ti.json
@@ -0,0 +1,2184 @@
+{
+    "$schema": "https://schema.khronos.org/vulkan/devsim_1_0_0.json#",
+    "comments": {
+        "info": "Modified version of Vulkan Hardware Report generated via https://vulkan.gpuinfo.org",
+        "desc": "Removed subgroup properties from https://vulkan.gpuinfo.org/displayreport.php?id=3292"
+    },
+    "environment": {
+        "architecture": "x86_64",
+        "comment": "",
+        "name": "windows",
+        "reportversion": "1.6",
+        "submitter": "eloj",
+        "version": "10"
+    },
+    "extended": {
+        "devicefeatures2": [
+            {
+                "extension": "VK_KHR_multiview",
+                "name": "multiview",
+                "supported": true
+            },
+            {
+                "extension": "VK_KHR_multiview",
+                "name": "multiviewGeometryShader",
+                "supported": true
+            },
+            {
+                "extension": "VK_KHR_multiview",
+                "name": "multiviewTessellationShader",
+                "supported": true
+            },
+            {
+                "extension": "VK_KHR_variable_pointers",
+                "name": "variablePointersStorageBuffer",
+                "supported": true
+            },
+            {
+                "extension": "VK_KHR_variable_pointers",
+                "name": "variablePointers",
+                "supported": true
+            },
+            {
+                "extension": "VK_KHR_16bit_storage",
+                "name": "storageBuffer16BitAccess",
+                "supported": true
+            },
+            {
+                "extension": "VK_KHR_16bit_storage",
+                "name": "uniformAndStorageBuffer16BitAccess",
+                "supported": true
+            },
+            {
+                "extension": "VK_KHR_16bit_storage",
+                "name": "storagePushConstant16",
+                "supported": true
+            },
+            {
+                "extension": "VK_KHR_16bit_storage",
+                "name": "storageInputOutput16",
+                "supported": false
+            },
+            {
+                "extension": "VK_EXT_blend_operation_advanced",
+                "name": "advancedBlendCoherentOperations",
+                "supported": true
+            },
+            {
+                "extension": "VK_KHR_sampler_ycbcr_conversion",
+                "name": "samplerYcbcrConversion",
+                "supported": true
+            },
+            {
+                "extension": "VK_KHR_shader_draw_parameters",
+                "name": "shaderDrawParameters",
+                "supported": true
+            }
+        ],
+        "deviceproperties2": [
+            {
+                "extension": "VK_KHR_multiview",
+                "name": "maxMultiviewViewCount",
+                "value": "32"
+            },
+            {
+                "extension": "VK_KHR_multiview",
+                "name": "maxMultiviewInstanceIndex",
+                "value": "134217727"
+            },
+            {
+                "extension": "VK_KHR_push_descriptor",
+                "name": "maxPushDescriptors",
+                "value": "32"
+            },
+            {
+                "extension": "VK_EXT_discard_rectangles",
+                "name": "maxDiscardRectangles",
+                "value": "8"
+            },
+            {
+                "extension": "VK_NVX_multiview_per_view_attributes",
+                "name": "perViewPositionAllComponents",
+                "value": "false"
+            },
+            {
+                "extension": "VK_EXT_conservative_rasterization",
+                "name": "primitiveOverestimationSize",
+                "value": "0"
+            },
+            {
+                "extension": "VK_EXT_conservative_rasterization",
+                "name": "maxExtraPrimitiveOverestimationSize",
+                "value": "0.75"
+            },
+            {
+                "extension": "VK_EXT_conservative_rasterization",
+                "name": "extraPrimitiveOverestimationSizeGranularity",
+                "value": "0.25"
+            },
+            {
+                "extension": "VK_EXT_conservative_rasterization",
+                "name": "primitiveUnderestimation",
+                "value": "false"
+            },
+            {
+                "extension": "VK_EXT_conservative_rasterization",
+                "name": "conservativePointAndLineRasterization",
+                "value": "true"
+            },
+            {
+                "extension": "VK_EXT_conservative_rasterization",
+                "name": "degenerateTrianglesRasterized",
+                "value": "true"
+            },
+            {
+                "extension": "VK_EXT_conservative_rasterization",
+                "name": "degenerateLinesRasterized",
+                "value": "false"
+            },
+            {
+                "extension": "VK_EXT_conservative_rasterization",
+                "name": "fullyCoveredFragmentShaderInputVariable",
+                "value": "false"
+            },
+            {
+                "extension": "VK_EXT_conservative_rasterization",
+                "name": "conservativeRasterizationPostDepthCoverage",
+                "value": "true"
+            },
+            {
+                "extension": "VK_EXT_sampler_filter_minmax",
+                "name": "filterMinmaxSingleComponentFormats",
+                "value": "true"
+            },
+            {
+                "extension": "VK_EXT_sampler_filter_minmax",
+                "name": "filterMinmaxImageComponentMapping",
+                "value": "true"
+            },
+            {
+                "extension": "VK_EXT_sample_locations",
+                "name": "sampleLocationSampleCounts",
+                "value": "31"
+            },
+            {
+                "extension": "VK_EXT_sample_locations",
+                "name": "maxSampleLocationGridSize.width",
+                "value": "1"
+            },
+            {
+                "extension": "VK_EXT_sample_locations",
+                "name": "maxSampleLocationGridSize.height",
+                "value": "1"
+            },
+            {
+                "extension": "VK_EXT_sample_locations",
+                "name": "sampleLocationCoordinateRange[0]",
+                "value": "0"
+            },
+            {
+                "extension": "VK_EXT_sample_locations",
+                "name": "sampleLocationCoordinateRange[1]",
+                "value": "0.9375"
+            },
+            {
+                "extension": "VK_EXT_sample_locations",
+                "name": "sampleLocationSubPixelBits",
+                "value": "4"
+            },
+            {
+                "extension": "VK_EXT_sample_locations",
+                "name": "variableSampleLocations",
+                "value": "true"
+            },
+            {
+                "extension": "VK_EXT_blend_operation_advanced",
+                "name": "advancedBlendMaxColorAttachments",
+                "value": "8"
+            },
+            {
+                "extension": "VK_EXT_blend_operation_advanced",
+                "name": "advancedBlendIndependentBlend",
+                "value": "false"
+            },
+            {
+                "extension": "VK_EXT_blend_operation_advanced",
+                "name": "advancedBlendNonPremultipliedSrcColor",
+                "value": "true"
+            },
+            {
+                "extension": "VK_EXT_blend_operation_advanced",
+                "name": "advancedBlendNonPremultipliedDstColor",
+                "value": "true"
+            },
+            {
+                "extension": "VK_EXT_blend_operation_advanced",
+                "name": "advancedBlendCorrelatedOverlap",
+                "value": "true"
+            },
+            {
+                "extension": "VK_EXT_blend_operation_advanced",
+                "name": "advancedBlendAllOperations",
+                "value": "true"
+            },
+            {
+                "extension": "VK_KHR_sampler_ycbcr_conversion",
+                "name": "combinedImageSamplerDescriptorCount",
+                "value": "0"
+            },
+            {
+                "extension": "VK_KHR_maintenance3",
+                "name": "maxPerSetDescriptors",
+                "value": "4294967295"
+            },
+            {
+                "extension": "VK_KHR_maintenance3",
+                "name": "maxMemoryAllocationSize",
+                "value": "4292870144"
+            }
+        ]
+    },
+    "instance": {
+        "extensions": [
+            {
+                "extensionName": "VK_EXT_debug_report",
+                "specVersion": 9
+            },
+            {
+                "extensionName": "VK_EXT_display_surface_counter",
+                "specVersion": 1
+            },
+            {
+                "extensionName": "VK_KHR_get_physical_device_properties2",
+                "specVersion": 1
+            },
+            {
+                "extensionName": "VK_KHR_get_surface_capabilities2",
+                "specVersion": 1
+            },
+            {
+                "extensionName": "VK_KHR_surface",
+                "specVersion": 25
+            },
+            {
+                "extensionName": "VK_KHR_win32_surface",
+                "specVersion": 6
+            },
+            {
+                "extensionName": "VK_KHR_device_group_creation",
+                "specVersion": 1
+            },
+            {
+                "extensionName": "VK_KHR_external_fence_capabilities",
+                "specVersion": 1
+            },
+            {
+                "extensionName": "VK_KHR_external_memory_capabilities",
+                "specVersion": 1
+            },
+            {
+                "extensionName": "VK_KHR_external_semaphore_capabilities",
+                "specVersion": 1
+            },
+            {
+                "extensionName": "VK_NV_external_memory_capabilities",
+                "specVersion": 1
+            },
+            {
+                "extensionName": "VK_EXT_debug_utils",
+                "specVersion": 1
+            }
+        ],
+        "layers": [
+            {
+                "description": "NVIDIA Optimus layer",
+                "extensions": [],
+                "implementationVersion": 1,
+                "layerName": "VK_LAYER_NV_optimus",
+                "specVersion": 4198470
+            },
+            {
+                "description": "Steam Overlay Layer",
+                "extensions": [],
+                "implementationVersion": 1,
+                "layerName": "VK_LAYER_VALVE_steam_overlay",
+                "specVersion": 4194307
+            },
+            {
+                "description": "LunarG Standard Validation Layer",
+                "extensions": [],
+                "implementationVersion": 1,
+                "layerName": "VK_LAYER_LUNARG_standard_validation",
+                "specVersion": 4194377
+            }
+        ]
+    },
+    "platformdetails": [],
+    "surfacecapabilites": {
+        "maxImageArrayLayers": 1,
+        "maxImageCount": 8,
+        "maxImageExtent": {
+            "height": 755,
+            "width": 927
+        },
+        "minImageCount": 2,
+        "minImageExtent": {
+            "height": 755,
+            "width": 927
+        },
+        "presentmodes": [
+            2,
+            3,
+            1
+        ],
+        "supportedCompositeAlpha": 1,
+        "supportedTransforms": 1,
+        "supportedUsageFlags": 159,
+        "surfaceExtension": "VK_KHR_win32_surface",
+        "surfaceformats": [
+            {
+                "colorSpace": 0,
+                "format": 44
+            },
+            {
+                "colorSpace": 0,
+                "format": 50
+            }
+        ],
+        "validSurface": true
+    },
+    "VkPhysicalDeviceFeatures": {
+        "alphaToOne": 1,
+        "depthBiasClamp": 1,
+        "depthBounds": 1,
+        "depthClamp": 1,
+        "drawIndirectFirstInstance": 1,
+        "dualSrcBlend": 1,
+        "fillModeNonSolid": 1,
+        "fragmentStoresAndAtomics": 1,
+        "fullDrawIndexUint32": 1,
+        "geometryShader": 1,
+        "imageCubeArray": 1,
+        "independentBlend": 1,
+        "inheritedQueries": 1,
+        "largePoints": 1,
+        "logicOp": 1,
+        "multiDrawIndirect": 1,
+        "multiViewport": 1,
+        "occlusionQueryPrecise": 1,
+        "pipelineStatisticsQuery": 1,
+        "robustBufferAccess": 1,
+        "sampleRateShading": 1,
+        "samplerAnisotropy": 1,
+        "shaderClipDistance": 1,
+        "shaderCullDistance": 1,
+        "shaderFloat64": 1,
+        "shaderImageGatherExtended": 1,
+        "shaderInt16": 1,
+        "shaderInt64": 1,
+        "shaderResourceMinLod": 1,
+        "shaderResourceResidency": 1,
+        "shaderSampledImageArrayDynamicIndexing": 1,
+        "shaderStorageBufferArrayDynamicIndexing": 1,
+        "shaderStorageImageArrayDynamicIndexing": 1,
+        "shaderStorageImageExtendedFormats": 1,
+        "shaderStorageImageMultisample": 1,
+        "shaderStorageImageReadWithoutFormat": 1,
+        "shaderStorageImageWriteWithoutFormat": 1,
+        "shaderTessellationAndGeometryPointSize": 1,
+        "shaderUniformBufferArrayDynamicIndexing": 1,
+        "sparseBinding": 1,
+        "sparseResidency16Samples": 1,
+        "sparseResidency2Samples": 1,
+        "sparseResidency4Samples": 1,
+        "sparseResidency8Samples": 1,
+        "sparseResidencyAliased": 1,
+        "sparseResidencyBuffer": 1,
+        "sparseResidencyImage2D": 1,
+        "sparseResidencyImage3D": 1,
+        "tessellationShader": 1,
+        "textureCompressionASTC_LDR": 0,
+        "textureCompressionBC": 1,
+        "textureCompressionETC2": 0,
+        "variableMultisampleRate": 1,
+        "vertexPipelineStoresAndAtomics": 1,
+        "wideLines": 1
+    },
+    "VkPhysicalDeviceProperties": {
+        "apiVersion": 4198470,
+        "deviceID": 6918,
+        "deviceName": "GeForce GTX 1080 Ti",
+        "deviceType": 2,
+        "driverVersion": 1666662400,
+        "limits": {
+            "bufferImageGranularity": 1024,
+            "discreteQueuePriorities": 2,
+            "framebufferColorSampleCounts": 15,
+            "framebufferDepthSampleCounts": 15,
+            "framebufferNoAttachmentsSampleCounts": 31,
+            "framebufferStencilSampleCounts": 31,
+            "lineWidthGranularity": 0.125,
+            "lineWidthRange": [
+                0.5,
+                10
+            ],
+            "maxBoundDescriptorSets": 8,
+            "maxClipDistances": 8,
+            "maxColorAttachments": 8,
+            "maxCombinedClipAndCullDistances": 8,
+            "maxComputeSharedMemorySize": 49152,
+            "maxComputeWorkGroupCount": [
+                2147483647,
+                65535,
+                65535
+            ],
+            "maxComputeWorkGroupInvocations": 1536,
+            "maxComputeWorkGroupSize": [
+                1536,
+                1024,
+                64
+            ],
+            "maxCullDistances": 8,
+            "maxDescriptorSetInputAttachments": 1048576,
+            "maxDescriptorSetSampledImages": 1048576,
+            "maxDescriptorSetSamplers": 1048576,
+            "maxDescriptorSetStorageBuffers": 1048576,
+            "maxDescriptorSetStorageBuffersDynamic": 16,
+            "maxDescriptorSetStorageImages": 1048576,
+            "maxDescriptorSetUniformBuffers": 90,
+            "maxDescriptorSetUniformBuffersDynamic": 15,
+            "maxDrawIndexedIndexValue": 4294967295,
+            "maxDrawIndirectCount": 4294967295,
+            "maxFragmentCombinedOutputResources": 16,
+            "maxFragmentDualSrcAttachments": 1,
+            "maxFragmentInputComponents": 128,
+            "maxFragmentOutputAttachments": 8,
+            "maxFramebufferHeight": 32768,
+            "maxFramebufferLayers": 2048,
+            "maxFramebufferWidth": 32768,
+            "maxGeometryInputComponents": 128,
+            "maxGeometryOutputComponents": 128,
+            "maxGeometryOutputVertices": 1024,
+            "maxGeometryShaderInvocations": 32,
+            "maxGeometryTotalOutputComponents": 1024,
+            "maxImageArrayLayers": 2048,
+            "maxImageDimension1D": 32768,
+            "maxImageDimension2D": 32768,
+            "maxImageDimension3D": 16384,
+            "maxImageDimensionCube": 32768,
+            "maxInterpolationOffset": 0.4375,
+            "maxMemoryAllocationCount": 4096,
+            "maxPerStageDescriptorInputAttachments": 1048576,
+            "maxPerStageDescriptorSampledImages": 1048576,
+            "maxPerStageDescriptorSamplers": 1048576,
+            "maxPerStageDescriptorStorageBuffers": 1048576,
+            "maxPerStageDescriptorStorageImages": 1048576,
+            "maxPerStageDescriptorUniformBuffers": 15,
+            "maxPerStageResources": 4294967295,
+            "maxPushConstantsSize": 256,
+            "maxSampleMaskWords": 1,
+            "maxSamplerAllocationCount": 4000,
+            "maxSamplerAnisotropy": 16,
+            "maxSamplerLodBias": 15,
+            "maxStorageBufferRange": 4294967295,
+            "maxTessellationControlPerPatchOutputComponents": 120,
+            "maxTessellationControlPerVertexInputComponents": 128,
+            "maxTessellationControlPerVertexOutputComponents": 128,
+            "maxTessellationControlTotalOutputComponents": 4216,
+            "maxTessellationEvaluationInputComponents": 128,
+            "maxTessellationEvaluationOutputComponents": 128,
+            "maxTessellationGenerationLevel": 64,
+            "maxTessellationPatchSize": 32,
+            "maxTexelBufferElements": 134217728,
+            "maxTexelGatherOffset": 31,
+            "maxTexelOffset": 7,
+            "maxUniformBufferRange": 65536,
+            "maxVertexInputAttributeOffset": 2047,
+            "maxVertexInputAttributes": 32,
+            "maxVertexInputBindingStride": 2048,
+            "maxVertexInputBindings": 32,
+            "maxVertexOutputComponents": 128,
+            "maxViewportDimensions": [
+                32768,
+                32768
+            ],
+            "maxViewports": 16,
+            "minInterpolationOffset": -0.5,
+            "minMemoryMapAlignment": 64,
+            "minStorageBufferOffsetAlignment": 32,
+            "minTexelBufferOffsetAlignment": 16,
+            "minTexelGatherOffset": -32,
+            "minTexelOffset": -8,
+            "minUniformBufferOffsetAlignment": 256,
+            "mipmapPrecisionBits": 8,
+            "nonCoherentAtomSize": 64,
+            "optimalBufferCopyOffsetAlignment": 1,
+            "optimalBufferCopyRowPitchAlignment": 1,
+            "pointSizeGranularity": 0.125,
+            "pointSizeRange": [
+                1,
+                189.875
+            ],
+            "sampledImageColorSampleCounts": 15,
+            "sampledImageDepthSampleCounts": 15,
+            "sampledImageIntegerSampleCounts": 15,
+            "sampledImageStencilSampleCounts": 31,
+            "sparseAddressSpaceSize": -1,
+            "standardSampleLocations": 1,
+            "storageImageSampleCounts": 15,
+            "strictLines": 1,
+            "subPixelInterpolationOffsetBits": 4,
+            "subPixelPrecisionBits": 8,
+            "subTexelPrecisionBits": 8,
+            "timestampComputeAndGraphics": 1,
+            "timestampPeriod": 1,
+            "viewportBoundsRange": [
+                -65536,
+                65536
+            ],
+            "viewportSubPixelBits": 8
+        },
+        "pipelineCacheUUID": [
+            120,
+            132,
+            235,
+            50,
+            24,
+            166,
+            79,
+            35,
+            161,
+            144,
+            218,
+            111,
+            217,
+            132,
+            252,
+            96
+        ],
+        "sparseProperties": {
+            "residencyAlignedMipSize": 0,
+            "residencyNonResidentStrict": 1,
+            "residencyStandard2DBlockShape": 1,
+            "residencyStandard2DMultisampleBlockShape": 1,
+            "residencyStandard3DBlockShape": 1
+        },
+        "vendorID": 4318
+    },
+    "VkPhysicalDeviceMemoryProperties": {
+        "memoryHeaps": [
+            {
+                "flags": 1,
+                "size": 11667505152
+            },
+            {
+                "flags": 0,
+                "size": 68656562176
+            }
+        ],
+        "memoryTypes": [
+            {
+                "heapIndex": 1,
+                "propertyFlags": 0
+            },
+            {
+                "heapIndex": 1,
+                "propertyFlags": 0
+            },
+            {
+                "heapIndex": 1,
+                "propertyFlags": 0
+            },
+            {
+                "heapIndex": 1,
+                "propertyFlags": 0
+            },
+            {
+                "heapIndex": 1,
+                "propertyFlags": 0
+            },
+            {
+                "heapIndex": 1,
+                "propertyFlags": 0
+            },
+            {
+                "heapIndex": 1,
+                "propertyFlags": 0
+            },
+            {
+                "heapIndex": 0,
+                "propertyFlags": 1
+            },
+            {
+                "heapIndex": 0,
+                "propertyFlags": 1
+            },
+            {
+                "heapIndex": 1,
+                "propertyFlags": 6
+            },
+            {
+                "heapIndex": 1,
+                "propertyFlags": 14
+            }
+        ]
+    },
+    "ArrayOfVkExtensionProperties": [
+        {
+            "extensionName": "VK_KHR_swapchain",
+            "specVersion": 70
+        },
+        {
+            "extensionName": "VK_KHR_16bit_storage",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_bind_memory2",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_dedicated_allocation",
+            "specVersion": 3
+        },
+        {
+            "extensionName": "VK_KHR_descriptor_update_template",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_device_group",
+            "specVersion": 3
+        },
+        {
+            "extensionName": "VK_KHR_get_memory_requirements2",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_image_format_list",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_maintenance1",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_maintenance2",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_maintenance3",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_multiview",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_push_descriptor",
+            "specVersion": 2
+        },
+        {
+            "extensionName": "VK_KHR_relaxed_block_layout",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_sampler_mirror_clamp_to_edge",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_sampler_ycbcr_conversion",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_shader_draw_parameters",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_storage_buffer_storage_class",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_memory",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_memory_win32",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_semaphore",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_semaphore_win32",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_win32_keyed_mutex",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_fence",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_fence_win32",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_variable_pointers",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHX_device_group",
+            "specVersion": 2
+        },
+        {
+            "extensionName": "VK_KHX_multiview",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_EXT_blend_operation_advanced",
+            "specVersion": 2
+        },
+        {
+            "extensionName": "VK_EXT_conservative_rasterization",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_EXT_depth_range_unrestricted",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_EXT_discard_rectangles",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_EXT_post_depth_coverage",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_EXT_sample_locations",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_EXT_sampler_filter_minmax",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_EXT_shader_subgroup_ballot",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_EXT_shader_subgroup_vote",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_EXT_shader_viewport_index_layer",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_NV_dedicated_allocation",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_NV_external_memory",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_NV_external_memory_win32",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_NV_fill_rectangle",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_NV_fragment_coverage_to_color",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_NV_framebuffer_mixed_samples",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_NV_glsl_shader",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_NV_win32_keyed_mutex",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_NV_clip_space_w_scaling",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_NV_sample_mask_override_coverage",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_NV_viewport_array2",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_NV_viewport_swizzle",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_NV_geometry_shader_passthrough",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_NVX_device_generated_commands",
+            "specVersion": 3
+        },
+        {
+            "extensionName": "VK_NVX_multiview_per_view_attributes",
+            "specVersion": 1
+        }
+    ],
+    "ArrayOfVkLayerProperties": [
+        {
+            "description": "NVIDIA Optimus layer",
+            "implementationVersion": 1,
+            "layerName": "VK_LAYER_NV_optimus",
+            "specVersion": 4198470
+        }
+    ],
+    "ArrayOfVkQueueFamilyProperties": [
+        {
+            "minImageTransferGranularity": {
+                "depth": 1,
+                "height": 1,
+                "width": 1
+            },
+            "queueCount": 16,
+            "queueFlags": 15,
+            "timestampValidBits": 64
+        },
+        {
+            "minImageTransferGranularity": {
+                "depth": 1,
+                "height": 1,
+                "width": 1
+            },
+            "queueCount": 1,
+            "queueFlags": 4,
+            "timestampValidBits": 64
+        },
+        {
+            "minImageTransferGranularity": {
+                "depth": 1,
+                "height": 1,
+                "width": 1
+            },
+            "queueCount": 8,
+            "queueFlags": 2,
+            "timestampValidBits": 64
+        }
+    ],
+    "ArrayOfVkFormatProperties": [
+        {
+            "formatID": 1,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 119809,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 2,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 119809,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 3,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 119809,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 4,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 122241,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 5,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 119809,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 6,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 119809,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 7,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 119809,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 8,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 122241,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 9,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 10,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 11,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 12,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 13,
+            "linearTilingFeatures": 115713,
+            "optimalTilingFeatures": 117891,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 14,
+            "linearTilingFeatures": 115713,
+            "optimalTilingFeatures": 117891,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 15,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 119809,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 16,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 17,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 18,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 19,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 20,
+            "linearTilingFeatures": 115713,
+            "optimalTilingFeatures": 117891,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 21,
+            "linearTilingFeatures": 115713,
+            "optimalTilingFeatures": 117891,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 22,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 23,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 24,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 25,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 26,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 27,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 28,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 29,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 30,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 31,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 32,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 33,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 34,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 35,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 36,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 37,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 38,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 39,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 40,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 41,
+            "linearTilingFeatures": 115713,
+            "optimalTilingFeatures": 117891,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 42,
+            "linearTilingFeatures": 115713,
+            "optimalTilingFeatures": 117891,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 43,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 122241,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 44,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 45,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 119809,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 46,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 47,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 48,
+            "linearTilingFeatures": 115713,
+            "optimalTilingFeatures": 115713,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 49,
+            "linearTilingFeatures": 115713,
+            "optimalTilingFeatures": 115713,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 50,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 122241,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 51,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 52,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 53,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 54,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 55,
+            "linearTilingFeatures": 115713,
+            "optimalTilingFeatures": 117891,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 56,
+            "linearTilingFeatures": 115713,
+            "optimalTilingFeatures": 117891,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 57,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 122241,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 58,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 122241,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 59,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 60,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 61,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 62,
+            "linearTilingFeatures": 115713,
+            "optimalTilingFeatures": 115713,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 63,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 64,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 65,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 66,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 67,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 68,
+            "linearTilingFeatures": 115713,
+            "optimalTilingFeatures": 117891,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 69,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 70,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 71,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 72,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 73,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 74,
+            "linearTilingFeatures": 115713,
+            "optimalTilingFeatures": 117891,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 75,
+            "linearTilingFeatures": 115713,
+            "optimalTilingFeatures": 117891,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 76,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 77,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 78,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 79,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 80,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 81,
+            "linearTilingFeatures": 115713,
+            "optimalTilingFeatures": 117891,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 82,
+            "linearTilingFeatures": 115713,
+            "optimalTilingFeatures": 117891,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 83,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 84,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 85,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 86,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 87,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 88,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 89,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 90,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 91,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 92,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 93,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 94,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 95,
+            "linearTilingFeatures": 115713,
+            "optimalTilingFeatures": 117891,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 96,
+            "linearTilingFeatures": 115713,
+            "optimalTilingFeatures": 117891,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 97,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 98,
+            "linearTilingFeatures": 115713,
+            "optimalTilingFeatures": 117895,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 99,
+            "linearTilingFeatures": 115713,
+            "optimalTilingFeatures": 117895,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 100,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 122247,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 101,
+            "linearTilingFeatures": 115713,
+            "optimalTilingFeatures": 117891,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 102,
+            "linearTilingFeatures": 115713,
+            "optimalTilingFeatures": 117891,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 103,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 104,
+            "linearTilingFeatures": 115713,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 105,
+            "linearTilingFeatures": 115713,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 106,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 107,
+            "linearTilingFeatures": 115713,
+            "optimalTilingFeatures": 117891,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 108,
+            "linearTilingFeatures": 115713,
+            "optimalTilingFeatures": 117891,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 109,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 110,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 111,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 112,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 113,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 114,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 115,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 116,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 117,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 118,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 119,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 120,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 121,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 122,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 122243,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 123,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 119809,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 124,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 120321,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 125,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 120321,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 126,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 120321,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 127,
+            "linearTilingFeatures": 115713,
+            "optimalTilingFeatures": 116225,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 128,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 129,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 120321,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 130,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 120321,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 131,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 119809,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 132,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 119809,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 133,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 119809,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 134,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 119809,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 135,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 119809,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 136,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 119809,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 137,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 119809,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 138,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 119809,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 139,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 119809,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 140,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 119809,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 141,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 119809,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 142,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 119809,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 143,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 119809,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 144,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 119809,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 145,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 119809,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 146,
+            "linearTilingFeatures": 119809,
+            "optimalTilingFeatures": 119809,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 147,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 148,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 149,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 150,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 151,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 152,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 153,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 154,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 155,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 156,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 157,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 158,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 159,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 160,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 161,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 162,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 163,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 164,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 165,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 166,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 167,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 168,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 169,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 170,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 171,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 172,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 173,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 174,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 175,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 176,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 177,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 178,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 179,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 180,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 181,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 182,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 183,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 184,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156000,
+            "linearTilingFeatures": 9424897,
+            "optimalTilingFeatures": 9424897,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156001,
+            "linearTilingFeatures": 9424897,
+            "optimalTilingFeatures": 9424897,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156002,
+            "linearTilingFeatures": 13619201,
+            "optimalTilingFeatures": 13619201,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156003,
+            "linearTilingFeatures": 13619201,
+            "optimalTilingFeatures": 13619201,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156004,
+            "linearTilingFeatures": 13619201,
+            "optimalTilingFeatures": 13619201,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156005,
+            "linearTilingFeatures": 13619201,
+            "optimalTilingFeatures": 13619201,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156006,
+            "linearTilingFeatures": 5230593,
+            "optimalTilingFeatures": 5230593,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156007,
+            "linearTilingFeatures": 1036289,
+            "optimalTilingFeatures": 1036289,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156008,
+            "linearTilingFeatures": 1036289,
+            "optimalTilingFeatures": 1036289,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156009,
+            "linearTilingFeatures": 1036289,
+            "optimalTilingFeatures": 1036289,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156010,
+            "linearTilingFeatures": 9424897,
+            "optimalTilingFeatures": 9424897,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156011,
+            "linearTilingFeatures": 9424897,
+            "optimalTilingFeatures": 9424897,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156012,
+            "linearTilingFeatures": 13619201,
+            "optimalTilingFeatures": 13619201,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156013,
+            "linearTilingFeatures": 13619201,
+            "optimalTilingFeatures": 13619201,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156014,
+            "linearTilingFeatures": 13619201,
+            "optimalTilingFeatures": 13619201,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156015,
+            "linearTilingFeatures": 13619201,
+            "optimalTilingFeatures": 13619201,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156016,
+            "linearTilingFeatures": 5230593,
+            "optimalTilingFeatures": 5230593,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156017,
+            "linearTilingFeatures": 1036289,
+            "optimalTilingFeatures": 1036289,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156018,
+            "linearTilingFeatures": 1036289,
+            "optimalTilingFeatures": 1036289,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156019,
+            "linearTilingFeatures": 1036289,
+            "optimalTilingFeatures": 1036289,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156020,
+            "linearTilingFeatures": 9424897,
+            "optimalTilingFeatures": 9424897,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156021,
+            "linearTilingFeatures": 9424897,
+            "optimalTilingFeatures": 9424897,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156022,
+            "linearTilingFeatures": 13619201,
+            "optimalTilingFeatures": 13619201,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156023,
+            "linearTilingFeatures": 13619201,
+            "optimalTilingFeatures": 13619201,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156024,
+            "linearTilingFeatures": 13619201,
+            "optimalTilingFeatures": 13619201,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156025,
+            "linearTilingFeatures": 13619201,
+            "optimalTilingFeatures": 13619201,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156026,
+            "linearTilingFeatures": 5230593,
+            "optimalTilingFeatures": 5230593,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156027,
+            "linearTilingFeatures": 9424897,
+            "optimalTilingFeatures": 9424897,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156028,
+            "linearTilingFeatures": 9424897,
+            "optimalTilingFeatures": 9424897,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156029,
+            "linearTilingFeatures": 13619201,
+            "optimalTilingFeatures": 13619201,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156030,
+            "linearTilingFeatures": 13619201,
+            "optimalTilingFeatures": 13619201,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156031,
+            "linearTilingFeatures": 13619201,
+            "optimalTilingFeatures": 13619201,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156032,
+            "linearTilingFeatures": 13619201,
+            "optimalTilingFeatures": 13619201,
+            "bufferFeatures": 0
+        }
+    ]
+}
\ No newline at end of file
diff --git a/src/third_party/vulkan-validation-layers/src/tests/device_profiles/geforce_940m.json b/src/third_party/vulkan-validation-layers/src/tests/device_profiles/geforce_940m.json
new file mode 100644
index 0000000..201839e
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/device_profiles/geforce_940m.json
@@ -0,0 +1,1687 @@
+{
+    "$schema": "https://schema.khronos.org/vulkan/devsim_1_0_0.json#",
+    "comments": {
+        "info": "Vulkan Hardware Report generated via https://vulkan.gpuinfo.org",
+        "desc": "https://vulkan.gpuinfo.org/displayreport.php?id=2110"
+    },
+    "environment": {
+        "architecture": "x86_64",
+        "comment": "",
+        "name": "windows",
+        "reportversion": "1.4",
+        "submitter": "",
+        "version": "10"
+    },
+    "extended": {
+        "devicefeatures2": [
+            {
+                "extension": "VK_KHX_multiview",
+                "name": "multiview",
+                "supported": true
+            },
+            {
+                "extension": "VK_KHX_multiview",
+                "name": "multiviewGeometryShader",
+                "supported": true
+            },
+            {
+                "extension": "VK_KHX_multiview",
+                "name": "multiviewTessellationShader",
+                "supported": true
+            }
+        ],
+        "deviceproperties2": [
+            {
+                "extension": "VK_KHX_multiview",
+                "name": "maxMultiviewViewCount",
+                "value": "32"
+            },
+            {
+                "extension": "VK_KHX_multiview",
+                "name": "maxMultiviewInstanceIndex",
+                "value": "134217727"
+            },
+            {
+                "extension": "VK_KHR_push_descriptor",
+                "name": "maxPushDescriptors",
+                "value": "32"
+            },
+            {
+                "extension": "VK_EXT_discard_rectangles",
+                "name": "maxDiscardRectangles",
+                "value": "8"
+            },
+            {
+                "extension": "VK_NVX_multiview_per_view_attributes",
+                "name": "perViewPositionAllComponents",
+                "value": "8"
+            }
+        ]
+    },
+    "platformdetails": [],
+    "surfacecapabilites": {
+        "maxImageArrayLayers": 1,
+        "maxImageCount": 8,
+        "maxImageExtent": {
+            "height": 749,
+            "width": 927
+        },
+        "minImageCount": 2,
+        "minImageExtent": {
+            "height": 749,
+            "width": 927
+        },
+        "presentmodes": [
+            2,
+            3,
+            1
+        ],
+        "supportedCompositeAlpha": 1,
+        "supportedTransforms": 1,
+        "supportedUsageFlags": 159,
+        "surfaceExtension": "VK_KHR_win32_surface",
+        "surfaceformats": [
+            {
+                "colorSpace": 0,
+                "format": 44
+            },
+            {
+                "colorSpace": 0,
+                "format": 50
+            }
+        ],
+        "validSurface": true
+    },
+    "VkPhysicalDeviceFeatures": {
+        "alphaToOne": 1,
+        "depthBiasClamp": 1,
+        "depthBounds": 1,
+        "depthClamp": 1,
+        "drawIndirectFirstInstance": 1,
+        "dualSrcBlend": 1,
+        "fillModeNonSolid": 1,
+        "fragmentStoresAndAtomics": 1,
+        "fullDrawIndexUint32": 1,
+        "geometryShader": 1,
+        "imageCubeArray": 1,
+        "independentBlend": 1,
+        "inheritedQueries": 1,
+        "largePoints": 1,
+        "logicOp": 1,
+        "multiDrawIndirect": 1,
+        "multiViewport": 1,
+        "occlusionQueryPrecise": 1,
+        "pipelineStatisticsQuery": 1,
+        "robustBufferAccess": 1,
+        "sampleRateShading": 1,
+        "samplerAnisotropy": 1,
+        "shaderClipDistance": 1,
+        "shaderCullDistance": 1,
+        "shaderFloat64": 1,
+        "shaderImageGatherExtended": 1,
+        "shaderInt16": 0,
+        "shaderInt64": 1,
+        "shaderResourceMinLod": 0,
+        "shaderResourceResidency": 0,
+        "shaderSampledImageArrayDynamicIndexing": 1,
+        "shaderStorageBufferArrayDynamicIndexing": 1,
+        "shaderStorageImageArrayDynamicIndexing": 1,
+        "shaderStorageImageExtendedFormats": 1,
+        "shaderStorageImageMultisample": 1,
+        "shaderStorageImageReadWithoutFormat": 1,
+        "shaderStorageImageWriteWithoutFormat": 1,
+        "shaderTessellationAndGeometryPointSize": 1,
+        "shaderUniformBufferArrayDynamicIndexing": 1,
+        "sparseBinding": 1,
+        "sparseResidency16Samples": 0,
+        "sparseResidency2Samples": 0,
+        "sparseResidency4Samples": 0,
+        "sparseResidency8Samples": 0,
+        "sparseResidencyAliased": 0,
+        "sparseResidencyBuffer": 0,
+        "sparseResidencyImage2D": 0,
+        "sparseResidencyImage3D": 0,
+        "tessellationShader": 1,
+        "textureCompressionASTC_LDR": 0,
+        "textureCompressionBC": 1,
+        "textureCompressionETC2": 0,
+        "variableMultisampleRate": 1,
+        "vertexPipelineStoresAndAtomics": 1,
+        "wideLines": 1
+    },
+    "VkPhysicalDeviceProperties": {
+        "apiVersion": 4194360,
+        "deviceID": 4935,
+        "deviceName": "GeForce 940M",
+        "deviceType": 2,
+        "driverVersion": 1627602944,
+        "limits": {
+            "bufferImageGranularity": 65536,
+            "discreteQueuePriorities": 2,
+            "framebufferColorSampleCounts": 15,
+            "framebufferDepthSampleCounts": 15,
+            "framebufferNoAttachmentsSampleCounts": 15,
+            "framebufferStencilSampleCounts": 15,
+            "lineWidthGranularity": 0.125,
+            "lineWidthRange": [
+                0.5,
+                10
+            ],
+            "maxBoundDescriptorSets": 8,
+            "maxClipDistances": 8,
+            "maxColorAttachments": 8,
+            "maxCombinedClipAndCullDistances": 8,
+            "maxComputeSharedMemorySize": 49152,
+            "maxComputeWorkGroupCount": [
+                2147483647,
+                65535,
+                65535
+            ],
+            "maxComputeWorkGroupInvocations": 1536,
+            "maxComputeWorkGroupSize": [
+                1536,
+                1024,
+                64
+            ],
+            "maxCullDistances": 8,
+            "maxDescriptorSetInputAttachments": 8,
+            "maxDescriptorSetSampledImages": 98304,
+            "maxDescriptorSetSamplers": 4000,
+            "maxDescriptorSetStorageBuffers": 4096,
+            "maxDescriptorSetStorageBuffersDynamic": 16,
+            "maxDescriptorSetStorageImages": 98304,
+            "maxDescriptorSetUniformBuffers": 72,
+            "maxDescriptorSetUniformBuffersDynamic": 72,
+            "maxDrawIndexedIndexValue": 4294967295,
+            "maxDrawIndirectCount": 4294967295,
+            "maxFragmentCombinedOutputResources": 16,
+            "maxFragmentDualSrcAttachments": 1,
+            "maxFragmentInputComponents": 128,
+            "maxFragmentOutputAttachments": 8,
+            "maxFramebufferHeight": 16384,
+            "maxFramebufferLayers": 2048,
+            "maxFramebufferWidth": 16384,
+            "maxGeometryInputComponents": 128,
+            "maxGeometryOutputComponents": 128,
+            "maxGeometryOutputVertices": 1024,
+            "maxGeometryShaderInvocations": 32,
+            "maxGeometryTotalOutputComponents": 1024,
+            "maxImageArrayLayers": 2048,
+            "maxImageDimension1D": 16384,
+            "maxImageDimension2D": 16384,
+            "maxImageDimension3D": 2048,
+            "maxImageDimensionCube": 16384,
+            "maxInterpolationOffset": 0.4375,
+            "maxMemoryAllocationCount": 4096,
+            "maxPerStageDescriptorInputAttachments": 8,
+            "maxPerStageDescriptorSampledImages": 16384,
+            "maxPerStageDescriptorSamplers": 4000,
+            "maxPerStageDescriptorStorageBuffers": 4096,
+            "maxPerStageDescriptorStorageImages": 16384,
+            "maxPerStageDescriptorUniformBuffers": 12,
+            "maxPerStageResources": 53268,
+            "maxPushConstantsSize": 256,
+            "maxSampleMaskWords": 1,
+            "maxSamplerAllocationCount": 4000,
+            "maxSamplerAnisotropy": 16,
+            "maxSamplerLodBias": 15,
+            "maxStorageBufferRange": 2147483647,
+            "maxTessellationControlPerPatchOutputComponents": 120,
+            "maxTessellationControlPerVertexInputComponents": 128,
+            "maxTessellationControlPerVertexOutputComponents": 128,
+            "maxTessellationControlTotalOutputComponents": 4216,
+            "maxTessellationEvaluationInputComponents": 128,
+            "maxTessellationEvaluationOutputComponents": 128,
+            "maxTessellationGenerationLevel": 64,
+            "maxTessellationPatchSize": 32,
+            "maxTexelBufferElements": 134217728,
+            "maxTexelGatherOffset": 31,
+            "maxTexelOffset": 7,
+            "maxUniformBufferRange": 65536,
+            "maxVertexInputAttributeOffset": 2047,
+            "maxVertexInputAttributes": 32,
+            "maxVertexInputBindingStride": 2048,
+            "maxVertexInputBindings": 32,
+            "maxVertexOutputComponents": 128,
+            "maxViewportDimensions": [
+                16384,
+                16384
+            ],
+            "maxViewports": 16,
+            "minInterpolationOffset": -0.5,
+            "minMemoryMapAlignment": 64,
+            "minStorageBufferOffsetAlignment": 32,
+            "minTexelBufferOffsetAlignment": 16,
+            "minTexelGatherOffset": -32,
+            "minTexelOffset": -8,
+            "minUniformBufferOffsetAlignment": 256,
+            "mipmapPrecisionBits": 8,
+            "nonCoherentAtomSize": 64,
+            "optimalBufferCopyOffsetAlignment": 1,
+            "optimalBufferCopyRowPitchAlignment": 1,
+            "pointSizeGranularity": 0.125,
+            "pointSizeRange": [
+                1,
+                189.875
+            ],
+            "sampledImageColorSampleCounts": 15,
+            "sampledImageDepthSampleCounts": 15,
+            "sampledImageIntegerSampleCounts": 15,
+            "sampledImageStencilSampleCounts": 15,
+            "sparseAddressSpaceSize": -1,
+            "standardSampleLocations": 1,
+            "storageImageSampleCounts": 15,
+            "strictLines": 1,
+            "subPixelInterpolationOffsetBits": 4,
+            "subPixelPrecisionBits": 8,
+            "subTexelPrecisionBits": 8,
+            "timestampComputeAndGraphics": 1,
+            "timestampPeriod": 1,
+            "viewportBoundsRange": [
+                -32768,
+                32768
+            ],
+            "viewportSubPixelBits": 8
+        },
+        "pipelineCacheUUID": [
+            5,
+            245,
+            46,
+            196,
+            153,
+            80,
+            67,
+            114,
+            195,
+            180,
+            214,
+            97,
+            191,
+            27,
+            94,
+            115
+        ],
+        "sparseProperties": {
+            "residencyAlignedMipSize": 1,
+            "residencyNonResidentStrict": 0,
+            "residencyStandard2DBlockShape": 1,
+            "residencyStandard2DMultisampleBlockShape": 1,
+            "residencyStandard3DBlockShape": 1
+        },
+        "vendorID": 4318
+    },
+    "VkPhysicalDeviceMemoryProperties": {
+        "memoryHeaps": [
+            {
+                "flags": 1,
+                "size": 2119041024
+            },
+            {
+                "flags": 0,
+                "size": 8500805632
+            }
+        ],
+        "memoryTypes": [
+            {
+                "heapIndex": 1,
+                "propertyFlags": 0
+            },
+            {
+                "heapIndex": 1,
+                "propertyFlags": 0
+            },
+            {
+                "heapIndex": 1,
+                "propertyFlags": 0
+            },
+            {
+                "heapIndex": 1,
+                "propertyFlags": 0
+            },
+            {
+                "heapIndex": 1,
+                "propertyFlags": 0
+            },
+            {
+                "heapIndex": 1,
+                "propertyFlags": 0
+            },
+            {
+                "heapIndex": 1,
+                "propertyFlags": 0
+            },
+            {
+                "heapIndex": 0,
+                "propertyFlags": 1
+            },
+            {
+                "heapIndex": 0,
+                "propertyFlags": 1
+            },
+            {
+                "heapIndex": 1,
+                "propertyFlags": 6
+            },
+            {
+                "heapIndex": 1,
+                "propertyFlags": 14
+            }
+        ]
+    },
+    "ArrayOfVkExtensionProperties": [
+        {
+            "extensionName": "VK_KHR_swapchain",
+            "specVersion": 68
+        },
+        {
+            "extensionName": "VK_KHR_16bit_storage",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_bind_memory2",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_descriptor_update_template",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_dedicated_allocation",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_get_memory_requirements2",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_get_physical_device_properties2",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_image_format_list",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_maintenance1",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_maintenance2",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_push_descriptor",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_relaxed_block_layout",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_sampler_mirror_clamp_to_edge",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_sampler_ycbcr_conversion",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_shader_draw_parameters",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_storage_buffer_storage_class",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_memory",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_memory_win32",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_semaphore",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_semaphore_win32",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_win32_keyed_mutex",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_fence",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_fence_win32",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_variable_pointers",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHX_device_group",
+            "specVersion": 2
+        },
+        {
+            "extensionName": "VK_KHX_external_memory",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHX_external_memory_win32",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHX_external_semaphore",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHX_external_semaphore_win32",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHX_multiview",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHX_win32_keyed_mutex",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_EXT_blend_operation_advanced",
+            "specVersion": 2
+        },
+        {
+            "extensionName": "VK_EXT_depth_range_unrestricted",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_EXT_discard_rectangles",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_EXT_shader_subgroup_ballot",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_EXT_shader_subgroup_vote",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_NV_dedicated_allocation",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_NV_external_memory",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_NV_external_memory_win32",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_NV_glsl_shader",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_NV_win32_keyed_mutex",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_NVX_device_generated_commands",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_NVX_multiview_per_view_attributes",
+            "specVersion": 1
+        }
+    ],
+    "ArrayOfVkLayerProperties": [
+        {
+            "description": "NVIDIA Optimus layer",
+            "implementationVersion": 1,
+            "layerName": "VK_LAYER_NV_optimus",
+            "specVersion": 4194360
+        }
+    ],
+    "ArrayOfVkQueueFamilyProperties": [
+        {
+            "minImageTransferGranularity": {
+                "depth": 1,
+                "height": 1,
+                "width": 1
+            },
+            "queueCount": 16,
+            "queueFlags": 15,
+            "timestampValidBits": 64
+        },
+        {
+            "minImageTransferGranularity": {
+                "depth": 1,
+                "height": 1,
+                "width": 1
+            },
+            "queueCount": 1,
+            "queueFlags": 4,
+            "timestampValidBits": 64
+        }
+    ],
+    "ArrayOfVkFormatProperties": [
+        {
+            "formatID": 1,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 2,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 3,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 4,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 5,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 6,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 7,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 8,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 9,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 10,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 11,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 12,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 13,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 14,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 15,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 16,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 17,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 18,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 19,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 20,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 21,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 22,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 23,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 24,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 25,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 26,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 27,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 28,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 29,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 30,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 31,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 32,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 33,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 34,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 35,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 36,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 37,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 38,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 39,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 40,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 41,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 42,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 43,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 44,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 45,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 46,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 47,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 48,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 50177,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 49,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 50177,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 50,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 51,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 52,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 53,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 54,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 55,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 56,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 57,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 58,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 59,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 60,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 61,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 62,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 50177,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 63,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 64,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 65,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 66,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 67,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 68,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 69,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 70,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 71,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 72,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 73,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 74,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 75,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 76,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 77,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 78,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 79,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 80,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 81,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 82,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 83,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 84,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 85,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 86,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 87,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 88,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 89,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 90,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 91,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 92,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 93,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 94,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 95,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 96,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 97,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 98,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52359,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 99,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52359,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 100,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56711,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 101,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 102,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 103,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 104,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 105,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 106,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 107,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 108,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 109,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 110,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 111,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 112,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 113,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 114,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 115,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 116,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 117,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 118,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 119,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 120,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 121,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 122,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 123,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 124,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54785,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 125,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54785,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 126,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54785,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 127,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 128,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 129,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54785,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 130,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54785,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 131,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 132,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 133,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 134,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 135,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 136,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 137,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 138,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 139,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 140,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 141,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 142,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 143,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 144,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 145,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 146,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 147,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 148,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 149,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 150,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 151,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 152,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 153,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 154,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 155,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 156,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 157,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 158,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 159,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 160,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 161,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 162,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 163,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 164,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 165,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 166,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 167,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 168,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 169,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 170,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 171,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 172,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 173,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 174,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 175,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 176,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 177,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 178,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 179,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 180,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 181,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 182,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 183,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 184,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 1000156002,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 1,
+            "bufferFeatures": 1
+        }
+    ]
+}
\ No newline at end of file
diff --git a/src/third_party/vulkan-validation-layers/src/tests/device_profiles/intel_hd_graphics_520_skylake_gt2.json b/src/third_party/vulkan-validation-layers/src/tests/device_profiles/intel_hd_graphics_520_skylake_gt2.json
new file mode 100644
index 0000000..15dbd17
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/device_profiles/intel_hd_graphics_520_skylake_gt2.json
@@ -0,0 +1,1472 @@
+{
+    "$schema": "https://schema.khronos.org/vulkan/devsim_1_0_0.json#",
+    "comments": {
+        "info": "Vulkan Hardware Report generated via https://vulkan.gpuinfo.org",
+        "desc": "https://vulkan.gpuinfo.org/displayreport.php?id=2098"
+    },
+    "environment": {
+        "architecture": "x86_64",
+        "comment": "",
+        "name": "arch",
+        "reportversion": "1.4",
+        "submitter": "Caio Silva",
+        "version": "unknown"
+    },
+    "extended": {
+        "devicefeatures2": [],
+        "deviceproperties2": [
+            {
+                "extension": "VK_KHR_push_descriptor",
+                "name": "maxPushDescriptors",
+                "value": "32"
+            }
+        ]
+    },
+    "platformdetails": [],
+    "surfacecapabilites": {
+        "maxImageArrayLayers": 1,
+        "maxImageCount": 0,
+        "maxImageExtent": {
+            "height": 755,
+            "width": 927
+        },
+        "minImageCount": 2,
+        "minImageExtent": {
+            "height": 755,
+            "width": 927
+        },
+        "presentmodes": [
+            0,
+            1,
+            2
+        ],
+        "supportedCompositeAlpha": 9,
+        "supportedTransforms": 1,
+        "supportedUsageFlags": 23,
+        "surfaceExtension": "VK_KHR_xcb_surface",
+        "surfaceformats": [
+            {
+                "colorSpace": 0,
+                "format": 50
+            },
+            {
+                "colorSpace": 0,
+                "format": 44
+            }
+        ],
+        "validSurface": true
+    },
+    "VkPhysicalDeviceFeatures": {
+        "alphaToOne": 1,
+        "depthBiasClamp": 1,
+        "depthBounds": 0,
+        "depthClamp": 1,
+        "drawIndirectFirstInstance": 1,
+        "dualSrcBlend": 1,
+        "fillModeNonSolid": 1,
+        "fragmentStoresAndAtomics": 1,
+        "fullDrawIndexUint32": 1,
+        "geometryShader": 1,
+        "imageCubeArray": 1,
+        "independentBlend": 1,
+        "inheritedQueries": 1,
+        "largePoints": 1,
+        "logicOp": 1,
+        "multiDrawIndirect": 1,
+        "multiViewport": 1,
+        "occlusionQueryPrecise": 1,
+        "pipelineStatisticsQuery": 1,
+        "robustBufferAccess": 1,
+        "sampleRateShading": 1,
+        "samplerAnisotropy": 1,
+        "shaderClipDistance": 1,
+        "shaderCullDistance": 1,
+        "shaderFloat64": 1,
+        "shaderImageGatherExtended": 1,
+        "shaderInt16": 0,
+        "shaderInt64": 1,
+        "shaderResourceMinLod": 0,
+        "shaderResourceResidency": 0,
+        "shaderSampledImageArrayDynamicIndexing": 1,
+        "shaderStorageBufferArrayDynamicIndexing": 1,
+        "shaderStorageImageArrayDynamicIndexing": 1,
+        "shaderStorageImageExtendedFormats": 1,
+        "shaderStorageImageMultisample": 0,
+        "shaderStorageImageReadWithoutFormat": 0,
+        "shaderStorageImageWriteWithoutFormat": 1,
+        "shaderTessellationAndGeometryPointSize": 1,
+        "shaderUniformBufferArrayDynamicIndexing": 1,
+        "sparseBinding": 0,
+        "sparseResidency16Samples": 0,
+        "sparseResidency2Samples": 0,
+        "sparseResidency4Samples": 0,
+        "sparseResidency8Samples": 0,
+        "sparseResidencyAliased": 0,
+        "sparseResidencyBuffer": 0,
+        "sparseResidencyImage2D": 0,
+        "sparseResidencyImage3D": 0,
+        "tessellationShader": 1,
+        "textureCompressionASTC_LDR": 1,
+        "textureCompressionBC": 1,
+        "textureCompressionETC2": 1,
+        "variableMultisampleRate": 0,
+        "vertexPipelineStoresAndAtomics": 1,
+        "wideLines": 1
+    },
+    "VkPhysicalDeviceProperties": {
+        "apiVersion": 4194358,
+        "deviceID": 6422,
+        "deviceName": "Intel(R) HD Graphics 520 (Skylake GT2)",
+        "deviceType": 1,
+        "driverVersion": 71311364,
+        "limits": {
+            "bufferImageGranularity": 64,
+            "discreteQueuePriorities": 1,
+            "framebufferColorSampleCounts": 31,
+            "framebufferDepthSampleCounts": 31,
+            "framebufferNoAttachmentsSampleCounts": 31,
+            "framebufferStencilSampleCounts": 31,
+            "lineWidthGranularity": 0.0078125,
+            "lineWidthRange": [
+                0,
+                7.9921875
+            ],
+            "maxBoundDescriptorSets": 8,
+            "maxClipDistances": 8,
+            "maxColorAttachments": 8,
+            "maxCombinedClipAndCullDistances": 8,
+            "maxComputeSharedMemorySize": 32768,
+            "maxComputeWorkGroupCount": [
+                65535,
+                65535,
+                65535
+            ],
+            "maxComputeWorkGroupInvocations": 896,
+            "maxComputeWorkGroupSize": [
+                896,
+                896,
+                896
+            ],
+            "maxCullDistances": 8,
+            "maxDescriptorSetInputAttachments": 256,
+            "maxDescriptorSetSampledImages": 256,
+            "maxDescriptorSetSamplers": 256,
+            "maxDescriptorSetStorageBuffers": 256,
+            "maxDescriptorSetStorageBuffersDynamic": 8,
+            "maxDescriptorSetStorageImages": 256,
+            "maxDescriptorSetUniformBuffers": 256,
+            "maxDescriptorSetUniformBuffersDynamic": 8,
+            "maxDrawIndexedIndexValue": 4294967295,
+            "maxDrawIndirectCount": 4294967295,
+            "maxFragmentCombinedOutputResources": 8,
+            "maxFragmentDualSrcAttachments": 1,
+            "maxFragmentInputComponents": 128,
+            "maxFragmentOutputAttachments": 8,
+            "maxFramebufferHeight": 16384,
+            "maxFramebufferLayers": 2048,
+            "maxFramebufferWidth": 16384,
+            "maxGeometryInputComponents": 64,
+            "maxGeometryOutputComponents": 128,
+            "maxGeometryOutputVertices": 256,
+            "maxGeometryShaderInvocations": 32,
+            "maxGeometryTotalOutputComponents": 1024,
+            "maxImageArrayLayers": 2048,
+            "maxImageDimension1D": 16384,
+            "maxImageDimension2D": 16384,
+            "maxImageDimension3D": 2048,
+            "maxImageDimensionCube": 16384,
+            "maxInterpolationOffset": 0.4375,
+            "maxMemoryAllocationCount": 4294967295,
+            "maxPerStageDescriptorInputAttachments": 64,
+            "maxPerStageDescriptorSampledImages": 128,
+            "maxPerStageDescriptorSamplers": 128,
+            "maxPerStageDescriptorStorageBuffers": 64,
+            "maxPerStageDescriptorStorageImages": 64,
+            "maxPerStageDescriptorUniformBuffers": 64,
+            "maxPerStageResources": 250,
+            "maxPushConstantsSize": 128,
+            "maxSampleMaskWords": 1,
+            "maxSamplerAllocationCount": 65536,
+            "maxSamplerAnisotropy": 16,
+            "maxSamplerLodBias": 16,
+            "maxStorageBufferRange": 1073741824,
+            "maxTessellationControlPerPatchOutputComponents": 128,
+            "maxTessellationControlPerVertexInputComponents": 128,
+            "maxTessellationControlPerVertexOutputComponents": 128,
+            "maxTessellationControlTotalOutputComponents": 2048,
+            "maxTessellationEvaluationInputComponents": 128,
+            "maxTessellationEvaluationOutputComponents": 128,
+            "maxTessellationGenerationLevel": 64,
+            "maxTessellationPatchSize": 32,
+            "maxTexelBufferElements": 134217728,
+            "maxTexelGatherOffset": 31,
+            "maxTexelOffset": 7,
+            "maxUniformBufferRange": 134217728,
+            "maxVertexInputAttributeOffset": 2047,
+            "maxVertexInputAttributes": 28,
+            "maxVertexInputBindingStride": 2048,
+            "maxVertexInputBindings": 28,
+            "maxVertexOutputComponents": 128,
+            "maxViewportDimensions": [
+                16384,
+                16384
+            ],
+            "maxViewports": 16,
+            "minInterpolationOffset": -0.5,
+            "minMemoryMapAlignment": 4096,
+            "minStorageBufferOffsetAlignment": 4,
+            "minTexelBufferOffsetAlignment": 1,
+            "minTexelGatherOffset": -32,
+            "minTexelOffset": -8,
+            "minUniformBufferOffsetAlignment": 16,
+            "mipmapPrecisionBits": 4,
+            "nonCoherentAtomSize": 64,
+            "optimalBufferCopyOffsetAlignment": 128,
+            "optimalBufferCopyRowPitchAlignment": 128,
+            "pointSizeGranularity": 0.125,
+            "pointSizeRange": [
+                0.125,
+                255.875
+            ],
+            "sampledImageColorSampleCounts": 31,
+            "sampledImageDepthSampleCounts": 31,
+            "sampledImageIntegerSampleCounts": 1,
+            "sampledImageStencilSampleCounts": 31,
+            "sparseAddressSpaceSize": 0,
+            "standardSampleLocations": 1,
+            "storageImageSampleCounts": 1,
+            "strictLines": 0,
+            "subPixelInterpolationOffsetBits": 4,
+            "subPixelPrecisionBits": 4,
+            "subTexelPrecisionBits": 4,
+            "timestampComputeAndGraphics": 0,
+            "timestampPeriod": 83.3333358765,
+            "viewportBoundsRange": [
+                -32768,
+                32767
+            ],
+            "viewportSubPixelBits": 13
+        },
+        "pipelineCacheUUID": [
+            55,
+            106,
+            221,
+            116,
+            216,
+            244,
+            14,
+            126,
+            210,
+            30,
+            145,
+            74,
+            98,
+            39,
+            52,
+            89
+        ],
+        "sparseProperties": {
+            "residencyAlignedMipSize": 0,
+            "residencyNonResidentStrict": 0,
+            "residencyStandard2DBlockShape": 0,
+            "residencyStandard2DMultisampleBlockShape": 0,
+            "residencyStandard3DBlockShape": 0
+        },
+        "vendorID": 32902
+    },
+    "VkPhysicalDeviceMemoryProperties": {
+        "memoryHeaps": [
+            {
+                "flags": 1,
+                "size": 11289211904
+            },
+            {
+                "flags": 1,
+                "size": 1073741824
+            }
+        ],
+        "memoryTypes": [
+            {
+                "heapIndex": 0,
+                "propertyFlags": 15
+            },
+            {
+                "heapIndex": 1,
+                "propertyFlags": 15
+            }
+        ]
+    },
+    "ArrayOfVkExtensionProperties": [
+        {
+            "extensionName": "VK_KHR_dedicated_allocation",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_descriptor_update_template",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_memory",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_external_memory_fd",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_get_memory_requirements2",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_incremental_present",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_maintenance1",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_push_descriptor",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_sampler_mirror_clamp_to_edge",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_shader_draw_parameters",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_storage_buffer_storage_class",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_swapchain",
+            "specVersion": 68
+        },
+        {
+            "extensionName": "VK_KHR_variable_pointers",
+            "specVersion": 1
+        }
+    ],
+    "ArrayOfVkLayerProperties": [],
+    "ArrayOfVkQueueFamilyProperties": [
+        {
+            "minImageTransferGranularity": {
+                "depth": 1,
+                "height": 1,
+                "width": 1
+            },
+            "queueCount": 1,
+            "queueFlags": 7,
+            "timestampValidBits": 36
+        }
+    ],
+    "ArrayOfVkFormatProperties": [
+        {
+            "formatID": 1,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 2,
+            "linearTilingFeatures": 56449,
+            "optimalTilingFeatures": 56449,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 3,
+            "linearTilingFeatures": 56449,
+            "optimalTilingFeatures": 56449,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 4,
+            "linearTilingFeatures": 56705,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 5,
+            "linearTilingFeatures": 56705,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 6,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 7,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 8,
+            "linearTilingFeatures": 56705,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 9,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 10,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 11,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 12,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 13,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 14,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 15,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 16,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 17,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 18,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 19,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 20,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 21,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 22,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 23,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 24,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 25,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 26,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 27,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 50177,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 28,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 50177,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 29,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 30,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 31,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 32,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 33,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 34,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 35,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 36,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 37,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 38,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 39,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 40,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 41,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 42,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 43,
+            "linearTilingFeatures": 56705,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 44,
+            "linearTilingFeatures": 56705,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 45,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 46,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 47,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 48,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 49,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 50,
+            "linearTilingFeatures": 56705,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 51,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 52,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 53,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 54,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 55,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 56,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 57,
+            "linearTilingFeatures": 56705,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 58,
+            "linearTilingFeatures": 56705,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 59,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 60,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 61,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 62,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 63,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 64,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 65,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 66,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 67,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 68,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 69,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 70,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 71,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 72,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 73,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 74,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 75,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 76,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 77,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 78,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 79,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 80,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 81,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 82,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 83,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 84,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 85,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 86,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 87,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 88,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 50177,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 89,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 50177,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 90,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 91,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 92,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 93,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 94,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 95,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 96,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 97,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 98,
+            "linearTilingFeatures": 52359,
+            "optimalTilingFeatures": 52359,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 99,
+            "linearTilingFeatures": 52359,
+            "optimalTilingFeatures": 52359,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 100,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 101,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 102,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 103,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 104,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 50177,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 105,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 50177,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 106,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 107,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 108,
+            "linearTilingFeatures": 52355,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 109,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 110,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 111,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 112,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 113,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 114,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 115,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 116,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 117,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 118,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 119,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 120,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 121,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 122,
+            "linearTilingFeatures": 56707,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 123,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 124,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 52737,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 125,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 52737,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 126,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 52737,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 127,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 52737,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 128,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 129,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 52737,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 130,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 52737,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 131,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 132,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 133,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 134,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 135,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 136,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 137,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 138,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 139,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 140,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 141,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 142,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 143,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 144,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 145,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 146,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 147,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 148,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 149,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 150,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 151,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 152,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 153,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 154,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 155,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 156,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 157,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 158,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 159,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 160,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 161,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 162,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 163,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 164,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 165,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 166,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 167,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 168,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 169,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 170,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 171,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 172,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 173,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 174,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 175,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 176,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 177,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 178,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 179,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 180,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 181,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 182,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 183,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 184,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        }
+    ]
+}
\ No newline at end of file
diff --git a/src/third_party/vulkan-validation-layers/src/tests/device_profiles/mobile_chip.json b/src/third_party/vulkan-validation-layers/src/tests/device_profiles/mobile_chip.json
new file mode 100644
index 0000000..d24cd2e
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/device_profiles/mobile_chip.json
@@ -0,0 +1,1436 @@
+{
+    "$schema": "https://schema.khronos.org/vulkan/devsim_1_0_0.json#",
+    "comments": {
+        "info": "Vulkan Hardware Report generated via https://vulkan.gpuinfo.org",
+        "desc": ""
+    },
+    "environment": {
+        "architecture": "",
+        "comment": "",
+        "name": "",
+        "reportversion": "1.4",
+        "submitter": "",
+        "version": ""
+    },
+    "extended": {
+        "devicefeatures2": [],
+        "deviceproperties2": []
+    },
+    "platformdetails": {
+        "android.BuildID": "",
+        "android.BuildVersionIncremental": "",
+        "android.ProductManufacturer": "",
+        "android.ProductModel": ""
+    },
+    "surfacecapabilites": {
+        "maxImageArrayLayers": 1,
+        "maxImageCount": 3,
+        "maxImageExtent": {
+            "height": 4096,
+            "width": 4096
+        },
+        "minImageCount": 2,
+        "minImageExtent": {
+            "height": 1,
+            "width": 1
+        },
+        "presentmodes": [
+            1,
+            2
+        ],
+        "supportedCompositeAlpha": 8,
+        "supportedTransforms": 271,
+        "supportedUsageFlags": 159,
+        "surfaceExtension": "VK_KHR_android_surface",
+        "surfaceformats": [
+            {
+                "colorSpace": 0,
+                "format": 37
+            },
+            {
+                "colorSpace": 0,
+                "format": 43
+            },
+            {
+                "colorSpace": 0,
+                "format": 4
+            }
+        ],
+        "validSurface": true
+    },
+    "VkPhysicalDeviceFeatures": {
+        "alphaToOne": 0,
+        "depthBiasClamp": 1,
+        "depthBounds": 0,
+        "depthClamp": 0,
+        "drawIndirectFirstInstance": 1,
+        "dualSrcBlend": 0,
+        "fillModeNonSolid": 0,
+        "fragmentStoresAndAtomics": 1,
+        "fullDrawIndexUint32": 1,
+        "geometryShader": 0,
+        "imageCubeArray": 0,
+        "independentBlend": 1,
+        "inheritedQueries": 0,
+        "largePoints": 1,
+        "logicOp": 0,
+        "multiDrawIndirect": 0,
+        "multiViewport": 0,
+        "occlusionQueryPrecise": 0,
+        "pipelineStatisticsQuery": 0,
+        "robustBufferAccess": 1,
+        "sampleRateShading": 0,
+        "samplerAnisotropy": 0,
+        "shaderClipDistance": 0,
+        "shaderCullDistance": 0,
+        "shaderFloat64": 0,
+        "shaderImageGatherExtended": 1,
+        "shaderInt16": 0,
+        "shaderInt64": 0,
+        "shaderResourceMinLod": 0,
+        "shaderResourceResidency": 0,
+        "shaderSampledImageArrayDynamicIndexing": 1,
+        "shaderStorageBufferArrayDynamicIndexing": 1,
+        "shaderStorageImageArrayDynamicIndexing": 1,
+        "shaderStorageImageExtendedFormats": 0,
+        "shaderStorageImageMultisample": 0,
+        "shaderStorageImageReadWithoutFormat": 0,
+        "shaderStorageImageWriteWithoutFormat": 0,
+        "shaderTessellationAndGeometryPointSize": 0,
+        "shaderUniformBufferArrayDynamicIndexing": 1,
+        "sparseBinding": 0,
+        "sparseResidency16Samples": 0,
+        "sparseResidency2Samples": 0,
+        "sparseResidency4Samples": 0,
+        "sparseResidency8Samples": 0,
+        "sparseResidencyAliased": 0,
+        "sparseResidencyBuffer": 0,
+        "sparseResidencyImage2D": 0,
+        "sparseResidencyImage3D": 0,
+        "tessellationShader": 0,
+        "textureCompressionASTC_LDR": 1,
+        "textureCompressionBC": 0,
+        "textureCompressionETC2": 1,
+        "variableMultisampleRate": 0,
+        "vertexPipelineStoresAndAtomics": 0,
+        "wideLines": 0
+    },
+    "VkPhysicalDeviceProperties": {
+        "apiVersion": 4194322,
+        "deviceID": 122683393,
+        "deviceName": "Mali-T760",
+        "deviceType": 1,
+        "driverVersion": 2734070174,
+        "limits": {
+            "bufferImageGranularity": 4096,
+            "discreteQueuePriorities": 2,
+            "framebufferColorSampleCounts": 5,
+            "framebufferDepthSampleCounts": 5,
+            "framebufferNoAttachmentsSampleCounts": 5,
+            "framebufferStencilSampleCounts": 5,
+            "lineWidthGranularity": 0,
+            "lineWidthRange": [
+                1,
+                1
+            ],
+            "maxBoundDescriptorSets": 4,
+            "maxClipDistances": 0,
+            "maxColorAttachments": 4,
+            "maxCombinedClipAndCullDistances": 0,
+            "maxComputeSharedMemorySize": 32768,
+            "maxComputeWorkGroupCount": [
+                65536,
+                65536,
+                65536
+            ],
+            "maxComputeWorkGroupInvocations": 256,
+            "maxComputeWorkGroupSize": [
+                256,
+                256,
+                256
+            ],
+            "maxCullDistances": 0,
+            "maxDescriptorSetInputAttachments": 4,
+            "maxDescriptorSetSampledImages": 96,
+            "maxDescriptorSetSamplers": 768,
+            "maxDescriptorSetStorageBuffers": 24,
+            "maxDescriptorSetStorageBuffersDynamic": 4,
+            "maxDescriptorSetStorageImages": 24,
+            "maxDescriptorSetUniformBuffers": 72,
+            "maxDescriptorSetUniformBuffersDynamic": 8,
+            "maxDrawIndexedIndexValue": 4294967295,
+            "maxDrawIndirectCount": 1,
+            "maxFragmentCombinedOutputResources": 12,
+            "maxFragmentDualSrcAttachments": 0,
+            "maxFragmentInputComponents": 128,
+            "maxFragmentOutputAttachments": 4,
+            "maxFramebufferHeight": 8192,
+            "maxFramebufferLayers": 256,
+            "maxFramebufferWidth": 8192,
+            "maxGeometryInputComponents": 0,
+            "maxGeometryOutputComponents": 0,
+            "maxGeometryOutputVertices": 0,
+            "maxGeometryShaderInvocations": 0,
+            "maxGeometryTotalOutputComponents": 0,
+            "maxImageArrayLayers": 256,
+            "maxImageDimension1D": 8192,
+            "maxImageDimension2D": 8192,
+            "maxImageDimension3D": 4096,
+            "maxImageDimensionCube": 8192,
+            "maxInterpolationOffset": 0,
+            "maxMemoryAllocationCount": 4294967295,
+            "maxPerStageDescriptorInputAttachments": 4,
+            "maxPerStageDescriptorSampledImages": 16,
+            "maxPerStageDescriptorSamplers": 128,
+            "maxPerStageDescriptorStorageBuffers": 4,
+            "maxPerStageDescriptorStorageImages": 4,
+            "maxPerStageDescriptorUniformBuffers": 12,
+            "maxPerStageResources": 44,
+            "maxPushConstantsSize": 128,
+            "maxSampleMaskWords": 1,
+            "maxSamplerAllocationCount": 16384,
+            "maxSamplerAnisotropy": 1,
+            "maxSamplerLodBias": 2,
+            "maxStorageBufferRange": 134217728,
+            "maxTessellationControlPerPatchOutputComponents": 0,
+            "maxTessellationControlPerVertexInputComponents": 0,
+            "maxTessellationControlPerVertexOutputComponents": 0,
+            "maxTessellationControlTotalOutputComponents": 0,
+            "maxTessellationEvaluationInputComponents": 0,
+            "maxTessellationEvaluationOutputComponents": 0,
+            "maxTessellationGenerationLevel": 0,
+            "maxTessellationPatchSize": 0,
+            "maxTexelBufferElements": 65536,
+            "maxTexelGatherOffset": 7,
+            "maxTexelOffset": 7,
+            "maxUniformBufferRange": 16384,
+            "maxVertexInputAttributeOffset": 2047,
+            "maxVertexInputAttributes": 16,
+            "maxVertexInputBindingStride": 2048,
+            "maxVertexInputBindings": 16,
+            "maxVertexOutputComponents": 128,
+            "maxViewportDimensions": [
+                8192,
+                8192
+            ],
+            "maxViewports": 1,
+            "minInterpolationOffset": 0,
+            "minMemoryMapAlignment": 64,
+            "minStorageBufferOffsetAlignment": 256,
+            "minTexelBufferOffsetAlignment": 256,
+            "minTexelGatherOffset": -8,
+            "minTexelOffset": -8,
+            "minUniformBufferOffsetAlignment": 16,
+            "mipmapPrecisionBits": 4,
+            "nonCoherentAtomSize": 64,
+            "optimalBufferCopyOffsetAlignment": 64,
+            "optimalBufferCopyRowPitchAlignment": 64,
+            "pointSizeGranularity": 0.0625,
+            "pointSizeRange": [
+                1,
+                64
+            ],
+            "sampledImageColorSampleCounts": 5,
+            "sampledImageDepthSampleCounts": 5,
+            "sampledImageIntegerSampleCounts": 1,
+            "sampledImageStencilSampleCounts": 5,
+            "sparseAddressSpaceSize": 0,
+            "standardSampleLocations": 1,
+            "storageImageSampleCounts": 1,
+            "strictLines": 1,
+            "subPixelInterpolationOffsetBits": 0,
+            "subPixelPrecisionBits": 4,
+            "subTexelPrecisionBits": 4,
+            "timestampComputeAndGraphics": 0,
+            "timestampPeriod": 0,
+            "viewportBoundsRange": [
+                -16384,
+                16383
+            ],
+            "viewportSubPixelBits": 0
+        },
+        "pipelineCacheUUID": [
+            88,
+            5,
+            49,
+            68,
+            63,
+            234,
+            15,
+            79,
+            105,
+            109,
+            177,
+            149,
+            123,
+            226,
+            236,
+            179
+        ],
+        "sparseProperties": {
+            "residencyAlignedMipSize": 0,
+            "residencyNonResidentStrict": 0,
+            "residencyStandard2DBlockShape": 0,
+            "residencyStandard2DMultisampleBlockShape": 0,
+            "residencyStandard3DBlockShape": 0
+        },
+        "vendorID": 5045
+    },
+    "VkPhysicalDeviceMemoryProperties": {
+        "memoryHeaps": [
+            {
+                "flags": 1,
+                "size": 2108874752
+            }
+        ],
+        "memoryTypes": [
+            {
+                "heapIndex": 0,
+                "propertyFlags": 7
+            },
+            {
+                "heapIndex": 0,
+                "propertyFlags": 11
+            },
+            {
+                "heapIndex": 0,
+                "propertyFlags": 17
+            }
+        ]
+    },
+    "ArrayOfVkExtensionProperties": [
+        {
+            "extensionName": "VK_KHR_swapchain",
+            "specVersion": 68
+        }
+    ],
+    "ArrayOfVkLayerProperties": [],
+    "ArrayOfVkQueueFamilyProperties": [
+        {
+            "minImageTransferGranularity": {
+                "depth": 1,
+                "height": 1,
+                "width": 1
+            },
+            "queueCount": 2,
+            "queueFlags": 7,
+            "timestampValidBits": 0
+        },
+        {
+            "minImageTransferGranularity": {
+                "depth": 1,
+                "height": 1,
+                "width": 1
+            },
+            "queueCount": 2,
+            "queueFlags": 1,
+            "timestampValidBits": 0
+        }
+    ],
+    "ArrayOfVkFormatProperties": [
+        {
+            "formatID": 1,
+            "linearTilingFeatures": 1025,
+            "optimalTilingFeatures": 1025,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 2,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 3,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 4,
+            "linearTilingFeatures": 7553,
+            "optimalTilingFeatures": 7553,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 5,
+            "linearTilingFeatures": 7553,
+            "optimalTilingFeatures": 7553,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 6,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 7,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 8,
+            "linearTilingFeatures": 7553,
+            "optimalTilingFeatures": 7553,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 9,
+            "linearTilingFeatures": 7553,
+            "optimalTilingFeatures": 7553,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 10,
+            "linearTilingFeatures": 7553,
+            "optimalTilingFeatures": 7553,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 11,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 12,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 13,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3201,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 14,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3201,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 15,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 16,
+            "linearTilingFeatures": 7553,
+            "optimalTilingFeatures": 7553,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 17,
+            "linearTilingFeatures": 7553,
+            "optimalTilingFeatures": 7553,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 18,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 19,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 20,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3201,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 21,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3201,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 22,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 23,
+            "linearTilingFeatures": 7553,
+            "optimalTilingFeatures": 7553,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 24,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 25,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 26,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 27,
+            "linearTilingFeatures": 1025,
+            "optimalTilingFeatures": 1025,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 28,
+            "linearTilingFeatures": 1025,
+            "optimalTilingFeatures": 1025,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 29,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 30,
+            "linearTilingFeatures": 7553,
+            "optimalTilingFeatures": 7553,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 31,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 32,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 33,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 34,
+            "linearTilingFeatures": 1025,
+            "optimalTilingFeatures": 1025,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 35,
+            "linearTilingFeatures": 1025,
+            "optimalTilingFeatures": 1025,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 36,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 37,
+            "linearTilingFeatures": 7555,
+            "optimalTilingFeatures": 7555,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 38,
+            "linearTilingFeatures": 7555,
+            "optimalTilingFeatures": 7555,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 39,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 40,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 41,
+            "linearTilingFeatures": 3203,
+            "optimalTilingFeatures": 3203,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 42,
+            "linearTilingFeatures": 3203,
+            "optimalTilingFeatures": 3203,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 43,
+            "linearTilingFeatures": 7553,
+            "optimalTilingFeatures": 7553,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 44,
+            "linearTilingFeatures": 7553,
+            "optimalTilingFeatures": 7553,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 45,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 46,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 47,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 48,
+            "linearTilingFeatures": 1025,
+            "optimalTilingFeatures": 1025,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 49,
+            "linearTilingFeatures": 1025,
+            "optimalTilingFeatures": 1025,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 50,
+            "linearTilingFeatures": 7553,
+            "optimalTilingFeatures": 7553,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 51,
+            "linearTilingFeatures": 7555,
+            "optimalTilingFeatures": 7555,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 52,
+            "linearTilingFeatures": 7555,
+            "optimalTilingFeatures": 7555,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 53,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 54,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 55,
+            "linearTilingFeatures": 3203,
+            "optimalTilingFeatures": 3203,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 56,
+            "linearTilingFeatures": 3203,
+            "optimalTilingFeatures": 3203,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 57,
+            "linearTilingFeatures": 7553,
+            "optimalTilingFeatures": 7553,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 58,
+            "linearTilingFeatures": 7553,
+            "optimalTilingFeatures": 7553,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 59,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 60,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 61,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 62,
+            "linearTilingFeatures": 1025,
+            "optimalTilingFeatures": 1025,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 63,
+            "linearTilingFeatures": 1025,
+            "optimalTilingFeatures": 1025,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 64,
+            "linearTilingFeatures": 7553,
+            "optimalTilingFeatures": 7553,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 65,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 66,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 67,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 68,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3201,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 69,
+            "linearTilingFeatures": 1025,
+            "optimalTilingFeatures": 1025,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 70,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 3457,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 71,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 3457,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 72,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 73,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 74,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3201,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 75,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3201,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 76,
+            "linearTilingFeatures": 7553,
+            "optimalTilingFeatures": 7553,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 77,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 3457,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 78,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 3457,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 79,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 80,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 81,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3201,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 82,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3201,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 83,
+            "linearTilingFeatures": 7553,
+            "optimalTilingFeatures": 7553,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 84,
+            "linearTilingFeatures": 1025,
+            "optimalTilingFeatures": 1025,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 85,
+            "linearTilingFeatures": 1025,
+            "optimalTilingFeatures": 1025,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 86,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 87,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 88,
+            "linearTilingFeatures": 1025,
+            "optimalTilingFeatures": 1025,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 89,
+            "linearTilingFeatures": 1025,
+            "optimalTilingFeatures": 1025,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 90,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 91,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 3457,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 92,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 3457,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 93,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 94,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 95,
+            "linearTilingFeatures": 3203,
+            "optimalTilingFeatures": 3203,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 96,
+            "linearTilingFeatures": 3203,
+            "optimalTilingFeatures": 3203,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 97,
+            "linearTilingFeatures": 7555,
+            "optimalTilingFeatures": 7555,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 98,
+            "linearTilingFeatures": 3207,
+            "optimalTilingFeatures": 3207,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 99,
+            "linearTilingFeatures": 3207,
+            "optimalTilingFeatures": 3207,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 100,
+            "linearTilingFeatures": 7299,
+            "optimalTilingFeatures": 7299,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 101,
+            "linearTilingFeatures": 3203,
+            "optimalTilingFeatures": 3203,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 102,
+            "linearTilingFeatures": 3203,
+            "optimalTilingFeatures": 3203,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 103,
+            "linearTilingFeatures": 7299,
+            "optimalTilingFeatures": 7299,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 104,
+            "linearTilingFeatures": 1025,
+            "optimalTilingFeatures": 1025,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 105,
+            "linearTilingFeatures": 1025,
+            "optimalTilingFeatures": 1025,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 106,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 107,
+            "linearTilingFeatures": 3203,
+            "optimalTilingFeatures": 3203,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 108,
+            "linearTilingFeatures": 3203,
+            "optimalTilingFeatures": 3203,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 109,
+            "linearTilingFeatures": 7299,
+            "optimalTilingFeatures": 7299,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 110,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 111,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 112,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 113,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 114,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 115,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 116,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 117,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 118,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 119,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 120,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 121,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 122,
+            "linearTilingFeatures": 7297,
+            "optimalTilingFeatures": 7297,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 123,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 124,
+            "linearTilingFeatures": 5633,
+            "optimalTilingFeatures": 5633,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 125,
+            "linearTilingFeatures": 5633,
+            "optimalTilingFeatures": 5633,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 126,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 127,
+            "linearTilingFeatures": 1025,
+            "optimalTilingFeatures": 1025,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 128,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 129,
+            "linearTilingFeatures": 5633,
+            "optimalTilingFeatures": 5633,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 130,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 131,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 132,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 133,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 134,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 135,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 136,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 137,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 138,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 139,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 140,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 141,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 142,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 143,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 144,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 145,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 146,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 147,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 148,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 149,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 150,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 151,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 152,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 153,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 154,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 155,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 156,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 157,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 158,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 159,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 160,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 161,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 162,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 163,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 164,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 165,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 166,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 167,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 168,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 169,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 170,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 171,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 172,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 173,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 174,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 175,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 176,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 177,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 178,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 179,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 180,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 181,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 182,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 183,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 184,
+            "linearTilingFeatures": 5121,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        }
+    ]
+}
diff --git a/src/third_party/vulkan-validation-layers/src/tests/device_profiles/nvidia_tegra_x1.json b/src/third_party/vulkan-validation-layers/src/tests/device_profiles/nvidia_tegra_x1.json
new file mode 100644
index 0000000..b032970
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/device_profiles/nvidia_tegra_x1.json
@@ -0,0 +1,1462 @@
+{
+    "$schema": "https://schema.khronos.org/vulkan/devsim_1_0_0.json#",
+    "comments": {
+        "info": "Vulkan Hardware Report generated via https://vulkan.gpuinfo.org",
+        "desc": "https://vulkan.gpuinfo.org/displayreport.php?id=2115"
+    },
+    "environment": {
+        "architecture": "arm",
+        "comment": "",
+        "name": "android",
+        "reportversion": "1.4",
+        "submitter": "docofkult",
+        "version": "8.1.0"
+    },
+    "extended": {
+        "devicefeatures2": [],
+        "deviceproperties2": []
+    },
+    "platformdetails": {
+        "android.BuildID": "OPP5.170921.005",
+        "android.BuildVersionIncremental": "4373449",
+        "android.ProductManufacturer": "Google",
+        "android.ProductModel": "Pixel C"
+    },
+    "surfacecapabilites": {
+        "maxImageArrayLayers": 1,
+        "maxImageCount": 3,
+        "maxImageExtent": {
+            "height": 4096,
+            "width": 4096
+        },
+        "minImageCount": 2,
+        "minImageExtent": {
+            "height": 1,
+            "width": 1
+        },
+        "presentmodes": [
+            1,
+            2
+        ],
+        "supportedCompositeAlpha": 8,
+        "supportedTransforms": 271,
+        "supportedUsageFlags": 159,
+        "surfaceExtension": "VK_KHR_android_surface",
+        "surfaceformats": [
+            {
+                "colorSpace": 0,
+                "format": 37
+            },
+            {
+                "colorSpace": 0,
+                "format": 43
+            },
+            {
+                "colorSpace": 0,
+                "format": 4
+            }
+        ],
+        "validSurface": true
+    },
+    "VkPhysicalDeviceFeatures": {
+        "alphaToOne": 1,
+        "depthBiasClamp": 1,
+        "depthBounds": 1,
+        "depthClamp": 1,
+        "drawIndirectFirstInstance": 1,
+        "dualSrcBlend": 1,
+        "fillModeNonSolid": 1,
+        "fragmentStoresAndAtomics": 1,
+        "fullDrawIndexUint32": 1,
+        "geometryShader": 1,
+        "imageCubeArray": 1,
+        "independentBlend": 1,
+        "inheritedQueries": 1,
+        "largePoints": 1,
+        "logicOp": 1,
+        "multiDrawIndirect": 1,
+        "multiViewport": 1,
+        "occlusionQueryPrecise": 1,
+        "pipelineStatisticsQuery": 1,
+        "robustBufferAccess": 1,
+        "sampleRateShading": 1,
+        "samplerAnisotropy": 1,
+        "shaderClipDistance": 1,
+        "shaderCullDistance": 1,
+        "shaderFloat64": 1,
+        "shaderImageGatherExtended": 1,
+        "shaderInt16": 0,
+        "shaderInt64": 1,
+        "shaderResourceMinLod": 1,
+        "shaderResourceResidency": 1,
+        "shaderSampledImageArrayDynamicIndexing": 1,
+        "shaderStorageBufferArrayDynamicIndexing": 1,
+        "shaderStorageImageArrayDynamicIndexing": 1,
+        "shaderStorageImageExtendedFormats": 1,
+        "shaderStorageImageMultisample": 1,
+        "shaderStorageImageReadWithoutFormat": 1,
+        "shaderStorageImageWriteWithoutFormat": 1,
+        "shaderTessellationAndGeometryPointSize": 1,
+        "shaderUniformBufferArrayDynamicIndexing": 1,
+        "sparseBinding": 1,
+        "sparseResidency16Samples": 1,
+        "sparseResidency2Samples": 1,
+        "sparseResidency4Samples": 1,
+        "sparseResidency8Samples": 1,
+        "sparseResidencyAliased": 1,
+        "sparseResidencyBuffer": 1,
+        "sparseResidencyImage2D": 1,
+        "sparseResidencyImage3D": 1,
+        "tessellationShader": 1,
+        "textureCompressionASTC_LDR": 1,
+        "textureCompressionBC": 1,
+        "textureCompressionETC2": 1,
+        "variableMultisampleRate": 1,
+        "vertexPipelineStoresAndAtomics": 1,
+        "wideLines": 1
+    },
+    "VkPhysicalDeviceProperties": {
+        "apiVersion": 4194341,
+        "deviceID": 2461664215,
+        "deviceName": "NVIDIA Tegra X1",
+        "deviceType": 1,
+        "driverVersion": 1514143744,
+        "limits": {
+            "bufferImageGranularity": 1024,
+            "discreteQueuePriorities": 2,
+            "framebufferColorSampleCounts": 15,
+            "framebufferDepthSampleCounts": 15,
+            "framebufferNoAttachmentsSampleCounts": 15,
+            "framebufferStencilSampleCounts": 31,
+            "lineWidthGranularity": 0.125,
+            "lineWidthRange": [
+                0.5,
+                10
+            ],
+            "maxBoundDescriptorSets": 8,
+            "maxClipDistances": 8,
+            "maxColorAttachments": 8,
+            "maxCombinedClipAndCullDistances": 8,
+            "maxComputeSharedMemorySize": 49152,
+            "maxComputeWorkGroupCount": [
+                2147483647,
+                65535,
+                65535
+            ],
+            "maxComputeWorkGroupInvocations": 1536,
+            "maxComputeWorkGroupSize": [
+                1536,
+                1024,
+                64
+            ],
+            "maxCullDistances": 8,
+            "maxDescriptorSetInputAttachments": 8,
+            "maxDescriptorSetSampledImages": 49152,
+            "maxDescriptorSetSamplers": 4000,
+            "maxDescriptorSetStorageBuffers": 96,
+            "maxDescriptorSetStorageBuffersDynamic": 96,
+            "maxDescriptorSetStorageImages": 49152,
+            "maxDescriptorSetUniformBuffers": 72,
+            "maxDescriptorSetUniformBuffersDynamic": 72,
+            "maxDrawIndexedIndexValue": 4294967295,
+            "maxDrawIndirectCount": 4294967295,
+            "maxFragmentCombinedOutputResources": 16,
+            "maxFragmentDualSrcAttachments": 1,
+            "maxFragmentInputComponents": 128,
+            "maxFragmentOutputAttachments": 8,
+            "maxFramebufferHeight": 16384,
+            "maxFramebufferLayers": 2048,
+            "maxFramebufferWidth": 16384,
+            "maxGeometryInputComponents": 128,
+            "maxGeometryOutputComponents": 128,
+            "maxGeometryOutputVertices": 1024,
+            "maxGeometryShaderInvocations": 32,
+            "maxGeometryTotalOutputComponents": 1024,
+            "maxImageArrayLayers": 2048,
+            "maxImageDimension1D": 16384,
+            "maxImageDimension2D": 16384,
+            "maxImageDimension3D": 2048,
+            "maxImageDimensionCube": 16384,
+            "maxInterpolationOffset": 0.4375,
+            "maxMemoryAllocationCount": 4294967295,
+            "maxPerStageDescriptorInputAttachments": 8,
+            "maxPerStageDescriptorSampledImages": 8192,
+            "maxPerStageDescriptorSamplers": 4000,
+            "maxPerStageDescriptorStorageBuffers": 16,
+            "maxPerStageDescriptorStorageImages": 8192,
+            "maxPerStageDescriptorUniformBuffers": 12,
+            "maxPerStageResources": 24612,
+            "maxPushConstantsSize": 256,
+            "maxSampleMaskWords": 1,
+            "maxSamplerAllocationCount": 4000,
+            "maxSamplerAnisotropy": 16,
+            "maxSamplerLodBias": 15,
+            "maxStorageBufferRange": 2147483647,
+            "maxTessellationControlPerPatchOutputComponents": 120,
+            "maxTessellationControlPerVertexInputComponents": 128,
+            "maxTessellationControlPerVertexOutputComponents": 128,
+            "maxTessellationControlTotalOutputComponents": 4216,
+            "maxTessellationEvaluationInputComponents": 128,
+            "maxTessellationEvaluationOutputComponents": 128,
+            "maxTessellationGenerationLevel": 64,
+            "maxTessellationPatchSize": 32,
+            "maxTexelBufferElements": 134217728,
+            "maxTexelGatherOffset": 31,
+            "maxTexelOffset": 7,
+            "maxUniformBufferRange": 65536,
+            "maxVertexInputAttributeOffset": 2047,
+            "maxVertexInputAttributes": 32,
+            "maxVertexInputBindingStride": 2048,
+            "maxVertexInputBindings": 32,
+            "maxVertexOutputComponents": 128,
+            "maxViewportDimensions": [
+                16384,
+                16384
+            ],
+            "maxViewports": 16,
+            "minInterpolationOffset": -0.5,
+            "minMemoryMapAlignment": 64,
+            "minStorageBufferOffsetAlignment": 32,
+            "minTexelBufferOffsetAlignment": 16,
+            "minTexelGatherOffset": -32,
+            "minTexelOffset": -8,
+            "minUniformBufferOffsetAlignment": 256,
+            "mipmapPrecisionBits": 8,
+            "nonCoherentAtomSize": 64,
+            "optimalBufferCopyOffsetAlignment": 1,
+            "optimalBufferCopyRowPitchAlignment": 1,
+            "pointSizeGranularity": 0.125,
+            "pointSizeRange": [
+                1,
+                189.875
+            ],
+            "sampledImageColorSampleCounts": 15,
+            "sampledImageDepthSampleCounts": 15,
+            "sampledImageIntegerSampleCounts": 15,
+            "sampledImageStencilSampleCounts": 31,
+            "sparseAddressSpaceSize": -1,
+            "standardSampleLocations": 1,
+            "storageImageSampleCounts": 15,
+            "strictLines": 1,
+            "subPixelInterpolationOffsetBits": 4,
+            "subPixelPrecisionBits": 8,
+            "subTexelPrecisionBits": 8,
+            "timestampComputeAndGraphics": 1,
+            "timestampPeriod": 1,
+            "viewportBoundsRange": [
+                -32768,
+                32768
+            ],
+            "viewportSubPixelBits": 8
+        },
+        "pipelineCacheUUID": [
+            3,
+            97,
+            64,
+            32,
+            90,
+            58,
+            133,
+            178,
+            194,
+            22,
+            194,
+            34,
+            248,
+            46,
+            137,
+            8
+        ],
+        "sparseProperties": {
+            "residencyAlignedMipSize": 0,
+            "residencyNonResidentStrict": 1,
+            "residencyStandard2DBlockShape": 1,
+            "residencyStandard2DMultisampleBlockShape": 1,
+            "residencyStandard3DBlockShape": 1
+        },
+        "vendorID": 4318
+    },
+    "VkPhysicalDeviceMemoryProperties": {
+        "memoryHeaps": [
+            {
+                "flags": 1,
+                "size": 1610612736
+            }
+        ],
+        "memoryTypes": [
+            {
+                "heapIndex": 0,
+                "propertyFlags": 1
+            },
+            {
+                "heapIndex": 0,
+                "propertyFlags": 1
+            },
+            {
+                "heapIndex": 0,
+                "propertyFlags": 7
+            },
+            {
+                "heapIndex": 0,
+                "propertyFlags": 11
+            }
+        ]
+    },
+    "ArrayOfVkExtensionProperties": [
+        {
+            "extensionName": "VK_KHR_incremental_present",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_swapchain",
+            "specVersion": 68
+        },
+        {
+            "extensionName": "VK_KHR_maintenance1",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_shader_draw_parameters",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_sampler_mirror_clamp_to_edge",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_EXT_shader_subgroup_ballot",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_EXT_shader_subgroup_vote",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_NV_dedicated_allocation",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_NV_glsl_shader",
+            "specVersion": 1
+        }
+    ],
+    "ArrayOfVkLayerProperties": [],
+    "ArrayOfVkQueueFamilyProperties": [
+        {
+            "minImageTransferGranularity": {
+                "depth": 1,
+                "height": 1,
+                "width": 1
+            },
+            "queueCount": 16,
+            "queueFlags": 15,
+            "timestampValidBits": 64
+        }
+    ],
+    "ArrayOfVkFormatProperties": [
+        {
+            "formatID": 1,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 2,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 3,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 4,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 5,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 6,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 7,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 8,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 9,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 10,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 11,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 12,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 13,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 14,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 15,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 16,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 17,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 18,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 19,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 20,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 21,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 22,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 23,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 24,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 25,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 26,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 27,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 28,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 29,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 30,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 31,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 32,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 33,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 34,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 35,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 36,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 37,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 38,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 39,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 40,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 41,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 42,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 43,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 44,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 45,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 46,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 47,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 48,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 50177,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 49,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 50177,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 50,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 51,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 52,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 53,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 54,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 55,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 56,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 57,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 58,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56705,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 59,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 60,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 61,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 62,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 50177,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 63,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 64,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 65,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 66,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 67,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 68,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 69,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 70,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 71,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 72,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 73,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 74,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 75,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 76,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 77,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 78,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 79,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 80,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 81,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 82,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 83,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 84,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 85,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 86,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 87,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 88,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 89,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 90,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 91,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 92,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 93,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 94,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 95,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 96,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 97,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 98,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52359,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 99,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52359,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 100,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56711,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 101,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 102,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 103,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 104,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 105,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 106,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 107,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 108,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 52355,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 109,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 110,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 111,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 112,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 113,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 114,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 115,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 116,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 117,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 118,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 119,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 120,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 121,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 122,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 56707,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 123,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 124,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54785,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 125,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54785,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 126,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54785,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 127,
+            "linearTilingFeatures": 50177,
+            "optimalTilingFeatures": 50689,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 128,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 129,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54785,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 130,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54785,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 131,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 132,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 133,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 134,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 135,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 136,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 137,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 138,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 139,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 140,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 141,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 142,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 143,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 144,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 145,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 146,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 147,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 148,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 149,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 150,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 151,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 152,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 153,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 154,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 155,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 156,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 157,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 158,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 159,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 160,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 161,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 162,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 163,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 164,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 165,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 166,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 167,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 168,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 169,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 170,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 171,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 172,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 173,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 174,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 175,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 176,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 177,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 178,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 179,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 180,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 181,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 182,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 183,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 184,
+            "linearTilingFeatures": 54273,
+            "optimalTilingFeatures": 54273,
+            "bufferFeatures": 0
+        }
+    ]
+}
\ No newline at end of file
diff --git a/src/third_party/vulkan-validation-layers/src/tests/device_profiles/powervr_rogue_ge8300.json b/src/third_party/vulkan-validation-layers/src/tests/device_profiles/powervr_rogue_ge8300.json
new file mode 100644
index 0000000..cb88c92
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/device_profiles/powervr_rogue_ge8300.json
@@ -0,0 +1,1440 @@
+{
+    "$schema": "https://schema.khronos.org/vulkan/devsim_1_0_0.json#",
+    "comments": {
+        "info": "Vulkan Hardware Report generated via https://vulkan.gpuinfo.org",
+        "desc": "https://vulkan.gpuinfo.org/displayreport.php?id=1999"
+    },
+    "environment": {
+        "architecture": "arm",
+        "comment": "",
+        "name": "android",
+        "reportversion": "1.4",
+        "submitter": "",
+        "version": "7.0"
+    },
+    "extended": {
+        "devicefeatures2": [],
+        "deviceproperties2": []
+    },
+    "platformdetails": {
+        "android.BuildID": "NRD90M",
+        "android.BuildVersionIncremental": "1496325358",
+        "android.ProductManufacturer": "Acer",
+        "android.ProductModel": "B3-A40"
+    },
+    "surfacecapabilites": {
+        "maxImageArrayLayers": 1,
+        "maxImageCount": 3,
+        "maxImageExtent": {
+            "height": 4096,
+            "width": 4096
+        },
+        "minImageCount": 2,
+        "minImageExtent": {
+            "height": 1,
+            "width": 1
+        },
+        "presentmodes": [
+            1,
+            2
+        ],
+        "supportedCompositeAlpha": 8,
+        "supportedTransforms": 271,
+        "supportedUsageFlags": 159,
+        "surfaceExtension": "VK_KHR_android_surface",
+        "surfaceformats": [
+            {
+                "colorSpace": 0,
+                "format": 37
+            },
+            {
+                "colorSpace": 0,
+                "format": 43
+            },
+            {
+                "colorSpace": 0,
+                "format": 4
+            }
+        ],
+        "validSurface": true
+    },
+    "VkPhysicalDeviceFeatures": {
+        "alphaToOne": 1,
+        "depthBiasClamp": 1,
+        "depthBounds": 0,
+        "depthClamp": 0,
+        "drawIndirectFirstInstance": 0,
+        "dualSrcBlend": 0,
+        "fillModeNonSolid": 0,
+        "fragmentStoresAndAtomics": 0,
+        "fullDrawIndexUint32": 1,
+        "geometryShader": 0,
+        "imageCubeArray": 1,
+        "independentBlend": 1,
+        "inheritedQueries": 0,
+        "largePoints": 1,
+        "logicOp": 1,
+        "multiDrawIndirect": 1,
+        "multiViewport": 0,
+        "occlusionQueryPrecise": 1,
+        "pipelineStatisticsQuery": 0,
+        "robustBufferAccess": 1,
+        "sampleRateShading": 1,
+        "samplerAnisotropy": 1,
+        "shaderClipDistance": 0,
+        "shaderCullDistance": 0,
+        "shaderFloat64": 0,
+        "shaderImageGatherExtended": 0,
+        "shaderInt16": 0,
+        "shaderInt64": 0,
+        "shaderResourceMinLod": 0,
+        "shaderResourceResidency": 0,
+        "shaderSampledImageArrayDynamicIndexing": 0,
+        "shaderStorageBufferArrayDynamicIndexing": 0,
+        "shaderStorageImageArrayDynamicIndexing": 0,
+        "shaderStorageImageExtendedFormats": 0,
+        "shaderStorageImageMultisample": 0,
+        "shaderStorageImageReadWithoutFormat": 0,
+        "shaderStorageImageWriteWithoutFormat": 0,
+        "shaderTessellationAndGeometryPointSize": 0,
+        "shaderUniformBufferArrayDynamicIndexing": 1,
+        "sparseBinding": 0,
+        "sparseResidency16Samples": 0,
+        "sparseResidency2Samples": 0,
+        "sparseResidency4Samples": 0,
+        "sparseResidency8Samples": 0,
+        "sparseResidencyAliased": 0,
+        "sparseResidencyBuffer": 0,
+        "sparseResidencyImage2D": 0,
+        "sparseResidencyImage3D": 0,
+        "tessellationShader": 0,
+        "textureCompressionASTC_LDR": 0,
+        "textureCompressionBC": 0,
+        "textureCompressionETC2": 1,
+        "variableMultisampleRate": 0,
+        "vertexPipelineStoresAndAtomics": 0,
+        "wideLines": 0
+    },
+    "VkPhysicalDeviceProperties": {
+        "apiVersion": 4194307,
+        "deviceID": 1455646,
+        "deviceName": "PowerVR Rogue GE8300",
+        "deviceType": 2,
+        "driverVersion": 4490469,
+        "limits": {
+            "bufferImageGranularity": 1,
+            "discreteQueuePriorities": 3,
+            "framebufferColorSampleCounts": 7,
+            "framebufferDepthSampleCounts": 7,
+            "framebufferNoAttachmentsSampleCounts": 7,
+            "framebufferStencilSampleCounts": 7,
+            "lineWidthGranularity": 0,
+            "lineWidthRange": [
+                1,
+                1
+            ],
+            "maxBoundDescriptorSets": 4,
+            "maxClipDistances": 0,
+            "maxColorAttachments": 8,
+            "maxCombinedClipAndCullDistances": 0,
+            "maxComputeSharedMemorySize": 16384,
+            "maxComputeWorkGroupCount": [
+                65536,
+                65536,
+                65536
+            ],
+            "maxComputeWorkGroupInvocations": 512,
+            "maxComputeWorkGroupSize": [
+                512,
+                512,
+                64
+            ],
+            "maxCullDistances": 0,
+            "maxDescriptorSetInputAttachments": 256,
+            "maxDescriptorSetSampledImages": 256,
+            "maxDescriptorSetSamplers": 256,
+            "maxDescriptorSetStorageBuffers": 256,
+            "maxDescriptorSetStorageBuffersDynamic": 8,
+            "maxDescriptorSetStorageImages": 256,
+            "maxDescriptorSetUniformBuffers": 256,
+            "maxDescriptorSetUniformBuffersDynamic": 8,
+            "maxDrawIndexedIndexValue": 4294967295,
+            "maxDrawIndirectCount": 2147483648,
+            "maxFragmentCombinedOutputResources": 8,
+            "maxFragmentDualSrcAttachments": 0,
+            "maxFragmentInputComponents": 128,
+            "maxFragmentOutputAttachments": 8,
+            "maxFramebufferHeight": 8192,
+            "maxFramebufferLayers": 2048,
+            "maxFramebufferWidth": 8192,
+            "maxGeometryInputComponents": 128,
+            "maxGeometryOutputComponents": 128,
+            "maxGeometryOutputVertices": 256,
+            "maxGeometryShaderInvocations": 32,
+            "maxGeometryTotalOutputComponents": 1024,
+            "maxImageArrayLayers": 2048,
+            "maxImageDimension1D": 8192,
+            "maxImageDimension2D": 8192,
+            "maxImageDimension3D": 2048,
+            "maxImageDimensionCube": 8192,
+            "maxInterpolationOffset": 0.499999970198,
+            "maxMemoryAllocationCount": 9999,
+            "maxPerStageDescriptorInputAttachments": 128,
+            "maxPerStageDescriptorSampledImages": 128,
+            "maxPerStageDescriptorSamplers": 128,
+            "maxPerStageDescriptorStorageBuffers": 128,
+            "maxPerStageDescriptorStorageImages": 128,
+            "maxPerStageDescriptorUniformBuffers": 128,
+            "maxPerStageResources": 128,
+            "maxPushConstantsSize": 128,
+            "maxSampleMaskWords": 1,
+            "maxSamplerAllocationCount": 9999,
+            "maxSamplerAnisotropy": 16,
+            "maxSamplerLodBias": 15,
+            "maxStorageBufferRange": 134217728,
+            "maxTessellationControlPerPatchOutputComponents": 0,
+            "maxTessellationControlPerVertexInputComponents": 0,
+            "maxTessellationControlPerVertexOutputComponents": 0,
+            "maxTessellationControlTotalOutputComponents": 0,
+            "maxTessellationEvaluationInputComponents": 0,
+            "maxTessellationEvaluationOutputComponents": 0,
+            "maxTessellationGenerationLevel": 0,
+            "maxTessellationPatchSize": 0,
+            "maxTexelBufferElements": 65536,
+            "maxTexelGatherOffset": 0,
+            "maxTexelOffset": 7,
+            "maxUniformBufferRange": 134217728,
+            "maxVertexInputAttributeOffset": 65535,
+            "maxVertexInputAttributes": 16,
+            "maxVertexInputBindingStride": 2147483648,
+            "maxVertexInputBindings": 16,
+            "maxVertexOutputComponents": 128,
+            "maxViewportDimensions": [
+                8192,
+                8192
+            ],
+            "maxViewports": 16,
+            "minInterpolationOffset": -0.5,
+            "minMemoryMapAlignment": 64,
+            "minStorageBufferOffsetAlignment": 4,
+            "minTexelBufferOffsetAlignment": 4,
+            "minTexelGatherOffset": 0,
+            "minTexelOffset": -8,
+            "minUniformBufferOffsetAlignment": 4,
+            "mipmapPrecisionBits": 4,
+            "nonCoherentAtomSize": 128,
+            "optimalBufferCopyOffsetAlignment": 4,
+            "optimalBufferCopyRowPitchAlignment": 4,
+            "pointSizeGranularity": 0,
+            "pointSizeRange": [
+                1,
+                511
+            ],
+            "sampledImageColorSampleCounts": 7,
+            "sampledImageDepthSampleCounts": 7,
+            "sampledImageIntegerSampleCounts": 7,
+            "sampledImageStencilSampleCounts": 7,
+            "sparseAddressSpaceSize": 274877906944,
+            "standardSampleLocations": 1,
+            "storageImageSampleCounts": 7,
+            "strictLines": 0,
+            "subPixelInterpolationOffsetBits": 4,
+            "subPixelPrecisionBits": 8,
+            "subTexelPrecisionBits": 8,
+            "timestampComputeAndGraphics": 0,
+            "timestampPeriod": 0,
+            "viewportBoundsRange": [
+                -16384,
+                16383
+            ],
+            "viewportSubPixelBits": 0
+        },
+        "pipelineCacheUUID": [
+            50,
+            50,
+            32,
+            52,
+            48,
+            32,
+            53,
+            52,
+            32,
+            51,
+            48,
+            0,
+            0,
+            0,
+            0,
+            0
+        ],
+        "sparseProperties": {
+            "residencyAlignedMipSize": 0,
+            "residencyNonResidentStrict": 0,
+            "residencyStandard2DBlockShape": 0,
+            "residencyStandard2DMultisampleBlockShape": 0,
+            "residencyStandard3DBlockShape": 0
+        },
+        "vendorID": 4112
+    },
+    "VkPhysicalDeviceMemoryProperties": {
+        "memoryHeaps": [
+            {
+                "flags": 1,
+                "size": 1073741824
+            }
+        ],
+        "memoryTypes": [
+            {
+                "heapIndex": 0,
+                "propertyFlags": 7
+            }
+        ]
+    },
+    "ArrayOfVkExtensionProperties": [
+        {
+            "extensionName": "VK_IMG_filter_cubic",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_sampler_mirror_clamp_to_edge",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_IMG_format_pvrtc",
+            "specVersion": 1
+        },
+        {
+            "extensionName": "VK_KHR_swapchain",
+            "specVersion": 68
+        }
+    ],
+    "ArrayOfVkLayerProperties": [],
+    "ArrayOfVkQueueFamilyProperties": [
+        {
+            "minImageTransferGranularity": {
+                "depth": 1,
+                "height": 1,
+                "width": 1
+            },
+            "queueCount": 2,
+            "queueFlags": 7,
+            "timestampValidBits": 0
+        },
+        {
+            "minImageTransferGranularity": {
+                "depth": 0,
+                "height": 0,
+                "width": 0
+            },
+            "queueCount": 0,
+            "queueFlags": 0,
+            "timestampValidBits": 0
+        }
+    ],
+    "ArrayOfVkFormatProperties": [
+        {
+            "formatID": 1,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 2,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 11649,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 3,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 15745,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 4,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 15745,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 5,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 11649,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 6,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 11649,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 7,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 11649,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 8,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 15745,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 9,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 15745,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 10,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 15745,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 11,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 12,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 13,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3201,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 14,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3201,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 15,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 11649,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 16,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 15745,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 17,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 15745,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 18,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 19,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 20,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3201,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 21,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3201,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 22,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 11649,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 23,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 24,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 25,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 26,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 27,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 28,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 29,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 30,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 31,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 32,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 33,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 34,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 35,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 36,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 37,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 15747,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 38,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 15747,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 39,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 40,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 41,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3203,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 42,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3203,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 43,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 15745,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 44,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 15745,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 45,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 46,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 47,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 48,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 49,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 50,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 15745,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 51,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 15745,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 52,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 15745,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 53,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 54,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 55,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 11393,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 56,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 11393,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 57,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 15745,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 58,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 3457,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 59,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 60,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 61,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 62,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3201,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 63,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 64,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 7553,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 65,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 66,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 67,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 68,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3201,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 69,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 70,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 3457,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 71,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 3457,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 72,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 73,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 74,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3201,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 75,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3201,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 76,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 7553,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 77,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 3457,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 78,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 3457,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 79,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 80,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 81,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3201,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 82,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3201,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 83,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 7553,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 84,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 3457,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 85,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 3457,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 86,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 87,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 88,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3201,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 89,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3201,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 90,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 3457,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 91,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 3457,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 92,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 3457,
+            "bufferFeatures": 72
+        },
+        {
+            "formatID": 93,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 94,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 95,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3203,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 96,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3203,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 97,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 7555,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 98,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3207,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 99,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3207,
+            "bufferFeatures": 120
+        },
+        {
+            "formatID": 100,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 3459,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 101,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3203,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 102,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3203,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 103,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 3459,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 104,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3201,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 105,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3201,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 106,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 3457,
+            "bufferFeatures": 64
+        },
+        {
+            "formatID": 107,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3203,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 108,
+            "linearTilingFeatures": 3201,
+            "optimalTilingFeatures": 3203,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 109,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 3459,
+            "bufferFeatures": 88
+        },
+        {
+            "formatID": 110,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 111,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 112,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 113,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 114,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 115,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 116,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 117,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 118,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 119,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 120,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 121,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 122,
+            "linearTilingFeatures": 3457,
+            "optimalTilingFeatures": 7553,
+            "bufferFeatures": 8
+        },
+        {
+            "formatID": 123,
+            "linearTilingFeatures": 1025,
+            "optimalTilingFeatures": 5121,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 124,
+            "linearTilingFeatures": 3585,
+            "optimalTilingFeatures": 3585,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 125,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 126,
+            "linearTilingFeatures": 3585,
+            "optimalTilingFeatures": 3585,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 127,
+            "linearTilingFeatures": 3585,
+            "optimalTilingFeatures": 3585,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 128,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 129,
+            "linearTilingFeatures": 3585,
+            "optimalTilingFeatures": 3585,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 130,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 131,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 132,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 133,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 134,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 135,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 136,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 137,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 138,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 139,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 140,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 141,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 142,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 143,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 144,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 145,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 146,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 147,
+            "linearTilingFeatures": 1025,
+            "optimalTilingFeatures": 9217,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 148,
+            "linearTilingFeatures": 1025,
+            "optimalTilingFeatures": 9217,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 149,
+            "linearTilingFeatures": 1025,
+            "optimalTilingFeatures": 9217,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 150,
+            "linearTilingFeatures": 1025,
+            "optimalTilingFeatures": 9217,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 151,
+            "linearTilingFeatures": 1025,
+            "optimalTilingFeatures": 9217,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 152,
+            "linearTilingFeatures": 1025,
+            "optimalTilingFeatures": 9217,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 153,
+            "linearTilingFeatures": 1025,
+            "optimalTilingFeatures": 1025,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 154,
+            "linearTilingFeatures": 1025,
+            "optimalTilingFeatures": 1025,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 155,
+            "linearTilingFeatures": 1025,
+            "optimalTilingFeatures": 1025,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 156,
+            "linearTilingFeatures": 1025,
+            "optimalTilingFeatures": 1025,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 157,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 158,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 159,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 160,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 161,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 162,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 163,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 164,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 165,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 166,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 167,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 168,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 169,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 170,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 171,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 172,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 173,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 174,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 175,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 176,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 177,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 178,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 179,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 180,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 181,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 182,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 183,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        },
+        {
+            "formatID": 184,
+            "linearTilingFeatures": 0,
+            "optimalTilingFeatures": 0,
+            "bufferFeatures": 0
+        }
+    ]
+}
\ No newline at end of file
diff --git a/src/third_party/vulkan-validation-layers/src/tests/icd-spv.h b/src/third_party/vulkan-validation-layers/src/tests/icd-spv.h
new file mode 100644
index 0000000..b7a92a2
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/icd-spv.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (c) 2015-2016 The Khronos Group Inc.
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Author: Cody Northrop <cody@lunarg.com>
+ */
+
+#ifndef ICD_SPV_H
+#define ICD_SPV_H
+
+#include <stdint.h>
+
+#define ICD_SPV_MAGIC 0x07230203
+#define ICD_SPV_VERSION 99
+
+struct icd_spv_header {
+    uint32_t magic;
+    uint32_t version;
+    uint32_t gen_magic;  // Generator's magic number
+};
+
+#endif /* ICD_SPV_H */
diff --git a/src/third_party/vulkan-validation-layers/src/tests/layer_validation_tests.cpp b/src/third_party/vulkan-validation-layers/src/tests/layer_validation_tests.cpp
new file mode 100644
index 0000000..3ac4004
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/layer_validation_tests.cpp
@@ -0,0 +1,2091 @@
+/*
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2019 Google, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Author: Chia-I Wu <olvaffe@gmail.com>
+ * Author: Chris Forbes <chrisf@ijw.co.nz>
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Mike Stroyan <mike@LunarG.com>
+ * Author: Tobin Ehlis <tobine@google.com>
+ * Author: Tony Barbour <tony@LunarG.com>
+ * Author: Cody Northrop <cnorthrop@google.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ * Author: Jeremy Kniager <jeremyk@lunarg.com>
+ * Author: Shannon McPherson <shannon@lunarg.com>
+ * Author: John Zulauf <jzulauf@lunarg.com>
+ */
+#include "cast_utils.h"
+#include "layer_validation_tests.h"
+
+VkFormat FindSupportedDepthOnlyFormat(VkPhysicalDevice phy) {
+    const VkFormat ds_formats[] = {VK_FORMAT_D16_UNORM, VK_FORMAT_X8_D24_UNORM_PACK32, VK_FORMAT_D32_SFLOAT};
+    for (uint32_t i = 0; i < size(ds_formats); ++i) {
+        VkFormatProperties format_props;
+        vk::GetPhysicalDeviceFormatProperties(phy, ds_formats[i], &format_props);
+
+        if (format_props.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) {
+            return ds_formats[i];
+        }
+    }
+    return VK_FORMAT_UNDEFINED;
+}
+
+VkFormat FindSupportedStencilOnlyFormat(VkPhysicalDevice phy) {
+    const VkFormat ds_formats[] = {VK_FORMAT_S8_UINT};
+    for (uint32_t i = 0; i < size(ds_formats); ++i) {
+        VkFormatProperties format_props;
+        vk::GetPhysicalDeviceFormatProperties(phy, ds_formats[i], &format_props);
+
+        if (format_props.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) {
+            return ds_formats[i];
+        }
+    }
+    return VK_FORMAT_UNDEFINED;
+}
+
+VkFormat FindSupportedDepthStencilFormat(VkPhysicalDevice phy) {
+    const VkFormat ds_formats[] = {VK_FORMAT_D16_UNORM_S8_UINT, VK_FORMAT_D24_UNORM_S8_UINT, VK_FORMAT_D32_SFLOAT_S8_UINT};
+    for (uint32_t i = 0; i < size(ds_formats); ++i) {
+        VkFormatProperties format_props;
+        vk::GetPhysicalDeviceFormatProperties(phy, ds_formats[i], &format_props);
+
+        if (format_props.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) {
+            return ds_formats[i];
+        }
+    }
+    return VK_FORMAT_UNDEFINED;
+}
+
+bool ImageFormatIsSupported(VkPhysicalDevice phy, VkFormat format, VkImageTiling tiling, VkFormatFeatureFlags features) {
+    VkFormatProperties format_props;
+    vk::GetPhysicalDeviceFormatProperties(phy, format, &format_props);
+    VkFormatFeatureFlags phy_features =
+        (VK_IMAGE_TILING_OPTIMAL == tiling ? format_props.optimalTilingFeatures : format_props.linearTilingFeatures);
+    return (0 != (phy_features & features));
+}
+
+bool ImageFormatAndFeaturesSupported(VkPhysicalDevice phy, VkFormat format, VkImageTiling tiling, VkFormatFeatureFlags features) {
+    VkFormatProperties format_props;
+    vk::GetPhysicalDeviceFormatProperties(phy, format, &format_props);
+    VkFormatFeatureFlags phy_features =
+        (VK_IMAGE_TILING_OPTIMAL == tiling ? format_props.optimalTilingFeatures : format_props.linearTilingFeatures);
+    return (features == (phy_features & features));
+}
+
+bool ImageFormatAndFeaturesSupported(const VkInstance inst, const VkPhysicalDevice phy, const VkImageCreateInfo info,
+                                     const VkFormatFeatureFlags features) {
+    // Verify physical device support of format features
+    if (!ImageFormatAndFeaturesSupported(phy, info.format, info.tiling, features)) {
+        return false;
+    }
+
+    // Verify that PhysDevImageFormatProp() also claims support for the specific usage
+    VkImageFormatProperties props;
+    VkResult err =
+        vk::GetPhysicalDeviceImageFormatProperties(phy, info.format, info.imageType, info.tiling, info.usage, info.flags, &props);
+    if (VK_SUCCESS != err) {
+        return false;
+    }
+
+#if 0  // Convinced this chunk doesn't currently add any additional info, but leaving in place because it may be
+       // necessary with future extensions
+
+    // Verify again using version 2, if supported, which *can* return more property data than the original...
+    // (It's not clear that this is any more definitive than using the original version - but no harm)
+    PFN_vkGetPhysicalDeviceImageFormatProperties2KHR p_GetPDIFP2KHR =
+        (PFN_vkGetPhysicalDeviceImageFormatProperties2KHR)vk::GetInstanceProcAddr(inst,
+                                                                                "vkGetPhysicalDeviceImageFormatProperties2KHR");
+    if (NULL != p_GetPDIFP2KHR) {
+        VkPhysicalDeviceImageFormatInfo2KHR fmt_info{};
+        fmt_info.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR;
+        fmt_info.pNext = nullptr;
+        fmt_info.format = info.format;
+        fmt_info.type = info.imageType;
+        fmt_info.tiling = info.tiling;
+        fmt_info.usage = info.usage;
+        fmt_info.flags = info.flags;
+
+        VkImageFormatProperties2KHR fmt_props = {};
+        fmt_props.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR;
+        err = p_GetPDIFP2KHR(phy, &fmt_info, &fmt_props);
+        if (VK_SUCCESS != err) {
+            return false;
+        }
+    }
+#endif
+
+    return true;
+}
+
+VKAPI_ATTR VkBool32 VKAPI_CALL myDbgFunc(VkFlags msgFlags, VkDebugReportObjectTypeEXT objType, uint64_t srcObject, size_t location,
+                                         int32_t msgCode, const char *pLayerPrefix, const char *pMsg, void *pUserData) {
+    ErrorMonitor *errMonitor = (ErrorMonitor *)pUserData;
+    if (msgFlags & errMonitor->GetMessageFlags()) {
+        return errMonitor->CheckForDesiredMsg(pMsg);
+    }
+    return VK_FALSE;
+}
+
+VkPhysicalDevicePushDescriptorPropertiesKHR GetPushDescriptorProperties(VkInstance instance, VkPhysicalDevice gpu) {
+    // Find address of extension call and make the call -- assumes needed extensions are enabled.
+    PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR =
+        (PFN_vkGetPhysicalDeviceProperties2KHR)vk::GetInstanceProcAddr(instance, "vkGetPhysicalDeviceProperties2KHR");
+    assert(vkGetPhysicalDeviceProperties2KHR != nullptr);
+
+    // Get the push descriptor limits
+    auto push_descriptor_prop = lvl_init_struct<VkPhysicalDevicePushDescriptorPropertiesKHR>();
+    auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&push_descriptor_prop);
+    vkGetPhysicalDeviceProperties2KHR(gpu, &prop2);
+    return push_descriptor_prop;
+}
+
+VkPhysicalDeviceSubgroupProperties GetSubgroupProperties(VkInstance instance, VkPhysicalDevice gpu) {
+    auto subgroup_prop = lvl_init_struct<VkPhysicalDeviceSubgroupProperties>();
+
+    auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2>(&subgroup_prop);
+    vk::GetPhysicalDeviceProperties2(gpu, &prop2);
+    return subgroup_prop;
+}
+
+bool operator==(const VkDebugUtilsLabelEXT &rhs, const VkDebugUtilsLabelEXT &lhs) {
+    bool is_equal = (rhs.color[0] == lhs.color[0]) && (rhs.color[1] == lhs.color[1]) && (rhs.color[2] == lhs.color[2]) &&
+                    (rhs.color[3] == lhs.color[3]);
+    if (is_equal) {
+        if (rhs.pLabelName && lhs.pLabelName) {
+            is_equal = (0 == strcmp(rhs.pLabelName, lhs.pLabelName));
+        } else {
+            is_equal = (rhs.pLabelName == nullptr) && (lhs.pLabelName == nullptr);
+        }
+    }
+    return is_equal;
+}
+
+VKAPI_ATTR VkBool32 VKAPI_CALL DebugUtilsCallback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
+                                                  VkDebugUtilsMessageTypeFlagsEXT messageTypes,
+                                                  const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData, void *pUserData) {
+    auto *data = reinterpret_cast<DebugUtilsLabelCheckData *>(pUserData);
+    data->callback(pCallbackData, data);
+    return VK_FALSE;
+}
+
+#if GTEST_IS_THREADSAFE
+extern "C" void *AddToCommandBuffer(void *arg) {
+    struct thread_data_struct *data = (struct thread_data_struct *)arg;
+
+    for (int i = 0; i < 80000; i++) {
+        vk::CmdSetEvent(data->commandBuffer, data->event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
+        if (*data->bailout) {
+            break;
+        }
+    }
+    return NULL;
+}
+
+extern "C" void *UpdateDescriptor(void *arg) {
+    struct thread_data_struct *data = (struct thread_data_struct *)arg;
+
+    VkDescriptorBufferInfo buffer_info = {};
+    buffer_info.buffer = data->buffer;
+    buffer_info.offset = 0;
+    buffer_info.range = 1;
+
+    VkWriteDescriptorSet descriptor_write;
+    memset(&descriptor_write, 0, sizeof(descriptor_write));
+    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_write.dstSet = data->descriptorSet;
+    descriptor_write.dstBinding = data->binding;
+    descriptor_write.descriptorCount = 1;
+    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
+    descriptor_write.pBufferInfo = &buffer_info;
+
+    for (int i = 0; i < 80000; i++) {
+        vk::UpdateDescriptorSets(data->device, 1, &descriptor_write, 0, NULL);
+        if (*data->bailout) {
+            break;
+        }
+    }
+    return NULL;
+}
+
+#endif  // GTEST_IS_THREADSAFE
+
+extern "C" void *ReleaseNullFence(void *arg) {
+    struct thread_data_struct *data = (struct thread_data_struct *)arg;
+
+    for (int i = 0; i < 40000; i++) {
+        vk::DestroyFence(data->device, VK_NULL_HANDLE, NULL);
+        if (*data->bailout) {
+            break;
+        }
+    }
+    return NULL;
+}
+
+void TestRenderPassCreate(ErrorMonitor *error_monitor, const VkDevice device, const VkRenderPassCreateInfo *create_info,
+                          bool rp2_supported, const char *rp1_vuid, const char *rp2_vuid) {
+    VkRenderPass render_pass = VK_NULL_HANDLE;
+    VkResult err;
+
+    if (rp1_vuid) {
+        error_monitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, rp1_vuid);
+        err = vk::CreateRenderPass(device, create_info, nullptr, &render_pass);
+        if (err == VK_SUCCESS) vk::DestroyRenderPass(device, render_pass, nullptr);
+        error_monitor->VerifyFound();
+    }
+
+    if (rp2_supported && rp2_vuid) {
+        PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR =
+            (PFN_vkCreateRenderPass2KHR)vk::GetDeviceProcAddr(device, "vkCreateRenderPass2KHR");
+        safe_VkRenderPassCreateInfo2KHR create_info2;
+        ConvertVkRenderPassCreateInfoToV2KHR(*create_info, &create_info2);
+
+        error_monitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, rp2_vuid);
+        err = vkCreateRenderPass2KHR(device, create_info2.ptr(), nullptr, &render_pass);
+        if (err == VK_SUCCESS) vk::DestroyRenderPass(device, render_pass, nullptr);
+        error_monitor->VerifyFound();
+    }
+}
+
+void PositiveTestRenderPassCreate(ErrorMonitor *error_monitor, const VkDevice device, const VkRenderPassCreateInfo *create_info,
+                                  bool rp2_supported) {
+    VkRenderPass render_pass = VK_NULL_HANDLE;
+    VkResult err;
+
+    error_monitor->ExpectSuccess();
+    err = vk::CreateRenderPass(device, create_info, nullptr, &render_pass);
+    if (err == VK_SUCCESS) vk::DestroyRenderPass(device, render_pass, nullptr);
+    error_monitor->VerifyNotFound();
+
+    if (rp2_supported) {
+        PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR =
+            (PFN_vkCreateRenderPass2KHR)vk::GetDeviceProcAddr(device, "vkCreateRenderPass2KHR");
+        safe_VkRenderPassCreateInfo2KHR create_info2;
+        ConvertVkRenderPassCreateInfoToV2KHR(*create_info, &create_info2);
+
+        error_monitor->ExpectSuccess();
+        err = vkCreateRenderPass2KHR(device, create_info2.ptr(), nullptr, &render_pass);
+        if (err == VK_SUCCESS) vk::DestroyRenderPass(device, render_pass, nullptr);
+        error_monitor->VerifyNotFound();
+    }
+}
+
+void TestRenderPass2KHRCreate(ErrorMonitor *error_monitor, const VkDevice device, const VkRenderPassCreateInfo2KHR *create_info,
+                              const char *rp2_vuid) {
+    VkRenderPass render_pass = VK_NULL_HANDLE;
+    VkResult err;
+    PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR =
+        (PFN_vkCreateRenderPass2KHR)vk::GetDeviceProcAddr(device, "vkCreateRenderPass2KHR");
+
+    error_monitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, rp2_vuid);
+    err = vkCreateRenderPass2KHR(device, create_info, nullptr, &render_pass);
+    if (err == VK_SUCCESS) vk::DestroyRenderPass(device, render_pass, nullptr);
+    error_monitor->VerifyFound();
+}
+
+void TestRenderPassBegin(ErrorMonitor *error_monitor, const VkDevice device, const VkCommandBuffer command_buffer,
+                         const VkRenderPassBeginInfo *begin_info, bool rp2Supported, const char *rp1_vuid, const char *rp2_vuid) {
+    VkCommandBufferBeginInfo cmd_begin_info = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr,
+                                               VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, nullptr};
+
+    if (rp1_vuid) {
+        vk::BeginCommandBuffer(command_buffer, &cmd_begin_info);
+        error_monitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, rp1_vuid);
+        vk::CmdBeginRenderPass(command_buffer, begin_info, VK_SUBPASS_CONTENTS_INLINE);
+        error_monitor->VerifyFound();
+        vk::ResetCommandBuffer(command_buffer, 0);
+    }
+    if (rp2Supported && rp2_vuid) {
+        PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR =
+            (PFN_vkCmdBeginRenderPass2KHR)vk::GetDeviceProcAddr(device, "vkCmdBeginRenderPass2KHR");
+        VkSubpassBeginInfoKHR subpass_begin_info = {VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR, nullptr, VK_SUBPASS_CONTENTS_INLINE};
+        vk::BeginCommandBuffer(command_buffer, &cmd_begin_info);
+        error_monitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, rp2_vuid);
+        vkCmdBeginRenderPass2KHR(command_buffer, begin_info, &subpass_begin_info);
+        error_monitor->VerifyFound();
+        vk::ResetCommandBuffer(command_buffer, 0);
+    }
+}
+
+void ValidOwnershipTransferOp(ErrorMonitor *monitor, VkCommandBufferObj *cb, VkPipelineStageFlags src_stages,
+                              VkPipelineStageFlags dst_stages, const VkBufferMemoryBarrier *buf_barrier,
+                              const VkImageMemoryBarrier *img_barrier) {
+    monitor->ExpectSuccess();
+    cb->begin();
+    uint32_t num_buf_barrier = (buf_barrier) ? 1 : 0;
+    uint32_t num_img_barrier = (img_barrier) ? 1 : 0;
+    cb->PipelineBarrier(src_stages, dst_stages, 0, 0, nullptr, num_buf_barrier, buf_barrier, num_img_barrier, img_barrier);
+    cb->end();
+    cb->QueueCommandBuffer();  // Implicitly waits
+    monitor->VerifyNotFound();
+}
+
+void ValidOwnershipTransfer(ErrorMonitor *monitor, VkCommandBufferObj *cb_from, VkCommandBufferObj *cb_to,
+                            VkPipelineStageFlags src_stages, VkPipelineStageFlags dst_stages,
+                            const VkBufferMemoryBarrier *buf_barrier, const VkImageMemoryBarrier *img_barrier) {
+    ValidOwnershipTransferOp(monitor, cb_from, src_stages, dst_stages, buf_barrier, img_barrier);
+    ValidOwnershipTransferOp(monitor, cb_to, src_stages, dst_stages, buf_barrier, img_barrier);
+}
+
+VkResult GPDIFPHelper(VkPhysicalDevice dev, const VkImageCreateInfo *ci, VkImageFormatProperties *limits) {
+    VkImageFormatProperties tmp_limits;
+    limits = limits ? limits : &tmp_limits;
+    return vk::GetPhysicalDeviceImageFormatProperties(dev, ci->format, ci->imageType, ci->tiling, ci->usage, ci->flags, limits);
+}
+
+VkFormat FindFormatLinearWithoutMips(VkPhysicalDevice gpu, VkImageCreateInfo image_ci) {
+    image_ci.tiling = VK_IMAGE_TILING_LINEAR;
+
+    const VkFormat first_vk_format = static_cast<VkFormat>(1);
+    const VkFormat last_vk_format = static_cast<VkFormat>(130);  // avoid compressed/feature protected, otherwise 184
+
+    for (VkFormat format = first_vk_format; format <= last_vk_format; format = static_cast<VkFormat>(format + 1)) {
+        image_ci.format = format;
+
+        // WORKAROUND for dev_sim and mock_icd not containing valid format limits yet
+        VkFormatProperties format_props;
+        vk::GetPhysicalDeviceFormatProperties(gpu, format, &format_props);
+        const VkFormatFeatureFlags core_filter = 0x1FFF;
+        const auto features = (image_ci.tiling == VK_IMAGE_TILING_LINEAR) ? format_props.linearTilingFeatures & core_filter
+                                                                          : format_props.optimalTilingFeatures & core_filter;
+        if (!(features & core_filter)) continue;
+
+        VkImageFormatProperties img_limits;
+        if (VK_SUCCESS == GPDIFPHelper(gpu, &image_ci, &img_limits) && img_limits.maxMipLevels == 1) return format;
+    }
+
+    return VK_FORMAT_UNDEFINED;
+}
+
+bool FindFormatWithoutSamples(VkPhysicalDevice gpu, VkImageCreateInfo &image_ci) {
+    const VkFormat first_vk_format = static_cast<VkFormat>(1);
+    const VkFormat last_vk_format = static_cast<VkFormat>(130);  // avoid compressed/feature protected, otherwise 184
+
+    for (VkFormat format = first_vk_format; format <= last_vk_format; format = static_cast<VkFormat>(format + 1)) {
+        image_ci.format = format;
+
+        // WORKAROUND for dev_sim and mock_icd not containing valid format limits yet
+        VkFormatProperties format_props;
+        vk::GetPhysicalDeviceFormatProperties(gpu, format, &format_props);
+        const VkFormatFeatureFlags core_filter = 0x1FFF;
+        const auto features = (image_ci.tiling == VK_IMAGE_TILING_LINEAR) ? format_props.linearTilingFeatures & core_filter
+                                                                          : format_props.optimalTilingFeatures & core_filter;
+        if (!(features & core_filter)) continue;
+
+        for (VkSampleCountFlagBits samples = VK_SAMPLE_COUNT_64_BIT; samples > 0;
+             samples = static_cast<VkSampleCountFlagBits>(samples >> 1)) {
+            image_ci.samples = samples;
+            VkImageFormatProperties img_limits;
+            if (VK_SUCCESS == GPDIFPHelper(gpu, &image_ci, &img_limits) && !(img_limits.sampleCounts & samples)) return true;
+        }
+    }
+
+    return false;
+}
+
+bool FindUnsupportedImage(VkPhysicalDevice gpu, VkImageCreateInfo &image_ci) {
+    const VkFormat first_vk_format = static_cast<VkFormat>(1);
+    const VkFormat last_vk_format = static_cast<VkFormat>(130);  // avoid compressed/feature protected, otherwise 184
+
+    const std::vector<VkImageTiling> tilings = {VK_IMAGE_TILING_LINEAR, VK_IMAGE_TILING_OPTIMAL};
+    for (const auto tiling : tilings) {
+        image_ci.tiling = tiling;
+
+        for (VkFormat format = first_vk_format; format <= last_vk_format; format = static_cast<VkFormat>(format + 1)) {
+            image_ci.format = format;
+
+            VkFormatProperties format_props;
+            vk::GetPhysicalDeviceFormatProperties(gpu, format, &format_props);
+
+            const VkFormatFeatureFlags core_filter = 0x1FFF;
+            const auto features = (tiling == VK_IMAGE_TILING_LINEAR) ? format_props.linearTilingFeatures & core_filter
+                                                                     : format_props.optimalTilingFeatures & core_filter;
+            if (!(features & core_filter)) continue;  // We want a format supported by features, but not by ImageFormatProperties
+
+            // get as many usage flags as possible
+            image_ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+            if (features & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) image_ci.usage |= VK_IMAGE_USAGE_SAMPLED_BIT;
+            if (features & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT) image_ci.usage |= VK_IMAGE_USAGE_STORAGE_BIT;
+            if (features & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) image_ci.usage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+            if (features & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)
+                image_ci.usage |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
+
+            VkImageFormatProperties img_limits;
+            if (VK_ERROR_FORMAT_NOT_SUPPORTED == GPDIFPHelper(gpu, &image_ci, &img_limits)) {
+                return true;
+            }
+        }
+    }
+
+    return false;
+}
+
+VkFormat FindFormatWithoutFeatures(VkPhysicalDevice gpu, VkImageTiling tiling, VkFormatFeatureFlags undesired_features) {
+    const VkFormat first_vk_format = static_cast<VkFormat>(1);
+    const VkFormat last_vk_format = static_cast<VkFormat>(130);  // avoid compressed/feature protected, otherwise 184
+
+    for (VkFormat format = first_vk_format; format <= last_vk_format; format = static_cast<VkFormat>(format + 1)) {
+        VkFormatProperties format_props;
+        vk::GetPhysicalDeviceFormatProperties(gpu, format, &format_props);
+
+        const VkFormatFeatureFlags core_filter = 0x1FFF;
+        const auto features = (tiling == VK_IMAGE_TILING_LINEAR) ? format_props.linearTilingFeatures & core_filter
+                                                                 : format_props.optimalTilingFeatures & core_filter;
+
+        const auto valid_features = features & core_filter;
+        if (undesired_features == UINT32_MAX) {
+            if (!valid_features) return format;
+        } else {
+            if (valid_features && !(valid_features & undesired_features)) return format;
+        }
+    }
+
+    return VK_FORMAT_UNDEFINED;
+}
+
+void NegHeightViewportTests(VkDeviceObj *m_device, VkCommandBufferObj *m_commandBuffer, ErrorMonitor *m_errorMonitor) {
+    const auto &limits = m_device->props.limits;
+
+    m_commandBuffer->begin();
+
+    using std::vector;
+    struct TestCase {
+        VkViewport vp;
+        vector<std::string> vuids;
+    };
+
+    // not necessarily boundary values (unspecified cast rounding), but guaranteed to be over limit
+    const auto one_before_min_h = NearestSmaller(-static_cast<float>(limits.maxViewportDimensions[1]));
+    const auto one_past_max_h = NearestGreater(static_cast<float>(limits.maxViewportDimensions[1]));
+
+    const auto min_bound = limits.viewportBoundsRange[0];
+    const auto max_bound = limits.viewportBoundsRange[1];
+    const auto one_before_min_bound = NearestSmaller(min_bound);
+    const auto one_past_max_bound = NearestGreater(max_bound);
+
+    const vector<TestCase> test_cases = {{{0.0, 0.0, 64.0, one_before_min_h, 0.0, 1.0}, {"VUID-VkViewport-height-01773"}},
+                                         {{0.0, 0.0, 64.0, one_past_max_h, 0.0, 1.0}, {"VUID-VkViewport-height-01773"}},
+                                         {{0.0, 0.0, 64.0, NAN, 0.0, 1.0}, {"VUID-VkViewport-height-01773"}},
+                                         {{0.0, one_before_min_bound, 64.0, 1.0, 0.0, 1.0}, {"VUID-VkViewport-y-01775"}},
+                                         {{0.0, one_past_max_bound, 64.0, -1.0, 0.0, 1.0}, {"VUID-VkViewport-y-01776"}},
+                                         {{0.0, min_bound, 64.0, -1.0, 0.0, 1.0}, {"VUID-VkViewport-y-01777"}},
+                                         {{0.0, max_bound, 64.0, 1.0, 0.0, 1.0}, {"VUID-VkViewport-y-01233"}}};
+
+    for (const auto &test_case : test_cases) {
+        for (const auto vuid : test_case.vuids) {
+            if (vuid == "VUID-Undefined")
+                m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                                     "is less than VkPhysicalDeviceLimits::viewportBoundsRange[0]");
+            else
+                m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
+        }
+        vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &test_case.vp);
+        m_errorMonitor->VerifyFound();
+    }
+}
+
+void CreateSamplerTest(VkLayerTest &test, const VkSamplerCreateInfo *pCreateInfo, std::string code) {
+    VkResult err;
+    VkSampler sampler = VK_NULL_HANDLE;
+    if (code.length())
+        test.Monitor()->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT, code);
+    else
+        test.Monitor()->ExpectSuccess();
+
+    err = vk::CreateSampler(test.device(), pCreateInfo, NULL, &sampler);
+    if (code.length())
+        test.Monitor()->VerifyFound();
+    else
+        test.Monitor()->VerifyNotFound();
+
+    if (VK_SUCCESS == err) {
+        vk::DestroySampler(test.device(), sampler, NULL);
+    }
+}
+
+void CreateBufferTest(VkLayerTest &test, const VkBufferCreateInfo *pCreateInfo, std::string code) {
+    VkResult err;
+    VkBuffer buffer = VK_NULL_HANDLE;
+    if (code.length())
+        test.Monitor()->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, code);
+    else
+        test.Monitor()->ExpectSuccess();
+
+    err = vk::CreateBuffer(test.device(), pCreateInfo, NULL, &buffer);
+    if (code.length())
+        test.Monitor()->VerifyFound();
+    else
+        test.Monitor()->VerifyNotFound();
+
+    if (VK_SUCCESS == err) {
+        vk::DestroyBuffer(test.device(), buffer, NULL);
+    }
+}
+
+void CreateImageTest(VkLayerTest &test, const VkImageCreateInfo *pCreateInfo, std::string code) {
+    VkResult err;
+    VkImage image = VK_NULL_HANDLE;
+    if (code.length())
+        test.Monitor()->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, code);
+    else
+        test.Monitor()->ExpectSuccess();
+
+    err = vk::CreateImage(test.device(), pCreateInfo, NULL, &image);
+    if (code.length())
+        test.Monitor()->VerifyFound();
+    else
+        test.Monitor()->VerifyNotFound();
+
+    if (VK_SUCCESS == err) {
+        vk::DestroyImage(test.device(), image, NULL);
+    }
+}
+
+void CreateBufferViewTest(VkLayerTest &test, const VkBufferViewCreateInfo *pCreateInfo, const std::vector<std::string> &codes) {
+    VkResult err;
+    VkBufferView view = VK_NULL_HANDLE;
+    if (codes.size())
+        std::for_each(codes.begin(), codes.end(),
+                      [&](const std::string &s) { test.Monitor()->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, s); });
+    else
+        test.Monitor()->ExpectSuccess();
+
+    err = vk::CreateBufferView(test.device(), pCreateInfo, NULL, &view);
+    if (codes.size())
+        test.Monitor()->VerifyFound();
+    else
+        test.Monitor()->VerifyNotFound();
+
+    if (VK_SUCCESS == err) {
+        vk::DestroyBufferView(test.device(), view, NULL);
+    }
+}
+
+void CreateImageViewTest(VkLayerTest &test, const VkImageViewCreateInfo *pCreateInfo, std::string code) {
+    VkResult err;
+    VkImageView view = VK_NULL_HANDLE;
+    if (code.length())
+        test.Monitor()->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, code);
+    else
+        test.Monitor()->ExpectSuccess();
+
+    err = vk::CreateImageView(test.device(), pCreateInfo, NULL, &view);
+    if (code.length())
+        test.Monitor()->VerifyFound();
+    else
+        test.Monitor()->VerifyNotFound();
+
+    if (VK_SUCCESS == err) {
+        vk::DestroyImageView(test.device(), view, NULL);
+    }
+}
+
+VkSamplerCreateInfo SafeSaneSamplerCreateInfo() {
+    VkSamplerCreateInfo sampler_create_info = {};
+    sampler_create_info.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
+    sampler_create_info.pNext = nullptr;
+    sampler_create_info.magFilter = VK_FILTER_NEAREST;
+    sampler_create_info.minFilter = VK_FILTER_NEAREST;
+    sampler_create_info.mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST;
+    sampler_create_info.addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
+    sampler_create_info.addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
+    sampler_create_info.addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
+    sampler_create_info.mipLodBias = 0.0;
+    sampler_create_info.anisotropyEnable = VK_FALSE;
+    sampler_create_info.maxAnisotropy = 1.0;
+    sampler_create_info.compareEnable = VK_FALSE;
+    sampler_create_info.compareOp = VK_COMPARE_OP_NEVER;
+    sampler_create_info.minLod = 0.0;
+    sampler_create_info.maxLod = 16.0;
+    sampler_create_info.borderColor = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE;
+    sampler_create_info.unnormalizedCoordinates = VK_FALSE;
+
+    return sampler_create_info;
+}
+
+VkImageViewCreateInfo SafeSaneImageViewCreateInfo(VkImage image, VkFormat format, VkImageAspectFlags aspect_mask) {
+    VkImageViewCreateInfo image_view_create_info = {};
+    image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    image_view_create_info.image = image;
+    image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    image_view_create_info.format = format;
+    image_view_create_info.subresourceRange.layerCount = 1;
+    image_view_create_info.subresourceRange.baseMipLevel = 0;
+    image_view_create_info.subresourceRange.levelCount = 1;
+    image_view_create_info.subresourceRange.aspectMask = aspect_mask;
+
+    return image_view_create_info;
+}
+
+VkImageViewCreateInfo SafeSaneImageViewCreateInfo(const VkImageObj &image, VkFormat format, VkImageAspectFlags aspect_mask) {
+    return SafeSaneImageViewCreateInfo(image.handle(), format, aspect_mask);
+}
+
+bool CheckCreateRenderPass2Support(VkRenderFramework *renderFramework, std::vector<const char *> &device_extension_names) {
+    if (renderFramework->DeviceExtensionSupported(renderFramework->gpu(), nullptr, VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME)) {
+        device_extension_names.push_back(VK_KHR_MULTIVIEW_EXTENSION_NAME);
+        device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
+        device_extension_names.push_back(VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME);
+        return true;
+    }
+    return false;
+}
+
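+// Enables the extensions needed for VK_EXT_descriptor_indexing when they are available.
+// Note that InitFramework() is invoked unconditionally, so the render framework is
+// initialized even if descriptor indexing turns out to be unsupported.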
+bool CheckDescriptorIndexingSupportAndInitFramework(VkRenderFramework *renderFramework,
+                                                    std::vector<const char *> &instance_extension_names,
+                                                    std::vector<const char *> &device_extension_names,
+                                                    VkValidationFeaturesEXT *features, void *userData) {
+    bool descriptor_indexing = renderFramework->InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (descriptor_indexing) {
+        instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+    renderFramework->InitFramework(myDbgFunc, userData, features);
+    descriptor_indexing = descriptor_indexing && renderFramework->DeviceExtensionSupported(renderFramework->gpu(), nullptr,
+                                                                                           VK_KHR_MAINTENANCE3_EXTENSION_NAME);
+    descriptor_indexing = descriptor_indexing && renderFramework->DeviceExtensionSupported(
+                                                     renderFramework->gpu(), nullptr, VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME);
+    if (descriptor_indexing) {
+        device_extension_names.push_back(VK_KHR_MAINTENANCE3_EXTENSION_NAME);
+        device_extension_names.push_back(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME);
+        return true;
+    }
+    return false;
+}
+
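+// Draws a single triangle while deliberately omitting or misconfiguring the piece of
+// pipeline/command-buffer state selected by failCase, so that callers can verify the
+// validation layer reports the corresponding error.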
+void VkLayerTest::VKTriangleTest(BsoFailSelect failCase) {
+    ASSERT_TRUE(m_device && m_device->initialized());  // VKTriangleTest assumes Init() has finished
+
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj ps(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    VkPipelineObj pipelineobj(m_device);
+    pipelineobj.AddDefaultColorAttachment();
+    pipelineobj.AddShader(&vs);
+    pipelineobj.AddShader(&ps);
+
+    bool failcase_needs_depth = false;  // to mark cases that need depth attachment
+
+    VkBufferObj index_buffer;
+
+    switch (failCase) {
+        case BsoFailLineWidth: {
+            pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_LINE_WIDTH);
+            VkPipelineInputAssemblyStateCreateInfo ia_state = {};
+            ia_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+            ia_state.topology = VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
+            pipelineobj.SetInputAssembly(&ia_state);
+            break;
+        }
+        case BsoFailLineStipple: {
+            pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_LINE_STIPPLE_EXT);
+            VkPipelineInputAssemblyStateCreateInfo ia_state = {};
+            ia_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+            ia_state.topology = VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
+            pipelineobj.SetInputAssembly(&ia_state);
+
+            VkPipelineRasterizationLineStateCreateInfoEXT line_state = {};
+            line_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT;
+            line_state.lineRasterizationMode = VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT;
+            line_state.stippledLineEnable = VK_TRUE;
+            line_state.lineStippleFactor = 0;
+            line_state.lineStipplePattern = 0;
+            pipelineobj.SetLineState(&line_state);
+            break;
+        }
+        case BsoFailDepthBias: {
+            pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_DEPTH_BIAS);
+            VkPipelineRasterizationStateCreateInfo rs_state = {};
+            rs_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
+            rs_state.depthBiasEnable = VK_TRUE;
+            rs_state.lineWidth = 1.0f;
+            pipelineobj.SetRasterization(&rs_state);
+            break;
+        }
+        case BsoFailViewport: {
+            pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_VIEWPORT);
+            break;
+        }
+        case BsoFailScissor: {
+            pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_SCISSOR);
+            break;
+        }
+        case BsoFailBlend: {
+            pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_BLEND_CONSTANTS);
+            VkPipelineColorBlendAttachmentState att_state = {};
+            att_state.dstAlphaBlendFactor = VK_BLEND_FACTOR_CONSTANT_COLOR;
+            att_state.blendEnable = VK_TRUE;
+            pipelineobj.AddColorAttachment(0, att_state);
+            break;
+        }
+        case BsoFailDepthBounds: {
+            failcase_needs_depth = true;
+            pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_DEPTH_BOUNDS);
+            break;
+        }
+        case BsoFailStencilReadMask: {
+            failcase_needs_depth = true;
+            pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK);
+            break;
+        }
+        case BsoFailStencilWriteMask: {
+            failcase_needs_depth = true;
+            pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_STENCIL_WRITE_MASK);
+            break;
+        }
+        case BsoFailStencilReference: {
+            failcase_needs_depth = true;
+            pipelineobj.MakeDynamic(VK_DYNAMIC_STATE_STENCIL_REFERENCE);
+            break;
+        }
+
+        case BsoFailIndexBuffer:
+            break;
+        case BsoFailIndexBufferBadSize:
+        case BsoFailIndexBufferBadOffset:
+        case BsoFailIndexBufferBadMapSize:
+        case BsoFailIndexBufferBadMapOffset: {
+            // Create an index buffer for these tests.
+            // There is no need to populate it because we should bail before trying to draw.
+            uint32_t const indices[] = {0};
+            VkBufferCreateInfo buffer_info = {};
+            buffer_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+            buffer_info.size = 1024;
+            buffer_info.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
+            buffer_info.queueFamilyIndexCount = 1;
+            buffer_info.pQueueFamilyIndices = indices;
+            index_buffer.init(*m_device, buffer_info, (VkMemoryPropertyFlags)VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
+        } break;
+        case BsoFailCmdClearAttachments:
+            break;
+        case BsoFailNone:
+            break;
+        default:
+            break;
+    }
+
+    VkDescriptorSetObj descriptorSet(m_device);
+
+    VkImageView *depth_attachment = nullptr;
+    if (failcase_needs_depth) {
+        m_depth_stencil_fmt = FindSupportedDepthStencilFormat(gpu());
+        ASSERT_TRUE(m_depth_stencil_fmt != VK_FORMAT_UNDEFINED);
+
+        m_depthStencil->Init(m_device, static_cast<uint32_t>(m_width), static_cast<uint32_t>(m_height), m_depth_stencil_fmt,
+                             VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT);
+        depth_attachment = m_depthStencil->BindInfo();
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget(1, depth_attachment));
+    m_commandBuffer->begin();
+
+    GenericDrawPreparation(m_commandBuffer, pipelineobj, descriptorSet, failCase);
+
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+    // render triangle
+    if (failCase == BsoFailIndexBuffer) {
+        // Use DrawIndexed w/o an index buffer bound
+        m_commandBuffer->DrawIndexed(3, 1, 0, 0, 0);
+    } else if (failCase == BsoFailIndexBufferBadSize) {
+        // Bind the index buffer and draw one too many indices
+        m_commandBuffer->BindIndexBuffer(&index_buffer, 0, VK_INDEX_TYPE_UINT16);
+        m_commandBuffer->DrawIndexed(513, 1, 0, 0, 0);
+    } else if (failCase == BsoFailIndexBufferBadOffset) {
+        // Bind the index buffer and draw one past the end of the buffer using the offset
+        m_commandBuffer->BindIndexBuffer(&index_buffer, 0, VK_INDEX_TYPE_UINT16);
+        m_commandBuffer->DrawIndexed(512, 1, 1, 0, 0);
+    } else if (failCase == BsoFailIndexBufferBadMapSize) {
+        // Bind the index buffer at the middle point and draw one too many indices
+        m_commandBuffer->BindIndexBuffer(&index_buffer, 512, VK_INDEX_TYPE_UINT16);
+        m_commandBuffer->DrawIndexed(257, 1, 0, 0, 0);
+    } else if (failCase == BsoFailIndexBufferBadMapOffset) {
+        // Bind the index buffer at the middle point and draw one past the end of the buffer
+        m_commandBuffer->BindIndexBuffer(&index_buffer, 512, VK_INDEX_TYPE_UINT16);
+        m_commandBuffer->DrawIndexed(256, 1, 1, 0, 0);
+    } else {
+        m_commandBuffer->Draw(3, 1, 0, 0);
+    }
+
+    if (failCase == BsoFailCmdClearAttachments) {
+        VkClearAttachment color_attachment = {};
+        color_attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+        color_attachment.colorAttachment = 2000000000;  // Someone who knew what they were doing would use 0 for the index;
+        VkClearRect clear_rect = {{{0, 0}, {static_cast<uint32_t>(m_width), static_cast<uint32_t>(m_height)}}, 0, 1};
+
+        vk::CmdClearAttachments(m_commandBuffer->handle(), 1, &color_attachment, 1, &clear_rect);
+    }
+
+    // finalize recording of the command buffer
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+    m_commandBuffer->QueueCommandBuffer(true);
+    DestroyRenderTarget();
+}
+
+void VkLayerTest::GenericDrawPreparation(VkCommandBufferObj *commandBuffer, VkPipelineObj &pipelineobj,
+                                         VkDescriptorSetObj &descriptorSet, BsoFailSelect failCase) {
+    commandBuffer->ClearAllBuffers(m_renderTargets, m_clear_color, m_depthStencil, m_depth_clear_color, m_stencil_clear_color);
+
+    commandBuffer->PrepareAttachments(m_renderTargets, m_depthStencil);
+    // Make sure depthWriteEnable is set so that the depth fail test works correctly.
+    // Make sure stencilTestEnable is set so that the stencil fail test works correctly.
+    VkStencilOpState stencil = {};
+    stencil.failOp = VK_STENCIL_OP_KEEP;
+    stencil.passOp = VK_STENCIL_OP_KEEP;
+    stencil.depthFailOp = VK_STENCIL_OP_KEEP;
+    stencil.compareOp = VK_COMPARE_OP_NEVER;
+
+    VkPipelineDepthStencilStateCreateInfo ds_ci = {};
+    ds_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
+    ds_ci.pNext = NULL;
+    ds_ci.depthTestEnable = VK_FALSE;
+    ds_ci.depthWriteEnable = VK_TRUE;
+    ds_ci.depthCompareOp = VK_COMPARE_OP_NEVER;
+    ds_ci.depthBoundsTestEnable = VK_FALSE;
+    if (failCase == BsoFailDepthBounds) {
+        ds_ci.depthBoundsTestEnable = VK_TRUE;
+        ds_ci.maxDepthBounds = 0.0f;
+        ds_ci.minDepthBounds = 0.0f;
+    }
+    ds_ci.stencilTestEnable = VK_TRUE;
+    ds_ci.front = stencil;
+    ds_ci.back = stencil;
+
+    pipelineobj.SetDepthStencil(&ds_ci);
+    pipelineobj.SetViewport(m_viewports);
+    pipelineobj.SetScissor(m_scissors);
+    descriptorSet.CreateVKDescriptorSet(commandBuffer);
+    VkResult err = pipelineobj.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+    ASSERT_VK_SUCCESS(err);
+    vk::CmdBindPipeline(commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineobj.handle());
+    commandBuffer->BindDescriptorSet(descriptorSet);
+}
+
+void VkLayerTest::Init(VkPhysicalDeviceFeatures *features, VkPhysicalDeviceFeatures2 *features2,
+                       const VkCommandPoolCreateFlags flags, void *instance_pnext) {
+    InitFramework(myDbgFunc, m_errorMonitor, instance_pnext);
+    InitState(features, features2, flags);
+}
+
+VkCommandBufferObj *VkLayerTest::CommandBuffer() { return m_commandBuffer; }
+
+VkLayerTest::VkLayerTest() {
+    m_enableWSI = false;
+
+    m_instance_layer_names.clear();
+    m_instance_extension_names.clear();
+    m_device_extension_names.clear();
+
+    // Add default instance extensions to the list
+    m_instance_extension_names.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
+
+    m_instance_layer_names.push_back("VK_LAYER_KHRONOS_validation");
+
+    if (VkTestFramework::m_devsim_layer) {
+        if (InstanceLayerSupported("VK_LAYER_LUNARG_device_simulation")) {
+            m_instance_layer_names.push_back("VK_LAYER_LUNARG_device_simulation");
+        } else {
+            VkTestFramework::m_devsim_layer = false;
+            printf("             Did not find VK_LAYER_LUNARG_device_simulation layer so it will not be enabled.\n");
+        }
+    }
+
+    this->app_info.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
+    this->app_info.pNext = NULL;
+    this->app_info.pApplicationName = "layer_tests";
+    this->app_info.applicationVersion = 1;
+    this->app_info.pEngineName = "unittest";
+    this->app_info.engineVersion = 1;
+    this->app_info.apiVersion = VK_API_VERSION_1_0;
+
+    // Find out what version the instance supports and record the default target instance
+    auto enumerateInstanceVersion = (PFN_vkEnumerateInstanceVersion)vk::GetInstanceProcAddr(nullptr, "vkEnumerateInstanceVersion");
+    if (enumerateInstanceVersion) {
+        enumerateInstanceVersion(&m_instance_api_version);
+    } else {
+        m_instance_api_version = VK_API_VERSION_1_0;
+    }
+    m_target_api_version = app_info.apiVersion;
+}
+
+bool VkLayerTest::AddSurfaceInstanceExtension() {
+    m_enableWSI = true;
+    if (!InstanceExtensionSupported(VK_KHR_SURFACE_EXTENSION_NAME)) {
+        printf("%s %s extension not supported\n", kSkipPrefix, VK_KHR_SURFACE_EXTENSION_NAME);
+        return false;
+    }
+    m_instance_extension_names.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
+
+    bool bSupport = false;
+#if defined(VK_USE_PLATFORM_WIN32_KHR)
+    if (!InstanceExtensionSupported(VK_KHR_WIN32_SURFACE_EXTENSION_NAME)) {
+        printf("%s %s extension not supported\n", kSkipPrefix, VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
+        return false;
+    }
+    m_instance_extension_names.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
+    bSupport = true;
+#endif
+
+#if defined(VK_USE_PLATFORM_ANDROID_KHR) && defined(VALIDATION_APK)
+    if (!InstanceExtensionSupported(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME)) {
+        printf("%s %s extension not supported\n", kSkipPrefix, VK_KHR_ANDROID_SURFACE_EXTENSION_NAME);
+        return false;
+    }
+    m_instance_extension_names.push_back(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME);
+    bSupport = true;
+#endif
+
+#if defined(VK_USE_PLATFORM_XLIB_KHR)
+    if (!InstanceExtensionSupported(VK_KHR_XLIB_SURFACE_EXTENSION_NAME)) {
+        printf("%s %s extension not supported\n", kSkipPrefix, VK_KHR_XLIB_SURFACE_EXTENSION_NAME);
+        return false;
+    }
+    if (XOpenDisplay(NULL)) {
+        m_instance_extension_names.push_back(VK_KHR_XLIB_SURFACE_EXTENSION_NAME);
+        bSupport = true;
+    }
+#endif
+
+#if defined(VK_USE_PLATFORM_XCB_KHR)
+    if (!InstanceExtensionSupported(VK_KHR_XCB_SURFACE_EXTENSION_NAME)) {
+        printf("%s %s extension not supported\n", kSkipPrefix, VK_KHR_XCB_SURFACE_EXTENSION_NAME);
+        return false;
+    }
+    if (!bSupport && xcb_connect(NULL, NULL)) {
+        m_instance_extension_names.push_back(VK_KHR_XCB_SURFACE_EXTENSION_NAME);
+        bSupport = true;
+    }
+#endif
+
+    if (bSupport) return true;
+    printf("%s No platform's surface extension supported\n", kSkipPrefix);
+    return false;
+}
+
+bool VkLayerTest::AddSwapchainDeviceExtension() {
+    if (!DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SWAPCHAIN_EXTENSION_NAME)) {
+        printf("%s %s extension not supported\n", kSkipPrefix, VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+        return false;
+    }
+    m_device_extension_names.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+    return true;
+}
+
+uint32_t VkLayerTest::SetTargetApiVersion(uint32_t target_api_version) {
+    if (target_api_version == 0) target_api_version = VK_API_VERSION_1_0;
+    if (target_api_version <= m_instance_api_version) {
+        m_target_api_version = target_api_version;
+        app_info.apiVersion = m_target_api_version;
+    }
+    return m_target_api_version;
+}
+
+uint32_t VkLayerTest::DeviceValidationVersion() {
+    // The validation layers assume the version we are validating against is the apiVersion,
+    // unless the device apiVersion is lower.
+    VkPhysicalDeviceProperties props;
+    GetPhysicalDeviceProperties(&props);
+    return std::min(m_target_api_version, props.apiVersion);
+}
+
+bool VkLayerTest::LoadDeviceProfileLayer(
+    PFN_vkSetPhysicalDeviceFormatPropertiesEXT &fpvkSetPhysicalDeviceFormatPropertiesEXT,
+    PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT &fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT) {
+    // Load required functions
+    fpvkSetPhysicalDeviceFormatPropertiesEXT =
+        (PFN_vkSetPhysicalDeviceFormatPropertiesEXT)vk::GetInstanceProcAddr(instance(), "vkSetPhysicalDeviceFormatPropertiesEXT");
+    fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT = (PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT)vk::GetInstanceProcAddr(
+        instance(), "vkGetOriginalPhysicalDeviceFormatPropertiesEXT");
+
+    if (!(fpvkSetPhysicalDeviceFormatPropertiesEXT) || !(fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT)) {
+        printf("%s Can't find device_profile_api functions; skipped.\n", kSkipPrefix);
+        return false;
+    }
+
+    return true;
+}
+
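+// For the invalid-offset cases, returns true only when eOffsetAlignment falls below the
+// relevant alignment limit (the buffer's required alignment for eInvalidMemoryOffset,
+// otherwise the device's minimum offset alignment for the given usage), i.e. when a
+// misaligned bind can actually be provoked on this device. All other cases return true.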
+bool VkBufferTest::GetTestConditionValid(VkDeviceObj *aVulkanDevice, eTestEnFlags aTestFlag, VkBufferUsageFlags aBufferUsage) {
+    if (eInvalidDeviceOffset != aTestFlag && eInvalidMemoryOffset != aTestFlag) {
+        return true;
+    }
+    VkDeviceSize offset_limit = 0;
+    if (eInvalidMemoryOffset == aTestFlag) {
+        VkBuffer vulkanBuffer;
+        VkBufferCreateInfo buffer_create_info = {};
+        buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+        buffer_create_info.size = 32;
+        buffer_create_info.usage = aBufferUsage;
+
+        vk::CreateBuffer(aVulkanDevice->device(), &buffer_create_info, nullptr, &vulkanBuffer);
+        VkMemoryRequirements memory_reqs = {};
+
+        vk::GetBufferMemoryRequirements(aVulkanDevice->device(), vulkanBuffer, &memory_reqs);
+        vk::DestroyBuffer(aVulkanDevice->device(), vulkanBuffer, nullptr);
+        offset_limit = memory_reqs.alignment;
+    } else if ((VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT) & aBufferUsage) {
+        offset_limit = aVulkanDevice->props.limits.minTexelBufferOffsetAlignment;
+    } else if (VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT & aBufferUsage) {
+        offset_limit = aVulkanDevice->props.limits.minUniformBufferOffsetAlignment;
+    } else if (VK_BUFFER_USAGE_STORAGE_BUFFER_BIT & aBufferUsage) {
+        offset_limit = aVulkanDevice->props.limits.minStorageBufferOffsetAlignment;
+    }
+    return eOffsetAlignment < offset_limit;
+}
+
+VkBufferTest::VkBufferTest(VkDeviceObj *aVulkanDevice, VkBufferUsageFlags aBufferUsage, eTestEnFlags aTestFlag)
+    : AllocateCurrent(true),
+      BoundCurrent(false),
+      CreateCurrent(false),
+      InvalidDeleteEn(false),
+      VulkanDevice(aVulkanDevice->device()) {
+    if (eBindNullBuffer == aTestFlag || eBindFakeBuffer == aTestFlag) {
+        VkMemoryAllocateInfo memory_allocate_info = {};
+        memory_allocate_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+        memory_allocate_info.allocationSize = 1;   // fake size -- shouldn't matter for the test
+        memory_allocate_info.memoryTypeIndex = 0;  // fake type -- shouldn't matter for the test
+        vk::AllocateMemory(VulkanDevice, &memory_allocate_info, nullptr, &VulkanMemory);
+
+        VulkanBuffer = (aTestFlag == eBindNullBuffer) ? VK_NULL_HANDLE : (VkBuffer)0xCDCDCDCDCDCDCDCD;
+
+        vk::BindBufferMemory(VulkanDevice, VulkanBuffer, VulkanMemory, 0);
+    } else {
+        VkBufferCreateInfo buffer_create_info = {};
+        buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+        buffer_create_info.size = 32;
+        buffer_create_info.usage = aBufferUsage;
+
+        vk::CreateBuffer(VulkanDevice, &buffer_create_info, nullptr, &VulkanBuffer);
+
+        CreateCurrent = true;
+
+        VkMemoryRequirements memory_requirements;
+        vk::GetBufferMemoryRequirements(VulkanDevice, VulkanBuffer, &memory_requirements);
+
+        VkMemoryAllocateInfo memory_allocate_info = {};
+        memory_allocate_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+        memory_allocate_info.allocationSize = memory_requirements.size + eOffsetAlignment;
+        bool pass = aVulkanDevice->phy().set_memory_type(memory_requirements.memoryTypeBits, &memory_allocate_info,
+                                                         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+        if (!pass) {
+            CreateCurrent = false;
+            vk::DestroyBuffer(VulkanDevice, VulkanBuffer, nullptr);
+            return;
+        }
+
+        vk::AllocateMemory(VulkanDevice, &memory_allocate_info, NULL, &VulkanMemory);
+        // NB: 1 is intentionally an invalid offset value
+        const bool offset_en = eInvalidDeviceOffset == aTestFlag || eInvalidMemoryOffset == aTestFlag;
+        vk::BindBufferMemory(VulkanDevice, VulkanBuffer, VulkanMemory, offset_en ? eOffsetAlignment : 0);
+        BoundCurrent = true;
+
+        InvalidDeleteEn = (eFreeInvalidHandle == aTestFlag);
+    }
+}
+
+VkBufferTest::~VkBufferTest() {
+    if (CreateCurrent) {
+        vk::DestroyBuffer(VulkanDevice, VulkanBuffer, nullptr);
+    }
+    if (AllocateCurrent) {
+        if (InvalidDeleteEn) {
+            auto bad_memory = CastFromUint64<VkDeviceMemory>(CastToUint64(VulkanMemory) + 1);
+            vk::FreeMemory(VulkanDevice, bad_memory, nullptr);
+        }
+        vk::FreeMemory(VulkanDevice, VulkanMemory, nullptr);
+    }
+}
+
+bool VkBufferTest::GetBufferCurrent() { return AllocateCurrent && BoundCurrent && CreateCurrent; }
+
+const VkBuffer &VkBufferTest::GetBuffer() { return VulkanBuffer; }
+
+void VkBufferTest::TestDoubleDestroy() {
+    // Destroy the buffer but leave the flag set, which will cause
+    // the buffer to be destroyed again in the destructor.
+    vk::DestroyBuffer(VulkanDevice, VulkanBuffer, nullptr);
+}
+
+uint32_t VkVerticesObj::BindIdGenerator;
+
+VkVerticesObj::VkVerticesObj(VkDeviceObj *aVulkanDevice, unsigned aAttributeCount, unsigned aBindingCount, unsigned aByteStride,
+                             VkDeviceSize aVertexCount, const float *aVerticies)
+    : BoundCurrent(false),
+      AttributeCount(aAttributeCount),
+      BindingCount(aBindingCount),
+      BindId(BindIdGenerator),
+      PipelineVertexInputStateCreateInfo(),
+      VulkanMemoryBuffer(aVulkanDevice, static_cast<int>(aByteStride * aVertexCount), reinterpret_cast<const void *>(aVerticies),
+                         VK_BUFFER_USAGE_VERTEX_BUFFER_BIT) {
+    BindIdGenerator++;  // NB: This can wrap w/misuse
+
+    VertexInputAttributeDescription = new VkVertexInputAttributeDescription[AttributeCount];
+    VertexInputBindingDescription = new VkVertexInputBindingDescription[BindingCount];
+
+    PipelineVertexInputStateCreateInfo.pVertexAttributeDescriptions = VertexInputAttributeDescription;
+    PipelineVertexInputStateCreateInfo.vertexAttributeDescriptionCount = AttributeCount;
+    PipelineVertexInputStateCreateInfo.pVertexBindingDescriptions = VertexInputBindingDescription;
+    PipelineVertexInputStateCreateInfo.vertexBindingDescriptionCount = BindingCount;
+    PipelineVertexInputStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
+
+    for (unsigned i = 0; i < AttributeCount; i++) {
+        VertexInputAttributeDescription[i].binding = BindId;
+        VertexInputAttributeDescription[i].location = i;
+        VertexInputAttributeDescription[i].format = VK_FORMAT_R32G32B32_SFLOAT;
+        VertexInputAttributeDescription[i].offset = sizeof(float) * aByteStride;
+    }
+
+    for (unsigned i = 0; i < BindingCount; i++) {
+        VertexInputBindingDescription[i].binding = BindId;
+        VertexInputBindingDescription[i].stride = aByteStride;
+        VertexInputBindingDescription[i].inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
+    }
+}
+
+VkVerticesObj::~VkVerticesObj() {
+    if (VertexInputAttributeDescription) {
+        delete[] VertexInputAttributeDescription;
+    }
+    if (VertexInputBindingDescription) {
+        delete[] VertexInputBindingDescription;
+    }
+}
+
+bool VkVerticesObj::AddVertexInputToPipe(VkPipelineObj &aPipelineObj) {
+    aPipelineObj.AddVertexInputAttribs(VertexInputAttributeDescription, AttributeCount);
+    aPipelineObj.AddVertexInputBindings(VertexInputBindingDescription, BindingCount);
+    return true;
+}
+
+bool VkVerticesObj::AddVertexInputToPipeHelpr(CreatePipelineHelper *pipelineHelper) {
+    pipelineHelper->vi_ci_.pVertexBindingDescriptions = VertexInputBindingDescription;
+    pipelineHelper->vi_ci_.vertexBindingDescriptionCount = BindingCount;
+    pipelineHelper->vi_ci_.pVertexAttributeDescriptions = VertexInputAttributeDescription;
+    pipelineHelper->vi_ci_.vertexAttributeDescriptionCount = AttributeCount;
+    return true;
+}
+
+void VkVerticesObj::BindVertexBuffers(VkCommandBuffer aCommandBuffer, unsigned aOffsetCount, VkDeviceSize *aOffsetList) {
+    VkDeviceSize *offsetList;
+    unsigned offsetCount;
+
+    if (aOffsetCount) {
+        offsetList = aOffsetList;
+        offsetCount = aOffsetCount;
+    } else {
+        offsetList = new VkDeviceSize[1]();
+        offsetCount = 1;
+    }
+
+    vk::CmdBindVertexBuffers(aCommandBuffer, BindId, offsetCount, &VulkanMemoryBuffer.handle(), offsetList);
+    BoundCurrent = true;
+
+    if (!aOffsetCount) {
+        delete[] offsetList;
+    }
+}
+
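+// Creates a descriptor pool sized for exactly one set matching `bindings` and, unless
+// the layout is created as push-descriptor-only, immediately allocates that set.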
+OneOffDescriptorSet::OneOffDescriptorSet(VkDeviceObj *device, const Bindings &bindings,
+                                         VkDescriptorSetLayoutCreateFlags layout_flags, void *layout_pnext,
+                                         VkDescriptorPoolCreateFlags poolFlags, void *allocate_pnext)
+    : device_{device}, pool_{}, layout_(device, bindings, layout_flags, layout_pnext), set_{} {
+    VkResult err;
+
+    std::vector<VkDescriptorPoolSize> sizes;
+    for (const auto &b : bindings) sizes.push_back({b.descriptorType, std::max(1u, b.descriptorCount)});
+
+    VkDescriptorPoolCreateInfo dspci = {
+        VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO, nullptr, poolFlags, 1, uint32_t(sizes.size()), sizes.data()};
+    err = vk::CreateDescriptorPool(device_->handle(), &dspci, nullptr, &pool_);
+    if (err != VK_SUCCESS) return;
+
+    if ((layout_flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR) == 0) {
+        VkDescriptorSetAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, allocate_pnext, pool_, 1,
+                                                  &layout_.handle()};
+        err = vk::AllocateDescriptorSets(device_->handle(), &alloc_info, &set_);
+    }
+}
+
+OneOffDescriptorSet::~OneOffDescriptorSet() {
+    // No need to destroy the set; it is freed along with the pool.
+    vk::DestroyDescriptorPool(device_->handle(), pool_, nullptr);
+}
+
+bool OneOffDescriptorSet::Initialized() { return pool_ != VK_NULL_HANDLE && layout_.initialized() && set_ != VK_NULL_HANDLE; }
+
+void OneOffDescriptorSet::WriteDescriptorBufferInfo(int blinding, VkBuffer buffer, VkDeviceSize size,
+                                                    VkDescriptorType descriptorType) {
+    VkDescriptorBufferInfo buffer_info = {};
+    buffer_info.buffer = buffer;
+    buffer_info.offset = 0;
+    buffer_info.range = size;
+    buffer_infos.emplace_back(buffer_info);
+    size_t index = buffer_infos.size() - 1;
+
+    VkWriteDescriptorSet descriptor_write;
+    memset(&descriptor_write, 0, sizeof(descriptor_write));
+    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_write.dstSet = set_;
+    descriptor_write.dstBinding = blinding;
+    descriptor_write.descriptorCount = 1;
+    descriptor_write.descriptorType = descriptorType;
+    descriptor_write.pBufferInfo = &buffer_infos[index];
+    descriptor_write.pImageInfo = nullptr;
+    descriptor_write.pTexelBufferView = nullptr;
+
+    descriptor_writes.emplace_back(descriptor_write);
+}
+
+void OneOffDescriptorSet::WriteDescriptorBufferView(int blinding, VkBufferView &buffer_view, VkDescriptorType descriptorType) {
+    VkWriteDescriptorSet descriptor_write;
+    memset(&descriptor_write, 0, sizeof(descriptor_write));
+    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_write.dstSet = set_;
+    descriptor_write.dstBinding = blinding;
+    descriptor_write.descriptorCount = 1;
+    descriptor_write.descriptorType = descriptorType;
+    descriptor_write.pTexelBufferView = &buffer_view;
+    descriptor_write.pImageInfo = nullptr;
+    descriptor_write.pBufferInfo = nullptr;
+
+    descriptor_writes.emplace_back(descriptor_write);
+}
+
+void OneOffDescriptorSet::WriteDescriptorImageInfo(int blinding, VkImageView image_view, VkSampler sampler,
+                                                   VkDescriptorType descriptorType) {
+    VkDescriptorImageInfo image_info = {};
+    image_info.imageView = image_view;
+    image_info.sampler = sampler;
+    image_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+    image_infos.emplace_back(image_info);
+    size_t index = image_infos.size() - 1;
+
+    VkWriteDescriptorSet descriptor_write;
+    memset(&descriptor_write, 0, sizeof(descriptor_write));
+    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_write.dstSet = set_;
+    descriptor_write.dstBinding = blinding;
+    descriptor_write.descriptorCount = 1;
+    descriptor_write.descriptorType = descriptorType;
+    descriptor_write.pImageInfo = &image_infos[index];
+    descriptor_write.pBufferInfo = nullptr;
+    descriptor_write.pTexelBufferView = nullptr;
+
+    descriptor_writes.emplace_back(descriptor_write);
+}
+
+void OneOffDescriptorSet::UpdateDescriptorSets() {
+    vk::UpdateDescriptorSets(device_->handle(), descriptor_writes.size(), descriptor_writes.data(), 0, NULL);
+}
+
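+// CreatePipelineHelper and the compute/ray-tracing helpers that follow share a pattern:
+// InitInfo() fills every create-info struct with working defaults, the test overrides
+// whichever fields it wants to exercise, InitState() creates the descriptor set, pipeline
+// layout, and pipeline cache, and Create*Pipeline() late-binds the remaining handles
+// before calling into the driver.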
+CreatePipelineHelper::CreatePipelineHelper(VkLayerTest &test) : layer_test_(test) {}
+
+CreatePipelineHelper::~CreatePipelineHelper() {
+    VkDevice device = layer_test_.device();
+    vk::DestroyPipelineCache(device, pipeline_cache_, nullptr);
+    vk::DestroyPipeline(device, pipeline_, nullptr);
+}
+
+void CreatePipelineHelper::InitDescriptorSetInfo() {
+    dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr}};
+}
+
+void CreatePipelineHelper::InitInputAndVertexInfo() {
+    vi_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
+
+    ia_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+    ia_ci_.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
+}
+
+void CreatePipelineHelper::InitMultisampleInfo() {
+    pipe_ms_state_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+    pipe_ms_state_ci_.pNext = nullptr;
+    pipe_ms_state_ci_.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
+    pipe_ms_state_ci_.sampleShadingEnable = VK_FALSE;
+    pipe_ms_state_ci_.minSampleShading = 1.0;
+    pipe_ms_state_ci_.pSampleMask = NULL;
+}
+
+void CreatePipelineHelper::InitPipelineLayoutInfo() {
+    pipeline_layout_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+    pipeline_layout_ci_.setLayoutCount = 1;     // Not really changeable because InitState() sets exactly one pSetLayout
+    pipeline_layout_ci_.pSetLayouts = nullptr;  // must be bound after it is created
+}
+
+void CreatePipelineHelper::InitViewportInfo() {
+    viewport_ = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
+    scissor_ = {{0, 0}, {64, 64}};
+
+    vp_state_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
+    vp_state_ci_.pNext = nullptr;
+    vp_state_ci_.viewportCount = 1;
+    vp_state_ci_.pViewports = &viewport_;  // ignored if dynamic
+    vp_state_ci_.scissorCount = 1;
+    vp_state_ci_.pScissors = &scissor_;  // ignored if dynamic
+}
+
+void CreatePipelineHelper::InitDynamicStateInfo() {
+    // Use a "validity" check on the {} initialized structure to detect initialization
+    // during late bind
+}
+
+void CreatePipelineHelper::InitShaderInfo() {
+    vs_.reset(new VkShaderObj(layer_test_.DeviceObj(), bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, &layer_test_));
+    fs_.reset(new VkShaderObj(layer_test_.DeviceObj(), bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, &layer_test_));
+    // We shouldn't need a fragment shader but add it to be able to run on more devices
+    shader_stages_ = {vs_->GetStageCreateInfo(), fs_->GetStageCreateInfo()};
+}
+
+void CreatePipelineHelper::InitRasterizationInfo() {
+    rs_state_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
+    rs_state_ci_.pNext = &line_state_ci_;
+    rs_state_ci_.flags = 0;
+    rs_state_ci_.depthClampEnable = VK_FALSE;
+    rs_state_ci_.rasterizerDiscardEnable = VK_FALSE;
+    rs_state_ci_.polygonMode = VK_POLYGON_MODE_FILL;
+    rs_state_ci_.cullMode = VK_CULL_MODE_BACK_BIT;
+    rs_state_ci_.frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE;
+    rs_state_ci_.depthBiasEnable = VK_FALSE;
+    rs_state_ci_.lineWidth = 1.0F;
+}
+
+void CreatePipelineHelper::InitLineRasterizationInfo() {
+    line_state_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT;
+    line_state_ci_.pNext = nullptr;
+    line_state_ci_.lineRasterizationMode = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT;
+    line_state_ci_.stippledLineEnable = VK_FALSE;
+    line_state_ci_.lineStippleFactor = 0;
+    line_state_ci_.lineStipplePattern = 0;
+}
+
+void CreatePipelineHelper::InitBlendStateInfo() {
+    cb_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
+    cb_ci_.logicOpEnable = VK_FALSE;
+    cb_ci_.logicOp = VK_LOGIC_OP_COPY;  // ignored if enable is VK_FALSE above
+    cb_ci_.attachmentCount = layer_test_.RenderPassInfo().subpassCount;
+    ASSERT_TRUE(IsValidVkStruct(layer_test_.RenderPassInfo()));
+    cb_ci_.pAttachments = &cb_attachments_;
+    for (int i = 0; i < 4; i++) {
+        cb_ci_.blendConstants[0] = 1.0F;
+    }
+}
+
+void CreatePipelineHelper::InitGraphicsPipelineInfo() {
+    // Color-only rendering in a subpass with no depth/stencil attachment
+    // Active Pipeline Shader Stages
+    //    Vertex Shader
+    //    Fragment Shader
+    // Required: Fixed-Function Pipeline Stages
+    //    VkPipelineVertexInputStateCreateInfo
+    //    VkPipelineInputAssemblyStateCreateInfo
+    //    VkPipelineViewportStateCreateInfo
+    //    VkPipelineRasterizationStateCreateInfo
+    //    VkPipelineMultisampleStateCreateInfo
+    //    VkPipelineColorBlendStateCreateInfo
+    gp_ci_.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
+    gp_ci_.pNext = nullptr;
+    gp_ci_.flags = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT;
+    gp_ci_.pVertexInputState = &vi_ci_;
+    gp_ci_.pInputAssemblyState = &ia_ci_;
+    gp_ci_.pTessellationState = nullptr;
+    gp_ci_.pViewportState = &vp_state_ci_;
+    gp_ci_.pRasterizationState = &rs_state_ci_;
+    gp_ci_.pMultisampleState = &pipe_ms_state_ci_;
+    gp_ci_.pDepthStencilState = nullptr;
+    gp_ci_.pColorBlendState = &cb_ci_;
+    gp_ci_.pDynamicState = nullptr;
+    gp_ci_.renderPass = layer_test_.renderPass();
+}
+
+void CreatePipelineHelper::InitPipelineCacheInfo() {
+    pc_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
+    pc_ci_.pNext = nullptr;
+    pc_ci_.flags = 0;
+    pc_ci_.initialDataSize = 0;
+    pc_ci_.pInitialData = nullptr;
+}
+
+void CreatePipelineHelper::InitTesselationState() {
+    // TBD -- add shaders and create_info
+}
+
+void CreatePipelineHelper::InitInfo() {
+    InitDescriptorSetInfo();
+    InitInputAndVertexInfo();
+    InitMultisampleInfo();
+    InitPipelineLayoutInfo();
+    InitViewportInfo();
+    InitDynamicStateInfo();
+    InitShaderInfo();
+    InitRasterizationInfo();
+    InitLineRasterizationInfo();
+    InitBlendStateInfo();
+    InitGraphicsPipelineInfo();
+    InitPipelineCacheInfo();
+}
+
+void CreatePipelineHelper::InitState() {
+    VkResult err;
+    descriptor_set_.reset(new OneOffDescriptorSet(layer_test_.DeviceObj(), dsl_bindings_));
+    ASSERT_TRUE(descriptor_set_->Initialized());
+
+    const std::vector<VkPushConstantRange> push_ranges(
+        pipeline_layout_ci_.pPushConstantRanges,
+        pipeline_layout_ci_.pPushConstantRanges + pipeline_layout_ci_.pushConstantRangeCount);
+    pipeline_layout_ = VkPipelineLayoutObj(layer_test_.DeviceObj(), {&descriptor_set_->layout_}, push_ranges);
+
+    err = vk::CreatePipelineCache(layer_test_.device(), &pc_ci_, NULL, &pipeline_cache_);
+    ASSERT_VK_SUCCESS(err);
+}
+
+void CreatePipelineHelper::LateBindPipelineInfo() {
+    // By value or dynamically located items must be late bound
+    gp_ci_.layout = pipeline_layout_.handle();
+    gp_ci_.stageCount = shader_stages_.size();
+    gp_ci_.pStages = shader_stages_.data();
+    if ((gp_ci_.pTessellationState == nullptr) && IsValidVkStruct(tess_ci_)) {
+        gp_ci_.pTessellationState = &tess_ci_;
+    }
+    if ((gp_ci_.pDynamicState == nullptr) && IsValidVkStruct(dyn_state_ci_)) {
+        gp_ci_.pDynamicState = &dyn_state_ci_;
+    }
+}
+
+VkResult CreatePipelineHelper::CreateGraphicsPipeline(bool implicit_destroy, bool do_late_bind) {
+    VkResult err;
+    if (do_late_bind) {
+        LateBindPipelineInfo();
+    }
+    if (implicit_destroy && (pipeline_ != VK_NULL_HANDLE)) {
+        vk::DestroyPipeline(layer_test_.device(), pipeline_, nullptr);
+        pipeline_ = VK_NULL_HANDLE;
+    }
+    err = vk::CreateGraphicsPipelines(layer_test_.device(), pipeline_cache_, 1, &gp_ci_, NULL, &pipeline_);
+    return err;
+}
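+
+// Illustrative use of the graphics helper; the overridden field and the expected error
+// string are placeholders, not part of this change:
+//
+//   CreatePipelineHelper pipe(*this);
+//   pipe.InitInfo();
+//   pipe.gp_ci_.pViewportState = nullptr;  // the state under test
+//   pipe.InitState();
+//   m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "<expected VUID substring>");
+//   pipe.CreateGraphicsPipeline();
+//   m_errorMonitor->VerifyFound();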
+
+CreateComputePipelineHelper::CreateComputePipelineHelper(VkLayerTest &test) : layer_test_(test) {}
+
+CreateComputePipelineHelper::~CreateComputePipelineHelper() {
+    VkDevice device = layer_test_.device();
+    vk::DestroyPipelineCache(device, pipeline_cache_, nullptr);
+    vk::DestroyPipeline(device, pipeline_, nullptr);
+}
+
+void CreateComputePipelineHelper::InitDescriptorSetInfo() {
+    dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr}};
+}
+
+void CreateComputePipelineHelper::InitPipelineLayoutInfo() {
+    pipeline_layout_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+    pipeline_layout_ci_.setLayoutCount = 1;     // Not really changeable because InitState() sets exactly one pSetLayout
+    pipeline_layout_ci_.pSetLayouts = nullptr;  // must be bound after it is created
+}
+
+void CreateComputePipelineHelper::InitShaderInfo() {
+    cs_.reset(new VkShaderObj(layer_test_.DeviceObj(), bindStateMinimalShaderText, VK_SHADER_STAGE_COMPUTE_BIT, &layer_test_));
+    // Only a single compute shader stage is needed here.
+}
+
+void CreateComputePipelineHelper::InitComputePipelineInfo() {
+    cp_ci_.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
+    cp_ci_.pNext = nullptr;
+    cp_ci_.flags = 0;
+}
+
+void CreateComputePipelineHelper::InitPipelineCacheInfo() {
+    pc_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
+    pc_ci_.pNext = nullptr;
+    pc_ci_.flags = 0;
+    pc_ci_.initialDataSize = 0;
+    pc_ci_.pInitialData = nullptr;
+}
+
+void CreateComputePipelineHelper::InitInfo() {
+    InitDescriptorSetInfo();
+    InitPipelineLayoutInfo();
+    InitShaderInfo();
+    InitComputePipelineInfo();
+    InitPipelineCacheInfo();
+}
+
+void CreateComputePipelineHelper::InitState() {
+    VkResult err;
+    descriptor_set_.reset(new OneOffDescriptorSet(layer_test_.DeviceObj(), dsl_bindings_));
+    ASSERT_TRUE(descriptor_set_->Initialized());
+
+    const std::vector<VkPushConstantRange> push_ranges(
+        pipeline_layout_ci_.pPushConstantRanges,
+        pipeline_layout_ci_.pPushConstantRanges + pipeline_layout_ci_.pushConstantRangeCount);
+    pipeline_layout_ = VkPipelineLayoutObj(layer_test_.DeviceObj(), {&descriptor_set_->layout_}, push_ranges);
+
+    err = vk::CreatePipelineCache(layer_test_.device(), &pc_ci_, NULL, &pipeline_cache_);
+    ASSERT_VK_SUCCESS(err);
+}
+
+void CreateComputePipelineHelper::LateBindPipelineInfo() {
+    // By value or dynamically located items must be late bound
+    cp_ci_.layout = pipeline_layout_.handle();
+    cp_ci_.stage = cs_.get()->GetStageCreateInfo();
+}
+
+VkResult CreateComputePipelineHelper::CreateComputePipeline(bool implicit_destroy, bool do_late_bind) {
+    VkResult err;
+    if (do_late_bind) {
+        LateBindPipelineInfo();
+    }
+    if (implicit_destroy && (pipeline_ != VK_NULL_HANDLE)) {
+        vk::DestroyPipeline(layer_test_.device(), pipeline_, nullptr);
+        pipeline_ = VK_NULL_HANDLE;
+    }
+    err = vk::CreateComputePipelines(layer_test_.device(), pipeline_cache_, 1, &cp_ci_, NULL, &pipeline_);
+    return err;
+}
+
+CreateNVRayTracingPipelineHelper::CreateNVRayTracingPipelineHelper(VkLayerTest &test) : layer_test_(test) {}
+CreateNVRayTracingPipelineHelper::~CreateNVRayTracingPipelineHelper() {
+    VkDevice device = layer_test_.device();
+    vk::DestroyPipelineCache(device, pipeline_cache_, nullptr);
+    vk::DestroyPipeline(device, pipeline_, nullptr);
+}
+
+bool CreateNVRayTracingPipelineHelper::InitInstanceExtensions(VkLayerTest &test,
+                                                              std::vector<const char *> &instance_extension_names) {
+    if (test.InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return false;
+    }
+    return true;
+}
+
+bool CreateNVRayTracingPipelineHelper::InitDeviceExtensions(VkLayerTest &test, std::vector<const char *> &device_extension_names) {
+    std::array<const char *, 2> required_device_extensions = {
+        {VK_NV_RAY_TRACING_EXTENSION_NAME, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME}};
+    for (auto device_extension : required_device_extensions) {
+        if (test.DeviceExtensionSupported(test.gpu(), nullptr, device_extension)) {
+            device_extension_names.push_back(device_extension);
+        } else {
+            printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
+            return false;
+        }
+    }
+    return true;
+}
+
+void CreateNVRayTracingPipelineHelper::InitShaderGroups() {
+    {
+        VkRayTracingShaderGroupCreateInfoNV group = {};
+        group.sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+        group.type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
+        group.generalShader = 0;
+        group.closestHitShader = VK_SHADER_UNUSED_NV;
+        group.anyHitShader = VK_SHADER_UNUSED_NV;
+        group.intersectionShader = VK_SHADER_UNUSED_NV;
+        groups_.push_back(group);
+    }
+    {
+        VkRayTracingShaderGroupCreateInfoNV group = {};
+        group.sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+        group.type = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV;
+        group.generalShader = VK_SHADER_UNUSED_NV;
+        group.closestHitShader = 1;
+        group.anyHitShader = VK_SHADER_UNUSED_NV;
+        group.intersectionShader = VK_SHADER_UNUSED_NV;
+        groups_.push_back(group);
+    }
+    {
+        VkRayTracingShaderGroupCreateInfoNV group = {};
+        group.sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+        group.type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
+        group.generalShader = 2;
+        group.closestHitShader = VK_SHADER_UNUSED_NV;
+        group.anyHitShader = VK_SHADER_UNUSED_NV;
+        group.intersectionShader = VK_SHADER_UNUSED_NV;
+        groups_.push_back(group);
+    }
+}
+
+void CreateNVRayTracingPipelineHelper::InitDescriptorSetInfo() {
+    dsl_bindings_ = {
+        {0, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, 1, VK_SHADER_STAGE_RAYGEN_BIT_NV, nullptr},
+        {1, VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV, 1, VK_SHADER_STAGE_RAYGEN_BIT_NV, nullptr},
+    };
+}
+
+void CreateNVRayTracingPipelineHelper::InitPipelineLayoutInfo() {
+    pipeline_layout_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+    pipeline_layout_ci_.setLayoutCount = 1;     // Not really changeable because InitState() sets exactly one pSetLayout
+    pipeline_layout_ci_.pSetLayouts = nullptr;  // must be bound after it is created
+}
+
+void CreateNVRayTracingPipelineHelper::InitShaderInfo() {
+    static const char rayGenShaderText[] =
+        "#version 460 core                                                \n"
+        "#extension GL_NV_ray_tracing : require                           \n"
+        "layout(set = 0, binding = 0, rgba8) uniform image2D image;       \n"
+        "layout(set = 0, binding = 1) uniform accelerationStructureNV as; \n"
+        "                                                                 \n"
+        "layout(location = 0) rayPayloadNV float payload;                 \n"
+        "                                                                 \n"
+        "void main()                                                      \n"
+        "{                                                                \n"
+        "   vec4 col = vec4(0, 0, 0, 1);                                  \n"
+        "                                                                 \n"
+        "   vec3 origin = vec3(float(gl_LaunchIDNV.x)/float(gl_LaunchSizeNV.x), "
+        "float(gl_LaunchIDNV.y)/float(gl_LaunchSizeNV.y), "
+        "1.0); \n"
+        "   vec3 dir = vec3(0.0, 0.0, -1.0);                              \n"
+        "                                                                 \n"
+        "   payload = 0.5;                                                \n"
+        "   traceNV(as, gl_RayFlagsCullBackFacingTrianglesNV, 0xff, 0, 1, 0, origin, 0.0, dir, 1000.0, 0); \n"
+        "                                                                 \n"
+        "   col.y = payload;                                              \n"
+        "                                                                 \n"
+        "   imageStore(image, ivec2(gl_LaunchIDNV.xy), col);              \n"
+        "}\n";
+
+    static char const closestHitShaderText[] =
+        "#version 460 core                              \n"
+        "#extension GL_NV_ray_tracing : require         \n"
+        "layout(location = 0) rayPayloadInNV float hitValue;             \n"
+        "                                               \n"
+        "void main() {                                  \n"
+        "    hitValue = 1.0;                            \n"
+        "}                                              \n";
+
+    static char const missShaderText[] =
+        "#version 460 core                              \n"
+        "#extension GL_NV_ray_tracing : require         \n"
+        "layout(location = 0) rayPayloadInNV float hitValue; \n"
+        "                                               \n"
+        "void main() {                                  \n"
+        "    hitValue = 0.0;                            \n"
+        "}                                              \n";
+
+    rgs_.reset(new VkShaderObj(layer_test_.DeviceObj(), rayGenShaderText, VK_SHADER_STAGE_RAYGEN_BIT_NV, &layer_test_));
+    chs_.reset(new VkShaderObj(layer_test_.DeviceObj(), closestHitShaderText, VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV, &layer_test_));
+    mis_.reset(new VkShaderObj(layer_test_.DeviceObj(), missShaderText, VK_SHADER_STAGE_MISS_BIT_NV, &layer_test_));
+
+    shader_stages_ = {rgs_->GetStageCreateInfo(), chs_->GetStageCreateInfo(), mis_->GetStageCreateInfo()};
+}
+
+void CreateNVRayTracingPipelineHelper::InitNVRayTracingPipelineInfo() {
+    rp_ci_.sType = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV;
+    rp_ci_.maxRecursionDepth = 0;
+    rp_ci_.stageCount = shader_stages_.size();
+    rp_ci_.pStages = shader_stages_.data();
+    rp_ci_.groupCount = groups_.size();
+    rp_ci_.pGroups = groups_.data();
+}
+
+void CreateNVRayTracingPipelineHelper::InitPipelineCacheInfo() {
+    pc_ci_.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
+    pc_ci_.pNext = nullptr;
+    pc_ci_.flags = 0;
+    pc_ci_.initialDataSize = 0;
+    pc_ci_.pInitialData = nullptr;
+}
+
+void CreateNVRayTracingPipelineHelper::InitInfo() {
+    InitShaderGroups();
+    InitDescriptorSetInfo();
+    InitPipelineLayoutInfo();
+    InitShaderInfo();
+    InitNVRayTracingPipelineInfo();
+    InitPipelineCacheInfo();
+}
+
+void CreateNVRayTracingPipelineHelper::InitState() {
+    VkResult err;
+    descriptor_set_.reset(new OneOffDescriptorSet(layer_test_.DeviceObj(), dsl_bindings_));
+    ASSERT_TRUE(descriptor_set_->Initialized());
+
+    pipeline_layout_ = VkPipelineLayoutObj(layer_test_.DeviceObj(), {&descriptor_set_->layout_});
+
+    err = vk::CreatePipelineCache(layer_test_.device(), &pc_ci_, NULL, &pipeline_cache_);
+    ASSERT_VK_SUCCESS(err);
+}
+
+void CreateNVRayTracingPipelineHelper::LateBindPipelineInfo() {
+    // By value or dynamically located items must be late bound
+    rp_ci_.layout = pipeline_layout_.handle();
+    rp_ci_.stageCount = shader_stages_.size();
+    rp_ci_.pStages = shader_stages_.data();
+}
+
+VkResult CreateNVRayTracingPipelineHelper::CreateNVRayTracingPipeline(bool implicit_destroy, bool do_late_bind) {
+    VkResult err;
+    if (do_late_bind) {
+        LateBindPipelineInfo();
+    }
+    if (implicit_destroy && (pipeline_ != VK_NULL_HANDLE)) {
+        vk::DestroyPipeline(layer_test_.device(), pipeline_, nullptr);
+        pipeline_ = VK_NULL_HANDLE;
+    }
+
+    PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV =
+        (PFN_vkCreateRayTracingPipelinesNV)vk::GetInstanceProcAddr(layer_test_.instance(), "vkCreateRayTracingPipelinesNV");
+    err = vkCreateRayTracingPipelinesNV(layer_test_.device(), pipeline_cache_, 1, &rp_ci_, nullptr, &pipeline_);
+    return err;
+}
+
+namespace chain_util {
+const void *ExtensionChain::Head() const { return head_; }
+}  // namespace chain_util
+
+BarrierQueueFamilyTestHelper::QueueFamilyObjs::~QueueFamilyObjs() {
+    delete command_buffer2;
+    delete command_buffer;
+    delete command_pool;
+    delete queue;
+}
+
+void BarrierQueueFamilyTestHelper::QueueFamilyObjs::Init(VkDeviceObj *device, uint32_t qf_index, VkQueue qf_queue,
+                                                         VkCommandPoolCreateFlags cp_flags) {
+    index = qf_index;
+    queue = new VkQueueObj(qf_queue, qf_index);
+    command_pool = new VkCommandPoolObj(device, qf_index, cp_flags);
+    command_buffer = new VkCommandBufferObj(device, command_pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, queue);
+    command_buffer2 = new VkCommandBufferObj(device, command_pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, queue);
+}
+
+BarrierQueueFamilyTestHelper::Context::Context(VkLayerTest *test, const std::vector<uint32_t> &queue_family_indices)
+    : layer_test(test) {
+    if (0 == queue_family_indices.size()) {
+        return;  // This is invalid
+    }
+    VkDeviceObj *device_obj = layer_test->DeviceObj();
+    queue_families.reserve(queue_family_indices.size());
+    default_index = queue_family_indices[0];
+    for (auto qfi : queue_family_indices) {
+        VkQueue queue = device_obj->queue_family_queues(qfi)[0]->handle();
+        queue_families.emplace(std::make_pair(qfi, QueueFamilyObjs()));
+        queue_families[qfi].Init(device_obj, qfi, queue, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
+    }
+    Reset();
+}
+
+void BarrierQueueFamilyTestHelper::Context::Reset() {
+    layer_test->DeviceObj()->wait();
+    for (auto &qf : queue_families) {
+        vk::ResetCommandPool(layer_test->device(), qf.second.command_pool->handle(), 0);
+    }
+}
+
+BarrierQueueFamilyTestHelper::BarrierQueueFamilyTestHelper(Context *context)
+    : context_(context), image_(context->layer_test->DeviceObj()) {}
+
+void BarrierQueueFamilyTestHelper::Init(std::vector<uint32_t> *families, bool image_memory, bool buffer_memory) {
+    VkDeviceObj *device_obj = context_->layer_test->DeviceObj();
+
+    image_.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0, families,
+                image_memory);
+
+    ASSERT_TRUE(image_.initialized());
+
+    image_barrier_ = image_.image_memory_barrier(VK_ACCESS_TRANSFER_READ_BIT, VK_ACCESS_TRANSFER_READ_BIT, image_.Layout(),
+                                                 image_.Layout(), image_.subresource_range(VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1));
+
+    VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+    buffer_.init_as_src_and_dst(*device_obj, 256, mem_prop, families, buffer_memory);
+    ASSERT_TRUE(buffer_.initialized());
+    buffer_barrier_ = buffer_.buffer_memory_barrier(VK_ACCESS_TRANSFER_READ_BIT, VK_ACCESS_TRANSFER_READ_BIT, 0, VK_WHOLE_SIZE);
+}
+
+BarrierQueueFamilyTestHelper::QueueFamilyObjs *BarrierQueueFamilyTestHelper::GetQueueFamilyInfo(Context *context, uint32_t qfi) {
+    QueueFamilyObjs *qf;
+
+    auto qf_it = context->queue_families.find(qfi);
+    if (qf_it != context->queue_families.end()) {
+        qf = &(qf_it->second);
+    } else {
+        qf = &(context->queue_families[context->default_index]);
+    }
+    return qf;
+}
+
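+// Records a pipeline barrier using the given src/dst queue family indices on both the
+// image and buffer barriers, optionally submits it, and then checks that the supplied
+// error strings were (or, for positive tests, were not) reported.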
+void BarrierQueueFamilyTestHelper::operator()(std::string img_err, std::string buf_err, uint32_t src, uint32_t dst, bool positive,
+                                              uint32_t queue_family_index, Modifier mod) {
+    auto monitor = context_->layer_test->Monitor();
+    if (img_err.length()) monitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT, img_err);
+    if (buf_err.length()) monitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT, buf_err);
+
+    image_barrier_.srcQueueFamilyIndex = src;
+    image_barrier_.dstQueueFamilyIndex = dst;
+    buffer_barrier_.srcQueueFamilyIndex = src;
+    buffer_barrier_.dstQueueFamilyIndex = dst;
+
+    QueueFamilyObjs *qf = GetQueueFamilyInfo(context_, queue_family_index);
+
+    VkCommandBufferObj *command_buffer = qf->command_buffer;
+    for (int cb_repeat = 0; cb_repeat < (mod == Modifier::DOUBLE_COMMAND_BUFFER ? 2 : 1); cb_repeat++) {
+        command_buffer->begin();
+        for (int repeat = 0; repeat < (mod == Modifier::DOUBLE_RECORD ? 2 : 1); repeat++) {
+            vk::CmdPipelineBarrier(command_buffer->handle(), VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
+                                   VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 1, &buffer_barrier_, 1, &image_barrier_);
+        }
+        command_buffer->end();
+        command_buffer = qf->command_buffer2;  // The second pass (if any) records into the second command buffer.
+    }
+
+    if (queue_family_index != kInvalidQueueFamily) {
+        if (mod == Modifier::DOUBLE_COMMAND_BUFFER) {
+            // The default-constructed Fence resolves to VK_NULL_HANDLE, i.e. no fence.
+            qf->queue->submit({{qf->command_buffer, qf->command_buffer2}}, vk_testing::Fence(), positive);
+        } else {
+            qf->command_buffer->QueueCommandBuffer(positive);  // Check for success on positive tests only
+        }
+    }
+
+    if (positive) {
+        monitor->VerifyNotFound();
+    } else {
+        monitor->VerifyFound();
+    }
+    context_->Reset();
+}
+
+void print_android(const char *c) {
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    __android_log_print(ANDROID_LOG_INFO, "VulkanLayerValidationTests", "%s", c);
+#endif  // VK_USE_PLATFORM_ANDROID_KHR
+}
+
+#if defined(ANDROID) && defined(VALIDATION_APK)
+const char *appTag = "VulkanLayerValidationTests";
+static bool initialized = false;
+static bool active = false;
+
+// Convert Intent extras to argv
+// Ported from the Hologram sample; the only difference is the configurable intent-extra key.
+std::vector<std::string> get_args(android_app &app, const char *intent_extra_data_key) {
+    std::vector<std::string> args;
+    JavaVM &vm = *app.activity->vm;
+    JNIEnv *p_env;
+    if (vm.AttachCurrentThread(&p_env, nullptr) != JNI_OK) return args;
+
+    JNIEnv &env = *p_env;
+    jobject activity = app.activity->clazz;
+    jmethodID get_intent_method = env.GetMethodID(env.GetObjectClass(activity), "getIntent", "()Landroid/content/Intent;");
+    jobject intent = env.CallObjectMethod(activity, get_intent_method);
+    jmethodID get_string_extra_method =
+        env.GetMethodID(env.GetObjectClass(intent), "getStringExtra", "(Ljava/lang/String;)Ljava/lang/String;");
+    jvalue get_string_extra_args;
+    get_string_extra_args.l = env.NewStringUTF(intent_extra_data_key);
+    jstring extra_str = static_cast<jstring>(env.CallObjectMethodA(intent, get_string_extra_method, &get_string_extra_args));
+
+    std::string args_str;
+    if (extra_str) {
+        const char *extra_utf = env.GetStringUTFChars(extra_str, nullptr);
+        args_str = extra_utf;
+        env.ReleaseStringUTFChars(extra_str, extra_utf);
+        env.DeleteLocalRef(extra_str);
+    }
+
+    env.DeleteLocalRef(get_string_extra_args.l);
+    env.DeleteLocalRef(intent);
+    vm.DetachCurrentThread();
+
+    // split args_str
+    std::stringstream ss(args_str);
+    std::string arg;
+    while (std::getline(ss, arg, ' ')) {
+        if (!arg.empty()) args.push_back(arg);
+    }
+
+    return args;
+}
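+// Illustrative launch command (the activity name below is an assumption; the package name
+// matches the output paths used later in android_main):
+//   adb shell am start -n com.example.VulkanLayerValidationTests/android.app.NativeActivity \
+//       --es args "--gtest_filter=VkLayerTest.*"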
+
+void addFullTestCommentIfPresent(const ::testing::TestInfo &test_info, std::string &error_message) {
+    const char *const type_param = test_info.type_param();
+    const char *const value_param = test_info.value_param();
+
+    if (type_param != NULL || value_param != NULL) {
+        error_message.append(", where ");
+        if (type_param != NULL) {
+            error_message.append("TypeParam = ").append(type_param);
+            if (value_param != NULL) error_message.append(" and ");
+        }
+        if (value_param != NULL) {
+            error_message.append("GetParam() = ").append(value_param);
+        }
+    }
+}
+
+// Inspired by https://github.com/google/googletest/blob/master/googletest/docs/AdvancedGuide.md
+class LogcatPrinter : public ::testing::EmptyTestEventListener {
+    // Called before a test starts.
+    virtual void OnTestStart(const ::testing::TestInfo &test_info) {
+        __android_log_print(ANDROID_LOG_INFO, appTag, "[ RUN      ] %s.%s", test_info.test_case_name(), test_info.name());
+    }
+
+    // Called after a failed assertion or a SUCCEED() invocation.
+    virtual void OnTestPartResult(const ::testing::TestPartResult &result) {
+        // If the test part succeeded, we don't need to do anything.
+        if (result.type() == ::testing::TestPartResult::kSuccess) return;
+
+        __android_log_print(ANDROID_LOG_INFO, appTag, "%s in %s:%d %s", result.failed() ? "*** Failure" : "Success",
+                            result.file_name(), result.line_number(), result.summary());
+    }
+
+    // Called after a test ends.
+    virtual void OnTestEnd(const ::testing::TestInfo &info) {
+        std::string result;
+        if (info.result()->Passed()) {
+            result.append("[       OK ]");
+        } else {
+            result.append("[  FAILED  ]");
+        }
+        result.append(info.test_case_name()).append(".").append(info.name());
+        if (info.result()->Failed()) addFullTestCommentIfPresent(info, result);
+
+        if (::testing::GTEST_FLAG(print_time)) {
+            std::ostringstream os;
+            os << info.result()->elapsed_time();
+            result.append(" (").append(os.str()).append(" ms)");
+        }
+
+        __android_log_print(ANDROID_LOG_INFO, appTag, "%s", result.c_str());
+    };
+};
+
+static int32_t processInput(struct android_app *app, AInputEvent *event) { return 0; }
+
+static void processCommand(struct android_app *app, int32_t cmd) {
+    switch (cmd) {
+        case APP_CMD_INIT_WINDOW: {
+            if (app->window) {
+                initialized = true;
+                VkTestFramework::window = app->window;
+            }
+            break;
+        }
+        case APP_CMD_GAINED_FOCUS: {
+            active = true;
+            break;
+        }
+        case APP_CMD_LOST_FOCUS: {
+            active = false;
+            break;
+        }
+    }
+}
+
+void android_main(struct android_app *app) {
+    int vulkanSupport = InitVulkan();
+    if (vulkanSupport == 0) {
+        __android_log_print(ANDROID_LOG_INFO, appTag, "==== FAILED ==== No Vulkan support found");
+        return;
+    }
+
+    app->onAppCmd = processCommand;
+    app->onInputEvent = processInput;
+
+    while (1) {
+        int events;
+        struct android_poll_source *source;
+        while (ALooper_pollAll(active ? 0 : -1, NULL, &events, (void **)&source) >= 0) {
+            if (source) {
+                source->process(app, source);
+            }
+
+            if (app->destroyRequested != 0) {
+                VkTestFramework::Finish();
+                return;
+            }
+        }
+
+        if (initialized && active) {
+            // Use the following key to send arguments to gtest, e.g.
+            // --es args "--gtest_filter=-VkLayerTest.foo"
+            const char key[] = "args";
+            std::vector<std::string> args = get_args(*app, key);
+
+            std::string filter = "";
+            if (args.size() > 0) {
+                __android_log_print(ANDROID_LOG_INFO, appTag, "Intent args = %s", args[0].c_str());
+                filter += args[0];
+            } else {
+                __android_log_print(ANDROID_LOG_INFO, appTag, "No Intent args detected");
+            }
+
+            int argc = 2;
+            char *argv[] = {(char *)"foo", (char *)filter.c_str()};
+            __android_log_print(ANDROID_LOG_DEBUG, appTag, "filter = %s", argv[1]);
+
+            // Route output to files until we can override the gtest output
+            freopen("/sdcard/Android/data/com.example.VulkanLayerValidationTests/files/out.txt", "w", stdout);
+            freopen("/sdcard/Android/data/com.example.VulkanLayerValidationTests/files/err.txt", "w", stderr);
+
+            ::testing::InitGoogleTest(&argc, argv);
+
+            ::testing::TestEventListeners &listeners = ::testing::UnitTest::GetInstance()->listeners();
+            listeners.Append(new LogcatPrinter);
+
+            VkTestFramework::InitArgs(&argc, argv);
+            ::testing::AddGlobalTestEnvironment(new TestEnvironment);
+
+            int result = RUN_ALL_TESTS();
+
+            if (result != 0) {
+                __android_log_print(ANDROID_LOG_INFO, appTag, "==== Tests FAILED ====");
+            } else {
+                __android_log_print(ANDROID_LOG_INFO, appTag, "==== Tests PASSED ====");
+            }
+
+            VkTestFramework::Finish();
+
+            fclose(stdout);
+            fclose(stderr);
+
+            ANativeActivity_finish(app->activity);
+            return;
+        }
+    }
+}
+#endif
+
+#if defined(_WIN32) && !defined(NDEBUG)
+#include <crtdbg.h>
+#endif
+
+int main(int argc, char **argv) {
+    int result;
+
+#ifdef ANDROID
+    int vulkanSupport = InitVulkan();
+    if (vulkanSupport == 0) return 1;
+#endif
+
+#if defined(_WIN32) && !defined(NDEBUG)
+    _CrtSetReportMode(_CRT_WARN, _CRTDBG_MODE_FILE);
+    _CrtSetReportFile(_CRT_ASSERT, _CRTDBG_FILE_STDERR);
+#endif
+
+    ::testing::InitGoogleTest(&argc, argv);
+    VkTestFramework::InitArgs(&argc, argv);
+
+    ::testing::AddGlobalTestEnvironment(new TestEnvironment);
+
+    result = RUN_ALL_TESTS();
+
+    VkTestFramework::Finish();
+    return result;
+}
diff --git a/src/third_party/vulkan-validation-layers/src/tests/layer_validation_tests.h b/src/third_party/vulkan-validation-layers/src/tests/layer_validation_tests.h
new file mode 100644
index 0000000..0e71d13
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/layer_validation_tests.h
@@ -0,0 +1,727 @@
+/*
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2019 Google, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Author: Chia-I Wu <olvaffe@gmail.com>
+ * Author: Chris Forbes <chrisf@ijw.co.nz>
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Mike Stroyan <mike@LunarG.com>
+ * Author: Tobin Ehlis <tobine@google.com>
+ * Author: Tony Barbour <tony@LunarG.com>
+ * Author: Cody Northrop <cnorthrop@google.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ * Author: Jeremy Kniager <jeremyk@lunarg.com>
+ * Author: Shannon McPherson <shannon@lunarg.com>
+ * Author: John Zulauf <jzulauf@lunarg.com>
+ */
+
+#ifndef VKLAYERTEST_H
+#define VKLAYERTEST_H
+
+#ifdef ANDROID
+#include "vulkan_wrapper.h"
+#else
+#define NOMINMAX
+#include <vulkan/vulkan.h>
+#endif
+
+#include "layers/vk_device_profile_api_layer.h"
+
+#if defined(ANDROID)
+#include <android/log.h>
+#if defined(VALIDATION_APK)
+#include <android_native_app_glue.h>
+#endif
+#endif
+
+#include "icd-spv.h"
+#include "test_common.h"
+#include "vk_layer_config.h"
+#include "vk_format_utils.h"
+#include "vkrenderframework.h"
+#include "vk_typemap_helper.h"
+#include "convert_to_renderpass2.h"
+
+#include <algorithm>
+#include <cmath>
+#include <functional>
+#include <limits>
+#include <memory>
+#include <unordered_set>
+
+//--------------------------------------------------------------------------------------
+// Mesh and VertexFormat Data
+//--------------------------------------------------------------------------------------
+
+static const char kSkipPrefix[] = "             TEST SKIPPED:";
+
+enum BsoFailSelect {
+    BsoFailNone,
+    BsoFailLineWidth,
+    BsoFailDepthBias,
+    BsoFailViewport,
+    BsoFailScissor,
+    BsoFailBlend,
+    BsoFailDepthBounds,
+    BsoFailStencilReadMask,
+    BsoFailStencilWriteMask,
+    BsoFailStencilReference,
+    BsoFailCmdClearAttachments,
+    BsoFailIndexBuffer,
+    BsoFailIndexBufferBadSize,
+    BsoFailIndexBufferBadOffset,
+    BsoFailIndexBufferBadMapSize,
+    BsoFailIndexBufferBadMapOffset,
+    BsoFailLineStipple,
+};
+
+static const char bindStateMinimalShaderText[] = "#version 450\nvoid main() {}\n";
+
+static const char bindStateVertShaderText[] =
+    "#version 450\n"
+    "void main() {\n"
+    "   gl_Position = vec4(1);\n"
+    "}\n";
+
+static const char bindStateVertPointSizeShaderText[] =
+    "#version 450\n"
+    "out gl_PerVertex {\n"
+    "    vec4 gl_Position;\n"
+    "    float gl_PointSize;\n"
+    "};\n"
+    "void main() {\n"
+    "    gl_Position = vec4(1);\n"
+    "    gl_PointSize = 1.0;\n"
+    "}\n";
+
+static char const bindStateGeomShaderText[] =
+    "#version 450\n"
+    "layout(triangles) in;\n"
+    "layout(triangle_strip, max_vertices=3) out;\n"
+    "void main() {\n"
+    "   gl_Position = vec4(1);\n"
+    "   EmitVertex();\n"
+    "}\n";
+
+static char const bindStateGeomPointSizeShaderText[] =
+    "#version 450\n"
+    "layout (points) in;\n"
+    "layout (points) out;\n"
+    "layout (max_vertices = 1) out;\n"
+    "void main() {\n"
+    "   gl_Position = vec4(1);\n"
+    "   gl_PointSize = 1.0;\n"
+    "   EmitVertex();\n"
+    "}\n";
+
+static const char bindStateTscShaderText[] =
+    "#version 450\n"
+    "layout(vertices=3) out;\n"
+    "void main() {\n"
+    "   gl_TessLevelOuter[0] = gl_TessLevelOuter[1] = gl_TessLevelOuter[2] = 1;\n"
+    "   gl_TessLevelInner[0] = 1;\n"
+    "}\n";
+
+static const char bindStateTeshaderText[] =
+    "#version 450\n"
+    "layout(triangles, equal_spacing, cw) in;\n"
+    "void main() { gl_Position = vec4(1); }\n";
+
+static const char bindStateFragShaderText[] =
+    "#version 450\n"
+    "layout(location = 0) out vec4 uFragColor;\n"
+    "void main(){\n"
+    "   uFragColor = vec4(0,1,0,1);\n"
+    "}\n";
+
+static const char bindStateFragSamplerShaderText[] =
+    "#version 450\n"
+    "layout(set=0, binding=0) uniform sampler2D s;\n"
+    "layout(location=0) out vec4 x;\n"
+    "void main(){\n"
+    "   x = texture(s, vec2(1));\n"
+    "}\n";
+
+static const char bindStateFragUniformShaderText[] =
+    "#version 450\n"
+    "layout(set=0) layout(binding=0) uniform foo { int x; int y; } bar;\n"
+    "layout(location=0) out vec4 x;\n"
+    "void main(){\n"
+    "   x = vec4(bar.y);\n"
+    "}\n";
+
+// Static array size helper (returns the element count of a C-style array)
+template <class ElementT, size_t array_size>
+size_t size(ElementT (&)[array_size]) {
+    return array_size;
+}
+
+// Format search helper
+VkFormat FindSupportedDepthOnlyFormat(VkPhysicalDevice phy);
+VkFormat FindSupportedStencilOnlyFormat(VkPhysicalDevice phy);
+VkFormat FindSupportedDepthStencilFormat(VkPhysicalDevice phy);
+
+// Returns true if *any* requested features are available.
+// Assumption is that the framework can successfully create an image as
+// long as at least one of the feature bits is present (excepting VTX_BUF).
+bool ImageFormatIsSupported(VkPhysicalDevice phy, VkFormat format, VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL,
+                            VkFormatFeatureFlags features = ~VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT);
+
+// Returns true if format and *all* requested features are available.
+bool ImageFormatAndFeaturesSupported(VkPhysicalDevice phy, VkFormat format, VkImageTiling tiling, VkFormatFeatureFlags features);
+
+// Returns true if format and *all* requested features are available.
+bool ImageFormatAndFeaturesSupported(const VkInstance inst, const VkPhysicalDevice phy, const VkImageCreateInfo info,
+                                     const VkFormatFeatureFlags features);
+
+// Validation report callback prototype
+VKAPI_ATTR VkBool32 VKAPI_CALL myDbgFunc(VkFlags msgFlags, VkDebugReportObjectTypeEXT objType, uint64_t srcObject, size_t location,
+                                         int32_t msgCode, const char *pLayerPrefix, const char *pMsg, void *pUserData);
+
+// Simple sane SamplerCreateInfo boilerplate
+VkSamplerCreateInfo SafeSaneSamplerCreateInfo();
+
+VkImageViewCreateInfo SafeSaneImageViewCreateInfo(VkImage image, VkFormat format, VkImageAspectFlags aspect_mask);
+
+VkImageViewCreateInfo SafeSaneImageViewCreateInfo(const VkImageObj &image, VkFormat format, VkImageAspectFlags aspect_mask);
+
+// Helper for checking createRenderPass2 support and adding related extensions.
+bool CheckCreateRenderPass2Support(VkRenderFramework *renderFramework, std::vector<const char *> &device_extension_names);
+
+// Helper for checking descriptor_indexing support and adding related extensions.
+bool CheckDescriptorIndexingSupportAndInitFramework(VkRenderFramework *renderFramework,
+                                                    std::vector<const char *> &instance_extension_names,
+                                                    std::vector<const char *> &device_extension_names,
+                                                    VkValidationFeaturesEXT *features, void *userData);
+
+// Dependent "false" type for the static assert, as GCC will evaluate
+// non-dependent static_asserts even for non-instantiated templates
+template <typename T>
+struct AlwaysFalse : std::false_type {};
+
+// Helpers to get nearest greater or smaller value (of float) -- useful for testing the boundary cases of Vulkan limits
+template <typename T>
+T NearestGreater(const T from) {
+    using Lim = std::numeric_limits<T>;
+    const auto positive_direction = Lim::has_infinity ? Lim::infinity() : Lim::max();
+
+    return std::nextafter(from, positive_direction);
+}
+
+template <typename T>
+T NearestSmaller(const T from) {
+    using Lim = std::numeric_limits<T>;
+    const auto negative_direction = Lim::has_infinity ? -Lim::infinity() : Lim::lowest();
+
+    return std::nextafter(from, negative_direction);
+}
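+
+// Illustrative use (a sketch, not part of the upstream header): exceed a reported device
+// limit by the smallest representable amount when probing a validation boundary, e.g.
+//
+//   const float invalid_width = NearestGreater(m_device->props.limits.lineWidthRange[1]);
+//   const float invalid_bias  = NearestSmaller(-m_device->props.limits.maxSamplerLodBias);
+//
+// (m_device->props is assumed to expose the VkPhysicalDeviceProperties of the test device.)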
+
+class VkLayerTest : public VkRenderFramework {
+  public:
+    void VKTriangleTest(BsoFailSelect failCase);
+
+    void GenericDrawPreparation(VkCommandBufferObj *commandBuffer, VkPipelineObj &pipelineobj, VkDescriptorSetObj &descriptorSet,
+                                BsoFailSelect failCase);
+
+    void Init(VkPhysicalDeviceFeatures *features = nullptr, VkPhysicalDeviceFeatures2 *features2 = nullptr,
+              const VkCommandPoolCreateFlags flags = 0, void *instance_pnext = nullptr);
+    bool AddSurfaceInstanceExtension();
+    bool AddSwapchainDeviceExtension();
+    VkCommandBufferObj *CommandBuffer();
+
+  protected:
+    uint32_t m_instance_api_version = 0;
+    uint32_t m_target_api_version = 0;
+    bool m_enableWSI;
+
+    uint32_t SetTargetApiVersion(uint32_t target_api_version);
+    uint32_t DeviceValidationVersion();
+    bool LoadDeviceProfileLayer(
+        PFN_vkSetPhysicalDeviceFormatPropertiesEXT &fpvkSetPhysicalDeviceFormatPropertiesEXT,
+        PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT &fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT);
+
+    VkLayerTest();
+};
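+
+// Usage sketch for LoadDeviceProfileLayer (illustrative only; the chosen format and the
+// feature bit being cleared are assumptions): tests use the device_profile_api layer to
+// override the format features reported to the validation layers.
+//
+//   PFN_vkSetPhysicalDeviceFormatPropertiesEXT fpvkSetPhysicalDeviceFormatPropertiesEXT = nullptr;
+//   PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT = nullptr;
+//   if (!LoadDeviceProfileLayer(fpvkSetPhysicalDeviceFormatPropertiesEXT,
+//                               fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT)) return;
+//   VkFormatProperties props;
+//   fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_B8G8R8A8_UNORM, &props);
+//   props.optimalTilingFeatures &= ~VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT;
+//   fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_B8G8R8A8_UNORM, props);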
+
+class VkPositiveLayerTest : public VkLayerTest {
+  public:
+  protected:
+};
+
+class VkBestPracticesLayerTest : public VkLayerTest {
+  public:
+    void InitBestPracticesFramework();
+
+  protected:
+};
+
+class VkWsiEnabledLayerTest : public VkLayerTest {
+  public:
+  protected:
+    VkWsiEnabledLayerTest() { m_enableWSI = true; }
+};
+
+class VkBufferTest {
+  public:
+    enum eTestEnFlags {
+        eDoubleDelete,
+        eInvalidDeviceOffset,
+        eInvalidMemoryOffset,
+        eBindNullBuffer,
+        eBindFakeBuffer,
+        eFreeInvalidHandle,
+        eNone,
+    };
+
+    enum eTestConditions { eOffsetAlignment = 1 };
+
+    static bool GetTestConditionValid(VkDeviceObj *aVulkanDevice, eTestEnFlags aTestFlag, VkBufferUsageFlags aBufferUsage = 0);
+    // A constructor that performs validation tests during construction.
+    VkBufferTest(VkDeviceObj *aVulkanDevice, VkBufferUsageFlags aBufferUsage, eTestEnFlags aTestFlag = eNone);
+    ~VkBufferTest();
+    bool GetBufferCurrent();
+    const VkBuffer &GetBuffer();
+    void TestDoubleDestroy();
+
+  protected:
+    bool AllocateCurrent;
+    bool BoundCurrent;
+    bool CreateCurrent;
+    bool InvalidDeleteEn;
+
+    VkBuffer VulkanBuffer;
+    VkDevice VulkanDevice;
+    VkDeviceMemory VulkanMemory;
+};
+
+struct CreatePipelineHelper;
+class VkVerticesObj {
+  public:
+    VkVerticesObj(VkDeviceObj *aVulkanDevice, unsigned aAttributeCount, unsigned aBindingCount, unsigned aByteStride,
+                  VkDeviceSize aVertexCount, const float *aVerticies);
+    ~VkVerticesObj();
+    bool AddVertexInputToPipe(VkPipelineObj &aPipelineObj);
+    bool AddVertexInputToPipeHelpr(CreatePipelineHelper *pipelineHelper);
+    void BindVertexBuffers(VkCommandBuffer aCommandBuffer, unsigned aOffsetCount = 0, VkDeviceSize *aOffsetList = nullptr);
+
+  protected:
+    static uint32_t BindIdGenerator;
+
+    bool BoundCurrent;
+    unsigned AttributeCount;
+    unsigned BindingCount;
+    uint32_t BindId;
+
+    VkPipelineVertexInputStateCreateInfo PipelineVertexInputStateCreateInfo;
+    VkVertexInputAttributeDescription *VertexInputAttributeDescription;
+    VkVertexInputBindingDescription *VertexInputBindingDescription;
+    VkConstantBufferObj VulkanMemoryBuffer;
+};
+
+struct OneOffDescriptorSet {
+    VkDeviceObj *device_;
+    VkDescriptorPool pool_;
+    VkDescriptorSetLayoutObj layout_;
+    VkDescriptorSet set_;
+    typedef std::vector<VkDescriptorSetLayoutBinding> Bindings;
+    std::vector<VkDescriptorBufferInfo> buffer_infos;
+    std::vector<VkDescriptorImageInfo> image_infos;
+    std::vector<VkWriteDescriptorSet> descriptor_writes;
+
+    OneOffDescriptorSet(VkDeviceObj *device, const Bindings &bindings, VkDescriptorSetLayoutCreateFlags layout_flags = 0,
+                        void *layout_pnext = NULL, VkDescriptorPoolCreateFlags poolFlags = 0, void *allocate_pnext = NULL);
+    ~OneOffDescriptorSet();
+    bool Initialized();
+    void WriteDescriptorBufferInfo(int binding, VkBuffer buffer, VkDeviceSize size,
+                                   VkDescriptorType descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
+    void WriteDescriptorBufferView(int binding, VkBufferView &buffer_view,
+                                   VkDescriptorType descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
+    void WriteDescriptorImageInfo(int binding, VkImageView image_view, VkSampler sampler,
+                                  VkDescriptorType descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
+    void UpdateDescriptorSets();
+};
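+
+// Usage sketch (illustrative only; the buffer object and sizes are assumptions):
+//
+//   OneOffDescriptorSet descriptor_set(m_device, {
+//       {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+//   });
+//   ASSERT_TRUE(descriptor_set.Initialized());
+//   descriptor_set.WriteDescriptorBufferInfo(0, buffer.handle(), 256);
+//   descriptor_set.UpdateDescriptorSets();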
+
+template <typename T>
+bool IsValidVkStruct(const T &s) {
+    return LvlTypeMap<T>::kSType == s.sType;
+}
+
+// Helper class for tersely creating create pipeline tests
+//
+// Designed with minimal error checking to ensure easy error state creation
+// See OneshotTest for typical usage
+struct CreatePipelineHelper {
+  public:
+    std::vector<VkDescriptorSetLayoutBinding> dsl_bindings_;
+    std::unique_ptr<OneOffDescriptorSet> descriptor_set_;
+    std::vector<VkPipelineShaderStageCreateInfo> shader_stages_;
+    VkPipelineVertexInputStateCreateInfo vi_ci_ = {};
+    VkPipelineInputAssemblyStateCreateInfo ia_ci_ = {};
+    VkPipelineTessellationStateCreateInfo tess_ci_ = {};
+    VkViewport viewport_ = {};
+    VkRect2D scissor_ = {};
+    VkPipelineViewportStateCreateInfo vp_state_ci_ = {};
+    VkPipelineMultisampleStateCreateInfo pipe_ms_state_ci_ = {};
+    VkPipelineLayoutCreateInfo pipeline_layout_ci_ = {};
+    VkPipelineLayoutObj pipeline_layout_;
+    VkPipelineDynamicStateCreateInfo dyn_state_ci_ = {};
+    VkPipelineRasterizationStateCreateInfo rs_state_ci_ = {};
+    VkPipelineRasterizationLineStateCreateInfoEXT line_state_ci_ = {};
+    VkPipelineColorBlendAttachmentState cb_attachments_ = {};
+    VkPipelineColorBlendStateCreateInfo cb_ci_ = {};
+    VkGraphicsPipelineCreateInfo gp_ci_ = {};
+    VkPipelineCacheCreateInfo pc_ci_ = {};
+    VkPipeline pipeline_ = VK_NULL_HANDLE;
+    VkPipelineCache pipeline_cache_ = VK_NULL_HANDLE;
+    std::unique_ptr<VkShaderObj> vs_;
+    std::unique_ptr<VkShaderObj> fs_;
+    VkLayerTest &layer_test_;
+    CreatePipelineHelper(VkLayerTest &test);
+    ~CreatePipelineHelper();
+
+    void InitDescriptorSetInfo();
+    void InitInputAndVertexInfo();
+    void InitMultisampleInfo();
+    void InitPipelineLayoutInfo();
+    void InitViewportInfo();
+    void InitDynamicStateInfo();
+    void InitShaderInfo();
+    void InitRasterizationInfo();
+    void InitLineRasterizationInfo();
+    void InitBlendStateInfo();
+    void InitGraphicsPipelineInfo();
+    void InitPipelineCacheInfo();
+
+    // Not called by default during InitInfo()
+    void InitTesselationState();
+
+    // TBD -- add control for optional and/or additional initialization
+    void InitInfo();
+    void InitState();
+    void LateBindPipelineInfo();
+    VkResult CreateGraphicsPipeline(bool implicit_destroy = true, bool do_late_bind = true);
+
+    // Helper function to create a simple test case (positive or negative)
+    //
+    // info_override can be any callable that takes a CreatePipelineHelper &
+    // flags, error can be any args accepted by "SetDesiredFailureMsg".
+    template <typename Test, typename OverrideFunc, typename Error>
+    static void OneshotTest(Test &test, const OverrideFunc &info_override, const VkFlags flags, const std::vector<Error> &errors,
+                            bool positive_test = false) {
+        CreatePipelineHelper helper(test);
+        helper.InitInfo();
+        info_override(helper);
+        helper.InitState();
+
+        for (const auto &error : errors) test.Monitor()->SetDesiredFailureMsg(flags, error);
+        helper.CreateGraphicsPipeline();
+
+        if (positive_test) {
+            test.Monitor()->VerifyNotFound();
+        } else {
+            test.Monitor()->VerifyFound();
+        }
+    }
+
+    template <typename Test, typename OverrideFunc, typename Error>
+    static void OneshotTest(Test &test, const OverrideFunc &info_override, const VkFlags flags, Error error,
+                            bool positive_test = false) {
+        OneshotTest(test, info_override, flags, std::vector<Error>(1, error), positive_test);
+    }
+};
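+
+// Typical OneshotTest usage (a sketch, not part of the upstream header; the lambda body and
+// the expected-error string are placeholders):
+//
+//   ASSERT_NO_FATAL_FAILURE(Init());
+//   ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+//   const auto break_line_width = [](CreatePipelineHelper &helper) {
+//       helper.rs_state_ci_.lineWidth = 2.0f;  // invalid unless the wideLines feature is enabled
+//   };
+//   CreatePipelineHelper::OneshotTest(*this, break_line_width, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+//                                     "hypothetical-error-substring");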
+
+struct CreateComputePipelineHelper {
+  public:
+    std::vector<VkDescriptorSetLayoutBinding> dsl_bindings_;
+    std::unique_ptr<OneOffDescriptorSet> descriptor_set_;
+    VkPipelineLayoutCreateInfo pipeline_layout_ci_ = {};
+    VkPipelineLayoutObj pipeline_layout_;
+    VkComputePipelineCreateInfo cp_ci_ = {};
+    VkPipelineCacheCreateInfo pc_ci_ = {};
+    VkPipeline pipeline_ = VK_NULL_HANDLE;
+    VkPipelineCache pipeline_cache_ = VK_NULL_HANDLE;
+    std::unique_ptr<VkShaderObj> cs_;
+    VkLayerTest &layer_test_;
+    CreateComputePipelineHelper(VkLayerTest &test);
+    ~CreateComputePipelineHelper();
+
+    void InitDescriptorSetInfo();
+    void InitPipelineLayoutInfo();
+    void InitShaderInfo();
+    void InitComputePipelineInfo();
+    void InitPipelineCacheInfo();
+
+    // TBD -- add control for optional and/or additional initialization
+    void InitInfo();
+    void InitState();
+    void LateBindPipelineInfo();
+    VkResult CreateComputePipeline(bool implicit_destroy = true, bool do_late_bind = true);
+
+    // Helper function to create a simple test case (positive or negative)
+    //
+    // info_override can be any callable that takes a CreateComputePipelineHelper &
+    // flags, error can be any args accepted by "SetDesiredFailureMsg".
+    template <typename Test, typename OverrideFunc, typename Error>
+    static void OneshotTest(Test &test, const OverrideFunc &info_override, const VkFlags flags, const std::vector<Error> &errors,
+                            bool positive_test = false) {
+        CreateComputePipelineHelper helper(test);
+        helper.InitInfo();
+        info_override(helper);
+        helper.InitState();
+
+        for (const auto &error : errors) test.Monitor()->SetDesiredFailureMsg(flags, error);
+        helper.CreateComputePipeline();
+
+        if (positive_test) {
+            test.Monitor()->VerifyNotFound();
+        } else {
+            test.Monitor()->VerifyFound();
+        }
+    }
+
+    template <typename Test, typename OverrideFunc, typename Error>
+    static void OneshotTest(Test &test, const OverrideFunc &info_override, const VkFlags flags, Error error,
+                            bool positive_test = false) {
+        OneshotTest(test, info_override, flags, std::vector<Error>(1, error), positive_test);
+    }
+};
+
+// Helper class for tersely creating create ray tracing pipeline tests
+//
+// Designed with minimal error checking to ensure easy error state creation
+// See OneshotTest for typical usage
+struct CreateNVRayTracingPipelineHelper {
+  public:
+    std::vector<VkDescriptorSetLayoutBinding> dsl_bindings_;
+    std::unique_ptr<OneOffDescriptorSet> descriptor_set_;
+    std::vector<VkPipelineShaderStageCreateInfo> shader_stages_;
+    VkPipelineLayoutCreateInfo pipeline_layout_ci_ = {};
+    VkPipelineLayoutObj pipeline_layout_;
+    VkRayTracingPipelineCreateInfoNV rp_ci_ = {};
+    VkPipelineCacheCreateInfo pc_ci_ = {};
+    VkPipeline pipeline_ = VK_NULL_HANDLE;
+    VkPipelineCache pipeline_cache_ = VK_NULL_HANDLE;
+    std::vector<VkRayTracingShaderGroupCreateInfoNV> groups_;
+    std::unique_ptr<VkShaderObj> rgs_;
+    std::unique_ptr<VkShaderObj> chs_;
+    std::unique_ptr<VkShaderObj> mis_;
+    VkLayerTest &layer_test_;
+    CreateNVRayTracingPipelineHelper(VkLayerTest &test);
+    ~CreateNVRayTracingPipelineHelper();
+
+    static bool InitInstanceExtensions(VkLayerTest &test, std::vector<const char *> &instance_extension_names);
+    static bool InitDeviceExtensions(VkLayerTest &test, std::vector<const char *> &device_extension_names);
+    void InitShaderGroups();
+    void InitDescriptorSetInfo();
+    void InitPipelineLayoutInfo();
+    void InitShaderInfo();
+    void InitNVRayTracingPipelineInfo();
+    void InitPipelineCacheInfo();
+    void InitInfo();
+    void InitState();
+    void LateBindPipelineInfo();
+    VkResult CreateNVRayTracingPipeline(bool implicit_destroy = true, bool do_late_bind = true);
+
+    // Helper function to create a simple test case (positive or negative)
+    //
+    // info_override can be any callable that takes a CreateNVRayTracingPipelineHelper &
+    // flags, error can be any args accepted by "SetDesiredFailureMsg".
+    template <typename Test, typename OverrideFunc, typename Error>
+    static void OneshotTest(Test &test, const OverrideFunc &info_override, const std::vector<Error> &errors,
+                            const VkFlags flags = VK_DEBUG_REPORT_ERROR_BIT_EXT) {
+        CreateNVRayTracingPipelineHelper helper(test);
+        helper.InitInfo();
+        info_override(helper);
+        helper.InitState();
+
+        for (const auto &error : errors) test.Monitor()->SetDesiredFailureMsg(flags, error);
+        helper.CreateNVRayTracingPipeline();
+        test.Monitor()->VerifyFound();
+    }
+
+    template <typename Test, typename OverrideFunc, typename Error>
+    static void OneshotTest(Test &test, const OverrideFunc &info_override, Error error,
+                            const VkFlags flags = VK_DEBUG_REPORT_ERROR_BIT_EXT) {
+        OneshotTest(test, info_override, std::vector<Error>(1, error), flags);
+    }
+
+    template <typename Test, typename OverrideFunc>
+    static void OneshotPositiveTest(Test &test, const OverrideFunc &info_override,
+                                    const VkDebugReportFlagsEXT message_flag_mask = VK_DEBUG_REPORT_ERROR_BIT_EXT) {
+        CreateNVRayTracingPipelineHelper helper(test);
+        helper.InitInfo();
+        info_override(helper);
+        helper.InitState();
+
+        test.Monitor()->ExpectSuccess(message_flag_mask);
+        ASSERT_VK_SUCCESS(helper.CreateNVRayTracingPipeline());
+        test.Monitor()->VerifyNotFound();
+    }
+};
+
+namespace chain_util {
+template <typename T>
+T Init(const void *pnext_in = nullptr) {
+    T pnext_obj = {};
+    pnext_obj.sType = LvlTypeMap<T>::kSType;
+    pnext_obj.pNext = pnext_in;
+    return pnext_obj;
+}
+
+class ExtensionChain {
+    const void *head_ = nullptr;
+    typedef std::function<bool(const char *)> AddIfFunction;
+    AddIfFunction add_if_;
+    typedef std::vector<const char *> List;
+    List *list_;
+
+  public:
+    template <typename F>
+    ExtensionChain(F &add_if, List *list) : add_if_(add_if), list_(list) {}
+
+    template <typename T>
+    void Add(const char *name, T &obj) {
+        if (add_if_(name)) {
+            if (list_) {
+                list_->push_back(name);
+            }
+            obj.pNext = head_;
+            head_ = &obj;
+        }
+    }
+
+    const void *Head() const;
+};
+}  // namespace chain_util
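+
+// Usage sketch (illustrative only; the predicate, extension, and feature struct below are
+// assumptions): conditionally enable an extension and splice its feature struct into a
+// create-info pNext chain.
+//
+//   auto add_if = [&](const char *name) { return DeviceExtensionSupported(gpu(), nullptr, name); };
+//   chain_util::ExtensionChain chain(add_if, &m_device_extension_names);
+//   auto xfb_features = chain_util::Init<VkPhysicalDeviceTransformFeedbackFeaturesEXT>();
+//   chain.Add(VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME, xfb_features);
+//   device_create_info.pNext = chain.Head();  // pNext is const void *, so Head() fits directly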
+
+// PushDescriptorProperties helper
+VkPhysicalDevicePushDescriptorPropertiesKHR GetPushDescriptorProperties(VkInstance instance, VkPhysicalDevice gpu);
+
+// Subgroup properties helper
+VkPhysicalDeviceSubgroupProperties GetSubgroupProperties(VkInstance instance, VkPhysicalDevice gpu);
+
+class BarrierQueueFamilyTestHelper {
+  public:
+    struct QueueFamilyObjs {
+        uint32_t index;
+        // We would use std::unique_ptr, but this triggers a compiler error on older compilers
+        VkQueueObj *queue = nullptr;
+        VkCommandPoolObj *command_pool = nullptr;
+        VkCommandBufferObj *command_buffer = nullptr;
+        VkCommandBufferObj *command_buffer2 = nullptr;
+        ~QueueFamilyObjs();
+        void Init(VkDeviceObj *device, uint32_t qf_index, VkQueue qf_queue, VkCommandPoolCreateFlags cp_flags);
+    };
+
+    struct Context {
+        VkLayerTest *layer_test;
+        uint32_t default_index;
+        std::unordered_map<uint32_t, QueueFamilyObjs> queue_families;
+        Context(VkLayerTest *test, const std::vector<uint32_t> &queue_family_indices);
+        void Reset();
+    };
+
+    BarrierQueueFamilyTestHelper(Context *context);
+    // Init with queue families non-null for CONCURRENT sharing mode (which requires them)
+    void Init(std::vector<uint32_t> *families, bool image_memory = true, bool buffer_memory = true);
+
+    QueueFamilyObjs *GetQueueFamilyInfo(Context *context, uint32_t qfi);
+
+    enum Modifier {
+        NONE,
+        DOUBLE_RECORD,
+        DOUBLE_COMMAND_BUFFER,
+    };
+
+    void operator()(std::string img_err, std::string buf_err = "", uint32_t src = VK_QUEUE_FAMILY_IGNORED,
+                    uint32_t dst = VK_QUEUE_FAMILY_IGNORED, bool positive = false,
+                    uint32_t queue_family_index = kInvalidQueueFamily, Modifier mod = Modifier::NONE);
+
+    static const uint32_t kInvalidQueueFamily = UINT32_MAX;
+    Context *context_;
+    VkImageObj image_;
+    VkImageMemoryBarrier image_barrier_;
+    VkBufferObj buffer_;
+    VkBufferMemoryBarrier buffer_barrier_;
+};
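+
+// Usage sketch (illustrative only; the queue family indices and error strings are
+// placeholders): create a Context for the families under test, initialize the helper,
+// then invoke it once per src/dst combination being checked.
+//
+//   BarrierQueueFamilyTestHelper::Context context(this, {submit_family, other_family});
+//   BarrierQueueFamilyTestHelper excl_test(&context);
+//   excl_test.Init(nullptr);  // null family list -> EXCLUSIVE sharing mode
+//   excl_test("hypothetical-image-error-substring", "hypothetical-buffer-error-substring",
+//             submit_family, other_family);                                   // negative case
+//   excl_test("", "", VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED, true,
+//             submit_family);                                                 // positive case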
+
+struct DebugUtilsLabelCheckData {
+    std::function<void(const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData, DebugUtilsLabelCheckData *)> callback;
+    size_t count;
+};
+
+bool operator==(const VkDebugUtilsLabelEXT &rhs, const VkDebugUtilsLabelEXT &lhs);
+
+VKAPI_ATTR VkBool32 VKAPI_CALL DebugUtilsCallback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
+                                                  VkDebugUtilsMessageTypeFlagsEXT messageTypes,
+                                                  const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData, void *pUserData);
+
+#if GTEST_IS_THREADSAFE
+struct thread_data_struct {
+    VkCommandBuffer commandBuffer;
+    VkDevice device;
+    VkEvent event;
+    VkDescriptorSet descriptorSet;
+    VkBuffer buffer;
+    uint32_t binding;
+    bool *bailout;
+};
+
+extern "C" void *AddToCommandBuffer(void *arg);
+extern "C" void *UpdateDescriptor(void *arg);
+#endif  // GTEST_IS_THREADSAFE
+
+extern "C" void *ReleaseNullFence(void *arg);
+
+void TestRenderPassCreate(ErrorMonitor *error_monitor, const VkDevice device, const VkRenderPassCreateInfo *create_info,
+                          bool rp2_supported, const char *rp1_vuid, const char *rp2_vuid);
+void PositiveTestRenderPassCreate(ErrorMonitor *error_monitor, const VkDevice device, const VkRenderPassCreateInfo *create_info,
+                                  bool rp2_supported);
+void TestRenderPass2KHRCreate(ErrorMonitor *error_monitor, const VkDevice device, const VkRenderPassCreateInfo2KHR *create_info,
+                              const char *rp2_vuid);
+void TestRenderPassBegin(ErrorMonitor *error_monitor, const VkDevice device, const VkCommandBuffer command_buffer,
+                         const VkRenderPassBeginInfo *begin_info, bool rp2Supported, const char *rp1_vuid, const char *rp2_vuid);
+
+// Helpers for the tests below
+void ValidOwnershipTransferOp(ErrorMonitor *monitor, VkCommandBufferObj *cb, VkPipelineStageFlags src_stages,
+                              VkPipelineStageFlags dst_stages, const VkBufferMemoryBarrier *buf_barrier,
+                              const VkImageMemoryBarrier *img_barrier);
+
+void ValidOwnershipTransfer(ErrorMonitor *monitor, VkCommandBufferObj *cb_from, VkCommandBufferObj *cb_to,
+                            VkPipelineStageFlags src_stages, VkPipelineStageFlags dst_stages,
+                            const VkBufferMemoryBarrier *buf_barrier, const VkImageMemoryBarrier *img_barrier);
+
+VkResult GPDIFPHelper(VkPhysicalDevice dev, const VkImageCreateInfo *ci, VkImageFormatProperties *limits = nullptr);
+
+VkFormat FindFormatLinearWithoutMips(VkPhysicalDevice gpu, VkImageCreateInfo image_ci);
+
+bool FindFormatWithoutSamples(VkPhysicalDevice gpu, VkImageCreateInfo &image_ci);
+
+bool FindUnsupportedImage(VkPhysicalDevice gpu, VkImageCreateInfo &image_ci);
+
+VkFormat FindFormatWithoutFeatures(VkPhysicalDevice gpu, VkImageTiling tiling,
+                                   VkFormatFeatureFlags undesired_features = UINT32_MAX);
+
+void NegHeightViewportTests(VkDeviceObj *m_device, VkCommandBufferObj *m_commandBuffer, ErrorMonitor *m_errorMonitor);
+
+void CreateSamplerTest(VkLayerTest &test, const VkSamplerCreateInfo *pCreateInfo, std::string code = "");
+
+void CreateBufferTest(VkLayerTest &test, const VkBufferCreateInfo *pCreateInfo, std::string code = "");
+
+void CreateImageTest(VkLayerTest &test, const VkImageCreateInfo *pCreateInfo, std::string code = "");
+
+void CreateBufferViewTest(VkLayerTest &test, const VkBufferViewCreateInfo *pCreateInfo, const std::vector<std::string> &codes);
+
+void CreateImageViewTest(VkLayerTest &test, const VkImageViewCreateInfo *pCreateInfo, std::string code = "");
+
+void print_android(const char *c);
+#endif  // VKLAYERTEST_H
diff --git a/src/third_party/vulkan-validation-layers/src/tests/layers/CMakeLists.txt b/src/third_party/vulkan-validation-layers/src/tests/layers/CMakeLists.txt
new file mode 100644
index 0000000..678a71e
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/layers/CMakeLists.txt
@@ -0,0 +1,143 @@
+# ~~~
+# Copyright (c) 2016-2019 Valve Corporation
+# Copyright (c) 2016-2019 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ~~~
+
+set(TEST_LAYER_NAME VkLayer_device_profile_api)
+
+set(VK_LAYER_RPATH /usr/lib/x86_64-linux-gnu/vulkan/layer:/usr/lib/i386-linux-gnu/vulkan/layer)
+set(CMAKE_INSTALL_RPATH ${VK_LAYER_RPATH})
+
+if(WIN32)
+    if(NOT (CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_CURRENT_BINARY_DIR))
+        file(TO_NATIVE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/windows/${TEST_LAYER_NAME}.json src_json)
+        if(CMAKE_GENERATOR MATCHES "^Visual Studio.*")
+            file(TO_NATIVE_PATH ${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG>/${TEST_LAYER_NAME}.json dst_json)
+        else()
+            file(TO_NATIVE_PATH ${CMAKE_CURRENT_BINARY_DIR}/${TEST_LAYER_NAME}.json dst_json)
+        endif()
+        add_custom_target(${TEST_LAYER_NAME}-json ALL COMMAND copy ${src_json} ${dst_json} VERBATIM)
+        add_dependencies(${TEST_LAYER_NAME}-json ${TEST_LAYER_NAME})
+        set_target_properties(${TEST_LAYER_NAME}-json PROPERTIES FOLDER ${LAYERS_HELPER_FOLDER})
+    endif()
+elseif(APPLE)
+    # extra setup for out-of-tree builds
+    if(NOT (CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_CURRENT_BINARY_DIR))
+        if(CMAKE_GENERATOR MATCHES "^Xcode.*")
+            add_custom_target(mk_test_layer_config_dir ALL
+                              COMMAND ${CMAKE_COMMAND} -E make_directory ${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG>)
+            add_custom_target(${TEST_LAYER_NAME}-json ALL
+                              DEPENDS mk_test_layer_config_dir
+                              COMMAND ln -sf ${CMAKE_CURRENT_SOURCE_DIR}/macos/${TEST_LAYER_NAME}.json $<CONFIG>
+                              VERBATIM)
+        else()
+            add_custom_target(${TEST_LAYER_NAME}-json ALL
+                              COMMAND ln -sf ${CMAKE_CURRENT_SOURCE_DIR}/macos/${TEST_LAYER_NAME}.json
+                              VERBATIM)
+        endif()
+    endif()
+else()
+    # extra setup for out-of-tree builds
+    if(NOT (CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_CURRENT_BINARY_DIR))
+        add_custom_target(${TEST_LAYER_NAME}-json ALL
+                          COMMAND ln -sf ${CMAKE_CURRENT_SOURCE_DIR}/linux/${TEST_LAYER_NAME}.json
+                          VERBATIM)
+    endif()
+endif()
+
+# --------------------------------------------------------------------------------------------------------------------------------
+
+# System-specific macros to create a library target.
+if(WIN32)
+    macro(AddVkLayer target)
+        file(TO_NATIVE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/VkLayer_${target}.def DEF_FILE)
+        add_custom_target(copy-${target}-def-file ALL
+                          COMMAND ${CMAKE_COMMAND} -E copy_if_different ${DEF_FILE} VkLayer_${target}.def
+                          VERBATIM)
+        set_target_properties(copy-${target}-def-file PROPERTIES FOLDER ${LAYERS_HELPER_FOLDER})
+
+        add_library(VkLayer_${target} SHARED ${ARGN} VkLayer_${target}.def)
+        add_dependencies(VkLayer_${target} VkLayer_utils)
+        target_link_libraries(VkLayer_${target} PRIVATE VkLayer_utils)
+        target_compile_definitions(VkLayer_${target} PRIVATE "_CRT_SECURE_NO_WARNINGS")
+        target_compile_options(VkLayer_${target} PRIVATE $<$<CONFIG:Debug>:/bigobj>)
+    endmacro()
+elseif(APPLE)
+    macro(AddVkLayer target)
+        add_library(VkLayer_${target} SHARED ${ARGN})
+        add_dependencies(VkLayer_${target} VkLayer_utils)
+        set_target_properties(VkLayer_${target} PROPERTIES LINK_FLAGS "-Wl")
+        target_link_libraries(VkLayer_${target} PRIVATE VkLayer_utils)
+        target_compile_options(VkLayer_${target} PRIVATE "-Wpointer-arith" "-Wno-unused-function")
+    endmacro()
+else(UNIX AND NOT APPLE) # i.e.: Linux
+    macro(AddVkLayer target)
+        add_library(VkLayer_${target} SHARED ${ARGN})
+        add_dependencies(VkLayer_${target} VkLayer_utils)
+        set_target_properties(VkLayer_${target} PROPERTIES LINK_FLAGS "-Wl,--version-script=${CMAKE_CURRENT_SOURCE_DIR}/libVkLayer_${target}.map,-Bsymbolic")
+        target_link_libraries(VkLayer_${target} PRIVATE VkLayer_utils)
+        target_compile_options(VkLayer_${target} PRIVATE "-Wpointer-arith" "-Wno-unused-function")
+    endmacro()
+endif()
+
+AddVkLayer(device_profile_api device_profile_api.cpp ${PROJECT_SOURCE_DIR}/layers/vk_layer_extension_utils.cpp)
+
+# --------------------------------------------------------------------------------------------------------------------------------
+
+target_include_directories(${TEST_LAYER_NAME}
+                           PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}
+                                   ${PROJECT_SOURCE_DIR}/layers
+                                   ${CMAKE_CURRENT_BINARY_DIR}
+                                   ${PROJECT_BINARY_DIR}
+                                   ${PROJECT_BINARY_DIR}/layers
+                                   ${CMAKE_BINARY_DIR}
+                                   ${VulkanHeaders_INCLUDE_DIR})
+
+if(WIN32)
+    set_target_properties(${TEST_LAYER_NAME} PROPERTIES FOLDER ${LAYERS_HELPER_FOLDER})
+    # For Windows, copy necessary device_profile_api layer files for the layer_tests
+    file(TO_NATIVE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/windows/${TEST_LAYER_NAME}.json SRC_JSON)
+    file(TO_NATIVE_PATH ${PROJECT_BINARY_DIR}/layers/$<CONFIG>/${TEST_LAYER_NAME}.json DST_JSON)
+    add_custom_command(TARGET ${TEST_LAYER_NAME} POST_BUILD
+                       COMMAND ${CMAKE_COMMAND} -E copy ${SRC_JSON} ${DST_JSON})
+    SET(DEVICE_PROFILE_LAYER_FILES
+        VkLayer_device_profile_api.dll
+        VkLayer_device_profile_api.exp
+        VkLayer_device_profile_api.lib)
+    file(TO_NATIVE_PATH ${PROJECT_BINARY_DIR}/layers/$<CONFIG> DST_LAYER)
+    foreach(DEV_PROF_FILE ${DEVICE_PROFILE_LAYER_FILES})
+        file(TO_NATIVE_PATH ${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG>/${DEV_PROF_FILE} SRC_LAYER)
+        add_custom_command(TARGET ${TEST_LAYER_NAME} POST_BUILD
+                           COMMAND ${CMAKE_COMMAND} -E copy_if_different ${SRC_LAYER} ${DST_LAYER})
+    endforeach()
+elseif(APPLE)
+    if(CMAKE_GENERATOR MATCHES "^Xcode.*")
+        add_custom_command(TARGET ${TEST_LAYER_NAME} POST_BUILD
+                           COMMAND ln -sf ${CMAKE_CURRENT_BINARY_DIR}/${TEST_LAYER_NAME}.json ${CMAKE_BINARY_DIR}/layers/$<CONFIG>
+                           COMMAND ln -sf ${CMAKE_CURRENT_BINARY_DIR}/lib${TEST_LAYER_NAME}.dylib
+                                   ${CMAKE_BINARY_DIR}/layers/$<CONFIG>
+                           VERBATIM)
+    else()
+        add_custom_command(TARGET ${TEST_LAYER_NAME} POST_BUILD
+                           COMMAND ln -sf ${CMAKE_CURRENT_BINARY_DIR}/${TEST_LAYER_NAME}.json ${CMAKE_BINARY_DIR}/layers
+                           COMMAND ln -sf ${CMAKE_CURRENT_BINARY_DIR}/lib${TEST_LAYER_NAME}.dylib ${CMAKE_BINARY_DIR}/layers
+                           VERBATIM)
+    endif()
+else(UNIX AND NOT APPLE) # i.e.: Linux
+    add_custom_command(TARGET ${TEST_LAYER_NAME} POST_BUILD
+                       COMMAND ln -sf ${CMAKE_CURRENT_BINARY_DIR}/${TEST_LAYER_NAME}.json ${PROJECT_BINARY_DIR}/layers
+                       COMMAND ln -sf ${CMAKE_CURRENT_BINARY_DIR}/lib${TEST_LAYER_NAME}.so ${PROJECT_BINARY_DIR}/layers
+                       VERBATIM)
+endif()
diff --git a/src/third_party/vulkan-validation-layers/src/tests/layers/VkLayer_device_profile_api.def b/src/third_party/vulkan-validation-layers/src/tests/layers/VkLayer_device_profile_api.def
new file mode 100644
index 0000000..b7b8042
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/layers/VkLayer_device_profile_api.def
@@ -0,0 +1,27 @@
+;;;; Begin Copyright Notice ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;
+; Copyright (c) 2017 Valve Corporation
+; Copyright (c) 2017 LunarG, Inc.
+;
+; Licensed under the Apache License, Version 2.0 (the "License");
+; you may not use this file except in compliance with the License.
+; You may obtain a copy of the License at
+;
+;     http://www.apache.org/licenses/LICENSE-2.0
+;
+; Unless required by applicable law or agreed to in writing, software
+; distributed under the License is distributed on an "AS IS" BASIS,
+; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+; See the License for the specific language governing permissions and
+; limitations under the License.
+;
+;  Author: Arda Coskunses <arda@lunarg.com>
+;
+;;;;  End Copyright Notice ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+LIBRARY VkLayer_device_profile_api
+EXPORTS
+vkGetInstanceProcAddr
+vkEnumerateInstanceLayerProperties
+vkEnumerateInstanceExtensionProperties
+vkNegotiateLoaderLayerInterfaceVersion
diff --git a/src/third_party/vulkan-validation-layers/src/tests/layers/device_profile_api.cpp b/src/third_party/vulkan-validation-layers/src/tests/layers/device_profile_api.cpp
new file mode 100644
index 0000000..b7795ee
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/layers/device_profile_api.cpp
@@ -0,0 +1,250 @@
+/*
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (C) 2015-2019 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Arda Coskunses <arda@lunarg.com>
+ * Author: Tony Barbour <tony@LunarG.com>
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ */
+#include <string.h>
+#include <stdlib.h>
+#include <cassert>
+#include <mutex>
+#include <unordered_map>
+#include <vector>
+
+#include "vk_layer_data.h"
+#include "vk_dispatch_table_helper.h"
+#include "vk_layer_utils.h"
+#include "vk_lunarg_device_profile_api_layer.h"
+#include "vk_device_profile_api_layer.h"
+
+namespace device_profile_api {
+
+static std::mutex global_lock;
+
+static uint32_t loader_layer_if_version = CURRENT_LOADER_LAYER_INTERFACE_VERSION;
+
+struct layer_data {
+    VkInstance instance;
+    VkPhysicalDeviceProperties phy_device_props;
+    std::unordered_map<VkFormat, VkFormatProperties, std::hash<int> > format_properties_map;
+    VkLayerInstanceDispatchTable dispatch_table;
+};
+
+static std::unordered_map<void *, layer_data *> device_profile_api_dev_data_map;
+
+// device_profile_api Layer EXT APIs
+typedef void(VKAPI_PTR *PFN_vkGetOriginalPhysicalDeviceLimitsEXT)(VkPhysicalDevice physicalDevice,
+                                                                  const VkPhysicalDeviceLimits *limits);
+typedef void(VKAPI_PTR *PFN_vkSetPhysicalDeviceLimitsEXT)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceLimits *newLimits);
+typedef void(VKAPI_PTR *PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT)(VkPhysicalDevice physicalDevice, VkFormat format,
+                                                                            const VkFormatProperties *properties);
+typedef void(VKAPI_PTR *PFN_vkSetPhysicalDeviceFormatPropertiesEXT)(VkPhysicalDevice physicalDevice, VkFormat format,
+                                                                    const VkFormatProperties newProperties);
+
+VKAPI_ATTR void VKAPI_CALL GetOriginalPhysicalDeviceLimitsEXT(VkPhysicalDevice physicalDevice, VkPhysicalDeviceLimits *orgLimits) {
+    std::lock_guard<std::mutex> lock(global_lock);
+    layer_data *phy_dev_data = GetLayerDataPtr(physicalDevice, device_profile_api_dev_data_map);
+    layer_data *instance_data = GetLayerDataPtr(phy_dev_data->instance, device_profile_api_dev_data_map);
+    VkPhysicalDeviceProperties props;
+    instance_data->dispatch_table.GetPhysicalDeviceProperties(physicalDevice, &props);
+    memcpy(orgLimits, &props.limits, sizeof(VkPhysicalDeviceLimits));
+}
+
+VKAPI_ATTR void VKAPI_CALL SetPhysicalDeviceLimitsEXT(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceLimits *newLimits) {
+    std::lock_guard<std::mutex> lock(global_lock);
+    layer_data *phy_dev_data = GetLayerDataPtr(physicalDevice, device_profile_api_dev_data_map);
+    memcpy(&(phy_dev_data->phy_device_props.limits), newLimits, sizeof(VkPhysicalDeviceLimits));
+}
+
+VKAPI_ATTR void VKAPI_CALL GetOriginalPhysicalDeviceFormatPropertiesEXT(VkPhysicalDevice physicalDevice, VkFormat format,
+                                                                        VkFormatProperties *properties) {
+    std::lock_guard<std::mutex> lock(global_lock);
+    layer_data *phy_dev_data = GetLayerDataPtr(physicalDevice, device_profile_api_dev_data_map);
+    layer_data *instance_data = GetLayerDataPtr(phy_dev_data->instance, device_profile_api_dev_data_map);
+    instance_data->dispatch_table.GetPhysicalDeviceFormatProperties(physicalDevice, format, properties);
+}
+
+VKAPI_ATTR void VKAPI_CALL SetPhysicalDeviceFormatPropertiesEXT(VkPhysicalDevice physicalDevice, VkFormat format,
+                                                                const VkFormatProperties newProperties) {
+    std::lock_guard<std::mutex> lock(global_lock);
+    layer_data *phy_dev_data = GetLayerDataPtr(physicalDevice, device_profile_api_dev_data_map);
+
+    memcpy(&(phy_dev_data->format_properties_map[format]), &newProperties, sizeof(VkFormatProperties));
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL CreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
+                                              VkInstance *pInstance) {
+    VkLayerInstanceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
+    std::lock_guard<std::mutex> lock(global_lock);
+
+    assert(chain_info->u.pLayerInfo);
+    PFN_vkGetInstanceProcAddr fp_get_instance_proc_addr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
+    PFN_vkCreateInstance fp_create_instance = (PFN_vkCreateInstance)fp_get_instance_proc_addr(NULL, "vkCreateInstance");
+    if (fp_create_instance == NULL) return VK_ERROR_INITIALIZATION_FAILED;
+
+    // Advance the link info for the next element on the chain
+    chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
+
+    VkResult result = fp_create_instance(pCreateInfo, pAllocator, pInstance);
+    if (result != VK_SUCCESS) return result;
+
+    layer_data *instance_data = GetLayerDataPtr(*pInstance, device_profile_api_dev_data_map);
+    instance_data->instance = *pInstance;
+    layer_init_instance_dispatch_table(*pInstance, &instance_data->dispatch_table, fp_get_instance_proc_addr);
+    instance_data->dispatch_table.GetPhysicalDeviceProcAddr =
+        (PFN_GetPhysicalDeviceProcAddr)fp_get_instance_proc_addr(*pInstance, "vk_layerGetPhysicalDeviceProcAddr");
+
+    uint32_t physical_device_count = 0;
+    instance_data->dispatch_table.EnumeratePhysicalDevices(*pInstance, &physical_device_count, NULL);
+
+    std::vector<VkPhysicalDevice> physical_devices(physical_device_count);
+    result = instance_data->dispatch_table.EnumeratePhysicalDevices(*pInstance, &physical_device_count, physical_devices.data());
+    if (result != VK_SUCCESS) return result;
+
+    for (VkPhysicalDevice physical_device : physical_devices) {
+        layer_data *phy_dev_data = GetLayerDataPtr(physical_device, device_profile_api_dev_data_map);
+        instance_data->dispatch_table.GetPhysicalDeviceProperties(physical_device, &phy_dev_data->phy_device_props);
+        phy_dev_data->instance = *pInstance;
+    }
+    return result;
+}
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties *pProperties) {
+    std::lock_guard<std::mutex> lock(global_lock);
+    layer_data *phy_dev_data = GetLayerDataPtr(physicalDevice, device_profile_api_dev_data_map);
+    memcpy(pProperties, &phy_dev_data->phy_device_props, sizeof(VkPhysicalDeviceProperties));
+}
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format,
+                                                             VkFormatProperties *pProperties) {
+    std::lock_guard<std::mutex> lock(global_lock);
+    layer_data *phy_dev_data = GetLayerDataPtr(physicalDevice, device_profile_api_dev_data_map);
+    layer_data *instance_data = GetLayerDataPtr(phy_dev_data->instance, device_profile_api_dev_data_map);
+    auto device_format_map_it = phy_dev_data->format_properties_map.find(format);
+    if (device_format_map_it != phy_dev_data->format_properties_map.end()) {
+        memcpy(pProperties, &phy_dev_data->format_properties_map[format], sizeof(VkFormatProperties));
+    } else {
+        instance_data->dispatch_table.GetPhysicalDeviceFormatProperties(physicalDevice, format, pProperties);
+    }
+}
+
+static const VkLayerProperties device_profile_api_LayerProps = {
+    "VK_LAYER_LUNARG_device_profile_api",
+    VK_MAKE_VERSION(1, 0, VK_HEADER_VERSION),  // specVersion
+    1,                                         // implementationVersion
+    "LunarG device profile api Layer",
+};
+
+template <typename T>
+VkResult EnumerateProperties(uint32_t src_count, const T *src_props, uint32_t *dst_count, T *dst_props) {
+    if (!dst_props || !src_props) {
+        *dst_count = src_count;
+        return VK_SUCCESS;
+    }
+
+    uint32_t copy_count = (*dst_count < src_count) ? *dst_count : src_count;
+    memcpy(dst_props, src_props, sizeof(T) * copy_count);
+    *dst_count = copy_count;
+
+    return (copy_count == src_count) ? VK_SUCCESS : VK_INCOMPLETE;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(uint32_t *pCount, VkLayerProperties *pProperties) {
+    return EnumerateProperties(1, &device_profile_api_LayerProps, pCount, pProperties);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
+                                                                    VkExtensionProperties *pProperties) {
+    if (pLayerName && !strcmp(pLayerName, device_profile_api_LayerProps.layerName))
+        return EnumerateProperties<VkExtensionProperties>(0, NULL, pCount, pProperties);
+
+    return VK_ERROR_LAYER_NOT_PRESENT;
+}
+
+VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetPhysicalDeviceProcAddr(VkInstance instance, const char *name) {
+    if (!strcmp(name, "vkSetPhysicalDeviceLimitsEXT")) return (PFN_vkVoidFunction)SetPhysicalDeviceLimitsEXT;
+    if (!strcmp(name, "vkGetOriginalPhysicalDeviceLimitsEXT")) return (PFN_vkVoidFunction)GetOriginalPhysicalDeviceLimitsEXT;
+    if (!strcmp(name, "vkSetPhysicalDeviceFormatPropertiesEXT")) return (PFN_vkVoidFunction)SetPhysicalDeviceFormatPropertiesEXT;
+    if (!strcmp(name, "vkGetOriginalPhysicalDeviceFormatPropertiesEXT"))
+        return (PFN_vkVoidFunction)GetOriginalPhysicalDeviceFormatPropertiesEXT;
+    layer_data *instance_data = GetLayerDataPtr(instance, device_profile_api_dev_data_map);
+    auto &table = instance_data->dispatch_table;
+    if (!table.GetPhysicalDeviceProcAddr) return nullptr;
+    return table.GetPhysicalDeviceProcAddr(instance, name);
+}
+
+VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *name) {
+    if (!strcmp(name, "vkCreateInstance")) return (PFN_vkVoidFunction)CreateInstance;
+    if (!strcmp(name, "vkGetPhysicalDeviceProperties")) return (PFN_vkVoidFunction)GetPhysicalDeviceProperties;
+    if (!strcmp(name, "vkGetPhysicalDeviceFormatProperties")) return (PFN_vkVoidFunction)GetPhysicalDeviceFormatProperties;
+    if (!strcmp(name, "vkGetInstanceProcAddr")) return (PFN_vkVoidFunction)GetInstanceProcAddr;
+    if (!strcmp(name, "vkEnumerateInstanceExtensionProperties")) return (PFN_vkVoidFunction)EnumerateInstanceExtensionProperties;
+    if (!strcmp(name, "vkEnumerateInstanceLayerProperties")) return (PFN_vkVoidFunction)EnumerateInstanceLayerProperties;
+    if (!strcmp(name, "vkSetPhysicalDeviceLimitsEXT")) return (PFN_vkVoidFunction)SetPhysicalDeviceLimitsEXT;
+    if (!strcmp(name, "vkGetOriginalPhysicalDeviceLimitsEXT")) return (PFN_vkVoidFunction)GetOriginalPhysicalDeviceLimitsEXT;
+    if (!strcmp(name, "vkSetPhysicalDeviceFormatPropertiesEXT")) return (PFN_vkVoidFunction)SetPhysicalDeviceFormatPropertiesEXT;
+    if (!strcmp(name, "vkGetOriginalPhysicalDeviceFormatPropertiesEXT"))
+        return (PFN_vkVoidFunction)GetOriginalPhysicalDeviceFormatPropertiesEXT;
+    assert(instance);
+    layer_data *instance_data = GetLayerDataPtr(instance, device_profile_api_dev_data_map);
+    auto &table = instance_data->dispatch_table;
+    if (!table.GetInstanceProcAddr) return nullptr;
+    return table.GetInstanceProcAddr(instance, name);
+}
+
+}  // namespace device_profile_api
+
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(uint32_t *pCount,
+                                                                                  VkLayerProperties *pProperties) {
+    return device_profile_api::EnumerateInstanceLayerProperties(pCount, pProperties);
+}
+
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
+                                                                                      VkExtensionProperties *pProperties) {
+    return device_profile_api::EnumerateInstanceExtensionProperties(pLayerName, pCount, pProperties);
+}
+
+VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char *funcName) {
+    return device_profile_api::GetInstanceProcAddr(instance, funcName);
+}
+
+VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_layerGetPhysicalDeviceProcAddr(VkInstance instance,
+                                                                                           const char *funcName) {
+    return device_profile_api::GetPhysicalDeviceProcAddr(instance, funcName);
+}
+
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkNegotiateLoaderLayerInterfaceVersion(VkNegotiateLayerInterface *pVersionStruct) {
+    assert(pVersionStruct != NULL);
+    assert(pVersionStruct->sType == LAYER_NEGOTIATE_INTERFACE_STRUCT);
+
+    // Fill in the function pointers if the loader's requested interface version is new enough for the structure to contain them.
+    if (pVersionStruct->loaderLayerInterfaceVersion >= 2) {
+        pVersionStruct->pfnGetInstanceProcAddr = vkGetInstanceProcAddr;
+        pVersionStruct->pfnGetDeviceProcAddr = nullptr;
+        pVersionStruct->pfnGetPhysicalDeviceProcAddr = vk_layerGetPhysicalDeviceProcAddr;
+    }
+
+    if (pVersionStruct->loaderLayerInterfaceVersion < CURRENT_LOADER_LAYER_INTERFACE_VERSION) {
+        device_profile_api::loader_layer_if_version = pVersionStruct->loaderLayerInterfaceVersion;
+    } else if (pVersionStruct->loaderLayerInterfaceVersion > CURRENT_LOADER_LAYER_INTERFACE_VERSION) {
+        pVersionStruct->loaderLayerInterfaceVersion = CURRENT_LOADER_LAYER_INTERFACE_VERSION;
+    }
+
+    return VK_SUCCESS;
+}
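
For context, an illustrative sketch (not part of the imported sources) of the handshake the exported entry points above implement: a loader-like caller resolves the layer's vkNegotiateLoaderLayerInterfaceVersion symbol (for example via dlsym on the shared object), negotiates the interface version, then pulls the layer's instance entry points through the returned pfnGetInstanceProcAddr. The struct, enum, and PFN typedefs are assumed to come from vulkan/vk_layer.h.

    #include <vulkan/vk_layer.h>
    #include <vulkan/vulkan.h>

    // Sketch only: what a loader-like caller does once it has the layer's
    // negotiation entry point in hand.
    void NegotiateWithDeviceProfileLayer(PFN_vkNegotiateLoaderLayerInterfaceVersion negotiate) {
        VkNegotiateLayerInterface negotiation = {};
        negotiation.sType = LAYER_NEGOTIATE_INTERFACE_STRUCT;
        negotiation.loaderLayerInterfaceVersion = CURRENT_LOADER_LAYER_INTERFACE_VERSION;

        if (negotiate(&negotiation) != VK_SUCCESS) return;

        // With interface version >= 2 the layer has filled in its trampolines.
        PFN_vkGetInstanceProcAddr gipa = negotiation.pfnGetInstanceProcAddr;
        PFN_vkCreateInstance create_instance =
            reinterpret_cast<PFN_vkCreateInstance>(gipa(VK_NULL_HANDLE, "vkCreateInstance"));
        (void)create_instance;  // the real loader calls this with a layer chain in pNext
    }
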
diff --git a/src/third_party/vulkan-validation-layers/src/tests/layers/libVkLayer_device_profile_api.map b/src/third_party/vulkan-validation-layers/src/tests/layers/libVkLayer_device_profile_api.map
new file mode 100644
index 0000000..e39706b
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/layers/libVkLayer_device_profile_api.map
@@ -0,0 +1,9 @@
+{
+  global:
+    vkGetInstanceProcAddr;
+    vkEnumerateInstanceLayerProperties;
+    vkEnumerateInstanceExtensionProperties;
+    vkNegotiateLoaderLayerInterfaceVersion;
+  local:
+    *;
+};
diff --git a/src/third_party/vulkan-validation-layers/src/tests/layers/linux/VkLayer_device_profile_api.json b/src/third_party/vulkan-validation-layers/src/tests/layers/linux/VkLayer_device_profile_api.json
new file mode 100644
index 0000000..3f837b8
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/layers/linux/VkLayer_device_profile_api.json
@@ -0,0 +1,18 @@
+{
+    "file_format_version" : "1.1.0",
+    "layer" : {
+        "name": "VK_LAYER_LUNARG_device_profile_api",
+        "type": "GLOBAL",
+        "library_path": "./libVkLayer_device_profile_api.so",
+        "api_version": "1.1.101",
+        "implementation_version": "2",
+        "description": "LunarG Device Profile Api Layer",
+        "device_extensions": [
+             {
+                 "name": "VK_LUNARG_LayerDeviceProfileApiEXT",
+                 "spec_version": "0",
+                 "entrypoints": ["vkSetPhysicalDeviceLimitsEXT", "vkGetOriginalPhysicalDeviceLimitsEXT"]
+             }
+         ]
+    }
+}
diff --git a/src/third_party/vulkan-validation-layers/src/tests/layers/macos/VkLayer_device_profile_api.json b/src/third_party/vulkan-validation-layers/src/tests/layers/macos/VkLayer_device_profile_api.json
new file mode 100644
index 0000000..3600845
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/layers/macos/VkLayer_device_profile_api.json
@@ -0,0 +1,18 @@
+{
+    "file_format_version" : "1.1.0",
+    "layer" : {
+        "name": "VK_LAYER_LUNARG_device_profile_api",
+        "type": "GLOBAL",
+        "library_path": "./libVkLayer_device_profile_api.dylib",
+        "api_version": "1.0.101",
+        "implementation_version": "2",
+        "description": "LunarG Device Profile Api Layer",
+        "device_extensions": [
+             {
+                 "name": "VK_LUNARG_LayerDeviceProfileApiEXT",
+                 "spec_version": "0",
+                 "entrypoints": ["vkSetPhysicalDeviceLimitsEXT", "vkGetOriginalPhysicalDeviceLimitsEXT"]
+             }
+         ]
+    }
+}
diff --git a/src/third_party/vulkan-validation-layers/src/tests/layers/vk_device_profile_api_layer.h b/src/third_party/vulkan-validation-layers/src/tests/layers/vk_device_profile_api_layer.h
new file mode 100644
index 0000000..8a71aa3
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/layers/vk_device_profile_api_layer.h
@@ -0,0 +1,29 @@
+/*
+ *
+ * Copyright (c) 2016-2017 Valve Corporation
+ * Copyright (c) 2016-2017 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Arda Coskunses <arda@lunarg.com>
+ *
+ */
+
+#pragma once
+
+#include "vk_lunarg_device_profile_api_layer.h"
+
+typedef struct VkLayerDeviceProfileApiDispatchTable_ {
+    PFN_vkSetPhysicalDeviceLimitsEXT vkSetPhysicalDeviceLimitsEXT;
+    PFN_vkGetOriginalPhysicalDeviceLimitsEXT vkGetOriginalPhysicalDeviceLimitsEXT;
+} VkLayerDeviceProfileApiDispatchTable;
diff --git a/src/third_party/vulkan-validation-layers/src/tests/layers/vk_lunarg_device_profile_api_layer.h b/src/third_party/vulkan-validation-layers/src/tests/layers/vk_lunarg_device_profile_api_layer.h
new file mode 100644
index 0000000..07e6c83
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/layers/vk_lunarg_device_profile_api_layer.h
@@ -0,0 +1,47 @@
+/*
+ *
+ * Copyright (c) 2016-2017 Valve Corporation
+ * Copyright (c) 2016-2017 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Arda Coskunses <arda@lunarg.com>
+ *
+ */
+
+#ifndef __VK_DEVICE_PROFILE_API_H__
+#define __VK_DEVICE_PROFILE_API_H__
+
+#include "vulkan/vulkan.h"
+#ifdef __cplusplus
+extern "C" {
+#endif  // __cplusplus
+
+// Device Profile Api Vulkan Extension API
+
+#define DEVICE_PROFILE_API_EXTENSION_NAME "VK_LUNARG_DEVICE_PROFILE"
+
+// API functions
+
+typedef void(VKAPI_PTR *PFN_vkSetPhysicalDeviceLimitsEXT)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceLimits *newLimits);
+typedef void(VKAPI_PTR *PFN_vkGetOriginalPhysicalDeviceLimitsEXT)(VkPhysicalDevice physicalDevice,
+                                                                  const VkPhysicalDeviceLimits *orgLimits);
+typedef void(VKAPI_PTR *PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT)(VkPhysicalDevice physicalDevice, VkFormat format,
+                                                                            const VkFormatProperties *properties);
+typedef void(VKAPI_PTR *PFN_vkSetPhysicalDeviceFormatPropertiesEXT)(VkPhysicalDevice physicalDevice, VkFormat format,
+                                                                    const VkFormatProperties newProperties);
+#ifdef __cplusplus
+}  // extern "C"
+#endif  // __cplusplus
+
+#endif  // __VK_DEVICE_PROFILE_API_H__
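
A minimal usage sketch for the typedefs above (hypothetical helper, not part of the imported sources): the pointers are resolved through vkGetInstanceProcAddr, which the layer intercepts for exactly these names, assuming the instance was created with VK_LAYER_LUNARG_device_profile_api enabled and `gpu` is one of its physical devices.

    #include <vulkan/vulkan.h>
    #include "vk_lunarg_device_profile_api_layer.h"

    // Sketch only: report a tighter maxSamplerAnisotropy limit through the layer.
    void ShrinkReportedAnisotropyLimit(VkInstance instance, VkPhysicalDevice gpu) {
        auto set_limits = reinterpret_cast<PFN_vkSetPhysicalDeviceLimitsEXT>(
            vkGetInstanceProcAddr(instance, "vkSetPhysicalDeviceLimitsEXT"));
        auto get_original_limits = reinterpret_cast<PFN_vkGetOriginalPhysicalDeviceLimitsEXT>(
            vkGetInstanceProcAddr(instance, "vkGetOriginalPhysicalDeviceLimitsEXT"));
        if (!set_limits || !get_original_limits) return;  // layer not enabled

        VkPhysicalDeviceLimits limits = {};
        get_original_limits(gpu, &limits);   // the driver's real limits
        limits.maxSamplerAnisotropy = 1.0f;  // pretend the device is less capable
        set_limits(gpu, &limits);            // later queries routed through the layer see this
    }
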
diff --git a/src/third_party/vulkan-validation-layers/src/tests/layers/windows/VkLayer_device_profile_api.json b/src/third_party/vulkan-validation-layers/src/tests/layers/windows/VkLayer_device_profile_api.json
new file mode 100644
index 0000000..0e890c1
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/layers/windows/VkLayer_device_profile_api.json
@@ -0,0 +1,18 @@
+{
+    "file_format_version" : "1.1.0",
+    "layer" : {
+        "name": "VK_LAYER_LUNARG_device_profile_api",
+        "type": "GLOBAL",
+        "library_path": ".\\VkLayer_device_profile_api.dll",
+        "api_version": "1.1.101",
+        "implementation_version": "2",
+        "description": "LunarG Device Profile Api Layer",
+        "device_extensions": [
+             {
+                 "name": "VK_LUNARG_LayerDeviceProfileApiEXT",
+                 "spec_version": "0",
+                 "entrypoints": ["vkSetPhysicalDeviceLimitsEXT", "vkGetOriginalPhysicalDeviceLimitsEXT"]
+             }
+         ]
+    }
+}
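
The manifests above are what let the loader find the layer by name. A minimal sketch of enabling it at instance creation, assuming the directory holding the platform's manifest is on VK_LAYER_PATH:

    #include <vulkan/vulkan.h>

    // Sketch only: create an instance with the device-profile layer enabled by name.
    VkInstance CreateInstanceWithDeviceProfileLayer() {
        const char *layers[] = {"VK_LAYER_LUNARG_device_profile_api"};

        VkApplicationInfo app = {};
        app.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
        app.apiVersion = VK_API_VERSION_1_0;

        VkInstanceCreateInfo info = {};
        info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
        info.pApplicationInfo = &app;
        info.enabledLayerCount = 1;
        info.ppEnabledLayerNames = layers;

        VkInstance instance = VK_NULL_HANDLE;
        return (vkCreateInstance(&info, nullptr, &instance) == VK_SUCCESS) ? instance : VK_NULL_HANDLE;
    }
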
diff --git a/src/third_party/vulkan-validation-layers/src/tests/test_common.h b/src/third_party/vulkan-validation-layers/src/tests/test_common.h
new file mode 100644
index 0000000..a6aaf24
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/test_common.h
@@ -0,0 +1,195 @@
+/*
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Chia-I Wu <olvaffe@gmail.com>
+ * Author: Chris Forbes <chrisf@ijw.co.nz>
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Mike Stroyan <mike@LunarG.com>
+ * Author: Tobin Ehlis <tobine@google.com>
+ * Author: Tony Barbour <tony@LunarG.com>
+ */
+
+#ifndef TEST_COMMON_H
+#define TEST_COMMON_H
+
+#include <assert.h>
+#include <stdbool.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#ifdef _WIN32
+#define NOMINMAX
+// WinSock2.h must be included *BEFORE* windows.h
+#include <winsock2.h>
+#endif
+
+#include <vulkan/vk_sdk_platform.h>
+#include <vulkan/vulkan.h>
+
+#ifdef _WIN32
+#pragma warning(push)
+/*
+    warnings 4251 and 4275 have to do with potential dll-interface mismatch
+    between library (gtest) and users. Since we build the gtest library
+    as part of the test build we know that the dll-interface will match and
+    can disable these warnings.
+ */
+#pragma warning(disable : 4251)
+#pragma warning(disable : 4275)
+#endif
+
+// GTest and Xlib collide due to redefinitions of "None" and "Bool"
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+#pragma push_macro("None")
+#pragma push_macro("Bool")
+#undef None
+#undef Bool
+#endif
+
+// gtest (on Android this resolves to the NDK's bundled copy via the include path)
+#include "gtest/gtest.h"
+
+// Redefine Xlib definitions
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+#pragma pop_macro("Bool")
+#pragma pop_macro("None")
+#endif
+
+#ifdef _WIN32
+#pragma warning(pop)
+#endif
+#include "vktestbinding.h"
+
+#define ASSERT_VK_SUCCESS(err)                                                 \
+    {                                                                          \
+        const VkResult resolved_err = err;                                     \
+        ASSERT_EQ(VK_SUCCESS, resolved_err) << vk_result_string(resolved_err); \
+    }
+
+static inline const char *vk_result_string(VkResult err) {
+    switch (err) {
+#define STR(r) \
+    case r:    \
+        return #r
+        STR(VK_SUCCESS);
+        STR(VK_NOT_READY);
+        STR(VK_TIMEOUT);
+        STR(VK_EVENT_SET);
+        STR(VK_EVENT_RESET);
+        STR(VK_ERROR_INITIALIZATION_FAILED);
+        STR(VK_ERROR_OUT_OF_HOST_MEMORY);
+        STR(VK_ERROR_OUT_OF_DEVICE_MEMORY);
+        STR(VK_ERROR_DEVICE_LOST);
+        STR(VK_ERROR_EXTENSION_NOT_PRESENT);
+        STR(VK_ERROR_LAYER_NOT_PRESENT);
+        STR(VK_ERROR_MEMORY_MAP_FAILED);
+        STR(VK_ERROR_INCOMPATIBLE_DRIVER);
+#undef STR
+        default:
+            return "UNKNOWN_RESULT";
+    }
+}
+
+static inline void test_error_callback(const char *expr, const char *file, unsigned int line, const char *function) {
+    ADD_FAILURE_AT(file, line) << "Assertion: `" << expr << "'";
+}
+
+#if defined(__linux__) || defined(__APPLE__)
+/* POSIX (Linux and macOS) common code: */
+
+#include <pthread.h>
+
+// Threads:
+typedef pthread_t test_platform_thread;
+
+static inline int test_platform_thread_create(test_platform_thread *thread, void *(*func)(void *), void *data) {
+    pthread_attr_t thread_attr;
+    pthread_attr_init(&thread_attr);
+    return pthread_create(thread, &thread_attr, func, data);
+}
+static inline int test_platform_thread_join(test_platform_thread thread, void **retval) { return pthread_join(thread, retval); }
+
+// Thread IDs:
+typedef pthread_t test_platform_thread_id;
+static inline test_platform_thread_id test_platform_get_thread_id() { return pthread_self(); }
+
+// Thread mutex:
+typedef pthread_mutex_t test_platform_thread_mutex;
+static inline void test_platform_thread_create_mutex(test_platform_thread_mutex *pMutex) { pthread_mutex_init(pMutex, NULL); }
+static inline void test_platform_thread_lock_mutex(test_platform_thread_mutex *pMutex) { pthread_mutex_lock(pMutex); }
+static inline void test_platform_thread_unlock_mutex(test_platform_thread_mutex *pMutex) { pthread_mutex_unlock(pMutex); }
+static inline void test_platform_thread_delete_mutex(test_platform_thread_mutex *pMutex) { pthread_mutex_destroy(pMutex); }
+typedef pthread_cond_t test_platform_thread_cond;
+static inline void test_platform_thread_init_cond(test_platform_thread_cond *pCond) { pthread_cond_init(pCond, NULL); }
+static inline void test_platform_thread_cond_wait(test_platform_thread_cond *pCond, test_platform_thread_mutex *pMutex) {
+    pthread_cond_wait(pCond, pMutex);
+}
+static inline void test_platform_thread_cond_broadcast(test_platform_thread_cond *pCond) { pthread_cond_broadcast(pCond); }
+
+#elif defined(_WIN32)  // defined(__linux__) || defined(__APPLE__)
+// Threads:
+typedef HANDLE test_platform_thread;
+static inline int test_platform_thread_create(test_platform_thread *thread, void *(*func)(void *), void *data) {
+    DWORD threadID;
+    *thread = CreateThread(NULL,  // default security attributes
+                           0,     // use default stack size
+                           (LPTHREAD_START_ROUTINE)func,
+                           data,        // thread function argument
+                           0,           // use default creation flags
+                           &threadID);  // returns thread identifier
+    return (*thread != NULL);
+}
+static inline int test_platform_thread_join(test_platform_thread thread, void **retval) {
+    return WaitForSingleObject(thread, INFINITE);
+}
+
+// Thread IDs:
+typedef DWORD test_platform_thread_id;
+static test_platform_thread_id test_platform_get_thread_id() { return GetCurrentThreadId(); }
+
+// Thread mutex:
+typedef CRITICAL_SECTION test_platform_thread_mutex;
+static void test_platform_thread_create_mutex(test_platform_thread_mutex *pMutex) { InitializeCriticalSection(pMutex); }
+static void test_platform_thread_lock_mutex(test_platform_thread_mutex *pMutex) { EnterCriticalSection(pMutex); }
+static void test_platform_thread_unlock_mutex(test_platform_thread_mutex *pMutex) { LeaveCriticalSection(pMutex); }
+static void test_platform_thread_delete_mutex(test_platform_thread_mutex *pMutex) { DeleteCriticalSection(pMutex); }
+typedef CONDITION_VARIABLE test_platform_thread_cond;
+static void test_platform_thread_init_cond(test_platform_thread_cond *pCond) { InitializeConditionVariable(pCond); }
+static void test_platform_thread_cond_wait(test_platform_thread_cond *pCond, test_platform_thread_mutex *pMutex) {
+    SleepConditionVariableCS(pCond, pMutex, INFINITE);
+}
+static void test_platform_thread_cond_broadcast(test_platform_thread_cond *pCond) { WakeAllConditionVariable(pCond); }
+#else                  // defined(_WIN32)
+
+#error The "test_common.h" file must be modified for this OS.
+
+// NOTE: In order to support another OS, an #elif needs to be added (above the
+// "#else // defined(_WIN32)") for that OS, and OS-specific versions of the
+// contents of this file must be created.
+
+// NOTE: Other OS-specific changes are also needed for this OS.  Search for
+// files with "WIN32" in it, as a quick way to find files that must be changed.
+
+#endif  // defined(_WIN32)
+
+#endif  // TEST_COMMON_H
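
A small sketch (hypothetical test, not part of the imported sources, assuming it is compiled inside this test target so vktestbinding.h resolves) of the helpers this header provides: ASSERT_VK_SUCCESS with readable VkResult strings, plus the portable thread wrappers.

    #include "test_common.h"

    static void *IncrementCounter(void *arg) {
        ++*static_cast<int *>(arg);
        return nullptr;
    }

    TEST(TestCommonSmoke, ResultStringsAndThreadWrappers) {
        // vk_result_string() gives failure messages their readable form.
        ASSERT_VK_SUCCESS(VK_SUCCESS);
        EXPECT_STREQ("VK_ERROR_DEVICE_LOST", vk_result_string(VK_ERROR_DEVICE_LOST));

        int counter = 0;
        test_platform_thread worker;
        test_platform_thread_create(&worker, IncrementCounter, &counter);
        test_platform_thread_join(worker, nullptr);
        EXPECT_EQ(1, counter);
    }
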
diff --git a/src/third_party/vulkan-validation-layers/src/tests/test_environment.cpp b/src/third_party/vulkan-validation-layers/src/tests/test_environment.cpp
new file mode 100644
index 0000000..dcad817
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/test_environment.cpp
@@ -0,0 +1,153 @@
+/*
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Chia-I Wu <olvaffe@gmail.com>
+ * Author: Chris Forbes <chrisf@ijw.co.nz>
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Mike Stroyan <mike@LunarG.com>
+ * Author: Tobin Ehlis <tobine@google.com>
+ * Author: Tony Barbour <tony@LunarG.com>
+ */
+
+#include "test_common.h"
+#include "lvt_function_pointers.h"
+#include "test_environment.h"
+
+#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))
+
+namespace vk_testing {
+
+Environment::Environment() : default_dev_(0), inst(VK_NULL_HANDLE) {
+    app_.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
+    app_.pApplicationName = "vk_testing";
+    app_.applicationVersion = 1;
+    app_.pEngineName = "vk_testing";
+    app_.engineVersion = 1;
+    app_.apiVersion = VK_API_VERSION_1_0;
+    app_.pNext = NULL;
+}
+
+bool Environment::parse_args(int argc, char **argv) {
+    int i;
+
+    for (i = 1; i < argc; i++) {
+#define ARG(name) (strcmp(argv[i], name) == 0)
+#define ARG_P(name) (i < argc - 1 && ARG(name))
+        if (ARG_P("--gpu")) {
+            default_dev_ = atoi(argv[++i]);
+        } else {
+            break;
+        }
+#undef ARG
+#undef ARG_P
+    }
+
+    if (i < argc) {
+        std::cout << "invalid argument: " << argv[i] << "\n\n"
+                  << "Usage: " << argv[0] << " <options>\n\n"
+                  << "Options:\n"
+                     "  --gpu <n>  Use GPU<n> as the default GPU\n";
+
+        return false;
+    }
+
+    return true;
+}
+
+void Environment::SetUp() {
+    std::vector<VkExtensionProperties> instance_extensions;
+    std::vector<VkExtensionProperties> device_extensions;
+
+    std::vector<const char *> instance_extension_names;
+    std::vector<const char *> device_extension_names;
+
+    instance_extension_names.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
+    device_extension_names.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+#ifdef _WIN32
+    instance_extension_names.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
+#endif
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    instance_extension_names.push_back(VK_KHR_XCB_SURFACE_EXTENSION_NAME);
+#endif
+
+    VkBool32 extFound;
+
+    instance_extensions = vk_testing::GetGlobalExtensions();
+
+    for (uint32_t i = 0; i < instance_extension_names.size(); i++) {
+        extFound = 0;
+        for (uint32_t j = 0; j < instance_extensions.size(); j++) {
+            if (!strcmp(instance_extension_names[i], instance_extensions[j].extensionName)) {
+                extFound = 1;
+            }
+        }
+        ASSERT_EQ(extFound, 1) << "ERROR: Cannot find extension named " << instance_extension_names[i]
+                               << " which is necessary to pass this test";
+    }
+    VkInstanceCreateInfo inst_info = {};
+    inst_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
+    inst_info.pNext = NULL;
+    inst_info.pApplicationInfo = &app_;
+    inst_info.enabledExtensionCount = instance_extension_names.size();
+    inst_info.ppEnabledExtensionNames = (instance_extension_names.size()) ? &instance_extension_names[0] : NULL;
+    inst_info.enabledLayerCount = 0;
+    inst_info.ppEnabledLayerNames = NULL;
+
+    VkResult err;
+    uint32_t count;
+    err = vk::CreateInstance(&inst_info, NULL, &inst);
+    ASSERT_EQ(VK_SUCCESS, err);
+    err = vk::EnumeratePhysicalDevices(inst, &count, NULL);
+    ASSERT_EQ(VK_SUCCESS, err);
+    ASSERT_LE(count, ARRAY_SIZE(gpus));
+    err = vk::EnumeratePhysicalDevices(inst, &count, gpus);
+    ASSERT_EQ(VK_SUCCESS, err);
+    ASSERT_GT(count, default_dev_);
+
+    vk_testing::PhysicalDevice phys_dev(gpus[0]);
+    device_extensions = phys_dev.extensions();
+
+    for (uint32_t i = 0; i < device_extension_names.size(); i++) {
+        extFound = 0;
+        for (uint32_t j = 0; j < device_extensions.size(); j++) {
+            if (!strcmp(device_extension_names[i], device_extensions[j].extensionName)) {
+                extFound = 1;
+            }
+        }
+        ASSERT_EQ(extFound, 1) << "ERROR: Cannot find extension named " << device_extension_names[i]
+                               << " which is necessary to pass this test";
+    }
+
+    devs_.reserve(count);
+    for (uint32_t i = 0; i < count; i++) {
+        devs_.push_back(new Device(gpus[i]));
+        if (i == default_dev_) {
+            devs_[i]->init(device_extension_names);
+            ASSERT_NE(true, devs_[i]->graphics_queues().empty());
+        }
+    }
+}
+
+void Environment::TearDown() {
+    // destroy devices first
+    for (std::vector<Device *>::iterator it = devs_.begin(); it != devs_.end(); it++) delete *it;
+    devs_.clear();
+
+    if (inst) vk::DestroyInstance(inst, NULL);
+}
+}  // namespace vk_testing
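
For reference, a minimal sketch (not part of the imported sources) of how a test runner typically wires this Environment into gtest; AddGlobalTestEnvironment takes ownership of the pointer, and SetUp/TearDown then bracket the whole test run.

    #include "test_common.h"
    #include "test_environment.h"

    int main(int argc, char **argv) {
        ::testing::InitGoogleTest(&argc, argv);

        vk_testing::Environment *env = new vk_testing::Environment();
        if (!env->parse_args(argc, argv)) {
            delete env;
            return 1;  // parse_args already printed the --gpu usage text
        }
        ::testing::AddGlobalTestEnvironment(env);  // gtest owns env from here on

        return RUN_ALL_TESTS();
    }
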
diff --git a/src/third_party/vulkan-validation-layers/src/tests/test_environment.h b/src/third_party/vulkan-validation-layers/src/tests/test_environment.h
new file mode 100644
index 0000000..baa7ba1
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/test_environment.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Chia-I Wu <olvaffe@gmail.com>
+ * Author: Chris Forbes <chrisf@ijw.co.nz>
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Mike Stroyan <mike@LunarG.com>
+ * Author: Tobin Ehlis <tobine@google.com>
+ * Author: Tony Barbour <tony@LunarG.com>
+ */
+
+#ifndef TEST_ENVIRONMENT_H
+#define TEST_ENVIRONMENT_H
+
+#include "vktestbinding.h"
+
+namespace vk_testing {
+class Environment : public ::testing::Environment {
+  public:
+    Environment();
+
+    bool parse_args(int argc, char **argv);
+
+    virtual void SetUp();
+    virtual void TearDown();
+
+    const std::vector<Device *> &devices() { return devs_; }
+    Device &default_device() { return *(devs_[default_dev_]); }
+    VkInstance get_instance() { return inst; }
+    VkPhysicalDevice gpus[16];
+
+  private:
+    VkApplicationInfo app_;
+    uint32_t default_dev_;
+    VkInstance inst;
+
+    std::vector<Device *> devs_;
+};
+}  // namespace vk_testing
+#endif  // TEST_ENVIRONMENT_H
diff --git a/src/third_party/vulkan-validation-layers/src/tests/vk_layer_settings.txt b/src/third_party/vulkan-validation-layers/src/tests/vk_layer_settings.txt
new file mode 100644
index 0000000..3d8dae9
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/vk_layer_settings.txt
@@ -0,0 +1,8 @@
+lunarg_core_validation.report_flags = error
+lunarg_core_validation.debug_action = VK_DBG_LAYER_ACTION_LOG_MSG
+lunarg_object_tracker.report_flags = error
+lunarg_object_tracker.debug_action = VK_DBG_LAYER_ACTION_LOG_MSG
+lunarg_parameter_validation.report_flags = error
+lunarg_parameter_validation.debug_action = VK_DBG_LAYER_ACTION_LOG_MSG
+google_threading.report_flags = error
+google_threading.debug_action = VK_DBG_LAYER_ACTION_LOG_MSG
diff --git a/src/third_party/vulkan-validation-layers/src/tests/vk_layer_validation_tests.vcxproj.user b/src/third_party/vulkan-validation-layers/src/tests/vk_layer_validation_tests.vcxproj.user
new file mode 100644
index 0000000..ee3f768
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/vk_layer_validation_tests.vcxproj.user
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project ToolsVersion="12.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='RelWithDebInfo|x64'">
+    <LocalDebuggerEnvironment>VK_LAYER_PATH=$(SolutionDir)\layers\$(Configuration)</LocalDebuggerEnvironment>
+    <DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+    <LocalDebuggerEnvironment>VK_LAYER_PATH=$(SolutionDir)\layers\$(Configuration)</LocalDebuggerEnvironment>
+    <DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+    <LocalDebuggerEnvironment>VK_LAYER_PATH=$(SolutionDir)\layers\$(Configuration)</LocalDebuggerEnvironment>
+    <DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='MinSizeRel|x64'">
+    <LocalDebuggerEnvironment>VK_LAYER_PATH=$(SolutionDir)\layers\$(Configuration)</LocalDebuggerEnvironment>
+    <DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='RelWithDebInfo|Win32'">
+    <LocalDebuggerEnvironment>VK_LAYER_PATH=$(SolutionDir)\layers\$(Configuration)</LocalDebuggerEnvironment>
+    <DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+    <LocalDebuggerEnvironment>VK_LAYER_PATH=$(SolutionDir)\layers\$(Configuration)</LocalDebuggerEnvironment>
+    <DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='MinSizeRel|Win32'">
+    <LocalDebuggerEnvironment>VK_LAYER_PATH=$(SolutionDir)\layers\$(Configuration)</LocalDebuggerEnvironment>
+    <DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+    <LocalDebuggerEnvironment>VK_LAYER_PATH=$(SolutionDir)\layers\$(Configuration)</LocalDebuggerEnvironment>
+    <DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
+  </PropertyGroup>
+</Project>
\ No newline at end of file
diff --git a/src/third_party/vulkan-validation-layers/src/tests/vklayertests_best_practices.cpp b/src/third_party/vulkan-validation-layers/src/tests/vklayertests_best_practices.cpp
new file mode 100644
index 0000000..541ddb0
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/vklayertests_best_practices.cpp
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2019 Google, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Author: Camden Stocker <camden@lunarg.com>
+ */
+
+#include "cast_utils.h"
+#include "layer_validation_tests.h"
+
+void VkBestPracticesLayerTest::InitBestPracticesFramework() {
+    VkValidationFeatureEnableEXT enables[] = {VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT};
+    VkValidationFeaturesEXT features = {};
+    features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
+    features.enabledValidationFeatureCount = 1;
+    features.pEnabledValidationFeatures = enables;
+
+    InitFramework(myDbgFunc, m_errorMonitor, &features);
+}
+
+TEST_F(VkBestPracticesLayerTest, CmdClearAttachmentTest) {
+    TEST_DESCRIPTION("Test for validating usage of vkCmdClearAttachments");
+
+    InitBestPracticesFramework();
+    InitState();
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+    // Main thing we care about for this test is that the VkImage obj we're
+    // clearing matches Color Attachment of FB
+    //  Also pass down other dummy params to keep driver and paramchecker happy
+    VkClearAttachment color_attachment;
+    color_attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    color_attachment.clearValue.color.float32[0] = 1.0;
+    color_attachment.clearValue.color.float32[1] = 1.0;
+    color_attachment.clearValue.color.float32[2] = 1.0;
+    color_attachment.clearValue.color.float32[3] = 1.0;
+    color_attachment.colorAttachment = 0;
+    VkClearRect clear_rect = {{{0, 0}, {(uint32_t)m_width, (uint32_t)m_height}}, 0, 1};
+
+    // Call for full-sized FB Color attachment prior to issuing a Draw
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+                                         "UNASSIGNED-CoreValidation-DrawState-ClearCmdBeforeDraw");
+    vk::CmdClearAttachments(m_commandBuffer->handle(), 1, &color_attachment, 1, &clear_rect);
+    m_errorMonitor->VerifyFound();
+}
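
The test above follows the arm/trigger/verify pattern used throughout these test files. A condensed sketch of that pattern (hypothetical test, not part of the imported sources; it reuses the unaligned vkCmdUpdateBuffer case from UpdateBufferAlignment further below):

    #include "cast_utils.h"
    #include "layer_validation_tests.h"

    TEST_F(VkLayerTest, ErrorMonitorPatternSketch) {
        ASSERT_NO_FATAL_FAILURE(Init());

        VkBufferObj buffer;
        buffer.init_as_dst(*m_device, (VkDeviceSize)20, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
        uint32_t data[4] = {};

        m_commandBuffer->begin();
        // 1. Arm the monitor with the message fragment the invalid call should produce.
        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " is not a multiple of 4");
        // 2. Make the offending call: a dstOffset of 1 is not 4-byte aligned.
        m_commandBuffer->UpdateBuffer(buffer.handle(), 1, 4, data);
        // 3. Check that the expected message was actually emitted.
        m_errorMonitor->VerifyFound();
        m_commandBuffer->end();
    }
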
diff --git a/src/third_party/vulkan-validation-layers/src/tests/vklayertests_buffer_image_memory_sampler.cpp b/src/third_party/vulkan-validation-layers/src/tests/vklayertests_buffer_image_memory_sampler.cpp
new file mode 100644
index 0000000..300392d
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/vklayertests_buffer_image_memory_sampler.cpp
@@ -0,0 +1,7644 @@
+/*
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2019 Google, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Author: Chia-I Wu <olvaffe@gmail.com>
+ * Author: Chris Forbes <chrisf@ijw.co.nz>
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Mike Stroyan <mike@LunarG.com>
+ * Author: Tobin Ehlis <tobine@google.com>
+ * Author: Tony Barbour <tony@LunarG.com>
+ * Author: Cody Northrop <cnorthrop@google.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ * Author: Jeremy Kniager <jeremyk@lunarg.com>
+ * Author: Shannon McPherson <shannon@lunarg.com>
+ * Author: John Zulauf <jzulauf@lunarg.com>
+ */
+
+#include "cast_utils.h"
+#include "layer_validation_tests.h"
+
+TEST_F(VkLayerTest, BufferExtents) {
+    TEST_DESCRIPTION("Perform copies across a buffer, provoking out-of-range errors.");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    const VkDeviceSize buffer_size = 2048;
+
+    VkBufferObj buffer_one;
+    VkBufferObj buffer_two;
+    VkMemoryPropertyFlags reqs = 0;
+    buffer_one.init_as_src_and_dst(*m_device, buffer_size, reqs);
+    buffer_two.init_as_src_and_dst(*m_device, buffer_size, reqs);
+
+    m_commandBuffer->begin();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyBuffer-srcOffset-00113");
+    VkBufferCopy copy_info = {4096, 256, 256};
+    vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_one.handle(), buffer_two.handle(), 1, &copy_info);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyBuffer-dstOffset-00114");
+    copy_info = {256, 4096, 256};
+    vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_one.handle(), buffer_two.handle(), 1, &copy_info);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyBuffer-size-00115");
+    copy_info = {1024, 256, 1280};
+    vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_one.handle(), buffer_two.handle(), 1, &copy_info);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyBuffer-size-00116");
+    copy_info = {256, 1024, 1280};
+    vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_one.handle(), buffer_two.handle(), 1, &copy_info);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyBuffer-pRegions-00117");
+    copy_info = {256, 512, 512};
+    vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_two.handle(), buffer_two.handle(), 1, &copy_info);
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, MirrorClampToEdgeNotEnabled) {
+    TEST_DESCRIPTION("Validation should catch using CLAMP_TO_EDGE addressing mode if the extension is not enabled.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSamplerCreateInfo-addressModeU-01079");
+    VkSampler sampler = VK_NULL_HANDLE;
+    VkSamplerCreateInfo sampler_info = SafeSaneSamplerCreateInfo();
+    // Set the modes to cause the error
+    sampler_info.addressModeU = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE;
+    sampler_info.addressModeV = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE;
+    sampler_info.addressModeW = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE;
+
+    vk::CreateSampler(m_device->device(), &sampler_info, NULL, &sampler);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, AnisotropyFeatureDisabled) {
+    TEST_DESCRIPTION("Validation should check anisotropy parameters are correct with samplerAnisotropy disabled.");
+
+    // Determine if required device features are available
+    VkPhysicalDeviceFeatures device_features = {};
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
+    device_features.samplerAnisotropy = VK_FALSE;  // force anisotropy off
+    ASSERT_NO_FATAL_FAILURE(InitState(&device_features));
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSamplerCreateInfo-anisotropyEnable-01070");
+    VkSamplerCreateInfo sampler_info = SafeSaneSamplerCreateInfo();
+    // With samplerAnisotropy disabled, the sampler must not enable anisotropy.
+    sampler_info.anisotropyEnable = VK_TRUE;
+    VkSampler sampler = VK_NULL_HANDLE;
+
+    VkResult err;
+    err = vk::CreateSampler(m_device->device(), &sampler_info, NULL, &sampler);
+    m_errorMonitor->VerifyFound();
+    if (VK_SUCCESS == err) {
+        vk::DestroySampler(m_device->device(), sampler, NULL);
+    }
+    sampler = VK_NULL_HANDLE;
+}
+
+TEST_F(VkLayerTest, AnisotropyFeatureEnabled) {
+    TEST_DESCRIPTION("Validation must check several conditions that apply only when Anisotropy is enabled.");
+
+    // Determine if required device features are available
+    VkPhysicalDeviceFeatures device_features = {};
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
+
+    // These tests require that the device support anisotropic filtering
+    if (VK_TRUE != device_features.samplerAnisotropy) {
+        printf("%s Test requires unsupported samplerAnisotropy feature. Skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    bool cubic_support = false;
+    if (DeviceExtensionSupported(gpu(), nullptr, "VK_IMG_filter_cubic")) {
+        m_device_extension_names.push_back("VK_IMG_filter_cubic");
+        cubic_support = true;
+    }
+
+    VkSamplerCreateInfo sampler_info_ref = SafeSaneSamplerCreateInfo();
+    sampler_info_ref.anisotropyEnable = VK_TRUE;
+    VkSamplerCreateInfo sampler_info = sampler_info_ref;
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    // maxAnisotropy out-of-bounds low.
+    sampler_info.maxAnisotropy = NearestSmaller(1.0F);
+    CreateSamplerTest(*this, &sampler_info, "VUID-VkSamplerCreateInfo-anisotropyEnable-01071");
+    sampler_info.maxAnisotropy = sampler_info_ref.maxAnisotropy;
+
+    // maxAnisotropy out-of-bounds high.
+    sampler_info.maxAnisotropy = NearestGreater(m_device->phy().properties().limits.maxSamplerAnisotropy);
+    CreateSamplerTest(*this, &sampler_info, "VUID-VkSamplerCreateInfo-anisotropyEnable-01071");
+    sampler_info.maxAnisotropy = sampler_info_ref.maxAnisotropy;
+
+    // Both anisotropy and unnormalized coords enabled
+    sampler_info.unnormalizedCoordinates = VK_TRUE;
+    // If unnormalizedCoordinates is VK_TRUE, minLod and maxLod must be zero
+    sampler_info.minLod = 0;
+    sampler_info.maxLod = 0;
+    CreateSamplerTest(*this, &sampler_info, "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01076");
+    sampler_info.unnormalizedCoordinates = sampler_info_ref.unnormalizedCoordinates;
+
+    // Both anisotropy and cubic filtering enabled
+    if (cubic_support) {
+        sampler_info.minFilter = VK_FILTER_CUBIC_IMG;
+        CreateSamplerTest(*this, &sampler_info, "VUID-VkSamplerCreateInfo-magFilter-01081");
+        sampler_info.minFilter = sampler_info_ref.minFilter;
+
+        sampler_info.magFilter = VK_FILTER_CUBIC_IMG;
+        CreateSamplerTest(*this, &sampler_info, "VUID-VkSamplerCreateInfo-magFilter-01081");
+        sampler_info.magFilter = sampler_info_ref.magFilter;
+    } else {
+        printf("%s Test requires unsupported extension \"VK_IMG_filter_cubic\". Skipped.\n", kSkipPrefix);
+    }
+}
+
+TEST_F(VkLayerTest, UnnormalizedCoordinatesEnabled) {
+    TEST_DESCRIPTION("Validate restrictions on sampler parameters when unnormalizedCoordinates is true.");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    VkSamplerCreateInfo sampler_info_ref = SafeSaneSamplerCreateInfo();
+    sampler_info_ref.unnormalizedCoordinates = VK_TRUE;
+    sampler_info_ref.minLod = 0.0f;
+    sampler_info_ref.maxLod = 0.0f;
+    VkSamplerCreateInfo sampler_info = sampler_info_ref;
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    // min and mag filters must be the same
+    sampler_info.minFilter = VK_FILTER_NEAREST;
+    sampler_info.magFilter = VK_FILTER_LINEAR;
+    CreateSamplerTest(*this, &sampler_info, "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01072");
+    std::swap(sampler_info.minFilter, sampler_info.magFilter);
+    CreateSamplerTest(*this, &sampler_info, "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01072");
+    sampler_info = sampler_info_ref;
+
+    // mipmapMode must be NEAREST
+    sampler_info.mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR;
+    CreateSamplerTest(*this, &sampler_info, "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01073");
+    sampler_info = sampler_info_ref;
+
+    // minLod and maxLod must be zero
+    sampler_info.maxLod = 3.14159f;
+    CreateSamplerTest(*this, &sampler_info, "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01074");
+    sampler_info.minLod = 2.71828f;
+    CreateSamplerTest(*this, &sampler_info, "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01074");
+    sampler_info = sampler_info_ref;
+
+    // addressModeU and addressModeV must both be CLAMP_TO_EDGE or CLAMP_TO_BORDER
+    // checks all 12 invalid combinations out of 16 total combinations
+    const std::array<VkSamplerAddressMode, 4> kAddressModes = {{
+        VK_SAMPLER_ADDRESS_MODE_REPEAT,
+        VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
+        VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
+        VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
+    }};
+    for (const auto umode : kAddressModes) {
+        for (const auto vmode : kAddressModes) {
+            if ((umode != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE && umode != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER) ||
+                (vmode != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE && vmode != VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER)) {
+                sampler_info.addressModeU = umode;
+                sampler_info.addressModeV = vmode;
+                CreateSamplerTest(*this, &sampler_info, "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01075");
+            }
+        }
+    }
+    sampler_info = sampler_info_ref;
+
+    // VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01076 is tested in AnisotropyFeatureEnabled above
+    // Since it requires checking/enabling the anisotropic filtering feature, it's easier to do it
+    // with the other anisotropic tests.
+
+    // compareEnable must be VK_FALSE
+    sampler_info.compareEnable = VK_TRUE;
+    CreateSamplerTest(*this, &sampler_info, "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01077");
+    sampler_info = sampler_info_ref;
+}
+
+TEST_F(VkLayerTest, UpdateBufferAlignment) {
+    TEST_DESCRIPTION("Check alignment parameters for vkCmdUpdateBuffer");
+    uint32_t updateData[] = {1, 2, 3, 4, 5, 6, 7, 8};
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkMemoryPropertyFlags reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+    VkBufferObj buffer;
+    buffer.init_as_dst(*m_device, (VkDeviceSize)20, reqs);
+
+    m_commandBuffer->begin();
+    // Introduce failure by using a dstOffset that is not a multiple of 4
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " is not a multiple of 4");
+    m_commandBuffer->UpdateBuffer(buffer.handle(), 1, 4, updateData);
+    m_errorMonitor->VerifyFound();
+
+    // Introduce failure by using a dataSize that is not a multiple of 4
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " is not a multiple of 4");
+    m_commandBuffer->UpdateBuffer(buffer.handle(), 0, 6, updateData);
+    m_errorMonitor->VerifyFound();
+
+    // Introduce failure by passing a negative dataSize; the cast to VkDeviceSize wraps it well past 65536
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "must be greater than zero and less than or equal to 65536");
+    m_commandBuffer->UpdateBuffer(buffer.handle(), 0, (VkDeviceSize)-44, updateData);
+    m_errorMonitor->VerifyFound();
+
+    // Introduce failure by using dataSize that is > 65536
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "must be greater than zero and less than or equal to 65536");
+    m_commandBuffer->UpdateBuffer(buffer.handle(), 0, (VkDeviceSize)80000, updateData);
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, FillBufferAlignmentAndSize) {
+    TEST_DESCRIPTION("Check alignment and size parameters for vkCmdFillBuffer");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkMemoryPropertyFlags reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+    VkBufferObj buffer;
+    buffer.init_as_dst(*m_device, (VkDeviceSize)20, reqs);
+
+    m_commandBuffer->begin();
+
+    // Introduce failure by using dstOffset greater than bufferSize
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdFillBuffer-dstOffset-00024");
+    m_commandBuffer->FillBuffer(buffer.handle(), 40, 4, 0x11111111);
+    m_errorMonitor->VerifyFound();
+
+    // Introduce failure by using a size greater than bufferSize minus dstOffset
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdFillBuffer-size-00027");
+    m_commandBuffer->FillBuffer(buffer.handle(), 16, 12, 0x11111111);
+    m_errorMonitor->VerifyFound();
+
+    // Introduce failure by using a dstOffset that is not a multiple of 4
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " is not a multiple of 4");
+    m_commandBuffer->FillBuffer(buffer.handle(), 1, 4, 0x11111111);
+    m_errorMonitor->VerifyFound();
+
+    // Introduce failure by using a size that is not a multiple of 4
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " is not a multiple of 4");
+    m_commandBuffer->FillBuffer(buffer.handle(), 0, 6, 0x11111111);
+    m_errorMonitor->VerifyFound();
+
+    // Introduce failure by using size that is zero
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "must be greater than zero");
+    m_commandBuffer->FillBuffer(buffer.handle(), 0, 0, 0x11111111);
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, SparseBindingImageBufferCreate) {
+    TEST_DESCRIPTION("Create buffer/image with sparse attributes but without the sparse_binding bit set");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkBufferCreateInfo buf_info = {};
+    buf_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    buf_info.pNext = NULL;
+    buf_info.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
+    buf_info.size = 2048;
+    buf_info.queueFamilyIndexCount = 0;
+    buf_info.pQueueFamilyIndices = NULL;
+    buf_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+
+    if (m_device->phy().features().sparseResidencyBuffer) {
+        buf_info.flags = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT;
+        CreateBufferTest(*this, &buf_info, "VUID-VkBufferCreateInfo-flags-00918");
+    } else {
+        printf("%s Test requires unsupported sparseResidencyBuffer feature. Skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    if (m_device->phy().features().sparseResidencyAliased) {
+        buf_info.flags = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT;
+        CreateBufferTest(*this, &buf_info, "VUID-VkBufferCreateInfo-flags-00918");
+    } else {
+        printf("%s Test requires unsupported sparseResidencyAliased feature. Skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
+    image_create_info.extent.width = 512;
+    image_create_info.extent.height = 64;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
+    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+    image_create_info.queueFamilyIndexCount = 0;
+    image_create_info.pQueueFamilyIndices = NULL;
+    image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+
+    if (m_device->phy().features().sparseResidencyImage2D) {
+        image_create_info.flags = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT;
+        CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-flags-00987");
+    } else {
+        printf("%s Test requires unsupported sparseResidencyImage2D feature. Skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    if (m_device->phy().features().sparseResidencyAliased) {
+        image_create_info.flags = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT;
+        CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-flags-00987");
+    } else {
+        printf("%s Test requires unsupported sparseResidencyAliased feature. Skipped.\n", kSkipPrefix);
+        return;
+    }
+}
+
+TEST_F(VkLayerTest, SparseResidencyImageCreateUnsupportedTypes) {
+    TEST_DESCRIPTION("Create images with sparse residency with unsupported types");
+
+    // Determine which device features are available
+    VkPhysicalDeviceFeatures device_features = {};
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
+
+    // Mask out device features we don't want and initialize device state
+    device_features.sparseResidencyImage2D = VK_FALSE;
+    device_features.sparseResidencyImage3D = VK_FALSE;
+    ASSERT_NO_FATAL_FAILURE(InitState(&device_features));
+
+    if (!m_device->phy().features().sparseBinding) {
+        printf("%s Test requires unsupported sparseBinding feature. Skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_1D;
+    image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
+    image_create_info.extent.width = 512;
+    image_create_info.extent.height = 1;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
+    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+    image_create_info.queueFamilyIndexCount = 0;
+    image_create_info.pQueueFamilyIndices = NULL;
+    image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    image_create_info.flags = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT | VK_IMAGE_CREATE_SPARSE_BINDING_BIT;
+
+    // 1D image w/ sparse residency is an error
+    CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-imageType-00970");
+
+    // 2D image w/ sparse residency when feature isn't available
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.extent.height = 64;
+    CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-imageType-00971");
+
+    // 3D image w/ sparse residency when feature isn't available
+    image_create_info.imageType = VK_IMAGE_TYPE_3D;
+    image_create_info.extent.depth = 8;
+    CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-imageType-00972");
+}
+
+TEST_F(VkLayerTest, SparseResidencyImageCreateUnsupportedSamples) {
+    TEST_DESCRIPTION("Create images with sparse residency with unsupported tiling or sample counts");
+
+    // Determine which device features are available
+    VkPhysicalDeviceFeatures device_features = {};
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
+
+    // These tests require that the device support sparse residency for 2D images
+    if (VK_TRUE != device_features.sparseResidencyImage2D) {
+        printf("%s Test requires unsupported SparseResidencyImage2D feature. Skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    // Mask out device features we don't want and initialize device state
+    device_features.sparseResidency2Samples = VK_FALSE;
+    device_features.sparseResidency4Samples = VK_FALSE;
+    device_features.sparseResidency8Samples = VK_FALSE;
+    device_features.sparseResidency16Samples = VK_FALSE;
+    ASSERT_NO_FATAL_FAILURE(InitState(&device_features));
+
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
+    image_create_info.extent.width = 64;
+    image_create_info.extent.height = 64;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_LINEAR;
+    image_create_info.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
+    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+    image_create_info.queueFamilyIndexCount = 0;
+    image_create_info.pQueueFamilyIndices = NULL;
+    image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    image_create_info.flags = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT | VK_IMAGE_CREATE_SPARSE_BINDING_BIT;
+
+    // 2D image w/ sparse residency and linear tiling is an error
+    CreateImageTest(*this, &image_create_info,
+                    "VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT then image tiling of VK_IMAGE_TILING_LINEAR is not supported");
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+
+    // Multi-sample image w/ sparse residency when feature isn't available (4 flavors)
+    image_create_info.samples = VK_SAMPLE_COUNT_2_BIT;
+    CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-imageType-00973");
+
+    image_create_info.samples = VK_SAMPLE_COUNT_4_BIT;
+    CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-imageType-00974");
+
+    image_create_info.samples = VK_SAMPLE_COUNT_8_BIT;
+    CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-imageType-00975");
+
+    image_create_info.samples = VK_SAMPLE_COUNT_16_BIT;
+    CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-imageType-00976");
+}
+
+TEST_F(VkLayerTest, InvalidMemoryMapping) {
+    TEST_DESCRIPTION("Attempt to map memory in a number of incorrect ways");
+    VkResult err;
+    bool pass;
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkBuffer buffer;
+    VkDeviceMemory mem;
+    VkMemoryRequirements mem_reqs;
+
+    const VkDeviceSize atom_size = m_device->props.limits.nonCoherentAtomSize;
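+    // nonCoherentAtomSize is the required alignment for flush/invalidate offsets and sizes on
+    // non-coherent memory; the flush-range cases below are expressed in multiples of it.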
+
+    VkBufferCreateInfo buf_info = {};
+    buf_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    buf_info.pNext = NULL;
+    buf_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+    buf_info.size = 256;
+    buf_info.queueFamilyIndexCount = 0;
+    buf_info.pQueueFamilyIndices = NULL;
+    buf_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    buf_info.flags = 0;
+    err = vk::CreateBuffer(m_device->device(), &buf_info, NULL, &buffer);
+    ASSERT_VK_SUCCESS(err);
+
+    vk::GetBufferMemoryRequirements(m_device->device(), buffer, &mem_reqs);
+    VkMemoryAllocateInfo alloc_info = {};
+    alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    alloc_info.pNext = NULL;
+    alloc_info.memoryTypeIndex = 0;
+
+    // Use an allocation size comfortably larger than the buffer's requirement for the mapping tests below
+    static const VkDeviceSize allocation_size = 0x10000;
+    alloc_info.allocationSize = allocation_size;
+    pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &alloc_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+    if (!pass) {
+        printf("%s Failed to set memory type.\n", kSkipPrefix);
+        vk::DestroyBuffer(m_device->device(), buffer, NULL);
+        return;
+    }
+    err = vk::AllocateMemory(m_device->device(), &alloc_info, NULL, &mem);
+    ASSERT_VK_SUCCESS(err);
+
+    uint8_t *pData;
+    // Attempt to map memory size 0 is invalid
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VkMapMemory: Attempting to map memory range of size zero");
+    err = vk::MapMemory(m_device->device(), mem, 0, 0, 0, (void **)&pData);
+    m_errorMonitor->VerifyFound();
+    // Map memory twice
+    err = vk::MapMemory(m_device->device(), mem, 0, mem_reqs.size, 0, (void **)&pData);
+    ASSERT_VK_SUCCESS(err);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-CoreValidation-MemTrack-InvalidMap");
+    err = vk::MapMemory(m_device->device(), mem, 0, mem_reqs.size, 0, (void **)&pData);
+    m_errorMonitor->VerifyFound();
+
+    // Unmap the memory to avoid re-map error
+    vk::UnmapMemory(m_device->device(), mem);
+    // overstep allocation with VK_WHOLE_SIZE
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         " with size of VK_WHOLE_SIZE oversteps total array size 0x");
+    err = vk::MapMemory(m_device->device(), mem, allocation_size + 1, VK_WHOLE_SIZE, 0, (void **)&pData);
+    m_errorMonitor->VerifyFound();
+    // overstep allocation w/o VK_WHOLE_SIZE
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " oversteps total array size 0x");
+    err = vk::MapMemory(m_device->device(), mem, 1, allocation_size, 0, (void **)&pData);
+    m_errorMonitor->VerifyFound();
+    // Now error due to unmapping memory that's not mapped
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Unmapping Memory without memory being mapped: ");
+    vk::UnmapMemory(m_device->device(), mem);
+    m_errorMonitor->VerifyFound();
+
+    // Now map memory and cause errors due to flushing invalid ranges
+    err = vk::MapMemory(m_device->device(), mem, 4 * atom_size, VK_WHOLE_SIZE, 0, (void **)&pData);
+    ASSERT_VK_SUCCESS(err);
+    VkMappedMemoryRange mmr = {};
+    mmr.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
+    mmr.memory = mem;
+    mmr.offset = atom_size;  // Error b/c offset less than offset of mapped mem
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMappedMemoryRange-size-00685");
+    vk::FlushMappedMemoryRanges(m_device->device(), 1, &mmr);
+    m_errorMonitor->VerifyFound();
+
+    // Now flush range that oversteps mapped range
+    vk::UnmapMemory(m_device->device(), mem);
+    err = vk::MapMemory(m_device->device(), mem, 0, 4 * atom_size, 0, (void **)&pData);
+    ASSERT_VK_SUCCESS(err);
+    mmr.offset = atom_size;
+    mmr.size = 4 * atom_size;  // Flushing bounds exceed mapped bounds
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMappedMemoryRange-size-00685");
+    vk::FlushMappedMemoryRanges(m_device->device(), 1, &mmr);
+    m_errorMonitor->VerifyFound();
+
+    // Now flush range with VK_WHOLE_SIZE that oversteps offset
+    vk::UnmapMemory(m_device->device(), mem);
+    err = vk::MapMemory(m_device->device(), mem, 2 * atom_size, 4 * atom_size, 0, (void **)&pData);
+    ASSERT_VK_SUCCESS(err);
+    mmr.offset = atom_size;
+    mmr.size = VK_WHOLE_SIZE;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMappedMemoryRange-size-00686");
+    vk::FlushMappedMemoryRanges(m_device->device(), 1, &mmr);
+    m_errorMonitor->VerifyFound();
+
+    // Some platforms have an atom size of 1, which makes these alignment checks meaningless
+    if (atom_size > 3) {
+        // Now with an offset NOT a multiple of the device limit
+        vk::UnmapMemory(m_device->device(), mem);
+        err = vk::MapMemory(m_device->device(), mem, 0, 4 * atom_size, 0, (void **)&pData);
+        ASSERT_VK_SUCCESS(err);
+        mmr.offset = 3;  // Not a multiple of atom_size
+        mmr.size = VK_WHOLE_SIZE;
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMappedMemoryRange-offset-00687");
+        vk::FlushMappedMemoryRanges(m_device->device(), 1, &mmr);
+        m_errorMonitor->VerifyFound();
+
+        // Now with a size NOT a multiple of the device limit
+        vk::UnmapMemory(m_device->device(), mem);
+        err = vk::MapMemory(m_device->device(), mem, 0, 4 * atom_size, 0, (void **)&pData);
+        ASSERT_VK_SUCCESS(err);
+        mmr.offset = atom_size;
+        mmr.size = 2 * atom_size + 1;  // Not a multiple of atom_size
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMappedMemoryRange-size-01390");
+        vk::FlushMappedMemoryRanges(m_device->device(), 1, &mmr);
+        m_errorMonitor->VerifyFound();
+    }
+
+    pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &alloc_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
+                                           VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
+    if (!pass) {
+        printf("%s Failed to set memory type.\n", kSkipPrefix);
+        vk::FreeMemory(m_device->device(), mem, NULL);
+        vk::DestroyBuffer(m_device->device(), buffer, NULL);
+        return;
+    }
+    // TODO : If we can get HOST_VISIBLE w/o HOST_COHERENT we can test cases of
+    //  kVUID_Core_MemTrack_InvalidMap in validateAndCopyNoncoherentMemoryToDriver()
+
+    vk::DestroyBuffer(m_device->device(), buffer, NULL);
+    vk::FreeMemory(m_device->device(), mem, NULL);
+}
+
+TEST_F(VkLayerTest, MapMemWithoutHostVisibleBit) {
+    TEST_DESCRIPTION("Allocate memory that is not mappable and then attempt to map it.");
+    VkResult err;
+    bool pass;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkMapMemory-memory-00682");
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkMemoryAllocateInfo mem_alloc = {};
+    mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    mem_alloc.pNext = NULL;
+    mem_alloc.allocationSize = 1024;
+
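+    // The last argument is a "forbid" mask: require a memory type that is NOT host-visible.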
+    pass = m_device->phy().set_memory_type(0xFFFFFFFF, &mem_alloc, 0, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+    if (!pass) {  // If we can't find any unmappable memory this test doesn't
+                  // make sense
+        printf("%s No unmappable memory types found, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    VkDeviceMemory mem;
+    err = vk::AllocateMemory(m_device->device(), &mem_alloc, NULL, &mem);
+    ASSERT_VK_SUCCESS(err);
+
+    void *mappedAddress = NULL;
+    err = vk::MapMemory(m_device->device(), mem, 0, VK_WHOLE_SIZE, 0, &mappedAddress);
+    m_errorMonitor->VerifyFound();
+
+    vk::FreeMemory(m_device->device(), mem, NULL);
+}
+
+TEST_F(VkLayerTest, RebindMemory) {
+    VkResult err;
+    bool pass;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-image-01044");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // Create an image, allocate memory, free it, and then try to bind it
+    VkImage image;
+    VkDeviceMemory mem1;
+    VkDeviceMemory mem2;
+    VkMemoryRequirements mem_reqs;
+
+    const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
+    const int32_t tex_width = 32;
+    const int32_t tex_height = 32;
+
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = tex_format;
+    image_create_info.extent.width = tex_width;
+    image_create_info.extent.height = tex_height;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+    image_create_info.flags = 0;
+
+    VkMemoryAllocateInfo mem_alloc = {};
+    mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    mem_alloc.pNext = NULL;
+    mem_alloc.allocationSize = 0;
+    mem_alloc.memoryTypeIndex = 0;
+
+    // Introduce failure, do NOT set memProps to
+    // VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT
+    mem_alloc.memoryTypeIndex = 1;
+    err = vk::CreateImage(m_device->device(), &image_create_info, NULL, &image);
+    ASSERT_VK_SUCCESS(err);
+
+    vk::GetImageMemoryRequirements(m_device->device(), image, &mem_reqs);
+
+    mem_alloc.allocationSize = mem_reqs.size;
+    pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc, 0);
+    ASSERT_TRUE(pass);
+
+    // allocate 2 memory objects
+    err = vk::AllocateMemory(m_device->device(), &mem_alloc, NULL, &mem1);
+    ASSERT_VK_SUCCESS(err);
+    err = vk::AllocateMemory(m_device->device(), &mem_alloc, NULL, &mem2);
+    ASSERT_VK_SUCCESS(err);
+
+    // Bind first memory object to Image object
+    err = vk::BindImageMemory(m_device->device(), image, mem1, 0);
+    ASSERT_VK_SUCCESS(err);
+
+    // Introduce validation failure, try to bind a different memory object to
+    // the same image object
+    err = vk::BindImageMemory(m_device->device(), image, mem2, 0);
+
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyImage(m_device->device(), image, NULL);
+    vk::FreeMemory(m_device->device(), mem1, NULL);
+    vk::FreeMemory(m_device->device(), mem2, NULL);
+}
+
+TEST_F(VkLayerTest, QueryMemoryCommitmentWithoutLazyProperty) {
+    TEST_DESCRIPTION("Attempt to query memory commitment on memory without lazy allocation");
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    auto image_ci = vk_testing::Image::create_info();
+    image_ci.imageType = VK_IMAGE_TYPE_2D;
+    image_ci.format = VK_FORMAT_B8G8R8A8_UNORM;
+    image_ci.extent.width = 32;
+    image_ci.extent.height = 32;
+    image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_ci.usage = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+    VkImageObj image(m_device);
+    image.init_no_mem(*m_device, image_ci);
+
+    auto mem_reqs = image.memory_requirements();
+    // memory_type_index is set to 0 here, but is set properly below
+    auto image_alloc_info = vk_testing::DeviceMemory::alloc_info(mem_reqs.size, 0);
+
+    bool pass;
+    // the last argument is the "forbid" argument for set_memory_type, disallowing
+    // that particular memory type rather than requiring it
+    pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &image_alloc_info, 0, VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT);
+    if (!pass) {
+        printf("%s Failed to set memory type.\n", kSkipPrefix);
+        return;
+    }
+    vk_testing::DeviceMemory mem;
+    mem.init(*m_device, image_alloc_info);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetDeviceMemoryCommitment-memory-00690");
+    VkDeviceSize size;
+    vk::GetDeviceMemoryCommitment(m_device->device(), mem.handle(), &size);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InvalidUsageBits) {
+    TEST_DESCRIPTION(
+        "Specify wrong usage for image then create conflicting view of image Initialize buffer with wrong usage then perform copy "
+        "expecting errors from both the image and the buffer (2 calls)");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    auto format = FindSupportedDepthStencilFormat(gpu());
+    if (!format) {
+        printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    VkImageObj image(m_device);
+    // Initialize image with transfer source usage
+    image.Init(128, 128, 1, format, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(image.initialized());
+
+    VkImageView dsv;
+    VkImageViewCreateInfo dsvci = {};
+    dsvci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    dsvci.image = image.handle();
+    dsvci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    dsvci.format = format;
+    dsvci.subresourceRange.layerCount = 1;
+    dsvci.subresourceRange.baseMipLevel = 0;
+    dsvci.subresourceRange.levelCount = 1;
+    dsvci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+
+    // Create a view with depth / stencil aspect for image with different usage
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-CoreValidation-MemTrack-InvalidUsageFlag");
+    vk::CreateImageView(m_device->device(), &dsvci, NULL, &dsv);
+    m_errorMonitor->VerifyFound();
+
+    // Initialize buffer with TRANSFER_DST usage
+    VkBufferObj buffer;
+    VkMemoryPropertyFlags reqs = 0;
+    buffer.init_as_dst(*m_device, 128 * 128, reqs);
+    VkBufferImageCopy region = {};
+    region.bufferRowLength = 128;
+    region.bufferImageHeight = 128;
+    region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+    region.imageSubresource.layerCount = 1;
+    region.imageExtent.height = 16;
+    region.imageExtent.width = 16;
+    region.imageExtent.depth = 1;
+
+    // Buffer usage not set to TRANSFER_SRC and image usage not set to TRANSFER_DST
+    m_commandBuffer->begin();
+
+    // two separate errors from this call:
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyBufferToImage-dstImage-00177");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyBufferToImage-srcBuffer-00174");
+
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer.handle(), image.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1,
+                             &region);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CopyBufferToCompressedImage) {
+    TEST_DESCRIPTION("Copy buffer to compressed image when buffer is larger than image.");
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // Verify format support
+    if (!ImageFormatAndFeaturesSupported(gpu(), VK_FORMAT_BC1_RGBA_SRGB_BLOCK, VK_IMAGE_TILING_OPTIMAL,
+                                         VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR)) {
+        printf("%s Required formats/features not supported - CopyBufferToCompressedImage skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    VkImageObj width_image(m_device);
+    VkImageObj height_image(m_device);
+    VkBufferObj buffer;
+    VkMemoryPropertyFlags reqs = 0;
+    buffer.init_as_src(*m_device, 8 * 4 * 2, reqs);
+    VkBufferImageCopy region = {};
+    region.bufferRowLength = 0;
+    region.bufferImageHeight = 0;
+    region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    region.imageSubresource.layerCount = 1;
+    region.imageExtent.width = 8;
+    region.imageExtent.height = 4;
+    region.imageExtent.depth = 1;
+
+    width_image.Init(5, 4, 1, VK_FORMAT_BC1_RGBA_SRGB_BLOCK, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
+    height_image.Init(8, 3, 1, VK_FORMAT_BC1_RGBA_SRGB_BLOCK, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
+    if (!width_image.initialized() || (!height_image.initialized())) {
+        printf("%s Unable to initialize surfaces - UncompressedToCompressedImageCopy skipped.\n", kSkipPrefix);
+        return;
+    }
+    m_commandBuffer->begin();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-imageOffset-00197");
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer.handle(), width_image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-imageOffset-00200");
+    m_errorMonitor->SetUnexpectedError("VUID-vkCmdCopyBufferToImage-pRegions-00172");
+
+    VkResult err;
+    VkImageCreateInfo depth_image_create_info = {};
+    depth_image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    depth_image_create_info.pNext = NULL;
+    depth_image_create_info.imageType = VK_IMAGE_TYPE_3D;
+    depth_image_create_info.format = VK_FORMAT_BC1_RGBA_SRGB_BLOCK;
+    depth_image_create_info.extent.width = 8;
+    depth_image_create_info.extent.height = 4;
+    depth_image_create_info.extent.depth = 1;
+    depth_image_create_info.mipLevels = 1;
+    depth_image_create_info.arrayLayers = 1;
+    depth_image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    depth_image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    depth_image_create_info.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
+    depth_image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    depth_image_create_info.queueFamilyIndexCount = 0;
+    depth_image_create_info.pQueueFamilyIndices = NULL;
+
+    VkImage depth_image = VK_NULL_HANDLE;
+    err = vk::CreateImage(m_device->handle(), &depth_image_create_info, NULL, &depth_image);
+    ASSERT_VK_SUCCESS(err);
+
+    VkDeviceMemory mem1;
+    VkMemoryRequirements mem_reqs;
+    mem_reqs.memoryTypeBits = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+    VkMemoryAllocateInfo mem_alloc = {};
+    mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    mem_alloc.pNext = NULL;
+    mem_alloc.allocationSize = 0;
+    mem_alloc.memoryTypeIndex = 0;
+    vk::GetImageMemoryRequirements(m_device->device(), depth_image, &mem_reqs);
+    mem_alloc.allocationSize = mem_reqs.size;
+    bool pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc, 0);
+    ASSERT_TRUE(pass);
+    err = vk::AllocateMemory(m_device->device(), &mem_alloc, NULL, &mem1);
+    ASSERT_VK_SUCCESS(err);
+    err = vk::BindImageMemory(m_device->device(), depth_image, mem1, 0);
+
+    region.imageExtent.depth = 2;
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer.handle(), depth_image, VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyImage(m_device->device(), depth_image, NULL);
+    vk::FreeMemory(m_device->device(), mem1, NULL);
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, CreateUnknownObject) {
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetImageMemoryRequirements-image-parameter");
+
+    TEST_DESCRIPTION("Pass an invalid image object handle into a Vulkan API call.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // Pass bogus handle into GetImageMemoryRequirements
+    VkMemoryRequirements mem_reqs;
+    uint64_t fakeImageHandle = 0xCADECADE;
+    VkImage fauxImage = reinterpret_cast<VkImage &>(fakeImageHandle);
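+    // Reinterpret an arbitrary integer as a VkImage so the call receives a handle that was never created.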
+
+    vk::GetImageMemoryRequirements(m_device->device(), fauxImage, &mem_reqs);
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, BindImageInvalidMemoryType) {
+    VkResult err;
+
+    TEST_DESCRIPTION("Test validation check for an invalid memory type index during bind[Buffer|Image]Memory time");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // Create an image, allocate memory, set a bad typeIndex and then try to
+    // bind it
+    VkImage image;
+    VkDeviceMemory mem;
+    VkMemoryRequirements mem_reqs;
+    const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
+    const int32_t tex_width = 32;
+    const int32_t tex_height = 32;
+
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = tex_format;
+    image_create_info.extent.width = tex_width;
+    image_create_info.extent.height = tex_height;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+    image_create_info.flags = 0;
+
+    VkMemoryAllocateInfo mem_alloc = {};
+    mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    mem_alloc.pNext = NULL;
+    mem_alloc.allocationSize = 0;
+    mem_alloc.memoryTypeIndex = 0;
+
+    err = vk::CreateImage(m_device->device(), &image_create_info, NULL, &image);
+    ASSERT_VK_SUCCESS(err);
+
+    vk::GetImageMemoryRequirements(m_device->device(), image, &mem_reqs);
+    mem_alloc.allocationSize = mem_reqs.size;
+
+    // Introduce Failure, select invalid TypeIndex
+    VkPhysicalDeviceMemoryProperties memory_info;
+
+    vk::GetPhysicalDeviceMemoryProperties(gpu(), &memory_info);
+    unsigned int i;
+    for (i = 0; i < memory_info.memoryTypeCount; i++) {
+        if ((mem_reqs.memoryTypeBits & (1 << i)) == 0) {
+            mem_alloc.memoryTypeIndex = i;
+            break;
+        }
+    }
+    if (i >= memory_info.memoryTypeCount) {
+        printf("%s No invalid memory type index could be found; skipped.\n", kSkipPrefix);
+        vk::DestroyImage(m_device->device(), image, NULL);
+        return;
+    }
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "for this object type are not compatible with the memory");
+
+    err = vk::AllocateMemory(m_device->device(), &mem_alloc, NULL, &mem);
+    ASSERT_VK_SUCCESS(err);
+
+    err = vk::BindImageMemory(m_device->device(), image, mem, 0);
+    (void)err;
+
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyImage(m_device->device(), image, NULL);
+    vk::FreeMemory(m_device->device(), mem, NULL);
+}
+
+TEST_F(VkLayerTest, BindInvalidMemory) {
+    VkResult err;
+    bool pass;
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    const VkFormat tex_format = VK_FORMAT_R8G8B8A8_UNORM;
+    const int32_t tex_width = 256;
+    const int32_t tex_height = 256;
+
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = tex_format;
+    image_create_info.extent.width = tex_width;
+    image_create_info.extent.height = tex_height;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+    image_create_info.flags = 0;
+
+    VkBufferCreateInfo buffer_create_info = {};
+    buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    buffer_create_info.pNext = NULL;
+    buffer_create_info.flags = 0;
+    buffer_create_info.size = 4 * 1024 * 1024;
+    buffer_create_info.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
+    buffer_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+
+    // Create an image/buffer, allocate memory, free it, and then try to bind it
+    {
+        VkImage image = VK_NULL_HANDLE;
+        VkBuffer buffer = VK_NULL_HANDLE;
+        err = vk::CreateImage(device(), &image_create_info, NULL, &image);
+        ASSERT_VK_SUCCESS(err);
+        err = vk::CreateBuffer(device(), &buffer_create_info, NULL, &buffer);
+        ASSERT_VK_SUCCESS(err);
+        VkMemoryRequirements image_mem_reqs = {}, buffer_mem_reqs = {};
+        vk::GetImageMemoryRequirements(device(), image, &image_mem_reqs);
+        vk::GetBufferMemoryRequirements(device(), buffer, &buffer_mem_reqs);
+
+        VkMemoryAllocateInfo image_mem_alloc = {}, buffer_mem_alloc = {};
+        image_mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+        image_mem_alloc.allocationSize = image_mem_reqs.size;
+        pass = m_device->phy().set_memory_type(image_mem_reqs.memoryTypeBits, &image_mem_alloc, 0);
+        ASSERT_TRUE(pass);
+        buffer_mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+        buffer_mem_alloc.allocationSize = buffer_mem_reqs.size;
+        pass = m_device->phy().set_memory_type(buffer_mem_reqs.memoryTypeBits, &buffer_mem_alloc, 0);
+        ASSERT_TRUE(pass);
+
+        VkDeviceMemory image_mem = VK_NULL_HANDLE, buffer_mem = VK_NULL_HANDLE;
+        err = vk::AllocateMemory(device(), &image_mem_alloc, NULL, &image_mem);
+        ASSERT_VK_SUCCESS(err);
+        err = vk::AllocateMemory(device(), &buffer_mem_alloc, NULL, &buffer_mem);
+        ASSERT_VK_SUCCESS(err);
+
+        vk::FreeMemory(device(), image_mem, NULL);
+        vk::FreeMemory(device(), buffer_mem, NULL);
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-memory-parameter");
+        err = vk::BindImageMemory(device(), image, image_mem, 0);
+        (void)err;  // This may very well return an error.
+        m_errorMonitor->VerifyFound();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-memory-parameter");
+        err = vk::BindBufferMemory(device(), buffer, buffer_mem, 0);
+        (void)err;  // This may very well return an error.
+        m_errorMonitor->VerifyFound();
+
+        vk::DestroyImage(m_device->device(), image, NULL);
+        vk::DestroyBuffer(m_device->device(), buffer, NULL);
+    }
+
+    // Try to bind memory to an object that already has a memory binding
+    {
+        VkImage image = VK_NULL_HANDLE;
+        err = vk::CreateImage(device(), &image_create_info, NULL, &image);
+        ASSERT_VK_SUCCESS(err);
+        VkBuffer buffer = VK_NULL_HANDLE;
+        err = vk::CreateBuffer(device(), &buffer_create_info, NULL, &buffer);
+        ASSERT_VK_SUCCESS(err);
+        VkMemoryRequirements image_mem_reqs = {}, buffer_mem_reqs = {};
+        vk::GetImageMemoryRequirements(device(), image, &image_mem_reqs);
+        vk::GetBufferMemoryRequirements(device(), buffer, &buffer_mem_reqs);
+        VkMemoryAllocateInfo image_alloc_info = {}, buffer_alloc_info = {};
+        image_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+        image_alloc_info.allocationSize = image_mem_reqs.size;
+        buffer_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+        buffer_alloc_info.allocationSize = buffer_mem_reqs.size;
+        pass = m_device->phy().set_memory_type(image_mem_reqs.memoryTypeBits, &image_alloc_info, 0);
+        ASSERT_TRUE(pass);
+        pass = m_device->phy().set_memory_type(buffer_mem_reqs.memoryTypeBits, &buffer_alloc_info, 0);
+        ASSERT_TRUE(pass);
+        VkDeviceMemory image_mem, buffer_mem;
+        err = vk::AllocateMemory(device(), &image_alloc_info, NULL, &image_mem);
+        ASSERT_VK_SUCCESS(err);
+        err = vk::AllocateMemory(device(), &buffer_alloc_info, NULL, &buffer_mem);
+        ASSERT_VK_SUCCESS(err);
+
+        err = vk::BindImageMemory(device(), image, image_mem, 0);
+        ASSERT_VK_SUCCESS(err);
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-image-01044");
+        err = vk::BindImageMemory(device(), image, image_mem, 0);
+        (void)err;  // This may very well return an error.
+        m_errorMonitor->VerifyFound();
+
+        err = vk::BindBufferMemory(device(), buffer, buffer_mem, 0);
+        ASSERT_VK_SUCCESS(err);
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-buffer-01029");
+        err = vk::BindBufferMemory(device(), buffer, buffer_mem, 0);
+        (void)err;  // This may very well return an error.
+        m_errorMonitor->VerifyFound();
+
+        vk::FreeMemory(device(), image_mem, NULL);
+        vk::FreeMemory(device(), buffer_mem, NULL);
+        vk::DestroyImage(device(), image, NULL);
+        vk::DestroyBuffer(device(), buffer, NULL);
+    }
+
+    // Try to bind memory to an object with an invalid memoryOffset
+    {
+        VkImage image = VK_NULL_HANDLE;
+        err = vk::CreateImage(device(), &image_create_info, NULL, &image);
+        ASSERT_VK_SUCCESS(err);
+        VkBuffer buffer = VK_NULL_HANDLE;
+        err = vk::CreateBuffer(device(), &buffer_create_info, NULL, &buffer);
+        ASSERT_VK_SUCCESS(err);
+        VkMemoryRequirements image_mem_reqs = {}, buffer_mem_reqs = {};
+        vk::GetImageMemoryRequirements(device(), image, &image_mem_reqs);
+        vk::GetBufferMemoryRequirements(device(), buffer, &buffer_mem_reqs);
+        VkMemoryAllocateInfo image_alloc_info = {}, buffer_alloc_info = {};
+        image_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+        // Leave some extra space for alignment wiggle room
+        image_alloc_info.allocationSize = image_mem_reqs.size + image_mem_reqs.alignment;
+        buffer_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+        buffer_alloc_info.allocationSize = buffer_mem_reqs.size + buffer_mem_reqs.alignment;
+        pass = m_device->phy().set_memory_type(image_mem_reqs.memoryTypeBits, &image_alloc_info, 0);
+        ASSERT_TRUE(pass);
+        pass = m_device->phy().set_memory_type(buffer_mem_reqs.memoryTypeBits, &buffer_alloc_info, 0);
+        ASSERT_TRUE(pass);
+        VkDeviceMemory image_mem, buffer_mem;
+        err = vk::AllocateMemory(device(), &image_alloc_info, NULL, &image_mem);
+        ASSERT_VK_SUCCESS(err);
+        err = vk::AllocateMemory(device(), &buffer_alloc_info, NULL, &buffer_mem);
+        ASSERT_VK_SUCCESS(err);
+
+        // Test unaligned memory offset
+        {
+            if (image_mem_reqs.alignment > 1) {
+                VkDeviceSize image_offset = 1;
+                m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-memoryOffset-01048");
+                err = vk::BindImageMemory(device(), image, image_mem, image_offset);
+                (void)err;  // This may very well return an error.
+                m_errorMonitor->VerifyFound();
+            }
+
+            if (buffer_mem_reqs.alignment > 1) {
+                VkDeviceSize buffer_offset = 1;
+                m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-memoryOffset-01036");
+                err = vk::BindBufferMemory(device(), buffer, buffer_mem, buffer_offset);
+                (void)err;  // This may very well return an error.
+                m_errorMonitor->VerifyFound();
+            }
+        }
+
+        // Test memory offsets outside the memory allocation
+        {
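+            // Compute an aligned offset at or beyond the end of the allocation.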
+            VkDeviceSize image_offset =
+                (image_alloc_info.allocationSize + image_mem_reqs.alignment) & ~(image_mem_reqs.alignment - 1);
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-memoryOffset-01046");
+            err = vk::BindImageMemory(device(), image, image_mem, image_offset);
+            (void)err;  // This may very well return an error.
+            m_errorMonitor->VerifyFound();
+
+            VkDeviceSize buffer_offset =
+                (buffer_alloc_info.allocationSize + buffer_mem_reqs.alignment) & ~(buffer_mem_reqs.alignment - 1);
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-memoryOffset-01031");
+            err = vk::BindBufferMemory(device(), buffer, buffer_mem, buffer_offset);
+            (void)err;  // This may very well return an error.
+            m_errorMonitor->VerifyFound();
+        }
+
+        // Test memory offsets within the memory allocation, but which leave too little memory for
+        // the resource.
+        {
+            VkDeviceSize image_offset = (image_mem_reqs.size - 1) & ~(image_mem_reqs.alignment - 1);
+            if ((image_offset > 0) && (image_mem_reqs.size < (image_alloc_info.allocationSize - image_mem_reqs.alignment))) {
+                m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-size-01049");
+                err = vk::BindImageMemory(device(), image, image_mem, image_offset);
+                (void)err;  // This may very well return an error.
+                m_errorMonitor->VerifyFound();
+            }
+
+            VkDeviceSize buffer_offset = (buffer_mem_reqs.size - 1) & ~(buffer_mem_reqs.alignment - 1);
+            if (buffer_offset > 0) {
+                m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-size-01037");
+                err = vk::BindBufferMemory(device(), buffer, buffer_mem, buffer_offset);
+                (void)err;  // This may very well return an error.
+                m_errorMonitor->VerifyFound();
+            }
+        }
+
+        vk::FreeMemory(device(), image_mem, NULL);
+        vk::FreeMemory(device(), buffer_mem, NULL);
+        vk::DestroyImage(device(), image, NULL);
+        vk::DestroyBuffer(device(), buffer, NULL);
+    }
+
+    // Try to bind memory to an object with an invalid memory type
+    {
+        VkImage image = VK_NULL_HANDLE;
+        err = vk::CreateImage(device(), &image_create_info, NULL, &image);
+        ASSERT_VK_SUCCESS(err);
+        VkBuffer buffer = VK_NULL_HANDLE;
+        err = vk::CreateBuffer(device(), &buffer_create_info, NULL, &buffer);
+        ASSERT_VK_SUCCESS(err);
+        VkMemoryRequirements image_mem_reqs = {}, buffer_mem_reqs = {};
+        vk::GetImageMemoryRequirements(device(), image, &image_mem_reqs);
+        vk::GetBufferMemoryRequirements(device(), buffer, &buffer_mem_reqs);
+        VkMemoryAllocateInfo image_alloc_info = {}, buffer_alloc_info = {};
+        image_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+        image_alloc_info.allocationSize = image_mem_reqs.size;
+        buffer_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+        buffer_alloc_info.allocationSize = buffer_mem_reqs.size;
+        // Create a mask of available memory types *not* supported by these resources,
+        // and try to use one of them.
+        VkPhysicalDeviceMemoryProperties memory_properties = {};
+        vk::GetPhysicalDeviceMemoryProperties(m_device->phy().handle(), &memory_properties);
+        VkDeviceMemory image_mem, buffer_mem;
+
+        uint32_t image_unsupported_mem_type_bits = ((1 << memory_properties.memoryTypeCount) - 1) & ~image_mem_reqs.memoryTypeBits;
+        if (image_unsupported_mem_type_bits != 0) {
+            pass = m_device->phy().set_memory_type(image_unsupported_mem_type_bits, &image_alloc_info, 0);
+            ASSERT_TRUE(pass);
+            err = vk::AllocateMemory(device(), &image_alloc_info, NULL, &image_mem);
+            ASSERT_VK_SUCCESS(err);
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-memory-01047");
+            err = vk::BindImageMemory(device(), image, image_mem, 0);
+            (void)err;  // This may very well return an error.
+            m_errorMonitor->VerifyFound();
+            vk::FreeMemory(device(), image_mem, NULL);
+        }
+
+        uint32_t buffer_unsupported_mem_type_bits =
+            ((1 << memory_properties.memoryTypeCount) - 1) & ~buffer_mem_reqs.memoryTypeBits;
+        if (buffer_unsupported_mem_type_bits != 0) {
+            pass = m_device->phy().set_memory_type(buffer_unsupported_mem_type_bits, &buffer_alloc_info, 0);
+            ASSERT_TRUE(pass);
+            err = vk::AllocateMemory(device(), &buffer_alloc_info, NULL, &buffer_mem);
+            ASSERT_VK_SUCCESS(err);
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-memory-01035");
+            err = vk::BindBufferMemory(device(), buffer, buffer_mem, 0);
+            (void)err;  // This may very well return an error.
+            m_errorMonitor->VerifyFound();
+            vk::FreeMemory(device(), buffer_mem, NULL);
+        }
+
+        vk::DestroyImage(device(), image, NULL);
+        vk::DestroyBuffer(device(), buffer, NULL);
+    }
+
+    // Try to bind memory to an image created with sparse memory flags
+    {
+        VkImageCreateInfo sparse_image_create_info = image_create_info;
+        sparse_image_create_info.flags |= VK_IMAGE_CREATE_SPARSE_BINDING_BIT;
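+        // Images created with SPARSE_BINDING must be bound via vkQueueBindSparse, not vkBindImageMemory.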
+        VkImageFormatProperties image_format_properties = {};
+        err = vk::GetPhysicalDeviceImageFormatProperties(m_device->phy().handle(), sparse_image_create_info.format,
+                                                         sparse_image_create_info.imageType, sparse_image_create_info.tiling,
+                                                         sparse_image_create_info.usage, sparse_image_create_info.flags,
+                                                         &image_format_properties);
+        if (!m_device->phy().features().sparseResidencyImage2D || err == VK_ERROR_FORMAT_NOT_SUPPORTED) {
+            // most likely means sparse formats aren't supported here; skip this test.
+        } else {
+            ASSERT_VK_SUCCESS(err);
+            if (image_format_properties.maxExtent.width == 0) {
+                printf("%s Sparse image format not supported; skipped.\n", kSkipPrefix);
+                return;
+            } else {
+                VkImage sparse_image = VK_NULL_HANDLE;
+                err = vk::CreateImage(m_device->device(), &sparse_image_create_info, NULL, &sparse_image);
+                ASSERT_VK_SUCCESS(err);
+                VkMemoryRequirements sparse_mem_reqs = {};
+                vk::GetImageMemoryRequirements(m_device->device(), sparse_image, &sparse_mem_reqs);
+                if (sparse_mem_reqs.memoryTypeBits != 0) {
+                    VkMemoryAllocateInfo sparse_mem_alloc = {};
+                    sparse_mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+                    sparse_mem_alloc.pNext = NULL;
+                    sparse_mem_alloc.allocationSize = sparse_mem_reqs.size;
+                    sparse_mem_alloc.memoryTypeIndex = 0;
+                    pass = m_device->phy().set_memory_type(sparse_mem_reqs.memoryTypeBits, &sparse_mem_alloc, 0);
+                    ASSERT_TRUE(pass);
+                    VkDeviceMemory sparse_mem = VK_NULL_HANDLE;
+                    err = vk::AllocateMemory(m_device->device(), &sparse_mem_alloc, NULL, &sparse_mem);
+                    ASSERT_VK_SUCCESS(err);
+                    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-image-01045");
+                    err = vk::BindImageMemory(m_device->device(), sparse_image, sparse_mem, 0);
+                    // This may very well return an error.
+                    (void)err;
+                    m_errorMonitor->VerifyFound();
+                    vk::FreeMemory(m_device->device(), sparse_mem, NULL);
+                }
+                vk::DestroyImage(m_device->device(), sparse_image, NULL);
+            }
+        }
+    }
+
+    // Try to bind memory to a buffer created with sparse memory flags
+    {
+        VkBufferCreateInfo sparse_buffer_create_info = buffer_create_info;
+        sparse_buffer_create_info.flags |= VK_BUFFER_CREATE_SPARSE_BINDING_BIT;
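+        // Buffers created with SPARSE_BINDING must be bound via vkQueueBindSparse, not vkBindBufferMemory.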
+        if (!m_device->phy().features().sparseResidencyBuffer) {
+            // most likely means sparse formats aren't supported here; skip this test.
+        } else {
+            VkBuffer sparse_buffer = VK_NULL_HANDLE;
+            err = vk::CreateBuffer(m_device->device(), &sparse_buffer_create_info, NULL, &sparse_buffer);
+            ASSERT_VK_SUCCESS(err);
+            VkMemoryRequirements sparse_mem_reqs = {};
+            vk::GetBufferMemoryRequirements(m_device->device(), sparse_buffer, &sparse_mem_reqs);
+            if (sparse_mem_reqs.memoryTypeBits != 0) {
+                VkMemoryAllocateInfo sparse_mem_alloc = {};
+                sparse_mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+                sparse_mem_alloc.pNext = NULL;
+                sparse_mem_alloc.allocationSize = sparse_mem_reqs.size;
+                sparse_mem_alloc.memoryTypeIndex = 0;
+                pass = m_device->phy().set_memory_type(sparse_mem_reqs.memoryTypeBits, &sparse_mem_alloc, 0);
+                ASSERT_TRUE(pass);
+                VkDeviceMemory sparse_mem = VK_NULL_HANDLE;
+                err = vk::AllocateMemory(m_device->device(), &sparse_mem_alloc, NULL, &sparse_mem);
+                ASSERT_VK_SUCCESS(err);
+                m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-buffer-01030");
+                err = vk::BindBufferMemory(m_device->device(), sparse_buffer, sparse_mem, 0);
+                // This may very well return an error.
+                (void)err;
+                m_errorMonitor->VerifyFound();
+                vk::FreeMemory(m_device->device(), sparse_mem, NULL);
+            }
+            vk::DestroyBuffer(m_device->device(), sparse_buffer, NULL);
+        }
+    }
+}
+
+TEST_F(VkLayerTest, BindMemoryToDestroyedObject) {
+    VkResult err;
+    bool pass;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-image-parameter");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // Create an image object, allocate memory, destroy the object and then try
+    // to bind it
+    VkImage image;
+    VkDeviceMemory mem;
+    VkMemoryRequirements mem_reqs;
+
+    const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
+    const int32_t tex_width = 32;
+    const int32_t tex_height = 32;
+
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = tex_format;
+    image_create_info.extent.width = tex_width;
+    image_create_info.extent.height = tex_height;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+    image_create_info.flags = 0;
+
+    VkMemoryAllocateInfo mem_alloc = {};
+    mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    mem_alloc.pNext = NULL;
+    mem_alloc.allocationSize = 0;
+    mem_alloc.memoryTypeIndex = 0;
+
+    err = vk::CreateImage(m_device->device(), &image_create_info, NULL, &image);
+    ASSERT_VK_SUCCESS(err);
+
+    vk::GetImageMemoryRequirements(m_device->device(), image, &mem_reqs);
+
+    mem_alloc.allocationSize = mem_reqs.size;
+    pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc, 0);
+    ASSERT_TRUE(pass);
+
+    // Allocate memory
+    err = vk::AllocateMemory(m_device->device(), &mem_alloc, NULL, &mem);
+    ASSERT_VK_SUCCESS(err);
+
+    // Introduce validation failure, destroy Image object before binding
+    vk::DestroyImage(m_device->device(), image, NULL);
+    ASSERT_VK_SUCCESS(err);
+
+    // Now Try to bind memory to this destroyed object
+    err = vk::BindImageMemory(m_device->device(), image, mem, 0);
+    // This may very well return an error.
+    (void)err;
+
+    m_errorMonitor->VerifyFound();
+
+    vk::FreeMemory(m_device->device(), mem, NULL);
+}
+
+TEST_F(VkLayerTest, ExceedMemoryAllocationCount) {
+    VkResult err = VK_SUCCESS;
+    const int max_mems = 32;
+    VkDeviceMemory mems[max_mems + 1];
+
+    if (!EnableDeviceProfileLayer()) {
+        printf("%s Failed to enable device profile layer.\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    PFN_vkSetPhysicalDeviceLimitsEXT fpvkSetPhysicalDeviceLimitsEXT =
+        (PFN_vkSetPhysicalDeviceLimitsEXT)vk::GetInstanceProcAddr(instance(), "vkSetPhysicalDeviceLimitsEXT");
+    PFN_vkGetOriginalPhysicalDeviceLimitsEXT fpvkGetOriginalPhysicalDeviceLimitsEXT =
+        (PFN_vkGetOriginalPhysicalDeviceLimitsEXT)vk::GetInstanceProcAddr(instance(), "vkGetOriginalPhysicalDeviceLimitsEXT");
+
+    if (!(fpvkSetPhysicalDeviceLimitsEXT) || !(fpvkGetOriginalPhysicalDeviceLimitsEXT)) {
+        printf("%s Can't find device_profile_api functions; skipped.\n", kSkipPrefix);
+        return;
+    }
+    VkPhysicalDeviceProperties props;
+    fpvkGetOriginalPhysicalDeviceLimitsEXT(gpu(), &props.limits);
+    if (props.limits.maxMemoryAllocationCount > max_mems) {
+        props.limits.maxMemoryAllocationCount = max_mems;
+        fpvkSetPhysicalDeviceLimitsEXT(gpu(), &props.limits);
+    }
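+    // The device_profile_api layer overrides the reported limit so the test can exceed it quickly.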
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "Number of currently valid memory objects is not less than the maximum allowed");
+
+    VkMemoryAllocateInfo mem_alloc = {};
+    mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    mem_alloc.pNext = NULL;
+    mem_alloc.memoryTypeIndex = 0;
+    mem_alloc.allocationSize = 4;
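+    // Tiny allocations suffice; only the number of live allocations counts against maxMemoryAllocationCount.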
+
+    int i;
+    for (i = 0; i <= max_mems; i++) {
+        err = vk::AllocateMemory(m_device->device(), &mem_alloc, NULL, &mems[i]);
+        if (err != VK_SUCCESS) {
+            break;
+        }
+    }
+    m_errorMonitor->VerifyFound();
+
+    for (int j = 0; j < i; j++) {
+        vk::FreeMemory(m_device->device(), mems[j], NULL);
+    }
+}
+
+TEST_F(VkLayerTest, ImageSampleCounts) {
+    TEST_DESCRIPTION("Use bad sample counts in image transfer calls to trigger validation errors.");
+    ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+    VkMemoryPropertyFlags reqs = 0;
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
+    image_create_info.extent.width = 256;
+    image_create_info.extent.height = 256;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.flags = 0;
+
+    VkImageBlit blit_region = {};
+    blit_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    blit_region.srcSubresource.baseArrayLayer = 0;
+    blit_region.srcSubresource.layerCount = 1;
+    blit_region.srcSubresource.mipLevel = 0;
+    blit_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    blit_region.dstSubresource.baseArrayLayer = 0;
+    blit_region.dstSubresource.layerCount = 1;
+    blit_region.dstSubresource.mipLevel = 0;
+    blit_region.srcOffsets[0] = {0, 0, 0};
+    blit_region.srcOffsets[1] = {256, 256, 1};
+    blit_region.dstOffsets[0] = {0, 0, 0};
+    blit_region.dstOffsets[1] = {128, 128, 1};
+
+    // Create two images, the source with sampleCount = 4, and attempt to blit
+    // between them
+    {
+        image_create_info.samples = VK_SAMPLE_COUNT_4_BIT;
+        image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+        VkImageObj src_image(m_device);
+        src_image.init(&image_create_info);
+        src_image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+        image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+        image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+        VkImageObj dst_image(m_device);
+        dst_image.init(&image_create_info);
+        dst_image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+        m_commandBuffer->begin();
+        // TODO: These 2 VUs are redundant - expect one of them to go away
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00233");
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00228");
+        vk::CmdBlitImage(m_commandBuffer->handle(), src_image.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dst_image.handle(),
+                         VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &blit_region, VK_FILTER_NEAREST);
+        m_errorMonitor->VerifyFound();
+        m_commandBuffer->end();
+    }
+
+    // Create two images, the dest with sampleCount = 4, and attempt to blit
+    // between them
+    {
+        image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+        image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+        VkImageObj src_image(m_device);
+        src_image.init(&image_create_info);
+        src_image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+        image_create_info.samples = VK_SAMPLE_COUNT_4_BIT;
+        image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+        VkImageObj dst_image(m_device);
+        dst_image.init(&image_create_info);
+        dst_image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+        m_commandBuffer->begin();
+        // TODO: These 2 VUs are redundant - expect one of them to go away
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-00234");
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00228");
+        vk::CmdBlitImage(m_commandBuffer->handle(), src_image.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dst_image.handle(),
+                         VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &blit_region, VK_FILTER_NEAREST);
+        m_errorMonitor->VerifyFound();
+        m_commandBuffer->end();
+    }
+
+    VkBufferImageCopy copy_region = {};
+    copy_region.bufferRowLength = 128;
+    copy_region.bufferImageHeight = 128;
+    copy_region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copy_region.imageSubresource.layerCount = 1;
+    copy_region.imageExtent.height = 64;
+    copy_region.imageExtent.width = 64;
+    copy_region.imageExtent.depth = 1;
+
+    // Create src buffer and dst image with sampleCount = 4 and attempt to copy
+    // buffer to image
+    {
+        VkBufferObj src_buffer;
+        src_buffer.init_as_src(*m_device, 128 * 128 * 4, reqs);
+        image_create_info.samples = VK_SAMPLE_COUNT_4_BIT;
+        image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+        VkImageObj dst_image(m_device);
+        dst_image.init(&image_create_info);
+        dst_image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+        m_commandBuffer->begin();
+        m_errorMonitor->SetDesiredFailureMsg(
+            VK_DEBUG_REPORT_ERROR_BIT_EXT,
+            "was created with a sample count of VK_SAMPLE_COUNT_4_BIT but must be VK_SAMPLE_COUNT_1_BIT");
+        vk::CmdCopyBufferToImage(m_commandBuffer->handle(), src_buffer.handle(), dst_image.handle(),
+                                 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &copy_region);
+        m_errorMonitor->VerifyFound();
+        m_commandBuffer->end();
+    }
+
+    // Create dst buffer and src image with sampleCount = 4 and attempt to copy
+    // image to buffer
+    {
+        VkBufferObj dst_buffer;
+        dst_buffer.init_as_dst(*m_device, 128 * 128 * 4, reqs);
+        image_create_info.samples = VK_SAMPLE_COUNT_4_BIT;
+        image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+        vk_testing::Image src_image;
+        src_image.init(*m_device, (const VkImageCreateInfo &)image_create_info, reqs);
+        m_commandBuffer->begin();
+        m_errorMonitor->SetDesiredFailureMsg(
+            VK_DEBUG_REPORT_ERROR_BIT_EXT,
+            "was created with a sample count of VK_SAMPLE_COUNT_4_BIT but must be VK_SAMPLE_COUNT_1_BIT");
+        vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), src_image.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+                                 dst_buffer.handle(), 1, &copy_region);
+        m_errorMonitor->VerifyFound();
+        m_commandBuffer->end();
+    }
+}
+
+TEST_F(VkLayerTest, BlitImageFormatTypes) {
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkFormat f_unsigned = VK_FORMAT_R8G8B8A8_UINT;
+    VkFormat f_signed = VK_FORMAT_R8G8B8A8_SINT;
+    VkFormat f_float = VK_FORMAT_R32_SFLOAT;
+    VkFormat f_depth = VK_FORMAT_D32_SFLOAT_S8_UINT;
+    VkFormat f_depth2 = VK_FORMAT_D32_SFLOAT;
+
+    if (!ImageFormatIsSupported(gpu(), f_unsigned, VK_IMAGE_TILING_OPTIMAL) ||
+        !ImageFormatIsSupported(gpu(), f_signed, VK_IMAGE_TILING_OPTIMAL) ||
+        !ImageFormatIsSupported(gpu(), f_float, VK_IMAGE_TILING_OPTIMAL) ||
+        !ImageFormatIsSupported(gpu(), f_depth, VK_IMAGE_TILING_OPTIMAL) ||
+        !ImageFormatIsSupported(gpu(), f_depth2, VK_IMAGE_TILING_OPTIMAL)) {
+        printf("%s Requested formats not supported - BlitImageFormatTypes skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    // Note any missing feature bits
+    bool usrc = !ImageFormatAndFeaturesSupported(gpu(), f_unsigned, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_SRC_BIT);
+    bool udst = !ImageFormatAndFeaturesSupported(gpu(), f_unsigned, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_DST_BIT);
+    bool ssrc = !ImageFormatAndFeaturesSupported(gpu(), f_signed, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_SRC_BIT);
+    bool sdst = !ImageFormatAndFeaturesSupported(gpu(), f_signed, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_DST_BIT);
+    bool fsrc = !ImageFormatAndFeaturesSupported(gpu(), f_float, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_SRC_BIT);
+    bool fdst = !ImageFormatAndFeaturesSupported(gpu(), f_float, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_DST_BIT);
+    bool d1dst = !ImageFormatAndFeaturesSupported(gpu(), f_depth, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_DST_BIT);
+    bool d2src = !ImageFormatAndFeaturesSupported(gpu(), f_depth2, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_SRC_BIT);
+
+    VkImageObj unsigned_image(m_device);
+    unsigned_image.Init(64, 64, 1, f_unsigned, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+                        VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(unsigned_image.initialized());
+    unsigned_image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
+
+    VkImageObj signed_image(m_device);
+    signed_image.Init(64, 64, 1, f_signed, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+                      VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(signed_image.initialized());
+    signed_image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
+
+    VkImageObj float_image(m_device);
+    float_image.Init(64, 64, 1, f_float, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL,
+                     0);
+    ASSERT_TRUE(float_image.initialized());
+    float_image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
+
+    VkImageObj depth_image(m_device);
+    depth_image.Init(64, 64, 1, f_depth, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL,
+                     0);
+    ASSERT_TRUE(depth_image.initialized());
+    depth_image.SetLayout(VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_DEPTH_BIT, VK_IMAGE_LAYOUT_GENERAL);
+
+    VkImageObj depth_image2(m_device);
+    depth_image2.Init(64, 64, 1, f_depth2, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+                      VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(depth_image2.initialized());
+    depth_image2.SetLayout(VK_IMAGE_ASPECT_DEPTH_BIT, VK_IMAGE_LAYOUT_GENERAL);
+
+    VkImageBlit blitRegion = {};
+    blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    blitRegion.srcSubresource.baseArrayLayer = 0;
+    blitRegion.srcSubresource.layerCount = 1;
+    blitRegion.srcSubresource.mipLevel = 0;
+    blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    blitRegion.dstSubresource.baseArrayLayer = 0;
+    blitRegion.dstSubresource.layerCount = 1;
+    blitRegion.dstSubresource.mipLevel = 0;
+    blitRegion.srcOffsets[0] = {0, 0, 0};
+    blitRegion.srcOffsets[1] = {64, 64, 1};
+    blitRegion.dstOffsets[0] = {0, 0, 0};
+    blitRegion.dstOffsets[1] = {32, 32, 1};
+
+    m_commandBuffer->begin();
+
+    // Unsigned int vs not an int
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00230");
+    if (usrc) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-01999");
+    if (fdst) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-02000");
+    vk::CmdBlitImage(m_commandBuffer->handle(), unsigned_image.image(), unsigned_image.Layout(), float_image.image(),
+                     float_image.Layout(), 1, &blitRegion, VK_FILTER_NEAREST);
+    m_errorMonitor->VerifyFound();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00230");
+    if (fsrc) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-01999");
+    if (udst) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-02000");
+    vk::CmdBlitImage(m_commandBuffer->handle(), float_image.image(), float_image.Layout(), unsigned_image.image(),
+                     unsigned_image.Layout(), 1, &blitRegion, VK_FILTER_NEAREST);
+    m_errorMonitor->VerifyFound();
+
+    // Signed int vs not an int,
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00229");
+    if (ssrc) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-01999");
+    if (fdst) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-02000");
+    vk::CmdBlitImage(m_commandBuffer->handle(), signed_image.image(), signed_image.Layout(), float_image.image(),
+                     float_image.Layout(), 1, &blitRegion, VK_FILTER_NEAREST);
+    m_errorMonitor->VerifyFound();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00229");
+    if (fsrc) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-01999");
+    if (sdst) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-02000");
+    vk::CmdBlitImage(m_commandBuffer->handle(), float_image.image(), float_image.Layout(), signed_image.image(),
+                     signed_image.Layout(), 1, &blitRegion, VK_FILTER_NEAREST);
+    m_errorMonitor->VerifyFound();
+
+    // Signed vs Unsigned int - generates both VUs
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00229");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00230");
+    if (ssrc) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-01999");
+    if (udst) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-02000");
+    vk::CmdBlitImage(m_commandBuffer->handle(), signed_image.image(), signed_image.Layout(), unsigned_image.image(),
+                     unsigned_image.Layout(), 1, &blitRegion, VK_FILTER_NEAREST);
+    m_errorMonitor->VerifyFound();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00229");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00230");
+    if (usrc) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-01999");
+    if (sdst) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-02000");
+    vk::CmdBlitImage(m_commandBuffer->handle(), unsigned_image.image(), unsigned_image.Layout(), signed_image.image(),
+                     signed_image.Layout(), 1, &blitRegion, VK_FILTER_NEAREST);
+    m_errorMonitor->VerifyFound();
+
+    // Depth vs any non-identical depth format
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00231");
+    blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+    blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+    if (d2src) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-01999");
+    if (d1dst) m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-02000");
+    vk::CmdBlitImage(m_commandBuffer->handle(), depth_image2.image(), depth_image2.Layout(), depth_image.image(),
+                     depth_image.Layout(), 1, &blitRegion, VK_FILTER_NEAREST);
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, BlitImageFilters) {
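+    TEST_DESCRIPTION("Blit filters require matching format feature bits; CUBIC_IMG also restricts the source image type.");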
+    bool cubic_support = false;
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceExtensionSupported(gpu(), nullptr, "VK_IMG_filter_cubic")) {
+        m_device_extension_names.push_back("VK_IMG_filter_cubic");
+        cubic_support = true;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    VkFormat fmt = VK_FORMAT_R8_UINT;
+    if (!ImageFormatIsSupported(gpu(), fmt, VK_IMAGE_TILING_OPTIMAL)) {
+        printf("%s No R8_UINT format support - BlitImageFilters skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    // Create 2D images
+    VkImageObj src2D(m_device);
+    VkImageObj dst2D(m_device);
+    src2D.Init(64, 64, 1, fmt, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    dst2D.Init(64, 64, 1, fmt, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(src2D.initialized());
+    ASSERT_TRUE(dst2D.initialized());
+    src2D.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
+    dst2D.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
+
+    // Create 3D image
+    VkImageCreateInfo ci;
+    ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    ci.pNext = NULL;
+    ci.flags = 0;
+    ci.imageType = VK_IMAGE_TYPE_3D;
+    ci.format = fmt;
+    ci.extent = {64, 64, 4};
+    ci.mipLevels = 1;
+    ci.arrayLayers = 1;
+    ci.samples = VK_SAMPLE_COUNT_1_BIT;
+    ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+    ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+    ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    ci.queueFamilyIndexCount = 0;
+    ci.pQueueFamilyIndices = NULL;
+    ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+    VkImageObj src3D(m_device);
+    src3D.init(&ci);
+    ASSERT_TRUE(src3D.initialized());
+
+    VkImageBlit blitRegion = {};
+    blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    blitRegion.srcSubresource.baseArrayLayer = 0;
+    blitRegion.srcSubresource.layerCount = 1;
+    blitRegion.srcSubresource.mipLevel = 0;
+    blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    blitRegion.dstSubresource.baseArrayLayer = 0;
+    blitRegion.dstSubresource.layerCount = 1;
+    blitRegion.dstSubresource.mipLevel = 0;
+    blitRegion.srcOffsets[0] = {0, 0, 0};
+    blitRegion.srcOffsets[1] = {48, 48, 1};
+    blitRegion.dstOffsets[0] = {0, 0, 0};
+    blitRegion.dstOffsets[1] = {64, 64, 1};
+
+    m_commandBuffer->begin();
+
+    // UINT format should not support linear filtering, but check to be sure
+    if (!ImageFormatAndFeaturesSupported(gpu(), fmt, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT)) {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-filter-02001");
+        vk::CmdBlitImage(m_commandBuffer->handle(), src2D.image(), src2D.Layout(), dst2D.image(), dst2D.Layout(), 1, &blitRegion,
+                         VK_FILTER_LINEAR);
+        m_errorMonitor->VerifyFound();
+    }
+
+    if (cubic_support && !ImageFormatAndFeaturesSupported(gpu(), fmt, VK_IMAGE_TILING_OPTIMAL,
+                                                          VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG)) {
+        // Invalid filter CUBIC_IMG
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-filter-02002");
+        vk::CmdBlitImage(m_commandBuffer->handle(), src3D.image(), src3D.Layout(), dst2D.image(), dst2D.Layout(), 1, &blitRegion,
+                         VK_FILTER_CUBIC_IMG);
+        m_errorMonitor->VerifyFound();
+
+        // Invalid filter CUBIC_IMG + invalid 2D source image
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-filter-02002");
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-filter-00237");
+        vk::CmdBlitImage(m_commandBuffer->handle(), src2D.image(), src2D.Layout(), dst2D.image(), dst2D.Layout(), 1, &blitRegion,
+                         VK_FILTER_CUBIC_IMG);
+        m_errorMonitor->VerifyFound();
+    }
+
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, BlitImageLayout) {
+    TEST_DESCRIPTION("Incorrect vkCmdBlitImage layouts");
+
+    ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+    VkResult err;
+    VkFormat fmt = VK_FORMAT_R8G8B8A8_UNORM;
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+
+    // Create images
+    VkImageObj img_src_transfer(m_device);
+    VkImageObj img_dst_transfer(m_device);
+    VkImageObj img_general(m_device);
+    VkImageObj img_color(m_device);
+
+    img_src_transfer.InitNoLayout(64, 64, 1, fmt, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+                                  VK_IMAGE_TILING_OPTIMAL, 0);
+    img_dst_transfer.InitNoLayout(64, 64, 1, fmt, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+                                  VK_IMAGE_TILING_OPTIMAL, 0);
+    img_general.InitNoLayout(64, 64, 1, fmt, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+                             VK_IMAGE_TILING_OPTIMAL, 0);
+    img_color.InitNoLayout(64, 64, 1, fmt,
+                           VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+                           VK_IMAGE_TILING_OPTIMAL, 0);
+
+    ASSERT_TRUE(img_src_transfer.initialized());
+    ASSERT_TRUE(img_dst_transfer.initialized());
+    ASSERT_TRUE(img_general.initialized());
+    ASSERT_TRUE(img_color.initialized());
+
+    img_src_transfer.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+    img_dst_transfer.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+    img_general.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
+    img_color.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
+
+    VkImageBlit blit_region = {};
+    blit_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    blit_region.srcSubresource.baseArrayLayer = 0;
+    blit_region.srcSubresource.layerCount = 1;
+    blit_region.srcSubresource.mipLevel = 0;
+    blit_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    blit_region.dstSubresource.baseArrayLayer = 0;
+    blit_region.dstSubresource.layerCount = 1;
+    blit_region.dstSubresource.mipLevel = 0;
+    blit_region.srcOffsets[0] = {0, 0, 0};
+    blit_region.srcOffsets[1] = {48, 48, 1};
+    blit_region.dstOffsets[0] = {0, 0, 0};
+    blit_region.dstOffsets[1] = {64, 64, 1};
+
+    m_commandBuffer->begin();
+
+    // Illegal srcImageLayout
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImageLayout-00222");
+    vk::CmdBlitImage(m_commandBuffer->handle(), img_src_transfer.image(), VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+                     img_dst_transfer.image(), img_dst_transfer.Layout(), 1, &blit_region, VK_FILTER_LINEAR);
+    m_errorMonitor->VerifyFound();
+
+    // Illegal destImageLayout
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImageLayout-00227");
+    vk::CmdBlitImage(m_commandBuffer->handle(), img_src_transfer.image(), img_src_transfer.Layout(), img_dst_transfer.image(),
+                     VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, 1, &blit_region, VK_FILTER_LINEAR);
+
+    m_commandBuffer->end();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+
+    err = vk::QueueWaitIdle(m_device->m_queue);
+    ASSERT_VK_SUCCESS(err);
+
+    m_commandBuffer->reset(0);
+    m_commandBuffer->begin();
+
+    // Source image in invalid layout at start of the CB
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-CoreValidation-DrawState-InvalidImageLayout");
+    vk::CmdBlitImage(m_commandBuffer->handle(), img_src_transfer.image(), img_src_transfer.Layout(), img_color.image(),
+                     VK_IMAGE_LAYOUT_GENERAL, 1, &blit_region, VK_FILTER_LINEAR);
+
+    m_commandBuffer->end();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+    err = vk::QueueWaitIdle(m_device->m_queue);
+    ASSERT_VK_SUCCESS(err);
+
+    m_commandBuffer->reset(0);
+    m_commandBuffer->begin();
+
+    // Destination image in invalid layout at start of the CB
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-CoreValidation-DrawState-InvalidImageLayout");
+    vk::CmdBlitImage(m_commandBuffer->handle(), img_color.image(), VK_IMAGE_LAYOUT_GENERAL, img_dst_transfer.image(),
+                     img_dst_transfer.Layout(), 1, &blit_region, VK_FILTER_LINEAR);
+
+    m_commandBuffer->end();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+    err = vk::QueueWaitIdle(m_device->m_queue);
+    ASSERT_VK_SUCCESS(err);
+
+    // Source image in invalid layout in the middle of CB
+    m_commandBuffer->reset(0);
+    m_commandBuffer->begin();
+
+    VkImageMemoryBarrier img_barrier = {};
+    img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+    img_barrier.pNext = nullptr;
+    img_barrier.srcAccessMask = 0;
+    img_barrier.dstAccessMask = 0;
+    img_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
+    img_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
+    img_barrier.image = img_general.handle();
+    img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    img_barrier.subresourceRange.baseArrayLayer = 0;
+    img_barrier.subresourceRange.baseMipLevel = 0;
+    img_barrier.subresourceRange.layerCount = 1;
+    img_barrier.subresourceRange.levelCount = 1;
+
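+    // Transition img_general to TRANSFER_DST_OPTIMAL; the blit below still claims GENERAL,
+    // so the recorded layout no longer matches at submit time.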
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0,
+                           nullptr, 0, nullptr, 1, &img_barrier);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImageLayout-00221");
+    vk::CmdBlitImage(m_commandBuffer->handle(), img_general.image(), VK_IMAGE_LAYOUT_GENERAL, img_dst_transfer.image(),
+                     img_dst_transfer.Layout(), 1, &blit_region, VK_FILTER_LINEAR);
+
+    m_commandBuffer->end();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+    err = vk::QueueWaitIdle(m_device->m_queue);
+    ASSERT_VK_SUCCESS(err);
+
+    // Destination image in invalid layout in the middle of CB
+    m_commandBuffer->reset(0);
+    m_commandBuffer->begin();
+
+    img_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
+    img_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
+    img_barrier.image = img_dst_transfer.handle();
+
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0,
+                           nullptr, 0, nullptr, 1, &img_barrier);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImageLayout-00226");
+    vk::CmdBlitImage(m_commandBuffer->handle(), img_src_transfer.image(), img_src_transfer.Layout(), img_dst_transfer.image(),
+                     VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &blit_region, VK_FILTER_LINEAR);
+
+    m_commandBuffer->end();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+    err = vk::QueueWaitIdle(m_device->m_queue);
+    ASSERT_VK_SUCCESS(err);
+}
+
+TEST_F(VkLayerTest, BlitImageOffsets) {
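+    TEST_DESCRIPTION("Blit offsets must match image dimensionality and stay within the source and destination extents.");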
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkFormat fmt = VK_FORMAT_R8G8B8A8_UNORM;
+    if (!ImageFormatAndFeaturesSupported(gpu(), fmt, VK_IMAGE_TILING_OPTIMAL,
+                                         VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT)) {
+        printf("%s No blit feature bits - BlitImageOffsets skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    VkImageCreateInfo ci;
+    ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    ci.pNext = NULL;
+    ci.flags = 0;
+    ci.imageType = VK_IMAGE_TYPE_1D;
+    ci.format = fmt;
+    ci.extent = {64, 1, 1};
+    ci.mipLevels = 1;
+    ci.arrayLayers = 1;
+    ci.samples = VK_SAMPLE_COUNT_1_BIT;
+    ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+    ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    ci.queueFamilyIndexCount = 0;
+    ci.pQueueFamilyIndices = NULL;
+    ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+    VkImageObj image_1D(m_device);
+    image_1D.init(&ci);
+    ASSERT_TRUE(image_1D.initialized());
+
+    ci.imageType = VK_IMAGE_TYPE_2D;
+    ci.extent = {64, 64, 1};
+    VkImageObj image_2D(m_device);
+    image_2D.init(&ci);
+    ASSERT_TRUE(image_2D.initialized());
+
+    ci.imageType = VK_IMAGE_TYPE_3D;
+    ci.extent = {64, 64, 64};
+    VkImageObj image_3D(m_device);
+    image_3D.init(&ci);
+    ASSERT_TRUE(image_3D.initialized());
+
+    VkImageBlit blit_region = {};
+    blit_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    blit_region.srcSubresource.baseArrayLayer = 0;
+    blit_region.srcSubresource.layerCount = 1;
+    blit_region.srcSubresource.mipLevel = 0;
+    blit_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    blit_region.dstSubresource.baseArrayLayer = 0;
+    blit_region.dstSubresource.layerCount = 1;
+    blit_region.dstSubresource.mipLevel = 0;
+
+    m_commandBuffer->begin();
+
+    // 1D, with src/dest y offsets other than (0,1)
+    blit_region.srcOffsets[0] = {0, 1, 0};
+    blit_region.srcOffsets[1] = {30, 1, 1};
+    blit_region.dstOffsets[0] = {32, 0, 0};
+    blit_region.dstOffsets[1] = {64, 1, 1};
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-srcImage-00245");
+    vk::CmdBlitImage(m_commandBuffer->handle(), image_1D.image(), image_1D.Layout(), image_1D.image(), image_1D.Layout(), 1,
+                     &blit_region, VK_FILTER_NEAREST);
+    m_errorMonitor->VerifyFound();
+
+    blit_region.srcOffsets[0] = {0, 0, 0};
+    blit_region.dstOffsets[0] = {32, 1, 0};
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-dstImage-00250");
+    vk::CmdBlitImage(m_commandBuffer->handle(), image_1D.image(), image_1D.Layout(), image_1D.image(), image_1D.Layout(), 1,
+                     &blit_region, VK_FILTER_NEAREST);
+    m_errorMonitor->VerifyFound();
+
+    // 2D, with src/dest z offsets other than (0,1)
+    blit_region.srcOffsets[0] = {0, 0, 1};
+    blit_region.srcOffsets[1] = {24, 31, 1};
+    blit_region.dstOffsets[0] = {32, 32, 0};
+    blit_region.dstOffsets[1] = {64, 64, 1};
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-srcImage-00247");
+    vk::CmdBlitImage(m_commandBuffer->handle(), image_2D.image(), image_2D.Layout(), image_2D.image(), image_2D.Layout(), 1,
+                     &blit_region, VK_FILTER_NEAREST);
+    m_errorMonitor->VerifyFound();
+
+    blit_region.srcOffsets[0] = {0, 0, 0};
+    blit_region.dstOffsets[0] = {32, 32, 1};
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-dstImage-00252");
+    vk::CmdBlitImage(m_commandBuffer->handle(), image_2D.image(), image_2D.Layout(), image_2D.image(), image_2D.Layout(), 1,
+                     &blit_region, VK_FILTER_NEAREST);
+    m_errorMonitor->VerifyFound();
+
+    // Source offsets exceeding source image dimensions
+    blit_region.srcOffsets[0] = {0, 0, 0};
+    blit_region.srcOffsets[1] = {65, 64, 1};  // src x
+    blit_region.dstOffsets[0] = {0, 0, 0};
+    blit_region.dstOffsets[1] = {64, 64, 1};
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-srcOffset-00243");    // x
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-pRegions-00215");  // src region
+    vk::CmdBlitImage(m_commandBuffer->handle(), image_3D.image(), image_3D.Layout(), image_2D.image(), image_2D.Layout(), 1,
+                     &blit_region, VK_FILTER_NEAREST);
+    m_errorMonitor->VerifyFound();
+
+    blit_region.srcOffsets[1] = {64, 65, 1};                                                                    // src y
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-srcOffset-00244");    // y
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-pRegions-00215");  // src region
+    vk::CmdBlitImage(m_commandBuffer->handle(), image_3D.image(), image_3D.Layout(), image_2D.image(), image_2D.Layout(), 1,
+                     &blit_region, VK_FILTER_NEAREST);
+    m_errorMonitor->VerifyFound();
+
+    blit_region.srcOffsets[0] = {0, 0, 65};  // src z
+    blit_region.srcOffsets[1] = {64, 64, 64};
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-srcOffset-00246");    // z
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-pRegions-00215");  // src region
+    vk::CmdBlitImage(m_commandBuffer->handle(), image_3D.image(), image_3D.Layout(), image_2D.image(), image_2D.Layout(), 1,
+                     &blit_region, VK_FILTER_NEAREST);
+    m_errorMonitor->VerifyFound();
+
+    // Dest offsets exceeding destination image dimensions
+    blit_region.srcOffsets[0] = {0, 0, 0};
+    blit_region.srcOffsets[1] = {64, 64, 1};
+    blit_region.dstOffsets[0] = {96, 64, 32};  // dst x
+    blit_region.dstOffsets[1] = {64, 0, 33};
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-dstOffset-00248");    // x
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-pRegions-00216");  // dst region
+    vk::CmdBlitImage(m_commandBuffer->handle(), image_2D.image(), image_2D.Layout(), image_3D.image(), image_3D.Layout(), 1,
+                     &blit_region, VK_FILTER_NEAREST);
+    m_errorMonitor->VerifyFound();
+
+    blit_region.dstOffsets[0] = {0, 65, 32};                                                                    // dst y
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-dstOffset-00249");    // y
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-pRegions-00216");  // dst region
+    vk::CmdBlitImage(m_commandBuffer->handle(), image_2D.image(), image_2D.Layout(), image_3D.image(), image_3D.Layout(), 1,
+                     &blit_region, VK_FILTER_NEAREST);
+    m_errorMonitor->VerifyFound();
+
+    blit_region.dstOffsets[0] = {0, 64, 65};  // dst z
+    blit_region.dstOffsets[1] = {64, 0, 64};
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-dstOffset-00251");    // z
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-pRegions-00216");  // dst region
+    vk::CmdBlitImage(m_commandBuffer->handle(), image_2D.image(), image_2D.Layout(), image_3D.image(), image_3D.Layout(), 1,
+                     &blit_region, VK_FILTER_NEAREST);
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, MiscBlitImageTests) {
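+    TEST_DESCRIPTION("Assorted blit errors: bad aspect masks, mip levels, array layers, multisample and 3D restrictions.");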
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkFormat f_color = VK_FORMAT_R32_SFLOAT;  // Need features ..BLIT_SRC_BIT & ..BLIT_DST_BIT
+
+    if (!ImageFormatAndFeaturesSupported(gpu(), f_color, VK_IMAGE_TILING_OPTIMAL,
+                                         VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT)) {
+        printf("%s Requested format features unavailable - MiscBlitImageTests skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    VkImageCreateInfo ci;
+    ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    ci.pNext = NULL;
+    ci.flags = 0;
+    ci.imageType = VK_IMAGE_TYPE_2D;
+    ci.format = f_color;
+    ci.extent = {64, 64, 1};
+    ci.mipLevels = 1;
+    ci.arrayLayers = 1;
+    ci.samples = VK_SAMPLE_COUNT_1_BIT;
+    ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+    ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    ci.queueFamilyIndexCount = 0;
+    ci.pQueueFamilyIndices = NULL;
+    ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+    // 2D color image
+    VkImageObj color_img(m_device);
+    color_img.init(&ci);
+    ASSERT_TRUE(color_img.initialized());
+
+    // 2D multi-sample image
+    ci.samples = VK_SAMPLE_COUNT_4_BIT;
+    VkImageObj ms_img(m_device);
+    ms_img.init(&ci);
+    ASSERT_TRUE(ms_img.initialized());
+
+    // 3D color image
+    ci.samples = VK_SAMPLE_COUNT_1_BIT;
+    ci.imageType = VK_IMAGE_TYPE_3D;
+    ci.extent = {64, 64, 8};
+    VkImageObj color_3D_img(m_device);
+    color_3D_img.init(&ci);
+    ASSERT_TRUE(color_3D_img.initialized());
+
+    VkImageBlit blitRegion = {};
+    blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    blitRegion.srcSubresource.baseArrayLayer = 0;
+    blitRegion.srcSubresource.layerCount = 1;
+    blitRegion.srcSubresource.mipLevel = 0;
+    blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    blitRegion.dstSubresource.baseArrayLayer = 0;
+    blitRegion.dstSubresource.layerCount = 1;
+    blitRegion.dstSubresource.mipLevel = 0;
+    blitRegion.srcOffsets[0] = {0, 0, 0};
+    blitRegion.srcOffsets[1] = {16, 16, 1};
+    blitRegion.dstOffsets[0] = {32, 32, 0};
+    blitRegion.dstOffsets[1] = {64, 64, 1};
+
+    m_commandBuffer->begin();
+
+    // Blit with aspectMask errors
+    blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+    blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-aspectMask-00241");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-aspectMask-00242");
+    vk::CmdBlitImage(m_commandBuffer->handle(), color_img.image(), color_img.Layout(), color_img.image(), color_img.Layout(), 1,
+                     &blitRegion, VK_FILTER_NEAREST);
+    m_errorMonitor->VerifyFound();
+
+    // Blit with invalid src mip level
+    blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    blitRegion.srcSubresource.mipLevel = ci.mipLevels;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdBlitImage-srcSubresource-01705");  // invalid srcSubresource.mipLevel
+    // Redundant unavoidable errors
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkImageBlit-srcOffset-00243");  // out-of-bounds srcOffset.x
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkImageBlit-srcOffset-00244");  // out-of-bounds srcOffset.y
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkImageBlit-srcOffset-00246");  // out-of-bounds srcOffset.z
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdBlitImage-pRegions-00215");  // region not contained within src image
+    vk::CmdBlitImage(m_commandBuffer->handle(), color_img.image(), color_img.Layout(), color_img.image(), color_img.Layout(), 1,
+                     &blitRegion, VK_FILTER_NEAREST);
+    m_errorMonitor->VerifyFound();
+
+    // Blit with invalid dst mip level
+    blitRegion.srcSubresource.mipLevel = 0;
+    blitRegion.dstSubresource.mipLevel = ci.mipLevels;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdBlitImage-dstSubresource-01706");  // invalid dstSubresource.mipLevel
+    // Redundant unavoidable errors
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkImageBlit-dstOffset-00248");  // out-of-bounds dstOffset.x
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkImageBlit-dstOffset-00249");  // out-of-bounds dstOffset.y
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkImageBlit-dstOffset-00251");  // out-of-bounds dstOffset.z
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdBlitImage-pRegions-00216");  // region not contained within dst image
+    vk::CmdBlitImage(m_commandBuffer->handle(), color_img.image(), color_img.Layout(), color_img.image(), color_img.Layout(), 1,
+                     &blitRegion, VK_FILTER_NEAREST);
+    m_errorMonitor->VerifyFound();
+
+    // Blit with invalid src array layer
+    blitRegion.dstSubresource.mipLevel = 0;
+    blitRegion.srcSubresource.baseArrayLayer = ci.arrayLayers;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdBlitImage-srcSubresource-01707");  // invalid srcSubresource layer range
+    vk::CmdBlitImage(m_commandBuffer->handle(), color_img.image(), color_img.Layout(), color_img.image(), color_img.Layout(), 1,
+                     &blitRegion, VK_FILTER_NEAREST);
+    m_errorMonitor->VerifyFound();
+
+    // Blit with invalid dst array layer
+    blitRegion.srcSubresource.baseArrayLayer = 0;
+    blitRegion.dstSubresource.baseArrayLayer = ci.arrayLayers;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdBlitImage-dstSubresource-01708");  // invalid dstSubresource layer range
+    vk::CmdBlitImage(m_commandBuffer->handle(), color_img.image(), color_img.Layout(), color_img.image(), color_img.Layout(), 1,
+                     &blitRegion, VK_FILTER_NEAREST);
+    m_errorMonitor->VerifyFound();
+
+    blitRegion.dstSubresource.baseArrayLayer = 0;
+
+    // Blit multi-sample image
+    // TODO: redundant VUs, one (1c8) or two (1d2 & 1d4) should be eliminated.
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00228");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-srcImage-00233");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-00234");
+    vk::CmdBlitImage(m_commandBuffer->handle(), ms_img.image(), ms_img.Layout(), ms_img.image(), ms_img.Layout(), 1, &blitRegion,
+                     VK_FILTER_NEAREST);
+    m_errorMonitor->VerifyFound();
+
+    // Blit 3D with baseArrayLayer != 0 or layerCount != 1
+    blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    blitRegion.srcSubresource.baseArrayLayer = 1;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-srcImage-00240");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdBlitImage-srcSubresource-01707");  // base+count > total layer count
+    vk::CmdBlitImage(m_commandBuffer->handle(), color_3D_img.image(), color_3D_img.Layout(), color_3D_img.image(),
+                     color_3D_img.Layout(), 1, &blitRegion, VK_FILTER_NEAREST);
+    m_errorMonitor->VerifyFound();
+    blitRegion.srcSubresource.baseArrayLayer = 0;
+    blitRegion.srcSubresource.layerCount = 0;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageBlit-srcImage-00240");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkImageSubresourceLayers-layerCount-01700");  // layer count == 0 (src)
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkImageBlit-layerCount-00239");  // src/dst layer count mismatch
+    vk::CmdBlitImage(m_commandBuffer->handle(), color_3D_img.image(), color_3D_img.Layout(), color_3D_img.image(),
+                     color_3D_img.Layout(), 1, &blitRegion, VK_FILTER_NEAREST);
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, BlitToDepthImageTests) {
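+    TEST_DESCRIPTION("Blitting to a depth format that lacks VK_FORMAT_FEATURE_BLIT_DST_BIT must fail.");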
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // Need feature ..BLIT_SRC_BIT but not ..BLIT_DST_BIT
+    // TODO: provide more choices here; supporting D32_SFLOAT as BLIT_DST isn't unheard of.
+    VkFormat f_depth = VK_FORMAT_D32_SFLOAT;
+
+    if (!ImageFormatAndFeaturesSupported(gpu(), f_depth, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_SRC_BIT) ||
+        ImageFormatAndFeaturesSupported(gpu(), f_depth, VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_BLIT_DST_BIT)) {
+        printf("%s Requested format features unavailable - BlitToDepthImageTests skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    VkImageCreateInfo ci;
+    ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    ci.pNext = NULL;
+    ci.flags = 0;
+    ci.imageType = VK_IMAGE_TYPE_2D;
+    ci.format = f_depth;
+    ci.extent = {64, 64, 1};
+    ci.mipLevels = 1;
+    ci.arrayLayers = 1;
+    ci.samples = VK_SAMPLE_COUNT_1_BIT;
+    ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+    ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    ci.queueFamilyIndexCount = 0;
+    ci.pQueueFamilyIndices = NULL;
+    ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+    // 2D depth image
+    VkImageObj depth_img(m_device);
+    depth_img.init(&ci);
+    ASSERT_TRUE(depth_img.initialized());
+
+    VkImageBlit blitRegion = {};
+    blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    blitRegion.srcSubresource.baseArrayLayer = 0;
+    blitRegion.srcSubresource.layerCount = 1;
+    blitRegion.srcSubresource.mipLevel = 0;
+    blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    blitRegion.dstSubresource.baseArrayLayer = 0;
+    blitRegion.dstSubresource.layerCount = 1;
+    blitRegion.dstSubresource.mipLevel = 0;
+    blitRegion.srcOffsets[0] = {0, 0, 0};
+    blitRegion.srcOffsets[1] = {16, 16, 1};
+    blitRegion.dstOffsets[0] = {32, 32, 0};
+    blitRegion.dstOffsets[1] = {64, 64, 1};
+
+    m_commandBuffer->begin();
+
+    // Blit depth image - has SRC_BIT but not DST_BIT
+    blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+    blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBlitImage-dstImage-02000");
+    vk::CmdBlitImage(m_commandBuffer->handle(), depth_img.image(), depth_img.Layout(), depth_img.image(), depth_img.Layout(), 1,
+                     &blitRegion, VK_FILTER_NEAREST);
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, MinImageTransferGranularity) {
+    TEST_DESCRIPTION("Tests for validation of Queue Family property minImageTransferGranularity.");
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    auto queue_family_properties = m_device->phy().queue_properties();
+    auto large_granularity_family =
+        std::find_if(queue_family_properties.begin(), queue_family_properties.end(), [](VkQueueFamilyProperties family_properties) {
+            VkExtent3D family_granularity = family_properties.minImageTransferGranularity;
+            // We need a queue family that supports copy operations and has a large enough minImageTransferGranularity for the tests
+            // below to make sense.
+            return (family_properties.queueFlags & VK_QUEUE_TRANSFER_BIT || family_properties.queueFlags & VK_QUEUE_GRAPHICS_BIT ||
+                    family_properties.queueFlags & VK_QUEUE_COMPUTE_BIT) &&
+                   family_granularity.depth >= 4 && family_granularity.width >= 4 && family_granularity.height >= 4;
+        });
+
+    if (large_granularity_family == queue_family_properties.end()) {
+        printf("%s No queue family has a large enough granularity for this test to be meaningful, skipping test\n", kSkipPrefix);
+        return;
+    }
+    const size_t queue_family_index = std::distance(queue_family_properties.begin(), large_granularity_family);
+    VkExtent3D granularity = queue_family_properties[queue_family_index].minImageTransferGranularity;
+    VkCommandPoolObj command_pool(m_device, queue_family_index, 0);
+
+    // Create two images of different types and try to copy between them
+    VkImage srcImage;
+    VkImage dstImage;
+
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_3D;
+    image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
+    image_create_info.extent.width = granularity.width * 2;
+    image_create_info.extent.height = granularity.height * 2;
+    image_create_info.extent.depth = granularity.depth * 2;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+    image_create_info.flags = 0;
+
+    VkImageObj src_image_obj(m_device);
+    src_image_obj.init(&image_create_info);
+    ASSERT_TRUE(src_image_obj.initialized());
+    srcImage = src_image_obj.handle();
+
+    image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+
+    VkImageObj dst_image_obj(m_device);
+    dst_image_obj.init(&image_create_info);
+    ASSERT_TRUE(dst_image_obj.initialized());
+    dstImage = dst_image_obj.handle();
+
+    VkCommandBufferObj command_buffer(m_device, &command_pool);
+    ASSERT_TRUE(command_buffer.initialized());
+    command_buffer.begin();
+
+    VkImageCopy copyRegion;
+    copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copyRegion.srcSubresource.mipLevel = 0;
+    copyRegion.srcSubresource.baseArrayLayer = 0;
+    copyRegion.srcSubresource.layerCount = 1;
+    copyRegion.srcOffset.x = 0;
+    copyRegion.srcOffset.y = 0;
+    copyRegion.srcOffset.z = 0;
+    copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copyRegion.dstSubresource.mipLevel = 0;
+    copyRegion.dstSubresource.baseArrayLayer = 0;
+    copyRegion.dstSubresource.layerCount = 1;
+    copyRegion.dstOffset.x = 0;
+    copyRegion.dstOffset.y = 0;
+    copyRegion.dstOffset.z = 0;
+    copyRegion.extent.width = granularity.width;
+    copyRegion.extent.height = granularity.height;
+    copyRegion.extent.depth = granularity.depth;
+
+    // Introduce failure by setting srcOffset to a bad granularity value
+    copyRegion.srcOffset.y = 3;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyImage-srcOffset-01783");  // srcOffset image transfer granularity
+    command_buffer.CopyImage(srcImage, VK_IMAGE_LAYOUT_GENERAL, dstImage, VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
+    m_errorMonitor->VerifyFound();
+
+    // Introduce failure by setting extent to a granularity value that is bad
+    // for both the source and destination image.
+    copyRegion.srcOffset.y = 0;
+    copyRegion.extent.width = 3;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyImage-srcOffset-01783");  // src extent image transfer granularity
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyImage-dstOffset-01784");  // dst extent image transfer granularity
+    command_buffer.CopyImage(srcImage, VK_IMAGE_LAYOUT_GENERAL, dstImage, VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
+    m_errorMonitor->VerifyFound();
+
+    // Now do some buffer/image copies
+    VkBufferObj buffer;
+    VkMemoryPropertyFlags reqs = 0;
+    buffer.init_as_src_and_dst(*m_device, 8 * granularity.height * granularity.width * granularity.depth, reqs);
+    VkBufferImageCopy region = {};
+    region.bufferOffset = 0;
+    region.bufferRowLength = 0;
+    region.bufferImageHeight = 0;
+    region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    region.imageSubresource.layerCount = 1;
+    region.imageExtent.height = granularity.height;
+    region.imageExtent.width = granularity.width;
+    region.imageExtent.depth = granularity.depth;
+    region.imageOffset.x = 0;
+    region.imageOffset.y = 0;
+    region.imageOffset.z = 0;
+
+    // Introduce failure by setting imageExtent to a bad granularity value
+    region.imageExtent.width = 3;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyImageToBuffer-imageOffset-01794");  // image transfer granularity
+    vk::CmdCopyImageToBuffer(command_buffer.handle(), srcImage, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, buffer.handle(), 1, &region);
+    m_errorMonitor->VerifyFound();
+    region.imageExtent.width = granularity.width;
+
+    // Introduce failure by setting imageOffset to a bad granularity value
+    region.imageOffset.z = 3;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyBufferToImage-imageOffset-01793");  // image transfer granularity
+    vk::CmdCopyBufferToImage(command_buffer.handle(), buffer.handle(), dstImage, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
+    m_errorMonitor->VerifyFound();
+
+    command_buffer.end();
+}
+
+TEST_F(VkLayerTest, ImageBarrierSubpassConflicts) {
+    TEST_DESCRIPTION("Add a pipeline barrier within a subpass that has conflicting state");
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // A renderpass with a single subpass that declared a self-dependency
+    VkAttachmentDescription attach[] = {
+        {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+         VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_UNDEFINED,
+         VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+    };
+    VkAttachmentReference ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+    VkSubpassDescription subpasses[] = {
+        {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &ref, nullptr, nullptr, 0, nullptr},
+    };
+    VkSubpassDependency dep = {0,
+                               0,
+                               VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+                               VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+                               VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+                               VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+                               VK_DEPENDENCY_BY_REGION_BIT};
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attach, 1, subpasses, 1, &dep};
+    VkRenderPass rp;
+    VkRenderPass rp_noselfdep;
+
+    VkResult err = vk::CreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+    ASSERT_VK_SUCCESS(err);
+    rpci.dependencyCount = 0;
+    rpci.pDependencies = nullptr;
+    err = vk::CreateRenderPass(m_device->device(), &rpci, nullptr, &rp_noselfdep);
+    ASSERT_VK_SUCCESS(err);
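+    // rp declares a matching self-dependency; rp_noselfdep does not, so any pipeline barrier
+    // recorded inside its subpass must trigger VUID-vkCmdPipelineBarrier-pDependencies-02285.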
+
+    VkImageObj image(m_device);
+    image.InitNoLayout(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    VkImageView imageView = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
+
+    VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &imageView, 32, 32, 1};
+    VkFramebuffer fb;
+    err = vk::CreateFramebuffer(m_device->device(), &fbci, nullptr, &fb);
+    ASSERT_VK_SUCCESS(err);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
+    m_commandBuffer->begin();
+    VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
+                                  nullptr,
+                                  rp_noselfdep,
+                                  fb,
+                                  {{
+                                       0,
+                                       0,
+                                   },
+                                   {32, 32}},
+                                  0,
+                                  nullptr};
+
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+    VkMemoryBarrier mem_barrier = {};
+    mem_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
+    mem_barrier.pNext = NULL;
+    mem_barrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
+    mem_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 1,
+                           &mem_barrier, 0, nullptr, 0, nullptr);
+    m_errorMonitor->VerifyFound();
+    vk::CmdEndRenderPass(m_commandBuffer->handle());
+
+    rpbi.renderPass = rp;
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+    VkImageMemoryBarrier img_barrier = {};
+    img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+    img_barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+    img_barrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+    img_barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    img_barrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    img_barrier.image = image.handle();
+    img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    img_barrier.subresourceRange.baseArrayLayer = 0;
+    img_barrier.subresourceRange.baseMipLevel = 0;
+    img_barrier.subresourceRange.layerCount = 1;
+    img_barrier.subresourceRange.levelCount = 1;
+    // Mis-match src stage mask
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+                           VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
+    m_errorMonitor->VerifyFound();
+    // Now mis-match dst stage mask
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_HOST_BIT,
+                           VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
+    m_errorMonitor->VerifyFound();
+    // Set srcQueueFamilyIndex to something other than IGNORED
+    img_barrier.srcQueueFamilyIndex = 0;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-srcQueueFamilyIndex-01182");
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+                           VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
+                           &img_barrier);
+    m_errorMonitor->VerifyFound();
+    img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    // Mis-match mem barrier src access mask
+    mem_barrier = {};
+    mem_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
+    mem_barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
+    mem_barrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+                           VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 1, &mem_barrier, 0, nullptr,
+                           0, nullptr);
+    m_errorMonitor->VerifyFound();
+    // Mis-match mem barrier dst access mask. Also set srcAccessMask to 0 which should not cause an error
+    mem_barrier.srcAccessMask = 0;
+    mem_barrier.dstAccessMask = VK_ACCESS_HOST_WRITE_BIT;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+                           VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 1, &mem_barrier, 0, nullptr,
+                           0, nullptr);
+    m_errorMonitor->VerifyFound();
+    // Mis-match image barrier src access mask
+    img_barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+                           VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
+                           &img_barrier);
+    m_errorMonitor->VerifyFound();
+    // Mis-match image barrier dst access mask
+    img_barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+    img_barrier.dstAccessMask = VK_ACCESS_HOST_WRITE_BIT;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+                           VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
+                           &img_barrier);
+    m_errorMonitor->VerifyFound();
+    // Mis-match dependencyFlags
+    img_barrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-pDependencies-02285");
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+                           VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0 /* wrong */, 0, nullptr, 0, nullptr, 1, &img_barrier);
+    m_errorMonitor->VerifyFound();
+    // Send non-zero bufferMemoryBarrierCount
+    // Construct a valid BufferMemoryBarrier to avoid any parameter errors
+    // First we need a valid buffer to reference
+    VkBufferObj buffer;
+    VkMemoryPropertyFlags mem_reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+    buffer.init_as_src_and_dst(*m_device, 256, mem_reqs);
+    VkBufferMemoryBarrier bmb = {};
+    bmb.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
+    bmb.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
+    bmb.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
+    bmb.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    bmb.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    bmb.buffer = buffer.handle();
+    bmb.offset = 0;
+    bmb.size = VK_WHOLE_SIZE;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-bufferMemoryBarrierCount-01178");
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+                           VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 1, &bmb, 0,
+                           nullptr);
+    m_errorMonitor->VerifyFound();
+    // Add image barrier w/ image handle that's not in framebuffer
+    VkImageObj lone_image(m_device);
+    lone_image.InitNoLayout(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    img_barrier.image = lone_image.handle();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-image-02635");
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+                           VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
+                           &img_barrier);
+    m_errorMonitor->VerifyFound();
+    // Have image barrier with mis-matched layouts
+    img_barrier.image = image.handle();
+    img_barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-oldLayout-01181");
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+                           VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
+                           &img_barrier);
+    m_errorMonitor->VerifyFound();
+
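+    // Inside a render pass instance the barrier's layouts must match the attachment's layout for the current subpass;
+    // GENERAL is expected to be rejected here (the error message set below)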
+    img_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
+    img_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-oldLayout-02636");
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+                           VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
+                           &img_barrier);
+    m_errorMonitor->VerifyFound();
+    vk::CmdEndRenderPass(m_commandBuffer->handle());
+
+    vk::DestroyFramebuffer(m_device->device(), fb, nullptr);
+    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+    vk::DestroyRenderPass(m_device->device(), rp_noselfdep, nullptr);
+}
+
+TEST_F(VkLayerTest, InvalidCmdBufferBufferDestroyed) {
+    TEST_DESCRIPTION("Attempt to draw with a command buffer that is invalid due to a buffer dependency being destroyed.");
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkBuffer buffer;
+    VkDeviceMemory mem;
+    VkMemoryRequirements mem_reqs;
+
+    VkBufferCreateInfo buf_info = {};
+    buf_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    buf_info.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+    buf_info.size = 256;
+    buf_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    VkResult err = vk::CreateBuffer(m_device->device(), &buf_info, NULL, &buffer);
+    ASSERT_VK_SUCCESS(err);
+
+    vk::GetBufferMemoryRequirements(m_device->device(), buffer, &mem_reqs);
+
+    VkMemoryAllocateInfo alloc_info = {};
+    alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    alloc_info.allocationSize = mem_reqs.size;
+    bool pass = false;
+    pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &alloc_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+    if (!pass) {
+        printf("%s Failed to set memory type.\n", kSkipPrefix);
+        vk::DestroyBuffer(m_device->device(), buffer, NULL);
+        return;
+    }
+    err = vk::AllocateMemory(m_device->device(), &alloc_info, NULL, &mem);
+    ASSERT_VK_SUCCESS(err);
+
+    err = vk::BindBufferMemory(m_device->device(), buffer, mem, 0);
+    ASSERT_VK_SUCCESS(err);
+
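+    // Record a command that references the buffer; destroying the buffer before submit invalidates this command buffer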
+    m_commandBuffer->begin();
+    vk::CmdFillBuffer(m_commandBuffer->handle(), buffer, 0, VK_WHOLE_SIZE, 0);
+    m_commandBuffer->end();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkBuffer");
+    // Destroy buffer dependency prior to submit to cause ERROR
+    vk::DestroyBuffer(m_device->device(), buffer, NULL);
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+
+    m_errorMonitor->VerifyFound();
+    vk::QueueWaitIdle(m_device->m_queue);
+    vk::FreeMemory(m_device->handle(), mem, NULL);
+}
+
+TEST_F(VkLayerTest, InvalidCmdBufferBufferViewDestroyed) {
+    TEST_DESCRIPTION("Delete bufferView bound to cmd buffer, then attempt to submit cmd buffer.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    OneOffDescriptorSet descriptor_set(m_device,
+                                       {
+                                           {0, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
+                                       });
+    CreatePipelineHelper pipe(*this);
+    VkBufferCreateInfo buffer_create_info = {};
+    VkBufferViewCreateInfo bvci = {};
+    VkBufferView view;
+
+    {
+        uint32_t queue_family_index = 0;
+        buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+        buffer_create_info.size = 1024;
+        buffer_create_info.usage = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
+        buffer_create_info.queueFamilyIndexCount = 1;
+        buffer_create_info.pQueueFamilyIndices = &queue_family_index;
+        VkBufferObj buffer;
+        buffer.init(*m_device, buffer_create_info);
+
+        bvci.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
+        bvci.buffer = buffer.handle();
+        bvci.format = VK_FORMAT_R32_SFLOAT;
+        bvci.range = VK_WHOLE_SIZE;
+
+        VkResult err = vk::CreateBufferView(m_device->device(), &bvci, NULL, &view);
+        ASSERT_VK_SUCCESS(err);
+
+        descriptor_set.WriteDescriptorBufferView(0, view);
+        descriptor_set.UpdateDescriptorSets();
+
+        char const *fsSource =
+            "#version 450\n"
+            "\n"
+            "layout(set=0, binding=0, r32f) uniform readonly imageBuffer s;\n"
+            "layout(location=0) out vec4 x;\n"
+            "void main(){\n"
+            "   x = imageLoad(s, 0);\n"
+            "}\n";
+        VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+        VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+        pipe.InitInfo();
+        pipe.InitState();
+        pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
+        pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&descriptor_set.layout_});
+        pipe.CreateGraphicsPipeline();
+
+        m_commandBuffer->begin();
+        m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+        VkViewport viewport = {0, 0, 16, 16, 0, 1};
+        vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+        VkRect2D scissor = {{0, 0}, {16, 16}};
+        vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+        // Bind pipeline to cmd buffer - This causes crash on Mali
+        vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
+        vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
+                                  &descriptor_set.set_, 0, nullptr);
+    }
+    // The buffer went out of scope above and has been destroyed.
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Descriptor in binding #0 index 0 is using buffer");
+    m_commandBuffer->Draw(1, 0, 0, 0);
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyBufferView(m_device->device(), view, NULL);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Descriptor in binding #0 index 0 is using bufferView");
+    m_commandBuffer->Draw(1, 0, 0, 0);
+    m_errorMonitor->VerifyFound();
+
+    VkBufferObj buffer;
+    buffer.init(*m_device, buffer_create_info);
+
+    bvci.buffer = buffer.handle();
+    VkResult err = vk::CreateBufferView(m_device->device(), &bvci, NULL, &view);
+    ASSERT_VK_SUCCESS(err);
+    descriptor_set.descriptor_writes.clear();
+    descriptor_set.WriteDescriptorBufferView(0, view);
+    descriptor_set.UpdateDescriptorSets();
+
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
+                              &descriptor_set.set_, 0, nullptr);
+    m_commandBuffer->Draw(1, 0, 0, 0);
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+
+    // Delete BufferView in order to invalidate cmd buffer
+    vk::DestroyBufferView(m_device->device(), view, NULL);
+    // Now attempt submit of cmd buffer
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkBufferView");
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InvalidCmdBufferImageDestroyed) {
+    TEST_DESCRIPTION("Attempt to draw with a command buffer that is invalid due to an image dependency being destroyed.");
+    ASSERT_NO_FATAL_FAILURE(Init());
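+    // Scope the image so it is destroyed before the command buffer is submitted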
+    {
+        const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
+        VkImageCreateInfo image_create_info = {};
+        image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+        image_create_info.pNext = NULL;
+        image_create_info.imageType = VK_IMAGE_TYPE_2D;
+        image_create_info.format = tex_format;
+        image_create_info.extent.width = 32;
+        image_create_info.extent.height = 32;
+        image_create_info.extent.depth = 1;
+        image_create_info.mipLevels = 1;
+        image_create_info.arrayLayers = 1;
+        image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+        image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+        image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+        image_create_info.flags = 0;
+        VkImageObj image(m_device);
+        image.init(&image_create_info);
+
+        m_commandBuffer->begin();
+        VkClearColorValue ccv;
+        ccv.float32[0] = 1.0f;
+        ccv.float32[1] = 1.0f;
+        ccv.float32[2] = 1.0f;
+        ccv.float32[3] = 1.0f;
+        VkImageSubresourceRange isr = {};
+        isr.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+        isr.baseArrayLayer = 0;
+        isr.baseMipLevel = 0;
+        isr.layerCount = 1;
+        isr.levelCount = 1;
+        vk::CmdClearColorImage(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, &ccv, 1, &isr);
+        m_commandBuffer->end();
+    }
+    // Destroy image dependency prior to submit to cause ERROR
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkImage");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkDeviceMemory");
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InvalidCmdBufferFramebufferImageDestroyed) {
+    TEST_DESCRIPTION(
+        "Attempt to draw with a command buffer that is invalid due to a framebuffer image dependency being destroyed.");
+    ASSERT_NO_FATAL_FAILURE(Init());
+    VkFormatProperties format_properties;
+    VkResult err = VK_SUCCESS;
+    vk::GetPhysicalDeviceFormatProperties(gpu(), VK_FORMAT_B8G8R8A8_UNORM, &format_properties);
+    if (!(format_properties.optimalTilingFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) {
+        printf("%s Image format doesn't support required features.\n", kSkipPrefix);
+        return;
+    }
+    VkFramebuffer fb;
+    VkImageView view;
+
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
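+    // Scope the image backing the framebuffer so it is destroyed before the command buffer is submitted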
+    {
+        VkImageCreateInfo image_ci = {};
+        image_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+        image_ci.pNext = NULL;
+        image_ci.imageType = VK_IMAGE_TYPE_2D;
+        image_ci.format = VK_FORMAT_B8G8R8A8_UNORM;
+        image_ci.extent.width = 32;
+        image_ci.extent.height = 32;
+        image_ci.extent.depth = 1;
+        image_ci.mipLevels = 1;
+        image_ci.arrayLayers = 1;
+        image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
+        image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+        image_ci.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
+        image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+        image_ci.flags = 0;
+        VkImageObj image(m_device);
+        image.init(&image_ci);
+
+        VkImageViewCreateInfo ivci = {
+            VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+            nullptr,
+            0,
+            image.handle(),
+            VK_IMAGE_VIEW_TYPE_2D,
+            VK_FORMAT_B8G8R8A8_UNORM,
+            {VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A},
+            {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1},
+        };
+        err = vk::CreateImageView(m_device->device(), &ivci, nullptr, &view);
+        ASSERT_VK_SUCCESS(err);
+
+        VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, m_renderPass, 1, &view, 32, 32, 1};
+        err = vk::CreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
+        ASSERT_VK_SUCCESS(err);
+
+        // Just use default renderpass with our framebuffer
+        m_renderPassBeginInfo.framebuffer = fb;
+        m_renderPassBeginInfo.renderArea.extent.width = 32;
+        m_renderPassBeginInfo.renderArea.extent.height = 32;
+        // Record a minimal (empty) cmd buffer for submit
+        m_commandBuffer->begin();
+        m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+        m_commandBuffer->EndRenderPass();
+        m_commandBuffer->end();
+    }
+    // Destroy image attached to framebuffer to invalidate cmd buffer
+    // Now attempt to submit cmd buffer and verify error
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkImage");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkDeviceMemory");
+    m_commandBuffer->QueueCommandBuffer(false);
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyFramebuffer(m_device->device(), fb, nullptr);
+    vk::DestroyImageView(m_device->device(), view, nullptr);
+}
+
+TEST_F(VkLayerTest, ImageMemoryNotBound) {
+    TEST_DESCRIPTION("Attempt to draw with an image which has not had memory bound to it.");
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkImage image;
+    const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = tex_format;
+    image_create_info.extent.width = 32;
+    image_create_info.extent.height = 32;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    image_create_info.flags = 0;
+    VkResult err = vk::CreateImage(m_device->device(), &image_create_info, NULL, &image);
+    ASSERT_VK_SUCCESS(err);
+    // Have to bind memory to image before recording cmd in cmd buffer using it
+    VkMemoryRequirements mem_reqs;
+    VkDeviceMemory image_mem;
+    bool pass;
+    VkMemoryAllocateInfo mem_alloc = {};
+    mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    mem_alloc.pNext = NULL;
+    mem_alloc.memoryTypeIndex = 0;
+    vk::GetImageMemoryRequirements(m_device->device(), image, &mem_reqs);
+    mem_alloc.allocationSize = mem_reqs.size;
+    pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc, 0);
+    ASSERT_TRUE(pass);
+    err = vk::AllocateMemory(m_device->device(), &mem_alloc, NULL, &image_mem);
+    ASSERT_VK_SUCCESS(err);
+
+    // Introduce error, do not call vk::BindImageMemory(m_device->device(), image, image_mem, 0);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         " used with no memory bound. Memory should be bound by calling vkBindImageMemory().");
+
+    m_commandBuffer->begin();
+    VkClearColorValue ccv;
+    ccv.float32[0] = 1.0f;
+    ccv.float32[1] = 1.0f;
+    ccv.float32[2] = 1.0f;
+    ccv.float32[3] = 1.0f;
+    VkImageSubresourceRange isr = {};
+    isr.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    isr.baseArrayLayer = 0;
+    isr.baseMipLevel = 0;
+    isr.layerCount = 1;
+    isr.levelCount = 1;
+    vk::CmdClearColorImage(m_commandBuffer->handle(), image, VK_IMAGE_LAYOUT_GENERAL, &ccv, 1, &isr);
+    m_commandBuffer->end();
+
+    m_errorMonitor->VerifyFound();
+    vk::DestroyImage(m_device->device(), image, NULL);
+    vk::FreeMemory(m_device->device(), image_mem, nullptr);
+}
+
+TEST_F(VkLayerTest, BufferMemoryNotBound) {
+    TEST_DESCRIPTION("Attempt to copy from a buffer which has not had memory bound to it.");
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkImageObj image(m_device);
+    image.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+               VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(image.initialized());
+
+    VkBuffer buffer;
+    VkDeviceMemory mem;
+    VkMemoryRequirements mem_reqs;
+
+    VkBufferCreateInfo buf_info = {};
+    buf_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    buf_info.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
+    buf_info.size = 1024;
+    buf_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    VkResult err = vk::CreateBuffer(m_device->device(), &buf_info, NULL, &buffer);
+    ASSERT_VK_SUCCESS(err);
+
+    vk::GetBufferMemoryRequirements(m_device->device(), buffer, &mem_reqs);
+
+    VkMemoryAllocateInfo alloc_info = {};
+    alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    alloc_info.allocationSize = 1024;
+    bool pass = false;
+    pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &alloc_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+    if (!pass) {
+        printf("%s Failed to set memory type.\n", kSkipPrefix);
+        vk::DestroyBuffer(m_device->device(), buffer, NULL);
+        return;
+    }
+    err = vk::AllocateMemory(m_device->device(), &alloc_info, NULL, &mem);
+    ASSERT_VK_SUCCESS(err);
+
+    // Introduce failure by not calling vkBindBufferMemory(m_device->device(), buffer, mem, 0);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         " used with no memory bound. Memory should be bound by calling vkBindBufferMemory().");
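+    // The copy region itself is valid; the expected error comes solely from the buffer having no memory bound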
+    VkBufferImageCopy region = {};
+    region.bufferRowLength = 16;
+    region.bufferImageHeight = 16;
+    region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+    region.imageSubresource.layerCount = 1;
+    region.imageExtent.height = 4;
+    region.imageExtent.width = 4;
+    region.imageExtent.depth = 1;
+    m_commandBuffer->begin();
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer, image.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
+    m_commandBuffer->end();
+
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyBuffer(m_device->device(), buffer, NULL);
+    vk::FreeMemory(m_device->handle(), mem, NULL);
+}
+
+TEST_F(VkLayerTest, MultiplaneImageLayoutBadAspectFlags) {
+    TEST_DESCRIPTION("Query layout of a multiplane image using illegal aspect flag masks");
+
+    // Enable KHR multiplane req'd extensions
+    bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
+                                                    VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION);
+    if (mp_extensions) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+    if (mp_extensions) {
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+    } else {
+        printf("%s test requires KHR multiplane extensions, not available.  Skipping.\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    VkImageCreateInfo ci = {};
+    ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    ci.pNext = NULL;
+    ci.flags = 0;
+    ci.imageType = VK_IMAGE_TYPE_2D;
+    ci.format = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR;
+    ci.extent = {128, 128, 1};
+    ci.mipLevels = 1;
+    ci.arrayLayers = 1;
+    ci.samples = VK_SAMPLE_COUNT_1_BIT;
+    ci.tiling = VK_IMAGE_TILING_LINEAR;
+    ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+    ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+    // Verify formats
+    bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), ci, VK_FORMAT_FEATURE_TRANSFER_SRC_BIT);
+    ci.format = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR;
+    supported = supported && ImageFormatAndFeaturesSupported(instance(), gpu(), ci, VK_FORMAT_FEATURE_TRANSFER_SRC_BIT);
+    if (!supported) {
+        printf("%s Multiplane image format not supported.  Skipping test.\n", kSkipPrefix);
+        return;  // Assume there's low ROI on searching for different mp formats
+    }
+
+    VkImage image_2plane, image_3plane;
+    ci.format = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR;
+    VkResult err = vk::CreateImage(device(), &ci, NULL, &image_2plane);
+    ASSERT_VK_SUCCESS(err);
+
+    ci.format = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR;
+    err = vk::CreateImage(device(), &ci, NULL, &image_3plane);
+    ASSERT_VK_SUCCESS(err);
+
+    // Query layout of 3rd plane, for a 2-plane image
+    VkImageSubresource subres = {};
+    subres.aspectMask = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR;
+    subres.mipLevel = 0;
+    subres.arrayLayer = 0;
+    VkSubresourceLayout layout = {};
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetImageSubresourceLayout-format-01581");
+    vk::GetImageSubresourceLayout(device(), image_2plane, &subres, &layout);
+    m_errorMonitor->VerifyFound();
+
+    // Query layout using color aspect, for a 3-plane image
+    subres.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetImageSubresourceLayout-format-01582");
+    vk::GetImageSubresourceLayout(device(), image_3plane, &subres, &layout);
+    m_errorMonitor->VerifyFound();
+
+    // Clean up
+    vk::DestroyImage(device(), image_2plane, NULL);
+    vk::DestroyImage(device(), image_3plane, NULL);
+}
+
+TEST_F(VkLayerTest, InvalidBufferViewObject) {
+    // Create a single TEXEL_BUFFER descriptor and send it an invalid bufferView
+    // First, cause the bufferView to be invalid due to underlying buffer being destroyed
+    // Then destroy view itself and verify that same error is hit
+    VkResult err;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-00323");
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    OneOffDescriptorSet descriptor_set(m_device, {
+                                                     {0, VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                                 });
+    VkBufferView view;
+    {
+        // Create a valid bufferView to start with
+        uint32_t queue_family_index = 0;
+        VkBufferCreateInfo buffer_create_info = {};
+        buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+        buffer_create_info.size = 1024;
+        buffer_create_info.usage = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
+        buffer_create_info.queueFamilyIndexCount = 1;
+        buffer_create_info.pQueueFamilyIndices = &queue_family_index;
+        VkBufferObj buffer;
+        buffer.init(*m_device, buffer_create_info);
+
+        VkBufferViewCreateInfo bvci = {};
+        bvci.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
+        bvci.buffer = buffer.handle();
+        bvci.format = VK_FORMAT_R32_SFLOAT;
+        bvci.range = VK_WHOLE_SIZE;
+
+        err = vk::CreateBufferView(m_device->device(), &bvci, NULL, &view);
+        ASSERT_VK_SUCCESS(err);
+    }
+    // The buffer underlying the view was destroyed when the scope above closed; the descriptor update below should
+    // hit the error in core validation (CV)
+
+    VkWriteDescriptorSet descriptor_write;
+    memset(&descriptor_write, 0, sizeof(descriptor_write));
+    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_write.dstSet = descriptor_set.set_;
+    descriptor_write.dstBinding = 0;
+    descriptor_write.descriptorCount = 1;
+    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
+    descriptor_write.pTexelBufferView = &view;
+
+    vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+    m_errorMonitor->VerifyFound();
+
+    // Now destroy view itself and verify same error, which is hit in PV this time
+    vk::DestroyBufferView(m_device->device(), view, NULL);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-00323");
+    vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreateBufferViewNoMemoryBoundToBuffer) {
+    TEST_DESCRIPTION("Attempt to create a buffer view with a buffer that has no memory bound to it.");
+
+    VkResult err;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         " used with no memory bound. Memory should be bound by calling vkBindBufferMemory().");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // Create a buffer with no bound memory and then attempt to create
+    // a buffer view.
+    VkBufferCreateInfo buff_ci = {};
+    buff_ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    buff_ci.usage = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT;
+    buff_ci.size = 256;
+    buff_ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    VkBuffer buffer;
+    err = vk::CreateBuffer(m_device->device(), &buff_ci, NULL, &buffer);
+    ASSERT_VK_SUCCESS(err);
+
+    VkBufferViewCreateInfo buff_view_ci = {};
+    buff_view_ci.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
+    buff_view_ci.buffer = buffer;
+    buff_view_ci.format = VK_FORMAT_R8_UNORM;
+    buff_view_ci.range = VK_WHOLE_SIZE;
+    VkBufferView buff_view;
+    err = vk::CreateBufferView(m_device->device(), &buff_view_ci, NULL, &buff_view);
+
+    m_errorMonitor->VerifyFound();
+    vk::DestroyBuffer(m_device->device(), buffer, NULL);
+    // If last error is success, it still created the view, so delete it.
+    if (err == VK_SUCCESS) {
+        vk::DestroyBufferView(m_device->device(), buff_view, NULL);
+    }
+}
+
+TEST_F(VkLayerTest, InvalidBufferViewCreateInfoEntries) {
+    TEST_DESCRIPTION("Attempt to create a buffer view with invalid create info.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    const VkPhysicalDeviceLimits &dev_limits = m_device->props.limits;
+    const VkDeviceSize minTexelBufferOffsetAlignment = dev_limits.minTexelBufferOffsetAlignment;
+    if (minTexelBufferOffsetAlignment == 1) {
+        printf("%s Test requires minTexelOffsetAlignment to not be equal to 1. \n", kSkipPrefix);
+        return;
+    }
+
+    const VkFormat format_with_uniform_texel_support = VK_FORMAT_R8G8B8A8_UNORM;
+    const char *format_with_uniform_texel_support_string = "VK_FORMAT_R8G8B8A8_UNORM";
+    const VkFormat format_without_texel_support = VK_FORMAT_R8G8B8_UNORM;
+    const char *format_without_texel_support_string = "VK_FORMAT_R8G8B8_UNORM";
+    VkFormatProperties format_properties;
+    vk::GetPhysicalDeviceFormatProperties(gpu(), format_with_uniform_texel_support, &format_properties);
+    if (!(format_properties.bufferFeatures & VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT)) {
+        printf("%s Test requires %s to support VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT\n", kSkipPrefix,
+               format_with_uniform_texel_support_string);
+        return;
+    }
+    vk::GetPhysicalDeviceFormatProperties(gpu(), format_without_texel_support, &format_properties);
+    if ((format_properties.bufferFeatures & VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT) ||
+        (format_properties.bufferFeatures & VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT)) {
+        printf(
+            "%s Test requires %s to not support VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT nor "
+            "VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT\n",
+            kSkipPrefix, format_without_texel_support_string);
+        return;
+    }
+
+    // Create a test buffer--buffer must have been created using VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT or
+    // VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, so use a different usage value instead to cause an error
+    const VkDeviceSize resource_size = 1024;
+    const VkBufferCreateInfo bad_buffer_info = VkBufferObj::create_info(resource_size, VK_BUFFER_USAGE_INDEX_BUFFER_BIT);
+    VkBufferObj bad_buffer;
+    bad_buffer.init(*m_device, bad_buffer_info, (VkMemoryPropertyFlags)VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
+
+    // Create a test buffer view
+    VkBufferViewCreateInfo buff_view_ci = {};
+    buff_view_ci.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
+    buff_view_ci.buffer = bad_buffer.handle();
+    buff_view_ci.format = format_with_uniform_texel_support;
+    buff_view_ci.range = VK_WHOLE_SIZE;
+    CreateBufferViewTest(*this, &buff_view_ci, {"VUID-VkBufferViewCreateInfo-buffer-00932"});
+
+    // Create a better test buffer
+    const VkBufferCreateInfo buffer_info = VkBufferObj::create_info(resource_size, VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT);
+    VkBufferObj buffer;
+    buffer.init(*m_device, buffer_info, (VkMemoryPropertyFlags)VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
+
+    // Offset must be less than the size of the buffer, so set it equal to the buffer size to cause an error
+    buff_view_ci.buffer = buffer.handle();
+    buff_view_ci.offset = buffer.create_info().size;
+    CreateBufferViewTest(*this, &buff_view_ci, {"VUID-VkBufferViewCreateInfo-offset-00925"});
+
+    // Offset must be a multiple of VkPhysicalDeviceLimits::minTexelBufferOffsetAlignment so add 1 to ensure it is not
+    buff_view_ci.offset = minTexelBufferOffsetAlignment + 1;
+    CreateBufferViewTest(*this, &buff_view_ci, {"VUID-VkBufferViewCreateInfo-offset-02749"});
+
+    // Set offset to acceptable value for range tests
+    buff_view_ci.offset = minTexelBufferOffsetAlignment;
+    // Setting range equal to 0 will cause an error to occur
+    buff_view_ci.range = 0;
+    CreateBufferViewTest(*this, &buff_view_ci, {"VUID-VkBufferViewCreateInfo-range-00928"});
+
+    uint32_t format_size = FormatElementSize(buff_view_ci.format);
+    // Range must be a multiple of the element size of format, so add one to ensure it is not
+    buff_view_ci.range = format_size + 1;
+    CreateBufferViewTest(*this, &buff_view_ci, {"VUID-VkBufferViewCreateInfo-range-00929"});
+
+    // Twice the element size of format multiplied by VkPhysicalDeviceLimits::maxTexelBufferElements guarantees range divided by the
+    // element size is greater than maxTexelBufferElements, causing failure
+    buff_view_ci.range = 2 * static_cast<VkDeviceSize>(format_size) * static_cast<VkDeviceSize>(dev_limits.maxTexelBufferElements);
+    CreateBufferViewTest(*this, &buff_view_ci,
+                         {"VUID-VkBufferViewCreateInfo-range-00930", "VUID-VkBufferViewCreateInfo-offset-00931"});
+
+    // Set range to an acceptable value for the buffer usage tests
+    buff_view_ci.format = format_without_texel_support;
+    buff_view_ci.range = VK_WHOLE_SIZE;
+
+    // `buffer` was created using VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT so we can use that for the first buffer test
+    CreateBufferViewTest(*this, &buff_view_ci, {"VUID-VkBufferViewCreateInfo-buffer-00933"});
+
+    // Create a new buffer using VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT
+    const VkBufferCreateInfo storage_buffer_info =
+        VkBufferObj::create_info(resource_size, VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT);
+    VkBufferObj storage_buffer;
+    storage_buffer.init(*m_device, storage_buffer_info, (VkMemoryPropertyFlags)VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
+
+    buff_view_ci.buffer = storage_buffer.handle();
+    CreateBufferViewTest(*this, &buff_view_ci, {"VUID-VkBufferViewCreateInfo-buffer-00934"});
+}
+
+TEST_F(VkLayerTest, InvalidTexelBufferAlignment) {
+    TEST_DESCRIPTION("Test VK_EXT_texel_buffer_alignment.");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    std::array<const char *, 1> required_device_extensions = {{VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME}};
+    for (auto device_extension : required_device_extensions) {
+        if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
+            m_device_extension_names.push_back(device_extension);
+        } else {
+            printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
+            return;
+        }
+    }
+
+    if (DeviceIsMockICD() || DeviceSimulation()) {
+        printf("%s MockICD does not support this feature, skipping tests\n", kSkipPrefix);
+        return;
+    }
+
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+    // Create a device that enables texel_buffer_alignment
+    auto texel_buffer_alignment_features = lvl_init_struct<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT>();
+    auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&texel_buffer_alignment_features);
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+    texel_buffer_alignment_features.texelBufferAlignment = VK_TRUE;
+
+    VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT align_props = {};
+    align_props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT;
+    VkPhysicalDeviceProperties2 pd_props2 = {};
+    pd_props2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
+    pd_props2.pNext = &align_props;
+    vk::GetPhysicalDeviceProperties2(gpu(), &pd_props2);
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    const VkFormat format_with_uniform_texel_support = VK_FORMAT_R8G8B8A8_UNORM;
+
+    const VkDeviceSize resource_size = 1024;
+    VkBufferCreateInfo buffer_info = VkBufferObj::create_info(
+        resource_size, VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT);
+    VkBufferObj buffer;
+    buffer.init(*m_device, buffer_info, (VkMemoryPropertyFlags)VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
+
+    // Create a test buffer view
+    VkBufferViewCreateInfo buff_view_ci = {};
+    buff_view_ci.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
+    buff_view_ci.buffer = buffer.handle();
+    buff_view_ci.format = format_with_uniform_texel_support;
+    buff_view_ci.range = VK_WHOLE_SIZE;
+
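+    // An offset of 1 violates any required texel buffer offset alignment greater than 1, so the alignment VUIDs
+    // are expected whenever the corresponding alignment requirement applies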
+    buff_view_ci.offset = 1;
+    std::vector<std::string> expectedErrors;
+    if (buff_view_ci.offset < align_props.storageTexelBufferOffsetAlignmentBytes) {
+        expectedErrors.push_back("VUID-VkBufferViewCreateInfo-buffer-02750");
+    }
+    if (buff_view_ci.offset < align_props.uniformTexelBufferOffsetAlignmentBytes) {
+        expectedErrors.push_back("VUID-VkBufferViewCreateInfo-buffer-02751");
+    }
+    CreateBufferViewTest(*this, &buff_view_ci, expectedErrors);
+    expectedErrors.clear();
+
+    buff_view_ci.offset = 4;
+    if (buff_view_ci.offset < align_props.storageTexelBufferOffsetAlignmentBytes &&
+        !align_props.storageTexelBufferOffsetSingleTexelAlignment) {
+        expectedErrors.push_back("VUID-VkBufferViewCreateInfo-buffer-02750");
+    }
+    if (buff_view_ci.offset < align_props.uniformTexelBufferOffsetAlignmentBytes &&
+        !align_props.uniformTexelBufferOffsetSingleTexelAlignment) {
+        expectedErrors.push_back("VUID-VkBufferViewCreateInfo-buffer-02751");
+    }
+    CreateBufferViewTest(*this, &buff_view_ci, expectedErrors);
+    expectedErrors.clear();
+
+    // Test a 3-component format
+    VkFormatProperties format_properties;
+    vk::GetPhysicalDeviceFormatProperties(gpu(), VK_FORMAT_R32G32B32_SFLOAT, &format_properties);
+    if (format_properties.bufferFeatures & VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT) {
+        buffer_info.usage = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT;
+        VkBufferObj buffer2;
+        buffer2.init(*m_device, buffer_info, (VkMemoryPropertyFlags)VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
+
+        // Create a test buffer view
+        buff_view_ci.buffer = buffer2.handle();
+
+        buff_view_ci.format = VK_FORMAT_R32G32B32_SFLOAT;
+        buff_view_ci.offset = 1;
+        if (buff_view_ci.offset < align_props.uniformTexelBufferOffsetAlignmentBytes) {
+            expectedErrors.push_back("VUID-VkBufferViewCreateInfo-buffer-02751");
+        }
+        CreateBufferViewTest(*this, &buff_view_ci, expectedErrors);
+        expectedErrors.clear();
+
+        buff_view_ci.offset = 4;
+        if (buff_view_ci.offset < align_props.uniformTexelBufferOffsetAlignmentBytes &&
+            !align_props.uniformTexelBufferOffsetSingleTexelAlignment) {
+            expectedErrors.push_back("VUID-VkBufferViewCreateInfo-buffer-02751");
+        }
+        CreateBufferViewTest(*this, &buff_view_ci, expectedErrors);
+        expectedErrors.clear();
+    }
+}
+
+TEST_F(VkLayerTest, FillBufferWithinRenderPass) {
+    // Call CmdFillBuffer within an active renderpass
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdFillBuffer-renderpass");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+    VkMemoryPropertyFlags reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+    VkBufferObj dstBuffer;
+    dstBuffer.init_as_dst(*m_device, (VkDeviceSize)1024, reqs);
+
+    m_commandBuffer->FillBuffer(dstBuffer.handle(), 0, 4, 0x11111111);
+
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, UpdateBufferWithinRenderPass) {
+    // Call CmdUpdateBuffer within an active renderpass
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdUpdateBuffer-renderpass");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+    VkMemoryPropertyFlags reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+    VkBufferObj dstBuffer;
+    dstBuffer.init_as_dst(*m_device, (VkDeviceSize)1024, reqs);
+
+    VkDeviceSize dstOffset = 0;
+    uint32_t Data[] = {1, 2, 3, 4, 5, 6, 7, 8};
+    VkDeviceSize dataSize = sizeof(Data) / sizeof(uint32_t);
+    vk::CmdUpdateBuffer(m_commandBuffer->handle(), dstBuffer.handle(), dstOffset, dataSize, &Data);
+
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, ClearColorImageWithBadRange) {
+    TEST_DESCRIPTION("Record clear color with an invalid VkImageSubresourceRange");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkImageObj image(m_device);
+    image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
+    ASSERT_TRUE(image.create_info().arrayLayers == 1);
+    ASSERT_TRUE(image.initialized());
+    image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+
+    const VkClearColorValue clear_color = {{0.0f, 0.0f, 0.0f, 1.0f}};
+
+    m_commandBuffer->begin();
+    const auto cb_handle = m_commandBuffer->handle();
+
+    // Try baseMipLevel >= image.mipLevels with VK_REMAINING_MIP_LEVELS
+    {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-baseMipLevel-01470");
+        const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 1, VK_REMAINING_MIP_LEVELS, 0, 1};
+        vk::CmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Try baseMipLevel >= image.mipLevels without VK_REMAINING_MIP_LEVELS
+    {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-baseMipLevel-01470");
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-pRanges-01692");
+        const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 0, 1};
+        vk::CmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Try levelCount = 0
+    {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-pRanges-01692");
+        const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 0, 1};
+        vk::CmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Try baseMipLevel + levelCount > image.mipLevels
+    {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-pRanges-01692");
+        const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 2, 0, 1};
+        vk::CmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Try baseArrayLayer >= image.arrayLayers with VK_REMAINING_ARRAY_LAYERS
+    {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-baseArrayLayer-01472");
+        const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1, VK_REMAINING_ARRAY_LAYERS};
+        vk::CmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Try baseArrayLayer >= image.arrayLayers without VK_REMAINING_ARRAY_LAYERS
+    {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-baseArrayLayer-01472");
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-pRanges-01693");
+        const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1, 1};
+        vk::CmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Try layerCount = 0
+    {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-pRanges-01693");
+        const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 0};
+        vk::CmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Try baseArrayLayer + layerCount > image.arrayLayers
+    {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-pRanges-01693");
+        const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 2};
+        vk::CmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
+        m_errorMonitor->VerifyFound();
+    }
+}
+
+TEST_F(VkLayerTest, ClearDepthStencilWithBadRange) {
+    TEST_DESCRIPTION("Record clear depth with an invalid VkImageSubresourceRange");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    const auto depth_format = FindSupportedDepthStencilFormat(gpu());
+    if (!depth_format) {
+        printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    VkImageObj image(m_device);
+    image.Init(32, 32, 1, depth_format, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
+    ASSERT_TRUE(image.create_info().arrayLayers == 1);
+    ASSERT_TRUE(image.initialized());
+    const VkImageAspectFlags ds_aspect = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+    image.SetLayout(ds_aspect, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+
+    const VkClearDepthStencilValue clear_value = {};
+
+    m_commandBuffer->begin();
+    const auto cb_handle = m_commandBuffer->handle();
+
+    // Try baseMipLevel >= image.mipLevels with VK_REMAINING_MIP_LEVELS
+    {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-baseMipLevel-01474");
+        const VkImageSubresourceRange range = {ds_aspect, 1, VK_REMAINING_MIP_LEVELS, 0, 1};
+        vk::CmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Try baseMipLevel >= image.mipLevels without VK_REMAINING_MIP_LEVELS
+    {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-baseMipLevel-01474");
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-pRanges-01694");
+        const VkImageSubresourceRange range = {ds_aspect, 1, 1, 0, 1};
+        vk::CmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Try levelCount = 0
+    {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-pRanges-01694");
+        const VkImageSubresourceRange range = {ds_aspect, 0, 0, 0, 1};
+        vk::CmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Try baseMipLevel + levelCount > image.mipLevels
+    {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-pRanges-01694");
+        const VkImageSubresourceRange range = {ds_aspect, 0, 2, 0, 1};
+        vk::CmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Try baseArrayLayer >= image.arrayLayers with VK_REMAINING_ARRAY_LAYERS
+    {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-vkCmdClearDepthStencilImage-baseArrayLayer-01476");
+        const VkImageSubresourceRange range = {ds_aspect, 0, 1, 1, VK_REMAINING_ARRAY_LAYERS};
+        vk::CmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Try baseArrayLayer >= image.arrayLayers without VK_REMAINING_ARRAY_LAYERS
+    {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-vkCmdClearDepthStencilImage-baseArrayLayer-01476");
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-pRanges-01695");
+        const VkImageSubresourceRange range = {ds_aspect, 0, 1, 1, 1};
+        vk::CmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Try layerCount = 0
+    {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-pRanges-01695");
+        const VkImageSubresourceRange range = {ds_aspect, 0, 1, 0, 0};
+        vk::CmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Try baseArrayLayer + layerCount > image.arrayLayers
+    {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-pRanges-01695");
+        const VkImageSubresourceRange range = {ds_aspect, 0, 1, 0, 2};
+        vk::CmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
+        m_errorMonitor->VerifyFound();
+    }
+}
+
+TEST_F(VkLayerTest, ClearColorImageWithinRenderPass) {
+    // Call CmdClearColorImage within an active RenderPass
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-renderpass");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+    VkClearColorValue clear_color;
+    memset(clear_color.uint32, 0, sizeof(uint32_t) * 4);
+    const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
+    const int32_t tex_width = 32;
+    const int32_t tex_height = 32;
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = tex_format;
+    image_create_info.extent.width = tex_width;
+    image_create_info.extent.height = tex_height;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+
+    VkImageObj dstImage(m_device);
+    dstImage.init(&image_create_info);
+
+    const VkImageSubresourceRange range = VkImageObj::subresource_range(image_create_info, VK_IMAGE_ASPECT_COLOR_BIT);
+
+    vk::CmdClearColorImage(m_commandBuffer->handle(), dstImage.handle(), VK_IMAGE_LAYOUT_GENERAL, &clear_color, 1, &range);
+
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, ClearDepthStencilImageErrors) {
+    // Hit errors related to vk::CmdClearDepthStencilImage()
+    // 1. Use an image that doesn't have VK_IMAGE_USAGE_TRANSFER_DST_BIT set
+    // 2. Call CmdClearDepthStencilImage within an active RenderPass
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    auto depth_format = FindSupportedDepthStencilFormat(gpu());
+    if (!depth_format) {
+        printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    VkClearDepthStencilValue clear_value = {0};
+    VkImageCreateInfo image_create_info = VkImageObj::create_info();
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = depth_format;
+    image_create_info.extent.width = 64;
+    image_create_info.extent.height = 64;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    // Error here is that VK_IMAGE_USAGE_TRANSFER_DST_BIT is excluded for DS image that we'll call Clear on below
+    image_create_info.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
+
+    VkImageObj dst_image_bad_usage(m_device);
+    dst_image_bad_usage.init(&image_create_info);
+    const VkImageSubresourceRange range = VkImageObj::subresource_range(image_create_info, VK_IMAGE_ASPECT_DEPTH_BIT);
+
+    m_commandBuffer->begin();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-image-00009");
+    vk::CmdClearDepthStencilImage(m_commandBuffer->handle(), dst_image_bad_usage.handle(), VK_IMAGE_LAYOUT_GENERAL, &clear_value, 1,
+                                  &range);
+    m_errorMonitor->VerifyFound();
+
+    // Fix usage for next test case
+    image_create_info.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    VkImageObj dst_image(m_device);
+    dst_image.init(&image_create_info);
+
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-renderpass");
+    vk::CmdClearDepthStencilImage(m_commandBuffer->handle(), dst_image.handle(), VK_IMAGE_LAYOUT_GENERAL, &clear_value, 1, &range);
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, BufferMemoryBarrierNoBuffer) {
+    // Try to add a buffer memory barrier with no buffer.
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "required parameter pBufferMemoryBarriers[0].buffer specified as VK_NULL_HANDLE");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    m_commandBuffer->begin();
+
+    VkBufferMemoryBarrier buf_barrier = {};
+    buf_barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
+    buf_barrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
+    buf_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
+    buf_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    buf_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    buf_barrier.buffer = VK_NULL_HANDLE;
+    buf_barrier.offset = 0;
+    buf_barrier.size = VK_WHOLE_SIZE;
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 0,
+                           nullptr, 1, &buf_barrier, 0, nullptr);
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InvalidBarriers) {
+    TEST_DESCRIPTION("A variety of ways to get VK_INVALID_BARRIER ");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME);
+    }
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    auto separate_depth_stencil_layouts_features = lvl_init_struct<VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR>();
+    auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&separate_depth_stencil_layouts_features);
+    if (vkGetPhysicalDeviceFeatures2KHR) {
+        vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+    } else {
+        separate_depth_stencil_layouts_features.separateDepthStencilLayouts = VK_FALSE;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, (vkGetPhysicalDeviceFeatures2KHR) ? &features2 : nullptr));
+
+    auto depth_format = FindSupportedDepthStencilFormat(gpu());
+    if (!depth_format) {
+        printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
+        return;
+    }
+    // Add a token self-dependency for this test to avoid unexpected errors
+    m_addRenderPassSelfDependency = true;
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    const uint32_t submit_family = m_device->graphics_queue_node_index_;
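+    // The number of queue families also serves as a family index that is guaranteed to be invalid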
+    const uint32_t invalid = static_cast<uint32_t>(m_device->queue_props.size());
+    const uint32_t other_family = submit_family != 0 ? 0 : 1;
+    const bool only_one_family = (invalid == 1) || (m_device->queue_props[other_family].queueCount == 0);
+    std::vector<uint32_t> qf_indices{{submit_family, other_family}};
+    if (only_one_family) {
+        qf_indices.resize(1);
+    }
+    BarrierQueueFamilyTestHelper::Context test_context(this, qf_indices);
+
+    // Use image unbound to memory in barrier
+    // Use buffer unbound to memory in barrier
+    BarrierQueueFamilyTestHelper conc_test(&test_context);
+    conc_test.Init(nullptr, false, false);
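+    // The two 'false' arguments ask the helper to skip binding memory to its image and buffer, matching
+    // the "no memory bound" errors expected immediately below.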
+
+    conc_test.image_barrier_.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    conc_test(" used with no memory bound. Memory should be bound by calling vkBindImageMemory()",
+              " used with no memory bound. Memory should be bound by calling vkBindBufferMemory()");
+
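+    // Swap in a memory-backed buffer and image so the remaining cases exercise layout and aspect errors
+    // rather than missing-memory errors.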
+    VkBufferObj buffer;
+    VkMemoryPropertyFlags mem_reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+    buffer.init_as_src_and_dst(*m_device, 256, mem_reqs);
+    conc_test.buffer_barrier_.buffer = buffer.handle();
+
+    VkImageObj image(m_device);
+    image.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    conc_test.image_barrier_.image = image.handle();
+
+    // New layout can't be UNDEFINED
+    conc_test.image_barrier_.newLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    conc_test("VUID-VkImageMemoryBarrier-newLayout-01198", "");
+
+    // Transition image to color attachment optimal
+    conc_test.image_barrier_.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    conc_test("");
+
+    // TODO: this looks vestigial or incomplete...
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+    // Can't send buffer memory barrier during a render pass
+    vk::CmdEndRenderPass(m_commandBuffer->handle());
+
+    // Duplicate barriers that change layout
+    VkImageMemoryBarrier img_barrier = {};
+    img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+    img_barrier.pNext = NULL;
+    img_barrier.image = image.handle();
+    img_barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    img_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
+    img_barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+    img_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
+    img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    img_barrier.subresourceRange.baseArrayLayer = 0;
+    img_barrier.subresourceRange.baseMipLevel = 0;
+    img_barrier.subresourceRange.layerCount = 1;
+    img_barrier.subresourceRange.levelCount = 1;
+    VkImageMemoryBarrier img_barriers[2] = {img_barrier, img_barrier};
+
+    // Transitions from UNDEFINED are valid, even if duplicated
+    m_errorMonitor->ExpectSuccess();
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+                           VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 2,
+                           img_barriers);
+    m_errorMonitor->VerifyNotFound();
+
+    // Duplicated layout transitions (not from UNDEFINED) are not valid
+    img_barriers[0].oldLayout = VK_IMAGE_LAYOUT_GENERAL;
+    img_barriers[0].newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    img_barriers[1].oldLayout = img_barriers[0].oldLayout;
+    img_barriers[1].newLayout = img_barriers[0].newLayout;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-oldLayout-01197");
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+                           VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 2,
+                           img_barriers);
+    m_errorMonitor->VerifyFound();
+
+    // Exceed the buffer size
+    conc_test.buffer_barrier_.offset = conc_test.buffer_.create_info().size + 1;
+    conc_test("", "VUID-VkBufferMemoryBarrier-offset-01187");
+
+    conc_test.buffer_barrier_.offset = 0;
+    conc_test.buffer_barrier_.size = conc_test.buffer_.create_info().size + 1;
+    // Size greater than total size
+    conc_test("", "VUID-VkBufferMemoryBarrier-size-01189");
+
+    conc_test.buffer_barrier_.size = VK_WHOLE_SIZE;
+
+    // Now exercise barrier aspect bit errors, first DS
+    VkDepthStencilObj ds_image(m_device);
+    ds_image.Init(m_device, 128, 128, depth_format);
+    ASSERT_TRUE(ds_image.initialized());
+
+    conc_test.image_barrier_.oldLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+    conc_test.image_barrier_.newLayout = VK_IMAGE_LAYOUT_GENERAL;
+    conc_test.image_barrier_.image = ds_image.handle();
+
+    // Not having DEPTH or STENCIL set is an error
+    conc_test.image_barrier_.subresourceRange.aspectMask = VK_IMAGE_ASPECT_METADATA_BIT;
+
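+    // Which image VUID fires depends on whether the separateDepthStencilLayouts feature is enabled.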
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-CoreValidation-DrawState-InvalidImageAspect");
+    if (separate_depth_stencil_layouts_features.separateDepthStencilLayouts) {
+        conc_test("VUID-VkImageMemoryBarrier-image-03319");
+    } else {
+        conc_test("VUID-VkImageMemoryBarrier-image-03320");
+
+        // Having only one of depth or stencil set for DS image is an error
+        conc_test.image_barrier_.subresourceRange.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
+        conc_test("VUID-VkImageMemoryBarrier-image-03320");
+    }
+
+    // Having anything other than DEPTH and STENCIL is an error
+    conc_test.image_barrier_.subresourceRange.aspectMask =
+        VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_COLOR_BIT;
+    conc_test("UNASSIGNED-CoreValidation-DrawState-InvalidImageAspect");
+
+    // Now test depth-only
+    VkFormatProperties format_props;
+    vk::GetPhysicalDeviceFormatProperties(m_device->phy().handle(), VK_FORMAT_D16_UNORM, &format_props);
+    if (format_props.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) {
+        VkDepthStencilObj d_image(m_device);
+        d_image.Init(m_device, 128, 128, VK_FORMAT_D16_UNORM);
+        ASSERT_TRUE(d_image.initialized());
+
+        conc_test.image_barrier_.oldLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+        conc_test.image_barrier_.newLayout = VK_IMAGE_LAYOUT_GENERAL;
+        conc_test.image_barrier_.image = d_image.handle();
+
+        // DEPTH bit must be set
+        conc_test.image_barrier_.subresourceRange.aspectMask = VK_IMAGE_ASPECT_METADATA_BIT;
+        conc_test("Depth-only image formats must have the VK_IMAGE_ASPECT_DEPTH_BIT set.");
+
+        // No bits other than DEPTH may be set
+        conc_test.image_barrier_.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_COLOR_BIT;
+        conc_test("Depth-only image formats can have only the VK_IMAGE_ASPECT_DEPTH_BIT set.");
+    }
+
+    // Now test stencil-only
+    vk::GetPhysicalDeviceFormatProperties(m_device->phy().handle(), VK_FORMAT_S8_UINT, &format_props);
+    if (format_props.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) {
+        VkDepthStencilObj s_image(m_device);
+        s_image.Init(m_device, 128, 128, VK_FORMAT_S8_UINT);
+        ASSERT_TRUE(s_image.initialized());
+
+        conc_test.image_barrier_.oldLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+        conc_test.image_barrier_.newLayout = VK_IMAGE_LAYOUT_GENERAL;
+        conc_test.image_barrier_.image = s_image.handle();
+
+        // Use of COLOR aspect on a stencil-only image is an error
+        conc_test.image_barrier_.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+        conc_test("Stencil-only image formats must have the VK_IMAGE_ASPECT_STENCIL_BIT set.");
+    }
+
+    // Finally test color
+    VkImageObj c_image(m_device);
+    c_image.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(c_image.initialized());
+    conc_test.image_barrier_.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    conc_test.image_barrier_.newLayout = VK_IMAGE_LAYOUT_GENERAL;
+    conc_test.image_barrier_.image = c_image.handle();
+
+    // COLOR bit must be set
+    conc_test.image_barrier_.subresourceRange.aspectMask = VK_IMAGE_ASPECT_METADATA_BIT;
+    conc_test("Color image formats must have the VK_IMAGE_ASPECT_COLOR_BIT set.");
+
+    // No bits other than COLOR may be set
+    conc_test.image_barrier_.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT;
+    conc_test("Color image formats must have ONLY the VK_IMAGE_ASPECT_COLOR_BIT set.");
+
+    // A barrier's new and old VkImageLayout must be compatible with an image's VkImageUsageFlags.
+    {
+        VkImageObj img_color(m_device);
+        img_color.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
+        ASSERT_TRUE(img_color.initialized());
+
+        VkImageObj img_ds(m_device);
+        img_ds.Init(128, 128, 1, depth_format, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
+        ASSERT_TRUE(img_ds.initialized());
+
+        VkImageObj img_xfer_src(m_device);
+        img_xfer_src.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_OPTIMAL);
+        ASSERT_TRUE(img_xfer_src.initialized());
+
+        VkImageObj img_xfer_dst(m_device);
+        img_xfer_dst.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
+        ASSERT_TRUE(img_xfer_dst.initialized());
+
+        VkImageObj img_sampled(m_device);
+        img_sampled.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL);
+        ASSERT_TRUE(img_sampled.initialized());
+
+        VkImageObj img_input(m_device);
+        img_input.Init(128, 128, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
+        ASSERT_TRUE(img_input.initialized());
+
+        const struct {
+            VkImageObj &image_obj;
+            VkImageLayout bad_layout;
+            std::string msg_code;
+        } bad_buffer_layouts[] = {
+            // clang-format off
+            // images _without_ VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT
+            {img_ds,       VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,         "VUID-VkImageMemoryBarrier-oldLayout-01208"},
+            {img_xfer_src, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,         "VUID-VkImageMemoryBarrier-oldLayout-01208"},
+            {img_xfer_dst, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,         "VUID-VkImageMemoryBarrier-oldLayout-01208"},
+            {img_sampled,  VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,         "VUID-VkImageMemoryBarrier-oldLayout-01208"},
+            {img_input,    VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,         "VUID-VkImageMemoryBarrier-oldLayout-01208"},
+            // images _without_ VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT
+            {img_color,    VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01209"},
+            {img_xfer_src, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01209"},
+            {img_xfer_dst, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01209"},
+            {img_sampled,  VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01209"},
+            {img_input,    VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, "VUID-VkImageMemoryBarrier-oldLayout-01209"},
+            {img_color,    VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,  "VUID-VkImageMemoryBarrier-oldLayout-01210"},
+            {img_xfer_src, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,  "VUID-VkImageMemoryBarrier-oldLayout-01210"},
+            {img_xfer_dst, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,  "VUID-VkImageMemoryBarrier-oldLayout-01210"},
+            {img_sampled,  VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,  "VUID-VkImageMemoryBarrier-oldLayout-01210"},
+            {img_input,    VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,  "VUID-VkImageMemoryBarrier-oldLayout-01210"},
+            // images _without_ VK_IMAGE_USAGE_SAMPLED_BIT or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT
+            {img_color,    VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,         "VUID-VkImageMemoryBarrier-oldLayout-01211"},
+            {img_ds,       VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,         "VUID-VkImageMemoryBarrier-oldLayout-01211"},
+            {img_xfer_src, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,         "VUID-VkImageMemoryBarrier-oldLayout-01211"},
+            {img_xfer_dst, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,         "VUID-VkImageMemoryBarrier-oldLayout-01211"},
+            // images _without_ VK_IMAGE_USAGE_TRANSFER_SRC_BIT
+            {img_color,    VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,             "VUID-VkImageMemoryBarrier-oldLayout-01212"},
+            {img_ds,       VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,             "VUID-VkImageMemoryBarrier-oldLayout-01212"},
+            {img_xfer_dst, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,             "VUID-VkImageMemoryBarrier-oldLayout-01212"},
+            {img_sampled,  VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,             "VUID-VkImageMemoryBarrier-oldLayout-01212"},
+            {img_input,    VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,             "VUID-VkImageMemoryBarrier-oldLayout-01212"},
+            // images _without_ VK_IMAGE_USAGE_TRANSFER_DST_BIT
+            {img_color,    VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,             "VUID-VkImageMemoryBarrier-oldLayout-01213"},
+            {img_ds,       VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,             "VUID-VkImageMemoryBarrier-oldLayout-01213"},
+            {img_xfer_src, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,             "VUID-VkImageMemoryBarrier-oldLayout-01213"},
+            {img_sampled,  VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,             "VUID-VkImageMemoryBarrier-oldLayout-01213"},
+            {img_input,    VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,             "VUID-VkImageMemoryBarrier-oldLayout-01213"},
+            // clang-format on
+        };
+        const uint32_t layout_count = sizeof(bad_buffer_layouts) / sizeof(bad_buffer_layouts[0]);
+
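+        // Each bad layout is exercised in both directions: once as oldLayout and once as newLayout,
+        // since the usage/layout compatibility rules apply to either field.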
+        for (uint32_t i = 0; i < layout_count; ++i) {
+            conc_test.image_barrier_.image = bad_buffer_layouts[i].image_obj.handle();
+            const VkImageUsageFlags usage = bad_buffer_layouts[i].image_obj.usage();
+            conc_test.image_barrier_.subresourceRange.aspectMask = (usage == VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)
+                                                                       ? (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)
+                                                                       : VK_IMAGE_ASPECT_COLOR_BIT;
+
+            conc_test.image_barrier_.oldLayout = bad_buffer_layouts[i].bad_layout;
+            conc_test.image_barrier_.newLayout = VK_IMAGE_LAYOUT_GENERAL;
+            conc_test(bad_buffer_layouts[i].msg_code);
+
+            conc_test.image_barrier_.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
+            conc_test.image_barrier_.newLayout = bad_buffer_layouts[i].bad_layout;
+            conc_test(bad_buffer_layouts[i].msg_code);
+        }
+
+        conc_test.image_barrier_.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
+        conc_test.image_barrier_.newLayout = VK_IMAGE_LAYOUT_GENERAL;
+        conc_test.image_barrier_.image = image.handle();
+    }
+
+    // Attempt barrier where srcAccessMask is not supported by srcStageMask
+    // Have lower-order bit that's supported (shader write), but higher-order bit not supported to verify multi-bit validation
+    conc_test.buffer_barrier_.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_SHADER_WRITE_BIT;
+    conc_test.buffer_barrier_.offset = 0;
+    conc_test.buffer_barrier_.size = VK_WHOLE_SIZE;
+    conc_test("", "VUID-vkCmdPipelineBarrier-srcAccessMask-02815");
+
+    // Attempt barrier where dstAccessMask is not supported by dstStageMask
+    conc_test.buffer_barrier_.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
+    conc_test.buffer_barrier_.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+    conc_test("", "VUID-vkCmdPipelineBarrier-dstAccessMask-02816");
+
+    // Attempt to mismatch barriers/waitEvents calls with incompatible queues
+    // Create a command pool with incompatible queue flags
+    const std::vector<VkQueueFamilyProperties> queue_props = m_device->queue_props;
+    uint32_t queue_family_index = m_device->QueueFamilyMatching(VK_QUEUE_GRAPHICS_BIT, VK_QUEUE_COMPUTE_BIT);
+    if (queue_family_index == UINT32_MAX) {
+        printf("%s No non-compute queue supporting graphics found; skipped.\n", kSkipPrefix);
+        return;  // NOTE: this exits the test function!
+    }
+
+    VkBufferMemoryBarrier buf_barrier = {};
+    buf_barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
+    buf_barrier.pNext = NULL;
+    buf_barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+    buf_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
+    buf_barrier.buffer = buffer.handle();
+    buf_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    buf_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    buf_barrier.offset = 0;
+    buf_barrier.size = VK_WHOLE_SIZE;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-srcStageMask-01183");
+
+    VkCommandPoolObj command_pool(m_device, queue_family_index, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
+    VkCommandBufferObj bad_command_buffer(m_device, &command_pool);
+
+    bad_command_buffer.begin();
+    buf_barrier.srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
+    // Set two bits that should both be supported as a bonus positive check
+    buf_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT;
+    vk::CmdPipelineBarrier(bad_command_buffer.handle(), VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
+                           VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 1, &buf_barrier, 0, nullptr);
+    m_errorMonitor->VerifyFound();
+
+    // Check for an error when trying to wait on a pipeline stage not supported by this queue. Specifically, since our queue is
+    // not a compute queue, vk::CmdWaitEvents cannot have its source stage mask be VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdWaitEvents-srcStageMask-01164");
+    VkEvent event;
+    VkEventCreateInfo event_create_info{};
+    event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+    vk::CreateEvent(m_device->device(), &event_create_info, nullptr, &event);
+    vk::CmdWaitEvents(bad_command_buffer.handle(), 1, &event, /*source stage mask*/ VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
+                      VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, nullptr, 0, nullptr, 0, nullptr);
+    m_errorMonitor->VerifyFound();
+    bad_command_buffer.end();
+
+    vk::DestroyEvent(m_device->device(), event, nullptr);
+}
+
+TEST_F(VkLayerTest, InvalidBarrierQueueFamily) {
+    TEST_DESCRIPTION("Create and submit barriers with invalid queue families");
+    ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+    // Find queues of two families
+    const uint32_t submit_family = m_device->graphics_queue_node_index_;
+    const uint32_t invalid = static_cast<uint32_t>(m_device->queue_props.size());
+    const uint32_t other_family = submit_family != 0 ? 0 : 1;
+    const bool only_one_family = (invalid == 1) || (m_device->queue_props[other_family].queueCount == 0);
+
+    std::vector<uint32_t> qf_indices{{submit_family, other_family}};
+    if (only_one_family) {
+        qf_indices.resize(1);
+    }
+    BarrierQueueFamilyTestHelper::Context test_context(this, qf_indices);
+
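+    // On Vulkan 1.1 the external memory capability is core, so the pre-1.1 ("memory extension disabled")
+    // VUID cases below do not apply and are skipped.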
+    if (m_device->props.apiVersion >= VK_API_VERSION_1_1) {
+        printf(
+            "%s Device has apiVersion greater than 1.0 -- skipping test cases that require external memory to be disabled.\n",
+            kSkipPrefix);
+    } else {
+        if (only_one_family) {
+            printf("%s Single queue family found -- VK_SHARING_MODE_CONCURRENT testcases skipped.\n", kSkipPrefix);
+        } else {
+            std::vector<uint32_t> families = {submit_family, other_family};
+            BarrierQueueFamilyTestHelper conc_test(&test_context);
+            conc_test.Init(&families);
+            // core_validation::barrier_queue_families::kSrcAndDestMustBeIgnore
+            conc_test("VUID-VkImageMemoryBarrier-image-01199", "VUID-VkBufferMemoryBarrier-buffer-01190", VK_QUEUE_FAMILY_IGNORED,
+                      submit_family);
+            conc_test("VUID-VkImageMemoryBarrier-image-01199", "VUID-VkBufferMemoryBarrier-buffer-01190", submit_family,
+                      VK_QUEUE_FAMILY_IGNORED);
+            conc_test("VUID-VkImageMemoryBarrier-image-01199", "VUID-VkBufferMemoryBarrier-buffer-01190", submit_family,
+                      submit_family);
+            // true -> positive test
+            conc_test("VUID-VkImageMemoryBarrier-image-01199", "VUID-VkBufferMemoryBarrier-buffer-01190", VK_QUEUE_FAMILY_IGNORED,
+                      VK_QUEUE_FAMILY_IGNORED, true);
+        }
+
+        BarrierQueueFamilyTestHelper excl_test(&test_context);
+        excl_test.Init(nullptr);  // no queue families means *exclusive* sharing mode.
+
+        // core_validation::barrier_queue_families::kBothIgnoreOrBothValid
+        excl_test("VUID-VkImageMemoryBarrier-image-01200", "VUID-VkBufferMemoryBarrier-buffer-01192", VK_QUEUE_FAMILY_IGNORED,
+                  submit_family);
+        excl_test("VUID-VkImageMemoryBarrier-image-01200", "VUID-VkBufferMemoryBarrier-buffer-01192", submit_family,
+                  VK_QUEUE_FAMILY_IGNORED);
+        // true -> positive test
+        excl_test("VUID-VkImageMemoryBarrier-image-01200", "VUID-VkBufferMemoryBarrier-buffer-01192", submit_family, submit_family,
+                  true);
+        excl_test("VUID-VkImageMemoryBarrier-image-01200", "VUID-VkBufferMemoryBarrier-buffer-01192", VK_QUEUE_FAMILY_IGNORED,
+                  VK_QUEUE_FAMILY_IGNORED, true);
+    }
+
+    if (only_one_family) {
+        printf("%s Single queue family found -- VK_SHARING_MODE_EXCLUSIVE submit testcases skipped.\n", kSkipPrefix);
+    } else {
+        BarrierQueueFamilyTestHelper excl_test(&test_context);
+        excl_test.Init(nullptr);
+
+        // core_validation::barrier_queue_families::kSubmitQueueMustMatchSrcOrDst
+        excl_test("VUID-VkImageMemoryBarrier-image-01205", "VUID-VkBufferMemoryBarrier-buffer-01196", other_family, other_family,
+                  false, submit_family);
+
+        // true -> positive test (testing both the index logic and the QFO transfer tracking).
+        excl_test("POSITIVE_TEST", "POSITIVE_TEST", submit_family, other_family, true, submit_family);
+        excl_test("POSITIVE_TEST", "POSITIVE_TEST", submit_family, other_family, true, other_family);
+        excl_test("POSITIVE_TEST", "POSITIVE_TEST", other_family, submit_family, true, other_family);
+        excl_test("POSITIVE_TEST", "POSITIVE_TEST", other_family, submit_family, true, submit_family);
+
+        // negative testing for QFO transfer tracking
+        // Duplicate release in one CB
+        excl_test("UNASSIGNED-VkImageMemoryBarrier-image-00001", "UNASSIGNED-VkBufferMemoryBarrier-buffer-00001", submit_family,
+                  other_family, false, submit_family, BarrierQueueFamilyTestHelper::DOUBLE_RECORD);
+        // Duplicate pending release
+        excl_test("UNASSIGNED-VkImageMemoryBarrier-image-00003", "UNASSIGNED-VkBufferMemoryBarrier-buffer-00003", submit_family,
+                  other_family, false, submit_family);
+        // Duplicate acquire in one CB
+        excl_test("UNASSIGNED-VkImageMemoryBarrier-image-00001", "UNASSIGNED-VkBufferMemoryBarrier-buffer-00001", submit_family,
+                  other_family, false, other_family, BarrierQueueFamilyTestHelper::DOUBLE_RECORD);
+        // No pending release
+        excl_test("UNASSIGNED-VkImageMemoryBarrier-image-00004", "UNASSIGNED-VkBufferMemoryBarrier-buffer-00004", submit_family,
+                  other_family, false, other_family);
+        // Duplicate release in two CB
+        excl_test("UNASSIGNED-VkImageMemoryBarrier-image-00002", "UNASSIGNED-VkBufferMemoryBarrier-buffer-00002", submit_family,
+                  other_family, false, submit_family, BarrierQueueFamilyTestHelper::DOUBLE_COMMAND_BUFFER);
+        // Duplicate acquire in two CB
+        excl_test("POSITIVE_TEST", "POSITIVE_TEST", submit_family, other_family, true, submit_family);  // need a succesful release
+        excl_test("UNASSIGNED-VkImageMemoryBarrier-image-00002", "UNASSIGNED-VkBufferMemoryBarrier-buffer-00002", submit_family,
+                  other_family, false, other_family, BarrierQueueFamilyTestHelper::DOUBLE_COMMAND_BUFFER);
+    }
+}
+
+TEST_F(VkLayerTest, InvalidBarrierQueueFamilyWithMemExt) {
+    TEST_DESCRIPTION("Create and submit barriers with invalid queue families when memory extension is enabled ");
+    std::vector<const char *> reqd_instance_extensions = {
+        {VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME}};
+    for (auto extension_name : reqd_instance_extensions) {
+        if (InstanceExtensionSupported(extension_name)) {
+            m_instance_extension_names.push_back(extension_name);
+        } else {
+            printf("%s Required instance extension %s not supported, skipping test\n", kSkipPrefix, extension_name);
+            return;
+        }
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    // Check for external memory device extensions
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
+    } else {
+        printf("%s External memory extension not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+    // Find queues of two families
+    const uint32_t submit_family = m_device->graphics_queue_node_index_;
+    const uint32_t invalid = static_cast<uint32_t>(m_device->queue_props.size());
+    const uint32_t other_family = submit_family != 0 ? 0 : 1;
+    const bool only_one_family = (invalid == 1) || (m_device->queue_props[other_family].queueCount == 0);
+
+    std::vector<uint32_t> qf_indices{{submit_family, other_family}};
+    if (only_one_family) {
+        qf_indices.resize(1);
+    }
+    BarrierQueueFamilyTestHelper::Context test_context(this, qf_indices);
+
+    if (only_one_family) {
+        printf("%s Single queue family found -- VK_SHARING_MODE_CONCURRENT testcases skipped.\n", kSkipPrefix);
+    } else {
+        std::vector<uint32_t> families = {submit_family, other_family};
+        BarrierQueueFamilyTestHelper conc_test(&test_context);
+
+        // core_validation::barrier_queue_families::kSrcOrDstMustBeIgnore
+        conc_test.Init(&families);
+        conc_test("VUID-VkImageMemoryBarrier-image-01381", "VUID-VkBufferMemoryBarrier-buffer-01191", submit_family, submit_family);
+        // true -> positive test
+        conc_test("VUID-VkImageMemoryBarrier-image-01381", "VUID-VkBufferMemoryBarrier-buffer-01191", VK_QUEUE_FAMILY_IGNORED,
+                  VK_QUEUE_FAMILY_IGNORED, true);
+        conc_test("VUID-VkImageMemoryBarrier-image-01381", "VUID-VkBufferMemoryBarrier-buffer-01191", VK_QUEUE_FAMILY_IGNORED,
+                  VK_QUEUE_FAMILY_EXTERNAL_KHR, true);
+        conc_test("VUID-VkImageMemoryBarrier-image-01381", "VUID-VkBufferMemoryBarrier-buffer-01191", VK_QUEUE_FAMILY_EXTERNAL_KHR,
+                  VK_QUEUE_FAMILY_IGNORED, true);
+
+        // core_validation::barrier_queue_families::kSpecialOrIgnoreOnly
+        conc_test("VUID-VkImageMemoryBarrier-image-01766", "VUID-VkBufferMemoryBarrier-buffer-01763", submit_family,
+                  VK_QUEUE_FAMILY_IGNORED);
+        conc_test("VUID-VkImageMemoryBarrier-image-01766", "VUID-VkBufferMemoryBarrier-buffer-01763", VK_QUEUE_FAMILY_IGNORED,
+                  submit_family);
+        // This is to flag the errors that would be considered only "unexpected" in the parallel case above
+        // true -> positive test
+        conc_test("VUID-VkImageMemoryBarrier-image-01766", "VUID-VkBufferMemoryBarrier-buffer-01763", VK_QUEUE_FAMILY_IGNORED,
+                  VK_QUEUE_FAMILY_EXTERNAL_KHR, true);
+        conc_test("VUID-VkImageMemoryBarrier-image-01766", "VUID-VkBufferMemoryBarrier-buffer-01763", VK_QUEUE_FAMILY_EXTERNAL_KHR,
+                  VK_QUEUE_FAMILY_IGNORED, true);
+    }
+
+    BarrierQueueFamilyTestHelper excl_test(&test_context);
+    excl_test.Init(nullptr);  // no queue families means *exclusive* sharing mode.
+
+    // core_validation::barrier_queue_families::kSrcIgnoreRequiresDstIgnore
+    excl_test("VUID-VkImageMemoryBarrier-image-01201", "VUID-VkBufferMemoryBarrier-buffer-01193", VK_QUEUE_FAMILY_IGNORED,
+              submit_family);
+    excl_test("VUID-VkImageMemoryBarrier-image-01201", "VUID-VkBufferMemoryBarrier-buffer-01193", VK_QUEUE_FAMILY_IGNORED,
+              VK_QUEUE_FAMILY_EXTERNAL_KHR);
+    // true -> positive test
+    excl_test("VUID-VkImageMemoryBarrier-image-01201", "VUID-VkBufferMemoryBarrier-buffer-01193", VK_QUEUE_FAMILY_IGNORED,
+              VK_QUEUE_FAMILY_IGNORED, true);
+
+    // core_validation::barrier_queue_families::kDstValidOrSpecialIfNotIgnore
+    excl_test("VUID-VkImageMemoryBarrier-image-01768", "VUID-VkBufferMemoryBarrier-buffer-01765", submit_family, invalid);
+    // true -> positive test
+    excl_test("VUID-VkImageMemoryBarrier-image-01768", "VUID-VkBufferMemoryBarrier-buffer-01765", submit_family, submit_family,
+              true);
+    excl_test("VUID-VkImageMemoryBarrier-image-01768", "VUID-VkBufferMemoryBarrier-buffer-01765", submit_family,
+              VK_QUEUE_FAMILY_IGNORED, true);
+    excl_test("VUID-VkImageMemoryBarrier-image-01768", "VUID-VkBufferMemoryBarrier-buffer-01765", submit_family,
+              VK_QUEUE_FAMILY_EXTERNAL_KHR, true);
+
+    // core_validation::barrier_queue_families::kSrcValidOrSpecialIfNotIgnore
+    excl_test("VUID-VkImageMemoryBarrier-image-01767", "VUID-VkBufferMemoryBarrier-buffer-01764", invalid, submit_family);
+    // true -> positive test
+    excl_test("VUID-VkImageMemoryBarrier-image-01767", "VUID-VkBufferMemoryBarrier-buffer-01764", submit_family, submit_family,
+              true);
+    excl_test("VUID-VkImageMemoryBarrier-image-01767", "VUID-VkBufferMemoryBarrier-buffer-01764", VK_QUEUE_FAMILY_IGNORED,
+              VK_QUEUE_FAMILY_IGNORED, true);
+    excl_test("VUID-VkImageMemoryBarrier-image-01767", "VUID-VkBufferMemoryBarrier-buffer-01764", VK_QUEUE_FAMILY_EXTERNAL_KHR,
+              submit_family, true);
+}
+
+TEST_F(VkLayerTest, ImageBarrierWithBadRange) {
+    TEST_DESCRIPTION("VkImageMemoryBarrier with an invalid subresourceRange");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkImageMemoryBarrier img_barrier_template = {};
+    img_barrier_template.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+    img_barrier_template.pNext = NULL;
+    img_barrier_template.srcAccessMask = 0;
+    img_barrier_template.dstAccessMask = 0;
+    img_barrier_template.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    img_barrier_template.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    img_barrier_template.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    img_barrier_template.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    // subresourceRange to be set later for the purposes of this test
+    img_barrier_template.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    img_barrier_template.subresourceRange.baseArrayLayer = 0;
+    img_barrier_template.subresourceRange.baseMipLevel = 0;
+    img_barrier_template.subresourceRange.layerCount = 0;
+    img_barrier_template.subresourceRange.levelCount = 0;
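+    // layerCount/levelCount of 0 here are placeholders; each case below supplies a full subresourceRange.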
+
+    const uint32_t submit_family = m_device->graphics_queue_node_index_;
+    const uint32_t invalid = static_cast<uint32_t>(m_device->queue_props.size());
+    const uint32_t other_family = submit_family != 0 ? 0 : 1;
+    const bool only_one_family = (invalid == 1) || (m_device->queue_props[other_family].queueCount == 0);
+    std::vector<uint32_t> qf_indices{{submit_family, other_family}};
+    if (only_one_family) {
+        qf_indices.resize(1);
+    }
+    BarrierQueueFamilyTestHelper::Context test_context(this, qf_indices);
+
+    // Use image unbound to memory in barrier
+    // Use buffer unbound to memory in barrier
+    BarrierQueueFamilyTestHelper conc_test(&test_context);
+    conc_test.Init(nullptr);
+    img_barrier_template.image = conc_test.image_.handle();
+    conc_test.image_barrier_ = img_barrier_template;
+    // Nested scope here confuses clang-format, somehow
+    // clang-format off
+
+    // try for vk::CmdPipelineBarrier
+    {
+        // Try baseMipLevel >= image.mipLevels with VK_REMAINING_MIP_LEVELS
+        {
+            conc_test.image_barrier_.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 1, VK_REMAINING_MIP_LEVELS, 0, 1};
+            conc_test("VUID-VkImageMemoryBarrier-subresourceRange-01486");
+        }
+
+        // Try baseMipLevel >= image.mipLevels without VK_REMAINING_MIP_LEVELS
+        {
+            conc_test.image_barrier_.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 0, 1};
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01724");
+            conc_test("VUID-VkImageMemoryBarrier-subresourceRange-01486");
+        }
+
+        // Try levelCount = 0
+        {
+            conc_test.image_barrier_.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 0, 1};
+            conc_test("VUID-VkImageMemoryBarrier-subresourceRange-01724");
+        }
+
+        // Try baseMipLevel + levelCount > image.mipLevels
+        {
+            conc_test.image_barrier_.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 2, 0, 1};
+            conc_test("VUID-VkImageMemoryBarrier-subresourceRange-01724");
+        }
+
+        // Try baseArrayLayer >= image.arrayLayers with VK_REMAINING_ARRAY_LAYERS
+        {
+            conc_test.image_barrier_.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1, VK_REMAINING_ARRAY_LAYERS};
+            conc_test("VUID-VkImageMemoryBarrier-subresourceRange-01488");
+        }
+
+        // Try baseArrayLayer >= image.arrayLayers without VK_REMAINING_ARRAY_LAYERS
+        {
+            conc_test.image_barrier_.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1, 1};
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01725");
+            conc_test("VUID-VkImageMemoryBarrier-subresourceRange-01488");
+        }
+
+        // Try layerCount = 0
+        {
+            conc_test.image_barrier_.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 0};
+            conc_test("VUID-VkImageMemoryBarrier-subresourceRange-01725");
+        }
+
+        // Try baseArrayLayer + layerCount > image.arrayLayers
+        {
+            conc_test.image_barrier_.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 2};
+            conc_test("VUID-VkImageMemoryBarrier-subresourceRange-01725");
+        }
+    }
+
+    m_commandBuffer->begin();
+    // try for vk::CmdWaitEvents
+    {
+        VkEvent event;
+        VkEventCreateInfo eci{VK_STRUCTURE_TYPE_EVENT_CREATE_INFO, NULL, 0};
+        VkResult err = vk::CreateEvent(m_device->handle(), &eci, nullptr, &event);
+        ASSERT_VK_SUCCESS(err);
+
+        // Try baseMipLevel >= image.mipLevels with VK_REMAINING_MIP_LEVELS
+        {
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01486");
+            const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 1, VK_REMAINING_MIP_LEVELS, 0, 1};
+            VkImageMemoryBarrier img_barrier = img_barrier_template;
+            img_barrier.subresourceRange = range;
+            vk::CmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+                            VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
+            m_errorMonitor->VerifyFound();
+        }
+
+        // Try baseMipLevel >= image.mipLevels without VK_REMAINING_MIP_LEVELS
+        {
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01486");
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01724");
+            const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 0, 1};
+            VkImageMemoryBarrier img_barrier = img_barrier_template;
+            img_barrier.subresourceRange = range;
+            vk::CmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+                            VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
+            m_errorMonitor->VerifyFound();
+        }
+
+        // Try levelCount = 0
+        {
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01724");
+            const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 0, 1};
+            VkImageMemoryBarrier img_barrier = img_barrier_template;
+            img_barrier.subresourceRange = range;
+            vk::CmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+                            VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
+            m_errorMonitor->VerifyFound();
+        }
+
+        // Try baseMipLevel + levelCount > image.mipLevels
+        {
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01724");
+            const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 2, 0, 1};
+            VkImageMemoryBarrier img_barrier = img_barrier_template;
+            img_barrier.subresourceRange = range;
+            vk::CmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+                            VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
+            m_errorMonitor->VerifyFound();
+        }
+
+        // Try baseArrayLayer >= image.arrayLayers with VK_REMAINING_ARRAY_LAYERS
+        {
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01488");
+            const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1, VK_REMAINING_ARRAY_LAYERS};
+            VkImageMemoryBarrier img_barrier = img_barrier_template;
+            img_barrier.subresourceRange = range;
+            vk::CmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+                            VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
+            m_errorMonitor->VerifyFound();
+        }
+
+        // Try baseArrayLayer >= image.arrayLayers without VK_REMAINING_ARRAY_LAYERS
+        {
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01488");
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01725");
+            const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1, 1};
+            VkImageMemoryBarrier img_barrier = img_barrier_template;
+            img_barrier.subresourceRange = range;
+            vk::CmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+                            VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
+            m_errorMonitor->VerifyFound();
+        }
+
+        // Try layerCount = 0
+        {
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01725");
+            const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 0};
+            VkImageMemoryBarrier img_barrier = img_barrier_template;
+            img_barrier.subresourceRange = range;
+            vk::CmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+                            VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
+            m_errorMonitor->VerifyFound();
+        }
+
+        // Try baseArrayLayer + layerCount > image.arrayLayers
+        {
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-subresourceRange-01725");
+            const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 2};
+            VkImageMemoryBarrier img_barrier = img_barrier_template;
+            img_barrier.subresourceRange = range;
+            vk::CmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+                            VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
+            m_errorMonitor->VerifyFound();
+        }
+
+        vk::DestroyEvent(m_device->handle(), event, nullptr);
+    }
+    // clang-format on
+}
+
+TEST_F(VkLayerTest, IdxBufferAlignmentError) {
+    // Bind an index buffer at an offset that is not a multiple of the index type size
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    uint32_t const indices[] = {0};
+    VkBufferCreateInfo buf_info = {};
+    buf_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    buf_info.size = 1024;
+    buf_info.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
+    buf_info.queueFamilyIndexCount = 1;
+    buf_info.pQueueFamilyIndices = indices;
+
+    VkBufferObj buffer;
+    buffer.init(*m_device, buf_info);
+
+    m_commandBuffer->begin();
+
+    // vk::CmdBindPipeline(m_commandBuffer->handle(),
+    // VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+    // Should error before calling into the driver, so we don't care about actual data
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "vkCmdBindIndexBuffer() offset (0x7) does not fall on ");
+    vk::CmdBindIndexBuffer(m_commandBuffer->handle(), buffer.handle(), 7, VK_INDEX_TYPE_UINT16);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, Bad2DArrayImageType) {
+    TEST_DESCRIPTION("Create an image with a flag specifying 2D_ARRAY_COMPATIBLE but not of imageType 3D.");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+    } else {
+        printf("%s %s is not supported; skipping\n", kSkipPrefix, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    // Trigger the check by setting VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT while imageType is 2D
+    VkImageCreateInfo ici = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+                             nullptr,
+                             VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR,
+                             VK_IMAGE_TYPE_2D,
+                             VK_FORMAT_R8G8B8A8_UNORM,
+                             {32, 32, 1},
+                             1,
+                             1,
+                             VK_SAMPLE_COUNT_1_BIT,
+                             VK_IMAGE_TILING_OPTIMAL,
+                             VK_IMAGE_USAGE_SAMPLED_BIT,
+                             VK_SHARING_MODE_EXCLUSIVE,
+                             0,
+                             nullptr,
+                             VK_IMAGE_LAYOUT_UNDEFINED};
+    CreateImageTest(*this, &ici, "VUID-VkImageCreateInfo-flags-00950");
+}
+
+TEST_F(VkLayerTest, VertexBufferInvalid) {
+    TEST_DESCRIPTION(
+        "Submit a command buffer using deleted vertex buffer, delete a buffer twice, use an invalid offset for each buffer type, "
+        "and attempt to bind a null buffer");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.InitState();
+    pipe.CreateGraphicsPipeline();
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "CoreValidation-DrawState-InvalidCommandBuffer-VkBuffer");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "CoreValidation-DrawState-InvalidCommandBuffer-VkDeviceMemory");
+
+    {
+        // Create and bind a vertex buffer in a reduced scope, which will cause it to be deleted upon leaving this scope
+        const float vbo_data[3] = {1.f, 0.f, 1.f};
+        VkVerticesObj draw_verticies(m_device, 1, 1, sizeof(vbo_data[0]), sizeof(vbo_data) / sizeof(vbo_data[0]), vbo_data);
+        draw_verticies.BindVertexBuffers(m_commandBuffer->handle());
+        draw_verticies.AddVertexInputToPipeHelpr(&pipe);
+
+        m_commandBuffer->Draw(1, 0, 0, 0);
+
+        m_commandBuffer->EndRenderPass();
+    }
+
+    vk::EndCommandBuffer(m_commandBuffer->handle());
+    m_errorMonitor->VerifyFound();
+
+    {
+        // Create a buffer in a reduced scope and destroy it
+        // twice, the second time through the destructor
+        VkBufferTest buffer_test(m_device, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, VkBufferTest::eDoubleDelete);
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyBuffer-buffer-parameter");
+        buffer_test.TestDoubleDestroy();
+    }
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetUnexpectedError("value of pCreateInfo->usage must not be 0");
+    if (VkBufferTest::GetTestConditionValid(m_device, VkBufferTest::eInvalidMemoryOffset)) {
+        // Create a buffer and bind its memory at an invalid offset.
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-memoryOffset-01036");
+        m_errorMonitor->SetUnexpectedError(
+            "If buffer was created with the VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT or VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, "
+            "memoryOffset must be a multiple of VkPhysicalDeviceLimits::minTexelBufferOffsetAlignment");
+        VkBufferTest buffer_test(m_device, VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, VkBufferTest::eInvalidMemoryOffset);
+        (void)buffer_test;
+        m_errorMonitor->VerifyFound();
+    }
+
+    {
+        // Attempt to bind a null buffer.
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "vkBindBufferMemory: required parameter buffer specified as VK_NULL_HANDLE");
+        VkBufferTest buffer_test(m_device, 0, VkBufferTest::eBindNullBuffer);
+        (void)buffer_test;
+        m_errorMonitor->VerifyFound();
+    }
+
+    {
+        // Attempt to bind a fake buffer.
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-buffer-parameter");
+        VkBufferTest buffer_test(m_device, 0, VkBufferTest::eBindFakeBuffer);
+        (void)buffer_test;
+        m_errorMonitor->VerifyFound();
+    }
+
+    {
+        // Attempt to free device memory using an invalid handle.
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkFreeMemory-memory-parameter");
+        VkBufferTest buffer_test(m_device, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, VkBufferTest::eFreeInvalidHandle);
+        (void)buffer_test;
+    }
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, BadVertexBufferOffset) {
+    TEST_DESCRIPTION("Submit an offset past the end of a vertex buffer");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+    static const float vbo_data[3] = {1.f, 0.f, 1.f};
+    VkConstantBufferObj vbo(m_device, sizeof(vbo_data), (const void *)&vbo_data, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT);
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindVertexBuffers-pOffsets-00626");
+    m_commandBuffer->BindVertexBuffer(&vbo, (VkDeviceSize)(3 * sizeof(float)), 1);  // Offset at the end of the buffer
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+}
+
+// INVALID_IMAGE_LAYOUT tests (one other case is hit by MapMemWithoutHostVisibleBit and not here)
+TEST_F(VkLayerTest, InvalidImageLayout) {
+    TEST_DESCRIPTION(
+        "Hit all possible validation checks associated with the UNASSIGNED-CoreValidation-DrawState-InvalidImageLayout error. "
+        "Generally these involve having images in the wrong layout when they're copied or transitioned.");
+    // 3 in ValidateCmdBufImageLayouts
+    // *  -1 Attempt to submit cmd buf w/ deleted image
+    // *  -2 Cmd buf submit of image w/ layout not matching first use w/ subresource
+    // *  -3 Cmd buf submit of image w/ layout not matching first use w/o subresource
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    auto depth_format = FindSupportedDepthStencilFormat(gpu());
+    if (!depth_format) {
+        printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
+        return;
+    }
+    // Create src & dst images to use for copy operations
+    VkImageObj src_image(m_device);
+    VkImageObj dst_image(m_device);
+    VkImageObj depth_image(m_device);
+
+    const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
+    const int32_t tex_width = 32;
+    const int32_t tex_height = 32;
+
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = tex_format;
+    image_create_info.extent.width = tex_width;
+    image_create_info.extent.height = tex_height;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 4;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+    image_create_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    image_create_info.flags = 0;
+
+    src_image.init(&image_create_info);
+
+    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    dst_image.init(&image_create_info);
+
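+    // The depth image reuses the same create info, switching to VK_FORMAT_D16_UNORM and adding
+    // depth/stencil attachment usage.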
+    image_create_info.format = VK_FORMAT_D16_UNORM;
+    image_create_info.usage |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
+    depth_image.init(&image_create_info);
+
+    m_commandBuffer->begin();
+    VkImageCopy copy_region;
+    copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copy_region.srcSubresource.mipLevel = 0;
+    copy_region.srcSubresource.baseArrayLayer = 0;
+    copy_region.srcSubresource.layerCount = 1;
+    copy_region.srcOffset.x = 0;
+    copy_region.srcOffset.y = 0;
+    copy_region.srcOffset.z = 0;
+    copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copy_region.dstSubresource.mipLevel = 0;
+    copy_region.dstSubresource.baseArrayLayer = 0;
+    copy_region.dstSubresource.layerCount = 1;
+    copy_region.dstOffset.x = 0;
+    copy_region.dstOffset.y = 0;
+    copy_region.dstOffset.z = 0;
+    copy_region.extent.width = 1;
+    copy_region.extent.height = 1;
+    copy_region.extent.depth = 1;
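+    // A single-texel copy region; the regions themselves are valid, only the image layouts vary in the calls below.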
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+                                         "layout should be VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL instead of GENERAL.");
+    m_errorMonitor->SetUnexpectedError("layout should be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL instead of GENERAL.");
+
+    m_commandBuffer->CopyImage(src_image.handle(), VK_IMAGE_LAYOUT_GENERAL, dst_image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+    // The first call hits the expected WARNING and skips the call down the chain, so call a second time to reach the driver and
+    // update layer state
+    m_errorMonitor->SetUnexpectedError("layout should be VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL instead of GENERAL.");
+    m_errorMonitor->SetUnexpectedError("layout should be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL instead of GENERAL.");
+    m_commandBuffer->CopyImage(src_image.handle(), VK_IMAGE_LAYOUT_GENERAL, dst_image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    // Now cause error due to src image layout changing
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-srcImageLayout-00128");
+    m_errorMonitor->SetUnexpectedError("is VK_IMAGE_LAYOUT_UNDEFINED but can only be VK_IMAGE_LAYOUT");
+    m_commandBuffer->CopyImage(src_image.handle(), VK_IMAGE_LAYOUT_UNDEFINED, dst_image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+    // Final src error is due to bad layout type
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-srcImageLayout-00129");
+    m_errorMonitor->SetUnexpectedError(
+        "with specific layout VK_IMAGE_LAYOUT_UNDEFINED that doesn't match the previously used layout VK_IMAGE_LAYOUT_GENERAL.");
+    m_commandBuffer->CopyImage(src_image.handle(), VK_IMAGE_LAYOUT_UNDEFINED, dst_image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+    // Now verify same checks for dst
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+                                         "layout should be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL instead of GENERAL.");
+    m_errorMonitor->SetUnexpectedError("layout should be VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL instead of GENERAL.");
+    m_commandBuffer->CopyImage(src_image.handle(), VK_IMAGE_LAYOUT_GENERAL, dst_image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+    // Now cause error due to dst image layout changing
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-dstImageLayout-00133");
+    m_errorMonitor->SetUnexpectedError(
+        "is VK_IMAGE_LAYOUT_UNDEFINED but can only be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL.");
+    m_commandBuffer->CopyImage(src_image.handle(), VK_IMAGE_LAYOUT_GENERAL, dst_image.handle(), VK_IMAGE_LAYOUT_UNDEFINED, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-dstImageLayout-00134");
+    m_errorMonitor->SetUnexpectedError(
+        "with specific layout VK_IMAGE_LAYOUT_UNDEFINED that doesn't match the previously used layout VK_IMAGE_LAYOUT_GENERAL.");
+    m_commandBuffer->CopyImage(src_image.handle(), VK_IMAGE_LAYOUT_GENERAL, dst_image.handle(), VK_IMAGE_LAYOUT_UNDEFINED, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+
+    // Convert dst and depth images to TRANSFER_DST for subsequent tests
+    VkImageMemoryBarrier transfer_dst_image_barrier[1] = {};
+    transfer_dst_image_barrier[0].sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+    transfer_dst_image_barrier[0].oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    transfer_dst_image_barrier[0].newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
+    transfer_dst_image_barrier[0].srcAccessMask = 0;
+    transfer_dst_image_barrier[0].dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
+    transfer_dst_image_barrier[0].image = dst_image.handle();
+    transfer_dst_image_barrier[0].subresourceRange.layerCount = image_create_info.arrayLayers;
+    transfer_dst_image_barrier[0].subresourceRange.levelCount = image_create_info.mipLevels;
+    transfer_dst_image_barrier[0].subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+                           NULL, 0, NULL, 1, transfer_dst_image_barrier);
+    transfer_dst_image_barrier[0].image = depth_image.handle();
+    transfer_dst_image_barrier[0].subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+                           NULL, 0, NULL, 1, transfer_dst_image_barrier);
+
+    // Cause errors due to clearing with invalid image layouts
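+    // Note: dst_image and depth_image are now tracked as TRANSFER_DST_OPTIMAL, so passing
+    // UNDEFINED or GENERAL to the clear commands below mismatches the tracked layout.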
+    VkClearColorValue color_clear_value = {};
+    VkImageSubresourceRange clear_range;
+    clear_range.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    clear_range.baseMipLevel = 0;
+    clear_range.baseArrayLayer = 0;
+    clear_range.layerCount = 1;
+    clear_range.levelCount = 1;
+
+    // Fail due to explicitly prohibited layout for color clear (only GENERAL and TRANSFER_DST are permitted).
+    // Since the image is currently not in UNDEFINED layout, this will emit two errors.
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-imageLayout-00005");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-imageLayout-00004");
+    m_commandBuffer->ClearColorImage(dst_image.handle(), VK_IMAGE_LAYOUT_UNDEFINED, &color_clear_value, 1, &clear_range);
+    m_errorMonitor->VerifyFound();
+    // Fail due to provided layout not matching actual current layout for color clear.
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearColorImage-imageLayout-00004");
+    m_commandBuffer->ClearColorImage(dst_image.handle(), VK_IMAGE_LAYOUT_GENERAL, &color_clear_value, 1, &clear_range);
+    m_errorMonitor->VerifyFound();
+
+    VkClearDepthStencilValue depth_clear_value = {};
+    clear_range.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+
+    // Fail due to explicitly prohibited layout for depth clear (only GENERAL and TRANSFER_DST are permitted).
+    // Since the image is currently not in UNDEFINED layout, this will emit two errors.
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-imageLayout-00012");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-imageLayout-00011");
+    m_commandBuffer->ClearDepthStencilImage(depth_image.handle(), VK_IMAGE_LAYOUT_UNDEFINED, &depth_clear_value, 1, &clear_range);
+    m_errorMonitor->VerifyFound();
+    // Fail due to provided layout not matching actual current layout for depth clear.
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearDepthStencilImage-imageLayout-00011");
+    m_commandBuffer->ClearDepthStencilImage(depth_image.handle(), VK_IMAGE_LAYOUT_GENERAL, &depth_clear_value, 1, &clear_range);
+    m_errorMonitor->VerifyFound();
+
+    // Now cause error due to bad image layout transition in PipelineBarrier
+    VkImageMemoryBarrier image_barrier[1] = {};
+    image_barrier[0].sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+    image_barrier[0].oldLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
+    image_barrier[0].newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
+    image_barrier[0].image = src_image.handle();
+    image_barrier[0].subresourceRange.layerCount = image_create_info.arrayLayers;
+    image_barrier[0].subresourceRange.levelCount = image_create_info.mipLevels;
+    image_barrier[0].subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
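+    // Note: two errors are expected here: oldLayout does not match the GENERAL layout tracked
+    // for src_image, and DEPTH_STENCIL_READ_ONLY_OPTIMAL is used with a color image that was
+    // not created with depth/stencil attachment usage.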
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-oldLayout-01197");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-oldLayout-01210");
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+                           NULL, 0, NULL, 1, image_barrier);
+    m_errorMonitor->VerifyFound();
+
+    // Finally some layout errors at RenderPass create time
+    // Just hacking in specific state to get to the errors we want, so don't copy this unless you know what you're doing.
+    VkAttachmentReference attach = {};
+    // perf warning for GENERAL layout w/ non-DS input attachment
+    attach.layout = VK_IMAGE_LAYOUT_GENERAL;
+    VkSubpassDescription subpass = {};
+    subpass.inputAttachmentCount = 1;
+    subpass.pInputAttachments = &attach;
+    VkRenderPassCreateInfo rpci = {};
+    rpci.subpassCount = 1;
+    rpci.pSubpasses = &subpass;
+    rpci.attachmentCount = 1;
+    VkAttachmentDescription attach_desc = {};
+    attach_desc.format = VK_FORMAT_UNDEFINED;
+    attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
+    attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+    rpci.pAttachments = &attach_desc;
+    rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+    VkRenderPass rp;
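+    // The same rpci is reused below; only attach.layout, the subpass attachment pointers/counts,
+    // and attach_desc are mutated between vkCreateRenderPass calls to hit each warning or error.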
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+                                         "Layout for input attachment is GENERAL but should be READ_ONLY_OPTIMAL.");
+    vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+    m_errorMonitor->VerifyFound();
+    // error w/ non-general layout
+    attach.layout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
+
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "Layout for input attachment is VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL but can only be READ_ONLY_OPTIMAL or GENERAL.");
+    vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+    m_errorMonitor->VerifyFound();
+    subpass.inputAttachmentCount = 0;
+    subpass.colorAttachmentCount = 1;
+    subpass.pColorAttachments = &attach;
+    attach.layout = VK_IMAGE_LAYOUT_GENERAL;
+    // perf warning for GENERAL layout on color attachment
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+                                         "Layout for color attachment is GENERAL but should be COLOR_ATTACHMENT_OPTIMAL.");
+    vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+    m_errorMonitor->VerifyFound();
+    // error w/ non-color opt or GENERAL layout for color attachment
+    attach.layout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "Layout for color attachment is VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL but can only be COLOR_ATTACHMENT_OPTIMAL or GENERAL.");
+    vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+    m_errorMonitor->VerifyFound();
+    subpass.colorAttachmentCount = 0;
+    subpass.pDepthStencilAttachment = &attach;
+    attach.layout = VK_IMAGE_LAYOUT_GENERAL;
+    // perf warning for GENERAL layout on DS attachment
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+                                         "GENERAL layout for depth attachment may not give optimal performance.");
+    vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+    m_errorMonitor->VerifyFound();
+    // error w/ non-ds opt or GENERAL layout for depth attachment
+    attach.layout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "Layout for depth attachment is VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL but can only be "
+                                         "DEPTH_STENCIL_ATTACHMENT_OPTIMAL, DEPTH_STENCIL_READ_ONLY_OPTIMAL or GENERAL.");
+    vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+    m_errorMonitor->VerifyFound();
+    // For this error we need a valid renderpass so create default one
+    attach.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
+    attach.attachment = 0;
+    attach_desc.format = depth_format;
+    attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
+    attach_desc.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
+    attach_desc.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
+    attach_desc.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
+    // Can't do a CLEAR load on READ_ONLY initialLayout
+    attach_desc.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
+    attach_desc.initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
+    attach_desc.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "with invalid first layout VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL");
+    vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InvalidStorageImageLayout) {
+    TEST_DESCRIPTION("Attempt to update a STORAGE_IMAGE descriptor w/o GENERAL layout.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    const VkFormat tex_format = VK_FORMAT_R8G8B8A8_UNORM;
+    VkImageTiling tiling;
+    VkFormatProperties format_properties;
+    vk::GetPhysicalDeviceFormatProperties(gpu(), tex_format, &format_properties);
+    if (format_properties.linearTilingFeatures & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT) {
+        tiling = VK_IMAGE_TILING_LINEAR;
+    } else if (format_properties.optimalTilingFeatures & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT) {
+        tiling = VK_IMAGE_TILING_OPTIMAL;
+    } else {
+        printf("%s Device does not support VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT; skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    OneOffDescriptorSet descriptor_set(m_device,
+                                       {
+                                           {0, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
+                                       });
+
+    VkImageObj image(m_device);
+    image.Init(32, 32, 1, tex_format, VK_IMAGE_USAGE_STORAGE_BIT, tiling, 0);
+    ASSERT_TRUE(image.initialized());
+    VkImageView view = image.targetView(tex_format);
+
+    descriptor_set.WriteDescriptorImageInfo(0, view, VK_NULL_HANDLE, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE);
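+    // The descriptor write above presumably records the helper's default imageLayout
+    // (SHADER_READ_ONLY_OPTIMAL), which is not allowed for STORAGE_IMAGE descriptors; they
+    // require GENERAL.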
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         " of VK_DESCRIPTOR_TYPE_STORAGE_IMAGE type is being updated with layout "
+                                         "VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL but according to spec ");
+    descriptor_set.UpdateDescriptorSets();
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreateImageViewBreaksParameterCompatibilityRequirements) {
+    TEST_DESCRIPTION(
+        "Attempts to create an Image View with a view type that does not match the image type it is being created from.");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    VkPhysicalDeviceMemoryProperties memProps;
+    vk::GetPhysicalDeviceMemoryProperties(m_device->phy().handle(), &memProps);
+
+    // Test mismatch detection for image of type VK_IMAGE_TYPE_1D
+    VkImageCreateInfo imgInfo = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+                                 nullptr,
+                                 VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
+                                 VK_IMAGE_TYPE_1D,
+                                 VK_FORMAT_R8G8B8A8_UNORM,
+                                 {1, 1, 1},
+                                 1,
+                                 1,
+                                 VK_SAMPLE_COUNT_1_BIT,
+                                 VK_IMAGE_TILING_OPTIMAL,
+                                 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+                                 VK_SHARING_MODE_EXCLUSIVE,
+                                 0,
+                                 nullptr,
+                                 VK_IMAGE_LAYOUT_UNDEFINED};
+    VkImageObj image1D(m_device);
+    image1D.init(&imgInfo);
+    ASSERT_TRUE(image1D.initialized());
+
+    // Initialize VkImageViewCreateInfo with mismatched viewType
+    VkImageViewCreateInfo ivci = {};
+    ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    ivci.image = image1D.handle();
+    ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    ivci.format = VK_FORMAT_R8G8B8A8_UNORM;
+    ivci.subresourceRange.layerCount = 1;
+    ivci.subresourceRange.baseMipLevel = 0;
+    ivci.subresourceRange.levelCount = 1;
+    ivci.subresourceRange.baseArrayLayer = 0;
+    ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+    // Test for error message
+    CreateImageViewTest(*this, &ivci,
+                        "vkCreateImageView(): pCreateInfo->viewType VK_IMAGE_VIEW_TYPE_2D is not compatible with image");
+
+    // Test mismatch detection for image of type VK_IMAGE_TYPE_2D
+    imgInfo = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+               nullptr,
+               VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
+               VK_IMAGE_TYPE_2D,
+               VK_FORMAT_R8G8B8A8_UNORM,
+               {1, 1, 1},
+               1,
+               6,
+               VK_SAMPLE_COUNT_1_BIT,
+               VK_IMAGE_TILING_OPTIMAL,
+               VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+               VK_SHARING_MODE_EXCLUSIVE,
+               0,
+               nullptr,
+               VK_IMAGE_LAYOUT_UNDEFINED};
+    VkImageObj image2D(m_device);
+    image2D.init(&imgInfo);
+    ASSERT_TRUE(image2D.initialized());
+
+    // Initialize VkImageViewCreateInfo with mismatched viewType
+    ivci = {};
+    ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    ivci.image = image2D.handle();
+    ivci.viewType = VK_IMAGE_VIEW_TYPE_3D;
+    ivci.format = VK_FORMAT_R8G8B8A8_UNORM;
+    ivci.subresourceRange.layerCount = 1;
+    ivci.subresourceRange.baseMipLevel = 0;
+    ivci.subresourceRange.levelCount = 1;
+    ivci.subresourceRange.baseArrayLayer = 0;
+    ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+    // Test for error message
+    CreateImageViewTest(*this, &ivci,
+                        "vkCreateImageView(): pCreateInfo->viewType VK_IMAGE_VIEW_TYPE_3D is not compatible with image");
+
+    // Change VkImageViewCreateInfo to different mismatched viewType
+    ivci.viewType = VK_IMAGE_VIEW_TYPE_CUBE;
+    ivci.subresourceRange.layerCount = 6;
+
+    // Test for error message
+    CreateImageViewTest(*this, &ivci, "VUID-VkImageViewCreateInfo-image-01003");
+
+    // Test mismatch detection for image of type VK_IMAGE_TYPE_3D
+    imgInfo = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+               nullptr,
+               VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
+               VK_IMAGE_TYPE_3D,
+               VK_FORMAT_R8G8B8A8_UNORM,
+               {1, 1, 1},
+               1,
+               1,
+               VK_SAMPLE_COUNT_1_BIT,
+               VK_IMAGE_TILING_OPTIMAL,
+               VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+               VK_SHARING_MODE_EXCLUSIVE,
+               0,
+               nullptr,
+               VK_IMAGE_LAYOUT_UNDEFINED};
+    VkImageObj image3D(m_device);
+    image3D.init(&imgInfo);
+    ASSERT_TRUE(image3D.initialized());
+
+    // Initialize VkImageViewCreateInfo with mismatched viewType
+    ivci = {};
+    ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    ivci.image = image3D.handle();
+    ivci.viewType = VK_IMAGE_VIEW_TYPE_1D;
+    ivci.format = VK_FORMAT_R8G8B8A8_UNORM;
+    ivci.subresourceRange.layerCount = 1;
+    ivci.subresourceRange.baseMipLevel = 0;
+    ivci.subresourceRange.levelCount = 1;
+    ivci.subresourceRange.baseArrayLayer = 0;
+    ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+    // Test for error message
+    CreateImageViewTest(*this, &ivci,
+                        "vkCreateImageView(): pCreateInfo->viewType VK_IMAGE_VIEW_TYPE_1D is not compatible with image");
+
+    // Change VkImageViewCreateInfo to different mismatched viewType
+    ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+
+    // Test for error message
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
+        CreateImageViewTest(*this, &ivci, "VUID-VkImageViewCreateInfo-image-01005");
+    } else {
+        CreateImageViewTest(*this, &ivci, "VUID-VkImageViewCreateInfo-subResourceRange-01021");
+    }
+
+    // Check if the device can make the image required for this test case.
+    VkImageFormatProperties formProps = {{0, 0, 0}, 0, 0, 0, 0};
+    VkResult res = vk::GetPhysicalDeviceImageFormatProperties(
+        m_device->phy().handle(), VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_TYPE_3D, VK_IMAGE_TILING_OPTIMAL,
+        VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+        VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR | VK_IMAGE_CREATE_SPARSE_BINDING_BIT,
+        &formProps);
+
+    // If not, skip this part of the test.
+    if (res || !m_device->phy().features().sparseBinding ||
+        !DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
+        printf("%s Image format, sparseBinding feature, or %s not supported.\n", kSkipPrefix, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+        return;
+    }
+
+    // Initialize VkImageCreateInfo with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR and VK_IMAGE_CREATE_SPARSE_BINDING_BIT which
+    // are incompatible create flags.
+    imgInfo = {
+        VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+        nullptr,
+        VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR | VK_IMAGE_CREATE_SPARSE_BINDING_BIT,
+        VK_IMAGE_TYPE_3D,
+        VK_FORMAT_R8G8B8A8_UNORM,
+        {1, 1, 1},
+        1,
+        1,
+        VK_SAMPLE_COUNT_1_BIT,
+        VK_IMAGE_TILING_OPTIMAL,
+        VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+        VK_SHARING_MODE_EXCLUSIVE,
+        0,
+        nullptr,
+        VK_IMAGE_LAYOUT_UNDEFINED};
+    VkImage imageSparse;
+
+    // Creating a sparse image means we should not bind memory to it.
+    res = vk::CreateImage(m_device->device(), &imgInfo, NULL, &imageSparse);
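+    // VK_SUCCESS is 0, so ASSERT_FALSE(res) below asserts that the sparse image was created.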
+    ASSERT_FALSE(res);
+
+    // Initialize VkImageViewCreateInfo to create a view that will attempt to utilize VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR.
+    ivci = {};
+    ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    ivci.image = imageSparse;
+    ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    ivci.format = VK_FORMAT_R8G8B8A8_UNORM;
+    ivci.subresourceRange.layerCount = 1;
+    ivci.subresourceRange.baseMipLevel = 0;
+    ivci.subresourceRange.levelCount = 1;
+    ivci.subresourceRange.baseArrayLayer = 0;
+    ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+    // Test for error message
+    CreateImageViewTest(*this, &ivci,
+                        " when the VK_IMAGE_CREATE_SPARSE_BINDING_BIT, VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT, or "
+                        "VK_IMAGE_CREATE_SPARSE_ALIASED_BIT flags are enabled.");
+
+    // Clean up
+    vk::DestroyImage(m_device->device(), imageSparse, nullptr);
+}
+
+TEST_F(VkLayerTest, CreateImageViewFormatFeatureMismatch) {
+    TEST_DESCRIPTION("Create view with a format that does not have the same features as the image format.");
+
+    if (!EnableDeviceProfileLayer()) {
+        printf("%s Failed to enable device profile layer.\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    PFN_vkSetPhysicalDeviceFormatPropertiesEXT fpvkSetPhysicalDeviceFormatPropertiesEXT = nullptr;
+    PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT = nullptr;
+
+    // Load required functions
+    if (!LoadDeviceProfileLayer(fpvkSetPhysicalDeviceFormatPropertiesEXT, fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT)) {
+        printf("%s Failed to load device profile layer.\n", kSkipPrefix);
+        return;
+    }
+
+    // List of features to be tested
+    VkFormatFeatureFlagBits features[] = {VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT, VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT,
+                                          VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT, VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT};
+    uint32_t feature_count = 4;
+    // List of usage cases for each feature test
+    VkImageUsageFlags usages[] = {VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_USAGE_STORAGE_BIT, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+                                  VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT};
+    // List of errors that will be thrown in order of tests run
+    std::string optimal_error_codes[] = {
+        "VUID-VkImageViewCreateInfo-usage-02274",
+        "VUID-VkImageViewCreateInfo-usage-02275",
+        "VUID-VkImageViewCreateInfo-usage-02276",
+        "VUID-VkImageViewCreateInfo-usage-02277",
+    };
+
+    VkFormatProperties formatProps;
+
+    // First three tests
+    uint32_t i = 0;
+    for (i = 0; i < (feature_count - 1); i++) {
+        // Modify formats to have mismatched features
+
+        // Format for image
+        fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R32G32B32A32_UINT, &formatProps);
+        formatProps.optimalTilingFeatures |= features[i];
+        fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R32G32B32A32_UINT, formatProps);
+
+        memset(&formatProps, 0, sizeof(formatProps));
+
+        // Format for view
+        fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R32G32B32A32_SINT, &formatProps);
+        formatProps.optimalTilingFeatures = features[(i + 1) % feature_count];
+        fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R32G32B32A32_SINT, formatProps);
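+        // The image format now advertises features[i] while the view format advertises a
+        // different feature, so creating a view on an image with usages[i] should trigger
+        // optimal_error_codes[i].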
+
+        // Create image with modified format
+        VkImageCreateInfo imgInfo = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+                                     nullptr,
+                                     VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
+                                     VK_IMAGE_TYPE_2D,
+                                     VK_FORMAT_R32G32B32A32_UINT,
+                                     {1, 1, 1},
+                                     1,
+                                     1,
+                                     VK_SAMPLE_COUNT_1_BIT,
+                                     VK_IMAGE_TILING_OPTIMAL,
+                                     usages[i],
+                                     VK_SHARING_MODE_EXCLUSIVE,
+                                     0,
+                                     nullptr,
+                                     VK_IMAGE_LAYOUT_UNDEFINED};
+        VkImageObj image(m_device);
+        image.init(&imgInfo);
+        ASSERT_TRUE(image.initialized());
+
+        // Initialize VkImageViewCreateInfo with modified format
+        VkImageViewCreateInfo ivci = {};
+        ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+        ivci.image = image.handle();
+        ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+        ivci.format = VK_FORMAT_R32G32B32A32_SINT;
+        ivci.subresourceRange.layerCount = 1;
+        ivci.subresourceRange.baseMipLevel = 0;
+        ivci.subresourceRange.levelCount = 1;
+        ivci.subresourceRange.baseArrayLayer = 0;
+        ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+        // Test for error message
+        CreateImageViewTest(*this, &ivci, optimal_error_codes[i]);
+    }
+
+    // Test for VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT.  Needs special formats
+
+    // Only run this test if format supported
+    if (!ImageFormatIsSupported(gpu(), VK_FORMAT_D24_UNORM_S8_UINT, VK_IMAGE_TILING_OPTIMAL)) {
+        printf("%s VK_FORMAT_D24_UNORM_S8_UINT format not supported - skipped.\n", kSkipPrefix);
+        return;
+    }
+    // Modify formats to have mismatched features
+
+    // Format for image
+    fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_D24_UNORM_S8_UINT, &formatProps);
+    formatProps.optimalTilingFeatures |= features[i];
+    fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_D24_UNORM_S8_UINT, formatProps);
+
+    memset(&formatProps, 0, sizeof(formatProps));
+
+    // Format for view
+    fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_D32_SFLOAT_S8_UINT, &formatProps);
+    formatProps.optimalTilingFeatures = features[(i + 1) % feature_count];
+    fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_D32_SFLOAT_S8_UINT, formatProps);
+
+    // Create image with modified format
+    VkImageCreateInfo imgInfo = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+                                 nullptr,
+                                 VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
+                                 VK_IMAGE_TYPE_2D,
+                                 VK_FORMAT_D24_UNORM_S8_UINT,
+                                 {1, 1, 1},
+                                 1,
+                                 1,
+                                 VK_SAMPLE_COUNT_1_BIT,
+                                 VK_IMAGE_TILING_OPTIMAL,
+                                 usages[i],
+                                 VK_SHARING_MODE_EXCLUSIVE,
+                                 0,
+                                 nullptr,
+                                 VK_IMAGE_LAYOUT_UNDEFINED};
+    VkImageObj image(m_device);
+    image.init(&imgInfo);
+    ASSERT_TRUE(image.initialized());
+
+    // Initialize VkImageViewCreateInfo with modified format
+    VkImageViewCreateInfo ivci = {};
+    ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    ivci.image = image.handle();
+    ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    ivci.format = VK_FORMAT_D32_SFLOAT_S8_UINT;
+    ivci.subresourceRange.layerCount = 1;
+    ivci.subresourceRange.baseMipLevel = 0;
+    ivci.subresourceRange.levelCount = 1;
+    ivci.subresourceRange.baseArrayLayer = 0;
+    ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
+
+    // Test for error message
+    CreateImageViewTest(*this, &ivci, optimal_error_codes[i]);
+}
+
+TEST_F(VkLayerTest, InvalidImageViewUsageCreateInfo) {
+    TEST_DESCRIPTION("Usage modification via a chained VkImageViewUsageCreateInfo struct");
+
+    if (!EnableDeviceProfileLayer()) {
+        printf("%s Test requires DeviceProfileLayer, unavailable - skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (!DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE2_EXTENSION_NAME)) {
+        printf("%s Test requires API >= 1.1 or KHR_MAINTENANCE2 extension, unavailable - skipped.\n", kSkipPrefix);
+        return;
+    }
+    m_device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    PFN_vkSetPhysicalDeviceFormatPropertiesEXT fpvkSetPhysicalDeviceFormatPropertiesEXT = nullptr;
+    PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT = nullptr;
+
+    // Load required functions
+    if (!LoadDeviceProfileLayer(fpvkSetPhysicalDeviceFormatPropertiesEXT, fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT)) {
+        printf("%s Required extensions are not available.\n", kSkipPrefix);
+        return;
+    }
+
+    VkFormatProperties formatProps;
+
+    // Ensure image format claims support for sampled and storage, excludes color attachment
+    memset(&formatProps, 0, sizeof(formatProps));
+    fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R32G32B32A32_UINT, &formatProps);
+    formatProps.optimalTilingFeatures |= (VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT | VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT);
+    formatProps.optimalTilingFeatures = formatProps.optimalTilingFeatures & ~VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT;
+    fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R32G32B32A32_UINT, formatProps);
+
+    // Create image with sampled and storage usages
+    VkImageCreateInfo imgInfo = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+                                 nullptr,
+                                 VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
+                                 VK_IMAGE_TYPE_2D,
+                                 VK_FORMAT_R32G32B32A32_UINT,
+                                 {1, 1, 1},
+                                 1,
+                                 1,
+                                 VK_SAMPLE_COUNT_1_BIT,
+                                 VK_IMAGE_TILING_OPTIMAL,
+                                 VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT,
+                                 VK_SHARING_MODE_EXCLUSIVE,
+                                 0,
+                                 nullptr,
+                                 VK_IMAGE_LAYOUT_UNDEFINED};
+    VkImageObj image(m_device);
+    image.init(&imgInfo);
+    ASSERT_TRUE(image.initialized());
+
+    // Force the imageview format to exclude storage feature, include color attachment
+    memset(&formatProps, 0, sizeof(formatProps));
+    fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R32G32B32A32_SINT, &formatProps);
+    formatProps.optimalTilingFeatures |= VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT;
+    formatProps.optimalTilingFeatures = (formatProps.optimalTilingFeatures & ~VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT);
+    fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R32G32B32A32_SINT, formatProps);
+
+    VkImageViewCreateInfo ivci = {};
+    ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    ivci.image = image.handle();
+    ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    ivci.format = VK_FORMAT_R32G32B32A32_SINT;
+    ivci.subresourceRange.layerCount = 1;
+    ivci.subresourceRange.baseMipLevel = 0;
+    ivci.subresourceRange.levelCount = 1;
+    ivci.subresourceRange.baseArrayLayer = 0;
+    ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+    // ImageView creation should fail because view format doesn't support all the underlying image's usages
+    CreateImageViewTest(*this, &ivci, "VUID-VkImageViewCreateInfo-usage-02275");
+
+    // Add a chained VkImageViewUsageCreateInfo to override original image usage bits, removing storage
+    VkImageViewUsageCreateInfo usage_ci = {VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, nullptr, VK_IMAGE_USAGE_SAMPLED_BIT};
+    // Link the VkImageViewUsageCreateInfo struct into the view's create info pNext chain
+    ivci.pNext = &usage_ci;
+
+    // ImageView should now succeed without error
+    CreateImageViewTest(*this, &ivci);
+
+    // Try a zero usage field
+    usage_ci.usage = 0;
+    CreateImageViewTest(*this, &ivci, "VUID-VkImageViewUsageCreateInfo-usage-requiredbitmask");
+
+    // Try an illegal bit in usage field
+    usage_ci.usage = 0x10000000 | VK_IMAGE_USAGE_SAMPLED_BIT;
+    CreateImageViewTest(*this, &ivci, "VUID-VkImageViewUsageCreateInfo-usage-parameter");
+}
+
+TEST_F(VkLayerTest, CreateImageViewNoMemoryBoundToImage) {
+    VkResult err;
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // Create an image and try to create a view with no memory backing the image
+    VkImage image;
+
+    const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
+    const int32_t tex_width = 32;
+    const int32_t tex_height = 32;
+
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = tex_format;
+    image_create_info.extent.width = tex_width;
+    image_create_info.extent.height = tex_height;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+    image_create_info.flags = 0;
+
+    err = vk::CreateImage(m_device->device(), &image_create_info, NULL, &image);
+    ASSERT_VK_SUCCESS(err);
+
+    VkImageViewCreateInfo image_view_create_info = {};
+    image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    image_view_create_info.image = image;
+    image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    image_view_create_info.format = tex_format;
+    image_view_create_info.subresourceRange.layerCount = 1;
+    image_view_create_info.subresourceRange.baseMipLevel = 0;
+    image_view_create_info.subresourceRange.levelCount = 1;
+    image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+    CreateImageViewTest(*this, &image_view_create_info,
+                        " used with no memory bound. Memory should be bound by calling vkBindImageMemory().");
+    vk::DestroyImage(m_device->device(), image, NULL);
+}
+
+TEST_F(VkLayerTest, InvalidImageViewAspect) {
+    TEST_DESCRIPTION("Create an image and try to create a view with an invalid aspectMask");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
+    VkImageObj image(m_device);
+    image.Init(32, 32, 1, tex_format, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_LINEAR, 0);
+    ASSERT_TRUE(image.initialized());
+
+    VkImageViewCreateInfo image_view_create_info = {};
+    image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    image_view_create_info.image = image.handle();
+    image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    image_view_create_info.format = tex_format;
+    image_view_create_info.subresourceRange.baseMipLevel = 0;
+    image_view_create_info.subresourceRange.levelCount = 1;
+    image_view_create_info.subresourceRange.layerCount = 1;
+    // Cause an error by setting an invalid image aspect
+    image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_METADATA_BIT;
+
+    CreateImageViewTest(*this, &image_view_create_info, "UNASSIGNED-CoreValidation-DrawState-InvalidImageAspect");
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, ExerciseGetImageSubresourceLayout) {
+    TEST_DESCRIPTION("Test vkGetImageSubresourceLayout() valid usages");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    VkSubresourceLayout subres_layout = {};
+
+    // VU 00732: image must have been created with tiling equal to VK_IMAGE_TILING_LINEAR
+    {
+        const VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL;  // ERROR: violates VU 00732
+        VkImageObj img(m_device);
+        img.InitNoLayout(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, tiling);
+        ASSERT_TRUE(img.initialized());
+
+        VkImageSubresource subres = {};
+        subres.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+        subres.mipLevel = 0;
+        subres.arrayLayer = 0;
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetImageSubresourceLayout-image-00996");
+        vk::GetImageSubresourceLayout(m_device->device(), img.image(), &subres, &subres_layout);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // VU 00733: The aspectMask member of pSubresource must only have a single bit set
+    {
+        VkImageObj img(m_device);
+        img.InitNoLayout(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_SRC_BIT);
+        ASSERT_TRUE(img.initialized());
+
+        VkImageSubresource subres = {};
+        subres.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_METADATA_BIT;  // ERROR: triggers VU 00733
+        subres.mipLevel = 0;
+        subres.arrayLayer = 0;
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetImageSubresourceLayout-aspectMask-00997");
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "UNASSIGNED-CoreValidation-DrawState-InvalidImageAspect");
+        vk::GetImageSubresourceLayout(m_device->device(), img.image(), &subres, &subres_layout);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // 00739 mipLevel must be less than the mipLevels specified in VkImageCreateInfo when the image was created
+    {
+        VkImageObj img(m_device);
+        img.InitNoLayout(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_SRC_BIT);
+        ASSERT_TRUE(img.initialized());
+
+        VkImageSubresource subres = {};
+        subres.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+        subres.mipLevel = 1;  // ERROR: triggers VU 00739
+        subres.arrayLayer = 0;
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetImageSubresourceLayout-mipLevel-01716");
+        vk::GetImageSubresourceLayout(m_device->device(), img.image(), &subres, &subres_layout);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // 00740 arrayLayer must be less than the arrayLayers specified in VkImageCreateInfo when the image was created
+    {
+        VkImageObj img(m_device);
+        img.InitNoLayout(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_SRC_BIT);
+        ASSERT_TRUE(img.initialized());
+
+        VkImageSubresource subres = {};
+        subres.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+        subres.mipLevel = 0;
+        subres.arrayLayer = 1;  // ERROR: triggers VU 00740
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetImageSubresourceLayout-arrayLayer-01717");
+        vk::GetImageSubresourceLayout(m_device->device(), img.image(), &subres, &subres_layout);
+        m_errorMonitor->VerifyFound();
+    }
+}
+
+TEST_F(VkLayerTest, ImageLayerUnsupportedFormat) {
+    TEST_DESCRIPTION("Creating images with unsupported formats");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // Create image with unsupported format - Expect FORMAT_UNSUPPORTED
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = VK_FORMAT_UNDEFINED;
+    image_create_info.extent.width = 32;
+    image_create_info.extent.height = 32;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+
+    CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-format-00943");
+}
+
+TEST_F(VkLayerTest, CreateImageViewFormatMismatchUnrelated) {
+    TEST_DESCRIPTION("Create an image with a color format, then try to create a depth view of it");
+
+    if (!EnableDeviceProfileLayer()) {
+        printf("%s Failed to enable device profile layer.\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    // Load required functions
+    PFN_vkSetPhysicalDeviceFormatPropertiesEXT fpvkSetPhysicalDeviceFormatPropertiesEXT =
+        (PFN_vkSetPhysicalDeviceFormatPropertiesEXT)vk::GetInstanceProcAddr(instance(), "vkSetPhysicalDeviceFormatPropertiesEXT");
+    PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT =
+        (PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT)vk::GetInstanceProcAddr(
+            instance(), "vkGetOriginalPhysicalDeviceFormatPropertiesEXT");
+
+    if (!(fpvkSetPhysicalDeviceFormatPropertiesEXT) || !(fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT)) {
+        printf("%s Can't find device_profile_api functions; skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    auto depth_format = FindSupportedDepthStencilFormat(gpu());
+    if (!depth_format) {
+        printf("%s Couldn't find depth stencil image format.\n", kSkipPrefix);
+        return;
+    }
+
+    VkFormatProperties formatProps;
+
+    fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), depth_format, &formatProps);
+    formatProps.optimalTilingFeatures |= VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT;
+    fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), depth_format, formatProps);
+
+    VkImageObj image(m_device);
+    image.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(image.initialized());
+
+    VkImageViewCreateInfo imgViewInfo = {};
+    imgViewInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    imgViewInfo.image = image.handle();
+    imgViewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    imgViewInfo.format = depth_format;
+    imgViewInfo.subresourceRange.layerCount = 1;
+    imgViewInfo.subresourceRange.baseMipLevel = 0;
+    imgViewInfo.subresourceRange.levelCount = 1;
+    imgViewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+    // Can't use depth format for view into color image - Expect INVALID_FORMAT
+    CreateImageViewTest(*this, &imgViewInfo,
+                        "Formats MUST be IDENTICAL unless VK_IMAGE_CREATE_MUTABLE_FORMAT BIT was set on image creation.");
+}
+
+TEST_F(VkLayerTest, CreateImageViewNoMutableFormatBit) {
+    TEST_DESCRIPTION("Create an image view with a different format, when the image does not have MUTABLE_FORMAT bit");
+
+    if (!EnableDeviceProfileLayer()) {
+        printf("%s Couldn't enable device profile layer.\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    PFN_vkSetPhysicalDeviceFormatPropertiesEXT fpvkSetPhysicalDeviceFormatPropertiesEXT = nullptr;
+    PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT = nullptr;
+
+    // Load required functions
+    if (!LoadDeviceProfileLayer(fpvkSetPhysicalDeviceFormatPropertiesEXT, fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT)) {
+        printf("%s Required extensions are not present.\n", kSkipPrefix);
+        return;
+    }
+
+    VkImageObj image(m_device);
+    image.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(image.initialized());
+
+    VkFormatProperties formatProps;
+
+    fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_B8G8R8A8_UINT, &formatProps);
+    formatProps.optimalTilingFeatures |= VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT;
+    fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_B8G8R8A8_UINT, formatProps);
+
+    VkImageViewCreateInfo imgViewInfo = {};
+    imgViewInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    imgViewInfo.image = image.handle();
+    imgViewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    imgViewInfo.format = VK_FORMAT_B8G8R8A8_UINT;
+    imgViewInfo.subresourceRange.layerCount = 1;
+    imgViewInfo.subresourceRange.baseMipLevel = 0;
+    imgViewInfo.subresourceRange.levelCount = 1;
+    imgViewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+    // Same compatibility class but no MUTABLE_FORMAT bit - Expect
+    // VIEW_CREATE_ERROR
+    CreateImageViewTest(*this, &imgViewInfo, "VUID-VkImageViewCreateInfo-image-01019");
+}
+
+TEST_F(VkLayerTest, CreateImageViewDifferentClass) {
+    TEST_DESCRIPTION("Passing bad parameters to CreateImageView");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    if (!(m_device->format_properties(VK_FORMAT_R8_UINT).optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT)) {
+        printf("%s Device does not support R8_UINT as color attachment; skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    VkImageCreateInfo mutImgInfo = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+                                    nullptr,
+                                    VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
+                                    VK_IMAGE_TYPE_2D,
+                                    VK_FORMAT_R8_UINT,
+                                    {128, 128, 1},
+                                    1,
+                                    1,
+                                    VK_SAMPLE_COUNT_1_BIT,
+                                    VK_IMAGE_TILING_OPTIMAL,
+                                    VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+                                    VK_SHARING_MODE_EXCLUSIVE,
+                                    0,
+                                    nullptr,
+                                    VK_IMAGE_LAYOUT_UNDEFINED};
+    VkImageObj mutImage(m_device);
+    mutImage.init(&mutImgInfo);
+    ASSERT_TRUE(mutImage.initialized());
+
+    VkImageViewCreateInfo imgViewInfo = {};
+    imgViewInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    imgViewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    imgViewInfo.format = VK_FORMAT_B8G8R8A8_UNORM;
+    imgViewInfo.subresourceRange.layerCount = 1;
+    imgViewInfo.subresourceRange.baseMipLevel = 0;
+    imgViewInfo.subresourceRange.levelCount = 1;
+    imgViewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    imgViewInfo.image = mutImage.handle();
+
+    CreateImageViewTest(*this, &imgViewInfo, "VUID-VkImageViewCreateInfo-image-01018");
+}
+
+TEST_F(VkLayerTest, MultiplaneIncompatibleViewFormat) {
+    TEST_DESCRIPTION("Positive/negative tests of multiplane imageview format compatibility");
+
+    // Enable KHR multiplane req'd extensions
+    bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
+                                                    VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION);
+    if (mp_extensions) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+    if (mp_extensions) {
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+    } else {
+        printf("%s test requires KHR multiplane extensions, not available.  Skipping.\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    VkImageCreateInfo ci = {};
+    ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    ci.pNext = NULL;
+    ci.flags = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
+    ci.imageType = VK_IMAGE_TYPE_2D;
+    ci.format = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM;
+    ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+    ci.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+    ci.extent = {128, 128, 1};
+    ci.mipLevels = 1;
+    ci.arrayLayers = 1;
+    ci.samples = VK_SAMPLE_COUNT_1_BIT;
+    ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+    // Verify format
+    VkFormatFeatureFlags features = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT;
+    bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), ci, features);
+    if (!supported) {
+        printf("%s Multiplane image format not supported.  Skipping test.\n", kSkipPrefix);
+        return;
+    }
+
+    VkImageObj image_obj(m_device);
+    image_obj.init(&ci);
+    ASSERT_TRUE(image_obj.initialized());
+
+    VkImageViewCreateInfo ivci = {};
+    ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    ivci.image = image_obj.image();
+    ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    ivci.format = VK_FORMAT_R8_SNORM;  // Compat is VK_FORMAT_R8_UNORM
+    ivci.subresourceRange.layerCount = 1;
+    ivci.subresourceRange.baseMipLevel = 0;
+    ivci.subresourceRange.levelCount = 1;
+    ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_PLANE_1_BIT;
+
+    // Incompatible format error
+    CreateImageViewTest(*this, &ivci, "VUID-VkImageViewCreateInfo-image-01586");
+
+    // Correct format succeeds
+    ivci.format = VK_FORMAT_R8_UNORM;
+    CreateImageViewTest(*this, &ivci);
+
+    // Try a multiplane imageview
+    ivci.format = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM;
+    ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    CreateImageViewTest(*this, &ivci);
+}
+
+TEST_F(VkLayerTest, CreateImageViewInvalidSubresourceRange) {
+    TEST_DESCRIPTION("Passing bad image subrange to CreateImageView");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkImageObj image(m_device);
+    image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
+    ASSERT_TRUE(image.create_info().arrayLayers == 1);
+    ASSERT_TRUE(image.initialized());
+
+    VkImageViewCreateInfo img_view_info_template = {};
+    img_view_info_template.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    img_view_info_template.image = image.handle();
+    img_view_info_template.viewType = VK_IMAGE_VIEW_TYPE_2D_ARRAY;
+    img_view_info_template.format = image.format();
+    // subresourceRange to be filled later for the purposes of this test
+    img_view_info_template.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    img_view_info_template.subresourceRange.baseMipLevel = 0;
+    img_view_info_template.subresourceRange.levelCount = 0;
+    img_view_info_template.subresourceRange.baseArrayLayer = 0;
+    img_view_info_template.subresourceRange.layerCount = 0;
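+    // Each case below copies this template and assigns subresourceRange wholesale, so the
+    // zero counts above are never used directly.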
+
+    // Try baseMipLevel >= image.mipLevels with VK_REMAINING_MIP_LEVELS
+    {
+        const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 1, VK_REMAINING_MIP_LEVELS, 0, 1};
+        VkImageViewCreateInfo img_view_info = img_view_info_template;
+        img_view_info.subresourceRange = range;
+        CreateImageViewTest(*this, &img_view_info, "VUID-VkImageViewCreateInfo-subresourceRange-01478");
+    }
+
+    // Try baseMipLevel >= image.mipLevels without VK_REMAINING_MIP_LEVELS
+    {
+        const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 0, 1};
+        VkImageViewCreateInfo img_view_info = img_view_info_template;
+        img_view_info.subresourceRange = range;
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-subresourceRange-01718");
+        CreateImageViewTest(*this, &img_view_info, "VUID-VkImageViewCreateInfo-subresourceRange-01478");
+    }
+
+    // Try levelCount = 0
+    {
+        const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 0, 1};
+        VkImageViewCreateInfo img_view_info = img_view_info_template;
+        img_view_info.subresourceRange = range;
+        CreateImageViewTest(*this, &img_view_info, "VUID-VkImageViewCreateInfo-subresourceRange-01718");
+    }
+
+    // Try baseMipLevel + levelCount > image.mipLevels
+    {
+        const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 2, 0, 1};
+        VkImageViewCreateInfo img_view_info = img_view_info_template;
+        img_view_info.subresourceRange = range;
+        CreateImageViewTest(*this, &img_view_info, "VUID-VkImageViewCreateInfo-subresourceRange-01718");
+    }
+
+    // These tests rely on the Maintenance1 extension not being enabled, and are only valid on Vulkan 1.0
+    if (m_device->props.apiVersion < VK_API_VERSION_1_1) {
+        // Try baseArrayLayer >= image.arrayLayers with VK_REMAINING_ARRAY_LAYERS
+        {
+            const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1, VK_REMAINING_ARRAY_LAYERS};
+            VkImageViewCreateInfo img_view_info = img_view_info_template;
+            img_view_info.subresourceRange = range;
+            CreateImageViewTest(*this, &img_view_info, "VUID-VkImageViewCreateInfo-subresourceRange-01480");
+        }
+
+        // Try baseArrayLayer >= image.arrayLayers without VK_REMAINING_ARRAY_LAYERS
+        {
+            const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1, 1};
+            VkImageViewCreateInfo img_view_info = img_view_info_template;
+            img_view_info.subresourceRange = range;
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                                 "VUID-VkImageViewCreateInfo-subresourceRange-01719");
+            CreateImageViewTest(*this, &img_view_info, "VUID-VkImageViewCreateInfo-subresourceRange-01480");
+        }
+
+        // Try layerCount = 0
+        {
+            const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 0};
+            VkImageViewCreateInfo img_view_info = img_view_info_template;
+            img_view_info.subresourceRange = range;
+            CreateImageViewTest(*this, &img_view_info, "VUID-VkImageViewCreateInfo-subresourceRange-01719");
+        }
+
+        // Try baseArrayLayer + layerCount > image.arrayLayers
+        {
+            const VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 2};
+            VkImageViewCreateInfo img_view_info = img_view_info_template;
+            img_view_info.subresourceRange = range;
+            CreateImageViewTest(*this, &img_view_info, "VUID-VkImageViewCreateInfo-subresourceRange-01719");
+        }
+    }
+}
+
+TEST_F(VkLayerTest, CreateImageMiscErrors) {
+    TEST_DESCRIPTION("Misc leftover valid usage errors in VkImageCreateInfo struct");
+
+    VkPhysicalDeviceFeatures features{};
+    ASSERT_NO_FATAL_FAILURE(Init(&features));
+
+    VkImageCreateInfo tmp_img_ci = {};
+    tmp_img_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    tmp_img_ci.flags = 0;                          // presumably any is supported
+    tmp_img_ci.imageType = VK_IMAGE_TYPE_2D;       // any is supported
+    tmp_img_ci.format = VK_FORMAT_R8G8B8A8_UNORM;  // has mandatory support for all usages
+    tmp_img_ci.extent = {64, 64, 1};               // limit is 256 for 3D, or 4096
+    tmp_img_ci.mipLevels = 1;                      // any is supported
+    tmp_img_ci.arrayLayers = 1;                    // limit is 256
+    tmp_img_ci.samples = VK_SAMPLE_COUNT_1_BIT;    // needs to be 1 if TILING_LINEAR
+    // if VK_IMAGE_TILING_LINEAR, imageType must be 2D, usage must be TRANSFER, and levels, layers, and samples must all be 1
+    tmp_img_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+    tmp_img_ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;  // depends on format
+    tmp_img_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    const VkImageCreateInfo safe_image_ci = tmp_img_ci;
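+    // safe_image_ci is a known-good baseline; each block below copies it and changes just the
+    // fields needed to trigger the targeted validation errors.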
+
+    ASSERT_VK_SUCCESS(GPDIFPHelper(gpu(), &safe_image_ci));
+
+    {
+        VkImageCreateInfo image_ci = safe_image_ci;
+        image_ci.sharingMode = VK_SHARING_MODE_CONCURRENT;
+        image_ci.queueFamilyIndexCount = 2;
+        image_ci.pQueueFamilyIndices = nullptr;
+        CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-sharingMode-00941");
+    }
+
+    {
+        VkImageCreateInfo image_ci = safe_image_ci;
+        image_ci.sharingMode = VK_SHARING_MODE_CONCURRENT;
+        image_ci.queueFamilyIndexCount = 1;
+        const uint32_t queue_family = 0;
+        image_ci.pQueueFamilyIndices = &queue_family;
+        CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-sharingMode-00942");
+    }
+
+    {
+        VkImageCreateInfo image_ci = safe_image_ci;
+        image_ci.format = VK_FORMAT_UNDEFINED;
+        CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-format-00943");
+    }
+
+    {
+        VkImageCreateInfo image_ci = safe_image_ci;
+        image_ci.flags = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
+        image_ci.arrayLayers = 6;
+        image_ci.imageType = VK_IMAGE_TYPE_1D;
+        m_errorMonitor->SetUnexpectedError("VUID-VkImageCreateInfo-imageType-00954");
+        image_ci.extent = {64, 1, 1};
+        CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-flags-00949");
+
+        image_ci = safe_image_ci;
+        image_ci.flags = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
+        image_ci.imageType = VK_IMAGE_TYPE_3D;
+        m_errorMonitor->SetUnexpectedError("VUID-VkImageCreateInfo-imageType-00954");
+        image_ci.extent = {4, 4, 4};
+        CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-flags-00949");
+
+        image_ci = safe_image_ci;
+        image_ci.flags = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
+        image_ci.imageType = VK_IMAGE_TYPE_2D;
+        image_ci.extent = {8, 6, 1};
+        image_ci.arrayLayers = 6;
+        CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-imageType-00954");
+
+        image_ci = safe_image_ci;
+        image_ci.flags = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
+        image_ci.imageType = VK_IMAGE_TYPE_2D;
+        image_ci.extent = {8, 8, 1};
+        image_ci.arrayLayers = 4;
+        CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-imageType-00954");
+    }
+
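+    // Multisampled images must be 2D, not cube-compatible, optimally tiled, and have a single mip level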
+    {
+        VkImageCreateInfo image_ci = safe_image_ci;
+        image_ci.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;  // always has 4 samples support
+        image_ci.samples = VK_SAMPLE_COUNT_4_BIT;
+        image_ci.imageType = VK_IMAGE_TYPE_3D;
+        image_ci.extent = {4, 4, 4};
+        CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-samples-02257");
+
+        image_ci = safe_image_ci;
+        image_ci.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;  // always has 4 samples support
+        image_ci.samples = VK_SAMPLE_COUNT_4_BIT;
+        image_ci.flags = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
+        image_ci.arrayLayers = 6;
+        CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-samples-02257");
+
+        image_ci = safe_image_ci;
+        image_ci.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;  // always has 4 samples support
+        image_ci.samples = VK_SAMPLE_COUNT_4_BIT;
+        image_ci.tiling = VK_IMAGE_TILING_LINEAR;
+        CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-samples-02257");
+
+        image_ci = safe_image_ci;
+        image_ci.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;  // always has 4 samples support
+        image_ci.samples = VK_SAMPLE_COUNT_4_BIT;
+        image_ci.mipLevels = 2;
+        CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-samples-02257");
+    }
+
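+    // TRANSIENT_ATTACHMENT usage may only be combined with attachment usages, and needs at least one of them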
+    {
+        VkImageCreateInfo image_ci = safe_image_ci;
+        image_ci.usage = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+        image_ci.usage |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+        CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-usage-00963");
+
+        image_ci.usage = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT;
+        CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-usage-00966");
+
+        image_ci.usage = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT;
+        image_ci.usage |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-usage-00963");
+        CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-usage-00966");
+    }
+
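+    // SPARSE_BINDING requires the sparseBinding feature, which is disabled by the zeroed feature struct above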
+    {
+        VkImageCreateInfo image_ci = safe_image_ci;
+        image_ci.flags = VK_IMAGE_CREATE_SPARSE_BINDING_BIT;
+        CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-flags-00969");
+    }
+
+    // initialLayout is not VK_IMAGE_LAYOUT_UNDEFINED or VK_IMAGE_LAYOUT_PREINITIALIZED
+    {
+        VkImageCreateInfo image_ci = safe_image_ci;
+        image_ci.initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+        CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-initialLayout-00993");
+    }
+}
+
+TEST_F(VkLayerTest, CreateImageMinLimitsViolation) {
+    TEST_DESCRIPTION("Create invalid image with invalid parameters violation minimum limit, such as being zero.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkImage null_image;  // throwaway target for all the vk::CreateImage calls
+
+    VkImageCreateInfo tmp_img_ci = {};
+    tmp_img_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    tmp_img_ci.flags = 0;                          // presumably any is supported
+    tmp_img_ci.imageType = VK_IMAGE_TYPE_2D;       // any is supported
+    tmp_img_ci.format = VK_FORMAT_R8G8B8A8_UNORM;  // has mandatory support for all usages
+    tmp_img_ci.extent = {1, 1, 1};                 // limit is 256 for 3D, or 4096
+    tmp_img_ci.mipLevels = 1;                      // any is supported
+    tmp_img_ci.arrayLayers = 1;                    // limit is 256
+    tmp_img_ci.samples = VK_SAMPLE_COUNT_1_BIT;    // needs to be 1 if TILING_LINEAR
+    // if VK_IMAGE_TILING_LINEAR, imageType must be 2D, usage must be TRANSFER, and levels, layers, and samples must all be 1
+    tmp_img_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+    tmp_img_ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;  // depends on format
+    tmp_img_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    const VkImageCreateInfo safe_image_ci = tmp_img_ci;
+
+    enum Dimension { kWidth = 0x1, kHeight = 0x2, kDepth = 0x4 };
+
+    for (underlying_type<Dimension>::type bad_dimensions = 0x1; bad_dimensions < 0x8; ++bad_dimensions) {
+        VkExtent3D extent = {1, 1, 1};
+
+        if (bad_dimensions & kWidth) {
+            extent.width = 0;
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-extent-00944");
+        }
+
+        if (bad_dimensions & kHeight) {
+            extent.height = 0;
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-extent-00945");
+        }
+
+        if (bad_dimensions & kDepth) {
+            extent.depth = 0;
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-extent-00946");
+        }
+
+        VkImageCreateInfo bad_image_ci = safe_image_ci;
+        bad_image_ci.imageType = VK_IMAGE_TYPE_3D;  // has to be 3D; otherwise a zero depth would trigger the "depth must be 1" error instead
+        bad_image_ci.extent = extent;
+
+        vk::CreateImage(m_device->device(), &bad_image_ci, NULL, &null_image);
+
+        m_errorMonitor->VerifyFound();
+    }
+
+    {
+        VkImageCreateInfo bad_image_ci = safe_image_ci;
+        bad_image_ci.mipLevels = 0;
+        CreateImageTest(*this, &bad_image_ci, "VUID-VkImageCreateInfo-mipLevels-00947");
+    }
+
+    {
+        VkImageCreateInfo bad_image_ci = safe_image_ci;
+        bad_image_ci.arrayLayers = 0;
+        CreateImageTest(*this, &bad_image_ci, "VUID-VkImageCreateInfo-arrayLayers-00948");
+    }
+
+    {
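+    // CUBE_COMPATIBLE images must be 2D with square extent and at least 6 array layers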
+        VkImageCreateInfo bad_image_ci = safe_image_ci;
+        bad_image_ci.flags = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
+        bad_image_ci.arrayLayers = 5;
+        CreateImageTest(*this, &bad_image_ci, "VUID-VkImageCreateInfo-imageType-00954");
+
+        bad_image_ci.arrayLayers = 6;
+        bad_image_ci.extent = {64, 63, 1};
+        CreateImageTest(*this, &bad_image_ci, "VUID-VkImageCreateInfo-imageType-00954");
+    }
+
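+    // 1D images require height and depth of 1; 2D images require depth of 1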
+    {
+        VkImageCreateInfo bad_image_ci = safe_image_ci;
+        bad_image_ci.imageType = VK_IMAGE_TYPE_1D;
+        bad_image_ci.extent = {64, 2, 1};
+        CreateImageTest(*this, &bad_image_ci, "VUID-VkImageCreateInfo-imageType-00956");
+
+        bad_image_ci.imageType = VK_IMAGE_TYPE_1D;
+        bad_image_ci.extent = {64, 1, 2};
+        CreateImageTest(*this, &bad_image_ci, "VUID-VkImageCreateInfo-imageType-00956");
+
+        bad_image_ci.imageType = VK_IMAGE_TYPE_2D;
+        bad_image_ci.extent = {64, 64, 2};
+        CreateImageTest(*this, &bad_image_ci, "VUID-VkImageCreateInfo-imageType-00957");
+
+        bad_image_ci.imageType = VK_IMAGE_TYPE_2D;
+        bad_image_ci.flags = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
+        bad_image_ci.arrayLayers = 6;
+        bad_image_ci.extent = {64, 64, 2};
+        CreateImageTest(*this, &bad_image_ci, "VUID-VkImageCreateInfo-imageType-00957");
+    }
+
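+    // 3D images must have arrayLayers == 1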
+    {
+        VkImageCreateInfo bad_image_ci = safe_image_ci;
+        bad_image_ci.imageType = VK_IMAGE_TYPE_3D;
+        bad_image_ci.arrayLayers = 2;
+        CreateImageTest(*this, &bad_image_ci, "VUID-VkImageCreateInfo-imageType-00961");
+    }
+}
+
+TEST_F(VkLayerTest, CreateImageMaxLimitsViolation) {
+    TEST_DESCRIPTION("Create invalid image with invalid parameters exceeding physical device limits.");
+
+    // Check for VK_KHR_get_physical_device_properties2
+    bool push_physical_device_properties_2_support =
+        InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (push_physical_device_properties_2_support) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    bool push_fragment_density_support = false;
+
+    if (push_physical_device_properties_2_support) {
+        push_fragment_density_support = DeviceExtensionSupported(gpu(), nullptr, VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME);
+        if (push_fragment_density_support) m_device_extension_names.push_back(VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME);
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, 0));
+
+    VkImageCreateInfo tmp_img_ci = {};
+    tmp_img_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    tmp_img_ci.flags = 0;                          // presumably any is supported
+    tmp_img_ci.imageType = VK_IMAGE_TYPE_2D;       // any is supported
+    tmp_img_ci.format = VK_FORMAT_R8G8B8A8_UNORM;  // has mandatory support for all usages
+    tmp_img_ci.extent = {1, 1, 1};                 // limit is 256 for 3D, or 4096
+    tmp_img_ci.mipLevels = 1;                      // any is supported
+    tmp_img_ci.arrayLayers = 1;                    // limit is 256
+    tmp_img_ci.samples = VK_SAMPLE_COUNT_1_BIT;    // needs to be 1 if TILING_LINEAR
+    // if VK_IMAGE_TILING_LINEAR, imageType must be 2D, usage must be TRANSFER, and levels, layers, and samples must all be 1
+    tmp_img_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+    tmp_img_ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;  // depends on format
+    tmp_img_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    const VkImageCreateInfo safe_image_ci = tmp_img_ci;
+
+    ASSERT_VK_SUCCESS(GPDIFPHelper(gpu(), &safe_image_ci));
+
+    const VkPhysicalDeviceLimits &dev_limits = m_device->props.limits;
+
+    {
+        VkImageCreateInfo image_ci = safe_image_ci;
+        image_ci.extent = {8, 8, 1};
+        image_ci.mipLevels = 4 + 1;  // 4 = log2(8) + 1
+        CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-mipLevels-00958");
+
+        image_ci.extent = {8, 15, 1};
+        image_ci.mipLevels = 4 + 1;  // 4 = floor(log2(15)) + 1
+        CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-mipLevels-00958");
+    }
+
+    {
+        VkImageCreateInfo image_ci = safe_image_ci;
+        image_ci.tiling = VK_IMAGE_TILING_LINEAR;
+        image_ci.extent = {64, 64, 1};
+        image_ci.format = FindFormatLinearWithoutMips(gpu(), image_ci);
+        image_ci.mipLevels = 2;
+
+        if (image_ci.format != VK_FORMAT_UNDEFINED) {
+            CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-mipLevels-02255");
+        } else {
+            printf("%s Cannot find a format to test maxMipLevels limit; skipping part of test.\n", kSkipPrefix);
+        }
+    }
+
+    {
+        VkImageCreateInfo image_ci = safe_image_ci;
+
+        VkImageFormatProperties img_limits;
+        ASSERT_VK_SUCCESS(GPDIFPHelper(gpu(), &image_ci, &img_limits));
+
+        if (img_limits.maxArrayLayers != UINT32_MAX) {
+            image_ci.arrayLayers = img_limits.maxArrayLayers + 1;
+            CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-arrayLayers-02256");
+        } else {
+            printf("%s VkImageFormatProperties::maxArrayLayers is already UINT32_MAX; skipping part of test.\n", kSkipPrefix);
+        }
+    }
+
+    {
+        VkImageCreateInfo image_ci = safe_image_ci;
+        bool found = FindFormatWithoutSamples(gpu(), image_ci);
+
+        if (found) {
+            CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-samples-02258");
+        } else {
+            printf("%s Could not find a format with some unsupported samples; skipping part of test.\n", kSkipPrefix);
+        }
+    }
+
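+    // Images with attachment usage must not exceed the framebuffer dimension limits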
+    {
+        VkImageCreateInfo image_ci = safe_image_ci;
+        image_ci.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;  // (any attachment bit)
+
+        VkImageFormatProperties img_limits;
+        ASSERT_VK_SUCCESS(GPDIFPHelper(gpu(), &image_ci, &img_limits));
+
+        if (dev_limits.maxFramebufferWidth != UINT32_MAX) {
+            image_ci.extent = {dev_limits.maxFramebufferWidth + 1, 64, 1};
+            CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-usage-00964");
+        } else {
+            printf("%s VkPhysicalDeviceLimits::maxFramebufferWidth is already UINT32_MAX; skipping part of test.\n", kSkipPrefix);
+        }
+
+        if (dev_limits.maxFramebufferHeight != UINT32_MAX) {
+            image_ci.usage = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;  // try different one too
+            image_ci.extent = {64, dev_limits.maxFramebufferHeight + 1, 1};
+            CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-usage-00965");
+        } else {
+            printf("%s VkPhysicalDeviceLimits::maxFramebufferHeight is already UINT32_MAX; skipping part of test.\n", kSkipPrefix);
+        }
+    }
+
+    {
+        if (!push_fragment_density_support) {
+            printf("%s VK_EXT_fragment_density_map Extension not supported, skipping tests\n", kSkipPrefix);
+        } else {
+            VkImageCreateInfo image_ci = safe_image_ci;
+            image_ci.usage = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT;
+            VkImageFormatProperties img_limits;
+            ASSERT_VK_SUCCESS(GPDIFPHelper(gpu(), &image_ci, &img_limits));
+
+            image_ci.extent = {dev_limits.maxFramebufferWidth + 1, 64, 1};
+            CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-usage-02559");
+
+            image_ci.extent = {64, dev_limits.maxFramebufferHeight + 1, 1};
+            CreateImageTest(*this, &image_ci, "VUID-VkImageCreateInfo-usage-02560");
+        }
+    }
+}
+
+TEST_F(VkLayerTest, MultiplaneImageSamplerConversionMismatch) {
+    TEST_DESCRIPTION(
+        "Create sampler with ycbcr conversion and use with an image created without ycrcb conversion or immutable sampler");
+
+    // Enable KHR multiplane req'd extensions
+    bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
+                                                    VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION);
+    if (mp_extensions) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+    SetTargetApiVersion(VK_API_VERSION_1_1);
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+    if (mp_extensions) {
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+    } else {
+        printf("%s test requires KHR multiplane extensions, not available.  Skipping.\n", kSkipPrefix);
+        return;
+    }
+
+    // Enable Ycbcr Conversion Features
+    VkPhysicalDeviceSamplerYcbcrConversionFeatures ycbcr_features = {};
+    ycbcr_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES;
+    ycbcr_features.samplerYcbcrConversion = VK_TRUE;
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &ycbcr_features));
+
+    PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionFunction = nullptr;
+    PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionFunction = nullptr;
+
+    if (DeviceValidationVersion() >= VK_API_VERSION_1_1) {
+        vkCreateSamplerYcbcrConversionFunction = vk::CreateSamplerYcbcrConversion;
+        vkDestroySamplerYcbcrConversionFunction = vk::DestroySamplerYcbcrConversion;
+    } else {
+        vkCreateSamplerYcbcrConversionFunction =
+            (PFN_vkCreateSamplerYcbcrConversionKHR)vk::GetDeviceProcAddr(m_device->handle(), "vkCreateSamplerYcbcrConversionKHR");
+        vkDestroySamplerYcbcrConversionFunction =
+            (PFN_vkDestroySamplerYcbcrConversionKHR)vk::GetDeviceProcAddr(m_device->handle(), "vkDestroySamplerYcbcrConversionKHR");
+    }
+
+    if (!vkCreateSamplerYcbcrConversionFunction || !vkDestroySamplerYcbcrConversionFunction) {
+        printf("%s Did not find required device extension %s; test skipped.\n", kSkipPrefix,
+               VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    const VkImageCreateInfo ci = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+                                  NULL,
+                                  0,
+                                  VK_IMAGE_TYPE_2D,
+                                  VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR,
+                                  {128, 128, 1},
+                                  1,
+                                  1,
+                                  VK_SAMPLE_COUNT_1_BIT,
+                                  VK_IMAGE_TILING_LINEAR,
+                                  VK_IMAGE_USAGE_SAMPLED_BIT,
+                                  VK_SHARING_MODE_EXCLUSIVE,
+                                  VK_IMAGE_LAYOUT_UNDEFINED};
+
+    // Verify formats
+    bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), ci, VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT);
+    if (!supported) {
+        printf("%s Multiplane image format not supported.  Skipping test.\n", kSkipPrefix);
+        return;
+    }
+
+    // Create Ycbcr conversion
+    VkSamplerYcbcrConversionCreateInfo ycbcr_create_info = {VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO,
+                                                            NULL,
+                                                            VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR,
+                                                            VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY,
+                                                            VK_SAMPLER_YCBCR_RANGE_ITU_FULL,
+                                                            {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
+                                                             VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY},
+                                                            VK_CHROMA_LOCATION_COSITED_EVEN,
+                                                            VK_CHROMA_LOCATION_COSITED_EVEN,
+                                                            VK_FILTER_NEAREST,
+                                                            false};
+    VkSamplerYcbcrConversion conversions[2];
+    vkCreateSamplerYcbcrConversionFunction(m_device->handle(), &ycbcr_create_info, nullptr, &conversions[0]);
+    ycbcr_create_info.components.r = VK_COMPONENT_SWIZZLE_ZERO;  // Just anything different than above
+    vkCreateSamplerYcbcrConversionFunction(m_device->handle(), &ycbcr_create_info, nullptr, &conversions[1]);
+
+    VkSamplerYcbcrConversionInfo ycbcr_info = {};
+    ycbcr_info.sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO;
+    ycbcr_info.conversion = conversions[0];
+
+    // Create a sampler using conversion
+    VkSamplerCreateInfo sci = SafeSaneSamplerCreateInfo();
+    sci.pNext = &ycbcr_info;
+    // Create two samplers with two different conversions, such that one will mismatch.
+    // The second sampler is made to mismatch so we can check whether the log reports the second sampler rather than the first.
+    VkSampler samplers[2];
+    VkResult err = vk::CreateSampler(m_device->device(), &sci, NULL, &samplers[0]);
+    ASSERT_VK_SUCCESS(err);
+    ycbcr_info.conversion = conversions[1];  // Need two samplers with different conversions
+    err = vk::CreateSampler(m_device->device(), &sci, NULL, &samplers[1]);
+    ASSERT_VK_SUCCESS(err);
+
+    // Create an image without a Ycbcr conversion
+    VkImageObj mpimage(m_device);
+    mpimage.init(&ci);
+
+    VkImageView view;
+    VkImageViewCreateInfo ivci = {};
+    ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    ycbcr_info.conversion = conversions[0];  // Need two samplers with different conversions
+    ivci.pNext = &ycbcr_info;
+    ivci.image = mpimage.handle();
+    ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    ivci.format = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR;
+    ivci.subresourceRange.layerCount = 1;
+    ivci.subresourceRange.baseMipLevel = 0;
+    ivci.subresourceRange.levelCount = 1;
+    ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    vk::CreateImageView(m_device->device(), &ivci, nullptr, &view);
+
+    // Use the image and sampler together in a descriptor set
+    OneOffDescriptorSet descriptor_set(m_device,
+                                       {
+                                           {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 2, VK_SHADER_STAGE_ALL, samplers},
+                                       });
+
+    // Use the same image view twice with the same sampler; the *second* entry is mismatched against the *second* immutable sampler
+    VkDescriptorImageInfo image_infos[2];
+    image_infos[0] = {};
+    image_infos[0].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+    image_infos[0].imageView = view;
+    image_infos[0].sampler = samplers[0];
+    image_infos[1] = image_infos[0];
+
+    // Update the descriptor set expecting to get an error
+    VkWriteDescriptorSet descriptor_write = {};
+    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_write.dstSet = descriptor_set.set_;
+    descriptor_write.dstBinding = 0;
+    descriptor_write.descriptorCount = 2;
+    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+    descriptor_write.pImageInfo = image_infos;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-01948");
+    vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+    m_errorMonitor->VerifyFound();
+
+    // pImmutableSamplers = nullptr causes error VUID-VkWriteDescriptorSet-descriptorType-02738,
+    // because if the pNext chain includes a VkSamplerYcbcrConversionInfo, the sampler has to be an immutable sampler.
+    OneOffDescriptorSet descriptor_set_1947(m_device,
+                                            {
+                                                {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                            });
+    descriptor_write.dstSet = descriptor_set_1947.set_;
+    descriptor_write.descriptorCount = 1;
+    descriptor_write.pImageInfo = &image_infos[0];
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-02738");
+    vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+    m_errorMonitor->VerifyFound();
+
+    vkDestroySamplerYcbcrConversionFunction(m_device->device(), conversions[0], nullptr);
+    vkDestroySamplerYcbcrConversionFunction(m_device->device(), conversions[1], nullptr);
+    vk::DestroyImageView(m_device->device(), view, NULL);
+    vk::DestroySampler(m_device->device(), samplers[0], nullptr);
+    vk::DestroySampler(m_device->device(), samplers[1], nullptr);
+}
+
+TEST_F(VkLayerTest, DepthStencilImageViewWithColorAspectBitError) {
+    // Create a single image descriptor and cause it to first hit an error due
+    //  to using a DS format, then cause it to hit an error due to COLOR_BIT not
+    //  being set in the aspect mask.
+    // The image format check comes second in validation, so we trigger it first;
+    //  when we then cause the aspect failure, the bad format check is preempted.
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "Combination depth/stencil image formats can have only the ");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    auto depth_format = FindSupportedDepthStencilFormat(gpu());
+    if (!depth_format) {
+        printf("%s Couldn't find depth stencil format.\n", kSkipPrefix);
+        return;
+    }
+
+    VkImageObj image_bad(m_device);
+    VkImageObj image_good(m_device);
+    // One bad format and one good format for Color attachment
+    const VkFormat tex_format_bad = depth_format;
+    const VkFormat tex_format_good = VK_FORMAT_B8G8R8A8_UNORM;
+    const int32_t tex_width = 32;
+    const int32_t tex_height = 32;
+
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = tex_format_bad;
+    image_create_info.extent.width = tex_width;
+    image_create_info.extent.height = tex_height;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
+    image_create_info.flags = 0;
+
+    image_bad.init(&image_create_info);
+
+    image_create_info.format = tex_format_good;
+    image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+    image_good.init(&image_create_info);
+
+    VkImageViewCreateInfo image_view_create_info = {};
+    image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    image_view_create_info.image = image_bad.handle();
+    image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    image_view_create_info.format = tex_format_bad;
+    image_view_create_info.subresourceRange.baseArrayLayer = 0;
+    image_view_create_info.subresourceRange.baseMipLevel = 0;
+    image_view_create_info.subresourceRange.layerCount = 1;
+    image_view_create_info.subresourceRange.levelCount = 1;
+    image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT;
+
+    VkImageView view;
+    vk::CreateImageView(m_device->device(), &image_view_create_info, NULL, &view);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, ExtensionNotEnabled) {
+    TEST_DESCRIPTION("Validate that using an API from an unenabled extension returns an error");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    // Required extensions except VK_KHR_GET_MEMORY_REQUIREMENTS_2 -- to create the needed error
+    std::vector<const char *> required_device_extensions = {VK_KHR_MAINTENANCE1_EXTENSION_NAME, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME,
+                                                            VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME};
+    for (auto dev_ext : required_device_extensions) {
+        if (DeviceExtensionSupported(gpu(), nullptr, dev_ext)) {
+            m_device_extension_names.push_back(dev_ext);
+        } else {
+            printf("%s Did not find required device extension %s; skipped.\n", kSkipPrefix, dev_ext);
+            break;
+        }
+    }
+
+    // Need to ignore this error to get to the one we're testing
+    m_errorMonitor->SetUnexpectedError("VUID-vkCreateDevice-ppEnabledExtensionNames-01387");
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    // Find address of extension API
+    auto vkCreateSamplerYcbcrConversionKHR =
+        (PFN_vkCreateSamplerYcbcrConversionKHR)vk::GetDeviceProcAddr(m_device->handle(), "vkCreateSamplerYcbcrConversionKHR");
+    if (vkCreateSamplerYcbcrConversionKHR == nullptr) {
+        printf("%s VK_KHR_sampler_ycbcr_conversion not supported by device; skipped.\n", kSkipPrefix);
+        return;
+    }
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-GeneralParameterError-ExtensionNotEnabled");
+    VkSamplerYcbcrConversionCreateInfo ycbcr_info = {VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO,
+                                                     NULL,
+                                                     VK_FORMAT_UNDEFINED,
+                                                     VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY,
+                                                     VK_SAMPLER_YCBCR_RANGE_ITU_FULL,
+                                                     {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
+                                                      VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY},
+                                                     VK_CHROMA_LOCATION_COSITED_EVEN,
+                                                     VK_CHROMA_LOCATION_COSITED_EVEN,
+                                                     VK_FILTER_NEAREST,
+                                                     false};
+    VkSamplerYcbcrConversion conversion;
+    vkCreateSamplerYcbcrConversionKHR(m_device->handle(), &ycbcr_info, nullptr, &conversion);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InvalidCreateBufferSize) {
+    TEST_DESCRIPTION("Attempt to create VkBuffer with size of zero");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkBufferCreateInfo info = {};
+    info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    info.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
+    info.size = 0;
+    CreateBufferTest(*this, &info, "VUID-VkBufferCreateInfo-size-00912");
+}
+
+TEST_F(VkLayerTest, DuplicateValidPNextStructures) {
+    TEST_DESCRIPTION("Create a pNext chain containing valid structures, but with a duplicate structure type");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME);
+    } else {
+        printf("%s VK_NV_dedicated_allocation extension not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    // Create two pNext structures which by themselves would be valid
+    VkDedicatedAllocationBufferCreateInfoNV dedicated_buffer_create_info = {};
+    VkDedicatedAllocationBufferCreateInfoNV dedicated_buffer_create_info_2 = {};
+    dedicated_buffer_create_info.sType = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV;
+    dedicated_buffer_create_info.pNext = &dedicated_buffer_create_info_2;
+    dedicated_buffer_create_info.dedicatedAllocation = VK_TRUE;
+
+    dedicated_buffer_create_info_2.sType = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV;
+    dedicated_buffer_create_info_2.pNext = nullptr;
+    dedicated_buffer_create_info_2.dedicatedAllocation = VK_TRUE;
+
+    uint32_t queue_family_index = 0;
+    VkBufferCreateInfo buffer_create_info = {};
+    buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    buffer_create_info.pNext = &dedicated_buffer_create_info;
+    buffer_create_info.size = 1024;
+    buffer_create_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+    buffer_create_info.queueFamilyIndexCount = 1;
+    buffer_create_info.pQueueFamilyIndices = &queue_family_index;
+
+    CreateBufferTest(*this, &buffer_create_info, "chain contains duplicate structure types");
+}
+
+TEST_F(VkLayerTest, DedicatedAllocation) {
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    } else {
+        printf("%s Dedicated allocation extension not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    VkMemoryPropertyFlags mem_flags = 0;
+    const VkDeviceSize resource_size = 1024;
+    auto buffer_info = VkBufferObj::create_info(resource_size, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
+    VkBufferObj buffer;
+    buffer.init_no_mem(*m_device, buffer_info);
+    auto buffer_alloc_info = vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, buffer.memory_requirements(), mem_flags);
+    auto buffer_dedicated_info = lvl_init_struct<VkMemoryDedicatedAllocateInfoKHR>();
+    buffer_dedicated_info.buffer = buffer.handle();
+    buffer_alloc_info.pNext = &buffer_dedicated_info;
+    vk_testing::DeviceMemory dedicated_buffer_memory;
+    dedicated_buffer_memory.init(*m_device, buffer_alloc_info);
+
+    VkBufferObj wrong_buffer;
+    wrong_buffer.init_no_mem(*m_device, buffer_info);
+
+    // Bind with wrong buffer
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-memory-01508");
+    vk::BindBufferMemory(m_device->handle(), wrong_buffer.handle(), dedicated_buffer_memory.handle(), 0);
+    m_errorMonitor->VerifyFound();
+
+    // Bind with non-zero offset (same VUID)
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkBindBufferMemory-memory-01508");  // offset must be zero
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkBindBufferMemory-size-01037");  // offset pushes us past size
+    auto offset = buffer.memory_requirements().alignment;
+    vk::BindBufferMemory(m_device->handle(), buffer.handle(), dedicated_buffer_memory.handle(), offset);
+    m_errorMonitor->VerifyFound();
+
+    // Bind correctly (depends on the "skip" above)
+    m_errorMonitor->ExpectSuccess();
+    vk::BindBufferMemory(m_device->handle(), buffer.handle(), dedicated_buffer_memory.handle(), 0);
+    m_errorMonitor->VerifyNotFound();
+
+    // And for images...
+    VkImageObj image(m_device);
+    VkImageObj wrong_image(m_device);
+    auto image_info = VkImageObj::create_info();
+    image_info.extent.width = resource_size;
+    image_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    image_info.format = VK_FORMAT_R8G8B8A8_UNORM;
+    image.init_no_mem(*m_device, image_info);
+    wrong_image.init_no_mem(*m_device, image_info);
+
+    auto image_dedicated_info = lvl_init_struct<VkMemoryDedicatedAllocateInfoKHR>();
+    image_dedicated_info.image = image.handle();
+    auto image_alloc_info = vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, image.memory_requirements(), mem_flags);
+    image_alloc_info.pNext = &image_dedicated_info;
+    vk_testing::DeviceMemory dedicated_image_memory;
+    dedicated_image_memory.init(*m_device, image_alloc_info);
+
+    // Bind with wrong image
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-memory-01509");
+    vk::BindImageMemory(m_device->handle(), wrong_image.handle(), dedicated_image_memory.handle(), 0);
+    m_errorMonitor->VerifyFound();
+
+    // Bind with non-zero offset (same VUID)
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkBindImageMemory-memory-01509");  // offset must be zero
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkBindImageMemory-size-01049");  // offset pushes us past size
+    auto image_offset = image.memory_requirements().alignment;
+    vk::BindImageMemory(m_device->handle(), image.handle(), dedicated_image_memory.handle(), image_offset);
+    m_errorMonitor->VerifyFound();
+
+    // Bind correctly (depends on the "skip" above)
+    m_errorMonitor->ExpectSuccess();
+    vk::BindImageMemory(m_device->handle(), image.handle(), dedicated_image_memory.handle(), 0);
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkLayerTest, DedicatedAllocationImageAliasing) {
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME) &&
+        DeviceExtensionSupported(gpu(), nullptr, VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    } else {
+        printf("%s Dedicated allocation extension not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+    auto aliasing_features = lvl_init_struct<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>();
+    auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&aliasing_features);
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+    aliasing_features.dedicatedAllocationImageAliasing = VK_TRUE;
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
+
+    VkMemoryPropertyFlags mem_flags = 0;
+    const VkDeviceSize resource_size = 1024;
+
+    VkImageObj image(m_device);
+    VkImageObj identical_image(m_device);
+    auto image_info = VkImageObj::create_info();
+    image_info.extent.width = resource_size;
+    image_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    image_info.format = VK_FORMAT_R8G8B8A8_UNORM;
+    image.init_no_mem(*m_device, image_info);
+    identical_image.init_no_mem(*m_device, image_info);
+
+    auto image_dedicated_info = lvl_init_struct<VkMemoryDedicatedAllocateInfoKHR>();
+    image_dedicated_info.image = image.handle();
+    auto image_alloc_info = vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, image.memory_requirements(), mem_flags);
+    image_alloc_info.pNext = &image_dedicated_info;
+    vk_testing::DeviceMemory dedicated_image_memory;
+    dedicated_image_memory.init(*m_device, image_alloc_info);
+
+    // Bind with different but identical image
+    m_errorMonitor->ExpectSuccess();
+    vk::BindImageMemory(m_device->handle(), identical_image.handle(), dedicated_image_memory.handle(), 0);
+    m_errorMonitor->VerifyNotFound();
+
+    VkImageObj smaller_image(m_device);
+    image_info = VkImageObj::create_info();
+    image_info.extent.width = resource_size - 1;
+    image_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    image_info.format = VK_FORMAT_R8G8B8A8_UNORM;
+    smaller_image.init_no_mem(*m_device, image_info);
+
+    // Bind with a smaller image
+    m_errorMonitor->ExpectSuccess();
+    vk::BindImageMemory(m_device->handle(), smaller_image.handle(), dedicated_image_memory.handle(), 0);
+    m_errorMonitor->VerifyNotFound();
+
+    VkImageObj larger_image(m_device);
+    image_info = VkImageObj::create_info();
+    image_info.extent.width = resource_size + 1;
+    image_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    image_info.format = VK_FORMAT_R8G8B8A8_UNORM;
+    larger_image.init_no_mem(*m_device, image_info);
+
+    // Bind with a larger image (not supported, and not enough memory)
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-memory-02629");
+    if (larger_image.memory_requirements().size > image.memory_requirements().size) {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindImageMemory-size-01049");
+    }
+    vk::BindImageMemory(m_device->handle(), larger_image.handle(), dedicated_image_memory.handle(), 0);
+    m_errorMonitor->VerifyFound();
+
+    // Bind with non-zero offset
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkBindImageMemory-memory-02629");  // offset must be zero
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkBindImageMemory-size-01049");  // offset pushes us past size
+    auto image_offset = image.memory_requirements().alignment;
+    vk::BindImageMemory(m_device->handle(), image.handle(), dedicated_image_memory.handle(), image_offset);
+    m_errorMonitor->VerifyFound();
+
+    // Bind correctly (depends on the "skip" above)
+    m_errorMonitor->ExpectSuccess();
+    vk::BindImageMemory(m_device->handle(), image.handle(), dedicated_image_memory.handle(), 0);
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkLayerTest, CornerSampledImageNV) {
+    TEST_DESCRIPTION("Test VK_NV_corner_sampled_image.");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    std::array<const char *, 1> required_device_extensions = {{VK_NV_CORNER_SAMPLED_IMAGE_EXTENSION_NAME}};
+    for (auto device_extension : required_device_extensions) {
+        if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
+            m_device_extension_names.push_back(device_extension);
+        } else {
+            printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
+            return;
+        }
+    }
+
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+    // Query VkPhysicalDeviceCornerSampledImageFeaturesNV and create a device with the features the physical device reports
+    auto corner_sampled_image_features = lvl_init_struct<VkPhysicalDeviceCornerSampledImageFeaturesNV>();
+    auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&corner_sampled_image_features);
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
+
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_1D;
+    image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
+    image_create_info.extent.width = 2;
+    image_create_info.extent.height = 1;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    image_create_info.queueFamilyIndexCount = 0;
+    image_create_info.pQueueFamilyIndices = NULL;
+    image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    image_create_info.flags = VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV;
+
+    // image type must be 2D or 3D
+    CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-flags-02050");
+
+    // cube/depth not supported
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.extent.height = 2;
+    image_create_info.format = VK_FORMAT_D24_UNORM_S8_UINT;
+    CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-flags-02051");
+
+    image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
+
+    // 2D width/height must be > 1
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.extent.height = 1;
+    CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-flags-02052");
+
+    // 3D width/height/depth must be > 1
+    image_create_info.imageType = VK_IMAGE_TYPE_3D;
+    image_create_info.extent.height = 2;
+    CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-flags-02053");
+
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+
+    // Valid # of mip levels
+    image_create_info.extent = {7, 7, 1};
+    image_create_info.mipLevels = 3;  // 3 = ceil(log2(7))
+    CreateImageTest(*this, &image_create_info);
+
+    image_create_info.extent = {8, 8, 1};
+    image_create_info.mipLevels = 3;  // 3 = ceil(log2(8))
+    CreateImageTest(*this, &image_create_info);
+
+    image_create_info.extent = {9, 9, 1};
+    image_create_info.mipLevels = 3;  // limit is ceil(log2(9)) = 4
+    CreateImageTest(*this, &image_create_info);
+
+    // Invalid # of mip levels
+    image_create_info.extent = {8, 8, 1};
+    image_create_info.mipLevels = 4;  // limit is ceil(log2(8)) = 3
+    CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-mipLevels-00958");
+}
+
+TEST_F(VkLayerTest, CreateYCbCrSampler) {
+    TEST_DESCRIPTION("Verify YCbCr sampler creation.");
+
+    // Test requires API 1.1 or (API 1.0 + SamplerYCbCr extension). Request API 1.1
+    SetTargetApiVersion(VK_API_VERSION_1_1);
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    // In case we don't have API 1.1+, try enabling the extension directly (and its dependencies)
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    VkDevice dev = m_device->device();
+
+    PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionFunction = nullptr;
+    if (DeviceValidationVersion() >= VK_API_VERSION_1_1) {
+        vkCreateSamplerYcbcrConversionFunction = vk::CreateSamplerYcbcrConversion;
+    } else {
+        vkCreateSamplerYcbcrConversionFunction =
+            (PFN_vkCreateSamplerYcbcrConversionKHR)vk::GetDeviceProcAddr(m_device->handle(), "vkCreateSamplerYcbcrConversionKHR");
+    }
+
+    if (!vkCreateSamplerYcbcrConversionFunction) {
+        printf("%s Did not find required device support for YcbcrSamplerConversion; test skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    // Verify we have the requested support
+    bool ycbcr_support = (DeviceExtensionEnabled(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME) ||
+                          (DeviceValidationVersion() >= VK_API_VERSION_1_1));
+    if (!ycbcr_support) {
+        printf("%s Did not find required device extension %s; test skipped.\n", kSkipPrefix,
+               VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+        return;
+    }
+
+    VkSamplerYcbcrConversion ycbcr_conv = VK_NULL_HANDLE;
+    VkSamplerYcbcrConversionCreateInfo sycci = {};
+    sycci.sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO;
+    sycci.format = VK_FORMAT_UNDEFINED;
+    sycci.ycbcrModel = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY;
+    sycci.ycbcrRange = VK_SAMPLER_YCBCR_RANGE_ITU_FULL;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSamplerYcbcrConversionCreateInfo-format-01649");
+    vkCreateSamplerYcbcrConversionFunction(dev, &sycci, NULL, &ycbcr_conv);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, BufferDeviceAddressEXT) {
+    TEST_DESCRIPTION("Test VK_EXT_buffer_device_address.");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    std::array<const char *, 1> required_device_extensions = {{VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME}};
+    for (auto device_extension : required_device_extensions) {
+        if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
+            m_device_extension_names.push_back(device_extension);
+        } else {
+            printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
+            return;
+        }
+    }
+
+    if (DeviceIsMockICD() || DeviceSimulation()) {
+        printf("%s MockICD does not support this feature, skipping tests\n", kSkipPrefix);
+        return;
+    }
+
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+    // Create a device that enables buffer_device_address
+    auto buffer_device_address_features = lvl_init_struct<VkPhysicalDeviceBufferAddressFeaturesEXT>();
+    auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&buffer_device_address_features);
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+    buffer_device_address_features.bufferDeviceAddressCaptureReplay = VK_FALSE;
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT =
+        (PFN_vkGetBufferDeviceAddressEXT)vk::GetDeviceProcAddr(device(), "vkGetBufferDeviceAddressEXT");
+
+    VkBufferCreateInfo buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
+    buffer_create_info.size = sizeof(uint32_t);
+    buffer_create_info.usage = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT;
+    buffer_create_info.flags = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT;
+    CreateBufferTest(*this, &buffer_create_info, "VUID-VkBufferCreateInfo-flags-03338");
+
+    buffer_create_info.flags = 0;
+    VkBufferDeviceAddressCreateInfoEXT addr_ci = {VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT};
+    addr_ci.deviceAddress = 1;
+    buffer_create_info.pNext = &addr_ci;
+    CreateBufferTest(*this, &buffer_create_info, "VUID-VkBufferCreateInfo-deviceAddress-02604");
+
+    buffer_create_info.pNext = nullptr;
+    VkBuffer buffer;
+    VkResult err = vk::CreateBuffer(m_device->device(), &buffer_create_info, NULL, &buffer);
+    ASSERT_VK_SUCCESS(err);
+
+    VkBufferDeviceAddressInfoEXT info = {VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT};
+    info.buffer = buffer;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferDeviceAddressInfoKHR-buffer-02600");
+    vkGetBufferDeviceAddressEXT(m_device->device(), &info);
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyBuffer(m_device->device(), buffer, NULL);
+}
+
+TEST_F(VkLayerTest, BufferDeviceAddressEXTDisabled) {
+    TEST_DESCRIPTION("Test VK_EXT_buffer_device_address.");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    std::array<const char *, 1> required_device_extensions = {{VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME}};
+    for (auto device_extension : required_device_extensions) {
+        if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
+            m_device_extension_names.push_back(device_extension);
+        } else {
+            printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
+            return;
+        }
+    }
+
+    if (DeviceIsMockICD() || DeviceSimulation()) {
+        printf("%s MockICD does not support this feature, skipping tests\n", kSkipPrefix);
+        return;
+    }
+
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+    // Create a device that disables buffer_device_address
+    auto buffer_device_address_features = lvl_init_struct<VkPhysicalDeviceBufferAddressFeaturesEXT>();
+    auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&buffer_device_address_features);
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+    buffer_device_address_features.bufferDeviceAddress = VK_FALSE;
+    buffer_device_address_features.bufferDeviceAddressCaptureReplay = VK_FALSE;
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT =
+        (PFN_vkGetBufferDeviceAddressEXT)vk::GetDeviceProcAddr(device(), "vkGetBufferDeviceAddressEXT");
+
+    VkBufferCreateInfo buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
+    buffer_create_info.size = sizeof(uint32_t);
+    buffer_create_info.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
+    VkBuffer buffer;
+    VkResult err = vk::CreateBuffer(m_device->device(), &buffer_create_info, NULL, &buffer);
+    ASSERT_VK_SUCCESS(err);
+
+    VkBufferDeviceAddressInfoEXT info = {VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT};
+    info.buffer = buffer;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkGetBufferDeviceAddressKHR-bufferDeviceAddress-03324");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferDeviceAddressInfoKHR-buffer-02601");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferDeviceAddressInfoKHR-buffer-02600");
+    vkGetBufferDeviceAddressEXT(m_device->device(), &info);
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyBuffer(m_device->device(), buffer, NULL);
+}
+
+TEST_F(VkLayerTest, BufferDeviceAddressKHR) {
+    TEST_DESCRIPTION("Test VK_KHR_buffer_device_address.");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    std::array<const char *, 1> required_device_extensions = {{VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME}};
+    for (auto device_extension : required_device_extensions) {
+        if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
+            m_device_extension_names.push_back(device_extension);
+        } else {
+            printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
+            return;
+        }
+    }
+
+    if (DeviceIsMockICD() || DeviceSimulation()) {
+        printf("%s MockICD does not support this feature, skipping tests\n", kSkipPrefix);
+        return;
+    }
+
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+    // Create a device that enables buffer_device_address
+    auto buffer_device_address_features = lvl_init_struct<VkPhysicalDeviceBufferDeviceAddressFeaturesKHR>();
+    auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&buffer_device_address_features);
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+    buffer_device_address_features.bufferDeviceAddressCaptureReplay = VK_FALSE;
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR =
+        (PFN_vkGetBufferDeviceAddressKHR)vk::GetDeviceProcAddr(device(), "vkGetBufferDeviceAddressKHR");
+    PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR =
+        (PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR)vk::GetDeviceProcAddr(device(), "vkGetDeviceMemoryOpaqueCaptureAddressKHR");
+
+    VkBufferCreateInfo buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
+    buffer_create_info.size = sizeof(uint32_t);
+    buffer_create_info.usage = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR;
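+    // bufferDeviceAddressCaptureReplay was disabled at device creation, so the capture replay flag below is invalid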
+    buffer_create_info.flags = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR;
+    CreateBufferTest(*this, &buffer_create_info, "VUID-VkBufferCreateInfo-flags-03338");
+
+    buffer_create_info.flags = 0;
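+    // A non-zero opaqueCaptureAddress likewise requires the (disabled) capture replay feature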
+    VkBufferOpaqueCaptureAddressCreateInfoKHR addr_ci = {VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR};
+    addr_ci.opaqueCaptureAddress = 1;
+    buffer_create_info.pNext = &addr_ci;
+    CreateBufferTest(*this, &buffer_create_info, "VUID-VkBufferCreateInfo-opaqueCaptureAddress-03337");
+
+    buffer_create_info.pNext = nullptr;
+    VkBuffer buffer;
+    VkResult err = vk::CreateBuffer(m_device->device(), &buffer_create_info, NULL, &buffer);
+    ASSERT_VK_SUCCESS(err);
+
+    VkBufferDeviceAddressInfoKHR info = {VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR};
+    info.buffer = buffer;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferDeviceAddressInfoKHR-buffer-02600");
+    vkGetBufferDeviceAddressKHR(m_device->device(), &info);
+    m_errorMonitor->VerifyFound();
+
+    VkMemoryRequirements buffer_mem_reqs = {};
+    vk::GetBufferMemoryRequirements(device(), buffer, &buffer_mem_reqs);
+    VkMemoryAllocateInfo buffer_alloc_info = {};
+    buffer_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    buffer_alloc_info.allocationSize = buffer_mem_reqs.size;
+    m_device->phy().set_memory_type(buffer_mem_reqs.memoryTypeBits, &buffer_alloc_info, 0);
+    VkDeviceMemory buffer_mem;
+    err = vk::AllocateMemory(device(), &buffer_alloc_info, NULL, &buffer_mem);
+    ASSERT_VK_SUCCESS(err);
+
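+    // The allocation above did not use VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR, so binding it to this buffer is invalid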
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBindBufferMemory-bufferDeviceAddress-03339");
+    vk::BindBufferMemory(m_device->device(), buffer, buffer_mem, 0);
+    m_errorMonitor->VerifyFound();
+
+    VkDeviceMemoryOpaqueCaptureAddressInfoKHR mem_opaque_addr_info = {
+        VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR};
+    mem_opaque_addr_info.memory = buffer_mem;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkDeviceMemoryOpaqueCaptureAddressInfoKHR-memory-03336");
+    vkGetDeviceMemoryOpaqueCaptureAddressKHR(m_device->device(), &mem_opaque_addr_info);
+    m_errorMonitor->VerifyFound();
+
+    vk::FreeMemory(m_device->device(), buffer_mem, NULL);
+
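+    // Re-allocate with the device address flag; the remaining queries and the bind should now succeed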
+    VkMemoryAllocateFlagsInfo alloc_flags = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO};
+    alloc_flags.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
+    buffer_alloc_info.pNext = &alloc_flags;
+    err = vk::AllocateMemory(device(), &buffer_alloc_info, NULL, &buffer_mem);
+
+    mem_opaque_addr_info.memory = buffer_mem;
+    m_errorMonitor->ExpectSuccess();
+    vkGetDeviceMemoryOpaqueCaptureAddressKHR(m_device->device(), &mem_opaque_addr_info);
+    m_errorMonitor->VerifyNotFound();
+
+    m_errorMonitor->ExpectSuccess();
+    vk::BindBufferMemory(m_device->device(), buffer, buffer_mem, 0);
+    m_errorMonitor->VerifyNotFound();
+
+    m_errorMonitor->ExpectSuccess();
+    vkGetBufferDeviceAddressKHR(m_device->device(), &info);
+    m_errorMonitor->VerifyNotFound();
+
+    vk::FreeMemory(m_device->device(), buffer_mem, NULL);
+    vk::DestroyBuffer(m_device->device(), buffer, NULL);
+}
+
+TEST_F(VkLayerTest, BufferDeviceAddressKHRDisabled) {
+    TEST_DESCRIPTION("Test VK_KHR_buffer_device_address.");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    std::array<const char *, 1> required_device_extensions = {{VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME}};
+    for (auto device_extension : required_device_extensions) {
+        if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
+            m_device_extension_names.push_back(device_extension);
+        } else {
+            printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
+            return;
+        }
+    }
+
+    if (DeviceIsMockICD() || DeviceSimulation()) {
+        printf("%s MockICD does not support this feature, skipping tests\n", kSkipPrefix);
+        return;
+    }
+
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+    // Create a device that disables buffer_device_address
+    auto buffer_device_address_features = lvl_init_struct<VkPhysicalDeviceBufferDeviceAddressFeaturesKHR>();
+    auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&buffer_device_address_features);
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+    buffer_device_address_features.bufferDeviceAddress = VK_FALSE;
+    buffer_device_address_features.bufferDeviceAddressCaptureReplay = VK_FALSE;
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR =
+        (PFN_vkGetBufferDeviceAddressKHR)vk::GetDeviceProcAddr(device(), "vkGetBufferDeviceAddressKHR");
+    PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR =
+        (PFN_vkGetBufferOpaqueCaptureAddressKHR)vk::GetDeviceProcAddr(device(), "vkGetBufferOpaqueCaptureAddressKHR");
+    PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR =
+        (PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR)vk::GetDeviceProcAddr(device(), "vkGetDeviceMemoryOpaqueCaptureAddressKHR");
+
+    VkBufferCreateInfo buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
+    buffer_create_info.size = sizeof(uint32_t);
+    buffer_create_info.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
+    VkBuffer buffer;
+    VkResult err = vk::CreateBuffer(m_device->device(), &buffer_create_info, NULL, &buffer);
+    ASSERT_VK_SUCCESS(err);
+
+    VkBufferDeviceAddressInfoKHR info = {VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR};
+    info.buffer = buffer;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkGetBufferDeviceAddressKHR-bufferDeviceAddress-03324");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferDeviceAddressInfoKHR-buffer-02601");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferDeviceAddressInfoKHR-buffer-02600");
+    vkGetBufferDeviceAddressKHR(m_device->device(), &info);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetBufferOpaqueCaptureAddressKHR-None-03326");
+    vkGetBufferOpaqueCaptureAddressKHR(m_device->device(), &info);
+    m_errorMonitor->VerifyFound();
+
+    VkMemoryRequirements buffer_mem_reqs = {};
+    vk::GetBufferMemoryRequirements(device(), buffer, &buffer_mem_reqs);
+    VkMemoryAllocateInfo buffer_alloc_info = {};
+    buffer_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    buffer_alloc_info.allocationSize = buffer_mem_reqs.size;
+    m_device->phy().set_memory_type(buffer_mem_reqs.memoryTypeBits, &buffer_alloc_info, 0);
+    VkDeviceMemory buffer_mem;
+    err = vk::AllocateMemory(device(), &buffer_alloc_info, NULL, &buffer_mem);
+    ASSERT_VK_SUCCESS(err);
+
+    VkDeviceMemoryOpaqueCaptureAddressInfoKHR mem_opaque_addr_info = {
+        VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR};
+    mem_opaque_addr_info.memory = buffer_mem;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetDeviceMemoryOpaqueCaptureAddressKHR-None-03334");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkDeviceMemoryOpaqueCaptureAddressInfoKHR-memory-03336");
+    vkGetDeviceMemoryOpaqueCaptureAddressKHR(m_device->device(), &mem_opaque_addr_info);
+    m_errorMonitor->VerifyFound();
+
+    vk::FreeMemory(m_device->device(), buffer_mem, NULL);
+
+    VkMemoryAllocateFlagsInfo alloc_flags = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO};
+    alloc_flags.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR | VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR;
+    buffer_alloc_info.pNext = &alloc_flags;
+
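+    // Both allocate flags depend on buffer device address features that were disabled at device creation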
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-flags-03330");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-flags-03331");
+    err = vk::AllocateMemory(device(), &buffer_alloc_info, NULL, &buffer_mem);
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyBuffer(m_device->device(), buffer, NULL);
+}
+
+TEST_F(VkLayerTest, CreateImageYcbcrArrayLayers) {
+    TEST_DESCRIPTION("Creating images with out-of-range arrayLayers ");
+
+    // Enable KHR multiplane req'd extensions
+    bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
+                                                    VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION);
+    if (mp_extensions) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+    if (mp_extensions) {
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+    } else {
+        printf("%s test requires KHR multiplane extensions, not available.  Skipping.\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // Create ycbcr image with unsupported arrayLayers
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM;
+    image_create_info.extent.width = 32;
+    image_create_info.extent.height = 32;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+
+    bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), image_create_info, VK_FORMAT_FEATURE_TRANSFER_SRC_BIT);
+    if (!supported) {
+        printf("%s Multiplane image format not supported.  Skipping test.\n", kSkipPrefix);
+        return;
+    }
+
+    VkImageFormatProperties img_limits;
+    ASSERT_VK_SUCCESS(GPDIFPHelper(gpu(), &image_create_info, &img_limits));
+    if (img_limits.maxArrayLayers == 1) {
+        return;
+    }
+    image_create_info.arrayLayers = img_limits.maxArrayLayers;
+
+    CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-format-02653");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-format-02653");
+}
+
+TEST_F(VkLayerTest, BindImageMemorySwapchain) {
+    TEST_DESCRIPTION("Invalid bind image with a swapchain");
+    SetTargetApiVersion(VK_API_VERSION_1_1);
+
+    if (!AddSurfaceInstanceExtension()) {
+        printf("%s surface extensions not supported, skipping BindSwapchainImageMemory test\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (!AddSwapchainDeviceExtension()) {
+        printf("%s swapchain extensions not supported, skipping BindSwapchainImageMemory test\n", kSkipPrefix);
+        return;
+    }
+
+    if (DeviceValidationVersion() < VK_API_VERSION_1_1) {
+        printf("%s VkBindImageMemoryInfo requires Vulkan 1.1+, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+    if (!InitSwapchain()) {
+        printf("%s Cannot create surface or swapchain, skipping BindSwapchainImageMemory test\n", kSkipPrefix);
+        return;
+    }
+
+    auto image_create_info = lvl_init_struct<VkImageCreateInfo>();
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
+    image_create_info.extent.width = 64;
+    image_create_info.extent.height = 64;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
+    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+    image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+
+    auto image_swapchain_create_info = lvl_init_struct<VkImageSwapchainCreateInfoKHR>();
+    image_swapchain_create_info.swapchain = m_swapchain;
+    image_create_info.pNext = &image_swapchain_create_info;
+
+    VkImage image_from_swapchain;
+    vk::CreateImage(device(), &image_create_info, NULL, &image_from_swapchain);
+
+    VkMemoryRequirements mem_reqs = {};
+    vk::GetImageMemoryRequirements(device(), image_from_swapchain, &mem_reqs);
+
+    auto alloc_info = lvl_init_struct<VkMemoryAllocateInfo>();
+    alloc_info.memoryTypeIndex = 0;
+    alloc_info.allocationSize = mem_reqs.size;
+
+    bool pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &alloc_info, 0);
+    ASSERT_TRUE(pass);
+
+    VkDeviceMemory mem;
+    VkResult err = vk::AllocateMemory(m_device->device(), &alloc_info, NULL, &mem);
+    ASSERT_VK_SUCCESS(err);
+
+    auto bind_info = lvl_init_struct<VkBindImageMemoryInfo>();
+    bind_info.image = image_from_swapchain;
+    bind_info.memory = VK_NULL_HANDLE;
+    bind_info.memoryOffset = 0;
+
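+    // The image was created from the swapchain, but memory is null and no swapchain bind info is chained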
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBindImageMemoryInfo-image-01630");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBindImageMemoryInfo-pNext-01632");
+    vk::BindImageMemory2(m_device->device(), 1, &bind_info);
+    m_errorMonitor->VerifyFound();
+
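+    // Now chain swapchain bind info, but with a null swapchain handle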
+    auto bind_swapchain_info = lvl_init_struct<VkBindImageMemorySwapchainInfoKHR>();
+    bind_swapchain_info.swapchain = VK_NULL_HANDLE;
+    bind_swapchain_info.imageIndex = 0;
+    bind_info.pNext = &bind_swapchain_info;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-GeneralParameterError-RequiredParameter");
+    vk::BindImageMemory2(m_device->device(), 1, &bind_info);
+    m_errorMonitor->VerifyFound();
+
+    bind_info.memory = mem;
+    bind_swapchain_info.swapchain = m_swapchain;
+    bind_swapchain_info.imageIndex = UINT32_MAX;
+
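+    // Non-null memory combined with chained swapchain info, plus an out-of-range imageIndex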
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBindImageMemoryInfo-pNext-01631");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBindImageMemorySwapchainInfoKHR-imageIndex-01644");
+    vk::BindImageMemory2(m_device->device(), 1, &bind_info);
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyImage(m_device->device(), image_from_swapchain, NULL);
+    vk::FreeMemory(m_device->device(), mem, NULL);
+    DestroySwapchain();
+}
+
+TEST_F(VkLayerTest, TransferImageToSwapchainWithInvalidLayoutDeviceGroup) {
+    TEST_DESCRIPTION("Transfer an image to a swapchain image with an invalid layout within a device group");
+
+#if defined(VK_USE_PLATFORM_ANDROID_KHR)
+    printf(
+        "%s According to valid usage, VkBindImageMemoryInfo-memory should be NULL. But Android will crash if memory is NULL, "
+        "skipping test\n",
+        kSkipPrefix);
+    return;
+#endif
+
+    SetTargetApiVersion(VK_API_VERSION_1_1);
+
+    if (!AddSurfaceInstanceExtension()) {
+        printf("%s surface extensions not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (!AddSwapchainDeviceExtension()) {
+        printf("%s swapchain extensions not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    if (DeviceValidationVersion() < VK_API_VERSION_1_1) {
+        printf("%s VkBindImageMemoryInfo requires Vulkan 1.1+, skipping test\n", kSkipPrefix);
+        return;
+    }
+    uint32_t physical_device_group_count = 0;
+    vk::EnumeratePhysicalDeviceGroups(instance(), &physical_device_group_count, nullptr);
+
+    if (physical_device_group_count == 0) {
+        printf("%s physical_device_group_count is 0, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    std::vector<VkPhysicalDeviceGroupProperties> physical_device_group(physical_device_group_count,
+                                                                       {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES});
+    vk::EnumeratePhysicalDeviceGroups(instance(), &physical_device_group_count, physical_device_group.data());
+    VkDeviceGroupDeviceCreateInfo create_device_pnext = {};
+    create_device_pnext.sType = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO;
+    create_device_pnext.physicalDeviceCount = physical_device_group[0].physicalDeviceCount;
+    create_device_pnext.pPhysicalDevices = physical_device_group[0].physicalDevices;
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &create_device_pnext));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+    if (!InitSwapchain(VK_IMAGE_USAGE_TRANSFER_DST_BIT)) {
+        printf("%s Cannot create surface or swapchain, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    auto image_create_info = lvl_init_struct<VkImageCreateInfo>();
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
+    image_create_info.extent.width = 64;
+    image_create_info.extent.height = 64;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
+    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+    image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+
+    VkImageObj src_Image(m_device);
+    src_Image.init(&image_create_info);
+
+    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    image_create_info.flags = VK_IMAGE_CREATE_ALIAS_BIT;
+
+    auto image_swapchain_create_info = lvl_init_struct<VkImageSwapchainCreateInfoKHR>();
+    image_swapchain_create_info.swapchain = m_swapchain;
+    image_create_info.pNext = &image_swapchain_create_info;
+
+    VkImage peer_image;
+    vk::CreateImage(device(), &image_create_info, NULL, &peer_image);
+
+    auto bind_devicegroup_info = lvl_init_struct<VkBindImageMemoryDeviceGroupInfo>();
+    bind_devicegroup_info.deviceIndexCount = 2;
+    std::array<uint32_t, 2> deviceIndices = {0, 0};
+    bind_devicegroup_info.pDeviceIndices = deviceIndices.data();
+    bind_devicegroup_info.splitInstanceBindRegionCount = 0;
+    bind_devicegroup_info.pSplitInstanceBindRegions = nullptr;
+
+    auto bind_swapchain_info = lvl_init_struct<VkBindImageMemorySwapchainInfoKHR>(&bind_devicegroup_info);
+    bind_swapchain_info.swapchain = m_swapchain;
+    bind_swapchain_info.imageIndex = 0;
+
+    auto bind_info = lvl_init_struct<VkBindImageMemoryInfo>(&bind_swapchain_info);
+    bind_info.image = peer_image;
+    bind_info.memory = VK_NULL_HANDLE;
+    bind_info.memoryOffset = 0;
+
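+    // Bind the peer image to swapchain image 0 through the device group + swapchain pNext chain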
+    vk::BindImageMemory2(m_device->device(), 1, &bind_info);
+
+    uint32_t swapchain_images_count = 0;
+    vk::GetSwapchainImagesKHR(device(), m_swapchain, &swapchain_images_count, nullptr);
+    std::vector<VkImage> swapchain_images;
+    swapchain_images.resize(swapchain_images_count);
+    vk::GetSwapchainImagesKHR(device(), m_swapchain, &swapchain_images_count, swapchain_images.data());
+
+    m_commandBuffer->begin();
+
+    VkImageCopy copy_region = {};
+    copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copy_region.srcSubresource.mipLevel = 0;
+    copy_region.dstSubresource.mipLevel = 0;
+    copy_region.srcSubresource.baseArrayLayer = 0;
+    copy_region.dstSubresource.baseArrayLayer = 0;
+    copy_region.srcSubresource.layerCount = 1;
+    copy_region.dstSubresource.layerCount = 1;
+    copy_region.srcOffset = {0, 0, 0};
+    copy_region.dstOffset = {0, 0, 0};
+    copy_region.extent = {10, 10, 1};
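+    // peer_image was never transitioned to TRANSFER_DST_OPTIMAL, so the copy's destination layout is invalid at submit time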
+    vk::CmdCopyImage(m_commandBuffer->handle(), src_Image.handle(), VK_IMAGE_LAYOUT_GENERAL, peer_image,
+                     VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &copy_region);
+
+    m_commandBuffer->end();
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-CoreValidation-DrawState-InvalidImageLayout");
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyImage(m_device->device(), peer_image, NULL);
+    DestroySwapchain();
+}
+
+TEST_F(VkLayerTest, InvalidMemoryType) {
+    // Attempts to allocate from a memory type that doesn't exist
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    VkPhysicalDeviceMemoryProperties memory_info;
+    vk::GetPhysicalDeviceMemoryProperties(gpu(), &memory_info);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkAllocateMemory-pAllocateInfo-01714");
+
+    VkMemoryAllocateInfo mem_alloc = {};
+    mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    mem_alloc.pNext = NULL;
+    mem_alloc.memoryTypeIndex = memory_info.memoryTypeCount;
+    mem_alloc.allocationSize = 4;
+
+    VkDeviceMemory mem;
+    vk::AllocateMemory(m_device->device(), &mem_alloc, NULL, &mem);
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, AllocationBeyondHeapSize) {
+    // Attempts to allocate a single piece of memory that's larger than the heap size
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    VkPhysicalDeviceMemoryProperties memory_info;
+    vk::GetPhysicalDeviceMemoryProperties(gpu(), &memory_info);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkAllocateMemory-pAllocateInfo-01713");
+
+    VkMemoryAllocateInfo mem_alloc = {};
+    mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    mem_alloc.pNext = NULL;
+    mem_alloc.memoryTypeIndex = 0;
+    mem_alloc.allocationSize = memory_info.memoryHeaps[memory_info.memoryTypes[0].heapIndex].size + 1;
+
+    VkDeviceMemory mem;
+    vk::AllocateMemory(m_device->device(), &mem_alloc, NULL, &mem);
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, DeviceCoherentMemoryDisabledAMD) {
+    // Attempts to allocate device coherent memory without enabling the extension/feature
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    if (DeviceIsMockICD() || DeviceSimulation()) {
+        printf("%s MockICD does not support the necessary memory type, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    // Check extension support but do not enable it
+    if (!DeviceExtensionSupported(gpu(), nullptr, VK_AMD_DEVICE_COHERENT_MEMORY_EXTENSION_NAME)) {
+        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_AMD_DEVICE_COHERENT_MEMORY_EXTENSION_NAME);
+        return;
+    }
+
+    // Find a memory type that includes the device coherent memory property
+    VkPhysicalDeviceMemoryProperties memory_info;
+    vk::GetPhysicalDeviceMemoryProperties(gpu(), &memory_info);
+    uint32_t deviceCoherentMemoryTypeIndex = memory_info.memoryTypeCount;  // Set to an invalid value just in case
+
+    for (uint32_t i = 0; i < memory_info.memoryTypeCount; ++i) {
+        if ((memory_info.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD) != 0) {
+            deviceCoherentMemoryTypeIndex = i;
+            break;
+        }
+    }
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkAllocateMemory-deviceCoherentMemory-02790");
+
+    VkMemoryAllocateInfo mem_alloc = {};
+    mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    mem_alloc.pNext = NULL;
+    mem_alloc.memoryTypeIndex = deviceCoherentMemoryTypeIndex;
+    mem_alloc.allocationSize = 4;
+
+    VkDeviceMemory mem;
+    vk::AllocateMemory(m_device->device(), &mem_alloc, NULL, &mem);
+
+    m_errorMonitor->VerifyFound();
+}
diff --git a/src/third_party/vulkan-validation-layers/src/tests/vklayertests_command.cpp b/src/third_party/vulkan-validation-layers/src/tests/vklayertests_command.cpp
new file mode 100644
index 0000000..533d08e
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/vklayertests_command.cpp
@@ -0,0 +1,5204 @@
+/*
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2019 Google, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Author: Chia-I Wu <olvaffe@gmail.com>
+ * Author: Chris Forbes <chrisf@ijw.co.nz>
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Mike Stroyan <mike@LunarG.com>
+ * Author: Tobin Ehlis <tobine@google.com>
+ * Author: Tony Barbour <tony@LunarG.com>
+ * Author: Cody Northrop <cnorthrop@google.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ * Author: Jeremy Kniager <jeremyk@lunarg.com>
+ * Author: Shannon McPherson <shannon@lunarg.com>
+ * Author: John Zulauf <jzulauf@lunarg.com>
+ */
+
+#include "cast_utils.h"
+#include "layer_validation_tests.h"
+
+TEST_F(VkLayerTest, InvalidCommandPoolConsistency) {
+    TEST_DESCRIPTION("Allocate command buffers from one command pool and attempt to delete them from another.");
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkFreeCommandBuffers-pCommandBuffers-parent");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    VkCommandPool command_pool_one;
+    VkCommandPool command_pool_two;
+
+    VkCommandPoolCreateInfo pool_create_info{};
+    pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+    pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+    pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+
+    vk::CreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool_one);
+
+    vk::CreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool_two);
+
+    VkCommandBuffer cb;
+    VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+    command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+    command_buffer_allocate_info.commandPool = command_pool_one;
+    command_buffer_allocate_info.commandBufferCount = 1;
+    command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+    vk::AllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &cb);
+
+    vk::FreeCommandBuffers(m_device->device(), command_pool_two, 1, &cb);
+
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyCommandPool(m_device->device(), command_pool_one, NULL);
+    vk::DestroyCommandPool(m_device->device(), command_pool_two, NULL);
+}
+
+TEST_F(VkLayerTest, InvalidSecondaryCommandBufferBarrier) {
+    TEST_DESCRIPTION("Add an invalid image barrier in a secondary command buffer");
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // A renderpass with a single subpass that declares a self-dependency
+    VkAttachmentDescription attach[] = {
+        {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+         VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_UNDEFINED,
+         VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+    };
+    VkAttachmentReference ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+    VkSubpassDescription subpasses[] = {
+        {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &ref, nullptr, nullptr, 0, nullptr},
+    };
+    VkSubpassDependency dep = {0,
+                               0,
+                               VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+                               VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+                               VK_ACCESS_SHADER_WRITE_BIT,
+                               VK_ACCESS_SHADER_WRITE_BIT,
+                               VK_DEPENDENCY_BY_REGION_BIT};
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attach, 1, subpasses, 1, &dep};
+    VkRenderPass rp;
+
+    VkResult err = vk::CreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+    ASSERT_VK_SUCCESS(err);
+
+    VkImageObj image(m_device);
+    image.Init(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    VkImageView imageView = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
+    // Second image that img_barrier will incorrectly use
+    VkImageObj image2(m_device);
+    image2.Init(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+
+    VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &imageView, 32, 32, 1};
+    VkFramebuffer fb;
+    err = vk::CreateFramebuffer(m_device->device(), &fbci, nullptr, &fb);
+    ASSERT_VK_SUCCESS(err);
+
+    m_commandBuffer->begin();
+
+    VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
+                                  nullptr,
+                                  rp,
+                                  fb,
+                                  {{
+                                       0,
+                                       0,
+                                   },
+                                   {32, 32}},
+                                  0,
+                                  nullptr};
+
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
+
+    VkCommandPoolObj pool(m_device, m_device->graphics_queue_node_index_, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
+    VkCommandBufferObj secondary(m_device, &pool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+
+    VkCommandBufferInheritanceInfo cbii = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
+                                           nullptr,
+                                           rp,
+                                           0,
+                                           VK_NULL_HANDLE,  // Set to NULL FB handle intentionally to flesh out any errors
+                                           VK_FALSE,
+                                           0,
+                                           0};
+    VkCommandBufferBeginInfo cbbi = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr,
+                                     VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT,
+                                     &cbii};
+    vk::BeginCommandBuffer(secondary.handle(), &cbbi);
+    VkImageMemoryBarrier img_barrier = {};
+    img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+    img_barrier.srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
+    img_barrier.dstAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
+    img_barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    img_barrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    img_barrier.image = image2.handle();  // Image mismatches with FB image
+    img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    img_barrier.subresourceRange.baseArrayLayer = 0;
+    img_barrier.subresourceRange.baseMipLevel = 0;
+    img_barrier.subresourceRange.layerCount = 1;
+    img_barrier.subresourceRange.levelCount = 1;
+    vk::CmdPipelineBarrier(secondary.handle(), VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+                           VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
+    secondary.end();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-image-02635");
+    vk::CmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyFramebuffer(m_device->device(), fb, nullptr);
+    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+}
+
+TEST_F(VkLayerTest, DynamicDepthBiasNotBound) {
+    TEST_DESCRIPTION(
+        "Run a simple draw call to validate failure when Depth Bias dynamic state is required but not correctly bound.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    // Dynamic depth bias
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Dynamic depth bias state not set for this command buffer");
+    VKTriangleTest(BsoFailDepthBias);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, DynamicLineWidthNotBound) {
+    TEST_DESCRIPTION(
+        "Run a simple draw call to validate failure when Line Width dynamic state is required but not correctly bound.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    // Dynamic line width
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Dynamic line width state not set for this command buffer");
+    VKTriangleTest(BsoFailLineWidth);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, DynamicLineStippleNotBound) {
+    TEST_DESCRIPTION(
+        "Run a simple draw call to validate failure when Line Stipple dynamic state is required but not correctly bound.");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    std::array<const char *, 1> required_device_extensions = {{VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME}};
+    for (auto device_extension : required_device_extensions) {
+        if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
+            m_device_extension_names.push_back(device_extension);
+        } else {
+            printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
+            return;
+        }
+    }
+
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+    auto line_rasterization_features = lvl_init_struct<VkPhysicalDeviceLineRasterizationFeaturesEXT>();
+    auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&line_rasterization_features);
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+
+    if (!line_rasterization_features.stippledBresenhamLines || !line_rasterization_features.bresenhamLines) {
+        printf("%s Stipple Bresenham lines not supported; skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "Dynamic line stipple state not set for this command buffer");
+    VKTriangleTest(BsoFailLineStipple);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, DynamicViewportNotBound) {
+    TEST_DESCRIPTION(
+        "Run a simple draw call to validate failure when Viewport dynamic state is required but not correctly bound.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    // Dynamic viewport state
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "Dynamic viewport(s) 0 are used by pipeline state object, but were not provided");
+    VKTriangleTest(BsoFailViewport);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, DynamicScissorNotBound) {
+    TEST_DESCRIPTION("Run a simple draw call to validate failure when Scissor dynamic state is required but not correctly bound.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    // Dynamic scissor state
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "Dynamic scissor(s) 0 are used by pipeline state object, but were not provided");
+    VKTriangleTest(BsoFailScissor);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, DynamicBlendConstantsNotBound) {
+    TEST_DESCRIPTION(
+        "Run a simple draw call to validate failure when Blend Constants dynamic state is required but not correctly bound.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    // Dynamic blend constant state
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "Dynamic blend constants state not set for this command buffer");
+    VKTriangleTest(BsoFailBlend);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, DynamicDepthBoundsNotBound) {
+    TEST_DESCRIPTION(
+        "Run a simple draw call to validate failure when Depth Bounds dynamic state is required but not correctly bound.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    if (!m_device->phy().features().depthBounds) {
+        printf("%s Device does not support depthBounds test; skipped.\n", kSkipPrefix);
+        return;
+    }
+    // Dynamic depth bounds
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "Dynamic depth bounds state not set for this command buffer");
+    VKTriangleTest(BsoFailDepthBounds);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, DynamicStencilReadNotBound) {
+    TEST_DESCRIPTION(
+        "Run a simple draw call to validate failure when Stencil Read dynamic state is required but not correctly bound.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    // Dynamic stencil read mask
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "Dynamic stencil read mask state not set for this command buffer");
+    VKTriangleTest(BsoFailStencilReadMask);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, DynamicStencilWriteNotBound) {
+    TEST_DESCRIPTION(
+        "Run a simple draw call to validate failure when Stencil Write dynamic state is required but not correctly bound.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    // Dynamic stencil write mask
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "Dynamic stencil write mask state not set for this command buffer");
+    VKTriangleTest(BsoFailStencilWriteMask);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, DynamicStencilRefNotBound) {
+    TEST_DESCRIPTION(
+        "Run a simple draw call to validate failure when Stencil Ref dynamic state is required but not correctly bound.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    // Dynamic stencil reference
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "Dynamic stencil reference state not set for this command buffer");
+    VKTriangleTest(BsoFailStencilReference);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, IndexBufferNotBound) {
+    TEST_DESCRIPTION("Run an indexed draw call without an index buffer bound.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "Index buffer object not bound to this command buffer when Indexed ");
+    VKTriangleTest(BsoFailIndexBuffer);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, IndexBufferBadSize) {
+    TEST_DESCRIPTION("Run indexed draw call with bad index buffer size.");
+
+    ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "vkCmdDrawIndexed() index size ");
+    VKTriangleTest(BsoFailIndexBufferBadSize);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, IndexBufferBadOffset) {
+    TEST_DESCRIPTION("Run indexed draw call with bad index buffer offset.");
+
+    ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "vkCmdDrawIndexed() index size ");
+    VKTriangleTest(BsoFailIndexBufferBadOffset);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, IndexBufferBadBindSize) {
+    TEST_DESCRIPTION("Run bind index buffer with a size greater than the index buffer.");
+
+    ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "vkCmdDrawIndexed() index size ");
+    VKTriangleTest(BsoFailIndexBufferBadMapSize);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, IndexBufferBadBindOffset) {
+    TEST_DESCRIPTION("Run bind index buffer with an offset greater than the size of the index buffer.");
+
+    ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "vkCmdDrawIndexed() index size ");
+    VKTriangleTest(BsoFailIndexBufferBadMapOffset);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, MissingClearAttachment) {
+    TEST_DESCRIPTION("Points to a wrong colorAttachment index in a VkClearAttachment structure passed to vkCmdClearAttachments");
+    ASSERT_NO_FATAL_FAILURE(Init());
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearAttachments-aspectMask-02501");
+
+    VKTriangleTest(BsoFailCmdClearAttachments);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, SecondaryCommandbufferAsPrimary) {
+    TEST_DESCRIPTION("Create a secondary command buffer and pass it to QueueSubmit.");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSubmitInfo-pCommandBuffers-00075");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkCommandBufferObj secondary(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+    secondary.begin();
+    secondary.ClearAllBuffers(m_renderTargets, m_clear_color, nullptr, m_depth_clear_color, m_stencil_clear_color);
+    secondary.end();
+
+    VkSubmitInfo submit_info;
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.pNext = NULL;
+    submit_info.waitSemaphoreCount = 0;
+    submit_info.pWaitSemaphores = NULL;
+    submit_info.pWaitDstStageMask = NULL;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &secondary.handle();
+    submit_info.signalSemaphoreCount = 0;
+    submit_info.pSignalSemaphores = NULL;
+
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CommandBufferTwoSubmits) {
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "was begun w/ VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT set, but has been submitted");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // We luck out because, by default, the framework creates the command buffer with
+    // VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT set
+    m_commandBuffer->begin();
+    m_commandBuffer->ClearAllBuffers(m_renderTargets, m_clear_color, nullptr, m_depth_clear_color, m_stencil_clear_color);
+    m_commandBuffer->end();
+
+    // Bypass framework since it does the waits automatically
+    VkResult err = VK_SUCCESS;
+    VkSubmitInfo submit_info;
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.pNext = NULL;
+    submit_info.waitSemaphoreCount = 0;
+    submit_info.pWaitSemaphores = NULL;
+    submit_info.pWaitDstStageMask = NULL;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    submit_info.signalSemaphoreCount = 0;
+    submit_info.pSignalSemaphores = NULL;
+
+    err = vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    ASSERT_VK_SUCCESS(err);
+    vk::QueueWaitIdle(m_device->m_queue);
+
+    // Cause validation error by re-submitting cmd buffer that should only be
+    // submitted once
+    err = vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    vk::QueueWaitIdle(m_device->m_queue);
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InvalidPushConstants) {
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkPipelineLayout pipeline_layout;
+    VkPushConstantRange pc_range = {};
+    VkPipelineLayoutCreateInfo pipeline_layout_ci = {};
+    pipeline_layout_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+    pipeline_layout_ci.pushConstantRangeCount = 1;
+    pipeline_layout_ci.pPushConstantRanges = &pc_range;
+
+    //
+    // Check for invalid push constant ranges in pipeline layouts.
+    //
+    struct PipelineLayoutTestCase {
+        VkPushConstantRange const range;
+        char const *msg;
+    };
+
+    const uint32_t too_big = m_device->props.limits.maxPushConstantsSize + 0x4;
+    const std::array<PipelineLayoutTestCase, 10> range_tests = {{
+        {{VK_SHADER_STAGE_VERTEX_BIT, 0, 0}, "vkCreatePipelineLayout() call has push constants index 0 with size 0."},
+        {{VK_SHADER_STAGE_VERTEX_BIT, 0, 1}, "vkCreatePipelineLayout() call has push constants index 0 with size 1."},
+        {{VK_SHADER_STAGE_VERTEX_BIT, 4, 1}, "vkCreatePipelineLayout() call has push constants index 0 with size 1."},
+        {{VK_SHADER_STAGE_VERTEX_BIT, 4, 0}, "vkCreatePipelineLayout() call has push constants index 0 with size 0."},
+        {{VK_SHADER_STAGE_VERTEX_BIT, 1, 4}, "vkCreatePipelineLayout() call has push constants index 0 with offset 1. Offset must"},
+        {{VK_SHADER_STAGE_VERTEX_BIT, 0, too_big}, "vkCreatePipelineLayout() call has push constants index 0 with offset "},
+        {{VK_SHADER_STAGE_VERTEX_BIT, too_big, too_big}, "vkCreatePipelineLayout() call has push constants index 0 with offset "},
+        {{VK_SHADER_STAGE_VERTEX_BIT, too_big, 4}, "vkCreatePipelineLayout() call has push constants index 0 with offset "},
+        {{VK_SHADER_STAGE_VERTEX_BIT, 0xFFFFFFF0, 0x00000020},
+         "vkCreatePipelineLayout() call has push constants index 0 with offset "},
+        {{VK_SHADER_STAGE_VERTEX_BIT, 0x00000020, 0xFFFFFFF0},
+         "vkCreatePipelineLayout() call has push constants index 0 with offset "},
+    }};
+
+    // Check for invalid offset and size
+    for (const auto &iter : range_tests) {
+        pc_range = iter.range;
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, iter.msg);
+        vk::CreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Check for invalid stage flag
+    pc_range.offset = 0;
+    pc_range.size = 16;
+    pc_range.stageFlags = 0;
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "vkCreatePipelineLayout: value of pCreateInfo->pPushConstantRanges[0].stageFlags must not be 0");
+    vk::CreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+    m_errorMonitor->VerifyFound();
+
+    // Check for duplicate stage flags in a list of push constant ranges.
+    // A shader can only have one push constant block and that block is mapped
+    // to the push constant range that has that shader's stage flag set.
+    // The shader's stage flag can only appear once in all the ranges, so the
+    // implementation can find the one and only range to map it to.
+    const uint32_t ranges_per_test = 5;
+    struct DuplicateStageFlagsTestCase {
+        VkPushConstantRange const ranges[ranges_per_test];
+        std::vector<char const *> const msg;
+    };
+    // Overlapping ranges are OK, but a stage flag can appear only once.
+    const std::array<DuplicateStageFlagsTestCase, 3> duplicate_stageFlags_tests = {
+        {
+            {{{VK_SHADER_STAGE_VERTEX_BIT, 0, 4},
+              {VK_SHADER_STAGE_VERTEX_BIT, 0, 4},
+              {VK_SHADER_STAGE_VERTEX_BIT, 0, 4},
+              {VK_SHADER_STAGE_VERTEX_BIT, 0, 4},
+              {VK_SHADER_STAGE_VERTEX_BIT, 0, 4}},
+             {
+                 "vkCreatePipelineLayout() Duplicate stage flags found in ranges 0 and 1.",
+                 "vkCreatePipelineLayout() Duplicate stage flags found in ranges 0 and 2.",
+                 "vkCreatePipelineLayout() Duplicate stage flags found in ranges 0 and 3.",
+                 "vkCreatePipelineLayout() Duplicate stage flags found in ranges 0 and 4.",
+                 "vkCreatePipelineLayout() Duplicate stage flags found in ranges 1 and 2.",
+                 "vkCreatePipelineLayout() Duplicate stage flags found in ranges 1 and 3.",
+                 "vkCreatePipelineLayout() Duplicate stage flags found in ranges 1 and 4.",
+                 "vkCreatePipelineLayout() Duplicate stage flags found in ranges 2 and 3.",
+                 "vkCreatePipelineLayout() Duplicate stage flags found in ranges 2 and 4.",
+                 "vkCreatePipelineLayout() Duplicate stage flags found in ranges 3 and 4.",
+             }},
+            {{{VK_SHADER_STAGE_VERTEX_BIT, 0, 4},
+              {VK_SHADER_STAGE_GEOMETRY_BIT, 0, 4},
+              {VK_SHADER_STAGE_FRAGMENT_BIT, 0, 4},
+              {VK_SHADER_STAGE_VERTEX_BIT, 0, 4},
+              {VK_SHADER_STAGE_GEOMETRY_BIT, 0, 4}},
+             {
+                 "vkCreatePipelineLayout() Duplicate stage flags found in ranges 0 and 3.",
+                 "vkCreatePipelineLayout() Duplicate stage flags found in ranges 1 and 4.",
+             }},
+            {{{VK_SHADER_STAGE_FRAGMENT_BIT, 0, 4},
+              {VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, 0, 4},
+              {VK_SHADER_STAGE_VERTEX_BIT, 0, 4},
+              {VK_SHADER_STAGE_VERTEX_BIT, 0, 4},
+              {VK_SHADER_STAGE_GEOMETRY_BIT, 0, 4}},
+             {
+                 "vkCreatePipelineLayout() Duplicate stage flags found in ranges 2 and 3.",
+             }},
+        },
+    };
+
+    for (const auto &iter : duplicate_stageFlags_tests) {
+        pipeline_layout_ci.pPushConstantRanges = iter.ranges;
+        pipeline_layout_ci.pushConstantRangeCount = ranges_per_test;
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, iter.msg.begin(), iter.msg.end());
+        vk::CreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+        m_errorMonitor->VerifyFound();
+    }
+
+    //
+    // CmdPushConstants tests
+    //
+
+    // Set up a pipeline layout with ranges: [0,32) [16,80)
+    const std::vector<VkPushConstantRange> pc_range2 = {{VK_SHADER_STAGE_VERTEX_BIT, 16, 64},
+                                                        {VK_SHADER_STAGE_FRAGMENT_BIT, 0, 32}};
+    const VkPipelineLayoutObj pipeline_layout_obj(m_device, {}, pc_range2);
+
+    const uint8_t dummy_values[100] = {};
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+    // Check for invalid stage flag
+    // Note that VU 00996 isn't reached due to parameter validation
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "vkCmdPushConstants: value of stageFlags must not be 0");
+    vk::CmdPushConstants(m_commandBuffer->handle(), pipeline_layout_obj.handle(), 0, 0, 16, dummy_values);
+    m_errorMonitor->VerifyFound();
+
+    // Positive tests for the overlapping ranges
+    m_errorMonitor->ExpectSuccess();
+    vk::CmdPushConstants(m_commandBuffer->handle(), pipeline_layout_obj.handle(), VK_SHADER_STAGE_FRAGMENT_BIT, 0, 16,
+                         dummy_values);
+    m_errorMonitor->VerifyNotFound();
+    m_errorMonitor->ExpectSuccess();
+    vk::CmdPushConstants(m_commandBuffer->handle(), pipeline_layout_obj.handle(), VK_SHADER_STAGE_VERTEX_BIT, 32, 48, dummy_values);
+    m_errorMonitor->VerifyNotFound();
+    m_errorMonitor->ExpectSuccess();
+    vk::CmdPushConstants(m_commandBuffer->handle(), pipeline_layout_obj.handle(),
+                         VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT, 16, 16, dummy_values);
+    m_errorMonitor->VerifyNotFound();
+
+    // Wrong cmd stages for extant range
+    // No range for all cmd stages -- VUID-vkCmdPushConstants-offset-01795
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPushConstants-offset-01795");
+    // Missing cmd stages for found overlapping range -- VUID-vkCmdPushConstants-offset-01796
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPushConstants-offset-01796");
+    vk::CmdPushConstants(m_commandBuffer->handle(), pipeline_layout_obj.handle(), VK_SHADER_STAGE_GEOMETRY_BIT, 0, 16,
+                         dummy_values);
+    m_errorMonitor->VerifyFound();
+
+    // Wrong: no extant range at this offset
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPushConstants-offset-01795");
+    vk::CmdPushConstants(m_commandBuffer->handle(), pipeline_layout_obj.handle(), VK_SHADER_STAGE_FRAGMENT_BIT, 80, 4,
+                         dummy_values);
+    m_errorMonitor->VerifyFound();
+
+    // Wrong overlapping extent
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPushConstants-offset-01795");
+    vk::CmdPushConstants(m_commandBuffer->handle(), pipeline_layout_obj.handle(),
+                         VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT, 0, 20, dummy_values);
+    m_errorMonitor->VerifyFound();
+
+    // Wrong stage flags for valid overlapping range
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPushConstants-offset-01796");
+    vk::CmdPushConstants(m_commandBuffer->handle(), pipeline_layout_obj.handle(), VK_SHADER_STAGE_VERTEX_BIT, 16, 16, dummy_values);
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, NoBeginCommandBuffer) {
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "You must call vkBeginCommandBuffer() before this call to ");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    VkCommandBufferObj commandBuffer(m_device, m_commandPool);
+    // Call EndCommandBuffer() w/o calling BeginCommandBuffer()
+    vk::EndCommandBuffer(commandBuffer.handle());
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, SecondaryCommandBufferNullRenderpass) {
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkCommandBufferObj cb(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+
+    // Force the failure by not setting the Renderpass and Framebuffer fields
+    VkCommandBufferInheritanceInfo cmd_buf_hinfo = {};
+    cmd_buf_hinfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
+
+    VkCommandBufferBeginInfo cmd_buf_info = {};
+    cmd_buf_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+    cmd_buf_info.pNext = NULL;
+    cmd_buf_info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
+    cmd_buf_info.pInheritanceInfo = &cmd_buf_hinfo;
+
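+    // VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT requires a valid renderPass in the
+    // inheritance info, which is deliberately left null here.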
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkCommandBufferBeginInfo-flags-00053");
+    vk::BeginCommandBuffer(cb.handle(), &cmd_buf_info);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, SecondaryCommandBufferRerecordedExplicitReset) {
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "was destroyed or rerecorded");
+
+    // A pool we can reset in.
+    VkCommandPoolObj pool(m_device, m_device->graphics_queue_node_index_, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
+    VkCommandBufferObj secondary(m_device, &pool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+
+    secondary.begin();
+    secondary.end();
+
+    m_commandBuffer->begin();
+    vk::CmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
+
+    // rerecording of secondary
+    secondary.reset();  // explicit reset here.
+    secondary.begin();
+    secondary.end();
+
+    vk::CmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, SecondaryCommandBufferRerecordedNoReset) {
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "was destroyed or rerecorded");
+
+    // A pool we can reset in.
+    VkCommandPoolObj pool(m_device, m_device->graphics_queue_node_index_, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
+    VkCommandBufferObj secondary(m_device, &pool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+
+    secondary.begin();
+    secondary.end();
+
+    m_commandBuffer->begin();
+    vk::CmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
+
+    // rerecording of secondary
+    secondary.begin();  // implicit reset in begin
+    secondary.end();
+
+    vk::CmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CascadedInvalidation) {
+    ASSERT_NO_FATAL_FAILURE(Init());
+
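+    // Record vkCmdSetEvent on the event in a secondary, execute it from the primary, then
+    // destroy the event; the invalidation must cascade from the secondary to the primary.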
+    VkEventCreateInfo eci = {VK_STRUCTURE_TYPE_EVENT_CREATE_INFO, nullptr, 0};
+    VkEvent event;
+    vk::CreateEvent(m_device->device(), &eci, nullptr, &event);
+
+    VkCommandBufferObj secondary(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+    secondary.begin();
+    vk::CmdSetEvent(secondary.handle(), event, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
+    secondary.end();
+
+    m_commandBuffer->begin();
+    vk::CmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
+    m_commandBuffer->end();
+
+    // destroying the event should invalidate both primary and secondary CB
+    vk::DestroyEvent(m_device->device(), event, nullptr);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkEvent");
+    m_commandBuffer->QueueCommandBuffer(false);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CommandBufferResetErrors) {
+    // Cause error due to Begin while recording CB
+    // Then cause 2 errors for attempting to reset CB w/o having
+    // VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT set for the pool from
+    // which CBs were allocated. Note that this bit is off by default.
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBeginCommandBuffer-commandBuffer-00049");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // Calls AllocateCommandBuffers
+    VkCommandBufferObj commandBuffer(m_device, m_commandPool);
+
+    // Force the failure by setting the Renderpass and Framebuffer fields with (fake) data
+    VkCommandBufferInheritanceInfo cmd_buf_hinfo = {};
+    cmd_buf_hinfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
+    VkCommandBufferBeginInfo cmd_buf_info = {};
+    cmd_buf_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+    cmd_buf_info.pNext = NULL;
+    cmd_buf_info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
+    cmd_buf_info.pInheritanceInfo = &cmd_buf_hinfo;
+
+    // Begin CB to transition to recording state
+    vk::BeginCommandBuffer(commandBuffer.handle(), &cmd_buf_info);
+    // Can't re-begin. This should trigger error
+    vk::BeginCommandBuffer(commandBuffer.handle(), &cmd_buf_info);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkResetCommandBuffer-commandBuffer-00046");
+    VkCommandBufferResetFlags flags = 0;  // Don't care about flags for this test
+    // Reset attempt will trigger error due to incorrect CommandPool state
+    vk::ResetCommandBuffer(commandBuffer.handle(), flags);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkBeginCommandBuffer-commandBuffer-00050");
+    // Transition CB to RECORDED state
+    vk::EndCommandBuffer(commandBuffer.handle());
+    // Now attempting to Begin will implicitly reset, which triggers error
+    vk::BeginCommandBuffer(commandBuffer.handle(), &cmd_buf_info);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, ClearColorAttachmentsOutsideRenderPass) {
+    // Call CmdClearAttachments outside of an active RenderPass
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "vkCmdClearAttachments(): This call must be issued inside an active render pass");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // Start no RenderPass
+    m_commandBuffer->begin();
+
+    VkClearAttachment color_attachment;
+    color_attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    color_attachment.clearValue.color.float32[0] = 0;
+    color_attachment.clearValue.color.float32[1] = 0;
+    color_attachment.clearValue.color.float32[2] = 0;
+    color_attachment.clearValue.color.float32[3] = 0;
+    color_attachment.colorAttachment = 0;
+    VkClearRect clear_rect = {{{0, 0}, {32, 32}}, 0, 1};
+    vk::CmdClearAttachments(m_commandBuffer->handle(), 1, &color_attachment, 1, &clear_rect);
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, ClearColorAttachmentsZeroLayercount) {
+    TEST_DESCRIPTION("Call CmdClearAttachments with a pRect having a layerCount of zero.");
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearAttachments-layerCount-01934");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    m_commandBuffer->begin();
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &renderPassBeginInfo(), VK_SUBPASS_CONTENTS_INLINE);
+
+    VkClearAttachment color_attachment;
+    color_attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    color_attachment.clearValue.color.float32[0] = 0;
+    color_attachment.clearValue.color.float32[1] = 0;
+    color_attachment.clearValue.color.float32[2] = 0;
+    color_attachment.clearValue.color.float32[3] = 0;
+    color_attachment.colorAttachment = 0;
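+    // clear_rect below leaves baseArrayLayer and layerCount zero-initialized; the
+    // layerCount of 0 is what triggers the expected VUID.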
+    VkClearRect clear_rect = {{{0, 0}, {32, 32}}};
+    vk::CmdClearAttachments(m_commandBuffer->handle(), 1, &color_attachment, 1, &clear_rect);
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, ExecuteCommandsPrimaryCB) {
+    TEST_DESCRIPTION("Attempt vkCmdExecuteCommands with a primary command buffer (should only be secondary)");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // An empty primary command buffer
+    VkCommandBufferObj cb(m_device, m_commandPool);
+    cb.begin();
+    cb.end();
+
+    m_commandBuffer->begin();
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &renderPassBeginInfo(), VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
+    VkCommandBuffer handle = cb.handle();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdExecuteCommands-pCommandBuffers-00088");
+    vk::CmdExecuteCommands(m_commandBuffer->handle(), 1, &handle);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetUnexpectedError("All elements of pCommandBuffers must not be in the pending state");
+
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, InvalidVertexAttributeAlignment) {
+    TEST_DESCRIPTION("Check for proper aligment of attribAddress which depends on a bound pipeline and on a bound vertex buffer");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    const VkPipelineLayoutObj pipeline_layout(m_device);
+
+    struct VboEntry {
+        uint16_t input0[2];
+        uint32_t input1;
+        float input2[4];
+    };
+
+    const unsigned vbo_entry_count = 3;
+    const VboEntry vbo_data[vbo_entry_count] = {};
+
+    VkConstantBufferObj vbo(m_device, static_cast<int>(sizeof(VboEntry) * vbo_entry_count),
+                            reinterpret_cast<const void *>(vbo_data), VK_BUFFER_USAGE_VERTEX_BUFFER_BIT);
+
+    VkVertexInputBindingDescription input_binding;
+    input_binding.binding = 0;
+    input_binding.stride = sizeof(VboEntry);
+    input_binding.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
+
+    VkVertexInputAttributeDescription input_attribs[3];
+
+    input_attribs[0].binding = 0;
+    // Location switch between attrib[0] and attrib[1] is intentional
+    input_attribs[0].location = 1;
+    input_attribs[0].format = VK_FORMAT_A8B8G8R8_UNORM_PACK32;
+    input_attribs[0].offset = offsetof(VboEntry, input1);
+
+    input_attribs[1].binding = 0;
+    input_attribs[1].location = 0;
+    input_attribs[1].format = VK_FORMAT_R16G16_UNORM;
+    input_attribs[1].offset = offsetof(VboEntry, input0);
+
+    input_attribs[2].binding = 0;
+    input_attribs[2].location = 2;
+    input_attribs[2].format = VK_FORMAT_R32G32B32A32_SFLOAT;
+    input_attribs[2].offset = offsetof(VboEntry, input2);
+
+    char const *vsSource =
+        "#version 450\n"
+        "\n"
+        "layout(location = 0) in vec2 input0;"
+        "layout(location = 1) in vec4 input1;"
+        "layout(location = 2) in vec4 input2;"
+        "\n"
+        "void main(){\n"
+        "   gl_Position = input1 + input2;\n"
+        "   gl_Position.xy += input0;\n"
+        "}\n";
+
+    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    VkPipelineObj pipe1(m_device);
+    pipe1.AddDefaultColorAttachment();
+    pipe1.AddShader(&vs);
+    pipe1.AddShader(&fs);
+    pipe1.AddVertexInputBindings(&input_binding, 1);
+    pipe1.AddVertexInputAttribs(&input_attribs[0], 3);
+    pipe1.SetViewport(m_viewports);
+    pipe1.SetScissor(m_scissors);
+    pipe1.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+    input_binding.stride = 6;
+
+    VkPipelineObj pipe2(m_device);
+    pipe2.AddDefaultColorAttachment();
+    pipe2.AddShader(&vs);
+    pipe2.AddShader(&fs);
+    pipe2.AddVertexInputBindings(&input_binding, 1);
+    pipe2.AddVertexInputAttribs(&input_attribs[0], 3);
+    pipe2.SetViewport(m_viewports);
+    pipe2.SetScissor(m_scissors);
+    pipe2.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+    // Test with invalid buffer offset
+    VkDeviceSize offset = 1;
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe1.handle());
+    vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Invalid attribAddress alignment for vertex attribute 0");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Invalid attribAddress alignment for vertex attribute 1");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Invalid attribAddress alignment for vertex attribute 2");
+    m_commandBuffer->Draw(1, 0, 0, 0);
+    m_errorMonitor->VerifyFound();
+
+    // Test with invalid buffer stride
+    offset = 0;
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe2.handle());
+    vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Invalid attribAddress alignment for vertex attribute 0");
+    // Attribute[1] is aligned properly even with a wrong stride
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Invalid attribAddress alignment for vertex attribute 2");
+    m_commandBuffer->Draw(1, 0, 0, 0);
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, NonSimultaneousSecondaryMarksPrimary) {
+    ASSERT_NO_FATAL_FAILURE(Init());
+    const char *simultaneous_use_message = "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBufferSimultaneousUse";
+
+    VkCommandBufferObj secondary(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+
+    secondary.begin();
+    secondary.end();
+
+    VkCommandBufferBeginInfo cbbi = {
+        VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+        nullptr,
+        VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT,
+        nullptr,
+    };
+
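+    // The primary is begun with SIMULTANEOUS_USE, but the secondary was recorded without
+    // it, so executing the secondary strips that guarantee from the primary (warning).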
+    m_commandBuffer->begin(&cbbi);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT, simultaneous_use_message);
+    vk::CmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
+    m_errorMonitor->VerifyFound();
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, SimultaneousUseSecondaryTwoExecutes) {
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    const char *simultaneous_use_message = "VUID-vkCmdExecuteCommands-pCommandBuffers-00092";
+
+    VkCommandBufferObj secondary(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+
+    VkCommandBufferInheritanceInfo inh = {
+        VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
+        nullptr,
+    };
+    VkCommandBufferBeginInfo cbbi = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr, 0, &inh};
+
+    secondary.begin(&cbbi);
+    secondary.end();
+
+    m_commandBuffer->begin();
+    vk::CmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, simultaneous_use_message);
+    vk::CmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
+    m_errorMonitor->VerifyFound();
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, SimultaneousUseSecondarySingleExecute) {
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // variation on previous test executing the same CB twice in the same
+    // CmdExecuteCommands call
+
+    const char *simultaneous_use_message = "VUID-vkCmdExecuteCommands-pCommandBuffers-00093";
+
+    VkCommandBufferObj secondary(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+
+    VkCommandBufferInheritanceInfo inh = {
+        VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
+        nullptr,
+    };
+    VkCommandBufferBeginInfo cbbi = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr, 0, &inh};
+
+    secondary.begin(&cbbi);
+    secondary.end();
+
+    m_commandBuffer->begin();
+    VkCommandBuffer cbs[] = {secondary.handle(), secondary.handle()};
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, simultaneous_use_message);
+    vk::CmdExecuteCommands(m_commandBuffer->handle(), 2, cbs);
+    m_errorMonitor->VerifyFound();
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, SimultaneousUseOneShot) {
+    TEST_DESCRIPTION("Submit the same command buffer twice in one submit looking for simultaneous use and one time submit errors");
+    const char *simultaneous_use_message = "is already in use and is not marked for simultaneous use";
+    const char *one_shot_message = "VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT set, but has been submitted";
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkCommandBuffer cmd_bufs[2];
+    VkCommandBufferAllocateInfo alloc_info;
+    alloc_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+    alloc_info.pNext = NULL;
+    alloc_info.commandBufferCount = 2;
+    alloc_info.commandPool = m_commandPool->handle();
+    alloc_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+    vk::AllocateCommandBuffers(m_device->device(), &alloc_info, cmd_bufs);
+
+    VkCommandBufferBeginInfo cb_binfo;
+    cb_binfo.pNext = NULL;
+    cb_binfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+    cb_binfo.pInheritanceInfo = VK_NULL_HANDLE;
+    cb_binfo.flags = 0;
+    vk::BeginCommandBuffer(cmd_bufs[0], &cb_binfo);
+    VkViewport viewport = {0, 0, 16, 16, 0, 1};
+    vk::CmdSetViewport(cmd_bufs[0], 0, 1, &viewport);
+    vk::EndCommandBuffer(cmd_bufs[0]);
+    VkCommandBuffer duplicates[2] = {cmd_bufs[0], cmd_bufs[0]};
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 2;
+    submit_info.pCommandBuffers = duplicates;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, simultaneous_use_message);
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+    vk::QueueWaitIdle(m_device->m_queue);
+
+    // Set one time use and now look for one time submit
+    duplicates[0] = duplicates[1] = cmd_bufs[1];
+    cb_binfo.flags = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT | VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
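+    // SIMULTANEOUS_USE keeps the duplicate submission from hitting the in-use check,
+    // leaving only the ONE_TIME_SUBMIT violation to be reported.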
+    vk::BeginCommandBuffer(cmd_bufs[1], &cb_binfo);
+    vk::CmdSetViewport(cmd_bufs[1], 0, 1, &viewport);
+    vk::EndCommandBuffer(cmd_bufs[1]);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, one_shot_message);
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+    vk::QueueWaitIdle(m_device->m_queue);
+}
+
+TEST_F(VkLayerTest, DrawTimeImageViewTypeMismatchWithPipeline) {
+    TEST_DESCRIPTION(
+        "Test that an error is produced when an image view type does not match the dimensionality declared in the shader");
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "requires an image view of type VK_IMAGE_VIEW_TYPE_3D");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    char const *fsSource =
+        "#version 450\n"
+        "\n"
+        "layout(set=0, binding=0) uniform sampler3D s;\n"
+        "layout(location=0) out vec4 color;\n"
+        "void main() {\n"
+        "   color = texture(s, vec3(0));\n"
+        "}\n";
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    VkPipelineObj pipe(m_device);
+    pipe.AddShader(&vs);
+    pipe.AddShader(&fs);
+    pipe.AddDefaultColorAttachment();
+
+    VkTextureObj texture(m_device, nullptr);
+    VkSamplerObj sampler(m_device);
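+    // The bound texture is a 2D view, while the shader declares sampler3D, so the
+    // mismatch is only detectable at draw time.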
+
+    VkDescriptorSetObj descriptorSet(m_device);
+    descriptorSet.AppendSamplerTexture(&sampler, &texture);
+    descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+    VkResult err = pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+    ASSERT_VK_SUCCESS(err);
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+    m_commandBuffer->BindDescriptorSet(descriptorSet);
+
+    VkViewport viewport = {0, 0, 16, 16, 0, 1};
+    vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+    VkRect2D scissor = {{0, 0}, {16, 16}};
+    vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+
+    // error produced here.
+    vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
+
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, DrawTimeImageMultisampleMismatchWithPipeline) {
+    TEST_DESCRIPTION(
+        "Test that an error is produced when a multisampled images are consumed via singlesample images types in the shader, or "
+        "vice versa.");
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "requires bound image to have multiple samples");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    char const *fsSource =
+        "#version 450\n"
+        "\n"
+        "layout(set=0, binding=0) uniform sampler2DMS s;\n"
+        "layout(location=0) out vec4 color;\n"
+        "void main() {\n"
+        "   color = texelFetch(s, ivec2(0), 0);\n"
+        "}\n";
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    VkPipelineObj pipe(m_device);
+    pipe.AddShader(&vs);
+    pipe.AddShader(&fs);
+    pipe.AddDefaultColorAttachment();
+
+    VkTextureObj texture(m_device, nullptr);  // THIS LINE CAUSES CRASH ON MALI
+    VkSamplerObj sampler(m_device);
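+    // The bound texture is single-sampled, while the shader declares sampler2DMS, so the
+    // sample-count mismatch is reported at draw time.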
+
+    VkDescriptorSetObj descriptorSet(m_device);
+    descriptorSet.AppendSamplerTexture(&sampler, &texture);
+    descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+    VkResult err = pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+    ASSERT_VK_SUCCESS(err);
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+    m_commandBuffer->BindDescriptorSet(descriptorSet);
+
+    VkViewport viewport = {0, 0, 16, 16, 0, 1};
+    vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+    VkRect2D scissor = {{0, 0}, {16, 16}};
+    vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+
+    // error produced here.
+    vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
+
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, DrawTimeImageComponentTypeMismatchWithPipeline) {
+    TEST_DESCRIPTION(
+        "Test that an error is produced when the component type of an imageview disagrees with the type in the shader.");
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SINT component type, but bound descriptor");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    char const *fsSource =
+        "#version 450\n"
+        "\n"
+        "layout(set=0, binding=0) uniform isampler2D s;\n"
+        "layout(location=0) out vec4 color;\n"
+        "void main() {\n"
+        "   color = texelFetch(s, ivec2(0), 0);\n"
+        "}\n";
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    VkPipelineObj pipe(m_device);
+    pipe.AddShader(&vs);
+    pipe.AddShader(&fs);
+    pipe.AddDefaultColorAttachment();
+
+    VkTextureObj texture(m_device, nullptr);  // UNORM texture by default, incompatible with isampler2D
+    VkSamplerObj sampler(m_device);
+
+    VkDescriptorSetObj descriptorSet(m_device);
+    descriptorSet.AppendSamplerTexture(&sampler, &texture);
+    descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+    VkResult err = pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+    ASSERT_VK_SUCCESS(err);
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+    m_commandBuffer->BindDescriptorSet(descriptorSet);
+
+    VkViewport viewport = {0, 0, 16, 16, 0, 1};
+    vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+    VkRect2D scissor = {{0, 0}, {16, 16}};
+    vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+
+    // error produced here.
+    vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
+
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, CopyImageLayerCountMismatch) {
+    TEST_DESCRIPTION(
+        "Try to copy between images with the source subresource having a different layerCount than the destination subresource");
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // Create two images to copy between
+    VkImageObj src_image_obj(m_device);
+    VkImageObj dst_image_obj(m_device);
+
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
+    image_create_info.extent.width = 32;
+    image_create_info.extent.height = 32;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 4;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+    image_create_info.flags = 0;
+
+    src_image_obj.init(&image_create_info);
+    ASSERT_TRUE(src_image_obj.initialized());
+
+    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    dst_image_obj.init(&image_create_info);
+    ASSERT_TRUE(dst_image_obj.initialized());
+
+    m_commandBuffer->begin();
+    VkImageCopy copyRegion;
+    copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copyRegion.srcSubresource.mipLevel = 0;
+    copyRegion.srcSubresource.baseArrayLayer = 0;
+    copyRegion.srcSubresource.layerCount = 1;
+    copyRegion.srcOffset.x = 0;
+    copyRegion.srcOffset.y = 0;
+    copyRegion.srcOffset.z = 0;
+    copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copyRegion.dstSubresource.mipLevel = 0;
+    copyRegion.dstSubresource.baseArrayLayer = 0;
+    // Introduce failure by forcing the dst layerCount to differ from src
+    copyRegion.dstSubresource.layerCount = 3;
+    copyRegion.dstOffset.x = 0;
+    copyRegion.dstOffset.y = 0;
+    copyRegion.dstOffset.z = 0;
+    copyRegion.extent.width = 1;
+    copyRegion.extent.height = 1;
+    copyRegion.extent.depth = 1;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-extent-00140");
+    m_commandBuffer->CopyImage(src_image_obj.image(), VK_IMAGE_LAYOUT_GENERAL, dst_image_obj.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copyRegion);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CompressedImageMipCopyTests) {
+    TEST_DESCRIPTION("Image/Buffer copies for higher mip levels");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkPhysicalDeviceFeatures device_features = {};
+    ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
+    VkFormat compressed_format = VK_FORMAT_UNDEFINED;
+    if (device_features.textureCompressionBC) {
+        compressed_format = VK_FORMAT_BC3_SRGB_BLOCK;
+    } else if (device_features.textureCompressionETC2) {
+        compressed_format = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK;
+    } else if (device_features.textureCompressionASTC_LDR) {
+        compressed_format = VK_FORMAT_ASTC_4x4_UNORM_BLOCK;
+    } else {
+        printf("%s No compressed formats supported - CompressedImageMipCopyTests skipped.\n", kSkipPrefix);
+        return;
+    }
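+    // All three candidate formats use 16-byte 4x4 texel blocks (one byte per texel),
+    // which the buffer-size comments below assume.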
+
+    VkImageCreateInfo ci;
+    ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    ci.pNext = NULL;
+    ci.flags = 0;
+    ci.imageType = VK_IMAGE_TYPE_2D;
+    ci.format = compressed_format;
+    ci.extent = {32, 32, 1};
+    ci.mipLevels = 6;
+    ci.arrayLayers = 1;
+    ci.samples = VK_SAMPLE_COUNT_1_BIT;
+    ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+    ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    ci.queueFamilyIndexCount = 0;
+    ci.pQueueFamilyIndices = NULL;
+    ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+    VkImageObj image(m_device);
+    image.init(&ci);
+    ASSERT_TRUE(image.initialized());
+
+    VkImageObj odd_image(m_device);
+    ci.extent = {31, 32, 1};  // Mips are [31,32] [15,16] [7,8] [3,4], [1,2] [1,1]
+    odd_image.init(&ci);
+    ASSERT_TRUE(odd_image.initialized());
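+    // The odd 31-texel width yields mips that are not block-aligned, so a copy whose
+    // offset + extent lands exactly on the mip edge is legal even with a partial block.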
+
+    // Allocate buffers
+    VkMemoryPropertyFlags reqs = 0;
+    VkBufferObj buffer_1024, buffer_64, buffer_16, buffer_8;
+    buffer_1024.init_as_src_and_dst(*m_device, 1024, reqs);
+    buffer_64.init_as_src_and_dst(*m_device, 64, reqs);
+    buffer_16.init_as_src_and_dst(*m_device, 16, reqs);
+    buffer_8.init_as_src_and_dst(*m_device, 8, reqs);
+
+    VkBufferImageCopy region = {};
+    region.bufferRowLength = 0;
+    region.bufferImageHeight = 0;
+    region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    region.imageSubresource.layerCount = 1;
+    region.imageOffset = {0, 0, 0};
+    region.bufferOffset = 0;
+
+    // start recording
+    m_commandBuffer->begin();
+
+    // Mip level copies that work - 5 levels
+    m_errorMonitor->ExpectSuccess();
+
+    // Mip 0 should fit in 1k buffer - 1k texels @ 1b each
+    region.imageExtent = {32, 32, 1};
+    region.imageSubresource.mipLevel = 0;
+    vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_1024.handle(), 1, &region);
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer_1024.handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+
+    // Mip 2 should fit in 64b buffer - 64 texels @ 1b each
+    region.imageExtent = {8, 8, 1};
+    region.imageSubresource.mipLevel = 2;
+    vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_64.handle(), 1, &region);
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer_64.handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+
+    // Mip 3 should fit in 16b buffer - 16 texels @ 1b each
+    region.imageExtent = {4, 4, 1};
+    region.imageSubresource.mipLevel = 3;
+    vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16.handle(), 1, &region);
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16.handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+
+    // Mip 4&5 should fit in 16b buffer with no complaint - 4 & 1 texels @ 1b each
+    region.imageExtent = {2, 2, 1};
+    region.imageSubresource.mipLevel = 4;
+    vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16.handle(), 1, &region);
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16.handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+
+    region.imageExtent = {1, 1, 1};
+    region.imageSubresource.mipLevel = 5;
+    vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16.handle(), 1, &region);
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16.handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+    m_errorMonitor->VerifyNotFound();
+
+    // Buffer must accommodate a full compressed block, regardless of texel count
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImageToBuffer-pRegions-00183");
+    vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_8.handle(), 1, &region);
+    m_errorMonitor->VerifyFound();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyBufferToImage-pRegions-00171");
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer_8.handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+    m_errorMonitor->VerifyFound();
+
+    // Copy width < compressed block size, but not the full mip width
+    region.imageExtent = {1, 2, 1};
+    region.imageSubresource.mipLevel = 4;
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "VUID-VkBufferImageCopy-imageExtent-00207");  // width not a multiple of compressed block width
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyImageToBuffer-imageOffset-01794");  // image transfer granularity
+    vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16.handle(), 1, &region);
+    m_errorMonitor->VerifyFound();
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "VUID-VkBufferImageCopy-imageExtent-00207");  // width not a multiple of compressed block width
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyBufferToImage-imageOffset-01793");  // image transfer granularity
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16.handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+    m_errorMonitor->VerifyFound();
+
+    // Copy height < compressed block size but not the full mip height
+    region.imageExtent = {2, 1, 1};
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "VUID-VkBufferImageCopy-imageExtent-00208");  // height not a multiple of compressed block width
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyImageToBuffer-imageOffset-01794");  // image transfer granularity
+    vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16.handle(), 1, &region);
+    m_errorMonitor->VerifyFound();
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "VUID-VkBufferImageCopy-imageExtent-00208");  // height not a multiple of compressed block width
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyBufferToImage-imageOffset-01793");  // image transfer granularity
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16.handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+    m_errorMonitor->VerifyFound();
+
+    // Offsets must be multiple of compressed block size
+    region.imageOffset = {1, 1, 0};
+    region.imageExtent = {1, 1, 1};
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkBufferImageCopy-imageOffset-00205");  // imageOffset not a multiple of block size
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyImageToBuffer-imageOffset-01794");  // image transfer granularity
+    vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16.handle(), 1, &region);
+    m_errorMonitor->VerifyFound();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkBufferImageCopy-imageOffset-00205");  // imageOffset not a multiple of block size
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyBufferToImage-imageOffset-01793");  // image transfer granularity
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16.handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+    m_errorMonitor->VerifyFound();
+
+    // Offset + extent width = mip width - should succeed
+    region.imageOffset = {4, 4, 0};
+    region.imageExtent = {3, 4, 1};
+    region.imageSubresource.mipLevel = 2;
+    m_errorMonitor->ExpectSuccess();
+    vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), odd_image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16.handle(), 1,
+                             &region);
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16.handle(), odd_image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                             &region);
+    m_errorMonitor->VerifyNotFound();
+
+    // Offset + extent width < mip width and not a multiple of block width - should fail
+    region.imageExtent = {3, 3, 1};
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "VUID-VkBufferImageCopy-imageExtent-00208");  // offset+extent not a multiple of block width
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyImageToBuffer-imageOffset-01794");  // image transfer granularity
+    vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), odd_image.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16.handle(), 1,
+                             &region);
+    m_errorMonitor->VerifyFound();
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "VUID-VkBufferImageCopy-imageExtent-00208");  // offset+extent not a multiple of block width
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyBufferToImage-imageOffset-01793");  // image transfer granularity
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16.handle(), odd_image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                             &region);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, ImageBufferCopyTests) {
+    TEST_DESCRIPTION("Image to buffer and buffer to image tests");
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // Bail if any dimension of transfer granularity is 0.
+    auto index = m_device->graphics_queue_node_index_;
+    auto queue_family_properties = m_device->phy().queue_properties();
+    if ((queue_family_properties[index].minImageTransferGranularity.depth == 0) ||
+        (queue_family_properties[index].minImageTransferGranularity.width == 0) ||
+        (queue_family_properties[index].minImageTransferGranularity.height == 0)) {
+        printf("%s Subresource copies are disallowed when xfer granularity (x|y|z) is 0. Skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    VkImageObj image_64k(m_device);        // 128^2 texels, 64k
+    VkImageObj image_16k(m_device);        // 64^2 texels, 16k
+    VkImageObj image_16k_depth(m_device);  // 64^2 texels, depth, 16k
+    VkImageObj ds_image_4D_1S(m_device);   // 256^2 texels, 512kb (256k depth, 64k stencil, 192k pack)
+    VkImageObj ds_image_3D_1S(m_device);   // 256^2 texels, 256kb (192k depth, 64k stencil)
+    VkImageObj ds_image_2D(m_device);      // 256^2 texels, 128k (128k depth)
+    VkImageObj ds_image_1S(m_device);      // 256^2 texels, 64k (64k stencil)
+
+    image_64k.Init(128, 128, 1, VK_FORMAT_R8G8B8A8_UINT,
+                   VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+                   VK_IMAGE_TILING_OPTIMAL, 0);
+    image_16k.Init(64, 64, 1, VK_FORMAT_R8G8B8A8_UINT,
+                   VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+                   VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(image_64k.initialized());
+    ASSERT_TRUE(image_16k.initialized());
+
+    // Verify all needed Depth/Stencil formats are supported
+    bool missing_ds_support = false;
+    VkFormatProperties props = {0, 0, 0};
+    vk::GetPhysicalDeviceFormatProperties(m_device->phy().handle(), VK_FORMAT_D32_SFLOAT_S8_UINT, &props);
+    missing_ds_support |= (props.bufferFeatures == 0 && props.linearTilingFeatures == 0 && props.optimalTilingFeatures == 0);
+    missing_ds_support |= (props.optimalTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_SRC_BIT) == 0;
+    missing_ds_support |= (props.optimalTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_DST_BIT) == 0;
+    vk::GetPhysicalDeviceFormatProperties(m_device->phy().handle(), VK_FORMAT_D24_UNORM_S8_UINT, &props);
+    missing_ds_support |= (props.bufferFeatures == 0 && props.linearTilingFeatures == 0 && props.optimalTilingFeatures == 0);
+    missing_ds_support |= (props.optimalTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_SRC_BIT) == 0;
+    missing_ds_support |= (props.optimalTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_DST_BIT) == 0;
+    vk::GetPhysicalDeviceFormatProperties(m_device->phy().handle(), VK_FORMAT_D16_UNORM, &props);
+    missing_ds_support |= (props.bufferFeatures == 0 && props.linearTilingFeatures == 0 && props.optimalTilingFeatures == 0);
+    missing_ds_support |= (props.optimalTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_SRC_BIT) == 0;
+    missing_ds_support |= (props.optimalTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_DST_BIT) == 0;
+    vk::GetPhysicalDeviceFormatProperties(m_device->phy().handle(), VK_FORMAT_S8_UINT, &props);
+    missing_ds_support |= (props.bufferFeatures == 0 && props.linearTilingFeatures == 0 && props.optimalTilingFeatures == 0);
+    missing_ds_support |= (props.optimalTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_SRC_BIT) == 0;
+    missing_ds_support |= (props.optimalTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_DST_BIT) == 0;
+
+    if (!missing_ds_support) {
+        image_16k_depth.Init(64, 64, 1, VK_FORMAT_D24_UNORM_S8_UINT,
+                             VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+        ASSERT_TRUE(image_16k_depth.initialized());
+
+        ds_image_4D_1S.Init(
+            256, 256, 1, VK_FORMAT_D32_SFLOAT_S8_UINT,
+            VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
+            VK_IMAGE_TILING_OPTIMAL, 0);
+        ASSERT_TRUE(ds_image_4D_1S.initialized());
+
+        ds_image_3D_1S.Init(
+            256, 256, 1, VK_FORMAT_D24_UNORM_S8_UINT,
+            VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
+            VK_IMAGE_TILING_OPTIMAL, 0);
+        ASSERT_TRUE(ds_image_3D_1S.initialized());
+
+        ds_image_2D.Init(
+            256, 256, 1, VK_FORMAT_D16_UNORM,
+            VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
+            VK_IMAGE_TILING_OPTIMAL, 0);
+        ASSERT_TRUE(ds_image_2D.initialized());
+
+        ds_image_1S.Init(
+            256, 256, 1, VK_FORMAT_S8_UINT,
+            VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
+            VK_IMAGE_TILING_OPTIMAL, 0);
+        ASSERT_TRUE(ds_image_1S.initialized());
+    }
+
+    // Allocate buffers
+    VkBufferObj buffer_256k, buffer_128k, buffer_64k, buffer_16k;
+    VkMemoryPropertyFlags reqs = 0;
+    buffer_256k.init_as_src_and_dst(*m_device, 262144, reqs);  // 256k
+    buffer_128k.init_as_src_and_dst(*m_device, 131072, reqs);  // 128k
+    buffer_64k.init_as_src_and_dst(*m_device, 65536, reqs);    // 64k
+    buffer_16k.init_as_src_and_dst(*m_device, 16384, reqs);    // 16k
+
+    VkBufferImageCopy region = {};
+    region.bufferRowLength = 0;
+    region.bufferImageHeight = 0;
+    region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    region.imageSubresource.layerCount = 1;
+    region.imageOffset = {0, 0, 0};
+    region.imageExtent = {64, 64, 1};
+    region.bufferOffset = 0;
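+    // A full 64x64 region of this 4-byte-per-texel format is exactly 16K, matching buffer_16k.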
+
+    // attempt copies before putting command buffer in recording state
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyBufferToImage-commandBuffer-recording");
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer_64k.handle(), image_64k.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                             &region);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImageToBuffer-commandBuffer-recording");
+    vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image_64k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_64k.handle(), 1,
+                             &region);
+    m_errorMonitor->VerifyFound();
+
+    // start recording
+    m_commandBuffer->begin();
+
+    // successful copies
+    m_errorMonitor->ExpectSuccess();
+    vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(), 1,
+                             &region);
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16k.handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                             &region);
+    region.imageOffset.x = 16;  // 16k copy, offset requires larger image
+    vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image_64k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(), 1,
+                             &region);
+    region.imageExtent.height = 78;  // > 16k copy requires larger buffer & image
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer_64k.handle(), image_64k.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                             &region);
+    region.imageOffset.x = 0;
+    region.imageExtent.height = 64;
+    region.bufferOffset = 256;  // 16k copy with buffer offset, requires larger buffer
+    vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_64k.handle(), 1,
+                             &region);
+    m_errorMonitor->VerifyNotFound();
+
+    // image/buffer too small (extent too large) on copy to image
+    region.imageExtent = {65, 64, 1};
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyBufferToImage-pRegions-00171");  // buffer too small
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16k.handle(), image_64k.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                             &region);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetUnexpectedError("VUID-VkBufferImageCopy-imageOffset-00197");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyBufferToImage-pRegions-00172");  // image too small
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer_64k.handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                             &region);
+    m_errorMonitor->VerifyFound();
+
+    // image/buffer too small (offset) on copy to image
+    region.imageExtent = {64, 64, 1};
+    region.imageOffset = {0, 4, 0};
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyBufferToImage-pRegions-00171");  // buffer too small
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16k.handle(), image_64k.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                             &region);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetUnexpectedError("VUID-VkBufferImageCopy-imageOffset-00197");
+    m_errorMonitor->SetUnexpectedError("VUID-VkBufferImageCopy-imageOffset-00198");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyBufferToImage-pRegions-00172");  // image too small
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer_64k.handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                             &region);
+    m_errorMonitor->VerifyFound();
+
+    // image/buffer too small on copy to buffer
+    region.imageExtent = {64, 64, 1};
+    region.imageOffset = {0, 0, 0};
+    region.bufferOffset = 4;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyImageToBuffer-pRegions-00183");  // buffer too small
+    vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image_64k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(), 1,
+                             &region);
+    m_errorMonitor->VerifyFound();
+
+    region.imageExtent = {64, 65, 1};
+    region.bufferOffset = 0;
+    m_errorMonitor->SetUnexpectedError("VUID-VkBufferImageCopy-imageOffset-00198");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyImageToBuffer-pRegions-00182");  // image too small
+    vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_64k.handle(), 1,
+                             &region);
+    m_errorMonitor->VerifyFound();
+
+    // buffer size OK but rowlength causes loose packing
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImageToBuffer-pRegions-00183");
+    region.imageExtent = {64, 64, 1};
+    region.bufferRowLength = 68;
+    vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(), 1,
+                             &region);
+    m_errorMonitor->VerifyFound();
+
+    // An extent with zero area should produce a warning, but no error
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT | VK_DEBUG_REPORT_ERROR_BIT_EXT, "} has zero area");
+    region.imageExtent.width = 0;
+    vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(), 1,
+                             &region);
+    m_errorMonitor->VerifyFound();
+
+    // aspect bits
+    region.imageExtent = {64, 64, 1};
+    region.bufferRowLength = 0;
+    region.bufferImageHeight = 0;
+    if (!missing_ds_support) {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkBufferImageCopy-aspectMask-00212");  // more than 1 aspect bit set
+        region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+        vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k_depth.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(),
+                                 1, &region);
+        m_errorMonitor->VerifyFound();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkBufferImageCopy-aspectMask-00211");  // different mis-matched aspect
+        region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+        vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k_depth.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(),
+                                 1, &region);
+        m_errorMonitor->VerifyFound();
+    }
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkBufferImageCopy-aspectMask-00211");  // mis-matched aspect
+    region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+    vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(), 1,
+                             &region);
+    m_errorMonitor->VerifyFound();
+    region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+    // Out-of-range mip levels should fail
+    region.imageSubresource.mipLevel = image_16k.create_info().mipLevels + 1;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImageToBuffer-imageSubresource-01703");
+    m_errorMonitor->SetUnexpectedError("VUID-VkBufferImageCopy-imageOffset-00197");
+    m_errorMonitor->SetUnexpectedError("VUID-VkBufferImageCopy-imageOffset-00198");
+    m_errorMonitor->SetUnexpectedError("VUID-VkBufferImageCopy-imageOffset-00200");
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "VUID-vkCmdCopyImageToBuffer-pRegions-00182");  // unavoidable "region exceeds image bounds" for non-existent mip
+    vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(), 1,
+                             &region);
+    m_errorMonitor->VerifyFound();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyBufferToImage-imageSubresource-01701");
+    m_errorMonitor->SetUnexpectedError("VUID-VkBufferImageCopy-imageOffset-00197");
+    m_errorMonitor->SetUnexpectedError("VUID-VkBufferImageCopy-imageOffset-00198");
+    m_errorMonitor->SetUnexpectedError("VUID-VkBufferImageCopy-imageOffset-00200");
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "VUID-vkCmdCopyBufferToImage-pRegions-00172");  // unavoidable "region exceeds image bounds" for non-existent mip
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16k.handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                             &region);
+    m_errorMonitor->VerifyFound();
+    region.imageSubresource.mipLevel = 0;
+
+    // Out-of-range array layers should fail
+    region.imageSubresource.baseArrayLayer = image_16k.create_info().arrayLayers;
+    region.imageSubresource.layerCount = 1;
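+    // baseArrayLayer == arrayLayers is one past the last valid layer.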
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImageToBuffer-imageSubresource-01704");
+    vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_16k.handle(), 1,
+                             &region);
+    m_errorMonitor->VerifyFound();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyBufferToImage-imageSubresource-01702");
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16k.handle(), image_16k.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                             &region);
+    m_errorMonitor->VerifyFound();
+    region.imageSubresource.baseArrayLayer = 0;
+
+    // Layout mismatch should fail
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImageToBuffer-srcImageLayout-00189");
+    vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+                             buffer_16k.handle(), 1, &region);
+    m_errorMonitor->VerifyFound();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyBufferToImage-dstImageLayout-00180");
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer_16k.handle(), image_16k.handle(),
+                             VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
+    m_errorMonitor->VerifyFound();
+
+    // Test Depth/Stencil copies
+    if (missing_ds_support) {
+        printf("%s Depth / Stencil formats unsupported - skipping D/S tests.\n", kSkipPrefix);
+    } else {
+        VkBufferImageCopy ds_region = {};
+        ds_region.bufferOffset = 0;
+        ds_region.bufferRowLength = 0;
+        ds_region.bufferImageHeight = 0;
+        ds_region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+        ds_region.imageSubresource.mipLevel = 0;
+        ds_region.imageSubresource.baseArrayLayer = 0;
+        ds_region.imageSubresource.layerCount = 1;
+        ds_region.imageOffset = {0, 0, 0};
+        ds_region.imageExtent = {256, 256, 1};
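+        // Each region covers 256x256 texels, so e.g. 4 bytes of depth per texel needs a 256k buffer and 1 byte of stencil needs 64k.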
+
+        // Depth copies that should succeed
+        m_errorMonitor->ExpectSuccess();  // Extract 4b depth per texel, pack into 256k buffer
+        vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_4D_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+                                 buffer_256k.handle(), 1, &ds_region);
+        m_errorMonitor->VerifyNotFound();
+
+        m_errorMonitor->ExpectSuccess();  // Extract 3b depth per texel, pack (loose) into 256k buffer
+        vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_3D_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+                                 buffer_256k.handle(), 1, &ds_region);
+        m_errorMonitor->VerifyNotFound();
+
+        m_errorMonitor->ExpectSuccess();  // Copy 2b depth per texel, into 128k buffer
+        vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_2D.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+                                 buffer_128k.handle(), 1, &ds_region);
+        m_errorMonitor->VerifyNotFound();
+
+        // Depth copies that should fail
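+        // With a nonzero bufferOffset the required size exceeds each destination buffer, so the "buffer too small" VU fires.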
+        ds_region.bufferOffset = 4;
+        m_errorMonitor->SetDesiredFailureMsg(
+            VK_DEBUG_REPORT_ERROR_BIT_EXT,
+            "VUID-vkCmdCopyImageToBuffer-pRegions-00183");  // Extract 4b depth per texel, pack into 256k buffer
+        vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_4D_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+                                 buffer_256k.handle(), 1, &ds_region);
+        m_errorMonitor->VerifyFound();
+
+        m_errorMonitor->SetDesiredFailureMsg(
+            VK_DEBUG_REPORT_ERROR_BIT_EXT,
+            "VUID-vkCmdCopyImageToBuffer-pRegions-00183");  // Extract 3b depth per texel, pack (loose) into 256k buffer
+        vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_3D_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+                                 buffer_256k.handle(), 1, &ds_region);
+        m_errorMonitor->VerifyFound();
+
+        m_errorMonitor->SetDesiredFailureMsg(
+            VK_DEBUG_REPORT_ERROR_BIT_EXT,
+            "VUID-vkCmdCopyImageToBuffer-pRegions-00183");  // Copy 2b depth per texel, into 128k buffer
+        vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_2D.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+                                 buffer_128k.handle(), 1, &ds_region);
+        m_errorMonitor->VerifyFound();
+
+        // Stencil copies that should succeed
+        ds_region.bufferOffset = 0;
+        ds_region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
+        m_errorMonitor->ExpectSuccess();  // Extract 1b stencil per texel, pack into 64k buffer
+        vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_4D_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+                                 buffer_64k.handle(), 1, &ds_region);
+        m_errorMonitor->VerifyNotFound();
+
+        m_errorMonitor->ExpectSuccess();  // Extract 1b stencil per texel, pack into 64k buffer
+        vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_3D_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+                                 buffer_64k.handle(), 1, &ds_region);
+        m_errorMonitor->VerifyNotFound();
+
+        m_errorMonitor->ExpectSuccess();  // Copy 1b stencil per texel, into 64k buffer
+        vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+                                 buffer_64k.handle(), 1, &ds_region);
+        m_errorMonitor->VerifyNotFound();
+
+        // Stencil copies that should fail
+        m_errorMonitor->SetDesiredFailureMsg(
+            VK_DEBUG_REPORT_ERROR_BIT_EXT,
+            "VUID-vkCmdCopyImageToBuffer-pRegions-00183");  // Extract 1b stencil per texel, pack into 64k buffer
+        vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_4D_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+                                 buffer_16k.handle(), 1, &ds_region);
+        m_errorMonitor->VerifyFound();
+
+        m_errorMonitor->SetDesiredFailureMsg(
+            VK_DEBUG_REPORT_ERROR_BIT_EXT,
+            "VUID-vkCmdCopyImageToBuffer-pRegions-00183");  // Extract 1b stencil per texel, pack into 64k buffer
+        ds_region.bufferRowLength = 260;
+        vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_3D_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+                                 buffer_64k.handle(), 1, &ds_region);
+        m_errorMonitor->VerifyFound();
+
+        ds_region.bufferRowLength = 0;
+        ds_region.bufferOffset = 4;
+        m_errorMonitor->SetDesiredFailureMsg(
+            VK_DEBUG_REPORT_ERROR_BIT_EXT,
+            "VUID-vkCmdCopyImageToBuffer-pRegions-00183");  // Copy 1b depth per texel, into 64k buffer
+        vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), ds_image_1S.handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+                                 buffer_64k.handle(), 1, &ds_region);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Test compressed formats, if supported
+    VkPhysicalDeviceFeatures device_features = {};
+    ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
+    if (!(device_features.textureCompressionBC || device_features.textureCompressionETC2 ||
+          device_features.textureCompressionASTC_LDR)) {
+        printf("%s No compressed formats supported - block compression tests skipped.\n", kSkipPrefix);
+    } else {
+        VkImageObj image_16k_4x4comp(m_device);   // 128^2 texels as 32^2 compressed (4x4) blocks, 16k
+        VkImageObj image_NPOT_4x4comp(m_device);  // 130^2 texels as 33^2 compressed (4x4) blocks
+        if (device_features.textureCompressionBC) {
+            image_16k_4x4comp.Init(128, 128, 1, VK_FORMAT_BC3_SRGB_BLOCK, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_OPTIMAL,
+                                   0);
+            image_NPOT_4x4comp.Init(130, 130, 1, VK_FORMAT_BC3_SRGB_BLOCK, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_OPTIMAL,
+                                    0);
+        } else if (device_features.textureCompressionETC2) {
+            image_16k_4x4comp.Init(128, 128, 1, VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK, VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+                                   VK_IMAGE_TILING_OPTIMAL, 0);
+            image_NPOT_4x4comp.Init(130, 130, 1, VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK, VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+                                    VK_IMAGE_TILING_OPTIMAL, 0);
+        } else {
+            image_16k_4x4comp.Init(128, 128, 1, VK_FORMAT_ASTC_4x4_UNORM_BLOCK, VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+                                   VK_IMAGE_TILING_OPTIMAL, 0);
+            image_NPOT_4x4comp.Init(130, 130, 1, VK_FORMAT_ASTC_4x4_UNORM_BLOCK, VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+                                    VK_IMAGE_TILING_OPTIMAL, 0);
+        }
+        ASSERT_TRUE(image_16k_4x4comp.initialized());
+
+        // Just fits
+        m_errorMonitor->ExpectSuccess();
+        region.imageExtent = {128, 128, 1};
+        vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k_4x4comp.handle(), VK_IMAGE_LAYOUT_GENERAL,
+                                 buffer_16k.handle(), 1, &region);
+        m_errorMonitor->VerifyNotFound();
+
+        // with offset, too big for buffer
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImageToBuffer-pRegions-00183");
+        region.bufferOffset = 16;
+        vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k_4x4comp.handle(), VK_IMAGE_LAYOUT_GENERAL,
+                                 buffer_16k.handle(), 1, &region);
+        m_errorMonitor->VerifyFound();
+        region.bufferOffset = 0;
+
+        // extents that are not a multiple of compressed block size
+        m_errorMonitor->SetDesiredFailureMsg(
+            VK_DEBUG_REPORT_ERROR_BIT_EXT,
+            "VUID-VkBufferImageCopy-imageExtent-00207");  // extent width not a multiple of block size
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-vkCmdCopyImageToBuffer-imageOffset-01794");  // image transfer granularity
+        region.imageExtent.width = 66;
+        vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image_NPOT_4x4comp.handle(), VK_IMAGE_LAYOUT_GENERAL,
+                                 buffer_16k.handle(), 1, &region);
+        m_errorMonitor->VerifyFound();
+        region.imageExtent.width = 128;
+
+        m_errorMonitor->SetDesiredFailureMsg(
+            VK_DEBUG_REPORT_ERROR_BIT_EXT,
+            "VUID-VkBufferImageCopy-imageExtent-00208");  // extent height not a multiple of block size
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-vkCmdCopyImageToBuffer-imageOffset-01794");  // image transfer granularity
+        region.imageExtent.height = 2;
+        vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image_NPOT_4x4comp.handle(), VK_IMAGE_LAYOUT_GENERAL,
+                                 buffer_16k.handle(), 1, &region);
+        m_errorMonitor->VerifyFound();
+        region.imageExtent.height = 128;
+
+        // TODO: All available compressed formats are 2D, with block depth of 1. Unable to provoke VU_01277.
+
+        // non-multiple extents are allowed if at the far edge of a non-block-multiple image - these should pass
+        m_errorMonitor->ExpectSuccess();
+        region.imageExtent.width = 66;
+        region.imageOffset.x = 64;
+        vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image_NPOT_4x4comp.handle(), VK_IMAGE_LAYOUT_GENERAL,
+                                 buffer_16k.handle(), 1, &region);
+        region.imageExtent.width = 16;
+        region.imageOffset.x = 0;
+        region.imageExtent.height = 2;
+        region.imageOffset.y = 128;
+        vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image_NPOT_4x4comp.handle(), VK_IMAGE_LAYOUT_GENERAL,
+                                 buffer_16k.handle(), 1, &region);
+        m_errorMonitor->VerifyNotFound();
+        region.imageOffset = {0, 0, 0};
+
+        // buffer offset must be a multiple of texel block size (16)
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-bufferOffset-00206");
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-bufferOffset-00193");
+        region.imageExtent = {64, 64, 1};
+        region.bufferOffset = 24;
+        vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k_4x4comp.handle(), VK_IMAGE_LAYOUT_GENERAL,
+                                 buffer_16k.handle(), 1, &region);
+        m_errorMonitor->VerifyFound();
+
+        // rowlength not a multiple of block width (4)
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-bufferRowLength-00203");
+        region.bufferOffset = 0;
+        region.bufferRowLength = 130;
+        region.bufferImageHeight = 0;
+        vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k_4x4comp.handle(), VK_IMAGE_LAYOUT_GENERAL,
+                                 buffer_64k.handle(), 1, &region);
+        m_errorMonitor->VerifyFound();
+
+        // imageheight not a multiple of block height (4)
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-bufferImageHeight-00204");
+        region.bufferRowLength = 0;
+        region.bufferImageHeight = 130;
+        vk::CmdCopyImageToBuffer(m_commandBuffer->handle(), image_16k_4x4comp.handle(), VK_IMAGE_LAYOUT_GENERAL,
+                                 buffer_64k.handle(), 1, &region);
+        m_errorMonitor->VerifyFound();
+    }
+}
+
+TEST_F(VkLayerTest, MiscImageLayerTests) {
+    TEST_DESCRIPTION("Image-related tests that don't belong elsewhere");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // TODO: Ideally we should check if a format is supported, before using it.
+    VkImageObj image(m_device);
+    image.Init(128, 128, 1, VK_FORMAT_R16G16B16A16_UINT, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL, 0);  // 64bpp
+    ASSERT_TRUE(image.initialized());
+    VkBufferObj buffer;
+    VkMemoryPropertyFlags reqs = 0;
+    buffer.init_as_src(*m_device, 128 * 128 * 8, reqs);
+    VkBufferImageCopy region = {};
+    region.bufferRowLength = 128;
+    region.bufferImageHeight = 128;
+    region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    // layerCount can't be 0, so use the valid value of 1
+    region.imageSubresource.layerCount = 1;
+    region.imageExtent.height = 4;
+    region.imageExtent.width = 4;
+    region.imageExtent.depth = 1;
+
+    VkImageObj image2(m_device);
+    image2.Init(128, 128, 1, VK_FORMAT_R8G8_UNORM, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL, 0);  // 16bpp
+    ASSERT_TRUE(image2.initialized());
+    VkBufferObj buffer2;
+    VkMemoryPropertyFlags reqs2 = 0;
+    buffer2.init_as_src(*m_device, 128 * 128 * 2, reqs2);
+    VkBufferImageCopy region2 = {};
+    region2.bufferRowLength = 128;
+    region2.bufferImageHeight = 128;
+    region2.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    // layerCount can't be 0, so use the valid value of 1
+    region2.imageSubresource.layerCount = 1;
+    region2.imageExtent.height = 4;
+    region2.imageExtent.width = 4;
+    region2.imageExtent.depth = 1;
+    m_commandBuffer->begin();
+
+    // Image must have offset.z of 0 and extent.depth of 1
+    // Introduce failure by setting imageExtent.depth to 0
+    region.imageExtent.depth = 0;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-srcImage-00201");
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer.handle(), image.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1,
+                             &region);
+    m_errorMonitor->VerifyFound();
+
+    region.imageExtent.depth = 1;
+
+    // Image must have offset.z of 0 and extent.depth of 1
+    // Introduce failure by setting imageOffset.z to 4
+    // Note: Also (unavoidably) triggers 'region exceeds image' #1228
+    region.imageOffset.z = 4;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-srcImage-00201");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-imageOffset-00200");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyBufferToImage-pRegions-00172");
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer.handle(), image.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1,
+                             &region);
+    m_errorMonitor->VerifyFound();
+
+    region.imageOffset.z = 0;
+    // BufferOffset must be a multiple of the calling command's VkImage parameter's texel size
+    // Introduce failure by setting bufferOffset to half a texel (4 bytes against an 8-byte texel)
+    region.bufferOffset = 4;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-bufferOffset-00193");
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer.handle(), image.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1,
+                             &region);
+    m_errorMonitor->VerifyFound();
+
+    // BufferOffset must be a multiple of 4
+    // Introduce failure by setting bufferOffset to a value not divisible by 4
+    region2.bufferOffset = 6;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-bufferOffset-00194");
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer2.handle(), image2.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1,
+                             &region2);
+    m_errorMonitor->VerifyFound();
+
+    // BufferRowLength must be 0, or greater than or equal to the width member of imageExtent
+    region.bufferOffset = 0;
+    region.imageExtent.height = 128;
+    region.imageExtent.width = 128;
+    // Introduce failure by setting bufferRowLength > 0 but less than width
+    region.bufferRowLength = 64;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-bufferRowLength-00195");
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer.handle(), image.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1,
+                             &region);
+    m_errorMonitor->VerifyFound();
+
+    // BufferImageHeight must be 0, or greater than or equal to the height member of imageExtent
+    region.bufferRowLength = 128;
+    // Introduce failure by setting bufferImageHeight > 0 but less than height
+    region.bufferImageHeight = 64;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkBufferImageCopy-bufferImageHeight-00196");
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer.handle(), image.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1,
+                             &region);
+    m_errorMonitor->VerifyFound();
+
+    region.bufferImageHeight = 128;
+    VkImageObj intImage1(m_device);
+    intImage1.Init(128, 128, 1, VK_FORMAT_R8_UNORM, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    intImage1.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
+    VkImageObj intImage2(m_device);
+    intImage2.Init(128, 128, 1, VK_FORMAT_R8_UNORM, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    intImage2.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
+    VkImageBlit blitRegion = {};
+    blitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    blitRegion.srcSubresource.baseArrayLayer = 0;
+    blitRegion.srcSubresource.layerCount = 1;
+    blitRegion.srcSubresource.mipLevel = 0;
+    blitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    blitRegion.dstSubresource.baseArrayLayer = 0;
+    blitRegion.dstSubresource.layerCount = 1;
+    blitRegion.dstSubresource.mipLevel = 0;
+    blitRegion.srcOffsets[0] = {128, 0, 0};
+    blitRegion.srcOffsets[1] = {128, 128, 1};
+    blitRegion.dstOffsets[0] = {0, 128, 0};
+    blitRegion.dstOffsets[1] = {128, 128, 1};
+
+    // Look for NULL-blit warning
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT,
+                                         "vkCmdBlitImage(): pRegions[0].srcOffsets specify a zero-volume area.");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT,
+                                         "vkCmdBlitImage(): pRegions[0].dstOffsets specify a zero-volume area.");
+    vk::CmdBlitImage(m_commandBuffer->handle(), intImage1.handle(), intImage1.Layout(), intImage2.handle(), intImage2.Layout(), 1,
+                     &blitRegion, VK_FILTER_LINEAR);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CopyImageTypeExtentMismatch) {
+    // Image copy tests where format type and extents don't match
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkImageCreateInfo ci;
+    ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    ci.pNext = NULL;
+    ci.flags = 0;
+    ci.imageType = VK_IMAGE_TYPE_1D;
+    ci.format = VK_FORMAT_R8G8B8A8_UNORM;
+    ci.extent = {32, 1, 1};
+    ci.mipLevels = 1;
+    ci.arrayLayers = 1;
+    ci.samples = VK_SAMPLE_COUNT_1_BIT;
+    ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+    ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    ci.queueFamilyIndexCount = 0;
+    ci.pQueueFamilyIndices = NULL;
+    ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+    // Create 1D image
+    VkImageObj image_1D(m_device);
+    image_1D.init(&ci);
+    ASSERT_TRUE(image_1D.initialized());
+
+    // 2D image
+    ci.imageType = VK_IMAGE_TYPE_2D;
+    ci.extent = {32, 32, 1};
+    VkImageObj image_2D(m_device);
+    image_2D.init(&ci);
+    ASSERT_TRUE(image_2D.initialized());
+
+    // 3D image
+    ci.imageType = VK_IMAGE_TYPE_3D;
+    ci.extent = {32, 32, 8};
+    VkImageObj image_3D(m_device);
+    image_3D.init(&ci);
+    ASSERT_TRUE(image_3D.initialized());
+
+    // 2D image array
+    ci.imageType = VK_IMAGE_TYPE_2D;
+    ci.extent = {32, 32, 1};
+    ci.arrayLayers = 8;
+    VkImageObj image_2D_array(m_device);
+    image_2D_array.init(&ci);
+    ASSERT_TRUE(image_2D_array.initialized());
+
+    m_commandBuffer->begin();
+
+    VkImageCopy copy_region;
+    copy_region.extent = {32, 1, 1};
+    copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copy_region.srcSubresource.mipLevel = 0;
+    copy_region.dstSubresource.mipLevel = 0;
+    copy_region.srcSubresource.baseArrayLayer = 0;
+    copy_region.dstSubresource.baseArrayLayer = 0;
+    copy_region.srcSubresource.layerCount = 1;
+    copy_region.dstSubresource.layerCount = 1;
+    copy_region.srcOffset = {0, 0, 0};
+    copy_region.dstOffset = {0, 0, 0};
+
+    // Sanity check
+    m_errorMonitor->ExpectSuccess();
+    m_commandBuffer->CopyImage(image_1D.image(), VK_IMAGE_LAYOUT_GENERAL, image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyNotFound();
+
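+    // 1D images require offset.y/z == 0 and extent.height/depth == 1; each failure below also overruns that dimension of the 1D image.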
+    // 1D texture w/ offset.y > 0. Source = VU 09c00124, dest = 09c00130
+    copy_region.srcOffset.y = 1;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-00146");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcOffset-00145");  // also y-dim overrun
+    m_commandBuffer->CopyImage(image_1D.image(), VK_IMAGE_LAYOUT_GENERAL, image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+    copy_region.srcOffset.y = 0;
+    copy_region.dstOffset.y = 1;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstImage-00152");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstOffset-00151");  // also y-dim overrun
+    m_commandBuffer->CopyImage(image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, image_1D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+    copy_region.dstOffset.y = 0;
+
+    // 1D texture w/ extent.height > 1. Source = VU 09c00124, dest = 09c00130
+    copy_region.extent.height = 2;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-00146");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcOffset-00145");  // also y-dim overrun
+    m_commandBuffer->CopyImage(image_1D.image(), VK_IMAGE_LAYOUT_GENERAL, image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstImage-00152");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstOffset-00151");  // also y-dim overrun
+    m_commandBuffer->CopyImage(image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, image_1D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+    copy_region.extent.height = 1;
+
+    // 1D texture w/ offset.z > 0. Source = VU 09c00df2, dest = 09c00df4
+    copy_region.srcOffset.z = 1;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-01785");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcOffset-00147");  // also z-dim overrun
+    m_commandBuffer->CopyImage(image_1D.image(), VK_IMAGE_LAYOUT_GENERAL, image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+    copy_region.srcOffset.z = 0;
+    copy_region.dstOffset.z = 1;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstImage-01786");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstOffset-00153");  // also z-dim overrun
+    m_commandBuffer->CopyImage(image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, image_1D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+    copy_region.dstOffset.z = 0;
+
+    // 1D texture w/ extent.depth > 1. Source = VU 09c00df2, dest = 09c00df4
+    copy_region.extent.depth = 2;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-01785");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkImageCopy-srcOffset-00147");  // also z-dim overrun (src)
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkImageCopy-dstOffset-00153");  // also z-dim overrun (dst)
+    m_commandBuffer->CopyImage(image_1D.image(), VK_IMAGE_LAYOUT_GENERAL, image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstImage-01786");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkImageCopy-srcOffset-00147");  // also z-dim overrun (src)
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkImageCopy-dstOffset-00153");  // also z-dim overrun (dst)
+    m_commandBuffer->CopyImage(image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, image_1D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+    copy_region.extent.depth = 1;
+
+    // 2D texture w/ offset.z > 0. Source = VU 09c00df6, dest = 09c00df8
+    copy_region.extent = {16, 16, 1};
+    copy_region.srcOffset.z = 4;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-01787");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkImageCopy-srcOffset-00147");  // also z-dim overrun (src)
+    m_commandBuffer->CopyImage(image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, image_3D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+    copy_region.srcOffset.z = 0;
+    copy_region.dstOffset.z = 1;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstImage-01788");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkImageCopy-dstOffset-00153");  // also z-dim overrun (dst)
+    m_commandBuffer->CopyImage(image_3D.image(), VK_IMAGE_LAYOUT_GENERAL, image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+    copy_region.dstOffset.z = 0;
+
+    // 3D texture accessing an array layer other than 0. VU 09c0011a
+    copy_region.extent = {4, 4, 1};
+    copy_region.srcSubresource.baseArrayLayer = 1;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-00141");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyImage-srcSubresource-01698");  // also 'too many layers'
+    m_commandBuffer->CopyImage(image_3D.image(), VK_IMAGE_LAYOUT_GENERAL, image_2D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, CopyImageTypeExtentMismatchMaintenance1) {
+    // Image copy tests where format type and extents don't match and the Maintenance1 extension is enabled
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+    } else {
+        printf("%s Maintenance1 extension cannot be enabled, test skipped.\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    VkFormat image_format = VK_FORMAT_R8G8B8A8_UNORM;
+    VkFormatProperties format_props;
+    // TODO: Remove this check if or when devsim handles extensions.
+    // The chosen format has mandatory support for the transfer src and dst format features when Maintenance1 is enabled.
+    // However, our use of devsim and the mock ICD violates this guarantee.
+    vk::GetPhysicalDeviceFormatProperties(m_device->phy().handle(), image_format, &format_props);
+    if (!(format_props.optimalTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_SRC_BIT)) {
+        printf("%s Maintenance1 extension is not supported.\n", kSkipPrefix);
+        return;
+    }
+
+    VkImageCreateInfo ci;
+    ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    ci.pNext = NULL;
+    ci.flags = 0;
+    ci.imageType = VK_IMAGE_TYPE_1D;
+    ci.format = image_format;
+    ci.extent = {32, 1, 1};
+    ci.mipLevels = 1;
+    ci.arrayLayers = 1;
+    ci.samples = VK_SAMPLE_COUNT_1_BIT;
+    ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+    ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    ci.queueFamilyIndexCount = 0;
+    ci.pQueueFamilyIndices = NULL;
+    ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+    // Create 1D image
+    VkImageObj image_1D(m_device);
+    image_1D.init(&ci);
+    ASSERT_TRUE(image_1D.initialized());
+
+    // 2D image
+    ci.imageType = VK_IMAGE_TYPE_2D;
+    ci.extent = {32, 32, 1};
+    VkImageObj image_2D(m_device);
+    image_2D.init(&ci);
+    ASSERT_TRUE(image_2D.initialized());
+
+    // 3D image
+    ci.imageType = VK_IMAGE_TYPE_3D;
+    ci.extent = {32, 32, 8};
+    VkImageObj image_3D(m_device);
+    image_3D.init(&ci);
+    ASSERT_TRUE(image_3D.initialized());
+
+    // 2D image array
+    ci.imageType = VK_IMAGE_TYPE_2D;
+    ci.extent = {32, 32, 1};
+    ci.arrayLayers = 8;
+    VkImageObj image_2D_array(m_device);
+    image_2D_array.init(&ci);
+    ASSERT_TRUE(image_2D_array.initialized());
+
+    m_commandBuffer->begin();
+
+    VkImageCopy copy_region;
+    copy_region.extent = {32, 1, 1};
+    copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copy_region.srcSubresource.mipLevel = 0;
+    copy_region.dstSubresource.mipLevel = 0;
+    copy_region.srcSubresource.baseArrayLayer = 0;
+    copy_region.dstSubresource.baseArrayLayer = 0;
+    copy_region.srcSubresource.layerCount = 1;
+    copy_region.dstSubresource.layerCount = 1;
+    copy_region.srcOffset = {0, 0, 0};
+    copy_region.dstOffset = {0, 0, 0};
+
+    // Copy from layer not present
+    copy_region.srcSubresource.baseArrayLayer = 4;
+    copy_region.srcSubresource.layerCount = 6;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-srcSubresource-01698");
+    m_commandBuffer->CopyImage(image_2D_array.image(), VK_IMAGE_LAYOUT_GENERAL, image_3D.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+    copy_region.srcSubresource.baseArrayLayer = 0;
+    copy_region.srcSubresource.layerCount = 1;
+
+    // Copy to layer not present
+    copy_region.dstSubresource.baseArrayLayer = 1;
+    copy_region.dstSubresource.layerCount = 8;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-dstSubresource-01699");
+    m_commandBuffer->CopyImage(image_3D.image(), VK_IMAGE_LAYOUT_GENERAL, image_2D_array.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+    copy_region.dstSubresource.layerCount = 1;
+
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, CopyImageCompressedBlockAlignment) {
+    // Image copy tests on compressed images with block alignment errors
+    SetTargetApiVersion(VK_API_VERSION_1_1);
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // Select a compressed format and verify support
+    VkPhysicalDeviceFeatures device_features = {};
+    ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
+    VkFormat compressed_format = VK_FORMAT_UNDEFINED;
+    if (device_features.textureCompressionBC) {
+        compressed_format = VK_FORMAT_BC3_SRGB_BLOCK;
+    } else if (device_features.textureCompressionETC2) {
+        compressed_format = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK;
+    } else if (device_features.textureCompressionASTC_LDR) {
+        compressed_format = VK_FORMAT_ASTC_4x4_UNORM_BLOCK;
+    }
+
+    VkImageCreateInfo ci;
+    ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    ci.pNext = NULL;
+    ci.flags = 0;
+    ci.imageType = VK_IMAGE_TYPE_2D;
+    ci.format = compressed_format;
+    ci.extent = {64, 64, 1};
+    ci.mipLevels = 1;
+    ci.arrayLayers = 1;
+    ci.samples = VK_SAMPLE_COUNT_1_BIT;
+    ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+    ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    ci.queueFamilyIndexCount = 0;
+    ci.pQueueFamilyIndices = NULL;
+    ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+    VkImageFormatProperties img_prop = {};
+    if (VK_SUCCESS != vk::GetPhysicalDeviceImageFormatProperties(m_device->phy().handle(), ci.format, ci.imageType, ci.tiling,
+                                                                 ci.usage, ci.flags, &img_prop)) {
+        printf("%s No compressed formats supported - CopyImageCompressedBlockAlignment skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    // Create images
+    VkImageObj image_1(m_device);
+    image_1.init(&ci);
+    ASSERT_TRUE(image_1.initialized());
+
+    ci.extent = {62, 62, 1};  // slightly smaller and not divisible by block size
+    VkImageObj image_2(m_device);
+    image_2.init(&ci);
+    ASSERT_TRUE(image_2.initialized());
+
+    m_commandBuffer->begin();
+
+    VkImageCopy copy_region;
+    copy_region.extent = {48, 48, 1};
+    copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copy_region.srcSubresource.mipLevel = 0;
+    copy_region.dstSubresource.mipLevel = 0;
+    copy_region.srcSubresource.baseArrayLayer = 0;
+    copy_region.dstSubresource.baseArrayLayer = 0;
+    copy_region.srcSubresource.layerCount = 1;
+    copy_region.dstSubresource.layerCount = 1;
+    copy_region.srcOffset = {0, 0, 0};
+    copy_region.dstOffset = {0, 0, 0};
+
+    // Sanity check
+    m_errorMonitor->ExpectSuccess();
+    m_commandBuffer->CopyImage(image_1.image(), VK_IMAGE_LAYOUT_GENERAL, image_2.image(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
+    m_errorMonitor->VerifyNotFound();
+
+    std::string vuid;
+    bool ycbcr = (DeviceExtensionEnabled(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME) ||
+                  (DeviceValidationVersion() >= VK_API_VERSION_1_1));
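+    // Later spec revisions (YCbCr conversion extension / Vulkan 1.1) renumbered these block-alignment VUs, so select the matching VUID.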
+
+    // Src, Dest offsets must be multiples of compressed block sizes {4, 4, 1}
+    // Image transfer granularity gets set to compressed block size, so an ITG error is also (unavoidably) triggered.
+    vuid = ycbcr ? "VUID-VkImageCopy-srcImage-01727" : "VUID-VkImageCopy-srcOffset-00157";
+    copy_region.srcOffset = {2, 4, 0};  // src offset x not a multiple of block width
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyImage-srcOffset-01783");  // srcOffset image transfer granularity
+    m_commandBuffer->CopyImage(image_1.image(), VK_IMAGE_LAYOUT_GENERAL, image_2.image(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
+    m_errorMonitor->VerifyFound();
+    copy_region.srcOffset = {12, 1, 0};  // src offset y not a multiple of block height
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyImage-srcOffset-01783");  // srcOffset image transfer granularity
+    m_commandBuffer->CopyImage(image_1.image(), VK_IMAGE_LAYOUT_GENERAL, image_2.image(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
+    m_errorMonitor->VerifyFound();
+    copy_region.srcOffset = {0, 0, 0};
+
+    vuid = ycbcr ? "VUID-VkImageCopy-dstImage-01731" : "VUID-VkImageCopy-dstOffset-00162";
+    copy_region.dstOffset = {1, 0, 0};  // dst offset x not a multiple of block width
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyImage-dstOffset-01784");  // dstOffset image transfer granularity
+    m_commandBuffer->CopyImage(image_1.image(), VK_IMAGE_LAYOUT_GENERAL, image_2.image(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
+    m_errorMonitor->VerifyFound();
+    copy_region.dstOffset = {4, 1, 0};  // dst offset y not a multiple of block height
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyImage-dstOffset-01784");  // dstOffset image transfer granularity
+    m_commandBuffer->CopyImage(image_1.image(), VK_IMAGE_LAYOUT_GENERAL, image_2.image(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
+    m_errorMonitor->VerifyFound();
+    copy_region.dstOffset = {0, 0, 0};
+
+    // Copy extent must be multiples of compressed block sizes {4, 4, 1} if not full width/height
+    vuid = ycbcr ? "VUID-VkImageCopy-srcImage-01728" : "VUID-VkImageCopy-extent-00158";
+    copy_region.extent = {62, 60, 1};  // source width
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyImage-srcOffset-01783");  // src extent image transfer granularity
+    m_commandBuffer->CopyImage(image_1.image(), VK_IMAGE_LAYOUT_GENERAL, image_2.image(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
+    m_errorMonitor->VerifyFound();
+    vuid = ycbcr ? "VUID-VkImageCopy-srcImage-01729" : "VUID-VkImageCopy-extent-00159";
+    copy_region.extent = {60, 62, 1};  // source height
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyImage-srcOffset-01783");  // src extent image transfer granularity
+    m_commandBuffer->CopyImage(image_1.image(), VK_IMAGE_LAYOUT_GENERAL, image_2.image(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
+    m_errorMonitor->VerifyFound();
+
+    vuid = ycbcr ? "VUID-VkImageCopy-dstImage-01732" : "VUID-VkImageCopy-extent-00163";
+    copy_region.extent = {62, 60, 1};  // dest width
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyImage-dstOffset-01784");  // dst extent image transfer granularity
+    m_commandBuffer->CopyImage(image_2.image(), VK_IMAGE_LAYOUT_GENERAL, image_1.image(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
+    m_errorMonitor->VerifyFound();
+    vuid = ycbcr ? "VUID-VkImageCopy-dstImage-01733" : "VUID-VkImageCopy-extent-00164";
+    copy_region.extent = {60, 62, 1};  // dest height
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyImage-dstOffset-01784");  // dst extent image transfer granularity
+    m_commandBuffer->CopyImage(image_2.image(), VK_IMAGE_LAYOUT_GENERAL, image_1.image(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
+    m_errorMonitor->VerifyFound();
+
+    // Note: "VUID-VkImageCopy-extent-00160", "VUID-VkImageCopy-extent-00165", "VUID-VkImageCopy-srcImage-01730",
+    // "VUID-VkImageCopy-dstImage-01734"
+    //       There are currently no supported compressed formats with a block depth other than 1,
+    //       so impossible to create a 'not a multiple' condition for depth.
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, CopyImageSinglePlane422Alignment) {
+    // Image copy tests on single-plane _422 formats with block alignment errors
+
+    // Enable KHR multiplane req'd extensions
+    bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
+                                                    VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION);
+    if (mp_extensions) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+    if (mp_extensions) {
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+    } else {
+        printf("%s test requires KHR multiplane extensions, not available.  Skipping.\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    // Select a _422 format and verify support
+    VkImageCreateInfo ci = {};
+    ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    ci.pNext = NULL;
+    ci.flags = 0;
+    ci.imageType = VK_IMAGE_TYPE_2D;
+    ci.format = VK_FORMAT_G8B8G8R8_422_UNORM_KHR;
+    ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+    ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    ci.mipLevels = 1;
+    ci.arrayLayers = 1;
+    ci.samples = VK_SAMPLE_COUNT_1_BIT;
+    ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    ci.queueFamilyIndexCount = 0;
+    ci.pQueueFamilyIndices = NULL;
+    ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+    // Verify formats
+    VkFormatFeatureFlags features = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT | VK_FORMAT_FEATURE_TRANSFER_DST_BIT;
+    bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), ci, features);
+    if (!supported) {
+        printf("%s Single-plane _422 image format not supported.  Skipping test.\n", kSkipPrefix);
+        return;  // Assume there's low ROI on searching for different mp formats
+    }
+
+    // Create images
+    ci.extent = {64, 64, 1};
+    VkImageObj image_422(m_device);
+    image_422.init(&ci);
+    ASSERT_TRUE(image_422.initialized());
+
+    ci.extent = {64, 64, 1};
+    ci.format = VK_FORMAT_R8G8B8A8_UNORM;
+    VkImageObj image_ucmp(m_device);
+    image_ucmp.init(&ci);
+    ASSERT_TRUE(image_ucmp.initialized());
+
+    m_commandBuffer->begin();
+
+    VkImageCopy copy_region;
+    copy_region.extent = {48, 48, 1};
+    copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copy_region.srcSubresource.mipLevel = 0;
+    copy_region.dstSubresource.mipLevel = 0;
+    copy_region.srcSubresource.baseArrayLayer = 0;
+    copy_region.dstSubresource.baseArrayLayer = 0;
+    copy_region.srcSubresource.layerCount = 1;
+    copy_region.dstSubresource.layerCount = 1;
+    copy_region.srcOffset = {0, 0, 0};
+    copy_region.dstOffset = {0, 0, 0};
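+    // Single-plane _422 formats use a 2x1 texel block, so x offsets and copy widths must be even unless they reach the image edge.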
+
+    // Src offsets must be multiples of compressed block sizes
+    copy_region.srcOffset = {3, 4, 0};  // source offset x
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-01727");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-srcOffset-01783");
+    m_commandBuffer->CopyImage(image_422.image(), VK_IMAGE_LAYOUT_GENERAL, image_ucmp.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+    copy_region.srcOffset = {0, 0, 0};
+
+    // Dst offsets must be multiples of compressed block sizes
+    copy_region.dstOffset = {1, 0, 0};
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstImage-01731");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-dstOffset-01784");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-pRegions-00123");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstOffset-00150");
+    m_commandBuffer->CopyImage(image_ucmp.image(), VK_IMAGE_LAYOUT_GENERAL, image_422.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+    copy_region.dstOffset = {0, 0, 0};
+
+    // Copy extent must be multiples of compressed block sizes if not full width/height
+    copy_region.extent = {31, 60, 1};  // 422 source, extent.x
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-01728");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-srcOffset-01783");
+    m_commandBuffer->CopyImage(image_422.image(), VK_IMAGE_LAYOUT_GENERAL, image_ucmp.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+
+    // 422 dest
+    m_commandBuffer->CopyImage(image_ucmp.image(), VK_IMAGE_LAYOUT_GENERAL, image_422.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyNotFound();
+    copy_region.dstOffset = {0, 0, 0};
+
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, CopyImageMultiplaneAspectBits) {
+    // Image copy tests on multiplane images with aspect errors
+
+    // Enable KHR multiplane req'd extensions
+    bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
+                                                    VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION);
+    if (mp_extensions) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+    if (mp_extensions) {
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+    } else {
+        printf("%s test requires KHR multiplane extensions, not available.  Skipping.\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    // Select multi-plane formats and verify support
+    VkFormat mp3_format = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR;
+    VkFormat mp2_format = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR;
+
+    VkImageCreateInfo ci = {};
+    ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    ci.pNext = NULL;
+    ci.flags = 0;
+    ci.imageType = VK_IMAGE_TYPE_2D;
+    ci.format = mp2_format;
+    ci.extent = {256, 256, 1};
+    ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+    ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    ci.mipLevels = 1;
+    ci.arrayLayers = 1;
+    ci.samples = VK_SAMPLE_COUNT_1_BIT;
+    ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    ci.queueFamilyIndexCount = 0;
+    ci.pQueueFamilyIndices = NULL;
+    ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+    // Verify formats
+    VkFormatFeatureFlags features = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT | VK_FORMAT_FEATURE_TRANSFER_DST_BIT;
+    bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), ci, features);
+    ci.format = VK_FORMAT_D24_UNORM_S8_UINT;
+    supported = supported && ImageFormatAndFeaturesSupported(instance(), gpu(), ci, features);
+    ci.format = mp3_format;
+    supported = supported && ImageFormatAndFeaturesSupported(instance(), gpu(), ci, features);
+    if (!supported) {
+        printf("%s Multiplane image formats or optimally tiled depth-stencil buffers not supported.  Skipping test.\n",
+               kSkipPrefix);
+        return;  // Assume there's low ROI on searching for different mp formats
+    }
+
+    // Create images
+    VkImageObj mp3_image(m_device);
+    mp3_image.init(&ci);
+    ASSERT_TRUE(mp3_image.initialized());
+
+    ci.format = mp2_format;
+    VkImageObj mp2_image(m_device);
+    mp2_image.init(&ci);
+    ASSERT_TRUE(mp2_image.initialized());
+
+    ci.format = VK_FORMAT_D24_UNORM_S8_UINT;
+    VkImageObj sp_image(m_device);
+    sp_image.init(&ci);
+    ASSERT_TRUE(sp_image.initialized());
+
+    m_commandBuffer->begin();
+
+    VkImageCopy copy_region;
+    copy_region.extent = {128, 128, 1};
+    copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR;
+    copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR;
+    copy_region.srcSubresource.mipLevel = 0;
+    copy_region.dstSubresource.mipLevel = 0;
+    copy_region.srcSubresource.baseArrayLayer = 0;
+    copy_region.dstSubresource.baseArrayLayer = 0;
+    copy_region.srcSubresource.layerCount = 1;
+    copy_region.dstSubresource.layerCount = 1;
+    copy_region.srcOffset = {0, 0, 0};
+    copy_region.dstOffset = {0, 0, 0};
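+    // Exercise aspect masks that are invalid for the 2-plane and 3-plane formats used below.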
+
+    m_errorMonitor->SetUnexpectedError("VUID-vkCmdCopyImage-srcImage-00135");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-01552");
+    m_commandBuffer->CopyImage(mp2_image.image(), VK_IMAGE_LAYOUT_GENERAL, mp3_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetUnexpectedError("VUID-vkCmdCopyImage-srcImage-00135");
+    copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_0_BIT_KHR;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-01553");
+    m_commandBuffer->CopyImage(mp3_image.image(), VK_IMAGE_LAYOUT_GENERAL, mp2_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+
+    copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_1_BIT_KHR;
+    copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR;
+    m_errorMonitor->SetUnexpectedError("VUID-vkCmdCopyImage-srcImage-00135");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstImage-01554");
+    m_commandBuffer->CopyImage(mp3_image.image(), VK_IMAGE_LAYOUT_GENERAL, mp2_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+
+    copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    m_errorMonitor->SetUnexpectedError("VUID-vkCmdCopyImage-srcImage-00135");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstImage-01555");
+    m_commandBuffer->CopyImage(mp2_image.image(), VK_IMAGE_LAYOUT_GENERAL, mp3_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+
+    copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcImage-01556");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "dest image depth/stencil formats");  // also
+    m_commandBuffer->CopyImage(mp2_image.image(), VK_IMAGE_LAYOUT_GENERAL, sp_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+
+    copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+    copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstImage-01557");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "dest image depth/stencil formats");  // also
+    m_commandBuffer->CopyImage(sp_image.image(), VK_IMAGE_LAYOUT_GENERAL, mp3_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, CopyImageSrcSizeExceeded) {
+    // Image copy with source region specified greater than src image size
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // Create images with full mip chain
+    VkImageCreateInfo ci;
+    ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    ci.pNext = NULL;
+    ci.flags = 0;
+    ci.imageType = VK_IMAGE_TYPE_3D;
+    ci.format = VK_FORMAT_R8G8B8A8_UNORM;
+    ci.extent = {32, 32, 8};
+    ci.mipLevels = 6;
+    ci.arrayLayers = 1;
+    ci.samples = VK_SAMPLE_COUNT_1_BIT;
+    ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+    ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+    ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    ci.queueFamilyIndexCount = 0;
+    ci.pQueueFamilyIndices = NULL;
+    ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+    VkImageObj src_image(m_device);
+    src_image.init(&ci);
+    ASSERT_TRUE(src_image.initialized());
+
+    // Dest image with one more mip level
+    ci.extent = {64, 64, 16};
+    ci.mipLevels = 7;
+    ci.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    VkImageObj dst_image(m_device);
+    dst_image.init(&ci);
+    ASSERT_TRUE(dst_image.initialized());
+
+    m_commandBuffer->begin();
+
+    VkImageCopy copy_region;
+    copy_region.extent = {32, 32, 8};
+    copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copy_region.srcSubresource.mipLevel = 0;
+    copy_region.dstSubresource.mipLevel = 0;
+    copy_region.srcSubresource.baseArrayLayer = 0;
+    copy_region.dstSubresource.baseArrayLayer = 0;
+    copy_region.srcSubresource.layerCount = 1;
+    copy_region.dstSubresource.layerCount = 1;
+    copy_region.srcOffset = {0, 0, 0};
+    copy_region.dstOffset = {0, 0, 0};
+
+    m_errorMonitor->ExpectSuccess();
+    m_commandBuffer->CopyImage(src_image.image(), VK_IMAGE_LAYOUT_GENERAL, dst_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyNotFound();
+
+    // Source exceeded in x-dim, VU 01202
+    copy_region.srcOffset.x = 4;
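+    // srcOffset.x = 4 plus extent.width = 32 reaches x = 36, past the 32-texel source width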
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyImage-pRegions-00122");  // General "contained within" VU
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcOffset-00144");
+    m_commandBuffer->CopyImage(src_image.image(), VK_IMAGE_LAYOUT_GENERAL, dst_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+
+    // Source exceeded in y-dim, VU 01203
+    copy_region.srcOffset.x = 0;
+    copy_region.extent.height = 48;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-pRegions-00122");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcOffset-00145");
+    m_commandBuffer->CopyImage(src_image.image(), VK_IMAGE_LAYOUT_GENERAL, dst_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+
+    // Source exceeded in z-dim, VU 01204
+    copy_region.extent = {4, 4, 4};
+    copy_region.srcSubresource.mipLevel = 2;
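+    // At mip level 2 the 32x32x8 source shrinks to 8x8x2, so an extent depth of 4 exceeds it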
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-pRegions-00122");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-srcOffset-00147");
+    m_commandBuffer->CopyImage(src_image.image(), VK_IMAGE_LAYOUT_GENERAL, dst_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, CopyImageDstSizeExceeded) {
+    // Image copy with a dest region that exceeds the dest image size
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // Create images with full mip chain
+    VkImageCreateInfo ci;
+    ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    ci.pNext = NULL;
+    ci.flags = 0;
+    ci.imageType = VK_IMAGE_TYPE_3D;
+    ci.format = VK_FORMAT_R8G8B8A8_UNORM;
+    ci.extent = {32, 32, 8};
+    ci.mipLevels = 6;
+    ci.arrayLayers = 1;
+    ci.samples = VK_SAMPLE_COUNT_1_BIT;
+    ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+    ci.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    ci.queueFamilyIndexCount = 0;
+    ci.pQueueFamilyIndices = NULL;
+    ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+    VkImageObj dst_image(m_device);
+    dst_image.init(&ci);
+    ASSERT_TRUE(dst_image.initialized());
+
+    // Src image with one more mip level
+    ci.extent = {64, 64, 16};
+    ci.mipLevels = 7;
+    ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+    VkImageObj src_image(m_device);
+    src_image.init(&ci);
+    ASSERT_TRUE(src_image.initialized());
+
+    m_commandBuffer->begin();
+
+    VkImageCopy copy_region;
+    copy_region.extent = {32, 32, 8};
+    copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copy_region.srcSubresource.mipLevel = 0;
+    copy_region.dstSubresource.mipLevel = 0;
+    copy_region.srcSubresource.baseArrayLayer = 0;
+    copy_region.dstSubresource.baseArrayLayer = 0;
+    copy_region.srcSubresource.layerCount = 1;
+    copy_region.dstSubresource.layerCount = 1;
+    copy_region.srcOffset = {0, 0, 0};
+    copy_region.dstOffset = {0, 0, 0};
+
+    m_errorMonitor->ExpectSuccess();
+    m_commandBuffer->CopyImage(src_image.image(), VK_IMAGE_LAYOUT_GENERAL, dst_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyNotFound();
+
+    // Dest exceeded in x-dim, VU 01205
+    copy_region.dstOffset.x = 4;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyImage-pRegions-00123");  // General "contained within" VU
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstOffset-00150");
+    m_commandBuffer->CopyImage(src_image.image(), VK_IMAGE_LAYOUT_GENERAL, dst_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+
+    // Dest exceeded in y-dim, VU 01206
+    copy_region.dstOffset.x = 0;
+    copy_region.extent.height = 48;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-pRegions-00123");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstOffset-00151");
+    m_commandBuffer->CopyImage(src_image.image(), VK_IMAGE_LAYOUT_GENERAL, dst_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+
+    // Dest exceeded in z-dim, VU 01207
+    copy_region.extent = {4, 4, 4};
+    copy_region.dstSubresource.mipLevel = 2;
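+    // At mip level 2 the 32x32x8 dest shrinks to 8x8x2, so an extent depth of 4 exceeds it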
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-pRegions-00123");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-dstOffset-00153");
+    m_commandBuffer->CopyImage(src_image.image(), VK_IMAGE_LAYOUT_GENERAL, dst_image.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copy_region);
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, CopyImageFormatSizeMismatch) {
+    VkResult err;
+    bool pass;
+
+    // Create color images with different format sizes and try to copy between them
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-srcImage-00135");
+
+    SetTargetApiVersion(VK_API_VERSION_1_1);
+    ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+    // Create two images of different types and try to copy between them
+    VkImage srcImage;
+    VkImage dstImage;
+    VkDeviceMemory srcMem;
+    VkDeviceMemory destMem;
+    VkMemoryRequirements memReqs;
+
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
+    image_create_info.extent.width = 32;
+    image_create_info.extent.height = 32;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_LINEAR;
+    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+    image_create_info.flags = 0;
+
+    err = vk::CreateImage(m_device->device(), &image_create_info, NULL, &srcImage);
+    ASSERT_VK_SUCCESS(err);
+
+    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    // Introduce failure by creating second image with a different-sized format.
+    image_create_info.format = VK_FORMAT_R5G5B5A1_UNORM_PACK16;
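+    // B8G8R8A8_UNORM uses 4-byte texels while R5G5B5A1_UNORM_PACK16 uses 2-byte texels, so the texel block sizes differ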
+    VkFormatProperties properties;
+    vk::GetPhysicalDeviceFormatProperties(m_device->phy().handle(), image_create_info.format, &properties);
+    if (properties.optimalTilingFeatures == 0) {
+        vk::DestroyImage(m_device->device(), srcImage, NULL);
+        printf("%s Image format not supported; skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    err = vk::CreateImage(m_device->device(), &image_create_info, NULL, &dstImage);
+    ASSERT_VK_SUCCESS(err);
+
+    // Allocate memory
+    VkMemoryAllocateInfo memAlloc = {};
+    memAlloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    memAlloc.pNext = NULL;
+    memAlloc.allocationSize = 0;
+    memAlloc.memoryTypeIndex = 0;
+
+    vk::GetImageMemoryRequirements(m_device->device(), srcImage, &memReqs);
+    memAlloc.allocationSize = memReqs.size;
+    pass = m_device->phy().set_memory_type(memReqs.memoryTypeBits, &memAlloc, 0);
+    ASSERT_TRUE(pass);
+    err = vk::AllocateMemory(m_device->device(), &memAlloc, NULL, &srcMem);
+    ASSERT_VK_SUCCESS(err);
+
+    vk::GetImageMemoryRequirements(m_device->device(), dstImage, &memReqs);
+    memAlloc.allocationSize = memReqs.size;
+    pass = m_device->phy().set_memory_type(memReqs.memoryTypeBits, &memAlloc, 0);
+    ASSERT_TRUE(pass);
+    err = vk::AllocateMemory(m_device->device(), &memAlloc, NULL, &destMem);
+    ASSERT_VK_SUCCESS(err);
+
+    err = vk::BindImageMemory(m_device->device(), srcImage, srcMem, 0);
+    ASSERT_VK_SUCCESS(err);
+    err = vk::BindImageMemory(m_device->device(), dstImage, destMem, 0);
+    ASSERT_VK_SUCCESS(err);
+
+    m_commandBuffer->begin();
+    VkImageCopy copyRegion;
+    copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copyRegion.srcSubresource.mipLevel = 0;
+    copyRegion.srcSubresource.baseArrayLayer = 0;
+    copyRegion.srcSubresource.layerCount = 1;
+    copyRegion.srcOffset.x = 0;
+    copyRegion.srcOffset.y = 0;
+    copyRegion.srcOffset.z = 0;
+    copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copyRegion.dstSubresource.mipLevel = 0;
+    copyRegion.dstSubresource.baseArrayLayer = 0;
+    copyRegion.dstSubresource.layerCount = 1;
+    copyRegion.dstOffset.x = 0;
+    copyRegion.dstOffset.y = 0;
+    copyRegion.dstOffset.z = 0;
+    copyRegion.extent.width = 1;
+    copyRegion.extent.height = 1;
+    copyRegion.extent.depth = 1;
+    m_commandBuffer->CopyImage(srcImage, VK_IMAGE_LAYOUT_GENERAL, dstImage, VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
+    m_commandBuffer->end();
+
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyImage(m_device->device(), dstImage, NULL);
+    vk::FreeMemory(m_device->device(), destMem, NULL);
+
+    // Copy to multiplane image with mismatched sizes
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-srcImage-00135");
+
+    VkImageCreateInfo ci;
+    ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    ci.pNext = NULL;
+    ci.flags = 0;
+    ci.imageType = VK_IMAGE_TYPE_2D;
+    ci.format = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM;
+    ci.extent = {32, 32, 1};
+    ci.mipLevels = 1;
+    ci.arrayLayers = 1;
+    ci.samples = VK_SAMPLE_COUNT_1_BIT;
+    ci.tiling = VK_IMAGE_TILING_LINEAR;
+    ci.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    ci.queueFamilyIndexCount = 0;
+    ci.pQueueFamilyIndices = NULL;
+    ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+    VkFormatFeatureFlags features = VK_FORMAT_FEATURE_TRANSFER_DST_BIT;
+    bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), ci, features);
+    bool ycbcr = (DeviceExtensionEnabled(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME) ||
+                  (DeviceValidationVersion() >= VK_API_VERSION_1_1));
+    if (!supported || !ycbcr) {
+        printf("%s Image format not supported; skipped multiplanar copy test.\n", kSkipPrefix);
+        vk::DestroyImage(m_device->device(), srcImage, NULL);
+        vk::FreeMemory(m_device->device(), srcMem, NULL);
+        return;
+    }
+
+    VkImageObj mpImage(m_device);
+    mpImage.init(&ci);
+    ASSERT_TRUE(mpImage.initialized());
+    copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_0_BIT;
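+    // Plane 0 of VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM is single-channel 8-bit, so its texel size mismatches the 4-byte source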
+    vk::ResetCommandBuffer(m_commandBuffer->handle(), 0);
+    m_commandBuffer->begin();
+    m_commandBuffer->CopyImage(srcImage, VK_IMAGE_LAYOUT_GENERAL, mpImage.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
+    m_commandBuffer->end();
+
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyImage(m_device->device(), srcImage, NULL);
+    vk::FreeMemory(m_device->device(), srcMem, NULL);
+}
+
+TEST_F(VkLayerTest, CopyImageDepthStencilFormatMismatch) {
+    ASSERT_NO_FATAL_FAILURE(Init());
+    auto depth_format = FindSupportedDepthStencilFormat(gpu());
+    if (!depth_format) {
+        printf("%s Couldn't depth stencil image format.\n", kSkipPrefix);
+        return;
+    }
+
+    VkFormatProperties properties;
+    vk::GetPhysicalDeviceFormatProperties(m_device->phy().handle(), VK_FORMAT_D32_SFLOAT, &properties);
+    if (properties.optimalTilingFeatures == 0) {
+        printf("%s Image format not supported; skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    VkImageObj srcImage(m_device);
+    srcImage.Init(32, 32, 1, VK_FORMAT_D32_SFLOAT, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_OPTIMAL);
+    ASSERT_TRUE(srcImage.initialized());
+    VkImageObj dstImage(m_device);
+    dstImage.Init(32, 32, 1, depth_format, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
+    ASSERT_TRUE(dstImage.initialized());
+
+    // Create two images of different types and try to copy between them
+
+    m_commandBuffer->begin();
+    VkImageCopy copyRegion;
+    copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+    copyRegion.srcSubresource.mipLevel = 0;
+    copyRegion.srcSubresource.baseArrayLayer = 0;
+    copyRegion.srcSubresource.layerCount = 1;
+    copyRegion.srcOffset.x = 0;
+    copyRegion.srcOffset.y = 0;
+    copyRegion.srcOffset.z = 0;
+    copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+    copyRegion.dstSubresource.mipLevel = 0;
+    copyRegion.dstSubresource.baseArrayLayer = 0;
+    copyRegion.dstSubresource.layerCount = 1;
+    copyRegion.dstOffset.x = 0;
+    copyRegion.dstOffset.y = 0;
+    copyRegion.dstOffset.z = 0;
+    copyRegion.extent.width = 1;
+    copyRegion.extent.height = 1;
+    copyRegion.extent.depth = 1;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "vkCmdCopyImage called with unmatched source and dest image depth");
+    m_commandBuffer->CopyImage(srcImage.handle(), VK_IMAGE_LAYOUT_GENERAL, dstImage.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                               &copyRegion);
+    m_commandBuffer->end();
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CopyImageSampleCountMismatch) {
+    TEST_DESCRIPTION("Image copies with sample count mis-matches");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkImageFormatProperties image_format_properties;
+    vk::GetPhysicalDeviceImageFormatProperties(gpu(), VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_TYPE_2D, VK_IMAGE_TILING_OPTIMAL,
+                                               VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, 0,
+                                               &image_format_properties);
+
+    if ((0 == (VK_SAMPLE_COUNT_2_BIT & image_format_properties.sampleCounts)) ||
+        (0 == (VK_SAMPLE_COUNT_4_BIT & image_format_properties.sampleCounts))) {
+        printf("%s Image multi-sample support not found; skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    VkImageCreateInfo ci;
+    ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    ci.pNext = NULL;
+    ci.flags = 0;
+    ci.imageType = VK_IMAGE_TYPE_2D;
+    ci.format = VK_FORMAT_R8G8B8A8_UNORM;
+    ci.extent = {128, 128, 1};
+    ci.mipLevels = 1;
+    ci.arrayLayers = 1;
+    ci.samples = VK_SAMPLE_COUNT_1_BIT;
+    ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+    ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    ci.queueFamilyIndexCount = 0;
+    ci.pQueueFamilyIndices = NULL;
+    ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+    VkImageObj image1(m_device);
+    image1.init(&ci);
+    ASSERT_TRUE(image1.initialized());
+
+    ci.samples = VK_SAMPLE_COUNT_2_BIT;
+    VkImageObj image2(m_device);
+    image2.init(&ci);
+    ASSERT_TRUE(image2.initialized());
+
+    ci.samples = VK_SAMPLE_COUNT_4_BIT;
+    VkImageObj image4(m_device);
+    image4.init(&ci);
+    ASSERT_TRUE(image4.initialized());
+
+    m_commandBuffer->begin();
+
+    VkImageCopy copyRegion;
+    copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copyRegion.srcSubresource.mipLevel = 0;
+    copyRegion.srcSubresource.baseArrayLayer = 0;
+    copyRegion.srcSubresource.layerCount = 1;
+    copyRegion.srcOffset = {0, 0, 0};
+    copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copyRegion.dstSubresource.mipLevel = 0;
+    copyRegion.dstSubresource.baseArrayLayer = 0;
+    copyRegion.dstSubresource.layerCount = 1;
+    copyRegion.dstOffset = {0, 0, 0};
+    copyRegion.extent = {128, 128, 1};
+
+    // Copy a single sample image to/from a multi-sample image
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-srcImage-00136");
+    vk::CmdCopyImage(m_commandBuffer->handle(), image1.handle(), VK_IMAGE_LAYOUT_GENERAL, image4.handle(), VK_IMAGE_LAYOUT_GENERAL,
+                     1, &copyRegion);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-srcImage-00136");
+    vk::CmdCopyImage(m_commandBuffer->handle(), image2.handle(), VK_IMAGE_LAYOUT_GENERAL, image1.handle(), VK_IMAGE_LAYOUT_GENERAL,
+                     1, &copyRegion);
+    m_errorMonitor->VerifyFound();
+
+    // Copy between multi-sample images with different sample counts
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-srcImage-00136");
+    vk::CmdCopyImage(m_commandBuffer->handle(), image2.handle(), VK_IMAGE_LAYOUT_GENERAL, image4.handle(), VK_IMAGE_LAYOUT_GENERAL,
+                     1, &copyRegion);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyImage-srcImage-00136");
+    vk::CmdCopyImage(m_commandBuffer->handle(), image4.handle(), VK_IMAGE_LAYOUT_GENERAL, image2.handle(), VK_IMAGE_LAYOUT_GENERAL,
+                     1, &copyRegion);
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, CopyImageAspectMismatch) {
+    TEST_DESCRIPTION("Image copies with aspect mask errors");
+    SetTargetApiVersion(VK_API_VERSION_1_1);
+    ASSERT_NO_FATAL_FAILURE(Init());
+    auto ds_format = FindSupportedDepthStencilFormat(gpu());
+    if (!ds_format) {
+        printf("%s Couldn't find depth stencil format.\n", kSkipPrefix);
+        return;
+    }
+
+    VkFormatProperties properties;
+    vk::GetPhysicalDeviceFormatProperties(m_device->phy().handle(), VK_FORMAT_D32_SFLOAT, &properties);
+    if (properties.optimalTilingFeatures == 0) {
+        printf("%s Image format VK_FORMAT_D32_SFLOAT not supported; skipped.\n", kSkipPrefix);
+        return;
+    }
+    VkImageObj color_image(m_device), ds_image(m_device), depth_image(m_device);
+    color_image.Init(128, 128, 1, VK_FORMAT_R32_SFLOAT, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT);
+    depth_image.Init(128, 128, 1, VK_FORMAT_D32_SFLOAT, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+                     VK_IMAGE_TILING_OPTIMAL, 0);
+    ds_image.Init(128, 128, 1, ds_format, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+                  VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(color_image.initialized());
+    ASSERT_TRUE(depth_image.initialized());
+    ASSERT_TRUE(ds_image.initialized());
+
+    VkImageCopy copyRegion;
+    copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+    copyRegion.srcSubresource.mipLevel = 0;
+    copyRegion.srcSubresource.baseArrayLayer = 0;
+    copyRegion.srcSubresource.layerCount = 1;
+    copyRegion.srcOffset = {0, 0, 0};
+    copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+    copyRegion.dstSubresource.mipLevel = 0;
+    copyRegion.dstSubresource.baseArrayLayer = 0;
+    copyRegion.dstSubresource.layerCount = 1;
+    copyRegion.dstOffset = {64, 0, 0};
+    copyRegion.extent = {64, 128, 1};
+
+    // Issue the copy before the command buffer is in the recording state
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "You must call vkBeginCommandBuffer");  // "VUID-vkCmdCopyImage-commandBuffer-recording");
+    vk::CmdCopyImage(m_commandBuffer->handle(), depth_image.handle(), VK_IMAGE_LAYOUT_GENERAL, depth_image.handle(),
+                     VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->begin();
+
+    // Src and dest aspect masks don't match
+    copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
+    bool ycbcr = (DeviceExtensionEnabled(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME) ||
+                  (DeviceValidationVersion() >= VK_API_VERSION_1_1));
+    std::string vuid = (ycbcr ? "VUID-VkImageCopy-srcImage-01551" : "VUID-VkImageCopy-aspectMask-00137");
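+    // With the YCbCr conversion extension (or Vulkan 1.1) the per-plane VU replaces the older aspect-match VU, hence the conditional VUID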
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
+    vk::CmdCopyImage(m_commandBuffer->handle(), ds_image.handle(), VK_IMAGE_LAYOUT_GENERAL, ds_image.handle(),
+                     VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
+    m_errorMonitor->VerifyFound();
+    copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+
+    // Illegal combinations of aspect bits
+    copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT;  // color must be alone
+    copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageSubresourceLayers-aspectMask-00167");
+    // These aspect/format mismatches are redundant but unavoidable here
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-aspectMask-00142");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
+    vk::CmdCopyImage(m_commandBuffer->handle(), color_image.handle(), VK_IMAGE_LAYOUT_GENERAL, color_image.handle(),
+                     VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
+    m_errorMonitor->VerifyFound();
+    // same test for dstSubresource
+    copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT;  // color must be alone
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageSubresourceLayers-aspectMask-00167");
+    // These aspect/format mismatches are redundant but unavoidable here
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-aspectMask-00143");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
+    vk::CmdCopyImage(m_commandBuffer->handle(), color_image.handle(), VK_IMAGE_LAYOUT_GENERAL, color_image.handle(),
+                     VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
+    m_errorMonitor->VerifyFound();
+
+    // Metadata aspect is illegal
+    copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_METADATA_BIT;
+    copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageSubresourceLayers-aspectMask-00168");
+    // These aspect/format mismatches are redundant but unavoidable here
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
+    vk::CmdCopyImage(m_commandBuffer->handle(), color_image.handle(), VK_IMAGE_LAYOUT_GENERAL, color_image.handle(),
+                     VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
+    m_errorMonitor->VerifyFound();
+    // same test for dstSubresource
+    copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_METADATA_BIT;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageSubresourceLayers-aspectMask-00168");
+    // These aspect/format mismatches are redundant but unavoidable here
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, vuid);
+    vk::CmdCopyImage(m_commandBuffer->handle(), color_image.handle(), VK_IMAGE_LAYOUT_GENERAL, color_image.handle(),
+                     VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
+    m_errorMonitor->VerifyFound();
+
+    copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+    copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+
+    // Aspect mask doesn't match source image format
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-aspectMask-00142");
+    // Again redundant but unavoidable
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "unmatched source and dest image depth/stencil formats");
+    vk::CmdCopyImage(m_commandBuffer->handle(), color_image.handle(), VK_IMAGE_LAYOUT_GENERAL, depth_image.handle(),
+                     VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
+    m_errorMonitor->VerifyFound();
+
+    // Aspect mask doesn't match dest image format
+    copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCopy-aspectMask-00143");
+    // Again redundant but unavoidable
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "unmatched source and dest image depth/stencil formats");
+    vk::CmdCopyImage(m_commandBuffer->handle(), color_image.handle(), VK_IMAGE_LAYOUT_GENERAL, depth_image.handle(),
+                     VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, ResolveImageLowSampleCount) {
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "vkCmdResolveImage called with source sample count less than 2.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // Create two images of sample count 1 and try to Resolve between them
+
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
+    image_create_info.extent.width = 32;
+    image_create_info.extent.height = 1;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    image_create_info.flags = 0;
+
+    VkImageObj srcImage(m_device);
+    srcImage.init(&image_create_info);
+    ASSERT_TRUE(srcImage.initialized());
+
+    VkImageObj dstImage(m_device);
+    dstImage.init(&image_create_info);
+    ASSERT_TRUE(dstImage.initialized());
+
+    m_commandBuffer->begin();
+    VkImageResolve resolveRegion;
+    resolveRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    resolveRegion.srcSubresource.mipLevel = 0;
+    resolveRegion.srcSubresource.baseArrayLayer = 0;
+    resolveRegion.srcSubresource.layerCount = 1;
+    resolveRegion.srcOffset.x = 0;
+    resolveRegion.srcOffset.y = 0;
+    resolveRegion.srcOffset.z = 0;
+    resolveRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    resolveRegion.dstSubresource.mipLevel = 0;
+    resolveRegion.dstSubresource.baseArrayLayer = 0;
+    resolveRegion.dstSubresource.layerCount = 1;
+    resolveRegion.dstOffset.x = 0;
+    resolveRegion.dstOffset.y = 0;
+    resolveRegion.dstOffset.z = 0;
+    resolveRegion.extent.width = 1;
+    resolveRegion.extent.height = 1;
+    resolveRegion.extent.depth = 1;
+    m_commandBuffer->ResolveImage(srcImage.handle(), VK_IMAGE_LAYOUT_GENERAL, dstImage.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                                  &resolveRegion);
+    m_commandBuffer->end();
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, ResolveImageHighSampleCount) {
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "vkCmdResolveImage called with dest sample count greater than 1.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // Create two images of sample count 4 and try to Resolve between them
+
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
+    image_create_info.extent.width = 32;
+    image_create_info.extent.height = 1;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_4_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    // Note: Some implementations expect color attachment usage for any
+    // multisample surface
+    image_create_info.usage =
+        VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+    image_create_info.flags = 0;
+
+    VkImageObj srcImage(m_device);
+    srcImage.init(&image_create_info);
+    ASSERT_TRUE(srcImage.initialized());
+
+    VkImageObj dstImage(m_device);
+    dstImage.init(&image_create_info);
+    ASSERT_TRUE(dstImage.initialized());
+
+    m_commandBuffer->begin();
+    // Need memory barrier to VK_IMAGE_LAYOUT_GENERAL for source and dest?
+    // VK_IMAGE_LAYOUT_UNDEFINED = 0,
+    // VK_IMAGE_LAYOUT_GENERAL = 1,
+    VkImageResolve resolveRegion;
+    resolveRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    resolveRegion.srcSubresource.mipLevel = 0;
+    resolveRegion.srcSubresource.baseArrayLayer = 0;
+    resolveRegion.srcSubresource.layerCount = 1;
+    resolveRegion.srcOffset.x = 0;
+    resolveRegion.srcOffset.y = 0;
+    resolveRegion.srcOffset.z = 0;
+    resolveRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    resolveRegion.dstSubresource.mipLevel = 0;
+    resolveRegion.dstSubresource.baseArrayLayer = 0;
+    resolveRegion.dstSubresource.layerCount = 1;
+    resolveRegion.dstOffset.x = 0;
+    resolveRegion.dstOffset.y = 0;
+    resolveRegion.dstOffset.z = 0;
+    resolveRegion.extent.width = 1;
+    resolveRegion.extent.height = 1;
+    resolveRegion.extent.depth = 1;
+    m_commandBuffer->ResolveImage(srcImage.handle(), VK_IMAGE_LAYOUT_GENERAL, dstImage.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                                  &resolveRegion);
+    m_commandBuffer->end();
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, ResolveImageFormatMismatch) {
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT,
+                                         "vkCmdResolveImage called with unmatched source and dest formats.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // Create two images of different types and try to copy between them
+    VkImageObj srcImage(m_device);
+    VkImageObj dstImage(m_device);
+
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
+    image_create_info.extent.width = 32;
+    image_create_info.extent.height = 1;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_2_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    // Note: Some implementations expect color attachment usage for any
+    // multisample surface
+    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+    image_create_info.flags = 0;
+    srcImage.init(&image_create_info);
+
+    // Set format to something other than source image
+    image_create_info.format = VK_FORMAT_R32_SFLOAT;
+    // Note: Some implementations expect color attachment usage for any
+    // multisample surface
+    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    dstImage.init(&image_create_info);
+
+    m_commandBuffer->begin();
+    // Need memory barrier to VK_IMAGE_LAYOUT_GENERAL for source and dest?
+    // VK_IMAGE_LAYOUT_UNDEFINED = 0,
+    // VK_IMAGE_LAYOUT_GENERAL = 1,
+    VkImageResolve resolveRegion;
+    resolveRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    resolveRegion.srcSubresource.mipLevel = 0;
+    resolveRegion.srcSubresource.baseArrayLayer = 0;
+    resolveRegion.srcSubresource.layerCount = 1;
+    resolveRegion.srcOffset.x = 0;
+    resolveRegion.srcOffset.y = 0;
+    resolveRegion.srcOffset.z = 0;
+    resolveRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    resolveRegion.dstSubresource.mipLevel = 0;
+    resolveRegion.dstSubresource.baseArrayLayer = 0;
+    resolveRegion.dstSubresource.layerCount = 1;
+    resolveRegion.dstOffset.x = 0;
+    resolveRegion.dstOffset.y = 0;
+    resolveRegion.dstOffset.z = 0;
+    resolveRegion.extent.width = 1;
+    resolveRegion.extent.height = 1;
+    resolveRegion.extent.depth = 1;
+    m_commandBuffer->ResolveImage(srcImage.handle(), VK_IMAGE_LAYOUT_GENERAL, dstImage.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                                  &resolveRegion);
+    m_commandBuffer->end();
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, ResolveImageTypeMismatch) {
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT,
+                                         "vkCmdResolveImage called with unmatched source and dest image types.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // Create two images of different types and try to copy between them
+    VkImageObj srcImage(m_device);
+    VkImageObj dstImage(m_device);
+
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
+    image_create_info.extent.width = 32;
+    image_create_info.extent.height = 1;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_2_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    // Note: Some implementations expect color attachment usage for any
+    // multisample surface
+    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+    image_create_info.flags = 0;
+    srcImage.init(&image_create_info);
+
+    image_create_info.imageType = VK_IMAGE_TYPE_1D;
+    // Note: Some implementations expect color attachment usage for any
+    // multisample surface
+    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    dstImage.init(&image_create_info);
+
+    m_commandBuffer->begin();
+    // Need memory barrier to VK_IMAGE_LAYOUT_GENERAL for source and dest?
+    // VK_IMAGE_LAYOUT_UNDEFINED = 0,
+    // VK_IMAGE_LAYOUT_GENERAL = 1,
+    VkImageResolve resolveRegion;
+    resolveRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    resolveRegion.srcSubresource.mipLevel = 0;
+    resolveRegion.srcSubresource.baseArrayLayer = 0;
+    resolveRegion.srcSubresource.layerCount = 1;
+    resolveRegion.srcOffset.x = 0;
+    resolveRegion.srcOffset.y = 0;
+    resolveRegion.srcOffset.z = 0;
+    resolveRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    resolveRegion.dstSubresource.mipLevel = 0;
+    resolveRegion.dstSubresource.baseArrayLayer = 0;
+    resolveRegion.dstSubresource.layerCount = 1;
+    resolveRegion.dstOffset.x = 0;
+    resolveRegion.dstOffset.y = 0;
+    resolveRegion.dstOffset.z = 0;
+    resolveRegion.extent.width = 1;
+    resolveRegion.extent.height = 1;
+    resolveRegion.extent.depth = 1;
+    m_commandBuffer->ResolveImage(srcImage.handle(), VK_IMAGE_LAYOUT_GENERAL, dstImage.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
+                                  &resolveRegion);
+    m_commandBuffer->end();
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, ResolveImageLayoutMismatch) {
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // Create two images of different types and try to copy between them
+    VkImageObj srcImage(m_device);
+    VkImageObj dstImage(m_device);
+
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
+    image_create_info.extent.width = 32;
+    image_create_info.extent.height = 32;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_2_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.usage =
+        VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+    // Note: Some implementations expect color attachment usage for any
+    // multisample surface
+    image_create_info.flags = 0;
+    srcImage.init(&image_create_info);
+    ASSERT_TRUE(srcImage.initialized());
+
+    // Note: Some implementations expect color attachment usage for any
+    // multisample surface
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    dstImage.init(&image_create_info);
+    ASSERT_TRUE(dstImage.initialized());
+
+    m_commandBuffer->begin();
+    // source image must have valid contents before resolve
+    VkClearColorValue clear_color = {{0, 0, 0, 0}};
+    VkImageSubresourceRange subresource = {};
+    subresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    subresource.layerCount = 1;
+    subresource.levelCount = 1;
+    srcImage.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+    m_commandBuffer->ClearColorImage(srcImage.image(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &clear_color, 1, &subresource);
+    srcImage.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+    dstImage.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+
+    VkImageResolve resolveRegion;
+    resolveRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    resolveRegion.srcSubresource.mipLevel = 0;
+    resolveRegion.srcSubresource.baseArrayLayer = 0;
+    resolveRegion.srcSubresource.layerCount = 1;
+    resolveRegion.srcOffset.x = 0;
+    resolveRegion.srcOffset.y = 0;
+    resolveRegion.srcOffset.z = 0;
+    resolveRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    resolveRegion.dstSubresource.mipLevel = 0;
+    resolveRegion.dstSubresource.baseArrayLayer = 0;
+    resolveRegion.dstSubresource.layerCount = 1;
+    resolveRegion.dstOffset.x = 0;
+    resolveRegion.dstOffset.y = 0;
+    resolveRegion.dstOffset.z = 0;
+    resolveRegion.extent.width = 1;
+    resolveRegion.extent.height = 1;
+    resolveRegion.extent.depth = 1;
+    // source image layout mismatch
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdResolveImage-srcImageLayout-00260");
+    m_commandBuffer->ResolveImage(srcImage.image(), VK_IMAGE_LAYOUT_GENERAL, dstImage.image(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+                                  1, &resolveRegion);
+    m_errorMonitor->VerifyFound();
+    // dst image layout mismatch
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdResolveImage-dstImageLayout-00262");
+    m_commandBuffer->ResolveImage(srcImage.image(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dstImage.image(), VK_IMAGE_LAYOUT_GENERAL,
+                                  1, &resolveRegion);
+    m_errorMonitor->VerifyFound();
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, ResolveInvalidSubresource) {
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // Create two images of different types and try to copy between them
+    VkImageObj srcImage(m_device);
+    VkImageObj dstImage(m_device);
+
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
+    image_create_info.extent.width = 32;
+    image_create_info.extent.height = 32;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_2_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.usage =
+        VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+    // Note: Some implementations expect color attachment usage for any
+    // multisample surface
+    image_create_info.flags = 0;
+    srcImage.init(&image_create_info);
+    ASSERT_TRUE(srcImage.initialized());
+
+    // Note: Some implementations expect color attachment usage for any
+    // multisample surface
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    dstImage.init(&image_create_info);
+    ASSERT_TRUE(dstImage.initialized());
+
+    m_commandBuffer->begin();
+    // source image must have valid contents before resolve
+    VkClearColorValue clear_color = {{0, 0, 0, 0}};
+    VkImageSubresourceRange subresource = {};
+    subresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    subresource.layerCount = 1;
+    subresource.levelCount = 1;
+    srcImage.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+    m_commandBuffer->ClearColorImage(srcImage.image(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &clear_color, 1, &subresource);
+    srcImage.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+    dstImage.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+
+    VkImageResolve resolveRegion;
+    resolveRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    resolveRegion.srcSubresource.mipLevel = 0;
+    resolveRegion.srcSubresource.baseArrayLayer = 0;
+    resolveRegion.srcSubresource.layerCount = 1;
+    resolveRegion.srcOffset.x = 0;
+    resolveRegion.srcOffset.y = 0;
+    resolveRegion.srcOffset.z = 0;
+    resolveRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    resolveRegion.dstSubresource.mipLevel = 0;
+    resolveRegion.dstSubresource.baseArrayLayer = 0;
+    resolveRegion.dstSubresource.layerCount = 1;
+    resolveRegion.dstOffset.x = 0;
+    resolveRegion.dstOffset.y = 0;
+    resolveRegion.dstOffset.z = 0;
+    resolveRegion.extent.width = 1;
+    resolveRegion.extent.height = 1;
+    resolveRegion.extent.depth = 1;
+    // invalid source mip level
+    resolveRegion.srcSubresource.mipLevel = image_create_info.mipLevels;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdResolveImage-srcSubresource-01709");
+    m_commandBuffer->ResolveImage(srcImage.image(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dstImage.image(),
+                                  VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &resolveRegion);
+    m_errorMonitor->VerifyFound();
+    resolveRegion.srcSubresource.mipLevel = 0;
+    // invalid dest mip level
+    resolveRegion.dstSubresource.mipLevel = image_create_info.mipLevels;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdResolveImage-dstSubresource-01710");
+    m_commandBuffer->ResolveImage(srcImage.image(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dstImage.image(),
+                                  VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &resolveRegion);
+    m_errorMonitor->VerifyFound();
+    resolveRegion.dstSubresource.mipLevel = 0;
+    // invalid source array layer range
+    resolveRegion.srcSubresource.baseArrayLayer = image_create_info.arrayLayers;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdResolveImage-srcSubresource-01711");
+    m_commandBuffer->ResolveImage(srcImage.image(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dstImage.image(),
+                                  VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &resolveRegion);
+    m_errorMonitor->VerifyFound();
+    resolveRegion.srcSubresource.baseArrayLayer = 0;
+    // invalid dest array layer range
+    resolveRegion.dstSubresource.baseArrayLayer = image_create_info.arrayLayers;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdResolveImage-dstSubresource-01712");
+    m_commandBuffer->ResolveImage(srcImage.image(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dstImage.image(),
+                                  VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &resolveRegion);
+    m_errorMonitor->VerifyFound();
+    resolveRegion.dstSubresource.baseArrayLayer = 0;
+
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, ClearImageErrors) {
+    TEST_DESCRIPTION("Call ClearColorImage w/ a depth|stencil image and ClearDepthStencilImage with a color image.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    m_commandBuffer->begin();
+
+    // Color image
+    VkClearColorValue clear_color;
+    memset(clear_color.uint32, 0, sizeof(uint32_t) * 4);
+    const VkFormat color_format = VK_FORMAT_B8G8R8A8_UNORM;
+    const int32_t img_width = 32;
+    const int32_t img_height = 32;
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = color_format;
+    image_create_info.extent.width = img_width;
+    image_create_info.extent.height = img_height;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+
+    image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+    vk_testing::Image color_image_no_transfer;
+    color_image_no_transfer.init(*m_device, image_create_info);
+
+    image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    vk_testing::Image color_image;
+    color_image.init(*m_device, image_create_info);
+
+    const VkImageSubresourceRange color_range = vk_testing::Image::subresource_range(image_create_info, VK_IMAGE_ASPECT_COLOR_BIT);
+
+    // Depth/Stencil image
+    VkClearDepthStencilValue clear_value = {0};
+    VkImageCreateInfo ds_image_create_info = vk_testing::Image::create_info();
+    ds_image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    ds_image_create_info.format = VK_FORMAT_D16_UNORM;
+    ds_image_create_info.extent.width = 64;
+    ds_image_create_info.extent.height = 64;
+    ds_image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    ds_image_create_info.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+
+    vk_testing::Image ds_image;
+    ds_image.init(*m_device, ds_image_create_info);
+
+    const VkImageSubresourceRange ds_range = vk_testing::Image::subresource_range(ds_image_create_info, VK_IMAGE_ASPECT_DEPTH_BIT);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "vkCmdClearColorImage called with depth/stencil image.");
+
+    vk::CmdClearColorImage(m_commandBuffer->handle(), ds_image.handle(), VK_IMAGE_LAYOUT_GENERAL, &clear_color, 1, &color_range);
+
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "vkCmdClearColorImage called with image created without VK_IMAGE_USAGE_TRANSFER_DST_BIT");
+
+    vk::CmdClearColorImage(m_commandBuffer->handle(), color_image_no_transfer.handle(), VK_IMAGE_LAYOUT_GENERAL, &clear_color, 1,
+                           &color_range);
+
+    m_errorMonitor->VerifyFound();
+
+    // Call CmdClearDepthStencilImage with color image
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "vkCmdClearDepthStencilImage called without a depth/stencil image.");
+
+    vk::CmdClearDepthStencilImage(m_commandBuffer->handle(), color_image.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+                                  &clear_value, 1, &ds_range);
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CommandQueueFlags) {
+    TEST_DESCRIPTION(
+        "Allocate a command buffer on a queue that does not support graphics and try to issue a graphics-only command");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    uint32_t queueFamilyIndex = m_device->QueueFamilyWithoutCapabilities(VK_QUEUE_GRAPHICS_BIT);
+    if (queueFamilyIndex == UINT32_MAX) {
+        printf("%s Non-graphics queue family not found; skipped.\n", kSkipPrefix);
+        return;
+    } else {
+        // Create command pool on a non-graphics queue
+        VkCommandPoolObj command_pool(m_device, queueFamilyIndex);
+
+        // Setup command buffer on pool
+        VkCommandBufferObj command_buffer(m_device, &command_pool);
+        command_buffer.begin();
+
+        // Issue a graphics only command
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-commandBuffer-cmdpool");
+        VkViewport viewport = {0, 0, 16, 16, 0, 1};
+        command_buffer.SetViewport(0, 1, &viewport);
+        m_errorMonitor->VerifyFound();
+    }
+}
+
+TEST_F(VkLayerTest, ExecuteUnrecordedSecondaryCB) {
+    TEST_DESCRIPTION("Attempt vkCmdExecuteCommands with a CB in the initial state");
+    ASSERT_NO_FATAL_FAILURE(Init());
+    VkCommandBufferObj secondary(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+    // never record secondary
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdExecuteCommands-pCommandBuffers-00089");
+    m_commandBuffer->begin();
+    vk::CmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
+    m_errorMonitor->VerifyFound();
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, ExecuteSecondaryCBWithLayoutMismatch) {
+    TEST_DESCRIPTION("Attempt vkCmdExecuteCommands with a CB with incorrect initial layout.");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
+    image_create_info.extent.width = 32;
+    image_create_info.extent.height = 1;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    image_create_info.flags = 0;
+
+    VkImageSubresource image_sub = VkImageObj::subresource(VK_IMAGE_ASPECT_COLOR_BIT, 0, 0);
+    VkImageSubresourceRange image_sub_range = VkImageObj::subresource_range(image_sub);
+
+    VkImageObj image(m_device);
+    image.init(&image_create_info);
+    ASSERT_TRUE(image.initialized());
+    VkImageMemoryBarrier image_barrier =
+        image.image_memory_barrier(0, 0, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, image_sub_range);
+
+    auto pipeline = [&image_barrier](const VkCommandBufferObj &cb, VkImageLayout old_layout, VkImageLayout new_layout) {
+        image_barrier.oldLayout = old_layout;
+        image_barrier.newLayout = new_layout;
+        vk::CmdPipelineBarrier(cb.handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr,
+                               0, nullptr, 1, &image_barrier);
+    };
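+    // Helper that records an image layout transition (old_layout -> new_layout) into the given command buffer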
+
+    // Validate that mismatched use of image layout in secondary command buffer is caught at record time
+    VkCommandBufferObj secondary(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+    secondary.begin();
+    pipeline(secondary, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+    secondary.end();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-vkCmdExecuteCommands-commandBuffer-00001");
+    m_commandBuffer->begin();
+    pipeline(*m_commandBuffer, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+    vk::CmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
+    m_errorMonitor->VerifyFound();
+
+    // Validate that we've tracked the changes from the secondary CB correctly
+    m_errorMonitor->ExpectSuccess();
+    pipeline(*m_commandBuffer, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL);
+    m_errorMonitor->VerifyNotFound();
+    m_commandBuffer->end();
+
+    m_commandBuffer->reset();
+    secondary.reset();
+
+    // Validate that UNDEFINED doesn't false positive on us
+    secondary.begin();
+    pipeline(secondary, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+    secondary.end();
+    m_commandBuffer->begin();
+    pipeline(*m_commandBuffer, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+    m_errorMonitor->ExpectSuccess();
+    vk::CmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
+    m_errorMonitor->VerifyNotFound();
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, SetDynViewportParamTests) {
+    TEST_DESCRIPTION("Test parameters of vkCmdSetViewport without multiViewport feature");
+
+    SetTargetApiVersion(VK_API_VERSION_1_1);
+    VkPhysicalDeviceFeatures features{};
+    ASSERT_NO_FATAL_FAILURE(Init(&features));
+
+    const VkViewport vp = {0.0, 0.0, 64.0, 64.0, 0.0, 1.0};
+    const VkViewport viewports[] = {vp, vp};
+
+    m_commandBuffer->begin();
+
+    // array tests
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-firstViewport-01224");
+    vk::CmdSetViewport(m_commandBuffer->handle(), 1, 1, viewports);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-viewportCount-arraylength");
+    vk::CmdSetViewport(m_commandBuffer->handle(), 0, 0, nullptr);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-viewportCount-01225");
+    vk::CmdSetViewport(m_commandBuffer->handle(), 0, 2, viewports);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-firstViewport-01224");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-viewportCount-01225");
+    vk::CmdSetViewport(m_commandBuffer->handle(), 1, 2, viewports);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-pViewports-parameter");
+    vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, nullptr);
+    m_errorMonitor->VerifyFound();
+
+    // core viewport tests
+    using std::vector;
+    struct TestCase {
+        VkViewport vp;
+        std::string veid;
+    };
+
+    // not necessarily boundary values (unspecified cast rounding), but guaranteed to be over limit
+    const auto one_past_max_w = NearestGreater(static_cast<float>(m_device->props.limits.maxViewportDimensions[0]));
+    const auto one_past_max_h = NearestGreater(static_cast<float>(m_device->props.limits.maxViewportDimensions[1]));
+
+    const auto min_bound = m_device->props.limits.viewportBoundsRange[0];
+    const auto max_bound = m_device->props.limits.viewportBoundsRange[1];
+    const auto one_before_min_bounds = NearestSmaller(min_bound);
+    const auto one_past_max_bounds = NearestGreater(max_bound);
+
+    const auto below_zero = NearestSmaller(0.0f);
+    const auto past_one = NearestGreater(1.0f);
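+    // minDepth/maxDepth are expected to stay within [0.0, 1.0] here; below_zero and past_one are the closest representable values just outside that range.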
+
+    vector<TestCase> test_cases = {
+        {{0.0, 0.0, 0.0, 64.0, 0.0, 1.0}, "VUID-VkViewport-width-01770"},
+        {{0.0, 0.0, one_past_max_w, 64.0, 0.0, 1.0}, "VUID-VkViewport-width-01771"},
+        {{0.0, 0.0, NAN, 64.0, 0.0, 1.0}, "VUID-VkViewport-width-01770"},
+        {{0.0, 0.0, 64.0, one_past_max_h, 0.0, 1.0}, "VUID-VkViewport-height-01773"},
+        {{one_before_min_bounds, 0.0, 64.0, 64.0, 0.0, 1.0}, "VUID-VkViewport-x-01774"},
+        {{one_past_max_bounds, 0.0, 64.0, 64.0, 0.0, 1.0}, "VUID-VkViewport-x-01232"},
+        {{NAN, 0.0, 64.0, 64.0, 0.0, 1.0}, "VUID-VkViewport-x-01774"},
+        {{0.0, one_before_min_bounds, 64.0, 64.0, 0.0, 1.0}, "VUID-VkViewport-y-01775"},
+        {{0.0, NAN, 64.0, 64.0, 0.0, 1.0}, "VUID-VkViewport-y-01775"},
+        {{max_bound, 0.0, 1.0, 64.0, 0.0, 1.0}, "VUID-VkViewport-x-01232"},
+        {{0.0, max_bound, 64.0, 1.0, 0.0, 1.0}, "VUID-VkViewport-y-01233"},
+        {{0.0, 0.0, 64.0, 64.0, below_zero, 1.0}, "VUID-VkViewport-minDepth-01234"},
+        {{0.0, 0.0, 64.0, 64.0, past_one, 1.0}, "VUID-VkViewport-minDepth-01234"},
+        {{0.0, 0.0, 64.0, 64.0, NAN, 1.0}, "VUID-VkViewport-minDepth-01234"},
+        {{0.0, 0.0, 64.0, 64.0, 0.0, below_zero}, "VUID-VkViewport-maxDepth-01235"},
+        {{0.0, 0.0, 64.0, 64.0, 0.0, past_one}, "VUID-VkViewport-maxDepth-01235"},
+        {{0.0, 0.0, 64.0, 64.0, 0.0, NAN}, "VUID-VkViewport-maxDepth-01235"},
+    };
+
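+    // The viewport height rules change with Vulkan 1.1 (maintenance1 behavior), so the zero- and NaN-height cases are added with version-specific VUIDs.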
+    if (DeviceValidationVersion() < VK_API_VERSION_1_1) {
+        test_cases.push_back({{0.0, 0.0, 64.0, 0.0, 0.0, 1.0}, "VUID-VkViewport-height-01772"});
+        test_cases.push_back({{0.0, 0.0, 64.0, NAN, 0.0, 1.0}, "VUID-VkViewport-height-01772"});
+    } else {
+        test_cases.push_back({{0.0, 0.0, 64.0, NAN, 0.0, 1.0}, "VUID-VkViewport-height-01773"});
+    }
+
+    for (const auto &test_case : test_cases) {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.veid);
+        vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &test_case.vp);
+        m_errorMonitor->VerifyFound();
+    }
+}
+
+TEST_F(VkLayerTest, SetDynViewportParamMaintenance1Tests) {
+    TEST_DESCRIPTION("Verify errors are detected on misuse of SetViewport with a negative viewport extension enabled.");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+    } else {
+        printf("%s VK_KHR_maintenance1 extension not supported -- skipping test\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    NegHeightViewportTests(m_device, m_commandBuffer, m_errorMonitor);
+}
+
+TEST_F(VkLayerTest, SetDynViewportParamMultiviewportTests) {
+    TEST_DESCRIPTION("Test parameters of vkCmdSetViewport with multiViewport feature enabled");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    if (!m_device->phy().features().multiViewport) {
+        printf("%s VkPhysicalDeviceFeatures::multiViewport is not supported -- skipping test.\n", kSkipPrefix);
+        return;
+    }
+
+    m_commandBuffer->begin();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-viewportCount-arraylength");
+    vk::CmdSetViewport(m_commandBuffer->handle(), 0, 0, nullptr);
+    m_errorMonitor->VerifyFound();
+
+    const auto max_viewports = m_device->props.limits.maxViewports;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-pViewports-parameter");
+    vk::CmdSetViewport(m_commandBuffer->handle(), 0, max_viewports, nullptr);
+    m_errorMonitor->VerifyFound();
+
+    const uint32_t too_big_max_viewports = 65536 + 1;  // let's say this is too much to allocate
+    if (max_viewports >= too_big_max_viewports) {
+        printf("%s VkPhysicalDeviceLimits::maxViewports is too large to practically test against -- skipping part of test.\n",
+               kSkipPrefix);
+    } else {
+        const VkViewport vp = {0.0, 0.0, 64.0, 64.0, 0.0, 1.0};
+        const std::vector<VkViewport> viewports(max_viewports + 1, vp);
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-firstViewport-01223");
+        vk::CmdSetViewport(m_commandBuffer->handle(), 0, max_viewports + 1, viewports.data());
+        m_errorMonitor->VerifyFound();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-firstViewport-01223");
+        vk::CmdSetViewport(m_commandBuffer->handle(), max_viewports, 1, viewports.data());
+        m_errorMonitor->VerifyFound();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-firstViewport-01223");
+        vk::CmdSetViewport(m_commandBuffer->handle(), 1, max_viewports, viewports.data());
+        m_errorMonitor->VerifyFound();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-viewportCount-arraylength");
+        vk::CmdSetViewport(m_commandBuffer->handle(), 1, 0, viewports.data());
+        m_errorMonitor->VerifyFound();
+    }
+}
+
+TEST_F(VkLayerTest, BadRenderPassScopeSecondaryCmdBuffer) {
+    TEST_DESCRIPTION(
+        "Test secondary buffers executed in wrong render pass scope wrt VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkCommandBufferObj sec_cmdbuff_inside_rp(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+    VkCommandBufferObj sec_cmdbuff_outside_rp(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+
+    const VkCommandBufferInheritanceInfo cmdbuff_ii = {
+        VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
+        nullptr,  // pNext
+        m_renderPass,
+        0,  // subpass
+        m_framebuffer,
+    };
+    const VkCommandBufferBeginInfo cmdbuff_bi_tmpl = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+                                                      nullptr,  // pNext
+                                                      VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, &cmdbuff_ii};
+
+    VkCommandBufferBeginInfo cmdbuff_inside_rp_bi = cmdbuff_bi_tmpl;
+    cmdbuff_inside_rp_bi.flags |= VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
+    sec_cmdbuff_inside_rp.begin(&cmdbuff_inside_rp_bi);
+    sec_cmdbuff_inside_rp.end();
+
+    VkCommandBufferBeginInfo cmdbuff_outside_rp_bi = cmdbuff_bi_tmpl;
+    cmdbuff_outside_rp_bi.flags &= ~VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
+    sec_cmdbuff_outside_rp.begin(&cmdbuff_outside_rp_bi);
+    sec_cmdbuff_outside_rp.end();
+
+    m_commandBuffer->begin();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdExecuteCommands-pCommandBuffers-00100");
+    vk::CmdExecuteCommands(m_commandBuffer->handle(), 1, &sec_cmdbuff_inside_rp.handle());
+    m_errorMonitor->VerifyFound();
+
+    const VkRenderPassBeginInfo rp_bi{VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
+                                      nullptr,  // pNext
+                                      m_renderPass,
+                                      m_framebuffer,
+                                      {{0, 0}, {32, 32}},
+                                      static_cast<uint32_t>(m_renderPassClearValues.size()),
+                                      m_renderPassClearValues.data()};
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &rp_bi, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdExecuteCommands-pCommandBuffers-00096");
+    vk::CmdExecuteCommands(m_commandBuffer->handle(), 1, &sec_cmdbuff_outside_rp.handle());
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, SecondaryCommandBufferClearColorAttachmentsRenderArea) {
+    TEST_DESCRIPTION(
+        "Create a secondary command buffer with CmdClearAttachments call that has a rect outside of renderPass renderArea");
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkCommandBufferAllocateInfo command_buffer_allocate_info = {};
+    command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+    command_buffer_allocate_info.commandPool = m_commandPool->handle();
+    command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_SECONDARY;
+    command_buffer_allocate_info.commandBufferCount = 1;
+
+    VkCommandBuffer secondary_command_buffer;
+    ASSERT_VK_SUCCESS(vk::AllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &secondary_command_buffer));
+    VkCommandBufferBeginInfo command_buffer_begin_info = {};
+    VkCommandBufferInheritanceInfo command_buffer_inheritance_info = {};
+    command_buffer_inheritance_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
+    command_buffer_inheritance_info.renderPass = m_renderPass;
+    command_buffer_inheritance_info.framebuffer = m_framebuffer;
+
+    command_buffer_begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+    command_buffer_begin_info.flags =
+        VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
+    command_buffer_begin_info.pInheritanceInfo = &command_buffer_inheritance_info;
+
+    vk::BeginCommandBuffer(secondary_command_buffer, &command_buffer_begin_info);
+    VkClearAttachment color_attachment;
+    color_attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    color_attachment.clearValue.color.float32[0] = 0;
+    color_attachment.clearValue.color.float32[1] = 0;
+    color_attachment.clearValue.color.float32[2] = 0;
+    color_attachment.clearValue.color.float32[3] = 0;
+    color_attachment.colorAttachment = 0;
+    // x extent of 257 exceeds render area of 256
+    VkClearRect clear_rect = {{{0, 0}, {257, 32}}, 0, 1};
+    vk::CmdClearAttachments(secondary_command_buffer, 1, &color_attachment, 1, &clear_rect);
+    vk::EndCommandBuffer(secondary_command_buffer);
+    m_commandBuffer->begin();
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearAttachments-pRects-00016");
+    vk::CmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary_command_buffer);
+    m_errorMonitor->VerifyFound();
+
+    vk::CmdEndRenderPass(m_commandBuffer->handle());
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, PushDescriptorSetCmdPushBadArgs) {
+    TEST_DESCRIPTION("Attempt to push a push descriptor set with incorrect arguments.");
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+    } else {
+        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
+    if (push_descriptor_prop.maxPushDescriptors < 1) {
+        // Some implementations report an invalid maxPushDescriptors of 0
+        printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
+        return;
+    }
+
+    // Create ordinary and push descriptor set layout
+    VkDescriptorSetLayoutBinding binding = {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr};
+    const VkDescriptorSetLayoutObj ds_layout(m_device, {binding});
+    ASSERT_TRUE(ds_layout.initialized());
+    const VkDescriptorSetLayoutObj push_ds_layout(m_device, {binding}, VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
+    ASSERT_TRUE(push_ds_layout.initialized());
+
+    // Now use the descriptor set layouts to create a pipeline layout
+    const VkPipelineLayoutObj pipeline_layout(m_device, {&push_ds_layout, &ds_layout});
+    ASSERT_TRUE(pipeline_layout.initialized());
+
+    // Create a descriptor to push
+    const uint32_t buffer_data[4] = {4, 5, 6, 7};
+    VkConstantBufferObj buffer_obj(m_device, sizeof(buffer_data), &buffer_data);
+    ASSERT_TRUE(buffer_obj.initialized());
+
+    // Create a "write" struct, noting that the buffer_info cannot be a temporary arg (the return from write_descriptor_set
+    // references its data), and the DescriptorSet() can be temporary, because the value is ignored
+    VkDescriptorBufferInfo buffer_info = {buffer_obj.handle(), 0, VK_WHOLE_SIZE};
+
+    VkWriteDescriptorSet descriptor_write = vk_testing::Device::write_descriptor_set(
+        vk_testing::DescriptorSet(), 0, 0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, &buffer_info);
+
+    // Find address of extension call and make the call
+    PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR =
+        (PFN_vkCmdPushDescriptorSetKHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdPushDescriptorSetKHR");
+    ASSERT_TRUE(vkCmdPushDescriptorSetKHR != nullptr);
+
+    // Section 1: Queue family matching/capabilities.
+    // Create command pool on a non-graphics queue
+    const uint32_t no_gfx_qfi = m_device->QueueFamilyMatching(VK_QUEUE_COMPUTE_BIT, VK_QUEUE_GRAPHICS_BIT);
+    const uint32_t transfer_only_qfi =
+        m_device->QueueFamilyMatching(VK_QUEUE_TRANSFER_BIT, (VK_QUEUE_COMPUTE_BIT | VK_QUEUE_GRAPHICS_BIT));
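+    // no_gfx_qfi: a compute-capable family without graphics; transfer_only_qfi: a family with neither graphics nor compute support.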
+    if ((UINT32_MAX == transfer_only_qfi) && (UINT32_MAX == no_gfx_qfi)) {
+        printf("%s No compute or transfer only queue family, skipping bindpoint and queue tests.\n", kSkipPrefix);
+    } else {
+        const uint32_t err_qfi = (UINT32_MAX == no_gfx_qfi) ? transfer_only_qfi : no_gfx_qfi;
+
+        VkCommandPoolObj command_pool(m_device, err_qfi);
+        ASSERT_TRUE(command_pool.initialized());
+        VkCommandBufferObj command_buffer(m_device, &command_pool);
+        ASSERT_TRUE(command_buffer.initialized());
+        command_buffer.begin();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-vkCmdPushDescriptorSetKHR-pipelineBindPoint-00363");
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-00330");
+        if (err_qfi == transfer_only_qfi) {
+            // As this queue supports neither the gfx nor compute bindpoints, we'll get two errors
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                                 "VUID-vkCmdPushDescriptorSetKHR-commandBuffer-cmdpool");
+        }
+        vkCmdPushDescriptorSetKHR(command_buffer.handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+                                  &descriptor_write);
+        m_errorMonitor->VerifyFound();
+        command_buffer.end();
+
+        // If we succeed in testing only one condition above, we need to test the other below.
+        if ((UINT32_MAX != transfer_only_qfi) && (err_qfi != transfer_only_qfi)) {
+            // Need to test the neither compute/gfx supported case separately.
+            VkCommandPoolObj tran_command_pool(m_device, transfer_only_qfi);
+            ASSERT_TRUE(tran_command_pool.initialized());
+            VkCommandBufferObj tran_command_buffer(m_device, &tran_command_pool);
+            ASSERT_TRUE(tran_command_buffer.initialized());
+            tran_command_buffer.begin();
+
+            // We can't avoid getting *both* errors in this case
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                                 "VUID-vkCmdPushDescriptorSetKHR-pipelineBindPoint-00363");
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-00330");
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                                 "VUID-vkCmdPushDescriptorSetKHR-commandBuffer-cmdpool");
+            vkCmdPushDescriptorSetKHR(tran_command_buffer.handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+                                      &descriptor_write);
+            m_errorMonitor->VerifyFound();
+            tran_command_buffer.end();
+        }
+    }
+
+    // Push to the non-push binding
+    m_commandBuffer->begin();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPushDescriptorSetKHR-set-00365");
+    vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 1, 1,
+                              &descriptor_write);
+    m_errorMonitor->VerifyFound();
+
+    // Specify set out of bounds
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPushDescriptorSetKHR-set-00364");
+    vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 2, 1,
+                              &descriptor_write);
+    m_errorMonitor->VerifyFound();
+    m_commandBuffer->end();
+
+    // This is a test for VUID-vkCmdPushDescriptorSetKHR-commandBuffer-recording
+    // TODO: Add VALIDATION_ERROR_ code support to core_validation::ValidateCmd
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "You must call vkBeginCommandBuffer() before this call to vkCmdPushDescriptorSetKHR()");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-00330");
+    vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+                              &descriptor_write);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, SetDynScissorParamTests) {
+    TEST_DESCRIPTION("Test parameters of vkCmdSetScissor without multiViewport feature");
+
+    VkPhysicalDeviceFeatures features{};
+    ASSERT_NO_FATAL_FAILURE(Init(&features));
+
+    const VkRect2D scissor = {{0, 0}, {16, 16}};
+    const VkRect2D scissors[] = {scissor, scissor};
+
+    m_commandBuffer->begin();
+
+    // array tests
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-firstScissor-00593");
+    vk::CmdSetScissor(m_commandBuffer->handle(), 1, 1, scissors);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-scissorCount-arraylength");
+    vk::CmdSetScissor(m_commandBuffer->handle(), 0, 0, nullptr);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-scissorCount-00594");
+    vk::CmdSetScissor(m_commandBuffer->handle(), 0, 2, scissors);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-firstScissor-00593");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-scissorCount-00594");
+    vk::CmdSetScissor(m_commandBuffer->handle(), 1, 2, scissors);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-pScissors-parameter");
+    vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, nullptr);
+    m_errorMonitor->VerifyFound();
+
+    struct TestCase {
+        VkRect2D scissor;
+        std::string vuid;
+    };
+
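+    // Scissor offsets must be non-negative (-00595), and offset + extent must not overflow a signed 32-bit value (-00596/-00597).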
+    std::vector<TestCase> test_cases = {{{{-1, 0}, {16, 16}}, "VUID-vkCmdSetScissor-x-00595"},
+                                        {{{0, -1}, {16, 16}}, "VUID-vkCmdSetScissor-x-00595"},
+                                        {{{1, 0}, {INT32_MAX, 16}}, "VUID-vkCmdSetScissor-offset-00596"},
+                                        {{{INT32_MAX, 0}, {1, 16}}, "VUID-vkCmdSetScissor-offset-00596"},
+                                        {{{0, 0}, {uint32_t{INT32_MAX} + 1, 16}}, "VUID-vkCmdSetScissor-offset-00596"},
+                                        {{{0, 1}, {16, INT32_MAX}}, "VUID-vkCmdSetScissor-offset-00597"},
+                                        {{{0, INT32_MAX}, {16, 1}}, "VUID-vkCmdSetScissor-offset-00597"},
+                                        {{{0, 0}, {16, uint32_t{INT32_MAX} + 1}}, "VUID-vkCmdSetScissor-offset-00597"}};
+
+    for (const auto &test_case : test_cases) {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuid);
+        vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &test_case.scissor);
+        m_errorMonitor->VerifyFound();
+    }
+
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, SetDynScissorParamMultiviewportTests) {
+    TEST_DESCRIPTION("Test parameters of vkCmdSetScissor with multiViewport feature enabled");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    if (!m_device->phy().features().multiViewport) {
+        printf("%s VkPhysicalDeviceFeatures::multiViewport is not supported -- skipping test.\n", kSkipPrefix);
+        return;
+    }
+
+    m_commandBuffer->begin();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-scissorCount-arraylength");
+    vk::CmdSetScissor(m_commandBuffer->handle(), 0, 0, nullptr);
+    m_errorMonitor->VerifyFound();
+
+    const auto max_scissors = m_device->props.limits.maxViewports;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-pScissors-parameter");
+    vk::CmdSetScissor(m_commandBuffer->handle(), 0, max_scissors, nullptr);
+    m_errorMonitor->VerifyFound();
+
+    const uint32_t too_big_max_scissors = 65536 + 1;  // let's say this is too much to allocate
+    if (max_scissors >= too_big_max_scissors) {
+        printf("%s VkPhysicalDeviceLimits::maxViewports is too large to practically test against -- skipping part of test.\n",
+               kSkipPrefix);
+    } else {
+        const VkRect2D scissor = {{0, 0}, {16, 16}};
+        const std::vector<VkRect2D> scissors(max_scissors + 1, scissor);
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-firstScissor-00592");
+        vk::CmdSetScissor(m_commandBuffer->handle(), 0, max_scissors + 1, scissors.data());
+        m_errorMonitor->VerifyFound();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-firstScissor-00592");
+        vk::CmdSetScissor(m_commandBuffer->handle(), max_scissors, 1, scissors.data());
+        m_errorMonitor->VerifyFound();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-firstScissor-00592");
+        vk::CmdSetScissor(m_commandBuffer->handle(), 1, max_scissors, scissors.data());
+        m_errorMonitor->VerifyFound();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetScissor-scissorCount-arraylength");
+        vk::CmdSetScissor(m_commandBuffer->handle(), 1, 0, scissors.data());
+        m_errorMonitor->VerifyFound();
+    }
+}
+
+TEST_F(VkLayerTest, DrawIndirect) {
+    TEST_DESCRIPTION("Test covered valid usage for vkCmdDrawIndirect");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    const VkDynamicState dyn_states[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
+    VkPipelineDynamicStateCreateInfo dyn_state_ci = {};
+    dyn_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
+    dyn_state_ci.dynamicStateCount = size(dyn_states);
+    dyn_state_ci.pDynamicStates = dyn_states;
+    pipe.dyn_state_ci_ = dyn_state_ci;
+    pipe.InitState();
+    pipe.CreateGraphicsPipeline();
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
+                              &pipe.descriptor_set_->set_, 0, NULL);
+
+    VkViewport viewport = {0, 0, 16, 16, 0, 1};
+    vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+    VkRect2D scissor = {{0, 0}, {16, 16}};
+    vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+
+    VkBufferCreateInfo buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
+    buffer_create_info.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
+    buffer_create_info.size = sizeof(VkDrawIndirectCommand);
+    VkBufferObj draw_buffer;
+    draw_buffer.init(*m_device, buffer_create_info);
+
+    // VUID-vkCmdDrawIndirect-buffer-02709
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndirect-buffer-02709");
+    vk::CmdDrawIndirect(m_commandBuffer->handle(), draw_buffer.handle(), 0, 1, sizeof(VkDrawIndirectCommand));
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, DrawIndirectCountKHR) {
+    TEST_DESCRIPTION("Test covered valid usage for vkCmdDrawIndirectCountKHR");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME);
+    } else {
+        printf("%s VK_KHR_draw_indirect_count Extension not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkMemoryRequirements memory_requirements;
+    VkMemoryAllocateInfo memory_allocate_info = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO};
+
+    auto vkCmdDrawIndirectCountKHR =
+        (PFN_vkCmdDrawIndirectCountKHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdDrawIndirectCountKHR");
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    const VkDynamicState dyn_states[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
+    VkPipelineDynamicStateCreateInfo dyn_state_ci = {};
+    dyn_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
+    dyn_state_ci.dynamicStateCount = size(dyn_states);
+    dyn_state_ci.pDynamicStates = dyn_states;
+    pipe.dyn_state_ci_ = dyn_state_ci;
+    pipe.InitState();
+    pipe.CreateGraphicsPipeline();
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
+                              &pipe.descriptor_set_->set_, 0, NULL);
+
+    VkViewport viewport = {0, 0, 16, 16, 0, 1};
+    vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+    VkRect2D scissor = {{0, 0}, {16, 16}};
+    vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+
+    VkBufferCreateInfo buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
+    buffer_create_info.size = sizeof(VkDrawIndirectCommand);
+    buffer_create_info.usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
+    VkBuffer draw_buffer;
+    vk::CreateBuffer(m_device->device(), &buffer_create_info, nullptr, &draw_buffer);
+
+    VkBufferCreateInfo count_buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
+    count_buffer_create_info.size = sizeof(uint32_t);
+    count_buffer_create_info.usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
+    VkBufferObj count_buffer;
+    count_buffer.init(*m_device, count_buffer_create_info);
+
+    // VUID-vkCmdDrawIndirectCountKHR-buffer-02708
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndirectCountKHR-buffer-02708");
+    vkCmdDrawIndirectCountKHR(m_commandBuffer->handle(), draw_buffer, 0, count_buffer.handle(), 0, 1,
+                              sizeof(VkDrawIndirectCommand));
+    m_errorMonitor->VerifyFound();
+
+    vk::GetBufferMemoryRequirements(m_device->device(), draw_buffer, &memory_requirements);
+    memory_allocate_info.allocationSize = memory_requirements.size;
+    m_device->phy().set_memory_type(memory_requirements.memoryTypeBits, &memory_allocate_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+    VkDeviceMemory draw_buffer_memory;
+    vk::AllocateMemory(m_device->device(), &memory_allocate_info, NULL, &draw_buffer_memory);
+    vk::BindBufferMemory(m_device->device(), draw_buffer, draw_buffer_memory, 0);
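+    // draw_buffer now has memory bound, so the remaining cases exercise the count-buffer/offset/stride VUIDs rather than -02708.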
+
+    VkBuffer count_buffer_unbound;
+    vk::CreateBuffer(m_device->device(), &count_buffer_create_info, nullptr, &count_buffer_unbound);
+
+    // VUID-vkCmdDrawIndirectCountKHR-countBuffer-02714
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndirectCountKHR-countBuffer-02714");
+    vkCmdDrawIndirectCountKHR(m_commandBuffer->handle(), draw_buffer, 0, count_buffer_unbound, 0, 1, sizeof(VkDrawIndirectCommand));
+    m_errorMonitor->VerifyFound();
+
+    // VUID-vkCmdDrawIndirectCountKHR-offset-02710
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndirectCountKHR-offset-02710");
+    vkCmdDrawIndirectCountKHR(m_commandBuffer->handle(), draw_buffer, 1, count_buffer.handle(), 0, 1,
+                              sizeof(VkDrawIndirectCommand));
+    m_errorMonitor->VerifyFound();
+
+    // VUID-vkCmdDrawIndirectCountKHR-countBufferOffset-02716
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndirectCountKHR-countBufferOffset-02716");
+    vkCmdDrawIndirectCountKHR(m_commandBuffer->handle(), draw_buffer, 0, count_buffer.handle(), 1, 1,
+                              sizeof(VkDrawIndirectCommand));
+    m_errorMonitor->VerifyFound();
+
+    // VUID-vkCmdDrawIndirectCountKHR-stride-03110
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndirectCountKHR-stride-03110");
+    vkCmdDrawIndirectCountKHR(m_commandBuffer->handle(), draw_buffer, 0, count_buffer.handle(), 0, 1, 1);
+    m_errorMonitor->VerifyFound();
+
+    // TODO: These covered VUIDs aren't tested. There is also no test coverage for the core Vulkan 1.0 vk::CmdDraw* equivalent of
+    // these:
+    //     VUID-vkCmdDrawIndirectCountKHR-renderPass-02684
+    //     VUID-vkCmdDrawIndirectCountKHR-subpass-02685
+    //     VUID-vkCmdDrawIndirectCountKHR-commandBuffer-02701
+
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+
+    vk::DestroyBuffer(m_device->device(), draw_buffer, 0);
+    vk::DestroyBuffer(m_device->device(), count_buffer_unbound, 0);
+
+    vk::FreeMemory(m_device->device(), draw_buffer_memory, 0);
+}
+
+TEST_F(VkLayerTest, DrawIndexedIndirectCountKHR) {
+    TEST_DESCRIPTION("Test covered valid usage for vkCmdDrawIndexedIndirectCountKHR");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME);
+    } else {
+        printf("%s VK_KHR_draw_indirect_count Extension not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    auto vkCmdDrawIndexedIndirectCountKHR =
+        (PFN_vkCmdDrawIndexedIndirectCountKHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdDrawIndexedIndirectCountKHR");
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    const VkDynamicState dyn_states[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
+    VkPipelineDynamicStateCreateInfo dyn_state_ci = {};
+    dyn_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
+    dyn_state_ci.dynamicStateCount = size(dyn_states);
+    dyn_state_ci.pDynamicStates = dyn_states;
+    pipe.dyn_state_ci_ = dyn_state_ci;
+    pipe.InitState();
+    pipe.CreateGraphicsPipeline();
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
+                              &pipe.descriptor_set_->set_, 0, NULL);
+
+    VkViewport viewport = {0, 0, 16, 16, 0, 1};
+    vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+    VkRect2D scissor = {{0, 0}, {16, 16}};
+    vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+
+    VkBufferCreateInfo buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
+    buffer_create_info.size = sizeof(VkDrawIndexedIndirectCommand);
+    buffer_create_info.usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
+    VkBufferObj draw_buffer;
+    draw_buffer.init(*m_device, buffer_create_info);
+
+    VkBufferCreateInfo count_buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
+    count_buffer_create_info.size = sizeof(uint32_t);
+    count_buffer_create_info.usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
+    VkBufferObj count_buffer;
+    count_buffer.init(*m_device, count_buffer_create_info);
+
+    VkBufferCreateInfo index_buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
+    index_buffer_create_info.size = sizeof(uint32_t);
+    index_buffer_create_info.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
+    VkBufferObj index_buffer;
+    index_buffer.init(*m_device, index_buffer_create_info);
+
+    // VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-02701 (partial - only tests whether the index buffer is bound)
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-02701");
+    vkCmdDrawIndexedIndirectCountKHR(m_commandBuffer->handle(), draw_buffer.handle(), 0, count_buffer.handle(), 0, 1,
+                                     sizeof(VkDrawIndexedIndirectCommand));
+    m_errorMonitor->VerifyFound();
+
+    vk::CmdBindIndexBuffer(m_commandBuffer->handle(), index_buffer.handle(), 0, VK_INDEX_TYPE_UINT32);
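+    // With the index buffer bound, the remaining cases should fail on the unbound draw/count buffers and bad offsets rather than on the missing index buffer.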
+
+    VkBuffer draw_buffer_unbound;
+    vk::CreateBuffer(m_device->device(), &count_buffer_create_info, nullptr, &draw_buffer_unbound);
+
+    // VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-02708
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndexedIndirectCountKHR-buffer-02708");
+    vkCmdDrawIndexedIndirectCountKHR(m_commandBuffer->handle(), draw_buffer_unbound, 0, count_buffer.handle(), 0, 1,
+                                     sizeof(VkDrawIndexedIndirectCommand));
+    m_errorMonitor->VerifyFound();
+
+    VkBuffer count_buffer_unbound;
+    vk::CreateBuffer(m_device->device(), &count_buffer_create_info, nullptr, &count_buffer_unbound);
+
+    // VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-02714
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndexedIndirectCountKHR-countBuffer-02714");
+    vkCmdDrawIndexedIndirectCountKHR(m_commandBuffer->handle(), draw_buffer.handle(), 0, count_buffer_unbound, 0, 1,
+                                     sizeof(VkDrawIndexedIndirectCommand));
+    m_errorMonitor->VerifyFound();
+
+    // VUID-vkCmdDrawIndexedIndirectCountKHR-offset-02710
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndexedIndirectCountKHR-offset-02710");
+    vkCmdDrawIndexedIndirectCountKHR(m_commandBuffer->handle(), draw_buffer.handle(), 1, count_buffer.handle(), 0, 1,
+                                     sizeof(VkDrawIndexedIndirectCommand));
+    m_errorMonitor->VerifyFound();
+
+    // VUID-vkCmdDrawIndexedIndirectCountKHR-countBufferOffset-02716
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdDrawIndexedIndirectCountKHR-countBufferOffset-02716");
+    vkCmdDrawIndexedIndirectCountKHR(m_commandBuffer->handle(), draw_buffer.handle(), 0, count_buffer.handle(), 1, 1,
+                                     sizeof(VkDrawIndexedIndirectCommand));
+    m_errorMonitor->VerifyFound();
+
+    // VUID-vkCmdDrawIndexedIndirectCountKHR-stride-03142
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndexedIndirectCountKHR-stride-03142");
+    vkCmdDrawIndexedIndirectCountKHR(m_commandBuffer->handle(), draw_buffer.handle(), 0, count_buffer.handle(), 0, 1, 1);
+    m_errorMonitor->VerifyFound();
+
+    // TODO: These covered VUIDs aren't tested. There is also no test coverage for the core Vulkan 1.0 vk::CmdDraw* equivalent of
+    // these:
+    //     VUID-vkCmdDrawIndexedIndirectCountKHR-renderPass-02684
+    //     VUID-vkCmdDrawIndexedIndirectCountKHR-subpass-02685
+    //     VUID-vkCmdDrawIndexedIndirectCountKHR-commandBuffer-02701 (partial)
+
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+
+    vk::DestroyBuffer(m_device->device(), draw_buffer_unbound, 0);
+    vk::DestroyBuffer(m_device->device(), count_buffer_unbound, 0);
+}
+
+TEST_F(VkLayerTest, ExclusiveScissorNV) {
+    TEST_DESCRIPTION("Test VK_NV_scissor_exclusive with multiViewport disabled.");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    std::array<const char *, 1> required_device_extensions = {{VK_NV_SCISSOR_EXCLUSIVE_EXTENSION_NAME}};
+    for (auto device_extension : required_device_extensions) {
+        if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
+            m_device_extension_names.push_back(device_extension);
+        } else {
+            printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
+            return;
+        }
+    }
+
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+    // Create a device that enables exclusive scissor but disables multiViewport
+    auto exclusive_scissor_features = lvl_init_struct<VkPhysicalDeviceExclusiveScissorFeaturesNV>();
+    auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&exclusive_scissor_features);
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+
+    features2.features.multiViewport = VK_FALSE;
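+    // With multiViewport disabled, an exclusiveScissorCount greater than 1 and a non-zero firstExclusiveScissor are expected to be flagged below.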
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    if (m_device->phy().properties().limits.maxViewports < 2) {
+        printf("%s Device doesn't support the necessary number of viewports, skipping test.\n", kSkipPrefix);
+        return;
+    }
+
+    // Based on PSOViewportStateTests
+    {
+        VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
+        VkViewport viewports[] = {viewport, viewport};
+        VkRect2D scissor = {{0, 0}, {64, 64}};
+        VkRect2D scissors[100] = {scissor, scissor};
+
+        using std::vector;
+        struct TestCase {
+            uint32_t viewport_count;
+            VkViewport *viewports;
+            uint32_t scissor_count;
+            VkRect2D *scissors;
+            uint32_t exclusive_scissor_count;
+            VkRect2D *exclusive_scissors;
+
+            vector<std::string> vuids;
+        };
+
+        vector<TestCase> test_cases = {
+            {1,
+             viewports,
+             1,
+             scissors,
+             2,
+             scissors,
+             {"VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-exclusiveScissorCount-02027",
+              "VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-exclusiveScissorCount-02029"}},
+            {1,
+             viewports,
+             1,
+             scissors,
+             100,
+             scissors,
+             {"VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-exclusiveScissorCount-02027",
+              "VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-exclusiveScissorCount-02028",
+              "VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-exclusiveScissorCount-02029"}},
+            {1,
+             viewports,
+             1,
+             scissors,
+             1,
+             nullptr,
+             {"VUID-VkPipelineViewportExclusiveScissorStateCreateInfoNV-pDynamicStates-02030"}},
+        };
+
+        for (const auto &test_case : test_cases) {
+            VkPipelineViewportExclusiveScissorStateCreateInfoNV exc = {
+                VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV};
+
+            const auto break_vp = [&test_case, &exc](CreatePipelineHelper &helper) {
+                helper.vp_state_ci_.viewportCount = test_case.viewport_count;
+                helper.vp_state_ci_.pViewports = test_case.viewports;
+                helper.vp_state_ci_.scissorCount = test_case.scissor_count;
+                helper.vp_state_ci_.pScissors = test_case.scissors;
+                helper.vp_state_ci_.pNext = &exc;
+
+                exc.exclusiveScissorCount = test_case.exclusive_scissor_count;
+                exc.pExclusiveScissors = test_case.exclusive_scissors;
+            };
+            CreatePipelineHelper::OneshotTest(*this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuids);
+        }
+    }
+
+    // Based on SetDynScissorParamTests
+    {
+        auto vkCmdSetExclusiveScissorNV =
+            (PFN_vkCmdSetExclusiveScissorNV)vk::GetDeviceProcAddr(m_device->device(), "vkCmdSetExclusiveScissorNV");
+
+        const VkRect2D scissor = {{0, 0}, {16, 16}};
+        const VkRect2D scissors[] = {scissor, scissor};
+
+        m_commandBuffer->begin();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-vkCmdSetExclusiveScissorNV-firstExclusiveScissor-02035");
+        vkCmdSetExclusiveScissorNV(m_commandBuffer->handle(), 1, 1, scissors);
+        m_errorMonitor->VerifyFound();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "vkCmdSetExclusiveScissorNV: parameter exclusiveScissorCount must be greater than 0");
+        vkCmdSetExclusiveScissorNV(m_commandBuffer->handle(), 0, 0, nullptr);
+        m_errorMonitor->VerifyFound();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-vkCmdSetExclusiveScissorNV-exclusiveScissorCount-02036");
+        vkCmdSetExclusiveScissorNV(m_commandBuffer->handle(), 0, 2, scissors);
+        m_errorMonitor->VerifyFound();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "vkCmdSetExclusiveScissorNV: parameter exclusiveScissorCount must be greater than 0");
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-vkCmdSetExclusiveScissorNV-firstExclusiveScissor-02035");
+        vkCmdSetExclusiveScissorNV(m_commandBuffer->handle(), 1, 0, scissors);
+        m_errorMonitor->VerifyFound();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-vkCmdSetExclusiveScissorNV-firstExclusiveScissor-02035");
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-vkCmdSetExclusiveScissorNV-exclusiveScissorCount-02036");
+        vkCmdSetExclusiveScissorNV(m_commandBuffer->handle(), 1, 2, scissors);
+        m_errorMonitor->VerifyFound();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "vkCmdSetExclusiveScissorNV: required parameter pExclusiveScissors specified as NULL");
+        vkCmdSetExclusiveScissorNV(m_commandBuffer->handle(), 0, 1, nullptr);
+        m_errorMonitor->VerifyFound();
+
+        struct TestCase {
+            VkRect2D scissor;
+            std::string vuid;
+        };
+
+        std::vector<TestCase> test_cases = {
+            {{{-1, 0}, {16, 16}}, "VUID-vkCmdSetExclusiveScissorNV-x-02037"},
+            {{{0, -1}, {16, 16}}, "VUID-vkCmdSetExclusiveScissorNV-x-02037"},
+            {{{1, 0}, {INT32_MAX, 16}}, "VUID-vkCmdSetExclusiveScissorNV-offset-02038"},
+            {{{INT32_MAX, 0}, {1, 16}}, "VUID-vkCmdSetExclusiveScissorNV-offset-02038"},
+            {{{0, 0}, {uint32_t{INT32_MAX} + 1, 16}}, "VUID-vkCmdSetExclusiveScissorNV-offset-02038"},
+            {{{0, 1}, {16, INT32_MAX}}, "VUID-vkCmdSetExclusiveScissorNV-offset-02039"},
+            {{{0, INT32_MAX}, {16, 1}}, "VUID-vkCmdSetExclusiveScissorNV-offset-02039"},
+            {{{0, 0}, {16, uint32_t{INT32_MAX} + 1}}, "VUID-vkCmdSetExclusiveScissorNV-offset-02039"}};
+
+        for (const auto &test_case : test_cases) {
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuid);
+            vkCmdSetExclusiveScissorNV(m_commandBuffer->handle(), 0, 1, &test_case.scissor);
+            m_errorMonitor->VerifyFound();
+        }
+
+        m_commandBuffer->end();
+    }
+}
+
+TEST_F(VkLayerTest, MeshShaderNV) {
+    TEST_DESCRIPTION("Test VK_NV_mesh_shader.");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    std::array<const char *, 1> required_device_extensions = {{VK_NV_MESH_SHADER_EXTENSION_NAME}};
+    for (auto device_extension : required_device_extensions) {
+        if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
+            m_device_extension_names.push_back(device_extension);
+        } else {
+            printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
+            return;
+        }
+    }
+
+    if (DeviceIsMockICD() || DeviceSimulation()) {
+        printf("%sNot suppored by MockICD, skipping tests\n", kSkipPrefix);
+        return;
+    }
+
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+    // Create a device that enables mesh_shader
+    auto mesh_shader_features = lvl_init_struct<VkPhysicalDeviceMeshShaderFeaturesNV>();
+    auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&mesh_shader_features);
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+    features2.features.multiDrawIndirect = VK_FALSE;
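+    // multiDrawIndirect is left disabled so the drawCount of 2 used below should trigger -02718 in addition to the stride error.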
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    static const char vertShaderText[] =
+        "#version 450\n"
+        "vec2 vertices[3];\n"
+        "void main() {\n"
+        "      vertices[0] = vec2(-1.0, -1.0);\n"
+        "      vertices[1] = vec2( 1.0, -1.0);\n"
+        "      vertices[2] = vec2( 0.0,  1.0);\n"
+        "   gl_Position = vec4(vertices[gl_VertexIndex % 3], 0.0, 1.0);\n"
+        "   gl_PointSize = 1.0f;\n"
+        "}\n";
+
+    static const char meshShaderText[] =
+        "#version 450\n"
+        "#extension GL_NV_mesh_shader : require\n"
+        "layout(local_size_x = 1) in;\n"
+        "layout(max_vertices = 3) out;\n"
+        "layout(max_primitives = 1) out;\n"
+        "layout(triangles) out;\n"
+        "void main() {\n"
+        "      gl_MeshVerticesNV[0].gl_Position = vec4(-1.0, -1.0, 0, 1);\n"
+        "      gl_MeshVerticesNV[1].gl_Position = vec4( 1.0, -1.0, 0, 1);\n"
+        "      gl_MeshVerticesNV[2].gl_Position = vec4( 0.0,  1.0, 0, 1);\n"
+        "      gl_PrimitiveIndicesNV[0] = 0;\n"
+        "      gl_PrimitiveIndicesNV[1] = 1;\n"
+        "      gl_PrimitiveIndicesNV[2] = 2;\n"
+        "      gl_PrimitiveCountNV = 1;\n"
+        "}\n";
+
+    VkShaderObj vs(m_device, vertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj ms(m_device, meshShaderText, VK_SHADER_STAGE_MESH_BIT_NV, this);
+    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    // Test pipeline creation
+    {
+        // can't mix mesh with vertex
+        const auto break_vp = [&](CreatePipelineHelper &helper) {
+            helper.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo(), ms.GetStageCreateInfo()};
+        };
+        CreatePipelineHelper::OneshotTest(*this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                          vector<std::string>({"VUID-VkGraphicsPipelineCreateInfo-pStages-02095"}));
+
+        // vertex or mesh must be present
+        const auto break_vp2 = [&](CreatePipelineHelper &helper) { helper.shader_stages_ = {fs.GetStageCreateInfo()}; };
+        CreatePipelineHelper::OneshotTest(*this, break_vp2, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                          vector<std::string>({"VUID-VkGraphicsPipelineCreateInfo-stage-02096"}));
+
+        // vertexinput and inputassembly must be valid when vertex stage is present
+        const auto break_vp3 = [&](CreatePipelineHelper &helper) {
+            helper.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
+            helper.gp_ci_.pVertexInputState = nullptr;
+            helper.gp_ci_.pInputAssemblyState = nullptr;
+        };
+        CreatePipelineHelper::OneshotTest(*this, break_vp3, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                          vector<std::string>({"VUID-VkGraphicsPipelineCreateInfo-pStages-02097",
+                                                               "VUID-VkGraphicsPipelineCreateInfo-pStages-02098"}));
+    }
+
+    PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV =
+        (PFN_vkCmdDrawMeshTasksIndirectNV)vk::GetInstanceProcAddr(instance(), "vkCmdDrawMeshTasksIndirectNV");
+
+    VkBufferCreateInfo buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
+    buffer_create_info.size = sizeof(uint32_t);
+    buffer_create_info.usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
+    VkBuffer buffer;
+    VkResult result = vk::CreateBuffer(m_device->device(), &buffer_create_info, nullptr, &buffer);
+    ASSERT_VK_SUCCESS(result);
+
+    m_commandBuffer->begin();
+
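+    // A drawCount of 2 with a stride of 0 is expected to violate the stride requirement (-02146) and, with multiDrawIndirect disabled, -02718 as well.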
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02146");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawMeshTasksIndirectNV-drawCount-02718");
+    vkCmdDrawMeshTasksIndirectNV(m_commandBuffer->handle(), buffer, 0, 2, 0);
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->end();
+
+    vk::DestroyBuffer(m_device->device(), buffer, 0);
+}
+
+TEST_F(VkLayerTest, MeshShaderDisabledNV) {
+    TEST_DESCRIPTION("Test VK_NV_mesh_shader VUs with NV_mesh_shader disabled.");
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkEvent event;
+    VkEventCreateInfo event_create_info{};
+    event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+    vk::CreateEvent(m_device->device(), &event_create_info, nullptr, &event);
+
+    m_commandBuffer->begin();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetEvent-stageMask-02107");
+    vk::CmdSetEvent(m_commandBuffer->handle(), event, VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetEvent-stageMask-02108");
+    vk::CmdSetEvent(m_commandBuffer->handle(), event, VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdResetEvent-stageMask-02109");
+    vk::CmdResetEvent(m_commandBuffer->handle(), event, VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdResetEvent-stageMask-02110");
+    vk::CmdResetEvent(m_commandBuffer->handle(), event, VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdWaitEvents-srcStageMask-02111");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdWaitEvents-dstStageMask-02113");
+    vk::CmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV,
+                      VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV, 0, nullptr, 0, nullptr, 0, nullptr);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdWaitEvents-srcStageMask-02112");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdWaitEvents-dstStageMask-02114");
+    vk::CmdWaitEvents(m_commandBuffer->handle(), 1, &event, VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV,
+                      VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV, 0, nullptr, 0, nullptr, 0, nullptr);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-srcStageMask-02115");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-dstStageMask-02117");
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV, VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV, 0,
+                           0, nullptr, 0, nullptr, 0, nullptr);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-srcStageMask-02116");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-dstStageMask-02118");
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV, VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV, 0,
+                           0, nullptr, 0, nullptr, 0, nullptr);
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->end();
+
+    VkSemaphoreCreateInfo semaphore_create_info = {};
+    semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+    VkSemaphore semaphore;
+    ASSERT_VK_SUCCESS(vk::CreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore));
+
+    VkPipelineStageFlags stage_flags = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV | VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV;
+    VkSubmitInfo submit_info = {};
+
+    // Signal the semaphore so the next test can wait on it.
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.signalSemaphoreCount = 1;
+    submit_info.pSignalSemaphores = &semaphore;
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyNotFound();
+
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.signalSemaphoreCount = 0;
+    submit_info.pSignalSemaphores = nullptr;
+    submit_info.waitSemaphoreCount = 1;
+    submit_info.pWaitSemaphores = &semaphore;
+    submit_info.pWaitDstStageMask = &stage_flags;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSubmitInfo-pWaitDstStageMask-02089");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSubmitInfo-pWaitDstStageMask-02090");
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+
+    vk::QueueWaitIdle(m_device->m_queue);
+
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkPipelineShaderStageCreateInfo meshStage = {VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO};
+    meshStage = vs.GetStageCreateInfo();
+    meshStage.stage = VK_SHADER_STAGE_MESH_BIT_NV;
+    VkPipelineShaderStageCreateInfo taskStage = {VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO};
+    taskStage = vs.GetStageCreateInfo();
+    taskStage.stage = VK_SHADER_STAGE_TASK_BIT_NV;
+
+    // Mesh and task shader stages are rejected because the mesh and task shader features are not enabled
+    const auto break_vp = [&](CreatePipelineHelper &helper) {
+        helper.shader_stages_ = {meshStage, taskStage, vs.GetStageCreateInfo()};
+    };
+    CreatePipelineHelper::OneshotTest(
+        *this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        vector<std::string>({"VUID-VkPipelineShaderStageCreateInfo-pName-00707", "VUID-VkPipelineShaderStageCreateInfo-pName-00707",
+                             "VUID-VkPipelineShaderStageCreateInfo-stage-02091",
+                             "VUID-VkPipelineShaderStageCreateInfo-stage-02092"}));
+
+    vk::DestroyEvent(m_device->device(), event, nullptr);
+    vk::DestroySemaphore(m_device->device(), semaphore, nullptr);
+}
+
+TEST_F(VkLayerTest, ViewportWScalingNV) {
+    TEST_DESCRIPTION("Verify VK_NV_clip_space_w_scaling");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    VkPhysicalDeviceFeatures device_features = {};
+    ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
+
+    if (!device_features.multiViewport) {
+        printf("%s VkPhysicalDeviceFeatures::multiViewport is not supported, skipping tests\n", kSkipPrefix);
+        return;
+    }
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME);
+    } else {
+        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState(&device_features));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    auto vkCmdSetViewportWScalingNV =
+        reinterpret_cast<PFN_vkCmdSetViewportWScalingNV>(vk::GetDeviceProcAddr(m_device->device(), "vkCmdSetViewportWScalingNV"));
+
+    const char vs_src[] = R"(
+        #version 450
+        const vec2 positions[] = { vec2(-1.0f,  1.0f),
+                                   vec2( 1.0f,  1.0f),
+                                   vec2(-1.0f, -1.0f),
+                                   vec2( 1.0f, -1.0f) };
+        out gl_PerVertex {
+            vec4 gl_Position;
+        };
+
+        void main() {
+            gl_Position = vec4(positions[gl_VertexIndex % 4], 0.0f, 1.0f);
+        })";
+
+    const char fs_src[] = R"(
+        #version 450
+        layout(location = 0) out vec4 outColor;
+
+        void main() {
+            outColor = vec4(0.0f, 1.0f, 0.0f, 1.0f);
+        })";
+
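+    // Four viewports/scissors and matching per-viewport w-scaling factors (multiViewport support was checked above).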
+    const std::vector<VkViewport> vp = {
+        {0.0f, 0.0f, 64.0f, 64.0f}, {0.0f, 0.0f, 64.0f, 64.0f}, {0.0f, 0.0f, 64.0f, 64.0f}, {0.0f, 0.0f, 64.0f, 64.0f}};
+    const std::vector<VkRect2D> sc = {{{0, 0}, {32, 32}}, {{32, 0}, {32, 32}}, {{0, 32}, {32, 32}}, {{32, 32}, {32, 32}}};
+    const std::vector<VkViewportWScalingNV> scale = {{-0.2f, -0.2f}, {0.2f, -0.2f}, {-0.2f, 0.2f}, {0.2f, 0.2f}};
+
+    const uint32_t vp_count = static_cast<uint32_t>(vp.size());
+
+    VkPipelineViewportWScalingStateCreateInfoNV vpsi = {VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV};
+    vpsi.viewportWScalingEnable = VK_TRUE;
+    vpsi.viewportCount = vp_count;
+    vpsi.pViewportWScalings = scale.data();
+
+    VkPipelineViewportStateCreateInfo vpci = {VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO};
+    vpci.viewportCount = vp_count;
+    vpci.pViewports = vp.data();
+    vpci.scissorCount = vp_count;
+    vpci.pScissors = sc.data();
+    vpci.pNext = &vpsi;
+
+    const auto set_vpci = [&vpci](CreatePipelineHelper &helper) { helper.vp_state_ci_ = vpci; };
+
+    // Make sure no errors show up when creating the pipeline with w-scaling enabled
+    CreatePipelineHelper::OneshotTest(*this, set_vpci, VK_DEBUG_REPORT_ERROR_BIT_EXT, vector<std::string>(), true);
+
+    // Create pipeline with w-scaling enabled but without a valid scaling array
+    vpsi.pViewportWScalings = nullptr;
+    CreatePipelineHelper::OneshotTest(*this, set_vpci, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      vector<std::string>({"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-01715"}));
+
+    vpsi.pViewportWScalings = scale.data();
+
+    // Create pipeline with w-scaling enabled but without matching viewport counts
+    vpsi.viewportCount = 1;
+    CreatePipelineHelper::OneshotTest(*this, set_vpci, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      vector<std::string>({"VUID-VkPipelineViewportStateCreateInfo-viewportWScalingEnable-01726"}));
+
+    const VkPipelineLayoutObj pl(m_device);
+
+    VkShaderObj vs(m_device, vs_src, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, fs_src, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    VkPipelineObj pipe(m_device);
+    pipe.AddDefaultColorAttachment();
+    pipe.AddShader(&vs);
+    pipe.AddShader(&fs);
+    pipe.SetViewport(vp);
+    pipe.SetScissor(sc);
+    pipe.CreateVKPipeline(pl.handle(), renderPass());
+
+    VkPipelineObj pipeDynWScale(m_device);
+    pipeDynWScale.AddDefaultColorAttachment();
+    pipeDynWScale.AddShader(&vs);
+    pipeDynWScale.AddShader(&fs);
+    pipeDynWScale.SetViewport(vp);
+    pipeDynWScale.SetScissor(sc);
+    pipeDynWScale.MakeDynamic(VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV);
+    pipeDynWScale.CreateVKPipeline(pl.handle(), renderPass());
+
+    m_commandBuffer->begin();
+
+    // Bind pipeline without dynamic w-scaling enabled
+    m_errorMonitor->ExpectSuccess();
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+    m_errorMonitor->VerifyNotFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewportWScalingNV-None-01322");
+    vkCmdSetViewportWScalingNV(m_commandBuffer->handle(), 0, vp_count, scale.data());
+    m_errorMonitor->VerifyFound();
+
+    // Bind pipeline that has dynamic w-scaling enabled
+    m_errorMonitor->ExpectSuccess();
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeDynWScale.handle());
+    m_errorMonitor->VerifyNotFound();
+
+    const auto max_vps = m_device->props.limits.maxViewports;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewportWScalingNV-firstViewport-01323");
+    vkCmdSetViewportWScalingNV(m_commandBuffer->handle(), max_vps, vp_count, scale.data());
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewportWScalingNV-firstViewport-01324");
+    vkCmdSetViewportWScalingNV(m_commandBuffer->handle(), 1, max_vps, scale.data());
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->ExpectSuccess();
+    vkCmdSetViewportWScalingNV(m_commandBuffer->handle(), 0, vp_count, scale.data());
+    m_errorMonitor->VerifyNotFound();
+
+    m_commandBuffer->end();
+}
diff --git a/src/third_party/vulkan-validation-layers/src/tests/vklayertests_descriptor_renderpass_framebuffer.cpp b/src/third_party/vulkan-validation-layers/src/tests/vklayertests_descriptor_renderpass_framebuffer.cpp
new file mode 100644
index 0000000..7e74eb7
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/vklayertests_descriptor_renderpass_framebuffer.cpp
@@ -0,0 +1,7869 @@
+/*
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2019 Google, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Author: Chia-I Wu <olvaffe@gmail.com>
+ * Author: Chris Forbes <chrisf@ijw.co.nz>
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Mike Stroyan <mike@LunarG.com>
+ * Author: Tobin Ehlis <tobine@google.com>
+ * Author: Tony Barbour <tony@LunarG.com>
+ * Author: Cody Northrop <cnorthrop@google.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ * Author: Jeremy Kniager <jeremyk@lunarg.com>
+ * Author: Shannon McPherson <shannon@lunarg.com>
+ * Author: John Zulauf <jzulauf@lunarg.com>
+ */
+
+#include "cast_utils.h"
+#include "layer_validation_tests.h"
+
+TEST_F(VkLayerTest, GpuValidationArrayOOBGraphicsShaders) {
+    TEST_DESCRIPTION(
+        "GPU validation: Verify detection of out-of-bounds descriptor array indexing and use of uninitialized descriptors.");
+
+    VkValidationFeatureEnableEXT enables[] = {VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT};
+    VkValidationFeaturesEXT features = {};
+    features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
+    features.enabledValidationFeatureCount = 1;
+    features.pEnabledValidationFeatures = enables;
+    bool descriptor_indexing = CheckDescriptorIndexingSupportAndInitFramework(this, m_instance_extension_names,
+                                                                              m_device_extension_names, &features, m_errorMonitor);
+    if (DeviceIsMockICD() || DeviceSimulation()) {
+        printf("%s GPU-Assisted validation test requires a driver that can draw.\n", kSkipPrefix);
+        return;
+    }
+
+    VkPhysicalDeviceFeatures2KHR features2 = {};
+    auto indexing_features = lvl_init_struct<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>();
+    if (descriptor_indexing) {
+        PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+            (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+        ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+        features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&indexing_features);
+        vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+
+        if (!indexing_features.runtimeDescriptorArray || !indexing_features.descriptorBindingSampledImageUpdateAfterBind ||
+            !indexing_features.descriptorBindingPartiallyBound || !indexing_features.descriptorBindingVariableDescriptorCount ||
+            !indexing_features.shaderSampledImageArrayNonUniformIndexing ||
+            !indexing_features.shaderStorageBufferArrayNonUniformIndexing) {
+            printf("Not all descriptor indexing features supported, skipping descriptor indexing tests\n");
+            descriptor_indexing = false;
+        }
+    }
+
+    VkCommandPoolCreateFlags pool_flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2, pool_flags));
+    if (m_device->props.apiVersion < VK_API_VERSION_1_1) {
+        printf("%s GPU-Assisted validation test requires Vulkan 1.1+.\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // Make a uniform buffer, passed to the shader, that holds the invalid array index.
+    uint32_t qfi = 0;
+    VkBufferCreateInfo bci = {};
+    bci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    bci.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+    bci.size = 1024;
+    bci.queueFamilyIndexCount = 1;
+    bci.pQueueFamilyIndices = &qfi;
+    VkBufferObj buffer0;
+    VkMemoryPropertyFlags mem_props = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+    buffer0.init(*m_device, bci, mem_props);
+
+    bci.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
+    // Make another buffer to populate the buffer array to be indexed
+    VkBufferObj buffer1;
+    buffer1.init(*m_device, bci, mem_props);
+
+    void *layout_pnext = nullptr;
+    void *allocate_pnext = nullptr;
+    auto pool_create_flags = 0;
+    auto layout_create_flags = 0;
+    VkDescriptorBindingFlagsEXT ds_binding_flags[2] = {};
+    VkDescriptorSetLayoutBindingFlagsCreateInfoEXT layout_createinfo_binding_flags[1] = {};
+    if (descriptor_indexing) {
+        ds_binding_flags[0] = 0;
+        ds_binding_flags[1] = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT;
+
+        layout_createinfo_binding_flags[0].sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT;
+        layout_createinfo_binding_flags[0].pNext = NULL;
+        layout_createinfo_binding_flags[0].bindingCount = 2;
+        layout_createinfo_binding_flags[0].pBindingFlags = ds_binding_flags;
+        layout_create_flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT;
+        pool_create_flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT;
+        layout_pnext = layout_createinfo_binding_flags;
+    }
+
+    // Prepare descriptors
+    OneOffDescriptorSet descriptor_set(m_device,
+                                       {
+                                           {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                           {1, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 6, VK_SHADER_STAGE_ALL, nullptr},
+                                       },
+                                       layout_create_flags, layout_pnext, pool_create_flags);
+
+    VkDescriptorSetVariableDescriptorCountAllocateInfoEXT variable_count = {};
+    uint32_t desc_counts;
+    if (descriptor_indexing) {
+        layout_create_flags = 0;
+        pool_create_flags = 0;
+        ds_binding_flags[1] =
+            VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT | VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT;
+        desc_counts = 6;  // We'll reserve 8 spaces in the layout, but the descriptor will only use 6
+        variable_count.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT;
+        variable_count.descriptorSetCount = 1;
+        variable_count.pDescriptorCounts = &desc_counts;
+        allocate_pnext = &variable_count;
+    }
+
+    OneOffDescriptorSet descriptor_set_variable(m_device,
+                                                {
+                                                    {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                                    {1, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 8, VK_SHADER_STAGE_ALL, nullptr},
+                                                },
+                                                layout_create_flags, layout_pnext, pool_create_flags, allocate_pnext);
+
+    const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});
+    const VkPipelineLayoutObj pipeline_layout_variable(m_device, {&descriptor_set_variable.layout_});
+    VkTextureObj texture(m_device, nullptr);
+    VkSamplerObj sampler(m_device);
+
+    VkDescriptorBufferInfo buffer_info[1] = {};
+    buffer_info[0].buffer = buffer0.handle();
+    buffer_info[0].offset = 0;
+    buffer_info[0].range = sizeof(uint32_t);
+
+    VkDescriptorImageInfo image_info[6] = {};
+    for (int i = 0; i < 6; i++) {
+        image_info[i] = texture.DescriptorImageInfo();
+        image_info[i].sampler = sampler.handle();
+        image_info[i].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+    }
+
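+    // Write binding 0 (uniform buffer) and binding 1 (samplers); with descriptor indexing, only 5 of the 6 sampler
+    // elements are written so that use of the last, uninitialized element can be detected.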
+    VkWriteDescriptorSet descriptor_writes[2] = {};
+    descriptor_writes[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_writes[0].dstSet = descriptor_set.set_;
+    descriptor_writes[0].dstBinding = 0;
+    descriptor_writes[0].descriptorCount = 1;
+    descriptor_writes[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    descriptor_writes[0].pBufferInfo = buffer_info;
+    descriptor_writes[1].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_writes[1].dstSet = descriptor_set.set_;
+    descriptor_writes[1].dstBinding = 1;
+    if (descriptor_indexing)
+        descriptor_writes[1].descriptorCount = 5;  // Intentionally don't write index 5
+    else
+        descriptor_writes[1].descriptorCount = 6;
+    descriptor_writes[1].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+    descriptor_writes[1].pImageInfo = image_info;
+    vk::UpdateDescriptorSets(m_device->device(), 2, descriptor_writes, 0, NULL);
+    if (descriptor_indexing) {
+        descriptor_writes[0].dstSet = descriptor_set_variable.set_;
+        descriptor_writes[1].dstSet = descriptor_set_variable.set_;
+        vk::UpdateDescriptorSets(m_device->device(), 2, descriptor_writes, 0, NULL);
+    }
+
+    ds_binding_flags[0] = 0;
+    ds_binding_flags[1] = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT;
+
+    // Resources for buffer tests
+    OneOffDescriptorSet descriptor_set_buffer(m_device,
+                                              {
+                                                  {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                                  {1, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 6, VK_SHADER_STAGE_ALL, nullptr},
+                                              },
+                                              0, layout_pnext, 0);
+
+    const VkPipelineLayoutObj pipeline_layout_buffer(m_device, {&descriptor_set_buffer.layout_});
+
+    VkDescriptorBufferInfo buffer_test_buffer_info[7] = {};
+    buffer_test_buffer_info[0].buffer = buffer0.handle();
+    buffer_test_buffer_info[0].offset = 0;
+    buffer_test_buffer_info[0].range = sizeof(uint32_t);
+
+    for (int i = 1; i < 7; i++) {
+        buffer_test_buffer_info[i].buffer = buffer1.handle();
+        buffer_test_buffer_info[i].offset = 0;
+        buffer_test_buffer_info[i].range = 4 * sizeof(float);
+    }
+
+    if (descriptor_indexing) {
+        VkWriteDescriptorSet buffer_descriptor_writes[2] = {};
+        buffer_descriptor_writes[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+        buffer_descriptor_writes[0].dstSet = descriptor_set_buffer.set_;
+        buffer_descriptor_writes[0].dstBinding = 0;
+        buffer_descriptor_writes[0].descriptorCount = 1;
+        buffer_descriptor_writes[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+        buffer_descriptor_writes[0].pBufferInfo = buffer_test_buffer_info;
+        buffer_descriptor_writes[1].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+        buffer_descriptor_writes[1].dstSet = descriptor_set_buffer.set_;
+        buffer_descriptor_writes[1].dstBinding = 1;
+        buffer_descriptor_writes[1].descriptorCount = 5;  // Intentionally don't write index 5
+        buffer_descriptor_writes[1].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
+        buffer_descriptor_writes[1].pBufferInfo = &buffer_test_buffer_info[1];
+        vk::UpdateDescriptorSets(m_device->device(), 2, buffer_descriptor_writes, 0, NULL);
+    }
+
+    // Shader programs for array OOB test in vertex stage:
+    // - The vertex shader fetches the invalid index from the uniform buffer and uses it to index out of bounds into the
+    // sampler array.
+    char const *vsSource_vert =
+        "#version 450\n"
+        "\n"
+        "layout(std140, set = 0, binding = 0) uniform foo { uint tex_index[1]; } uniform_index_buffer;\n"
+        "layout(set = 0, binding = 1) uniform sampler2D tex[6];\n"
+        "vec2 vertices[3];\n"
+        "void main(){\n"
+        "      vertices[0] = vec2(-1.0, -1.0);\n"
+        "      vertices[1] = vec2( 1.0, -1.0);\n"
+        "      vertices[2] = vec2( 0.0,  1.0);\n"
+        "   gl_Position = vec4(vertices[gl_VertexIndex % 3], 0.0, 1.0);\n"
+        "   gl_Position += 1e-30 * texture(tex[uniform_index_buffer.tex_index[0]], vec2(0, 0));\n"
+        "}\n";
+    char const *fsSource_vert =
+        "#version 450\n"
+        "\n"
+        "layout(set = 0, binding = 1) uniform sampler2D tex[6];\n"
+        "layout(location = 0) out vec4 uFragColor;\n"
+        "void main(){\n"
+        "   uFragColor = texture(tex[0], vec2(0, 0));\n"
+        "}\n";
+
+    // Shader programs for array OOB test in fragment stage:
+    // - The vertex shader fetches the invalid index from the uniform buffer and passes it to the fragment shader.
+    // - The fragment shader makes the invalid array access.
+    char const *vsSource_frag =
+        "#version 450\n"
+        "\n"
+        "layout(std140, binding = 0) uniform foo { uint tex_index[1]; } uniform_index_buffer;\n"
+        "layout(location = 0) out flat uint index;\n"
+        "vec2 vertices[3];\n"
+        "void main(){\n"
+        "      vertices[0] = vec2(-1.0, -1.0);\n"
+        "      vertices[1] = vec2( 1.0, -1.0);\n"
+        "      vertices[2] = vec2( 0.0,  1.0);\n"
+        "   gl_Position = vec4(vertices[gl_VertexIndex % 3], 0.0, 1.0);\n"
+        "   index = uniform_index_buffer.tex_index[0];\n"
+        "}\n";
+    char const *fsSource_frag =
+        "#version 450\n"
+        "\n"
+        "layout(set = 0, binding = 1) uniform sampler2D tex[6];\n"
+        "layout(location = 0) out vec4 uFragColor;\n"
+        "layout(location = 0) in flat uint index;\n"
+        "void main(){\n"
+        "   uFragColor = texture(tex[index], vec2(0, 0));\n"
+        "}\n";
+    char const *fsSource_frag_runtime =
+        "#version 450\n"
+        "#extension GL_EXT_nonuniform_qualifier : enable\n"
+        "\n"
+        "layout(set = 0, binding = 1) uniform sampler2D tex[];\n"
+        "layout(location = 0) out vec4 uFragColor;\n"
+        "layout(location = 0) in flat uint index;\n"
+        "void main(){\n"
+        "   uFragColor = texture(tex[index], vec2(0, 0));\n"
+        "}\n";
+    char const *fsSource_buffer =
+        "#version 450\n"
+        "#extension GL_EXT_nonuniform_qualifier : enable\n "
+        "\n"
+        "layout(set = 0, binding = 1) buffer foo { vec4 val; } colors[];\n"
+        "layout(location = 0) out vec4 uFragColor;\n"
+        "layout(location = 0) in flat uint index;\n"
+        "void main(){\n"
+        "   uFragColor = colors[index].val;\n"
+        "}\n";
+    char const *gsSource =
+        "#version 450\n"
+        "#extension GL_EXT_nonuniform_qualifier : enable\n "
+        "layout(triangles) in;\n"
+        "layout(triangle_strip, max_vertices=3) out;\n"
+        "layout(location=0) in VertexData { vec4 x; } gs_in[];\n"
+        "layout(std140, set = 0, binding = 0) uniform ufoo { uint index; } uniform_index_buffer;\n"
+        "layout(set = 0, binding = 1) buffer bfoo { vec4 val; } adds[];\n"
+        "void main() {\n"
+        "   gl_Position = gs_in[0].x + adds[uniform_index_buffer.index].val.x;\n"
+        "   EmitVertex();\n"
+        "}\n";
+    static const char *tesSource =
+        "#version 450\n"
+        "#extension GL_EXT_nonuniform_qualifier : enable\n "
+        "layout(std140, set = 0, binding = 0) uniform ufoo { uint index; } uniform_index_buffer;\n"
+        "layout(set = 0, binding = 1) buffer bfoo { vec4 val; } adds[];\n"
+        "layout(triangles, equal_spacing, cw) in;\n"
+        "void main() {\n"
+        "    gl_Position = adds[uniform_index_buffer.index].val;\n"
+        "}\n";
+
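+    // Each case pairs shader sources with a pipeline layout, a descriptor set, the index value written into the
+    // uniform buffer, and the substring expected in the resulting error message.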
+    struct TestCase {
+        char const *vertex_source;
+        char const *fragment_source;
+        char const *geometry_source;
+        char const *tess_ctrl_source;
+        char const *tess_eval_source;
+        bool debug;
+        const VkPipelineLayoutObj *pipeline_layout;
+        const OneOffDescriptorSet *descriptor_set;
+        uint32_t index;
+        char const *expected_error;
+    };
+
+    std::vector<TestCase> tests;
+    tests.push_back({vsSource_vert, fsSource_vert, nullptr, nullptr, nullptr, false, &pipeline_layout, &descriptor_set, 25,
+                     "Index of 25 used to index descriptor array of length 6."});
+    tests.push_back({vsSource_frag, fsSource_frag, nullptr, nullptr, nullptr, false, &pipeline_layout, &descriptor_set, 25,
+                     "Index of 25 used to index descriptor array of length 6."});
+#if !defined(ANDROID)
+    // The Android test framework uses shaderc for online compilations.  Even when configured to compile with debug info,
+    // shaderc seems to drop the OpLine instructions from the shader binary.  This causes the following two tests to fail
+    // on Android platforms.  Skip these tests until the shaderc issue is understood/resolved.
+    tests.push_back({vsSource_vert, fsSource_vert, nullptr, nullptr, nullptr, true, &pipeline_layout, &descriptor_set, 25,
+                     "gl_Position += 1e-30 * texture(tex[uniform_index_buffer.tex_index[0]], vec2(0, 0));"});
+    tests.push_back({vsSource_frag, fsSource_frag, nullptr, nullptr, nullptr, true, &pipeline_layout, &descriptor_set, 25,
+                     "uFragColor = texture(tex[index], vec2(0, 0));"});
+#endif
+    if (descriptor_indexing) {
+        tests.push_back({vsSource_frag, fsSource_frag_runtime, nullptr, nullptr, nullptr, false, &pipeline_layout, &descriptor_set,
+                         25, "Index of 25 used to index descriptor array of length 6."});
+        tests.push_back({vsSource_frag, fsSource_frag_runtime, nullptr, nullptr, nullptr, false, &pipeline_layout, &descriptor_set,
+                         5, "Descriptor index 5 is uninitialized"});
+        // Pick 6 below because it is less than the maximum count in the layout (8) but outside the actual variable count (6)
+        tests.push_back({vsSource_frag, fsSource_frag_runtime, nullptr, nullptr, nullptr, false, &pipeline_layout_variable,
+                         &descriptor_set_variable, 6, "Index of 6 used to index descriptor array of length 6."});
+        tests.push_back({vsSource_frag, fsSource_frag_runtime, nullptr, nullptr, nullptr, false, &pipeline_layout_variable,
+                         &descriptor_set_variable, 5, "Descriptor index 5 is uninitialized"});
+        tests.push_back({vsSource_frag, fsSource_buffer, nullptr, nullptr, nullptr, false, &pipeline_layout_buffer,
+                         &descriptor_set_buffer, 25, "Index of 25 used to index descriptor array of length 6."});
+        tests.push_back({vsSource_frag, fsSource_buffer, nullptr, nullptr, nullptr, false, &pipeline_layout_buffer,
+                         &descriptor_set_buffer, 5, "Descriptor index 5 is uninitialized"});
+        if (m_device->phy().features().geometryShader) {
+            // OOB Geometry
+            tests.push_back({bindStateVertShaderText, bindStateFragShaderText, gsSource, nullptr, nullptr, false,
+                             &pipeline_layout_buffer, &descriptor_set_buffer, 25, "Stage = Geometry"});
+            // Uninitialized Geometry
+            tests.push_back({bindStateVertShaderText, bindStateFragShaderText, gsSource, nullptr, nullptr, false,
+                             &pipeline_layout_buffer, &descriptor_set_buffer, 5, "Stage = Geometry"});
+        }
+        if (m_device->phy().features().tessellationShader) {
+            tests.push_back({bindStateVertShaderText, bindStateFragShaderText, nullptr, bindStateTscShaderText, tesSource, false,
+                             &pipeline_layout_buffer, &descriptor_set_buffer, 25, "Stage = Tessellation Eval"});
+            tests.push_back({bindStateVertShaderText, bindStateFragShaderText, nullptr, bindStateTscShaderText, tesSource, false,
+                             &pipeline_layout_buffer, &descriptor_set_buffer, 5, "Stage = Tessellation Eval"});
+        }
+    }
+
+    VkViewport viewport = m_viewports[0];
+    VkRect2D scissors = m_scissors[0];
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+
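+    // For each case: build the pipeline, record a draw that indexes the descriptor array, write the offending index
+    // into buffer0, submit, and verify the expected message is produced.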
+    for (const auto &iter : tests) {
+        VkResult err;
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, iter.expected_error);
+        VkShaderObj vs(m_device, iter.vertex_source, VK_SHADER_STAGE_VERTEX_BIT, this, "main", iter.debug);
+        VkShaderObj fs(m_device, iter.fragment_source, VK_SHADER_STAGE_FRAGMENT_BIT, this, "main", iter.debug);
+        VkShaderObj *gs = nullptr;
+        VkShaderObj *tcs = nullptr;
+        VkShaderObj *tes = nullptr;
+        VkPipelineObj pipe(m_device);
+        pipe.AddShader(&vs);
+        pipe.AddShader(&fs);
+        if (iter.geometry_source) {
+            gs = new VkShaderObj(m_device, iter.geometry_source, VK_SHADER_STAGE_GEOMETRY_BIT, this, "main", iter.debug);
+            pipe.AddShader(gs);
+        }
+        if (iter.tess_ctrl_source && iter.tess_eval_source) {
+            tcs = new VkShaderObj(m_device, iter.tess_ctrl_source, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this, "main",
+                                  iter.debug);
+            tes = new VkShaderObj(m_device, iter.tess_eval_source, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this, "main",
+                                  iter.debug);
+            pipe.AddShader(tcs);
+            pipe.AddShader(tes);
+            VkPipelineInputAssemblyStateCreateInfo iasci{VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, nullptr, 0,
+                                                         VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, VK_FALSE};
+            VkPipelineTessellationDomainOriginStateCreateInfo tessellationDomainOriginStateInfo = {
+                VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO, VK_NULL_HANDLE,
+                VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT};
+
+            VkPipelineTessellationStateCreateInfo tsci{VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,
+                                                       &tessellationDomainOriginStateInfo, 0, 3};
+            pipe.SetTessellation(&tsci);
+            pipe.SetInputAssembly(&iasci);
+        }
+        pipe.AddDefaultColorAttachment();
+        err = pipe.CreateVKPipeline(iter.pipeline_layout->handle(), renderPass());
+        ASSERT_VK_SUCCESS(err);
+        m_commandBuffer->begin();
+        m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+        vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+        vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, iter.pipeline_layout->handle(), 0, 1,
+                                  &iter.descriptor_set->set_, 0, nullptr);
+        vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+        vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissors);
+        vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
+        vk::CmdEndRenderPass(m_commandBuffer->handle());
+        m_commandBuffer->end();
+        uint32_t *data = (uint32_t *)buffer0.memory().map();
+        data[0] = iter.index;
+        buffer0.memory().unmap();
+
+        m_errorMonitor->SetUnexpectedError("UNASSIGNED-CoreValidation-DrawState-DescriptorSetNotUpdated");
+        vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+        vk::QueueWaitIdle(m_device->m_queue);
+        m_errorMonitor->VerifyFound();
+        if (gs) {
+            delete gs;
+        }
+        if (tcs && tes) {
+            delete tcs;
+            delete tes;
+        }
+    }
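+    // Repeat the uninitialized-descriptor (index 5) and out-of-bounds (index 25) checks with a compute pipeline when a
+    // compute queue is available and descriptor indexing is supported.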
+    auto c_queue = m_device->GetDefaultComputeQueue();
+    if (c_queue && descriptor_indexing) {
+        char const *csSource =
+            "#version 450\n"
+            "#extension GL_EXT_nonuniform_qualifier : enable\n "
+            "layout(set = 0, binding = 0) uniform ufoo { uint index; } u_index;"
+            "layout(set = 0, binding = 1) buffer StorageBuffer {\n"
+            "    uint data;\n"
+            "} Data[];\n"
+            "void main() {\n"
+            "   Data[(u_index.index - 1)].data = Data[u_index.index].data;\n"
+            "}\n";
+
+        auto shader_module = new VkShaderObj(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this);
+
+        VkPipelineShaderStageCreateInfo stage;
+        stage.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+        stage.pNext = nullptr;
+        stage.flags = 0;
+        stage.stage = VK_SHADER_STAGE_COMPUTE_BIT;
+        stage.module = shader_module->handle();
+        stage.pName = "main";
+        stage.pSpecializationInfo = nullptr;
+
+        // CreateComputePipelines
+        VkComputePipelineCreateInfo pipeline_info = {};
+        pipeline_info.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
+        pipeline_info.pNext = nullptr;
+        pipeline_info.flags = 0;
+        pipeline_info.layout = pipeline_layout_buffer.handle();
+        pipeline_info.basePipelineHandle = VK_NULL_HANDLE;
+        pipeline_info.basePipelineIndex = -1;
+        pipeline_info.stage = stage;
+
+        VkPipeline c_pipeline;
+        vk::CreateComputePipelines(device(), VK_NULL_HANDLE, 1, &pipeline_info, nullptr, &c_pipeline);
+        VkCommandBufferBeginInfo begin_info = {};
+        VkCommandBufferInheritanceInfo hinfo = {};
+        hinfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
+        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+        begin_info.pInheritanceInfo = &hinfo;
+
+        m_commandBuffer->begin(&begin_info);
+        vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, c_pipeline);
+        vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipeline_layout_buffer.handle(), 0, 1,
+                                  &descriptor_set_buffer.set_, 0, nullptr);
+        vk::CmdDispatch(m_commandBuffer->handle(), 1, 1, 1);
+        m_commandBuffer->end();
+
+        // Uninitialized
+        uint32_t *data = (uint32_t *)buffer0.memory().map();
+        data[0] = 5;
+        buffer0.memory().unmap();
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Stage = Compute");
+        m_errorMonitor->SetUnexpectedError("UNASSIGNED-CoreValidation-DrawState-DescriptorSetNotUpdated");
+        vk::QueueSubmit(c_queue->handle(), 1, &submit_info, VK_NULL_HANDLE);
+        vk::QueueWaitIdle(c_queue->handle());
+        m_errorMonitor->VerifyFound();
+        // Out of Bounds
+        data = (uint32_t *)buffer0.memory().map();
+        data[0] = 25;
+        buffer0.memory().unmap();
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Stage = Compute");
+        m_errorMonitor->SetUnexpectedError("UNASSIGNED-CoreValidation-DrawState-DescriptorSetNotUpdated");
+        vk::QueueSubmit(c_queue->handle(), 1, &submit_info, VK_NULL_HANDLE);
+        vk::QueueWaitIdle(c_queue->handle());
+        m_errorMonitor->VerifyFound();
+        vk::DestroyPipeline(m_device->handle(), c_pipeline, NULL);
+        vk::DestroyShaderModule(m_device->handle(), shader_module->handle(), NULL);
+    }
+    return;
+}
+
+TEST_F(VkLayerTest, GpuBufferDeviceAddressOOB) {
+    bool supported = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+
+    VkValidationFeatureEnableEXT enables[] = {VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT};
+    VkValidationFeaturesEXT features = {};
+    features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
+    features.enabledValidationFeatureCount = 1;
+    features.pEnabledValidationFeatures = enables;
+    InitFramework(myDbgFunc, m_errorMonitor, &features);
+
+    if (DeviceIsMockICD() || DeviceSimulation()) {
+        printf("%s GPU-Assisted validation test requires a driver that can draw.\n", kSkipPrefix);
+        return;
+    }
+
+    supported = supported && DeviceExtensionSupported(gpu(), nullptr, VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME);
+    m_device_extension_names.push_back(VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME);
+
+    VkPhysicalDeviceFeatures2KHR features2 = {};
+    auto bda_features = lvl_init_struct<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT>();
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+    features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&bda_features);
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+    supported = supported && bda_features.bufferDeviceAddress;
+
+    if (!supported) {
+        printf("Buffer Device Address feature not supported, skipping test\n");
+        return;
+    }
+    VkCommandPoolCreateFlags pool_flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2, pool_flags));
+    if (m_device->props.apiVersion < VK_API_VERSION_1_1) {
+        printf("%s GPU-Assisted validation test requires Vulkan 1.1+.\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // Make a uniform buffer, passed to the shader, that holds the buffer device address and the write count
+    uint32_t qfi = 0;
+    VkBufferCreateInfo bci = {};
+    bci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    bci.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+    bci.size = 8;
+    bci.queueFamilyIndexCount = 1;
+    bci.pQueueFamilyIndices = &qfi;
+    VkBufferObj buffer0;
+    VkMemoryPropertyFlags mem_props = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+    buffer0.init(*m_device, bci, mem_props);
+
+    // Make another buffer to write to
+    bci.usage = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT;
+    bci.size = 64;  // Buffer should be 16*4 = 64 bytes
+    VkBufferObj buffer1;
+    buffer1.init(*m_device, bci, mem_props);
+
+    // Get device address of buffer to write to
+    VkBufferDeviceAddressInfoEXT bda_info = {};
+    bda_info.sType = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT;
+    bda_info.buffer = buffer1.handle();
+    auto vkGetBufferDeviceAddressEXT =
+        (PFN_vkGetBufferDeviceAddressEXT)vk::GetDeviceProcAddr(m_device->device(), "vkGetBufferDeviceAddressEXT");
+    ASSERT_TRUE(vkGetBufferDeviceAddressEXT != nullptr);
+    auto pBuffer = vkGetBufferDeviceAddressEXT(m_device->device(), &bda_info);
+
+    OneOffDescriptorSet descriptor_set(m_device, {{0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr}});
+
+    const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});
+    VkDescriptorBufferInfo buffer_test_buffer_info[2] = {};
+    buffer_test_buffer_info[0].buffer = buffer0.handle();
+    buffer_test_buffer_info[0].offset = 0;
+    buffer_test_buffer_info[0].range = sizeof(uint32_t);
+
+    VkWriteDescriptorSet descriptor_writes[1] = {};
+    descriptor_writes[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_writes[0].dstSet = descriptor_set.set_;
+    descriptor_writes[0].dstBinding = 0;
+    descriptor_writes[0].descriptorCount = 1;
+    descriptor_writes[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    descriptor_writes[0].pBufferInfo = buffer_test_buffer_info;
+    vk::UpdateDescriptorSets(m_device->device(), 1, descriptor_writes, 0, NULL);
+
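+    // The vertex shader dereferences the device address read from the uniform buffer and writes nWrites ints through
+    // it; the submissions below steer those writes before the start and past the end of buffer1.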
+    char const *shader_source =
+        "#version 450\n"
+        "#extension GL_EXT_buffer_reference : enable\n "
+        "layout(buffer_reference, buffer_reference_align = 16) buffer bufStruct;\n"
+        "layout(set = 0, binding = 0) uniform ufoo {\n"
+        "    bufStruct data;\n"
+        "    int nWrites;\n"
+        "} u_info;\n"
+        "layout(buffer_reference, std140) buffer bufStruct {\n"
+        "    int a[4];\n"
+        "};\n"
+        "void main() {\n"
+        "    for (int i=0; i < u_info.nWrites; ++i) {\n"
+        "        u_info.data.a[i] = 0xdeadca71;\n"
+        "    }\n"
+        "}\n";
+    VkShaderObj vs(m_device, shader_source, VK_SHADER_STAGE_VERTEX_BIT, this, "main", true);
+
+    VkViewport viewport = m_viewports[0];
+    VkRect2D scissors = m_scissors[0];
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+
+    VkPipelineObj pipe(m_device);
+    pipe.AddShader(&vs);
+    pipe.AddDefaultColorAttachment();
+    VkResult err = pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+    ASSERT_VK_SUCCESS(err);
+
+    VkCommandBufferBeginInfo begin_info = {};
+    VkCommandBufferInheritanceInfo hinfo = {};
+    hinfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
+    begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+    begin_info.pInheritanceInfo = &hinfo;
+
+    m_commandBuffer->begin(&begin_info);
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+                              &descriptor_set.set_, 0, nullptr);
+    vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+    vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissors);
+    vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
+    vk::CmdEndRenderPass(m_commandBuffer->handle());
+    m_commandBuffer->end();
+
+    // Starting address too low
+    VkDeviceAddress *data = (VkDeviceAddress *)buffer0.memory().map();
+    data[0] = pBuffer - 16;
+    data[1] = 4;
+    buffer0.memory().unmap();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "access out of bounds");
+    err = vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    ASSERT_VK_SUCCESS(err);
+    err = vk::QueueWaitIdle(m_device->m_queue);
+    ASSERT_VK_SUCCESS(err);
+    m_errorMonitor->VerifyFound();
+
+    // Run past the end
+    data = (VkDeviceAddress *)buffer0.memory().map();
+    data[0] = pBuffer;
+    data[1] = 5;
+    buffer0.memory().unmap();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "access out of bounds");
+    err = vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    ASSERT_VK_SUCCESS(err);
+    err = vk::QueueWaitIdle(m_device->m_queue);
+    ASSERT_VK_SUCCESS(err);
+    m_errorMonitor->VerifyFound();
+
+    // Positive test - stay inside buffer
+    m_errorMonitor->ExpectSuccess();
+    data = (VkDeviceAddress *)buffer0.memory().map();
+    data[0] = pBuffer;
+    data[1] = 4;
+    buffer0.memory().unmap();
+    err = vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    ASSERT_VK_SUCCESS(err);
+    err = vk::QueueWaitIdle(m_device->m_queue);
+    ASSERT_VK_SUCCESS(err);
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkLayerTest, GpuValidationArrayOOBRayTracingShaders) {
+    TEST_DESCRIPTION(
+        "GPU validation: Verify detection of out-of-bounds descriptor array indexing and use of uninitialized descriptors for "
+        "ray tracing shaders.");
+
+    std::array<const char *, 1> required_instance_extensions = {VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME};
+    for (auto instance_extension : required_instance_extensions) {
+        if (InstanceExtensionSupported(instance_extension)) {
+            m_instance_extension_names.push_back(instance_extension);
+        } else {
+            printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix, instance_extension);
+            return;
+        }
+    }
+
+    VkValidationFeatureEnableEXT validation_feature_enables[] = {VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT};
+    VkValidationFeaturesEXT validation_features = {};
+    validation_features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
+    validation_features.enabledValidationFeatureCount = 1;
+    validation_features.pEnabledValidationFeatures = validation_feature_enables;
+    bool descriptor_indexing = CheckDescriptorIndexingSupportAndInitFramework(
+        this, m_instance_extension_names, m_device_extension_names, &validation_features, m_errorMonitor);
+
+    if (DeviceIsMockICD() || DeviceSimulation()) {
+        printf("%s Test not supported by MockICD, skipping tests\n", kSkipPrefix);
+        return;
+    }
+
+    std::array<const char *, 2> required_device_extensions = {VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME,
+                                                              VK_NV_RAY_TRACING_EXTENSION_NAME};
+    for (auto device_extension : required_device_extensions) {
+        if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
+            m_device_extension_names.push_back(device_extension);
+        } else {
+            printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
+            return;
+        }
+    }
+
+    VkPhysicalDeviceFeatures2KHR features2 = {};
+    auto indexing_features = lvl_init_struct<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>();
+    if (descriptor_indexing) {
+        PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+            (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+        ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+        features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&indexing_features);
+        vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+
+        if (!indexing_features.runtimeDescriptorArray || !indexing_features.descriptorBindingPartiallyBound ||
+            !indexing_features.descriptorBindingSampledImageUpdateAfterBind ||
+            !indexing_features.descriptorBindingVariableDescriptorCount) {
+            printf("Not all descriptor indexing features supported, skipping descriptor indexing tests\n");
+            descriptor_indexing = false;
+        }
+    }
+    VkCommandPoolCreateFlags pool_flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2, pool_flags));
+
+    PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR =
+        (PFN_vkGetPhysicalDeviceProperties2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceProperties2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceProperties2KHR != nullptr);
+
+    auto ray_tracing_properties = lvl_init_struct<VkPhysicalDeviceRayTracingPropertiesNV>();
+    auto properties2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&ray_tracing_properties);
+    vkGetPhysicalDeviceProperties2KHR(gpu(), &properties2);
+    if (ray_tracing_properties.maxTriangleCount == 0) {
+        printf("%s Did not find required ray tracing properties; skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    VkQueue ray_tracing_queue = m_device->m_queue;
+    uint32_t ray_tracing_queue_family_index = 0;
+
+    // If supported, run on the compute only queue.
+    uint32_t compute_only_queue_family_index = m_device->QueueFamilyMatching(VK_QUEUE_COMPUTE_BIT, VK_QUEUE_GRAPHICS_BIT);
+    if (compute_only_queue_family_index != UINT32_MAX) {
+        const auto &compute_only_queues = m_device->queue_family_queues(compute_only_queue_family_index);
+        if (!compute_only_queues.empty()) {
+            ray_tracing_queue = compute_only_queues[0]->handle();
+            ray_tracing_queue_family_index = compute_only_queue_family_index;
+        }
+    }
+
+    VkCommandPoolObj ray_tracing_command_pool(m_device, ray_tracing_queue_family_index,
+                                              VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
+    VkCommandBufferObj ray_tracing_command_buffer(m_device, &ray_tracing_command_pool);
+
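+    // Build a one-AABB bottom-level acceleration structure and a top-level structure that instances it; the top-level
+    // structure backs the acceleration-structure descriptor written at binding 0 below.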
+    struct AABB {
+        float min_x;
+        float min_y;
+        float min_z;
+        float max_x;
+        float max_y;
+        float max_z;
+    };
+
+    const std::vector<AABB> aabbs = {{-1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f}};
+
+    struct VkGeometryInstanceNV {
+        float transform[12];
+        uint32_t instanceCustomIndex : 24;
+        uint32_t mask : 8;
+        uint32_t instanceOffset : 24;
+        uint32_t flags : 8;
+        uint64_t accelerationStructureHandle;
+    };
+
+    VkDeviceSize aabb_buffer_size = sizeof(AABB) * aabbs.size();
+    VkBufferObj aabb_buffer;
+    aabb_buffer.init(*m_device, aabb_buffer_size, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
+                     VK_BUFFER_USAGE_RAY_TRACING_BIT_NV, {ray_tracing_queue_family_index});
+
+    uint8_t *mapped_aabb_buffer_data = (uint8_t *)aabb_buffer.memory().map();
+    std::memcpy(mapped_aabb_buffer_data, (uint8_t *)aabbs.data(), static_cast<std::size_t>(aabb_buffer_size));
+    aabb_buffer.memory().unmap();
+
+    VkGeometryNV geometry = {};
+    geometry.sType = VK_STRUCTURE_TYPE_GEOMETRY_NV;
+    geometry.geometryType = VK_GEOMETRY_TYPE_AABBS_NV;
+    geometry.geometry.triangles = {};
+    geometry.geometry.triangles.sType = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV;
+    geometry.geometry.aabbs = {};
+    geometry.geometry.aabbs.sType = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV;
+    geometry.geometry.aabbs.aabbData = aabb_buffer.handle();
+    geometry.geometry.aabbs.numAABBs = static_cast<uint32_t>(aabbs.size());
+    geometry.geometry.aabbs.offset = 0;
+    geometry.geometry.aabbs.stride = static_cast<VkDeviceSize>(sizeof(AABB));
+    geometry.flags = 0;
+
+    VkAccelerationStructureInfoNV bot_level_as_info = {};
+    bot_level_as_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
+    bot_level_as_info.type = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV;
+    bot_level_as_info.instanceCount = 0;
+    bot_level_as_info.geometryCount = 1;
+    bot_level_as_info.pGeometries = &geometry;
+
+    VkAccelerationStructureCreateInfoNV bot_level_as_create_info = {};
+    bot_level_as_create_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
+    bot_level_as_create_info.info = bot_level_as_info;
+
+    VkAccelerationStructureObj bot_level_as(*m_device, bot_level_as_create_info);
+
+    const std::vector<VkGeometryInstanceNV> instances = {
+        VkGeometryInstanceNV{
+            {
+                // clang-format off
+                1.0f, 0.0f, 0.0f, 0.0f,
+                0.0f, 1.0f, 0.0f, 0.0f,
+                0.0f, 0.0f, 1.0f, 0.0f,
+                // clang-format on
+            },
+            0,
+            0xFF,
+            0,
+            VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV,
+            bot_level_as.opaque_handle(),
+        },
+    };
+
+    VkDeviceSize instance_buffer_size = sizeof(VkGeometryInstanceNV) * instances.size();
+    VkBufferObj instance_buffer;
+    instance_buffer.init(*m_device, instance_buffer_size,
+                         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
+                         VK_BUFFER_USAGE_RAY_TRACING_BIT_NV, {ray_tracing_queue_family_index});
+
+    uint8_t *mapped_instance_buffer_data = (uint8_t *)instance_buffer.memory().map();
+    std::memcpy(mapped_instance_buffer_data, (uint8_t *)instances.data(), static_cast<std::size_t>(instance_buffer_size));
+    instance_buffer.memory().unmap();
+
+    VkAccelerationStructureInfoNV top_level_as_info = {};
+    top_level_as_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
+    top_level_as_info.type = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV;
+    top_level_as_info.instanceCount = 1;
+    top_level_as_info.geometryCount = 0;
+
+    VkAccelerationStructureCreateInfoNV top_level_as_create_info = {};
+    top_level_as_create_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
+    top_level_as_create_info.info = top_level_as_info;
+
+    VkAccelerationStructureObj top_level_as(*m_device, top_level_as_create_info);
+
+    VkDeviceSize scratch_buffer_size = std::max(bot_level_as.build_scratch_memory_requirements().memoryRequirements.size,
+                                                top_level_as.build_scratch_memory_requirements().memoryRequirements.size);
+    VkBufferObj scratch_buffer;
+    scratch_buffer.init(*m_device, scratch_buffer_size, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, VK_BUFFER_USAGE_RAY_TRACING_BIT_NV);
+
+    ray_tracing_command_buffer.begin();
+
+    // Build bot level acceleration structure
+    ray_tracing_command_buffer.BuildAccelerationStructure(&bot_level_as, scratch_buffer.handle());
+
+    // Barrier to prevent using scratch buffer for top level build before bottom level build finishes
+    VkMemoryBarrier memory_barrier = {};
+    memory_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
+    memory_barrier.srcAccessMask = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV | VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV;
+    memory_barrier.dstAccessMask = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV | VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV;
+    ray_tracing_command_buffer.PipelineBarrier(VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
+                                               VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV, 0, 1, &memory_barrier, 0,
+                                               nullptr, 0, nullptr);
+
+    // Build top level acceleration structure
+    ray_tracing_command_buffer.BuildAccelerationStructure(&top_level_as, scratch_buffer.handle(), instance_buffer.handle());
+
+    ray_tracing_command_buffer.end();
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &ray_tracing_command_buffer.handle();
+    vk::QueueSubmit(ray_tracing_queue, 1, &submit_info, VK_NULL_HANDLE);
+    vk::QueueWaitIdle(ray_tracing_queue);
+    m_errorMonitor->VerifyNotFound();
+
+    VkTextureObj texture(m_device, nullptr);
+    VkSamplerObj sampler(m_device);
+
+    VkDeviceSize storage_buffer_size = 1024;
+    VkBufferObj storage_buffer;
+    storage_buffer.init(*m_device, storage_buffer_size, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
+                        VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, {ray_tracing_queue_family_index});
+
+    VkDeviceSize shader_binding_table_buffer_size = ray_tracing_properties.shaderGroupHandleSize * 4ull;
+    VkBufferObj shader_binding_table_buffer;
+    shader_binding_table_buffer.init(*m_device, shader_binding_table_buffer_size,
+                                     VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
+                                     VK_BUFFER_USAGE_RAY_TRACING_BIT_NV, {ray_tracing_queue_family_index});
+
+    // Setup descriptors!
+    const VkShaderStageFlags kAllRayTracingStages = VK_SHADER_STAGE_RAYGEN_BIT_NV | VK_SHADER_STAGE_ANY_HIT_BIT_NV |
+                                                    VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV | VK_SHADER_STAGE_MISS_BIT_NV |
+                                                    VK_SHADER_STAGE_INTERSECTION_BIT_NV | VK_SHADER_STAGE_CALLABLE_BIT_NV;
+
+    void *layout_pnext = nullptr;
+    void *allocate_pnext = nullptr;
+    VkDescriptorPoolCreateFlags pool_create_flags = 0;
+    VkDescriptorSetLayoutCreateFlags layout_create_flags = 0;
+    VkDescriptorBindingFlagsEXT ds_binding_flags[3] = {};
+    VkDescriptorSetLayoutBindingFlagsCreateInfoEXT layout_createinfo_binding_flags[1] = {};
+    if (descriptor_indexing) {
+        ds_binding_flags[0] = 0;
+        ds_binding_flags[1] = 0;
+        ds_binding_flags[2] = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT | VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT;
+
+        layout_createinfo_binding_flags[0].sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT;
+        layout_createinfo_binding_flags[0].pNext = NULL;
+        layout_createinfo_binding_flags[0].bindingCount = 3;
+        layout_createinfo_binding_flags[0].pBindingFlags = ds_binding_flags;
+        layout_create_flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT;
+        pool_create_flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT;
+        layout_pnext = layout_createinfo_binding_flags;
+    }
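+    // PARTIALLY_BOUND lets array elements the shaders never access stay unwritten, and
+    // UPDATE_AFTER_BIND allows binding 2 to be updated after the descriptor set is bound.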
+
+    // Prepare descriptors
+    OneOffDescriptorSet ds(m_device,
+                           {
+                               {0, VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV, 1, kAllRayTracingStages, nullptr},
+                               {1, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, kAllRayTracingStages, nullptr},
+                               {2, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 6, kAllRayTracingStages, nullptr},
+                           },
+                           layout_create_flags, layout_pnext, pool_create_flags);
+
+    VkDescriptorSetVariableDescriptorCountAllocateInfoEXT variable_count = {};
+    uint32_t desc_counts;
+    if (descriptor_indexing) {
+        layout_create_flags = 0;
+        pool_create_flags = 0;
+        ds_binding_flags[2] =
+            VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT | VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT;
+        desc_counts = 6;  // We'll reserve 8 spaces in the layout, but the descriptor will only use 6
+        variable_count.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT;
+        variable_count.descriptorSetCount = 1;
+        variable_count.pDescriptorCounts = &desc_counts;
+        allocate_pnext = &variable_count;
+    }
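+    // The variable-count set below declares up to 8 images at binding 2, but only 6 are
+    // actually allocated via desc_counts.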
+
+    OneOffDescriptorSet ds_variable(m_device,
+                                    {
+                                        {0, VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV, 1, kAllRayTracingStages, nullptr},
+                                        {1, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, kAllRayTracingStages, nullptr},
+                                        {2, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 8, kAllRayTracingStages, nullptr},
+                                    },
+                                    layout_create_flags, layout_pnext, pool_create_flags, allocate_pnext);
+
+    VkAccelerationStructureNV top_level_as_handle = top_level_as.handle();
+    VkWriteDescriptorSetAccelerationStructureNV write_descript_set_as = {};
+    write_descript_set_as.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV;
+    write_descript_set_as.accelerationStructureCount = 1;
+    write_descript_set_as.pAccelerationStructures = &top_level_as_handle;
+
+    VkDescriptorBufferInfo descriptor_buffer_info = {};
+    descriptor_buffer_info.buffer = storage_buffer.handle();
+    descriptor_buffer_info.offset = 0;
+    descriptor_buffer_info.range = storage_buffer_size;
+
+    VkDescriptorImageInfo descriptor_image_infos[6] = {};
+    for (int i = 0; i < 6; i++) {
+        descriptor_image_infos[i] = texture.DescriptorImageInfo();
+        descriptor_image_infos[i].sampler = sampler.handle();
+        descriptor_image_infos[i].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+    }
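+    // All six image infos reference the same texture and sampler; the test cases only vary
+    // which array element the shaders index.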
+
+    VkWriteDescriptorSet descriptor_writes[3] = {};
+    descriptor_writes[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_writes[0].dstSet = ds.set_;
+    descriptor_writes[0].dstBinding = 0;
+    descriptor_writes[0].descriptorCount = 1;
+    descriptor_writes[0].descriptorType = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV;
+    descriptor_writes[0].pNext = &write_descript_set_as;
+
+    descriptor_writes[1].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_writes[1].dstSet = ds.set_;
+    descriptor_writes[1].dstBinding = 1;
+    descriptor_writes[1].descriptorCount = 1;
+    descriptor_writes[1].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
+    descriptor_writes[1].pBufferInfo = &descriptor_buffer_info;
+
+    descriptor_writes[2].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_writes[2].dstSet = ds.set_;
+    descriptor_writes[2].dstBinding = 2;
+    if (descriptor_indexing) {
+        descriptor_writes[2].descriptorCount = 5;  // Intentionally don't write index 5
+    } else {
+        descriptor_writes[2].descriptorCount = 6;
+    }
+    descriptor_writes[2].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+    descriptor_writes[2].pImageInfo = descriptor_image_infos;
+    vk::UpdateDescriptorSets(m_device->device(), 3, descriptor_writes, 0, NULL);
+    if (descriptor_indexing) {
+        descriptor_writes[0].dstSet = ds_variable.set_;
+        descriptor_writes[1].dstSet = ds_variable.set_;
+        descriptor_writes[2].dstSet = ds_variable.set_;
+        vk::UpdateDescriptorSets(m_device->device(), 3, descriptor_writes, 0, NULL);
+    }
+
+    const VkPipelineLayoutObj pipeline_layout(m_device, {&ds.layout_});
+    const VkPipelineLayoutObj pipeline_layout_variable(m_device, {&ds_variable.layout_});
+
+    const auto SetImagesArrayLength = [](const std::string &shader_template, const std::string &length_str) {
+        const std::string to_replace = "IMAGES_ARRAY_LENGTH";
+
+        std::string result = shader_template;
+        auto position = result.find(to_replace);
+        assert(position != std::string::npos);
+        result.replace(position, to_replace.length(), length_str);
+        return result;
+    };
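+    // The GLSL templates below declare "textures[IMAGES_ARRAY_LENGTH]"; substituting "6" yields a
+    // fixed-size array, while "" yields a runtime-sized array used only when descriptor_indexing is enabled.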
+
+    const std::string rgen_source_template = R"(#version 460
+        #extension GL_EXT_nonuniform_qualifier : require
+        #extension GL_EXT_samplerless_texture_functions : require
+        #extension GL_NV_ray_tracing : require
+
+        layout(set = 0, binding = 0) uniform accelerationStructureNV topLevelAS;
+        layout(set = 0, binding = 1, std430) buffer RayTracingSbo {
+	        uint rgen_index;
+	        uint ahit_index;
+	        uint chit_index;
+	        uint miss_index;
+	        uint intr_index;
+	        uint call_index;
+
+	        uint rgen_ran;
+	        uint ahit_ran;
+	        uint chit_ran;
+	        uint miss_ran;
+	        uint intr_ran;
+	        uint call_ran;
+
+	        float result1;
+	        float result2;
+	        float result3;
+        } sbo;
+        layout(set = 0, binding = 2) uniform texture2D textures[IMAGES_ARRAY_LENGTH];
+
+        layout(location = 0) rayPayloadNV vec3 payload;
+        layout(location = 3) callableDataNV vec3 callableData;
+
+        void main() {
+            sbo.rgen_ran = 1;
+
+	        executeCallableNV(0, 3);
+	        sbo.result1 = callableData.x;
+
+	        vec3 origin = vec3(0.0f, 0.0f, -2.0f);
+	        vec3 direction = vec3(0.0f, 0.0f, 1.0f);
+
+	        traceNV(topLevelAS, gl_RayFlagsNoneNV, 0xFF, 0, 1, 0, origin, 0.001, direction, 10000.0, 0);
+	        sbo.result2 = payload.x;
+
+	        traceNV(topLevelAS, gl_RayFlagsNoneNV, 0xFF, 0, 1, 0, origin, 0.001, -direction, 10000.0, 0);
+	        sbo.result3 = payload.x;
+
+            if (sbo.rgen_index > 0) {
+                // OOB here:
+                sbo.result3 = texelFetch(textures[sbo.rgen_index], ivec2(0, 0), 0).x;
+            }
+        }
+        )";
+
+    const std::string rgen_source = SetImagesArrayLength(rgen_source_template, "6");
+    const std::string rgen_source_runtime = SetImagesArrayLength(rgen_source_template, "");
+
+    const std::string ahit_source_template = R"(#version 460
+        #extension GL_EXT_nonuniform_qualifier : require
+        #extension GL_EXT_samplerless_texture_functions : require
+        #extension GL_NV_ray_tracing : require
+
+        layout(set = 0, binding = 1, std430) buffer StorageBuffer {
+	        uint rgen_index;
+	        uint ahit_index;
+	        uint chit_index;
+	        uint miss_index;
+	        uint intr_index;
+	        uint call_index;
+
+	        uint rgen_ran;
+	        uint ahit_ran;
+	        uint chit_ran;
+	        uint miss_ran;
+	        uint intr_ran;
+	        uint call_ran;
+
+	        float result1;
+	        float result2;
+	        float result3;
+        } sbo;
+        layout(set = 0, binding = 2) uniform texture2D textures[IMAGES_ARRAY_LENGTH];
+
+        hitAttributeNV vec3 hitValue;
+
+        layout(location = 0) rayPayloadInNV vec3 payload;
+
+        void main() {
+	        sbo.ahit_ran = 2;
+
+	        payload = vec3(0.1234f);
+
+            if (sbo.ahit_index > 0) {
+                // OOB here:
+                payload.x = texelFetch(textures[sbo.ahit_index], ivec2(0, 0), 0).x;
+            }
+        }
+    )";
+    const std::string ahit_source = SetImagesArrayLength(ahit_source_template, "6");
+    const std::string ahit_source_runtime = SetImagesArrayLength(ahit_source_template, "");
+
+    const std::string chit_source_template = R"(#version 460
+        #extension GL_EXT_nonuniform_qualifier : require
+        #extension GL_EXT_samplerless_texture_functions : require
+        #extension GL_NV_ray_tracing : require
+
+        layout(set = 0, binding = 1, std430) buffer RayTracingSbo {
+	        uint rgen_index;
+	        uint ahit_index;
+	        uint chit_index;
+	        uint miss_index;
+	        uint intr_index;
+	        uint call_index;
+
+	        uint rgen_ran;
+	        uint ahit_ran;
+	        uint chit_ran;
+	        uint miss_ran;
+	        uint intr_ran;
+	        uint call_ran;
+
+	        float result1;
+	        float result2;
+	        float result3;
+        } sbo;
+        layout(set = 0, binding = 2) uniform texture2D textures[IMAGES_ARRAY_LENGTH];
+
+        layout(location = 0) rayPayloadInNV vec3 payload;
+
+        hitAttributeNV vec3 attribs;
+
+        void main() {
+            sbo.chit_ran = 3;
+
+            payload = attribs;
+            if (sbo.chit_index > 0) {
+                // OOB here:
+                payload.x = texelFetch(textures[sbo.chit_index], ivec2(0, 0), 0).x;
+            }
+        }
+        )";
+    const std::string chit_source = SetImagesArrayLength(chit_source_template, "6");
+    const std::string chit_source_runtime = SetImagesArrayLength(chit_source_template, "");
+
+    const std::string miss_source_template = R"(#version 460
+        #extension GL_EXT_nonuniform_qualifier : enable
+        #extension GL_EXT_samplerless_texture_functions : require
+        #extension GL_NV_ray_tracing : require
+
+        layout(set = 0, binding = 1, std430) buffer RayTracingSbo {
+	        uint rgen_index;
+	        uint ahit_index;
+	        uint chit_index;
+	        uint miss_index;
+	        uint intr_index;
+	        uint call_index;
+
+	        uint rgen_ran;
+	        uint ahit_ran;
+	        uint chit_ran;
+	        uint miss_ran;
+	        uint intr_ran;
+	        uint call_ran;
+
+	        float result1;
+	        float result2;
+	        float result3;
+        } sbo;
+        layout(set = 0, binding = 2) uniform texture2D textures[IMAGES_ARRAY_LENGTH];
+
+        layout(location = 0) rayPayloadInNV vec3 payload;
+
+        void main() {
+            sbo.miss_ran = 4;
+
+            payload = vec3(1.0, 0.0, 0.0);
+
+            if (sbo.miss_index > 0) {
+                // OOB here:
+                payload.x = texelFetch(textures[sbo.miss_index], ivec2(0, 0), 0).x;
+            }
+        }
+    )";
+    const std::string miss_source = SetImagesArrayLength(miss_source_template, "6");
+    const std::string miss_source_runtime = SetImagesArrayLength(miss_source_template, "");
+
+    const std::string intr_source_template = R"(#version 460
+        #extension GL_EXT_nonuniform_qualifier : require
+        #extension GL_EXT_samplerless_texture_functions : require
+        #extension GL_NV_ray_tracing : require
+
+        layout(set = 0, binding = 1, std430) buffer StorageBuffer {
+	        uint rgen_index;
+	        uint ahit_index;
+	        uint chit_index;
+	        uint miss_index;
+	        uint intr_index;
+	        uint call_index;
+
+	        uint rgen_ran;
+	        uint ahit_ran;
+	        uint chit_ran;
+	        uint miss_ran;
+	        uint intr_ran;
+	        uint call_ran;
+
+	        float result1;
+	        float result2;
+	        float result3;
+        } sbo;
+        layout(set = 0, binding = 2) uniform texture2D textures[IMAGES_ARRAY_LENGTH];
+
+        hitAttributeNV vec3 hitValue;
+
+        void main() {
+	        sbo.intr_ran = 5;
+
+	        hitValue = vec3(0.0f, 0.5f, 0.0f);
+
+	        reportIntersectionNV(1.0f, 0);
+
+            if (sbo.intr_index > 0) {
+                // OOB here:
+                hitValue.x = texelFetch(textures[sbo.intr_index], ivec2(0, 0), 0).x;
+            }
+        }
+    )";
+    const std::string intr_source = SetImagesArrayLength(intr_source_template, "6");
+    const std::string intr_source_runtime = SetImagesArrayLength(intr_source_template, "");
+
+    const std::string call_source_template = R"(#version 460
+        #extension GL_EXT_nonuniform_qualifier : require
+        #extension GL_EXT_samplerless_texture_functions : require
+        #extension GL_NV_ray_tracing : require
+
+        layout(set = 0, binding = 1, std430) buffer StorageBuffer {
+	        uint rgen_index;
+	        uint ahit_index;
+	        uint chit_index;
+	        uint miss_index;
+	        uint intr_index;
+	        uint call_index;
+
+	        uint rgen_ran;
+	        uint ahit_ran;
+	        uint chit_ran;
+	        uint miss_ran;
+	        uint intr_ran;
+	        uint call_ran;
+
+	        float result1;
+	        float result2;
+	        float result3;
+        } sbo;
+        layout(set = 0, binding = 2) uniform texture2D textures[IMAGES_ARRAY_LENGTH];
+
+        layout(location = 3) callableDataInNV vec3 callableData;
+
+        void main() {
+	        sbo.call_ran = 6;
+
+	        callableData = vec3(0.1234f);
+
+            if (sbo.call_index > 0) {
+                // OOB here:
+                callableData.x = texelFetch(textures[sbo.call_index], ivec2(0, 0), 0).x;
+            }
+        }
+    )";
+    const std::string call_source = SetImagesArrayLength(call_source_template, "6");
+    const std::string call_source_runtime = SetImagesArrayLength(call_source_template, "");
+
+    struct TestCase {
+        const std::string &rgen_shader_source;
+        const std::string &ahit_shader_source;
+        const std::string &chit_shader_source;
+        const std::string &miss_shader_source;
+        const std::string &intr_shader_source;
+        const std::string &call_shader_source;
+        bool variable_length;
+        uint32_t rgen_index;
+        uint32_t ahit_index;
+        uint32_t chit_index;
+        uint32_t miss_index;
+        uint32_t intr_index;
+        uint32_t call_index;
+        const char *expected_error;
+    };
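+    // Each test case drives a single stage's index out of bounds (or at an uninitialized element)
+    // and records the validation message expected for it.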
+
+    std::vector<TestCase> tests;
+    tests.push_back({rgen_source, ahit_source, chit_source, miss_source, intr_source, call_source, false, 25, 0, 0, 0, 0, 0,
+                     "Index of 25 used to index descriptor array of length 6."});
+    tests.push_back({rgen_source, ahit_source, chit_source, miss_source, intr_source, call_source, false, 0, 25, 0, 0, 0, 0,
+                     "Index of 25 used to index descriptor array of length 6."});
+    tests.push_back({rgen_source, ahit_source, chit_source, miss_source, intr_source, call_source, false, 0, 0, 25, 0, 0, 0,
+                     "Index of 25 used to index descriptor array of length 6."});
+    tests.push_back({rgen_source, ahit_source, chit_source, miss_source, intr_source, call_source, false, 0, 0, 0, 25, 0, 0,
+                     "Index of 25 used to index descriptor array of length 6."});
+    tests.push_back({rgen_source, ahit_source, chit_source, miss_source, intr_source, call_source, false, 0, 0, 0, 0, 25, 0,
+                     "Index of 25 used to index descriptor array of length 6."});
+    tests.push_back({rgen_source, ahit_source, chit_source, miss_source, intr_source, call_source, false, 0, 0, 0, 0, 0, 25,
+                     "Index of 25 used to index descriptor array of length 6."});
+
+    if (descriptor_indexing) {
+        tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
+                         call_source_runtime, true, 25, 0, 0, 0, 0, 0, "Index of 25 used to index descriptor array of length 6."});
+        tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
+                         call_source_runtime, true, 0, 25, 0, 0, 0, 0, "Index of 25 used to index descriptor array of length 6."});
+        tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
+                         call_source_runtime, true, 0, 0, 25, 0, 0, 0, "Index of 25 used to index descriptor array of length 6."});
+        tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
+                         call_source_runtime, true, 0, 0, 0, 25, 0, 0, "Index of 25 used to index descriptor array of length 6."});
+        tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
+                         call_source_runtime, true, 0, 0, 0, 0, 25, 0, "Index of 25 used to index descriptor array of length 6."});
+        tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
+                         call_source_runtime, true, 0, 0, 0, 0, 0, 25, "Index of 25 used to index descriptor array of length 6."});
+
+        // For this group, index 6 is below the layout maximum (8) but not below the variable descriptor count actually allocated (6), so it is out of bounds
+        tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
+                         call_source_runtime, true, 6, 0, 0, 0, 0, 0, "Index of 6 used to index descriptor array of length 6."});
+        tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
+                         call_source_runtime, true, 0, 6, 0, 0, 0, 0, "Index of 6 used to index descriptor array of length 6."});
+        tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
+                         call_source_runtime, true, 0, 0, 6, 0, 0, 0, "Index of 6 used to index descriptor array of length 6."});
+        tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
+                         call_source_runtime, true, 0, 0, 0, 6, 0, 0, "Index of 6 used to index descriptor array of length 6."});
+        tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
+                         call_source_runtime, true, 0, 0, 0, 0, 6, 0, "Index of 6 used to index descriptor array of length 6."});
+        tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
+                         call_source_runtime, true, 0, 0, 0, 0, 0, 6, "Index of 6 used to index descriptor array of length 6."});
+
+        tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
+                         call_source_runtime, true, 5, 0, 0, 0, 0, 0, "Descriptor index 5 is uninitialized."});
+        tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
+                         call_source_runtime, true, 0, 5, 0, 0, 0, 0, "Descriptor index 5 is uninitialized."});
+        tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
+                         call_source_runtime, true, 0, 0, 5, 0, 0, 0, "Descriptor index 5 is uninitialized."});
+        tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
+                         call_source_runtime, true, 0, 0, 0, 5, 0, 0, "Descriptor index 5 is uninitialized."});
+        tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
+                         call_source_runtime, true, 0, 0, 0, 0, 5, 0, "Descriptor index 5 is uninitialized."});
+        tests.push_back({rgen_source_runtime, ahit_source_runtime, chit_source_runtime, miss_source_runtime, intr_source_runtime,
+                         call_source_runtime, true, 0, 0, 0, 0, 0, 5, "Descriptor index 5 is uninitialized."});
+    }
+
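+    // The VK_NV_ray_tracing entry points are extension functions, so fetch them through
+    // vkGetDeviceProcAddr.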
+    PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = reinterpret_cast<PFN_vkCreateRayTracingPipelinesNV>(
+        vk::GetDeviceProcAddr(m_device->handle(), "vkCreateRayTracingPipelinesNV"));
+    ASSERT_TRUE(vkCreateRayTracingPipelinesNV != nullptr);
+
+    PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV =
+        reinterpret_cast<PFN_vkGetRayTracingShaderGroupHandlesNV>(
+            vk::GetDeviceProcAddr(m_device->handle(), "vkGetRayTracingShaderGroupHandlesNV"));
+    ASSERT_TRUE(vkGetRayTracingShaderGroupHandlesNV != nullptr);
+
+    PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV =
+        reinterpret_cast<PFN_vkCmdTraceRaysNV>(vk::GetDeviceProcAddr(m_device->handle(), "vkCmdTraceRaysNV"));
+    ASSERT_TRUE(vkCmdTraceRaysNV != nullptr);
+
+    // Iteration 0 tests with no descriptor set bound (to sanity test "draw" validation). Iteration 1
+    // tests what's in the test case vector.
+    for (int i = 0; i < 2; ++i) {
+        for (const auto &test : tests) {
+            if (i == 1) {
+                m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, test.expected_error);
+            }
+
+            VkShaderObj rgen_shader(m_device, test.rgen_shader_source.c_str(), VK_SHADER_STAGE_RAYGEN_BIT_NV, this, "main");
+            VkShaderObj ahit_shader(m_device, test.ahit_shader_source.c_str(), VK_SHADER_STAGE_ANY_HIT_BIT_NV, this, "main");
+            VkShaderObj chit_shader(m_device, test.chit_shader_source.c_str(), VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV, this, "main");
+            VkShaderObj miss_shader(m_device, test.miss_shader_source.c_str(), VK_SHADER_STAGE_MISS_BIT_NV, this, "main");
+            VkShaderObj intr_shader(m_device, test.intr_shader_source.c_str(), VK_SHADER_STAGE_INTERSECTION_BIT_NV, this, "main");
+            VkShaderObj call_shader(m_device, test.call_shader_source.c_str(), VK_SHADER_STAGE_CALLABLE_BIT_NV, this, "main");
+
+            VkPipelineShaderStageCreateInfo stage_create_infos[6] = {};
+            stage_create_infos[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+            stage_create_infos[0].stage = VK_SHADER_STAGE_RAYGEN_BIT_NV;
+            stage_create_infos[0].module = rgen_shader.handle();
+            stage_create_infos[0].pName = "main";
+
+            stage_create_infos[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+            stage_create_infos[1].stage = VK_SHADER_STAGE_ANY_HIT_BIT_NV;
+            stage_create_infos[1].module = ahit_shader.handle();
+            stage_create_infos[1].pName = "main";
+
+            stage_create_infos[2].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+            stage_create_infos[2].stage = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV;
+            stage_create_infos[2].module = chit_shader.handle();
+            stage_create_infos[2].pName = "main";
+
+            stage_create_infos[3].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+            stage_create_infos[3].stage = VK_SHADER_STAGE_MISS_BIT_NV;
+            stage_create_infos[3].module = miss_shader.handle();
+            stage_create_infos[3].pName = "main";
+
+            stage_create_infos[4].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+            stage_create_infos[4].stage = VK_SHADER_STAGE_INTERSECTION_BIT_NV;
+            stage_create_infos[4].module = intr_shader.handle();
+            stage_create_infos[4].pName = "main";
+
+            stage_create_infos[5].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+            stage_create_infos[5].stage = VK_SHADER_STAGE_CALLABLE_BIT_NV;
+            stage_create_infos[5].module = call_shader.handle();
+            stage_create_infos[5].pName = "main";
+
+            VkRayTracingShaderGroupCreateInfoNV group_create_infos[4] = {};
+            group_create_infos[0].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+            group_create_infos[0].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
+            group_create_infos[0].generalShader = 0;  // rgen
+            group_create_infos[0].closestHitShader = VK_SHADER_UNUSED_NV;
+            group_create_infos[0].anyHitShader = VK_SHADER_UNUSED_NV;
+            group_create_infos[0].intersectionShader = VK_SHADER_UNUSED_NV;
+
+            group_create_infos[1].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+            group_create_infos[1].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
+            group_create_infos[1].generalShader = 3;  // miss
+            group_create_infos[1].closestHitShader = VK_SHADER_UNUSED_NV;
+            group_create_infos[1].anyHitShader = VK_SHADER_UNUSED_NV;
+            group_create_infos[1].intersectionShader = VK_SHADER_UNUSED_NV;
+
+            group_create_infos[2].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+            group_create_infos[2].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV;
+            group_create_infos[2].generalShader = VK_SHADER_UNUSED_NV;
+            group_create_infos[2].closestHitShader = 2;
+            group_create_infos[2].anyHitShader = 1;
+            group_create_infos[2].intersectionShader = 4;
+
+            group_create_infos[3].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+            group_create_infos[3].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
+            group_create_infos[3].generalShader = 5;  // call
+            group_create_infos[3].closestHitShader = VK_SHADER_UNUSED_NV;
+            group_create_infos[3].anyHitShader = VK_SHADER_UNUSED_NV;
+            group_create_infos[3].intersectionShader = VK_SHADER_UNUSED_NV;
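+            // Groups: 0 = raygen, 1 = miss, 2 = procedural hit group (any-hit/closest-hit/intersection),
+            // 3 = callable. The SBT region offsets passed to vkCmdTraceRaysNV below follow this order.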
+
+            VkRayTracingPipelineCreateInfoNV pipeline_ci = {};
+            pipeline_ci.sType = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV;
+            pipeline_ci.stageCount = 6;
+            pipeline_ci.pStages = stage_create_infos;
+            pipeline_ci.groupCount = 4;
+            pipeline_ci.pGroups = group_create_infos;
+            pipeline_ci.maxRecursionDepth = 2;
+            pipeline_ci.layout = test.variable_length ? pipeline_layout_variable.handle() : pipeline_layout.handle();
+
+            VkPipeline pipeline = VK_NULL_HANDLE;
+            ASSERT_VK_SUCCESS(
+                vkCreateRayTracingPipelinesNV(m_device->handle(), VK_NULL_HANDLE, 1, &pipeline_ci, nullptr, &pipeline));
+
+            std::vector<uint8_t> shader_binding_table_data;
+            shader_binding_table_data.resize(static_cast<std::size_t>(shader_binding_table_buffer_size), 0);
+            ASSERT_VK_SUCCESS(vkGetRayTracingShaderGroupHandlesNV(m_device->handle(), pipeline, 0, 4,
+                                                                  static_cast<std::size_t>(shader_binding_table_buffer_size),
+                                                                  shader_binding_table_data.data()));
+
+            uint8_t *mapped_shader_binding_table_data = (uint8_t *)shader_binding_table_buffer.memory().map();
+            std::memcpy(mapped_shader_binding_table_data, shader_binding_table_data.data(), shader_binding_table_data.size());
+            shader_binding_table_buffer.memory().unmap();
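+            // The queried group handles now live in the host-visible SBT buffer that vkCmdTraceRaysNV reads.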
+
+            ray_tracing_command_buffer.begin();
+
+            vk::CmdBindPipeline(ray_tracing_command_buffer.handle(), VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, pipeline);
+
+            if (i == 1) {
+                vk::CmdBindDescriptorSets(ray_tracing_command_buffer.handle(), VK_PIPELINE_BIND_POINT_RAY_TRACING_NV,
+                                          test.variable_length ? pipeline_layout_variable.handle() : pipeline_layout.handle(), 0, 1,
+                                          test.variable_length ? &ds_variable.set_ : &ds.set_, 0, nullptr);
+            } else {
+                m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdTraceRaysNV-None-02697");
+                m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                                     "UNASSIGNED-CoreValidation-DrawState-DescriptorSetNotBound");
+            }
+
+            vkCmdTraceRaysNV(ray_tracing_command_buffer.handle(), shader_binding_table_buffer.handle(),
+                             ray_tracing_properties.shaderGroupHandleSize * 0ull, shader_binding_table_buffer.handle(),
+                             ray_tracing_properties.shaderGroupHandleSize * 1ull, ray_tracing_properties.shaderGroupHandleSize,
+                             shader_binding_table_buffer.handle(), ray_tracing_properties.shaderGroupHandleSize * 2ull,
+                             ray_tracing_properties.shaderGroupHandleSize, shader_binding_table_buffer.handle(),
+                             ray_tracing_properties.shaderGroupHandleSize * 3ull, ray_tracing_properties.shaderGroupHandleSize,
+                             /*width=*/1, /*height=*/1, /*depth=*/1);
+
+            ray_tracing_command_buffer.end();
+
+            // Update the index of the texture that the shaders should read
+            uint32_t *mapped_storage_buffer_data = (uint32_t *)storage_buffer.memory().map();
+            mapped_storage_buffer_data[0] = test.rgen_index;
+            mapped_storage_buffer_data[1] = test.ahit_index;
+            mapped_storage_buffer_data[2] = test.chit_index;
+            mapped_storage_buffer_data[3] = test.miss_index;
+            mapped_storage_buffer_data[4] = test.intr_index;
+            mapped_storage_buffer_data[5] = test.call_index;
+            mapped_storage_buffer_data[6] = 0;
+            mapped_storage_buffer_data[7] = 0;
+            mapped_storage_buffer_data[8] = 0;
+            mapped_storage_buffer_data[9] = 0;
+            mapped_storage_buffer_data[10] = 0;
+            mapped_storage_buffer_data[11] = 0;
+            storage_buffer.memory().unmap();
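+            // Entries 6..11 are the *_ran flags; each shader stage sets its own flag, which the
+            // assertions below use to confirm that every stage executed.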
+
+            m_errorMonitor->SetUnexpectedError("UNASSIGNED-CoreValidation-DrawState-DescriptorSetNotUpdated");
+            vk::QueueSubmit(ray_tracing_queue, 1, &submit_info, VK_NULL_HANDLE);
+            vk::QueueWaitIdle(ray_tracing_queue);
+            m_errorMonitor->VerifyFound();
+
+            mapped_storage_buffer_data = (uint32_t *)storage_buffer.memory().map();
+            if (i == 1) {
+                ASSERT_TRUE(mapped_storage_buffer_data[6] == 1);
+                ASSERT_TRUE(mapped_storage_buffer_data[7] == 2);
+                ASSERT_TRUE(mapped_storage_buffer_data[8] == 3);
+                ASSERT_TRUE(mapped_storage_buffer_data[9] == 4);
+                ASSERT_TRUE(mapped_storage_buffer_data[10] == 5);
+                ASSERT_TRUE(mapped_storage_buffer_data[11] == 6);
+            }
+            storage_buffer.memory().unmap();
+
+            vk::DestroyPipeline(m_device->handle(), pipeline, nullptr);
+        }
+    }
+}
+
+TEST_F(VkLayerTest, InvalidDescriptorPoolConsistency) {
+    VkResult err;
+
+    TEST_DESCRIPTION("Allocate descriptor sets from one DS pool and attempt to delete them from another.");
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkFreeDescriptorSets-pDescriptorSets-parent");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkDescriptorPoolSize ds_type_count = {};
+    ds_type_count.type = VK_DESCRIPTOR_TYPE_SAMPLER;
+    ds_type_count.descriptorCount = 1;
+
+    VkDescriptorPoolCreateInfo ds_pool_ci = {};
+    ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+    ds_pool_ci.pNext = NULL;
+    ds_pool_ci.flags = 0;
+    ds_pool_ci.maxSets = 1;
+    ds_pool_ci.poolSizeCount = 1;
+    ds_pool_ci.pPoolSizes = &ds_type_count;
+
+    VkDescriptorPool bad_pool;
+    err = vk::CreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &bad_pool);
+    ASSERT_VK_SUCCESS(err);
+
+    OneOffDescriptorSet descriptor_set(m_device, {
+                                                     {0, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                                 });
+
+    err = vk::FreeDescriptorSets(m_device->device(), bad_pool, 1, &descriptor_set.set_);
+
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyDescriptorPool(m_device->device(), bad_pool, NULL);
+}
+
+TEST_F(VkLayerTest, DrawWithPipelineIncompatibleWithSubpass) {
+    TEST_DESCRIPTION("Use a pipeline for the wrong subpass in a render pass instance");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // A renderpass with two subpasses, both writing the same attachment.
+    VkAttachmentDescription attach[] = {
+        {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+         VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_UNDEFINED,
+         VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+    };
+    VkAttachmentReference ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+    VkSubpassDescription subpasses[] = {
+        {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &ref, nullptr, nullptr, 0, nullptr},
+        {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &ref, nullptr, nullptr, 0, nullptr},
+    };
+    VkSubpassDependency dep = {0,
+                               1,
+                               VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+                               VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+                               VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+                               VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+                               VK_DEPENDENCY_BY_REGION_BIT};
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attach, 2, subpasses, 1, &dep};
+    VkRenderPass rp;
+    VkResult err = vk::CreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+    ASSERT_VK_SUCCESS(err);
+
+    VkImageObj image(m_device);
+    image.InitNoLayout(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    VkImageView imageView = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
+
+    VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &imageView, 32, 32, 1};
+    VkFramebuffer fb;
+    err = vk::CreateFramebuffer(m_device->device(), &fbci, nullptr, &fb);
+    ASSERT_VK_SUCCESS(err);
+
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+    VkPipelineObj pipe(m_device);
+    pipe.AddDefaultColorAttachment();
+    pipe.AddShader(&vs);
+    pipe.AddShader(&fs);
+    VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
+    m_viewports.push_back(viewport);
+    pipe.SetViewport(m_viewports);
+    VkRect2D rect = {};
+    m_scissors.push_back(rect);
+    pipe.SetScissor(m_scissors);
+
+    const VkPipelineLayoutObj pl(m_device);
+    pipe.CreateVKPipeline(pl.handle(), rp);
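+    // The pipeline targets subpass 0 of rp; both subtests below then use it while subpass 1 is active.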
+
+    m_commandBuffer->begin();
+
+    VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
+                                  nullptr,
+                                  rp,
+                                  fb,
+                                  {{
+                                       0,
+                                       0,
+                                   },
+                                   {32, 32}},
+                                  0,
+                                  nullptr};
+
+    // subtest 1: bind in the wrong subpass
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+    vk::CmdNextSubpass(m_commandBuffer->handle(), VK_SUBPASS_CONTENTS_INLINE);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "built for subpass 0 but used in subpass 1");
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+    vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
+    m_errorMonitor->VerifyFound();
+
+    vk::CmdEndRenderPass(m_commandBuffer->handle());
+
+    // subtest 2: bind in correct subpass, then transition to next subpass
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+    vk::CmdNextSubpass(m_commandBuffer->handle(), VK_SUBPASS_CONTENTS_INLINE);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "built for subpass 0 but used in subpass 1");
+    vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
+    m_errorMonitor->VerifyFound();
+
+    vk::CmdEndRenderPass(m_commandBuffer->handle());
+
+    m_commandBuffer->end();
+
+    vk::DestroyFramebuffer(m_device->device(), fb, nullptr);
+    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+}
+
+TEST_F(VkLayerTest, ImageBarrierSubpassConflict) {
+    TEST_DESCRIPTION("Check case where subpass index references different image from image barrier");
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // Create an RP/FB combo whose subpass references a different attachment index than the image barrier;
+    // this exercises the second half of "VUID-vkCmdPipelineBarrier-image-02635"
+    VkAttachmentDescription attach[] = {
+        {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+         VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_UNDEFINED,
+         VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+        {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+         VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_UNDEFINED,
+         VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+    };
+    // ref attachment points to wrong attachment index compared to img_barrier below
+    VkAttachmentReference ref = {1, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+    VkSubpassDescription subpasses[] = {
+        {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &ref, nullptr, nullptr, 0, nullptr},
+    };
+    VkSubpassDependency dep = {0,
+                               0,
+                               VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+                               VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+                               VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+                               VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+                               VK_DEPENDENCY_BY_REGION_BIT};
+
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 2, attach, 1, subpasses, 1, &dep};
+    VkRenderPass rp;
+
+    VkResult err = vk::CreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+    ASSERT_VK_SUCCESS(err);
+
+    VkImageObj image(m_device);
+    image.InitNoLayout(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    VkImageView imageView = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
+    VkImageObj image2(m_device);
+    image2.InitNoLayout(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    VkImageView imageView2 = image2.targetView(VK_FORMAT_R8G8B8A8_UNORM);
+    // re-use imageView from start of test
+    VkImageView iv_array[2] = {imageView, imageView2};
+
+    VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 2, iv_array, 32, 32, 1};
+    VkFramebuffer fb;
+    err = vk::CreateFramebuffer(m_device->device(), &fbci, nullptr, &fb);
+    ASSERT_VK_SUCCESS(err);
+
+    VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
+                                  nullptr,
+                                  rp,
+                                  fb,
+                                  {{
+                                       0,
+                                       0,
+                                   },
+                                   {32, 32}},
+                                  0,
+                                  nullptr};
+
+    VkImageMemoryBarrier img_barrier = {};
+    img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+    img_barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+    img_barrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+    img_barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    img_barrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    img_barrier.image = image.handle(); /* barrier references image from attachment index 0 */
+    img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    img_barrier.subresourceRange.baseArrayLayer = 0;
+    img_barrier.subresourceRange.baseMipLevel = 0;
+    img_barrier.subresourceRange.layerCount = 1;
+    img_barrier.subresourceRange.levelCount = 1;
+    m_commandBuffer->begin();
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-image-02635");
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+                           VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1,
+                           &img_barrier);
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyFramebuffer(m_device->device(), fb, nullptr);
+    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+}
+
+TEST_F(VkLayerTest, RenderPassCreateAttachmentIndexOutOfRange) {
+    // Check for VK_KHR_get_physical_device_properties2
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    // The render pass has no attachments, but the subpass references attachment 0.
+    VkAttachmentReference ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+    VkSubpassDescription subpasses[] = {
+        {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &ref, nullptr, nullptr, 0, nullptr},
+    };
+
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 0, nullptr, 1, subpasses, 0, nullptr};
+
+    // "... must be less than the total number of attachments ..."
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkRenderPassCreateInfo-attachment-00834",
+                         "VUID-VkRenderPassCreateInfo2KHR-attachment-03051");
+}
+
+TEST_F(VkLayerTest, RenderPassCreateAttachmentReadOnlyButCleared) {
+    // Check for VK_KHR_get_physical_device_properties2
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+    bool maintenance2Supported = rp2Supported;
+
+    // Check for VK_KHR_maintenance2
+    if (!rp2Supported && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE2_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
+        maintenance2Supported = true;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    // VK_KHR_maintenance2 was promoted to core in Vulkan 1.1.
+    if (m_device->props.apiVersion >= VK_API_VERSION_1_1) {
+        maintenance2Supported = true;
+    }
+
+    VkAttachmentDescription description = {0,
+                                           VK_FORMAT_D32_SFLOAT_S8_UINT,
+                                           VK_SAMPLE_COUNT_1_BIT,
+                                           VK_ATTACHMENT_LOAD_OP_CLEAR,
+                                           VK_ATTACHMENT_STORE_OP_DONT_CARE,
+                                           VK_ATTACHMENT_LOAD_OP_CLEAR,
+                                           VK_ATTACHMENT_STORE_OP_DONT_CARE,
+                                           VK_IMAGE_LAYOUT_GENERAL,
+                                           VK_IMAGE_LAYOUT_GENERAL};
+
+    VkAttachmentReference depth_stencil_ref = {0, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL};
+
+    VkSubpassDescription subpass = {0,      VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, &depth_stencil_ref, 0,
+                                    nullptr};
+
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &description, 1, &subpass, 0, nullptr};
+
+    // VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL but depth cleared
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkRenderPassCreateInfo-pAttachments-00836",
+                         "VUID-VkRenderPassCreateInfo2KHR-pAttachments-02522");
+
+    if (maintenance2Supported) {
+        // VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL but depth cleared
+        depth_stencil_ref.layout = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL;
+
+        TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                             "VUID-VkRenderPassCreateInfo-pAttachments-01566", nullptr);
+
+        // VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL but depth cleared
+        depth_stencil_ref.layout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL;
+
+        TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                             "VUID-VkRenderPassCreateInfo-pAttachments-01567", nullptr);
+    }
+}
+
+TEST_F(VkLayerTest, RenderPassCreateAttachmentMismatchingLayoutsColor) {
+    TEST_DESCRIPTION("Attachment is used simultaneously as two color attachments with different layouts.");
+
+    // Check for VK_KHR_get_physical_device_properties2
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    VkAttachmentDescription attach[] = {
+        {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+         VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_UNDEFINED,
+         VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+    };
+    VkAttachmentReference refs[] = {
+        {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+        {0, VK_IMAGE_LAYOUT_GENERAL},
+    };
+    VkSubpassDescription subpasses[] = {
+        {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 2, refs, nullptr, nullptr, 0, nullptr},
+    };
+
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attach, 1, subpasses, 0, nullptr};
+
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                         "subpass 0 already uses attachment 0 with a different image layout",
+                         "subpass 0 already uses attachment 0 with a different image layout");
+}
+
+TEST_F(VkLayerTest, RenderPassCreateAttachmentDescriptionInvalidFinalLayout) {
+    TEST_DESCRIPTION("VkAttachmentDescription's finalLayout must not be UNDEFINED or PREINITIALIZED");
+
+    // Check for VK_KHR_get_physical_device_properties2
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME);
+    }
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    auto separate_depth_stencil_layouts_features = lvl_init_struct<VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR>();
+    auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&separate_depth_stencil_layouts_features);
+    if (vkGetPhysicalDeviceFeatures2KHR) {
+        vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+    } else {
+        separate_depth_stencil_layouts_features.separateDepthStencilLayouts = VK_FALSE;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, (vkGetPhysicalDeviceFeatures2KHR) ? &features2 : nullptr));
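+    // Whether the per-aspect (separate depth/stencil) layout cases below run depends on the
+    // separateDepthStencilLayouts feature queried above.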
+
+    VkAttachmentDescription attach_desc = {};
+    attach_desc.format = VK_FORMAT_R8G8B8A8_UNORM;
+    attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
+    attach_desc.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
+    attach_desc.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
+    attach_desc.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
+    attach_desc.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
+    attach_desc.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    attach_desc.finalLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    VkAttachmentReference attach_ref = {};
+    attach_ref.attachment = 0;
+    attach_ref.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    VkSubpassDescription subpass = {};
+    subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
+    subpass.colorAttachmentCount = 1;
+    subpass.pColorAttachments = &attach_ref;
+    VkRenderPassCreateInfo rpci = {};
+    rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+    rpci.attachmentCount = 1;
+    rpci.pAttachments = &attach_desc;
+    rpci.subpassCount = 1;
+    rpci.pSubpasses = &subpass;
+
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkAttachmentDescription-finalLayout-00843",
+                         "VUID-VkAttachmentDescription2KHR-finalLayout-03061");
+
+    attach_desc.finalLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkAttachmentDescription-finalLayout-00843",
+                         "VUID-VkAttachmentDescription2KHR-finalLayout-03061");
+
+    attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+
+    auto depth_format = FindSupportedDepthOnlyFormat(gpu());
+    auto stencil_format = FindSupportedStencilOnlyFormat(gpu());
+    auto depth_stencil_format = FindSupportedDepthStencilFormat(gpu());
+
+    if (separate_depth_stencil_layouts_features.separateDepthStencilLayouts) {
+        attach_desc.initialLayout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR;
+        TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkAttachmentDescription-format-03286",
+                             "VUID-VkAttachmentDescription2KHR-format-03300");
+        attach_desc.initialLayout = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR;
+        TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkAttachmentDescription-format-03286",
+                             "VUID-VkAttachmentDescription2KHR-format-03300");
+        attach_desc.initialLayout = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR;
+        TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkAttachmentDescription-format-03286",
+                             "VUID-VkAttachmentDescription2KHR-format-03300");
+        attach_desc.initialLayout = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR;
+        TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkAttachmentDescription-format-03286",
+                             "VUID-VkAttachmentDescription2KHR-format-03300");
+
+        attach_desc.initialLayout = VK_IMAGE_LAYOUT_GENERAL;
+
+        attach_desc.finalLayout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR;
+        TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkAttachmentDescription-format-03287",
+                             "VUID-VkAttachmentDescription2KHR-format-03301");
+        attach_desc.finalLayout = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR;
+        TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkAttachmentDescription-format-03287",
+                             "VUID-VkAttachmentDescription2KHR-format-03301");
+        attach_desc.finalLayout = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR;
+        TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkAttachmentDescription-format-03287",
+                             "VUID-VkAttachmentDescription2KHR-format-03301");
+        attach_desc.finalLayout = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR;
+        TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkAttachmentDescription-format-03287",
+                             "VUID-VkAttachmentDescription2KHR-format-03301");
+
+        attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+
+        if (depth_stencil_format) {
+            attach_desc.format = depth_stencil_format;
+
+            if (rp2Supported) {
+                safe_VkRenderPassCreateInfo2KHR rpci2;
+
+                attach_desc.initialLayout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR;
+                ConvertVkRenderPassCreateInfoToV2KHR(rpci, &rpci2);
+                TestRenderPass2KHRCreate(m_errorMonitor, m_device->device(), rpci2.ptr(),
+                                         "VUID-VkAttachmentDescription2KHR-format-03302");
+                attach_desc.initialLayout = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR;
+                ConvertVkRenderPassCreateInfoToV2KHR(rpci, &rpci2);
+                TestRenderPass2KHRCreate(m_errorMonitor, m_device->device(), rpci2.ptr(),
+                                         "VUID-VkAttachmentDescription2KHR-format-03302");
+            } else {
+                attach_desc.initialLayout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR;
+                TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                                     "VUID-VkAttachmentDescription-format-03288", "VUID-VkAttachmentDescription2KHR-format-03302");
+                attach_desc.initialLayout = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR;
+                TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                                     "VUID-VkAttachmentDescription-format-03288", "VUID-VkAttachmentDescription2KHR-format-03302");
+                attach_desc.initialLayout = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR;
+                TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                                     "VUID-VkAttachmentDescription-format-03288", "VUID-VkAttachmentDescription2KHR-format-03302");
+                attach_desc.initialLayout = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR;
+                TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                                     "VUID-VkAttachmentDescription-format-03288", "VUID-VkAttachmentDescription2KHR-format-03302");
+            }
+
+            attach_desc.initialLayout = VK_IMAGE_LAYOUT_GENERAL;
+
+            if (rp2Supported) {
+                safe_VkRenderPassCreateInfo2KHR rpci2;
+
+                attach_desc.finalLayout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR;
+                ConvertVkRenderPassCreateInfoToV2KHR(rpci, &rpci2);
+                TestRenderPass2KHRCreate(m_errorMonitor, m_device->device(), rpci2.ptr(),
+                                         "VUID-VkAttachmentDescription2KHR-format-03303");
+                attach_desc.finalLayout = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR;
+                ConvertVkRenderPassCreateInfoToV2KHR(rpci, &rpci2);
+                TestRenderPass2KHRCreate(m_errorMonitor, m_device->device(), rpci2.ptr(),
+                                         "VUID-VkAttachmentDescription2KHR-format-03303");
+            } else {
+                attach_desc.finalLayout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR;
+                TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                                     "VUID-VkAttachmentDescription-format-03289", "VUID-VkAttachmentDescription2KHR-format-03303");
+                attach_desc.finalLayout = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR;
+                TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                                     "VUID-VkAttachmentDescription-format-03289", "VUID-VkAttachmentDescription2KHR-format-03303");
+                attach_desc.finalLayout = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR;
+                TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                                     "VUID-VkAttachmentDescription-format-03289", "VUID-VkAttachmentDescription2KHR-format-03303");
+                attach_desc.finalLayout = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR;
+                TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                                     "VUID-VkAttachmentDescription-format-03289", "VUID-VkAttachmentDescription2KHR-format-03303");
+            }
+
+            attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+        }
+
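+        // A depth-only format must not use the stencil-only layouts, and a stencil-only format must not use the depth-only layouts.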
+        if (depth_format) {
+            attach_desc.format = depth_format;
+
+            attach_desc.initialLayout = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR;
+            TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                                 "VUID-VkAttachmentDescription-format-03290", "VUID-VkAttachmentDescription2KHR-format-03304");
+            attach_desc.initialLayout = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR;
+            TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                                 "VUID-VkAttachmentDescription-format-03290", "VUID-VkAttachmentDescription2KHR-format-03304");
+
+            attach_desc.initialLayout = VK_IMAGE_LAYOUT_GENERAL;
+
+            attach_desc.finalLayout = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR;
+            TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                                 "VUID-VkAttachmentDescription-format-03291", "VUID-VkAttachmentDescription2KHR-format-03305");
+            attach_desc.finalLayout = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR;
+            TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                                 "VUID-VkAttachmentDescription-format-03291", "VUID-VkAttachmentDescription2KHR-format-03305");
+
+            attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+        }
+
+        if (stencil_format) {
+            attach_desc.format = stencil_format;
+
+            attach_desc.initialLayout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR;
+            TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                                 "VUID-VkAttachmentDescription-format-03292", "VUID-VkAttachmentDescription2KHR-format-03306");
+            attach_desc.initialLayout = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR;
+            TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                                 "VUID-VkAttachmentDescription-format-03292", "VUID-VkAttachmentDescription2KHR-format-03306");
+
+            attach_desc.initialLayout = VK_IMAGE_LAYOUT_GENERAL;
+
+            attach_desc.finalLayout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR;
+            TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                                 "VUID-VkAttachmentDescription-format-03293", "VUID-VkAttachmentDescription2KHR-format-03307");
+            attach_desc.finalLayout = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR;
+            TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                                 "VUID-VkAttachmentDescription-format-03293", "VUID-VkAttachmentDescription2KHR-format-03307");
+
+            attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+        }
+
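+        // With VK_KHR_separate_depth_stencil_layouts, the stencil layouts in VkAttachmentDescriptionStencilLayoutKHR must not be
+        // color, depth-only, or combined depth/stencil layouts, and stencilFinalLayout must not be UNDEFINED or PREINITIALIZED.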
+        if (rp2Supported && depth_stencil_format) {
+            attach_desc.format = depth_stencil_format;
+            attach_desc.initialLayout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR;
+
+            auto attachment_description_stencil_layout = lvl_init_struct<VkAttachmentDescriptionStencilLayoutKHR>();
+            attachment_description_stencil_layout.stencilInitialLayout = VK_IMAGE_LAYOUT_GENERAL;
+            attachment_description_stencil_layout.stencilFinalLayout = VK_IMAGE_LAYOUT_GENERAL;
+            safe_VkRenderPassCreateInfo2KHR rpci2;
+            ConvertVkRenderPassCreateInfoToV2KHR(rpci, &rpci2);
+            rpci2.pAttachments[0].pNext = &attachment_description_stencil_layout;
+
+            VkImageLayout forbidden_layouts[] = {
+                VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+                VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR,
+                VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR,
+                VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
+                VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
+                VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL,
+                VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL,
+            };
+            auto forbidden_layouts_array_size = sizeof(forbidden_layouts) / sizeof(forbidden_layouts[0]);
+
+            for (size_t i = 0; i < forbidden_layouts_array_size; ++i) {
+                attachment_description_stencil_layout.stencilInitialLayout = forbidden_layouts[i];
+                TestRenderPass2KHRCreate(m_errorMonitor, m_device->device(), rpci2.ptr(),
+                                         "VUID-VkAttachmentDescriptionStencilLayoutKHR-stencilInitialLayout-03308");
+            }
+            attachment_description_stencil_layout.stencilInitialLayout = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR;
+            for (size_t i = 0; i < forbidden_layouts_array_size; ++i) {
+                attachment_description_stencil_layout.stencilFinalLayout = forbidden_layouts[i];
+                TestRenderPass2KHRCreate(m_errorMonitor, m_device->device(), rpci2.ptr(),
+                                         "VUID-VkAttachmentDescriptionStencilLayoutKHR-stencilFinalLayout-03309");
+            }
+            attachment_description_stencil_layout.stencilFinalLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+            TestRenderPass2KHRCreate(m_errorMonitor, m_device->device(), rpci2.ptr(),
+                                     "VUID-VkAttachmentDescriptionStencilLayoutKHR-stencilFinalLayout-03310");
+            attachment_description_stencil_layout.stencilFinalLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
+            TestRenderPass2KHRCreate(m_errorMonitor, m_device->device(), rpci2.ptr(),
+                                     "VUID-VkAttachmentDescriptionStencilLayoutKHR-stencilFinalLayout-03310");
+
+            rpci2.pAttachments[0].pNext = nullptr;
+        }
+    } else {
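+        // Without the separateDepthStencilLayouts feature enabled, the depth-only and stencil-only layouts are rejected outright.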
+        if (depth_format) {
+            attach_desc.format = depth_format;
+
+            attach_desc.initialLayout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR;
+            TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                                 "VUID-VkAttachmentDescription-separateDepthStencilLayouts-03284",
+                                 "VUID-VkAttachmentDescription2KHR-separateDepthStencilLayouts-03298");
+            attach_desc.initialLayout = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR;
+            TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                                 "VUID-VkAttachmentDescription-separateDepthStencilLayouts-03284",
+                                 "VUID-VkAttachmentDescription2KHR-separateDepthStencilLayouts-03298");
+
+            attach_desc.initialLayout = VK_IMAGE_LAYOUT_GENERAL;
+
+            attach_desc.finalLayout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR;
+            TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                                 "VUID-VkAttachmentDescription-separateDepthStencilLayouts-03285",
+                                 "VUID-VkAttachmentDescription2KHR-separateDepthStencilLayouts-03299");
+            attach_desc.finalLayout = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR;
+            TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                                 "VUID-VkAttachmentDescription-separateDepthStencilLayouts-03285",
+                                 "VUID-VkAttachmentDescription2KHR-separateDepthStencilLayouts-03299");
+
+            attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+        }
+        if (stencil_format) {
+            attach_desc.format = stencil_format;
+
+            attach_desc.initialLayout = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR;
+            TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                                 "VUID-VkAttachmentDescription-separateDepthStencilLayouts-03284",
+                                 "VUID-VkAttachmentDescription2KHR-separateDepthStencilLayouts-03298");
+            attach_desc.initialLayout = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR;
+            TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                                 "VUID-VkAttachmentDescription-separateDepthStencilLayouts-03284",
+                                 "VUID-VkAttachmentDescription2KHR-separateDepthStencilLayouts-03298");
+
+            attach_desc.initialLayout = VK_IMAGE_LAYOUT_GENERAL;
+
+            attach_desc.finalLayout = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR;
+            TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                                 "VUID-VkAttachmentDescription-separateDepthStencilLayouts-03285",
+                                 "VUID-VkAttachmentDescription2KHR-separateDepthStencilLayouts-03299");
+            attach_desc.finalLayout = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR;
+            TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                                 "VUID-VkAttachmentDescription-separateDepthStencilLayouts-03285",
+                                 "VUID-VkAttachmentDescription2KHR-separateDepthStencilLayouts-03299");
+
+            attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+        }
+    }
+}
+
+TEST_F(VkLayerTest, RenderPassCreateAttachmentsMisc) {
+    TEST_DESCRIPTION(
+        "Ensure that CreateRenderPass produces the expected validation errors when a subpass's attachments violate the valid usage "
+        "conditions.");
+
+    // Check for VK_KHR_get_physical_device_properties2
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    std::vector<VkAttachmentDescription> attachments = {
+        // input attachments
+        {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_4_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+         VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL},
+        // color attachments
+        {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_4_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+         VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+         VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+        {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_4_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+         VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+         VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+        // depth attachment
+        {0, VK_FORMAT_D24_UNORM_S8_UINT, VK_SAMPLE_COUNT_4_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+         VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
+         VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL},
+        // resolve attachment
+        {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+         VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+         VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+        // preserve attachments
+        {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_4_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+         VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+         VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+    };
+
+    std::vector<VkAttachmentReference> input = {
+        {0, VK_IMAGE_LAYOUT_GENERAL},
+    };
+    std::vector<VkAttachmentReference> color = {
+        {1, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+        {2, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+    };
+    VkAttachmentReference depth = {3, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
+    std::vector<VkAttachmentReference> resolve = {
+        {4, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+        {VK_ATTACHMENT_UNUSED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+    };
+    std::vector<uint32_t> preserve = {5};
+
+    VkSubpassDescription subpass = {0,
+                                    VK_PIPELINE_BIND_POINT_GRAPHICS,
+                                    (uint32_t)input.size(),
+                                    input.data(),
+                                    (uint32_t)color.size(),
+                                    color.data(),
+                                    resolve.data(),
+                                    &depth,
+                                    (uint32_t)preserve.size(),
+                                    preserve.data()};
+
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
+                                   nullptr,
+                                   0,
+                                   (uint32_t)attachments.size(),
+                                   attachments.data(),
+                                   1,
+                                   &subpass,
+                                   0,
+                                   nullptr};
+
+    // Test too many color attachments
+    const uint32_t max_color_attachments = m_device->props.limits.maxColorAttachments;
+    const uint32_t too_big_max_attachments = 65536 + 1;  // let's say this is too much to allocate
+    if (max_color_attachments >= too_big_max_attachments) {
+        printf(
+            "%s VkPhysicalDeviceLimits::maxColorAttachments is too large to practically test against -- skipping part of test.\n",
+            kSkipPrefix);
+    } else {
+        std::vector<VkAttachmentReference> too_many_colors(max_color_attachments + 1, color[0]);
+        VkSubpassDescription test_subpass = subpass;
+        test_subpass.colorAttachmentCount = (uint32_t)too_many_colors.size();
+        test_subpass.pColorAttachments = too_many_colors.data();
+        test_subpass.pResolveAttachments = NULL;
+        VkRenderPassCreateInfo test_rpci = rpci;
+        test_rpci.pSubpasses = &test_subpass;
+
+        TestRenderPassCreate(m_errorMonitor, m_device->device(), &test_rpci, rp2Supported,
+                             "VUID-VkSubpassDescription-colorAttachmentCount-00845",
+                             "VUID-VkSubpassDescription2KHR-colorAttachmentCount-03063");
+    }
+
+    // Test sample count mismatch between color buffers
+    attachments[subpass.pColorAttachments[1].attachment].samples = VK_SAMPLE_COUNT_8_BIT;
+    depth.attachment = VK_ATTACHMENT_UNUSED;  // Avoids triggering 01418
+
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                         "VUID-VkSubpassDescription-pColorAttachments-01417",
+                         "VUID-VkSubpassDescription2KHR-pColorAttachments-03069");
+
+    depth.attachment = 3;
+    attachments[subpass.pColorAttachments[1].attachment].samples = attachments[subpass.pColorAttachments[0].attachment].samples;
+
+    // Test sample count mismatch between color buffers and depth buffer
+    attachments[subpass.pDepthStencilAttachment->attachment].samples = VK_SAMPLE_COUNT_8_BIT;
+    subpass.colorAttachmentCount = 1;
+
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                         "VUID-VkSubpassDescription-pDepthStencilAttachment-01418",
+                         "VUID-VkSubpassDescription2KHR-pDepthStencilAttachment-03071");
+
+    attachments[subpass.pDepthStencilAttachment->attachment].samples = attachments[subpass.pColorAttachments[0].attachment].samples;
+    subpass.colorAttachmentCount = (uint32_t)color.size();
+
+    // Test resolve attachment with UNUSED color attachment
+    color[0].attachment = VK_ATTACHMENT_UNUSED;
+
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                         "VUID-VkSubpassDescription-pResolveAttachments-00847",
+                         "VUID-VkSubpassDescription2KHR-pResolveAttachments-03065");
+
+    color[0].attachment = 1;
+
+    // Test resolve from a single-sampled color attachment
+    attachments[subpass.pColorAttachments[0].attachment].samples = VK_SAMPLE_COUNT_1_BIT;
+    subpass.colorAttachmentCount = 1;           // avoid mismatch (00337), and avoid double report
+    subpass.pDepthStencilAttachment = nullptr;  // avoid mismatch (01418)
+
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                         "VUID-VkSubpassDescription-pResolveAttachments-00848",
+                         "VUID-VkSubpassDescription2KHR-pResolveAttachments-03066");
+
+    attachments[subpass.pColorAttachments[0].attachment].samples = VK_SAMPLE_COUNT_4_BIT;
+    subpass.colorAttachmentCount = (uint32_t)color.size();
+    subpass.pDepthStencilAttachment = &depth;
+
+    // Test resolve to a multi-sampled resolve attachment
+    attachments[subpass.pResolveAttachments[0].attachment].samples = VK_SAMPLE_COUNT_4_BIT;
+
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                         "VUID-VkSubpassDescription-pResolveAttachments-00849",
+                         "VUID-VkSubpassDescription2KHR-pResolveAttachments-03067");
+
+    attachments[subpass.pResolveAttachments[0].attachment].samples = VK_SAMPLE_COUNT_1_BIT;
+
+    // Test with color/resolve format mismatch
+    attachments[subpass.pColorAttachments[0].attachment].format = VK_FORMAT_R8G8B8A8_SRGB;
+
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                         "VUID-VkSubpassDescription-pResolveAttachments-00850",
+                         "VUID-VkSubpassDescription2KHR-pResolveAttachments-03068");
+
+    attachments[subpass.pColorAttachments[0].attachment].format = attachments[subpass.pResolveAttachments[0].attachment].format;
+
+    // Test for UNUSED preserve attachments
+    preserve[0] = VK_ATTACHMENT_UNUSED;
+
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkSubpassDescription-attachment-00853",
+                         "VUID-VkSubpassDescription2KHR-attachment-03073");
+
+    preserve[0] = 5;
+    // Test for preserve attachments used elsewhere in the subpass
+    color[0].attachment = preserve[0];
+
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                         "VUID-VkSubpassDescription-pPreserveAttachments-00854",
+                         "VUID-VkSubpassDescription2KHR-pPreserveAttachments-03074");
+
+    color[0].attachment = 1;
+    input[0].attachment = 0;
+    input[0].layout = VK_IMAGE_LAYOUT_GENERAL;
+
+    // Test for attachment used first as input with loadOp=CLEAR
+    {
+        std::vector<VkSubpassDescription> subpasses = {subpass, subpass, subpass};
+        subpasses[0].inputAttachmentCount = 0;
+        subpasses[1].inputAttachmentCount = 0;
+        attachments[input[0].attachment].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
+        VkRenderPassCreateInfo rpci_multipass = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
+                                                 nullptr,
+                                                 0,
+                                                 (uint32_t)attachments.size(),
+                                                 attachments.data(),
+                                                 (uint32_t)subpasses.size(),
+                                                 subpasses.data(),
+                                                 0,
+                                                 nullptr};
+
+        TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci_multipass, rp2Supported,
+                             "VUID-VkSubpassDescription-loadOp-00846", "VUID-VkSubpassDescription2KHR-loadOp-03064");
+
+        attachments[input[0].attachment].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
+    }
+}
+
+TEST_F(VkLayerTest, RenderPassCreateAttachmentReferenceInvalidLayout) {
+    TEST_DESCRIPTION("Attachment reference uses PREINITIALIZED or UNDEFINED layouts");
+
+    // Check for VK_KHR_get_physical_device_properties2
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME);
+    }
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    auto separate_depth_stencil_layouts_features = lvl_init_struct<VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR>();
+    auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&separate_depth_stencil_layouts_features);
+    if (vkGetPhysicalDeviceFeatures2KHR) {
+        vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+    } else {
+        separate_depth_stencil_layouts_features.separateDepthStencilLayouts = VK_FALSE;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, (vkGetPhysicalDeviceFeatures2KHR) ? &features2 : nullptr));
+
+    VkAttachmentDescription attach[] = {
+        {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+         VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_UNDEFINED,
+         VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+    };
+    VkAttachmentReference refs[] = {
+        {0, VK_IMAGE_LAYOUT_UNDEFINED},
+    };
+    VkSubpassDescription subpasses[] = {
+        {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, refs, nullptr, nullptr, 0, nullptr},
+    };
+
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attach, 1, subpasses, 0, nullptr};
+
+    // Use UNDEFINED layout
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkAttachmentReference-layout-00857",
+                         "VUID-VkAttachmentReference2KHR-layout-03077");
+
+    // Use PREINITIALIZED layout
+    refs[0].layout = VK_IMAGE_LAYOUT_PREINITIALIZED;
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkAttachmentReference-layout-00857",
+                         "VUID-VkAttachmentReference2KHR-layout-03077");
+
+    if (rp2Supported) {
+        safe_VkRenderPassCreateInfo2KHR rpci2;
+        ConvertVkRenderPassCreateInfoToV2KHR(rpci, &rpci2);
+
+        if (separate_depth_stencil_layouts_features.separateDepthStencilLayouts) {
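+            // With separateDepthStencilLayouts enabled, exercise aspectMask/layout combinations that are invalid for a reference.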
+            rpci2.pSubpasses[0].pColorAttachments[0].aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+            rpci2.pSubpasses[0].pColorAttachments[0].layout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR;
+            TestRenderPass2KHRCreate(m_errorMonitor, m_device->device(), rpci2.ptr(),
+                                     "VUID-VkAttachmentReference2KHR-attachment-03314");
+            rpci2.pSubpasses[0].pColorAttachments[0].layout = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR;
+            TestRenderPass2KHRCreate(m_errorMonitor, m_device->device(), rpci2.ptr(),
+                                     "VUID-VkAttachmentReference2KHR-attachment-03314");
+            rpci2.pSubpasses[0].pColorAttachments[0].layout = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR;
+            TestRenderPass2KHRCreate(m_errorMonitor, m_device->device(), rpci2.ptr(),
+                                     "VUID-VkAttachmentReference2KHR-attachment-03314");
+            rpci2.pSubpasses[0].pColorAttachments[0].layout = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR;
+            TestRenderPass2KHRCreate(m_errorMonitor, m_device->device(), rpci2.ptr(),
+                                     "VUID-VkAttachmentReference2KHR-attachment-03314");
+
+            rpci2.pSubpasses[0].pColorAttachments[0].aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+
+            rpci2.pSubpasses[0].pColorAttachments[0].layout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR;
+            TestRenderPass2KHRCreate(m_errorMonitor, m_device->device(), rpci2.ptr(),
+                                     "VUID-VkAttachmentReference2KHR-attachment-03315");
+            rpci2.pSubpasses[0].pColorAttachments[0].layout = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR;
+            TestRenderPass2KHRCreate(m_errorMonitor, m_device->device(), rpci2.ptr(),
+                                     "VUID-VkAttachmentReference2KHR-attachment-03315");
+
+            rpci2.pSubpasses[0].pColorAttachments[0].aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+
+            rpci2.pSubpasses[0].pColorAttachments[0].layout = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR;
+            TestRenderPass2KHRCreate(m_errorMonitor, m_device->device(), rpci2.ptr(),
+                                     "VUID-VkAttachmentReference2KHR-attachment-03315");
+            rpci2.pSubpasses[0].pColorAttachments[0].layout = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR;
+            TestRenderPass2KHRCreate(m_errorMonitor, m_device->device(), rpci2.ptr(),
+                                     "VUID-VkAttachmentReference2KHR-attachment-03315");
+
+            rpci2.pSubpasses[0].pColorAttachments[0].aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
+
+            rpci2.pSubpasses[0].pColorAttachments[0].layout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR;
+            TestRenderPass2KHRCreate(m_errorMonitor, m_device->device(), rpci2.ptr(),
+                                     "VUID-VkAttachmentReference2KHR-attachment-03317");
+            rpci2.pSubpasses[0].pColorAttachments[0].layout = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR;
+            TestRenderPass2KHRCreate(m_errorMonitor, m_device->device(), rpci2.ptr(),
+                                     "VUID-VkAttachmentReference2KHR-attachment-03317");
+
+            auto attachment_reference_stencil_layout = lvl_init_struct<VkAttachmentReferenceStencilLayoutKHR>();
+            rpci2.pSubpasses[0].pColorAttachments[0].pNext = &attachment_reference_stencil_layout;
+
+            VkImageLayout forbidden_layouts[] = {VK_IMAGE_LAYOUT_PREINITIALIZED,
+                                                 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+                                                 VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR,
+                                                 VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR,
+                                                 VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
+                                                 VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
+                                                 VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL,
+                                                 VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL,
+                                                 VK_IMAGE_LAYOUT_PRESENT_SRC_KHR};
+            rpci2.pSubpasses[0].pColorAttachments[0].aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+            rpci2.pSubpasses[0].pColorAttachments[0].layout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR;
+            for (size_t i = 0; i < (sizeof(forbidden_layouts) / sizeof(forbidden_layouts[0])); ++i) {
+                attachment_reference_stencil_layout.stencilLayout = forbidden_layouts[i];
+                TestRenderPass2KHRCreate(m_errorMonitor, m_device->device(), rpci2.ptr(),
+                                         "VUID-VkAttachmentReferenceStencilLayoutKHR-stencilLayout-03318");
+            }
+
+            rpci2.pSubpasses[0].pColorAttachments[0].pNext = nullptr;
+        } else {
+            rpci2.pSubpasses[0].pColorAttachments[0].layout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR;
+            TestRenderPass2KHRCreate(m_errorMonitor, m_device->device(), rpci2.ptr(),
+                                     "VUID-VkAttachmentReference2KHR-separateDepthStencilLayouts-03313");
+            rpci2.pSubpasses[0].pColorAttachments[0].layout = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR;
+            TestRenderPass2KHRCreate(m_errorMonitor, m_device->device(), rpci2.ptr(),
+                                     "VUID-VkAttachmentReference2KHR-separateDepthStencilLayouts-03313");
+            rpci2.pSubpasses[0].pColorAttachments[0].layout = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR;
+            TestRenderPass2KHRCreate(m_errorMonitor, m_device->device(), rpci2.ptr(),
+                                     "VUID-VkAttachmentReference2KHR-separateDepthStencilLayouts-03313");
+            rpci2.pSubpasses[0].pColorAttachments[0].layout = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR;
+            TestRenderPass2KHRCreate(m_errorMonitor, m_device->device(), rpci2.ptr(),
+                                     "VUID-VkAttachmentReference2KHR-separateDepthStencilLayouts-03313");
+        }
+    }
+}
+
+TEST_F(VkLayerTest, RenderPassCreateOverlappingCorrelationMasks) {
+    TEST_DESCRIPTION("Create a subpass with overlapping correlation masks");
+
+    // Check for VK_KHR_get_physical_device_properties2
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+
+    if (!rp2Supported) {
+        if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MULTIVIEW_EXTENSION_NAME)) {
+            m_device_extension_names.push_back(VK_KHR_MULTIVIEW_EXTENSION_NAME);
+        } else {
+            printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_MULTIVIEW_EXTENSION_NAME);
+            return;
+        }
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, nullptr, 0, nullptr};
+    uint32_t viewMasks[] = {0x3u};
+    uint32_t correlationMasks[] = {0x1u, 0x3u};
+    VkRenderPassMultiviewCreateInfo rpmvci = {
+        VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, nullptr, 1, viewMasks, 0, nullptr, 2, correlationMasks};
+
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, &rpmvci, 0, 0, nullptr, 1, &subpass, 0, nullptr};
+
+    // Correlation masks must not overlap
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                         "VUID-VkRenderPassMultiviewCreateInfo-pCorrelationMasks-00841",
+                         "VUID-VkRenderPassCreateInfo2KHR-pCorrelatedViewMasks-03056");
+
+    // Check the more specific rule that correlation masks must not be set when multiview is not enabled
+    if (rp2Supported) {
+        viewMasks[0] = 0;
+        correlationMasks[0] = 0;
+        correlationMasks[1] = 0;
+        safe_VkRenderPassCreateInfo2KHR safe_rpci2;
+        ConvertVkRenderPassCreateInfoToV2KHR(rpci, &safe_rpci2);
+
+        TestRenderPass2KHRCreate(m_errorMonitor, m_device->device(), safe_rpci2.ptr(),
+                                 "VUID-VkRenderPassCreateInfo2KHR-viewMask-03057");
+    }
+}
+
+TEST_F(VkLayerTest, RenderPassCreateInvalidViewMasks) {
+    TEST_DESCRIPTION("Create a subpass with the wrong number of view masks, or inconsistent setting of view masks");
+
+    // Check for VK_KHR_get_physical_device_properties2
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+
+    if (!rp2Supported) {
+        if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MULTIVIEW_EXTENSION_NAME)) {
+            m_device_extension_names.push_back(VK_KHR_MULTIVIEW_EXTENSION_NAME);
+        } else {
+            printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_MULTIVIEW_EXTENSION_NAME);
+            return;
+        }
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    VkSubpassDescription subpasses[] = {
+        {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, nullptr, 0, nullptr},
+        {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, nullptr, 0, nullptr},
+    };
+    uint32_t viewMasks[] = {0x3u, 0u};
+    VkRenderPassMultiviewCreateInfo rpmvci = {
+        VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, nullptr, 1, viewMasks, 0, nullptr, 0, nullptr};
+
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, &rpmvci, 0, 0, nullptr, 2, subpasses, 0, nullptr};
+
+    // Not enough view masks
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkRenderPassCreateInfo-pNext-01928",
+                         "VUID-VkRenderPassCreateInfo2KHR-viewMask-03058");
+}
+
+TEST_F(VkLayerTest, RenderPassCreateInvalidInputAttachmentReferences) {
+    TEST_DESCRIPTION("Create a subpass with the meta data aspect mask set for an input attachment");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE2_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_MAINTENANCE2_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    VkAttachmentDescription attach = {0,
+                                      VK_FORMAT_R8G8B8A8_UNORM,
+                                      VK_SAMPLE_COUNT_1_BIT,
+                                      VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+                                      VK_ATTACHMENT_STORE_OP_DONT_CARE,
+                                      VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+                                      VK_ATTACHMENT_STORE_OP_DONT_CARE,
+                                      VK_IMAGE_LAYOUT_UNDEFINED,
+                                      VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL};
+    VkAttachmentReference ref = {0, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL};
+
+    VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 1, &ref, 0, nullptr, nullptr, nullptr, 0, nullptr};
+    VkInputAttachmentAspectReference iaar = {0, 0, VK_IMAGE_ASPECT_METADATA_BIT};
+    VkRenderPassInputAttachmentAspectCreateInfo rpiaaci = {VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO,
+                                                           nullptr, 1, &iaar};
+
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, &rpiaaci, 0, 1, &attach, 1, &subpass, 0, nullptr};
+
+    // Invalid meta data aspect
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkRenderPassCreateInfo-pNext-01963");  // Cannot/should not avoid getting this one too
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, false, "VUID-VkInputAttachmentAspectReference-aspectMask-01964",
+                         nullptr);
+
+    // Aspect not present
+    iaar.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, false, "VUID-VkRenderPassCreateInfo-pNext-01963", nullptr);
+
+    // Invalid subpass index
+    iaar.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    iaar.subpass = 1;
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, false, "VUID-VkRenderPassCreateInfo-pNext-01926", nullptr);
+    iaar.subpass = 0;
+
+    // Invalid input attachment index
+    iaar.inputAttachmentIndex = 1;
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, false, "VUID-VkRenderPassCreateInfo-pNext-01927", nullptr);
+}
+
+TEST_F(VkLayerTest, RenderPassCreateInvalidFragmentDensityMapReferences) {
+    TEST_DESCRIPTION("Create a subpass with the wrong attachment information for a fragment density map ");
+
+    // Check for VK_KHR_get_physical_device_properties2
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    VkAttachmentDescription attach = {0,
+                                      VK_FORMAT_R8G8_UNORM,
+                                      VK_SAMPLE_COUNT_1_BIT,
+                                      VK_ATTACHMENT_LOAD_OP_LOAD,
+                                      VK_ATTACHMENT_STORE_OP_DONT_CARE,
+                                      VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+                                      VK_ATTACHMENT_STORE_OP_DONT_CARE,
+                                      VK_IMAGE_LAYOUT_UNDEFINED,
+                                      VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT};
+    // Set 1 instead of 0
+    VkAttachmentReference ref = {1, VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT};
+    VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 1, &ref, 0, nullptr, nullptr, nullptr, 0, nullptr};
+    VkRenderPassFragmentDensityMapCreateInfoEXT rpfdmi = {VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT,
+                                                          nullptr, ref};
+
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, &rpfdmi, 0, 1, &attach, 1, &subpass, 0, nullptr};
+
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, false,
+                         "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02547", nullptr);
+
+    // Set wrong VkImageLayout
+    ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+    subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 1, &ref, 0, nullptr, nullptr, nullptr, 0, nullptr};
+    rpfdmi = {VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT, nullptr, ref};
+    rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, &rpfdmi, 0, 1, &attach, 1, &subpass, 0, nullptr};
+
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, false,
+                         "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02549", nullptr);
+
+    // Set wrong load operation
+    attach = {0,
+              VK_FORMAT_R8G8_UNORM,
+              VK_SAMPLE_COUNT_1_BIT,
+              VK_ATTACHMENT_LOAD_OP_CLEAR,
+              VK_ATTACHMENT_STORE_OP_DONT_CARE,
+              VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+              VK_ATTACHMENT_STORE_OP_DONT_CARE,
+              VK_IMAGE_LAYOUT_UNDEFINED,
+              VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT};
+
+    ref = {0, VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT};
+    subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 1, &ref, 0, nullptr, nullptr, nullptr, 0, nullptr};
+    rpfdmi = {VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT, nullptr, ref};
+    rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, &rpfdmi, 0, 1, &attach, 1, &subpass, 0, nullptr};
+
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, false,
+                         "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02550", nullptr);
+
+    // Set wrong store operation
+    attach = {0,
+              VK_FORMAT_R8G8_UNORM,
+              VK_SAMPLE_COUNT_1_BIT,
+              VK_ATTACHMENT_LOAD_OP_LOAD,
+              VK_ATTACHMENT_STORE_OP_STORE,
+              VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+              VK_ATTACHMENT_STORE_OP_DONT_CARE,
+              VK_IMAGE_LAYOUT_UNDEFINED,
+              VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT};
+
+    ref = {0, VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT};
+    subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 1, &ref, 0, nullptr, nullptr, nullptr, 0, nullptr};
+    rpfdmi = {VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT, nullptr, ref};
+    rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, &rpfdmi, 0, 1, &attach, 1, &subpass, 0, nullptr};
+
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, false,
+                         "VUID-VkRenderPassFragmentDensityMapCreateInfoEXT-fragmentDensityMapAttachment-02551", nullptr);
+}
+
+TEST_F(VkLayerTest, RenderPassCreateSubpassNonGraphicsPipeline) {
+    TEST_DESCRIPTION("Create a subpass with the compute pipeline bind point");
+    // Check for VK_KHR_get_physical_device_properties2
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    VkSubpassDescription subpasses[] = {
+        {0, VK_PIPELINE_BIND_POINT_COMPUTE, 0, nullptr, 0, nullptr, nullptr, nullptr, 0, nullptr},
+    };
+
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 0, nullptr, 1, subpasses, 0, nullptr};
+
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                         "VUID-VkSubpassDescription-pipelineBindPoint-00844",
+                         "VUID-VkSubpassDescription2KHR-pipelineBindPoint-03062");
+}
+
+TEST_F(VkLayerTest, RenderPassCreateSubpassMissingAttributesBitMultiviewNVX) {
+    TEST_DESCRIPTION("Create a subpass with the VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX flag missing");
+
+    // Check for VK_KHR_get_physical_device_properties2
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME) &&
+        DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MULTIVIEW_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_MULTIVIEW_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME);
+        return;
+    }
+
+    bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    VkSubpassDescription subpasses[] = {
+        {VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr,
+         nullptr, 0, nullptr},
+    };
+
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 0, nullptr, 1, subpasses, 0, nullptr};
+
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported, "VUID-VkSubpassDescription-flags-00856",
+                         "VUID-VkSubpassDescription2KHR-flags-03076");
+}
+
+TEST_F(VkLayerTest, RenderPassCreate2SubpassInvalidInputAttachmentParameters) {
+    TEST_DESCRIPTION("Create a subpass with parameters in the input attachment ref which are invalid");
+
+    // Check for VK_KHR_get_physical_device_properties2
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+
+    if (!rp2Supported) {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    VkAttachmentDescription2KHR attach_desc = {};
+    attach_desc.sType = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR;
+    attach_desc.format = VK_FORMAT_UNDEFINED;
+    attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
+    attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+
+    VkAttachmentReference2KHR reference = {};
+    reference.sType = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR;
+    reference.layout = VK_IMAGE_LAYOUT_GENERAL;
+    reference.aspectMask = 0;
+
+    VkSubpassDescription2KHR subpass = {VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR,
+                                        nullptr,
+                                        0,
+                                        VK_PIPELINE_BIND_POINT_GRAPHICS,
+                                        0,
+                                        1,
+                                        &reference,
+                                        0,
+                                        nullptr,
+                                        nullptr,
+                                        nullptr,
+                                        0,
+                                        nullptr};
+
+    VkRenderPassCreateInfo2KHR rpci2 = {
+        VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR, nullptr, 0, 1, &attach_desc, 1, &subpass, 0, nullptr, 0, nullptr};
+
+    // Test for aspect mask of 0
+    TestRenderPass2KHRCreate(m_errorMonitor, m_device->device(), &rpci2, "VUID-VkSubpassDescription2KHR-attachment-02800");
+
+    // Test for invalid aspect mask bits
+    reference.aspectMask = 0x40000000;  // invalid VkImageAspectFlagBits value
+    TestRenderPass2KHRCreate(m_errorMonitor, m_device->device(), &rpci2, "VUID-VkSubpassDescription2KHR-attachment-02799");
+}
+
+TEST_F(VkLayerTest, RenderPassCreateInvalidSubpassDependencies) {
+    // Check for VK_KHR_get_physical_device_properties2
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    bool rp2_supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+    bool multiviewSupported = rp2_supported;
+
+    if (!rp2_supported && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MULTIVIEW_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_MULTIVIEW_EXTENSION_NAME);
+        multiviewSupported = true;
+    }
+
+    // Add a device features struct enabling NO features
+    VkPhysicalDeviceFeatures features = {0};
+    ASSERT_NO_FATAL_FAILURE(InitState(&features));
+
+    if (m_device->props.apiVersion >= VK_API_VERSION_1_1) {
+        multiviewSupported = true;
+    }
+
+    // Create two dummy subpasses
+    VkSubpassDescription subpasses[] = {
+        {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, nullptr, 0, nullptr},
+        {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, nullptr, 0, nullptr},
+    };
+
+    VkSubpassDependency dependency;
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 0, nullptr, 2, subpasses, 1, &dependency};
+
+    // Non graphics stages in subpass dependency
+    dependency = {0, 1, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT,
+                  VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT};
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported,
+                         "VUID-VkRenderPassCreateInfo-pDependencies-00837", "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03054");
+
+    dependency = {0, 1, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, 0, 0};
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported,
+                         "VUID-VkRenderPassCreateInfo-pDependencies-00837", "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03054");
+
+    dependency = {0, 1, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, 0, 0};
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported,
+                         "VUID-VkRenderPassCreateInfo-pDependencies-00837", "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03054");
+
+    dependency = {0, 1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
+                  VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT};
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported,
+                         "VUID-VkRenderPassCreateInfo-pDependencies-00838", "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03055");
+
+    dependency = {0, 1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0, 0, 0};
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported,
+                         "VUID-VkRenderPassCreateInfo-pDependencies-00838", "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03055");
+
+    dependency = {0, VK_SUBPASS_EXTERNAL, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0, 0, 0};
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported,
+                         "VUID-VkRenderPassCreateInfo-pDependencies-00837", "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03054");
+
+    dependency = {VK_SUBPASS_EXTERNAL, 0, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, 0, 0, 0};
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported,
+                         "VUID-VkRenderPassCreateInfo-pDependencies-00838", "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03055");
+
+    dependency = {0, 0, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, 0};
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported,
+                         "VUID-VkRenderPassCreateInfo-pDependencies-00837", "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03054");
+
+    // Geometry shaders not enabled source
+    dependency = {0, 1, VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, 0};
+
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported, "VUID-VkSubpassDependency-srcStageMask-00860",
+                         "VUID-VkSubpassDependency2KHR-srcStageMask-03080");
+
+    // Geometry shaders not enabled destination
+    dependency = {0, 1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT, 0, 0, 0};
+
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported, "VUID-VkSubpassDependency-dstStageMask-00861",
+                         "VUID-VkSubpassDependency2KHR-dstStageMask-03081");
+
+    // Tessellation not enabled source
+    dependency = {0, 1, VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, 0};
+
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported, "VUID-VkSubpassDependency-srcStageMask-00862",
+                         "VUID-VkSubpassDependency2KHR-srcStageMask-03082");
+
+    // Tessellation not enabled destination
+    dependency = {0, 1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT, 0, 0, 0};
+
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported, "VUID-VkSubpassDependency-dstStageMask-00863",
+                         "VUID-VkSubpassDependency2KHR-dstStageMask-03083");
+
+    // Potential cyclical dependency
+    dependency = {1, 0, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, 0};
+
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported, "VUID-VkSubpassDependency-srcSubpass-00864",
+                         "VUID-VkSubpassDependency2KHR-srcSubpass-03084");
+
+    // EXTERNAL to EXTERNAL dependency
+    dependency = {
+        VK_SUBPASS_EXTERNAL, VK_SUBPASS_EXTERNAL, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, 0};
+
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported, "VUID-VkSubpassDependency-srcSubpass-00865",
+                         "VUID-VkSubpassDependency2KHR-srcSubpass-03085");
+
+    // Logically later source stages in self dependency
+    dependency = {0, 0, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, 0, 0, 0};
+
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported, "VUID-VkSubpassDependency-srcSubpass-00867",
+                         "VUID-VkSubpassDependency2KHR-srcSubpass-03087");
+
+    // Source access mask mismatch with source stage mask
+    dependency = {0, 1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_ACCESS_UNIFORM_READ_BIT, 0, 0};
+
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported, "VUID-VkSubpassDependency-srcAccessMask-00868",
+                         "VUID-VkSubpassDependency2KHR-srcAccessMask-03088");
+
+    // Destination access mask mismatch with destination stage mask
+    dependency = {
+        0, 1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, 0};
+
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported, "VUID-VkSubpassDependency-dstAccessMask-00869",
+                         "VUID-VkSubpassDependency2KHR-dstAccessMask-03089");
+
+    if (multiviewSupported) {
+        // VIEW_LOCAL_BIT but multiview is not enabled
+        dependency = {0, 1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
+                      0, 0, VK_DEPENDENCY_VIEW_LOCAL_BIT};
+
+        TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported, nullptr,
+                             "VUID-VkRenderPassCreateInfo2KHR-viewMask-03059");
+
+        // Enable multiview
+        uint32_t pViewMasks[2] = {0x3u, 0x3u};
+        int32_t pViewOffsets[2] = {0, 0};
+        VkRenderPassMultiviewCreateInfo rpmvci = {
+            VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, nullptr, 2, pViewMasks, 0, nullptr, 0, nullptr};
+        rpci.pNext = &rpmvci;
+
+        // Excessive view offsets
+        dependency = {0, 1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
+                      0, 0, VK_DEPENDENCY_VIEW_LOCAL_BIT};
+        rpmvci.pViewOffsets = pViewOffsets;
+        rpmvci.dependencyCount = 2;
+
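+        // When non-zero, VkRenderPassMultiviewCreateInfo::dependencyCount must equal the render pass dependencyCount.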
+        TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, false, "VUID-VkRenderPassCreateInfo-pNext-01929", nullptr);
+
+        rpmvci.dependencyCount = 0;
+
+        // View offset with subpass self dependency
+        dependency = {0, 0, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
+                      0, 0, VK_DEPENDENCY_VIEW_LOCAL_BIT};
+        rpmvci.pViewOffsets = pViewOffsets;
+        pViewOffsets[0] = 1;
+        rpmvci.dependencyCount = 1;
+
+        TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, false, "VUID-VkRenderPassCreateInfo-pNext-01930", nullptr);
+
+        rpmvci.dependencyCount = 0;
+
+        // View offset with no view local bit
+        if (rp2_supported) {
+            dependency = {0, VK_SUBPASS_EXTERNAL, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, 0};
+            rpmvci.pViewOffsets = pViewOffsets;
+            pViewOffsets[0] = 1;
+            rpmvci.dependencyCount = 1;
+
+            TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported, nullptr,
+                                 "VUID-VkSubpassDependency2KHR-dependencyFlags-03092");
+
+            rpmvci.dependencyCount = 0;
+        }
+
+        // EXTERNAL subpass with VIEW_LOCAL_BIT - source subpass
+        dependency = {VK_SUBPASS_EXTERNAL,         1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0,
+                      VK_DEPENDENCY_VIEW_LOCAL_BIT};
+
+        TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported,
+                             "VUID-VkSubpassDependency-dependencyFlags-02520",
+                             "VUID-VkSubpassDependency2KHR-dependencyFlags-03090");
+
+        // EXTERNAL subpass with VIEW_LOCAL_BIT - destination subpass
+        dependency = {0, VK_SUBPASS_EXTERNAL,         VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0,
+                      0, VK_DEPENDENCY_VIEW_LOCAL_BIT};
+
+        TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported,
+                             "VUID-VkSubpassDependency-dependencyFlags-02521",
+                             "VUID-VkSubpassDependency2KHR-dependencyFlags-03091");
+
+        // Multiple views but no view local bit in self-dependency
+        dependency = {0, 0, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, 0};
+
+        TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported, "VUID-VkSubpassDependency-srcSubpass-00872",
+                             "VUID-VkRenderPassCreateInfo2KHR-pDependencies-03060");
+    }
+}
+
+TEST_F(VkLayerTest, RenderPassCreateInvalidMixedAttachmentSamplesAMD) {
+    TEST_DESCRIPTION("Verify error messages for supported and unsupported sample counts in render pass attachments.");
+
+    // Check for VK_KHR_get_physical_device_properties2
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME);
+        return;
+    }
+
+    bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    std::vector<VkAttachmentDescription> attachments;
+
+    {
+        VkAttachmentDescription att = {};
+        att.format = VK_FORMAT_R8G8B8A8_UNORM;
+        att.samples = VK_SAMPLE_COUNT_1_BIT;
+        att.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
+        att.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
+        att.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
+        att.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
+        att.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+        att.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+
+        attachments.push_back(att);
+
+        att.format = VK_FORMAT_D16_UNORM;
+        att.samples = VK_SAMPLE_COUNT_4_BIT;
+        att.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
+        att.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
+        att.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
+        att.stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE;
+        att.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+        att.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+
+        attachments.push_back(att);
+    }
+
+    VkAttachmentReference color_ref = {};
+    color_ref.attachment = 0;
+    color_ref.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+
+    VkAttachmentReference depth_ref = {};
+    depth_ref.attachment = 1;
+    depth_ref.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+
+    VkSubpassDescription subpass = {};
+    subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
+    subpass.colorAttachmentCount = 1;
+    subpass.pColorAttachments = &color_ref;
+    subpass.pDepthStencilAttachment = &depth_ref;
+
+    VkRenderPassCreateInfo rpci = {};
+    rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+    rpci.attachmentCount = attachments.size();
+    rpci.pAttachments = attachments.data();
+    rpci.subpassCount = 1;
+    rpci.pSubpasses = &subpass;
+
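+    // With VK_AMD_mixed_attachment_samples, a 1-sample color attachment paired with a 4-sample depth attachment is a valid combination.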
+    m_errorMonitor->ExpectSuccess();
+
+    VkRenderPass rp;
+    VkResult err;
+
+    err = vk::CreateRenderPass(device(), &rpci, NULL, &rp);
+    m_errorMonitor->VerifyNotFound();
+    if (err == VK_SUCCESS) vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+
+    // A color attachment with more samples than the depth/stencil attachment is invalid even with VK_AMD_mixed_attachment_samples
+    attachments[0].samples = VK_SAMPLE_COUNT_4_BIT;
+    attachments[1].samples = VK_SAMPLE_COUNT_1_BIT;
+
+    TestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2Supported,
+                         "VUID-VkSubpassDescription-pColorAttachments-01506",
+                         "VUID-VkSubpassDescription2KHR-pColorAttachments-03070");
+}
+
+TEST_F(VkLayerTest, RenderPassBeginInvalidRenderArea) {
+    TEST_DESCRIPTION("Generate INVALID_RENDER_AREA error by beginning renderpass with extent outside of framebuffer");
+    // Check for VK_KHR_get_physical_device_properties2
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // The render target's framebuffer is 256x256; exceed that to trigger INVALID_RENDER_AREA
+    m_renderPassBeginInfo.renderArea.extent.width = 257;
+    m_renderPassBeginInfo.renderArea.extent.height = 257;
+
+    TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &m_renderPassBeginInfo, rp2Supported,
+                        "Cannot execute a render pass with renderArea not within the bound of the framebuffer.",
+                        "Cannot execute a render pass with renderArea not within the bound of the framebuffer.");
+}
+
+TEST_F(VkLayerTest, RenderPassBeginWithinRenderPass) {
+    // Check for VK_KHR_get_physical_device_properties2
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = nullptr;
+    bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    if (rp2Supported) {
+        vkCmdBeginRenderPass2KHR =
+            (PFN_vkCmdBeginRenderPass2KHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdBeginRenderPass2KHR");
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // Begin a render pass, then attempt to begin another one while it is still active
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+    // Just use a dummy Renderpass
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBeginRenderPass-renderpass");
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
+
+    m_errorMonitor->VerifyFound();
+
+    if (rp2Supported) {
+        VkSubpassBeginInfoKHR subpassBeginInfo = {VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR, nullptr, VK_SUBPASS_CONTENTS_INLINE};
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBeginRenderPass2KHR-renderpass");
+        vkCmdBeginRenderPass2KHR(m_commandBuffer->handle(), &m_renderPassBeginInfo, &subpassBeginInfo);
+        m_errorMonitor->VerifyFound();
+    }
+}
+
+TEST_F(VkLayerTest, RenderPassBeginIncompatibleFramebufferRenderPass) {
+    TEST_DESCRIPTION("Test that renderpass begin is compatible with the framebuffer renderpass ");
+
+    ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+    // Create a depth stencil image view
+    VkImageObj image(m_device);
+
+    image.Init(128, 128, 1, VK_FORMAT_D16_UNORM, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
+    ASSERT_TRUE(image.initialized());
+
+    VkImageView dsv;
+    VkImageViewCreateInfo dsvci = {};
+    dsvci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    dsvci.pNext = nullptr;
+    dsvci.image = image.handle();
+    dsvci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    dsvci.format = VK_FORMAT_D16_UNORM;
+    dsvci.subresourceRange.layerCount = 1;
+    dsvci.subresourceRange.baseMipLevel = 0;
+    dsvci.subresourceRange.levelCount = 1;
+    dsvci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+    vk::CreateImageView(m_device->device(), &dsvci, NULL, &dsv);
+
+    // Create a renderPass with a single attachment that uses loadOp CLEAR
+    VkAttachmentDescription description = {0,
+                                           VK_FORMAT_D16_UNORM,
+                                           VK_SAMPLE_COUNT_1_BIT,
+                                           VK_ATTACHMENT_LOAD_OP_LOAD,
+                                           VK_ATTACHMENT_STORE_OP_DONT_CARE,
+                                           VK_ATTACHMENT_LOAD_OP_CLEAR,
+                                           VK_ATTACHMENT_STORE_OP_DONT_CARE,
+                                           VK_IMAGE_LAYOUT_GENERAL,
+                                           VK_IMAGE_LAYOUT_GENERAL};
+
+    VkAttachmentReference depth_stencil_ref = {0, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
+
+    VkSubpassDescription subpass = {0,      VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, &depth_stencil_ref, 0,
+                                    nullptr};
+
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &description, 1, &subpass, 0, nullptr};
+    VkRenderPass rp1, rp2;
+
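+    // rp1 keeps the depth/stencil attachment reference while rp2 drops it, so the two render passes are not compatible.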
+    vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp1);
+    subpass.pDepthStencilAttachment = nullptr;
+    vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp2);
+
+    // Create a framebuffer
+
+    VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp1, 1, &dsv, 128, 128, 1};
+    VkFramebuffer fb;
+
+    vk::CreateFramebuffer(m_device->handle(), &fbci, nullptr, &fb);
+
+    VkRenderPassBeginInfo rp_begin = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp2, fb, {{0, 0}, {128, 128}}, 0, nullptr};
+
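+    // The framebuffer was created against rp1; beginning it with the incompatible rp2 must fail.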
+    TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, false,
+                        "VUID-VkRenderPassBeginInfo-renderPass-00904", nullptr);
+
+    vk::DestroyRenderPass(m_device->device(), rp1, nullptr);
+    vk::DestroyRenderPass(m_device->device(), rp2, nullptr);
+    vk::DestroyFramebuffer(m_device->device(), fb, nullptr);
+    vk::DestroyImageView(m_device->device(), dsv, nullptr);
+}
+
+TEST_F(VkLayerTest, RenderPassBeginLayoutsFramebufferImageUsageMismatches) {
+    TEST_DESCRIPTION(
+        "Test that renderpass initial/final layouts match up with the usage bits set for each attachment of the framebuffer");
+
+    // Check for VK_KHR_get_physical_device_properties2
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+    bool maintenance2Supported = rp2Supported;
+
+    // Check for VK_KHR_maintenance2
+    if (!rp2Supported && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE2_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
+        maintenance2Supported = true;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
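+    // Maintenance2 behavior is core in Vulkan 1.1, so treat it as supported on 1.1+ devices.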
+    if (m_device->props.apiVersion >= VK_API_VERSION_1_1) {
+        maintenance2Supported = true;
+    }
+
+    // Create an input attachment view
+    VkImageObj iai(m_device);
+
+    iai.InitNoLayout(128, 128, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
+    ASSERT_TRUE(iai.initialized());
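+    // Attachment 0 is backed by this input-attachment-only image, so initial layouts that require other usage bits are rejected below.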
+
+    VkImageView iav;
+    VkImageViewCreateInfo iavci = {};
+    iavci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    iavci.pNext = nullptr;
+    iavci.image = iai.handle();
+    iavci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    iavci.format = VK_FORMAT_R8G8B8A8_UNORM;
+    iavci.subresourceRange.layerCount = 1;
+    iavci.subresourceRange.baseMipLevel = 0;
+    iavci.subresourceRange.levelCount = 1;
+    iavci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    vk::CreateImageView(m_device->device(), &iavci, NULL, &iav);
+
+    // Create a color attachment view
+    VkImageObj cai(m_device);
+
+    cai.InitNoLayout(128, 128, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
+    ASSERT_TRUE(cai.initialized());
+
+    VkImageView cav;
+    VkImageViewCreateInfo cavci = {};
+    cavci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    cavci.pNext = nullptr;
+    cavci.image = cai.handle();
+    cavci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    cavci.format = VK_FORMAT_R8G8B8A8_UNORM;
+    cavci.subresourceRange.layerCount = 1;
+    cavci.subresourceRange.baseMipLevel = 0;
+    cavci.subresourceRange.levelCount = 1;
+    cavci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    vk::CreateImageView(m_device->device(), &cavci, NULL, &cav);
+
+    // Create a renderPass with those attachments
+    VkAttachmentDescription descriptions[] = {
+        {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+         VK_ATTACHMENT_LOAD_OP_CLEAR, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL},
+        {1, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+         VK_ATTACHMENT_LOAD_OP_CLEAR, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL}};
+
+    VkAttachmentReference input_ref = {0, VK_IMAGE_LAYOUT_GENERAL};
+    VkAttachmentReference color_ref = {1, VK_IMAGE_LAYOUT_GENERAL};
+
+    VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 1, &input_ref, 1, &color_ref, nullptr, nullptr, 0, nullptr};
+
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 2, descriptions, 1, &subpass, 0, nullptr};
+
+    VkRenderPass rp;
+
+    vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+
+    // Create a framebuffer
+
+    VkImageView views[] = {iav, cav};
+
+    VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 2, views, 128, 128, 1};
+    VkFramebuffer fb;
+
+    vk::CreateFramebuffer(m_device->handle(), &fbci, nullptr, &fb);
+
+    VkRenderPassBeginInfo rp_begin = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp, fb, {{0, 0}, {128, 128}}, 0, nullptr};
+
+    VkRenderPass rp_invalid;
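+    // Each case below recreates the render pass with an initialLayout that conflicts with the attachment's usage flags and expects the matching vkCmdBeginRenderPass VUID.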
+
+    // Initial layout is VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL but attachment doesn't support VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT
+    descriptions[0].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp_invalid);
+    rp_begin.renderPass = rp_invalid;
+    TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, rp2Supported,
+                        "VUID-vkCmdBeginRenderPass-initialLayout-00895", "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03094");
+
+    vk::DestroyRenderPass(m_device->handle(), rp_invalid, nullptr);
+
+    // Initial layout is VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL but attachment doesn't support VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT
+    // / VK_IMAGE_USAGE_SAMPLED_BIT
+    descriptions[0].initialLayout = VK_IMAGE_LAYOUT_GENERAL;
+    descriptions[1].initialLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+    vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp_invalid);
+    rp_begin.renderPass = rp_invalid;
+
+    TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, rp2Supported,
+                        "VUID-vkCmdBeginRenderPass-initialLayout-00897", "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03097");
+
+    vk::DestroyRenderPass(m_device->handle(), rp_invalid, nullptr);
+    descriptions[1].initialLayout = VK_IMAGE_LAYOUT_GENERAL;
+
+    // Initial layout is VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL but attachment doesn't support VK_IMAGE_USAGE_TRANSFER_SRC_BIT
+    descriptions[0].initialLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
+    vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp_invalid);
+    rp_begin.renderPass = rp_invalid;
+
+    TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, rp2Supported,
+                        "VUID-vkCmdBeginRenderPass-initialLayout-00898", "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03098");
+
+    vk::DestroyRenderPass(m_device->handle(), rp_invalid, nullptr);
+
+    // Initial layout is VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL but attachment doesn't support VK_IMAGE_USAGE_TRANSFER_DST_BIT
+    descriptions[0].initialLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
+    vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp_invalid);
+    rp_begin.renderPass = rp_invalid;
+
+    TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, rp2Supported,
+                        "VUID-vkCmdBeginRenderPass-initialLayout-00899", "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03099");
+
+    vk::DestroyRenderPass(m_device->handle(), rp_invalid, nullptr);
+
+    // Initial layout is VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL but attachment doesn't support
+    // VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT
+    descriptions[0].initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+    vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp_invalid);
+    rp_begin.renderPass = rp_invalid;
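+    // With maintenance2 (or Vulkan 1.1) the layer reports the 01758 VUID for this case instead of 00896.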
+    const char *initial_layout_vuid_rp1 =
+        maintenance2Supported ? "VUID-vkCmdBeginRenderPass-initialLayout-01758" : "VUID-vkCmdBeginRenderPass-initialLayout-00896";
+
+    TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, rp2Supported,
+                        initial_layout_vuid_rp1, "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03096");
+
+    vk::DestroyRenderPass(m_device->handle(), rp_invalid, nullptr);
+
+    // Initial layout is VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL but attachment doesn't support
+    // VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT
+    descriptions[0].initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
+    vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp_invalid);
+    rp_begin.renderPass = rp_invalid;
+
+    TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, rp2Supported,
+                        initial_layout_vuid_rp1, "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03096");
+
+    vk::DestroyRenderPass(m_device->handle(), rp_invalid, nullptr);
+
+    if (maintenance2Supported || rp2Supported) {
+        // Initial layout is VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL but attachment doesn't support
+        // VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT
+        descriptions[0].initialLayout = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL;
+        vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp_invalid);
+        rp_begin.renderPass = rp_invalid;
+
+        TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, rp2Supported,
+                            "VUID-vkCmdBeginRenderPass-initialLayout-01758", "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03096");
+
+        vk::DestroyRenderPass(m_device->handle(), rp_invalid, nullptr);
+
+        // Initial layout is VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL but attachment doesn't support
+        // VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT
+        descriptions[0].initialLayout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL;
+        vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp_invalid);
+        rp_begin.renderPass = rp_invalid;
+
+        TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, rp2Supported,
+                            "VUID-vkCmdBeginRenderPass-initialLayout-01758", "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03096");
+
+        vk::DestroyRenderPass(m_device->handle(), rp_invalid, nullptr);
+    }
+
+    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+    vk::DestroyFramebuffer(m_device->device(), fb, nullptr);
+    vk::DestroyImageView(m_device->device(), iav, nullptr);
+    vk::DestroyImageView(m_device->device(), cav, nullptr);
+}
+
+TEST_F(VkLayerTest, RenderPassBeginClearOpMismatch) {
+    TEST_DESCRIPTION(
+        "Begin a renderPass where clearValueCount is less than the number of renderPass attachments that use "
+        "loadOp VK_ATTACHMENT_LOAD_OP_CLEAR.");
+
+    // Check for VK_KHR_get_physical_device_properties2
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // Create a renderPass with a single attachment that uses loadOp CLEAR
+    VkAttachmentReference attach = {};
+    attach.layout = VK_IMAGE_LAYOUT_GENERAL;
+    VkSubpassDescription subpass = {};
+    subpass.colorAttachmentCount = 1;
+    subpass.pColorAttachments = &attach;
+    VkRenderPassCreateInfo rpci = {};
+    rpci.subpassCount = 1;
+    rpci.pSubpasses = &subpass;
+    rpci.attachmentCount = 1;
+    VkAttachmentDescription attach_desc = {};
+    attach_desc.format = VK_FORMAT_B8G8R8A8_UNORM;
+    // Set loadOp to CLEAR
+    attach_desc.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
+    attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
+    attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+    rpci.pAttachments = &attach_desc;
+    rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+    VkRenderPass rp;
+    vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+
+    VkRenderPassBeginInfo rp_begin = {};
+    rp_begin.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
+    rp_begin.pNext = NULL;
+    rp_begin.renderPass = renderPass();
+    rp_begin.framebuffer = framebuffer();
+    rp_begin.clearValueCount = 0;  // Should be 1
+
+    TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, rp2Supported,
+                        "VUID-VkRenderPassBeginInfo-clearValueCount-00902", "VUID-VkRenderPassBeginInfo-clearValueCount-00902");
+
+    vk::DestroyRenderPass(m_device->device(), rp, NULL);
+}
+
+TEST_F(VkLayerTest, RenderPassBeginSampleLocationsInvalidIndicesEXT) {
+    TEST_DESCRIPTION("Test that attachment indices and subpass indices specifed by sample locations structures are valid");
+
+    // Check for VK_KHR_get_physical_device_properties2
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+    // Create a depth stencil image view
+    VkImageObj image(m_device);
+
+    image.Init(128, 128, 1, VK_FORMAT_D16_UNORM, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
+    ASSERT_TRUE(image.initialized());
+
+    VkImageView dsv;
+    VkImageViewCreateInfo dsvci = {};
+    dsvci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    dsvci.pNext = nullptr;
+    dsvci.image = image.handle();
+    dsvci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    dsvci.format = VK_FORMAT_D16_UNORM;
+    dsvci.subresourceRange.layerCount = 1;
+    dsvci.subresourceRange.baseMipLevel = 0;
+    dsvci.subresourceRange.levelCount = 1;
+    dsvci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+    vk::CreateImageView(m_device->device(), &dsvci, NULL, &dsv);
+
+    // Create a renderPass with a single attachment that uses loadOp CLEAR
+    VkAttachmentDescription description = {0,
+                                           VK_FORMAT_D16_UNORM,
+                                           VK_SAMPLE_COUNT_1_BIT,
+                                           VK_ATTACHMENT_LOAD_OP_LOAD,
+                                           VK_ATTACHMENT_STORE_OP_DONT_CARE,
+                                           VK_ATTACHMENT_LOAD_OP_CLEAR,
+                                           VK_ATTACHMENT_STORE_OP_DONT_CARE,
+                                           VK_IMAGE_LAYOUT_GENERAL,
+                                           VK_IMAGE_LAYOUT_GENERAL};
+
+    VkAttachmentReference depth_stencil_ref = {0, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
+
+    VkSubpassDescription subpass = {0,      VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, &depth_stencil_ref, 0,
+                                    nullptr};
+
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &description, 1, &subpass, 0, nullptr};
+    VkRenderPass rp;
+
+    vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+
+    // Create a framebuffer
+
+    VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &dsv, 128, 128, 1};
+    VkFramebuffer fb;
+
+    vk::CreateFramebuffer(m_device->handle(), &fbci, nullptr, &fb);
+
+    VkSampleLocationEXT sample_location = {0.5, 0.5};
+
+    VkSampleLocationsInfoEXT sample_locations_info = {
+        VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT, nullptr, VK_SAMPLE_COUNT_1_BIT, {1, 1}, 1, &sample_location};
+
+    VkAttachmentSampleLocationsEXT attachment_sample_locations = {0, sample_locations_info};
+    VkSubpassSampleLocationsEXT subpass_sample_locations = {0, sample_locations_info};
+
+    VkRenderPassSampleLocationsBeginInfoEXT rp_sl_begin = {VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT,
+                                                           nullptr,
+                                                           1,
+                                                           &attachment_sample_locations,
+                                                           1,
+                                                           &subpass_sample_locations};
+
+    VkRenderPassBeginInfo rp_begin = {
+        VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, &rp_sl_begin, rp, fb, {{0, 0}, {128, 128}}, 0, nullptr};
+
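+    // The render pass has a single attachment and a single subpass, so index 1 is out of range in both structures below.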
+    attachment_sample_locations.attachmentIndex = 1;
+    TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, false,
+                        "VUID-VkAttachmentSampleLocationsEXT-attachmentIndex-01531", nullptr);
+    attachment_sample_locations.attachmentIndex = 0;
+
+    subpass_sample_locations.subpassIndex = 1;
+    TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &rp_begin, false,
+                        "VUID-VkSubpassSampleLocationsEXT-subpassIndex-01532", nullptr);
+    subpass_sample_locations.subpassIndex = 0;
+
+    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+    vk::DestroyFramebuffer(m_device->device(), fb, nullptr);
+    vk::DestroyImageView(m_device->device(), dsv, nullptr);
+}
+
+TEST_F(VkLayerTest, RenderPassNextSubpassExcessive) {
+    TEST_DESCRIPTION("Test that an error is produced when CmdNextSubpass is called too many times in a renderpass instance");
+
+    // Check for VK_KHR_get_physical_device_properties2
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = nullptr;
+    bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    if (rp2Supported) {
+        vkCmdNextSubpass2KHR = (PFN_vkCmdNextSubpass2KHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdNextSubpass2KHR");
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdNextSubpass-None-00909");
+    vk::CmdNextSubpass(m_commandBuffer->handle(), VK_SUBPASS_CONTENTS_INLINE);
+    m_errorMonitor->VerifyFound();
+
+    if (rp2Supported) {
+        VkSubpassBeginInfoKHR subpassBeginInfo = {VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR, nullptr, VK_SUBPASS_CONTENTS_INLINE};
+        VkSubpassEndInfoKHR subpassEndInfo = {VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR, nullptr};
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdNextSubpass2KHR-None-03102");
+
+        vkCmdNextSubpass2KHR(m_commandBuffer->handle(), &subpassBeginInfo, &subpassEndInfo);
+        m_errorMonitor->VerifyFound();
+    }
+
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, RenderPassEndBeforeFinalSubpass) {
+    TEST_DESCRIPTION("Test that an error is produced when CmdEndRenderPass is called before the final subpass has been reached");
+
+    // Check for VK_KHR_get_physical_device_properties2
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = nullptr;
+    bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+    if (rp2Supported) {
+        vkCmdEndRenderPass2KHR = (PFN_vkCmdEndRenderPass2KHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdEndRenderPass2KHR");
+    }
+
+    VkSubpassDescription sd[2] = {{0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, nullptr, 0, nullptr},
+                                  {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, nullptr, 0, nullptr}};
+
+    VkRenderPassCreateInfo rcpi = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 0, nullptr, 2, sd, 0, nullptr};
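+    // Two subpasses with no attachments: ending the render pass while still in the first subpass must be rejected.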
+
+    VkRenderPass rp;
+    VkResult err = vk::CreateRenderPass(m_device->device(), &rcpi, nullptr, &rp);
+    ASSERT_VK_SUCCESS(err);
+
+    VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 0, nullptr, 16, 16, 1};
+
+    VkFramebuffer fb;
+    err = vk::CreateFramebuffer(m_device->device(), &fbci, nullptr, &fb);
+    ASSERT_VK_SUCCESS(err);
+
+    m_commandBuffer->begin();
+
+    VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp, fb, {{0, 0}, {16, 16}}, 0, nullptr};
+
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdEndRenderPass-None-00910");
+    vk::CmdEndRenderPass(m_commandBuffer->handle());
+    m_errorMonitor->VerifyFound();
+
+    if (rp2Supported) {
+        VkSubpassEndInfoKHR subpassEndInfo = {VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR, nullptr};
+
+        m_commandBuffer->reset();
+        m_commandBuffer->begin();
+        vk::CmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdEndRenderPass2KHR-None-03103");
+        vkCmdEndRenderPass2KHR(m_commandBuffer->handle(), &subpassEndInfo);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Clean up.
+    vk::DestroyFramebuffer(m_device->device(), fb, nullptr);
+    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+}
+
+TEST_F(VkLayerTest, RenderPassDestroyWhileInUse) {
+    TEST_DESCRIPTION("Delete in-use renderPass.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // Create simple renderpass
+    VkAttachmentReference attach = {};
+    attach.layout = VK_IMAGE_LAYOUT_GENERAL;
+    VkSubpassDescription subpass = {};
+    subpass.colorAttachmentCount = 1;
+    subpass.pColorAttachments = &attach;
+    VkRenderPassCreateInfo rpci = {};
+    rpci.subpassCount = 1;
+    rpci.pSubpasses = &subpass;
+    rpci.attachmentCount = 1;
+    VkAttachmentDescription attach_desc = {};
+    attach_desc.format = VK_FORMAT_B8G8R8A8_UNORM;
+    attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
+    attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+    rpci.pAttachments = &attach_desc;
+    rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+    VkRenderPass rp;
+    VkResult err = vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+    ASSERT_VK_SUCCESS(err);
+
+    m_errorMonitor->ExpectSuccess();
+
+    m_commandBuffer->begin();
+    VkRenderPassBeginInfo rpbi = {};
+    rpbi.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
+    rpbi.framebuffer = m_framebuffer;
+    rpbi.renderPass = rp;
+    m_commandBuffer->BeginRenderPass(rpbi);
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyNotFound();
+
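+    // The submitted command buffer still references rp, so destroying it now must be flagged as an in-use violation.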
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyRenderPass-renderPass-00873");
+    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+    m_errorMonitor->VerifyFound();
+
+    // Wait for queue to complete so we can safely destroy rp
+    vk::QueueWaitIdle(m_device->m_queue);
+    m_errorMonitor->SetUnexpectedError("If renderPass is not VK_NULL_HANDLE, renderPass must be a valid VkRenderPass handle");
+    m_errorMonitor->SetUnexpectedError("Was it created? Has it already been destroyed?");
+    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+}
+
+TEST_F(VkLayerTest, FramebufferCreateErrors) {
+    TEST_DESCRIPTION(
+        "Hit errors when attempting to create a framebuffer :\n"
+        " 1. Mismatch between framebuffer & renderPass attachmentCount\n"
+        " 2. Use a color image as depthStencil attachment\n"
+        " 3. Mismatch framebuffer & renderPass attachment formats\n"
+        " 4. Mismatch framebuffer & renderPass attachment #samples\n"
+        " 5. Framebuffer attachment w/ non-1 mip-levels\n"
+        " 6. Framebuffer attachment where dimensions don't match\n"
+        " 7. Framebuffer attachment where dimensions don't match\n"
+        " 8. Framebuffer attachment w/o identity swizzle\n"
+        " 9. framebuffer dimensions exceed physical device limits\n"
+        "10. null pAttachments\n");
+
+    // Check for VK_KHR_get_physical_device_properties2
+    bool push_physical_device_properties_2_support =
+        InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (push_physical_device_properties_2_support) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    bool push_fragment_density_support = false;
+
+    if (push_physical_device_properties_2_support) {
+        push_fragment_density_support = DeviceExtensionSupported(gpu(), nullptr, VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME);
+        if (push_fragment_density_support) m_device_extension_names.push_back(VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME);
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, 0));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-attachmentCount-00876");
+
+    // Create a renderPass with a single color attachment
+    VkAttachmentReference attach = {};
+    attach.layout = VK_IMAGE_LAYOUT_GENERAL;
+    VkSubpassDescription subpass = {};
+    subpass.pColorAttachments = &attach;
+    VkRenderPassCreateInfo rpci = {};
+    rpci.subpassCount = 1;
+    rpci.pSubpasses = &subpass;
+    rpci.attachmentCount = 1;
+    VkAttachmentDescription attach_desc = {};
+    attach_desc.format = VK_FORMAT_B8G8R8A8_UNORM;
+    attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
+    attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+    rpci.pAttachments = &attach_desc;
+    rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+    VkRenderPass rp;
+    VkResult err = vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+    ASSERT_VK_SUCCESS(err);
+
+    VkImageView ivs[2];
+    ivs[0] = m_renderTargets[0]->targetView(VK_FORMAT_B8G8R8A8_UNORM);
+    ivs[1] = m_renderTargets[0]->targetView(VK_FORMAT_B8G8R8A8_UNORM);
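+    // Two handles to the same view are enough to supply attachmentCount = 2 against a render pass that declares a single attachment.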
+    VkFramebufferCreateInfo fb_info = {};
+    fb_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
+    fb_info.pNext = NULL;
+    fb_info.renderPass = rp;
+    // Set mis-matching attachmentCount
+    fb_info.attachmentCount = 2;
+    fb_info.pAttachments = ivs;
+    fb_info.width = 100;
+    fb_info.height = 100;
+    fb_info.layers = 1;
+
+    VkFramebuffer fb;
+    err = vk::CreateFramebuffer(device(), &fb_info, NULL, &fb);
+
+    m_errorMonitor->VerifyFound();
+    if (err == VK_SUCCESS) {
+        vk::DestroyFramebuffer(m_device->device(), fb, NULL);
+    }
+    vk::DestroyRenderPass(m_device->device(), rp, NULL);
+
+    // Create a renderPass with a depth-stencil attachment created with
+    // IMAGE_USAGE_COLOR_ATTACHMENT
+    // Add our color attachment to pDepthStencilAttachment
+    subpass.pDepthStencilAttachment = &attach;
+    subpass.pColorAttachments = NULL;
+    VkRenderPass rp_ds;
+    err = vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp_ds);
+    ASSERT_VK_SUCCESS(err);
+    // Set correct attachment count, but attachment has COLOR usage bit set
+    fb_info.attachmentCount = 1;
+    fb_info.renderPass = rp_ds;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-02633");
+    err = vk::CreateFramebuffer(device(), &fb_info, NULL, &fb);
+
+    m_errorMonitor->VerifyFound();
+    if (err == VK_SUCCESS) {
+        vk::DestroyFramebuffer(m_device->device(), fb, NULL);
+    }
+    vk::DestroyRenderPass(m_device->device(), rp_ds, NULL);
+
+    // Create new renderpass with alternate attachment format from fb
+    attach_desc.format = VK_FORMAT_R8G8B8A8_UNORM;
+    subpass.pDepthStencilAttachment = NULL;
+    subpass.pColorAttachments = &attach;
+    err = vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+    ASSERT_VK_SUCCESS(err);
+
+    // Cause error due to mis-matched formats between rp & fb
+    //  rp attachment 0 now has RGBA8 but corresponding fb attach is BGRA8
+    fb_info.renderPass = rp;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-00880");
+    err = vk::CreateFramebuffer(device(), &fb_info, NULL, &fb);
+
+    m_errorMonitor->VerifyFound();
+    if (err == VK_SUCCESS) {
+        vk::DestroyFramebuffer(m_device->device(), fb, NULL);
+    }
+    vk::DestroyRenderPass(m_device->device(), rp, NULL);
+
+    // Create new renderpass with alternate sample count from fb
+    attach_desc.format = VK_FORMAT_B8G8R8A8_UNORM;
+    attach_desc.samples = VK_SAMPLE_COUNT_4_BIT;
+    err = vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+    ASSERT_VK_SUCCESS(err);
+
+    // Cause error due to mis-matched sample count between rp & fb
+    fb_info.renderPass = rp;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-00881");
+    err = vk::CreateFramebuffer(device(), &fb_info, NULL, &fb);
+
+    m_errorMonitor->VerifyFound();
+    if (err == VK_SUCCESS) {
+        vk::DestroyFramebuffer(m_device->device(), fb, NULL);
+    }
+
+    vk::DestroyRenderPass(m_device->device(), rp, NULL);
+
+    {
+        // Create an image with 2 mip levels.
+        VkImageObj image(m_device);
+        image.Init(128, 128, 2, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+        ASSERT_TRUE(image.initialized());
+
+        // Create an image view with two mip levels.
+        VkImageView view;
+        VkImageViewCreateInfo ivci = {};
+        ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+        ivci.image = image.handle();
+        ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+        ivci.format = VK_FORMAT_B8G8R8A8_UNORM;
+        ivci.subresourceRange.layerCount = 1;
+        ivci.subresourceRange.baseMipLevel = 0;
+        // Set level count to 2 (only 1 is allowed for FB attachment)
+        ivci.subresourceRange.levelCount = 2;
+        ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+        err = vk::CreateImageView(m_device->device(), &ivci, NULL, &view);
+        ASSERT_VK_SUCCESS(err);
+
+        // Re-create renderpass to have matching sample count
+        attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
+        err = vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+        ASSERT_VK_SUCCESS(err);
+
+        fb_info.renderPass = rp;
+        fb_info.pAttachments = &view;
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-00883");
+        err = vk::CreateFramebuffer(device(), &fb_info, NULL, &fb);
+
+        m_errorMonitor->VerifyFound();
+        if (err == VK_SUCCESS) {
+            vk::DestroyFramebuffer(m_device->device(), fb, NULL);
+        }
+        vk::DestroyImageView(m_device->device(), view, NULL);
+    }
+
+    // Update view to original color buffer and grow FB dimensions too big
+    fb_info.pAttachments = ivs;
+    fb_info.height = 1024;
+    fb_info.width = 1024;
+    fb_info.layers = 2;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-00882");
+    err = vk::CreateFramebuffer(device(), &fb_info, NULL, &fb);
+
+    m_errorMonitor->VerifyFound();
+    if (err == VK_SUCCESS) {
+        vk::DestroyFramebuffer(m_device->device(), fb, NULL);
+    }
+
+    {
+        if (!push_fragment_density_support) {
+            printf("%s VK_EXT_fragment_density_map Extension not supported, skipping tests\n", kSkipPrefix);
+        } else {
+            uint32_t attachment_width = 512;
+            uint32_t attachment_height = 512;
+            VkFormat attachment_format = VK_FORMAT_R8G8_UNORM;
+            uint32_t frame_width = 512;
+            uint32_t frame_height = 512;
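+            // The framebuffer is 512x512; the fragment density map attachments created below are deliberately too small for it.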
+
+            // Create a renderPass with a single color attachment for fragment density map
+            VkAttachmentReference attach_fragment_density_map = {};
+            attach_fragment_density_map.layout = VK_IMAGE_LAYOUT_GENERAL;
+            VkSubpassDescription subpass_fragment_density_map = {};
+            subpass_fragment_density_map.pColorAttachments = &attach_fragment_density_map;
+            VkRenderPassCreateInfo rpci_fragment_density_map = {};
+            rpci_fragment_density_map.subpassCount = 1;
+            rpci_fragment_density_map.pSubpasses = &subpass_fragment_density_map;
+            rpci_fragment_density_map.attachmentCount = 1;
+            VkAttachmentDescription attach_desc_fragment_density_map = {};
+            attach_desc_fragment_density_map.format = attachment_format;
+            attach_desc_fragment_density_map.samples = VK_SAMPLE_COUNT_1_BIT;
+            attach_desc_fragment_density_map.finalLayout = VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT;
+            rpci_fragment_density_map.pAttachments = &attach_desc_fragment_density_map;
+            rpci_fragment_density_map.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+            VkRenderPass rp_fragment_density_map;
+
+            err = vk::CreateRenderPass(m_device->device(), &rpci_fragment_density_map, NULL, &rp_fragment_density_map);
+            ASSERT_VK_SUCCESS(err);
+
+            // Create view attachment
+            VkImageView view_fragment_density_map;
+            VkImageViewCreateInfo ivci = {};
+            ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+            ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+            ivci.format = attachment_format;
+            ivci.flags = 0;
+            ivci.subresourceRange.layerCount = 1;
+            ivci.subresourceRange.baseMipLevel = 0;
+            ivci.subresourceRange.levelCount = 1;
+            ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+            VkFramebufferAttachmentImageInfoKHR fb_fdm = {};
+            fb_fdm.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
+            fb_fdm.usage = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT;
+            fb_fdm.width = frame_width;
+            fb_fdm.height = frame_height;
+            fb_fdm.layerCount = 1;
+            fb_fdm.viewFormatCount = 1;
+            fb_fdm.pViewFormats = &attachment_format;
+            VkFramebufferAttachmentsCreateInfoKHR fb_aci_fdm = {};
+            fb_aci_fdm.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR;
+            fb_aci_fdm.attachmentImageInfoCount = 1;
+            fb_aci_fdm.pAttachmentImageInfos = &fb_fdm;
+
+            VkFramebufferCreateInfo fbci = {};
+            fbci.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
+            fbci.pNext = &fb_aci_fdm;
+            fbci.flags = 0;
+            fbci.width = frame_width;
+            fbci.height = frame_height;
+            fbci.layers = 1;
+            fbci.renderPass = rp_fragment_density_map;
+            fbci.attachmentCount = 1;
+            fbci.pAttachments = &view_fragment_density_map;
+
+            // Set small width
+            VkImageObj image2(m_device);
+            image2.Init(16, attachment_height, 1, attachment_format, VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT,
+                        VK_IMAGE_TILING_LINEAR, 0);
+            ASSERT_TRUE(image2.initialized());
+
+            ivci.image = image2.handle();
+            err = vk::CreateImageView(m_device->device(), &ivci, NULL, &view_fragment_density_map);
+            ASSERT_VK_SUCCESS(err);
+
+            fbci.pAttachments = &view_fragment_density_map;
+
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-02555");
+            err = vk::CreateFramebuffer(device(), &fbci, NULL, &fb);
+
+            m_errorMonitor->VerifyFound();
+            if (err == VK_SUCCESS) {
+                vk::DestroyFramebuffer(m_device->device(), fb, NULL);
+            }
+
+            vk::DestroyImageView(m_device->device(), view_fragment_density_map, NULL);
+
+            // Set small height
+            VkImageObj image3(m_device);
+            image3.Init(attachment_width, 16, 1, attachment_format, VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT,
+                        VK_IMAGE_TILING_LINEAR, 0);
+            ASSERT_TRUE(image3.initialized());
+
+            ivci.image = image3.handle();
+            err = vk::CreateImageView(m_device->device(), &ivci, NULL, &view_fragment_density_map);
+            ASSERT_VK_SUCCESS(err);
+
+            fbci.pAttachments = &view_fragment_density_map;
+
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-02556");
+            err = vk::CreateFramebuffer(device(), &fbci, NULL, &fb);
+
+            m_errorMonitor->VerifyFound();
+            if (err == VK_SUCCESS) {
+                vk::DestroyFramebuffer(m_device->device(), fb, NULL);
+            }
+
+            vk::DestroyImageView(m_device->device(), view_fragment_density_map, NULL);
+
+            vk::DestroyRenderPass(m_device->device(), rp_fragment_density_map, NULL);
+        }
+    }
+
+    {
+        // Create an image with one mip level.
+        VkImageObj image(m_device);
+        image.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+        ASSERT_TRUE(image.initialized());
+
+        // Create view attachment with non-identity swizzle
+        VkImageView view;
+        VkImageViewCreateInfo ivci = {};
+        ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+        ivci.image = image.handle();
+        ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+        ivci.format = VK_FORMAT_B8G8R8A8_UNORM;
+        ivci.subresourceRange.layerCount = 1;
+        ivci.subresourceRange.baseMipLevel = 0;
+        ivci.subresourceRange.levelCount = 1;
+        ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+        ivci.components.r = VK_COMPONENT_SWIZZLE_G;
+        ivci.components.g = VK_COMPONENT_SWIZZLE_R;
+        ivci.components.b = VK_COMPONENT_SWIZZLE_A;
+        ivci.components.a = VK_COMPONENT_SWIZZLE_B;
+        err = vk::CreateImageView(m_device->device(), &ivci, NULL, &view);
+        ASSERT_VK_SUCCESS(err);
+
+        fb_info.pAttachments = &view;
+        fb_info.height = 100;
+        fb_info.width = 100;
+        fb_info.layers = 1;
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-00884");
+        err = vk::CreateFramebuffer(device(), &fb_info, NULL, &fb);
+
+        m_errorMonitor->VerifyFound();
+        if (err == VK_SUCCESS) {
+            vk::DestroyFramebuffer(m_device->device(), fb, NULL);
+        }
+        vk::DestroyImageView(m_device->device(), view, NULL);
+    }
+
+    // reset attachment to color attachment
+    fb_info.pAttachments = ivs;
+
+    // Request fb that exceeds max width
+    fb_info.width = m_device->props.limits.maxFramebufferWidth + 1;
+    fb_info.height = 100;
+    fb_info.layers = 1;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-width-00886");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-00882");
+    err = vk::CreateFramebuffer(device(), &fb_info, NULL, &fb);
+    m_errorMonitor->VerifyFound();
+    if (err == VK_SUCCESS) {
+        vk::DestroyFramebuffer(m_device->device(), fb, NULL);
+    }
+    // and width=0
+    fb_info.width = 0;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-width-00885");
+    err = vk::CreateFramebuffer(device(), &fb_info, NULL, &fb);
+    m_errorMonitor->VerifyFound();
+    if (err == VK_SUCCESS) {
+        vk::DestroyFramebuffer(m_device->device(), fb, NULL);
+    }
+
+    // Request fb that exceeds max height
+    fb_info.width = 100;
+    fb_info.height = m_device->props.limits.maxFramebufferHeight + 1;
+    fb_info.layers = 1;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-height-00888");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-00882");
+    err = vk::CreateFramebuffer(device(), &fb_info, NULL, &fb);
+    m_errorMonitor->VerifyFound();
+    if (err == VK_SUCCESS) {
+        vk::DestroyFramebuffer(m_device->device(), fb, NULL);
+    }
+    // and height=0
+    fb_info.height = 0;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-height-00887");
+    err = vk::CreateFramebuffer(device(), &fb_info, NULL, &fb);
+    m_errorMonitor->VerifyFound();
+    if (err == VK_SUCCESS) {
+        vk::DestroyFramebuffer(m_device->device(), fb, NULL);
+    }
+
+    // Request fb that exceeds max layers
+    fb_info.width = 100;
+    fb_info.height = 100;
+    fb_info.layers = m_device->props.limits.maxFramebufferLayers + 1;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-layers-00890");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-pAttachments-00882");
+    err = vk::CreateFramebuffer(device(), &fb_info, NULL, &fb);
+    m_errorMonitor->VerifyFound();
+    if (err == VK_SUCCESS) {
+        vk::DestroyFramebuffer(m_device->device(), fb, NULL);
+    }
+    // and layers=0
+    fb_info.layers = 0;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-layers-00889");
+    err = vk::CreateFramebuffer(device(), &fb_info, NULL, &fb);
+    m_errorMonitor->VerifyFound();
+    if (err == VK_SUCCESS) {
+        vk::DestroyFramebuffer(m_device->device(), fb, NULL);
+    }
+
+    // Try to create with pAttachments = NULL
+    fb_info.layers = 1;
+    fb_info.pAttachments = NULL;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID_Undefined");
+    err = vk::CreateFramebuffer(device(), &fb_info, NULL, &fb);
+    m_errorMonitor->VerifyFound();
+    if (err == VK_SUCCESS) {
+        vk::DestroyFramebuffer(m_device->device(), fb, NULL);
+    }
+
+    vk::DestroyRenderPass(m_device->device(), rp, NULL);
+}
+
+TEST_F(VkLayerTest, AllocDescriptorFromEmptyPool) {
+    TEST_DESCRIPTION("Attempt to allocate more sets and descriptors than descriptor pool has available.");
+    VkResult err;
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // This test is valid for Vulkan 1.0 only -- skip if device has an API version greater than 1.0.
+    if (m_device->props.apiVersion >= VK_API_VERSION_1_1) {
+        printf("%s Device has apiVersion greater than 1.0 -- skipping Descriptor Set checks.\n", kSkipPrefix);
+        return;
+    }
+
+    // Create pool with only Sampler descriptors (and maxSets = 1), but try to alloc a Uniform Buffer
+    // descriptor from it
+    VkDescriptorPoolSize ds_type_count = {};
+    ds_type_count.type = VK_DESCRIPTOR_TYPE_SAMPLER;
+    ds_type_count.descriptorCount = 2;
+
+    VkDescriptorPoolCreateInfo ds_pool_ci = {};
+    ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+    ds_pool_ci.pNext = NULL;
+    ds_pool_ci.flags = 0;
+    ds_pool_ci.maxSets = 1;
+    ds_pool_ci.poolSizeCount = 1;
+    ds_pool_ci.pPoolSizes = &ds_type_count;
+
+    VkDescriptorPool ds_pool;
+    err = vk::CreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
+    ASSERT_VK_SUCCESS(err);
+
+    VkDescriptorSetLayoutBinding dsl_binding_samp = {};
+    dsl_binding_samp.binding = 0;
+    dsl_binding_samp.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
+    dsl_binding_samp.descriptorCount = 1;
+    dsl_binding_samp.stageFlags = VK_SHADER_STAGE_ALL;
+    dsl_binding_samp.pImmutableSamplers = NULL;
+
+    const VkDescriptorSetLayoutObj ds_layout_samp(m_device, {dsl_binding_samp});
+
+    // Try to allocate 2 sets when pool only has 1 set
+    VkDescriptorSet descriptor_sets[2];
+    VkDescriptorSetLayout set_layouts[2] = {ds_layout_samp.handle(), ds_layout_samp.handle()};
+    VkDescriptorSetAllocateInfo alloc_info = {};
+    alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+    alloc_info.descriptorSetCount = 2;
+    alloc_info.descriptorPool = ds_pool;
+    alloc_info.pSetLayouts = set_layouts;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkDescriptorSetAllocateInfo-descriptorSetCount-00306");
+    err = vk::AllocateDescriptorSets(m_device->device(), &alloc_info, descriptor_sets);
+    m_errorMonitor->VerifyFound();
+
+    alloc_info.descriptorSetCount = 1;
+    // Create layout w/ descriptor type not available in pool
+    VkDescriptorSetLayoutBinding dsl_binding = {};
+    dsl_binding.binding = 0;
+    dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    dsl_binding.descriptorCount = 1;
+    dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
+    dsl_binding.pImmutableSamplers = NULL;
+
+    const VkDescriptorSetLayoutObj ds_layout_ub(m_device, {dsl_binding});
+
+    VkDescriptorSet descriptor_set;
+    alloc_info.descriptorSetCount = 1;
+    alloc_info.pSetLayouts = &ds_layout_ub.handle();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorSetAllocateInfo-descriptorPool-00307");
+    err = vk::AllocateDescriptorSets(m_device->device(), &alloc_info, &descriptor_set);
+
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyDescriptorPool(m_device->device(), ds_pool, NULL);
+}
+
+TEST_F(VkLayerTest, FreeDescriptorFromOneShotPool) {
+    VkResult err;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkFreeDescriptorSets-descriptorPool-00312");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkDescriptorPoolSize ds_type_count = {};
+    ds_type_count.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    ds_type_count.descriptorCount = 1;
+
+    VkDescriptorPoolCreateInfo ds_pool_ci = {};
+    ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+    ds_pool_ci.pNext = NULL;
+    ds_pool_ci.maxSets = 1;
+    ds_pool_ci.poolSizeCount = 1;
+    ds_pool_ci.flags = 0;
+    // Not specifying VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT means
+    // app can only call vk::ResetDescriptorPool on this pool.
+    ds_pool_ci.pPoolSizes = &ds_type_count;
+
+    VkDescriptorPool ds_pool;
+    err = vk::CreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
+    ASSERT_VK_SUCCESS(err);
+
+    VkDescriptorSetLayoutBinding dsl_binding = {};
+    dsl_binding.binding = 0;
+    dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    dsl_binding.descriptorCount = 1;
+    dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
+    dsl_binding.pImmutableSamplers = NULL;
+
+    const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});
+
+    VkDescriptorSet descriptorSet;
+    VkDescriptorSetAllocateInfo alloc_info = {};
+    alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+    alloc_info.descriptorSetCount = 1;
+    alloc_info.descriptorPool = ds_pool;
+    alloc_info.pSetLayouts = &ds_layout.handle();
+    err = vk::AllocateDescriptorSets(m_device->device(), &alloc_info, &descriptorSet);
+    ASSERT_VK_SUCCESS(err);
+
+    err = vk::FreeDescriptorSets(m_device->device(), ds_pool, 1, &descriptorSet);
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyDescriptorPool(m_device->device(), ds_pool, NULL);
+}
+
+TEST_F(VkLayerTest, InvalidDescriptorPool) {
+    // Attempt to clear Descriptor Pool with bad object.
+    // ObjectTracker should catch this.
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkResetDescriptorPool-descriptorPool-parameter");
+    uint64_t fake_pool_handle = 0xbaad6001;
+    VkDescriptorPool bad_pool = reinterpret_cast<VkDescriptorPool &>(fake_pool_handle);
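+    // The handle is fabricated from an arbitrary integer and was never returned by a create call,
+    // so ObjectTracker should reject it as an invalid descriptorPool parameter.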
+    vk::ResetDescriptorPool(device(), bad_pool, 0);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InvalidDescriptorSet) {
+    // Attempt to bind an invalid Descriptor Set to a valid Command Buffer
+    // ObjectTracker should catch this.
+    // Create a valid cmd buffer
+    // call vk::CmdBindDescriptorSets w/ false Descriptor Set
+
+    uint64_t fake_set_handle = 0xbaad6001;
+    VkDescriptorSet bad_set = reinterpret_cast<VkDescriptorSet &>(fake_set_handle);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindDescriptorSets-pDescriptorSets-parameter");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkDescriptorSetLayoutBinding layout_binding = {};
+    layout_binding.binding = 0;
+    layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    layout_binding.descriptorCount = 1;
+    layout_binding.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
+    layout_binding.pImmutableSamplers = NULL;
+
+    const VkDescriptorSetLayoutObj descriptor_set_layout(m_device, {layout_binding});
+
+    const VkPipelineLayoutObj pipeline_layout(DeviceObj(), {&descriptor_set_layout});
+
+    m_commandBuffer->begin();
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1, &bad_set,
+                              0, NULL);
+    m_errorMonitor->VerifyFound();
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, InvalidDescriptorSetLayout) {
+    // Attempt to create a Pipeline Layout with an invalid Descriptor Set Layout.
+    // ObjectTracker should catch this.
+    uint64_t fake_layout_handle = 0xbaad6001;
+    VkDescriptorSetLayout bad_layout = reinterpret_cast<VkDescriptorSetLayout &>(fake_layout_handle);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-parameter");
+    ASSERT_NO_FATAL_FAILURE(Init());
+    VkPipelineLayout pipeline_layout;
+    VkPipelineLayoutCreateInfo plci = {};
+    plci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+    plci.pNext = NULL;
+    plci.setLayoutCount = 1;
+    plci.pSetLayouts = &bad_layout;
+    vk::CreatePipelineLayout(device(), &plci, NULL, &pipeline_layout);
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, WriteDescriptorSetIntegrityCheck) {
+    TEST_DESCRIPTION(
+        "This test verifies some requirements of chapter 13.2.3 of the Vulkan Spec "
+        "1) A uniform buffer update must have a valid buffer index. "
+        "2) When using an array of descriptors in a single WriteDescriptor, the descriptor types and stageflags "
+        "must all be the same. "
+        "3) Immutable Sampler state must match across descriptors. "
+        "4) That sampled image descriptors have required layouts. ");
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-00324");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+    VkSampler sampler;
+    VkResult err = vk::CreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+    ASSERT_VK_SUCCESS(err);
+
+    OneOffDescriptorSet::Bindings bindings = {
+        {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, NULL},
+        {1, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, NULL},
+        {2, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, static_cast<VkSampler *>(&sampler)},
+        {3, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, NULL}};
+    OneOffDescriptorSet descriptor_set(m_device, bindings);
+    ASSERT_TRUE(descriptor_set.Initialized());
+
+    VkWriteDescriptorSet descriptor_write = {};
+    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_write.dstSet = descriptor_set.set_;
+    descriptor_write.dstBinding = 0;
+    descriptor_write.descriptorCount = 1;
+    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+
+    // 1) The uniform buffer is intentionally invalid here
+    vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+    m_errorMonitor->VerifyFound();
+
+    // Create a buffer to update the descriptor with
+    uint32_t qfi = 0;
+    VkBufferCreateInfo buffCI = {};
+    buffCI.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    buffCI.size = 1024;
+    buffCI.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+    buffCI.queueFamilyIndexCount = 1;
+    buffCI.pQueueFamilyIndices = &qfi;
+
+    VkBufferObj dynamic_uniform_buffer;
+    dynamic_uniform_buffer.init(*m_device, buffCI);
+
+    VkDescriptorBufferInfo buffInfo[2] = {};
+    buffInfo[0].buffer = dynamic_uniform_buffer.handle();
+    buffInfo[0].offset = 0;
+    buffInfo[0].range = 1024;
+    buffInfo[1].buffer = dynamic_uniform_buffer.handle();
+    buffInfo[1].offset = 0;
+    buffInfo[1].range = 1024;
+    descriptor_write.pBufferInfo = buffInfo;
+    descriptor_write.descriptorCount = 2;
+
+    // 2) The descriptorType and stageFlags don't match between binding 0 and binding 1,
+    //    which this 2-descriptor write rolls over into
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-dstArrayElement-00321");
+    vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+    m_errorMonitor->VerifyFound();
+
+    // 3) Binding 1 has a null pImmutableSamplers while
+    // binding 2 contains an immutable sampler
+    descriptor_write.dstBinding = 1;
+    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
+
+    // Make pImageInfo non-null to avoid complaints about it being missing
+    VkDescriptorImageInfo imageInfo = {};
+    imageInfo.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+    descriptor_write.pImageInfo = &imageInfo;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-dstArrayElement-00321");
+    vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+    m_errorMonitor->VerifyFound();
+
+    // 4) That sampled image descriptors have required layouts
+    // Create images to update the descriptor with
+    VkImageObj image(m_device);
+    const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
+    image.Init(32, 32, 1, tex_format, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(image.initialized());
+
+    // Attempt a write with an incorrect layout for the sampled image descriptor
+    imageInfo.sampler = VK_NULL_HANDLE;
+    imageInfo.imageView = image.targetView(tex_format);
+    imageInfo.imageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+    descriptor_write.dstBinding = 3;
+    descriptor_write.descriptorCount = 1;
+    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-01403");
+    vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroySampler(m_device->device(), sampler, NULL);
+}
+
+TEST_F(VkLayerTest, WriteDescriptorSetConsecutiveUpdates) {
+    TEST_DESCRIPTION(
+        "Verifies that updates rolling over to next descriptor work correctly by destroying buffer from consecutive update known "
+        "to be used in descriptor set and verifying that error is flagged.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    OneOffDescriptorSet descriptor_set(m_device, {
+                                                     {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 2, VK_SHADER_STAGE_ALL, nullptr},
+                                                     {1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                                 });
+
+    uint32_t qfi = 0;
+    VkBufferCreateInfo bci = {};
+    bci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    bci.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+    bci.size = 2048;
+    bci.queueFamilyIndexCount = 1;
+    bci.pQueueFamilyIndices = &qfi;
+    VkBufferObj buffer0;
+    buffer0.init(*m_device, bci);
+    CreatePipelineHelper pipe(*this);
+    {  // Scope 2nd buffer to cause early destruction
+        VkBufferObj buffer1;
+        bci.size = 1024;
+        buffer1.init(*m_device, bci);
+
+        VkDescriptorBufferInfo buffer_info[3] = {};
+        buffer_info[0].buffer = buffer0.handle();
+        buffer_info[0].offset = 0;
+        buffer_info[0].range = 1024;
+        buffer_info[1].buffer = buffer0.handle();
+        buffer_info[1].offset = 1024;
+        buffer_info[1].range = 1024;
+        buffer_info[2].buffer = buffer1.handle();
+        buffer_info[2].offset = 0;
+        buffer_info[2].range = 1024;
+
+        VkWriteDescriptorSet descriptor_write = {};
+        descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+        descriptor_write.dstSet = descriptor_set.set_;  // descriptor_set;
+        descriptor_write.dstBinding = 0;
+        descriptor_write.descriptorCount = 3;
+        descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+        descriptor_write.pBufferInfo = buffer_info;
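+        // Binding 0 only holds 2 descriptors, so this 3-descriptor write rolls over into binding 1;
+        // buffer_info[2] (the scoped buffer1) therefore ends up in binding 1.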
+
+        // Update descriptor
+        vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+        // Create PSO that uses the uniform buffers
+        char const *fsSource =
+            "#version 450\n"
+            "\n"
+            "layout(location=0) out vec4 x;\n"
+            "layout(set=0) layout(binding=0) uniform foo { int x; int y; } bar;\n"
+            "layout(set=0) layout(binding=1) uniform blah { int x; } duh;\n"
+            "void main(){\n"
+            "   x = vec4(duh.x, bar.y, bar.x, 1);\n"
+            "}\n";
+        VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+        pipe.InitInfo();
+        pipe.shader_stages_ = {pipe.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
+        const VkDynamicState dyn_states[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
+        VkPipelineDynamicStateCreateInfo dyn_state_ci = {};
+        dyn_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
+        dyn_state_ci.dynamicStateCount = size(dyn_states);
+        dyn_state_ci.pDynamicStates = dyn_states;
+        pipe.dyn_state_ci_ = dyn_state_ci;
+        pipe.InitState();
+        pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&descriptor_set.layout_});
+        pipe.CreateGraphicsPipeline();
+
+        m_commandBuffer->begin();
+        m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+        vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
+        vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
+                                  &descriptor_set.set_, 0, nullptr);
+
+        VkViewport viewport = {0, 0, 16, 16, 0, 1};
+        vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+        VkRect2D scissor = {{0, 0}, {16, 16}};
+        vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+        vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
+        vk::CmdEndRenderPass(m_commandBuffer->handle());
+        m_commandBuffer->end();
+    }
+    // buffer1 just went out of scope and was destroyed along with its memory
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkBuffer");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkDeviceMemory");
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InvalidCmdBufferDescriptorSetBufferDestroyed) {
+    TEST_DESCRIPTION(
+        "Attempt to draw with a command buffer that is invalid due to a bound descriptor set with a buffer dependency being "
+        "destroyed.");
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    CreatePipelineHelper pipe(*this);
+    {
+        // Create a buffer to update the descriptor with
+        uint32_t qfi = 0;
+        VkBufferCreateInfo buffCI = {};
+        buffCI.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+        buffCI.size = 1024;
+        buffCI.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+        buffCI.queueFamilyIndexCount = 1;
+        buffCI.pQueueFamilyIndices = &qfi;
+
+        VkBufferObj buffer;
+        buffer.init(*m_device, buffCI);
+
+        // Create PSO to be used for draw-time errors below
+        char const *fsSource =
+            "#version 450\n"
+            "\n"
+            "layout(location=0) out vec4 x;\n"
+            "layout(set=0) layout(binding=0) uniform foo { int x; int y; } bar;\n"
+            "void main(){\n"
+            "   x = vec4(bar.y);\n"
+            "}\n";
+        VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+        pipe.InitInfo();
+        pipe.shader_stages_ = {pipe.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
+        const VkDynamicState dyn_states[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
+        VkPipelineDynamicStateCreateInfo dyn_state_ci = {};
+        dyn_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
+        dyn_state_ci.dynamicStateCount = size(dyn_states);
+        dyn_state_ci.pDynamicStates = dyn_states;
+        pipe.dyn_state_ci_ = dyn_state_ci;
+        pipe.InitState();
+        pipe.CreateGraphicsPipeline();
+
+        // Correctly update descriptor to avoid "NOT_UPDATED" error
+        pipe.descriptor_set_->WriteDescriptorBufferInfo(0, buffer.handle(), 1024);
+        pipe.descriptor_set_->UpdateDescriptorSets();
+
+        m_commandBuffer->begin();
+        m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+        vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
+        vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
+                                  &pipe.descriptor_set_->set_, 0, NULL);
+
+        vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &m_viewports[0]);
+        vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &m_scissors[0]);
+
+        m_commandBuffer->Draw(1, 0, 0, 0);
+        m_commandBuffer->EndRenderPass();
+        m_commandBuffer->end();
+    }
+    // The scoped buffer above has been destroyed, which should invalidate the cmd buffer, causing an error on submit
+
+    // Attempt to submit cmd buffer
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    // Invalid VkBuffer
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffe");
+    // Invalid VkDeviceMemory
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " that is invalid because bound ");
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InvalidCmdBufferDescriptorSetImageSamplerDestroyed) {
+    TEST_DESCRIPTION(
+        "Attempt to draw with a command buffer that is invalid due to a bound descriptor sets with a combined image sampler having "
+        "their image, sampler, and descriptor set each respectively destroyed and then attempting to submit associated cmd "
+        "buffers. Attempt to destroy a DescriptorSet that is in use.");
+    ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkDescriptorPoolSize ds_type_count = {};
+    ds_type_count.type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+    ds_type_count.descriptorCount = 1;
+
+    VkDescriptorPoolCreateInfo ds_pool_ci = {};
+    ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+    ds_pool_ci.pNext = NULL;
+    ds_pool_ci.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
+    ds_pool_ci.maxSets = 1;
+    ds_pool_ci.poolSizeCount = 1;
+    ds_pool_ci.pPoolSizes = &ds_type_count;
+
+    VkDescriptorPool ds_pool;
+    VkResult err = vk::CreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
+    ASSERT_VK_SUCCESS(err);
+
+    VkDescriptorSetLayoutBinding dsl_binding = {};
+    dsl_binding.binding = 0;
+    dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+    dsl_binding.descriptorCount = 1;
+    dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
+    dsl_binding.pImmutableSamplers = NULL;
+
+    const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});
+
+    VkDescriptorSet descriptorSet;
+    VkDescriptorSetAllocateInfo alloc_info = {};
+    alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+    alloc_info.descriptorSetCount = 1;
+    alloc_info.descriptorPool = ds_pool;
+    alloc_info.pSetLayouts = &ds_layout.handle();
+    err = vk::AllocateDescriptorSets(m_device->device(), &alloc_info, &descriptorSet);
+    ASSERT_VK_SUCCESS(err);
+
+    const VkPipelineLayoutObj pipeline_layout(m_device, {&ds_layout});
+
+    // Create images to update the descriptor with
+    VkImage image;
+    VkImage image2;
+    const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
+    const int32_t tex_width = 32;
+    const int32_t tex_height = 32;
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = tex_format;
+    image_create_info.extent.width = tex_width;
+    image_create_info.extent.height = tex_height;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+    image_create_info.flags = 0;
+    err = vk::CreateImage(m_device->device(), &image_create_info, NULL, &image);
+    ASSERT_VK_SUCCESS(err);
+    err = vk::CreateImage(m_device->device(), &image_create_info, NULL, &image2);
+    ASSERT_VK_SUCCESS(err);
+
+    VkMemoryRequirements memory_reqs;
+    VkDeviceMemory image_memory;
+    bool pass;
+    VkMemoryAllocateInfo memory_info = {};
+    memory_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    memory_info.pNext = NULL;
+    memory_info.allocationSize = 0;
+    memory_info.memoryTypeIndex = 0;
+    vk::GetImageMemoryRequirements(m_device->device(), image, &memory_reqs);
+    // Allocate enough memory for both images
+    VkDeviceSize align_mod = memory_reqs.size % memory_reqs.alignment;
+    VkDeviceSize aligned_size = ((align_mod == 0) ? memory_reqs.size : (memory_reqs.size + memory_reqs.alignment - align_mod));
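+    // Round the first image's size up to its alignment so the second image can be bound right after it,
+    // e.g. size = 4000 with alignment = 1024 gives align_mod = 928 and aligned_size = 4096.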
+    memory_info.allocationSize = aligned_size * 2;
+    pass = m_device->phy().set_memory_type(memory_reqs.memoryTypeBits, &memory_info, 0);
+    ASSERT_TRUE(pass);
+    err = vk::AllocateMemory(m_device->device(), &memory_info, NULL, &image_memory);
+    ASSERT_VK_SUCCESS(err);
+    err = vk::BindImageMemory(m_device->device(), image, image_memory, 0);
+    ASSERT_VK_SUCCESS(err);
+    // Bind second image to memory right after first image
+    err = vk::BindImageMemory(m_device->device(), image2, image_memory, aligned_size);
+    ASSERT_VK_SUCCESS(err);
+
+    VkImageViewCreateInfo image_view_create_info = {};
+    image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    image_view_create_info.image = image;
+    image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    image_view_create_info.format = tex_format;
+    image_view_create_info.subresourceRange.layerCount = 1;
+    image_view_create_info.subresourceRange.baseMipLevel = 0;
+    image_view_create_info.subresourceRange.levelCount = 1;
+    image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+    VkImageView tmp_view;  // First test deletes this view
+    VkImageView view;
+    VkImageView view2;
+    err = vk::CreateImageView(m_device->device(), &image_view_create_info, NULL, &tmp_view);
+    ASSERT_VK_SUCCESS(err);
+    err = vk::CreateImageView(m_device->device(), &image_view_create_info, NULL, &view);
+    ASSERT_VK_SUCCESS(err);
+    image_view_create_info.image = image2;
+    err = vk::CreateImageView(m_device->device(), &image_view_create_info, NULL, &view2);
+    ASSERT_VK_SUCCESS(err);
+    // Create Samplers
+    VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+    VkSampler sampler;
+    VkSampler sampler2;
+    err = vk::CreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+    ASSERT_VK_SUCCESS(err);
+    err = vk::CreateSampler(m_device->device(), &sampler_ci, NULL, &sampler2);
+    ASSERT_VK_SUCCESS(err);
+    // Update descriptor with image and sampler
+    VkDescriptorImageInfo img_info = {};
+    img_info.sampler = sampler;
+    img_info.imageView = tmp_view;
+    img_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+
+    VkWriteDescriptorSet descriptor_write;
+    memset(&descriptor_write, 0, sizeof(descriptor_write));
+    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_write.dstSet = descriptorSet;
+    descriptor_write.dstBinding = 0;
+    descriptor_write.descriptorCount = 1;
+    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+    descriptor_write.pImageInfo = &img_info;
+
+    vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+    // Create PSO to be used for draw-time errors below
+    char const *fsSource =
+        "#version 450\n"
+        "\n"
+        "layout(set=0, binding=0) uniform sampler2D s;\n"
+        "layout(location=0) out vec4 x;\n"
+        "void main(){\n"
+        "   x = texture(s, vec2(1));\n"
+        "}\n";
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+    VkPipelineObj pipe(m_device);
+    pipe.AddShader(&vs);
+    pipe.AddShader(&fs);
+    pipe.AddDefaultColorAttachment();
+    pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+    // Record and submit a valid command buffer first; the error cases below start by destroying the image view
+    m_commandBuffer->begin();
+
+    // Transition image layout from VK_IMAGE_LAYOUT_UNDEFINED to VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
+    VkImageMemoryBarrier barrier = {};
+    barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+    barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+    barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    barrier.image = image;
+    barrier.srcAccessMask = 0;
+    barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
+    barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    barrier.subresourceRange.baseMipLevel = 0;
+    barrier.subresourceRange.levelCount = 1;
+    barrier.subresourceRange.baseArrayLayer = 0;
+    barrier.subresourceRange.layerCount = 1;
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+                           nullptr, 0, nullptr, 1, &barrier);
+
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+                              &descriptorSet, 0, NULL);
+    VkViewport viewport = {0, 0, 16, 16, 0, 1};
+    VkRect2D scissor = {{0, 0}, {16, 16}};
+    vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+    vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+    m_commandBuffer->Draw(1, 0, 0, 0);
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    // This first submit should be successful
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    vk::QueueWaitIdle(m_device->m_queue);
+
+    // Now destroy imageview and reset cmdBuffer
+    vk::DestroyImageView(m_device->device(), tmp_view, NULL);
+    m_commandBuffer->reset(0);
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+                              &descriptorSet, 0, NULL);
+    vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+    vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " that is invalid or has been destroyed.");
+    m_commandBuffer->Draw(1, 0, 0, 0);
+    m_errorMonitor->VerifyFound();
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+
+    // Re-update descriptor with new view
+    img_info.imageView = view;
+    vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+    // Now test destroying sampler prior to cmd buffer submission
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+                              &descriptorSet, 0, NULL);
+    vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+    vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+    m_commandBuffer->Draw(1, 0, 0, 0);
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+    // Destroying the sampler invalidates the cmd buffer, causing an error on submit
+    vk::DestroySampler(m_device->device(), sampler, NULL);
+    // Attempt to submit cmd buffer
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkSampler");
+    submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+
+    // Now re-update descriptor with valid sampler and delete image
+    img_info.sampler = sampler2;
+    vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+    VkCommandBufferBeginInfo info = {};
+    info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+    info.flags = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkImage");
+    m_commandBuffer->begin(&info);
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+                              &descriptorSet, 0, NULL);
+    vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+    vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+    m_commandBuffer->Draw(1, 0, 0, 0);
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+    // Destroying the image invalidates the cmd buffer, causing an error on submit
+    vk::DestroyImage(m_device->device(), image, NULL);
+    // Attempt to submit cmd buffer
+    submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+    // Now update descriptor to be valid, but then free descriptor
+    img_info.imageView = view2;
+    vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+    m_commandBuffer->begin(&info);
+
+    // Transition image2 layout from VK_IMAGE_LAYOUT_UNDEFINED to VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
+    barrier.image = image2;
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+                           nullptr, 0, nullptr, 1, &barrier);
+
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+                              &descriptorSet, 0, NULL);
+    vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+    vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+    m_commandBuffer->Draw(1, 0, 0, 0);
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+
+    // Immediately try to free the descriptor set while it is in use by the submitted command buffer - failure expected
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkFreeDescriptorSets-pDescriptorSets-00309");
+    vk::FreeDescriptorSets(m_device->device(), ds_pool, 1, &descriptorSet);
+    m_errorMonitor->VerifyFound();
+
+    // Try again once the queue is idle - should succeed w/o error
+    // TODO - though the particular error above doesn't re-occur, there are other 'unexpecteds' still to clean up
+    vk::QueueWaitIdle(m_device->m_queue);
+    m_errorMonitor->SetUnexpectedError(
+        "pDescriptorSets must be a valid pointer to an array of descriptorSetCount VkDescriptorSet handles, each element of which "
+        "must either be a valid handle or VK_NULL_HANDLE");
+    m_errorMonitor->SetUnexpectedError("Unable to remove DescriptorSet obj");
+    vk::FreeDescriptorSets(m_device->device(), ds_pool, 1, &descriptorSet);
+
+    // Attempt to submit cmd buffer containing the freed descriptor set
+    submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkDescriptorSet");
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+
+    // Cleanup
+    vk::FreeMemory(m_device->device(), image_memory, NULL);
+    vk::DestroySampler(m_device->device(), sampler2, NULL);
+    vk::DestroyImage(m_device->device(), image2, NULL);
+    vk::DestroyImageView(m_device->device(), view, NULL);
+    vk::DestroyImageView(m_device->device(), view2, NULL);
+    vk::DestroyDescriptorPool(m_device->device(), ds_pool, NULL);
+}
+
+TEST_F(VkLayerTest, InvalidDescriptorSetSamplerDestroyed) {
+    TEST_DESCRIPTION("Attempt to draw with a bound descriptor sets with a combined image sampler where sampler has been deleted.");
+    ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    OneOffDescriptorSet descriptor_set(m_device,
+                                       {
+                                           {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                           {1, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                       });
+
+    const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});
+    // Create images to update the descriptor with
+    VkImageObj image(m_device);
+    const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
+    image.Init(32, 32, 1, tex_format, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(image.initialized());
+
+    VkImageViewCreateInfo image_view_create_info = {};
+    image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    image_view_create_info.image = image.handle();
+    image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    image_view_create_info.format = tex_format;
+    image_view_create_info.subresourceRange.layerCount = 1;
+    image_view_create_info.subresourceRange.baseMipLevel = 0;
+    image_view_create_info.subresourceRange.levelCount = 1;
+    image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+    VkImageView view;
+    VkResult err = vk::CreateImageView(m_device->device(), &image_view_create_info, NULL, &view);
+    ASSERT_VK_SUCCESS(err);
+    // Create Samplers
+    VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+    VkSampler sampler;
+    err = vk::CreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+    ASSERT_VK_SUCCESS(err);
+    VkSampler sampler1;
+    err = vk::CreateSampler(m_device->device(), &sampler_ci, NULL, &sampler1);
+    ASSERT_VK_SUCCESS(err);
+    // Update descriptor with image and sampler
+    VkDescriptorImageInfo img_info = {};
+    img_info.sampler = sampler;
+    img_info.imageView = view;
+    img_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+
+    VkDescriptorImageInfo img_info1 = img_info;
+    img_info1.sampler = sampler1;
+
+    VkWriteDescriptorSet descriptor_write;
+    memset(&descriptor_write, 0, sizeof(descriptor_write));
+    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_write.dstSet = descriptor_set.set_;
+    descriptor_write.dstBinding = 0;
+    descriptor_write.descriptorCount = 1;
+    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+    descriptor_write.pImageInfo = &img_info;
+
+    std::array<VkWriteDescriptorSet, 2> descriptor_writes = {descriptor_write, descriptor_write};
+    descriptor_writes[1].dstBinding = 1;
+    descriptor_writes[1].pImageInfo = &img_info1;
+
+    vk::UpdateDescriptorSets(m_device->device(), 2, descriptor_writes.data(), 0, NULL);
+
+    // Destroy the sampler before it's bound to the cmd buffer
+    vk::DestroySampler(m_device->device(), sampler1, NULL);
+
+    // Create PSO to be used for draw-time errors below
+    char const *fsSource =
+        "#version 450\n"
+        "\n"
+        "layout(set=0, binding=0) uniform sampler2D s;\n"
+        "layout(set=0, binding=1) uniform sampler2D s1;\n"
+        "layout(location=0) out vec4 x;\n"
+        "void main(){\n"
+        "   x = texture(s, vec2(1));\n"
+        "   x = texture(s1, vec2(1));\n"
+        "}\n";
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+    VkPipelineObj pipe(m_device);
+    pipe.AddShader(&vs);
+    pipe.AddShader(&fs);
+    pipe.AddDefaultColorAttachment();
+    pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+    // Error case: drawing with a descriptor (binding #1) whose sampler was destroyed before recording
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+                              &descriptor_set.set_, 0, NULL);
+    VkViewport viewport = {0, 0, 16, 16, 0, 1};
+    VkRect2D scissor = {{0, 0}, {16, 16}};
+    vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+    vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " Descriptor in binding #1 index 0 is using sampler ");
+    m_commandBuffer->Draw(1, 0, 0, 0);
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+
+    vk::DestroySampler(m_device->device(), sampler, NULL);
+    vk::DestroyImageView(m_device->device(), view, NULL);
+}
+
+TEST_F(VkLayerTest, ImageDescriptorLayoutMismatch) {
+    TEST_DESCRIPTION("Create an image sampler layout->image layout mismatch within/without a command buffer");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    bool maint2_support = DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE2_EXTENSION_NAME);
+    if (maint2_support) {
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
+    } else {
+        printf("%s Relaxed layout matching subtest requires API >= 1.1 or KHR_MAINTENANCE2 extension, unavailable - skipped.\n",
+               kSkipPrefix);
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    OneOffDescriptorSet descriptor_set(m_device,
+                                       {
+                                           {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                       });
+    VkDescriptorSet descriptorSet = descriptor_set.set_;
+
+    const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});
+
+    // Create image, view, and sampler
+    const VkFormat format = VK_FORMAT_B8G8R8A8_UNORM;
+    VkImageObj image(m_device);
+    // SAMPLED for the descriptor plus TRANSFER_SRC so the image can be transitioned to TRANSFER_SRC_OPTIMAL in do_test
+    image.Init(32, 32, 1, format, VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_OPTIMAL,
+               0);
+    ASSERT_TRUE(image.initialized());
+
+    vk_testing::ImageView view;
+    auto image_view_create_info = SafeSaneImageViewCreateInfo(image, format, VK_IMAGE_ASPECT_COLOR_BIT);
+    view.init(*m_device, image_view_create_info);
+    ASSERT_TRUE(view.initialized());
+
+    // Create Sampler
+    vk_testing::Sampler sampler;
+    VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+    sampler.init(*m_device, sampler_ci);
+    ASSERT_TRUE(sampler.initialized());
+
+    // Setup structure for descriptor update with sampler, for update in do_test below
+    VkDescriptorImageInfo img_info = {};
+    img_info.sampler = sampler.handle();
+
+    VkWriteDescriptorSet descriptor_write;
+    memset(&descriptor_write, 0, sizeof(descriptor_write));
+    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_write.dstSet = descriptorSet;
+    descriptor_write.dstBinding = 0;
+    descriptor_write.descriptorCount = 1;
+    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+    descriptor_write.pImageInfo = &img_info;
+
+    // Create PSO to be used for draw-time errors below
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, bindStateFragSamplerShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+    VkPipelineObj pipe(m_device);
+    pipe.AddShader(&vs);
+    pipe.AddShader(&fs);
+    pipe.AddDefaultColorAttachment();
+    pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+    VkViewport viewport = {0, 0, 16, 16, 0, 1};
+    VkRect2D scissor = {{0, 0}, {16, 16}};
+
+    VkCommandBufferObj cmd_buf(m_device, m_commandPool);
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &cmd_buf.handle();
+
+    enum TestType {
+        kInternal,  // Image layout mismatch is *within* a given command buffer
+        kExternal   // Image layout mismatch is with the current state of the image, found at QueueSubmit
+    };
+    std::array<TestType, 2> test_list = {kInternal, kExternal};
+    const std::vector<std::string> internal_errors = {"VUID-VkDescriptorImageInfo-imageLayout-00344",
+                                                      "UNASSIGNED-CoreValidation-DrawState-DescriptorSetNotUpdated"};
+    const std::vector<std::string> external_errors = {"UNASSIGNED-CoreValidation-DrawState-InvalidImageLayout"};
+
+    // Common steps to create the two classes of errors (or two classes of positives)
+    auto do_test = [&](VkImageObj *image, vk_testing::ImageView *view, VkImageAspectFlags aspect_mask, VkImageLayout image_layout,
+                       VkImageLayout descriptor_layout, const bool positive_test) {
+        // Set up the descriptor
+        img_info.imageView = view->handle();
+        img_info.imageLayout = descriptor_layout;
+        vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+        for (TestType test_type : test_list) {
+            cmd_buf.begin();
+            // Record a layout transition that differs from the layout written into the descriptor.
+            const VkFlags read_write = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT;
+            auto image_barrier = image->image_memory_barrier(read_write, read_write, VK_IMAGE_LAYOUT_UNDEFINED, image_layout,
+                                                             image->subresource_range(aspect_mask));
+            cmd_buf.PipelineBarrier(VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, 0, 0, nullptr, 0,
+                                    nullptr, 1, &image_barrier);
+
+            if (test_type == kExternal) {
+                // The image layout is external to the command buffer we are recording to test.  Submit to push to instance scope.
+                cmd_buf.end();
+                vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+                vk::QueueWaitIdle(m_device->m_queue);
+                cmd_buf.begin();
+            }
+
+            cmd_buf.BeginRenderPass(m_renderPassBeginInfo);
+            vk::CmdBindPipeline(cmd_buf.handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+            vk::CmdBindDescriptorSets(cmd_buf.handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+                                      &descriptorSet, 0, NULL);
+            vk::CmdSetViewport(cmd_buf.handle(), 0, 1, &viewport);
+            vk::CmdSetScissor(cmd_buf.handle(), 0, 1, &scissor);
+
+            // At draw time the layout written in the descriptor update will mismatch the actual image layout
+            if (positive_test || (test_type == kExternal)) {
+                m_errorMonitor->ExpectSuccess();
+            } else {
+                for (const auto &err : internal_errors) {
+                    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, err.c_str());
+                }
+            }
+            cmd_buf.Draw(1, 0, 0, 0);
+            if (positive_test || (test_type == kExternal)) {
+                m_errorMonitor->VerifyNotFound();
+            } else {
+                m_errorMonitor->VerifyFound();
+            }
+
+            m_errorMonitor->ExpectSuccess();
+            cmd_buf.EndRenderPass();
+            cmd_buf.end();
+            m_errorMonitor->VerifyNotFound();
+
+            // Submit cmd buffer
+            if (positive_test || (test_type == kInternal)) {
+                m_errorMonitor->ExpectSuccess();
+            } else {
+                for (const auto &err : external_errors) {
+                    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, err.c_str());
+                }
+            }
+            vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+            vk::QueueWaitIdle(m_device->m_queue);
+            if (positive_test || (test_type == kInternal)) {
+                m_errorMonitor->VerifyNotFound();
+            } else {
+                m_errorMonitor->VerifyFound();
+            }
+        }
+    };
+    do_test(&image, &view, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+            VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, /* positive */ false);
+
+    // Create depth stencil image and views
+    const VkFormat format_ds = m_depth_stencil_fmt = FindSupportedDepthStencilFormat(gpu());
+    bool ds_test_support = maint2_support && (format_ds != VK_FORMAT_UNDEFINED);
+    VkImageObj image_ds(m_device);
+    vk_testing::ImageView stencil_view;
+    vk_testing::ImageView depth_view;
+    const VkImageLayout ds_image_layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
+    const VkImageLayout depth_descriptor_layout = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL;
+    const VkImageLayout stencil_descriptor_layout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL;
+    const VkImageAspectFlags depth_stencil = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
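+    // The mixed read-only layouts above come from KHR_maintenance2; with relaxed layout matching they
+    // should be accepted against the DEPTH_STENCIL_READ_ONLY image layout (positive cases below), while
+    // VK_IMAGE_LAYOUT_GENERAL is still treated as a mismatch.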
+    if (ds_test_support) {
+        image_ds.Init(32, 32, 1, format_ds, VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
+                      VK_IMAGE_TILING_OPTIMAL, 0);
+        ASSERT_TRUE(image_ds.initialized());
+        auto ds_view_ci = SafeSaneImageViewCreateInfo(image_ds, format_ds, VK_IMAGE_ASPECT_DEPTH_BIT);
+        depth_view.init(*m_device, ds_view_ci);
+        ds_view_ci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
+        stencil_view.init(*m_device, ds_view_ci);
+        do_test(&image_ds, &depth_view, depth_stencil, ds_image_layout, depth_descriptor_layout, /* positive */ true);
+        do_test(&image_ds, &depth_view, depth_stencil, ds_image_layout, VK_IMAGE_LAYOUT_GENERAL, /* positive */ false);
+        do_test(&image_ds, &stencil_view, depth_stencil, ds_image_layout, stencil_descriptor_layout, /* positive */ true);
+        do_test(&image_ds, &stencil_view, depth_stencil, ds_image_layout, VK_IMAGE_LAYOUT_GENERAL, /* positive */ false);
+    }
+}
+
+TEST_F(VkLayerTest, DescriptorPoolInUseResetSignaled) {
+    TEST_DESCRIPTION("Reset a DescriptorPool with a DescriptorSet that is in use.");
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    OneOffDescriptorSet descriptor_set(m_device,
+                                       {
+                                           {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                       });
+
+    const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});
+
+    // Create image to update the descriptor with
+    VkImageObj image(m_device);
+    image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(image.initialized());
+
+    VkImageView view = image.targetView(VK_FORMAT_B8G8R8A8_UNORM);
+    // Create Sampler
+    VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+    VkSampler sampler;
+    VkResult err = vk::CreateSampler(m_device->device(), &sampler_ci, nullptr, &sampler);
+    ASSERT_VK_SUCCESS(err);
+    // Update descriptor with image and sampler
+    descriptor_set.WriteDescriptorImageInfo(0, view, sampler);
+    descriptor_set.UpdateDescriptorSets();
+
+    // Create PSO to be used for draw-time errors below
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, bindStateFragSamplerShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+    VkPipelineObj pipe(m_device);
+    pipe.AddShader(&vs);
+    pipe.AddShader(&fs);
+    pipe.AddDefaultColorAttachment();
+    pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+                              &descriptor_set.set_, 0, nullptr);
+
+    VkViewport viewport = {0, 0, 16, 16, 0, 1};
+    VkRect2D scissor = {{0, 0}, {16, 16}};
+    vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+    vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+
+    m_commandBuffer->Draw(1, 0, 0, 0);
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+    // Submit cmd buffer to put pool in-flight
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    // Reset pool while in-flight, causing error
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkResetDescriptorPool-descriptorPool-00313");
+    vk::ResetDescriptorPool(m_device->device(), descriptor_set.pool_, 0);
+    m_errorMonitor->VerifyFound();
+    vk::QueueWaitIdle(m_device->m_queue);
+    // Cleanup
+    vk::DestroySampler(m_device->device(), sampler, nullptr);
+    m_errorMonitor->SetUnexpectedError(
+        "If descriptorPool is not VK_NULL_HANDLE, descriptorPool must be a valid VkDescriptorPool handle");
+    m_errorMonitor->SetUnexpectedError("Unable to remove DescriptorPool obj");
+}
+
+TEST_F(VkLayerTest, DescriptorImageUpdateNoMemoryBound) {
+    TEST_DESCRIPTION("Attempt an image descriptor set update where image's bound memory has been freed.");
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    OneOffDescriptorSet descriptor_set(m_device,
+                                       {
+                                           {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                       });
+
+    // Create images to update the descriptor with
+    VkImage image;
+    const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
+    const int32_t tex_width = 32;
+    const int32_t tex_height = 32;
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = tex_format;
+    image_create_info.extent.width = tex_width;
+    image_create_info.extent.height = tex_height;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+    image_create_info.flags = 0;
+    VkResult err = vk::CreateImage(m_device->device(), &image_create_info, NULL, &image);
+    ASSERT_VK_SUCCESS(err);
+    // Initially bind memory to avoid error at bind view time. We'll break binding before update.
+    VkMemoryRequirements memory_reqs;
+    VkDeviceMemory image_memory;
+    bool pass;
+    VkMemoryAllocateInfo memory_info = {};
+    memory_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    memory_info.pNext = NULL;
+    memory_info.allocationSize = 0;
+    memory_info.memoryTypeIndex = 0;
+    vk::GetImageMemoryRequirements(m_device->device(), image, &memory_reqs);
+    // Allocate enough memory for image
+    memory_info.allocationSize = memory_reqs.size;
+    pass = m_device->phy().set_memory_type(memory_reqs.memoryTypeBits, &memory_info, 0);
+    ASSERT_TRUE(pass);
+    err = vk::AllocateMemory(m_device->device(), &memory_info, NULL, &image_memory);
+    ASSERT_VK_SUCCESS(err);
+    err = vk::BindImageMemory(m_device->device(), image, image_memory, 0);
+    ASSERT_VK_SUCCESS(err);
+
+    VkImageViewCreateInfo image_view_create_info = {};
+    image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    image_view_create_info.image = image;
+    image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    image_view_create_info.format = tex_format;
+    image_view_create_info.subresourceRange.layerCount = 1;
+    image_view_create_info.subresourceRange.baseMipLevel = 0;
+    image_view_create_info.subresourceRange.levelCount = 1;
+    image_view_create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+    VkImageView view;
+    err = vk::CreateImageView(m_device->device(), &image_view_create_info, NULL, &view);
+    ASSERT_VK_SUCCESS(err);
+    // Create Samplers
+    VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+    VkSampler sampler;
+    err = vk::CreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+    ASSERT_VK_SUCCESS(err);
+    // Update descriptor with image and sampler
+    descriptor_set.WriteDescriptorImageInfo(0, view, sampler);
+    // Break memory binding and attempt update
+    vk::FreeMemory(m_device->device(), image_memory, nullptr);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         " previously bound memory was freed. Memory must not be freed prior to this operation.");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "vkUpdateDescriptorSets() failed write update validation for ");
+    descriptor_set.UpdateDescriptorSets();
+    m_errorMonitor->VerifyFound();
+    // Cleanup
+    vk::DestroyImage(m_device->device(), image, NULL);
+    vk::DestroySampler(m_device->device(), sampler, NULL);
+    vk::DestroyImageView(m_device->device(), view, NULL);
+}
+
+TEST_F(VkLayerTest, InvalidDynamicOffsetCases) {
+    // Create a descriptorSet w/ dynamic descriptor and then hit 3 offset error
+    // cases:
+    // 1. No dynamicOffset supplied
+    // 2. Too many dynamicOffsets supplied
+    // 3. Dynamic offset oversteps buffer being updated
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         " requires 1 dynamicOffsets, but only 0 dynamicOffsets are left in pDynamicOffsets ");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    OneOffDescriptorSet descriptor_set(m_device,
+                                       {
+                                           {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                       });
+
+    const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});
+
+    // Create a buffer to update the descriptor with
+    uint32_t qfi = 0;
+    VkBufferCreateInfo buffCI = {};
+    buffCI.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    buffCI.size = 1024;
+    buffCI.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+    buffCI.queueFamilyIndexCount = 1;
+    buffCI.pQueueFamilyIndices = &qfi;
+
+    VkBufferObj dynamic_uniform_buffer;
+    dynamic_uniform_buffer.init(*m_device, buffCI);
+
+    // Correctly update descriptor to avoid "NOT_UPDATED" error
+    descriptor_set.WriteDescriptorBufferInfo(0, dynamic_uniform_buffer.handle(), 1024, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC);
+    descriptor_set.UpdateDescriptorSets();
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+                              &descriptor_set.set_, 0, NULL);
+    m_errorMonitor->VerifyFound();
+    uint32_t pDynOff[2] = {512, 756};
+    // Now cause error b/c too many dynOffsets in array for # of dyn descriptors
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "Attempting to bind 1 descriptorSets with 1 dynamic descriptors, but ");
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+                              &descriptor_set.set_, 2, pDynOff);
+    m_errorMonitor->VerifyFound();
+    // Finally cause error due to dynamicOffset being too big
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        " dynamic offset 512 combined with offset 0 and range 1024 that oversteps the buffer size of 1024");
+    // Create PSO to be used for draw-time errors below
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, bindStateFragUniformShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+    VkPipelineObj pipe(m_device);
+    pipe.AddShader(&vs);
+    pipe.AddShader(&fs);
+    pipe.AddDefaultColorAttachment();
+    pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+    VkViewport viewport = {0, 0, 16, 16, 0, 1};
+    vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+    VkRect2D scissor = {{0, 0}, {16, 16}};
+    vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+    // This bind itself should succeed, but the dynamic offset of 512 will
+    // overstep the buffer w/ range 1024 & size 1024
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+                              &descriptor_set.set_, 1, pDynOff);
+    m_commandBuffer->Draw(1, 0, 0, 0);
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+}
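+
+// Illustrative sketch, not part of the imported tests: at draw time the offset
+// actually read for a dynamic buffer descriptor is the descriptor's base offset
+// plus the dynamic offset passed to vkCmdBindDescriptorSets, and that sum plus
+// the bound range must stay inside the buffer. The final error above trips
+// because 512 + 0 + 1024 > 1024.
+inline bool DynamicOffsetFitsBuffer(VkDeviceSize buffer_size, VkDeviceSize base_offset, VkDeviceSize range,
+                                    uint32_t dynamic_offset) {
+    // Fixed-range case only, which is what the test above exercises.
+    return base_offset + dynamic_offset + range <= buffer_size;
+}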
+
+TEST_F(VkLayerTest, DescriptorBufferUpdateNoMemoryBound) {
+    TEST_DESCRIPTION("Attempt to update a descriptor with a non-sparse buffer that doesn't have memory bound");
+    VkResult err;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         " used with no memory bound. Memory should be bound by calling vkBindBufferMemory().");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "vkUpdateDescriptorSets() failed write update validation for ");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    OneOffDescriptorSet descriptor_set(m_device,
+                                       {
+                                           {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                       });
+
+    // Create a buffer to update the descriptor with
+    uint32_t qfi = 0;
+    VkBufferCreateInfo buffCI = {};
+    buffCI.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    buffCI.size = 1024;
+    buffCI.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+    buffCI.queueFamilyIndexCount = 1;
+    buffCI.pQueueFamilyIndices = &qfi;
+
+    VkBuffer dynamic_uniform_buffer;
+    err = vk::CreateBuffer(m_device->device(), &buffCI, NULL, &dynamic_uniform_buffer);
+    ASSERT_VK_SUCCESS(err);
+
+    // Attempt to update descriptor without binding memory to it
+    descriptor_set.WriteDescriptorBufferInfo(0, dynamic_uniform_buffer, 1024, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC);
+    descriptor_set.UpdateDescriptorSets();
+    m_errorMonitor->VerifyFound();
+    vk::DestroyBuffer(m_device->device(), dynamic_uniform_buffer, NULL);
+}
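+
+// Illustrative sketch, not part of the imported tests: the non-error path binds
+// device memory to the buffer before vkUpdateDescriptorSets is called, which is
+// exactly what the two expected messages above check for. The memory type index
+// is assumed to have been chosen by the caller from the buffer's requirements.
+inline VkResult BindBufferMemoryForDescriptorUpdate(VkDevice device, VkBuffer buffer, uint32_t memory_type_index,
+                                                    VkDeviceMemory *out_memory) {
+    VkMemoryRequirements reqs;
+    vk::GetBufferMemoryRequirements(device, buffer, &reqs);
+    VkMemoryAllocateInfo alloc = {};
+    alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    alloc.allocationSize = reqs.size;
+    alloc.memoryTypeIndex = memory_type_index;
+    VkResult err = vk::AllocateMemory(device, &alloc, nullptr, out_memory);
+    if (err != VK_SUCCESS) return err;
+    // Only after this bind succeeds may the buffer be written into a descriptor set.
+    return vk::BindBufferMemory(device, buffer, *out_memory, 0);
+}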
+
+TEST_F(VkLayerTest, DescriptorSetCompatibility) {
+    // Test various descriptorSet errors with bad binding combinations
+    using std::vector;
+    VkResult err;
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    static const uint32_t NUM_DESCRIPTOR_TYPES = 5;
+    VkDescriptorPoolSize ds_type_count[NUM_DESCRIPTOR_TYPES] = {};
+    ds_type_count[0].type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    ds_type_count[0].descriptorCount = 10;
+    ds_type_count[1].type = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
+    ds_type_count[1].descriptorCount = 2;
+    ds_type_count[2].type = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
+    ds_type_count[2].descriptorCount = 2;
+    ds_type_count[3].type = VK_DESCRIPTOR_TYPE_SAMPLER;
+    ds_type_count[3].descriptorCount = 5;
+    // TODO : LunarG ILO driver currently asserts in desc.c w/ INPUT_ATTACHMENT
+    // type
+    // ds_type_count[4].type = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
+    ds_type_count[4].type = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
+    ds_type_count[4].descriptorCount = 2;
+
+    VkDescriptorPoolCreateInfo ds_pool_ci = {};
+    ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+    ds_pool_ci.pNext = NULL;
+    ds_pool_ci.maxSets = 5;
+    ds_pool_ci.poolSizeCount = NUM_DESCRIPTOR_TYPES;
+    ds_pool_ci.pPoolSizes = ds_type_count;
+
+    VkDescriptorPool ds_pool;
+    err = vk::CreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
+    ASSERT_VK_SUCCESS(err);
+
+    static const uint32_t MAX_DS_TYPES_IN_LAYOUT = 2;
+    VkDescriptorSetLayoutBinding dsl_binding[MAX_DS_TYPES_IN_LAYOUT] = {};
+    dsl_binding[0].binding = 0;
+    dsl_binding[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    dsl_binding[0].descriptorCount = 5;
+    dsl_binding[0].stageFlags = VK_SHADER_STAGE_ALL;
+    dsl_binding[0].pImmutableSamplers = NULL;
+
+    // Create layout identical to set0 layout but w/ different stageFlags
+    VkDescriptorSetLayoutBinding dsl_fs_stage_only = {};
+    dsl_fs_stage_only.binding = 0;
+    dsl_fs_stage_only.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    dsl_fs_stage_only.descriptorCount = 5;
+    dsl_fs_stage_only.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;  // Different stageFlags to cause error at
+                                                                  // bind time
+    dsl_fs_stage_only.pImmutableSamplers = NULL;
+
+    vector<VkDescriptorSetLayoutObj> ds_layouts;
+    // Create 4 unique layouts for full pipelineLayout, and 1 special fs-only
+    // layout for error case
+    ds_layouts.emplace_back(m_device, std::vector<VkDescriptorSetLayoutBinding>(1, dsl_binding[0]));
+
+    const VkDescriptorSetLayoutObj ds_layout_fs_only(m_device, {dsl_fs_stage_only});
+
+    dsl_binding[0].binding = 0;
+    dsl_binding[0].descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
+    dsl_binding[0].descriptorCount = 2;
+    dsl_binding[1].binding = 1;
+    dsl_binding[1].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
+    dsl_binding[1].descriptorCount = 2;
+    dsl_binding[1].stageFlags = VK_SHADER_STAGE_ALL;
+    dsl_binding[1].pImmutableSamplers = NULL;
+    ds_layouts.emplace_back(m_device, std::vector<VkDescriptorSetLayoutBinding>({dsl_binding[0], dsl_binding[1]}));
+
+    dsl_binding[0].binding = 0;
+    dsl_binding[0].descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
+    dsl_binding[0].descriptorCount = 5;
+    ds_layouts.emplace_back(m_device, std::vector<VkDescriptorSetLayoutBinding>(1, dsl_binding[0]));
+
+    dsl_binding[0].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
+    dsl_binding[0].descriptorCount = 2;
+    ds_layouts.emplace_back(m_device, std::vector<VkDescriptorSetLayoutBinding>(1, dsl_binding[0]));
+
+    const auto &ds_vk_layouts = MakeVkHandles<VkDescriptorSetLayout>(ds_layouts);
+
+    static const uint32_t NUM_SETS = 4;
+    VkDescriptorSet descriptorSet[NUM_SETS] = {};
+    VkDescriptorSetAllocateInfo alloc_info = {};
+    alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+    alloc_info.descriptorPool = ds_pool;
+    alloc_info.descriptorSetCount = ds_vk_layouts.size();
+    alloc_info.pSetLayouts = ds_vk_layouts.data();
+    err = vk::AllocateDescriptorSets(m_device->device(), &alloc_info, descriptorSet);
+    ASSERT_VK_SUCCESS(err);
+    VkDescriptorSet ds0_fs_only = {};
+    alloc_info.descriptorSetCount = 1;
+    alloc_info.pSetLayouts = &ds_layout_fs_only.handle();
+    err = vk::AllocateDescriptorSets(m_device->device(), &alloc_info, &ds0_fs_only);
+    ASSERT_VK_SUCCESS(err);
+
+    const VkPipelineLayoutObj pipeline_layout(m_device, {&ds_layouts[0], &ds_layouts[1]});
+    // Create pipelineLayout with only one setLayout
+    const VkPipelineLayoutObj single_pipe_layout(m_device, {&ds_layouts[0]});
+    // Create pipelineLayout with 2 descriptor setLayout at index 0
+    const VkPipelineLayoutObj pipe_layout_one_desc(m_device, {&ds_layouts[3]});
+    // Create pipelineLayout with 5 SAMPLER descriptor setLayout at index 0
+    const VkPipelineLayoutObj pipe_layout_five_samp(m_device, {&ds_layouts[2]});
+    // Create pipelineLayout with UB type, but stageFlags for FS only
+    VkPipelineLayoutObj pipe_layout_fs_only(m_device, {&ds_layout_fs_only});
+    // Create pipelineLayout w/ incompatible set0 layout, but set1 is fine
+    const VkPipelineLayoutObj pipe_layout_bad_set0(m_device, {&ds_layout_fs_only, &ds_layouts[1]});
+
+    // Add buffer binding for UBO
+    uint32_t qfi = 0;
+    VkBufferCreateInfo bci = {};
+    bci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    bci.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+    bci.size = 8;
+    bci.queueFamilyIndexCount = 1;
+    bci.pQueueFamilyIndices = &qfi;
+    VkBufferObj buffer;
+    buffer.init(*m_device, bci);
+    VkDescriptorBufferInfo buffer_info;
+    buffer_info.buffer = buffer.handle();
+    buffer_info.offset = 0;
+    buffer_info.range = VK_WHOLE_SIZE;
+    VkWriteDescriptorSet descriptor_write = {};
+    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_write.dstSet = descriptorSet[0];
+    descriptor_write.dstBinding = 0;
+    descriptor_write.descriptorCount = 1;
+    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    descriptor_write.pBufferInfo = &buffer_info;
+    vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+    // Create PSO to be used for draw-time errors below
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, bindStateFragUniformShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+    VkPipelineObj pipe(m_device);
+    pipe.AddShader(&vs);
+    pipe.AddShader(&fs);
+    pipe.AddDefaultColorAttachment();
+    pipe.CreateVKPipeline(pipe_layout_fs_only.handle(), renderPass());
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+    // TODO : Want to cause various binding incompatibility issues here to test
+    // DrawState
+    //  First cause various verify_layout_compatibility() fails
+    //  Second disturb early and late sets and verify INFO msgs
+    // VerifySetLayoutCompatibility fail cases:
+    // 1. invalid VkPipelineLayout (layout) passed into vk::CmdBindDescriptorSets
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindDescriptorSets-layout-parameter");
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS,
+                              CastToHandle<VkPipelineLayout, uintptr_t>(0xbaadb1be), 0, 1, &descriptorSet[0], 0, NULL);
+    m_errorMonitor->VerifyFound();
+
+    // 2. layoutIndex exceeds # of layouts in layout
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " attempting to bind set to index 1");
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, single_pipe_layout.handle(), 0, 2,
+                              &descriptorSet[0], 0, NULL);
+    m_errorMonitor->VerifyFound();
+
+    // 3. Pipeline setLayout[0] has 2 descriptors, but set being bound has 5
+    // descriptors
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " has 2 total descriptors, but ");
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe_layout_one_desc.handle(), 0, 1,
+                              &descriptorSet[0], 0, NULL);
+    m_errorMonitor->VerifyFound();
+
+    // 4. same # of descriptors but mismatch in type
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " is type 'VK_DESCRIPTOR_TYPE_SAMPLER' but binding ");
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe_layout_five_samp.handle(), 0, 1,
+                              &descriptorSet[0], 0, NULL);
+    m_errorMonitor->VerifyFound();
+
+    // 5. same # of descriptors but mismatch in stageFlags
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         " has stageFlags VK_SHADER_STAGE_FRAGMENT_BIT but binding 0 for ");
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe_layout_fs_only.handle(), 0, 1,
+                              &descriptorSet[0], 0, NULL);
+    m_errorMonitor->VerifyFound();
+
+    // Now that we're done actively using the pipelineLayout that gfx pipeline
+    //  was created with, we should be able to delete it. Do that now to verify
+    //  that validation obeys pipelineLayout lifetime
+    pipe_layout_fs_only.Reset();
+
+    // Cause draw-time errors due to PSO incompatibilities
+    // 1. Error due to not binding required set (we actually use same code as
+    // above to disturb set0)
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 2,
+                              &descriptorSet[0], 0, NULL);
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe_layout_bad_set0.handle(), 1, 1,
+                              &descriptorSet[1], 0, NULL);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " uses set #0 but that set is not bound.");
+
+    VkViewport viewport = {0, 0, 16, 16, 0, 1};
+    VkRect2D scissor = {{0, 0}, {16, 16}};
+    vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+    vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+
+    m_commandBuffer->Draw(1, 0, 0, 0);
+    m_errorMonitor->VerifyFound();
+
+    // 2. Error due to bound set not being compatible with PSO's
+    // VkPipelineLayout (diff stageFlags in this case)
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 2,
+                              &descriptorSet[0], 0, NULL);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " bound as set #0 is not compatible with ");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDraw-None-02697");
+    m_commandBuffer->Draw(1, 0, 0, 0);
+    m_errorMonitor->VerifyFound();
+
+    // Remaining clean-up
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+
+    vk::DestroyDescriptorPool(m_device->device(), ds_pool, NULL);
+}
+
+TEST_F(VkLayerTest, NullRenderPass) {
+    // Bind a NULL RenderPass
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "vkCmdBeginRenderPass: required parameter pRenderPassBegin specified as NULL");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    m_commandBuffer->begin();
+    // Don't care about RenderPass handle b/c error should be flagged before
+    // that
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), NULL, VK_SUBPASS_CONTENTS_INLINE);
+
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, EndCommandBufferWithinRenderPass) {
+    TEST_DESCRIPTION("End a command buffer with an active render pass");
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkEndCommandBuffer-commandBuffer-00060");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+    vk::EndCommandBuffer(m_commandBuffer->handle());
+
+    m_errorMonitor->VerifyFound();
+
+    // End command buffer properly to avoid driver issues. This is safe -- the
+    // previous vk::EndCommandBuffer should not have reached the driver.
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+
+    // TODO: Add test for VK_COMMAND_BUFFER_LEVEL_SECONDARY
+    // TODO: Add test for VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT
+}
+
+TEST_F(VkLayerTest, DSUsageBitsErrors) {
+    TEST_DESCRIPTION("Attempt to update descriptor sets for images and buffers that do not have the correct usage bits set.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    const VkFormat buffer_format = VK_FORMAT_R8_UNORM;
+    VkFormatProperties format_properties;
+    vk::GetPhysicalDeviceFormatProperties(gpu(), buffer_format, &format_properties);
+    if (!(format_properties.bufferFeatures & VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT)) {
+        printf("%s Device does not support VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT for this format; skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    std::array<VkDescriptorPoolSize, VK_DESCRIPTOR_TYPE_RANGE_SIZE> ds_type_count;
+    for (uint32_t i = 0; i < ds_type_count.size(); ++i) {
+        ds_type_count[i].type = VkDescriptorType(i);
+        ds_type_count[i].descriptorCount = 1;
+    }
+
+    vk_testing::DescriptorPool ds_pool;
+    ds_pool.init(*m_device, vk_testing::DescriptorPool::create_info(0, VK_DESCRIPTOR_TYPE_RANGE_SIZE, ds_type_count));
+    ASSERT_TRUE(ds_pool.initialized());
+
+    std::vector<VkDescriptorSetLayoutBinding> dsl_bindings(1);
+    dsl_bindings[0].binding = 0;
+    dsl_bindings[0].descriptorType = VkDescriptorType(0);
+    dsl_bindings[0].descriptorCount = 1;
+    dsl_bindings[0].stageFlags = VK_SHADER_STAGE_ALL;
+    dsl_bindings[0].pImmutableSamplers = NULL;
+
+    // Create arrays of layout and descriptor objects
+    using UpDescriptorSet = std::unique_ptr<vk_testing::DescriptorSet>;
+    std::vector<UpDescriptorSet> descriptor_sets;
+    using UpDescriptorSetLayout = std::unique_ptr<VkDescriptorSetLayoutObj>;
+    std::vector<UpDescriptorSetLayout> ds_layouts;
+    descriptor_sets.reserve(VK_DESCRIPTOR_TYPE_RANGE_SIZE);
+    ds_layouts.reserve(VK_DESCRIPTOR_TYPE_RANGE_SIZE);
+    for (uint32_t i = 0; i < VK_DESCRIPTOR_TYPE_RANGE_SIZE; ++i) {
+        dsl_bindings[0].descriptorType = VkDescriptorType(i);
+        ds_layouts.push_back(UpDescriptorSetLayout(new VkDescriptorSetLayoutObj(m_device, dsl_bindings)));
+        descriptor_sets.push_back(UpDescriptorSet(ds_pool.alloc_sets(*m_device, *ds_layouts.back())));
+        ASSERT_TRUE(descriptor_sets.back()->initialized());
+    }
+
+    // Create a buffer & bufferView to be used for invalid updates
+    const VkDeviceSize buffer_size = 256;
+    uint8_t data[buffer_size];
+    VkConstantBufferObj buffer(m_device, buffer_size, data, VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT);
+    VkConstantBufferObj storage_texel_buffer(m_device, buffer_size, data, VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT);
+    ASSERT_TRUE(buffer.initialized() && storage_texel_buffer.initialized());
+
+    auto buff_view_ci = vk_testing::BufferView::createInfo(buffer.handle(), VK_FORMAT_R8_UNORM);
+    vk_testing::BufferView buffer_view_obj, storage_texel_buffer_view_obj;
+    buffer_view_obj.init(*m_device, buff_view_ci);
+    buff_view_ci.buffer = storage_texel_buffer.handle();
+    storage_texel_buffer_view_obj.init(*m_device, buff_view_ci);
+    ASSERT_TRUE(buffer_view_obj.initialized() && storage_texel_buffer_view_obj.initialized());
+    VkBufferView buffer_view = buffer_view_obj.handle();
+    VkBufferView storage_texel_buffer_view = storage_texel_buffer_view_obj.handle();
+
+    // Create an image to be used for invalid updates
+    VkImageObj image_obj(m_device);
+    image_obj.InitNoLayout(64, 64, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(image_obj.initialized());
+    VkImageView image_view = image_obj.targetView(VK_FORMAT_R8G8B8A8_UNORM);
+
+    VkDescriptorBufferInfo buff_info = {};
+    buff_info.buffer = buffer.handle();
+    VkDescriptorImageInfo img_info = {};
+    img_info.imageView = image_view;
+    VkWriteDescriptorSet descriptor_write = {};
+    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_write.dstBinding = 0;
+    descriptor_write.descriptorCount = 1;
+    descriptor_write.pTexelBufferView = &buffer_view;
+    descriptor_write.pBufferInfo = &buff_info;
+    descriptor_write.pImageInfo = &img_info;
+
+    // These error messages align with the VkDescriptorType enum ordering
+    std::string error_codes[] = {
+        "UNASSIGNED-CoreValidation-DrawState-InvalidImageView",  // placeholder, no error for SAMPLER descriptor
+        "UNASSIGNED-CoreValidation-DrawState-InvalidImageView",  // COMBINED_IMAGE_SAMPLER
+        "UNASSIGNED-CoreValidation-DrawState-InvalidImageView",  // SAMPLED_IMAGE
+        "UNASSIGNED-CoreValidation-DrawState-InvalidImageView",  // STORAGE_IMAGE
+        "VUID-VkWriteDescriptorSet-descriptorType-00334",        // UNIFORM_TEXEL_BUFFER
+        "VUID-VkWriteDescriptorSet-descriptorType-00335",        // STORAGE_TEXEL_BUFFER
+        "VUID-VkWriteDescriptorSet-descriptorType-00330",        // UNIFORM_BUFFER
+        "VUID-VkWriteDescriptorSet-descriptorType-00331",        // STORAGE_BUFFER
+        "VUID-VkWriteDescriptorSet-descriptorType-00330",        // UNIFORM_BUFFER_DYNAMIC
+        "VUID-VkWriteDescriptorSet-descriptorType-00331",        // STORAGE_BUFFER_DYNAMIC
+        "UNASSIGNED-CoreValidation-DrawState-InvalidImageView"   // INPUT_ATTACHMENT
+    };
+    // Start loop at 1 as SAMPLER desc type has no usage bit error
+    for (uint32_t i = 1; i < VK_DESCRIPTOR_TYPE_RANGE_SIZE; ++i) {
+        if (VkDescriptorType(i) == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) {
+            // Now check for UNIFORM_TEXEL_BUFFER using storage_texel_buffer_view
+            descriptor_write.pTexelBufferView = &storage_texel_buffer_view;
+        }
+        descriptor_write.descriptorType = VkDescriptorType(i);
+        descriptor_write.dstSet = descriptor_sets[i]->handle();
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, error_codes[i]);
+
+        vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+        m_errorMonitor->VerifyFound();
+        if (VkDescriptorType(i) == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) {
+            descriptor_write.pTexelBufferView = &buffer_view;
+        }
+    }
+}
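+
+// Illustrative sketch, not part of the imported tests: the buffer-descriptor
+// cases above reduce to one required VkBufferUsageFlagBits per descriptor type,
+// matching the VUIDs listed in error_codes.
+inline VkBufferUsageFlags RequiredBufferUsageForDescriptorType(VkDescriptorType type) {
+    switch (type) {
+        case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+            return VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT;  // VUID-VkWriteDescriptorSet-descriptorType-00334
+        case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
+            return VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;  // VUID-VkWriteDescriptorSet-descriptorType-00335
+        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+            return VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;  // VUID-VkWriteDescriptorSet-descriptorType-00330
+        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
+            return VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;  // VUID-VkWriteDescriptorSet-descriptorType-00331
+        default:
+            return 0;  // image and sampler descriptors are validated through the image view instead
+    }
+}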
+
+TEST_F(VkLayerTest, DSBufferInfoErrors) {
+    TEST_DESCRIPTION(
+        "Attempt to update buffer descriptor set that has incorrect parameters in VkDescriptorBufferInfo struct. This includes:\n"
+        "1. offset value greater than or equal to buffer size\n"
+        "2. range value of 0\n"
+        "3. range value greater than buffer (size - offset)");
+
+    // GPDDP2 needed for push descriptors support below
+    bool gpdp2_support = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
+                                                    VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION);
+    if (gpdp2_support) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    bool update_template_support = DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME);
+    if (update_template_support) {
+        m_device_extension_names.push_back(VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME);
+    } else {
+        printf("%s Descriptor Update Template Extensions not supported, template cases skipped.\n", kSkipPrefix);
+    }
+
+    // Note: Includes workaround for some implementations which incorrectly return 0 maxPushDescriptors
+    bool push_descriptor_support = gpdp2_support &&
+                                   DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME) &&
+                                   (GetPushDescriptorProperties(instance(), gpu()).maxPushDescriptors > 0);
+    if (push_descriptor_support) {
+        m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+    } else {
+        printf("%s Push Descriptor Extension not supported, push descriptor cases skipped.\n", kSkipPrefix);
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+    std::vector<VkDescriptorSetLayoutBinding> ds_bindings = {
+        {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr}};
+    OneOffDescriptorSet descriptor_set(m_device, ds_bindings);
+
+    // Create a buffer to be used for invalid updates
+    VkBufferCreateInfo buff_ci = {};
+    buff_ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    buff_ci.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+    buff_ci.size = m_device->props.limits.minUniformBufferOffsetAlignment;
+    buff_ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    VkBufferObj buffer;
+    buffer.init(*m_device, buff_ci);
+
+    VkDescriptorBufferInfo buff_info = {};
+    buff_info.buffer = buffer.handle();
+    VkWriteDescriptorSet descriptor_write = {};
+    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_write.dstBinding = 0;
+    descriptor_write.descriptorCount = 1;
+    descriptor_write.pTexelBufferView = nullptr;
+    descriptor_write.pBufferInfo = &buff_info;
+    descriptor_write.pImageInfo = nullptr;
+
+    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    descriptor_write.dstSet = descriptor_set.set_;
+
+    // Relying on the "return nullptr for non-enabled extensions" behavior of GetDeviceProcAddr
+    auto vkCreateDescriptorUpdateTemplateKHR =
+        (PFN_vkCreateDescriptorUpdateTemplateKHR)vk::GetDeviceProcAddr(m_device->device(), "vkCreateDescriptorUpdateTemplateKHR");
+    auto vkDestroyDescriptorUpdateTemplateKHR =
+        (PFN_vkDestroyDescriptorUpdateTemplateKHR)vk::GetDeviceProcAddr(m_device->device(), "vkDestroyDescriptorUpdateTemplateKHR");
+    auto vkUpdateDescriptorSetWithTemplateKHR =
+        (PFN_vkUpdateDescriptorSetWithTemplateKHR)vk::GetDeviceProcAddr(m_device->device(), "vkUpdateDescriptorSetWithTemplateKHR");
+
+    if (update_template_support) {
+        ASSERT_NE(vkCreateDescriptorUpdateTemplateKHR, nullptr);
+        ASSERT_NE(vkDestroyDescriptorUpdateTemplateKHR, nullptr);
+        ASSERT_NE(vkUpdateDescriptorSetWithTemplateKHR, nullptr);
+    }
+
+    // Setup for update w/ template tests
+    // Create a template of descriptor set updates
+    struct SimpleTemplateData {
+        uint8_t padding[7];
+        VkDescriptorBufferInfo buff_info;
+        uint32_t other_padding[4];
+    };
+    SimpleTemplateData update_template_data = {};
+
+    VkDescriptorUpdateTemplateEntry update_template_entry = {};
+    update_template_entry.dstBinding = 0;
+    update_template_entry.dstArrayElement = 0;
+    update_template_entry.descriptorCount = 1;
+    update_template_entry.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    update_template_entry.offset = offsetof(SimpleTemplateData, buff_info);
+    update_template_entry.stride = sizeof(SimpleTemplateData);
+
+    auto update_template_ci = lvl_init_struct<VkDescriptorUpdateTemplateCreateInfoKHR>();
+    update_template_ci.descriptorUpdateEntryCount = 1;
+    update_template_ci.pDescriptorUpdateEntries = &update_template_entry;
+    update_template_ci.templateType = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET;
+    update_template_ci.descriptorSetLayout = descriptor_set.layout_.handle();
+
+    VkDescriptorUpdateTemplate update_template = VK_NULL_HANDLE;
+    if (update_template_support) {
+        auto result = vkCreateDescriptorUpdateTemplateKHR(m_device->device(), &update_template_ci, nullptr, &update_template);
+        ASSERT_VK_SUCCESS(result);
+    }
+
+    // VK_KHR_push_descriptor support
+    auto vkCmdPushDescriptorSetKHR =
+        (PFN_vkCmdPushDescriptorSetKHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdPushDescriptorSetKHR");
+    auto vkCmdPushDescriptorSetWithTemplateKHR = (PFN_vkCmdPushDescriptorSetWithTemplateKHR)vk::GetDeviceProcAddr(
+        m_device->device(), "vkCmdPushDescriptorSetWithTemplateKHR");
+
+    std::unique_ptr<VkDescriptorSetLayoutObj> push_dsl = nullptr;
+    std::unique_ptr<VkPipelineLayoutObj> pipeline_layout = nullptr;
+    VkDescriptorUpdateTemplate push_template = VK_NULL_HANDLE;
+    if (push_descriptor_support) {
+        ASSERT_NE(vkCmdPushDescriptorSetKHR, nullptr);
+        push_dsl.reset(
+            new VkDescriptorSetLayoutObj(m_device, ds_bindings, VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
+        pipeline_layout.reset(new VkPipelineLayoutObj(m_device, {push_dsl.get()}));
+        ASSERT_TRUE(push_dsl->initialized());
+
+        if (update_template_support) {
+            ASSERT_NE(vkCmdPushDescriptorSetWithTemplateKHR, nullptr);
+            auto push_template_ci = lvl_init_struct<VkDescriptorUpdateTemplateCreateInfoKHR>();
+            push_template_ci.descriptorUpdateEntryCount = 1;
+            push_template_ci.pDescriptorUpdateEntries = &update_template_entry;
+            push_template_ci.templateType = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR;
+            push_template_ci.descriptorSetLayout = VK_NULL_HANDLE;
+            push_template_ci.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
+            push_template_ci.pipelineLayout = pipeline_layout->handle();
+            push_template_ci.set = 0;
+            auto result = vkCreateDescriptorUpdateTemplateKHR(m_device->device(), &push_template_ci, nullptr, &push_template);
+            ASSERT_VK_SUCCESS(result);
+        }
+    }
+
+    auto do_test = [&](const char *desired_failure) {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, desired_failure);
+        vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+        m_errorMonitor->VerifyFound();
+
+        if (push_descriptor_support) {
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, desired_failure);
+            m_commandBuffer->begin();
+            vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout->handle(), 0, 1,
+                                      &descriptor_write);
+            m_commandBuffer->end();
+            m_errorMonitor->VerifyFound();
+        }
+
+        if (update_template_support) {
+            update_template_data.buff_info = buff_info;  // copy the test case information into our "pData"
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, desired_failure);
+            vkUpdateDescriptorSetWithTemplateKHR(m_device->device(), descriptor_set.set_, update_template, &update_template_data);
+            m_errorMonitor->VerifyFound();
+            if (push_descriptor_support) {
+                m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, desired_failure);
+                m_commandBuffer->begin();
+                vkCmdPushDescriptorSetWithTemplateKHR(m_commandBuffer->handle(), push_template, pipeline_layout->handle(), 0,
+                                                      &update_template_data);
+                m_commandBuffer->end();
+                m_errorMonitor->VerifyFound();
+            }
+        }
+    };
+
+    // Cause error due to offset out of range
+    buff_info.offset = buff_ci.size;
+    buff_info.range = VK_WHOLE_SIZE;
+    do_test("VUID-VkDescriptorBufferInfo-offset-00340");
+
+    // Now cause error due to range of 0
+    buff_info.offset = 0;
+    buff_info.range = 0;
+    do_test("VUID-VkDescriptorBufferInfo-range-00341");
+
+    // Now cause error due to range exceeding buffer size - offset
+    buff_info.offset = 0;
+    buff_info.range = buff_ci.size + 1;
+    do_test("VUID-VkDescriptorBufferInfo-range-00342");
+
+    if (update_template_support) {
+        vkDestroyDescriptorUpdateTemplateKHR(m_device->device(), update_template, nullptr);
+        if (push_descriptor_support) {
+            vkDestroyDescriptorUpdateTemplateKHR(m_device->device(), push_template, nullptr);
+        }
+    }
+}
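+
+// Illustrative sketch, not part of the imported tests: the three cases driven
+// through do_test above correspond directly to the VkDescriptorBufferInfo rules
+// the layers enforce.
+inline bool DescriptorBufferInfoIsValid(const VkDescriptorBufferInfo &info, VkDeviceSize buffer_size) {
+    if (info.offset >= buffer_size) return false;    // VUID-VkDescriptorBufferInfo-offset-00340
+    if (info.range == VK_WHOLE_SIZE) return true;    // the remaining bytes after offset are used
+    if (info.range == 0) return false;               // VUID-VkDescriptorBufferInfo-range-00341
+    return info.range <= buffer_size - info.offset;  // VUID-VkDescriptorBufferInfo-range-00342
+}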
+
+TEST_F(VkLayerTest, DSBufferLimitErrors) {
+    TEST_DESCRIPTION(
+        "Attempt to update buffer descriptor set that has VkDescriptorBufferInfo values that violate device limits.\n"
+        "Test cases include:\n"
+        "1. range of uniform buffer update exceeds maxUniformBufferRange\n"
+        "2. offset of uniform buffer update is not multiple of minUniformBufferOffsetAlignment\n"
+        "3. using VK_WHOLE_SIZE with uniform buffer size exceeding maxUniformBufferRange\n"
+        "4. range of storage buffer update exceeds maxStorageBufferRange\n"
+        "5. offset of storage buffer update is not multiple of minStorageBufferOffsetAlignment\n"
+        "6. using VK_WHOLE_SIZE with storage buffer size exceeding maxStorageBufferRange");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    struct TestCase {
+        VkDescriptorType descriptor_type;
+        VkBufferUsageFlagBits buffer_usage;
+        VkDeviceSize max_range;
+        std::string max_range_vu;
+        VkDeviceSize min_align;
+        std::string min_align_vu;
+    };
+
+    for (const auto &test_case : {
+             TestCase({VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
+                       m_device->props.limits.maxUniformBufferRange, "VUID-VkWriteDescriptorSet-descriptorType-00332",
+                       m_device->props.limits.minUniformBufferOffsetAlignment, "VUID-VkWriteDescriptorSet-descriptorType-00327"}),
+             TestCase({VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
+                       m_device->props.limits.maxStorageBufferRange, "VUID-VkWriteDescriptorSet-descriptorType-00333",
+                       m_device->props.limits.minStorageBufferOffsetAlignment, "VUID-VkWriteDescriptorSet-descriptorType-00328"}),
+         }) {
+        // Create layout with single buffer
+        OneOffDescriptorSet descriptor_set(m_device, {
+                                                         {0, test_case.descriptor_type, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                                     });
+
+        // Create a buffer to be used for invalid updates
+        VkBufferCreateInfo bci = {};
+        bci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+        bci.usage = test_case.buffer_usage;
+        bci.size = test_case.max_range + test_case.min_align;  // Make buffer bigger than range limit
+        bci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+        VkBuffer buffer;
+        VkResult err = vk::CreateBuffer(m_device->device(), &bci, NULL, &buffer);
+        ASSERT_VK_SUCCESS(err);
+
+        // Have to bind memory to buffer before descriptor update
+        VkMemoryRequirements mem_reqs;
+        vk::GetBufferMemoryRequirements(m_device->device(), buffer, &mem_reqs);
+
+        VkMemoryAllocateInfo mem_alloc = {};
+        mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+        mem_alloc.pNext = NULL;
+        mem_alloc.allocationSize = mem_reqs.size;
+        bool pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc, 0);
+        if (!pass) {
+            printf("%s Failed to allocate memory in DSBufferLimitErrors; skipped.\n", kSkipPrefix);
+            vk::DestroyBuffer(m_device->device(), buffer, NULL);
+            continue;
+        }
+
+        VkDeviceMemory mem;
+        err = vk::AllocateMemory(m_device->device(), &mem_alloc, NULL, &mem);
+        if (VK_SUCCESS != err) {
+            printf("%s Failed to allocate memory in DSBufferLimitErrors; skipped.\n", kSkipPrefix);
+            vk::DestroyBuffer(m_device->device(), buffer, NULL);
+            continue;
+        }
+        err = vk::BindBufferMemory(m_device->device(), buffer, mem, 0);
+        ASSERT_VK_SUCCESS(err);
+
+        VkDescriptorBufferInfo buff_info = {};
+        buff_info.buffer = buffer;
+        VkWriteDescriptorSet descriptor_write = {};
+        descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+        descriptor_write.dstBinding = 0;
+        descriptor_write.descriptorCount = 1;
+        descriptor_write.pTexelBufferView = nullptr;
+        descriptor_write.pBufferInfo = &buff_info;
+        descriptor_write.pImageInfo = nullptr;
+        descriptor_write.descriptorType = test_case.descriptor_type;
+        descriptor_write.dstSet = descriptor_set.set_;
+
+        // Exceed range limit
+        if (test_case.max_range != UINT32_MAX) {
+            buff_info.range = test_case.max_range + 1;
+            buff_info.offset = 0;
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.max_range_vu);
+            vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+            m_errorMonitor->VerifyFound();
+        }
+
+        // Reduce size of range to acceptable limit and cause offset error
+        if (test_case.min_align > 1) {
+            buff_info.range = test_case.max_range;
+            buff_info.offset = test_case.min_align - 1;
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.min_align_vu);
+            vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+            m_errorMonitor->VerifyFound();
+        }
+
+        // Exceed effective range limit by using VK_WHOLE_SIZE
+        buff_info.range = VK_WHOLE_SIZE;
+        buff_info.offset = 0;
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.max_range_vu);
+        vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+        m_errorMonitor->VerifyFound();
+
+        // Cleanup
+        vk::FreeMemory(m_device->device(), mem, NULL);
+        vk::DestroyBuffer(m_device->device(), buffer, NULL);
+    }
+}
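+
+// Illustrative sketch, not part of the imported tests: the loop above exercises
+// two device limits per buffer descriptor type. With VK_WHOLE_SIZE resolved to
+// (buffer size - offset) by the caller, the constraints are simply:
+inline bool BufferUpdateWithinLimits(VkDeviceSize offset, VkDeviceSize effective_range, VkDeviceSize max_range,
+                                     VkDeviceSize min_alignment) {
+    const bool range_ok = effective_range <= max_range;   // maxUniform/StorageBufferRange (VUIDs 00332/00333)
+    const bool align_ok = (offset % min_alignment) == 0;  // minUniform/StorageBufferOffsetAlignment (VUIDs 00327/00328)
+    return range_ok && align_ok;
+}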
+
+TEST_F(VkLayerTest, DSAspectBitsErrors) {
+    // TODO : Initially only catching case where DEPTH & STENCIL aspect bits
+    //  are set, but could expand this test to hit more cases.
+    TEST_DESCRIPTION("Attempt to update descriptor sets for images that do not have the correct aspect bits set.");
+    VkResult err;
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    auto depth_format = FindSupportedDepthStencilFormat(gpu());
+    if (!depth_format) {
+        printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    OneOffDescriptorSet descriptor_set(m_device, {
+                                                     {0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                                 });
+
+    // Create an image to be used for invalid updates
+    VkImageObj image_obj(m_device);
+    VkFormatProperties fmt_props;
+    vk::GetPhysicalDeviceFormatProperties(m_device->phy().handle(), depth_format, &fmt_props);
+    if (!image_obj.IsCompatible(VK_IMAGE_USAGE_SAMPLED_BIT, fmt_props.linearTilingFeatures) &&
+        !image_obj.IsCompatible(VK_IMAGE_USAGE_SAMPLED_BIT, fmt_props.optimalTilingFeatures)) {
+        printf("%s Depth + Stencil format cannot be sampled. Skipped.\n", kSkipPrefix);
+        return;
+    }
+    image_obj.Init(64, 64, 1, depth_format, VK_IMAGE_USAGE_SAMPLED_BIT);
+    ASSERT_TRUE(image_obj.initialized());
+    VkImage image = image_obj.image();
+
+    // Now create view for image
+    VkImageViewCreateInfo image_view_ci = {};
+    image_view_ci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    image_view_ci.image = image;
+    image_view_ci.format = depth_format;
+    image_view_ci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    image_view_ci.subresourceRange.layerCount = 1;
+    image_view_ci.subresourceRange.baseArrayLayer = 0;
+    image_view_ci.subresourceRange.levelCount = 1;
+    // Setting both depth & stencil aspect bits is illegal for an imageView used
+    // to populate a descriptor set.
+    image_view_ci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+
+    VkImageView image_view;
+    err = vk::CreateImageView(m_device->device(), &image_view_ci, NULL, &image_view);
+    ASSERT_VK_SUCCESS(err);
+    descriptor_set.WriteDescriptorImageInfo(0, image_view, VK_NULL_HANDLE, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
+
+    const char *error_msg = "VUID-VkDescriptorImageInfo-imageView-01976";
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, error_msg);
+    descriptor_set.UpdateDescriptorSets();
+    m_errorMonitor->VerifyFound();
+    vk::DestroyImageView(m_device->device(), image_view, NULL);
+}
+
+TEST_F(VkLayerTest, DSTypeMismatch) {
+    // Create DS w/ layout of one type and attempt Update w/ mis-matched type
+    VkResult err;
+
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        " binding #0 with type VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER but update type is VK_DESCRIPTOR_TYPE_SAMPLER");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    OneOffDescriptorSet descriptor_set(m_device, {
+                                                     {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                                 });
+
+    VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+    VkSampler sampler;
+    err = vk::CreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+    ASSERT_VK_SUCCESS(err);
+
+    descriptor_set.WriteDescriptorImageInfo(0, VK_NULL_HANDLE, sampler, VK_DESCRIPTOR_TYPE_SAMPLER);
+    descriptor_set.UpdateDescriptorSets();
+
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroySampler(m_device->device(), sampler, NULL);
+}
+
+TEST_F(VkLayerTest, DSUpdateOutOfBounds) {
+    // For overlapping Update, have arrayIndex exceed that of layout
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-dstArrayElement-00321");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    OneOffDescriptorSet descriptor_set(m_device, {
+                                                     {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                                 });
+
+    VkBufferTest buffer_test(m_device, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);
+    if (!buffer_test.GetBufferCurrent()) {
+        // Something prevented creation of buffer so abort
+        printf("%s Buffer creation failed, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    // Correctly update descriptor to avoid "NOT_UPDATED" error
+    VkDescriptorBufferInfo buff_info = {};
+    buff_info.buffer = buffer_test.GetBuffer();
+    buff_info.offset = 0;
+    buff_info.range = 1024;
+
+    VkWriteDescriptorSet descriptor_write;
+    memset(&descriptor_write, 0, sizeof(descriptor_write));
+    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_write.dstSet = descriptor_set.set_;
+    descriptor_write.dstArrayElement = 1; /* This index out of bounds for the update */
+    descriptor_write.descriptorCount = 1;
+    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    descriptor_write.pBufferInfo = &buff_info;
+
+    vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InvalidDSUpdateIndex) {
+    // Create layout w/ count of 1 and attempt update to that layout w/ binding index 2
+    VkResult err;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-dstBinding-00315");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    OneOffDescriptorSet descriptor_set(m_device, {
+                                                     {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                                 });
+
+    VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+    VkSampler sampler;
+    err = vk::CreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+    ASSERT_VK_SUCCESS(err);
+
+    // This is the wrong type, but out of bounds will be flagged first
+    descriptor_set.WriteDescriptorImageInfo(2, VK_NULL_HANDLE, sampler, VK_DESCRIPTOR_TYPE_SAMPLER);
+    descriptor_set.UpdateDescriptorSets();
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroySampler(m_device->device(), sampler, NULL);
+}
+
+TEST_F(VkLayerTest, DSUpdateEmptyBinding) {
+    // Create layout w/ empty binding and attempt to update it
+    VkResult err;
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    OneOffDescriptorSet descriptor_set(m_device, {
+                                                     {0, VK_DESCRIPTOR_TYPE_SAMPLER, 0 /* !! */, VK_SHADER_STAGE_ALL, nullptr},
+                                                 });
+
+    VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+    VkSampler sampler;
+    err = vk::CreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+    ASSERT_VK_SUCCESS(err);
+
+    // Use descriptorCount = 1 (a deliberate lie for this empty binding) to avoid a parameter_validation error
+    // This is the wrong type, but empty binding error will be flagged first
+    descriptor_set.WriteDescriptorImageInfo(0, VK_NULL_HANDLE, sampler, VK_DESCRIPTOR_TYPE_SAMPLER);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-dstBinding-00316");
+    descriptor_set.UpdateDescriptorSets();
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroySampler(m_device->device(), sampler, NULL);
+}
+
+TEST_F(VkLayerTest, InvalidDSUpdateStruct) {
+    // Call UpdateDescriptorSets() with a struct type other than the valid
+    // VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
+    VkResult err;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, ".sType must be VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    OneOffDescriptorSet descriptor_set(m_device, {
+                                                     {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                                 });
+
+    VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+    VkSampler sampler;
+    err = vk::CreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+    ASSERT_VK_SUCCESS(err);
+
+    VkDescriptorImageInfo info = {};
+    info.sampler = sampler;
+
+    VkWriteDescriptorSet descriptor_write;
+    memset(&descriptor_write, 0, sizeof(descriptor_write));
+    descriptor_write.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; /* Intentionally broken struct type */
+    descriptor_write.dstSet = descriptor_set.set_;
+    descriptor_write.descriptorCount = 1;
+    // This is the wrong descriptor type, but the invalid sType will be flagged first
+    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
+    descriptor_write.pImageInfo = &info;
+
+    vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroySampler(m_device->device(), sampler, NULL);
+}
+
+TEST_F(VkLayerTest, SampleDescriptorUpdateError) {
+    // Create a single Sampler descriptor and send it an invalid Sampler
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-00325");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    OneOffDescriptorSet descriptor_set(m_device, {
+                                                     {0, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                                 });
+
+    VkSampler sampler = CastToHandle<VkSampler, uintptr_t>(0xbaadbeef);  // Sampler with invalid handle
+
+    descriptor_set.WriteDescriptorImageInfo(0, VK_NULL_HANDLE, sampler, VK_DESCRIPTOR_TYPE_SAMPLER);
+    descriptor_set.UpdateDescriptorSets();
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, ImageViewDescriptorUpdateError) {
+    // Create a single combined Image/Sampler descriptor and send it an invalid
+    // imageView
+    VkResult err;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-00326");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    OneOffDescriptorSet descriptor_set(m_device,
+                                       {
+                                           {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                       });
+
+    VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+    VkSampler sampler;
+    err = vk::CreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+    ASSERT_VK_SUCCESS(err);
+
+    VkImageView view = CastToHandle<VkImageView, uintptr_t>(0xbaadbeef);  // invalid imageView object
+
+    descriptor_set.WriteDescriptorImageInfo(0, view, sampler, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
+    descriptor_set.UpdateDescriptorSets();
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroySampler(m_device->device(), sampler, NULL);
+}
+
+TEST_F(VkLayerTest, CopyDescriptorUpdateErrors) {
+    // Create DS w/ layout of 2 types, write update 1 and attempt to copy-update
+    // into the other
+    VkResult err;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         " binding #1 with type VK_DESCRIPTOR_TYPE_SAMPLER. Types do not match.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    OneOffDescriptorSet descriptor_set(m_device, {
+                                                     {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                                     {1, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                                 });
+
+    VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+    VkSampler sampler;
+    err = vk::CreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+    ASSERT_VK_SUCCESS(err);
+
+    // SAMPLER binding from layout above
+    // This write update should succeed
+    descriptor_set.WriteDescriptorImageInfo(1, VK_NULL_HANDLE, sampler, VK_DESCRIPTOR_TYPE_SAMPLER);
+    descriptor_set.UpdateDescriptorSets();
+    // Now perform a copy update that fails due to type mismatch
+    VkCopyDescriptorSet copy_ds_update;
+    memset(&copy_ds_update, 0, sizeof(VkCopyDescriptorSet));
+    copy_ds_update.sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
+    copy_ds_update.srcSet = descriptor_set.set_;
+    copy_ds_update.srcBinding = 1;  // Copy from SAMPLER binding
+    copy_ds_update.dstSet = descriptor_set.set_;
+    copy_ds_update.dstBinding = 0;       // ERROR : copy to UNIFORM binding
+    copy_ds_update.descriptorCount = 1;  // copy 1 descriptor
+    vk::UpdateDescriptorSets(m_device->device(), 0, NULL, 1, &copy_ds_update);
+
+    m_errorMonitor->VerifyFound();
+    // Now perform a copy update that fails due to binding out of bounds
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " does not have copy update src binding of 3.");
+    memset(&copy_ds_update, 0, sizeof(VkCopyDescriptorSet));
+    copy_ds_update.sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
+    copy_ds_update.srcSet = descriptor_set.set_;
+    copy_ds_update.srcBinding = 3;  // ERROR : Invalid binding for matching layout
+    copy_ds_update.dstSet = descriptor_set.set_;
+    copy_ds_update.dstBinding = 0;
+    copy_ds_update.descriptorCount = 1;  // Copy 1 descriptor
+    vk::UpdateDescriptorSets(m_device->device(), 0, NULL, 1, &copy_ds_update);
+
+    m_errorMonitor->VerifyFound();
+
+    // Now perform a copy update that fails because the update oversteps the number of descriptors in the set
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         " binding#1 with offset index of 1 plus update array offset of 0 and update of 5 "
+                                         "descriptors oversteps total number of descriptors in set: 2.");
+
+    memset(&copy_ds_update, 0, sizeof(VkCopyDescriptorSet));
+    copy_ds_update.sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
+    copy_ds_update.srcSet = descriptor_set.set_;
+    copy_ds_update.srcBinding = 1;
+    copy_ds_update.dstSet = descriptor_set.set_;
+    copy_ds_update.dstBinding = 0;
+    copy_ds_update.descriptorCount = 5;  // ERROR copy 5 descriptors (out of bounds for layout)
+    vk::UpdateDescriptorSets(m_device->device(), 0, NULL, 1, &copy_ds_update);
+
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroySampler(m_device->device(), sampler, NULL);
+}
+
+TEST_F(VkLayerTest, DrawWithPipelineIncompatibleWithRenderPass) {
+    TEST_DESCRIPTION(
+        "Hit RenderPass incompatible cases. Initial case is drawing with an active renderpass that's not compatible with the bound "
+        "pipeline state object's creation renderpass");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    OneOffDescriptorSet descriptor_set(m_device, {
+                                                     {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                                 });
+
+    const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});
+
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);  // We shouldn't need a fragment shader
+    // but add it to be able to run on more devices
+    // Create a renderpass that will be incompatible with default renderpass
+    VkAttachmentReference color_att = {};
+    color_att.layout = VK_IMAGE_LAYOUT_GENERAL;
+    VkSubpassDescription subpass = {};
+    subpass.colorAttachmentCount = 1;
+    subpass.pColorAttachments = &color_att;
+    VkRenderPassCreateInfo rpci = {};
+    rpci.subpassCount = 1;
+    rpci.pSubpasses = &subpass;
+    rpci.attachmentCount = 1;
+    VkAttachmentDescription attach_desc = {};
+    attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
+    // Format incompatible with PSO RP color attach format B8G8R8A8_UNORM
+    attach_desc.format = VK_FORMAT_R8G8B8A8_UNORM;
+    attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+    rpci.pAttachments = &attach_desc;
+    rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+    VkRenderPass rp;
+    vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+    VkPipelineObj pipe(m_device);
+    pipe.AddShader(&vs);
+    pipe.AddShader(&fs);
+    pipe.AddDefaultColorAttachment();
+    VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
+    m_viewports.push_back(viewport);
+    pipe.SetViewport(m_viewports);
+    VkRect2D rect = {{0, 0}, {64, 64}};
+    m_scissors.push_back(rect);
+    pipe.SetScissor(m_scissors);
+    pipe.CreateVKPipeline(pipeline_layout.handle(), rp);
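+    // The pipeline is created against the incompatible renderpass rp, while the draw below is recorded inside the default
+    // renderpass, which is what should trigger the compatibility error.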
+
+    VkCommandBufferInheritanceInfo cbii = {};
+    cbii.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
+    cbii.renderPass = rp;
+    cbii.subpass = 0;
+    VkCommandBufferBeginInfo cbbi = {};
+    cbbi.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+    cbbi.pInheritanceInfo = &cbii;
+    vk::BeginCommandBuffer(m_commandBuffer->handle(), &cbbi);
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDraw-renderPass-02684");
+    // Render triangle (the error should trigger on the attempt to draw).
+    m_commandBuffer->Draw(3, 1, 0, 0);
+
+    // Finalize recording of the command buffer
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyRenderPass(m_device->device(), rp, NULL);
+}
+
+TEST_F(VkLayerTest, Maint1BindingSliceOf3DImage) {
+    TEST_DESCRIPTION(
+        "Attempt to bind a slice of a 3D texture in a descriptor set. This is explicitly disallowed by KHR_maintenance1 to keep "
+        "things simple for drivers.");
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+    } else {
+        printf("%s %s is not supported; skipping\n", kSkipPrefix, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    VkResult err;
+
+    OneOffDescriptorSet descriptor_set(m_device,
+                                       {
+                                           {0, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
+                                       });
+
+    VkImageCreateInfo ici = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+                             nullptr,
+                             VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR,
+                             VK_IMAGE_TYPE_3D,
+                             VK_FORMAT_R8G8B8A8_UNORM,
+                             {32, 32, 32},
+                             1,
+                             1,
+                             VK_SAMPLE_COUNT_1_BIT,
+                             VK_IMAGE_TILING_OPTIMAL,
+                             VK_IMAGE_USAGE_SAMPLED_BIT,
+                             VK_SHARING_MODE_EXCLUSIVE,
+                             0,
+                             nullptr,
+                             VK_IMAGE_LAYOUT_UNDEFINED};
+    VkImageObj image(m_device);
+    image.init(&ici);
+    ASSERT_TRUE(image.initialized());
+
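+    // 2D_ARRAY_COMPATIBLE makes creating a 2D view of this 3D image legal; binding such a view in a descriptor is what
+    // maintenance1 disallows.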
+    VkImageViewCreateInfo ivci = {
+        VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+        nullptr,
+        0,
+        image.handle(),
+        VK_IMAGE_VIEW_TYPE_2D,
+        VK_FORMAT_R8G8B8A8_UNORM,
+        {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
+         VK_COMPONENT_SWIZZLE_IDENTITY},
+        {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1},
+    };
+    VkImageView view;
+    err = vk::CreateImageView(m_device->device(), &ivci, nullptr, &view);
+    ASSERT_VK_SUCCESS(err);
+
+    // Meat of the test.
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorImageInfo-imageView-00343");
+
+    VkDescriptorImageInfo dii = {VK_NULL_HANDLE, view, VK_IMAGE_LAYOUT_GENERAL};
+    VkWriteDescriptorSet write = {VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
+                                  nullptr,
+                                  descriptor_set.set_,
+                                  0,
+                                  0,
+                                  1,
+                                  VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
+                                  &dii,
+                                  nullptr,
+                                  nullptr};
+    vk::UpdateDescriptorSets(m_device->device(), 1, &write, 0, nullptr);
+
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyImageView(m_device->device(), view, nullptr);
+}
+
+TEST_F(VkLayerTest, UpdateDestroyDescriptorSetLayout) {
+    TEST_DESCRIPTION("Attempt updates to descriptor sets with destroyed descriptor set layouts");
+    // TODO: Update to match the descriptor set layout specific VUIDs/VALIDATION_ERROR_* when present
+    const auto kWriteDestroyedLayout = "VUID-VkWriteDescriptorSet-dstSet-00320";
+    const auto kCopyDstDestroyedLayout = "VUID-VkCopyDescriptorSet-dstSet-parameter";
+    const auto kCopySrcDestroyedLayout = "VUID-VkCopyDescriptorSet-srcSet-parameter";
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // Set up the descriptor (resource) and write/copy operations to use.
+    float data[16] = {};
+    VkConstantBufferObj buffer(m_device, sizeof(data), data, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);
+    ASSERT_TRUE(buffer.initialized());
+
+    VkDescriptorBufferInfo info = {};
+    info.buffer = buffer.handle();
+    info.range = VK_WHOLE_SIZE;
+
+    VkWriteDescriptorSet write_descriptor = {};
+    write_descriptor.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    write_descriptor.dstSet = VK_NULL_HANDLE;  // must update this
+    write_descriptor.dstBinding = 0;
+    write_descriptor.descriptorCount = 1;
+    write_descriptor.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    write_descriptor.pBufferInfo = &info;
+
+    VkCopyDescriptorSet copy_descriptor = {};
+    copy_descriptor.sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
+    copy_descriptor.srcSet = VK_NULL_HANDLE;  // must update
+    copy_descriptor.srcBinding = 0;
+    copy_descriptor.dstSet = VK_NULL_HANDLE;  // must update
+    copy_descriptor.dstBinding = 0;
+    copy_descriptor.descriptorCount = 1;
+
+    // Create valid and invalid source and destination descriptor sets
+    std::vector<VkDescriptorSetLayoutBinding> one_uniform_buffer = {
+        {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+    };
+    OneOffDescriptorSet good_dst(m_device, one_uniform_buffer);
+    ASSERT_TRUE(good_dst.Initialized());
+
+    OneOffDescriptorSet bad_dst(m_device, one_uniform_buffer);
+    // Must assert before invalidating it below
+    ASSERT_TRUE(bad_dst.Initialized());
+    bad_dst.layout_ = VkDescriptorSetLayoutObj();
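+    // Replacing layout_ with a fresh object destroys the layout bad_dst was created with, leaving the set with a destroyed
+    // layout.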
+
+    OneOffDescriptorSet good_src(m_device, one_uniform_buffer);
+    ASSERT_TRUE(good_src.Initialized());
+
+    // Put valid data in the good and bad sources, simultaneously doing a positive test on write and copy operations
+    m_errorMonitor->ExpectSuccess();
+    write_descriptor.dstSet = good_src.set_;
+    vk::UpdateDescriptorSets(m_device->device(), 1, &write_descriptor, 0, NULL);
+    m_errorMonitor->VerifyNotFound();
+
+    OneOffDescriptorSet bad_src(m_device, one_uniform_buffer);
+    ASSERT_TRUE(bad_src.Initialized());
+
+    // To complete our positive testing, use a copy where above we used a write.
+    copy_descriptor.srcSet = good_src.set_;
+    copy_descriptor.dstSet = bad_src.set_;
+    vk::UpdateDescriptorSets(m_device->device(), 0, nullptr, 1, &copy_descriptor);
+    bad_src.layout_ = VkDescriptorSetLayoutObj();
+    m_errorMonitor->VerifyNotFound();
+
+    // Trigger the three invalid use errors
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, kWriteDestroyedLayout);
+    write_descriptor.dstSet = bad_dst.set_;
+    vk::UpdateDescriptorSets(m_device->device(), 1, &write_descriptor, 0, NULL);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, kCopyDstDestroyedLayout);
+    copy_descriptor.dstSet = bad_dst.set_;
+    vk::UpdateDescriptorSets(m_device->device(), 0, nullptr, 1, &copy_descriptor);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, kCopySrcDestroyedLayout);
+    copy_descriptor.srcSet = bad_src.set_;
+    copy_descriptor.dstSet = good_dst.set_;
+    vk::UpdateDescriptorSets(m_device->device(), 0, nullptr, 1, &copy_descriptor);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, FramebufferIncompatible) {
+    TEST_DESCRIPTION(
+        "Bind a secondary command buffer with a framebuffer that does not match the framebuffer for the active renderpass.");
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // A renderpass with one color attachment.
+    VkAttachmentDescription attachment = {0,
+                                          VK_FORMAT_B8G8R8A8_UNORM,
+                                          VK_SAMPLE_COUNT_1_BIT,
+                                          VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+                                          VK_ATTACHMENT_STORE_OP_STORE,
+                                          VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+                                          VK_ATTACHMENT_STORE_OP_DONT_CARE,
+                                          VK_IMAGE_LAYOUT_UNDEFINED,
+                                          VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+
+    VkAttachmentReference att_ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+
+    VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &att_ref, nullptr, nullptr, 0, nullptr};
+
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &attachment, 1, &subpass, 0, nullptr};
+
+    VkRenderPass rp;
+    VkResult err = vk::CreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+    ASSERT_VK_SUCCESS(err);
+
+    // A compatible framebuffer.
+    VkImageObj image(m_device);
+    image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(image.initialized());
+
+    VkImageViewCreateInfo ivci = {
+        VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+        nullptr,
+        0,
+        image.handle(),
+        VK_IMAGE_VIEW_TYPE_2D,
+        VK_FORMAT_B8G8R8A8_UNORM,
+        {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
+         VK_COMPONENT_SWIZZLE_IDENTITY},
+        {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1},
+    };
+    VkImageView view;
+    err = vk::CreateImageView(m_device->device(), &ivci, nullptr, &view);
+    ASSERT_VK_SUCCESS(err);
+
+    VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &view, 32, 32, 1};
+    VkFramebuffer fb;
+    err = vk::CreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
+    ASSERT_VK_SUCCESS(err);
+
+    VkCommandBufferAllocateInfo cbai = {};
+    cbai.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+    cbai.commandPool = m_commandPool->handle();
+    cbai.level = VK_COMMAND_BUFFER_LEVEL_SECONDARY;
+    cbai.commandBufferCount = 1;
+
+    VkCommandBuffer sec_cb;
+    err = vk::AllocateCommandBuffers(m_device->device(), &cbai, &sec_cb);
+    ASSERT_VK_SUCCESS(err);
+    VkCommandBufferBeginInfo cbbi = {};
+    VkCommandBufferInheritanceInfo cbii = {};
+    cbii.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
+    cbii.renderPass = renderPass();
+    cbii.framebuffer = fb;
+    cbbi.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+    cbbi.pNext = NULL;
+    cbbi.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
+    cbbi.pInheritanceInfo = &cbii;
+    vk::BeginCommandBuffer(sec_cb, &cbbi);
+    vk::EndCommandBuffer(sec_cb);
+
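+    // The primary renders with the default renderpass/framebuffer, so executing the secondary recorded against fb should be
+    // flagged as incompatible.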
+    VkCommandBufferBeginInfo cbbi2 = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr, 0, nullptr};
+    vk::BeginCommandBuffer(m_commandBuffer->handle(), &cbbi2);
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdExecuteCommands-pCommandBuffers-00099");
+    vk::CmdExecuteCommands(m_commandBuffer->handle(), 1, &sec_cb);
+    m_errorMonitor->VerifyFound();
+    // Cleanup
+
+    vk::CmdEndRenderPass(m_commandBuffer->handle());
+    vk::EndCommandBuffer(m_commandBuffer->handle());
+
+    vk::DestroyImageView(m_device->device(), view, NULL);
+    vk::DestroyRenderPass(m_device->device(), rp, NULL);
+    vk::DestroyFramebuffer(m_device->device(), fb, NULL);
+}
+
+TEST_F(VkLayerTest, RenderPassMissingAttachment) {
+    TEST_DESCRIPTION("Begin render pass with missing framebuffer attachment");
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // Create a renderPass with a single color attachment
+    VkAttachmentReference attach = {};
+    attach.layout = VK_IMAGE_LAYOUT_GENERAL;
+    VkSubpassDescription subpass = {};
+    subpass.pColorAttachments = &attach;
+    VkRenderPassCreateInfo rpci = {};
+    rpci.subpassCount = 1;
+    rpci.pSubpasses = &subpass;
+    rpci.attachmentCount = 1;
+    VkAttachmentDescription attach_desc = {};
+    attach_desc.format = VK_FORMAT_B8G8R8A8_UNORM;
+    attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
+    attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+    rpci.pAttachments = &attach_desc;
+    rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+    VkRenderPass rp;
+    VkResult err = vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+    ASSERT_VK_SUCCESS(err);
+
+    auto createView = lvl_init_struct<VkImageViewCreateInfo>();
+    createView.image = m_renderTargets[0]->handle();
+    createView.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    createView.format = VK_FORMAT_B8G8R8A8_UNORM;
+    createView.components.r = VK_COMPONENT_SWIZZLE_R;
+    createView.components.g = VK_COMPONENT_SWIZZLE_G;
+    createView.components.b = VK_COMPONENT_SWIZZLE_B;
+    createView.components.a = VK_COMPONENT_SWIZZLE_A;
+    createView.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
+    createView.flags = 0;
+
+    VkImageView iv;
+    vk::CreateImageView(m_device->handle(), &createView, nullptr, &iv);
+
+    auto fb_info = lvl_init_struct<VkFramebufferCreateInfo>();
+    fb_info.renderPass = rp;
+    fb_info.attachmentCount = 1;
+    fb_info.pAttachments = &iv;
+    fb_info.width = 100;
+    fb_info.height = 100;
+    fb_info.layers = 1;
+
+    // Create the framebuffer, then destroy the view it uses.
+    VkFramebuffer fb;
+    err = vk::CreateFramebuffer(device(), &fb_info, NULL, &fb);
+    vk::DestroyImageView(device(), iv, NULL);
+    ASSERT_VK_SUCCESS(err);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkRenderPassBeginInfo-framebuffer-parameter");
+
+    auto rpbi = lvl_init_struct<VkRenderPassBeginInfo>();
+    rpbi.renderPass = rp;
+    rpbi.framebuffer = fb;
+    rpbi.renderArea = {{0, 0}, {32, 32}};
+
+    m_commandBuffer->begin();
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+    // Don't call vk::CmdEndRenderPass, as the begin has been "skipped" based on the error condition
+    m_errorMonitor->VerifyFound();
+    m_commandBuffer->end();
+
+    vk::DestroyFramebuffer(m_device->device(), fb, NULL);
+    vk::DestroyRenderPass(m_device->device(), rp, NULL);
+}
+
+TEST_F(VkLayerTest, AttachmentDescriptionUndefinedFormat) {
+    TEST_DESCRIPTION("Create a render pass with an attachment description format set to VK_FORMAT_UNDEFINED");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT, "format is VK_FORMAT_UNDEFINED");
+
+    VkAttachmentReference color_attach = {};
+    color_attach.layout = VK_IMAGE_LAYOUT_GENERAL;
+    color_attach.attachment = 0;
+    VkSubpassDescription subpass = {};
+    subpass.colorAttachmentCount = 1;
+    subpass.pColorAttachments = &color_attach;
+
+    VkRenderPassCreateInfo rpci = {};
+    rpci.subpassCount = 1;
+    rpci.pSubpasses = &subpass;
+    rpci.attachmentCount = 1;
+    VkAttachmentDescription attach_desc = {};
+    attach_desc.format = VK_FORMAT_UNDEFINED;
+    attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
+    attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+    rpci.pAttachments = &attach_desc;
+    rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+    VkRenderPass rp;
+    VkResult result = vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+
+    m_errorMonitor->VerifyFound();
+
+    if (result == VK_SUCCESS) {
+        vk::DestroyRenderPass(m_device->device(), rp, NULL);
+    }
+}
+
+TEST_F(VkLayerTest, InvalidCreateDescriptorPool) {
+    TEST_DESCRIPTION("Attempt to create descriptor pool with invalid parameters");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    const uint32_t default_descriptor_count = 1;
+    const VkDescriptorPoolSize dp_size_template{VK_DESCRIPTOR_TYPE_SAMPLER, default_descriptor_count};
+
+    const VkDescriptorPoolCreateInfo dp_ci_template{VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
+                                                    nullptr,  // pNext
+                                                    0,        // flags
+                                                    1,        // maxSets
+                                                    1,        // poolSizeCount
+                                                    &dp_size_template};
+
+    // try maxSets = 0
+    {
+        VkDescriptorPoolCreateInfo invalid_dp_ci = dp_ci_template;
+        invalid_dp_ci.maxSets = 0;  // invalid maxSets value
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorPoolCreateInfo-maxSets-00301");
+        {
+            VkDescriptorPool pool;
+            vk::CreateDescriptorPool(m_device->device(), &invalid_dp_ci, nullptr, &pool);
+        }
+        m_errorMonitor->VerifyFound();
+    }
+
+    // try descriptorCount = 0
+    {
+        VkDescriptorPoolSize invalid_dp_size = dp_size_template;
+        invalid_dp_size.descriptorCount = 0;  // invalid descriptorCount value
+
+        VkDescriptorPoolCreateInfo dp_ci = dp_ci_template;
+        dp_ci.pPoolSizes = &invalid_dp_size;
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorPoolSize-descriptorCount-00302");
+        {
+            VkDescriptorPool pool;
+            vk::CreateDescriptorPool(m_device->device(), &dp_ci, nullptr, &pool);
+        }
+        m_errorMonitor->VerifyFound();
+    }
+}
+
+TEST_F(VkLayerTest, DuplicateDescriptorBinding) {
+    TEST_DESCRIPTION("Create a descriptor set layout with a duplicate binding number.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    // Create layout where two binding #s are "1"
+    static const uint32_t NUM_BINDINGS = 3;
+    VkDescriptorSetLayoutBinding dsl_binding[NUM_BINDINGS] = {};
+    dsl_binding[0].binding = 1;
+    dsl_binding[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    dsl_binding[0].descriptorCount = 1;
+    dsl_binding[0].stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+    dsl_binding[0].pImmutableSamplers = NULL;
+    dsl_binding[1].binding = 0;
+    dsl_binding[1].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    dsl_binding[1].descriptorCount = 1;
+    dsl_binding[1].stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+    dsl_binding[1].pImmutableSamplers = NULL;
+    dsl_binding[2].binding = 1;  // Duplicate binding should cause error
+    dsl_binding[2].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    dsl_binding[2].descriptorCount = 1;
+    dsl_binding[2].stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+    dsl_binding[2].pImmutableSamplers = NULL;
+
+    VkDescriptorSetLayoutCreateInfo ds_layout_ci = {};
+    ds_layout_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
+    ds_layout_ci.pNext = NULL;
+    ds_layout_ci.bindingCount = NUM_BINDINGS;
+    ds_layout_ci.pBindings = dsl_binding;
+    VkDescriptorSetLayout ds_layout;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorSetLayoutCreateInfo-binding-00279");
+    vk::CreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InvalidPushDescriptorSetLayout) {
+    TEST_DESCRIPTION("Create a push descriptor set layout with invalid bindings.");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+    } else {
+        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    // Get the push descriptor limits
+    auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
+    if (push_descriptor_prop.maxPushDescriptors < 1) {
+        // Some implementations report an invalid maxPushDescriptors of 0
+        printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
+        return;
+    }
+
+    VkDescriptorSetLayoutBinding binding = {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr};
+
+    auto ds_layout_ci = lvl_init_struct<VkDescriptorSetLayoutCreateInfo>();
+    ds_layout_ci.flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR;
+    ds_layout_ci.bindingCount = 1;
+    ds_layout_ci.pBindings = &binding;
+
+    // Note that as binding is referenced in ds_layout_ci, it is effectively in the closure by reference as well.
+    auto test_create_ds_layout = [&ds_layout_ci, this](std::string error) {
+        VkDescriptorSetLayout ds_layout = VK_NULL_HANDLE;
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, error);
+        vk::CreateDescriptorSetLayout(m_device->handle(), &ds_layout_ci, nullptr, &ds_layout);
+        m_errorMonitor->VerifyFound();
+        vk::DestroyDescriptorSetLayout(m_device->handle(), ds_layout, nullptr);
+    };
+
+    // Starting with the initial VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC type set above.
+    test_create_ds_layout("VUID-VkDescriptorSetLayoutCreateInfo-flags-00280");
+
+    binding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC;
+    test_create_ds_layout(
+        "VUID-VkDescriptorSetLayoutCreateInfo-flags-00280");  // This is the same VUID as above, just a second error condition.
+
+    if (!(push_descriptor_prop.maxPushDescriptors == std::numeric_limits<uint32_t>::max())) {
+        binding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
+        binding.descriptorCount = push_descriptor_prop.maxPushDescriptors + 1;
+        test_create_ds_layout("VUID-VkDescriptorSetLayoutCreateInfo-flags-00281");
+    } else {
+        printf("%s maxPushDescriptors is set to maximum unit32_t value, skipping 'out of range test'.\n", kSkipPrefix);
+    }
+}
+
+TEST_F(VkLayerTest, PushDescriptorSetLayoutWithoutExtension) {
+    TEST_DESCRIPTION("Create a push descriptor set layout without loading the needed extension.");
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkDescriptorSetLayoutBinding binding = {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr};
+
+    auto ds_layout_ci = lvl_init_struct<VkDescriptorSetLayoutCreateInfo>();
+    ds_layout_ci.flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR;
+    ds_layout_ci.bindingCount = 1;
+    ds_layout_ci.pBindings = &binding;
+
+    std::string error = "Attempted to use VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR in ";
+    error = error + "VkDescriptorSetLayoutCreateInfo::flags but its required extension ";
+    error = error + VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME;
+    error = error + " has not been enabled.";
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, error.c_str());
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorSetLayoutCreateInfo-flags-00281");
+    VkDescriptorSetLayout ds_layout = VK_NULL_HANDLE;
+    vk::CreateDescriptorSetLayout(m_device->handle(), &ds_layout_ci, nullptr, &ds_layout);
+    m_errorMonitor->VerifyFound();
+    vk::DestroyDescriptorSetLayout(m_device->handle(), ds_layout, nullptr);
+}
+
+TEST_F(VkLayerTest, DescriptorIndexingSetLayoutWithoutExtension) {
+    TEST_DESCRIPTION("Create an update_after_bind set layout without loading the needed extension.");
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    auto ds_layout_ci = lvl_init_struct<VkDescriptorSetLayoutCreateInfo>();
+    ds_layout_ci.flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT;
+
+    std::string error = "Attemped to use VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT in ";
+    error = error + "VkDescriptorSetLayoutCreateInfo::flags but its required extension ";
+    error = error + VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME;
+    error = error + " has not been enabled.";
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, error.c_str());
+    VkDescriptorSetLayout ds_layout = VK_NULL_HANDLE;
+    vk::CreateDescriptorSetLayout(m_device->handle(), &ds_layout_ci, nullptr, &ds_layout);
+    m_errorMonitor->VerifyFound();
+    vk::DestroyDescriptorSetLayout(m_device->handle(), ds_layout, nullptr);
+}
+
+TEST_F(VkLayerTest, DescriptorIndexingSetLayout) {
+    TEST_DESCRIPTION("Exercise various create/allocate-time errors related to VK_EXT_descriptor_indexing.");
+
+    if (!(CheckDescriptorIndexingSupportAndInitFramework(this, m_instance_extension_names, m_device_extension_names, NULL,
+                                                         m_errorMonitor))) {
+        printf("%s Descriptor indexing or one of its dependencies not supported, skipping tests\n.", kSkipPrefix);
+        return;
+    }
+
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+    // Create a device that enables all supported indexing features except descriptorBindingUniformBufferUpdateAfterBind
+    auto indexing_features = lvl_init_struct<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>();
+    auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&indexing_features);
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+
+    indexing_features.descriptorBindingUniformBufferUpdateAfterBind = VK_FALSE;
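+    // With this feature off, requesting UPDATE_AFTER_BIND on a uniform buffer binding below is invalid (03005).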
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
+
+    std::array<VkDescriptorBindingFlagsEXT, 2> flags = {VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT,
+                                                        VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT};
+    auto flags_create_info = lvl_init_struct<VkDescriptorSetLayoutBindingFlagsCreateInfoEXT>();
+    flags_create_info.bindingCount = (uint32_t)flags.size();
+    flags_create_info.pBindingFlags = flags.data();
+
+    VkDescriptorSetLayoutBinding binding = {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr};
+    auto ds_layout_ci = lvl_init_struct<VkDescriptorSetLayoutCreateInfo>(&flags_create_info);
+    ds_layout_ci.bindingCount = 1;
+    ds_layout_ci.pBindings = &binding;
+    VkDescriptorSetLayout ds_layout = VK_NULL_HANDLE;
+
+    // VU for VkDescriptorSetLayoutBindingFlagsCreateInfoEXT::bindingCount
+    flags_create_info.bindingCount = 2;
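+    // bindingCount here must be zero or equal to the layout's bindingCount (1), so 2 is invalid.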
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-bindingCount-03002");
+    VkResult err = vk::CreateDescriptorSetLayout(m_device->handle(), &ds_layout_ci, nullptr, &ds_layout);
+    m_errorMonitor->VerifyFound();
+    vk::DestroyDescriptorSetLayout(m_device->handle(), ds_layout, nullptr);
+
+    flags_create_info.bindingCount = 1;
+
+    // set is missing UPDATE_AFTER_BIND_POOL flag.
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorSetLayoutCreateInfo-flags-03000");
+    // binding uses a feature we disabled
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingUniformBufferUpdateAfterBind-03005");
+    err = vk::CreateDescriptorSetLayout(m_device->handle(), &ds_layout_ci, nullptr, &ds_layout);
+    m_errorMonitor->VerifyFound();
+    vk::DestroyDescriptorSetLayout(m_device->handle(), ds_layout, nullptr);
+
+    ds_layout_ci.flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT;
+    ds_layout_ci.bindingCount = 0;
+    flags_create_info.bindingCount = 0;
+    err = vk::CreateDescriptorSetLayout(m_device->handle(), &ds_layout_ci, nullptr, &ds_layout);
+    ASSERT_VK_SUCCESS(err);
+
+    VkDescriptorPoolSize pool_size = {binding.descriptorType, binding.descriptorCount};
+    auto dspci = lvl_init_struct<VkDescriptorPoolCreateInfo>();
+    dspci.poolSizeCount = 1;
+    dspci.pPoolSizes = &pool_size;
+    dspci.maxSets = 1;
+    VkDescriptorPool pool;
+    err = vk::CreateDescriptorPool(m_device->handle(), &dspci, nullptr, &pool);
+    ASSERT_VK_SUCCESS(err);
+
+    auto ds_alloc_info = lvl_init_struct<VkDescriptorSetAllocateInfo>();
+    ds_alloc_info.descriptorPool = pool;
+    ds_alloc_info.descriptorSetCount = 1;
+    ds_alloc_info.pSetLayouts = &ds_layout;
+
+    VkDescriptorSet ds = VK_NULL_HANDLE;
+    // mismatch between descriptor set and pool
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorSetAllocateInfo-pSetLayouts-03044");
+    vk::AllocateDescriptorSets(m_device->handle(), &ds_alloc_info, &ds);
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyDescriptorSetLayout(m_device->handle(), ds_layout, nullptr);
+    vk::DestroyDescriptorPool(m_device->handle(), pool, nullptr);
+
+    if (indexing_features.descriptorBindingVariableDescriptorCount) {
+        ds_layout_ci.flags = 0;
+        ds_layout_ci.bindingCount = 1;
+        flags_create_info.bindingCount = 1;
+        flags[0] = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT;
+        err = vk::CreateDescriptorSetLayout(m_device->handle(), &ds_layout_ci, nullptr, &ds_layout);
+        ASSERT_VK_SUCCESS(err);
+
+        pool_size = {binding.descriptorType, binding.descriptorCount};
+        dspci = lvl_init_struct<VkDescriptorPoolCreateInfo>();
+        dspci.poolSizeCount = 1;
+        dspci.pPoolSizes = &pool_size;
+        dspci.maxSets = 1;
+        err = vk::CreateDescriptorPool(m_device->handle(), &dspci, nullptr, &pool);
+        ASSERT_VK_SUCCESS(err);
+
+        auto count_alloc_info = lvl_init_struct<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT>();
+        count_alloc_info.descriptorSetCount = 1;
+        // Set variable count larger than what was in the descriptor binding
+        uint32_t variable_count = 2;
+        count_alloc_info.pDescriptorCounts = &variable_count;
+
+        ds_alloc_info = lvl_init_struct<VkDescriptorSetAllocateInfo>(&count_alloc_info);
+        ds_alloc_info.descriptorPool = pool;
+        ds_alloc_info.descriptorSetCount = 1;
+        ds_alloc_info.pSetLayouts = &ds_layout;
+
+        ds = VK_NULL_HANDLE;
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkDescriptorSetVariableDescriptorCountAllocateInfoEXT-pSetLayouts-03046");
+        vk::AllocateDescriptorSets(m_device->handle(), &ds_alloc_info, &ds);
+        m_errorMonitor->VerifyFound();
+
+        vk::DestroyDescriptorSetLayout(m_device->handle(), ds_layout, nullptr);
+        vk::DestroyDescriptorPool(m_device->handle(), pool, nullptr);
+    }
+}
+
+TEST_F(VkLayerTest, DescriptorIndexingUpdateAfterBind) {
+    TEST_DESCRIPTION("Exercise errors for updating a descriptor set after it is bound.");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME) &&
+        DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE3_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE3_EXTENSION_NAME);
+    } else {
+        printf("%s Descriptor Indexing or Maintenance3 Extension not supported, skipping tests\n", kSkipPrefix);
+        return;
+    }
+
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+    // Create a device that enables all supported indexing features except descriptorBindingUniformBufferUpdateAfterBind
+    auto indexing_features = lvl_init_struct<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>();
+    auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&indexing_features);
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+
+    indexing_features.descriptorBindingUniformBufferUpdateAfterBind = VK_FALSE;
+
+    if (VK_FALSE == indexing_features.descriptorBindingStorageBufferUpdateAfterBind) {
+        printf("%s Test requires (unsupported) descriptorBindingStorageBufferUpdateAfterBind, skipping\n", kSkipPrefix);
+        return;
+    }
+    if (VK_FALSE == features2.features.fragmentStoresAndAtomics) {
+        printf("%s Test requires (unsupported) fragmentStoresAndAtomics, skipping\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkDescriptorBindingFlagsEXT flags[3] = {0, VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT,
+                                            VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT};
+    auto flags_create_info = lvl_init_struct<VkDescriptorSetLayoutBindingFlagsCreateInfoEXT>();
+    flags_create_info.bindingCount = 3;
+    flags_create_info.pBindingFlags = &flags[0];
+
+    // Descriptor set has three bindings - only the second and third are update_after_bind
+    VkDescriptorSetLayoutBinding binding[3] = {
+        {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
+        {1, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
+        {2, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
+    };
+    auto ds_layout_ci = lvl_init_struct<VkDescriptorSetLayoutCreateInfo>(&flags_create_info);
+    ds_layout_ci.flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT;
+    ds_layout_ci.bindingCount = 3;
+    ds_layout_ci.pBindings = &binding[0];
+    VkDescriptorSetLayout ds_layout = VK_NULL_HANDLE;
+
+    VkResult err = vk::CreateDescriptorSetLayout(m_device->handle(), &ds_layout_ci, nullptr, &ds_layout);
+
+    VkDescriptorPoolSize pool_sizes[3] = {
+        {binding[0].descriptorType, binding[0].descriptorCount},
+        {binding[1].descriptorType, binding[1].descriptorCount},
+        {binding[2].descriptorType, binding[2].descriptorCount},
+    };
+    auto dspci = lvl_init_struct<VkDescriptorPoolCreateInfo>();
+    dspci.flags = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT;
+    dspci.poolSizeCount = 3;
+    dspci.pPoolSizes = &pool_sizes[0];
+    dspci.maxSets = 1;
+    VkDescriptorPool pool;
+    err = vk::CreateDescriptorPool(m_device->handle(), &dspci, nullptr, &pool);
+    ASSERT_VK_SUCCESS(err);
+
+    auto ds_alloc_info = lvl_init_struct<VkDescriptorSetAllocateInfo>();
+    ds_alloc_info.descriptorPool = pool;
+    ds_alloc_info.descriptorSetCount = 1;
+    ds_alloc_info.pSetLayouts = &ds_layout;
+
+    VkDescriptorSet ds = VK_NULL_HANDLE;
+    err = vk::AllocateDescriptorSets(m_device->handle(), &ds_alloc_info, &ds);
+    ASSERT_VK_SUCCESS(err);
+
+    VkBufferCreateInfo buffCI = {};
+    buffCI.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    buffCI.size = 1024;
+    buffCI.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
+
+    VkBuffer dynamic_uniform_buffer;
+    err = vk::CreateBuffer(m_device->device(), &buffCI, NULL, &dynamic_uniform_buffer);
+    ASSERT_VK_SUCCESS(err);
+
+    VkDeviceMemory mem;
+    VkMemoryRequirements mem_reqs;
+    vk::GetBufferMemoryRequirements(m_device->device(), dynamic_uniform_buffer, &mem_reqs);
+
+    VkMemoryAllocateInfo mem_alloc_info = {};
+    mem_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    mem_alloc_info.allocationSize = mem_reqs.size;
+    m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+    err = vk::AllocateMemory(m_device->device(), &mem_alloc_info, NULL, &mem);
+    ASSERT_VK_SUCCESS(err);
+
+    err = vk::BindBufferMemory(m_device->device(), dynamic_uniform_buffer, mem, 0);
+    ASSERT_VK_SUCCESS(err);
+
+    VkDescriptorBufferInfo buffInfo[2] = {};
+    buffInfo[0].buffer = dynamic_uniform_buffer;
+    buffInfo[0].offset = 0;
+    buffInfo[0].range = 1024;
+
+    VkWriteDescriptorSet descriptor_write[2] = {};
+    descriptor_write[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_write[0].dstSet = ds;
+    descriptor_write[0].dstBinding = 0;
+    descriptor_write[0].descriptorCount = 1;
+    descriptor_write[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    descriptor_write[0].pBufferInfo = buffInfo;
+    descriptor_write[1] = descriptor_write[0];
+    descriptor_write[1].dstBinding = 1;
+    descriptor_write[1].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
+
+    VkPipelineLayout pipeline_layout;
+    VkPipelineLayoutCreateInfo pipeline_layout_ci = {};
+    pipeline_layout_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+    pipeline_layout_ci.setLayoutCount = 1;
+    pipeline_layout_ci.pSetLayouts = &ds_layout;
+
+    vk::CreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+
+    // Create a dummy pipeline, since VL inspects which bindings are actually used at draw time
+    char const *fsSource =
+        "#version 450\n"
+        "\n"
+        "layout(location=0) out vec4 color;\n"
+        "layout(set=0, binding=0) uniform foo0 { float x0; } bar0;\n"
+        "layout(set=0, binding=1) buffer  foo1 { float x1; } bar1;\n"
+        "layout(set=0, binding=2) buffer  foo2 { float x2; } bar2;\n"
+        "void main(){\n"
+        "   color = vec4(bar0.x0 + bar1.x1 + bar2.x2);\n"
+        "}\n";
+
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    VkPipelineObj pipe(m_device);
+    pipe.SetViewport(m_viewports);
+    pipe.SetScissor(m_scissors);
+    pipe.AddDefaultColorAttachment();
+    pipe.AddShader(&vs);
+    pipe.AddShader(&fs);
+    pipe.CreateVKPipeline(pipeline_layout, m_renderPass);
+
+    // Make both bindings valid before binding to the command buffer
+    vk::UpdateDescriptorSets(m_device->device(), 2, &descriptor_write[0], 0, NULL);
+    m_errorMonitor->VerifyNotFound();
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+
+    // Two subtests. First only updates the update_after_bind binding and expects
+    // no error. Second updates the other binding and expects an error when the
+    // command buffer is ended.
+    for (uint32_t i = 0; i < 2; ++i) {
+        m_commandBuffer->begin();
+
+        vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout, 0, 1, &ds, 0, NULL);
+
+        m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+        vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
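+        // Even a zero-sized draw marks the bound descriptor set as used by this command buffer.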
+        vk::CmdDraw(m_commandBuffer->handle(), 0, 0, 0, 0);
+        vk::CmdEndRenderPass(m_commandBuffer->handle());
+
+        m_errorMonitor->VerifyNotFound();
+        // Valid to update binding 1 after being bound
+        vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write[1], 0, NULL);
+        m_errorMonitor->VerifyNotFound();
+
+        if (i == 0) {
+            // Ending the command buffer itself is clean; the submit below should flag binding 2, which the shader uses but
+            // which was never updated.
+            m_commandBuffer->end();
+            m_errorMonitor->VerifyNotFound();
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                                 "UNASSIGNED-CoreValidation-DrawState-DescriptorSetNotUpdated");
+            vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+            m_errorMonitor->VerifyFound();
+            vk::QueueWaitIdle(m_device->m_queue);
+        } else {
+            // Invalid to update binding 0 after being bound. But the error is actually
+            // generated during vk::EndCommandBuffer
+            vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write[0], 0, NULL);
+            m_errorMonitor->VerifyNotFound();
+
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                                 "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkDescriptorSet");
+
+            vk::EndCommandBuffer(m_commandBuffer->handle());
+            m_errorMonitor->VerifyFound();
+        }
+    }
+
+    vk::DestroyDescriptorSetLayout(m_device->handle(), ds_layout, nullptr);
+    vk::DestroyDescriptorPool(m_device->handle(), pool, nullptr);
+    vk::DestroyBuffer(m_device->handle(), dynamic_uniform_buffer, NULL);
+    vk::FreeMemory(m_device->handle(), mem, NULL);
+    vk::DestroyPipelineLayout(m_device->handle(), pipeline_layout, NULL);
+}
+
+TEST_F(VkLayerTest, AllocatePushDescriptorSet) {
+    TEST_DESCRIPTION("Attempt to allocate a push descriptor set.");
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+    } else {
+        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
+    if (push_descriptor_prop.maxPushDescriptors < 1) {
+        // Some implementations report an invalid maxPushDescriptors of 0
+        printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
+        return;
+    }
+
+    VkDescriptorSetLayoutBinding binding = {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr};
+    auto ds_layout_ci = lvl_init_struct<VkDescriptorSetLayoutCreateInfo>();
+    ds_layout_ci.flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR;
+    ds_layout_ci.bindingCount = 1;
+    ds_layout_ci.pBindings = &binding;
+    VkDescriptorSetLayout ds_layout = VK_NULL_HANDLE;
+    VkResult err = vk::CreateDescriptorSetLayout(m_device->handle(), &ds_layout_ci, nullptr, &ds_layout);
+    ASSERT_VK_SUCCESS(err);
+
+    VkDescriptorPoolSize pool_size = {binding.descriptorType, binding.descriptorCount};
+    auto dspci = lvl_init_struct<VkDescriptorPoolCreateInfo>();
+    dspci.poolSizeCount = 1;
+    dspci.pPoolSizes = &pool_size;
+    dspci.maxSets = 1;
+    VkDescriptorPool pool;
+    err = vk::CreateDescriptorPool(m_device->handle(), &dspci, nullptr, &pool);
+    ASSERT_VK_SUCCESS(err);
+
+    auto ds_alloc_info = lvl_init_struct<VkDescriptorSetAllocateInfo>();
+    ds_alloc_info.descriptorPool = pool;
+    ds_alloc_info.descriptorSetCount = 1;
+    ds_alloc_info.pSetLayouts = &ds_layout;
+
+    VkDescriptorSet ds = VK_NULL_HANDLE;
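+    // Descriptor set layouts created with the PUSH_DESCRIPTOR flag cannot be used to allocate descriptor sets.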
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorSetAllocateInfo-pSetLayouts-00308");
+    vk::AllocateDescriptorSets(m_device->handle(), &ds_alloc_info, &ds);
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyDescriptorPool(m_device->handle(), pool, nullptr);
+    vk::DestroyDescriptorSetLayout(m_device->handle(), ds_layout, nullptr);
+}
+
+TEST_F(VkLayerTest, CreateDescriptorUpdateTemplate) {
+    TEST_DESCRIPTION("Verify error messages for invalid vkCreateDescriptorUpdateTemplate calls.");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; skipped.\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    // Note: Includes workaround for some implementations which incorrectly return 0 maxPushDescriptors
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME) &&
+        DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME) &&
+        (GetPushDescriptorProperties(instance(), gpu()).maxPushDescriptors > 0)) {
+        m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME);
+    } else {
+        printf("%s Push Descriptors and Descriptor Update Template Extensions not supported, skipping tests\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    VkDescriptorSetLayoutBinding dsl_binding = {};
+    dsl_binding.binding = 0;
+    dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    dsl_binding.descriptorCount = 1;
+    dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
+    dsl_binding.pImmutableSamplers = NULL;
+
+    const VkDescriptorSetLayoutObj ds_layout_ub(m_device, {dsl_binding});
+    const VkDescriptorSetLayoutObj ds_layout_ub1(m_device, {dsl_binding});
+    const VkDescriptorSetLayoutObj ds_layout_ub_push(m_device, {dsl_binding},
+                                                     VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
+    const VkPipelineLayoutObj pipeline_layout(m_device, {{&ds_layout_ub, &ds_layout_ub1, &ds_layout_ub_push}});
+    PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR =
+        (PFN_vkCreateDescriptorUpdateTemplateKHR)vk::GetDeviceProcAddr(m_device->device(), "vkCreateDescriptorUpdateTemplateKHR");
+    ASSERT_NE(vkCreateDescriptorUpdateTemplateKHR, nullptr);
+    PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR =
+        (PFN_vkDestroyDescriptorUpdateTemplateKHR)vk::GetDeviceProcAddr(m_device->device(), "vkDestroyDescriptorUpdateTemplateKHR");
+    ASSERT_NE(vkDestroyDescriptorUpdateTemplateKHR, nullptr);
+
+    VkDescriptorUpdateTemplateEntry entries = {0, 0, 1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 0, sizeof(VkBuffer)};
+    VkDescriptorUpdateTemplateCreateInfo create_info = {};
+    create_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO;
+    create_info.pNext = nullptr;
+    create_info.flags = 0;
+    create_info.descriptorUpdateEntryCount = 1;
+    create_info.pDescriptorUpdateEntries = &entries;
+
+    auto do_test = [&](std::string err) {
+        VkDescriptorUpdateTemplateKHR dut = VK_NULL_HANDLE;
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, err);
+        if (VK_SUCCESS == vkCreateDescriptorUpdateTemplateKHR(m_device->handle(), &create_info, nullptr, &dut)) {
+            vkDestroyDescriptorUpdateTemplateKHR(m_device->handle(), dut, nullptr);
+        }
+        m_errorMonitor->VerifyFound();
+    };
+
+    // Descriptor set type template
+    create_info.templateType = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET;
+    // descriptorSetLayout is NULL
+    do_test("VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00350");
+
+    // Push descriptor type template
+    create_info.templateType = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR;
+    create_info.pipelineBindPoint = VK_PIPELINE_BIND_POINT_COMPUTE;
+    create_info.pipelineLayout = pipeline_layout.handle();
+    create_info.set = 2;
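+    // set 2 is the push descriptor layout (ds_layout_ub_push) in pipeline_layout.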
+
+    // Bad bindpoint -- force fuzz the bind point
+    memset(&create_info.pipelineBindPoint, 0xFE, sizeof(create_info.pipelineBindPoint));
+    do_test("VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00351");
+    create_info.pipelineBindPoint = VK_PIPELINE_BIND_POINT_COMPUTE;
+
+    // Bad pipeline layout
+    create_info.pipelineLayout = VK_NULL_HANDLE;
+    do_test("VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00352");
+    create_info.pipelineLayout = pipeline_layout.handle();
+
+    // Wrong set #
+    create_info.set = 0;
+    do_test("VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00353");
+
+    // Invalid set #
+    create_info.set = 42;
+    do_test("VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00353");
+}
+
+TEST_F(VkLayerTest, InlineUniformBlockEXT) {
+    TEST_DESCRIPTION("Test VK_EXT_inline_uniform_block.");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    std::array<const char *, 2> required_device_extensions = {VK_KHR_MAINTENANCE1_EXTENSION_NAME,
+                                                              VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME};
+    for (auto device_extension : required_device_extensions) {
+        if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
+            m_device_extension_names.push_back(device_extension);
+        } else {
+            printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
+            return;
+        }
+    }
+
+    // Enable descriptor indexing if supported, but don't require it.
+    bool supportsDescriptorIndexing = true;
+    required_device_extensions = {VK_KHR_MAINTENANCE3_EXTENSION_NAME, VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME};
+    for (auto device_extension : required_device_extensions) {
+        if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
+            m_device_extension_names.push_back(device_extension);
+        } else {
+            printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
+            supportsDescriptorIndexing = false;
+            return;
+        }
+    }
+
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+    auto descriptor_indexing_features = lvl_init_struct<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>();
+    void *pNext = supportsDescriptorIndexing ? &descriptor_indexing_features : nullptr;
+    // Create a device that enables inline_uniform_block
+    auto inline_uniform_block_features = lvl_init_struct<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(pNext);
+    auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&inline_uniform_block_features);
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+
+    PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR =
+        (PFN_vkGetPhysicalDeviceProperties2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceProperties2KHR");
+    assert(vkGetPhysicalDeviceProperties2KHR != nullptr);
+
+    // Get the inline uniform block limits
+    auto inline_uniform_props = lvl_init_struct<VkPhysicalDeviceInlineUniformBlockPropertiesEXT>();
+    auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&inline_uniform_props);
+    vkGetPhysicalDeviceProperties2KHR(gpu(), &prop2);
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
+
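+    // Note (spec background): for VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, descriptorCount is
+    // the size of the block in bytes rather than an array length; sizes and offsets must be
+    // multiples of 4 and are bounded by the maxInlineUniformBlock* limits queried above, which is
+    // what the negative cases in this test exercise.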
+    VkDescriptorSetLayoutBinding dslb = {};
+    std::vector<VkDescriptorSetLayoutBinding> dslb_vec = {};
+    VkDescriptorSetLayoutCreateInfo ds_layout_ci = {};
+    ds_layout_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
+    VkDescriptorSetLayout ds_layout = {};
+
+    // Test too many bindings
+    dslb_vec.clear();
+    dslb.binding = 0;
+    dslb.descriptorType = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT;
+    dslb.descriptorCount = 4;
+    dslb.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+
+    if (inline_uniform_props.maxInlineUniformBlockSize < dslb.descriptorCount) {
+        printf("%sDescriptorCount exceeds InlineUniformBlockSize limit, skipping tests\n", kSkipPrefix);
+        return;
+    }
+
+    uint32_t maxBlocks = std::max(inline_uniform_props.maxPerStageDescriptorInlineUniformBlocks,
+                                  inline_uniform_props.maxDescriptorSetInlineUniformBlocks);
+    for (uint32_t i = 0; i < 1 + maxBlocks; ++i) {
+        dslb.binding = i;
+        dslb_vec.push_back(dslb);
+    }
+
+    ds_layout_ci.bindingCount = dslb_vec.size();
+    ds_layout_ci.pBindings = dslb_vec.data();
+    VkResult err = vk::CreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+    ASSERT_VK_SUCCESS(err);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-descriptorType-02214");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-descriptorType-02216");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-descriptorType-02215");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-descriptorType-02217");
+
+    VkPipelineLayoutCreateInfo pipeline_layout_ci = {};
+    pipeline_layout_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+    pipeline_layout_ci.pNext = NULL;
+    pipeline_layout_ci.setLayoutCount = 1;
+    pipeline_layout_ci.pSetLayouts = &ds_layout;
+    VkPipelineLayout pipeline_layout = VK_NULL_HANDLE;
+
+    err = vk::CreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+    m_errorMonitor->VerifyFound();
+    vk::DestroyPipelineLayout(m_device->device(), pipeline_layout, NULL);
+    pipeline_layout = VK_NULL_HANDLE;
+    vk::DestroyDescriptorSetLayout(m_device->device(), ds_layout, nullptr);
+    ds_layout = VK_NULL_HANDLE;
+
+    // Single binding that's too large and is not a multiple of 4
+    dslb.binding = 0;
+    dslb.descriptorCount = inline_uniform_props.maxInlineUniformBlockSize + 1;
+
+    ds_layout_ci.bindingCount = 1;
+    ds_layout_ci.pBindings = &dslb;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorSetLayoutBinding-descriptorType-02209");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorSetLayoutBinding-descriptorType-02210");
+    err = vk::CreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+    m_errorMonitor->VerifyFound();
+    vk::DestroyDescriptorSetLayout(m_device->device(), ds_layout, nullptr);
+    ds_layout = VK_NULL_HANDLE;
+
+    // Pool size must be a multiple of 4
+    VkDescriptorPoolSize ds_type_count = {};
+    ds_type_count.type = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT;
+    ds_type_count.descriptorCount = 33;
+
+    VkDescriptorPoolCreateInfo ds_pool_ci = {};
+    ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+    ds_pool_ci.pNext = NULL;
+    ds_pool_ci.flags = 0;
+    ds_pool_ci.maxSets = 2;
+    ds_pool_ci.poolSizeCount = 1;
+    ds_pool_ci.pPoolSizes = &ds_type_count;
+
+    VkDescriptorPool ds_pool = VK_NULL_HANDLE;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDescriptorPoolSize-type-02218");
+    err = vk::CreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
+    m_errorMonitor->VerifyFound();
+    if (ds_pool) {
+        vk::DestroyDescriptorPool(m_device->handle(), ds_pool, nullptr);
+        ds_pool = VK_NULL_HANDLE;
+    }
+
+    // Create a valid pool
+    ds_type_count.descriptorCount = 32;
+    err = vk::CreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
+    m_errorMonitor->VerifyNotFound();
+
+    // Create two valid sets with 8 bytes each
+    dslb_vec.clear();
+    dslb.binding = 0;
+    dslb.descriptorType = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT;
+    dslb.descriptorCount = 8;
+    dslb.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+    dslb_vec.push_back(dslb);
+    dslb.binding = 1;
+    dslb_vec.push_back(dslb);
+
+    ds_layout_ci.bindingCount = dslb_vec.size();
+    ds_layout_ci.pBindings = &dslb_vec[0];
+
+    err = vk::CreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+    m_errorMonitor->VerifyNotFound();
+
+    VkDescriptorSet descriptor_sets[2];
+    VkDescriptorSetLayout set_layouts[2] = {ds_layout, ds_layout};
+    VkDescriptorSetAllocateInfo alloc_info = {};
+    alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+    alloc_info.descriptorSetCount = 2;
+    alloc_info.descriptorPool = ds_pool;
+    alloc_info.pSetLayouts = set_layouts;
+    err = vk::AllocateDescriptorSets(m_device->device(), &alloc_info, descriptor_sets);
+    m_errorMonitor->VerifyNotFound();
+
+    // Test invalid VkWriteDescriptorSet parameters (array element and size must be multiple of 4)
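+    // (For inline uniform blocks, dstArrayElement is interpreted as a byte offset into the block,
+    // so the offset, descriptorCount, and dataSize must all be multiples of 4.)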
+    VkWriteDescriptorSet descriptor_write = {};
+    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_write.dstSet = descriptor_sets[0];
+    descriptor_write.dstBinding = 0;
+    descriptor_write.dstArrayElement = 0;
+    descriptor_write.descriptorCount = 3;
+    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT;
+
+    uint32_t dummyData[8] = {};
+    VkWriteDescriptorSetInlineUniformBlockEXT write_inline_uniform = {};
+    write_inline_uniform.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT;
+    write_inline_uniform.dataSize = 3;
+    write_inline_uniform.pData = &dummyData[0];
+    descriptor_write.pNext = &write_inline_uniform;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-02220");
+    vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+    m_errorMonitor->VerifyFound();
+
+    descriptor_write.dstArrayElement = 1;
+    descriptor_write.descriptorCount = 4;
+    write_inline_uniform.dataSize = 4;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-02219");
+    vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+    m_errorMonitor->VerifyFound();
+
+    descriptor_write.pNext = nullptr;
+    descriptor_write.dstArrayElement = 0;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-descriptorType-02221");
+    vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+    m_errorMonitor->VerifyFound();
+
+    descriptor_write.pNext = &write_inline_uniform;
+    vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+    m_errorMonitor->VerifyNotFound();
+
+    // Test invalid VkCopyDescriptorSet parameters (array element and size must be multiple of 4)
+    VkCopyDescriptorSet copy_ds_update = {};
+    copy_ds_update.sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
+    copy_ds_update.srcSet = descriptor_sets[0];
+    copy_ds_update.srcBinding = 0;
+    copy_ds_update.srcArrayElement = 0;
+    copy_ds_update.dstSet = descriptor_sets[1];
+    copy_ds_update.dstBinding = 0;
+    copy_ds_update.dstArrayElement = 0;
+    copy_ds_update.descriptorCount = 4;
+
+    copy_ds_update.srcArrayElement = 1;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkCopyDescriptorSet-srcBinding-02223");
+    vk::UpdateDescriptorSets(m_device->device(), 0, NULL, 1, &copy_ds_update);
+    m_errorMonitor->VerifyFound();
+
+    copy_ds_update.srcArrayElement = 0;
+    copy_ds_update.dstArrayElement = 1;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkCopyDescriptorSet-dstBinding-02224");
+    vk::UpdateDescriptorSets(m_device->device(), 0, NULL, 1, &copy_ds_update);
+    m_errorMonitor->VerifyFound();
+
+    copy_ds_update.dstArrayElement = 0;
+    copy_ds_update.descriptorCount = 5;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkCopyDescriptorSet-srcBinding-02225");
+    vk::UpdateDescriptorSets(m_device->device(), 0, NULL, 1, &copy_ds_update);
+    m_errorMonitor->VerifyFound();
+
+    copy_ds_update.descriptorCount = 4;
+    vk::UpdateDescriptorSets(m_device->device(), 0, NULL, 1, &copy_ds_update);
+    m_errorMonitor->VerifyNotFound();
+
+    vk::DestroyDescriptorPool(m_device->handle(), ds_pool, nullptr);
+    vk::DestroyDescriptorSetLayout(m_device->device(), ds_layout, nullptr);
+}
+
+TEST_F(VkLayerTest, WrongdstArrayElement) {
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    OneOffDescriptorSet descriptor_set(m_device, {
+                                                     {0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                                     {1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                                 });
+    VkImageObj image(m_device);
+    image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    VkImageView view = image.targetView(VK_FORMAT_B8G8R8A8_UNORM);
+
+    VkDescriptorImageInfo image_info = {};
+    image_info.imageView = view;
+    image_info.sampler = VK_NULL_HANDLE;
+    image_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+    descriptor_set.image_infos.emplace_back(image_info);
+
+    VkWriteDescriptorSet descriptor_write;
+    memset(&descriptor_write, 0, sizeof(descriptor_write));
+    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_write.dstSet = descriptor_set.set_;
+    descriptor_write.dstBinding = 0;
+    descriptor_write.descriptorCount = 1;
+    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
+    descriptor_write.pImageInfo = descriptor_set.image_infos.data();
+    descriptor_write.pBufferInfo = nullptr;
+    descriptor_write.pTexelBufferView = nullptr;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-dstArrayElement-00321");
+    descriptor_write.dstArrayElement = 1;
+    descriptor_set.descriptor_writes.emplace_back(descriptor_write);
+    descriptor_set.UpdateDescriptorSets();
+    m_errorMonitor->VerifyFound();
+
+    OneOffDescriptorSet descriptor_set2(m_device, {
+                                                      {0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 2, VK_SHADER_STAGE_ALL, nullptr},
+                                                      {1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                                  });
+
+    descriptor_set2.image_infos.emplace_back(image_info);
+    descriptor_set2.image_infos.emplace_back(image_info);
+
+    descriptor_write.dstSet = descriptor_set2.set_;
+    descriptor_write.descriptorCount = 2;
+    descriptor_write.pImageInfo = descriptor_set2.image_infos.data();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-dstArrayElement-00321");
+    descriptor_write.dstArrayElement = 1;
+    descriptor_set2.descriptor_writes.emplace_back(descriptor_write);
+    descriptor_set2.UpdateDescriptorSets();
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkWriteDescriptorSet-dstArrayElement-00321");
+    descriptor_write.dstArrayElement = 3;
+    descriptor_set2.descriptor_writes.clear();
+    descriptor_set2.descriptor_writes.emplace_back(descriptor_write);
+    descriptor_set2.UpdateDescriptorSets();
+    m_errorMonitor->VerifyFound();
+}
diff --git a/src/third_party/vulkan-validation-layers/src/tests/vklayertests_imageless_framebuffer.cpp b/src/third_party/vulkan-validation-layers/src/tests/vklayertests_imageless_framebuffer.cpp
new file mode 100644
index 0000000..38749e8
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/vklayertests_imageless_framebuffer.cpp
@@ -0,0 +1,1099 @@
+/*
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2019 Google, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Author: Chia-I Wu <olvaffe@gmail.com>
+ * Author: Chris Forbes <chrisf@ijw.co.nz>
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Mike Stroyan <mike@LunarG.com>
+ * Author: Tobin Ehlis <tobine@google.com>
+ * Author: Tony Barbour <tony@LunarG.com>
+ * Author: Cody Northrop <cnorthrop@google.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ * Author: Jeremy Kniager <jeremyk@lunarg.com>
+ * Author: Shannon McPherson <shannon@lunarg.com>
+ * Author: John Zulauf <jzulauf@lunarg.com>
+ * Author: Tobias Hector <tobias.hector@amd.com>
+ */
+
+#include "cast_utils.h"
+#include "layer_validation_tests.h"
+
+TEST_F(VkLayerTest, ImagelessFramebufferRenderPassBeginImageViewMismatchTests) {
+    TEST_DESCRIPTION(
+        "Begin a renderPass where the image views specified do not match the parameters used to create the framebuffer and render "
+        "pass.");
+
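+    // With VK_KHR_imageless_framebuffer, the framebuffer only declares attachment parameters via
+    // VkFramebufferAttachmentImageInfoKHR; the actual image views are bound at
+    // vkCmdBeginRenderPass time through a VkRenderPassAttachmentBeginInfoKHR chained into
+    // VkRenderPassBeginInfo, and each view must match the declared parameters.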
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required device extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME);
+    } else {
+        printf("%s test requires VK_KHR_imageless_framebuffer, not available.  Skipping.\n", kSkipPrefix);
+        return;
+    }
+
+    VkPhysicalDeviceImagelessFramebufferFeaturesKHR physicalDeviceImagelessFramebufferFeatures = {};
+    physicalDeviceImagelessFramebufferFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR;
+    physicalDeviceImagelessFramebufferFeatures.imagelessFramebuffer = VK_TRUE;
+    VkPhysicalDeviceFeatures2 physicalDeviceFeatures2 = {};
+    physicalDeviceFeatures2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
+    physicalDeviceFeatures2.pNext = &physicalDeviceImagelessFramebufferFeatures;
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &physicalDeviceFeatures2, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+    uint32_t attachmentWidth = 512;
+    uint32_t attachmentHeight = 512;
+    VkFormat attachmentFormats[2] = {VK_FORMAT_R8G8B8A8_UNORM, VK_FORMAT_B8G8R8A8_UNORM};
+    VkFormat framebufferAttachmentFormats[3] = {VK_FORMAT_R8G8B8A8_UNORM, VK_FORMAT_B8G8R8A8_UNORM, VK_FORMAT_B8G8R8A8_UNORM};
+
+    // Create a renderPass with a single attachment
+    VkAttachmentDescription attachmentDescription = {};
+    attachmentDescription.format = attachmentFormats[0];
+    attachmentDescription.samples = VK_SAMPLE_COUNT_1_BIT;
+    attachmentDescription.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+    VkAttachmentReference attachmentReference = {};
+    attachmentReference.layout = VK_IMAGE_LAYOUT_GENERAL;
+    VkSubpassDescription subpassDescription = {};
+    subpassDescription.colorAttachmentCount = 1;
+    subpassDescription.pColorAttachments = &attachmentReference;
+    VkRenderPassCreateInfo renderPassCreateInfo = {};
+    renderPassCreateInfo.subpassCount = 1;
+    renderPassCreateInfo.pSubpasses = &subpassDescription;
+    renderPassCreateInfo.attachmentCount = 1;
+    renderPassCreateInfo.pAttachments = &attachmentDescription;
+    renderPassCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+    VkRenderPass renderPass;
+    vk::CreateRenderPass(m_device->device(), &renderPassCreateInfo, NULL, &renderPass);
+
+    VkFramebufferAttachmentImageInfoKHR framebufferAttachmentImageInfo = {};
+    framebufferAttachmentImageInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
+    framebufferAttachmentImageInfo.flags = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
+    framebufferAttachmentImageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+    framebufferAttachmentImageInfo.width = attachmentWidth;
+    framebufferAttachmentImageInfo.height = attachmentHeight;
+    framebufferAttachmentImageInfo.layerCount = 1;
+    framebufferAttachmentImageInfo.viewFormatCount = 2;
+    framebufferAttachmentImageInfo.pViewFormats = framebufferAttachmentFormats;
+    VkFramebufferAttachmentsCreateInfoKHR framebufferAttachmentsCreateInfo = {};
+    framebufferAttachmentsCreateInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR;
+    framebufferAttachmentsCreateInfo.attachmentImageInfoCount = 1;
+    framebufferAttachmentsCreateInfo.pAttachmentImageInfos = &framebufferAttachmentImageInfo;
+    VkFramebufferCreateInfo framebufferCreateInfo = {};
+    framebufferCreateInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
+    framebufferCreateInfo.pNext = &framebufferAttachmentsCreateInfo;
+    framebufferCreateInfo.flags = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR;
+    framebufferCreateInfo.width = attachmentWidth;
+    framebufferCreateInfo.height = attachmentHeight;
+    framebufferCreateInfo.layers = 1;
+    framebufferCreateInfo.attachmentCount = 1;
+    framebufferCreateInfo.pAttachments = nullptr;
+    framebufferCreateInfo.renderPass = renderPass;
+    VkFramebuffer framebuffer;
+
+    VkImageFormatListCreateInfoKHR imageFormatListCreateInfo = {};
+    imageFormatListCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR;
+    imageFormatListCreateInfo.viewFormatCount = 2;
+    imageFormatListCreateInfo.pViewFormats = attachmentFormats;
+    VkImageCreateInfo imageCreateInfo = {};
+    imageCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    imageCreateInfo.pNext = &imageFormatListCreateInfo;
+    imageCreateInfo.flags = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
+    imageCreateInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+    imageCreateInfo.extent.width = attachmentWidth;
+    imageCreateInfo.extent.height = attachmentHeight;
+    imageCreateInfo.extent.depth = 1;
+    imageCreateInfo.arrayLayers = 1;
+    imageCreateInfo.mipLevels = 10;
+    imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
+    imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
+    imageCreateInfo.format = attachmentFormats[0];
+
+    VkImageObj imageObject(m_device);
+    imageObject.init(&imageCreateInfo);
+    VkImage image = imageObject.image();
+
+    VkImageViewCreateInfo imageViewCreateInfo = {};
+    imageViewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    imageViewCreateInfo.image = image;
+    imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    imageViewCreateInfo.format = attachmentFormats[0];
+    imageViewCreateInfo.subresourceRange.layerCount = 1;
+    imageViewCreateInfo.subresourceRange.levelCount = 1;
+    imageViewCreateInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    VkImageView imageView;
+    vk::CreateImageView(m_device->device(), &imageViewCreateInfo, NULL, &imageView);
+
+    VkRenderPassAttachmentBeginInfoKHR renderPassAttachmentBeginInfo = {};
+    renderPassAttachmentBeginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR;
+    renderPassAttachmentBeginInfo.pNext = nullptr;
+    renderPassAttachmentBeginInfo.attachmentCount = 1;
+    renderPassAttachmentBeginInfo.pAttachments = &imageView;
+    VkRenderPassBeginInfo renderPassBeginInfo = {};
+    renderPassBeginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
+    renderPassBeginInfo.pNext = &renderPassAttachmentBeginInfo;
+    renderPassBeginInfo.renderPass = renderPass;
+    renderPassBeginInfo.renderArea.extent.width = attachmentWidth;
+    renderPassBeginInfo.renderArea.extent.height = attachmentHeight;
+
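+    // Each negative case below recreates the framebuffer (or the image view) with exactly one
+    // parameter mismatched against the VkFramebufferAttachmentImageInfoKHR above and expects the
+    // corresponding VUID at render pass begin time.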
+    // Positive test first
+    VkCommandBufferBeginInfo cmd_begin_info = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr,
+                                               VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, nullptr};
+    framebufferCreateInfo.pAttachments = nullptr;
+    framebufferCreateInfo.flags = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR;
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    renderPassBeginInfo.framebuffer = framebuffer;
+    vk::BeginCommandBuffer(m_commandBuffer->handle(), &cmd_begin_info);
+    m_errorMonitor->ExpectSuccess();
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
+    m_errorMonitor->VerifyNotFound();
+    vk::ResetCommandBuffer(m_commandBuffer->handle(), 0);
+    vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+
+    // Imageless framebuffer creation bit not present
+    framebufferCreateInfo.pAttachments = &imageView;
+    framebufferCreateInfo.flags = 0;
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    renderPassBeginInfo.framebuffer = framebuffer;
+    TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &renderPassBeginInfo, rp2Supported,
+                        "VUID-VkRenderPassBeginInfo-framebuffer-03207", "VUID-VkRenderPassBeginInfo-framebuffer-03207");
+    vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    framebufferCreateInfo.pAttachments = nullptr;
+    framebufferCreateInfo.flags = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR;
+
+    // Mismatched number of attachments
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    renderPassAttachmentBeginInfo.attachmentCount = 2;
+    renderPassBeginInfo.framebuffer = framebuffer;
+    TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &renderPassBeginInfo, rp2Supported,
+                        "VUID-VkRenderPassBeginInfo-framebuffer-03208", "VUID-VkRenderPassBeginInfo-framebuffer-03208");
+    renderPassAttachmentBeginInfo.attachmentCount = 1;
+    vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+
+    // Mismatched flags
+    framebufferAttachmentImageInfo.flags = 0;
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    renderPassBeginInfo.framebuffer = framebuffer;
+    TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &renderPassBeginInfo, rp2Supported,
+                        "VUID-VkRenderPassBeginInfo-framebuffer-03209", "VUID-VkRenderPassBeginInfo-framebuffer-03209");
+    vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    framebufferAttachmentImageInfo.flags = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
+
+    // Mismatched usage
+    framebufferAttachmentImageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    renderPassBeginInfo.framebuffer = framebuffer;
+    TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &renderPassBeginInfo, rp2Supported,
+                        "VUID-VkRenderPassBeginInfo-framebuffer-03210", "VUID-VkRenderPassBeginInfo-framebuffer-03210");
+    vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    framebufferAttachmentImageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+
+    // Mismatched width
+    framebufferAttachmentImageInfo.width += 1;
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    renderPassBeginInfo.framebuffer = framebuffer;
+    TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &renderPassBeginInfo, rp2Supported,
+                        "VUID-VkRenderPassBeginInfo-framebuffer-03211", "VUID-VkRenderPassBeginInfo-framebuffer-03211");
+    vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    framebufferAttachmentImageInfo.width -= 1;
+
+    // Mismatched height
+    framebufferAttachmentImageInfo.height += 1;
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    renderPassBeginInfo.framebuffer = framebuffer;
+    TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &renderPassBeginInfo, rp2Supported,
+                        "VUID-VkRenderPassBeginInfo-framebuffer-03212", "VUID-VkRenderPassBeginInfo-framebuffer-03212");
+    vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    framebufferAttachmentImageInfo.height -= 1;
+
+    // Mismatched layer count
+    framebufferAttachmentImageInfo.layerCount += 1;
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    renderPassBeginInfo.framebuffer = framebuffer;
+    TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &renderPassBeginInfo, rp2Supported,
+                        "VUID-VkRenderPassBeginInfo-framebuffer-03213", "VUID-VkRenderPassBeginInfo-framebuffer-03213");
+    vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    framebufferAttachmentImageInfo.layerCount -= 1;
+
+    // Mismatched view format count
+    framebufferAttachmentImageInfo.viewFormatCount = 3;
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    renderPassBeginInfo.framebuffer = framebuffer;
+    TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &renderPassBeginInfo, rp2Supported,
+                        "VUID-VkRenderPassBeginInfo-framebuffer-03214", "VUID-VkRenderPassBeginInfo-framebuffer-03214");
+    vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    framebufferAttachmentImageInfo.viewFormatCount = 2;
+
+    // Mismatched format lists
+    framebufferAttachmentFormats[1] = VK_FORMAT_B8G8R8A8_SRGB;
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    renderPassBeginInfo.framebuffer = framebuffer;
+    TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &renderPassBeginInfo, rp2Supported,
+                        "VUID-VkRenderPassBeginInfo-framebuffer-03215", "VUID-VkRenderPassBeginInfo-framebuffer-03215");
+    vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    framebufferAttachmentFormats[1] = VK_FORMAT_B8G8R8A8_UNORM;
+
+    // Mismatched formats
+    VkImageView imageView2;
+    imageViewCreateInfo.format = attachmentFormats[1];
+    vk::CreateImageView(m_device->device(), &imageViewCreateInfo, nullptr, &imageView2);
+    renderPassAttachmentBeginInfo.pAttachments = &imageView2;
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    renderPassBeginInfo.framebuffer = framebuffer;
+    TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &renderPassBeginInfo, rp2Supported,
+                        "VUID-VkRenderPassBeginInfo-framebuffer-03216", "VUID-VkRenderPassBeginInfo-framebuffer-03216");
+    vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    vk::DestroyImageView(m_device->device(), imageView2, nullptr);
+    renderPassAttachmentBeginInfo.pAttachments = &imageView;
+    imageViewCreateInfo.format = attachmentFormats[0];
+
+    // Mismatched sample counts
+    imageCreateInfo.samples = VK_SAMPLE_COUNT_4_BIT;
+    imageCreateInfo.mipLevels = 1;
+    VkImageObj imageObject2(m_device);
+    imageObject2.init(&imageCreateInfo);
+    imageViewCreateInfo.image = imageObject2.image();
+    vk::CreateImageView(m_device->device(), &imageViewCreateInfo, nullptr, &imageView2);
+    renderPassAttachmentBeginInfo.pAttachments = &imageView2;
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    renderPassBeginInfo.framebuffer = framebuffer;
+    TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &renderPassBeginInfo, rp2Supported,
+                        "VUID-VkRenderPassBeginInfo-framebuffer-03217", "VUID-VkRenderPassBeginInfo-framebuffer-03217");
+    vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    vk::DestroyImageView(m_device->device(), imageView2, nullptr);
+    renderPassAttachmentBeginInfo.pAttachments = &imageView;
+    imageViewCreateInfo.image = imageObject.image();
+    imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
+    imageCreateInfo.mipLevels = 10;
+
+    // Mismatched level counts
+    imageViewCreateInfo.subresourceRange.levelCount = 2;
+    vk::CreateImageView(m_device->device(), &imageViewCreateInfo, nullptr, &imageView2);
+    renderPassAttachmentBeginInfo.pAttachments = &imageView2;
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    renderPassBeginInfo.framebuffer = framebuffer;
+    TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &renderPassBeginInfo, rp2Supported,
+                        "VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-03218",
+                        "VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-03218");
+    vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    vk::DestroyImageView(m_device->device(), imageView2, nullptr);
+    renderPassAttachmentBeginInfo.pAttachments = &imageView;
+    imageViewCreateInfo.subresourceRange.levelCount = 1;
+
+    // Non-identity component swizzle
+    imageViewCreateInfo.components.r = VK_COMPONENT_SWIZZLE_A;
+    vk::CreateImageView(m_device->device(), &imageViewCreateInfo, nullptr, &imageView2);
+    renderPassAttachmentBeginInfo.pAttachments = &imageView2;
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    renderPassBeginInfo.framebuffer = framebuffer;
+    TestRenderPassBegin(m_errorMonitor, m_device->device(), m_commandBuffer->handle(), &renderPassBeginInfo, rp2Supported,
+                        "VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-03219",
+                        "VUID-VkRenderPassAttachmentBeginInfoKHR-pAttachments-03219");
+    vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    vk::DestroyImageView(m_device->device(), imageView2, nullptr);
+    renderPassAttachmentBeginInfo.pAttachments = &imageView;
+    imageViewCreateInfo.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
+
+    vk::DestroyRenderPass(m_device->device(), renderPass, nullptr);
+    vk::DestroyImageView(m_device->device(), imageView, nullptr);
+}
+
+TEST_F(VkLayerTest, ImagelessFramebufferFeatureEnableTest) {
+    TEST_DESCRIPTION("Use imageless framebuffer functionality without enabling the feature");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required device extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    uint32_t attachmentWidth = 512;
+    uint32_t attachmentHeight = 512;
+    VkFormat attachmentFormat = VK_FORMAT_R8G8B8A8_UNORM;
+
+    // Create a renderPass with a single attachment
+    VkAttachmentDescription attachmentDescription = {};
+    attachmentDescription.format = attachmentFormat;
+    attachmentDescription.samples = VK_SAMPLE_COUNT_1_BIT;
+    attachmentDescription.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+    VkAttachmentReference attachmentReference = {};
+    attachmentReference.layout = VK_IMAGE_LAYOUT_GENERAL;
+    VkSubpassDescription subpassDescription = {};
+    subpassDescription.colorAttachmentCount = 1;
+    subpassDescription.pColorAttachments = &attachmentReference;
+    VkRenderPassCreateInfo renderPassCreateInfo = {};
+    renderPassCreateInfo.subpassCount = 1;
+    renderPassCreateInfo.pSubpasses = &subpassDescription;
+    renderPassCreateInfo.attachmentCount = 1;
+    renderPassCreateInfo.pAttachments = &attachmentDescription;
+    renderPassCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+    VkRenderPass renderPass;
+    vk::CreateRenderPass(m_device->device(), &renderPassCreateInfo, NULL, &renderPass);
+
+    VkFramebufferAttachmentImageInfoKHR framebufferAttachmentImageInfo = {};
+    framebufferAttachmentImageInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
+    framebufferAttachmentImageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+    framebufferAttachmentImageInfo.width = attachmentWidth;
+    framebufferAttachmentImageInfo.height = attachmentHeight;
+    framebufferAttachmentImageInfo.layerCount = 1;
+    framebufferAttachmentImageInfo.viewFormatCount = 1;
+    framebufferAttachmentImageInfo.pViewFormats = &attachmentFormat;
+    VkFramebufferAttachmentsCreateInfoKHR framebufferAttachmentsCreateInfo = {};
+    framebufferAttachmentsCreateInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR;
+    framebufferAttachmentsCreateInfo.attachmentImageInfoCount = 1;
+    framebufferAttachmentsCreateInfo.pAttachmentImageInfos = &framebufferAttachmentImageInfo;
+    VkFramebufferCreateInfo framebufferCreateInfo = {};
+    framebufferCreateInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
+    framebufferCreateInfo.pNext = &framebufferAttachmentsCreateInfo;
+    framebufferCreateInfo.flags = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR;
+    framebufferCreateInfo.width = attachmentWidth;
+    framebufferCreateInfo.height = attachmentHeight;
+    framebufferCreateInfo.layers = 1;
+    framebufferCreateInfo.renderPass = renderPass;
+    framebufferCreateInfo.attachmentCount = 1;
+    VkFramebuffer framebuffer = VK_NULL_HANDLE;
+
+    // Imageless framebuffer creation bit set without the imagelessFramebuffer feature enabled
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-flags-03189");
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    m_errorMonitor->VerifyFound();
+
+    if (framebuffer != VK_NULL_HANDLE) {
+        vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    }
+    vk::DestroyRenderPass(m_device->device(), renderPass, nullptr);
+}
+
+TEST_F(VkLayerTest, ImagelessFramebufferCreationTests) {
+    TEST_DESCRIPTION("Create an imageless framebuffer in various invalid ways");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+
+    bool multiviewSupported = rp2Supported;
+    if (!rp2Supported) {
+        if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MULTIVIEW_EXTENSION_NAME)) {
+            m_device_extension_names.push_back(VK_KHR_MULTIVIEW_EXTENSION_NAME);
+            multiviewSupported = true;
+        }
+    }
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required device extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME);
+        return;
+    }
+
+    VkPhysicalDeviceImagelessFramebufferFeaturesKHR physicalDeviceImagelessFramebufferFeatures = {};
+    physicalDeviceImagelessFramebufferFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR;
+    physicalDeviceImagelessFramebufferFeatures.imagelessFramebuffer = VK_TRUE;
+    VkPhysicalDeviceFeatures2 physicalDeviceFeatures2 = {};
+    physicalDeviceFeatures2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
+    physicalDeviceFeatures2.pNext = &physicalDeviceImagelessFramebufferFeatures;
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &physicalDeviceFeatures2, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    uint32_t attachmentWidth = 512;
+    uint32_t attachmentHeight = 512;
+    VkFormat attachmentFormat = VK_FORMAT_R8G8B8A8_UNORM;
+
+    // Create a renderPass with a single attachment
+    VkAttachmentDescription attachmentDescription = {};
+    attachmentDescription.format = attachmentFormat;
+    attachmentDescription.samples = VK_SAMPLE_COUNT_1_BIT;
+    attachmentDescription.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+    VkAttachmentReference attachmentReference = {};
+    attachmentReference.layout = VK_IMAGE_LAYOUT_GENERAL;
+    VkSubpassDescription subpassDescription = {};
+    subpassDescription.colorAttachmentCount = 1;
+    subpassDescription.pColorAttachments = &attachmentReference;
+    VkRenderPassCreateInfo renderPassCreateInfo = {};
+    renderPassCreateInfo.subpassCount = 1;
+    renderPassCreateInfo.pSubpasses = &subpassDescription;
+    renderPassCreateInfo.attachmentCount = 1;
+    renderPassCreateInfo.pAttachments = &attachmentDescription;
+    renderPassCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+    VkRenderPass renderPass;
+    vk::CreateRenderPass(m_device->device(), &renderPassCreateInfo, NULL, &renderPass);
+
+    VkFramebufferAttachmentImageInfoKHR framebufferAttachmentImageInfo = {};
+    framebufferAttachmentImageInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
+    framebufferAttachmentImageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+    framebufferAttachmentImageInfo.width = attachmentWidth;
+    framebufferAttachmentImageInfo.height = attachmentHeight;
+    framebufferAttachmentImageInfo.layerCount = 1;
+    framebufferAttachmentImageInfo.viewFormatCount = 1;
+    framebufferAttachmentImageInfo.pViewFormats = &attachmentFormat;
+    VkFramebufferAttachmentsCreateInfoKHR framebufferAttachmentsCreateInfo = {};
+    framebufferAttachmentsCreateInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR;
+    framebufferAttachmentsCreateInfo.attachmentImageInfoCount = 1;
+    framebufferAttachmentsCreateInfo.pAttachmentImageInfos = &framebufferAttachmentImageInfo;
+    VkFramebufferCreateInfo framebufferCreateInfo = {};
+    framebufferCreateInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
+    framebufferCreateInfo.pNext = &framebufferAttachmentsCreateInfo;
+    framebufferCreateInfo.flags = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR;
+    framebufferCreateInfo.width = attachmentWidth;
+    framebufferCreateInfo.height = attachmentHeight;
+    framebufferCreateInfo.layers = 1;
+    framebufferCreateInfo.renderPass = renderPass;
+    framebufferCreateInfo.attachmentCount = 1;
+    VkFramebuffer framebuffer = VK_NULL_HANDLE;
+
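+    // Unlike the render-pass-begin checks in the first test, these cases validate consistency
+    // between VkFramebufferCreateInfo and its chained VkFramebufferAttachmentsCreateInfoKHR at
+    // vkCreateFramebuffer time.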
+    // Attachments info not present
+    framebufferCreateInfo.pNext = nullptr;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-flags-03190");
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    m_errorMonitor->VerifyFound();
+    if (framebuffer != VK_NULL_HANDLE) {
+        vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    }
+    framebufferCreateInfo.pNext = &framebufferAttachmentsCreateInfo;
+
+    // Mismatched attachment counts
+    framebufferAttachmentsCreateInfo.attachmentImageInfoCount = 2;
+    VkFramebufferAttachmentImageInfoKHR framebufferAttachmentImageInfos[2] = {framebufferAttachmentImageInfo,
+                                                                              framebufferAttachmentImageInfo};
+    framebufferAttachmentsCreateInfo.pAttachmentImageInfos = framebufferAttachmentImageInfos;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-flags-03191");
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    m_errorMonitor->VerifyFound();
+    if (framebuffer != VK_NULL_HANDLE) {
+        vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    }
+    framebufferAttachmentsCreateInfo.pAttachmentImageInfos = &framebufferAttachmentImageInfo;
+    framebufferAttachmentsCreateInfo.attachmentImageInfoCount = 1;
+
+    // Mismatched format list
+    attachmentFormat = VK_FORMAT_B8G8R8A8_UNORM;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-flags-03205");
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    m_errorMonitor->VerifyFound();
+    if (framebuffer != VK_NULL_HANDLE) {
+        vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    }
+    attachmentFormat = VK_FORMAT_R8G8B8A8_UNORM;
+
+    // Mismatched layer count, multiview disabled
+    framebufferCreateInfo.layers = 2;
+    const char* mismatchedLayersNoMultiviewVuid =
+        multiviewSupported ? "VUID-VkFramebufferCreateInfo-renderPass-03199" : "VUID-VkFramebufferCreateInfo-flags-03200";
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, mismatchedLayersNoMultiviewVuid);
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    m_errorMonitor->VerifyFound();
+    if (framebuffer != VK_NULL_HANDLE) {
+        vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    }
+    framebufferCreateInfo.layers = 1;
+
+    // Mismatched width
+    framebufferCreateInfo.width += 1;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-flags-03192");
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    m_errorMonitor->VerifyFound();
+    if (framebuffer != VK_NULL_HANDLE) {
+        vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    }
+    framebufferCreateInfo.width -= 1;
+
+    // Mismatched height
+    framebufferCreateInfo.height += 1;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-flags-03193");
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    m_errorMonitor->VerifyFound();
+    if (framebuffer != VK_NULL_HANDLE) {
+        vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    }
+    framebufferCreateInfo.height -= 1;
+
+    vk::DestroyRenderPass(m_device->device(), renderPass, nullptr);
+}
+
+TEST_F(VkLayerTest, ImagelessFramebufferAttachmentImageUsageMismatchTests) {
+    TEST_DESCRIPTION("Create an imageless framebuffer with mismatched attachment image usage");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required device extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME);
+        return;
+    }
+
+    VkPhysicalDeviceImagelessFramebufferFeaturesKHR physicalDeviceImagelessFramebufferFeatures = {};
+    physicalDeviceImagelessFramebufferFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR;
+    physicalDeviceImagelessFramebufferFeatures.imagelessFramebuffer = VK_TRUE;
+    VkPhysicalDeviceFeatures2 physicalDeviceFeatures2 = {};
+    physicalDeviceFeatures2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
+    physicalDeviceFeatures2.pNext = &physicalDeviceImagelessFramebufferFeatures;
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &physicalDeviceFeatures2, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    uint32_t attachmentWidth = 512;
+    uint32_t attachmentHeight = 512;
+    VkFormat colorAndInputAttachmentFormat = VK_FORMAT_R8G8B8A8_UNORM;
+    VkFormat depthStencilAttachmentFormat = VK_FORMAT_D32_SFLOAT_S8_UINT;
+
+    VkAttachmentDescription attachmentDescriptions[4] = {};
+    // Color attachment
+    attachmentDescriptions[0].format = colorAndInputAttachmentFormat;
+    attachmentDescriptions[0].samples = VK_SAMPLE_COUNT_4_BIT;
+    attachmentDescriptions[0].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+    // Color resolve attachment
+    attachmentDescriptions[1].format = colorAndInputAttachmentFormat;
+    attachmentDescriptions[1].samples = VK_SAMPLE_COUNT_1_BIT;
+    attachmentDescriptions[1].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+    // Depth stencil attachment
+    attachmentDescriptions[2].format = depthStencilAttachmentFormat;
+    attachmentDescriptions[2].samples = VK_SAMPLE_COUNT_4_BIT;
+    attachmentDescriptions[2].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+    // Input attachment
+    attachmentDescriptions[3].format = colorAndInputAttachmentFormat;
+    attachmentDescriptions[3].samples = VK_SAMPLE_COUNT_1_BIT;
+    attachmentDescriptions[3].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+
+    VkAttachmentReference colorAttachmentReference = {};
+    colorAttachmentReference.layout = VK_IMAGE_LAYOUT_GENERAL;
+    colorAttachmentReference.attachment = 0;
+    VkAttachmentReference colorResolveAttachmentReference = {};
+    colorResolveAttachmentReference.layout = VK_IMAGE_LAYOUT_GENERAL;
+    colorResolveAttachmentReference.attachment = 1;
+    VkAttachmentReference depthStencilAttachmentReference = {};
+    depthStencilAttachmentReference.layout = VK_IMAGE_LAYOUT_GENERAL;
+    depthStencilAttachmentReference.attachment = 2;
+    VkAttachmentReference inputAttachmentReference = {};
+    inputAttachmentReference.layout = VK_IMAGE_LAYOUT_GENERAL;
+    inputAttachmentReference.attachment = 3;
+    VkSubpassDescription subpassDescription = {};
+    subpassDescription.colorAttachmentCount = 1;
+    subpassDescription.pColorAttachments = &colorAttachmentReference;
+    subpassDescription.pResolveAttachments = &colorResolveAttachmentReference;
+    subpassDescription.pDepthStencilAttachment = &depthStencilAttachmentReference;
+    subpassDescription.inputAttachmentCount = 1;
+    subpassDescription.pInputAttachments = &inputAttachmentReference;
+
+    VkRenderPassCreateInfo renderPassCreateInfo = {};
+    renderPassCreateInfo.attachmentCount = 4;
+    renderPassCreateInfo.subpassCount = 1;
+    renderPassCreateInfo.pSubpasses = &subpassDescription;
+    renderPassCreateInfo.pAttachments = attachmentDescriptions;
+    renderPassCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+    VkRenderPass renderPass;
+    vk::CreateRenderPass(m_device->device(), &renderPassCreateInfo, nullptr, &renderPass);
+
+    VkFramebufferAttachmentImageInfoKHR framebufferAttachmentImageInfos[4] = {};
+    // Color attachment
+    framebufferAttachmentImageInfos[0].sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
+    framebufferAttachmentImageInfos[0].width = attachmentWidth;
+    framebufferAttachmentImageInfos[0].height = attachmentHeight;
+    framebufferAttachmentImageInfos[0].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+    framebufferAttachmentImageInfos[0].layerCount = 1;
+    framebufferAttachmentImageInfos[0].viewFormatCount = 1;
+    framebufferAttachmentImageInfos[0].pViewFormats = &colorAndInputAttachmentFormat;
+    // Color resolve attachment
+    framebufferAttachmentImageInfos[1].sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
+    framebufferAttachmentImageInfos[1].width = attachmentWidth;
+    framebufferAttachmentImageInfos[1].height = attachmentHeight;
+    framebufferAttachmentImageInfos[1].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+    framebufferAttachmentImageInfos[1].layerCount = 1;
+    framebufferAttachmentImageInfos[1].viewFormatCount = 1;
+    framebufferAttachmentImageInfos[1].pViewFormats = &colorAndInputAttachmentFormat;
+    // Depth stencil attachment
+    framebufferAttachmentImageInfos[2].sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
+    framebufferAttachmentImageInfos[2].width = attachmentWidth;
+    framebufferAttachmentImageInfos[2].height = attachmentHeight;
+    framebufferAttachmentImageInfos[2].usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
+    framebufferAttachmentImageInfos[2].layerCount = 1;
+    framebufferAttachmentImageInfos[2].viewFormatCount = 1;
+    framebufferAttachmentImageInfos[2].pViewFormats = &depthStencilAttachmentFormat;
+    // Input attachment
+    framebufferAttachmentImageInfos[3].sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
+    framebufferAttachmentImageInfos[3].width = attachmentWidth;
+    framebufferAttachmentImageInfos[3].height = attachmentHeight;
+    framebufferAttachmentImageInfos[3].usage = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
+    framebufferAttachmentImageInfos[3].layerCount = 1;
+    framebufferAttachmentImageInfos[3].viewFormatCount = 1;
+    framebufferAttachmentImageInfos[3].pViewFormats = &colorAndInputAttachmentFormat;
+    VkFramebufferAttachmentsCreateInfoKHR framebufferAttachmentsCreateInfo = {};
+    framebufferAttachmentsCreateInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR;
+    framebufferAttachmentsCreateInfo.attachmentImageInfoCount = 4;
+    framebufferAttachmentsCreateInfo.pAttachmentImageInfos = framebufferAttachmentImageInfos;
+    VkFramebufferCreateInfo framebufferCreateInfo = {};
+    framebufferCreateInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
+    framebufferCreateInfo.pNext = &framebufferAttachmentsCreateInfo;
+    framebufferCreateInfo.flags = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR;
+    framebufferCreateInfo.width = attachmentWidth;
+    framebufferCreateInfo.height = attachmentHeight;
+    framebufferCreateInfo.layers = 1;
+    framebufferCreateInfo.renderPass = renderPass;
+    framebufferCreateInfo.attachmentCount = 4;
+    VkFramebuffer framebuffer = VK_NULL_HANDLE;
+
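+    // Each VkFramebufferAttachmentImageInfoKHR::usage must include the usage implied by how the
+    // render pass uses that attachment (color, resolve, depth/stencil, or input); the cases below
+    // drop the required usage bit for one attachment at a time.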
+    // Color attachment, mismatched usage
+    framebufferAttachmentImageInfos[0].usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-flags-03201");
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    m_errorMonitor->VerifyFound();
+    if (framebuffer != VK_NULL_HANDLE) {
+        vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    }
+    framebufferAttachmentImageInfos[0].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+
+    // Color resolve attachment, mismatched usage
+    framebufferAttachmentImageInfos[1].usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-flags-03201");
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    m_errorMonitor->VerifyFound();
+    if (framebuffer != VK_NULL_HANDLE) {
+        vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    }
+    framebufferAttachmentImageInfos[1].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+
+    // Depth stencil attachment, mismatched usage
+    framebufferAttachmentImageInfos[2].usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-flags-03202");
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    m_errorMonitor->VerifyFound();
+    if (framebuffer != VK_NULL_HANDLE) {
+        vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    }
+    framebufferAttachmentImageInfos[2].usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
+
+    // Input attachment, mismatched usage
+    framebufferAttachmentImageInfos[3].usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-flags-03204");
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    m_errorMonitor->VerifyFound();
+    if (framebuffer != VK_NULL_HANDLE) {
+        vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    }
+    framebufferAttachmentImageInfos[3].usage = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
+
+    vk::DestroyRenderPass(m_device->device(), renderPass, nullptr);
+}
+
+TEST_F(VkLayerTest, ImagelessFramebufferAttachmentMultiviewImageLayerCountMismatchTests) {
+    TEST_DESCRIPTION("Create an imageless framebuffer against a multiview-enabled render pass with mismatched layer counts");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MULTIVIEW_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_MULTIVIEW_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required device extension %s; skipped.\n", kSkipPrefix, VK_KHR_MULTIVIEW_EXTENSION_NAME);
+        return;
+    }
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required device extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME);
+        return;
+    }
+
+    VkPhysicalDeviceImagelessFramebufferFeaturesKHR physicalDeviceImagelessFramebufferFeatures = {};
+    physicalDeviceImagelessFramebufferFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR;
+    physicalDeviceImagelessFramebufferFeatures.imagelessFramebuffer = VK_TRUE;
+    VkPhysicalDeviceFeatures2 physicalDeviceFeatures2 = {};
+    physicalDeviceFeatures2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
+    physicalDeviceFeatures2.pNext = &physicalDeviceImagelessFramebufferFeatures;
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &physicalDeviceFeatures2, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    uint32_t attachmentWidth = 512;
+    uint32_t attachmentHeight = 512;
+    VkFormat colorAndInputAttachmentFormat = VK_FORMAT_R8G8B8A8_UNORM;
+    VkFormat depthStencilAttachmentFormat = VK_FORMAT_D32_SFLOAT_S8_UINT;
+
+    VkAttachmentDescription attachmentDescriptions[4] = {};
+    // Color attachment
+    attachmentDescriptions[0].format = colorAndInputAttachmentFormat;
+    attachmentDescriptions[0].samples = VK_SAMPLE_COUNT_4_BIT;
+    attachmentDescriptions[0].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+    // Color resolve attachment
+    attachmentDescriptions[1].format = colorAndInputAttachmentFormat;
+    attachmentDescriptions[1].samples = VK_SAMPLE_COUNT_1_BIT;
+    attachmentDescriptions[1].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+    // Depth stencil attachment
+    attachmentDescriptions[2].format = depthStencilAttachmentFormat;
+    attachmentDescriptions[2].samples = VK_SAMPLE_COUNT_4_BIT;
+    attachmentDescriptions[2].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+    // Input attachment
+    attachmentDescriptions[3].format = colorAndInputAttachmentFormat;
+    attachmentDescriptions[3].samples = VK_SAMPLE_COUNT_1_BIT;
+    attachmentDescriptions[3].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+
+    VkAttachmentReference colorAttachmentReference = {};
+    colorAttachmentReference.layout = VK_IMAGE_LAYOUT_GENERAL;
+    colorAttachmentReference.attachment = 0;
+    VkAttachmentReference colorResolveAttachmentReference = {};
+    colorResolveAttachmentReference.layout = VK_IMAGE_LAYOUT_GENERAL;
+    colorResolveAttachmentReference.attachment = 1;
+    VkAttachmentReference depthStencilAttachmentReference = {};
+    depthStencilAttachmentReference.layout = VK_IMAGE_LAYOUT_GENERAL;
+    depthStencilAttachmentReference.attachment = 2;
+    VkAttachmentReference inputAttachmentReference = {};
+    inputAttachmentReference.layout = VK_IMAGE_LAYOUT_GENERAL;
+    inputAttachmentReference.attachment = 3;
+    VkSubpassDescription subpassDescription = {};
+    subpassDescription.colorAttachmentCount = 1;
+    subpassDescription.pColorAttachments = &colorAttachmentReference;
+    subpassDescription.pResolveAttachments = &colorResolveAttachmentReference;
+    subpassDescription.pDepthStencilAttachment = &depthStencilAttachmentReference;
+    subpassDescription.inputAttachmentCount = 1;
+    subpassDescription.pInputAttachments = &inputAttachmentReference;
+
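+    // A view mask of 0x3 enables views 0 and 1, so each imageless attachment must declare
+    // layerCount >= 2 (one layer per enabled view).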
+    uint32_t viewMask = 0x3u;
+    VkRenderPassMultiviewCreateInfo renderPassMultiviewCreateInfo = {};
+    renderPassMultiviewCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO;
+    renderPassMultiviewCreateInfo.subpassCount = 1;
+    renderPassMultiviewCreateInfo.pViewMasks = &viewMask;
+    VkRenderPassCreateInfo renderPassCreateInfo = {};
+    renderPassCreateInfo.pNext = &renderPassMultiviewCreateInfo;
+    renderPassCreateInfo.attachmentCount = 4;
+    renderPassCreateInfo.subpassCount = 1;
+    renderPassCreateInfo.pSubpasses = &subpassDescription;
+    renderPassCreateInfo.pAttachments = attachmentDescriptions;
+    renderPassCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+    VkRenderPass renderPass;
+    vk::CreateRenderPass(m_device->device(), &renderPassCreateInfo, nullptr, &renderPass);
+
+    VkFramebufferAttachmentImageInfoKHR framebufferAttachmentImageInfos[4] = {};
+    // Color attachment
+    framebufferAttachmentImageInfos[0].sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
+    framebufferAttachmentImageInfos[0].width = attachmentWidth;
+    framebufferAttachmentImageInfos[0].height = attachmentHeight;
+    framebufferAttachmentImageInfos[0].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+    framebufferAttachmentImageInfos[0].layerCount = 2;
+    framebufferAttachmentImageInfos[0].viewFormatCount = 1;
+    framebufferAttachmentImageInfos[0].pViewFormats = &colorAndInputAttachmentFormat;
+    // Color resolve attachment
+    framebufferAttachmentImageInfos[1].sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
+    framebufferAttachmentImageInfos[1].width = attachmentWidth;
+    framebufferAttachmentImageInfos[1].height = attachmentHeight;
+    framebufferAttachmentImageInfos[1].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+    framebufferAttachmentImageInfos[1].layerCount = 2;
+    framebufferAttachmentImageInfos[1].viewFormatCount = 1;
+    framebufferAttachmentImageInfos[1].pViewFormats = &colorAndInputAttachmentFormat;
+    // Depth stencil attachment
+    framebufferAttachmentImageInfos[2].sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
+    framebufferAttachmentImageInfos[2].width = attachmentWidth;
+    framebufferAttachmentImageInfos[2].height = attachmentHeight;
+    framebufferAttachmentImageInfos[2].usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
+    framebufferAttachmentImageInfos[2].layerCount = 2;
+    framebufferAttachmentImageInfos[2].viewFormatCount = 1;
+    framebufferAttachmentImageInfos[2].pViewFormats = &depthStencilAttachmentFormat;
+    // Input attachment
+    framebufferAttachmentImageInfos[3].sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
+    framebufferAttachmentImageInfos[3].width = attachmentWidth;
+    framebufferAttachmentImageInfos[3].height = attachmentHeight;
+    framebufferAttachmentImageInfos[3].usage = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
+    framebufferAttachmentImageInfos[3].layerCount = 2;
+    framebufferAttachmentImageInfos[3].viewFormatCount = 1;
+    framebufferAttachmentImageInfos[3].pViewFormats = &colorAndInputAttachmentFormat;
+    VkFramebufferAttachmentsCreateInfoKHR framebufferAttachmentsCreateInfo = {};
+    framebufferAttachmentsCreateInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR;
+    framebufferAttachmentsCreateInfo.attachmentImageInfoCount = 4;
+    framebufferAttachmentsCreateInfo.pAttachmentImageInfos = framebufferAttachmentImageInfos;
+    VkFramebufferCreateInfo framebufferCreateInfo = {};
+    framebufferCreateInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
+    framebufferCreateInfo.pNext = &framebufferAttachmentsCreateInfo;
+    framebufferCreateInfo.flags = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR;
+    framebufferCreateInfo.width = attachmentWidth;
+    framebufferCreateInfo.height = attachmentHeight;
+    framebufferCreateInfo.layers = 1;
+    framebufferCreateInfo.renderPass = renderPass;
+    framebufferCreateInfo.attachmentCount = 4;
+    VkFramebuffer framebuffer = VK_NULL_HANDLE;
+
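+    // Drop each attachment's layerCount below the two views required by the 0x3 view mask and
+    // expect VUID-VkFramebufferCreateInfo-renderPass-03198.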
+    // Color attachment, mismatched layer count
+    framebufferAttachmentImageInfos[0].layerCount = 1;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-renderPass-03198");
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    m_errorMonitor->VerifyFound();
+    if (framebuffer != VK_NULL_HANDLE) {
+        vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    }
+    framebufferAttachmentImageInfos[0].layerCount = 2;
+
+    // Color resolve attachment, mismatched layer count
+    framebufferAttachmentImageInfos[1].layerCount = 1;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-renderPass-03198");
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    m_errorMonitor->VerifyFound();
+    if (framebuffer != VK_NULL_HANDLE) {
+        vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    }
+    framebufferAttachmentImageInfos[1].layerCount = 2;
+
+    // Depth stencil attachment, mismatched layer count
+    framebufferAttachmentImageInfos[2].layerCount = 1;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-renderPass-03198");
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    m_errorMonitor->VerifyFound();
+    if (framebuffer != VK_NULL_HANDLE) {
+        vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    }
+    framebufferAttachmentImageInfos[2].layerCount = 2;
+
+    // Input attachment, mismatched layer count
+    framebufferAttachmentImageInfos[3].layerCount = 1;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-renderPass-03198");
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    m_errorMonitor->VerifyFound();
+    if (framebuffer != VK_NULL_HANDLE) {
+        vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    }
+    framebufferAttachmentImageInfos[3].layerCount = 2;
+
+    vk::DestroyRenderPass(m_device->device(), renderPass, nullptr);
+}
+
+TEST_F(VkLayerTest, ImagelessFramebufferDepthStencilResolveAttachmentTests) {
+    TEST_DESCRIPTION(
+        "Create an imageless framebuffer against a render pass using depth stencil resolve, with mismatched information");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    bool rp2Supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+    if (!rp2Supported) {
+        printf("%s Did not find required device extension %s; skipped.\n", kSkipPrefix, VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME);
+        return;
+    }
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required device extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME);
+        return;
+    }
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required device extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME);
+        return;
+    }
+
+    VkPhysicalDeviceImagelessFramebufferFeaturesKHR physicalDeviceImagelessFramebufferFeatures = {};
+    physicalDeviceImagelessFramebufferFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR;
+    physicalDeviceImagelessFramebufferFeatures.imagelessFramebuffer = VK_TRUE;
+    VkPhysicalDeviceFeatures2 physicalDeviceFeatures2 = {};
+    physicalDeviceFeatures2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
+    physicalDeviceFeatures2.pNext = &physicalDeviceImagelessFramebufferFeatures;
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &physicalDeviceFeatures2, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    uint32_t attachmentWidth = 512;
+    uint32_t attachmentHeight = 512;
+    VkFormat attachmentFormat = VK_FORMAT_D32_SFLOAT_S8_UINT;
+
+    VkAttachmentDescription2KHR attachmentDescriptions[2] = {};
+    // Depth/stencil attachment
+    attachmentDescriptions[0].sType = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR;
+    attachmentDescriptions[0].format = attachmentFormat;
+    attachmentDescriptions[0].samples = VK_SAMPLE_COUNT_4_BIT;
+    attachmentDescriptions[0].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+    // Depth/stencil resolve attachment
+    attachmentDescriptions[1].sType = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR;
+    attachmentDescriptions[1].format = attachmentFormat;
+    attachmentDescriptions[1].samples = VK_SAMPLE_COUNT_1_BIT;
+    attachmentDescriptions[1].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+
+    VkAttachmentReference2KHR depthStencilAttachmentReference = {};
+    depthStencilAttachmentReference.sType = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR;
+    depthStencilAttachmentReference.layout = VK_IMAGE_LAYOUT_GENERAL;
+    depthStencilAttachmentReference.attachment = 0;
+    VkAttachmentReference2KHR depthStencilResolveAttachmentReference = {};
+    depthStencilResolveAttachmentReference.sType = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR;
+    depthStencilResolveAttachmentReference.layout = VK_IMAGE_LAYOUT_GENERAL;
+    depthStencilResolveAttachmentReference.attachment = 1;
+    VkSubpassDescriptionDepthStencilResolveKHR subpassDescriptionDepthStencilResolve = {};
+    subpassDescriptionDepthStencilResolve.sType = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR;
+    subpassDescriptionDepthStencilResolve.pDepthStencilResolveAttachment = &depthStencilResolveAttachmentReference;
+    subpassDescriptionDepthStencilResolve.depthResolveMode = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR;
+    subpassDescriptionDepthStencilResolve.stencilResolveMode = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR;
+    VkSubpassDescription2KHR subpassDescription = {};
+    subpassDescription.sType = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR;
+    subpassDescription.pNext = &subpassDescriptionDepthStencilResolve;
+    subpassDescription.pDepthStencilAttachment = &depthStencilAttachmentReference;
+    subpassDescription.viewMask = 0x3u;
+
+    VkRenderPassCreateInfo2KHR renderPassCreateInfo = {};
+    renderPassCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR;
+    renderPassCreateInfo.pNext = nullptr;
+    renderPassCreateInfo.attachmentCount = 2;
+    renderPassCreateInfo.subpassCount = 1;
+    renderPassCreateInfo.pSubpasses = &subpassDescription;
+    renderPassCreateInfo.pAttachments = attachmentDescriptions;
+    VkRenderPass renderPass;
+    PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR =
+        (PFN_vkCreateRenderPass2KHR)vk::GetDeviceProcAddr(m_device->device(), "vkCreateRenderPass2KHR");
+    vkCreateRenderPass2KHR(m_device->device(), &renderPassCreateInfo, nullptr, &renderPass);
+
+    VkFramebufferAttachmentImageInfoKHR framebufferAttachmentImageInfos[2] = {};
+    // Depth/stencil attachment
+    framebufferAttachmentImageInfos[0].sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
+    framebufferAttachmentImageInfos[0].width = attachmentWidth;
+    framebufferAttachmentImageInfos[0].height = attachmentHeight;
+    framebufferAttachmentImageInfos[0].usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
+    framebufferAttachmentImageInfos[0].layerCount = 2;
+    framebufferAttachmentImageInfos[0].viewFormatCount = 1;
+    framebufferAttachmentImageInfos[0].pViewFormats = &attachmentFormat;
+    // Depth/stencil resolve attachment
+    framebufferAttachmentImageInfos[1].sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR;
+    framebufferAttachmentImageInfos[1].width = attachmentWidth;
+    framebufferAttachmentImageInfos[1].height = attachmentHeight;
+    framebufferAttachmentImageInfos[1].usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
+    framebufferAttachmentImageInfos[1].layerCount = 2;
+    framebufferAttachmentImageInfos[1].viewFormatCount = 1;
+    framebufferAttachmentImageInfos[1].pViewFormats = &attachmentFormat;
+    VkFramebufferAttachmentsCreateInfoKHR framebufferAttachmentsCreateInfo = {};
+    framebufferAttachmentsCreateInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR;
+    framebufferAttachmentsCreateInfo.attachmentImageInfoCount = 2;
+    framebufferAttachmentsCreateInfo.pAttachmentImageInfos = framebufferAttachmentImageInfos;
+    VkFramebufferCreateInfo framebufferCreateInfo = {};
+    framebufferCreateInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
+    framebufferCreateInfo.pNext = &framebufferAttachmentsCreateInfo;
+    framebufferCreateInfo.flags = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR;
+    framebufferCreateInfo.width = attachmentWidth;
+    framebufferCreateInfo.height = attachmentHeight;
+    framebufferCreateInfo.layers = 1;
+    framebufferCreateInfo.renderPass = renderPass;
+    framebufferCreateInfo.attachmentCount = 2;
+    VkFramebuffer framebuffer = VK_NULL_HANDLE;
+
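+    // The subpass view mask (0x3) requires layerCount >= 2 on both attachments (VUID 03198), and the
+    // depth/stencil resolve attachment must carry VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT (VUID 03203).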
+    // Depth/stencil attachment, mismatched layer count
+    framebufferAttachmentImageInfos[0].layerCount = 1;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-renderPass-03198");
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    m_errorMonitor->VerifyFound();
+    if (framebuffer != VK_NULL_HANDLE) {
+        vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    }
+    framebufferAttachmentImageInfos[0].layerCount = 2;
+
+    // Depth/stencil resolve attachment, mismatched image usage
+    framebufferAttachmentImageInfos[1].usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-flags-03203");
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    m_errorMonitor->VerifyFound();
+    if (framebuffer != VK_NULL_HANDLE) {
+        vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    }
+    framebufferAttachmentImageInfos[1].usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
+
+    // Depth/stencil resolve attachment, mismatched layer count
+    framebufferAttachmentImageInfos[1].layerCount = 1;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkFramebufferCreateInfo-renderPass-03198");
+    vk::CreateFramebuffer(m_device->device(), &framebufferCreateInfo, nullptr, &framebuffer);
+    m_errorMonitor->VerifyFound();
+    if (framebuffer != VK_NULL_HANDLE) {
+        vk::DestroyFramebuffer(m_device->device(), framebuffer, nullptr);
+    }
+    framebufferAttachmentImageInfos[1].layerCount = 2;
+
+    vk::DestroyRenderPass(m_device->device(), renderPass, nullptr);
+}
diff --git a/src/third_party/vulkan-validation-layers/src/tests/vklayertests_others.cpp b/src/third_party/vulkan-validation-layers/src/tests/vklayertests_others.cpp
new file mode 100644
index 0000000..d0759bc
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/vklayertests_others.cpp
@@ -0,0 +1,7218 @@
+/*
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2019 Google, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Author: Chia-I Wu <olvaffe@gmail.com>
+ * Author: Chris Forbes <chrisf@ijw.co.nz>
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Mike Stroyan <mike@LunarG.com>
+ * Author: Tobin Ehlis <tobine@google.com>
+ * Author: Tony Barbour <tony@LunarG.com>
+ * Author: Cody Northrop <cnorthrop@google.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ * Author: Jeremy Kniager <jeremyk@lunarg.com>
+ * Author: Shannon McPherson <shannon@lunarg.com>
+ * Author: John Zulauf <jzulauf@lunarg.com>
+ */
+
+#include "cast_utils.h"
+#include "layer_validation_tests.h"
+
+TEST_F(VkLayerTest, RequiredParameter) {
+    TEST_DESCRIPTION("Specify VK_NULL_HANDLE, NULL, and 0 for required handle, pointer, array, and array count parameters");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "required parameter pFeatures specified as NULL");
+    // Specify NULL for a pointer to a handle
+    // Expected to trigger an error with
+    // parameter_validation::validate_required_pointer
+    vk::GetPhysicalDeviceFeatures(gpu(), NULL);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "required parameter pQueueFamilyPropertyCount specified as NULL");
+    // Specify NULL for pointer to array count
+    // Expected to trigger an error with parameter_validation::validate_array
+    vk::GetPhysicalDeviceQueueFamilyProperties(gpu(), NULL, NULL);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-viewportCount-arraylength");
+    // Specify 0 for a required array count
+    // Expected to trigger an error with parameter_validation::validate_array
+    VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
+    m_commandBuffer->SetViewport(0, 0, &viewport);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCreateImage-pCreateInfo-parameter");
+    // Specify a null pImageCreateInfo struct pointer
+    VkImage test_image;
+    vk::CreateImage(device(), NULL, NULL, &test_image);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetViewport-pViewports-parameter");
+    // Specify NULL for a required array
+    // Expected to trigger an error with parameter_validation::validate_array
+    m_commandBuffer->SetViewport(0, 1, NULL);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "required parameter memory specified as VK_NULL_HANDLE");
+    // Specify VK_NULL_HANDLE for a required handle
+    // Expected to trigger an error with
+    // parameter_validation::validate_required_handle
+    vk::UnmapMemory(device(), VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "required parameter pFences[0] specified as VK_NULL_HANDLE");
+    // Specify VK_NULL_HANDLE for a required handle array entry
+    // Expected to trigger an error with
+    // parameter_validation::validate_required_handle_array
+    VkFence fence = VK_NULL_HANDLE;
+    vk::ResetFences(device(), 1, &fence);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "required parameter pAllocateInfo specified as NULL");
+    // Specify NULL for a required struct pointer
+    // Expected to trigger an error with
+    // parameter_validation::validate_struct_type
+    VkDeviceMemory memory = VK_NULL_HANDLE;
+    vk::AllocateMemory(device(), NULL, NULL, &memory);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "value of faceMask must not be 0");
+    // Specify 0 for a required VkFlags parameter
+    // Expected to trigger an error with parameter_validation::validate_flags
+    m_commandBuffer->SetStencilReference(0, 0);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "value of pSubmits[0].pWaitDstStageMask[0] must not be 0");
+    // Specify 0 for a required VkFlags array entry
+    // Expected to trigger an error with
+    // parameter_validation::validate_flags_array
+    VkSemaphore semaphore = VK_NULL_HANDLE;
+    VkPipelineStageFlags stageFlags = 0;
+    VkSubmitInfo submitInfo = {};
+    submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submitInfo.waitSemaphoreCount = 1;
+    submitInfo.pWaitSemaphores = &semaphore;
+    submitInfo.pWaitDstStageMask = &stageFlags;
+    vk::QueueSubmit(m_device->m_queue, 1, &submitInfo, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSubmitInfo-sType-sType");
+    stageFlags = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
+    // Set a bogus sType and see what happens
+    submitInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+    submitInfo.waitSemaphoreCount = 1;
+    submitInfo.pWaitSemaphores = &semaphore;
+    submitInfo.pWaitDstStageMask = &stageFlags;
+    vk::QueueSubmit(m_device->m_queue, 1, &submitInfo, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSubmitInfo-pWaitSemaphores-parameter");
+    stageFlags = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
+    submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submitInfo.waitSemaphoreCount = 1;
+    // Set a null pointer for pWaitSemaphores
+    submitInfo.pWaitSemaphores = NULL;
+    submitInfo.pWaitDstStageMask = &stageFlags;
+    vk::QueueSubmit(m_device->m_queue, 1, &submitInfo, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCreateRenderPass-pCreateInfo-parameter");
+    VkRenderPass render_pass;
+    vk::CreateRenderPass(device(), nullptr, nullptr, &render_pass);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, PnextOnlyStructValidation) {
+    TEST_DESCRIPTION("See if checks occur on structs ONLY used in pnext chains.");
+
+    if (!(CheckDescriptorIndexingSupportAndInitFramework(this, m_instance_extension_names, m_device_extension_names, NULL,
+                                                         m_errorMonitor))) {
+        printf("Descriptor indexing or one of its dependencies not supported, skipping tests\n");
+        return;
+    }
+
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+    // Create a device passing in a bad PdevFeatures2 value
+    auto indexing_features = lvl_init_struct<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>();
+    auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&indexing_features);
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+    // Set one of the features values to an invalid boolean value
+    indexing_features.descriptorBindingUniformBufferUpdateAfterBind = 800;
+
+    uint32_t queue_node_count;
+    vk::GetPhysicalDeviceQueueFamilyProperties(gpu(), &queue_node_count, NULL);
+    VkQueueFamilyProperties *queue_props = new VkQueueFamilyProperties[queue_node_count];
+    vk::GetPhysicalDeviceQueueFamilyProperties(gpu(), &queue_node_count, queue_props);
+    float priorities[] = {1.0f};
+    VkDeviceQueueCreateInfo queue_info{};
+    queue_info.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
+    queue_info.pNext = NULL;
+    queue_info.flags = 0;
+    queue_info.queueFamilyIndex = 0;
+    queue_info.queueCount = 1;
+    queue_info.pQueuePriorities = &priorities[0];
+    VkDeviceCreateInfo dev_info = {};
+    dev_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
+    dev_info.pNext = NULL;
+    dev_info.queueCreateInfoCount = 1;
+    dev_info.pQueueCreateInfos = &queue_info;
+    dev_info.enabledLayerCount = 0;
+    dev_info.ppEnabledLayerNames = NULL;
+    dev_info.enabledExtensionCount = m_device_extension_names.size();
+    dev_info.ppEnabledExtensionNames = m_device_extension_names.data();
+    dev_info.pNext = &features2;
+    VkDevice dev;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT, "is neither VK_TRUE nor VK_FALSE");
+    m_errorMonitor->SetUnexpectedError("Failed to create");
+    vk::CreateDevice(gpu(), &dev_info, NULL, &dev);
+    m_errorMonitor->VerifyFound();
+
+    delete[] queue_props;
+}
+
+TEST_F(VkLayerTest, ReservedParameter) {
+    TEST_DESCRIPTION("Specify a non-zero value for a reserved parameter");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, " must be 0");
+    // Specify 0 for a reserved VkFlags parameter
+    // Expected to trigger an error with
+    // parameter_validation::validate_reserved_flags
+    VkEvent event_handle = VK_NULL_HANDLE;
+    VkEventCreateInfo event_info = {};
+    event_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+    event_info.flags = 1;
+    vk::CreateEvent(device(), &event_info, NULL, &event_handle);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, DebugMarkerNameTest) {
+    TEST_DESCRIPTION("Ensure debug marker object names are printed in debug report output");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceExtensionSupported(gpu(), "VK_LAYER_LUNARG_core_validation", VK_EXT_DEBUG_MARKER_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_EXT_DEBUG_MARKER_EXTENSION_NAME);
+    } else {
+        printf("%s Debug Marker Extension not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    PFN_vkDebugMarkerSetObjectNameEXT fpvkDebugMarkerSetObjectNameEXT =
+        (PFN_vkDebugMarkerSetObjectNameEXT)vk::GetInstanceProcAddr(instance(), "vkDebugMarkerSetObjectNameEXT");
+    if (!(fpvkDebugMarkerSetObjectNameEXT)) {
+        printf("%s Can't find fpvkDebugMarkerSetObjectNameEXT; skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    if (DeviceSimulation()) {
+        printf("%sSkipping object naming test.\n", kSkipPrefix);
+        return;
+    }
+
+    VkBuffer buffer;
+    VkDeviceMemory memory_1, memory_2;
+    std::string memory_name = "memory_name";
+
+    VkBufferCreateInfo buffer_create_info = {};
+    buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    buffer_create_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+    buffer_create_info.size = 1;
+
+    vk::CreateBuffer(device(), &buffer_create_info, nullptr, &buffer);
+
+    VkMemoryRequirements memRequirements;
+    vk::GetBufferMemoryRequirements(device(), buffer, &memRequirements);
+
+    VkMemoryAllocateInfo memory_allocate_info = {};
+    memory_allocate_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    memory_allocate_info.allocationSize = memRequirements.size;
+    memory_allocate_info.memoryTypeIndex = 0;
+
+    vk::AllocateMemory(device(), &memory_allocate_info, nullptr, &memory_1);
+    vk::AllocateMemory(device(), &memory_allocate_info, nullptr, &memory_2);
+
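+    // Name memory_2 via the debug marker extension; the bind error triggered below should report
+    // the object by this name.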
+    VkDebugMarkerObjectNameInfoEXT name_info = {};
+    name_info.sType = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT;
+    name_info.pNext = nullptr;
+    name_info.object = (uint64_t)memory_2;
+    name_info.objectType = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT;
+    name_info.pObjectName = memory_name.c_str();
+    fpvkDebugMarkerSetObjectNameEXT(device(), &name_info);
+
+    vk::BindBufferMemory(device(), buffer, memory_1, 0);
+
+    // Test core_validation layer
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, memory_name);
+    vk::BindBufferMemory(device(), buffer, memory_2, 0);
+    m_errorMonitor->VerifyFound();
+
+    vk::FreeMemory(device(), memory_1, nullptr);
+    memory_1 = VK_NULL_HANDLE;
+    vk::FreeMemory(device(), memory_2, nullptr);
+    memory_2 = VK_NULL_HANDLE;
+    vk::DestroyBuffer(device(), buffer, nullptr);
+    buffer = VK_NULL_HANDLE;
+
+    VkCommandBuffer commandBuffer;
+    std::string commandBuffer_name = "command_buffer_name";
+    VkCommandPool commandpool_1;
+    VkCommandPool commandpool_2;
+    VkCommandPoolCreateInfo pool_create_info{};
+    pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+    pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+    pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+    vk::CreateCommandPool(device(), &pool_create_info, nullptr, &commandpool_1);
+    vk::CreateCommandPool(device(), &pool_create_info, nullptr, &commandpool_2);
+
+    VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+    command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+    command_buffer_allocate_info.commandPool = commandpool_1;
+    command_buffer_allocate_info.commandBufferCount = 1;
+    command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+    vk::AllocateCommandBuffers(device(), &command_buffer_allocate_info, &commandBuffer);
+
+    name_info.object = (uint64_t)commandBuffer;
+    name_info.objectType = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT;
+    name_info.pObjectName = commandBuffer_name.c_str();
+    fpvkDebugMarkerSetObjectNameEXT(device(), &name_info);
+
+    VkCommandBufferBeginInfo cb_begin_Info = {};
+    cb_begin_Info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+    cb_begin_Info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
+    vk::BeginCommandBuffer(commandBuffer, &cb_begin_Info);
+
+    const VkRect2D scissor = {{-1, 0}, {16, 16}};
+    const VkRect2D scissors[] = {scissor, scissor};
+
+    // Test parameter_validation layer
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, commandBuffer_name);
+    vk::CmdSetScissor(commandBuffer, 1, 1, scissors);
+    m_errorMonitor->VerifyFound();
+
+    // Test object_tracker layer
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, commandBuffer_name);
+    vk::FreeCommandBuffers(device(), commandpool_2, 1, &commandBuffer);
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyCommandPool(device(), commandpool_1, NULL);
+    vk::DestroyCommandPool(device(), commandpool_2, NULL);
+}
+
+TEST_F(VkLayerTest, DebugUtilsNameTest) {
+    TEST_DESCRIPTION("Ensure debug utils object names are printed in debug messenger output");
+
+    // Skip test if extension not supported
+    if (InstanceExtensionSupported(VK_EXT_DEBUG_UTILS_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
+    } else {
+        printf("%s Debug Utils Extension not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    PFN_vkSetDebugUtilsObjectNameEXT fpvkSetDebugUtilsObjectNameEXT =
+        (PFN_vkSetDebugUtilsObjectNameEXT)vk::GetInstanceProcAddr(instance(), "vkSetDebugUtilsObjectNameEXT");
+    ASSERT_TRUE(fpvkSetDebugUtilsObjectNameEXT);  // Must be extant if extension is enabled
+    PFN_vkCreateDebugUtilsMessengerEXT fpvkCreateDebugUtilsMessengerEXT =
+        (PFN_vkCreateDebugUtilsMessengerEXT)vk::GetInstanceProcAddr(instance(), "vkCreateDebugUtilsMessengerEXT");
+    ASSERT_TRUE(fpvkCreateDebugUtilsMessengerEXT);  // Must be extant if extension is enabled
+    PFN_vkDestroyDebugUtilsMessengerEXT fpvkDestroyDebugUtilsMessengerEXT =
+        (PFN_vkDestroyDebugUtilsMessengerEXT)vk::GetInstanceProcAddr(instance(), "vkDestroyDebugUtilsMessengerEXT");
+    ASSERT_TRUE(fpvkDestroyDebugUtilsMessengerEXT);  // Must be extant if extension is enabled
+    PFN_vkCmdInsertDebugUtilsLabelEXT fpvkCmdInsertDebugUtilsLabelEXT =
+        (PFN_vkCmdInsertDebugUtilsLabelEXT)vk::GetInstanceProcAddr(instance(), "vkCmdInsertDebugUtilsLabelEXT");
+    ASSERT_TRUE(fpvkCmdInsertDebugUtilsLabelEXT);  // Must be extant if extension is enabled
+
+    if (DeviceSimulation()) {
+        printf("%sSkipping object naming test.\n", kSkipPrefix);
+        return;
+    }
+
+    DebugUtilsLabelCheckData callback_data;
+    auto empty_callback = [](const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData, DebugUtilsLabelCheckData *data) {
+        data->count++;
+    };
+    callback_data.count = 0;
+    callback_data.callback = empty_callback;
+
+    auto callback_create_info = lvl_init_struct<VkDebugUtilsMessengerCreateInfoEXT>();
+    callback_create_info.messageSeverity =
+        VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT;
+    callback_create_info.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT;
+    callback_create_info.pfnUserCallback = DebugUtilsCallback;
+    callback_create_info.pUserData = &callback_data;
+    VkDebugUtilsMessengerEXT my_messenger = VK_NULL_HANDLE;
+    fpvkCreateDebugUtilsMessengerEXT(instance(), &callback_create_info, nullptr, &my_messenger);
+
+    VkBuffer buffer;
+    VkDeviceMemory memory_1, memory_2;
+    std::string memory_name = "memory_name";
+
+    VkBufferCreateInfo buffer_create_info = {};
+    buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    buffer_create_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+    buffer_create_info.size = 1;
+
+    vk::CreateBuffer(device(), &buffer_create_info, nullptr, &buffer);
+
+    VkMemoryRequirements memRequirements;
+    vk::GetBufferMemoryRequirements(device(), buffer, &memRequirements);
+
+    VkMemoryAllocateInfo memory_allocate_info = {};
+    memory_allocate_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    memory_allocate_info.allocationSize = memRequirements.size;
+    memory_allocate_info.memoryTypeIndex = 0;
+
+    vk::AllocateMemory(device(), &memory_allocate_info, nullptr, &memory_1);
+    vk::AllocateMemory(device(), &memory_allocate_info, nullptr, &memory_2);
+
+    VkDebugUtilsObjectNameInfoEXT name_info = {};
+    name_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
+    name_info.pNext = nullptr;
+    name_info.objectType = VK_OBJECT_TYPE_DEVICE_MEMORY;
+    name_info.pObjectName = memory_name.c_str();
+
+    // Pass in a bad handle and make sure ObjectTracker catches it
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDebugUtilsObjectNameInfoEXT-objectType-02590");
+    name_info.objectHandle = (uint64_t)0xcadecade;
+    fpvkSetDebugUtilsObjectNameEXT(device(), &name_info);
+    m_errorMonitor->VerifyFound();
+
+    // Pass in 'unknown' object type and see if parameter validation catches it
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDebugUtilsObjectNameInfoEXT-objectType-02589");
+    name_info.objectHandle = (uint64_t)memory_2;
+    name_info.objectType = VK_OBJECT_TYPE_UNKNOWN;
+    fpvkSetDebugUtilsObjectNameEXT(device(), &name_info);
+    m_errorMonitor->VerifyFound();
+
+    name_info.objectType = VK_OBJECT_TYPE_DEVICE_MEMORY;
+    fpvkSetDebugUtilsObjectNameEXT(device(), &name_info);
+
+    vk::BindBufferMemory(device(), buffer, memory_1, 0);
+
+    // Test core_validation layer
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, memory_name);
+    vk::BindBufferMemory(device(), buffer, memory_2, 0);
+    m_errorMonitor->VerifyFound();
+
+    vk::FreeMemory(device(), memory_1, nullptr);
+    memory_1 = VK_NULL_HANDLE;
+    vk::FreeMemory(device(), memory_2, nullptr);
+    memory_2 = VK_NULL_HANDLE;
+    vk::DestroyBuffer(device(), buffer, nullptr);
+    buffer = VK_NULL_HANDLE;
+
+    VkCommandBuffer commandBuffer;
+    std::string commandBuffer_name = "command_buffer_name";
+    VkCommandPool commandpool_1;
+    VkCommandPool commandpool_2;
+    VkCommandPoolCreateInfo pool_create_info{};
+    pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+    pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+    pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+    vk::CreateCommandPool(device(), &pool_create_info, nullptr, &commandpool_1);
+    vk::CreateCommandPool(device(), &pool_create_info, nullptr, &commandpool_2);
+
+    VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+    command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+    command_buffer_allocate_info.commandPool = commandpool_1;
+    command_buffer_allocate_info.commandBufferCount = 1;
+    command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+    vk::AllocateCommandBuffers(device(), &command_buffer_allocate_info, &commandBuffer);
+
+    name_info.objectHandle = (uint64_t)commandBuffer;
+    name_info.objectType = VK_OBJECT_TYPE_COMMAND_BUFFER;
+    name_info.pObjectName = commandBuffer_name.c_str();
+    fpvkSetDebugUtilsObjectNameEXT(device(), &name_info);
+
+    VkCommandBufferBeginInfo cb_begin_Info = {};
+    cb_begin_Info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+    cb_begin_Info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
+    vk::BeginCommandBuffer(commandBuffer, &cb_begin_Info);
+
+    const VkRect2D scissor = {{-1, 0}, {16, 16}};
+    const VkRect2D scissors[] = {scissor, scissor};
+
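+    // Insert a command buffer label, then confirm via the messenger callback that the label is
+    // reported back in pCmdBufLabels when a validation error is emitted for this command buffer.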
+    auto command_label = lvl_init_struct<VkDebugUtilsLabelEXT>();
+    command_label.pLabelName = "Command Label 0123";
+    command_label.color[0] = 0.;
+    command_label.color[1] = 1.;
+    command_label.color[2] = 2.;
+    command_label.color[3] = 3.0;
+    bool command_label_test = false;
+    auto command_label_callback = [command_label, &command_label_test](const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData,
+                                                                       DebugUtilsLabelCheckData *data) {
+        data->count++;
+        command_label_test = false;
+        if (pCallbackData->cmdBufLabelCount == 1) {
+            command_label_test = pCallbackData->pCmdBufLabels[0] == command_label;
+        }
+    };
+    callback_data.callback = command_label_callback;
+
+    fpvkCmdInsertDebugUtilsLabelEXT(commandBuffer, &command_label);
+    // Test parameter_validation layer
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, commandBuffer_name);
+    vk::CmdSetScissor(commandBuffer, 1, 1, scissors);
+    m_errorMonitor->VerifyFound();
+
+    // Check the label test
+    if (!command_label_test) {
+        ADD_FAILURE() << "Command label '" << command_label.pLabelName << "' not passed to callback.";
+    }
+
+    // Test object_tracker layer
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, commandBuffer_name);
+    vk::FreeCommandBuffers(device(), commandpool_2, 1, &commandBuffer);
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyCommandPool(device(), commandpool_1, NULL);
+    vk::DestroyCommandPool(device(), commandpool_2, NULL);
+    fpvkDestroyDebugUtilsMessengerEXT(instance(), my_messenger, nullptr);
+}
+
+TEST_F(VkLayerTest, InvalidStructSType) {
+    TEST_DESCRIPTION("Specify an invalid VkStructureType for a Vulkan structure's sType field");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "parameter pAllocateInfo->sType must be");
+    // Zero struct memory, effectively setting sType to
+    // VK_STRUCTURE_TYPE_APPLICATION_INFO
+    // Expected to trigger an error with
+    // parameter_validation::validate_struct_type
+    VkMemoryAllocateInfo alloc_info = {};
+    VkDeviceMemory memory = VK_NULL_HANDLE;
+    vk::AllocateMemory(device(), &alloc_info, NULL, &memory);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "parameter pSubmits[0].sType must be");
+    // Zero struct memory, effectively setting sType to
+    // VK_STRUCTURE_TYPE_APPLICATION_INFO
+    // Expected to trigger an error with
+    // parameter_validation::validate_struct_type_array
+    VkSubmitInfo submit_info = {};
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InvalidStructPNext) {
+    TEST_DESCRIPTION("Specify an invalid value for a Vulkan structure's pNext field");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT, "value of pCreateInfo->pNext must be NULL");
+    // Set VkEventCreateInfo::pNext to a non-NULL value, when pNext must be NULL.
+    // Need to pick a function that has no allowed pNext structure types.
+    // Expected to trigger an error with parameter_validation::validate_struct_pnext
+    VkEvent event = VK_NULL_HANDLE;
+    VkEventCreateInfo event_alloc_info = {};
+    // Zero-initialization will provide the correct sType
+    VkApplicationInfo app_info = {};
+    event_alloc_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+    event_alloc_info.pNext = &app_info;
+    vk::CreateEvent(device(), &event_alloc_info, NULL, &event);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_WARNING_BIT_EXT,
+                                         " chain includes a structure with unexpected VkStructureType ");
+    // Set VkMemoryAllocateInfo::pNext to a non-NULL value, but use
+    // a function that has allowed pNext structure types and specify
+    // a structure type that is not allowed.
+    // Expected to trigger an error with parameter_validation::validate_struct_pnext
+    VkDeviceMemory memory = VK_NULL_HANDLE;
+    VkMemoryAllocateInfo memory_alloc_info = {};
+    memory_alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    memory_alloc_info.pNext = &app_info;
+    vk::AllocateMemory(device(), &memory_alloc_info, NULL, &memory);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, UnrecognizedValueOutOfRange) {
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "does not fall within the begin..end range of the core VkFormat enumeration tokens");
+    // Specify an invalid VkFormat value
+    // Expected to trigger an error with
+    // parameter_validation::validate_ranged_enum
+    VkFormatProperties format_properties;
+    vk::GetPhysicalDeviceFormatProperties(gpu(), static_cast<VkFormat>(8000), &format_properties);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, UnrecognizedValueBadMask) {
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "contains flag bits that are not recognized members of");
+    // Specify an invalid VkFlags bitmask value
+    // Expected to trigger an error with parameter_validation::validate_flags
+    VkImageFormatProperties image_format_properties;
+    vk::GetPhysicalDeviceImageFormatProperties(gpu(), VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_TYPE_2D, VK_IMAGE_TILING_OPTIMAL,
+                                               static_cast<VkImageUsageFlags>(1 << 25), 0, &image_format_properties);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, UnrecognizedValueBadFlag) {
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "contains flag bits that are not recognized members of");
+    // Specify an invalid VkFlags array entry
+    // Expected to trigger an error with parameter_validation::validate_flags_array
+    VkSemaphore semaphore;
+    VkSemaphoreCreateInfo semaphore_create_info{};
+    semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+    vk::CreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore);
+    // `stage_flags` is set to a value which, currently, is not a defined stage flag
+    // `VK_IMAGE_ASPECT_FLAG_BITS_MAX_ENUM` works well for this
+    VkPipelineStageFlags stage_flags = VK_IMAGE_ASPECT_FLAG_BITS_MAX_ENUM;
+    // `waitSemaphoreCount` *must* be greater than 0 to perform this check
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.waitSemaphoreCount = 1;
+    submit_info.pWaitSemaphores = &semaphore;
+    submit_info.pWaitDstStageMask = &stage_flags;
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    vk::DestroySemaphore(m_device->device(), semaphore, nullptr);
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, UnrecognizedValueBadBool) {
+    // Make sure using VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE doesn't trigger a false positive.
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME);
+    } else {
+        printf("%s VK_KHR_sampler_mirror_clamp_to_edge extension not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    // Specify an invalid VkBool32 value, expecting a warning with parameter_validation::validate_bool32
+    VkSamplerCreateInfo sampler_info = SafeSaneSamplerCreateInfo();
+    sampler_info.addressModeU = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE;
+    sampler_info.addressModeV = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE;
+    sampler_info.addressModeW = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE;
+
+    // Not VK_TRUE or VK_FALSE
+    sampler_info.anisotropyEnable = 3;
+    CreateSamplerTest(*this, &sampler_info, "is neither VK_TRUE nor VK_FALSE");
+}
+
+TEST_F(VkLayerTest, UnrecognizedValueMaxEnum) {
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // Specify MAX_ENUM
+    VkFormatProperties format_properties;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "does not fall within the begin..end range");
+    vk::GetPhysicalDeviceFormatProperties(gpu(), VK_FORMAT_MAX_ENUM, &format_properties);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, SubmitSignaledFence) {
+    vk_testing::Fence testFence;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "submitted in SIGNALED state.  Fences must be reset before being submitted");
+
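+    // Create the fence pre-signaled; submitting it without resetting it first should be flagged.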
+    VkFenceCreateInfo fenceInfo = {};
+    fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+    fenceInfo.pNext = NULL;
+    fenceInfo.flags = VK_FENCE_CREATE_SIGNALED_BIT;
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    m_commandBuffer->begin();
+    m_commandBuffer->ClearAllBuffers(m_renderTargets, m_clear_color, nullptr, m_depth_clear_color, m_stencil_clear_color);
+    m_commandBuffer->end();
+
+    testFence.init(*m_device, fenceInfo);
+
+    VkSubmitInfo submit_info;
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.pNext = NULL;
+    submit_info.waitSemaphoreCount = 0;
+    submit_info.pWaitSemaphores = NULL;
+    submit_info.pWaitDstStageMask = NULL;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    submit_info.signalSemaphoreCount = 0;
+    submit_info.pSignalSemaphores = NULL;
+
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, testFence.handle());
+    vk::QueueWaitIdle(m_device->m_queue);
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, LeakAnObject) {
+    TEST_DESCRIPTION("Create a fence and destroy its device without first destroying the fence.");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    // Workaround for overzealous layers checking even the guaranteed 0th queue family
+    const auto q_props = vk_testing::PhysicalDevice(gpu()).queue_properties();
+    ASSERT_TRUE(q_props.size() > 0);
+    ASSERT_TRUE(q_props[0].queueCount > 0);
+
+    const float q_priority[] = {1.0f};
+    VkDeviceQueueCreateInfo queue_ci = {};
+    queue_ci.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
+    queue_ci.queueFamilyIndex = 0;
+    queue_ci.queueCount = 1;
+    queue_ci.pQueuePriorities = q_priority;
+
+    VkDeviceCreateInfo device_ci = {};
+    device_ci.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
+    device_ci.queueCreateInfoCount = 1;
+    device_ci.pQueueCreateInfos = &queue_ci;
+
+    VkDevice leaky_device;
+    ASSERT_VK_SUCCESS(vk::CreateDevice(gpu(), &device_ci, nullptr, &leaky_device));
+
+    const VkFenceCreateInfo fence_ci = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO};
+    VkFence leaked_fence;
+    ASSERT_VK_SUCCESS(vk::CreateFence(leaky_device, &fence_ci, nullptr, &leaked_fence));
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyDevice-device-00378");
+    vk::DestroyDevice(leaky_device, nullptr);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, UseObjectWithWrongDevice) {
+    TEST_DESCRIPTION(
+        "Try to destroy a render pass object using a device other than the one it was created on. This should generate a distinct "
+        "error from the invalid handle error.");
+    // Create first device and renderpass
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // Create second device
+    float priorities[] = {1.0f};
+    VkDeviceQueueCreateInfo queue_info{};
+    queue_info.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
+    queue_info.pNext = NULL;
+    queue_info.flags = 0;
+    queue_info.queueFamilyIndex = 0;
+    queue_info.queueCount = 1;
+    queue_info.pQueuePriorities = &priorities[0];
+
+    VkDeviceCreateInfo device_create_info = {};
+    auto features = m_device->phy().features();
+    device_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
+    device_create_info.pNext = NULL;
+    device_create_info.queueCreateInfoCount = 1;
+    device_create_info.pQueueCreateInfos = &queue_info;
+    device_create_info.enabledLayerCount = 0;
+    device_create_info.ppEnabledLayerNames = NULL;
+    device_create_info.pEnabledFeatures = &features;
+
+    VkDevice second_device;
+    ASSERT_VK_SUCCESS(vk::CreateDevice(gpu(), &device_create_info, NULL, &second_device));
+
+    // Try to destroy the renderpass from the first device using the second device
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyRenderPass-renderPass-parent");
+    vk::DestroyRenderPass(second_device, m_renderPass, NULL);
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyDevice(second_device, NULL);
+}
+
+TEST_F(VkLayerTest, InvalidAllocationCallbacks) {
+    TEST_DESCRIPTION("Test with invalid VkAllocationCallbacks");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // vk::CreateInstance and vk::CreateDevice tend to crash in the Loader Trampoline at the moment, so use vk::CreateCommandPool instead
+    const VkCommandPoolCreateInfo cpci = {VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, nullptr, 0,
+                                          DeviceObj()->QueueFamilyMatching(0, 0, true)};
+    VkCommandPool cmdPool;
+
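+    // Stub allocation callbacks that do nothing. Each case below builds a VkAllocationCallbacks that omits one
+    // required callback (or supplies only half of the internal-allocation pair) to trigger a distinct VUID.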
+    struct Alloc {
+        static VKAPI_ATTR void *VKAPI_CALL alloc(void *, size_t, size_t, VkSystemAllocationScope) { return nullptr; };
+        static VKAPI_ATTR void *VKAPI_CALL realloc(void *, void *, size_t, size_t, VkSystemAllocationScope) { return nullptr; };
+        static VKAPI_ATTR void VKAPI_CALL free(void *, void *){};
+        static VKAPI_ATTR void VKAPI_CALL internalAlloc(void *, size_t, VkInternalAllocationType, VkSystemAllocationScope){};
+        static VKAPI_ATTR void VKAPI_CALL internalFree(void *, size_t, VkInternalAllocationType, VkSystemAllocationScope){};
+    };
+
+    {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkAllocationCallbacks-pfnAllocation-00632");
+        const VkAllocationCallbacks allocator = {nullptr, nullptr, Alloc::realloc, Alloc::free, nullptr, nullptr};
+        vk::CreateCommandPool(device(), &cpci, &allocator, &cmdPool);
+        m_errorMonitor->VerifyFound();
+    }
+
+    {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkAllocationCallbacks-pfnReallocation-00633");
+        const VkAllocationCallbacks allocator = {nullptr, Alloc::alloc, nullptr, Alloc::free, nullptr, nullptr};
+        vk::CreateCommandPool(device(), &cpci, &allocator, &cmdPool);
+        m_errorMonitor->VerifyFound();
+    }
+
+    {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkAllocationCallbacks-pfnFree-00634");
+        const VkAllocationCallbacks allocator = {nullptr, Alloc::alloc, Alloc::realloc, nullptr, nullptr, nullptr};
+        vk::CreateCommandPool(device(), &cpci, &allocator, &cmdPool);
+        m_errorMonitor->VerifyFound();
+    }
+
+    {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+        const VkAllocationCallbacks allocator = {nullptr, Alloc::alloc, Alloc::realloc, Alloc::free, nullptr, Alloc::internalFree};
+        vk::CreateCommandPool(device(), &cpci, &allocator, &cmdPool);
+        m_errorMonitor->VerifyFound();
+    }
+
+    {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635");
+        const VkAllocationCallbacks allocator = {nullptr, Alloc::alloc, Alloc::realloc, Alloc::free, Alloc::internalAlloc, nullptr};
+        vk::CreateCommandPool(device(), &cpci, &allocator, &cmdPool);
+        m_errorMonitor->VerifyFound();
+    }
+}
+
+TEST_F(VkLayerTest, MismatchedQueueFamiliesOnSubmit) {
+    TEST_DESCRIPTION(
+        "Submit command buffer created using one queue family and attempt to submit them on a queue created in a different queue "
+        "family.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());  // assumes it initializes all queue families on vk::CreateDevice
+
+    // This test is meaningless unless we have multiple queue families
+    auto queue_family_properties = m_device->phy().queue_properties();
+    std::vector<uint32_t> queue_families;
+    for (uint32_t i = 0; i < queue_family_properties.size(); ++i)
+        if (queue_family_properties[i].queueCount > 0) queue_families.push_back(i);
+
+    if (queue_families.size() < 2) {
+        printf("%s Device only has one queue family; skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    const uint32_t queue_family = queue_families[0];
+
+    const uint32_t other_queue_family = queue_families[1];
+    VkQueue other_queue;
+    vk::GetDeviceQueue(m_device->device(), other_queue_family, 0, &other_queue);
+
+    VkCommandPoolObj cmd_pool(m_device, queue_family);
+    VkCommandBufferObj cmd_buff(m_device, &cmd_pool);
+
+    cmd_buff.begin();
+    cmd_buff.end();
+
+    // Submit on the wrong queue
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &cmd_buff.handle();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkQueueSubmit-pCommandBuffers-00074");
+    vk::QueueSubmit(other_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, TemporaryExternalSemaphore) {
+#ifdef _WIN32
+    const auto extension_name = VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME;
+    const auto handle_type = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR;
+#else
+    const auto extension_name = VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME;
+    const auto handle_type = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR;
+#endif
+    // Check for external semaphore instance extensions
+    if (InstanceExtensionSupported(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME);
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s External semaphore extension not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    // Check for external semaphore device extensions
+    if (DeviceExtensionSupported(gpu(), nullptr, extension_name)) {
+        m_device_extension_names.push_back(extension_name);
+        m_device_extension_names.push_back(VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
+    } else {
+        printf("%s External semaphore extension not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    // Check for external semaphore import and export capability
+    VkPhysicalDeviceExternalSemaphoreInfoKHR esi = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR, nullptr,
+                                                    handle_type};
+    VkExternalSemaphorePropertiesKHR esp = {VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR, nullptr};
+    auto vkGetPhysicalDeviceExternalSemaphorePropertiesKHR =
+        (PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR)vk::GetInstanceProcAddr(
+            instance(), "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR");
+    vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(gpu(), &esi, &esp);
+
+    if (!(esp.externalSemaphoreFeatures & VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR) ||
+        !(esp.externalSemaphoreFeatures & VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR)) {
+        printf("%s External semaphore does not support importing and exporting, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    VkResult err;
+
+    // Create a semaphore to export payload from
+    VkExportSemaphoreCreateInfoKHR esci = {VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR, nullptr, handle_type};
+    VkSemaphoreCreateInfo sci = {VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, &esci, 0};
+
+    VkSemaphore export_semaphore;
+    err = vk::CreateSemaphore(m_device->device(), &sci, nullptr, &export_semaphore);
+    ASSERT_VK_SUCCESS(err);
+
+    // Create a semaphore to import payload into
+    sci.pNext = nullptr;
+    VkSemaphore import_semaphore;
+    err = vk::CreateSemaphore(m_device->device(), &sci, nullptr, &import_semaphore);
+    ASSERT_VK_SUCCESS(err);
+
+#ifdef _WIN32
+    // Export semaphore payload to an opaque handle
+    HANDLE handle = nullptr;
+    VkSemaphoreGetWin32HandleInfoKHR ghi = {VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR, nullptr, export_semaphore,
+                                            handle_type};
+    auto vkGetSemaphoreWin32HandleKHR =
+        (PFN_vkGetSemaphoreWin32HandleKHR)vk::GetDeviceProcAddr(m_device->device(), "vkGetSemaphoreWin32HandleKHR");
+    err = vkGetSemaphoreWin32HandleKHR(m_device->device(), &ghi, &handle);
+    ASSERT_VK_SUCCESS(err);
+
+    // Import opaque handle exported above *temporarily*
+    VkImportSemaphoreWin32HandleInfoKHR ihi = {VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR,
+                                               nullptr,
+                                               import_semaphore,
+                                               VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR,
+                                               handle_type,
+                                               handle,
+                                               nullptr};
+    auto vkImportSemaphoreWin32HandleKHR =
+        (PFN_vkImportSemaphoreWin32HandleKHR)vk::GetDeviceProcAddr(m_device->device(), "vkImportSemaphoreWin32HandleKHR");
+    err = vkImportSemaphoreWin32HandleKHR(m_device->device(), &ihi);
+    ASSERT_VK_SUCCESS(err);
+#else
+    // Export semaphore payload to an opaque handle
+    int fd = 0;
+    VkSemaphoreGetFdInfoKHR ghi = {VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR, nullptr, export_semaphore, handle_type};
+    auto vkGetSemaphoreFdKHR = (PFN_vkGetSemaphoreFdKHR)vk::GetDeviceProcAddr(m_device->device(), "vkGetSemaphoreFdKHR");
+    err = vkGetSemaphoreFdKHR(m_device->device(), &ghi, &fd);
+    ASSERT_VK_SUCCESS(err);
+
+    // Import opaque handle exported above *temporarily*
+    VkImportSemaphoreFdInfoKHR ihi = {VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR, nullptr,     import_semaphore,
+                                      VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR,          handle_type, fd};
+    auto vkImportSemaphoreFdKHR = (PFN_vkImportSemaphoreFdKHR)vk::GetDeviceProcAddr(m_device->device(), "vkImportSemaphoreFdKHR");
+    err = vkImportSemaphoreFdKHR(m_device->device(), &ihi);
+    ASSERT_VK_SUCCESS(err);
+#endif
+
+    // Wait on the imported semaphore twice in vk::QueueSubmit, the second wait should be an error
+    VkPipelineStageFlags flags = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
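+    // Positional initializers below follow VkSubmitInfo's member order: sType, pNext, waitSemaphoreCount,
+    // pWaitSemaphores, pWaitDstStageMask, commandBufferCount, pCommandBuffers, signalSemaphoreCount, pSignalSemaphores.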
+    VkSubmitInfo si[] = {
+        {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, &flags, 0, nullptr, 1, &export_semaphore},
+        {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 1, &import_semaphore, &flags, 0, nullptr, 0, nullptr},
+        {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, &flags, 0, nullptr, 1, &export_semaphore},
+        {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 1, &import_semaphore, &flags, 0, nullptr, 0, nullptr},
+    };
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "has no way to be signaled");
+    vk::QueueSubmit(m_device->m_queue, 4, si, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+
+    auto index = m_device->graphics_queue_node_index_;
+    if (m_device->queue_props[index].queueFlags & VK_QUEUE_SPARSE_BINDING_BIT) {
+        // Wait on the imported semaphore twice in vk::QueueBindSparse, the second wait should be an error
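+        // Positional initializers follow VkBindSparseInfo's member order: sType, pNext, waitSemaphoreCount,
+        // pWaitSemaphores, bufferBindCount, pBufferBinds, imageOpaqueBindCount, pImageOpaqueBinds, imageBindCount,
+        // pImageBinds, signalSemaphoreCount, pSignalSemaphores.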
+        VkBindSparseInfo bi[] = {
+            {VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 1, &export_semaphore},
+            {VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, nullptr, 1, &import_semaphore, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr},
+            {VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 1, &export_semaphore},
+            {VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, nullptr, 1, &import_semaphore, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr},
+        };
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "has no way to be signaled");
+        vk::QueueBindSparse(m_device->m_queue, 4, bi, VK_NULL_HANDLE);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Cleanup
+    err = vk::QueueWaitIdle(m_device->m_queue);
+    ASSERT_VK_SUCCESS(err);
+    vk::DestroySemaphore(m_device->device(), export_semaphore, nullptr);
+    vk::DestroySemaphore(m_device->device(), import_semaphore, nullptr);
+}
+
+TEST_F(VkLayerTest, TemporaryExternalFence) {
+#ifdef _WIN32
+    const auto extension_name = VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME;
+    const auto handle_type = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR;
+#else
+    const auto extension_name = VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME;
+    const auto handle_type = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR;
+#endif
+    // Check for external fence instance extensions
+    if (InstanceExtensionSupported(VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME);
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s External fence extension not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    // Check for external fence device extensions
+    if (DeviceExtensionSupported(gpu(), nullptr, extension_name)) {
+        m_device_extension_names.push_back(extension_name);
+        m_device_extension_names.push_back(VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME);
+    } else {
+        printf("%s External fence extension not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    // Check for external fence import and export capability
+    VkPhysicalDeviceExternalFenceInfoKHR efi = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR, nullptr, handle_type};
+    VkExternalFencePropertiesKHR efp = {VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR, nullptr};
+    auto vkGetPhysicalDeviceExternalFencePropertiesKHR = (PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR)vk::GetInstanceProcAddr(
+        instance(), "vkGetPhysicalDeviceExternalFencePropertiesKHR");
+    vkGetPhysicalDeviceExternalFencePropertiesKHR(gpu(), &efi, &efp);
+
+    if (!(efp.externalFenceFeatures & VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR) ||
+        !(efp.externalFenceFeatures & VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR)) {
+        printf("%s External fence does not support importing and exporting, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    VkResult err;
+
+    // Create a fence to export payload from
+    VkFence export_fence;
+    {
+        VkExportFenceCreateInfoKHR efci = {VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR, nullptr, handle_type};
+        VkFenceCreateInfo fci = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, &efci, 0};
+        err = vk::CreateFence(m_device->device(), &fci, nullptr, &export_fence);
+        ASSERT_VK_SUCCESS(err);
+    }
+
+    // Create a fence to import payload into
+    VkFence import_fence;
+    {
+        VkFenceCreateInfo fci = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, 0};
+        err = vk::CreateFence(m_device->device(), &fci, nullptr, &import_fence);
+        ASSERT_VK_SUCCESS(err);
+    }
+
+#ifdef _WIN32
+    // Export fence payload to an opaque handle
+    HANDLE handle = nullptr;
+    {
+        VkFenceGetWin32HandleInfoKHR ghi = {VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR, nullptr, export_fence, handle_type};
+        auto vkGetFenceWin32HandleKHR =
+            (PFN_vkGetFenceWin32HandleKHR)vk::GetDeviceProcAddr(m_device->device(), "vkGetFenceWin32HandleKHR");
+        err = vkGetFenceWin32HandleKHR(m_device->device(), &ghi, &handle);
+        ASSERT_VK_SUCCESS(err);
+    }
+
+    // Import opaque handle exported above
+    {
+        VkImportFenceWin32HandleInfoKHR ifi = {VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR,
+                                               nullptr,
+                                               import_fence,
+                                               VK_FENCE_IMPORT_TEMPORARY_BIT_KHR,
+                                               handle_type,
+                                               handle,
+                                               nullptr};
+        auto vkImportFenceWin32HandleKHR =
+            (PFN_vkImportFenceWin32HandleKHR)vk::GetDeviceProcAddr(m_device->device(), "vkImportFenceWin32HandleKHR");
+        err = vkImportFenceWin32HandleKHR(m_device->device(), &ifi);
+        ASSERT_VK_SUCCESS(err);
+    }
+#else
+    // Export fence payload to an opaque handle
+    int fd = 0;
+    {
+        VkFenceGetFdInfoKHR gfi = {VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR, nullptr, export_fence, handle_type};
+        auto vkGetFenceFdKHR = (PFN_vkGetFenceFdKHR)vk::GetDeviceProcAddr(m_device->device(), "vkGetFenceFdKHR");
+        err = vkGetFenceFdKHR(m_device->device(), &gfi, &fd);
+        ASSERT_VK_SUCCESS(err);
+    }
+
+    // Import opaque handle exported above
+    {
+        VkImportFenceFdInfoKHR ifi = {VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR, nullptr,     import_fence,
+                                      VK_FENCE_IMPORT_TEMPORARY_BIT_KHR,          handle_type, fd};
+        auto vkImportFenceFdKHR = (PFN_vkImportFenceFdKHR)vk::GetDeviceProcAddr(m_device->device(), "vkImportFenceFdKHR");
+        err = vkImportFenceFdKHR(m_device->device(), &ifi);
+        ASSERT_VK_SUCCESS(err);
+    }
+#endif
+
+    // Undo the temporary import
+    vk::ResetFences(m_device->device(), 1, &import_fence);
+
+    // Signal the previously imported fence twice, the second signal should produce a validation error
+    vk::QueueSubmit(m_device->m_queue, 0, nullptr, import_fence);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "is already in use by another submission.");
+    vk::QueueSubmit(m_device->m_queue, 0, nullptr, import_fence);
+    m_errorMonitor->VerifyFound();
+
+    // Cleanup
+    err = vk::QueueWaitIdle(m_device->m_queue);
+    ASSERT_VK_SUCCESS(err);
+    vk::DestroyFence(m_device->device(), export_fence, nullptr);
+    vk::DestroyFence(m_device->device(), import_fence, nullptr);
+}
+
+TEST_F(VkLayerTest, InvalidCmdBufferEventDestroyed) {
+    TEST_DESCRIPTION("Attempt to draw with a command buffer that is invalid due to an event dependency being destroyed.");
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkEvent event;
+    VkEventCreateInfo evci = {};
+    evci.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+    VkResult result = vk::CreateEvent(m_device->device(), &evci, NULL, &event);
+    ASSERT_VK_SUCCESS(result);
+
+    m_commandBuffer->begin();
+    vk::CmdSetEvent(m_commandBuffer->handle(), event, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);
+    m_commandBuffer->end();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkEvent");
+    // Destroy event dependency prior to submit to cause ERROR
+    vk::DestroyEvent(m_device->device(), event, NULL);
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InvalidCmdBufferQueryPoolDestroyed) {
+    TEST_DESCRIPTION("Attempt to draw with a command buffer that is invalid due to a query pool dependency being destroyed.");
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkQueryPool query_pool;
+    VkQueryPoolCreateInfo qpci{};
+    qpci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+    qpci.queryType = VK_QUERY_TYPE_TIMESTAMP;
+    qpci.queryCount = 1;
+    VkResult result = vk::CreateQueryPool(m_device->device(), &qpci, nullptr, &query_pool);
+    ASSERT_VK_SUCCESS(result);
+
+    m_commandBuffer->begin();
+    vk::CmdResetQueryPool(m_commandBuffer->handle(), query_pool, 0, 1);
+    m_commandBuffer->end();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkQueryPool");
+    // Destroy query pool dependency prior to submit to cause ERROR
+    vk::DestroyQueryPool(m_device->device(), query_pool, NULL);
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, DeviceFeature2AndVertexAttributeDivisorExtensionUnenabled) {
+    TEST_DESCRIPTION(
+        "Test unenabled VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME & "
+        "VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME.");
+
+    VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT vadf = {};
+    vadf.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT;
+    VkPhysicalDeviceFeatures2 pd_features2 = {};
+    pd_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
+    pd_features2.pNext = &vadf;
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    vk_testing::QueueCreateInfoArray queue_info(m_device->queue_props);
+    VkDeviceCreateInfo device_create_info = {};
+    device_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
+    device_create_info.pNext = &pd_features2;
+    device_create_info.queueCreateInfoCount = queue_info.size();
+    device_create_info.pQueueCreateInfos = queue_info.data();
+    VkDevice testDevice;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VK_KHR_get_physical_device_properties2 must be enabled when it creates an instance");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VK_EXT_vertex_attribute_divisor must be enabled when it creates a device");
+    m_errorMonitor->SetUnexpectedError("Failed to create device chain");
+    vk::CreateDevice(gpu(), &device_create_info, NULL, &testDevice);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, BeginQueryOnTimestampPool) {
+    TEST_DESCRIPTION("Call CmdBeginQuery on a TIMESTAMP query pool.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkQueryPool query_pool;
+    VkQueryPoolCreateInfo query_pool_create_info{};
+    query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+    query_pool_create_info.queryType = VK_QUERY_TYPE_TIMESTAMP;
+    query_pool_create_info.queryCount = 1;
+    vk::CreateQueryPool(m_device->device(), &query_pool_create_info, nullptr, &query_pool);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBeginQuery-queryType-02804");
+    VkCommandBufferBeginInfo begin_info{};
+    begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+
+    vk::BeginCommandBuffer(m_commandBuffer->handle(), &begin_info);
+    vk::CmdResetQueryPool(m_commandBuffer->handle(), query_pool, 0, 1);
+    vk::CmdBeginQuery(m_commandBuffer->handle(), query_pool, 0, 0);
+    vk::EndCommandBuffer(m_commandBuffer->handle());
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyQueryPool(m_device->device(), query_pool, nullptr);
+}
+
+TEST_F(VkLayerTest, SwapchainAcquireImageNoSync) {
+    TEST_DESCRIPTION("Test vkAcquireNextImageKHR with VK_NULL_HANDLE semaphore and fence");
+
+    if (!AddSurfaceInstanceExtension()) {
+        printf("%s surface extensions not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (!AddSwapchainDeviceExtension()) {
+        printf("%s swapchain extensions not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    ASSERT_TRUE(InitSwapchain());
+
+    {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkAcquireNextImageKHR-semaphore-01780");
+        uint32_t dummy;
+        vk::AcquireNextImageKHR(device(), m_swapchain, UINT64_MAX, VK_NULL_HANDLE, VK_NULL_HANDLE, &dummy);
+        m_errorMonitor->VerifyFound();
+    }
+
+    DestroySwapchain();
+}
+
+TEST_F(VkLayerTest, SwapchainAcquireImageNoSync2KHR) {
+    TEST_DESCRIPTION("Test vkAcquireNextImage2KHR with VK_NULL_HANDLE semaphore and fence");
+    SetTargetApiVersion(VK_API_VERSION_1_1);
+
+    bool extension_dependency_satisfied = false;
+    if (InstanceExtensionSupported(VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME);
+        extension_dependency_satisfied = true;
+    } else if (m_instance_api_version < VK_API_VERSION_1_1) {
+        printf("%s vkAcquireNextImage2KHR not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    if (!AddSurfaceInstanceExtension()) {
+        printf("%s surface extensions not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (extension_dependency_satisfied && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DEVICE_GROUP_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_DEVICE_GROUP_EXTENSION_NAME);
+    } else if (DeviceValidationVersion() < VK_API_VERSION_1_1) {
+        printf("%s vkAcquireNextImage2KHR not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    if (!AddSwapchainDeviceExtension()) {
+        printf("%s swapchain extensions not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    ASSERT_TRUE(InitSwapchain());
+
+    {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkAcquireNextImageInfoKHR-semaphore-01782");
+        VkAcquireNextImageInfoKHR acquire_info = {VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR};
+        acquire_info.swapchain = m_swapchain;
+        acquire_info.timeout = UINT64_MAX;
+        acquire_info.semaphore = VK_NULL_HANDLE;
+        acquire_info.fence = VK_NULL_HANDLE;
+        acquire_info.deviceMask = 0x1;
+
+        uint32_t dummy;
+        vk::AcquireNextImage2KHR(device(), &acquire_info, &dummy);
+        m_errorMonitor->VerifyFound();
+    }
+
+    DestroySwapchain();
+}
+
+TEST_F(VkLayerTest, SwapchainAcquireImageNoBinarySemaphore) {
+    TEST_DESCRIPTION("Test vkAcquireNextImageKHR with non-binary semaphore");
+
+    if (!AddSurfaceInstanceExtension()) {
+        printf("%s surface extensions not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (!AddSwapchainDeviceExtension()) {
+        printf("%s swapchain extensions not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    ASSERT_TRUE(InitSwapchain());
+
+    VkSemaphoreTypeCreateInfoKHR semaphore_type_create_info{};
+    semaphore_type_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR;
+    semaphore_type_create_info.semaphoreType = VK_SEMAPHORE_TYPE_TIMELINE_KHR;
+
+    VkSemaphoreCreateInfo semaphore_create_info{};
+    semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+    semaphore_create_info.pNext = &semaphore_type_create_info;
+
+    VkSemaphore semaphore;
+    ASSERT_VK_SUCCESS(vk::CreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore));
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkAcquireNextImageKHR-semaphore-03265");
+    uint32_t image_i;
+    vk::AcquireNextImageKHR(device(), m_swapchain, UINT64_MAX, semaphore, VK_NULL_HANDLE, &image_i);
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroySemaphore(m_device->device(), semaphore, nullptr);
+    DestroySwapchain();
+}
+
+TEST_F(VkLayerTest, SwapchainAcquireImageNoBinarySemaphore2KHR) {
+    TEST_DESCRIPTION("Test vkAcquireNextImage2KHR with non-binary semaphore");
+    SetTargetApiVersion(VK_API_VERSION_1_1);
+
+    bool extension_dependency_satisfied = false;
+    if (InstanceExtensionSupported(VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME);
+        extension_dependency_satisfied = true;
+    } else if (m_instance_api_version < VK_API_VERSION_1_1) {
+        printf("%s vkAcquireNextImage2KHR not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    if (!AddSurfaceInstanceExtension()) {
+        printf("%s surface extensions not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (extension_dependency_satisfied && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DEVICE_GROUP_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_DEVICE_GROUP_EXTENSION_NAME);
+    } else if (DeviceValidationVersion() < VK_API_VERSION_1_1) {
+        printf("%s vkAcquireNextImage2KHR not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    if (!AddSwapchainDeviceExtension()) {
+        printf("%s swapchain extensions not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    ASSERT_TRUE(InitSwapchain());
+
+    VkSemaphoreTypeCreateInfoKHR semaphore_type_create_info{};
+    semaphore_type_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR;
+    semaphore_type_create_info.semaphoreType = VK_SEMAPHORE_TYPE_TIMELINE_KHR;
+
+    VkSemaphoreCreateInfo semaphore_create_info{};
+    semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+    semaphore_create_info.pNext = &semaphore_type_create_info;
+
+    VkSemaphore semaphore;
+    ASSERT_VK_SUCCESS(vk::CreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore));
+
+    VkAcquireNextImageInfoKHR acquire_info = {};
+    acquire_info.sType = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR;
+    acquire_info.swapchain = m_swapchain;
+    acquire_info.timeout = UINT64_MAX;
+    acquire_info.semaphore = semaphore;
+    acquire_info.deviceMask = 0x1;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkAcquireNextImageInfoKHR-semaphore-03266");
+    uint32_t image_i;
+    vk::AcquireNextImage2KHR(device(), &acquire_info, &image_i);
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroySemaphore(m_device->device(), semaphore, nullptr);
+    DestroySwapchain();
+}
+
+TEST_F(VkLayerTest, SwapchainAcquireTooManyImages) {
+    TEST_DESCRIPTION("Acquiring invalid amount of images from the swapchain.");
+
+    if (!AddSurfaceInstanceExtension()) return;
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (!AddSwapchainDeviceExtension()) return;
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    ASSERT_TRUE(InitSwapchain());
+    uint32_t image_count;
+    ASSERT_VK_SUCCESS(vk::GetSwapchainImagesKHR(device(), m_swapchain, &image_count, nullptr));
+    VkSurfaceCapabilitiesKHR caps;
+    ASSERT_VK_SUCCESS(vk::GetPhysicalDeviceSurfaceCapabilitiesKHR(gpu(), m_surface, &caps));
+
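+    // Acquire images up front until one more acquire would exceed the limit checked by
+    // VUID-vkAcquireNextImageKHR-swapchain-01802; the extra acquire below should then fail validation.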
+    const uint32_t acquirable_count = image_count - caps.minImageCount + 1;
+    std::vector<VkFenceObj> fences(acquirable_count);
+    for (uint32_t i = 0; i < acquirable_count; ++i) {
+        fences[i].init(*m_device, VkFenceObj::create_info());
+        uint32_t image_i = i;  // WORKAROUND: MockICD does not modify the value, so we have to set it ourselves or the validation layer's state gets corrupted
+        const auto res = vk::AcquireNextImageKHR(device(), m_swapchain, UINT64_MAX, VK_NULL_HANDLE, fences[i].handle(), &image_i);
+        ASSERT_TRUE(res == VK_SUCCESS || res == VK_SUBOPTIMAL_KHR);
+    }
+    VkFenceObj error_fence;
+    error_fence.init(*m_device, VkFenceObj::create_info());
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkAcquireNextImageKHR-swapchain-01802");
+    uint32_t image_i;
+    vk::AcquireNextImageKHR(device(), m_swapchain, UINT64_MAX, VK_NULL_HANDLE, error_fence.handle(), &image_i);
+    m_errorMonitor->VerifyFound();
+
+    // Cleanup
+    vk::WaitForFences(device(), fences.size(), MakeVkHandles<VkFence>(fences).data(), VK_TRUE, UINT64_MAX);
+    DestroySwapchain();
+}
+
+TEST_F(VkLayerTest, SwapchainAcquireTooManyImages2KHR) {
+    TEST_DESCRIPTION("Acquiring invalid amount of images from the swapchain via vkAcquireNextImage2KHR.");
+    SetTargetApiVersion(VK_API_VERSION_1_1);
+
+    bool extension_dependency_satisfied = false;
+    if (InstanceExtensionSupported(VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME);
+        extension_dependency_satisfied = true;
+    } else if (m_instance_api_version < VK_API_VERSION_1_1) {
+        printf("%s vkAcquireNextImage2KHR not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    if (!AddSurfaceInstanceExtension()) return;
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (extension_dependency_satisfied && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DEVICE_GROUP_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_DEVICE_GROUP_EXTENSION_NAME);
+    } else if (DeviceValidationVersion() < VK_API_VERSION_1_1) {
+        printf("%s vkAcquireNextImage2KHR not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    if (!AddSwapchainDeviceExtension()) return;
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    ASSERT_TRUE(InitSwapchain());
+    uint32_t image_count;
+    ASSERT_VK_SUCCESS(vk::GetSwapchainImagesKHR(device(), m_swapchain, &image_count, nullptr));
+    VkSurfaceCapabilitiesKHR caps;
+    ASSERT_VK_SUCCESS(vk::GetPhysicalDeviceSurfaceCapabilitiesKHR(gpu(), m_surface, &caps));
+
+    const uint32_t acquirable_count = image_count - caps.minImageCount + 1;
+    std::vector<VkFenceObj> fences(acquirable_count);
+    for (uint32_t i = 0; i < acquirable_count; ++i) {
+        fences[i].init(*m_device, VkFenceObj::create_info());
+        uint32_t image_i = i;  // WORKAROUND: MockICD does not modify the value, so we have to set it ourselves or the validation layer's state gets corrupted
+        const auto res = vk::AcquireNextImageKHR(device(), m_swapchain, UINT64_MAX, VK_NULL_HANDLE, fences[i].handle(), &image_i);
+        ASSERT_TRUE(res == VK_SUCCESS || res == VK_SUBOPTIMAL_KHR);
+    }
+    VkFenceObj error_fence;
+    error_fence.init(*m_device, VkFenceObj::create_info());
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkAcquireNextImage2KHR-swapchain-01803");
+    VkAcquireNextImageInfoKHR acquire_info = {VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR};
+    acquire_info.swapchain = m_swapchain;
+    acquire_info.timeout = UINT64_MAX;
+    acquire_info.fence = error_fence.handle();
+    acquire_info.deviceMask = 0x1;
+
+    uint32_t image_i;
+    vk::AcquireNextImage2KHR(device(), &acquire_info, &image_i);
+    m_errorMonitor->VerifyFound();
+
+    // Cleanup
+    vk::WaitForFences(device(), fences.size(), MakeVkHandles<VkFence>(fences).data(), VK_TRUE, UINT64_MAX);
+    DestroySwapchain();
+}
+
+TEST_F(VkLayerTest, InvalidDeviceMask) {
+    TEST_DESCRIPTION("Invalid deviceMask.");
+    SetTargetApiVersion(VK_API_VERSION_1_1);
+
+    bool support_surface = true;
+    if (!AddSurfaceInstanceExtension()) {
+        printf("%s surface extensions not supported, skipping VkAcquireNextImageInfoKHR test\n", kSkipPrefix);
+        support_surface = false;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (support_surface) {
+        if (!AddSwapchainDeviceExtension()) {
+            printf("%s swapchain extensions not supported, skipping BindSwapchainImageMemory test\n", kSkipPrefix);
+            support_surface = false;
+        }
+    }
+
+    if (DeviceValidationVersion() < VK_API_VERSION_1_1) {
+        printf("%s Device Groups requires Vulkan 1.1+, skipping test\n", kSkipPrefix);
+        return;
+    }
+    uint32_t physical_device_group_count = 0;
+    vk::EnumeratePhysicalDeviceGroups(instance(), &physical_device_group_count, nullptr);
+
+    if (physical_device_group_count == 0) {
+        printf("%s physical_device_group_count is 0, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    std::vector<VkPhysicalDeviceGroupProperties> physical_device_group(physical_device_group_count,
+                                                                       {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES});
+    vk::EnumeratePhysicalDeviceGroups(instance(), &physical_device_group_count, physical_device_group.data());
+    VkDeviceGroupDeviceCreateInfo create_device_pnext = {};
+    create_device_pnext.sType = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO;
+    create_device_pnext.physicalDeviceCount = physical_device_group[0].physicalDeviceCount;
+    create_device_pnext.pPhysicalDevices = physical_device_group[0].physicalDevices;
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &create_device_pnext, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    if (!InitSwapchain()) {
+        printf("%s Cannot create surface or swapchain, skipping VkAcquireNextImageInfoKHR test\n", kSkipPrefix);
+        support_surface = false;
+    }
+
+    // Test VkMemoryAllocateFlagsInfo
+    VkMemoryAllocateFlagsInfo alloc_flags_info = {};
+    alloc_flags_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO;
+    alloc_flags_info.flags = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT;
+    alloc_flags_info.deviceMask = 0xFFFFFFFF;
+    VkMemoryAllocateInfo alloc_info = {};
+    alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    alloc_info.pNext = &alloc_flags_info;
+    alloc_info.memoryTypeIndex = 0;
+    alloc_info.allocationSize = 32;
+
+    VkDeviceMemory mem;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateFlagsInfo-deviceMask-00675");
+    vk::AllocateMemory(m_device->device(), &alloc_info, NULL, &mem);
+    m_errorMonitor->VerifyFound();
+
+    alloc_flags_info.deviceMask = 0;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateFlagsInfo-deviceMask-00676");
+    vk::AllocateMemory(m_device->device(), &alloc_info, NULL, &mem);
+    m_errorMonitor->VerifyFound();
+
+    // Test VkDeviceGroupCommandBufferBeginInfo
+    VkDeviceGroupCommandBufferBeginInfo dev_grp_cmd_buf_info = {};
+    dev_grp_cmd_buf_info.sType = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO;
+    dev_grp_cmd_buf_info.deviceMask = 0xFFFFFFFF;
+    VkCommandBufferBeginInfo cmd_buf_info = {};
+    cmd_buf_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+    cmd_buf_info.pNext = &dev_grp_cmd_buf_info;
+
+    m_commandBuffer->reset();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkDeviceGroupCommandBufferBeginInfo-deviceMask-00106");
+    vk::BeginCommandBuffer(m_commandBuffer->handle(), &cmd_buf_info);
+    m_errorMonitor->VerifyFound();
+
+    dev_grp_cmd_buf_info.deviceMask = 0;
+    m_commandBuffer->reset();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkDeviceGroupCommandBufferBeginInfo-deviceMask-00107");
+    vk::BeginCommandBuffer(m_commandBuffer->handle(), &cmd_buf_info);
+    m_errorMonitor->VerifyFound();
+
+    // Test VkDeviceGroupRenderPassBeginInfo
+    dev_grp_cmd_buf_info.deviceMask = 0x00000001;
+    m_commandBuffer->reset();
+    vk::BeginCommandBuffer(m_commandBuffer->handle(), &cmd_buf_info);
+
+    VkDeviceGroupRenderPassBeginInfo dev_grp_rp_info = {};
+    dev_grp_rp_info.sType = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO;
+    dev_grp_rp_info.deviceMask = 0xFFFFFFFF;
+    m_renderPassBeginInfo.pNext = &dev_grp_rp_info;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00905");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00907");
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
+    m_errorMonitor->VerifyFound();
+
+    dev_grp_rp_info.deviceMask = 0;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00906");
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
+    m_errorMonitor->VerifyFound();
+
+    dev_grp_rp_info.deviceMask = 0x00000001;
+    dev_grp_rp_info.deviceRenderAreaCount = physical_device_group[0].physicalDeviceCount + 1;
+    std::vector<VkRect2D> device_render_areas(dev_grp_rp_info.deviceRenderAreaCount, m_renderPassBeginInfo.renderArea);
+    dev_grp_rp_info.pDeviceRenderAreas = device_render_areas.data();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkDeviceGroupRenderPassBeginInfo-deviceRenderAreaCount-00908");
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
+    m_errorMonitor->VerifyFound();
+
+    // Test vk::CmdSetDeviceMask()
+    vk::CmdSetDeviceMask(m_commandBuffer->handle(), 0x00000001);
+
+    dev_grp_rp_info.deviceRenderAreaCount = physical_device_group[0].physicalDeviceCount;
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetDeviceMask-deviceMask-00108");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetDeviceMask-deviceMask-00110");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetDeviceMask-deviceMask-00111");
+    vk::CmdSetDeviceMask(m_commandBuffer->handle(), 0xFFFFFFFF);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetDeviceMask-deviceMask-00109");
+    vk::CmdSetDeviceMask(m_commandBuffer->handle(), 0);
+    m_errorMonitor->VerifyFound();
+
+    VkSemaphoreCreateInfo semaphore_create_info = {};
+    semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+    VkSemaphore semaphore;
+    ASSERT_VK_SUCCESS(vk::CreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore));
+    VkSemaphore semaphore2;
+    ASSERT_VK_SUCCESS(vk::CreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore2));
+    VkFenceCreateInfo fence_create_info = {};
+    fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+    VkFence fence;
+    ASSERT_VK_SUCCESS(vk::CreateFence(m_device->device(), &fence_create_info, nullptr, &fence));
+
+    if (support_surface) {
+        // Test VkAcquireNextImageInfoKHR
+        uint32_t imageIndex = 0;
+        VkAcquireNextImageInfoKHR acquire_next_image_info = {};
+        acquire_next_image_info.sType = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR;
+        acquire_next_image_info.semaphore = semaphore;
+        acquire_next_image_info.swapchain = m_swapchain;
+        acquire_next_image_info.fence = fence;
+        acquire_next_image_info.deviceMask = 0xFFFFFFFF;
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkAcquireNextImageInfoKHR-deviceMask-01290");
+        vk::AcquireNextImage2KHR(m_device->device(), &acquire_next_image_info, &imageIndex);
+        m_errorMonitor->VerifyFound();
+
+        vk::WaitForFences(m_device->device(), 1, &fence, VK_TRUE, std::numeric_limits<int>::max());
+        vk::ResetFences(m_device->device(), 1, &fence);
+
+        acquire_next_image_info.semaphore = semaphore2;
+        acquire_next_image_info.deviceMask = 0;
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkAcquireNextImageInfoKHR-deviceMask-01291");
+        vk::AcquireNextImage2KHR(m_device->device(), &acquire_next_image_info, &imageIndex);
+        m_errorMonitor->VerifyFound();
+        DestroySwapchain();
+    }
+
+    // Test VkDeviceGroupSubmitInfo
+    VkDeviceGroupSubmitInfo device_group_submit_info = {};
+    device_group_submit_info.sType = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO;
+    device_group_submit_info.commandBufferCount = 1;
+    std::array<uint32_t, 1> command_buffer_device_masks = {0xFFFFFFFF};
+    device_group_submit_info.pCommandBufferDeviceMasks = command_buffer_device_masks.data();
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.pNext = &device_group_submit_info;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+
+    m_commandBuffer->reset();
+    vk::BeginCommandBuffer(m_commandBuffer->handle(), &cmd_buf_info);
+    vk::EndCommandBuffer(m_commandBuffer->handle());
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkDeviceGroupSubmitInfo-pCommandBufferDeviceMasks-00086");
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+    vk::QueueWaitIdle(m_device->m_queue);
+
+    vk::WaitForFences(m_device->device(), 1, &fence, VK_TRUE, std::numeric_limits<int>::max());
+    vk::DestroyFence(m_device->device(), fence, nullptr);
+    vk::DestroySemaphore(m_device->device(), semaphore, nullptr);
+    vk::DestroySemaphore(m_device->device(), semaphore2, nullptr);
+}
+
+TEST_F(VkLayerTest, ValidationCacheTestBadMerge) {
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceExtensionSupported(gpu(), "VK_LAYER_LUNARG_core_validation", VK_EXT_VALIDATION_CACHE_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_EXT_VALIDATION_CACHE_EXTENSION_NAME);
+    } else {
+        printf("%s %s not supported, skipping test\n", kSkipPrefix, VK_EXT_VALIDATION_CACHE_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    // Load extension functions
+    auto fpCreateValidationCache =
+        (PFN_vkCreateValidationCacheEXT)vk::GetDeviceProcAddr(m_device->device(), "vkCreateValidationCacheEXT");
+    auto fpDestroyValidationCache =
+        (PFN_vkDestroyValidationCacheEXT)vk::GetDeviceProcAddr(m_device->device(), "vkDestroyValidationCacheEXT");
+    auto fpMergeValidationCaches =
+        (PFN_vkMergeValidationCachesEXT)vk::GetDeviceProcAddr(m_device->device(), "vkMergeValidationCachesEXT");
+    if (!fpCreateValidationCache || !fpDestroyValidationCache || !fpMergeValidationCaches) {
+        printf("%s Failed to load function pointers for %s\n", kSkipPrefix, VK_EXT_VALIDATION_CACHE_EXTENSION_NAME);
+        return;
+    }
+
+    VkValidationCacheCreateInfoEXT validationCacheCreateInfo;
+    validationCacheCreateInfo.sType = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT;
+    validationCacheCreateInfo.pNext = NULL;
+    validationCacheCreateInfo.initialDataSize = 0;
+    validationCacheCreateInfo.pInitialData = NULL;
+    validationCacheCreateInfo.flags = 0;
+    VkValidationCacheEXT validationCache = VK_NULL_HANDLE;
+    VkResult res = fpCreateValidationCache(m_device->device(), &validationCacheCreateInfo, nullptr, &validationCache);
+    ASSERT_VK_SUCCESS(res);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkMergeValidationCachesEXT-dstCache-01536");
+    res = fpMergeValidationCaches(m_device->device(), validationCache, 1, &validationCache);
+    m_errorMonitor->VerifyFound();
+
+    fpDestroyValidationCache(m_device->device(), validationCache, nullptr);
+}
+
+TEST_F(VkLayerTest, InvalidQueueFamilyIndex) {
+    // Miscellaneous queueFamilyIndex validation tests
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+    VkBufferCreateInfo buffCI = {};
+    buffCI.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    buffCI.size = 1024;
+    buffCI.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+    buffCI.queueFamilyIndexCount = 2;
+    // Introduce failure by specifying invalid queue_family_index
+    uint32_t qfi[2];
+    qfi[0] = 777;
+    qfi[1] = 0;
+
+    buffCI.pQueueFamilyIndices = qfi;
+    buffCI.sharingMode = VK_SHARING_MODE_CONCURRENT;  // qfi only matters in CONCURRENT mode
+
+    // Test for queue family index out of range
+    CreateBufferTest(*this, &buffCI, "VUID-VkBufferCreateInfo-sharingMode-01419");
+
+    // Test for non-unique QFI in array
+    qfi[0] = 0;
+    CreateBufferTest(*this, &buffCI, "VUID-VkBufferCreateInfo-sharingMode-01419");
+
+    if (m_device->queue_props.size() > 2) {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "which was not created allowing concurrent");
+
+        // Create a buffer shared between queue families 1 and 2, but submit it on queue family 0
+        buffCI.queueFamilyIndexCount = 2;
+        qfi[0] = 1;
+        qfi[1] = 2;
+        VkBufferObj ib;
+        ib.init(*m_device, buffCI);
+
+        m_commandBuffer->begin();
+        vk::CmdFillBuffer(m_commandBuffer->handle(), ib.handle(), 0, 16, 5);
+        m_commandBuffer->end();
+        m_commandBuffer->QueueCommandBuffer(false);
+        m_errorMonitor->VerifyFound();
+    }
+}
+
+TEST_F(VkLayerTest, InvalidQueryPoolCreate) {
+    TEST_DESCRIPTION("Attempt to create a query pool for PIPELINE_STATISTICS without enabling pipeline stats for the device.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    vk_testing::QueueCreateInfoArray queue_info(m_device->queue_props);
+
+    VkDevice local_device;
+    VkDeviceCreateInfo device_create_info = {};
+    auto features = m_device->phy().features();
+    // Intentionally disable pipeline stats
+    features.pipelineStatisticsQuery = VK_FALSE;
+    device_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
+    device_create_info.pNext = NULL;
+    device_create_info.queueCreateInfoCount = queue_info.size();
+    device_create_info.pQueueCreateInfos = queue_info.data();
+    device_create_info.enabledLayerCount = 0;
+    device_create_info.ppEnabledLayerNames = NULL;
+    device_create_info.pEnabledFeatures = &features;
+    VkResult err = vk::CreateDevice(gpu(), &device_create_info, nullptr, &local_device);
+    ASSERT_VK_SUCCESS(err);
+
+    VkQueryPoolCreateInfo qpci{};
+    qpci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+    qpci.queryType = VK_QUERY_TYPE_PIPELINE_STATISTICS;
+    qpci.queryCount = 1;
+    VkQueryPool query_pool;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkQueryPoolCreateInfo-queryType-00791");
+    vk::CreateQueryPool(local_device, &qpci, nullptr, &query_pool);
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyDevice(local_device, nullptr);
+}
+
+TEST_F(VkLayerTest, UnclosedQuery) {
+    TEST_DESCRIPTION("End a command buffer with a query still in progress.");
+
+    const char *invalid_query = "VUID-vkEndCommandBuffer-commandBuffer-00061";
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkEvent event;
+    VkEventCreateInfo event_create_info{};
+    event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+    vk::CreateEvent(m_device->device(), &event_create_info, nullptr, &event);
+
+    VkQueue queue = VK_NULL_HANDLE;
+    vk::GetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 0, &queue);
+
+    m_commandBuffer->begin();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, invalid_query);
+
+    VkQueryPool query_pool;
+    VkQueryPoolCreateInfo query_pool_create_info = {};
+    query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+    query_pool_create_info.queryType = VK_QUERY_TYPE_OCCLUSION;
+    query_pool_create_info.queryCount = 1;
+    vk::CreateQueryPool(m_device->device(), &query_pool_create_info, nullptr, &query_pool);
+
+    vk::CmdResetQueryPool(m_commandBuffer->handle(), query_pool, 0 /*startQuery*/, 1 /*queryCount*/);
+    vk::CmdBeginQuery(m_commandBuffer->handle(), query_pool, 0, 0);
+
+    vk::EndCommandBuffer(m_commandBuffer->handle());
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyQueryPool(m_device->device(), query_pool, nullptr);
+    vk::DestroyEvent(m_device->device(), event, nullptr);
+}
+
+TEST_F(VkLayerTest, QueryPreciseBit) {
+    TEST_DESCRIPTION("Check for correct Query Precise Bit circumstances.");
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // These tests require that the device support pipeline statistics query
+    VkPhysicalDeviceFeatures device_features = {};
+    ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
+    if (VK_TRUE != device_features.pipelineStatisticsQuery) {
+        printf("%s Test requires unsupported pipelineStatisticsQuery feature. Skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    std::vector<const char *> device_extension_names;
+    auto features = m_device->phy().features();
+
+    // Test for precise bit when query type is not OCCLUSION
+    if (features.occlusionQueryPrecise) {
+        VkEvent event;
+        VkEventCreateInfo event_create_info{};
+        event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+        vk::CreateEvent(m_device->handle(), &event_create_info, nullptr, &event);
+
+        m_commandBuffer->begin();
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBeginQuery-queryType-00800");
+
+        VkQueryPool query_pool;
+        VkQueryPoolCreateInfo query_pool_create_info = {};
+        query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+        query_pool_create_info.queryType = VK_QUERY_TYPE_PIPELINE_STATISTICS;
+        query_pool_create_info.queryCount = 1;
+        vk::CreateQueryPool(m_device->handle(), &query_pool_create_info, nullptr, &query_pool);
+
+        vk::CmdResetQueryPool(m_commandBuffer->handle(), query_pool, 0, 1);
+        vk::CmdBeginQuery(m_commandBuffer->handle(), query_pool, 0, VK_QUERY_CONTROL_PRECISE_BIT);
+        m_errorMonitor->VerifyFound();
+
+        m_commandBuffer->end();
+        vk::DestroyQueryPool(m_device->handle(), query_pool, nullptr);
+        vk::DestroyEvent(m_device->handle(), event, nullptr);
+    }
+
+    // Test for precise bit when precise feature is not available
+    features.occlusionQueryPrecise = false;
+    VkDeviceObj test_device(0, gpu(), device_extension_names, &features);
+
+    VkCommandPoolCreateInfo pool_create_info{};
+    pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+    pool_create_info.queueFamilyIndex = test_device.graphics_queue_node_index_;
+
+    VkCommandPool command_pool;
+    vk::CreateCommandPool(test_device.handle(), &pool_create_info, nullptr, &command_pool);
+
+    VkCommandBufferAllocateInfo cmd = {};
+    cmd.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+    cmd.pNext = NULL;
+    cmd.commandPool = command_pool;
+    cmd.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+    cmd.commandBufferCount = 1;
+
+    VkCommandBuffer cmd_buffer;
+    VkResult err = vk::AllocateCommandBuffers(test_device.handle(), &cmd, &cmd_buffer);
+    ASSERT_VK_SUCCESS(err);
+
+    VkEvent event;
+    VkEventCreateInfo event_create_info{};
+    event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+    vk::CreateEvent(test_device.handle(), &event_create_info, nullptr, &event);
+
+    VkCommandBufferBeginInfo begin_info = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr,
+                                           VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, nullptr};
+
+    vk::BeginCommandBuffer(cmd_buffer, &begin_info);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBeginQuery-queryType-00800");
+
+    VkQueryPool query_pool;
+    VkQueryPoolCreateInfo query_pool_create_info = {};
+    query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+    query_pool_create_info.queryType = VK_QUERY_TYPE_OCCLUSION;
+    query_pool_create_info.queryCount = 1;
+    vk::CreateQueryPool(test_device.handle(), &query_pool_create_info, nullptr, &query_pool);
+
+    vk::CmdResetQueryPool(cmd_buffer, query_pool, 0, 1);
+    vk::CmdBeginQuery(cmd_buffer, query_pool, 0, VK_QUERY_CONTROL_PRECISE_BIT);
+    m_errorMonitor->VerifyFound();
+
+    vk::EndCommandBuffer(cmd_buffer);
+    vk::DestroyQueryPool(test_device.handle(), query_pool, nullptr);
+    vk::DestroyEvent(test_device.handle(), event, nullptr);
+    vk::DestroyCommandPool(test_device.handle(), command_pool, nullptr);
+}
+
+TEST_F(VkLayerTest, StageMaskGsTsEnabled) {
+    TEST_DESCRIPTION(
+        "Attempt to use a stageMask w/ geometry shader and tesselation shader bits enabled when those features are disabled on the "
+        "device.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    std::vector<const char *> device_extension_names;
+    auto features = m_device->phy().features();
+    // Make sure gs & ts are disabled
+    features.geometryShader = false;
+    features.tessellationShader = false;
+    // The sacrificial device object
+    VkDeviceObj test_device(0, gpu(), device_extension_names, &features);
+
+    VkCommandPoolCreateInfo pool_create_info{};
+    pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+    pool_create_info.queueFamilyIndex = test_device.graphics_queue_node_index_;
+
+    VkCommandPool command_pool;
+    vk::CreateCommandPool(test_device.handle(), &pool_create_info, nullptr, &command_pool);
+
+    VkCommandBufferAllocateInfo cmd = {};
+    cmd.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+    cmd.pNext = NULL;
+    cmd.commandPool = command_pool;
+    cmd.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+    cmd.commandBufferCount = 1;
+
+    VkCommandBuffer cmd_buffer;
+    VkResult err = vk::AllocateCommandBuffers(test_device.handle(), &cmd, &cmd_buffer);
+    ASSERT_VK_SUCCESS(err);
+
+    VkEvent event;
+    VkEventCreateInfo evci = {};
+    evci.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+    VkResult result = vk::CreateEvent(test_device.handle(), &evci, NULL, &event);
+    ASSERT_VK_SUCCESS(result);
+
+    VkCommandBufferBeginInfo cbbi = {};
+    cbbi.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+    vk::BeginCommandBuffer(cmd_buffer, &cbbi);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetEvent-stageMask-01150");
+    vk::CmdSetEvent(cmd_buffer, event, VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetEvent-stageMask-01151");
+    vk::CmdSetEvent(cmd_buffer, event, VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT);
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyEvent(test_device.handle(), event, NULL);
+    vk::DestroyCommandPool(test_device.handle(), command_pool, NULL);
+}
+
+TEST_F(VkLayerTest, DescriptorPoolInUseDestroyedSignaled) {
+    TEST_DESCRIPTION("Delete a DescriptorPool with a DescriptorSet that is in use.");
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // Create image to update the descriptor with
+    VkImageObj image(m_device);
+    image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(image.initialized());
+
+    VkImageView view = image.targetView(VK_FORMAT_B8G8R8A8_UNORM);
+    // Create Sampler
+    VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+    VkSampler sampler;
+    VkResult err = vk::CreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+    ASSERT_VK_SUCCESS(err);
+
+    // Create PSO to be used for draw-time errors below
+    VkShaderObj fs(m_device, bindStateFragSamplerShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.shader_stages_ = {pipe.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
+    pipe.dsl_bindings_ = {
+        {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
+    };
+    const VkDynamicState dyn_states[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
+    VkPipelineDynamicStateCreateInfo dyn_state_ci = {};
+    dyn_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
+    dyn_state_ci.dynamicStateCount = size(dyn_states);
+    dyn_state_ci.pDynamicStates = dyn_states;
+    pipe.dyn_state_ci_ = dyn_state_ci;
+    pipe.InitState();
+    pipe.CreateGraphicsPipeline();
+
+    // Update descriptor with image and sampler
+    pipe.descriptor_set_->WriteDescriptorImageInfo(0, view, sampler, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
+    pipe.descriptor_set_->UpdateDescriptorSets();
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
+                              &pipe.descriptor_set_->set_, 0, NULL);
+
+    VkViewport viewport = {0, 0, 16, 16, 0, 1};
+    VkRect2D scissor = {{0, 0}, {16, 16}};
+    vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+    vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+
+    m_commandBuffer->Draw(1, 0, 0, 0);
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+    // Submit cmd buffer to put pool in-flight
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    // Destroy pool while in-flight, causing error
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyDescriptorPool-descriptorPool-00303");
+    vk::DestroyDescriptorPool(m_device->device(), pipe.descriptor_set_->pool_, NULL);
+    m_errorMonitor->VerifyFound();
+    vk::QueueWaitIdle(m_device->m_queue);
+    // Cleanup
+    vk::DestroySampler(m_device->device(), sampler, NULL);
+    m_errorMonitor->SetUnexpectedError(
+        "If descriptorPool is not VK_NULL_HANDLE, descriptorPool must be a valid VkDescriptorPool handle");
+    m_errorMonitor->SetUnexpectedError("Unable to remove DescriptorPool obj");
+    // TODO : It seems Validation layers think ds_pool was already destroyed, even though it wasn't?
+}
+
+TEST_F(VkLayerTest, FramebufferInUseDestroyedSignaled) {
+    TEST_DESCRIPTION("Delete in-use framebuffer.");
+    ASSERT_NO_FATAL_FAILURE(Init());
+    VkFormatProperties format_properties;
+    VkResult err = VK_SUCCESS;
+    vk::GetPhysicalDeviceFormatProperties(gpu(), VK_FORMAT_B8G8R8A8_UNORM, &format_properties);
+
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkImageObj image(m_device);
+    image.Init(256, 256, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(image.initialized());
+    VkImageView view = image.targetView(VK_FORMAT_B8G8R8A8_UNORM);
+
+    VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, m_renderPass, 1, &view, 256, 256, 1};
+    VkFramebuffer fb;
+    err = vk::CreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
+    ASSERT_VK_SUCCESS(err);
+
+    // Just use default renderpass with our framebuffer
+    m_renderPassBeginInfo.framebuffer = fb;
+    // Create Null cmd buffer for submit
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+    // Submit cmd buffer to put it in-flight
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    // Destroy framebuffer while in-flight
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyFramebuffer-framebuffer-00892");
+    vk::DestroyFramebuffer(m_device->device(), fb, NULL);
+    m_errorMonitor->VerifyFound();
+    // Wait for queue to complete so we can safely destroy everything
+    vk::QueueWaitIdle(m_device->m_queue);
+    m_errorMonitor->SetUnexpectedError("If framebuffer is not VK_NULL_HANDLE, framebuffer must be a valid VkFramebuffer handle");
+    m_errorMonitor->SetUnexpectedError("Unable to remove Framebuffer obj");
+    vk::DestroyFramebuffer(m_device->device(), fb, nullptr);
+}
+
+TEST_F(VkLayerTest, FramebufferImageInUseDestroyedSignaled) {
+    TEST_DESCRIPTION("Delete in-use image that's child of framebuffer.");
+    ASSERT_NO_FATAL_FAILURE(Init());
+    VkFormatProperties format_properties;
+    VkResult err = VK_SUCCESS;
+    vk::GetPhysicalDeviceFormatProperties(gpu(), VK_FORMAT_B8G8R8A8_UNORM, &format_properties);
+
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkImageCreateInfo image_ci = {};
+    image_ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_ci.pNext = NULL;
+    image_ci.imageType = VK_IMAGE_TYPE_2D;
+    image_ci.format = VK_FORMAT_B8G8R8A8_UNORM;
+    image_ci.extent.width = 256;
+    image_ci.extent.height = 256;
+    image_ci.extent.depth = 1;
+    image_ci.mipLevels = 1;
+    image_ci.arrayLayers = 1;
+    image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_ci.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+    image_ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    image_ci.flags = 0;
+    VkImageObj image(m_device);
+    image.init(&image_ci);
+
+    VkImageView view = image.targetView(VK_FORMAT_B8G8R8A8_UNORM);
+
+    VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, m_renderPass, 1, &view, 256, 256, 1};
+    VkFramebuffer fb;
+    err = vk::CreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
+    ASSERT_VK_SUCCESS(err);
+
+    // Just use default renderpass with our framebuffer
+    m_renderPassBeginInfo.framebuffer = fb;
+    // Create Null cmd buffer for submit
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+    // Submit cmd buffer to put it (and attached imageView) in-flight
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    // Submit cmd buffer to put framebuffer and children in-flight
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    // Destroy image attached to framebuffer while in-flight
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyImage-image-01000");
+    vk::DestroyImage(m_device->device(), image.handle(), NULL);
+    m_errorMonitor->VerifyFound();
+    // Wait for queue to complete so we can safely destroy image and other objects
+    vk::QueueWaitIdle(m_device->m_queue);
+    m_errorMonitor->SetUnexpectedError("If image is not VK_NULL_HANDLE, image must be a valid VkImage handle");
+    m_errorMonitor->SetUnexpectedError("Unable to remove Image obj");
+    vk::DestroyFramebuffer(m_device->device(), fb, nullptr);
+}
+
+TEST_F(VkLayerTest, EventInUseDestroyedSignaled) {
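+    // Record a CmdSetEvent, destroy the event, then submit; the submit should be flagged for referencing a destroyed event.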
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    m_commandBuffer->begin();
+
+    VkEvent event;
+    VkEventCreateInfo event_create_info = {};
+    event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+    vk::CreateEvent(m_device->device(), &event_create_info, nullptr, &event);
+    vk::CmdSetEvent(m_commandBuffer->handle(), event, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);
+
+    m_commandBuffer->end();
+    vk::DestroyEvent(m_device->device(), event, nullptr);
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "that is invalid because bound");
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InUseDestroyedSignaled) {
+    TEST_DESCRIPTION(
+        "Use vkCmdExecuteCommands with invalid state in primary and secondary command buffers. Delete objects that are in use. "
+        "Call VkQueueSubmit with an event that has been deleted.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    m_errorMonitor->ExpectSuccess();
+
+    VkSemaphoreCreateInfo semaphore_create_info = {};
+    semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+    VkSemaphore semaphore;
+    ASSERT_VK_SUCCESS(vk::CreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore));
+    VkFenceCreateInfo fence_create_info = {};
+    fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+    VkFence fence;
+    ASSERT_VK_SUCCESS(vk::CreateFence(m_device->device(), &fence_create_info, nullptr, &fence));
+
+    VkBufferTest buffer_test(m_device, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.InitState();
+    pipe.CreateGraphicsPipeline();
+
+    pipe.descriptor_set_->WriteDescriptorBufferInfo(0, buffer_test.GetBuffer(), 1024, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
+    pipe.descriptor_set_->UpdateDescriptorSets();
+
+    VkEvent event;
+    VkEventCreateInfo event_create_info = {};
+    event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+    vk::CreateEvent(m_device->device(), &event_create_info, nullptr, &event);
+
+    m_commandBuffer->begin();
+
+    vk::CmdSetEvent(m_commandBuffer->handle(), event, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);
+
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
+                              &pipe.descriptor_set_->set_, 0, NULL);
+
+    m_commandBuffer->end();
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    submit_info.signalSemaphoreCount = 1;
+    submit_info.pSignalSemaphores = &semaphore;
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, fence);
+    m_errorMonitor->Reset();  // resume logmsg processing
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyEvent-event-01145");
+    vk::DestroyEvent(m_device->device(), event, nullptr);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroySemaphore-semaphore-01137");
+    vk::DestroySemaphore(m_device->device(), semaphore, nullptr);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyFence-fence-01120");
+    vk::DestroyFence(m_device->device(), fence, nullptr);
+    m_errorMonitor->VerifyFound();
+
+    vk::QueueWaitIdle(m_device->m_queue);
+    m_errorMonitor->SetUnexpectedError("If semaphore is not VK_NULL_HANDLE, semaphore must be a valid VkSemaphore handle");
+    m_errorMonitor->SetUnexpectedError("Unable to remove Semaphore obj");
+    vk::DestroySemaphore(m_device->device(), semaphore, nullptr);
+    m_errorMonitor->SetUnexpectedError("If fence is not VK_NULL_HANDLE, fence must be a valid VkFence handle");
+    m_errorMonitor->SetUnexpectedError("Unable to remove Fence obj");
+    vk::DestroyFence(m_device->device(), fence, nullptr);
+    m_errorMonitor->SetUnexpectedError("If event is not VK_NULL_HANDLE, event must be a valid VkEvent handle");
+    m_errorMonitor->SetUnexpectedError("Unable to remove Event obj");
+    vk::DestroyEvent(m_device->device(), event, nullptr);
+}
+
+TEST_F(VkLayerTest, EventStageMaskOneCommandBufferPass) {
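+    // Positive test: CmdWaitEvents srcStageMask matches the stageMask used in CmdSetEvent within a single command buffer.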
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkCommandBufferObj commandBuffer1(m_device, m_commandPool);
+    VkCommandBufferObj commandBuffer2(m_device, m_commandPool);
+
+    VkEvent event;
+    VkEventCreateInfo event_create_info = {};
+    event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+    vk::CreateEvent(m_device->device(), &event_create_info, nullptr, &event);
+
+    commandBuffer1.begin();
+    vk::CmdSetEvent(commandBuffer1.handle(), event, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
+    vk::CmdWaitEvents(commandBuffer1.handle(), 1, &event, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
+                      VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, nullptr, 0, nullptr, 0, nullptr);
+    commandBuffer1.end();
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &commandBuffer1.handle();
+    m_errorMonitor->ExpectSuccess();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyNotFound();
+    vk::QueueWaitIdle(m_device->m_queue);
+
+    vk::DestroyEvent(m_device->device(), event, nullptr);
+}
+
+TEST_F(VkLayerTest, EventStageMaskOneCommandBufferFail) {
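+    // Negative test: CmdWaitEvents srcStageMask does not match the CmdSetEvent stageMask; the error is reported at submit time.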
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkCommandBufferObj commandBuffer1(m_device, m_commandPool);
+    VkCommandBufferObj commandBuffer2(m_device, m_commandPool);
+
+    VkEvent event;
+    VkEventCreateInfo event_create_info = {};
+    event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+    vk::CreateEvent(m_device->device(), &event_create_info, nullptr, &event);
+
+    commandBuffer1.begin();
+    vk::CmdSetEvent(commandBuffer1.handle(), event, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
+    // wrong srcStageMask
+    vk::CmdWaitEvents(commandBuffer1.handle(), 1, &event, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
+                      0, nullptr, 0, nullptr, 0, nullptr);
+    commandBuffer1.end();
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &commandBuffer1.handle();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdWaitEvents-srcStageMask-parameter");
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+    vk::QueueWaitIdle(m_device->m_queue);
+
+    vk::DestroyEvent(m_device->device(), event, nullptr);
+}
+
+TEST_F(VkLayerTest, EventStageMaskTwoCommandBufferPass) {
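+    // Positive test: the event is set in one command buffer and waited on in a second with a matching srcStageMask.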
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkCommandBufferObj commandBuffer1(m_device, m_commandPool);
+    VkCommandBufferObj commandBuffer2(m_device, m_commandPool);
+
+    VkEvent event;
+    VkEventCreateInfo event_create_info = {};
+    event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+    vk::CreateEvent(m_device->device(), &event_create_info, nullptr, &event);
+
+    commandBuffer1.begin();
+    vk::CmdSetEvent(commandBuffer1.handle(), event, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
+    commandBuffer1.end();
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &commandBuffer1.handle();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+
+    commandBuffer2.begin();
+    vk::CmdWaitEvents(commandBuffer2.handle(), 1, &event, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
+                      VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, nullptr, 0, nullptr, 0, nullptr);
+    commandBuffer2.end();
+
+    submit_info.pCommandBuffers = &commandBuffer2.handle();
+    m_errorMonitor->ExpectSuccess();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyNotFound();
+    vk::QueueWaitIdle(m_device->m_queue);
+
+    vk::DestroyEvent(m_device->device(), event, nullptr);
+}
+
+TEST_F(VkLayerTest, EventStageMaskTwoCommandBufferFail) {
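+    // Negative test: the event is set in one command buffer and waited on in a second with a mismatched srcStageMask.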
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkCommandBufferObj commandBuffer1(m_device, m_commandPool);
+    VkCommandBufferObj commandBuffer2(m_device, m_commandPool);
+
+    VkEvent event;
+    VkEventCreateInfo event_create_info = {};
+    event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+    vk::CreateEvent(m_device->device(), &event_create_info, nullptr, &event);
+
+    commandBuffer1.begin();
+    vk::CmdSetEvent(commandBuffer1.handle(), event, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
+    commandBuffer1.end();
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &commandBuffer1.handle();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+
+    commandBuffer2.begin();
+    // wrong srcStageMask
+    vk::CmdWaitEvents(commandBuffer2.handle(), 1, &event, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
+                      0, nullptr, 0, nullptr, 0, nullptr);
+    commandBuffer2.end();
+
+    submit_info.pCommandBuffers = &commandBuffer2.handle();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdWaitEvents-srcStageMask-parameter");
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+    vk::QueueWaitIdle(m_device->m_queue);
+
+    vk::DestroyEvent(m_device->device(), event, nullptr);
+}
+
+TEST_F(VkLayerTest, QueryPoolPartialTimestamp) {
+    TEST_DESCRIPTION("Request partial result on timestamp query.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkQueryPool query_pool;
+    VkQueryPoolCreateInfo query_pool_ci{};
+    query_pool_ci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+    query_pool_ci.queryType = VK_QUERY_TYPE_TIMESTAMP;
+    query_pool_ci.queryCount = 1;
+    vk::CreateQueryPool(m_device->device(), &query_pool_ci, nullptr, &query_pool);
+
+    // Use setup as a positive test...
+    m_errorMonitor->ExpectSuccess();
+    m_commandBuffer->begin();
+    vk::CmdResetQueryPool(m_commandBuffer->handle(), query_pool, 0, 1);
+    vk::CmdWriteTimestamp(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, query_pool, 0);
+    m_commandBuffer->end();
+
+    // Submit cmd buffer and wait for it.
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    vk::QueueWaitIdle(m_device->m_queue);
+    m_errorMonitor->VerifyNotFound();
+
+    // Attempt to obtain partial results.
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetQueryPoolResults-queryType-00818");
+    uint32_t data_space[16];
+    m_errorMonitor->SetUnexpectedError("Cannot get query results on queryPool");
+    vk::GetQueryPoolResults(m_device->handle(), query_pool, 0, 1, sizeof(data_space), &data_space, sizeof(uint32_t),
+                            VK_QUERY_RESULT_PARTIAL_BIT);
+    m_errorMonitor->VerifyFound();
+
+    // Destroy query pool.
+    vk::DestroyQueryPool(m_device->handle(), query_pool, NULL);
+}
+
+TEST_F(VkLayerTest, QueryPoolInUseDestroyedSignaled) {
+    TEST_DESCRIPTION("Delete in-use query pool.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkQueryPool query_pool;
+    VkQueryPoolCreateInfo query_pool_ci{};
+    query_pool_ci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+    query_pool_ci.queryType = VK_QUERY_TYPE_TIMESTAMP;
+    query_pool_ci.queryCount = 1;
+    vk::CreateQueryPool(m_device->device(), &query_pool_ci, nullptr, &query_pool);
+
+    m_commandBuffer->begin();
+    // Use query pool to create binding with cmd buffer
+    vk::CmdResetQueryPool(m_commandBuffer->handle(), query_pool, 0, 1);
+    vk::CmdWriteTimestamp(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, query_pool, 0);
+    m_commandBuffer->end();
+
+    // Submit cmd buffer and then destroy query pool while in-flight
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyQueryPool-queryPool-00793");
+    vk::DestroyQueryPool(m_device->handle(), query_pool, NULL);
+    m_errorMonitor->VerifyFound();
+
+    vk::QueueWaitIdle(m_device->m_queue);
+    // Now that the cmd buffer is done we can safely destroy query_pool
+    m_errorMonitor->SetUnexpectedError("If queryPool is not VK_NULL_HANDLE, queryPool must be a valid VkQueryPool handle");
+    m_errorMonitor->SetUnexpectedError("Unable to remove QueryPool obj");
+    vk::DestroyQueryPool(m_device->handle(), query_pool, NULL);
+}
+
+TEST_F(VkLayerTest, PipelineInUseDestroyedSignaled) {
+    TEST_DESCRIPTION("Delete in-use pipeline.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    const VkPipelineLayoutObj pipeline_layout(m_device);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyPipeline-pipeline-00765");
+    // Create PSO to be used for draw-time errors below
+
+    // Store pipeline handle so we can actually delete it before test finishes
+    VkPipeline delete_this_pipeline;
+    {  // Scope pipeline so it will be auto-deleted
+        CreatePipelineHelper pipe(*this);
+        pipe.InitInfo();
+        pipe.InitState();
+        pipe.CreateGraphicsPipeline();
+
+        delete_this_pipeline = pipe.pipeline_;
+
+        m_commandBuffer->begin();
+        // Bind pipeline to cmd buffer
+        vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
+
+        m_commandBuffer->end();
+
+        VkSubmitInfo submit_info = {};
+        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+        submit_info.commandBufferCount = 1;
+        submit_info.pCommandBuffers = &m_commandBuffer->handle();
+        // Submit cmd buffer; the pipeline is then destroyed while in-flight
+        vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    }  // Pipeline deletion triggered here
+    m_errorMonitor->VerifyFound();
+    // Make sure queue finished and then actually delete pipeline
+    vk::QueueWaitIdle(m_device->m_queue);
+    m_errorMonitor->SetUnexpectedError("If pipeline is not VK_NULL_HANDLE, pipeline must be a valid VkPipeline handle");
+    m_errorMonitor->SetUnexpectedError("Unable to remove Pipeline obj");
+    vk::DestroyPipeline(m_device->handle(), delete_this_pipeline, nullptr);
+}
+
+TEST_F(VkLayerTest, ImageViewInUseDestroyedSignaled) {
+    TEST_DESCRIPTION("Delete in-use imageView.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+    VkSampler sampler;
+
+    VkResult err;
+    err = vk::CreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+    ASSERT_VK_SUCCESS(err);
+
+    VkImageObj image(m_device);
+    image.Init(128, 128, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(image.initialized());
+
+    VkImageView view = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
+
+    // Create PSO to use the sampler
+    VkShaderObj fs(m_device, bindStateFragSamplerShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.shader_stages_ = {pipe.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
+    pipe.dsl_bindings_ = {
+        {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
+    };
+    const VkDynamicState dyn_states[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
+    VkPipelineDynamicStateCreateInfo dyn_state_ci = {};
+    dyn_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
+    dyn_state_ci.dynamicStateCount = size(dyn_states);
+    dyn_state_ci.pDynamicStates = dyn_states;
+    pipe.dyn_state_ci_ = dyn_state_ci;
+    pipe.InitState();
+    pipe.CreateGraphicsPipeline();
+
+    pipe.descriptor_set_->WriteDescriptorImageInfo(0, view, sampler, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
+    pipe.descriptor_set_->UpdateDescriptorSets();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyImageView-imageView-01026");
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+    // Bind pipeline to cmd buffer
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
+                              &pipe.descriptor_set_->set_, 0, nullptr);
+
+    VkViewport viewport = {0, 0, 16, 16, 0, 1};
+    VkRect2D scissor = {{0, 0}, {16, 16}};
+    vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+    vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+
+    m_commandBuffer->Draw(1, 0, 0, 0);
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+    // Submit cmd buffer, then destroy the imageView it references
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    // Submit cmd buffer and then destroy imageView while in-flight
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+
+    vk::DestroyImageView(m_device->device(), view, nullptr);
+    m_errorMonitor->VerifyFound();
+    vk::QueueWaitIdle(m_device->m_queue);
+    // The view was already destroyed above; suppress the errors from its cleanup and destroy the sampler
+    m_errorMonitor->SetUnexpectedError("If imageView is not VK_NULL_HANDLE, imageView must be a valid VkImageView handle");
+    m_errorMonitor->SetUnexpectedError("Unable to remove ImageView obj");
+    vk::DestroySampler(m_device->device(), sampler, nullptr);
+}
+
+TEST_F(VkLayerTest, BufferViewInUseDestroyedSignaled) {
+    TEST_DESCRIPTION("Delete in-use bufferView.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    uint32_t queue_family_index = 0;
+    VkBufferCreateInfo buffer_create_info = {};
+    buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    buffer_create_info.size = 1024;
+    buffer_create_info.usage = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
+    buffer_create_info.queueFamilyIndexCount = 1;
+    buffer_create_info.pQueueFamilyIndices = &queue_family_index;
+    VkBufferObj buffer;
+    buffer.init(*m_device, buffer_create_info);
+
+    VkBufferView view;
+    VkBufferViewCreateInfo bvci = {};
+    bvci.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
+    bvci.buffer = buffer.handle();
+    bvci.format = VK_FORMAT_R32_SFLOAT;
+    bvci.range = VK_WHOLE_SIZE;
+
+    VkResult err = vk::CreateBufferView(m_device->device(), &bvci, NULL, &view);
+    ASSERT_VK_SUCCESS(err);
+
+    char const *fsSource =
+        "#version 450\n"
+        "\n"
+        "layout(set=0, binding=0, r32f) uniform readonly imageBuffer s;\n"
+        "layout(location=0) out vec4 x;\n"
+        "void main(){\n"
+        "   x = imageLoad(s, 0);\n"
+        "}\n";
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.shader_stages_ = {pipe.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
+    pipe.dsl_bindings_ = {
+        {0, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+    };
+    const VkDynamicState dyn_states[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
+    VkPipelineDynamicStateCreateInfo dyn_state_ci = {};
+    dyn_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
+    dyn_state_ci.dynamicStateCount = size(dyn_states);
+    dyn_state_ci.pDynamicStates = dyn_states;
+    pipe.dyn_state_ci_ = dyn_state_ci;
+    pipe.InitState();
+    pipe.CreateGraphicsPipeline();
+
+    pipe.descriptor_set_->WriteDescriptorBufferView(0, view, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
+    pipe.descriptor_set_->UpdateDescriptorSets();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroyBufferView-bufferView-00936");
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+    VkViewport viewport = {0, 0, 16, 16, 0, 1};
+    vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+    VkRect2D scissor = {{0, 0}, {16, 16}};
+    vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+    // Bind pipeline to cmd buffer
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
+                              &pipe.descriptor_set_->set_, 0, nullptr);
+    m_commandBuffer->Draw(1, 0, 0, 0);
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    // Submit cmd buffer and then destroy bufferView while in-flight
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+
+    vk::DestroyBufferView(m_device->device(), view, nullptr);
+    m_errorMonitor->VerifyFound();
+    vk::QueueWaitIdle(m_device->m_queue);
+    // Now we can actually destroy bufferView
+    m_errorMonitor->SetUnexpectedError("If bufferView is not VK_NULL_HANDLE, bufferView must be a valid VkBufferView handle");
+    m_errorMonitor->SetUnexpectedError("Unable to remove BufferView obj");
+    vk::DestroyBufferView(m_device->device(), view, NULL);
+}
+
+TEST_F(VkLayerTest, SamplerInUseDestroyedSignaled) {
+    TEST_DESCRIPTION("Delete in-use sampler.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+    VkSampler sampler;
+
+    VkResult err;
+    err = vk::CreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+    ASSERT_VK_SUCCESS(err);
+
+    VkImageObj image(m_device);
+    image.Init(128, 128, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(image.initialized());
+
+    VkImageView view = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
+
+    // Create PSO to use the sampler
+    VkShaderObj fs(m_device, bindStateFragSamplerShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.shader_stages_ = {pipe.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
+    pipe.dsl_bindings_ = {
+        {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
+    };
+    const VkDynamicState dyn_states[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
+    VkPipelineDynamicStateCreateInfo dyn_state_ci = {};
+    dyn_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
+    dyn_state_ci.dynamicStateCount = size(dyn_states);
+    dyn_state_ci.pDynamicStates = dyn_states;
+    pipe.dyn_state_ci_ = dyn_state_ci;
+    pipe.InitState();
+    pipe.CreateGraphicsPipeline();
+
+    pipe.descriptor_set_->WriteDescriptorImageInfo(0, view, sampler, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
+    pipe.descriptor_set_->UpdateDescriptorSets();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkDestroySampler-sampler-01082");
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+    // Bind pipeline to cmd buffer
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
+                              &pipe.descriptor_set_->set_, 0, nullptr);
+
+    VkViewport viewport = {0, 0, 16, 16, 0, 1};
+    VkRect2D scissor = {{0, 0}, {16, 16}};
+    vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+    vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+
+    m_commandBuffer->Draw(1, 0, 0, 0);
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+    // Submit cmd buffer then destroy sampler
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    // Submit cmd buffer and then destroy sampler while in-flight
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+
+    vk::DestroySampler(m_device->device(), sampler, nullptr);  // Destroyed too soon
+    m_errorMonitor->VerifyFound();
+    vk::QueueWaitIdle(m_device->m_queue);
+
+    // Now we can actually destroy sampler
+    m_errorMonitor->SetUnexpectedError("If sampler is not VK_NULL_HANDLE, sampler must be a valid VkSampler handle");
+    m_errorMonitor->SetUnexpectedError("Unable to remove Sampler obj");
+    vk::DestroySampler(m_device->device(), sampler, NULL);  // Destroyed for real
+}
+
+TEST_F(VkLayerTest, QueueForwardProgressFenceWait) {
+    TEST_DESCRIPTION("Call VkQueueSubmit with a semaphore that is already signaled but not waited on by the queue.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    const char *queue_forward_progress_message = "UNASSIGNED-CoreValidation-DrawState-QueueForwardProgress";
+
+    VkCommandBufferObj cb1(m_device, m_commandPool);
+    cb1.begin();
+    cb1.end();
+
+    VkSemaphoreCreateInfo semaphore_create_info = {};
+    semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+    VkSemaphore semaphore;
+    ASSERT_VK_SUCCESS(vk::CreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore));
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &cb1.handle();
+    submit_info.signalSemaphoreCount = 1;
+    submit_info.pSignalSemaphores = &semaphore;
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+
+    m_commandBuffer->begin();
+    m_commandBuffer->end();
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, queue_forward_progress_message);
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+
+    vk::DeviceWaitIdle(m_device->device());
+    vk::DestroySemaphore(m_device->device(), semaphore, nullptr);
+}
+
+#if GTEST_IS_THREADSAFE
+TEST_F(VkLayerTest, ThreadCommandBufferCollision) {
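+    // Record into the same command buffer from two threads at once; the threading checks should report a THREADING ERROR.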
+    test_platform_thread thread;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "THREADING ERROR");
+    m_errorMonitor->SetAllowedFailureMsg("THREADING ERROR");  // Ignore any extra threading errors found beyond the first one
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // Calls AllocateCommandBuffers
+    VkCommandBufferObj commandBuffer(m_device, m_commandPool);
+
+    commandBuffer.begin();
+
+    VkEventCreateInfo event_info;
+    VkEvent event;
+    VkResult err;
+
+    memset(&event_info, 0, sizeof(event_info));
+    event_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+
+    err = vk::CreateEvent(device(), &event_info, NULL, &event);
+    ASSERT_VK_SUCCESS(err);
+
+    err = vk::ResetEvent(device(), event);
+    ASSERT_VK_SUCCESS(err);
+
+    struct thread_data_struct data;
+    data.commandBuffer = commandBuffer.handle();
+    data.event = event;
+    bool bailout = false;
+    data.bailout = &bailout;
+    m_errorMonitor->SetBailout(data.bailout);
+
+    // First do some correct operations using multiple threads.
+    // Add many entries to command buffer from another thread.
+    test_platform_thread_create(&thread, AddToCommandBuffer, (void *)&data);
+    // Make non-conflicting calls from this thread at the same time.
+    for (int i = 0; i < 80000; i++) {
+        uint32_t count;
+        vk::EnumeratePhysicalDevices(instance(), &count, NULL);
+    }
+    test_platform_thread_join(thread, NULL);
+
+    // Then do some incorrect operations using multiple threads.
+    // Add many entries to command buffer from another thread.
+    test_platform_thread_create(&thread, AddToCommandBuffer, (void *)&data);
+    // Add many entries to command buffer from this thread at the same time.
+    AddToCommandBuffer(&data);
+
+    test_platform_thread_join(thread, NULL);
+    commandBuffer.end();
+
+    m_errorMonitor->SetBailout(NULL);
+
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyEvent(device(), event, NULL);
+}
+
+TEST_F(VkLayerTest, ThreadUpdateDescriptorCollision) {
+    TEST_DESCRIPTION("Two threads updating the same descriptor set, expected to generate a threading error");
+    test_platform_thread thread;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "THREADING ERROR");
+    m_errorMonitor->SetAllowedFailureMsg("THREADING ERROR");  // Ignore any extra threading errors found beyond the first one
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    OneOffDescriptorSet normal_descriptor_set(m_device,
+                                              {
+                                                  {0, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
+                                                  {1, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
+                                              },
+                                              0);
+
+    VkBufferObj buffer;
+    buffer.init(*m_device, 256, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT);
+
+    struct thread_data_struct data;
+    data.device = device();
+    data.descriptorSet = normal_descriptor_set.set_;
+    data.binding = 0;
+    data.buffer = buffer.handle();
+    bool bailout = false;
+    data.bailout = &bailout;
+    m_errorMonitor->SetBailout(data.bailout);
+
+    // Update descriptors from another thread.
+    test_platform_thread_create(&thread, UpdateDescriptor, (void *)&data);
+    // Update descriptors from this thread at the same time.
+
+    struct thread_data_struct data2;
+    data2.device = device();
+    data2.descriptorSet = normal_descriptor_set.set_;
+    data2.binding = 1;
+    data2.buffer = buffer.handle();
+    data2.bailout = &bailout;
+
+    UpdateDescriptor(&data2);
+
+    test_platform_thread_join(thread, NULL);
+
+    m_errorMonitor->SetBailout(NULL);
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, ThreadUpdateDescriptorUpdateAfterBindNoCollision) {
+    TEST_DESCRIPTION("Two threads updating the same UAB descriptor set, expected not to generate a threading error");
+    test_platform_thread thread;
+    m_errorMonitor->ExpectSuccess();
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME) &&
+        DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE3_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE3_EXTENSION_NAME);
+    } else {
+        printf("%s Descriptor Indexing or Maintenance3 Extension not supported, skipping tests\n", kSkipPrefix);
+        return;
+    }
+
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+    // Create a device that enables descriptorBindingStorageBufferUpdateAfterBind
+    auto indexing_features = lvl_init_struct<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>();
+    auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&indexing_features);
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+
+    if (VK_FALSE == indexing_features.descriptorBindingStorageBufferUpdateAfterBind) {
+        printf("%s Test requires (unsupported) descriptorBindingStorageBufferUpdateAfterBind, skipping\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    std::array<VkDescriptorBindingFlagsEXT, 2> flags = {VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT,
+                                                        VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT};
+    auto flags_create_info = lvl_init_struct<VkDescriptorSetLayoutBindingFlagsCreateInfoEXT>();
+    flags_create_info.bindingCount = (uint32_t)flags.size();
+    flags_create_info.pBindingFlags = flags.data();
+
+    OneOffDescriptorSet normal_descriptor_set(m_device,
+                                              {
+                                                  {0, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
+                                                  {1, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
+                                              },
+                                              VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT, &flags_create_info,
+                                              VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT);
+
+    VkBufferObj buffer;
+    buffer.init(*m_device, 256, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT);
+
+    struct thread_data_struct data;
+    data.device = device();
+    data.descriptorSet = normal_descriptor_set.set_;
+    data.binding = 0;
+    data.buffer = buffer.handle();
+    bool bailout = false;
+    data.bailout = &bailout;
+    m_errorMonitor->SetBailout(data.bailout);
+
+    // Update descriptors from another thread.
+    test_platform_thread_create(&thread, UpdateDescriptor, (void *)&data);
+    // Update descriptors from this thread at the same time.
+
+    struct thread_data_struct data2;
+    data2.device = device();
+    data2.descriptorSet = normal_descriptor_set.set_;
+    data2.binding = 1;
+    data2.buffer = buffer.handle();
+    data2.bailout = &bailout;
+
+    UpdateDescriptor(&data2);
+
+    test_platform_thread_join(thread, NULL);
+
+    m_errorMonitor->SetBailout(NULL);
+
+    m_errorMonitor->VerifyNotFound();
+}
+#endif  // GTEST_IS_THREADSAFE
+
+TEST_F(VkLayerTest, ExecuteUnrecordedPrimaryCB) {
+    TEST_DESCRIPTION("Attempt vkQueueSubmit with a CB in the initial state");
+    ASSERT_NO_FATAL_FAILURE(Init());
+    // never record m_commandBuffer
+
+    VkSubmitInfo si = {};
+    si.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    si.commandBufferCount = 1;
+    si.pCommandBuffers = &m_commandBuffer->handle();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkQueueSubmit-pCommandBuffers-00072");
+    vk::QueueSubmit(m_device->m_queue, 1, &si, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, Maintenance1AndNegativeViewport) {
+    TEST_DESCRIPTION("Attempt to enable AMD_negative_viewport_height and Maintenance1_KHR extension simultaneously");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (!((DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME)) &&
+          (DeviceExtensionSupported(gpu(), nullptr, VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME)))) {
+        printf("%s Maintenance1 and AMD_negative viewport height extensions not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    vk_testing::QueueCreateInfoArray queue_info(m_device->queue_props);
+    const char *extension_names[2] = {"VK_KHR_maintenance1", "VK_AMD_negative_viewport_height"};
+    VkDevice testDevice;
+    VkDeviceCreateInfo device_create_info = {};
+    auto features = m_device->phy().features();
+    device_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
+    device_create_info.pNext = NULL;
+    device_create_info.queueCreateInfoCount = queue_info.size();
+    device_create_info.pQueueCreateInfos = queue_info.data();
+    device_create_info.enabledLayerCount = 0;
+    device_create_info.ppEnabledLayerNames = NULL;
+    device_create_info.enabledExtensionCount = 2;
+    device_create_info.ppEnabledExtensionNames = (const char *const *)extension_names;
+    device_create_info.pEnabledFeatures = &features;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkDeviceCreateInfo-ppEnabledExtensionNames-00374");
+    // The following unexpected error is coming from the LunarG loader. Do not make it a desired message because platforms that do
+    // not use the LunarG loader (e.g. Android) will not see the message and the test will fail.
+    m_errorMonitor->SetUnexpectedError("Failed to create device chain.");
+    vk::CreateDevice(gpu(), &device_create_info, NULL, &testDevice);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InstanceDebugReportCallback) {
+    TEST_DESCRIPTION("Test that a pNext-installed debug callback will catch a CreateInstance-time error.");
+
+    // This instance extension requires that the VK_KHR_get_surface_capabilities2 also be enabled
+    if (!InstanceExtensionSupported(VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME)) {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME);
+        return;
+    }
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCreateInstance-ppEnabledExtensionNames-01388");
+    // Enable the instance extension, but none of the extensions it depends on
+    m_instance_extension_names.push_back(VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME);
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, HostQueryResetNotEnabled) {
+    TEST_DESCRIPTION("Use vkResetQueryPoolEXT without enabling the feature");
+
+    if (!InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+
+    m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (!DeviceExtensionSupported(gpu(), nullptr, VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME)) {
+        printf("%s Extension %s not supported by device; skipped.\n", kSkipPrefix, VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
+        return;
+    }
+
+    m_device_extension_names.push_back(VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    auto fpvkResetQueryPoolEXT = (PFN_vkResetQueryPoolEXT)vk::GetDeviceProcAddr(m_device->device(), "vkResetQueryPoolEXT");
+
+    VkQueryPool query_pool;
+    VkQueryPoolCreateInfo query_pool_create_info{};
+    query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+    query_pool_create_info.queryType = VK_QUERY_TYPE_TIMESTAMP;
+    query_pool_create_info.queryCount = 1;
+    vk::CreateQueryPool(m_device->device(), &query_pool_create_info, nullptr, &query_pool);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkResetQueryPoolEXT-None-02665");
+    fpvkResetQueryPoolEXT(m_device->device(), query_pool, 0, 1);
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyQueryPool(m_device->device(), query_pool, nullptr);
+}
+
+TEST_F(VkLayerTest, HostQueryResetBadFirstQuery) {
+    TEST_DESCRIPTION("Bad firstQuery in vkResetQueryPoolEXT");
+
+    if (!InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+
+    m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (!DeviceExtensionSupported(gpu(), nullptr, VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME)) {
+        printf("%s Extension %s not supported by device; skipped.\n", kSkipPrefix, VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
+        return;
+    }
+
+    m_device_extension_names.push_back(VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
+
+    VkPhysicalDeviceHostQueryResetFeaturesEXT host_query_reset_features{};
+    host_query_reset_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT;
+    host_query_reset_features.hostQueryReset = VK_TRUE;
+
+    VkPhysicalDeviceFeatures2 pd_features2{};
+    pd_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
+    pd_features2.pNext = &host_query_reset_features;
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &pd_features2));
+
+    auto fpvkResetQueryPoolEXT = (PFN_vkResetQueryPoolEXT)vk::GetDeviceProcAddr(m_device->device(), "vkResetQueryPoolEXT");
+
+    VkQueryPool query_pool;
+    VkQueryPoolCreateInfo query_pool_create_info{};
+    query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+    query_pool_create_info.queryType = VK_QUERY_TYPE_TIMESTAMP;
+    query_pool_create_info.queryCount = 1;
+    vk::CreateQueryPool(m_device->device(), &query_pool_create_info, nullptr, &query_pool);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkResetQueryPoolEXT-firstQuery-02666");
+    fpvkResetQueryPoolEXT(m_device->device(), query_pool, 1, 0);
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyQueryPool(m_device->device(), query_pool, nullptr);
+}
+
+TEST_F(VkLayerTest, HostQueryResetBadRange) {
+    TEST_DESCRIPTION("Bad range in vkResetQueryPoolEXT");
+
+    if (!InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+
+    m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (!DeviceExtensionSupported(gpu(), nullptr, VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME)) {
+        printf("%s Extension %s not supported by device; skipped.\n", kSkipPrefix, VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
+        return;
+    }
+
+    m_device_extension_names.push_back(VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
+
+    VkPhysicalDeviceHostQueryResetFeaturesEXT host_query_reset_features{};
+    host_query_reset_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT;
+    host_query_reset_features.hostQueryReset = VK_TRUE;
+
+    VkPhysicalDeviceFeatures2 pd_features2{};
+    pd_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
+    pd_features2.pNext = &host_query_reset_features;
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &pd_features2));
+
+    auto fpvkResetQueryPoolEXT = (PFN_vkResetQueryPoolEXT)vk::GetDeviceProcAddr(m_device->device(), "vkResetQueryPoolEXT");
+
+    VkQueryPool query_pool;
+    VkQueryPoolCreateInfo query_pool_create_info{};
+    query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+    query_pool_create_info.queryType = VK_QUERY_TYPE_TIMESTAMP;
+    query_pool_create_info.queryCount = 1;
+    vk::CreateQueryPool(m_device->device(), &query_pool_create_info, nullptr, &query_pool);
+
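+    // firstQuery (0) + queryCount (2) exceeds the pool's queryCount of 1.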
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkResetQueryPoolEXT-firstQuery-02667");
+    fpvkResetQueryPoolEXT(m_device->device(), query_pool, 0, 2);
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyQueryPool(m_device->device(), query_pool, nullptr);
+}
+
+TEST_F(VkLayerTest, HostQueryResetInvalidQueryPool) {
+    TEST_DESCRIPTION("Invalid queryPool in vkResetQueryPoolEXT");
+
+    if (!InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+
+    m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (!DeviceExtensionSupported(gpu(), nullptr, VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME)) {
+        printf("%s Extension %s not supported by device; skipped.\n", kSkipPrefix, VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
+        return;
+    }
+
+    m_device_extension_names.push_back(VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
+
+    VkPhysicalDeviceHostQueryResetFeaturesEXT host_query_reset_features{};
+    host_query_reset_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT;
+    host_query_reset_features.hostQueryReset = VK_TRUE;
+
+    VkPhysicalDeviceFeatures2 pd_features2{};
+    pd_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
+    pd_features2.pNext = &host_query_reset_features;
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &pd_features2));
+
+    auto fpvkResetQueryPoolEXT = (PFN_vkResetQueryPoolEXT)vk::GetDeviceProcAddr(m_device->device(), "vkResetQueryPoolEXT");
+
+    // Create and destroy a query pool.
+    VkQueryPool query_pool;
+    VkQueryPoolCreateInfo query_pool_create_info{};
+    query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+    query_pool_create_info.queryType = VK_QUERY_TYPE_TIMESTAMP;
+    query_pool_create_info.queryCount = 1;
+    vk::CreateQueryPool(m_device->device(), &query_pool_create_info, nullptr, &query_pool);
+    vk::DestroyQueryPool(m_device->device(), query_pool, nullptr);
+
+    // Attempt to reuse the query pool handle.
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkResetQueryPoolEXT-queryPool-parameter");
+    fpvkResetQueryPoolEXT(m_device->device(), query_pool, 0, 1);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, HostQueryResetWrongDevice) {
+    TEST_DESCRIPTION("Device not matching queryPool in vkResetQueryPoolEXT");
+
+    if (!InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+
+    m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (!DeviceExtensionSupported(gpu(), nullptr, VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME)) {
+        printf("%s Extension %s not supported by device; skipped.\n", kSkipPrefix, VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
+        return;
+    }
+
+    m_device_extension_names.push_back(VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
+
+    VkPhysicalDeviceHostQueryResetFeaturesEXT host_query_reset_features{};
+    host_query_reset_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT;
+    host_query_reset_features.hostQueryReset = VK_TRUE;
+
+    VkPhysicalDeviceFeatures2 pd_features2{};
+    pd_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
+    pd_features2.pNext = &host_query_reset_features;
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &pd_features2));
+
+    auto fpvkResetQueryPoolEXT = (PFN_vkResetQueryPoolEXT)vk::GetDeviceProcAddr(m_device->device(), "vkResetQueryPoolEXT");
+
+    VkQueryPool query_pool;
+    VkQueryPoolCreateInfo query_pool_create_info{};
+    query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+    query_pool_create_info.queryType = VK_QUERY_TYPE_TIMESTAMP;
+    query_pool_create_info.queryCount = 1;
+    vk::CreateQueryPool(m_device->device(), &query_pool_create_info, nullptr, &query_pool);
+
+    // Create a second device with the feature enabled.
+    vk_testing::QueueCreateInfoArray queue_info(m_device->queue_props);
+    auto features = m_device->phy().features();
+
+    VkDeviceCreateInfo device_create_info = {};
+    device_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
+    device_create_info.pNext = &host_query_reset_features;
+    device_create_info.queueCreateInfoCount = queue_info.size();
+    device_create_info.pQueueCreateInfos = queue_info.data();
+    device_create_info.pEnabledFeatures = &features;
+    device_create_info.enabledExtensionCount = m_device_extension_names.size();
+    device_create_info.ppEnabledExtensionNames = m_device_extension_names.data();
+
+    VkDevice second_device;
+    ASSERT_VK_SUCCESS(vk::CreateDevice(gpu(), &device_create_info, nullptr, &second_device));
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkResetQueryPoolEXT-queryPool-parent");
+    // Call vkResetQueryPoolEXT on the wrong device.
+    fpvkResetQueryPoolEXT(second_device, query_pool, 0, 1);
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyQueryPool(m_device->device(), query_pool, nullptr);
+    vk::DestroyDevice(second_device, nullptr);
+}
+
+TEST_F(VkLayerTest, ResetEventThenSet) {
+    TEST_DESCRIPTION("Reset an event then set it after the reset has been submitted.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    VkEvent event;
+    VkEventCreateInfo event_create_info{};
+    event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+    vk::CreateEvent(m_device->device(), &event_create_info, nullptr, &event);
+
+    VkCommandPool command_pool;
+    VkCommandPoolCreateInfo pool_create_info{};
+    pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+    pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+    pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+    vk::CreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
+
+    VkCommandBuffer command_buffer;
+    VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+    command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+    command_buffer_allocate_info.commandPool = command_pool;
+    command_buffer_allocate_info.commandBufferCount = 1;
+    command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+    vk::AllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &command_buffer);
+
+    VkQueue queue = VK_NULL_HANDLE;
+    vk::GetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 0, &queue);
+
+    {
+        VkCommandBufferBeginInfo begin_info{};
+        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+        vk::BeginCommandBuffer(command_buffer, &begin_info);
+
+        vk::CmdResetEvent(command_buffer, event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
+        vk::EndCommandBuffer(command_buffer);
+    }
+    {
+        VkSubmitInfo submit_info{};
+        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+        submit_info.commandBufferCount = 1;
+        submit_info.pCommandBuffers = &command_buffer;
+        submit_info.signalSemaphoreCount = 0;
+        submit_info.pSignalSemaphores = nullptr;
+        vk::QueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
+    }
+    {
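+        // The submitted command buffer that resets the event is still pending, so a host vkSetEvent is flagged as in-use.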
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "that is already in use by a command buffer.");
+        vk::SetEvent(m_device->device(), event);
+        m_errorMonitor->VerifyFound();
+    }
+
+    vk::QueueWaitIdle(queue);
+
+    vk::DestroyEvent(m_device->device(), event, nullptr);
+    vk::FreeCommandBuffers(m_device->device(), command_pool, 1, &command_buffer);
+    vk::DestroyCommandPool(m_device->device(), command_pool, NULL);
+}
+
+TEST_F(VkLayerTest, ShadingRateImageNV) {
+    TEST_DESCRIPTION("Test VK_NV_shading_rate_image.");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    std::array<const char *, 1> required_device_extensions = {{VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME}};
+    for (auto device_extension : required_device_extensions) {
+        if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
+            m_device_extension_names.push_back(device_extension);
+        } else {
+            printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
+            return;
+        }
+    }
+
+    if (DeviceIsMockICD() || DeviceSimulation()) {
+        printf("%s Test not supported by MockICD, skipping tests\n", kSkipPrefix);
+        return;
+    }
+
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+    // Create a device that enables shading_rate_image but disables multiViewport
+    auto shading_rate_image_features = lvl_init_struct<VkPhysicalDeviceShadingRateImageFeaturesNV>();
+    auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&shading_rate_image_features);
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+
+    features2.features.multiViewport = VK_FALSE;
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // Test shading rate image creation
+    VkResult result = VK_RESULT_MAX_ENUM;
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = VK_FORMAT_R8_UINT;
+    image_create_info.extent.width = 4;
+    image_create_info.extent.height = 4;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV;
+    image_create_info.queueFamilyIndexCount = 0;
+    image_create_info.pQueueFamilyIndices = NULL;
+    image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    image_create_info.flags = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
+
+    // image type must be 2D
+    image_create_info.imageType = VK_IMAGE_TYPE_3D;
+    CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-imageType-02082");
+
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+
+    // must be single sample
+    image_create_info.samples = VK_SAMPLE_COUNT_2_BIT;
+    CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-samples-02083");
+
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+
+    // tiling must be optimal
+    image_create_info.tiling = VK_IMAGE_TILING_LINEAR;
+    CreateImageTest(*this, &image_create_info, "VUID-VkImageCreateInfo-tiling-02084");
+
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+
+    // Should succeed.
+    VkImageObj image(m_device);
+    image.init(&image_create_info);
+
+    // Test image view creation
+    VkImageView view;
+    VkImageViewCreateInfo ivci = {};
+    ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    ivci.image = image.handle();
+    ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    ivci.format = VK_FORMAT_R8_UINT;
+    ivci.subresourceRange.layerCount = 1;
+    ivci.subresourceRange.baseMipLevel = 0;
+    ivci.subresourceRange.levelCount = 1;
+    ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+    // view type must be 2D or 2D_ARRAY
+    ivci.viewType = VK_IMAGE_VIEW_TYPE_CUBE;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-image-02086");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-image-01003");
+    result = vk::CreateImageView(m_device->device(), &ivci, nullptr, &view);
+    m_errorMonitor->VerifyFound();
+    if (VK_SUCCESS == result) {
+        vk::DestroyImageView(m_device->device(), view, NULL);
+        view = VK_NULL_HANDLE;
+    }
+    ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+
+    // format must be R8_UINT
+    ivci.format = VK_FORMAT_R8_UNORM;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-image-02087");
+    result = vk::CreateImageView(m_device->device(), &ivci, nullptr, &view);
+    m_errorMonitor->VerifyFound();
+    if (VK_SUCCESS == result) {
+        vk::DestroyImageView(m_device->device(), view, NULL);
+        view = VK_NULL_HANDLE;
+    }
+    ivci.format = VK_FORMAT_R8_UINT;
+
+    vk::CreateImageView(m_device->device(), &ivci, nullptr, &view);
+    m_errorMonitor->VerifyNotFound();
+
+    // Test pipeline creation
+    VkPipelineViewportShadingRateImageStateCreateInfoNV vsrisci = {
+        VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV};
+
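+    // Oversized viewport/scissor arrays; only the leading entries are used by the tests below.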
+    VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
+    VkViewport viewports[20] = {viewport, viewport};
+    VkRect2D scissor = {{0, 0}, {64, 64}};
+    VkRect2D scissors[20] = {scissor, scissor};
+    VkDynamicState dynPalette = VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV;
+    VkPipelineDynamicStateCreateInfo dyn = {VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO, nullptr, 0, 1, &dynPalette};
+
+    // viewportCount must be 0 or 1 when multiViewport is disabled
+    {
+        const auto break_vp = [&](CreatePipelineHelper &helper) {
+            helper.vp_state_ci_.viewportCount = 2;
+            helper.vp_state_ci_.pViewports = viewports;
+            helper.vp_state_ci_.scissorCount = 2;
+            helper.vp_state_ci_.pScissors = scissors;
+            helper.vp_state_ci_.pNext = &vsrisci;
+            helper.dyn_state_ci_ = dyn;
+
+            vsrisci.shadingRateImageEnable = VK_TRUE;
+            vsrisci.viewportCount = 2;
+        };
+        CreatePipelineHelper::OneshotTest(
+            *this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+            vector<std::string>({"VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-viewportCount-02054",
+                                 "VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
+                                 "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217"}));
+    }
+
+    // viewportCounts must match
+    {
+        const auto break_vp = [&](CreatePipelineHelper &helper) {
+            helper.vp_state_ci_.viewportCount = 1;
+            helper.vp_state_ci_.pViewports = viewports;
+            helper.vp_state_ci_.scissorCount = 1;
+            helper.vp_state_ci_.pScissors = scissors;
+            helper.vp_state_ci_.pNext = &vsrisci;
+            helper.dyn_state_ci_ = dyn;
+
+            vsrisci.shadingRateImageEnable = VK_TRUE;
+            vsrisci.viewportCount = 0;
+        };
+        CreatePipelineHelper::OneshotTest(
+            *this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+            vector<std::string>({"VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-shadingRateImageEnable-02056"}));
+    }
+
+    // pShadingRatePalettes must not be NULL.
+    {
+        const auto break_vp = [&](CreatePipelineHelper &helper) {
+            helper.vp_state_ci_.viewportCount = 1;
+            helper.vp_state_ci_.pViewports = viewports;
+            helper.vp_state_ci_.scissorCount = 1;
+            helper.vp_state_ci_.pScissors = scissors;
+            helper.vp_state_ci_.pNext = &vsrisci;
+
+            vsrisci.shadingRateImageEnable = VK_TRUE;
+            vsrisci.viewportCount = 1;
+        };
+        CreatePipelineHelper::OneshotTest(
+            *this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+            vector<std::string>({"VUID-VkPipelineViewportShadingRateImageStateCreateInfoNV-pDynamicStates-02057"}));
+    }
+
+    // Create an image without the SRI bit
+    VkImageObj nonSRIimage(m_device);
+    nonSRIimage.Init(256, 256, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(nonSRIimage.initialized());
+    VkImageView nonSRIview = nonSRIimage.targetView(VK_FORMAT_B8G8R8A8_UNORM);
+
+    // Test SRI layout on non-SRI image
+    VkImageMemoryBarrier img_barrier = {};
+    img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+    img_barrier.pNext = nullptr;
+    img_barrier.srcAccessMask = 0;
+    img_barrier.dstAccessMask = 0;
+    img_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
+    img_barrier.newLayout = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV;
+    img_barrier.image = nonSRIimage.handle();
+    img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    img_barrier.subresourceRange.baseArrayLayer = 0;
+    img_barrier.subresourceRange.baseMipLevel = 0;
+    img_barrier.subresourceRange.layerCount = 1;
+    img_barrier.subresourceRange.levelCount = 1;
+
+    m_commandBuffer->begin();
+
+    // Error trying to convert it to SRI layout
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryBarrier-oldLayout-02088");
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0,
+                           nullptr, 0, nullptr, 1, &img_barrier);
+    m_errorMonitor->VerifyFound();
+
+    // succeed converting it to GENERAL
+    img_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0,
+                           nullptr, 0, nullptr, 1, &img_barrier);
+    m_errorMonitor->VerifyNotFound();
+
+    // Test vk::CmdBindShadingRateImageNV errors
+    auto vkCmdBindShadingRateImageNV =
+        (PFN_vkCmdBindShadingRateImageNV)vk::GetDeviceProcAddr(m_device->device(), "vkCmdBindShadingRateImageNV");
+
+    // If the view is non-NULL, it must be R8_UINT with SRI usage, the image layout must match, and the layout must be valid
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindShadingRateImageNV-imageView-02060");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindShadingRateImageNV-imageView-02061");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindShadingRateImageNV-imageView-02062");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindShadingRateImageNV-imageLayout-02063");
+    vkCmdBindShadingRateImageNV(m_commandBuffer->handle(), nonSRIview, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
+    m_errorMonitor->VerifyFound();
+
+    // Test vk::CmdSetViewportShadingRatePaletteNV errors
+    auto vkCmdSetViewportShadingRatePaletteNV =
+        (PFN_vkCmdSetViewportShadingRatePaletteNV)vk::GetDeviceProcAddr(m_device->device(), "vkCmdSetViewportShadingRatePaletteNV");
+
+    VkShadingRatePaletteEntryNV paletteEntries[100] = {};
+    VkShadingRatePaletteNV palette = {100, paletteEntries};
+    VkShadingRatePaletteNV palettes[] = {palette, palette};
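+    // A 100-entry palette is expected to exceed the device's shading rate palette size limit, as checked below.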
+
+    // errors on firstViewport/viewportCount
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdSetViewportShadingRatePaletteNV-firstViewport-02066");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdSetViewportShadingRatePaletteNV-firstViewport-02067");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdSetViewportShadingRatePaletteNV-firstViewport-02068");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdSetViewportShadingRatePaletteNV-viewportCount-02069");
+    vkCmdSetViewportShadingRatePaletteNV(m_commandBuffer->handle(), 20, 2, palettes);
+    m_errorMonitor->VerifyFound();
+
+    // shadingRatePaletteEntryCount must be in range
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkShadingRatePaletteNV-shadingRatePaletteEntryCount-02071");
+    vkCmdSetViewportShadingRatePaletteNV(m_commandBuffer->handle(), 0, 1, palettes);
+    m_errorMonitor->VerifyFound();
+
+    VkCoarseSampleLocationNV locations[100] = {
+        {0, 0, 0},    {0, 0, 1}, {0, 1, 0}, {0, 1, 1}, {0, 1, 1},  // duplicate
+        {1000, 0, 0},                                              // pixelX too large
+        {0, 1000, 0},                                              // pixelY too large
+        {0, 0, 1000},                                              // sample too large
+    };
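+    // Offsets into this array are used below to build custom sample orders with duplicate, out-of-range, and excess locations.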
+
+    // Test custom sample orders, both via pipeline state and via dynamic state
+    {
+        VkCoarseSampleOrderCustomNV sampOrdBadShadingRate = {VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV, 1, 1,
+                                                             locations};
+        VkCoarseSampleOrderCustomNV sampOrdBadSampleCount = {VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV, 3, 1,
+                                                             locations};
+        VkCoarseSampleOrderCustomNV sampOrdBadSampleLocationCount = {VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV,
+                                                                     2, 2, locations};
+        VkCoarseSampleOrderCustomNV sampOrdDuplicateLocations = {VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV, 2,
+                                                                 1 * 2 * 2, &locations[1]};
+        VkCoarseSampleOrderCustomNV sampOrdOutOfRangeLocations = {VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV, 2,
+                                                                  1 * 2 * 2, &locations[4]};
+        VkCoarseSampleOrderCustomNV sampOrdTooLargeSampleLocationCount = {
+            VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV, 4, 64, &locations[8]};
+        VkCoarseSampleOrderCustomNV sampOrdGood = {VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV, 2, 1 * 2 * 2,
+                                                   &locations[0]};
+
+        VkPipelineViewportCoarseSampleOrderStateCreateInfoNV csosci = {
+            VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV};
+        csosci.sampleOrderType = VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV;
+        csosci.customSampleOrderCount = 1;
+
+        using std::vector;
+        struct TestCase {
+            const VkCoarseSampleOrderCustomNV *order;
+            vector<std::string> vuids;
+        };
+
+        vector<TestCase> test_cases = {
+            {&sampOrdBadShadingRate, {"VUID-VkCoarseSampleOrderCustomNV-shadingRate-02073"}},
+            {&sampOrdBadSampleCount,
+             {"VUID-VkCoarseSampleOrderCustomNV-sampleCount-02074", "VUID-VkCoarseSampleOrderCustomNV-sampleLocationCount-02075"}},
+            {&sampOrdBadSampleLocationCount, {"VUID-VkCoarseSampleOrderCustomNV-sampleLocationCount-02075"}},
+            {&sampOrdDuplicateLocations, {"VUID-VkCoarseSampleOrderCustomNV-pSampleLocations-02077"}},
+            {&sampOrdOutOfRangeLocations,
+             {"VUID-VkCoarseSampleOrderCustomNV-pSampleLocations-02077", "VUID-VkCoarseSampleLocationNV-pixelX-02078",
+              "VUID-VkCoarseSampleLocationNV-pixelY-02079", "VUID-VkCoarseSampleLocationNV-sample-02080"}},
+            {&sampOrdTooLargeSampleLocationCount,
+             {"VUID-VkCoarseSampleOrderCustomNV-sampleLocationCount-02076",
+              "VUID-VkCoarseSampleOrderCustomNV-pSampleLocations-02077"}},
+            {&sampOrdGood, {}},
+        };
+
+        for (const auto &test_case : test_cases) {
+            const auto break_vp = [&](CreatePipelineHelper &helper) {
+                helper.vp_state_ci_.pNext = &csosci;
+                csosci.pCustomSampleOrders = test_case.order;
+            };
+            CreatePipelineHelper::OneshotTest(*this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuids);
+        }
+
+        // Test vk::CmdSetCoarseSampleOrderNV errors
+        auto vkCmdSetCoarseSampleOrderNV =
+            (PFN_vkCmdSetCoarseSampleOrderNV)vk::GetDeviceProcAddr(m_device->device(), "vkCmdSetCoarseSampleOrderNV");
+
+        for (const auto &test_case : test_cases) {
+            for (uint32_t i = 0; i < test_case.vuids.size(); ++i) {
+                m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuids[i]);
+            }
+            vkCmdSetCoarseSampleOrderNV(m_commandBuffer->handle(), VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV, 1, test_case.order);
+            if (test_case.vuids.size()) {
+                m_errorMonitor->VerifyFound();
+            } else {
+                m_errorMonitor->VerifyNotFound();
+            }
+        }
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-vkCmdSetCoarseSampleOrderNV-sampleOrderType-02081");
+        vkCmdSetCoarseSampleOrderNV(m_commandBuffer->handle(), VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV, 1, &sampOrdGood);
+        m_errorMonitor->VerifyFound();
+    }
+
+    m_commandBuffer->end();
+
+    vk::DestroyImageView(m_device->device(), view, NULL);
+}
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+#include "android_ndk_types.h"
+
+TEST_F(VkLayerTest, AndroidHardwareBufferImageCreate) {
+    TEST_DESCRIPTION("Verify AndroidHardwareBuffer image create info.");
+
+    SetTargetApiVersion(VK_API_VERSION_1_1);
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
+        // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
+        (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
+        m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
+    } else {
+        printf("%s %s extension not supported, skipping tests\n", kSkipPrefix,
+               VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    VkDevice dev = m_device->device();
+
+    VkImage img = VK_NULL_HANDLE;
+    auto reset_img = [&img, dev]() {
+        if (VK_NULL_HANDLE != img) vk::DestroyImage(dev, img, NULL);
+        img = VK_NULL_HANDLE;
+    };
+
+    VkImageCreateInfo ici = {};
+    ici.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    ici.pNext = nullptr;
+    ici.imageType = VK_IMAGE_TYPE_2D;
+    ici.arrayLayers = 1;
+    ici.extent = {64, 64, 1};
+    ici.format = VK_FORMAT_UNDEFINED;
+    ici.mipLevels = 1;
+    ici.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    ici.samples = VK_SAMPLE_COUNT_1_BIT;
+    ici.tiling = VK_IMAGE_TILING_OPTIMAL;
+    ici.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+
+    // undefined format
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-pNext-01975");
+    m_errorMonitor->SetUnexpectedError("VUID_Undefined");
+    vk::CreateImage(dev, &ici, NULL, &img);
+    m_errorMonitor->VerifyFound();
+    reset_img();
+
+    // also undefined format
+    VkExternalFormatANDROID efa = {};
+    efa.sType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID;
+    efa.externalFormat = 0;
+    ici.pNext = &efa;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-pNext-01975");
+    vk::CreateImage(dev, &ici, NULL, &img);
+    m_errorMonitor->VerifyFound();
+    reset_img();
+
+    // undefined format with an unknown external format
+    efa.externalFormat = 0xBADC0DE;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkExternalFormatANDROID-externalFormat-01894");
+    vk::CreateImage(dev, &ici, NULL, &img);
+    m_errorMonitor->VerifyFound();
+    reset_img();
+
+    AHardwareBuffer *ahb;
+    AHardwareBuffer_Desc ahb_desc = {};
+    ahb_desc.format = AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM;
+    ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
+    ahb_desc.width = 64;
+    ahb_desc.height = 64;
+    ahb_desc.layers = 1;
+    // Allocate an AHardwareBuffer
+    AHardwareBuffer_allocate(&ahb_desc, &ahb);
+
+    // Retrieve its properties to make its external format 'known' (AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM)
+    VkAndroidHardwareBufferFormatPropertiesANDROID ahb_fmt_props = {};
+    ahb_fmt_props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
+    VkAndroidHardwareBufferPropertiesANDROID ahb_props = {};
+    ahb_props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
+    ahb_props.pNext = &ahb_fmt_props;
+    PFN_vkGetAndroidHardwareBufferPropertiesANDROID pfn_GetAHBProps =
+        (PFN_vkGetAndroidHardwareBufferPropertiesANDROID)vk::GetDeviceProcAddr(dev, "vkGetAndroidHardwareBufferPropertiesANDROID");
+    ASSERT_TRUE(pfn_GetAHBProps != nullptr);
+    pfn_GetAHBProps(dev, ahb, &ahb_props);
+
+    // a defined image format with a non-zero external format
+    ici.format = VK_FORMAT_R8G8B8A8_UNORM;
+    efa.externalFormat = ahb_fmt_props.externalFormat;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-pNext-01974");
+    vk::CreateImage(dev, &ici, NULL, &img);
+    m_errorMonitor->VerifyFound();
+    reset_img();
+    ici.format = VK_FORMAT_UNDEFINED;
+
+    // external format while MUTABLE
+    ici.flags = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-pNext-02396");
+    vk::CreateImage(dev, &ici, NULL, &img);
+    m_errorMonitor->VerifyFound();
+    reset_img();
+    ici.flags = 0;
+
+    // external format while usage other than SAMPLED
+    ici.usage |= VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-pNext-02397");
+    vk::CreateImage(dev, &ici, NULL, &img);
+    m_errorMonitor->VerifyFound();
+    reset_img();
+    ici.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+
+    // external format while tiling other than OPTIMAL
+    ici.tiling = VK_IMAGE_TILING_LINEAR;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-pNext-02398");
+    vk::CreateImage(dev, &ici, NULL, &img);
+    m_errorMonitor->VerifyFound();
+    reset_img();
+    ici.tiling = VK_IMAGE_TILING_OPTIMAL;
+
+    // imageType
+    VkExternalMemoryImageCreateInfo emici = {};
+    emici.sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO;
+    emici.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
+    ici.pNext = &emici;  // remove efa from chain, insert emici
+    ici.format = VK_FORMAT_R8G8B8A8_UNORM;
+    ici.imageType = VK_IMAGE_TYPE_3D;
+    ici.extent = {64, 64, 64};
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-pNext-02393");
+    vk::CreateImage(dev, &ici, NULL, &img);
+    m_errorMonitor->VerifyFound();
+    reset_img();
+
+    // wrong mipLevels
+    ici.imageType = VK_IMAGE_TYPE_2D;
+    ici.extent = {64, 64, 1};
+    ici.mipLevels = 6;  // should be 7 (the full mip chain for a 64x64 image is 7 levels)
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageCreateInfo-pNext-02394");
+    vk::CreateImage(dev, &ici, NULL, &img);
+    m_errorMonitor->VerifyFound();
+    reset_img();
+}
+
+TEST_F(VkLayerTest, AndroidHardwareBufferFetchUnboundImageInfo) {
+    TEST_DESCRIPTION("Verify AndroidHardwareBuffer retreive image properties while memory unbound.");
+
+    SetTargetApiVersion(VK_API_VERSION_1_1);
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
+        // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
+        (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
+        m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
+    } else {
+        printf("%s %s extension not supported, skipping tests\n", kSkipPrefix,
+               VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    VkDevice dev = m_device->device();
+
+    VkImage img = VK_NULL_HANDLE;
+    auto reset_img = [&img, dev]() {
+        if (VK_NULL_HANDLE != img) vk::DestroyImage(dev, img, NULL);
+        img = VK_NULL_HANDLE;
+    };
+
+    VkImageCreateInfo ici = {};
+    ici.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    ici.pNext = nullptr;
+    ici.imageType = VK_IMAGE_TYPE_2D;
+    ici.arrayLayers = 1;
+    ici.extent = {64, 64, 1};
+    ici.format = VK_FORMAT_R8G8B8A8_UNORM;
+    ici.mipLevels = 1;
+    ici.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    ici.samples = VK_SAMPLE_COUNT_1_BIT;
+    ici.tiling = VK_IMAGE_TILING_LINEAR;
+    ici.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+
+    VkExternalMemoryImageCreateInfo emici = {};
+    emici.sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO;
+    emici.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
+    ici.pNext = &emici;
+
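+    // Creating the image itself should succeed; only the queries below on the still-unbound image should fail.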
+    m_errorMonitor->ExpectSuccess();
+    vk::CreateImage(dev, &ici, NULL, &img);
+    m_errorMonitor->VerifyNotFound();
+
+    // attempt to fetch layout from unbound image
+    VkImageSubresource sub_rsrc = {};
+    sub_rsrc.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    VkSubresourceLayout sub_layout = {};
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetImageSubresourceLayout-image-01895");
+    vk::GetImageSubresourceLayout(dev, img, &sub_rsrc, &sub_layout);
+    m_errorMonitor->VerifyFound();
+
+    // attempt to get memory reqs from unbound image
+    VkImageMemoryRequirementsInfo2 imri = {};
+    imri.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2;
+    imri.image = img;
+    VkMemoryRequirements2 mem_reqs = {};
+    mem_reqs.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageMemoryRequirementsInfo2-image-01897");
+    vk::GetImageMemoryRequirements2(dev, &imri, &mem_reqs);
+    m_errorMonitor->VerifyFound();
+
+    reset_img();
+}
+
+TEST_F(VkLayerTest, AndroidHardwareBufferMemoryAllocation) {
+    TEST_DESCRIPTION("Verify AndroidHardwareBuffer memory allocation.");
+
+    SetTargetApiVersion(VK_API_VERSION_1_1);
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
+        // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
+        (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
+        m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
+    } else {
+        printf("%s %s extension not supported, skipping tests\n", kSkipPrefix,
+               VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    VkDevice dev = m_device->device();
+
+    VkImage img = VK_NULL_HANDLE;
+    auto reset_img = [&img, dev]() {
+        if (VK_NULL_HANDLE != img) vk::DestroyImage(dev, img, NULL);
+        img = VK_NULL_HANDLE;
+    };
+    VkDeviceMemory mem_handle = VK_NULL_HANDLE;
+    auto reset_mem = [&mem_handle, dev]() {
+        if (VK_NULL_HANDLE != mem_handle) vk::FreeMemory(dev, mem_handle, NULL);
+        mem_handle = VK_NULL_HANDLE;
+    };
+
+    PFN_vkGetAndroidHardwareBufferPropertiesANDROID pfn_GetAHBProps =
+        (PFN_vkGetAndroidHardwareBufferPropertiesANDROID)vk::GetDeviceProcAddr(dev, "vkGetAndroidHardwareBufferPropertiesANDROID");
+    ASSERT_TRUE(pfn_GetAHBProps != nullptr);
+
+    // AHB structs
+    AHardwareBuffer *ahb = nullptr;
+    AHardwareBuffer_Desc ahb_desc = {};
+    VkAndroidHardwareBufferFormatPropertiesANDROID ahb_fmt_props = {};
+    ahb_fmt_props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
+    VkAndroidHardwareBufferPropertiesANDROID ahb_props = {};
+    ahb_props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
+    ahb_props.pNext = &ahb_fmt_props;
+    VkImportAndroidHardwareBufferInfoANDROID iahbi = {};
+    iahbi.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
+
+    // destroy and re-acquire an AHB, and fetch its properties
+    auto recreate_ahb = [&ahb, &iahbi, &ahb_desc, &ahb_props, dev, pfn_GetAHBProps]() {
+        if (ahb) AHardwareBuffer_release(ahb);
+        ahb = nullptr;
+        AHardwareBuffer_allocate(&ahb_desc, &ahb);
+        if (ahb) {
+            pfn_GetAHBProps(dev, ahb, &ahb_props);
+            iahbi.buffer = ahb;
+        }
+    };
+
+    // Allocate an AHardwareBuffer
+    ahb_desc.format = AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM;
+    ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
+    ahb_desc.width = 64;
+    ahb_desc.height = 64;
+    ahb_desc.layers = 1;
+    recreate_ahb();
+
+    // Create an image w/ external format
+    VkExternalFormatANDROID efa = {};
+    efa.sType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID;
+    efa.externalFormat = ahb_fmt_props.externalFormat;
+
+    VkImageCreateInfo ici = {};
+    ici.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    ici.pNext = &efa;
+    ici.imageType = VK_IMAGE_TYPE_2D;
+    ici.arrayLayers = 1;
+    ici.extent = {64, 64, 1};
+    ici.format = VK_FORMAT_UNDEFINED;
+    ici.mipLevels = 1;
+    ici.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    ici.samples = VK_SAMPLE_COUNT_1_BIT;
+    ici.tiling = VK_IMAGE_TILING_OPTIMAL;
+    ici.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+    VkResult res = vk::CreateImage(dev, &ici, NULL, &img);
+    ASSERT_VK_SUCCESS(res);
+
+    VkMemoryAllocateInfo mai = {};
+    mai.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    mai.pNext = &iahbi;  // Chained import struct
+    mai.allocationSize = ahb_props.allocationSize;
+    mai.memoryTypeIndex = 32;
+    // Set index to match one of the bits in ahb_props
+    for (int i = 0; i < 32; i++) {
+        if (ahb_props.memoryTypeBits & (1 << i)) {
+            mai.memoryTypeIndex = i;
+            break;
+        }
+    }
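+    // A memoryTypeIndex of 32 would mean no bit in ahb_props.memoryTypeBits was set.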
+    ASSERT_NE(32, mai.memoryTypeIndex);
+
+    // Import w/ non-dedicated memory allocation
+
+    // Import requires format AHB_FMT_BLOB and usage AHB_USAGE_GPU_DATA_BUFFER
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-pNext-02384");
+    vk::AllocateMemory(dev, &mai, NULL, &mem_handle);
+    m_errorMonitor->VerifyFound();
+    reset_mem();
+
+    // Allocation size mismatch
+    ahb_desc.format = AHARDWAREBUFFER_FORMAT_BLOB;
+    ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER;
+    ahb_desc.height = 1;
+    recreate_ahb();
+    mai.allocationSize = ahb_props.allocationSize + 1;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-allocationSize-02383");
+    vk::AllocateMemory(dev, &mai, NULL, &mem_handle);
+    m_errorMonitor->VerifyFound();
+    mai.allocationSize = ahb_props.allocationSize;
+    reset_mem();
+
+    // memoryTypeIndex mismatch
+    mai.memoryTypeIndex++;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-memoryTypeIndex-02385");
+    vk::AllocateMemory(dev, &mai, NULL, &mem_handle);
+    m_errorMonitor->VerifyFound();
+    mai.memoryTypeIndex--;
+    reset_mem();
+
+    // Insert dedicated image memory allocation to mai chain
+    VkMemoryDedicatedAllocateInfo mdai = {};
+    mdai.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
+    mdai.image = img;
+    mdai.buffer = VK_NULL_HANDLE;
+    mdai.pNext = mai.pNext;
+    mai.pNext = &mdai;
+
+    // Dedicated allocation with unmatched usage bits
+    ahb_desc.format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
+    ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT;
+    ahb_desc.height = 64;
+    recreate_ahb();
+    mai.allocationSize = ahb_props.allocationSize;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-pNext-02390");
+    vk::AllocateMemory(dev, &mai, NULL, &mem_handle);
+    m_errorMonitor->VerifyFound();
+    reset_mem();
+
+    // Dedicated allocation with incomplete mip chain
+    reset_img();
+    ici.mipLevels = 2;
+    vk::CreateImage(dev, &ici, NULL, &img);
+    mdai.image = img;
+    ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE | AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE;
+    recreate_ahb();
+
+    if (ahb) {
+        mai.allocationSize = ahb_props.allocationSize;
+        for (int i = 0; i < 32; i++) {
+            if (ahb_props.memoryTypeBits & (1 << i)) {
+                mai.memoryTypeIndex = i;
+                break;
+            }
+        }
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-pNext-02389");
+        vk::AllocateMemory(dev, &mai, NULL, &mem_handle);
+        m_errorMonitor->VerifyFound();
+        reset_mem();
+    } else {
+        // ERROR: AHardwareBuffer_allocate() with MIPMAP_COMPLETE fails. It returns -12, NO_MEMORY.
+        // The problem seems to happen on Pixel 2, not Pixel 3.
+        printf("%s AHARDWAREBUFFER_USAGE_GPU_MIPMAP_COMPLETE not supported, skipping tests\n", kSkipPrefix);
+    }
+
+    // Dedicated allocation with mismatched dimensions
+    ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
+    ahb_desc.height = 32;
+    ahb_desc.width = 128;
+    recreate_ahb();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-pNext-02388");
+    vk::AllocateMemory(dev, &mai, NULL, &mem_handle);
+    m_errorMonitor->VerifyFound();
+    reset_mem();
+
+    // Dedicated allocation with mismatched VkFormat
+    ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
+    ahb_desc.height = 64;
+    ahb_desc.width = 64;
+    recreate_ahb();
+    ici.mipLevels = 1;
+    ici.format = VK_FORMAT_B8G8R8A8_UNORM;
+    ici.pNext = NULL;
+    VkImage img2;
+    vk::CreateImage(dev, &ici, NULL, &img2);
+    mdai.image = img2;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-pNext-02387");
+    vk::AllocateMemory(dev, &mai, NULL, &mem_handle);
+    m_errorMonitor->VerifyFound();
+    vk::DestroyImage(dev, img2, NULL);
+    mdai.image = img;
+    reset_mem();
+
+    // Missing required ahb usage
+    ahb_desc.usage = AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkGetAndroidHardwareBufferPropertiesANDROID-buffer-01884");
+    recreate_ahb();
+    m_errorMonitor->VerifyFound();
+
+    // Dedicated allocation with missing usage bits
+    // Setting up this test also triggers a slew of others
+    mai.allocationSize = ahb_props.allocationSize + 1;
+    mai.memoryTypeIndex = 0;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-pNext-02390");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-memoryTypeIndex-02385");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-allocationSize-02383");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-pNext-02386");
+    vk::AllocateMemory(dev, &mai, NULL, &mem_handle);
+    m_errorMonitor->VerifyFound();
+    reset_mem();
+
+    // Non-import allocation - replace import struct in chain with export struct
+    VkExportMemoryAllocateInfo emai = {};
+    emai.sType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO;
+    emai.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
+    mai.pNext = &emai;
+    emai.pNext = &mdai;  // still dedicated
+    mdai.pNext = nullptr;
+
+    // Export with allocation size non-zero
+    ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
+    recreate_ahb();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-pNext-01874");
+    vk::AllocateMemory(dev, &mai, NULL, &mem_handle);
+    m_errorMonitor->VerifyFound();
+    reset_mem();
+
+    AHardwareBuffer_release(ahb);
+    reset_mem();
+    reset_img();
+}
+
+TEST_F(VkLayerTest, AndroidHardwareBufferCreateYCbCrSampler) {
+    TEST_DESCRIPTION("Verify AndroidHardwareBuffer YCbCr sampler creation.");
+
+    SetTargetApiVersion(VK_API_VERSION_1_1);
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
+        // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
+        (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
+        m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
+    } else {
+        printf("%s %s extension not supported, skipping tests\n", kSkipPrefix,
+               VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    VkDevice dev = m_device->device();
+
+    VkSamplerYcbcrConversion ycbcr_conv = VK_NULL_HANDLE;
+    VkSamplerYcbcrConversionCreateInfo sycci = {};
+    sycci.sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO;
+    sycci.format = VK_FORMAT_UNDEFINED;
+    sycci.ycbcrModel = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY;
+    sycci.ycbcrRange = VK_SAMPLER_YCBCR_RANGE_ITU_FULL;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSamplerYcbcrConversionCreateInfo-format-01904");
+    vk::CreateSamplerYcbcrConversion(dev, &sycci, NULL, &ycbcr_conv);
+    m_errorMonitor->VerifyFound();
+
+    VkExternalFormatANDROID efa = {};
+    efa.sType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID;
+    efa.externalFormat = AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM;
+    sycci.format = VK_FORMAT_R8G8B8A8_UNORM;
+    sycci.pNext = &efa;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSamplerYcbcrConversionCreateInfo-format-01904");
+    vk::CreateSamplerYcbcrConversion(dev, &sycci, NULL, &ycbcr_conv);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, AndroidHardwareBufferPhysDevImageFormatProp2) {
+    TEST_DESCRIPTION("Verify AndroidHardwareBuffer GetPhysicalDeviceImageFormatProperties.");
+
+    SetTargetApiVersion(VK_API_VERSION_1_1);
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
+        // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
+        (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
+        m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
+    } else {
+        printf("%s %s extension not supported, skipping test\n", kSkipPrefix,
+               VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    if ((m_instance_api_version < VK_API_VERSION_1_1) &&
+        !InstanceExtensionEnabled(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        printf("%s %s extension not supported, skipping test\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+
+    VkImageFormatProperties2 ifp = {};
+    ifp.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
+    VkPhysicalDeviceImageFormatInfo2 pdifi = {};
+    pdifi.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
+    pdifi.format = VK_FORMAT_R8G8B8A8_UNORM;
+    pdifi.tiling = VK_IMAGE_TILING_OPTIMAL;
+    pdifi.type = VK_IMAGE_TYPE_2D;
+    pdifi.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+    VkAndroidHardwareBufferUsageANDROID ahbu = {};
+    ahbu.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID;
+    ahbu.androidHardwareBufferUsage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
+    ifp.pNext = &ahbu;
+
+    // AHB_usage chained to input without a matching external image format struct chained to output
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkGetPhysicalDeviceImageFormatProperties2-pNext-01868");
+    vk::GetPhysicalDeviceImageFormatProperties2(m_device->phy().handle(), &pdifi, &ifp);
+    m_errorMonitor->VerifyFound();
+
+    // An external image format info struct is chained to the input, but its handleType is not VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID
+    VkPhysicalDeviceExternalImageFormatInfo pdeifi = {};
+    pdeifi.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO;
+    pdeifi.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT;
+    pdifi.pNext = &pdeifi;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkGetPhysicalDeviceImageFormatProperties2-pNext-01868");
+    vk::GetPhysicalDeviceImageFormatProperties2(m_device->phy().handle(), &pdifi, &ifp);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, AndroidHardwareBufferCreateImageView) {
+    TEST_DESCRIPTION("Verify AndroidHardwareBuffer image view creation.");
+
+    SetTargetApiVersion(VK_API_VERSION_1_1);
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
+        // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
+        (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
+        m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
+    } else {
+        printf("%s %s extension not supported, skipping tests\n", kSkipPrefix,
+               VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    VkDevice dev = m_device->device();
+
+    // Allocate an AHB and fetch its properties
+    AHardwareBuffer *ahb = nullptr;
+    AHardwareBuffer_Desc ahb_desc = {};
+    ahb_desc.format = AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM;
+    ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
+    ahb_desc.width = 64;
+    ahb_desc.height = 64;
+    ahb_desc.layers = 1;
+    AHardwareBuffer_allocate(&ahb_desc, &ahb);
+
+    // Retrieve AHB properties to make its external format 'known'
+    VkAndroidHardwareBufferFormatPropertiesANDROID ahb_fmt_props = {};
+    ahb_fmt_props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
+    VkAndroidHardwareBufferPropertiesANDROID ahb_props = {};
+    ahb_props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
+    ahb_props.pNext = &ahb_fmt_props;
+    PFN_vkGetAndroidHardwareBufferPropertiesANDROID pfn_GetAHBProps =
+        (PFN_vkGetAndroidHardwareBufferPropertiesANDROID)vk::GetDeviceProcAddr(dev, "vkGetAndroidHardwareBufferPropertiesANDROID");
+    ASSERT_TRUE(pfn_GetAHBProps != nullptr);
+    pfn_GetAHBProps(dev, ahb, &ahb_props);
+    AHardwareBuffer_release(ahb);
+
+    // Give image an external format
+    VkExternalFormatANDROID efa = {};
+    efa.sType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID;
+    efa.externalFormat = ahb_fmt_props.externalFormat;
+
+    ahb_desc.format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
+    ahb_desc.usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
+    ahb_desc.width = 64;
+    ahb_desc.height = 1;
+    ahb_desc.layers = 1;
+    AHardwareBuffer_allocate(&ahb_desc, &ahb);
+
+    // Create another VkExternalFormatANDROID for test VUID-VkImageViewCreateInfo-image-02400
+    VkAndroidHardwareBufferFormatPropertiesANDROID ahb_fmt_props_Ycbcr = {};
+    ahb_fmt_props_Ycbcr.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
+    VkAndroidHardwareBufferPropertiesANDROID ahb_props_Ycbcr = {};
+    ahb_props_Ycbcr.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
+    ahb_props_Ycbcr.pNext = &ahb_fmt_props_Ycbcr;
+    pfn_GetAHBProps(dev, ahb, &ahb_props_Ycbcr);
+    AHardwareBuffer_release(ahb);
+
+    VkExternalFormatANDROID efa_Ycbcr = {};
+    efa_Ycbcr.sType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID;
+    efa_Ycbcr.externalFormat = ahb_fmt_props_Ycbcr.externalFormat;
+
+    // Create the image
+    VkImage img = VK_NULL_HANDLE;
+    VkImageCreateInfo ici = {};
+    ici.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    ici.pNext = &efa;
+    ici.imageType = VK_IMAGE_TYPE_2D;
+    ici.arrayLayers = 1;
+    ici.extent = {64, 64, 1};
+    ici.format = VK_FORMAT_UNDEFINED;
+    ici.mipLevels = 1;
+    ici.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    ici.samples = VK_SAMPLE_COUNT_1_BIT;
+    ici.tiling = VK_IMAGE_TILING_OPTIMAL;
+    ici.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+    vk::CreateImage(dev, &ici, NULL, &img);
+
+    // Set up memory allocation
+    VkDeviceMemory img_mem = VK_NULL_HANDLE;
+    VkMemoryAllocateInfo mai = {};
+    mai.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    mai.allocationSize = 64 * 64 * 4;
+    mai.memoryTypeIndex = 0;
+    vk::AllocateMemory(dev, &mai, NULL, &img_mem);
+
+    // vk::GetImageMemoryRequirements must not be used for an AndroidHardwareBuffer-backed image.
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "UNASSIGNED-CoreValidation-vkBindImageMemory-invalid-requirements");
+    VkMemoryRequirements img_mem_reqs = {};
+    vk::GetImageMemoryRequirements(m_device->device(), img, &img_mem_reqs);
+    vk::BindImageMemory(dev, img, img_mem, 0);
+    m_errorMonitor->VerifyFound();
+
+    // Recreate the image and memory, then bind them successfully
+    vk::DestroyImage(dev, img, NULL);
+    vk::FreeMemory(dev, img_mem, NULL);
+    vk::CreateImage(dev, &ici, NULL, &img);
+    vk::AllocateMemory(dev, &mai, NULL, &img_mem);
+    vk::BindImageMemory(dev, img, img_mem, 0);
+
+    // Create a YCbCr conversion, with different external format, chain to view
+    VkSamplerYcbcrConversion ycbcr_conv = VK_NULL_HANDLE;
+    VkSamplerYcbcrConversionCreateInfo sycci = {};
+    sycci.sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO;
+    sycci.pNext = &efa_Ycbcr;
+    sycci.format = VK_FORMAT_UNDEFINED;
+    sycci.ycbcrModel = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY;
+    sycci.ycbcrRange = VK_SAMPLER_YCBCR_RANGE_ITU_FULL;
+    vk::CreateSamplerYcbcrConversion(dev, &sycci, NULL, &ycbcr_conv);
+    VkSamplerYcbcrConversionInfo syci = {};
+    syci.sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO;
+    syci.conversion = ycbcr_conv;
+
+    // Create a view
+    VkImageView image_view = VK_NULL_HANDLE;
+    VkImageViewCreateInfo ivci = {};
+    ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    ivci.pNext = &syci;
+    ivci.image = img;
+    ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    ivci.format = VK_FORMAT_UNDEFINED;
+    ivci.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
+
+    auto reset_view = [&image_view, dev]() {
+        if (VK_NULL_HANDLE != image_view) vk::DestroyImageView(dev, image_view, NULL);
+        image_view = VK_NULL_HANDLE;
+    };
+
+    // Up to this point, no errors expected
+    m_errorMonitor->VerifyNotFound();
+
+    // Chained ycbcr conversion has different (external) format than image
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-image-02400");
+    // Also causes "unsupported format" - should be removed in future spec update
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-None-02273");
+    vk::CreateImageView(dev, &ivci, NULL, &image_view);
+    m_errorMonitor->VerifyFound();
+
+    reset_view();
+    vk::DestroySamplerYcbcrConversion(dev, ycbcr_conv, NULL);
+    sycci.pNext = &efa;
+    vk::CreateSamplerYcbcrConversion(dev, &sycci, NULL, &ycbcr_conv);
+    syci.conversion = ycbcr_conv;
+
+    // View component swizzle not IDENTITY
+    ivci.components.r = VK_COMPONENT_SWIZZLE_B;
+    ivci.components.b = VK_COMPONENT_SWIZZLE_R;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-image-02401");
+    // Also causes "unsupported format" - should be removed in future spec update
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-None-02273");
+    vk::CreateImageView(dev, &ivci, NULL, &image_view);
+    m_errorMonitor->VerifyFound();
+
+    reset_view();
+    ivci.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
+    ivci.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
+
+    // View with external format, when format is not UNDEFINED
+    ivci.format = VK_FORMAT_R5G6B5_UNORM_PACK16;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-image-02399");
+    // Also causes "view format different from image format"
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkImageViewCreateInfo-image-01019");
+    vk::CreateImageView(dev, &ivci, NULL, &image_view);
+    m_errorMonitor->VerifyFound();
+
+    reset_view();
+    vk::DestroySamplerYcbcrConversion(dev, ycbcr_conv, NULL);
+    vk::DestroyImageView(dev, image_view, NULL);
+    vk::DestroyImage(dev, img, NULL);
+    vk::FreeMemory(dev, img_mem, NULL);
+}
+
+TEST_F(VkLayerTest, AndroidHardwareBufferImportBuffer) {
+    TEST_DESCRIPTION("Verify AndroidHardwareBuffer import as buffer.");
+
+    SetTargetApiVersion(VK_API_VERSION_1_1);
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
+        // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
+        (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
+        m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
+    } else {
+        printf("%s %s extension not supported, skipping tests\n", kSkipPrefix,
+               VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    VkDevice dev = m_device->device();
+
+    VkDeviceMemory mem_handle = VK_NULL_HANDLE;
+    auto reset_mem = [&mem_handle, dev]() {
+        if (VK_NULL_HANDLE != mem_handle) vk::FreeMemory(dev, mem_handle, NULL);
+        mem_handle = VK_NULL_HANDLE;
+    };
+
+    PFN_vkGetAndroidHardwareBufferPropertiesANDROID pfn_GetAHBProps =
+        (PFN_vkGetAndroidHardwareBufferPropertiesANDROID)vk::GetDeviceProcAddr(dev, "vkGetAndroidHardwareBufferPropertiesANDROID");
+    ASSERT_TRUE(pfn_GetAHBProps != nullptr);
+
+    // AHB structs
+    AHardwareBuffer *ahb = nullptr;
+    AHardwareBuffer_Desc ahb_desc = {};
+    VkAndroidHardwareBufferPropertiesANDROID ahb_props = {};
+    ahb_props.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
+    VkImportAndroidHardwareBufferInfoANDROID iahbi = {};
+    iahbi.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
+
+    // Allocate an AHardwareBuffer
+    ahb_desc.format = AHARDWAREBUFFER_FORMAT_BLOB;
+    ahb_desc.usage = AHARDWAREBUFFER_USAGE_SENSOR_DIRECT_DATA;
+    ahb_desc.width = 512;
+    ahb_desc.height = 1;
+    ahb_desc.layers = 1;
+    AHardwareBuffer_allocate(&ahb_desc, &ahb);
+    m_errorMonitor->SetUnexpectedError("VUID-vkGetAndroidHardwareBufferPropertiesANDROID-buffer-01884");
+    pfn_GetAHBProps(dev, ahb, &ahb_props);
+    iahbi.buffer = ahb;
+
+    // Create export and import buffers
+    VkExternalMemoryBufferCreateInfo ext_buf_info = {};
+    ext_buf_info.sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR;
+    ext_buf_info.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT;
+
+    VkBufferCreateInfo bci = {};
+    bci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    bci.pNext = &ext_buf_info;
+    bci.size = ahb_props.allocationSize;
+    bci.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
+
+    VkBuffer buf = VK_NULL_HANDLE;
+    vk::CreateBuffer(dev, &bci, NULL, &buf);
+    VkMemoryRequirements mem_reqs;
+    vk::GetBufferMemoryRequirements(dev, buf, &mem_reqs);
+
+    // Allocation info
+    VkMemoryAllocateInfo mai = vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, mem_reqs, 0);
+    mai.pNext = &iahbi;  // Chained import struct
+    VkPhysicalDeviceMemoryProperties memory_info;
+    vk::GetPhysicalDeviceMemoryProperties(gpu(), &memory_info);
+    unsigned int i;
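+    // Pick the first memory type the AHB reports as compatible; the allocation below is still expected to
+    // fail because the AHB was not created with GPU_DATA_BUFFER usage.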
+    for (i = 0; i < memory_info.memoryTypeCount; i++) {
+        if ((ahb_props.memoryTypeBits & (1 << i))) {
+            mai.memoryTypeIndex = i;
+            break;
+        }
+    }
+    if (i >= memory_info.memoryTypeCount) {
+        printf("%s No invalid memory type index could be found; skipped.\n", kSkipPrefix);
+        AHardwareBuffer_release(ahb);
+        reset_mem();
+        vk::DestroyBuffer(dev, buf, NULL);
+        return;
+    }
+
+    // Import as buffer requires format AHB_FMT_BLOB and usage AHB_USAGE_GPU_DATA_BUFFER
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-01881");
+    // Also causes "non-dedicated allocation format/usage" error
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkMemoryAllocateInfo-pNext-02384");
+    vk::AllocateMemory(dev, &mai, NULL, &mem_handle);
+    m_errorMonitor->VerifyFound();
+
+    AHardwareBuffer_release(ahb);
+    reset_mem();
+    vk::DestroyBuffer(dev, buf, NULL);
+}
+
+TEST_F(VkLayerTest, AndroidHardwareBufferExportBuffer) {
+    TEST_DESCRIPTION("Verify AndroidHardwareBuffer memory export as an AHB.");
+
+    SetTargetApiVersion(VK_API_VERSION_1_1);
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if ((DeviceExtensionSupported(gpu(), nullptr, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) &&
+        // Also skip on devices that advertise AHB, but not the pre-requisite foreign_queue extension
+        (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME))) {
+        m_device_extension_names.push_back(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
+    } else {
+        printf("%s %s extension not supported, skipping tests\n", kSkipPrefix,
+               VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    VkDevice dev = m_device->device();
+
+    VkDeviceMemory mem_handle = VK_NULL_HANDLE;
+
+    // Allocate device memory, no linked export struct indicating AHB handle type
+    VkMemoryAllocateInfo mai = {};
+    mai.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    mai.allocationSize = 65536;
+    mai.memoryTypeIndex = 0;
+    vk::AllocateMemory(dev, &mai, NULL, &mem_handle);
+
+    PFN_vkGetMemoryAndroidHardwareBufferANDROID pfn_GetMemAHB =
+        (PFN_vkGetMemoryAndroidHardwareBufferANDROID)vk::GetDeviceProcAddr(dev, "vkGetMemoryAndroidHardwareBufferANDROID");
+    ASSERT_TRUE(pfn_GetMemAHB != nullptr);
+
+    VkMemoryGetAndroidHardwareBufferInfoANDROID mgahbi = {};
+    mgahbi.sType = VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
+    mgahbi.memory = mem_handle;
+    AHardwareBuffer *ahb = nullptr;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-handleTypes-01882");
+    pfn_GetMemAHB(dev, &mgahbi, &ahb);
+    m_errorMonitor->VerifyFound();
+
+    if (ahb) AHardwareBuffer_release(ahb);
+    ahb = nullptr;
+    if (VK_NULL_HANDLE != mem_handle) vk::FreeMemory(dev, mem_handle, NULL);
+    mem_handle = VK_NULL_HANDLE;
+
+    // Add an export struct with AHB handle type to allocation info
+    VkExportMemoryAllocateInfo emai = {};
+    emai.sType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO;
+    emai.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
+    mai.pNext = &emai;
+
+    // Create an image, do not bind memory
+    VkImage img = VK_NULL_HANDLE;
+    VkImageCreateInfo ici = {};
+    ici.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    ici.imageType = VK_IMAGE_TYPE_2D;
+    ici.arrayLayers = 1;
+    ici.extent = {128, 128, 1};
+    ici.format = VK_FORMAT_R8G8B8A8_UNORM;
+    ici.mipLevels = 1;
+    ici.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    ici.samples = VK_SAMPLE_COUNT_1_BIT;
+    ici.tiling = VK_IMAGE_TILING_OPTIMAL;
+    ici.usage = VK_IMAGE_USAGE_SAMPLED_BIT;
+    vk::CreateImage(dev, &ici, NULL, &img);
+    ASSERT_TRUE(VK_NULL_HANDLE != img);
+
+    // Add image to allocation chain as dedicated info, re-allocate
+    VkMemoryDedicatedAllocateInfo mdai = {VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO};
+    mdai.image = img;
+    emai.pNext = &mdai;
+    mai.allocationSize = 0;
+    vk::AllocateMemory(dev, &mai, NULL, &mem_handle);
+    mgahbi.memory = mem_handle;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-pNext-01883");
+    pfn_GetMemAHB(dev, &mgahbi, &ahb);
+    m_errorMonitor->VerifyFound();
+
+    if (ahb) AHardwareBuffer_release(ahb);
+    if (VK_NULL_HANDLE != mem_handle) vk::FreeMemory(dev, mem_handle, NULL);
+    vk::DestroyImage(dev, img, NULL);
+}
+
+#endif  // VK_USE_PLATFORM_ANDROID_KHR
+
+TEST_F(VkLayerTest, ValidateStride) {
+    TEST_DESCRIPTION("Validate Stride.");
+    ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkQueryPool query_pool;
+    VkQueryPoolCreateInfo query_pool_ci{};
+    query_pool_ci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+    query_pool_ci.queryType = VK_QUERY_TYPE_TIMESTAMP;
+    query_pool_ci.queryCount = 1;
+    vk::CreateQueryPool(m_device->device(), &query_pool_ci, nullptr, &query_pool);
+
+    m_commandBuffer->begin();
+    vk::CmdResetQueryPool(m_commandBuffer->handle(), query_pool, 0, 1);
+    vk::CmdWriteTimestamp(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, query_pool, 0);
+    m_commandBuffer->end();
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    vk::QueueWaitIdle(m_device->m_queue);
+
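+    // A stride of 1 is not a multiple of 4 (or of 8 when VK_QUERY_RESULT_64_BIT is set), so the next two calls
+    // are expected to fail.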
+    char data_space;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetQueryPoolResults-flags-02827");
+    vk::GetQueryPoolResults(m_device->handle(), query_pool, 0, 1, sizeof(data_space), &data_space, 1, VK_QUERY_RESULT_WAIT_BIT);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetQueryPoolResults-flags-00815");
+    vk::GetQueryPoolResults(m_device->handle(), query_pool, 0, 1, sizeof(data_space), &data_space, 1,
+                            (VK_QUERY_RESULT_WAIT_BIT | VK_QUERY_RESULT_64_BIT));
+    m_errorMonitor->VerifyFound();
+
+    char data_space4[4] = "";
+    m_errorMonitor->ExpectSuccess();
+    vk::GetQueryPoolResults(m_device->handle(), query_pool, 0, 1, sizeof(data_space4), &data_space4, 4, VK_QUERY_RESULT_WAIT_BIT);
+    m_errorMonitor->VerifyNotFound();
+
+    char data_space8[8] = "";
+    m_errorMonitor->ExpectSuccess();
+    vk::GetQueryPoolResults(m_device->handle(), query_pool, 0, 1, sizeof(data_space8), &data_space8, 8,
+                            (VK_QUERY_RESULT_WAIT_BIT | VK_QUERY_RESULT_64_BIT));
+    m_errorMonitor->VerifyNotFound();
+
+    uint32_t qfi = 0;
+    VkBufferCreateInfo buff_create_info = {};
+    buff_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    buff_create_info.size = 128;
+    buff_create_info.usage =
+        VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
+    buff_create_info.queueFamilyIndexCount = 1;
+    buff_create_info.pQueueFamilyIndices = &qfi;
+    VkBufferObj buffer;
+    buffer.init(*m_device, buff_create_info);
+
+    m_commandBuffer->reset();
+    m_commandBuffer->begin();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyQueryPoolResults-flags-00822");
+    vk::CmdCopyQueryPoolResults(m_commandBuffer->handle(), query_pool, 0, 1, buffer.handle(), 1, 1, 0);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyQueryPoolResults-flags-00823");
+    vk::CmdCopyQueryPoolResults(m_commandBuffer->handle(), query_pool, 0, 1, buffer.handle(), 1, 1, VK_QUERY_RESULT_64_BIT);
+    m_errorMonitor->VerifyFound();
+
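+    // dstOffset and stride of 4 (or 8 with VK_QUERY_RESULT_64_BIT) satisfy the alignment rules, so these copies
+    // should succeed.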
+    m_errorMonitor->ExpectSuccess();
+    vk::CmdCopyQueryPoolResults(m_commandBuffer->handle(), query_pool, 0, 1, buffer.handle(), 4, 4, 0);
+    m_errorMonitor->VerifyNotFound();
+
+    m_errorMonitor->ExpectSuccess();
+    vk::CmdCopyQueryPoolResults(m_commandBuffer->handle(), query_pool, 0, 1, buffer.handle(), 8, 8, VK_QUERY_RESULT_64_BIT);
+    m_errorMonitor->VerifyNotFound();
+
+    if (m_device->phy().features().multiDrawIndirect) {
+        CreatePipelineHelper helper(*this);
+        helper.InitInfo();
+        helper.InitState();
+        helper.CreateGraphicsPipeline();
+        m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+        vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, helper.pipeline_);
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndirect-drawCount-00476");
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndirect-drawCount-00488");
+        vk::CmdDrawIndirect(m_commandBuffer->handle(), buffer.handle(), 0, 100, 2);
+        m_errorMonitor->VerifyFound();
+
+        m_errorMonitor->ExpectSuccess();
+        vk::CmdDrawIndirect(m_commandBuffer->handle(), buffer.handle(), 0, 2, 24);
+        m_errorMonitor->VerifyNotFound();
+
+        vk::CmdBindIndexBuffer(m_commandBuffer->handle(), buffer.handle(), 0, VK_INDEX_TYPE_UINT16);
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndexedIndirect-drawCount-00528");
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndexedIndirect-drawCount-00540");
+        vk::CmdDrawIndexedIndirect(m_commandBuffer->handle(), buffer.handle(), 0, 100, 2);
+        m_errorMonitor->VerifyFound();
+
+        m_errorMonitor->ExpectSuccess();
+        vk::CmdDrawIndexedIndirect(m_commandBuffer->handle(), buffer.handle(), 0, 2, 24);
+        m_errorMonitor->VerifyNotFound();
+
+        vk::CmdEndRenderPass(m_commandBuffer->handle());
+        m_commandBuffer->end();
+
+    } else {
+        printf("%s Test requires unsupported multiDrawIndirect feature. Skipped.\n", kSkipPrefix);
+    }
+    vk::DestroyQueryPool(m_device->handle(), query_pool, NULL);
+}
+
+TEST_F(VkLayerTest, WarningSwapchainCreateInfoPreTransform) {
+    TEST_DESCRIPTION("Print warning when preTransform doesn't match curretTransform");
+
+    if (!AddSurfaceInstanceExtension()) {
+        printf("%s surface extensions not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (!AddSwapchainDeviceExtension()) {
+        printf("%s swapchain extensions not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+                                         "UNASSIGNED-CoreValidation-SwapchainPreTransform");
+    m_errorMonitor->SetUnexpectedError("VUID-VkSwapchainCreateInfoKHR-preTransform-01279");
+    InitSwapchain(VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR);
+    m_errorMonitor->VerifyFound();
+    DestroySwapchain();
+}
+
+bool InitFrameworkForRayTracingTest(VkRenderFramework *renderFramework, std::vector<const char *> &instance_extension_names,
+                                    std::vector<const char *> &device_extension_names, void *user_data,
+                                    bool need_gpu_validation = false, bool need_push_descriptors = false) {
+    const std::array<const char *, 1> required_instance_extensions = {VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME};
+    for (const char *required_instance_extension : required_instance_extensions) {
+        if (renderFramework->InstanceExtensionSupported(required_instance_extension)) {
+            instance_extension_names.push_back(required_instance_extension);
+        } else {
+            printf("%s %s instance extension not supported, skipping test\n", kSkipPrefix, required_instance_extension);
+            return false;
+        }
+    }
+
+    VkValidationFeatureEnableEXT enables[] = {VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT};
+    VkValidationFeaturesEXT features = {};
+    features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
+    features.enabledValidationFeatureCount = 1;
+    features.pEnabledValidationFeatures = enables;
+
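+    // Enable GPU-assisted validation only when the caller requests it.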
+    VkValidationFeaturesEXT *enabled_features = need_gpu_validation ? &features : nullptr;
+
+    renderFramework->InitFramework(myDbgFunc, user_data, enabled_features);
+
+    if (renderFramework->DeviceIsMockICD() || renderFramework->DeviceSimulation()) {
+        printf("%s Test not supported by MockICD, skipping tests\n", kSkipPrefix);
+        return false;
+    }
+
+    std::vector<const char *> required_device_extensions = {
+        VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME,
+        VK_NV_RAY_TRACING_EXTENSION_NAME,
+    };
+    if (need_push_descriptors) {
+        required_device_extensions.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+    }
+
+    for (const char *required_device_extension : required_device_extensions) {
+        if (renderFramework->DeviceExtensionSupported(renderFramework->gpu(), nullptr, required_device_extension)) {
+            device_extension_names.push_back(required_device_extension);
+        } else {
+            printf("%s %s device extension not supported, skipping test\n", kSkipPrefix, required_device_extension);
+            return false;
+        }
+    }
+    renderFramework->InitState();
+    return true;
+}
+
+TEST_F(VkLayerTest, ValidateGeometryNV) {
+    TEST_DESCRIPTION("Validate acceleration structure geometries.");
+    if (!InitFrameworkForRayTracingTest(this, m_instance_extension_names, m_device_extension_names, m_errorMonitor)) {
+        return;
+    }
+
+    VkBufferObj vbo;
+    vbo.init(*m_device, 1024, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
+             VK_BUFFER_USAGE_RAY_TRACING_BIT_NV);
+
+    VkBufferObj ibo;
+    ibo.init(*m_device, 1024, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
+             VK_BUFFER_USAGE_RAY_TRACING_BIT_NV);
+
+    VkBufferObj tbo;
+    tbo.init(*m_device, 1024, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
+             VK_BUFFER_USAGE_RAY_TRACING_BIT_NV);
+
+    VkBufferObj aabbbo;
+    aabbbo.init(*m_device, 1024, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
+                VK_BUFFER_USAGE_RAY_TRACING_BIT_NV);
+
+    VkBufferCreateInfo unbound_buffer_ci = {};
+    unbound_buffer_ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    unbound_buffer_ci.size = 1024;
+    unbound_buffer_ci.usage = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV;
+    VkBufferObj unbound_buffer;
+    unbound_buffer.init_no_mem(*m_device, unbound_buffer_ci);
+
+    const std::vector<float> vertices = {1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, -1.0f, 0.0f, 0.0f};
+    const std::vector<uint32_t> indicies = {0, 1, 2};
+    const std::vector<float> aabbs = {0.0f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f};
+    const std::vector<float> transforms = {1.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f};
+
+    uint8_t *mapped_vbo_buffer_data = (uint8_t *)vbo.memory().map();
+    std::memcpy(mapped_vbo_buffer_data, (uint8_t *)vertices.data(), sizeof(float) * vertices.size());
+    vbo.memory().unmap();
+
+    uint8_t *mapped_ibo_buffer_data = (uint8_t *)ibo.memory().map();
+    std::memcpy(mapped_ibo_buffer_data, (uint8_t *)indices.data(), sizeof(uint32_t) * indices.size());
+    ibo.memory().unmap();
+
+    uint8_t *mapped_tbo_buffer_data = (uint8_t *)tbo.memory().map();
+    std::memcpy(mapped_tbo_buffer_data, (uint8_t *)transforms.data(), sizeof(float) * transforms.size());
+    tbo.memory().unmap();
+
+    uint8_t *mapped_aabbbo_buffer_data = (uint8_t *)aabbbo.memory().map();
+    std::memcpy(mapped_aabbbo_buffer_data, (uint8_t *)aabbs.data(), sizeof(float) * aabbs.size());
+    aabbbo.memory().unmap();
+
+    VkGeometryNV valid_geometry_triangles = {};
+    valid_geometry_triangles.sType = VK_STRUCTURE_TYPE_GEOMETRY_NV;
+    valid_geometry_triangles.geometryType = VK_GEOMETRY_TYPE_TRIANGLES_NV;
+    valid_geometry_triangles.geometry.triangles.sType = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV;
+    valid_geometry_triangles.geometry.triangles.vertexData = vbo.handle();
+    valid_geometry_triangles.geometry.triangles.vertexOffset = 0;
+    valid_geometry_triangles.geometry.triangles.vertexCount = 3;
+    valid_geometry_triangles.geometry.triangles.vertexStride = 12;
+    valid_geometry_triangles.geometry.triangles.vertexFormat = VK_FORMAT_R32G32B32_SFLOAT;
+    valid_geometry_triangles.geometry.triangles.indexData = ibo.handle();
+    valid_geometry_triangles.geometry.triangles.indexOffset = 0;
+    valid_geometry_triangles.geometry.triangles.indexCount = 3;
+    valid_geometry_triangles.geometry.triangles.indexType = VK_INDEX_TYPE_UINT32;
+    valid_geometry_triangles.geometry.triangles.transformData = tbo.handle();
+    valid_geometry_triangles.geometry.triangles.transformOffset = 0;
+    valid_geometry_triangles.geometry.aabbs = {};
+    valid_geometry_triangles.geometry.aabbs.sType = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV;
+
+    VkGeometryNV valid_geometry_aabbs = {};
+    valid_geometry_aabbs.sType = VK_STRUCTURE_TYPE_GEOMETRY_NV;
+    valid_geometry_aabbs.geometryType = VK_GEOMETRY_TYPE_AABBS_NV;
+    valid_geometry_aabbs.geometry.triangles = {};
+    valid_geometry_aabbs.geometry.triangles.sType = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV;
+    valid_geometry_aabbs.geometry.aabbs = {};
+    valid_geometry_aabbs.geometry.aabbs.sType = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV;
+    valid_geometry_aabbs.geometry.aabbs.aabbData = aabbbo.handle();
+    valid_geometry_aabbs.geometry.aabbs.numAABBs = 1;
+    valid_geometry_aabbs.geometry.aabbs.offset = 0;
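+    // A 24-byte stride covers the six floats of one AABB and keeps within the multiple-of-8 stride rule exercised below.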
+    valid_geometry_aabbs.geometry.aabbs.stride = 24;
+
+    PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = reinterpret_cast<PFN_vkCreateAccelerationStructureNV>(
+        vk::GetDeviceProcAddr(m_device->handle(), "vkCreateAccelerationStructureNV"));
+    assert(vkCreateAccelerationStructureNV != nullptr);
+
+    const auto GetCreateInfo = [](const VkGeometryNV &geometry) {
+        VkAccelerationStructureCreateInfoNV as_create_info = {};
+        as_create_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
+        as_create_info.info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
+        as_create_info.info.type = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV;
+        as_create_info.info.instanceCount = 0;
+        as_create_info.info.geometryCount = 1;
+        as_create_info.info.pGeometries = &geometry;
+        return as_create_info;
+    };
+
+    VkAccelerationStructureNV as;
+
+    // Invalid vertex format.
+    {
+        VkGeometryNV geometry = valid_geometry_triangles;
+        geometry.geometry.triangles.vertexFormat = VK_FORMAT_R64_UINT;
+
+        VkAccelerationStructureCreateInfoNV as_create_info = GetCreateInfo(geometry);
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGeometryTrianglesNV-vertexFormat-02430");
+        vkCreateAccelerationStructureNV(m_device->handle(), &as_create_info, nullptr, &as);
+        m_errorMonitor->VerifyFound();
+    }
+    // Invalid vertex offset - not multiple of component size.
+    {
+        VkGeometryNV geometry = valid_geometry_triangles;
+        geometry.geometry.triangles.vertexOffset = 1;
+
+        VkAccelerationStructureCreateInfoNV as_create_info = GetCreateInfo(geometry);
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGeometryTrianglesNV-vertexOffset-02429");
+        vkCreateAccelerationStructureNV(m_device->handle(), &as_create_info, nullptr, &as);
+        m_errorMonitor->VerifyFound();
+    }
+    // Invalid vertex offset - bigger than buffer.
+    {
+        VkGeometryNV geometry = valid_geometry_triangles;
+        geometry.geometry.triangles.vertexOffset = 12 * 1024;
+
+        VkAccelerationStructureCreateInfoNV as_create_info = GetCreateInfo(geometry);
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGeometryTrianglesNV-vertexOffset-02428");
+        vkCreateAccelerationStructureNV(m_device->handle(), &as_create_info, nullptr, &as);
+        m_errorMonitor->VerifyFound();
+    }
+    // Invalid vertex buffer - no such buffer.
+    {
+        VkGeometryNV geometry = valid_geometry_triangles;
+        geometry.geometry.triangles.vertexData = VkBuffer(123456789);
+
+        VkAccelerationStructureCreateInfoNV as_create_info = GetCreateInfo(geometry);
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGeometryTrianglesNV-vertexData-parameter");
+        vkCreateAccelerationStructureNV(m_device->handle(), &as_create_info, nullptr, &as);
+        m_errorMonitor->VerifyFound();
+    }
+    // Invalid vertex buffer - no memory bound.
+    {
+        VkGeometryNV geometry = valid_geometry_triangles;
+        geometry.geometry.triangles.vertexData = unbound_buffer.handle();
+
+        VkAccelerationStructureCreateInfoNV as_create_info = GetCreateInfo(geometry);
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGeometryTrianglesNV-vertexOffset-02428");
+        vkCreateAccelerationStructureNV(m_device->handle(), &as_create_info, nullptr, &as);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Invalid index offset - not multiple of index size.
+    {
+        VkGeometryNV geometry = valid_geometry_triangles;
+        geometry.geometry.triangles.indexOffset = 1;
+
+        VkAccelerationStructureCreateInfoNV as_create_info = GetCreateInfo(geometry);
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGeometryTrianglesNV-indexOffset-02432");
+        vkCreateAccelerationStructureNV(m_device->handle(), &as_create_info, nullptr, &as);
+        m_errorMonitor->VerifyFound();
+    }
+    // Invalid index offset - bigger than buffer.
+    {
+        VkGeometryNV geometry = valid_geometry_triangles;
+        geometry.geometry.triangles.indexOffset = 2048;
+
+        VkAccelerationStructureCreateInfoNV as_create_info = GetCreateInfo(geometry);
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGeometryTrianglesNV-indexOffset-02431");
+        vkCreateAccelerationStructureNV(m_device->handle(), &as_create_info, nullptr, &as);
+        m_errorMonitor->VerifyFound();
+    }
+    // Invalid index count - must be 0 if type is VK_INDEX_TYPE_NONE_NV.
+    {
+        VkGeometryNV geometry = valid_geometry_triangles;
+        geometry.geometry.triangles.indexType = VK_INDEX_TYPE_NONE_NV;
+        geometry.geometry.triangles.indexData = VK_NULL_HANDLE;
+        geometry.geometry.triangles.indexCount = 1;
+
+        VkAccelerationStructureCreateInfoNV as_create_info = GetCreateInfo(geometry);
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGeometryTrianglesNV-indexCount-02436");
+        vkCreateAccelerationStructureNV(m_device->handle(), &as_create_info, nullptr, &as);
+        m_errorMonitor->VerifyFound();
+    }
+    // Invalid index data - must be VK_NULL_HANDLE if type is VK_INDEX_TYPE_NONE_NV.
+    {
+        VkGeometryNV geometry = valid_geometry_triangles;
+        geometry.geometry.triangles.indexType = VK_INDEX_TYPE_NONE_NV;
+        geometry.geometry.triangles.indexData = ibo.handle();
+        geometry.geometry.triangles.indexCount = 0;
+
+        VkAccelerationStructureCreateInfoNV as_create_info = GetCreateInfo(geometry);
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGeometryTrianglesNV-indexData-02434");
+        vkCreateAccelerationStructureNV(m_device->handle(), &as_create_info, nullptr, &as);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Invalid transform offset - not multiple of 16.
+    {
+        VkGeometryNV geometry = valid_geometry_triangles;
+        geometry.geometry.triangles.transformOffset = 1;
+
+        VkAccelerationStructureCreateInfoNV as_create_info = GetCreateInfo(geometry);
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGeometryTrianglesNV-transformOffset-02438");
+        vkCreateAccelerationStructureNV(m_device->handle(), &as_create_info, nullptr, &as);
+        m_errorMonitor->VerifyFound();
+    }
+    // Invalid transform offset - bigger than buffer.
+    {
+        VkGeometryNV geometry = valid_geometry_triangles;
+        geometry.geometry.triangles.transformOffset = 2048;
+
+        VkAccelerationStructureCreateInfoNV as_create_info = GetCreateInfo(geometry);
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGeometryTrianglesNV-transformOffset-02437");
+        vkCreateAccelerationStructureNV(m_device->handle(), &as_create_info, nullptr, &as);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Invalid aabb offset - not multiple of 8.
+    {
+        VkGeometryNV geometry = valid_geometry_aabbs;
+        geometry.geometry.aabbs.offset = 1;
+
+        VkAccelerationStructureCreateInfoNV as_create_info = GetCreateInfo(geometry);
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGeometryAABBNV-offset-02440");
+        vkCreateAccelerationStructureNV(m_device->handle(), &as_create_info, nullptr, &as);
+        m_errorMonitor->VerifyFound();
+    }
+    // Invalid aabb offset - bigger than buffer.
+    {
+        VkGeometryNV geometry = valid_geometry_aabbs;
+        geometry.geometry.aabbs.offset = 8 * 1024;
+
+        VkAccelerationStructureCreateInfoNV as_create_info = GetCreateInfo(geometry);
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGeometryAABBNV-offset-02439");
+        vkCreateAccelerationStructureNV(m_device->handle(), &as_create_info, nullptr, &as);
+        m_errorMonitor->VerifyFound();
+    }
+    // Invalid aabb stride - not multiple of 8.
+    {
+        VkGeometryNV geometry = valid_geometry_aabbs;
+        geometry.geometry.aabbs.stride = 1;
+
+        VkAccelerationStructureCreateInfoNV as_create_info = GetCreateInfo(geometry);
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGeometryAABBNV-stride-02441");
+        vkCreateAccelerationStructureNV(m_device->handle(), &as_create_info, nullptr, &as);
+        m_errorMonitor->VerifyFound();
+    }
+}
+
+void GetSimpleGeometryForAccelerationStructureTests(const VkDeviceObj &device, VkBufferObj *vbo, VkBufferObj *ibo,
+                                                    VkGeometryNV *geometry) {
+    vbo->init(device, 1024, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
+              VK_BUFFER_USAGE_RAY_TRACING_BIT_NV);
+    ibo->init(device, 1024, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
+              VK_BUFFER_USAGE_RAY_TRACING_BIT_NV);
+
+    const std::vector<float> vertices = {1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, -1.0f, 0.0f, 0.0f};
+    const std::vector<uint32_t> indicies = {0, 1, 2};
+
+    uint8_t *mapped_vbo_buffer_data = (uint8_t *)vbo->memory().map();
+    std::memcpy(mapped_vbo_buffer_data, (uint8_t *)vertices.data(), sizeof(float) * vertices.size());
+    vbo->memory().unmap();
+
+    uint8_t *mapped_ibo_buffer_data = (uint8_t *)ibo->memory().map();
+    std::memcpy(mapped_ibo_buffer_data, (uint8_t *)indices.data(), sizeof(uint32_t) * indices.size());
+    ibo->memory().unmap();
+
+    *geometry = {};
+    geometry->sType = VK_STRUCTURE_TYPE_GEOMETRY_NV;
+    geometry->geometryType = VK_GEOMETRY_TYPE_TRIANGLES_NV;
+    geometry->geometry.triangles.sType = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV;
+    geometry->geometry.triangles.vertexData = vbo->handle();
+    geometry->geometry.triangles.vertexOffset = 0;
+    geometry->geometry.triangles.vertexCount = 3;
+    geometry->geometry.triangles.vertexStride = 12;
+    geometry->geometry.triangles.vertexFormat = VK_FORMAT_R32G32B32_SFLOAT;
+    geometry->geometry.triangles.indexData = ibo->handle();
+    geometry->geometry.triangles.indexOffset = 0;
+    geometry->geometry.triangles.indexCount = 3;
+    geometry->geometry.triangles.indexType = VK_INDEX_TYPE_UINT32;
+    geometry->geometry.triangles.transformData = VK_NULL_HANDLE;
+    geometry->geometry.triangles.transformOffset = 0;
+    geometry->geometry.aabbs = {};
+    geometry->geometry.aabbs.sType = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV;
+}
+
+TEST_F(VkLayerTest, ValidateCreateAccelerationStructureNV) {
+    TEST_DESCRIPTION("Validate acceleration structure creation.");
+    if (!InitFrameworkForRayTracingTest(this, m_instance_extension_names, m_device_extension_names, m_errorMonitor)) {
+        return;
+    }
+
+    PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = reinterpret_cast<PFN_vkCreateAccelerationStructureNV>(
+        vk::GetDeviceProcAddr(m_device->handle(), "vkCreateAccelerationStructureNV"));
+    assert(vkCreateAccelerationStructureNV != nullptr);
+
+    VkBufferObj vbo;
+    VkBufferObj ibo;
+    VkGeometryNV geometry;
+    GetSimpleGeometryForAccelerationStructureTests(*m_device, &vbo, &ibo, &geometry);
+
+    VkAccelerationStructureCreateInfoNV as_create_info = {};
+    as_create_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
+    as_create_info.info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
+
+    VkAccelerationStructureNV as = VK_NULL_HANDLE;
+
+    // Top level can not have geometry
+    {
+        VkAccelerationStructureCreateInfoNV bad_top_level_create_info = as_create_info;
+        bad_top_level_create_info.info.type = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV;
+        bad_top_level_create_info.info.instanceCount = 0;
+        bad_top_level_create_info.info.geometryCount = 1;
+        bad_top_level_create_info.info.pGeometries = &geometry;
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkAccelerationStructureInfoNV-type-02425");
+        vkCreateAccelerationStructureNV(m_device->handle(), &bad_top_level_create_info, nullptr, &as);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Bot level can not have instances
+    {
+        VkAccelerationStructureCreateInfoNV bad_bot_level_create_info = as_create_info;
+        bad_bot_level_create_info.info.type = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV;
+        bad_bot_level_create_info.info.instanceCount = 1;
+        bad_bot_level_create_info.info.geometryCount = 0;
+        bad_bot_level_create_info.info.pGeometries = nullptr;
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkAccelerationStructureInfoNV-type-02426");
+        vkCreateAccelerationStructureNV(m_device->handle(), &bad_bot_level_create_info, nullptr, &as);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Can not prefer both fast trace and fast build
+    {
+        VkAccelerationStructureCreateInfoNV bad_flags_level_create_info = as_create_info;
+        bad_flags_level_create_info.info.type = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV;
+        bad_flags_level_create_info.info.instanceCount = 0;
+        bad_flags_level_create_info.info.geometryCount = 1;
+        bad_flags_level_create_info.info.pGeometries = &geometry;
+        bad_flags_level_create_info.info.flags =
+            VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV | VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV;
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkAccelerationStructureInfoNV-flags-02592");
+        vkCreateAccelerationStructureNV(m_device->handle(), &bad_flags_level_create_info, nullptr, &as);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Can not have geometry or instance for compacting
+    {
+        VkAccelerationStructureCreateInfoNV bad_compacting_as_create_info = as_create_info;
+        bad_compacting_as_create_info.info.type = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV;
+        bad_compacting_as_create_info.info.instanceCount = 0;
+        bad_compacting_as_create_info.info.geometryCount = 1;
+        bad_compacting_as_create_info.info.pGeometries = &geometry;
+        bad_compacting_as_create_info.info.flags = 0;
+        bad_compacting_as_create_info.compactedSize = 1024;
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkAccelerationStructureCreateInfoNV-compactedSize-02421");
+        vkCreateAccelerationStructureNV(m_device->handle(), &bad_compacting_as_create_info, nullptr, &as);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Can not mix different geometry types into single bottom level acceleration structure
+    {
+        VkGeometryNV aabb_geometry = {};
+        aabb_geometry.sType = VK_STRUCTURE_TYPE_GEOMETRY_NV;
+        aabb_geometry.geometryType = VK_GEOMETRY_TYPE_AABBS_NV;
+        aabb_geometry.geometry.triangles.sType = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV;
+        aabb_geometry.geometry.aabbs = {};
+        aabb_geometry.geometry.aabbs.sType = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV;
+        // Buffer contents do not matter for this test.
+        aabb_geometry.geometry.aabbs.aabbData = geometry.geometry.triangles.vertexData;
+        aabb_geometry.geometry.aabbs.numAABBs = 1;
+        aabb_geometry.geometry.aabbs.offset = 0;
+        aabb_geometry.geometry.aabbs.stride = 24;
+
+        std::vector<VkGeometryNV> geometries = {geometry, aabb_geometry};
+
+        VkAccelerationStructureCreateInfoNV mix_geometry_types_as_create_info = as_create_info;
+        mix_geometry_types_as_create_info.info.type = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV;
+        mix_geometry_types_as_create_info.info.instanceCount = 0;
+        mix_geometry_types_as_create_info.info.geometryCount = static_cast<uint32_t>(geometries.size());
+        mix_geometry_types_as_create_info.info.pGeometries = geometries.data();
+        mix_geometry_types_as_create_info.info.flags = 0;
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "UNASSIGNED-VkAccelerationStructureInfoNV-pGeometries-XXXX");
+        vkCreateAccelerationStructureNV(m_device->handle(), &mix_geometry_types_as_create_info, nullptr, &as);
+        m_errorMonitor->VerifyFound();
+    }
+}
+
+TEST_F(VkLayerTest, ValidateBindAccelerationStructureNV) {
+    TEST_DESCRIPTION("Validate acceleration structure binding.");
+    if (!InitFrameworkForRayTracingTest(this, m_instance_extension_names, m_device_extension_names, m_errorMonitor)) {
+        return;
+    }
+
+    PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV =
+        reinterpret_cast<PFN_vkBindAccelerationStructureMemoryNV>(
+            vk::GetDeviceProcAddr(m_device->handle(), "vkBindAccelerationStructureMemoryNV"));
+    assert(vkBindAccelerationStructureMemoryNV != nullptr);
+
+    VkBufferObj vbo;
+    VkBufferObj ibo;
+    VkGeometryNV geometry;
+    GetSimpleGeometryForAccelerationStructureTests(*m_device, &vbo, &ibo, &geometry);
+
+    VkAccelerationStructureCreateInfoNV as_create_info = {};
+    as_create_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
+    as_create_info.info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
+    as_create_info.info.type = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV;
+    as_create_info.info.geometryCount = 1;
+    as_create_info.info.pGeometries = &geometry;
+    as_create_info.info.instanceCount = 0;
+
+    VkAccelerationStructureObj as(*m_device, as_create_info, false);
+    m_errorMonitor->VerifyNotFound();
+
+    VkMemoryRequirements as_memory_requirements = as.memory_requirements().memoryRequirements;
+
+    VkBindAccelerationStructureMemoryInfoNV as_bind_info = {};
+    as_bind_info.sType = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV;
+    as_bind_info.accelerationStructure = as.handle();
+
+    VkMemoryAllocateInfo as_memory_alloc = {};
+    as_memory_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    as_memory_alloc.allocationSize = as_memory_requirements.size;
+    ASSERT_TRUE(m_device->phy().set_memory_type(as_memory_requirements.memoryTypeBits, &as_memory_alloc, 0));
+
+    // Can not bind already freed memory
+    {
+        VkDeviceMemory as_memory_freed = VK_NULL_HANDLE;
+        ASSERT_VK_SUCCESS(vk::AllocateMemory(device(), &as_memory_alloc, NULL, &as_memory_freed));
+        vk::FreeMemory(device(), as_memory_freed, NULL);
+
+        VkBindAccelerationStructureMemoryInfoNV as_bind_info_freed = as_bind_info;
+        as_bind_info_freed.memory = as_memory_freed;
+        as_bind_info_freed.memoryOffset = 0;
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkBindAccelerationStructureMemoryInfoNV-memory-parameter");
+        (void)vkBindAccelerationStructureMemoryNV(device(), 1, &as_bind_info_freed);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Can not bind with bad alignment
+    if (as_memory_requirements.alignment > 1) {
+        VkMemoryAllocateInfo as_memory_alloc_bad_alignment = as_memory_alloc;
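+        // Over-allocate by one byte so that a memoryOffset of 1 still fits in the allocation while breaking the
+        // required alignment.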
+        as_memory_alloc_bad_alignment.allocationSize += 1;
+
+        VkDeviceMemory as_memory_bad_alignment = VK_NULL_HANDLE;
+        ASSERT_VK_SUCCESS(vk::AllocateMemory(device(), &as_memory_alloc_bad_alignment, NULL, &as_memory_bad_alignment));
+
+        VkBindAccelerationStructureMemoryInfoNV as_bind_info_bad_alignment = as_bind_info;
+        as_bind_info_bad_alignment.memory = as_memory_bad_alignment;
+        as_bind_info_bad_alignment.memoryOffset = 1;
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkBindAccelerationStructureMemoryInfoNV-memoryOffset-02594");
+        (void)vkBindAccelerationStructureMemoryNV(device(), 1, &as_bind_info_bad_alignment);
+        m_errorMonitor->VerifyFound();
+
+        vk::FreeMemory(device(), as_memory_bad_alignment, NULL);
+    }
+
+    // Can not bind with offset outside the allocation
+    {
+        VkDeviceMemory as_memory_bad_offset = VK_NULL_HANDLE;
+        ASSERT_VK_SUCCESS(vk::AllocateMemory(device(), &as_memory_alloc, NULL, &as_memory_bad_offset));
+
+        VkBindAccelerationStructureMemoryInfoNV as_bind_info_bad_offset = as_bind_info;
+        as_bind_info_bad_offset.memory = as_memory_bad_offset;
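+        // Round up to an aligned offset that lies past the end of the allocation.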
+        as_bind_info_bad_offset.memoryOffset =
+            (as_memory_alloc.allocationSize + as_memory_requirements.alignment) & ~(as_memory_requirements.alignment - 1);
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkBindAccelerationStructureMemoryInfoNV-memoryOffset-02451");
+        (void)vkBindAccelerationStructureMemoryNV(device(), 1, &as_bind_info_bad_offset);
+        m_errorMonitor->VerifyFound();
+
+        vk::FreeMemory(device(), as_memory_bad_offset, NULL);
+    }
+
+    // Can not bind with offset that doesn't leave enough size
+    {
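+        // Largest aligned offset that is still inside the allocation; binding there leaves less than the required size.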
+        VkDeviceSize offset = (as_memory_requirements.size - 1) & ~(as_memory_requirements.alignment - 1);
+        if (offset > 0 && (as_memory_requirements.size < (as_memory_alloc.allocationSize - as_memory_requirements.alignment))) {
+            VkDeviceMemory as_memory_bad_offset = VK_NULL_HANDLE;
+            ASSERT_VK_SUCCESS(vk::AllocateMemory(device(), &as_memory_alloc, NULL, &as_memory_bad_offset));
+
+            VkBindAccelerationStructureMemoryInfoNV as_bind_info_bad_offset = as_bind_info;
+            as_bind_info_bad_offset.memory = as_memory_bad_offset;
+            as_bind_info_bad_offset.memoryOffset = offset;
+
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                                 "VUID-VkBindAccelerationStructureMemoryInfoNV-size-02595");
+            (void)vkBindAccelerationStructureMemoryNV(device(), 1, &as_bind_info_bad_offset);
+            m_errorMonitor->VerifyFound();
+
+            vk::FreeMemory(device(), as_memory_bad_offset, NULL);
+        }
+    }
+
+    // Can not bind with memory that has unsupported memory type
+    {
+        VkPhysicalDeviceMemoryProperties memory_properties = {};
+        vk::GetPhysicalDeviceMemoryProperties(m_device->phy().handle(), &memory_properties);
+
+        uint32_t supported_memory_type_bits = as_memory_requirements.memoryTypeBits;
+        uint32_t unsupported_mem_type_bits = ((1 << memory_properties.memoryTypeCount) - 1) & ~supported_memory_type_bits;
+        if (unsupported_mem_type_bits != 0) {
+            VkMemoryAllocateInfo as_memory_alloc_bad_type = as_memory_alloc;
+            ASSERT_TRUE(m_device->phy().set_memory_type(unsupported_mem_type_bits, &as_memory_alloc_bad_type, 0));
+
+            VkDeviceMemory as_memory_bad_type = VK_NULL_HANDLE;
+            ASSERT_VK_SUCCESS(vk::AllocateMemory(device(), &as_memory_alloc_bad_type, NULL, &as_memory_bad_type));
+
+            VkBindAccelerationStructureMemoryInfoNV as_bind_info_bad_type = as_bind_info;
+            as_bind_info_bad_type.memory = as_memory_bad_type;
+
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                                 "VUID-VkBindAccelerationStructureMemoryInfoNV-memory-02593");
+            (void)vkBindAccelerationStructureMemoryNV(device(), 1, &as_bind_info_bad_type);
+            m_errorMonitor->VerifyFound();
+
+            vk::FreeMemory(device(), as_memory_bad_type, NULL);
+        }
+    }
+
+    // Can not bind memory twice
+    {
+        VkAccelerationStructureObj as_twice(*m_device, as_create_info, false);
+
+        VkDeviceMemory as_memory_twice_1 = VK_NULL_HANDLE;
+        VkDeviceMemory as_memory_twice_2 = VK_NULL_HANDLE;
+        ASSERT_VK_SUCCESS(vk::AllocateMemory(device(), &as_memory_alloc, NULL, &as_memory_twice_1));
+        ASSERT_VK_SUCCESS(vk::AllocateMemory(device(), &as_memory_alloc, NULL, &as_memory_twice_2));
+        VkBindAccelerationStructureMemoryInfoNV as_bind_info_twice_1 = as_bind_info;
+        VkBindAccelerationStructureMemoryInfoNV as_bind_info_twice_2 = as_bind_info;
+        as_bind_info_twice_1.accelerationStructure = as_twice.handle();
+        as_bind_info_twice_2.accelerationStructure = as_twice.handle();
+        as_bind_info_twice_1.memory = as_memory_twice_1;
+        as_bind_info_twice_2.memory = as_memory_twice_2;
+
+        ASSERT_VK_SUCCESS(vkBindAccelerationStructureMemoryNV(device(), 1, &as_bind_info_twice_1));
+        m_errorMonitor->VerifyNotFound();
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkBindAccelerationStructureMemoryInfoNV-accelerationStructure-02450");
+        (void)vkBindAccelerationStructureMemoryNV(device(), 1, &as_bind_info_twice_2);
+        m_errorMonitor->VerifyFound();
+
+        vk::FreeMemory(device(), as_memory_twice_1, NULL);
+        vk::FreeMemory(device(), as_memory_twice_2, NULL);
+    }
+}
+
+TEST_F(VkLayerTest, ValidateCmdBuildAccelerationStructureNV) {
+    TEST_DESCRIPTION("Validate acceleration structure building.");
+    if (!InitFrameworkForRayTracingTest(this, m_instance_extension_names, m_device_extension_names, m_errorMonitor)) {
+        return;
+    }
+
+    PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV =
+        reinterpret_cast<PFN_vkCmdBuildAccelerationStructureNV>(
+            vk::GetDeviceProcAddr(m_device->handle(), "vkCmdBuildAccelerationStructureNV"));
+    assert(vkCmdBuildAccelerationStructureNV != nullptr);
+
+    VkBufferObj vbo;
+    VkBufferObj ibo;
+    VkGeometryNV geometry;
+    GetSimpleGeometryForAccelerationStructureTests(*m_device, &vbo, &ibo, &geometry);
+
+    VkAccelerationStructureCreateInfoNV bot_level_as_create_info = {};
+    bot_level_as_create_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
+    bot_level_as_create_info.info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
+    bot_level_as_create_info.info.type = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV;
+    bot_level_as_create_info.info.instanceCount = 0;
+    bot_level_as_create_info.info.geometryCount = 1;
+    bot_level_as_create_info.info.pGeometries = &geometry;
+
+    VkAccelerationStructureObj bot_level_as(*m_device, bot_level_as_create_info);
+    m_errorMonitor->VerifyNotFound();
+
+    VkBufferObj bot_level_as_scratch;
+    bot_level_as.create_scratch_buffer(*m_device, &bot_level_as_scratch);
+
+    // Command buffer must be in recording state
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdBuildAccelerationStructureNV-commandBuffer-recording");
+    vkCmdBuildAccelerationStructureNV(m_commandBuffer->handle(), &bot_level_as_create_info.info, VK_NULL_HANDLE, 0, VK_FALSE,
+                                      bot_level_as.handle(), VK_NULL_HANDLE, bot_level_as_scratch.handle(), 0);
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->begin();
+
+    // Incompatible type
+    VkAccelerationStructureInfoNV as_build_info_with_incompatible_type = bot_level_as_create_info.info;
+    as_build_info_with_incompatible_type.type = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV;
+    as_build_info_with_incompatible_type.instanceCount = 1;
+    as_build_info_with_incompatible_type.geometryCount = 0;
+
+    // The VUID is expected twice because the mismatched type and the lower instance count each trigger a separate error;
+    // the build info is incompatible with dst but must still be valid enough to get past the stateless checks.
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBuildAccelerationStructureNV-dst-02488");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBuildAccelerationStructureNV-dst-02488");
+    vkCmdBuildAccelerationStructureNV(m_commandBuffer->handle(), &as_build_info_with_incompatible_type, VK_NULL_HANDLE, 0, VK_FALSE,
+                                      bot_level_as.handle(), VK_NULL_HANDLE, bot_level_as_scratch.handle(), 0);
+    m_errorMonitor->VerifyFound();
+
+    // Incompatible flags
+    VkAccelerationStructureInfoNV as_build_info_with_incompatible_flags = bot_level_as_create_info.info;
+    as_build_info_with_incompatible_flags.flags = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBuildAccelerationStructureNV-dst-02488");
+    vkCmdBuildAccelerationStructureNV(m_commandBuffer->handle(), &as_build_info_with_incompatible_flags, VK_NULL_HANDLE, 0,
+                                      VK_FALSE, bot_level_as.handle(), VK_NULL_HANDLE, bot_level_as_scratch.handle(), 0);
+    m_errorMonitor->VerifyFound();
+
+    // Incompatible build size
+    VkGeometryNV geometry_with_more_vertices = geometry;
+    geometry_with_more_vertices.geometry.triangles.vertexCount += 1;
+
+    VkAccelerationStructureInfoNV as_build_info_with_incompatible_geometry = bot_level_as_create_info.info;
+    as_build_info_with_incompatible_geometry.pGeometries = &geometry_with_more_vertices;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBuildAccelerationStructureNV-dst-02488");
+    vkCmdBuildAccelerationStructureNV(m_commandBuffer->handle(), &as_build_info_with_incompatible_geometry, VK_NULL_HANDLE, 0,
+                                      VK_FALSE, bot_level_as.handle(), VK_NULL_HANDLE, bot_level_as_scratch.handle(), 0);
+    m_errorMonitor->VerifyFound();
+
+    // Scratch buffer too small
+    VkBufferCreateInfo too_small_scratch_buffer_info = {};
+    too_small_scratch_buffer_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    too_small_scratch_buffer_info.usage = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV;
+    too_small_scratch_buffer_info.size = 1;
+    VkBufferObj too_small_scratch_buffer(*m_device, too_small_scratch_buffer_info);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBuildAccelerationStructureNV-update-02491");
+    vkCmdBuildAccelerationStructureNV(m_commandBuffer->handle(), &bot_level_as_create_info.info, VK_NULL_HANDLE, 0, VK_FALSE,
+                                      bot_level_as.handle(), VK_NULL_HANDLE, too_small_scratch_buffer.handle(), 0);
+    m_errorMonitor->VerifyFound();
+
+    // Scratch buffer offset leaves too little remaining space for the build
+    VkDeviceSize scratch_buffer_offset = 5;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBuildAccelerationStructureNV-update-02491");
+    vkCmdBuildAccelerationStructureNV(m_commandBuffer->handle(), &bot_level_as_create_info.info, VK_NULL_HANDLE, 0, VK_FALSE,
+                                      bot_level_as.handle(), VK_NULL_HANDLE, bot_level_as_scratch.handle(), scratch_buffer_offset);
+    m_errorMonitor->VerifyFound();
+
+    // Src must have been built before
+    VkAccelerationStructureObj bot_level_as_updated(*m_device, bot_level_as_create_info);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBuildAccelerationStructureNV-update-02489");
+    vkCmdBuildAccelerationStructureNV(m_commandBuffer->handle(), &bot_level_as_create_info.info, VK_NULL_HANDLE, 0, VK_TRUE,
+                                      bot_level_as_updated.handle(), bot_level_as.handle(), bot_level_as_scratch.handle(), 0);
+    m_errorMonitor->VerifyFound();
+
+    // Src must have been built with the VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV flag:
+    // build it once without the flag, then attempt an update against it.
+    vkCmdBuildAccelerationStructureNV(m_commandBuffer->handle(), &bot_level_as_create_info.info, VK_NULL_HANDLE, 0, VK_FALSE,
+                                      bot_level_as.handle(), VK_NULL_HANDLE, bot_level_as_scratch.handle(), 0);
+    m_errorMonitor->VerifyNotFound();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBuildAccelerationStructureNV-update-02489");
+    vkCmdBuildAccelerationStructureNV(m_commandBuffer->handle(), &bot_level_as_create_info.info, VK_NULL_HANDLE, 0, VK_TRUE,
+                                      bot_level_as_updated.handle(), bot_level_as.handle(), bot_level_as_scratch.handle(), 0);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, ValidateGetAccelerationStructureHandleNV) {
+    TEST_DESCRIPTION("Validate acceleration structure handle querying.");
+    if (!InitFrameworkForRayTracingTest(this, m_instance_extension_names, m_device_extension_names, m_errorMonitor)) {
+        return;
+    }
+
+    PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV =
+        reinterpret_cast<PFN_vkGetAccelerationStructureHandleNV>(
+            vk::GetDeviceProcAddr(m_device->handle(), "vkGetAccelerationStructureHandleNV"));
+    assert(vkGetAccelerationStructureHandleNV != nullptr);
+
+    VkBufferObj vbo;
+    VkBufferObj ibo;
+    VkGeometryNV geometry;
+    GetSimpleGeometryForAccelerationStructureTests(*m_device, &vbo, &ibo, &geometry);
+
+    VkAccelerationStructureCreateInfoNV bot_level_as_create_info = {};
+    bot_level_as_create_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
+    bot_level_as_create_info.info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
+    bot_level_as_create_info.info.type = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV;
+    bot_level_as_create_info.info.instanceCount = 0;
+    bot_level_as_create_info.info.geometryCount = 1;
+    bot_level_as_create_info.info.pGeometries = &geometry;
+
+    // Not enough space for the handle
+    {
+        VkAccelerationStructureObj bot_level_as(*m_device, bot_level_as_create_info);
+        m_errorMonitor->VerifyNotFound();
+
+        uint64_t handle = 0;
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-vkGetAccelerationStructureHandleNV-dataSize-02240");
+        vkGetAccelerationStructureHandleNV(m_device->handle(), bot_level_as.handle(), sizeof(uint8_t), &handle);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // No memory bound to acceleration structure
+    {
+        VkAccelerationStructureObj bot_level_as(*m_device, bot_level_as_create_info, /*init_memory=*/false);
+        m_errorMonitor->VerifyNotFound();
+
+        uint64_t handle = 0;
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "UNASSIGNED-vkGetAccelerationStructureHandleNV-accelerationStructure-XXXX");
+        vkGetAccelerationStructureHandleNV(m_device->handle(), bot_level_as.handle(), sizeof(uint64_t), &handle);
+        m_errorMonitor->VerifyFound();
+    }
+}
+
+TEST_F(VkLayerTest, ValidateCmdCopyAccelerationStructureNV) {
+    TEST_DESCRIPTION("Validate acceleration structure copying.");
+    if (!InitFrameworkForRayTracingTest(this, m_instance_extension_names, m_device_extension_names, m_errorMonitor)) {
+        return;
+    }
+
+    PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = reinterpret_cast<PFN_vkCmdCopyAccelerationStructureNV>(
+        vk::GetDeviceProcAddr(m_device->handle(), "vkCmdCopyAccelerationStructureNV"));
+    assert(vkCmdCopyAccelerationStructureNV != nullptr);
+
+    VkBufferObj vbo;
+    VkBufferObj ibo;
+    VkGeometryNV geometry;
+    GetSimpleGeometryForAccelerationStructureTests(*m_device, &vbo, &ibo, &geometry);
+
+    VkAccelerationStructureCreateInfoNV as_create_info = {};
+    as_create_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
+    as_create_info.info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
+    as_create_info.info.type = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV;
+    as_create_info.info.instanceCount = 0;
+    as_create_info.info.geometryCount = 1;
+    as_create_info.info.pGeometries = &geometry;
+
+    VkAccelerationStructureObj src_as(*m_device, as_create_info);
+    VkAccelerationStructureObj dst_as(*m_device, as_create_info);
+    VkAccelerationStructureObj dst_as_without_mem(*m_device, as_create_info, false);
+    m_errorMonitor->VerifyNotFound();
+
+    // Command buffer must be in recording state
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-vkCmdCopyAccelerationStructureNV-commandBuffer-recording");
+    vkCmdCopyAccelerationStructureNV(m_commandBuffer->handle(), dst_as.handle(), src_as.handle(),
+                                     VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV);
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->begin();
+
+    // Src must have been created with allow compaction flag
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdCopyAccelerationStructureNV-src-02497");
+    vkCmdCopyAccelerationStructureNV(m_commandBuffer->handle(), dst_as.handle(), src_as.handle(),
+                                     VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV);
+    m_errorMonitor->VerifyFound();
+
+    // Dst must have been bound with memory
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkAccelerationStructureNV");
+    vkCmdCopyAccelerationStructureNV(m_commandBuffer->handle(), dst_as_without_mem.handle(), src_as.handle(),
+                                     VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, GpuBuildAccelerationStructureValidationInvalidHandle) {
+    TEST_DESCRIPTION(
+        "Acceleration structure gpu validation should report an invalid handle when trying to build a top level "
+        "acceleration structure with an invalid handle for a bottom level acceleration structure.");
+
+    if (!InitFrameworkForRayTracingTest(this, m_instance_extension_names, m_device_extension_names, m_errorMonitor,
+                                        /*need_gpu_validation=*/true)) {
+        return;
+    }
+
+    PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV =
+        reinterpret_cast<PFN_vkCmdBuildAccelerationStructureNV>(
+            vk::GetDeviceProcAddr(m_device->handle(), "vkCmdBuildAccelerationStructureNV"));
+    assert(vkCmdBuildAccelerationStructureNV != nullptr);
+
+    VkBufferObj vbo;
+    VkBufferObj ibo;
+    VkGeometryNV geometry;
+    GetSimpleGeometryForAccelerationStructureTests(*m_device, &vbo, &ibo, &geometry);
+
+    VkAccelerationStructureCreateInfoNV top_level_as_create_info = {};
+    top_level_as_create_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
+    top_level_as_create_info.info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
+    top_level_as_create_info.info.type = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV;
+    top_level_as_create_info.info.instanceCount = 1;
+    top_level_as_create_info.info.geometryCount = 0;
+
+    VkCommandPoolObj command_pool(m_device, 0, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
+
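+    // Host-side mirror of the instance record consumed from the instance buffer by a top level acceleration structure build.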
+    struct VkGeometryInstanceNV {
+        float transform[12];
+        uint32_t instanceCustomIndex : 24;
+        uint32_t mask : 8;
+        uint32_t instanceOffset : 24;
+        uint32_t flags : 8;
+        uint64_t accelerationStructureHandle;
+    };
+
+    VkGeometryInstanceNV instance = {
+        {
+            // clang-format off
+            1.0f, 0.0f, 0.0f, 0.0f,
+            0.0f, 1.0f, 0.0f, 0.0f,
+            0.0f, 0.0f, 1.0f, 0.0f,
+            // clang-format on
+        },
+        0,
+        0xFF,
+        0,
+        VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV,
+        1234567890,  // invalid
+    };
+
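+    // Upload the instance, including its bogus acceleration structure handle, to a host-visible buffer for the top level build.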
+    VkDeviceSize instance_buffer_size = sizeof(VkGeometryInstanceNV);
+    VkBufferObj instance_buffer;
+    instance_buffer.init(*m_device, instance_buffer_size,
+                         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
+                         VK_BUFFER_USAGE_RAY_TRACING_BIT_NV);
+
+    uint8_t *mapped_instance_buffer_data = (uint8_t *)instance_buffer.memory().map();
+    std::memcpy(mapped_instance_buffer_data, (uint8_t *)&instance, static_cast<std::size_t>(instance_buffer_size));
+    instance_buffer.memory().unmap();
+
+    VkAccelerationStructureObj top_level_as(*m_device, top_level_as_create_info);
+    m_errorMonitor->VerifyNotFound();
+
+    VkBufferObj top_level_as_scratch;
+    top_level_as.create_scratch_buffer(*m_device, &top_level_as_scratch);
+
+    VkCommandBufferObj command_buffer(m_device, &command_pool);
+    command_buffer.begin();
+    vkCmdBuildAccelerationStructureNV(command_buffer.handle(), &top_level_as_create_info.info, instance_buffer.handle(), 0,
+                                      VK_FALSE, top_level_as.handle(), VK_NULL_HANDLE, top_level_as_scratch.handle(), 0);
+    command_buffer.end();
+
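+    // GPU assisted validation checks the bottom level handles on the GPU, so the error only
+    // surfaces once the submitted build has executed.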
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "Attempted to build top level acceleration structure using invalid bottom level acceleration structure handle");
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &command_buffer.handle();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    vk::QueueWaitIdle(m_device->m_queue);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, GpuBuildAccelerationStructureValidationBottomLevelNotYetBuilt) {
+    TEST_DESCRIPTION(
+        "Acceleration structure gpu validation should report an invalid handle when trying to build a top level "
+        "acceleration structure with a handle for a bottom level acceleration structure that has not yet been built.");
+
+    if (!InitFrameworkForRayTracingTest(this, m_instance_extension_names, m_device_extension_names, m_errorMonitor,
+                                        /*need_gpu_validation=*/true)) {
+        return;
+    }
+
+    PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV =
+        reinterpret_cast<PFN_vkCmdBuildAccelerationStructureNV>(
+            vk::GetDeviceProcAddr(m_device->handle(), "vkCmdBuildAccelerationStructureNV"));
+    assert(vkCmdBuildAccelerationStructureNV != nullptr);
+
+    VkBufferObj vbo;
+    VkBufferObj ibo;
+    VkGeometryNV geometry;
+    GetSimpleGeometryForAccelerationStructureTests(*m_device, &vbo, &ibo, &geometry);
+
+    VkAccelerationStructureCreateInfoNV bot_level_as_create_info = {};
+    bot_level_as_create_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
+    bot_level_as_create_info.info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
+    bot_level_as_create_info.info.type = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV;
+    bot_level_as_create_info.info.instanceCount = 0;
+    bot_level_as_create_info.info.geometryCount = 1;
+    bot_level_as_create_info.info.pGeometries = &geometry;
+
+    VkAccelerationStructureCreateInfoNV top_level_as_create_info = {};
+    top_level_as_create_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
+    top_level_as_create_info.info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
+    top_level_as_create_info.info.type = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV;
+    top_level_as_create_info.info.instanceCount = 1;
+    top_level_as_create_info.info.geometryCount = 0;
+
+    VkCommandPoolObj command_pool(m_device, 0, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
+
+    struct VkGeometryInstanceNV {
+        float transform[12];
+        uint32_t instanceCustomIndex : 24;
+        uint32_t mask : 8;
+        uint32_t instanceOffset : 24;
+        uint32_t flags : 8;
+        uint64_t accelerationStructureHandle;
+    };
+
+    VkAccelerationStructureObj bot_level_as_never_built(*m_device, bot_level_as_create_info);
+    m_errorMonitor->VerifyNotFound();
+
+    VkGeometryInstanceNV instance = {
+        {
+            // clang-format off
+            1.0f, 0.0f, 0.0f, 0.0f,
+            0.0f, 1.0f, 0.0f, 0.0f,
+            0.0f, 0.0f, 1.0f, 0.0f,
+            // clang-format on
+        },
+        0,
+        0xFF,
+        0,
+        VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV,
+        bot_level_as_never_built.opaque_handle(),
+    };
+
+    VkDeviceSize instance_buffer_size = sizeof(VkGeometryInstanceNV);
+    VkBufferObj instance_buffer;
+    instance_buffer.init(*m_device, instance_buffer_size,
+                         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
+                         VK_BUFFER_USAGE_RAY_TRACING_BIT_NV);
+
+    uint8_t *mapped_instance_buffer_data = (uint8_t *)instance_buffer.memory().map();
+    std::memcpy(mapped_instance_buffer_data, (uint8_t *)&instance, static_cast<std::size_t>(instance_buffer_size));
+    instance_buffer.memory().unmap();
+
+    VkAccelerationStructureObj top_level_as(*m_device, top_level_as_create_info);
+    m_errorMonitor->VerifyNotFound();
+
+    VkBufferObj top_level_as_scratch;
+    top_level_as.create_scratch_buffer(*m_device, &top_level_as_scratch);
+
+    VkCommandBufferObj command_buffer(m_device, &command_pool);
+    command_buffer.begin();
+    vkCmdBuildAccelerationStructureNV(command_buffer.handle(), &top_level_as_create_info.info, instance_buffer.handle(), 0,
+                                      VK_FALSE, top_level_as.handle(), VK_NULL_HANDLE, top_level_as_scratch.handle(), 0);
+    command_buffer.end();
+
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "Attempted to build top level acceleration structure using invalid bottom level acceleration structure handle");
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &command_buffer.handle();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    vk::QueueWaitIdle(m_device->m_queue);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, GpuBuildAccelerationStructureValidationBottomLevelDestroyed) {
+    TEST_DESCRIPTION(
+        "Acceleration structure gpu validation should report an invalid handle when trying to build a top level "
+        "acceleration structure with a handle for a destroyed bottom level acceleration structure.");
+
+    if (!InitFrameworkForRayTracingTest(this, m_instance_extension_names, m_device_extension_names, m_errorMonitor,
+                                        /*need_gpu_validation=*/true)) {
+        return;
+    }
+
+    PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV =
+        reinterpret_cast<PFN_vkCmdBuildAccelerationStructureNV>(
+            vk::GetDeviceProcAddr(m_device->handle(), "vkCmdBuildAccelerationStructureNV"));
+    assert(vkCmdBuildAccelerationStructureNV != nullptr);
+
+    VkBufferObj vbo;
+    VkBufferObj ibo;
+    VkGeometryNV geometry;
+    GetSimpleGeometryForAccelerationStructureTests(*m_device, &vbo, &ibo, &geometry);
+
+    VkAccelerationStructureCreateInfoNV bot_level_as_create_info = {};
+    bot_level_as_create_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
+    bot_level_as_create_info.info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
+    bot_level_as_create_info.info.type = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV;
+    bot_level_as_create_info.info.instanceCount = 0;
+    bot_level_as_create_info.info.geometryCount = 1;
+    bot_level_as_create_info.info.pGeometries = &geometry;
+
+    VkAccelerationStructureCreateInfoNV top_level_as_create_info = {};
+    top_level_as_create_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
+    top_level_as_create_info.info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
+    top_level_as_create_info.info.type = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV;
+    top_level_as_create_info.info.instanceCount = 1;
+    top_level_as_create_info.info.geometryCount = 0;
+
+    VkCommandPoolObj command_pool(m_device, 0, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
+
+    struct VkGeometryInstanceNV {
+        float transform[12];
+        uint32_t instanceCustomIndex : 24;
+        uint32_t mask : 8;
+        uint32_t instanceOffset : 24;
+        uint32_t flags : 8;
+        uint64_t accelerationStructureHandle;
+    };
+
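+    // Build and submit a bottom level AS to obtain a valid opaque handle, then let the object go out of scope.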
+    uint64_t destroyed_bot_level_as_handle = 0;
+    {
+        VkAccelerationStructureObj destroyed_bot_level_as(*m_device, bot_level_as_create_info);
+        m_errorMonitor->VerifyNotFound();
+
+        destroyed_bot_level_as_handle = destroyed_bot_level_as.opaque_handle();
+
+        VkBufferObj bot_level_as_scratch;
+        destroyed_bot_level_as.create_scratch_buffer(*m_device, &bot_level_as_scratch);
+
+        VkCommandBufferObj command_buffer(m_device, &command_pool);
+        command_buffer.begin();
+        vkCmdBuildAccelerationStructureNV(command_buffer.handle(), &bot_level_as_create_info.info, VK_NULL_HANDLE, 0, VK_FALSE,
+                                          destroyed_bot_level_as.handle(), VK_NULL_HANDLE, bot_level_as_scratch.handle(), 0);
+        command_buffer.end();
+
+        VkSubmitInfo submit_info = {};
+        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+        submit_info.commandBufferCount = 1;
+        submit_info.pCommandBuffers = &command_buffer.handle();
+        vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+        vk::QueueWaitIdle(m_device->m_queue);
+        m_errorMonitor->VerifyNotFound();
+
+        // destroyed_bot_level_as goes out of scope here; its destructor calls vk::DestroyAccelerationStructureNV,
+        // leaving destroyed_bot_level_as_handle pointing at a destroyed acceleration structure.
+    }
+
+    VkGeometryInstanceNV instance = {
+        {
+            // clang-format off
+            1.0f, 0.0f, 0.0f, 0.0f,
+            0.0f, 1.0f, 0.0f, 0.0f,
+            0.0f, 0.0f, 1.0f, 0.0f,
+            // clang-format on
+        },
+        0,
+        0xFF,
+        0,
+        VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV,
+        destroyed_bot_level_as_handle,
+    };
+
+    VkDeviceSize instance_buffer_size = sizeof(VkGeometryInstanceNV);
+    VkBufferObj instance_buffer;
+    instance_buffer.init(*m_device, instance_buffer_size,
+                         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
+                         VK_BUFFER_USAGE_RAY_TRACING_BIT_NV);
+
+    uint8_t *mapped_instance_buffer_data = (uint8_t *)instance_buffer.memory().map();
+    std::memcpy(mapped_instance_buffer_data, (uint8_t *)&instance, static_cast<std::size_t>(instance_buffer_size));
+    instance_buffer.memory().unmap();
+
+    VkAccelerationStructureObj top_level_as(*m_device, top_level_as_create_info);
+    m_errorMonitor->VerifyNotFound();
+
+    VkBufferObj top_level_as_scratch;
+    top_level_as.create_scratch_buffer(*m_device, &top_level_as_scratch);
+
+    VkCommandBufferObj command_buffer(m_device, &command_pool);
+    command_buffer.begin();
+    vkCmdBuildAccelerationStructureNV(command_buffer.handle(), &top_level_as_create_info.info, instance_buffer.handle(), 0,
+                                      VK_FALSE, top_level_as.handle(), VK_NULL_HANDLE, top_level_as_scratch.handle(), 0);
+    command_buffer.end();
+
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "Attempted to build top level acceleration structure using invalid bottom level acceleration structure handle");
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &command_buffer.handle();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    vk::QueueWaitIdle(m_device->m_queue);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, GpuBuildAccelerationStructureValidationRestoresState) {
+    TEST_DESCRIPTION("Validate that acceleration structure gpu validation correctly restores compute state.");
+
+    if (!InitFrameworkForRayTracingTest(this, m_instance_extension_names, m_device_extension_names, m_errorMonitor,
+                                        /*need_gpu_validation=*/true, /*need_push_descriptors=*/true)) {
+        return;
+    }
+
+    PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV =
+        reinterpret_cast<PFN_vkCmdBuildAccelerationStructureNV>(
+            vk::GetDeviceProcAddr(m_device->handle(), "vkCmdBuildAccelerationStructureNV"));
+    assert(vkCmdBuildAccelerationStructureNV != nullptr);
+
+    PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR =
+        (PFN_vkCmdPushDescriptorSetKHR)vk::GetDeviceProcAddr(m_device->handle(), "vkCmdPushDescriptorSetKHR");
+    assert(vkCmdPushDescriptorSetKHR != nullptr);
+
+    VkBufferObj vbo;
+    VkBufferObj ibo;
+    VkGeometryNV geometry;
+    GetSimpleGeometryForAccelerationStructureTests(*m_device, &vbo, &ibo, &geometry);
+
+    VkAccelerationStructureCreateInfoNV top_level_as_create_info = {};
+    top_level_as_create_info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV;
+    top_level_as_create_info.info.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV;
+    top_level_as_create_info.info.type = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV;
+    top_level_as_create_info.info.instanceCount = 1;
+    top_level_as_create_info.info.geometryCount = 0;
+
+    VkCommandPoolObj command_pool(m_device, 0, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
+
+    struct VkGeometryInstanceNV {
+        float transform[12];
+        uint32_t instanceCustomIndex : 24;
+        uint32_t mask : 8;
+        uint32_t instanceOffset : 24;
+        uint32_t flags : 8;
+        uint64_t accelerationStructureHandle;
+    };
+
+    VkGeometryInstanceNV instance = {
+        {
+            // clang-format off
+            1.0f, 0.0f, 0.0f, 0.0f,
+            0.0f, 1.0f, 0.0f, 0.0f,
+            0.0f, 0.0f, 1.0f, 0.0f,
+            // clang-format on
+        },
+        0,
+        0xFF,
+        0,
+        VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV,
+        1234567,
+    };
+
+    VkDeviceSize instance_buffer_size = sizeof(VkGeometryInstanceNV);
+    VkBufferObj instance_buffer;
+    instance_buffer.init(*m_device, instance_buffer_size,
+                         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
+                         VK_BUFFER_USAGE_RAY_TRACING_BIT_NV);
+
+    uint8_t *mapped_instance_buffer_data = (uint8_t *)instance_buffer.memory().map();
+    std::memcpy(mapped_instance_buffer_data, (uint8_t *)&instance, static_cast<std::size_t>(instance_buffer_size));
+    instance_buffer.memory().unmap();
+
+    VkAccelerationStructureObj top_level_as(*m_device, top_level_as_create_info);
+    m_errorMonitor->VerifyNotFound();
+
+    VkBufferObj top_level_as_scratch;
+    top_level_as.create_scratch_buffer(*m_device, &top_level_as_scratch);
+
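+    // Mirrors the ComputeOutputBuffer block written by the compute shader below;
+    // read back after submission to verify that the bound state was restored.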
+    struct ComputeOutput {
+        uint32_t push_constant_value;
+        uint32_t push_descriptor_value;
+        uint32_t normal_descriptor_value;
+    };
+
+    VkBufferObj push_descriptor_buffer;
+    push_descriptor_buffer.init(*m_device, 4, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
+                                VK_BUFFER_USAGE_STORAGE_BUFFER_BIT);
+
+    VkBufferObj normal_descriptor_buffer;
+    normal_descriptor_buffer.init(*m_device, 4, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
+                                  VK_BUFFER_USAGE_STORAGE_BUFFER_BIT);
+
+    VkDeviceSize output_descriptor_buffer_size = static_cast<VkDeviceSize>(sizeof(ComputeOutput));
+    VkBufferObj output_descriptor_buffer;
+    output_descriptor_buffer.init(*m_device, output_descriptor_buffer_size,
+                                  VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
+                                  VK_BUFFER_USAGE_STORAGE_BUFFER_BIT);
+
+    const std::string cs_source = R"glsl(#version 450
+        layout(local_size_x = 1, local_size_y = 1, local_size_z = 1) in;
+
+        layout(push_constant) uniform PushConstants { uint value; } push_constant;
+        layout(set = 0, binding = 0, std430) buffer PushDescriptorBuffer { uint value; } push_descriptor;
+        layout(set = 1, binding = 0, std430) buffer NormalDescriptorBuffer { uint value; } normal_descriptor;
+
+        layout(set = 2, binding = 0, std430) buffer ComputeOutputBuffer {
+            uint push_constant_value;
+            uint push_descriptor_value;
+            uint normal_descriptor_value;
+        } compute_output;
+
+        void main() {
+            compute_output.push_constant_value = push_constant.value;
+            compute_output.push_descriptor_value = push_descriptor.value;
+            compute_output.normal_descriptor_value = normal_descriptor.value;
+        }
+    )glsl";
+    VkShaderObj cs(m_device, cs_source.c_str(), VK_SHADER_STAGE_COMPUTE_BIT, this);
+
+    OneOffDescriptorSet push_descriptor_set(m_device,
+                                            {
+                                                {0, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
+                                            },
+                                            VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
+    OneOffDescriptorSet normal_descriptor_set(m_device,
+                                              {
+                                                  {0, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
+                                              });
+    OneOffDescriptorSet output_descriptor_set(m_device,
+                                              {
+                                                  {0, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
+                                              });
+
+    VkPushConstantRange push_constant_range = {};
+    push_constant_range.stageFlags = VK_SHADER_STAGE_COMPUTE_BIT;
+    push_constant_range.size = 4;
+    push_constant_range.offset = 0;
+
+    const VkPipelineLayoutObj compute_pipeline_layout(m_device,
+                                                      {
+                                                          &push_descriptor_set.layout_,
+                                                          &normal_descriptor_set.layout_,
+                                                          &output_descriptor_set.layout_,
+                                                      },
+                                                      {push_constant_range});
+
+    VkComputePipelineCreateInfo compute_pipeline_ci = {};
+    compute_pipeline_ci.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
+    compute_pipeline_ci.layout = compute_pipeline_layout.handle();
+    compute_pipeline_ci.stage = cs.GetStageCreateInfo();
+
+    VkPipeline compute_pipeline;
+    ASSERT_VK_SUCCESS(
+        vk::CreateComputePipelines(m_device->device(), VK_NULL_HANDLE, 1, &compute_pipeline_ci, nullptr, &compute_pipeline));
+
+    normal_descriptor_set.WriteDescriptorBufferInfo(0, normal_descriptor_buffer.handle(), 4, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
+    normal_descriptor_set.UpdateDescriptorSets();
+
+    output_descriptor_set.WriteDescriptorBufferInfo(0, output_descriptor_buffer.handle(), output_descriptor_buffer_size,
+                                                    VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
+    output_descriptor_set.UpdateDescriptorSets();
+
+    // Set input data
+    const uint32_t push_constant_value = 1234567890;
+    const uint32_t push_descriptor_value = 98765432;
+    const uint32_t normal_descriptor_value = 1111111;
+
+    uint32_t *mapped_push_descriptor_buffer_data = (uint32_t *)push_descriptor_buffer.memory().map();
+    *mapped_push_descriptor_buffer_data = push_descriptor_value;
+    push_descriptor_buffer.memory().unmap();
+
+    uint32_t *mapped_normal_descriptor_buffer_data = (uint32_t *)normal_descriptor_buffer.memory().map();
+    *mapped_normal_descriptor_buffer_data = normal_descriptor_value;
+    normal_descriptor_buffer.memory().unmap();
+
+    ComputeOutput *mapped_output_buffer_data = (ComputeOutput *)output_descriptor_buffer.memory().map();
+    mapped_output_buffer_data->push_constant_value = 0;
+    mapped_output_buffer_data->push_descriptor_value = 0;
+    mapped_output_buffer_data->normal_descriptor_value = 0;
+    output_descriptor_buffer.memory().unmap();
+
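+    // Descriptor write for set 0, recorded later with vkCmdPushDescriptorSetKHR.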
+    VkDescriptorBufferInfo push_descriptor_buffer_info = {};
+    push_descriptor_buffer_info.buffer = push_descriptor_buffer.handle();
+    push_descriptor_buffer_info.offset = 0;
+    push_descriptor_buffer_info.range = 4;
+    VkWriteDescriptorSet push_descriptor_set_write = {};
+    push_descriptor_set_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    push_descriptor_set_write.descriptorCount = 1;
+    push_descriptor_set_write.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
+    push_descriptor_set_write.dstBinding = 0;
+    push_descriptor_set_write.pBufferInfo = &push_descriptor_buffer_info;
+
+    VkCommandBufferObj command_buffer(m_device, &command_pool);
+    command_buffer.begin();
+    vk::CmdBindPipeline(command_buffer.handle(), VK_PIPELINE_BIND_POINT_COMPUTE, compute_pipeline);
+    vk::CmdPushConstants(command_buffer.handle(), compute_pipeline_layout.handle(), VK_SHADER_STAGE_COMPUTE_BIT, 0, 4,
+                         &push_constant_value);
+    vkCmdPushDescriptorSetKHR(command_buffer.handle(), VK_PIPELINE_BIND_POINT_COMPUTE, compute_pipeline_layout.handle(), 0, 1,
+                              &push_descriptor_set_write);
+    vk::CmdBindDescriptorSets(command_buffer.handle(), VK_PIPELINE_BIND_POINT_COMPUTE, compute_pipeline_layout.handle(), 1, 1,
+                              &normal_descriptor_set.set_, 0, nullptr);
+    vk::CmdBindDescriptorSets(command_buffer.handle(), VK_PIPELINE_BIND_POINT_COMPUTE, compute_pipeline_layout.handle(), 2, 1,
+                              &output_descriptor_set.set_, 0, nullptr);
+
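+    // The acceleration structure build lets GPU assisted validation insert its own compute work;
+    // the dispatch that follows checks whether the pipeline, push constants and descriptor sets were restored.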
+    vkCmdBuildAccelerationStructureNV(command_buffer.handle(), &top_level_as_create_info.info, instance_buffer.handle(), 0,
+                                      VK_FALSE, top_level_as.handle(), VK_NULL_HANDLE, top_level_as_scratch.handle(), 0);
+
+    vk::CmdDispatch(command_buffer.handle(), 1, 1, 1);
+    command_buffer.end();
+
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "Attempted to build top level acceleration structure using invalid bottom level acceleration structure handle");
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &command_buffer.handle();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    vk::QueueWaitIdle(m_device->m_queue);
+
+    m_errorMonitor->VerifyFound();
+
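+    // If the compute state was restored correctly, the dispatch saw the originally bound values.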
+    mapped_output_buffer_data = (ComputeOutput *)output_descriptor_buffer.memory().map();
+    EXPECT_EQ(mapped_output_buffer_data->push_constant_value, push_constant_value);
+    EXPECT_EQ(mapped_output_buffer_data->push_descriptor_value, push_descriptor_value);
+    EXPECT_EQ(mapped_output_buffer_data->normal_descriptor_value, normal_descriptor_value);
+    output_descriptor_buffer.memory().unmap();
+
+    // Clean up
+    vk::DestroyPipeline(m_device->device(), compute_pipeline, nullptr);
+}
+
+TEST_F(VkLayerTest, QueryPerformanceCreation) {
+    TEST_DESCRIPTION("Create performance query without support");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME);
+        return;
+    }
+
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+    VkPhysicalDeviceFeatures2KHR features2 = {};
+    auto performance_features = lvl_init_struct<VkPhysicalDevicePerformanceQueryFeaturesKHR>();
+    features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&performance_features);
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+    if (!performance_features.performanceCounterQueryPools) {
+        printf("%s Performance query pools are not supported.\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &performance_features));
+    PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR
+        vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR =
+            (PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR)vk::GetInstanceProcAddr(
+                instance(), "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR");
+    ASSERT_TRUE(vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR != nullptr);
+
+    auto queueFamilyProperties = m_device->phy().queue_properties();
+    uint32_t queueFamilyIndex = queueFamilyProperties.size();
+    std::vector<VkPerformanceCounterKHR> counters;
+
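+    // Pick the first queue family that reports at least one performance counter.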
+    for (uint32_t idx = 0; idx < queueFamilyProperties.size(); idx++) {
+        uint32_t nCounters;
+
+        vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(gpu(), idx, &nCounters, nullptr, nullptr);
+        if (nCounters == 0) continue;
+
+        counters.resize(nCounters);
+        for (auto &c : counters) {
+            c.sType = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR;
+            c.pNext = nullptr;
+        }
+        vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(gpu(), idx, &nCounters, &counters[0], nullptr);
+        queueFamilyIndex = idx;
+        break;
+    }
+
+    if (counters.empty()) {
+        printf("%s No queue reported any performance counter.\n", kSkipPrefix);
+        return;
+    }
+
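+    // Request every counter reported for the selected queue family.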
+    VkQueryPoolPerformanceCreateInfoKHR perf_query_pool_ci{};
+    perf_query_pool_ci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR;
+    perf_query_pool_ci.queueFamilyIndex = queueFamilyIndex;
+    perf_query_pool_ci.counterIndexCount = counters.size();
+    std::vector<uint32_t> counterIndices;
+    for (uint32_t c = 0; c < counters.size(); c++) counterIndices.push_back(c);
+    perf_query_pool_ci.pCounterIndices = &counterIndices[0];
+    VkQueryPoolCreateInfo query_pool_ci{};
+    query_pool_ci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+    query_pool_ci.queryType = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR;
+    query_pool_ci.queryCount = 1;
+
+    // Missing pNext
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkQueryPoolCreateInfo-queryType-03222");
+    VkQueryPool query_pool;
+    vk::CreateQueryPool(m_device->device(), &query_pool_ci, nullptr, &query_pool);
+    m_errorMonitor->VerifyFound();
+
+    query_pool_ci.pNext = &perf_query_pool_ci;
+
+    // Invalid counter indices
+    counterIndices.push_back(counters.size());
+    perf_query_pool_ci.counterIndexCount++;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VUID-VkQueryPoolPerformanceCreateInfoKHR-pCounterIndices-03321");
+    vk::CreateQueryPool(m_device->device(), &query_pool_ci, nullptr, &query_pool);
+    m_errorMonitor->VerifyFound();
+    perf_query_pool_ci.counterIndexCount--;
+    counterIndices.pop_back();
+
+    // Success
+    m_errorMonitor->ExpectSuccess(VK_DEBUG_REPORT_ERROR_BIT_EXT);
+    vk::CreateQueryPool(m_device->device(), &query_pool_ci, nullptr, &query_pool);
+    m_errorMonitor->VerifyNotFound();
+
+    m_commandBuffer->begin();
+
+    // Missing acquire lock
+    {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBeginQuery-queryPool-03223");
+        vk::CmdBeginQuery(m_commandBuffer->handle(), query_pool, 0, 0);
+        m_errorMonitor->VerifyFound();
+    }
+
+    m_commandBuffer->end();
+
+    vk::DestroyQueryPool(m_device->device(), query_pool, NULL);
+}
+
+TEST_F(VkLayerTest, QueryPerformanceCounterCommandbufferScope) {
+    TEST_DESCRIPTION("Insert a performance query begin/end with respect to the command buffer counter scope");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME);
+        return;
+    }
+
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+    VkPhysicalDeviceFeatures2KHR features2 = {};
+    auto performanceFeatures = lvl_init_struct<VkPhysicalDevicePerformanceQueryFeaturesKHR>();
+    features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&performanceFeatures);
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+    if (!performanceFeatures.performanceCounterQueryPools) {
+        printf("%s Performance query pools are not supported.\n", kSkipPrefix);
+        return;
+    }
+
+    VkCommandPoolCreateFlags pool_flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &performanceFeatures, pool_flags));
+    PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR
+        vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR =
+            (PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR)vk::GetInstanceProcAddr(
+                instance(), "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR");
+    ASSERT_TRUE(vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR != nullptr);
+
+    auto queueFamilyProperties = m_device->phy().queue_properties();
+    uint32_t queueFamilyIndex = queueFamilyProperties.size();
+    std::vector<VkPerformanceCounterKHR> counters;
+    std::vector<uint32_t> counterIndices;
+
+    // Find a single counter with VK_QUERY_SCOPE_COMMAND_BUFFER_KHR scope.
+    for (uint32_t idx = 0; idx < queueFamilyProperties.size(); idx++) {
+        uint32_t nCounters;
+
+        vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(gpu(), idx, &nCounters, nullptr, nullptr);
+        if (nCounters == 0) continue;
+
+        counters.resize(nCounters);
+        for (auto &c : counters) {
+            c.sType = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR;
+            c.pNext = nullptr;
+        }
+        vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(gpu(), idx, &nCounters, &counters[0], nullptr);
+        queueFamilyIndex = idx;
+
+        for (uint32_t counterIdx = 0; counterIdx < counters.size(); counterIdx++) {
+            if (counters[counterIdx].scope == VK_QUERY_SCOPE_COMMAND_BUFFER_KHR) {
+                counterIndices.push_back(counterIdx);
+                break;
+            }
+        }
+
+        if (counterIndices.empty()) {
+            counters.clear();
+            continue;
+        }
+        break;
+    }
+
+    if (counterIndices.empty()) {
+        printf("%s No queue reported any performance counter with command buffer scope.\n", kSkipPrefix);
+        return;
+    }
+
+    VkQueryPoolPerformanceCreateInfoKHR perf_query_pool_ci{};
+    perf_query_pool_ci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR;
+    perf_query_pool_ci.queueFamilyIndex = queueFamilyIndex;
+    perf_query_pool_ci.counterIndexCount = counterIndices.size();
+    perf_query_pool_ci.pCounterIndices = &counterIndices[0];
+    VkQueryPoolCreateInfo query_pool_ci{};
+    query_pool_ci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+    query_pool_ci.pNext = &perf_query_pool_ci;
+    query_pool_ci.queryType = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR;
+    query_pool_ci.queryCount = 1;
+
+    VkQueryPool query_pool;
+    vk::CreateQueryPool(device(), &query_pool_ci, nullptr, &query_pool);
+
+    VkQueue queue = VK_NULL_HANDLE;
+    vk::GetDeviceQueue(device(), queueFamilyIndex, 0, &queue);
+
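+    // The profiling lock must be held before recording command buffers that begin a performance query.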
+    PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR =
+        (PFN_vkAcquireProfilingLockKHR)vk::GetInstanceProcAddr(instance(), "vkAcquireProfilingLockKHR");
+    ASSERT_TRUE(vkAcquireProfilingLockKHR != nullptr);
+    PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR =
+        (PFN_vkReleaseProfilingLockKHR)vk::GetInstanceProcAddr(instance(), "vkReleaseProfilingLockKHR");
+    ASSERT_TRUE(vkReleaseProfilingLockKHR != nullptr);
+
+    {
+        VkAcquireProfilingLockInfoKHR lock_info{};
+        lock_info.sType = VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR;
+        VkResult result = vkAcquireProfilingLockKHR(device(), &lock_info);
+        ASSERT_TRUE(result == VK_SUCCESS);
+    }
+
+    // Begin the query after another command has been recorded; for a command buffer scope counter it must be the first command.
+    {
+        VkBufferCreateInfo buf_info = {};
+        buf_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+        buf_info.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+        buf_info.size = 4096;
+        buf_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+        VkBuffer buffer;
+        VkResult err = vk::CreateBuffer(device(), &buf_info, NULL, &buffer);
+        ASSERT_VK_SUCCESS(err);
+
+        VkMemoryRequirements mem_reqs;
+        vk::GetBufferMemoryRequirements(device(), buffer, &mem_reqs);
+
+        VkMemoryAllocateInfo alloc_info = {};
+        alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+        alloc_info.allocationSize = 4096;
+        VkDeviceMemory mem;
+        err = vk::AllocateMemory(device(), &alloc_info, NULL, &mem);
+        ASSERT_VK_SUCCESS(err);
+        vk::BindBufferMemory(device(), buffer, mem, 0);
+
+        m_commandBuffer->begin();
+        vk::CmdFillBuffer(m_commandBuffer->handle(), buffer, 0, 4096, 0);
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBeginQuery-queryPool-03224");
+        vk::CmdBeginQuery(m_commandBuffer->handle(), query_pool, 0, 0);
+        m_errorMonitor->VerifyFound();
+
+        m_commandBuffer->end();
+
+        VkSubmitInfo submit_info;
+        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+        submit_info.pNext = NULL;
+        submit_info.waitSemaphoreCount = 0;
+        submit_info.pWaitSemaphores = NULL;
+        submit_info.pWaitDstStageMask = NULL;
+        submit_info.commandBufferCount = 1;
+        submit_info.pCommandBuffers = &m_commandBuffer->handle();
+        submit_info.signalSemaphoreCount = 0;
+        submit_info.pSignalSemaphores = NULL;
+        vk::QueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
+        vk::QueueWaitIdle(queue);
+
+        vk::DestroyBuffer(device(), buffer, nullptr);
+        vk::FreeMemory(device(), mem, NULL);
+    }
+
+    // First command: success.
+    {
+        VkBufferCreateInfo buf_info = {};
+        buf_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+        buf_info.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+        buf_info.size = 4096;
+        buf_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+        VkBuffer buffer;
+        VkResult err = vk::CreateBuffer(device(), &buf_info, NULL, &buffer);
+        ASSERT_VK_SUCCESS(err);
+
+        VkMemoryRequirements mem_reqs;
+        vk::GetBufferMemoryRequirements(device(), buffer, &mem_reqs);
+
+        VkMemoryAllocateInfo alloc_info = {};
+        alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+        alloc_info.allocationSize = 4096;
+        VkDeviceMemory mem;
+        err = vk::AllocateMemory(device(), &alloc_info, NULL, &mem);
+        ASSERT_VK_SUCCESS(err);
+        vk::BindBufferMemory(device(), buffer, mem, 0);
+
+        m_commandBuffer->begin();
+
+        m_errorMonitor->ExpectSuccess(VK_DEBUG_REPORT_ERROR_BIT_EXT);
+        vk::CmdBeginQuery(m_commandBuffer->handle(), query_pool, 0, 0);
+        m_errorMonitor->VerifyNotFound();
+
+        vk::CmdFillBuffer(m_commandBuffer->handle(), buffer, 0, 4096, 0);
+
+        vk::CmdEndQuery(m_commandBuffer->handle(), query_pool, 0);
+
+        m_commandBuffer->end();
+
+        VkSubmitInfo submit_info;
+        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+        submit_info.pNext = NULL;
+        submit_info.waitSemaphoreCount = 0;
+        submit_info.pWaitSemaphores = NULL;
+        submit_info.pWaitDstStageMask = NULL;
+        submit_info.commandBufferCount = 1;
+        submit_info.pCommandBuffers = &m_commandBuffer->handle();
+        submit_info.signalSemaphoreCount = 0;
+        submit_info.pSignalSemaphores = NULL;
+        vk::QueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
+        vk::QueueWaitIdle(queue);
+
+        vk::DestroyBuffer(device(), buffer, nullptr);
+        vk::FreeMemory(device(), mem, NULL);
+    }
+
+    vk::DestroyQueryPool(m_device->device(), query_pool, NULL);
+
+    vkReleaseProfilingLockKHR(device());
+}
+
+TEST_F(VkLayerTest, QueryPerformanceCounterRenderPassScope) {
+    TEST_DESCRIPTION("Insert a performance query begin/end with respect to the render pass counter scope");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME);
+        return;
+    }
+
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+    VkPhysicalDeviceFeatures2KHR features2 = {};
+    auto performanceFeatures = lvl_init_struct<VkPhysicalDevicePerformanceQueryFeaturesKHR>();
+    features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&performanceFeatures);
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+    if (!performanceFeatures.performanceCounterQueryPools) {
+        printf("%s Performance query pools are not supported.\n", kSkipPrefix);
+        return;
+    }
+
+    VkCommandPoolCreateFlags pool_flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, pool_flags));
+    PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR
+        vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR =
+            (PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR)vk::GetInstanceProcAddr(
+                instance(), "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR");
+    ASSERT_TRUE(vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR != nullptr);
+
+    auto queueFamilyProperties = m_device->phy().queue_properties();
+    uint32_t queueFamilyIndex = queueFamilyProperties.size();
+    std::vector<VkPerformanceCounterKHR> counters;
+    std::vector<uint32_t> counterIndices;
+
+    // Find a single counter with VK_QUERY_SCOPE_RENDER_PASS_KHR scope.
+    for (uint32_t idx = 0; idx < queueFamilyProperties.size(); idx++) {
+        uint32_t nCounters;
+
+        vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(gpu(), idx, &nCounters, nullptr, nullptr);
+        if (nCounters == 0) continue;
+
+        counters.resize(nCounters);
+        for (auto &c : counters) {
+            c.sType = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR;
+            c.pNext = nullptr;
+        }
+        vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(gpu(), idx, &nCounters, &counters[0], nullptr);
+        queueFamilyIndex = idx;
+
+        for (uint32_t counterIdx = 0; counterIdx < counters.size(); counterIdx++) {
+            if (counters[counterIdx].scope == VK_QUERY_SCOPE_RENDER_PASS_KHR) {
+                counterIndices.push_back(counterIdx);
+                break;
+            }
+        }
+
+        if (counterIndices.empty()) {
+            counters.clear();
+            continue;
+        }
+        break;
+    }
+
+    if (counterIndices.empty()) {
+        printf("%s No queue reported any performance counter with render pass scope.\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkQueryPoolPerformanceCreateInfoKHR perf_query_pool_ci{};
+    perf_query_pool_ci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR;
+    perf_query_pool_ci.queueFamilyIndex = queueFamilyIndex;
+    perf_query_pool_ci.counterIndexCount = counterIndices.size();
+    perf_query_pool_ci.pCounterIndices = &counterIndices[0];
+    VkQueryPoolCreateInfo query_pool_ci{};
+    query_pool_ci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+    query_pool_ci.pNext = &perf_query_pool_ci;
+    query_pool_ci.queryType = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR;
+    query_pool_ci.queryCount = 1;
+
+    VkQueryPool query_pool;
+    vk::CreateQueryPool(device(), &query_pool_ci, nullptr, &query_pool);
+
+    VkQueue queue = VK_NULL_HANDLE;
+    vk::GetDeviceQueue(device(), queueFamilyIndex, 0, &queue);
+
+    PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR =
+        (PFN_vkAcquireProfilingLockKHR)vk::GetInstanceProcAddr(instance(), "vkAcquireProfilingLockKHR");
+    ASSERT_TRUE(vkAcquireProfilingLockKHR != nullptr);
+    PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR =
+        (PFN_vkReleaseProfilingLockKHR)vk::GetInstanceProcAddr(instance(), "vkReleaseProfilingLockKHR");
+    ASSERT_TRUE(vkReleaseProfilingLockKHR != nullptr);
+
+    {
+        VkAcquireProfilingLockInfoKHR lock_info{};
+        lock_info.sType = VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR;
+        VkResult result = vkAcquireProfilingLockKHR(device(), &lock_info);
+        ASSERT_TRUE(result == VK_SUCCESS);
+    }
+
+    // Inside a render pass.
+    {
+        m_commandBuffer->begin();
+        m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBeginQuery-queryPool-03225");
+        vk::CmdBeginQuery(m_commandBuffer->handle(), query_pool, 0, 0);
+        m_errorMonitor->VerifyFound();
+
+        m_commandBuffer->EndRenderPass();
+        m_commandBuffer->end();
+
+        VkSubmitInfo submit_info;
+        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+        submit_info.pNext = NULL;
+        submit_info.waitSemaphoreCount = 0;
+        submit_info.pWaitSemaphores = NULL;
+        submit_info.pWaitDstStageMask = NULL;
+        submit_info.commandBufferCount = 1;
+        submit_info.pCommandBuffers = &m_commandBuffer->handle();
+        submit_info.signalSemaphoreCount = 0;
+        submit_info.pSignalSemaphores = NULL;
+        vk::QueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
+        vk::QueueWaitIdle(queue);
+    }
+
+    vkReleaseProfilingLockKHR(device());
+
+    vk::DestroyQueryPool(m_device->device(), query_pool, NULL);
+}
+
+TEST_F(VkLayerTest, QueryPerformanceReleaseProfileLockBeforeSubmit) {
+    TEST_DESCRIPTION("Verify that we get an error if we release the profiling lock during the recording of performance queries");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME);
+        return;
+    }
+
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+    VkPhysicalDeviceFeatures2KHR features2 = {};
+    auto performanceFeatures = lvl_init_struct<VkPhysicalDevicePerformanceQueryFeaturesKHR>();
+    features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&performanceFeatures);
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+    if (!performanceFeatures.performanceCounterQueryPools) {
+        printf("%s Performance query pools are not supported.\n", kSkipPrefix);
+        return;
+    }
+
+    VkCommandPoolCreateFlags pool_flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &performanceFeatures, pool_flags));
+    PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR
+        vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR =
+            (PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR)vk::GetInstanceProcAddr(
+                instance(), "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR");
+    ASSERT_TRUE(vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR != nullptr);
+
+    auto queueFamilyProperties = m_device->phy().queue_properties();
+    uint32_t queueFamilyIndex = queueFamilyProperties.size();
+    std::vector<VkPerformanceCounterKHR> counters;
+    std::vector<uint32_t> counterIndices;
+
+    // Find a queue family that exposes at least one performance counter.
+    for (uint32_t idx = 0; idx < queueFamilyProperties.size(); idx++) {
+        uint32_t nCounters;
+
+        vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(gpu(), idx, &nCounters, nullptr, nullptr);
+        if (nCounters == 0) continue;
+
+        counters.resize(nCounters);
+        for (auto &c : counters) {
+            c.sType = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR;
+            c.pNext = nullptr;
+        }
+        vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(gpu(), idx, &nCounters, &counters[0], nullptr);
+        queueFamilyIndex = idx;
+
+        for (uint32_t counterIdx = 0; counterIdx < counters.size(); counterIdx++) {
+            counterIndices.push_back(counterIdx);
+            break;
+        }
+
+        if (counterIndices.empty()) {
+            counters.clear();
+            continue;
+        }
+        break;
+    }
+
+    if (counterIndices.empty()) {
+        printf("%s No queue reported any performance counter with render pass scope.\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkQueryPoolPerformanceCreateInfoKHR perf_query_pool_ci{};
+    perf_query_pool_ci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR;
+    perf_query_pool_ci.queueFamilyIndex = queueFamilyIndex;
+    perf_query_pool_ci.counterIndexCount = counterIndices.size();
+    perf_query_pool_ci.pCounterIndices = &counterIndices[0];
+    VkQueryPoolCreateInfo query_pool_ci{};
+    query_pool_ci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+    query_pool_ci.pNext = &perf_query_pool_ci;
+    query_pool_ci.queryType = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR;
+    query_pool_ci.queryCount = 1;
+
+    VkQueryPool query_pool;
+    vk::CreateQueryPool(device(), &query_pool_ci, nullptr, &query_pool);
+
+    VkQueue queue = VK_NULL_HANDLE;
+    vk::GetDeviceQueue(device(), queueFamilyIndex, 0, &queue);
+
+    PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR =
+        (PFN_vkAcquireProfilingLockKHR)vk::GetInstanceProcAddr(instance(), "vkAcquireProfilingLockKHR");
+    ASSERT_TRUE(vkAcquireProfilingLockKHR != nullptr);
+    PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR =
+        (PFN_vkReleaseProfilingLockKHR)vk::GetInstanceProcAddr(instance(), "vkReleaseProfilingLockKHR");
+    ASSERT_TRUE(vkReleaseProfilingLockKHR != nullptr);
+
+    {
+        VkAcquireProfilingLockInfoKHR lock_info{};
+        lock_info.sType = VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR;
+        VkResult result = vkAcquireProfilingLockKHR(device(), &lock_info);
+        ASSERT_TRUE(result == VK_SUCCESS);
+    }
+
+    {
+        VkBufferCreateInfo buf_info = {};
+        buf_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+        buf_info.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+        buf_info.size = 4096;
+        buf_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+        VkBuffer buffer;
+        VkResult err = vk::CreateBuffer(device(), &buf_info, NULL, &buffer);
+        ASSERT_VK_SUCCESS(err);
+
+        VkMemoryRequirements mem_reqs;
+        vk::GetBufferMemoryRequirements(device(), buffer, &mem_reqs);
+
+        VkMemoryAllocateInfo alloc_info = {};
+        alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+        alloc_info.allocationSize = 4096;
+        VkDeviceMemory mem;
+        err = vk::AllocateMemory(device(), &alloc_info, NULL, &mem);
+        ASSERT_VK_SUCCESS(err);
+        vk::BindBufferMemory(device(), buffer, mem, 0);
+
+        m_commandBuffer->begin();
+
+        vk::CmdBeginQuery(m_commandBuffer->handle(), query_pool, 0, 0);
+
+        // Release the profiling lock while the command buffer is still recording.
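+        // The lock must be held continuously from the start of recording through submission, so re-acquiring it
+        // below does not repair the already-broken recording.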
+        vkReleaseProfilingLockKHR(device());
+        {
+            VkAcquireProfilingLockInfoKHR lock_info{};
+            lock_info.sType = VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR;
+            VkResult result = vkAcquireProfilingLockKHR(device(), &lock_info);
+            ASSERT_TRUE(result == VK_SUCCESS);
+        }
+
+        vk::CmdEndQuery(m_commandBuffer->handle(), query_pool, 0);
+
+        m_commandBuffer->end();
+
+        VkSubmitInfo submit_info;
+        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+        submit_info.pNext = NULL;
+        submit_info.waitSemaphoreCount = 0;
+        submit_info.pWaitSemaphores = NULL;
+        submit_info.pWaitDstStageMask = NULL;
+        submit_info.commandBufferCount = 1;
+        submit_info.pCommandBuffers = &m_commandBuffer->handle();
+        submit_info.signalSemaphoreCount = 0;
+        submit_info.pSignalSemaphores = NULL;
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkQueueSubmit-pCommandBuffers-03220");
+        vk::QueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
+        m_errorMonitor->VerifyFound();
+
+        vk::QueueWaitIdle(queue);
+
+        vk::DestroyBuffer(device(), buffer, nullptr);
+        vk::FreeMemory(device(), mem, NULL);
+    }
+
+    vkReleaseProfilingLockKHR(device());
+
+    vk::DestroyQueryPool(m_device->device(), query_pool, NULL);
+}
+
+TEST_F(VkLayerTest, QueryPerformanceIncompletePasses) {
+    TEST_DESCRIPTION("Verify that we get an error if we don't submit a command buffer for each passes before getting the results.");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME);
+        return;
+    }
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
+        return;
+    }
+
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+    VkPhysicalDeviceFeatures2KHR features2 = {};
+    auto hostQueryResetFeatures = lvl_init_struct<VkPhysicalDeviceHostQueryResetFeaturesEXT>();
+    auto performanceFeatures = lvl_init_struct<VkPhysicalDevicePerformanceQueryFeaturesKHR>(&hostQueryResetFeatures);
+    features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&performanceFeatures);
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+    if (!performanceFeatures.performanceCounterQueryPools) {
+        printf("%s Performance query pools are not supported.\n", kSkipPrefix);
+        return;
+    }
+    if (!hostQueryResetFeatures.hostQueryReset) {
+        printf("%s Missing host query reset.\n", kSkipPrefix);
+        return;
+    }
+
+    VkCommandPoolCreateFlags pool_flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &performanceFeatures, pool_flags));
+    PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR
+        vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR =
+            (PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR)vk::GetInstanceProcAddr(
+                instance(), "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR");
+    ASSERT_TRUE(vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR != nullptr);
+
+    PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR =
+        (PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR)vk::GetInstanceProcAddr(
+            instance(), "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR != nullptr);
+
+    auto queueFamilyProperties = m_device->phy().queue_properties();
+    uint32_t queueFamilyIndex = queueFamilyProperties.size();
+    std::vector<VkPerformanceCounterKHR> counters;
+    std::vector<uint32_t> counterIndices;
+    uint32_t nPasses = 0;
+
+    // Find a queue family whose full set of counters requires more than one pass.
+    for (uint32_t idx = 0; idx < queueFamilyProperties.size(); idx++) {
+        uint32_t nCounters;
+
+        vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(gpu(), idx, &nCounters, nullptr, nullptr);
+        if (nCounters == 0) continue;
+
+        counters.resize(nCounters);
+        for (auto &c : counters) {
+            c.sType = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR;
+            c.pNext = nullptr;
+        }
+        vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(gpu(), idx, &nCounters, &counters[0], nullptr);
+        queueFamilyIndex = idx;
+
+        for (uint32_t counterIdx = 0; counterIdx < counters.size(); counterIdx++) counterIndices.push_back(counterIdx);
+
+        VkQueryPoolPerformanceCreateInfoKHR create_info{};
+        create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR;
+        create_info.queueFamilyIndex = idx;
+        create_info.counterIndexCount = counterIndices.size();
+        create_info.pCounterIndices = &counterIndices[0];
+
+        vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(gpu(), &create_info, &nPasses);
+
+        if (nPasses < 2) {
+            counters.clear();
+            continue;
+        }
+        break;
+    }
+
+    if (counterIndices.empty()) {
+        printf("%s No queue reported a set of counters that needs more than one pass.\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkQueryPoolPerformanceCreateInfoKHR perf_query_pool_ci{};
+    perf_query_pool_ci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR;
+    perf_query_pool_ci.queueFamilyIndex = queueFamilyIndex;
+    perf_query_pool_ci.counterIndexCount = counterIndices.size();
+    perf_query_pool_ci.pCounterIndices = &counterIndices[0];
+    VkQueryPoolCreateInfo query_pool_ci{};
+    query_pool_ci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+    query_pool_ci.pNext = &perf_query_pool_ci;
+    query_pool_ci.queryType = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR;
+    query_pool_ci.queryCount = 1;
+
+    VkQueryPool query_pool;
+    vk::CreateQueryPool(device(), &query_pool_ci, nullptr, &query_pool);
+
+    VkQueue queue = VK_NULL_HANDLE;
+    vk::GetDeviceQueue(device(), queueFamilyIndex, 0, &queue);
+
+    PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR =
+        (PFN_vkAcquireProfilingLockKHR)vk::GetInstanceProcAddr(instance(), "vkAcquireProfilingLockKHR");
+    ASSERT_TRUE(vkAcquireProfilingLockKHR != nullptr);
+    PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR =
+        (PFN_vkReleaseProfilingLockKHR)vk::GetInstanceProcAddr(instance(), "vkReleaseProfilingLockKHR");
+    ASSERT_TRUE(vkReleaseProfilingLockKHR != nullptr);
+    PFN_vkResetQueryPoolEXT fpvkResetQueryPoolEXT =
+        (PFN_vkResetQueryPoolEXT)vk::GetInstanceProcAddr(instance(), "vkResetQueryPoolEXT");
+
+    {
+        VkAcquireProfilingLockInfoKHR lock_info{};
+        lock_info.sType = VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR;
+        VkResult result = vkAcquireProfilingLockKHR(device(), &lock_info);
+        ASSERT_TRUE(result == VK_SUCCESS);
+    }
+
+    {
+        VkBufferCreateInfo buf_info = {};
+        buf_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+        buf_info.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+        buf_info.size = 4096;
+        buf_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+        VkBuffer buffer;
+        VkResult err = vk::CreateBuffer(device(), &buf_info, NULL, &buffer);
+        ASSERT_VK_SUCCESS(err);
+
+        VkMemoryRequirements mem_reqs;
+        vk::GetBufferMemoryRequirements(device(), buffer, &mem_reqs);
+
+        VkMemoryAllocateInfo alloc_info = {};
+        alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+        alloc_info.allocationSize = 4096;
+        VkDeviceMemory mem;
+        err = vk::AllocateMemory(device(), &alloc_info, NULL, &mem);
+        ASSERT_VK_SUCCESS(err);
+        vk::BindBufferMemory(device(), buffer, mem, 0);
+
+        VkCommandBufferBeginInfo command_buffer_begin_info{};
+        command_buffer_begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+        command_buffer_begin_info.flags = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
+
+        fpvkResetQueryPoolEXT(m_device->device(), query_pool, 0, 1);
+
+        m_commandBuffer->begin(&command_buffer_begin_info);
+        vk::CmdBeginQuery(m_commandBuffer->handle(), query_pool, 0, 0);
+        vk::CmdFillBuffer(m_commandBuffer->handle(), buffer, 0, 4096, 0);
+        vk::CmdEndQuery(m_commandBuffer->handle(), query_pool, 0);
+        m_commandBuffer->end();
+
+        // Invalid pass index
+        {
+            VkPerformanceQuerySubmitInfoKHR perf_submit_info{};
+            perf_submit_info.sType = VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR;
+            perf_submit_info.counterPassIndex = nPasses;
+            VkSubmitInfo submit_info{};
+            submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+            submit_info.pNext = &perf_submit_info;
+            submit_info.waitSemaphoreCount = 0;
+            submit_info.pWaitSemaphores = NULL;
+            submit_info.pWaitDstStageMask = NULL;
+            submit_info.commandBufferCount = 1;
+            submit_info.pCommandBuffers = &m_commandBuffer->handle();
+            submit_info.signalSemaphoreCount = 0;
+            submit_info.pSignalSemaphores = NULL;
+
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                                 "VUID-VkPerformanceQuerySubmitInfoKHR-counterPassIndex-03221");
+            vk::QueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
+            m_errorMonitor->VerifyFound();
+        }
+
+        // Leave the last pass out.
+        for (uint32_t passIdx = 0; passIdx < (nPasses - 1); passIdx++) {
+            VkPerformanceQuerySubmitInfoKHR perf_submit_info{};
+            perf_submit_info.sType = VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR;
+            perf_submit_info.counterPassIndex = passIdx;
+            VkSubmitInfo submit_info{};
+            submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+            submit_info.pNext = &perf_submit_info;
+            submit_info.waitSemaphoreCount = 0;
+            submit_info.pWaitSemaphores = NULL;
+            submit_info.pWaitDstStageMask = NULL;
+            submit_info.commandBufferCount = 1;
+            submit_info.pCommandBuffers = &m_commandBuffer->handle();
+            submit_info.signalSemaphoreCount = 0;
+            submit_info.pSignalSemaphores = NULL;
+
+            vk::QueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
+        }
+
+        vk::QueueWaitIdle(queue);
+
+        std::vector<VkPerformanceCounterResultKHR> results;
+        results.resize(counterIndices.size());
+
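+        // The last pass has not been submitted yet, so fetching the results must be rejected.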
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetQueryPoolResults-queryType-03231");
+        vk::GetQueryPoolResults(device(), query_pool, 0, 1, sizeof(VkPerformanceCounterResultKHR) * results.size(), &results[0],
+                                sizeof(VkPerformanceCounterResultKHR), VK_QUERY_RESULT_WAIT_BIT);
+        m_errorMonitor->VerifyFound();
+
+        {
+            VkPerformanceQuerySubmitInfoKHR perf_submit_info{};
+            perf_submit_info.sType = VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR;
+            perf_submit_info.counterPassIndex = nPasses - 1;
+            VkSubmitInfo submit_info{};
+            submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+            submit_info.pNext = &perf_submit_info;
+            submit_info.waitSemaphoreCount = 0;
+            submit_info.pWaitSemaphores = NULL;
+            submit_info.pWaitDstStageMask = NULL;
+            submit_info.commandBufferCount = 1;
+            submit_info.pCommandBuffers = &m_commandBuffer->handle();
+            submit_info.signalSemaphoreCount = 0;
+            submit_info.pSignalSemaphores = NULL;
+
+            vk::QueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
+        }
+
+        vk::QueueWaitIdle(queue);
+
+        // Invalid stride
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetQueryPoolResults-queryType-03229");
+        vk::GetQueryPoolResults(device(), query_pool, 0, 1, sizeof(VkPerformanceCounterResultKHR) * results.size(), &results[0],
+                                sizeof(VkPerformanceCounterResultKHR) + 4, VK_QUERY_RESULT_WAIT_BIT);
+        m_errorMonitor->VerifyFound();
+
+        // Invalid flags
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetQueryPoolResults-queryType-03230");
+        vk::GetQueryPoolResults(device(), query_pool, 0, 1, sizeof(VkPerformanceCounterResultKHR) * results.size(), &results[0],
+                                sizeof(VkPerformanceCounterResultKHR), VK_QUERY_RESULT_WITH_AVAILABILITY_BIT);
+        m_errorMonitor->VerifyFound();
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetQueryPoolResults-queryType-03230");
+        vk::GetQueryPoolResults(device(), query_pool, 0, 1, sizeof(VkPerformanceCounterResultKHR) * results.size(), &results[0],
+                                sizeof(VkPerformanceCounterResultKHR), VK_QUERY_RESULT_PARTIAL_BIT);
+        m_errorMonitor->VerifyFound();
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkGetQueryPoolResults-queryType-03230");
+        vk::GetQueryPoolResults(device(), query_pool, 0, 1, sizeof(VkPerformanceCounterResultKHR) * results.size(), &results[0],
+                                sizeof(VkPerformanceCounterResultKHR), VK_QUERY_RESULT_64_BIT);
+        m_errorMonitor->VerifyFound();
+
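+        // Every pass has now been submitted and the parameters are valid, so fetching the results should succeed.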
+        m_errorMonitor->ExpectSuccess(VK_DEBUG_REPORT_ERROR_BIT_EXT);
+        vk::GetQueryPoolResults(device(), query_pool, 0, 1, sizeof(VkPerformanceCounterResultKHR) * results.size(), &results[0],
+                                sizeof(VkPerformanceCounterResultKHR), VK_QUERY_RESULT_WAIT_BIT);
+        m_errorMonitor->VerifyNotFound();
+
+        vk::DestroyBuffer(device(), buffer, nullptr);
+        vk::FreeMemory(device(), mem, NULL);
+    }
+
+    vkReleaseProfilingLockKHR(device());
+
+    vk::DestroyQueryPool(m_device->device(), query_pool, NULL);
+}
+
+TEST_F(VkLayerTest, QueueSubmitNoTimelineSemaphoreInfo) {
+    TEST_DESCRIPTION("Submit a queue with a timeline semaphore but not a VkTimelineSemaphoreSubmitInfoKHR.");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s not supported by device; skipped.\n", kSkipPrefix, VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME);
+        return;
+    }
+
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+    auto timelinefeatures = lvl_init_struct<VkPhysicalDeviceTimelineSemaphoreFeaturesKHR>();
+    auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&timelinefeatures);
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+    if (!timelinefeatures.timelineSemaphore) {
+        printf("%s Timeline semaphores are not supported.\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    VkSemaphoreTypeCreateInfoKHR semaphore_type_create_info{};
+    semaphore_type_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR;
+    semaphore_type_create_info.semaphoreType = VK_SEMAPHORE_TYPE_TIMELINE_KHR;
+
+    VkSemaphoreCreateInfo semaphore_create_info{};
+    semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+    semaphore_create_info.pNext = &semaphore_type_create_info;
+
+    VkSemaphore semaphore;
+    ASSERT_VK_SUCCESS(vk::CreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore));
+
+    VkPipelineStageFlags stageFlags = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
+    VkSubmitInfo submit_info[2] = {};
+    submit_info[0].sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info[0].commandBufferCount = 0;
+    submit_info[0].pWaitDstStageMask = &stageFlags;
+    submit_info[0].signalSemaphoreCount = 1;
+    submit_info[0].pSignalSemaphores = &semaphore;
+
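+    // submit_info[0] signals a timeline semaphore without chaining a VkTimelineSemaphoreSubmitInfoKHR.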
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSubmitInfo-pWaitSemaphores-03239");
+    vk::QueueSubmit(m_device->m_queue, 1, submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+
+    VkTimelineSemaphoreSubmitInfoKHR timeline_semaphore_submit_info{};
+    uint64_t signalValue = 1;
+    timeline_semaphore_submit_info.sType = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR;
+    timeline_semaphore_submit_info.signalSemaphoreValueCount = 1;
+    timeline_semaphore_submit_info.pSignalSemaphoreValues = &signalValue;
+    submit_info[0].pNext = &timeline_semaphore_submit_info;
+
+    submit_info[1].sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info[1].commandBufferCount = 0;
+    submit_info[1].pWaitDstStageMask = &stageFlags;
+    submit_info[1].waitSemaphoreCount = 1;
+    submit_info[1].pWaitSemaphores = &semaphore;
+
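+    // submit_info[1] waits on the timeline semaphore but also lacks a VkTimelineSemaphoreSubmitInfoKHR.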
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSubmitInfo-pWaitSemaphores-03239");
+    vk::QueueSubmit(m_device->m_queue, 2, submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroySemaphore(m_device->device(), semaphore, nullptr);
+}
+
+TEST_F(VkLayerTest, QueueSubmitTimelineSemaphoreBadValue) {
+    TEST_DESCRIPTION("Submit a queue with a timeline semaphore using a wrong payload value.");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s not supported by device; skipped.\n", kSkipPrefix, VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME);
+        return;
+    }
+
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+    auto timelinefeatures = lvl_init_struct<VkPhysicalDeviceTimelineSemaphoreFeaturesKHR>();
+    auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&timelinefeatures);
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+    if (!timelinefeatures.timelineSemaphore) {
+        printf("%s Timeline semaphores are not supported.\n", kSkipPrefix);
+        return;
+    }
+
+    PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR =
+        (PFN_vkGetPhysicalDeviceProperties2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceProperties2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceProperties2KHR != nullptr);
+    auto timelineproperties = lvl_init_struct<VkPhysicalDeviceTimelineSemaphorePropertiesKHR>();
+    auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&timelineproperties);
+    vkGetPhysicalDeviceProperties2KHR(gpu(), &prop2);
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    VkSemaphoreTypeCreateInfoKHR semaphore_type_create_info{};
+    semaphore_type_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR;
+    semaphore_type_create_info.semaphoreType = VK_SEMAPHORE_TYPE_TIMELINE_KHR;
+
+    VkSemaphoreCreateInfo semaphore_create_info{};
+    semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+    semaphore_create_info.pNext = &semaphore_type_create_info;
+
+    VkSemaphore semaphore;
+    ASSERT_VK_SUCCESS(vk::CreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore));
+
+    VkTimelineSemaphoreSubmitInfoKHR timeline_semaphore_submit_info = {};
+    uint64_t signalValue = 1;
+    uint64_t waitValue = 3;
+    timeline_semaphore_submit_info.sType = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR;
+    timeline_semaphore_submit_info.signalSemaphoreValueCount = 1;
+    timeline_semaphore_submit_info.pSignalSemaphoreValues = &signalValue;
+    timeline_semaphore_submit_info.waitSemaphoreValueCount = 1;
+    timeline_semaphore_submit_info.pWaitSemaphoreValues = &waitValue;
+
+    VkPipelineStageFlags stageFlags = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
+    VkSubmitInfo submit_info[2] = {};
+    submit_info[0].sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info[0].pNext = &timeline_semaphore_submit_info;
+    submit_info[0].pWaitDstStageMask = &stageFlags;
+    submit_info[0].signalSemaphoreCount = 1;
+    submit_info[0].pSignalSemaphores = &semaphore;
+
+    submit_info[1].sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info[1].pNext = &timeline_semaphore_submit_info;
+    submit_info[1].pWaitDstStageMask = &stageFlags;
+    submit_info[1].waitSemaphoreCount = 1;
+    submit_info[1].pWaitSemaphores = &semaphore;
+
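+    // Provide fewer signal values than signal semaphores.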
+    timeline_semaphore_submit_info.signalSemaphoreValueCount = 0;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSubmitInfo-pNext-03241");
+    vk::QueueSubmit(m_device->m_queue, 1, submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+
+    timeline_semaphore_submit_info.signalSemaphoreValueCount = 1;
+    timeline_semaphore_submit_info.waitSemaphoreValueCount = 0;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSubmitInfo-pNext-03240");
+    vk::QueueSubmit(m_device->m_queue, 2, submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroySemaphore(m_device->device(), semaphore, nullptr);
+
+    timeline_semaphore_submit_info.waitSemaphoreValueCount = 1;
+    semaphore_type_create_info.initialValue = 5;
+    ASSERT_VK_SUCCESS(vk::CreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore));
+
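+    // The signal value (1) is not greater than the semaphore's current value (5).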
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSubmitInfo-pSignalSemaphores-03242");
+    vk::QueueSubmit(m_device->m_queue, 1, submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroySemaphore(m_device->device(), semaphore, nullptr);
+
+    // Check if we can test violations of maxTimelineSemaphoreValueDifference
+    if (timelineproperties.maxTimelineSemaphoreValueDifference < UINT64_MAX) {
+        semaphore_type_create_info.initialValue = 0;
+
+        ASSERT_VK_SUCCESS(vk::CreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore));
+
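+        // Signal a value that exceeds the current value (0) by more than maxTimelineSemaphoreValueDifference.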
+        signalValue = timelineproperties.maxTimelineSemaphoreValueDifference + 1;
+        timeline_semaphore_submit_info.pSignalSemaphoreValues = &signalValue;
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSubmitInfo-pSignalSemaphores-03244");
+        vk::QueueSubmit(m_device->m_queue, 1, submit_info, VK_NULL_HANDLE);
+        m_errorMonitor->VerifyFound();
+
+        if (signalValue < UINT64_MAX) {
+            waitValue = signalValue + 1;
+            signalValue = 1;
+
+            timeline_semaphore_submit_info.waitSemaphoreValueCount = 1;
+            timeline_semaphore_submit_info.pWaitSemaphoreValues = &waitValue;
+
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSubmitInfo-pWaitSemaphores-03243");
+            vk::QueueSubmit(m_device->m_queue, 2, submit_info, VK_NULL_HANDLE);
+            m_errorMonitor->VerifyFound();
+        }
+
+        vk::DestroySemaphore(m_device->device(), semaphore, nullptr);
+    }
+}
diff --git a/src/third_party/vulkan-validation-layers/src/tests/vklayertests_pipeline_shader.cpp b/src/third_party/vulkan-validation-layers/src/tests/vklayertests_pipeline_shader.cpp
new file mode 100644
index 0000000..3396e15
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/vklayertests_pipeline_shader.cpp
@@ -0,0 +1,6908 @@
+/*
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2019 Google, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Author: Chia-I Wu <olvaffe@gmail.com>
+ * Author: Chris Forbes <chrisf@ijw.co.nz>
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Mike Stroyan <mike@LunarG.com>
+ * Author: Tobin Ehlis <tobine@google.com>
+ * Author: Tony Barbour <tony@LunarG.com>
+ * Author: Cody Northrop <cnorthrop@google.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ * Author: Jeremy Kniager <jeremyk@lunarg.com>
+ * Author: Shannon McPherson <shannon@lunarg.com>
+ * Author: John Zulauf <jzulauf@lunarg.com>
+ */
+
+#include "cast_utils.h"
+#include "layer_validation_tests.h"
+
+TEST_F(VkLayerTest, PSOPolygonModeInvalid) {
+    TEST_DESCRIPTION("Attempt to use invalid polygon fill modes.");
+    VkPhysicalDeviceFeatures device_features = {};
+    device_features.fillModeNonSolid = VK_FALSE;
+    // Create a device with the non-solid fill mode feature disabled.
+    ASSERT_NO_FATAL_FAILURE(Init(&device_features));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkPipelineRasterizationStateCreateInfo rs_ci = {};
+    rs_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
+    rs_ci.pNext = nullptr;
+    rs_ci.lineWidth = 1.0f;
+    rs_ci.rasterizerDiscardEnable = VK_TRUE;
+
+    auto set_polygonMode = [&](CreatePipelineHelper &helper) { helper.rs_state_ci_ = rs_ci; };
+
+    // Set polygonMode to POINT while the non-solid fill mode feature is disabled.
+    // Introduce failure by setting unsupported polygon mode
+    rs_ci.polygonMode = VK_POLYGON_MODE_POINT;
+    CreatePipelineHelper::OneshotTest(*this, set_polygonMode, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "polygonMode cannot be VK_POLYGON_MODE_POINT or VK_POLYGON_MODE_LINE");
+
+    // Set polygonMode to LINE while the non-solid fill mode feature is disabled.
+    // Introduce failure by setting unsupported polygon mode
+    rs_ci.polygonMode = VK_POLYGON_MODE_LINE;
+    CreatePipelineHelper::OneshotTest(*this, set_polygonMode, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "polygonMode cannot be VK_POLYGON_MODE_POINT or VK_POLYGON_MODE_LINE");
+
+    // Set polygonMode to FILL_RECTANGLE_NV while the extension is not enabled.
+    // Introduce failure by setting unsupported polygon mode
+    rs_ci.polygonMode = VK_POLYGON_MODE_FILL_RECTANGLE_NV;
+    CreatePipelineHelper::OneshotTest(*this, set_polygonMode, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01414");
+}
+
+TEST_F(VkLayerTest, PipelineNotBound) {
+    TEST_DESCRIPTION("Pass in an invalid pipeline object handle into a Vulkan API call.");
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindPipeline-pipeline-parameter");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkPipeline badPipeline = CastToHandle<VkPipeline, uintptr_t>(0xbaadb1be);
+
+    m_commandBuffer->begin();
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, badPipeline);
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, PipelineWrongBindPointGraphics) {
+    TEST_DESCRIPTION("Bind a compute pipeline in the graphics bind point");
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindPipeline-pipelineBindPoint-00779");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    CreateComputePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.InitState();
+    pipe.CreateComputePipeline();
+
+    m_commandBuffer->begin();
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, PipelineWrongBindPointCompute) {
+    TEST_DESCRIPTION("Bind a graphics pipeline in the compute bind point");
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindPipeline-pipelineBindPoint-00780");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.InitState();
+    pipe.CreateGraphicsPipeline();
+
+    m_commandBuffer->begin();
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipe.pipeline_);
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, PipelineWrongBindPointRayTracing) {
+    TEST_DESCRIPTION("Bind a graphics pipeline in the ray-tracing bind point");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_RAY_TRACING_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_NV_RAY_TRACING_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_NV_RAY_TRACING_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindPipeline-pipelineBindPoint-02392");
+
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    if (!EnableDeviceProfileLayer()) {
+        printf("%s Failed to enable device profile layer.\n", kSkipPrefix);
+        return;
+    }
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.InitState();
+    pipe.CreateGraphicsPipeline();
+
+    m_commandBuffer->begin();
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, pipe.pipeline_);
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineBadVertexAttributeFormat) {
+    TEST_DESCRIPTION("Test that pipeline validation catches invalid vertex attribute formats");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkVertexInputBindingDescription input_binding;
+    memset(&input_binding, 0, sizeof(input_binding));
+
+    VkVertexInputAttributeDescription input_attribs;
+    memset(&input_attribs, 0, sizeof(input_attribs));
+
+    // Pick a compressed format that should not be usable as a vertex attribute and confirm it is rejected.
+    input_attribs.format = VK_FORMAT_BC2_UNORM_BLOCK;
+    VkFormatProperties format_props = m_device->format_properties(input_attribs.format);
+    if ((format_props.bufferFeatures & VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT) != 0) {
+        printf("%s Format unsuitable for test; skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    input_attribs.location = 0;
+
+    auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.vi_ci_.pVertexBindingDescriptions = &input_binding;
+        helper.vi_ci_.vertexBindingDescriptionCount = 1;
+        helper.vi_ci_.pVertexAttributeDescriptions = &input_attribs;
+        helper.vi_ci_.vertexAttributeDescriptionCount = 1;
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkVertexInputAttributeDescription-format-00623");
+}
+
+TEST_F(VkLayerTest, DisabledIndependentBlend) {
+    TEST_DESCRIPTION(
+        "Generate an INDEPENDENT_BLEND error by disabling the independentBlend feature and then specifying different blend "
+        "states for two attachments");
+    VkPhysicalDeviceFeatures features = {};
+    features.independentBlend = VK_FALSE;
+    ASSERT_NO_FATAL_FAILURE(Init(&features));
+
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "Invalid Pipeline CreateInfo: If independent blend feature not enabled, all elements of pAttachments must be identical");
+
+    VkDescriptorSetObj descriptorSet(m_device);
+    descriptorSet.AppendDummy();
+    descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+    VkPipelineObj pipeline(m_device);
+    // Create a renderPass with two color attachments
+    VkAttachmentReference attachments[2] = {};
+    attachments[0].layout = VK_IMAGE_LAYOUT_GENERAL;
+    attachments[1].attachment = 1;
+    attachments[1].layout = VK_IMAGE_LAYOUT_GENERAL;
+
+    VkSubpassDescription subpass = {};
+    subpass.pColorAttachments = attachments;
+    subpass.colorAttachmentCount = 2;
+
+    VkRenderPassCreateInfo rpci = {};
+    rpci.subpassCount = 1;
+    rpci.pSubpasses = &subpass;
+    rpci.attachmentCount = 2;
+
+    VkAttachmentDescription attach_desc[2] = {};
+    attach_desc[0].format = VK_FORMAT_B8G8R8A8_UNORM;
+    attach_desc[0].samples = VK_SAMPLE_COUNT_1_BIT;
+    attach_desc[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    attach_desc[0].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+    attach_desc[1].format = VK_FORMAT_B8G8R8A8_UNORM;
+    attach_desc[1].samples = VK_SAMPLE_COUNT_1_BIT;
+    attach_desc[1].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    attach_desc[1].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+
+    rpci.pAttachments = attach_desc;
+    rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+
+    VkRenderPass renderpass;
+    vk::CreateRenderPass(m_device->device(), &rpci, NULL, &renderpass);
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    pipeline.AddShader(&vs);
+
+    VkPipelineColorBlendAttachmentState att_state1 = {}, att_state2 = {};
+    att_state1.dstAlphaBlendFactor = VK_BLEND_FACTOR_CONSTANT_COLOR;
+    att_state1.blendEnable = VK_TRUE;
+    att_state2.dstAlphaBlendFactor = VK_BLEND_FACTOR_CONSTANT_COLOR;
+    att_state2.blendEnable = VK_FALSE;
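+    // The two attachments use different blendEnable values, which requires the independentBlend feature.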
+    pipeline.AddColorAttachment(0, att_state1);
+    pipeline.AddColorAttachment(1, att_state2);
+    pipeline.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderpass);
+    m_errorMonitor->VerifyFound();
+    vk::DestroyRenderPass(m_device->device(), renderpass, NULL);
+}
+
+// Is the Pipeline compatible with the expectations of the Renderpass/subpasses?
+TEST_F(VkLayerTest, PipelineRenderpassCompatibility) {
+    TEST_DESCRIPTION(
+        "Create a graphics pipeline that is incompatible with the requirements of its contained Renderpass/subpasses.");
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkPipelineColorBlendAttachmentState att_state1 = {};
+    att_state1.dstAlphaBlendFactor = VK_BLEND_FACTOR_CONSTANT_COLOR;
+    att_state1.blendEnable = VK_TRUE;
+
+    auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.cb_attachments_ = att_state1;
+        helper.gp_ci_.pColorBlendState = nullptr;
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00753");
+}
+
+TEST_F(VkLayerTest, PointSizeFailure) {
+    TEST_DESCRIPTION("Create a pipeline using TOPOLOGY_POINT_LIST but do not set PointSize in vertex shader.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+
+    // Create VS declaring PointSize but not writing to it
+    const char NoPointSizeVertShader[] =
+        "#version 450\n"
+        "vec2 vertices[3];\n"
+        "out gl_PerVertex\n"
+        "{\n"
+        "    vec4 gl_Position;\n"
+        "    float gl_PointSize;\n"
+        "};\n"
+        "void main() {\n"
+        "    vertices[0] = vec2(-1.0, -1.0);\n"
+        "    vertices[1] = vec2( 1.0, -1.0);\n"
+        "    vertices[2] = vec2( 0.0,  1.0);\n"
+        "    gl_Position = vec4(vertices[gl_VertexIndex % 3], 0.0, 1.0);\n"
+        "}\n";
+    VkShaderObj vs(m_device, NoPointSizeVertShader, VK_SHADER_STAGE_VERTEX_BIT, this);
+
+    auto set_info = [&](CreatePipelineHelper &helper) {
+        // Set Input Assembly to TOPOLOGY POINT LIST
+        helper.ia_ci_.topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
+
+        helper.shader_stages_ = {vs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "Pipeline topology is set to POINT_LIST");
+}
+
+TEST_F(VkLayerTest, InvalidTopology) {
+    TEST_DESCRIPTION("InvalidTopology.");
+    VkPhysicalDeviceFeatures deviceFeatures = {};
+    deviceFeatures.geometryShader = VK_FALSE;
+    deviceFeatures.tessellationShader = VK_FALSE;
+
+    ASSERT_NO_FATAL_FAILURE(Init(&deviceFeatures));
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkShaderObj vs(m_device, bindStateVertPointSizeShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+
+    VkPrimitiveTopology topology;
+
+    auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.ia_ci_.topology = topology;
+        helper.ia_ci_.primitiveRestartEnable = VK_TRUE;
+        helper.shader_stages_ = {vs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
+    };
+
+    topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00428");
+
+    topology = VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00428");
+
+    topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00428");
+
+    topology = VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY;
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      std::vector<string>{"VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00428",
+                                                          "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00429"});
+
+    topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY;
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      std::vector<string>{"VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00428",
+                                                          "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00429"});
+
+    topology = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST;
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      std::vector<string>{"VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00428",
+                                                          "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00430",
+                                                          "VUID-VkGraphicsPipelineCreateInfo-topology-00737"});
+
+    topology = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY;
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00429");
+
+    topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY;
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00429");
+}
+
+TEST_F(VkLayerTest, PointSizeGeomShaderFailure) {
+    TEST_DESCRIPTION(
+        "Create a pipeline using TOPOLOGY_POINT_LIST, set PointSize vertex shader, but not in the final geometry stage.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    if ((!m_device->phy().features().geometryShader) || (!m_device->phy().features().shaderTessellationAndGeometryPointSize)) {
+        printf("%s Device does not support the required geometry shader features; skipped.\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+
+    // Create a GS that emits gl_Position but does not write gl_PointSize.
+    static char const *gsSource =
+        "#version 450\n"
+        "layout (points) in;\n"
+        "layout (points) out;\n"
+        "layout (max_vertices = 1) out;\n"
+        "void main() {\n"
+        "   gl_Position = vec4(1.0, 0.5, 0.5, 0.0);\n"
+        "   EmitVertex();\n"
+        "}\n";
+
+    VkShaderObj vs(m_device, bindStateVertPointSizeShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj gs(m_device, gsSource, VK_SHADER_STAGE_GEOMETRY_BIT, this);
+
+    auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.ia_ci_.topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
+        helper.shader_stages_ = {vs.GetStageCreateInfo(), gs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
+    };
+
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "Pipeline topology is set to POINT_LIST");
+}
+
+TEST_F(VkLayerTest, BuiltinBlockOrderMismatchVsGs) {
+    TEST_DESCRIPTION("Use different order of gl_Position and gl_PointSize in builtin block interface between VS and GS.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    if (!m_device->phy().features().geometryShader) {
+        printf("%s Device does not support geometry shaders; Skipped.\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+
+    // Compiled using the GLSL code below. GlslangValidator rearranges the members, but here they are kept in the order provided.
+    // #version 450
+    // layout (points) in;
+    // layout (points) out;
+    // layout (max_vertices = 1) out;
+    // in gl_PerVertex {
+    //     float gl_PointSize;
+    //     vec4 gl_Position;
+    // } gl_in[];
+    // void main() {
+    //     gl_Position = gl_in[0].gl_Position;
+    //     gl_PointSize = gl_in[0].gl_PointSize;
+    //     EmitVertex();
+    // }
+
+    const std::string gsSource = R"(
+               OpCapability Geometry
+               OpCapability GeometryPointSize
+          %1 = OpExtInstImport "GLSL.std.450"
+               OpMemoryModel Logical GLSL450
+               OpEntryPoint Geometry %main "main" %_ %gl_in
+               OpExecutionMode %main InputPoints
+               OpExecutionMode %main Invocations 1
+               OpExecutionMode %main OutputPoints
+               OpExecutionMode %main OutputVertices 1
+               OpSource GLSL 450
+               OpMemberDecorate %gl_PerVertex 0 BuiltIn Position
+               OpMemberDecorate %gl_PerVertex 1 BuiltIn PointSize
+               OpMemberDecorate %gl_PerVertex 2 BuiltIn ClipDistance
+               OpMemberDecorate %gl_PerVertex 3 BuiltIn CullDistance
+               OpDecorate %gl_PerVertex Block
+               OpMemberDecorate %gl_PerVertex_0 0 BuiltIn PointSize
+               OpMemberDecorate %gl_PerVertex_0 1 BuiltIn Position
+               OpDecorate %gl_PerVertex_0 Block
+       %void = OpTypeVoid
+          %3 = OpTypeFunction %void
+      %float = OpTypeFloat 32
+    %v4float = OpTypeVector %float 4
+       %uint = OpTypeInt 32 0
+     %uint_1 = OpConstant %uint 1
+%_arr_float_uint_1 = OpTypeArray %float %uint_1
+%gl_PerVertex = OpTypeStruct %v4float %float %_arr_float_uint_1 %_arr_float_uint_1
+%_ptr_Output_gl_PerVertex = OpTypePointer Output %gl_PerVertex
+          %_ = OpVariable %_ptr_Output_gl_PerVertex Output
+        %int = OpTypeInt 32 1
+      %int_0 = OpConstant %int 0
+%gl_PerVertex_0 = OpTypeStruct %float %v4float
+%_arr_gl_PerVertex_0_uint_1 = OpTypeArray %gl_PerVertex_0 %uint_1
+%_ptr_Input__arr_gl_PerVertex_0_uint_1 = OpTypePointer Input %_arr_gl_PerVertex_0_uint_1
+      %gl_in = OpVariable %_ptr_Input__arr_gl_PerVertex_0_uint_1 Input
+%_ptr_Input_v4float = OpTypePointer Input %v4float
+%_ptr_Output_v4float = OpTypePointer Output %v4float
+      %int_1 = OpConstant %int 1
+%_ptr_Input_float = OpTypePointer Input %float
+%_ptr_Output_float = OpTypePointer Output %float
+       %main = OpFunction %void None %3
+          %5 = OpLabel
+         %21 = OpAccessChain %_ptr_Input_v4float %gl_in %int_0 %int_1
+         %22 = OpLoad %v4float %21
+         %24 = OpAccessChain %_ptr_Output_v4float %_ %int_0
+               OpStore %24 %22
+         %27 = OpAccessChain %_ptr_Input_float %gl_in %int_0 %int_0
+         %28 = OpLoad %float %27
+         %30 = OpAccessChain %_ptr_Output_float %_ %int_1
+               OpStore %30 %28
+               OpEmitVertex
+               OpReturn
+               OpFunctionEnd
+        )";
+
+    VkShaderObj vs(m_device, bindStateVertPointSizeShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj gs(m_device, gsSource, VK_SHADER_STAGE_GEOMETRY_BIT, this);
+
+    auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.ia_ci_.topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
+        helper.shader_stages_ = {vs.GetStageCreateInfo(), gs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "Builtin variable inside block doesn't match between");
+}
+
+TEST_F(VkLayerTest, BuiltinBlockSizeMismatchVsGs) {
+    TEST_DESCRIPTION("Use different number of elements in builtin block interface between VS and GS.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    if (!m_device->phy().features().geometryShader) {
+        printf("%s Device does not support geometry shaders; Skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+
+    static const char *gsSource =
+        "#version 450\n"
+        "layout (points) in;\n"
+        "layout (points) out;\n"
+        "layout (max_vertices = 1) out;\n"
+        "in gl_PerVertex\n"
+        "{\n"
+        "    vec4 gl_Position;\n"
+        "    float gl_PointSize;\n"
+        "    float gl_ClipDistance[];\n"
+        "} gl_in[];\n"
+        "void main()\n"
+        "{\n"
+        "    gl_Position = gl_in[0].gl_Position;\n"
+        "    gl_PointSize = gl_in[0].gl_PointSize;\n"
+        "    EmitVertex();\n"
+        "}\n";
+
+    VkShaderObj vs(m_device, bindStateVertPointSizeShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj gs(m_device, gsSource, VK_SHADER_STAGE_GEOMETRY_BIT, this);
+
+    auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.ia_ci_.topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
+        helper.shader_stages_ = {vs.GetStageCreateInfo(), gs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "Number of elements inside builtin block differ between stages");
+}
+
+TEST_F(VkLayerTest, CreatePipelineLayoutExceedsSetLimit) {
+    TEST_DESCRIPTION("Attempt to create a pipeline layout using more than the physical limit of SetLayouts.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkDescriptorSetLayoutBinding layout_binding = {};
+    layout_binding.binding = 0;
+    layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    layout_binding.descriptorCount = 1;
+    layout_binding.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
+    layout_binding.pImmutableSamplers = NULL;
+
+    VkDescriptorSetLayoutCreateInfo ds_layout_ci = {};
+    ds_layout_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
+    ds_layout_ci.bindingCount = 1;
+    ds_layout_ci.pBindings = &layout_binding;
+    VkDescriptorSetLayout ds_layout = {};
+    VkResult err = vk::CreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+    ASSERT_VK_SUCCESS(err);
+
+    // Create an array of DSLs, one larger than the physical limit
+    const auto excess_layouts = 1 + m_device->phy().properties().limits.maxBoundDescriptorSets;
+    std::vector<VkDescriptorSetLayout> dsl_array(excess_layouts, ds_layout);
+
+    VkPipelineLayoutCreateInfo pipeline_layout_ci = {};
+    pipeline_layout_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+    pipeline_layout_ci.pNext = NULL;
+    pipeline_layout_ci.setLayoutCount = excess_layouts;
+    pipeline_layout_ci.pSetLayouts = dsl_array.data();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-setLayoutCount-00286");
+    VkPipelineLayout pipeline_layout = VK_NULL_HANDLE;
+    err = vk::CreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+    m_errorMonitor->VerifyFound();
+
+    // Clean up
+    vk::DestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+}
+
+TEST_F(VkLayerTest, CreatePipelineLayoutExcessPerStageDescriptors) {
+    TEST_DESCRIPTION("Attempt to create a pipeline layout where total descriptors exceed per-stage limits");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    uint32_t max_uniform_buffers = m_device->phy().properties().limits.maxPerStageDescriptorUniformBuffers;
+    uint32_t max_storage_buffers = m_device->phy().properties().limits.maxPerStageDescriptorStorageBuffers;
+    uint32_t max_sampled_images = m_device->phy().properties().limits.maxPerStageDescriptorSampledImages;
+    uint32_t max_storage_images = m_device->phy().properties().limits.maxPerStageDescriptorStorageImages;
+    uint32_t max_samplers = m_device->phy().properties().limits.maxPerStageDescriptorSamplers;
+    uint32_t max_combined = std::min(max_samplers, max_sampled_images);
+    uint32_t max_input_attachments = m_device->phy().properties().limits.maxPerStageDescriptorInputAttachments;
+
+    uint32_t sum_dyn_uniform_buffers = m_device->phy().properties().limits.maxDescriptorSetUniformBuffersDynamic;
+    uint32_t sum_uniform_buffers = m_device->phy().properties().limits.maxDescriptorSetUniformBuffers;
+    uint32_t sum_dyn_storage_buffers = m_device->phy().properties().limits.maxDescriptorSetStorageBuffersDynamic;
+    uint32_t sum_storage_buffers = m_device->phy().properties().limits.maxDescriptorSetStorageBuffers;
+    uint32_t sum_sampled_images = m_device->phy().properties().limits.maxDescriptorSetSampledImages;
+    uint32_t sum_storage_images = m_device->phy().properties().limits.maxDescriptorSetStorageImages;
+    uint32_t sum_samplers = m_device->phy().properties().limits.maxDescriptorSetSamplers;
+    uint32_t sum_input_attachments = m_device->phy().properties().limits.maxDescriptorSetInputAttachments;
+
+    // Devices that report UINT32_MAX for any of these limits can't run this test
+    if (UINT32_MAX == std::max({max_uniform_buffers, max_storage_buffers, max_sampled_images, max_storage_images, max_samplers})) {
+        printf("%s Physical device limits report as 2^32-1. Skipping test.\n", kSkipPrefix);
+        return;
+    }
+
+    VkDescriptorSetLayoutBinding dslb = {};
+    std::vector<VkDescriptorSetLayoutBinding> dslb_vec = {};
+    VkDescriptorSetLayout ds_layout = VK_NULL_HANDLE;
+    VkDescriptorSetLayoutCreateInfo ds_layout_ci = {};
+    ds_layout_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
+    ds_layout_ci.pNext = NULL;
+    VkPipelineLayoutCreateInfo pipeline_layout_ci = {};
+    pipeline_layout_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+    pipeline_layout_ci.pNext = NULL;
+    pipeline_layout_ci.setLayoutCount = 1;
+    pipeline_layout_ci.pSetLayouts = &ds_layout;
+    VkPipelineLayout pipeline_layout = VK_NULL_HANDLE;
+
+    // VU 0fe0023e - too many sampler type descriptors in fragment stage
+    dslb_vec.clear();
+    dslb.binding = 0;
+    dslb.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
+    dslb.descriptorCount = max_samplers;
+    dslb.stageFlags = VK_SHADER_STAGE_ALL_GRAPHICS;
+    dslb.pImmutableSamplers = NULL;
+    dslb_vec.push_back(dslb);
+    dslb.binding = 1;
+    dslb.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+    dslb.descriptorCount = max_combined;
+    dslb.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+    dslb_vec.push_back(dslb);
+
+    ds_layout_ci.bindingCount = dslb_vec.size();
+    ds_layout_ci.pBindings = dslb_vec.data();
+    VkResult err = vk::CreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+    ASSERT_VK_SUCCESS(err);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00287");
+    if ((max_samplers + max_combined) > sum_samplers) {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01677");  // expect all-stages sum too
+    }
+    if (max_combined > sum_sampled_images) {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01682");  // expect all-stages sum too
+    }
+    err = vk::CreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+    m_errorMonitor->VerifyFound();
+    vk::DestroyPipelineLayout(m_device->device(), pipeline_layout, NULL);  // Unnecessary but harmless if test passed
+    pipeline_layout = VK_NULL_HANDLE;
+    vk::DestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+
+    // VU 0fe00240 - too many uniform buffer type descriptors in vertex stage
+    dslb_vec.clear();
+    dslb.binding = 0;
+    dslb.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    dslb.descriptorCount = max_uniform_buffers + 1;
+    dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT;
+    dslb_vec.push_back(dslb);
+    dslb.binding = 1;
+    dslb.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
+    dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
+    dslb_vec.push_back(dslb);
+
+    ds_layout_ci.bindingCount = dslb_vec.size();
+    ds_layout_ci.pBindings = dslb_vec.data();
+    err = vk::CreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+    ASSERT_VK_SUCCESS(err);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00288");
+    if (dslb.descriptorCount > sum_uniform_buffers) {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01678");  // expect all-stages sum too
+    }
+    if (dslb.descriptorCount > sum_dyn_uniform_buffers) {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01679");  // expect all-stages sum too
+    }
+    err = vk::CreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+    m_errorMonitor->VerifyFound();
+    vk::DestroyPipelineLayout(m_device->device(), pipeline_layout, NULL);  // Unnecessary but harmless if test passed
+    pipeline_layout = VK_NULL_HANDLE;
+    vk::DestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+
+    // VU 0fe00242 - too many storage buffer type descriptors in compute stage
+    dslb_vec.clear();
+    dslb.binding = 0;
+    dslb.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
+    dslb.descriptorCount = max_storage_buffers + 1;
+    dslb.stageFlags = VK_SHADER_STAGE_ALL;
+    dslb_vec.push_back(dslb);
+    dslb.binding = 1;
+    dslb.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC;
+    dslb_vec.push_back(dslb);
+    dslb.binding = 2;
+    dslb.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
+    dslb.stageFlags = VK_SHADER_STAGE_COMPUTE_BIT;
+    dslb_vec.push_back(dslb);
+
+    ds_layout_ci.bindingCount = dslb_vec.size();
+    ds_layout_ci.pBindings = dslb_vec.data();
+    err = vk::CreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+    ASSERT_VK_SUCCESS(err);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00289");
+    if (dslb.descriptorCount > sum_dyn_storage_buffers) {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01681");  // expect all-stages sum too
+    }
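+    // Bindings 0 and 2 are the plain storage buffers; their combined count is compared against the all-stages limit below.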
+    if (dslb_vec[0].descriptorCount + dslb_vec[2].descriptorCount > sum_storage_buffers) {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01680");  // expect all-stages sum too
+    }
+    err = vk::CreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+    m_errorMonitor->VerifyFound();
+    vk::DestroyPipelineLayout(m_device->device(), pipeline_layout, NULL);  // Unnecessary but harmless if test passed
+    pipeline_layout = VK_NULL_HANDLE;
+    vk::DestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+
+    // VU 0fe00244 - too many sampled image type descriptors in multiple stages
+    dslb_vec.clear();
+    dslb.binding = 0;
+    dslb.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
+    dslb.descriptorCount = max_sampled_images;
+    dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT;
+    dslb_vec.push_back(dslb);
+    dslb.binding = 1;
+    dslb.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
+    dslb.stageFlags = VK_SHADER_STAGE_ALL_GRAPHICS;
+    dslb_vec.push_back(dslb);
+    dslb.binding = 2;
+    dslb.descriptorCount = max_combined;
+    dslb.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+    dslb_vec.push_back(dslb);
+
+    ds_layout_ci.bindingCount = dslb_vec.size();
+    ds_layout_ci.pBindings = dslb_vec.data();
+    err = vk::CreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+    ASSERT_VK_SUCCESS(err);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00290");
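+    // Sampled images, uniform texel buffers, and combined image samplers all count toward the sampled-image limits,
+    // so this layout contributes 2 * max_sampled_images + max_combined such descriptors.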
+    if (max_combined + 2 * max_sampled_images > sum_sampled_images) {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01682");  // expect all-stages sum too
+    }
+    if (max_combined > sum_samplers) {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01677");  // expect all-stages sum too
+    }
+    err = vk::CreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+    m_errorMonitor->VerifyFound();
+    vk::DestroyPipelineLayout(m_device->device(), pipeline_layout, NULL);  // Unnecessary but harmless if test passed
+    pipeline_layout = VK_NULL_HANDLE;
+    vk::DestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+
+    // VU 0fe00246 - too many storage image type descriptors in fragment stage
+    dslb_vec.clear();
+    dslb.binding = 0;
+    dslb.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
+    dslb.descriptorCount = 1 + (max_storage_images / 2);
+    dslb.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+    dslb_vec.push_back(dslb);
+    dslb.binding = 1;
+    dslb.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
+    dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT | VK_SHADER_STAGE_COMPUTE_BIT;
+    dslb_vec.push_back(dslb);
+
+    ds_layout_ci.bindingCount = dslb_vec.size();
+    ds_layout_ci.pBindings = dslb_vec.data();
+    err = vk::CreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+    ASSERT_VK_SUCCESS(err);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00291");
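+    // Storage texel buffers count toward the storage-image limits, and binding 1 reuses binding 0's descriptorCount,
+    // so the layout holds 2 * dslb.descriptorCount storage-image-type descriptors.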
+    if (2 * dslb.descriptorCount > sum_storage_images) {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01683");  // expect all-stages sum too
+    }
+    err = vk::CreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+    m_errorMonitor->VerifyFound();
+    vk::DestroyPipelineLayout(m_device->device(), pipeline_layout, NULL);  // Unnecessary but harmless if test passed
+    pipeline_layout = VK_NULL_HANDLE;
+    vk::DestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+
+    // VU 0fe00d18 - too many input attachments in fragment stage
+    dslb_vec.clear();
+    dslb.binding = 0;
+    dslb.descriptorType = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
+    dslb.descriptorCount = 1 + max_input_attachments;
+    dslb.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+    dslb_vec.push_back(dslb);
+
+    ds_layout_ci.bindingCount = dslb_vec.size();
+    ds_layout_ci.pBindings = dslb_vec.data();
+    err = vk::CreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+    ASSERT_VK_SUCCESS(err);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01676");
+    if (dslb.descriptorCount > sum_input_attachments) {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01684");  // expect all-stages sum too
+    }
+    err = vk::CreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+    m_errorMonitor->VerifyFound();
+    vk::DestroyPipelineLayout(m_device->device(), pipeline_layout, NULL);  // Unnecessary but harmless if test passed
+    pipeline_layout = VK_NULL_HANDLE;
+    vk::DestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+}
+
+TEST_F(VkLayerTest, CreatePipelineLayoutExcessDescriptorsOverall) {
+    TEST_DESCRIPTION("Attempt to create a pipeline layout where total descriptors exceed limits");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    uint32_t max_uniform_buffers = m_device->phy().properties().limits.maxPerStageDescriptorUniformBuffers;
+    uint32_t max_storage_buffers = m_device->phy().properties().limits.maxPerStageDescriptorStorageBuffers;
+    uint32_t max_sampled_images = m_device->phy().properties().limits.maxPerStageDescriptorSampledImages;
+    uint32_t max_storage_images = m_device->phy().properties().limits.maxPerStageDescriptorStorageImages;
+    uint32_t max_samplers = m_device->phy().properties().limits.maxPerStageDescriptorSamplers;
+    uint32_t max_input_attachments = m_device->phy().properties().limits.maxPerStageDescriptorInputAttachments;
+
+    uint32_t sum_dyn_uniform_buffers = m_device->phy().properties().limits.maxDescriptorSetUniformBuffersDynamic;
+    uint32_t sum_uniform_buffers = m_device->phy().properties().limits.maxDescriptorSetUniformBuffers;
+    uint32_t sum_dyn_storage_buffers = m_device->phy().properties().limits.maxDescriptorSetStorageBuffersDynamic;
+    uint32_t sum_storage_buffers = m_device->phy().properties().limits.maxDescriptorSetStorageBuffers;
+    uint32_t sum_sampled_images = m_device->phy().properties().limits.maxDescriptorSetSampledImages;
+    uint32_t sum_storage_images = m_device->phy().properties().limits.maxDescriptorSetStorageImages;
+    uint32_t sum_samplers = m_device->phy().properties().limits.maxDescriptorSetSamplers;
+    uint32_t sum_input_attachments = m_device->phy().properties().limits.maxDescriptorSetInputAttachments;
+
+    // Devices that report UINT32_MAX for any of these limits can't run this test
+    if (UINT32_MAX == std::max({sum_dyn_uniform_buffers, sum_uniform_buffers, sum_dyn_storage_buffers, sum_storage_buffers,
+                                sum_sampled_images, sum_storage_images, sum_samplers, sum_input_attachments})) {
+        printf("%s Physical device limits report as 2^32-1. Skipping test.\n", kSkipPrefix);
+        return;
+    }
+
+    VkDescriptorSetLayoutBinding dslb = {};
+    std::vector<VkDescriptorSetLayoutBinding> dslb_vec = {};
+    VkDescriptorSetLayout ds_layout = VK_NULL_HANDLE;
+    VkDescriptorSetLayoutCreateInfo ds_layout_ci = {};
+    ds_layout_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
+    ds_layout_ci.pNext = NULL;
+    VkPipelineLayoutCreateInfo pipeline_layout_ci = {};
+    pipeline_layout_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+    pipeline_layout_ci.pNext = NULL;
+    pipeline_layout_ci.setLayoutCount = 1;
+    pipeline_layout_ci.pSetLayouts = &ds_layout;
+    VkPipelineLayout pipeline_layout = VK_NULL_HANDLE;
+
+    // VU 0fe00d1a - too many sampler type descriptors overall
+    dslb_vec.clear();
+    dslb.binding = 0;
+    dslb.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
+    dslb.descriptorCount = sum_samplers / 2;
+    dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
+    dslb.pImmutableSamplers = NULL;
+    dslb_vec.push_back(dslb);
+    dslb.binding = 1;
+    dslb.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+    dslb.descriptorCount = sum_samplers - dslb.descriptorCount + 1;
+    dslb.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+    dslb_vec.push_back(dslb);
+
+    ds_layout_ci.bindingCount = dslb_vec.size();
+    ds_layout_ci.pBindings = dslb_vec.data();
+    VkResult err = vk::CreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+    ASSERT_VK_SUCCESS(err);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01677");
+    if (dslb.descriptorCount > max_samplers) {
+        m_errorMonitor->SetDesiredFailureMsg(
+            VK_DEBUG_REPORT_ERROR_BIT_EXT,
+            "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00287");  // Expect max-per-stage samplers exceeds limits
+    }
+    if (dslb.descriptorCount > sum_sampled_images) {
+        m_errorMonitor->SetDesiredFailureMsg(
+            VK_DEBUG_REPORT_ERROR_BIT_EXT,
+            "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01682");  // Expect max overall sampled image count exceeds limits
+    }
+    if (dslb.descriptorCount > max_sampled_images) {
+        m_errorMonitor->SetDesiredFailureMsg(
+            VK_DEBUG_REPORT_ERROR_BIT_EXT,
+            "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00290");  // Expect max per-stage sampled image count exceeds limits
+    }
+    err = vk::CreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+    m_errorMonitor->VerifyFound();
+    vk::DestroyPipelineLayout(m_device->device(), pipeline_layout, NULL);  // Unnecessary but harmless if test passed
+    pipeline_layout = VK_NULL_HANDLE;
+    vk::DestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+
+    // VU 0fe00d1c - too many uniform buffer type descriptors overall
+    dslb_vec.clear();
+    dslb.binding = 0;
+    dslb.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    dslb.descriptorCount = sum_uniform_buffers + 1;
+    dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT;
+    dslb.pImmutableSamplers = NULL;
+    dslb_vec.push_back(dslb);
+
+    ds_layout_ci.bindingCount = dslb_vec.size();
+    ds_layout_ci.pBindings = dslb_vec.data();
+    err = vk::CreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+    ASSERT_VK_SUCCESS(err);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01678");
+    if (dslb.descriptorCount > max_uniform_buffers) {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00288");  // expect max-per-stage too
+    }
+    err = vk::CreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+    m_errorMonitor->VerifyFound();
+    vk::DestroyPipelineLayout(m_device->device(), pipeline_layout, NULL);  // Unnecessary but harmless if test passed
+    pipeline_layout = VK_NULL_HANDLE;
+    vk::DestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+
+    // VU 0fe00d1e - too many dynamic uniform buffer type descriptors overall
+    dslb_vec.clear();
+    dslb.binding = 0;
+    dslb.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
+    dslb.descriptorCount = sum_dyn_uniform_buffers + 1;
+    dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT;
+    dslb.pImmutableSamplers = NULL;
+    dslb_vec.push_back(dslb);
+
+    ds_layout_ci.bindingCount = dslb_vec.size();
+    ds_layout_ci.pBindings = dslb_vec.data();
+    err = vk::CreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+    ASSERT_VK_SUCCESS(err);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01679");
+    if (dslb.descriptorCount > max_uniform_buffers) {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00288");  // expect max-per-stage too
+    }
+    err = vk::CreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+    m_errorMonitor->VerifyFound();
+    vk::DestroyPipelineLayout(m_device->device(), pipeline_layout, NULL);  // Unnecessary but harmless if test passed
+    pipeline_layout = VK_NULL_HANDLE;
+    vk::DestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+
+    // VU 0fe00d20 - too many storage buffer type descriptors overall
+    dslb_vec.clear();
+    dslb.binding = 0;
+    dslb.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
+    dslb.descriptorCount = sum_storage_buffers + 1;
+    dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT;
+    dslb.pImmutableSamplers = NULL;
+    dslb_vec.push_back(dslb);
+
+    ds_layout_ci.bindingCount = dslb_vec.size();
+    ds_layout_ci.pBindings = dslb_vec.data();
+    err = vk::CreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+    ASSERT_VK_SUCCESS(err);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01680");
+    if (dslb.descriptorCount > max_storage_buffers) {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00289");  // expect max-per-stage too
+    }
+    err = vk::CreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+    m_errorMonitor->VerifyFound();
+    vk::DestroyPipelineLayout(m_device->device(), pipeline_layout, NULL);  // Unnecessary but harmless if test passed
+    pipeline_layout = VK_NULL_HANDLE;
+    vk::DestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+
+    // VU 0fe00d22 - too many dynamic storage buffer type descriptors overall
+    dslb_vec.clear();
+    dslb.binding = 0;
+    dslb.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC;
+    dslb.descriptorCount = sum_dyn_storage_buffers + 1;
+    dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT;
+    dslb.pImmutableSamplers = NULL;
+    dslb_vec.push_back(dslb);
+
+    ds_layout_ci.bindingCount = dslb_vec.size();
+    ds_layout_ci.pBindings = dslb_vec.data();
+    err = vk::CreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+    ASSERT_VK_SUCCESS(err);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01681");
+    if (dslb.descriptorCount > max_storage_buffers) {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00289");  // expect max-per-stage too
+    }
+    err = vk::CreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+    m_errorMonitor->VerifyFound();
+    vk::DestroyPipelineLayout(m_device->device(), pipeline_layout, NULL);  // Unnecessary but harmless if test passed
+    pipeline_layout = VK_NULL_HANDLE;
+    vk::DestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+
+    // VU 0fe00d24 - too many sampled image type descriptors overall
+    dslb_vec.clear();
+    dslb.binding = 0;
+    dslb.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+    dslb.descriptorCount = max_samplers;
+    dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
+    dslb.pImmutableSamplers = NULL;
+    dslb_vec.push_back(dslb);
+    dslb.binding = 1;
+    dslb.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
+    // revisit: not robust to odd limits.
+    uint32_t remaining = (max_samplers > sum_sampled_images ? 0 : (sum_sampled_images - max_samplers) / 2);
+    dslb.descriptorCount = 1 + remaining;
+    dslb.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+    dslb_vec.push_back(dslb);
+    dslb.binding = 2;
+    dslb.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
+    dslb.stageFlags = VK_SHADER_STAGE_COMPUTE_BIT;
+    dslb_vec.push_back(dslb);
+
+    ds_layout_ci.bindingCount = dslb_vec.size();
+    ds_layout_ci.pBindings = dslb_vec.data();
+    err = vk::CreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+    ASSERT_VK_SUCCESS(err);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01682");
+    if (std::max(dslb_vec[0].descriptorCount, dslb_vec[1].descriptorCount) > max_sampled_images) {
+        m_errorMonitor->SetDesiredFailureMsg(
+            VK_DEBUG_REPORT_ERROR_BIT_EXT,
+            "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00290");  // Expect max-per-stage sampled images to exceed limits
+    }
+    err = vk::CreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+    m_errorMonitor->VerifyFound();
+    vk::DestroyPipelineLayout(m_device->device(), pipeline_layout, NULL);  // Unnecessary but harmless if test passed
+    pipeline_layout = VK_NULL_HANDLE;
+    vk::DestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+
+    // VU 0fe00d26 - too many storage image type descriptors overall
+    dslb_vec.clear();
+    dslb.binding = 0;
+    dslb.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
+    dslb.descriptorCount = sum_storage_images / 2;
+    dslb.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
+    dslb.pImmutableSamplers = NULL;
+    dslb_vec.push_back(dslb);
+    dslb.binding = 1;
+    dslb.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
+    dslb.descriptorCount = sum_storage_images - dslb.descriptorCount + 1;
+    dslb.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+    dslb_vec.push_back(dslb);
+
+    ds_layout_ci.bindingCount = dslb_vec.size();
+    ds_layout_ci.pBindings = dslb_vec.data();
+    err = vk::CreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+    ASSERT_VK_SUCCESS(err);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01683");
+    if (dslb.descriptorCount > max_storage_images) {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00291");  // expect max-per-stage too
+    }
+    err = vk::CreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+    m_errorMonitor->VerifyFound();
+    vk::DestroyPipelineLayout(m_device->device(), pipeline_layout, NULL);  // Unnecessary but harmless if test passed
+    pipeline_layout = VK_NULL_HANDLE;
+    vk::DestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+
+    // VU 0fe00d28 - too many input attachment type descriptors overall
+    dslb_vec.clear();
+    dslb.binding = 0;
+    dslb.descriptorType = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
+    dslb.descriptorCount = sum_input_attachments + 1;
+    dslb.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+    dslb.pImmutableSamplers = NULL;
+    dslb_vec.push_back(dslb);
+
+    ds_layout_ci.bindingCount = dslb_vec.size();
+    ds_layout_ci.pBindings = dslb_vec.data();
+    err = vk::CreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+    ASSERT_VK_SUCCESS(err);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01684");
+    if (dslb.descriptorCount > max_input_attachments) {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01676");  // expect max-per-stage too
+    }
+    err = vk::CreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+    m_errorMonitor->VerifyFound();
+    vk::DestroyPipelineLayout(m_device->device(), pipeline_layout, NULL);  // Unnecessary but harmless if test passed
+    pipeline_layout = VK_NULL_HANDLE;
+    vk::DestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+}
+
+TEST_F(VkLayerTest, InvalidCmdBufferPipelineDestroyed) {
+    TEST_DESCRIPTION("Attempt to draw with a command buffer that is invalid due to a pipeline dependency being destroyed.");
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    {
+        // Use helper to create graphics pipeline
+        CreatePipelineHelper helper(*this);
+        helper.InitInfo();
+        helper.InitState();
+        helper.CreateGraphicsPipeline();
+
+        // Bind helper pipeline to command buffer
+        m_commandBuffer->begin();
+        vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, helper.pipeline_);
+        m_commandBuffer->end();
+
+        // pipeline will be destroyed when helper goes out of scope
+    }
+
+    // Cause error by submitting command buffer that references destroyed pipeline
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "UNASSIGNED-CoreValidation-DrawState-InvalidCommandBuffer-VkPipeline");
+    m_commandBuffer->QueueCommandBuffer(false);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InvalidPipeline) {
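+    // Fabricate a handle value that was never returned by the driver; every bind/draw/dispatch using it must be rejected.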
+    uint64_t fake_pipeline_handle = 0xbaad6001;
+    VkPipeline bad_pipeline = reinterpret_cast<VkPipeline &>(fake_pipeline_handle);
+
+    // Enable VK_KHR_draw_indirect_count for KHR variants
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME);
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    bool has_khr_indirect = DeviceExtensionEnabled(VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME);
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // Attempt to bind an invalid Pipeline to a valid Command Buffer
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdBindPipeline-pipeline-parameter");
+    m_commandBuffer->begin();
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, bad_pipeline);
+    m_errorMonitor->VerifyFound();
+
+    // Try each of the 6 flavors of Draw()
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);  // Draw*() calls must be submitted within a renderpass
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDraw-None-02700");
+    m_commandBuffer->Draw(1, 0, 0, 0);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndexed-None-02700");
+    m_commandBuffer->DrawIndexed(1, 1, 0, 0, 0);
+    m_errorMonitor->VerifyFound();
+
+    VkBufferObj buffer;
+    VkBufferCreateInfo ci = {};
+    ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    ci.usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
+    ci.size = 1024;
+    buffer.init(*m_device, ci);
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndirect-None-02700");
+    vk::CmdDrawIndirect(m_commandBuffer->handle(), buffer.handle(), 0, 1, 0);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndexedIndirect-None-02700");
+    vk::CmdDrawIndexedIndirect(m_commandBuffer->handle(), buffer.handle(), 0, 1, 0);
+    m_errorMonitor->VerifyFound();
+
+    if (has_khr_indirect) {
+        auto fpCmdDrawIndirectCountKHR =
+            (PFN_vkCmdDrawIndirectCountKHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdDrawIndirectCountKHR");
+        ASSERT_NE(fpCmdDrawIndirectCountKHR, nullptr);
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndirectCountKHR-None-02700");
+        // stride must be a multiple of 4 and must be greater than or equal to sizeof(VkDrawIndirectCommand)
+        fpCmdDrawIndirectCountKHR(m_commandBuffer->handle(), buffer.handle(), 0, buffer.handle(), 512, 1, 512);
+        m_errorMonitor->VerifyFound();
+
+        auto fpCmdDrawIndexedIndirectCountKHR =
+            (PFN_vkCmdDrawIndexedIndirectCountKHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdDrawIndexedIndirectCountKHR");
+        ASSERT_NE(fpCmdDrawIndexedIndirectCountKHR, nullptr);
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDrawIndexedIndirectCountKHR-None-02700");
+        // stride must be a multiple of 4 and must be greater than or equal to sizeof(VkDrawIndexedIndirectCommand)
+        fpCmdDrawIndexedIndirectCountKHR(m_commandBuffer->handle(), buffer.handle(), 0, buffer.handle(), 512, 1, 512);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Also try the Dispatch variants
+    vk::CmdEndRenderPass(m_commandBuffer->handle());  // Compute submissions must be outside a renderpass
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatch-None-02700");
+    vk::CmdDispatch(m_commandBuffer->handle(), 0, 0, 0);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatchIndirect-None-02700");
+    vk::CmdDispatchIndirect(m_commandBuffer->handle(), buffer.handle(), 0);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CmdDispatchExceedLimits) {
+    TEST_DESCRIPTION("Compute dispatch with dimensions that exceed device limits");
+
+    // Enable KHX device group extensions, if available
+    if (InstanceExtensionSupported(VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME);
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    bool khx_dg_ext_available = false;
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DEVICE_GROUP_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_DEVICE_GROUP_EXTENSION_NAME);
+        khx_dg_ext_available = true;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    uint32_t x_count_limit = m_device->props.limits.maxComputeWorkGroupCount[0];
+    uint32_t y_count_limit = m_device->props.limits.maxComputeWorkGroupCount[1];
+    uint32_t z_count_limit = m_device->props.limits.maxComputeWorkGroupCount[2];
+    if (std::max({x_count_limit, y_count_limit, z_count_limit}) == UINT32_MAX) {
+        printf("%s device maxComputeWorkGroupCount limit reports UINT32_MAX, test not possible, skipping.\n", kSkipPrefix);
+        return;
+    }
+
+    uint32_t x_size_limit = m_device->props.limits.maxComputeWorkGroupSize[0];
+    uint32_t y_size_limit = m_device->props.limits.maxComputeWorkGroupSize[1];
+    uint32_t z_size_limit = m_device->props.limits.maxComputeWorkGroupSize[2];
+
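+    // Build a compute shader whose declared LocalSize exceeds every maxComputeWorkGroupSize dimension by one.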
+    std::string spv_source = R"(
+        OpCapability Shader
+        OpMemoryModel Logical GLSL450
+        OpEntryPoint GLCompute %main "main"
+        OpExecutionMode %main LocalSize )";
+    spv_source.append(std::to_string(x_size_limit + 1) + " " + std::to_string(y_size_limit + 1) + " " +
+                      std::to_string(z_size_limit + 1));
+    spv_source.append(R"(
+        %void = OpTypeVoid
+           %3 = OpTypeFunction %void
+        %main = OpFunction %void None %3
+           %5 = OpLabel
+                OpReturn
+                OpFunctionEnd)");
+
+    CreateComputePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.cs_.reset(new VkShaderObj(m_device, spv_source, VK_SHADER_STAGE_COMPUTE_BIT, this));
+    pipe.InitState();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "exceeds device limit maxComputeWorkGroupSize[0]");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "exceeds device limit maxComputeWorkGroupSize[1]");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "exceeds device limit maxComputeWorkGroupSize[2]");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "features-limits-maxComputeWorkGroupInvocations");
+    pipe.CreateComputePipeline();
+    m_errorMonitor->VerifyFound();
+
+    // Create a minimal compute pipeline
+    x_size_limit = (x_size_limit > 1024) ? 1024 : x_size_limit;
+    y_size_limit = (y_size_limit > 1024) ? 1024 : y_size_limit;
+    z_size_limit = (z_size_limit > 64) ? 64 : z_size_limit;
+
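+    // Shrink each axis so that x * y * z also stays within maxComputeWorkGroupInvocations.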
+    uint32_t invocations_limit = m_device->props.limits.maxComputeWorkGroupInvocations;
+    x_size_limit = (x_size_limit > invocations_limit) ? invocations_limit : x_size_limit;
+    invocations_limit /= x_size_limit;
+    y_size_limit = (y_size_limit > invocations_limit) ? invocations_limit : y_size_limit;
+    invocations_limit /= y_size_limit;
+    z_size_limit = (z_size_limit > invocations_limit) ? invocations_limit : z_size_limit;
+
+    char cs_text[128] = "";
+    sprintf(cs_text, "#version 450\nlayout(local_size_x = %d, local_size_y = %d, local_size_z = %d) in;\nvoid main() {}\n",
+            x_size_limit, y_size_limit, z_size_limit);
+
+    pipe.cs_.reset(new VkShaderObj(m_device, cs_text, VK_SHADER_STAGE_COMPUTE_BIT, this));
+    pipe.CreateComputePipeline();
+
+    // Bind pipeline to command buffer
+    m_commandBuffer->begin();
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipe.pipeline_);
+
+    // Dispatch counts that exceed device limits
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatch-groupCountX-00386");
+    vk::CmdDispatch(m_commandBuffer->handle(), x_count_limit + 1, y_count_limit, z_count_limit);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatch-groupCountY-00387");
+    vk::CmdDispatch(m_commandBuffer->handle(), x_count_limit, y_count_limit + 1, z_count_limit);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatch-groupCountZ-00388");
+    vk::CmdDispatch(m_commandBuffer->handle(), x_count_limit, y_count_limit, z_count_limit + 1);
+    m_errorMonitor->VerifyFound();
+
+    if (khx_dg_ext_available) {
+        PFN_vkCmdDispatchBaseKHR fp_vkCmdDispatchBaseKHR =
+            (PFN_vkCmdDispatchBaseKHR)vk::GetInstanceProcAddr(instance(), "vkCmdDispatchBaseKHR");
+
+        // Base equals or exceeds limit
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatchBase-baseGroupX-00421");
+        fp_vkCmdDispatchBaseKHR(m_commandBuffer->handle(), x_count_limit, y_count_limit - 1, z_count_limit - 1, 0, 0, 0);
+        m_errorMonitor->VerifyFound();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatchBase-baseGroupX-00422");
+        fp_vkCmdDispatchBaseKHR(m_commandBuffer->handle(), x_count_limit - 1, y_count_limit, z_count_limit - 1, 0, 0, 0);
+        m_errorMonitor->VerifyFound();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatchBase-baseGroupZ-00423");
+        fp_vkCmdDispatchBaseKHR(m_commandBuffer->handle(), x_count_limit - 1, y_count_limit - 1, z_count_limit, 0, 0, 0);
+        m_errorMonitor->VerifyFound();
+
+        // (Base + count) exceeds limit
+        uint32_t x_base = x_count_limit / 2;
+        uint32_t y_base = y_count_limit / 2;
+        uint32_t z_base = z_count_limit / 2;
+        x_count_limit -= x_base;
+        y_count_limit -= y_base;
+        z_count_limit -= z_base;
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatchBase-groupCountX-00424");
+        fp_vkCmdDispatchBaseKHR(m_commandBuffer->handle(), x_base, y_base, z_base, x_count_limit + 1, y_count_limit, z_count_limit);
+        m_errorMonitor->VerifyFound();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatchBase-groupCountY-00425");
+        fp_vkCmdDispatchBaseKHR(m_commandBuffer->handle(), x_base, y_base, z_base, x_count_limit, y_count_limit + 1, z_count_limit);
+        m_errorMonitor->VerifyFound();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatchBase-groupCountZ-00426");
+        fp_vkCmdDispatchBaseKHR(m_commandBuffer->handle(), x_base, y_base, z_base, x_count_limit, y_count_limit, z_count_limit + 1);
+        m_errorMonitor->VerifyFound();
+    } else {
+        printf("%s KHX_DEVICE_GROUP_* extensions not supported, skipping CmdDispatchBaseKHR() tests.\n", kSkipPrefix);
+    }
+}
+
+TEST_F(VkLayerTest, InvalidPipelineCreateState) {
+    // Attempt to Create Gfx Pipeline w/o a VS
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    VkPipelineShaderStageCreateInfo shaderStage = fs.GetStageCreateInfo();  // deliberately the fragment stage, not vs.GetStageCreateInfo(), so no vertex shader is bound
+
+    auto set_info = [&](CreatePipelineHelper &helper) { helper.shader_stages_ = {shaderStage}; };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "Invalid Pipeline CreateInfo State: Vertex Shader required");
+
+    // Finally, check the string validation for the shader stage pName variable.  Correct the shader stage data, and bork the
+    // string before calling again
+    shaderStage = vs.GetStageCreateInfo();
+    const uint8_t cont_char = 0xf8;
+    char bad_string[] = {static_cast<char>(cont_char), static_cast<char>(cont_char), static_cast<char>(cont_char),
+                         static_cast<char>(cont_char)};
+    shaderStage.pName = bad_string;
+
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "contains invalid characters or is badly formed");
+}
+
+TEST_F(VkLayerTest, InvalidPipelineSampleRateFeatureDisable) {
+    // Enable sample shading in pipeline when the feature is disabled.
+    // Disable sampleRateShading here
+    VkPhysicalDeviceFeatures device_features = {};
+    device_features.sampleRateShading = VK_FALSE;
+
+    ASSERT_NO_FATAL_FAILURE(Init(&device_features));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // Cause the error by enabling sample shading...
+    auto set_shading_enable = [](CreatePipelineHelper &helper) { helper.pipe_ms_state_ci_.sampleShadingEnable = VK_TRUE; };
+    CreatePipelineHelper::OneshotTest(*this, set_shading_enable, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkPipelineMultisampleStateCreateInfo-sampleShadingEnable-00784");
+}
+
+TEST_F(VkLayerTest, InvalidPipelineSampleRateFeatureEnable) {
+    // Enable sample shading in pipeline when the feature is disabled.
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    // Require sampleRateShading here
+    VkPhysicalDeviceFeatures device_features = {};
+    ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
+    if (device_features.sampleRateShading == VK_FALSE) {
+        printf("%s SampleRateShading feature is disabled -- skipping related checks.\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState(&device_features));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    auto range_test = [this](float value, bool positive_test) {
+        auto info_override = [value](CreatePipelineHelper &helper) {
+            helper.pipe_ms_state_ci_.sampleShadingEnable = VK_TRUE;
+            helper.pipe_ms_state_ci_.minSampleShading = value;
+        };
+        CreatePipelineHelper::OneshotTest(*this, info_override, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                          "VUID-VkPipelineMultisampleStateCreateInfo-minSampleShading-00786", positive_test);
+    };
+
+    range_test(NearestSmaller(0.0F), false);
+    range_test(NearestGreater(1.0F), false);
+    range_test(0.0F, /* positive_test= */ true);
+    range_test(1.0F, /* positive_test= */ true);
+}
+
+TEST_F(VkLayerTest, InvalidPipelineSamplePNext) {
+    // Enable sample shading in pipeline when the feature is disabled.
+    // Check for VK_KHR_get_physical_device_properties2
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    // Set up the extension structs
+    auto sampleLocations = chain_util::Init<VkPipelineSampleLocationsStateCreateInfoEXT>();
+    sampleLocations.sampleLocationsInfo.sType = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT;
+    auto coverageToColor = chain_util::Init<VkPipelineCoverageToColorStateCreateInfoNV>();
+    auto coverageModulation = chain_util::Init<VkPipelineCoverageModulationStateCreateInfoNV>();
+    auto discriminatrix = [this](const char *name) { return DeviceExtensionSupported(gpu(), nullptr, name); };
+    chain_util::ExtensionChain chain(discriminatrix, &m_device_extension_names);
+    chain.Add(VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME, sampleLocations);
+    chain.Add(VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME, coverageToColor);
+    chain.Add(VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME, coverageModulation);
+    const void *extension_head = chain.Head();
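+    // extension_head is null when none of the chained extensions is available; the positive-path test below is then skipped.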
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    if (extension_head) {
+        auto good_chain = [extension_head](CreatePipelineHelper &helper) { helper.pipe_ms_state_ci_.pNext = extension_head; };
+        CreatePipelineHelper::OneshotTest(*this, good_chain, (VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT),
+                                          "No error", true);
+    } else {
+        printf("%s Required extension not present -- skipping positive checks.\n", kSkipPrefix);
+    }
+
+    auto instance_ci = chain_util::Init<VkInstanceCreateInfo>();
+    auto bad_chain = [&instance_ci](CreatePipelineHelper &helper) { helper.pipe_ms_state_ci_.pNext = &instance_ci; };
+    CreatePipelineHelper::OneshotTest(*this, bad_chain, VK_DEBUG_REPORT_WARNING_BIT_EXT,
+                                      "VUID-VkPipelineMultisampleStateCreateInfo-pNext-pNext");
+}
+
+TEST_F(VkLayerTest, CreateGraphicsPipelineWithBadBasePointer) {
+    TEST_DESCRIPTION("Create Graphics Pipeline with pointers that must be ignored by layers");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    m_depth_stencil_fmt = FindSupportedDepthStencilFormat(gpu());
+    ASSERT_TRUE(m_depth_stencil_fmt != 0);
+
+    m_depthStencil->Init(m_device, static_cast<int32_t>(m_width), static_cast<int32_t>(m_height), m_depth_stencil_fmt);
+
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget(m_depthStencil->BindInfo()));
+
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+
+    const VkPipelineVertexInputStateCreateInfo pipeline_vertex_input_state_create_info{
+        VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO, nullptr, 0, 0, nullptr, 0, nullptr};
+
+    const VkPipelineInputAssemblyStateCreateInfo pipeline_input_assembly_state_create_info{
+        VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, nullptr, 0, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, VK_FALSE};
+
+    const VkPipelineRasterizationStateCreateInfo pipeline_rasterization_state_create_info_template{
+        VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
+        nullptr,
+        0,
+        VK_FALSE,
+        VK_FALSE,
+        VK_POLYGON_MODE_FILL,
+        VK_CULL_MODE_NONE,
+        VK_FRONT_FACE_COUNTER_CLOCKWISE,
+        VK_FALSE,
+        0.0f,
+        0.0f,
+        0.0f,
+        1.0f};
+
+    VkPipelineLayout pipeline_layout;
+    auto pipeline_layout_create_info = lvl_init_struct<VkPipelineLayoutCreateInfo>();
+    VkResult err = vk::CreatePipelineLayout(m_device->device(), &pipeline_layout_create_info, nullptr, &pipeline_layout);
+    ASSERT_VK_SUCCESS(err);
+
+    VkPipelineRasterizationStateCreateInfo pipeline_rasterization_state_create_info =
+        pipeline_rasterization_state_create_info_template;
+    pipeline_rasterization_state_create_info.rasterizerDiscardEnable = VK_TRUE;
+
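+    // With VK_PIPELINE_CREATE_DERIVATIVE_BIT set and basePipelineIndex == -1, basePipelineHandle must be valid,
+    // so a fabricated handle should trigger the 00722 check below.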
+    uint64_t fake_pipeline_id = 0xCADECADE;
+    VkPipeline fake_pipeline_handle = reinterpret_cast<VkPipeline &>(fake_pipeline_id);
+
+    VkGraphicsPipelineCreateInfo graphics_pipeline_create_info{VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
+                                                               nullptr,
+                                                               VK_PIPELINE_CREATE_DERIVATIVE_BIT,
+                                                               1,
+                                                               &vs.GetStageCreateInfo(),
+                                                               &pipeline_vertex_input_state_create_info,
+                                                               &pipeline_input_assembly_state_create_info,
+                                                               nullptr,
+                                                               nullptr,
+                                                               &pipeline_rasterization_state_create_info,
+                                                               nullptr,
+                                                               nullptr,
+                                                               nullptr,
+                                                               nullptr,
+                                                               pipeline_layout,
+                                                               m_renderPass,
+                                                               0,
+                                                               fake_pipeline_handle,
+                                                               -1};
+
+    VkPipeline pipeline;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGraphicsPipelineCreateInfo-flags-00722");
+    vk::CreateGraphicsPipelines(m_device->handle(), VK_NULL_HANDLE, 1, &graphics_pipeline_create_info, nullptr, &pipeline);
+    m_errorMonitor->VerifyFound();
+
+    graphics_pipeline_create_info.basePipelineHandle = VK_NULL_HANDLE;
+    graphics_pipeline_create_info.basePipelineIndex = 6;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkGraphicsPipelineCreateInfo-flags-00723");
+    vk::CreateGraphicsPipelines(m_device->handle(), VK_NULL_HANDLE, 1, &graphics_pipeline_create_info, nullptr, &pipeline);
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyPipelineLayout(m_device->handle(), pipeline_layout, nullptr);
+}
+
+TEST_F(VkLayerTest, VertexAttributeDivisorExtension) {
+    TEST_DESCRIPTION("Test VUIDs added with VK_EXT_vertex_attribute_divisor extension.");
+
+    bool inst_ext = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (inst_ext) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    }
+    if (inst_ext && DeviceExtensionSupported(gpu(), nullptr, VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME);
+    } else {
+        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME);
+        return;
+    }
+
+    VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT vadf = {};
+    vadf.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT;
+    vadf.vertexAttributeInstanceRateDivisor = VK_TRUE;
+    vadf.vertexAttributeInstanceRateZeroDivisor = VK_TRUE;
+
+    VkPhysicalDeviceFeatures2 pd_features2 = {};
+    pd_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
+    pd_features2.pNext = &vadf;
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &pd_features2));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    const VkPhysicalDeviceLimits &dev_limits = m_device->props.limits;
+    VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT pdvad_props = {};
+    pdvad_props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT;
+    VkPhysicalDeviceProperties2 pd_props2 = {};
+    pd_props2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
+    pd_props2.pNext = &pdvad_props;
+    vk::GetPhysicalDeviceProperties2(gpu(), &pd_props2);
+
+    VkVertexInputBindingDivisorDescriptionEXT vibdd = {};
+    VkPipelineVertexInputDivisorStateCreateInfoEXT pvids_ci = {};
+    pvids_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT;
+    pvids_ci.vertexBindingDivisorCount = 1;
+    pvids_ci.pVertexBindingDivisors = &vibdd;
+    VkVertexInputBindingDescription vibd = {};
+    vibd.stride = 12;
+    vibd.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
+
+    if (pdvad_props.maxVertexAttribDivisor < pvids_ci.vertexBindingDivisorCount) {
+        printf("%sThis device does not support %d vertexBindingDivisors, skipping tests\n", kSkipPrefix,
+               pvids_ci.vertexBindingDivisorCount);
+        return;
+    }
+
+    using std::vector;
+    struct TestCase {
+        uint32_t div_binding;
+        uint32_t div_divisor;
+        uint32_t desc_binding;
+        VkVertexInputRate desc_rate;
+        vector<std::string> vuids;
+    };
+
+    // clang-format off
+    vector<TestCase> test_cases = {
+        {   0,
+            1,
+            0,
+            VK_VERTEX_INPUT_RATE_VERTEX,
+            {"VUID-VkVertexInputBindingDivisorDescriptionEXT-inputRate-01871"}
+        },
+        {   dev_limits.maxVertexInputBindings + 1,
+            1,
+            0,
+            VK_VERTEX_INPUT_RATE_INSTANCE,
+            {"VUID-VkVertexInputBindingDivisorDescriptionEXT-binding-01869",
+             "VUID-VkVertexInputBindingDivisorDescriptionEXT-inputRate-01871"}
+        }
+    };
+
+    if (UINT32_MAX != pdvad_props.maxVertexAttribDivisor) {  // Can't test overflow if maxVAD is UINT32_MAX
+        test_cases.push_back(
+            {   0,
+                pdvad_props.maxVertexAttribDivisor + 1,
+                0,
+                VK_VERTEX_INPUT_RATE_INSTANCE,
+                {"VUID-VkVertexInputBindingDivisorDescriptionEXT-divisor-01870"}
+            } );
+    }
+    // clang-format on
+
+    for (const auto &test_case : test_cases) {
+        const auto bad_divisor_state = [&test_case, &vibdd, &pvids_ci, &vibd](CreatePipelineHelper &helper) {
+            vibdd.binding = test_case.div_binding;
+            vibdd.divisor = test_case.div_divisor;
+            vibd.binding = test_case.desc_binding;
+            vibd.inputRate = test_case.desc_rate;
+            helper.vi_ci_.pNext = &pvids_ci;
+            helper.vi_ci_.vertexBindingDescriptionCount = 1;
+            helper.vi_ci_.pVertexBindingDescriptions = &vibd;
+        };
+        CreatePipelineHelper::OneshotTest(*this, bad_divisor_state, VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuids);
+    }
+}
+
+TEST_F(VkLayerTest, VertexAttributeDivisorDisabled) {
+    TEST_DESCRIPTION("Test instance divisor feature disabled for VK_EXT_vertex_attribute_divisor extension.");
+
+    bool inst_ext = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (inst_ext) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    }
+    if (inst_ext && DeviceExtensionSupported(gpu(), nullptr, VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME);
+    } else {
+        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME);
+        return;
+    }
+
+    VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT vadf = {};
+    vadf.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT;
+    vadf.vertexAttributeInstanceRateDivisor = VK_FALSE;
+    vadf.vertexAttributeInstanceRateZeroDivisor = VK_FALSE;
+    VkPhysicalDeviceFeatures2 pd_features2 = {};
+    pd_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
+    pd_features2.pNext = &vadf;
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &pd_features2));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT pdvad_props = {};
+    pdvad_props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT;
+    VkPhysicalDeviceProperties2 pd_props2 = {};
+    pd_props2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
+    pd_props2.pNext = &pdvad_props;
+    vk::GetPhysicalDeviceProperties2(gpu(), &pd_props2);
+
+    VkVertexInputBindingDivisorDescriptionEXT vibdd = {};
+    vibdd.binding = 0;
+    vibdd.divisor = 2;
+    VkPipelineVertexInputDivisorStateCreateInfoEXT pvids_ci = {};
+    pvids_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT;
+    pvids_ci.vertexBindingDivisorCount = 1;
+    pvids_ci.pVertexBindingDivisors = &vibdd;
+    VkVertexInputBindingDescription vibd = {};
+    vibd.binding = vibdd.binding;
+    vibd.stride = 12;
+    vibd.inputRate = VK_VERTEX_INPUT_RATE_INSTANCE;
+
+    if (pdvad_props.maxVertexAttribDivisor < pvids_ci.vertexBindingDivisorCount) {
+        printf("%sThis device does not support %d vertexBindingDivisors, skipping tests\n", kSkipPrefix,
+               pvids_ci.vertexBindingDivisorCount);
+        return;
+    }
+
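+    // vertexAttributeInstanceRateDivisor was disabled above, so a divisor of 2 on an
+    // instance-rate binding should trigger the 02229 VUID checked below.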
+    const auto instance_rate = [&pvids_ci, &vibd](CreatePipelineHelper &helper) {
+        helper.vi_ci_.pNext = &pvids_ci;
+        helper.vi_ci_.vertexBindingDescriptionCount = 1;
+        helper.vi_ci_.pVertexBindingDescriptions = &vibd;
+    };
+    CreatePipelineHelper::OneshotTest(*this, instance_rate, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkVertexInputBindingDivisorDescriptionEXT-vertexAttributeInstanceRateDivisor-02229");
+}
+
+TEST_F(VkLayerTest, VertexAttributeDivisorInstanceRateZero) {
+    TEST_DESCRIPTION("Test instanceRateZero feature of VK_EXT_vertex_attribute_divisor extension.");
+
+    bool inst_ext = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (inst_ext) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    }
+    if (inst_ext && DeviceExtensionSupported(gpu(), nullptr, VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME);
+    } else {
+        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME);
+        return;
+    }
+
+    VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT vadf = {};
+    vadf.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT;
+    vadf.vertexAttributeInstanceRateDivisor = VK_TRUE;
+    vadf.vertexAttributeInstanceRateZeroDivisor = VK_FALSE;
+    VkPhysicalDeviceFeatures2 pd_features2 = {};
+    pd_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
+    pd_features2.pNext = &vadf;
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &pd_features2));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkVertexInputBindingDivisorDescriptionEXT vibdd = {};
+    vibdd.binding = 0;
+    vibdd.divisor = 0;
+    VkPipelineVertexInputDivisorStateCreateInfoEXT pvids_ci = {};
+    pvids_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT;
+    pvids_ci.vertexBindingDivisorCount = 1;
+    pvids_ci.pVertexBindingDivisors = &vibdd;
+    VkVertexInputBindingDescription vibd = {};
+    vibd.binding = vibdd.binding;
+    vibd.stride = 12;
+    vibd.inputRate = VK_VERTEX_INPUT_RATE_INSTANCE;
+
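+    // A divisor of 0 requires the vertexAttributeInstanceRateZeroDivisor feature, which was
+    // deliberately left disabled, so the 02228 VUID checked below should fire.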
+    const auto instance_rate = [&pvids_ci, &vibd](CreatePipelineHelper &helper) {
+        helper.vi_ci_.pNext = &pvids_ci;
+        helper.vi_ci_.vertexBindingDescriptionCount = 1;
+        helper.vi_ci_.pVertexBindingDescriptions = &vibd;
+    };
+    CreatePipelineHelper::OneshotTest(
+        *this, instance_rate, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "VUID-VkVertexInputBindingDivisorDescriptionEXT-vertexAttributeInstanceRateZeroDivisor-02228");
+}
+
+/*// TODO : This test should be good, but needs Tess support in compiler to run
+TEST_F(VkLayerTest, InvalidPatchControlPoints)
+{
+    // Attempt to create a Gfx pipeline with patch topology but zero patchControlPoints
+    VkResult err;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "Invalid Pipeline CreateInfo State: VK_PRIMITIVE_TOPOLOGY_PATCH primitive ");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkDescriptorPoolSize ds_type_count = {};
+    ds_type_count.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    ds_type_count.descriptorCount = 1;
+
+    VkDescriptorPoolCreateInfo ds_pool_ci = {};
+    ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+    ds_pool_ci.pNext = NULL;
+    ds_pool_ci.poolSizeCount = 1;
+    ds_pool_ci.pPoolSizes = &ds_type_count;
+
+    VkDescriptorPool ds_pool;
+    err = vk::CreateDescriptorPool(m_device->device(), VK_DESCRIPTOR_POOL_USAGE_NON_FREE, 1, &ds_pool_ci, NULL, &ds_pool);
+    ASSERT_VK_SUCCESS(err);
+
+    VkDescriptorSetLayoutBinding dsl_binding = {};
+    dsl_binding.binding = 0;
+    dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    dsl_binding.descriptorCount = 1;
+    dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
+    dsl_binding.pImmutableSamplers = NULL;
+
+    VkDescriptorSetLayoutCreateInfo ds_layout_ci = {};
+    ds_layout_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
+    ds_layout_ci.pNext = NULL;
+    ds_layout_ci.bindingCount = 1;
+    ds_layout_ci.pBindings = &dsl_binding;
+
+    VkDescriptorSetLayout ds_layout;
+    err = vk::CreateDescriptorSetLayout(m_device->device(), &ds_layout_ci, NULL, &ds_layout);
+    ASSERT_VK_SUCCESS(err);
+
+    VkDescriptorSet descriptorSet;
+    err = vk::AllocateDescriptorSets(m_device->device(), ds_pool, VK_DESCRIPTOR_SET_USAGE_NON_FREE, 1, &ds_layout, &descriptorSet);
+    ASSERT_VK_SUCCESS(err);
+
+    VkPipelineLayoutCreateInfo pipeline_layout_ci = {};
+    pipeline_layout_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+    pipeline_layout_ci.pNext = NULL;
+    pipeline_layout_ci.setLayoutCount = 1;
+    pipeline_layout_ci.pSetLayouts = &ds_layout;
+
+    VkPipelineLayout pipeline_layout;
+    err = vk::CreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+    ASSERT_VK_SUCCESS(err);
+
+    VkPipelineShaderStageCreateInfo shaderStages[3];
+    memset(&shaderStages, 0, 3 * sizeof(VkPipelineShaderStageCreateInfo));
+
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    // Just using VS txt for Tess shaders as we don't care about functionality
+    VkShaderObj tc(m_device, bindStateVertShaderText, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
+    VkShaderObj te(m_device, bindStateVertShaderText, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this);
+
+    shaderStages[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+    shaderStages[0].stage = VK_SHADER_STAGE_VERTEX_BIT;
+    shaderStages[0].shader = vs.handle();
+    shaderStages[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+    shaderStages[1].stage = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT;
+    shaderStages[1].shader = tc.handle();
+    shaderStages[2].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+    shaderStages[2].stage = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
+    shaderStages[2].shader = te.handle();
+
+    VkPipelineInputAssemblyStateCreateInfo iaCI = {};
+    iaCI.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+    iaCI.topology = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST;
+
+    VkPipelineTessellationStateCreateInfo tsCI = {};
+    tsCI.sType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO;
+    tsCI.patchControlPoints = 0;  // This will cause an error
+
+    VkGraphicsPipelineCreateInfo gp_ci = {};
+    gp_ci.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
+    gp_ci.pNext = NULL;
+    gp_ci.stageCount = 3;
+    gp_ci.pStages = shaderStages;
+    gp_ci.pVertexInputState = NULL;
+    gp_ci.pInputAssemblyState = &iaCI;
+    gp_ci.pTessellationState = &tsCI;
+    gp_ci.pViewportState = NULL;
+    gp_ci.pRasterizationState = NULL;
+    gp_ci.pMultisampleState = NULL;
+    gp_ci.pDepthStencilState = NULL;
+    gp_ci.pColorBlendState = NULL;
+    gp_ci.flags = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT;
+    gp_ci.layout = pipeline_layout;
+    gp_ci.renderPass = renderPass();
+
+    VkPipelineCacheCreateInfo pc_ci = {};
+    pc_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
+    pc_ci.pNext = NULL;
+    pc_ci.initialSize = 0;
+    pc_ci.initialData = 0;
+    pc_ci.maxSize = 0;
+
+    VkPipeline pipeline;
+    VkPipelineCache pipelineCache;
+
+    err = vk::CreatePipelineCache(m_device->device(), &pc_ci, NULL, &pipelineCache);
+    ASSERT_VK_SUCCESS(err);
+    err = vk::CreateGraphicsPipelines(m_device->device(), pipelineCache, 1, &gp_ci, NULL, &pipeline);
+
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyPipelineCache(m_device->device(), pipelineCache, NULL);
+    vk::DestroyPipelineLayout(m_device->device(), pipeline_layout, NULL);
+    vk::DestroyDescriptorSetLayout(m_device->device(), ds_layout, NULL);
+    vk::DestroyDescriptorPool(m_device->device(), ds_pool, NULL);
+}
+*/
+
+TEST_F(VkLayerTest, PSOViewportStateTests) {
+    TEST_DESCRIPTION("Test VkPipelineViewportStateCreateInfo viewport and scissor count validation for non-multiViewport");
+
+    VkPhysicalDeviceFeatures features{};
+    ASSERT_NO_FATAL_FAILURE(Init(&features));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    const auto break_vp_state = [](CreatePipelineHelper &helper) {
+        helper.rs_state_ci_.rasterizerDiscardEnable = VK_FALSE;
+        helper.gp_ci_.pViewportState = nullptr;
+    };
+    CreatePipelineHelper::OneshotTest(*this, break_vp_state, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00750");
+
+    VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
+    VkViewport viewports[] = {viewport, viewport};
+    VkRect2D scissor = {{0, 0}, {64, 64}};
+    VkRect2D scissors[] = {scissor, scissor};
+
+    // test viewport and scissor arrays
+    using std::vector;
+    struct TestCase {
+        uint32_t viewport_count;
+        VkViewport *viewports;
+        uint32_t scissor_count;
+        VkRect2D *scissors;
+
+        vector<std::string> vuids;
+    };
+
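+    // Without multiViewport, viewportCount and scissorCount must each be 1 and must match, and
+    // static viewport/scissor state additionally requires non-null pViewports/pScissors.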
+    vector<TestCase> test_cases = {
+        {0,
+         viewports,
+         1,
+         scissors,
+         {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
+          "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+        {2,
+         viewports,
+         1,
+         scissors,
+         {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
+          "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+        {1,
+         viewports,
+         0,
+         scissors,
+         {"VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
+          "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+        {1,
+         viewports,
+         2,
+         scissors,
+         {"VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
+          "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+        {0,
+         viewports,
+         0,
+         scissors,
+         {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
+          "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217"}},
+        {2,
+         viewports,
+         2,
+         scissors,
+         {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
+          "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217"}},
+        {0,
+         viewports,
+         2,
+         scissors,
+         {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216", "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
+          "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+        {2,
+         viewports,
+         0,
+         scissors,
+         {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216", "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
+          "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+        {1, nullptr, 1, scissors, {"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747"}},
+        {1, viewports, 1, nullptr, {"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748"}},
+        {1,
+         nullptr,
+         1,
+         nullptr,
+         {"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747", "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748"}},
+        {2,
+         nullptr,
+         3,
+         nullptr,
+         {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216", "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
+          "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220", "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747",
+          "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748"}},
+        {0,
+         nullptr,
+         0,
+         nullptr,
+         {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
+          "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217"}},
+    };
+
+    for (const auto &test_case : test_cases) {
+        const auto break_vp = [&test_case](CreatePipelineHelper &helper) {
+            helper.vp_state_ci_.viewportCount = test_case.viewport_count;
+            helper.vp_state_ci_.pViewports = test_case.viewports;
+            helper.vp_state_ci_.scissorCount = test_case.scissor_count;
+            helper.vp_state_ci_.pScissors = test_case.scissors;
+        };
+        CreatePipelineHelper::OneshotTest(*this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuids);
+    }
+
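+    // With viewport and scissor made dynamic below, null pViewports/pScissors become legal, so
+    // these cases only expect the count-related VUIDs.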
+    vector<TestCase> dyn_test_cases = {
+        {0,
+         viewports,
+         1,
+         scissors,
+         {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
+          "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+        {2,
+         viewports,
+         1,
+         scissors,
+         {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
+          "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+        {1,
+         viewports,
+         0,
+         scissors,
+         {"VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
+          "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+        {1,
+         viewports,
+         2,
+         scissors,
+         {"VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
+          "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+        {0,
+         viewports,
+         0,
+         scissors,
+         {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
+          "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217"}},
+        {2,
+         viewports,
+         2,
+         scissors,
+         {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
+          "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217"}},
+        {0,
+         viewports,
+         2,
+         scissors,
+         {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216", "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
+          "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+        {2,
+         viewports,
+         0,
+         scissors,
+         {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216", "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
+          "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+        {2,
+         nullptr,
+         3,
+         nullptr,
+         {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216", "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217",
+          "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+        {0,
+         nullptr,
+         0,
+         nullptr,
+         {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216",
+          "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217"}},
+    };
+
+    const VkDynamicState dyn_states[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
+
+    for (const auto &test_case : dyn_test_cases) {
+        const auto break_vp = [&](CreatePipelineHelper &helper) {
+            VkPipelineDynamicStateCreateInfo dyn_state_ci = {};
+            dyn_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
+            dyn_state_ci.dynamicStateCount = size(dyn_states);
+            dyn_state_ci.pDynamicStates = dyn_states;
+            helper.dyn_state_ci_ = dyn_state_ci;
+
+            helper.vp_state_ci_.viewportCount = test_case.viewport_count;
+            helper.vp_state_ci_.pViewports = test_case.viewports;
+            helper.vp_state_ci_.scissorCount = test_case.scissor_count;
+            helper.vp_state_ci_.pScissors = test_case.scissors;
+        };
+        CreatePipelineHelper::OneshotTest(*this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuids);
+    }
+}
+
+// Set Extension dynamic states without enabling the required Extensions.
+TEST_F(VkLayerTest, ExtensionDynamicStatesSetWOExtensionEnabled) {
+    TEST_DESCRIPTION("Create a graphics pipeline with Extension dynamic states without enabling the required Extensions.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    using std::vector;
+    struct TestCase {
+        uint32_t dynamic_state_count;
+        VkDynamicState dynamic_state;
+
+        char const *errmsg;
+    };
+
+    vector<TestCase> dyn_test_cases = {
+        {1, VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV,
+         "contains VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV, but VK_NV_clip_space_w_scaling"},
+        {1, VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT,
+         "contains VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT, but VK_EXT_discard_rectangles"},
+        {1, VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, "contains VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, but VK_EXT_sample_locations"},
+    };
+
+    for (const auto &test_case : dyn_test_cases) {
+        VkDynamicState state[1];
+        state[0] = test_case.dynamic_state;
+        const auto break_vp = [&](CreatePipelineHelper &helper) {
+            VkPipelineDynamicStateCreateInfo dyn_state_ci = {};
+            dyn_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
+            dyn_state_ci.dynamicStateCount = test_case.dynamic_state_count;
+            dyn_state_ci.pDynamicStates = state;
+            helper.dyn_state_ci_ = dyn_state_ci;
+        };
+        CreatePipelineHelper::OneshotTest(*this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.errmsg);
+    }
+}
+
+TEST_F(VkLayerTest, PSOViewportStateMultiViewportTests) {
+    TEST_DESCRIPTION("Test VkPipelineViewportStateCreateInfo viewport and scissor count validation for multiViewport feature");
+
+    ASSERT_NO_FATAL_FAILURE(Init());  // enables all supported features
+
+    if (!m_device->phy().features().multiViewport) {
+        printf("%s VkPhysicalDeviceFeatures::multiViewport is not supported -- skipping test.\n", kSkipPrefix);
+        return;
+    }
+    // at least 16 viewports supported from here on
+
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
+    VkViewport viewports[] = {viewport, viewport};
+    VkRect2D scissor = {{0, 0}, {64, 64}};
+    VkRect2D scissors[] = {scissor, scissor};
+
+    using std::vector;
+    struct TestCase {
+        uint32_t viewport_count;
+        VkViewport *viewports;
+        uint32_t scissor_count;
+        VkRect2D *scissors;
+
+        vector<std::string> vuids;
+    };
+
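+    // With multiViewport enabled, counts up to maxViewports are allowed, but zero or mismatched
+    // counts (and null static pointers) are still errors.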
+    vector<TestCase> test_cases = {
+        {0,
+         viewports,
+         2,
+         scissors,
+         {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-arraylength",
+          "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+        {2,
+         viewports,
+         0,
+         scissors,
+         {"VUID-VkPipelineViewportStateCreateInfo-scissorCount-arraylength",
+          "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+        {0,
+         viewports,
+         0,
+         scissors,
+         {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-arraylength",
+          "VUID-VkPipelineViewportStateCreateInfo-scissorCount-arraylength"}},
+        {2, nullptr, 2, scissors, {"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747"}},
+        {2, viewports, 2, nullptr, {"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748"}},
+        {2,
+         nullptr,
+         2,
+         nullptr,
+         {"VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747", "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748"}},
+        {0,
+         nullptr,
+         0,
+         nullptr,
+         {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-arraylength",
+          "VUID-VkPipelineViewportStateCreateInfo-scissorCount-arraylength"}},
+    };
+
+    const auto max_viewports = m_device->phy().properties().limits.maxViewports;
+    const bool max_viewports_maxxed = max_viewports == std::numeric_limits<decltype(max_viewports)>::max();
+    if (max_viewports_maxxed) {
+        printf("%s VkPhysicalDeviceLimits::maxViewports is UINT32_MAX -- skipping part of test requiring to exceed maxViewports.\n",
+               kSkipPrefix);
+    } else {
+        const auto too_much_viewports = max_viewports + 1;
+        // avoid potentially big allocations by using only nullptr
+        test_cases.push_back({too_much_viewports,
+                              nullptr,
+                              2,
+                              scissors,
+                              {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01218",
+                               "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220",
+                               "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747"}});
+        test_cases.push_back({2,
+                              viewports,
+                              too_much_viewports,
+                              nullptr,
+                              {"VUID-VkPipelineViewportStateCreateInfo-scissorCount-01219",
+                               "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220",
+                               "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748"}});
+        test_cases.push_back(
+            {too_much_viewports,
+             nullptr,
+             too_much_viewports,
+             nullptr,
+             {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01218",
+              "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01219", "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747",
+              "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748"}});
+    }
+
+    for (const auto &test_case : test_cases) {
+        const auto break_vp = [&test_case](CreatePipelineHelper &helper) {
+            helper.vp_state_ci_.viewportCount = test_case.viewport_count;
+            helper.vp_state_ci_.pViewports = test_case.viewports;
+            helper.vp_state_ci_.scissorCount = test_case.scissor_count;
+            helper.vp_state_ci_.pScissors = test_case.scissors;
+        };
+        CreatePipelineHelper::OneshotTest(*this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuids);
+    }
+
+    vector<TestCase> dyn_test_cases = {
+        {0,
+         viewports,
+         2,
+         scissors,
+         {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-arraylength",
+          "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+        {2,
+         viewports,
+         0,
+         scissors,
+         {"VUID-VkPipelineViewportStateCreateInfo-scissorCount-arraylength",
+          "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}},
+        {0,
+         viewports,
+         0,
+         scissors,
+         {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-arraylength",
+          "VUID-VkPipelineViewportStateCreateInfo-scissorCount-arraylength"}},
+        {0,
+         nullptr,
+         0,
+         nullptr,
+         {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-arraylength",
+          "VUID-VkPipelineViewportStateCreateInfo-scissorCount-arraylength"}},
+    };
+
+    if (!max_viewports_maxxed) {
+        const auto too_much_viewports = max_viewports + 1;
+        // avoid potentially big allocations by using only nullptr
+        dyn_test_cases.push_back({too_much_viewports,
+                                  nullptr,
+                                  2,
+                                  scissors,
+                                  {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01218",
+                                   "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}});
+        dyn_test_cases.push_back({2,
+                                  viewports,
+                                  too_much_viewports,
+                                  nullptr,
+                                  {"VUID-VkPipelineViewportStateCreateInfo-scissorCount-01219",
+                                   "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220"}});
+        dyn_test_cases.push_back({too_much_viewports,
+                                  nullptr,
+                                  too_much_viewports,
+                                  nullptr,
+                                  {"VUID-VkPipelineViewportStateCreateInfo-viewportCount-01218",
+                                   "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01219"}});
+    }
+
+    const VkDynamicState dyn_states[] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
+
+    for (const auto &test_case : dyn_test_cases) {
+        const auto break_vp = [&](CreatePipelineHelper &helper) {
+            VkPipelineDynamicStateCreateInfo dyn_state_ci = {};
+            dyn_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
+            dyn_state_ci.dynamicStateCount = size(dyn_states);
+            dyn_state_ci.pDynamicStates = dyn_states;
+            helper.dyn_state_ci_ = dyn_state_ci;
+
+            helper.vp_state_ci_.viewportCount = test_case.viewport_count;
+            helper.vp_state_ci_.pViewports = test_case.viewports;
+            helper.vp_state_ci_.scissorCount = test_case.scissor_count;
+            helper.vp_state_ci_.pScissors = test_case.scissors;
+        };
+        CreatePipelineHelper::OneshotTest(*this, break_vp, VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuids);
+    }
+}
+
+TEST_F(VkLayerTest, DynViewportAndScissorUndefinedDrawState) {
+    TEST_DESCRIPTION("Test viewport and scissor dynamic state that is not set before draw");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // TODO: should also test on !multiViewport
+    if (!m_device->phy().features().multiViewport) {
+        printf("%s Device does not support multiple viewports/scissors; skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    const VkPipelineLayoutObj pipeline_layout(m_device);
+
+    VkPipelineObj pipeline_dyn_vp(m_device);
+    pipeline_dyn_vp.AddShader(&vs);
+    pipeline_dyn_vp.AddShader(&fs);
+    pipeline_dyn_vp.AddDefaultColorAttachment();
+    pipeline_dyn_vp.MakeDynamic(VK_DYNAMIC_STATE_VIEWPORT);
+    pipeline_dyn_vp.SetScissor(m_scissors);
+    ASSERT_VK_SUCCESS(pipeline_dyn_vp.CreateVKPipeline(pipeline_layout.handle(), m_renderPass));
+
+    VkPipelineObj pipeline_dyn_sc(m_device);
+    pipeline_dyn_sc.AddShader(&vs);
+    pipeline_dyn_sc.AddShader(&fs);
+    pipeline_dyn_sc.AddDefaultColorAttachment();
+    pipeline_dyn_sc.SetViewport(m_viewports);
+    pipeline_dyn_sc.MakeDynamic(VK_DYNAMIC_STATE_SCISSOR);
+    ASSERT_VK_SUCCESS(pipeline_dyn_sc.CreateVKPipeline(pipeline_layout.handle(), m_renderPass));
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "Dynamic viewport(s) 0 are used by pipeline state object, ");
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_dyn_vp.handle());
+    vk::CmdSetViewport(m_commandBuffer->handle(), 1, 1,
+                       &m_viewports[0]);  // Forgetting to set needed 0th viewport (PSO viewportCount == 1)
+    m_commandBuffer->Draw(1, 0, 0, 0);
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Dynamic scissor(s) 0 are used by pipeline state object, ");
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_dyn_sc.handle());
+    vk::CmdSetScissor(m_commandBuffer->handle(), 1, 1,
+                      &m_scissors[0]);  // Forgetting to set needed 0th scissor (PSO scissorCount == 1)
+    m_commandBuffer->Draw(1, 0, 0, 0);
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, PSOLineWidthInvalid) {
+    TEST_DESCRIPTION("Test non-1.0 lineWidth errors when pipeline is created and in vkCmdSetLineWidth");
+    VkPhysicalDeviceFeatures features{};
+    ASSERT_NO_FATAL_FAILURE(Init(&features));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    const std::vector<float> test_cases = {-1.0f, 0.0f, NearestSmaller(1.0f), NearestGreater(1.0f), NAN};
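+    // Init(&features) above leaves wideLines disabled, so any width other than exactly 1.0
+    // (including the nearest representable neighbors and NaN) should be rejected.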
+
+    // test VkPipelineRasterizationStateCreateInfo::lineWidth
+    for (const auto test_case : test_cases) {
+        const auto set_lineWidth = [&](CreatePipelineHelper &helper) { helper.rs_state_ci_.lineWidth = test_case; };
+        CreatePipelineHelper::OneshotTest(*this, set_lineWidth, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                          "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00749");
+    }
+
+    // test vk::CmdSetLineWidth
+    m_commandBuffer->begin();
+
+    for (const auto test_case : test_cases) {
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetLineWidth-lineWidth-00788");
+        vk::CmdSetLineWidth(m_commandBuffer->handle(), test_case);
+        m_errorMonitor->VerifyFound();
+    }
+}
+
+TEST_F(VkLayerTest, VUID_VkVertexInputBindingDescription_binding_00618) {
+    TEST_DESCRIPTION(
+        "Test VUID-VkVertexInputBindingDescription-binding-00618: binding must be less than "
+        "VkPhysicalDeviceLimits::maxVertexInputBindings");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // Test when binding is greater than or equal to VkPhysicalDeviceLimits::maxVertexInputBindings.
+    VkVertexInputBindingDescription vertex_input_binding_description{};
+    vertex_input_binding_description.binding = m_device->props.limits.maxVertexInputBindings;
+
+    const auto set_binding = [&](CreatePipelineHelper &helper) {
+        helper.vi_ci_.pVertexBindingDescriptions = &vertex_input_binding_description;
+        helper.vi_ci_.vertexBindingDescriptionCount = 1;
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_binding, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkVertexInputBindingDescription-binding-00618");
+}
+
+TEST_F(VkLayerTest, VUID_VkVertexInputBindingDescription_stride_00619) {
+    TEST_DESCRIPTION(
+        "Test VUID-VkVertexInputBindingDescription-stride-00619: stride must be less than or equal to "
+        "VkPhysicalDeviceLimits::maxVertexInputBindingStride");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // Test when stride is greater than VkPhysicalDeviceLimits::maxVertexInputBindingStride.
+    VkVertexInputBindingDescription vertex_input_binding_description{};
+    vertex_input_binding_description.stride = m_device->props.limits.maxVertexInputBindingStride + 1;
+
+    const auto set_binding = [&](CreatePipelineHelper &helper) {
+        helper.vi_ci_.pVertexBindingDescriptions = &vertex_input_binding_description;
+        helper.vi_ci_.vertexBindingDescriptionCount = 1;
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_binding, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkVertexInputBindingDescription-stride-00619");
+}
+
+TEST_F(VkLayerTest, VUID_VkVertexInputAttributeDescription_location_00620) {
+    TEST_DESCRIPTION(
+        "Test VUID-VkVertexInputAttributeDescription-location-00620: location must be less than "
+        "VkPhysicalDeviceLimits::maxVertexInputAttributes");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // Test when location is greater than or equal to VkPhysicalDeviceLimits::maxVertexInputAttributes.
+    VkVertexInputAttributeDescription vertex_input_attribute_description{};
+    vertex_input_attribute_description.location = m_device->props.limits.maxVertexInputAttributes;
+
+    const auto set_attribute = [&](CreatePipelineHelper &helper) {
+        helper.vi_ci_.pVertexAttributeDescriptions = &vertex_input_attribute_description;
+        helper.vi_ci_.vertexAttributeDescriptionCount = 1;
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_attribute, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      vector<string>{"VUID-VkVertexInputAttributeDescription-location-00620",
+                                                     "VUID-VkPipelineVertexInputStateCreateInfo-binding-00615"});
+}
+
+TEST_F(VkLayerTest, VUID_VkVertexInputAttributeDescription_binding_00621) {
+    TEST_DESCRIPTION(
+        "Test VUID-VkVertexInputAttributeDescription-binding-00621: binding must be less than "
+        "VkPhysicalDeviceLimits::maxVertexInputBindings");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // Test when binding is greater than or equal to VkPhysicalDeviceLimits::maxVertexInputBindings.
+    VkVertexInputAttributeDescription vertex_input_attribute_description{};
+    vertex_input_attribute_description.binding = m_device->props.limits.maxVertexInputBindings;
+
+    const auto set_attribute = [&](CreatePipelineHelper &helper) {
+        helper.vi_ci_.pVertexAttributeDescriptions = &vertex_input_attribute_description;
+        helper.vi_ci_.vertexAttributeDescriptionCount = 1;
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_attribute, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      vector<string>{"VUID-VkVertexInputAttributeDescription-binding-00621",
+                                                     "VUID-VkPipelineVertexInputStateCreateInfo-binding-00615"});
+}
+
+TEST_F(VkLayerTest, VUID_VkVertexInputAttributeDescription_offset_00622) {
+    TEST_DESCRIPTION(
+        "Test VUID-VkVertexInputAttributeDescription-offset-00622: offset must be less than or equal to "
+        "VkPhysicalDeviceLimits::maxVertexInputAttributeOffset");
+
+    EnableDeviceProfileLayer();
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
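+    // If the device reports no practical limit (0xFFFFFFFF), use the device_profile_api layer
+    // enabled above to artificially lower maxVertexInputAttributeOffset so the overflow case
+    // can still be exercised.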
+    uint32_t maxVertexInputAttributeOffset = 0;
+    {
+        VkPhysicalDeviceProperties device_props = {};
+        vk::GetPhysicalDeviceProperties(gpu(), &device_props);
+        maxVertexInputAttributeOffset = device_props.limits.maxVertexInputAttributeOffset;
+        if (maxVertexInputAttributeOffset == 0xFFFFFFFF) {
+            // Attempt to artificially lower maximum offset
+            PFN_vkSetPhysicalDeviceLimitsEXT fpvkSetPhysicalDeviceLimitsEXT =
+                (PFN_vkSetPhysicalDeviceLimitsEXT)vk::GetInstanceProcAddr(instance(), "vkSetPhysicalDeviceLimitsEXT");
+            if (!fpvkSetPhysicalDeviceLimitsEXT) {
+                printf("%s All offsets are valid & device_profile_api not found; skipped.\n", kSkipPrefix);
+                return;
+            }
+            device_props.limits.maxVertexInputAttributeOffset = device_props.limits.maxVertexInputBindingStride - 2;
+            fpvkSetPhysicalDeviceLimitsEXT(gpu(), &device_props.limits);
+            maxVertexInputAttributeOffset = device_props.limits.maxVertexInputAttributeOffset;
+        }
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkVertexInputBindingDescription vertex_input_binding_description{};
+    vertex_input_binding_description.binding = 0;
+    vertex_input_binding_description.stride = m_device->props.limits.maxVertexInputBindingStride;
+    vertex_input_binding_description.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
+    // Test when offset is greater than maximum.
+    VkVertexInputAttributeDescription vertex_input_attribute_description{};
+    vertex_input_attribute_description.format = VK_FORMAT_R8_UNORM;
+    vertex_input_attribute_description.offset = maxVertexInputAttributeOffset + 1;
+
+    const auto set_attribute = [&](CreatePipelineHelper &helper) {
+        helper.vi_ci_.pVertexBindingDescriptions = &vertex_input_binding_description;
+        helper.vi_ci_.vertexBindingDescriptionCount = 1;
+        helper.vi_ci_.pVertexAttributeDescriptions = &vertex_input_attribute_description;
+        helper.vi_ci_.vertexAttributeDescriptionCount = 1;
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_attribute, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkVertexInputAttributeDescription-offset-00622");
+}
+
+TEST_F(VkLayerTest, NumSamplesMismatch) {
+    // Create a command buffer where the pipeline's MSAA sample count doesn't match the render
+    // pass sampleCount
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Num samples mismatch! ");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    OneOffDescriptorSet descriptor_set(m_device, {
+                                                     {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                                 });
+
+    VkPipelineMultisampleStateCreateInfo pipe_ms_state_ci = {};
+    pipe_ms_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+    pipe_ms_state_ci.pNext = NULL;
+    pipe_ms_state_ci.rasterizationSamples = VK_SAMPLE_COUNT_4_BIT;
+    pipe_ms_state_ci.sampleShadingEnable = 0;
+    pipe_ms_state_ci.minSampleShading = 1.0;
+    pipe_ms_state_ci.pSampleMask = NULL;
+
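+    // The default render target is single-sampled, so forcing the pipeline to 4 samples creates
+    // the mismatch that should be flagged when the draw is recorded.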
+    const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});
+
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);  // We shouldn't need a fragment shader
+    // but add it to be able to run on more devices
+    VkPipelineObj pipe(m_device);
+    pipe.AddShader(&vs);
+    pipe.AddShader(&fs);
+    pipe.AddDefaultColorAttachment();
+    pipe.SetMSAA(&pipe_ms_state_ci);
+
+    m_errorMonitor->SetUnexpectedError("VUID-VkGraphicsPipelineCreateInfo-subpass-00757");
+    pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+
+    VkViewport viewport = {0, 0, 16, 16, 0, 1};
+    vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+    VkRect2D scissor = {{0, 0}, {16, 16}};
+    vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+
+    // Render triangle (the error should trigger on the attempt to draw).
+    m_commandBuffer->Draw(3, 1, 0, 0);
+
+    // Finalize recording of the command buffer
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, NumBlendAttachMismatch) {
+    // Create Pipeline where the number of blend attachments doesn't match the
+    // number of color attachments.  In this case, we don't add any color
+    // blend attachments even though we have a color attachment.
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkPipelineMultisampleStateCreateInfo pipe_ms_state_ci = {};
+    pipe_ms_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+    pipe_ms_state_ci.pNext = NULL;
+    pipe_ms_state_ci.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
+    pipe_ms_state_ci.sampleShadingEnable = 0;
+    pipe_ms_state_ci.minSampleShading = 1.0;
+    pipe_ms_state_ci.pSampleMask = NULL;
+
+    const auto set_MSAA = [&](CreatePipelineHelper &helper) {
+        helper.pipe_ms_state_ci_ = pipe_ms_state_ci;
+        helper.cb_ci_.attachmentCount = 0;
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_MSAA, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkGraphicsPipelineCreateInfo-attachmentCount-00746");
+}
+
+TEST_F(VkLayerTest, CmdClearAttachmentTests) {
+    TEST_DESCRIPTION("Various tests for validating usage of vkCmdClearAttachments");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+    // Main thing we care about for this test is that the VkImage obj we're
+    // clearing matches Color Attachment of FB
+    //  Also pass down other dummy params to keep driver and paramchecker happy
+    VkClearAttachment color_attachment;
+    color_attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    color_attachment.clearValue.color.float32[0] = 1.0;
+    color_attachment.clearValue.color.float32[1] = 1.0;
+    color_attachment.clearValue.color.float32[2] = 1.0;
+    color_attachment.clearValue.color.float32[3] = 1.0;
+    color_attachment.colorAttachment = 0;
+    VkClearRect clear_rect = {{{0, 0}, {(uint32_t)m_width, (uint32_t)m_height}}, 0, 1};
+
+    clear_rect.rect.extent.width = renderPassBeginInfo().renderArea.extent.width + 4;
+    clear_rect.rect.extent.height = clear_rect.rect.extent.height / 2;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearAttachments-pRects-00016");
+    vk::CmdClearAttachments(m_commandBuffer->handle(), 1, &color_attachment, 1, &clear_rect);
+    m_errorMonitor->VerifyFound();
+
+    // baseLayer >= view layers
+    clear_rect.rect.extent.width = (uint32_t)m_width;
+    clear_rect.baseArrayLayer = 1;
+    clear_rect.layerCount = 1;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearAttachments-pRects-00017");
+    vk::CmdClearAttachments(m_commandBuffer->handle(), 1, &color_attachment, 1, &clear_rect);
+    m_errorMonitor->VerifyFound();
+
+    // baseLayer + layerCount > view layers
+    clear_rect.rect.extent.width = (uint32_t)m_width;
+    clear_rect.baseArrayLayer = 0;
+    clear_rect.layerCount = 2;
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdClearAttachments-pRects-00017");
+    vk::CmdClearAttachments(m_commandBuffer->handle(), 1, &color_attachment, 1, &clear_rect);
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, VtxBufferBadIndex) {
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+                                         "UNASSIGNED-CoreValidation-DrawState-VtxIndexOutOfBounds");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkPipelineMultisampleStateCreateInfo pipe_ms_state_ci = {};
+    pipe_ms_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+    pipe_ms_state_ci.pNext = NULL;
+    pipe_ms_state_ci.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
+    pipe_ms_state_ci.sampleShadingEnable = 0;
+    pipe_ms_state_ci.minSampleShading = 1.0;
+    pipe_ms_state_ci.pSampleMask = NULL;
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.pipe_ms_state_ci_ = pipe_ms_state_ci;
+    pipe.InitState();
+    pipe.CreateGraphicsPipeline();
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
+    // Don't care about actual data, just need to get to draw to flag error
+    const float vbo_data[3] = {1.f, 0.f, 1.f};
+    VkConstantBufferObj vbo(m_device, sizeof(vbo_data), (const void *)&vbo_data, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT);
+    m_commandBuffer->BindVertexBuffer(&vbo, (VkDeviceSize)0, 1);  // VBO idx 1, but no VBO in PSO
+    m_commandBuffer->Draw(1, 0, 0, 0);
+
+    m_errorMonitor->VerifyFound();
+
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+}
+
+TEST_F(VkLayerTest, InvalidVertexBindingDescriptions) {
+    TEST_DESCRIPTION(
+        "Attempt to create a graphics pipeline where: "
+        "1) the count of vertex bindings exceeds the device's maxVertexInputBindings limit, and "
+        "2) the requested bindings include a duplicate binding value");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    const uint32_t binding_count = m_device->props.limits.maxVertexInputBindings + 1;
+
+    std::vector<VkVertexInputBindingDescription> input_bindings(binding_count);
+    for (uint32_t i = 0; i < binding_count; ++i) {
+        input_bindings[i].binding = i;
+        input_bindings[i].stride = 4;
+        input_bindings[i].inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
+    }
+    // Let the last binding description use same binding as the first one
+    input_bindings[binding_count - 1].binding = 0;
+
+    VkVertexInputAttributeDescription input_attrib;
+    input_attrib.binding = 0;
+    input_attrib.location = 0;
+    input_attrib.format = VK_FORMAT_R32G32B32_SFLOAT;
+    input_attrib.offset = 0;
+
+    const auto set_Info = [&](CreatePipelineHelper &helper) {
+        helper.vi_ci_.pVertexBindingDescriptions = input_bindings.data();
+        helper.vi_ci_.vertexBindingDescriptionCount = binding_count;
+        helper.vi_ci_.pVertexAttributeDescriptions = &input_attrib;
+        helper.vi_ci_.vertexAttributeDescriptionCount = 1;
+    };
+    CreatePipelineHelper::OneshotTest(
+        *this, set_Info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        vector<string>{"VUID-VkPipelineVertexInputStateCreateInfo-vertexBindingDescriptionCount-00613",
+                       "VUID-VkPipelineVertexInputStateCreateInfo-pVertexBindingDescriptions-00616"});
+}
+
+TEST_F(VkLayerTest, InvalidVertexAttributeDescriptions) {
+    TEST_DESCRIPTION(
+        "Attempt to create a graphics pipeline where: "
+        "1) the count of vertex attributes exceeds the device's maxVertexInputAttributes limit, "
+        "2) the requested locations include a duplicate location value, and "
+        "3) the binding used by one attribute is not defined by any binding description");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkVertexInputBindingDescription input_binding;
+    input_binding.binding = 0;
+    input_binding.stride = 4;
+    input_binding.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
+
+    const uint32_t attribute_count = m_device->props.limits.maxVertexInputAttributes + 1;
+    std::vector<VkVertexInputAttributeDescription> input_attribs(attribute_count);
+    for (uint32_t i = 0; i < attribute_count; ++i) {
+        input_attribs[i].binding = 0;
+        input_attribs[i].location = i;
+        input_attribs[i].format = VK_FORMAT_R32G32B32_SFLOAT;
+        input_attribs[i].offset = 0;
+    }
+    // Let the last input_attribs description use same location as the first one
+    input_attribs[attribute_count - 1].location = 0;
+    // Let the last input_attribs description use binding which is not defined
+    input_attribs[attribute_count - 1].binding = 1;
+
+    const auto set_Info = [&](CreatePipelineHelper &helper) {
+        helper.vi_ci_.pVertexBindingDescriptions = &input_binding;
+        helper.vi_ci_.vertexBindingDescriptionCount = 1;
+        helper.vi_ci_.pVertexAttributeDescriptions = input_attribs.data();
+        helper.vi_ci_.vertexAttributeDescriptionCount = attribute_count;
+    };
+    CreatePipelineHelper::OneshotTest(
+        *this, set_Info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        vector<string>{"VUID-VkPipelineVertexInputStateCreateInfo-vertexAttributeDescriptionCount-00614",
+                       "VUID-VkPipelineVertexInputStateCreateInfo-binding-00615",
+                       "VUID-VkPipelineVertexInputStateCreateInfo-pVertexAttributeDescriptions-00617"});
+}
+
+TEST_F(VkLayerTest, ColorBlendInvalidLogicOp) {
+    TEST_DESCRIPTION("Attempt to use invalid VkPipelineColorBlendStateCreateInfo::logicOp value.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());  // enables all supported features
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    if (!m_device->phy().features().logicOp) {
+        printf("%s Device does not support logicOp feature; skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    const auto set_shading_enable = [](CreatePipelineHelper &helper) {
+        helper.cb_ci_.logicOpEnable = VK_TRUE;
+        helper.cb_ci_.logicOp = static_cast<VkLogicOp>(VK_LOGIC_OP_END_RANGE + 1);  // invalid logicOp to be tested
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_shading_enable, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkPipelineColorBlendStateCreateInfo-logicOpEnable-00607");
+}
+
+TEST_F(VkLayerTest, ColorBlendUnsupportedLogicOp) {
+    TEST_DESCRIPTION("Attempt enabling VkPipelineColorBlendStateCreateInfo::logicOpEnable when logicOp feature is disabled.");
+
+    VkPhysicalDeviceFeatures features{};
+    ASSERT_NO_FATAL_FAILURE(Init(&features));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    const auto set_shading_enable = [](CreatePipelineHelper &helper) { helper.cb_ci_.logicOpEnable = VK_TRUE; };
+    CreatePipelineHelper::OneshotTest(*this, set_shading_enable, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkPipelineColorBlendStateCreateInfo-logicOpEnable-00606");
+}
+
+TEST_F(VkLayerTest, ColorBlendUnsupportedDualSourceBlend) {
+    TEST_DESCRIPTION("Attempt to use dual-source blending when dualSrcBlend feature is disabled.");
+
+    VkPhysicalDeviceFeatures features{};
+    ASSERT_NO_FATAL_FAILURE(Init(&features));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkPipelineColorBlendAttachmentState cb_attachments = {};
+
+    const auto set_dsb_src_color_enable = [&](CreatePipelineHelper &helper) { helper.cb_attachments_ = cb_attachments; };
+
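+    // Each case below uses one of the SRC1 blend factors, all of which require the dualSrcBlend
+    // feature that was disabled via the empty feature set.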
+    cb_attachments.blendEnable = VK_TRUE;
+    cb_attachments.srcColorBlendFactor = VK_BLEND_FACTOR_SRC1_COLOR;  // bad!
+    cb_attachments.dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR;
+    cb_attachments.colorBlendOp = VK_BLEND_OP_ADD;
+    cb_attachments.srcAlphaBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA;
+    cb_attachments.dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
+    cb_attachments.alphaBlendOp = VK_BLEND_OP_ADD;
+    CreatePipelineHelper::OneshotTest(*this, set_dsb_src_color_enable, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkPipelineColorBlendAttachmentState-srcColorBlendFactor-00608");
+
+    cb_attachments.blendEnable = VK_TRUE;
+    cb_attachments.srcColorBlendFactor = VK_BLEND_FACTOR_SRC_COLOR;
+    cb_attachments.dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR;  // bad
+    cb_attachments.colorBlendOp = VK_BLEND_OP_ADD;
+    cb_attachments.srcAlphaBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA;
+    cb_attachments.dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
+    cb_attachments.alphaBlendOp = VK_BLEND_OP_ADD;
+    CreatePipelineHelper::OneshotTest(*this, set_dsb_src_color_enable, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkPipelineColorBlendAttachmentState-dstColorBlendFactor-00609");
+
+    cb_attachments.blendEnable = VK_TRUE;
+    cb_attachments.srcColorBlendFactor = VK_BLEND_FACTOR_SRC_COLOR;
+    cb_attachments.dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR;
+    cb_attachments.colorBlendOp = VK_BLEND_OP_ADD;
+    cb_attachments.srcAlphaBlendFactor = VK_BLEND_FACTOR_SRC1_ALPHA;  // bad
+    cb_attachments.dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
+    cb_attachments.alphaBlendOp = VK_BLEND_OP_ADD;
+    CreatePipelineHelper::OneshotTest(*this, set_dsb_src_color_enable, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkPipelineColorBlendAttachmentState-srcAlphaBlendFactor-00610");
+
+    cb_attachments.blendEnable = VK_TRUE;
+    cb_attachments.srcColorBlendFactor = VK_BLEND_FACTOR_SRC_COLOR;
+    cb_attachments.dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR;
+    cb_attachments.colorBlendOp = VK_BLEND_OP_ADD;
+    cb_attachments.srcAlphaBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA;
+    cb_attachments.dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA;  // bad!
+    cb_attachments.alphaBlendOp = VK_BLEND_OP_ADD;
+    CreatePipelineHelper::OneshotTest(*this, set_dsb_src_color_enable, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkPipelineColorBlendAttachmentState-dstAlphaBlendFactor-00611");
+}
+
+TEST_F(VkLayerTest, InvalidSPIRVCodeSize) {
+    TEST_DESCRIPTION("Test that errors are produced for a spirv modules with invalid code sizes");
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Invalid SPIR-V header");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkShaderModule module;
+    VkShaderModuleCreateInfo moduleCreateInfo;
+    struct icd_spv_header spv;
+
+    spv.magic = ICD_SPV_MAGIC;
+    spv.version = ICD_SPV_VERSION;
+    spv.gen_magic = 0;
+
+    moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
+    moduleCreateInfo.pNext = NULL;
+    moduleCreateInfo.pCode = (const uint32_t *)&spv;
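+    // A codeSize of only 4 bytes cannot hold a complete SPIR-V header.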
+    moduleCreateInfo.codeSize = 4;
+    moduleCreateInfo.flags = 0;
+    vk::CreateShaderModule(m_device->device(), &moduleCreateInfo, NULL, &module);
+
+    m_errorMonitor->VerifyFound();
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkShaderModuleCreateInfo-pCode-01376");
+    std::vector<unsigned int> shader;
+    VkShaderModuleCreateInfo module_create_info;
+    VkShaderModule shader_module;
+    module_create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
+    module_create_info.pNext = NULL;
+    this->GLSLtoSPV(&m_device->props.limits, VK_SHADER_STAGE_VERTEX_BIT, bindStateVertShaderText, shader);
+    module_create_info.pCode = shader.data();
+    // Introduce failure by making codeSize a non-multiple of 4
+    module_create_info.codeSize = shader.size() * sizeof(unsigned int) - 1;
+    module_create_info.flags = 0;
+    vk::CreateShaderModule(m_device->handle(), &module_create_info, NULL, &shader_module);
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, InvalidSPIRVMagic) {
+    TEST_DESCRIPTION("Test that an error is produced for a spirv module with a bad magic number");
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Invalid SPIR-V magic number");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkShaderModule module;
+    VkShaderModuleCreateInfo moduleCreateInfo;
+    struct icd_spv_header spv;
+
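+    // Invert the magic number so the module header no longer identifies valid SPIR-V.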
+    spv.magic = (uint32_t)~ICD_SPV_MAGIC;
+    spv.version = ICD_SPV_VERSION;
+    spv.gen_magic = 0;
+
+    moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
+    moduleCreateInfo.pNext = NULL;
+    moduleCreateInfo.pCode = (const uint32_t *)&spv;
+    moduleCreateInfo.codeSize = sizeof(spv) + 16;
+    moduleCreateInfo.flags = 0;
+    vk::CreateShaderModule(m_device->device(), &moduleCreateInfo, NULL, &module);
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineVertexOutputNotConsumed) {
+    TEST_DESCRIPTION("Test that a warning is produced for a vertex output that is not consumed by the fragment stage");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    char const *vsSource =
+        "#version 450\n"
+        "layout(location=0) out float x;\n"
+        "void main(){\n"
+        "   gl_Position = vec4(1);\n"
+        "   x = 0;\n"
+        "}\n";
+    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.shader_stages_ = {vs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+                                      "not consumed by fragment shader");
+}
+
+TEST_F(VkLayerTest, CreatePipelineCheckShaderSpecializationApplied) {
+    TEST_DESCRIPTION(
+        "Make sure specialization constants get applied during shader validation by using a value that breaks compilation.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // Size an array using a specialization constant of default value equal to 1.
+    std::string const fs_src = R"(
+               OpCapability Shader
+          %1 = OpExtInstImport "GLSL.std.450"
+               OpMemoryModel Logical GLSL450
+               OpEntryPoint Fragment %main "main"
+               OpExecutionMode %main OriginUpperLeft
+               OpSource GLSL 450
+               OpName %main "main"
+               OpName %size "size"
+               OpName %array "array"
+               OpDecorate %size SpecId 0
+       %void = OpTypeVoid
+          %3 = OpTypeFunction %void
+      %float = OpTypeFloat 32
+        %int = OpTypeInt 32 1
+       %size = OpSpecConstant %int 1
+%_arr_float_size = OpTypeArray %float %size
+%_ptr_Function__arr_float_size = OpTypePointer Function %_arr_float_size
+      %int_0 = OpConstant %int 0
+    %float_0 = OpConstant %float 0
+%_ptr_Function_float = OpTypePointer Function %float
+       %main = OpFunction %void None %3
+          %5 = OpLabel
+      %array = OpVariable %_ptr_Function__arr_float_size Function
+         %15 = OpAccessChain %_ptr_Function_float %array %int_0
+               OpStore %15 %float_0
+               OpReturn
+               OpFunctionEnd)";
+    VkShaderObj fs(m_device, fs_src, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    // Set the specialization constant to 0.
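+    // With the constant set to 0, the OpTypeArray above becomes zero-length, which is invalid SPIR-V and should fail shader validation.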
+    const VkSpecializationMapEntry entry = {
+        0,                // id
+        0,                // offset
+        sizeof(uint32_t)  // size
+    };
+    uint32_t data = 0;
+    const VkSpecializationInfo specialization_info = {
+        1,                     // mapEntryCount
+        &entry,                // pMapEntries
+        1 * sizeof(uint32_t),  // dataSize
+        &data,                 // pData
+    };
+
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
+        helper.shader_stages_[1].pSpecializationInfo = &specialization_info;
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "does not contain valid spirv");
+}
+
+TEST_F(VkLayerTest, CreatePipelineCheckShaderBadSpecializationOffsetOutOfBounds) {
+    TEST_DESCRIPTION("Challenge core_validation with shader validation issues related to vkCreateGraphicsPipelines.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    char const *fsSource =
+        "#version 450\n"
+        "layout (constant_id = 0) const float r = 0.0f;\n"
+        "layout(location = 0) out vec4 uFragColor;\n"
+        "void main(){\n"
+        "   uFragColor = vec4(r,1,0,1);\n"
+        "}\n";
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    // Entry offset is greater than dataSize.
+    const VkSpecializationMapEntry entry = {0, 5, sizeof(uint32_t)};
+
+    uint32_t data = 1;
+    const VkSpecializationInfo specialization_info = {
+        1,
+        &entry,
+        1 * sizeof(float),
+        &data,
+    };
+
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
+        helper.shader_stages_[1].pSpecializationInfo = &specialization_info;
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkSpecializationInfo-offset-00773");
+}
+
+TEST_F(VkLayerTest, CreatePipelineCheckShaderBadSpecializationSizeOutOfBounds) {
+    TEST_DESCRIPTION("Challenge core_validation with shader validation issues related to vkCreateGraphicsPipelines.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    char const *fsSource =
+        "#version 450\n"
+        "layout (constant_id = 0) const float r = 0.0f;\n"
+        "layout(location = 0) out vec4 uFragColor;\n"
+        "void main(){\n"
+        "   uFragColor = vec4(r,1,0,1);\n"
+        "}\n";
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    // Entry size is greater than dataSize minus offset.
+    const VkSpecializationMapEntry entry = {0, 3, sizeof(uint32_t)};
+
+    uint32_t data = 1;
+    const VkSpecializationInfo specialization_info = {
+        1,
+        &entry,
+        1 * sizeof(float),
+        &data,
+    };
+
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
+        helper.shader_stages_[1].pSpecializationInfo = &specialization_info;
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkSpecializationInfo-pMapEntries-00774");
+}
+
+TEST_F(VkLayerTest, CreatePipelineCheckShaderDescriptorTypeMismatch) {
+    TEST_DESCRIPTION("Challenge core_validation with shader validation issues related to vkCreateGraphicsPipelines.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    OneOffDescriptorSet descriptor_set(m_device, {
+                                                     {0, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                                 });
+
+    char const *vsSource =
+        "#version 450\n"
+        "\n"
+        "layout (std140, set = 0, binding = 0) uniform buf {\n"
+        "    mat4 mvp;\n"
+        "} ubuf;\n"
+        "void main(){\n"
+        "   gl_Position = ubuf.mvp * vec4(1);\n"
+        "}\n";
+
+    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.shader_stages_ = {vs.GetStageCreateInfo(), pipe.fs_->GetStageCreateInfo()};
+    pipe.InitState();
+    pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&descriptor_set.layout_});
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Type mismatch on descriptor slot 0.0 ");
+    pipe.CreateGraphicsPipeline();
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineCheckShaderDescriptorNotAccessible) {
+    TEST_DESCRIPTION(
+        "Create a pipeline in which a descriptor used by a shader stage does not include that stage in its stageFlags.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    OneOffDescriptorSet ds(m_device, {
+                                         {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT /*!*/, nullptr},
+                                     });
+
+    char const *vsSource =
+        "#version 450\n"
+        "\n"
+        "layout (std140, set = 0, binding = 0) uniform buf {\n"
+        "    mat4 mvp;\n"
+        "} ubuf;\n"
+        "void main(){\n"
+        "   gl_Position = ubuf.mvp * vec4(1);\n"
+        "}\n";
+
+    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.shader_stages_ = {vs.GetStageCreateInfo(), pipe.fs_->GetStageCreateInfo()};
+    pipe.InitState();
+    pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&ds.layout_});
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Shader uses descriptor slot 0.0 ");
+    pipe.CreateGraphicsPipeline();
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineCheckShaderPushConstantNotDeclared) {
+    TEST_DESCRIPTION(
+        "Create a graphics pipeline in which a push constant range containing a push constant block member is not declared in the "
+        "layout.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    char const *vsSource =
+        "#version 450\n"
+        "\n"
+        "layout(push_constant, std430) uniform foo { float x; } consts;\n"
+        "void main(){\n"
+        "   gl_Position = vec4(consts.x);\n"
+        "}\n";
+
+    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+
+    // Set up a push constant range
+    VkPushConstantRange push_constant_range = {};
+    // Set to the wrong stage to challenge core_validation
+    push_constant_range.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+    push_constant_range.size = 4;
+
+    const VkPipelineLayoutObj pipeline_layout(m_device, {}, {push_constant_range});
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.shader_stages_ = {vs.GetStageCreateInfo(), pipe.fs_->GetStageCreateInfo()};
+    pipe.InitState();
+    pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {}, {push_constant_range});
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "Push constant range covering variable starting at offset 0 not declared in layout");
+    pipe.CreateGraphicsPipeline();
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineCheckShaderNotEnabled) {
+    TEST_DESCRIPTION(
+        "Create a graphics pipeline in which a capability declared by the shader requires a feature not enabled on the device.");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    // Some awkward steps are required to test with custom device features.
+    VkPhysicalDeviceFeatures device_features = {};
+    // Disable support for 64 bit floats
+    device_features.shaderFloat64 = false;
+    // The sacrificial device object
+    ASSERT_NO_FATAL_FAILURE(InitState(&device_features));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    char const *fsSource =
+        "#version 450\n"
+        "\n"
+        "layout(location=0) out vec4 color;\n"
+        "void main(){\n"
+        "   dvec4 green = vec4(0.0, 1.0, 0.0, 1.0);\n"
+        "   color = vec4(green);\n"
+        "}\n";
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.shader_stages_ = {pipe.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
+    pipe.InitState();
+    pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device);
+
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT, "Shader requires VkPhysicalDeviceFeatures::shaderFloat64 but is not enabled on the device");
+    pipe.CreateGraphicsPipeline();
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreateShaderModuleCheckBadCapability) {
+    TEST_DESCRIPTION("Create a shader in which a capability declared by the shader is not supported.");
+    // Note that this failure message comes from spirv-tools, specifically the validator.
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    const std::string spv_source = R"(
+                  OpCapability ImageRect
+                  OpEntryPoint Vertex %main "main"
+          %main = OpFunction %void None %3
+                  OpReturn
+                  OpFunctionEnd
+        )";
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Capability ImageRect is not allowed by Vulkan");
+
+    std::vector<unsigned int> spv;
+    VkShaderModuleCreateInfo module_create_info;
+    VkShaderModule shader_module;
+    module_create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
+    module_create_info.pNext = NULL;
+    ASMtoSPV(SPV_ENV_VULKAN_1_0, 0, spv_source.data(), spv);
+    module_create_info.pCode = spv.data();
+    module_create_info.codeSize = spv.size() * sizeof(unsigned int);
+    module_create_info.flags = 0;
+
+    VkResult err = vk::CreateShaderModule(m_device->handle(), &module_create_info, NULL, &shader_module);
+    m_errorMonitor->VerifyFound();
+    if (err == VK_SUCCESS) {
+        vk::DestroyShaderModule(m_device->handle(), shader_module, NULL);
+    }
+}
+
+TEST_F(VkLayerTest, CreatePipelineFragmentInputNotProvided) {
+    TEST_DESCRIPTION(
+        "Test that an error is produced for a fragment shader input which is not present in the outputs of the previous stage");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    char const *fsSource =
+        "#version 450\n"
+        "\n"
+        "layout(location=0) in float x;\n"
+        "layout(location=0) out vec4 color;\n"
+        "void main(){\n"
+        "   color = vec4(x);\n"
+        "}\n";
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "not written by vertex shader");
+}
+
+TEST_F(VkLayerTest, CreatePipelineFragmentInputNotProvidedInBlock) {
+    TEST_DESCRIPTION(
+        "Test that an error is produced for a fragment shader input within an interace block, which is not present in the outputs "
+        "of the previous stage.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    char const *fsSource =
+        "#version 450\n"
+        "\n"
+        "in block { layout(location=0) float x; } ins;\n"
+        "layout(location=0) out vec4 color;\n"
+        "void main(){\n"
+        "   color = vec4(ins.x);\n"
+        "}\n";
+
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "not written by vertex shader");
+}
+
+TEST_F(VkLayerTest, CreatePipelineVsFsTypeMismatchArraySize) {
+    TEST_DESCRIPTION("Test that an error is produced for mismatched array sizes across the vertex->fragment shader interface");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    char const *vsSource =
+        "#version 450\n"
+        "\n"
+        "layout(location=0) out float x[2];\n"
+        "void main(){\n"
+        "   x[0] = 0; x[1] = 0;\n"
+        "   gl_Position = vec4(1);\n"
+        "}\n";
+    char const *fsSource =
+        "#version 450\n"
+        "\n"
+        "layout(location=0) in float x[1];\n"
+        "layout(location=0) out vec4 color;\n"
+        "void main(){\n"
+        "   color = vec4(x[0]);\n"
+        "}\n";
+
+    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
+    };
+    CreatePipelineHelper::OneshotTest(
+        *this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "Type mismatch on location 0.0: 'ptr to output arr[2] of float32' vs 'ptr to input arr[1] of float32'");
+}
+
+TEST_F(VkLayerTest, CreatePipelineVsFsTypeMismatch) {
+    TEST_DESCRIPTION("Test that an error is produced for mismatched types across the vertex->fragment shader interface");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    char const *vsSource =
+        "#version 450\n"
+        "\n"
+        "layout(location=0) out int x;\n"
+        "void main(){\n"
+        "   x = 0;\n"
+        "   gl_Position = vec4(1);\n"
+        "}\n";
+    char const *fsSource =
+        "#version 450\n"
+        "\n"
+        "layout(location=0) in float x;\n" /* VS writes int */
+        "layout(location=0) out vec4 color;\n"
+        "void main(){\n"
+        "   color = vec4(x);\n"
+        "}\n";
+
+    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "Type mismatch on location 0");
+}
+
+TEST_F(VkLayerTest, CreatePipelineVsFsTypeMismatchInBlock) {
+    TEST_DESCRIPTION(
+        "Test that an error is produced for mismatched types across the vertex->fragment shader interface, when the variable is "
+        "contained within an interface block");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    char const *vsSource =
+        "#version 450\n"
+        "\n"
+        "out block { layout(location=0) int x; } outs;\n"
+        "void main(){\n"
+        "   outs.x = 0;\n"
+        "   gl_Position = vec4(1);\n"
+        "}\n";
+    char const *fsSource =
+        "#version 450\n"
+        "\n"
+        "in block { layout(location=0) float x; } ins;\n" /* VS writes int */
+        "layout(location=0) out vec4 color;\n"
+        "void main(){\n"
+        "   color = vec4(ins.x);\n"
+        "}\n";
+
+    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "Type mismatch on location 0");
+}
+
+TEST_F(VkLayerTest, CreatePipelineVsFsMismatchByLocation) {
+    TEST_DESCRIPTION(
+        "Test that an error is produced for location mismatches across the vertex->fragment shader interface; This should manifest "
+        "as a not-written/not-consumed pair, but flushes out broken walking of the interfaces");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    char const *vsSource =
+        "#version 450\n"
+        "\n"
+        "out block { layout(location=1) float x; } outs;\n"
+        "void main(){\n"
+        "   outs.x = 0;\n"
+        "   gl_Position = vec4(1);\n"
+        "}\n";
+    char const *fsSource =
+        "#version 450\n"
+        "\n"
+        "in block { layout(location=0) float x; } ins;\n"
+        "layout(location=0) out vec4 color;\n"
+        "void main(){\n"
+        "   color = vec4(ins.x);\n"
+        "}\n";
+
+    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "location 0.0 which is not written by vertex shader");
+}
+
+TEST_F(VkLayerTest, CreatePipelineVsFsMismatchByComponent) {
+    TEST_DESCRIPTION(
+        "Test that an error is produced for component mismatches across the vertex->fragment shader interface. It's not enough to "
+        "have the same set of locations in use; matching is defined in terms of spirv variables.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    char const *vsSource =
+        "#version 450\n"
+        "\n"
+        "out block { layout(location=0, component=0) float x; } outs;\n"
+        "void main(){\n"
+        "   outs.x = 0;\n"
+        "   gl_Position = vec4(1);\n"
+        "}\n";
+    char const *fsSource =
+        "#version 450\n"
+        "\n"
+        "in block { layout(location=0, component=1) float x; } ins;\n"
+        "layout(location=0) out vec4 color;\n"
+        "void main(){\n"
+        "   color = vec4(ins.x);\n"
+        "}\n";
+
+    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "location 0.1 which is not written by vertex shader");
+}
+
+TEST_F(VkLayerTest, CreatePipelineVsFsMismatchByPrecision) {
+    TEST_DESCRIPTION("Test that the RelaxedPrecision decoration is validated to match");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    char const *vsSource =
+        "#version 450\n"
+        "layout(location=0) out mediump float x;\n"
+        "void main() { gl_Position = vec4(0); x = 1.0; }\n";
+    char const *fsSource =
+        "#version 450\n"
+        "layout(location=0) in highp float x;\n"
+        "layout(location=0) out vec4 color;\n"
+        "void main() { color = vec4(x); }\n";
+
+    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "differ in precision");
+}
+
+TEST_F(VkLayerTest, CreatePipelineVsFsMismatchByPrecisionBlock) {
+    TEST_DESCRIPTION("Test that the RelaxedPrecision decoration is validated to match");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    char const *vsSource =
+        "#version 450\n"
+        "out block { layout(location=0) mediump float x; };\n"
+        "void main() { gl_Position = vec4(0); x = 1.0; }\n";
+    char const *fsSource =
+        "#version 450\n"
+        "in block { layout(location=0) highp float x; };\n"
+        "layout(location=0) out vec4 color;\n"
+        "void main() { color = vec4(x); }\n";
+
+    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "differ in precision");
+}
+
+TEST_F(VkLayerTest, CreatePipelineAttribNotConsumed) {
+    TEST_DESCRIPTION("Test that a warning is produced for a vertex attribute which is not consumed by the vertex shader");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkVertexInputBindingDescription input_binding;
+    memset(&input_binding, 0, sizeof(input_binding));
+
+    VkVertexInputAttributeDescription input_attrib;
+    memset(&input_attrib, 0, sizeof(input_attrib));
+    input_attrib.format = VK_FORMAT_R32_SFLOAT;
+
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.vi_ci_.pVertexBindingDescriptions = &input_binding;
+        helper.vi_ci_.vertexBindingDescriptionCount = 1;
+        helper.vi_ci_.pVertexAttributeDescriptions = &input_attrib;
+        helper.vi_ci_.vertexAttributeDescriptionCount = 1;
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+                                      "location 0 not consumed by vertex shader");
+}
+
+TEST_F(VkLayerTest, CreatePipelineAttribLocationMismatch) {
+    TEST_DESCRIPTION(
+        "Test that a warning is produced for a location mismatch on vertex attributes. This flushes out bad behavior in the "
+        "interface walker");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkVertexInputBindingDescription input_binding;
+    memset(&input_binding, 0, sizeof(input_binding));
+
+    VkVertexInputAttributeDescription input_attrib;
+    memset(&input_attrib, 0, sizeof(input_attrib));
+    input_attrib.format = VK_FORMAT_R32_SFLOAT;
+
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.vi_ci_.pVertexBindingDescriptions = &input_binding;
+        helper.vi_ci_.vertexBindingDescriptionCount = 1;
+        helper.vi_ci_.pVertexAttributeDescriptions = &input_attrib;
+        helper.vi_ci_.vertexAttributeDescriptionCount = 1;
+    };
+    m_errorMonitor->SetUnexpectedError("Vertex shader consumes input at location 1 but not provided");
+
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+                                      "location 0 not consumed by vertex shader");
+}
+
+TEST_F(VkLayerTest, CreatePipelineAttribNotProvided) {
+    TEST_DESCRIPTION("Test that an error is produced for a vertex shader input which is not provided by a vertex attribute");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    char const *vsSource =
+        "#version 450\n"
+        "\n"
+        "layout(location=0) in vec4 x;\n" /* not provided */
+        "void main(){\n"
+        "   gl_Position = x;\n"
+        "}\n";
+    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.shader_stages_ = {vs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "Vertex shader consumes input at location 0 but not provided");
+}
+
+TEST_F(VkLayerTest, CreatePipelineAttribTypeMismatch) {
+    TEST_DESCRIPTION(
+        "Test that an error is produced for a mismatch between the fundamental type (float/int/uint) of an attribute and the "
+        "vertex shader input that consumes it");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkVertexInputBindingDescription input_binding;
+    memset(&input_binding, 0, sizeof(input_binding));
+
+    VkVertexInputAttributeDescription input_attrib;
+    memset(&input_attrib, 0, sizeof(input_attrib));
+    input_attrib.format = VK_FORMAT_R32_SFLOAT;
+
+    char const *vsSource =
+        "#version 450\n"
+        "\n"
+        "layout(location=0) in int x;\n" /* attrib provided float */
+        "void main(){\n"
+        "   gl_Position = vec4(x);\n"
+        "}\n";
+    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.shader_stages_ = {vs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
+        helper.vi_ci_.pVertexBindingDescriptions = &input_binding;
+        helper.vi_ci_.vertexBindingDescriptionCount = 1;
+        helper.vi_ci_.pVertexAttributeDescriptions = &input_attrib;
+        helper.vi_ci_.vertexAttributeDescriptionCount = 1;
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "location 0 does not match vertex shader input type");
+}
+
+TEST_F(VkLayerTest, CreatePipelineDuplicateStage) {
+    TEST_DESCRIPTION("Test that an error is produced for a pipeline containing multiple shaders for the same stage");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), helper.vs_->GetStageCreateInfo(),
+                                 helper.fs_->GetStageCreateInfo()};
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "Multiple shaders provided for stage VK_SHADER_STAGE_VERTEX_BIT");
+}
+
+TEST_F(VkLayerTest, CreatePipelineMissingEntrypoint) {
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
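+    // Request entrypoint "foo", which does not exist in the fragment shader module.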
+    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this, "foo");
+
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "No entrypoint found named `foo`");
+}
+
+TEST_F(VkLayerTest, CreatePipelineDepthStencilRequired) {
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "pDepthStencilState is NULL when rasterization is enabled and subpass uses a depth/stencil attachment");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    VkPipelineObj pipe(m_device);
+    pipe.AddDefaultColorAttachment();
+    pipe.AddShader(&vs);
+    pipe.AddShader(&fs);
+
+    VkDescriptorSetObj descriptorSet(m_device);
+    descriptorSet.AppendDummy();
+    descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+    VkAttachmentDescription attachments[] = {
+        {
+            0,
+            VK_FORMAT_B8G8R8A8_UNORM,
+            VK_SAMPLE_COUNT_1_BIT,
+            VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+            VK_ATTACHMENT_STORE_OP_DONT_CARE,
+            VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+            VK_ATTACHMENT_STORE_OP_DONT_CARE,
+            VK_IMAGE_LAYOUT_UNDEFINED,
+            VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+        },
+        {
+            0,
+            VK_FORMAT_D16_UNORM,
+            VK_SAMPLE_COUNT_1_BIT,
+            VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+            VK_ATTACHMENT_STORE_OP_DONT_CARE,
+            VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+            VK_ATTACHMENT_STORE_OP_DONT_CARE,
+            VK_IMAGE_LAYOUT_UNDEFINED,
+            VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
+        },
+    };
+    VkAttachmentReference refs[] = {
+        {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+        {1, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL},
+    };
+    VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &refs[0], nullptr, &refs[1], 0, nullptr};
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 2, attachments, 1, &subpass, 0, nullptr};
+    VkRenderPass rp;
+    VkResult err = vk::CreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+    ASSERT_VK_SUCCESS(err);
+
+    pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), rp);
+
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+}
+
+TEST_F(VkLayerTest, CreatePipelineTessPatchDecorationMismatch) {
+    TEST_DESCRIPTION(
+        "Test that an error is produced for a variable output from the TCS without the patch decoration, but consumed in the TES "
+        "with the decoration.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    if (!m_device->phy().features().tessellationShader) {
+        printf("%s Device does not support tessellation shaders; skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    char const *tcsSource =
+        "#version 450\n"
+        "layout(location=0) out int x[];\n"
+        "layout(vertices=3) out;\n"
+        "void main(){\n"
+        "   gl_TessLevelOuter[0] = gl_TessLevelOuter[1] = gl_TessLevelOuter[2] = 1;\n"
+        "   gl_TessLevelInner[0] = 1;\n"
+        "   x[gl_InvocationID] = gl_InvocationID;\n"
+        "}\n";
+    char const *tesSource =
+        "#version 450\n"
+        "layout(triangles, equal_spacing, cw) in;\n"
+        "layout(location=0) patch in int x;\n"
+        "void main(){\n"
+        "   gl_Position.xyz = gl_TessCoord;\n"
+        "   gl_Position.w = x;\n"
+        "}\n";
+    VkShaderObj tcs(m_device, tcsSource, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
+    VkShaderObj tes(m_device, tesSource, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this);
+
+    VkPipelineInputAssemblyStateCreateInfo iasci{VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, nullptr, 0,
+                                                 VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, VK_FALSE};
+
+    VkPipelineTessellationStateCreateInfo tsci{VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, nullptr, 0, 3};
+
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.gp_ci_.pTessellationState = &tsci;
+        helper.gp_ci_.pInputAssemblyState = &iasci;
+        helper.shader_stages_.emplace_back(tcs.GetStageCreateInfo());
+        helper.shader_stages_.emplace_back(tes.GetStageCreateInfo());
+    };
+    CreatePipelineHelper::OneshotTest(
+        *this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "is per-vertex in tessellation control shader stage but per-patch in tessellation evaluation shader stage");
+}
+
+TEST_F(VkLayerTest, CreatePipelineTessErrors) {
+    TEST_DESCRIPTION("Test various errors when creating a graphics pipeline with tessellation stages active.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    if (!m_device->phy().features().tessellationShader) {
+        printf("%s Device does not support tessellation shaders; skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    char const *tcsSource =
+        "#version 450\n"
+        "layout(vertices=3) out;\n"
+        "void main(){\n"
+        "   gl_TessLevelOuter[0] = gl_TessLevelOuter[1] = gl_TessLevelOuter[2] = 1;\n"
+        "   gl_TessLevelInner[0] = 1;\n"
+        "}\n";
+    char const *tesSource =
+        "#version 450\n"
+        "layout(triangles, equal_spacing, cw) in;\n"
+        "void main(){\n"
+        "   gl_Position.xyz = gl_TessCoord;\n"
+        "   gl_Position.w = 0;\n"
+        "}\n";
+    VkShaderObj tcs(m_device, tcsSource, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
+    VkShaderObj tes(m_device, tesSource, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this);
+
+    VkPipelineInputAssemblyStateCreateInfo iasci{VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, nullptr, 0,
+                                                 VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, VK_FALSE};
+
+    VkPipelineTessellationStateCreateInfo tsci{VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, nullptr, 0, 3};
+
+    std::vector<VkPipelineShaderStageCreateInfo> shader_stages = {};
+    VkPipelineInputAssemblyStateCreateInfo iasci_bad = iasci;
+    VkPipelineInputAssemblyStateCreateInfo *p_iasci = nullptr;
+    VkPipelineTessellationStateCreateInfo tsci_bad = tsci;
+    VkPipelineTessellationStateCreateInfo *p_tsci = nullptr;
+
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.gp_ci_.pTessellationState = p_tsci;
+        helper.gp_ci_.pInputAssemblyState = p_iasci;
+        helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
+        helper.shader_stages_.insert(helper.shader_stages_.end(), shader_stages.begin(), shader_stages.end());
+    };
+
+    iasci_bad.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;  // otherwise we get a failure about invalid topology
+    p_iasci = &iasci_bad;
+    // Pass a tess control shader without a tess eval shader
+    shader_stages = {tcs.GetStageCreateInfo()};
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkGraphicsPipelineCreateInfo-pStages-00729");
+
+    // Pass a tess eval shader without a tess control shader
+    shader_stages = {tes.GetStageCreateInfo()};
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkGraphicsPipelineCreateInfo-pStages-00730");
+
+    p_iasci = &iasci;
+    shader_stages = {};
+    // Pass patch topology without tessellation shaders
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkGraphicsPipelineCreateInfo-topology-00737");
+
+    shader_stages = {tcs.GetStageCreateInfo(), tes.GetStageCreateInfo()};
+    // Pass a NULL pTessellationState (with active tessellation shader stages)
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkGraphicsPipelineCreateInfo-pStages-00731");
+
+    // Pass an invalid pTessellationState (bad sType)
+    tsci_bad.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    p_tsci = &tsci_bad;
+    shader_stages = {tcs.GetStageCreateInfo(), tes.GetStageCreateInfo()};
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkPipelineTessellationStateCreateInfo-sType-sType");
+
+    // Pass out-of-range patchControlPoints
+    p_iasci = &iasci;
+    tsci_bad = tsci;
+    tsci_bad.patchControlPoints = 0;
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkPipelineTessellationStateCreateInfo-patchControlPoints-01214");
+
+    tsci_bad.patchControlPoints = m_device->props.limits.maxTessellationPatchSize + 1;
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkPipelineTessellationStateCreateInfo-patchControlPoints-01214");
+
+    p_tsci = &tsci;
+    // Pass an invalid primitive topology
+    iasci_bad = iasci;
+    iasci_bad.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
+    p_iasci = &iasci_bad;
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkGraphicsPipelineCreateInfo-pStages-00736");
+}
+
+TEST_F(VkLayerTest, CreatePipelineAttribBindingConflict) {
+    TEST_DESCRIPTION(
+        "Test that an error is produced for a vertex attribute setup where multiple bindings provide the same location");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    /* Two binding descriptions for binding 0 */
+    VkVertexInputBindingDescription input_bindings[2];
+    memset(input_bindings, 0, sizeof(input_bindings));
+
+    VkVertexInputAttributeDescription input_attrib;
+    memset(&input_attrib, 0, sizeof(input_attrib));
+    input_attrib.format = VK_FORMAT_R32_SFLOAT;
+
+    char const *vsSource =
+        "#version 450\n"
+        "\n"
+        "layout(location=0) in float x;\n" /* attrib provided float */
+        "void main(){\n"
+        "   gl_Position = vec4(x);\n"
+        "}\n";
+
+    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+
+    m_errorMonitor->VerifyFound();
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.shader_stages_ = {vs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
+        helper.vi_ci_.pVertexBindingDescriptions = input_bindings;
+        helper.vi_ci_.vertexBindingDescriptionCount = 2;
+        helper.vi_ci_.pVertexAttributeDescriptions = &input_attrib;
+        helper.vi_ci_.vertexAttributeDescriptionCount = 1;
+    };
+    m_errorMonitor->SetUnexpectedError("VUID-VkPipelineVertexInputStateCreateInfo-pVertexBindingDescriptions-00616 ");
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "Duplicate vertex input binding descriptions for binding 0");
+}
+
+TEST_F(VkLayerTest, CreatePipelineFragmentOutputNotWritten) {
+    TEST_DESCRIPTION(
+        "Test that an error is produced for a fragment shader which does not provide an output for one of the pipeline's color "
+        "attachments");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkShaderObj fs(m_device, bindStateMinimalShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
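+        // Enable color writes for attachment 0 even though the minimal fragment shader writes no output.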
+        helper.cb_attachments_.colorWriteMask = 1;
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_WARNING_BIT_EXT,
+                                      "Attachment 0 not written by fragment shader");
+}
+
+TEST_F(VkLayerTest, CreatePipelineFragmentOutputNotConsumed) {
+    TEST_DESCRIPTION(
+        "Test that a warning is produced for a fragment shader which provides a spurious output with no matching attachment");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    char const *fsSource =
+        "#version 450\n"
+        "\n"
+        "layout(location=0) out vec4 x;\n"
+        "layout(location=1) out vec4 y;\n" /* no matching attachment for this */
+        "void main(){\n"
+        "   x = vec4(1);\n"
+        "   y = vec4(1);\n"
+        "}\n";
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_WARNING_BIT_EXT,
+                                      "fragment shader writes to output location 1 with no matching attachment");
+}
+
+TEST_F(VkLayerTest, CreatePipelineFragmentNoOutputLocation0ButAlphaToCoverageEnabled) {
+    TEST_DESCRIPTION("Test that an error is produced when alpha to coverage is enabled but no output at location 0 is declared.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget(0u));
+
+    VkShaderObj fs(m_device, bindStateMinimalShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    VkPipelineMultisampleStateCreateInfo ms_state_ci = {};
+    ms_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+    ms_state_ci.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
+    ms_state_ci.alphaToCoverageEnable = VK_TRUE;
+
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
+        helper.pipe_ms_state_ci_ = ms_state_ci;
+    };
+    CreatePipelineHelper::OneshotTest(
+        *this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "fragment shader doesn't declare alpha output at location 0 even though alpha to coverage is enabled.");
+}
+
+TEST_F(VkLayerTest, CreatePipelineFragmentNoAlphaLocation0ButAlphaToCoverageEnabled) {
+    TEST_DESCRIPTION(
+        "Test that an error is produced when alpha to coverage is enabled but output at location 0 doesn't have alpha channel.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget(0u));
+
+    char const *fsSource =
+        "#version 450\n"
+        "layout(location=0) out vec3 x;\n"
+        "\n"
+        "void main(){\n"
+        "   x = vec3(1);\n"
+        "}\n";
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    VkPipelineMultisampleStateCreateInfo ms_state_ci = {};
+    ms_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+    ms_state_ci.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
+    ms_state_ci.alphaToCoverageEnable = VK_TRUE;
+
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
+        helper.pipe_ms_state_ci_ = ms_state_ci;
+    };
+    CreatePipelineHelper::OneshotTest(
+        *this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "fragment shader doesn't declare alpha output at location 0 even though alpha to coverage is enabled.");
+}
+
+TEST_F(VkLayerTest, CreatePipelineFragmentOutputTypeMismatch) {
+    TEST_DESCRIPTION(
+        "Test that an error is produced for a mismatch between the fundamental type of an fragment shader output variable, and the "
+        "format of the corresponding attachment");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    char const *fsSource =
+        "#version 450\n"
+        "\n"
+        "layout(location=0) out ivec4 x;\n" /* not UNORM */
+        "void main(){\n"
+        "   x = ivec4(1);\n"
+        "}\n";
+
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_WARNING_BIT_EXT,
+                                      "does not match fragment shader output type");
+}
+
+TEST_F(VkLayerTest, CreatePipelineExceedMaxVertexOutputComponents) {
+    TEST_DESCRIPTION(
+        "Test that an error is produced when the number of output components from the vertex stage exceeds the device limit");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // overflow == 0: no overflow, 1: too many components, 2: location number too large
+    for (int overflow = 0; overflow < 3; ++overflow) {
+        m_errorMonitor->Reset();
+
+        const uint32_t maxVsOutComp = m_device->props.limits.maxVertexOutputComponents + overflow;
+        std::string vsSourceStr = "#version 450\n\n";
+        const uint32_t numVec4 = maxVsOutComp / 4;
+        uint32_t location = 0;
+        if (overflow == 2) {
+            vsSourceStr += "layout(location=" + std::to_string(numVec4 + 1) + ") out vec4 vn;\n";
+        } else {
+            for (uint32_t i = 0; i < numVec4; i++) {
+                vsSourceStr += "layout(location=" + std::to_string(location) + ") out vec4 v" + std::to_string(i) + ";\n";
+                location += 1;
+            }
+            const uint32_t remainder = maxVsOutComp % 4;
+            if (remainder != 0) {
+                if (remainder == 1) {
+                    vsSourceStr += "layout(location=" + std::to_string(location) + ") out float" + " vn;\n";
+                } else {
+                    vsSourceStr +=
+                        "layout(location=" + std::to_string(location) + ") out vec" + std::to_string(remainder) + " vn;\n";
+                }
+                location += 1;
+            }
+        }
+        vsSourceStr +=
+            "void main(){\n"
+            "}\n";
+
+        std::string fsSourceStr =
+            "#version 450\n"
+            "\n"
+            "layout(location=0) out vec4 color;\n"
+            "\n"
+            "void main(){\n"
+            "    color = vec4(1);\n"
+            "}\n";
+
+        VkShaderObj vs(m_device, vsSourceStr.c_str(), VK_SHADER_STAGE_VERTEX_BIT, this);
+        VkShaderObj fs(m_device, fsSourceStr.c_str(), VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+        const auto set_info = [&](CreatePipelineHelper &helper) {
+            helper.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
+        };
+
+        switch (overflow) {
+            case 2:
+                CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                                  "Vertex shader output variable uses location that exceeds component limit "
+                                                  "VkPhysicalDeviceLimits::maxVertexOutputComponents");
+                break;
+            case 1:
+                CreatePipelineHelper::OneshotTest(
+                    *this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                    vector<string>{"Vertex shader exceeds VkPhysicalDeviceLimits::maxVertexOutputComponents",
+                                   "Vertex shader output variable uses location that exceeds component limit "
+                                   "VkPhysicalDeviceLimits::maxVertexOutputComponents"});
+                break;
+            default:
+                assert(0);
+            case 0:
+                CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "", true);
+                break;
+        }
+    }
+}
+
+TEST_F(VkLayerTest, CreatePipelineExceedMaxTessellationControlInputOutputComponents) {
+    TEST_DESCRIPTION(
+        "Test that errors are produced when the number of per-vertex input and/or output components to the tessellation control "
+        "stage exceeds the device limit");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // overflow == 0: no overflow, 1: too many components, 2: location number too large
+    for (int overflow = 0; overflow < 3; ++overflow) {
+        m_errorMonitor->Reset();
+        VkPhysicalDeviceFeatures feat;
+        vk::GetPhysicalDeviceFeatures(gpu(), &feat);
+        if (!feat.tessellationShader) {
+            printf("%s tessellation shader stage(s) unsupported.\n", kSkipPrefix);
+            return;
+        }
+
+        // Tessellation control stage
+        std::string tcsSourceStr =
+            "#version 450\n"
+            "\n";
+        // Input components
+        const uint32_t maxTescInComp = m_device->props.limits.maxTessellationControlPerVertexInputComponents + overflow;
+        const uint32_t numInVec4 = maxTescInComp / 4;
+        uint32_t inLocation = 0;
+        if (overflow == 2) {
+            tcsSourceStr += "layout(location=" + std::to_string(numInVec4 + 1) + ") in vec4 vnIn[];\n";
+        } else {
+            for (uint32_t i = 0; i < numInVec4; i++) {
+                tcsSourceStr += "layout(location=" + std::to_string(inLocation) + ") in vec4 v" + std::to_string(i) + "In[];\n";
+                inLocation += 1;
+            }
+            const uint32_t inRemainder = maxTescInComp % 4;
+            if (inRemainder != 0) {
+                if (inRemainder == 1) {
+                    tcsSourceStr += "layout(location=" + std::to_string(inLocation) + ") in float" + " vnIn[];\n";
+                } else {
+                    tcsSourceStr +=
+                        "layout(location=" + std::to_string(inLocation) + ") in vec" + std::to_string(inRemainder) + " vnIn[];\n";
+                }
+                inLocation += 1;
+            }
+        }
+
+        // Output components
+        const uint32_t maxTescOutComp = m_device->props.limits.maxTessellationControlPerVertexOutputComponents + overflow;
+        const uint32_t numOutVec4 = maxTescOutComp / 4;
+        uint32_t outLocation = 0;
+        if (overflow == 2) {
+            tcsSourceStr += "layout(location=" + std::to_string(numOutVec4 + 1) + ") out vec4 vnOut[3];\n";
+        } else {
+            for (uint32_t i = 0; i < numOutVec4; i++) {
+                tcsSourceStr += "layout(location=" + std::to_string(outLocation) + ") out vec4 v" + std::to_string(i) + "Out[3];\n";
+                outLocation += 1;
+            }
+            const uint32_t outRemainder = maxTescOutComp % 4;
+            if (outRemainder != 0) {
+                if (outRemainder == 1) {
+                    tcsSourceStr += "layout(location=" + std::to_string(outLocation) + ") out float" + " vnOut[3];\n";
+                } else {
+                    tcsSourceStr += "layout(location=" + std::to_string(outLocation) + ") out vec" + std::to_string(outRemainder) +
+                                    " vnOut[3];\n";
+                }
+                outLocation += 1;
+            }
+        }
+
+        tcsSourceStr += "layout(vertices=3) out;\n";
+        // Finalize
+        tcsSourceStr +=
+            "\n"
+            "void main(){\n"
+            "}\n";
+
+        VkShaderObj tcs(m_device, tcsSourceStr.c_str(), VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
+        VkShaderObj tes(m_device, bindStateTeshaderText, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this);
+
+        VkPipelineInputAssemblyStateCreateInfo inputAssemblyInfo = {};
+        inputAssemblyInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+        inputAssemblyInfo.pNext = NULL;
+        inputAssemblyInfo.flags = 0;
+        inputAssemblyInfo.topology = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST;
+        inputAssemblyInfo.primitiveRestartEnable = VK_FALSE;
+
+        VkPipelineTessellationStateCreateInfo tessInfo = {};
+        tessInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO;
+        tessInfo.pNext = NULL;
+        tessInfo.flags = 0;
+        tessInfo.patchControlPoints = 3;
+
+        m_errorMonitor->SetUnexpectedError("UNASSIGNED-CoreValidation-Shader-InputNotProduced");
+
+        const auto set_info = [&](CreatePipelineHelper &helper) {
+            helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), tcs.GetStageCreateInfo(), tes.GetStageCreateInfo(),
+                                     helper.fs_->GetStageCreateInfo()};
+            helper.gp_ci_.pTessellationState = &tessInfo;
+            helper.gp_ci_.pInputAssemblyState = &inputAssemblyInfo;
+        };
+
+        switch (overflow) {
+            case 2:
+                CreatePipelineHelper::OneshotTest(
+                    *this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                    vector<string>{"Tessellation control shader input variable uses location that exceeds component limit "
+                                   "VkPhysicalDeviceLimits::maxTessellationControlPerVertexInputComponents",
+                                   "Tessellation control shader output variable uses location that exceeds component limit "
+                                   "VkPhysicalDeviceLimits::maxTessellationControlPerVertexOutputComponents"});
+                break;
+            case 1:
+                CreatePipelineHelper::OneshotTest(
+                    *this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                    vector<string>{"Tessellation control shader exceeds "
+                                   "VkPhysicalDeviceLimits::maxTessellationControlPerVertexInputComponents",
+                                   "Tessellation control shader exceeds "
+                                   "VkPhysicalDeviceLimits::maxTessellationControlPerVertexOutputComponents",
+                                   "Tessellation control shader input variable uses location that exceeds component limit "
+                                   "VkPhysicalDeviceLimits::maxTessellationControlPerVertexInputComponents",
+                                   "Tessellation control shader output variable uses location that exceeds component limit "
+                                   "VkPhysicalDeviceLimits::maxTessellationControlPerVertexOutputComponents"});
+                break;
+            default:
+                assert(0);
+            case 0:
+                CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "", true);
+                break;
+        }
+    }
+}
+
+TEST_F(VkLayerTest, CreatePipelineExceedMaxTessellationEvaluationInputOutputComponents) {
+    TEST_DESCRIPTION(
+        "Test that errors are produced when the number of input and/or output components to the tessellation evaluation stage "
+        "exceeds the device limit");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // overflow == 0: no overflow, 1: too many components, 2: location number too large
+    for (int overflow = 0; overflow < 3; ++overflow) {
+        m_errorMonitor->Reset();
+        VkPhysicalDeviceFeatures feat;
+        vk::GetPhysicalDeviceFeatures(gpu(), &feat);
+        if (!feat.tessellationShader) {
+            printf("%s tessellation shader stage(s) unsupported.\n", kSkipPrefix);
+            return;
+        }
+
+        // Tessellation evaluation stage
+        std::string tesSourceStr =
+            "#version 450\n"
+            "\n"
+            "layout (triangles) in;\n"
+            "\n";
+        // Input components
+        const uint32_t maxTeseInComp = m_device->props.limits.maxTessellationEvaluationInputComponents + overflow;
+        const uint32_t numInVec4 = maxTeseInComp / 4;
+        uint32_t inLocation = 0;
+        if (overflow == 2) {
+            tesSourceStr += "layout(location=" + std::to_string(numInVec4 + 1) + ") in vec4 vnIn[];\n";
+        } else {
+            for (uint32_t i = 0; i < numInVec4; i++) {
+                tesSourceStr += "layout(location=" + std::to_string(inLocation) + ") in vec4 v" + std::to_string(i) + "In[];\n";
+                inLocation += 1;
+            }
+            const uint32_t inRemainder = maxTeseInComp % 4;
+            if (inRemainder != 0) {
+                if (inRemainder == 1) {
+                    tesSourceStr += "layout(location=" + std::to_string(inLocation) + ") in float" + " vnIn[];\n";
+                } else {
+                    tesSourceStr +=
+                        "layout(location=" + std::to_string(inLocation) + ") in vec" + std::to_string(inRemainder) + " vnIn[];\n";
+                }
+                inLocation += 1;
+            }
+        }
+
+        // Output components
+        const uint32_t maxTeseOutComp = m_device->props.limits.maxTessellationEvaluationOutputComponents + overflow;
+        const uint32_t numOutVec4 = maxTeseOutComp / 4;
+        uint32_t outLocation = 0;
+        if (overflow == 2) {
+            tesSourceStr += "layout(location=" + std::to_string(numOutVec4 + 1) + ") out vec4 vnOut;\n";
+        } else {
+            for (uint32_t i = 0; i < numOutVec4; i++) {
+                tesSourceStr += "layout(location=" + std::to_string(outLocation) + ") out vec4 v" + std::to_string(i) + "Out;\n";
+                outLocation += 1;
+            }
+            const uint32_t outRemainder = maxTeseOutComp % 4;
+            if (outRemainder != 0) {
+                if (outRemainder == 1) {
+                    tesSourceStr += "layout(location=" + std::to_string(outLocation) + ") out float" + " vnOut;\n";
+                } else {
+                    tesSourceStr +=
+                        "layout(location=" + std::to_string(outLocation) + ") out vec" + std::to_string(outRemainder) + " vnOut;\n";
+                }
+                outLocation += 1;
+            }
+        }
+
+        // Finalize
+        tesSourceStr +=
+            "\n"
+            "void main(){\n"
+            "}\n";
+
+        VkShaderObj tcs(m_device, bindStateTscShaderText, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
+        VkShaderObj tes(m_device, tesSourceStr.c_str(), VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this);
+
+        VkPipelineInputAssemblyStateCreateInfo inputAssemblyInfo = {};
+        inputAssemblyInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+        inputAssemblyInfo.pNext = NULL;
+        inputAssemblyInfo.flags = 0;
+        inputAssemblyInfo.topology = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST;
+        inputAssemblyInfo.primitiveRestartEnable = VK_FALSE;
+
+        VkPipelineTessellationStateCreateInfo tessInfo = {};
+        tessInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO;
+        tessInfo.pNext = NULL;
+        tessInfo.flags = 0;
+        tessInfo.patchControlPoints = 3;
+
+        m_errorMonitor->SetUnexpectedError("UNASSIGNED-CoreValidation-Shader-InputNotProduced");
+
+        const auto set_info = [&](CreatePipelineHelper &helper) {
+            helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), tcs.GetStageCreateInfo(), tes.GetStageCreateInfo(),
+                                     helper.fs_->GetStageCreateInfo()};
+            helper.gp_ci_.pTessellationState = &tessInfo;
+            helper.gp_ci_.pInputAssemblyState = &inputAssemblyInfo;
+        };
+
+        switch (overflow) {
+            case 2:
+                CreatePipelineHelper::OneshotTest(
+                    *this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                    vector<string>{"Tessellation evaluation shader input variable uses location that exceeds component limit "
+                                   "VkPhysicalDeviceLimits::maxTessellationEvaluationInputComponents",
+                                   "Tessellation evaluation shader output variable uses location that exceeds component limit "
+                                   "VkPhysicalDeviceLimits::maxTessellationEvaluationOutputComponents"});
+                break;
+            case 1:
+                CreatePipelineHelper::OneshotTest(
+                    *this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                    vector<string>{
+                        "Tessellation evaluation shader exceeds VkPhysicalDeviceLimits::maxTessellationEvaluationInputComponents",
+                        "Tessellation evaluation shader exceeds VkPhysicalDeviceLimits::maxTessellationEvaluationOutputComponents",
+                        "Tessellation evaluation shader input variable uses location that exceeds component limit "
+                        "VkPhysicalDeviceLimits::maxTessellationEvaluationInputComponents",
+                        "Tessellation evaluation shader output variable uses location that exceeds component limit "
+                        "VkPhysicalDeviceLimits::maxTessellationEvaluationOutputComponents"});
+                break;
+            default:
+                assert(0);
+            case 0:
+                CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "", true);
+                break;
+        }
+    }
+}
+
+TEST_F(VkLayerTest, CreatePipelineExceedMaxGeometryInputOutputComponents) {
+    TEST_DESCRIPTION(
+        "Test that errors are produced when the number of input and/or output components to the geometry stage exceeds the device "
+        "limit");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // overflow == 0: no overflow, 1: too many components, 2: location number too large
+    for (int overflow = 0; overflow < 3; ++overflow) {
+        m_errorMonitor->Reset();
+        VkPhysicalDeviceFeatures feat;
+        vk::GetPhysicalDeviceFeatures(gpu(), &feat);
+        if (!feat.geometryShader) {
+            printf("%s geometry shader stage unsupported.\n", kSkipPrefix);
+            return;
+        }
+
+        std::string gsSourceStr =
+            "#version 450\n"
+            "\n"
+            "layout(triangles) in;\n"
+            "layout(invocations=1) in;\n";
+
+        // Input components
+        const uint32_t maxGeomInComp = m_device->props.limits.maxGeometryInputComponents + overflow;
+        const uint32_t numInVec4 = maxGeomInComp / 4;
+        uint32_t inLocation = 0;
+        if (overflow == 2) {
+            gsSourceStr += "layout(location=" + std::to_string(numInVec4 + 1) + ") in vec4 vnIn[];\n";
+        } else {
+            for (uint32_t i = 0; i < numInVec4; i++) {
+                gsSourceStr += "layout(location=" + std::to_string(inLocation) + ") in vec4 v" + std::to_string(i) + "In[];\n";
+                inLocation += 1;
+            }
+            const uint32_t inRemainder = maxGeomInComp % 4;
+            if (inRemainder != 0) {
+                if (inRemainder == 1) {
+                    gsSourceStr += "layout(location=" + std::to_string(inLocation) + ") in float" + " vnIn[];\n";
+                } else {
+                    gsSourceStr +=
+                        "layout(location=" + std::to_string(inLocation) + ") in vec" + std::to_string(inRemainder) + " vnIn[];\n";
+                }
+                inLocation += 1;
+            }
+        }
+
+        // Output components
+        const uint32_t maxGeomOutComp = m_device->props.limits.maxGeometryOutputComponents + overflow;
+        const uint32_t numOutVec4 = maxGeomOutComp / 4;
+        uint32_t outLocation = 0;
+        if (overflow == 2) {
+            gsSourceStr += "layout(location=" + std::to_string(numOutVec4) + ") out vec4 vnOut;\n";
+        } else {
+            for (uint32_t i = 0; i < numOutVec4; i++) {
+                gsSourceStr += "layout(location=" + std::to_string(outLocation) + ") out vec4 v" + std::to_string(i) + "Out;\n";
+                outLocation += 1;
+            }
+            const uint32_t outRemainder = maxGeomOutComp % 4;
+            if (outRemainder != 0) {
+                if (outRemainder == 1) {
+                    gsSourceStr += "layout(location=" + std::to_string(outLocation) + ") out float" + " vnOut;\n";
+                } else {
+                    gsSourceStr +=
+                        "layout(location=" + std::to_string(outLocation) + ") out vec" + std::to_string(outRemainder) + " vnOut;\n";
+                }
+                outLocation += 1;
+            }
+        }
+
+        // Finalize
+        int max_vertices = overflow ? (m_device->props.limits.maxGeometryTotalOutputComponents / maxGeomOutComp + 1) : 1;
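+        // Scaling max_vertices in the overflow case also pushes the total output component count past
+        // maxGeometryTotalOutputComponents, adding the corresponding error to the overflow == 1 case below.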
+        gsSourceStr += "layout(triangle_strip, max_vertices = " + std::to_string(max_vertices) +
+                       ") out;\n"
+                       "\n"
+                       "void main(){\n"
+                       "}\n";
+
+        VkShaderObj gs(m_device, gsSourceStr.c_str(), VK_SHADER_STAGE_GEOMETRY_BIT, this);
+
+        m_errorMonitor->SetUnexpectedError("UNASSIGNED-CoreValidation-Shader-InputNotProduced");
+
+        const auto set_info = [&](CreatePipelineHelper &helper) {
+            helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), gs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
+        };
+
+        switch (overflow) {
+            case 2:
+                CreatePipelineHelper::OneshotTest(
+                    *this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                    vector<string>{"Geometry shader input variable uses location that exceeds component limit "
+                                   "VkPhysicalDeviceLimits::maxGeometryInputComponents",
+                                   "Geometry shader output variable uses location that exceeds component limit "
+                                   "VkPhysicalDeviceLimits::maxGeometryOutputComponents"});
+                break;
+            case 1:
+                CreatePipelineHelper::OneshotTest(
+                    *this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                    vector<string>{"Geometry shader exceeds VkPhysicalDeviceLimits::maxGeometryInputComponents",
+                                   "Geometry shader exceeds VkPhysicalDeviceLimits::maxGeometryOutputComponents",
+                                   "Geometry shader input variable uses location that exceeds component limit "
+                                   "VkPhysicalDeviceLimits::maxGeometryInputComponents",
+                                   "Geometry shader output variable uses location that exceeds component limit "
+                                   "VkPhysicalDeviceLimits::maxGeometryOutputComponents",
+                                   "Geometry shader exceeds VkPhysicalDeviceLimits::maxGeometryTotalOutputComponents"});
+                break;
+            default:
+                assert(0);
+            case 0:
+                CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "", true);
+                break;
+        }
+    }
+}
+
+TEST_F(VkLayerTest, CreatePipelineExceedMaxFragmentInputComponents) {
+    TEST_DESCRIPTION(
+        "Test that an error is produced when the number of input components from the fragment stage exceeds the device limit");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // overflow == 0: no overflow, 1: too many components, 2: location number too large
+    for (int overflow = 0; overflow < 3; ++overflow) {
+        m_errorMonitor->Reset();
+
+        const uint32_t maxFsInComp = m_device->props.limits.maxFragmentInputComponents + overflow;
+        std::string fsSourceStr = "#version 450\n\n";
+        const uint32_t numVec4 = maxFsInComp / 4;
+        uint32_t location = 0;
+        if (overflow == 2) {
+            fsSourceStr += "layout(location=" + std::to_string(numVec4 + 1) + ") in float" + " vn;\n";
+        } else {
+            for (uint32_t i = 0; i < numVec4; i++) {
+                fsSourceStr += "layout(location=" + std::to_string(location) + ") in vec4 v" + std::to_string(i) + ";\n";
+                location += 1;
+            }
+            const uint32_t remainder = maxFsInComp % 4;
+            if (remainder != 0) {
+                if (remainder == 1) {
+                    fsSourceStr += "layout(location=" + std::to_string(location) + ") in float" + " vn;\n";
+                } else {
+                    fsSourceStr +=
+                        "layout(location=" + std::to_string(location) + ") in vec" + std::to_string(remainder) + " vn;\n";
+                }
+                location += 1;
+            }
+        }
+        fsSourceStr +=
+            "layout(location=0) out vec4 color;"
+            "\n"
+            "void main(){\n"
+            "    color = vec4(1);\n"
+            "}\n";
+        VkShaderObj fs(m_device, fsSourceStr.c_str(), VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+        m_errorMonitor->SetUnexpectedError("UNASSIGNED-CoreValidation-Shader-InputNotProduced");
+        const auto set_info = [&](CreatePipelineHelper &helper) {
+            helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
+        };
+        switch (overflow) {
+            case 2:
+                CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                                  "Fragment shader input variable uses location that exceeds component limit "
+                                                  "VkPhysicalDeviceLimits::maxFragmentInputComponents");
+                break;
+            case 1:
+                CreatePipelineHelper::OneshotTest(
+                    *this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                    vector<string>{"Fragment shader exceeds VkPhysicalDeviceLimits::maxFragmentInputComponents",
+                                   "Fragment shader input variable uses location that exceeds component limit "
+                                   "VkPhysicalDeviceLimits::maxFragmentInputComponents"});
+                break;
+            default:
+                assert(0);
+            case 0:
+                CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "", true);
+                break;
+        }
+    }
+}
+
+TEST_F(VkLayerTest, CreatePipelineExceedMaxGeometryInstanceVertexCount) {
+    TEST_DESCRIPTION(
+        "Test that errors are produced when the number of output vertices/instances in the geometry stage exceeds the device "
+        "limit");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    for (int overflow = 0; overflow < 2; ++overflow) {
+        m_errorMonitor->Reset();
+        VkPhysicalDeviceFeatures feat;
+        vk::GetPhysicalDeviceFeatures(gpu(), &feat);
+        if (!feat.geometryShader) {
+            printf("%s geometry shader stage unsupported.\n", kSkipPrefix);
+            return;
+        }
+
+        std::string gsSourceStr = R"(
+               OpCapability Geometry
+               OpMemoryModel Logical GLSL450
+               OpEntryPoint Geometry %main "main"
+               OpExecutionMode %main InputPoints
+               OpExecutionMode %main OutputTriangleStrip
+               )";
+        if (overflow) {
+            gsSourceStr += "OpExecutionMode %main Invocations " +
+                           std::to_string(m_device->props.limits.maxGeometryShaderInvocations + 1) +
+                           "\n\
+                OpExecutionMode %main OutputVertices " +
+                           std::to_string(m_device->props.limits.maxGeometryOutputVertices + 1);
+        } else {
+            gsSourceStr += R"(
+               OpExecutionMode %main Invocations 1
+               OpExecutionMode %main OutputVertices 1
+               )";
+        }
+        gsSourceStr += R"(
+               OpSource GLSL 450
+       %void = OpTypeVoid
+          %3 = OpTypeFunction %void
+       %main = OpFunction %void None %3
+          %5 = OpLabel
+               OpReturn
+               OpFunctionEnd
+        )";
+        VkShaderObj gs(m_device, gsSourceStr, VK_SHADER_STAGE_GEOMETRY_BIT, this);
+
+        const auto set_info = [&](CreatePipelineHelper &helper) {
+            helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), gs.GetStageCreateInfo(), helper.fs_->GetStageCreateInfo()};
+        };
+        if (overflow) {
+            CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                              vector<string>{"VUID-VkPipelineShaderStageCreateInfo-stage-00714",
+                                                             "VUID-VkPipelineShaderStageCreateInfo-stage-00715"});
+        } else {
+            CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "", true);
+        }
+    }
+}
+
+TEST_F(VkLayerTest, CreatePipelineUniformBlockNotProvided) {
+    TEST_DESCRIPTION(
+        "Test that an error is produced for a shader consuming a uniform block which has no corresponding binding in the pipeline "
+        "layout");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "not declared in pipeline layout");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, bindStateFragUniformShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    VkPipelineObj pipe(m_device);
+    pipe.AddShader(&vs);
+    pipe.AddShader(&fs);
+
+    /* set up CB 0; type is UNORM by default */
+    pipe.AddDefaultColorAttachment();
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkDescriptorSetObj descriptorSet(m_device);
+    descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+    pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelinePushConstantsNotInLayout) {
+    TEST_DESCRIPTION(
+        "Test that an error is produced for a shader consuming push constants which are not provided in the pipeline layout");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    char const *vsSource =
+        "#version 450\n"
+        "\n"
+        "layout(push_constant, std430) uniform foo { float x; } consts;\n"
+        "void main(){\n"
+        "   gl_Position = vec4(consts.x);\n"
+        "}\n";
+
+    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.shader_stages_ = {vs.GetStageCreateInfo(), pipe.fs_->GetStageCreateInfo()};
+    pipe.InitState();
+    pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {});
+    /* should generate an error -- no push constant ranges provided! */
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "not declared in layout");
+    pipe.CreateGraphicsPipeline();
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineInputAttachmentMissing) {
+    TEST_DESCRIPTION(
+        "Test that an error is produced for a shader consuming an input attachment which is not included in the subpass "
+        "description");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    char const *fsSource =
+        "#version 450\n"
+        "\n"
+        "layout(input_attachment_index=0, set=0, binding=0) uniform subpassInput x;\n"
+        "layout(location=0) out vec4 color;\n"
+        "void main() {\n"
+        "   color = subpassLoad(x);\n"
+        "}\n";
+
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
+        helper.dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr}};
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "consumes input attachment index 0 but not provided in subpass");
+}
+
+TEST_F(VkLayerTest, CreatePipelineInputAttachmentTypeMismatch) {
+    TEST_DESCRIPTION(
+        "Test that an error is produced for a shader consuming an input attachment with a format having a different fundamental "
+        "type");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "input attachment 0 format of VK_FORMAT_R8G8B8A8_UINT does not match");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    char const *fsSource =
+        "#version 450\n"
+        "\n"
+        "layout(input_attachment_index=0, set=0, binding=0) uniform subpassInput x;\n"
+        "layout(location=0) out vec4 color;\n"
+        "void main() {\n"
+        "   color = subpassLoad(x);\n"
+        "}\n";
+
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    VkPipelineObj pipe(m_device);
+    pipe.AddShader(&vs);
+    pipe.AddShader(&fs);
+    pipe.AddDefaultColorAttachment();
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkDescriptorSetLayoutBinding dslb = {0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr};
+    const VkDescriptorSetLayoutObj dsl(m_device, {dslb});
+
+    const VkPipelineLayoutObj pl(m_device, {&dsl});
+
+    VkAttachmentDescription descs[2] = {
+        {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE,
+         VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+         VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+        {0, VK_FORMAT_R8G8B8A8_UINT, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE,
+         VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL},
+    };
+    VkAttachmentReference color = {
+        0,
+        VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+    };
+    VkAttachmentReference input = {
+        1,
+        VK_IMAGE_LAYOUT_GENERAL,
+    };
+
+    VkSubpassDescription sd = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 1, &input, 1, &color, nullptr, nullptr, 0, nullptr};
+
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 2, descs, 1, &sd, 0, nullptr};
+    VkRenderPass rp;
+    VkResult err = vk::CreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+    ASSERT_VK_SUCCESS(err);
+
+    // error here.
+    pipe.CreateVKPipeline(pl.handle(), rp);
+
+    m_errorMonitor->VerifyFound();
+
+    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+}
+
+TEST_F(VkLayerTest, CreatePipelineInputAttachmentMissingArray) {
+    TEST_DESCRIPTION(
+        "Test that an error is produced for a shader consuming an input attachment which is not included in the subpass "
+        "description -- array case");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    char const *fsSource =
+        "#version 450\n"
+        "\n"
+        "layout(input_attachment_index=0, set=0, binding=0) uniform subpassInput xs[1];\n"
+        "layout(location=0) out vec4 color;\n"
+        "void main() {\n"
+        "   color = subpassLoad(xs[0]);\n"
+        "}\n";
+
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
+        helper.dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 2, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr}};
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "consumes input attachment index 0 but not provided in subpass");
+}
+
+TEST_F(VkLayerTest, CreateComputePipelineMissingDescriptor) {
+    TEST_DESCRIPTION(
+        "Test that an error is produced for a compute pipeline consuming a descriptor which is not provided in the pipeline "
+        "layout");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    char const *csSource =
+        "#version 450\n"
+        "\n"
+        "layout(local_size_x=1) in;\n"
+        "layout(set=0, binding=0) buffer block { vec4 x; };\n"
+        "void main(){\n"
+        "   x = vec4(1);\n"
+        "}\n";
+
+    CreateComputePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.cs_.reset(new VkShaderObj(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this));
+    pipe.InitState();
+    pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {});
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "Shader uses descriptor slot 0.0");
+    pipe.CreateComputePipeline();
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreateComputePipelineDescriptorTypeMismatch) {
+    TEST_DESCRIPTION("Test that an error is produced for a pipeline consuming a descriptor-backed resource of a mismatched type");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    char const *csSource =
+        "#version 450\n"
+        "\n"
+        "layout(local_size_x=1) in;\n"
+        "layout(set=0, binding=0) buffer block { vec4 x; };\n"
+        "void main() {\n"
+        "   x.x = 1.0f;\n"
+        "}\n";
+
+    const auto set_info = [&](CreateComputePipelineHelper &helper) {
+        helper.cs_.reset(new VkShaderObj(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this));
+        helper.dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr}};
+    };
+    CreateComputePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "but descriptor of type VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER");
+}
+
+TEST_F(VkLayerTest, MultiplePushDescriptorSets) {
+    TEST_DESCRIPTION("Verify an error message for multiple push descriptor sets.");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; skipped.\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+    } else {
+        printf("%s Push Descriptors Extension not supported, skipping tests\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
+    if (push_descriptor_prop.maxPushDescriptors < 1) {
+        // Some implementations report an invalid maxPushDescriptors of 0
+        printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
+        return;
+    }
+
+    VkDescriptorSetLayoutBinding dsl_binding = {};
+    dsl_binding.binding = 0;
+    dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    dsl_binding.descriptorCount = 1;
+    dsl_binding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+    dsl_binding.pImmutableSamplers = NULL;
+
+    const unsigned int descriptor_set_layout_count = 2;
+    std::vector<VkDescriptorSetLayoutObj> ds_layouts;
+    for (uint32_t i = 0; i < descriptor_set_layout_count; ++i) {
+        dsl_binding.binding = i;
+        ds_layouts.emplace_back(m_device, std::vector<VkDescriptorSetLayoutBinding>(1, dsl_binding),
+                                VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
+    }
+    const auto &ds_vk_layouts = MakeVkHandles<VkDescriptorSetLayout>(ds_layouts);
+
+    VkPipelineLayout pipeline_layout;
+    VkPipelineLayoutCreateInfo pipeline_layout_ci = {};
+    pipeline_layout_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+    pipeline_layout_ci.pNext = NULL;
+    pipeline_layout_ci.pushConstantRangeCount = 0;
+    pipeline_layout_ci.pPushConstantRanges = NULL;
+    pipeline_layout_ci.setLayoutCount = ds_vk_layouts.size();
+    pipeline_layout_ci.pSetLayouts = ds_vk_layouts.data();
+
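+    // A pipeline layout may contain at most one set layout created with
+    // VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, so two such layouts should trigger the VUID below.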
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00293");
+    vk::CreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, AMDMixedAttachmentSamplesValidateGraphicsPipeline) {
+    TEST_DESCRIPTION("Verify an error message for an incorrect graphics pipeline rasterization sample count.");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // Set a mismatched sample count
+    VkPipelineMultisampleStateCreateInfo ms_state_ci = {};
+    ms_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+    ms_state_ci.rasterizationSamples = VK_SAMPLE_COUNT_4_BIT;
+
+    const auto set_info = [&](CreatePipelineHelper &helper) { helper.pipe_ms_state_ci_ = ms_state_ci; };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkGraphicsPipelineCreateInfo-subpass-01505");
+}
+
+TEST_F(VkLayerTest, FramebufferMixedSamplesNV) {
+    TEST_DESCRIPTION("Verify VK_NV_framebuffer_mixed_samples.");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME);
+    } else {
+        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME);
+        return;
+    }
+
+    VkPhysicalDeviceFeatures device_features = {};
+    ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
+    if (VK_TRUE != device_features.sampleRateShading) {
+        printf("%s Test requires unsupported sampleRateShading feature.\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    struct TestCase {
+        VkSampleCountFlagBits color_samples;
+        VkSampleCountFlagBits depth_samples;
+        VkSampleCountFlagBits raster_samples;
+        VkBool32 depth_test;
+        VkBool32 sample_shading;
+        uint32_t table_count;
+        bool positiveTest;
+        std::string vuid;
+    };
+
+    std::vector<TestCase> test_cases = {
+        {VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_4_BIT, VK_FALSE, VK_FALSE, 1, true,
+         "VUID-VkGraphicsPipelineCreateInfo-subpass-00757"},
+        {VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_1_BIT, VK_SAMPLE_COUNT_8_BIT, VK_FALSE, VK_FALSE, 4, false,
+         "VUID-VkPipelineCoverageModulationStateCreateInfoNV-coverageModulationTableEnable-01405"},
+        {VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_1_BIT, VK_SAMPLE_COUNT_8_BIT, VK_FALSE, VK_FALSE, 2, true,
+         "VUID-VkPipelineCoverageModulationStateCreateInfoNV-coverageModulationTableEnable-01405"},
+        {VK_SAMPLE_COUNT_1_BIT, VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_8_BIT, VK_TRUE, VK_FALSE, 1, false,
+         "VUID-VkGraphicsPipelineCreateInfo-subpass-01411"},
+        {VK_SAMPLE_COUNT_1_BIT, VK_SAMPLE_COUNT_8_BIT, VK_SAMPLE_COUNT_8_BIT, VK_TRUE, VK_FALSE, 1, true,
+         "VUID-VkGraphicsPipelineCreateInfo-subpass-01411"},
+        {VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_1_BIT, VK_SAMPLE_COUNT_1_BIT, VK_FALSE, VK_FALSE, 1, false,
+         "VUID-VkGraphicsPipelineCreateInfo-subpass-01412"},
+        {VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_1_BIT, VK_SAMPLE_COUNT_4_BIT, VK_FALSE, VK_FALSE, 1, true,
+         "VUID-VkGraphicsPipelineCreateInfo-subpass-01412"},
+        {VK_SAMPLE_COUNT_1_BIT, VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_4_BIT, VK_FALSE, VK_TRUE, 1, false,
+         "VUID-VkPipelineMultisampleStateCreateInfo-rasterizationSamples-01415"},
+        {VK_SAMPLE_COUNT_1_BIT, VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_4_BIT, VK_FALSE, VK_FALSE, 1, true,
+         "VUID-VkPipelineMultisampleStateCreateInfo-rasterizationSamples-01415"},
+        {VK_SAMPLE_COUNT_1_BIT, VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_8_BIT, VK_FALSE, VK_FALSE, 1, true,
+         "VUID-VkGraphicsPipelineCreateInfo-subpass-00757"}};
+
+    for (const auto &test_case : test_cases) {
+        VkAttachmentDescription att[2] = {{}, {}};
+        att[0].format = VK_FORMAT_R8G8B8A8_UNORM;
+        att[0].samples = test_case.color_samples;
+        att[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+        att[0].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+
+        att[1].format = VK_FORMAT_D24_UNORM_S8_UINT;
+        att[1].samples = test_case.depth_samples;
+        att[1].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+        att[1].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+
+        VkAttachmentReference cr = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+        VkAttachmentReference dr = {1, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
+
+        VkSubpassDescription sp = {};
+        sp.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
+        sp.colorAttachmentCount = 1;
+        sp.pColorAttachments = &cr;
+        sp.pResolveAttachments = NULL;
+        sp.pDepthStencilAttachment = &dr;
+
+        VkRenderPassCreateInfo rpi = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO};
+        rpi.attachmentCount = 2;
+        rpi.pAttachments = att;
+        rpi.subpassCount = 1;
+        rpi.pSubpasses = &sp;
+
+        VkRenderPass rp;
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkSubpassDescription-pDepthStencilAttachment-01418");
+        VkResult err = vk::CreateRenderPass(m_device->device(), &rpi, nullptr, &rp);
+        m_errorMonitor->VerifyNotFound();
+
+        ASSERT_VK_SUCCESS(err);
+
+        VkPipelineDepthStencilStateCreateInfo ds = {VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO};
+        VkPipelineCoverageModulationStateCreateInfoNV cmi = {VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV};
+
+        // Create a dummy modulation table that can be used for the positive
+        // coverageModulationTableCount test.
+        std::vector<float> cm_table{};
+
+        const auto break_samples = [&cmi, &rp, &ds, &cm_table, &test_case](CreatePipelineHelper &helper) {
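+            // When coverageModulationTableEnable is set, the table count must equal
+            // rasterizationSamples / color attachment samples; mismatching table_count values are the negative cases.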
+            cm_table.resize(test_case.raster_samples / test_case.color_samples);
+
+            cmi.flags = 0;
+            cmi.coverageModulationTableEnable = (test_case.table_count > 1);
+            cmi.coverageModulationTableCount = test_case.table_count;
+            cmi.pCoverageModulationTable = cm_table.data();
+
+            ds.depthTestEnable = test_case.depth_test;
+
+            helper.pipe_ms_state_ci_.pNext = &cmi;
+            helper.pipe_ms_state_ci_.rasterizationSamples = test_case.raster_samples;
+            helper.pipe_ms_state_ci_.sampleShadingEnable = test_case.sample_shading;
+
+            helper.gp_ci_.renderPass = rp;
+            helper.gp_ci_.pDepthStencilState = &ds;
+        };
+
+        CreatePipelineHelper::OneshotTest(*this, break_samples, VK_DEBUG_REPORT_ERROR_BIT_EXT, test_case.vuid,
+                                          test_case.positiveTest);
+
+        vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+    }
+}
+
+TEST_F(VkLayerTest, FramebufferMixedSamples) {
+    TEST_DESCRIPTION("Verify that the expected VUIds are hits when VK_NV_framebuffer_mixed_samples is disabled.");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    struct TestCase {
+        VkSampleCountFlagBits color_samples;
+        VkSampleCountFlagBits depth_samples;
+        VkSampleCountFlagBits raster_samples;
+        bool positiveTest;
+    };
+
+    std::vector<TestCase> test_cases = {
+        {VK_SAMPLE_COUNT_2_BIT, VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_8_BIT,
+         false},  // Fails vk::CreateRenderPass and vk::CreateGraphicsPipeline
+        {VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_8_BIT, false},  // Fails vk::CreateGraphicsPipeline
+        {VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_4_BIT, VK_SAMPLE_COUNT_4_BIT, true}    // Pass
+    };
+
+    for (const auto &test_case : test_cases) {
+        VkAttachmentDescription att[2] = {{}, {}};
+        att[0].format = VK_FORMAT_R8G8B8A8_UNORM;
+        att[0].samples = test_case.color_samples;
+        att[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+        att[0].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+
+        att[1].format = VK_FORMAT_D24_UNORM_S8_UINT;
+        att[1].samples = test_case.depth_samples;
+        att[1].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+        att[1].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+
+        VkAttachmentReference cr = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+        VkAttachmentReference dr = {1, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
+
+        VkSubpassDescription sp = {};
+        sp.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
+        sp.colorAttachmentCount = 1;
+        sp.pColorAttachments = &cr;
+        sp.pResolveAttachments = NULL;
+        sp.pDepthStencilAttachment = &dr;
+
+        VkRenderPassCreateInfo rpi = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO};
+        rpi.attachmentCount = 2;
+        rpi.pAttachments = att;
+        rpi.subpassCount = 1;
+        rpi.pSubpasses = &sp;
+
+        VkRenderPass rp;
+
+        if (test_case.color_samples == test_case.depth_samples) {
+            m_errorMonitor->ExpectSuccess();
+        } else {
+            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                                 "VUID-VkSubpassDescription-pDepthStencilAttachment-01418");
+        }
+
+        VkResult err = vk::CreateRenderPass(m_device->device(), &rpi, nullptr, &rp);
+
+        if (test_case.color_samples == test_case.depth_samples) {
+            m_errorMonitor->VerifyNotFound();
+        } else {
+            m_errorMonitor->VerifyFound();
+            continue;
+        }
+
+        ASSERT_VK_SUCCESS(err);
+
+        VkPipelineDepthStencilStateCreateInfo ds = {VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO};
+
+        const auto break_samples = [&rp, &ds, &test_case](CreatePipelineHelper &helper) {
+            helper.pipe_ms_state_ci_.rasterizationSamples = test_case.raster_samples;
+
+            helper.gp_ci_.renderPass = rp;
+            helper.gp_ci_.pDepthStencilState = &ds;
+        };
+
+        CreatePipelineHelper::OneshotTest(*this, break_samples, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                          "VUID-VkGraphicsPipelineCreateInfo-subpass-00757", test_case.positiveTest);
+
+        vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+    }
+}
+
+TEST_F(VkLayerTest, FragmentCoverageToColorNV) {
+    TEST_DESCRIPTION("Verify VK_NV_fragment_coverage_to_color.");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME);
+    } else {
+        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    struct TestCase {
+        VkFormat format;
+        VkBool32 enabled;
+        uint32_t location;
+        bool positive;
+    };
+
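+    // Coverage-to-color requires the color attachment at coverageToColorLocation to exist, be used, and have a
+    // single-component integer format (R8/R16/R32 UINT or SINT); the cases that violate this are the negative ones.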
+    const std::array<TestCase, 9> test_cases = {{
+        {VK_FORMAT_R8G8B8A8_UNORM, VK_FALSE, 0, true},
+        {VK_FORMAT_R8_UINT, VK_TRUE, 1, true},
+        {VK_FORMAT_R16_UINT, VK_TRUE, 1, true},
+        {VK_FORMAT_R16_SINT, VK_TRUE, 1, true},
+        {VK_FORMAT_R32_UINT, VK_TRUE, 1, true},
+        {VK_FORMAT_R32_SINT, VK_TRUE, 1, true},
+        {VK_FORMAT_R32_SINT, VK_TRUE, 2, false},
+        {VK_FORMAT_R8_SINT, VK_TRUE, 3, false},
+        {VK_FORMAT_R8G8B8A8_UNORM, VK_TRUE, 1, false},
+    }};
+
+    for (const auto &test_case : test_cases) {
+        std::array<VkAttachmentDescription, 2> att = {{{}, {}}};
+        att[0].format = VK_FORMAT_R8G8B8A8_UNORM;
+        att[0].samples = VK_SAMPLE_COUNT_1_BIT;
+        att[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+        att[0].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+
+        att[1].format = VK_FORMAT_R8G8B8A8_UNORM;
+        att[1].samples = VK_SAMPLE_COUNT_1_BIT;
+        att[1].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+        att[1].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+
+        if (test_case.location < att.size()) {
+            att[test_case.location].format = test_case.format;
+        }
+
+        const std::array<VkAttachmentReference, 3> cr = {{{0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+                                                          {1, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+                                                          {VK_ATTACHMENT_UNUSED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL}}};
+
+        VkSubpassDescription sp = {};
+        sp.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
+        sp.colorAttachmentCount = cr.size();
+        sp.pColorAttachments = cr.data();
+
+        VkRenderPassCreateInfo rpi = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO};
+        rpi.attachmentCount = att.size();
+        rpi.pAttachments = att.data();
+        rpi.subpassCount = 1;
+        rpi.pSubpasses = &sp;
+
+        const std::array<VkPipelineColorBlendAttachmentState, 3> cba = {{{}, {}, {}}};
+
+        VkPipelineColorBlendStateCreateInfo cbi = {VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO};
+        cbi.attachmentCount = cba.size();
+        cbi.pAttachments = cba.data();
+
+        VkRenderPass rp;
+        VkResult err = vk::CreateRenderPass(m_device->device(), &rpi, nullptr, &rp);
+        ASSERT_VK_SUCCESS(err);
+
+        VkPipelineCoverageToColorStateCreateInfoNV cci = {VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV};
+
+        const auto break_samples = [&cci, &cbi, &rp, &test_case](CreatePipelineHelper &helper) {
+            cci.coverageToColorEnable = test_case.enabled;
+            cci.coverageToColorLocation = test_case.location;
+
+            helper.pipe_ms_state_ci_.pNext = &cci;
+            helper.gp_ci_.renderPass = rp;
+            helper.gp_ci_.pColorBlendState = &cbi;
+        };
+
+        CreatePipelineHelper::OneshotTest(*this, break_samples, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                          "VUID-VkPipelineCoverageToColorStateCreateInfoNV-coverageToColorEnable-01404",
+                                          test_case.positive);
+
+        vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+    }
+}
+
+TEST_F(VkLayerTest, ViewportSwizzleNV) {
+    TEST_DESCRIPTION("Verify VK_NV_viewprot_swizzle.");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME);
+    } else {
+        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkViewportSwizzleNV invalid_swizzles = {
+        VkViewportCoordinateSwizzleNV(-1),
+        VkViewportCoordinateSwizzleNV(-1),
+        VkViewportCoordinateSwizzleNV(-1),
+        VkViewportCoordinateSwizzleNV(-1),
+    };
+
+    VkPipelineViewportSwizzleStateCreateInfoNV vp_swizzle_state = {
+        VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV};
+    vp_swizzle_state.viewportCount = 1;
+    vp_swizzle_state.pViewportSwizzles = &invalid_swizzles;
+
+    const std::vector<std::string> expected_vuids = {"VUID-VkViewportSwizzleNV-x-parameter", "VUID-VkViewportSwizzleNV-y-parameter",
+                                                     "VUID-VkViewportSwizzleNV-z-parameter",
+                                                     "VUID-VkViewportSwizzleNV-w-parameter"};
+
+    auto break_swizzles = [&vp_swizzle_state](CreatePipelineHelper &helper) { helper.vp_state_ci_.pNext = &vp_swizzle_state; };
+
+    CreatePipelineHelper::OneshotTest(*this, break_swizzles, VK_DEBUG_REPORT_ERROR_BIT_EXT, expected_vuids);
+
+    struct TestCase {
+        VkBool32 rasterizerDiscardEnable;
+        uint32_t vp_count;
+        uint32_t swizzle_vp_count;
+        bool positive;
+    };
+
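+    // A swizzle viewportCount that does not match the viewport state is only an error when rasterization is
+    // not discarded; with rasterizerDiscardEnable set, the viewport state is ignored.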
+    const std::array<TestCase, 3> test_cases = {{{VK_TRUE, 1, 2, true}, {VK_FALSE, 1, 1, true}, {VK_FALSE, 1, 2, false}}};
+
+    std::array<VkViewportSwizzleNV, 2> swizzles = {
+        {{VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV, VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV,
+          VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV, VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV},
+         {VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV, VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV,
+          VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV, VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV}}};
+
+    for (const auto &test_case : test_cases) {
+        assert(test_case.vp_count <= swizzles.size());
+
+        vp_swizzle_state.viewportCount = test_case.swizzle_vp_count;
+        vp_swizzle_state.pViewportSwizzles = swizzles.data();
+
+        auto break_vp_count = [&vp_swizzle_state, &test_case](CreatePipelineHelper &helper) {
+            helper.rs_state_ci_.rasterizerDiscardEnable = test_case.rasterizerDiscardEnable;
+            helper.vp_state_ci_.viewportCount = test_case.vp_count;
+
+            helper.vp_state_ci_.pNext = &vp_swizzle_state;
+        };
+
+        CreatePipelineHelper::OneshotTest(*this, break_vp_count, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                          "VUID-VkPipelineViewportSwizzleStateCreateInfoNV-viewportCount-01215",
+                                          test_case.positive);
+    }
+}
+
+TEST_F(VkLayerTest, CooperativeMatrixNV) {
+    TEST_DESCRIPTION("Test VK_NV_cooperative_matrix.");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    std::array<const char *, 2> required_device_extensions = {
+        {VK_NV_COOPERATIVE_MATRIX_EXTENSION_NAME, VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME}};
+    for (auto device_extension : required_device_extensions) {
+        if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
+            m_device_extension_names.push_back(device_extension);
+        } else {
+            printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
+            return;
+        }
+    }
+
+    if (DeviceIsMockICD() || DeviceSimulation()) {
+        printf("%s Test not supported by MockICD, skipping tests\n", kSkipPrefix);
+        return;
+    }
+
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+    auto float16_features = lvl_init_struct<VkPhysicalDeviceFloat16Int8FeaturesKHR>();
+    auto cooperative_matrix_features = lvl_init_struct<VkPhysicalDeviceCooperativeMatrixFeaturesNV>(&float16_features);
+    auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&cooperative_matrix_features);
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
+
+    std::vector<VkDescriptorSetLayoutBinding> bindings(0);
+    const VkDescriptorSetLayoutObj dsl(m_device, bindings);
+    const VkPipelineLayoutObj pl(m_device, {&dsl});
+
+    char const *csSource =
+        "#version 450\n"
+        "#extension GL_NV_cooperative_matrix : enable\n"
+        "#extension GL_KHR_shader_subgroup_basic : enable\n"
+        "#extension GL_KHR_memory_scope_semantics : enable\n"
+        "#extension GL_EXT_shader_explicit_arithmetic_types_float16 : enable\n"
+        "layout(local_size_x = 32) in;\n"
+        "layout(constant_id = 0) const uint C0 = 1;"
+        "layout(constant_id = 1) const uint C1 = 1;"
+        "void main() {\n"
+        // Bad type
+        "   fcoopmatNV<16, gl_ScopeSubgroup, 3, 5> badSize = fcoopmatNV<16, gl_ScopeSubgroup, 3, 5>(float16_t(0.0));\n"
+        // Not a valid multiply when C0 != C1
+        "   fcoopmatNV<16, gl_ScopeSubgroup, C0, C1> A;\n"
+        "   fcoopmatNV<16, gl_ScopeSubgroup, C0, C1> B;\n"
+        "   fcoopmatNV<16, gl_ScopeSubgroup, C0, C1> C;\n"
+        "   coopMatMulAddNV(A, B, C);\n"
+        "}\n";
+
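+    // Specialize C0 = 16 and C1 = 8 so the coopMatMulAddNV operands above have mismatched dimensions.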
+    const uint32_t specData[] = {
+        16,
+        8,
+    };
+    VkSpecializationMapEntry entries[] = {
+        {0, sizeof(uint32_t) * 0, sizeof(uint32_t)},
+        {1, sizeof(uint32_t) * 1, sizeof(uint32_t)},
+    };
+
+    VkSpecializationInfo specInfo = {
+        2,
+        entries,
+        sizeof(specData),
+        specData,
+    };
+
+    CreateComputePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.cs_.reset(new VkShaderObj(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this, "main", false, &specInfo));
+    pipe.InitState();
+    pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {});
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkPipelineShaderStageCreateInfo-module-parameter");
+    pipe.CreateComputePipeline();
+    m_errorMonitor->VerifyFound();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-CoreValidation-Shader-CooperativeMatrixType");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "UNASSIGNED-CoreValidation-Shader-CooperativeMatrixMulAdd");
+    m_errorMonitor->SetUnexpectedError("VUID-VkPipelineShaderStageCreateInfo-module-parameter");
+    pipe.CreateComputePipeline();
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, SubgroupSupportedOperations) {
+    TEST_DESCRIPTION("Test shader validation support for subgroup supportedOperations.");
+
+    SetTargetApiVersion(VK_API_VERSION_1_1);
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // 1.1 and up only.
+    if (m_device->props.apiVersion < VK_API_VERSION_1_1) {
+        printf("%s Vulkan 1.1 not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    if (DeviceIsMockICD() || DeviceSimulation()) {
+        printf("%s DevSim doesn't support Vulkan 1.1, skipping tests\n", kSkipPrefix);
+        return;
+    }
+
+    VkPhysicalDeviceSubgroupProperties subgroup_prop = GetSubgroupProperties(instance(), gpu());
+
+    // CreatePipelineLayout
+    VkPipelineLayoutCreateInfo pipeline_layout_ci = {};
+    pipeline_layout_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+    pipeline_layout_ci.pNext = NULL;
+    pipeline_layout_ci.flags = 0;
+    pipeline_layout_ci.setLayoutCount = 0;
+    pipeline_layout_ci.pSetLayouts = VK_NULL_HANDLE;
+    VkPipelineLayout pipeline_layout = VK_NULL_HANDLE;
+    vk::CreatePipelineLayout(m_device->device(), &pipeline_layout_ci, NULL, &pipeline_layout);
+
+    const std::pair<const char *, VkSubgroupFeatureFlagBits> capabilities[] = {
+        {"GroupNonUniform", VK_SUBGROUP_FEATURE_BASIC_BIT},
+        {"GroupNonUniformVote", VK_SUBGROUP_FEATURE_VOTE_BIT},
+        {"GroupNonUniformArithmetic", VK_SUBGROUP_FEATURE_ARITHMETIC_BIT},
+        {"GroupNonUniformBallot", VK_SUBGROUP_FEATURE_BALLOT_BIT},
+        {"GroupNonUniformShuffle", VK_SUBGROUP_FEATURE_SHUFFLE_BIT},
+        {"GroupNonUniformShuffleRelative", VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT},
+        {"GroupNonUniformClustered", VK_SUBGROUP_FEATURE_CLUSTERED_BIT},
+        {"GroupNonUniformQuad", VK_SUBGROUP_FEATURE_QUAD_BIT},
+    };
+
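+    // Each capability below maps to a subgroup feature bit; shaders that declare a capability whose bit is
+    // missing from supportedOperations (or run in a stage missing from supportedStages) should be rejected.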
+    for (auto &capability : capabilities) {
+        std::string spv_source[3];
+
+        spv_source[0] = "OpCapability " + std::string(capability.first) + "\n" + R"(
+                   OpCapability Shader
+                   OpMemoryModel Logical GLSL450
+                   OpEntryPoint GLCompute %main "main"
+                   OpExecutionMode %main LocalSize 1 1 1
+           %void = OpTypeVoid
+           %func = OpTypeFunction %void
+           %main = OpFunction %void None %func
+             %40 = OpLabel
+                   OpReturn
+                   OpFunctionEnd
+        )";
+
+        spv_source[1] = "OpCapability " + std::string(capability.first) + "\n" + R"(
+                   OpCapability Shader
+                   OpMemoryModel Logical GLSL450
+                   OpEntryPoint Vertex %main "main"
+           %void = OpTypeVoid
+           %func = OpTypeFunction %void
+           %main = OpFunction %void None %func
+             %40 = OpLabel
+                   OpReturn
+                   OpFunctionEnd
+        )";
+
+        spv_source[2] = "OpCapability " + std::string(capability.first) + "\n" + R"(
+                   OpCapability Shader
+                   OpMemoryModel Logical GLSL450
+                   OpEntryPoint Fragment %main "main"
+                   OpExecutionMode %main OriginUpperLeft
+           %void = OpTypeVoid
+           %func = OpTypeFunction %void
+           %main = OpFunction %void None %func
+             %40 = OpLabel
+                   OpReturn
+                   OpFunctionEnd
+        )";
+
+        VkShaderModule shader_module[3];
+        VkPipelineShaderStageCreateInfo stage[3];
+
+        for (int i = 0; i < 3; ++i) {
+            // CreateShaderModule
+            std::vector<unsigned int> spv;
+            VkShaderModuleCreateInfo module_create_info;
+            module_create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
+            module_create_info.pNext = NULL;
+            ASMtoSPV(SPV_ENV_VULKAN_1_1, 0, spv_source[i].data(), spv);
+            module_create_info.pCode = spv.data();
+            module_create_info.codeSize = spv.size() * sizeof(unsigned int);
+            module_create_info.flags = 0;
+
+            VkResult result = vk::CreateShaderModule(m_device->handle(), &module_create_info, NULL, &shader_module[i]);
+
+            // NOTE: It appears that for the case of invalid capabilities some drivers (recent AMD) fail at CreateShaderModule time.
+            //       Likely the capability test should be moved up to CSM time, implementing ShaderModuleCreateInfo-pCode-01090
+            //       Note(2) -- yes I truncated the above VUID s.t. the VUID checking tools would not catch it.
+            if (result != VK_SUCCESS) shader_module[i] = VK_NULL_HANDLE;
+
+            stage[i].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+            stage[i].pNext = nullptr;
+            stage[i].flags = 0;
+            // stage[i].stage is initialized later.
+            stage[i].module = shader_module[i];
+            stage[i].pName = "main";
+            stage[i].pSpecializationInfo = nullptr;
+        }
+
+        // CreateComputePipelines
+        VkComputePipelineCreateInfo pipeline_info = {};
+        pipeline_info.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
+        pipeline_info.pNext = nullptr;
+        pipeline_info.flags = 0;
+        pipeline_info.layout = pipeline_layout;
+        pipeline_info.basePipelineHandle = VK_NULL_HANDLE;
+        pipeline_info.basePipelineIndex = -1;
+        pipeline_info.stage = stage[0];
+        pipeline_info.stage.stage = VK_SHADER_STAGE_COMPUTE_BIT;
+
+        if (pipeline_info.stage.module != VK_NULL_HANDLE) {
+            if (!(subgroup_prop.supportedOperations & capability.second)) {
+                m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                                     "VkPhysicalDeviceSubgroupProperties::supportedOperations");
+            }
+            if (!(subgroup_prop.supportedStages & VK_SHADER_STAGE_COMPUTE_BIT)) {
+                m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                                     "VkPhysicalDeviceSubgroupProperties::supportedStages");
+            }
+
+            VkPipeline cs_pipeline;
+            vk::CreateComputePipelines(device(), VK_NULL_HANDLE, 1, &pipeline_info, nullptr, &cs_pipeline);
+            vk::DestroyPipeline(device(), cs_pipeline, nullptr);
+
+            m_errorMonitor->VerifyFound();
+        }
+
+        if ((stage[1].module != VK_NULL_HANDLE) && (stage[2].module != VK_NULL_HANDLE)) {
+            stage[1].stage = VK_SHADER_STAGE_VERTEX_BIT;
+            stage[2].stage = VK_SHADER_STAGE_FRAGMENT_BIT;
+
+            VkPipelineObj pipe(m_device);
+            pipe.AddShader(stage[1]);
+            pipe.AddShader(stage[2]);
+            pipe.AddDefaultColorAttachment();
+
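+            // One supportedOperations error is expected per graphics stage (vertex and fragment) declaring the
+            // capability, plus a supportedStages error for each stage the device does not support.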
+            if (!(subgroup_prop.supportedOperations & capability.second)) {
+                m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                                     "VkPhysicalDeviceSubgroupProperties::supportedOperations");
+            }
+            if (!(subgroup_prop.supportedStages & VK_SHADER_STAGE_VERTEX_BIT)) {
+                m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                                     "VkPhysicalDeviceSubgroupProperties::supportedStages");
+            }
+            if (!(subgroup_prop.supportedOperations & capability.second)) {
+                m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                                     "VkPhysicalDeviceSubgroupProperties::supportedOperations");
+            }
+            if (!(subgroup_prop.supportedStages & VK_SHADER_STAGE_FRAGMENT_BIT)) {
+                m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                                     "VkPhysicalDeviceSubgroupProperties::supportedStages");
+            }
+            pipe.CreateVKPipeline(pipeline_layout, renderPass());
+
+            m_errorMonitor->VerifyFound();
+        }
+
+        vk::DestroyShaderModule(device(), shader_module[0], nullptr);
+        vk::DestroyShaderModule(device(), shader_module[1], nullptr);
+        vk::DestroyShaderModule(device(), shader_module[2], nullptr);
+    }
+
+    vk::DestroyPipelineLayout(device(), pipeline_layout, nullptr);
+}
+
+TEST_F(VkLayerTest, SubgroupRequired) {
+    TEST_DESCRIPTION("Test that the minimum required functionality for subgroups is present.");
+
+    SetTargetApiVersion(VK_API_VERSION_1_1);
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    // 1.1 and up only.
+    if (m_device->props.apiVersion < VK_API_VERSION_1_1) {
+        printf("%s Vulkan 1.1 not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    if (DeviceIsMockICD() || DeviceSimulation()) {
+        printf("%s DevSim doesn't support Vulkan 1.1, skipping tests\n", kSkipPrefix);
+        return;
+    }
+
+    VkPhysicalDeviceSubgroupProperties subgroup_prop = GetSubgroupProperties(instance(), gpu());
+
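+    // Assert the Vulkan 1.1 minimums below: a subgroup size of at least 1, compute-stage subgroup support whenever a
+    // compute queue exists, and support for the basic subgroup operations.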
+    auto queue_family_properties = m_device->phy().queue_properties();
+
+    bool foundGraphics = false;
+    bool foundCompute = false;
+
+    for (auto queue_family : queue_family_properties) {
+        if (queue_family.queueFlags & VK_QUEUE_COMPUTE_BIT) {
+            foundCompute = true;
+            break;
+        }
+
+        if (queue_family.queueFlags & VK_QUEUE_GRAPHICS_BIT) {
+            foundGraphics = true;
+        }
+    }
+
+    if (!(foundGraphics || foundCompute)) return;
+
+    ASSERT_GE(subgroup_prop.subgroupSize, 1u);
+
+    if (foundCompute) {
+        ASSERT_TRUE(subgroup_prop.supportedStages & VK_SHADER_STAGE_COMPUTE_BIT);
+    }
+
+    ASSERT_TRUE(subgroup_prop.supportedOperations & VK_SUBGROUP_FEATURE_BASIC_BIT);
+}
+
+TEST_F(VkLayerTest, SubgroupExtendedTypesEnabled) {
+    TEST_DESCRIPTION("Test VK_KHR_shader_subgroup_extended_types.");
+    SetTargetApiVersion(VK_API_VERSION_1_1);
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    std::array<const char *, 2> required_device_extensions = {
+        {VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME, VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME}};
+    for (auto device_extension : required_device_extensions) {
+        if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
+            m_device_extension_names.push_back(device_extension);
+        } else {
+            printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
+            return;
+        }
+    }
+
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+    auto float16_features = lvl_init_struct<VkPhysicalDeviceFloat16Int8FeaturesKHR>();
+    auto extended_types_features = lvl_init_struct<VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR>(&float16_features);
+    auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&extended_types_features);
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+
+    VkPhysicalDeviceSubgroupProperties subgroup_prop = GetSubgroupProperties(instance(), gpu());
+    if (!(subgroup_prop.supportedOperations & VK_SUBGROUP_FEATURE_ARITHMETIC_BIT) ||
+        !(subgroup_prop.supportedStages & VK_SHADER_STAGE_COMPUTE_BIT) || !float16_features.shaderFloat16 ||
+        !extended_types_features.shaderSubgroupExtendedTypes) {
+        printf("%s Required features not supported, skipping tests\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
+
+    std::vector<VkDescriptorSetLayoutBinding> bindings(0);
+    const VkDescriptorSetLayoutObj dsl(m_device, bindings);
+    const VkPipelineLayoutObj pl(m_device, {&dsl});
+
+    char const *csSource =
+        "#version 450\n"
+        "#extension GL_KHR_shader_subgroup_arithmetic : enable\n"
+        "#extension GL_EXT_shader_subgroup_extended_types_float16 : enable\n"
+        "#extension GL_EXT_shader_explicit_arithmetic_types_float16 : enable\n"
+        "layout(local_size_x = 32) in;\n"
+        "void main() {\n"
+        "   subgroupAdd(float16_t(0.0));\n"
+        "}\n";
+
+    CreateComputePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.cs_.reset(
+        new VkShaderObj(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this, "main", false, nullptr, /*SPIR-V 1.3*/ 3));
+    pipe.InitState();
+    pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {});
+    pipe.CreateComputePipeline();
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkLayerTest, SubgroupExtendedTypesDisabled) {
+    TEST_DESCRIPTION("Test VK_KHR_shader_subgroup_extended_types.");
+    SetTargetApiVersion(VK_API_VERSION_1_1);
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    std::array<const char *, 2> required_device_extensions = {
+        {VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME, VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME}};
+    for (auto device_extension : required_device_extensions) {
+        if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
+            m_device_extension_names.push_back(device_extension);
+        } else {
+            printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
+            return;
+        }
+    }
+
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+    auto float16_features = lvl_init_struct<VkPhysicalDeviceFloat16Int8FeaturesKHR>();
+    auto extended_types_features = lvl_init_struct<VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR>(&float16_features);
+    auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&extended_types_features);
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+
+    VkPhysicalDeviceSubgroupProperties subgroup_prop = GetSubgroupProperties(instance(), gpu());
+    if (!(subgroup_prop.supportedOperations & VK_SUBGROUP_FEATURE_ARITHMETIC_BIT) ||
+        !(subgroup_prop.supportedStages & VK_SHADER_STAGE_COMPUTE_BIT) || !float16_features.shaderFloat16) {
+        printf("%s Required features not supported, skipping tests\n", kSkipPrefix);
+        return;
+    }
+
+    // Disable extended types support and expect an error
+    extended_types_features.shaderSubgroupExtendedTypes = VK_FALSE;
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
+
+    std::vector<VkDescriptorSetLayoutBinding> bindings(0);
+    const VkDescriptorSetLayoutObj dsl(m_device, bindings);
+    const VkPipelineLayoutObj pl(m_device, {&dsl});
+
+    char const *csSource =
+        "#version 450\n"
+        "#extension GL_KHR_shader_subgroup_arithmetic : enable\n"
+        "#extension GL_EXT_shader_subgroup_extended_types_float16 : enable\n"
+        "#extension GL_EXT_shader_explicit_arithmetic_types_float16 : enable\n"
+        "layout(local_size_x = 32) in;\n"
+        "void main() {\n"
+        "   subgroupAdd(float16_t(0.0));\n"
+        "}\n";
+
+    CreateComputePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.cs_.reset(
+        new VkShaderObj(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this, "main", false, nullptr, /*SPIR-V 1.3*/ 3));
+    pipe.InitState();
+    pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {});
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR::shaderSubgroupExtendedTypes");
+    pipe.CreateComputePipeline();
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, GraphicsPipelineStageCreationFeedbackCount) {
+    TEST_DESCRIPTION("Test graphics pipeline feedback stage count check.");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    auto feedback_info = lvl_init_struct<VkPipelineCreationFeedbackCreateInfoEXT>();
+    VkPipelineCreationFeedbackEXT feedbacks[3] = {};
+    // Set flags to a known value that the driver has to overwrite
+    feedbacks[0].flags = VK_PIPELINE_CREATION_FEEDBACK_FLAG_BITS_MAX_ENUM_EXT;
+
+    feedback_info.pPipelineCreationFeedback = &feedbacks[0];
+    feedback_info.pipelineStageCreationFeedbackCount = 2;
+    feedback_info.pPipelineStageCreationFeedbacks = &feedbacks[1];
+
+    auto set_feedback = [&feedback_info](CreatePipelineHelper &helper) { helper.gp_ci_.pNext = &feedback_info; };
+
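+    // The graphics pipeline below has two stages (vertex and fragment), so a feedback count of 2 is the positive
+    // case; reducing it to 1 afterwards must trigger the stage-count VUID.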
+    CreatePipelineHelper::OneshotTest(*this, set_feedback, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02668",
+                                      true);
+
+    if (DeviceIsMockICD() || DeviceSimulation()) {
+        printf("%s Driver data writeback check not supported by MockICD, skipping.\n", kSkipPrefix);
+    } else {
+        m_errorMonitor->ExpectSuccess();
+        if (feedback_info.pPipelineCreationFeedback->flags == VK_PIPELINE_CREATION_FEEDBACK_FLAG_BITS_MAX_ENUM_EXT) {
+            m_errorMonitor->SetError("ValidationLayers did not return GraphicsPipelineFeedback driver data properly.");
+        }
+        m_errorMonitor->VerifyNotFound();
+    }
+
+    feedback_info.pipelineStageCreationFeedbackCount = 1;
+    CreatePipelineHelper::OneshotTest(*this, set_feedback, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02668",
+                                      false);
+}
+
+TEST_F(VkLayerTest, ComputePipelineStageCreationFeedbackCount) {
+    TEST_DESCRIPTION("Test compute pipeline feedback stage count check.");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkPipelineCreationFeedbackCreateInfoEXT feedback_info = {};
+    VkPipelineCreationFeedbackEXT feedbacks[3] = {};
+    feedback_info.sType = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT;
+    feedback_info.pPipelineCreationFeedback = &feedbacks[0];
+    feedback_info.pipelineStageCreationFeedbackCount = 1;
+    feedback_info.pPipelineStageCreationFeedbacks = &feedbacks[1];
+
+    const auto set_info = [&](CreateComputePipelineHelper &helper) { helper.cp_ci_.pNext = &feedback_info; };
+
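+    // A compute pipeline has exactly one stage, so a feedback count of 1 is the positive case; a count of 2 below
+    // must trigger the stage-count VUID.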
+    CreateComputePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT, "", true);
+
+    feedback_info.pipelineStageCreationFeedbackCount = 2;
+    CreateComputePipelineHelper::OneshotTest(
+        *this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02669");
+}
+
+TEST_F(VkLayerTest, NVRayTracingPipelineStageCreationFeedbackCount) {
+    TEST_DESCRIPTION("Test NV ray tracing pipeline feedback stage count check.");
+
+    if (!CreateNVRayTracingPipelineHelper::InitInstanceExtensions(*this, m_instance_extension_names)) {
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME);
+        return;
+    }
+
+    if (!CreateNVRayTracingPipelineHelper::InitDeviceExtensions(*this, m_device_extension_names)) {
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    auto feedback_info = lvl_init_struct<VkPipelineCreationFeedbackCreateInfoEXT>();
+    VkPipelineCreationFeedbackEXT feedbacks[4] = {};
+
+    feedback_info.pPipelineCreationFeedback = &feedbacks[0];
+    feedback_info.pipelineStageCreationFeedbackCount = 2;
+    feedback_info.pPipelineStageCreationFeedbacks = &feedbacks[1];
+
+    auto set_feedback = [&feedback_info](CreateNVRayTracingPipelineHelper &helper) { helper.rp_ci_.pNext = &feedback_info; };
+
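+    // The ray tracing helper pipeline uses three shader stages, so a feedback count of 3 is the positive case and a
+    // count of 2 must trigger the stage-count VUID.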
+    feedback_info.pipelineStageCreationFeedbackCount = 3;
+    CreateNVRayTracingPipelineHelper::OneshotPositiveTest(*this, set_feedback);
+
+    feedback_info.pipelineStageCreationFeedbackCount = 2;
+    CreateNVRayTracingPipelineHelper::OneshotTest(
+        *this, set_feedback, "VUID-VkPipelineCreationFeedbackCreateInfoEXT-pipelineStageCreationFeedbackCount-02670");
+}
+
+TEST_F(VkLayerTest, CreatePipelineCheckShaderImageFootprintEnabled) {
+    TEST_DESCRIPTION("Create a pipeline requiring the shader image footprint feature which has not been enabled on the device.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    if (!DeviceExtensionSupported(gpu(), nullptr, VK_NV_SHADER_IMAGE_FOOTPRINT_EXTENSION_NAME)) {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_NV_SHADER_IMAGE_FOOTPRINT_EXTENSION_NAME);
+        return;
+    }
+
+    std::vector<const char *> device_extension_names;
+    auto features = m_device->phy().features();
+
+    // Disable the image footprint feature.
+    auto image_footprint_features = lvl_init_struct<VkPhysicalDeviceShaderImageFootprintFeaturesNV>();
+    image_footprint_features.imageFootprint = VK_FALSE;
+
+    VkDeviceObj test_device(0, gpu(), device_extension_names, &features, &image_footprint_features);
+
+    char const *fsSource =
+        "#version 450\n"
+        "#extension GL_NV_shader_texture_footprint  : require\n"
+        "layout(set=0, binding=0) uniform sampler2D s;\n"
+        "layout(location=0) out vec4 color;\n"
+        "void main(){\n"
+        "  gl_TextureFootprint2DNV footprint;\n"
+        "  if (textureFootprintNV(s, vec2(1.0), 5, false, footprint)) {\n"
+        "    color = vec4(0.0, 1.0, 0.0, 1.0);\n"
+        "  } else {\n"
+        "    color = vec4(vec2(footprint.anchor), vec2(footprint.offset));\n"
+        "  }\n"
+        "}\n";
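+    // textureFootprintNV requires the imageFootprint feature, which was disabled when test_device was created.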
+
+    VkShaderObj vs(&test_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(&test_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    VkRenderpassObj render_pass(&test_device);
+
+    VkPipelineObj pipe(&test_device);
+    pipe.AddDefaultColorAttachment();
+    pipe.AddShader(&vs);
+    pipe.AddShader(&fs);
+
+    VkDescriptorSetLayoutBinding binding = {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr};
+    const VkDescriptorSetLayoutObj ds_layout(&test_device, {binding});
+    ASSERT_TRUE(ds_layout.initialized());
+
+    const VkPipelineLayoutObj pipeline_layout(&test_device, {&ds_layout});
+
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "Shader requires VkPhysicalDeviceShaderImageFootprintFeaturesNV::imageFootprint but is not enabled on the device");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "Shader requires extension VkPhysicalDeviceShaderImageFootprintFeaturesNV::imageFootprint "
+                                         "but is not enabled on the device");
+    pipe.CreateVKPipeline(pipeline_layout.handle(), render_pass.handle());
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineCheckFragmentShaderBarycentricEnabled) {
+    TEST_DESCRIPTION("Create a pipeline requiring the fragment shader barycentric feature which has not been enabled on the device.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    std::vector<const char *> device_extension_names;
+    auto features = m_device->phy().features();
+
+    // Disable the fragment shader barycentric feature.
+    auto fragment_shader_barycentric_features = lvl_init_struct<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV>();
+    fragment_shader_barycentric_features.fragmentShaderBarycentric = VK_FALSE;
+
+    VkDeviceObj test_device(0, gpu(), device_extension_names, &features, &fragment_shader_barycentric_features);
+
+    char const *fsSource =
+        "#version 450\n"
+        "#extension GL_NV_fragment_shader_barycentric : require\n"
+        "layout(location=0) out float value;\n"
+        "void main(){\n"
+        "  value = gl_BaryCoordNV.x;\n"
+        "}\n";
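+    // gl_BaryCoordNV requires the fragmentShaderBarycentric feature, which was disabled when test_device was created.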
+
+    VkShaderObj vs(&test_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(&test_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    VkRenderpassObj render_pass(&test_device);
+
+    VkPipelineObj pipe(&test_device);
+    pipe.AddDefaultColorAttachment();
+    pipe.AddShader(&vs);
+    pipe.AddShader(&fs);
+
+    const VkPipelineLayoutObj pipeline_layout(&test_device);
+
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "Shader requires VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV::fragmentShaderBarycentric but is not enabled on the "
+        "device");
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "Shader requires extension VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV::fragmentShaderBarycentric but is not "
+        "enabled on the device");
+    pipe.CreateVKPipeline(pipeline_layout.handle(), render_pass.handle());
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineCheckComputeShaderDerivativesEnabled) {
+    TEST_DESCRIPTION("Create a pipeline requiring the compute shader derivatives feature which has not been enabled on the device.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    std::vector<const char *> device_extension_names;
+    auto features = m_device->phy().features();
+
+    // Disable the compute shader derivatives features.
+    auto compute_shader_derivatives_features = lvl_init_struct<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV>();
+    compute_shader_derivatives_features.computeDerivativeGroupLinear = VK_FALSE;
+    compute_shader_derivatives_features.computeDerivativeGroupQuads = VK_FALSE;
+
+    VkDeviceObj test_device(0, gpu(), device_extension_names, &features, &compute_shader_derivatives_features);
+
+    VkDescriptorSetLayoutBinding binding = {0, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr};
+    const VkDescriptorSetLayoutObj dsl(&test_device, {binding});
+    const VkPipelineLayoutObj pl(&test_device, {&dsl});
+
+    char const *csSource =
+        "#version 450\n"
+        "#extension GL_NV_compute_shader_derivatives : require\n"
+        "\n"
+        "layout(local_size_x=2, local_size_y=4) in;\n"
+        "layout(derivative_group_quadsNV) in;\n"
+        "\n"
+        "layout(set=0, binding=0) buffer InputOutputBuffer {\n"
+        "  float values[];\n"
+        "};\n"
+        "\n"
+        "void main(){\n"
+        "   values[gl_LocalInvocationIndex] = dFdx(values[gl_LocalInvocationIndex]);"
+        "}\n";
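+    // derivative_group_quadsNV requires the computeDerivativeGroupQuads feature, which was disabled when test_device
+    // was created.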
+
+    VkShaderObj cs(&test_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this);
+
+    VkComputePipelineCreateInfo cpci = {VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
+                                        nullptr,
+                                        0,
+                                        {VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, nullptr, 0,
+                                         VK_SHADER_STAGE_COMPUTE_BIT, cs.handle(), "main", nullptr},
+                                        pl.handle(),
+                                        VK_NULL_HANDLE,
+                                        -1};
+
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "Shader requires VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::computeDerivativeGroupQuads but is not enabled on the "
+        "device");
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "Shader requires extension VkPhysicalDeviceComputeShaderDerivativesFeaturesNV::computeDerivativeGroupQuads but is not "
+        "enabled on the device");
+
+    VkPipeline pipe = VK_NULL_HANDLE;
+    vk::CreateComputePipelines(test_device.device(), VK_NULL_HANDLE, 1, &cpci, nullptr, &pipe);
+    m_errorMonitor->VerifyFound();
+    vk::DestroyPipeline(test_device.device(), pipe, nullptr);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineCheckFragmentShaderInterlockEnabled) {
+    TEST_DESCRIPTION("Create a pipeline requiring the fragment shader interlock feature which has not been enabled on the device.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    std::vector<const char *> device_extension_names;
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME)) {
+        // Note: we intentionally do not add the required extension to the device extension list
+        //       in order to create the error below.
+    } else {
+        // We skip this test if the extension is not supported by the driver, as in some cases this will cause
+        // vk::CreateShaderModule to fail without generating an error message.
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME);
+        return;
+    }
+
+    auto features = m_device->phy().features();
+
+    // Disable the fragment shader interlock feature.
+    auto fragment_shader_interlock_features = lvl_init_struct<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT>();
+    fragment_shader_interlock_features.fragmentShaderSampleInterlock = VK_FALSE;
+    fragment_shader_interlock_features.fragmentShaderPixelInterlock = VK_FALSE;
+    fragment_shader_interlock_features.fragmentShaderShadingRateInterlock = VK_FALSE;
+
+    VkDeviceObj test_device(0, gpu(), device_extension_names, &features, &fragment_shader_interlock_features);
+
+    char const *fsSource =
+        "#version 450\n"
+        "#extension GL_ARB_fragment_shader_interlock : require\n"
+        "layout(sample_interlock_ordered) in;\n"
+        "void main(){\n"
+        "}\n";
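+    // sample_interlock_ordered requires the fragmentShaderSampleInterlock feature, which was disabled when
+    // test_device was created.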
+
+    VkShaderObj vs(&test_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(&test_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    VkRenderpassObj render_pass(&test_device);
+
+    VkPipelineObj pipe(&test_device);
+    pipe.AddDefaultColorAttachment();
+    pipe.AddShader(&vs);
+    pipe.AddShader(&fs);
+
+    const VkPipelineLayoutObj pipeline_layout(&test_device);
+
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "Shader requires VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::fragmentShaderSampleInterlock but is not enabled on "
+        "the device");
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "Shader requires extension VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT::fragmentShaderSampleInterlock but is not "
+        "enabled on the device");
+    pipe.CreateVKPipeline(pipeline_layout.handle(), render_pass.handle());
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineCheckDemoteToHelperInvocation) {
+    TEST_DESCRIPTION("Create a pipeline requiring the demote to helper invocation feature which has not been enabled on the device.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    std::vector<const char *> device_extension_names;
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME)) {
+        // Note: we intentionally do not add the required extension to the device extension list
+        //       in order to create the error below.
+    } else {
+        // We skip this test if the extension is not supported by the driver, as in some cases this will cause
+        // vk::CreateShaderModule to fail without generating an error message.
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME);
+        return;
+    }
+
+    auto features = m_device->phy().features();
+
+    // Disable the demote to helper invocation feature.
+    auto demote_features = lvl_init_struct<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>();
+    demote_features.shaderDemoteToHelperInvocation = VK_FALSE;
+
+    VkDeviceObj test_device(0, gpu(), device_extension_names, &features, &demote_features);
+
+    char const *fsSource =
+        "#version 450\n"
+        "#extension GL_EXT_demote_to_helper_invocation : require\n"
+        "void main(){\n"
+        "    demote;\n"
+        "}\n";
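+    // The demote keyword requires the shaderDemoteToHelperInvocation feature, which was disabled when test_device
+    // was created.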
+
+    VkShaderObj vs(&test_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(&test_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    VkRenderpassObj render_pass(&test_device);
+
+    VkPipelineObj pipe(&test_device);
+    pipe.AddDefaultColorAttachment();
+    pipe.AddShader(&vs);
+    pipe.AddShader(&fs);
+
+    const VkPipelineLayoutObj pipeline_layout(&test_device);
+
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "Shader requires VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT::shaderDemoteToHelperInvocation but is not "
+        "enabled on "
+        "the device");
+    m_errorMonitor->SetDesiredFailureMsg(
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        "Shader requires extension VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT::shaderDemoteToHelperInvocation but "
+        "is not "
+        "enabled on the device");
+    pipe.CreateVKPipeline(pipeline_layout.handle(), render_pass.handle());
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, CreatePipelineCheckLineRasterization) {
+    TEST_DESCRIPTION("Test VK_EXT_line_rasterization state against feature enables.");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    std::array<const char *, 1> required_device_extensions = {{VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME}};
+    for (auto device_extension : required_device_extensions) {
+        if (DeviceExtensionSupported(gpu(), nullptr, device_extension)) {
+            m_device_extension_names.push_back(device_extension);
+        } else {
+            printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, device_extension);
+            return;
+        }
+    }
+
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+    auto line_rasterization_features = lvl_init_struct<VkPhysicalDeviceLineRasterizationFeaturesEXT>();
+    auto features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&line_rasterization_features);
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+
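+    // Create the device with every line rasterization feature disabled so that each mode/stipple combination below
+    // triggers its corresponding VUID.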
+    line_rasterization_features.rectangularLines = VK_FALSE;
+    line_rasterization_features.bresenhamLines = VK_FALSE;
+    line_rasterization_features.smoothLines = VK_FALSE;
+    line_rasterization_features.stippledRectangularLines = VK_FALSE;
+    line_rasterization_features.stippledBresenhamLines = VK_FALSE;
+    line_rasterization_features.stippledSmoothLines = VK_FALSE;
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    CreatePipelineHelper::OneshotTest(
+        *this,
+        [&](CreatePipelineHelper &helper) {
+            helper.line_state_ci_.lineRasterizationMode = VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT;
+            helper.pipe_ms_state_ci_.alphaToCoverageEnable = VK_TRUE;
+        },
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        std::vector<const char *>{"VUID-VkGraphicsPipelineCreateInfo-lineRasterizationMode-02766",
+                                  "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-02769"});
+
+    CreatePipelineHelper::OneshotTest(
+        *this,
+        [&](CreatePipelineHelper &helper) {
+            helper.line_state_ci_.lineRasterizationMode = VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT;
+            helper.line_state_ci_.stippledLineEnable = VK_TRUE;
+        },
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        std::vector<const char *>{"VUID-VkGraphicsPipelineCreateInfo-stippledLineEnable-02767",
+                                  "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-02769",
+                                  "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02772"});
+
+    CreatePipelineHelper::OneshotTest(
+        *this,
+        [&](CreatePipelineHelper &helper) {
+            helper.line_state_ci_.lineRasterizationMode = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT;
+            helper.line_state_ci_.stippledLineEnable = VK_TRUE;
+        },
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        std::vector<const char *>{"VUID-VkGraphicsPipelineCreateInfo-stippledLineEnable-02767",
+                                  "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-02768",
+                                  "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02771"});
+
+    CreatePipelineHelper::OneshotTest(
+        *this,
+        [&](CreatePipelineHelper &helper) {
+            helper.line_state_ci_.lineRasterizationMode = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT;
+            helper.line_state_ci_.stippledLineEnable = VK_TRUE;
+        },
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        std::vector<const char *>{"VUID-VkGraphicsPipelineCreateInfo-stippledLineEnable-02767",
+                                  "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-lineRasterizationMode-02770",
+                                  "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02773"});
+
+    CreatePipelineHelper::OneshotTest(
+        *this,
+        [&](CreatePipelineHelper &helper) {
+            helper.line_state_ci_.lineRasterizationMode = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT;
+            helper.line_state_ci_.stippledLineEnable = VK_TRUE;
+        },
+        VK_DEBUG_REPORT_ERROR_BIT_EXT,
+        std::vector<const char *>{"VUID-VkGraphicsPipelineCreateInfo-stippledLineEnable-02767",
+                                  "VUID-VkPipelineRasterizationLineStateCreateInfoEXT-stippledLineEnable-02774"});
+
+    PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT =
+        (PFN_vkCmdSetLineStippleEXT)vk::GetDeviceProcAddr(m_device->device(), "vkCmdSetLineStippleEXT");
+    ASSERT_TRUE(vkCmdSetLineStippleEXT != nullptr);
+
+    m_commandBuffer->begin();
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdSetLineStippleEXT-lineStippleFactor-02776");
+    vkCmdSetLineStippleEXT(m_commandBuffer->handle(), 0, 0);
+    m_errorMonitor->VerifyFound();
+    vkCmdSetLineStippleEXT(m_commandBuffer->handle(), 1, 1);
+    m_errorMonitor->VerifyFound();
+}
+
+TEST_F(VkLayerTest, FillRectangleNV) {
+    TEST_DESCRIPTION("Verify VK_NV_fill_rectangle");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    VkPhysicalDeviceFeatures device_features = {};
+    ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&device_features));
+
+    // Disable non-solid fill modes to make sure that the usage of VK_POLYGON_MODE_LINE and
+    // VK_POLYGON_MODE_POINT will cause an error when the VK_NV_fill_rectangle extension is enabled.
+    device_features.fillModeNonSolid = VK_FALSE;
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_FILL_RECTANGLE_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_NV_FILL_RECTANGLE_EXTENSION_NAME);
+    } else {
+        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_NV_FILL_RECTANGLE_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState(&device_features));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkPolygonMode polygon_mode = VK_POLYGON_MODE_LINE;
+
+    auto set_polygon_mode = [&polygon_mode](CreatePipelineHelper &helper) { helper.rs_state_ci_.polygonMode = polygon_mode; };
+
+    // Set unsupported polygon mode VK_POLYGON_MODE_LINE
+    CreatePipelineHelper::OneshotTest(*this, set_polygon_mode, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01507", false);
+
+    // Set unsupported polygon mode VK_POLYGON_MODE_POINT
+    polygon_mode = VK_POLYGON_MODE_POINT;
+    CreatePipelineHelper::OneshotTest(*this, set_polygon_mode, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01507", false);
+
+    // Set supported polygon mode VK_POLYGON_MODE_FILL
+    polygon_mode = VK_POLYGON_MODE_FILL;
+    CreatePipelineHelper::OneshotTest(*this, set_polygon_mode, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01507", true);
+
+    // Set supported polygon mode VK_POLYGON_MODE_FILL_RECTANGLE_NV
+    polygon_mode = VK_POLYGON_MODE_FILL_RECTANGLE_NV;
+    CreatePipelineHelper::OneshotTest(*this, set_polygon_mode, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                      "VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01507", true);
+}
+
+TEST_F(VkLayerTest, NotCompatibleForSet) {
+    TEST_DESCRIPTION("Check that validation path catches pipeline layout inconsistencies for bind vs. dispatch");
+    m_errorMonitor->ExpectSuccess();
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    auto c_queue = m_device->GetDefaultComputeQueue();
+    if (nullptr == c_queue) {
+        printf("%s Compute not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    uint32_t qfi = 0;
+    VkBufferCreateInfo bci = {};
+    bci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    bci.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
+    bci.size = 4;
+    bci.queueFamilyIndexCount = 1;
+    bci.pQueueFamilyIndices = &qfi;
+    VkBufferObj storage_buffer;
+    VkMemoryPropertyFlags mem_props = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+    storage_buffer.init(*m_device, bci, mem_props);
+
+    VkBufferObj uniform_buffer;
+    bci.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+    bci.size = 20;
+    uniform_buffer.init(*m_device, bci, mem_props);
+
+    OneOffDescriptorSet::Bindings binding_defs = {
+        {0, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+        {1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+    };
+    const VkDescriptorSetLayoutObj pipeline_dsl(m_device, binding_defs);
+    const VkPipelineLayoutObj pipeline_layout(m_device, {&pipeline_dsl});
+
+    // We will now use a slightly different layout definition for the descriptors we actually bind with (but one that
+    // is still correct for the shader).
+    binding_defs[1].stageFlags = VK_SHADER_STAGE_COMPUTE_BIT;
+    OneOffDescriptorSet binding_descriptor_set(m_device, binding_defs);
+    const VkPipelineLayoutObj binding_pipeline_layout(m_device, {&binding_descriptor_set.layout_});
+
+    VkDescriptorBufferInfo storage_buffer_info = {storage_buffer.handle(), 0, sizeof(uint32_t)};
+    VkDescriptorBufferInfo uniform_buffer_info = {uniform_buffer.handle(), 0, 5 * sizeof(uint32_t)};
+
+    VkWriteDescriptorSet descriptor_writes[2] = {};
+    descriptor_writes[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_writes[0].dstSet = binding_descriptor_set.set_;
+    descriptor_writes[0].dstBinding = 0;
+    descriptor_writes[0].descriptorCount = 1;
+    descriptor_writes[0].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
+    descriptor_writes[0].pBufferInfo = &storage_buffer_info;
+
+    descriptor_writes[1].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_writes[1].dstSet = binding_descriptor_set.set_;
+    descriptor_writes[1].dstBinding = 1;
+    descriptor_writes[1].descriptorCount = 1;  // Write 4 bytes to val
+    descriptor_writes[1].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    descriptor_writes[1].pBufferInfo = &uniform_buffer_info;
+    vk::UpdateDescriptorSets(m_device->device(), 2, descriptor_writes, 0, NULL);
+
+    char const *csSource =
+        "#version 450\n"
+        "#extension GL_EXT_nonuniform_qualifier : enable\n "
+        "layout(set = 0, binding = 0) buffer StorageBuffer { uint index; } u_index;"
+        "layout(set = 0, binding = 1) uniform UniformStruct { ivec4 dummy; int val; } ubo;\n"
+
+        "void main() {\n"
+        "    u_index.index = ubo.val;\n"
+        "}\n";
+
+    VkShaderObj shader_module(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this);
+
+    VkPipelineShaderStageCreateInfo stage;
+    stage.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+    stage.pNext = nullptr;
+    stage.flags = 0;
+    stage.stage = VK_SHADER_STAGE_COMPUTE_BIT;
+    stage.module = shader_module.handle();
+    stage.pName = "main";
+    stage.pSpecializationInfo = nullptr;
+
+    // CreateComputePipelines
+    VkComputePipelineCreateInfo pipeline_info = {};
+    pipeline_info.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
+    pipeline_info.pNext = nullptr;
+    pipeline_info.flags = 0;
+    pipeline_info.layout = pipeline_layout.handle();
+    pipeline_info.basePipelineHandle = VK_NULL_HANDLE;
+    pipeline_info.basePipelineIndex = -1;
+    pipeline_info.stage = stage;
+
+    VkPipeline c_pipeline;
+    vk::CreateComputePipelines(device(), VK_NULL_HANDLE, 1, &pipeline_info, nullptr, &c_pipeline);
+
+    m_commandBuffer->begin();
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, c_pipeline);
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, binding_pipeline_layout.handle(), 0, 1,
+                              &binding_descriptor_set.set_, 0, nullptr);
+    m_errorMonitor->VerifyNotFound();
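+    // The bound descriptor set was created from a layout whose stageFlags differ from the pipeline layout used at
+    // pipeline creation, so the dispatch must be flagged as incompatible.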
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdDispatch-None-02697");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                         "UNASSIGNED-CoreValidation-DrawState-PipelineLayoutsIncompatible");
+    vk::CmdDispatch(m_commandBuffer->handle(), 1, 1, 1);
+    m_errorMonitor->VerifyFound();
+    m_commandBuffer->end();
+
+    vk::DestroyPipeline(device(), c_pipeline, nullptr);
+}
+
+TEST_F(VkLayerTest, RayTracingPipelineShaderGroups) {
+    TEST_DESCRIPTION("Validate shader groups during ray-tracing pipeline creation");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_RAY_TRACING_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_NV_RAY_TRACING_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    } else {
+        printf("%s Extension %s is not supported.\n", kSkipPrefix, VK_NV_RAY_TRACING_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    const VkPipelineLayoutObj empty_pipeline_layout(m_device, {});
+
+    const std::string empty_shader = R"glsl(#version 460
+        #extension GL_NV_ray_tracing : require
+        void main() {}
+    )glsl";
+
+    VkShaderObj rgen_shader(m_device, empty_shader.c_str(), VK_SHADER_STAGE_RAYGEN_BIT_NV, this, "main");
+    VkShaderObj ahit_shader(m_device, empty_shader.c_str(), VK_SHADER_STAGE_ANY_HIT_BIT_NV, this, "main");
+    VkShaderObj chit_shader(m_device, empty_shader.c_str(), VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV, this, "main");
+    VkShaderObj miss_shader(m_device, empty_shader.c_str(), VK_SHADER_STAGE_MISS_BIT_NV, this, "main");
+    VkShaderObj intr_shader(m_device, empty_shader.c_str(), VK_SHADER_STAGE_INTERSECTION_BIT_NV, this, "main");
+    VkShaderObj call_shader(m_device, empty_shader.c_str(), VK_SHADER_STAGE_CALLABLE_BIT_NV, this, "main");
+
+    m_errorMonitor->VerifyNotFound();
+
+    PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV =
+        reinterpret_cast<PFN_vkCreateRayTracingPipelinesNV>(vk::GetInstanceProcAddr(instance(), "vkCreateRayTracingPipelinesNV"));
+    ASSERT_TRUE(vkCreateRayTracingPipelinesNV != nullptr);
+
+    VkPipeline pipeline = VK_NULL_HANDLE;
+
+    // No raygen stage
+    {
+        VkPipelineShaderStageCreateInfo stage_create_info = {};
+        stage_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+        stage_create_info.stage = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV;
+        stage_create_info.module = chit_shader.handle();
+        stage_create_info.pName = "main";
+
+        VkRayTracingShaderGroupCreateInfoNV group_create_info = {};
+        group_create_info.sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+        group_create_info.type = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV;
+        group_create_info.generalShader = VK_SHADER_UNUSED_NV;
+        group_create_info.closestHitShader = 0;
+        group_create_info.anyHitShader = VK_SHADER_UNUSED_NV;
+        group_create_info.intersectionShader = VK_SHADER_UNUSED_NV;
+
+        VkRayTracingPipelineCreateInfoNV pipeline_ci = {};
+        pipeline_ci.sType = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV;
+        pipeline_ci.stageCount = 1;
+        pipeline_ci.pStages = &stage_create_info;
+        pipeline_ci.groupCount = 1;
+        pipeline_ci.pGroups = &group_create_info;
+        pipeline_ci.layout = empty_pipeline_layout.handle();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkRayTracingPipelineCreateInfoNV-stage-02408");
+        vkCreateRayTracingPipelinesNV(m_device->handle(), VK_NULL_HANDLE, 1, &pipeline_ci, nullptr, &pipeline);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Two raygen stages
+    {
+        VkPipelineShaderStageCreateInfo stage_create_infos[2] = {};
+        stage_create_infos[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+        stage_create_infos[0].stage = VK_SHADER_STAGE_RAYGEN_BIT_NV;
+        stage_create_infos[0].module = rgen_shader.handle();
+        stage_create_infos[0].pName = "main";
+
+        stage_create_infos[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+        stage_create_infos[1].stage = VK_SHADER_STAGE_RAYGEN_BIT_NV;
+        stage_create_infos[1].module = rgen_shader.handle();
+        stage_create_infos[1].pName = "main";
+
+        VkRayTracingShaderGroupCreateInfoNV group_create_infos[2] = {};
+        group_create_infos[0].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+        group_create_infos[0].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV;
+        group_create_infos[0].generalShader = 0;
+        group_create_infos[0].closestHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[0].anyHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[0].intersectionShader = VK_SHADER_UNUSED_NV;
+
+        group_create_infos[1].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+        group_create_infos[1].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV;
+        group_create_infos[1].generalShader = 1;
+        group_create_infos[1].closestHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[1].anyHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[1].intersectionShader = VK_SHADER_UNUSED_NV;
+
+        VkRayTracingPipelineCreateInfoNV pipeline_ci = {};
+        pipeline_ci.sType = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV;
+        pipeline_ci.stageCount = 2;
+        pipeline_ci.pStages = stage_create_infos;
+        pipeline_ci.groupCount = 2;
+        pipeline_ci.pGroups = group_create_infos;
+        pipeline_ci.layout = empty_pipeline_layout.handle();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkRayTracingPipelineCreateInfoNV-stage-02408");
+        vkCreateRayTracingPipelinesNV(m_device->handle(), VK_NULL_HANDLE, 1, &pipeline_ci, nullptr, &pipeline);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // General shader index doesn't exist
+    {
+        VkPipelineShaderStageCreateInfo stage_create_info = {};
+        stage_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+        stage_create_info.stage = VK_SHADER_STAGE_RAYGEN_BIT_NV;
+        stage_create_info.module = rgen_shader.handle();
+        stage_create_info.pName = "main";
+
+        VkRayTracingShaderGroupCreateInfoNV group_create_info = {};
+        group_create_info.sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+        group_create_info.type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
+        group_create_info.generalShader = 1;  // Bad index here
+        group_create_info.closestHitShader = VK_SHADER_UNUSED_NV;
+        group_create_info.anyHitShader = VK_SHADER_UNUSED_NV;
+        group_create_info.intersectionShader = VK_SHADER_UNUSED_NV;
+
+        VkRayTracingPipelineCreateInfoNV pipeline_ci = {};
+        pipeline_ci.sType = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV;
+        pipeline_ci.stageCount = 1;
+        pipeline_ci.pStages = &stage_create_info;
+        pipeline_ci.groupCount = 1;
+        pipeline_ci.pGroups = &group_create_info;
+        pipeline_ci.layout = empty_pipeline_layout.handle();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkRayTracingShaderGroupCreateInfoNV-type-02413");
+        vkCreateRayTracingPipelinesNV(m_device->handle(), VK_NULL_HANDLE, 1, &pipeline_ci, nullptr, &pipeline);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // General shader index doesn't correspond to a raygen/miss/callable shader
+    {
+        VkPipelineShaderStageCreateInfo stage_create_infos[2] = {};
+        stage_create_infos[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+        stage_create_infos[0].stage = VK_SHADER_STAGE_RAYGEN_BIT_NV;
+        stage_create_infos[0].module = rgen_shader.handle();
+        stage_create_infos[0].pName = "main";
+
+        stage_create_infos[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+        stage_create_infos[1].stage = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV;
+        stage_create_infos[1].module = chit_shader.handle();
+        stage_create_infos[1].pName = "main";
+
+        VkRayTracingShaderGroupCreateInfoNV group_create_infos[2] = {};
+        group_create_infos[0].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+        group_create_infos[0].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
+        group_create_infos[0].generalShader = 0;
+        group_create_infos[0].closestHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[0].anyHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[0].intersectionShader = VK_SHADER_UNUSED_NV;
+
+        group_create_infos[1].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+        group_create_infos[1].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
+        group_create_infos[1].generalShader = 1;  // Index 1 corresponds to a closest hit shader
+        group_create_infos[1].closestHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[1].anyHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[1].intersectionShader = VK_SHADER_UNUSED_NV;
+
+        VkRayTracingPipelineCreateInfoNV pipeline_ci = {};
+        pipeline_ci.sType = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV;
+        pipeline_ci.stageCount = 2;
+        pipeline_ci.pStages = stage_create_infos;
+        pipeline_ci.groupCount = 2;
+        pipeline_ci.pGroups = group_create_infos;
+        pipeline_ci.layout = empty_pipeline_layout.handle();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkRayTracingShaderGroupCreateInfoNV-type-02413");
+        vkCreateRayTracingPipelinesNV(m_device->handle(), VK_NULL_HANDLE, 1, &pipeline_ci, nullptr, &pipeline);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // General shader group should not specify non general shader
+    {
+        VkPipelineShaderStageCreateInfo stage_create_infos[2] = {};
+        stage_create_infos[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+        stage_create_infos[0].stage = VK_SHADER_STAGE_RAYGEN_BIT_NV;
+        stage_create_infos[0].module = rgen_shader.handle();
+        stage_create_infos[0].pName = "main";
+
+        stage_create_infos[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+        stage_create_infos[1].stage = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV;
+        stage_create_infos[1].module = chit_shader.handle();
+        stage_create_infos[1].pName = "main";
+
+        VkRayTracingShaderGroupCreateInfoNV group_create_infos[2] = {};
+        group_create_infos[0].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+        group_create_infos[0].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
+        group_create_infos[0].generalShader = 0;
+        group_create_infos[0].closestHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[0].anyHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[0].intersectionShader = VK_SHADER_UNUSED_NV;
+
+        group_create_infos[1].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+        group_create_infos[1].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
+        group_create_infos[1].generalShader = 0;
+        group_create_infos[1].closestHitShader = 0;  // This should not be set for a general shader group
+        group_create_infos[1].anyHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[1].intersectionShader = VK_SHADER_UNUSED_NV;
+
+        VkRayTracingPipelineCreateInfoNV pipeline_ci = {};
+        pipeline_ci.sType = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV;
+        pipeline_ci.stageCount = 2;
+        pipeline_ci.pStages = stage_create_infos;
+        pipeline_ci.groupCount = 2;
+        pipeline_ci.pGroups = group_create_infos;
+        pipeline_ci.layout = empty_pipeline_layout.handle();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkRayTracingShaderGroupCreateInfoNV-type-02414");
+        vkCreateRayTracingPipelinesNV(m_device->handle(), VK_NULL_HANDLE, 1, &pipeline_ci, nullptr, &pipeline);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Intersection shader invalid index
+    {
+        VkPipelineShaderStageCreateInfo stage_create_infos[2] = {};
+        stage_create_infos[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+        stage_create_infos[0].stage = VK_SHADER_STAGE_RAYGEN_BIT_NV;
+        stage_create_infos[0].module = rgen_shader.handle();
+        stage_create_infos[0].pName = "main";
+
+        stage_create_infos[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+        stage_create_infos[1].stage = VK_SHADER_STAGE_INTERSECTION_BIT_NV;
+        stage_create_infos[1].module = intr_shader.handle();
+        stage_create_infos[1].pName = "main";
+
+        VkRayTracingShaderGroupCreateInfoNV group_create_infos[2] = {};
+        group_create_infos[0].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+        group_create_infos[0].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
+        group_create_infos[0].generalShader = 0;
+        group_create_infos[0].closestHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[0].anyHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[0].intersectionShader = VK_SHADER_UNUSED_NV;
+
+        group_create_infos[1].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+        group_create_infos[1].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV;
+        group_create_infos[1].generalShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[1].closestHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[1].anyHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[1].intersectionShader = 5;  // invalid index
+
+        VkRayTracingPipelineCreateInfoNV pipeline_ci = {};
+        pipeline_ci.sType = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV;
+        pipeline_ci.stageCount = 2;
+        pipeline_ci.pStages = stage_create_infos;
+        pipeline_ci.groupCount = 2;
+        pipeline_ci.pGroups = group_create_infos;
+        pipeline_ci.layout = empty_pipeline_layout.handle();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkRayTracingShaderGroupCreateInfoNV-type-02415");
+        vkCreateRayTracingPipelinesNV(m_device->handle(), VK_NULL_HANDLE, 1, &pipeline_ci, nullptr, &pipeline);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Intersection shader index does not correspond to an intersection shader
+    {
+        VkPipelineShaderStageCreateInfo stage_create_infos[2] = {};
+        stage_create_infos[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+        stage_create_infos[0].stage = VK_SHADER_STAGE_RAYGEN_BIT_NV;
+        stage_create_infos[0].module = rgen_shader.handle();
+        stage_create_infos[0].pName = "main";
+
+        stage_create_infos[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+        stage_create_infos[1].stage = VK_SHADER_STAGE_INTERSECTION_BIT_NV;
+        stage_create_infos[1].module = intr_shader.handle();
+        stage_create_infos[1].pName = "main";
+
+        VkRayTracingShaderGroupCreateInfoNV group_create_infos[2] = {};
+        group_create_infos[0].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+        group_create_infos[0].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
+        group_create_infos[0].generalShader = 0;
+        group_create_infos[0].closestHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[0].anyHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[0].intersectionShader = VK_SHADER_UNUSED_NV;
+
+        group_create_infos[1].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+        group_create_infos[1].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV;
+        group_create_infos[1].generalShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[1].closestHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[1].anyHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[1].intersectionShader = 0;  // Index 0 corresponds to a raygen shader
+
+        VkRayTracingPipelineCreateInfoNV pipeline_ci = {};
+        pipeline_ci.sType = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV;
+        pipeline_ci.stageCount = 2;
+        pipeline_ci.pStages = stage_create_infos;
+        pipeline_ci.groupCount = 2;
+        pipeline_ci.pGroups = group_create_infos;
+        pipeline_ci.layout = empty_pipeline_layout.handle();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkRayTracingShaderGroupCreateInfoNV-type-02415");
+        vkCreateRayTracingPipelinesNV(m_device->handle(), VK_NULL_HANDLE, 1, &pipeline_ci, nullptr, &pipeline);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Intersection shader must not be specified for a triangle hit group
+    {
+        VkPipelineShaderStageCreateInfo stage_create_infos[2] = {};
+        stage_create_infos[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+        stage_create_infos[0].stage = VK_SHADER_STAGE_RAYGEN_BIT_NV;
+        stage_create_infos[0].module = rgen_shader.handle();
+        stage_create_infos[0].pName = "main";
+
+        stage_create_infos[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+        stage_create_infos[1].stage = VK_SHADER_STAGE_INTERSECTION_BIT_NV;
+        stage_create_infos[1].module = intr_shader.handle();
+        stage_create_infos[1].pName = "main";
+
+        VkRayTracingShaderGroupCreateInfoNV group_create_infos[2] = {};
+        group_create_infos[0].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+        group_create_infos[0].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
+        group_create_infos[0].generalShader = 0;
+        group_create_infos[0].closestHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[0].anyHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[0].intersectionShader = VK_SHADER_UNUSED_NV;
+
+        group_create_infos[1].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+        group_create_infos[1].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV;
+        group_create_infos[1].generalShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[1].closestHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[1].anyHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[1].intersectionShader = 1;
+
+        VkRayTracingPipelineCreateInfoNV pipeline_ci = {};
+        pipeline_ci.sType = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV;
+        pipeline_ci.stageCount = 2;
+        pipeline_ci.pStages = stage_create_infos;
+        pipeline_ci.groupCount = 2;
+        pipeline_ci.pGroups = group_create_infos;
+        pipeline_ci.layout = empty_pipeline_layout.handle();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-VkRayTracingShaderGroupCreateInfoNV-type-02416");
+        vkCreateRayTracingPipelinesNV(m_device->handle(), VK_NULL_HANDLE, 1, &pipeline_ci, nullptr, &pipeline);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Any hit shader index invalid
+    {
+        VkPipelineShaderStageCreateInfo stage_create_infos[2] = {};
+        stage_create_infos[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+        stage_create_infos[0].stage = VK_SHADER_STAGE_RAYGEN_BIT_NV;
+        stage_create_infos[0].module = rgen_shader.handle();
+        stage_create_infos[0].pName = "main";
+
+        stage_create_infos[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+        stage_create_infos[1].stage = VK_SHADER_STAGE_ANY_HIT_BIT_NV;
+        stage_create_infos[1].module = ahit_shader.handle();
+        stage_create_infos[1].pName = "main";
+
+        VkRayTracingShaderGroupCreateInfoNV group_create_infos[2] = {};
+        group_create_infos[0].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+        group_create_infos[0].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
+        group_create_infos[0].generalShader = 0;
+        group_create_infos[0].closestHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[0].anyHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[0].intersectionShader = VK_SHADER_UNUSED_NV;
+
+        group_create_infos[1].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+        group_create_infos[1].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV;
+        group_create_infos[1].generalShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[1].closestHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[1].anyHitShader = 5;  // Invalid index
+        group_create_infos[1].intersectionShader = VK_SHADER_UNUSED_NV;
+
+        VkRayTracingPipelineCreateInfoNV pipeline_ci = {};
+        pipeline_ci.sType = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV;
+        pipeline_ci.stageCount = 2;
+        pipeline_ci.pStages = stage_create_infos;
+        pipeline_ci.groupCount = 2;
+        pipeline_ci.pGroups = group_create_infos;
+        pipeline_ci.layout = empty_pipeline_layout.handle();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkRayTracingShaderGroupCreateInfoNV-anyHitShader-02418");
+        vkCreateRayTracingPipelinesNV(m_device->handle(), VK_NULL_HANDLE, 1, &pipeline_ci, nullptr, &pipeline);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Any hit shader index does not correspond to an any hit shader
+    {
+        VkPipelineShaderStageCreateInfo stage_create_infos[2] = {};
+        stage_create_infos[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+        stage_create_infos[0].stage = VK_SHADER_STAGE_RAYGEN_BIT_NV;
+        stage_create_infos[0].module = rgen_shader.handle();
+        stage_create_infos[0].pName = "main";
+
+        stage_create_infos[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+        stage_create_infos[1].stage = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV;
+        stage_create_infos[1].module = chit_shader.handle();
+        stage_create_infos[1].pName = "main";
+
+        VkRayTracingShaderGroupCreateInfoNV group_create_infos[2] = {};
+        group_create_infos[0].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+        group_create_infos[0].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
+        group_create_infos[0].generalShader = 0;
+        group_create_infos[0].closestHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[0].anyHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[0].intersectionShader = VK_SHADER_UNUSED_NV;
+
+        group_create_infos[1].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+        group_create_infos[1].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV;
+        group_create_infos[1].generalShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[1].closestHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[1].anyHitShader = 1;  // Index 1 corresponds to a closest hit shader
+        group_create_infos[1].intersectionShader = VK_SHADER_UNUSED_NV;
+
+        VkRayTracingPipelineCreateInfoNV pipeline_ci = {};
+        pipeline_ci.sType = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV;
+        pipeline_ci.stageCount = 2;
+        pipeline_ci.pStages = stage_create_infos;
+        pipeline_ci.groupCount = 2;
+        pipeline_ci.pGroups = group_create_infos;
+        pipeline_ci.layout = empty_pipeline_layout.handle();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkRayTracingShaderGroupCreateInfoNV-anyHitShader-02418");
+        vkCreateRayTracingPipelinesNV(m_device->handle(), VK_NULL_HANDLE, 1, &pipeline_ci, nullptr, &pipeline);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Closest hit shader index invalid
+    {
+        VkPipelineShaderStageCreateInfo stage_create_infos[2] = {};
+        stage_create_infos[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+        stage_create_infos[0].stage = VK_SHADER_STAGE_RAYGEN_BIT_NV;
+        stage_create_infos[0].module = rgen_shader.handle();
+        stage_create_infos[0].pName = "main";
+
+        stage_create_infos[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+        stage_create_infos[1].stage = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV;
+        stage_create_infos[1].module = chit_shader.handle();
+        stage_create_infos[1].pName = "main";
+
+        VkRayTracingShaderGroupCreateInfoNV group_create_infos[2] = {};
+        group_create_infos[0].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+        group_create_infos[0].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
+        group_create_infos[0].generalShader = 0;
+        group_create_infos[0].closestHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[0].anyHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[0].intersectionShader = VK_SHADER_UNUSED_NV;
+
+        group_create_infos[1].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+        group_create_infos[1].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV;
+        group_create_infos[1].generalShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[1].closestHitShader = 5;  // Invalid index
+        group_create_infos[1].anyHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[1].intersectionShader = VK_SHADER_UNUSED_NV;
+
+        VkRayTracingPipelineCreateInfoNV pipeline_ci = {};
+        pipeline_ci.sType = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV;
+        pipeline_ci.stageCount = 2;
+        pipeline_ci.pStages = stage_create_infos;
+        pipeline_ci.groupCount = 2;
+        pipeline_ci.pGroups = group_create_infos;
+        pipeline_ci.layout = empty_pipeline_layout.handle();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkRayTracingShaderGroupCreateInfoNV-closestHitShader-02417");
+        vkCreateRayTracingPipelinesNV(m_device->handle(), VK_NULL_HANDLE, 1, &pipeline_ci, nullptr, &pipeline);
+        m_errorMonitor->VerifyFound();
+    }
+
+    // Closest hit shader index does not correspond to a closest hit shader
+    {
+        VkPipelineShaderStageCreateInfo stage_create_infos[2] = {};
+        stage_create_infos[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+        stage_create_infos[0].stage = VK_SHADER_STAGE_RAYGEN_BIT_NV;
+        stage_create_infos[0].module = rgen_shader.handle();
+        stage_create_infos[0].pName = "main";
+
+        stage_create_infos[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+        stage_create_infos[1].stage = VK_SHADER_STAGE_ANY_HIT_BIT_NV;
+        stage_create_infos[1].module = ahit_shader.handle();
+        stage_create_infos[1].pName = "main";
+
+        VkRayTracingShaderGroupCreateInfoNV group_create_infos[2] = {};
+        group_create_infos[0].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+        group_create_infos[0].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV;
+        group_create_infos[0].generalShader = 0;
+        group_create_infos[0].closestHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[0].anyHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[0].intersectionShader = VK_SHADER_UNUSED_NV;
+
+        group_create_infos[1].sType = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+        group_create_infos[1].type = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV;
+        group_create_infos[1].generalShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[1].closestHitShader = 1;  // Index 1 corresponds to an any hit shader
+        group_create_infos[1].anyHitShader = VK_SHADER_UNUSED_NV;
+        group_create_infos[1].intersectionShader = VK_SHADER_UNUSED_NV;
+
+        VkRayTracingPipelineCreateInfoNV pipeline_ci = {};
+        pipeline_ci.sType = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV;
+        pipeline_ci.stageCount = 2;
+        pipeline_ci.pStages = stage_create_infos;
+        pipeline_ci.groupCount = 2;
+        pipeline_ci.pGroups = group_create_infos;
+        pipeline_ci.layout = empty_pipeline_layout.handle();
+
+        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+                                             "VUID-VkRayTracingShaderGroupCreateInfoNV-closestHitShader-02417");
+        vkCreateRayTracingPipelinesNV(m_device->handle(), VK_NULL_HANDLE, 1, &pipeline_ci, nullptr, &pipeline);
+        m_errorMonitor->VerifyFound();
+    }
+}
+
+TEST_F(VkLayerTest, PipelineStageConditionalRenderingWithWrongQueue) {
+    TEST_DESCRIPTION("Run CmdPipelineBarrier with VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT and wrong VkQueueFlagBits");
+    ASSERT_NO_FATAL_FAILURE(Init());
+    uint32_t only_transfer_queueFamilyIndex = UINT32_MAX;
+
+    const auto q_props = vk_testing::PhysicalDevice(gpu()).queue_properties();
+    ASSERT_TRUE(q_props.size() > 0);
+    ASSERT_TRUE(q_props[0].queueCount > 0);
+
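+    // Look for a queue family that exposes only VK_QUEUE_TRANSFER_BIT; the vertex shader and conditional
+    // rendering stages used in the barrier below are not supported on such a queue.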
+    for (uint32_t i = 0; i < (uint32_t)q_props.size(); i++) {
+        if (q_props[i].queueFlags == VK_QUEUE_TRANSFER_BIT) {
+            only_transfer_queueFamilyIndex = i;
+            break;
+        }
+    }
+
+    if (only_transfer_queueFamilyIndex == UINT32_MAX) {
+        printf("%s Only VK_QUEUE_TRANSFER_BIT Queue is not supported.\n", kSkipPrefix);
+        return;
+    }
+
+    // A renderpass with a single subpass that declared a self-dependency
+    VkAttachmentDescription attach[] = {
+        {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+         VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_UNDEFINED,
+         VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+    };
+    VkAttachmentReference ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+    VkSubpassDescription subpasses[] = {
+        {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &ref, nullptr, nullptr, 0, nullptr},
+    };
+
+    VkSubpassDependency dependency = {0,
+                                      0,
+                                      VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
+                                      VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT,
+                                      VK_ACCESS_SHADER_WRITE_BIT,
+                                      VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT,
+                                      (VkDependencyFlags)0};
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attach, 1, subpasses, 1, &dependency};
+    VkRenderPass rp;
+
+    vk::CreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+
+    VkImageObj image(m_device);
+    image.Init(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT);
+    VkImageView imageView = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
+
+    VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &imageView, 32, 32, 1};
+    VkFramebuffer fb;
+    vk::CreateFramebuffer(m_device->device(), &fbci, nullptr, &fb);
+
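+    // Create the command pool on the transfer-only queue family so the barrier below is validated against
+    // that family's capabilities.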
+    VkCommandPoolObj commandPool(m_device, only_transfer_queueFamilyIndex);
+    VkCommandBufferObj commandBuffer(m_device, &commandPool);
+
+    commandBuffer.begin();
+    VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
+                                  nullptr,
+                                  rp,
+                                  fb,
+                                  {{
+                                       0,
+                                       0,
+                                   },
+                                   {32, 32}},
+                                  0,
+                                  nullptr};
+    vk::CmdBeginRenderPass(commandBuffer.handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+
+    VkImageMemoryBarrier imb = {};
+    imb.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+    imb.pNext = nullptr;
+    imb.srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
+    imb.dstAccessMask = VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT;
+    imb.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    imb.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    imb.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    imb.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    imb.image = image.handle();
+    imb.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    imb.subresourceRange.baseMipLevel = 0;
+    imb.subresourceRange.levelCount = 1;
+    imb.subresourceRange.baseArrayLayer = 0;
+    imb.subresourceRange.layerCount = 1;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-srcStageMask-01183");
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdPipelineBarrier-srcStageMask-01183");
+    vk::CmdPipelineBarrier(commandBuffer.handle(), VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
+                           VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT, 0, 0, nullptr, 0, nullptr, 1, &imb);
+    m_errorMonitor->VerifyFound();
+
+    vk::CmdEndRenderPass(commandBuffer.handle());
+    commandBuffer.end();
+    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+    vk::DestroyFramebuffer(m_device->device(), fb, nullptr);
+}
diff --git a/src/third_party/vulkan-validation-layers/src/tests/vkpositivelayertests.cpp b/src/third_party/vulkan-validation-layers/src/tests/vkpositivelayertests.cpp
new file mode 100644
index 0000000..deee792
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/vkpositivelayertests.cpp
@@ -0,0 +1,8789 @@
+/*
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2019 Google, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Author: Chia-I Wu <olvaffe@gmail.com>
+ * Author: Chris Forbes <chrisf@ijw.co.nz>
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Mark Lobodzinski <mark@lunarg.com>
+ * Author: Mike Stroyan <mike@LunarG.com>
+ * Author: Tobin Ehlis <tobine@google.com>
+ * Author: Tony Barbour <tony@LunarG.com>
+ * Author: Cody Northrop <cnorthrop@google.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ * Author: Jeremy Kniager <jeremyk@lunarg.com>
+ * Author: Shannon McPherson <shannon@lunarg.com>
+ * Author: John Zulauf <jzulauf@lunarg.com>
+ */
+
+#include "cast_utils.h"
+#include "layer_validation_tests.h"
+//
+// POSITIVE VALIDATION TESTS
+//
+// These tests do not expect to encounter ANY validation errors; they pass only if this is true
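+// Each test brackets the calls under test between m_errorMonitor->ExpectSuccess() and
+// m_errorMonitor->VerifyNotFound(), so any unexpected validation message fails the test.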
+
+TEST_F(VkPositiveLayerTest, NullFunctionPointer) {
+    TEST_DESCRIPTION("On 1_0 instance , call GetDeviceProcAddr on promoted 1_1 device-level entrypoint");
+    SetTargetApiVersion(VK_API_VERSION_1_0);
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (DeviceExtensionSupported(gpu(), nullptr, "VK_KHR_get_memory_requirements2")) {
+        m_device_extension_names.push_back("VK_KHR_get_memory_requirements2");
+    } else {
+        printf("%s VK_KHR_get_memory_reqirements2 extension not supported, skipping NullFunctionPointer test\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    m_errorMonitor->ExpectSuccess();
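+    // Querying the core 1.1 entrypoint name on a 1.0 device is expected to return NULL.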
+    auto fpGetBufferMemoryRequirements =
+        (PFN_vkGetBufferMemoryRequirements2)vk::GetDeviceProcAddr(m_device->device(), "vkGetBufferMemoryRequirements2");
+    if (fpGetBufferMemoryRequirements) {
+        m_errorMonitor->SetError("Null was expected!");
+    }
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, SecondaryCommandBufferBarrier) {
+    TEST_DESCRIPTION("Add a pipeline barrier in a secondary command buffer");
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    m_errorMonitor->ExpectSuccess();
+
+    // A renderpass with a single subpass that declared a self-dependency
+    VkAttachmentDescription attach[] = {
+        {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+         VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_UNDEFINED,
+         VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+    };
+    VkAttachmentReference ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+    VkSubpassDescription subpasses[] = {
+        {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &ref, nullptr, nullptr, 0, nullptr},
+    };
+    VkSubpassDependency dep = {0,
+                               0,
+                               VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+                               VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+                               VK_ACCESS_SHADER_WRITE_BIT,
+                               VK_ACCESS_SHADER_WRITE_BIT,
+                               VK_DEPENDENCY_BY_REGION_BIT};
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attach, 1, subpasses, 1, &dep};
+    VkRenderPass rp;
+
+    VkResult err = vk::CreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+    ASSERT_VK_SUCCESS(err);
+
+    VkImageObj image(m_device);
+    image.Init(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    VkImageView imageView = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
+
+    VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &imageView, 32, 32, 1};
+    VkFramebuffer fb;
+    err = vk::CreateFramebuffer(m_device->device(), &fbci, nullptr, &fb);
+    ASSERT_VK_SUCCESS(err);
+
+    m_commandBuffer->begin();
+
+    VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
+                                  nullptr,
+                                  rp,
+                                  fb,
+                                  {{
+                                       0,
+                                       0,
+                                   },
+                                   {32, 32}},
+                                  0,
+                                  nullptr};
+
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
+
+    VkCommandPoolObj pool(m_device, m_device->graphics_queue_node_index_, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
+    VkCommandBufferObj secondary(m_device, &pool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+
+    VkCommandBufferInheritanceInfo cbii = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
+                                           nullptr,
+                                           rp,
+                                           0,
+                                           VK_NULL_HANDLE,  // Set to NULL FB handle intentionally to flesh out any errors
+                                           VK_FALSE,
+                                           0,
+                                           0};
+    VkCommandBufferBeginInfo cbbi = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr,
+                                     VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT,
+                                     &cbii};
+    vk::BeginCommandBuffer(secondary.handle(), &cbbi);
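+    // The barrier below matches the subpass self-dependency declared above (fragment shader stage,
+    // shader-write access), so it is allowed inside the render pass.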
+    VkMemoryBarrier mem_barrier = {};
+    mem_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
+    mem_barrier.pNext = NULL;
+    mem_barrier.srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
+    mem_barrier.dstAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
+    vk::CmdPipelineBarrier(secondary.handle(), VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+                           VK_DEPENDENCY_BY_REGION_BIT, 1, &mem_barrier, 0, nullptr, 0, nullptr);
+
+    image.ImageMemoryBarrier(&secondary, VK_IMAGE_ASPECT_COLOR_BIT, VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
+                             VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+                             VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT);
+    secondary.end();
+
+    vk::CmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
+    vk::CmdEndRenderPass(m_commandBuffer->handle());
+    m_commandBuffer->end();
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    vk::QueueWaitIdle(m_device->m_queue);
+
+    vk::DestroyFramebuffer(m_device->device(), fb, nullptr);
+    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, RenderPassCreateAttachmentUsedTwiceOK) {
+    TEST_DESCRIPTION("Attachment is used simultaneously as color and input, with the same layout. This is OK.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkAttachmentDescription attach[] = {
+        {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+         VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL},
+    };
+    VkAttachmentReference ref = {0, VK_IMAGE_LAYOUT_GENERAL};
+    VkSubpassDescription subpasses[] = {
+        {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 1, &ref, 1, &ref, nullptr, nullptr, 0, nullptr},
+    };
+
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attach, 1, subpasses, 0, nullptr};
+    VkRenderPass rp;
+
+    m_errorMonitor->ExpectSuccess();
+    vk::CreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+    m_errorMonitor->VerifyNotFound();
+    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+}
+
+TEST_F(VkPositiveLayerTest, RenderPassCreateInitialLayoutUndefined) {
+    TEST_DESCRIPTION(
+        "Ensure that CmdBeginRenderPass with an attachment's initialLayout of VK_IMAGE_LAYOUT_UNDEFINED works when the command "
+        "buffer has prior knowledge of that attachment's layout.");
+
+    m_errorMonitor->ExpectSuccess();
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // A renderpass with one color attachment.
+    VkAttachmentDescription attachment = {0,
+                                          VK_FORMAT_R8G8B8A8_UNORM,
+                                          VK_SAMPLE_COUNT_1_BIT,
+                                          VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+                                          VK_ATTACHMENT_STORE_OP_STORE,
+                                          VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+                                          VK_ATTACHMENT_STORE_OP_DONT_CARE,
+                                          VK_IMAGE_LAYOUT_UNDEFINED,
+                                          VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+
+    VkAttachmentReference att_ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+
+    VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &att_ref, nullptr, nullptr, 0, nullptr};
+
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &attachment, 1, &subpass, 0, nullptr};
+
+    VkRenderPass rp;
+    VkResult err = vk::CreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+    ASSERT_VK_SUCCESS(err);
+
+    // A compatible framebuffer.
+    VkImageObj image(m_device);
+    image.Init(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(image.initialized());
+
+    VkImageViewCreateInfo ivci = {
+        VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+        nullptr,
+        0,
+        image.handle(),
+        VK_IMAGE_VIEW_TYPE_2D,
+        VK_FORMAT_R8G8B8A8_UNORM,
+        {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
+         VK_COMPONENT_SWIZZLE_IDENTITY},
+        {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1},
+    };
+    VkImageView view;
+    err = vk::CreateImageView(m_device->device(), &ivci, nullptr, &view);
+    ASSERT_VK_SUCCESS(err);
+
+    VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &view, 32, 32, 1};
+    VkFramebuffer fb;
+    err = vk::CreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
+    ASSERT_VK_SUCCESS(err);
+
+    // Record a single command buffer which uses this renderpass twice. The
+    // bug is triggered at the beginning of the second renderpass, when the
+    // command buffer already has a layout recorded for the attachment.
+    VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp, fb, {{0, 0}, {32, 32}}, 0, nullptr};
+    m_commandBuffer->begin();
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+    vk::CmdEndRenderPass(m_commandBuffer->handle());
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+
+    m_errorMonitor->VerifyNotFound();
+
+    vk::CmdEndRenderPass(m_commandBuffer->handle());
+    m_commandBuffer->end();
+
+    vk::DestroyFramebuffer(m_device->device(), fb, nullptr);
+    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+    vk::DestroyImageView(m_device->device(), view, nullptr);
+}
+
+TEST_F(VkPositiveLayerTest, RenderPassCreateAttachmentLayoutWithLoadOpThenReadOnly) {
+    TEST_DESCRIPTION(
+        "Positive test where we create a renderpass with an attachment that uses LOAD_OP_CLEAR, the first subpass has a valid "
+        "layout, and a second subpass then uses a valid *READ_ONLY* layout.");
+    m_errorMonitor->ExpectSuccess();
+    ASSERT_NO_FATAL_FAILURE(Init());
+    auto depth_format = FindSupportedDepthStencilFormat(gpu());
+    if (!depth_format) {
+        printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    VkAttachmentReference attach[2] = {};
+    attach[0].attachment = 0;
+    attach[0].layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+    attach[1].attachment = 0;
+    attach[1].layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
+    VkSubpassDescription subpasses[2] = {};
+    // First subpass clears DS attach on load
+    subpasses[0].pDepthStencilAttachment = &attach[0];
+    // 2nd subpass reads in DS as input attachment
+    subpasses[1].inputAttachmentCount = 1;
+    subpasses[1].pInputAttachments = &attach[1];
+    VkAttachmentDescription attach_desc = {};
+    attach_desc.format = depth_format;
+    attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
+    attach_desc.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
+    attach_desc.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
+    attach_desc.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
+    attach_desc.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
+    attach_desc.initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+    attach_desc.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
+    VkRenderPassCreateInfo rpci = {};
+    rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+    rpci.attachmentCount = 1;
+    rpci.pAttachments = &attach_desc;
+    rpci.subpassCount = 2;
+    rpci.pSubpasses = subpasses;
+
+    // Now create RenderPass and verify no errors
+    VkRenderPass rp;
+    vk::CreateRenderPass(m_device->device(), &rpci, NULL, &rp);
+    m_errorMonitor->VerifyNotFound();
+
+    vk::DestroyRenderPass(m_device->device(), rp, NULL);
+}
+
+TEST_F(VkPositiveLayerTest, RenderPassBeginSubpassZeroTransitionsApplied) {
+    TEST_DESCRIPTION("Ensure that CmdBeginRenderPass applies the layout transitions for the first subpass");
+
+    m_errorMonitor->ExpectSuccess();
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // A renderpass with one color attachment.
+    VkAttachmentDescription attachment = {0,
+                                          VK_FORMAT_R8G8B8A8_UNORM,
+                                          VK_SAMPLE_COUNT_1_BIT,
+                                          VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+                                          VK_ATTACHMENT_STORE_OP_STORE,
+                                          VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+                                          VK_ATTACHMENT_STORE_OP_DONT_CARE,
+                                          VK_IMAGE_LAYOUT_UNDEFINED,
+                                          VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+
+    VkAttachmentReference att_ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+
+    VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &att_ref, nullptr, nullptr, 0, nullptr};
+
+    VkSubpassDependency dep = {0,
+                               0,
+                               VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+                               VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+                               VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+                               VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+                               VK_DEPENDENCY_BY_REGION_BIT};
+
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &attachment, 1, &subpass, 1, &dep};
+
+    VkResult err;
+    VkRenderPass rp;
+    err = vk::CreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+    ASSERT_VK_SUCCESS(err);
+
+    // A compatible framebuffer.
+    VkImageObj image(m_device);
+    image.Init(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(image.initialized());
+
+    VkImageView view = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
+
+    VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &view, 32, 32, 1};
+    VkFramebuffer fb;
+    err = vk::CreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
+    ASSERT_VK_SUCCESS(err);
+
+    // Record a single command buffer which issues a pipeline barrier w/
+    // image memory barrier for the attachment. This detects the previously
+    // missing tracking of the subpass layout by throwing a validation error
+    // if it doesn't occur.
+    VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp, fb, {{0, 0}, {32, 32}}, 0, nullptr};
+    m_commandBuffer->begin();
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+
+    image.ImageMemoryBarrier(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+                             VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+                             VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT);
+
+    vk::CmdEndRenderPass(m_commandBuffer->handle());
+    m_errorMonitor->VerifyNotFound();
+    m_commandBuffer->end();
+
+    vk::DestroyFramebuffer(m_device->device(), fb, nullptr);
+    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+}
+
+TEST_F(VkPositiveLayerTest, RenderPassBeginTransitionsAttachmentUnused) {
+    TEST_DESCRIPTION(
+        "Ensure that layout transitions work correctly without errors, when an attachment reference is VK_ATTACHMENT_UNUSED");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    m_errorMonitor->ExpectSuccess();
+
+    // A renderpass with no attachments
+    VkAttachmentReference att_ref = {VK_ATTACHMENT_UNUSED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+
+    VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &att_ref, nullptr, nullptr, 0, nullptr};
+
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 0, nullptr, 1, &subpass, 0, nullptr};
+
+    VkRenderPass rp;
+    VkResult err = vk::CreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+    ASSERT_VK_SUCCESS(err);
+
+    // A compatible framebuffer.
+    VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 0, nullptr, 32, 32, 1};
+    VkFramebuffer fb;
+    err = vk::CreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
+    ASSERT_VK_SUCCESS(err);
+
+    // Record a command buffer which just begins and ends the renderpass. The
+    // bug manifests in BeginRenderPass.
+    VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp, fb, {{0, 0}, {32, 32}}, 0, nullptr};
+    m_commandBuffer->begin();
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+    vk::CmdEndRenderPass(m_commandBuffer->handle());
+    m_errorMonitor->VerifyNotFound();
+    m_commandBuffer->end();
+
+    vk::DestroyFramebuffer(m_device->device(), fb, nullptr);
+    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+}
+
+TEST_F(VkPositiveLayerTest, RenderPassBeginStencilLoadOp) {
+    TEST_DESCRIPTION("Create a stencil-only attachment with a LOAD_OP set to CLEAR. stencil[Load|Store]Op used to be ignored.");
+    VkResult result = VK_SUCCESS;
+    ASSERT_NO_FATAL_FAILURE(Init());
+    auto depth_format = FindSupportedDepthStencilFormat(gpu());
+    if (!depth_format) {
+        printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
+        return;
+    }
+    VkImageFormatProperties formatProps;
+    vk::GetPhysicalDeviceImageFormatProperties(gpu(), depth_format, VK_IMAGE_TYPE_2D, VK_IMAGE_TILING_OPTIMAL,
+                                               VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT, 0,
+                                               &formatProps);
+    if (formatProps.maxExtent.width < 100 || formatProps.maxExtent.height < 100) {
+        printf("%s Image format max extent is too small.\n", kSkipPrefix);
+        return;
+    }
+
+    VkFormat depth_stencil_fmt = depth_format;
+    m_depthStencil->Init(m_device, 100, 100, depth_stencil_fmt,
+                         VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT);
+    VkAttachmentDescription att = {};
+    VkAttachmentReference ref = {};
+    att.format = depth_stencil_fmt;
+    att.samples = VK_SAMPLE_COUNT_1_BIT;
+    att.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
+    att.storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
+    att.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
+    att.stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE;
+    att.initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+    att.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+
+    VkClearValue clear;
+    clear.depthStencil.depth = 1.0;
+    clear.depthStencil.stencil = 0;
+    ref.attachment = 0;
+    ref.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+
+    VkSubpassDescription subpass = {};
+    subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
+    subpass.flags = 0;
+    subpass.inputAttachmentCount = 0;
+    subpass.pInputAttachments = NULL;
+    subpass.colorAttachmentCount = 0;
+    subpass.pColorAttachments = NULL;
+    subpass.pResolveAttachments = NULL;
+    subpass.pDepthStencilAttachment = &ref;
+    subpass.preserveAttachmentCount = 0;
+    subpass.pPreserveAttachments = NULL;
+
+    VkRenderPass rp;
+    VkRenderPassCreateInfo rp_info = {};
+    rp_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+    rp_info.attachmentCount = 1;
+    rp_info.pAttachments = &att;
+    rp_info.subpassCount = 1;
+    rp_info.pSubpasses = &subpass;
+    result = vk::CreateRenderPass(device(), &rp_info, NULL, &rp);
+    ASSERT_VK_SUCCESS(result);
+
+    VkImageView *depthView = m_depthStencil->BindInfo();
+    VkFramebufferCreateInfo fb_info = {};
+    fb_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
+    fb_info.pNext = NULL;
+    fb_info.renderPass = rp;
+    fb_info.attachmentCount = 1;
+    fb_info.pAttachments = depthView;
+    fb_info.width = 100;
+    fb_info.height = 100;
+    fb_info.layers = 1;
+    VkFramebuffer fb;
+    result = vk::CreateFramebuffer(device(), &fb_info, NULL, &fb);
+    ASSERT_VK_SUCCESS(result);
+
+    VkRenderPassBeginInfo rpbinfo = {};
+    rpbinfo.clearValueCount = 1;
+    rpbinfo.pClearValues = &clear;
+    rpbinfo.pNext = NULL;
+    rpbinfo.renderPass = rp;
+    rpbinfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
+    rpbinfo.renderArea.extent.width = 100;
+    rpbinfo.renderArea.extent.height = 100;
+    rpbinfo.renderArea.offset.x = 0;
+    rpbinfo.renderArea.offset.y = 0;
+    rpbinfo.framebuffer = fb;
+
+    VkFenceObj fence;
+    fence.init(*m_device, VkFenceObj::create_info());
+    ASSERT_TRUE(fence.initialized());
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(rpbinfo);
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+    m_commandBuffer->QueueCommandBuffer(fence);
+
+    VkImageObj destImage(m_device);
+    destImage.Init(100, 100, 1, depth_stencil_fmt, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+                   VK_IMAGE_TILING_OPTIMAL, 0);
+    fence.wait(VK_TRUE, UINT64_MAX);
+    VkCommandBufferObj cmdbuf(m_device, m_commandPool);
+    cmdbuf.begin();
+
+    m_depthStencil->ImageMemoryBarrier(&cmdbuf, VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT,
+                                       VK_ACCESS_TRANSFER_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
+                                       VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
+                                       VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+
+    destImage.ImageMemoryBarrier(&cmdbuf, VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT,
+                                 VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT, 0,
+                                 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+    VkImageCopy cregion;
+    cregion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+    cregion.srcSubresource.mipLevel = 0;
+    cregion.srcSubresource.baseArrayLayer = 0;
+    cregion.srcSubresource.layerCount = 1;
+    cregion.srcOffset.x = 0;
+    cregion.srcOffset.y = 0;
+    cregion.srcOffset.z = 0;
+    cregion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+    cregion.dstSubresource.mipLevel = 0;
+    cregion.dstSubresource.baseArrayLayer = 0;
+    cregion.dstSubresource.layerCount = 1;
+    cregion.dstOffset.x = 0;
+    cregion.dstOffset.y = 0;
+    cregion.dstOffset.z = 0;
+    cregion.extent.width = 100;
+    cregion.extent.height = 100;
+    cregion.extent.depth = 1;
+    cmdbuf.CopyImage(m_depthStencil->handle(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, destImage.handle(),
+                     VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &cregion);
+    cmdbuf.end();
+
+    VkSubmitInfo submit_info;
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.pNext = NULL;
+    submit_info.waitSemaphoreCount = 0;
+    submit_info.pWaitSemaphores = NULL;
+    submit_info.pWaitDstStageMask = NULL;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &cmdbuf.handle();
+    submit_info.signalSemaphoreCount = 0;
+    submit_info.pSignalSemaphores = NULL;
+
+    m_errorMonitor->ExpectSuccess();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyNotFound();
+
+    vk::QueueWaitIdle(m_device->m_queue);
+    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+    vk::DestroyFramebuffer(m_device->device(), fb, nullptr);
+}
+
+TEST_F(VkPositiveLayerTest, RenderPassBeginInlineAndSecondaryCommandBuffers) {
+    m_errorMonitor->ExpectSuccess();
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    m_commandBuffer->begin();
+
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
+    vk::CmdEndRenderPass(m_commandBuffer->handle());
+    m_errorMonitor->VerifyNotFound();
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
+    m_errorMonitor->VerifyNotFound();
+    vk::CmdEndRenderPass(m_commandBuffer->handle());
+    m_errorMonitor->VerifyNotFound();
+
+    m_commandBuffer->end();
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, RenderPassBeginDepthStencilLayoutTransitionFromUndefined) {
+    TEST_DESCRIPTION(
+        "Create a render pass with depth-stencil attachment where layout transition from UNDEFINED TO DS_READ_ONLY_OPTIMAL is set "
+        "by render pass and verify that transition has correctly occurred at queue submit time with no validation errors.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    auto depth_format = FindSupportedDepthStencilFormat(gpu());
+    if (!depth_format) {
+        printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
+        return;
+    }
+    VkImageFormatProperties format_props;
+    vk::GetPhysicalDeviceImageFormatProperties(gpu(), depth_format, VK_IMAGE_TYPE_2D, VK_IMAGE_TILING_OPTIMAL,
+                                               VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, 0, &format_props);
+    if (format_props.maxExtent.width < 32 || format_props.maxExtent.height < 32) {
+        printf("%s Depth extent too small, RenderPassDepthStencilLayoutTransition skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    m_errorMonitor->ExpectSuccess();
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // A renderpass with one depth/stencil attachment.
+    VkAttachmentDescription attachment = {0,
+                                          depth_format,
+                                          VK_SAMPLE_COUNT_1_BIT,
+                                          VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+                                          VK_ATTACHMENT_STORE_OP_DONT_CARE,
+                                          VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+                                          VK_ATTACHMENT_STORE_OP_DONT_CARE,
+                                          VK_IMAGE_LAYOUT_UNDEFINED,
+                                          VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
+
+    VkAttachmentReference att_ref = {0, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
+
+    VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, &att_ref, 0, nullptr};
+
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &attachment, 1, &subpass, 0, nullptr};
+
+    VkRenderPass rp;
+    VkResult err = vk::CreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+    ASSERT_VK_SUCCESS(err);
+    // A compatible ds image.
+    VkImageObj image(m_device);
+    image.Init(32, 32, 1, depth_format, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(image.initialized());
+
+    VkImageViewCreateInfo ivci = {
+        VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+        nullptr,
+        0,
+        image.handle(),
+        VK_IMAGE_VIEW_TYPE_2D,
+        depth_format,
+        {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
+         VK_COMPONENT_SWIZZLE_IDENTITY},
+        {VK_IMAGE_ASPECT_DEPTH_BIT, 0, 1, 0, 1},
+    };
+    VkImageView view;
+    err = vk::CreateImageView(m_device->device(), &ivci, nullptr, &view);
+    ASSERT_VK_SUCCESS(err);
+
+    VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &view, 32, 32, 1};
+    VkFramebuffer fb;
+    err = vk::CreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
+    ASSERT_VK_SUCCESS(err);
+
+    VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp, fb, {{0, 0}, {32, 32}}, 0, nullptr};
+    m_commandBuffer->begin();
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+    vk::CmdEndRenderPass(m_commandBuffer->handle());
+    m_commandBuffer->end();
+    m_commandBuffer->QueueCommandBuffer(false);
+    m_errorMonitor->VerifyNotFound();
+
+    // Cleanup
+    vk::DestroyImageView(m_device->device(), view, NULL);
+    vk::DestroyRenderPass(m_device->device(), rp, NULL);
+    vk::DestroyFramebuffer(m_device->device(), fb, NULL);
+}
+
+TEST_F(VkPositiveLayerTest, DestroyPipelineRenderPass) {
+    TEST_DESCRIPTION("Draw using a pipeline whose create renderPass has been destroyed.");
+    m_errorMonitor->ExpectSuccess();
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkResult err;
+
+    // Create a renderPass that's compatible with Draw-time renderPass
+    VkAttachmentDescription att = {};
+    att.format = m_render_target_fmt;
+    att.samples = VK_SAMPLE_COUNT_1_BIT;
+    att.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
+    att.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
+    att.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
+    att.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
+    att.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    att.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+
+    VkAttachmentReference ref = {};
+    ref.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    ref.attachment = 0;
+
+    m_renderPassClearValues.clear();
+    VkClearValue clear = {};
+    clear.color = m_clear_color;
+
+    VkSubpassDescription subpass = {};
+    subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
+    subpass.flags = 0;
+    subpass.inputAttachmentCount = 0;
+    subpass.pInputAttachments = NULL;
+    subpass.colorAttachmentCount = 1;
+    subpass.pColorAttachments = &ref;
+    subpass.pResolveAttachments = NULL;
+
+    subpass.pDepthStencilAttachment = NULL;
+    subpass.preserveAttachmentCount = 0;
+    subpass.pPreserveAttachments = NULL;
+
+    VkRenderPassCreateInfo rp_info = {};
+    rp_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+    rp_info.attachmentCount = 1;
+    rp_info.pAttachments = &att;
+    rp_info.subpassCount = 1;
+    rp_info.pSubpasses = &subpass;
+
+    VkRenderPass rp;
+    err = vk::CreateRenderPass(device(), &rp_info, NULL, &rp);
+    ASSERT_VK_SUCCESS(err);
+
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    VkPipelineObj pipe(m_device);
+    pipe.AddDefaultColorAttachment();
+    pipe.AddShader(&vs);
+    pipe.AddShader(&fs);
+    VkViewport viewport = {0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f};
+    m_viewports.push_back(viewport);
+    pipe.SetViewport(m_viewports);
+    VkRect2D rect = {{0, 0}, {64, 64}};
+    m_scissors.push_back(rect);
+    pipe.SetScissor(m_scissors);
+
+    const VkPipelineLayoutObj pl(m_device);
+    pipe.CreateVKPipeline(pl.handle(), rp);
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+    // Destroy renderPass before pipeline is used in Draw
+    //  We delay until after CmdBindPipeline to verify that invalid binding isn't
+    //  created between CB & renderPass, which we used to do.
+    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+    vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
+    vk::CmdEndRenderPass(m_commandBuffer->handle());
+    m_commandBuffer->end();
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyNotFound();
+    vk::QueueWaitIdle(m_device->m_queue);
+}
+
+TEST_F(VkPositiveLayerTest, ResetQueryPoolFromDifferentCB) {
+    TEST_DESCRIPTION("Reset a query on one CB and use it in another.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    m_errorMonitor->ExpectSuccess();
+
+    VkQueryPool query_pool;
+    VkQueryPoolCreateInfo query_pool_create_info{};
+    query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+    query_pool_create_info.queryType = VK_QUERY_TYPE_OCCLUSION;
+    query_pool_create_info.queryCount = 1;
+    vk::CreateQueryPool(m_device->device(), &query_pool_create_info, nullptr, &query_pool);
+
+    VkCommandBuffer command_buffer[2];
+    VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+    command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+    command_buffer_allocate_info.commandPool = m_commandPool->handle();
+    command_buffer_allocate_info.commandBufferCount = 2;
+    command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+    vk::AllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);
+
+    {
+        VkCommandBufferBeginInfo begin_info{};
+        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+
+        vk::BeginCommandBuffer(command_buffer[0], &begin_info);
+        vk::CmdResetQueryPool(command_buffer[0], query_pool, 0, 1);
+        vk::EndCommandBuffer(command_buffer[0]);
+
+        vk::BeginCommandBuffer(command_buffer[1], &begin_info);
+        vk::CmdBeginQuery(command_buffer[1], query_pool, 0, 0);
+        vk::CmdEndQuery(command_buffer[1], query_pool, 0);
+        vk::EndCommandBuffer(command_buffer[1]);
+    }
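+    // Submit the reset batch first, then the batch that begins/ends the query.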
+    {
+        VkSubmitInfo submit_info[2]{};
+        submit_info[0].sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+        submit_info[0].commandBufferCount = 1;
+        submit_info[0].pCommandBuffers = &command_buffer[0];
+        submit_info[0].signalSemaphoreCount = 0;
+        submit_info[0].pSignalSemaphores = nullptr;
+
+        submit_info[1].sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+        submit_info[1].commandBufferCount = 1;
+        submit_info[1].pCommandBuffers = &command_buffer[1];
+        submit_info[1].signalSemaphoreCount = 0;
+        submit_info[1].pSignalSemaphores = nullptr;
+
+        vk::QueueSubmit(m_device->m_queue, 2, &submit_info[0], VK_NULL_HANDLE);
+    }
+
+    vk::QueueWaitIdle(m_device->m_queue);
+
+    vk::DestroyQueryPool(m_device->device(), query_pool, nullptr);
+    vk::FreeCommandBuffers(m_device->device(), m_commandPool->handle(), 2, command_buffer);
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, BasicQuery) {
+    TEST_DESCRIPTION("Use a couple occlusion queries");
+    m_errorMonitor->ExpectSuccess();
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    uint32_t qfi = 0;
+    VkBufferCreateInfo bci = {};
+    bci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    bci.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+    bci.size = 4 * sizeof(uint64_t);
+    bci.queueFamilyIndexCount = 1;
+    bci.pQueueFamilyIndices = &qfi;
+    VkBufferObj buffer;
+    VkMemoryPropertyFlags mem_props = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+    buffer.init(*m_device, bci, mem_props);
+
+    VkQueryPool query_pool;
+    VkQueryPoolCreateInfo query_pool_info;
+    query_pool_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+    query_pool_info.pNext = NULL;
+    query_pool_info.queryType = VK_QUERY_TYPE_OCCLUSION;
+    query_pool_info.flags = 0;
+    query_pool_info.queryCount = 2;
+    query_pool_info.pipelineStatistics = 0;
+
+    VkResult res = vk::CreateQueryPool(m_device->handle(), &query_pool_info, NULL, &query_pool);
+    ASSERT_VK_SUCCESS(res);
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.InitState();
+    pipe.CreateGraphicsPipeline();
+
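+    // Record: reset both queries, record an empty query (index 0) and a query around a draw (index 1),
+    // then copy both results into the buffer.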
+    m_commandBuffer->begin();
+    vk::CmdResetQueryPool(m_commandBuffer->handle(), query_pool, 0, 2);
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+    vk::CmdBeginQuery(m_commandBuffer->handle(), query_pool, 0, 0);
+    vk::CmdEndQuery(m_commandBuffer->handle(), query_pool, 0);
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
+    vk::CmdBeginQuery(m_commandBuffer->handle(), query_pool, 1, 0);
+    vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
+    vk::CmdEndRenderPass(m_commandBuffer->handle());
+    vk::CmdEndQuery(m_commandBuffer->handle(), query_pool, 1);
+    vk::CmdCopyQueryPoolResults(m_commandBuffer->handle(), query_pool, 0, 2, buffer.handle(), 0, sizeof(uint64_t),
+                                VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT);
+    m_commandBuffer->end();
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+
+    vk::QueueWaitIdle(m_device->m_queue);
+    uint64_t samples_passed[4];
+    res = vk::GetQueryPoolResults(m_device->handle(), query_pool, 0, 2, sizeof(samples_passed), samples_passed, sizeof(uint64_t),
+                                  VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT);
+    ASSERT_VK_SUCCESS(res);
+    m_errorMonitor->VerifyNotFound();
+    vk::DestroyQueryPool(m_device->handle(), query_pool, NULL);
+}
+
+TEST_F(VkPositiveLayerTest, MultiplaneGetImageSubresourceLayout) {
+    TEST_DESCRIPTION("Positive test, query layout of a single plane of a multiplane image. (repro Github #2530)");
+
+    // Enable KHR multiplane req'd extensions
+    bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
+                                                    VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION);
+    if (mp_extensions) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+    if (mp_extensions) {
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+    } else {
+        printf("%s test requires KHR multiplane extensions, not available.  Skipping.\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    VkImageCreateInfo ci = {};
+    ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    ci.pNext = NULL;
+    ci.flags = 0;
+    ci.imageType = VK_IMAGE_TYPE_2D;
+    ci.format = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR;
+    ci.extent = {128, 128, 1};
+    ci.mipLevels = 1;
+    ci.arrayLayers = 1;
+    ci.samples = VK_SAMPLE_COUNT_1_BIT;
+    ci.tiling = VK_IMAGE_TILING_LINEAR;
+    ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+    ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+    // Verify format
+    bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), ci, VK_FORMAT_FEATURE_TRANSFER_SRC_BIT);
+    if (!supported) {
+        printf("%s Multiplane image format not supported.  Skipping test.\n", kSkipPrefix);
+        return;  // Assume there's low ROI on searching for different mp formats
+    }
+
+    VkImage image;
+    VkResult err = vk::CreateImage(device(), &ci, NULL, &image);
+    ASSERT_VK_SUCCESS(err);
+
+    // Query layout of 3rd plane
+    VkImageSubresource subres = {};
+    subres.aspectMask = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR;
+    subres.mipLevel = 0;
+    subres.arrayLayer = 0;
+    VkSubresourceLayout layout = {};
+
+    m_errorMonitor->ExpectSuccess();
+    vk::GetImageSubresourceLayout(device(), image, &subres, &layout);
+    m_errorMonitor->VerifyNotFound();
+
+    vk::DestroyImage(device(), image, NULL);
+}
+
+TEST_F(VkPositiveLayerTest, OwnershipTranfersImage) {
+    TEST_DESCRIPTION("Valid image ownership transfers that shouldn't create errors");
+    ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+    uint32_t no_gfx = m_device->QueueFamilyWithoutCapabilities(VK_QUEUE_GRAPHICS_BIT);
+    if (no_gfx == UINT32_MAX) {
+        printf("%s Required queue families not present (non-graphics capable required).\n", kSkipPrefix);
+        return;
+    }
+    VkQueueObj *no_gfx_queue = m_device->queue_family_queues(no_gfx)[0].get();
+
+    VkCommandPoolObj no_gfx_pool(m_device, no_gfx, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
+    VkCommandBufferObj no_gfx_cb(m_device, &no_gfx_pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, no_gfx_queue);
+
+    // Create an "exclusive" image owned by the graphics queue.
+    VkImageObj image(m_device);
+    VkFlags image_use = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
+    image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, image_use, VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(image.initialized());
+    auto image_subres = image.subresource_range(VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1);
+    auto image_barrier = image.image_memory_barrier(0, 0, image.Layout(), image.Layout(), image_subres);
+    image_barrier.srcQueueFamilyIndex = m_device->graphics_queue_node_index_;
+    image_barrier.dstQueueFamilyIndex = no_gfx;
+
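+    // Transfer exclusive ownership of the image from the graphics queue family to the non-graphics family.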
+    ValidOwnershipTransfer(m_errorMonitor, m_commandBuffer, &no_gfx_cb, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
+                           VK_PIPELINE_STAGE_TRANSFER_BIT, nullptr, &image_barrier);
+
+    // Change layouts while changing ownership
+    image_barrier.srcQueueFamilyIndex = no_gfx;
+    image_barrier.dstQueueFamilyIndex = m_device->graphics_queue_node_index_;
+    image_barrier.oldLayout = image.Layout();
+    // Make sure the new layout is different from the old
+    if (image_barrier.oldLayout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL) {
+        image_barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+    } else {
+        image_barrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    }
+
+    ValidOwnershipTransfer(m_errorMonitor, &no_gfx_cb, m_commandBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT,
+                           VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, nullptr, &image_barrier);
+}
+
+TEST_F(VkPositiveLayerTest, OwnershipTranfersBuffer) {
+    TEST_DESCRIPTION("Valid buffer ownership transfers that shouldn't create errors");
+    ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+
+    uint32_t no_gfx = m_device->QueueFamilyWithoutCapabilities(VK_QUEUE_GRAPHICS_BIT);
+    if (no_gfx == UINT32_MAX) {
+        printf("%s Required queue families not present (non-graphics capable required).\n", kSkipPrefix);
+        return;
+    }
+    VkQueueObj *no_gfx_queue = m_device->queue_family_queues(no_gfx)[0].get();
+
+    VkCommandPoolObj no_gfx_pool(m_device, no_gfx, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
+    VkCommandBufferObj no_gfx_cb(m_device, &no_gfx_pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, no_gfx_queue);
+
+    // Create a buffer
+    const VkDeviceSize buffer_size = 256;
+    uint8_t data[buffer_size] = {0xFF};
+    VkConstantBufferObj buffer(m_device, buffer_size, data, VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT);
+    ASSERT_TRUE(buffer.initialized());
+    auto buffer_barrier = buffer.buffer_memory_barrier(0, 0, 0, VK_WHOLE_SIZE);
+
+    // Let gfx own it.
+    buffer_barrier.srcQueueFamilyIndex = m_device->graphics_queue_node_index_;
+    buffer_barrier.dstQueueFamilyIndex = m_device->graphics_queue_node_index_;
+    ValidOwnershipTransferOp(m_errorMonitor, m_commandBuffer, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
+                             &buffer_barrier, nullptr);
+
+    // Transfer it to non-gfx
+    buffer_barrier.dstQueueFamilyIndex = no_gfx;
+    ValidOwnershipTransfer(m_errorMonitor, m_commandBuffer, &no_gfx_cb, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
+                           VK_PIPELINE_STAGE_TRANSFER_BIT, &buffer_barrier, nullptr);
+
+    // Transfer it to gfx
+    buffer_barrier.srcQueueFamilyIndex = no_gfx;
+    buffer_barrier.dstQueueFamilyIndex = m_device->graphics_queue_node_index_;
+    ValidOwnershipTransfer(m_errorMonitor, &no_gfx_cb, m_commandBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT,
+                           VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, &buffer_barrier, nullptr);
+}
+
+TEST_F(VkPositiveLayerTest, LayoutFromPresentWithoutAccessMemoryRead) {
+    // Transition an image away from PRESENT_SRC_KHR without ACCESS_MEMORY_READ
+    // in srcAccessMask.
+
+    // The required behavior here was a bit unclear in earlier versions of the
+    // spec, but there is no memory dependency required here, so this should
+    // work without warnings.
+
+    m_errorMonitor->ExpectSuccess();
+    ASSERT_NO_FATAL_FAILURE(Init());
+    VkImageObj image(m_device);
+    image.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT),
+               VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(image.initialized());
+
+    VkImageMemoryBarrier barrier = {};
+    VkImageSubresourceRange range;
+    barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+    barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+    barrier.dstAccessMask = 0;
+    barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    barrier.newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
+    barrier.image = image.handle();
+    range.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    range.baseMipLevel = 0;
+    range.levelCount = 1;
+    range.baseArrayLayer = 0;
+    range.layerCount = 1;
+    barrier.subresourceRange = range;
+    VkCommandBufferObj cmdbuf(m_device, m_commandPool);
+    cmdbuf.begin();
+    cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0, nullptr, 1,
+                           &barrier);
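+    // Now transition away from PRESENT_SRC_KHR with srcAccessMask = 0, which is the case under test.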
+    barrier.oldLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
+    barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
+    barrier.srcAccessMask = 0;
+    barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
+    cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0, nullptr, 1,
+                           &barrier);
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, CopyNonupdatedDescriptors) {
+    TEST_DESCRIPTION("Copy non-updated descriptors");
+    unsigned int i;
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    OneOffDescriptorSet src_descriptor_set(m_device, {
+                                                         {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                                         {1, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                                         {2, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                                     });
+    OneOffDescriptorSet dst_descriptor_set(m_device, {
+                                                         {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                                         {1, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                                     });
+
+    m_errorMonitor->ExpectSuccess();
+
+    const unsigned int copy_size = 2;
+    VkCopyDescriptorSet copy_ds_update[copy_size];
+    memset(copy_ds_update, 0, sizeof(copy_ds_update));
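+    // Copy bindings 0 and 1 from the never-updated source set into the destination set.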
+    for (i = 0; i < copy_size; i++) {
+        copy_ds_update[i].sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
+        copy_ds_update[i].srcSet = src_descriptor_set.set_;
+        copy_ds_update[i].srcBinding = i;
+        copy_ds_update[i].dstSet = dst_descriptor_set.set_;
+        copy_ds_update[i].dstBinding = i;
+        copy_ds_update[i].descriptorCount = 1;
+    }
+    vk::UpdateDescriptorSets(m_device->device(), 0, NULL, copy_size, copy_ds_update);
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, ConfirmNoVLErrorWhenVkCmdClearAttachmentsCalledInSecondaryCB) {
+    TEST_DESCRIPTION(
+        "This test is to verify that when vkCmdClearAttachments is called by a secondary commandbuffer, the validation layers do "
+        "not throw an error if the primary commandbuffer begins a renderpass before executing the secondary commandbuffer.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkCommandBufferObj secondary(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+
+    VkCommandBufferBeginInfo info = {};
+    VkCommandBufferInheritanceInfo hinfo = {};
+    info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
+    info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+    info.pInheritanceInfo = &hinfo;
+    hinfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
+    hinfo.pNext = NULL;
+    hinfo.renderPass = renderPass();
+    hinfo.subpass = 0;
+    hinfo.framebuffer = m_framebuffer;
+    hinfo.occlusionQueryEnable = VK_FALSE;
+    hinfo.queryFlags = 0;
+    hinfo.pipelineStatistics = 0;
+
+    secondary.begin(&info);
+    VkClearAttachment color_attachment;
+    color_attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    color_attachment.clearValue.color.float32[0] = 0.0;
+    color_attachment.clearValue.color.float32[1] = 0.0;
+    color_attachment.clearValue.color.float32[2] = 0.0;
+    color_attachment.clearValue.color.float32[3] = 0.0;
+    color_attachment.colorAttachment = 0;
+    VkClearRect clear_rect = {{{0, 0}, {(uint32_t)m_width, (uint32_t)m_height}}, 0, 1};
+    vk::CmdClearAttachments(secondary.handle(), 1, &color_attachment, 1, &clear_rect);
+    secondary.end();
+    // Modify clear rect here to verify that it doesn't cause validation error
+    clear_rect = {{{0, 0}, {99999999, 99999999}}, 0, 0};
+
+    m_commandBuffer->begin();
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
+    vk::CmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary.handle());
+    vk::CmdEndRenderPass(m_commandBuffer->handle());
+    m_commandBuffer->end();
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, CreatePipelineComplexTypes) {
+    TEST_DESCRIPTION("Smoke test for complex types across VS/FS boundary");
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    if (!m_device->phy().features().tessellationShader) {
+        printf("%s Device does not support tessellation shaders; skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    m_errorMonitor->ExpectSuccess();
+
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj tcs(m_device, bindStateTscShaderText, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
+    VkShaderObj tes(m_device, bindStateTeshaderText, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this);
+    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    VkPipelineInputAssemblyStateCreateInfo iasci{VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, nullptr, 0,
+                                                 VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, VK_FALSE};
+    VkPipelineTessellationStateCreateInfo tsci{VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, nullptr, 0, 3};
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.gp_ci_.pTessellationState = &tsci;
+    pipe.gp_ci_.pInputAssemblyState = &iasci;
+    pipe.shader_stages_ = {vs.GetStageCreateInfo(), tcs.GetStageCreateInfo(), tes.GetStageCreateInfo(), fs.GetStageCreateInfo()};
+    pipe.InitState();
+    pipe.CreateGraphicsPipeline();
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, ShaderRelaxedBlockLayout) {
+    // This is a positive test, no errors expected
+    // Verifies the ability to relax block layout rules with a shader that requires them to be relaxed
+    TEST_DESCRIPTION("Create a shader that requires relaxed block layout.");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    // The Relaxed Block Layout extension was promoted to core in 1.1.
+    // Go ahead and check for it and turn it on in case a 1.0 device has it.
+    if (!DeviceExtensionSupported(gpu(), nullptr, VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME)) {
+        printf("%s Extension %s not supported, skipping this pass. \n", kSkipPrefix, VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME);
+        return;
+    }
+    m_device_extension_names.push_back(VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME);
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // Vertex shader requiring relaxed layout.
+    // Without relaxed layout, we would expect a message like:
+    // "Structure id 2 decorated as Block for variable in Uniform storage class
+    // must follow standard uniform buffer layout rules: member 1 at offset 4 is not aligned to 16"
+
+    const std::string spv_source = R"(
+                  OpCapability Shader
+                  OpMemoryModel Logical GLSL450
+                  OpEntryPoint Vertex %main "main"
+                  OpSource GLSL 450
+                  OpMemberDecorate %S 0 Offset 0
+                  OpMemberDecorate %S 1 Offset 4
+                  OpDecorate %S Block
+                  OpDecorate %B DescriptorSet 0
+                  OpDecorate %B Binding 0
+          %void = OpTypeVoid
+             %3 = OpTypeFunction %void
+         %float = OpTypeFloat 32
+       %v3float = OpTypeVector %float 3
+             %S = OpTypeStruct %float %v3float
+%_ptr_Uniform_S = OpTypePointer Uniform %S
+             %B = OpVariable %_ptr_Uniform_S Uniform
+          %main = OpFunction %void None %3
+             %5 = OpLabel
+                  OpReturn
+                  OpFunctionEnd
+        )";
+    m_errorMonitor->ExpectSuccess();
+    VkShaderObj vs(m_device, spv_source, VK_SHADER_STAGE_VERTEX_BIT, this);
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, ShaderUboStd430Layout) {
+    // This is a positive test, no errors expected
+    // Verifies the ability to use std430 block layout in a uniform buffer with a shader that requires it
+    TEST_DESCRIPTION("Create a shader that requires UBO std430 layout.");
+    // Enable req'd extensions
+    if (!InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        printf("%s Extension %s not supported, skipping this pass. \n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    // Check for the UBO standard block layout extension and turn it on if it's available
+    if (!DeviceExtensionSupported(gpu(), nullptr, VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME)) {
+        printf("%s Extension %s not supported, skipping this pass. \n", kSkipPrefix,
+               VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME);
+        return;
+    }
+    m_device_extension_names.push_back(VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME);
+
+    PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 =
+        (PFN_vkGetPhysicalDeviceFeatures2)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+
+    auto uniform_buffer_standard_layout_features = lvl_init_struct<VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR>(NULL);
+    uniform_buffer_standard_layout_features.uniformBufferStandardLayout = VK_TRUE;
+    auto query_features2 = lvl_init_struct<VkPhysicalDeviceFeatures2>(&uniform_buffer_standard_layout_features);
+    vkGetPhysicalDeviceFeatures2(gpu(), &query_features2);
+
+    auto set_features2 = lvl_init_struct<VkPhysicalDeviceFeatures2>(&uniform_buffer_standard_layout_features);
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &set_features2));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // Vertex shader requiring std430 in a uniform buffer.
+    // Without uniform buffer standard layout, we would expect a message like:
+    // "Structure id 3 decorated as Block for variable in Uniform storage class
+    // must follow standard uniform buffer layout rules: member 0 is an array
+    // with stride 4 not satisfying alignment to 16"
+
+    const std::string spv_source = R"(
+               OpCapability Shader
+               OpMemoryModel Logical GLSL450
+               OpEntryPoint Vertex %main "main"
+               OpSource GLSL 460
+               OpDecorate %_arr_float_uint_8 ArrayStride 4
+               OpMemberDecorate %foo 0 Offset 0
+               OpDecorate %foo Block
+               OpDecorate %b DescriptorSet 0
+               OpDecorate %b Binding 0
+       %void = OpTypeVoid
+          %3 = OpTypeFunction %void
+      %float = OpTypeFloat 32
+       %uint = OpTypeInt 32 0
+     %uint_8 = OpConstant %uint 8
+%_arr_float_uint_8 = OpTypeArray %float %uint_8
+        %foo = OpTypeStruct %_arr_float_uint_8
+%_ptr_Uniform_foo = OpTypePointer Uniform %foo
+          %b = OpVariable %_ptr_Uniform_foo Uniform
+       %main = OpFunction %void None %3
+          %5 = OpLabel
+               OpReturn
+               OpFunctionEnd
+        )";
+
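+    // Assemble the SPIR-V text and create the shader module directly via vkCreateShaderModule.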
+    std::vector<unsigned int> spv;
+    VkShaderModuleCreateInfo module_create_info;
+    VkShaderModule shader_module;
+    module_create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
+    module_create_info.pNext = NULL;
+    ASMtoSPV(SPV_ENV_VULKAN_1_0, 0, spv_source.data(), spv);
+    module_create_info.pCode = spv.data();
+    module_create_info.codeSize = spv.size() * sizeof(unsigned int);
+    module_create_info.flags = 0;
+
+    m_errorMonitor->ExpectSuccess();
+    VkResult err = vk::CreateShaderModule(m_device->handle(), &module_create_info, NULL, &shader_module);
+    m_errorMonitor->VerifyNotFound();
+    if (err == VK_SUCCESS) {
+        vk::DestroyShaderModule(m_device->handle(), shader_module, NULL);
+    }
+}
+
+TEST_F(VkPositiveLayerTest, ShaderScalarBlockLayout) {
+    // This is a positive test, no errors expected
+    // Verifies the ability to use scalar block layout rules with a shader that requires them
+    TEST_DESCRIPTION("Create a shader that requires scalar block layout.");
+    // Enable req'd extensions
+    if (!InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        printf("%s Extension %s not supported, skipping this pass. \n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+    m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    // Check for the Scalar Block Layout extension and turn it on if it's available
+    if (!DeviceExtensionSupported(gpu(), nullptr, VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME)) {
+        printf("%s Extension %s not supported, skipping this pass. \n", kSkipPrefix, VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME);
+        return;
+    }
+    m_device_extension_names.push_back(VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME);
+
+    PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 =
+        (PFN_vkGetPhysicalDeviceFeatures2)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+
+    auto scalar_block_features = lvl_init_struct<VkPhysicalDeviceScalarBlockLayoutFeaturesEXT>(NULL);
+    scalar_block_features.scalarBlockLayout = VK_TRUE;
+    auto query_features2 = lvl_init_struct<VkPhysicalDeviceFeatures2>(&scalar_block_features);
+    vkGetPhysicalDeviceFeatures2(gpu(), &query_features2);
+
+    auto set_features2 = lvl_init_struct<VkPhysicalDeviceFeatures2>(&scalar_block_features);
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &set_features2));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // Vertex shader requiring scalar layout.
+    // Without scalar layout, we would expect a message like:
+    // "Structure id 2 decorated as Block for variable in Uniform storage class
+    // must follow standard uniform buffer layout rules: member 1 at offset 4 is not aligned to 16"
+
+    const std::string spv_source = R"(
+                  OpCapability Shader
+                  OpMemoryModel Logical GLSL450
+                  OpEntryPoint Vertex %main "main"
+                  OpSource GLSL 450
+                  OpMemberDecorate %S 0 Offset 0
+                  OpMemberDecorate %S 1 Offset 4
+                  OpMemberDecorate %S 2 Offset 8
+                  OpDecorate %S Block
+                  OpDecorate %B DescriptorSet 0
+                  OpDecorate %B Binding 0
+          %void = OpTypeVoid
+             %3 = OpTypeFunction %void
+         %float = OpTypeFloat 32
+       %v3float = OpTypeVector %float 3
+             %S = OpTypeStruct %float %float %v3float
+%_ptr_Uniform_S = OpTypePointer Uniform %S
+             %B = OpVariable %_ptr_Uniform_S Uniform
+          %main = OpFunction %void None %3
+             %5 = OpLabel
+                  OpReturn
+                  OpFunctionEnd
+        )";
+
+    m_errorMonitor->ExpectSuccess();
+    VkShaderObj vs(m_device, spv_source, VK_SHADER_STAGE_VERTEX_BIT, this);
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, SpirvGroupDecorations) {
+    TEST_DESCRIPTION("Test shader validation support for group decorations.");
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    const std::string spv_source = R"(
+              OpCapability Shader
+               OpMemoryModel Logical GLSL450
+               OpEntryPoint GLCompute %main "main" %gl_GlobalInvocationID
+               OpExecutionMode %main LocalSize 1 1 1
+               OpSource GLSL 430
+               OpName %main "main"
+               OpName %gl_GlobalInvocationID "gl_GlobalInvocationID"
+               OpDecorate %gl_GlobalInvocationID BuiltIn GlobalInvocationId
+               OpDecorate %_runtimearr_float ArrayStride 4
+               OpDecorate %4 BufferBlock
+               OpDecorate %5 Offset 0
+          %4 = OpDecorationGroup
+          %5 = OpDecorationGroup
+               OpGroupDecorate %4 %_struct_6 %_struct_7 %_struct_8 %_struct_9 %_struct_10 %_struct_11
+               OpGroupMemberDecorate %5 %_struct_6 0 %_struct_7 0 %_struct_8 0 %_struct_9 0 %_struct_10 0 %_struct_11 0
+               OpDecorate %12 DescriptorSet 0
+               OpDecorate %13 DescriptorSet 0
+               OpDecorate %13 NonWritable
+               OpDecorate %13 Restrict
+         %14 = OpDecorationGroup
+         %12 = OpDecorationGroup
+         %13 = OpDecorationGroup
+               OpGroupDecorate %12 %15
+               OpGroupDecorate %12 %15
+               OpGroupDecorate %12 %15
+               OpDecorate %15 DescriptorSet 0
+               OpDecorate %15 Binding 5
+               OpGroupDecorate %14 %16
+               OpDecorate %16 DescriptorSet 0
+               OpDecorate %16 Binding 0
+               OpGroupDecorate %12 %17
+               OpDecorate %17 Binding 1
+               OpGroupDecorate %13 %18 %19
+               OpDecorate %18 Binding 2
+               OpDecorate %19 Binding 3
+               OpGroupDecorate %14 %20
+               OpGroupDecorate %12 %20
+               OpGroupDecorate %13 %20
+               OpDecorate %20 Binding 4
+       %bool = OpTypeBool
+       %void = OpTypeVoid
+         %23 = OpTypeFunction %void
+       %uint = OpTypeInt 32 0
+        %int = OpTypeInt 32 1
+      %float = OpTypeFloat 32
+     %v3uint = OpTypeVector %uint 3
+    %v3float = OpTypeVector %float 3
+%_ptr_Input_v3uint = OpTypePointer Input %v3uint
+%_ptr_Uniform_int = OpTypePointer Uniform %int
+%_ptr_Uniform_float = OpTypePointer Uniform %float
+%_runtimearr_int = OpTypeRuntimeArray %int
+%_runtimearr_float = OpTypeRuntimeArray %float
+%gl_GlobalInvocationID = OpVariable %_ptr_Input_v3uint Input
+      %int_0 = OpConstant %int 0
+  %_struct_6 = OpTypeStruct %_runtimearr_float
+%_ptr_Uniform__struct_6 = OpTypePointer Uniform %_struct_6
+         %15 = OpVariable %_ptr_Uniform__struct_6 Uniform
+  %_struct_7 = OpTypeStruct %_runtimearr_float
+%_ptr_Uniform__struct_7 = OpTypePointer Uniform %_struct_7
+         %16 = OpVariable %_ptr_Uniform__struct_7 Uniform
+  %_struct_8 = OpTypeStruct %_runtimearr_float
+%_ptr_Uniform__struct_8 = OpTypePointer Uniform %_struct_8
+         %17 = OpVariable %_ptr_Uniform__struct_8 Uniform
+  %_struct_9 = OpTypeStruct %_runtimearr_float
+%_ptr_Uniform__struct_9 = OpTypePointer Uniform %_struct_9
+         %18 = OpVariable %_ptr_Uniform__struct_9 Uniform
+ %_struct_10 = OpTypeStruct %_runtimearr_float
+%_ptr_Uniform__struct_10 = OpTypePointer Uniform %_struct_10
+         %19 = OpVariable %_ptr_Uniform__struct_10 Uniform
+ %_struct_11 = OpTypeStruct %_runtimearr_float
+%_ptr_Uniform__struct_11 = OpTypePointer Uniform %_struct_11
+         %20 = OpVariable %_ptr_Uniform__struct_11 Uniform
+       %main = OpFunction %void None %23
+         %40 = OpLabel
+         %41 = OpLoad %v3uint %gl_GlobalInvocationID
+         %42 = OpCompositeExtract %uint %41 0
+         %43 = OpAccessChain %_ptr_Uniform_float %16 %int_0 %42
+         %44 = OpAccessChain %_ptr_Uniform_float %17 %int_0 %42
+         %45 = OpAccessChain %_ptr_Uniform_float %18 %int_0 %42
+         %46 = OpAccessChain %_ptr_Uniform_float %19 %int_0 %42
+         %47 = OpAccessChain %_ptr_Uniform_float %20 %int_0 %42
+         %48 = OpAccessChain %_ptr_Uniform_float %15 %int_0 %42
+         %49 = OpLoad %float %43
+         %50 = OpLoad %float %44
+         %51 = OpLoad %float %45
+         %52 = OpLoad %float %46
+         %53 = OpLoad %float %47
+         %54 = OpFAdd %float %49 %50
+         %55 = OpFAdd %float %54 %51
+         %56 = OpFAdd %float %55 %52
+         %57 = OpFAdd %float %56 %53
+               OpStore %48 %57
+               OpReturn
+               OpFunctionEnd
+)";
+
+    // CreateDescriptorSetLayout
+    VkDescriptorSetLayoutBinding dslb[6] = {};
+    size_t dslb_size = size(dslb);
+    for (size_t i = 0; i < dslb_size; i++) {
+        dslb[i].binding = i;
+        dslb[i].descriptorCount = 1;
+        dslb[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
+        dslb[i].pImmutableSamplers = NULL;
+        dslb[i].stageFlags = VK_SHADER_STAGE_COMPUTE_BIT | VK_SHADER_STAGE_ALL;
+    }
+    if (m_device->props.limits.maxPerStageDescriptorStorageBuffers < dslb_size) {
+        printf("%sNeeded storage buffer bindings exceeds this devices limit.  Skipping tests.\n", kSkipPrefix);
+        return;
+    }
+
+    CreateComputePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.dsl_bindings_.resize(dslb_size);
+    memcpy(pipe.dsl_bindings_.data(), dslb, dslb_size * sizeof(VkDescriptorSetLayoutBinding));
+    pipe.cs_.reset(new VkShaderObj(m_device, bindStateMinimalShaderText, VK_SHADER_STAGE_COMPUTE_BIT, this));
+    pipe.InitState();
+    m_errorMonitor->ExpectSuccess();
+    pipe.CreateComputePipeline();
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, CreatePipelineCheckShaderCapabilityExtension1of2) {
+    // This is a positive test, no errors expected
+    // Verifies the ability to deal with a shader that declares a non-unique SPIRV capability ID
+    TEST_DESCRIPTION("Create a shader in which uses a non-unique capability ID extension, 1 of 2");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (!DeviceExtensionSupported(gpu(), nullptr, VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME)) {
+        printf("%s Extension %s not supported, skipping this pass. \n", kSkipPrefix,
+               VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME);
+        return;
+    }
+    m_device_extension_names.push_back(VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME);
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    // These tests require that the device support multiViewport
+    if (!m_device->phy().features().multiViewport) {
+        printf("%s Device does not support multiViewport, test skipped.\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // Vertex shader using viewport array capability
+    char const *vsSource =
+        "#version 450\n"
+        "#extension GL_ARB_shader_viewport_layer_array : enable\n"
+        "void main() {\n"
+        "    gl_ViewportIndex = 1;\n"
+        "}\n";
+
+    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.shader_stages_ = {vs.GetStageCreateInfo()};
+    pipe.InitState();
+    m_errorMonitor->ExpectSuccess();
+    pipe.CreateGraphicsPipeline();
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, CreatePipelineCheckShaderCapabilityExtension2of2) {
+    // This is a positive test, no errors expected
+    // Verifies the ability to deal with a shader that declares a non-unique SPIRV capability ID
+    TEST_DESCRIPTION("Create a shader in which uses a non-unique capability ID extension, 2 of 2");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (!DeviceExtensionSupported(gpu(), nullptr, VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME)) {
+        printf("%s Extension %s not supported, skipping this pass. \n", kSkipPrefix, VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME);
+        return;
+    }
+    m_device_extension_names.push_back(VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME);
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    // These tests require that the device support multiViewport
+    if (!m_device->phy().features().multiViewport) {
+        printf("%s Device does not support multiViewport, test skipped.\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // Vertex shader using viewport array capability
+    char const *vsSource =
+        "#version 450\n"
+        "#extension GL_ARB_shader_viewport_layer_array : enable\n"
+        "void main() {\n"
+        "    gl_ViewportIndex = 1;\n"
+        "}\n";
+
+    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.shader_stages_ = {vs.GetStageCreateInfo()};
+    pipe.InitState();
+    m_errorMonitor->ExpectSuccess();
+    pipe.CreateGraphicsPipeline();
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, CreatePipelineFragmentOutputNotWrittenButMasked) {
+    TEST_DESCRIPTION(
+        "Test that no error is produced when the fragment shader fails to declare an output, but the corresponding attachment's "
+        "write mask is 0.");
+    m_errorMonitor->ExpectSuccess();
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    char const *fsSource =
+        "#version 450\n"
+        "\n"
+        "void main(){\n"
+        "}\n";
+
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    VkPipelineObj pipe(m_device);
+    pipe.AddShader(&vs);
+    pipe.AddShader(&fs);
+
+    /* set up CB 0, not written, but also masked */
+    pipe.AddDefaultColorAttachment(0);
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkDescriptorSetObj descriptorSet(m_device);
+    descriptorSet.AppendDummy();
+    descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+    pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderPass());
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, StatelessValidationDisable) {
+    TEST_DESCRIPTION("Specify a non-zero value for a reserved parameter with stateless validation disabled");
+
+    VkValidationFeatureDisableEXT disables[] = {VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT};
+    VkValidationFeaturesEXT features = {};
+    features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
+    features.disabledValidationFeatureCount = 1;
+    features.pDisabledValidationFeatures = disables;
+    VkCommandPoolCreateFlags pool_flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+    ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, pool_flags, &features));
+
+    m_errorMonitor->ExpectSuccess();
+    // Specify a non-zero value for a reserved VkFlags parameter. Normally this is expected to trigger a stateless validation
+    // error, but this validation was disabled via the features extension, so no errors should be forthcoming.
+    VkEvent event_handle = VK_NULL_HANDLE;
+    VkEventCreateInfo event_info = {};
+    event_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+    event_info.flags = 1;
+    vk::CreateEvent(device(), &event_info, NULL, &event_handle);
+    vk::DestroyEvent(device(), event_handle, NULL);
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, PointSizeWriteInFunction) {
+    TEST_DESCRIPTION("Create a pipeline using TOPOLOGY_POINT_LIST and write PointSize in vertex shader function.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    m_errorMonitor->ExpectSuccess();
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+
+    // Create VS declaring PointSize and write to it in a function call.
+    VkShaderObj vs(m_device, bindStateVertPointSizeShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj ps(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+    {
+        CreatePipelineHelper pipe(*this);
+        pipe.InitInfo();
+        pipe.shader_stages_ = {vs.GetStageCreateInfo(), ps.GetStageCreateInfo()};
+        pipe.ia_ci_.topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
+        pipe.InitState();
+        pipe.CreateGraphicsPipeline();
+    }
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, PointSizeGeomShaderSuccess) {
+    TEST_DESCRIPTION(
+        "Create a pipeline using TOPOLOGY_POINT_LIST, set PointSize vertex shader, and write in the final geometry stage.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    m_errorMonitor->ExpectSuccess();
+
+    if ((!m_device->phy().features().geometryShader) || (!m_device->phy().features().shaderTessellationAndGeometryPointSize)) {
+        printf("%s Device does not support the required geometry shader features; skipped.\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+
+    // Create VS declaring PointSize and writing to it
+    VkShaderObj vs(m_device, bindStateVertPointSizeShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj gs(m_device, bindStateGeomPointSizeShaderText, VK_SHADER_STAGE_GEOMETRY_BIT, this);
+    VkShaderObj ps(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.shader_stages_ = {vs.GetStageCreateInfo(), gs.GetStageCreateInfo(), ps.GetStageCreateInfo()};
+    // Set Input Assembly to TOPOLOGY POINT LIST
+    pipe.ia_ci_.topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
+    pipe.InitState();
+    pipe.CreateGraphicsPipeline();
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, LoosePointSizeWrite) {
+    TEST_DESCRIPTION("Create a pipeline using TOPOLOGY_POINT_LIST and write PointSize outside of a structure.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    m_errorMonitor->ExpectSuccess();
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+
+    const std::string LoosePointSizeWrite = R"(
+                                       OpCapability Shader
+                                  %1 = OpExtInstImport "GLSL.std.450"
+                                       OpMemoryModel Logical GLSL450
+                                       OpEntryPoint Vertex %main "main" %glposition %glpointsize %gl_VertexIndex
+                                       OpSource GLSL 450
+                                       OpName %main "main"
+                                       OpName %vertices "vertices"
+                                       OpName %glposition "glposition"
+                                       OpName %glpointsize "glpointsize"
+                                       OpName %gl_VertexIndex "gl_VertexIndex"
+                                       OpDecorate %glposition BuiltIn Position
+                                       OpDecorate %glpointsize BuiltIn PointSize
+                                       OpDecorate %gl_VertexIndex BuiltIn VertexIndex
+                               %void = OpTypeVoid
+                                  %3 = OpTypeFunction %void
+                              %float = OpTypeFloat 32
+                            %v2float = OpTypeVector %float 2
+                               %uint = OpTypeInt 32 0
+                             %uint_3 = OpConstant %uint 3
+                %_arr_v2float_uint_3 = OpTypeArray %v2float %uint_3
+   %_ptr_Private__arr_v2float_uint_3 = OpTypePointer Private %_arr_v2float_uint_3
+                           %vertices = OpVariable %_ptr_Private__arr_v2float_uint_3 Private
+                                %int = OpTypeInt 32 1
+                              %int_0 = OpConstant %int 0
+                           %float_n1 = OpConstant %float -1
+                                 %16 = OpConstantComposite %v2float %float_n1 %float_n1
+               %_ptr_Private_v2float = OpTypePointer Private %v2float
+                              %int_1 = OpConstant %int 1
+                            %float_1 = OpConstant %float 1
+                                 %21 = OpConstantComposite %v2float %float_1 %float_n1
+                              %int_2 = OpConstant %int 2
+                            %float_0 = OpConstant %float 0
+                                 %25 = OpConstantComposite %v2float %float_0 %float_1
+                            %v4float = OpTypeVector %float 4
+            %_ptr_Output_gl_Position = OpTypePointer Output %v4float
+                         %glposition = OpVariable %_ptr_Output_gl_Position Output
+           %_ptr_Output_gl_PointSize = OpTypePointer Output %float
+                        %glpointsize = OpVariable %_ptr_Output_gl_PointSize Output
+                     %_ptr_Input_int = OpTypePointer Input %int
+                     %gl_VertexIndex = OpVariable %_ptr_Input_int Input
+                              %int_3 = OpConstant %int 3
+                %_ptr_Output_v4float = OpTypePointer Output %v4float
+                  %_ptr_Output_float = OpTypePointer Output %float
+                               %main = OpFunction %void None %3
+                                  %5 = OpLabel
+                                 %18 = OpAccessChain %_ptr_Private_v2float %vertices %int_0
+                                       OpStore %18 %16
+                                 %22 = OpAccessChain %_ptr_Private_v2float %vertices %int_1
+                                       OpStore %22 %21
+                                 %26 = OpAccessChain %_ptr_Private_v2float %vertices %int_2
+                                       OpStore %26 %25
+                                 %33 = OpLoad %int %gl_VertexIndex
+                                 %35 = OpSMod %int %33 %int_3
+                                 %36 = OpAccessChain %_ptr_Private_v2float %vertices %35
+                                 %37 = OpLoad %v2float %36
+                                 %38 = OpCompositeExtract %float %37 0
+                                 %39 = OpCompositeExtract %float %37 1
+                                 %40 = OpCompositeConstruct %v4float %38 %39 %float_0 %float_1
+                                 %42 = OpAccessChain %_ptr_Output_v4float %glposition
+                                       OpStore %42 %40
+                                       OpStore %glpointsize %float_1
+                                       OpReturn
+                                       OpFunctionEnd
+        )";
+
+    // Create VS declaring PointSize and write to it in a function call.
+    VkShaderObj vs(m_device, LoosePointSizeWrite, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj ps(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    {
+        CreatePipelineHelper pipe(*this);
+        pipe.InitInfo();
+        pipe.shader_stages_ = {vs.GetStageCreateInfo(), ps.GetStageCreateInfo()};
+        // Set Input Assembly to TOPOLOGY POINT LIST
+        pipe.ia_ci_.topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
+        pipe.InitState();
+        pipe.CreateGraphicsPipeline();
+    }
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, UncompressedToCompressedImageCopy) {
+    TEST_DESCRIPTION("Image copies between compressed and uncompressed images");
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // Verify format support
+    // Size-compatible (64-bit) formats. Uncompressed is 64 bits per texel, compressed is 64 bits per 4x4 block (or 4bpt).
+    if (!ImageFormatAndFeaturesSupported(gpu(), VK_FORMAT_R16G16B16A16_UINT, VK_IMAGE_TILING_OPTIMAL,
+                                         VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR | VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR) ||
+        !ImageFormatAndFeaturesSupported(gpu(), VK_FORMAT_BC1_RGBA_SRGB_BLOCK, VK_IMAGE_TILING_OPTIMAL,
+                                         VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR | VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR)) {
+        printf("%s Required formats/features not supported - UncompressedToCompressedImageCopy skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    VkImageObj uncomp_10x10t_image(m_device);       // Size = 10 * 10 * 64 = 6400
+    VkImageObj comp_10x10b_40x40t_image(m_device);  // Size = 40 * 40 * 4  = 6400
+
+    uncomp_10x10t_image.Init(10, 10, 1, VK_FORMAT_R16G16B16A16_UINT,
+                             VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
+    comp_10x10b_40x40t_image.Init(40, 40, 1, VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
+                                  VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
+
+    if (!uncomp_10x10t_image.initialized() || !comp_10x10b_40x40t_image.initialized()) {
+        printf("%s Unable to initialize surfaces - UncompressedToCompressedImageCopy skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    // Both copies represent the same number of bytes: BC1 packs a 4x4 block into 64 bits, matching the 64-bit uncompressed
+    // texel, so the 10x10 uncompressed region and the 40x40 compressed region are size-compatible. Both directions are copied below.
+    VkImageCopy copy_region = {};
+    copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copy_region.srcSubresource.mipLevel = 0;
+    copy_region.dstSubresource.mipLevel = 0;
+    copy_region.srcSubresource.baseArrayLayer = 0;
+    copy_region.dstSubresource.baseArrayLayer = 0;
+    copy_region.srcSubresource.layerCount = 1;
+    copy_region.dstSubresource.layerCount = 1;
+    copy_region.srcOffset = {0, 0, 0};
+    copy_region.dstOffset = {0, 0, 0};
+
+    m_errorMonitor->ExpectSuccess();
+    m_commandBuffer->begin();
+
+    // Copy from uncompressed to compressed
+    copy_region.extent = {10, 10, 1};  // Dimensions in (uncompressed) texels
+    vk::CmdCopyImage(m_commandBuffer->handle(), uncomp_10x10t_image.handle(), VK_IMAGE_LAYOUT_GENERAL,
+                     comp_10x10b_40x40t_image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
+
+    // And from compressed to uncompressed
+    copy_region.extent = {40, 40, 1};  // Dimensions in (compressed) texels
+    vk::CmdCopyImage(m_commandBuffer->handle(), comp_10x10b_40x40t_image.handle(), VK_IMAGE_LAYOUT_GENERAL,
+                     uncomp_10x10t_image.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
+
+    m_errorMonitor->VerifyNotFound();
+    m_commandBuffer->end();
+}
+
+TEST_F(VkPositiveLayerTest, DeleteDescriptorSetLayoutsBeforeDescriptorSets) {
+    TEST_DESCRIPTION("Create DSLayouts and DescriptorSets and then delete the DSLayouts before the DescriptorSets.");
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+    VkResult err;
+
+    m_errorMonitor->ExpectSuccess();
+
+    VkDescriptorPoolSize ds_type_count = {};
+    ds_type_count.type = VK_DESCRIPTOR_TYPE_SAMPLER;
+    ds_type_count.descriptorCount = 1;
+
+    VkDescriptorPoolCreateInfo ds_pool_ci = {};
+    ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+    ds_pool_ci.pNext = NULL;
+    ds_pool_ci.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
+    ds_pool_ci.maxSets = 1;
+    ds_pool_ci.poolSizeCount = 1;
+    ds_pool_ci.pPoolSizes = &ds_type_count;
+
+    VkDescriptorPool ds_pool_one;
+    err = vk::CreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool_one);
+    ASSERT_VK_SUCCESS(err);
+
+    VkDescriptorSetLayoutBinding dsl_binding = {};
+    dsl_binding.binding = 0;
+    dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
+    dsl_binding.descriptorCount = 1;
+    dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
+    dsl_binding.pImmutableSamplers = NULL;
+
+    VkDescriptorSet descriptorSet;
+    {
+        const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});
+
+        VkDescriptorSetAllocateInfo alloc_info = {};
+        alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+        alloc_info.descriptorSetCount = 1;
+        alloc_info.descriptorPool = ds_pool_one;
+        alloc_info.pSetLayouts = &ds_layout.handle();
+        err = vk::AllocateDescriptorSets(m_device->device(), &alloc_info, &descriptorSet);
+        ASSERT_VK_SUCCESS(err);
+    }  // ds_layout destroyed
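+    // Freeing the set after its layout has been destroyed must not trip the layers' bookkeeping.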
+    err = vk::FreeDescriptorSets(m_device->device(), ds_pool_one, 1, &descriptorSet);
+
+    vk::DestroyDescriptorPool(m_device->device(), ds_pool_one, NULL);
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, CommandPoolDeleteWithReferences) {
+    TEST_DESCRIPTION("Ensure the validation layers bookkeeping tracks the implicit command buffer frees.");
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkCommandPoolCreateInfo cmd_pool_info = {};
+    cmd_pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+    cmd_pool_info.pNext = NULL;
+    cmd_pool_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+    cmd_pool_info.flags = 0;
+
+    VkCommandPool secondary_cmd_pool;
+    VkResult res = vk::CreateCommandPool(m_device->handle(), &cmd_pool_info, NULL, &secondary_cmd_pool);
+    ASSERT_VK_SUCCESS(res);
+
+    VkCommandBufferAllocateInfo cmdalloc = vk_testing::CommandBuffer::create_info(secondary_cmd_pool);
+    cmdalloc.level = VK_COMMAND_BUFFER_LEVEL_SECONDARY;
+
+    VkCommandBuffer secondary_cmds;
+    res = vk::AllocateCommandBuffers(m_device->handle(), &cmdalloc, &secondary_cmds);
+
+    VkCommandBufferInheritanceInfo cmd_buf_inheritance_info = {};
+    cmd_buf_inheritance_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
+    cmd_buf_inheritance_info.pNext = NULL;
+    cmd_buf_inheritance_info.renderPass = VK_NULL_HANDLE;
+    cmd_buf_inheritance_info.subpass = 0;
+    cmd_buf_inheritance_info.framebuffer = VK_NULL_HANDLE;
+    cmd_buf_inheritance_info.occlusionQueryEnable = VK_FALSE;
+    cmd_buf_inheritance_info.queryFlags = 0;
+    cmd_buf_inheritance_info.pipelineStatistics = 0;
+
+    VkCommandBufferBeginInfo secondary_begin = {};
+    secondary_begin.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+    secondary_begin.pNext = NULL;
+    secondary_begin.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
+    secondary_begin.pInheritanceInfo = &cmd_buf_inheritance_info;
+
+    res = vk::BeginCommandBuffer(secondary_cmds, &secondary_begin);
+    ASSERT_VK_SUCCESS(res);
+    vk::EndCommandBuffer(secondary_cmds);
+
+    m_commandBuffer->begin();
+    vk::CmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary_cmds);
+    m_commandBuffer->end();
+
+    // DestroyCommandPool *implicitly* frees the command buffers allocated from it
+    vk::DestroyCommandPool(m_device->handle(), secondary_cmd_pool, NULL);
+    // If bookkeeping has been lax, validating the reset will attempt to touch deleted data
+    res = vk::ResetCommandPool(m_device->handle(), m_commandPool->handle(), 0);
+    ASSERT_VK_SUCCESS(res);
+}
+
+TEST_F(VkPositiveLayerTest, SecondaryCommandBufferClearColorAttachments) {
+    TEST_DESCRIPTION("Create a secondary command buffer and record a CmdClearAttachments call into it");
+    m_errorMonitor->ExpectSuccess();
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkCommandBufferAllocateInfo command_buffer_allocate_info = {};
+    command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+    command_buffer_allocate_info.commandPool = m_commandPool->handle();
+    command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_SECONDARY;
+    command_buffer_allocate_info.commandBufferCount = 1;
+
+    VkCommandBuffer secondary_command_buffer;
+    ASSERT_VK_SUCCESS(vk::AllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &secondary_command_buffer));
+    VkCommandBufferBeginInfo command_buffer_begin_info = {};
+    VkCommandBufferInheritanceInfo command_buffer_inheritance_info = {};
+    command_buffer_inheritance_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
+    command_buffer_inheritance_info.renderPass = m_renderPass;
+    command_buffer_inheritance_info.framebuffer = m_framebuffer;
+
+    command_buffer_begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+    command_buffer_begin_info.flags =
+        VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
+    command_buffer_begin_info.pInheritanceInfo = &command_buffer_inheritance_info;
+
+    vk::BeginCommandBuffer(secondary_command_buffer, &command_buffer_begin_info);
+    VkClearAttachment color_attachment;
+    color_attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    color_attachment.clearValue.color.float32[0] = 0;
+    color_attachment.clearValue.color.float32[1] = 0;
+    color_attachment.clearValue.color.float32[2] = 0;
+    color_attachment.clearValue.color.float32[3] = 0;
+    color_attachment.colorAttachment = 0;
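+    // Clear rect: 32x32 region at offset (0, 0), baseArrayLayer 0, layerCount 1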
+    VkClearRect clear_rect = {{{0, 0}, {32, 32}}, 0, 1};
+    vk::CmdClearAttachments(secondary_command_buffer, 1, &color_attachment, 1, &clear_rect);
+    vk::EndCommandBuffer(secondary_command_buffer);
+    m_commandBuffer->begin();
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
+    vk::CmdExecuteCommands(m_commandBuffer->handle(), 1, &secondary_command_buffer);
+    vk::CmdEndRenderPass(m_commandBuffer->handle());
+    m_commandBuffer->end();
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, SecondaryCommandBufferImageLayoutTransitions) {
+    TEST_DESCRIPTION("Perform an image layout transition in a secondary command buffer followed by a transition in the primary.");
+    VkResult err;
+    m_errorMonitor->ExpectSuccess();
+    ASSERT_NO_FATAL_FAILURE(Init());
+    auto depth_format = FindSupportedDepthStencilFormat(gpu());
+    if (!depth_format) {
+        printf("%s Couldn't find depth stencil format.\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+    // Allocate a secondary and primary cmd buffer
+    VkCommandBufferAllocateInfo command_buffer_allocate_info = {};
+    command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+    command_buffer_allocate_info.commandPool = m_commandPool->handle();
+    command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_SECONDARY;
+    command_buffer_allocate_info.commandBufferCount = 1;
+
+    VkCommandBuffer secondary_command_buffer;
+    ASSERT_VK_SUCCESS(vk::AllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &secondary_command_buffer));
+    command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+    VkCommandBuffer primary_command_buffer;
+    ASSERT_VK_SUCCESS(vk::AllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &primary_command_buffer));
+    VkCommandBufferBeginInfo command_buffer_begin_info = {};
+    VkCommandBufferInheritanceInfo command_buffer_inheritance_info = {};
+    command_buffer_inheritance_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
+    command_buffer_begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+    command_buffer_begin_info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
+    command_buffer_begin_info.pInheritanceInfo = &command_buffer_inheritance_info;
+
+    err = vk::BeginCommandBuffer(secondary_command_buffer, &command_buffer_begin_info);
+    ASSERT_VK_SUCCESS(err);
+    VkImageObj image(m_device);
+    image.Init(128, 128, 1, depth_format, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(image.initialized());
+    VkImageMemoryBarrier img_barrier = {};
+    img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+    img_barrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
+    img_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
+    img_barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    img_barrier.newLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+    img_barrier.image = image.handle();
+    img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+    img_barrier.subresourceRange.baseArrayLayer = 0;
+    img_barrier.subresourceRange.baseMipLevel = 0;
+    img_barrier.subresourceRange.layerCount = 1;
+    img_barrier.subresourceRange.levelCount = 1;
+    vk::CmdPipelineBarrier(secondary_command_buffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 0, nullptr,
+                           0, nullptr, 1, &img_barrier);
+    err = vk::EndCommandBuffer(secondary_command_buffer);
+    ASSERT_VK_SUCCESS(err);
+
+    // Now record the primary cmd buffer to execute the secondary and transition the image
+    command_buffer_begin_info.pInheritanceInfo = nullptr;
+    err = vk::BeginCommandBuffer(primary_command_buffer, &command_buffer_begin_info);
+    ASSERT_VK_SUCCESS(err);
+    vk::CmdExecuteCommands(primary_command_buffer, 1, &secondary_command_buffer);
+    VkImageMemoryBarrier img_barrier2 = {};
+    img_barrier2.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+    img_barrier2.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
+    img_barrier2.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
+    img_barrier2.oldLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+    img_barrier2.newLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+    img_barrier2.image = image.handle();
+    img_barrier2.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    img_barrier2.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    img_barrier2.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+    img_barrier2.subresourceRange.baseArrayLayer = 0;
+    img_barrier2.subresourceRange.baseMipLevel = 0;
+    img_barrier2.subresourceRange.layerCount = 1;
+    img_barrier2.subresourceRange.levelCount = 1;
+    vk::CmdPipelineBarrier(primary_command_buffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 0, nullptr,
+                           0, nullptr, 1, &img_barrier2);
+    err = vk::EndCommandBuffer(primary_command_buffer);
+    ASSERT_VK_SUCCESS(err);
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &primary_command_buffer;
+    err = vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    ASSERT_VK_SUCCESS(err);
+    m_errorMonitor->VerifyNotFound();
+    err = vk::DeviceWaitIdle(m_device->device());
+    ASSERT_VK_SUCCESS(err);
+    vk::FreeCommandBuffers(m_device->device(), m_commandPool->handle(), 1, &secondary_command_buffer);
+    vk::FreeCommandBuffers(m_device->device(), m_commandPool->handle(), 1, &primary_command_buffer);
+}
+
+// This is a positive test. No failures are expected.
+TEST_F(VkPositiveLayerTest, IgnoreUnrelatedDescriptor) {
+    TEST_DESCRIPTION(
+        "Ensure that the vkUpdateDescriptorSets validation code is ignoring VkWriteDescriptorSet members that are not related to "
+        "the descriptor type specified by VkWriteDescriptorSet::descriptorType.  Correct validation behavior will result in the "
+        "test running to completion without validation errors.");
+
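+    // A deliberately bogus pointer value; the layers must ignore it for members that are
+    // unrelated to the descriptorType being written.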
+    const uintptr_t invalid_ptr = 0xcdcdcdcd;
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // Verify VK_FORMAT_R8_UNORM supports VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT
+    const VkFormat format_texel_case = VK_FORMAT_R8_UNORM;
+    const char *format_texel_case_string = "VK_FORMAT_R8_UNORM";
+    VkFormatProperties format_properties;
+    vk::GetPhysicalDeviceFormatProperties(gpu(), format_texel_case, &format_properties);
+    if (!(format_properties.bufferFeatures & VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT)) {
+        printf("%s Test requires %s to support VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT\n", kSkipPrefix, format_texel_case_string);
+        return;
+    }
+
+    // Image Case
+    {
+        m_errorMonitor->ExpectSuccess();
+
+        VkImageObj image(m_device);
+        image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+
+        VkImageView view = image.targetView(VK_FORMAT_B8G8R8A8_UNORM);
+
+        OneOffDescriptorSet descriptor_set(m_device, {
+                                                         {0, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                                     });
+
+        VkDescriptorImageInfo image_info = {};
+        image_info.imageView = view;
+        image_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+
+        VkWriteDescriptorSet descriptor_write;
+        memset(&descriptor_write, 0, sizeof(descriptor_write));
+        descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+        descriptor_write.dstSet = descriptor_set.set_;
+        descriptor_write.dstBinding = 0;
+        descriptor_write.descriptorCount = 1;
+        descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
+        descriptor_write.pImageInfo = &image_info;
+
+        // Set pBufferInfo and pTexelBufferView to invalid values, which should be
+        // ignored for descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE.
+        // This will most likely produce a crash if the parameter_validation layer
+        // does not correctly ignore pBufferInfo.
+        descriptor_write.pBufferInfo = reinterpret_cast<const VkDescriptorBufferInfo *>(invalid_ptr);
+        descriptor_write.pTexelBufferView = reinterpret_cast<const VkBufferView *>(invalid_ptr);
+
+        vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+        m_errorMonitor->VerifyNotFound();
+    }
+
+    // Buffer Case
+    {
+        m_errorMonitor->ExpectSuccess();
+
+        uint32_t queue_family_index = 0;
+        VkBufferCreateInfo buffer_create_info = {};
+        buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+        buffer_create_info.size = 1024;
+        buffer_create_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+        buffer_create_info.queueFamilyIndexCount = 1;
+        buffer_create_info.pQueueFamilyIndices = &queue_family_index;
+
+        VkBufferObj buffer;
+        buffer.init(*m_device, buffer_create_info);
+
+        OneOffDescriptorSet descriptor_set(m_device, {
+                                                         {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                                     });
+
+        VkDescriptorBufferInfo buffer_info = {};
+        buffer_info.buffer = buffer.handle();
+        buffer_info.offset = 0;
+        buffer_info.range = 1024;
+
+        VkWriteDescriptorSet descriptor_write;
+        memset(&descriptor_write, 0, sizeof(descriptor_write));
+        descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+        descriptor_write.dstSet = descriptor_set.set_;
+        descriptor_write.dstBinding = 0;
+        descriptor_write.descriptorCount = 1;
+        descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+        descriptor_write.pBufferInfo = &buffer_info;
+
+        // Set pImageInfo and pTexelBufferView to invalid values, which should be
+        // ignored for descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER.
+        // This will most likely produce a crash if the parameter_validation layer
+        // does not correctly ignore pImageInfo.
+        descriptor_write.pImageInfo = reinterpret_cast<const VkDescriptorImageInfo *>(invalid_ptr);
+        descriptor_write.pTexelBufferView = reinterpret_cast<const VkBufferView *>(invalid_ptr);
+
+        vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+        m_errorMonitor->VerifyNotFound();
+    }
+
+    // Texel Buffer Case
+    {
+        m_errorMonitor->ExpectSuccess();
+
+        uint32_t queue_family_index = 0;
+        VkBufferCreateInfo buffer_create_info = {};
+        buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+        buffer_create_info.size = 1024;
+        buffer_create_info.usage = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
+        buffer_create_info.queueFamilyIndexCount = 1;
+        buffer_create_info.pQueueFamilyIndices = &queue_family_index;
+
+        VkBufferObj buffer;
+        buffer.init(*m_device, buffer_create_info);
+
+        VkBufferViewCreateInfo buff_view_ci = {};
+        buff_view_ci.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
+        buff_view_ci.buffer = buffer.handle();
+        buff_view_ci.format = format_texel_case;
+        buff_view_ci.range = VK_WHOLE_SIZE;
+        VkBufferView buffer_view;
+        VkResult err = vk::CreateBufferView(m_device->device(), &buff_view_ci, NULL, &buffer_view);
+        ASSERT_VK_SUCCESS(err);
+        OneOffDescriptorSet descriptor_set(m_device,
+                                           {
+                                               {0, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                           });
+
+        VkWriteDescriptorSet descriptor_write;
+        memset(&descriptor_write, 0, sizeof(descriptor_write));
+        descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+        descriptor_write.dstSet = descriptor_set.set_;
+        descriptor_write.dstBinding = 0;
+        descriptor_write.descriptorCount = 1;
+        descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
+        descriptor_write.pTexelBufferView = &buffer_view;
+
+        // Set pImageInfo and pBufferInfo to invalid values, which should be
+        // ignored for descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER.
+        // This will most likely produce a crash if the parameter_validation layer
+        // does not correctly ignore pImageInfo and pBufferInfo.
+        descriptor_write.pImageInfo = reinterpret_cast<const VkDescriptorImageInfo *>(invalid_ptr);
+        descriptor_write.pBufferInfo = reinterpret_cast<const VkDescriptorBufferInfo *>(invalid_ptr);
+
+        vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+        m_errorMonitor->VerifyNotFound();
+
+        vk::DestroyBufferView(m_device->device(), buffer_view, NULL);
+    }
+}
+
+TEST_F(VkPositiveLayerTest, ImmutableSamplerOnlyDescriptor) {
+    TEST_DESCRIPTION("Bind a DescriptorSet with only an immutable sampler and make sure that we don't warn for no update.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    OneOffDescriptorSet descriptor_set(m_device, {
+                                                     {0, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
+                                                 });
+
+    VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+    VkSampler sampler;
+    VkResult err = vk::CreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+    ASSERT_VK_SUCCESS(err);
+
+    const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});
+
+    m_errorMonitor->ExpectSuccess();
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+                              &descriptor_set.set_, 0, nullptr);
+    m_errorMonitor->VerifyNotFound();
+
+    vk::DestroySampler(m_device->device(), sampler, NULL);
+
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+}
+
+// This is a positive test. No failures are expected.
+TEST_F(VkPositiveLayerTest, EmptyDescriptorUpdateTest) {
+    TEST_DESCRIPTION("Update last descriptor in a set that includes an empty binding");
+    VkResult err;
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    m_errorMonitor->ExpectSuccess();
+
+    // Create layout with two uniform buffer descriptors w/ empty binding between them
+    OneOffDescriptorSet ds(m_device, {
+                                         {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                         {1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 0 /*!*/, 0, nullptr},
+                                         {2, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                     });
+
+    // Create a buffer to be used for update
+    VkBufferCreateInfo buff_ci = {};
+    buff_ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    buff_ci.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+    buff_ci.size = 256;
+    buff_ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    VkBuffer buffer;
+    err = vk::CreateBuffer(m_device->device(), &buff_ci, NULL, &buffer);
+    ASSERT_VK_SUCCESS(err);
+    // Have to bind memory to buffer before descriptor update
+    VkMemoryAllocateInfo mem_alloc = {};
+    mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    mem_alloc.pNext = NULL;
+    mem_alloc.allocationSize = 512;  // initial guess; grown below if the buffer's requirements are larger
+    mem_alloc.memoryTypeIndex = 0;
+
+    VkMemoryRequirements mem_reqs;
+    vk::GetBufferMemoryRequirements(m_device->device(), buffer, &mem_reqs);
+    bool pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc, 0);
+    if (!pass) {
+        printf("%s Failed to allocate memory.\n", kSkipPrefix);
+        vk::DestroyBuffer(m_device->device(), buffer, NULL);
+        return;
+    }
+    // Make sure allocation is sufficiently large to accommodate buffer requirements
+    if (mem_reqs.size > mem_alloc.allocationSize) {
+        mem_alloc.allocationSize = mem_reqs.size;
+    }
+
+    VkDeviceMemory mem;
+    err = vk::AllocateMemory(m_device->device(), &mem_alloc, NULL, &mem);
+    ASSERT_VK_SUCCESS(err);
+    err = vk::BindBufferMemory(m_device->device(), buffer, mem, 0);
+    ASSERT_VK_SUCCESS(err);
+
+    // Only update the descriptor at binding 2
+    VkDescriptorBufferInfo buff_info = {};
+    buff_info.buffer = buffer;
+    buff_info.offset = 0;
+    buff_info.range = VK_WHOLE_SIZE;
+    VkWriteDescriptorSet descriptor_write = {};
+    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_write.dstBinding = 2;
+    descriptor_write.descriptorCount = 1;
+    descriptor_write.pTexelBufferView = nullptr;
+    descriptor_write.pBufferInfo = &buff_info;
+    descriptor_write.pImageInfo = nullptr;
+    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    descriptor_write.dstSet = ds.set_;
+
+    vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+    m_errorMonitor->VerifyNotFound();
+    // Cleanup
+    vk::FreeMemory(m_device->device(), mem, NULL);
+    vk::DestroyBuffer(m_device->device(), buffer, NULL);
+}
+
+// This is a positive test. No failures are expected.
+TEST_F(VkPositiveLayerTest, PushDescriptorNullDstSetTest) {
+    TEST_DESCRIPTION("Use null dstSet in CmdPushDescriptorSetKHR");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; skipped.\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+    } else {
+        printf("%s Push Descriptors Extension not supported, skipping tests\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    m_errorMonitor->ExpectSuccess();
+
+    auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
+    if (push_descriptor_prop.maxPushDescriptors < 1) {
+        // Some implementations report an invalid maxPushDescriptors of 0
+        printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkDescriptorSetLayoutBinding dsl_binding = {};
+    dsl_binding.binding = 2;
+    dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    dsl_binding.descriptorCount = 1;
+    dsl_binding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+    dsl_binding.pImmutableSamplers = NULL;
+
+    const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});
+    // Create push descriptor set layout
+    const VkDescriptorSetLayoutObj push_ds_layout(m_device, {dsl_binding}, VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
+
+    // Use helper to create graphics pipeline
+    CreatePipelineHelper helper(*this);
+    helper.InitInfo();
+    helper.InitState();
+    helper.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&push_ds_layout, &ds_layout});
+    helper.CreateGraphicsPipeline();
+
+    const float vbo_data[3] = {1.f, 0.f, 1.f};
+    VkConstantBufferObj vbo(m_device, sizeof(vbo_data), (const void *)&vbo_data, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);
+
+    VkDescriptorBufferInfo buff_info;
+    buff_info.buffer = vbo.handle();
+    buff_info.offset = 0;
+    buff_info.range = sizeof(vbo_data);
+    VkWriteDescriptorSet descriptor_write = {};
+    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_write.dstBinding = 2;
+    descriptor_write.descriptorCount = 1;
+    descriptor_write.pTexelBufferView = nullptr;
+    descriptor_write.pBufferInfo = &buff_info;
+    descriptor_write.pImageInfo = nullptr;
+    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    descriptor_write.dstSet = 0;  // Should not cause a validation error
+
+    // Find address of extension call and make the call
+    PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR =
+        (PFN_vkCmdPushDescriptorSetKHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdPushDescriptorSetKHR");
+    assert(vkCmdPushDescriptorSetKHR != nullptr);
+
+    m_commandBuffer->begin();
+
+    // On Intel GPUs, the pipeline must be bound before pushing the descriptor set.
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, helper.pipeline_);
+    vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, helper.pipeline_layout_.handle(), 0, 1,
+                              &descriptor_write);
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+// This is a positive test. No failures are expected.
+TEST_F(VkPositiveLayerTest, PushDescriptorUnboundSetTest) {
+    TEST_DESCRIPTION("Ensure that no validation errors are produced for not bound push descriptor sets");
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; skipped.\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+    } else {
+        printf("%s Push Descriptors Extension not supported, skipping tests\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
+    if (push_descriptor_prop.maxPushDescriptors < 1) {
+        // Some implementations report an invalid maxPushDescriptors of 0
+        printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+    m_errorMonitor->ExpectSuccess();
+
+    // Create descriptor set layout
+    VkDescriptorSetLayoutBinding dsl_binding = {};
+    dsl_binding.binding = 2;
+    dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    dsl_binding.descriptorCount = 1;
+    dsl_binding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+    dsl_binding.pImmutableSamplers = NULL;
+
+    OneOffDescriptorSet descriptor_set(m_device, {dsl_binding}, 0, nullptr, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
+                                       nullptr);
+
+    // Create push descriptor set layout
+    const VkDescriptorSetLayoutObj push_ds_layout(m_device, {dsl_binding}, VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
+
+    // Create PSO
+    char const fsSource[] =
+        "#version 450\n"
+        "\n"
+        "layout(location=0) out vec4 x;\n"
+        "layout(set=0) layout(binding=2) uniform foo1 { float x; } bar1;\n"
+        "layout(set=1) layout(binding=2) uniform foo2 { float y; } bar2;\n"
+        "void main(){\n"
+        "   x = vec4(bar1.x) + vec4(bar2.y);\n"
+        "}\n";
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
+    pipe.InitState();
+    // Now use the descriptor layouts to create a pipeline layout
+    pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&push_ds_layout, &descriptor_set.layout_});
+    pipe.CreateGraphicsPipeline();
+
+    const float bo_data[1] = {1.f};
+    VkConstantBufferObj buffer(m_device, sizeof(bo_data), (const void *)&bo_data, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);
+
+    // Update descriptor set
+    descriptor_set.WriteDescriptorBufferInfo(2, buffer.handle(), sizeof(bo_data));
+    descriptor_set.UpdateDescriptorSets();
+
+    PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR =
+        (PFN_vkCmdPushDescriptorSetKHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdPushDescriptorSetKHR");
+    assert(vkCmdPushDescriptorSetKHR != nullptr);
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
+
+    // Push descriptors and bind descriptor set
+    vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
+                              descriptor_set.descriptor_writes.data());
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 1, 1,
+                              &descriptor_set.set_, 0, NULL);
+
+    // No errors should be generated.
+    vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
+
+    m_errorMonitor->VerifyNotFound();
+
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+}
+
+TEST_F(VkPositiveLayerTest, PushDescriptorSetUpdatingSetNumber) {
+    TEST_DESCRIPTION(
+        "Ensure that no validation errors are produced when the push descriptor set number changes "
+        "between two vk::CmdPushDescriptorSetKHR calls.");
+
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+    } else {
+        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
+    if (push_descriptor_prop.maxPushDescriptors < 1) {
+        // Some implementations report an invalid maxPushDescriptors of 0
+        printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+    m_errorMonitor->ExpectSuccess();
+
+    // Create a descriptor to push
+    const uint32_t buffer_data[4] = {4, 5, 6, 7};
+    VkConstantBufferObj buffer_obj(
+        m_device, sizeof(buffer_data), &buffer_data,
+        VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);
+    ASSERT_TRUE(buffer_obj.initialized());
+
+    VkDescriptorBufferInfo buffer_info = {buffer_obj.handle(), 0, VK_WHOLE_SIZE};
+
+    PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR =
+        (PFN_vkCmdPushDescriptorSetKHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdPushDescriptorSetKHR");
+    ASSERT_TRUE(vkCmdPushDescriptorSetKHR != nullptr);
+
+    const VkDescriptorSetLayoutBinding ds_binding_0 = {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT,
+                                                       nullptr};
+    const VkDescriptorSetLayoutBinding ds_binding_1 = {1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT,
+                                                       nullptr};
+    const VkDescriptorSetLayoutObj ds_layout(m_device, {ds_binding_0, ds_binding_1});
+    ASSERT_TRUE(ds_layout.initialized());
+
+    const VkDescriptorSetLayoutBinding push_ds_binding_0 = {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT,
+                                                            nullptr};
+    const VkDescriptorSetLayoutObj push_ds_layout(m_device, {push_ds_binding_0},
+                                                  VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
+    ASSERT_TRUE(push_ds_layout.initialized());
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+    VkPipelineObj pipe0(m_device);
+    VkPipelineObj pipe1(m_device);
+    {
+        // Note: the push descriptor set is set number 2.
+        const VkPipelineLayoutObj pipeline_layout(m_device, {&ds_layout, &ds_layout, &push_ds_layout, &ds_layout});
+        ASSERT_TRUE(pipeline_layout.initialized());
+
+        char const *fsSource =
+            "#version 450\n"
+            "\n"
+            "layout(location=0) out vec4 x;\n"
+            "layout(set=2) layout(binding=0) uniform foo { vec4 y; } bar;\n"
+            "void main(){\n"
+            "   x = bar.y;\n"
+            "}\n";
+
+        VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+        VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+        VkPipelineObj &pipe = pipe0;
+        pipe.SetViewport(m_viewports);
+        pipe.SetScissor(m_scissors);
+        pipe.AddShader(&vs);
+        pipe.AddShader(&fs);
+        pipe.AddDefaultColorAttachment();
+        pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+        vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+
+        const VkWriteDescriptorSet descriptor_write = vk_testing::Device::write_descriptor_set(
+            vk_testing::DescriptorSet(), 0, 0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, &buffer_info);
+
+        // Note: pushing to descriptor set number 2.
+        vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 2, 1,
+                                  &descriptor_write);
+        vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
+    }
+
+    m_errorMonitor->VerifyNotFound();
+
+    {
+        // Note: the push descriptor set is now set number 3.
+        const VkPipelineLayoutObj pipeline_layout(m_device, {&ds_layout, &ds_layout, &ds_layout, &push_ds_layout});
+        ASSERT_TRUE(pipeline_layout.initialized());
+
+        const VkWriteDescriptorSet descriptor_write = vk_testing::Device::write_descriptor_set(
+            vk_testing::DescriptorSet(), 0, 0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, &buffer_info);
+
+        char const *fsSource =
+            "#version 450\n"
+            "\n"
+            "layout(location=0) out vec4 x;\n"
+            "layout(set=3) layout(binding=0) uniform foo { vec4 y; } bar;\n"
+            "void main(){\n"
+            "   x = bar.y;\n"
+            "}\n";
+
+        VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+        VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+        VkPipelineObj &pipe = pipe1;
+        pipe.SetViewport(m_viewports);
+        pipe.SetScissor(m_scissors);
+        pipe.AddShader(&vs);
+        pipe.AddShader(&fs);
+        pipe.AddDefaultColorAttachment();
+        pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+        vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+
+        // Note: now pushing to descriptor set number 3.
+        vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 3, 1,
+                                  &descriptor_write);
+        vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
+    }
+
+    m_errorMonitor->VerifyNotFound();
+
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+}
+
+// This is a positive test. No failures are expected.
+TEST_F(VkPositiveLayerTest, TestAliasedMemoryTracking) {
+    VkResult err;
+    bool pass;
+
+    TEST_DESCRIPTION(
+        "Create a buffer, allocate memory, bind memory, destroy the buffer, create an image, and bind the same memory to it");
+
+    m_errorMonitor->ExpectSuccess();
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkBuffer buffer;
+    VkImage image;
+    VkDeviceMemory mem;
+    VkMemoryRequirements mem_reqs;
+
+    VkBufferCreateInfo buf_info = {};
+    buf_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    buf_info.pNext = NULL;
+    buf_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+    buf_info.size = 256;
+    buf_info.queueFamilyIndexCount = 0;
+    buf_info.pQueueFamilyIndices = NULL;
+    buf_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    buf_info.flags = 0;
+    err = vk::CreateBuffer(m_device->device(), &buf_info, NULL, &buffer);
+    ASSERT_VK_SUCCESS(err);
+
+    vk::GetBufferMemoryRequirements(m_device->device(), buffer, &mem_reqs);
+
+    VkMemoryAllocateInfo alloc_info = {};
+    alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    alloc_info.pNext = NULL;
+    alloc_info.memoryTypeIndex = 0;
+
+    // Ensure memory is big enough for both bindings
+    alloc_info.allocationSize = 0x10000;
+
+    pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &alloc_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+    if (!pass) {
+        printf("%s Failed to allocate memory.\n", kSkipPrefix);
+        vk::DestroyBuffer(m_device->device(), buffer, NULL);
+        return;
+    }
+
+    err = vk::AllocateMemory(m_device->device(), &alloc_info, NULL, &mem);
+    ASSERT_VK_SUCCESS(err);
+
+    uint8_t *pData;
+    err = vk::MapMemory(m_device->device(), mem, 0, mem_reqs.size, 0, (void **)&pData);
+    ASSERT_VK_SUCCESS(err);
+
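+    // memset truncates the pattern to its low byte (0xDE); the exact fill value does not matter here.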
+    memset(pData, 0xCADECADE, static_cast<size_t>(mem_reqs.size));
+
+    vk::UnmapMemory(m_device->device(), mem);
+
+    err = vk::BindBufferMemory(m_device->device(), buffer, mem, 0);
+    ASSERT_VK_SUCCESS(err);
+
+    // NOW, destroy the buffer. Obviously, the resource no longer occupies this
+    // memory. In fact, it was never used by the GPU.
+    // Just be sure, wait for idle.
+    vk::DestroyBuffer(m_device->device(), buffer, NULL);
+    vk::DeviceWaitIdle(m_device->device());
+
+    // Use optimal as some platforms report linear support but then fail image creation
+    VkImageTiling image_tiling = VK_IMAGE_TILING_OPTIMAL;
+    VkImageFormatProperties image_format_properties;
+    vk::GetPhysicalDeviceImageFormatProperties(gpu(), VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_TYPE_2D, image_tiling,
+                                               VK_IMAGE_USAGE_TRANSFER_SRC_BIT, 0, &image_format_properties);
+    if (image_format_properties.maxExtent.width == 0) {
+        printf("%s Image format not supported; skipped.\n", kSkipPrefix);
+        vk::FreeMemory(m_device->device(), mem, NULL);
+        return;
+    }
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
+    image_create_info.extent.width = 64;
+    image_create_info.extent.height = 64;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = image_tiling;
+    image_create_info.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
+    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+    image_create_info.queueFamilyIndexCount = 0;
+    image_create_info.pQueueFamilyIndices = NULL;
+    image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    image_create_info.flags = 0;
+
+    /* Create an image whose memory will alias the allocation that previously backed the
+     * destroyed buffer; no validation error is expected when it is bound below.
+     */
+    err = vk::CreateImage(m_device->device(), &image_create_info, NULL, &image);
+    ASSERT_VK_SUCCESS(err);
+
+    vk::GetImageMemoryRequirements(m_device->device(), image, &mem_reqs);
+
+    VkMemoryAllocateInfo mem_alloc = {};
+    mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    mem_alloc.pNext = NULL;
+    mem_alloc.allocationSize = 0;
+    mem_alloc.memoryTypeIndex = 0;
+    mem_alloc.allocationSize = mem_reqs.size;
+
+    pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+    if (!pass) {
+        printf("%s Failed to allocate memory.\n", kSkipPrefix);
+        vk::FreeMemory(m_device->device(), mem, NULL);
+        vk::DestroyImage(m_device->device(), image, NULL);
+        return;
+    }
+
+    // Rebinding the freed buffer's memory to the image must not trigger a validation error:
+    err = vk::BindImageMemory(m_device->device(), image, mem, 0);
+    ASSERT_VK_SUCCESS(err);
+
+    m_errorMonitor->VerifyNotFound();
+
+    vk::FreeMemory(m_device->device(), mem, NULL);
+    vk::DestroyImage(m_device->device(), image, NULL);
+}
+
+// This is a positive test. No failures are expected.
+TEST_F(VkPositiveLayerTest, TestDestroyFreeNullHandles) {
+    VkResult err;
+
+    TEST_DESCRIPTION("Call all applicable destroy and free routines with NULL handles, expecting no validation errors");
+
+    m_errorMonitor->ExpectSuccess();
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    vk::DestroyBuffer(m_device->device(), VK_NULL_HANDLE, NULL);
+    vk::DestroyBufferView(m_device->device(), VK_NULL_HANDLE, NULL);
+    vk::DestroyCommandPool(m_device->device(), VK_NULL_HANDLE, NULL);
+    vk::DestroyDescriptorPool(m_device->device(), VK_NULL_HANDLE, NULL);
+    vk::DestroyDescriptorSetLayout(m_device->device(), VK_NULL_HANDLE, NULL);
+    vk::DestroyDevice(VK_NULL_HANDLE, NULL);
+    vk::DestroyEvent(m_device->device(), VK_NULL_HANDLE, NULL);
+    vk::DestroyFence(m_device->device(), VK_NULL_HANDLE, NULL);
+    vk::DestroyFramebuffer(m_device->device(), VK_NULL_HANDLE, NULL);
+    vk::DestroyImage(m_device->device(), VK_NULL_HANDLE, NULL);
+    vk::DestroyImageView(m_device->device(), VK_NULL_HANDLE, NULL);
+    vk::DestroyInstance(VK_NULL_HANDLE, NULL);
+    vk::DestroyPipeline(m_device->device(), VK_NULL_HANDLE, NULL);
+    vk::DestroyPipelineCache(m_device->device(), VK_NULL_HANDLE, NULL);
+    vk::DestroyPipelineLayout(m_device->device(), VK_NULL_HANDLE, NULL);
+    vk::DestroyQueryPool(m_device->device(), VK_NULL_HANDLE, NULL);
+    vk::DestroyRenderPass(m_device->device(), VK_NULL_HANDLE, NULL);
+    vk::DestroySampler(m_device->device(), VK_NULL_HANDLE, NULL);
+    vk::DestroySemaphore(m_device->device(), VK_NULL_HANDLE, NULL);
+    vk::DestroyShaderModule(m_device->device(), VK_NULL_HANDLE, NULL);
+
+    VkCommandPool command_pool;
+    VkCommandPoolCreateInfo pool_create_info{};
+    pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+    pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+    pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+    vk::CreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
+    VkCommandBuffer command_buffers[3] = {};
+    VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+    command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+    command_buffer_allocate_info.commandPool = command_pool;
+    command_buffer_allocate_info.commandBufferCount = 1;
+    command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+    vk::AllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &command_buffers[1]);
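+    // Only command_buffers[1] is a real handle; the surrounding VK_NULL_HANDLE entries must be
+    // ignored by vkFreeCommandBuffers without generating validation errors.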
+    vk::FreeCommandBuffers(m_device->device(), command_pool, 3, command_buffers);
+    vk::DestroyCommandPool(m_device->device(), command_pool, NULL);
+
+    VkDescriptorPoolSize ds_type_count = {};
+    ds_type_count.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
+    ds_type_count.descriptorCount = 1;
+
+    VkDescriptorPoolCreateInfo ds_pool_ci = {};
+    ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+    ds_pool_ci.pNext = NULL;
+    ds_pool_ci.maxSets = 1;
+    ds_pool_ci.poolSizeCount = 1;
+    ds_pool_ci.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
+    ds_pool_ci.pPoolSizes = &ds_type_count;
+
+    VkDescriptorPool ds_pool;
+    err = vk::CreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool);
+    ASSERT_VK_SUCCESS(err);
+
+    VkDescriptorSetLayoutBinding dsl_binding = {};
+    dsl_binding.binding = 2;
+    dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
+    dsl_binding.descriptorCount = 1;
+    dsl_binding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+    dsl_binding.pImmutableSamplers = NULL;
+
+    const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});
+
+    VkDescriptorSet descriptor_sets[3] = {};
+    VkDescriptorSetAllocateInfo alloc_info = {};
+    alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+    alloc_info.descriptorSetCount = 1;
+    alloc_info.descriptorPool = ds_pool;
+    alloc_info.pSetLayouts = &ds_layout.handle();
+    err = vk::AllocateDescriptorSets(m_device->device(), &alloc_info, &descriptor_sets[1]);
+    ASSERT_VK_SUCCESS(err);
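+    // descriptor_sets[0] and [2] stay VK_NULL_HANDLE; vkFreeDescriptorSets must silently ignore them.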
+    vk::FreeDescriptorSets(m_device->device(), ds_pool, 3, descriptor_sets);
+    vk::DestroyDescriptorPool(m_device->device(), ds_pool, NULL);
+
+    vk::FreeMemory(m_device->device(), VK_NULL_HANDLE, NULL);
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, QueueSubmitSemaphoresAndLayoutTracking) {
+    TEST_DESCRIPTION("Submit multiple command buffers with chained semaphore signals and layout transitions");
+
+    m_errorMonitor->ExpectSuccess();
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    VkCommandBuffer cmd_bufs[4];
+    VkCommandBufferAllocateInfo alloc_info;
+    alloc_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+    alloc_info.pNext = NULL;
+    alloc_info.commandBufferCount = 4;
+    alloc_info.commandPool = m_commandPool->handle();
+    alloc_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+    vk::AllocateCommandBuffers(m_device->device(), &alloc_info, cmd_bufs);
+    VkImageObj image(m_device);
+    image.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM,
+               (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT),
+               VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(image.initialized());
+    VkCommandBufferBeginInfo cb_binfo;
+    cb_binfo.pNext = NULL;
+    cb_binfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+    cb_binfo.pInheritanceInfo = VK_NULL_HANDLE;
+    cb_binfo.flags = 0;
+    // Use 4 command buffers, each with an image layout transition, ColorAO->General->ColorAO->TransferSrc->TransferDst
+    vk::BeginCommandBuffer(cmd_bufs[0], &cb_binfo);
+    VkImageMemoryBarrier img_barrier = {};
+    img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+    img_barrier.pNext = NULL;
+    img_barrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
+    img_barrier.dstAccessMask = VK_ACCESS_HOST_WRITE_BIT;
+    img_barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    img_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
+    img_barrier.image = image.handle();
+    img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    img_barrier.subresourceRange.baseArrayLayer = 0;
+    img_barrier.subresourceRange.baseMipLevel = 0;
+    img_barrier.subresourceRange.layerCount = 1;
+    img_barrier.subresourceRange.levelCount = 1;
+    vk::CmdPipelineBarrier(cmd_bufs[0], VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0, 0, nullptr, 0, nullptr, 1,
+                           &img_barrier);
+    vk::EndCommandBuffer(cmd_bufs[0]);
+    vk::BeginCommandBuffer(cmd_bufs[1], &cb_binfo);
+    img_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
+    img_barrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    vk::CmdPipelineBarrier(cmd_bufs[1], VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0, 0, nullptr, 0, nullptr, 1,
+                           &img_barrier);
+    vk::EndCommandBuffer(cmd_bufs[1]);
+    vk::BeginCommandBuffer(cmd_bufs[2], &cb_binfo);
+    img_barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    img_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
+    vk::CmdPipelineBarrier(cmd_bufs[2], VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0, 0, nullptr, 0, nullptr, 1,
+                           &img_barrier);
+    vk::EndCommandBuffer(cmd_bufs[2]);
+    vk::BeginCommandBuffer(cmd_bufs[3], &cb_binfo);
+    img_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
+    img_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
+    vk::CmdPipelineBarrier(cmd_bufs[3], VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0, 0, nullptr, 0, nullptr, 1,
+                           &img_barrier);
+    vk::EndCommandBuffer(cmd_bufs[3]);
+
+    // Submit 4 command buffers in 3 submits, with submits 2 and 3 waiting for semaphores from submits 1 and 2
+    VkSemaphore semaphore1, semaphore2;
+    VkSemaphoreCreateInfo semaphore_create_info{};
+    semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+    vk::CreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore1);
+    vk::CreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore2);
+    VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
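+    // Wait-stage mask shared by each submit's pWaitDstStageMask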
+    VkSubmitInfo submit_info[3];
+    submit_info[0].sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info[0].pNext = nullptr;
+    submit_info[0].commandBufferCount = 1;
+    submit_info[0].pCommandBuffers = &cmd_bufs[0];
+    submit_info[0].signalSemaphoreCount = 1;
+    submit_info[0].pSignalSemaphores = &semaphore1;
+    submit_info[0].waitSemaphoreCount = 0;
+    submit_info[0].pWaitDstStageMask = flags;
+    submit_info[1].sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info[1].pNext = nullptr;
+    submit_info[1].commandBufferCount = 1;
+    submit_info[1].pCommandBuffers = &cmd_bufs[1];
+    submit_info[1].waitSemaphoreCount = 1;
+    submit_info[1].pWaitSemaphores = &semaphore1;
+    submit_info[1].signalSemaphoreCount = 1;
+    submit_info[1].pSignalSemaphores = &semaphore2;
+    submit_info[1].pWaitDstStageMask = flags;
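+    // The third submit batches the last two command buffers (cmd_bufs[2] and cmd_bufs[3]).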
+    submit_info[2].sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info[2].pNext = nullptr;
+    submit_info[2].commandBufferCount = 2;
+    submit_info[2].pCommandBuffers = &cmd_bufs[2];
+    submit_info[2].waitSemaphoreCount = 1;
+    submit_info[2].pWaitSemaphores = &semaphore2;
+    submit_info[2].signalSemaphoreCount = 0;
+    submit_info[2].pSignalSemaphores = nullptr;
+    submit_info[2].pWaitDstStageMask = flags;
+    vk::QueueSubmit(m_device->m_queue, 3, submit_info, VK_NULL_HANDLE);
+    vk::QueueWaitIdle(m_device->m_queue);
+
+    vk::DestroySemaphore(m_device->device(), semaphore1, NULL);
+    vk::DestroySemaphore(m_device->device(), semaphore2, NULL);
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, DynamicOffsetWithInactiveBinding) {
+    // Create a descriptorSet w/ dynamic descriptors where 1 binding is inactive
+    // We previously had a bug where dynamic offset of inactive bindings was still being used
+    m_errorMonitor->ExpectSuccess();
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    OneOffDescriptorSet descriptor_set(m_device,
+                                       {
+                                           {2, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
+                                           {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
+                                           {1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
+                                       });
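+    // Bindings are listed out of order (2, 0, 1); binding 1 is the one left unused by the shader below.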
+
+    // Create two buffers to update the descriptors with
+    // The first will be 2k and used for bindings 0 & 1, the second is 1k for binding 2
+    uint32_t qfi = 0;
+    VkBufferCreateInfo buffCI = {};
+    buffCI.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    buffCI.size = 2048;
+    buffCI.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+    buffCI.queueFamilyIndexCount = 1;
+    buffCI.pQueueFamilyIndices = &qfi;
+
+    VkBufferObj dynamic_uniform_buffer_1, dynamic_uniform_buffer_2;
+    dynamic_uniform_buffer_1.init(*m_device, buffCI);
+    buffCI.size = 1024;
+    dynamic_uniform_buffer_2.init(*m_device, buffCI);
+
+    // Update descriptors
+    const uint32_t BINDING_COUNT = 3;
+    VkDescriptorBufferInfo buff_info[BINDING_COUNT] = {};
+    buff_info[0].buffer = dynamic_uniform_buffer_1.handle();
+    buff_info[0].offset = 0;
+    buff_info[0].range = 256;
+    buff_info[1].buffer = dynamic_uniform_buffer_1.handle();
+    buff_info[1].offset = 256;
+    buff_info[1].range = 512;
+    buff_info[2].buffer = dynamic_uniform_buffer_2.handle();
+    buff_info[2].offset = 0;
+    buff_info[2].range = 512;
+
+    VkWriteDescriptorSet descriptor_write;
+    memset(&descriptor_write, 0, sizeof(descriptor_write));
+    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_write.dstSet = descriptor_set.set_;
+    descriptor_write.dstBinding = 0;
+    descriptor_write.descriptorCount = BINDING_COUNT;
+    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
+    descriptor_write.pBufferInfo = buff_info;
+
+    vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+
+    // Create PSO to be used for draw-time errors below
+    char const *fsSource =
+        "#version 450\n"
+        "\n"
+        "layout(location=0) out vec4 x;\n"
+        "layout(set=0) layout(binding=0) uniform foo1 { int x; int y; } bar1;\n"
+        "layout(set=0) layout(binding=2) uniform foo2 { int x; int y; } bar2;\n"
+        "void main(){\n"
+        "   x = vec4(bar1.y) + vec4(bar2.y);\n"
+        "}\n";
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.InitState();
+    pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
+    pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&descriptor_set.layout_});
+    pipe.CreateGraphicsPipeline();
+
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
+    // This bind should succeed even though the dynamic offset of inactive binding 1 oversteps binding 2's buffer size;
+    //   we used to have a bug in this case.
+    uint32_t dyn_off[BINDING_COUNT] = {0, 1024, 256};
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
+                              &descriptor_set.set_, BINDING_COUNT, dyn_off);
+    m_commandBuffer->Draw(1, 0, 0, 0);
+    m_errorMonitor->VerifyNotFound();
+
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+}
+
+TEST_F(VkPositiveLayerTest, NonCoherentMemoryMapping) {
+    TEST_DESCRIPTION(
+        "Ensure that the validation layer's handling of non-coherent memory mapping with VK_WHOLE_SIZE does not cause access "
+        "violations");
+    VkResult err;
+    uint8_t *pData;
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkDeviceMemory mem;
+    VkMemoryRequirements mem_reqs;
+    mem_reqs.memoryTypeBits = 0xFFFFFFFF;
+    const VkDeviceSize atom_size = m_device->props.limits.nonCoherentAtomSize;
+    VkMemoryAllocateInfo alloc_info = {};
+    alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    alloc_info.pNext = NULL;
+    alloc_info.memoryTypeIndex = 0;
+
+    static const VkDeviceSize allocation_size = 32 * atom_size;
+    alloc_info.allocationSize = allocation_size;
+
+    // Find a memory configuration WITHOUT the COHERENT bit; otherwise skip the test
+    bool pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &alloc_info, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
+                                                VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
+    if (!pass) {
+        pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &alloc_info,
+                                               VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
+                                               VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
+        if (!pass) {
+            pass = m_device->phy().set_memory_type(
+                mem_reqs.memoryTypeBits, &alloc_info,
+                VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT,
+                VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
+            if (!pass) {
+                printf("%s Couldn't find a memory type wihtout a COHERENT bit.\n", kSkipPrefix);
+                return;
+            }
+        }
+    }
+
+    err = vk::AllocateMemory(m_device->device(), &alloc_info, NULL, &mem);
+    ASSERT_VK_SUCCESS(err);
+
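+    // For non-coherent memory, VkMappedMemoryRange offsets must be multiples of nonCoherentAtomSize,
+    // and sizes must be multiples of it, reach the end of the allocation, or be VK_WHOLE_SIZE --
+    // hence all offsets and sizes below are expressed in atom_size units.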
+    // Map/Flush/Invalidate using WHOLE_SIZE and zero offsets and entire mapped range
+    m_errorMonitor->ExpectSuccess();
+    err = vk::MapMemory(m_device->device(), mem, 0, VK_WHOLE_SIZE, 0, (void **)&pData);
+    ASSERT_VK_SUCCESS(err);
+    VkMappedMemoryRange mmr = {};
+    mmr.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
+    mmr.memory = mem;
+    mmr.offset = 0;
+    mmr.size = VK_WHOLE_SIZE;
+    err = vk::FlushMappedMemoryRanges(m_device->device(), 1, &mmr);
+    ASSERT_VK_SUCCESS(err);
+    err = vk::InvalidateMappedMemoryRanges(m_device->device(), 1, &mmr);
+    ASSERT_VK_SUCCESS(err);
+    m_errorMonitor->VerifyNotFound();
+    vk::UnmapMemory(m_device->device(), mem);
+
+    // Map/Flush/Invalidate using WHOLE_SIZE and an offset and entire mapped range
+    m_errorMonitor->ExpectSuccess();
+    err = vk::MapMemory(m_device->device(), mem, 5 * atom_size, VK_WHOLE_SIZE, 0, (void **)&pData);
+    ASSERT_VK_SUCCESS(err);
+    mmr.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
+    mmr.memory = mem;
+    mmr.offset = 6 * atom_size;
+    mmr.size = VK_WHOLE_SIZE;
+    err = vk::FlushMappedMemoryRanges(m_device->device(), 1, &mmr);
+    ASSERT_VK_SUCCESS(err);
+    err = vk::InvalidateMappedMemoryRanges(m_device->device(), 1, &mmr);
+    ASSERT_VK_SUCCESS(err);
+    m_errorMonitor->VerifyNotFound();
+    vk::UnmapMemory(m_device->device(), mem);
+
+    // Map with offset and size
+    // Flush/Invalidate subrange of mapped area with offset and size
+    m_errorMonitor->ExpectSuccess();
+    err = vk::MapMemory(m_device->device(), mem, 3 * atom_size, 9 * atom_size, 0, (void **)&pData);
+    ASSERT_VK_SUCCESS(err);
+    mmr.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
+    mmr.memory = mem;
+    mmr.offset = 4 * atom_size;
+    mmr.size = 2 * atom_size;
+    err = vk::FlushMappedMemoryRanges(m_device->device(), 1, &mmr);
+    ASSERT_VK_SUCCESS(err);
+    err = vk::InvalidateMappedMemoryRanges(m_device->device(), 1, &mmr);
+    ASSERT_VK_SUCCESS(err);
+    m_errorMonitor->VerifyNotFound();
+    vk::UnmapMemory(m_device->device(), mem);
+
+    // Map without offset and flush WHOLE_SIZE with two separate offsets
+    m_errorMonitor->ExpectSuccess();
+    err = vk::MapMemory(m_device->device(), mem, 0, VK_WHOLE_SIZE, 0, (void **)&pData);
+    ASSERT_VK_SUCCESS(err);
+    mmr.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
+    mmr.memory = mem;
+    mmr.offset = allocation_size - (4 * atom_size);
+    mmr.size = VK_WHOLE_SIZE;
+    err = vk::FlushMappedMemoryRanges(m_device->device(), 1, &mmr);
+    ASSERT_VK_SUCCESS(err);
+    mmr.offset = allocation_size - (6 * atom_size);
+    mmr.size = VK_WHOLE_SIZE;
+    err = vk::FlushMappedMemoryRanges(m_device->device(), 1, &mmr);
+    ASSERT_VK_SUCCESS(err);
+    m_errorMonitor->VerifyNotFound();
+    vk::UnmapMemory(m_device->device(), mem);
+
+    vk::FreeMemory(m_device->device(), mem, NULL);
+}
+
+// This is a positive test. We used to expect an error in this case, but the spec now allows it.
+TEST_F(VkPositiveLayerTest, ResetUnsignaledFence) {
+    m_errorMonitor->ExpectSuccess();
+    vk_testing::Fence testFence;
+    VkFenceCreateInfo fenceInfo = {};
+    fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+    fenceInfo.pNext = NULL;
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    testFence.init(*m_device, fenceInfo);
+    VkFence fences[1] = {testFence.handle()};
+    VkResult result = vk::ResetFences(m_device->device(), 1, fences);
+    ASSERT_VK_SUCCESS(result);
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, CommandBufferSimultaneousUseSync) {
+    m_errorMonitor->ExpectSuccess();
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    VkResult err;
+
+    // Record (empty!) command buffer that can be submitted multiple times
+    // simultaneously.
+    VkCommandBufferBeginInfo cbbi = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr,
+                                     VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT, nullptr};
+    m_commandBuffer->begin(&cbbi);
+    m_commandBuffer->end();
+
+    VkFenceCreateInfo fci = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, 0};
+    VkFence fence;
+    err = vk::CreateFence(m_device->device(), &fci, nullptr, &fence);
+    ASSERT_VK_SUCCESS(err);
+
+    VkSemaphoreCreateInfo sci = {VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, nullptr, 0};
+    VkSemaphore s1, s2;
+    err = vk::CreateSemaphore(m_device->device(), &sci, nullptr, &s1);
+    ASSERT_VK_SUCCESS(err);
+    err = vk::CreateSemaphore(m_device->device(), &sci, nullptr, &s2);
+    ASSERT_VK_SUCCESS(err);
+
+    // Submit CB once signaling s1, with fence so we can roll forward to its retirement.
+    VkSubmitInfo si = {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, nullptr, 1, &m_commandBuffer->handle(), 1, &s1};
+    err = vk::QueueSubmit(m_device->m_queue, 1, &si, fence);
+    ASSERT_VK_SUCCESS(err);
+
+    // Submit CB again, signaling s2.
+    si.pSignalSemaphores = &s2;
+    err = vk::QueueSubmit(m_device->m_queue, 1, &si, VK_NULL_HANDLE);
+    ASSERT_VK_SUCCESS(err);
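+    // SIMULTANEOUS_USE_BIT allows the same command buffer to be pending in both submissions at once.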
+
+    // Wait for fence.
+    err = vk::WaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
+    ASSERT_VK_SUCCESS(err);
+
+    // The CB is still in flight from the second submission, but semaphore s1 is no
+    // longer in flight, so it can be deleted.
+    vk::DestroySemaphore(m_device->device(), s1, nullptr);
+
+    m_errorMonitor->VerifyNotFound();
+
+    // Force device idle and clean up remaining objects
+    vk::DeviceWaitIdle(m_device->device());
+    vk::DestroySemaphore(m_device->device(), s2, nullptr);
+    vk::DestroyFence(m_device->device(), fence, nullptr);
+}
+
+TEST_F(VkPositiveLayerTest, FenceCreateSignaledWaitHandling) {
+    m_errorMonitor->ExpectSuccess();
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    VkResult err;
+
+    // A fence created signaled
+    VkFenceCreateInfo fci1 = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, VK_FENCE_CREATE_SIGNALED_BIT};
+    VkFence f1;
+    err = vk::CreateFence(m_device->device(), &fci1, nullptr, &f1);
+    ASSERT_VK_SUCCESS(err);
+
+    // A fence created unsignaled
+    VkFenceCreateInfo fci2 = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, 0};
+    VkFence f2;
+    err = vk::CreateFence(m_device->device(), &fci2, nullptr, &f2);
+    ASSERT_VK_SUCCESS(err);
+
+    // Submit an empty batch with the unsignaled fence
+    VkSubmitInfo si = {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, nullptr, 0, nullptr, 0, nullptr};
+    err = vk::QueueSubmit(m_device->m_queue, 1, &si, f2);
+
+    // Wait on both fences, with signaled first.
+    VkFence fences[] = {f1, f2};
+    vk::WaitForFences(m_device->device(), 2, fences, VK_TRUE, UINT64_MAX);
+
+    // Should have both retired!
+    vk::DestroyFence(m_device->device(), f1, nullptr);
+    vk::DestroyFence(m_device->device(), f2, nullptr);
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, CreateImageViewFollowsParameterCompatibilityRequirements) {
+    TEST_DESCRIPTION("Verify that creating an ImageView with valid usage does not generate validation errors.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    m_errorMonitor->ExpectSuccess();
+
+    VkImageCreateInfo imgInfo = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+                                 nullptr,
+                                 VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
+                                 VK_IMAGE_TYPE_2D,
+                                 VK_FORMAT_R8G8B8A8_UNORM,
+                                 {128, 128, 1},
+                                 1,
+                                 1,
+                                 VK_SAMPLE_COUNT_1_BIT,
+                                 VK_IMAGE_TILING_OPTIMAL,
+                                 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+                                 VK_SHARING_MODE_EXCLUSIVE,
+                                 0,
+                                 nullptr,
+                                 VK_IMAGE_LAYOUT_UNDEFINED};
+    VkImageObj image(m_device);
+    image.init(&imgInfo);
+    ASSERT_TRUE(image.initialized());
+    image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, ValidUsage) {
+    TEST_DESCRIPTION("Verify that creating an image view from an image with valid usage doesn't generate validation errors");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    m_errorMonitor->ExpectSuccess();
+    // Verify that we can create a view with usage INPUT_ATTACHMENT
+    VkImageObj image(m_device);
+    image.Init(128, 128, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(image.initialized());
+    VkImageView imageView;
+    VkImageViewCreateInfo ivci = {};
+    ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    ivci.image = image.handle();
+    ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    ivci.format = VK_FORMAT_R8G8B8A8_UNORM;
+    ivci.subresourceRange.layerCount = 1;
+    ivci.subresourceRange.baseMipLevel = 0;
+    ivci.subresourceRange.levelCount = 1;
+    ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+
+    vk::CreateImageView(m_device->device(), &ivci, NULL, &imageView);
+    m_errorMonitor->VerifyNotFound();
+    vk::DestroyImageView(m_device->device(), imageView, NULL);
+}
+
+// This is a positive test. No failures are expected.
+TEST_F(VkPositiveLayerTest, BindSparse) {
+    TEST_DESCRIPTION("Bind 2 memory ranges to one image using vkQueueBindSparse, destroy the image and then free the memory");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    auto index = m_device->graphics_queue_node_index_;
+    if (!(m_device->queue_props[index].queueFlags & VK_QUEUE_SPARSE_BINDING_BIT)) {
+        printf("%s Graphics queue does not have sparse binding bit.\n", kSkipPrefix);
+        return;
+    }
+    if (!m_device->phy().features().sparseBinding) {
+        printf("%s Device does not support sparse bindings.\n", kSkipPrefix);
+        return;
+    }
+
+    m_errorMonitor->ExpectSuccess(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT);
+
+    VkImage image;
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
+    image_create_info.extent.width = 64;
+    image_create_info.extent.height = 64;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+    image_create_info.flags = VK_IMAGE_CREATE_SPARSE_BINDING_BIT;
+    VkResult err = vk::CreateImage(m_device->device(), &image_create_info, NULL, &image);
+    ASSERT_VK_SUCCESS(err);
+
+    VkMemoryRequirements memory_reqs;
+    VkDeviceMemory memory_one, memory_two;
+    bool pass;
+    VkMemoryAllocateInfo memory_info = {};
+    memory_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    memory_info.pNext = NULL;
+    memory_info.allocationSize = 0;
+    memory_info.memoryTypeIndex = 0;
+    vk::GetImageMemoryRequirements(m_device->device(), image, &memory_reqs);
+    // Find an image big enough to allow sparse mapping of 2 memory regions
+    // Increase the image size until it is at least twice the
+    // size of the required alignment, to ensure we can bind both
+    // allocated memory blocks to the image on aligned offsets.
+    while (memory_reqs.size < (memory_reqs.alignment * 2)) {
+        vk::DestroyImage(m_device->device(), image, nullptr);
+        image_create_info.extent.width *= 2;
+        image_create_info.extent.height *= 2;
+        err = vk::CreateImage(m_device->device(), &image_create_info, nullptr, &image);
+        ASSERT_VK_SUCCESS(err);
+        vk::GetImageMemoryRequirements(m_device->device(), image, &memory_reqs);
+    }
+    // Allocate 2 memory regions of minimum alignment size, bind one at 0, the other
+    // at the end of the first
+    memory_info.allocationSize = memory_reqs.alignment;
+    pass = m_device->phy().set_memory_type(memory_reqs.memoryTypeBits, &memory_info, 0);
+    ASSERT_TRUE(pass);
+    err = vk::AllocateMemory(m_device->device(), &memory_info, NULL, &memory_one);
+    ASSERT_VK_SUCCESS(err);
+    err = vk::AllocateMemory(m_device->device(), &memory_info, NULL, &memory_two);
+    ASSERT_VK_SUCCESS(err);
+    VkSparseMemoryBind binds[2];
+    binds[0].flags = 0;
+    binds[0].memory = memory_one;
+    binds[0].memoryOffset = 0;
+    binds[0].resourceOffset = 0;
+    binds[0].size = memory_info.allocationSize;
+    binds[1].flags = 0;
+    binds[1].memory = memory_two;
+    binds[1].memoryOffset = 0;
+    binds[1].resourceOffset = memory_info.allocationSize;
+    binds[1].size = memory_info.allocationSize;
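+    // The two opaque binds back resource ranges [0, alignment) and [alignment, 2 * alignment) with
+    // separate allocations, so the start of the image's opaque range spans both memory objects.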
+
+    VkSparseImageOpaqueMemoryBindInfo opaqueBindInfo;
+    opaqueBindInfo.image = image;
+    opaqueBindInfo.bindCount = 2;
+    opaqueBindInfo.pBinds = binds;
+
+    VkFence fence = VK_NULL_HANDLE;
+    VkBindSparseInfo bindSparseInfo = {};
+    bindSparseInfo.sType = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO;
+    bindSparseInfo.imageOpaqueBindCount = 1;
+    bindSparseInfo.pImageOpaqueBinds = &opaqueBindInfo;
+
+    vk::QueueBindSparse(m_device->m_queue, 1, &bindSparseInfo, fence);
+    vk::QueueWaitIdle(m_device->m_queue);
+    vk::DestroyImage(m_device->device(), image, NULL);
+    vk::FreeMemory(m_device->device(), memory_one, NULL);
+    vk::FreeMemory(m_device->device(), memory_two, NULL);
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, BindSparseMetadata) {
+    TEST_DESCRIPTION("Bind memory for the metadata aspect of a sparse image");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    auto index = m_device->graphics_queue_node_index_;
+    if (!(m_device->queue_props[index].queueFlags & VK_QUEUE_SPARSE_BINDING_BIT)) {
+        printf("%s Graphics queue does not have sparse binding bit.\n", kSkipPrefix);
+        return;
+    }
+    if (!m_device->phy().features().sparseResidencyImage2D) {
+        printf("%s Device does not support sparse residency for images.\n", kSkipPrefix);
+        return;
+    }
+
+    m_errorMonitor->ExpectSuccess(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT);
+
+    // Create a sparse image
+    VkImage image;
+    VkImageCreateInfo image_create_info = {};
+    image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    image_create_info.pNext = NULL;
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = VK_FORMAT_B8G8R8A8_UNORM;
+    image_create_info.extent.width = 64;
+    image_create_info.extent.height = 64;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+    image_create_info.flags = VK_IMAGE_CREATE_SPARSE_BINDING_BIT | VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT;
+    VkResult err = vk::CreateImage(m_device->device(), &image_create_info, NULL, &image);
+    ASSERT_VK_SUCCESS(err);
+
+    // Query image memory requirements
+    VkMemoryRequirements memory_reqs;
+    vk::GetImageMemoryRequirements(m_device->device(), image, &memory_reqs);
+
+    // Query sparse memory requirements
+    uint32_t sparse_reqs_count = 0;
+    vk::GetImageSparseMemoryRequirements(m_device->device(), image, &sparse_reqs_count, nullptr);
+    std::vector<VkSparseImageMemoryRequirements> sparse_reqs(sparse_reqs_count);
+    vk::GetImageSparseMemoryRequirements(m_device->device(), image, &sparse_reqs_count, sparse_reqs.data());
+
+    // Find requirements for metadata aspect
+    const VkSparseImageMemoryRequirements *metadata_reqs = nullptr;
+    for (auto const &aspect_sparse_reqs : sparse_reqs) {
+        if (aspect_sparse_reqs.formatProperties.aspectMask == VK_IMAGE_ASPECT_METADATA_BIT) {
+            metadata_reqs = &aspect_sparse_reqs;
+        }
+    }
+
+    if (!metadata_reqs) {
+        printf("%s Sparse image does not require memory for metadata.\n", kSkipPrefix);
+    } else {
+        // Allocate memory for the metadata
+        VkDeviceMemory metadata_memory = VK_NULL_HANDLE;
+        VkMemoryAllocateInfo metadata_memory_info = {};
+        metadata_memory_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+        metadata_memory_info.allocationSize = metadata_reqs->imageMipTailSize;
+        m_device->phy().set_memory_type(memory_reqs.memoryTypeBits, &metadata_memory_info, 0);
+        err = vk::AllocateMemory(m_device->device(), &metadata_memory_info, NULL, &metadata_memory);
+        ASSERT_VK_SUCCESS(err);
+
+        // Bind metadata
+        VkSparseMemoryBind sparse_bind = {};
+        sparse_bind.resourceOffset = metadata_reqs->imageMipTailOffset;
+        sparse_bind.size = metadata_reqs->imageMipTailSize;
+        sparse_bind.memory = metadata_memory;
+        sparse_bind.memoryOffset = 0;
+        sparse_bind.flags = VK_SPARSE_MEMORY_BIND_METADATA_BIT;
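+        // Metadata binds must set VK_SPARSE_MEMORY_BIND_METADATA_BIT; the offset and size above come
+        // from the mip tail region reported in the sparse memory requirements.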
+
+        VkSparseImageOpaqueMemoryBindInfo opaque_bind_info = {};
+        opaque_bind_info.image = image;
+        opaque_bind_info.bindCount = 1;
+        opaque_bind_info.pBinds = &sparse_bind;
+
+        VkBindSparseInfo bind_info = {};
+        bind_info.sType = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO;
+        bind_info.imageOpaqueBindCount = 1;
+        bind_info.pImageOpaqueBinds = &opaque_bind_info;
+
+        vk::QueueBindSparse(m_device->m_queue, 1, &bind_info, VK_NULL_HANDLE);
+        m_errorMonitor->VerifyNotFound();
+
+        // Cleanup
+        vk::QueueWaitIdle(m_device->m_queue);
+        vk::FreeMemory(m_device->device(), metadata_memory, NULL);
+    }
+
+    vk::DestroyImage(m_device->device(), image, NULL);
+}
+
+TEST_F(VkPositiveLayerTest, FramebufferBindingDestroyCommandPool) {
+    TEST_DESCRIPTION(
+        "This test should pass. Create a Framebuffer and command buffer, bind them together, then destroy command pool and "
+        "framebuffer and verify there are no errors.");
+
+    m_errorMonitor->ExpectSuccess();
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // A renderpass with one color attachment.
+    VkAttachmentDescription attachment = {0,
+                                          VK_FORMAT_R8G8B8A8_UNORM,
+                                          VK_SAMPLE_COUNT_1_BIT,
+                                          VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+                                          VK_ATTACHMENT_STORE_OP_STORE,
+                                          VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+                                          VK_ATTACHMENT_STORE_OP_DONT_CARE,
+                                          VK_IMAGE_LAYOUT_UNDEFINED,
+                                          VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+
+    VkAttachmentReference att_ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+
+    VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &att_ref, nullptr, nullptr, 0, nullptr};
+
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &attachment, 1, &subpass, 0, nullptr};
+
+    VkRenderPass rp;
+    VkResult err = vk::CreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+    ASSERT_VK_SUCCESS(err);
+
+    // A compatible framebuffer.
+    VkImageObj image(m_device);
+    image.Init(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(image.initialized());
+
+    VkImageView view = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
+
+    VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &view, 32, 32, 1};
+    VkFramebuffer fb;
+    err = vk::CreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
+    ASSERT_VK_SUCCESS(err);
+
+    // Explicitly create a command buffer to bind the FB to so that we can then
+    // destroy the command pool in order to implicitly free the command buffer
+    VkCommandPool command_pool;
+    VkCommandPoolCreateInfo pool_create_info{};
+    pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+    pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+    pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+    vk::CreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
+
+    VkCommandBuffer command_buffer;
+    VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+    command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+    command_buffer_allocate_info.commandPool = command_pool;
+    command_buffer_allocate_info.commandBufferCount = 1;
+    command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+    vk::AllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &command_buffer);
+
+    // Begin our cmd buffer with renderpass using our framebuffer
+    VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp, fb, {{0, 0}, {32, 32}}, 0, nullptr};
+    VkCommandBufferBeginInfo begin_info{};
+    begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+    vk::BeginCommandBuffer(command_buffer, &begin_info);
+
+    vk::CmdBeginRenderPass(command_buffer, &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+    vk::CmdEndRenderPass(command_buffer);
+    vk::EndCommandBuffer(command_buffer);
+    // Destroy command pool to implicitly free command buffer
+    vk::DestroyCommandPool(m_device->device(), command_pool, NULL);
+    vk::DestroyFramebuffer(m_device->device(), fb, nullptr);
+    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, FramebufferCreateDepthStencilLayoutTransitionForDepthOnlyImageView) {
+    TEST_DESCRIPTION(
+        "Validate that when an imageView of a depth/stencil image is used as a depth/stencil framebuffer attachment, the "
+        "aspectMask is ignored and both depth and stencil image subresources are used.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    VkFormatProperties format_properties;
+    vk::GetPhysicalDeviceFormatProperties(gpu(), VK_FORMAT_D32_SFLOAT_S8_UINT, &format_properties);
+    if (!(format_properties.optimalTilingFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) {
+        printf("%s Image format does not support sampling.\n", kSkipPrefix);
+        return;
+    }
+
+    m_errorMonitor->ExpectSuccess();
+
+    VkAttachmentDescription attachment = {0,
+                                          VK_FORMAT_D32_SFLOAT_S8_UINT,
+                                          VK_SAMPLE_COUNT_1_BIT,
+                                          VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+                                          VK_ATTACHMENT_STORE_OP_STORE,
+                                          VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+                                          VK_ATTACHMENT_STORE_OP_DONT_CARE,
+                                          VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
+                                          VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
+
+    VkAttachmentReference att_ref = {0, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
+
+    VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 0, nullptr, nullptr, &att_ref, 0, nullptr};
+
+    VkSubpassDependency dep = {0,
+                               0,
+                               VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+                               VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+                               VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+                               VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+                               VK_DEPENDENCY_BY_REGION_BIT};
+
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, &attachment, 1, &subpass, 1, &dep};
+
+    VkResult err;
+    VkRenderPass rp;
+    err = vk::CreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+    ASSERT_VK_SUCCESS(err);
+
+    VkImageObj image(m_device);
+    image.InitNoLayout(32, 32, 1, VK_FORMAT_D32_SFLOAT_S8_UINT,
+                       0x26,  // usage: TRANSFER_DST | SAMPLED | DEPTH_STENCIL_ATTACHMENT
+                       VK_IMAGE_TILING_OPTIMAL, 0);
+    ASSERT_TRUE(image.initialized());
+    image.SetLayout(0x6, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);  // aspect DEPTH | STENCIL
+
+    VkImageView view = image.targetView(VK_FORMAT_D32_SFLOAT_S8_UINT, VK_IMAGE_ASPECT_DEPTH_BIT);
+
+    VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &view, 32, 32, 1};
+    VkFramebuffer fb;
+    err = vk::CreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
+    ASSERT_VK_SUCCESS(err);
+
+    m_commandBuffer->begin();
+
+    VkImageMemoryBarrier imb = {};
+    imb.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+    imb.pNext = nullptr;
+    imb.srcAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
+    imb.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
+    imb.oldLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+    imb.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+    imb.srcQueueFamilyIndex = 0;
+    imb.dstQueueFamilyIndex = 0;
+    imb.image = image.handle();
+    imb.subresourceRange.aspectMask = 0x6;  // DEPTH | STENCIL
+    imb.subresourceRange.baseMipLevel = 0;
+    imb.subresourceRange.levelCount = 0x1;
+    imb.subresourceRange.baseArrayLayer = 0;
+    imb.subresourceRange.layerCount = 0x1;
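+    // Even though the framebuffer attachment view was created depth-only, barriers on a combined
+    // depth/stencil image must include both aspects, hence aspectMask 0x6 (DEPTH | STENCIL) above.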
+
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
+                           VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1, &imb);
+
+    m_commandBuffer->end();
+    m_commandBuffer->QueueCommandBuffer(false);
+    m_errorMonitor->VerifyNotFound();
+
+    vk::DestroyFramebuffer(m_device->device(), fb, nullptr);
+    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+}
+
+// This is a positive test.  No errors should be generated.
+TEST_F(VkPositiveLayerTest, BarrierLayoutToImageUsage) {
+    TEST_DESCRIPTION("Ensure barriers' new and old VkImageLayout are compatible with their images' VkImageUsageFlags");
+
+    m_errorMonitor->ExpectSuccess();
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    auto depth_format = FindSupportedDepthStencilFormat(gpu());
+    if (!depth_format) {
+        printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkImageMemoryBarrier img_barrier = {};
+    img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+    img_barrier.pNext = NULL;
+    img_barrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
+    img_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
+    img_barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    img_barrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    img_barrier.subresourceRange.baseArrayLayer = 0;
+    img_barrier.subresourceRange.baseMipLevel = 0;
+    img_barrier.subresourceRange.layerCount = 1;
+    img_barrier.subresourceRange.levelCount = 1;
+
+    {
+        VkImageObj img_color(m_device);
+        img_color.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
+        ASSERT_TRUE(img_color.initialized());
+
+        VkImageObj img_ds1(m_device);
+        img_ds1.Init(128, 128, 1, depth_format, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
+        ASSERT_TRUE(img_ds1.initialized());
+
+        VkImageObj img_ds2(m_device);
+        img_ds2.Init(128, 128, 1, depth_format, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
+        ASSERT_TRUE(img_ds2.initialized());
+
+        VkImageObj img_xfer_src(m_device);
+        img_xfer_src.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_OPTIMAL);
+        ASSERT_TRUE(img_xfer_src.initialized());
+
+        VkImageObj img_xfer_dst(m_device);
+        img_xfer_dst.Init(128, 128, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
+        ASSERT_TRUE(img_xfer_dst.initialized());
+
+        VkImageObj img_sampled(m_device);
+        img_sampled.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL);
+        ASSERT_TRUE(img_sampled.initialized());
+
+        VkImageObj img_input(m_device);
+        img_input.Init(128, 128, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL);
+        ASSERT_TRUE(img_input.initialized());
+
+        const struct {
+            VkImageObj &image_obj;
+            VkImageLayout old_layout;
+            VkImageLayout new_layout;
+        } buffer_layouts[] = {
+            // clang-format off
+            {img_color,    VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,         VK_IMAGE_LAYOUT_GENERAL},
+            {img_ds1,      VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL},
+            {img_ds2,      VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,  VK_IMAGE_LAYOUT_GENERAL},
+            {img_sampled,  VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,         VK_IMAGE_LAYOUT_GENERAL},
+            {img_input,    VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,         VK_IMAGE_LAYOUT_GENERAL},
+            {img_xfer_src, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,             VK_IMAGE_LAYOUT_GENERAL},
+            {img_xfer_dst, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,             VK_IMAGE_LAYOUT_GENERAL},
+            // clang-format on
+        };
+        const uint32_t layout_count = sizeof(buffer_layouts) / sizeof(buffer_layouts[0]);
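+        // Each image is transitioned from its usage-specific layout to GENERAL and back again,
+        // exercising the layout/usage compatibility checks in both directions.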
+
+        m_commandBuffer->begin();
+        for (uint32_t i = 0; i < layout_count; ++i) {
+            img_barrier.image = buffer_layouts[i].image_obj.handle();
+            const VkImageUsageFlags usage = buffer_layouts[i].image_obj.usage();
+            img_barrier.subresourceRange.aspectMask = (usage == VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)
+                                                          ? (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)
+                                                          : VK_IMAGE_ASPECT_COLOR_BIT;
+
+            img_barrier.oldLayout = buffer_layouts[i].old_layout;
+            img_barrier.newLayout = buffer_layouts[i].new_layout;
+            vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 0,
+                                   nullptr, 0, nullptr, 1, &img_barrier);
+
+            img_barrier.oldLayout = buffer_layouts[i].new_layout;
+            img_barrier.newLayout = buffer_layouts[i].old_layout;
+            vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 0,
+                                   nullptr, 0, nullptr, 1, &img_barrier);
+        }
+        m_commandBuffer->end();
+
+        img_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
+        img_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
+    }
+    m_errorMonitor->VerifyNotFound();
+}
+
+// This is a positive test.  No errors should be generated.
+TEST_F(VkPositiveLayerTest, WaitEventThenSet) {
+    TEST_DESCRIPTION("Wait on a event then set it after the wait has been submitted.");
+
+    m_errorMonitor->ExpectSuccess();
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    VkEvent event;
+    VkEventCreateInfo event_create_info{};
+    event_create_info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+    vk::CreateEvent(m_device->device(), &event_create_info, nullptr, &event);
+
+    VkCommandPool command_pool;
+    VkCommandPoolCreateInfo pool_create_info{};
+    pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+    pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+    pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+    vk::CreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
+
+    VkCommandBuffer command_buffer;
+    VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+    command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+    command_buffer_allocate_info.commandPool = command_pool;
+    command_buffer_allocate_info.commandBufferCount = 1;
+    command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+    vk::AllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, &command_buffer);
+
+    VkQueue queue = VK_NULL_HANDLE;
+    vk::GetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 0, &queue);
+
+    {
+        VkCommandBufferBeginInfo begin_info{};
+        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+        vk::BeginCommandBuffer(command_buffer, &begin_info);
+
+        vk::CmdWaitEvents(command_buffer, 1, &event, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, nullptr, 0,
+                          nullptr, 0, nullptr);
+        vk::CmdResetEvent(command_buffer, event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
+        vk::EndCommandBuffer(command_buffer);
+    }
+    {
+        VkSubmitInfo submit_info{};
+        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+        submit_info.commandBufferCount = 1;
+        submit_info.pCommandBuffers = &command_buffer;
+        submit_info.signalSemaphoreCount = 0;
+        submit_info.pSignalSemaphores = nullptr;
+        vk::QueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
+    }
+    { vk::SetEvent(m_device->device(), event); }
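+    // Signaling the event from the host after submission is valid because the recorded wait names
+    // VK_PIPELINE_STAGE_HOST_BIT as its source stage.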
+
+    vk::QueueWaitIdle(queue);
+
+    vk::DestroyEvent(m_device->device(), event, nullptr);
+    vk::FreeCommandBuffers(m_device->device(), command_pool, 1, &command_buffer);
+    vk::DestroyCommandPool(m_device->device(), command_pool, NULL);
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+// This is a positive test.  No errors should be generated.
+TEST_F(VkPositiveLayerTest, QueryAndCopySecondaryCommandBuffers) {
+    TEST_DESCRIPTION("Issue a query on a secondary command buffer and copy it on a primary.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
+        printf("%s Queue family needs to have multiple queues to run this test.\n", kSkipPrefix);
+        return;
+    }
+
+    m_errorMonitor->ExpectSuccess();
+
+    VkQueryPool query_pool;
+    VkQueryPoolCreateInfo query_pool_create_info{};
+    query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+    query_pool_create_info.queryType = VK_QUERY_TYPE_TIMESTAMP;
+    query_pool_create_info.queryCount = 1;
+    vk::CreateQueryPool(m_device->device(), &query_pool_create_info, nullptr, &query_pool);
+
+    VkCommandPoolObj command_pool(m_device, m_device->graphics_queue_node_index_, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
+    VkCommandBufferObj primary_buffer(m_device, &command_pool);
+    VkCommandBufferObj secondary_buffer(m_device, &command_pool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+
+    VkQueue queue = VK_NULL_HANDLE;
+    vk::GetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 1, &queue);
+
+    uint32_t qfi = 0;
+    VkBufferCreateInfo buff_create_info = {};
+    buff_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    buff_create_info.size = 1024;
+    buff_create_info.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+    buff_create_info.queueFamilyIndexCount = 1;
+    buff_create_info.pQueueFamilyIndices = &qfi;
+
+    VkBufferObj buffer;
+    buffer.init(*m_device, buff_create_info);
+
+    VkCommandBufferInheritanceInfo hinfo = {};
+    hinfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
+    hinfo.renderPass = VK_NULL_HANDLE;
+    hinfo.subpass = 0;
+    hinfo.framebuffer = VK_NULL_HANDLE;
+    hinfo.occlusionQueryEnable = VK_FALSE;
+    hinfo.queryFlags = 0;
+    hinfo.pipelineStatistics = 0;
+
+    {
+        VkCommandBufferBeginInfo begin_info{};
+        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+        begin_info.pInheritanceInfo = &hinfo;
+        secondary_buffer.begin(&begin_info);
+        vk::CmdResetQueryPool(secondary_buffer.handle(), query_pool, 0, 1);
+        vk::CmdWriteTimestamp(secondary_buffer.handle(), VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, query_pool, 0);
+        secondary_buffer.end();
+
+        primary_buffer.begin();
+        vk::CmdExecuteCommands(primary_buffer.handle(), 1, &secondary_buffer.handle());
+        vk::CmdCopyQueryPoolResults(primary_buffer.handle(), query_pool, 0, 1, buffer.handle(), 0, 0, VK_QUERY_RESULT_WAIT_BIT);
+        primary_buffer.end();
+    }
+
+    primary_buffer.QueueCommandBuffer();
+    vk::QueueWaitIdle(queue);
+
+    vk::DestroyQueryPool(m_device->device(), query_pool, nullptr);
+    m_errorMonitor->VerifyNotFound();
+}
+
+// This is a positive test.  No errors should be generated.
+TEST_F(VkPositiveLayerTest, QueryAndCopyMultipleCommandBuffers) {
+    TEST_DESCRIPTION("Issue a query and copy from it on a second command buffer.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
+        printf("%s Queue family needs to have multiple queues to run this test.\n", kSkipPrefix);
+        return;
+    }
+
+    m_errorMonitor->ExpectSuccess();
+
+    VkQueryPool query_pool;
+    VkQueryPoolCreateInfo query_pool_create_info{};
+    query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+    query_pool_create_info.queryType = VK_QUERY_TYPE_TIMESTAMP;
+    query_pool_create_info.queryCount = 1;
+    vk::CreateQueryPool(m_device->device(), &query_pool_create_info, nullptr, &query_pool);
+
+    VkCommandPool command_pool;
+    VkCommandPoolCreateInfo pool_create_info{};
+    pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+    pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+    pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+    vk::CreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
+
+    VkCommandBuffer command_buffer[2];
+    VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+    command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+    command_buffer_allocate_info.commandPool = command_pool;
+    command_buffer_allocate_info.commandBufferCount = 2;
+    command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+    vk::AllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);
+
+    VkQueue queue = VK_NULL_HANDLE;
+    vk::GetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 1, &queue);
+
+    uint32_t qfi = 0;
+    VkBufferCreateInfo buff_create_info = {};
+    buff_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    buff_create_info.size = 1024;
+    buff_create_info.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+    buff_create_info.queueFamilyIndexCount = 1;
+    buff_create_info.pQueueFamilyIndices = &qfi;
+
+    VkBufferObj buffer;
+    buffer.init(*m_device, buff_create_info);
+
+    {
+        VkCommandBufferBeginInfo begin_info{};
+        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+        vk::BeginCommandBuffer(command_buffer[0], &begin_info);
+
+        vk::CmdResetQueryPool(command_buffer[0], query_pool, 0, 1);
+        vk::CmdWriteTimestamp(command_buffer[0], VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, query_pool, 0);
+
+        vk::EndCommandBuffer(command_buffer[0]);
+
+        vk::BeginCommandBuffer(command_buffer[1], &begin_info);
+
+        vk::CmdCopyQueryPoolResults(command_buffer[1], query_pool, 0, 1, buffer.handle(), 0, 0, VK_QUERY_RESULT_WAIT_BIT);
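+        // VK_QUERY_RESULT_WAIT_BIT makes the copy wait for the timestamp written by the first
+        // command buffer to become available, so both buffers can be submitted in one batch.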
+
+        vk::EndCommandBuffer(command_buffer[1]);
+    }
+    {
+        VkSubmitInfo submit_info{};
+        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+        submit_info.commandBufferCount = 2;
+        submit_info.pCommandBuffers = command_buffer;
+        submit_info.signalSemaphoreCount = 0;
+        submit_info.pSignalSemaphores = nullptr;
+        vk::QueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
+    }
+
+    vk::QueueWaitIdle(queue);
+
+    vk::DestroyQueryPool(m_device->device(), query_pool, nullptr);
+    vk::FreeCommandBuffers(m_device->device(), command_pool, 2, command_buffer);
+    vk::DestroyCommandPool(m_device->device(), command_pool, NULL);
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+// This is a positive test.  No errors should be generated.
+TEST_F(VkPositiveLayerTest, TwoFencesThreeFrames) {
+    TEST_DESCRIPTION(
+        "Two command buffers with two separate fences are each run through a Submit & WaitForFences cycle 3 times. This previously "
+        "revealed a bug so running this positive test to prevent a regression.");
+    m_errorMonitor->ExpectSuccess();
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    VkQueue queue = VK_NULL_HANDLE;
+    vk::GetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 0, &queue);
+
+    static const uint32_t NUM_OBJECTS = 2;
+    static const uint32_t NUM_FRAMES = 3;
+    VkCommandBuffer cmd_buffers[NUM_OBJECTS] = {};
+    VkFence fences[NUM_OBJECTS] = {};
+
+    VkCommandPool cmd_pool;
+    VkCommandPoolCreateInfo cmd_pool_ci = {};
+    cmd_pool_ci.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+    cmd_pool_ci.queueFamilyIndex = m_device->graphics_queue_node_index_;
+    cmd_pool_ci.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+    VkResult err = vk::CreateCommandPool(m_device->device(), &cmd_pool_ci, nullptr, &cmd_pool);
+    ASSERT_VK_SUCCESS(err);
+
+    VkCommandBufferAllocateInfo cmd_buf_info = {};
+    cmd_buf_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+    cmd_buf_info.commandPool = cmd_pool;
+    cmd_buf_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+    cmd_buf_info.commandBufferCount = 1;
+
+    VkFenceCreateInfo fence_ci = {};
+    fence_ci.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+    fence_ci.pNext = nullptr;
+    fence_ci.flags = 0;
+
+    for (uint32_t i = 0; i < NUM_OBJECTS; ++i) {
+        err = vk::AllocateCommandBuffers(m_device->device(), &cmd_buf_info, &cmd_buffers[i]);
+        ASSERT_VK_SUCCESS(err);
+        err = vk::CreateFence(m_device->device(), &fence_ci, nullptr, &fences[i]);
+        ASSERT_VK_SUCCESS(err);
+    }
+
+    for (uint32_t frame = 0; frame < NUM_FRAMES; ++frame) {
+        for (uint32_t obj = 0; obj < NUM_OBJECTS; ++obj) {
+            // Create empty cmd buffer
+            VkCommandBufferBeginInfo cmdBufBeginDesc = {};
+            cmdBufBeginDesc.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+
+            err = vk::BeginCommandBuffer(cmd_buffers[obj], &cmdBufBeginDesc);
+            ASSERT_VK_SUCCESS(err);
+            err = vk::EndCommandBuffer(cmd_buffers[obj]);
+            ASSERT_VK_SUCCESS(err);
+
+            VkSubmitInfo submit_info = {};
+            submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+            submit_info.commandBufferCount = 1;
+            submit_info.pCommandBuffers = &cmd_buffers[obj];
+            // Submit cmd buffer and wait for fence
+            err = vk::QueueSubmit(queue, 1, &submit_info, fences[obj]);
+            ASSERT_VK_SUCCESS(err);
+            err = vk::WaitForFences(m_device->device(), 1, &fences[obj], VK_TRUE, UINT64_MAX);
+            ASSERT_VK_SUCCESS(err);
+            err = vk::ResetFences(m_device->device(), 1, &fences[obj]);
+            ASSERT_VK_SUCCESS(err);
+        }
+    }
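+    // Each iteration waits on and resets its fence before reusing the command buffer, so no command
+    // buffer is ever resubmitted while still pending.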
+    m_errorMonitor->VerifyNotFound();
+    vk::DestroyCommandPool(m_device->device(), cmd_pool, NULL);
+    for (uint32_t i = 0; i < NUM_OBJECTS; ++i) {
+        vk::DestroyFence(m_device->device(), fences[i], nullptr);
+    }
+}
+
+// This is a positive test.  No errors should be generated.
+TEST_F(VkPositiveLayerTest, TwoQueueSubmitsSeparateQueuesWithSemaphoreAndOneFenceQWI) {
+    TEST_DESCRIPTION(
+        "Two command buffers, each in a separate QueueSubmit call submitted on separate queues followed by a QueueWaitIdle.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
+        printf("%s Queue family needs to have multiple queues to run this test.\n", kSkipPrefix);
+        return;
+    }
+
+    m_errorMonitor->ExpectSuccess();
+
+    VkSemaphore semaphore;
+    VkSemaphoreCreateInfo semaphore_create_info{};
+    semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+    vk::CreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore);
+
+    VkCommandPool command_pool;
+    VkCommandPoolCreateInfo pool_create_info{};
+    pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+    pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+    pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+    vk::CreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
+
+    VkCommandBuffer command_buffer[2];
+    VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+    command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+    command_buffer_allocate_info.commandPool = command_pool;
+    command_buffer_allocate_info.commandBufferCount = 2;
+    command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+    vk::AllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);
+
+    VkQueue queue = VK_NULL_HANDLE;
+    vk::GetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 1, &queue);
+
+    {
+        VkCommandBufferBeginInfo begin_info{};
+        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+        vk::BeginCommandBuffer(command_buffer[0], &begin_info);
+
+        vk::CmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+                               nullptr, 0, nullptr, 0, nullptr);
+
+        VkViewport viewport{};
+        viewport.maxDepth = 1.0f;
+        viewport.minDepth = 0.0f;
+        viewport.width = 512;
+        viewport.height = 512;
+        viewport.x = 0;
+        viewport.y = 0;
+        vk::CmdSetViewport(command_buffer[0], 0, 1, &viewport);
+        vk::EndCommandBuffer(command_buffer[0]);
+    }
+    {
+        VkCommandBufferBeginInfo begin_info{};
+        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+        vk::BeginCommandBuffer(command_buffer[1], &begin_info);
+
+        VkViewport viewport{};
+        viewport.maxDepth = 1.0f;
+        viewport.minDepth = 0.0f;
+        viewport.width = 512;
+        viewport.height = 512;
+        viewport.x = 0;
+        viewport.y = 0;
+        vk::CmdSetViewport(command_buffer[1], 0, 1, &viewport);
+        vk::EndCommandBuffer(command_buffer[1]);
+    }
+    {
+        VkSubmitInfo submit_info{};
+        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+        submit_info.commandBufferCount = 1;
+        submit_info.pCommandBuffers = &command_buffer[0];
+        submit_info.signalSemaphoreCount = 1;
+        submit_info.pSignalSemaphores = &semaphore;
+        vk::QueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
+    }
+    {
+        VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
+        VkSubmitInfo submit_info{};
+        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+        submit_info.commandBufferCount = 1;
+        submit_info.pCommandBuffers = &command_buffer[1];
+        submit_info.waitSemaphoreCount = 1;
+        submit_info.pWaitSemaphores = &semaphore;
+        submit_info.pWaitDstStageMask = flags;
+        vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    }
+
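+    // The semaphore orders the two batches across queues, so waiting for the second queue to idle
+    // also guarantees the first batch has retired before teardown.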
+    vk::QueueWaitIdle(m_device->m_queue);
+
+    vk::DestroySemaphore(m_device->device(), semaphore, nullptr);
+    vk::FreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
+    vk::DestroyCommandPool(m_device->device(), command_pool, NULL);
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+// This is a positive test.  No errors should be generated.
+TEST_F(VkPositiveLayerTest, TwoQueueSubmitsSeparateQueuesWithSemaphoreAndOneFenceQWIFence) {
+    TEST_DESCRIPTION(
+        "Two command buffers, each in a separate QueueSubmit call submitted on separate queues, the second having a fence followed "
+        "by a QueueWaitIdle.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
+        printf("%s Queue family needs to have multiple queues to run this test.\n", kSkipPrefix);
+        return;
+    }
+
+    m_errorMonitor->ExpectSuccess();
+
+    VkFence fence;
+    VkFenceCreateInfo fence_create_info{};
+    fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+    vk::CreateFence(m_device->device(), &fence_create_info, nullptr, &fence);
+
+    VkSemaphore semaphore;
+    VkSemaphoreCreateInfo semaphore_create_info{};
+    semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+    vk::CreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore);
+
+    VkCommandPool command_pool;
+    VkCommandPoolCreateInfo pool_create_info{};
+    pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+    pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+    pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+    vk::CreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
+
+    VkCommandBuffer command_buffer[2];
+    VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+    command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+    command_buffer_allocate_info.commandPool = command_pool;
+    command_buffer_allocate_info.commandBufferCount = 2;
+    command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+    vk::AllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);
+
+    VkQueue queue = VK_NULL_HANDLE;
+    vk::GetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 1, &queue);
+
+    {
+        VkCommandBufferBeginInfo begin_info{};
+        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+        vk::BeginCommandBuffer(command_buffer[0], &begin_info);
+
+        vk::CmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+                               nullptr, 0, nullptr, 0, nullptr);
+
+        VkViewport viewport{};
+        viewport.maxDepth = 1.0f;
+        viewport.minDepth = 0.0f;
+        viewport.width = 512;
+        viewport.height = 512;
+        viewport.x = 0;
+        viewport.y = 0;
+        vk::CmdSetViewport(command_buffer[0], 0, 1, &viewport);
+        vk::EndCommandBuffer(command_buffer[0]);
+    }
+    {
+        VkCommandBufferBeginInfo begin_info{};
+        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+        vk::BeginCommandBuffer(command_buffer[1], &begin_info);
+
+        VkViewport viewport{};
+        viewport.maxDepth = 1.0f;
+        viewport.minDepth = 0.0f;
+        viewport.width = 512;
+        viewport.height = 512;
+        viewport.x = 0;
+        viewport.y = 0;
+        vk::CmdSetViewport(command_buffer[1], 0, 1, &viewport);
+        vk::EndCommandBuffer(command_buffer[1]);
+    }
+    {
+        VkSubmitInfo submit_info{};
+        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+        submit_info.commandBufferCount = 1;
+        submit_info.pCommandBuffers = &command_buffer[0];
+        submit_info.signalSemaphoreCount = 1;
+        submit_info.pSignalSemaphores = &semaphore;
+        vk::QueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
+    }
+    {
+        VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
+        VkSubmitInfo submit_info{};
+        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+        submit_info.commandBufferCount = 1;
+        submit_info.pCommandBuffers = &command_buffer[1];
+        submit_info.waitSemaphoreCount = 1;
+        submit_info.pWaitSemaphores = &semaphore;
+        submit_info.pWaitDstStageMask = flags;
+        vk::QueueSubmit(m_device->m_queue, 1, &submit_info, fence);
+    }
+
+    vk::QueueWaitIdle(m_device->m_queue);
+
+    vk::DestroyFence(m_device->device(), fence, nullptr);
+    vk::DestroySemaphore(m_device->device(), semaphore, nullptr);
+    vk::FreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
+    vk::DestroyCommandPool(m_device->device(), command_pool, NULL);
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+// This is a positive test.  No errors should be generated.
+TEST_F(VkPositiveLayerTest, TwoQueueSubmitsSeparateQueuesWithSemaphoreAndOneFenceTwoWFF) {
+    TEST_DESCRIPTION(
+        "Two command buffers, each in a separate QueueSubmit call submitted on separate queues, the second having a fence followed "
+        "by two consecutive WaitForFences calls on the same fence.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
+        printf("%s Queue family needs to have multiple queues to run this test.\n", kSkipPrefix);
+        return;
+    }
+
+    m_errorMonitor->ExpectSuccess();
+
+    VkFence fence;
+    VkFenceCreateInfo fence_create_info{};
+    fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+    vk::CreateFence(m_device->device(), &fence_create_info, nullptr, &fence);
+
+    VkSemaphore semaphore;
+    VkSemaphoreCreateInfo semaphore_create_info{};
+    semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+    vk::CreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore);
+
+    VkCommandPool command_pool;
+    VkCommandPoolCreateInfo pool_create_info{};
+    pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+    pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+    pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+    vk::CreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
+
+    VkCommandBuffer command_buffer[2];
+    VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+    command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+    command_buffer_allocate_info.commandPool = command_pool;
+    command_buffer_allocate_info.commandBufferCount = 2;
+    command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+    vk::AllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);
+
+    VkQueue queue = VK_NULL_HANDLE;
+    vk::GetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 1, &queue);
+
+    {
+        VkCommandBufferBeginInfo begin_info{};
+        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+        vk::BeginCommandBuffer(command_buffer[0], &begin_info);
+
+        vk::CmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+                               nullptr, 0, nullptr, 0, nullptr);
+
+        VkViewport viewport{};
+        viewport.maxDepth = 1.0f;
+        viewport.minDepth = 0.0f;
+        viewport.width = 512;
+        viewport.height = 512;
+        viewport.x = 0;
+        viewport.y = 0;
+        vk::CmdSetViewport(command_buffer[0], 0, 1, &viewport);
+        vk::EndCommandBuffer(command_buffer[0]);
+    }
+    {
+        VkCommandBufferBeginInfo begin_info{};
+        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+        vk::BeginCommandBuffer(command_buffer[1], &begin_info);
+
+        VkViewport viewport{};
+        viewport.maxDepth = 1.0f;
+        viewport.minDepth = 0.0f;
+        viewport.width = 512;
+        viewport.height = 512;
+        viewport.x = 0;
+        viewport.y = 0;
+        vk::CmdSetViewport(command_buffer[1], 0, 1, &viewport);
+        vk::EndCommandBuffer(command_buffer[1]);
+    }
+    {
+        VkSubmitInfo submit_info{};
+        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+        submit_info.commandBufferCount = 1;
+        submit_info.pCommandBuffers = &command_buffer[0];
+        submit_info.signalSemaphoreCount = 1;
+        submit_info.pSignalSemaphores = &semaphore;
+        vk::QueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
+    }
+    {
+        VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
+        VkSubmitInfo submit_info{};
+        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+        submit_info.commandBufferCount = 1;
+        submit_info.pCommandBuffers = &command_buffer[1];
+        submit_info.waitSemaphoreCount = 1;
+        submit_info.pWaitSemaphores = &semaphore;
+        submit_info.pWaitDstStageMask = flags;
+        vk::QueueSubmit(m_device->m_queue, 1, &submit_info, fence);
+    }
+
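+    // Wait on the same fence twice in a row; the second wait sees an already-signaled fence and must not trigger errors.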
+    vk::WaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
+    vk::WaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
+
+    vk::DestroyFence(m_device->device(), fence, nullptr);
+    vk::DestroySemaphore(m_device->device(), semaphore, nullptr);
+    vk::FreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
+    vk::DestroyCommandPool(m_device->device(), command_pool, NULL);
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, TwoQueuesEnsureCorrectRetirementWithWorkStolen) {
+    ASSERT_NO_FATAL_FAILURE(Init());
+    if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
+        printf("%s Test requires two queues, skipping\n", kSkipPrefix);
+        return;
+    }
+
+    VkResult err;
+
+    m_errorMonitor->ExpectSuccess();
+
+    VkQueue q0 = m_device->m_queue;
+    VkQueue q1 = nullptr;
+    vk::GetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 1, &q1);
+    ASSERT_NE(q1, nullptr);
+
+    // An (empty) command buffer. We must have work in the first submission --
+    // the layer treats unfenced work differently from fenced work.
+    VkCommandPoolCreateInfo cpci = {VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, nullptr, 0, 0};
+    VkCommandPool pool;
+    err = vk::CreateCommandPool(m_device->device(), &cpci, nullptr, &pool);
+    ASSERT_VK_SUCCESS(err);
+    VkCommandBufferAllocateInfo cbai = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, nullptr, pool,
+                                        VK_COMMAND_BUFFER_LEVEL_PRIMARY, 1};
+    VkCommandBuffer cb;
+    err = vk::AllocateCommandBuffers(m_device->device(), &cbai, &cb);
+    ASSERT_VK_SUCCESS(err);
+    VkCommandBufferBeginInfo cbbi = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, nullptr, 0, nullptr};
+    err = vk::BeginCommandBuffer(cb, &cbbi);
+    ASSERT_VK_SUCCESS(err);
+    err = vk::EndCommandBuffer(cb);
+    ASSERT_VK_SUCCESS(err);
+
+    // A semaphore
+    VkSemaphoreCreateInfo sci = {VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, nullptr, 0};
+    VkSemaphore s;
+    err = vk::CreateSemaphore(m_device->device(), &sci, nullptr, &s);
+    ASSERT_VK_SUCCESS(err);
+
+    // First submission, to q0
+    VkSubmitInfo s0 = {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, nullptr, 1, &cb, 1, &s};
+
+    err = vk::QueueSubmit(q0, 1, &s0, VK_NULL_HANDLE);
+    ASSERT_VK_SUCCESS(err);
+
+    // Second submission, to q1, waiting on s
+    VkFlags waitmask = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;  // doesn't really matter what this value is.
+    VkSubmitInfo s1 = {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 1, &s, &waitmask, 0, nullptr, 0, nullptr};
+
+    err = vk::QueueSubmit(q1, 1, &s1, VK_NULL_HANDLE);
+    ASSERT_VK_SUCCESS(err);
+
+    // Wait for q0 idle
+    err = vk::QueueWaitIdle(q0);
+    ASSERT_VK_SUCCESS(err);
+
+    // The command buffer was submitted to q0, which is now idle, so it is safe to free it.
+    vk::FreeCommandBuffers(m_device->device(), pool, 1, &cb);
+
+    m_errorMonitor->VerifyNotFound();
+
+    // Force device completely idle and clean up resources
+    vk::DeviceWaitIdle(m_device->device());
+    vk::DestroyCommandPool(m_device->device(), pool, nullptr);
+    vk::DestroySemaphore(m_device->device(), s, nullptr);
+}
+
+// This is a positive test.  No errors should be generated.
+TEST_F(VkPositiveLayerTest, TwoQueueSubmitsSeparateQueuesWithSemaphoreAndOneFence) {
+    TEST_DESCRIPTION(
+        "Two command buffers, each in a separate QueueSubmit call submitted on separate queues, the second having a fence, "
+        "followed by a WaitForFences call.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
+        printf("%s Queue family needs to have multiple queues to run this test.\n", kSkipPrefix);
+        return;
+    }
+
+    m_errorMonitor->ExpectSuccess();
+
+    VkFence fence;
+    VkFenceCreateInfo fence_create_info{};
+    fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+    vk::CreateFence(m_device->device(), &fence_create_info, nullptr, &fence);
+
+    VkSemaphore semaphore;
+    VkSemaphoreCreateInfo semaphore_create_info{};
+    semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+    vk::CreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore);
+
+    VkCommandPool command_pool;
+    VkCommandPoolCreateInfo pool_create_info{};
+    pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+    pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+    pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+    vk::CreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
+
+    VkCommandBuffer command_buffer[2];
+    VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+    command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+    command_buffer_allocate_info.commandPool = command_pool;
+    command_buffer_allocate_info.commandBufferCount = 2;
+    command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+    vk::AllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);
+
+    VkQueue queue = VK_NULL_HANDLE;
+    vk::GetDeviceQueue(m_device->device(), m_device->graphics_queue_node_index_, 1, &queue);
+
+    {
+        VkCommandBufferBeginInfo begin_info{};
+        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+        vk::BeginCommandBuffer(command_buffer[0], &begin_info);
+
+        vk::CmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+                               nullptr, 0, nullptr, 0, nullptr);
+
+        VkViewport viewport{};
+        viewport.maxDepth = 1.0f;
+        viewport.minDepth = 0.0f;
+        viewport.width = 512;
+        viewport.height = 512;
+        viewport.x = 0;
+        viewport.y = 0;
+        vk::CmdSetViewport(command_buffer[0], 0, 1, &viewport);
+        vk::EndCommandBuffer(command_buffer[0]);
+    }
+    {
+        VkCommandBufferBeginInfo begin_info{};
+        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+        vk::BeginCommandBuffer(command_buffer[1], &begin_info);
+
+        VkViewport viewport{};
+        viewport.maxDepth = 1.0f;
+        viewport.minDepth = 0.0f;
+        viewport.width = 512;
+        viewport.height = 512;
+        viewport.x = 0;
+        viewport.y = 0;
+        vk::CmdSetViewport(command_buffer[1], 0, 1, &viewport);
+        vk::EndCommandBuffer(command_buffer[1]);
+    }
+    {
+        VkSubmitInfo submit_info{};
+        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+        submit_info.commandBufferCount = 1;
+        submit_info.pCommandBuffers = &command_buffer[0];
+        submit_info.signalSemaphoreCount = 1;
+        submit_info.pSignalSemaphores = &semaphore;
+        vk::QueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
+    }
+    {
+        VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
+        VkSubmitInfo submit_info{};
+        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+        submit_info.commandBufferCount = 1;
+        submit_info.pCommandBuffers = &command_buffer[1];
+        submit_info.waitSemaphoreCount = 1;
+        submit_info.pWaitSemaphores = &semaphore;
+        submit_info.pWaitDstStageMask = flags;
+        vk::QueueSubmit(m_device->m_queue, 1, &submit_info, fence);
+    }
+
+    vk::WaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
+
+    vk::DestroyFence(m_device->device(), fence, nullptr);
+    vk::DestroySemaphore(m_device->device(), semaphore, nullptr);
+    vk::FreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
+    vk::DestroyCommandPool(m_device->device(), command_pool, NULL);
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+// This is a positive test.  No errors should be generated.
+TEST_F(VkPositiveLayerTest, TwoQueueSubmitsOneQueueWithSemaphoreAndOneFence) {
+    TEST_DESCRIPTION(
+        "Two command buffers, each in a separate QueueSubmit call on the same queue, sharing a signal/wait semaphore, the second "
+        "having a fence, followed by a WaitForFences call.");
+
+    m_errorMonitor->ExpectSuccess();
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    VkFence fence;
+    VkFenceCreateInfo fence_create_info{};
+    fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+    vk::CreateFence(m_device->device(), &fence_create_info, nullptr, &fence);
+
+    VkSemaphore semaphore;
+    VkSemaphoreCreateInfo semaphore_create_info{};
+    semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+    vk::CreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore);
+
+    VkCommandPool command_pool;
+    VkCommandPoolCreateInfo pool_create_info{};
+    pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+    pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+    pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+    vk::CreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
+
+    VkCommandBuffer command_buffer[2];
+    VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+    command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+    command_buffer_allocate_info.commandPool = command_pool;
+    command_buffer_allocate_info.commandBufferCount = 2;
+    command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+    vk::AllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);
+
+    {
+        VkCommandBufferBeginInfo begin_info{};
+        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+        vk::BeginCommandBuffer(command_buffer[0], &begin_info);
+
+        vk::CmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+                               nullptr, 0, nullptr, 0, nullptr);
+
+        VkViewport viewport{};
+        viewport.maxDepth = 1.0f;
+        viewport.minDepth = 0.0f;
+        viewport.width = 512;
+        viewport.height = 512;
+        viewport.x = 0;
+        viewport.y = 0;
+        vk::CmdSetViewport(command_buffer[0], 0, 1, &viewport);
+        vk::EndCommandBuffer(command_buffer[0]);
+    }
+    {
+        VkCommandBufferBeginInfo begin_info{};
+        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+        vk::BeginCommandBuffer(command_buffer[1], &begin_info);
+
+        VkViewport viewport{};
+        viewport.maxDepth = 1.0f;
+        viewport.minDepth = 0.0f;
+        viewport.width = 512;
+        viewport.height = 512;
+        viewport.x = 0;
+        viewport.y = 0;
+        vk::CmdSetViewport(command_buffer[1], 0, 1, &viewport);
+        vk::EndCommandBuffer(command_buffer[1]);
+    }
+    {
+        VkSubmitInfo submit_info{};
+        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+        submit_info.commandBufferCount = 1;
+        submit_info.pCommandBuffers = &command_buffer[0];
+        submit_info.signalSemaphoreCount = 1;
+        submit_info.pSignalSemaphores = &semaphore;
+        vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    }
+    {
+        VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
+        VkSubmitInfo submit_info{};
+        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+        submit_info.commandBufferCount = 1;
+        submit_info.pCommandBuffers = &command_buffer[1];
+        submit_info.waitSemaphoreCount = 1;
+        submit_info.pWaitSemaphores = &semaphore;
+        submit_info.pWaitDstStageMask = flags;
+        vk::QueueSubmit(m_device->m_queue, 1, &submit_info, fence);
+    }
+
+    vk::WaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
+
+    vk::DestroyFence(m_device->device(), fence, nullptr);
+    vk::DestroySemaphore(m_device->device(), semaphore, nullptr);
+    vk::FreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
+    vk::DestroyCommandPool(m_device->device(), command_pool, NULL);
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+// This is a positive test.  No errors should be generated.
+TEST_F(VkPositiveLayerTest, TwoQueueSubmitsOneQueueNullQueueSubmitWithFence) {
+    TEST_DESCRIPTION(
+        "Two command buffers, each in a separate QueueSubmit call on the same queue, no fences, followed by a third QueueSubmit "
+        "with NO SubmitInfos but with a fence, followed by a WaitForFences call.");
+
+    m_errorMonitor->ExpectSuccess();
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    VkFence fence;
+    VkFenceCreateInfo fence_create_info{};
+    fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+    vk::CreateFence(m_device->device(), &fence_create_info, nullptr, &fence);
+
+    VkCommandPool command_pool;
+    VkCommandPoolCreateInfo pool_create_info{};
+    pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+    pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+    pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+    vk::CreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
+
+    VkCommandBuffer command_buffer[2];
+    VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+    command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+    command_buffer_allocate_info.commandPool = command_pool;
+    command_buffer_allocate_info.commandBufferCount = 2;
+    command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+    vk::AllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);
+
+    {
+        VkCommandBufferBeginInfo begin_info{};
+        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+        vk::BeginCommandBuffer(command_buffer[0], &begin_info);
+
+        vk::CmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+                               nullptr, 0, nullptr, 0, nullptr);
+
+        VkViewport viewport{};
+        viewport.maxDepth = 1.0f;
+        viewport.minDepth = 0.0f;
+        viewport.width = 512;
+        viewport.height = 512;
+        viewport.x = 0;
+        viewport.y = 0;
+        vk::CmdSetViewport(command_buffer[0], 0, 1, &viewport);
+        vk::EndCommandBuffer(command_buffer[0]);
+    }
+    {
+        VkCommandBufferBeginInfo begin_info{};
+        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+        vk::BeginCommandBuffer(command_buffer[1], &begin_info);
+
+        VkViewport viewport{};
+        viewport.maxDepth = 1.0f;
+        viewport.minDepth = 0.0f;
+        viewport.width = 512;
+        viewport.height = 512;
+        viewport.x = 0;
+        viewport.y = 0;
+        vk::CmdSetViewport(command_buffer[1], 0, 1, &viewport);
+        vk::EndCommandBuffer(command_buffer[1]);
+    }
+    {
+        VkSubmitInfo submit_info{};
+        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+        submit_info.commandBufferCount = 1;
+        submit_info.pCommandBuffers = &command_buffer[0];
+        submit_info.signalSemaphoreCount = 0;
+        submit_info.pSignalSemaphores = VK_NULL_HANDLE;
+        vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    }
+    {
+        VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
+        VkSubmitInfo submit_info{};
+        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+        submit_info.commandBufferCount = 1;
+        submit_info.pCommandBuffers = &command_buffer[1];
+        submit_info.waitSemaphoreCount = 0;
+        submit_info.pWaitSemaphores = VK_NULL_HANDLE;
+        submit_info.pWaitDstStageMask = flags;
+        vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    }
+
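+    // Fence-only submission: no SubmitInfos, just a fence that signals once the queue's prior work completes.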
+    vk::QueueSubmit(m_device->m_queue, 0, NULL, fence);
+
+    VkResult err = vk::WaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
+    ASSERT_VK_SUCCESS(err);
+
+    vk::DestroyFence(m_device->device(), fence, nullptr);
+    vk::FreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
+    vk::DestroyCommandPool(m_device->device(), command_pool, NULL);
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+// This is a positive test.  No errors should be generated.
+TEST_F(VkPositiveLayerTest, TwoQueueSubmitsOneQueueOneFence) {
+    TEST_DESCRIPTION(
+        "Two command buffers, each in a separate QueueSubmit call on the same queue, the second having a fence, followed by a "
+        "WaitForFences call.");
+
+    m_errorMonitor->ExpectSuccess();
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    VkFence fence;
+    VkFenceCreateInfo fence_create_info{};
+    fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+    vk::CreateFence(m_device->device(), &fence_create_info, nullptr, &fence);
+
+    VkCommandPool command_pool;
+    VkCommandPoolCreateInfo pool_create_info{};
+    pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+    pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+    pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+    vk::CreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
+
+    VkCommandBuffer command_buffer[2];
+    VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+    command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+    command_buffer_allocate_info.commandPool = command_pool;
+    command_buffer_allocate_info.commandBufferCount = 2;
+    command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+    vk::AllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);
+
+    {
+        VkCommandBufferBeginInfo begin_info{};
+        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+        vk::BeginCommandBuffer(command_buffer[0], &begin_info);
+
+        vk::CmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+                               nullptr, 0, nullptr, 0, nullptr);
+
+        VkViewport viewport{};
+        viewport.maxDepth = 1.0f;
+        viewport.minDepth = 0.0f;
+        viewport.width = 512;
+        viewport.height = 512;
+        viewport.x = 0;
+        viewport.y = 0;
+        vk::CmdSetViewport(command_buffer[0], 0, 1, &viewport);
+        vk::EndCommandBuffer(command_buffer[0]);
+    }
+    {
+        VkCommandBufferBeginInfo begin_info{};
+        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+        vk::BeginCommandBuffer(command_buffer[1], &begin_info);
+
+        VkViewport viewport{};
+        viewport.maxDepth = 1.0f;
+        viewport.minDepth = 0.0f;
+        viewport.width = 512;
+        viewport.height = 512;
+        viewport.x = 0;
+        viewport.y = 0;
+        vk::CmdSetViewport(command_buffer[1], 0, 1, &viewport);
+        vk::EndCommandBuffer(command_buffer[1]);
+    }
+    {
+        VkSubmitInfo submit_info{};
+        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+        submit_info.commandBufferCount = 1;
+        submit_info.pCommandBuffers = &command_buffer[0];
+        submit_info.signalSemaphoreCount = 0;
+        submit_info.pSignalSemaphores = VK_NULL_HANDLE;
+        vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    }
+    {
+        VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
+        VkSubmitInfo submit_info{};
+        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+        submit_info.commandBufferCount = 1;
+        submit_info.pCommandBuffers = &command_buffer[1];
+        submit_info.waitSemaphoreCount = 0;
+        submit_info.pWaitSemaphores = VK_NULL_HANDLE;
+        submit_info.pWaitDstStageMask = flags;
+        vk::QueueSubmit(m_device->m_queue, 1, &submit_info, fence);
+    }
+
+    vk::WaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
+
+    vk::DestroyFence(m_device->device(), fence, nullptr);
+    vk::FreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
+    vk::DestroyCommandPool(m_device->device(), command_pool, NULL);
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+// This is a positive test.  No errors should be generated.
+TEST_F(VkPositiveLayerTest, TwoSubmitInfosWithSemaphoreOneQueueSubmitsOneFence) {
+    TEST_DESCRIPTION(
+        "Two command buffers each in a separate SubmitInfo sent in a single QueueSubmit call followed by a WaitForFences call.");
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    m_errorMonitor->ExpectSuccess();
+
+    VkFence fence;
+    VkFenceCreateInfo fence_create_info{};
+    fence_create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+    vk::CreateFence(m_device->device(), &fence_create_info, nullptr, &fence);
+
+    VkSemaphore semaphore;
+    VkSemaphoreCreateInfo semaphore_create_info{};
+    semaphore_create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+    vk::CreateSemaphore(m_device->device(), &semaphore_create_info, nullptr, &semaphore);
+
+    VkCommandPool command_pool;
+    VkCommandPoolCreateInfo pool_create_info{};
+    pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+    pool_create_info.queueFamilyIndex = m_device->graphics_queue_node_index_;
+    pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+    vk::CreateCommandPool(m_device->device(), &pool_create_info, nullptr, &command_pool);
+
+    VkCommandBuffer command_buffer[2];
+    VkCommandBufferAllocateInfo command_buffer_allocate_info{};
+    command_buffer_allocate_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+    command_buffer_allocate_info.commandPool = command_pool;
+    command_buffer_allocate_info.commandBufferCount = 2;
+    command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+    vk::AllocateCommandBuffers(m_device->device(), &command_buffer_allocate_info, command_buffer);
+
+    {
+        VkCommandBufferBeginInfo begin_info{};
+        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+        vk::BeginCommandBuffer(command_buffer[0], &begin_info);
+
+        vk::CmdPipelineBarrier(command_buffer[0], VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+                               nullptr, 0, nullptr, 0, nullptr);
+
+        VkViewport viewport{};
+        viewport.maxDepth = 1.0f;
+        viewport.minDepth = 0.0f;
+        viewport.width = 512;
+        viewport.height = 512;
+        viewport.x = 0;
+        viewport.y = 0;
+        vk::CmdSetViewport(command_buffer[0], 0, 1, &viewport);
+        vk::EndCommandBuffer(command_buffer[0]);
+    }
+    {
+        VkCommandBufferBeginInfo begin_info{};
+        begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+        vk::BeginCommandBuffer(command_buffer[1], &begin_info);
+
+        VkViewport viewport{};
+        viewport.maxDepth = 1.0f;
+        viewport.minDepth = 0.0f;
+        viewport.width = 512;
+        viewport.height = 512;
+        viewport.x = 0;
+        viewport.y = 0;
+        vk::CmdSetViewport(command_buffer[1], 0, 1, &viewport);
+        vk::EndCommandBuffer(command_buffer[1]);
+    }
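+    // Submit both batches in a single vkQueueSubmit: batch 0 signals the semaphore, batch 1 waits on it, and the fence covers the call.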
+    {
+        VkSubmitInfo submit_info[2];
+        VkPipelineStageFlags flags[]{VK_PIPELINE_STAGE_ALL_COMMANDS_BIT};
+
+        submit_info[0].sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+        submit_info[0].pNext = NULL;
+        submit_info[0].commandBufferCount = 1;
+        submit_info[0].pCommandBuffers = &command_buffer[0];
+        submit_info[0].signalSemaphoreCount = 1;
+        submit_info[0].pSignalSemaphores = &semaphore;
+        submit_info[0].waitSemaphoreCount = 0;
+        submit_info[0].pWaitSemaphores = NULL;
+        submit_info[0].pWaitDstStageMask = 0;
+
+        submit_info[1].sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+        submit_info[1].pNext = NULL;
+        submit_info[1].commandBufferCount = 1;
+        submit_info[1].pCommandBuffers = &command_buffer[1];
+        submit_info[1].waitSemaphoreCount = 1;
+        submit_info[1].pWaitSemaphores = &semaphore;
+        submit_info[1].pWaitDstStageMask = flags;
+        submit_info[1].signalSemaphoreCount = 0;
+        submit_info[1].pSignalSemaphores = NULL;
+        vk::QueueSubmit(m_device->m_queue, 2, &submit_info[0], fence);
+    }
+
+    vk::WaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
+
+    vk::DestroyFence(m_device->device(), fence, nullptr);
+    vk::FreeCommandBuffers(m_device->device(), command_pool, 2, &command_buffer[0]);
+    vk::DestroyCommandPool(m_device->device(), command_pool, NULL);
+    vk::DestroySemaphore(m_device->device(), semaphore, nullptr);
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, CreatePipelineAttribMatrixType) {
+    TEST_DESCRIPTION("Test that pipeline validation accepts matrices passed as vertex attributes");
+    m_errorMonitor->ExpectSuccess();
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkVertexInputBindingDescription input_binding;
+    memset(&input_binding, 0, sizeof(input_binding));
+
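+    // The mat2x4 attribute spans two consecutive locations, so two attribute descriptions are provided.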
+    VkVertexInputAttributeDescription input_attribs[2];
+    memset(input_attribs, 0, sizeof(input_attribs));
+
+    for (int i = 0; i < 2; i++) {
+        input_attribs[i].format = VK_FORMAT_R32G32B32A32_SFLOAT;
+        input_attribs[i].location = i;
+    }
+
+    char const *vsSource =
+        "#version 450\n"
+        "\n"
+        "layout(location=0) in mat2x4 x;\n"
+        "void main(){\n"
+        "   gl_Position = x[0] + x[1];\n"
+        "}\n";
+
+    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.vi_ci_.pVertexBindingDescriptions = &input_binding;
+    pipe.vi_ci_.vertexBindingDescriptionCount = 1;
+    pipe.vi_ci_.pVertexAttributeDescriptions = input_attribs;
+    pipe.vi_ci_.vertexAttributeDescriptionCount = 2;
+    pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
+    pipe.InitState();
+    pipe.CreateGraphicsPipeline();
+    /* expect success */
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, CreatePipelineAttribArrayType) {
+    m_errorMonitor->ExpectSuccess();
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkVertexInputBindingDescription input_binding;
+    memset(&input_binding, 0, sizeof(input_binding));
+
+    VkVertexInputAttributeDescription input_attribs[2];
+    memset(input_attribs, 0, sizeof(input_attribs));
+
+    for (int i = 0; i < 2; i++) {
+        input_attribs[i].format = VK_FORMAT_R32G32B32A32_SFLOAT;
+        input_attribs[i].location = i;
+    }
+
+    char const *vsSource =
+        "#version 450\n"
+        "\n"
+        "layout(location=0) in vec4 x[2];\n"
+        "void main(){\n"
+        "   gl_Position = x[0] + x[1];\n"
+        "}\n";
+
+    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.vi_ci_.pVertexBindingDescriptions = &input_binding;
+    pipe.vi_ci_.vertexBindingDescriptionCount = 1;
+    pipe.vi_ci_.pVertexAttributeDescriptions = input_attribs;
+    pipe.vi_ci_.vertexAttributeDescriptionCount = 2;
+    pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
+    pipe.InitState();
+    pipe.CreateGraphicsPipeline();
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, CreatePipelineAttribComponents) {
+    TEST_DESCRIPTION(
+        "Test that pipeline validation accepts consuming a vertex attribute through multiple vertex shader inputs, each consuming "
+        "a different subset of the components, and that fragment shader-attachment validation tolerates multiple duplicate "
+        "location outputs");
+    m_errorMonitor->ExpectSuccess(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT);
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkVertexInputBindingDescription input_binding;
+    memset(&input_binding, 0, sizeof(input_binding));
+
+    VkVertexInputAttributeDescription input_attribs[3];
+    memset(input_attribs, 0, sizeof(input_attribs));
+
+    for (int i = 0; i < 3; i++) {
+        input_attribs[i].format = VK_FORMAT_R32G32B32A32_SFLOAT;
+        input_attribs[i].location = i;
+    }
+
+    char const *vsSource =
+        "#version 450\n"
+        "\n"
+        "layout(location=0) in vec4 x;\n"
+        "layout(location=1) in vec3 y1;\n"
+        "layout(location=1, component=3) in float y2;\n"
+        "layout(location=2) in vec4 z;\n"
+        "void main(){\n"
+        "   gl_Position = x + vec4(y1, y2) + z;\n"
+        "}\n";
+    char const *fsSource =
+        "#version 450\n"
+        "\n"
+        "layout(location=0, component=0) out float color0;\n"
+        "layout(location=0, component=1) out float color1;\n"
+        "layout(location=0, component=2) out float color2;\n"
+        "layout(location=0, component=3) out float color3;\n"
+        "layout(location=1, component=0) out vec2 second_color0;\n"
+        "layout(location=1, component=2) out vec2 second_color1;\n"
+        "void main(){\n"
+        "   color0 = float(1);\n"
+        "   second_color0 = vec2(1);\n"
+        "}\n";
+
+    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    VkPipelineObj pipe(m_device);
+
+    VkDescriptorSetObj descriptorSet(m_device);
+    descriptorSet.AppendDummy();
+    descriptorSet.CreateVKDescriptorSet(m_commandBuffer);
+
+    // Create a renderPass with two color attachments
+    VkAttachmentReference attachments[2] = {};
+    attachments[0].layout = VK_IMAGE_LAYOUT_GENERAL;
+    attachments[1].attachment = 1;
+    attachments[1].layout = VK_IMAGE_LAYOUT_GENERAL;
+
+    VkSubpassDescription subpass = {};
+    subpass.pColorAttachments = attachments;
+    subpass.colorAttachmentCount = 2;
+
+    VkRenderPassCreateInfo rpci = {};
+    rpci.subpassCount = 1;
+    rpci.pSubpasses = &subpass;
+    rpci.attachmentCount = 2;
+
+    VkAttachmentDescription attach_desc[2] = {};
+    attach_desc[0].format = VK_FORMAT_B8G8R8A8_UNORM;
+    attach_desc[0].samples = VK_SAMPLE_COUNT_1_BIT;
+    attach_desc[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    attach_desc[0].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+    attach_desc[0].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
+    attach_desc[1].format = VK_FORMAT_B8G8R8A8_UNORM;
+    attach_desc[1].samples = VK_SAMPLE_COUNT_1_BIT;
+    attach_desc[1].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    attach_desc[1].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+    attach_desc[1].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
+
+    rpci.pAttachments = attach_desc;
+    rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+
+    VkRenderPass renderpass;
+    vk::CreateRenderPass(m_device->device(), &rpci, NULL, &renderpass);
+    pipe.AddShader(&vs);
+    pipe.AddShader(&fs);
+
+    VkPipelineColorBlendAttachmentState att_state1 = {};
+    att_state1.dstAlphaBlendFactor = VK_BLEND_FACTOR_CONSTANT_COLOR;
+    att_state1.blendEnable = VK_FALSE;
+
+    pipe.AddColorAttachment(0, att_state1);
+    pipe.AddColorAttachment(1, att_state1);
+    pipe.AddVertexInputBindings(&input_binding, 1);
+    pipe.AddVertexInputAttribs(input_attribs, 3);
+    pipe.CreateVKPipeline(descriptorSet.GetPipelineLayout(), renderpass);
+    vk::DestroyRenderPass(m_device->device(), renderpass, nullptr);
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, CreatePipelineSimplePositive) {
+    m_errorMonitor->ExpectSuccess();
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.InitState();
+    pipe.CreateGraphicsPipeline();
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, CreatePipelineRelaxedTypeMatch) {
+    TEST_DESCRIPTION(
+        "Test that pipeline validation accepts the relaxed type matching rules set out in 14.1.3: fundamental type must match, and "
+        "producer side must have at least as many components");
+    m_errorMonitor->ExpectSuccess();
+
+    // VK 1.0.8 Specification, 14.1.3 "Additionally,..." block
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    char const *vsSource =
+        "#version 450\n"
+        "layout(location=0) out vec3 x;\n"
+        "layout(location=1) out ivec3 y;\n"
+        "layout(location=2) out vec3 z;\n"
+        "void main(){\n"
+        "   gl_Position = vec4(0);\n"
+        "   x = vec3(0); y = ivec3(0); z = vec3(0);\n"
+        "}\n";
+    char const *fsSource =
+        "#version 450\n"
+        "\n"
+        "layout(location=0) out vec4 color;\n"
+        "layout(location=0) in float x;\n"
+        "layout(location=1) flat in int y;\n"
+        "layout(location=2) in vec2 z;\n"
+        "void main(){\n"
+        "   color = vec4(1 + x + y + z.x);\n"
+        "}\n";
+
+    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
+    pipe.InitState();
+    pipe.CreateGraphicsPipeline();
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, CreatePipelineTessPerVertex) {
+    TEST_DESCRIPTION("Test that pipeline validation accepts per-vertex variables passed between the TCS and TES stages");
+    m_errorMonitor->ExpectSuccess();
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    if (!m_device->phy().features().tessellationShader) {
+        printf("%s Device does not support tessellation shaders; skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    char const *tcsSource =
+        "#version 450\n"
+        "layout(location=0) out int x[];\n"
+        "layout(vertices=3) out;\n"
+        "void main(){\n"
+        "   gl_TessLevelOuter[0] = gl_TessLevelOuter[1] = gl_TessLevelOuter[2] = 1;\n"
+        "   gl_TessLevelInner[0] = 1;\n"
+        "   x[gl_InvocationID] = gl_InvocationID;\n"
+        "}\n";
+    char const *tesSource =
+        "#version 450\n"
+        "layout(triangles, equal_spacing, cw) in;\n"
+        "layout(location=0) in int x[];\n"
+        "void main(){\n"
+        "   gl_Position.xyz = gl_TessCoord;\n"
+        "   gl_Position.w = x[0] + x[1] + x[2];\n"
+        "}\n";
+
+    VkShaderObj vs(m_device, bindStateMinimalShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj tcs(m_device, tcsSource, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
+    VkShaderObj tes(m_device, tesSource, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this);
+    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    VkPipelineInputAssemblyStateCreateInfo iasci{VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, nullptr, 0,
+                                                 VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, VK_FALSE};
+
+    VkPipelineTessellationStateCreateInfo tsci{VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, nullptr, 0, 3};
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.gp_ci_.pTessellationState = &tsci;
+    pipe.gp_ci_.pInputAssemblyState = &iasci;
+    pipe.shader_stages_ = {vs.GetStageCreateInfo(), tcs.GetStageCreateInfo(), tes.GetStageCreateInfo(), fs.GetStageCreateInfo()};
+    pipe.InitState();
+    pipe.CreateGraphicsPipeline();
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, CreatePipelineGeometryInputBlockPositive) {
+    TEST_DESCRIPTION(
+        "Test that pipeline validation accepts a user-defined interface block passed into the geometry shader. This is interesting "
+        "because the 'extra' array level is not present on the member type, but on the block instance.");
+    m_errorMonitor->ExpectSuccess();
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    if (!m_device->phy().features().geometryShader) {
+        printf("%s Device does not support geometry shaders; skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    char const *gsSource =
+        "#version 450\n"
+        "layout(triangles) in;\n"
+        "layout(triangle_strip, max_vertices=3) out;\n"
+        "layout(location=0) in VertexData { vec4 x; } gs_in[];\n"
+        "void main() {\n"
+        "   gl_Position = gs_in[0].x;\n"
+        "   EmitVertex();\n"
+        "}\n";
+
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj gs(m_device, gsSource, VK_SHADER_STAGE_GEOMETRY_BIT, this);
+    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.shader_stages_ = {vs.GetStageCreateInfo(), gs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
+    pipe.InitState();
+    pipe.CreateGraphicsPipeline();
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, CreatePipeline64BitAttributesPositive) {
+    TEST_DESCRIPTION(
+        "Test that pipeline validation accepts basic use of 64bit vertex attributes. This is interesting because they consume "
+        "multiple locations.");
+    m_errorMonitor->ExpectSuccess();
+
+    if (!EnableDeviceProfileLayer()) {
+        printf("%s Failed to enable device profile layer.\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    if (!m_device->phy().features().shaderFloat64) {
+        printf("%s Device does not support 64bit vertex attributes; skipped.\n", kSkipPrefix);
+        return;
+    }
+    // Set 64bit format to support VTX Buffer feature
+    PFN_vkSetPhysicalDeviceFormatPropertiesEXT fpvkSetPhysicalDeviceFormatPropertiesEXT = nullptr;
+    PFN_vkGetOriginalPhysicalDeviceFormatPropertiesEXT fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT = nullptr;
+
+    // Load required functions
+    if (!LoadDeviceProfileLayer(fpvkSetPhysicalDeviceFormatPropertiesEXT, fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT)) {
+        return;
+    }
+    VkFormatProperties format_props;
+    fpvkGetOriginalPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R64G64B64A64_SFLOAT, &format_props);
+    format_props.bufferFeatures |= VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT;
+    fpvkSetPhysicalDeviceFormatPropertiesEXT(gpu(), VK_FORMAT_R64G64B64A64_SFLOAT, format_props);
+
+    VkVertexInputBindingDescription input_bindings[1];
+    memset(input_bindings, 0, sizeof(input_bindings));
+
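+    // Each dvec4 column of the dmat4 is 32 bytes and consumes two locations, hence locations 0/2/4/6 with 32-byte offsets.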
+    VkVertexInputAttributeDescription input_attribs[4];
+    memset(input_attribs, 0, sizeof(input_attribs));
+    input_attribs[0].location = 0;
+    input_attribs[0].offset = 0;
+    input_attribs[0].format = VK_FORMAT_R64G64B64A64_SFLOAT;
+    input_attribs[1].location = 2;
+    input_attribs[1].offset = 32;
+    input_attribs[1].format = VK_FORMAT_R64G64B64A64_SFLOAT;
+    input_attribs[2].location = 4;
+    input_attribs[2].offset = 64;
+    input_attribs[2].format = VK_FORMAT_R64G64B64A64_SFLOAT;
+    input_attribs[3].location = 6;
+    input_attribs[3].offset = 96;
+    input_attribs[3].format = VK_FORMAT_R64G64B64A64_SFLOAT;
+
+    char const *vsSource =
+        "#version 450\n"
+        "\n"
+        "layout(location=0) in dmat4 x;\n"
+        "void main(){\n"
+        "   gl_Position = vec4(x[0][0]);\n"
+        "}\n";
+
+    VkShaderObj vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.vi_ci_.pVertexBindingDescriptions = input_bindings;
+    pipe.vi_ci_.vertexBindingDescriptionCount = 1;
+    pipe.vi_ci_.pVertexAttributeDescriptions = input_attribs;
+    pipe.vi_ci_.vertexAttributeDescriptionCount = 4;
+    pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
+    pipe.InitState();
+    pipe.CreateGraphicsPipeline();
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, CreatePipelineInputAttachmentPositive) {
+    TEST_DESCRIPTION("Positive test for a correctly matched input attachment");
+    m_errorMonitor->ExpectSuccess();
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    char const *fsSource =
+        "#version 450\n"
+        "\n"
+        "layout(input_attachment_index=0, set=0, binding=0) uniform subpassInput x;\n"
+        "layout(location=0) out vec4 color;\n"
+        "void main() {\n"
+        "   color = subpassLoad(x);\n"
+        "}\n";
+
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    VkPipelineObj pipe(m_device);
+    pipe.AddShader(&vs);
+    pipe.AddShader(&fs);
+    pipe.AddDefaultColorAttachment();
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkDescriptorSetLayoutBinding dslb = {0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr};
+    const VkDescriptorSetLayoutObj dsl(m_device, {dslb});
+    const VkPipelineLayoutObj pl(m_device, {&dsl});
+
+    VkAttachmentDescription descs[2] = {
+        {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE,
+         VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+         VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+        {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE,
+         VK_ATTACHMENT_LOAD_OP_LOAD, VK_ATTACHMENT_STORE_OP_STORE, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL},
+    };
+    VkAttachmentReference color = {
+        0,
+        VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+    };
+    VkAttachmentReference input = {
+        1,
+        VK_IMAGE_LAYOUT_GENERAL,
+    };
+
+    VkSubpassDescription sd = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 1, &input, 1, &color, nullptr, nullptr, 0, nullptr};
+
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 2, descs, 1, &sd, 0, nullptr};
+    VkRenderPass rp;
+    VkResult err = vk::CreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+    ASSERT_VK_SUCCESS(err);
+
+    // Should succeed; if the input attachment matching were going to fail validation, it would fail here.
+    pipe.CreateVKPipeline(pl.handle(), rp);
+
+    m_errorMonitor->VerifyNotFound();
+
+    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+}
+
+TEST_F(VkPositiveLayerTest, CreateComputePipelineMissingDescriptorUnusedPositive) {
+    TEST_DESCRIPTION(
+        "Test that pipeline validation accepts a compute pipeline which declares a descriptor-backed resource which is not "
+        "provided, but the shader does not statically use it. This is interesting because it requires compute pipelines to have a "
+        "proper descriptor use walk, which they didn't for some time.");
+    m_errorMonitor->ExpectSuccess();
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    char const *csSource =
+        "#version 450\n"
+        "\n"
+        "layout(local_size_x=1) in;\n"
+        "layout(set=0, binding=0) buffer block { vec4 x; };\n"
+        "void main(){\n"
+        "   // x is not used.\n"
+        "}\n";
+
+    CreateComputePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.cs_.reset(new VkShaderObj(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this));
+    pipe.InitState();
+    pipe.CreateComputePipeline();
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, CreateComputePipelineCombinedImageSamplerConsumedAsSampler) {
+    TEST_DESCRIPTION(
+        "Test that pipeline validation accepts a shader consuming only the sampler portion of a combined image + sampler");
+    m_errorMonitor->ExpectSuccess();
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    std::vector<VkDescriptorSetLayoutBinding> bindings = {
+        {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
+        {1, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
+        {2, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
+    };
+
+    char const *csSource =
+        "#version 450\n"
+        "\n"
+        "layout(local_size_x=1) in;\n"
+        "layout(set=0, binding=0) uniform sampler s;\n"
+        "layout(set=0, binding=1) uniform texture2D t;\n"
+        "layout(set=0, binding=2) buffer block { vec4 x; };\n"
+        "void main() {\n"
+        "   x = texture(sampler2D(t, s), vec2(0));\n"
+        "}\n";
+    CreateComputePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.dsl_bindings_.resize(bindings.size());
+    memcpy(pipe.dsl_bindings_.data(), bindings.data(), bindings.size() * sizeof(VkDescriptorSetLayoutBinding));
+    pipe.cs_.reset(new VkShaderObj(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this));
+    pipe.InitState();
+    m_errorMonitor->ExpectSuccess();
+    pipe.CreateComputePipeline();
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, CreateComputePipelineCombinedImageSamplerConsumedAsImage) {
+    TEST_DESCRIPTION(
+        "Test that pipeline validation accepts a shader consuming only the image portion of a combined image + sampler");
+    m_errorMonitor->ExpectSuccess();
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    std::vector<VkDescriptorSetLayoutBinding> bindings = {
+        {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
+        {1, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
+        {2, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
+    };
+
+    char const *csSource =
+        "#version 450\n"
+        "\n"
+        "layout(local_size_x=1) in;\n"
+        "layout(set=0, binding=0) uniform texture2D t;\n"
+        "layout(set=0, binding=1) uniform sampler s;\n"
+        "layout(set=0, binding=2) buffer block { vec4 x; };\n"
+        "void main() {\n"
+        "   x = texture(sampler2D(t, s), vec2(0));\n"
+        "}\n";
+    CreateComputePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.dsl_bindings_.resize(bindings.size());
+    memcpy(pipe.dsl_bindings_.data(), bindings.data(), bindings.size() * sizeof(VkDescriptorSetLayoutBinding));
+    pipe.cs_.reset(new VkShaderObj(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this));
+    pipe.InitState();
+    m_errorMonitor->ExpectSuccess();
+    pipe.CreateComputePipeline();
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, CreateComputePipelineCombinedImageSamplerConsumedAsBoth) {
+    TEST_DESCRIPTION(
+        "Test that pipeline validation accepts a shader consuming both the sampler and the image of a combined image+sampler but "
+        "via separate variables");
+    m_errorMonitor->ExpectSuccess();
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    std::vector<VkDescriptorSetLayoutBinding> bindings = {
+        {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
+        {1, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_COMPUTE_BIT, nullptr},
+    };
+
+    char const *csSource =
+        "#version 450\n"
+        "\n"
+        "layout(local_size_x=1) in;\n"
+        "layout(set=0, binding=0) uniform texture2D t;\n"
+        "layout(set=0, binding=0) uniform sampler s;  // both binding 0!\n"
+        "layout(set=0, binding=1) buffer block { vec4 x; };\n"
+        "void main() {\n"
+        "   x = texture(sampler2D(t, s), vec2(0));\n"
+        "}\n";
+    CreateComputePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.dsl_bindings_.resize(bindings.size());
+    memcpy(pipe.dsl_bindings_.data(), bindings.data(), bindings.size() * sizeof(VkDescriptorSetLayoutBinding));
+    pipe.cs_.reset(new VkShaderObj(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this));
+    pipe.InitState();
+    m_errorMonitor->ExpectSuccess();
+    pipe.CreateComputePipeline();
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, CreateDescriptorSetBindingWithIgnoredSamplers) {
+    TEST_DESCRIPTION("Test that layers conditionally do ignore the pImmutableSamplers on vkCreateDescriptorSetLayout");
+
+    bool prop2_found = false;
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        prop2_found = true;
+    } else {
+        printf("%s %s Extension not supported, skipping push descriptor sub-tests\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    bool push_descriptor_found = false;
+    if (prop2_found && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+
+        // In addition to the extension being supported, we need at least one push descriptor available;
+        // some implementations report an invalid maxPushDescriptors of 0.
+        push_descriptor_found = GetPushDescriptorProperties(instance(), gpu()).maxPushDescriptors > 0;
+    } else {
+        printf("%s %s Extension not supported, skipping push descriptor sub-tests\n", kSkipPrefix,
+               VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
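+    // A deliberately bogus pointer: pImmutableSamplers must be ignored for non-sampler descriptor types, so the layer should never dereference it.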
+    const uint64_t fake_address_64 = 0xCDCDCDCDCDCDCDCD;
+    const uint64_t fake_address_32 = 0xCDCDCDCD;
+    const void *fake_pointer =
+        sizeof(void *) == 8 ? reinterpret_cast<void *>(fake_address_64) : reinterpret_cast<void *>(fake_address_32);
+    const VkSampler *hopefully_undereferencable_pointer = reinterpret_cast<const VkSampler *>(fake_pointer);
+
+    // regular descriptors
+    m_errorMonitor->ExpectSuccess();
+    {
+        const VkDescriptorSetLayoutBinding non_sampler_bindings[] = {
+            {0, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+            {1, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+            {2, VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+            {3, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+            {4, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+            {5, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+            {6, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+            {7, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+            {8, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+        };
+        const VkDescriptorSetLayoutCreateInfo dslci = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, nullptr, 0,
+                                                       static_cast<uint32_t>(size(non_sampler_bindings)), non_sampler_bindings};
+        VkDescriptorSetLayout dsl;
+        const VkResult err = vk::CreateDescriptorSetLayout(m_device->device(), &dslci, nullptr, &dsl);
+        ASSERT_VK_SUCCESS(err);
+        vk::DestroyDescriptorSetLayout(m_device->device(), dsl, nullptr);
+    }
+    m_errorMonitor->VerifyNotFound();
+
+    if (push_descriptor_found) {
+        // push descriptors
+        m_errorMonitor->ExpectSuccess();
+        {
+            const VkDescriptorSetLayoutBinding non_sampler_bindings[] = {
+                {0, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+                {1, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+                {2, VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+                {3, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+                {4, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+                {5, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+                {6, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
+            };
+            const VkDescriptorSetLayoutCreateInfo dslci = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, nullptr,
+                                                           VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR,
+                                                           static_cast<uint32_t>(size(non_sampler_bindings)), non_sampler_bindings};
+            VkDescriptorSetLayout dsl;
+            const VkResult err = vk::CreateDescriptorSetLayout(m_device->device(), &dslci, nullptr, &dsl);
+            ASSERT_VK_SUCCESS(err);
+            vk::DestroyDescriptorSetLayout(m_device->device(), dsl, nullptr);
+        }
+        m_errorMonitor->VerifyNotFound();
+    }
+}
+
+TEST_F(VkPositiveLayerTest, GpuValidationInlineUniformBlockAndMiscGpu) {
+    TEST_DESCRIPTION(
+        "GPU validation: Make sure inline uniform blocks don't generate false validation errors, verify reserved descriptor slot "
+        "and verify pipeline recovery");
+    m_errorMonitor->ExpectSuccess();
+    VkValidationFeatureEnableEXT enables[] = {VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT,
+                                              VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT};
+    VkValidationFeaturesEXT features = {};
+    features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
+    features.enabledValidationFeatureCount = 2;
+    features.pEnabledValidationFeatures = enables;
+    bool descriptor_indexing = CheckDescriptorIndexingSupportAndInitFramework(this, m_instance_extension_names,
+                                                                              m_device_extension_names, &features, m_errorMonitor);
+    if (DeviceIsMockICD() || DeviceSimulation()) {
+        printf("%s Test not supported by MockICD, skipping tests\n", kSkipPrefix);
+        return;
+    }
+    VkPhysicalDeviceFeatures2KHR features2 = {};
+    auto indexing_features = lvl_init_struct<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>();
+    auto inline_uniform_block_features = lvl_init_struct<VkPhysicalDeviceInlineUniformBlockFeaturesEXT>(&indexing_features);
+    bool inline_uniform_block = DeviceExtensionSupported(gpu(), nullptr, VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME);
+    if (!(descriptor_indexing && inline_uniform_block)) {
+        printf("Descriptor indexing and/or inline uniform block not supported Skipping test\n");
+        return;
+    }
+    m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+    m_device_extension_names.push_back(VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME);
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+    features2 = lvl_init_struct<VkPhysicalDeviceFeatures2KHR>(&inline_uniform_block_features);
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+    if (!indexing_features.descriptorBindingPartiallyBound || !inline_uniform_block_features.inlineUniformBlock) {
+        printf("Not all features supported, skipping test\n");
+        return;
+    }
+    auto inline_uniform_props = lvl_init_struct<VkPhysicalDeviceInlineUniformBlockPropertiesEXT>();
+    auto prop2 = lvl_init_struct<VkPhysicalDeviceProperties2KHR>(&inline_uniform_props);
+    vk::GetPhysicalDeviceProperties2(gpu(), &prop2);
+
+    VkCommandPoolCreateFlags pool_flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2, pool_flags));
+    if (m_device->props.apiVersion < VK_API_VERSION_1_1) {
+        printf("%s GPU-Assisted validation test requires Vulkan 1.1+.\n", kSkipPrefix);
+        return;
+    }
+    auto c_queue = m_device->GetDefaultComputeQueue();
+    if (nullptr == c_queue) {
+        printf("Compute not supported, skipping test\n");
+        return;
+    }
+
+    uint32_t qfi = 0;
+    VkBufferCreateInfo bci = {};
+    bci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    bci.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
+    bci.size = 4;
+    bci.queueFamilyIndexCount = 1;
+    bci.pQueueFamilyIndices = &qfi;
+    VkBufferObj buffer0;
+    VkMemoryPropertyFlags mem_props = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+    buffer0.init(*m_device, bci, mem_props);
+
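+    // Binding flags for the two bindings below: binding 0 gets no flags, binding 1 (the inline
+    // uniform block) is marked PARTIALLY_BOUND, matching the descriptorBindingPartiallyBound
+    // feature checked above.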
+    VkDescriptorBindingFlagsEXT ds_binding_flags[2] = {};
+    ds_binding_flags[1] = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT;
+    VkDescriptorSetLayoutBindingFlagsCreateInfoEXT layout_createinfo_binding_flags[1] = {};
+    layout_createinfo_binding_flags[0].sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT;
+    layout_createinfo_binding_flags[0].pNext = NULL;
+    layout_createinfo_binding_flags[0].bindingCount = 2;
+    layout_createinfo_binding_flags[0].pBindingFlags = ds_binding_flags;
+
+    OneOffDescriptorSet descriptor_set(m_device,
+                                       {
+                                           {0, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
+                                           {1, VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, 20, VK_SHADER_STAGE_ALL,
+                                            nullptr},  // 16 bytes for ivec4, 4 more for int
+                                       },
+                                       0, layout_createinfo_binding_flags, 0);
+    const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});
+
+    VkDescriptorBufferInfo buffer_info[1] = {};
+    buffer_info[0].buffer = buffer0.handle();
+    buffer_info[0].offset = 0;
+    buffer_info[0].range = sizeof(uint32_t);
+
+    const uint32_t test_data = 0xdeadca7;
+    VkWriteDescriptorSetInlineUniformBlockEXT write_inline_uniform = {};
+    write_inline_uniform.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT;
+    write_inline_uniform.dataSize = 4;
+    write_inline_uniform.pData = &test_data;
+
+    VkWriteDescriptorSet descriptor_writes[2] = {};
+    descriptor_writes[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_writes[0].dstSet = descriptor_set.set_;
+    descriptor_writes[0].dstBinding = 0;
+    descriptor_writes[0].descriptorCount = 1;
+    descriptor_writes[0].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
+    descriptor_writes[0].pBufferInfo = buffer_info;
+
+    descriptor_writes[1].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_writes[1].dstSet = descriptor_set.set_;
+    descriptor_writes[1].dstBinding = 1;
+    descriptor_writes[1].dstArrayElement = 16;  // Skip first 16 bytes (dummy)
+    descriptor_writes[1].descriptorCount = 4;   // Write 4 bytes to val
+    descriptor_writes[1].descriptorType = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT;
+    descriptor_writes[1].pNext = &write_inline_uniform;
+    vk::UpdateDescriptorSets(m_device->device(), 2, descriptor_writes, 0, NULL);
+
+    char const *csSource =
+        "#version 450\n"
+        "#extension GL_EXT_nonuniform_qualifier : enable\n "
+        "layout(set = 0, binding = 0) buffer StorageBuffer { uint index; } u_index;"
+        "layout(set = 0, binding = 1) uniform inlineubodef { ivec4 dummy; int val; } inlineubo;\n"
+
+        "void main() {\n"
+        "    u_index.index = inlineubo.val;\n"
+        "}\n";
+
+    auto shader_module = new VkShaderObj(m_device, csSource, VK_SHADER_STAGE_COMPUTE_BIT, this);
+
+    VkPipelineShaderStageCreateInfo stage;
+    stage.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+    stage.pNext = nullptr;
+    stage.flags = 0;
+    stage.stage = VK_SHADER_STAGE_COMPUTE_BIT;
+    stage.module = shader_module->handle();
+    stage.pName = "main";
+    stage.pSpecializationInfo = nullptr;
+
+    // CreateComputePipelines
+    VkComputePipelineCreateInfo pipeline_info = {};
+    pipeline_info.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
+    pipeline_info.pNext = nullptr;
+    pipeline_info.flags = 0;
+    pipeline_info.layout = pipeline_layout.handle();
+    pipeline_info.basePipelineHandle = VK_NULL_HANDLE;
+    pipeline_info.basePipelineIndex = -1;
+    pipeline_info.stage = stage;
+
+    VkPipeline c_pipeline;
+    vk::CreateComputePipelines(device(), VK_NULL_HANDLE, 1, &pipeline_info, nullptr, &c_pipeline);
+
+    m_commandBuffer->begin();
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, c_pipeline);
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipeline_layout.handle(), 0, 1,
+                              &descriptor_set.set_, 0, nullptr);
+    vk::CmdDispatch(m_commandBuffer->handle(), 1, 1, 1);
+    m_commandBuffer->end();
+
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    vk::QueueSubmit(c_queue->handle(), 1, &submit_info, VK_NULL_HANDLE);
+    vk::QueueWaitIdle(m_device->m_queue);
+    m_errorMonitor->VerifyNotFound();
+    vk::DestroyPipeline(m_device->handle(), c_pipeline, NULL);
+
+    uint32_t *data = (uint32_t *)buffer0.memory().map();
+    ASSERT_TRUE(*data == test_data);
+    *data = 0;
+    buffer0.memory().unmap();
+
+    // Also verify that binding slot reservation is working
+    VkInstanceCreateInfo inst_info = {};
+    VkInstance test_inst;
+    inst_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
+    vk::CreateInstance(&inst_info, NULL, &test_inst);
+    uint32_t gpu_count;
+    VkPhysicalDevice objs[4];
+    vk::EnumeratePhysicalDevices(test_inst, &gpu_count, NULL);
+    if (gpu_count > 4) gpu_count = 4;
+    vk::EnumeratePhysicalDevices(test_inst, &gpu_count, objs);
+    VkPhysicalDeviceProperties properties;
+    vk::GetPhysicalDeviceProperties(objs[0], &properties);
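+    // With binding-slot reservation enabled, the instrumented device is expected to advertise one
+    // fewer maxBoundDescriptorSets than the raw physical device reports.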
+    if (m_device->props.limits.maxBoundDescriptorSets != properties.limits.maxBoundDescriptorSets - 1)
+        m_errorMonitor->SetError("VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT not functioning as expected");
+    vk::DestroyInstance(test_inst, NULL);
+
+    // Now be sure that recovery from an unavailable descriptor set works and that uninstrumented shaders are used
+    VkDescriptorSetLayoutBinding dsl_binding[2] = {};
+    dsl_binding[0].binding = 0;
+    dsl_binding[0].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
+    dsl_binding[0].descriptorCount = 1;
+    dsl_binding[0].stageFlags = VK_SHADER_STAGE_ALL;
+    dsl_binding[1].binding = 1;
+    dsl_binding[1].descriptorType = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT;
+    dsl_binding[1].descriptorCount = 20;
+    dsl_binding[1].stageFlags = VK_SHADER_STAGE_ALL;
+    VkDescriptorSetLayout layouts[32];
+    VkDescriptorSetLayoutCreateInfo dsl_create_info = {};
+    dsl_create_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
+    dsl_create_info.pNext = layout_createinfo_binding_flags;
+    dsl_create_info.pBindings = dsl_binding;
+    dsl_create_info.bindingCount = 2;
+    for (int i = 0; i < 32; i++) {
+        vk::CreateDescriptorSetLayout(m_device->handle(), &dsl_create_info, NULL, &layouts[i]);
+    }
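+    // Binding 32 set layouts is intended to use up every descriptor set slot so GPU-AV cannot
+    // attach its reserved set and must fall back to the uninstrumented shader.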
+    VkPipelineLayoutCreateInfo pl_create_info = {};
+    VkPipelineLayout pl_layout;
+    pl_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+    pl_create_info.setLayoutCount = 32;
+    pl_create_info.pSetLayouts = layouts;
+    vk::CreatePipelineLayout(m_device->handle(), &pl_create_info, NULL, &pl_layout);
+    pipeline_info.layout = pl_layout;
+    vk::CreateComputePipelines(device(), VK_NULL_HANDLE, 1, &pipeline_info, nullptr, &c_pipeline);
+    m_commandBuffer->begin();
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, c_pipeline);
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pl_layout, 0, 1, &descriptor_set.set_, 0,
+                              nullptr);
+    vk::CmdDispatch(m_commandBuffer->handle(), 1, 1, 1);
+    m_commandBuffer->end();
+    vk::QueueSubmit(c_queue->handle(), 1, &submit_info, VK_NULL_HANDLE);
+    vk::QueueWaitIdle(m_device->m_queue);
+    vk::DestroyShaderModule(m_device->handle(), shader_module->handle(), NULL);
+    vk::DestroyPipelineLayout(m_device->handle(), pl_layout, NULL);
+    vk::DestroyPipeline(m_device->handle(), c_pipeline, NULL);
+    for (int i = 0; i < 32; i++) {
+        vk::DestroyDescriptorSetLayout(m_device->handle(), layouts[i], NULL);
+    }
+    m_errorMonitor->VerifyNotFound();
+    data = (uint32_t *)buffer0.memory().map();
+    if (*data != test_data) m_errorMonitor->SetError("Pipeline recovery when resources unavailable not functioning as expected");
+    buffer0.memory().unmap();
+}
+
+TEST_F(VkPositiveLayerTest, Maintenance1Tests) {
+    TEST_DESCRIPTION("Validate various special cases for the Maintenance1_KHR extension");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+    } else {
+        printf("%s Maintenance1 Extension not supported, skipping tests\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    m_errorMonitor->ExpectSuccess();
+
+    VkCommandBufferObj cmd_buf(m_device, m_commandPool);
+    cmd_buf.begin();
+    // A negative viewport height is only valid when Maintenance1 is enabled, so no error is expected here
+    VkViewport viewport = {0, 0, 16, -16, 0, 1};
+    vk::CmdSetViewport(cmd_buf.handle(), 0, 1, &viewport);
+    cmd_buf.end();
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, ValidStructPNext) {
+    TEST_DESCRIPTION("Verify that a valid pNext value is handled correctly");
+
+    // Positive test to check parameter_validation and unique_objects support for NV_dedicated_allocation
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME);
+    } else {
+        printf("%s VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME Extension not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    m_errorMonitor->ExpectSuccess();
+
+    VkDedicatedAllocationBufferCreateInfoNV dedicated_buffer_create_info = {};
+    dedicated_buffer_create_info.sType = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV;
+    dedicated_buffer_create_info.pNext = nullptr;
+    dedicated_buffer_create_info.dedicatedAllocation = VK_TRUE;
+
+    uint32_t queue_family_index = 0;
+    VkBufferCreateInfo buffer_create_info = {};
+    buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    buffer_create_info.pNext = &dedicated_buffer_create_info;
+    buffer_create_info.size = 1024;
+    buffer_create_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+    buffer_create_info.queueFamilyIndexCount = 1;
+    buffer_create_info.pQueueFamilyIndices = &queue_family_index;
+
+    VkBuffer buffer;
+    VkResult err = vk::CreateBuffer(m_device->device(), &buffer_create_info, NULL, &buffer);
+    ASSERT_VK_SUCCESS(err);
+
+    VkMemoryRequirements memory_reqs;
+    vk::GetBufferMemoryRequirements(m_device->device(), buffer, &memory_reqs);
+
+    VkDedicatedAllocationMemoryAllocateInfoNV dedicated_memory_info = {};
+    dedicated_memory_info.sType = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV;
+    dedicated_memory_info.pNext = nullptr;
+    dedicated_memory_info.buffer = buffer;
+    dedicated_memory_info.image = VK_NULL_HANDLE;
+
+    VkMemoryAllocateInfo memory_info = {};
+    memory_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    memory_info.pNext = &dedicated_memory_info;
+    memory_info.allocationSize = memory_reqs.size;
+
+    bool pass;
+    pass = m_device->phy().set_memory_type(memory_reqs.memoryTypeBits, &memory_info, 0);
+    ASSERT_TRUE(pass);
+
+    VkDeviceMemory buffer_memory;
+    err = vk::AllocateMemory(m_device->device(), &memory_info, NULL, &buffer_memory);
+    ASSERT_VK_SUCCESS(err);
+
+    err = vk::BindBufferMemory(m_device->device(), buffer, buffer_memory, 0);
+    ASSERT_VK_SUCCESS(err);
+
+    vk::DestroyBuffer(m_device->device(), buffer, NULL);
+    vk::FreeMemory(m_device->device(), buffer_memory, NULL);
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, PSOPolygonModeValid) {
+    TEST_DESCRIPTION("Verify that using a solid polygon fill mode works correctly.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    std::vector<const char *> device_extension_names;
+    auto features = m_device->phy().features();
+    // Artificially disable support for non-solid fill modes
+    features.fillModeNonSolid = false;
+    // The sacrificial device object
+    VkDeviceObj test_device(0, gpu(), device_extension_names, &features);
+
+    VkRenderpassObj render_pass(&test_device);
+
+    const VkPipelineLayoutObj pipeline_layout(&test_device);
+
+    VkPipelineRasterizationStateCreateInfo rs_ci = {};
+    rs_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
+    rs_ci.pNext = nullptr;
+    rs_ci.lineWidth = 1.0f;
+    rs_ci.rasterizerDiscardEnable = false;
+
+    VkShaderObj vs(&test_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(&test_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    // Set polygonMode=FILL. No error is expected
+    m_errorMonitor->ExpectSuccess();
+    {
+        VkPipelineObj pipe(&test_device);
+        pipe.AddShader(&vs);
+        pipe.AddShader(&fs);
+        pipe.AddDefaultColorAttachment();
+        // Set polygonMode to a good value
+        rs_ci.polygonMode = VK_POLYGON_MODE_FILL;
+        pipe.SetRasterization(&rs_ci);
+        pipe.CreateVKPipeline(pipeline_layout.handle(), render_pass.handle());
+    }
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, LongSemaphoreChain) {
+    m_errorMonitor->ExpectSuccess();
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    VkResult err;
+
+    std::vector<VkSemaphore> semaphores;
+
+    const int chainLength = 32768;
+    VkPipelineStageFlags flags = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
+
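+    // Each submit waits on the semaphore signaled by the previous submit and signals a fresh one,
+    // building a dependency chain chainLength submissions long.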
+    for (int i = 0; i < chainLength; i++) {
+        VkSemaphoreCreateInfo sci = {VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, nullptr, 0};
+        VkSemaphore semaphore;
+        err = vk::CreateSemaphore(m_device->device(), &sci, nullptr, &semaphore);
+        ASSERT_VK_SUCCESS(err);
+
+        semaphores.push_back(semaphore);
+
+        VkSubmitInfo si = {VK_STRUCTURE_TYPE_SUBMIT_INFO,
+                           nullptr,
+                           semaphores.size() > 1 ? 1u : 0u,
+                           semaphores.size() > 1 ? &semaphores[semaphores.size() - 2] : nullptr,
+                           &flags,
+                           0,
+                           nullptr,
+                           1,
+                           &semaphores[semaphores.size() - 1]};
+        err = vk::QueueSubmit(m_device->m_queue, 1, &si, VK_NULL_HANDLE);
+        ASSERT_VK_SUCCESS(err);
+    }
+
+    VkFenceCreateInfo fci = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, 0};
+    VkFence fence;
+    err = vk::CreateFence(m_device->device(), &fci, nullptr, &fence);
+    ASSERT_VK_SUCCESS(err);
+    VkSubmitInfo si = {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 1, &semaphores.back(), &flags, 0, nullptr, 0, nullptr};
+    err = vk::QueueSubmit(m_device->m_queue, 1, &si, fence);
+    ASSERT_VK_SUCCESS(err);
+
+    vk::WaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
+
+    for (auto semaphore : semaphores) vk::DestroySemaphore(m_device->device(), semaphore, nullptr);
+
+    vk::DestroyFence(m_device->device(), fence, nullptr);
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, ExternalSemaphore) {
+#ifdef _WIN32
+    const auto extension_name = VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME;
+    const auto handle_type = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR;
+#else
+    const auto extension_name = VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME;
+    const auto handle_type = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR;
+#endif
+    // Check for external semaphore instance extensions
+    if (InstanceExtensionSupported(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME);
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s External semaphore extension not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    // Check for external semaphore device extensions
+    if (DeviceExtensionSupported(gpu(), nullptr, extension_name)) {
+        m_device_extension_names.push_back(extension_name);
+        m_device_extension_names.push_back(VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
+    } else {
+        printf("%s External semaphore extension not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    // Check for external semaphore import and export capability
+    VkPhysicalDeviceExternalSemaphoreInfoKHR esi = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR, nullptr,
+                                                    handle_type};
+    VkExternalSemaphorePropertiesKHR esp = {VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR, nullptr};
+    auto vkGetPhysicalDeviceExternalSemaphorePropertiesKHR =
+        (PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR)vk::GetInstanceProcAddr(
+            instance(), "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR");
+    vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(gpu(), &esi, &esp);
+
+    if (!(esp.externalSemaphoreFeatures & VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR) ||
+        !(esp.externalSemaphoreFeatures & VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR)) {
+        printf("%s External semaphore does not support importing and exporting, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    VkResult err;
+    m_errorMonitor->ExpectSuccess();
+
+    // Create a semaphore to export payload from
+    VkExportSemaphoreCreateInfoKHR esci = {VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR, nullptr, handle_type};
+    VkSemaphoreCreateInfo sci = {VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, &esci, 0};
+
+    VkSemaphore export_semaphore;
+    err = vk::CreateSemaphore(m_device->device(), &sci, nullptr, &export_semaphore);
+    ASSERT_VK_SUCCESS(err);
+
+    // Create a semaphore to import payload into
+    sci.pNext = nullptr;
+    VkSemaphore import_semaphore;
+    err = vk::CreateSemaphore(m_device->device(), &sci, nullptr, &import_semaphore);
+    ASSERT_VK_SUCCESS(err);
+
+#ifdef _WIN32
+    // Export semaphore payload to an opaque handle
+    HANDLE handle = nullptr;
+    VkSemaphoreGetWin32HandleInfoKHR ghi = {VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR, nullptr, export_semaphore,
+                                            handle_type};
+    auto vkGetSemaphoreWin32HandleKHR =
+        (PFN_vkGetSemaphoreWin32HandleKHR)vk::GetDeviceProcAddr(m_device->device(), "vkGetSemaphoreWin32HandleKHR");
+    err = vkGetSemaphoreWin32HandleKHR(m_device->device(), &ghi, &handle);
+    ASSERT_VK_SUCCESS(err);
+
+    // Import opaque handle exported above
+    VkImportSemaphoreWin32HandleInfoKHR ihi = {
+        VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR, nullptr, import_semaphore, 0, handle_type, handle, nullptr};
+    auto vkImportSemaphoreWin32HandleKHR =
+        (PFN_vkImportSemaphoreWin32HandleKHR)vk::GetDeviceProcAddr(m_device->device(), "vkImportSemaphoreWin32HandleKHR");
+    err = vkImportSemaphoreWin32HandleKHR(m_device->device(), &ihi);
+    ASSERT_VK_SUCCESS(err);
+#else
+    // Export semaphore payload to an opaque handle
+    int fd = 0;
+    VkSemaphoreGetFdInfoKHR ghi = {VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR, nullptr, export_semaphore, handle_type};
+    auto vkGetSemaphoreFdKHR = (PFN_vkGetSemaphoreFdKHR)vk::GetDeviceProcAddr(m_device->device(), "vkGetSemaphoreFdKHR");
+    err = vkGetSemaphoreFdKHR(m_device->device(), &ghi, &fd);
+    ASSERT_VK_SUCCESS(err);
+
+    // Import opaque handle exported above
+    VkImportSemaphoreFdInfoKHR ihi = {
+        VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR, nullptr, import_semaphore, 0, handle_type, fd};
+    auto vkImportSemaphoreFdKHR = (PFN_vkImportSemaphoreFdKHR)vk::GetDeviceProcAddr(m_device->device(), "vkImportSemaphoreFdKHR");
+    err = vkImportSemaphoreFdKHR(m_device->device(), &ihi);
+    ASSERT_VK_SUCCESS(err);
+#endif
+
+    // Signal the exported semaphore and wait on the imported semaphore
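+    // After the import, both semaphores are expected to reference the same payload, so signaling
+    // one and waiting on the other in alternating submits should be valid.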
+    VkPipelineStageFlags flags = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
+    VkSubmitInfo si[] = {
+        {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, &flags, 0, nullptr, 1, &export_semaphore},
+        {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 1, &import_semaphore, &flags, 0, nullptr, 0, nullptr},
+        {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 0, nullptr, &flags, 0, nullptr, 1, &export_semaphore},
+        {VK_STRUCTURE_TYPE_SUBMIT_INFO, nullptr, 1, &import_semaphore, &flags, 0, nullptr, 0, nullptr},
+    };
+    err = vk::QueueSubmit(m_device->m_queue, 4, si, VK_NULL_HANDLE);
+    ASSERT_VK_SUCCESS(err);
+
+    if (m_device->phy().features().sparseBinding) {
+        // Signal the imported semaphore and wait on the exported semaphore
+        VkBindSparseInfo bi[] = {
+            {VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 1, &import_semaphore},
+            {VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, nullptr, 1, &export_semaphore, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr},
+            {VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 1, &import_semaphore},
+            {VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, nullptr, 1, &export_semaphore, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr},
+        };
+        err = vk::QueueBindSparse(m_device->m_queue, 4, bi, VK_NULL_HANDLE);
+        ASSERT_VK_SUCCESS(err);
+    }
+
+    // Cleanup
+    err = vk::QueueWaitIdle(m_device->m_queue);
+    ASSERT_VK_SUCCESS(err);
+    vk::DestroySemaphore(m_device->device(), export_semaphore, nullptr);
+    vk::DestroySemaphore(m_device->device(), import_semaphore, nullptr);
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, ExternalFence) {
+#ifdef _WIN32
+    const auto extension_name = VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME;
+    const auto handle_type = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR;
+#else
+    const auto extension_name = VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME;
+    const auto handle_type = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR;
+#endif
+    // Check for external fence instance extensions
+    if (InstanceExtensionSupported(VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME);
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s External fence extension not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    // Check for external fence device extensions
+    if (DeviceExtensionSupported(gpu(), nullptr, extension_name)) {
+        m_device_extension_names.push_back(extension_name);
+        m_device_extension_names.push_back(VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME);
+    } else {
+        printf("%s External fence extension not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    // Check for external fence import and export capability
+    VkPhysicalDeviceExternalFenceInfoKHR efi = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR, nullptr, handle_type};
+    VkExternalFencePropertiesKHR efp = {VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR, nullptr};
+    auto vkGetPhysicalDeviceExternalFencePropertiesKHR = (PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR)vk::GetInstanceProcAddr(
+        instance(), "vkGetPhysicalDeviceExternalFencePropertiesKHR");
+    vkGetPhysicalDeviceExternalFencePropertiesKHR(gpu(), &efi, &efp);
+
+    if (!(efp.externalFenceFeatures & VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR) ||
+        !(efp.externalFenceFeatures & VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR)) {
+        printf("%s External fence does not support importing and exporting, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    VkResult err;
+    m_errorMonitor->ExpectSuccess();
+
+    // Create a fence to export payload from
+    VkFence export_fence;
+    {
+        VkExportFenceCreateInfoKHR efci = {VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR, nullptr, handle_type};
+        VkFenceCreateInfo fci = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, &efci, 0};
+        err = vk::CreateFence(m_device->device(), &fci, nullptr, &export_fence);
+        ASSERT_VK_SUCCESS(err);
+    }
+
+    // Create a fence to import payload into
+    VkFence import_fence;
+    {
+        VkFenceCreateInfo fci = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, 0};
+        err = vk::CreateFence(m_device->device(), &fci, nullptr, &import_fence);
+        ASSERT_VK_SUCCESS(err);
+    }
+
+#ifdef _WIN32
+    // Export fence payload to an opaque handle
+    HANDLE handle = nullptr;
+    {
+        VkFenceGetWin32HandleInfoKHR ghi = {VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR, nullptr, export_fence, handle_type};
+        auto vkGetFenceWin32HandleKHR =
+            (PFN_vkGetFenceWin32HandleKHR)vk::GetDeviceProcAddr(m_device->device(), "vkGetFenceWin32HandleKHR");
+        err = vkGetFenceWin32HandleKHR(m_device->device(), &ghi, &handle);
+        ASSERT_VK_SUCCESS(err);
+    }
+
+    // Import opaque handle exported above
+    {
+        VkImportFenceWin32HandleInfoKHR ifi = {
+            VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR, nullptr, import_fence, 0, handle_type, handle, nullptr};
+        auto vkImportFenceWin32HandleKHR =
+            (PFN_vkImportFenceWin32HandleKHR)vk::GetDeviceProcAddr(m_device->device(), "vkImportFenceWin32HandleKHR");
+        err = vkImportFenceWin32HandleKHR(m_device->device(), &ifi);
+        ASSERT_VK_SUCCESS(err);
+    }
+#else
+    // Export fence payload to an opaque handle
+    int fd = 0;
+    {
+        VkFenceGetFdInfoKHR gfi = {VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR, nullptr, export_fence, handle_type};
+        auto vkGetFenceFdKHR = (PFN_vkGetFenceFdKHR)vk::GetDeviceProcAddr(m_device->device(), "vkGetFenceFdKHR");
+        err = vkGetFenceFdKHR(m_device->device(), &gfi, &fd);
+        ASSERT_VK_SUCCESS(err);
+    }
+
+    // Import opaque handle exported above
+    {
+        VkImportFenceFdInfoKHR ifi = {VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR, nullptr, import_fence, 0, handle_type, fd};
+        auto vkImportFenceFdKHR = (PFN_vkImportFenceFdKHR)vk::GetDeviceProcAddr(m_device->device(), "vkImportFenceFdKHR");
+        err = vkImportFenceFdKHR(m_device->device(), &ifi);
+        ASSERT_VK_SUCCESS(err);
+    }
+#endif
+
+    // Signal the exported fence and wait on the imported fence
+    vk::QueueSubmit(m_device->m_queue, 0, nullptr, export_fence);
+    vk::WaitForFences(m_device->device(), 1, &import_fence, VK_TRUE, 1000000000);
+    vk::ResetFences(m_device->device(), 1, &import_fence);
+    vk::QueueSubmit(m_device->m_queue, 0, nullptr, export_fence);
+    vk::WaitForFences(m_device->device(), 1, &import_fence, VK_TRUE, 1000000000);
+    vk::ResetFences(m_device->device(), 1, &import_fence);
+
+    // Signal the imported fence and wait on the exported fence
+    vk::QueueSubmit(m_device->m_queue, 0, nullptr, import_fence);
+    vk::WaitForFences(m_device->device(), 1, &export_fence, VK_TRUE, 1000000000);
+    vk::ResetFences(m_device->device(), 1, &export_fence);
+    vk::QueueSubmit(m_device->m_queue, 0, nullptr, import_fence);
+    vk::WaitForFences(m_device->device(), 1, &export_fence, VK_TRUE, 1000000000);
+    vk::ResetFences(m_device->device(), 1, &export_fence);
+
+    // Cleanup
+    err = vk::QueueWaitIdle(m_device->m_queue);
+    ASSERT_VK_SUCCESS(err);
+    vk::DestroyFence(m_device->device(), export_fence, nullptr);
+    vk::DestroyFence(m_device->device(), import_fence, nullptr);
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, ThreadNullFenceCollision) {
+    test_platform_thread thread;
+
+    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "THREADING ERROR");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    struct thread_data_struct data;
+    data.device = m_device->device();
+    bool bailout = false;
+    data.bailout = &bailout;
+    m_errorMonitor->SetBailout(data.bailout);
+
+    // Call vk::DestroyFence on VK_NULL_HANDLE repeatedly from multiple threads.
+    // There should be no validation error from collision of that non-object.
+    test_platform_thread_create(&thread, ReleaseNullFence, (void *)&data);
+    for (int i = 0; i < 40000; i++) {
+        vk::DestroyFence(m_device->device(), VK_NULL_HANDLE, NULL);
+    }
+    test_platform_thread_join(thread, NULL);
+
+    m_errorMonitor->SetBailout(NULL);
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, ClearColorImageWithValidRange) {
+    TEST_DESCRIPTION("Record clear color with a valid VkImageSubresourceRange");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkImageObj image(m_device);
+    image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
+    ASSERT_TRUE(image.create_info().arrayLayers == 1);
+    ASSERT_TRUE(image.initialized());
+    image.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+
+    const VkClearColorValue clear_color = {{0.0f, 0.0f, 0.0f, 1.0f}};
+
+    m_commandBuffer->begin();
+    const auto cb_handle = m_commandBuffer->handle();
+
+    // Try good case
+    {
+        m_errorMonitor->ExpectSuccess();
+        VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
+        vk::CmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
+        m_errorMonitor->VerifyNotFound();
+    }
+
+    // Try good case with VK_REMAINING
+    {
+        m_errorMonitor->ExpectSuccess();
+        VkImageSubresourceRange range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, VK_REMAINING_MIP_LEVELS, 0, VK_REMAINING_ARRAY_LAYERS};
+        vk::CmdClearColorImage(cb_handle, image.handle(), image.Layout(), &clear_color, 1, &range);
+        m_errorMonitor->VerifyNotFound();
+    }
+}
+
+TEST_F(VkPositiveLayerTest, ClearDepthStencilWithValidRange) {
+    TEST_DESCRIPTION("Record clear depth with a valid VkImageSubresourceRange");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    auto depth_format = FindSupportedDepthStencilFormat(gpu());
+    if (!depth_format) {
+        printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    VkImageObj image(m_device);
+    image.Init(32, 32, 1, depth_format, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
+    ASSERT_TRUE(image.create_info().arrayLayers == 1);
+    ASSERT_TRUE(image.initialized());
+    const VkImageAspectFlags ds_aspect = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+    image.SetLayout(ds_aspect, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+
+    const VkClearDepthStencilValue clear_value = {};
+
+    m_commandBuffer->begin();
+    const auto cb_handle = m_commandBuffer->handle();
+
+    // Try good case
+    {
+        m_errorMonitor->ExpectSuccess();
+        VkImageSubresourceRange range = {ds_aspect, 0, 1, 0, 1};
+        vk::CmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
+        m_errorMonitor->VerifyNotFound();
+    }
+
+    // Try good case with VK_REMAINING
+    {
+        m_errorMonitor->ExpectSuccess();
+        VkImageSubresourceRange range = {ds_aspect, 0, VK_REMAINING_MIP_LEVELS, 0, VK_REMAINING_ARRAY_LAYERS};
+        vk::CmdClearDepthStencilImage(cb_handle, image.handle(), image.Layout(), &clear_value, 1, &range);
+        m_errorMonitor->VerifyNotFound();
+    }
+}
+
+TEST_F(VkPositiveLayerTest, CreateGraphicsPipelineWithIgnoredPointers) {
+    TEST_DESCRIPTION("Create Graphics Pipeline with pointers that must be ignored by layers");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    m_depth_stencil_fmt = FindSupportedDepthStencilFormat(gpu());
+    ASSERT_TRUE(m_depth_stencil_fmt != 0);
+
+    m_depthStencil->Init(m_device, static_cast<int32_t>(m_width), static_cast<int32_t>(m_height), m_depth_stencil_fmt);
+
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget(m_depthStencil->BindInfo()));
+
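+    // Struct members the implementation is required to ignore are pointed at this (hopefully)
+    // unmapped address, so any layer or driver that wrongly dereferences them should be caught.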
+    const uint64_t fake_address_64 = 0xCDCDCDCDCDCDCDCD;
+    const uint64_t fake_address_32 = 0xCDCDCDCD;
+    void *hopefully_undereferencable_pointer =
+        sizeof(void *) == 8 ? reinterpret_cast<void *>(fake_address_64) : reinterpret_cast<void *>(fake_address_32);
+
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+
+    const VkPipelineVertexInputStateCreateInfo pipeline_vertex_input_state_create_info{
+        VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
+        nullptr,  // pNext
+        0,        // flags
+        0,
+        nullptr,  // bindings
+        0,
+        nullptr  // attributes
+    };
+
+    const VkPipelineInputAssemblyStateCreateInfo pipeline_input_assembly_state_create_info{
+        VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
+        nullptr,  // pNext
+        0,        // flags
+        VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
+        VK_FALSE  // primitive restart
+    };
+
+    const VkPipelineRasterizationStateCreateInfo pipeline_rasterization_state_create_info_template{
+        VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
+        nullptr,   // pNext
+        0,         // flags
+        VK_FALSE,  // depthClamp
+        VK_FALSE,  // rasterizerDiscardEnable
+        VK_POLYGON_MODE_FILL,
+        VK_CULL_MODE_NONE,
+        VK_FRONT_FACE_COUNTER_CLOCKWISE,
+        VK_FALSE,  // depthBias
+        0.0f,
+        0.0f,
+        0.0f,  // depthBias params
+        1.0f   // lineWidth
+    };
+
+    VkPipelineLayout pipeline_layout;
+    {
+        VkPipelineLayoutCreateInfo pipeline_layout_create_info{
+            VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
+            nullptr,  // pNext
+            0,        // flags
+            0,
+            nullptr,  // layouts
+            0,
+            nullptr  // push constants
+        };
+
+        VkResult err = vk::CreatePipelineLayout(m_device->device(), &pipeline_layout_create_info, nullptr, &pipeline_layout);
+        ASSERT_VK_SUCCESS(err);
+    }
+
+    // try disabled rasterizer and no tessellation
+    {
+        m_errorMonitor->ExpectSuccess();
+
+        VkPipelineRasterizationStateCreateInfo pipeline_rasterization_state_create_info =
+            pipeline_rasterization_state_create_info_template;
+        pipeline_rasterization_state_create_info.rasterizerDiscardEnable = VK_TRUE;
+
+        VkGraphicsPipelineCreateInfo graphics_pipeline_create_info{
+            VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
+            nullptr,  // pNext
+            0,        // flags
+            1,        // stageCount
+            &vs.GetStageCreateInfo(),
+            &pipeline_vertex_input_state_create_info,
+            &pipeline_input_assembly_state_create_info,
+            reinterpret_cast<const VkPipelineTessellationStateCreateInfo *>(hopefully_undereferencable_pointer),
+            reinterpret_cast<const VkPipelineViewportStateCreateInfo *>(hopefully_undereferencable_pointer),
+            &pipeline_rasterization_state_create_info,
+            reinterpret_cast<const VkPipelineMultisampleStateCreateInfo *>(hopefully_undereferencable_pointer),
+            reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo *>(hopefully_undereferencable_pointer),
+            reinterpret_cast<const VkPipelineColorBlendStateCreateInfo *>(hopefully_undereferencable_pointer),
+            nullptr,  // dynamic states
+            pipeline_layout,
+            m_renderPass,
+            0,  // subpass
+            VK_NULL_HANDLE,
+            0};
+
+        VkPipeline pipeline;
+        vk::CreateGraphicsPipelines(m_device->handle(), VK_NULL_HANDLE, 1, &graphics_pipeline_create_info, nullptr, &pipeline);
+
+        m_errorMonitor->VerifyNotFound();
+
+        vk::DestroyPipeline(m_device->handle(), pipeline, nullptr);
+    }
+
+    const VkPipelineMultisampleStateCreateInfo pipeline_multisample_state_create_info{
+        VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
+        nullptr,  // pNext
+        0,        // flags
+        VK_SAMPLE_COUNT_1_BIT,
+        VK_FALSE,  // sample shading
+        0.0f,      // minSampleShading
+        nullptr,   // pSampleMask
+        VK_FALSE,  // alphaToCoverageEnable
+        VK_FALSE   // alphaToOneEnable
+    };
+
+    // try enabled rasterizer but no subpass attachments
+    {
+        m_errorMonitor->ExpectSuccess();
+
+        VkPipelineRasterizationStateCreateInfo pipeline_rasterization_state_create_info =
+            pipeline_rasterization_state_create_info_template;
+        pipeline_rasterization_state_create_info.rasterizerDiscardEnable = VK_FALSE;
+
+        VkViewport viewport = {0.0f, 0.0f, 1.0f, 1.0f, 0.0f, 1.0f};
+        VkRect2D scissor = {{0, 0}, {static_cast<uint32_t>(m_width), static_cast<uint32_t>(m_height)}};
+
+        const VkPipelineViewportStateCreateInfo pipeline_viewport_state_create_info{
+            VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
+            nullptr,  // pNext
+            0,        // flags
+            1,
+            &viewport,
+            1,
+            &scissor};
+
+        VkRenderPass render_pass;
+        {
+            VkSubpassDescription subpass_desc = {};
+
+            VkRenderPassCreateInfo render_pass_create_info{
+                VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
+                nullptr,  // pNext
+                0,        // flags
+                0,
+                nullptr,  // attachments
+                1,
+                &subpass_desc,
+                0,
+                nullptr  // subpass dependencies
+            };
+
+            VkResult err = vk::CreateRenderPass(m_device->handle(), &render_pass_create_info, nullptr, &render_pass);
+            ASSERT_VK_SUCCESS(err);
+        }
+
+        VkGraphicsPipelineCreateInfo graphics_pipeline_create_info{
+            VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
+            nullptr,  // pNext
+            0,        // flags
+            1,        // stageCount
+            &vs.GetStageCreateInfo(),
+            &pipeline_vertex_input_state_create_info,
+            &pipeline_input_assembly_state_create_info,
+            nullptr,
+            &pipeline_viewport_state_create_info,
+            &pipeline_rasterization_state_create_info,
+            &pipeline_multisample_state_create_info,
+            reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo *>(hopefully_undereferencable_pointer),
+            reinterpret_cast<const VkPipelineColorBlendStateCreateInfo *>(hopefully_undereferencable_pointer),
+            nullptr,  // dynamic states
+            pipeline_layout,
+            render_pass,
+            0,  // subpass
+            VK_NULL_HANDLE,
+            0};
+
+        VkPipeline pipeline;
+        vk::CreateGraphicsPipelines(m_device->handle(), VK_NULL_HANDLE, 1, &graphics_pipeline_create_info, nullptr, &pipeline);
+
+        m_errorMonitor->VerifyNotFound();
+
+        vk::DestroyPipeline(m_device->handle(), pipeline, nullptr);
+        vk::DestroyRenderPass(m_device->handle(), render_pass, nullptr);
+    }
+
+    // try dynamic viewport and scissor
+    {
+        m_errorMonitor->ExpectSuccess();
+
+        VkPipelineRasterizationStateCreateInfo pipeline_rasterization_state_create_info =
+            pipeline_rasterization_state_create_info_template;
+        pipeline_rasterization_state_create_info.rasterizerDiscardEnable = VK_FALSE;
+
+        const VkPipelineViewportStateCreateInfo pipeline_viewport_state_create_info{
+            VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
+            nullptr,  // pNext
+            0,        // flags
+            1,
+            reinterpret_cast<const VkViewport *>(hopefully_undereferencable_pointer),
+            1,
+            reinterpret_cast<const VkRect2D *>(hopefully_undereferencable_pointer)};
+
+        const VkPipelineDepthStencilStateCreateInfo pipeline_depth_stencil_state_create_info{
+            VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
+            nullptr,  // pNext
+            0,        // flags
+        };
+
+        const VkPipelineColorBlendAttachmentState pipeline_color_blend_attachment_state = {};
+
+        const VkPipelineColorBlendStateCreateInfo pipeline_color_blend_state_create_info{
+            VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
+            nullptr,  // pNext
+            0,        // flags
+            VK_FALSE,
+            VK_LOGIC_OP_CLEAR,
+            1,
+            &pipeline_color_blend_attachment_state,
+            {0.0f, 0.0f, 0.0f, 0.0f}};
+
+        const VkDynamicState dynamic_states[2] = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
+
+        const VkPipelineDynamicStateCreateInfo pipeline_dynamic_state_create_info{
+            VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
+            nullptr,  // pNext
+            0,        // flags
+            2, dynamic_states};
+
+        VkGraphicsPipelineCreateInfo graphics_pipeline_create_info{VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
+                                                                   nullptr,  // pNext
+                                                                   0,        // flags
+                                                                   1,        // stageCount
+                                                                   &vs.GetStageCreateInfo(),
+                                                                   &pipeline_vertex_input_state_create_info,
+                                                                   &pipeline_input_assembly_state_create_info,
+                                                                   nullptr,
+                                                                   &pipeline_viewport_state_create_info,
+                                                                   &pipeline_rasterization_state_create_info,
+                                                                   &pipeline_multisample_state_create_info,
+                                                                   &pipeline_depth_stencil_state_create_info,
+                                                                   &pipeline_color_blend_state_create_info,
+                                                                   &pipeline_dynamic_state_create_info,  // dynamic states
+                                                                   pipeline_layout,
+                                                                   m_renderPass,
+                                                                   0,  // subpass
+                                                                   VK_NULL_HANDLE,
+                                                                   0};
+
+        VkPipeline pipeline;
+        vk::CreateGraphicsPipelines(m_device->handle(), VK_NULL_HANDLE, 1, &graphics_pipeline_create_info, nullptr, &pipeline);
+
+        m_errorMonitor->VerifyNotFound();
+
+        vk::DestroyPipeline(m_device->handle(), pipeline, nullptr);
+    }
+
+    vk::DestroyPipelineLayout(m_device->handle(), pipeline_layout, nullptr);
+}
+
+TEST_F(VkPositiveLayerTest, ExternalMemory) {
+    TEST_DESCRIPTION("Perform a copy through a pair of buffers linked by external memory");
+
+#ifdef _WIN32
+    const auto ext_mem_extension_name = VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME;
+    const auto handle_type = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR;
+#else
+    const auto ext_mem_extension_name = VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME;
+    const auto handle_type = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR;
+#endif
+
+    // Check for external memory instance extensions
+    std::vector<const char *> reqd_instance_extensions = {
+        {VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME}};
+    for (auto extension_name : reqd_instance_extensions) {
+        if (InstanceExtensionSupported(extension_name)) {
+            m_instance_extension_names.push_back(extension_name);
+        } else {
+            printf("%s Required instance extension %s not supported, skipping test\n", kSkipPrefix, extension_name);
+            return;
+        }
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    // Check for import/export capability
+    VkPhysicalDeviceExternalBufferInfoKHR ebi = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR, nullptr, 0,
+                                                 VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, handle_type};
+    VkExternalBufferPropertiesKHR ebp = {VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR, nullptr, {0, 0, 0}};
+    auto vkGetPhysicalDeviceExternalBufferPropertiesKHR =
+        (PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR)vk::GetInstanceProcAddr(
+            instance(), "vkGetPhysicalDeviceExternalBufferPropertiesKHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceExternalBufferPropertiesKHR != nullptr);
+    vkGetPhysicalDeviceExternalBufferPropertiesKHR(gpu(), &ebi, &ebp);
+    if (!(ebp.externalMemoryProperties.compatibleHandleTypes & handle_type) ||
+        !(ebp.externalMemoryProperties.externalMemoryFeatures & VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR) ||
+        !(ebp.externalMemoryProperties.externalMemoryFeatures & VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR)) {
+        printf("%s External buffer does not support importing and exporting, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    // Check if dedicated allocation is required
+    bool dedicated_allocation =
+        ebp.externalMemoryProperties.externalMemoryFeatures & VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_KHR;
+    if (dedicated_allocation) {
+        if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME)) {
+            m_device_extension_names.push_back(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME);
+            m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+        } else {
+            printf("%s Dedicated allocation extension not supported, skipping test\n", kSkipPrefix);
+            return;
+        }
+    }
+
+    // Check for external memory device extensions
+    if (DeviceExtensionSupported(gpu(), nullptr, ext_mem_extension_name)) {
+        m_device_extension_names.push_back(ext_mem_extension_name);
+        m_device_extension_names.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
+    } else {
+        printf("%s External memory extension not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    m_errorMonitor->ExpectSuccess(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT);
+
+    VkMemoryPropertyFlags mem_flags = 0;
+    const VkDeviceSize buffer_size = 1024;
+
+    // Create export and import buffers
+    const VkExternalMemoryBufferCreateInfoKHR external_buffer_info = {VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR,
+                                                                      nullptr, handle_type};
+    auto buffer_info = VkBufferObj::create_info(buffer_size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT);
+    buffer_info.pNext = &external_buffer_info;
+    VkBufferObj buffer_export;
+    buffer_export.init_no_mem(*m_device, buffer_info);
+    VkBufferObj buffer_import;
+    buffer_import.init_no_mem(*m_device, buffer_info);
+
+    // Allocation info
+    auto alloc_info = vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, buffer_export.memory_requirements(), mem_flags);
+
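+    // For an inline uniform block write, dstArrayElement is a byte offset into the block and
+    // descriptorCount is a byte count.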
+    // Add export allocation info to pNext chain
+    VkExportMemoryAllocateInfoKHR export_info = {VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR, nullptr, handle_type};
+    alloc_info.pNext = &export_info;
+
+    // Add dedicated allocation info to pNext chain if required
+    VkMemoryDedicatedAllocateInfoKHR dedicated_info = {VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR, nullptr,
+                                                       VK_NULL_HANDLE, buffer_export.handle()};
+    if (dedicated_allocation) {
+        export_info.pNext = &dedicated_info;
+    }
+
+    // Allocate memory to be exported
+    vk_testing::DeviceMemory memory_export;
+    memory_export.init(*m_device, alloc_info);
+
+    // Bind exported memory
+    buffer_export.bind_memory(memory_export, 0);
+
+#ifdef _WIN32
+    // Export memory to handle
+    auto vkGetMemoryWin32HandleKHR =
+        (PFN_vkGetMemoryWin32HandleKHR)vk::GetInstanceProcAddr(instance(), "vkGetMemoryWin32HandleKHR");
+    ASSERT_TRUE(vkGetMemoryWin32HandleKHR != nullptr);
+    VkMemoryGetWin32HandleInfoKHR mghi = {VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR, nullptr, memory_export.handle(),
+                                          handle_type};
+    HANDLE handle;
+    ASSERT_VK_SUCCESS(vkGetMemoryWin32HandleKHR(m_device->device(), &mghi, &handle));
+
+    VkImportMemoryWin32HandleInfoKHR import_info = {VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR, nullptr, handle_type,
+                                                    handle};
+#else
+    // Export memory to fd
+    auto vkGetMemoryFdKHR = (PFN_vkGetMemoryFdKHR)vk::GetInstanceProcAddr(instance(), "vkGetMemoryFdKHR");
+    ASSERT_TRUE(vkGetMemoryFdKHR != nullptr);
+    VkMemoryGetFdInfoKHR mgfi = {VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR, nullptr, memory_export.handle(), handle_type};
+    int fd;
+    ASSERT_VK_SUCCESS(vkGetMemoryFdKHR(m_device->device(), &mgfi, &fd));
+
+    VkImportMemoryFdInfoKHR import_info = {VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR, nullptr, handle_type, fd};
+#endif
+
+    // Import memory
+    alloc_info = vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, buffer_import.memory_requirements(), mem_flags);
+    alloc_info.pNext = &import_info;
+    vk_testing::DeviceMemory memory_import;
+    memory_import.init(*m_device, alloc_info);
+
+    // Bind imported memory
+    buffer_import.bind_memory(memory_import, 0);
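+    // buffer_export and buffer_import now alias the same underlying allocation, so data written
+    // through the export buffer is expected to be readable through the import buffer.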
+
+    // Create test buffers and fill input buffer
+    VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+    VkBufferObj buffer_input;
+    buffer_input.init_as_src_and_dst(*m_device, buffer_size, mem_prop);
+    auto input_mem = (uint8_t *)buffer_input.memory().map();
+    for (uint32_t i = 0; i < buffer_size; i++) {
+        input_mem[i] = (i & 0xFF);
+    }
+    buffer_input.memory().unmap();
+    VkBufferObj buffer_output;
+    buffer_output.init_as_src_and_dst(*m_device, buffer_size, mem_prop);
+
+    // Copy from input buffer to output buffer through the exported/imported memory
+    m_commandBuffer->begin();
+    VkBufferCopy copy_info = {0, 0, buffer_size};
+    vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_input.handle(), buffer_export.handle(), 1, &copy_info);
+    // Insert memory barrier to guarantee copy order
+    VkMemoryBarrier mem_barrier = {VK_STRUCTURE_TYPE_MEMORY_BARRIER, nullptr, VK_ACCESS_TRANSFER_WRITE_BIT,
+                                   VK_ACCESS_TRANSFER_READ_BIT};
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1,
+                           &mem_barrier, 0, nullptr, 0, nullptr);
+    vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_import.handle(), buffer_output.handle(), 1, &copy_info);
+    m_commandBuffer->end();
+    m_commandBuffer->QueueCommandBuffer();
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, ParameterLayerFeatures2Capture) {
+    TEST_DESCRIPTION("Ensure parameter_validation_layer correctly captures physical device features");
+    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    } else {
+        printf("%s Did not find VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+    VkResult err;
+    m_errorMonitor->ExpectSuccess();
+
+    VkPhysicalDeviceFeatures2KHR features2;
+    features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR;
+    features2.pNext = nullptr;
+
+    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
+
+    // We're not creating a valid m_device, but the phy wrapper is useful
+    vk_testing::PhysicalDevice physical_device(gpu());
+    vk_testing::QueueCreateInfoArray queue_info(physical_device.queue_properties());
+    // Only request creation with queuefamilies that have at least one queue
+    std::vector<VkDeviceQueueCreateInfo> create_queue_infos;
+    auto qci = queue_info.data();
+    for (uint32_t i = 0; i < queue_info.size(); ++i) {
+        if (qci[i].queueCount) {
+            create_queue_infos.push_back(qci[i]);
+        }
+    }
+
+    VkDeviceCreateInfo dev_info = {};
+    dev_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
+    dev_info.pNext = &features2;
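+    // With VkPhysicalDeviceFeatures2 chained here, pEnabledFeatures must stay NULL (set below).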
+    dev_info.flags = 0;
+    dev_info.queueCreateInfoCount = create_queue_infos.size();
+    dev_info.pQueueCreateInfos = create_queue_infos.data();
+    dev_info.enabledLayerCount = 0;
+    dev_info.ppEnabledLayerNames = nullptr;
+    dev_info.enabledExtensionCount = 0;
+    dev_info.ppEnabledExtensionNames = nullptr;
+    dev_info.pEnabledFeatures = nullptr;
+
+    VkDevice device;
+    err = vk::CreateDevice(gpu(), &dev_info, nullptr, &device);
+    ASSERT_VK_SUCCESS(err);
+
+    if (features2.features.samplerAnisotropy) {
+        // Test that the parameter layer is caching the features correctly using CreateSampler
+        VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+        // If the features were not captured correctly, this should cause an error
+        sampler_ci.anisotropyEnable = VK_TRUE;
+        sampler_ci.maxAnisotropy = physical_device.properties().limits.maxSamplerAnisotropy;
+
+        VkSampler sampler = VK_NULL_HANDLE;
+        err = vk::CreateSampler(device, &sampler_ci, nullptr, &sampler);
+        ASSERT_VK_SUCCESS(err);
+        vk::DestroySampler(device, sampler, nullptr);
+    } else {
+        printf("%s Feature samplerAnisotropy not enabled;  parameter_layer check skipped.\n", kSkipPrefix);
+    }
+
+    // Verify the core validation layer has captured the physical device features by creating a query pool.
+    if (features2.features.pipelineStatisticsQuery) {
+        VkQueryPool query_pool;
+        VkQueryPoolCreateInfo qpci{};
+        qpci.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+        qpci.queryType = VK_QUERY_TYPE_PIPELINE_STATISTICS;
+        qpci.queryCount = 1;
+        err = vk::CreateQueryPool(device, &qpci, nullptr, &query_pool);
+        ASSERT_VK_SUCCESS(err);
+
+        vk::DestroyQueryPool(device, query_pool, nullptr);
+    } else {
+        printf("%s Feature pipelineStatisticsQuery not enabled;  core_validation_layer check skipped.\n", kSkipPrefix);
+    }
+
+    vk::DestroyDevice(device, nullptr);
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, GetMemoryRequirements2) {
+    TEST_DESCRIPTION(
+        "Get memory requirements with VK_KHR_get_memory_requirements2 instead of core entry points and verify layers do not emit "
+        "errors when objects are bound and used");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    // Check for the VK_KHR_get_memory_requirements2 extension
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    } else {
+        printf("%s %s not supported, skipping test\n", kSkipPrefix, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    m_errorMonitor->ExpectSuccess(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT);
+
+    // Create a test buffer
+    VkBufferObj buffer;
+    buffer.init_no_mem(*m_device,
+                       VkBufferObj::create_info(1024, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT));
+
+    // Use extension to get buffer memory requirements
+    auto vkGetBufferMemoryRequirements2KHR = reinterpret_cast<PFN_vkGetBufferMemoryRequirements2KHR>(
+        vk::GetDeviceProcAddr(m_device->device(), "vkGetBufferMemoryRequirements2KHR"));
+    ASSERT_TRUE(vkGetBufferMemoryRequirements2KHR != nullptr);
+    VkBufferMemoryRequirementsInfo2KHR buffer_info = {VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR, nullptr,
+                                                      buffer.handle()};
+    VkMemoryRequirements2KHR buffer_reqs = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR};
+    vkGetBufferMemoryRequirements2KHR(m_device->device(), &buffer_info, &buffer_reqs);
+
+    // Allocate and bind buffer memory
+    vk_testing::DeviceMemory buffer_memory;
+    buffer_memory.init(*m_device, vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, buffer_reqs.memoryRequirements, 0));
+    vk::BindBufferMemory(m_device->device(), buffer.handle(), buffer_memory.handle(), 0);
+
+    // Create a test image
+    auto image_ci = vk_testing::Image::create_info();
+    image_ci.imageType = VK_IMAGE_TYPE_2D;
+    image_ci.extent.width = 32;
+    image_ci.extent.height = 32;
+    image_ci.format = VK_FORMAT_R8G8B8A8_UNORM;
+    image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_ci.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    vk_testing::Image image;
+    image.init_no_mem(*m_device, image_ci);
+
+    // Use extension to get image memory requirements
+    auto vkGetImageMemoryRequirements2KHR = reinterpret_cast<PFN_vkGetImageMemoryRequirements2KHR>(
+        vk::GetDeviceProcAddr(m_device->device(), "vkGetImageMemoryRequirements2KHR"));
+    ASSERT_TRUE(vkGetImageMemoryRequirements2KHR != nullptr);
+    VkImageMemoryRequirementsInfo2KHR image_info = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR, nullptr,
+                                                    image.handle()};
+    VkMemoryRequirements2KHR image_reqs = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR};
+    vkGetImageMemoryRequirements2KHR(m_device->device(), &image_info, &image_reqs);
+
+    // Allocate and bind image memory
+    vk_testing::DeviceMemory image_memory;
+    image_memory.init(*m_device, vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, image_reqs.memoryRequirements, 0));
+    vk::BindImageMemory(m_device->device(), image.handle(), image_memory.handle(), 0);
+
+    // Now execute arbitrary commands that use the test buffer and image
+    m_commandBuffer->begin();
+
+    // Fill buffer with 0
+    vk::CmdFillBuffer(m_commandBuffer->handle(), buffer.handle(), 0, VK_WHOLE_SIZE, 0);
+
+    // Transition and clear image
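+    // The contents start undefined, so transition UNDEFINED -> GENERAL first; vkCmdClearColorImage requires GENERAL or
+    // TRANSFER_DST_OPTIMAL layout.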
+    const auto subresource_range = image.subresource_range(VK_IMAGE_ASPECT_COLOR_BIT);
+    const auto barrier = image.image_memory_barrier(0, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED,
+                                                    VK_IMAGE_LAYOUT_GENERAL, subresource_range);
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+                           nullptr, 0, nullptr, 1, &barrier);
+    const VkClearColorValue color = {};
+    vk::CmdClearColorImage(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, &color, 1, &subresource_range);
+
+    // Submit and verify no validation errors
+    m_commandBuffer->end();
+    m_commandBuffer->QueueCommandBuffer();
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, BindMemory2) {
+    TEST_DESCRIPTION(
+        "Bind memory with VK_KHR_bind_memory2 instead of core entry points and verify layers do not emit errors when objects are "
+        "used");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    // Check for the VK_KHR_bind_memory2 extension
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+    } else {
+        printf("%s %s not supported, skipping test\n", kSkipPrefix, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    m_errorMonitor->ExpectSuccess(VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT);
+
+    // Create a test buffer
+    VkBufferObj buffer;
+    buffer.init_no_mem(*m_device, VkBufferObj::create_info(1024, VK_BUFFER_USAGE_TRANSFER_DST_BIT));
+
+    // Allocate buffer memory
+    vk_testing::DeviceMemory buffer_memory;
+    buffer_memory.init(*m_device, vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, buffer.memory_requirements(), 0));
+
+    // Bind buffer memory with extension
+    auto vkBindBufferMemory2KHR =
+        reinterpret_cast<PFN_vkBindBufferMemory2KHR>(vk::GetDeviceProcAddr(m_device->device(), "vkBindBufferMemory2KHR"));
+    ASSERT_TRUE(vkBindBufferMemory2KHR != nullptr);
+    VkBindBufferMemoryInfoKHR buffer_bind_info = {VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR, nullptr, buffer.handle(),
+                                                  buffer_memory.handle(), 0};
+    vkBindBufferMemory2KHR(m_device->device(), 1, &buffer_bind_info);
+
+    // Create a test image
+    auto image_ci = vk_testing::Image::create_info();
+    image_ci.imageType = VK_IMAGE_TYPE_2D;
+    image_ci.extent.width = 32;
+    image_ci.extent.height = 32;
+    image_ci.format = VK_FORMAT_R8G8B8A8_UNORM;
+    image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_ci.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    vk_testing::Image image;
+    image.init_no_mem(*m_device, image_ci);
+
+    // Allocate image memory
+    vk_testing::DeviceMemory image_memory;
+    image_memory.init(*m_device, vk_testing::DeviceMemory::get_resource_alloc_info(*m_device, image.memory_requirements(), 0));
+
+    // Bind image memory with extension
+    auto vkBindImageMemory2KHR =
+        reinterpret_cast<PFN_vkBindImageMemory2KHR>(vk::GetDeviceProcAddr(m_device->device(), "vkBindImageMemory2KHR"));
+    ASSERT_TRUE(vkBindImageMemory2KHR != nullptr);
+    VkBindImageMemoryInfoKHR image_bind_info = {VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR, nullptr, image.handle(),
+                                                image_memory.handle(), 0};
+    vkBindImageMemory2KHR(m_device->device(), 1, &image_bind_info);
+
+    // Now execute arbitrary commands that use the test buffer and image
+    m_commandBuffer->begin();
+
+    // Fill buffer with 0
+    vk::CmdFillBuffer(m_commandBuffer->handle(), buffer.handle(), 0, VK_WHOLE_SIZE, 0);
+
+    // Transition and clear image
+    const auto subresource_range = image.subresource_range(VK_IMAGE_ASPECT_COLOR_BIT);
+    const auto barrier = image.image_memory_barrier(0, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED,
+                                                    VK_IMAGE_LAYOUT_GENERAL, subresource_range);
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0,
+                           nullptr, 0, nullptr, 1, &barrier);
+    const VkClearColorValue color = {};
+    vk::CmdClearColorImage(m_commandBuffer->handle(), image.handle(), VK_IMAGE_LAYOUT_GENERAL, &color, 1, &subresource_range);
+
+    // Submit and verify no validation errors
+    m_commandBuffer->end();
+    m_commandBuffer->QueueCommandBuffer();
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, CreatePipelineWithCoreChecksDisabled) {
+    TEST_DESCRIPTION("Test CreatePipeline while the CoreChecks validation object is disabled");
+
+    // Enable KHR validation features extension
+    VkValidationFeatureDisableEXT disables[] = {VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT};
+    VkValidationFeaturesEXT features = {};
+    features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
+    features.disabledValidationFeatureCount = 1;
+    features.pDisabledValidationFeatures = disables;
+
+    VkCommandPoolCreateFlags pool_flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+    ASSERT_NO_FATAL_FAILURE(Init(nullptr, nullptr, pool_flags, &features));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+    VkPipelineInputAssemblyStateCreateInfo iasci{VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, nullptr, 0,
+                                                 VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, VK_FALSE};
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.gp_ci_.pInputAssemblyState = &iasci;
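+    // PATCH_LIST topology without tessellation stages would normally draw a core-checks error; with CoreChecks
+    // disabled, pipeline creation is expected to succeed silently.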
+    pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
+    pipe.InitState();
+    m_errorMonitor->ExpectSuccess();
+    pipe.CreateGraphicsPipeline();
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, CreatePipelineWithTessellationDomainOrigin) {
+    TEST_DESCRIPTION(
+        "Test CreatePipeline when the VkPipelineTessellationStateCreateInfo.pNext chain includes "
+        "VkPipelineTessellationDomainOriginStateCreateInfo");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    if (!m_device->phy().features().tessellationShader) {
+        printf("%s Device does not support tessellation shaders; skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj tcs(m_device, bindStateTscShaderText, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
+    VkShaderObj tes(m_device, bindStateTeshaderText, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this);
+    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    VkPipelineInputAssemblyStateCreateInfo iasci{VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, nullptr, 0,
+                                                 VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, VK_FALSE};
+
+    VkPipelineTessellationDomainOriginStateCreateInfo tessellationDomainOriginStateInfo = {
+        VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO, nullptr,
+        VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT};
+
+    VkPipelineTessellationStateCreateInfo tsci{VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,
+                                               &tessellationDomainOriginStateInfo, 0, 3};
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.gp_ci_.pTessellationState = &tsci;
+    pipe.gp_ci_.pInputAssemblyState = &iasci;
+    pipe.shader_stages_ = {vs.GetStageCreateInfo(), tcs.GetStageCreateInfo(), tes.GetStageCreateInfo(), fs.GetStageCreateInfo()};
+    pipe.InitState();
+    m_errorMonitor->ExpectSuccess();
+    pipe.CreateGraphicsPipeline();
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, MultiplaneImageCopyBufferToImage) {
+    TEST_DESCRIPTION("Positive test of multiplane copy buffer to image");
+    // Enable KHR multiplane req'd extensions
+    bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
+                                                    VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION);
+    if (mp_extensions) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+    SetTargetApiVersion(VK_API_VERSION_1_1);
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+    if (mp_extensions) {
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
+    } else {
+        printf("%s test requires KHR multiplane extensions, not available.  Skipping.\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkImageCreateInfo ci = {};
+    ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    ci.pNext = NULL;
+    ci.flags = 0;
+    ci.imageType = VK_IMAGE_TYPE_2D;
+    ci.format = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR;  // All planes of equal extent
+    ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+    ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
+    ci.extent = {16, 16, 1};
+    ci.mipLevels = 1;
+    ci.arrayLayers = 1;
+    ci.samples = VK_SAMPLE_COUNT_1_BIT;
+    ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+    VkFormatFeatureFlags features = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT | VK_FORMAT_FEATURE_TRANSFER_DST_BIT;
+    bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), ci, features);
+    if (!supported) {
+        printf("%s Multiplane image format not supported.  Skipping test.\n", kSkipPrefix);
+        return;  // Assume there's low ROI on searching for different mp formats
+    }
+
+    VkImageObj image(m_device);
+    image.init(&ci);
+
+    m_commandBuffer->reset();
+    m_errorMonitor->ExpectSuccess();
+    m_commandBuffer->begin();
+    image.ImageMemoryBarrier(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, 0, VK_ACCESS_TRANSFER_WRITE_BIT,
+                             VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+
+    std::array<VkImageAspectFlagBits, 3> aspects = {VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT,
+                                                    VK_IMAGE_ASPECT_PLANE_2_BIT};
+    std::array<VkBufferObj, 3> buffers;
+    VkMemoryPropertyFlags reqs = 0;
+
+    VkBufferImageCopy copy = {};
+    copy.imageSubresource.layerCount = 1;
+    copy.imageExtent.depth = 1;
+    copy.imageExtent.height = 16;
+    copy.imageExtent.width = 16;
+
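+    // G8_B8_R8_3PLANE_444 has three full-extent 8-bit planes, so each per-plane staging buffer needs 16 * 16 bytes.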
+    for (size_t i = 0; i < aspects.size(); ++i) {
+        buffers[i].init_as_src(*m_device, (VkDeviceSize)16 * 16 * 1, reqs);
+        copy.imageSubresource.aspectMask = aspects[i];
+        vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffers[i].handle(), image.handle(),
+                                 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &copy);
+    }
+    m_commandBuffer->end();
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, MultiplaneImageTests) {
+    TEST_DESCRIPTION("Positive test of multiplane image operations");
+
+    // Enable KHR multiplane req'd extensions
+    bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
+                                                    VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION);
+    if (mp_extensions) {
+        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    }
+    SetTargetApiVersion(VK_API_VERSION_1_1);
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+    if (mp_extensions) {
+        m_device_extension_names.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
+        m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
+    } else {
+        printf("%s test requires KHR multiplane extensions, not available.  Skipping.\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    // Create aliased function pointers for 1.0 and 1.1 contexts
+
+    PFN_vkBindImageMemory2KHR vkBindImageMemory2Function = nullptr;
+    PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2Function = nullptr;
+    PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2Function = nullptr;
+
+    if (DeviceValidationVersion() >= VK_API_VERSION_1_1) {
+        vkBindImageMemory2Function = vk::BindImageMemory2;
+        vkGetImageMemoryRequirements2Function = vk::GetImageMemoryRequirements2;
+        vkGetPhysicalDeviceMemoryProperties2Function = vk::GetPhysicalDeviceMemoryProperties2;
+    } else {
+        vkBindImageMemory2Function = (PFN_vkBindImageMemory2KHR)vk::GetDeviceProcAddr(m_device->handle(), "vkBindImageMemory2KHR");
+        vkGetImageMemoryRequirements2Function =
+            (PFN_vkGetImageMemoryRequirements2KHR)vk::GetDeviceProcAddr(m_device->handle(), "vkGetImageMemoryRequirements2KHR");
+        vkGetPhysicalDeviceMemoryProperties2Function = (PFN_vkGetPhysicalDeviceMemoryProperties2KHR)vk::GetDeviceProcAddr(
+            m_device->handle(), "vkGetPhysicalDeviceMemoryProperties2KHR");
+    }
+
+    if (!vkBindImageMemory2Function || !vkGetImageMemoryRequirements2Function || !vkGetPhysicalDeviceMemoryProperties2Function) {
+        printf("%s Did not find required device extension support; test skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    VkImageCreateInfo ci = {};
+    ci.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    ci.pNext = NULL;
+    ci.flags = 0;
+    ci.imageType = VK_IMAGE_TYPE_2D;
+    ci.format = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR;  // All planes of equal extent
+    ci.tiling = VK_IMAGE_TILING_OPTIMAL;
+    ci.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
+    ci.extent = {128, 128, 1};
+    ci.mipLevels = 1;
+    ci.arrayLayers = 1;
+    ci.samples = VK_SAMPLE_COUNT_1_BIT;
+    ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    ci.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+    // Verify format
+    VkFormatFeatureFlags features = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT | VK_FORMAT_FEATURE_TRANSFER_DST_BIT;
+    bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), ci, features);
+    if (!supported) {
+        printf("%s Multiplane image format not supported.  Skipping test.\n", kSkipPrefix);
+        return;  // Assume there's low ROI on searching for different mp formats
+    }
+
+    VkImage image;
+    ASSERT_VK_SUCCESS(vk::CreateImage(device(), &ci, NULL, &image));
+
+    // Allocate & bind memory
+    VkPhysicalDeviceMemoryProperties phys_mem_props;
+    vk::GetPhysicalDeviceMemoryProperties(gpu(), &phys_mem_props);
+    VkMemoryRequirements mem_reqs;
+    vk::GetImageMemoryRequirements(device(), image, &mem_reqs);
+    VkDeviceMemory mem_obj = VK_NULL_HANDLE;
+    VkMemoryPropertyFlagBits mem_props = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+    for (uint32_t type = 0; type < phys_mem_props.memoryTypeCount; type++) {
+        if ((mem_reqs.memoryTypeBits & (1 << type)) &&
+            ((phys_mem_props.memoryTypes[type].propertyFlags & mem_props) == mem_props)) {
+            VkMemoryAllocateInfo alloc_info = {};
+            alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+            alloc_info.allocationSize = mem_reqs.size;
+            alloc_info.memoryTypeIndex = type;
+            ASSERT_VK_SUCCESS(vk::AllocateMemory(device(), &alloc_info, NULL, &mem_obj));
+            break;
+        }
+    }
+
+    if (VK_NULL_HANDLE == mem_obj) {
+        printf("%s Unable to allocate image memory. Skipping test.\n", kSkipPrefix);
+        vk::DestroyImage(device(), image, NULL);
+        return;
+    }
+    ASSERT_VK_SUCCESS(vk::BindImageMemory(device(), image, mem_obj, 0));
+
+    // Copy plane 0 to plane 2
+    VkImageCopy copyRegion = {};
+    copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_0_BIT_KHR;
+    copyRegion.srcSubresource.mipLevel = 0;
+    copyRegion.srcSubresource.baseArrayLayer = 0;
+    copyRegion.srcSubresource.layerCount = 1;
+    copyRegion.srcOffset = {0, 0, 0};
+    copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR;
+    copyRegion.dstSubresource.mipLevel = 0;
+    copyRegion.dstSubresource.baseArrayLayer = 0;
+    copyRegion.dstSubresource.layerCount = 1;
+    copyRegion.dstOffset = {0, 0, 0};
+    copyRegion.extent.width = 128;
+    copyRegion.extent.height = 128;
+    copyRegion.extent.depth = 1;
+
+    m_errorMonitor->ExpectSuccess();
+    m_commandBuffer->begin();
+    m_commandBuffer->CopyImage(image, VK_IMAGE_LAYOUT_GENERAL, image, VK_IMAGE_LAYOUT_GENERAL, 1, &copyRegion);
+    m_commandBuffer->end();
+    m_errorMonitor->VerifyNotFound();
+
+    vk::FreeMemory(device(), mem_obj, NULL);
+    vk::DestroyImage(device(), image, NULL);
+
+    // Repeat bind test on a DISJOINT multi-planar image, with per-plane memory objects, using API2 variants
+    features |= VK_FORMAT_FEATURE_DISJOINT_BIT;
+    ci.flags = VK_IMAGE_CREATE_DISJOINT_BIT;
+    if (ImageFormatAndFeaturesSupported(instance(), gpu(), ci, features)) {
+        ASSERT_VK_SUCCESS(vk::CreateImage(device(), &ci, NULL, &image));
+
+        // Allocate & bind memory
+        VkPhysicalDeviceMemoryProperties2 phys_mem_props2 = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2};
+        vkGetPhysicalDeviceMemoryProperties2Function(gpu(), &phys_mem_props2);
+        VkImagePlaneMemoryRequirementsInfo image_plane_req = {VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO};
+        VkImageMemoryRequirementsInfo2 mem_req_info2 = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2};
+        mem_req_info2.pNext = &image_plane_req;
+        mem_req_info2.image = image;
+        VkMemoryRequirements2 mem_reqs2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2};
+
+        VkDeviceMemory p0_mem, p1_mem, p2_mem;
+        mem_props = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+        VkMemoryAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO};
+
+        // Plane 0
+        image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT;
+        vkGetImageMemoryRequirements2Function(device(), &mem_req_info2, &mem_reqs2);
+        uint32_t mem_type = 0;
+        for (mem_type = 0; mem_type < phys_mem_props2.memoryProperties.memoryTypeCount; mem_type++) {
+            if ((mem_reqs2.memoryRequirements.memoryTypeBits & (1 << mem_type)) &&
+                ((phys_mem_props2.memoryProperties.memoryTypes[mem_type].propertyFlags & mem_props) == mem_props)) {
+                alloc_info.memoryTypeIndex = mem_type;
+                break;
+            }
+        }
+        alloc_info.allocationSize = mem_reqs2.memoryRequirements.size;
+        ASSERT_VK_SUCCESS(vk::AllocateMemory(device(), &alloc_info, NULL, &p0_mem));
+
+        // Planes 1 & 2 reuse the memory type selected for plane 0
+        image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_1_BIT;
+        vkGetImageMemoryRequirements2Function(device(), &mem_req_info2, &mem_reqs2);
+        alloc_info.allocationSize = mem_reqs2.memoryRequirements.size;
+        ASSERT_VK_SUCCESS(vk::AllocateMemory(device(), &alloc_info, NULL, &p1_mem));
+
+        image_plane_req.planeAspect = VK_IMAGE_ASPECT_PLANE_2_BIT;
+        vkGetImageMemoryRequirements2Function(device(), &mem_req_info2, &mem_reqs2);
+        alloc_info.allocationSize = mem_reqs2.memoryRequirements.size;
+        ASSERT_VK_SUCCESS(vk::AllocateMemory(device(), &alloc_info, NULL, &p2_mem));
+
+        // Set up 3-plane binding
+        VkBindImageMemoryInfo bind_info[3];
+        for (int plane = 0; plane < 3; plane++) {
+            bind_info[plane].sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
+            bind_info[plane].pNext = nullptr;
+            bind_info[plane].image = image;
+            bind_info[plane].memoryOffset = 0;
+        }
+        bind_info[0].memory = p0_mem;
+        bind_info[1].memory = p1_mem;
+        bind_info[2].memory = p2_mem;
+
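+        // A single vkBindImageMemory2 call binds all three planes of the disjoint image.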
+        m_errorMonitor->ExpectSuccess();
+        vkBindImageMemory2Function(device(), 3, bind_info);
+        m_errorMonitor->VerifyNotFound();
+
+        vk::FreeMemory(device(), p0_mem, NULL);
+        vk::FreeMemory(device(), p1_mem, NULL);
+        vk::FreeMemory(device(), p2_mem, NULL);
+        vk::DestroyImage(device(), image, NULL);
+    }
+
+    // Test that changing the layout of ASPECT_COLOR also changes the layout of the individual planes
+    VkBufferObj buffer;
+    VkMemoryPropertyFlags reqs = 0;
+    buffer.init_as_src(*m_device, (VkDeviceSize)128 * 128 * 3, reqs);
+    VkImageObj mpimage(m_device);
+    mpimage.Init(256, 256, 1, VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT,
+                 VK_IMAGE_TILING_OPTIMAL, 0);
+    VkBufferImageCopy copy_region = {};
+    copy_region.bufferRowLength = 128;
+    copy_region.bufferImageHeight = 128;
+    copy_region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_PLANE_1_BIT_KHR;
+    copy_region.imageSubresource.layerCount = 1;
+    copy_region.imageExtent.height = 64;
+    copy_region.imageExtent.width = 64;
+    copy_region.imageExtent.depth = 1;
+
+    vk::ResetCommandBuffer(m_commandBuffer->handle(), 0);
+    m_commandBuffer->begin();
+    mpimage.ImageMemoryBarrier(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+    vk::CmdCopyBufferToImage(m_commandBuffer->handle(), buffer.handle(), mpimage.handle(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1,
+                             &copy_region);
+    m_commandBuffer->end();
+    m_commandBuffer->QueueCommandBuffer(false);
+    m_errorMonitor->VerifyNotFound();
+
+    // Test to verify that views of multiplanar images have layouts tracked correctly
+    // by changing the image's layout then using a view of that image
+    VkImageView view;
+    VkImageViewCreateInfo ivci = {};
+    ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    ivci.image = mpimage.handle();
+    ivci.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    ivci.format = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR;
+    ivci.subresourceRange.layerCount = 1;
+    ivci.subresourceRange.baseMipLevel = 0;
+    ivci.subresourceRange.levelCount = 1;
+    ivci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    vk::CreateImageView(m_device->device(), &ivci, nullptr, &view);
+
+    OneOffDescriptorSet descriptor_set(m_device,
+                                       {
+                                           {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
+                                       });
+
+    VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+    VkSampler sampler;
+
+    VkResult err;
+    err = vk::CreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
+    ASSERT_VK_SUCCESS(err);
+
+    const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});
+    descriptor_set.WriteDescriptorImageInfo(0, view, sampler);
+    descriptor_set.UpdateDescriptorSets();
+
+    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj fs(m_device, bindStateFragSamplerShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+    VkPipelineObj pipe(m_device);
+    pipe.AddShader(&vs);
+    pipe.AddShader(&fs);
+    pipe.AddDefaultColorAttachment();
+    pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());
+
+    m_errorMonitor->ExpectSuccess();
+    m_commandBuffer->begin();
+    VkImageMemoryBarrier img_barrier = {};
+    img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+    img_barrier.srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
+    img_barrier.dstAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
+    img_barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
+    img_barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+    img_barrier.image = mpimage.handle();
+    img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    img_barrier.subresourceRange.baseArrayLayer = 0;
+    img_barrier.subresourceRange.baseMipLevel = 0;
+    img_barrier.subresourceRange.layerCount = 1;
+    img_barrier.subresourceRange.levelCount = 1;
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+                           VK_DEPENDENCY_BY_REGION_BIT, 0, nullptr, 0, nullptr, 1, &img_barrier);
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
+                              &descriptor_set.set_, 0, nullptr);
+
+    VkViewport viewport = {0, 0, 16, 16, 0, 1};
+    VkRect2D scissor = {{0, 0}, {16, 16}};
+    vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
+    vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
+
+    m_commandBuffer->Draw(1, 0, 0, 0);
+    m_commandBuffer->EndRenderPass();
+    m_commandBuffer->end();
+    VkSubmitInfo submit_info = {};
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyNotFound();
+
+    vk::QueueWaitIdle(m_device->m_queue);
+    vk::DestroyImageView(m_device->device(), view, NULL);
+    vk::DestroySampler(m_device->device(), sampler, nullptr);
+}
+
+TEST_F(VkPositiveLayerTest, ApiVersionZero) {
+    TEST_DESCRIPTION("Check that apiVersion = 0 is valid.");
+    m_errorMonitor->ExpectSuccess();
+    app_info.apiVersion = 0U;
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, RayTracingPipelineNV) {
+    TEST_DESCRIPTION("Test VK_NV_ray_tracing.");
+
+    if (!CreateNVRayTracingPipelineHelper::InitInstanceExtensions(*this, m_instance_extension_names)) {
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
+        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
+    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
+
+    if (!CreateNVRayTracingPipelineHelper::InitDeviceExtensions(*this, m_device_extension_names)) {
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    auto ignore_update = [](CreateNVRayTracingPipelineHelper &helper) {};
+    CreateNVRayTracingPipelineHelper::OneshotPositiveTest(*this, ignore_update);
+}
+
+TEST_F(VkPositiveLayerTest, ViewportArray2NV) {
+    TEST_DESCRIPTION("Test to validate VK_NV_viewport_array2");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    VkPhysicalDeviceFeatures available_features = {};
+    ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&available_features));
+
+    if (!available_features.multiViewport) {
+        printf("%s VkPhysicalDeviceFeatures::multiViewport is not supported, skipping tests\n", kSkipPrefix);
+        return;
+    }
+    if (!available_features.tessellationShader) {
+        printf("%s VkPhysicalDeviceFeatures::tessellationShader is not supported, skipping tests\n", kSkipPrefix);
+        return;
+    }
+    if (!available_features.geometryShader) {
+        printf("%s VkPhysicalDeviceFeatures::geometryShader is not supported, skipping tests\n", kSkipPrefix);
+        return;
+    }
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME);
+    } else {
+        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    const char tcs_src[] = R"(
+        #version 450
+        layout(vertices = 3) out;
+
+        void main() {
+            gl_TessLevelOuter[0] = 4.0f;
+            gl_TessLevelOuter[1] = 4.0f;
+            gl_TessLevelOuter[2] = 4.0f;
+            gl_TessLevelInner[0] = 3.0f;
+
+            gl_out[gl_InvocationID].gl_Position = gl_in[gl_InvocationID].gl_Position;
+        })";
+
+    // Create tessellation control and fragment shader here since they will not be
+    // modified by the different test cases.
+    VkShaderObj tcs(m_device, tcs_src, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, this);
+    VkShaderObj fs(m_device, bindStateFragShaderText, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    std::vector<VkViewport> vps = {{0.0f, 0.0f, m_width / 2.0f, m_height}, {m_width / 2.0f, 0.0f, m_width / 2.0f, m_height}};
+    std::vector<VkRect2D> scs = {
+        {{0, 0}, {static_cast<uint32_t>(m_width) / 2, static_cast<uint32_t>(m_height)}},
+        {{static_cast<int32_t>(m_width) / 2, 0}, {static_cast<uint32_t>(m_width) / 2, static_cast<uint32_t>(m_height)}}};
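+    // Two side-by-side viewports; gl_ViewportMask[0] = 3 in the shaders below selects both (bits 0 and 1).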
+
+    enum class TestStage { VERTEX = 0, TESSELLATION_EVAL = 1, GEOMETRY = 2 };
+    std::array<TestStage, 3> vertex_stages = {{TestStage::VERTEX, TestStage::TESSELLATION_EVAL, TestStage::GEOMETRY}};
+
+    // Verify that the usage of gl_ViewportMask[] in the allowed vertex processing
+    // stages does not cause any errors.
+    for (auto stage : vertex_stages) {
+        m_errorMonitor->ExpectSuccess();
+
+        VkPipelineInputAssemblyStateCreateInfo iaci = {VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO};
+        iaci.topology = (stage != TestStage::VERTEX) ? VK_PRIMITIVE_TOPOLOGY_PATCH_LIST : VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
+
+        VkPipelineTessellationStateCreateInfo tsci = {VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO};
+        tsci.patchControlPoints = 3;
+
+        const VkPipelineLayoutObj pl(m_device);
+
+        VkPipelineObj pipe(m_device);
+        pipe.AddDefaultColorAttachment();
+        pipe.SetInputAssembly(&iaci);
+        pipe.SetViewport(vps);
+        pipe.SetScissor(scs);
+        pipe.AddShader(&fs);
+
+        std::stringstream vs_src, tes_src, geom_src;
+
+        vs_src << R"(
+            #version 450
+            #extension GL_NV_viewport_array2 : require
+
+            vec2 positions[3] = { vec2( 0.0f, -0.5f),
+                                  vec2( 0.5f,  0.5f),
+                                  vec2(-0.5f,  0.5f)
+                                };
+            void main() {)";
+        // Write viewportMask if the vertex shader is the last vertex processing stage.
+        if (stage == TestStage::VERTEX) {
+            vs_src << "gl_ViewportMask[0] = 3;\n";
+        }
+        vs_src << R"(
+                gl_Position = vec4(positions[gl_VertexIndex % 3], 0.0, 1.0);
+            })";
+
+        VkShaderObj vs(m_device, vs_src.str().c_str(), VK_SHADER_STAGE_VERTEX_BIT, this);
+        pipe.AddShader(&vs);
+
+        std::unique_ptr<VkShaderObj> tes, geom;
+
+        if (stage >= TestStage::TESSELLATION_EVAL) {
+            tes_src << R"(
+                #version 450
+                #extension GL_NV_viewport_array2 : require
+                layout(triangles) in;
+
+                void main() {
+                   gl_Position = (gl_in[0].gl_Position * gl_TessCoord.x +
+                                  gl_in[1].gl_Position * gl_TessCoord.y +
+                                  gl_in[2].gl_Position * gl_TessCoord.z);)";
+            // Write viewportMask if the tess eval shader is the last vertex processing stage.
+            if (stage == TestStage::TESSELLATION_EVAL) {
+                tes_src << "gl_ViewportMask[0] = 3;\n";
+            }
+            tes_src << "}";
+
+            tes = std::unique_ptr<VkShaderObj>(
+                new VkShaderObj(m_device, tes_src.str().c_str(), VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, this));
+            pipe.AddShader(tes.get());
+            pipe.AddShader(&tcs);
+            pipe.SetTessellation(&tsci);
+        }
+
+        if (stage >= TestStage::GEOMETRY) {
+            geom_src << R"(
+                #version 450
+                #extension GL_NV_viewport_array2 : require
+                layout(triangles)   in;
+                layout(triangle_strip, max_vertices = 3) out;
+
+                void main() {
+                   gl_ViewportMask[0] = 3;
+                   for(int i = 0; i < 3; ++i) {
+                       gl_Position = gl_in[i].gl_Position;
+                       EmitVertex();
+                    }
+                })";
+
+            geom =
+                std::unique_ptr<VkShaderObj>(new VkShaderObj(m_device, geom_src.str().c_str(), VK_SHADER_STAGE_GEOMETRY_BIT, this));
+            pipe.AddShader(geom.get());
+        }
+
+        pipe.CreateVKPipeline(pl.handle(), renderPass());
+        m_errorMonitor->VerifyNotFound();
+    }
+}
+
+TEST_F(VkPositiveLayerTest, HostQueryResetSuccess) {
+    // This is a positive test. No failures are expected.
+    TEST_DESCRIPTION("Use vkResetQueryPoolEXT normally");
+
+    if (!InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
+        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
+               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+        return;
+    }
+
+    m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (!DeviceExtensionSupported(gpu(), nullptr, VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME)) {
+        printf("%s Extension %s not supported by device; skipped.\n", kSkipPrefix, VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
+        return;
+    }
+
+    m_device_extension_names.push_back(VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
+
+    VkPhysicalDeviceHostQueryResetFeaturesEXT host_query_reset_features{};
+    host_query_reset_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT;
+    host_query_reset_features.hostQueryReset = VK_TRUE;
+
+    VkPhysicalDeviceFeatures2 pd_features2{};
+    pd_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
+    pd_features2.pNext = &host_query_reset_features;
+
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &pd_features2));
+
+    auto fpvkResetQueryPoolEXT = (PFN_vkResetQueryPoolEXT)vk::GetDeviceProcAddr(m_device->device(), "vkResetQueryPoolEXT");
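+    // vkResetQueryPoolEXT resets query pool entries directly from the host, without recording a command buffer.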
+
+    m_errorMonitor->ExpectSuccess();
+
+    VkQueryPool query_pool;
+    VkQueryPoolCreateInfo query_pool_create_info{};
+    query_pool_create_info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+    query_pool_create_info.queryType = VK_QUERY_TYPE_TIMESTAMP;
+    query_pool_create_info.queryCount = 1;
+    vk::CreateQueryPool(m_device->device(), &query_pool_create_info, nullptr, &query_pool);
+    fpvkResetQueryPoolEXT(m_device->device(), query_pool, 0, 1);
+    vk::DestroyQueryPool(m_device->device(), query_pool, nullptr);
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, CreatePipelineFragmentOutputNotConsumedButAlphaToCoverageEnabled) {
+    TEST_DESCRIPTION(
+        "Test that no warning is produced when writing to non-existing color attachment if alpha to coverage is enabled.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget(0u));
+
+    VkPipelineMultisampleStateCreateInfo ms_state_ci = {};
+    ms_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+    ms_state_ci.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
+    ms_state_ci.alphaToCoverageEnable = VK_TRUE;
+
+    const auto set_info = [&](CreatePipelineHelper &helper) {
+        helper.pipe_ms_state_ci_ = ms_state_ci;
+        helper.cb_ci_.attachmentCount = 0;
+    };
+    CreatePipelineHelper::OneshotTest(*this, set_info, VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT, "", true);
+}
+
+TEST_F(VkPositiveLayerTest, CreatePipelineAttachmentUnused) {
+    TEST_DESCRIPTION("Make sure unused attachments are correctly ignored.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    char const *fsSource =
+        "#version 450\n"
+        "\n"
+        "layout(location=0) out vec4 x;\n"
+        "void main(){\n"
+        "   x = vec4(1);\n"  // attachment is unused
+        "}\n";
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    VkAttachmentReference const color_attachments[1]{{VK_ATTACHMENT_UNUSED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL}};
+
+    VkSubpassDescription const subpass_descriptions[1]{
+        {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, color_attachments, nullptr, nullptr, 0, nullptr}};
+
+    VkAttachmentDescription const attachment_descriptions[1]{{0, VK_FORMAT_B8G8R8A8_UNORM, VK_SAMPLE_COUNT_1_BIT,
+                                                              VK_ATTACHMENT_LOAD_OP_CLEAR, VK_ATTACHMENT_STORE_OP_STORE,
+                                                              VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+                                                              VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL}};
+
+    VkRenderPassCreateInfo const render_pass_info{
+        VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attachment_descriptions, 1, subpass_descriptions, 0, nullptr};
+
+    VkRenderPass render_pass;
+    auto result = vk::CreateRenderPass(m_device->device(), &render_pass_info, nullptr, &render_pass);
+    ASSERT_VK_SUCCESS(result);
+
+    const auto override_info = [&](CreatePipelineHelper &helper) {
+        helper.shader_stages_ = {helper.vs_->GetStageCreateInfo(), fs.GetStageCreateInfo()};
+        helper.gp_ci_.renderPass = render_pass;
+    };
+    CreatePipelineHelper::OneshotTest(*this, override_info, VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT, "",
+                                      true);
+
+    vk::DestroyRenderPass(m_device->device(), render_pass, nullptr);
+}
+
+TEST_F(VkPositiveLayerTest, UseFirstQueueUnqueried) {
+    TEST_DESCRIPTION("Use first queue family and one queue without first querying with vkGetPhysicalDeviceQueueFamilyProperties");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    const float q_priority[] = {1.0f};
+    VkDeviceQueueCreateInfo queue_ci = {};
+    queue_ci.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
+    queue_ci.queueFamilyIndex = 0;
+    queue_ci.queueCount = 1;
+    queue_ci.pQueuePriorities = q_priority;
+
+    VkDeviceCreateInfo device_ci = {};
+    device_ci.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
+    device_ci.queueCreateInfoCount = 1;
+    device_ci.pQueueCreateInfos = &queue_ci;
+
+    m_errorMonitor->ExpectSuccess();
+    VkDevice test_device;
+    vk::CreateDevice(gpu(), &device_ci, nullptr, &test_device);
+    m_errorMonitor->VerifyNotFound();
+
+    vk::DestroyDevice(test_device, nullptr);
+}
+
+// Android loader returns an error in this case
+#if !defined(ANDROID)
+TEST_F(VkPositiveLayerTest, GetDevProcAddrNullPtr) {
+    TEST_DESCRIPTION("Call GetDeviceProcAddr on an enabled instance extension expecting nullptr");
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (InstanceExtensionSupported(VK_KHR_SURFACE_EXTENSION_NAME)) {
+        m_instance_extension_names.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
+    } else {
+        printf("%s %s not supported, skipping test\n", kSkipPrefix, VK_KHR_SURFACE_EXTENSION_NAME);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    m_errorMonitor->ExpectSuccess();
+    auto fpDestroySurface = (PFN_vkDestroySurfaceKHR)vk::GetDeviceProcAddr(m_device->device(), "vkDestroySurfaceKHR");
+    if (fpDestroySurface) {
+        m_errorMonitor->SetError("Null was expected!");
+    }
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, GetDevProcAddrExtensions) {
+    TEST_DESCRIPTION("Call GetDeviceProcAddr with and without extension enabled");
+    SetTargetApiVersion(VK_API_VERSION_1_1);
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (DeviceValidationVersion() < VK_API_VERSION_1_1) {
+        printf("%s GetDevProcAddrExtensions requires Vulkan 1.1+, skipping test\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    m_errorMonitor->ExpectSuccess();
+    auto vkTrimCommandPool = vk::GetDeviceProcAddr(m_device->device(), "vkTrimCommandPool");
+    auto vkTrimCommandPoolKHR = vk::GetDeviceProcAddr(m_device->device(), "vkTrimCommandPoolKHR");
+    if (nullptr == vkTrimCommandPool) m_errorMonitor->SetError("Unexpected null pointer");
+    if (nullptr != vkTrimCommandPoolKHR) m_errorMonitor->SetError("Didn't receive expected null pointer");
+
+    const char *const extension = {VK_KHR_MAINTENANCE1_EXTENSION_NAME};
+    const float q_priority[] = {1.0f};
+    VkDeviceQueueCreateInfo queue_ci = {};
+    queue_ci.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
+    queue_ci.queueFamilyIndex = 0;
+    queue_ci.queueCount = 1;
+    queue_ci.pQueuePriorities = q_priority;
+
+    VkDeviceCreateInfo device_ci = {};
+    device_ci.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
+    device_ci.enabledExtensionCount = 1;
+    device_ci.ppEnabledExtensionNames = &extension;
+    device_ci.queueCreateInfoCount = 1;
+    device_ci.pQueueCreateInfos = &queue_ci;
+
+    VkDevice device;
+    vk::CreateDevice(gpu(), &device_ci, NULL, &device);
+
+    vkTrimCommandPoolKHR = vk::GetDeviceProcAddr(device, "vkTrimCommandPoolKHR");
+    if (nullptr == vkTrimCommandPoolKHR) m_errorMonitor->SetError("Unexpected null pointer");
+    m_errorMonitor->VerifyNotFound();
+    vk::DestroyDevice(device, nullptr);
+}
+#endif
+
+TEST_F(VkPositiveLayerTest, CmdCopySwapchainImage) {
+    TEST_DESCRIPTION("Run vkCmdCopyImage with a swapchain image");
+
+#if defined(VK_USE_PLATFORM_ANDROID_KHR)
+    printf(
+        "%s According to valid usage, VkBindImageMemoryInfo-memory should be NULL. But Android will crash if memory is NULL, "
+        "skipping CmdCopySwapchainImage test\n",
+        kSkipPrefix);
+    return;
+#endif
+
+    SetTargetApiVersion(VK_API_VERSION_1_1);
+
+    if (!AddSurfaceInstanceExtension()) {
+        printf("%s surface extensions not supported, skipping CmdCopySwapchainImage test\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (!AddSwapchainDeviceExtension()) {
+        printf("%s swapchain extensions not supported, skipping CmdCopySwapchainImage test\n", kSkipPrefix);
+        return;
+    }
+
+    if (DeviceValidationVersion() < VK_API_VERSION_1_1) {
+        printf("%s VkBindImageMemoryInfo requires Vulkan 1.1+, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+    if (!InitSwapchain()) {
+        printf("%s Cannot create surface or swapchain, skipping CmdCopySwapchainImage test\n", kSkipPrefix);
+        return;
+    }
+
+    auto image_create_info = lvl_init_struct<VkImageCreateInfo>();
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
+    image_create_info.extent.width = 64;
+    image_create_info.extent.height = 64;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
+    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+    image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+
+    VkImageObj srcImage(m_device);
+    srcImage.init(&image_create_info);
+
+    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+
+    auto image_swapchain_create_info = lvl_init_struct<VkImageSwapchainCreateInfoKHR>();
+    image_swapchain_create_info.swapchain = m_swapchain;
+    image_create_info.pNext = &image_swapchain_create_info;
+
+    VkImage image_from_swapchain;
+    vk::CreateImage(device(), &image_create_info, NULL, &image_from_swapchain);
+
+    auto bind_swapchain_info = lvl_init_struct<VkBindImageMemorySwapchainInfoKHR>();
+    bind_swapchain_info.swapchain = m_swapchain;
+    bind_swapchain_info.imageIndex = 0;
+
+    auto bind_info = lvl_init_struct<VkBindImageMemoryInfo>(&bind_swapchain_info);
+    bind_info.image = image_from_swapchain;
+    bind_info.memory = VK_NULL_HANDLE;
+    bind_info.memoryOffset = 0;
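+    // memory is left VK_NULL_HANDLE; the VkBindImageMemorySwapchainInfoKHR in the pNext chain supplies the swapchain backing.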
+
+    vk::BindImageMemory2(m_device->device(), 1, &bind_info);
+
+    VkImageCopy copy_region = {};
+    copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copy_region.srcSubresource.mipLevel = 0;
+    copy_region.dstSubresource.mipLevel = 0;
+    copy_region.srcSubresource.baseArrayLayer = 0;
+    copy_region.dstSubresource.baseArrayLayer = 0;
+    copy_region.srcSubresource.layerCount = 1;
+    copy_region.dstSubresource.layerCount = 1;
+    copy_region.srcOffset = {0, 0, 0};
+    copy_region.dstOffset = {0, 0, 0};
+    copy_region.extent = {10, 10, 1};
+
+    m_commandBuffer->begin();
+
+    m_errorMonitor->ExpectSuccess();
+    vk::CmdCopyImage(m_commandBuffer->handle(), srcImage.handle(), VK_IMAGE_LAYOUT_GENERAL, image_from_swapchain,
+                     VK_IMAGE_LAYOUT_GENERAL, 1, &copy_region);
+    m_errorMonitor->VerifyNotFound();
+
+    vk::DestroyImage(m_device->device(), image_from_swapchain, NULL);
+    DestroySwapchain();
+}
+
+TEST_F(VkPositiveLayerTest, TransferImageToSwapchainDeviceGroup) {
+    TEST_DESCRIPTION("Transfer an image to a swapchain's image  between device group");
+
+#if defined(VK_USE_PLATFORM_ANDROID_KHR)
+    printf(
+        "%s According to valid usage, VkBindImageMemoryInfo-memory should be NULL. But Android will crash if memory is NULL, "
+        "skipping test\n",
+        kSkipPrefix);
+    return;
+#endif
+
+    SetTargetApiVersion(VK_API_VERSION_1_1);
+
+    if (!AddSurfaceInstanceExtension()) {
+        printf("%s surface extensions not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (!AddSwapchainDeviceExtension()) {
+        printf("%s swapchain extensions not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    if (DeviceValidationVersion() < VK_API_VERSION_1_1) {
+        printf("%s VkBindImageMemoryInfo requires Vulkan 1.1+, skipping test\n", kSkipPrefix);
+        return;
+    }
+    uint32_t physical_device_group_count = 0;
+    vk::EnumeratePhysicalDeviceGroups(instance(), &physical_device_group_count, nullptr);
+
+    if (physical_device_group_count == 0) {
+        printf("%s physical_device_group_count is 0, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    std::vector<VkPhysicalDeviceGroupProperties> physical_device_group(physical_device_group_count,
+                                                                       {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES});
+    vk::EnumeratePhysicalDeviceGroups(instance(), &physical_device_group_count, physical_device_group.data());
+    VkDeviceGroupDeviceCreateInfo create_device_pnext = {};
+    create_device_pnext.sType = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO;
+    create_device_pnext.physicalDeviceCount = physical_device_group[0].physicalDeviceCount;
+    create_device_pnext.pPhysicalDevices = physical_device_group[0].physicalDevices;
+    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &create_device_pnext));
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+    if (!InitSwapchain(VK_IMAGE_USAGE_TRANSFER_DST_BIT)) {
+        printf("%s Cannot create surface or swapchain, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    auto image_create_info = lvl_init_struct<VkImageCreateInfo>();
+    image_create_info.imageType = VK_IMAGE_TYPE_2D;
+    image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
+    image_create_info.extent.width = 64;
+    image_create_info.extent.height = 64;
+    image_create_info.extent.depth = 1;
+    image_create_info.mipLevels = 1;
+    image_create_info.arrayLayers = 1;
+    image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
+    image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+    image_create_info.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
+    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+    image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+
+    VkImageObj src_Image(m_device);
+    src_Image.init(&image_create_info);
+
+    image_create_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+    image_create_info.flags = VK_IMAGE_CREATE_ALIAS_BIT;
+
+    auto image_swapchain_create_info = lvl_init_struct<VkImageSwapchainCreateInfoKHR>();
+    image_swapchain_create_info.swapchain = m_swapchain;
+    image_create_info.pNext = &image_swapchain_create_info;
+
+    VkImage peer_image;
+    vk::CreateImage(device(), &image_create_info, NULL, &peer_image);
+
+    auto bind_devicegroup_info = lvl_init_struct<VkBindImageMemoryDeviceGroupInfo>();
+    bind_devicegroup_info.deviceIndexCount = 2;
+    std::array<uint32_t, 2> deviceIndices = {0, 0};
+    bind_devicegroup_info.pDeviceIndices = deviceIndices.data();
+    bind_devicegroup_info.splitInstanceBindRegionCount = 0;
+    bind_devicegroup_info.pSplitInstanceBindRegions = nullptr;
+
+    auto bind_swapchain_info = lvl_init_struct<VkBindImageMemorySwapchainInfoKHR>(&bind_devicegroup_info);
+    bind_swapchain_info.swapchain = m_swapchain;
+    bind_swapchain_info.imageIndex = 0;
+
+    auto bind_info = lvl_init_struct<VkBindImageMemoryInfo>(&bind_swapchain_info);
+    bind_info.image = peer_image;
+    bind_info.memory = VK_NULL_HANDLE;
+    bind_info.memoryOffset = 0;
+
+    vk::BindImageMemory2(m_device->device(), 1, &bind_info);
+
+    uint32_t swapchain_images_count = 0;
+    vk::GetSwapchainImagesKHR(device(), m_swapchain, &swapchain_images_count, nullptr);
+    std::vector<VkImage> swapchain_images;
+    swapchain_images.resize(swapchain_images_count);
+    vk::GetSwapchainImagesKHR(device(), m_swapchain, &swapchain_images_count, swapchain_images.data());
+
+    m_commandBuffer->begin();
+
+    auto img_barrier = lvl_init_struct<VkImageMemoryBarrier>();
+    img_barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    img_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
+    img_barrier.image = swapchain_images[0];
+    img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    img_barrier.subresourceRange.baseArrayLayer = 0;
+    img_barrier.subresourceRange.baseMipLevel = 0;
+    img_barrier.subresourceRange.layerCount = 1;
+    img_barrier.subresourceRange.levelCount = 1;
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0,
+                           nullptr, 0, nullptr, 1, &img_barrier);
+
+    VkImageCopy copy_region = {};
+    copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copy_region.srcSubresource.mipLevel = 0;
+    copy_region.dstSubresource.mipLevel = 0;
+    copy_region.srcSubresource.baseArrayLayer = 0;
+    copy_region.dstSubresource.baseArrayLayer = 0;
+    copy_region.srcSubresource.layerCount = 1;
+    copy_region.dstSubresource.layerCount = 1;
+    copy_region.srcOffset = {0, 0, 0};
+    copy_region.dstOffset = {0, 0, 0};
+    copy_region.extent = {10, 10, 1};
+    vk::CmdCopyImage(m_commandBuffer->handle(), src_Image.handle(), VK_IMAGE_LAYOUT_GENERAL, peer_image,
+                     VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &copy_region);
+
+    m_commandBuffer->end();
+    m_errorMonitor->ExpectSuccess();
+    m_commandBuffer->QueueCommandBuffer();
+    m_errorMonitor->VerifyNotFound();
+
+    vk::DestroyImage(m_device->device(), peer_image, NULL);
+    DestroySwapchain();
+}
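The bind step this test exercises is easy to lose in the setup code, so here is a minimal sketch of the same pNext chain in isolation. It is illustrative only and not part of the imported test: it assumes Vulkan 1.1, a valid `device` and `swapchain`, and a `peer_image` that was created with VkImageSwapchainCreateInfoKHR chained into its VkImageCreateInfo.

```cpp
#include <vulkan/vulkan.h>

// Sketch: bind a peer image to the memory of swapchain image 0 across a device group.
void BindPeerImageToSwapchain(VkDevice device, VkSwapchainKHR swapchain, VkImage peer_image) {
    uint32_t device_indices[2] = {0, 0};  // every device instance reads instance 0's memory

    VkBindImageMemoryDeviceGroupInfo group_info{VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO};
    group_info.deviceIndexCount = 2;
    group_info.pDeviceIndices = device_indices;

    VkBindImageMemorySwapchainInfoKHR swapchain_info{VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR};
    swapchain_info.pNext = &group_info;
    swapchain_info.swapchain = swapchain;
    swapchain_info.imageIndex = 0;

    VkBindImageMemoryInfo bind_info{VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO};
    bind_info.pNext = &swapchain_info;
    bind_info.image = peer_image;
    bind_info.memory = VK_NULL_HANDLE;  // must stay VK_NULL_HANDLE when the swapchain info is chained
    vkBindImageMemory2(device, 1, &bind_info);
}
```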
+
+TEST_F(VkPositiveLayerTest, RenderPassValidStages) {
+    TEST_DESCRIPTION("Create render pass with valid stages");
+
+    bool rp2_supported = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+    if (rp2_supported) m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (rp2_supported) rp2_supported = CheckCreateRenderPass2Support(this, m_device_extension_names);
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    VkSubpassDescription sci[2] = {};
+    sci[0].pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
+    sci[1].pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
+
+    VkSubpassDependency dependency = {};
+    // filled in below for each dependency case
+
+    VkRenderPassCreateInfo rpci = {};
+    rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+    rpci.subpassCount = 2;
+    rpci.pSubpasses = sci;
+    rpci.dependencyCount = 1;
+    rpci.pDependencies = &dependency;
+
+    const VkPipelineStageFlags kGraphicsStages =
+        VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT | VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT |
+        VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT |
+        VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT |
+        VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
+
+    dependency.srcSubpass = 0;
+    dependency.dstSubpass = 1;
+    dependency.srcStageMask = kGraphicsStages;
+    dependency.dstStageMask = kGraphicsStages;
+    PositiveTestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported);
+
+    dependency.srcSubpass = VK_SUBPASS_EXTERNAL;
+    dependency.dstSubpass = 0;
+    dependency.srcStageMask = kGraphicsStages | VK_PIPELINE_STAGE_HOST_BIT;
+    dependency.dstStageMask = kGraphicsStages;
+    PositiveTestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported);
+
+    dependency.srcSubpass = 0;
+    dependency.dstSubpass = VK_SUBPASS_EXTERNAL;
+    dependency.srcStageMask = kGraphicsStages;
+    dependency.dstStageMask = VK_PIPELINE_STAGE_HOST_BIT;
+    PositiveTestRenderPassCreate(m_errorMonitor, m_device->device(), &rpci, rp2_supported);
+}
+
+TEST_F(VkPositiveLayerTest, SampleMaskOverrideCoverageNV) {
+    TEST_DESCRIPTION("Test to validate VK_NV_sample_mask_override_coverage");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME);
+    } else {
+        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+
+    const char vs_src[] = R"(
+        #version 450
+        layout(location=0) out vec4  fragColor;
+
+        const vec2 pos[3] = { vec2( 0.0f, -0.5f),
+                              vec2( 0.5f,  0.5f),
+                              vec2(-0.5f,  0.5f)
+                            };
+        void main()
+        {
+            gl_Position = vec4(pos[gl_VertexIndex % 3], 0.0f, 1.0f);
+            fragColor = vec4(0.0f, 1.0f, 0.0f, 1.0f);
+        })";
+
+    const char fs_src[] = R"(
+        #version 450
+        #extension GL_NV_sample_mask_override_coverage : require
+
+        layout(location = 0) in  vec4 fragColor;
+        layout(location = 0) out vec4 outColor;
+
+        layout(override_coverage) out int gl_SampleMask[];
+
+        void main()
+        {
+            gl_SampleMask[0] = 0xff;
+            outColor = fragColor;
+        })";
+
+    m_errorMonitor->ExpectSuccess();
+
+    const VkSampleCountFlagBits sampleCount = VK_SAMPLE_COUNT_8_BIT;
+
+    VkAttachmentDescription cAttachment = {};
+    cAttachment.format = VK_FORMAT_B8G8R8A8_UNORM;
+    cAttachment.samples = sampleCount;
+    cAttachment.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
+    cAttachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
+    cAttachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
+    cAttachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
+    cAttachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    cAttachment.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+
+    VkAttachmentReference cAttachRef = {};
+    cAttachRef.attachment = 0;
+    cAttachRef.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+
+    VkSubpassDescription subpass = {};
+    subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
+    subpass.colorAttachmentCount = 1;
+    subpass.pColorAttachments = &cAttachRef;
+
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO};
+    rpci.attachmentCount = 1;
+    rpci.pAttachments = &cAttachment;
+    rpci.subpassCount = 1;
+    rpci.pSubpasses = &subpass;
+
+    VkRenderPass rp;
+    vk::CreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+
+    const VkPipelineLayoutObj pl(m_device);
+
+    VkSampleMask sampleMask = 0x01;
+    VkPipelineMultisampleStateCreateInfo msaa = {VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO};
+    msaa.rasterizationSamples = sampleCount;
+    msaa.sampleShadingEnable = VK_FALSE;
+    msaa.pSampleMask = &sampleMask;
+
+    VkPipelineObj pipe(m_device);
+    pipe.AddDefaultColorAttachment();
+    pipe.SetMSAA(&msaa);
+
+    VkShaderObj vs(m_device, vs_src, VK_SHADER_STAGE_VERTEX_BIT, this);
+    pipe.AddShader(&vs);
+
+    VkShaderObj fs(m_device, fs_src, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+    pipe.AddShader(&fs);
+
+    // Create pipeline and make sure that the usage of NV_sample_mask_override_coverage
+    // in the fragment shader does not cause any errors.
+    pipe.CreateVKPipeline(pl.handle(), rp);
+
+    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, TestRasterizationDiscardEnableTrue) {
+    TEST_DESCRIPTION("Ensure it doesn't crash and trigger error msg when rasterizerDiscardEnable = true");
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    VkAttachmentDescription att[1] = {{}};
+    att[0].format = VK_FORMAT_R8G8B8A8_UNORM;
+    att[0].samples = VK_SAMPLE_COUNT_4_BIT;
+    att[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    att[0].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    VkAttachmentReference cr = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+    VkSubpassDescription sp = {};
+    sp.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
+    sp.colorAttachmentCount = 1;
+    sp.pColorAttachments = &cr;
+    VkRenderPassCreateInfo rpi = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO};
+    rpi.attachmentCount = 1;
+    rpi.pAttachments = att;
+    rpi.subpassCount = 1;
+    rpi.pSubpasses = &sp;
+    VkRenderPass rp;
+    vk::CreateRenderPass(m_device->device(), &rpi, nullptr, &rp);
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.gp_ci_.pViewportState = nullptr;
+    pipe.gp_ci_.pMultisampleState = nullptr;
+    pipe.gp_ci_.pDepthStencilState = nullptr;
+    pipe.gp_ci_.pColorBlendState = nullptr;
+    pipe.gp_ci_.renderPass = rp;
+
+    m_errorMonitor->ExpectSuccess();
+    // Note: on Nexus Player the driver crashes when pViewportState, pMultisampleState, pDepthStencilState and
+    // pColorBlendState are NULL, so this test may need to be skipped there.
+    pipe.rs_state_ci_.rasterizerDiscardEnable = VK_TRUE;
+    pipe.InitState();
+    pipe.CreateGraphicsPipeline();
+    m_errorMonitor->VerifyNotFound();
+    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+}
+
+TEST_F(VkPositiveLayerTest, TestSamplerDataForCombinedImageSampler) {
+    TEST_DESCRIPTION("Shader code uses sampler data for CombinedImageSampler");
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    const std::string fsSource = R"(
+                   OpCapability Shader
+                   OpMemoryModel Logical GLSL450
+                   OpEntryPoint Fragment %main "main"
+                   OpExecutionMode %main OriginUpperLeft
+
+                   OpDecorate %InputData DescriptorSet 0
+                   OpDecorate %InputData Binding 0
+                   OpDecorate %SamplerData DescriptorSet 0
+                   OpDecorate %SamplerData Binding 0
+
+               %void = OpTypeVoid
+                %f32 = OpTypeFloat 32
+              %Image = OpTypeImage %f32 2D 0 0 0 1 Rgba32f
+           %ImagePtr = OpTypePointer UniformConstant %Image
+          %InputData = OpVariable %ImagePtr UniformConstant
+            %Sampler = OpTypeSampler
+         %SamplerPtr = OpTypePointer UniformConstant %Sampler
+        %SamplerData = OpVariable %SamplerPtr UniformConstant
+       %SampledImage = OpTypeSampledImage %Image
+
+               %func = OpTypeFunction %void
+               %main = OpFunction %void None %func
+                 %40 = OpLabel
+           %call_smp = OpLoad %Sampler %SamplerData
+                   OpReturn
+                   OpFunctionEnd)";
+
+    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.dsl_bindings_ = {
+        {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
+    };
+    pipe.shader_stages_ = {fs.GetStageCreateInfo(), pipe.vs_->GetStageCreateInfo()};
+    pipe.InitState();
+    pipe.CreateGraphicsPipeline();
+
+    VkImageObj image(m_device);
+    image.Init(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
+    VkImageView view = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
+
+    VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
+    VkSampler sampler;
+    vk::CreateSampler(m_device->device(), &sampler_ci, nullptr, &sampler);
+
+    uint32_t qfi = 0;
+    VkBufferCreateInfo buffer_create_info = {};
+    buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    buffer_create_info.size = 1024;
+    buffer_create_info.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
+    buffer_create_info.queueFamilyIndexCount = 1;
+    buffer_create_info.pQueueFamilyIndices = &qfi;
+
+    VkBufferObj buffer;
+    buffer.init(*m_device, buffer_create_info);
+
+    pipe.descriptor_set_->WriteDescriptorImageInfo(0, view, sampler, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
+    pipe.descriptor_set_->UpdateDescriptorSets();
+
+    m_commandBuffer->begin();
+    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
+    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
+    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
+                              &pipe.descriptor_set_->set_, 0, NULL);
+
+    m_errorMonitor->ExpectSuccess();
+    vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
+    m_errorMonitor->VerifyNotFound();
+
+    vk::CmdEndRenderPass(m_commandBuffer->handle());
+    m_commandBuffer->end();
+    vk::DestroySampler(m_device->device(), sampler, NULL);
+}
+
+TEST_F(VkPositiveLayerTest, NotPointSizeGeometryShaderSuccess) {
+    TEST_DESCRIPTION("Create a pipeline using TOPOLOGY_POINT_LIST, but geometry shader doesn't include PointSize.");
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    if ((!m_device->phy().features().geometryShader)) {
+        printf("%s Device does not support the required geometry shader features; skipped.\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+    ASSERT_NO_FATAL_FAILURE(InitViewport());
+
+    VkShaderObj gs(m_device, bindStateGeomShaderText, VK_SHADER_STAGE_GEOMETRY_BIT, this);
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.shader_stages_ = {pipe.vs_->GetStageCreateInfo(), gs.GetStageCreateInfo(), pipe.fs_->GetStageCreateInfo()};
+    pipe.ia_ci_.topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
+    pipe.InitState();
+
+    m_errorMonitor->ExpectSuccess();
+    pipe.CreateGraphicsPipeline();
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, SubpassWithReadOnlyLayoutWithoutDependency) {
+    TEST_DESCRIPTION("When both subpasses' attachments are the same and layouts are read-only, they don't need dependency.");
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    auto depth_format = FindSupportedDepthStencilFormat(gpu());
+    if (!depth_format) {
+        printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
+        return;
+    }
+
+    // A render pass with two subpasses, both using the same read-only depth/stencil attachment.
+    VkAttachmentDescription attachment = {0,
+                                          depth_format,
+                                          VK_SAMPLE_COUNT_1_BIT,
+                                          VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+                                          VK_ATTACHMENT_STORE_OP_STORE,
+                                          VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+                                          VK_ATTACHMENT_STORE_OP_DONT_CARE,
+                                          VK_IMAGE_LAYOUT_UNDEFINED,
+                                          VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL};
+    const int size = 2;
+    std::array<VkAttachmentDescription, size> attachments = {attachment, attachment};
+
+    VkAttachmentReference att_ref_depth_stencil = {0, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL};
+
+    std::array<VkSubpassDescription, size> subpasses;
+    subpasses[0] = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, 0, 0, nullptr, nullptr, &att_ref_depth_stencil, 0, nullptr};
+    subpasses[1] = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, 0, 0, nullptr, nullptr, &att_ref_depth_stencil, 0, nullptr};
+
+    VkRenderPassCreateInfo rpci = {
+        VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, size, attachments.data(), size, subpasses.data(), 0, nullptr};
+
+    VkRenderPass rp;
+    VkResult err = vk::CreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+    ASSERT_VK_SUCCESS(err);
+
+    // A compatible framebuffer.
+    VkImageObj image(m_device);
+    image.Init(32, 32, 1, depth_format, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_TILING_LINEAR, 0);
+    ASSERT_TRUE(image.initialized());
+
+    VkImageViewCreateInfo ivci = {VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+                                  nullptr,
+                                  0,
+                                  image.handle(),
+                                  VK_IMAGE_VIEW_TYPE_2D,
+                                  depth_format,
+                                  {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
+                                   VK_COMPONENT_SWIZZLE_IDENTITY},
+                                  {VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT, 0, 1, 0, 1}};
+
+    VkImageView view;
+    err = vk::CreateImageView(m_device->device(), &ivci, nullptr, &view);
+    ASSERT_VK_SUCCESS(err);
+    std::array<VkImageView, size> views = {view, view};
+
+    VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, size, views.data(), 32, 32, 1};
+    VkFramebuffer fb;
+    err = vk::CreateFramebuffer(m_device->device(), &fci, nullptr, &fb);
+    ASSERT_VK_SUCCESS(err);
+
+    VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp, fb, {{0, 0}, {32, 32}}, 0, nullptr};
+    m_commandBuffer->begin();
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+    vk::CmdNextSubpass(m_commandBuffer->handle(), VK_SUBPASS_CONTENTS_INLINE);
+    vk::CmdEndRenderPass(m_commandBuffer->handle());
+    m_commandBuffer->end();
+
+    vk::DestroyFramebuffer(m_device->device(), fb, nullptr);
+    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+    vk::DestroyImageView(m_device->device(), view, nullptr);
+}
+
+TEST_F(VkPositiveLayerTest, GeometryShaderPassthroughNV) {
+    TEST_DESCRIPTION("Test to validate VK_NV_geometry_shader_passthrough");
+
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+
+    VkPhysicalDeviceFeatures available_features = {};
+    ASSERT_NO_FATAL_FAILURE(GetPhysicalDeviceFeatures(&available_features));
+
+    if (!available_features.geometryShader) {
+        printf("%s VkPhysicalDeviceFeatures::geometryShader is not supported, skipping test\n", kSkipPrefix);
+        return;
+    }
+
+    if (DeviceExtensionSupported(gpu(), nullptr, VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME)) {
+        m_device_extension_names.push_back(VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME);
+    } else {
+        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME);
+        return;
+    }
+
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    const char vs_src[] = R"(
+        #version 450
+
+        out gl_PerVertex {
+            vec4 gl_Position;
+        };
+
+        layout(location = 0) out ColorBlock {vec4 vertexColor;};
+
+        const vec2 positions[3] = { vec2( 0.0f, -0.5f),
+                                    vec2( 0.5f,  0.5f),
+                                    vec2(-0.5f,  0.5f)
+                                  };
+
+        const vec4 colors[3] = { vec4(1.0f, 0.0f, 0.0f, 1.0f),
+                                 vec4(0.0f, 1.0f, 0.0f, 1.0f),
+                                 vec4(0.0f, 0.0f, 1.0f, 1.0f)
+                               };
+        void main()
+        {
+            vertexColor = colors[gl_VertexIndex % 3];
+            gl_Position = vec4(positions[gl_VertexIndex % 3], 0.0, 1.0);
+        })";
+
+    const char gs_src[] = R"(
+        #version 450
+        #extension GL_NV_geometry_shader_passthrough: require
+
+        layout(triangles) in;
+        layout(triangle_strip, max_vertices = 3) out;
+
+        layout(passthrough) in gl_PerVertex {vec4 gl_Position;};
+        layout(location = 0, passthrough) in ColorBlock {vec4 vertexColor;};
+
+        void main()
+        {
+           gl_Layer = 0;
+        })";
+
+    const char fs_src[] = R"(
+        #version 450
+
+        layout(location = 0) in ColorBlock {vec4 vertexColor;};
+        layout(location = 0) out vec4 outColor;
+
+        void main() {
+            outColor = vertexColor;
+        })";
+
+    m_errorMonitor->ExpectSuccess();
+
+    const VkPipelineLayoutObj pl(m_device);
+
+    VkPipelineObj pipe(m_device);
+    pipe.AddDefaultColorAttachment();
+
+    VkShaderObj vs(m_device, vs_src, VK_SHADER_STAGE_VERTEX_BIT, this);
+    pipe.AddShader(&vs);
+
+    VkShaderObj gs(m_device, gs_src, VK_SHADER_STAGE_GEOMETRY_BIT, this);
+    pipe.AddShader(&gs);
+
+    VkShaderObj fs(m_device, fs_src, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+    pipe.AddShader(&fs);
+
+    // Create pipeline and make sure that the usage of NV_geometry_shader_passthrough
+    // in the geometry shader does not cause any errors.
+    pipe.CreateVKPipeline(pl.handle(), renderPass());
+
+    m_errorMonitor->VerifyNotFound();
+}
+
+TEST_F(VkPositiveLayerTest, SwapchainImageLayout) {
+    if (!AddSurfaceInstanceExtension()) {
+        printf("%s surface extensions not supported, skipping CmdCopySwapchainImage test\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitFramework(myDbgFunc, m_errorMonitor));
+    if (!AddSwapchainDeviceExtension()) {
+        printf("%s swapchain extensions not supported, skipping CmdCopySwapchainImage test\n", kSkipPrefix);
+        return;
+    }
+    ASSERT_NO_FATAL_FAILURE(InitState());
+    if (!InitSwapchain()) {
+        printf("%s Cannot create surface or swapchain, skipping CmdCopySwapchainImage test\n", kSkipPrefix);
+        return;
+    }
+    uint32_t image_index = 0, image_count;
+    PFN_vkGetSwapchainImagesKHR fpGetSwapchainImagesKHR =
+        (PFN_vkGetSwapchainImagesKHR)vk::GetDeviceProcAddr(m_device->handle(), "vkGetSwapchainImagesKHR");
+    fpGetSwapchainImagesKHR(m_device->handle(), m_swapchain, &image_count, NULL);
+    std::vector<VkImage> swapchainImages(image_count);
+    fpGetSwapchainImagesKHR(m_device->handle(), m_swapchain, &image_count, swapchainImages.data());
+    VkFenceCreateInfo fenceci = {VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, nullptr, 0};
+    VkFence fence;
+    VkResult ret = vk::CreateFence(m_device->device(), &fenceci, nullptr, &fence);
+    ASSERT_VK_SUCCESS(ret);
+    PFN_vkAcquireNextImageKHR fpAcquireNextImageKHR =
+        (PFN_vkAcquireNextImageKHR)vk::GetDeviceProcAddr(m_device->handle(), "vkAcquireNextImageKHR");
+    ret = fpAcquireNextImageKHR(m_device->handle(), m_swapchain, UINT64_MAX, VK_NULL_HANDLE, fence, &image_index);
+    ASSERT_VK_SUCCESS(ret);
+    VkAttachmentDescription attach[] = {
+        {0, VK_FORMAT_B8G8R8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+         VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_UNDEFINED,
+         VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+    };
+    VkAttachmentReference att_ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+
+    VkSubpassDescription subpass = {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &att_ref, nullptr, nullptr, 0, nullptr};
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attach, 1, &subpass, 0, nullptr};
+    VkRenderPass rp1, rp2;
+
+    ret = vk::CreateRenderPass(m_device->device(), &rpci, nullptr, &rp1);
+    ASSERT_VK_SUCCESS(ret);
+    attach[0].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    ret = vk::CreateRenderPass(m_device->device(), &rpci, nullptr, &rp2);
+    VkImageViewCreateInfo ivci = {
+        VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+        nullptr,
+        0,
+        swapchainImages[image_index],
+        VK_IMAGE_VIEW_TYPE_2D,
+        VK_FORMAT_B8G8R8A8_UNORM,
+        {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
+         VK_COMPONENT_SWIZZLE_IDENTITY},
+        {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1},
+    };
+    VkImageView view;
+    ret = vk::CreateImageView(m_device->device(), &ivci, nullptr, &view);
+    ASSERT_VK_SUCCESS(ret);
+    VkFramebufferCreateInfo fci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp1, 1, &view, 32, 32, 1};
+    VkFramebuffer fb1, fb2;
+    ret = vk::CreateFramebuffer(m_device->device(), &fci, nullptr, &fb1);
+    fci.renderPass = rp2;
+    ret = vk::CreateFramebuffer(m_device->device(), &fci, nullptr, &fb2);
+    ASSERT_VK_SUCCESS(ret);
+    VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, nullptr, rp1, fb1, {{0, 0}, {32, 32}}, 0, nullptr};
+    m_commandBuffer->begin();
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+    vk::CmdEndRenderPass(m_commandBuffer->handle());
+    rpbi.framebuffer = fb2;
+    rpbi.renderPass = rp2;
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+    vk::CmdEndRenderPass(m_commandBuffer->handle());
+
+    VkImageMemoryBarrier img_barrier = {};
+    img_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+    img_barrier.srcAccessMask = 0;
+    img_barrier.dstAccessMask = 0;
+    img_barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    img_barrier.newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
+    img_barrier.image = swapchainImages[image_index];
+    img_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    img_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    img_barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    img_barrier.subresourceRange.baseArrayLayer = 0;
+    img_barrier.subresourceRange.baseMipLevel = 0;
+    img_barrier.subresourceRange.layerCount = 1;
+    img_barrier.subresourceRange.levelCount = 1;
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0,
+                           nullptr, 0, nullptr, 1, &img_barrier);
+    m_commandBuffer->end();
+    VkSubmitInfo submit_info;
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.pNext = NULL;
+    submit_info.waitSemaphoreCount = 0;
+    submit_info.pWaitSemaphores = NULL;
+    submit_info.pWaitDstStageMask = NULL;
+    submit_info.commandBufferCount = 1;
+    submit_info.pCommandBuffers = &m_commandBuffer->handle();
+    submit_info.signalSemaphoreCount = 0;
+    submit_info.pSignalSemaphores = NULL;
+    vk::WaitForFences(m_device->device(), 1, &fence, VK_TRUE, UINT64_MAX);
+    m_errorMonitor->ExpectSuccess();
+    vk::QueueSubmit(m_device->m_queue, 1, &submit_info, VK_NULL_HANDLE);
+    m_errorMonitor->VerifyNotFound();
+
+    vk::DestroyFramebuffer(m_device->device(), fb1, NULL);
+    vk::DestroyRenderPass(m_device->device(), rp1, NULL);
+    vk::DestroyFramebuffer(m_device->device(), fb2, NULL);
+    vk::DestroyRenderPass(m_device->device(), rp2, NULL);
+    vk::DestroyFence(m_device->device(), fence, NULL);
+    vk::DestroyImageView(m_device->device(), view, NULL);
+}
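The test above relies on acquiring the swapchain image with only a fence before any layout work is recorded. A condensed sketch of that acquire-with-fence pattern, separate from the imported code (the helper name is illustrative and error handling is omitted):

```cpp
#include <vulkan/vulkan.h>

// Sketch: acquire a swapchain image and block until it is ready, using a fence instead of a semaphore.
uint32_t AcquireWithFence(VkDevice device, VkSwapchainKHR swapchain) {
    VkFenceCreateInfo fence_ci{VK_STRUCTURE_TYPE_FENCE_CREATE_INFO};
    VkFence fence = VK_NULL_HANDLE;
    vkCreateFence(device, &fence_ci, nullptr, &fence);

    uint32_t image_index = 0;
    // No semaphore: the fence alone signals when the acquired image may be used.
    vkAcquireNextImageKHR(device, swapchain, UINT64_MAX, VK_NULL_HANDLE, fence, &image_index);
    vkWaitForFences(device, 1, &fence, VK_TRUE, UINT64_MAX);

    vkDestroyFence(device, fence, nullptr);
    return image_index;
}
```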
+
+TEST_F(VkPositiveLayerTest, PipelineStageConditionalRendering) {
+    TEST_DESCRIPTION("Create renderpass and CmdPipelineBarrier with VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT");
+    ASSERT_NO_FATAL_FAILURE(Init());
+
+    // A render pass with a single subpass that declares a self-dependency
+    VkAttachmentDescription attach[] = {
+        {0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
+         VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE, VK_IMAGE_LAYOUT_UNDEFINED,
+         VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
+    };
+    VkAttachmentReference ref = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
+    VkSubpassDescription subpasses[] = {
+        {0, VK_PIPELINE_BIND_POINT_GRAPHICS, 0, nullptr, 1, &ref, nullptr, nullptr, 0, nullptr},
+    };
+
+    VkSubpassDependency dependency = {0,
+                                      0,
+                                      VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
+                                      VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT,
+                                      VK_ACCESS_SHADER_WRITE_BIT,
+                                      VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT,
+                                      (VkDependencyFlags)0};
+    VkRenderPassCreateInfo rpci = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, nullptr, 0, 1, attach, 1, subpasses, 1, &dependency};
+    VkRenderPass rp;
+
+    m_errorMonitor->ExpectSuccess();
+    vk::CreateRenderPass(m_device->device(), &rpci, nullptr, &rp);
+    m_errorMonitor->VerifyNotFound();
+
+    VkImageObj image(m_device);
+    image.Init(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT);
+    VkImageView imageView = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
+
+    VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp, 1, &imageView, 32, 32, 1};
+    VkFramebuffer fb;
+    vk::CreateFramebuffer(m_device->device(), &fbci, nullptr, &fb);
+
+    m_commandBuffer->begin();
+    VkRenderPassBeginInfo rpbi = {VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
+                                  nullptr,
+                                  rp,
+                                  fb,
+                                  {{
+                                       0,
+                                       0,
+                                   },
+                                   {32, 32}},
+                                  0,
+                                  nullptr};
+    vk::CmdBeginRenderPass(m_commandBuffer->handle(), &rpbi, VK_SUBPASS_CONTENTS_INLINE);
+
+    VkImageMemoryBarrier imb = {};
+    imb.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+    imb.pNext = nullptr;
+    imb.srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
+    imb.dstAccessMask = VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT;
+    imb.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    imb.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    imb.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    imb.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+    imb.image = image.handle();
+    imb.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    imb.subresourceRange.baseMipLevel = 0;
+    imb.subresourceRange.levelCount = 1;
+    imb.subresourceRange.baseArrayLayer = 0;
+    imb.subresourceRange.layerCount = 1;
+
+    m_errorMonitor->ExpectSuccess();
+    vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
+                           VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT, 0, 0, nullptr, 0, nullptr, 1, &imb);
+    m_errorMonitor->VerifyNotFound();
+
+    vk::CmdEndRenderPass(m_commandBuffer->handle());
+    m_commandBuffer->end();
+    vk::DestroyRenderPass(m_device->device(), rp, nullptr);
+    vk::DestroyFramebuffer(m_device->device(), fb, nullptr);
+}
+
+TEST_F(VkPositiveLayerTest, CreatePipelineOverlappingPushConstantRange) {
+    TEST_DESCRIPTION("Test overlapping push-constant ranges.");
+
+    m_errorMonitor->ExpectSuccess();
+
+    ASSERT_NO_FATAL_FAILURE(Init());
+    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
+
+    char const *const vsSource =
+        "#version 450\n"
+        "\n"
+        "layout(push_constant, std430) uniform foo { float x[8]; } constants;\n"
+        "void main(){\n"
+        "   gl_Position = vec4(constants.x[0]);\n"
+        "}\n";
+
+    char const *const fsSource =
+        "#version 450\n"
+        "\n"
+        "layout(push_constant, std430) uniform foo { float x[4]; } constants;\n"
+        "void main(){\n"
+        "}\n";
+
+    VkShaderObj const vs(m_device, vsSource, VK_SHADER_STAGE_VERTEX_BIT, this);
+    VkShaderObj const fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
+
+    VkPushConstantRange push_constant_ranges[2]{{VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(float) * 8},
+                                                {VK_SHADER_STAGE_FRAGMENT_BIT, 0, sizeof(float) * 4}};
+
+    VkPipelineLayoutCreateInfo const pipeline_layout_info{
+        VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, nullptr, 0, 0, nullptr, 2, push_constant_ranges};
+
+    CreatePipelineHelper pipe(*this);
+    pipe.InitInfo();
+    pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
+    pipe.pipeline_layout_ci_ = pipeline_layout_info;
+    pipe.InitState();
+
+    pipe.CreateGraphicsPipeline();
+
+    m_errorMonitor->VerifyNotFound();
+}
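For reference, the layout that CreatePipelineOverlappingPushConstantRange builds reduces to the sketch below: two ranges that overlap at offset 0 but target different stages, which the validation layers must accept silently. The helper name and the standalone vkCreatePipelineLayout call are illustrative, not part of the imported test; the sizes mirror the shaders' `float x[8]` and `float x[4]` blocks.

```cpp
#include <vulkan/vulkan.h>

// Sketch: overlapping push-constant ranges, one per stage.
VkPipelineLayout MakeOverlappingPushConstantLayout(VkDevice device) {
    const VkPushConstantRange ranges[2] = {
        {VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(float) * 8},
        {VK_SHADER_STAGE_FRAGMENT_BIT, 0, sizeof(float) * 4},  // overlaps bytes 0..15 of the vertex range
    };

    VkPipelineLayoutCreateInfo layout_ci{VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO};
    layout_ci.pushConstantRangeCount = 2;
    layout_ci.pPushConstantRanges = ranges;

    VkPipelineLayout layout = VK_NULL_HANDLE;
    vkCreatePipelineLayout(device, &layout_ci, nullptr, &layout);
    return layout;
}
```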
diff --git a/src/third_party/vulkan-validation-layers/src/tests/vkrenderframework.cpp b/src/third_party/vulkan-validation-layers/src/tests/vkrenderframework.cpp
new file mode 100644
index 0000000..7c6ef0e
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/vkrenderframework.cpp
@@ -0,0 +1,2097 @@
+/*
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ * Copyright (c) 2015-2019 Google, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Tony Barbour <tony@LunarG.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ */
+
+#include "vkrenderframework.h"
+#include "vk_format_utils.h"
+
+#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))
+
+ErrorMonitor::ErrorMonitor() {
+    test_platform_thread_create_mutex(&mutex_);
+    test_platform_thread_lock_mutex(&mutex_);
+    Reset();
+    test_platform_thread_unlock_mutex(&mutex_);
+}
+
+ErrorMonitor::~ErrorMonitor() { test_platform_thread_delete_mutex(&mutex_); }
+
+void ErrorMonitor::Reset() {
+    message_flags_ = 0;
+    bailout_ = NULL;
+    message_found_ = VK_FALSE;
+    failure_message_strings_.clear();
+    desired_message_strings_.clear();
+    ignore_message_strings_.clear();
+    allowed_message_strings_.clear();
+    other_messages_.clear();
+}
+
+void ErrorMonitor::SetDesiredFailureMsg(const VkFlags msgFlags, const std::string msg) {
+    SetDesiredFailureMsg(msgFlags, msg.c_str());
+}
+
+void ErrorMonitor::SetDesiredFailureMsg(const VkFlags msgFlags, const char *const msgString) {
+    test_platform_thread_lock_mutex(&mutex_);
+    desired_message_strings_.insert(msgString);
+    message_flags_ |= msgFlags;
+    test_platform_thread_unlock_mutex(&mutex_);
+}
+
+void ErrorMonitor::SetAllowedFailureMsg(const char *const msg) {
+    test_platform_thread_lock_mutex(&mutex_);
+    allowed_message_strings_.emplace_back(msg);
+    test_platform_thread_unlock_mutex(&mutex_);
+}
+
+void ErrorMonitor::SetUnexpectedError(const char *const msg) {
+    test_platform_thread_lock_mutex(&mutex_);
+
+    ignore_message_strings_.emplace_back(msg);
+
+    test_platform_thread_unlock_mutex(&mutex_);
+}
+
+VkBool32 ErrorMonitor::CheckForDesiredMsg(const char *const msgString) {
+    VkBool32 result = VK_FALSE;
+    test_platform_thread_lock_mutex(&mutex_);
+    if (bailout_ != nullptr) {
+        *bailout_ = true;
+    }
+    string errorString(msgString);
+    bool found_expected = false;
+
+    if (!IgnoreMessage(errorString)) {
+        for (auto desired_msg_it = desired_message_strings_.begin(); desired_msg_it != desired_message_strings_.end();
+             ++desired_msg_it) {
+            if ((*desired_msg_it).length() == 0) {
+                // An empty desired_msg string "" indicates a positive test - not expecting an error.
+                // Return true to avoid calling layers/driver with this error.
+                // And don't erase the "" string, so it remains if another error is found.
+                result = VK_TRUE;
+                found_expected = true;
+                message_found_ = true;
+                failure_message_strings_.insert(errorString);
+            } else if (errorString.find(*desired_msg_it) != string::npos) {
+                found_expected = true;
+                failure_message_strings_.insert(errorString);
+                message_found_ = true;
+                result = VK_TRUE;
+                // Remove a maximum of one failure message from the set
+                // Multiset mutation is acceptable because `break` causes flow of control to exit the for loop
+                desired_message_strings_.erase(desired_msg_it);
+                break;
+            }
+        }
+
+        if (!found_expected && allowed_message_strings_.size()) {
+            for (auto allowed_msg_it = allowed_message_strings_.begin(); allowed_msg_it != allowed_message_strings_.end();
+                 ++allowed_msg_it) {
+                if (errorString.find(*allowed_msg_it) != string::npos) {
+                    found_expected = true;
+                    break;
+                }
+            }
+        }
+
+        if (!found_expected) {
+            printf("Unexpected: %s\n", msgString);
+            other_messages_.push_back(errorString);
+        }
+    }
+
+    test_platform_thread_unlock_mutex(&mutex_);
+    return result;
+}
+
+vector<string> ErrorMonitor::GetOtherFailureMsgs() const { return other_messages_; }
+
+VkDebugReportFlagsEXT ErrorMonitor::GetMessageFlags() const { return message_flags_; }
+
+bool ErrorMonitor::AnyDesiredMsgFound() const { return message_found_; }
+
+bool ErrorMonitor::AllDesiredMsgsFound() const { return desired_message_strings_.empty(); }
+
+void ErrorMonitor::SetError(const char *const errorString) {
+    message_found_ = true;
+    failure_message_strings_.insert(errorString);
+}
+
+void ErrorMonitor::SetBailout(bool *bailout) { bailout_ = bailout; }
+
+void ErrorMonitor::DumpFailureMsgs() const {
+    vector<string> otherMsgs = GetOtherFailureMsgs();
+    if (otherMsgs.size()) {
+        cout << "Other error messages logged for this test were:" << endl;
+        for (auto iter = otherMsgs.begin(); iter != otherMsgs.end(); iter++) {
+            cout << "     " << *iter << endl;
+        }
+    }
+}
+
+void ErrorMonitor::ExpectSuccess(VkDebugReportFlagsEXT const message_flag_mask) {
+    // Match ANY message matching specified type
+    SetDesiredFailureMsg(message_flag_mask, "");
+    message_flags_ = message_flag_mask;  // override mask handling in SetDesired...
+}
+
+void ErrorMonitor::VerifyFound() {
+    // Not receiving expected message(s) is a failure. /Before/ throwing, dump any other messages
+    if (!AllDesiredMsgsFound()) {
+        DumpFailureMsgs();
+        for (const auto desired_msg : desired_message_strings_) {
+            ADD_FAILURE() << "Did not receive expected error '" << desired_msg << "'";
+        }
+    } else if (GetOtherFailureMsgs().size() > 0) {
+        // Fail test case for any unexpected errors
+#if defined(ANDROID)
+        // This will get unexpected errors into the adb log
+        for (auto msg : other_messages_) {
+            __android_log_print(ANDROID_LOG_INFO, "VulkanLayerValidationTests", "[ UNEXPECTED_ERR ] '%s'", msg.c_str());
+        }
+#else
+        ADD_FAILURE() << "Received unexpected error(s).";
+#endif
+    }
+    Reset();
+}
+
+void ErrorMonitor::VerifyNotFound() {
+    // ExpectSuccess() configured us to match anything. Any error is a failure.
+    if (AnyDesiredMsgFound()) {
+        DumpFailureMsgs();
+        for (const auto msg : failure_message_strings_) {
+            ADD_FAILURE() << "Expected to succeed but got error: " << msg;
+        }
+    } else if (GetOtherFailureMsgs().size() > 0) {
+        // Fail test case for any unexpected errors
+#if defined(ANDROID)
+        // This will get unexpected errors into the adb log
+        for (auto msg : other_messages_) {
+            __android_log_print(ANDROID_LOG_INFO, "VulkanLayerValidationTests", "[ UNEXPECTED_ERR ] '%s'", msg.c_str());
+        }
+#else
+        ADD_FAILURE() << "Received unexpected error(s).";
+#endif
+    }
+    Reset();
+}
+
+bool ErrorMonitor::IgnoreMessage(std::string const &msg) const {
+    if (ignore_message_strings_.empty()) {
+        return false;
+    }
+
+    return std::find_if(ignore_message_strings_.begin(), ignore_message_strings_.end(), [&msg](std::string const &str) {
+               return msg.find(str) != std::string::npos;
+           }) != ignore_message_strings_.end();
+}
+
+VkRenderFramework::VkRenderFramework()
+    : inst(VK_NULL_HANDLE),
+      m_device(NULL),
+      m_commandPool(VK_NULL_HANDLE),
+      m_commandBuffer(NULL),
+      m_renderPass(VK_NULL_HANDLE),
+      m_framebuffer(VK_NULL_HANDLE),
+      m_surface(VK_NULL_HANDLE),
+      m_swapchain(VK_NULL_HANDLE),
+      m_addRenderPassSelfDependency(false),
+      m_width(256.0),   // default window width
+      m_height(256.0),  // default window height
+      m_render_target_fmt(VK_FORMAT_R8G8B8A8_UNORM),
+      m_depth_stencil_fmt(VK_FORMAT_UNDEFINED),
+      m_clear_via_load_op(true),
+      m_depth_clear_color(1.0),
+      m_stencil_clear_color(0),
+      m_depthStencil(NULL),
+      m_CreateDebugReportCallback(VK_NULL_HANDLE),
+      m_DestroyDebugReportCallback(VK_NULL_HANDLE),
+      m_globalMsgCallback(VK_NULL_HANDLE),
+      m_devMsgCallback(VK_NULL_HANDLE) {
+    memset(&m_renderPassBeginInfo, 0, sizeof(m_renderPassBeginInfo));
+    m_renderPassBeginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
+
+    m_errorMonitor = new ErrorMonitor;
+
+    // clear the back buffer to dark grey
+    m_clear_color.float32[0] = 0.25f;
+    m_clear_color.float32[1] = 0.25f;
+    m_clear_color.float32[2] = 0.25f;
+    m_clear_color.float32[3] = 0.0f;
+}
+
+VkRenderFramework::~VkRenderFramework() { ShutdownFramework(); }
+
+VkPhysicalDevice VkRenderFramework::gpu() {
+    EXPECT_NE((VkInstance)0, inst);  // Invalid to request gpu before instance exists
+    return objs[0];
+}
+
+// Return true if layer name is found and spec+implementation values are >= requested values
+bool VkRenderFramework::InstanceLayerSupported(const char *name, uint32_t spec, uint32_t implementation) {
+    uint32_t layer_count = 0;
+    std::vector<VkLayerProperties> layer_props;
+
+    VkResult res = vk::EnumerateInstanceLayerProperties(&layer_count, NULL);
+    if (VK_SUCCESS != res) return false;
+    if (0 == layer_count) return false;
+
+    layer_props.resize(layer_count);
+    res = vk::EnumerateInstanceLayerProperties(&layer_count, layer_props.data());
+    if (VK_SUCCESS != res) return false;
+
+    for (auto &it : layer_props) {
+        if (0 == strncmp(name, it.layerName, VK_MAX_EXTENSION_NAME_SIZE)) {
+            return ((it.specVersion >= spec) && (it.implementationVersion >= implementation));
+        }
+    }
+    return false;
+}
+
+// Enable the device profile layer as the last layer on the stack (replacing devsim if present); return false if unavailable
+bool VkRenderFramework::EnableDeviceProfileLayer() {
+    if (InstanceLayerSupported("VK_LAYER_LUNARG_device_profile_api")) {
+        if (VkTestFramework::m_devsim_layer) {
+            assert(0 == strcmp(m_instance_layer_names.back(), "VK_LAYER_LUNARG_device_simulation"));
+            m_instance_layer_names.pop_back();
+            m_instance_layer_names.push_back("VK_LAYER_LUNARG_device_profile_api");
+        } else {
+            m_instance_layer_names.push_back("VK_LAYER_LUNARG_device_profile_api");
+        }
+    } else {
+        printf("             Did not find VK_LAYER_LUNARG_device_profile_api layer; skipped.\n");
+        return false;
+    }
+    return true;
+}
+
+// Return true if extension name is found and spec value is >= requested spec value
+bool VkRenderFramework::InstanceExtensionSupported(const char *ext_name, uint32_t spec) {
+    uint32_t ext_count = 0;
+    std::vector<VkExtensionProperties> ext_props;
+    VkResult res = vk::EnumerateInstanceExtensionProperties(nullptr, &ext_count, nullptr);
+    if (VK_SUCCESS != res) return false;
+    if (0 == ext_count) return false;
+
+    ext_props.resize(ext_count);
+    res = vk::EnumerateInstanceExtensionProperties(nullptr, &ext_count, ext_props.data());
+    if (VK_SUCCESS != res) return false;
+
+    for (auto &it : ext_props) {
+        if (0 == strncmp(ext_name, it.extensionName, VK_MAX_EXTENSION_NAME_SIZE)) {
+            return (it.specVersion >= spec);
+        }
+    }
+    return false;
+}
+
+// Return true if instance exists and extension name is in the list
+bool VkRenderFramework::InstanceExtensionEnabled(const char *ext_name) {
+    if (!inst) return false;
+
+    bool ext_found = false;
+    for (auto ext : m_instance_extension_names) {
+        if (!strcmp(ext, ext_name)) {
+            ext_found = true;
+            break;
+        }
+    }
+    return ext_found;
+}
+
+// Return true if extension name is found and spec value is >= requested spec value
+bool VkRenderFramework::DeviceExtensionSupported(VkPhysicalDevice dev, const char *layer, const char *ext_name, uint32_t spec) {
+    if (!inst) {
+        EXPECT_NE((VkInstance)0, inst);  // Complain, not cool without an instance
+        return false;
+    }
+    uint32_t ext_count = 0;
+    std::vector<VkExtensionProperties> ext_props;
+    VkResult res = vk::EnumerateDeviceExtensionProperties(dev, layer, &ext_count, nullptr);
+    if (VK_SUCCESS != res) return false;
+    if (0 == ext_count) return false;
+
+    ext_props.resize(ext_count);
+    res = vk::EnumerateDeviceExtensionProperties(dev, layer, &ext_count, ext_props.data());
+    if (VK_SUCCESS != res) return false;
+
+    for (auto &it : ext_props) {
+        if (0 == strncmp(ext_name, it.extensionName, VK_MAX_EXTENSION_NAME_SIZE)) {
+            return (it.specVersion >= spec);
+        }
+    }
+    return false;
+}
+
+// Return true if device is created and extension name is found in the list
+bool VkRenderFramework::DeviceExtensionEnabled(const char *ext_name) {
+    if (NULL == m_device) return false;
+
+    bool ext_found = false;
+    for (auto ext : m_device_extension_names) {
+        if (!strcmp(ext, ext_name)) {
+            ext_found = true;
+            break;
+        }
+    }
+    return ext_found;
+}
+
+// WARNING:  The DevSim layer can override the properties that are tested here, making the result of
+// this function dubious when DevSim is active.
+bool VkRenderFramework::DeviceIsMockICD() {
+    VkPhysicalDeviceProperties props = vk_testing::PhysicalDevice(gpu()).properties();
+    if ((props.vendorID == 0xba5eba11) && (props.deviceID == 0xf005ba11) && (0 == strcmp("Vulkan Mock Device", props.deviceName))) {
+        return true;
+    }
+    return false;
+}
+
+// Some tests may need to be skipped if the devsim layer is in use.
+bool VkRenderFramework::DeviceSimulation() { return m_devsim_layer; }
+
+void VkRenderFramework::InitFramework(PFN_vkDebugReportCallbackEXT dbgFunction, void *userData, void *instance_pnext) {
+    // Only enable device profile layer by default if devsim is not enabled
+    if (!VkTestFramework::m_devsim_layer && InstanceLayerSupported("VK_LAYER_LUNARG_device_profile_api")) {
+        m_instance_layer_names.push_back("VK_LAYER_LUNARG_device_profile_api");
+    }
+
+    // Assert not already initialized
+    ASSERT_EQ((VkInstance)0, inst);
+
+    // Remove any unsupported layer names from list
+    for (auto layer = m_instance_layer_names.begin(); layer != m_instance_layer_names.end();) {
+        if (!InstanceLayerSupported(*layer)) {
+            ADD_FAILURE() << "InitFramework(): Requested layer " << *layer << " was not found. Disabled.";
+            layer = m_instance_layer_names.erase(layer);
+        } else {
+            ++layer;
+        }
+    }
+
+    // Remove any unsupported instance extension names from list
+    for (auto ext = m_instance_extension_names.begin(); ext != m_instance_extension_names.end();) {
+        if (!InstanceExtensionSupported(*ext)) {
+            ADD_FAILURE() << "InitFramework(): Requested extension " << *ext << " was not found. Disabled.";
+            ext = m_instance_extension_names.erase(ext);
+        } else {
+            ++ext;
+        }
+    }
+
+    VkInstanceCreateInfo instInfo = {};
+    instInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
+    instInfo.pNext = instance_pnext;
+    instInfo.pApplicationInfo = &app_info;
+    instInfo.enabledLayerCount = m_instance_layer_names.size();
+    instInfo.ppEnabledLayerNames = m_instance_layer_names.data();
+    instInfo.enabledExtensionCount = m_instance_extension_names.size();
+    instInfo.ppEnabledExtensionNames = m_instance_extension_names.data();
+
+    VkDebugReportCallbackCreateInfoEXT dbgCreateInfo;
+    if (dbgFunction) {
+        // Enable create time debug messages
+        memset(&dbgCreateInfo, 0, sizeof(dbgCreateInfo));
+        dbgCreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
+        dbgCreateInfo.flags =
+            VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT | VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT;
+        dbgCreateInfo.pfnCallback = dbgFunction;
+        dbgCreateInfo.pUserData = userData;
+
+        dbgCreateInfo.pNext = instInfo.pNext;
+        instInfo.pNext = &dbgCreateInfo;
+    }
+
+    VkResult err;
+
+    err = vk::CreateInstance(&instInfo, NULL, &this->inst);
+    ASSERT_VK_SUCCESS(err);
+
+    err = vk::EnumeratePhysicalDevices(inst, &this->gpu_count, NULL);
+    ASSERT_LE(this->gpu_count, ARRAY_SIZE(objs)) << "Too many gpus";
+    ASSERT_VK_SUCCESS(err);
+    err = vk::EnumeratePhysicalDevices(inst, &this->gpu_count, objs);
+    ASSERT_VK_SUCCESS(err);
+    ASSERT_GE(this->gpu_count, (uint32_t)1) << "No GPU available";
+
+    if (dbgFunction) {
+        m_CreateDebugReportCallback =
+            (PFN_vkCreateDebugReportCallbackEXT)vk::GetInstanceProcAddr(this->inst, "vkCreateDebugReportCallbackEXT");
+        ASSERT_NE(m_CreateDebugReportCallback, (PFN_vkCreateDebugReportCallbackEXT)NULL)
+            << "Did not get function pointer for CreateDebugReportCallback";
+        if (m_CreateDebugReportCallback) {
+            dbgCreateInfo.pNext = nullptr;  // clean up from usage in CreateInstance above
+            err = m_CreateDebugReportCallback(this->inst, &dbgCreateInfo, NULL, &m_globalMsgCallback);
+            ASSERT_VK_SUCCESS(err);
+
+            m_DestroyDebugReportCallback =
+                (PFN_vkDestroyDebugReportCallbackEXT)vk::GetInstanceProcAddr(this->inst, "vkDestroyDebugReportCallbackEXT");
+            ASSERT_NE(m_DestroyDebugReportCallback, (PFN_vkDestroyDebugReportCallbackEXT)NULL)
+                << "Did not get function pointer for DestroyDebugReportCallback";
+            m_DebugReportMessage = (PFN_vkDebugReportMessageEXT)vk::GetInstanceProcAddr(this->inst, "vkDebugReportMessageEXT");
+            ASSERT_NE(m_DebugReportMessage, (PFN_vkDebugReportMessageEXT)NULL)
+                << "Did not get function pointer for DebugReportMessage";
+        }
+    }
+}
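The pNext chaining that InitFramework performs is what lets vkCreateInstance itself report messages, before any callback object exists. A minimal sketch of that idea on its own, assuming VK_EXT_debug_report is available and `DbgCallback` is a user function with the PFN_vkDebugReportCallbackEXT signature (names illustrative, error handling omitted):

```cpp
#include <vulkan/vulkan.h>

// Sketch: route create-time messages to a debug-report callback via VkInstanceCreateInfo::pNext.
VkInstance CreateInstanceWithCreateTimeMessages(PFN_vkDebugReportCallbackEXT DbgCallback) {
    VkDebugReportCallbackCreateInfoEXT dbg_ci{VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT};
    dbg_ci.flags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT;
    dbg_ci.pfnCallback = DbgCallback;

    const char *extensions[] = {VK_EXT_DEBUG_REPORT_EXTENSION_NAME};

    VkInstanceCreateInfo inst_ci{VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO};
    inst_ci.pNext = &dbg_ci;  // messages emitted during vkCreateInstance go to DbgCallback
    inst_ci.enabledExtensionCount = 1;
    inst_ci.ppEnabledExtensionNames = extensions;

    VkInstance instance = VK_NULL_HANDLE;
    vkCreateInstance(&inst_ci, nullptr, &instance);
    return instance;
}
```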
+
+void VkRenderFramework::ShutdownFramework() {
+    // Nothing to shut down without a VkInstance
+    if (!this->inst) return;
+
+    delete m_commandBuffer;
+    m_commandBuffer = nullptr;
+    delete m_commandPool;
+    m_commandPool = nullptr;
+    if (m_framebuffer) vk::DestroyFramebuffer(device(), m_framebuffer, NULL);
+    m_framebuffer = VK_NULL_HANDLE;
+    if (m_renderPass) vk::DestroyRenderPass(device(), m_renderPass, NULL);
+    m_renderPass = VK_NULL_HANDLE;
+
+    if (m_globalMsgCallback) m_DestroyDebugReportCallback(this->inst, m_globalMsgCallback, NULL);
+    m_globalMsgCallback = VK_NULL_HANDLE;
+    if (m_devMsgCallback) m_DestroyDebugReportCallback(this->inst, m_devMsgCallback, NULL);
+    m_devMsgCallback = VK_NULL_HANDLE;
+
+    m_renderTargets.clear();
+
+    delete m_depthStencil;
+    m_depthStencil = nullptr;
+
+    // destroy the logical device
+    delete m_device;
+    m_device = nullptr;
+
+    if (this->inst) vk::DestroyInstance(this->inst, NULL);
+    delete m_errorMonitor;
+    this->inst = (VkInstance)0;  // In case we want to re-initialize
+}
+
+ErrorMonitor *VkRenderFramework::Monitor() { return m_errorMonitor; }
+
+void VkRenderFramework::GetPhysicalDeviceFeatures(VkPhysicalDeviceFeatures *features) {
+    if (NULL == m_device) {
+        VkDeviceObj *temp_device = new VkDeviceObj(0, objs[0], m_device_extension_names);
+        *features = temp_device->phy().features();
+        delete (temp_device);
+    } else {
+        *features = m_device->phy().features();
+    }
+}
+
+void VkRenderFramework::GetPhysicalDeviceProperties(VkPhysicalDeviceProperties *props) {
+    *props = vk_testing::PhysicalDevice(gpu()).properties();
+}
+
+void VkRenderFramework::InitState(VkPhysicalDeviceFeatures *features, void *create_device_pnext,
+                                  const VkCommandPoolCreateFlags flags) {
+    // Remove any unsupported device extension names from list
+    for (auto ext = m_device_extension_names.begin(); ext != m_device_extension_names.end();) {
+        if (!DeviceExtensionSupported(objs[0], nullptr, *ext)) {
+            bool found = false;
+            for (auto layer = m_instance_layer_names.begin(); layer != m_instance_layer_names.end(); ++layer) {
+                if (DeviceExtensionSupported(objs[0], *layer, *ext)) {
+                    found = true;
+                    break;
+                }
+            }
+            if (!found) {
+                ADD_FAILURE() << "InitState(): The requested device extension " << *ext << " was not found. Disabled.";
+                ext = m_device_extension_names.erase(ext);
+            } else {
+                ++ext;
+            }
+        } else {
+            ++ext;
+        }
+    }
+
+    m_device = new VkDeviceObj(0, objs[0], m_device_extension_names, features, create_device_pnext);
+    m_device->SetDeviceQueue();
+
+    m_depthStencil = new VkDepthStencilObj(m_device);
+
+    m_render_target_fmt = VkTestFramework::GetFormat(inst, m_device);
+
+    m_lineWidth = 1.0f;
+
+    m_depthBiasConstantFactor = 0.0f;
+    m_depthBiasClamp = 0.0f;
+    m_depthBiasSlopeFactor = 0.0f;
+
+    m_blendConstants[0] = 1.0f;
+    m_blendConstants[1] = 1.0f;
+    m_blendConstants[2] = 1.0f;
+    m_blendConstants[3] = 1.0f;
+
+    m_minDepthBounds = 0.f;
+    m_maxDepthBounds = 1.f;
+
+    m_compareMask = 0xff;
+    m_writeMask = 0xff;
+    m_reference = 0;
+
+    m_commandPool = new VkCommandPoolObj(m_device, m_device->graphics_queue_node_index_, flags);
+
+    m_commandBuffer = new VkCommandBufferObj(m_device, m_commandPool);
+}
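+
+// Note: tests are expected to call InitState() before InitRenderTarget() below,
+// because InitRenderTarget() builds its color images and render pass against the
+// m_device created here.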
+
+void VkRenderFramework::InitViewport(float width, float height) {
+    VkViewport viewport;
+    VkRect2D scissor;
+    viewport.x = 0;
+    viewport.y = 0;
+    viewport.width = 1.f * width;
+    viewport.height = 1.f * height;
+    viewport.minDepth = 0.f;
+    viewport.maxDepth = 1.f;
+    m_viewports.push_back(viewport);
+
+    scissor.extent.width = (uint32_t)width;
+    scissor.extent.height = (uint32_t)height;
+    scissor.offset.x = 0;
+    scissor.offset.y = 0;
+    m_scissors.push_back(scissor);
+
+    m_width = width;
+    m_height = height;
+}
+
+void VkRenderFramework::InitViewport() { InitViewport(m_width, m_height); }
+
+bool VkRenderFramework::InitSurface() { return InitSurface(m_width, m_height); }
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+LRESULT CALLBACK WindowProc(HWND hwnd, UINT uMsg, WPARAM wParam, LPARAM lParam) {
+    return DefWindowProc(hwnd, uMsg, wParam, lParam);
+}
+#endif  // VK_USE_PLATFORM_WIN32_KHR
+
+bool VkRenderFramework::InitSurface(float width, float height) {
+#if defined(VK_USE_PLATFORM_WIN32_KHR)
+    HINSTANCE window_instance = GetModuleHandle(nullptr);
+    const char class_name[] = "test";
+    WNDCLASS wc = {};
+    wc.lpfnWndProc = WindowProc;
+    wc.hInstance = window_instance;
+    wc.lpszClassName = class_name;
+    RegisterClass(&wc);
+    HWND window = CreateWindowEx(0, class_name, 0, 0, 0, 0, (int)m_width, (int)m_height, NULL, NULL, window_instance, NULL);
+    ShowWindow(window, SW_HIDE);
+
+    VkWin32SurfaceCreateInfoKHR surface_create_info = {};
+    surface_create_info.sType = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR;
+    surface_create_info.hinstance = window_instance;
+    surface_create_info.hwnd = window;
+    VkResult err = vk::CreateWin32SurfaceKHR(instance(), &surface_create_info, nullptr, &m_surface);
+    if (err != VK_SUCCESS) return false;
+#endif
+
+#if defined(VK_USE_PLATFORM_ANDROID_KHR) && defined(VALIDATION_APK)
+    VkAndroidSurfaceCreateInfoKHR surface_create_info = {};
+    surface_create_info.sType = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR;
+    surface_create_info.window = VkTestFramework::window;
+    VkResult err = vk::CreateAndroidSurfaceKHR(instance(), &surface_create_info, nullptr, &m_surface);
+    if (err != VK_SUCCESS) return false;
+#endif
+
+#if defined(VK_USE_PLATFORM_XLIB_KHR)
+    Display *dpy = XOpenDisplay(NULL);
+    if (dpy) {
+        int s = DefaultScreen(dpy);
+        Window window = XCreateSimpleWindow(dpy, RootWindow(dpy, s), 0, 0, (int)m_width, (int)m_height, 1, BlackPixel(dpy, s),
+                                            WhitePixel(dpy, s));
+        VkXlibSurfaceCreateInfoKHR surface_create_info = {};
+        surface_create_info.sType = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR;
+        surface_create_info.dpy = dpy;
+        surface_create_info.window = window;
+        VkResult err = vk::CreateXlibSurfaceKHR(instance(), &surface_create_info, nullptr, &m_surface);
+        if (err != VK_SUCCESS) return false;
+    }
+#endif
+
+#if defined(VK_USE_PLATFORM_XCB_KHR)
+    if (m_surface == VK_NULL_HANDLE) {
+        xcb_connection_t *connection = xcb_connect(NULL, NULL);
+        if (connection) {
+            xcb_window_t window = xcb_generate_id(connection);
+            VkXcbSurfaceCreateInfoKHR surface_create_info = {};
+            surface_create_info.sType = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR;
+            surface_create_info.connection = connection;
+            surface_create_info.window = window;
+            VkResult err = vk::CreateXcbSurfaceKHR(instance(), &surface_create_info, nullptr, &m_surface);
+            if (err != VK_SUCCESS) return false;
+        }
+    }
+#endif
+
+    return m_surface != VK_NULL_HANDLE;
+}
+
+bool VkRenderFramework::InitSwapchain(VkImageUsageFlags imageUsage, VkSurfaceTransformFlagBitsKHR preTransform) {
+    if (InitSurface()) {
+        return InitSwapchain(m_surface, imageUsage, preTransform);
+    }
+    return false;
+}
+
+bool VkRenderFramework::InitSwapchain(VkSurfaceKHR &surface, VkImageUsageFlags imageUsage,
+                                      VkSurfaceTransformFlagBitsKHR preTransform) {
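+    // Query present support for each queue family up front; the per-family result
+    // is not consumed by this helper.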
+    for (size_t i = 0; i < m_device->queue_props.size(); ++i) {
+        VkBool32 presentSupport = false;
+        vk::GetPhysicalDeviceSurfaceSupportKHR(m_device->phy().handle(), i, surface, &presentSupport);
+    }
+
+    VkSurfaceCapabilitiesKHR capabilities;
+    vk::GetPhysicalDeviceSurfaceCapabilitiesKHR(m_device->phy().handle(), surface, &capabilities);
+
+    uint32_t format_count;
+    vk::GetPhysicalDeviceSurfaceFormatsKHR(m_device->phy().handle(), surface, &format_count, nullptr);
+    std::vector<VkSurfaceFormatKHR> formats;
+    if (format_count != 0) {
+        formats.resize(format_count);
+        vk::GetPhysicalDeviceSurfaceFormatsKHR(m_device->phy().handle(), surface, &format_count, formats.data());
+    }
+
+    uint32_t present_mode_count;
+    vk::GetPhysicalDeviceSurfacePresentModesKHR(m_device->phy().handle(), surface, &present_mode_count, nullptr);
+    std::vector<VkPresentModeKHR> present_modes;
+    if (present_mode_count != 0) {
+        present_modes.resize(present_mode_count);
+        vk::GetPhysicalDeviceSurfacePresentModesKHR(m_device->phy().handle(), surface, &present_mode_count, present_modes.data());
+    }
+
+    VkSwapchainCreateInfoKHR swapchain_create_info = {};
+    swapchain_create_info.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
+    swapchain_create_info.pNext = 0;
+    swapchain_create_info.surface = surface;
+    swapchain_create_info.minImageCount = capabilities.minImageCount;
+    swapchain_create_info.imageFormat = formats[0].format;
+    swapchain_create_info.imageColorSpace = formats[0].colorSpace;
+    swapchain_create_info.imageExtent = {capabilities.minImageExtent.width, capabilities.minImageExtent.height};
+    swapchain_create_info.imageArrayLayers = 1;
+    swapchain_create_info.imageUsage = imageUsage;
+    swapchain_create_info.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
+    swapchain_create_info.preTransform = preTransform;
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    swapchain_create_info.compositeAlpha = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR;
+#else
+    swapchain_create_info.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
+#endif
+    swapchain_create_info.presentMode = present_modes[0];
+    swapchain_create_info.clipped = VK_FALSE;
+    swapchain_create_info.oldSwapchain = 0;
+
+    VkResult err = vk::CreateSwapchainKHR(device(), &swapchain_create_info, nullptr, &m_swapchain);
+    if (err != VK_SUCCESS) {
+        return false;
+    }
+    uint32_t imageCount = 0;
+    vk::GetSwapchainImagesKHR(device(), m_swapchain, &imageCount, nullptr);
+    std::vector<VkImage> swapchainImages;
+    swapchainImages.resize(imageCount);
+    vk::GetSwapchainImagesKHR(device(), m_swapchain, &imageCount, swapchainImages.data());
+    return true;
+}
+
+void VkRenderFramework::DestroySwapchain() {
+    if (m_swapchain != VK_NULL_HANDLE) {
+        vk::DestroySwapchainKHR(device(), m_swapchain, nullptr);
+        m_swapchain = VK_NULL_HANDLE;
+    }
+    if (m_surface != VK_NULL_HANDLE) {
+        vk::DestroySurfaceKHR(instance(), m_surface, nullptr);
+        m_surface = VK_NULL_HANDLE;
+    }
+}
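+
+// Illustrative sketch (not part of the framework): a WSI-oriented test would
+// typically pair the helpers above roughly as follows; surface/swapchain creation
+// can fail on headless platforms, so tests guard on the return value.
+//
+//     if (!InitSwapchain()) {
+//         // no presentable surface available on this platform; skip the test
+//         return;
+//     }
+//     ...  // work against m_swapchain / m_surface
+//     DestroySwapchain();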
+
+void VkRenderFramework::InitRenderTarget() { InitRenderTarget(1); }
+
+void VkRenderFramework::InitRenderTarget(uint32_t targets) { InitRenderTarget(targets, NULL); }
+
+void VkRenderFramework::InitRenderTarget(VkImageView *dsBinding) { InitRenderTarget(1, dsBinding); }
+
+void VkRenderFramework::InitRenderTarget(uint32_t targets, VkImageView *dsBinding) {
+    std::vector<VkAttachmentDescription> attachments;
+    std::vector<VkAttachmentReference> color_references;
+    std::vector<VkImageView> bindings;
+    attachments.reserve(targets + 1);  // +1 for dsBinding
+    color_references.reserve(targets);
+    bindings.reserve(targets + 1);  // +1 for dsBinding
+
+    VkAttachmentDescription att = {};
+    att.format = m_render_target_fmt;
+    att.samples = VK_SAMPLE_COUNT_1_BIT;
+    att.loadOp = (m_clear_via_load_op) ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_LOAD;
+    att.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
+    att.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
+    att.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
+    att.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    att.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+
+    VkAttachmentReference ref = {};
+    ref.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+
+    m_renderPassClearValues.clear();
+    VkClearValue clear = {};
+    clear.color = m_clear_color;
+
+    for (uint32_t i = 0; i < targets; i++) {
+        attachments.push_back(att);
+
+        ref.attachment = i;
+        color_references.push_back(ref);
+
+        m_renderPassClearValues.push_back(clear);
+
+        std::unique_ptr<VkImageObj> img(new VkImageObj(m_device));
+
+        VkFormatProperties props;
+
+        vk::GetPhysicalDeviceFormatProperties(m_device->phy().handle(), m_render_target_fmt, &props);
+
+        if (props.linearTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) {
+            img->Init((uint32_t)m_width, (uint32_t)m_height, 1, m_render_target_fmt,
+                      VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+                      VK_IMAGE_TILING_LINEAR);
+        } else if (props.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) {
+            img->Init((uint32_t)m_width, (uint32_t)m_height, 1, m_render_target_fmt,
+                      VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+                      VK_IMAGE_TILING_OPTIMAL);
+        } else {
+            FAIL() << "Neither Linear nor Optimal allowed for render target";
+        }
+
+        bindings.push_back(img->targetView(m_render_target_fmt));
+        m_renderTargets.push_back(std::move(img));
+    }
+
+    VkSubpassDescription subpass = {};
+    subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
+    subpass.flags = 0;
+    subpass.inputAttachmentCount = 0;
+    subpass.pInputAttachments = NULL;
+    subpass.colorAttachmentCount = targets;
+    subpass.pColorAttachments = color_references.data();
+    subpass.pResolveAttachments = NULL;
+
+    VkAttachmentReference ds_reference;
+    if (dsBinding) {
+        att.format = m_depth_stencil_fmt;
+        att.loadOp = (m_clear_via_load_op) ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_LOAD;
+        att.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
+        att.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
+        att.stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE;
+        att.initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+        att.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+        attachments.push_back(att);
+
+        clear.depthStencil.depth = m_depth_clear_color;
+        clear.depthStencil.stencil = m_stencil_clear_color;
+        m_renderPassClearValues.push_back(clear);
+
+        bindings.push_back(*dsBinding);
+
+        ds_reference.attachment = targets;
+        ds_reference.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+        subpass.pDepthStencilAttachment = &ds_reference;
+    } else {
+        subpass.pDepthStencilAttachment = NULL;
+    }
+
+    subpass.preserveAttachmentCount = 0;
+    subpass.pPreserveAttachments = NULL;
+
+    VkRenderPassCreateInfo rp_info = {};
+    rp_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+    rp_info.attachmentCount = attachments.size();
+    rp_info.pAttachments = attachments.data();
+    rp_info.subpassCount = 1;
+    rp_info.pSubpasses = &subpass;
+    VkSubpassDependency subpass_dep = {};
+    if (m_addRenderPassSelfDependency) {
+        // Add a subpass self-dependency to subpass 0 of default renderPass
+        subpass_dep.srcSubpass = 0;
+        subpass_dep.dstSubpass = 0;
+        // Just using all framebuffer-space pipeline stages in order to get a reasonably large
+        //  set of bits that can be used for both src & dst
+        subpass_dep.srcStageMask = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT |
+                                   VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
+        subpass_dep.dstStageMask = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT |
+                                   VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
+        // Add all of the gfx mem access bits that correlate to the fb-space pipeline stages
+        subpass_dep.srcAccessMask = VK_ACCESS_UNIFORM_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_SHADER_READ_BIT |
+                                    VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
+                                    VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
+                                    VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
+        subpass_dep.dstAccessMask = VK_ACCESS_UNIFORM_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_SHADER_READ_BIT |
+                                    VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
+                                    VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
+                                    VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
+        // Must include dep_by_region bit when src & dst both include framebuffer-space stages
+        subpass_dep.dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT;
+        rp_info.dependencyCount = 1;
+        rp_info.pDependencies = &subpass_dep;
+    }
+
+    vk::CreateRenderPass(device(), &rp_info, NULL, &m_renderPass);
+    renderPass_info_ = rp_info;  // Save away a copy for tests that need access to the render pass state
+    // Create Framebuffer and RenderPass with color attachments and any
+    // depth/stencil attachment
+    VkFramebufferCreateInfo fb_info = {};
+    fb_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
+    fb_info.pNext = NULL;
+    fb_info.renderPass = m_renderPass;
+    fb_info.attachmentCount = bindings.size();
+    fb_info.pAttachments = bindings.data();
+    fb_info.width = (uint32_t)m_width;
+    fb_info.height = (uint32_t)m_height;
+    fb_info.layers = 1;
+
+    vk::CreateFramebuffer(device(), &fb_info, NULL, &m_framebuffer);
+
+    m_renderPassBeginInfo.renderPass = m_renderPass;
+    m_renderPassBeginInfo.framebuffer = m_framebuffer;
+    m_renderPassBeginInfo.renderArea.extent.width = (uint32_t)m_width;
+    m_renderPassBeginInfo.renderArea.extent.height = (uint32_t)m_height;
+    m_renderPassBeginInfo.clearValueCount = m_renderPassClearValues.size();
+    m_renderPassBeginInfo.pClearValues = m_renderPassClearValues.data();
+}
+
+void VkRenderFramework::DestroyRenderTarget() {
+    vk::DestroyRenderPass(device(), m_renderPass, nullptr);
+    m_renderPass = VK_NULL_HANDLE;
+    vk::DestroyFramebuffer(device(), m_framebuffer, nullptr);
+    m_framebuffer = VK_NULL_HANDLE;
+}
+
+VkDeviceObj::VkDeviceObj(uint32_t id, VkPhysicalDevice obj) : vk_testing::Device(obj), id(id) {
+    init();
+
+    props = phy().properties();
+    queue_props = phy().queue_properties();
+}
+
+VkDeviceObj::VkDeviceObj(uint32_t id, VkPhysicalDevice obj, std::vector<const char *> &extension_names,
+                         VkPhysicalDeviceFeatures *features, void *create_device_pnext)
+    : vk_testing::Device(obj), id(id) {
+    init(extension_names, features, create_device_pnext);
+
+    props = phy().properties();
+    queue_props = phy().queue_properties();
+}
+
+uint32_t VkDeviceObj::QueueFamilyMatching(VkQueueFlags with, VkQueueFlags without, bool all_bits) {
+    // Find a queue family with and without desired capabilities
+    for (uint32_t i = 0; i < queue_props.size(); i++) {
+        auto flags = queue_props[i].queueFlags;
+        bool matches = all_bits ? (flags & with) == with : (flags & with) != 0;
+        if (matches && ((flags & without) == 0) && (queue_props[i].queueCount > 0)) {
+            return i;
+        }
+    }
+    return UINT32_MAX;
+}
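+
+// Example (illustrative): QueueFamilyMatching(VK_QUEUE_TRANSFER_BIT,
+// VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT) returns the index of a family
+// that supports transfer but neither graphics nor compute, or UINT32_MAX if
+// no such family exists.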
+
+void VkDeviceObj::SetDeviceQueue() {
+    ASSERT_FALSE(graphics_queues().empty());
+    m_queue = graphics_queues()[0]->handle();
+}
+
+VkQueueObj *VkDeviceObj::GetDefaultQueue() {
+    if (graphics_queues().empty()) return nullptr;
+    return graphics_queues()[0];
+}
+
+VkQueueObj *VkDeviceObj::GetDefaultComputeQueue() {
+    if (compute_queues().empty()) return nullptr;
+    return compute_queues()[0];
+}
+
+VkDescriptorSetLayoutObj::VkDescriptorSetLayoutObj(const VkDeviceObj *device,
+                                                   const std::vector<VkDescriptorSetLayoutBinding> &descriptor_set_bindings,
+                                                   VkDescriptorSetLayoutCreateFlags flags, void *pNext) {
+    VkDescriptorSetLayoutCreateInfo dsl_ci = {};
+    dsl_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
+    dsl_ci.pNext = pNext;
+    dsl_ci.flags = flags;
+    dsl_ci.bindingCount = static_cast<uint32_t>(descriptor_set_bindings.size());
+    dsl_ci.pBindings = descriptor_set_bindings.data();
+
+    init(*device, dsl_ci);
+}
+
+VkDescriptorSetObj::VkDescriptorSetObj(VkDeviceObj *device) : m_device(device), m_nextSlot(0) {}
+
+VkDescriptorSetObj::~VkDescriptorSetObj() {
+    if (m_set) {
+        delete m_set;
+    }
+}
+
+int VkDescriptorSetObj::AppendDummy() {
+    /* request a descriptor but do not update it */
+    VkDescriptorSetLayoutBinding binding = {};
+    binding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
+    binding.descriptorCount = 1;
+    binding.binding = m_layout_bindings.size();
+    binding.stageFlags = VK_SHADER_STAGE_ALL;
+    binding.pImmutableSamplers = NULL;
+
+    m_layout_bindings.push_back(binding);
+    m_type_counts[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER] += binding.descriptorCount;
+
+    return m_nextSlot++;
+}
+
+int VkDescriptorSetObj::AppendBuffer(VkDescriptorType type, VkConstantBufferObj &constantBuffer) {
+    assert(type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER || type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
+           type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER || type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
+    VkDescriptorSetLayoutBinding binding = {};
+    binding.descriptorType = type;
+    binding.descriptorCount = 1;
+    binding.binding = m_layout_bindings.size();
+    binding.stageFlags = VK_SHADER_STAGE_ALL;
+    binding.pImmutableSamplers = NULL;
+
+    m_layout_bindings.push_back(binding);
+    m_type_counts[type] += binding.descriptorCount;
+
+    m_writes.push_back(vk_testing::Device::write_descriptor_set(vk_testing::DescriptorSet(), m_nextSlot, 0, type, 1,
+                                                                &constantBuffer.m_descriptorBufferInfo));
+
+    return m_nextSlot++;
+}
+
+int VkDescriptorSetObj::AppendSamplerTexture(VkSamplerObj *sampler, VkTextureObj *texture) {
+    VkDescriptorSetLayoutBinding binding = {};
+    binding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+    binding.descriptorCount = 1;
+    binding.binding = m_layout_bindings.size();
+    binding.stageFlags = VK_SHADER_STAGE_ALL;
+    binding.pImmutableSamplers = NULL;
+
+    m_layout_bindings.push_back(binding);
+    m_type_counts[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER] += binding.descriptorCount;
+    VkDescriptorImageInfo tmp = texture->DescriptorImageInfo();
+    tmp.sampler = sampler->handle();
+    m_imageSamplerDescriptors.push_back(tmp);
+
+    m_writes.push_back(vk_testing::Device::write_descriptor_set(vk_testing::DescriptorSet(), m_nextSlot, 0,
+                                                                VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, &tmp));
+
+    return m_nextSlot++;
+}
+
+VkPipelineLayout VkDescriptorSetObj::GetPipelineLayout() const { return m_pipeline_layout.handle(); }
+
+VkDescriptorSet VkDescriptorSetObj::GetDescriptorSetHandle() const {
+    if (m_set)
+        return m_set->handle();
+    else
+        return VK_NULL_HANDLE;
+}
+
+void VkDescriptorSetObj::CreateVKDescriptorSet(VkCommandBufferObj *commandBuffer) {
+    if (m_type_counts.size()) {
+        // create VkDescriptorPool
+        VkDescriptorPoolSize poolSize;
+        vector<VkDescriptorPoolSize> sizes;
+        for (auto it = m_type_counts.begin(); it != m_type_counts.end(); ++it) {
+            poolSize.descriptorCount = it->second;
+            poolSize.type = it->first;
+            sizes.push_back(poolSize);
+        }
+        VkDescriptorPoolCreateInfo pool = {};
+        pool.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+        pool.poolSizeCount = sizes.size();
+        pool.maxSets = 1;
+        pool.pPoolSizes = sizes.data();
+        init(*m_device, pool);
+    }
+
+    // create VkDescriptorSetLayout
+    VkDescriptorSetLayoutCreateInfo layout = {};
+    layout.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
+    layout.bindingCount = m_layout_bindings.size();
+    layout.pBindings = m_layout_bindings.data();
+
+    m_layout.init(*m_device, layout);
+    vector<const vk_testing::DescriptorSetLayout *> layouts;
+    layouts.push_back(&m_layout);
+
+    // create VkPipelineLayout
+    VkPipelineLayoutCreateInfo pipeline_layout = {};
+    pipeline_layout.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+    pipeline_layout.setLayoutCount = layouts.size();
+    pipeline_layout.pSetLayouts = NULL;
+
+    m_pipeline_layout.init(*m_device, pipeline_layout, layouts);
+
+    if (m_type_counts.size()) {
+        // create VkDescriptorSet
+        m_set = alloc_sets(*m_device, m_layout);
+
+        // build the update array
+        size_t imageSamplerCount = 0;
+        for (std::vector<VkWriteDescriptorSet>::iterator it = m_writes.begin(); it != m_writes.end(); it++) {
+            it->dstSet = m_set->handle();
+            if (it->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
+                it->pImageInfo = &m_imageSamplerDescriptors[imageSamplerCount++];
+        }
+
+        // do the updates
+        m_device->update_descriptor_sets(m_writes);
+    }
+}
+
+VkRenderpassObj::VkRenderpassObj(VkDeviceObj *dev) {
+    // Create a renderPass with a single color attachment
+    VkAttachmentReference attach = {};
+    attach.layout = VK_IMAGE_LAYOUT_GENERAL;
+
+    VkSubpassDescription subpass = {};
+    subpass.pColorAttachments = &attach;
+    subpass.colorAttachmentCount = 1;
+
+    VkRenderPassCreateInfo rpci = {};
+    rpci.subpassCount = 1;
+    rpci.pSubpasses = &subpass;
+    rpci.attachmentCount = 1;
+
+    VkAttachmentDescription attach_desc = {};
+    attach_desc.format = VK_FORMAT_B8G8R8A8_UNORM;
+    attach_desc.samples = VK_SAMPLE_COUNT_1_BIT;
+    attach_desc.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+    attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
+
+    rpci.pAttachments = &attach_desc;
+    rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+
+    device = dev->device();
+    vk::CreateRenderPass(device, &rpci, NULL, &m_renderpass);
+}
+
+VkRenderpassObj::~VkRenderpassObj() { vk::DestroyRenderPass(device, m_renderpass, NULL); }
+
+VkImageObj::VkImageObj(VkDeviceObj *dev) {
+    m_device = dev;
+    m_descriptorImageInfo.imageView = VK_NULL_HANDLE;
+    m_descriptorImageInfo.imageLayout = VK_IMAGE_LAYOUT_GENERAL;
+}
+
+// clang-format off
+void VkImageObj::ImageMemoryBarrier(VkCommandBufferObj *cmd_buf, VkImageAspectFlags aspect,
+                                    VkFlags output_mask /*=
+                                    VK_ACCESS_HOST_WRITE_BIT |
+                                    VK_ACCESS_SHADER_WRITE_BIT |
+                                    VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
+                                    VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
+                                    VK_MEMORY_OUTPUT_COPY_BIT*/, 
+                                    VkFlags input_mask /*=
+                                    VK_ACCESS_HOST_READ_BIT |
+                                    VK_ACCESS_INDIRECT_COMMAND_READ_BIT |
+                                    VK_ACCESS_INDEX_READ_BIT |
+                                    VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT |
+                                    VK_ACCESS_UNIFORM_READ_BIT |
+                                    VK_ACCESS_SHADER_READ_BIT |
+                                    VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
+                                    VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
+                                    VK_MEMORY_INPUT_COPY_BIT*/, VkImageLayout image_layout,
+                                    VkPipelineStageFlags src_stages, VkPipelineStageFlags dest_stages,
+                                    uint32_t srcQueueFamilyIndex, uint32_t dstQueueFamilyIndex) {
+    // clang-format on
+    // TODO: Mali device crashing with VK_REMAINING_MIP_LEVELS
+    const VkImageSubresourceRange subresourceRange =
+        subresource_range(aspect, 0, /*VK_REMAINING_MIP_LEVELS*/ 1, 0, 1 /*VK_REMAINING_ARRAY_LAYERS*/);
+    VkImageMemoryBarrier barrier;
+    barrier = image_memory_barrier(output_mask, input_mask, Layout(), image_layout, subresourceRange, srcQueueFamilyIndex,
+                                   dstQueueFamilyIndex);
+
+    VkImageMemoryBarrier *pmemory_barrier = &barrier;
+
+    // write barrier to the command buffer
+    vk::CmdPipelineBarrier(cmd_buf->handle(), src_stages, dest_stages, VK_DEPENDENCY_BY_REGION_BIT, 0, NULL, 0, NULL, 1,
+                           pmemory_barrier);
+}
+
+void VkImageObj::SetLayout(VkCommandBufferObj *cmd_buf, VkImageAspectFlags aspect, VkImageLayout image_layout) {
+    VkFlags src_mask, dst_mask;
+    const VkFlags all_cache_outputs = VK_ACCESS_HOST_WRITE_BIT | VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
+                                      VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | VK_ACCESS_TRANSFER_WRITE_BIT;
+    const VkFlags all_cache_inputs = VK_ACCESS_HOST_READ_BIT | VK_ACCESS_INDIRECT_COMMAND_READ_BIT | VK_ACCESS_INDEX_READ_BIT |
+                                     VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT | VK_ACCESS_UNIFORM_READ_BIT | VK_ACCESS_SHADER_READ_BIT |
+                                     VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
+                                     VK_ACCESS_MEMORY_READ_BIT;
+
+    if (image_layout == m_descriptorImageInfo.imageLayout) {
+        return;
+    }
+
+    switch (image_layout) {
+        case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
+            if (m_descriptorImageInfo.imageLayout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL)
+                src_mask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+            else
+                src_mask = VK_ACCESS_TRANSFER_WRITE_BIT;
+            dst_mask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_TRANSFER_READ_BIT;
+            break;
+
+        case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
+            if (m_descriptorImageInfo.imageLayout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL)
+                src_mask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+            else if (m_descriptorImageInfo.imageLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)
+                src_mask = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;
+            else
+                src_mask = VK_ACCESS_TRANSFER_WRITE_BIT;
+            dst_mask = VK_ACCESS_TRANSFER_WRITE_BIT;
+            break;
+
+        case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
+            if (m_descriptorImageInfo.imageLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
+                src_mask = VK_ACCESS_TRANSFER_WRITE_BIT;
+            else
+                src_mask = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;
+            dst_mask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_MEMORY_READ_BIT;
+            break;
+
+        case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
+            if (m_descriptorImageInfo.imageLayout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL)
+                src_mask = VK_ACCESS_TRANSFER_READ_BIT;
+            else
+                src_mask = 0;
+            dst_mask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+            break;
+
+        case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
+            dst_mask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
+            src_mask = all_cache_outputs;
+            break;
+
+        default:
+            src_mask = all_cache_outputs;
+            dst_mask = all_cache_inputs;
+            break;
+    }
+
+    if (m_descriptorImageInfo.imageLayout == VK_IMAGE_LAYOUT_UNDEFINED) src_mask = 0;
+
+    ImageMemoryBarrier(cmd_buf, aspect, src_mask, dst_mask, image_layout);
+    m_descriptorImageInfo.imageLayout = image_layout;
+}
+
+void VkImageObj::SetLayout(VkImageAspectFlags aspect, VkImageLayout image_layout) {
+    if (image_layout == m_descriptorImageInfo.imageLayout) {
+        return;
+    }
+
+    VkCommandPoolObj pool(m_device, m_device->graphics_queue_node_index_);
+    VkCommandBufferObj cmd_buf(m_device, &pool);
+
+    /* Build command buffer to set image layout in the driver */
+    cmd_buf.begin();
+    SetLayout(&cmd_buf, aspect, image_layout);
+    cmd_buf.end();
+
+    cmd_buf.QueueCommandBuffer();
+}
+
+bool VkImageObj::IsCompatible(const VkImageUsageFlags usages, const VkFormatFeatureFlags features) {
+    VkFormatFeatureFlags all_feature_flags =
+        VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT | VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT | VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT |
+        VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT | VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT |
+        VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT | VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT |
+        VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT | VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT |
+        VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT |
+        VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT;
+    if (m_device->IsEnabledExtension(VK_IMG_FILTER_CUBIC_EXTENSION_NAME)) {
+        all_feature_flags |= VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG;
+    }
+
+    if (m_device->IsEnabledExtension(VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
+        all_feature_flags |= VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR | VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR;
+    }
+
+    if (m_device->IsEnabledExtension(VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME)) {
+        all_feature_flags |= VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT;
+    }
+
+    if (m_device->IsEnabledExtension(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME)) {
+        all_feature_flags |= VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR |
+                             VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR |
+                             VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR |
+                             VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR |
+                             VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR |
+                             VK_FORMAT_FEATURE_DISJOINT_BIT_KHR | VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR;
+    }
+
+    if ((features & all_feature_flags) == 0) return false;  // whole format unsupported
+
+    if ((usages & VK_IMAGE_USAGE_SAMPLED_BIT) && !(features & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) return false;
+    if ((usages & VK_IMAGE_USAGE_STORAGE_BIT) && !(features & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT)) return false;
+    if ((usages & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) && !(features & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT)) return false;
+    if ((usages & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) && !(features & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT))
+        return false;
+
+    if (m_device->IsEnabledExtension(VK_KHR_MAINTENANCE1_EXTENSION_NAME)) {
+        // WORKAROUND: for DevSim not reporting extended enums, and possibly some drivers too
+        const auto all_nontransfer_feature_flags =
+            all_feature_flags ^ (VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR | VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR);
+        const bool transfer_probably_supported_anyway = (features & all_nontransfer_feature_flags) > 0;
+        if (!transfer_probably_supported_anyway) {
+            if ((usages & VK_IMAGE_USAGE_TRANSFER_SRC_BIT) && !(features & VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR)) return false;
+            if ((usages & VK_IMAGE_USAGE_TRANSFER_DST_BIT) && !(features & VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR)) return false;
+        }
+    }
+
+    return true;
+}
+
+void VkImageObj::InitNoLayout(uint32_t const width, uint32_t const height, uint32_t const mipLevels, VkFormat const format,
+                              VkFlags const usage, VkImageTiling const requested_tiling, VkMemoryPropertyFlags const reqs,
+                              const std::vector<uint32_t> *queue_families, bool memory) {
+    VkFormatProperties image_fmt;
+    VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL;
+
+    vk::GetPhysicalDeviceFormatProperties(m_device->phy().handle(), format, &image_fmt);
+
+    if (requested_tiling == VK_IMAGE_TILING_LINEAR) {
+        if (IsCompatible(usage, image_fmt.linearTilingFeatures)) {
+            tiling = VK_IMAGE_TILING_LINEAR;
+        } else if (IsCompatible(usage, image_fmt.optimalTilingFeatures)) {
+            tiling = VK_IMAGE_TILING_OPTIMAL;
+        } else {
+            FAIL() << "VkImageObj::init() error: unsupported tiling configuration. Usage: " << std::hex << std::showbase << usage
+                   << ", supported linear features: " << image_fmt.linearTilingFeatures;
+        }
+    } else if (IsCompatible(usage, image_fmt.optimalTilingFeatures)) {
+        tiling = VK_IMAGE_TILING_OPTIMAL;
+    } else if (IsCompatible(usage, image_fmt.linearTilingFeatures)) {
+        tiling = VK_IMAGE_TILING_LINEAR;
+    } else {
+        FAIL() << "VkImageObj::init() error: unsupported tiling configuration. Usage: " << std::hex << std::showbase << usage
+               << ", supported optimal features: " << image_fmt.optimalTilingFeatures;
+    }
+
+    VkImageCreateInfo imageCreateInfo = vk_testing::Image::create_info();
+    imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
+    imageCreateInfo.format = format;
+    imageCreateInfo.extent.width = width;
+    imageCreateInfo.extent.height = height;
+    imageCreateInfo.mipLevels = mipLevels;
+    imageCreateInfo.tiling = tiling;
+    imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+    // Automatically set sharing mode etc. based on queue family information
+    if (queue_families && (queue_families->size() > 1)) {
+        imageCreateInfo.sharingMode = VK_SHARING_MODE_CONCURRENT;
+        imageCreateInfo.queueFamilyIndexCount = static_cast<uint32_t>(queue_families->size());
+        imageCreateInfo.pQueueFamilyIndices = queue_families->data();
+    }
+
+    Layout(imageCreateInfo.initialLayout);
+    imageCreateInfo.usage = usage;
+    if (memory)
+        vk_testing::Image::init(*m_device, imageCreateInfo, reqs);
+    else
+        vk_testing::Image::init_no_mem(*m_device, imageCreateInfo);
+}
+
+void VkImageObj::Init(uint32_t const width, uint32_t const height, uint32_t const mipLevels, VkFormat const format,
+                      VkFlags const usage, VkImageTiling const requested_tiling, VkMemoryPropertyFlags const reqs,
+                      const std::vector<uint32_t> *queue_families, bool memory) {
+    InitNoLayout(width, height, mipLevels, format, usage, requested_tiling, reqs, queue_families, memory);
+
+    if (!initialized() || !memory) return;  // We don't have a valid handle from early stage init, and thus SetLayout will fail
+
+    VkImageLayout newLayout;
+    if (usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)
+        newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+    else if (usage & VK_IMAGE_USAGE_SAMPLED_BIT)
+        newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+    else
+        newLayout = m_descriptorImageInfo.imageLayout;
+
+    VkImageAspectFlags image_aspect = 0;
+    if (FormatIsDepthAndStencil(format)) {
+        image_aspect = VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_DEPTH_BIT;
+    } else if (FormatIsDepthOnly(format)) {
+        image_aspect = VK_IMAGE_ASPECT_DEPTH_BIT;
+    } else if (FormatIsStencilOnly(format)) {
+        image_aspect = VK_IMAGE_ASPECT_STENCIL_BIT;
+    } else {  // color
+        image_aspect = VK_IMAGE_ASPECT_COLOR_BIT;
+    }
+    SetLayout(image_aspect, newLayout);
+}
+
+void VkImageObj::init(const VkImageCreateInfo *create_info) {
+    VkFormatProperties image_fmt;
+    vk::GetPhysicalDeviceFormatProperties(m_device->phy().handle(), create_info->format, &image_fmt);
+
+    switch (create_info->tiling) {
+        case VK_IMAGE_TILING_OPTIMAL:
+            if (!IsCompatible(create_info->usage, image_fmt.optimalTilingFeatures)) {
+                FAIL() << "VkImageObj::init() error: unsupported tiling configuration. Usage: " << std::hex << std::showbase
+                       << create_info->usage << ", supported optimal features: " << image_fmt.optimalTilingFeatures;
+            }
+            break;
+        case VK_IMAGE_TILING_LINEAR:
+            if (!IsCompatible(create_info->usage, image_fmt.linearTilingFeatures)) {
+                FAIL() << "VkImageObj::init() error: unsupported tiling configuration. Usage: " << std::hex << std::showbase
+                       << create_info->usage << ", supported linear features: " << image_fmt.linearTilingFeatures;
+            }
+            break;
+        default:
+            break;
+    }
+    Layout(create_info->initialLayout);
+
+    vk_testing::Image::init(*m_device, *create_info, 0);
+
+    VkImageAspectFlags image_aspect = 0;
+    if (FormatIsDepthAndStencil(create_info->format)) {
+        image_aspect = VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_DEPTH_BIT;
+    } else if (FormatIsDepthOnly(create_info->format)) {
+        image_aspect = VK_IMAGE_ASPECT_DEPTH_BIT;
+    } else if (FormatIsStencilOnly(create_info->format)) {
+        image_aspect = VK_IMAGE_ASPECT_STENCIL_BIT;
+    } else {  // color
+        image_aspect = VK_IMAGE_ASPECT_COLOR_BIT;
+    }
+    SetLayout(image_aspect, VK_IMAGE_LAYOUT_GENERAL);
+}
+
+VkResult VkImageObj::CopyImage(VkImageObj &src_image) {
+    VkImageLayout src_image_layout, dest_image_layout;
+
+    VkCommandPoolObj pool(m_device, m_device->graphics_queue_node_index_);
+    VkCommandBufferObj cmd_buf(m_device, &pool);
+
+    /* Build command buffer to copy staging texture to usable texture */
+    cmd_buf.begin();
+
+    /* TODO: Can we determine image aspect from image object? */
+    src_image_layout = src_image.Layout();
+    src_image.SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+
+    dest_image_layout = (this->Layout() == VK_IMAGE_LAYOUT_UNDEFINED) ? VK_IMAGE_LAYOUT_GENERAL : this->Layout();
+    this->SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+
+    VkImageCopy copy_region = {};
+    copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copy_region.srcSubresource.baseArrayLayer = 0;
+    copy_region.srcSubresource.mipLevel = 0;
+    copy_region.srcSubresource.layerCount = 1;
+    copy_region.srcOffset.x = 0;
+    copy_region.srcOffset.y = 0;
+    copy_region.srcOffset.z = 0;
+    copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copy_region.dstSubresource.baseArrayLayer = 0;
+    copy_region.dstSubresource.mipLevel = 0;
+    copy_region.dstSubresource.layerCount = 1;
+    copy_region.dstOffset.x = 0;
+    copy_region.dstOffset.y = 0;
+    copy_region.dstOffset.z = 0;
+    copy_region.extent = src_image.extent();
+
+    vk::CmdCopyImage(cmd_buf.handle(), src_image.handle(), src_image.Layout(), handle(), Layout(), 1, &copy_region);
+
+    src_image.SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, src_image_layout);
+
+    this->SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, dest_image_layout);
+
+    cmd_buf.end();
+
+    cmd_buf.QueueCommandBuffer();
+
+    return VK_SUCCESS;
+}
+
+// Same as CopyImage, but in the opposite direction
+VkResult VkImageObj::CopyImageOut(VkImageObj &dst_image) {
+    VkImageLayout src_image_layout, dest_image_layout;
+
+    VkCommandPoolObj pool(m_device, m_device->graphics_queue_node_index_);
+    VkCommandBufferObj cmd_buf(m_device, &pool);
+
+    cmd_buf.begin();
+
+    src_image_layout = this->Layout();
+    this->SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+
+    dest_image_layout = (dst_image.Layout() == VK_IMAGE_LAYOUT_UNDEFINED) ? VK_IMAGE_LAYOUT_GENERAL : dst_image.Layout();
+    dst_image.SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+
+    VkImageCopy copy_region = {};
+    copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copy_region.srcSubresource.baseArrayLayer = 0;
+    copy_region.srcSubresource.mipLevel = 0;
+    copy_region.srcSubresource.layerCount = 1;
+    copy_region.srcOffset.x = 0;
+    copy_region.srcOffset.y = 0;
+    copy_region.srcOffset.z = 0;
+    copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    copy_region.dstSubresource.baseArrayLayer = 0;
+    copy_region.dstSubresource.mipLevel = 0;
+    copy_region.dstSubresource.layerCount = 1;
+    copy_region.dstOffset.x = 0;
+    copy_region.dstOffset.y = 0;
+    copy_region.dstOffset.z = 0;
+    copy_region.extent = dst_image.extent();
+
+    vk::CmdCopyImage(cmd_buf.handle(), handle(), Layout(), dst_image.handle(), dst_image.Layout(), 1, &copy_region);
+
+    this->SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, src_image_layout);
+
+    dst_image.SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, dest_image_layout);
+
+    cmd_buf.end();
+
+    cmd_buf.QueueCommandBuffer();
+
+    return VK_SUCCESS;
+}
+
+// Return 16x16 pixel block
+std::array<std::array<uint32_t, 16>, 16> VkImageObj::Read() {
+    VkImageObj stagingImage(m_device);
+    VkMemoryPropertyFlags reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+
+    stagingImage.Init(16, 16, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+                      VK_IMAGE_TILING_LINEAR, reqs);
+    stagingImage.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
+    VkSubresourceLayout layout = stagingImage.subresource_layout(subresource(VK_IMAGE_ASPECT_COLOR_BIT, 0, 0));
+    CopyImageOut(stagingImage);
+    void *data = stagingImage.MapMemory();
+    std::array<std::array<uint32_t, 16>, 16> m = {};
+    if (data) {
+        for (uint32_t y = 0; y < stagingImage.extent().height; y++) {
+            uint32_t *row = (uint32_t *)((char *)data + layout.rowPitch * y);
+            for (uint32_t x = 0; x < stagingImage.extent().width; x++) m[y][x] = row[x];
+        }
+    }
+    stagingImage.UnmapMemory();
+    return m;
+}
+
+VkTextureObj::VkTextureObj(VkDeviceObj *device, uint32_t *colors) : VkImageObj(device) {
+    m_device = device;
+    const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
+    uint32_t tex_colors[2] = {0xffff0000, 0xff00ff00};
+    void *data;
+    uint32_t x, y;
+    VkImageObj stagingImage(device);
+    VkMemoryPropertyFlags reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+
+    stagingImage.Init(16, 16, 1, tex_format, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+                      VK_IMAGE_TILING_LINEAR, reqs);
+    VkSubresourceLayout layout = stagingImage.subresource_layout(subresource(VK_IMAGE_ASPECT_COLOR_BIT, 0, 0));
+
+    if (colors == NULL) colors = tex_colors;
+
+    VkImageViewCreateInfo view = {};
+    view.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    view.pNext = NULL;
+    view.image = VK_NULL_HANDLE;
+    view.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    view.format = tex_format;
+    view.components.r = VK_COMPONENT_SWIZZLE_R;
+    view.components.g = VK_COMPONENT_SWIZZLE_G;
+    view.components.b = VK_COMPONENT_SWIZZLE_B;
+    view.components.a = VK_COMPONENT_SWIZZLE_A;
+    view.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    view.subresourceRange.baseMipLevel = 0;
+    view.subresourceRange.levelCount = 1;
+    view.subresourceRange.baseArrayLayer = 0;
+    view.subresourceRange.layerCount = 1;
+
+    /* create image */
+    Init(16, 16, 1, tex_format, VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL);
+    stagingImage.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
+
+    /* create image view */
+    view.image = handle();
+    m_textureView.init(*m_device, view);
+    m_descriptorImageInfo.imageView = m_textureView.handle();
+
+    data = stagingImage.MapMemory();
+
+    for (y = 0; y < extent().height; y++) {
+        uint32_t *row = (uint32_t *)((char *)data + layout.rowPitch * y);
+        for (x = 0; x < extent().width; x++) row[x] = colors[(x & 1) ^ (y & 1)];
+    }
+    stagingImage.UnmapMemory();
+    stagingImage.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+    VkImageObj::CopyImage(stagingImage);
+}
+
+VkSamplerObj::VkSamplerObj(VkDeviceObj *device) {
+    m_device = device;
+
+    VkSamplerCreateInfo samplerCreateInfo;
+    memset(&samplerCreateInfo, 0, sizeof(samplerCreateInfo));
+    samplerCreateInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
+    samplerCreateInfo.magFilter = VK_FILTER_NEAREST;
+    samplerCreateInfo.minFilter = VK_FILTER_NEAREST;
+    samplerCreateInfo.mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST;
+    samplerCreateInfo.addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT;
+    samplerCreateInfo.addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT;
+    samplerCreateInfo.addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT;
+    samplerCreateInfo.mipLodBias = 0.0;
+    samplerCreateInfo.anisotropyEnable = VK_FALSE;
+    samplerCreateInfo.maxAnisotropy = 1;
+    samplerCreateInfo.compareOp = VK_COMPARE_OP_NEVER;
+    samplerCreateInfo.minLod = 0.0;
+    samplerCreateInfo.maxLod = 0.0;
+    samplerCreateInfo.borderColor = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE;
+    samplerCreateInfo.unnormalizedCoordinates = VK_FALSE;
+
+    init(*m_device, samplerCreateInfo);
+}
+
+/*
+ * Basic ConstantBuffer constructor. Then use create methods to fill in the
+ * details.
+ */
+VkConstantBufferObj::VkConstantBufferObj(VkDeviceObj *device, VkBufferUsageFlags usage) {
+    m_device = device;
+
+    memset(&m_descriptorBufferInfo, 0, sizeof(m_descriptorBufferInfo));
+
+    // Special case for usages outside of original limits of framework
+    if ((VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT) != usage) {
+        init_no_mem(*m_device, create_info(0, usage));
+    }
+}
+
+VkConstantBufferObj::VkConstantBufferObj(VkDeviceObj *device, VkDeviceSize allocationSize, const void *data,
+                                         VkBufferUsageFlags usage) {
+    m_device = device;
+
+    memset(&m_descriptorBufferInfo, 0, sizeof(m_descriptorBufferInfo));
+
+    VkMemoryPropertyFlags reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+
+    if ((VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT) == usage) {
+        init_as_src_and_dst(*m_device, allocationSize, reqs);
+    } else {
+        init(*m_device, create_info(allocationSize, usage), reqs);
+    }
+
+    void *pData = memory().map();
+    memcpy(pData, data, static_cast<size_t>(allocationSize));
+    memory().unmap();
+
+    /*
+     * Constant buffers are going to be used as vertex input buffers
+     * or as shader uniform buffers. So, we'll create the shaderbuffer
+     * descriptor here so it's ready if needed.
+     */
+    this->m_descriptorBufferInfo.buffer = handle();
+    this->m_descriptorBufferInfo.offset = 0;
+    this->m_descriptorBufferInfo.range = allocationSize;
+}
+
+VkPipelineShaderStageCreateInfo const &VkShaderObj::GetStageCreateInfo() const { return m_stage_info; }
+
+VkShaderObj::VkShaderObj(VkDeviceObj *device, const char *shader_code, VkShaderStageFlagBits stage, VkRenderFramework *framework,
+                         char const *name, bool debug, VkSpecializationInfo *specInfo, uint32_t spirv_minor_version) {
+    m_device = device;
+    m_stage_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+    m_stage_info.pNext = nullptr;
+    m_stage_info.flags = 0;
+    m_stage_info.stage = stage;
+    m_stage_info.module = VK_NULL_HANDLE;
+    m_stage_info.pName = name;
+    m_stage_info.pSpecializationInfo = specInfo;
+
+    std::vector<unsigned int> spv;
+    framework->GLSLtoSPV(&device->props.limits, stage, shader_code, spv, debug, spirv_minor_version);
+
+    VkShaderModuleCreateInfo moduleCreateInfo = {};
+    moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
+    moduleCreateInfo.codeSize = spv.size() * sizeof(unsigned int);
+    moduleCreateInfo.pCode = spv.data();
+
+    init(*m_device, moduleCreateInfo);
+    m_stage_info.module = handle();
+}
+
+VkShaderObj::VkShaderObj(VkDeviceObj *device, const std::string spv_source, VkShaderStageFlagBits stage,
+                         VkRenderFramework *framework, char const *name, VkSpecializationInfo *specInfo) {
+    m_device = device;
+    m_stage_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+    m_stage_info.pNext = nullptr;
+    m_stage_info.flags = 0;
+    m_stage_info.stage = stage;
+    m_stage_info.module = VK_NULL_HANDLE;
+    m_stage_info.pName = name;
+    m_stage_info.pSpecializationInfo = specInfo;
+
+    std::vector<unsigned int> spv;
+    framework->ASMtoSPV(SPV_ENV_VULKAN_1_0, 0, spv_source.data(), spv);
+
+    VkShaderModuleCreateInfo moduleCreateInfo = {};
+    moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
+    moduleCreateInfo.codeSize = spv.size() * sizeof(unsigned int);
+    moduleCreateInfo.pCode = spv.data();
+
+    init(*m_device, moduleCreateInfo);
+    m_stage_info.module = handle();
+}
+
+VkPipelineLayoutObj::VkPipelineLayoutObj(VkDeviceObj *device,
+                                         const std::vector<const VkDescriptorSetLayoutObj *> &descriptor_layouts,
+                                         const std::vector<VkPushConstantRange> &push_constant_ranges) {
+    VkPipelineLayoutCreateInfo pl_ci = {};
+    pl_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+    pl_ci.pushConstantRangeCount = static_cast<uint32_t>(push_constant_ranges.size());
+    pl_ci.pPushConstantRanges = push_constant_ranges.data();
+
+    auto descriptor_layouts_unwrapped = MakeTestbindingHandles<const vk_testing::DescriptorSetLayout>(descriptor_layouts);
+
+    init(*device, pl_ci, descriptor_layouts_unwrapped);
+}
+
+void VkPipelineLayoutObj::Reset() { *this = VkPipelineLayoutObj(); }
+
+VkPipelineObj::VkPipelineObj(VkDeviceObj *device) {
+    m_device = device;
+
+    m_vi_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
+    m_vi_state.pNext = nullptr;
+    m_vi_state.flags = 0;
+    m_vi_state.vertexBindingDescriptionCount = 0;
+    m_vi_state.pVertexBindingDescriptions = nullptr;
+    m_vi_state.vertexAttributeDescriptionCount = 0;
+    m_vi_state.pVertexAttributeDescriptions = nullptr;
+
+    m_ia_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+    m_ia_state.pNext = nullptr;
+    m_ia_state.flags = 0;
+    m_ia_state.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
+    m_ia_state.primitiveRestartEnable = VK_FALSE;
+
+    m_te_state = nullptr;
+
+    m_vp_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
+    m_vp_state.pNext = nullptr;
+    m_vp_state.flags = 0;
+    m_vp_state.viewportCount = 1;
+    m_vp_state.scissorCount = 1;
+    m_vp_state.pViewports = nullptr;
+    m_vp_state.pScissors = nullptr;
+
+    m_rs_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
+    m_rs_state.pNext = &m_line_state;
+    m_rs_state.flags = 0;
+    m_rs_state.depthClampEnable = VK_FALSE;
+    m_rs_state.rasterizerDiscardEnable = VK_FALSE;
+    m_rs_state.polygonMode = VK_POLYGON_MODE_FILL;
+    m_rs_state.cullMode = VK_CULL_MODE_BACK_BIT;
+    m_rs_state.frontFace = VK_FRONT_FACE_CLOCKWISE;
+    m_rs_state.depthBiasEnable = VK_FALSE;
+    m_rs_state.depthBiasConstantFactor = 0.0f;
+    m_rs_state.depthBiasClamp = 0.0f;
+    m_rs_state.depthBiasSlopeFactor = 0.0f;
+    m_rs_state.lineWidth = 1.0f;
+
+    m_line_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT;
+    m_line_state.pNext = nullptr;
+    m_line_state.lineRasterizationMode = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT;
+    m_line_state.stippledLineEnable = VK_FALSE;
+    m_line_state.lineStippleFactor = 0;
+    m_line_state.lineStipplePattern = 0;
+
+    m_ms_state.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+    m_ms_state.pNext = nullptr;
+    m_ms_state.flags = 0;
+    m_ms_state.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
+    m_ms_state.sampleShadingEnable = VK_FALSE;
+    m_ms_state.minSampleShading = 0.0f;
+    m_ms_state.pSampleMask = nullptr;
+    m_ms_state.alphaToCoverageEnable = VK_FALSE;
+    m_ms_state.alphaToOneEnable = VK_FALSE;
+
+    m_ds_state = nullptr;
+
+    memset(&m_cb_state, 0, sizeof(m_cb_state));
+    m_cb_state.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
+    m_cb_state.blendConstants[0] = 1.0f;
+    m_cb_state.blendConstants[1] = 1.0f;
+    m_cb_state.blendConstants[2] = 1.0f;
+    m_cb_state.blendConstants[3] = 1.0f;
+
+    memset(&m_pd_state, 0, sizeof(m_pd_state));
+}
+
+void VkPipelineObj::AddShader(VkShaderObj *shader) { m_shaderStages.push_back(shader->GetStageCreateInfo()); }
+
+void VkPipelineObj::AddShader(VkPipelineShaderStageCreateInfo const &createInfo) { m_shaderStages.push_back(createInfo); }
+
+void VkPipelineObj::AddVertexInputAttribs(VkVertexInputAttributeDescription *vi_attrib, uint32_t count) {
+    m_vi_state.pVertexAttributeDescriptions = vi_attrib;
+    m_vi_state.vertexAttributeDescriptionCount = count;
+}
+
+void VkPipelineObj::AddVertexInputBindings(VkVertexInputBindingDescription *vi_binding, uint32_t count) {
+    m_vi_state.pVertexBindingDescriptions = vi_binding;
+    m_vi_state.vertexBindingDescriptionCount = count;
+}
+
+void VkPipelineObj::AddColorAttachment(uint32_t binding, const VkPipelineColorBlendAttachmentState &att) {
+    if (binding + 1 > m_colorAttachments.size()) {
+        m_colorAttachments.resize(binding + 1);
+    }
+    m_colorAttachments[binding] = att;
+}
+
+void VkPipelineObj::SetDepthStencil(const VkPipelineDepthStencilStateCreateInfo *ds_state) { m_ds_state = ds_state; }
+
+void VkPipelineObj::SetViewport(const vector<VkViewport> viewports) {
+    m_viewports = viewports;
+    // If we explicitly set a null viewport, pass it through to create info
+    // but preserve viewportCount because it mustn't change
+    if (m_viewports.size() == 0) {
+        m_vp_state.pViewports = nullptr;
+    }
+}
+
+void VkPipelineObj::SetScissor(const vector<VkRect2D> scissors) {
+    m_scissors = scissors;
+    // If we explicitly set a null scissor, pass it through to create info
+    // but preserve scissorCount because it mustn't change
+    if (m_scissors.size() == 0) {
+        m_vp_state.pScissors = nullptr;
+    }
+}
+
+void VkPipelineObj::MakeDynamic(VkDynamicState state) {
+    /* Only add a state once */
+    for (auto it = m_dynamic_state_enables.begin(); it != m_dynamic_state_enables.end(); it++) {
+        if ((*it) == state) return;
+    }
+    m_dynamic_state_enables.push_back(state);
+}
+
+void VkPipelineObj::SetMSAA(const VkPipelineMultisampleStateCreateInfo *ms_state) { m_ms_state = *ms_state; }
+
+void VkPipelineObj::SetInputAssembly(const VkPipelineInputAssemblyStateCreateInfo *ia_state) { m_ia_state = *ia_state; }
+
+void VkPipelineObj::SetRasterization(const VkPipelineRasterizationStateCreateInfo *rs_state) {
+    m_rs_state = *rs_state;
+    m_rs_state.pNext = &m_line_state;
+}
+
+void VkPipelineObj::SetTessellation(const VkPipelineTessellationStateCreateInfo *te_state) { m_te_state = te_state; }
+
+void VkPipelineObj::SetLineState(const VkPipelineRasterizationLineStateCreateInfoEXT *line_state) { m_line_state = *line_state; }
+
+void VkPipelineObj::InitGraphicsPipelineCreateInfo(VkGraphicsPipelineCreateInfo *gp_ci) {
+    gp_ci->stageCount = m_shaderStages.size();
+    gp_ci->pStages = m_shaderStages.size() ? m_shaderStages.data() : nullptr;
+
+    m_vi_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
+    gp_ci->pVertexInputState = &m_vi_state;
+
+    m_ia_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+    gp_ci->pInputAssemblyState = &m_ia_state;
+
+    gp_ci->sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
+    gp_ci->pNext = NULL;
+    gp_ci->flags = 0;
+
+    m_cb_state.attachmentCount = m_colorAttachments.size();
+    m_cb_state.pAttachments = m_colorAttachments.data();
+
+    if (m_viewports.size() > 0) {
+        m_vp_state.viewportCount = m_viewports.size();
+        m_vp_state.pViewports = m_viewports.data();
+    } else {
+        MakeDynamic(VK_DYNAMIC_STATE_VIEWPORT);
+    }
+
+    if (m_scissors.size() > 0) {
+        m_vp_state.scissorCount = m_scissors.size();
+        m_vp_state.pScissors = m_scissors.data();
+    } else {
+        MakeDynamic(VK_DYNAMIC_STATE_SCISSOR);
+    }
+
+    memset(&m_pd_state, 0, sizeof(m_pd_state));
+    if (m_dynamic_state_enables.size() > 0) {
+        m_pd_state.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
+        m_pd_state.dynamicStateCount = m_dynamic_state_enables.size();
+        m_pd_state.pDynamicStates = m_dynamic_state_enables.data();
+        gp_ci->pDynamicState = &m_pd_state;
+    }
+
+    gp_ci->subpass = 0;
+    gp_ci->pViewportState = &m_vp_state;
+    gp_ci->pRasterizationState = &m_rs_state;
+    gp_ci->pMultisampleState = &m_ms_state;
+    gp_ci->pDepthStencilState = m_ds_state;
+    gp_ci->pColorBlendState = &m_cb_state;
+    gp_ci->pTessellationState = m_te_state;
+}
+
+VkResult VkPipelineObj::CreateVKPipeline(VkPipelineLayout layout, VkRenderPass render_pass, VkGraphicsPipelineCreateInfo *gp_ci) {
+    VkGraphicsPipelineCreateInfo info = {};
+
+    // if not given a CreateInfo, create and initialize a local one.
+    if (gp_ci == nullptr) {
+        gp_ci = &info;
+        InitGraphicsPipelineCreateInfo(gp_ci);
+    }
+
+    gp_ci->layout = layout;
+    gp_ci->renderPass = render_pass;
+
+    return init_try(*m_device, *gp_ci);
+}
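+
+// Illustrative usage sketch only (not part of the framework): a minimal graphics
+// pipeline built with this helper might look like the following, assuming a test
+// fixture that provides m_device, renderPass(), already-built VkShaderObj
+// instances vs/fs, and a VkPipelineLayoutObj pipeline_layout:
+//
+//     VkPipelineObj pipe(m_device);
+//     pipe.AddShader(&vs);
+//     pipe.AddShader(&fs);
+//     pipe.AddDefaultColorAttachment();
+//     // Viewport and scissor become dynamic states automatically when none are set.
+//     ASSERT_VK_SUCCESS(pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass()));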
+
+VkCommandBufferObj::VkCommandBufferObj(VkDeviceObj *device, VkCommandPoolObj *pool, VkCommandBufferLevel level, VkQueueObj *queue) {
+    m_device = device;
+    if (queue) {
+        m_queue = queue;
+    } else {
+        m_queue = m_device->GetDefaultQueue();
+    }
+    assert(m_queue);
+
+    auto create_info = vk_testing::CommandBuffer::create_info(pool->handle());
+    create_info.level = level;
+    init(*device, create_info);
+}
+
+void VkCommandBufferObj::PipelineBarrier(VkPipelineStageFlags src_stages, VkPipelineStageFlags dest_stages,
+                                         VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount,
+                                         const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount,
+                                         const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount,
+                                         const VkImageMemoryBarrier *pImageMemoryBarriers) {
+    vk::CmdPipelineBarrier(handle(), src_stages, dest_stages, dependencyFlags, memoryBarrierCount, pMemoryBarriers,
+                           bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+}
+
+void VkCommandBufferObj::ClearAllBuffers(const vector<std::unique_ptr<VkImageObj>> &color_objs, VkClearColorValue clear_color,
+                                         VkDepthStencilObj *depth_stencil_obj, float depth_clear_value,
+                                         uint32_t stencil_clear_value) {
+    // Every clear below operates on the whole image.
+    VkImageSubresourceRange subrange = {};
+    // subrange.aspectMask is set per attachment below.
+    subrange.baseMipLevel = 0;
+    // TODO: Mali device crashing with VK_REMAINING_MIP_LEVELS
+    subrange.levelCount = 1;  // VK_REMAINING_MIP_LEVELS;
+    subrange.baseArrayLayer = 0;
+    // TODO: Mesa crashing with VK_REMAINING_ARRAY_LAYERS
+    subrange.layerCount = 1;  // VK_REMAINING_ARRAY_LAYERS;
+
+    const VkImageLayout clear_layout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
+
+    for (const auto &color_obj : color_objs) {
+        subrange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+        color_obj->Layout(VK_IMAGE_LAYOUT_UNDEFINED);
+        color_obj->SetLayout(this, subrange.aspectMask, clear_layout);
+        ClearColorImage(color_obj->image(), clear_layout, &clear_color, 1, &subrange);
+    }
+
+    if (depth_stencil_obj && depth_stencil_obj->Initialized()) {
+        subrange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+        if (FormatIsDepthOnly(depth_stencil_obj->format())) subrange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+        if (FormatIsStencilOnly(depth_stencil_obj->format())) subrange.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
+
+        depth_stencil_obj->Layout(VK_IMAGE_LAYOUT_UNDEFINED);
+        depth_stencil_obj->SetLayout(this, subrange.aspectMask, clear_layout);
+
+        VkClearDepthStencilValue clear_value = {depth_clear_value, stencil_clear_value};
+        ClearDepthStencilImage(depth_stencil_obj->handle(), clear_layout, &clear_value, 1, &subrange);
+    }
+}
+
+void VkCommandBufferObj::FillBuffer(VkBuffer buffer, VkDeviceSize offset, VkDeviceSize fill_size, uint32_t data) {
+    vk::CmdFillBuffer(handle(), buffer, offset, fill_size, data);
+}
+
+void VkCommandBufferObj::UpdateBuffer(VkBuffer buffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) {
+    vk::CmdUpdateBuffer(handle(), buffer, dstOffset, dataSize, pData);
+}
+
+void VkCommandBufferObj::CopyImage(VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
+                                   uint32_t regionCount, const VkImageCopy *pRegions) {
+    vk::CmdCopyImage(handle(), srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+}
+
+void VkCommandBufferObj::ResolveImage(VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
+                                      VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve *pRegions) {
+    vk::CmdResolveImage(handle(), srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+}
+
+void VkCommandBufferObj::ClearColorImage(VkImage image, VkImageLayout imageLayout, const VkClearColorValue *pColor,
+                                         uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
+    vk::CmdClearColorImage(handle(), image, imageLayout, pColor, rangeCount, pRanges);
+}
+
+void VkCommandBufferObj::ClearDepthStencilImage(VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue *pColor,
+                                                uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
+    vk::CmdClearDepthStencilImage(handle(), image, imageLayout, pColor, rangeCount, pRanges);
+}
+
+void VkCommandBufferObj::BuildAccelerationStructure(VkAccelerationStructureObj *as, VkBuffer scratchBuffer) {
+    BuildAccelerationStructure(as, scratchBuffer, VK_NULL_HANDLE);
+}
+
+void VkCommandBufferObj::BuildAccelerationStructure(VkAccelerationStructureObj *as, VkBuffer scratchBuffer, VkBuffer instanceData) {
+    PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV =
+        (PFN_vkCmdBuildAccelerationStructureNV)vk::GetDeviceProcAddr(as->dev(), "vkCmdBuildAccelerationStructureNV");
+    assert(vkCmdBuildAccelerationStructureNV != nullptr);
+
+    vkCmdBuildAccelerationStructureNV(handle(), &as->info(), instanceData, 0, VK_FALSE, as->handle(), VK_NULL_HANDLE, scratchBuffer,
+                                      0);
+}
+
+void VkCommandBufferObj::PrepareAttachments(const vector<std::unique_ptr<VkImageObj>> &color_atts,
+                                            VkDepthStencilObj *depth_stencil_att) {
+    for (const auto &color_att : color_atts) {
+        color_att->SetLayout(this, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
+    }
+
+    if (depth_stencil_att && depth_stencil_att->Initialized()) {
+        VkImageAspectFlags aspect = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+        if (FormatIsDepthOnly(depth_stencil_att->Format())) aspect = VK_IMAGE_ASPECT_DEPTH_BIT;
+        if (FormatIsStencilOnly(depth_stencil_att->Format())) aspect = VK_IMAGE_ASPECT_STENCIL_BIT;
+
+        depth_stencil_att->SetLayout(this, aspect, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
+    }
+}
+
+void VkCommandBufferObj::BeginRenderPass(const VkRenderPassBeginInfo &info) {
+    vk::CmdBeginRenderPass(handle(), &info, VK_SUBPASS_CONTENTS_INLINE);
+}
+
+void VkCommandBufferObj::EndRenderPass() { vk::CmdEndRenderPass(handle()); }
+
+void VkCommandBufferObj::SetViewport(uint32_t firstViewport, uint32_t viewportCount, const VkViewport *pViewports) {
+    vk::CmdSetViewport(handle(), firstViewport, viewportCount, pViewports);
+}
+
+void VkCommandBufferObj::SetStencilReference(VkStencilFaceFlags faceMask, uint32_t reference) {
+    vk::CmdSetStencilReference(handle(), faceMask, reference);
+}
+
+void VkCommandBufferObj::DrawIndexed(uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
+                                     uint32_t firstInstance) {
+    vk::CmdDrawIndexed(handle(), indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
+}
+
+void VkCommandBufferObj::Draw(uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) {
+    vk::CmdDraw(handle(), vertexCount, instanceCount, firstVertex, firstInstance);
+}
+
+void VkCommandBufferObj::QueueCommandBuffer(bool checkSuccess) {
+    VkFenceObj nullFence;
+    QueueCommandBuffer(nullFence, checkSuccess);
+}
+
+void VkCommandBufferObj::QueueCommandBuffer(const VkFenceObj &fence, bool checkSuccess) {
+    VkResult err = VK_SUCCESS;
+
+    err = m_queue->submit(*this, fence, checkSuccess);
+    if (checkSuccess) {
+        ASSERT_VK_SUCCESS(err);
+    }
+
+    err = m_queue->wait();
+    if (checkSuccess) {
+        ASSERT_VK_SUCCESS(err);
+    }
+
+    // TODO: Determine if we really want this serialization here
+    // Wait for work to finish before cleaning up.
+    vk::DeviceWaitIdle(m_device->device());
+}
+
+void VkCommandBufferObj::BindDescriptorSet(VkDescriptorSetObj &descriptorSet) {
+    VkDescriptorSet set_obj = descriptorSet.GetDescriptorSetHandle();
+
+    // bind pipeline, vertex buffer (descriptor set) and WVP (dynamic buffer view)
+    if (set_obj) {
+        vk::CmdBindDescriptorSets(handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, descriptorSet.GetPipelineLayout(), 0, 1, &set_obj, 0,
+                                  NULL);
+    }
+}
+
+void VkCommandBufferObj::BindIndexBuffer(VkBufferObj *indexBuffer, VkDeviceSize offset, VkIndexType indexType) {
+    vk::CmdBindIndexBuffer(handle(), indexBuffer->handle(), offset, indexType);
+}
+
+void VkCommandBufferObj::BindVertexBuffer(VkConstantBufferObj *vertexBuffer, VkDeviceSize offset, uint32_t binding) {
+    vk::CmdBindVertexBuffers(handle(), binding, 1, &vertexBuffer->handle(), &offset);
+}
+
+VkCommandPoolObj::VkCommandPoolObj(VkDeviceObj *device, uint32_t queue_family_index, VkCommandPoolCreateFlags flags) {
+    init(*device, vk_testing::CommandPool::create_info(queue_family_index, flags));
+}
+
+bool VkDepthStencilObj::Initialized() { return m_initialized; }
+VkDepthStencilObj::VkDepthStencilObj(VkDeviceObj *device) : VkImageObj(device) { m_initialized = false; }
+
+VkImageView *VkDepthStencilObj::BindInfo() { return &m_attachmentBindInfo; }
+
+VkFormat VkDepthStencilObj::Format() const { return this->m_depth_stencil_fmt; }
+
+void VkDepthStencilObj::Init(VkDeviceObj *device, int32_t width, int32_t height, VkFormat format, VkImageUsageFlags usage) {
+    VkImageViewCreateInfo view_info = {};
+
+    m_device = device;
+    m_initialized = true;
+    m_depth_stencil_fmt = format;
+
+    /* create image */
+    VkImageObj::Init(width, height, 1, m_depth_stencil_fmt, usage, VK_IMAGE_TILING_OPTIMAL);
+
+    VkImageAspectFlags aspect = VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_DEPTH_BIT;
+    if (FormatIsDepthOnly(format))
+        aspect = VK_IMAGE_ASPECT_DEPTH_BIT;
+    else if (FormatIsStencilOnly(format))
+        aspect = VK_IMAGE_ASPECT_STENCIL_BIT;
+
+    SetLayout(aspect, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
+
+    view_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    view_info.pNext = NULL;
+    view_info.image = VK_NULL_HANDLE;
+    view_info.subresourceRange.aspectMask = aspect;
+    view_info.subresourceRange.baseMipLevel = 0;
+    view_info.subresourceRange.levelCount = 1;
+    view_info.subresourceRange.baseArrayLayer = 0;
+    view_info.subresourceRange.layerCount = 1;
+    view_info.flags = 0;
+    view_info.format = m_depth_stencil_fmt;
+    view_info.image = handle();
+    view_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    m_imageView.init(*m_device, view_info);
+
+    m_attachmentBindInfo = m_imageView.handle();
+}
diff --git a/src/third_party/vulkan-validation-layers/src/tests/vkrenderframework.h b/src/third_party/vulkan-validation-layers/src/tests/vkrenderframework.h
new file mode 100644
index 0000000..db69131
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/vkrenderframework.h
@@ -0,0 +1,566 @@
+/*
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Dave Houlton <daveh@lunarg.com>
+ */
+
+#ifndef VKRENDERFRAMEWORK_H
+#define VKRENDERFRAMEWORK_H
+
+#include "lvt_function_pointers.h"
+
+#ifdef ANDROID
+#include "vktestframeworkandroid.h"
+class VkImageObj;
+#else
+#include "vktestframework.h"
+#endif
+
+#if defined(ANDROID)
+#include <android/log.h>
+#if defined(VALIDATION_APK)
+#include <android_native_app_glue.h>
+#endif
+#endif
+
+#include <algorithm>
+#include <array>
+#include <map>
+#include <memory>
+#include <vector>
+#include <unordered_set>
+
+using namespace std;
+
+using vk_testing::MakeVkHandles;
+
+template <class Dst, class Src>
+std::vector<Dst *> MakeTestbindingHandles(const std::vector<Src *> &v) {
+    std::vector<Dst *> handles;
+    handles.reserve(v.size());
+    std::transform(v.begin(), v.end(), std::back_inserter(handles), [](const Src *o) { return static_cast<Dst *>(o); });
+    return handles;
+}
+
+typedef vk_testing::Queue VkQueueObj;
+class VkDeviceObj : public vk_testing::Device {
+  public:
+    VkDeviceObj(uint32_t id, VkPhysicalDevice obj);
+    VkDeviceObj(uint32_t id, VkPhysicalDevice obj, std::vector<const char *> &extension_names,
+                VkPhysicalDeviceFeatures *features = nullptr, void *create_device_pnext = nullptr);
+
+    uint32_t QueueFamilyMatching(VkQueueFlags with, VkQueueFlags without, bool all_bits = true);
+    uint32_t QueueFamilyWithoutCapabilities(VkQueueFlags capabilities) {
+        // an all_bits match with 0 matches all
+        return QueueFamilyMatching(VkQueueFlags(0), capabilities, true /* all_bits with */);
+    }
+
+    VkDevice device() { return handle(); }
+    void SetDeviceQueue();
+    VkQueueObj *GetDefaultQueue();
+    VkQueueObj *GetDefaultComputeQueue();
+
+    uint32_t id;
+    VkPhysicalDeviceProperties props;
+    std::vector<VkQueueFamilyProperties> queue_props;
+
+    VkQueue m_queue;
+};
+
+// ErrorMonitor Usage:
+//
+// Call SetDesiredFailureMsg with a string to be compared against all
+// encountered log messages, or a validation error enum identifying
+// desired error message. Passing NULL or VALIDATION_ERROR_MAX_ENUM
+// will match all log messages. logMsg will return true for skipCall
+// only if msg is matched or NULL.
+//
+// Call VerifyFound to determine if all desired failure messages
+// were encountered. Call VerifyNotFound to determine if any unexpected
+// failure was encountered.
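+//
+// Illustrative sketch (assumes a test fixture exposing this monitor as
+// m_errorMonitor; the message string below is a hypothetical example):
+//
+//     m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT,
+//                                          "expected-message-substring");
+//     /* ... issue the Vulkan call expected to trigger the message ... */
+//     m_errorMonitor->VerifyFound();
+//
+//     m_errorMonitor->ExpectSuccess();
+//     /* ... issue calls that should not produce validation errors ... */
+//     m_errorMonitor->VerifyNotFound();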
+class ErrorMonitor {
+  public:
+    ErrorMonitor();
+
+    ~ErrorMonitor() NOEXCEPT;
+
+    // Set monitor to pristine state
+    void Reset();
+
+    // ErrorMonitor will look for an error message containing the specified string(s)
+    void SetDesiredFailureMsg(const VkFlags msgFlags, const std::string msg);
+    void SetDesiredFailureMsg(const VkFlags msgFlags, const char *const msgString);
+
+    // ErrorMonitor will look for an error message containing the specified string(s)
+    template <typename Iter>
+    void SetDesiredFailureMsg(const VkFlags msgFlags, Iter iter, const Iter end) {
+        for (; iter != end; ++iter) {
+            SetDesiredFailureMsg(msgFlags, *iter);
+        }
+    }
+
+    // Set an error that the error monitor will ignore. Do not use this function if you are creating a new test.
+    // TODO: This is a stopgap to block new unexpected errors from being introduced. The long-term goal is to remove the use of this
+    // function and its definition.
+    void SetUnexpectedError(const char *const msg);
+
+    // Set an error that should not cause a test failure
+    void SetAllowedFailureMsg(const char *const msg);
+
+    VkBool32 CheckForDesiredMsg(const char *const msgString);
+    vector<string> GetOtherFailureMsgs() const;
+    VkDebugReportFlagsEXT GetMessageFlags() const;
+    bool AnyDesiredMsgFound() const;
+    bool AllDesiredMsgsFound() const;
+    void SetError(const char *const errorString);
+    void SetBailout(bool *bailout);
+    void DumpFailureMsgs() const;
+
+    // Helpers
+
+    // ExpectSuccess now takes an optional argument allowing a custom combination of debug flags
+    void ExpectSuccess(VkDebugReportFlagsEXT const message_flag_mask = VK_DEBUG_REPORT_ERROR_BIT_EXT);
+
+    void VerifyFound();
+    void VerifyNotFound();
+
+  private:
+    // TODO: This is a stopgap to block new unexpected errors from being introduced. The long-term goal is to remove the use of this
+    // function and its definition.
+    bool IgnoreMessage(std::string const &msg) const;
+
+    VkFlags message_flags_;
+    std::unordered_multiset<std::string> desired_message_strings_;
+    std::unordered_multiset<std::string> failure_message_strings_;
+    std::vector<std::string> ignore_message_strings_;
+    std::vector<std::string> allowed_message_strings_;
+    vector<string> other_messages_;
+    test_platform_thread_mutex mutex_;
+    bool *bailout_;
+    bool message_found_;
+};
+
+class VkCommandPoolObj;
+class VkCommandBufferObj;
+class VkDepthStencilObj;
+
+class VkRenderFramework : public VkTestFramework {
+  public:
+    VkInstance instance() { return inst; }
+    VkDevice device() { return m_device->device(); }
+    VkDeviceObj *DeviceObj() const { return m_device; }
+    VkPhysicalDevice gpu();
+    VkRenderPass renderPass() { return m_renderPass; }
+    const VkRenderPassCreateInfo &RenderPassInfo() const { return renderPass_info_; };
+    VkFramebuffer framebuffer() { return m_framebuffer; }
+    ErrorMonitor *Monitor();
+
+    void InitViewport(float width, float height);
+    void InitViewport();
+    bool InitSurface();
+    bool InitSurface(float width, float height);
+    bool InitSwapchain(VkSurfaceKHR &surface, VkImageUsageFlags imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+                       VkSurfaceTransformFlagBitsKHR preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR);
+    bool InitSwapchain(VkImageUsageFlags imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+                       VkSurfaceTransformFlagBitsKHR preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR);
+    void DestroySwapchain();
+    void InitRenderTarget();
+    void InitRenderTarget(uint32_t targets);
+    void InitRenderTarget(VkImageView *dsBinding);
+    void InitRenderTarget(uint32_t targets, VkImageView *dsBinding);
+    void DestroyRenderTarget();
+    void InitFramework(PFN_vkDebugReportCallbackEXT = NULL, void *userData = NULL, void *instance_pnext = NULL);
+
+    void ShutdownFramework();
+    void GetPhysicalDeviceFeatures(VkPhysicalDeviceFeatures *features);
+    void GetPhysicalDeviceProperties(VkPhysicalDeviceProperties *props);
+    void InitState(VkPhysicalDeviceFeatures *features = nullptr, void *create_device_pnext = nullptr,
+                   const VkCommandPoolCreateFlags flags = 0);
+
+    const VkRenderPassBeginInfo &renderPassBeginInfo() const { return m_renderPassBeginInfo; }
+
+    bool InstanceLayerSupported(const char *name, uint32_t specVersion = 0, uint32_t implementationVersion = 0);
+    bool EnableDeviceProfileLayer();
+    bool InstanceExtensionSupported(const char *name, uint32_t specVersion = 0);
+    bool InstanceExtensionEnabled(const char *name);
+    bool DeviceExtensionSupported(VkPhysicalDevice dev, const char *layer, const char *name, uint32_t specVersion = 0);
+    bool DeviceExtensionEnabled(const char *name);
+    bool DeviceIsMockICD();
+    bool DeviceSimulation();
+
+  protected:
+    VkRenderFramework();
+    virtual ~VkRenderFramework() = 0;
+
+    VkApplicationInfo app_info;
+    VkInstance inst;
+    VkPhysicalDevice objs[16];
+    uint32_t gpu_count;
+    VkDeviceObj *m_device;
+    VkCommandPoolObj *m_commandPool;
+    VkCommandBufferObj *m_commandBuffer;
+    VkRenderPass m_renderPass;
+    VkRenderPassCreateInfo renderPass_info_ = {};
+    VkFramebuffer m_framebuffer;
+    VkSurfaceKHR m_surface;
+    VkSwapchainKHR m_swapchain;
+    ErrorMonitor *m_errorMonitor = {};
+    std::vector<VkViewport> m_viewports;
+    std::vector<VkRect2D> m_scissors;
+    float m_lineWidth;
+    float m_depthBiasConstantFactor;
+    float m_depthBiasClamp;
+    float m_depthBiasSlopeFactor;
+    float m_blendConstants[4];
+    float m_minDepthBounds;
+    float m_maxDepthBounds;
+    uint32_t m_compareMask;
+    uint32_t m_writeMask;
+    uint32_t m_reference;
+    bool m_addRenderPassSelfDependency;
+    std::vector<VkClearValue> m_renderPassClearValues;
+    VkRenderPassBeginInfo m_renderPassBeginInfo;
+    vector<std::unique_ptr<VkImageObj>> m_renderTargets;
+    float m_width, m_height;
+    VkFormat m_render_target_fmt;
+    VkFormat m_depth_stencil_fmt;
+    VkClearColorValue m_clear_color;
+    bool m_clear_via_load_op;
+    float m_depth_clear_color;
+    uint32_t m_stencil_clear_color;
+    VkDepthStencilObj *m_depthStencil;
+    PFN_vkCreateDebugReportCallbackEXT m_CreateDebugReportCallback;
+    PFN_vkDestroyDebugReportCallbackEXT m_DestroyDebugReportCallback;
+    PFN_vkDebugReportMessageEXT m_DebugReportMessage;
+    VkDebugReportCallbackEXT m_globalMsgCallback;
+    VkDebugReportCallbackEXT m_devMsgCallback;
+
+    std::vector<const char *> m_instance_layer_names;
+    std::vector<const char *> m_instance_extension_names;
+    std::vector<const char *> m_device_extension_names;
+};
+
+class VkDescriptorSetObj;
+class VkConstantBufferObj;
+class VkPipelineObj;
+class VkDescriptorSetObj;
+typedef vk_testing::Fence VkFenceObj;
+typedef vk_testing::Buffer VkBufferObj;
+typedef vk_testing::AccelerationStructure VkAccelerationStructureObj;
+
+class VkCommandPoolObj : public vk_testing::CommandPool {
+  public:
+    VkCommandPoolObj(VkDeviceObj *device, uint32_t queue_family_index, VkCommandPoolCreateFlags flags = 0);
+};
+
+class VkCommandBufferObj : public vk_testing::CommandBuffer {
+  public:
+    VkCommandBufferObj(VkDeviceObj *device, VkCommandPoolObj *pool, VkCommandBufferLevel level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
+                       VkQueueObj *queue = nullptr);
+    void PipelineBarrier(VkPipelineStageFlags src_stages, VkPipelineStageFlags dest_stages, VkDependencyFlags dependencyFlags,
+                         uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount,
+                         const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount,
+                         const VkImageMemoryBarrier *pImageMemoryBarriers);
+    void ClearAllBuffers(const vector<std::unique_ptr<VkImageObj>> &color_objs, VkClearColorValue clear_color,
+                         VkDepthStencilObj *depth_stencil_obj, float depth_clear_value, uint32_t stencil_clear_value);
+    void PrepareAttachments(const vector<std::unique_ptr<VkImageObj>> &color_atts, VkDepthStencilObj *depth_stencil_att);
+    void BindDescriptorSet(VkDescriptorSetObj &descriptorSet);
+    void BindIndexBuffer(VkBufferObj *indexBuffer, VkDeviceSize offset, VkIndexType indexType);
+    void BindVertexBuffer(VkConstantBufferObj *vertexBuffer, VkDeviceSize offset, uint32_t binding);
+    void BeginRenderPass(const VkRenderPassBeginInfo &info);
+    void EndRenderPass();
+    void FillBuffer(VkBuffer buffer, VkDeviceSize offset, VkDeviceSize fill_size, uint32_t data);
+    void Draw(uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance);
+    void DrawIndexed(uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
+                     uint32_t firstInstance);
+    void QueueCommandBuffer(bool checkSuccess = true);
+    void QueueCommandBuffer(const VkFenceObj &fence, bool checkSuccess = true);
+    void SetViewport(uint32_t firstViewport, uint32_t viewportCount, const VkViewport *pViewports);
+    void SetStencilReference(VkStencilFaceFlags faceMask, uint32_t reference);
+    void UpdateBuffer(VkBuffer buffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData);
+    void CopyImage(VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
+                   uint32_t regionCount, const VkImageCopy *pRegions);
+    void ResolveImage(VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout,
+                      uint32_t regionCount, const VkImageResolve *pRegions);
+    void ClearColorImage(VkImage image, VkImageLayout imageLayout, const VkClearColorValue *pColor, uint32_t rangeCount,
+                         const VkImageSubresourceRange *pRanges);
+    void ClearDepthStencilImage(VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue *pColor,
+                                uint32_t rangeCount, const VkImageSubresourceRange *pRanges);
+    void BuildAccelerationStructure(VkAccelerationStructureObj *as, VkBuffer scratchBuffer);
+    void BuildAccelerationStructure(VkAccelerationStructureObj *as, VkBuffer scratchBuffer, VkBuffer instanceData);
+
+  protected:
+    VkDeviceObj *m_device;
+    VkQueueObj *m_queue;
+};
+
+class VkConstantBufferObj : public VkBufferObj {
+  public:
+    VkConstantBufferObj(VkDeviceObj *device,
+                        VkBufferUsageFlags usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT);
+    VkConstantBufferObj(VkDeviceObj *device, VkDeviceSize size, const void *data,
+                        VkBufferUsageFlags usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT);
+
+    VkDescriptorBufferInfo m_descriptorBufferInfo;
+
+  protected:
+    VkDeviceObj *m_device;
+};
+
+class VkRenderpassObj {
+  public:
+    VkRenderpassObj(VkDeviceObj *device);
+    ~VkRenderpassObj() NOEXCEPT;
+    VkRenderPass handle() { return m_renderpass; }
+
+  protected:
+    VkRenderPass m_renderpass;
+    VkDevice device;
+};
+
+class VkImageObj : public vk_testing::Image {
+  public:
+    VkImageObj(VkDeviceObj *dev);
+    bool IsCompatible(VkImageUsageFlags usages, VkFormatFeatureFlags features);
+
+  public:
+    void Init(uint32_t const width, uint32_t const height, uint32_t const mipLevels, VkFormat const format, VkFlags const usage,
+              VkImageTiling const tiling = VK_IMAGE_TILING_LINEAR, VkMemoryPropertyFlags const reqs = 0,
+              const std::vector<uint32_t> *queue_families = nullptr, bool memory = true);
+    void init(const VkImageCreateInfo *create_info);
+
+    void InitNoLayout(uint32_t const width, uint32_t const height, uint32_t const mipLevels, VkFormat const format,
+                      VkFlags const usage, VkImageTiling tiling = VK_IMAGE_TILING_LINEAR, VkMemoryPropertyFlags reqs = 0,
+                      const std::vector<uint32_t> *queue_families = nullptr, bool memory = true);
+
+    //    void clear( CommandBuffer*, uint32_t[4] );
+
+    void Layout(VkImageLayout const layout) { m_descriptorImageInfo.imageLayout = layout; }
+
+    VkDeviceMemory memory() const { return Image::memory().handle(); }
+
+    void *MapMemory() { return Image::memory().map(); }
+
+    void UnmapMemory() { Image::memory().unmap(); }
+
+    void ImageMemoryBarrier(VkCommandBufferObj *cmd, VkImageAspectFlags aspect, VkFlags output_mask, VkFlags input_mask,
+                            VkImageLayout image_layout, VkPipelineStageFlags src_stages = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+                            VkPipelineStageFlags dest_stages = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+                            uint32_t srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
+                            uint32_t dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED);
+
+    VkResult CopyImage(VkImageObj &src_image);
+
+    VkResult CopyImageOut(VkImageObj &dst_image);
+
+    std::array<std::array<uint32_t, 16>, 16> Read();
+
+    VkImage image() const { return handle(); }
+
+    VkImageView targetView(VkFormat format, VkImageAspectFlags aspect = VK_IMAGE_ASPECT_COLOR_BIT) {
+        if (!m_targetView.initialized()) {
+            VkImageViewCreateInfo createView = {};
+            createView.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+            createView.image = handle();
+            createView.viewType = VK_IMAGE_VIEW_TYPE_2D;
+            createView.format = format;
+            createView.components.r = VK_COMPONENT_SWIZZLE_R;
+            createView.components.g = VK_COMPONENT_SWIZZLE_G;
+            createView.components.b = VK_COMPONENT_SWIZZLE_B;
+            createView.components.a = VK_COMPONENT_SWIZZLE_A;
+            createView.subresourceRange = {aspect, 0, 1, 0, 1};
+            createView.flags = 0;
+            m_targetView.init(*m_device, createView);
+        }
+        return m_targetView.handle();
+    }
+
+    void SetLayout(VkCommandBufferObj *cmd_buf, VkImageAspectFlags aspect, VkImageLayout image_layout);
+    void SetLayout(VkImageAspectFlags aspect, VkImageLayout image_layout);
+
+    VkImageLayout Layout() const { return m_descriptorImageInfo.imageLayout; }
+    uint32_t width() const { return extent().width; }
+    uint32_t height() const { return extent().height; }
+    VkDeviceObj *device() const { return m_device; }
+
+  protected:
+    VkDeviceObj *m_device;
+
+    vk_testing::ImageView m_targetView;
+    VkDescriptorImageInfo m_descriptorImageInfo;
+};
+
+class VkTextureObj : public VkImageObj {
+  public:
+    VkTextureObj(VkDeviceObj *device, uint32_t *colors = NULL);
+
+    const VkDescriptorImageInfo &DescriptorImageInfo() const { return m_descriptorImageInfo; }
+
+  protected:
+    VkDeviceObj *m_device;
+    vk_testing::ImageView m_textureView;
+};
+
+class VkDepthStencilObj : public VkImageObj {
+  public:
+    VkDepthStencilObj(VkDeviceObj *device);
+    void Init(VkDeviceObj *device, int32_t width, int32_t height, VkFormat format,
+              VkImageUsageFlags usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT);
+    bool Initialized();
+    VkImageView *BindInfo();
+
+    VkFormat Format() const;
+
+  protected:
+    VkDeviceObj *m_device;
+    bool m_initialized;
+    vk_testing::ImageView m_imageView;
+    VkFormat m_depth_stencil_fmt;
+    VkImageView m_attachmentBindInfo;
+};
+
+class VkSamplerObj : public vk_testing::Sampler {
+  public:
+    VkSamplerObj(VkDeviceObj *device);
+
+  protected:
+    VkDeviceObj *m_device;
+};
+
+class VkDescriptorSetLayoutObj : public vk_testing::DescriptorSetLayout {
+  public:
+    VkDescriptorSetLayoutObj() = default;
+    VkDescriptorSetLayoutObj(const VkDeviceObj *device,
+                             const std::vector<VkDescriptorSetLayoutBinding> &descriptor_set_bindings = {},
+                             VkDescriptorSetLayoutCreateFlags flags = 0, void *pNext = NULL);
+
+    // Move constructor and move assignment operator for Visual Studio 2013
+    VkDescriptorSetLayoutObj(VkDescriptorSetLayoutObj &&src) NOEXCEPT : DescriptorSetLayout(std::move(src)){};
+    VkDescriptorSetLayoutObj &operator=(VkDescriptorSetLayoutObj &&src) NOEXCEPT {
+        DescriptorSetLayout::operator=(std::move(src));
+        return *this;
+    }
+};
+
+class VkDescriptorSetObj : public vk_testing::DescriptorPool {
+  public:
+    VkDescriptorSetObj(VkDeviceObj *device);
+    ~VkDescriptorSetObj() NOEXCEPT;
+
+    int AppendDummy();
+    int AppendBuffer(VkDescriptorType type, VkConstantBufferObj &constantBuffer);
+    int AppendSamplerTexture(VkSamplerObj *sampler, VkTextureObj *texture);
+    void CreateVKDescriptorSet(VkCommandBufferObj *commandBuffer);
+
+    VkDescriptorSet GetDescriptorSetHandle() const;
+    VkPipelineLayout GetPipelineLayout() const;
+
+  protected:
+    VkDeviceObj *m_device;
+    std::vector<VkDescriptorSetLayoutBinding> m_layout_bindings;
+    std::map<VkDescriptorType, int> m_type_counts;
+    int m_nextSlot;
+
+    vector<VkDescriptorImageInfo> m_imageSamplerDescriptors;
+    vector<VkWriteDescriptorSet> m_writes;
+
+    vk_testing::DescriptorSetLayout m_layout;
+    vk_testing::PipelineLayout m_pipeline_layout;
+    vk_testing::DescriptorSet *m_set = NULL;
+};
+
+class VkShaderObj : public vk_testing::ShaderModule {
+  public:
+    VkShaderObj(VkDeviceObj *device, const char *shaderText, VkShaderStageFlagBits stage, VkRenderFramework *framework,
+                char const *name = "main", bool debug = false, VkSpecializationInfo *specInfo = nullptr,
+                uint32_t spirv_minor_version = 0);
+    VkShaderObj(VkDeviceObj *device, const std::string spv_source, VkShaderStageFlagBits stage, VkRenderFramework *framework,
+                char const *name = "main", VkSpecializationInfo *specInfo = nullptr);
+    VkPipelineShaderStageCreateInfo const &GetStageCreateInfo() const;
+
+  protected:
+    VkPipelineShaderStageCreateInfo m_stage_info;
+    VkDeviceObj *m_device;
+};
+
+class VkPipelineLayoutObj : public vk_testing::PipelineLayout {
+  public:
+    VkPipelineLayoutObj() = default;
+    VkPipelineLayoutObj(VkDeviceObj *device, const std::vector<const VkDescriptorSetLayoutObj *> &descriptor_layouts = {},
+                        const std::vector<VkPushConstantRange> &push_constant_ranges = {});
+
+    // Move constructor and move assignment operator for Visual Studio 2013
+    VkPipelineLayoutObj(VkPipelineLayoutObj &&src) NOEXCEPT : PipelineLayout(std::move(src)) {}
+    VkPipelineLayoutObj &operator=(VkPipelineLayoutObj &&src) NOEXCEPT {
+        PipelineLayout::operator=(std::move(src));
+        return *this;
+    }
+
+    void Reset();
+};
+
+class VkPipelineObj : public vk_testing::Pipeline {
+  public:
+    VkPipelineObj(VkDeviceObj *device);
+    void AddShader(VkShaderObj *shaderObj);
+    void AddShader(VkPipelineShaderStageCreateInfo const &createInfo);
+    void AddVertexInputAttribs(VkVertexInputAttributeDescription *vi_attrib, uint32_t count);
+    void AddVertexInputBindings(VkVertexInputBindingDescription *vi_binding, uint32_t count);
+    void AddColorAttachment(uint32_t binding, const VkPipelineColorBlendAttachmentState &att);
+    void MakeDynamic(VkDynamicState state);
+
+    void AddDefaultColorAttachment(VkColorComponentFlags writeMask = 0xf /*=R|G|B|A*/) {
+        VkPipelineColorBlendAttachmentState att = {};
+        att.blendEnable = VK_FALSE;
+        att.colorWriteMask = writeMask;
+        AddColorAttachment(0, att);
+    }
+
+    void SetDepthStencil(const VkPipelineDepthStencilStateCreateInfo *);
+    void SetMSAA(const VkPipelineMultisampleStateCreateInfo *ms_state);
+    void SetInputAssembly(const VkPipelineInputAssemblyStateCreateInfo *ia_state);
+    void SetRasterization(const VkPipelineRasterizationStateCreateInfo *rs_state);
+    void SetTessellation(const VkPipelineTessellationStateCreateInfo *te_state);
+    void SetViewport(const vector<VkViewport> viewports);
+    void SetScissor(const vector<VkRect2D> scissors);
+    void SetLineState(const VkPipelineRasterizationLineStateCreateInfoEXT *line_state);
+
+    void InitGraphicsPipelineCreateInfo(VkGraphicsPipelineCreateInfo *gp_ci);
+
+    VkResult CreateVKPipeline(VkPipelineLayout layout, VkRenderPass render_pass, VkGraphicsPipelineCreateInfo *gp_ci = nullptr);
+
+  protected:
+    VkPipelineVertexInputStateCreateInfo m_vi_state;
+    VkPipelineInputAssemblyStateCreateInfo m_ia_state;
+    VkPipelineRasterizationStateCreateInfo m_rs_state;
+    VkPipelineColorBlendStateCreateInfo m_cb_state;
+    VkPipelineDepthStencilStateCreateInfo const *m_ds_state;
+    VkPipelineViewportStateCreateInfo m_vp_state;
+    VkPipelineMultisampleStateCreateInfo m_ms_state;
+    VkPipelineTessellationStateCreateInfo const *m_te_state;
+    VkPipelineDynamicStateCreateInfo m_pd_state;
+    VkPipelineRasterizationLineStateCreateInfoEXT m_line_state;
+    vector<VkDynamicState> m_dynamic_state_enables;
+    vector<VkViewport> m_viewports;
+    vector<VkRect2D> m_scissors;
+    VkDeviceObj *m_device;
+    vector<VkPipelineShaderStageCreateInfo> m_shaderStages;
+    vector<VkPipelineColorBlendAttachmentState> m_colorAttachments;
+};
+
+#endif  // VKRENDERFRAMEWORK_H
diff --git a/src/third_party/vulkan-validation-layers/src/tests/vktestbinding.cpp b/src/third_party/vulkan-validation-layers/src/tests/vktestbinding.cpp
new file mode 100644
index 0000000..7b6d9c1
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/vktestbinding.cpp
@@ -0,0 +1,874 @@
+/*
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Tony Barbour <tony@LunarG.com>
+ */
+
+#include "vktestbinding.h"
+
+#include <string.h>  // memset(), memcmp()
+#include <algorithm>
+#include <cassert>
+#include <iostream>
+#include <vector>
+
+#include "test_common.h"
+#include "vk_typemap_helper.h"
+
+namespace {
+
+#define NON_DISPATCHABLE_HANDLE_INIT(create_func, dev, ...)                              \
+    do {                                                                                 \
+        handle_type handle;                                                              \
+        if (EXPECT(create_func(dev.handle(), __VA_ARGS__, NULL, &handle) == VK_SUCCESS)) \
+            NonDispHandle::init(dev.handle(), handle);                                   \
+    } while (0)
+
+#define NON_DISPATCHABLE_HANDLE_DTOR(cls, destroy_func)            \
+    cls::~cls() {                                                  \
+        if (initialized()) destroy_func(device(), handle(), NULL); \
+    }
+
+#define STRINGIFY(x) #x
+#define EXPECT(expr) ((expr) ? true : expect_failure(STRINGIFY(expr), __FILE__, __LINE__, __FUNCTION__))
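+
+// For reference, a call such as NON_DISPATCHABLE_HANDLE_INIT(vk::CreateFence, dev, &info)
+// expands roughly to the following (sketch, not literal preprocessor output):
+//
+//     VkFence handle;
+//     if (EXPECT(vk::CreateFence(dev.handle(), &info, NULL, &handle) == VK_SUCCESS))
+//         NonDispHandle::init(dev.handle(), handle);
+//
+// i.e. the wrapper only adopts the handle when creation succeeds, and EXPECT
+// routes a failed expectation to the registered error callback.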
+
+vk_testing::ErrorCallback error_callback;
+
+bool expect_failure(const char *expr, const char *file, unsigned int line, const char *function) {
+    if (error_callback) {
+        error_callback(expr, file, line, function);
+    } else {
+        std::cerr << file << ":" << line << ": " << function << ": Expectation `" << expr << "' failed.\n";
+    }
+
+    return false;
+}
+
+}  // namespace
+
+namespace vk_testing {
+
+void set_error_callback(ErrorCallback callback) { error_callback = callback; }
+
+VkPhysicalDeviceProperties PhysicalDevice::properties() const {
+    VkPhysicalDeviceProperties info;
+
+    vk::GetPhysicalDeviceProperties(handle(), &info);
+
+    return info;
+}
+
+std::vector<VkQueueFamilyProperties> PhysicalDevice::queue_properties() const {
+    std::vector<VkQueueFamilyProperties> info;
+    uint32_t count;
+
+    // Call once with NULL data to receive count
+    vk::GetPhysicalDeviceQueueFamilyProperties(handle(), &count, NULL);
+    info.resize(count);
+    vk::GetPhysicalDeviceQueueFamilyProperties(handle(), &count, info.data());
+
+    return info;
+}
+
+VkPhysicalDeviceMemoryProperties PhysicalDevice::memory_properties() const {
+    VkPhysicalDeviceMemoryProperties info;
+
+    vk::GetPhysicalDeviceMemoryProperties(handle(), &info);
+
+    return info;
+}
+
+VkPhysicalDeviceFeatures PhysicalDevice::features() const {
+    VkPhysicalDeviceFeatures features;
+    vk::GetPhysicalDeviceFeatures(handle(), &features);
+    return features;
+}
+
+/*
+ * Return list of Global layers available
+ */
+std::vector<VkLayerProperties> GetGlobalLayers() {
+    VkResult err;
+    std::vector<VkLayerProperties> layers;
+    uint32_t layer_count;
+
+    do {
+        layer_count = 0;
+        err = vk::EnumerateInstanceLayerProperties(&layer_count, NULL);
+
+        if (err == VK_SUCCESS) {
+            // resize() rather than reserve() so layers.data() provides layer_count valid entries.
+            layers.resize(layer_count);
+            err = vk::EnumerateInstanceLayerProperties(&layer_count, layers.data());
+        }
+    } while (err == VK_INCOMPLETE);
+
+    assert(err == VK_SUCCESS);
+
+    return layers;
+}
+
+/*
+ * Return list of Global extensions provided by the ICD / Loader
+ */
+std::vector<VkExtensionProperties> GetGlobalExtensions() { return GetGlobalExtensions(NULL); }
+
+/*
+ * Return list of Global extensions provided by the specified layer.
+ * If pLayerName is NULL, the extensions implemented by the loader /
+ * ICDs are returned.
+ */
+std::vector<VkExtensionProperties> GetGlobalExtensions(const char *pLayerName) {
+    std::vector<VkExtensionProperties> exts;
+    uint32_t ext_count;
+    VkResult err;
+
+    do {
+        ext_count = 0;
+        err = vk::EnumerateInstanceExtensionProperties(pLayerName, &ext_count, NULL);
+
+        if (err == VK_SUCCESS) {
+            exts.resize(ext_count);
+            err = vk::EnumerateInstanceExtensionProperties(pLayerName, &ext_count, exts.data());
+        }
+    } while (err == VK_INCOMPLETE);
+
+    assert(err == VK_SUCCESS);
+
+    return exts;
+}
+
+/*
+ * Return list of PhysicalDevice extensions provided by the ICD / Loader
+ */
+std::vector<VkExtensionProperties> PhysicalDevice::extensions() const { return extensions(NULL); }
+
+/*
+ * Return list of PhysicalDevice extensions provided by the specified layer.
+ * If pLayerName is NULL, the extensions provided by the ICD / loader are returned.
+ */
+std::vector<VkExtensionProperties> PhysicalDevice::extensions(const char *pLayerName) const {
+    std::vector<VkExtensionProperties> exts;
+    VkResult err;
+
+    do {
+        uint32_t extCount = 0;
+        err = vk::EnumerateDeviceExtensionProperties(handle(), pLayerName, &extCount, NULL);
+
+        if (err == VK_SUCCESS) {
+            exts.resize(extCount);
+            err = vk::EnumerateDeviceExtensionProperties(handle(), pLayerName, &extCount, exts.data());
+        }
+    } while (err == VK_INCOMPLETE);
+
+    assert(err == VK_SUCCESS);
+
+    return exts;
+}
+
+bool PhysicalDevice::set_memory_type(const uint32_t type_bits, VkMemoryAllocateInfo *info, const VkFlags properties,
+                                     const VkFlags forbid) const {
+    uint32_t type_mask = type_bits;
+    // Search memtypes to find first index with those properties
+    for (uint32_t i = 0; i < memory_properties_.memoryTypeCount; i++) {
+        if ((type_mask & 1) == 1) {
+            // Type is available, does it match user properties?
+            if ((memory_properties_.memoryTypes[i].propertyFlags & properties) == properties &&
+                (memory_properties_.memoryTypes[i].propertyFlags & forbid) == 0) {
+                info->memoryTypeIndex = i;
+                return true;
+            }
+        }
+        type_mask >>= 1;
+    }
+    // No memory types matched, return failure
+    return false;
+}
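+
+// Illustrative usage sketch (assumes a vk_testing::Device dev and an Image img whose
+// memory requirements come from Image::memory_requirements()):
+//
+//     VkMemoryRequirements reqs = img.memory_requirements();
+//     VkMemoryAllocateInfo alloc_info = {};
+//     alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+//     alloc_info.allocationSize = reqs.size;
+//     if (dev.phy().set_memory_type(reqs.memoryTypeBits, &alloc_info,
+//                                   VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT)) {
+//         // alloc_info.memoryTypeIndex now names a compatible memory type.
+//     }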
+
+/*
+ * Return list of PhysicalDevice layers
+ */
+std::vector<VkLayerProperties> PhysicalDevice::layers() const {
+    std::vector<VkLayerProperties> layer_props;
+    VkResult err;
+
+    do {
+        uint32_t layer_count = 0;
+        err = vk::EnumerateDeviceLayerProperties(handle(), &layer_count, NULL);
+
+        if (err == VK_SUCCESS) {
+            // resize() rather than reserve() so layer_props.data() provides layer_count valid entries.
+            layer_props.resize(layer_count);
+            err = vk::EnumerateDeviceLayerProperties(handle(), &layer_count, layer_props.data());
+        }
+    } while (err == VK_INCOMPLETE);
+
+    assert(err == VK_SUCCESS);
+
+    return layer_props;
+}
+
+QueueCreateInfoArray::QueueCreateInfoArray(const std::vector<VkQueueFamilyProperties> &queue_props)
+    : queue_info_(), queue_priorities_() {
+    queue_info_.reserve(queue_props.size());
+
+    for (uint32_t i = 0; i < (uint32_t)queue_props.size(); ++i) {
+        if (queue_props[i].queueCount > 0) {
+            VkDeviceQueueCreateInfo qi = {};
+            qi.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
+            qi.pNext = NULL;
+            qi.queueFamilyIndex = i;
+            qi.queueCount = queue_props[i].queueCount;
+            queue_priorities_.emplace_back(qi.queueCount, 0.0f);
+            // Index with back(): queue_priorities_ only grows for families with queues,
+            // so it can be shorter than the family index i.
+            qi.pQueuePriorities = queue_priorities_.back().data();
+            queue_info_.push_back(qi);
+        }
+    }
+}
+
+Device::~Device() {
+    if (!initialized()) return;
+
+    vk::DestroyDevice(handle(), NULL);
+}
+
+void Device::init(std::vector<const char *> &extensions, VkPhysicalDeviceFeatures *features, void *create_device_pnext) {
+    // request all queues
+    const std::vector<VkQueueFamilyProperties> queue_props = phy_.queue_properties();
+    QueueCreateInfoArray queue_info(queue_props);
+    for (uint32_t i = 0; i < (uint32_t)queue_props.size(); i++) {
+        if (queue_props[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) {
+            graphics_queue_node_index_ = i;
+            break;
+        }
+    }
+    // Only request creation with queue families that have at least one queue
+    std::vector<VkDeviceQueueCreateInfo> create_queue_infos;
+    auto qci = queue_info.data();
+    for (uint32_t j = 0; j < queue_info.size(); ++j) {
+        if (qci[j].queueCount) {
+            create_queue_infos.push_back(qci[j]);
+        }
+    }
+
+    enabled_extensions_ = extensions;
+
+    VkDeviceCreateInfo dev_info = {};
+    dev_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
+    dev_info.pNext = create_device_pnext;
+    dev_info.queueCreateInfoCount = create_queue_infos.size();
+    dev_info.pQueueCreateInfos = create_queue_infos.data();
+    dev_info.enabledLayerCount = 0;
+    dev_info.ppEnabledLayerNames = NULL;
+    dev_info.enabledExtensionCount = extensions.size();
+    dev_info.ppEnabledExtensionNames = extensions.data();
+
+    VkPhysicalDeviceFeatures all_features;
+    // Let VkPhysicalDeviceFeatures2 take priority over VkPhysicalDeviceFeatures,
+    // since it supports extensions
+
+    if (!(lvl_find_in_chain<VkPhysicalDeviceFeatures2>(dev_info.pNext))) {
+        if (features) {
+            dev_info.pEnabledFeatures = features;
+        } else {
+            // request all supportable features enabled
+            all_features = phy().features();
+            dev_info.pEnabledFeatures = &all_features;
+        }
+    }
+
+    init(dev_info);
+}
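+
+// Illustrative sketch: features can also be enabled through the pNext chain instead
+// of pEnabledFeatures (assumes a vk_testing::Device device, a std::vector<const char *>
+// extension_names, and a features2 struct filled by the caller):
+//
+//     VkPhysicalDeviceFeatures2 features2 = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2};
+//     /* ... fill features2.features and any chained feature structs ... */
+//     device.init(extension_names, nullptr, &features2);
+//
+// When a VkPhysicalDeviceFeatures2 is present in the chain, the code above leaves
+// pEnabledFeatures unset and the chained structure governs the enabled features.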
+
+void Device::init(const VkDeviceCreateInfo &info) {
+    VkDevice dev;
+
+    if (EXPECT(vk::CreateDevice(phy_.handle(), &info, NULL, &dev) == VK_SUCCESS)) Handle::init(dev);
+
+    init_queues();
+    init_formats();
+}
+
+void Device::init_queues() {
+    uint32_t queue_node_count;
+    vk::GetPhysicalDeviceQueueFamilyProperties(phy_.handle(), &queue_node_count, NULL);
+    EXPECT(queue_node_count >= 1);
+
+    std::vector<VkQueueFamilyProperties> queue_props(queue_node_count);
+    vk::GetPhysicalDeviceQueueFamilyProperties(phy_.handle(), &queue_node_count, queue_props.data());
+
+    queue_families_.resize(queue_node_count);
+    for (uint32_t queue_family_i = 0; queue_family_i < queue_node_count; ++queue_family_i) {
+        const auto &queue_prop = queue_props[queue_family_i];
+
+        QueueFamilyQueues &queue_storage = queue_families_[queue_family_i];
+        queue_storage.reserve(queue_prop.queueCount);
+        for (uint32_t queue_i = 0; queue_i < queue_prop.queueCount; ++queue_i) {
+            // TODO: Need to add support for separate MEMMGR and work queues,
+            // including synchronization
+            VkQueue queue;
+            vk::GetDeviceQueue(handle(), queue_family_i, queue_i, &queue);
+
+            // Store single copy of the queue object that will self destruct
+            queue_storage.emplace_back(new Queue(queue, queue_family_i));
+
+            if (queue_prop.queueFlags & VK_QUEUE_GRAPHICS_BIT) {
+                queues_[GRAPHICS].push_back(queue_storage.back().get());
+            }
+
+            if (queue_prop.queueFlags & VK_QUEUE_COMPUTE_BIT) {
+                queues_[COMPUTE].push_back(queue_storage.back().get());
+            }
+
+            if (queue_prop.queueFlags & VK_QUEUE_TRANSFER_BIT) {
+                queues_[DMA].push_back(queue_storage.back().get());
+            }
+        }
+    }
+
+    EXPECT(!queues_[GRAPHICS].empty() || !queues_[COMPUTE].empty());
+}
+
+const Device::QueueFamilyQueues &Device::queue_family_queues(uint32_t queue_family) const {
+    assert(queue_family < queue_families_.size());
+    return queue_families_[queue_family];
+}
+
+void Device::init_formats() {
+    for (int f = VK_FORMAT_BEGIN_RANGE; f <= VK_FORMAT_END_RANGE; f++) {
+        const VkFormat fmt = static_cast<VkFormat>(f);
+        const VkFormatProperties props = format_properties(fmt);
+
+        if (props.linearTilingFeatures) {
+            const Format tmp = {fmt, VK_IMAGE_TILING_LINEAR, props.linearTilingFeatures};
+            formats_.push_back(tmp);
+        }
+
+        if (props.optimalTilingFeatures) {
+            const Format tmp = {fmt, VK_IMAGE_TILING_OPTIMAL, props.optimalTilingFeatures};
+            formats_.push_back(tmp);
+        }
+    }
+
+    EXPECT(!formats_.empty());
+}
+
+bool Device::IsEnabledExtension(const char *extension) {
+    const auto is_x = [&extension](const char *enabled_extension) { return strcmp(extension, enabled_extension) == 0; };
+    return std::any_of(enabled_extensions_.begin(), enabled_extensions_.end(), is_x);
+}
+
+VkFormatProperties Device::format_properties(VkFormat format) {
+    VkFormatProperties data;
+    vk::GetPhysicalDeviceFormatProperties(phy().handle(), format, &data);
+
+    return data;
+}
+
+void Device::wait() { EXPECT(vk::DeviceWaitIdle(handle()) == VK_SUCCESS); }
+
+VkResult Device::wait(const std::vector<const Fence *> &fences, bool wait_all, uint64_t timeout) {
+    const std::vector<VkFence> fence_handles = MakeVkHandles<VkFence>(fences);
+    VkResult err = vk::WaitForFences(handle(), fence_handles.size(), fence_handles.data(), wait_all, timeout);
+    EXPECT(err == VK_SUCCESS || err == VK_TIMEOUT);
+
+    return err;
+}
+
+void Device::update_descriptor_sets(const std::vector<VkWriteDescriptorSet> &writes,
+                                    const std::vector<VkCopyDescriptorSet> &copies) {
+    vk::UpdateDescriptorSets(handle(), writes.size(), writes.data(), copies.size(), copies.data());
+}
+
+VkResult Queue::submit(const std::vector<const CommandBuffer *> &cmds, const Fence &fence, bool expect_success) {
+    const std::vector<VkCommandBuffer> cmd_handles = MakeVkHandles<VkCommandBuffer>(cmds);
+    VkSubmitInfo submit_info;
+    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
+    submit_info.pNext = NULL;
+    submit_info.waitSemaphoreCount = 0;
+    submit_info.pWaitSemaphores = NULL;
+    submit_info.pWaitDstStageMask = NULL;
+    submit_info.commandBufferCount = (uint32_t)cmd_handles.size();
+    submit_info.pCommandBuffers = cmd_handles.data();
+    submit_info.signalSemaphoreCount = 0;
+    submit_info.pSignalSemaphores = NULL;
+
+    VkResult result = vk::QueueSubmit(handle(), 1, &submit_info, fence.handle());
+    if (expect_success) EXPECT(result == VK_SUCCESS);
+    return result;
+}
+
+VkResult Queue::submit(const CommandBuffer &cmd, const Fence &fence, bool expect_success) {
+    return submit(std::vector<const CommandBuffer *>(1, &cmd), fence, expect_success);
+}
+
+VkResult Queue::submit(const CommandBuffer &cmd, bool expect_success) {
+    Fence fence;
+    return submit(cmd, fence, expect_success);
+}
+
+VkResult Queue::wait() {
+    VkResult result = vk::QueueWaitIdle(handle());
+    EXPECT(result == VK_SUCCESS);
+    return result;
+}
+
+DeviceMemory::~DeviceMemory() {
+    if (initialized()) vk::FreeMemory(device(), handle(), NULL);
+}
+
+void DeviceMemory::init(const Device &dev, const VkMemoryAllocateInfo &info) {
+    NON_DISPATCHABLE_HANDLE_INIT(vk::AllocateMemory, dev, &info);
+}
+
+const void *DeviceMemory::map(VkFlags flags) const {
+    void *data;
+    if (!EXPECT(vk::MapMemory(device(), handle(), 0, VK_WHOLE_SIZE, flags, &data) == VK_SUCCESS)) data = NULL;
+
+    return data;
+}
+
+void *DeviceMemory::map(VkFlags flags) {
+    void *data;
+    if (!EXPECT(vk::MapMemory(device(), handle(), 0, VK_WHOLE_SIZE, flags, &data) == VK_SUCCESS)) data = NULL;
+
+    return data;
+}
+
+void DeviceMemory::unmap() const { vk::UnmapMemory(device(), handle()); }
+
+VkMemoryAllocateInfo DeviceMemory::get_resource_alloc_info(const Device &dev, const VkMemoryRequirements &reqs,
+                                                           VkMemoryPropertyFlags mem_props) {
+    // Find appropriate memory type for given reqs
+    VkPhysicalDeviceMemoryProperties dev_mem_props = dev.phy().memory_properties();
+    uint32_t mem_type_index = 0;
+    for (mem_type_index = 0; mem_type_index < dev_mem_props.memoryTypeCount; ++mem_type_index) {
+        if (mem_props == (mem_props & dev_mem_props.memoryTypes[mem_type_index].propertyFlags)) break;
+    }
+    // If we exceeded types, then this device doesn't have the memory we need
+    assert(mem_type_index < dev_mem_props.memoryTypeCount);
+    VkMemoryAllocateInfo info = alloc_info(reqs.size, mem_type_index);
+    EXPECT(dev.phy().set_memory_type(reqs.memoryTypeBits, &info, mem_props));
+    return info;
+}
+
+NON_DISPATCHABLE_HANDLE_DTOR(Fence, vk::DestroyFence)
+
+void Fence::init(const Device &dev, const VkFenceCreateInfo &info) { NON_DISPATCHABLE_HANDLE_INIT(vk::CreateFence, dev, &info); }
+
+VkResult Fence::wait(VkBool32 wait_all, uint64_t timeout) const {
+    VkFence fence = handle();
+    return vk::WaitForFences(device(), 1, &fence, wait_all, timeout);
+}
+
+NON_DISPATCHABLE_HANDLE_DTOR(Semaphore, vk::DestroySemaphore)
+
+void Semaphore::init(const Device &dev, const VkSemaphoreCreateInfo &info) {
+    NON_DISPATCHABLE_HANDLE_INIT(vk::CreateSemaphore, dev, &info);
+}
+
+NON_DISPATCHABLE_HANDLE_DTOR(Event, vk::DestroyEvent)
+
+void Event::init(const Device &dev, const VkEventCreateInfo &info) { NON_DISPATCHABLE_HANDLE_INIT(vk::CreateEvent, dev, &info); }
+
+void Event::set() { EXPECT(vk::SetEvent(device(), handle()) == VK_SUCCESS); }
+
+void Event::reset() { EXPECT(vk::ResetEvent(device(), handle()) == VK_SUCCESS); }
+
+NON_DISPATCHABLE_HANDLE_DTOR(QueryPool, vk::DestroyQueryPool)
+
+void QueryPool::init(const Device &dev, const VkQueryPoolCreateInfo &info) {
+    NON_DISPATCHABLE_HANDLE_INIT(vk::CreateQueryPool, dev, &info);
+}
+
+VkResult QueryPool::results(uint32_t first, uint32_t count, size_t size, void *data, size_t stride) {
+    VkResult err = vk::GetQueryPoolResults(device(), handle(), first, count, size, data, stride, 0);
+    EXPECT(err == VK_SUCCESS || err == VK_NOT_READY);
+
+    return err;
+}
+
+NON_DISPATCHABLE_HANDLE_DTOR(Buffer, vk::DestroyBuffer)
+
+void Buffer::init(const Device &dev, const VkBufferCreateInfo &info, VkMemoryPropertyFlags mem_props) {
+    init_no_mem(dev, info);
+
+    internal_mem_.init(dev, DeviceMemory::get_resource_alloc_info(dev, memory_requirements(), mem_props));
+    bind_memory(internal_mem_, 0);
+}
+
+void Buffer::init_no_mem(const Device &dev, const VkBufferCreateInfo &info) {
+    NON_DISPATCHABLE_HANDLE_INIT(vk::CreateBuffer, dev, &info);
+    create_info_ = info;
+}
+
+VkMemoryRequirements Buffer::memory_requirements() const {
+    VkMemoryRequirements reqs;
+
+    vk::GetBufferMemoryRequirements(device(), handle(), &reqs);
+
+    return reqs;
+}
+
+void Buffer::bind_memory(const DeviceMemory &mem, VkDeviceSize mem_offset) {
+    EXPECT(vk::BindBufferMemory(device(), handle(), mem.handle(), mem_offset) == VK_SUCCESS);
+}
+
+NON_DISPATCHABLE_HANDLE_DTOR(BufferView, vk::DestroyBufferView)
+
+void BufferView::init(const Device &dev, const VkBufferViewCreateInfo &info) {
+    NON_DISPATCHABLE_HANDLE_INIT(vk::CreateBufferView, dev, &info);
+}
+
+NON_DISPATCHABLE_HANDLE_DTOR(Image, vk::DestroyImage)
+
+void Image::init(const Device &dev, const VkImageCreateInfo &info, VkMemoryPropertyFlags mem_props) {
+    init_no_mem(dev, info);
+
+    if (initialized()) {
+        internal_mem_.init(dev, DeviceMemory::get_resource_alloc_info(dev, memory_requirements(), mem_props));
+        bind_memory(internal_mem_, 0);
+    }
+}
+
+void Image::init_no_mem(const Device &dev, const VkImageCreateInfo &info) {
+    NON_DISPATCHABLE_HANDLE_INIT(vk::CreateImage, dev, &info);
+    if (initialized()) {
+        init_info(dev, info);
+    }
+}
+
+void Image::init_info(const Device &dev, const VkImageCreateInfo &info) {
+    create_info_ = info;
+
+    for (std::vector<Device::Format>::const_iterator it = dev.formats().begin(); it != dev.formats().end(); it++) {
+        if (it->format == create_info_.format && it->tiling == create_info_.tiling) {
+            format_features_ = it->features;
+            break;
+        }
+    }
+}
+
+VkMemoryRequirements Image::memory_requirements() const {
+    VkMemoryRequirements reqs;
+
+    vk::GetImageMemoryRequirements(device(), handle(), &reqs);
+
+    return reqs;
+}
+
+void Image::bind_memory(const DeviceMemory &mem, VkDeviceSize mem_offset) {
+    EXPECT(vk::BindImageMemory(device(), handle(), mem.handle(), mem_offset) == VK_SUCCESS);
+}
+
+VkSubresourceLayout Image::subresource_layout(const VkImageSubresource &subres) const {
+    VkSubresourceLayout data = {};
+    vk::GetImageSubresourceLayout(device(), handle(), &subres, &data);
+
+    return data;
+}
+
+VkSubresourceLayout Image::subresource_layout(const VkImageSubresourceLayers &subrescopy) const {
+    VkSubresourceLayout data = {};
+    VkImageSubresource subres = subresource(subrescopy.aspectMask, subrescopy.mipLevel, subrescopy.baseArrayLayer);
+    vk::GetImageSubresourceLayout(device(), handle(), &subres, &data);
+
+    return data;
+}
+
+bool Image::transparent() const {
+    return (create_info_.tiling == VK_IMAGE_TILING_LINEAR && create_info_.samples == VK_SAMPLE_COUNT_1_BIT &&
+            !(create_info_.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)));
+}
+
+NON_DISPATCHABLE_HANDLE_DTOR(ImageView, vk::DestroyImageView)
+
+void ImageView::init(const Device &dev, const VkImageViewCreateInfo &info) {
+    NON_DISPATCHABLE_HANDLE_INIT(vk::CreateImageView, dev, &info);
+}
+
+AccelerationStructure::~AccelerationStructure() {
+    if (initialized()) {
+        PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV =
+            (PFN_vkDestroyAccelerationStructureNV)vk::GetDeviceProcAddr(device(), "vkDestroyAccelerationStructureNV");
+        assert(vkDestroyAccelerationStructureNV != nullptr);
+
+        vkDestroyAccelerationStructureNV(device(), handle(), nullptr);
+    }
+}
+
+VkMemoryRequirements2 AccelerationStructure::memory_requirements() const {
+    PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV =
+        (PFN_vkGetAccelerationStructureMemoryRequirementsNV)vk::GetDeviceProcAddr(device(),
+                                                                                  "vkGetAccelerationStructureMemoryRequirementsNV");
+    assert(vkGetAccelerationStructureMemoryRequirementsNV != nullptr);
+
+    VkAccelerationStructureMemoryRequirementsInfoNV memoryRequirementsInfo = {};
+    memoryRequirementsInfo.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
+    memoryRequirementsInfo.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
+    memoryRequirementsInfo.accelerationStructure = handle();
+
+    VkMemoryRequirements2 memoryRequirements = {};
+    vkGetAccelerationStructureMemoryRequirementsNV(device(), &memoryRequirementsInfo, &memoryRequirements);
+    return memoryRequirements;
+}
+
+VkMemoryRequirements2 AccelerationStructure::build_scratch_memory_requirements() const {
+    PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV =
+        (PFN_vkGetAccelerationStructureMemoryRequirementsNV)vk::GetDeviceProcAddr(device(),
+                                                                                  "vkGetAccelerationStructureMemoryRequirementsNV");
+    assert(vkGetAccelerationStructureMemoryRequirementsNV != nullptr);
+
+    VkAccelerationStructureMemoryRequirementsInfoNV memoryRequirementsInfo = {};
+    memoryRequirementsInfo.sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
+    memoryRequirementsInfo.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
+    memoryRequirementsInfo.accelerationStructure = handle();
+
+    VkMemoryRequirements2 memoryRequirements = {};
+    vkGetAccelerationStructureMemoryRequirementsNV(device(), &memoryRequirementsInfo, &memoryRequirements);
+    return memoryRequirements;
+}
+
+void AccelerationStructure::init(const Device &dev, const VkAccelerationStructureCreateInfoNV &info, bool init_memory) {
+    PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV =
+        (PFN_vkCreateAccelerationStructureNV)vk::GetDeviceProcAddr(dev.handle(), "vkCreateAccelerationStructureNV");
+    assert(vkCreateAccelerationStructureNV != nullptr);
+
+    NON_DISPATCHABLE_HANDLE_INIT(vkCreateAccelerationStructureNV, dev, &info);
+
+    info_ = info.info;
+
+    if (init_memory) {
+        memory_.init(dev, DeviceMemory::get_resource_alloc_info(dev, memory_requirements().memoryRequirements,
+                                                                VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT));
+
+        PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV =
+            (PFN_vkBindAccelerationStructureMemoryNV)vk::GetDeviceProcAddr(dev.handle(), "vkBindAccelerationStructureMemoryNV");
+        assert(vkBindAccelerationStructureMemoryNV != nullptr);
+
+        VkBindAccelerationStructureMemoryInfoNV bind_info = {};
+        bind_info.sType = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV;
+        bind_info.accelerationStructure = handle();
+        bind_info.memory = memory_.handle();
+        EXPECT(vkBindAccelerationStructureMemoryNV(dev.handle(), 1, &bind_info) == VK_SUCCESS);
+
+        PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV =
+            (PFN_vkGetAccelerationStructureHandleNV)vk::GetDeviceProcAddr(dev.handle(), "vkGetAccelerationStructureHandleNV");
+        assert(vkGetAccelerationStructureHandleNV != nullptr);
+        EXPECT(vkGetAccelerationStructureHandleNV(dev.handle(), handle(), sizeof(uint64_t), &opaque_handle_) == VK_SUCCESS);
+    }
+}
+
+void AccelerationStructure::create_scratch_buffer(const Device &dev, Buffer *buffer) {
+    VkMemoryRequirements scratch_buffer_memory_requirements = build_scratch_memory_requirements().memoryRequirements;
+
+    VkBufferCreateInfo create_info = {};
+    create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    create_info.size = scratch_buffer_memory_requirements.size;
+    create_info.usage = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV;
+    buffer->init(dev, create_info, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
+}
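+
+// Sketch of how the scratch buffer is consumed (assumes 'dev' is an initialized
+// Device with VK_NV_ray_tracing enabled and 'accel' an initialized
+// AccelerationStructure):
+//
+//   Buffer scratch;
+//   accel.create_scratch_buffer(dev, &scratch);
+//   // scratch.handle() is then passed as the scratch argument of
+//   // vkCmdBuildAccelerationStructureNV when recording the build.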
+
+NON_DISPATCHABLE_HANDLE_DTOR(ShaderModule, vk::DestroyShaderModule)
+
+void ShaderModule::init(const Device &dev, const VkShaderModuleCreateInfo &info) {
+    NON_DISPATCHABLE_HANDLE_INIT(vk::CreateShaderModule, dev, &info);
+}
+
+VkResult ShaderModule::init_try(const Device &dev, const VkShaderModuleCreateInfo &info) {
+    VkShaderModule mod;
+
+    VkResult err = vk::CreateShaderModule(dev.handle(), &info, NULL, &mod);
+    if (err == VK_SUCCESS) NonDispHandle::init(dev.handle(), mod);
+
+    return err;
+}
+
+NON_DISPATCHABLE_HANDLE_DTOR(Pipeline, vk::DestroyPipeline)
+
+void Pipeline::init(const Device &dev, const VkGraphicsPipelineCreateInfo &info) {
+    VkPipelineCache cache;
+    VkPipelineCacheCreateInfo ci;
+    memset((void *)&ci, 0, sizeof(VkPipelineCacheCreateInfo));
+    ci.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
+    VkResult err = vk::CreatePipelineCache(dev.handle(), &ci, NULL, &cache);
+    if (err == VK_SUCCESS) {
+        NON_DISPATCHABLE_HANDLE_INIT(vk::CreateGraphicsPipelines, dev, cache, 1, &info);
+        vk::DestroyPipelineCache(dev.handle(), cache, NULL);
+    }
+}
+
+VkResult Pipeline::init_try(const Device &dev, const VkGraphicsPipelineCreateInfo &info) {
+    VkPipeline pipe;
+    VkPipelineCache cache;
+    VkPipelineCacheCreateInfo ci;
+    memset((void *)&ci, 0, sizeof(VkPipelineCacheCreateInfo));
+    ci.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
+    VkResult err = vk::CreatePipelineCache(dev.handle(), &ci, NULL, &cache);
+    EXPECT(err == VK_SUCCESS);
+    if (err == VK_SUCCESS) {
+        err = vk::CreateGraphicsPipelines(dev.handle(), cache, 1, &info, NULL, &pipe);
+        if (err == VK_SUCCESS) {
+            NonDispHandle::init(dev.handle(), pipe);
+        }
+        vk::DestroyPipelineCache(dev.handle(), cache, NULL);
+    }
+
+    return err;
+}
+
+void Pipeline::init(const Device &dev, const VkComputePipelineCreateInfo &info) {
+    VkPipelineCache cache;
+    VkPipelineCacheCreateInfo ci;
+    memset((void *)&ci, 0, sizeof(VkPipelineCacheCreateInfo));
+    ci.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
+    VkResult err = vk::CreatePipelineCache(dev.handle(), &ci, NULL, &cache);
+    if (err == VK_SUCCESS) {
+        NON_DISPATCHABLE_HANDLE_INIT(vk::CreateComputePipelines, dev, cache, 1, &info);
+        vk::DestroyPipelineCache(dev.handle(), cache, NULL);
+    }
+}
+
+NON_DISPATCHABLE_HANDLE_DTOR(PipelineLayout, vk::DestroyPipelineLayout)
+
+void PipelineLayout::init(const Device &dev, VkPipelineLayoutCreateInfo &info,
+                          const std::vector<const DescriptorSetLayout *> &layouts) {
+    const std::vector<VkDescriptorSetLayout> layout_handles = MakeVkHandles<VkDescriptorSetLayout>(layouts);
+    info.setLayoutCount = layout_handles.size();
+    info.pSetLayouts = layout_handles.data();
+
+    NON_DISPATCHABLE_HANDLE_INIT(vk::CreatePipelineLayout, dev, &info);
+}
+
+NON_DISPATCHABLE_HANDLE_DTOR(Sampler, vk::DestroySampler)
+
+void Sampler::init(const Device &dev, const VkSamplerCreateInfo &info) {
+    NON_DISPATCHABLE_HANDLE_INIT(vk::CreateSampler, dev, &info);
+}
+
+NON_DISPATCHABLE_HANDLE_DTOR(DescriptorSetLayout, vk::DestroyDescriptorSetLayout)
+
+void DescriptorSetLayout::init(const Device &dev, const VkDescriptorSetLayoutCreateInfo &info) {
+    NON_DISPATCHABLE_HANDLE_INIT(vk::CreateDescriptorSetLayout, dev, &info);
+}
+
+NON_DISPATCHABLE_HANDLE_DTOR(DescriptorPool, vk::DestroyDescriptorPool)
+
+void DescriptorPool::init(const Device &dev, const VkDescriptorPoolCreateInfo &info) {
+    setDynamicUsage(info.flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT);
+    NON_DISPATCHABLE_HANDLE_INIT(vk::CreateDescriptorPool, dev, &info);
+}
+
+void DescriptorPool::reset() { EXPECT(vk::ResetDescriptorPool(device(), handle(), 0) == VK_SUCCESS); }
+
+std::vector<DescriptorSet *> DescriptorPool::alloc_sets(const Device &dev,
+                                                        const std::vector<const DescriptorSetLayout *> &layouts) {
+    const std::vector<VkDescriptorSetLayout> layout_handles = MakeVkHandles<VkDescriptorSetLayout>(layouts);
+
+    std::vector<VkDescriptorSet> set_handles;
+    set_handles.resize(layout_handles.size());
+
+    VkDescriptorSetAllocateInfo alloc_info = {};
+    alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+    alloc_info.descriptorSetCount = layout_handles.size();
+    alloc_info.descriptorPool = handle();
+    alloc_info.pSetLayouts = layout_handles.data();
+    VkResult err = vk::AllocateDescriptorSets(device(), &alloc_info, set_handles.data());
+    EXPECT(err == VK_SUCCESS);
+
+    std::vector<DescriptorSet *> sets;
+    for (std::vector<VkDescriptorSet>::const_iterator it = set_handles.begin(); it != set_handles.end(); it++) {
+        // do descriptor sets need memories bound?
+        DescriptorSet *descriptorSet = new DescriptorSet(dev, this, *it);
+        sets.push_back(descriptorSet);
+    }
+    return sets;
+}
+
+std::vector<DescriptorSet *> DescriptorPool::alloc_sets(const Device &dev, const DescriptorSetLayout &layout, uint32_t count) {
+    return alloc_sets(dev, std::vector<const DescriptorSetLayout *>(count, &layout));
+}
+
+DescriptorSet *DescriptorPool::alloc_sets(const Device &dev, const DescriptorSetLayout &layout) {
+    std::vector<DescriptorSet *> set = alloc_sets(dev, layout, 1);
+    return (set.empty()) ? NULL : set[0];
+}
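+
+// Descriptor-set setup sketch (assumes 'dev' is an initialized Device, 'layout'
+// an initialized DescriptorSetLayout, 'pool' an initialized DescriptorPool and
+// 'buffer_info' a filled VkDescriptorBufferInfo). Note the returned set is
+// heap-allocated; the caller owns and deletes it:
+//
+//   DescriptorSet *set = pool.alloc_sets(dev, layout);
+//   dev.update_descriptor_sets({Device::write_descriptor_set(*set, 0 /*binding*/, 0 /*array element*/,
+//                                                            VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, &buffer_info)});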
+
+DescriptorSet::~DescriptorSet() {
+    if (initialized()) {
+        // Only call vk::Free* on sets allocated from pool with usage *_DYNAMIC
+        if (containing_pool_->getDynamicUsage()) {
+            VkDescriptorSet sets[1] = {handle()};
+            EXPECT(vk::FreeDescriptorSets(device(), containing_pool_->GetObj(), 1, sets) == VK_SUCCESS);
+        }
+    }
+}
+
+NON_DISPATCHABLE_HANDLE_DTOR(CommandPool, vk::DestroyCommandPool)
+
+void CommandPool::init(const Device &dev, const VkCommandPoolCreateInfo &info) {
+    NON_DISPATCHABLE_HANDLE_INIT(vk::CreateCommandPool, dev, &info);
+}
+
+CommandBuffer::~CommandBuffer() {
+    if (initialized()) {
+        VkCommandBuffer cmds[] = {handle()};
+        vk::FreeCommandBuffers(dev_handle_, cmd_pool_, 1, cmds);
+    }
+}
+
+void CommandBuffer::init(const Device &dev, const VkCommandBufferAllocateInfo &info) {
+    VkCommandBuffer cmd;
+
+    // Make sure commandPool is set
+    assert(info.commandPool);
+
+    if (EXPECT(vk::AllocateCommandBuffers(dev.handle(), &info, &cmd) == VK_SUCCESS)) {
+        Handle::init(cmd);
+        dev_handle_ = dev.handle();
+        cmd_pool_ = info.commandPool;
+    }
+}
+
+void CommandBuffer::begin(const VkCommandBufferBeginInfo *info) { EXPECT(vk::BeginCommandBuffer(handle(), info) == VK_SUCCESS); }
+
+void CommandBuffer::begin() {
+    VkCommandBufferBeginInfo info = {};
+    VkCommandBufferInheritanceInfo hinfo = {};
+    info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
+    info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+    info.pInheritanceInfo = &hinfo;
+    hinfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO;
+    hinfo.pNext = NULL;
+    hinfo.renderPass = VK_NULL_HANDLE;
+    hinfo.subpass = 0;
+    hinfo.framebuffer = VK_NULL_HANDLE;
+    hinfo.occlusionQueryEnable = VK_FALSE;
+    hinfo.queryFlags = 0;
+    hinfo.pipelineStatistics = 0;
+
+    begin(&info);
+}
+
+void CommandBuffer::end() { EXPECT(vk::EndCommandBuffer(handle()) == VK_SUCCESS); }
+
+void CommandBuffer::reset(VkCommandBufferResetFlags flags) { EXPECT(vk::ResetCommandBuffer(handle(), flags) == VK_SUCCESS); }
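+
+// Typical record/submit flow built on the wrappers above (a sketch, assuming
+// 'dev' is an initialized Device, 'pool' an initialized CommandPool and 'queue'
+// an element of dev.graphics_queues()):
+//
+//   CommandBuffer cmd(dev, CommandBuffer::create_info(pool.handle()));
+//   cmd.begin();
+//   // ... record commands against cmd.handle() ...
+//   cmd.end();
+//   queue->submit(cmd);  // EXPECTs VK_SUCCESS by default
+//   queue->wait();       // vkQueueWaitIdle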
+
+}  // namespace vk_testing
diff --git a/src/third_party/vulkan-validation-layers/src/tests/vktestbinding.h b/src/third_party/vulkan-validation-layers/src/tests/vktestbinding.h
new file mode 100644
index 0000000..c932777
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/vktestbinding.h
@@ -0,0 +1,1026 @@
+/*
+ * Copyright (c) 2015-2016, 2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2016, 2019 Valve Corporation
+ * Copyright (c) 2015-2016, 2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Cody Northrop <cody@lunarg.com>
+ * Author: John Zulauf <jzulauf@lunarg.com>
+ */
+
+#ifndef VKTESTBINDING_H
+#define VKTESTBINDING_H
+
+#include <algorithm>
+#include <cassert>
+#include <iterator>
+#include <memory>
+#include <vector>
+
+#include "lvt_function_pointers.h"
+#include "test_common.h"
+
+namespace vk_testing {
+
+template <class Dst, class Src>
+std::vector<Dst> MakeVkHandles(const std::vector<Src> &v) {
+    std::vector<Dst> handles;
+    handles.reserve(v.size());
+    std::transform(v.begin(), v.end(), std::back_inserter(handles), [](const Src &o) { return o.handle(); });
+    return handles;
+}
+
+template <class Dst, class Src>
+std::vector<Dst> MakeVkHandles(const std::vector<Src *> &v) {
+    std::vector<Dst> handles;
+    handles.reserve(v.size());
+    std::transform(v.begin(), v.end(), std::back_inserter(handles), [](const Src *o) { return o->handle(); });
+    return handles;
+}
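+
+// Usage sketch: MakeVkHandles flattens wrapper objects into raw Vulkan handles
+// (assumes layout_a and layout_b are initialized DescriptorSetLayout objects):
+//
+//   std::vector<const DescriptorSetLayout *> layouts = {&layout_a, &layout_b};
+//   std::vector<VkDescriptorSetLayout> raw = MakeVkHandles<VkDescriptorSetLayout>(layouts);
+//   // raw[i] == layouts[i]->handle()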
+
+typedef void (*ErrorCallback)(const char *expr, const char *file, unsigned int line, const char *function);
+void set_error_callback(ErrorCallback callback);
+
+class PhysicalDevice;
+class Device;
+class Queue;
+class DeviceMemory;
+class Fence;
+class Semaphore;
+class Event;
+class QueryPool;
+class Buffer;
+class BufferView;
+class Image;
+class ImageView;
+class DepthStencilView;
+class Shader;
+class Pipeline;
+class PipelineDelta;
+class Sampler;
+class DescriptorSetLayout;
+class PipelineLayout;
+class DescriptorSetPool;
+class DescriptorSet;
+class CommandBuffer;
+class CommandPool;
+
+std::vector<VkLayerProperties> GetGlobalLayers();
+std::vector<VkExtensionProperties> GetGlobalExtensions();
+std::vector<VkExtensionProperties> GetGlobalExtensions(const char *pLayerName);
+
+namespace internal {
+
+template <typename T>
+class Handle {
+  public:
+    const T &handle() const NOEXCEPT { return handle_; }
+    bool initialized() const NOEXCEPT { return (handle_ != T{}); }
+
+  protected:
+    typedef T handle_type;
+
+    explicit Handle() NOEXCEPT : handle_{} {}
+    explicit Handle(T handle) NOEXCEPT : handle_(handle) {}
+
+    // handles are non-copyable
+    Handle(const Handle &) = delete;
+    Handle &operator=(const Handle &) = delete;
+
+    // handles can be moved out
+    Handle(Handle &&src) NOEXCEPT : handle_{src.handle_} { src.handle_ = {}; }
+    Handle &operator=(Handle &&src) NOEXCEPT {
+        handle_ = src.handle_;
+        src.handle_ = {};
+        return *this;
+    }
+
+    void init(T handle) NOEXCEPT {
+        assert(!initialized());
+        handle_ = handle;
+    }
+
+  private:
+    T handle_;
+};
+
+template <typename T>
+class NonDispHandle : public Handle<T> {
+  protected:
+    explicit NonDispHandle() NOEXCEPT : Handle<T>(), dev_handle_(VK_NULL_HANDLE) {}
+    explicit NonDispHandle(VkDevice dev, T handle) NOEXCEPT : Handle<T>(handle), dev_handle_(dev) {}
+
+    NonDispHandle(NonDispHandle &&src) NOEXCEPT : Handle<T>(std::move(src)) {
+        dev_handle_ = src.dev_handle_;
+        src.dev_handle_ = VK_NULL_HANDLE;
+    }
+    NonDispHandle &operator=(NonDispHandle &&src) NOEXCEPT {
+        Handle<T>::operator=(std::move(src));
+        dev_handle_ = src.dev_handle_;
+        src.dev_handle_ = VK_NULL_HANDLE;
+        return *this;
+    }
+
+    const VkDevice &device() const NOEXCEPT { return dev_handle_; }
+
+    void init(VkDevice dev, T handle) NOEXCEPT {
+        assert(!Handle<T>::initialized() && dev_handle_ == VK_NULL_HANDLE);
+        Handle<T>::init(handle);
+        dev_handle_ = dev;
+    }
+
+  private:
+    VkDevice dev_handle_;
+};
+
+}  // namespace internal
+
+class PhysicalDevice : public internal::Handle<VkPhysicalDevice> {
+  public:
+    explicit PhysicalDevice(VkPhysicalDevice phy) : Handle(phy) {
+        memory_properties_ = memory_properties();
+        device_properties_ = properties();
+    }
+
+    VkPhysicalDeviceProperties properties() const;
+    VkPhysicalDeviceMemoryProperties memory_properties() const;
+    std::vector<VkQueueFamilyProperties> queue_properties() const;
+    VkPhysicalDeviceFeatures features() const;
+
+    bool set_memory_type(const uint32_t type_bits, VkMemoryAllocateInfo *info, const VkMemoryPropertyFlags properties,
+                         const VkMemoryPropertyFlags forbid = 0) const;
+
+    // vkEnumerateDeviceExtensionProperties()
+    std::vector<VkExtensionProperties> extensions() const;
+    std::vector<VkExtensionProperties> extensions(const char *pLayerName) const;
+
+    // vkEnumerateLayers()
+    std::vector<VkLayerProperties> layers() const;
+
+  private:
+    void add_extension_dependencies(uint32_t dependency_count, VkExtensionProperties *dependency_props,
+                                    std::vector<VkExtensionProperties> &ext_list);
+
+    VkPhysicalDeviceMemoryProperties memory_properties_;
+
+    VkPhysicalDeviceProperties device_properties_;
+};
+
+class QueueCreateInfoArray {
+  private:
+    std::vector<VkDeviceQueueCreateInfo> queue_info_;
+    std::vector<std::vector<float>> queue_priorities_;
+
+  public:
+    QueueCreateInfoArray(const std::vector<VkQueueFamilyProperties> &queue_props);
+    size_t size() const { return queue_info_.size(); }
+    const VkDeviceQueueCreateInfo *data() const { return queue_info_.data(); }
+};
+
+class Device : public internal::Handle<VkDevice> {
+  public:
+    explicit Device(VkPhysicalDevice phy) : phy_(phy) {}
+    ~Device() NOEXCEPT;
+
+    // vkCreateDevice()
+    void init(const VkDeviceCreateInfo &info);
+    void init(std::vector<const char *> &extensions, VkPhysicalDeviceFeatures *features = nullptr,
+              void *create_device_pnext = nullptr);  // all queues, all extensions, etc
+    void init() {
+        std::vector<const char *> extensions;
+        init(extensions);
+    };
+
+    const PhysicalDevice &phy() const { return phy_; }
+
+    std::vector<const char *> GetEnabledExtensions() { return enabled_extensions_; }
+    bool IsEnabledExtension(const char *extension);
+
+    // vkGetDeviceProcAddr()
+    PFN_vkVoidFunction get_proc(const char *name) const { return vk::GetDeviceProcAddr(handle(), name); }
+
+    // vkGetDeviceQueue()
+    const std::vector<Queue *> &graphics_queues() const { return queues_[GRAPHICS]; }
+    const std::vector<Queue *> &compute_queues() { return queues_[COMPUTE]; }
+    const std::vector<Queue *> &dma_queues() { return queues_[DMA]; }
+
+    typedef std::vector<std::unique_ptr<Queue>> QueueFamilyQueues;
+    typedef std::vector<QueueFamilyQueues> QueueFamilies;
+    const QueueFamilyQueues &queue_family_queues(uint32_t queue_family) const;
+
+    uint32_t graphics_queue_node_index_;
+
+    struct Format {
+        VkFormat format;
+        VkImageTiling tiling;
+        VkFlags features;
+    };
+    // vkGetFormatInfo()
+    VkFormatProperties format_properties(VkFormat format);
+    const std::vector<Format> &formats() const { return formats_; }
+
+    // vkDeviceWaitIdle()
+    void wait();
+
+    // vkWaitForFences()
+    VkResult wait(const std::vector<const Fence *> &fences, bool wait_all, uint64_t timeout);
+    VkResult wait(const Fence &fence) { return wait(std::vector<const Fence *>(1, &fence), true, (uint64_t)-1); }
+
+    // vkUpdateDescriptorSets()
+    void update_descriptor_sets(const std::vector<VkWriteDescriptorSet> &writes, const std::vector<VkCopyDescriptorSet> &copies);
+    void update_descriptor_sets(const std::vector<VkWriteDescriptorSet> &writes) {
+        return update_descriptor_sets(writes, std::vector<VkCopyDescriptorSet>());
+    }
+
+    static VkWriteDescriptorSet write_descriptor_set(const DescriptorSet &set, uint32_t binding, uint32_t array_element,
+                                                     VkDescriptorType type, uint32_t count,
+                                                     const VkDescriptorImageInfo *image_info);
+    static VkWriteDescriptorSet write_descriptor_set(const DescriptorSet &set, uint32_t binding, uint32_t array_element,
+                                                     VkDescriptorType type, uint32_t count,
+                                                     const VkDescriptorBufferInfo *buffer_info);
+    static VkWriteDescriptorSet write_descriptor_set(const DescriptorSet &set, uint32_t binding, uint32_t array_element,
+                                                     VkDescriptorType type, uint32_t count, const VkBufferView *buffer_views);
+    static VkWriteDescriptorSet write_descriptor_set(const DescriptorSet &set, uint32_t binding, uint32_t array_element,
+                                                     VkDescriptorType type, const std::vector<VkDescriptorImageInfo> &image_info);
+    static VkWriteDescriptorSet write_descriptor_set(const DescriptorSet &set, uint32_t binding, uint32_t array_element,
+                                                     VkDescriptorType type, const std::vector<VkDescriptorBufferInfo> &buffer_info);
+    static VkWriteDescriptorSet write_descriptor_set(const DescriptorSet &set, uint32_t binding, uint32_t array_element,
+                                                     VkDescriptorType type, const std::vector<VkBufferView> &buffer_views);
+
+    static VkCopyDescriptorSet copy_descriptor_set(const DescriptorSet &src_set, uint32_t src_binding, uint32_t src_array_element,
+                                                   const DescriptorSet &dst_set, uint32_t dst_binding, uint32_t dst_array_element,
+                                                   uint32_t count);
+
+  private:
+    enum QueueIndex {
+        GRAPHICS,
+        COMPUTE,
+        DMA,
+        QUEUE_COUNT,
+    };
+
+    void init_queues();
+    void init_formats();
+
+    PhysicalDevice phy_;
+
+    std::vector<const char *> enabled_extensions_;
+
+    QueueFamilies queue_families_;
+    std::vector<Queue *> queues_[QUEUE_COUNT];
+    std::vector<Format> formats_;
+};
+
+class Queue : public internal::Handle<VkQueue> {
+  public:
+    explicit Queue(VkQueue queue, int index) : Handle(queue) { family_index_ = index; }
+
+    // vkQueueSubmit()
+    VkResult submit(const std::vector<const CommandBuffer *> &cmds, const Fence &fence, bool expect_success = true);
+    VkResult submit(const CommandBuffer &cmd, const Fence &fence, bool expect_success = true);
+    VkResult submit(const CommandBuffer &cmd, bool expect_success = true);
+
+    // vkQueueWaitIdle()
+    VkResult wait();
+
+    int get_family_index() { return family_index_; }
+
+  private:
+    int family_index_;
+};
+
+class DeviceMemory : public internal::NonDispHandle<VkDeviceMemory> {
+  public:
+    ~DeviceMemory() NOEXCEPT;
+
+    // vkAllocateMemory()
+    void init(const Device &dev, const VkMemoryAllocateInfo &info);
+
+    // vkMapMemory()
+    const void *map(VkFlags flags) const;
+    void *map(VkFlags flags);
+    const void *map() const { return map(0); }
+    void *map() { return map(0); }
+
+    // vkUnmapMemory()
+    void unmap() const;
+
+    static VkMemoryAllocateInfo alloc_info(VkDeviceSize size, uint32_t memory_type_index);
+    static VkMemoryAllocateInfo get_resource_alloc_info(const vk_testing::Device &dev, const VkMemoryRequirements &reqs,
+                                                        VkMemoryPropertyFlags mem_props);
+};
+
+class Fence : public internal::NonDispHandle<VkFence> {
+  public:
+    ~Fence() NOEXCEPT;
+
+    // vkCreateFence()
+    void init(const Device &dev, const VkFenceCreateInfo &info);
+
+    // vkGetFenceStatus()
+    VkResult status() const { return vk::GetFenceStatus(device(), handle()); }
+    VkResult wait(VkBool32 wait_all, uint64_t timeout) const;
+
+    static VkFenceCreateInfo create_info(VkFenceCreateFlags flags);
+    static VkFenceCreateInfo create_info();
+};
+
+class Semaphore : public internal::NonDispHandle<VkSemaphore> {
+  public:
+    ~Semaphore() NOEXCEPT;
+
+    // vkCreateSemaphore()
+    void init(const Device &dev, const VkSemaphoreCreateInfo &info);
+
+    static VkSemaphoreCreateInfo create_info(VkFlags flags);
+};
+
+class Event : public internal::NonDispHandle<VkEvent> {
+  public:
+    ~Event() NOEXCEPT;
+
+    // vkCreateEvent()
+    void init(const Device &dev, const VkEventCreateInfo &info);
+
+    // vkGetEventStatus()
+    // vkSetEvent()
+    // vkResetEvent()
+    VkResult status() const { return vk::GetEventStatus(device(), handle()); }
+    void set();
+    void reset();
+
+    static VkEventCreateInfo create_info(VkFlags flags);
+};
+
+class QueryPool : public internal::NonDispHandle<VkQueryPool> {
+  public:
+    ~QueryPool() NOEXCEPT;
+
+    // vkCreateQueryPool()
+    void init(const Device &dev, const VkQueryPoolCreateInfo &info);
+
+    // vkGetQueryPoolResults()
+    VkResult results(uint32_t first, uint32_t count, size_t size, void *data, size_t stride);
+
+    static VkQueryPoolCreateInfo create_info(VkQueryType type, uint32_t slot_count);
+};
+
+class Buffer : public internal::NonDispHandle<VkBuffer> {
+  public:
+    explicit Buffer() : NonDispHandle() {}
+    explicit Buffer(const Device &dev, const VkBufferCreateInfo &info) { init(dev, info); }
+    explicit Buffer(const Device &dev, VkDeviceSize size) { init(dev, size); }
+
+    ~Buffer() NOEXCEPT;
+
+    // vkCreateBuffer()
+    void init(const Device &dev, const VkBufferCreateInfo &info, VkMemoryPropertyFlags mem_props);
+    void init(const Device &dev, const VkBufferCreateInfo &info) { init(dev, info, 0); }
+    void init(const Device &dev, VkDeviceSize size, VkMemoryPropertyFlags mem_props,
+              VkBufferUsageFlags usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, const std::vector<uint32_t> &queue_families = {}) {
+        init(dev, create_info(size, usage, &queue_families), mem_props);
+    }
+    void init(const Device &dev, VkDeviceSize size) { init(dev, size, 0); }
+    void init_as_src(const Device &dev, VkDeviceSize size, VkMemoryPropertyFlags &reqs,
+                     const std::vector<uint32_t> *queue_families = nullptr) {
+        init(dev, create_info(size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, queue_families), reqs);
+    }
+    void init_as_dst(const Device &dev, VkDeviceSize size, VkMemoryPropertyFlags &reqs,
+                     const std::vector<uint32_t> *queue_families = nullptr) {
+        init(dev, create_info(size, VK_BUFFER_USAGE_TRANSFER_DST_BIT, queue_families), reqs);
+    }
+    void init_as_src_and_dst(const Device &dev, VkDeviceSize size, VkMemoryPropertyFlags &reqs,
+                             const std::vector<uint32_t> *queue_families = nullptr, bool memory = true) {
+        if (memory)
+            init(dev, create_info(size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, queue_families), reqs);
+        else
+            init_no_mem(dev,
+                        create_info(size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, queue_families));
+    }
+    void init_no_mem(const Device &dev, const VkBufferCreateInfo &info);
+
+    // get the internal memory
+    const DeviceMemory &memory() const { return internal_mem_; }
+    DeviceMemory &memory() { return internal_mem_; }
+
+    // vkGetObjectMemoryRequirements()
+    VkMemoryRequirements memory_requirements() const;
+
+    // vkBindObjectMemory()
+    void bind_memory(const DeviceMemory &mem, VkDeviceSize mem_offset);
+
+    const VkBufferCreateInfo &create_info() const { return create_info_; }
+    static VkBufferCreateInfo create_info(VkDeviceSize size, VkFlags usage, const std::vector<uint32_t> *queue_families = nullptr);
+
+    VkBufferMemoryBarrier buffer_memory_barrier(VkFlags output_mask, VkFlags input_mask, VkDeviceSize offset,
+                                                VkDeviceSize size) const {
+        VkBufferMemoryBarrier barrier = {};
+        barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
+        barrier.buffer = handle();
+        barrier.srcAccessMask = output_mask;
+        barrier.dstAccessMask = input_mask;
+        barrier.offset = offset;
+        barrier.size = size;
+        if (create_info_.sharingMode == VK_SHARING_MODE_CONCURRENT) {
+            barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+            barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+        }
+        return barrier;
+    }
+
+  private:
+    VkBufferCreateInfo create_info_;
+
+    DeviceMemory internal_mem_;
+};
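+
+// Host-visible buffer sketch (assumes 'dev' is an initialized Device):
+//
+//   Buffer buf;
+//   buf.init(dev, 256 /*bytes*/, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
+//   void *ptr = buf.memory().map();
+//   // ... fill the buffer through ptr ...
+//   buf.memory().unmap();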
+
+class BufferView : public internal::NonDispHandle<VkBufferView> {
+  public:
+    ~BufferView() NOEXCEPT;
+
+    // vkCreateBufferView()
+    void init(const Device &dev, const VkBufferViewCreateInfo &info);
+    static VkBufferViewCreateInfo createInfo(VkBuffer buffer, VkFormat format, VkDeviceSize offset = 0,
+                                             VkDeviceSize range = VK_WHOLE_SIZE);
+};
+
+inline VkBufferViewCreateInfo BufferView::createInfo(VkBuffer buffer, VkFormat format, VkDeviceSize offset, VkDeviceSize range) {
+    VkBufferViewCreateInfo info = {};
+    info.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
+    info.pNext = nullptr;
+    info.flags = VkFlags(0);
+    info.buffer = buffer;
+    info.format = format;
+    info.offset = offset;
+    info.range = range;
+    return info;
+}
+
+class Image : public internal::NonDispHandle<VkImage> {
+  public:
+    explicit Image() : NonDispHandle(), format_features_(0) {}
+    explicit Image(const Device &dev, const VkImageCreateInfo &info) : format_features_(0) { init(dev, info); }
+
+    ~Image() NOEXCEPT;
+
+    // vkCreateImage()
+    void init(const Device &dev, const VkImageCreateInfo &info, VkMemoryPropertyFlags mem_props);
+    void init(const Device &dev, const VkImageCreateInfo &info) { init(dev, info, 0); }
+    void init_no_mem(const Device &dev, const VkImageCreateInfo &info);
+
+    // get the internal memory
+    const DeviceMemory &memory() const { return internal_mem_; }
+    DeviceMemory &memory() { return internal_mem_; }
+
+    // vkGetObjectMemoryRequirements()
+    VkMemoryRequirements memory_requirements() const;
+
+    // vkBindObjectMemory()
+    void bind_memory(const DeviceMemory &mem, VkDeviceSize mem_offset);
+
+    // vkGetImageSubresourceLayout()
+    VkSubresourceLayout subresource_layout(const VkImageSubresource &subres) const;
+    VkSubresourceLayout subresource_layout(const VkImageSubresourceLayers &subres) const;
+
+    bool transparent() const;
+    bool copyable() const { return (format_features_ & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT); }
+
+    VkImageSubresourceRange subresource_range(VkImageAspectFlags aspect) const { return subresource_range(create_info_, aspect); }
+    VkExtent3D extent() const { return create_info_.extent; }
+    VkExtent3D extent(uint32_t mip_level) const { return extent(create_info_.extent, mip_level); }
+    VkFormat format() const { return create_info_.format; }
+    VkImageUsageFlags usage() const { return create_info_.usage; }
+    VkSharingMode sharing_mode() const { return create_info_.sharingMode; }
+    VkImageMemoryBarrier image_memory_barrier(VkFlags output_mask, VkFlags input_mask, VkImageLayout old_layout,
+                                              VkImageLayout new_layout, const VkImageSubresourceRange &range,
+                                              uint32_t srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
+                                              uint32_t dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED) const {
+        VkImageMemoryBarrier barrier = {};
+        barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+        barrier.srcAccessMask = output_mask;
+        barrier.dstAccessMask = input_mask;
+        barrier.oldLayout = old_layout;
+        barrier.newLayout = new_layout;
+        barrier.image = handle();
+        barrier.subresourceRange = range;
+        barrier.srcQueueFamilyIndex = srcQueueFamilyIndex;
+        barrier.dstQueueFamilyIndex = dstQueueFamilyIndex;
+        return barrier;
+    }
+
+    static VkImageCreateInfo create_info();
+    static VkImageSubresource subresource(VkImageAspectFlags aspect, uint32_t mip_level, uint32_t array_layer);
+    static VkImageSubresource subresource(const VkImageSubresourceRange &range, uint32_t mip_level, uint32_t array_layer);
+    static VkImageSubresourceLayers subresource(VkImageAspectFlags aspect, uint32_t mip_level, uint32_t array_layer,
+                                                uint32_t array_size);
+    static VkImageSubresourceLayers subresource(const VkImageSubresourceRange &range, uint32_t mip_level, uint32_t array_layer,
+                                                uint32_t array_size);
+    static VkImageSubresourceRange subresource_range(VkImageAspectFlags aspect_mask, uint32_t base_mip_level, uint32_t mip_levels,
+                                                     uint32_t base_array_layer, uint32_t num_layers);
+    static VkImageSubresourceRange subresource_range(const VkImageCreateInfo &info, VkImageAspectFlags aspect_mask);
+    static VkImageSubresourceRange subresource_range(const VkImageSubresource &subres);
+
+    static VkExtent2D extent(int32_t width, int32_t height);
+    static VkExtent2D extent(const VkExtent2D &extent, uint32_t mip_level);
+    static VkExtent2D extent(const VkExtent3D &extent);
+
+    static VkExtent3D extent(int32_t width, int32_t height, int32_t depth);
+    static VkExtent3D extent(const VkExtent3D &extent, uint32_t mip_level);
+
+  private:
+    void init_info(const Device &dev, const VkImageCreateInfo &info);
+
+    VkImageCreateInfo create_info_;
+    VkFlags format_features_;
+
+    DeviceMemory internal_mem_;
+};
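+
+// Layout-transition sketch (assumes 'img' is an initialized color Image):
+//
+//   VkImageMemoryBarrier barrier = img.image_memory_barrier(
+//       0, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+//       img.subresource_range(VK_IMAGE_ASPECT_COLOR_BIT));
+//   // The barrier is then recorded with vkCmdPipelineBarrier while a command
+//   // buffer is in the recording state.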
+
+class ImageView : public internal::NonDispHandle<VkImageView> {
+  public:
+    ~ImageView() NOEXCEPT;
+
+    // vkCreateImageView()
+    void init(const Device &dev, const VkImageViewCreateInfo &info);
+};
+
+class AccelerationStructure : public internal::NonDispHandle<VkAccelerationStructureNV> {
+  public:
+    explicit AccelerationStructure(const Device &dev, const VkAccelerationStructureCreateInfoNV &info, bool init_memory = true) {
+        init(dev, info, init_memory);
+    }
+    ~AccelerationStructure();
+
+    // vkCreateAccelerationStructureNV
+    void init(const Device &dev, const VkAccelerationStructureCreateInfoNV &info, bool init_memory = true);
+
+    // vkGetAccelerationStructureMemoryRequirementsNV()
+    VkMemoryRequirements2 memory_requirements() const;
+    VkMemoryRequirements2 build_scratch_memory_requirements() const;
+
+    uint64_t opaque_handle() const { return opaque_handle_; }
+
+    const VkAccelerationStructureInfoNV &info() const { return info_; }
+
+    const VkDevice &dev() const { return device(); }
+
+    void create_scratch_buffer(const Device &dev, Buffer *buffer);
+
+  private:
+    VkAccelerationStructureInfoNV info_;
+    DeviceMemory memory_;
+    uint64_t opaque_handle_;
+};
+
+class ShaderModule : public internal::NonDispHandle<VkShaderModule> {
+  public:
+    ~ShaderModule() NOEXCEPT;
+
+    // vkCreateShaderModule()
+    void init(const Device &dev, const VkShaderModuleCreateInfo &info);
+    VkResult init_try(const Device &dev, const VkShaderModuleCreateInfo &info);
+
+    static VkShaderModuleCreateInfo create_info(size_t code_size, const uint32_t *code, VkFlags flags);
+};
+
+class Pipeline : public internal::NonDispHandle<VkPipeline> {
+  public:
+    ~Pipeline() NOEXCEPT;
+
+    // vkCreateGraphicsPipeline()
+    void init(const Device &dev, const VkGraphicsPipelineCreateInfo &info);
+    // vkCreateGraphicsPipelineDerivative()
+    void init(const Device &dev, const VkGraphicsPipelineCreateInfo &info, const VkPipeline basePipeline);
+    // vkCreateComputePipeline()
+    void init(const Device &dev, const VkComputePipelineCreateInfo &info);
+    // vkLoadPipeline()
+    void init(const Device &dev, size_t size, const void *data);
+    // vkLoadPipelineDerivative()
+    void init(const Device &dev, size_t size, const void *data, VkPipeline basePipeline);
+
+    // vkCreateGraphicsPipeline with error return
+    VkResult init_try(const Device &dev, const VkGraphicsPipelineCreateInfo &info);
+
+    // vkStorePipeline()
+    size_t store(size_t size, void *data);
+};
+
+class PipelineLayout : public internal::NonDispHandle<VkPipelineLayout> {
+  public:
+    PipelineLayout() NOEXCEPT : NonDispHandle() {}
+    ~PipelineLayout() NOEXCEPT;
+
+    // Move constructor for Visual Studio 2013
+    PipelineLayout(PipelineLayout &&src) NOEXCEPT : NonDispHandle(std::move(src)){};
+
+    PipelineLayout &operator=(PipelineLayout &&src) NOEXCEPT {
+        this->~PipelineLayout();
+        this->NonDispHandle::operator=(std::move(src));
+        return *this;
+    };
+
+    // vkCreatePipelineLayout()
+    void init(const Device &dev, VkPipelineLayoutCreateInfo &info, const std::vector<const DescriptorSetLayout *> &layouts);
+};
+
+class Sampler : public internal::NonDispHandle<VkSampler> {
+  public:
+    ~Sampler() NOEXCEPT;
+
+    // vkCreateSampler()
+    void init(const Device &dev, const VkSamplerCreateInfo &info);
+};
+
+class DescriptorSetLayout : public internal::NonDispHandle<VkDescriptorSetLayout> {
+  public:
+    DescriptorSetLayout() NOEXCEPT : NonDispHandle(){};
+    ~DescriptorSetLayout() NOEXCEPT;
+
+    // Move constructor for Visual Studio 2013
+    DescriptorSetLayout(DescriptorSetLayout &&src) NOEXCEPT : NonDispHandle(std::move(src)){};
+
+    DescriptorSetLayout &operator=(DescriptorSetLayout &&src) NOEXCEPT {
+        this->~DescriptorSetLayout();
+        this->NonDispHandle::operator=(std::move(src));
+        return *this;
+    }
+
+    // vkCreateDescriptorSetLayout()
+    void init(const Device &dev, const VkDescriptorSetLayoutCreateInfo &info);
+};
+
+class DescriptorPool : public internal::NonDispHandle<VkDescriptorPool> {
+  public:
+    ~DescriptorPool() NOEXCEPT;
+
+    // Descriptor sets allocated from this pool will need access to the original
+    // pool object when they are freed
+    VkDescriptorPool GetObj() { return handle(); }
+
+    // vkCreateDescriptorPool()
+    void init(const Device &dev, const VkDescriptorPoolCreateInfo &info);
+
+    // vkResetDescriptorPool()
+    void reset();
+
+    // vkFreeDescriptorSet()
+    void setDynamicUsage(bool isDynamic) { dynamic_usage_ = isDynamic; }
+    bool getDynamicUsage() { return dynamic_usage_; }
+
+    // vkAllocateDescriptorSets()
+    std::vector<DescriptorSet *> alloc_sets(const Device &dev, const std::vector<const DescriptorSetLayout *> &layouts);
+    std::vector<DescriptorSet *> alloc_sets(const Device &dev, const DescriptorSetLayout &layout, uint32_t count);
+    DescriptorSet *alloc_sets(const Device &dev, const DescriptorSetLayout &layout);
+
+    template <typename PoolSizes>
+    static VkDescriptorPoolCreateInfo create_info(VkDescriptorPoolCreateFlags flags, uint32_t max_sets,
+                                                  const PoolSizes &pool_sizes);
+
+  private:
+    VkDescriptorPool pool_;
+
+    // Track whether this pool's usage is VK_DESCRIPTOR_POOL_USAGE_DYNAMIC
+    bool dynamic_usage_;
+};
+
+template <typename PoolSizes>
+inline VkDescriptorPoolCreateInfo DescriptorPool::create_info(VkDescriptorPoolCreateFlags flags, uint32_t max_sets,
+                                                              const PoolSizes &pool_sizes) {
+    VkDescriptorPoolCreateInfo info{};
+    info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+    info.pNext = nullptr;
+    info.flags = flags;
+    info.maxSets = max_sets;
+    info.poolSizeCount = pool_sizes.size();
+    info.pPoolSizes = (info.poolSizeCount) ? pool_sizes.data() : nullptr;
+    return info;
+}
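+
+// Pool creation sketch (assumes 'dev' is an initialized Device and one
+// uniform-buffer descriptor is enough):
+//
+//   std::vector<VkDescriptorPoolSize> sizes = {{VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1}};
+//   DescriptorPool pool;
+//   pool.init(dev, DescriptorPool::create_info(VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
+//                                              1 /*max_sets*/, sizes));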
+
+class DescriptorSet : public internal::NonDispHandle<VkDescriptorSet> {
+  public:
+    ~DescriptorSet() NOEXCEPT;
+
+    explicit DescriptorSet() : NonDispHandle() {}
+    explicit DescriptorSet(const Device &dev, DescriptorPool *pool, VkDescriptorSet set) : NonDispHandle(dev.handle(), set) {
+        containing_pool_ = pool;
+    }
+
+  private:
+    DescriptorPool *containing_pool_;
+};
+
+class CommandPool : public internal::NonDispHandle<VkCommandPool> {
+  public:
+    ~CommandPool() NOEXCEPT;
+
+    explicit CommandPool() : NonDispHandle() {}
+    explicit CommandPool(const Device &dev, const VkCommandPoolCreateInfo &info) { init(dev, info); }
+
+    void init(const Device &dev, const VkCommandPoolCreateInfo &info);
+
+    static VkCommandPoolCreateInfo create_info(uint32_t queue_family_index, VkCommandPoolCreateFlags flags);
+};
+
+inline VkCommandPoolCreateInfo CommandPool::create_info(uint32_t queue_family_index, VkCommandPoolCreateFlags flags) {
+    VkCommandPoolCreateInfo info = {};
+    info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+    info.queueFamilyIndex = queue_family_index;
+    info.flags = flags;
+    return info;
+}
+
+class CommandBuffer : public internal::Handle<VkCommandBuffer> {
+  public:
+    ~CommandBuffer() NOEXCEPT;
+
+    explicit CommandBuffer() : Handle() {}
+    explicit CommandBuffer(const Device &dev, const VkCommandBufferAllocateInfo &info) { init(dev, info); }
+
+    // vkAllocateCommandBuffers()
+    void init(const Device &dev, const VkCommandBufferAllocateInfo &info);
+
+    // vkBeginCommandBuffer()
+    void begin(const VkCommandBufferBeginInfo *info);
+    void begin();
+
+    // vkEndCommandBuffer()
+    // vkResetCommandBuffer()
+    void end();
+    void reset(VkCommandBufferResetFlags flags);
+    void reset() { reset(VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT); }
+
+    static VkCommandBufferAllocateInfo create_info(VkCommandPool const &pool);
+
+  private:
+    VkDevice dev_handle_;
+    VkCommandPool cmd_pool_;
+};
+
+inline VkMemoryAllocateInfo DeviceMemory::alloc_info(VkDeviceSize size, uint32_t memory_type_index) {
+    VkMemoryAllocateInfo info = {};
+    info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
+    info.allocationSize = size;
+    info.memoryTypeIndex = memory_type_index;
+    return info;
+}
+
+inline VkBufferCreateInfo Buffer::create_info(VkDeviceSize size, VkFlags usage, const std::vector<uint32_t> *queue_families) {
+    VkBufferCreateInfo info = {};
+    info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    info.size = size;
+    info.usage = usage;
+
+    if (queue_families && queue_families->size() > 1) {
+        info.sharingMode = VK_SHARING_MODE_CONCURRENT;
+        info.queueFamilyIndexCount = static_cast<uint32_t>(queue_families->size());
+        info.pQueueFamilyIndices = queue_families->data();
+    }
+
+    return info;
+}
+
+inline VkFenceCreateInfo Fence::create_info(VkFenceCreateFlags flags) {
+    VkFenceCreateInfo info = {};
+    info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+    info.flags = flags;
+    return info;
+}
+
+inline VkFenceCreateInfo Fence::create_info() {
+    VkFenceCreateInfo info = {};
+    info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
+    return info;
+}
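+
+// Fence sketch (assumes 'dev' is an initialized Device):
+//
+//   Fence fence;
+//   fence.init(dev, Fence::create_info());
+//   // ... submit work that signals 'fence' ...
+//   VkResult res = dev.wait(fence);  // vkWaitForFences with an effectively infinite timeout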
+
+inline VkSemaphoreCreateInfo Semaphore::create_info(VkFlags flags) {
+    VkSemaphoreCreateInfo info = {};
+    info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+    info.flags = flags;
+    return info;
+}
+
+inline VkEventCreateInfo Event::create_info(VkFlags flags) {
+    VkEventCreateInfo info = {};
+    info.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
+    info.flags = flags;
+    return info;
+}
+
+inline VkQueryPoolCreateInfo QueryPool::create_info(VkQueryType type, uint32_t slot_count) {
+    VkQueryPoolCreateInfo info = {};
+    info.sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO;
+    info.queryType = type;
+    info.queryCount = slot_count;
+    return info;
+}
+
+inline VkImageCreateInfo Image::create_info() {
+    VkImageCreateInfo info = {};
+    info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+    info.extent.width = 1;
+    info.extent.height = 1;
+    info.extent.depth = 1;
+    info.mipLevels = 1;
+    info.arrayLayers = 1;
+    info.samples = VK_SAMPLE_COUNT_1_BIT;
+    return info;
+}
+
+inline VkImageSubresource Image::subresource(VkImageAspectFlags aspect, uint32_t mip_level, uint32_t array_layer) {
+    VkImageSubresource subres = {};
+    if (aspect == 0) {
+        assert(!"Invalid VkImageAspectFlags");
+    }
+    subres.aspectMask = aspect;
+    subres.mipLevel = mip_level;
+    subres.arrayLayer = array_layer;
+    return subres;
+}
+
+inline VkImageSubresource Image::subresource(const VkImageSubresourceRange &range, uint32_t mip_level, uint32_t array_layer) {
+    return subresource(range.aspectMask, range.baseMipLevel + mip_level, range.baseArrayLayer + array_layer);
+}
+
+inline VkImageSubresourceLayers Image::subresource(VkImageAspectFlags aspect, uint32_t mip_level, uint32_t array_layer,
+                                                   uint32_t array_size) {
+    VkImageSubresourceLayers subres = {};
+    switch (aspect) {
+        case VK_IMAGE_ASPECT_COLOR_BIT:
+        case VK_IMAGE_ASPECT_DEPTH_BIT:
+        case VK_IMAGE_ASPECT_STENCIL_BIT:
+        case VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT:
+            /* valid */
+            break;
+        default:
+            assert(!"Invalid VkImageAspectFlags");
+    }
+    subres.aspectMask = aspect;
+    subres.mipLevel = mip_level;
+    subres.baseArrayLayer = array_layer;
+    subres.layerCount = array_size;
+    return subres;
+}
+
+inline VkImageSubresourceLayers Image::subresource(const VkImageSubresourceRange &range, uint32_t mip_level, uint32_t array_layer,
+                                                   uint32_t array_size) {
+    return subresource(range.aspectMask, range.baseMipLevel + mip_level, range.baseArrayLayer + array_layer, array_size);
+}
+
+inline VkImageSubresourceRange Image::subresource_range(VkImageAspectFlags aspect_mask, uint32_t base_mip_level,
+                                                        uint32_t mip_levels, uint32_t base_array_layer, uint32_t num_layers) {
+    VkImageSubresourceRange range = {};
+    if (aspect_mask == 0) {
+        assert(!"Invalid VkImageAspectFlags");
+    }
+    range.aspectMask = aspect_mask;
+    range.baseMipLevel = base_mip_level;
+    range.levelCount = mip_levels;
+    range.baseArrayLayer = base_array_layer;
+    range.layerCount = num_layers;
+    return range;
+}
+
+inline VkImageSubresourceRange Image::subresource_range(const VkImageCreateInfo &info, VkImageAspectFlags aspect_mask) {
+    return subresource_range(aspect_mask, 0, info.mipLevels, 0, info.arrayLayers);
+}
+
+inline VkImageSubresourceRange Image::subresource_range(const VkImageSubresource &subres) {
+    return subresource_range(subres.aspectMask, subres.mipLevel, 1, subres.arrayLayer, 1);
+}
+
+inline VkExtent2D Image::extent(int32_t width, int32_t height) {
+    VkExtent2D extent = {};
+    extent.width = width;
+    extent.height = height;
+    return extent;
+}
+
+inline VkExtent2D Image::extent(const VkExtent2D &extent, uint32_t mip_level) {
+    const int32_t width = (extent.width >> mip_level) ? extent.width >> mip_level : 1;
+    const int32_t height = (extent.height >> mip_level) ? extent.height >> mip_level : 1;
+    return Image::extent(width, height);
+}
+
+inline VkExtent2D Image::extent(const VkExtent3D &extent) { return Image::extent(extent.width, extent.height); }
+
+inline VkExtent3D Image::extent(int32_t width, int32_t height, int32_t depth) {
+    VkExtent3D extent = {};
+    extent.width = width;
+    extent.height = height;
+    extent.depth = depth;
+    return extent;
+}
+
+inline VkExtent3D Image::extent(const VkExtent3D &extent, uint32_t mip_level) {
+    const int32_t width = (extent.width >> mip_level) ? extent.width >> mip_level : 1;
+    const int32_t height = (extent.height >> mip_level) ? extent.height >> mip_level : 1;
+    const int32_t depth = (extent.depth >> mip_level) ? extent.depth >> mip_level : 1;
+    return Image::extent(width, height, depth);
+}
+
+inline VkShaderModuleCreateInfo ShaderModule::create_info(size_t code_size, const uint32_t *code, VkFlags flags) {
+    VkShaderModuleCreateInfo info = {};
+    info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
+    info.codeSize = code_size;
+    info.pCode = code;
+    info.flags = flags;
+    return info;
+}
+
+inline VkWriteDescriptorSet Device::write_descriptor_set(const DescriptorSet &set, uint32_t binding, uint32_t array_element,
+                                                         VkDescriptorType type, uint32_t count,
+                                                         const VkDescriptorImageInfo *image_info) {
+    VkWriteDescriptorSet write = {};
+    write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    write.dstSet = set.handle();
+    write.dstBinding = binding;
+    write.dstArrayElement = array_element;
+    write.descriptorCount = count;
+    write.descriptorType = type;
+    write.pImageInfo = image_info;
+    return write;
+}
+
+inline VkWriteDescriptorSet Device::write_descriptor_set(const DescriptorSet &set, uint32_t binding, uint32_t array_element,
+                                                         VkDescriptorType type, uint32_t count,
+                                                         const VkDescriptorBufferInfo *buffer_info) {
+    VkWriteDescriptorSet write = {};
+    write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    write.dstSet = set.handle();
+    write.dstBinding = binding;
+    write.dstArrayElement = array_element;
+    write.descriptorCount = count;
+    write.descriptorType = type;
+    write.pBufferInfo = buffer_info;
+    return write;
+}
+
+inline VkWriteDescriptorSet Device::write_descriptor_set(const DescriptorSet &set, uint32_t binding, uint32_t array_element,
+                                                         VkDescriptorType type, uint32_t count, const VkBufferView *buffer_views) {
+    VkWriteDescriptorSet write = {};
+    write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    write.dstSet = set.handle();
+    write.dstBinding = binding;
+    write.dstArrayElement = array_element;
+    write.descriptorCount = count;
+    write.descriptorType = type;
+    write.pTexelBufferView = buffer_views;
+    return write;
+}
+
+inline VkWriteDescriptorSet Device::write_descriptor_set(const DescriptorSet &set, uint32_t binding, uint32_t array_element,
+                                                         VkDescriptorType type,
+                                                         const std::vector<VkDescriptorImageInfo> &image_info) {
+    return write_descriptor_set(set, binding, array_element, type, image_info.size(), image_info.data());
+}
+
+inline VkWriteDescriptorSet Device::write_descriptor_set(const DescriptorSet &set, uint32_t binding, uint32_t array_element,
+                                                         VkDescriptorType type,
+                                                         const std::vector<VkDescriptorBufferInfo> &buffer_info) {
+    return write_descriptor_set(set, binding, array_element, type, buffer_info.size(), buffer_info.data());
+}
+
+inline VkWriteDescriptorSet Device::write_descriptor_set(const DescriptorSet &set, uint32_t binding, uint32_t array_element,
+                                                         VkDescriptorType type, const std::vector<VkBufferView> &buffer_views) {
+    return write_descriptor_set(set, binding, array_element, type, buffer_views.size(), buffer_views.data());
+}
+
+inline VkCopyDescriptorSet Device::copy_descriptor_set(const DescriptorSet &src_set, uint32_t src_binding,
+                                                       uint32_t src_array_element, const DescriptorSet &dst_set,
+                                                       uint32_t dst_binding, uint32_t dst_array_element, uint32_t count) {
+    VkCopyDescriptorSet copy = {};
+    copy.sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
+    copy.srcSet = src_set.handle();
+    copy.srcBinding = src_binding;
+    copy.srcArrayElement = src_array_element;
+    copy.dstSet = dst_set.handle();
+    copy.dstBinding = dst_binding;
+    copy.dstArrayElement = dst_array_element;
+    copy.descriptorCount = count;
+
+    return copy;
+}
+
+inline VkCommandBufferAllocateInfo CommandBuffer::create_info(VkCommandPool const &pool) {
+    VkCommandBufferAllocateInfo info = {};
+    info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+    info.commandPool = pool;
+    info.commandBufferCount = 1;
+    return info;
+}
+
+}  // namespace vk_testing
+
+#endif  // VKTESTBINDING_H
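(Editorial note: the write_descriptor_set and copy_descriptor_set helpers above only populate the Vulkan structs; a test still has to submit them. A minimal usage sketch, assuming `device` is a vk_testing::Device, `set` is a vk_testing::DescriptorSet, and `buf` is a VkBuffer created elsewhere; all of these are placeholder names, not declarations from this header.)

    // Sketch only: fill a VkDescriptorBufferInfo, build the write with the
    // helper defined above, and submit it through the core Vulkan entry point.
    VkDescriptorBufferInfo buffer_info = {};
    buffer_info.buffer = buf;  // hypothetical VkBuffer created by the test
    buffer_info.offset = 0;
    buffer_info.range = VK_WHOLE_SIZE;

    VkWriteDescriptorSet write = device.write_descriptor_set(
        set, /*binding=*/0, /*array_element=*/0,
        VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, /*count=*/1, &buffer_info);

    // The plain Vulkan call is used here; the framework's vk:: dispatch table
    // presumably exposes an equivalent wrapper.
    vkUpdateDescriptorSets(device.handle(), 1, &write, 0, nullptr);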
diff --git a/src/third_party/vulkan-validation-layers/src/tests/vktestframework.cpp b/src/third_party/vulkan-validation-layers/src/tests/vktestframework.cpp
new file mode 100644
index 0000000..df0d8e2
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/vktestframework.cpp
@@ -0,0 +1,866 @@
+/*
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Chia-I Wu <olvaffe@gmail.com>
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Tony Barbour <tony@LunarG.com>
+ */
+
+#include "vktestframework.h"
+#include "vkrenderframework.h"
+
+// For versions prior to VS 2015, suppress the warning
+// caused by the inconsistent redefinition of snprintf
+// between a vulkan header and a glslang header.
+#if (defined(_MSC_VER) && _MSC_VER < 1900 /*vs2015*/)
+#pragma warning(push)
+#pragma warning(disable : 4005)
+#endif
+// TODO FIXME remove this once glslang doesn't define this
+#undef BadValue
+#include "SPIRV/GlslangToSpv.h"
+#include "SPIRV/SPVRemapper.h"
+#if (defined(_MSC_VER) && _MSC_VER < 1900 /*vs2015*/)
+#pragma warning(pop)
+#endif
+#include <limits.h>
+#include <cmath>
+
+#if defined(PATH_MAX) && !defined(MAX_PATH)
+#define MAX_PATH PATH_MAX
+#endif
+
+#ifdef _WIN32
+#define ERR_EXIT(err_msg, err_class)                 \
+    do {                                             \
+        MessageBox(NULL, err_msg, err_class, MB_OK); \
+        exit(1);                                     \
+    } while (0)
+#else  // _WIN32
+
+#define ERR_EXIT(err_msg, err_class) \
+    do {                             \
+        printf(err_msg);             \
+        fflush(stdout);              \
+        exit(1);                     \
+    } while (0)
+#endif  // _WIN32
+
+// Command-line options
+enum TOptions {
+    EOptionNone = 0x000,
+    EOptionIntermediate = 0x001,
+    EOptionSuppressInfolog = 0x002,
+    EOptionMemoryLeakMode = 0x004,
+    EOptionRelaxedErrors = 0x008,
+    EOptionGiveWarnings = 0x010,
+    EOptionLinkProgram = 0x020,
+    EOptionMultiThreaded = 0x040,
+    EOptionDumpConfig = 0x080,
+    EOptionDumpReflection = 0x100,
+    EOptionSuppressWarnings = 0x200,
+    EOptionDumpVersions = 0x400,
+    EOptionSpv = 0x800,
+    EOptionDefaultDesktop = 0x1000,
+};
+
+struct SwapchainBuffers {
+    VkImage image;
+    VkCommandBuffer cmd;
+    VkImageView view;
+};
+
+#ifndef _WIN32
+
+#include <errno.h>
+
+int fopen_s(FILE **pFile, const char *filename, const char *mode) {
+    if (!pFile || !filename || !mode) {
+        return EINVAL;
+    }
+
+    FILE *f = fopen(filename, mode);
+    if (!f) {
+        if (errno != 0) {
+            return errno;
+        } else {
+            return ENOENT;
+        }
+    }
+    *pFile = f;
+
+    return 0;
+}
+
+#endif
+
+// Set up environment for GLSL compiler
+// Must be done once per process
+void TestEnvironment::SetUp() {
+    // Initialize GLSL to SPV compiler utility
+    glslang::InitializeProcess();
+
+    vk_testing::set_error_callback(test_error_callback);
+
+    vk::InitDispatchTable();
+}
+
+void TestEnvironment::TearDown() { glslang::FinalizeProcess(); }
+
+VkTestFramework::VkTestFramework() : m_compile_options(0), m_num_shader_strings(0) {}
+
+VkTestFramework::~VkTestFramework() {}
+
+// Define all the static elements
+bool VkTestFramework::m_canonicalize_spv = false;
+bool VkTestFramework::m_strip_spv = false;
+bool VkTestFramework::m_do_everything_spv = false;
+bool VkTestFramework::m_devsim_layer = false;
+int VkTestFramework::m_width = 0;
+int VkTestFramework::m_height = 0;
+
+bool VkTestFramework::optionMatch(const char *option, char *optionLine) {
+    if (strncmp(option, optionLine, strlen(option)) == 0)
+        return true;
+    else
+        return false;
+}
+
+void VkTestFramework::InitArgs(int *argc, char *argv[]) {
+    int i, n;
+
+    for (i = 1, n = 1; i < *argc; i++) {
+        if (optionMatch("--strip-SPV", argv[i]))
+            m_strip_spv = true;
+        else if (optionMatch("--canonicalize-SPV", argv[i]))
+            m_canonicalize_spv = true;
+        else if (optionMatch("--devsim", argv[i]))
+            m_devsim_layer = true;
+        else if (optionMatch("--help", argv[i]) || optionMatch("-h", argv[i])) {
+            printf("\nOther options:\n");
+            printf(
+                "\t--show-images\n"
+                "\t\tDisplay test images in viewer after tests complete.\n");
+            printf(
+                "\t--save-images\n"
+                "\t\tSave tests images as ppm files in current working directory.\n"
+                "\t\tUsed to generate golden images for compare-images.\n");
+            printf(
+                "\t--compare-images\n"
+                "\t\tCompare test images to 'golden' image in golden folder.\n"
+                "\t\tAlso saves the generated test image in current working\n"
+                "\t\t\tdirectory but only if the image is different from the golden\n"
+                "\t\tSetting RENDERTEST_GOLDEN_DIR environment variable can specify\n"
+                "\t\t\tdifferent directory for golden images\n"
+                "\t\tSignal test failure if different.\n");
+            printf(
+                "\t--no-SPV\n"
+                "\t\tUse built-in GLSL compiler rather than SPV code path.\n");
+            printf(
+                "\t--strip-SPV\n"
+                "\t\tStrip SPIR-V debug information (line numbers, names, etc).\n");
+            printf(
+                "\t--canonicalize-SPV\n"
+                "\t\tRemap SPIR-V ids before submission to aid compression.\n");
+            exit(0);
+        } else {
+            printf("\nUnrecognized option: %s\n", argv[i]);
+            printf("\nUse --help or -h for option list.\n");
+            exit(0);
+        }
+
+        /*
+         * Since the options above are consumed here, update argv
+         * so that it only contains the trimmed list of remaining args.
+         */
+
+        argv[n] = argv[i];
+        n++;
+    }
+}
+
+VkFormat VkTestFramework::GetFormat(VkInstance instance, vk_testing::Device *device) {
+    VkFormatProperties format_props;
+
+    vk::GetPhysicalDeviceFormatProperties(device->phy().handle(), VK_FORMAT_B8G8R8A8_UNORM, &format_props);
+    if (format_props.linearTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT ||
+        format_props.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) {
+        return VK_FORMAT_B8G8R8A8_UNORM;
+    }
+    vk::GetPhysicalDeviceFormatProperties(device->phy().handle(), VK_FORMAT_R8G8B8A8_UNORM, &format_props);
+    if (format_props.linearTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT ||
+        format_props.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) {
+        return VK_FORMAT_R8G8B8A8_UNORM;
+    }
+    printf("Error - device does not support VK_FORMAT_B8G8R8A8_UNORM nor VK_FORMAT_R8G8B8A8_UNORM - exiting\n");
+    exit(1);
+}
+
+void VkTestFramework::Finish() {}
+
+//
+// These are the default resources for TBuiltInResources, used for both
+//  - parsing this string for the case where the user didn't supply one
+//  - dumping out a template for user construction of a config file
+//
+static const char *DefaultConfig =
+    "MaxLights 32\n"
+    "MaxClipPlanes 6\n"
+    "MaxTextureUnits 32\n"
+    "MaxTextureCoords 32\n"
+    "MaxVertexAttribs 64\n"
+    "MaxVertexUniformComponents 4096\n"
+    "MaxVaryingFloats 64\n"
+    "MaxVertexTextureImageUnits 32\n"
+    "MaxCombinedTextureImageUnits 80\n"
+    "MaxTextureImageUnits 32\n"
+    "MaxFragmentUniformComponents 4096\n"
+    "MaxDrawBuffers 32\n"
+    "MaxVertexUniformVectors 128\n"
+    "MaxVaryingVectors 8\n"
+    "MaxFragmentUniformVectors 16\n"
+    "MaxVertexOutputVectors 16\n"
+    "MaxFragmentInputVectors 15\n"
+    "MinProgramTexelOffset -8\n"
+    "MaxProgramTexelOffset 7\n"
+    "MaxClipDistances 8\n"
+    "MaxComputeWorkGroupCountX 65535\n"
+    "MaxComputeWorkGroupCountY 65535\n"
+    "MaxComputeWorkGroupCountZ 65535\n"
+    "MaxComputeWorkGroupSizeX 1024\n"
+    "MaxComputeWorkGroupSizeY 1024\n"
+    "MaxComputeWorkGroupSizeZ 64\n"
+    "MaxComputeUniformComponents 1024\n"
+    "MaxComputeTextureImageUnits 16\n"
+    "MaxComputeImageUniforms 8\n"
+    "MaxComputeAtomicCounters 8\n"
+    "MaxComputeAtomicCounterBuffers 1\n"
+    "MaxVaryingComponents 60\n"
+    "MaxVertexOutputComponents 64\n"
+    "MaxGeometryInputComponents 64\n"
+    "MaxGeometryOutputComponents 128\n"
+    "MaxFragmentInputComponents 128\n"
+    "MaxImageUnits 8\n"
+    "MaxCombinedImageUnitsAndFragmentOutputs 8\n"
+    "MaxCombinedShaderOutputResources 8\n"
+    "MaxImageSamples 0\n"
+    "MaxVertexImageUniforms 0\n"
+    "MaxTessControlImageUniforms 0\n"
+    "MaxTessEvaluationImageUniforms 0\n"
+    "MaxGeometryImageUniforms 0\n"
+    "MaxFragmentImageUniforms 8\n"
+    "MaxCombinedImageUniforms 8\n"
+    "MaxGeometryTextureImageUnits 16\n"
+    "MaxGeometryOutputVertices 256\n"
+    "MaxGeometryTotalOutputComponents 1024\n"
+    "MaxGeometryUniformComponents 1024\n"
+    "MaxGeometryVaryingComponents 64\n"
+    "MaxTessControlInputComponents 128\n"
+    "MaxTessControlOutputComponents 128\n"
+    "MaxTessControlTextureImageUnits 16\n"
+    "MaxTessControlUniformComponents 1024\n"
+    "MaxTessControlTotalOutputComponents 4096\n"
+    "MaxTessEvaluationInputComponents 128\n"
+    "MaxTessEvaluationOutputComponents 128\n"
+    "MaxTessEvaluationTextureImageUnits 16\n"
+    "MaxTessEvaluationUniformComponents 1024\n"
+    "MaxTessPatchComponents 120\n"
+    "MaxPatchVertices 32\n"
+    "MaxTessGenLevel 64\n"
+    "MaxViewports 16\n"
+    "MaxVertexAtomicCounters 0\n"
+    "MaxTessControlAtomicCounters 0\n"
+    "MaxTessEvaluationAtomicCounters 0\n"
+    "MaxGeometryAtomicCounters 0\n"
+    "MaxFragmentAtomicCounters 8\n"
+    "MaxCombinedAtomicCounters 8\n"
+    "MaxAtomicCounterBindings 1\n"
+    "MaxVertexAtomicCounterBuffers 0\n"
+    "MaxTessControlAtomicCounterBuffers 0\n"
+    "MaxTessEvaluationAtomicCounterBuffers 0\n"
+    "MaxGeometryAtomicCounterBuffers 0\n"
+    "MaxFragmentAtomicCounterBuffers 1\n"
+    "MaxCombinedAtomicCounterBuffers 1\n"
+    "MaxAtomicCounterBufferSize 16384\n"
+    "MaxTransformFeedbackBuffers 4\n"
+    "MaxTransformFeedbackInterleavedComponents 64\n"
+    "MaxCullDistances 8\n"
+    "MaxCombinedClipAndCullDistances 8\n"
+    "MaxSamples 4\n"
+    "MaxMeshOutputVerticesNV 256\n"
+    "MaxMeshOutputPrimitivesNV 512\n"
+    "MaxMeshWorkGroupSizeX_NV 32\n"
+    "MaxMeshWorkGroupSizeY_NV 1\n"
+    "MaxMeshWorkGroupSizeZ_NV 1\n"
+    "MaxTaskWorkGroupSizeX_NV 32\n"
+    "MaxTaskWorkGroupSizeY_NV 1\n"
+    "MaxTaskWorkGroupSizeZ_NV 1\n"
+    "MaxMeshViewCountNV 4\n"
+
+    "nonInductiveForLoops 1\n"
+    "whileLoops 1\n"
+    "doWhileLoops 1\n"
+    "generalUniformIndexing 1\n"
+    "generalAttributeMatrixVectorIndexing 1\n"
+    "generalVaryingIndexing 1\n"
+    "generalSamplerIndexing 1\n"
+    "generalVariableIndexing 1\n"
+    "generalConstantMatrixVectorIndexing 1\n";
+
+//
+// *.conf => this is a config file that can set limits/resources
+//
+bool VkTestFramework::SetConfigFile(const std::string &name) {
+    if (name.size() < 5) return false;
+
+    if (name.compare(name.size() - 5, 5, ".conf") == 0) {
+        ConfigFile = name;
+        return true;
+    }
+
+    return false;
+}
+
+//
+// Parse either a .conf file provided by the user or the default string above.
+//
+void VkTestFramework::ProcessConfigFile(VkPhysicalDeviceLimits const *const device_limits) {
+    char **configStrings = 0;
+    char *config = 0;
+    bool config_file_specified = false;
+    if (ConfigFile.size() > 0) {
+        configStrings = ReadFileData(ConfigFile.c_str());
+        if (configStrings) {
+            config = *configStrings;
+            config_file_specified = true;
+        } else {
+            printf("Error opening configuration file; will instead use the default configuration\n");
+        }
+    }
+
+    if (config == 0) {
+        config = (char *)alloca(strlen(DefaultConfig) + 1);
+        strcpy(config, DefaultConfig);
+    }
+
+    const char *delims = " \t\n\r";
+    const char *token = strtok(config, delims);
+    while (token) {
+        const char *valueStr = strtok(0, delims);
+        if (valueStr == 0 || !(valueStr[0] == '-' || (valueStr[0] >= '0' && valueStr[0] <= '9'))) {
+            printf("Error: '%s' bad .conf file.  Each name must be followed by one number.\n", valueStr ? valueStr : "");
+            return;
+        }
+        int value = atoi(valueStr);
+
+        if (strcmp(token, "MaxLights") == 0)
+            Resources.maxLights = value;
+        else if (strcmp(token, "MaxClipPlanes") == 0)
+            Resources.maxClipPlanes = value;
+        else if (strcmp(token, "MaxTextureUnits") == 0)
+            Resources.maxTextureUnits = value;
+        else if (strcmp(token, "MaxTextureCoords") == 0)
+            Resources.maxTextureCoords = value;
+        else if (strcmp(token, "MaxVertexAttribs") == 0)
+            Resources.maxVertexAttribs = value;
+        else if (strcmp(token, "MaxVertexUniformComponents") == 0)
+            Resources.maxVertexUniformComponents = value;
+        else if (strcmp(token, "MaxVaryingFloats") == 0)
+            Resources.maxVaryingFloats = value;
+        else if (strcmp(token, "MaxVertexTextureImageUnits") == 0)
+            Resources.maxVertexTextureImageUnits = value;
+        else if (strcmp(token, "MaxCombinedTextureImageUnits") == 0)
+            Resources.maxCombinedTextureImageUnits = value;
+        else if (strcmp(token, "MaxTextureImageUnits") == 0)
+            Resources.maxTextureImageUnits = value;
+        else if (strcmp(token, "MaxFragmentUniformComponents") == 0)
+            Resources.maxFragmentUniformComponents = value;
+        else if (strcmp(token, "MaxDrawBuffers") == 0)
+            Resources.maxDrawBuffers = value;
+        else if (strcmp(token, "MaxVertexUniformVectors") == 0)
+            Resources.maxVertexUniformVectors = value;
+        else if (strcmp(token, "MaxVaryingVectors") == 0)
+            Resources.maxVaryingVectors = value;
+        else if (strcmp(token, "MaxFragmentUniformVectors") == 0)
+            Resources.maxFragmentUniformVectors = value;
+        else if (strcmp(token, "MaxVertexOutputVectors") == 0)
+            Resources.maxVertexOutputVectors = value;
+        else if (strcmp(token, "MaxFragmentInputVectors") == 0)
+            Resources.maxFragmentInputVectors = value;
+        else if (strcmp(token, "MinProgramTexelOffset") == 0)
+            Resources.minProgramTexelOffset = value;
+        else if (strcmp(token, "MaxProgramTexelOffset") == 0)
+            Resources.maxProgramTexelOffset = value;
+        else if (strcmp(token, "MaxClipDistances") == 0)
+            Resources.maxClipDistances = (config_file_specified ? value : device_limits->maxClipDistances);
+        else if (strcmp(token, "MaxComputeWorkGroupCountX") == 0)
+            Resources.maxComputeWorkGroupCountX = (config_file_specified ? value : device_limits->maxComputeWorkGroupCount[0]);
+        else if (strcmp(token, "MaxComputeWorkGroupCountY") == 0)
+            Resources.maxComputeWorkGroupCountY = (config_file_specified ? value : device_limits->maxComputeWorkGroupCount[1]);
+        else if (strcmp(token, "MaxComputeWorkGroupCountZ") == 0)
+            Resources.maxComputeWorkGroupCountZ = (config_file_specified ? value : device_limits->maxComputeWorkGroupCount[2]);
+        else if (strcmp(token, "MaxComputeWorkGroupSizeX") == 0)
+            Resources.maxComputeWorkGroupSizeX = (config_file_specified ? value : device_limits->maxComputeWorkGroupSize[0]);
+        else if (strcmp(token, "MaxComputeWorkGroupSizeY") == 0)
+            Resources.maxComputeWorkGroupSizeY = (config_file_specified ? value : device_limits->maxComputeWorkGroupSize[1]);
+        else if (strcmp(token, "MaxComputeWorkGroupSizeZ") == 0)
+            Resources.maxComputeWorkGroupSizeZ = (config_file_specified ? value : device_limits->maxComputeWorkGroupSize[2]);
+        else if (strcmp(token, "MaxComputeUniformComponents") == 0)
+            Resources.maxComputeUniformComponents = value;
+        else if (strcmp(token, "MaxComputeTextureImageUnits") == 0)
+            Resources.maxComputeTextureImageUnits = value;
+        else if (strcmp(token, "MaxComputeImageUniforms") == 0)
+            Resources.maxComputeImageUniforms = value;
+        else if (strcmp(token, "MaxComputeAtomicCounters") == 0)
+            Resources.maxComputeAtomicCounters = value;
+        else if (strcmp(token, "MaxComputeAtomicCounterBuffers") == 0)
+            Resources.maxComputeAtomicCounterBuffers = value;
+        else if (strcmp(token, "MaxVaryingComponents") == 0)
+            Resources.maxVaryingComponents = value;
+        else if (strcmp(token, "MaxVertexOutputComponents") == 0)
+            Resources.maxVertexOutputComponents = (config_file_specified ? value : device_limits->maxVertexOutputComponents);
+        else if (strcmp(token, "MaxGeometryInputComponents") == 0)
+            Resources.maxGeometryInputComponents = (config_file_specified ? value : device_limits->maxGeometryInputComponents);
+        else if (strcmp(token, "MaxGeometryOutputComponents") == 0)
+            Resources.maxGeometryOutputComponents = (config_file_specified ? value : device_limits->maxGeometryOutputComponents);
+        else if (strcmp(token, "MaxFragmentInputComponents") == 0)
+            Resources.maxFragmentInputComponents = (config_file_specified ? value : device_limits->maxFragmentInputComponents);
+        else if (strcmp(token, "MaxImageUnits") == 0)
+            Resources.maxImageUnits = value;
+        else if (strcmp(token, "MaxCombinedImageUnitsAndFragmentOutputs") == 0)
+            Resources.maxCombinedImageUnitsAndFragmentOutputs = value;
+        else if (strcmp(token, "MaxCombinedShaderOutputResources") == 0)
+            Resources.maxCombinedShaderOutputResources = value;
+        else if (strcmp(token, "MaxImageSamples") == 0)
+            Resources.maxImageSamples = value;
+        else if (strcmp(token, "MaxVertexImageUniforms") == 0)
+            Resources.maxVertexImageUniforms = value;
+        else if (strcmp(token, "MaxTessControlImageUniforms") == 0)
+            Resources.maxTessControlImageUniforms = value;
+        else if (strcmp(token, "MaxTessEvaluationImageUniforms") == 0)
+            Resources.maxTessEvaluationImageUniforms = value;
+        else if (strcmp(token, "MaxGeometryImageUniforms") == 0)
+            Resources.maxGeometryImageUniforms = value;
+        else if (strcmp(token, "MaxFragmentImageUniforms") == 0)
+            Resources.maxFragmentImageUniforms = value;
+        else if (strcmp(token, "MaxCombinedImageUniforms") == 0)
+            Resources.maxCombinedImageUniforms = value;
+        else if (strcmp(token, "MaxGeometryTextureImageUnits") == 0)
+            Resources.maxGeometryTextureImageUnits = value;
+        else if (strcmp(token, "MaxGeometryOutputVertices") == 0)
+            Resources.maxGeometryOutputVertices = (config_file_specified ? value : device_limits->maxGeometryOutputVertices);
+        else if (strcmp(token, "MaxGeometryTotalOutputComponents") == 0)
+            Resources.maxGeometryTotalOutputComponents =
+                (config_file_specified ? value : device_limits->maxGeometryTotalOutputComponents);
+        else if (strcmp(token, "MaxGeometryUniformComponents") == 0)
+            Resources.maxGeometryUniformComponents = value;
+        else if (strcmp(token, "MaxGeometryVaryingComponents") == 0)
+            Resources.maxGeometryVaryingComponents = value;
+        else if (strcmp(token, "MaxTessControlInputComponents") == 0)
+            Resources.maxTessControlInputComponents = value;
+        else if (strcmp(token, "MaxTessControlOutputComponents") == 0)
+            Resources.maxTessControlOutputComponents = value;
+        else if (strcmp(token, "MaxTessControlTextureImageUnits") == 0)
+            Resources.maxTessControlTextureImageUnits = value;
+        else if (strcmp(token, "MaxTessControlUniformComponents") == 0)
+            Resources.maxTessControlUniformComponents = value;
+        else if (strcmp(token, "MaxTessControlTotalOutputComponents") == 0)
+            Resources.maxTessControlTotalOutputComponents = value;
+        else if (strcmp(token, "MaxTessEvaluationInputComponents") == 0)
+            Resources.maxTessEvaluationInputComponents = value;
+        else if (strcmp(token, "MaxTessEvaluationOutputComponents") == 0)
+            Resources.maxTessEvaluationOutputComponents = value;
+        else if (strcmp(token, "MaxTessEvaluationTextureImageUnits") == 0)
+            Resources.maxTessEvaluationTextureImageUnits = value;
+        else if (strcmp(token, "MaxTessEvaluationUniformComponents") == 0)
+            Resources.maxTessEvaluationUniformComponents = value;
+        else if (strcmp(token, "MaxTessPatchComponents") == 0)
+            Resources.maxTessPatchComponents = value;
+        else if (strcmp(token, "MaxPatchVertices") == 0)
+            Resources.maxPatchVertices = value;
+        else if (strcmp(token, "MaxTessGenLevel") == 0)
+            Resources.maxTessGenLevel = value;
+        else if (strcmp(token, "MaxViewports") == 0)
+            Resources.maxViewports = (config_file_specified ? value : device_limits->maxViewports);
+        else if (strcmp(token, "MaxVertexAtomicCounters") == 0)
+            Resources.maxVertexAtomicCounters = value;
+        else if (strcmp(token, "MaxTessControlAtomicCounters") == 0)
+            Resources.maxTessControlAtomicCounters = value;
+        else if (strcmp(token, "MaxTessEvaluationAtomicCounters") == 0)
+            Resources.maxTessEvaluationAtomicCounters = value;
+        else if (strcmp(token, "MaxGeometryAtomicCounters") == 0)
+            Resources.maxGeometryAtomicCounters = value;
+        else if (strcmp(token, "MaxFragmentAtomicCounters") == 0)
+            Resources.maxFragmentAtomicCounters = value;
+        else if (strcmp(token, "MaxCombinedAtomicCounters") == 0)
+            Resources.maxCombinedAtomicCounters = value;
+        else if (strcmp(token, "MaxAtomicCounterBindings") == 0)
+            Resources.maxAtomicCounterBindings = value;
+        else if (strcmp(token, "MaxVertexAtomicCounterBuffers") == 0)
+            Resources.maxVertexAtomicCounterBuffers = value;
+        else if (strcmp(token, "MaxTessControlAtomicCounterBuffers") == 0)
+            Resources.maxTessControlAtomicCounterBuffers = value;
+        else if (strcmp(token, "MaxTessEvaluationAtomicCounterBuffers") == 0)
+            Resources.maxTessEvaluationAtomicCounterBuffers = value;
+        else if (strcmp(token, "MaxGeometryAtomicCounterBuffers") == 0)
+            Resources.maxGeometryAtomicCounterBuffers = value;
+        else if (strcmp(token, "MaxFragmentAtomicCounterBuffers") == 0)
+            Resources.maxFragmentAtomicCounterBuffers = value;
+        else if (strcmp(token, "MaxCombinedAtomicCounterBuffers") == 0)
+            Resources.maxCombinedAtomicCounterBuffers = value;
+        else if (strcmp(token, "MaxAtomicCounterBufferSize") == 0)
+            Resources.maxAtomicCounterBufferSize = value;
+        else if (strcmp(token, "MaxTransformFeedbackBuffers") == 0)
+            Resources.maxTransformFeedbackBuffers = value;
+        else if (strcmp(token, "MaxTransformFeedbackInterleavedComponents") == 0)
+            Resources.maxTransformFeedbackInterleavedComponents = value;
+        else if (strcmp(token, "MaxCullDistances") == 0)
+            Resources.maxCullDistances = (config_file_specified ? value : device_limits->maxCullDistances);
+        else if (strcmp(token, "MaxCombinedClipAndCullDistances") == 0)
+            Resources.maxCombinedClipAndCullDistances = value;
+        else if (strcmp(token, "MaxSamples") == 0)
+            Resources.maxSamples = value;
+        else if (strcmp(token, "MaxMeshOutputVerticesNV") == 0)
+            Resources.maxMeshOutputVerticesNV = value;
+        else if (strcmp(token, "MaxMeshOutputPrimitivesNV") == 0)
+            Resources.maxMeshOutputPrimitivesNV = value;
+        else if (strcmp(token, "MaxMeshWorkGroupSizeX_NV") == 0)
+            Resources.maxMeshWorkGroupSizeX_NV = value;
+        else if (strcmp(token, "MaxMeshWorkGroupSizeY_NV") == 0)
+            Resources.maxMeshWorkGroupSizeY_NV = value;
+        else if (strcmp(token, "MaxMeshWorkGroupSizeZ_NV") == 0)
+            Resources.maxMeshWorkGroupSizeZ_NV = value;
+        else if (strcmp(token, "MaxTaskWorkGroupSizeX_NV") == 0)
+            Resources.maxTaskWorkGroupSizeX_NV = value;
+        else if (strcmp(token, "MaxTaskWorkGroupSizeY_NV") == 0)
+            Resources.maxTaskWorkGroupSizeY_NV = value;
+        else if (strcmp(token, "MaxTaskWorkGroupSizeZ_NV") == 0)
+            Resources.maxTaskWorkGroupSizeZ_NV = value;
+        else if (strcmp(token, "MaxMeshViewCountNV") == 0)
+            Resources.maxMeshViewCountNV = value;
+
+        else if (strcmp(token, "nonInductiveForLoops") == 0)
+            Resources.limits.nonInductiveForLoops = (value != 0);
+        else if (strcmp(token, "whileLoops") == 0)
+            Resources.limits.whileLoops = (value != 0);
+        else if (strcmp(token, "doWhileLoops") == 0)
+            Resources.limits.doWhileLoops = (value != 0);
+        else if (strcmp(token, "generalUniformIndexing") == 0)
+            Resources.limits.generalUniformIndexing = (value != 0);
+        else if (strcmp(token, "generalAttributeMatrixVectorIndexing") == 0)
+            Resources.limits.generalAttributeMatrixVectorIndexing = (value != 0);
+        else if (strcmp(token, "generalVaryingIndexing") == 0)
+            Resources.limits.generalVaryingIndexing = (value != 0);
+        else if (strcmp(token, "generalSamplerIndexing") == 0)
+            Resources.limits.generalSamplerIndexing = (value != 0);
+        else if (strcmp(token, "generalVariableIndexing") == 0)
+            Resources.limits.generalVariableIndexing = (value != 0);
+        else if (strcmp(token, "generalConstantMatrixVectorIndexing") == 0)
+            Resources.limits.generalConstantMatrixVectorIndexing = (value != 0);
+        else
+            printf("Warning: unrecognized limit (%s) in configuration file.\n", token);
+
+        token = strtok(0, delims);
+    }
+    if (configStrings) FreeFileData(configStrings);
+}
+
+void VkTestFramework::SetMessageOptions(EShMessages &messages) {
+    if (m_compile_options & EOptionRelaxedErrors) messages = (EShMessages)(messages | EShMsgRelaxedErrors);
+    if (m_compile_options & EOptionIntermediate) messages = (EShMessages)(messages | EShMsgAST);
+    if (m_compile_options & EOptionSuppressWarnings) messages = (EShMessages)(messages | EShMsgSuppressWarnings);
+}
+
+//
+//   Malloc a string of sufficient size and read a string into it.
+//
+char **VkTestFramework::ReadFileData(const char *fileName) {
+    FILE *in;
+#if defined(_WIN32) && defined(__GNUC__)
+    in = fopen(fileName, "r");
+    int errorCode = in ? 0 : 1;
+#else
+    int errorCode = fopen_s(&in, fileName, "r");
+#endif
+
+    char *fdata;
+    size_t count = 0;
+    const int maxSourceStrings = 5;
+    char **return_data = (char **)malloc(sizeof(char *) * (maxSourceStrings + 1));
+
+    if (errorCode) {
+        printf("Error: unable to open input file: %s\n", fileName);
+        return 0;
+    }
+
+    while (fgetc(in) != EOF) count++;
+
+    fseek(in, 0, SEEK_SET);
+
+    if (!(fdata = (char *)malloc(count + 2))) {
+        printf("Error allocating memory\n");
+        return 0;
+    }
+    if (fread(fdata, 1, count, in) != count) {
+        printf("Error reading input file: %s\n", fileName);
+        return 0;
+    }
+    fdata[count] = '\0';
+    fclose(in);
+    if (count == 0) {
+        return_data[0] = (char *)malloc(count + 2);
+        return_data[0][0] = '\0';
+        m_num_shader_strings = 0;
+        return return_data;
+    } else
+        m_num_shader_strings = 1;
+
+    size_t len = (int)(ceil)((float)count / (float)m_num_shader_strings);
+    size_t ptr_len = 0, i = 0;
+    while (count > 0) {
+        return_data[i] = (char *)malloc(len + 2);
+        memcpy(return_data[i], fdata + ptr_len, len);
+        return_data[i][len] = '\0';
+        count -= (len);
+        ptr_len += (len);
+        if (count < len) {
+            if (count == 0) {
+                m_num_shader_strings = (i + 1);
+                break;
+            }
+            len = count;
+        }
+        ++i;
+    }
+    return return_data;
+}
+
+void VkTestFramework::FreeFileData(char **data) {
+    for (int i = 0; i < m_num_shader_strings; i++) free(data[i]);
+    // Also release the pointer array allocated by ReadFileData.
+    free(data);
+}
+
+//
+//   Deduce the language from the filename.  Files must end in one of the
+//   following extensions:
+//
+//   .vert = vertex
+//   .tesc = tessellation control
+//   .tese = tessellation evaluation
+//   .geom = geometry
+//   .frag = fragment
+//   .comp = compute
+//
+EShLanguage VkTestFramework::FindLanguage(const std::string &name) {
+    size_t ext = name.rfind('.');
+    if (ext == std::string::npos) {
+        return EShLangVertex;
+    }
+
+    std::string suffix = name.substr(ext + 1, std::string::npos);
+    if (suffix == "vert")
+        return EShLangVertex;
+    else if (suffix == "tesc")
+        return EShLangTessControl;
+    else if (suffix == "tese")
+        return EShLangTessEvaluation;
+    else if (suffix == "geom")
+        return EShLangGeometry;
+    else if (suffix == "frag")
+        return EShLangFragment;
+    else if (suffix == "comp")
+        return EShLangCompute;
+
+    return EShLangVertex;
+}
+
+//
+// Convert VK shader type to compiler's
+//
+EShLanguage VkTestFramework::FindLanguage(const VkShaderStageFlagBits shader_type) {
+    switch (shader_type) {
+        case VK_SHADER_STAGE_VERTEX_BIT:
+            return EShLangVertex;
+
+        case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
+            return EShLangTessControl;
+
+        case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
+            return EShLangTessEvaluation;
+
+        case VK_SHADER_STAGE_GEOMETRY_BIT:
+            return EShLangGeometry;
+
+        case VK_SHADER_STAGE_FRAGMENT_BIT:
+            return EShLangFragment;
+
+        case VK_SHADER_STAGE_COMPUTE_BIT:
+            return EShLangCompute;
+
+        case VK_SHADER_STAGE_RAYGEN_BIT_NV:
+            return EShLangRayGenNV;
+
+        case VK_SHADER_STAGE_ANY_HIT_BIT_NV:
+            return EShLangAnyHitNV;
+
+        case VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV:
+            return EShLangClosestHitNV;
+
+        case VK_SHADER_STAGE_MISS_BIT_NV:
+            return EShLangMissNV;
+
+        case VK_SHADER_STAGE_INTERSECTION_BIT_NV:
+            return EShLangIntersectNV;
+
+        case VK_SHADER_STAGE_CALLABLE_BIT_NV:
+            return EShLangCallableNV;
+
+        case VK_SHADER_STAGE_TASK_BIT_NV:
+            return EShLangTaskNV;
+
+        case VK_SHADER_STAGE_MESH_BIT_NV:
+            return EShLangMeshNV;
+
+        default:
+            return EShLangVertex;
+    }
+}
+
+//
+// Compile a given string containing GLSL into SPV for use by VK
+// Return value of false means an error was encountered.
+//
+bool VkTestFramework::GLSLtoSPV(VkPhysicalDeviceLimits const *const device_limits, const VkShaderStageFlagBits shader_type,
+                                const char *pshader, std::vector<unsigned int> &spirv, bool debug, uint32_t spirv_minor_version) {
+    glslang::TProgram program;
+    const char *shaderStrings[1];
+
+    // TODO: Do we want to load a special config file depending on the
+    // shader source? Optional name maybe?
+    //    SetConfigFile(fileName);
+
+    ProcessConfigFile(device_limits);
+
+    EShMessages messages = EShMsgDefault;
+    SetMessageOptions(messages);
+    messages = static_cast<EShMessages>(messages | EShMsgSpvRules | EShMsgVulkanRules);
+    if (debug) {
+        messages = static_cast<EShMessages>(messages | EShMsgDebugInfo);
+    }
+
+    EShLanguage stage = FindLanguage(shader_type);
+    glslang::TShader *shader = new glslang::TShader(stage);
+    switch (spirv_minor_version) {
+        default:
+        case 0:
+            shader->setEnvTarget(glslang::EShTargetSpv, glslang::EShTargetSpv_1_0);
+            break;
+        case 1:
+            shader->setEnvTarget(glslang::EShTargetSpv, glslang::EShTargetSpv_1_1);
+            break;
+        case 2:
+            shader->setEnvTarget(glslang::EShTargetSpv, glslang::EShTargetSpv_1_2);
+            break;
+        case 3:
+            shader->setEnvTarget(glslang::EShTargetSpv, glslang::EShTargetSpv_1_3);
+            break;
+        case 4:
+            shader->setEnvTarget(glslang::EShTargetSpv, glslang::EShTargetSpv_1_4);
+            break;
+        case 5:
+            shader->setEnvTarget(glslang::EShTargetSpv, glslang::EShTargetSpv_1_5);
+            break;
+    }
+
+    shaderStrings[0] = pshader;
+    shader->setStrings(shaderStrings, 1);
+
+    if (!shader->parse(&Resources, (m_compile_options & EOptionDefaultDesktop) ? 110 : 100, false, messages)) {
+        if (!(m_compile_options & EOptionSuppressInfolog)) {
+            puts(shader->getInfoLog());
+            puts(shader->getInfoDebugLog());
+        }
+
+        return false;  // something didn't work
+    }
+
+    program.addShader(shader);
+
+    //
+    // Program-level processing...
+    //
+
+    if (!program.link(messages)) {
+        if (!(m_compile_options & EOptionSuppressInfolog)) {
+            puts(shader->getInfoLog());
+            puts(shader->getInfoDebugLog());
+        }
+
+        return false;
+    }
+
+    if (m_compile_options & EOptionDumpReflection) {
+        program.buildReflection();
+        program.dumpReflection();
+    }
+
+    glslang::SpvOptions spv_options;
+    if (debug) {
+        spv_options.generateDebugInfo = true;
+    }
+    glslang::GlslangToSpv(*program.getIntermediate(stage), spirv, &spv_options);
+
+    //
+    // Test the different modes of SPIR-V modification
+    //
+    if (this->m_canonicalize_spv) {
+        spv::spirvbin_t(0).remap(spirv, spv::spirvbin_t::ALL_BUT_STRIP);
+    }
+
+    if (this->m_strip_spv) {
+        spv::spirvbin_t(0).remap(spirv, spv::spirvbin_t::STRIP);
+    }
+
+    if (this->m_do_everything_spv) {
+        spv::spirvbin_t(0).remap(spirv, spv::spirvbin_t::DO_EVERYTHING);
+    }
+
+    delete shader;
+
+    return true;
+}
+
+//
+// Compile a given string containing SPIR-V assembly into SPV for use by VK
+// Return value of false means an error was encountered.
+//
+bool VkTestFramework::ASMtoSPV(const spv_target_env target_env, const uint32_t options, const char *pasm,
+                               std::vector<unsigned int> &spv) {
+    spv_binary binary;
+    spv_diagnostic diagnostic = nullptr;
+    spv_context context = spvContextCreate(target_env);
+    spv_result_t error = spvTextToBinaryWithOptions(context, pasm, strlen(pasm), options, &binary, &diagnostic);
+    spvContextDestroy(context);
+    if (error) {
+        spvDiagnosticPrint(diagnostic);
+        spvDiagnosticDestroy(diagnostic);
+        return false;
+    }
+    spv.insert(spv.end(), binary->code, binary->code + binary->wordCount);
+    spvBinaryDestroy(binary);
+
+    return true;
+}
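(Editorial note: for orientation, a rough sketch of how a test built on this framework might exercise GLSLtoSPV. The fixture name, gpu() accessor, and shader source are illustrative only, and glslang initialization is already handled by TestEnvironment::SetUp above.)

    // Hypothetical test body compiling a trivial vertex shader to SPIR-V with
    // the member function defined above. MyVkTest is an illustrative fixture
    // derived from VkTestFramework; the device limits would normally come from
    // vkGetPhysicalDeviceProperties on the GPU under test.
    TEST_F(MyVkTest, CompileTrivialVertexShader) {
        static const char *vs_source =
            "#version 450\n"
            "void main() { gl_Position = vec4(0.0); }\n";

        VkPhysicalDeviceProperties props = {};
        vkGetPhysicalDeviceProperties(gpu(), &props);  // gpu() is a placeholder accessor

        std::vector<unsigned int> spirv;
        ASSERT_TRUE(GLSLtoSPV(&props.limits, VK_SHADER_STAGE_VERTEX_BIT, vs_source, spirv));
        ASSERT_FALSE(spirv.empty());
    }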
diff --git a/src/third_party/vulkan-validation-layers/src/tests/vktestframework.h b/src/third_party/vulkan-validation-layers/src/tests/vktestframework.h
new file mode 100644
index 0000000..1a56ab4
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/vktestframework.h
@@ -0,0 +1,98 @@
+/*
+ * Copyright (c) 2015-2019 The Khronos Group Inc.
+ * Copyright (c) 2015-2019 Valve Corporation
+ * Copyright (c) 2015-2019 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
+ * Author: Tony Barbour <tony@LunarG.com>
+ */
+
+#ifndef VKTESTFRAMEWORK_H
+#define VKTESTFRAMEWORK_H
+
+#include "SPIRV/GLSL.std.450.h"
+#include "spirv-tools/libspirv.h"
+#include "glslang/Public/ShaderLang.h"
+#include "icd-spv.h"
+#include "test_common.h"
+#include "test_environment.h"
+
+#include <fstream>
+#include <iostream>
+#include <list>
+#include <stdbool.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#ifdef _WIN32
+#ifndef WIN32_LEAN_AND_MEAN
+#define WIN32_LEAN_AND_MEAN
+#endif
+#include <windows.h>
+#endif
+
+// Can be used by tests to record additional details / description of test
+#define TEST_DESCRIPTION(desc) RecordProperty("description", desc)
+
+using namespace std;
+
+class VkImageObj;
+
+class VkTestFramework : public ::testing::Test {
+  public:
+    VkFormat GetFormat(VkInstance instance, vk_testing::Device *device);
+    static bool optionMatch(const char *option, char *optionLine);
+    static void InitArgs(int *argc, char *argv[]);
+    static void Finish();
+
+    bool GLSLtoSPV(VkPhysicalDeviceLimits const *const device_limits, const VkShaderStageFlagBits shader_type, const char *pshader,
+                   std::vector<unsigned int> &spv, bool debug = false, uint32_t spirv_minor_version = 0);
+    bool ASMtoSPV(const spv_target_env target_env, const uint32_t options, const char *pasm, std::vector<unsigned int> &spv);
+    static bool m_canonicalize_spv;
+    static bool m_strip_spv;
+    static bool m_do_everything_spv;
+    static bool m_devsim_layer;
+
+    char **ReadFileData(const char *fileName);
+    void FreeFileData(char **data);
+
+  protected:
+    VkTestFramework();
+    virtual ~VkTestFramework() = 0;
+
+  private:
+    int m_compile_options;
+    int m_num_shader_strings;
+    TBuiltInResource Resources;
+    void SetMessageOptions(EShMessages &messages);
+    void ProcessConfigFile(VkPhysicalDeviceLimits const *const device_limits);
+    EShLanguage FindLanguage(const std::string &name);
+    EShLanguage FindLanguage(const VkShaderStageFlagBits shader_type);
+    std::string ConfigFile;
+    bool SetConfigFile(const std::string &name);
+    static int m_width;
+    static int m_height;
+    string m_testName;
+};
+
+class TestEnvironment : public ::testing::Environment {
+  public:
+    void SetUp();
+
+    void TearDown();
+};
+
+#endif  // VKTESTFRAMEWORK_H
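(Editorial note: the TEST_DESCRIPTION macro declared above simply forwards to googletest's RecordProperty. An illustrative use inside a test body follows; the fixture name is hypothetical.)

    TEST_F(MyVkTest, DescriptorSetUpdate) {
        // Records a human-readable description in the test's XML/JSON output.
        TEST_DESCRIPTION("Verify that a basic descriptor set update succeeds.");
        // ... test body ...
    }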
diff --git a/src/third_party/vulkan-validation-layers/src/tests/vktestframeworkandroid.cpp b/src/third_party/vulkan-validation-layers/src/tests/vktestframeworkandroid.cpp
new file mode 100644
index 0000000..f6b594a
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/vktestframeworkandroid.cpp
@@ -0,0 +1,166 @@
+//  VK tests
+//
+//  Copyright (c) 2015-2019 The Khronos Group Inc.
+//  Copyright (c) 2015-2019 Valve Corporation
+//  Copyright (c) 2015-2019 LunarG, Inc.
+//  Copyright (c) 2015-2019 Google, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "vktestframeworkandroid.h"
+#include "shaderc/shaderc.hpp"
+#include <android/log.h>
+
+VkTestFramework::VkTestFramework() {}
+VkTestFramework::~VkTestFramework() {}
+
+// Define static elements
+bool VkTestFramework::m_devsim_layer = false;
+ANativeWindow *VkTestFramework::window = nullptr;
+
+VkFormat VkTestFramework::GetFormat(VkInstance instance, vk_testing::Device *device) {
+    VkFormatProperties format_props;
+    vk::GetPhysicalDeviceFormatProperties(device->phy().handle(), VK_FORMAT_B8G8R8A8_UNORM, &format_props);
+    if (format_props.linearTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT ||
+        format_props.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) {
+        return VK_FORMAT_B8G8R8A8_UNORM;
+    }
+    vk::GetPhysicalDeviceFormatProperties(device->phy().handle(), VK_FORMAT_R8G8B8A8_UNORM, &format_props);
+    if (format_props.linearTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT ||
+        format_props.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) {
+        return VK_FORMAT_R8G8B8A8_UNORM;
+    }
+    printf("Error - device does not support VK_FORMAT_B8G8R8A8_UNORM nor VK_FORMAT_R8G8B8A8_UNORM - exiting\n");
+    exit(1);
+}
+
+void VkTestFramework::InitArgs(int *argc, char *argv[]) {}
+void VkTestFramework::Finish() {}
+
+void TestEnvironment::SetUp() {
+    vk_testing::set_error_callback(test_error_callback);
+
+    vk::InitDispatchTable();
+}
+
+void TestEnvironment::TearDown() {}
+
+// Android specific helper functions for shaderc.
+struct shader_type_mapping {
+    VkShaderStageFlagBits vkshader_type;
+    shaderc_shader_kind shaderc_type;
+};
+
+static const shader_type_mapping shader_map_table[] = {
+    {VK_SHADER_STAGE_VERTEX_BIT, shaderc_glsl_vertex_shader},
+    {VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, shaderc_glsl_tess_control_shader},
+    {VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, shaderc_glsl_tess_evaluation_shader},
+    {VK_SHADER_STAGE_GEOMETRY_BIT, shaderc_glsl_geometry_shader},
+    {VK_SHADER_STAGE_FRAGMENT_BIT, shaderc_glsl_fragment_shader},
+    {VK_SHADER_STAGE_COMPUTE_BIT, shaderc_glsl_compute_shader},
+};
+
+shaderc_shader_kind MapShadercType(VkShaderStageFlagBits vkShader) {
+    for (auto shader : shader_map_table) {
+        if (shader.vkshader_type == vkShader) {
+            return shader.shaderc_type;
+        }
+    }
+    assert(false);
+    return shaderc_glsl_infer_from_source;
+}
+
+// Compile a given string containing GLSL into SPIR-V
+// Return value of false means an error was encountered
+bool VkTestFramework::GLSLtoSPV(VkPhysicalDeviceLimits const *const device_limits, const VkShaderStageFlagBits shader_type,
+                                const char *pshader, std::vector<unsigned int> &spirv, bool debug, uint32_t spirv_minor_version) {
+    // On Android, use shaderc instead.
+    shaderc::Compiler compiler;
+    shaderc::CompileOptions options;
+    if (debug) {
+        options.SetOptimizationLevel(shaderc_optimization_level_zero);
+        options.SetGenerateDebugInfo();
+    }
+
+    switch (spirv_minor_version) {
+        default:
+        case 0:
+            options.SetTargetSpirv(shaderc_spirv_version_1_0);
+            break;
+        case 1:
+            options.SetTargetSpirv(shaderc_spirv_version_1_1);
+            break;
+        case 2:
+            options.SetTargetSpirv(shaderc_spirv_version_1_2);
+            break;
+        case 3:
+            options.SetTargetSpirv(shaderc_spirv_version_1_3);
+            break;
+        case 4:
+            options.SetTargetSpirv(shaderc_spirv_version_1_4);
+            break;
+    }
+
+    // Override glslang built-in GL settings with actual hardware values
+    options.SetLimit(shaderc_limit_max_clip_distances, device_limits->maxClipDistances);
+    options.SetLimit(shaderc_limit_max_compute_work_group_count_x, device_limits->maxComputeWorkGroupCount[0]);
+    options.SetLimit(shaderc_limit_max_compute_work_group_count_y, device_limits->maxComputeWorkGroupCount[1]);
+    options.SetLimit(shaderc_limit_max_compute_work_group_count_z, device_limits->maxComputeWorkGroupCount[2]);
+    options.SetLimit(shaderc_limit_max_compute_work_group_size_x, device_limits->maxComputeWorkGroupSize[0]);
+    options.SetLimit(shaderc_limit_max_compute_work_group_size_y, device_limits->maxComputeWorkGroupSize[1]);
+    options.SetLimit(shaderc_limit_max_compute_work_group_size_z, device_limits->maxComputeWorkGroupSize[2]);
+    options.SetLimit(shaderc_limit_max_cull_distances, device_limits->maxCullDistances);
+    options.SetLimit(shaderc_limit_max_fragment_input_components, device_limits->maxFragmentInputComponents);
+    options.SetLimit(shaderc_limit_max_geometry_input_components, device_limits->maxGeometryInputComponents);
+    options.SetLimit(shaderc_limit_max_geometry_output_components, device_limits->maxGeometryOutputComponents);
+    options.SetLimit(shaderc_limit_max_geometry_output_vertices, device_limits->maxGeometryOutputVertices);
+    options.SetLimit(shaderc_limit_max_geometry_total_output_components, device_limits->maxGeometryTotalOutputComponents);
+    options.SetLimit(shaderc_limit_max_vertex_output_components, device_limits->maxVertexOutputComponents);
+    options.SetLimit(shaderc_limit_max_viewports, device_limits->maxViewports);
+
+    shaderc::SpvCompilationResult result =
+        compiler.CompileGlslToSpv(pshader, strlen(pshader), MapShadercType(shader_type), "shader", options);
+    if (result.GetCompilationStatus() != shaderc_compilation_status_success) {
+        __android_log_print(ANDROID_LOG_ERROR, "VulkanLayerValidationTests", "GLSLtoSPV compilation failed: %s",
+                            result.GetErrorMessage().c_str());
+        return false;
+    }
+
+    for (auto iter = result.begin(); iter != result.end(); iter++) {
+        spirv.push_back(*iter);
+    }
+
+    return true;
+}
+
+//
+// Compile a given string containing SPIR-V assembly into SPV for use by VK
+// Return value of false means an error was encountered.
+//
+bool VkTestFramework::ASMtoSPV(const spv_target_env target_env, const uint32_t options, const char *pasm,
+                               std::vector<unsigned int> &spv) {
+    spv_binary binary;
+    spv_diagnostic diagnostic = nullptr;
+    spv_context context = spvContextCreate(target_env);
+    spv_result_t error = spvTextToBinaryWithOptions(context, pasm, strlen(pasm), options, &binary, &diagnostic);
+    spvContextDestroy(context);
+    if (error) {
+        __android_log_print(ANDROID_LOG_ERROR, "VkLayerValidationTest", "ASMtoSPV compilation failed");
+        spvDiagnosticDestroy(diagnostic);
+        return false;
+    }
+    spv.insert(spv.end(), binary->code, binary->code + binary->wordCount);
+    spvBinaryDestroy(binary);
+
+    return true;
+}
diff --git a/src/third_party/vulkan-validation-layers/src/tests/vktestframeworkandroid.h b/src/third_party/vulkan-validation-layers/src/tests/vktestframeworkandroid.h
new file mode 100644
index 0000000..577622d
--- /dev/null
+++ b/src/third_party/vulkan-validation-layers/src/tests/vktestframeworkandroid.h
@@ -0,0 +1,54 @@
+//  VK tests
+//
+//  Copyright (c) 2015-2019 The Khronos Group Inc.
+//  Copyright (c) 2015-2019 Valve Corporation
+//  Copyright (c) 2015-2019 LunarG, Inc.
+//  Copyright (c) 2015-2019 Google, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef VKTESTFRAMEWORKANDROID_H
+#define VKTESTFRAMEWORKANDROID_H
+
+#include "spirv-tools/libspirv.h"
+#include "test_common.h"
+
+// Can be used by tests to record additional details / description of test
+#define TEST_DESCRIPTION(desc) RecordProperty("description", desc)
+
+#define ICD_SPV_MAGIC 0x07230203
+
+class VkTestFramework : public ::testing::Test {
+  public:
+    VkTestFramework();
+    ~VkTestFramework();
+
+    static void InitArgs(int *argc, char *argv[]);
+    static void Finish();
+
+    VkFormat GetFormat(VkInstance instance, vk_testing::Device *device);
+    bool GLSLtoSPV(VkPhysicalDeviceLimits const *const device_limits, const VkShaderStageFlagBits shader_type, const char *pshader,
+                   std::vector<unsigned int> &spv, bool debug = false, uint32_t spirv_minor_version = 0);
+    bool ASMtoSPV(const spv_target_env target_env, const uint32_t options, const char *pasm, std::vector<unsigned int> &spv);
+    static bool m_devsim_layer;
+    static ANativeWindow *window;
+};
+
+class TestEnvironment : public ::testing::Environment {
+  public:
+    void SetUp();
+
+    void TearDown();
+};
+
+#endif
diff --git a/src/third_party/zlib/zlib.gyp b/src/third_party/zlib/zlib.gyp
index d8bdf96..89238fb 100644
--- a/src/third_party/zlib/zlib.gyp
+++ b/src/third_party/zlib/zlib.gyp
@@ -51,6 +51,12 @@
           'include_dirs': [
             '.',
           ],
+          'all_dependent_settings': {
+            'defines': [
+               # To prevent zlib from ever redefining the const keyword in zconf.h
+              'STDC'
+            ]
+          },
           'direct_dependent_settings': {
             'include_dirs': [
               '.',
diff --git a/src/url/run_all_unittests.cc b/src/url/run_all_unittests.cc
index cfc6a43..916a3eb 100644
--- a/src/url/run_all_unittests.cc
+++ b/src/url/run_all_unittests.cc
@@ -2,15 +2,28 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-#include <memory>
-
 #include "base/bind.h"
-#include "base/test/launcher/unit_test_launcher.h"
-#include "base/test/test_io_thread.h"
 #include "base/test/test_suite.h"
 #include "build/build_config.h"
 
-#if !defined(OS_IOS) && !defined(STARBOARD)
+#if defined(STARBOARD)
+
+#include "starboard/client_porting/wrap_main/wrap_main.h"
+
+int TestSuiteRun(int argc, char** argv) {
+  base::TestSuite test_suite(argc, argv);
+  return test_suite.Run();
+}
+
+STARBOARD_WRAP_SIMPLE_MAIN(TestSuiteRun);
+
+#else
+
+#include <memory>
+#include "base/test/launcher/unit_test_launcher.h"
+#include "base/test/test_io_thread.h"
+
+#if !defined(OS_IOS)
 #include "mojo/core/embedder/embedder.h"  // nogncheck
 #endif
 
@@ -25,3 +38,5 @@
       argc, argv,
       base::BindOnce(&base::TestSuite::Run, base::Unretained(&test_suite)));
 }
+
+#endif
diff --git a/src/url/url.gyp b/src/url/url.gyp
index 5102339..f83135b 100644
--- a/src/url/url.gyp
+++ b/src/url/url.gyp
@@ -88,8 +88,8 @@
       'dependencies': [
         '<(DEPTH)/base/base.gyp:base',
         '<(DEPTH)/base/base.gyp:base_i18n',
-        '<(DEPTH)/base/base.gyp:run_all_unittests',
         '<(DEPTH)/base/base.gyp:test_support_base',
+        '<(DEPTH)/testing/gmock.gyp:gmock',
         '<(DEPTH)/testing/gtest.gyp:gtest',
         '<(DEPTH)/third_party/icu/icu.gyp:icuuc',
         'url',
@@ -97,6 +97,7 @@
       'sources': [
         'gurl_unittest.cc',
         'origin_unittest.cc',
+        'run_all_unittests.cc',
         'scheme_host_port_unittest.cc',
         'url_canon_icu_unittest.cc',
         'url_canon_unittest.cc',